diff --git a/.gitignore b/.gitignore index 20a148aa..9e5aced1 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ build/ -include/config.h +deps/glslang/build_winx64/ +deps/glslang_bak/ diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..7b8ae826 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "deps/glslang"] + path = deps/glslang + url = https://github.com/KhronosGroup/glslang.git diff --git a/.travis.yml b/.travis.yml index 88c679b6..e978e943 100644 --- a/.travis.yml +++ b/.travis.yml @@ -29,4 +29,4 @@ install: script: - mkdir build - cd build - - ${DEPS_DIR}/cmake/bin/cmake -G "Unix Makefiles" .. && make \ No newline at end of file + - ${DEPS_DIR}/cmake/bin/cmake -DCONTINUOUS_INTEGRATION_RUN=ON -G "Unix Makefiles" .. && make \ No newline at end of file diff --git a/CMakeLists.txt b/CMakeLists.txt index e7a70d9f..45d49c33 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,71 +1,120 @@ -cmake_minimum_required(VERSION 2.8) +cmake_minimum_required(VERSION 2.8...4.2.0) project (Anvil) option(ANVIL_INCLUDE_WIN3264_WINDOW_SYSTEM_SUPPORT "Includes 32-/64-bit Windows window system support (Windows builds only)" ON) option(ANVIL_INCLUDE_XCB_WINDOW_SYSTEM_SUPPORT "Includes XCB window system support (Linux builds only)" ON) +option(ANVIL_INCLUDE_WAYLAND_WINDOW_SYSTEM_SUPPORT "Includes Wayland window system support (Linux builds only)" ON) +option(ANVIL_INCLUDE_HEADLESS_WINDOW_SYSTEM_SUPPORT "Includes headless window system support" ON) +option(ANVIL_LINK_EXAMPLES "Build examples showing how to use Anvil" OFF) +option(ANVIL_LINK_STATICALLY_WITH_VULKAN_LIB "Link statically with the Vulkan loader. If disabled, Anvil will load the function pointers from ANVIL_VULKAN_DYNAMIC_DLL_DEPENDENCY at VK instance creation time" ON) option(ANVIL_LINK_WITH_GLSLANG "Links with glslang, instead of spawning a new process whenever GLSL->SPIR-V conversion is required" ON) +option(ANVIL_USE_BUILT_IN_GLSLANG "Use glslang version included with Anvil. If disabled, Anvil will assume ANVIL_GLSLANG_PATH holds the path to the library's root directory." ON) +option(ANVIL_USE_BUILT_IN_VULKAN_HEADERS "Use built-in Vulkan headers. If disabled, the VK_SDK_PATH and VULKAN_SDK env vars will be assumed to hold the location where the headers can be found." ON) +if (MSVC) + set (DEFAULT_DYNAMIC_VK_DLL "vulkan-1.dll") +else() + set (DEFAULT_DYNAMIC_VK_DLL "libvulkan.so") +endif() + +if (ANVIL_USE_BUILT_IN_GLSLANG) + set(ANVIL_GLSLANG_PATH "${Anvil_SOURCE_DIR}/deps/glslang") +else() + set(ANVIL_GLSLANG_PATH "${Anvil_SOURCE_DIR}/deps/glslang" + CACHE STRING "Path to glslang directory") +endif() + +if (NOT ANVIL_LINK_STATICALLY_WITH_VULKAN_LIB) + set(ANVIL_VULKAN_DYNAMIC_DLL "${DEFAULT_DYNAMIC_VK_DLL}" + CACHE STRING "DLL to load Vulkan entrypoints from. Only used if ANVIL_LINK_STATICALLY_WITH_VULKAN_LIB is disabled; loading occurs at first Vulkan instance creation time") +endif() # Do not modify anything after this line, unless you know what you're doing.
-configure_file("include/config.h.in" "${Anvil_SOURCE_DIR}/include/config.h") -include_directories("$(Anvil_SOURCE_DIR)/deps" - "$(Anvil_SOURCE_DIR)/include") +if (NOT MSVC) + set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -D_DEBUG") +endif() -if (WIN32) - include_directories($ENV{VK_SDK_PATH}/Include - $ENV{VULKAN_SDK}/Include) - +configure_file("include/config.h.in" "include/config.h") + +if (ANVIL_LINK_STATICALLY_WITH_VULKAN_LIB AND NOT CONTINUOUS_INTEGRATION_RUN) if("${CMAKE_SIZEOF_VOID_P}" EQUAL "8") - link_directories ($ENV{VK_SDK_PATH}/Bin - $ENV{VK_SDK_PATH}/Lib - $ENV{VULKAN_SDK}/Bin - $ENV{VULKAN_SDK}/Lib) + find_library(VULKAN_LIBRARY + NAMES vulkan-1 vulkan + PATHS "$ENV{VK_SDK_PATH}/Lib" "$ENV{VK_SDK_PATH}/Bin" "$ENV{VULKAN_SDK}/Lib" "$ENV{VULKAN_SDK}/Bin" "$ENV{VULKAN_SDK}/lib") else() - link_directories ($ENV{VK_SDK_PATH}/Bin32 - $ENV{VK_SDK_PATH}/Lib32 - $ENV{VULKAN_SDK}/Bin32 - $ENV{VULKAN_SDK}/Lib32) + find_library(VULKAN_LIBRARY + NAMES vulkan-1 vulkan + PATHS "$ENV{VK_SDK_PATH}/Lib32" "$ENV{VK_SDK_PATH}/Bin32" "$ENV{VULKAN_SDK}/Lib32" "$ENV{VULKAN_SDK}/Bin32" "$ENV{VULKAN_SDK}/lib") endif() -else() - include_directories($ENV{VK_SDK_PATH}/x86_64/include - $ENV{VULKAN_SDK}/include - $ENV{VULKAN_SDK}/x86_64/include) - link_directories ($ENV{VK_SDK_PATH}/x86_64/lib - $ENV{VULKAN_SDK}/lib - $ENV{VULKAN_SDK}/x86_64/lib) endif() SET (SRC_LIST "${Anvil_SOURCE_DIR}/include/misc/memalloc_backends/backend_oneshot.h" "${Anvil_SOURCE_DIR}/include/misc/memalloc_backends/backend_vma.h" + "${Anvil_SOURCE_DIR}/include/misc/base_pipeline_create_info.h" "${Anvil_SOURCE_DIR}/include/misc/base_pipeline_manager.h" + "${Anvil_SOURCE_DIR}/include/misc/buffer_create_info.h" + "${Anvil_SOURCE_DIR}/include/misc/buffer_view_create_info.h" "${Anvil_SOURCE_DIR}/include/misc/callbacks.h" + "${Anvil_SOURCE_DIR}/include/misc/compute_pipeline_create_info.h" "${Anvil_SOURCE_DIR}/include/misc/debug.h" "${Anvil_SOURCE_DIR}/include/misc/debug_marker.h" + "${Anvil_SOURCE_DIR}/include/misc/debug_messenger_create_info.h" + "${Anvil_SOURCE_DIR}/include/misc/descriptor_pool_create_info.h" + "${Anvil_SOURCE_DIR}/include/misc/descriptor_set_create_info.h" + "${Anvil_SOURCE_DIR}/include/misc/device_create_info.h" "${Anvil_SOURCE_DIR}/include/misc/dummy_window.h" + "${Anvil_SOURCE_DIR}/include/misc/event_create_info.h" + "${Anvil_SOURCE_DIR}/include/misc/extensions.h" + "${Anvil_SOURCE_DIR}/include/misc/external_handle.h" + "${Anvil_SOURCE_DIR}/include/misc/fence_create_info.h" "${Anvil_SOURCE_DIR}/include/misc/formats.h" "${Anvil_SOURCE_DIR}/include/misc/fp16.h" - "${Anvil_SOURCE_DIR}/include/misc/glsl_to_spirv.h" + "${Anvil_SOURCE_DIR}/include/misc/framebuffer_create_info.h" + "${Anvil_SOURCE_DIR}/include/misc/graphics_pipeline_create_info.h" + "${Anvil_SOURCE_DIR}/include/misc/image_create_info.h" + "${Anvil_SOURCE_DIR}/include/misc/image_view_create_info.h" + "${Anvil_SOURCE_DIR}/include/misc/instance_create_info.h" "${Anvil_SOURCE_DIR}/include/misc/io.h" + "${Anvil_SOURCE_DIR}/include/misc/library.h" "${Anvil_SOURCE_DIR}/include/misc/memory_allocator.h" + "${Anvil_SOURCE_DIR}/include/misc/memory_block_create_info.h" + "${Anvil_SOURCE_DIR}/include/misc/mt_safety.h" "${Anvil_SOURCE_DIR}/include/misc/object_tracker.h" "${Anvil_SOURCE_DIR}/include/misc/page_tracker.h" "${Anvil_SOURCE_DIR}/include/misc/pools.h" "${Anvil_SOURCE_DIR}/include/misc/ref_counter.h" + "${Anvil_SOURCE_DIR}/include/misc/render_pass_create_info.h" + "${Anvil_SOURCE_DIR}/include/misc/rendering_surface_create_info.h" + 
"${Anvil_SOURCE_DIR}/include/misc/sampler_create_info.h" + "${Anvil_SOURCE_DIR}/include/misc/sampler_ycbcr_conversion_create_info.h" + "${Anvil_SOURCE_DIR}/include/misc/semaphore_create_info.h" "${Anvil_SOURCE_DIR}/include/misc/shader_module_cache.h" + "${Anvil_SOURCE_DIR}/include/misc/struct_chainer.h" + "${Anvil_SOURCE_DIR}/include/misc/swapchain_create_info.h" "${Anvil_SOURCE_DIR}/include/misc/time.h" "${Anvil_SOURCE_DIR}/include/misc/types.h" + "${Anvil_SOURCE_DIR}/include/misc/types_classes.h" + "${Anvil_SOURCE_DIR}/include/misc/types_enums.h" + "${Anvil_SOURCE_DIR}/include/misc/types_macro.h" + "${Anvil_SOURCE_DIR}/include/misc/types_struct.h" + "${Anvil_SOURCE_DIR}/include/misc/types_utils.h" + "${Anvil_SOURCE_DIR}/include/misc/vulkan.h" "${Anvil_SOURCE_DIR}/include/misc/window.h" "${Anvil_SOURCE_DIR}/include/misc/window_factory.h" + "${Anvil_SOURCE_DIR}/include/misc/window_generic.h" "${Anvil_SOURCE_DIR}/include/wrappers/buffer.h" "${Anvil_SOURCE_DIR}/include/wrappers/buffer_view.h" "${Anvil_SOURCE_DIR}/include/wrappers/command_buffer.h" "${Anvil_SOURCE_DIR}/include/wrappers/command_pool.h" "${Anvil_SOURCE_DIR}/include/wrappers/compute_pipeline_manager.h" + "${Anvil_SOURCE_DIR}/include/wrappers/debug_messenger.h" "${Anvil_SOURCE_DIR}/include/wrappers/descriptor_pool.h" "${Anvil_SOURCE_DIR}/include/wrappers/descriptor_set.h" "${Anvil_SOURCE_DIR}/include/wrappers/descriptor_set_group.h" "${Anvil_SOURCE_DIR}/include/wrappers/descriptor_set_layout.h" + "${Anvil_SOURCE_DIR}/include/wrappers/descriptor_set_layout_manager.h" + "${Anvil_SOURCE_DIR}/include/wrappers/descriptor_update_template.h" "${Anvil_SOURCE_DIR}/include/wrappers/device.h" "${Anvil_SOURCE_DIR}/include/wrappers/event.h" "${Anvil_SOURCE_DIR}/include/wrappers/fence.h" @@ -84,38 +133,70 @@ SET (SRC_LIST "${Anvil_SOURCE_DIR}/include/misc/memalloc_backends/backend_onesho "${Anvil_SOURCE_DIR}/include/wrappers/render_pass.h" "${Anvil_SOURCE_DIR}/include/wrappers/rendering_surface.h" "${Anvil_SOURCE_DIR}/include/wrappers/sampler.h" + "${Anvil_SOURCE_DIR}/include/wrappers/sampler_ycbcr_conversion.h" "${Anvil_SOURCE_DIR}/include/wrappers/semaphore.h" "${Anvil_SOURCE_DIR}/include/wrappers/shader_module.h" "${Anvil_SOURCE_DIR}/include/wrappers/swapchain.h" "${Anvil_SOURCE_DIR}/src/misc/memalloc_backends/backend_oneshot.cpp" "${Anvil_SOURCE_DIR}/src/misc/memalloc_backends/backend_vma.cpp" + "${Anvil_SOURCE_DIR}/src/misc/base_pipeline_create_info.cpp" "${Anvil_SOURCE_DIR}/src/misc/base_pipeline_manager.cpp" + "${Anvil_SOURCE_DIR}/src/misc/buffer_create_info.cpp" + "${Anvil_SOURCE_DIR}/src/misc/buffer_view_create_info.cpp" + "${Anvil_SOURCE_DIR}/src/misc/compute_pipeline_create_info.cpp" "${Anvil_SOURCE_DIR}/src/misc/debug.cpp" "${Anvil_SOURCE_DIR}/src/misc/debug_marker.cpp" + "${Anvil_SOURCE_DIR}/src/misc/debug_messenger_create_info.cpp" + "${Anvil_SOURCE_DIR}/src/misc/descriptor_pool_create_info.cpp" + "${Anvil_SOURCE_DIR}/src/misc/descriptor_set_create_info.cpp" + "${Anvil_SOURCE_DIR}/src/misc/device_create_info.cpp" "${Anvil_SOURCE_DIR}/src/misc/dummy_window.cpp" + "${Anvil_SOURCE_DIR}/src/misc/external_handle.cpp" + "${Anvil_SOURCE_DIR}/src/misc/event_create_info.cpp" + "${Anvil_SOURCE_DIR}/src/misc/fence_create_info.cpp" "${Anvil_SOURCE_DIR}/src/misc/formats.cpp" "${Anvil_SOURCE_DIR}/src/misc/fp16.cpp" - "${Anvil_SOURCE_DIR}/src/misc/glsl_to_spirv.cpp" + "${Anvil_SOURCE_DIR}/src/misc/framebuffer_create_info.cpp" + "${Anvil_SOURCE_DIR}/src/misc/graphics_pipeline_create_info.cpp" + 
"${Anvil_SOURCE_DIR}/src/misc/image_create_info.cpp" + "${Anvil_SOURCE_DIR}/src/misc/image_view_create_info.cpp" + "${Anvil_SOURCE_DIR}/src/misc/instance_create_info.cpp" "${Anvil_SOURCE_DIR}/src/misc/io.cpp" + "${Anvil_SOURCE_DIR}/src/misc/library.cpp" "${Anvil_SOURCE_DIR}/src/misc/memory_allocator.cpp" + "${Anvil_SOURCE_DIR}/src/misc/memory_block_create_info.cpp" "${Anvil_SOURCE_DIR}/src/misc/object_tracker.cpp" "${Anvil_SOURCE_DIR}/src/misc/page_tracker.cpp" "${Anvil_SOURCE_DIR}/src/misc/pools.cpp" + "${Anvil_SOURCE_DIR}/src/misc/render_pass_create_info.cpp" + "${Anvil_SOURCE_DIR}/src/misc/rendering_surface_create_info.cpp" + "${Anvil_SOURCE_DIR}/src/misc/sampler_create_info.cpp" + "${Anvil_SOURCE_DIR}/src/misc/sampler_ycbcr_conversion_create_info.cpp" + "${Anvil_SOURCE_DIR}/src/misc/semaphore_create_info.cpp" "${Anvil_SOURCE_DIR}/src/misc/shader_module_cache.cpp" + "${Anvil_SOURCE_DIR}/src/misc/swapchain_create_info.cpp" "${Anvil_SOURCE_DIR}/src/misc/time.cpp" "${Anvil_SOURCE_DIR}/src/misc/types.cpp" + "${Anvil_SOURCE_DIR}/src/misc/types_classes.cpp" + "${Anvil_SOURCE_DIR}/src/misc/types_struct.cpp" + "${Anvil_SOURCE_DIR}/src/misc/types_utils.cpp" + "${Anvil_SOURCE_DIR}/src/misc/vulkan.cpp" "${Anvil_SOURCE_DIR}/src/misc/window.cpp" "${Anvil_SOURCE_DIR}/src/misc/window_factory.cpp" + "${Anvil_SOURCE_DIR}/src/misc/window_generic.cpp" "${Anvil_SOURCE_DIR}/src/wrappers/buffer.cpp" "${Anvil_SOURCE_DIR}/src/wrappers/buffer_view.cpp" "${Anvil_SOURCE_DIR}/src/wrappers/command_buffer.cpp" "${Anvil_SOURCE_DIR}/src/wrappers/command_pool.cpp" "${Anvil_SOURCE_DIR}/src/wrappers/compute_pipeline_manager.cpp" + "${Anvil_SOURCE_DIR}/src/wrappers/debug_messenger.cpp" "${Anvil_SOURCE_DIR}/src/wrappers/descriptor_pool.cpp" "${Anvil_SOURCE_DIR}/src/wrappers/descriptor_set.cpp" "${Anvil_SOURCE_DIR}/src/wrappers/descriptor_set_group.cpp" "${Anvil_SOURCE_DIR}/src/wrappers/descriptor_set_layout.cpp" + "${Anvil_SOURCE_DIR}/src/wrappers/descriptor_set_layout_manager.cpp" + "${Anvil_SOURCE_DIR}/src/wrappers/descriptor_update_template.cpp" "${Anvil_SOURCE_DIR}/src/wrappers/device.cpp" "${Anvil_SOURCE_DIR}/src/wrappers/event.cpp" "${Anvil_SOURCE_DIR}/src/wrappers/fence.cpp" @@ -134,14 +215,22 @@ SET (SRC_LIST "${Anvil_SOURCE_DIR}/include/misc/memalloc_backends/backend_onesho "${Anvil_SOURCE_DIR}/src/wrappers/render_pass.cpp" "${Anvil_SOURCE_DIR}/src/wrappers/rendering_surface.cpp" "${Anvil_SOURCE_DIR}/src/wrappers/sampler.cpp" + "${Anvil_SOURCE_DIR}/src/wrappers/sampler_ycbcr_conversion.cpp" "${Anvil_SOURCE_DIR}/src/wrappers/semaphore.cpp" "${Anvil_SOURCE_DIR}/src/wrappers/shader_module.cpp" "${Anvil_SOURCE_DIR}/src/wrappers/swapchain.cpp" # Vulkan Memory Allocator dependency "${Anvil_SOURCE_DIR}/deps/VulkanMemoryAllocator/vk_mem_alloc.h" +# <-- ) +if (ANVIL_LINK_WITH_GLSLANG) + list(APPEND SRC_LIST + "${Anvil_SOURCE_DIR}/include/misc/glsl_to_spirv.h" + "${Anvil_SOURCE_DIR}/src/misc/glsl_to_spirv.cpp") +endif() + # prepare source code files for different OS if(WIN32) if (ANVIL_INCLUDE_WIN3264_WINDOW_SYSTEM_SUPPORT) @@ -150,8 +239,7 @@ if(WIN32) endif() else() if (ANVIL_INCLUDE_XCB_WINDOW_SYSTEM_SUPPORT) - list (APPEND SRC_LIST - "${Anvil_SOURCE_DIR}/include/misc/xcb_loader.h" + list (APPEND SRC_LIST "${Anvil_SOURCE_DIR}/include/misc/xcb_loader.h" "${Anvil_SOURCE_DIR}/src/misc/xcb_loader.cpp" "${Anvil_SOURCE_DIR}/include/misc/window_xcb.h" "${Anvil_SOURCE_DIR}/src/misc/window_xcb.cpp") @@ -160,31 +248,62 @@ endif() add_library(Anvil STATIC ${SRC_LIST}) +target_compile_features(Anvil PRIVATE cxx_std_20) + 
+target_include_directories(Anvil PUBLIC "${Anvil_BINARY_DIR}/include" + "${Anvil_SOURCE_DIR}" + "${Anvil_SOURCE_DIR}/deps" + "${ANVIL_GLSLANG_PATH}" + "${Anvil_SOURCE_DIR}/include") + +# Include the Vulkan header. +if (ANVIL_USE_BUILT_IN_VULKAN_HEADERS) + target_include_directories(Anvil PUBLIC "${Anvil_SOURCE_DIR}/include") +else() + if (WIN32) + target_include_directories(Anvil PUBLIC $ENV{VK_SDK_PATH}/Include + $ENV{VULKAN_SDK}/Include) + else() + target_include_directories(Anvil PUBLIC $ENV{VK_SDK_PATH}/x86_64/include + $ENV{VULKAN_SDK}/include + $ENV{VULKAN_SDK}/x86_64/include) + endif() +endif() + if (WIN32) if (ANVIL_LINK_WITH_GLSLANG) - target_link_libraries(Anvil glslang HLSL OGLCompiler OSDependent SPIRV vulkan-1.lib) + target_link_libraries(Anvil PUBLIC glslang OSDependent SPIRV ${VULKAN_LIBRARY}) else() - target_link_libraries(Anvil vulkan-1.lib) + target_link_libraries(Anvil PUBLIC ${VULKAN_LIBRARY}) endif() else() if (ANVIL_LINK_WITH_GLSLANG) - target_link_libraries(Anvil glslang HLSL OGLCompiler OSDependent SPIRV vulkan pthread) + target_link_libraries(Anvil PUBLIC glslang OSDependent SPIRV ${VULKAN_LIBRARY} pthread) else() - target_link_libraries(Anvil vulkan pthread) + target_link_libraries(Anvil PUBLIC ${VULKAN_LIBRARY} pthread) endif() endif() if (NOT MSVC) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fpermissive -std=c++11") else() - set(CMAKE_CXX_FLAGS "$(CMAKE_CXX_FLAGS) /EHsc /MP") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /EHsc /MP") endif() -# Add dependendencies -if (WIN32) - add_subdirectory("deps\\glslang") -else() - add_subdirectory("deps/glslang") +if (ANVIL_LINK_WITH_GLSLANG) + set(BUILD_TESTING OFF CACHE BOOL ".." FORCE) + set(ENABLE_HLSL OFF CACHE BOOL ".." FORCE) + set(ENABLE_OPT OFF CACHE BOOL ".." FORCE) + + add_subdirectory("${ANVIL_GLSLANG_PATH}") +endif() + +if (ANVIL_LINK_EXAMPLES) + add_subdirectory("examples/DynamicBuffers") + add_subdirectory("examples/MultiViewport") + add_subdirectory("examples/OcclusionQuery") + add_subdirectory("examples/OutOfOrderRasterization") + add_subdirectory("examples/PushConstants") endif() # Enable level-4 warnings @@ -196,8 +315,5 @@ if (CMAKE_COMPILER_IS_GNUCC) target_compile_options(Anvil PRIVATE "-Wall" "-Werror") endif (CMAKE_COMPILER_IS_GNUCC) if (MSVC) - target_compile_options(Anvil PRIVATE "/W4" "/WX" "/wd4191" "/wd4350" "/wd4668" "/wd4820" "/wd4514" "/wd4061" "/wd4710" "/wd4711" "/wd4548" "/wd4555") + target_compile_options(Anvil PRIVATE "/W4" "/wd4191" "/wd4350" "/wd4668" "/wd4820" "/wd4514" "/wd4061" "/wd4710" "/wd4711" "/wd4548" "/wd4555") endif (MSVC) - -# Add all demo sub-projects -MESSAGE(STATUS "Using Vulkan SDK at either " $ENV{VK_SDK_PATH} " or " $ENV{VULKAN_SDK}) diff --git a/README.md b/README.md index 3a648050..60a4f1e6 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Anvil -This is a README file for Anvil v1.3.1, a framework for Vulkan™. +This is a README file for Anvil, a framework for Vulkan™. The README is organized as a FAQ. |Toolchain|Status| @@ -15,8 +15,9 @@ MIT. See `LICENSE.txt`. What is this? ------ -Anvil is a framework for Vulkan v1.0, which we have been using internally for -quite some time now, in order to develop various Vulkan applications. +Anvil is a framework for Vulkan v1.0 and v1.1, which we have been using internally for +quite some time now, in order to develop various Vulkan applications. This guarantees +that the vast majority of the functionality exposed by the library is regularly tested.
The general idea we started from was to have a cross-platform tool, which would reduce the amount of time required to write portable Vulkan-based apps from @@ -34,16 +35,10 @@ Anvil was designed with the following goals in mind: using Vulkan, without hiding the API behind thick abstraction layers. - Simplify validation layer usage. All you have to do is specify which function you would like to be called if a debug call-back is made, and that's it. -- Provide a reasonable level of deferred approach to baking objects, in order - to emulate mutability of certain Vulkan objects to a certain degree. Note - that this is an optional feature - you are free to request baking of any - object at any time, in ordre to ensure it will never happen at draw time. - Provide a simple cross-platform implementation for areas unrelated to Vulkan (eg. window management) - -Anvil can be thought of as a toolbox. You are free to use any of its parts you -like, and write your own code for everything else. Any Anvil wrapper for an -actual Vulkan object can be queried to retrieve the raw Vulkan handle. +- Provide a simple way (with optional flexibility) to manage memory allocations + and memory bindings. Anvil is **not** the right choice for developers who do not have a reasonable understanding of how Vulkan works. Its goal is not to provide a glBegin/glEnd-like @@ -61,8 +56,7 @@ What are Anvil's requirements? In order to build Anvil, you will need the following software: - C++11 compiler. - CMake -- Vulkan RT 1.0.39.1 or newer. -- Vulkan SDK 1.0.13.0 or newer. +- Vulkan SDK (the latest available version is highly recommended) To build Anvil on Linux, additional packages should be installed: - libxcb-keysyms (For ubuntu, use "apt-get install libxcb-keysyms1-dev") @@ -78,54 +72,9 @@ implementation and on AMDGPU-PRO Vulkan implementation on Linux. What can it do? ------ -This version of Anvil provides: - -* Low-level ref-counted wrappers for the following Vulkan v1.0 features: - - Buffer objects (sparse and non-sparse) - - Buffer views - - Command buffers - - Command pools - - Descriptor pools - - Descriptor sets - - Descriptor set layouts - - Devices - - Events - - Fences - - Framebuffers - - Image objects (sparse, sparse bindings, PRT) - - Image views - - Instances - - Physical devices - - Pipeline cache - - Pipeline layouts - - Query pools - - Queues - - Renderpasses - - Rendering surfaces - - Samplers - - Semaphores - - Shader modules - - Swapchains - -* More complicated wrappers: - - Compute / graphics pipeline managers: provide a way to create regular/derivative pipelines with automatic pipeline layout re-use. - - Descriptor set groups: simplify descriptor set configuration, management and updates. - - Memory allocator: implements a memory allocator with two back-ends: one that is more appropriate for one-shot - mem alloc requests, and one that can be used for dynamic memory allocation purposes. - - Pipeline layout manager: caches all created pipeline layouts to avoid instantiating duplicate layout instances. - -* Miscellaneous functionality: - - Debug markers: names and tags can be optionally assigned to any of the created objects. If VK_EXT_debug_marker is enabled, these - will be automatically patched through to layers that implement the extension on the running platform. - - Format info reflection: provides information about format properties. - - GLSL -> SPIR-V conversion: provides glslang-based GLSL->SPIR-V conversion. The conversion is performed in run-time. Disassembly can also be retrieved, if needed. 
- - GLSL -> SPIR-V conversion cache: re-uses SPIR-V blobs throughout Instance's lifetime if GLSL->SPIR-V conversion is requested for GLSL source code which has already - been converted to SPIR-V before. - - IO: provides a number of functions to simplify directory- and file-related operations. - - Object tracker: tracks object allocations and helps detect object leakage. - - Shader module cache: re-uses shader module instances across Instance lifetime. Improve execution time if your application instantiates shader modules with - exactly the same configuration during its runtime. - - Time: provides a way to query current time using high-performance system queries. +Anvil provides full support for functionality exposed in Vulkan 1.0 and Vulkan 1.1. +We also do our best to keep it up to date with any extensions our Vulkan implementations +expose. We are planning to keep adding new features in the future. @@ -143,12 +92,7 @@ OutOfOrderRasterization and the other example applications are located in the What are the known issues? ------ -Just a handful: -* All command queues are currently assigned a priority of 1.0. -* DescriptorSetGroup wrapper does not leverage the flag at - baking time. - -These will be addressed at some point in the future. +Please see the "Issues" tab for details. Who made it? ------ diff --git a/deps/VulkanMemoryAllocator/vk_mem_alloc.h b/deps/VulkanMemoryAllocator/vk_mem_alloc.h index ef00fb4a..24792855 100644 --- a/deps/VulkanMemoryAllocator/vk_mem_alloc.h +++ b/deps/VulkanMemoryAllocator/vk_mem_alloc.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2026 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -25,3883 +25,19851 @@ /** \mainpage Vulkan Memory Allocator -Version 1.0, 2017-05-10 - -Members grouped: see Modules. - -All members: see vk_mem_alloc.h. - -\section problem Problem Statement +Version 3.4.0-development + +Copyright (c) 2017-2026 Advanced Micro Devices, Inc. All rights reserved.
\n +License: MIT \n +See also: [product page on GPUOpen](https://gpuopen.com/vulkan-memory-allocator/), +[repository on GitHub](https://github.com/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator) + + +API documentation divided into groups: [Topics](topics.html) + +General documentation chapters: + +- \subpage faq +- \subpage quick_start + - [Project setup](@ref quick_start_project_setup) + - [Initialization](@ref quick_start_initialization) + - [Resource allocation](@ref quick_start_resource_allocation) +- \subpage choosing_memory_type + - [Usage](@ref choosing_memory_type_usage) + - [Required and preferred flags](@ref choosing_memory_type_required_preferred_flags) + - [Explicit memory types](@ref choosing_memory_type_explicit_memory_types) + - [Custom memory pools](@ref choosing_memory_type_custom_memory_pools) + - [Dedicated allocations](@ref choosing_memory_type_dedicated_allocations) +- \subpage memory_mapping + - [Copy functions](@ref memory_mapping_copy_functions) + - [Mapping functions](@ref memory_mapping_mapping_functions) + - [Persistently mapped memory](@ref memory_mapping_persistently_mapped_memory) + - [Cache flush and invalidate](@ref memory_mapping_cache_control) +- \subpage staying_within_budget + - [Querying for budget](@ref staying_within_budget_querying_for_budget) + - [Controlling memory usage](@ref staying_within_budget_controlling_memory_usage) +- \subpage resource_aliasing +- \subpage custom_memory_pools + - [Choosing memory type index](@ref custom_memory_pools_MemTypeIndex) + - [When not to use custom pools](@ref custom_memory_pools_when_not_use) + - [Linear allocation algorithm](@ref linear_algorithm) + - [Free-at-once](@ref linear_algorithm_free_at_once) + - [Stack](@ref linear_algorithm_stack) + - [Double stack](@ref linear_algorithm_double_stack) + - [Ring buffer](@ref linear_algorithm_ring_buffer) +- \subpage defragmentation +- \subpage statistics + - [Numeric statistics](@ref statistics_numeric_statistics) + - [JSON dump](@ref statistics_json_dump) +- \subpage allocation_annotation + - [Allocation user data](@ref allocation_user_data) + - [Allocation names](@ref allocation_names) +- \subpage virtual_allocator +- \subpage debugging_memory_usage + - [Memory initialization](@ref debugging_memory_usage_initialization) + - [Margins](@ref debugging_memory_usage_margins) + - [Corruption detection](@ref debugging_memory_usage_corruption_detection) + - [Leak detection features](@ref debugging_memory_usage_leak_detection) +- \subpage other_api_interop + - [Exporting memory](@ref other_api_interop_exporting_memory) + - [Importing memory](@ref other_api_interop_importing_memory) +- \subpage usage_patterns + - [GPU-only resource](@ref usage_patterns_gpu_only) + - [Staging copy for upload](@ref usage_patterns_staging_copy_upload) + - [Readback](@ref usage_patterns_readback) + - [Advanced data uploading](@ref usage_patterns_advanced_data_uploading) + - [Other use cases](@ref usage_patterns_other_use_cases) +- \subpage configuration + - [Pointers to Vulkan functions](@ref config_Vulkan_functions) + - [Custom host memory allocator](@ref custom_memory_allocator) + - [Device memory allocation callbacks](@ref allocation_callbacks) + - [Device heap memory limit](@ref heap_memory_limit) +- Extension support + - \subpage vk_khr_dedicated_allocation + - \subpage enabling_buffer_device_address + - \subpage vk_ext_memory_priority + - \subpage vk_amd_device_coherent_memory +- \subpage general_considerations + - [Thread safety](@ref general_considerations_thread_safety) + - [Versioning 
and compatibility](@ref general_considerations_versioning_and_compatibility) + - [Validation layer warnings](@ref general_considerations_validation_layer_warnings) + - [Allocation algorithm](@ref general_considerations_allocation_algorithm) + - [Features not supported](@ref general_considerations_features_not_supported) + +\defgroup group_init Library initialization + +\brief API elements related to the initialization and management of the entire library, especially #VmaAllocator object. + +\defgroup group_alloc Memory allocation + +\brief API elements related to the allocation, deallocation, and management of Vulkan memory, buffers, images. +Most basic ones being: vmaCreateBuffer(), vmaCreateImage(). + +\defgroup group_virtual Virtual allocator + +\brief API elements related to the mechanism of \ref virtual_allocator - using the core allocation algorithm +for user-defined purpose without allocating any real GPU memory. + +\defgroup group_stats Statistics + +\brief API elements that query current status of the allocator, from memory usage, budget, to full dump of the internal state in JSON format. +See documentation chapter: \ref statistics. +*/ -Memory allocation and resource (buffer and image) creation in Vulkan is -difficult (comparing to older graphics API-s, like D3D11 or OpenGL) for several -reasons: -- It requires a lot of boilerplate code, just like everything else in Vulkan, - because it is a low-level and high-performance API. -- There is additional level of indirection: VkDeviceMemory is allocated - separately from creating VkBuffer/VkImage and they must be bound together. The - binding cannot be changed later - resource must be recreated. -- Driver must be queried for supported memory heaps and memory types. Different - IHV-s provide different types of it. -- Resources that don't fit in VRAM are not automatically evicted to RAM. - Developer must handle out-of-memory errors on his own. -- It is recommended practice to allocate bigger chunks of memory and assign - parts of them to particular resources. +#ifdef __cplusplus +extern "C" { +#endif -\section features Features +#if !defined(VULKAN_H_) +#include +#endif -This library is helps game developers to manage memory allocations and resource -creation by offering some higher-level functions. Features of the library could -be divided into several layers, low level to high level: +#define VMA_VERSION (VK_MAKE_VERSION(3, 4, 0)) + +#if !defined(VMA_VULKAN_VERSION) + #if defined(VK_VERSION_1_4) + #define VMA_VULKAN_VERSION 1004000 + #elif defined(VK_VERSION_1_3) + #define VMA_VULKAN_VERSION 1003000 + #elif defined(VK_VERSION_1_2) + #define VMA_VULKAN_VERSION 1002000 + #elif defined(VK_VERSION_1_1) + #define VMA_VULKAN_VERSION 1001000 + #else + #define VMA_VULKAN_VERSION 1000000 + #endif +#endif --# Functions that help to choose correct and optimal memory type based on - intended usage of the memory. - - Required or preferred traits of the memory are expressed using higher-level - description comparing to Vulkan flags. --# Functions that allocate memory blocks, reserve and return parts of them - (VkDeviceMemory + offset + size) to the user. - - Library keeps track of allocated memory blocks, used and unused ranges - inside them, finds best matching unused ranges for new allocations, takes - all the rules of alignment into consideration. --# Functions that can create an image/buffer, allocate memory for it and bind - them together - all in one call. 
+#if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS + extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr; + extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr; + extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties; + extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties; + extern PFN_vkAllocateMemory vkAllocateMemory; + extern PFN_vkFreeMemory vkFreeMemory; + extern PFN_vkMapMemory vkMapMemory; + extern PFN_vkUnmapMemory vkUnmapMemory; + extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges; + extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges; + extern PFN_vkBindBufferMemory vkBindBufferMemory; + extern PFN_vkBindImageMemory vkBindImageMemory; + extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements; + extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements; + extern PFN_vkCreateBuffer vkCreateBuffer; + extern PFN_vkDestroyBuffer vkDestroyBuffer; + extern PFN_vkCreateImage vkCreateImage; + extern PFN_vkDestroyImage vkDestroyImage; + extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer; + #if VMA_VULKAN_VERSION >= 1001000 + extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2; + extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2; + extern PFN_vkBindBufferMemory2 vkBindBufferMemory2; + extern PFN_vkBindImageMemory2 vkBindImageMemory2; + extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2; + #endif // #if VMA_VULKAN_VERSION >= 1001000 +#endif // #if defined(__ANDROID__) && VMA_STATIC_VULKAN_FUNCTIONS && VK_NO_PROTOTYPES + +#if !defined(VMA_DEDICATED_ALLOCATION) + #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation + #define VMA_DEDICATED_ALLOCATION 1 + #else + #define VMA_DEDICATED_ALLOCATION 0 + #endif +#endif -\section prequisites Prequisites +#if !defined(VMA_BIND_MEMORY2) + #if VK_KHR_bind_memory2 + #define VMA_BIND_MEMORY2 1 + #else + #define VMA_BIND_MEMORY2 0 + #endif +#endif -- Self-contained C++ library in single header file. No external dependencies - other than standard C and C++ library and of course Vulkan. -- Public interface in C, in same convention as Vulkan API. Implementation in - C++. -- Interface documented using Doxygen-style comments. -- Platform-independent, but developed and tested on Windows using Visual Studio. -- Error handling implemented by returning VkResult error codes - same way as in - Vulkan. +#if !defined(VMA_MEMORY_BUDGET) + #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000) + #define VMA_MEMORY_BUDGET 1 + #else + #define VMA_MEMORY_BUDGET 0 + #endif +#endif -\section quick_start Quick Start +// Defined to 1 when VK_KHR_buffer_device_address device extension or equivalent core Vulkan 1.2 feature is defined in its headers. +#if !defined(VMA_BUFFER_DEVICE_ADDRESS) + #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000 + #define VMA_BUFFER_DEVICE_ADDRESS 1 + #else + #define VMA_BUFFER_DEVICE_ADDRESS 0 + #endif +#endif -In your project code: +// Defined to 1 when VK_EXT_memory_priority device extension is defined in Vulkan headers. +#if !defined(VMA_MEMORY_PRIORITY) + #if VK_EXT_memory_priority + #define VMA_MEMORY_PRIORITY 1 + #else + #define VMA_MEMORY_PRIORITY 0 + #endif +#endif --# Include "vk_mem_alloc.h" file wherever you want to use the library. --# In exacly one C++ file define following macro before include to build library - implementation. 
+// Defined to 1 when VK_KHR_maintenance4 device extension is defined in Vulkan headers. +#if !defined(VMA_KHR_MAINTENANCE4) + #if VK_KHR_maintenance4 + #define VMA_KHR_MAINTENANCE4 1 + #else + #define VMA_KHR_MAINTENANCE4 0 + #endif +#endif +// Defined to 1 when VK_KHR_maintenance5 device extension is defined in Vulkan headers. +#if !defined(VMA_KHR_MAINTENANCE5) + #if VK_KHR_maintenance5 + #define VMA_KHR_MAINTENANCE5 1 + #else + #define VMA_KHR_MAINTENANCE5 0 + #endif +#endif - #define VMA_IMPLEMENTATION - #include "vk_mem_alloc.h" -At program startup: +// Defined to 1 when VK_KHR_external_memory device extension is defined in Vulkan headers. +#if !defined(VMA_EXTERNAL_MEMORY) + #if VK_KHR_external_memory + #define VMA_EXTERNAL_MEMORY 1 + #else + #define VMA_EXTERNAL_MEMORY 0 + #endif +#endif --# Initialize Vulkan to have VkPhysicalDevice and VkDevice object. --# Fill VmaAllocatorCreateInfo structure and create VmaAllocator object by - calling vmaCreateAllocator(). +// Defined to 1 when VK_KHR_external_memory_win32 device extension is defined in Vulkan headers. +#if !defined(VMA_EXTERNAL_MEMORY_WIN32) + #if VK_KHR_external_memory_win32 + #define VMA_EXTERNAL_MEMORY_WIN32 1 + #else + #define VMA_EXTERNAL_MEMORY_WIN32 0 + #endif +#endif +// Define these macros to decorate all public functions with additional code, +// before and after returned type, appropriately. This may be useful for +// exporting the functions when compiling VMA as a separate library. Example: +// #define VMA_CALL_PRE __declspec(dllexport) +// #define VMA_CALL_POST __cdecl +#ifndef VMA_CALL_PRE + #define VMA_CALL_PRE +#endif +#ifndef VMA_CALL_POST + #define VMA_CALL_POST +#endif - VmaAllocatorCreateInfo allocatorInfo = {}; - allocatorInfo.physicalDevice = physicalDevice; - allocatorInfo.device = device; +// Define this macro to decorate pNext pointers with an attribute specifying the Vulkan +// structure that will be extended via the pNext chain. +#ifndef VMA_EXTENDS_VK_STRUCT + #define VMA_EXTENDS_VK_STRUCT(vkStruct) +#endif - VmaAllocator allocator; - vmaCreateAllocator(&allocatorInfo, &allocator); +// Define this macro to decorate pointers with an attribute specifying the +// length of the array they point to if they are not null. +// +// The length may be one of +// - The name of another parameter in the argument list where the pointer is declared +// - The name of another member in the struct where the pointer is declared +// - The name of a member of a struct type, meaning the value of that member in +// the context of the call. For example +// VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount"), +// this means the number of memory heaps available in the device associated +// with the VmaAllocator being dealt with. +#ifndef VMA_LEN_IF_NOT_NULL + #define VMA_LEN_IF_NOT_NULL(len) +#endif -When you want to create a buffer or image: +// The VMA_NULLABLE macro is defined to be _Nullable when compiling with Clang. +// see: https://clang.llvm.org/docs/AttributeReference.html#nullable +#ifndef VMA_NULLABLE + #ifdef __clang__ + #define VMA_NULLABLE _Nullable + #else + #define VMA_NULLABLE + #endif +#endif --# Fill VkBufferCreateInfo / VkImageCreateInfo structure. --# Fill VmaMemoryRequirements structure. --# Call vmaCreateBuffer() / vmaCreateImage() to get VkBuffer/VkImage with memory - already allocated and bound to it. +// The VMA_NOT_NULL macro is defined to be _Nonnull when compiling with Clang. 
+// see: https://clang.llvm.org/docs/AttributeReference.html#nonnull +#ifndef VMA_NOT_NULL + #ifdef __clang__ + #define VMA_NOT_NULL _Nonnull + #else + #define VMA_NOT_NULL + #endif +#endif +// If non-dispatchable handles are represented as pointers then we can give +// then nullability annotations +#ifndef VMA_NOT_NULL_NON_DISPATCHABLE + #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) + #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL + #else + #define VMA_NOT_NULL_NON_DISPATCHABLE + #endif +#endif - VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; - bufferInfo.size = myBufferSize; - bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; +#ifndef VMA_NULLABLE_NON_DISPATCHABLE + #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) + #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE + #else + #define VMA_NULLABLE_NON_DISPATCHABLE + #endif +#endif - VmaMemoryRequirements memReq = {}; - memReq.usage = VMA_MEMORY_USAGE_GPU_ONLY; +#ifndef VMA_STATS_STRING_ENABLED + #define VMA_STATS_STRING_ENABLED 1 +#endif - VkBuffer buffer; - vmaCreateBuffer(allocator, &bufferInfo, &memReq, &buffer, nullptr, nullptr); +//////////////////////////////////////////////////////////////////////////////// +//////////////////////////////////////////////////////////////////////////////// +// +// INTERFACE +// +//////////////////////////////////////////////////////////////////////////////// +//////////////////////////////////////////////////////////////////////////////// -When no longer needed, destroy your buffer or image using vmaDestroyBuffer() / vmaDestroyImage(). -This function would also free memory bound to it. +// Sections for managing code placement in file, only for development purposes e.g. for convenient folding inside an IDE. +#ifndef _VMA_ENUM_DECLARATIONS +/** +\addtogroup group_init +@{ +*/ - vmaDestroyBuffer(allocator, buffer); +/// Flags for created #VmaAllocator. +typedef enum VmaAllocatorCreateFlagBits +{ + /** \brief Allocator and all objects created from it will not be synchronized internally, so you must guarantee they are used from only one thread at a time or synchronized externally by you. -\section configuration Configuration + Using this flag may increase performance because internal mutexes are not used. + */ + VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT = 0x00000001, + /** \brief Enables usage of VK_KHR_dedicated_allocation extension. -Set VMA_STATS_STRING_ENABLED macro in vk_mem_alloc.h to 0 or 1 to disable/enable -compilation of code for dumping internal allocator state to string in JSON -format. + The flag works only if VmaAllocatorCreateInfo::vulkanApiVersion `== VK_API_VERSION_1_0`. + When it is `VK_API_VERSION_1_1`, the flag is ignored because the extension has been promoted to Vulkan 1.1. -Please check "CONFIGURATION" section in vk_mem_alloc.cpp file to find macros and -other definitions that you can change to connect the library to your own -implementation of basic facilities like assert, min and max functions, mutex -etc. C++ STL is used by default, but changing these allows you to get rid of any -STL usage if you want, as many game developers tend to do. 
+ Using this extension will automatically allocate dedicated blocks of memory for + some buffers and images instead of suballocating place for them out of bigger + memory blocks (as if you explicitly used #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT + flag) when it is recommended by the driver. It may improve performance on some + GPUs. -\section custom_memory_allocator Custom memory allocator + You may set this flag only if you found out that following device extensions are + supported, you enabled them while creating Vulkan device passed as + VmaAllocatorCreateInfo::device, and you want them to be used internally by this + library: -You can use custom memory allocator by filling optional member -VmaAllocatorCreateInfo::pAllocationCallbacks. These functions will be passed to -Vulkan, as well as used by the library itself to make any CPU-side allocations. + - VK_KHR_get_memory_requirements2 (device extension) + - VK_KHR_dedicated_allocation (device extension) -\section thread_safety Thread safety + When this flag is set, you can experience following warnings reported by Vulkan + validation layer. You can ignore them. -All calls to functions that take VmaAllocator as first parameter are safe to -call from multiple threads simultaneously, synchronized internally when needed. -*/ + > vkBindBufferMemory(): Binding memory to buffer 0x2d but vkGetBufferMemoryRequirements() has not been called on that buffer. + */ + VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT = 0x00000002, + /** + Enables usage of VK_KHR_bind_memory2 extension. -#include + The flag works only if VmaAllocatorCreateInfo::vulkanApiVersion `== VK_API_VERSION_1_0`. + When it is `VK_API_VERSION_1_1`, the flag is ignored because the extension has been promoted to Vulkan 1.1. -//////////////////////////////////////////////////////////////////////////////// -/** \defgroup general General -@{ -*/ + You may set this flag only if you found out that this device extension is supported, + you enabled it while creating Vulkan device passed as VmaAllocatorCreateInfo::device, + and you want it to be used internally by this library. -VK_DEFINE_HANDLE(VmaAllocator) + The extension provides functions `vkBindBufferMemory2KHR` and `vkBindImageMemory2KHR`, + which allow to pass a chain of `pNext` structures while binding. + This flag is required if you use `pNext` parameter in vmaBindBufferMemory2() or vmaBindImageMemory2(). + */ + VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT = 0x00000004, + /** + Enables usage of VK_EXT_memory_budget extension. -/// Description of a Allocator to be created. -typedef struct VmaAllocatorCreateInfo -{ - /// Vulkan physical device. - /** It must be valid throughout whole lifetime of created Allocator. */ - VkPhysicalDevice physicalDevice; - /// Vulkan device. - /** It must be valid throughout whole lifetime of created Allocator. */ - VkDevice device; - /// Size of a single memory block to allocate for resources. - /** Set to 0 to use default, which is currently 256 MB. */ - VkDeviceSize preferredLargeHeapBlockSize; - /// Size of a single memory block to allocate for resources from a small heap <= 512 MB. - /** Set to 0 to use default, which is currently 64 MB. */ - VkDeviceSize preferredSmallHeapBlockSize; - /// Custom allocation callbacks. - /** Optional, can be null. When specified, will also be used for all CPU-side memory allocations. 
*/ - const VkAllocationCallbacks* pAllocationCallbacks; -} VmaAllocatorCreateInfo; + You may set this flag only if you found out that this device extension is supported, + you enabled it while creating Vulkan device passed as VmaAllocatorCreateInfo::device, + and you want it to be used internally by this library, along with another instance extension + VK_KHR_get_physical_device_properties2, which is required by it (or Vulkan 1.1, where this extension is promoted). -/// Creates Allocator object. -VkResult vmaCreateAllocator( - const VmaAllocatorCreateInfo* pCreateInfo, - VmaAllocator* pAllocator); + The extension provides query for current memory usage and budget, which will probably + be more accurate than an estimation used by the library otherwise. + */ + VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT = 0x00000008, + /** + Enables usage of VK_AMD_device_coherent_memory extension. -/// Destroys allocator object. -void vmaDestroyAllocator( - VmaAllocator allocator); + You may set this flag only if you: -/** -PhysicalDeviceProperties are fetched from physicalDevice by the allocator. -You can access it here, without fetching it again on your own. -*/ -void vmaGetPhysicalDeviceProperties( - VmaAllocator allocator, - const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties); + - found out that this device extension is supported and enabled it while creating Vulkan device passed as VmaAllocatorCreateInfo::device, + - checked that `VkPhysicalDeviceCoherentMemoryFeaturesAMD::deviceCoherentMemory` is true and set it while creating the Vulkan device, + - want it to be used internally by this library. -/** -PhysicalDeviceMemoryProperties are fetched from physicalDevice by the allocator. -You can access it here, without fetching it again on your own. -*/ -void vmaGetMemoryProperties( - VmaAllocator allocator, - const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties); + The extension and accompanying device feature provide access to memory types with + `VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD` and `VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD` flags. + They are useful mostly for writing breadcrumb markers - a common method for debugging GPU crash/hang/TDR. -/** -\brief Given Memory Type Index, returns Property Flags of this memory type. + When the extension is not enabled, such memory types are still enumerated, but their usage is illegal. + To protect from this error, if you don't create the allocator with this flag, it will refuse to allocate any memory or create a custom pool in such memory type, + returning `VK_ERROR_FEATURE_NOT_PRESENT`. + */ + VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT = 0x00000010, + /** + Enables usage of "buffer device address" feature, which allows you to use function + `vkGetBufferDeviceAddress*` to get raw GPU pointer to a buffer and pass it for usage inside a shader. -This is just a convenience function. Same information can be obtained using -vmaGetMemoryProperties(). -*/ -void vmaGetMemoryTypeProperties( - VmaAllocator allocator, - uint32_t memoryTypeIndex, - VkMemoryPropertyFlags* pFlags); + You may set this flag only if you: -typedef struct VmaStatInfo -{ - uint32_t AllocationCount; - uint32_t SuballocationCount; - uint32_t UnusedRangeCount; - VkDeviceSize UsedBytes; - VkDeviceSize UnusedBytes; - VkDeviceSize SuballocationSizeMin, SuballocationSizeAvg, SuballocationSizeMax; - VkDeviceSize UnusedRangeSizeMin, UnusedRangeSizeAvg, UnusedRangeSizeMax; -} VmaStatInfo; + 1. 
(For Vulkan version < 1.2) Found as available and enabled device extension + VK_KHR_buffer_device_address. + This extension is promoted to core Vulkan 1.2. + 2. Found as available and enabled device feature `VkPhysicalDeviceBufferDeviceAddressFeatures::bufferDeviceAddress`. -/// General statistics from current state of Allocator. -struct VmaStats -{ - VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]; - VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]; - VmaStatInfo total; -}; + When this flag is set, you can create buffers with `VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT` using VMA. + The library automatically adds `VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT` to + allocated memory blocks wherever it might be needed. -/// Retrieves statistics from current state of the Allocator. -void vmaCalculateStats( - VmaAllocator allocator, - VmaStats* pStats); + For more information, see documentation chapter \ref enabling_buffer_device_address. + */ + VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT = 0x00000020, + /** + Enables usage of VK_EXT_memory_priority extension in the library. + + You may set this flag only if you found available and enabled this device extension, + along with `VkPhysicalDeviceMemoryPriorityFeaturesEXT::memoryPriority == VK_TRUE`, + while creating Vulkan device passed as VmaAllocatorCreateInfo::device. + + When this flag is used, VmaAllocationCreateInfo::priority and VmaPoolCreateInfo::priority + are used to set priorities of allocated Vulkan memory. Without it, these variables are ignored. + + A priority must be a floating-point value between 0 and 1, indicating the priority of the allocation relative to other memory allocations. + Larger values are higher priority. The granularity of the priorities is implementation-dependent. + It is automatically passed to every call to `vkAllocateMemory` done by the library using structure `VkMemoryPriorityAllocateInfoEXT`. + The value to be used for default priority is 0.5. + For more details, see the documentation of the VK_EXT_memory_priority extension. + */ + VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT = 0x00000040, + /** + Enables usage of VK_KHR_maintenance4 extension in the library. -#define VMA_STATS_STRING_ENABLED 0 + You may set this flag only if you found available and enabled this device extension, + while creating Vulkan device passed as VmaAllocatorCreateInfo::device. + */ + VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE4_BIT = 0x00000080, + /** + Enables usage of VK_KHR_maintenance5 extension in the library. -#if VMA_STATS_STRING_ENABLED + You should set this flag if you found available and enabled this device extension, + while creating Vulkan device passed as VmaAllocatorCreateInfo::device. + */ + VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE5_BIT = 0x00000100, -/// Builds and returns statistics as string in JSON format. -/** @param[out] ppStatsString Must be freed using vmaFreeStatsString() function. -*/ -void vmaBuildStatsString( - VmaAllocator allocator, - char** ppStatsString, - VkBool32 detailedMap); + /** + Enables usage of VK_KHR_external_memory_win32 extension in the library. -void vmaFreeStatsString( - VmaAllocator allocator, - char* pStatsString); + You should set this flag if you found available and enabled this device extension, + while creating Vulkan device passed as VmaAllocatorCreateInfo::device. + For more information, see \ref other_api_interop. 
+ */ + VMA_ALLOCATOR_CREATE_KHR_EXTERNAL_MEMORY_WIN32_BIT = 0x00000200, -#endif // #if VMA_STATS_STRING_ENABLED + VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VmaAllocatorCreateFlagBits; +/// See #VmaAllocatorCreateFlagBits. +typedef VkFlags VmaAllocatorCreateFlags; /** @} */ -//////////////////////////////////////////////////////////////////////////////// -/** \defgroup layer1 Layer 1 Choosing Memory Type +/** +\addtogroup group_alloc @{ */ +/// \brief Intended usage of the allocated memory. typedef enum VmaMemoryUsage { - /// No intended memory usage specified. + /** No intended memory usage specified. + Use other members of VmaAllocationCreateInfo to specify your requirements. + */ VMA_MEMORY_USAGE_UNKNOWN = 0, - /// Memory will be used on device only, no need to be mapped on host. + /** + \deprecated Obsolete, preserved for backward compatibility. + Prefers `VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT`. + */ VMA_MEMORY_USAGE_GPU_ONLY = 1, - /// Memory will be mapped on host. Could be used for transfer to device. + /** + \deprecated Obsolete, preserved for backward compatibility. + Guarantees `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` and `VK_MEMORY_PROPERTY_HOST_COHERENT_BIT`. + */ VMA_MEMORY_USAGE_CPU_ONLY = 2, - /// Memory will be used for frequent (dynamic) updates from host and reads on device. + /** + \deprecated Obsolete, preserved for backward compatibility. + Guarantees `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT`, prefers `VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT`. + */ VMA_MEMORY_USAGE_CPU_TO_GPU = 3, - /// Memory will be used for writing on device and readback on host. + /** + \deprecated Obsolete, preserved for backward compatibility. + Guarantees `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT`, prefers `VK_MEMORY_PROPERTY_HOST_CACHED_BIT`. + */ VMA_MEMORY_USAGE_GPU_TO_CPU = 4, + /** + \deprecated Obsolete, preserved for backward compatibility. + Prefers not `VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT`. + */ + VMA_MEMORY_USAGE_CPU_COPY = 5, + /** + Lazily allocated GPU memory having `VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT`. + Exists mostly on mobile platforms. Using it on desktop PC or other GPUs with no such memory type present will fail the allocation. + + Usage: Memory for transient attachment images (color attachments, depth attachments etc.), created with `VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT`. + + Allocations with this usage are always created as dedicated - it implies #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. + */ + VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED = 6, + /** + Selects best memory type automatically. + This flag is recommended for most common use cases. + + When using this flag, if you want to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT), + you must pass one of the flags: #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT + in VmaAllocationCreateInfo::flags. + + It can be used only with functions that let the library know `VkBufferCreateInfo` or `VkImageCreateInfo`, e.g. + vmaCreateBuffer(), vmaCreateImage(), vmaFindMemoryTypeIndexForBufferInfo(), vmaFindMemoryTypeIndexForImageInfo() + and not with generic memory allocation functions. + */ + VMA_MEMORY_USAGE_AUTO = 7, + /** + Selects best memory type automatically with preference for GPU (device) memory. 
+ + When using this flag, if you want to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT), + you must pass one of the flags: #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT + in VmaAllocationCreateInfo::flags. + + It can be used only with functions that let the library know `VkBufferCreateInfo` or `VkImageCreateInfo`, e.g. + vmaCreateBuffer(), vmaCreateImage(), vmaFindMemoryTypeIndexForBufferInfo(), vmaFindMemoryTypeIndexForImageInfo() + and not with generic memory allocation functions. + */ + VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE = 8, + /** + Selects best memory type automatically with preference for CPU (host) memory. + + When using this flag, if you want to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT), + you must pass one of the flags: #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT + in VmaAllocationCreateInfo::flags. + + It can be used only with functions that let the library know `VkBufferCreateInfo` or `VkImageCreateInfo`, e.g. + vmaCreateBuffer(), vmaCreateImage(), vmaFindMemoryTypeIndexForBufferInfo(), vmaFindMemoryTypeIndexForImageInfo() + and not with generic memory allocation functions. + */ + VMA_MEMORY_USAGE_AUTO_PREFER_HOST = 9, + VMA_MEMORY_USAGE_MAX_ENUM = 0x7FFFFFFF } VmaMemoryUsage; -typedef struct VmaMemoryRequirements +/// Flags to be passed as VmaAllocationCreateInfo::flags. +typedef enum VmaAllocationCreateFlagBits { - /** \brief Set to true if this allocation should have its own memory block. - + /** \brief Set this flag if the allocation should have its own memory block. + Use it for special, big resources, like fullscreen images used as attachments. - - This flag must also be used for host visible resources that you want to map - simultaneously because otherwise they might end up as regions of the same - VkDeviceMemory, while mapping same VkDeviceMemory multiple times is illegal. - */ - VkBool32 ownMemory; - /** \brief Intended usage of memory. - - Leave VMA_MEMORY_USAGE_UNKNOWN if you specify requiredFlags. You can also use both. + + If you use this flag while creating a buffer or an image, `VkMemoryDedicatedAllocateInfo` + structure is applied if possible. */ - VmaMemoryUsage usage; - /** \brief Flags that must be set in a Memory Type chosen for an allocation. - - Leave 0 if you specify requirement via usage. */ - VkMemoryPropertyFlags requiredFlags; - /** \brief Flags that preferably should be set in a Memory Type chosen for an allocation. - - Set to 0 if no additional flags are prefered and only requiredFlags should be used. - If not 0, it must be a superset or equal to requiredFlags. */ - VkMemoryPropertyFlags preferredFlags; - /** \brief Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such block. - + VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT = 0x00000001, + + /** \brief Set this flag to only try to allocate from existing `VkDeviceMemory` blocks and never create new such block. + If new allocation cannot be placed in any of the existing blocks, allocation - fails with VK_ERROR_OUT_OF_DEVICE_MEMORY error. - - It makes no sense to set ownMemory and neverAllocate at the same time. */ - VkBool32 neverAllocate; -} VmaMemoryRequirements; + fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY` error. 
-/** -This algorithm tries to find a memory type that: + You should not use #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT and + #VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT at the same time. It makes no sense. + */ + VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT = 0x00000002, + /** \brief Set this flag to use a memory that will be persistently mapped and retrieve pointer to it. -- Is allowed by memoryTypeBits. -- Contains all the flags from pMemoryRequirements->requiredFlags. -- Matches intended usage. -- Has as many flags from pMemoryRequirements->preferredFlags as possible. + Pointer to mapped memory will be returned through VmaAllocationInfo::pMappedData. -\return Returns VK_ERROR_FEATURE_NOT_PRESENT if not found. Receiving such result -from this function or any other allocating function probably means that your -device doesn't support any memory type with requested features for the specific -type of resource you want to use it for. Please check parameters of your -resource, like image layout (OPTIMAL versus LINEAR) or mip level count. -*/ -VkResult vmaFindMemoryTypeIndex( - VmaAllocator allocator, - uint32_t memoryTypeBits, - const VmaMemoryRequirements* pMemoryRequirements, - uint32_t* pMemoryTypeIndex); + It is valid to use this flag for allocation made from memory type that is not + `HOST_VISIBLE`. This flag is then ignored and memory is not mapped. This is + useful if you need an allocation that is efficient to use on GPU + (`DEVICE_LOCAL`) and still want to map it directly if possible on platforms that + support it (e.g. Intel GPU). + */ + VMA_ALLOCATION_CREATE_MAPPED_BIT = 0x00000004, + /** \deprecated Preserved for backward compatibility. Consider using vmaSetAllocationName() instead. -/** @} */ + Set this flag to treat VmaAllocationCreateInfo::pUserData as pointer to a + null-terminated string. Instead of copying pointer value, a local copy of the + string is made and stored in allocation's `pName`. The string is automatically + freed together with the allocation. It is also used in vmaBuildStatsString(). + */ + VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT = 0x00000020, + /** Allocation will be created from upper stack in a double stack pool. -//////////////////////////////////////////////////////////////////////////////// -/** \defgroup layer2 Layer 2 Allocating Memory -@{ -*/ + This flag is only allowed for custom pools created with #VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT flag. + */ + VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT = 0x00000040, + /** Create both buffer/image and allocation, but don't bind them together. + It is useful when you want to bind yourself to do some more advanced binding, e.g. using some extensions. + The flag is meaningful only with functions that bind by default: vmaCreateBuffer(), vmaCreateImage(). + Otherwise it is ignored. + + If you want to make sure the new buffer/image is not tied to the new memory allocation + through `VkMemoryDedicatedAllocateInfoKHR` structure in case the allocation ends up in its own memory block, + use also flag #VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT. + */ + VMA_ALLOCATION_CREATE_DONT_BIND_BIT = 0x00000080, + /** Create allocation only if additional device memory required for it, if any, won't exceed + memory budget. Otherwise return `VK_ERROR_OUT_OF_DEVICE_MEMORY`. + */ + VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT = 0x00000100, + /** \brief Set this flag if the allocated memory will have aliasing resources. -/** \brief General purpose memory allocation. 
+ Usage of this flag prevents supplying `VkMemoryDedicatedAllocateInfoKHR` when #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT is specified. + Otherwise created dedicated memory will not be suitable for aliasing resources, resulting in Vulkan Validation Layer errors. + */ + VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT = 0x00000200, + /** + Requests possibility to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT). -@param[out] pMemory Allocated memory. -@param[out] pMemoryTypeIndex Optional. Index of memory type that has been chosen for this allocation. + - If you use #VMA_MEMORY_USAGE_AUTO or other `VMA_MEMORY_USAGE_AUTO*` value, + you must use this flag to be able to map the allocation. Otherwise, mapping is incorrect. + - If you use other value of #VmaMemoryUsage, this flag is ignored and mapping is always possible in memory types that are `HOST_VISIBLE`. + This includes allocations created in \ref custom_memory_pools. -You should free the memory using vmaFreeMemory(). + Declares that mapped memory will only be written sequentially, e.g. using `memcpy()` or a loop writing number-by-number, + never read or accessed randomly, so a memory type can be selected that is uncached and write-combined. -All allocated memory is also automatically freed in vmaDestroyAllocator(). + \warning Violating this declaration may work correctly, but will likely be very slow. + Watch out for implicit reads introduced by doing e.g. `pMappedData[i] += x;` + Better prepare your data in a local variable and `memcpy()` it to the mapped pointer all at once. + */ + VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT = 0x00000400, + /** + Requests possibility to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT). -It is recommended to use vmaAllocateMemoryForBuffer(), vmaAllocateMemoryForImage(), -vmaCreateBuffer(), vmaCreateImage() instead whenever possible. -*/ -VkResult vmaAllocateMemory( - VmaAllocator allocator, - const VkMemoryRequirements* pVkMemoryRequirements, - const VmaMemoryRequirements* pVmaMemoryRequirements, - VkMappedMemoryRange* pMemory, - uint32_t* pMemoryTypeIndex); + - If you use #VMA_MEMORY_USAGE_AUTO or other `VMA_MEMORY_USAGE_AUTO*` value, + you must use this flag to be able to map the allocation. Otherwise, mapping is incorrect. + - If you use other value of #VmaMemoryUsage, this flag is ignored and mapping is always possible in memory types that are `HOST_VISIBLE`. + This includes allocations created in \ref custom_memory_pools. -/** -@param[out] pMemoryTypeIndex Optional. Pass null if you don't need this information. + Declares that mapped memory can be read, written, and accessed in random order, + so a `HOST_CACHED` memory type is preferred. + */ + VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT = 0x00000800, + /** + Together with #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT, + it says that despite request for host access, a not-`HOST_VISIBLE` memory type can be selected + if it may improve performance. + + By using this flag, you declare that you will check if the allocation ended up in a `HOST_VISIBLE` memory type + (e.g. using vmaGetAllocationMemoryProperties()) and if not, you will create some "staging" buffer and + issue an explicit transfer to write/read your data. + To prepare for this possibility, don't forget to add appropriate flags like + `VK_BUFFER_USAGE_TRANSFER_DST_BIT`, `VK_BUFFER_USAGE_TRANSFER_SRC_BIT` to the parameters of created buffer or image. 
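The interplay between #VMA_MEMORY_USAGE_AUTO and the `HOST_ACCESS` flags above can be shown with a minimal sketch, assuming an existing `allocator`; the buffer parameters and `myData`/`myDataSize` are placeholders:

\code
VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
bufCreateInfo.size = 65536;
bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT |
    VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT |
    VMA_ALLOCATION_CREATE_MAPPED_BIT;

VkBuffer buf;
VmaAllocation alloc;
VmaAllocationInfo allocInfo;
VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
// Check res...

VkMemoryPropertyFlags memPropFlags;
vmaGetAllocationMemoryProperties(allocator, alloc, &memPropFlags);
if(memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
{
    // The allocation ended up mappable - write directly through the persistent mapping.
    memcpy(allocInfo.pMappedData, myData, myDataSize);
}
else
{
    // Not HOST_VISIBLE - create a staging buffer and record vkCmdCopyBuffer instead,
    // which is why TRANSFER_DST was added to the buffer usage above.
}
\endcode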
+ */ + VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT = 0x00001000, + /** Allocation strategy that chooses smallest possible free range for the allocation + to minimize memory usage and fragmentation, possibly at the expense of allocation time. + */ + VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT = 0x00010000, + /** Allocation strategy that chooses first suitable free range for the allocation - + not necessarily in terms of the smallest offset but the one that is easiest and fastest to find + to minimize allocation time, possibly at the expense of allocation quality. + */ + VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT = 0x00020000, + /** Allocation strategy that chooses always the lowest offset in available space. + This is not the most efficient strategy but achieves highly packed data. + Used internally by defragmentation, not recommended in typical usage. + */ + VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT = 0x00040000, + /** Alias to #VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT. + */ + VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT, + /** Alias to #VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT. + */ + VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT, + /** A bit mask to extract only `STRATEGY` bits from entire set of flags. + */ + VMA_ALLOCATION_CREATE_STRATEGY_MASK = + VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT | + VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT | + VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT, + + VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VmaAllocationCreateFlagBits; +/// See #VmaAllocationCreateFlagBits. +typedef VkFlags VmaAllocationCreateFlags; + +/// Flags to be passed as VmaPoolCreateInfo::flags. +typedef enum VmaPoolCreateFlagBits +{ + /** \brief Use this flag if you always allocate only buffers and linear images or only optimal images out of this pool and so Buffer-Image Granularity can be ignored. + + This is an optional optimization flag. + + If you always allocate using vmaCreateBuffer(), vmaCreateImage(), + vmaAllocateMemoryForBuffer(), then you don't need to use it because allocator + knows exact type of your allocations so it can handle Buffer-Image Granularity + in the optimal way. + + If you also allocate using vmaAllocateMemoryForImage() or vmaAllocateMemory(), + exact type of such allocations is not known, so allocator must be conservative + in handling Buffer-Image Granularity, which can lead to suboptimal allocation + (wasted memory). In that case, if you can make sure you always allocate only + buffers and linear images or only optimal images out of this pool, use this flag + to make allocator disregard Buffer-Image Granularity and so make allocations + faster and more optimal. + */ + VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT = 0x00000002, -You should free the memory using vmaFreeMemory(). + /** \brief Enables alternative, linear allocation algorithm in this pool. -All allocated memory is also automatically freed in vmaDestroyAllocator(). -*/ -VkResult vmaAllocateMemoryForBuffer( - VmaAllocator allocator, - VkBuffer buffer, - const VmaMemoryRequirements* pMemoryRequirements, - VkMappedMemoryRange* pMemory, - uint32_t* pMemoryTypeIndex); + Specify this flag to enable linear allocation algorithm, which always creates + new allocations after last one and doesn't reuse space from allocations freed in + between. 
It trades memory consumption for simplified algorithm and data + structure, which has better performance and uses less memory for metadata. -/// Function similar to vmaAllocateMemoryForBuffer(). -VkResult vmaAllocateMemoryForImage( - VmaAllocator allocator, - VkImage image, - const VmaMemoryRequirements* pMemoryRequirements, - VkMappedMemoryRange* pMemory, - uint32_t* pMemoryTypeIndex); + By using this flag, you can achieve behavior of free-at-once, stack, + ring buffer, and double stack. + For details, see documentation chapter \ref linear_algorithm. + */ + VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT = 0x00000004, -/// Frees memory previously allocated using vmaAllocateMemoryForBuffer() or vmaAllocateMemoryForImage(). -void vmaFreeMemory( - VmaAllocator allocator, - const VkMappedMemoryRange* pMemory); + /** Bit mask to extract only `ALGORITHM` bits from entire set of flags. + */ + VMA_POOL_CREATE_ALGORITHM_MASK = + VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT, -/** -Feel free to use vkMapMemory on these memory blocks on you own if you want, but -just for convenience and to make sure correct offset and size is always -specified, usage of vmaMapMemory() / vmaUnmapMemory() is recommended. -*/ -VkResult vmaMapMemory( - VmaAllocator allocator, - const VkMappedMemoryRange* pMemory, - void** ppData); + VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VmaPoolCreateFlagBits; +/// Flags to be passed as VmaPoolCreateInfo::flags. See #VmaPoolCreateFlagBits. +typedef VkFlags VmaPoolCreateFlags; -void vmaUnmapMemory( - VmaAllocator allocator, - const VkMappedMemoryRange* pMemory); +/// Flags to be passed as VmaDefragmentationInfo::flags. +typedef enum VmaDefragmentationFlagBits +{ + /* \brief Use simple but fast algorithm for defragmentation. + May not achieve best results but will require least time to compute and least allocations to copy. + */ + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FAST_BIT = 0x1, + /* \brief Default defragmentation algorithm, applied also when no `ALGORITHM` flag is specified. + Offers a balance between defragmentation quality and the amount of allocations and bytes that need to be moved. + */ + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT = 0x2, + /* \brief Perform full defragmentation of memory. + Can result in notably more time to compute and allocations to copy, but will achieve best memory packing. + */ + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FULL_BIT = 0x4, + /** \brief Use the most robust algorithm at the cost of time to compute and number of copies to make. + Only available when bufferImageGranularity is greater than 1, since it aims to reduce + alignment issues between different types of resources. + Otherwise falls back to same behavior as #VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FULL_BIT. + */ + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT = 0x8, + + /// A bit mask to extract only `ALGORITHM` bits from entire set of flags. + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_MASK = + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FAST_BIT | + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT | + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FULL_BIT | + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT, + + VMA_DEFRAGMENTATION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VmaDefragmentationFlagBits; +/// See #VmaDefragmentationFlagBits. +typedef VkFlags VmaDefragmentationFlags; + +/// Operation performed on single defragmentation move. See structure #VmaDefragmentationMove.
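To illustrate #VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT, a ring-buffer-style custom pool could be set up roughly as follows; this is a sketch assuming an existing `allocator`, with placeholder buffer parameters and block size (VmaPoolCreateInfo and vmaCreatePool() are declared further down in this header):

\code
// Representative buffer parameters, used only to pick a memory type for the pool.
VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
sampleBufCreateInfo.size = 1024;
sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

VmaAllocationCreateInfo sampleAllocCreateInfo = {};
sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
sampleAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT;

uint32_t memTypeIndex;
VkResult res = vmaFindMemoryTypeIndexForBufferInfo(allocator,
    &sampleBufCreateInfo, &sampleAllocCreateInfo, &memTypeIndex);
// Check res...

VmaPoolCreateInfo poolCreateInfo = {};
poolCreateInfo.memoryTypeIndex = memTypeIndex;
poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
poolCreateInfo.blockSize = 16ull * 1024 * 1024; // Single fixed-size 16 MiB block.
poolCreateInfo.maxBlockCount = 1;

VmaPool pool;
res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
// Check res... Allocations made with VmaAllocationCreateInfo::pool = pool now come from this block.
\endcode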
+typedef enum VmaDefragmentationMoveOperation +{ + /// Buffer/image has been recreated at `dstTmpAllocation`, data has been copied, old buffer/image has been destroyed. `srcAllocation` should be changed to point to the new place. This is the default value set by vmaBeginDefragmentationPass(). + VMA_DEFRAGMENTATION_MOVE_OPERATION_COPY = 0, + /// Set this value if you cannot move the allocation. New place reserved at `dstTmpAllocation` will be freed. `srcAllocation` will remain unchanged. + VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE = 1, + /// Set this value if you decide to abandon the allocation and you destroyed the buffer/image. New place reserved at `dstTmpAllocation` will be freed, along with `srcAllocation`, which will be destroyed. + VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY = 2, +} VmaDefragmentationMoveOperation; /** @} */ -//////////////////////////////////////////////////////////////////////////////// -/** \defgroup layer3 Layer 3 Creating Buffers and Images +/** +\addtogroup group_virtual @{ */ -/** -@param[out] pMemory Optional. Pass null if you don't need this information. -@param[out] pMemoryTypeIndex Optional. Pass null if you don't need this information. +/// Flags to be passed as VmaVirtualBlockCreateInfo::flags. +typedef enum VmaVirtualBlockCreateFlagBits +{ + /** \brief Enables alternative, linear allocation algorithm in this virtual block. -This function automatically: + Specify this flag to enable linear allocation algorithm, which always creates + new allocations after last one and doesn't reuse space from allocations freed in + between. It trades memory consumption for simplified algorithm and data + structure, which has better performance and uses less memory for metadata. --# Creates buffer/image. --# Allocates appropriate memory for it. --# Binds the buffer/image with the memory. + By using this flag, you can achieve behavior of free-at-once, stack, + ring buffer, and double stack. + For details, see documentation chapter \ref linear_algorithm. + */ + VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT = 0x00000001, -You do not (and should not) pass returned pMemory to vmaFreeMemory. Only calling -vmaDestroyBuffer() / vmaDestroyImage() is required for objects created using -vmaCreateBuffer() / vmaCreateImage(). + /** \brief Bit mask to extract only `ALGORITHM` bits from entire set of flags. + */ + VMA_VIRTUAL_BLOCK_CREATE_ALGORITHM_MASK = + VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT, -All allocated buffers and images are also automatically destroyed in -vmaDestroyAllocator(), along with their memory allocations. -*/ -VkResult vmaCreateBuffer( - VmaAllocator allocator, - const VkBufferCreateInfo* pCreateInfo, - const VmaMemoryRequirements* pMemoryRequirements, - VkBuffer* pBuffer, - VkMappedMemoryRange* pMemory, - uint32_t* pMemoryTypeIndex); + VMA_VIRTUAL_BLOCK_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VmaVirtualBlockCreateFlagBits; +/// Flags to be passed as VmaVirtualBlockCreateInfo::flags. See #VmaVirtualBlockCreateFlagBits. +typedef VkFlags VmaVirtualBlockCreateFlags; -void vmaDestroyBuffer( - VmaAllocator allocator, - VkBuffer buffer); +/// Flags to be passed as VmaVirtualAllocationCreateInfo::flags. +typedef enum VmaVirtualAllocationCreateFlagBits +{ + /** \brief Allocation will be created from upper stack in a double stack pool. -/// Function similar to vmaCreateBuffer(). 
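The virtual allocator flags above are used together with #VmaVirtualBlock and the create-info structures declared further down in this header. A rough sketch (sizes and alignment are placeholders; vmaVirtualFree() is assumed here as the matching free entry point of the same API):

\code
VmaVirtualBlockCreateInfo blockCreateInfo = {};
blockCreateInfo.size = 1048576; // 1 MiB of "virtual" space, expressed in any consistent unit.

VmaVirtualBlock block;
VkResult res = vmaCreateVirtualBlock(&blockCreateInfo, &block);
// Check res...

VmaVirtualAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.size = 4096;
allocCreateInfo.alignment = 256;

VmaVirtualAllocation alloc;
VkDeviceSize offset;
res = vmaVirtualAllocate(block, &allocCreateInfo, &alloc, &offset);
// On success, [offset, offset + 4096) is reserved inside the block - e.g. usable as a
// sub-range of one big VkBuffer that you manage yourself.

vmaVirtualFree(block, alloc);
vmaDestroyVirtualBlock(block);
\endcode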
-VkResult vmaCreateImage( - VmaAllocator allocator, - const VkImageCreateInfo* pCreateInfo, - const VmaMemoryRequirements* pMemoryRequirements, - VkImage* pImage, - VkMappedMemoryRange* pMemory, - uint32_t* pMemoryTypeIndex); + This flag is only allowed for virtual blocks created with #VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT flag. + */ + VMA_VIRTUAL_ALLOCATION_CREATE_UPPER_ADDRESS_BIT = VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT, + /** \brief Allocation strategy that tries to minimize memory usage. + */ + VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT, + /** \brief Allocation strategy that tries to minimize allocation time. + */ + VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT, + /** Allocation strategy that chooses always the lowest offset in available space. + This is not the most efficient strategy but achieves highly packed data. + */ + VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT, + /** \brief A bit mask to extract only `STRATEGY` bits from entire set of flags. -void vmaDestroyImage( - VmaAllocator allocator, - VkImage image); + These strategy flags are binary compatible with equivalent flags in #VmaAllocationCreateFlagBits. + */ + VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MASK = VMA_ALLOCATION_CREATE_STRATEGY_MASK, -/** @} */ + VMA_VIRTUAL_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VmaVirtualAllocationCreateFlagBits; +/// Flags to be passed as VmaVirtualAllocationCreateInfo::flags. See #VmaVirtualAllocationCreateFlagBits. +typedef VkFlags VmaVirtualAllocationCreateFlags; -#ifdef VMA_IMPLEMENTATION +/** @} */ -#include -#include +#endif // _VMA_ENUM_DECLARATIONS -/******************************************************************************* -CONFIGURATION +#ifndef _VMA_DATA_TYPES_DECLARATIONS -Change these definitions depending on your environment. -*/ +/** +\addtogroup group_init +@{ */ -#define VMA_USE_STL_CONTAINERS 0 +/** \struct VmaAllocator +\brief Represents main object of this library initialized. -/* Set this macro to 1 to make the library including and using STL containers: -std::pair, std::vector, std::list, std::unordered_map. +Fill structure #VmaAllocatorCreateInfo and call function vmaCreateAllocator() to create it. +Call function vmaDestroyAllocator() to destroy it. -Set it to 0 or undefined to make the library using its own implementation of -the containers. +It is recommended to create just one object of this type per `VkDevice` object, +right after Vulkan is initialized and keep it alive until before Vulkan device is destroyed. */ -#if VMA_USE_STL_CONTAINERS -#define VMA_USE_STL_VECTOR 1 -#define VMA_USE_STL_UNORDERED_MAP 1 -#define VMA_USE_STL_LIST 1 -#endif - -#if VMA_USE_STL_VECTOR -#include -#endif - -#if VMA_USE_STL_UNORDERED_MAP -#include -#endif +VK_DEFINE_HANDLE(VmaAllocator) -#if VMA_USE_STL_LIST -#include -#endif +/** @} */ -/* -Following headers are used in this CONFIGURATION section only, so feel free to -remove them if not needed. +/** +\addtogroup group_alloc +@{ */ -#include // for assert -#include // for min, max -#include // for std::mutex -#if !defined(_WIN32) - #include // for aligned_alloc() -#endif +/** \struct VmaPool +\brief Represents custom memory pool +Fill structure VmaPoolCreateInfo and call function vmaCreatePool() to create it. +Call function vmaDestroyPool() to destroy it. 
-#ifdef _DEBUG - // Normal assert to check for programmer's errors, especially in Debug configuration. - #define VMA_ASSERT(expr) assert(expr) - // Assert that will be called very often, like inside data structures e.g. operator[]. - // Making it non-empty can make program slow. - #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) -#else - #define VMA_ASSERT(expr) - #define VMA_HEAVY_ASSERT(expr) -#endif +For more information see [Custom memory pools](@ref choosing_memory_type_custom_memory_pools). +*/ +VK_DEFINE_HANDLE(VmaPool) -// Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0. -#define VMA_NULL nullptr +/** \struct VmaAllocation +\brief Represents single memory allocation. -#define VMA_ALIGN_OF(type) (__alignof(type)) +It may be either dedicated block of `VkDeviceMemory` or a specific region of a bigger block of this type +plus unique offset. -#if defined(_WIN32) - #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) - #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) -#else - #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) - #define VMA_SYSTEM_FREE(ptr) free(ptr) -#endif +There are multiple ways to create such object. +You need to fill structure VmaAllocationCreateInfo. +For more information see [Choosing memory type](@ref choosing_memory_type). -#define VMA_MIN(v1, v2) (std::min((v1), (v2))) -#define VMA_MAX(v1, v2) (std::max((v1), (v2))) -#define VMA_SWAP(v1, v2) std::swap((v1), (v2)) +Although the library provides convenience functions that create Vulkan buffer or image, +allocate memory for it and bind them together, +binding of the allocation to a buffer or an image is out of scope of the allocation itself. +Allocation object can exist without buffer/image bound, +binding can be done manually by the user, and destruction of it can be done +independently of destruction of the allocation. -#define VMA_DEBUG_LOG(format, ...) -/* -#define VMA_DEBUG_LOG(format, ...) do { \ - printf(format, __VA_ARGS__); \ - printf("\n"); \ -} while(false) +The object also remembers its size and some other information. +To retrieve this information, use function vmaGetAllocationInfo() and inspect +returned structure VmaAllocationInfo. */ +VK_DEFINE_HANDLE(VmaAllocation) -#if VMA_STATS_STRING_ENABLED +/** \struct VmaDefragmentationContext +\brief An opaque object that represents started defragmentation process. -static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num) -{ - _ultoa_s(num, outStr, strLen, 10); -} -static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num) -{ - _ui64toa_s(num, outStr, strLen, 10); -} +Fill structure #VmaDefragmentationInfo and call function vmaBeginDefragmentation() to create it. +Call function vmaEndDefragmentation() to destroy it. +*/ +VK_DEFINE_HANDLE(VmaDefragmentationContext) -#endif // #if VMA_STATS_STRING_ENABLED +/** @} */ -class VmaMutex -{ -public: - VmaMutex() { } - ~VmaMutex() { } - void Lock() { m_Mutex.lock(); } - void Unlock() { m_Mutex.unlock(); } -private: - std::mutex m_Mutex; -}; +/** +\addtogroup group_virtual +@{ +*/ -/* -Main parameter for function assessing how good is a free suballocation for a new -allocation request. +/** \struct VmaVirtualAllocation +\brief Represents single memory allocation done inside VmaVirtualBlock. -- Set to true to use Best-Fit algorithm - prefer smaller blocks, as close to the - size of requested allocations as possible. 
-- Set to false to use Worst-Fit algorithm - prefer larger blocks, as large as - possible. +Use it as a unique identifier to virtual allocation within the single block. -Experiments in special testing environment showed that Best-Fit algorithm is -better. +Use value `VK_NULL_HANDLE` to represent a null/invalid allocation. */ -static const bool VMA_BEST_FIT = true; +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VmaVirtualAllocation) -/* -Every object will have its own allocation. -Enable for debugging purposes only. -*/ -static const bool VMA_DEBUG_ALWAYS_OWN_MEMORY = false; +/** @} */ -/* -Minimum alignment of all suballocations, in bytes. -Set to more than 1 for debugging purposes only. Must be power of two. +/** +\addtogroup group_virtual +@{ */ -static const VkDeviceSize VMA_DEBUG_ALIGNMENT = 1; -/* -Minimum margin between suballocations, in bytes. -Set nonzero for debugging purposes only. -*/ -static const VkDeviceSize VMA_DEBUG_MARGIN = 0; +/** \struct VmaVirtualBlock +\brief Handle to a virtual block object that allows to use core allocation algorithm without allocating any real GPU memory. -/* -Set this to 1 for debugging purposes only, to enable single mutex protecting all -entry calls to the library. Can be useful for debugging multithreading issues. +Fill in #VmaVirtualBlockCreateInfo structure and use vmaCreateVirtualBlock() to create it. Use vmaDestroyVirtualBlock() to destroy it. +For more information, see documentation chapter \ref virtual_allocator. + +This object is not thread-safe - should not be used from multiple threads simultaneously, must be synchronized externally. */ -#define VMA_DEBUG_GLOBAL_MUTEX 0 +VK_DEFINE_HANDLE(VmaVirtualBlock) + +/** @} */ + +/** +\addtogroup group_init +@{ +*/ + +/// Callback function called after successful vkAllocateMemory. +typedef void (VKAPI_PTR* PFN_vmaAllocateDeviceMemoryFunction)( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t memoryType, + VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory, + VkDeviceSize size, + void* VMA_NULLABLE pUserData); + +/// Callback function called before vkFreeMemory. +typedef void (VKAPI_PTR* PFN_vmaFreeDeviceMemoryFunction)( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t memoryType, + VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory, + VkDeviceSize size, + void* VMA_NULLABLE pUserData); + +/** \brief Set of callbacks that the library will call for `vkAllocateMemory` and `vkFreeMemory`. + +Provided for informative purpose, e.g. to gather statistics about number of +allocations or total amount of memory allocated in Vulkan. + +Used in VmaAllocatorCreateInfo::pDeviceMemoryCallbacks. +*/ +typedef struct VmaDeviceMemoryCallbacks +{ + /// Optional, can be null. + PFN_vmaAllocateDeviceMemoryFunction VMA_NULLABLE pfnAllocate; + /// Optional, can be null. + PFN_vmaFreeDeviceMemoryFunction VMA_NULLABLE pfnFree; + /// Optional, can be null. + void* VMA_NULLABLE pUserData; +} VmaDeviceMemoryCallbacks; + +/** \brief Pointers to some Vulkan functions - a subset used by the library. + +Used in VmaAllocatorCreateInfo::pVulkanFunctions. +*/ +typedef struct VmaVulkanFunctions +{ + /// Required when using VMA_DYNAMIC_VULKAN_FUNCTIONS. + PFN_vkGetInstanceProcAddr VMA_NULLABLE vkGetInstanceProcAddr; + /// Required when using VMA_DYNAMIC_VULKAN_FUNCTIONS. 
+ PFN_vkGetDeviceProcAddr VMA_NULLABLE vkGetDeviceProcAddr; + PFN_vkGetPhysicalDeviceProperties VMA_NULLABLE vkGetPhysicalDeviceProperties; + PFN_vkGetPhysicalDeviceMemoryProperties VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties; + PFN_vkAllocateMemory VMA_NULLABLE vkAllocateMemory; + PFN_vkFreeMemory VMA_NULLABLE vkFreeMemory; + PFN_vkMapMemory VMA_NULLABLE vkMapMemory; + PFN_vkUnmapMemory VMA_NULLABLE vkUnmapMemory; + PFN_vkFlushMappedMemoryRanges VMA_NULLABLE vkFlushMappedMemoryRanges; + PFN_vkInvalidateMappedMemoryRanges VMA_NULLABLE vkInvalidateMappedMemoryRanges; + PFN_vkBindBufferMemory VMA_NULLABLE vkBindBufferMemory; + PFN_vkBindImageMemory VMA_NULLABLE vkBindImageMemory; + PFN_vkGetBufferMemoryRequirements VMA_NULLABLE vkGetBufferMemoryRequirements; + PFN_vkGetImageMemoryRequirements VMA_NULLABLE vkGetImageMemoryRequirements; + PFN_vkCreateBuffer VMA_NULLABLE vkCreateBuffer; + PFN_vkDestroyBuffer VMA_NULLABLE vkDestroyBuffer; + PFN_vkCreateImage VMA_NULLABLE vkCreateImage; + PFN_vkDestroyImage VMA_NULLABLE vkDestroyImage; + PFN_vkCmdCopyBuffer VMA_NULLABLE vkCmdCopyBuffer; +#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + /// Fetch "vkGetBufferMemoryRequirements2" on Vulkan >= 1.1, fetch "vkGetBufferMemoryRequirements2KHR" when using VK_KHR_dedicated_allocation extension. + PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR; + /// Fetch "vkGetImageMemoryRequirements2" on Vulkan >= 1.1, fetch "vkGetImageMemoryRequirements2KHR" when using VK_KHR_dedicated_allocation extension. + PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR; +#endif +#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000 + /// Fetch "vkBindBufferMemory2" on Vulkan >= 1.1, fetch "vkBindBufferMemory2KHR" when using VK_KHR_bind_memory2 extension. + PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR; + /// Fetch "vkBindImageMemory2" on Vulkan >= 1.1, fetch "vkBindImageMemory2KHR" when using VK_KHR_bind_memory2 extension. + PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR; +#endif +#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000 + /// Fetch from "vkGetPhysicalDeviceMemoryProperties2" on Vulkan >= 1.1, but you can also fetch it from "vkGetPhysicalDeviceMemoryProperties2KHR" if you enabled extension VK_KHR_get_physical_device_properties2. + PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR; +#endif +#if VMA_KHR_MAINTENANCE4 || VMA_VULKAN_VERSION >= 1003000 + /// Fetch from "vkGetDeviceBufferMemoryRequirements" on Vulkan >= 1.3, but you can also fetch it from "vkGetDeviceBufferMemoryRequirementsKHR" if you enabled extension VK_KHR_maintenance4. + PFN_vkGetDeviceBufferMemoryRequirementsKHR VMA_NULLABLE vkGetDeviceBufferMemoryRequirements; + /// Fetch from "vkGetDeviceImageMemoryRequirements" on Vulkan >= 1.3, but you can also fetch it from "vkGetDeviceImageMemoryRequirementsKHR" if you enabled extension VK_KHR_maintenance4. + PFN_vkGetDeviceImageMemoryRequirementsKHR VMA_NULLABLE vkGetDeviceImageMemoryRequirements; +#endif +#if VMA_EXTERNAL_MEMORY_WIN32 + PFN_vkGetMemoryWin32HandleKHR VMA_NULLABLE vkGetMemoryWin32HandleKHR; +#else + void* VMA_NULLABLE vkGetMemoryWin32HandleKHR; +#endif +} VmaVulkanFunctions; + +/// Description of a Allocator to be created. +typedef struct VmaAllocatorCreateInfo +{ + /// Flags for created allocator. Use #VmaAllocatorCreateFlagBits enum. + VmaAllocatorCreateFlags flags; + /// Vulkan physical device. 
+ /** It must be valid throughout whole lifetime of created allocator. */ + VkPhysicalDevice VMA_NOT_NULL physicalDevice; + /// Vulkan device. + /** It must be valid throughout whole lifetime of created allocator. */ + VkDevice VMA_NOT_NULL device; + /// Preferred size of a single `VkDeviceMemory` block to be allocated from large heaps > 1 GiB. Optional. + /** Set to 0 to use default, which is currently 256 MiB. */ + VkDeviceSize preferredLargeHeapBlockSize; + /// Custom CPU memory allocation callbacks. Optional. + /** Optional, can be null. When specified, will also be used for all CPU-side memory allocations. */ + const VkAllocationCallbacks* VMA_NULLABLE pAllocationCallbacks; + /// Informative callbacks for `vkAllocateMemory`, `vkFreeMemory`. Optional. + /** Optional, can be null. */ + const VmaDeviceMemoryCallbacks* VMA_NULLABLE pDeviceMemoryCallbacks; + /** \brief Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out of particular Vulkan memory heap. + + If not NULL, it must be a pointer to an array of + `VkPhysicalDeviceMemoryProperties::memoryHeapCount` elements, defining limit on + maximum number of bytes that can be allocated out of particular Vulkan memory + heap. + + Any of the elements may be equal to `VK_WHOLE_SIZE`, which means no limit on that + heap. This is also the default in case of `pHeapSizeLimit` = NULL. + + If there is a limit defined for a heap: + + - If user tries to allocate more memory from that heap using this allocator, + the allocation fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY`. + - If the limit is smaller than heap size reported in `VkMemoryHeap::size`, the + value of this limit will be reported instead when using vmaGetMemoryProperties(). + + Warning! Using this feature may not be equivalent to installing a GPU with + smaller amount of memory, because graphics driver doesn't necessary fail new + allocations with `VK_ERROR_OUT_OF_DEVICE_MEMORY` result when memory capacity is + exceeded. It may return success and just silently migrate some device memory + blocks to system RAM. This driver behavior can also be controlled using + VK_AMD_memory_overallocation_behavior extension. + */ + const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount") pHeapSizeLimit; + + /** \brief Pointers to Vulkan functions. Can be null. + + For details see [Pointers to Vulkan functions](@ref config_Vulkan_functions). + */ + const VmaVulkanFunctions* VMA_NULLABLE pVulkanFunctions; + /** \brief Handle to Vulkan instance object. + + Starting from version 3.0.0 this member is no longer optional, it must be set! + */ + VkInstance VMA_NOT_NULL instance; + /** \brief Optional. Vulkan version that the application uses. + + It must be a value in the format as created by macro `VK_MAKE_VERSION` or a constant like: `VK_API_VERSION_1_1`, `VK_API_VERSION_1_0`. + The patch version number specified is ignored. Only the major and minor versions are considered. + Only versions 1.0...1.4 are supported by the current implementation. + Leaving it initialized to zero is equivalent to `VK_API_VERSION_1_0`. + It must match the Vulkan version used by the application and supported on the selected physical device, + so it must be no higher than `VkApplicationInfo::apiVersion` passed to `vkCreateInstance` + and no higher than `VkPhysicalDeviceProperties::apiVersion` found on the physical device used. 
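For orientation, the simplest way to fill this structure when linking statically with the Vulkan loader (so `pVulkanFunctions` can stay null) might look like the sketch below; `myInstance`, `myPhysicalDevice` and `myDevice` are placeholders:

\code
VmaAllocatorCreateInfo allocatorCreateInfo = {};
allocatorCreateInfo.vulkanApiVersion = VK_API_VERSION_1_2; // Must match the app and the device.
allocatorCreateInfo.instance = myInstance;
allocatorCreateInfo.physicalDevice = myPhysicalDevice;
allocatorCreateInfo.device = myDevice;
// pVulkanFunctions left null - valid when the Vulkan functions are linked statically.

VmaAllocator allocator;
VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &allocator);
// Check res... On shutdown, call vmaDestroyAllocator(allocator).
\endcode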
+ */ + uint32_t vulkanApiVersion; +#if VMA_EXTERNAL_MEMORY + /** \brief Either null or a pointer to an array of external memory handle types for each Vulkan memory type. + + If not NULL, it must be a pointer to an array of `VkPhysicalDeviceMemoryProperties::memoryTypeCount` + elements, defining external memory handle types of particular Vulkan memory type, + to be passed using `VkExportMemoryAllocateInfoKHR`. + + Any of the elements may be equal to 0, which means not to use `VkExportMemoryAllocateInfoKHR` on this memory type. + This is also the default in case of `pTypeExternalMemoryHandleTypes` = NULL. + */ + const VkExternalMemoryHandleTypeFlagsKHR* VMA_NULLABLE VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryTypeCount") pTypeExternalMemoryHandleTypes; +#endif // #if VMA_EXTERNAL_MEMORY +} VmaAllocatorCreateInfo; + +/// Information about existing #VmaAllocator object. +typedef struct VmaAllocatorInfo +{ + /** \brief Handle to Vulkan instance object. + + This is the same value as has been passed through VmaAllocatorCreateInfo::instance. + */ + VkInstance VMA_NOT_NULL instance; + /** \brief Handle to Vulkan physical device object. + + This is the same value as has been passed through VmaAllocatorCreateInfo::physicalDevice. + */ + VkPhysicalDevice VMA_NOT_NULL physicalDevice; + /** \brief Handle to Vulkan device object. + + This is the same value as has been passed through VmaAllocatorCreateInfo::device. + */ + VkDevice VMA_NOT_NULL device; +} VmaAllocatorInfo; + +/** @} */ + +/** +\addtogroup group_stats +@{ +*/ + +/** \brief Calculated statistics of memory usage e.g. in a specific memory type, heap, custom pool, or total. + +These are fast to calculate. +See functions: vmaGetHeapBudgets(), vmaGetPoolStatistics(). +*/ +typedef struct VmaStatistics +{ + /** \brief Number of `VkDeviceMemory` objects - Vulkan memory blocks allocated. + */ + uint32_t blockCount; + /** \brief Number of #VmaAllocation objects allocated. + + Dedicated allocations have their own blocks, so each one adds 1 to `allocationCount` as well as `blockCount`. + */ + uint32_t allocationCount; + /** \brief Number of bytes allocated in `VkDeviceMemory` blocks. + + \note To avoid confusion, please be aware that what Vulkan calls an "allocation" - a whole `VkDeviceMemory` object + (e.g. as in `VkPhysicalDeviceLimits::maxMemoryAllocationCount`) is called a "block" in VMA, while VMA calls + "allocation" a #VmaAllocation object that represents a memory region sub-allocated from such block, usually for a single buffer or image. + */ + VkDeviceSize blockBytes; + /** \brief Total number of bytes occupied by all #VmaAllocation objects. + + Always less or equal than `blockBytes`. + Difference `(blockBytes - allocationBytes)` is the amount of memory allocated from Vulkan + but unused by any #VmaAllocation. + */ + VkDeviceSize allocationBytes; +} VmaStatistics; + +/** \brief More detailed statistics than #VmaStatistics. + +These are slower to calculate. Use for debugging purposes. +See functions: vmaCalculateStatistics(), vmaCalculatePoolStatistics(). 
+ +Previous version of the statistics API provided averages, but they have been removed +because they can be easily calculated as: + +\code +VkDeviceSize allocationSizeAvg = detailedStats.statistics.allocationBytes / detailedStats.statistics.allocationCount; +VkDeviceSize unusedBytes = detailedStats.statistics.blockBytes - detailedStats.statistics.allocationBytes; +VkDeviceSize unusedRangeSizeAvg = unusedBytes / detailedStats.unusedRangeCount; +\endcode +*/ +typedef struct VmaDetailedStatistics +{ + /// Basic statistics. + VmaStatistics statistics; + /// Number of free ranges of memory between allocations. + uint32_t unusedRangeCount; + /// Smallest allocation size. `VK_WHOLE_SIZE` if there are 0 allocations. + VkDeviceSize allocationSizeMin; + /// Largest allocation size. 0 if there are 0 allocations. + VkDeviceSize allocationSizeMax; + /// Smallest empty range size. `VK_WHOLE_SIZE` if there are 0 empty ranges. + VkDeviceSize unusedRangeSizeMin; + /// Largest empty range size. 0 if there are 0 empty ranges. + VkDeviceSize unusedRangeSizeMax; +} VmaDetailedStatistics; + +/** \brief General statistics from current state of the Allocator - +total memory usage across all memory heaps and types. + +These are slower to calculate. Use for debugging purposes. +See function vmaCalculateStatistics(). +*/ +typedef struct VmaTotalStatistics +{ + VmaDetailedStatistics memoryType[VK_MAX_MEMORY_TYPES]; + VmaDetailedStatistics memoryHeap[VK_MAX_MEMORY_HEAPS]; + VmaDetailedStatistics total; +} VmaTotalStatistics; + +/** \brief Statistics of current memory usage and available budget for a specific memory heap. + +These are fast to calculate. +See function vmaGetHeapBudgets(). +*/ +typedef struct VmaBudget +{ + /** \brief Statistics fetched from the library. + */ + VmaStatistics statistics; + /** \brief Estimated current memory usage of the program, in bytes. + + Fetched from system using VK_EXT_memory_budget extension if enabled. + + It might be different than `statistics.blockBytes` (usually higher) due to additional implicit objects + also occupying the memory, like swapchain, pipelines, descriptor heaps, command buffers, or + `VkDeviceMemory` blocks allocated outside of this library, if any. + */ + VkDeviceSize usage; + /** \brief Estimated amount of memory available to the program, in bytes. + + Fetched from system using VK_EXT_memory_budget extension if enabled. + + It might be different (most probably smaller) than `VkMemoryHeap::size[heapIndex]` due to factors + external to the program, decided by the operating system. + Difference `budget - usage` is the amount of additional memory that can probably + be allocated without problems. Exceeding the budget may result in various problems. + */ + VkDeviceSize budget; +} VmaBudget; + +/** @} */ + +/** +\addtogroup group_alloc +@{ +*/ + +/** \brief Parameters of new #VmaAllocation. + +To be used with functions like vmaCreateBuffer(), vmaCreateImage(), and many others. +*/ +typedef struct VmaAllocationCreateInfo +{ + /// Use #VmaAllocationCreateFlagBits enum. + VmaAllocationCreateFlags flags; + /** \brief Intended usage of memory. + + You can leave #VMA_MEMORY_USAGE_UNKNOWN if you specify memory requirements in other way. \n + If `pool` is not null, this member is ignored. + */ + VmaMemoryUsage usage; + /** \brief Flags that must be set in a Memory Type chosen for an allocation. + + Leave 0 if you specify memory requirements in other way. 
\n + If `pool` is not null, this member is ignored.*/ + VkMemoryPropertyFlags requiredFlags; + /** \brief Flags that preferably should be set in a memory type chosen for an allocation. + + Set to 0 if no additional flags are preferred. \n + If `pool` is not null, this member is ignored. */ + VkMemoryPropertyFlags preferredFlags; + /** \brief Bitmask containing one bit set for every memory type acceptable for this allocation. + + Value 0 is equivalent to `UINT32_MAX` - it means any memory type is accepted if + it meets other requirements specified by this structure, with no further + restrictions on memory type index. \n + If `pool` is not null, this member is ignored. + */ + uint32_t memoryTypeBits; + /** \brief Pool that this allocation should be created in. + + Leave `VK_NULL_HANDLE` to allocate from default pool. If not null, members: + `usage`, `requiredFlags`, `preferredFlags`, `memoryTypeBits` are ignored. + */ + VmaPool VMA_NULLABLE pool; + /** \brief Custom general-purpose pointer that will be stored in #VmaAllocation, can be read as VmaAllocationInfo::pUserData and changed using vmaSetAllocationUserData(). + + If #VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT is used, it must be either + null or pointer to a null-terminated string. The string will be then copied to + internal buffer, so it doesn't need to be valid after allocation call. + */ + void* VMA_NULLABLE pUserData; + /** \brief A floating-point value between 0 and 1, indicating the priority of the allocation relative to other memory allocations. + + It is used only when #VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT flag was used during creation of the #VmaAllocator object + and this allocation ends up as dedicated or is explicitly forced as dedicated using #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. + Otherwise, it has the priority of a memory block where it is placed and this variable is ignored. + */ + float priority; + /** \brief Additional minimum alignment to be used for this allocation. Can be 0. + + Leave 0 (default) not to impose any additional alignment. If not 0, it must be a power of two. + + When creating a buffer or an image, specifying a custom alignment is not needed in most cases, + because Vulkan implementation inspects the `CreateInfo` structure (including intended usage flags) + and returns required alignment through functions like `vkGetBufferMemoryRequirements2`, which VMA automatically + uses and respects. + Extra alignment may be needed in some cases, like when using a buffer for acceleration structure scratch + (`VkPhysicalDeviceAccelerationStructurePropertiesKHR::minAccelerationStructureScratchOffsetAlignment`, see also issue #523) + or when doing interop with OpenGL. + */ + VkDeviceSize minAlignment; +} VmaAllocationCreateInfo; + +/// Describes parameter of created #VmaPool. +typedef struct VmaPoolCreateInfo +{ + /** \brief Vulkan memory type index to allocate this pool from. + */ + uint32_t memoryTypeIndex; + /** \brief Use combination of #VmaPoolCreateFlagBits. + */ + VmaPoolCreateFlags flags; + /** \brief Size of a single `VkDeviceMemory` block to be allocated as part of this pool, in bytes. Optional. + + Specify nonzero to set explicit, constant size of memory blocks used by this + pool. + + Leave 0 to use default and let the library manage block sizes automatically. + Sizes of particular blocks may vary. + In this case, the pool will also support dedicated allocations. 
+ */ + VkDeviceSize blockSize; + /** \brief Minimum number of blocks to be always allocated in this pool, even if they stay empty. + + Set to 0 to have no preallocated blocks and allow the pool be completely empty. + */ + size_t minBlockCount; + /** \brief Maximum number of blocks that can be allocated in this pool. Optional. + + Set to 0 to use default, which is `SIZE_MAX`, which means no limit. + + Set to same value as VmaPoolCreateInfo::minBlockCount to have fixed amount of memory allocated + throughout whole lifetime of this pool. + */ + size_t maxBlockCount; + /** \brief A floating-point value between 0 and 1, indicating the priority of the allocations in this pool relative to other memory allocations. + + It is used only when #VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT flag was used during creation of the #VmaAllocator object. + Otherwise, this variable is ignored. + */ + float priority; + /** \brief Additional minimum alignment to be used for all allocations created from this pool. Can be 0. + + Leave 0 (default) not to impose any additional alignment. If not 0, it must be a power of two. + + When creating a buffer or an image, specifying a custom alignment is not needed in most cases, + because Vulkan implementation inspects the `CreateInfo` structure (including intended usage flags) + and returns required alignment through functions like `vkGetBufferMemoryRequirements2`, which VMA automatically + uses and respects. + Extra alignment may be needed in some cases, like when using a buffer for acceleration structure scratch + (`VkPhysicalDeviceAccelerationStructurePropertiesKHR::minAccelerationStructureScratchOffsetAlignment`, see also issue #523) + or when doing interop with OpenGL. + */ + VkDeviceSize minAllocationAlignment; + /** \brief Additional `pNext` chain to be attached to `VkMemoryAllocateInfo` used for every allocation made by this pool. Optional. + + Optional, can be null. If not null, it must point to a `pNext` chain of structures that can be attached to `VkMemoryAllocateInfo`. + It can be useful for special needs such as adding `VkExportMemoryAllocateInfoKHR`. + Structures pointed by this member must remain alive and unchanged for the whole lifetime of the custom pool. + + Please note that some structures, e.g. `VkMemoryPriorityAllocateInfoEXT`, `VkMemoryDedicatedAllocateInfoKHR`, + can be attached automatically by this library when using other, more convenient of its features. + */ + void* VMA_NULLABLE VMA_EXTENDS_VK_STRUCT(VkMemoryAllocateInfo) pMemoryAllocateNext; +} VmaPoolCreateInfo; + +/** @} */ + +/** +\addtogroup group_alloc +@{ +*/ + +/** +Parameters of #VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo(). + +There is also an extended version of this structure that carries additional parameters: #VmaAllocationInfo2. +*/ +typedef struct VmaAllocationInfo +{ + /** \brief Memory type index that this allocation was allocated from. + + It never changes. + */ + uint32_t memoryType; + /** \brief Handle to Vulkan memory object. + + Same memory object can be shared by multiple allocations. + + It can change after the allocation is moved during \ref defragmentation. + */ + VkDeviceMemory VMA_NULLABLE_NON_DISPATCHABLE deviceMemory; + /** \brief Offset in `VkDeviceMemory` object to the beginning of this allocation, in bytes. `(deviceMemory, offset)` pair is unique to this allocation. + + You usually don't need to use this offset. If you create a buffer or an image together with the allocation using e.g. 
function + vmaCreateBuffer(), vmaCreateImage(), functions that operate on these resources refer to the beginning of the buffer or image, + not entire device memory block. Functions like vmaMapMemory(), vmaBindBufferMemory() also refer to the beginning of the allocation + and apply this offset automatically. + + It can change after the allocation is moved during \ref defragmentation. + */ + VkDeviceSize offset; + /** \brief Size of this allocation, in bytes. + + It never changes. + + \note Allocation size returned in this variable may be greater than the size + requested for the resource e.g. as `VkBufferCreateInfo::size`. Whole size of the + allocation is accessible for operations on memory e.g. using a pointer after + mapping with vmaMapMemory(), but operations on the resource e.g. using + `vkCmdCopyBuffer` must be limited to the size of the resource. + */ + VkDeviceSize size; + /** \brief Pointer to the beginning of this allocation as mapped data. + + If the allocation hasn't been mapped using vmaMapMemory() and hasn't been + created with #VMA_ALLOCATION_CREATE_MAPPED_BIT flag, this value is null. + + It can change after call to vmaMapMemory(), vmaUnmapMemory(). + It can also change after the allocation is moved during \ref defragmentation. + */ + void* VMA_NULLABLE pMappedData; + /** \brief Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vmaSetAllocationUserData(). + + It can change after call to vmaSetAllocationUserData() for this allocation. + */ + void* VMA_NULLABLE pUserData; + /** \brief Custom allocation name that was set with vmaSetAllocationName(). + + It can change after call to vmaSetAllocationName() for this allocation. + + Another way to set custom name is to pass it in VmaAllocationCreateInfo::pUserData with + additional flag #VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT set [DEPRECATED]. + */ + const char* VMA_NULLABLE pName; +} VmaAllocationInfo; + +/// Extended parameters of a #VmaAllocation object that can be retrieved using function vmaGetAllocationInfo2(). +typedef struct VmaAllocationInfo2 +{ + /** \brief Basic parameters of the allocation. + + If you need only these, you can use function vmaGetAllocationInfo() and structure #VmaAllocationInfo instead. + */ + VmaAllocationInfo allocationInfo; + /** \brief Size of the `VkDeviceMemory` block that the allocation belongs to. + + In case of an allocation with dedicated memory, it will be equal to `allocationInfo.size`. + */ + VkDeviceSize blockSize; + /** \brief `VK_TRUE` if the allocation has dedicated memory, `VK_FALSE` if it was placed as part of a larger memory block. + + When `VK_TRUE`, it also means `VkMemoryDedicatedAllocateInfo` was used when creating the allocation + (if VK_KHR_dedicated_allocation extension or Vulkan version >= 1.1 is enabled). + */ + VkBool32 dedicatedMemory; +} VmaAllocationInfo2; + +/** Callback function called during vmaBeginDefragmentation() to check custom criterion about ending current defragmentation pass. + +Should return true if the defragmentation needs to stop current pass. +*/ +typedef VkBool32 (VKAPI_PTR* PFN_vmaCheckDefragmentationBreakFunction)(void* VMA_NULLABLE pUserData); + +/** \brief Parameters for defragmentation. + +To be used with function vmaBeginDefragmentation(). +*/ +typedef struct VmaDefragmentationInfo +{ + /// \brief Use combination of #VmaDefragmentationFlagBits. + VmaDefragmentationFlags flags; + /** \brief Custom pool to be defragmented. + + If null then default pools will undergo defragmentation process. 
+ */ + VmaPool VMA_NULLABLE pool; + /** \brief Maximum number of bytes that can be copied during single pass, while moving allocations to different places. + + `0` means no limit. + */ + VkDeviceSize maxBytesPerPass; + /** \brief Maximum number of allocations that can be moved during single pass to a different place. + + `0` means no limit. + */ + uint32_t maxAllocationsPerPass; + /** \brief Optional custom callback for stopping vmaBeginDefragmentation(). + + It has to return true to break the current defragmentation pass. + */ + PFN_vmaCheckDefragmentationBreakFunction VMA_NULLABLE pfnBreakCallback; + /// \brief Optional data to pass to custom callback for stopping pass of defragmentation. + void* VMA_NULLABLE pBreakCallbackUserData; +} VmaDefragmentationInfo; + +/// Single move of an allocation to be done for defragmentation. +typedef struct VmaDefragmentationMove +{ + /// Operation to be performed on the allocation by vmaEndDefragmentationPass(). Default value is #VMA_DEFRAGMENTATION_MOVE_OPERATION_COPY. You can modify it. + VmaDefragmentationMoveOperation operation; + /// Allocation that should be moved. + VmaAllocation VMA_NOT_NULL srcAllocation; + /** \brief Temporary allocation pointing to destination memory that will replace `srcAllocation`. + + \warning Do not store this allocation in your data structures! It exists only temporarily, for the duration of the defragmentation pass, + to be used for binding new buffer/image to the destination memory using e.g. vmaBindBufferMemory(). + vmaEndDefragmentationPass() will destroy it and make `srcAllocation` point to this memory. + */ + VmaAllocation VMA_NOT_NULL dstTmpAllocation; +} VmaDefragmentationMove; + +/** \brief Parameters for incremental defragmentation steps. + +To be used with function vmaBeginDefragmentationPass(). +*/ +typedef struct VmaDefragmentationPassMoveInfo +{ + /// Number of elements in the `pMoves` array. + uint32_t moveCount; + /** \brief Array of moves to be performed by the user in the current defragmentation pass. + + Pointer to an array of `moveCount` elements, owned by VMA, created in vmaBeginDefragmentationPass(), destroyed in vmaEndDefragmentationPass(). + + For each element, you should: + + 1. Create a new buffer/image in the place pointed by VmaDefragmentationMove::dstMemory + VmaDefragmentationMove::dstOffset. + 2. Copy data from the VmaDefragmentationMove::srcAllocation e.g. using `vkCmdCopyBuffer`, `vkCmdCopyImage`. + 3. Make sure these commands finished executing on the GPU. + 4. Destroy the old buffer/image. + + Only then you can finish defragmentation pass by calling vmaEndDefragmentationPass(). + After this call, the allocation will point to the new place in memory. + + Alternatively, if you cannot move specific allocation, you can set VmaDefragmentationMove::operation to #VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE. + + Alternatively, if you decide you want to completely remove the allocation: + + 1. Destroy its buffer/image. + 2. Set VmaDefragmentationMove::operation to #VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY. + + Then, after vmaEndDefragmentationPass() the allocation will be freed. + */ + VmaDefragmentationMove* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(moveCount) pMoves; +} VmaDefragmentationPassMoveInfo; + +/// Statistics returned for defragmentation process in function vmaEndDefragmentation(). +typedef struct VmaDefragmentationStats +{ + /// Total number of bytes that have been copied while moving allocations to different places.
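The incremental pass API described above is normally driven by a loop like the following minimal sketch; `allocator` is assumed to exist, default pools are defragmented (VmaDefragmentationInfo::pool left null), and the per-move recreate/copy work is only indicated by comments:

\code
VmaDefragmentationInfo defragInfo = {};
defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT;

VmaDefragmentationContext defragCtx;
VkResult res = vmaBeginDefragmentation(allocator, &defragInfo, &defragCtx);
// Check res...

for(;;)
{
    VmaDefragmentationPassMoveInfo pass;
    res = vmaBeginDefragmentationPass(allocator, defragCtx, &pass);
    if(res == VK_SUCCESS)
        break; // Nothing left to move.
    // res == VK_INCOMPLETE: process pass.pMoves[0 .. pass.moveCount).
    for(uint32_t i = 0; i < pass.moveCount; ++i)
    {
        // Recreate the buffer/image bound to pass.pMoves[i].dstTmpAllocation, copy the data
        // on the GPU and destroy the old resource - or set
        // pass.pMoves[i].operation = VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE;
    }
    res = vmaEndDefragmentationPass(allocator, defragCtx, &pass);
    if(res == VK_SUCCESS)
        break; // Defragmentation finished.
    // res == VK_INCOMPLETE: another pass is needed.
}

VmaDefragmentationStats stats;
vmaEndDefragmentation(allocator, defragCtx, &stats);
// stats.bytesMoved and stats.allocationsMoved summarize the whole run.
\endcode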
+ VkDeviceSize bytesMoved; + /// Total number of bytes that have been released to the system by freeing empty `VkDeviceMemory` objects. + VkDeviceSize bytesFreed; + /// Number of allocations that have been moved to different places. + uint32_t allocationsMoved; + /// Number of empty `VkDeviceMemory` objects that have been released to the system. + uint32_t deviceMemoryBlocksFreed; +} VmaDefragmentationStats; + +/** @} */ + +/** +\addtogroup group_virtual +@{ +*/ + +/// Parameters of created #VmaVirtualBlock object to be passed to vmaCreateVirtualBlock(). +typedef struct VmaVirtualBlockCreateInfo +{ + /** \brief Total size of the virtual block. + + Sizes can be expressed in bytes or any units you want as long as you are consistent in using them. + For example, if you allocate from some array of structures, 1 can mean single instance of entire structure. + */ + VkDeviceSize size; + + /** \brief Use combination of #VmaVirtualBlockCreateFlagBits. + */ + VmaVirtualBlockCreateFlags flags; + + /** \brief Custom CPU memory allocation callbacks. Optional. + + Optional, can be null. When specified, they will be used for all CPU-side memory allocations. + */ + const VkAllocationCallbacks* VMA_NULLABLE pAllocationCallbacks; +} VmaVirtualBlockCreateInfo; + +/// Parameters of created virtual allocation to be passed to vmaVirtualAllocate(). +typedef struct VmaVirtualAllocationCreateInfo +{ + /** \brief Size of the allocation. + + Cannot be zero. + */ + VkDeviceSize size; + /** \brief Required alignment of the allocation. Optional. + + Must be power of two. Special value 0 has the same meaning as 1 - means no special alignment is required, so allocation can start at any offset. + */ + VkDeviceSize alignment; + /** \brief Use combination of #VmaVirtualAllocationCreateFlagBits. + */ + VmaVirtualAllocationCreateFlags flags; + /** \brief Custom pointer to be associated with the allocation. Optional. + + It can be any value and can be used for user-defined purposes. It can be fetched or changed later. + */ + void* VMA_NULLABLE pUserData; +} VmaVirtualAllocationCreateInfo; + +/// Parameters of an existing virtual allocation, returned by vmaGetVirtualAllocationInfo(). +typedef struct VmaVirtualAllocationInfo +{ + /** \brief Offset of the allocation. + + Offset at which the allocation was made. + */ + VkDeviceSize offset; + /** \brief Size of the allocation. + + Same value as passed in VmaVirtualAllocationCreateInfo::size. + */ + VkDeviceSize size; + /** \brief Custom pointer associated with the allocation. + + Same value as passed in VmaVirtualAllocationCreateInfo::pUserData or to vmaSetVirtualAllocationUserData(). + */ + void* VMA_NULLABLE pUserData; +} VmaVirtualAllocationInfo; + +/** @} */ + +#endif // _VMA_DATA_TYPES_DECLARATIONS + +#ifndef _VMA_FUNCTION_HEADERS + +/** +\addtogroup group_init +@{ +*/ + +#ifdef VOLK_HEADER_VERSION +/** \brief Fully initializes `pDstVulkanFunctions` structure with Vulkan functions needed by VMA +using [volk library](https://github.com/zeux/volk). + +This function is defined in VMA header only if "volk.h" was included before it. 
+ +To use this function properly: + +-# Initialize volk and Vulkan: + -# Call `volkInitialize()` + -# Create `VkInstance` object + -# Call `volkLoadInstance()` + -# Create `VkDevice` object + -# Call `volkLoadDevice()` +-# Fill in structure #VmaAllocatorCreateInfo, especially members: + - VmaAllocatorCreateInfo::device + - VmaAllocatorCreateInfo::vulkanApiVersion + - VmaAllocatorCreateInfo::flags - set appropriate flags for the Vulkan extensions you enabled +-# Create an instance of the #VmaVulkanFunctions structure. +-# Call vmaImportVulkanFunctionsFromVolk(). + Parameter `pAllocatorCreateInfo` is read to find out which functions should be fetched for + appropriate Vulkan version and extensions. + Parameter `pDstVulkanFunctions` is filled with those function pointers, or null if not applicable. +-# Attach the #VmaVulkanFunctions structure to VmaAllocatorCreateInfo::pVulkanFunctions. +-# Call vmaCreateAllocator() to create the #VmaAllocator object. + +Example: + +\code +VmaAllocatorCreateInfo allocatorCreateInfo = {}; +allocatorCreateInfo.physicalDevice = myPhysicalDevice; +allocatorCreateInfo.device = myDevice; +allocatorCreateInfo.instance = myInstance; +allocatorCreateInfo.vulkanApiVersion = VK_API_VERSION_1_3; +allocatorCreateInfo.flags = VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT | + VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT | + VMA_ALLOCATOR_CREATE_KHR_EXTERNAL_MEMORY_WIN32_BIT; + +VmaVulkanFunctions vulkanFunctions; +VkResult res = vmaImportVulkanFunctionsFromVolk(&allocatorCreateInfo, &vulkanFunctions); +// Check res... +allocatorCreateInfo.pVulkanFunctions = &vulkanFunctions; + +VmaAllocator allocator; +res = vmaCreateAllocator(&allocatorCreateInfo, &allocator); +// Check res... +\endcode + +Internally in this function, pointers to functions related to the entire Vulkan instance are fetched using global function definitions, +while pointers to functions related to the Vulkan device are fetched using `volkLoadDeviceTable()` for given `pAllocatorCreateInfo->device`. + */ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaImportVulkanFunctionsFromVolk( + const VmaAllocatorCreateInfo* VMA_NOT_NULL pAllocatorCreateInfo, + VmaVulkanFunctions* VMA_NOT_NULL pDstVulkanFunctions); +#endif + +/// Creates #VmaAllocator object. +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator( + const VmaAllocatorCreateInfo* VMA_NOT_NULL pCreateInfo, + VmaAllocator VMA_NULLABLE* VMA_NOT_NULL pAllocator); + +/// Destroys allocator object. +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator( + VmaAllocator VMA_NULLABLE allocator); + +/** \brief Returns information about existing #VmaAllocator object - handle to Vulkan device etc. + +It might be useful if you want to keep just the #VmaAllocator handle and fetch other required handles to +`VkPhysicalDevice`, `VkDevice` etc. every time using this function. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocatorInfo* VMA_NOT_NULL pAllocatorInfo); + +/** +PhysicalDeviceProperties are fetched from physicalDevice by the allocator. +You can access it here, without fetching it again on your own. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties( + VmaAllocator VMA_NOT_NULL allocator, + const VkPhysicalDeviceProperties* VMA_NULLABLE* VMA_NOT_NULL ppPhysicalDeviceProperties); + +/** +PhysicalDeviceMemoryProperties are fetched from physicalDevice by the allocator. +You can access it here, without fetching it again on your own. 
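+
+For illustration, a hedged sketch (names such as `myAllocator` and `memTypeIndex` are placeholders, not part of the API)
+that checks whether a given memory type is `HOST_VISIBLE` using the cached properties:
+
+\code
+const VkPhysicalDeviceMemoryProperties* memProps = nullptr;
+vmaGetMemoryProperties(myAllocator, &memProps);
+
+VkMemoryPropertyFlags flags = memProps->memoryTypes[memTypeIndex].propertyFlags;
+bool hostVisible = (flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
+\endcode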
+*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties( + VmaAllocator VMA_NOT_NULL allocator, + const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE* VMA_NOT_NULL ppPhysicalDeviceMemoryProperties); + +/** +\brief Given Memory Type Index, returns Property Flags of this memory type. + +This is just a convenience function. Same information can be obtained using +vmaGetMemoryProperties(). +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t memoryTypeIndex, + VkMemoryPropertyFlags* VMA_NOT_NULL pFlags); + +/** \brief Sets index of the current frame. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t frameIndex); + +/** @} */ + +/** +\addtogroup group_stats +@{ +*/ + +/** \brief Retrieves statistics from current state of the Allocator. + +This function is called "calculate" not "get" because it has to traverse all +internal data structures, so it may be quite slow. Use it for debugging purposes. +For faster but more brief statistics suitable to be called every frame or every allocation, +use vmaGetHeapBudgets(). + +Note that when using allocator from multiple threads, returned information may immediately +become outdated. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStatistics( + VmaAllocator VMA_NOT_NULL allocator, + VmaTotalStatistics* VMA_NOT_NULL pStats); + +/** \brief Retrieves information about current memory usage and budget for all memory heaps. + +\param allocator +\param[out] pBudgets Must point to array with number of elements at least equal to number of memory heaps in physical device used. + +This function is called "get" not "calculate" because it is very fast, suitable to be called +every frame or every allocation. For more detailed statistics use vmaCalculateStatistics(). + +Note that when using allocator from multiple threads, returned information may immediately +become outdated. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetHeapBudgets( + VmaAllocator VMA_NOT_NULL allocator, + VmaBudget* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount") pBudgets); + +/** @} */ + +/** +\addtogroup group_alloc +@{ +*/ + +/** +\brief Helps to find `memoryTypeIndex`, given `memoryTypeBits` and #VmaAllocationCreateInfo. + +This algorithm tries to find a memory type that: + +- Is allowed by `memoryTypeBits`. +- Contains all the flags from `pAllocationCreateInfo->requiredFlags`. +- Matches intended usage. +- Has as many flags from `pAllocationCreateInfo->preferredFlags` as possible. + +\return Returns `VK_ERROR_FEATURE_NOT_PRESENT` if not found. Receiving such result +from this function or any other allocating function probably means that your +device doesn't support any memory type with requested features for the specific +type of resource you want to use it for. Please check parameters of your +resource, like image layout (`OPTIMAL` versus `LINEAR`) or mip level count. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t memoryTypeBits, + const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, + uint32_t* VMA_NOT_NULL pMemoryTypeIndex); + +/** +\brief Helps to find `memoryTypeIndex`, given `VkBufferCreateInfo` and #VmaAllocationCreateInfo. + +It can be useful e.g. to determine value to be used as VmaPoolCreateInfo::memoryTypeIndex. +It may need to internally create a temporary, dummy buffer that never has memory bound. 
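+
+A hedged sketch of using it to choose the memory type for a custom pool (the buffer parameters and `myAllocator`
+are example values, not requirements):
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 65536;
+bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+
+uint32_t memTypeIndex;
+VkResult res = vmaFindMemoryTypeIndexForBufferInfo(myAllocator,
+    &bufCreateInfo, &allocCreateInfo, &memTypeIndex);
+// Check res...
+
+VmaPoolCreateInfo poolCreateInfo = {};
+poolCreateInfo.memoryTypeIndex = memTypeIndex;
+// Fill remaining VmaPoolCreateInfo members as needed, then call vmaCreatePool().
+\endcode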
+*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo( + VmaAllocator VMA_NOT_NULL allocator, + const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo, + const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, + uint32_t* VMA_NOT_NULL pMemoryTypeIndex); + +/** +\brief Helps to find `memoryTypeIndex`, given `VkImageCreateInfo` and #VmaAllocationCreateInfo. + +It can be useful e.g. to determine value to be used as VmaPoolCreateInfo::memoryTypeIndex. +It may need to internally create a temporary, dummy image that never has memory bound. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo( + VmaAllocator VMA_NOT_NULL allocator, + const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo, + const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, + uint32_t* VMA_NOT_NULL pMemoryTypeIndex); + +/** \brief Allocates Vulkan device memory and creates #VmaPool object. + +\param allocator Allocator object. +\param pCreateInfo Parameters of pool to create. +\param[out] pPool Handle to created pool. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool( + VmaAllocator VMA_NOT_NULL allocator, + const VmaPoolCreateInfo* VMA_NOT_NULL pCreateInfo, + VmaPool VMA_NULLABLE* VMA_NOT_NULL pPool); + +/** \brief Destroys #VmaPool object and frees Vulkan device memory. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool( + VmaAllocator VMA_NOT_NULL allocator, + VmaPool VMA_NULLABLE pool); + +/** @} */ + +/** +\addtogroup group_stats +@{ +*/ + +/** \brief Retrieves statistics of existing #VmaPool object. + +\param allocator Allocator object. +\param pool Pool object. +\param[out] pPoolStats Statistics of specified pool. + +Note that when using the pool from multiple threads, returned information may immediately +become outdated. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStatistics( + VmaAllocator VMA_NOT_NULL allocator, + VmaPool VMA_NOT_NULL pool, + VmaStatistics* VMA_NOT_NULL pPoolStats); + +/** \brief Retrieves detailed statistics of existing #VmaPool object. + +\param allocator Allocator object. +\param pool Pool object. +\param[out] pPoolStats Statistics of specified pool. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaCalculatePoolStatistics( + VmaAllocator VMA_NOT_NULL allocator, + VmaPool VMA_NOT_NULL pool, + VmaDetailedStatistics* VMA_NOT_NULL pPoolStats); + +/** @} */ + +/** +\addtogroup group_alloc +@{ +*/ + +/** \brief Checks magic number in margins around all allocations in given memory pool in search for corruptions. + +Corruption detection is enabled only when `VMA_DEBUG_DETECT_CORRUPTION` macro is defined to nonzero, +`VMA_DEBUG_MARGIN` is defined to nonzero and the pool is created in memory type that is +`HOST_VISIBLE` and `HOST_COHERENT`. For more information, see [Corruption detection](@ref debugging_memory_usage_corruption_detection). + +Possible return values: + +- `VK_ERROR_FEATURE_NOT_PRESENT` - corruption detection is not enabled for specified pool. +- `VK_SUCCESS` - corruption detection has been performed and succeeded. +- `VK_ERROR_UNKNOWN` - corruption detection has been performed and found memory corruptions around one of the allocations. + `VMA_ASSERT` is also fired in that case. +- Other value: Error returned by Vulkan, e.g. memory mapping failure. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption( + VmaAllocator VMA_NOT_NULL allocator, + VmaPool VMA_NOT_NULL pool); + +/** \brief Retrieves name of a custom pool. 
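+
+A minimal, illustrative sketch (an already created `pool` and `myAllocator` are assumed) of setting a name and
+reading it back:
+
+\code
+vmaSetPoolName(myAllocator, pool, "Texture pool");
+
+const char* poolName = nullptr;
+vmaGetPoolName(myAllocator, pool, &poolName);
+// poolName now points to an internally-owned copy of "Texture pool".
+\endcode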
+ +After the call `ppName` is either null or points to an internally-owned null-terminated string +containing name of the pool that was previously set. The pointer becomes invalid when the pool is +destroyed or its name is changed using vmaSetPoolName(). +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName( + VmaAllocator VMA_NOT_NULL allocator, + VmaPool VMA_NOT_NULL pool, + const char* VMA_NULLABLE* VMA_NOT_NULL ppName); + +/** \brief Sets name of a custom pool. + +`pName` can be either null or pointer to a null-terminated string with new name for the pool. +Function makes internal copy of the string, so it can be changed or freed immediately after this call. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName( + VmaAllocator VMA_NOT_NULL allocator, + VmaPool VMA_NOT_NULL pool, + const char* VMA_NULLABLE pName); + +/** \brief General purpose memory allocation. + +\param allocator The main allocator object. +\param pVkMemoryRequirements Requirements for the allocated memory. +\param pCreateInfo Allocation creation parameters. +\param[out] pAllocation Handle to allocated memory. +\param[out] pAllocationInfo Optional, can be null. Information about allocated memory. It can be also fetched later using vmaGetAllocationInfo(). + +The function creates a #VmaAllocation object without creating a buffer or an image together with it. + +- It is recommended to use vmaAllocateMemoryForBuffer(), vmaAllocateMemoryForImage(), + vmaCreateBuffer(), vmaCreateImage() instead whenever possible. +- You can also create a buffer or an image later in an existing allocation using + vmaCreateAliasingBuffer2(), vmaCreateAliasingImage2(). +- You can also create a buffer or an image on your own and bind it to an existing allocation + using vmaBindBufferMemory2(), vmaBindImageMemory2(). + +You must free the returned allocation object using vmaFreeMemory() or vmaFreeMemoryPages(). + +There is also extended version of this function: vmaAllocateDedicatedMemory() +that offers additional parameter `pMemoryAllocateNext`. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory( + VmaAllocator VMA_NOT_NULL allocator, + const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements, + const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo, + VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation, + VmaAllocationInfo* VMA_NULLABLE pAllocationInfo); + +/** \brief General purpose allocation of a dedicated memory. + +This function is similar vmaAllocateMemory(), but +it always allocates dedicated memory - flag #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT is implied. +It offers additional parameter `pMemoryAllocateNext`, +which can be used to attach `pNext` chain to the `VkMemoryAllocateInfo` structure. +It can be useful for importing external memory. For more information, see \ref other_api_interop. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateDedicatedMemory( + VmaAllocator VMA_NOT_NULL allocator, + const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements, + const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo, + void* VMA_NULLABLE VMA_EXTENDS_VK_STRUCT(VkMemoryAllocateInfo) pMemoryAllocateNext, + VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation, + VmaAllocationInfo* VMA_NULLABLE pAllocationInfo); + +/** \brief General purpose memory allocation for multiple allocation objects at once. + +\param allocator Allocator object. +\param pVkMemoryRequirements Memory requirements for each allocation. +\param pCreateInfo Creation parameters for each allocation. +\param allocationCount Number of allocations to make. 
+\param[out] pAllocations Pointer to array that will be filled with handles to created allocations. +\param[out] pAllocationInfo Optional. Pointer to array that will be filled with parameters of created allocations. + +You should free the memory using vmaFreeMemory() or vmaFreeMemoryPages(). + +Word "pages" is just a suggestion to use this function to allocate pieces of memory needed for sparse binding. +It is just a general purpose allocation function able to make multiple allocations at once. +It may be internally optimized to be more efficient than calling vmaAllocateMemory() `allocationCount` times. + +All allocations are made using same parameters. All of them are created out of the same memory pool and type. +If any allocation fails, all allocations already made within this function call are also freed, so that when +returned result is not `VK_SUCCESS`, `pAllocation` array is always entirely filled with `VK_NULL_HANDLE`. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages( + VmaAllocator VMA_NOT_NULL allocator, + const VkMemoryRequirements* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements, + const VmaAllocationCreateInfo* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pCreateInfo, + size_t allocationCount, + VmaAllocation VMA_NULLABLE* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations, + VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo); + +/** \brief Allocates memory suitable for given `VkBuffer`. + +\param allocator +\param buffer +\param pCreateInfo +\param[out] pAllocation Handle to allocated memory. +\param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo(). + +It only creates #VmaAllocation. To bind the memory to the buffer, use vmaBindBufferMemory(). + +This is a special-purpose function. In most cases you should use vmaCreateBuffer(). + +You must free the allocation using vmaFreeMemory() when no longer needed. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer( + VmaAllocator VMA_NOT_NULL allocator, + VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer, + const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo, + VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation, + VmaAllocationInfo* VMA_NULLABLE pAllocationInfo); + +/** \brief Allocates memory suitable for given `VkImage`. + +\param allocator +\param image +\param pCreateInfo +\param[out] pAllocation Handle to allocated memory. +\param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo(). + +It only creates #VmaAllocation. To bind the memory to the buffer, use vmaBindImageMemory(). + +This is a special-purpose function. In most cases you should use vmaCreateImage(). + +You must free the allocation using vmaFreeMemory() when no longer needed. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage( + VmaAllocator VMA_NOT_NULL allocator, + VkImage VMA_NOT_NULL_NON_DISPATCHABLE image, + const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo, + VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation, + VmaAllocationInfo* VMA_NULLABLE pAllocationInfo); + +/** \brief Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage(). + +Passing `VK_NULL_HANDLE` as `allocation` is valid. Such function call is just skipped. 
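+
+A hedged sketch of the typical pairing with vmaAllocateMemoryForBuffer() and vmaBindBufferMemory()
+(the buffer is created separately with `vkCreateBuffer`; `myDevice` and `myAllocator` are placeholders):
+
+\code
+VkBuffer buf; // Created earlier with vkCreateBuffer().
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+
+VmaAllocation alloc;
+VkResult res = vmaAllocateMemoryForBuffer(myAllocator, buf, &allocCreateInfo, &alloc, nullptr);
+// Check res...
+res = vmaBindBufferMemory(myAllocator, alloc, buf);
+// Check res...
+
+// ... use the buffer ...
+
+vkDestroyBuffer(myDevice, buf, nullptr);
+vmaFreeMemory(myAllocator, alloc);
+\endcode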
+*/ +VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NULLABLE allocation); + +/** \brief Frees memory and destroys multiple allocations. + +Word "pages" is just a suggestion to use this function to free pieces of memory used for sparse binding. +It is just a general purpose function to free memory and destroy allocations made using e.g. vmaAllocateMemory(), +vmaAllocateMemoryPages() and other functions. +It may be internally optimized to be more efficient than calling vmaFreeMemory() `allocationCount` times. + +Allocations in `pAllocations` array can come from any memory pools and types. +Passing `VK_NULL_HANDLE` as elements of `pAllocations` array is valid. Such entries are just skipped. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages( + VmaAllocator VMA_NOT_NULL allocator, + size_t allocationCount, + const VmaAllocation VMA_NULLABLE* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations); + +/** \brief Returns current information about specified allocation. + +Current parameters of given allocation are returned in `pAllocationInfo`. + +Although this function doesn't lock any mutex, so it should be quite efficient, +you should avoid calling it too often. +You can retrieve same VmaAllocationInfo structure while creating your resource, from function +vmaCreateBuffer(), vmaCreateImage(). You can remember it if you are sure parameters don't change +(e.g. due to defragmentation). + +There is also a new function vmaGetAllocationInfo2() that offers extended information +about the allocation, returned using new structure #VmaAllocationInfo2. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VmaAllocationInfo* VMA_NOT_NULL pAllocationInfo); + +/** \brief Returns extended information about specified allocation. + +Current parameters of given allocation are returned in `pAllocationInfo`. +Extended parameters in structure #VmaAllocationInfo2 include memory block size +and a flag telling whether the allocation has dedicated memory. +It can be useful e.g. for interop with OpenGL. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo2( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VmaAllocationInfo2* VMA_NOT_NULL pAllocationInfo); + +/** \brief Sets pUserData in given allocation to new value. + +The value of pointer `pUserData` is copied to allocation's `pUserData`. +It is opaque, so you can use it however you want - e.g. +as a pointer, ordinal number or some handle to you own data. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + void* VMA_NULLABLE pUserData); + +/** \brief Sets pName in given allocation to new value. + +`pName` must be either null, or pointer to a null-terminated string. The function +makes local copy of the string and sets it as allocation's `pName`. String +passed as pName doesn't need to be valid for whole lifetime of the allocation - +you can free it after this call. String previously pointed by allocation's +`pName` is freed from memory. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationName( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + const char* VMA_NULLABLE pName); + +/** +\brief Given an allocation, returns Property Flags of its memory type. + +This is just a convenience function. 
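+For example, it can be used to check whether an allocation ended up in mappable memory - a hedged sketch,
+with `myAllocator` and `alloc` as placeholders:
+
+\code
+VkMemoryPropertyFlags memPropFlags;
+vmaGetAllocationMemoryProperties(myAllocator, alloc, &memPropFlags);
+
+if(memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
+{
+    // The memory is mappable - write to it directly, e.g. via vmaMapMemory().
+}
+else
+{
+    // Not host-visible - upload through a staging buffer and a transfer command instead.
+}
+\endcode
+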
Same information can be obtained using +vmaGetAllocationInfo() + vmaGetMemoryProperties(). +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationMemoryProperties( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkMemoryPropertyFlags* VMA_NOT_NULL pFlags); + + +#if VMA_EXTERNAL_MEMORY_WIN32 +/** +\brief Given an allocation, returns Win32 handle that may be imported by other processes or APIs. + +\param allocator The main allocator object. +\param allocation Allocation. +\param hTargetProcess A valid handle to target process or null. If it's null, the function returns + handle for the current process. +\param[out] pHandle Output parameter that returns the handle. + +The function fills `pHandle` with handle that can be used in target process. +The handle is fetched using function `vkGetMemoryWin32HandleKHR`. + +Each call to this function creates a new handle that must be closed using: + +\code +CloseHandle(handle); +\endcode + +You can close it any time, before or after destroying the allocation object. +It is reference-counted internally by Windows. + +Note the handle is returned for the entire `VkDeviceMemory` block that the allocation belongs to. +If the allocation is sub-allocated from a larger block, you may need to consider the offset of the allocation +(VmaAllocationInfo::offset). + +This function always uses `VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT`. +An extended version of this function is available as vmaGetMemoryWin32Handle2() +that allows using other handle type. + +This function is available compile-time only when VK_KHR_external_memory_win32 extension is available. +It can be manually disabled by predefining `VMA_EXTERNAL_MEMORY_WIN32=0` macro. + +If the function fails with `VK_ERROR_FEATURE_NOT_PRESENT` error code, please double-check +that VmaVulkanFunctions::vkGetMemoryWin32HandleKHR function pointer is set, e.g. +either by using macro `VMA_DYNAMIC_VULKAN_FUNCTIONS` +or by manually passing it through VmaAllocatorCreateInfo::pVulkanFunctions. + +For more information, see chapter \ref other_api_interop. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaGetMemoryWin32Handle( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + HANDLE hTargetProcess, + HANDLE* VMA_NOT_NULL pHandle); + +/** +\brief Given an allocation, returns Win32 handle that may be imported by other processes or APIs. + +\param allocator The main allocator object. +\param allocation Allocation. +\param handleType Type of handle to be exported. It should be one of: + - `VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR` + - `VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR` + - `VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR` + - `VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR` + - `VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR` + - `VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR` +\param hTargetProcess A valid handle to target process or null. If it's null, the function returns + handle for the current process. +\param[out] pHandle Output parameter that returns the handle. + +The function fills `pHandle` with handle that can be used in target process. +The handle is fetched using function `vkGetMemoryWin32HandleKHR`. + +If `handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR`, +or other NT handle types, +each call to this function creates a new handle that must be closed using: + +\code +CloseHandle(handle); +\endcode + +You can close it any time, before or after destroying the allocation object. 
+It is reference-counted internally by Windows.
+
+Note the handle is returned for the entire `VkDeviceMemory` block that the allocation belongs to.
+If the allocation is sub-allocated from a larger block, you may need to consider the offset of the allocation
+(VmaAllocationInfo::offset).
+
+This function is available compile-time only when VK_KHR_external_memory_win32 extension is available.
+It can be manually disabled by predefining `VMA_EXTERNAL_MEMORY_WIN32=0` macro.
+
+If the function fails with `VK_ERROR_FEATURE_NOT_PRESENT` error code, please double-check
+that VmaVulkanFunctions::vkGetMemoryWin32HandleKHR function pointer is set, e.g.
+either by using macro `VMA_DYNAMIC_VULKAN_FUNCTIONS`
+or by manually passing it through VmaAllocatorCreateInfo::pVulkanFunctions.
+
+For more information, see chapter \ref other_api_interop.
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaGetMemoryWin32Handle2(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaAllocation VMA_NOT_NULL allocation,
+    VkExternalMemoryHandleTypeFlagBits handleType,
+    HANDLE hTargetProcess,
+    HANDLE* VMA_NOT_NULL pHandle);
+#endif // VMA_EXTERNAL_MEMORY_WIN32
+
+/** \brief Maps memory represented by given allocation and returns pointer to it.
+
+Maps memory represented by given allocation to make it accessible to CPU code.
+On success, `*ppData` contains a pointer to the first byte of this memory.
+
+\warning
+If the allocation is part of a bigger `VkDeviceMemory` block, the returned pointer is
+correctly offset to the beginning of the region assigned to this particular allocation.
+Unlike the result of `vkMapMemory`, it points to the allocation, not to the beginning of the whole block.
+You should not add VmaAllocationInfo::offset to it!
+
+Mapping is internally reference-counted and synchronized, so although the raw Vulkan
+function `vkMapMemory()` cannot be used to map the same block of `VkDeviceMemory`
+multiple times simultaneously, it is safe to call this function on allocations
+assigned to the same memory block. Actual Vulkan memory will be mapped on first
+mapping and unmapped on last unmapping.
+
+If the function succeeded, you must call vmaUnmapMemory() to unmap the
+allocation when mapping is no longer needed or before freeing the allocation, at
+the latest.
+
+It is also safe to call this function multiple times on the same allocation. You
+must call vmaUnmapMemory() the same number of times as you called vmaMapMemory().
+
+It is also safe to call this function on an allocation created with
+#VMA_ALLOCATION_CREATE_MAPPED_BIT flag. Its memory stays mapped all the time.
+You must still call vmaUnmapMemory() the same number of times as you called
+vmaMapMemory(). You must not call vmaUnmapMemory() an additional time to free the
+"0-th" mapping made automatically due to #VMA_ALLOCATION_CREATE_MAPPED_BIT flag.
+
+This function fails when used on an allocation made in a memory type that is not
+`HOST_VISIBLE`.
+
+This function doesn't automatically flush or invalidate caches.
+If the allocation is made from a memory type that is not `HOST_COHERENT`,
+you also need to use vmaInvalidateAllocation() / vmaFlushAllocation(), as required by the Vulkan specification.
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaAllocation VMA_NOT_NULL allocation,
+    void* VMA_NULLABLE* VMA_NOT_NULL ppData);
+
+/** \brief Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
+
+For details, see description of vmaMapMemory().
+
+This function doesn't automatically flush or invalidate caches.
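+A hedged sketch of the usual write pattern through a mapped pointer, flushing explicitly in case the memory type
+is not `HOST_COHERENT` (`myAllocator`, `alloc`, `myData` and `myDataSize` are placeholders):
+
+\code
+void* mappedData;
+VkResult res = vmaMapMemory(myAllocator, alloc, &mappedData);
+// Check res...
+memcpy(mappedData, myData, myDataSize);
+res = vmaFlushAllocation(myAllocator, alloc, 0, VK_WHOLE_SIZE);
+// Check res...
+vmaUnmapMemory(myAllocator, alloc);
+\endcode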
+If the allocation is made from a memory type that is not `HOST_COHERENT`,
+you also need to use vmaInvalidateAllocation() / vmaFlushAllocation(), as required by the Vulkan specification.
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaAllocation VMA_NOT_NULL allocation);
+
+/** \brief Flushes memory of given allocation.
+
+Calls `vkFlushMappedMemoryRanges()` for memory associated with given range of given allocation.
+It needs to be called after writing to mapped memory for memory types that are not `HOST_COHERENT`.
+The unmap operation doesn't do that automatically.
+
+- `offset` must be relative to the beginning of the allocation.
+- `size` can be `VK_WHOLE_SIZE`. It means all memory from `offset` to the end of the given allocation.
+- `offset` and `size` don't have to be aligned.
+  They are internally rounded down/up to a multiple of `nonCoherentAtomSize`.
+- If `size` is 0, this call is ignored.
+- If the memory type that the `allocation` belongs to is not `HOST_VISIBLE` or it is `HOST_COHERENT`,
+  this call is ignored.
+
+Warning! `offset` and `size` are relative to the contents of the given `allocation`.
+If you mean the whole allocation, you can pass 0 and `VK_WHOLE_SIZE`, respectively.
+Do not pass the allocation's offset as `offset`!!!
+
+This function returns the `VkResult` from `vkFlushMappedMemoryRanges` if it is
+called, otherwise `VK_SUCCESS`.
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaAllocation VMA_NOT_NULL allocation,
+    VkDeviceSize offset,
+    VkDeviceSize size);
+
+/** \brief Invalidates memory of given allocation.
+
+Calls `vkInvalidateMappedMemoryRanges()` for memory associated with given range of given allocation.
+It needs to be called before reading from mapped memory for memory types that are not `HOST_COHERENT`.
+The map operation doesn't do that automatically.
+
+- `offset` must be relative to the beginning of the allocation.
+- `size` can be `VK_WHOLE_SIZE`. It means all memory from `offset` to the end of the given allocation.
+- `offset` and `size` don't have to be aligned.
+  They are internally rounded down/up to a multiple of `nonCoherentAtomSize`.
+- If `size` is 0, this call is ignored.
+- If the memory type that the `allocation` belongs to is not `HOST_VISIBLE` or it is `HOST_COHERENT`,
+  this call is ignored.
+
+Warning! `offset` and `size` are relative to the contents of the given `allocation`.
+If you mean the whole allocation, you can pass 0 and `VK_WHOLE_SIZE`, respectively.
+Do not pass the allocation's offset as `offset`!!!
+
+This function returns the `VkResult` from `vkInvalidateMappedMemoryRanges` if
+it is called, otherwise `VK_SUCCESS`.
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaAllocation VMA_NOT_NULL allocation,
+    VkDeviceSize offset,
+    VkDeviceSize size);
+
+/** \brief Flushes memory of given set of allocations.
+
+Calls `vkFlushMappedMemoryRanges()` for memory associated with given ranges of given allocations.
+For more information, see documentation of vmaFlushAllocation().
+
+\param allocator
+\param allocationCount
+\param allocations
+\param offsets If not null, it must point to an array of offsets of regions to flush, relative to the beginning of respective allocations. Null means all offsets are zero.
+\param sizes If not null, it must point to an array of sizes of regions to flush in respective allocations. Null means `VK_WHOLE_SIZE` for all allocations.
+ +This function returns the `VkResult` from `vkFlushMappedMemoryRanges` if it is +called, otherwise `VK_SUCCESS`. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t allocationCount, + const VmaAllocation VMA_NOT_NULL* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations, + const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets, + const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes); + +/** \brief Invalidates memory of given set of allocations. + +Calls `vkInvalidateMappedMemoryRanges()` for memory associated with given ranges of given allocations. +For more information, see documentation of vmaInvalidateAllocation(). + +\param allocator +\param allocationCount +\param allocations +\param offsets If not null, it must point to an array of offsets of regions to flush, relative to the beginning of respective allocations. Null means all offsets are zero. +\param sizes If not null, it must point to an array of sizes of regions to flush in respective allocations. Null means `VK_WHOLE_SIZE` for all allocations. + +This function returns the `VkResult` from `vkInvalidateMappedMemoryRanges` if it is +called, otherwise `VK_SUCCESS`. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t allocationCount, + const VmaAllocation VMA_NOT_NULL* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations, + const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets, + const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes); + +/** \brief Maps the allocation temporarily if needed, copies data from specified host pointer to it, and flushes the memory from the host caches if needed. + +\param allocator +\param pSrcHostPointer Pointer to the host data that become source of the copy. +\param dstAllocation Handle to the allocation that becomes destination of the copy. +\param dstAllocationLocalOffset Offset within `dstAllocation` where to write copied data, in bytes. +\param size Number of bytes to copy. + +This is a convenience function that allows to copy data from a host pointer to an allocation easily. +Same behavior can be achieved by calling vmaMapMemory(), `memcpy()`, vmaUnmapMemory(), vmaFlushAllocation(). + +This function can be called only for allocations created in a memory type that has `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` flag. +It can be ensured e.g. by using #VMA_MEMORY_USAGE_AUTO and #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or +#VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT. +Otherwise, the function will fail and generate a Validation Layers error. + +`dstAllocationLocalOffset` is relative to the contents of given `dstAllocation`. +If you mean whole allocation, you should pass 0. +Do not pass allocation's offset within device memory block this parameter! +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCopyMemoryToAllocation( + VmaAllocator VMA_NOT_NULL allocator, + const void* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(size) pSrcHostPointer, + VmaAllocation VMA_NOT_NULL dstAllocation, + VkDeviceSize dstAllocationLocalOffset, + VkDeviceSize size); + +/** \brief Invalidates memory in the host caches if needed, maps the allocation temporarily if needed, and copies data from it to a specified host pointer. + +\param allocator +\param srcAllocation Handle to the allocation that becomes source of the copy. 
+\param srcAllocationLocalOffset Offset within `srcAllocation` where to read copied data, in bytes. +\param pDstHostPointer Pointer to the host memory that become destination of the copy. +\param size Number of bytes to copy. + +This is a convenience function that allows to copy data from an allocation to a host pointer easily. +Same behavior can be achieved by calling vmaInvalidateAllocation(), vmaMapMemory(), `memcpy()`, vmaUnmapMemory(). + +This function should be called only for allocations created in a memory type that has `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` +and `VK_MEMORY_PROPERTY_HOST_CACHED_BIT` flag. +It can be ensured e.g. by using #VMA_MEMORY_USAGE_AUTO and #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT. +Otherwise, the function may fail and generate a Validation Layers error. +It may also work very slowly when reading from an uncached memory. + +`srcAllocationLocalOffset` is relative to the contents of given `srcAllocation`. +If you mean whole allocation, you should pass 0. +Do not pass allocation's offset within device memory block as this parameter! +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCopyAllocationToMemory( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL srcAllocation, + VkDeviceSize srcAllocationLocalOffset, + void* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(size) pDstHostPointer, + VkDeviceSize size); + +/** \brief Checks magic number in margins around all allocations in given memory types (in both default and custom pools) in search for corruptions. + +\param allocator +\param memoryTypeBits Bit mask, where each bit set means that a memory type with that index should be checked. + +Corruption detection is enabled only when `VMA_DEBUG_DETECT_CORRUPTION` macro is defined to nonzero, +`VMA_DEBUG_MARGIN` is defined to nonzero and only for memory types that are +`HOST_VISIBLE` and `HOST_COHERENT`. For more information, see [Corruption detection](@ref debugging_memory_usage_corruption_detection). + +Possible return values: + +- `VK_ERROR_FEATURE_NOT_PRESENT` - corruption detection is not enabled for any of specified memory types. +- `VK_SUCCESS` - corruption detection has been performed and succeeded. +- `VK_ERROR_UNKNOWN` - corruption detection has been performed and found memory corruptions around one of the allocations. + `VMA_ASSERT` is also fired in that case. +- Other value: Error returned by Vulkan, e.g. memory mapping failure. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t memoryTypeBits); + +/** \brief Begins defragmentation process. + +\param allocator Allocator object. +\param pInfo Structure filled with parameters of defragmentation. +\param[out] pContext Context object that must be passed to vmaEndDefragmentation() to finish defragmentation. +\returns +- `VK_SUCCESS` if defragmentation can begin. +- `VK_ERROR_FEATURE_NOT_PRESENT` if defragmentation is not supported. + +For more information about defragmentation, see documentation chapter: +[Defragmentation](@ref defragmentation). +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentation( + VmaAllocator VMA_NOT_NULL allocator, + const VmaDefragmentationInfo* VMA_NOT_NULL pInfo, + VmaDefragmentationContext VMA_NULLABLE* VMA_NOT_NULL pContext); + +/** \brief Ends defragmentation process. + +\param allocator Allocator object. +\param context Context object that has been created by vmaBeginDefragmentation(). +\param[out] pStats Optional stats for the defragmentation. Can be null. 
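+
+For orientation, a hedged sketch of a complete defragmentation loop built from these functions
+(`myAllocator` and `myPool` are placeholders; the per-move GPU copy is only outlined in comments):
+
+\code
+VmaDefragmentationInfo defragInfo = {};
+defragInfo.pool = myPool; // Or null to defragment default pools.
+
+VmaDefragmentationContext defragCtx;
+VkResult res = vmaBeginDefragmentation(myAllocator, &defragInfo, &defragCtx);
+// Check res...
+
+for(;;)
+{
+    VmaDefragmentationPassMoveInfo pass;
+    res = vmaBeginDefragmentationPass(myAllocator, defragCtx, &pass);
+    if(res == VK_SUCCESS)
+        break; // No more moves possible.
+    // res == VK_INCOMPLETE: perform pass.pMoves[0..pass.moveCount-1]:
+    // create a new buffer/image bound to pMoves[i].dstTmpAllocation, record and submit the copy
+    // from the resource behind pMoves[i].srcAllocation, wait for the GPU, destroy the old resource.
+
+    res = vmaEndDefragmentationPass(myAllocator, defragCtx, &pass);
+    if(res == VK_SUCCESS)
+        break;
+}
+
+vmaEndDefragmentation(myAllocator, defragCtx, nullptr);
+\endcode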
+ +Use this function to finish defragmentation started by vmaBeginDefragmentation(). +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaEndDefragmentation( + VmaAllocator VMA_NOT_NULL allocator, + VmaDefragmentationContext VMA_NOT_NULL context, + VmaDefragmentationStats* VMA_NULLABLE pStats); + +/** \brief Starts single defragmentation pass. + +\param allocator Allocator object. +\param context Context object that has been created by vmaBeginDefragmentation(). +\param[out] pPassInfo Computed information for current pass. +\returns +- `VK_SUCCESS` if no more moves are possible. Then you can omit call to vmaEndDefragmentationPass() and simply end whole defragmentation. +- `VK_INCOMPLETE` if there are pending moves returned in `pPassInfo`. You need to perform them, call vmaEndDefragmentationPass(), + and then preferably try another pass with vmaBeginDefragmentationPass(). +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass( + VmaAllocator VMA_NOT_NULL allocator, + VmaDefragmentationContext VMA_NOT_NULL context, + VmaDefragmentationPassMoveInfo* VMA_NOT_NULL pPassInfo); + +/** \brief Ends single defragmentation pass. + +\param allocator Allocator object. +\param context Context object that has been created by vmaBeginDefragmentation(). +\param pPassInfo Computed information for current pass filled by vmaBeginDefragmentationPass() and possibly modified by you. + +Returns `VK_SUCCESS` if no more moves are possible or `VK_INCOMPLETE` if more defragmentations are possible. + +Ends incremental defragmentation pass and commits all defragmentation moves from `pPassInfo`. +After this call: + +- Allocations at `pPassInfo[i].srcAllocation` that had `pPassInfo[i].operation ==` #VMA_DEFRAGMENTATION_MOVE_OPERATION_COPY + (which is the default) will be pointing to the new destination place. +- Allocation at `pPassInfo[i].srcAllocation` that had `pPassInfo[i].operation ==` #VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY + will be freed. + +If no more moves are possible you can end whole defragmentation. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass( + VmaAllocator VMA_NOT_NULL allocator, + VmaDefragmentationContext VMA_NOT_NULL context, + VmaDefragmentationPassMoveInfo* VMA_NOT_NULL pPassInfo); + +/** \brief Binds buffer to allocation. + +Binds specified buffer to region of memory represented by specified allocation. +Gets `VkDeviceMemory` handle and offset from the allocation. +If you want to create a buffer, allocate memory for it and bind them together separately, +you should use this function for binding instead of standard `vkBindBufferMemory()`, +because it ensures proper synchronization so that when a `VkDeviceMemory` object is used by multiple +allocations, calls to `vkBind*Memory()` or `vkMapMemory()` won't happen from multiple threads simultaneously +(which is illegal in Vulkan). + +It is recommended to use function vmaCreateBuffer() instead of this one. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer); + +/** \brief Binds buffer to allocation with additional parameters. + +\param allocator +\param allocation +\param allocationLocalOffset Additional offset to be added while binding, relative to the beginning of the `allocation`. Normally it should be 0. +\param buffer +\param pNext A chain of structures to be attached to `VkBindBufferMemoryInfoKHR` structure used internally. Normally it should be null. 
+ +This function is similar to vmaBindBufferMemory(), but it provides additional parameters. + +If `pNext` is not null, #VmaAllocator object must have been created with #VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT flag +or with VmaAllocatorCreateInfo::vulkanApiVersion `>= VK_API_VERSION_1_1`. Otherwise the call fails. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkDeviceSize allocationLocalOffset, + VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer, + const void* VMA_NULLABLE VMA_EXTENDS_VK_STRUCT(VkBindBufferMemoryInfoKHR) pNext); + +/** \brief Binds image to allocation. + +Binds specified image to region of memory represented by specified allocation. +Gets `VkDeviceMemory` handle and offset from the allocation. +If you want to create an image, allocate memory for it and bind them together separately, +you should use this function for binding instead of standard `vkBindImageMemory()`, +because it ensures proper synchronization so that when a `VkDeviceMemory` object is used by multiple +allocations, calls to `vkBind*Memory()` or `vkMapMemory()` won't happen from multiple threads simultaneously +(which is illegal in Vulkan). + +It is recommended to use function vmaCreateImage() instead of this one. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkImage VMA_NOT_NULL_NON_DISPATCHABLE image); + +/** \brief Binds image to allocation with additional parameters. + +\param allocator +\param allocation +\param allocationLocalOffset Additional offset to be added while binding, relative to the beginning of the `allocation`. Normally it should be 0. +\param image +\param pNext A chain of structures to be attached to `VkBindImageMemoryInfoKHR` structure used internally. Normally it should be null. + +This function is similar to vmaBindImageMemory(), but it provides additional parameters. + +If `pNext` is not null, #VmaAllocator object must have been created with #VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT flag +or with VmaAllocatorCreateInfo::vulkanApiVersion `>= VK_API_VERSION_1_1`. Otherwise the call fails. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkDeviceSize allocationLocalOffset, + VkImage VMA_NOT_NULL_NON_DISPATCHABLE image, + const void* VMA_NULLABLE VMA_EXTENDS_VK_STRUCT(VkBindImageMemoryInfoKHR) pNext); + +/** \brief Creates a new `VkBuffer`, allocates and binds memory for it. + +\param allocator The main allocator object. +\param pBufferCreateInfo Buffer creation parameters. +\param pAllocationCreateInfo Allocation creation parameters. +\param[out] pBuffer Buffer that was created. +\param[out] pAllocation Allocation that was created. +\param[out] pAllocationInfo Optional, can be null. Information about allocated memory. + It can be also fetched later using vmaGetAllocationInfo(). + +This function automatically: + +-# Creates buffer. +-# Allocates appropriate memory for it. +-# Binds the buffer with the memory. + +If any of these operations fail, buffer and allocation are not created, +returned value is negative error code, `*pBuffer` and `*pAllocation` are returned as null. + +If the function succeeded, you must destroy both buffer and allocation when you +no longer need them using either convenience function vmaDestroyBuffer() or +separately, using `vkDestroyBuffer()` and vmaFreeMemory(). 
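+
+A hedged sketch of typical usage (buffer size, usage flags and `myAllocator` are example values):
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 65536;
+bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+
+VkBuffer buf;
+VmaAllocation alloc;
+VkResult res = vmaCreateBuffer(myAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
+// Check res...
+
+// ... use the buffer ...
+
+vmaDestroyBuffer(myAllocator, buf, alloc);
+\endcode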
+
+If the VK_KHR_dedicated_allocation extension or Vulkan version >= 1.1 is used,
+the function queries the driver whether
+it requires or prefers the new buffer to have a dedicated allocation. If yes,
+and if dedicated allocation is possible
+(#VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT is not used), it creates a dedicated
+allocation for this buffer, just like when using
+#VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT.
+
+\note This function creates a new `VkBuffer`. Sub-allocation of parts of one large buffer,
+although recommended as a good practice, is out of scope of this library and could be implemented
+by the user as a higher-level logic on top of VMA.
+
+There is also an extended version of this function available with an additional parameter `pMemoryAllocateNext` -
+see vmaCreateDedicatedBuffer().
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
+    VmaAllocator VMA_NOT_NULL allocator,
+    const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
+    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
+    VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer,
+    VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation,
+    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);
+
+/** \brief Creates a buffer with additional minimum alignment.
+
+Similar to vmaCreateBuffer() but provides an additional parameter `minAlignment`, which allows you to specify a custom
+minimum alignment to be used when placing the buffer inside a larger memory block, which may be needed e.g.
+for interop with OpenGL.
+
+\deprecated
+This function is obsolete since the new VmaAllocationCreateInfo::minAlignment member allows specifying custom
+alignment while using any allocation function, like the standard vmaCreateBuffer().
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBufferWithAlignment(
+    VmaAllocator VMA_NOT_NULL allocator,
+    const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
+    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
+    VkDeviceSize minAlignment,
+    VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer,
+    VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation,
+    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);
+
+/** \brief Creates a dedicated buffer while offering extra parameter `pMemoryAllocateNext`.
+
+This function is similar to vmaCreateBuffer(), but
+it always allocates dedicated memory for the buffer - flag #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT is implied.
+It offers an additional parameter `pMemoryAllocateNext`,
+which can be used to attach a `pNext` chain to the `VkMemoryAllocateInfo` structure.
+It can be useful for importing external memory. For more information, see \ref other_api_interop.
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateDedicatedBuffer(
+    VmaAllocator VMA_NOT_NULL allocator,
+    const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
+    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
+    void* VMA_NULLABLE VMA_EXTENDS_VK_STRUCT(VkMemoryAllocateInfo) pMemoryAllocateNext,
+    VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer,
+    VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation,
+    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);
+
+/** \brief Creates a new `VkBuffer`, binds already created memory for it.
+
+\param allocator
+\param allocation Allocation that provides memory to be used for binding new buffer to it.
+\param pBufferCreateInfo
+\param[out] pBuffer Buffer that was created.
+
+This function automatically:
+
+-# Creates buffer.
+-# Binds the buffer with the supplied memory.
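+
+A hedged sketch (sizes, usage flags and `myAllocator` are illustrative) of aliasing two transient buffers in one
+allocation `alloc` created earlier, e.g. with vmaAllocateMemory(), with compatible size and memory type:
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 65536;
+bufCreateInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
+
+VkBuffer buf1, buf2;
+VkResult res = vmaCreateAliasingBuffer(myAllocator, alloc, &bufCreateInfo, &buf1);
+// Check res...
+res = vmaCreateAliasingBuffer(myAllocator, alloc, &bufCreateInfo, &buf2);
+// Check res... Both buffers now alias the same memory - synchronize their usage accordingly.
+\endcode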
+ +If any of these operations fail, buffer is not created, +returned value is negative error code and `*pBuffer` is null. + +If the function succeeded, you must destroy the buffer when you +no longer need it using `vkDestroyBuffer()`. If you want to also destroy the corresponding +allocation you can use convenience function vmaDestroyBuffer(). + +\note There is a new version of this function augmented with parameter `allocationLocalOffset` - see vmaCreateAliasingBuffer2(). +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingBuffer( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo, + VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer); + +/** \brief Creates a new `VkBuffer`, binds already created memory for it. + +\param allocator +\param allocation Allocation that provides memory to be used for binding new buffer to it. +\param allocationLocalOffset Additional offset to be added while binding, relative to the beginning of the allocation. Normally it should be 0. +\param pBufferCreateInfo +\param[out] pBuffer Buffer that was created. + +This function automatically: + +-# Creates buffer. +-# Binds the buffer with the supplied memory. + +If any of these operations fail, buffer is not created, +returned value is negative error code and `*pBuffer` is null. + +If the function succeeded, you must destroy the buffer when you +no longer need it using `vkDestroyBuffer()`. If you want to also destroy the corresponding +allocation you can use convenience function vmaDestroyBuffer(). + +\note This is a new version of the function augmented with parameter `allocationLocalOffset`. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingBuffer2( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkDeviceSize allocationLocalOffset, + const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo, + VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer); + +/** \brief Destroys Vulkan buffer and frees allocated memory. + +This is just a convenience function equivalent to: + +\code +vkDestroyBuffer(device, buffer, allocationCallbacks); +vmaFreeMemory(allocator, allocation); +\endcode + +It is safe to pass null as buffer and/or allocation. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer( + VmaAllocator VMA_NOT_NULL allocator, + VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer, + VmaAllocation VMA_NULLABLE allocation); + +/** \brief Function similar to vmaCreateBuffer() but for images. + +There is also an extended version of this function available: vmaCreateDedicatedImage() +which offers additional parameter `pMemoryAllocateNext`. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage( + VmaAllocator VMA_NOT_NULL allocator, + const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo, + const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, + VkImage VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pImage, + VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation, + VmaAllocationInfo* VMA_NULLABLE pAllocationInfo); + +/** \brief Function similar to vmaCreateDedicatedBuffer() but for images. + +This function is similar vmaCreateImage(), but +it always allocates dedicated memory for the image - flag #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT is implied. +It offers additional parameter `pMemoryAllocateNext`, +which can be used to attach `pNext` chain to the `VkMemoryAllocateInfo` structure. +It can be useful for importing external memory. 
For more information, see \ref other_api_interop. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateDedicatedImage( + VmaAllocator VMA_NOT_NULL allocator, + const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo, + const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, + void* VMA_NULLABLE VMA_EXTENDS_VK_STRUCT(VkMemoryAllocateInfo) pMemoryAllocateNext, + VkImage VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pImage, + VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation, + VmaAllocationInfo* VMA_NULLABLE pAllocationInfo); + +/// Function similar to vmaCreateAliasingBuffer() but for images. +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingImage( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo, + VkImage VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pImage); + +/// Function similar to vmaCreateAliasingBuffer2() but for images. +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingImage2( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkDeviceSize allocationLocalOffset, + const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo, + VkImage VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pImage); + +/** \brief Destroys Vulkan image and frees allocated memory. + +This is just a convenience function equivalent to: + +\code +vkDestroyImage(device, image, allocationCallbacks); +vmaFreeMemory(allocator, allocation); +\endcode + +It is safe to pass null as image and/or allocation. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage( + VmaAllocator VMA_NOT_NULL allocator, + VkImage VMA_NULLABLE_NON_DISPATCHABLE image, + VmaAllocation VMA_NULLABLE allocation); + +/** @} */ + +/** +\addtogroup group_virtual +@{ +*/ + +/** \brief Creates new #VmaVirtualBlock object. + +\param pCreateInfo Parameters for creation. +\param[out] pVirtualBlock Returned virtual block object or `VMA_NULL` if creation failed. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateVirtualBlock( + const VmaVirtualBlockCreateInfo* VMA_NOT_NULL pCreateInfo, + VmaVirtualBlock VMA_NULLABLE* VMA_NOT_NULL pVirtualBlock); + +/** \brief Destroys #VmaVirtualBlock object. + +Please note that you should consciously handle virtual allocations that could remain unfreed in the block. +You should either free them individually using vmaVirtualFree() or call vmaClearVirtualBlock() +if you are sure this is what you want. If you do neither, an assert is called. + +If you keep pointers to some additional metadata associated with your virtual allocations in their `pUserData`, +don't forget to free them. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyVirtualBlock( + VmaVirtualBlock VMA_NULLABLE virtualBlock); + +/** \brief Returns true of the #VmaVirtualBlock is empty - contains 0 virtual allocations and has all its space available for new allocations. +*/ +VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaIsVirtualBlockEmpty( + VmaVirtualBlock VMA_NOT_NULL virtualBlock); + +/** \brief Returns information about a specific virtual allocation within a virtual block, like its size and `pUserData` pointer. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualAllocationInfo( + VmaVirtualBlock VMA_NOT_NULL virtualBlock, + VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation, VmaVirtualAllocationInfo* VMA_NOT_NULL pVirtualAllocInfo); + +/** \brief Allocates new virtual allocation inside given #VmaVirtualBlock. 
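+
+A hedged end-to-end sketch of the virtual allocator (block size and allocation size are example values):
+
+\code
+VmaVirtualBlockCreateInfo blockCreateInfo = {};
+blockCreateInfo.size = 1048576; // 1 MB, or any custom unit you use consistently.
+
+VmaVirtualBlock block;
+VkResult res = vmaCreateVirtualBlock(&blockCreateInfo, &block);
+// Check res...
+
+VmaVirtualAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.size = 4096;
+
+VmaVirtualAllocation virtualAlloc;
+VkDeviceSize offset;
+res = vmaVirtualAllocate(block, &allocCreateInfo, &virtualAlloc, &offset);
+// Check res... On success, `offset` is the place inside your own resource to use for this sub-allocation.
+
+vmaVirtualFree(block, virtualAlloc);
+vmaDestroyVirtualBlock(block);
+\endcode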
+ +If the allocation fails due to not enough free space available, `VK_ERROR_OUT_OF_DEVICE_MEMORY` is returned +(despite the function doesn't ever allocate actual GPU memory). +`pAllocation` is then set to `VK_NULL_HANDLE` and `pOffset`, if not null, it set to `UINT64_MAX`. + +\param virtualBlock Virtual block +\param pCreateInfo Parameters for the allocation +\param[out] pAllocation Returned handle of the new allocation +\param[out] pOffset Returned offset of the new allocation. Optional, can be null. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaVirtualAllocate( + VmaVirtualBlock VMA_NOT_NULL virtualBlock, + const VmaVirtualAllocationCreateInfo* VMA_NOT_NULL pCreateInfo, + VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pAllocation, + VkDeviceSize* VMA_NULLABLE pOffset); + +/** \brief Frees virtual allocation inside given #VmaVirtualBlock. + +It is correct to call this function with `allocation == VK_NULL_HANDLE` - it does nothing. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaVirtualFree( + VmaVirtualBlock VMA_NOT_NULL virtualBlock, + VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE allocation); + +/** \brief Frees all virtual allocations inside given #VmaVirtualBlock. + +You must either call this function or free each virtual allocation individually with vmaVirtualFree() +before destroying a virtual block. Otherwise, an assert is called. + +If you keep pointer to some additional metadata associated with your virtual allocation in its `pUserData`, +don't forget to free it as well. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaClearVirtualBlock( + VmaVirtualBlock VMA_NOT_NULL virtualBlock); + +/** \brief Changes custom pointer associated with given virtual allocation. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaSetVirtualAllocationUserData( + VmaVirtualBlock VMA_NOT_NULL virtualBlock, + VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation, + void* VMA_NULLABLE pUserData); + +/** \brief Calculates and returns statistics about virtual allocations and memory usage in given #VmaVirtualBlock. + +This function is fast to call. For more detailed statistics, see vmaCalculateVirtualBlockStatistics(). +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualBlockStatistics( + VmaVirtualBlock VMA_NOT_NULL virtualBlock, + VmaStatistics* VMA_NOT_NULL pStats); + +/** \brief Calculates and returns detailed statistics about virtual allocations and memory usage in given #VmaVirtualBlock. + +This function is slow to call. Use for debugging purposes. +For less detailed statistics, see vmaGetVirtualBlockStatistics(). +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaCalculateVirtualBlockStatistics( + VmaVirtualBlock VMA_NOT_NULL virtualBlock, + VmaDetailedStatistics* VMA_NOT_NULL pStats); + +/** @} */ + +#if VMA_STATS_STRING_ENABLED +/** +\addtogroup group_stats +@{ +*/ + +/** \brief Builds and returns a null-terminated string in JSON format with information about given #VmaVirtualBlock. +\param virtualBlock Virtual block. +\param[out] ppStatsString Returned string. +\param detailedMap Pass `VK_FALSE` to only obtain statistics as returned by vmaCalculateVirtualBlockStatistics(). Pass `VK_TRUE` to also obtain full list of allocations and free spaces. + +Returned string must be freed using vmaFreeVirtualBlockStatsString(). +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaBuildVirtualBlockStatsString( + VmaVirtualBlock VMA_NOT_NULL virtualBlock, + char* VMA_NULLABLE* VMA_NOT_NULL ppStatsString, + VkBool32 detailedMap); + +/// Frees a string returned by vmaBuildVirtualBlockStatsString(). 
+VMA_CALL_PRE void VMA_CALL_POST vmaFreeVirtualBlockStatsString( + VmaVirtualBlock VMA_NOT_NULL virtualBlock, + char* VMA_NULLABLE pStatsString); + +/** \brief Builds and returns statistics as a null-terminated string in JSON format. +\param allocator +\param[out] ppStatsString Must be freed using vmaFreeStatsString() function. +\param detailedMap +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString( + VmaAllocator VMA_NOT_NULL allocator, + char* VMA_NULLABLE* VMA_NOT_NULL ppStatsString, + VkBool32 detailedMap); + +VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString( + VmaAllocator VMA_NOT_NULL allocator, + char* VMA_NULLABLE pStatsString); + +/** @} */ + +#endif // VMA_STATS_STRING_ENABLED + +#endif // _VMA_FUNCTION_HEADERS + +#ifdef __cplusplus +} +#endif + +#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H + +//////////////////////////////////////////////////////////////////////////////// +//////////////////////////////////////////////////////////////////////////////// +// +// IMPLEMENTATION +// +//////////////////////////////////////////////////////////////////////////////// +//////////////////////////////////////////////////////////////////////////////// + +// For Visual Studio IntelliSense. +#if defined(__cplusplus) && defined(__INTELLISENSE__) +#define VMA_IMPLEMENTATION +#endif + +#ifdef VMA_IMPLEMENTATION +#undef VMA_IMPLEMENTATION + +#include +#include +#include +#include +#include +#include + +#if !defined(VMA_CPP20) + #if __cplusplus >= 202002L || _MSVC_LANG >= 202002L // C++20 + #define VMA_CPP20 1 + #else + #define VMA_CPP20 0 + #endif +#endif + +#ifdef _MSC_VER + #include // For functions like __popcnt, _BitScanForward etc. +#endif +#if VMA_CPP20 + #include +#endif + +#if VMA_STATS_STRING_ENABLED + #include // For snprintf +#endif + +/******************************************************************************* +CONFIGURATION SECTION + +Define some of these macros before each #include of this header or change them +here if you need other then default behavior depending on your environment. +*/ +#ifndef _VMA_CONFIGURATION + +/* +Define this macro to 1 to make the library fetch pointers to Vulkan functions +internally, like: + + vulkanFunctions.vkAllocateMemory = &vkAllocateMemory; +*/ +#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) + #define VMA_STATIC_VULKAN_FUNCTIONS 1 +#endif + +/* +Define this macro to 1 to make the library fetch pointers to Vulkan functions +internally, like: + + vulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkGetDeviceProcAddr(device, "vkAllocateMemory"); + +To use this feature in new versions of VMA you now have to pass +VmaVulkanFunctions::vkGetInstanceProcAddr and vkGetDeviceProcAddr as +VmaAllocatorCreateInfo::pVulkanFunctions. Other members can be null. +*/ +#if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS) + #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1 +#endif + +#ifndef VMA_USE_STL_SHARED_MUTEX + #if __cplusplus >= 201703L || _MSVC_LANG >= 201703L // C++17 + #define VMA_USE_STL_SHARED_MUTEX 1 + // Visual studio defines __cplusplus properly only when passed additional parameter: /Zc:__cplusplus + // Otherwise it is always 199711L, despite shared_mutex works since Visual Studio 2015 Update 2. 
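+/*
+Illustrative sketch for the VMA_STATIC_VULKAN_FUNCTIONS / VMA_DYNAMIC_VULKAN_FUNCTIONS
+macros described above (not part of this configuration block; instance, physicalDevice
+and device are assumed to exist already). With static fetching disabled, the application
+only passes the two "GetProcAddr" entry points and the library imports the rest:
+
+    #define VMA_STATIC_VULKAN_FUNCTIONS 0
+    #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
+    #define VMA_IMPLEMENTATION
+    #include "vk_mem_alloc.h"
+
+    VmaVulkanFunctions vulkanFunctions = {};
+    vulkanFunctions.vkGetInstanceProcAddr = vkGetInstanceProcAddr;
+    vulkanFunctions.vkGetDeviceProcAddr   = vkGetDeviceProcAddr;
+
+    VmaAllocatorCreateInfo allocatorCreateInfo = {};
+    allocatorCreateInfo.instance         = instance;
+    allocatorCreateInfo.physicalDevice   = physicalDevice;
+    allocatorCreateInfo.device           = device;
+    allocatorCreateInfo.pVulkanFunctions = &vulkanFunctions;
+
+    VmaAllocator allocator;
+    vmaCreateAllocator(&allocatorCreateInfo, &allocator);
+*/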
+ #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L + #define VMA_USE_STL_SHARED_MUTEX 1 + #else + #define VMA_USE_STL_SHARED_MUTEX 0 + #endif +#endif + +/* +Define this macro to include custom header files without having to edit this file directly, e.g.: + + // Inside of "my_vma_configuration_user_includes.h": + + #include "my_custom_assert.h" // for MY_CUSTOM_ASSERT + #include "my_custom_min.h" // for my_custom_min + #include + #include + + // Inside a different file, which includes "vk_mem_alloc.h": + + #define VMA_CONFIGURATION_USER_INCLUDES_H "my_vma_configuration_user_includes.h" + #define VMA_ASSERT(expr) MY_CUSTOM_ASSERT(expr) + #define VMA_MIN(v1, v2) (my_custom_min(v1, v2)) + #include "vk_mem_alloc.h" + ... + +The following headers are used in this CONFIGURATION section only, so feel free to +remove them if not needed. +*/ +#if !defined(VMA_CONFIGURATION_USER_INCLUDES_H) + #include // for assert + #include // for min, max, swap + #include +#else + #include VMA_CONFIGURATION_USER_INCLUDES_H +#endif + +#ifndef VMA_NULL + // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0. + #define VMA_NULL nullptr +#endif + +#ifndef VMA_FALLTHROUGH + #if __cplusplus >= 201703L || _MSVC_LANG >= 201703L // C++17 + #define VMA_FALLTHROUGH [[fallthrough]] + #else + #define VMA_FALLTHROUGH + #endif +#endif + +// Normal assert to check for programmer's errors, especially in Debug configuration. +#ifndef VMA_ASSERT + #ifdef NDEBUG + #define VMA_ASSERT(expr) + #else + #define VMA_ASSERT(expr) assert(expr) + #endif +#endif + +// Assert that will be called very often, like inside data structures e.g. operator[]. +// Making it non-empty can make program slow. +#ifndef VMA_HEAVY_ASSERT + #ifdef NDEBUG + #define VMA_HEAVY_ASSERT(expr) + #else + #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) + #endif +#endif + +// Assert used for reporting memory leaks - unfreed allocations. +#ifndef VMA_ASSERT_LEAK + #define VMA_ASSERT_LEAK(expr) VMA_ASSERT(expr) +#endif + +// If your compiler is not compatible with C++17 and definition of +// aligned_alloc() function is missing, uncommenting following line may help: + +//#include + +#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) +#include +namespace +{ +void* vma_aligned_alloc(size_t alignment, size_t size) +{ + // alignment must be >= sizeof(void*) + if(alignment < sizeof(void*)) + { + alignment = sizeof(void*); + } + + return memalign(alignment, size); +} +} // namespace +#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC)) +#include + +#if defined(__APPLE__) +#include +#endif + +namespace +{ +void* vma_aligned_alloc(size_t alignment, size_t size) +{ + // Unfortunately, aligned_alloc causes VMA to crash due to it returning null pointers. (At least under 11.4) + // Therefore, for now disable this specific exception until a proper solution is found. + //#if defined(__APPLE__) && (defined(MAC_OS_X_VERSION_10_16) || defined(__IPHONE_14_0)) + //#if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_16 || __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_14_0 + // // For C++14, usr/include/malloc/_malloc.h declares aligned_alloc()) only + // // with the MacOSX11.0 SDK in Xcode 12 (which is what adds + // // MAC_OS_X_VERSION_10_16), even though the function is marked + // // available for 10.15. That is why the preprocessor checks for 10.16 but + // // the __builtin_available checks for 10.15. 
+ // // People who use C++17 could call aligned_alloc with the 10.15 SDK already. + // if (__builtin_available(macOS 10.15, iOS 13, *)) + // return aligned_alloc(alignment, size); + //#endif + //#endif + + // alignment must be >= sizeof(void*) + if(alignment < sizeof(void*)) + { + alignment = sizeof(void*); + } + + void *pointer; + if(posix_memalign(&pointer, alignment, size) == 0) + return pointer; + return VMA_NULL; +} +} // namespace +#elif defined(_WIN32) +namespace { +void* vma_aligned_alloc(size_t alignment, size_t size) +{ + return _aligned_malloc(size, alignment); +} +} // namespace +#elif __cplusplus >= 201703L || _MSVC_LANG >= 201703L // C++17 +namespace { +void* vma_aligned_alloc(size_t alignment, size_t size) +{ + return aligned_alloc(alignment, size); +} +} // namespace +#else +namespace +{ +void* vma_aligned_alloc(size_t alignment, size_t size) +{ + VMA_ASSERT(0 && "Could not implement aligned_alloc automatically. Please enable C++17 or later in your compiler or provide custom implementation of macro VMA_SYSTEM_ALIGNED_MALLOC (and VMA_SYSTEM_ALIGNED_FREE if needed) using the API of your system."); + return VMA_NULL; +} +} // namespace +#endif + +namespace +{ +#if defined(_WIN32) +void vma_aligned_free(void* ptr) +{ + _aligned_free(ptr); +} +#else +void vma_aligned_free(void* VMA_NULLABLE ptr) +{ + free(ptr); +} +#endif +} // namespace + +#ifndef VMA_ALIGN_OF + #define VMA_ALIGN_OF(type) (alignof(type)) +#endif + +#ifndef VMA_SYSTEM_ALIGNED_MALLOC + #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size)) +#endif + +#ifndef VMA_SYSTEM_ALIGNED_FREE + // VMA_SYSTEM_FREE is the old name, but might have been defined by the user + #if defined(VMA_SYSTEM_FREE) + #define VMA_SYSTEM_ALIGNED_FREE(ptr) VMA_SYSTEM_FREE(ptr) + #else + #define VMA_SYSTEM_ALIGNED_FREE(ptr) vma_aligned_free(ptr) + #endif +#endif + +#ifndef VMA_COUNT_BITS_SET + // Returns number of bits set to 1 in (v) + #define VMA_COUNT_BITS_SET(v) VmaCountBitsSet(v) +#endif + +#ifndef VMA_BITSCAN_LSB + // Scans integer for index of first nonzero value from the Least Significant Bit (LSB). If mask is 0 then returns UINT8_MAX + #define VMA_BITSCAN_LSB(mask) VmaBitScanLSB(mask) +#endif + +#ifndef VMA_BITSCAN_MSB + // Scans integer for index of first nonzero value from the Most Significant Bit (MSB). If mask is 0 then returns UINT8_MAX + #define VMA_BITSCAN_MSB(mask) VmaBitScanMSB(mask) +#endif + +#ifndef VMA_MIN + #define VMA_MIN(v1, v2) ((std::min)((v1), (v2))) +#endif + +#ifndef VMA_MAX + #define VMA_MAX(v1, v2) ((std::max)((v1), (v2))) +#endif + +#ifndef VMA_SORT + #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) +#endif + +#ifndef VMA_DEBUG_LOG_FORMAT + #define VMA_DEBUG_LOG_FORMAT(format, ...) + /* + #define VMA_DEBUG_LOG_FORMAT(format, ...) do { \ + printf((format), __VA_ARGS__); \ + printf("\n"); \ + } while(false) + */ +#endif + +#ifndef VMA_DEBUG_LOG + #define VMA_DEBUG_LOG(str) VMA_DEBUG_LOG_FORMAT("%s", (str)) +#endif + +#ifndef VMA_LEAK_LOG_FORMAT + #define VMA_LEAK_LOG_FORMAT(format, ...) 
VMA_DEBUG_LOG_FORMAT(format, __VA_ARGS__) +#endif + +#ifndef VMA_CLASS_NO_COPY + #define VMA_CLASS_NO_COPY(className) \ + private: \ + className(const className&) = delete; \ + className& operator=(const className&) = delete; +#endif +#ifndef VMA_CLASS_NO_COPY_NO_MOVE + #define VMA_CLASS_NO_COPY_NO_MOVE(className) \ + private: \ + className(const className&) = delete; \ + className(className&&) = delete; \ + className& operator=(const className&) = delete; \ + className& operator=(className&&) = delete; +#endif + +// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString. +#if VMA_STATS_STRING_ENABLED +namespace { + inline void VmaUint32ToStr(char* VMA_NOT_NULL outStr, size_t strLen, uint32_t num) + { + snprintf(outStr, strLen, "%" PRIu32, num); + } + inline void VmaUint64ToStr(char* VMA_NOT_NULL outStr, size_t strLen, uint64_t num) + { + snprintf(outStr, strLen, "%" PRIu64, num); + } + inline void VmaPtrToStr(char* VMA_NOT_NULL outStr, size_t strLen, const void* ptr) + { + snprintf(outStr, strLen, "%p", ptr); + } +} // namespace +#endif + +#ifndef VMA_MUTEX + class VmaMutex + { + VMA_CLASS_NO_COPY_NO_MOVE(VmaMutex) + public: + VmaMutex() = default; + void Lock() { m_Mutex.lock(); } + void Unlock() { m_Mutex.unlock(); } + bool TryLock() { return m_Mutex.try_lock(); } + private: + std::mutex m_Mutex; + }; + #define VMA_MUTEX VmaMutex +#endif + +// Read-write mutex, where "read" is shared access, "write" is exclusive access. +#ifndef VMA_RW_MUTEX + #if VMA_USE_STL_SHARED_MUTEX + // Use std::shared_mutex from C++17. + #include + class VmaRWMutex + { + public: + void LockRead() { m_Mutex.lock_shared(); } + void UnlockRead() { m_Mutex.unlock_shared(); } + bool TryLockRead() { return m_Mutex.try_lock_shared(); } + void LockWrite() { m_Mutex.lock(); } + void UnlockWrite() { m_Mutex.unlock(); } + bool TryLockWrite() { return m_Mutex.try_lock(); } + private: + std::shared_mutex m_Mutex; + }; + #define VMA_RW_MUTEX VmaRWMutex + #elif defined(_WIN32) && defined(WINVER) && defined(SRWLOCK_INIT) && WINVER >= 0x0600 + // Use SRWLOCK from WinAPI. + // Minimum supported client = Windows Vista, server = Windows Server 2008. + class VmaRWMutex + { + public: + VmaRWMutex() { InitializeSRWLock(&m_Lock); } + void LockRead() { AcquireSRWLockShared(&m_Lock); } + void UnlockRead() { ReleaseSRWLockShared(&m_Lock); } + bool TryLockRead() { return TryAcquireSRWLockShared(&m_Lock) != FALSE; } + void LockWrite() { AcquireSRWLockExclusive(&m_Lock); } + void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); } + bool TryLockWrite() { return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; } + private: + SRWLOCK m_Lock; + }; + #define VMA_RW_MUTEX VmaRWMutex + #else + // Less efficient fallback: Use normal mutex. + class VmaRWMutex + { + public: + void LockRead() { m_Mutex.Lock(); } + void UnlockRead() { m_Mutex.Unlock(); } + bool TryLockRead() { return m_Mutex.TryLock(); } + void LockWrite() { m_Mutex.Lock(); } + void UnlockWrite() { m_Mutex.Unlock(); } + bool TryLockWrite() { return m_Mutex.TryLock(); } + private: + VMA_MUTEX m_Mutex; + }; + #define VMA_RW_MUTEX VmaRWMutex + #endif // #if VMA_USE_STL_SHARED_MUTEX +#endif // #ifndef VMA_RW_MUTEX + +/* +If providing your own implementation, you need to implement a subset of std::atomic. 
+*/ +#ifndef VMA_ATOMIC_UINT32 + #include + #define VMA_ATOMIC_UINT32 std::atomic +#endif + +#ifndef VMA_ATOMIC_UINT64 + #include + #define VMA_ATOMIC_UINT64 std::atomic +#endif + +#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY + /** + Every allocation will have its own memory block. + Define to 1 for debugging purposes only. + */ + #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) +#endif + +#ifndef VMA_MIN_ALIGNMENT + /** + Minimum alignment of all allocations, in bytes. + Set to more than 1 for debugging purposes. Must be power of two. + */ + #ifdef VMA_DEBUG_ALIGNMENT // Old name + #define VMA_MIN_ALIGNMENT VMA_DEBUG_ALIGNMENT + #else + #define VMA_MIN_ALIGNMENT (1) + #endif +#endif + +#ifndef VMA_DEBUG_MARGIN + /** + Minimum margin after every allocation, in bytes. + Set nonzero for debugging purposes only. + */ + #define VMA_DEBUG_MARGIN (0) +#endif + +#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS + /** + Define this macro to 1 to automatically fill new allocations and destroyed + allocations with some bit pattern. + */ + #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) +#endif + +#ifndef VMA_DEBUG_DETECT_CORRUPTION + /** + Define this macro to 1 together with non-zero value of VMA_DEBUG_MARGIN to + enable writing magic value to the margin after every allocation and + validating it, so that memory corruptions (out-of-bounds writes) are detected. + */ + #define VMA_DEBUG_DETECT_CORRUPTION (0) +#endif + +#ifndef VMA_DEBUG_GLOBAL_MUTEX + /** + Set this to 1 for debugging purposes only, to enable single mutex protecting all + entry calls to the library. Can be useful for debugging multithreading issues. + */ + #define VMA_DEBUG_GLOBAL_MUTEX (0) +#endif + +#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY + /** + Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity. + Set to more than 1 for debugging purposes only. Must be power of two. + */ + #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) +#endif + +#ifndef VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT + /* + Set this to 1 to make VMA never exceed VkPhysicalDeviceLimits::maxMemoryAllocationCount + and return error instead of leaving up to Vulkan implementation what to do in such cases. + */ + #define VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT (1) +#endif + +#ifndef VMA_DEBUG_DONT_EXCEED_HEAP_SIZE_WITH_ALLOCATION_SIZE + /* + Set this to 1 to make VMA never exceed VkPhysicalDeviceMemoryProperties::memoryHeaps[i].size + with a single allocation size VkMemoryAllocateInfo::allocationSize + and return error instead of leaving up to Vulkan implementation what to do in such cases. + It protects agaist validation error VUID-vkAllocateMemory-pAllocateInfo-01713. + On the other hand, allowing exceeding this size may result in a successful allocation despite the validation error. + */ + #define VMA_DEBUG_DONT_EXCEED_HEAP_SIZE_WITH_ALLOCATION_SIZE (1) +#endif + +#ifndef VMA_SMALL_HEAP_MAX_SIZE + /// Maximum size of a memory heap in Vulkan to consider it "small". + #define VMA_SMALL_HEAP_MAX_SIZE (1024ULL * 1024 * 1024) +#endif + +#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE + /// Default size of a block allocated as single VkDeviceMemory from a "large" heap. + #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ULL * 1024 * 1024) +#endif + +/* +Mapping hysteresis is a logic that launches when vmaMapMemory/vmaUnmapMemory is called +or a persistently mapped allocation is created and destroyed several times in a row. 
+It keeps additional +1 mapping of a device memory block to prevent calling actual +vkMapMemory/vkUnmapMemory too many times, which may improve performance and help +tools like RenderDoc. +*/ +#ifndef VMA_MAPPING_HYSTERESIS_ENABLED + #define VMA_MAPPING_HYSTERESIS_ENABLED 1 +#endif + +#define VMA_VALIDATE(cond) do { if(!(cond)) { \ + VMA_ASSERT(0 && "Validation failed: " #cond); \ + return false; \ + } } while(false) + +/******************************************************************************* +END OF CONFIGURATION +*/ +#endif // _VMA_CONFIGURATION + +namespace +{ +constexpr uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC; +constexpr uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF; +// Decimal 2139416166, float NaN, little-endian binary 66 E6 84 7F. +constexpr uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666; + +// Copy of some Vulkan definitions so we don't need to check their existence just to handle few constants. +constexpr uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040; +constexpr uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080; +constexpr uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000; +constexpr uint32_t VK_IMAGE_CREATE_DISJOINT_BIT_COPY = 0x00000200; +constexpr int32_t VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT_COPY = 1000158000; +constexpr uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000U; +constexpr uint32_t VMA_ALLOCATION_TRY_COUNT = 32; +constexpr uint32_t VMA_VENDOR_ID_AMD = 4098; + +// This one is tricky. Vulkan specification defines this code as available since +// Vulkan 1.0, but doesn't actually define it in Vulkan SDK earlier than 1.2.131. +// See pull request #207. +#define VK_ERROR_UNKNOWN_COPY ((VkResult)-13) +} // namespace + + +#if VMA_STATS_STRING_ENABLED +// Correspond to values of enum VmaSuballocationType. +const char* const VMA_SUBALLOCATION_TYPE_NAMES[] = +{ + "FREE", + "UNKNOWN", + "BUFFER", + "IMAGE_UNKNOWN", + "IMAGE_LINEAR", + "IMAGE_OPTIMAL", +}; +#endif + +const VkAllocationCallbacks VmaEmptyAllocationCallbacks = + { VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL }; + + +#ifndef _VMA_ENUM_DECLARATIONS + +enum VmaSuballocationType +{ + VMA_SUBALLOCATION_TYPE_FREE = 0, + VMA_SUBALLOCATION_TYPE_UNKNOWN = 1, + VMA_SUBALLOCATION_TYPE_BUFFER = 2, + VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, + VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4, + VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5, + VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF +}; + +enum VMA_CACHE_OPERATION +{ + VMA_CACHE_FLUSH, + VMA_CACHE_INVALIDATE +}; + +enum class VmaAllocationRequestType +{ + Normal, + TLSF, + // Used by "Linear" algorithm. + UpperAddress, + EndOf1st, + EndOf2nd, +}; + +#endif // _VMA_ENUM_DECLARATIONS + +#ifndef _VMA_FORWARD_DECLARATIONS +// Opaque handle used by allocation algorithms to identify single allocation in any conforming way. 
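+/*
+Illustrative sketch (not part of this header) showing how the debug macros above are
+typically combined: a non-zero VMA_DEBUG_MARGIN together with VMA_DEBUG_DETECT_CORRUPTION
+makes the library write VMA_CORRUPTION_DETECTION_MAGIC_VALUE after every allocation and
+validate it later. The allocator variable is assumed to exist; vmaCheckCorruption() is
+declared earlier in this header.
+
+    // Before the single #include that also defines VMA_IMPLEMENTATION:
+    #define VMA_DEBUG_MARGIN 16
+    #define VMA_DEBUG_DETECT_CORRUPTION 1
+    #define VMA_IMPLEMENTATION
+    #include "vk_mem_alloc.h"
+
+    // Later, e.g. once per frame in a debug build, check margins in all memory types:
+    VkResult res = vmaCheckCorruption(allocator, UINT32_MAX);
+    // A failure code is returned if a damaged margin (overwritten magic value) is found.
+*/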
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VmaAllocHandle); + +struct VmaBufferImageUsage; + +struct VmaMutexLock; +struct VmaMutexLockRead; +struct VmaMutexLockWrite; + +template +struct AtomicTransactionalIncrement; + +template +struct VmaStlAllocator; + +template +class VmaVector; + +template +class VmaSmallVector; + +template +class VmaPoolAllocator; + +template +struct VmaListItem; + +template +class VmaRawList; + +template +class VmaList; + +template +class VmaIntrusiveLinkedList; + +#if VMA_STATS_STRING_ENABLED +class VmaStringBuilder; +class VmaJsonWriter; +#endif + +class VmaDeviceMemoryBlock; + +struct VmaDedicatedAllocationListItemTraits; +class VmaDedicatedAllocationList; + +struct VmaSuballocation; +struct VmaSuballocationOffsetLess; +struct VmaSuballocationOffsetGreater; +struct VmaSuballocationItemSizeLess; + +typedef VmaList> VmaSuballocationList; + +struct VmaAllocationRequest; + +class VmaBlockMetadata; +class VmaBlockMetadata_Linear; +class VmaBlockMetadata_TLSF; + +class VmaBlockVector; + +struct VmaPoolListItemTraits; + +struct VmaCurrentBudgetData; + +class VmaAllocationObjectAllocator; + +#endif // _VMA_FORWARD_DECLARATIONS + +#ifndef _VMA_BUFFER_IMAGE_USAGE + +// Finds structure with s->sType == sType in mainStruct->pNext chain. +// Returns pointer to it. If not found, returns null. +template +inline const FindT* VmaPnextChainFind(const MainT* mainStruct, VkStructureType sType) +{ + for(const VkBaseInStructure* s = (const VkBaseInStructure*)mainStruct->pNext; + s != VMA_NULL; s = s->pNext) + { + if(s->sType == sType) + { + return (const FindT*)s; + } + } + return VMA_NULL; +} + +// An abstraction over buffer or image `usage` flags, depending on available extensions. +struct VmaBufferImageUsage +{ +#if VMA_KHR_MAINTENANCE5 + typedef uint64_t BaseType; // VkFlags64 +#else + typedef uint32_t BaseType; // VkFlags32 +#endif + + static const VmaBufferImageUsage UNKNOWN; + + BaseType Value; + + VmaBufferImageUsage() { *this = UNKNOWN; } + explicit VmaBufferImageUsage(BaseType usage) : Value(usage) { } + VmaBufferImageUsage(const VkBufferCreateInfo &createInfo, bool useKhrMaintenance5); + explicit VmaBufferImageUsage(const VkImageCreateInfo &createInfo); + + bool operator==(const VmaBufferImageUsage& rhs) const { return Value == rhs.Value; } + bool operator!=(const VmaBufferImageUsage& rhs) const { return Value != rhs.Value; } + + bool Contains(BaseType flag) const { return (Value & flag) != 0; } + bool ContainsDeviceAccess() const + { + // This relies on values of VK_IMAGE_USAGE_TRANSFER* being the same as VK_BUFFER_IMAGE_TRANSFER*. + return (Value & ~BaseType(VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT)) != 0; + } +}; + +const VmaBufferImageUsage VmaBufferImageUsage::UNKNOWN = VmaBufferImageUsage(0); + +VmaBufferImageUsage::VmaBufferImageUsage(const VkBufferCreateInfo &createInfo, + bool useKhrMaintenance5) +{ +#if VMA_KHR_MAINTENANCE5 + if(useKhrMaintenance5) + { + // If VkBufferCreateInfo::pNext chain contains VkBufferUsageFlags2CreateInfoKHR, + // take usage from it and ignore VkBufferCreateInfo::usage, per specification + // of the VK_KHR_maintenance5 extension. 
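+        /*
+        Illustrative application-side sketch (not part of this constructor): this branch is
+        entered when useKhrMaintenance5 is set, which typically corresponds to creating the
+        allocator with VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE5_BIT and chaining the usage in
+        VkBufferUsageFlags2CreateInfoKHR:
+
+            VkBufferUsageFlags2CreateInfoKHR usage2 = { VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR };
+            usage2.usage = VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR;
+
+            VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+            bufCreateInfo.pNext = &usage2;
+            bufCreateInfo.size  = 65536;
+            // bufCreateInfo.usage may stay 0 here - usage2.usage takes precedence below.
+        */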
+ const VkBufferUsageFlags2CreateInfoKHR* const usageFlags2 = + VmaPnextChainFind(&createInfo, VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR); + if(usageFlags2 != VMA_NULL) + { + this->Value = usageFlags2->usage; + return; + } + } +#endif + + this->Value = (BaseType)createInfo.usage; +} + +VmaBufferImageUsage::VmaBufferImageUsage(const VkImageCreateInfo &createInfo) + : Value((BaseType)createInfo.usage) +{ + // Maybe in the future there will be VK_KHR_maintenanceN extension with structure + // VkImageUsageFlags2CreateInfoKHR, like the one for buffers... +} + +#endif // _VMA_BUFFER_IMAGE_USAGE + +#ifndef _VMA_FUNCTIONS + +namespace +{ + +/* +Returns number of bits set to 1 in (v). + +On specific platforms and compilers you can use intrinsics like: + +Visual Studio: + return __popcnt(v); +GCC, Clang: + return static_cast(__builtin_popcount(v)); + +Define macro VMA_COUNT_BITS_SET to provide your optimized implementation. +But you need to check in runtime whether user's CPU supports these, as some old processors don't. +*/ +inline uint32_t VmaCountBitsSet(uint32_t v) +{ +#if VMA_CPP20 + return std::popcount(v); +#else + uint32_t c = v - ((v >> 1) & 0x55555555); + c = ((c >> 2) & 0x33333333) + (c & 0x33333333); + c = ((c >> 4) + c) & 0x0F0F0F0F; + c = ((c >> 8) + c) & 0x00FF00FF; + c = ((c >> 16) + c) & 0x0000FFFF; + return c; +#endif +} + +inline uint8_t VmaBitScanLSB(uint64_t mask) +{ +#if defined(_MSC_VER) && defined(_WIN64) + unsigned long pos; + if (_BitScanForward64(&pos, mask)) + return static_cast(pos); + return UINT8_MAX; +#elif VMA_CPP20 + if(mask != 0) + return static_cast(std::countr_zero(mask)); + return UINT8_MAX; +#elif defined __GNUC__ || defined __clang__ + return static_cast(__builtin_ffsll(mask)) - 1U; +#else + uint8_t pos = 0; + uint64_t bit = 1; + do + { + if (mask & bit) + return pos; + bit <<= 1; + } while (pos++ < 63); + return UINT8_MAX; +#endif +} + +inline uint8_t VmaBitScanLSB(uint32_t mask) +{ +#ifdef _MSC_VER + unsigned long pos; + if (_BitScanForward(&pos, mask)) + return static_cast(pos); + return UINT8_MAX; +#elif VMA_CPP20 + if(mask != 0) + return static_cast(std::countr_zero(mask)); + return UINT8_MAX; +#elif defined __GNUC__ || defined __clang__ + return static_cast(__builtin_ffs(mask)) - 1U; +#else + uint8_t pos = 0; + uint32_t bit = 1; + do + { + if (mask & bit) + return pos; + bit <<= 1; + } while (pos++ < 31); + return UINT8_MAX; +#endif +} + +inline uint8_t VmaBitScanMSB(uint64_t mask) +{ +#if defined(_MSC_VER) && defined(_WIN64) + unsigned long pos; + if (_BitScanReverse64(&pos, mask)) + return static_cast(pos); +#elif VMA_CPP20 + if(mask != 0) + return 63 - static_cast(std::countl_zero(mask)); +#elif defined __GNUC__ || defined __clang__ + if (mask != 0) + return 63 - static_cast(__builtin_clzll(mask)); +#else + uint8_t pos = 63; + uint64_t bit = 1ULL << 63; + do + { + if (mask & bit) + return pos; + bit >>= 1; + } while (pos-- > 0); +#endif + return UINT8_MAX; +} + +inline uint8_t VmaBitScanMSB(uint32_t mask) +{ +#ifdef _MSC_VER + unsigned long pos; + if (_BitScanReverse(&pos, mask)) + return static_cast(pos); +#elif VMA_CPP20 + if(mask != 0) + return 31 - static_cast(std::countl_zero(mask)); +#elif defined __GNUC__ || defined __clang__ + if (mask != 0) + return 31 - static_cast(__builtin_clz(mask)); +#else + uint8_t pos = 31; + uint32_t bit = 1UL << 31; + do + { + if (mask & bit) + return pos; + bit >>= 1; + } while (pos-- > 0); +#endif + return UINT8_MAX; +} + +/* +Returns true if given number is a power of two. 
+T must be unsigned integer number or signed integer but always nonnegative. +For 0 returns true. +*/ +template +inline bool VmaIsPow2(T x) +{ + return (x & (x - 1)) == 0; +} + +// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16. +// Use types like uint32_t, uint64_t as T. +template +inline T VmaAlignUp(T val, T alignment) +{ + VMA_HEAVY_ASSERT(VmaIsPow2(alignment)); + return (val + alignment - 1) & ~(alignment - 1); +} + +// Aligns given value down to nearest multiply of align value. For example: VmaAlignDown(11, 8) = 8. +// Use types like uint32_t, uint64_t as T. +template +inline T VmaAlignDown(T val, T alignment) +{ + VMA_HEAVY_ASSERT(VmaIsPow2(alignment)); + return val & ~(alignment - 1); +} + +// Division with mathematical rounding to nearest number. +template +inline T VmaRoundDiv(T x, T y) +{ + return (x + (y / (T)2)) / y; +} + +// Divide by 'y' and round up to nearest integer. +template +inline T VmaDivideRoundingUp(T x, T y) +{ + return (x + y - (T)1) / y; +} + +// Returns smallest power of 2 greater or equal to v. +inline uint32_t VmaNextPow2(uint32_t v) +{ + v--; + v |= v >> 1; + v |= v >> 2; + v |= v >> 4; + v |= v >> 8; + v |= v >> 16; + v++; + return v; +} + +inline uint64_t VmaNextPow2(uint64_t v) +{ + v--; + v |= v >> 1; + v |= v >> 2; + v |= v >> 4; + v |= v >> 8; + v |= v >> 16; + v |= v >> 32; + v++; + return v; +} + +// Returns largest power of 2 less or equal to v. +inline uint32_t VmaPrevPow2(uint32_t v) +{ + v |= v >> 1; + v |= v >> 2; + v |= v >> 4; + v |= v >> 8; + v |= v >> 16; + v = v ^ (v >> 1); + return v; +} + +inline uint64_t VmaPrevPow2(uint64_t v) +{ + v |= v >> 1; + v |= v >> 2; + v |= v >> 4; + v |= v >> 8; + v |= v >> 16; + v |= v >> 32; + v = v ^ (v >> 1); + return v; +} + +inline bool VmaStrIsEmpty(const char* pStr) +{ + return pStr == VMA_NULL || *pStr == '\0'; +} + +/* +Returns true if two memory blocks occupy overlapping pages. +ResourceA must be in less memory offset than ResourceB. + +Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)" +chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity". +*/ +inline bool VmaBlocksOnSamePage( + VkDeviceSize resourceAOffset, + VkDeviceSize resourceASize, + VkDeviceSize resourceBOffset, + VkDeviceSize pageSize) +{ + VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0); + VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1; + VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1); + VkDeviceSize resourceBStart = resourceBOffset; + VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1); + return resourceAEndPage == resourceBStartPage; +} + +/* +Returns true if given suballocation types could conflict and must respect +VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer +or linear image and another one is optimal image. If type is unknown, behave +conservatively. 
+*/ +inline bool VmaIsBufferImageGranularityConflict( + VmaSuballocationType suballocType1, + VmaSuballocationType suballocType2) +{ + if (suballocType1 > suballocType2) + { + std::swap(suballocType1, suballocType2); + } + + switch (suballocType1) + { + case VMA_SUBALLOCATION_TYPE_FREE: + return false; + case VMA_SUBALLOCATION_TYPE_UNKNOWN: + return true; + case VMA_SUBALLOCATION_TYPE_BUFFER: + return + suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN || + suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL; + case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN: + return + suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN || + suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR || + suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL; + case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR: + return + suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL; + case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL: + return false; + default: + VMA_ASSERT(0); + return true; + } +} + +void VmaWriteMagicValue(void* pData, VkDeviceSize offset) +{ +#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION + uint32_t* pDst = (uint32_t*)((char*)pData + offset); + const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t); + for (size_t i = 0; i < numberCount; ++i, ++pDst) + { + *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE; + } +#else + // no-op +#endif +} + +bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset) +{ +#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION + const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset); + const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t); + for (size_t i = 0; i < numberCount; ++i, ++pSrc) + { + if (*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE) + { + return false; + } + } +#endif + return true; +} + +/* +Fills structure with parameters of an example buffer to be used for transfers +during GPU memory defragmentation. +*/ +void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo) +{ + memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo)); + outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; + outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; + outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE; // Example size. +} + + +/* +Performs binary search and returns iterator to first element that is greater or +equal to (key), according to comparison (cmp). + +Cmp should return true if first argument is less than second argument. + +Returned value is the found element, if present in the collection or place where +new element with value (key) should be inserted. +*/ +template +IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT& key, const CmpLess& cmp) +{ + size_t down = 0; + size_t up = size_t(end - beg); + while (down < up) + { + const size_t mid = down + (up - down) / 2; // Overflow-safe midpoint calculation + if (cmp(*(beg + mid), key)) + { + down = mid + 1; + } + else + { + up = mid; + } + } + return beg + down; +} + +template +IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp) +{ + IterT it = VmaBinaryFindFirstNotLess( + beg, end, value, cmp); + if (it == end || + (!cmp(*it, value) && !cmp(value, *it))) + { + return it; + } + return end; +} + +/* +Returns true if all pointers in the array are not-null and unique. +Warning! O(n^2) complexity. Use only inside VMA_HEAVY_ASSERT. +T must be pointer type, e.g. VmaAllocation, VmaPool. 
+*/ +template +bool VmaValidatePointerArray(uint32_t count, const T* arr) +{ + for (uint32_t i = 0; i < count; ++i) + { + const T iPtr = arr[i]; + if (iPtr == VMA_NULL) + { + return false; + } + for (uint32_t j = i + 1; j < count; ++j) + { + if (iPtr == arr[j]) + { + return false; + } + } + } + return true; +} + +template +inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct) +{ + newStruct->pNext = mainStruct->pNext; + mainStruct->pNext = newStruct; +} + +// This is the main algorithm that guides the selection of a memory type best for an allocation - +// converts usage to required/preferred/not preferred flags. +bool FindMemoryPreferences( + bool isIntegratedGPU, + const VmaAllocationCreateInfo& allocCreateInfo, + VmaBufferImageUsage bufImgUsage, + VkMemoryPropertyFlags& outRequiredFlags, + VkMemoryPropertyFlags& outPreferredFlags, + VkMemoryPropertyFlags& outNotPreferredFlags) +{ + outRequiredFlags = allocCreateInfo.requiredFlags; + outPreferredFlags = allocCreateInfo.preferredFlags; + outNotPreferredFlags = 0; + + switch(allocCreateInfo.usage) + { + case VMA_MEMORY_USAGE_UNKNOWN: + break; + case VMA_MEMORY_USAGE_GPU_ONLY: + if(!isIntegratedGPU || (outPreferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0) + { + outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; + } + break; + case VMA_MEMORY_USAGE_CPU_ONLY: + outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; + break; + case VMA_MEMORY_USAGE_CPU_TO_GPU: + outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; + if(!isIntegratedGPU || (outPreferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0) + { + outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; + } + break; + case VMA_MEMORY_USAGE_GPU_TO_CPU: + outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; + outPreferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT; + break; + case VMA_MEMORY_USAGE_CPU_COPY: + outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; + break; + case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED: + outRequiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT; + break; + case VMA_MEMORY_USAGE_AUTO: + case VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE: + case VMA_MEMORY_USAGE_AUTO_PREFER_HOST: + { + if(bufImgUsage == VmaBufferImageUsage::UNKNOWN) + { + VMA_ASSERT(0 && "VMA_MEMORY_USAGE_AUTO* values can only be used with functions like vmaCreateBuffer, vmaCreateImage so that the details of the created resource are known." + " Maybe you use VkBufferUsageFlags2CreateInfoKHR but forgot to use VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE5_BIT?" ); + return false; + } + + const bool deviceAccess = bufImgUsage.ContainsDeviceAccess(); + const bool hostAccessSequentialWrite = (allocCreateInfo.flags & VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT) != 0; + const bool hostAccessRandom = (allocCreateInfo.flags & VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT) != 0; + const bool hostAccessAllowTransferInstead = (allocCreateInfo.flags & VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT) != 0; + const bool preferDevice = allocCreateInfo.usage == VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE; + const bool preferHost = allocCreateInfo.usage == VMA_MEMORY_USAGE_AUTO_PREFER_HOST; + + // CPU random access - e.g. a buffer written to or transferred from GPU to read back on CPU. + if(hostAccessRandom) + { + // Prefer cached. Cannot require it, because some platforms don't have it (e.g. Raspberry Pi - see #362)! 
+ outPreferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT; + + if (!isIntegratedGPU && deviceAccess && hostAccessAllowTransferInstead && !preferHost) + { + // Nice if it will end up in HOST_VISIBLE, but more importantly prefer DEVICE_LOCAL. + // Omitting HOST_VISIBLE here is intentional. + // In case there is DEVICE_LOCAL | HOST_VISIBLE | HOST_CACHED, it will pick that one. + // Otherwise, this will give same weight to DEVICE_LOCAL as HOST_VISIBLE | HOST_CACHED and select the former if occurs first on the list. + outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; + } + else + { + if(hostAccessAllowTransferInstead) + outPreferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; + else + // Always CPU memory. + outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; + } + } + // CPU sequential write - may be CPU or host-visible GPU memory, uncached and write-combined. + else if(hostAccessSequentialWrite) + { + // Want uncached and write-combined. + outNotPreferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT; + + if(!isIntegratedGPU && deviceAccess && hostAccessAllowTransferInstead && !preferHost) + { + outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; + } + else + { + if(hostAccessAllowTransferInstead) + outPreferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; + else + // Always CPU memory. + outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; + + // Direct GPU access, CPU sequential write (e.g. a dynamic uniform buffer updated every frame) + if(deviceAccess) + { + // Could go to CPU memory or GPU BAR/unified. Up to the user to decide. If no preference, choose GPU memory. + if(preferHost) + outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; + else + outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; + } + // GPU no direct access, CPU sequential write (e.g. an upload buffer to be transferred to the GPU) + else + { + // Could go to CPU memory or GPU BAR/unified. Up to the user to decide. If no preference, choose CPU memory. + if(preferDevice) + outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; + else + outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; + } + } + } + // No CPU access + else + { + // if(deviceAccess) + // + // GPU access, no CPU access (e.g. a color attachment image) - prefer GPU memory, + // unless there is a clear preference from the user not to do so. + // + // else: + // + // No direct GPU access, no CPU access, just transfers. + // It may be staging copy intended for e.g. preserving image for next frame (then better GPU memory) or + // a "swap file" copy to free some GPU memory (then better CPU memory). + // Up to the user to decide. If no preferece, assume the former and choose GPU memory. + + if(preferHost) + outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; + else + outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; + } + break; + } + default: + VMA_ASSERT(0); + } + + // Avoid DEVICE_COHERENT unless explicitly requested. 
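+    /*
+    Illustrative application-side sketch (not part of this function): the flag combinations
+    interpreted above typically come from a call such as the following - here a staging
+    buffer that the CPU fills sequentially and the GPU only reads via transfer. The
+    allocator variable is assumed to exist.
+
+        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+        bufCreateInfo.size  = 65536;
+        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+
+        VmaAllocationCreateInfo allocCreateInfo = {};
+        allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT |
+                                VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+        VkBuffer buf;
+        VmaAllocation alloc;
+        VmaAllocationInfo allocInfo;
+        vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
+    */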
+ if(((allocCreateInfo.requiredFlags | allocCreateInfo.preferredFlags) & + (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0) + { + outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY; + } + + return true; +} + +//////////////////////////////////////////////////////////////////////////////// +// Memory allocation + +inline void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment) +{ + void* result = VMA_NULL; + if ((pAllocationCallbacks != VMA_NULL) && + (pAllocationCallbacks->pfnAllocation != VMA_NULL)) + { + result = (*pAllocationCallbacks->pfnAllocation)( + pAllocationCallbacks->pUserData, + size, + alignment, + VK_SYSTEM_ALLOCATION_SCOPE_OBJECT); + } + else + { + result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment); + } + VMA_ASSERT(result != VMA_NULL && "CPU memory allocation failed."); + return result; +} + +inline void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr) +{ + if ((pAllocationCallbacks != VMA_NULL) && + (pAllocationCallbacks->pfnFree != VMA_NULL)) + { + (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr); + } + else + { + VMA_SYSTEM_ALIGNED_FREE(ptr); + } +} + +template +T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks) +{ + return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T)); +} + +template +T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count) +{ + return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T)); +} + +#define vma_new(allocator, type) new(VmaAllocate(allocator))(type) + +#define vma_new_array(allocator, type, count) new(VmaAllocateArray((allocator), (count)))(type) + +template +void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr) +{ + ptr->~T(); + VmaFree(pAllocationCallbacks, ptr); +} + +template +void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count) +{ + if (ptr != VMA_NULL) + { + for (size_t i = count; i--; ) + { + ptr[i].~T(); + } + VmaFree(pAllocationCallbacks, ptr); + } +} + +char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr) +{ + if (srcStr != VMA_NULL) + { + const size_t len = strlen(srcStr); + char* const result = vma_new_array(allocs, char, len + 1); + memcpy(result, srcStr, len + 1); + return result; + } + return VMA_NULL; +} + +#if VMA_STATS_STRING_ENABLED +char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr, size_t strLen) +{ + if (srcStr != VMA_NULL) + { + char* const result = vma_new_array(allocs, char, strLen + 1); + memcpy(result, srcStr, strLen); + result[strLen] = '\0'; + return result; + } + return VMA_NULL; +} +#endif // VMA_STATS_STRING_ENABLED + +void VmaFreeString(const VkAllocationCallbacks* allocs, char* str) +{ + if (str != VMA_NULL) + { + const size_t len = strlen(str); + vma_delete_array(allocs, str, len + 1); + } +} + +template +size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value) +{ + const size_t indexToInsert = VmaBinaryFindFirstNotLess( + vector.data(), + vector.data() + vector.size(), + value, + CmpLess()) - vector.data(); + VmaVectorInsert(vector, indexToInsert, value); + return indexToInsert; +} + +template +bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value) +{ + CmpLess comparator; + typename VectorT::iterator it = VmaBinaryFindFirstNotLess( + vector.begin(), + vector.end(), + value, + 
comparator); + if ((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it)) + { + size_t indexToRemove = it - vector.begin(); + VmaVectorRemove(vector, indexToRemove); + return true; + } + return false; +} + +} // namespace + +#endif // _VMA_FUNCTIONS + +#ifndef _VMA_STATISTICS_FUNCTIONS + +namespace +{ + +void VmaClearStatistics(VmaStatistics& outStats) +{ + outStats.blockCount = 0; + outStats.allocationCount = 0; + outStats.blockBytes = 0; + outStats.allocationBytes = 0; +} + +void VmaAddStatistics(VmaStatistics& inoutStats, const VmaStatistics& src) +{ + inoutStats.blockCount += src.blockCount; + inoutStats.allocationCount += src.allocationCount; + inoutStats.blockBytes += src.blockBytes; + inoutStats.allocationBytes += src.allocationBytes; +} + +void VmaClearDetailedStatistics(VmaDetailedStatistics& outStats) +{ + VmaClearStatistics(outStats.statistics); + outStats.unusedRangeCount = 0; + outStats.allocationSizeMin = VK_WHOLE_SIZE; + outStats.allocationSizeMax = 0; + outStats.unusedRangeSizeMin = VK_WHOLE_SIZE; + outStats.unusedRangeSizeMax = 0; +} + +void VmaAddDetailedStatisticsAllocation(VmaDetailedStatistics& inoutStats, VkDeviceSize size) +{ + inoutStats.statistics.allocationCount++; + inoutStats.statistics.allocationBytes += size; + inoutStats.allocationSizeMin = VMA_MIN(inoutStats.allocationSizeMin, size); + inoutStats.allocationSizeMax = VMA_MAX(inoutStats.allocationSizeMax, size); +} + +void VmaAddDetailedStatisticsUnusedRange(VmaDetailedStatistics& inoutStats, VkDeviceSize size) +{ + inoutStats.unusedRangeCount++; + inoutStats.unusedRangeSizeMin = VMA_MIN(inoutStats.unusedRangeSizeMin, size); + inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, size); +} + +void VmaAddDetailedStatistics(VmaDetailedStatistics& inoutStats, const VmaDetailedStatistics& src) +{ + VmaAddStatistics(inoutStats.statistics, src.statistics); + inoutStats.unusedRangeCount += src.unusedRangeCount; + inoutStats.allocationSizeMin = VMA_MIN(inoutStats.allocationSizeMin, src.allocationSizeMin); + inoutStats.allocationSizeMax = VMA_MAX(inoutStats.allocationSizeMax, src.allocationSizeMax); + inoutStats.unusedRangeSizeMin = VMA_MIN(inoutStats.unusedRangeSizeMin, src.unusedRangeSizeMin); + inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, src.unusedRangeSizeMax); +} + +} // namespace + +#endif // _VMA_STATISTICS_FUNCTIONS + +#ifndef _VMA_MUTEX_LOCK +// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope). +struct VmaMutexLock +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaMutexLock) +public: + explicit VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) : + m_pMutex(useMutex ? &mutex : VMA_NULL) + { + if (m_pMutex) { m_pMutex->Lock(); } + } + ~VmaMutexLock() { if (m_pMutex) { m_pMutex->Unlock(); } } + +private: + VMA_MUTEX* m_pMutex; +}; + +// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading. +struct VmaMutexLockRead +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaMutexLockRead) +public: + VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) : + m_pMutex(useMutex ? &mutex : VMA_NULL) + { + if (m_pMutex) { m_pMutex->LockRead(); } + } + ~VmaMutexLockRead() { if (m_pMutex) { m_pMutex->UnlockRead(); } } + +private: + VMA_RW_MUTEX* m_pMutex; +}; + +// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing. 
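+/*
+Illustrative sketch (member names are hypothetical): these RAII helpers make locking both
+exception-safe and optional throughout the implementation, e.g.:
+
+    {
+        VmaMutexLockRead lock(m_SomeRWMutex, m_UseMutex); // no-op when m_UseMutex == false
+        // ... read shared state ...
+    } // unlocked automatically when 'lock' goes out of scope
+*/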
+struct VmaMutexLockWrite +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaMutexLockWrite) +public: + VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) + : m_pMutex(useMutex ? &mutex : VMA_NULL) + { + if (m_pMutex) { m_pMutex->LockWrite(); } + } + ~VmaMutexLockWrite() { if (m_pMutex) { m_pMutex->UnlockWrite(); } } + +private: + VMA_RW_MUTEX* m_pMutex; +}; + +#if VMA_DEBUG_GLOBAL_MUTEX + static VMA_MUTEX gDebugGlobalMutex; + #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); +#else + #define VMA_DEBUG_GLOBAL_MUTEX_LOCK +#endif +#endif // _VMA_MUTEX_LOCK + +#ifndef _VMA_ATOMIC_TRANSACTIONAL_INCREMENT +// An object that increments given atomic but decrements it back in the destructor unless Commit() is called. +template +struct AtomicTransactionalIncrement +{ +public: + using T = decltype(AtomicT().load()); + + ~AtomicTransactionalIncrement() + { + if(m_Atomic) + --(*m_Atomic); + } + + void Commit() { m_Atomic = VMA_NULL; } + T Increment(AtomicT* atomic) + { + m_Atomic = atomic; + return m_Atomic->fetch_add(1); + } + +private: + AtomicT* m_Atomic = VMA_NULL; +}; +#endif // _VMA_ATOMIC_TRANSACTIONAL_INCREMENT + +#ifndef _VMA_STL_ALLOCATOR +// STL-compatible allocator. +template +struct VmaStlAllocator +{ + const VkAllocationCallbacks* const m_pCallbacks; + typedef T value_type; + + explicit VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) {} + template + explicit VmaStlAllocator(const VmaStlAllocator& src) : m_pCallbacks(src.m_pCallbacks) {} + VmaStlAllocator(const VmaStlAllocator&) = default; + VmaStlAllocator& operator=(const VmaStlAllocator&) = delete; + + T* allocate(size_t n); + void deallocate(T* p, size_t n); + + template + bool operator==(const VmaStlAllocator& rhs) const + { + return m_pCallbacks == rhs.m_pCallbacks; + } + template + bool operator!=(const VmaStlAllocator& rhs) const + { + return m_pCallbacks != rhs.m_pCallbacks; + } +}; + +template +T* VmaStlAllocator::allocate(size_t n) { return VmaAllocateArray(m_pCallbacks, n); } + +template +void VmaStlAllocator::deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); } +#endif // _VMA_STL_ALLOCATOR + +#ifndef _VMA_VECTOR +/* Class with interface compatible with subset of std::vector. +T must be POD because constructors and destructors are not called and memcpy is +used for these objects. */ +template +class VmaVector +{ +public: + typedef T value_type; + typedef T* iterator; + typedef const T* const_iterator; + + explicit VmaVector(const AllocatorT& allocator); + VmaVector(size_t count, const AllocatorT& allocator); + // This version of the constructor is here for compatibility with pre-C++14 std::vector. + // value is unused. 
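+    /*
+    Illustrative sketch (not part of this class): the element type and the allocator are
+    template parameters; the allocator wraps VkAllocationCallbacks. pAllocationCallbacks
+    below is an assumed, already-existing const VkAllocationCallbacks* value.
+
+        VmaVector< uint32_t, VmaStlAllocator<uint32_t> > indices(
+            VmaStlAllocator<uint32_t>(pAllocationCallbacks));
+        indices.push_back(42u);
+    */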
+ VmaVector(size_t count, const T& value, const AllocatorT& allocator) : VmaVector(count, allocator) {} + VmaVector(const VmaVector& src); + VmaVector& operator=(const VmaVector& rhs); + ~VmaVector(); + + bool empty() const { return m_Count == 0; } + size_t size() const { return m_Count; } + T* data() { return m_pArray; } + T& front() { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[0]; } + T& back() { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[m_Count - 1]; } + const T* data() const { return m_pArray; } + const T& front() const { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[0]; } + const T& back() const { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[m_Count - 1]; } + + iterator begin() { return m_pArray; } + iterator end() { return m_pArray + m_Count; } + const_iterator cbegin() const { return m_pArray; } + const_iterator cend() const { return m_pArray + m_Count; } + const_iterator begin() const { return cbegin(); } + const_iterator end() const { return cend(); } + + void pop_front() { VMA_HEAVY_ASSERT(m_Count > 0); remove(0); } + void pop_back() { VMA_HEAVY_ASSERT(m_Count > 0); resize(size() - 1); } + void push_front(const T& src) { insert(0, src); } + + void push_back(const T& src); + void reserve(size_t newCapacity, bool freeMemory = false); + void resize(size_t newCount); + void clear() { resize(0); } + void shrink_to_fit(); + void insert(size_t index, const T& src); + void remove(size_t index); + + T& operator[](size_t index) { VMA_HEAVY_ASSERT(index < m_Count); return m_pArray[index]; } + const T& operator[](size_t index) const { VMA_HEAVY_ASSERT(index < m_Count); return m_pArray[index]; } + +private: + AllocatorT m_Allocator; + T* m_pArray; + size_t m_Count; + size_t m_Capacity; +}; + +#ifndef _VMA_VECTOR_FUNCTIONS +template +VmaVector::~VmaVector() { VmaFree(m_Allocator.m_pCallbacks, m_pArray); } + +template +VmaVector::VmaVector(const AllocatorT& allocator) + : m_Allocator(allocator), + m_pArray(VMA_NULL), + m_Count(0), + m_Capacity(0) {} + +template +VmaVector::VmaVector(size_t count, const AllocatorT& allocator) + : m_Allocator(allocator), + m_pArray(count ? (T*)VmaAllocateArray(allocator.m_pCallbacks, count) : VMA_NULL), + m_Count(count), + m_Capacity(count) {} + +template +VmaVector::VmaVector(const VmaVector& src) + : m_Allocator(src.m_Allocator), + m_pArray(src.m_Count ? (T*)VmaAllocateArray(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL), + m_Count(src.m_Count), + m_Capacity(src.m_Count) +{ + if (m_Count != 0) + { + memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T)); + } +} + +template +VmaVector& VmaVector::operator=(const VmaVector& rhs) +{ + if (&rhs != this) + { + resize(rhs.m_Count); + if (m_Count != 0) + { + memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T)); + } + } + return *this; +} + +template +void VmaVector::push_back(const T& src) +{ + const size_t newIndex = size(); + resize(newIndex + 1); + m_pArray[newIndex] = src; +} + +template +void VmaVector::reserve(size_t newCapacity, bool freeMemory) +{ + newCapacity = VMA_MAX(newCapacity, m_Count); + + if ((newCapacity < m_Capacity) && !freeMemory) + { + newCapacity = m_Capacity; + } + + if (newCapacity != m_Capacity) + { + T* const newArray = newCapacity ? 
VmaAllocateArray(m_Allocator, newCapacity) : VMA_NULL; + if (m_Count != 0) + { + memcpy(newArray, m_pArray, m_Count * sizeof(T)); + } + VmaFree(m_Allocator.m_pCallbacks, m_pArray); + m_Capacity = newCapacity; + m_pArray = newArray; + } +} + +template +void VmaVector::resize(size_t newCount) +{ + size_t newCapacity = m_Capacity; + if (newCount > m_Capacity) + { + newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8)); + } + + if (newCapacity != m_Capacity) + { + VMA_HEAVY_ASSERT(newCapacity > 0); + T* const newArray = VmaAllocateArray(m_Allocator.m_pCallbacks, newCapacity); + const size_t elementsToCopy = VMA_MIN(m_Count, newCount); + if (elementsToCopy != 0) + { + memcpy(newArray, m_pArray, elementsToCopy * sizeof(T)); + } + VmaFree(m_Allocator.m_pCallbacks, m_pArray); + m_Capacity = newCapacity; + m_pArray = newArray; + } + + m_Count = newCount; +} + +template +void VmaVector::shrink_to_fit() +{ + if (m_Capacity > m_Count) + { + T* newArray = VMA_NULL; + if (m_Count > 0) + { + newArray = VmaAllocateArray(m_Allocator.m_pCallbacks, m_Count); + memcpy(newArray, m_pArray, m_Count * sizeof(T)); + } + VmaFree(m_Allocator.m_pCallbacks, m_pArray); + m_Capacity = m_Count; + m_pArray = newArray; + } +} + +template +void VmaVector::insert(size_t index, const T& src) +{ + VMA_HEAVY_ASSERT(index <= m_Count); + const size_t oldCount = size(); + resize(oldCount + 1); + if (index < oldCount) + { + memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T)); + } + m_pArray[index] = src; +} + +template +void VmaVector::remove(size_t index) +{ + VMA_HEAVY_ASSERT(index < m_Count); + const size_t oldCount = size(); + if (index < oldCount - 1) + { + memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T)); + } + resize(oldCount - 1); +} +#endif // _VMA_VECTOR_FUNCTIONS + +namespace +{ + +template +void VmaVectorInsert(VmaVector& vec, size_t index, const T& item) +{ + vec.insert(index, item); +} + +template +void VmaVectorRemove(VmaVector& vec, size_t index) +{ + vec.remove(index); +} + +} // namespace + +#endif // _VMA_VECTOR + +#ifndef _VMA_SMALL_VECTOR +/* +This is a vector (a variable-sized array), optimized for the case when the array is small. + +It contains some number of elements in-place, which allows it to avoid heap allocation +when the actual number of elements is below that threshold. This allows normal "small" +cases to be fast without losing generality for large inputs. +*/ +template +class VmaSmallVector +{ +public: + typedef T value_type; + typedef T* iterator; + + explicit VmaSmallVector(const AllocatorT& allocator); + VmaSmallVector(size_t count, const AllocatorT& allocator); + template + explicit VmaSmallVector(const VmaSmallVector&) = delete; + template + VmaSmallVector& operator=(const VmaSmallVector&) = delete; + ~VmaSmallVector() = default; + + bool empty() const { return m_Count == 0; } + size_t size() const { return m_Count; } + T* data() { return m_Count > N ? m_DynamicArray.data() : m_StaticArray; } + T& front() { VMA_HEAVY_ASSERT(m_Count > 0); return data()[0]; } + T& back() { VMA_HEAVY_ASSERT(m_Count > 0); return data()[m_Count - 1]; } + const T* data() const { return m_Count > N ? 
m_DynamicArray.data() : m_StaticArray; } + const T& front() const { VMA_HEAVY_ASSERT(m_Count > 0); return data()[0]; } + const T& back() const { VMA_HEAVY_ASSERT(m_Count > 0); return data()[m_Count - 1]; } + + iterator begin() { return data(); } + iterator end() { return data() + m_Count; } + + void pop_front() { VMA_HEAVY_ASSERT(m_Count > 0); remove(0); } + void pop_back() { VMA_HEAVY_ASSERT(m_Count > 0); resize(size() - 1); } + void push_front(const T& src) { insert(0, src); } + + void push_back(const T& src); + void resize(size_t newCount, bool freeMemory = false); + void clear(bool freeMemory = false); + void insert(size_t index, const T& src); + void remove(size_t index); + + T& operator[](size_t index) { VMA_HEAVY_ASSERT(index < m_Count); return data()[index]; } + const T& operator[](size_t index) const { VMA_HEAVY_ASSERT(index < m_Count); return data()[index]; } + +private: + size_t m_Count; + T m_StaticArray[N]; // Used when m_Size <= N + VmaVector m_DynamicArray; // Used when m_Size > N +}; + +#ifndef _VMA_SMALL_VECTOR_FUNCTIONS +template +VmaSmallVector::VmaSmallVector(const AllocatorT& allocator) + : m_Count(0), + m_DynamicArray(allocator) {} + +template +VmaSmallVector::VmaSmallVector(size_t count, const AllocatorT& allocator) + : m_Count(count), + m_DynamicArray(count > N ? count : 0, allocator) {} + +template +void VmaSmallVector::push_back(const T& src) +{ + const size_t newIndex = size(); + resize(newIndex + 1); + data()[newIndex] = src; +} + +template +void VmaSmallVector::resize(size_t newCount, bool freeMemory) +{ + if (newCount > N && m_Count > N) + { + // Any direction, staying in m_DynamicArray + m_DynamicArray.resize(newCount); + if (freeMemory) + { + m_DynamicArray.shrink_to_fit(); + } + } + else if (newCount > N && m_Count <= N) + { + // Growing, moving from m_StaticArray to m_DynamicArray + m_DynamicArray.resize(newCount); + if (m_Count > 0) + { + memcpy(m_DynamicArray.data(), m_StaticArray, m_Count * sizeof(T)); + } + } + else if (newCount <= N && m_Count > N) + { + // Shrinking, moving from m_DynamicArray to m_StaticArray + if (newCount > 0) + { + memcpy(m_StaticArray, m_DynamicArray.data(), newCount * sizeof(T)); + } + m_DynamicArray.resize(0); + if (freeMemory) + { + m_DynamicArray.shrink_to_fit(); + } + } + else + { + // Any direction, staying in m_StaticArray - nothing to do here + } + m_Count = newCount; +} + +template +void VmaSmallVector::clear(bool freeMemory) +{ + m_DynamicArray.clear(); + if (freeMemory) + { + m_DynamicArray.shrink_to_fit(); + } + m_Count = 0; +} + +template +void VmaSmallVector::insert(size_t index, const T& src) +{ + VMA_HEAVY_ASSERT(index <= m_Count); + const size_t oldCount = size(); + resize(oldCount + 1); + T* const dataPtr = data(); + if (index < oldCount) + { + // I know, this could be more optimal for case where memmove can be memcpy directly from m_StaticArray to m_DynamicArray. + memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) * sizeof(T)); + } + dataPtr[index] = src; +} + +template +void VmaSmallVector::remove(size_t index) +{ + VMA_HEAVY_ASSERT(index < m_Count); + const size_t oldCount = size(); + if (index < oldCount - 1) + { + // I know, this could be more optimal for case where memmove can be memcpy directly from m_DynamicArray to m_StaticArray. 
+        T* const dataPtr = data();
+        memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) * sizeof(T));
+    }
+    resize(oldCount - 1);
+}
+#endif // _VMA_SMALL_VECTOR_FUNCTIONS
+#endif // _VMA_SMALL_VECTOR
+
+#ifndef _VMA_POOL_ALLOCATOR
+/*
+Allocator for objects of type T using a list of arrays (pools) to speed up
+allocation. Number of elements that can be allocated is not bounded because
+allocator can create multiple blocks.
+*/
+template<typename T>
+class VmaPoolAllocator
+{
+    VMA_CLASS_NO_COPY_NO_MOVE(VmaPoolAllocator)
+public:
+    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
+    ~VmaPoolAllocator();
+    template<typename... Types> T* Alloc(Types&&... args);
+    void Free(T* ptr);
+
+private:
+    union Item
+    {
+        uint32_t NextFreeIndex;
+        alignas(T) char Value[sizeof(T)];
+    };
+    struct ItemBlock
+    {
+        Item* pItems;
+        uint32_t Capacity;
+        uint32_t FirstFreeIndex;
+    };
+
+    const VkAllocationCallbacks* m_pAllocationCallbacks;
+    const uint32_t m_FirstBlockCapacity;
+    VmaVector<ItemBlock, VmaStlAllocator<ItemBlock>> m_ItemBlocks;
+
+    ItemBlock& CreateNewBlock();
+};
+
+#ifndef _VMA_POOL_ALLOCATOR_FUNCTIONS
+template<typename T>
+VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity)
+    : m_pAllocationCallbacks(pAllocationCallbacks),
+    m_FirstBlockCapacity(firstBlockCapacity),
+    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
+{
+    VMA_ASSERT(m_FirstBlockCapacity > 1);
+}
+
+template<typename T>
+VmaPoolAllocator<T>::~VmaPoolAllocator()
+{
+    for (size_t i = m_ItemBlocks.size(); i--;)
+        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
+    m_ItemBlocks.clear();
+}
+
+template<typename T>
+template<typename... Types> T* VmaPoolAllocator<T>::Alloc(Types&&... args)
+{
+    for (size_t i = m_ItemBlocks.size(); i--; )
+    {
+        ItemBlock& block = m_ItemBlocks[i];
+        // This block has some free items: Use first one.
+        if (block.FirstFreeIndex != UINT32_MAX)
+        {
+            Item* const pItem = &block.pItems[block.FirstFreeIndex];
+            block.FirstFreeIndex = pItem->NextFreeIndex;
+            T* result = (T*)&pItem->Value;
+            new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
+            return result;
+        }
+    }
+
+    // No block has free item: Create new one and use it.
+    ItemBlock& newBlock = CreateNewBlock();
+    Item* const pItem = &newBlock.pItems[0];
+    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
+    T* result = (T*)&pItem->Value;
+    new(result) T(std::forward<Types>(args)...); // Explicit constructor call.
+    return result;
+}
+
+template<typename T>
+void VmaPoolAllocator<T>::Free(T* ptr)
+{
+    // Search all memory blocks to find ptr.
+    for (size_t i = m_ItemBlocks.size(); i--; )
+    {
+        ItemBlock& block = m_ItemBlocks[i];
+
+        // Casting to union.
+        Item* pItemPtr = VMA_NULL;
+        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));
+
+        // Check if pItemPtr is in address range of this block.
+        if ((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
+        {
+            ptr->~T(); // Explicit destructor call.
+            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
+            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
+            block.FirstFreeIndex = index;
+            return;
+        }
+    }
+    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
+}
+
+template<typename T>
+typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
+{
+    const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
+ m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2; + + const ItemBlock newBlock = + { + vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity), + newBlockCapacity, + 0 + }; + + m_ItemBlocks.push_back(newBlock); + + // Setup singly-linked list of all free items in this block. + for (uint32_t i = 0; i < newBlockCapacity - 1; ++i) + newBlock.pItems[i].NextFreeIndex = i + 1; + newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX; + return m_ItemBlocks.back(); +} +#endif // _VMA_POOL_ALLOCATOR_FUNCTIONS +#endif // _VMA_POOL_ALLOCATOR + +#ifndef _VMA_RAW_LIST +template +struct VmaListItem +{ + VmaListItem* pPrev; + VmaListItem* pNext; + T Value; +}; + +// Doubly linked list. +template +class VmaRawList +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaRawList) +public: + typedef VmaListItem ItemType; + + explicit VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks); + // Intentionally not calling Clear, because that would be unnecessary + // computations to return all items to m_ItemAllocator as free. + ~VmaRawList() = default; + + size_t GetCount() const { return m_Count; } + bool IsEmpty() const { return m_Count == 0; } + + ItemType* Front() { return m_pFront; } + ItemType* Back() { return m_pBack; } + const ItemType* Front() const { return m_pFront; } + const ItemType* Back() const { return m_pBack; } + + ItemType* PushFront(); + ItemType* PushBack(); + ItemType* PushFront(const T& value); + ItemType* PushBack(const T& value); + void PopFront(); + void PopBack(); + + // Item can be null - it means PushBack. + ItemType* InsertBefore(ItemType* pItem); + // Item can be null - it means PushFront. + ItemType* InsertAfter(ItemType* pItem); + ItemType* InsertBefore(ItemType* pItem, const T& value); + ItemType* InsertAfter(ItemType* pItem, const T& value); + + void Clear(); + void Remove(ItemType* pItem); + +private: + const VkAllocationCallbacks* const m_pAllocationCallbacks; + VmaPoolAllocator m_ItemAllocator; + ItemType* m_pFront; + ItemType* m_pBack; + size_t m_Count; +}; + +#ifndef _VMA_RAW_LIST_FUNCTIONS +template +VmaRawList::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) + : m_pAllocationCallbacks(pAllocationCallbacks), + m_ItemAllocator(pAllocationCallbacks, 128), + m_pFront(VMA_NULL), + m_pBack(VMA_NULL), + m_Count(0) {} + +template +VmaListItem* VmaRawList::PushFront() +{ + ItemType* const pNewItem = m_ItemAllocator.Alloc(); + pNewItem->pPrev = VMA_NULL; + if (IsEmpty()) + { + pNewItem->pNext = VMA_NULL; + m_pFront = pNewItem; + m_pBack = pNewItem; + m_Count = 1; + } + else + { + pNewItem->pNext = m_pFront; + m_pFront->pPrev = pNewItem; + m_pFront = pNewItem; + ++m_Count; + } + return pNewItem; +} + +template +VmaListItem* VmaRawList::PushBack() +{ + ItemType* const pNewItem = m_ItemAllocator.Alloc(); + pNewItem->pNext = VMA_NULL; + if(IsEmpty()) + { + pNewItem->pPrev = VMA_NULL; + m_pFront = pNewItem; + m_pBack = pNewItem; + m_Count = 1; + } + else + { + pNewItem->pPrev = m_pBack; + m_pBack->pNext = pNewItem; + m_pBack = pNewItem; + ++m_Count; + } + return pNewItem; +} + +template +VmaListItem* VmaRawList::PushFront(const T& value) +{ + ItemType* const pNewItem = PushFront(); + pNewItem->Value = value; + return pNewItem; +} + +template +VmaListItem* VmaRawList::PushBack(const T& value) +{ + ItemType* const pNewItem = PushBack(); + pNewItem->Value = value; + return pNewItem; +} + +template +void VmaRawList::PopFront() +{ + VMA_HEAVY_ASSERT(m_Count > 0); + ItemType* const pFrontItem = m_pFront; + ItemType* const pNextItem = pFrontItem->pNext; + if 
(pNextItem != VMA_NULL) + { + pNextItem->pPrev = VMA_NULL; + } + m_pFront = pNextItem; + m_ItemAllocator.Free(pFrontItem); + --m_Count; +} + +template +void VmaRawList::PopBack() +{ + VMA_HEAVY_ASSERT(m_Count > 0); + ItemType* const pBackItem = m_pBack; + ItemType* const pPrevItem = pBackItem->pPrev; + if(pPrevItem != VMA_NULL) + { + pPrevItem->pNext = VMA_NULL; + } + m_pBack = pPrevItem; + m_ItemAllocator.Free(pBackItem); + --m_Count; +} + +template +void VmaRawList::Clear() +{ + if (!IsEmpty()) + { + ItemType* pItem = m_pBack; + while (pItem != VMA_NULL) + { + ItemType* const pPrevItem = pItem->pPrev; + m_ItemAllocator.Free(pItem); + pItem = pPrevItem; + } + m_pFront = VMA_NULL; + m_pBack = VMA_NULL; + m_Count = 0; + } +} + +template +void VmaRawList::Remove(ItemType* pItem) +{ + VMA_HEAVY_ASSERT(pItem != VMA_NULL); + VMA_HEAVY_ASSERT(m_Count > 0); + + if(pItem->pPrev != VMA_NULL) + { + pItem->pPrev->pNext = pItem->pNext; + } + else + { + VMA_HEAVY_ASSERT(m_pFront == pItem); + m_pFront = pItem->pNext; + } + + if(pItem->pNext != VMA_NULL) + { + pItem->pNext->pPrev = pItem->pPrev; + } + else + { + VMA_HEAVY_ASSERT(m_pBack == pItem); + m_pBack = pItem->pPrev; + } + + m_ItemAllocator.Free(pItem); + --m_Count; +} + +template +VmaListItem* VmaRawList::InsertBefore(ItemType* pItem) +{ + if(pItem != VMA_NULL) + { + ItemType* const prevItem = pItem->pPrev; + ItemType* const newItem = m_ItemAllocator.Alloc(); + newItem->pPrev = prevItem; + newItem->pNext = pItem; + pItem->pPrev = newItem; + if(prevItem != VMA_NULL) + { + prevItem->pNext = newItem; + } + else + { + VMA_HEAVY_ASSERT(m_pFront == pItem); + m_pFront = newItem; + } + ++m_Count; + return newItem; + } + return PushBack(); +} + +template +VmaListItem* VmaRawList::InsertAfter(ItemType* pItem) +{ + if(pItem != VMA_NULL) + { + ItemType* const nextItem = pItem->pNext; + ItemType* const newItem = m_ItemAllocator.Alloc(); + newItem->pNext = nextItem; + newItem->pPrev = pItem; + pItem->pNext = newItem; + if(nextItem != VMA_NULL) + { + nextItem->pPrev = newItem; + } + else + { + VMA_HEAVY_ASSERT(m_pBack == pItem); + m_pBack = newItem; + } + ++m_Count; + return newItem; + } + return PushFront(); +} + +template +VmaListItem* VmaRawList::InsertBefore(ItemType* pItem, const T& value) +{ + ItemType* const newItem = InsertBefore(pItem); + newItem->Value = value; + return newItem; +} + +template +VmaListItem* VmaRawList::InsertAfter(ItemType* pItem, const T& value) +{ + ItemType* const newItem = InsertAfter(pItem); + newItem->Value = value; + return newItem; +} +#endif // _VMA_RAW_LIST_FUNCTIONS +#endif // _VMA_RAW_LIST + +#ifndef _VMA_LIST +template +class VmaList +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaList) +public: + class reverse_iterator; + class const_iterator; + class const_reverse_iterator; + + class iterator + { + friend class const_iterator; + friend class VmaList; + public: + iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {} + explicit iterator(const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} + + T& operator*() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return m_pItem->Value; } + T* operator->() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return &m_pItem->Value; } + + bool operator==(const iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem == rhs.m_pItem; } + bool operator!=(const iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem != rhs.m_pItem; } + + const iterator operator++(int) { iterator result = *this; ++*this; return result; } + const iterator 
operator--(int) { iterator result = *this; --*this; return result; } + + iterator& operator++() { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pNext; return *this; } + iterator& operator--(); + + private: + VmaRawList* m_pList; + VmaListItem* m_pItem; + + iterator(VmaRawList* pList, VmaListItem* pItem) : m_pList(pList), m_pItem(pItem) {} + }; + class reverse_iterator + { + friend class const_reverse_iterator; + friend class VmaList; + public: + reverse_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {} + explicit reverse_iterator(const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} + + T& operator*() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return m_pItem->Value; } + T* operator->() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return &m_pItem->Value; } + + bool operator==(const reverse_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem == rhs.m_pItem; } + bool operator!=(const reverse_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem != rhs.m_pItem; } + + const reverse_iterator operator++(int) { reverse_iterator result = *this; ++* this; return result; } + const reverse_iterator operator--(int) { reverse_iterator result = *this; --* this; return result; } + + reverse_iterator& operator++() { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pPrev; return *this; } + reverse_iterator& operator--(); + + private: + VmaRawList* m_pList; + VmaListItem* m_pItem; + + reverse_iterator(VmaRawList* pList, VmaListItem* pItem) : m_pList(pList), m_pItem(pItem) {} + }; + class const_iterator + { + friend class VmaList; + public: + const_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {} + explicit const_iterator(const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} + explicit const_iterator(const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} + + iterator drop_const() { return { const_cast*>(m_pList), const_cast*>(m_pItem) }; } + + const T& operator*() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return m_pItem->Value; } + const T* operator->() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return &m_pItem->Value; } + + bool operator==(const const_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem == rhs.m_pItem; } + bool operator!=(const const_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem != rhs.m_pItem; } + + const const_iterator operator++(int) { const_iterator result = *this; ++* this; return result; } + const const_iterator operator--(int) { const_iterator result = *this; --* this; return result; } + + const_iterator& operator++() { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pNext; return *this; } + const_iterator& operator--(); + + private: + const VmaRawList* m_pList; + const VmaListItem* m_pItem; + + const_iterator(const VmaRawList* pList, const VmaListItem* pItem) : m_pList(pList), m_pItem(pItem) {} + }; + class const_reverse_iterator + { + friend class VmaList; + public: + const_reverse_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {} + explicit const_reverse_iterator(const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} + explicit const_reverse_iterator(const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} + + reverse_iterator drop_const() { return { const_cast*>(m_pList), const_cast*>(m_pItem) }; } + + const T& operator*() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return m_pItem->Value; } + const T* operator->() const { 
VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return &m_pItem->Value; } + + bool operator==(const const_reverse_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem == rhs.m_pItem; } + bool operator!=(const const_reverse_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem != rhs.m_pItem; } + + const const_reverse_iterator operator++(int) { const_reverse_iterator result = *this; ++* this; return result; } + const const_reverse_iterator operator--(int) { const_reverse_iterator result = *this; --* this; return result; } + + const_reverse_iterator& operator++() { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pPrev; return *this; } + const_reverse_iterator& operator--(); + + private: + const VmaRawList* m_pList; + const VmaListItem* m_pItem; + + const_reverse_iterator(const VmaRawList* pList, const VmaListItem* pItem) : m_pList(pList), m_pItem(pItem) {} + }; + + explicit VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) {} + + bool empty() const { return m_RawList.IsEmpty(); } + size_t size() const { return m_RawList.GetCount(); } + + iterator begin() { return iterator(&m_RawList, m_RawList.Front()); } + iterator end() { return iterator(&m_RawList, VMA_NULL); } + + const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); } + const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); } + + const_iterator begin() const { return cbegin(); } + const_iterator end() const { return cend(); } + + reverse_iterator rbegin() { return reverse_iterator(&m_RawList, m_RawList.Back()); } + reverse_iterator rend() { return reverse_iterator(&m_RawList, VMA_NULL); } + + const_reverse_iterator crbegin() const { return const_reverse_iterator(&m_RawList, m_RawList.Back()); } + const_reverse_iterator crend() const { return const_reverse_iterator(&m_RawList, VMA_NULL); } + + const_reverse_iterator rbegin() const { return crbegin(); } + const_reverse_iterator rend() const { return crend(); } + + void push_back(const T& value) { m_RawList.PushBack(value); } + iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); } + + void clear() { m_RawList.Clear(); } + void erase(iterator it) { m_RawList.Remove(it.m_pItem); } + +private: + VmaRawList m_RawList; +}; + +#ifndef _VMA_LIST_FUNCTIONS +template +typename VmaList::iterator& VmaList::iterator::operator--() +{ + if (m_pItem != VMA_NULL) + { + m_pItem = m_pItem->pPrev; + } + else + { + VMA_HEAVY_ASSERT(!m_pList->IsEmpty()); + m_pItem = m_pList->Back(); + } + return *this; +} + +template +typename VmaList::reverse_iterator& VmaList::reverse_iterator::operator--() +{ + if (m_pItem != VMA_NULL) + { + m_pItem = m_pItem->pNext; + } + else + { + VMA_HEAVY_ASSERT(!m_pList->IsEmpty()); + m_pItem = m_pList->Front(); + } + return *this; +} + +template +typename VmaList::const_iterator& VmaList::const_iterator::operator--() +{ + if (m_pItem != VMA_NULL) + { + m_pItem = m_pItem->pPrev; + } + else + { + VMA_HEAVY_ASSERT(!m_pList->IsEmpty()); + m_pItem = m_pList->Back(); + } + return *this; +} + +template +typename VmaList::const_reverse_iterator& VmaList::const_reverse_iterator::operator--() +{ + if (m_pItem != VMA_NULL) + { + m_pItem = m_pItem->pNext; + } + else + { + VMA_HEAVY_ASSERT(!m_pList->IsEmpty()); + m_pItem = m_pList->Back(); + } + return *this; +} +#endif // _VMA_LIST_FUNCTIONS +#endif // _VMA_LIST + +#ifndef _VMA_INTRUSIVE_LINKED_LIST +/* +Expected interface of ItemTypeTraits: +struct 
MyItemTypeTraits +{ + typedef MyItem ItemType; + static ItemType* GetPrev(const ItemType* item) { return item->myPrevPtr; } + static ItemType* GetNext(const ItemType* item) { return item->myNextPtr; } + static ItemType*& AccessPrev(ItemType* item) { return item->myPrevPtr; } + static ItemType*& AccessNext(ItemType* item) { return item->myNextPtr; } +}; +*/ +template +class VmaIntrusiveLinkedList +{ +public: + typedef typename ItemTypeTraits::ItemType ItemType; + static ItemType* GetPrev(const ItemType* item) { return ItemTypeTraits::GetPrev(item); } + static ItemType* GetNext(const ItemType* item) { return ItemTypeTraits::GetNext(item); } + + // Movable, not copyable. + VmaIntrusiveLinkedList() = default; + VmaIntrusiveLinkedList(VmaIntrusiveLinkedList && src) noexcept; + VmaIntrusiveLinkedList(const VmaIntrusiveLinkedList&) = delete; + VmaIntrusiveLinkedList& operator=(VmaIntrusiveLinkedList&& src) noexcept; + VmaIntrusiveLinkedList& operator=(const VmaIntrusiveLinkedList&) = delete; + ~VmaIntrusiveLinkedList() { VMA_HEAVY_ASSERT(IsEmpty()); } + + size_t GetCount() const { return m_Count; } + bool IsEmpty() const { return m_Count == 0; } + ItemType* Front() { return m_Front; } + ItemType* Back() { return m_Back; } + const ItemType* Front() const { return m_Front; } + const ItemType* Back() const { return m_Back; } + + void PushBack(ItemType* item); + void PushFront(ItemType* item); + ItemType* PopBack(); + ItemType* PopFront(); + + // MyItem can be null - it means PushBack. + void InsertBefore(ItemType* existingItem, ItemType* newItem); + // MyItem can be null - it means PushFront. + void InsertAfter(ItemType* existingItem, ItemType* newItem); + void Remove(ItemType* item); + void RemoveAll(); + +private: + ItemType* m_Front = VMA_NULL; + ItemType* m_Back = VMA_NULL; + size_t m_Count = 0; +}; + +#ifndef _VMA_INTRUSIVE_LINKED_LIST_FUNCTIONS +template +VmaIntrusiveLinkedList::VmaIntrusiveLinkedList(VmaIntrusiveLinkedList&& src) noexcept + : m_Front(src.m_Front), m_Back(src.m_Back), m_Count(src.m_Count) +{ + src.m_Front = src.m_Back = VMA_NULL; + src.m_Count = 0; +} + +template +VmaIntrusiveLinkedList& VmaIntrusiveLinkedList::operator=(VmaIntrusiveLinkedList&& src) noexcept +{ + if (&src != this) + { + VMA_HEAVY_ASSERT(IsEmpty()); + m_Front = src.m_Front; + m_Back = src.m_Back; + m_Count = src.m_Count; + src.m_Front = src.m_Back = VMA_NULL; + src.m_Count = 0; + } + return *this; +} + +template +void VmaIntrusiveLinkedList::PushBack(ItemType* item) +{ + VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL); + if (IsEmpty()) + { + m_Front = item; + m_Back = item; + m_Count = 1; + } + else + { + ItemTypeTraits::AccessPrev(item) = m_Back; + ItemTypeTraits::AccessNext(m_Back) = item; + m_Back = item; + ++m_Count; + } +} + +template +void VmaIntrusiveLinkedList::PushFront(ItemType* item) +{ + VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL); + if (IsEmpty()) + { + m_Front = item; + m_Back = item; + m_Count = 1; + } + else + { + ItemTypeTraits::AccessNext(item) = m_Front; + ItemTypeTraits::AccessPrev(m_Front) = item; + m_Front = item; + ++m_Count; + } +} + +template +typename VmaIntrusiveLinkedList::ItemType* VmaIntrusiveLinkedList::PopBack() +{ + VMA_HEAVY_ASSERT(m_Count > 0); + ItemType* const backItem = m_Back; + ItemType* const prevItem = ItemTypeTraits::GetPrev(backItem); + if (prevItem != VMA_NULL) + { + ItemTypeTraits::AccessNext(prevItem) = VMA_NULL; + } + m_Back = prevItem; + 
--m_Count; + ItemTypeTraits::AccessPrev(backItem) = VMA_NULL; + ItemTypeTraits::AccessNext(backItem) = VMA_NULL; + return backItem; +} + +template +typename VmaIntrusiveLinkedList::ItemType* VmaIntrusiveLinkedList::PopFront() +{ + VMA_HEAVY_ASSERT(m_Count > 0); + ItemType* const frontItem = m_Front; + ItemType* const nextItem = ItemTypeTraits::GetNext(frontItem); + if (nextItem != VMA_NULL) + { + ItemTypeTraits::AccessPrev(nextItem) = VMA_NULL; + } + m_Front = nextItem; + --m_Count; + ItemTypeTraits::AccessPrev(frontItem) = VMA_NULL; + ItemTypeTraits::AccessNext(frontItem) = VMA_NULL; + return frontItem; +} + +template +void VmaIntrusiveLinkedList::InsertBefore(ItemType* existingItem, ItemType* newItem) +{ + VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL); + if (existingItem != VMA_NULL) + { + ItemType* const prevItem = ItemTypeTraits::GetPrev(existingItem); + ItemTypeTraits::AccessPrev(newItem) = prevItem; + ItemTypeTraits::AccessNext(newItem) = existingItem; + ItemTypeTraits::AccessPrev(existingItem) = newItem; + if (prevItem != VMA_NULL) + { + ItemTypeTraits::AccessNext(prevItem) = newItem; + } + else + { + VMA_HEAVY_ASSERT(m_Front == existingItem); + m_Front = newItem; + } + ++m_Count; + } + else + PushBack(newItem); +} + +template +void VmaIntrusiveLinkedList::InsertAfter(ItemType* existingItem, ItemType* newItem) +{ + VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL); + if (existingItem != VMA_NULL) + { + ItemType* const nextItem = ItemTypeTraits::GetNext(existingItem); + ItemTypeTraits::AccessNext(newItem) = nextItem; + ItemTypeTraits::AccessPrev(newItem) = existingItem; + ItemTypeTraits::AccessNext(existingItem) = newItem; + if (nextItem != VMA_NULL) + { + ItemTypeTraits::AccessPrev(nextItem) = newItem; + } + else + { + VMA_HEAVY_ASSERT(m_Back == existingItem); + m_Back = newItem; + } + ++m_Count; + } + else + return PushFront(newItem); +} + +template +void VmaIntrusiveLinkedList::Remove(ItemType* item) +{ + VMA_HEAVY_ASSERT(item != VMA_NULL && m_Count > 0); + if (ItemTypeTraits::GetPrev(item) != VMA_NULL) + { + ItemTypeTraits::AccessNext(ItemTypeTraits::AccessPrev(item)) = ItemTypeTraits::GetNext(item); + } + else + { + VMA_HEAVY_ASSERT(m_Front == item); + m_Front = ItemTypeTraits::GetNext(item); + } + + if (ItemTypeTraits::GetNext(item) != VMA_NULL) + { + ItemTypeTraits::AccessPrev(ItemTypeTraits::AccessNext(item)) = ItemTypeTraits::GetPrev(item); + } + else + { + VMA_HEAVY_ASSERT(m_Back == item); + m_Back = ItemTypeTraits::GetPrev(item); + } + ItemTypeTraits::AccessPrev(item) = VMA_NULL; + ItemTypeTraits::AccessNext(item) = VMA_NULL; + --m_Count; +} + +template +void VmaIntrusiveLinkedList::RemoveAll() +{ + if (!IsEmpty()) + { + ItemType* item = m_Back; + while (item != VMA_NULL) + { + ItemType* const prevItem = ItemTypeTraits::AccessPrev(item); + ItemTypeTraits::AccessPrev(item) = VMA_NULL; + ItemTypeTraits::AccessNext(item) = VMA_NULL; + item = prevItem; + } + m_Front = VMA_NULL; + m_Back = VMA_NULL; + m_Count = 0; + } +} +#endif // _VMA_INTRUSIVE_LINKED_LIST_FUNCTIONS +#endif // _VMA_INTRUSIVE_LINKED_LIST + +#if !defined(_VMA_STRING_BUILDER) && VMA_STATS_STRING_ENABLED +class VmaStringBuilder +{ +public: + explicit VmaStringBuilder(const VkAllocationCallbacks* allocationCallbacks) : m_Data(VmaStlAllocator(allocationCallbacks)) {} + ~VmaStringBuilder() = default; + + size_t GetLength() const { return 
m_Data.size(); } + // Returned string is not null-terminated! + const char* GetData() const { return m_Data.data(); } + void AddNewLine() { Add('\n'); } + void Add(char ch) { m_Data.push_back(ch); } + + void Add(const char* pStr); + void AddNumber(uint32_t num); + void AddNumber(uint64_t num); + void AddPointer(const void* ptr); + +private: + VmaVector> m_Data; +}; + +#ifndef _VMA_STRING_BUILDER_FUNCTIONS +void VmaStringBuilder::Add(const char* pStr) +{ + const size_t strLen = strlen(pStr); + if (strLen > 0) + { + const size_t oldCount = m_Data.size(); + m_Data.resize(oldCount + strLen); + memcpy(m_Data.data() + oldCount, pStr, strLen); + } +} + +void VmaStringBuilder::AddNumber(uint32_t num) +{ + char buf[11]; + buf[10] = '\0'; + char* p = &buf[10]; + do + { + *--p = '0' + (char)(num % 10); + num /= 10; + } while (num); + Add(p); +} + +void VmaStringBuilder::AddNumber(uint64_t num) +{ + char buf[21]; + buf[20] = '\0'; + char* p = &buf[20]; + do + { + *--p = '0' + (char)(num % 10); + num /= 10; + } while (num); + Add(p); +} + +void VmaStringBuilder::AddPointer(const void* ptr) +{ + char buf[21]; + VmaPtrToStr(buf, sizeof(buf), ptr); + Add(buf); +} +#endif //_VMA_STRING_BUILDER_FUNCTIONS +#endif // _VMA_STRING_BUILDER + +#if !defined(_VMA_JSON_WRITER) && VMA_STATS_STRING_ENABLED +/* +Allows to conveniently build a correct JSON document to be written to the +VmaStringBuilder passed to the constructor. +*/ +class VmaJsonWriter +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaJsonWriter) +public: + // sb - string builder to write the document to. Must remain alive for the whole lifetime of this object. + VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb); + ~VmaJsonWriter(); + + // Begins object by writing "{". + // Inside an object, you must call pairs of WriteString and a value, e.g.: + // j.BeginObject(true); j.WriteString("A"); j.WriteNumber(1); j.WriteString("B"); j.WriteNumber(2); j.EndObject(); + // Will write: { "A": 1, "B": 2 } + void BeginObject(bool singleLine = false); + // Ends object by writing "}". + void EndObject(); + + // Begins array by writing "[". + // Inside an array, you can write a sequence of any values. + void BeginArray(bool singleLine = false); + // Ends array by writing "[". + void EndArray(); + + // Writes a string value inside "". + // pStr can contain any ANSI characters, including '"', new line etc. - they will be properly escaped. + void WriteString(const char* pStr); + + // Begins writing a string value. + // Call BeginString, ContinueString, ContinueString, ..., EndString instead of + // WriteString to conveniently build the string content incrementally, made of + // parts including numbers. + void BeginString(const char* pStr = VMA_NULL); + // Posts next part of an open string. + void ContinueString(const char* pStr); + // Posts next part of an open string. The number is converted to decimal characters. + void ContinueString(uint32_t n); + void ContinueString(uint64_t n); + // Posts next part of an open string. Pointer value is converted to characters + // using "%p" formatting - shown as hexadecimal number, e.g.: 000000081276Ad00 + void ContinueString_Pointer(const void* ptr); + // Ends writing a string value by writing '"'. + void EndString(const char* pStr = VMA_NULL); + + // Writes a number value. + void WriteNumber(uint32_t n); + void WriteNumber(uint64_t n); + // Writes a boolean value - false or true. + void WriteBool(bool b); + // Writes a null value. 
+ void WriteNull(); + +private: + enum COLLECTION_TYPE + { + COLLECTION_TYPE_OBJECT, + COLLECTION_TYPE_ARRAY, + }; + struct StackItem + { + COLLECTION_TYPE type; + uint32_t valueCount; + bool singleLineMode; + }; + + static const char* const INDENT; + + VmaStringBuilder& m_SB; + VmaVector< StackItem, VmaStlAllocator > m_Stack; + bool m_InsideString; + + void BeginValue(bool isString); + void WriteIndent(bool oneLess = false); +}; +const char* const VmaJsonWriter::INDENT = " "; + +#ifndef _VMA_JSON_WRITER_FUNCTIONS +VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) + : m_SB(sb), + m_Stack(VmaStlAllocator(pAllocationCallbacks)), + m_InsideString(false) {} + +VmaJsonWriter::~VmaJsonWriter() +{ + VMA_ASSERT(!m_InsideString); + VMA_ASSERT(m_Stack.empty()); +} + +void VmaJsonWriter::BeginObject(bool singleLine) +{ + VMA_ASSERT(!m_InsideString); + + BeginValue(false); + m_SB.Add('{'); + + StackItem item; + item.type = COLLECTION_TYPE_OBJECT; + item.valueCount = 0; + item.singleLineMode = singleLine; + m_Stack.push_back(item); +} + +void VmaJsonWriter::EndObject() +{ + VMA_ASSERT(!m_InsideString); + + WriteIndent(true); + m_SB.Add('}'); + + VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT); + m_Stack.pop_back(); +} + +void VmaJsonWriter::BeginArray(bool singleLine) +{ + VMA_ASSERT(!m_InsideString); + + BeginValue(false); + m_SB.Add('['); + + StackItem item; + item.type = COLLECTION_TYPE_ARRAY; + item.valueCount = 0; + item.singleLineMode = singleLine; + m_Stack.push_back(item); +} + +void VmaJsonWriter::EndArray() +{ + VMA_ASSERT(!m_InsideString); + + WriteIndent(true); + m_SB.Add(']'); + + VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY); + m_Stack.pop_back(); +} + +void VmaJsonWriter::WriteString(const char* pStr) +{ + BeginString(pStr); + EndString(); +} + +void VmaJsonWriter::BeginString(const char* pStr) +{ + VMA_ASSERT(!m_InsideString); + + BeginValue(true); + m_SB.Add('"'); + m_InsideString = true; + if (pStr != VMA_NULL && pStr[0] != '\0') + { + ContinueString(pStr); + } +} + +void VmaJsonWriter::ContinueString(const char* pStr) +{ + VMA_ASSERT(m_InsideString); + + const size_t strLen = strlen(pStr); + for (size_t i = 0; i < strLen; ++i) + { + char ch = pStr[i]; + if (ch == '\\') + { + m_SB.Add("\\\\"); + } + else if (ch == '"') + { + m_SB.Add("\\\""); + } + else if ((uint8_t)ch >= 32) + { + m_SB.Add(ch); + } + else switch (ch) + { + case '\b': + m_SB.Add("\\b"); + break; + case '\f': + m_SB.Add("\\f"); + break; + case '\n': + m_SB.Add("\\n"); + break; + case '\r': + m_SB.Add("\\r"); + break; + case '\t': + m_SB.Add("\\t"); + break; + default: + VMA_ASSERT(0 && "Character not currently supported."); + } + } +} + +void VmaJsonWriter::ContinueString(uint32_t n) +{ + VMA_ASSERT(m_InsideString); + m_SB.AddNumber(n); +} + +void VmaJsonWriter::ContinueString(uint64_t n) +{ + VMA_ASSERT(m_InsideString); + m_SB.AddNumber(n); +} + +void VmaJsonWriter::ContinueString_Pointer(const void* ptr) +{ + VMA_ASSERT(m_InsideString); + m_SB.AddPointer(ptr); +} + +void VmaJsonWriter::EndString(const char* pStr) +{ + VMA_ASSERT(m_InsideString); + if (pStr != VMA_NULL && pStr[0] != '\0') + { + ContinueString(pStr); + } + m_SB.Add('"'); + m_InsideString = false; +} + +void VmaJsonWriter::WriteNumber(uint32_t n) +{ + VMA_ASSERT(!m_InsideString); + BeginValue(false); + m_SB.AddNumber(n); +} + +void VmaJsonWriter::WriteNumber(uint64_t n) +{ + VMA_ASSERT(!m_InsideString); + BeginValue(false); + 
m_SB.AddNumber(n); +} + +void VmaJsonWriter::WriteBool(bool b) +{ + VMA_ASSERT(!m_InsideString); + BeginValue(false); + m_SB.Add(b ? "true" : "false"); +} + +void VmaJsonWriter::WriteNull() +{ + VMA_ASSERT(!m_InsideString); + BeginValue(false); + m_SB.Add("null"); +} + +void VmaJsonWriter::BeginValue(bool isString) +{ + if (!m_Stack.empty()) + { + StackItem& currItem = m_Stack.back(); + if (currItem.type == COLLECTION_TYPE_OBJECT && + currItem.valueCount % 2 == 0) + { + VMA_ASSERT(isString); + } + + if (currItem.type == COLLECTION_TYPE_OBJECT && + currItem.valueCount % 2 != 0) + { + m_SB.Add(": "); + } + else if (currItem.valueCount > 0) + { + m_SB.Add(", "); + WriteIndent(); + } + else + { + WriteIndent(); + } + ++currItem.valueCount; + } +} + +void VmaJsonWriter::WriteIndent(bool oneLess) +{ + if (!m_Stack.empty() && !m_Stack.back().singleLineMode) + { + m_SB.AddNewLine(); + + size_t count = m_Stack.size(); + if (count > 0 && oneLess) + { + --count; + } + for (size_t i = 0; i < count; ++i) + { + m_SB.Add(INDENT); + } + } +} +#endif // _VMA_JSON_WRITER_FUNCTIONS + +namespace +{ + +void VmaPrintDetailedStatistics(VmaJsonWriter& json, const VmaDetailedStatistics& stat) +{ + json.BeginObject(); + + json.WriteString("BlockCount"); + json.WriteNumber(stat.statistics.blockCount); + json.WriteString("BlockBytes"); + json.WriteNumber(stat.statistics.blockBytes); + json.WriteString("AllocationCount"); + json.WriteNumber(stat.statistics.allocationCount); + json.WriteString("AllocationBytes"); + json.WriteNumber(stat.statistics.allocationBytes); + json.WriteString("UnusedRangeCount"); + json.WriteNumber(stat.unusedRangeCount); + + if (stat.statistics.allocationCount > 1) + { + json.WriteString("AllocationSizeMin"); + json.WriteNumber(stat.allocationSizeMin); + json.WriteString("AllocationSizeMax"); + json.WriteNumber(stat.allocationSizeMax); + } + if (stat.unusedRangeCount > 1) + { + json.WriteString("UnusedRangeSizeMin"); + json.WriteNumber(stat.unusedRangeSizeMin); + json.WriteString("UnusedRangeSizeMax"); + json.WriteNumber(stat.unusedRangeSizeMax); + } + json.EndObject(); +} + +} // namespace + +#endif // _VMA_JSON_WRITER + +#ifndef _VMA_MAPPING_HYSTERESIS + +class VmaMappingHysteresis +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaMappingHysteresis) +public: + VmaMappingHysteresis() = default; + + uint32_t GetExtraMapping() const { return m_ExtraMapping; } + + // Call when Map was called. + // Returns true if switched to extra +1 mapping reference count. + bool PostMap() + { +#if VMA_MAPPING_HYSTERESIS_ENABLED + if(m_ExtraMapping == 0) + { + ++m_MajorCounter; + if(m_MajorCounter >= COUNTER_MIN_EXTRA_MAPPING) + { + m_ExtraMapping = 1; + m_MajorCounter = 0; + m_MinorCounter = 0; + return true; + } + } + else // m_ExtraMapping == 1 + PostMinorCounter(); +#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED + return false; + } + + // Call when Unmap was called. + void PostUnmap() + { +#if VMA_MAPPING_HYSTERESIS_ENABLED + if(m_ExtraMapping == 0) + ++m_MajorCounter; + else // m_ExtraMapping == 1 + PostMinorCounter(); +#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED + } + + // Call when allocation was made from the memory block. + void PostAlloc() + { +#if VMA_MAPPING_HYSTERESIS_ENABLED + if(m_ExtraMapping == 1) + ++m_MajorCounter; + else // m_ExtraMapping == 0 + PostMinorCounter(); +#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED + } + + // Call when allocation was freed from the memory block. + // Returns true if switched to extra -1 mapping reference count. 
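+    // While the extra mapping is active, allocation/free traffic on the block advances
+    // m_MajorCounter and Map()/Unmap() traffic advances m_MinorCounter; once the major
+    // counter reaches COUNTER_MIN_EXTRA_MAPPING and clearly exceeds the minor one, the
+    // extra reference is dropped again, so an idle block does not stay mapped indefinitely.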
+ bool PostFree() + { +#if VMA_MAPPING_HYSTERESIS_ENABLED + if(m_ExtraMapping == 1) + { + ++m_MajorCounter; + if(m_MajorCounter >= COUNTER_MIN_EXTRA_MAPPING && + m_MajorCounter > m_MinorCounter + 1) + { + m_ExtraMapping = 0; + m_MajorCounter = 0; + m_MinorCounter = 0; + return true; + } + } + else // m_ExtraMapping == 0 + PostMinorCounter(); +#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED + return false; + } + +private: + static constexpr int32_t COUNTER_MIN_EXTRA_MAPPING = 7; + + uint32_t m_MinorCounter = 0; + uint32_t m_MajorCounter = 0; + uint32_t m_ExtraMapping = 0; // 0 or 1. + + void PostMinorCounter() + { + if(m_MinorCounter < m_MajorCounter) + { + ++m_MinorCounter; + } + else if(m_MajorCounter > 0) + { + --m_MajorCounter; + --m_MinorCounter; + } + } +}; + +#endif // _VMA_MAPPING_HYSTERESIS + +#if VMA_EXTERNAL_MEMORY_WIN32 +class VmaWin32Handle +{ +public: + VmaWin32Handle() noexcept : m_hHandle(VMA_NULL) { } + explicit VmaWin32Handle(HANDLE hHandle) noexcept + : m_hHandle(hHandle) + , m_IsNTHandle(IsNTHandle(hHandle)) + { + } + ~VmaWin32Handle() noexcept { if (m_hHandle != VMA_NULL && m_IsNTHandle) { ::CloseHandle(m_hHandle); } } + VMA_CLASS_NO_COPY_NO_MOVE(VmaWin32Handle) + +public: + // Strengthened + VkResult GetHandle(VkDevice device, VkDeviceMemory memory, PFN_vkGetMemoryWin32HandleKHR pvkGetMemoryWin32HandleKHR, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE hTargetProcess, bool useMutex, HANDLE* pHandle) noexcept + { + *pHandle = VMA_NULL; + // Try to get handle first. + VkResult res = VK_SUCCESS; + if (m_hHandle == VMA_NULL) + { + VmaMutexLockWrite lock(m_Mutex, useMutex); + if (m_hHandle == VMA_NULL) + { + res = Create(device, memory, pvkGetMemoryWin32HandleKHR, handleType, &m_hHandle); + if (res != VK_SUCCESS) { + m_hHandle = VMA_NULL; + return res; + } + m_IsNTHandle = IsNTHandle(m_hHandle); + } + } + if (res == VK_SUCCESS) { + // KMT handle is returned as is. + *pHandle = m_IsNTHandle ? Duplicate(hTargetProcess) : m_hHandle; + } + return res; + } + + operator bool() const noexcept { return m_hHandle != VMA_NULL; } +private: + // Not atomic + static VkResult Create(VkDevice device, VkDeviceMemory memory, PFN_vkGetMemoryWin32HandleKHR pvkGetMemoryWin32HandleKHR, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE* pHandle) noexcept + { + VkResult res = VK_ERROR_FEATURE_NOT_PRESENT; + if (pvkGetMemoryWin32HandleKHR != VMA_NULL) + { + VkMemoryGetWin32HandleInfoKHR handleInfo{ }; + handleInfo.sType = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR; + handleInfo.memory = memory; + handleInfo.handleType = handleType; + res = pvkGetMemoryWin32HandleKHR(device, &handleInfo, pHandle); + } + return res; + } + HANDLE Duplicate(HANDLE hTargetProcess = VMA_NULL) const noexcept + { + if (!m_hHandle) + return m_hHandle; + + HANDLE hCurrentProcess = ::GetCurrentProcess(); + HANDLE hDupHandle = VMA_NULL; + if (!::DuplicateHandle(hCurrentProcess, m_hHandle, hTargetProcess ? hTargetProcess : hCurrentProcess, &hDupHandle, 0, FALSE, DUPLICATE_SAME_ACCESS)) + { + VMA_ASSERT(0 && "Failed to duplicate handle."); + } + return hDupHandle; + } + static bool IsNTHandle(HANDLE hHandle) noexcept + { + DWORD flags = 0; + return (hHandle != VMA_NULL) ? (::GetHandleInformation(hHandle, &flags) != 0) : false; + } +private: + HANDLE m_hHandle; + VMA_RW_MUTEX m_Mutex; // Protects access m_Handle + bool m_IsNTHandle = false; // True if m_Handle is NT handle, false if it's a KMT handle. 
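+
+    // Ownership notes: an NT handle returned by vkGetMemoryWin32HandleKHR is cached in
+    // m_hHandle, closed in the destructor, and duplicated (optionally into hTargetProcess)
+    // on each GetHandle() call. A KMT handle is not a kernel object, so it is returned
+    // as-is and never closed or duplicated.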
+}; +#else +class VmaWin32Handle +{ + // ABI compatibility + void* placeholder = VMA_NULL; + VMA_RW_MUTEX placeholder2; + bool placeholder3 = false; +}; +#endif // VMA_EXTERNAL_MEMORY_WIN32 + + +#ifndef _VMA_DEVICE_MEMORY_BLOCK +/* +Represents a single block of device memory (`VkDeviceMemory`) with all the +data about its regions (aka suballocations, #VmaAllocation), assigned and free. + +Thread-safety: +- Access to m_pMetadata must be externally synchronized. +- Map, Unmap, Bind* are synchronized internally. +*/ +class VmaDeviceMemoryBlock +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaDeviceMemoryBlock) +public: + VmaBlockMetadata* m_pMetadata; + + explicit VmaDeviceMemoryBlock(VmaAllocator hAllocator); + ~VmaDeviceMemoryBlock(); + + // Always call after construction. + void Init( + VmaAllocator hAllocator, + VmaPool hParentPool, + uint32_t newMemoryTypeIndex, + VkDeviceMemory newMemory, + VkDeviceSize newSize, + uint32_t id, + uint32_t algorithm, + VkDeviceSize bufferImageGranularity); + // Always call before destruction. + void Destroy(VmaAllocator allocator); + + VmaPool GetParentPool() const { return m_hParentPool; } + VkDeviceMemory GetDeviceMemory() const { return m_hMemory; } + uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; } + uint32_t GetId() const { return m_Id; } + void* GetMappedData() const { return m_pMappedData; } + uint32_t GetMapRefCount() const { return m_MapCount; } + + // Call when allocation/free was made from m_pMetadata. + // Used for m_MappingHysteresis. + void PostAlloc(VmaAllocator hAllocator); + void PostFree(VmaAllocator hAllocator); + + // Validates all data structures inside this object. If not valid, returns false. + bool Validate() const; + VkResult CheckCorruption(VmaAllocator hAllocator); + + // ppData can be null. + VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData); + void Unmap(VmaAllocator hAllocator, uint32_t count); + + VkResult WriteMagicValueAfterAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize); + VkResult ValidateMagicValueAfterAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize); + + VkResult BindBufferMemory( + VmaAllocator hAllocator, + VmaAllocation hAllocation, + VkDeviceSize allocationLocalOffset, + VkBuffer hBuffer, + const void* pNext); + VkResult BindImageMemory( + VmaAllocator hAllocator, + VmaAllocation hAllocation, + VkDeviceSize allocationLocalOffset, + VkImage hImage, + const void* pNext); +#if VMA_EXTERNAL_MEMORY_WIN32 + VkResult CreateWin32Handle( + const VmaAllocator hAllocator, + PFN_vkGetMemoryWin32HandleKHR pvkGetMemoryWin32HandleKHR, + VkExternalMemoryHandleTypeFlagBits handleType, + HANDLE hTargetProcess, + HANDLE* pHandle)noexcept; +#endif // VMA_EXTERNAL_MEMORY_WIN32 +private: + VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool. + uint32_t m_MemoryTypeIndex; + uint32_t m_Id; + VkDeviceMemory m_hMemory; + + /* + Protects access to m_hMemory so it is not used by multiple threads simultaneously, e.g. vkMapMemory, vkBindBufferMemory. + Also protects m_MapCount, m_pMappedData. + Allocations, deallocations, any change in m_pMetadata is protected by parent's VmaBlockVector::m_Mutex. + */ + VMA_MUTEX m_MapAndBindMutex; + VmaMappingHysteresis m_MappingHysteresis; + uint32_t m_MapCount; + void* m_pMappedData; + + VmaWin32Handle m_Handle; +}; +#endif // _VMA_DEVICE_MEMORY_BLOCK + +#ifndef _VMA_ALLOCATION_T +struct VmaAllocationExtraData +{ + void* m_pMappedData = VMA_NULL; // Not null means memory is mapped. 
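+    // Win32 handle exported for this allocation's memory. When VMA_EXTERNAL_MEMORY_WIN32
+    // is disabled, VmaWin32Handle is only the ABI-compatibility placeholder defined above.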
+ VmaWin32Handle m_Handle; +}; + +struct VmaAllocation_T +{ + friend struct VmaDedicatedAllocationListItemTraits; + + enum FLAGS + { + FLAG_PERSISTENT_MAP = 0x01, + FLAG_MAPPING_ALLOWED = 0x02, + }; + +public: + enum ALLOCATION_TYPE + { + ALLOCATION_TYPE_NONE, + ALLOCATION_TYPE_BLOCK, + ALLOCATION_TYPE_DEDICATED, + }; + + // This struct is allocated using VmaPoolAllocator. + explicit VmaAllocation_T(bool mappingAllowed); + ~VmaAllocation_T(); + + void InitBlockAllocation( + VmaDeviceMemoryBlock* block, + VmaAllocHandle allocHandle, + VkDeviceSize alignment, + VkDeviceSize size, + uint32_t memoryTypeIndex, + VmaSuballocationType suballocationType, + bool mapped); + // pMappedData not null means allocation is created with MAPPED flag. + void InitDedicatedAllocation( + VmaAllocator allocator, + VmaPool hParentPool, + uint32_t memoryTypeIndex, + VkDeviceMemory hMemory, + VmaSuballocationType suballocationType, + void* pMappedData, + VkDeviceSize size); + void Destroy(VmaAllocator allocator); + + ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; } + VkDeviceSize GetAlignment() const { return m_Alignment; } + VkDeviceSize GetSize() const { return m_Size; } + void* GetUserData() const { return m_pUserData; } + const char* GetName() const { return m_pName; } + VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; } + + VmaDeviceMemoryBlock* GetBlock() const { VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK); return m_BlockAllocation.m_Block; } + uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; } + bool IsPersistentMap() const { return (m_Flags & FLAG_PERSISTENT_MAP) != 0; } + bool IsMappingAllowed() const { return (m_Flags & FLAG_MAPPING_ALLOWED) != 0; } + + void SetUserData(VmaAllocator hAllocator, void* pUserData) { m_pUserData = pUserData; } + void SetName(VmaAllocator hAllocator, const char* pName); + void FreeName(VmaAllocator hAllocator); + uint8_t SwapBlockAllocation(VmaAllocator hAllocator, VmaAllocation allocation); + VmaAllocHandle GetAllocHandle() const; + VkDeviceSize GetOffset() const; + VmaPool GetParentPool() const; + VkDeviceMemory GetMemory() const; + void* GetMappedData() const; + + void BlockAllocMap(); + void BlockAllocUnmap(); + VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData); + void DedicatedAllocUnmap(VmaAllocator hAllocator); + +#if VMA_STATS_STRING_ENABLED + VmaBufferImageUsage GetBufferImageUsage() const { return m_BufferImageUsage; } + void InitBufferUsage(const VkBufferCreateInfo &createInfo, bool useKhrMaintenance5) + { + VMA_ASSERT(m_BufferImageUsage == VmaBufferImageUsage::UNKNOWN); + m_BufferImageUsage = VmaBufferImageUsage(createInfo, useKhrMaintenance5); + } + void InitImageUsage(const VkImageCreateInfo &createInfo) + { + VMA_ASSERT(m_BufferImageUsage == VmaBufferImageUsage::UNKNOWN); + m_BufferImageUsage = VmaBufferImageUsage(createInfo); + } + void PrintParameters(class VmaJsonWriter& json) const; +#endif + +#if VMA_EXTERNAL_MEMORY_WIN32 + VkResult GetWin32Handle(VmaAllocator hAllocator, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE hTargetProcess, HANDLE* hHandle) noexcept; +#endif // VMA_EXTERNAL_MEMORY_WIN32 + +private: + // Allocation out of VmaDeviceMemoryBlock. + struct BlockAllocation + { + VmaDeviceMemoryBlock* m_Block; + VmaAllocHandle m_AllocHandle; + }; + // Allocation for an object that has its own private VkDeviceMemory. + struct DedicatedAllocation + { + VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool. 
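+        // Memory object created solely for this allocation (a dedicated vkAllocateMemory),
+        // as opposed to a suballocated region of a shared VmaDeviceMemoryBlock.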
+ VkDeviceMemory m_hMemory; + VmaAllocationExtraData* m_ExtraData; + VmaAllocation_T* m_Prev; + VmaAllocation_T* m_Next; + }; + union + { + // Allocation out of VmaDeviceMemoryBlock. + BlockAllocation m_BlockAllocation; + // Allocation for an object that has its own private VkDeviceMemory. + DedicatedAllocation m_DedicatedAllocation; + }; + + VkDeviceSize m_Alignment; + VkDeviceSize m_Size; + void* m_pUserData; + char* m_pName; + uint32_t m_MemoryTypeIndex; + uint8_t m_Type; // ALLOCATION_TYPE + uint8_t m_SuballocationType; // VmaSuballocationType + // Reference counter for vmaMapMemory()/vmaUnmapMemory(). + uint8_t m_MapCount; + uint8_t m_Flags; // enum FLAGS +#if VMA_STATS_STRING_ENABLED + VmaBufferImageUsage m_BufferImageUsage; // 0 if unknown. +#endif + + void EnsureExtraData(VmaAllocator hAllocator); +}; +#endif // _VMA_ALLOCATION_T + +#ifndef _VMA_DEDICATED_ALLOCATION_LIST_ITEM_TRAITS +struct VmaDedicatedAllocationListItemTraits +{ + typedef VmaAllocation_T ItemType; + + static ItemType* GetPrev(const ItemType* item) + { + VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); + return item->m_DedicatedAllocation.m_Prev; + } + static ItemType* GetNext(const ItemType* item) + { + VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); + return item->m_DedicatedAllocation.m_Next; + } + static ItemType*& AccessPrev(ItemType* item) + { + VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); + return item->m_DedicatedAllocation.m_Prev; + } + static ItemType*& AccessNext(ItemType* item) + { + VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); + return item->m_DedicatedAllocation.m_Next; + } +}; +#endif // _VMA_DEDICATED_ALLOCATION_LIST_ITEM_TRAITS + +#ifndef _VMA_DEDICATED_ALLOCATION_LIST +/* +Stores linked list of VmaAllocation_T objects. +Thread-safe, synchronized internally. +*/ +class VmaDedicatedAllocationList +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaDedicatedAllocationList) +public: + VmaDedicatedAllocationList() = default; + ~VmaDedicatedAllocationList(); + + void Init(bool useMutex) { m_UseMutex = useMutex; } + bool Validate(); + + void AddDetailedStatistics(VmaDetailedStatistics& inoutStats); + void AddStatistics(VmaStatistics& inoutStats); +#if VMA_STATS_STRING_ENABLED + // Writes JSON array with the list of allocations. 
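+    // Each array element is a single-line JSON object filled by VmaAllocation_T::PrintParameters().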
+ void BuildStatsString(VmaJsonWriter& json); +#endif + + bool IsEmpty(); + void Register(VmaAllocation alloc); + void Unregister(VmaAllocation alloc); + +private: + typedef VmaIntrusiveLinkedList DedicatedAllocationLinkedList; + + bool m_UseMutex = true; + VMA_RW_MUTEX m_Mutex; + DedicatedAllocationLinkedList m_AllocationList; +}; + +#ifndef _VMA_DEDICATED_ALLOCATION_LIST_FUNCTIONS + +VmaDedicatedAllocationList::~VmaDedicatedAllocationList() +{ + VMA_HEAVY_ASSERT(Validate()); + + if (!m_AllocationList.IsEmpty()) + { + VMA_ASSERT_LEAK(false && "Unfreed dedicated allocations found!"); + } +} + +bool VmaDedicatedAllocationList::Validate() +{ + const size_t declaredCount = m_AllocationList.GetCount(); + size_t actualCount = 0; + VmaMutexLockRead lock(m_Mutex, m_UseMutex); + for (VmaAllocation alloc = m_AllocationList.Front(); + alloc != VMA_NULL; alloc = m_AllocationList.GetNext(alloc)) + { + ++actualCount; + } + VMA_VALIDATE(actualCount == declaredCount); + + return true; +} + +void VmaDedicatedAllocationList::AddDetailedStatistics(VmaDetailedStatistics& inoutStats) +{ + for(auto* item = m_AllocationList.Front(); item != VMA_NULL; item = DedicatedAllocationLinkedList::GetNext(item)) + { + const VkDeviceSize size = item->GetSize(); + inoutStats.statistics.blockCount++; + inoutStats.statistics.blockBytes += size; + VmaAddDetailedStatisticsAllocation(inoutStats, item->GetSize()); + } +} + +void VmaDedicatedAllocationList::AddStatistics(VmaStatistics& inoutStats) +{ + VmaMutexLockRead lock(m_Mutex, m_UseMutex); + + const uint32_t allocCount = (uint32_t)m_AllocationList.GetCount(); + inoutStats.blockCount += allocCount; + inoutStats.allocationCount += allocCount; + + for(auto* item = m_AllocationList.Front(); item != VMA_NULL; item = DedicatedAllocationLinkedList::GetNext(item)) + { + const VkDeviceSize size = item->GetSize(); + inoutStats.blockBytes += size; + inoutStats.allocationBytes += size; + } +} + +#if VMA_STATS_STRING_ENABLED +void VmaDedicatedAllocationList::BuildStatsString(VmaJsonWriter& json) +{ + VmaMutexLockRead lock(m_Mutex, m_UseMutex); + json.BeginArray(); + for (VmaAllocation alloc = m_AllocationList.Front(); + alloc != VMA_NULL; alloc = m_AllocationList.GetNext(alloc)) + { + json.BeginObject(true); + alloc->PrintParameters(json); + json.EndObject(); + } + json.EndArray(); +} +#endif // VMA_STATS_STRING_ENABLED + +bool VmaDedicatedAllocationList::IsEmpty() +{ + VmaMutexLockRead lock(m_Mutex, m_UseMutex); + return m_AllocationList.IsEmpty(); +} + +void VmaDedicatedAllocationList::Register(VmaAllocation alloc) +{ + VmaMutexLockWrite lock(m_Mutex, m_UseMutex); + m_AllocationList.PushBack(alloc); +} + +void VmaDedicatedAllocationList::Unregister(VmaAllocation alloc) +{ + VmaMutexLockWrite lock(m_Mutex, m_UseMutex); + m_AllocationList.Remove(alloc); +} +#endif // _VMA_DEDICATED_ALLOCATION_LIST_FUNCTIONS +#endif // _VMA_DEDICATED_ALLOCATION_LIST + +#ifndef _VMA_SUBALLOCATION +/* +Represents a region of VmaDeviceMemoryBlock that is either assigned and returned as +allocated memory block or free. +*/ +struct VmaSuballocation +{ + VkDeviceSize offset; + VkDeviceSize size; + void* userData; + VmaSuballocationType type; +}; + +// Comparator for offsets. 
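+// VmaSuballocationOffsetLess/Greater keep suballocation containers ordered by offset
+// (ascending or descending). VmaSuballocationItemSizeLess, further below, orders free
+// suballocations by size; its VkDeviceSize overload enables lower-bound style lookups
+// by raw size, e.g. when searching for a free range large enough for a new allocation.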
+struct VmaSuballocationOffsetLess +{ + bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const + { + return lhs.offset < rhs.offset; + } +}; + +struct VmaSuballocationOffsetGreater +{ + bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const + { + return lhs.offset > rhs.offset; + } +}; + +struct VmaSuballocationItemSizeLess +{ + bool operator()(const VmaSuballocationList::iterator lhs, + const VmaSuballocationList::iterator rhs) const + { + return lhs->size < rhs->size; + } + + bool operator()(const VmaSuballocationList::iterator lhs, + VkDeviceSize rhsSize) const + { + return lhs->size < rhsSize; + } +}; +#endif // _VMA_SUBALLOCATION + +#ifndef _VMA_ALLOCATION_REQUEST +/* +Parameters of planned allocation inside a VmaDeviceMemoryBlock. +item points to a FREE suballocation. +*/ +struct VmaAllocationRequest +{ + VmaAllocHandle allocHandle; + VkDeviceSize size; + VmaSuballocationList::iterator item; + void* customData; + uint64_t algorithmData; + VmaAllocationRequestType type; +}; +#endif // _VMA_ALLOCATION_REQUEST + +#ifndef _VMA_BLOCK_METADATA +/* +Data structure used for bookkeeping of allocations and unused ranges of memory +in a single VkDeviceMemory block. +*/ +class VmaBlockMetadata +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaBlockMetadata) +public: + // pAllocationCallbacks, if not null, must be owned externally - alive and unchanged for the whole lifetime of this object. + VmaBlockMetadata(const VkAllocationCallbacks* pAllocationCallbacks, + VkDeviceSize bufferImageGranularity, bool isVirtual); + virtual ~VmaBlockMetadata() = default; + + virtual void Init(VkDeviceSize size) { m_Size = size; } + bool IsVirtual() const { return m_IsVirtual; } + VkDeviceSize GetSize() const { return m_Size; } + + // Validates all data structures inside this object. If not valid, returns false. + virtual bool Validate() const = 0; + virtual size_t GetAllocationCount() const = 0; + virtual size_t GetFreeRegionsCount() const = 0; + virtual VkDeviceSize GetSumFreeSize() const = 0; + // Returns true if this block is empty - contains only single free suballocation. + virtual bool IsEmpty() const = 0; + virtual void GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) = 0; + virtual VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle) const = 0; + virtual void* GetAllocationUserData(VmaAllocHandle allocHandle) const = 0; + + virtual VmaAllocHandle GetAllocationListBegin() const = 0; + virtual VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc) const = 0; + virtual VkDeviceSize GetNextFreeRegionSize(VmaAllocHandle alloc) const = 0; + + // Shouldn't modify blockCount. + virtual void AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const = 0; + virtual void AddStatistics(VmaStatistics& inoutStats) const = 0; + +#if VMA_STATS_STRING_ENABLED + virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0; +#endif + + // Tries to find a place for suballocation with given parameters inside this block. + // If succeeded, fills pAllocationRequest and returns true. + // If failed, returns false. + virtual bool CreateAllocationRequest( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + bool upperAddress, + VmaSuballocationType allocType, + // Always one of VMA_ALLOCATION_CREATE_STRATEGY_* or VMA_ALLOCATION_INTERNAL_STRATEGY_* flags. + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) = 0; + + virtual VkResult CheckCorruption(const void* pBlockData) = 0; + + // Makes actual allocation based on request. 
Request must already be checked and valid. + virtual void Alloc( + const VmaAllocationRequest& request, + VmaSuballocationType type, + void* userData) = 0; + + // Frees suballocation assigned to given memory region. + virtual void Free(VmaAllocHandle allocHandle) = 0; + + // Frees all allocations. + // Careful! Don't call it if there are VmaAllocation objects owned by userData of cleared allocations! + virtual void Clear() = 0; + + virtual void SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) = 0; + virtual void DebugLogAllAllocations() const = 0; + +protected: + const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; } + VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; } + VkDeviceSize GetDebugMargin() const { return VkDeviceSize(IsVirtual() ? 0 : VMA_DEBUG_MARGIN); } + + void DebugLogAllocation(VkDeviceSize offset, VkDeviceSize size, void* userData) const; +#if VMA_STATS_STRING_ENABLED + // mapRefCount == UINT32_MAX means unspecified. + void PrintDetailedMap_Begin(class VmaJsonWriter& json, + VkDeviceSize unusedBytes, + size_t allocationCount, + size_t unusedRangeCount) const; + void PrintDetailedMap_Allocation(class VmaJsonWriter& json, + VkDeviceSize offset, VkDeviceSize size, void* userData) const; + static void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json, + VkDeviceSize offset, + VkDeviceSize size); + static void PrintDetailedMap_End(class VmaJsonWriter& json); +#endif + +private: + VkDeviceSize m_Size; + const VkAllocationCallbacks* m_pAllocationCallbacks; + const VkDeviceSize m_BufferImageGranularity; + const bool m_IsVirtual; +}; + +#ifndef _VMA_BLOCK_METADATA_FUNCTIONS +VmaBlockMetadata::VmaBlockMetadata(const VkAllocationCallbacks* pAllocationCallbacks, + VkDeviceSize bufferImageGranularity, bool isVirtual) + : m_Size(0), + m_pAllocationCallbacks(pAllocationCallbacks), + m_BufferImageGranularity(bufferImageGranularity), + m_IsVirtual(isVirtual) {} + +void VmaBlockMetadata::DebugLogAllocation(VkDeviceSize offset, VkDeviceSize size, void* userData) const +{ + if (IsVirtual()) + { + VMA_LEAK_LOG_FORMAT("UNFREED VIRTUAL ALLOCATION; Offset: %" PRIu64 "; Size: %" PRIu64 "; UserData: %p", offset, size, userData); + } + else + { + VMA_ASSERT(userData != VMA_NULL); + VmaAllocation allocation = reinterpret_cast(userData); + + userData = allocation->GetUserData(); + const char* name = allocation->GetName(); + +#if VMA_STATS_STRING_ENABLED + VMA_LEAK_LOG_FORMAT("UNFREED ALLOCATION; Offset: %" PRIu64 "; Size: %" PRIu64 "; UserData: %p; Name: %s; Type: %s; Usage: %" PRIu64, + offset, size, userData, name ? name : "vma_empty", + VMA_SUBALLOCATION_TYPE_NAMES[allocation->GetSuballocationType()], + (uint64_t)allocation->GetBufferImageUsage().Value); +#else + VMA_LEAK_LOG_FORMAT("UNFREED ALLOCATION; Offset: %" PRIu64 "; Size: %" PRIu64 "; UserData: %p; Name: %s; Type: %u", + offset, size, userData, name ? 
name : "vma_empty", + (unsigned)allocation->GetSuballocationType()); +#endif // VMA_STATS_STRING_ENABLED + } + +} + +#if VMA_STATS_STRING_ENABLED +void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json, + VkDeviceSize unusedBytes, size_t allocationCount, size_t unusedRangeCount) const +{ + json.WriteString("TotalBytes"); + json.WriteNumber(GetSize()); + + json.WriteString("UnusedBytes"); + json.WriteNumber(unusedBytes); + + json.WriteString("Allocations"); + json.WriteNumber((uint64_t)allocationCount); + + json.WriteString("UnusedRanges"); + json.WriteNumber((uint64_t)unusedRangeCount); + + json.WriteString("Suballocations"); + json.BeginArray(); +} + +void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json, + VkDeviceSize offset, VkDeviceSize size, void* userData) const +{ + json.BeginObject(true); + + json.WriteString("Offset"); + json.WriteNumber(offset); + + if (IsVirtual()) + { + json.WriteString("Size"); + json.WriteNumber(size); + if (userData) + { + json.WriteString("CustomData"); + json.BeginString(); + json.ContinueString_Pointer(userData); + json.EndString(); + } + } + else + { + ((VmaAllocation)userData)->PrintParameters(json); + } + + json.EndObject(); +} + +void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json, + VkDeviceSize offset, VkDeviceSize size) +{ + json.BeginObject(true); + + json.WriteString("Offset"); + json.WriteNumber(offset); + + json.WriteString("Type"); + json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]); + + json.WriteString("Size"); + json.WriteNumber(size); + + json.EndObject(); +} + +void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) +{ + json.EndArray(); +} +#endif // VMA_STATS_STRING_ENABLED +#endif // _VMA_BLOCK_METADATA_FUNCTIONS +#endif // _VMA_BLOCK_METADATA + +#ifndef _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY +// Before deleting object of this class remember to call 'Destroy()' +class VmaBlockBufferImageGranularity final +{ +public: + struct ValidationContext + { + const VkAllocationCallbacks* allocCallbacks; + uint16_t* pageAllocs; + }; + + explicit VmaBlockBufferImageGranularity(VkDeviceSize bufferImageGranularity); + ~VmaBlockBufferImageGranularity(); + + bool IsEnabled() const { return m_BufferImageGranularity > MAX_LOW_BUFFER_IMAGE_GRANULARITY; } + + void Init(const VkAllocationCallbacks* pAllocationCallbacks, VkDeviceSize size); + // Before destroying object you must call free it's memory + void Destroy(const VkAllocationCallbacks* pAllocationCallbacks); + + void RoundupAllocRequest(VmaSuballocationType allocType, + VkDeviceSize& inOutAllocSize, + VkDeviceSize& inOutAllocAlignment) const; + + bool CheckConflictAndAlignUp(VkDeviceSize& inOutAllocOffset, + VkDeviceSize allocSize, + VkDeviceSize blockOffset, + VkDeviceSize blockSize, + VmaSuballocationType allocType) const; + + void AllocPages(uint8_t allocType, VkDeviceSize offset, VkDeviceSize size); + void FreePages(VkDeviceSize offset, VkDeviceSize size); + void Clear(); + + ValidationContext StartValidation(const VkAllocationCallbacks* pAllocationCallbacks, + bool isVirutal) const; + bool Validate(ValidationContext& ctx, VkDeviceSize offset, VkDeviceSize size) const; + bool FinishValidation(ValidationContext& ctx) const; + +private: + static constexpr uint16_t MAX_LOW_BUFFER_IMAGE_GRANULARITY = 256; + + struct RegionInfo + { + uint8_t allocType; + uint16_t allocCount; + }; + + VkDeviceSize m_BufferImageGranularity; + uint32_t m_RegionCount; + RegionInfo* m_RegionInfo; + + uint32_t 
GetStartPage(VkDeviceSize offset) const { return OffsetToPageIndex(offset & ~(m_BufferImageGranularity - 1)); }
+ uint32_t GetEndPage(VkDeviceSize offset, VkDeviceSize size) const { return OffsetToPageIndex((offset + size - 1) & ~(m_BufferImageGranularity - 1)); }
+
+ uint32_t OffsetToPageIndex(VkDeviceSize offset) const;
+ static void AllocPage(RegionInfo& page, uint8_t allocType);
+};
+
+#ifndef _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY_FUNCTIONS
+VmaBlockBufferImageGranularity::VmaBlockBufferImageGranularity(VkDeviceSize bufferImageGranularity)
+ : m_BufferImageGranularity(bufferImageGranularity),
+ m_RegionCount(0),
+ m_RegionInfo(VMA_NULL) {}
+
+VmaBlockBufferImageGranularity::~VmaBlockBufferImageGranularity()
+{
+ VMA_ASSERT(m_RegionInfo == VMA_NULL && "Free not called before destroying object!");
+}
+
+void VmaBlockBufferImageGranularity::Init(const VkAllocationCallbacks* pAllocationCallbacks, VkDeviceSize size)
+{
+ if (IsEnabled())
+ {
+ m_RegionCount = static_cast<uint32_t>(VmaDivideRoundingUp(size, m_BufferImageGranularity));
+ m_RegionInfo = vma_new_array(pAllocationCallbacks, RegionInfo, m_RegionCount);
+ memset(m_RegionInfo, 0, m_RegionCount * sizeof(RegionInfo));
+ }
+}
+
+void VmaBlockBufferImageGranularity::Destroy(const VkAllocationCallbacks* pAllocationCallbacks)
+{
+ if (m_RegionInfo)
+ {
+ vma_delete_array(pAllocationCallbacks, m_RegionInfo, m_RegionCount);
+ m_RegionInfo = VMA_NULL;
+ }
+}
+
+void VmaBlockBufferImageGranularity::RoundupAllocRequest(VmaSuballocationType allocType,
+ VkDeviceSize& inOutAllocSize,
+ VkDeviceSize& inOutAllocAlignment) const
+{
+ if (m_BufferImageGranularity > 1 &&
+ m_BufferImageGranularity <= MAX_LOW_BUFFER_IMAGE_GRANULARITY)
+ {
+ if (allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
+ allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
+ allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
+ {
+ inOutAllocAlignment = VMA_MAX(inOutAllocAlignment, m_BufferImageGranularity);
+ inOutAllocSize = VmaAlignUp(inOutAllocSize, m_BufferImageGranularity);
+ }
+ }
+}
+
+bool VmaBlockBufferImageGranularity::CheckConflictAndAlignUp(VkDeviceSize& inOutAllocOffset,
+ VkDeviceSize allocSize,
+ VkDeviceSize blockOffset,
+ VkDeviceSize blockSize,
+ VmaSuballocationType allocType) const
+{
+ if (IsEnabled())
+ {
+ uint32_t startPage = GetStartPage(inOutAllocOffset);
+ if (m_RegionInfo[startPage].allocCount > 0 &&
+ VmaIsBufferImageGranularityConflict(static_cast<VmaSuballocationType>(m_RegionInfo[startPage].allocType), allocType))
+ {
+ inOutAllocOffset = VmaAlignUp(inOutAllocOffset, m_BufferImageGranularity);
+ if (blockSize < allocSize + inOutAllocOffset - blockOffset)
+ return true;
+ ++startPage;
+ }
+ uint32_t endPage = GetEndPage(inOutAllocOffset, allocSize);
+ if (endPage != startPage &&
+ m_RegionInfo[endPage].allocCount > 0 &&
+ VmaIsBufferImageGranularityConflict(static_cast<VmaSuballocationType>(m_RegionInfo[endPage].allocType), allocType))
+ {
+ return true;
+ }
+ }
+ return false;
+}
+
+void VmaBlockBufferImageGranularity::AllocPages(uint8_t allocType, VkDeviceSize offset, VkDeviceSize size)
+{
+ if (IsEnabled())
+ {
+ uint32_t startPage = GetStartPage(offset);
+ AllocPage(m_RegionInfo[startPage], allocType);
+
+ uint32_t endPage = GetEndPage(offset, size);
+ if (startPage != endPage)
+ AllocPage(m_RegionInfo[endPage], allocType);
+ }
+}
+
+void VmaBlockBufferImageGranularity::FreePages(VkDeviceSize offset, VkDeviceSize size)
+{
+ if (IsEnabled())
+ {
+ uint32_t startPage = GetStartPage(offset);
+ --m_RegionInfo[startPage].allocCount;
+ if (m_RegionInfo[startPage].allocCount == 0)
+ 
m_RegionInfo[startPage].allocType = VMA_SUBALLOCATION_TYPE_FREE; + uint32_t endPage = GetEndPage(offset, size); + if (startPage != endPage) + { + --m_RegionInfo[endPage].allocCount; + if (m_RegionInfo[endPage].allocCount == 0) + m_RegionInfo[endPage].allocType = VMA_SUBALLOCATION_TYPE_FREE; + } + } +} + +void VmaBlockBufferImageGranularity::Clear() +{ + if (m_RegionInfo) + memset(m_RegionInfo, 0, m_RegionCount * sizeof(RegionInfo)); +} + +VmaBlockBufferImageGranularity::ValidationContext VmaBlockBufferImageGranularity::StartValidation( + const VkAllocationCallbacks* pAllocationCallbacks, bool isVirutal) const +{ + ValidationContext ctx{ pAllocationCallbacks, VMA_NULL }; + if (!isVirutal && IsEnabled()) + { + ctx.pageAllocs = vma_new_array(pAllocationCallbacks, uint16_t, m_RegionCount); + memset(ctx.pageAllocs, 0, m_RegionCount * sizeof(uint16_t)); + } + return ctx; +} + +bool VmaBlockBufferImageGranularity::Validate(ValidationContext& ctx, + VkDeviceSize offset, VkDeviceSize size) const +{ + if (IsEnabled()) + { + uint32_t start = GetStartPage(offset); + ++ctx.pageAllocs[start]; + VMA_VALIDATE(m_RegionInfo[start].allocCount > 0); + + uint32_t end = GetEndPage(offset, size); + if (start != end) + { + ++ctx.pageAllocs[end]; + VMA_VALIDATE(m_RegionInfo[end].allocCount > 0); + } + } + return true; +} + +bool VmaBlockBufferImageGranularity::FinishValidation(ValidationContext& ctx) const +{ + // Check proper page structure + if (IsEnabled()) + { + VMA_ASSERT(ctx.pageAllocs != VMA_NULL && "Validation context not initialized!"); + + for (uint32_t page = 0; page < m_RegionCount; ++page) + { + VMA_VALIDATE(ctx.pageAllocs[page] == m_RegionInfo[page].allocCount); + } + vma_delete_array(ctx.allocCallbacks, ctx.pageAllocs, m_RegionCount); + ctx.pageAllocs = VMA_NULL; + } + return true; +} + +uint32_t VmaBlockBufferImageGranularity::OffsetToPageIndex(VkDeviceSize offset) const +{ + return static_cast(offset >> VMA_BITSCAN_MSB(m_BufferImageGranularity)); +} + +void VmaBlockBufferImageGranularity::AllocPage(RegionInfo& page, uint8_t allocType) +{ + // When current alloc type is free then it can be overridden by new type + if (page.allocCount == 0 || (page.allocCount > 0 && page.allocType == VMA_SUBALLOCATION_TYPE_FREE)) + page.allocType = allocType; + + ++page.allocCount; +} +#endif // _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY_FUNCTIONS +#endif // _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY + +#ifndef _VMA_BLOCK_METADATA_LINEAR +/* +Allocations and their references in internal data structure look like this: + +if(m_2ndVectorMode == SECOND_VECTOR_EMPTY): + + 0 +-------+ + | | + | | + | | + +-------+ + | Alloc | 1st[m_1stNullItemsBeginCount] + +-------+ + | Alloc | 1st[m_1stNullItemsBeginCount + 1] + +-------+ + | ... | + +-------+ + | Alloc | 1st[1st.size() - 1] + +-------+ + | | + | | + | | +GetSize() +-------+ + +if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER): + + 0 +-------+ + | Alloc | 2nd[0] + +-------+ + | Alloc | 2nd[1] + +-------+ + | ... | + +-------+ + | Alloc | 2nd[2nd.size() - 1] + +-------+ + | | + | | + | | + +-------+ + | Alloc | 1st[m_1stNullItemsBeginCount] + +-------+ + | Alloc | 1st[m_1stNullItemsBeginCount + 1] + +-------+ + | ... | + +-------+ + | Alloc | 1st[1st.size() - 1] + +-------+ + | | +GetSize() +-------+ + +if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK): + + 0 +-------+ + | | + | | + | | + +-------+ + | Alloc | 1st[m_1stNullItemsBeginCount] + +-------+ + | Alloc | 1st[m_1stNullItemsBeginCount + 1] + +-------+ + | ... 
|
+ +-------+
+ | Alloc | 1st[1st.size() - 1]
+ +-------+
+ | |
+ | |
+ | |
+ +-------+
+ | Alloc | 2nd[2nd.size() - 1]
+ +-------+
+ | ... |
+ +-------+
+ | Alloc | 2nd[1]
+ +-------+
+ | Alloc | 2nd[0]
+GetSize() +-------+
+
+*/
+class VmaBlockMetadata_Linear : public VmaBlockMetadata
+{
+ VMA_CLASS_NO_COPY_NO_MOVE(VmaBlockMetadata_Linear)
+public:
+ VmaBlockMetadata_Linear(const VkAllocationCallbacks* pAllocationCallbacks,
+ VkDeviceSize bufferImageGranularity, bool isVirtual);
+ ~VmaBlockMetadata_Linear() override = default;
+
+ VkDeviceSize GetSumFreeSize() const override { return m_SumFreeSize; }
+ bool IsEmpty() const override { return GetAllocationCount() == 0; }
+ VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle) const override { return (VkDeviceSize)allocHandle - 1; }
+
+ void Init(VkDeviceSize size) override;
+ bool Validate() const override;
+ size_t GetAllocationCount() const override;
+ size_t GetFreeRegionsCount() const override;
+
+ void AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const override;
+ void AddStatistics(VmaStatistics& inoutStats) const override;
+
+#if VMA_STATS_STRING_ENABLED
+ void PrintDetailedMap(class VmaJsonWriter& json) const override;
+#endif
+
+ bool CreateAllocationRequest(
+ VkDeviceSize allocSize,
+ VkDeviceSize allocAlignment,
+ bool upperAddress,
+ VmaSuballocationType allocType,
+ uint32_t strategy,
+ VmaAllocationRequest* pAllocationRequest) override;
+
+ VkResult CheckCorruption(const void* pBlockData) override;
+
+ void Alloc(
+ const VmaAllocationRequest& request,
+ VmaSuballocationType type,
+ void* userData) override;
+
+ void Free(VmaAllocHandle allocHandle) override;
+ void GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) override;
+ void* GetAllocationUserData(VmaAllocHandle allocHandle) const override;
+ VmaAllocHandle GetAllocationListBegin() const override;
+ VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc) const override;
+ VkDeviceSize GetNextFreeRegionSize(VmaAllocHandle alloc) const override;
+ void Clear() override;
+ void SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) override;
+ void DebugLogAllAllocations() const override;
+
+private:
+ /*
+ There are two suballocation vectors, used in ping-pong way.
+ The one with index m_1stVectorIndex is called 1st.
+ The one with index (m_1stVectorIndex ^ 1) is called 2nd.
+ 2nd can be non-empty only when 1st is not empty.
+ When 2nd is not empty, m_2ndVectorMode indicates its mode of operation.
+ */
+ typedef VmaVector<VmaSuballocation, VmaStlAllocator<VmaSuballocation>> SuballocationVectorType;
+
+ enum SECOND_VECTOR_MODE
+ {
+ SECOND_VECTOR_EMPTY,
+ /*
+ Suballocations in 2nd vector are created later than the ones in 1st, but they
+ all have smaller offset.
+ */
+ SECOND_VECTOR_RING_BUFFER,
+ /*
+ Suballocations in 2nd vector are upper side of double stack.
+ They all have offsets higher than those in 1st vector.
+ Top of this stack means smaller offsets, but higher indices in this vector.
+ */
+ SECOND_VECTOR_DOUBLE_STACK,
+ };
+
+ VkDeviceSize m_SumFreeSize;
+ SuballocationVectorType m_Suballocations0, m_Suballocations1;
+ uint32_t m_1stVectorIndex;
+ SECOND_VECTOR_MODE m_2ndVectorMode;
+ // Number of items in 1st vector with hAllocation = null at the beginning.
+ size_t m_1stNullItemsBeginCount;
+ // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
+ size_t m_1stNullItemsMiddleCount;
+ // Number of items in 2nd vector with hAllocation = null.
+ size_t m_2ndNullItemsCount;
+
+ SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
+ SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
+ const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
+ const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
+
+ VmaSuballocation& FindSuballocation(VkDeviceSize offset) const;
+ bool ShouldCompact1st() const;
+ void CleanupAfterFree();
+
+ bool CreateAllocationRequest_LowerAddress(
+ VkDeviceSize allocSize,
+ VkDeviceSize allocAlignment,
+ VmaSuballocationType allocType,
+ uint32_t strategy,
+ VmaAllocationRequest* pAllocationRequest);
+ bool CreateAllocationRequest_UpperAddress(
+ VkDeviceSize allocSize,
+ VkDeviceSize allocAlignment,
+ VmaSuballocationType allocType,
+ uint32_t strategy,
+ VmaAllocationRequest* pAllocationRequest);
+};
+
+#ifndef _VMA_BLOCK_METADATA_LINEAR_FUNCTIONS
+VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(const VkAllocationCallbacks* pAllocationCallbacks,
+ VkDeviceSize bufferImageGranularity, bool isVirtual)
+ : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual),
+ m_SumFreeSize(0),
+ m_Suballocations0(VmaStlAllocator<VmaSuballocation>(pAllocationCallbacks)),
+ m_Suballocations1(VmaStlAllocator<VmaSuballocation>(pAllocationCallbacks)),
+ m_1stVectorIndex(0),
+ m_2ndVectorMode(SECOND_VECTOR_EMPTY),
+ m_1stNullItemsBeginCount(0),
+ m_1stNullItemsMiddleCount(0),
+ m_2ndNullItemsCount(0) {}
+
+void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
+{
+ VmaBlockMetadata::Init(size);
+ m_SumFreeSize = size;
+}
+
+bool VmaBlockMetadata_Linear::Validate() const
+{
+ const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+ const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+
+ VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
+ VMA_VALIDATE(!suballocations1st.empty() ||
+ suballocations2nd.empty() ||
+ m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
+
+ if (!suballocations1st.empty())
+ {
+ // Null item at the beginning should be accounted into m_1stNullItemsBeginCount.
+ VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].type != VMA_SUBALLOCATION_TYPE_FREE);
+ // Null item at the end should be just pop_back().
+ VMA_VALIDATE(suballocations1st.back().type != VMA_SUBALLOCATION_TYPE_FREE);
+ }
+ if (!suballocations2nd.empty())
+ {
+ // Null item at the end should be just pop_back().
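+ // (CleanupAfterFree() pops trailing free items from both vectors after every Free(), so a free item may never remain at the back.)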
+ VMA_VALIDATE(suballocations2nd.back().type != VMA_SUBALLOCATION_TYPE_FREE); + } + + VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size()); + VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size()); + + VkDeviceSize sumUsedSize = 0; + const size_t suballoc1stCount = suballocations1st.size(); + const VkDeviceSize debugMargin = GetDebugMargin(); + VkDeviceSize offset = 0; + + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + const size_t suballoc2ndCount = suballocations2nd.size(); + size_t nullItem2ndCount = 0; + for (size_t i = 0; i < suballoc2ndCount; ++i) + { + const VmaSuballocation& suballoc = suballocations2nd[i]; + const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE); + + VmaAllocation const alloc = (VmaAllocation)suballoc.userData; + if (!IsVirtual()) + { + VMA_VALIDATE(currFree == (alloc == VK_NULL_HANDLE)); + } + VMA_VALIDATE(suballoc.offset >= offset); + + if (!currFree) + { + if (!IsVirtual()) + { + VMA_VALIDATE((VkDeviceSize)alloc->GetAllocHandle() == suballoc.offset + 1); + VMA_VALIDATE(alloc->GetSize() == suballoc.size); + } + sumUsedSize += suballoc.size; + } + else + { + ++nullItem2ndCount; + } + + offset = suballoc.offset + suballoc.size + debugMargin; + } + + VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount); + } + + for (size_t i = 0; i < m_1stNullItemsBeginCount; ++i) + { + const VmaSuballocation& suballoc = suballocations1st[i]; + VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE && + suballoc.userData == VMA_NULL); + } + + size_t nullItem1stCount = m_1stNullItemsBeginCount; + + for (size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i) + { + const VmaSuballocation& suballoc = suballocations1st[i]; + const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE); + + VmaAllocation const alloc = (VmaAllocation)suballoc.userData; + if (!IsVirtual()) + { + VMA_VALIDATE(currFree == (alloc == VK_NULL_HANDLE)); + } + VMA_VALIDATE(suballoc.offset >= offset); + VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree); + + if (!currFree) + { + if (!IsVirtual()) + { + VMA_VALIDATE((VkDeviceSize)alloc->GetAllocHandle() == suballoc.offset + 1); + VMA_VALIDATE(alloc->GetSize() == suballoc.size); + } + sumUsedSize += suballoc.size; + } + else + { + ++nullItem1stCount; + } + + offset = suballoc.offset + suballoc.size + debugMargin; + } + VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount); + + if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + const size_t suballoc2ndCount = suballocations2nd.size(); + size_t nullItem2ndCount = 0; + for (size_t i = suballoc2ndCount; i--; ) + { + const VmaSuballocation& suballoc = suballocations2nd[i]; + const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE); + + VmaAllocation const alloc = (VmaAllocation)suballoc.userData; + if (!IsVirtual()) + { + VMA_VALIDATE(currFree == (alloc == VK_NULL_HANDLE)); + } + VMA_VALIDATE(suballoc.offset >= offset); + + if (!currFree) + { + if (!IsVirtual()) + { + VMA_VALIDATE((VkDeviceSize)alloc->GetAllocHandle() == suballoc.offset + 1); + VMA_VALIDATE(alloc->GetSize() == suballoc.size); + } + sumUsedSize += suballoc.size; + } + else + { + ++nullItem2ndCount; + } + + offset = suballoc.offset + suballoc.size + debugMargin; + } + + VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount); + } + + VMA_VALIDATE(offset <= GetSize()); + VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize); + + return true; +} + +size_t VmaBlockMetadata_Linear::GetAllocationCount() const +{ + 
return AccessSuballocations1st().size() - m_1stNullItemsBeginCount - m_1stNullItemsMiddleCount + + AccessSuballocations2nd().size() - m_2ndNullItemsCount; +} + +size_t VmaBlockMetadata_Linear::GetFreeRegionsCount() const +{ + // Function only used for defragmentation, which is disabled for this algorithm + VMA_ASSERT(0); + return SIZE_MAX; +} + +void VmaBlockMetadata_Linear::AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const +{ + const VkDeviceSize size = GetSize(); + const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + const size_t suballoc1stCount = suballocations1st.size(); + const size_t suballoc2ndCount = suballocations2nd.size(); + + inoutStats.statistics.blockCount++; + inoutStats.statistics.blockBytes += size; + + VkDeviceSize lastOffset = 0; + + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset; + size_t nextAlloc2ndIndex = 0; + while (lastOffset < freeSpace2ndTo1stEnd) + { + // Find next non-null allocation or move nextAllocIndex to the end. + while (nextAlloc2ndIndex < suballoc2ndCount && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + ++nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex < suballoc2ndCount) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size); + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc2ndIndex; + } + // We are at the end. + else + { + // There is free space from lastOffset to freeSpace2ndTo1stEnd. + if (lastOffset < freeSpace2ndTo1stEnd) + { + const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset; + VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); + } + + // End of loop. + lastOffset = freeSpace2ndTo1stEnd; + } + } + } + + size_t nextAlloc1stIndex = m_1stNullItemsBeginCount; + const VkDeviceSize freeSpace1stTo2ndEnd = + m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size; + while (lastOffset < freeSpace1stTo2ndEnd) + { + // Find next non-null allocation or move nextAllocIndex to the end. + while (nextAlloc1stIndex < suballoc1stCount && + suballocations1st[nextAlloc1stIndex].userData == VMA_NULL) + { + ++nextAlloc1stIndex; + } + + // Found non-null allocation. + if (nextAlloc1stIndex < suballoc1stCount) + { + const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size); + + // 3. Prepare for next iteration. 
+ lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc1stIndex; + } + // We are at the end. + else + { + // There is free space from lastOffset to freeSpace1stTo2ndEnd. + if (lastOffset < freeSpace1stTo2ndEnd) + { + const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset; + VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); + } + + // End of loop. + lastOffset = freeSpace1stTo2ndEnd; + } + } + + if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + size_t nextAlloc2ndIndex = suballocations2nd.size() - 1; + while (lastOffset < size) + { + // Find next non-null allocation or move nextAllocIndex to the end. + while (nextAlloc2ndIndex != SIZE_MAX && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + --nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex != SIZE_MAX) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size); + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + --nextAlloc2ndIndex; + } + // We are at the end. + else + { + // There is free space from lastOffset to size. + if (lastOffset < size) + { + const VkDeviceSize unusedRangeSize = size - lastOffset; + VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); + } + + // End of loop. + lastOffset = size; + } + } + } +} + +void VmaBlockMetadata_Linear::AddStatistics(VmaStatistics& inoutStats) const +{ + const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + const VkDeviceSize size = GetSize(); + const size_t suballoc1stCount = suballocations1st.size(); + const size_t suballoc2ndCount = suballocations2nd.size(); + + inoutStats.blockCount++; + inoutStats.blockBytes += size; + inoutStats.allocationBytes += size - m_SumFreeSize; + + VkDeviceSize lastOffset = 0; + + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset; + size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount; + while (lastOffset < freeSpace2ndTo1stEnd) + { + // Find next non-null allocation or move nextAlloc2ndIndex to the end. + while (nextAlloc2ndIndex < suballoc2ndCount && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + ++nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex < suballoc2ndCount) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + ++inoutStats.allocationCount; + + // Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc2ndIndex; + } + // We are at the end. + else + { + // End of loop. + lastOffset = freeSpace2ndTo1stEnd; + } + } + } + + size_t nextAlloc1stIndex = m_1stNullItemsBeginCount; + const VkDeviceSize freeSpace1stTo2ndEnd = + m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? 
suballocations2nd.back().offset : size; + while (lastOffset < freeSpace1stTo2ndEnd) + { + // Find next non-null allocation or move nextAllocIndex to the end. + while (nextAlloc1stIndex < suballoc1stCount && + suballocations1st[nextAlloc1stIndex].userData == VMA_NULL) + { + ++nextAlloc1stIndex; + } + + // Found non-null allocation. + if (nextAlloc1stIndex < suballoc1stCount) + { + const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex]; + + // Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + ++inoutStats.allocationCount; + + // Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc1stIndex; + } + // We are at the end. + else + { + // End of loop. + lastOffset = freeSpace1stTo2ndEnd; + } + } + + if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + size_t nextAlloc2ndIndex = suballocations2nd.size() - 1; + while (lastOffset < size) + { + // Find next non-null allocation or move nextAlloc2ndIndex to the end. + while (nextAlloc2ndIndex != SIZE_MAX && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + --nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex != SIZE_MAX) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + ++inoutStats.allocationCount; + + // Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + --nextAlloc2ndIndex; + } + // We are at the end. + else + { + // End of loop. + lastOffset = size; + } + } + } +} + +#if VMA_STATS_STRING_ENABLED +void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const +{ + const VkDeviceSize size = GetSize(); + const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + const size_t suballoc1stCount = suballocations1st.size(); + const size_t suballoc2ndCount = suballocations2nd.size(); + + // FIRST PASS + + size_t unusedRangeCount = 0; + VkDeviceSize usedBytes = 0; + + VkDeviceSize lastOffset = 0; + + size_t alloc2ndCount = 0; + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset; + size_t nextAlloc2ndIndex = 0; + while (lastOffset < freeSpace2ndTo1stEnd) + { + // Find next non-null allocation or move nextAlloc2ndIndex to the end. + while (nextAlloc2ndIndex < suballoc2ndCount && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + ++nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex < suballoc2ndCount) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + ++unusedRangeCount; + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + ++alloc2ndCount; + usedBytes += suballoc.size; + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc2ndIndex; + } + // We are at the end. + else + { + if (lastOffset < freeSpace2ndTo1stEnd) + { + // There is free space from lastOffset to freeSpace2ndTo1stEnd. + ++unusedRangeCount; + } + + // End of loop. 
+ lastOffset = freeSpace2ndTo1stEnd; + } + } + } + + size_t nextAlloc1stIndex = m_1stNullItemsBeginCount; + size_t alloc1stCount = 0; + const VkDeviceSize freeSpace1stTo2ndEnd = + m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size; + while (lastOffset < freeSpace1stTo2ndEnd) + { + // Find next non-null allocation or move nextAllocIndex to the end. + while (nextAlloc1stIndex < suballoc1stCount && + suballocations1st[nextAlloc1stIndex].userData == VMA_NULL) + { + ++nextAlloc1stIndex; + } + + // Found non-null allocation. + if (nextAlloc1stIndex < suballoc1stCount) + { + const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + ++unusedRangeCount; + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + ++alloc1stCount; + usedBytes += suballoc.size; + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc1stIndex; + } + // We are at the end. + else + { + if (lastOffset < freeSpace1stTo2ndEnd) + { + // There is free space from lastOffset to freeSpace1stTo2ndEnd. + ++unusedRangeCount; + } + + // End of loop. + lastOffset = freeSpace1stTo2ndEnd; + } + } + + if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + size_t nextAlloc2ndIndex = suballocations2nd.size() - 1; + while (lastOffset < size) + { + // Find next non-null allocation or move nextAlloc2ndIndex to the end. + while (nextAlloc2ndIndex != SIZE_MAX && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + --nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex != SIZE_MAX) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + ++unusedRangeCount; + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + ++alloc2ndCount; + usedBytes += suballoc.size; + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + --nextAlloc2ndIndex; + } + // We are at the end. + else + { + if (lastOffset < size) + { + // There is free space from lastOffset to size. + ++unusedRangeCount; + } + + // End of loop. + lastOffset = size; + } + } + } + + const VkDeviceSize unusedBytes = size - usedBytes; + PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount); + + // SECOND PASS + lastOffset = 0; + + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset; + size_t nextAlloc2ndIndex = 0; + while (lastOffset < freeSpace2ndTo1stEnd) + { + // Find next non-null allocation or move nextAlloc2ndIndex to the end. + while (nextAlloc2ndIndex < suballoc2ndCount && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + ++nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex < suballoc2ndCount) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. 
+ const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData); + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc2ndIndex; + } + // We are at the end. + else + { + if (lastOffset < freeSpace2ndTo1stEnd) + { + // There is free space from lastOffset to freeSpace2ndTo1stEnd. + const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset; + PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); + } + + // End of loop. + lastOffset = freeSpace2ndTo1stEnd; + } + } + } + + nextAlloc1stIndex = m_1stNullItemsBeginCount; + while (lastOffset < freeSpace1stTo2ndEnd) + { + // Find next non-null allocation or move nextAllocIndex to the end. + while (nextAlloc1stIndex < suballoc1stCount && + suballocations1st[nextAlloc1stIndex].userData == VMA_NULL) + { + ++nextAlloc1stIndex; + } + + // Found non-null allocation. + if (nextAlloc1stIndex < suballoc1stCount) + { + const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData); + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc1stIndex; + } + // We are at the end. + else + { + if (lastOffset < freeSpace1stTo2ndEnd) + { + // There is free space from lastOffset to freeSpace1stTo2ndEnd. + const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset; + PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); + } + + // End of loop. + lastOffset = freeSpace1stTo2ndEnd; + } + } + + if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + size_t nextAlloc2ndIndex = suballocations2nd.size() - 1; + while (lastOffset < size) + { + // Find next non-null allocation or move nextAlloc2ndIndex to the end. + while (nextAlloc2ndIndex != SIZE_MAX && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + --nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex != SIZE_MAX) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData); + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + --nextAlloc2ndIndex; + } + // We are at the end. + else + { + if (lastOffset < size) + { + // There is free space from lastOffset to size. 
+ const VkDeviceSize unusedRangeSize = size - lastOffset; + PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); + } + + // End of loop. + lastOffset = size; + } + } + } + + PrintDetailedMap_End(json); +} +#endif // VMA_STATS_STRING_ENABLED + +bool VmaBlockMetadata_Linear::CreateAllocationRequest( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + bool upperAddress, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) +{ + VMA_ASSERT(allocSize > 0); + VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE); + VMA_ASSERT(pAllocationRequest != VMA_NULL); + VMA_HEAVY_ASSERT(Validate()); + + if(allocSize > GetSize()) + return false; + + pAllocationRequest->size = allocSize; + return upperAddress ? + CreateAllocationRequest_UpperAddress( + allocSize, allocAlignment, allocType, strategy, pAllocationRequest) : + CreateAllocationRequest_LowerAddress( + allocSize, allocAlignment, allocType, strategy, pAllocationRequest); +} + +VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData) +{ + VMA_ASSERT(!IsVirtual()); + SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + for (size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i) + { + const VmaSuballocation& suballoc = suballocations1st[i]; + if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) + { + if (!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size)) + { + VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!"); + return VK_ERROR_UNKNOWN_COPY; + } + } + } + + SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + for (size_t i = 0, count = suballocations2nd.size(); i < count; ++i) + { + const VmaSuballocation& suballoc = suballocations2nd[i]; + if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) + { + if (!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size)) + { + VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!"); + return VK_ERROR_UNKNOWN_COPY; + } + } + } + + return VK_SUCCESS; +} + +void VmaBlockMetadata_Linear::Alloc( + const VmaAllocationRequest& request, + VmaSuballocationType type, + void* userData) +{ + const VkDeviceSize offset = (VkDeviceSize)request.allocHandle - 1; + const VmaSuballocation newSuballoc = { offset, request.size, userData, type }; + + switch (request.type) + { + case VmaAllocationRequestType::UpperAddress: + { + VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER && + "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer."); + SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + suballocations2nd.push_back(newSuballoc); + m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK; + } + break; + case VmaAllocationRequestType::EndOf1st: + { + SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + + VMA_ASSERT(suballocations1st.empty() || + offset >= suballocations1st.back().offset + suballocations1st.back().size); + // Check if it fits before the end of the block. + VMA_ASSERT(offset + request.size <= GetSize()); + + suballocations1st.push_back(newSuballoc); + } + break; + case VmaAllocationRequestType::EndOf2nd: + { + SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + // New allocation at the end of 2-part ring buffer, so before first allocation from 1st vector. 
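+ // It must therefore end at or before the offset of the first non-null 1st-vector item, which the assert below verifies.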
+ VMA_ASSERT(!suballocations1st.empty() && + offset + request.size <= suballocations1st[m_1stNullItemsBeginCount].offset); + SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + + switch (m_2ndVectorMode) + { + case SECOND_VECTOR_EMPTY: + // First allocation from second part ring buffer. + VMA_ASSERT(suballocations2nd.empty()); + m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER; + break; + case SECOND_VECTOR_RING_BUFFER: + // 2-part ring buffer is already started. + VMA_ASSERT(!suballocations2nd.empty()); + break; + case SECOND_VECTOR_DOUBLE_STACK: + VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack."); + break; + default: + VMA_ASSERT(0); + } + + suballocations2nd.push_back(newSuballoc); + } + break; + default: + VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR."); + } + + m_SumFreeSize -= newSuballoc.size; +} + +void VmaBlockMetadata_Linear::Free(VmaAllocHandle allocHandle) +{ + SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + VkDeviceSize offset = (VkDeviceSize)allocHandle - 1; + + if (!suballocations1st.empty()) + { + // First allocation: Mark it as next empty at the beginning. + VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount]; + if (firstSuballoc.offset == offset) + { + firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE; + firstSuballoc.userData = VMA_NULL; + m_SumFreeSize += firstSuballoc.size; + ++m_1stNullItemsBeginCount; + CleanupAfterFree(); + return; + } + } + + // Last allocation in 2-part ring buffer or top of upper stack (same logic). + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER || + m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + VmaSuballocation& lastSuballoc = suballocations2nd.back(); + if (lastSuballoc.offset == offset) + { + m_SumFreeSize += lastSuballoc.size; + suballocations2nd.pop_back(); + CleanupAfterFree(); + return; + } + } + // Last allocation in 1st vector. + else if (m_2ndVectorMode == SECOND_VECTOR_EMPTY) + { + VmaSuballocation& lastSuballoc = suballocations1st.back(); + if (lastSuballoc.offset == offset) + { + m_SumFreeSize += lastSuballoc.size; + suballocations1st.pop_back(); + CleanupAfterFree(); + return; + } + } + + VmaSuballocation refSuballoc; + refSuballoc.offset = offset; + // Rest of members stays uninitialized intentionally for better performance. + + // Item from the middle of 1st vector. + { + const SuballocationVectorType::iterator it = VmaBinaryFindSorted( + suballocations1st.begin() + m_1stNullItemsBeginCount, + suballocations1st.end(), + refSuballoc, + VmaSuballocationOffsetLess()); + if (it != suballocations1st.end()) + { + it->type = VMA_SUBALLOCATION_TYPE_FREE; + it->userData = VMA_NULL; + ++m_1stNullItemsMiddleCount; + m_SumFreeSize += it->size; + CleanupAfterFree(); + return; + } + } + + if (m_2ndVectorMode != SECOND_VECTOR_EMPTY) + { + // Item from the middle of 2nd vector. + const SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ? 
+ VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) : + VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater()); + if (it != suballocations2nd.end()) + { + it->type = VMA_SUBALLOCATION_TYPE_FREE; + it->userData = VMA_NULL; + ++m_2ndNullItemsCount; + m_SumFreeSize += it->size; + CleanupAfterFree(); + return; + } + } + + VMA_ASSERT(0 && "Allocation to free not found in linear allocator!"); +} + +void VmaBlockMetadata_Linear::GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) +{ + outInfo.offset = (VkDeviceSize)allocHandle - 1; + VmaSuballocation& suballoc = FindSuballocation(outInfo.offset); + outInfo.size = suballoc.size; + outInfo.pUserData = suballoc.userData; +} + +void* VmaBlockMetadata_Linear::GetAllocationUserData(VmaAllocHandle allocHandle) const +{ + return FindSuballocation((VkDeviceSize)allocHandle - 1).userData; +} + +VmaAllocHandle VmaBlockMetadata_Linear::GetAllocationListBegin() const +{ + // Function only used for defragmentation, which is disabled for this algorithm + VMA_ASSERT(0); + return VK_NULL_HANDLE; +} + +VmaAllocHandle VmaBlockMetadata_Linear::GetNextAllocation(VmaAllocHandle prevAlloc) const +{ + // Function only used for defragmentation, which is disabled for this algorithm + VMA_ASSERT(0); + return VK_NULL_HANDLE; +} + +VkDeviceSize VmaBlockMetadata_Linear::GetNextFreeRegionSize(VmaAllocHandle alloc) const +{ + // Function only used for defragmentation, which is disabled for this algorithm + VMA_ASSERT(0); + return 0; +} + +void VmaBlockMetadata_Linear::Clear() +{ + m_SumFreeSize = GetSize(); + m_Suballocations0.clear(); + m_Suballocations1.clear(); + // Leaving m_1stVectorIndex unchanged - it doesn't matter. + m_2ndVectorMode = SECOND_VECTOR_EMPTY; + m_1stNullItemsBeginCount = 0; + m_1stNullItemsMiddleCount = 0; + m_2ndNullItemsCount = 0; +} + +void VmaBlockMetadata_Linear::SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) +{ + VmaSuballocation& suballoc = FindSuballocation((VkDeviceSize)allocHandle - 1); + suballoc.userData = userData; +} + +void VmaBlockMetadata_Linear::DebugLogAllAllocations() const +{ + const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + for (auto it = suballocations1st.begin() + m_1stNullItemsBeginCount; it != suballocations1st.end(); ++it) + if (it->type != VMA_SUBALLOCATION_TYPE_FREE) + DebugLogAllocation(it->offset, it->size, it->userData); + + const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + for (auto it = suballocations2nd.begin(); it != suballocations2nd.end(); ++it) + if (it->type != VMA_SUBALLOCATION_TYPE_FREE) + DebugLogAllocation(it->offset, it->size, it->userData); +} + +VmaSuballocation& VmaBlockMetadata_Linear::FindSuballocation(VkDeviceSize offset) const +{ + const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + + VmaSuballocation refSuballoc; + refSuballoc.offset = offset; + // Rest of members stays uninitialized intentionally for better performance. + + // Item from the 1st vector. 
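+ // Binary search by offset: past the null-item prefix, the 1st vector is kept sorted by increasing offset.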
+ {
+ SuballocationVectorType::const_iterator it = VmaBinaryFindSorted(
+ suballocations1st.begin() + m_1stNullItemsBeginCount,
+ suballocations1st.end(),
+ refSuballoc,
+ VmaSuballocationOffsetLess());
+ if (it != suballocations1st.end())
+ {
+ return const_cast<VmaSuballocation&>(*it);
+ }
+ }
+
+ if (m_2ndVectorMode != SECOND_VECTOR_EMPTY)
+ {
+ // Rest of members stays uninitialized intentionally for better performance.
+ SuballocationVectorType::const_iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
+ VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
+ VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
+ if (it != suballocations2nd.end())
+ {
+ return const_cast<VmaSuballocation&>(*it);
+ }
+ }
+
+ VMA_ASSERT(0 && "Allocation not found in linear allocator!");
+ return const_cast<VmaSuballocation&>(suballocations1st.back()); // Should never occur.
+}
+
+bool VmaBlockMetadata_Linear::ShouldCompact1st() const
+{
+ const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
+ const size_t suballocCount = AccessSuballocations1st().size();
+ return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
+}
+
+void VmaBlockMetadata_Linear::CleanupAfterFree()
+{
+ SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+ SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+
+ if (IsEmpty())
+ {
+ suballocations1st.clear();
+ suballocations2nd.clear();
+ m_1stNullItemsBeginCount = 0;
+ m_1stNullItemsMiddleCount = 0;
+ m_2ndNullItemsCount = 0;
+ m_2ndVectorMode = SECOND_VECTOR_EMPTY;
+ }
+ else
+ {
+ const size_t suballoc1stCount = suballocations1st.size();
+ const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
+ VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
+
+ // Find more null items at the beginning of 1st vector.
+ while (m_1stNullItemsBeginCount < suballoc1stCount &&
+ suballocations1st[m_1stNullItemsBeginCount].type == VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ ++m_1stNullItemsBeginCount;
+ --m_1stNullItemsMiddleCount;
+ }
+
+ // Find more null items at the end of 1st vector.
+ while (m_1stNullItemsMiddleCount > 0 &&
+ suballocations1st.back().type == VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ --m_1stNullItemsMiddleCount;
+ suballocations1st.pop_back();
+ }
+
+ // Find more null items at the end of 2nd vector.
+ while (m_2ndNullItemsCount > 0 &&
+ suballocations2nd.back().type == VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ --m_2ndNullItemsCount;
+ suballocations2nd.pop_back();
+ }
+
+ // Find more null items at the beginning of 2nd vector.
+ while (m_2ndNullItemsCount > 0 &&
+ suballocations2nd[0].type == VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ --m_2ndNullItemsCount;
+ VmaVectorRemove(suballocations2nd, 0);
+ }
+
+ if (ShouldCompact1st())
+ {
+ const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
+ size_t srcIndex = m_1stNullItemsBeginCount;
+ for (size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
+ {
+ while (suballocations1st[srcIndex].type == VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ ++srcIndex;
+ }
+ if (dstIndex != srcIndex)
+ {
+ suballocations1st[dstIndex] = suballocations1st[srcIndex];
+ }
+ ++srcIndex;
+ }
+ suballocations1st.resize(nonNullItemCount);
+ m_1stNullItemsBeginCount = 0;
+ m_1stNullItemsMiddleCount = 0;
+ }
+
+ // 2nd vector became empty.
+ if (suballocations2nd.empty())
+ {
+ m_2ndVectorMode = SECOND_VECTOR_EMPTY;
+ }
+
+ // 1st vector became empty.
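+ // If the 2nd vector was acting as a ring buffer, it becomes the new 1st vector below by flipping m_1stVectorIndex.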
+ if (suballocations1st.size() - m_1stNullItemsBeginCount == 0) + { + suballocations1st.clear(); + m_1stNullItemsBeginCount = 0; + + if (!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + // Swap 1st with 2nd. Now 2nd is empty. + m_2ndVectorMode = SECOND_VECTOR_EMPTY; + m_1stNullItemsMiddleCount = m_2ndNullItemsCount; + while (m_1stNullItemsBeginCount < suballocations2nd.size() && + suballocations2nd[m_1stNullItemsBeginCount].type == VMA_SUBALLOCATION_TYPE_FREE) + { + ++m_1stNullItemsBeginCount; + --m_1stNullItemsMiddleCount; + } + m_2ndNullItemsCount = 0; + m_1stVectorIndex ^= 1; + } + } + } + + VMA_HEAVY_ASSERT(Validate()); +} + +bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) +{ + const VkDeviceSize blockSize = GetSize(); + const VkDeviceSize debugMargin = GetDebugMargin(); + const VkDeviceSize bufferImageGranularity = GetBufferImageGranularity(); + SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + + if (m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + // Try to allocate at the end of 1st vector. + + VkDeviceSize resultBaseOffset = 0; + if (!suballocations1st.empty()) + { + const VmaSuballocation& lastSuballoc = suballocations1st.back(); + resultBaseOffset = lastSuballoc.offset + lastSuballoc.size + debugMargin; + } + + // Start from offset equal to beginning of free space. + VkDeviceSize resultOffset = resultBaseOffset; + + // Apply alignment. + resultOffset = VmaAlignUp(resultOffset, allocAlignment); + + // Check previous suballocations for BufferImageGranularity conflicts. + // Make bigger alignment if necessary. + if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations1st.empty()) + { + bool bufferImageGranularityConflict = false; + for (size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; ) + { + const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex]; + if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType)) + { + bufferImageGranularityConflict = true; + break; + } + } + else + // Already on previous page. + break; + } + if (bufferImageGranularityConflict) + { + resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity); + } + } + + const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? + suballocations2nd.back().offset : blockSize; + + // There is enough free space at the end after alignment. + if (resultOffset + allocSize + debugMargin <= freeSpaceEnd) + { + // Check next suballocations for BufferImageGranularity conflicts. + // If conflict exists, allocation cannot be made here. 
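+ // The scan below is only needed when the request is not already aligned to whole granularity pages,
+ // i.e. when either the size or the chosen offset is not a multiple of bufferImageGranularity.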
+ if ((allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity) && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + for (size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; ) + { + const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex]; + if (VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type)) + { + return false; + } + } + else + { + // Already on previous page. + break; + } + } + } + + // All tests passed: Success. + pAllocationRequest->allocHandle = (VmaAllocHandle)(resultOffset + 1); + // pAllocationRequest->item, customData unused. + pAllocationRequest->type = VmaAllocationRequestType::EndOf1st; + return true; + } + } + + // Wrap-around to end of 2nd vector. Try to allocate there, watching for the + // beginning of 1st vector as the end of free space. + if (m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + VMA_ASSERT(!suballocations1st.empty()); + + VkDeviceSize resultBaseOffset = 0; + if (!suballocations2nd.empty()) + { + const VmaSuballocation& lastSuballoc = suballocations2nd.back(); + resultBaseOffset = lastSuballoc.offset + lastSuballoc.size + debugMargin; + } + + // Start from offset equal to beginning of free space. + VkDeviceSize resultOffset = resultBaseOffset; + + // Apply alignment. + resultOffset = VmaAlignUp(resultOffset, allocAlignment); + + // Check previous suballocations for BufferImageGranularity conflicts. + // Make bigger alignment if necessary. + if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty()) + { + bool bufferImageGranularityConflict = false; + for (size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; ) + { + const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex]; + if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType)) + { + bufferImageGranularityConflict = true; + break; + } + } + else + // Already on previous page. + break; + } + if (bufferImageGranularityConflict) + { + resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity); + } + } + + size_t index1st = m_1stNullItemsBeginCount; + + // There is enough free space at the end after alignment. + if ((index1st == suballocations1st.size() && resultOffset + allocSize + debugMargin <= blockSize) || + (index1st < suballocations1st.size() && resultOffset + allocSize + debugMargin <= suballocations1st[index1st].offset)) + { + // Check next suballocations for BufferImageGranularity conflicts. + // If conflict exists, allocation cannot be made here. + if (allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity) + { + for (size_t nextSuballocIndex = index1st; + nextSuballocIndex < suballocations1st.size(); + nextSuballocIndex++) + { + const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex]; + if (VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type)) + { + return false; + } + } + else + { + // Already on next page. + break; + } + } + } + + // All tests passed: Success. 
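+ // The returned handle encodes offset + 1, so offset 0 still maps to a non-null handle (see GetAllocationOffset()).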
+ pAllocationRequest->allocHandle = (VmaAllocHandle)(resultOffset + 1); + pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd; + // pAllocationRequest->item, customData unused. + return true; + } + } + + return false; +} + +bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) +{ + const VkDeviceSize blockSize = GetSize(); + const VkDeviceSize bufferImageGranularity = GetBufferImageGranularity(); + SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer."); + return false; + } + + // Try to allocate before 2nd.back(), or end of block if 2nd.empty(). + if (allocSize > blockSize) + { + return false; + } + VkDeviceSize resultBaseOffset = blockSize - allocSize; + if (!suballocations2nd.empty()) + { + const VmaSuballocation& lastSuballoc = suballocations2nd.back(); + resultBaseOffset = lastSuballoc.offset - allocSize; + if (allocSize > lastSuballoc.offset) + { + return false; + } + } + + // Start from offset equal to end of free space. + VkDeviceSize resultOffset = resultBaseOffset; + + const VkDeviceSize debugMargin = GetDebugMargin(); + + // Apply debugMargin at the end. + if (debugMargin > 0) + { + if (resultOffset < debugMargin) + { + return false; + } + resultOffset -= debugMargin; + } + + // Apply alignment. + resultOffset = VmaAlignDown(resultOffset, allocAlignment); + + // Check next suballocations from 2nd for BufferImageGranularity conflicts. + // Make bigger alignment if necessary. + if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty()) + { + bool bufferImageGranularityConflict = false; + for (size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; ) + { + const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex]; + if (VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType)) + { + bufferImageGranularityConflict = true; + break; + } + } + else + // Already on previous page. + break; + } + if (bufferImageGranularityConflict) + { + resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity); + } + } + + // There is enough free space. + const VkDeviceSize endOf1st = !suballocations1st.empty() ? + suballocations1st.back().offset + suballocations1st.back().size : + 0; + if (endOf1st + debugMargin <= resultOffset) + { + // Check previous suballocations for BufferImageGranularity conflicts. + // If conflict exists, allocation cannot be made here. + if (bufferImageGranularity > 1) + { + for (size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; ) + { + const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex]; + if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type)) + { + return false; + } + } + else + { + // Already on next page. + break; + } + } + } + + // All tests passed: Success. 
+ pAllocationRequest->allocHandle = (VmaAllocHandle)(resultOffset + 1); + // pAllocationRequest->item unused. + pAllocationRequest->type = VmaAllocationRequestType::UpperAddress; + return true; + } + + return false; +} +#endif // _VMA_BLOCK_METADATA_LINEAR_FUNCTIONS +#endif // _VMA_BLOCK_METADATA_LINEAR + +#ifndef _VMA_BLOCK_METADATA_TLSF +// To not search current larger region if first allocation won't succeed and skip to smaller range +// use with VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT as strategy in CreateAllocationRequest(). +// When fragmentation and reusal of previous blocks doesn't matter then use with +// VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT for fastest alloc time possible. +class VmaBlockMetadata_TLSF : public VmaBlockMetadata +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaBlockMetadata_TLSF) +public: + VmaBlockMetadata_TLSF(const VkAllocationCallbacks* pAllocationCallbacks, + VkDeviceSize bufferImageGranularity, bool isVirtual); + ~VmaBlockMetadata_TLSF() override; + + size_t GetAllocationCount() const override { return m_AllocCount; } + size_t GetFreeRegionsCount() const override { return m_BlocksFreeCount + 1; } + VkDeviceSize GetSumFreeSize() const override { return m_BlocksFreeSize + m_NullBlock->size; } + bool IsEmpty() const override { return m_NullBlock->offset == 0; } + VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle) const override { return ((Block*)allocHandle)->offset; } + + void Init(VkDeviceSize size) override; + bool Validate() const override; + + void AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const override; + void AddStatistics(VmaStatistics& inoutStats) const override; + +#if VMA_STATS_STRING_ENABLED + void PrintDetailedMap(class VmaJsonWriter& json) const override; +#endif + + bool CreateAllocationRequest( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + bool upperAddress, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) override; + + VkResult CheckCorruption(const void* pBlockData) override; + void Alloc( + const VmaAllocationRequest& request, + VmaSuballocationType type, + void* userData) override; + + void Free(VmaAllocHandle allocHandle) override; + void GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) override; + void* GetAllocationUserData(VmaAllocHandle allocHandle) const override; + VmaAllocHandle GetAllocationListBegin() const override; + VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc) const override; + VkDeviceSize GetNextFreeRegionSize(VmaAllocHandle alloc) const override; + void Clear() override; + void SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) override; + void DebugLogAllAllocations() const override; + +private: + // According to original paper it should be preferable 4 or 5: + // M. Masmano, I. Ripoll, A. Crespo, and J. 
Real "TLSF: a New Dynamic Memory Allocator for Real-Time Systems" + // http://www.gii.upv.es/tlsf/files/ecrts04_tlsf.pdf + static constexpr uint8_t SECOND_LEVEL_INDEX = 5; + static constexpr uint16_t SMALL_BUFFER_SIZE = 256; + static constexpr uint32_t INITIAL_BLOCK_ALLOC_COUNT = 16; + static constexpr uint8_t MEMORY_CLASS_SHIFT = 7; + static constexpr uint8_t MAX_MEMORY_CLASSES = 65 - MEMORY_CLASS_SHIFT; + + class Block + { + public: + VkDeviceSize offset; + VkDeviceSize size; + Block* prevPhysical; + Block* nextPhysical; + + void MarkFree() { prevFree = VMA_NULL; } + void MarkTaken() { prevFree = this; } + bool IsFree() const { return prevFree != this; } + void*& UserData() { VMA_HEAVY_ASSERT(!IsFree()); return userData; } + Block*& PrevFree() { return prevFree; } + Block*& NextFree() { VMA_HEAVY_ASSERT(IsFree()); return nextFree; } + + private: + Block* prevFree; // Address of the same block here indicates that block is taken + union + { + Block* nextFree; + void* userData; + }; + }; + + size_t m_AllocCount; + // Total number of free blocks besides null block + size_t m_BlocksFreeCount; + // Total size of free blocks excluding null block + VkDeviceSize m_BlocksFreeSize; + uint32_t m_IsFreeBitmap; + uint8_t m_MemoryClasses; + uint32_t m_InnerIsFreeBitmap[MAX_MEMORY_CLASSES]; + uint32_t m_ListsCount; + /* + * 0: 0-3 lists for small buffers + * 1+: 0-(2^SLI-1) lists for normal buffers + */ + Block** m_FreeList; + VmaPoolAllocator m_BlockAllocator; + Block* m_NullBlock; + VmaBlockBufferImageGranularity m_GranularityHandler; + + static uint8_t SizeToMemoryClass(VkDeviceSize size); + uint16_t SizeToSecondIndex(VkDeviceSize size, uint8_t memoryClass) const; + uint32_t GetListIndex(uint8_t memoryClass, uint16_t secondIndex) const; + uint32_t GetListIndex(VkDeviceSize size) const; + + void RemoveFreeBlock(Block* block); + void InsertFreeBlock(Block* block); + void MergeBlock(Block* block, Block* prev); + + Block* FindFreeBlock(VkDeviceSize size, uint32_t& listIndex) const; + bool CheckBlock( + Block& block, + uint32_t listIndex, + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + VmaSuballocationType allocType, + VmaAllocationRequest* pAllocationRequest); +}; + +#ifndef _VMA_BLOCK_METADATA_TLSF_FUNCTIONS +VmaBlockMetadata_TLSF::VmaBlockMetadata_TLSF(const VkAllocationCallbacks* pAllocationCallbacks, + VkDeviceSize bufferImageGranularity, bool isVirtual) + : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual), + m_AllocCount(0), + m_BlocksFreeCount(0), + m_BlocksFreeSize(0), + m_IsFreeBitmap(0), + m_MemoryClasses(0), + m_ListsCount(0), + m_FreeList(VMA_NULL), + m_BlockAllocator(pAllocationCallbacks, INITIAL_BLOCK_ALLOC_COUNT), + m_NullBlock(VMA_NULL), + m_GranularityHandler(bufferImageGranularity) {} + +VmaBlockMetadata_TLSF::~VmaBlockMetadata_TLSF() +{ + if (m_FreeList) + vma_delete_array(GetAllocationCallbacks(), m_FreeList, m_ListsCount); + m_GranularityHandler.Destroy(GetAllocationCallbacks()); +} + +void VmaBlockMetadata_TLSF::Init(VkDeviceSize size) +{ + VmaBlockMetadata::Init(size); + + if (!IsVirtual()) + m_GranularityHandler.Init(GetAllocationCallbacks(), size); + + m_NullBlock = m_BlockAllocator.Alloc(); + m_NullBlock->size = size; + m_NullBlock->offset = 0; + m_NullBlock->prevPhysical = VMA_NULL; + m_NullBlock->nextPhysical = VMA_NULL; + m_NullBlock->MarkFree(); + m_NullBlock->NextFree() = VMA_NULL; + m_NullBlock->PrevFree() = VMA_NULL; + uint8_t memoryClass = SizeToMemoryClass(size); + uint16_t sli = SizeToSecondIndex(size, memoryClass); + m_ListsCount = 
(memoryClass == 0 ? 0 : (memoryClass - 1) * (1UL << SECOND_LEVEL_INDEX) + sli) + 1; + if (IsVirtual()) + m_ListsCount += 1UL << SECOND_LEVEL_INDEX; + else + m_ListsCount += 4; + + m_MemoryClasses = memoryClass + uint8_t(2); + memset(m_InnerIsFreeBitmap, 0, MAX_MEMORY_CLASSES * sizeof(uint32_t)); + + m_FreeList = vma_new_array(GetAllocationCallbacks(), Block*, m_ListsCount); + memset(m_FreeList, 0, m_ListsCount * sizeof(Block*)); +} + +bool VmaBlockMetadata_TLSF::Validate() const +{ + VMA_VALIDATE(GetSumFreeSize() <= GetSize()); + + VkDeviceSize calculatedSize = m_NullBlock->size; + VkDeviceSize calculatedFreeSize = m_NullBlock->size; + size_t allocCount = 0; + size_t freeCount = 0; + + // Check integrity of free lists + for (uint32_t list = 0; list < m_ListsCount; ++list) + { + Block* block = m_FreeList[list]; + if (block != VMA_NULL) + { + VMA_VALIDATE(block->IsFree()); + VMA_VALIDATE(block->PrevFree() == VMA_NULL); + while (block->NextFree()) + { + VMA_VALIDATE(block->NextFree()->IsFree()); + VMA_VALIDATE(block->NextFree()->PrevFree() == block); + block = block->NextFree(); + } + } + } + + VkDeviceSize nextOffset = m_NullBlock->offset; + auto validateCtx = m_GranularityHandler.StartValidation(GetAllocationCallbacks(), IsVirtual()); + + VMA_VALIDATE(m_NullBlock->nextPhysical == VMA_NULL); + if (m_NullBlock->prevPhysical) + { + VMA_VALIDATE(m_NullBlock->prevPhysical->nextPhysical == m_NullBlock); + } + // Check all blocks + for (Block* prev = m_NullBlock->prevPhysical; prev != VMA_NULL; prev = prev->prevPhysical) + { + VMA_VALIDATE(prev->offset + prev->size == nextOffset); + nextOffset = prev->offset; + calculatedSize += prev->size; + + uint32_t listIndex = GetListIndex(prev->size); + if (prev->IsFree()) + { + ++freeCount; + // Check if free block belongs to free list + Block* freeBlock = m_FreeList[listIndex]; + VMA_VALIDATE(freeBlock != VMA_NULL); + + bool found = false; + do + { + if (freeBlock == prev) + found = true; + + freeBlock = freeBlock->NextFree(); + } while (!found && freeBlock != VMA_NULL); + + VMA_VALIDATE(found); + calculatedFreeSize += prev->size; + } + else + { + ++allocCount; + // Check if taken block is not on a free list + Block* freeBlock = m_FreeList[listIndex]; + while (freeBlock) + { + VMA_VALIDATE(freeBlock != prev); + freeBlock = freeBlock->NextFree(); + } + + if (!IsVirtual()) + { + VMA_VALIDATE(m_GranularityHandler.Validate(validateCtx, prev->offset, prev->size)); + } + } + + if (prev->prevPhysical) + { + VMA_VALIDATE(prev->prevPhysical->nextPhysical == prev); + } + } + + if (!IsVirtual()) + { + VMA_VALIDATE(m_GranularityHandler.FinishValidation(validateCtx)); + } + + VMA_VALIDATE(nextOffset == 0); + VMA_VALIDATE(calculatedSize == GetSize()); + VMA_VALIDATE(calculatedFreeSize == GetSumFreeSize()); + VMA_VALIDATE(allocCount == m_AllocCount); + VMA_VALIDATE(freeCount == m_BlocksFreeCount); + + return true; +} + +void VmaBlockMetadata_TLSF::AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const +{ + inoutStats.statistics.blockCount++; + inoutStats.statistics.blockBytes += GetSize(); + if (m_NullBlock->size > 0) + VmaAddDetailedStatisticsUnusedRange(inoutStats, m_NullBlock->size); + + for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical) + { + if (block->IsFree()) + VmaAddDetailedStatisticsUnusedRange(inoutStats, block->size); + else + VmaAddDetailedStatisticsAllocation(inoutStats, block->size); + } +} + +void VmaBlockMetadata_TLSF::AddStatistics(VmaStatistics& inoutStats) const +{ + inoutStats.blockCount++; + 
inoutStats.allocationCount += (uint32_t)m_AllocCount;
+    inoutStats.blockBytes += GetSize();
+    inoutStats.allocationBytes += GetSize() - GetSumFreeSize();
+}
+
+#if VMA_STATS_STRING_ENABLED
+void VmaBlockMetadata_TLSF::PrintDetailedMap(class VmaJsonWriter& json) const
+{
+    size_t blockCount = m_AllocCount + m_BlocksFreeCount;
+    VmaStlAllocator<Block*> allocator(GetAllocationCallbacks());
+    VmaVector<Block*, VmaStlAllocator<Block*>> blockList(blockCount, allocator);
+
+    size_t i = blockCount;
+    for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical)
+    {
+        blockList[--i] = block;
+    }
+    VMA_ASSERT(i == 0);
+
+    VmaDetailedStatistics stats;
+    VmaClearDetailedStatistics(stats);
+    AddDetailedStatistics(stats);
+
+    PrintDetailedMap_Begin(json,
+        stats.statistics.blockBytes - stats.statistics.allocationBytes,
+        stats.statistics.allocationCount,
+        stats.unusedRangeCount);
+
+    for (; i < blockCount; ++i)
+    {
+        Block* block = blockList[i];
+        if (block->IsFree())
+            PrintDetailedMap_UnusedRange(json, block->offset, block->size);
+        else
+            PrintDetailedMap_Allocation(json, block->offset, block->size, block->UserData());
+    }
+    if (m_NullBlock->size > 0)
+        PrintDetailedMap_UnusedRange(json, m_NullBlock->offset, m_NullBlock->size);
+
+    PrintDetailedMap_End(json);
+}
+#endif
+
+bool VmaBlockMetadata_TLSF::CreateAllocationRequest(
+    VkDeviceSize allocSize,
+    VkDeviceSize allocAlignment,
+    bool upperAddress,
+    VmaSuballocationType allocType,
+    uint32_t strategy,
+    VmaAllocationRequest* pAllocationRequest)
+{
+    VMA_ASSERT(allocSize > 0 && "Cannot allocate empty block!");
+    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
+
+    // For small granularity round up
+    if (!IsVirtual())
+        m_GranularityHandler.RoundupAllocRequest(allocType, allocSize, allocAlignment);
+
+    allocSize += GetDebugMargin();
+    // Quick check for too small pool
+    if (allocSize > GetSumFreeSize())
+        return false;
+
+    // If no free blocks in pool then check only null block
+    if (m_BlocksFreeCount == 0)
+        return CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest);
+
+    // Round up to the next block
+    VkDeviceSize sizeForNextList = allocSize;
+    VkDeviceSize smallSizeStep = VkDeviceSize(SMALL_BUFFER_SIZE / (IsVirtual() ?
1U << SECOND_LEVEL_INDEX : 4U)); + if (allocSize > SMALL_BUFFER_SIZE) + { + sizeForNextList += (1ULL << (VMA_BITSCAN_MSB(allocSize) - SECOND_LEVEL_INDEX)); + } + else if (allocSize > SMALL_BUFFER_SIZE - smallSizeStep) + sizeForNextList = SMALL_BUFFER_SIZE + 1; + else + sizeForNextList += smallSizeStep; + + uint32_t nextListIndex = m_ListsCount; + uint32_t prevListIndex = m_ListsCount; + Block* nextListBlock = VMA_NULL; + Block* prevListBlock = VMA_NULL; + + // Check blocks according to strategies + if (strategy & VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT) + { + // Quick check for larger block first + nextListBlock = FindFreeBlock(sizeForNextList, nextListIndex); + if (nextListBlock != VMA_NULL && CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + + // If not fitted then null block + if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + + // Null block failed, search larger bucket + while (nextListBlock) + { + if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + nextListBlock = nextListBlock->NextFree(); + } + + // Failed again, check best fit bucket + prevListBlock = FindFreeBlock(allocSize, prevListIndex); + while (prevListBlock) + { + if (CheckBlock(*prevListBlock, prevListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + prevListBlock = prevListBlock->NextFree(); + } + } + else if (strategy & VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT) + { + // Check best fit bucket + prevListBlock = FindFreeBlock(allocSize, prevListIndex); + while (prevListBlock) + { + if (CheckBlock(*prevListBlock, prevListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + prevListBlock = prevListBlock->NextFree(); + } + + // If failed check null block + if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + + // Check larger bucket + nextListBlock = FindFreeBlock(sizeForNextList, nextListIndex); + while (nextListBlock) + { + if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + nextListBlock = nextListBlock->NextFree(); + } + } + else if (strategy & VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT ) + { + // Perform search from the start + VmaStlAllocator allocator(GetAllocationCallbacks()); + VmaVector> blockList(m_BlocksFreeCount, allocator); + + size_t i = m_BlocksFreeCount; + for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical) + { + if (block->IsFree() && block->size >= allocSize) + blockList[--i] = block; + } + + for (; i < m_BlocksFreeCount; ++i) + { + Block& block = *blockList[i]; + if (CheckBlock(block, GetListIndex(block.size), allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + } + + // If failed check null block + if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + + // Whole range searched, no more memory + return false; + } + else + { + // Check larger bucket + nextListBlock = FindFreeBlock(sizeForNextList, nextListIndex); + while (nextListBlock) + { + if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + nextListBlock = nextListBlock->NextFree(); + } + + // If failed check null block + if 
(CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + + // Check best fit bucket + prevListBlock = FindFreeBlock(allocSize, prevListIndex); + while (prevListBlock) + { + if (CheckBlock(*prevListBlock, prevListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + prevListBlock = prevListBlock->NextFree(); + } + } + + // Worst case, full search has to be done + while (++nextListIndex < m_ListsCount) + { + nextListBlock = m_FreeList[nextListIndex]; + while (nextListBlock) + { + if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + nextListBlock = nextListBlock->NextFree(); + } + } + + // No more memory sadly + return false; +} + +VkResult VmaBlockMetadata_TLSF::CheckCorruption(const void* pBlockData) +{ + for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical) + { + if (!block->IsFree()) + { + if (!VmaValidateMagicValue(pBlockData, block->offset + block->size)) + { + VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!"); + return VK_ERROR_UNKNOWN_COPY; + } + } + } + + return VK_SUCCESS; +} + +void VmaBlockMetadata_TLSF::Alloc( + const VmaAllocationRequest& request, + VmaSuballocationType type, + void* userData) +{ + VMA_ASSERT(request.type == VmaAllocationRequestType::TLSF); + + // Get block and pop it from the free list + Block* currentBlock = (Block*)request.allocHandle; + VkDeviceSize offset = request.algorithmData; + VMA_ASSERT(currentBlock != VMA_NULL); + VMA_ASSERT(currentBlock->offset <= offset); + + if (currentBlock != m_NullBlock) + RemoveFreeBlock(currentBlock); + + VkDeviceSize debugMargin = GetDebugMargin(); + VkDeviceSize missingAlignment = offset - currentBlock->offset; + + // Append missing alignment to prev block or create new one + if (missingAlignment) + { + Block* prevBlock = currentBlock->prevPhysical; + VMA_ASSERT(prevBlock != VMA_NULL && "There should be no missing alignment at offset 0!"); + + if (prevBlock->IsFree() && prevBlock->size != debugMargin) + { + uint32_t oldList = GetListIndex(prevBlock->size); + prevBlock->size += missingAlignment; + // Check if new size crosses list bucket + if (oldList != GetListIndex(prevBlock->size)) + { + prevBlock->size -= missingAlignment; + RemoveFreeBlock(prevBlock); + prevBlock->size += missingAlignment; + InsertFreeBlock(prevBlock); + } + else + m_BlocksFreeSize += missingAlignment; + } + else + { + Block* newBlock = m_BlockAllocator.Alloc(); + currentBlock->prevPhysical = newBlock; + prevBlock->nextPhysical = newBlock; + newBlock->prevPhysical = prevBlock; + newBlock->nextPhysical = currentBlock; + newBlock->size = missingAlignment; + newBlock->offset = currentBlock->offset; + newBlock->MarkTaken(); + + InsertFreeBlock(newBlock); + } + + currentBlock->size -= missingAlignment; + currentBlock->offset += missingAlignment; + } + + VkDeviceSize size = request.size + debugMargin; + if (currentBlock->size == size) + { + if (currentBlock == m_NullBlock) + { + // Setup new null block + m_NullBlock = m_BlockAllocator.Alloc(); + m_NullBlock->size = 0; + m_NullBlock->offset = currentBlock->offset + size; + m_NullBlock->prevPhysical = currentBlock; + m_NullBlock->nextPhysical = VMA_NULL; + m_NullBlock->MarkFree(); + m_NullBlock->PrevFree() = VMA_NULL; + m_NullBlock->NextFree() = VMA_NULL; + currentBlock->nextPhysical = m_NullBlock; + currentBlock->MarkTaken(); + } + } + else + { + VMA_ASSERT(currentBlock->size > size && 
"Proper block already found, shouldn't find smaller one!"); + + // Create new free block + Block* newBlock = m_BlockAllocator.Alloc(); + newBlock->size = currentBlock->size - size; + newBlock->offset = currentBlock->offset + size; + newBlock->prevPhysical = currentBlock; + newBlock->nextPhysical = currentBlock->nextPhysical; + currentBlock->nextPhysical = newBlock; + currentBlock->size = size; + + if (currentBlock == m_NullBlock) + { + m_NullBlock = newBlock; + m_NullBlock->MarkFree(); + m_NullBlock->NextFree() = VMA_NULL; + m_NullBlock->PrevFree() = VMA_NULL; + currentBlock->MarkTaken(); + } + else + { + newBlock->nextPhysical->prevPhysical = newBlock; + newBlock->MarkTaken(); + InsertFreeBlock(newBlock); + } + } + currentBlock->UserData() = userData; + + if (debugMargin > 0) + { + currentBlock->size -= debugMargin; + Block* newBlock = m_BlockAllocator.Alloc(); + newBlock->size = debugMargin; + newBlock->offset = currentBlock->offset + currentBlock->size; + newBlock->prevPhysical = currentBlock; + newBlock->nextPhysical = currentBlock->nextPhysical; + newBlock->MarkTaken(); + currentBlock->nextPhysical->prevPhysical = newBlock; + currentBlock->nextPhysical = newBlock; + InsertFreeBlock(newBlock); + } + + if (!IsVirtual()) + m_GranularityHandler.AllocPages((uint8_t)(uintptr_t)request.customData, + currentBlock->offset, currentBlock->size); + ++m_AllocCount; +} + +void VmaBlockMetadata_TLSF::Free(VmaAllocHandle allocHandle) +{ + Block* block = (Block*)allocHandle; + Block* next = block->nextPhysical; + VMA_ASSERT(!block->IsFree() && "Block is already free!"); + + if (!IsVirtual()) + m_GranularityHandler.FreePages(block->offset, block->size); + --m_AllocCount; + + VkDeviceSize debugMargin = GetDebugMargin(); + if (debugMargin > 0) + { + RemoveFreeBlock(next); + MergeBlock(next, block); + block = next; + next = next->nextPhysical; + } + + // Try merging + Block* prev = block->prevPhysical; + if (prev != VMA_NULL && prev->IsFree() && prev->size != debugMargin) + { + RemoveFreeBlock(prev); + MergeBlock(block, prev); + } + + if (!next->IsFree()) + InsertFreeBlock(block); + else if (next == m_NullBlock) + MergeBlock(m_NullBlock, block); + else + { + RemoveFreeBlock(next); + MergeBlock(next, block); + InsertFreeBlock(next); + } +} + +void VmaBlockMetadata_TLSF::GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) +{ + Block* block = (Block*)allocHandle; + VMA_ASSERT(!block->IsFree() && "Cannot get allocation info for free block!"); + outInfo.offset = block->offset; + outInfo.size = block->size; + outInfo.pUserData = block->UserData(); +} + +void* VmaBlockMetadata_TLSF::GetAllocationUserData(VmaAllocHandle allocHandle) const +{ + Block* block = (Block*)allocHandle; + VMA_ASSERT(!block->IsFree() && "Cannot get user data for free block!"); + return block->UserData(); +} + +VmaAllocHandle VmaBlockMetadata_TLSF::GetAllocationListBegin() const +{ + if (m_AllocCount == 0) + return VK_NULL_HANDLE; + + for (Block* block = m_NullBlock->prevPhysical; block; block = block->prevPhysical) + { + if (!block->IsFree()) + return (VmaAllocHandle)block; + } + VMA_ASSERT(false && "If m_AllocCount > 0 then should find any allocation!"); + return VK_NULL_HANDLE; +} + +VmaAllocHandle VmaBlockMetadata_TLSF::GetNextAllocation(VmaAllocHandle prevAlloc) const +{ + Block* startBlock = (Block*)prevAlloc; + VMA_ASSERT(!startBlock->IsFree() && "Incorrect block!"); + + for (Block* block = startBlock->prevPhysical; block; block = block->prevPhysical) + { + if (!block->IsFree()) + return 
(VmaAllocHandle)block; + } + return VK_NULL_HANDLE; +} + +VkDeviceSize VmaBlockMetadata_TLSF::GetNextFreeRegionSize(VmaAllocHandle alloc) const +{ + Block* block = (Block*)alloc; + VMA_ASSERT(!block->IsFree() && "Incorrect block!"); + + if (block->prevPhysical) + return block->prevPhysical->IsFree() ? block->prevPhysical->size : 0; + return 0; +} + +void VmaBlockMetadata_TLSF::Clear() +{ + m_AllocCount = 0; + m_BlocksFreeCount = 0; + m_BlocksFreeSize = 0; + m_IsFreeBitmap = 0; + m_NullBlock->offset = 0; + m_NullBlock->size = GetSize(); + Block* block = m_NullBlock->prevPhysical; + m_NullBlock->prevPhysical = VMA_NULL; + while (block) + { + Block* prev = block->prevPhysical; + m_BlockAllocator.Free(block); + block = prev; + } + memset(m_FreeList, 0, m_ListsCount * sizeof(Block*)); + memset(m_InnerIsFreeBitmap, 0, m_MemoryClasses * sizeof(uint32_t)); + m_GranularityHandler.Clear(); +} + +void VmaBlockMetadata_TLSF::SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) +{ + Block* block = (Block*)allocHandle; + VMA_ASSERT(!block->IsFree() && "Trying to set user data for not allocated block!"); + block->UserData() = userData; +} + +void VmaBlockMetadata_TLSF::DebugLogAllAllocations() const +{ + for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical) + if (!block->IsFree()) + DebugLogAllocation(block->offset, block->size, block->UserData()); +} + +uint8_t VmaBlockMetadata_TLSF::SizeToMemoryClass(VkDeviceSize size) +{ + if (size > SMALL_BUFFER_SIZE) + return uint8_t(VMA_BITSCAN_MSB(size) - MEMORY_CLASS_SHIFT); + return 0; +} + +uint16_t VmaBlockMetadata_TLSF::SizeToSecondIndex(VkDeviceSize size, uint8_t memoryClass) const +{ + if (memoryClass == 0) + { + if (IsVirtual()) + return static_cast((size - 1) / 8); + return static_cast((size - 1) / 64); + } + return static_cast((size >> (memoryClass + MEMORY_CLASS_SHIFT - SECOND_LEVEL_INDEX)) ^ (1U << SECOND_LEVEL_INDEX)); +} + +uint32_t VmaBlockMetadata_TLSF::GetListIndex(uint8_t memoryClass, uint16_t secondIndex) const +{ + if (memoryClass == 0) + return secondIndex; + + const uint32_t index = static_cast(memoryClass - 1) * (1 << SECOND_LEVEL_INDEX) + secondIndex; + if (IsVirtual()) + return index + (1 << SECOND_LEVEL_INDEX); + return index + 4; +} + +uint32_t VmaBlockMetadata_TLSF::GetListIndex(VkDeviceSize size) const +{ + uint8_t memoryClass = SizeToMemoryClass(size); + return GetListIndex(memoryClass, SizeToSecondIndex(size, memoryClass)); +} + +void VmaBlockMetadata_TLSF::RemoveFreeBlock(Block* block) +{ + VMA_ASSERT(block != m_NullBlock); + VMA_ASSERT(block->IsFree()); + + if (block->NextFree() != VMA_NULL) + block->NextFree()->PrevFree() = block->PrevFree(); + if (block->PrevFree() != VMA_NULL) + block->PrevFree()->NextFree() = block->NextFree(); + else + { + uint8_t memClass = SizeToMemoryClass(block->size); + uint16_t secondIndex = SizeToSecondIndex(block->size, memClass); + uint32_t index = GetListIndex(memClass, secondIndex); + VMA_ASSERT(m_FreeList[index] == block); + m_FreeList[index] = block->NextFree(); + if (block->NextFree() == VMA_NULL) + { + m_InnerIsFreeBitmap[memClass] &= ~(1U << secondIndex); + if (m_InnerIsFreeBitmap[memClass] == 0) + m_IsFreeBitmap &= ~(1UL << memClass); + } + } + block->MarkTaken(); + block->UserData() = VMA_NULL; + --m_BlocksFreeCount; + m_BlocksFreeSize -= block->size; +} + +void VmaBlockMetadata_TLSF::InsertFreeBlock(Block* block) +{ + VMA_ASSERT(block != m_NullBlock); + VMA_ASSERT(!block->IsFree() && "Cannot insert block twice!"); + + uint8_t memClass = 
SizeToMemoryClass(block->size);
+    uint16_t secondIndex = SizeToSecondIndex(block->size, memClass);
+    uint32_t index = GetListIndex(memClass, secondIndex);
+    VMA_ASSERT(index < m_ListsCount);
+    block->PrevFree() = VMA_NULL;
+    block->NextFree() = m_FreeList[index];
+    m_FreeList[index] = block;
+    if (block->NextFree() != VMA_NULL)
+        block->NextFree()->PrevFree() = block;
+    else
+    {
+        m_InnerIsFreeBitmap[memClass] |= 1U << secondIndex;
+        m_IsFreeBitmap |= 1UL << memClass;
+    }
+    ++m_BlocksFreeCount;
+    m_BlocksFreeSize += block->size;
+}
+
+void VmaBlockMetadata_TLSF::MergeBlock(Block* block, Block* prev)
+{
+    VMA_ASSERT(block->prevPhysical == prev && "Cannot merge separate physical regions!");
+    VMA_ASSERT(!prev->IsFree() && "Cannot merge block that belongs to free list!");
+
+    block->offset = prev->offset;
+    block->size += prev->size;
+    block->prevPhysical = prev->prevPhysical;
+    if (block->prevPhysical)
+        block->prevPhysical->nextPhysical = block;
+    m_BlockAllocator.Free(prev);
+}
+
+VmaBlockMetadata_TLSF::Block* VmaBlockMetadata_TLSF::FindFreeBlock(VkDeviceSize size, uint32_t& listIndex) const
+{
+    uint8_t memoryClass = SizeToMemoryClass(size);
+    uint32_t innerFreeMap = m_InnerIsFreeBitmap[memoryClass] & (~0U << SizeToSecondIndex(size, memoryClass));
+    if (!innerFreeMap)
+    {
+        // Check higher levels for available blocks
+        uint32_t freeMap = m_IsFreeBitmap & (~0UL << (memoryClass + 1));
+        if (!freeMap)
+            return VMA_NULL; // No more memory available
+
+        // Find lowest free region
+        memoryClass = VMA_BITSCAN_LSB(freeMap);
+        innerFreeMap = m_InnerIsFreeBitmap[memoryClass];
+        VMA_ASSERT(innerFreeMap != 0);
+    }
+    // Find lowest free subregion
+    listIndex = GetListIndex(memoryClass, VMA_BITSCAN_LSB(innerFreeMap));
+    VMA_ASSERT(m_FreeList[listIndex]);
+    return m_FreeList[listIndex];
+}
+
+bool VmaBlockMetadata_TLSF::CheckBlock(
+    Block& block,
+    uint32_t listIndex,
+    VkDeviceSize allocSize,
+    VkDeviceSize allocAlignment,
+    VmaSuballocationType allocType,
+    VmaAllocationRequest* pAllocationRequest)
+{
+    VMA_ASSERT(block.IsFree() && "Block is already taken!");
+
+    VkDeviceSize alignedOffset = VmaAlignUp(block.offset, allocAlignment);
+    if (block.size < allocSize + alignedOffset - block.offset)
+        return false;
+
+    // Check for granularity conflicts
+    if (!IsVirtual() &&
+        m_GranularityHandler.CheckConflictAndAlignUp(alignedOffset, allocSize, block.offset, block.size, allocType))
+        return false;
+
+    // Alloc successful
+    pAllocationRequest->type = VmaAllocationRequestType::TLSF;
+    pAllocationRequest->allocHandle = (VmaAllocHandle)&block;
+    pAllocationRequest->size = allocSize - GetDebugMargin();
+    pAllocationRequest->customData = (void*)allocType;
+    pAllocationRequest->algorithmData = alignedOffset;
+
+    // Place block at the start of list if it's normal block
+    if (listIndex != m_ListsCount && block.PrevFree())
+    {
+        block.PrevFree()->NextFree() = block.NextFree();
+        if (block.NextFree())
+            block.NextFree()->PrevFree() = block.PrevFree();
+        block.PrevFree() = VMA_NULL;
+        block.NextFree() = m_FreeList[listIndex];
+        m_FreeList[listIndex] = &block;
+        if (block.NextFree())
+            block.NextFree()->PrevFree() = &block;
+    }
+
+    return true;
+}
+#endif // _VMA_BLOCK_METADATA_TLSF_FUNCTIONS
+#endif // _VMA_BLOCK_METADATA_TLSF
+
+#ifndef _VMA_BLOCK_VECTOR
+/*
+Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific
+Vulkan memory type.
+
+Synchronized internally with a mutex.
+*/ +class VmaBlockVector +{ + friend struct VmaDefragmentationContext_T; + VMA_CLASS_NO_COPY_NO_MOVE(VmaBlockVector) +public: + VmaBlockVector( + VmaAllocator hAllocator, + VmaPool hParentPool, + uint32_t memoryTypeIndex, + VkDeviceSize preferredBlockSize, + size_t minBlockCount, + size_t maxBlockCount, + VkDeviceSize bufferImageGranularity, + bool explicitBlockSize, + uint32_t algorithm, + float priority, + VkDeviceSize minAllocationAlignment, + void* pMemoryAllocateNext); + ~VmaBlockVector(); + + VmaAllocator GetAllocator() const { return m_hAllocator; } + VmaPool GetParentPool() const { return m_hParentPool; } + bool IsCustomPool() const { return m_hParentPool != VMA_NULL; } + uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; } + VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; } + VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; } + uint32_t GetAlgorithm() const { return m_Algorithm; } + bool HasExplicitBlockSize() const { return m_ExplicitBlockSize; } + float GetPriority() const { return m_Priority; } + const void* GetAllocationNextPtr() const { return m_pMemoryAllocateNext; } + // To be used only while the m_Mutex is locked. Used during defragmentation. + size_t GetBlockCount() const { return m_Blocks.size(); } + // To be used only while the m_Mutex is locked. Used during defragmentation. + VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; } + VMA_RW_MUTEX &GetMutex() { return m_Mutex; } + + VkResult CreateMinBlocks(); + void AddStatistics(VmaStatistics& inoutStats); + void AddDetailedStatistics(VmaDetailedStatistics& inoutStats); + bool IsEmpty(); + bool IsCorruptionDetectionEnabled() const; + + VkResult Allocate( + VkDeviceSize size, + VkDeviceSize alignment, + const VmaAllocationCreateInfo& createInfo, + VmaSuballocationType suballocType, + size_t allocationCount, + VmaAllocation* pAllocations); + + void Free(VmaAllocation hAllocation); + +#if VMA_STATS_STRING_ENABLED + void PrintDetailedMap(class VmaJsonWriter& json); +#endif + + VkResult CheckCorruption(); + +private: + const VmaAllocator m_hAllocator; + const VmaPool m_hParentPool; + const uint32_t m_MemoryTypeIndex; + const VkDeviceSize m_PreferredBlockSize; + const size_t m_MinBlockCount; + const size_t m_MaxBlockCount; + const VkDeviceSize m_BufferImageGranularity; + const bool m_ExplicitBlockSize; + const uint32_t m_Algorithm; + const float m_Priority; + const VkDeviceSize m_MinAllocationAlignment; + + void* const m_pMemoryAllocateNext; + VMA_RW_MUTEX m_Mutex; + // Incrementally sorted by sumFreeSize, ascending. + VmaVector> m_Blocks; + uint32_t m_NextBlockId; + bool m_IncrementalSort = true; + + void SetIncrementalSort(bool val) { m_IncrementalSort = val; } + + VkDeviceSize CalcMaxBlockSize() const; + // Finds and removes given block from vector. + void Remove(VmaDeviceMemoryBlock* pBlock); + // Performs single step in sorting m_Blocks. They may not be fully sorted + // after this call. 
+ void IncrementallySortBlocks(); + void SortByFreeSize(); + + VkResult AllocatePage( + VkDeviceSize size, + VkDeviceSize alignment, + const VmaAllocationCreateInfo& createInfo, + VmaSuballocationType suballocType, + VmaAllocation* pAllocation); + + VkResult AllocateFromBlock( + VmaDeviceMemoryBlock* pBlock, + VkDeviceSize size, + VkDeviceSize alignment, + VmaAllocationCreateFlags allocFlags, + void* pUserData, + VmaSuballocationType suballocType, + uint32_t strategy, + VmaAllocation* pAllocation); + + VkResult CommitAllocationRequest( + VmaAllocationRequest& allocRequest, + VmaDeviceMemoryBlock* pBlock, + VkDeviceSize alignment, + VmaAllocationCreateFlags allocFlags, + void* pUserData, + VmaSuballocationType suballocType, + VmaAllocation* pAllocation); + + VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex); + bool HasEmptyBlock(); +}; +#endif // _VMA_BLOCK_VECTOR + +#ifndef _VMA_DEFRAGMENTATION_CONTEXT +struct VmaDefragmentationContext_T +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaDefragmentationContext_T) +public: + VmaDefragmentationContext_T( + VmaAllocator hAllocator, + const VmaDefragmentationInfo& info); + ~VmaDefragmentationContext_T(); + + void GetStats(VmaDefragmentationStats& outStats) { outStats = m_GlobalStats; } + + VkResult DefragmentPassBegin(VmaDefragmentationPassMoveInfo& moveInfo); + VkResult DefragmentPassEnd(VmaDefragmentationPassMoveInfo& moveInfo); + +private: + // Max number of allocations to ignore due to size constraints before ending single pass + static constexpr uint8_t MAX_ALLOCS_TO_IGNORE = 16; + enum class CounterStatus { Pass, Ignore, End }; + + struct FragmentedBlock + { + uint32_t data; + VmaDeviceMemoryBlock* block; + }; + struct StateBalanced + { + VkDeviceSize avgFreeSize = 0; + VkDeviceSize avgAllocSize = UINT64_MAX; + }; + struct StateExtensive + { + enum class Operation : uint8_t + { + FindFreeBlockBuffer, FindFreeBlockTexture, FindFreeBlockAll, + MoveBuffers, MoveTextures, MoveAll, + Cleanup, Done + }; + + Operation operation = Operation::FindFreeBlockTexture; + size_t firstFreeBlock = SIZE_MAX; + }; + struct MoveAllocationData + { + VkDeviceSize size; + VkDeviceSize alignment; + VmaSuballocationType type; + VmaAllocationCreateFlags flags; + VmaDefragmentationMove move = {}; + }; + + const VkDeviceSize m_MaxPassBytes; + const uint32_t m_MaxPassAllocations; + const PFN_vmaCheckDefragmentationBreakFunction m_BreakCallback; + void* m_BreakCallbackUserData; + + VmaStlAllocator m_MoveAllocator; + VmaVector> m_Moves; + + uint8_t m_IgnoredAllocs = 0; + uint32_t m_Algorithm; + uint32_t m_BlockVectorCount; + VmaBlockVector* m_PoolBlockVector; + VmaBlockVector** m_pBlockVectors; + size_t m_ImmovableBlockCount = 0; + VmaDefragmentationStats m_GlobalStats = { 0 }; + VmaDefragmentationStats m_PassStats = { 0 }; + void* m_AlgorithmState = VMA_NULL; + + static MoveAllocationData GetMoveData(VmaAllocHandle handle, VmaBlockMetadata* metadata); + CounterStatus CheckCounters(VkDeviceSize bytes); + bool IncrementCounters(VkDeviceSize bytes); + bool ReallocWithinBlock(VmaBlockVector& vector, VmaDeviceMemoryBlock* block); + bool AllocInOtherBlock(size_t start, size_t end, MoveAllocationData& data, VmaBlockVector& vector); + + bool ComputeDefragmentation(VmaBlockVector& vector, size_t index); + bool ComputeDefragmentation_Fast(VmaBlockVector& vector); + bool ComputeDefragmentation_Balanced(VmaBlockVector& vector, size_t index, bool update); + bool ComputeDefragmentation_Full(VmaBlockVector& vector); + bool ComputeDefragmentation_Extensive(VmaBlockVector& vector, 
size_t index); + + static void UpdateVectorStatistics(VmaBlockVector& vector, StateBalanced& state); + bool MoveDataToFreeBlocks(VmaSuballocationType currentType, + VmaBlockVector& vector, size_t firstFreeBlock, + bool& texturePresent, bool& bufferPresent, bool& otherPresent); +}; +#endif // _VMA_DEFRAGMENTATION_CONTEXT + +#ifndef _VMA_POOL_T +struct VmaPool_T +{ + friend struct VmaPoolListItemTraits; + VMA_CLASS_NO_COPY_NO_MOVE(VmaPool_T) +public: + VmaBlockVector m_BlockVector; + VmaDedicatedAllocationList m_DedicatedAllocations; + + VmaPool_T( + VmaAllocator hAllocator, + const VmaPoolCreateInfo& createInfo, + VkDeviceSize preferredBlockSize); + ~VmaPool_T(); + + uint32_t GetId() const { return m_Id; } + void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; } + + const char* GetName() const { return m_Name; } + void SetName(const char* pName); + +#if VMA_STATS_STRING_ENABLED + //void PrintDetailedMap(class VmaStringBuilder& sb); +#endif + +private: + uint32_t m_Id; + char* m_Name; + VmaPool_T* m_PrevPool = VMA_NULL; + VmaPool_T* m_NextPool = VMA_NULL; +}; + +struct VmaPoolListItemTraits +{ + typedef VmaPool_T ItemType; + + static ItemType* GetPrev(const ItemType* item) { return item->m_PrevPool; } + static ItemType* GetNext(const ItemType* item) { return item->m_NextPool; } + static ItemType*& AccessPrev(ItemType* item) { return item->m_PrevPool; } + static ItemType*& AccessNext(ItemType* item) { return item->m_NextPool; } +}; +#endif // _VMA_POOL_T + +#ifndef _VMA_CURRENT_BUDGET_DATA +struct VmaCurrentBudgetData +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaCurrentBudgetData) +public: + + VMA_ATOMIC_UINT32 m_BlockCount[VK_MAX_MEMORY_HEAPS]; + VMA_ATOMIC_UINT32 m_AllocationCount[VK_MAX_MEMORY_HEAPS]; + VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS]; + VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS]; + +#if VMA_MEMORY_BUDGET + VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch; + VMA_RW_MUTEX m_BudgetMutex; + uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS]; + uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS]; + uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS]; +#endif // VMA_MEMORY_BUDGET + + VmaCurrentBudgetData(); + + void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize); + void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize); +}; + +#ifndef _VMA_CURRENT_BUDGET_DATA_FUNCTIONS +VmaCurrentBudgetData::VmaCurrentBudgetData() +{ + for (uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex) + { + m_BlockCount[heapIndex] = 0; + m_AllocationCount[heapIndex] = 0; + m_BlockBytes[heapIndex] = 0; + m_AllocationBytes[heapIndex] = 0; +#if VMA_MEMORY_BUDGET + m_VulkanUsage[heapIndex] = 0; + m_VulkanBudget[heapIndex] = 0; + m_BlockBytesAtBudgetFetch[heapIndex] = 0; +#endif + } + +#if VMA_MEMORY_BUDGET + m_OperationsSinceBudgetFetch = 0; +#endif +} + +void VmaCurrentBudgetData::AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize) +{ + m_AllocationBytes[heapIndex] += allocationSize; + ++m_AllocationCount[heapIndex]; +#if VMA_MEMORY_BUDGET + ++m_OperationsSinceBudgetFetch; +#endif +} + +void VmaCurrentBudgetData::RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize) +{ + VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize); + m_AllocationBytes[heapIndex] -= allocationSize; + VMA_ASSERT(m_AllocationCount[heapIndex] > 0); + --m_AllocationCount[heapIndex]; +#if VMA_MEMORY_BUDGET + ++m_OperationsSinceBudgetFetch; +#endif +} +#endif // _VMA_CURRENT_BUDGET_DATA_FUNCTIONS +#endif // _VMA_CURRENT_BUDGET_DATA + +#ifndef 
_VMA_ALLOCATION_OBJECT_ALLOCATOR
+/*
+Thread-safe wrapper over VmaPoolAllocator free list, for allocation of VmaAllocation_T objects.
+*/
+class VmaAllocationObjectAllocator
+{
+    VMA_CLASS_NO_COPY_NO_MOVE(VmaAllocationObjectAllocator)
+public:
+    explicit VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks)
+        : m_Allocator(pAllocationCallbacks, 1024) {}
+
+    template<typename... Types> VmaAllocation Allocate(Types&&... args);
+    void Free(VmaAllocation hAlloc);
+
+private:
+    VMA_MUTEX m_Mutex;
+    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
+};
+
+template<typename... Types>
+VmaAllocation VmaAllocationObjectAllocator::Allocate(Types&&... args)
+{
+    VmaMutexLock mutexLock(m_Mutex);
+    return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
+}
+
+void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
+{
+    VmaMutexLock mutexLock(m_Mutex);
+    m_Allocator.Free(hAlloc);
+}
+#endif // _VMA_ALLOCATION_OBJECT_ALLOCATOR
+
+#ifndef _VMA_VIRTUAL_BLOCK_T
+struct VmaVirtualBlock_T
+{
+    VMA_CLASS_NO_COPY_NO_MOVE(VmaVirtualBlock_T)
+public:
+    const bool m_AllocationCallbacksSpecified;
+    const VkAllocationCallbacks m_AllocationCallbacks;
+
+    explicit VmaVirtualBlock_T(const VmaVirtualBlockCreateInfo& createInfo);
+    ~VmaVirtualBlock_T();
+
+    bool IsEmpty() const { return m_Metadata->IsEmpty(); }
+    void Free(VmaVirtualAllocation allocation) { m_Metadata->Free((VmaAllocHandle)allocation); }
+    void SetAllocationUserData(VmaVirtualAllocation allocation, void* userData) { m_Metadata->SetAllocationUserData((VmaAllocHandle)allocation, userData); }
+    void Clear() { m_Metadata->Clear(); }
+
+    const VkAllocationCallbacks* GetAllocationCallbacks() const;
+    void GetAllocationInfo(VmaVirtualAllocation allocation, VmaVirtualAllocationInfo& outInfo);
+    VkResult Allocate(const VmaVirtualAllocationCreateInfo& createInfo, VmaVirtualAllocation& outAllocation,
+        VkDeviceSize* outOffset);
+    void GetStatistics(VmaStatistics& outStats) const;
+    void CalculateDetailedStatistics(VmaDetailedStatistics& outStats) const;
+#if VMA_STATS_STRING_ENABLED
+    void BuildStatsString(bool detailedMap, VmaStringBuilder& sb) const;
+#endif
+
+private:
+    VmaBlockMetadata* m_Metadata;
+};
+
+#ifndef _VMA_VIRTUAL_BLOCK_T_FUNCTIONS
+VmaVirtualBlock_T::VmaVirtualBlock_T(const VmaVirtualBlockCreateInfo& createInfo)
+    : m_AllocationCallbacksSpecified(createInfo.pAllocationCallbacks != VMA_NULL),
+    m_AllocationCallbacks(createInfo.pAllocationCallbacks != VMA_NULL ? *createInfo.pAllocationCallbacks : VmaEmptyAllocationCallbacks)
+{
+    const uint32_t algorithm = createInfo.flags & VMA_VIRTUAL_BLOCK_CREATE_ALGORITHM_MASK;
+    switch (algorithm)
+    {
+    case 0:
+        m_Metadata = vma_new(GetAllocationCallbacks(), VmaBlockMetadata_TLSF)(VK_NULL_HANDLE, 1, true);
+        break;
+    case VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT:
+        m_Metadata = vma_new(GetAllocationCallbacks(), VmaBlockMetadata_Linear)(VK_NULL_HANDLE, 1, true);
+        break;
+    default:
+        VMA_ASSERT(0);
+        m_Metadata = vma_new(GetAllocationCallbacks(), VmaBlockMetadata_TLSF)(VK_NULL_HANDLE, 1, true);
+    }
+
+    m_Metadata->Init(createInfo.size);
+}
+
+VmaVirtualBlock_T::~VmaVirtualBlock_T()
+{
+    // Define macro VMA_DEBUG_LOG_FORMAT or more specialized VMA_LEAK_LOG_FORMAT
+    // to receive the list of the unfreed allocations.
+    if (!m_Metadata->IsEmpty())
+        m_Metadata->DebugLogAllAllocations();
+    // This is the most important assert in the entire library.
+    // Hitting it means you have some memory leak - unreleased virtual allocations.
+ VMA_ASSERT_LEAK(m_Metadata->IsEmpty() && "Some virtual allocations were not freed before destruction of this virtual block!"); + + vma_delete(GetAllocationCallbacks(), m_Metadata); +} + +const VkAllocationCallbacks* VmaVirtualBlock_T::GetAllocationCallbacks() const +{ + return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : VMA_NULL; +} + +void VmaVirtualBlock_T::GetAllocationInfo(VmaVirtualAllocation allocation, VmaVirtualAllocationInfo& outInfo) +{ + m_Metadata->GetAllocationInfo((VmaAllocHandle)allocation, outInfo); +} + +VkResult VmaVirtualBlock_T::Allocate(const VmaVirtualAllocationCreateInfo& createInfo, VmaVirtualAllocation& outAllocation, + VkDeviceSize* outOffset) +{ + VmaAllocationRequest request = {}; + if (m_Metadata->CreateAllocationRequest( + createInfo.size, // allocSize + VMA_MAX(createInfo.alignment, (VkDeviceSize)1), // allocAlignment + (createInfo.flags & VMA_VIRTUAL_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0, // upperAddress + VMA_SUBALLOCATION_TYPE_UNKNOWN, // allocType - unimportant + createInfo.flags & VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MASK, // strategy + &request)) + { + m_Metadata->Alloc(request, + VMA_SUBALLOCATION_TYPE_UNKNOWN, // type - unimportant + createInfo.pUserData); + outAllocation = (VmaVirtualAllocation)request.allocHandle; + if(outOffset) + *outOffset = m_Metadata->GetAllocationOffset(request.allocHandle); + return VK_SUCCESS; + } + outAllocation = (VmaVirtualAllocation)VK_NULL_HANDLE; + if (outOffset) + *outOffset = UINT64_MAX; + return VK_ERROR_OUT_OF_DEVICE_MEMORY; +} + +void VmaVirtualBlock_T::GetStatistics(VmaStatistics& outStats) const +{ + VmaClearStatistics(outStats); + m_Metadata->AddStatistics(outStats); +} + +void VmaVirtualBlock_T::CalculateDetailedStatistics(VmaDetailedStatistics& outStats) const +{ + VmaClearDetailedStatistics(outStats); + m_Metadata->AddDetailedStatistics(outStats); +} + +#if VMA_STATS_STRING_ENABLED +void VmaVirtualBlock_T::BuildStatsString(bool detailedMap, VmaStringBuilder& sb) const +{ + VmaJsonWriter json(GetAllocationCallbacks(), sb); + json.BeginObject(); + + VmaDetailedStatistics stats; + CalculateDetailedStatistics(stats); + + json.WriteString("Stats"); + VmaPrintDetailedStatistics(json, stats); + + if (detailedMap) + { + json.WriteString("Details"); + json.BeginObject(); + m_Metadata->PrintDetailedMap(json); + json.EndObject(); + } + + json.EndObject(); +} +#endif // VMA_STATS_STRING_ENABLED +#endif // _VMA_VIRTUAL_BLOCK_T_FUNCTIONS +#endif // _VMA_VIRTUAL_BLOCK_T + + +// Main allocator object. +struct VmaAllocator_T +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaAllocator_T) +public: + const bool m_UseMutex; + const uint32_t m_VulkanApiVersion; + bool m_UseKhrDedicatedAllocation; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0). + bool m_UseKhrBindMemory2; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0). + bool m_UseExtMemoryBudget; + bool m_UseAmdDeviceCoherentMemory; + bool m_UseKhrBufferDeviceAddress; + bool m_UseExtMemoryPriority; + bool m_UseKhrMaintenance4; + bool m_UseKhrMaintenance5; + bool m_UseKhrExternalMemoryWin32; + const VkDevice m_hDevice; + const VkInstance m_hInstance; + const bool m_AllocationCallbacksSpecified; + const VkAllocationCallbacks m_AllocationCallbacks; + VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks; + VmaAllocationObjectAllocator m_AllocationObjectAllocator; + + // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap, so cannot allocate more than the heap size. 
+ uint32_t m_HeapSizeLimitMask; + + VkPhysicalDeviceProperties m_PhysicalDeviceProperties; + VkPhysicalDeviceMemoryProperties m_MemProps; + + // Default pools. + VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES]; + VmaDedicatedAllocationList m_DedicatedAllocations[VK_MAX_MEMORY_TYPES]; + + VmaCurrentBudgetData m_Budget; + VMA_ATOMIC_UINT32 m_DeviceMemoryCount; // Total number of VkDeviceMemory objects. + + explicit VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo); + VkResult Init(const VmaAllocatorCreateInfo* pCreateInfo); + ~VmaAllocator_T(); + + const VkAllocationCallbacks* GetAllocationCallbacks() const + { + return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : VMA_NULL; + } + const VmaVulkanFunctions& GetVulkanFunctions() const + { + return m_VulkanFunctions; + } + + VkPhysicalDevice GetPhysicalDevice() const { return m_PhysicalDevice; } + + VkDeviceSize GetBufferImageGranularity() const + { + return VMA_MAX( + static_cast(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY), + m_PhysicalDeviceProperties.limits.bufferImageGranularity); + } + + uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; } + uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; } + + uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const + { + VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount); + return m_MemProps.memoryTypes[memTypeIndex].heapIndex; + } + // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT. + bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const + { + return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) == + VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; + } + // Minimum alignment for all allocations in specific memory type. + VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const + { + return IsMemoryTypeNonCoherent(memTypeIndex) ? + VMA_MAX((VkDeviceSize)VMA_MIN_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) : + (VkDeviceSize)VMA_MIN_ALIGNMENT; + } + + bool IsIntegratedGpu() const + { + return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU; + } + + uint32_t GetGlobalMemoryTypeBits() const { return m_GlobalMemoryTypeBits; } + + void GetBufferMemoryRequirements( + VkBuffer hBuffer, + VkMemoryRequirements& memReq, + bool& requiresDedicatedAllocation, + bool& prefersDedicatedAllocation) const; + void GetImageMemoryRequirements( + VkImage hImage, + VkMemoryRequirements& memReq, + bool& requiresDedicatedAllocation, + bool& prefersDedicatedAllocation) const; + VkResult FindMemoryTypeIndex( + uint32_t memoryTypeBits, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + VmaBufferImageUsage bufImgUsage, + uint32_t* pMemoryTypeIndex) const; + + // Common code for public functions vmaCreateBuffer, vmaCreateBufferWithAlignment, etc. + VkResult CreateBuffer( + const VkBufferCreateInfo* pBufferCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + void* pMemoryAllocateNext, // pNext chain for VkMemoryAllocateInfo. + VkBuffer* pBuffer, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo); + // Common code for public functions vmaCreateImage, vmaCreateDedicatedImage. + VkResult CreateImage( + const VkImageCreateInfo* pImageCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + void* pMemoryAllocateNext, // pNext chain for VkMemoryAllocateInfo. 
+ VkImage* pImage, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo); + + // Main allocation function. + VkResult AllocateMemory( + VkMemoryRequirements vkMemReq, + bool requiresDedicatedAllocation, + bool prefersDedicatedAllocation, + VkBuffer dedicatedBuffer, + VkImage dedicatedImage, + VmaBufferImageUsage dedicatedBufferImageUsage, + void* pMemoryAllocateNext, // Optional pNext chain for VkMemoryAllocateInfo. + const VmaAllocationCreateInfo& createInfo, + VmaSuballocationType suballocType, + size_t allocationCount, + VmaAllocation* pAllocations); + + // Main deallocation function. + void FreeMemory( + size_t allocationCount, + const VmaAllocation* pAllocations); + + void CalculateStatistics(VmaTotalStatistics* pStats); + + void GetHeapBudgets( + VmaBudget* outBudgets, uint32_t firstHeap, uint32_t heapCount); + +#if VMA_STATS_STRING_ENABLED + void PrintDetailedMap(class VmaJsonWriter& json); +#endif + + static void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo); + static void GetAllocationInfo2(VmaAllocation hAllocation, VmaAllocationInfo2* pAllocationInfo); + + VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool); + void DestroyPool(VmaPool pool); + static void GetPoolStatistics(VmaPool pool, VmaStatistics* pPoolStats); + static void CalculatePoolStatistics(VmaPool pool, VmaDetailedStatistics* pPoolStats); + + void SetCurrentFrameIndex(uint32_t frameIndex); + uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); } + + static VkResult CheckPoolCorruption(VmaPool hPool); + VkResult CheckCorruption(uint32_t memoryTypeBits); + + // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping. + VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory); + // Call to Vulkan function vkFreeMemory with accompanying bookkeeping. + void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory); + // Call to Vulkan function vkBindBufferMemory or vkBindBufferMemory2KHR. + VkResult BindVulkanBuffer( + VkDeviceMemory memory, + VkDeviceSize memoryOffset, + VkBuffer buffer, + const void* pNext) const; + // Call to Vulkan function vkBindImageMemory or vkBindImageMemory2KHR. 
+ VkResult BindVulkanImage( + VkDeviceMemory memory, + VkDeviceSize memoryOffset, + VkImage image, + const void* pNext) const; + + VkResult Map(VmaAllocation hAllocation, void** ppData); + void Unmap(VmaAllocation hAllocation); + + VkResult BindBufferMemory( + VmaAllocation hAllocation, + VkDeviceSize allocationLocalOffset, + VkBuffer hBuffer, + const void* pNext); + VkResult BindImageMemory( + VmaAllocation hAllocation, + VkDeviceSize allocationLocalOffset, + VkImage hImage, + const void* pNext); + + VkResult FlushOrInvalidateAllocation( + VmaAllocation hAllocation, + VkDeviceSize offset, VkDeviceSize size, + VMA_CACHE_OPERATION op); + VkResult FlushOrInvalidateAllocations( + uint32_t allocationCount, + const VmaAllocation* allocations, + const VkDeviceSize* offsets, const VkDeviceSize* sizes, + VMA_CACHE_OPERATION op); + + VkResult CopyMemoryToAllocation( + const void* pSrcHostPointer, + VmaAllocation dstAllocation, + VkDeviceSize dstAllocationLocalOffset, + VkDeviceSize size); + VkResult CopyAllocationToMemory( + VmaAllocation srcAllocation, + VkDeviceSize srcAllocationLocalOffset, + void* pDstHostPointer, + VkDeviceSize size); + + void FillAllocation(VmaAllocation hAllocation, uint8_t pattern); + + /* + Returns bit mask of memory types that can support defragmentation on GPU as + they support creation of required buffer for copy operations. + */ + uint32_t GetGpuDefragmentationMemoryTypeBits(); + +#if VMA_EXTERNAL_MEMORY + VkExternalMemoryHandleTypeFlagsKHR GetExternalMemoryHandleTypeFlags(uint32_t memTypeIndex) const + { + return m_TypeExternalMemoryHandleTypes[memTypeIndex]; + } +#endif // #if VMA_EXTERNAL_MEMORY + +private: + VkDeviceSize m_PreferredLargeHeapBlockSize; + + VkPhysicalDevice m_PhysicalDevice; + VMA_ATOMIC_UINT32 m_CurrentFrameIndex; + VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized. +#if VMA_EXTERNAL_MEMORY + VkExternalMemoryHandleTypeFlagsKHR m_TypeExternalMemoryHandleTypes[VK_MAX_MEMORY_TYPES]; +#endif // #if VMA_EXTERNAL_MEMORY + + VMA_RW_MUTEX m_PoolsMutex; + typedef VmaIntrusiveLinkedList PoolList; + // Protected by m_PoolsMutex. + PoolList m_Pools; + uint32_t m_NextPoolId; + + VmaVulkanFunctions m_VulkanFunctions; + + // Global bit mask AND-ed with any memoryTypeBits to disallow certain memory types. + uint32_t m_GlobalMemoryTypeBits; + + void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions); + +#if VMA_STATIC_VULKAN_FUNCTIONS == 1 + void ImportVulkanFunctions_Static(); +#endif + + void ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions); + +#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 + void ImportVulkanFunctions_Dynamic(); +#endif + + void ValidateVulkanFunctions() const; + + VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex); + + VkResult AllocateMemoryOfType( + VmaPool pool, + VkDeviceSize size, + VkDeviceSize alignment, + bool dedicatedPreferred, + VkBuffer dedicatedBuffer, + VkImage dedicatedImage, + VmaBufferImageUsage dedicatedBufferImageUsage, + void* pMemoryAllocateNext, // Optional pNext chain for VkMemoryAllocateInfo. + const VmaAllocationCreateInfo& createInfo, + uint32_t memTypeIndex, + VmaSuballocationType suballocType, + VmaDedicatedAllocationList& dedicatedAllocations, + VmaBlockVector& blockVector, + size_t allocationCount, + VmaAllocation* pAllocations); + + // Helper function only to be used inside AllocateDedicatedMemory. 
+ VkResult AllocateDedicatedMemoryPage(
+ VmaPool pool,
+ VkDeviceSize size,
+ VmaSuballocationType suballocType,
+ uint32_t memTypeIndex,
+ const VkMemoryAllocateInfo& allocInfo,
+ bool map,
+ bool isUserDataString,
+ bool isMappingAllowed,
+ void* pUserData,
+ VmaAllocation* pAllocation);
+
+ // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
+ VkResult AllocateDedicatedMemory(
+ VmaPool pool,
+ VkDeviceSize size,
+ VmaSuballocationType suballocType,
+ VmaDedicatedAllocationList& dedicatedAllocations,
+ uint32_t memTypeIndex,
+ bool map,
+ bool isUserDataString,
+ bool isMappingAllowed,
+ bool canAliasMemory,
+ void* pUserData,
+ float priority,
+ VkBuffer dedicatedBuffer,
+ VkImage dedicatedImage,
+ VmaBufferImageUsage dedicatedBufferImageUsage,
+ size_t allocationCount,
+ VmaAllocation* pAllocations,
+ const void* pNextChain);
+
+ void FreeDedicatedMemory(VmaAllocation allocation);
+
+ VkResult CalcMemTypeParams(
+ VmaAllocationCreateInfo& outCreateInfo,
+ uint32_t memTypeIndex,
+ VkDeviceSize size,
+ size_t allocationCount);
+ static VkResult CalcAllocationParams(
+ VmaAllocationCreateInfo& outCreateInfo,
+ bool dedicatedRequired);
+
+ /*
+ Calculates and returns bit mask of memory types that can support defragmentation
+ on GPU as they support creation of required buffer for copy operations.
+ */
+ uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;
+ uint32_t CalculateGlobalMemoryTypeBits() const;
+
+ bool GetFlushOrInvalidateRange(
+ VmaAllocation allocation,
+ VkDeviceSize offset, VkDeviceSize size,
+ VkMappedMemoryRange& outRange) const;
+
+#if VMA_MEMORY_BUDGET
+ void UpdateVulkanBudget();
+#endif // #if VMA_MEMORY_BUDGET
+};
+
+
+#ifndef _VMA_MEMORY_FUNCTIONS
+namespace
+{
+inline void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
+{
+ return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
+}
+
+inline void VmaFree(VmaAllocator hAllocator, void* ptr)
+{
+ VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
+}
+
+template<typename T>
+T* VmaAllocate(VmaAllocator hAllocator)
+{
+ return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
+}
+
+template<typename T>
+T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
+{
+ return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
+}
+
+template<typename T>
+void vma_delete(VmaAllocator hAllocator, T* ptr)
+{
+ if(ptr != VMA_NULL)
+ {
+ ptr->~T();
+ VmaFree(hAllocator, ptr);
+ }
+}
+
+template<typename T>
+void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
+{
+ if(ptr != VMA_NULL)
+ {
+ for(size_t i = count; i--; )
+ ptr[i].~T();
+ VmaFree(hAllocator, ptr);
+ }
+}
+} // namespace
+#endif // _VMA_MEMORY_FUNCTIONS
+
+#ifndef _VMA_DEVICE_MEMORY_BLOCK_FUNCTIONS
+VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator)
+ : m_pMetadata(VMA_NULL),
+ m_hParentPool(nullptr),
+ m_MemoryTypeIndex(UINT32_MAX),
+ m_Id(0),
+ m_hMemory(VK_NULL_HANDLE),
+ m_MapCount(0),
+ m_pMappedData(VMA_NULL){}
+
+VmaDeviceMemoryBlock::~VmaDeviceMemoryBlock()
+{
+ VMA_ASSERT_LEAK(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
+ VMA_ASSERT_LEAK(m_hMemory == VK_NULL_HANDLE);
+}
+
+void VmaDeviceMemoryBlock::Init(
+ VmaAllocator hAllocator,
+ VmaPool hParentPool,
+ uint32_t newMemoryTypeIndex,
+ VkDeviceMemory newMemory,
+ VkDeviceSize newSize,
+ uint32_t id,
+ uint32_t algorithm,
+ VkDeviceSize bufferImageGranularity)
+{
+ VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
+
+ m_hParentPool = hParentPool;
+ m_MemoryTypeIndex = newMemoryTypeIndex;
+ m_Id
= id; + m_hMemory = newMemory; + + switch (algorithm) + { + case 0: + m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_TLSF)(hAllocator->GetAllocationCallbacks(), + bufferImageGranularity, false); // isVirtual + break; + case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT: + m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator->GetAllocationCallbacks(), + bufferImageGranularity, false); // isVirtual + break; + default: + VMA_ASSERT(0); + m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_TLSF)(hAllocator->GetAllocationCallbacks(), + bufferImageGranularity, false); // isVirtual + } + m_pMetadata->Init(newSize); +} + +void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator) +{ + // Define macro VMA_DEBUG_LOG_FORMAT or more specialized VMA_LEAK_LOG_FORMAT + // to receive the list of the unfreed allocations. + if (!m_pMetadata->IsEmpty()) + m_pMetadata->DebugLogAllAllocations(); + // This is the most important assert in the entire library. + // Hitting it means you have some memory leak - unreleased VmaAllocation objects. + VMA_ASSERT_LEAK(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!"); + + VMA_ASSERT_LEAK(m_hMemory != VK_NULL_HANDLE); + allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory); + m_hMemory = VK_NULL_HANDLE; + + vma_delete(allocator, m_pMetadata); + m_pMetadata = VMA_NULL; +} + +void VmaDeviceMemoryBlock::PostAlloc(VmaAllocator hAllocator) +{ + VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); + m_MappingHysteresis.PostAlloc(); +} + +void VmaDeviceMemoryBlock::PostFree(VmaAllocator hAllocator) +{ + VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); + if(m_MappingHysteresis.PostFree()) + { + VMA_ASSERT(m_MappingHysteresis.GetExtraMapping() == 0); + if (m_MapCount == 0) + { + m_pMappedData = VMA_NULL; + (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory); + } + } +} + +bool VmaDeviceMemoryBlock::Validate() const +{ + VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) && + (m_pMetadata->GetSize() != 0)); + + return m_pMetadata->Validate(); +} + +VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator) +{ + void* pData = VMA_NULL; + VkResult res = Map(hAllocator, 1, &pData); + if (res != VK_SUCCESS) + { + return res; + } + + res = m_pMetadata->CheckCorruption(pData); + + Unmap(hAllocator, 1); + + return res; +} + +VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData) +{ + if (count == 0) + { + return VK_SUCCESS; + } + + VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); + const uint32_t oldTotalMapCount = m_MapCount + m_MappingHysteresis.GetExtraMapping(); + if (oldTotalMapCount != 0) + { + VMA_ASSERT(m_pMappedData != VMA_NULL); + m_MappingHysteresis.PostMap(); + m_MapCount += count; + if (ppData != VMA_NULL) + { + *ppData = m_pMappedData; + } + return VK_SUCCESS; + } + + VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)( + hAllocator->m_hDevice, + m_hMemory, + 0, // offset + VK_WHOLE_SIZE, + 0, // flags + &m_pMappedData); + if (result == VK_SUCCESS) + { + VMA_ASSERT(m_pMappedData != VMA_NULL); + m_MappingHysteresis.PostMap(); + m_MapCount = count; + if (ppData != VMA_NULL) + { + *ppData = m_pMappedData; + } + } + return result; +} + +void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count) +{ + if (count == 0) + { + return; + } + + VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); + if (m_MapCount >= count) + { + m_MapCount -= count; + 
const uint32_t totalMapCount = m_MapCount + m_MappingHysteresis.GetExtraMapping(); + if (totalMapCount == 0) + { + m_pMappedData = VMA_NULL; + (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory); + } + m_MappingHysteresis.PostUnmap(); + } + else + { + VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped."); + } +} + +VkResult VmaDeviceMemoryBlock::WriteMagicValueAfterAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize) +{ + VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION); + + void* pData = VMA_NULL; + VkResult res = Map(hAllocator, 1, &pData); + if (res != VK_SUCCESS) + { + return res; + } + + VmaWriteMagicValue(pData, allocOffset + allocSize); + + Unmap(hAllocator, 1); + return VK_SUCCESS; +} + +VkResult VmaDeviceMemoryBlock::ValidateMagicValueAfterAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize) +{ + VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION); + + void* pData = VMA_NULL; + VkResult res = Map(hAllocator, 1, &pData); + if (res != VK_SUCCESS) + { + return res; + } + + if (!VmaValidateMagicValue(pData, allocOffset + allocSize)) + { + VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!"); + } + + Unmap(hAllocator, 1); + return VK_SUCCESS; +} + +VkResult VmaDeviceMemoryBlock::BindBufferMemory( + VmaAllocator hAllocator, + VmaAllocation hAllocation, + VkDeviceSize allocationLocalOffset, + VkBuffer hBuffer, + const void* pNext) +{ + VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK && + hAllocation->GetBlock() == this); + VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() && + "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?"); + const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset; + // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads. + VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); + return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext); +} + +VkResult VmaDeviceMemoryBlock::BindImageMemory( + VmaAllocator hAllocator, + VmaAllocation hAllocation, + VkDeviceSize allocationLocalOffset, + VkImage hImage, + const void* pNext) +{ + VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK && + hAllocation->GetBlock() == this); + VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() && + "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?"); + const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset; + // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads. 
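// Usage sketch (illustrative only, not part of the diffed VMA source): the magic-value
// helpers above only take effect when the library is compiled with a non-zero debug margin
// and corruption detection enabled, typically in the translation unit that defines
// VMA_IMPLEMENTATION. vmaCheckCorruption() then validates the margins; UINT32_MAX means
// "check all memory types". 'allocator' is assumed to already exist.
#if 0
#define VMA_DEBUG_MARGIN 16
#define VMA_DEBUG_DETECT_CORRUPTION 1
#define VMA_IMPLEMENTATION
#include "vk_mem_alloc.h"

// Later, at any point during the application's lifetime:
VkResult res = vmaCheckCorruption(allocator, UINT32_MAX);
// VK_SUCCESS means no corruption was found in the checked, host-visible memory types;
// VK_ERROR_FEATURE_NOT_PRESENT means corruption detection is not enabled or not possible.
#endif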
+ VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); + return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext); +} + +#if VMA_EXTERNAL_MEMORY_WIN32 +VkResult VmaDeviceMemoryBlock::CreateWin32Handle(const VmaAllocator hAllocator, PFN_vkGetMemoryWin32HandleKHR pvkGetMemoryWin32HandleKHR, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE hTargetProcess, HANDLE* pHandle) noexcept +{ + VMA_ASSERT(pHandle); + return m_Handle.GetHandle(hAllocator->m_hDevice, m_hMemory, pvkGetMemoryWin32HandleKHR, handleType, hTargetProcess, hAllocator->m_UseMutex, pHandle); +} +#endif // VMA_EXTERNAL_MEMORY_WIN32 +#endif // _VMA_DEVICE_MEMORY_BLOCK_FUNCTIONS + +#ifndef _VMA_ALLOCATION_T_FUNCTIONS +VmaAllocation_T::VmaAllocation_T(bool mappingAllowed) + : m_Alignment{ 1 }, + m_Size{ 0 }, + m_pUserData{ VMA_NULL }, + m_pName{ VMA_NULL }, + m_MemoryTypeIndex{ 0 }, + m_Type{ (uint8_t)ALLOCATION_TYPE_NONE }, + m_SuballocationType{ (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN }, + m_MapCount{ 0 }, + m_Flags{ 0 } +{ + if(mappingAllowed) + m_Flags |= (uint8_t)FLAG_MAPPING_ALLOWED; +} + +VmaAllocation_T::~VmaAllocation_T() +{ + VMA_ASSERT_LEAK(m_MapCount == 0 && "Allocation was not unmapped before destruction."); + + // Check if owned string was freed. + VMA_ASSERT(m_pName == VMA_NULL); +} + +void VmaAllocation_T::InitBlockAllocation( + VmaDeviceMemoryBlock* block, + VmaAllocHandle allocHandle, + VkDeviceSize alignment, + VkDeviceSize size, + uint32_t memoryTypeIndex, + VmaSuballocationType suballocationType, + bool mapped) +{ + VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE); + VMA_ASSERT(block != VMA_NULL); + m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK; + m_Alignment = alignment; + m_Size = size; + m_MemoryTypeIndex = memoryTypeIndex; + if(mapped) + { + VMA_ASSERT(IsMappingAllowed() && "Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it."); + m_Flags |= (uint8_t)FLAG_PERSISTENT_MAP; + } + m_SuballocationType = (uint8_t)suballocationType; + m_BlockAllocation.m_Block = block; + m_BlockAllocation.m_AllocHandle = allocHandle; +} + +void VmaAllocation_T::InitDedicatedAllocation( + VmaAllocator allocator, + VmaPool hParentPool, + uint32_t memoryTypeIndex, + VkDeviceMemory hMemory, + VmaSuballocationType suballocationType, + void* pMappedData, + VkDeviceSize size) +{ + VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE); + VMA_ASSERT(hMemory != VK_NULL_HANDLE); + m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED; + m_Alignment = 0; + m_Size = size; + m_MemoryTypeIndex = memoryTypeIndex; + m_SuballocationType = (uint8_t)suballocationType; + m_DedicatedAllocation.m_ExtraData = VMA_NULL; + m_DedicatedAllocation.m_hParentPool = hParentPool; + m_DedicatedAllocation.m_hMemory = hMemory; + m_DedicatedAllocation.m_Prev = VMA_NULL; + m_DedicatedAllocation.m_Next = VMA_NULL; + + if (pMappedData != VMA_NULL) + { + VMA_ASSERT(IsMappingAllowed() && "Mapping is not allowed on this allocation! 
Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it."); + m_Flags |= (uint8_t)FLAG_PERSISTENT_MAP; + EnsureExtraData(allocator); + m_DedicatedAllocation.m_ExtraData->m_pMappedData = pMappedData; + } +} + +void VmaAllocation_T::Destroy(VmaAllocator allocator) +{ + FreeName(allocator); + + if (GetType() == ALLOCATION_TYPE_DEDICATED) + { + vma_delete(allocator, m_DedicatedAllocation.m_ExtraData); + } +} + +void VmaAllocation_T::SetName(VmaAllocator hAllocator, const char* pName) +{ + VMA_ASSERT(pName == VMA_NULL || pName != m_pName); + + FreeName(hAllocator); + + if (pName != VMA_NULL) + m_pName = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), pName); +} + +uint8_t VmaAllocation_T::SwapBlockAllocation(VmaAllocator hAllocator, VmaAllocation allocation) +{ + VMA_ASSERT(allocation != VMA_NULL); + VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK); + VMA_ASSERT(allocation->m_Type == ALLOCATION_TYPE_BLOCK); + + if (m_MapCount != 0) + m_BlockAllocation.m_Block->Unmap(hAllocator, m_MapCount); + + m_BlockAllocation.m_Block->m_pMetadata->SetAllocationUserData(m_BlockAllocation.m_AllocHandle, allocation); + std::swap(m_BlockAllocation, allocation->m_BlockAllocation); + m_BlockAllocation.m_Block->m_pMetadata->SetAllocationUserData(m_BlockAllocation.m_AllocHandle, this); + +#if VMA_STATS_STRING_ENABLED + std::swap(m_BufferImageUsage, allocation->m_BufferImageUsage); +#endif + return m_MapCount; +} + +VmaAllocHandle VmaAllocation_T::GetAllocHandle() const +{ + switch (m_Type) + { + case ALLOCATION_TYPE_BLOCK: + return m_BlockAllocation.m_AllocHandle; + case ALLOCATION_TYPE_DEDICATED: + return VK_NULL_HANDLE; + default: + VMA_ASSERT(0); + return VK_NULL_HANDLE; + } +} + +VkDeviceSize VmaAllocation_T::GetOffset() const +{ + switch (m_Type) + { + case ALLOCATION_TYPE_BLOCK: + return m_BlockAllocation.m_Block->m_pMetadata->GetAllocationOffset(m_BlockAllocation.m_AllocHandle); + case ALLOCATION_TYPE_DEDICATED: + return 0; + default: + VMA_ASSERT(0); + return 0; + } +} + +VmaPool VmaAllocation_T::GetParentPool() const +{ + switch (m_Type) + { + case ALLOCATION_TYPE_BLOCK: + return m_BlockAllocation.m_Block->GetParentPool(); + case ALLOCATION_TYPE_DEDICATED: + return m_DedicatedAllocation.m_hParentPool; + default: + VMA_ASSERT(0); + return VK_NULL_HANDLE; + } +} + +VkDeviceMemory VmaAllocation_T::GetMemory() const +{ + switch (m_Type) + { + case ALLOCATION_TYPE_BLOCK: + return m_BlockAllocation.m_Block->GetDeviceMemory(); + case ALLOCATION_TYPE_DEDICATED: + return m_DedicatedAllocation.m_hMemory; + default: + VMA_ASSERT(0); + return VK_NULL_HANDLE; + } +} + +void* VmaAllocation_T::GetMappedData() const +{ + switch (m_Type) + { + case ALLOCATION_TYPE_BLOCK: + if (m_MapCount != 0 || IsPersistentMap()) + { + void* pBlockData = m_BlockAllocation.m_Block->GetMappedData(); + VMA_ASSERT(pBlockData != VMA_NULL); + return (char*)pBlockData + GetOffset(); + } + else + { + return VMA_NULL; + } + break; + case ALLOCATION_TYPE_DEDICATED: + VMA_ASSERT((m_DedicatedAllocation.m_ExtraData != VMA_NULL && m_DedicatedAllocation.m_ExtraData->m_pMappedData != VMA_NULL) == + (m_MapCount != 0 || IsPersistentMap())); + return m_DedicatedAllocation.m_ExtraData != VMA_NULL ? m_DedicatedAllocation.m_ExtraData->m_pMappedData : VMA_NULL; + default: + VMA_ASSERT(0); + return VMA_NULL; + } +} + +void VmaAllocation_T::BlockAllocMap() +{ + VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK); + VMA_ASSERT(IsMappingAllowed() && "Mapping is not allowed on this allocation! 
Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it."); + + if (m_MapCount < 0xFF) + { + ++m_MapCount; + } + else + { + VMA_ASSERT(0 && "Allocation mapped too many times simultaneously."); + } +} + +void VmaAllocation_T::BlockAllocUnmap() +{ + VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK); + + if (m_MapCount > 0) + { + --m_MapCount; + } + else + { + VMA_ASSERT(0 && "Unmapping allocation not previously mapped."); + } +} + +VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData) +{ + VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED); + VMA_ASSERT(IsMappingAllowed() && "Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it."); + + EnsureExtraData(hAllocator); + + if (m_MapCount != 0 || IsPersistentMap()) + { + if (m_MapCount < 0xFF) + { + VMA_ASSERT(m_DedicatedAllocation.m_ExtraData->m_pMappedData != VMA_NULL); + *ppData = m_DedicatedAllocation.m_ExtraData->m_pMappedData; + ++m_MapCount; + return VK_SUCCESS; + } + + VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously."); + return VK_ERROR_MEMORY_MAP_FAILED; + } + + VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)( + hAllocator->m_hDevice, + m_DedicatedAllocation.m_hMemory, + 0, // offset + VK_WHOLE_SIZE, + 0, // flags + ppData); + if (result == VK_SUCCESS) + { + m_DedicatedAllocation.m_ExtraData->m_pMappedData = *ppData; + m_MapCount = 1; + } + return result; +} + +void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator) +{ + VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED); + + if (m_MapCount > 0) + { + --m_MapCount; + if (m_MapCount == 0 && !IsPersistentMap()) + { + VMA_ASSERT(m_DedicatedAllocation.m_ExtraData != VMA_NULL); + m_DedicatedAllocation.m_ExtraData->m_pMappedData = VMA_NULL; + (*hAllocator->GetVulkanFunctions().vkUnmapMemory)( + hAllocator->m_hDevice, + m_DedicatedAllocation.m_hMemory); + } + } + else + { + VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped."); + } +} + +#if VMA_STATS_STRING_ENABLED +void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const +{ + json.WriteString("Type"); + json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]); + + json.WriteString("Size"); + json.WriteNumber(m_Size); + json.WriteString("Usage"); + json.WriteNumber(m_BufferImageUsage.Value); // It may be uint32_t or uint64_t. 
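// Usage sketch (illustrative only, not part of the diffed VMA source): the reference-counted
// Map()/Unmap() logic above backs the public mapping API. 'allocator' and 'allocation'
// (created with one of the HOST_ACCESS flags) are assumed to exist; 'srcData' and 'dataSize'
// are placeholders for application data.
#if 0
void* mapped = nullptr;
if (vmaMapMemory(allocator, allocation, &mapped) == VK_SUCCESS)
{
    memcpy(mapped, srcData, dataSize);
    vmaUnmapMemory(allocator, allocation); // decrements the map reference count
}
// Alternatively, VMA_ALLOCATION_CREATE_MAPPED_BIT keeps the allocation persistently mapped
// and exposes the pointer through VmaAllocationInfo::pMappedData.
#endif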
+ + if (m_pUserData != VMA_NULL) + { + json.WriteString("CustomData"); + json.BeginString(); + json.ContinueString_Pointer(m_pUserData); + json.EndString(); + } + if (m_pName != VMA_NULL) + { + json.WriteString("Name"); + json.WriteString(m_pName); + } +} +#if VMA_EXTERNAL_MEMORY_WIN32 +VkResult VmaAllocation_T::GetWin32Handle(VmaAllocator hAllocator, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE hTargetProcess, HANDLE* pHandle) noexcept +{ + auto pvkGetMemoryWin32HandleKHR = hAllocator->GetVulkanFunctions().vkGetMemoryWin32HandleKHR; + switch (m_Type) + { + case ALLOCATION_TYPE_BLOCK: + return m_BlockAllocation.m_Block->CreateWin32Handle(hAllocator, pvkGetMemoryWin32HandleKHR, handleType, hTargetProcess, pHandle); + case ALLOCATION_TYPE_DEDICATED: + EnsureExtraData(hAllocator); + return m_DedicatedAllocation.m_ExtraData->m_Handle.GetHandle(hAllocator->m_hDevice, m_DedicatedAllocation.m_hMemory, pvkGetMemoryWin32HandleKHR, handleType, hTargetProcess, hAllocator->m_UseMutex, pHandle); + default: + VMA_ASSERT(0); + return VK_ERROR_FEATURE_NOT_PRESENT; + } +} +#endif // VMA_EXTERNAL_MEMORY_WIN32 +#endif // VMA_STATS_STRING_ENABLED + +void VmaAllocation_T::EnsureExtraData(VmaAllocator hAllocator) +{ + if (m_DedicatedAllocation.m_ExtraData == VMA_NULL) + { + m_DedicatedAllocation.m_ExtraData = vma_new(hAllocator, VmaAllocationExtraData)(); + } +} + +void VmaAllocation_T::FreeName(VmaAllocator hAllocator) +{ + if(m_pName) + { + VmaFreeString(hAllocator->GetAllocationCallbacks(), m_pName); + m_pName = VMA_NULL; + } +} +#endif // _VMA_ALLOCATION_T_FUNCTIONS + +#ifndef _VMA_BLOCK_VECTOR_FUNCTIONS +VmaBlockVector::VmaBlockVector( + VmaAllocator hAllocator, + VmaPool hParentPool, + uint32_t memoryTypeIndex, + VkDeviceSize preferredBlockSize, + size_t minBlockCount, + size_t maxBlockCount, + VkDeviceSize bufferImageGranularity, + bool explicitBlockSize, + uint32_t algorithm, + float priority, + VkDeviceSize minAllocationAlignment, + void* pMemoryAllocateNext) + : m_hAllocator(hAllocator), + m_hParentPool(hParentPool), + m_MemoryTypeIndex(memoryTypeIndex), + m_PreferredBlockSize(preferredBlockSize), + m_MinBlockCount(minBlockCount), + m_MaxBlockCount(maxBlockCount), + m_BufferImageGranularity(bufferImageGranularity), + m_ExplicitBlockSize(explicitBlockSize), + m_Algorithm(algorithm), + m_Priority(priority), + m_MinAllocationAlignment(minAllocationAlignment), + m_pMemoryAllocateNext(pMemoryAllocateNext), + m_Blocks(VmaStlAllocator(hAllocator->GetAllocationCallbacks())), + m_NextBlockId(0) {} + +VmaBlockVector::~VmaBlockVector() +{ + for (size_t i = m_Blocks.size(); i--; ) + { + m_Blocks[i]->Destroy(m_hAllocator); + vma_delete(m_hAllocator, m_Blocks[i]); + } +} + +VkResult VmaBlockVector::CreateMinBlocks() +{ + for (size_t i = 0; i < m_MinBlockCount; ++i) + { + VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL); + if (res != VK_SUCCESS) + { + return res; + } + } + return VK_SUCCESS; +} + +void VmaBlockVector::AddStatistics(VmaStatistics& inoutStats) +{ + VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex); + + const size_t blockCount = m_Blocks.size(); + for (uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex) + { + const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex]; + VMA_ASSERT(pBlock); + VMA_HEAVY_ASSERT(pBlock->Validate()); + pBlock->m_pMetadata->AddStatistics(inoutStats); + } +} + +void VmaBlockVector::AddDetailedStatistics(VmaDetailedStatistics& inoutStats) +{ + VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex); + + const size_t blockCount = 
m_Blocks.size(); + for (uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex) + { + const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex]; + VMA_ASSERT(pBlock); + VMA_HEAVY_ASSERT(pBlock->Validate()); + pBlock->m_pMetadata->AddDetailedStatistics(inoutStats); + } +} + +bool VmaBlockVector::IsEmpty() +{ + VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex); + return m_Blocks.empty(); +} + +bool VmaBlockVector::IsCorruptionDetectionEnabled() const +{ +#if (VMA_DEBUG_DETECT_CORRUPTION == 0) || (VMA_DEBUG_MARGIN == 0) + return false; +#else + constexpr uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; + return (m_Algorithm == 0 || m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) && + (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags; +#endif +} + +VkResult VmaBlockVector::Allocate( + VkDeviceSize size, + VkDeviceSize alignment, + const VmaAllocationCreateInfo& createInfo, + VmaSuballocationType suballocType, + size_t allocationCount, + VmaAllocation* pAllocations) +{ + size_t allocIndex = 0; + VkResult res = VK_SUCCESS; + + alignment = VMA_MAX(alignment, m_MinAllocationAlignment); + + if (IsCorruptionDetectionEnabled()) + { + size = VmaAlignUp(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE)); + alignment = VmaAlignUp(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE)); + } + + { + VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex); + for (; allocIndex < allocationCount; ++allocIndex) + { + res = AllocatePage( + size, + alignment, + createInfo, + suballocType, + pAllocations + allocIndex); + if (res != VK_SUCCESS) + { + break; + } + } + } + + if (res != VK_SUCCESS) + { + // Free all already created allocations. + while (allocIndex--) + Free(pAllocations[allocIndex]); + memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount); + } + + return res; +} + +VkResult VmaBlockVector::AllocatePage( + VkDeviceSize size, + VkDeviceSize alignment, + const VmaAllocationCreateInfo& createInfo, + VmaSuballocationType suballocType, + VmaAllocation* pAllocation) +{ + const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0; + + VkDeviceSize freeMemory = 0; + { + const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex); + VmaBudget heapBudget = {}; + m_hAllocator->GetHeapBudgets(&heapBudget, heapIndex, 1); + freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0; + } + + const bool canFallbackToDedicated = !HasExplicitBlockSize() && + (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0; + const bool canCreateNewBlock = + ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) && + (m_Blocks.size() < m_MaxBlockCount) && + (freeMemory >= size || !canFallbackToDedicated); + uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK; + + // Upper address can only be used with linear allocator and within single memory block. + if (isUpperAddress && + (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1)) + { + return VK_ERROR_FEATURE_NOT_PRESENT; + } + + // Early reject: requested allocation size is larger that maximum block size for this block vector. + if (size + VMA_DEBUG_MARGIN > m_PreferredBlockSize) + { + return VK_ERROR_OUT_OF_DEVICE_MEMORY; + } + + // 1. Search existing allocations. Try to allocate. 
+ if (m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) + { + // Use only last block. + if (!m_Blocks.empty()) + { + VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back(); + VMA_ASSERT(pCurrBlock); + VkResult res = AllocateFromBlock( + pCurrBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation); + if (res == VK_SUCCESS) + { + VMA_DEBUG_LOG_FORMAT(" Returned from last block #%" PRIu32, pCurrBlock->GetId()); + IncrementallySortBlocks(); + return VK_SUCCESS; + } + } + } + else + { + if (strategy != VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT) // MIN_MEMORY or default + { + const bool isHostVisible = + (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0; + if(isHostVisible) + { + const bool isMappingAllowed = (createInfo.flags & + (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0; + /* + For non-mappable allocations, check blocks that are not mapped first. + For mappable allocations, check blocks that are already mapped first. + This way, having many blocks, we will separate mappable and non-mappable allocations, + hopefully limiting the number of blocks that are mapped, which will help tools like RenderDoc. + */ + for(size_t mappingI = 0; mappingI < 2; ++mappingI) + { + // Forward order in m_Blocks - prefer blocks with smallest amount of free space. + for (size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex) + { + VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex]; + VMA_ASSERT(pCurrBlock); + const bool isBlockMapped = pCurrBlock->GetMappedData() != VMA_NULL; + if((mappingI == 0) == (isMappingAllowed == isBlockMapped)) + { + VkResult res = AllocateFromBlock( + pCurrBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation); + if (res == VK_SUCCESS) + { + VMA_DEBUG_LOG_FORMAT(" Returned from existing block #%" PRIu32, pCurrBlock->GetId()); + IncrementallySortBlocks(); + return VK_SUCCESS; + } + } + } + } + } + else + { + // Forward order in m_Blocks - prefer blocks with smallest amount of free space. + for (size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex) + { + VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex]; + VMA_ASSERT(pCurrBlock); + VkResult res = AllocateFromBlock( + pCurrBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation); + if (res == VK_SUCCESS) + { + VMA_DEBUG_LOG_FORMAT(" Returned from existing block #%" PRIu32, pCurrBlock->GetId()); + IncrementallySortBlocks(); + return VK_SUCCESS; + } + } + } + } + else // VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT + { + // Backward order in m_Blocks - prefer blocks with largest amount of free space. + for (size_t blockIndex = m_Blocks.size(); blockIndex--; ) + { + VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex]; + VMA_ASSERT(pCurrBlock); + VkResult res = AllocateFromBlock(pCurrBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation); + if (res == VK_SUCCESS) + { + VMA_DEBUG_LOG_FORMAT(" Returned from existing block #%" PRIu32, pCurrBlock->GetId()); + IncrementallySortBlocks(); + return VK_SUCCESS; + } + } + } + } + + // 2. Try to create new block. + if (canCreateNewBlock) + { + // Calculate optimal size for new block. 
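// Usage sketch (illustrative only, not part of the diffed VMA source): the block-search
// order shown above is selected per allocation through the strategy flags in
// VmaAllocationCreateInfo ('allocCreateInfo' is assumed to exist).
#if 0
// Prefer allocation speed: blocks are scanned starting from the one with the most free space.
allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT;
// The default behaviour, also chosen by VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT,
// scans from the block with the least free space to keep memory tightly packed.
#endif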
+ VkDeviceSize newBlockSize = m_PreferredBlockSize; + uint32_t newBlockSizeShift = 0; + const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3; + + if (!m_ExplicitBlockSize) + { + // Allocate 1/8, 1/4, 1/2 as first blocks. + const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize(); + for (uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i) + { + const VkDeviceSize smallerNewBlockSize = newBlockSize / 2; + if (smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2) + { + newBlockSize = smallerNewBlockSize; + ++newBlockSizeShift; + } + else + { + break; + } + } + } + + size_t newBlockIndex = 0; + VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ? + CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY; + // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize. + if (!m_ExplicitBlockSize) + { + while (res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX) + { + const VkDeviceSize smallerNewBlockSize = newBlockSize / 2; + if (smallerNewBlockSize >= size) + { + newBlockSize = smallerNewBlockSize; + ++newBlockSizeShift; + res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ? + CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY; + } + else + { + break; + } + } + } + + if (res == VK_SUCCESS) + { + VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex]; + VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size); + + res = AllocateFromBlock( + pBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation); + if (res == VK_SUCCESS) + { + VMA_DEBUG_LOG_FORMAT(" Created new block #%" PRIu32 " Size=%" PRIu64, pBlock->GetId(), newBlockSize); + IncrementallySortBlocks(); + return VK_SUCCESS; + } + + // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment. + return VK_ERROR_OUT_OF_DEVICE_MEMORY; + } + } + + return VK_ERROR_OUT_OF_DEVICE_MEMORY; +} + +void VmaBlockVector::Free(VmaAllocation hAllocation) +{ + VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL; + + bool budgetExceeded = false; + { + const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex); + VmaBudget heapBudget = {}; + m_hAllocator->GetHeapBudgets(&heapBudget, heapIndex, 1); + budgetExceeded = heapBudget.usage >= heapBudget.budget; + } + + // Scope for lock. + { + VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex); + + VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock(); + + if (IsCorruptionDetectionEnabled()) + { + VkResult res = pBlock->ValidateMagicValueAfterAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize()); + VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value."); + } + + if (hAllocation->IsPersistentMap()) + { + pBlock->Unmap(m_hAllocator, 1); + } + + const bool hadEmptyBlockBeforeFree = HasEmptyBlock(); + pBlock->m_pMetadata->Free(hAllocation->GetAllocHandle()); + pBlock->PostFree(m_hAllocator); + VMA_HEAVY_ASSERT(pBlock->Validate()); + + VMA_DEBUG_LOG_FORMAT(" Freed from MemoryTypeIndex=%" PRIu32, m_MemoryTypeIndex); + + const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount; + // pBlock became empty after this deallocation. + if (pBlock->m_pMetadata->IsEmpty()) + { + // Already had empty block. We don't want to have two, so delete this one. + if ((hadEmptyBlockBeforeFree || budgetExceeded) && canDeleteBlock) + { + pBlockToDelete = pBlock; + Remove(pBlock); + } + // else: We now have one empty block - leave it. 
A hysteresis to avoid allocating whole block back and forth. + } + // pBlock didn't become empty, but we have another empty block - find and free that one. + // (This is optional, heuristics.) + else if (hadEmptyBlockBeforeFree && canDeleteBlock) + { + VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back(); + if (pLastBlock->m_pMetadata->IsEmpty()) + { + pBlockToDelete = pLastBlock; + m_Blocks.pop_back(); + } + } + + IncrementallySortBlocks(); + + m_hAllocator->m_Budget.RemoveAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), hAllocation->GetSize()); + hAllocation->Destroy(m_hAllocator); + m_hAllocator->m_AllocationObjectAllocator.Free(hAllocation); + } + + // Destruction of a free block. Deferred until this point, outside of mutex + // lock, for performance reason. + if (pBlockToDelete != VMA_NULL) + { + VMA_DEBUG_LOG_FORMAT(" Deleted empty block #%" PRIu32, pBlockToDelete->GetId()); + pBlockToDelete->Destroy(m_hAllocator); + vma_delete(m_hAllocator, pBlockToDelete); + } +} + +VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const +{ + VkDeviceSize result = 0; + for (size_t i = m_Blocks.size(); i--; ) + { + result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize()); + if (result >= m_PreferredBlockSize) + { + break; + } + } + return result; +} + +void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock) +{ + for (uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex) + { + if (m_Blocks[blockIndex] == pBlock) + { + VmaVectorRemove(m_Blocks, blockIndex); + return; + } + } + VMA_ASSERT(0); +} + +void VmaBlockVector::IncrementallySortBlocks() +{ + if (!m_IncrementalSort) + return; + if (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) + { + // Bubble sort only until first swap. + for (size_t i = 1; i < m_Blocks.size(); ++i) + { + if (m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize()) + { + std::swap(m_Blocks[i - 1], m_Blocks[i]); + return; + } + } + } +} + +void VmaBlockVector::SortByFreeSize() +{ + VMA_SORT(m_Blocks.begin(), m_Blocks.end(), + [](VmaDeviceMemoryBlock* b1, VmaDeviceMemoryBlock* b2) -> bool + { + return b1->m_pMetadata->GetSumFreeSize() < b2->m_pMetadata->GetSumFreeSize(); + }); +} + +VkResult VmaBlockVector::AllocateFromBlock( + VmaDeviceMemoryBlock* pBlock, + VkDeviceSize size, + VkDeviceSize alignment, + VmaAllocationCreateFlags allocFlags, + void* pUserData, + VmaSuballocationType suballocType, + uint32_t strategy, + VmaAllocation* pAllocation) +{ + const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0; + + VmaAllocationRequest currRequest = {}; + if (pBlock->m_pMetadata->CreateAllocationRequest( + size, + alignment, + isUpperAddress, + suballocType, + strategy, + &currRequest)) + { + return CommitAllocationRequest(currRequest, pBlock, alignment, allocFlags, pUserData, suballocType, pAllocation); + } + return VK_ERROR_OUT_OF_DEVICE_MEMORY; +} + +VkResult VmaBlockVector::CommitAllocationRequest( + VmaAllocationRequest& allocRequest, + VmaDeviceMemoryBlock* pBlock, + VkDeviceSize alignment, + VmaAllocationCreateFlags allocFlags, + void* pUserData, + VmaSuballocationType suballocType, + VmaAllocation* pAllocation) +{ + const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0; + const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0; + const bool isMappingAllowed = (allocFlags & + (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0; + + 
pBlock->PostAlloc(m_hAllocator); + // Allocate from pCurrBlock. + if (mapped) + { + VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL); + if (res != VK_SUCCESS) + { + return res; + } + } + + *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(isMappingAllowed); + pBlock->m_pMetadata->Alloc(allocRequest, suballocType, *pAllocation); + (*pAllocation)->InitBlockAllocation( + pBlock, + allocRequest.allocHandle, + alignment, + allocRequest.size, // Not size, as actual allocation size may be larger than requested! + m_MemoryTypeIndex, + suballocType, + mapped); + VMA_HEAVY_ASSERT(pBlock->Validate()); + if (isUserDataString) + (*pAllocation)->SetName(m_hAllocator, (const char*)pUserData); + else + (*pAllocation)->SetUserData(m_hAllocator, pUserData); + m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), allocRequest.size); + +#if VMA_DEBUG_INITIALIZE_ALLOCATIONS + m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED); +#endif + + if (IsCorruptionDetectionEnabled()) + { + VkResult res = pBlock->WriteMagicValueAfterAllocation(m_hAllocator, (*pAllocation)->GetOffset(), allocRequest.size); + VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value."); + } + return VK_SUCCESS; +} + +VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex) +{ + VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO }; + allocInfo.pNext = m_pMemoryAllocateNext; + allocInfo.memoryTypeIndex = m_MemoryTypeIndex; + allocInfo.allocationSize = blockSize; + +#if VMA_BUFFER_DEVICE_ADDRESS + // Every standalone block can potentially contain a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT - always enable the feature. + VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR }; + if (m_hAllocator->m_UseKhrBufferDeviceAddress) + { + allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR; + VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo); + } +#endif // VMA_BUFFER_DEVICE_ADDRESS + +#if VMA_MEMORY_PRIORITY + VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT }; + if (m_hAllocator->m_UseExtMemoryPriority) + { + VMA_ASSERT(m_Priority >= 0.F && m_Priority <= 1.F); + priorityInfo.priority = m_Priority; + VmaPnextChainPushFront(&allocInfo, &priorityInfo); + } +#endif // VMA_MEMORY_PRIORITY + +#if VMA_EXTERNAL_MEMORY + // Attach VkExportMemoryAllocateInfoKHR if necessary. + VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR }; + exportMemoryAllocInfo.handleTypes = m_hAllocator->GetExternalMemoryHandleTypeFlags(m_MemoryTypeIndex); + if (exportMemoryAllocInfo.handleTypes != 0) + { + VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo); + } +#endif // VMA_EXTERNAL_MEMORY + + VkDeviceMemory mem = VK_NULL_HANDLE; + VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem); + if (res < 0) + { + return res; + } + + // New VkDeviceMemory successfully created. + + // Create new Allocation for it. 
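// Usage sketch (illustrative only, not part of the diffed VMA source): the pNext chain
// assembled in CreateBlock() above picks up per-pool settings such as priority. A custom
// pool forwarding these could be created as follows; 'allocator' and 'memTypeIndex' are
// assumed to exist, and priority takes effect only if VK_EXT_memory_priority is enabled
// and VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT was set on the allocator.
#if 0
VmaPoolCreateInfo poolCreateInfo = {};
poolCreateInfo.memoryTypeIndex = memTypeIndex;
poolCreateInfo.blockSize = 64ull * 1024 * 1024; // 0 would mean "use the preferred block size"
poolCreateInfo.minBlockCount = 1;
poolCreateInfo.priority = 1.0f;                 // forwarded via VkMemoryPriorityAllocateInfoEXT

VmaPool pool = VK_NULL_HANDLE;
VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
// ... allocate with VmaAllocationCreateInfo::pool = pool ...
vmaDestroyPool(allocator, pool);
#endif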
+ VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator); + pBlock->Init( + m_hAllocator, + m_hParentPool, + m_MemoryTypeIndex, + mem, + allocInfo.allocationSize, + m_NextBlockId++, + m_Algorithm, + m_BufferImageGranularity); + + m_Blocks.push_back(pBlock); + if (pNewBlockIndex != VMA_NULL) + { + *pNewBlockIndex = m_Blocks.size() - 1; + } + + return VK_SUCCESS; +} + +bool VmaBlockVector::HasEmptyBlock() +{ + for (size_t index = 0, count = m_Blocks.size(); index < count; ++index) + { + VmaDeviceMemoryBlock* const pBlock = m_Blocks[index]; + if (pBlock->m_pMetadata->IsEmpty()) + { + return true; + } + } + return false; +} + +#if VMA_STATS_STRING_ENABLED +void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json) +{ + VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex); + + + json.BeginObject(); + for (size_t i = 0; i < m_Blocks.size(); ++i) + { + json.BeginString(); + json.ContinueString(m_Blocks[i]->GetId()); + json.EndString(); + + json.BeginObject(); + json.WriteString("MapRefCount"); + json.WriteNumber(m_Blocks[i]->GetMapRefCount()); + + m_Blocks[i]->m_pMetadata->PrintDetailedMap(json); + json.EndObject(); + } + json.EndObject(); +} +#endif // VMA_STATS_STRING_ENABLED + +VkResult VmaBlockVector::CheckCorruption() +{ + if (!IsCorruptionDetectionEnabled()) + { + return VK_ERROR_FEATURE_NOT_PRESENT; + } + + VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex); + for (uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex) + { + VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex]; + VMA_ASSERT(pBlock); + VkResult res = pBlock->CheckCorruption(m_hAllocator); + if (res != VK_SUCCESS) + { + return res; + } + } + return VK_SUCCESS; +} + +#endif // _VMA_BLOCK_VECTOR_FUNCTIONS + +#ifndef _VMA_DEFRAGMENTATION_CONTEXT_FUNCTIONS +VmaDefragmentationContext_T::VmaDefragmentationContext_T( + VmaAllocator hAllocator, + const VmaDefragmentationInfo& info) + : m_MaxPassBytes(info.maxBytesPerPass == 0 ? VK_WHOLE_SIZE : info.maxBytesPerPass), + m_MaxPassAllocations(info.maxAllocationsPerPass == 0 ? 
UINT32_MAX : info.maxAllocationsPerPass),
+ m_BreakCallback(info.pfnBreakCallback),
+ m_BreakCallbackUserData(info.pBreakCallbackUserData),
+ m_MoveAllocator(hAllocator->GetAllocationCallbacks()),
+ m_Moves(m_MoveAllocator),
+ m_Algorithm(info.flags & VMA_DEFRAGMENTATION_FLAG_ALGORITHM_MASK)
+{
+ if (info.pool != VMA_NULL)
+ {
+ m_BlockVectorCount = 1;
+ m_PoolBlockVector = &info.pool->m_BlockVector;
+ m_pBlockVectors = &m_PoolBlockVector;
+ m_PoolBlockVector->SetIncrementalSort(false);
+ m_PoolBlockVector->SortByFreeSize();
+ }
+ else
+ {
+ m_BlockVectorCount = hAllocator->GetMemoryTypeCount();
+ m_PoolBlockVector = VMA_NULL;
+ m_pBlockVectors = hAllocator->m_pBlockVectors;
+ for (uint32_t i = 0; i < m_BlockVectorCount; ++i)
+ {
+ VmaBlockVector* vector = m_pBlockVectors[i];
+ if (vector != VMA_NULL)
+ {
+ vector->SetIncrementalSort(false);
+ vector->SortByFreeSize();
+ }
+ }
+ }
+
+ switch (m_Algorithm)
+ {
+ case 0: // Default algorithm
+ m_Algorithm = VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT;
+ m_AlgorithmState = vma_new_array(hAllocator, StateBalanced, m_BlockVectorCount);
+ break;
+ case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT:
+ m_AlgorithmState = vma_new_array(hAllocator, StateBalanced, m_BlockVectorCount);
+ break;
+ case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT:
+ if (hAllocator->GetBufferImageGranularity() > 1)
+ {
+ m_AlgorithmState = vma_new_array(hAllocator, StateExtensive, m_BlockVectorCount);
+ }
+ break;
+ default:
+ ; // Do nothing.
+ }
+}
+
+VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
+{
+ if (m_PoolBlockVector != VMA_NULL)
+ {
+ m_PoolBlockVector->SetIncrementalSort(true);
+ }
+ else
+ {
+ for (uint32_t i = 0; i < m_BlockVectorCount; ++i)
+ {
+ VmaBlockVector* vector = m_pBlockVectors[i];
+ if (vector != VMA_NULL)
+ vector->SetIncrementalSort(true);
+ }
+ }
+
+ if (m_AlgorithmState)
+ {
+ switch (m_Algorithm)
+ {
+ case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT:
+ vma_delete_array(m_MoveAllocator.m_pCallbacks, reinterpret_cast<StateBalanced*>(m_AlgorithmState), m_BlockVectorCount);
+ break;
+ case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT:
+ vma_delete_array(m_MoveAllocator.m_pCallbacks, reinterpret_cast<StateExtensive*>(m_AlgorithmState), m_BlockVectorCount);
+ break;
+ default:
+ VMA_ASSERT(0);
+ }
+ }
+}
+
+VkResult VmaDefragmentationContext_T::DefragmentPassBegin(VmaDefragmentationPassMoveInfo& moveInfo)
+{
+ if (m_PoolBlockVector != VMA_NULL)
+ {
+ VmaMutexLockWrite lock(m_PoolBlockVector->GetMutex(), m_PoolBlockVector->GetAllocator()->m_UseMutex);
+
+ if (m_PoolBlockVector->GetBlockCount() > 1)
+ ComputeDefragmentation(*m_PoolBlockVector, 0);
+ else if (m_PoolBlockVector->GetBlockCount() == 1)
+ ReallocWithinBlock(*m_PoolBlockVector, m_PoolBlockVector->GetBlock(0));
+ }
+ else
+ {
+ for (uint32_t i = 0; i < m_BlockVectorCount; ++i)
+ {
+ if (m_pBlockVectors[i] != VMA_NULL)
+ {
+ VmaMutexLockWrite lock(m_pBlockVectors[i]->GetMutex(), m_pBlockVectors[i]->GetAllocator()->m_UseMutex);
+
+ if (m_pBlockVectors[i]->GetBlockCount() > 1)
+ {
+ if (ComputeDefragmentation(*m_pBlockVectors[i], i))
+ break;
+ }
+ else if (m_pBlockVectors[i]->GetBlockCount() == 1)
+ {
+ if (ReallocWithinBlock(*m_pBlockVectors[i], m_pBlockVectors[i]->GetBlock(0)))
+ break;
+ }
+ }
+ }
+ }
+
+ moveInfo.moveCount = static_cast<uint32_t>(m_Moves.size());
+ if (moveInfo.moveCount > 0)
+ {
+ moveInfo.pMoves = m_Moves.data();
+ return VK_INCOMPLETE;
+ }
+
+ moveInfo.pMoves = VMA_NULL;
+ return VK_SUCCESS;
+}
+
+VkResult VmaDefragmentationContext_T::DefragmentPassEnd(VmaDefragmentationPassMoveInfo& moveInfo)
+{
+ VMA_ASSERT(moveInfo.moveCount > 0 ? moveInfo.pMoves != VMA_NULL : true);
+
+ VkResult result = VK_SUCCESS;
+ VmaStlAllocator<FragmentedBlock> blockAllocator(m_MoveAllocator.m_pCallbacks);
+ VmaVector<FragmentedBlock, VmaStlAllocator<FragmentedBlock>> immovableBlocks(blockAllocator);
+ VmaVector<FragmentedBlock, VmaStlAllocator<FragmentedBlock>> mappedBlocks(blockAllocator);
+
+ VmaAllocator allocator = VMA_NULL;
+ for (uint32_t i = 0; i < moveInfo.moveCount; ++i)
+ {
+ VmaDefragmentationMove& move = moveInfo.pMoves[i];
+ size_t prevCount = 0;
+ size_t currentCount = 0;
+ VkDeviceSize freedBlockSize = 0;
+
+ uint32_t vectorIndex = 0;
+ VmaBlockVector* vector = VMA_NULL;
+ if (m_PoolBlockVector != VMA_NULL)
+ {
+ vector = m_PoolBlockVector;
+ }
+ else
+ {
+ vectorIndex = move.srcAllocation->GetMemoryTypeIndex();
+ vector = m_pBlockVectors[vectorIndex];
+ VMA_ASSERT(vector != VMA_NULL);
+ }
+
+ switch (move.operation)
+ {
+ case VMA_DEFRAGMENTATION_MOVE_OPERATION_COPY:
+ {
+ uint8_t mapCount = move.srcAllocation->SwapBlockAllocation(vector->m_hAllocator, move.dstTmpAllocation);
+ if (mapCount > 0)
+ {
+ allocator = vector->m_hAllocator;
+ VmaDeviceMemoryBlock* newMapBlock = move.srcAllocation->GetBlock();
+ bool notPresent = true;
+ for (FragmentedBlock& block : mappedBlocks)
+ {
+ if (block.block == newMapBlock)
+ {
+ notPresent = false;
+ block.data += mapCount;
+ break;
+ }
+ }
+ if (notPresent)
+ mappedBlocks.push_back({ mapCount, newMapBlock });
+ }
+
+ // Scope for locks, Free has its own lock
+ {
+ VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
+ prevCount = vector->GetBlockCount();
+ freedBlockSize = move.dstTmpAllocation->GetBlock()->m_pMetadata->GetSize();
+ }
+ vector->Free(move.dstTmpAllocation);
+ {
+ VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
+ currentCount = vector->GetBlockCount();
+ }
+
+ result = VK_INCOMPLETE;
+ break;
+ }
+ case VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE:
+ {
+ m_PassStats.bytesMoved -= move.srcAllocation->GetSize();
+ --m_PassStats.allocationsMoved;
+ vector->Free(move.dstTmpAllocation);
+
+ VmaDeviceMemoryBlock* newBlock = move.srcAllocation->GetBlock();
+ bool notPresent = true;
+ for (const FragmentedBlock& block : immovableBlocks)
+ {
+ if (block.block == newBlock)
+ {
+ notPresent = false;
+ break;
+ }
+ }
+ if (notPresent)
+ immovableBlocks.push_back({ vectorIndex, newBlock });
+ break;
+ }
+ case VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY:
+ {
+ m_PassStats.bytesMoved -= move.srcAllocation->GetSize();
+ --m_PassStats.allocationsMoved;
+ // Scope for locks, Free has its own lock
+ {
+ VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
+ prevCount = vector->GetBlockCount();
+ freedBlockSize = move.srcAllocation->GetBlock()->m_pMetadata->GetSize();
+ }
+ vector->Free(move.srcAllocation);
+ {
+ VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
+ currentCount = vector->GetBlockCount();
+ }
+ freedBlockSize *= prevCount - currentCount;
+
+ VkDeviceSize dstBlockSize = SIZE_MAX;
+ {
+ VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
+ dstBlockSize = move.dstTmpAllocation->GetBlock()->m_pMetadata->GetSize();
+ }
+ vector->Free(move.dstTmpAllocation);
+ {
+ VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
+ freedBlockSize += dstBlockSize * (currentCount - vector->GetBlockCount());
+ currentCount = vector->GetBlockCount();
+ }
+
+ result = VK_INCOMPLETE;
+ break;
+ }
+ default:
+ VMA_ASSERT(0);
+
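// Usage sketch (illustrative only, not part of the diffed VMA source): the
// DefragmentPassBegin()/DefragmentPassEnd() pair above implements the pass protocol that the
// public API drives. 'allocator' is assumed to exist; copying each move's data (e.g. via a
// vkCmdCopyBuffer submission) is application-specific and is only indicated by a comment.
#if 0
VmaDefragmentationInfo defragInfo = {};
defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT;

VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
vmaBeginDefragmentation(allocator, &defragInfo, &defragCtx);

for (;;)
{
    VmaDefragmentationPassMoveInfo pass = {};
    VkResult res = vmaBeginDefragmentationPass(allocator, defragCtx, &pass);
    if (res == VK_SUCCESS)
        break; // nothing (more) to move
    // res == VK_INCOMPLETE: copy pass.pMoves[i].srcAllocation -> dstTmpAllocation here,
    // or set pass.pMoves[i].operation to the IGNORE/DESTROY values handled above.
    res = vmaEndDefragmentationPass(allocator, defragCtx, &pass);
    if (res == VK_SUCCESS)
        break;
}

VmaDefragmentationStats stats = {};
vmaEndDefragmentation(allocator, defragCtx, &stats);
#endif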
} + + if (prevCount > currentCount) + { + size_t freedBlocks = prevCount - currentCount; + m_PassStats.deviceMemoryBlocksFreed += static_cast(freedBlocks); + m_PassStats.bytesFreed += freedBlockSize; + } + + if(m_Algorithm == VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT && + m_AlgorithmState != VMA_NULL) + { + // Avoid unnecessary tries to allocate when new free block is available + StateExtensive& state = reinterpret_cast(m_AlgorithmState)[vectorIndex]; + if (state.firstFreeBlock != SIZE_MAX) + { + const size_t diff = prevCount - currentCount; + if (state.firstFreeBlock >= diff) + { + state.firstFreeBlock -= diff; + if (state.firstFreeBlock != 0) + state.firstFreeBlock -= vector->GetBlock(state.firstFreeBlock - 1)->m_pMetadata->IsEmpty(); + } + else + state.firstFreeBlock = 0; + } + } + } + moveInfo.moveCount = 0; + moveInfo.pMoves = VMA_NULL; + m_Moves.clear(); + + // Update stats + m_GlobalStats.allocationsMoved += m_PassStats.allocationsMoved; + m_GlobalStats.bytesFreed += m_PassStats.bytesFreed; + m_GlobalStats.bytesMoved += m_PassStats.bytesMoved; + m_GlobalStats.deviceMemoryBlocksFreed += m_PassStats.deviceMemoryBlocksFreed; + m_PassStats = { 0 }; + + // Move blocks with immovable allocations according to algorithm + if (!immovableBlocks.empty()) + { + do + { + if(m_Algorithm == VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT) + { + if (m_AlgorithmState != VMA_NULL) + { + bool swapped = false; + // Move to the start of free blocks range + for (const FragmentedBlock& block : immovableBlocks) + { + StateExtensive& state = reinterpret_cast(m_AlgorithmState)[block.data]; + if (state.operation != StateExtensive::Operation::Cleanup) + { + VmaBlockVector* vector = m_pBlockVectors[block.data]; + VmaMutexLockWrite lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex); + + for (size_t i = 0, count = vector->GetBlockCount() - m_ImmovableBlockCount; i < count; ++i) + { + if (vector->GetBlock(i) == block.block) + { + std::swap(vector->m_Blocks[i], vector->m_Blocks[vector->GetBlockCount() - ++m_ImmovableBlockCount]); + if (state.firstFreeBlock != SIZE_MAX) + { + if (i + 1 < state.firstFreeBlock) + { + if (state.firstFreeBlock > 1) + std::swap(vector->m_Blocks[i], vector->m_Blocks[--state.firstFreeBlock]); + else + --state.firstFreeBlock; + } + } + swapped = true; + break; + } + } + } + } + if (swapped) + result = VK_INCOMPLETE; + break; + } + } + + // Move to the beginning + for (const FragmentedBlock& block : immovableBlocks) + { + VmaBlockVector* vector = m_pBlockVectors[block.data]; + VmaMutexLockWrite lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex); + + for (size_t i = m_ImmovableBlockCount; i < vector->GetBlockCount(); ++i) + { + if (vector->GetBlock(i) == block.block) + { + std::swap(vector->m_Blocks[i], vector->m_Blocks[m_ImmovableBlockCount++]); + break; + } + } + } + } while (false); + } + + // Bulk-map destination blocks + for (const FragmentedBlock& block : mappedBlocks) + { + VkResult res = block.block->Map(allocator, block.data, VMA_NULL); + VMA_ASSERT(res == VK_SUCCESS); + } + return result; +} + +bool VmaDefragmentationContext_T::ComputeDefragmentation(VmaBlockVector& vector, size_t index) +{ + switch (m_Algorithm) + { + case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FAST_BIT: + return ComputeDefragmentation_Fast(vector); + case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT: + return ComputeDefragmentation_Balanced(vector, index, true); + case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FULL_BIT: + return ComputeDefragmentation_Full(vector); + case 
VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT: + return ComputeDefragmentation_Extensive(vector, index); + default: + VMA_ASSERT(0); + return ComputeDefragmentation_Balanced(vector, index, true); + } +} + +VmaDefragmentationContext_T::MoveAllocationData VmaDefragmentationContext_T::GetMoveData( + VmaAllocHandle handle, VmaBlockMetadata* metadata) +{ + MoveAllocationData moveData; + moveData.move.srcAllocation = (VmaAllocation)metadata->GetAllocationUserData(handle); + moveData.size = moveData.move.srcAllocation->GetSize(); + moveData.alignment = moveData.move.srcAllocation->GetAlignment(); + moveData.type = moveData.move.srcAllocation->GetSuballocationType(); + moveData.flags = 0; + + if (moveData.move.srcAllocation->IsPersistentMap()) + moveData.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT; + if (moveData.move.srcAllocation->IsMappingAllowed()) + moveData.flags |= VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT; + + return moveData; +} + +VmaDefragmentationContext_T::CounterStatus VmaDefragmentationContext_T::CheckCounters(VkDeviceSize bytes) +{ + // Check custom criteria if exists + if (m_BreakCallback && m_BreakCallback(m_BreakCallbackUserData)) + return CounterStatus::End; + + // Ignore allocation if will exceed max size for copy + if (m_PassStats.bytesMoved + bytes > m_MaxPassBytes) + { + if (++m_IgnoredAllocs < MAX_ALLOCS_TO_IGNORE) + return CounterStatus::Ignore; + return CounterStatus::End; + } + + m_IgnoredAllocs = 0; + return CounterStatus::Pass; +} + +bool VmaDefragmentationContext_T::IncrementCounters(VkDeviceSize bytes) +{ + m_PassStats.bytesMoved += bytes; + // Early return when max found + if (++m_PassStats.allocationsMoved >= m_MaxPassAllocations || m_PassStats.bytesMoved >= m_MaxPassBytes) + { + VMA_ASSERT((m_PassStats.allocationsMoved == m_MaxPassAllocations || + m_PassStats.bytesMoved == m_MaxPassBytes) && "Exceeded maximal pass threshold!"); + return true; + } + return false; +} + +bool VmaDefragmentationContext_T::ReallocWithinBlock(VmaBlockVector& vector, VmaDeviceMemoryBlock* block) +{ + VmaBlockMetadata* metadata = block->m_pMetadata; + + for (VmaAllocHandle handle = metadata->GetAllocationListBegin(); + handle != VK_NULL_HANDLE; + handle = metadata->GetNextAllocation(handle)) + { + MoveAllocationData moveData = GetMoveData(handle, metadata); + // Ignore newly created allocations by defragmentation algorithm + if (moveData.move.srcAllocation->GetUserData() == this) + continue; + switch (CheckCounters(moveData.move.srcAllocation->GetSize())) + { + case CounterStatus::Ignore: + continue; + case CounterStatus::End: + return true; + case CounterStatus::Pass: + break; + default: + VMA_ASSERT(0); + } + + VkDeviceSize offset = moveData.move.srcAllocation->GetOffset(); + if (offset != 0 && metadata->GetSumFreeSize() >= moveData.size) + { + VmaAllocationRequest request = {}; + if (metadata->CreateAllocationRequest( + moveData.size, + moveData.alignment, + false, + moveData.type, + VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT, + &request)) + { + if (metadata->GetAllocationOffset(request.allocHandle) < offset) + { + if (vector.CommitAllocationRequest( + request, + block, + moveData.alignment, + moveData.flags, + this, + moveData.type, + &moveData.move.dstTmpAllocation) == VK_SUCCESS) + { + m_Moves.push_back(moveData.move); + if (IncrementCounters(moveData.size)) + return true; + } + } + } + } + } + return false; +} + +bool VmaDefragmentationContext_T::AllocInOtherBlock(size_t start, size_t end, MoveAllocationData& data, 
VmaBlockVector& vector) +{ + for (; start < end; ++start) + { + VmaDeviceMemoryBlock* dstBlock = vector.GetBlock(start); + if (dstBlock->m_pMetadata->GetSumFreeSize() >= data.size) + { + if (vector.AllocateFromBlock(dstBlock, + data.size, + data.alignment, + data.flags, + this, + data.type, + 0, + &data.move.dstTmpAllocation) == VK_SUCCESS) + { + m_Moves.push_back(data.move); + if (IncrementCounters(data.size)) + return true; + break; + } + } + } + return false; +} + +bool VmaDefragmentationContext_T::ComputeDefragmentation_Fast(VmaBlockVector& vector) +{ + // Move only between blocks + + // Go through allocations in last blocks and try to fit them inside first ones + for (size_t i = vector.GetBlockCount() - 1; i > m_ImmovableBlockCount; --i) + { + VmaBlockMetadata* metadata = vector.GetBlock(i)->m_pMetadata; + + for (VmaAllocHandle handle = metadata->GetAllocationListBegin(); + handle != VK_NULL_HANDLE; + handle = metadata->GetNextAllocation(handle)) + { + MoveAllocationData moveData = GetMoveData(handle, metadata); + // Ignore newly created allocations by defragmentation algorithm + if (moveData.move.srcAllocation->GetUserData() == this) + continue; + switch (CheckCounters(moveData.move.srcAllocation->GetSize())) + { + case CounterStatus::Ignore: + continue; + case CounterStatus::End: + return true; + case CounterStatus::Pass: + break; + default: + VMA_ASSERT(0); + } + + // Check all previous blocks for free space + if (AllocInOtherBlock(0, i, moveData, vector)) + return true; + } + } + return false; +} + +bool VmaDefragmentationContext_T::ComputeDefragmentation_Balanced(VmaBlockVector& vector, size_t index, bool update) +{ + // Go over every allocation and try to fit it in previous blocks at lowest offsets, + // if not possible: realloc within single block to minimize offset (exclude offset == 0), + // but only if there are noticeable gaps between them (some heuristic, ex. 
average size of allocation in block) + VMA_ASSERT(m_AlgorithmState != VMA_NULL); + + StateBalanced& vectorState = reinterpret_cast(m_AlgorithmState)[index]; + if (update && vectorState.avgAllocSize == UINT64_MAX) + UpdateVectorStatistics(vector, vectorState); + + const size_t startMoveCount = m_Moves.size(); + VkDeviceSize minimalFreeRegion = vectorState.avgFreeSize / 2; + for (size_t i = vector.GetBlockCount() - 1; i > m_ImmovableBlockCount; --i) + { + VmaDeviceMemoryBlock* block = vector.GetBlock(i); + VmaBlockMetadata* metadata = block->m_pMetadata; + VkDeviceSize prevFreeRegionSize = 0; + + for (VmaAllocHandle handle = metadata->GetAllocationListBegin(); + handle != VK_NULL_HANDLE; + handle = metadata->GetNextAllocation(handle)) + { + MoveAllocationData moveData = GetMoveData(handle, metadata); + // Ignore newly created allocations by defragmentation algorithm + if (moveData.move.srcAllocation->GetUserData() == this) + continue; + switch (CheckCounters(moveData.move.srcAllocation->GetSize())) + { + case CounterStatus::Ignore: + continue; + case CounterStatus::End: + return true; + case CounterStatus::Pass: + break; + default: + VMA_ASSERT(0); + } + + // Check all previous blocks for free space + const size_t prevMoveCount = m_Moves.size(); + if (AllocInOtherBlock(0, i, moveData, vector)) + return true; + + VkDeviceSize nextFreeRegionSize = metadata->GetNextFreeRegionSize(handle); + // If no room found then realloc within block for lower offset + VkDeviceSize offset = moveData.move.srcAllocation->GetOffset(); + if (prevMoveCount == m_Moves.size() && offset != 0 && metadata->GetSumFreeSize() >= moveData.size) + { + // Check if realloc will make sense + if (prevFreeRegionSize >= minimalFreeRegion || + nextFreeRegionSize >= minimalFreeRegion || + moveData.size <= vectorState.avgFreeSize || + moveData.size <= vectorState.avgAllocSize) + { + VmaAllocationRequest request = {}; + if (metadata->CreateAllocationRequest( + moveData.size, + moveData.alignment, + false, + moveData.type, + VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT, + &request)) + { + if (metadata->GetAllocationOffset(request.allocHandle) < offset) + { + if (vector.CommitAllocationRequest( + request, + block, + moveData.alignment, + moveData.flags, + this, + moveData.type, + &moveData.move.dstTmpAllocation) == VK_SUCCESS) + { + m_Moves.push_back(moveData.move); + if (IncrementCounters(moveData.size)) + return true; + } + } + } + } + } + prevFreeRegionSize = nextFreeRegionSize; + } + } + + // No moves performed, update statistics to current vector state + if (startMoveCount == m_Moves.size() && !update) + { + vectorState.avgAllocSize = UINT64_MAX; + return ComputeDefragmentation_Balanced(vector, index, false); + } + return false; +} + +bool VmaDefragmentationContext_T::ComputeDefragmentation_Full(VmaBlockVector& vector) +{ + // Go over every allocation and try to fit it in previous blocks at lowest offsets, + // if not possible: realloc within single block to minimize offset (exclude offset == 0) + + for (size_t i = vector.GetBlockCount() - 1; i > m_ImmovableBlockCount; --i) + { + VmaDeviceMemoryBlock* block = vector.GetBlock(i); + VmaBlockMetadata* metadata = block->m_pMetadata; + + for (VmaAllocHandle handle = metadata->GetAllocationListBegin(); + handle != VK_NULL_HANDLE; + handle = metadata->GetNextAllocation(handle)) + { + MoveAllocationData moveData = GetMoveData(handle, metadata); + // Ignore newly created allocations by defragmentation algorithm + if (moveData.move.srcAllocation->GetUserData() == this) + continue; + switch 
(CheckCounters(moveData.move.srcAllocation->GetSize())) + { + case CounterStatus::Ignore: + continue; + case CounterStatus::End: + return true; + case CounterStatus::Pass: + break; + default: + VMA_ASSERT(0); + } + + // Check all previous blocks for free space + const size_t prevMoveCount = m_Moves.size(); + if (AllocInOtherBlock(0, i, moveData, vector)) + return true; + + // If no room found then realloc within block for lower offset + VkDeviceSize offset = moveData.move.srcAllocation->GetOffset(); + if (prevMoveCount == m_Moves.size() && offset != 0 && metadata->GetSumFreeSize() >= moveData.size) + { + VmaAllocationRequest request = {}; + if (metadata->CreateAllocationRequest( + moveData.size, + moveData.alignment, + false, + moveData.type, + VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT, + &request)) + { + if (metadata->GetAllocationOffset(request.allocHandle) < offset) + { + if (vector.CommitAllocationRequest( + request, + block, + moveData.alignment, + moveData.flags, + this, + moveData.type, + &moveData.move.dstTmpAllocation) == VK_SUCCESS) + { + m_Moves.push_back(moveData.move); + if (IncrementCounters(moveData.size)) + return true; + } + } + } + } + } + } + return false; +} + +bool VmaDefragmentationContext_T::ComputeDefragmentation_Extensive(VmaBlockVector& vector, size_t index) +{ + // First free single block, then populate it to the brim, then free another block, and so on + + // Fallback to previous algorithm since without granularity conflicts it can achieve max packing + if (vector.m_BufferImageGranularity == 1) + return ComputeDefragmentation_Full(vector); + + VMA_ASSERT(m_AlgorithmState != VMA_NULL); + + StateExtensive& vectorState = reinterpret_cast(m_AlgorithmState)[index]; + + bool texturePresent = false; + bool bufferPresent = false; + bool otherPresent = false; + switch (vectorState.operation) + { + case StateExtensive::Operation::Done: // Vector defragmented + return false; + case StateExtensive::Operation::FindFreeBlockBuffer: + case StateExtensive::Operation::FindFreeBlockTexture: + case StateExtensive::Operation::FindFreeBlockAll: + { + // No more blocks to free, just perform fast realloc and move to cleanup + if (vectorState.firstFreeBlock == 0) + { + vectorState.operation = StateExtensive::Operation::Cleanup; + return ComputeDefragmentation_Fast(vector); + } + + // No free blocks, have to clear last one + size_t last = (vectorState.firstFreeBlock == SIZE_MAX ? 
vector.GetBlockCount() : vectorState.firstFreeBlock) - 1; + VmaBlockMetadata* freeMetadata = vector.GetBlock(last)->m_pMetadata; + + const size_t prevMoveCount = m_Moves.size(); + for (VmaAllocHandle handle = freeMetadata->GetAllocationListBegin(); + handle != VK_NULL_HANDLE; + handle = freeMetadata->GetNextAllocation(handle)) + { + MoveAllocationData moveData = GetMoveData(handle, freeMetadata); + switch (CheckCounters(moveData.move.srcAllocation->GetSize())) + { + case CounterStatus::Ignore: + continue; + case CounterStatus::End: + return true; + case CounterStatus::Pass: + break; + default: + VMA_ASSERT(0); + } + + // Check all previous blocks for free space + if (AllocInOtherBlock(0, last, moveData, vector)) + { + // Full clear performed already + if (prevMoveCount != m_Moves.size() && freeMetadata->GetNextAllocation(handle) == VK_NULL_HANDLE) + vectorState.firstFreeBlock = last; + return true; + } + } + + if (prevMoveCount == m_Moves.size()) + { + // Cannot perform full clear, have to move data in other blocks around + if (last != 0) + { + for (size_t i = last - 1; i; --i) + { + if (ReallocWithinBlock(vector, vector.GetBlock(i))) + return true; + } + } + + if (prevMoveCount == m_Moves.size()) + { + // No possible reallocs within blocks, try to move them around fast + return ComputeDefragmentation_Fast(vector); + } + } + else + { + switch (vectorState.operation) + { + case StateExtensive::Operation::FindFreeBlockBuffer: + vectorState.operation = StateExtensive::Operation::MoveBuffers; + break; + case StateExtensive::Operation::FindFreeBlockTexture: + vectorState.operation = StateExtensive::Operation::MoveTextures; + break; + case StateExtensive::Operation::FindFreeBlockAll: + vectorState.operation = StateExtensive::Operation::MoveAll; + break; + default: + VMA_ASSERT(0); + vectorState.operation = StateExtensive::Operation::MoveTextures; + } + vectorState.firstFreeBlock = last; + // Nothing done, block found without reallocations, can perform another reallocs in same pass + return ComputeDefragmentation_Extensive(vector, index); + } + break; + } + case StateExtensive::Operation::MoveTextures: + { + if (MoveDataToFreeBlocks(VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL, vector, + vectorState.firstFreeBlock, texturePresent, bufferPresent, otherPresent)) + { + if (texturePresent) + { + vectorState.operation = StateExtensive::Operation::FindFreeBlockTexture; + return ComputeDefragmentation_Extensive(vector, index); + } + + if (!bufferPresent && !otherPresent) + { + vectorState.operation = StateExtensive::Operation::Cleanup; + break; + } + + // No more textures to move, check buffers + vectorState.operation = StateExtensive::Operation::MoveBuffers; + bufferPresent = false; + otherPresent = false; + } + else + break; + VMA_FALLTHROUGH; // Fallthrough + } + case StateExtensive::Operation::MoveBuffers: + { + if (MoveDataToFreeBlocks(VMA_SUBALLOCATION_TYPE_BUFFER, vector, + vectorState.firstFreeBlock, texturePresent, bufferPresent, otherPresent)) + { + if (bufferPresent) + { + vectorState.operation = StateExtensive::Operation::FindFreeBlockBuffer; + return ComputeDefragmentation_Extensive(vector, index); + } + + if (!otherPresent) + { + vectorState.operation = StateExtensive::Operation::Cleanup; + break; + } + + // No more buffers to move, check all others + vectorState.operation = StateExtensive::Operation::MoveAll; + otherPresent = false; + } + else + break; + VMA_FALLTHROUGH; // Fallthrough + } + case StateExtensive::Operation::MoveAll: + { + if (MoveDataToFreeBlocks(VMA_SUBALLOCATION_TYPE_FREE, 
vector, + vectorState.firstFreeBlock, texturePresent, bufferPresent, otherPresent)) + { + if (otherPresent) + { + vectorState.operation = StateExtensive::Operation::FindFreeBlockBuffer; + return ComputeDefragmentation_Extensive(vector, index); + } + // Everything moved + vectorState.operation = StateExtensive::Operation::Cleanup; + } + break; + } + case StateExtensive::Operation::Cleanup: + // Cleanup is handled below so that other operations may reuse the cleanup code. This case is here to prevent the unhandled enum value warning (C4062). + break; + } + + if (vectorState.operation == StateExtensive::Operation::Cleanup) + { + // All other work done, pack data in blocks even tighter if possible + const size_t prevMoveCount = m_Moves.size(); + for (size_t i = 0; i < vector.GetBlockCount(); ++i) + { + if (ReallocWithinBlock(vector, vector.GetBlock(i))) + return true; + } + + if (prevMoveCount == m_Moves.size()) + vectorState.operation = StateExtensive::Operation::Done; + } + return false; +} + +void VmaDefragmentationContext_T::UpdateVectorStatistics(VmaBlockVector& vector, StateBalanced& state) +{ + size_t allocCount = 0; + size_t freeCount = 0; + state.avgFreeSize = 0; + state.avgAllocSize = 0; + + for (size_t i = 0; i < vector.GetBlockCount(); ++i) + { + VmaBlockMetadata* metadata = vector.GetBlock(i)->m_pMetadata; + + allocCount += metadata->GetAllocationCount(); + freeCount += metadata->GetFreeRegionsCount(); + state.avgFreeSize += metadata->GetSumFreeSize(); + state.avgAllocSize += metadata->GetSize(); + } + + state.avgAllocSize = (state.avgAllocSize - state.avgFreeSize) / allocCount; + state.avgFreeSize /= freeCount; +} + +bool VmaDefragmentationContext_T::MoveDataToFreeBlocks(VmaSuballocationType currentType, + VmaBlockVector& vector, size_t firstFreeBlock, + bool& texturePresent, bool& bufferPresent, bool& otherPresent) +{ + const size_t prevMoveCount = m_Moves.size(); + for (size_t i = firstFreeBlock ; i;) + { + VmaDeviceMemoryBlock* block = vector.GetBlock(--i); + VmaBlockMetadata* metadata = block->m_pMetadata; + + for (VmaAllocHandle handle = metadata->GetAllocationListBegin(); + handle != VK_NULL_HANDLE; + handle = metadata->GetNextAllocation(handle)) + { + MoveAllocationData moveData = GetMoveData(handle, metadata); + // Ignore newly created allocations by defragmentation algorithm + if (moveData.move.srcAllocation->GetUserData() == this) + continue; + switch (CheckCounters(moveData.move.srcAllocation->GetSize())) + { + case CounterStatus::Ignore: + continue; + case CounterStatus::End: + return true; + case CounterStatus::Pass: + break; + default: + VMA_ASSERT(0); + } + + // Move only single type of resources at once + if (!VmaIsBufferImageGranularityConflict(moveData.type, currentType)) + { + // Try to fit allocation into free blocks + if (AllocInOtherBlock(firstFreeBlock, vector.GetBlockCount(), moveData, vector)) + return false; + } + + if (!VmaIsBufferImageGranularityConflict(moveData.type, VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)) + texturePresent = true; + else if (!VmaIsBufferImageGranularityConflict(moveData.type, VMA_SUBALLOCATION_TYPE_BUFFER)) + bufferPresent = true; + else + otherPresent = true; + } + } + return prevMoveCount == m_Moves.size(); +} +#endif // _VMA_DEFRAGMENTATION_CONTEXT_FUNCTIONS + +#ifndef _VMA_POOL_T_FUNCTIONS +VmaPool_T::VmaPool_T( + VmaAllocator hAllocator, + const VmaPoolCreateInfo& createInfo, + VkDeviceSize preferredBlockSize) + : m_BlockVector( + hAllocator, + this, // hParentPool + createInfo.memoryTypeIndex, + createInfo.blockSize != 0 ? 
createInfo.blockSize : preferredBlockSize, + createInfo.minBlockCount, + createInfo.maxBlockCount, + (createInfo.flags& VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(), + createInfo.blockSize != 0, // explicitBlockSize + createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK, // algorithm + createInfo.priority, + VMA_MAX(hAllocator->GetMemoryTypeMinAlignment(createInfo.memoryTypeIndex), createInfo.minAllocationAlignment), + createInfo.pMemoryAllocateNext), + m_Id(0), + m_Name(VMA_NULL) {} + +VmaPool_T::~VmaPool_T() +{ + VMA_ASSERT(m_PrevPool == VMA_NULL && m_NextPool == VMA_NULL); + + const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks(); + VmaFreeString(allocs, m_Name); +} + +void VmaPool_T::SetName(const char* pName) +{ + const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks(); + VmaFreeString(allocs, m_Name); + + if (pName != VMA_NULL) + { + m_Name = VmaCreateStringCopy(allocs, pName); + } + else + { + m_Name = VMA_NULL; + } +} +#endif // _VMA_POOL_T_FUNCTIONS + +#ifndef _VMA_ALLOCATOR_T_FUNCTIONS +VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) : + m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0), + m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0), + m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0), + m_UseKhrBindMemory2((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0), + m_UseExtMemoryBudget((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0), + m_UseAmdDeviceCoherentMemory((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT) != 0), + m_UseKhrBufferDeviceAddress((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT) != 0), + m_UseExtMemoryPriority((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT) != 0), + m_UseKhrMaintenance4((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE4_BIT) != 0), + m_UseKhrMaintenance5((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE5_BIT) != 0), + m_UseKhrExternalMemoryWin32((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_EXTERNAL_MEMORY_WIN32_BIT) != 0), + m_hDevice(pCreateInfo->device), + m_hInstance(pCreateInfo->instance), + m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL), + m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ? + *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks), + m_AllocationObjectAllocator(&m_AllocationCallbacks), + m_HeapSizeLimitMask(0), + m_DeviceMemoryCount(0), + m_PreferredLargeHeapBlockSize(0), + m_PhysicalDevice(pCreateInfo->physicalDevice), + m_GpuDefragmentationMemoryTypeBits(UINT32_MAX), + m_NextPoolId(0), + m_GlobalMemoryTypeBits(UINT32_MAX) +{ + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + m_UseKhrDedicatedAllocation = false; + m_UseKhrBindMemory2 = false; + } + + if(VMA_DEBUG_DETECT_CORRUPTION) + { + // Needs to be multiply of uint32_t size because we are going to write VMA_CORRUPTION_DETECTION_MAGIC_VALUE to it. 
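
// Illustrative usage sketch of the pass-based defragmentation implemented by VmaDefragmentationContext_T
// above, written against the public VMA entry points. It assumes the allocator (and, optionally, a custom
// pool created with vmaCreatePool) already exist, and that the application records the actual GPU copies.
static void RunDefragmentation(VmaAllocator allocator, VmaPool customPool /* VK_NULL_HANDLE = default pools */)
{
    VmaDefragmentationInfo defragInfo = {};
    defragInfo.pool  = customPool;
    defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT; // see ComputeDefragmentation_Balanced()
    defragInfo.maxBytesPerPass       = 0; // 0 = unlimited; these feed the CheckCounters()/IncrementCounters() limits
    defragInfo.maxAllocationsPerPass = 0;

    VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
    if (vmaBeginDefragmentation(allocator, &defragInfo, &defragCtx) != VK_SUCCESS)
        return;

    VmaDefragmentationPassMoveInfo pass = {};
    while (vmaBeginDefragmentationPass(allocator, defragCtx, &pass) == VK_INCOMPLETE)
    {
        for (uint32_t i = 0; i < pass.moveCount; ++i)
        {
            // Copy data from pass.pMoves[i].srcAllocation to pass.pMoves[i].dstTmpAllocation
            // (e.g. vkCmdCopyBuffer/vkCmdCopyImage) and rebind the application's resources.
            // Leaving operation at VMA_DEFRAGMENTATION_MOVE_OPERATION_COPY commits the move;
            // it can be changed to IGNORE or DESTROY per move instead.
        }
        if (vmaEndDefragmentationPass(allocator, defragCtx, &pass) == VK_SUCCESS)
            break; // no further moves possible
    }

    VmaDefragmentationStats stats = {};
    vmaEndDefragmentation(allocator, defragCtx, &stats);
}
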
+ VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0); + } + + VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device && pCreateInfo->instance); + + if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0)) + { +#if !(VMA_DEDICATED_ALLOCATION) + if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros."); + } +#endif +#if !(VMA_BIND_MEMORY2) + if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros."); + } +#endif + } +#if !(VMA_MEMORY_BUDGET) + if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros."); + } +#endif +#if !(VMA_BUFFER_DEVICE_ADDRESS) + if(m_UseKhrBufferDeviceAddress) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro."); + } +#endif +#if VMA_VULKAN_VERSION < 1004000 + VMA_ASSERT(m_VulkanApiVersion < VK_MAKE_VERSION(1, 4, 0) && "vulkanApiVersion >= VK_API_VERSION_1_4 but required Vulkan version is disabled by preprocessor macros."); +#endif +#if VMA_VULKAN_VERSION < 1003000 + VMA_ASSERT(m_VulkanApiVersion < VK_MAKE_VERSION(1, 3, 0) && "vulkanApiVersion >= VK_API_VERSION_1_3 but required Vulkan version is disabled by preprocessor macros."); +#endif +#if VMA_VULKAN_VERSION < 1002000 + VMA_ASSERT(m_VulkanApiVersion < VK_MAKE_VERSION(1, 2, 0) && "vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros."); +#endif +#if VMA_VULKAN_VERSION < 1001000 + VMA_ASSERT(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0) && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros."); +#endif +#if !(VMA_MEMORY_PRIORITY) + if(m_UseExtMemoryPriority) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro."); + } +#endif +#if !(VMA_KHR_MAINTENANCE4) + if(m_UseKhrMaintenance4) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE4_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro."); + } +#endif +#if !(VMA_KHR_MAINTENANCE5) + if(m_UseKhrMaintenance5) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE5_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro."); + } +#endif + +#if !(VMA_EXTERNAL_MEMORY_WIN32) + if(m_UseKhrExternalMemoryWin32) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_EXTERNAL_MEMORY_WIN32_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro."); + } +#endif + + memset(&m_DeviceMemoryCallbacks, 0
,sizeof(m_DeviceMemoryCallbacks)); + memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties)); + memset(&m_MemProps, 0, sizeof(m_MemProps)); + + memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors)); + memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions)); + +#if VMA_EXTERNAL_MEMORY + memset(&m_TypeExternalMemoryHandleTypes, 0, sizeof(m_TypeExternalMemoryHandleTypes)); +#endif // #if VMA_EXTERNAL_MEMORY + + if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL) + { + m_DeviceMemoryCallbacks.pUserData = pCreateInfo->pDeviceMemoryCallbacks->pUserData; + m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate; + m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree; + } + + ImportVulkanFunctions(pCreateInfo->pVulkanFunctions); + + (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties); + (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps); + + VMA_ASSERT(VmaIsPow2(VMA_MIN_ALIGNMENT)); + VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY)); + VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity)); + VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize)); + + m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ? + pCreateInfo->preferredLargeHeapBlockSize : static_cast(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE); + + m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits(); + +#if VMA_EXTERNAL_MEMORY + if(pCreateInfo->pTypeExternalMemoryHandleTypes != VMA_NULL) + { + memcpy(m_TypeExternalMemoryHandleTypes, pCreateInfo->pTypeExternalMemoryHandleTypes, + sizeof(VkExternalMemoryHandleTypeFlagsKHR) * GetMemoryTypeCount()); + } +#endif // #if VMA_EXTERNAL_MEMORY + + if(pCreateInfo->pHeapSizeLimit != VMA_NULL) + { + for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex) + { + const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex]; + if(limit != VK_WHOLE_SIZE) + { + m_HeapSizeLimitMask |= 1U << heapIndex; + if(limit < m_MemProps.memoryHeaps[heapIndex].size) + { + m_MemProps.memoryHeaps[heapIndex].size = limit; + } + } + } + } + + for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + // Create only supported types + if((m_GlobalMemoryTypeBits & (1U << memTypeIndex)) != 0) + { + const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex); + m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)( + this, + VK_NULL_HANDLE, // hParentPool + memTypeIndex, + preferredBlockSize, + 0, + SIZE_MAX, + GetBufferImageGranularity(), + false, // explicitBlockSize + 0, // algorithm + 0.5F, // priority (0.5 is the default per Vulkan spec) + GetMemoryTypeMinAlignment(memTypeIndex), // minAllocationAlignment + VMA_NULL); // // pMemoryAllocateNext + // No need to call m_pBlockVectors[memTypeIndex][blockVectorTypeIndex]->CreateMinBlocks here, + // because minBlockCount is 0. 
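
// Illustrative sketch of the application-side setup that reaches the VmaAllocator_T constructor above.
// It assumes the Vulkan instance/device are already created, that the program links statically with the
// Vulkan loader, and that any extension named in the flags was actually enabled on the device.
static VmaAllocator CreateAllocatorForDevice(VkInstance instance, VkPhysicalDevice physicalDevice, VkDevice device)
{
    VmaAllocatorCreateInfo createInfo = {};
    createInfo.vulkanApiVersion = VK_API_VERSION_1_1;   // selects the core code paths instead of the KHR aliases
    createInfo.instance         = instance;
    createInfo.physicalDevice   = physicalDevice;
    createInfo.device           = device;
    createInfo.flags            = VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT; // only if VK_EXT_memory_budget is enabled

    VmaAllocator allocator = VK_NULL_HANDLE;
    return (vmaCreateAllocator(&createInfo, &allocator) == VK_SUCCESS) ? allocator : VK_NULL_HANDLE;
}
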
+ } + } +} + +VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo) +{ + VkResult res = VK_SUCCESS; + +#if VMA_MEMORY_BUDGET + if(m_UseExtMemoryBudget) + { + UpdateVulkanBudget(); + } +#endif // #if VMA_MEMORY_BUDGET + + return res; +} + +VmaAllocator_T::~VmaAllocator_T() +{ + VMA_ASSERT(m_Pools.IsEmpty()); + + for(size_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; ) + { + vma_delete(this, m_pBlockVectors[memTypeIndex]); + } +} + +void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions) +{ +#if VMA_STATIC_VULKAN_FUNCTIONS == 1 + ImportVulkanFunctions_Static(); +#endif + + if(pVulkanFunctions != VMA_NULL) + { + ImportVulkanFunctions_Custom(pVulkanFunctions); + } + +#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 + ImportVulkanFunctions_Dynamic(); +#endif + + ValidateVulkanFunctions(); +} + +#if VMA_STATIC_VULKAN_FUNCTIONS == 1 + +void VmaAllocator_T::ImportVulkanFunctions_Static() +{ + // Vulkan 1.0 + m_VulkanFunctions.vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)vkGetInstanceProcAddr; + m_VulkanFunctions.vkGetDeviceProcAddr = (PFN_vkGetDeviceProcAddr)vkGetDeviceProcAddr; + m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties; + m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties; + m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory; + m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory; + m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory; + m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory; + m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges; + m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges; + m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory; + m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory; + m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements; + m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements; + m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer; + m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer; + m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage; + m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage; + m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer; + + // Vulkan 1.1 +#if VMA_VULKAN_VERSION >= 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2; + m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2; + m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2; + m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2; + } +#endif + +#if VMA_VULKAN_VERSION >= 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2; + } +#endif + +#if VMA_VULKAN_VERSION >= 1003000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 3, 0)) 
+ { + m_VulkanFunctions.vkGetDeviceBufferMemoryRequirements = (PFN_vkGetDeviceBufferMemoryRequirements)vkGetDeviceBufferMemoryRequirements; + m_VulkanFunctions.vkGetDeviceImageMemoryRequirements = (PFN_vkGetDeviceImageMemoryRequirements)vkGetDeviceImageMemoryRequirements; + } +#endif +} + +#endif // VMA_STATIC_VULKAN_FUNCTIONS == 1 + +void VmaAllocator_T::ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions) +{ + VMA_ASSERT(pVulkanFunctions != VMA_NULL); + +#define VMA_COPY_IF_NOT_NULL(funcName) \ + if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; + + VMA_COPY_IF_NOT_NULL(vkGetInstanceProcAddr); + VMA_COPY_IF_NOT_NULL(vkGetDeviceProcAddr); + VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties); + VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties); + VMA_COPY_IF_NOT_NULL(vkAllocateMemory); + VMA_COPY_IF_NOT_NULL(vkFreeMemory); + VMA_COPY_IF_NOT_NULL(vkMapMemory); + VMA_COPY_IF_NOT_NULL(vkUnmapMemory); + VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges); + VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges); + VMA_COPY_IF_NOT_NULL(vkBindBufferMemory); + VMA_COPY_IF_NOT_NULL(vkBindImageMemory); + VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements); + VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements); + VMA_COPY_IF_NOT_NULL(vkCreateBuffer); + VMA_COPY_IF_NOT_NULL(vkDestroyBuffer); + VMA_COPY_IF_NOT_NULL(vkCreateImage); + VMA_COPY_IF_NOT_NULL(vkDestroyImage); + VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer); + +#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR); + VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR); +#endif + +#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000 + VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR); + VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR); +#endif + +#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000 + VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR); +#endif + +#if VMA_KHR_MAINTENANCE4 || VMA_VULKAN_VERSION >= 1003000 + VMA_COPY_IF_NOT_NULL(vkGetDeviceBufferMemoryRequirements); + VMA_COPY_IF_NOT_NULL(vkGetDeviceImageMemoryRequirements); +#endif +#if VMA_EXTERNAL_MEMORY_WIN32 + VMA_COPY_IF_NOT_NULL(vkGetMemoryWin32HandleKHR); +#endif +#undef VMA_COPY_IF_NOT_NULL +} + +#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 + +void VmaAllocator_T::ImportVulkanFunctions_Dynamic() +{ + VMA_ASSERT(m_VulkanFunctions.vkGetInstanceProcAddr && m_VulkanFunctions.vkGetDeviceProcAddr && + "To use VMA_DYNAMIC_VULKAN_FUNCTIONS in new versions of VMA you now have to pass " + "VmaVulkanFunctions::vkGetInstanceProcAddr and vkGetDeviceProcAddr as VmaAllocatorCreateInfo::pVulkanFunctions. 
" + "Other members can be null."); + +#define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \ + if(m_VulkanFunctions.memberName == VMA_NULL) \ + m_VulkanFunctions.memberName = \ + (functionPointerType)m_VulkanFunctions.vkGetInstanceProcAddr(m_hInstance, functionNameString); +#define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \ + if(m_VulkanFunctions.memberName == VMA_NULL) \ + m_VulkanFunctions.memberName = \ + (functionPointerType)m_VulkanFunctions.vkGetDeviceProcAddr(m_hDevice, functionNameString); + + VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties"); + VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties"); + VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory, "vkAllocateMemory"); + VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory, "vkFreeMemory"); + VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory, "vkMapMemory"); + VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory, "vkUnmapMemory"); + VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges"); + VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges"); + VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory, "vkBindBufferMemory"); + VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory, "vkBindImageMemory"); + VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements"); + VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements"); + VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer, "vkCreateBuffer"); + VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer, "vkDestroyBuffer"); + VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage, "vkCreateImage"); + VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage, "vkDestroyImage"); + VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer"); + +#if VMA_VULKAN_VERSION >= 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2, "vkGetBufferMemoryRequirements2"); + VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2, "vkGetImageMemoryRequirements2"); + VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2, "vkBindBufferMemory2"); + VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2, "vkBindImageMemory2"); + } +#endif + +#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2"); + // Try to fetch the pointer from the other name, based on suspected driver bug - see issue #410. 
+ VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR"); + } + else if(m_UseExtMemoryBudget) + { + VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR"); + // Try to fetch the pointer from the other name, based on suspected driver bug - see issue #410. + VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2"); + } +#endif + +#if VMA_DEDICATED_ALLOCATION + if(m_UseKhrDedicatedAllocation) + { + VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR, "vkGetBufferMemoryRequirements2KHR"); + VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR, "vkGetImageMemoryRequirements2KHR"); + } +#endif + +#if VMA_BIND_MEMORY2 + if(m_UseKhrBindMemory2) + { + VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR, "vkBindBufferMemory2KHR"); + VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR, "vkBindImageMemory2KHR"); + } +#endif // #if VMA_BIND_MEMORY2 + +#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2"); + } + else if(m_UseExtMemoryBudget) + { + VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR"); + } +#endif // #if VMA_MEMORY_BUDGET + +#if VMA_VULKAN_VERSION >= 1003000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 3, 0)) + { + VMA_FETCH_DEVICE_FUNC(vkGetDeviceBufferMemoryRequirements, PFN_vkGetDeviceBufferMemoryRequirements, "vkGetDeviceBufferMemoryRequirements"); + VMA_FETCH_DEVICE_FUNC(vkGetDeviceImageMemoryRequirements, PFN_vkGetDeviceImageMemoryRequirements, "vkGetDeviceImageMemoryRequirements"); + } +#endif +#if VMA_KHR_MAINTENANCE4 + if(m_UseKhrMaintenance4) + { + VMA_FETCH_DEVICE_FUNC(vkGetDeviceBufferMemoryRequirements, PFN_vkGetDeviceBufferMemoryRequirementsKHR, "vkGetDeviceBufferMemoryRequirementsKHR"); + VMA_FETCH_DEVICE_FUNC(vkGetDeviceImageMemoryRequirements, PFN_vkGetDeviceImageMemoryRequirementsKHR, "vkGetDeviceImageMemoryRequirementsKHR"); + } +#endif +#if VMA_EXTERNAL_MEMORY_WIN32 + if (m_UseKhrExternalMemoryWin32) + { + VMA_FETCH_DEVICE_FUNC(vkGetMemoryWin32HandleKHR, PFN_vkGetMemoryWin32HandleKHR, "vkGetMemoryWin32HandleKHR"); + } +#endif +#undef VMA_FETCH_DEVICE_FUNC +#undef VMA_FETCH_INSTANCE_FUNC +} + +#endif // VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 + +void VmaAllocator_T::ValidateVulkanFunctions() const +{ + VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL); + 
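
// Illustrative sketch of the configuration ImportVulkanFunctions_Dynamic() above expects when VMA is
// compiled with VMA_STATIC_VULKAN_FUNCTIONS=0 and VMA_DYNAMIC_VULKAN_FUNCTIONS=1: only the two loader
// entry points are supplied, everything else is fetched through the VMA_FETCH_*_FUNC macros. The entry
// points are assumed to come from the Vulkan loader or a dynamically loaded libvulkan/vulkan-1 module.
static void SetupDynamicVulkanFunctions(PFN_vkGetInstanceProcAddr getInstanceProcAddr,
                                        PFN_vkGetDeviceProcAddr   getDeviceProcAddr,
                                        VmaVulkanFunctions*       outFunctions,
                                        VmaAllocatorCreateInfo*   inoutCreateInfo)
{
    *outFunctions = {};
    outFunctions->vkGetInstanceProcAddr = getInstanceProcAddr;
    outFunctions->vkGetDeviceProcAddr   = getDeviceProcAddr;
    inoutCreateInfo->pVulkanFunctions   = outFunctions; // remaining members may stay null
}
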
VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL); + +#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation) + { + VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL); + } +#endif + +#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2) + { + VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL); + } +#endif + +#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000 + if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL); + } +#endif +#if VMA_EXTERNAL_MEMORY_WIN32 + if (m_UseKhrExternalMemoryWin32) + { + VMA_ASSERT(m_VulkanFunctions.vkGetMemoryWin32HandleKHR != VMA_NULL); + } +#endif + + // Not validating these due to suspected driver bugs with these function + // pointers being null despite correct extension or Vulkan version is enabled. + // See issue #397. Their usage in VMA is optional anyway. + // + // VMA_ASSERT(m_VulkanFunctions.vkGetDeviceBufferMemoryRequirements != VMA_NULL); + // VMA_ASSERT(m_VulkanFunctions.vkGetDeviceImageMemoryRequirements != VMA_NULL); +} + +VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex) +{ + const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex); + const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size; + const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE; + return VmaAlignUp(isSmallHeap ? 
(heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32); +} + +VkResult VmaAllocator_T::AllocateMemoryOfType( + VmaPool pool, + VkDeviceSize size, + VkDeviceSize alignment, + bool dedicatedPreferred, + VkBuffer dedicatedBuffer, + VkImage dedicatedImage, + VmaBufferImageUsage dedicatedBufferImageUsage, + void* pMemoryAllocateNext, + const VmaAllocationCreateInfo& createInfo, + uint32_t memTypeIndex, + VmaSuballocationType suballocType, + VmaDedicatedAllocationList& dedicatedAllocations, + VmaBlockVector& blockVector, + size_t allocationCount, + VmaAllocation* pAllocations) +{ + VMA_ASSERT(pAllocations != VMA_NULL); + VMA_DEBUG_LOG_FORMAT(" AllocateMemory: MemoryTypeIndex=%" PRIu32 ", AllocationCount=%zu, Size=%" PRIu64, memTypeIndex, allocationCount, size); + + VmaAllocationCreateInfo finalCreateInfo = createInfo; + VkResult res = CalcMemTypeParams( + finalCreateInfo, + memTypeIndex, + size, + allocationCount); + if(res != VK_SUCCESS) + return res; + + const void* allocateNextPtr = blockVector.GetAllocationNextPtr(); + if(pMemoryAllocateNext != VMA_NULL) + { + VMA_ASSERT(allocateNextPtr == VMA_NULL && + "You shouldn't create a dedicated allocation with a custom pMemoryAllocateNext if the pNext chain is already provided for this pool."); + allocateNextPtr = pMemoryAllocateNext; + } + + if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0) + { + return AllocateDedicatedMemory( + pool, + size, + suballocType, + dedicatedAllocations, + memTypeIndex, + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0, + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0, + (finalCreateInfo.flags & + (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0, + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT) != 0, + finalCreateInfo.pUserData, + finalCreateInfo.priority, + dedicatedBuffer, + dedicatedImage, + dedicatedBufferImageUsage, + allocationCount, + pAllocations, + allocateNextPtr); + } + + const bool canAllocateDedicated = + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 && + (pool == VK_NULL_HANDLE || !blockVector.HasExplicitBlockSize()); + + if(canAllocateDedicated) + { + // Heuristics: Allocate dedicated memory if requested size if greater than half of preferred block size. + if(size > blockVector.GetPreferredBlockSize() / 2) + { + dedicatedPreferred = true; + } + // Protection against creating each allocation as dedicated when we reach or exceed heap size/budget, + // which can quickly deplete maxMemoryAllocationCount: Don't prefer dedicated allocations when above + // 3/4 of the maximum allocation count. 
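
// Illustrative sketch of how a caller can override the dedicated-allocation heuristics described above
// instead of relying on them. The image handle is assumed to be created already; forcing dedicated memory
// here is an example policy for large attachments, not something the library requires.
static VkResult AllocateRenderTargetMemory(VmaAllocator allocator, VkImage image, VmaAllocation* outAllocation)
{
    VmaAllocationCreateInfo allocInfo = {};
    allocInfo.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE;
    // Skip the block vectors and always use a dedicated VkDeviceMemory; the opposite,
    // VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT, only reuses already allocated blocks.
    allocInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

    VkResult res = vmaAllocateMemoryForImage(allocator, image, &allocInfo, outAllocation, nullptr);
    if (res == VK_SUCCESS)
        res = vmaBindImageMemory(allocator, *outAllocation, image);
    return res;
}
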
+ if(m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount < UINT32_MAX / 4 && + m_DeviceMemoryCount.load() > m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount * 3 / 4) + { + dedicatedPreferred = false; + } + + if(dedicatedPreferred) + { + res = AllocateDedicatedMemory( + pool, + size, + suballocType, + dedicatedAllocations, + memTypeIndex, + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0, + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0, + (finalCreateInfo.flags & + (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0, + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT) != 0, + finalCreateInfo.pUserData, + finalCreateInfo.priority, + dedicatedBuffer, + dedicatedImage, + dedicatedBufferImageUsage, + allocationCount, + pAllocations, + allocateNextPtr); + if(res == VK_SUCCESS) + { + // Succeeded: AllocateDedicatedMemory function already filled pMemory, nothing more to do here. + VMA_DEBUG_LOG(" Allocated as DedicatedMemory"); + return VK_SUCCESS; + } + } + } + + res = blockVector.Allocate( + size, + alignment, + finalCreateInfo, + suballocType, + allocationCount, + pAllocations); + if(res == VK_SUCCESS) + return VK_SUCCESS; + + // Try dedicated memory. + if(canAllocateDedicated && !dedicatedPreferred) + { + res = AllocateDedicatedMemory( + pool, + size, + suballocType, + dedicatedAllocations, + memTypeIndex, + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0, + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0, + (finalCreateInfo.flags & + (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0, + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT) != 0, + finalCreateInfo.pUserData, + finalCreateInfo.priority, + dedicatedBuffer, + dedicatedImage, + dedicatedBufferImageUsage, + allocationCount, + pAllocations, + allocateNextPtr); + if(res == VK_SUCCESS) + { + // Succeeded: AllocateDedicatedMemory function already filled pMemory, nothing more to do here. + VMA_DEBUG_LOG(" Allocated as DedicatedMemory"); + return VK_SUCCESS; + } + } + + // Everything failed: Return error code. 
+ VMA_DEBUG_LOG(" vkAllocateMemory FAILED"); + return res; +} + +VkResult VmaAllocator_T::AllocateDedicatedMemory( + VmaPool pool, + VkDeviceSize size, + VmaSuballocationType suballocType, + VmaDedicatedAllocationList& dedicatedAllocations, + uint32_t memTypeIndex, + bool map, + bool isUserDataString, + bool isMappingAllowed, + bool canAliasMemory, + void* pUserData, + float priority, + VkBuffer dedicatedBuffer, + VkImage dedicatedImage, + VmaBufferImageUsage dedicatedBufferImageUsage, + size_t allocationCount, + VmaAllocation* pAllocations, + const void* pNextChain) +{ + VMA_ASSERT(allocationCount > 0 && pAllocations); + + VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO }; + allocInfo.memoryTypeIndex = memTypeIndex; + allocInfo.allocationSize = size; + allocInfo.pNext = pNextChain; + +#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR }; + if(!canAliasMemory) + { + if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + if(dedicatedBuffer != VK_NULL_HANDLE) + { + VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE); + dedicatedAllocInfo.buffer = dedicatedBuffer; + VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo); + } + else if(dedicatedImage != VK_NULL_HANDLE) + { + dedicatedAllocInfo.image = dedicatedImage; + VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo); + } + } + } +#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + +#if VMA_BUFFER_DEVICE_ADDRESS + VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR }; + if(m_UseKhrBufferDeviceAddress) + { + bool canContainBufferWithDeviceAddress = true; + if(dedicatedBuffer != VK_NULL_HANDLE) + { + canContainBufferWithDeviceAddress = dedicatedBufferImageUsage == VmaBufferImageUsage::UNKNOWN || + dedicatedBufferImageUsage.Contains(VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT); + } + else if(dedicatedImage != VK_NULL_HANDLE) + { + canContainBufferWithDeviceAddress = false; + } + if(canContainBufferWithDeviceAddress) + { + allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR; + VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo); + } + } +#endif // #if VMA_BUFFER_DEVICE_ADDRESS + +#if VMA_MEMORY_PRIORITY + VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT }; + if(m_UseExtMemoryPriority) + { + VMA_ASSERT(priority >= 0.F && priority <= 1.F); + priorityInfo.priority = priority; + VmaPnextChainPushFront(&allocInfo, &priorityInfo); + } +#endif // #if VMA_MEMORY_PRIORITY + +#if VMA_EXTERNAL_MEMORY + // Attach VkExportMemoryAllocateInfoKHR if necessary. 
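
// Illustrative sketch for the VK_EXT_memory_priority handling nearby: when the allocator was created with
// VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT, the value set here is forwarded into
// VkMemoryPriorityAllocateInfoEXT for the resulting VkDeviceMemory. The 1.0f value is an arbitrary example
// choice for a resource that should be last to be demoted under memory pressure.
static void RequestHighMemoryPriority(VmaAllocationCreateInfo* inoutAllocCreateInfo)
{
    inoutAllocCreateInfo->flags   |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; // priority is applied per VkDeviceMemory
    inoutAllocCreateInfo->priority = 1.0f;                                       // valid range 0.0 .. 1.0, 0.5 is the default
}
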
+ VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR }; + exportMemoryAllocInfo.handleTypes = GetExternalMemoryHandleTypeFlags(memTypeIndex); + if(exportMemoryAllocInfo.handleTypes != 0) + { + VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo); + } +#endif // #if VMA_EXTERNAL_MEMORY + + size_t allocIndex = 0; + VkResult res = VK_SUCCESS; + for(; allocIndex < allocationCount; ++allocIndex) + { + res = AllocateDedicatedMemoryPage( + pool, + size, + suballocType, + memTypeIndex, + allocInfo, + map, + isUserDataString, + isMappingAllowed, + pUserData, + pAllocations + allocIndex); + if(res != VK_SUCCESS) + { + break; + } + } + + if(res == VK_SUCCESS) + { + for (allocIndex = 0; allocIndex < allocationCount; ++allocIndex) + { + dedicatedAllocations.Register(pAllocations[allocIndex]); + } + VMA_DEBUG_LOG_FORMAT(" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%" PRIu32, allocationCount, memTypeIndex); + } + else + { + // Free all already created allocations. + while(allocIndex--) + { + VmaAllocation currAlloc = pAllocations[allocIndex]; + VkDeviceMemory hMemory = currAlloc->GetMemory(); + + /* + There is no need to call this, because Vulkan spec allows to skip vkUnmapMemory + before vkFreeMemory. + + if(currAlloc->GetMappedData() != VMA_NULL) + { + (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory); + } + */ + + FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory); + m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize()); + m_AllocationObjectAllocator.Free(currAlloc); + } + + memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount); + } + + return res; +} + +VkResult VmaAllocator_T::AllocateDedicatedMemoryPage( + VmaPool pool, + VkDeviceSize size, + VmaSuballocationType suballocType, + uint32_t memTypeIndex, + const VkMemoryAllocateInfo& allocInfo, + bool map, + bool isUserDataString, + bool isMappingAllowed, + void* pUserData, + VmaAllocation* pAllocation) +{ + VkDeviceMemory hMemory = VK_NULL_HANDLE; + VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory); + if(res < 0) + { + VMA_DEBUG_LOG(" vkAllocateMemory FAILED"); + return res; + } + + void* pMappedData = VMA_NULL; + if(map) + { + res = (*m_VulkanFunctions.vkMapMemory)( + m_hDevice, + hMemory, + 0, + VK_WHOLE_SIZE, + 0, + &pMappedData); + if(res < 0) + { + VMA_DEBUG_LOG(" vkMapMemory FAILED"); + FreeVulkanMemory(memTypeIndex, size, hMemory); + return res; + } + } + + *pAllocation = m_AllocationObjectAllocator.Allocate(isMappingAllowed); + (*pAllocation)->InitDedicatedAllocation(this, pool, memTypeIndex, hMemory, suballocType, pMappedData, size); + if (isUserDataString) + (*pAllocation)->SetName(this, (const char*)pUserData); + else + (*pAllocation)->SetUserData(this, pUserData); + m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size); + +#if VMA_DEBUG_INITIALIZE_ALLOCATIONS + FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED); +#endif + + return VK_SUCCESS; +} + +void VmaAllocator_T::GetBufferMemoryRequirements( + VkBuffer hBuffer, + VkMemoryRequirements& memReq, + bool& requiresDedicatedAllocation, + bool& prefersDedicatedAllocation) const +{ +#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR }; + memReqInfo.buffer = hBuffer; + + VkMemoryDedicatedRequirementsKHR 
memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR }; + + VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR }; + VmaPnextChainPushFront(&memReq2, &memDedicatedReq); + + (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2); + + memReq = memReq2.memoryRequirements; + requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE); + prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE); + } + else +#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + { + (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq); + requiresDedicatedAllocation = false; + prefersDedicatedAllocation = false; + } +} + +void VmaAllocator_T::GetImageMemoryRequirements( + VkImage hImage, + VkMemoryRequirements& memReq, + bool& requiresDedicatedAllocation, + bool& prefersDedicatedAllocation) const +{ +#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR }; + memReqInfo.image = hImage; + + VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR }; + + VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR }; + VmaPnextChainPushFront(&memReq2, &memDedicatedReq); + + (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2); + + memReq = memReq2.memoryRequirements; + requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE); + prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE); + } + else +#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + { + (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq); + requiresDedicatedAllocation = false; + prefersDedicatedAllocation = false; + } +} + +VkResult VmaAllocator_T::FindMemoryTypeIndex( + uint32_t memoryTypeBits, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + VmaBufferImageUsage bufImgUsage, + uint32_t* pMemoryTypeIndex) const +{ + memoryTypeBits &= GetGlobalMemoryTypeBits(); + + if(pAllocationCreateInfo->memoryTypeBits != 0) + { + memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits; + } + + VkMemoryPropertyFlags requiredFlags = 0; + VkMemoryPropertyFlags preferredFlags = 0; + VkMemoryPropertyFlags notPreferredFlags = 0; + if(!FindMemoryPreferences( + IsIntegratedGpu(), + *pAllocationCreateInfo, + bufImgUsage, + requiredFlags, preferredFlags, notPreferredFlags)) + { + return VK_ERROR_FEATURE_NOT_PRESENT; + } + + *pMemoryTypeIndex = UINT32_MAX; + uint32_t minCost = UINT32_MAX; + for(uint32_t memTypeIndex = 0, memTypeBit = 1; + memTypeIndex < GetMemoryTypeCount(); + ++memTypeIndex, memTypeBit <<= 1) + { + // This memory type is acceptable according to memoryTypeBits bitmask. + if((memTypeBit & memoryTypeBits) != 0) + { + const VkMemoryPropertyFlags currFlags = + m_MemProps.memoryTypes[memTypeIndex].propertyFlags; + // This memory type contains requiredFlags. + if((requiredFlags & ~currFlags) == 0) + { + // Calculate cost as number of bits from preferredFlags not present in this memory type. + uint32_t currCost = VMA_COUNT_BITS_SET(preferredFlags & ~currFlags) + + VMA_COUNT_BITS_SET(currFlags & notPreferredFlags); + // Remember memory type with lowest cost. 
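
// Illustrative sketch of reaching the cost-based search in FindMemoryTypeIndex() above from the public API,
// e.g. to pre-select VmaPoolCreateInfo::memoryTypeIndex. Buffer size and usage are placeholder values.
static uint32_t PickMemoryTypeForStagingPool(VmaAllocator allocator)
{
    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.size  = 65536;
    bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocInfo = {};
    allocInfo.usage = VMA_MEMORY_USAGE_AUTO;
    allocInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT;

    uint32_t memTypeIndex = UINT32_MAX;
    if (vmaFindMemoryTypeIndexForBufferInfo(allocator, &bufInfo, &allocInfo, &memTypeIndex) != VK_SUCCESS)
        return UINT32_MAX; // no memory type satisfies the required flags
    return memTypeIndex;
}
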
+ if(currCost < minCost) + { + *pMemoryTypeIndex = memTypeIndex; + if(currCost == 0) + { + return VK_SUCCESS; + } + minCost = currCost; + } + } + } + } + return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT; +} + +VkResult VmaAllocator_T::CalcMemTypeParams( + VmaAllocationCreateInfo& inoutCreateInfo, + uint32_t memTypeIndex, + VkDeviceSize size, + size_t allocationCount) +{ + // If memory type is not HOST_VISIBLE, disable MAPPED. + if((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 && + (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0) + { + inoutCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT; + } + + if((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 && + (inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0) + { + const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex); + VmaBudget heapBudget = {}; + GetHeapBudgets(&heapBudget, heapIndex, 1); + if(heapBudget.usage + size * allocationCount > heapBudget.budget) + { + return VK_ERROR_OUT_OF_DEVICE_MEMORY; + } + } + return VK_SUCCESS; +} + +VkResult VmaAllocator_T::CalcAllocationParams( + VmaAllocationCreateInfo& inoutCreateInfo, + bool dedicatedRequired) +{ + VMA_ASSERT((inoutCreateInfo.flags & + (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != + (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT) && + "Specifying both flags VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT and VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT is incorrect."); + VMA_ASSERT((((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT) == 0 || + (inoutCreateInfo.flags & (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0)) && + "Specifying VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT requires also VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT."); + if(inoutCreateInfo.usage == VMA_MEMORY_USAGE_AUTO || inoutCreateInfo.usage == VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE || inoutCreateInfo.usage == VMA_MEMORY_USAGE_AUTO_PREFER_HOST) + { + if((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0) + { + VMA_ASSERT((inoutCreateInfo.flags & (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0 && + "When using VMA_ALLOCATION_CREATE_MAPPED_BIT and usage = VMA_MEMORY_USAGE_AUTO*, you must also specify VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT."); + } + } + + // If memory is lazily allocated, it should be always dedicated. 
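
// Illustrative sketch of allocation flags that satisfy the assertions in CalcAllocationParams() above when
// VMA_MEMORY_USAGE_AUTO* is used: a mapped upload buffer declares sequential-write host access, while a
// readback buffer would use HOST_ACCESS_RANDOM instead (the two bits are mutually exclusive).
static VmaAllocationCreateInfo MakeUploadAllocCreateInfo()
{
    VmaAllocationCreateInfo info = {};
    info.usage = VMA_MEMORY_USAGE_AUTO;
    info.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT |
                 VMA_ALLOCATION_CREATE_MAPPED_BIT; // MAPPED alone would trip the assert with AUTO usage
    return info;
}
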
+ if(dedicatedRequired || + inoutCreateInfo.usage == VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED) + { + inoutCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; + } + + if(inoutCreateInfo.pool != VK_NULL_HANDLE) + { + if(inoutCreateInfo.pool->m_BlockVector.HasExplicitBlockSize() && + (inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0) + { + VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT while current custom pool doesn't support dedicated allocations."); + return VK_ERROR_FEATURE_NOT_PRESENT; + } + inoutCreateInfo.priority = inoutCreateInfo.pool->m_BlockVector.GetPriority(); + } + + if((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 && + (inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0) + { + VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense."); + return VK_ERROR_FEATURE_NOT_PRESENT; + } + +#if VMA_DEBUG_ALWAYS_DEDICATED_MEMORY + if((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0) + { + inoutCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; + } +#endif + + // Non-auto USAGE values imply HOST_ACCESS flags. + // And so does VMA_MEMORY_USAGE_UNKNOWN because it is used with custom pools. + // Which specific flag is used doesn't matter. They change things only when used with VMA_MEMORY_USAGE_AUTO*. + // Otherwise they just protect from assert on mapping. + if(inoutCreateInfo.usage != VMA_MEMORY_USAGE_AUTO && + inoutCreateInfo.usage != VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE && + inoutCreateInfo.usage != VMA_MEMORY_USAGE_AUTO_PREFER_HOST) + { + if((inoutCreateInfo.flags & (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) == 0) + { + inoutCreateInfo.flags |= VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT; + } + } + + return VK_SUCCESS; +} + +VkResult VmaAllocator_T::CreateBuffer( + const VkBufferCreateInfo* pBufferCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + void* pMemoryAllocateNext, + VkBuffer* pBuffer, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) +{ + *pBuffer = VK_NULL_HANDLE; + *pAllocation = VK_NULL_HANDLE; + + if (pBufferCreateInfo->size == 0) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + if ((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 && + !m_UseKhrBufferDeviceAddress) + { + VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used."); + return VK_ERROR_INITIALIZATION_FAILED; + } + + // 1. Create VkBuffer. + VkResult res = (*m_VulkanFunctions.vkCreateBuffer)(m_hDevice, pBufferCreateInfo, + GetAllocationCallbacks(), pBuffer); + if (res >= 0) + { + // 2. vkGetBufferMemoryRequirements. + VkMemoryRequirements vkMemReq = {}; + bool requiresDedicatedAllocation = false; + bool prefersDedicatedAllocation = false; + GetBufferMemoryRequirements(*pBuffer, vkMemReq, + requiresDedicatedAllocation, prefersDedicatedAllocation); + + // 3. Allocate memory using allocator. 
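
// Illustrative sketch of the public entry point that drives the create -> allocate -> bind sequence
// implemented in VmaAllocator_T::CreateBuffer() above. Size and usage are placeholder values; the
// buffer/allocation pair is released later with vmaDestroyBuffer().
static VkResult CreateDeviceLocalVertexBuffer(VmaAllocator allocator, VkDeviceSize size,
                                              VkBuffer* outBuffer, VmaAllocation* outAllocation)
{
    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.size  = size;
    bufInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocInfo = {};
    allocInfo.usage = VMA_MEMORY_USAGE_AUTO;

    return vmaCreateBuffer(allocator, &bufInfo, &allocInfo, outBuffer, outAllocation, nullptr);
}
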
+ res = AllocateMemory( + vkMemReq, + requiresDedicatedAllocation, + prefersDedicatedAllocation, + *pBuffer, // dedicatedBuffer + VK_NULL_HANDLE, // dedicatedImage + VmaBufferImageUsage(*pBufferCreateInfo, m_UseKhrMaintenance5), // dedicatedBufferImageUsage + pMemoryAllocateNext, + *pAllocationCreateInfo, + VMA_SUBALLOCATION_TYPE_BUFFER, + 1, // allocationCount + pAllocation); + if (res >= 0) + { + // 3. Bind buffer with memory. + if ((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0) + { + res = BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL); + } + if (res >= 0) + { + // All steps succeeded. +#if VMA_STATS_STRING_ENABLED + (*pAllocation)->InitBufferUsage(*pBufferCreateInfo, m_UseKhrMaintenance5); +#endif + if (pAllocationInfo != VMA_NULL) + { + GetAllocationInfo(*pAllocation, pAllocationInfo); + } + + return VK_SUCCESS; + } + FreeMemory(1, pAllocation); + *pAllocation = VK_NULL_HANDLE; + (*m_VulkanFunctions.vkDestroyBuffer)(m_hDevice, *pBuffer, GetAllocationCallbacks()); + *pBuffer = VK_NULL_HANDLE; + return res; + } + (*m_VulkanFunctions.vkDestroyBuffer)(m_hDevice, *pBuffer, GetAllocationCallbacks()); + *pBuffer = VK_NULL_HANDLE; + return res; + } + return res; +} + +VkResult VmaAllocator_T::CreateImage( + const VkImageCreateInfo* pImageCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + void* pMemoryAllocateNext, + VkImage* pImage, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) +{ + *pImage = VK_NULL_HANDLE; + *pAllocation = VK_NULL_HANDLE; + + if (pImageCreateInfo->extent.width == 0 || + pImageCreateInfo->extent.height == 0 || + pImageCreateInfo->extent.depth == 0 || + pImageCreateInfo->mipLevels == 0 || + pImageCreateInfo->arrayLayers == 0) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + + // 1. Create VkImage. + VkResult res = (*m_VulkanFunctions.vkCreateImage)(m_hDevice, pImageCreateInfo, + GetAllocationCallbacks(), pImage); + if (res == VK_SUCCESS) + { + VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ? + VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL : + VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR; + + // 2. Allocate memory using allocator. + VkMemoryRequirements vkMemReq = {}; + bool requiresDedicatedAllocation = false; + bool prefersDedicatedAllocation = false; + GetImageMemoryRequirements(*pImage, vkMemReq, + requiresDedicatedAllocation, prefersDedicatedAllocation); + + res = AllocateMemory( + vkMemReq, + requiresDedicatedAllocation, + prefersDedicatedAllocation, + VK_NULL_HANDLE, // dedicatedBuffer + *pImage, // dedicatedImage + VmaBufferImageUsage(*pImageCreateInfo), // dedicatedBufferImageUsage + pMemoryAllocateNext, + *pAllocationCreateInfo, + suballocType, + 1, // allocationCount + pAllocation); + if (res == VK_SUCCESS) + { + // 3. Bind image with memory. + if ((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0) + { + res = BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL); + } + if (res == VK_SUCCESS) + { + // All steps succeeded. 
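+ // The image's usage is recorded for the statistics/JSON dump (only when
+ // VMA_STATS_STRING_ENABLED) and the optional pAllocationInfo out-parameter is
+ // filled before returning, mirroring the buffer path above.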
+#if VMA_STATS_STRING_ENABLED + (*pAllocation)->InitImageUsage(*pImageCreateInfo); +#endif + if (pAllocationInfo != VMA_NULL) + { + GetAllocationInfo(*pAllocation, pAllocationInfo); + } + + return VK_SUCCESS; + } + FreeMemory(1, pAllocation); + *pAllocation = VK_NULL_HANDLE; + (*m_VulkanFunctions.vkDestroyImage)(m_hDevice, *pImage, GetAllocationCallbacks()); + *pImage = VK_NULL_HANDLE; + return res; + } + (*m_VulkanFunctions.vkDestroyImage)(m_hDevice, *pImage, GetAllocationCallbacks()); + *pImage = VK_NULL_HANDLE; + return res; + } + return res; +} + +VkResult VmaAllocator_T::AllocateMemory( + VkMemoryRequirements vkMemReq, + bool requiresDedicatedAllocation, + bool prefersDedicatedAllocation, + VkBuffer dedicatedBuffer, + VkImage dedicatedImage, + VmaBufferImageUsage dedicatedBufferImageUsage, + // pNext chain for VkMemoryAllocateInfo. When used, must specify requiresDedicatedAllocation = true. + void* pMemoryAllocateNext, + const VmaAllocationCreateInfo& createInfo, + VmaSuballocationType suballocType, + size_t allocationCount, + VmaAllocation* pAllocations) +{ + memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount); + + vkMemReq.alignment = VMA_MAX(vkMemReq.alignment, createInfo.minAlignment); + VMA_ASSERT(VmaIsPow2(vkMemReq.alignment)); + + // If using custom pNext chain for VkMemoryAllocateInfo, must require dedicated allocations. + if(pMemoryAllocateNext != VMA_NULL) + { + requiresDedicatedAllocation = true; + } + + if(vkMemReq.size == 0) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + + VmaAllocationCreateInfo createInfoFinal = createInfo; + VkResult res = CalcAllocationParams(createInfoFinal, requiresDedicatedAllocation); + if(res != VK_SUCCESS) + return res; + + if(createInfoFinal.pool != VK_NULL_HANDLE) + { + VmaBlockVector& blockVector = createInfoFinal.pool->m_BlockVector; + return AllocateMemoryOfType( + createInfoFinal.pool, + vkMemReq.size, + vkMemReq.alignment, + prefersDedicatedAllocation, + dedicatedBuffer, + dedicatedImage, + dedicatedBufferImageUsage, + pMemoryAllocateNext, + createInfoFinal, + blockVector.GetMemoryTypeIndex(), + suballocType, + createInfoFinal.pool->m_DedicatedAllocations, + blockVector, + allocationCount, + pAllocations); + } + + // Bit mask of memory Vulkan types acceptable for this allocation. + uint32_t memoryTypeBits = vkMemReq.memoryTypeBits; + uint32_t memTypeIndex = UINT32_MAX; + res = FindMemoryTypeIndex(memoryTypeBits, &createInfoFinal, dedicatedBufferImageUsage, &memTypeIndex); + // Can't find any single memory type matching requirements. res is VK_ERROR_FEATURE_NOT_PRESENT. + if(res != VK_SUCCESS) + return res; + + do + { + VmaBlockVector* blockVector = m_pBlockVectors[memTypeIndex]; + VMA_ASSERT(blockVector && "Trying to use unsupported memory type!"); + res = AllocateMemoryOfType( + VK_NULL_HANDLE, + vkMemReq.size, + vkMemReq.alignment, + requiresDedicatedAllocation || prefersDedicatedAllocation, + dedicatedBuffer, + dedicatedImage, + dedicatedBufferImageUsage, + pMemoryAllocateNext, + createInfoFinal, + memTypeIndex, + suballocType, + m_DedicatedAllocations[memTypeIndex], + *blockVector, + allocationCount, + pAllocations); + // Allocation succeeded + if(res == VK_SUCCESS) + return VK_SUCCESS; + + // Remove old memTypeIndex from list of possibilities. + memoryTypeBits &= ~(1U << memTypeIndex); + // Find alternative memTypeIndex. + res = FindMemoryTypeIndex(memoryTypeBits, &createInfoFinal, dedicatedBufferImageUsage, &memTypeIndex); + } while(res == VK_SUCCESS); + + // No other matching memory type index could be found. 
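+ // At this point every memory type accepted by FindMemoryTypeIndex() has been tried
+ // and each allocation attempt failed, so the loop has run out of candidates.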
+ // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once. + return VK_ERROR_OUT_OF_DEVICE_MEMORY; +} + +void VmaAllocator_T::FreeMemory( + size_t allocationCount, + const VmaAllocation* pAllocations) +{ + VMA_ASSERT(pAllocations); + + for(size_t allocIndex = allocationCount; allocIndex--; ) + { + VmaAllocation allocation = pAllocations[allocIndex]; + + if(allocation != VK_NULL_HANDLE) + { +#if VMA_DEBUG_INITIALIZE_ALLOCATIONS + FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED); +#endif + + switch(allocation->GetType()) + { + case VmaAllocation_T::ALLOCATION_TYPE_BLOCK: + { + VmaBlockVector* pBlockVector = VMA_NULL; + VmaPool hPool = allocation->GetParentPool(); + if(hPool != VK_NULL_HANDLE) + { + pBlockVector = &hPool->m_BlockVector; + } + else + { + const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex(); + pBlockVector = m_pBlockVectors[memTypeIndex]; + VMA_ASSERT(pBlockVector && "Trying to free memory of unsupported type!"); + } + pBlockVector->Free(allocation); + } + break; + case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED: + FreeDedicatedMemory(allocation); + break; + default: + VMA_ASSERT(0); + } + } + } +} + +void VmaAllocator_T::CalculateStatistics(VmaTotalStatistics* pStats) +{ + // Initialize. + VmaClearDetailedStatistics(pStats->total); + for(uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) + VmaClearDetailedStatistics(pStats->memoryType[i]); + for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) + VmaClearDetailedStatistics(pStats->memoryHeap[i]); + + // Process default pools. + for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex]; + if (pBlockVector != VMA_NULL) + pBlockVector->AddDetailedStatistics(pStats->memoryType[memTypeIndex]); + } + + // Process custom pools. + { + VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex); + for(VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool)) + { + VmaBlockVector& blockVector = pool->m_BlockVector; + const uint32_t memTypeIndex = blockVector.GetMemoryTypeIndex(); + blockVector.AddDetailedStatistics(pStats->memoryType[memTypeIndex]); + pool->m_DedicatedAllocations.AddDetailedStatistics(pStats->memoryType[memTypeIndex]); + } + } + + // Process dedicated allocations. + for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + m_DedicatedAllocations[memTypeIndex].AddDetailedStatistics(pStats->memoryType[memTypeIndex]); + } + + // Sum from memory types to memory heaps. + for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + const uint32_t memHeapIndex = m_MemProps.memoryTypes[memTypeIndex].heapIndex; + VmaAddDetailedStatistics(pStats->memoryHeap[memHeapIndex], pStats->memoryType[memTypeIndex]); + } + + // Sum from memory heaps to total. 
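+ // The per-type totals were folded into their owning heaps above; the loop below
+ // folds the heaps into the grand total. A hypothetical usage sketch of the public
+ // wrapper (illustrative only):
+ //   VmaTotalStatistics stats;
+ //   vmaCalculateStatistics(allocator, &stats);
+ //   // stats.total.statistics.allocationBytes == bytes in live allocations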
+ for(uint32_t memHeapIndex = 0; memHeapIndex < GetMemoryHeapCount(); ++memHeapIndex) + VmaAddDetailedStatistics(pStats->total, pStats->memoryHeap[memHeapIndex]); + + VMA_ASSERT(pStats->total.statistics.allocationCount == 0 || + pStats->total.allocationSizeMax >= pStats->total.allocationSizeMin); + VMA_ASSERT(pStats->total.unusedRangeCount == 0 || + pStats->total.unusedRangeSizeMax >= pStats->total.unusedRangeSizeMin); +} + +void VmaAllocator_T::GetHeapBudgets(VmaBudget* outBudgets, uint32_t firstHeap, uint32_t heapCount) +{ +#if VMA_MEMORY_BUDGET + if(m_UseExtMemoryBudget) + { + if(m_Budget.m_OperationsSinceBudgetFetch < 30) + { + VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex); + for(uint32_t i = 0; i < heapCount; ++i, ++outBudgets) + { + const uint32_t heapIndex = firstHeap + i; + + outBudgets->statistics.blockCount = m_Budget.m_BlockCount[heapIndex]; + outBudgets->statistics.allocationCount = m_Budget.m_AllocationCount[heapIndex]; + outBudgets->statistics.blockBytes = m_Budget.m_BlockBytes[heapIndex]; + outBudgets->statistics.allocationBytes = m_Budget.m_AllocationBytes[heapIndex]; + + if(m_Budget.m_VulkanUsage[heapIndex] + outBudgets->statistics.blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex]) + { + outBudgets->usage = m_Budget.m_VulkanUsage[heapIndex] + + outBudgets->statistics.blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex]; + } + else + { + outBudgets->usage = 0; + } + + // Have to take MIN with heap size because explicit HeapSizeLimit is included in it. + outBudgets->budget = VMA_MIN( + m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size); + } + } + else + { + UpdateVulkanBudget(); // Outside of mutex lock + GetHeapBudgets(outBudgets, firstHeap, heapCount); // Recursion + } + } + else +#endif + { + for(uint32_t i = 0; i < heapCount; ++i, ++outBudgets) + { + const uint32_t heapIndex = firstHeap + i; + + outBudgets->statistics.blockCount = m_Budget.m_BlockCount[heapIndex]; + outBudgets->statistics.allocationCount = m_Budget.m_AllocationCount[heapIndex]; + outBudgets->statistics.blockBytes = m_Budget.m_BlockBytes[heapIndex]; + outBudgets->statistics.allocationBytes = m_Budget.m_AllocationBytes[heapIndex]; + + outBudgets->usage = outBudgets->statistics.blockBytes; + outBudgets->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristics. 
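+ // Without VK_EXT_memory_budget there is no visibility into other processes, so
+ // "usage" falls back to the bytes this allocator has itself allocated from the heap
+ // and "budget" to a conservative 80% of the heap size. Hypothetical caller-side
+ // check (illustrative only):
+ //   VmaBudget budgets[VK_MAX_MEMORY_HEAPS];
+ //   vmaGetHeapBudgets(allocator, budgets);
+ //   if(budgets[h].usage + newSize > budgets[h].budget) { /* defer the allocation */ }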
+ } + } +} + +void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo) +{ + pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex(); + pAllocationInfo->deviceMemory = hAllocation->GetMemory(); + pAllocationInfo->offset = hAllocation->GetOffset(); + pAllocationInfo->size = hAllocation->GetSize(); + pAllocationInfo->pMappedData = hAllocation->GetMappedData(); + pAllocationInfo->pUserData = hAllocation->GetUserData(); + pAllocationInfo->pName = hAllocation->GetName(); +} + +void VmaAllocator_T::GetAllocationInfo2(VmaAllocation hAllocation, VmaAllocationInfo2* pAllocationInfo) +{ + GetAllocationInfo(hAllocation, &pAllocationInfo->allocationInfo); + + switch (hAllocation->GetType()) + { + case VmaAllocation_T::ALLOCATION_TYPE_BLOCK: + pAllocationInfo->blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize(); + pAllocationInfo->dedicatedMemory = VK_FALSE; + break; + case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED: + pAllocationInfo->blockSize = pAllocationInfo->allocationInfo.size; + pAllocationInfo->dedicatedMemory = VK_TRUE; + break; + default: + VMA_ASSERT(0); + } +} + +VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool) +{ + VMA_DEBUG_LOG_FORMAT(" CreatePool: MemoryTypeIndex=%" PRIu32 ", flags=%" PRIu32, pCreateInfo->memoryTypeIndex, pCreateInfo->flags); + + VmaPoolCreateInfo newCreateInfo = *pCreateInfo; + + // Protection against uninitialized new structure member. If garbage data are left there, this pointer dereference would crash. + if(pCreateInfo->pMemoryAllocateNext) + { + VMA_ASSERT(((const VkBaseInStructure*)pCreateInfo->pMemoryAllocateNext)->sType != 0); + } + + if(newCreateInfo.maxBlockCount == 0) + { + newCreateInfo.maxBlockCount = SIZE_MAX; + } + if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + // Memory type index out of range or forbidden. + if(pCreateInfo->memoryTypeIndex >= GetMemoryTypeCount() || + ((1U << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0) + { + return VK_ERROR_FEATURE_NOT_PRESENT; + } + if(newCreateInfo.minAllocationAlignment > 0) + { + VMA_ASSERT(VmaIsPow2(newCreateInfo.minAllocationAlignment)); + } + + const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex); + + *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize); + + VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks(); + if(res != VK_SUCCESS) + { + vma_delete(this, *pPool); + *pPool = VMA_NULL; + return res; + } + + // Add to m_Pools. + { + VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex); + (*pPool)->SetId(m_NextPoolId++); + m_Pools.PushBack(*pPool); + } + + return VK_SUCCESS; +} + +void VmaAllocator_T::DestroyPool(VmaPool pool) +{ + // Remove from m_Pools. 
+ { + VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex); + m_Pools.Remove(pool); + } + + vma_delete(this, pool); +} + +void VmaAllocator_T::GetPoolStatistics(VmaPool pool, VmaStatistics* pPoolStats) +{ + VmaClearStatistics(*pPoolStats); + pool->m_BlockVector.AddStatistics(*pPoolStats); + pool->m_DedicatedAllocations.AddStatistics(*pPoolStats); +} + +void VmaAllocator_T::CalculatePoolStatistics(VmaPool pool, VmaDetailedStatistics* pPoolStats) +{ + VmaClearDetailedStatistics(*pPoolStats); + pool->m_BlockVector.AddDetailedStatistics(*pPoolStats); + pool->m_DedicatedAllocations.AddDetailedStatistics(*pPoolStats); +} + +void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex) +{ + m_CurrentFrameIndex.store(frameIndex); + +#if VMA_MEMORY_BUDGET + if(m_UseExtMemoryBudget) + { + UpdateVulkanBudget(); + } +#endif // #if VMA_MEMORY_BUDGET +} + +VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool) +{ + return hPool->m_BlockVector.CheckCorruption(); +} + +VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits) +{ + VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT; + + // Process default pools. + for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex]; + if(pBlockVector != VMA_NULL) + { + VkResult localRes = pBlockVector->CheckCorruption(); + switch(localRes) + { + case VK_ERROR_FEATURE_NOT_PRESENT: + break; + case VK_SUCCESS: + finalRes = VK_SUCCESS; + break; + default: + return localRes; + } + } + } + + // Process custom pools. + { + VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex); + for(VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool)) + { + if(((1U << pool->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0) + { + VkResult localRes = pool->m_BlockVector.CheckCorruption(); + switch(localRes) + { + case VK_ERROR_FEATURE_NOT_PRESENT: + break; + case VK_SUCCESS: + finalRes = VK_SUCCESS; + break; + default: + return localRes; + } + } + } + } + + return finalRes; +} + +VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory) +{ + const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex); + +#if VMA_DEBUG_DONT_EXCEED_HEAP_SIZE_WITH_ALLOCATION_SIZE + if (pAllocateInfo->allocationSize > m_MemProps.memoryHeaps[heapIndex].size) + { + return VK_ERROR_OUT_OF_DEVICE_MEMORY; + } +#endif + + AtomicTransactionalIncrement deviceMemoryCountIncrement; + const uint64_t prevDeviceMemoryCount = deviceMemoryCountIncrement.Increment(&m_DeviceMemoryCount); +#if VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT + if(prevDeviceMemoryCount >= m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount) + { + return VK_ERROR_TOO_MANY_OBJECTS; + } +#endif + + // HeapSizeLimit is in effect for this heap. + if((m_HeapSizeLimitMask & (1U << heapIndex)) != 0) + { + const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size; + VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex]; + for(;;) + { + const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize; + if(blockBytesAfterAllocation > heapSize) + { + return VK_ERROR_OUT_OF_DEVICE_MEMORY; + } + if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation)) + { + break; + } + } + } + else + { + m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize; + } + ++m_Budget.m_BlockCount[heapIndex]; + + // VULKAN CALL vkAllocateMemory. 
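+ // m_Budget.m_BlockBytes/m_BlockCount were already incremented above, so a failed
+ // vkAllocateMemory must roll both counters back in the else branch below.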
+ VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory); + + if(res == VK_SUCCESS) + { +#if VMA_MEMORY_BUDGET + ++m_Budget.m_OperationsSinceBudgetFetch; +#endif + + // Informative callback. + if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL) + { + (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.pUserData); + } + + deviceMemoryCountIncrement.Commit(); + } + else + { + --m_Budget.m_BlockCount[heapIndex]; + m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize; + } + + return res; +} + +void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory) +{ + // Informative callback. + if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL) + { + (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.pUserData); + } + + // VULKAN CALL vkFreeMemory. + (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks()); + + const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType); + --m_Budget.m_BlockCount[heapIndex]; + m_Budget.m_BlockBytes[heapIndex] -= size; + + --m_DeviceMemoryCount; +} + +VkResult VmaAllocator_T::BindVulkanBuffer( + VkDeviceMemory memory, + VkDeviceSize memoryOffset, + VkBuffer buffer, + const void* pNext) const +{ + if(pNext != VMA_NULL) + { +#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2 + if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) && + m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL) + { + VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR }; + bindBufferMemoryInfo.pNext = pNext; + bindBufferMemoryInfo.buffer = buffer; + bindBufferMemoryInfo.memory = memory; + bindBufferMemoryInfo.memoryOffset = memoryOffset; + return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo); + } +#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2 + + return VK_ERROR_EXTENSION_NOT_PRESENT; + } + else + { + return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset); + } +} + +VkResult VmaAllocator_T::BindVulkanImage( + VkDeviceMemory memory, + VkDeviceSize memoryOffset, + VkImage image, + const void* pNext) const +{ + if(pNext != VMA_NULL) + { +#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2 + if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) && + m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL) + { + VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR }; + bindBufferMemoryInfo.pNext = pNext; + bindBufferMemoryInfo.image = image; + bindBufferMemoryInfo.memory = memory; + bindBufferMemoryInfo.memoryOffset = memoryOffset; + return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo); + } +#endif // #if VMA_BIND_MEMORY2 + + return VK_ERROR_EXTENSION_NOT_PRESENT; + } + + return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset); +} + +VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData) +{ + switch(hAllocation->GetType()) + { + case VmaAllocation_T::ALLOCATION_TYPE_BLOCK: + { + VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock(); + char *pBytes = VMA_NULL; + VkResult res = pBlock->Map(this, 1, (void**)&pBytes); + if(res == VK_SUCCESS) + { + *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset(); + hAllocation->BlockAllocMap(); + } + 
return res;
+ }
+ case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+ return hAllocation->DedicatedAllocMap(this, ppData);
+ default:
+ VMA_ASSERT(0);
+ return VK_ERROR_MEMORY_MAP_FAILED;
+ }
+}
+
+void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
+{
+ switch(hAllocation->GetType())
+ {
+ case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+ {
+ VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
+ hAllocation->BlockAllocUnmap();
+ pBlock->Unmap(this, 1);
+ }
+ break;
+ case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+ hAllocation->DedicatedAllocUnmap(this);
+ break;
+ default:
+ VMA_ASSERT(0);
+ }
+}
+
+VkResult VmaAllocator_T::BindBufferMemory(
+ VmaAllocation hAllocation,
+ VkDeviceSize allocationLocalOffset,
+ VkBuffer hBuffer,
+ const void* pNext)
+{
+ VkResult res = VK_ERROR_UNKNOWN_COPY;
+ switch(hAllocation->GetType())
+ {
+ case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+ res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
+ break;
+ case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+ {
+ VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
+ VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block.");
+ res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
+ break;
+ }
+ default:
+ VMA_ASSERT(0);
+ }
+ return res;
+}
+
+VkResult VmaAllocator_T::BindImageMemory(
+ VmaAllocation hAllocation,
+ VkDeviceSize allocationLocalOffset,
+ VkImage hImage,
+ const void* pNext)
+{
+ VkResult res = VK_ERROR_UNKNOWN_COPY;
+ switch(hAllocation->GetType())
+ {
+ case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+ res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
+ break;
+ case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+ {
+ VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
+ VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block.");
+ res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
+ break;
+ }
+ default:
+ VMA_ASSERT(0);
+ }
+ return res;
+}
+
+VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
+ VmaAllocation hAllocation,
+ VkDeviceSize offset, VkDeviceSize size,
+ VMA_CACHE_OPERATION op)
+{
+ VkResult res = VK_SUCCESS;
+
+ VkMappedMemoryRange memRange = {};
+ if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
+ {
+ switch(op)
+ {
+ case VMA_CACHE_FLUSH:
+ res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
+ break;
+ case VMA_CACHE_INVALIDATE:
+ res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
+ break;
+ default:
+ VMA_ASSERT(0);
+ }
+ }
+ // else: Just ignore this call.
+ return res;
+}
+
+VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
+ uint32_t allocationCount,
+ const VmaAllocation* allocations,
+ const VkDeviceSize* offsets, const VkDeviceSize* sizes,
+ VMA_CACHE_OPERATION op)
+{
+ typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
+ typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
+ RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));
+
+ for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
+ {
+ const VmaAllocation alloc = allocations[allocIndex];
+ const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
+ const VkDeviceSize size = sizes != VMA_NULL ?
sizes[allocIndex] : VK_WHOLE_SIZE; + VkMappedMemoryRange newRange; + if(GetFlushOrInvalidateRange(alloc, offset, size, newRange)) + { + ranges.push_back(newRange); + } + } + + VkResult res = VK_SUCCESS; + if(!ranges.empty()) + { + switch(op) + { + case VMA_CACHE_FLUSH: + res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data()); + break; + case VMA_CACHE_INVALIDATE: + res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data()); + break; + default: + VMA_ASSERT(0); + } + } + // else: Just ignore this call. + return res; +} + +VkResult VmaAllocator_T::CopyMemoryToAllocation( + const void* pSrcHostPointer, + VmaAllocation dstAllocation, + VkDeviceSize dstAllocationLocalOffset, + VkDeviceSize size) +{ + void* dstMappedData = VMA_NULL; + VkResult res = Map(dstAllocation, &dstMappedData); + if(res == VK_SUCCESS) + { + memcpy((char*)dstMappedData + dstAllocationLocalOffset, pSrcHostPointer, (size_t)size); + Unmap(dstAllocation); + res = FlushOrInvalidateAllocation(dstAllocation, dstAllocationLocalOffset, size, VMA_CACHE_FLUSH); + } + return res; +} + +VkResult VmaAllocator_T::CopyAllocationToMemory( + VmaAllocation srcAllocation, + VkDeviceSize srcAllocationLocalOffset, + void* pDstHostPointer, + VkDeviceSize size) +{ + void* srcMappedData = VMA_NULL; + VkResult res = Map(srcAllocation, &srcMappedData); + if(res == VK_SUCCESS) + { + res = FlushOrInvalidateAllocation(srcAllocation, srcAllocationLocalOffset, size, VMA_CACHE_INVALIDATE); + if(res == VK_SUCCESS) + { + memcpy(pDstHostPointer, (const char*)srcMappedData + srcAllocationLocalOffset, (size_t)size); + Unmap(srcAllocation); + } + } + return res; +} + +void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation) +{ + VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); + + const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex(); + VmaPool parentPool = allocation->GetParentPool(); + if(parentPool == VK_NULL_HANDLE) + { + // Default pool + m_DedicatedAllocations[memTypeIndex].Unregister(allocation); + } + else + { + // Custom pool + parentPool->m_DedicatedAllocations.Unregister(allocation); + } + + VkDeviceMemory hMemory = allocation->GetMemory(); + + /* + There is no need to call this, because Vulkan spec allows to skip vkUnmapMemory + before vkFreeMemory. + + if(allocation->GetMappedData() != VMA_NULL) + { + (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory); + } + */ + + FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory); + + m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize()); + allocation->Destroy(this); + m_AllocationObjectAllocator.Free(allocation); + + VMA_DEBUG_LOG_FORMAT(" Freed DedicatedMemory MemoryTypeIndex=%" PRIu32, memTypeIndex); +} + +uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const +{ + VkBufferCreateInfo dummyBufCreateInfo; + VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo); + + uint32_t memoryTypeBits = 0; + + // Create buffer. + VkBuffer buf = VK_NULL_HANDLE; + VkResult res = (*GetVulkanFunctions().vkCreateBuffer)( + m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf); + if(res == VK_SUCCESS) + { + // Query for supported memory types. + VkMemoryRequirements memReq; + (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq); + memoryTypeBits = memReq.memoryTypeBits; + + // Destroy buffer. 
+ (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks()); + } + + return memoryTypeBits; +} + +uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const +{ + // Make sure memory information is already fetched. + VMA_ASSERT(GetMemoryTypeCount() > 0); + + uint32_t memoryTypeBits = UINT32_MAX; + + if(!m_UseAmdDeviceCoherentMemory) + { + // Exclude memory types that have VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD. + for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0) + { + memoryTypeBits &= ~(1U << memTypeIndex); + } + } + } + + return memoryTypeBits; +} + +bool VmaAllocator_T::GetFlushOrInvalidateRange( + VmaAllocation allocation, + VkDeviceSize offset, VkDeviceSize size, + VkMappedMemoryRange& outRange) const +{ + const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex(); + if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex)) + { + const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize; + const VkDeviceSize allocationSize = allocation->GetSize(); + VMA_ASSERT(offset <= allocationSize); + + outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE; + outRange.pNext = VMA_NULL; + outRange.memory = allocation->GetMemory(); + + switch(allocation->GetType()) + { + case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED: + outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize); + if(size == VK_WHOLE_SIZE) + { + outRange.size = allocationSize - outRange.offset; + } + else + { + VMA_ASSERT(offset + size <= allocationSize); + outRange.size = VMA_MIN( + VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize), + allocationSize - outRange.offset); + } + break; + case VmaAllocation_T::ALLOCATION_TYPE_BLOCK: + { + // 1. Still within this allocation. + outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize); + if(size == VK_WHOLE_SIZE) + { + size = allocationSize - offset; + } + else + { + VMA_ASSERT(offset + size <= allocationSize); + } + outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize); + + // 2. Adjust to whole block. 
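+ // Illustrative numbers (assuming nonCoherentAtomSize = 64): offset = 100, size = 8
+ // becomes outRange.offset = 64 and outRange.size = VmaAlignUp(8 + (100 - 64), 64) = 64.
+ // Step 2 below then shifts the range by the allocation's offset inside the block and
+ // clamps it so it never reaches past the end of the VkDeviceMemory block.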
+ const VkDeviceSize allocationOffset = allocation->GetOffset(); + VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0); + const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize(); + outRange.offset += allocationOffset; + outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset); + + break; + } + default: + VMA_ASSERT(0); + } + return true; + } + return false; +} + +#if VMA_MEMORY_BUDGET +void VmaAllocator_T::UpdateVulkanBudget() +{ + VMA_ASSERT(m_UseExtMemoryBudget); + + VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR }; + + VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT }; + VmaPnextChainPushFront(&memProps, &budgetProps); + + GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps); + + { + VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex); + + for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex) + { + m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex]; + m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex]; + m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load(); + + // Some bugged drivers return the budget incorrectly, e.g. 0 or much bigger than heap size. + if(m_Budget.m_VulkanBudget[heapIndex] == 0) + { + m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristics. + } + else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size) + { + m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size; + } + if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0) + { + m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex]; + } + } + m_Budget.m_OperationsSinceBudgetFetch = 0; + } +} +#endif // VMA_MEMORY_BUDGET + +void VmaAllocator_T::FillAllocation(VmaAllocation hAllocation, uint8_t pattern) +{ +#if VMA_DEBUG_INITIALIZE_ALLOCATIONS + if(hAllocation->IsMappingAllowed() && + (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) + { + void* pData = VMA_NULL; + VkResult res = Map(hAllocation, &pData); + if(res == VK_SUCCESS) + { + memset(pData, (int)pattern, (size_t)hAllocation->GetSize()); + FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH); + Unmap(hAllocation); + } + else + { + VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation."); + } + } +#endif // #if VMA_DEBUG_INITIALIZE_ALLOCATIONS +} + +uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits() +{ + uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load(); + if(memoryTypeBits == UINT32_MAX) + { + memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits(); + m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits); + } + return memoryTypeBits; +} + +#if VMA_STATS_STRING_ENABLED +void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json) +{ + json.WriteString("DefaultPools"); + json.BeginObject(); + { + for (uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex]; + VmaDedicatedAllocationList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex]; + if (pBlockVector != VMA_NULL) + { + json.BeginString("Type "); + 
json.ContinueString(memTypeIndex); + json.EndString(); + json.BeginObject(); + { + json.WriteString("PreferredBlockSize"); + json.WriteNumber(pBlockVector->GetPreferredBlockSize()); + + json.WriteString("Blocks"); + pBlockVector->PrintDetailedMap(json); + + json.WriteString("DedicatedAllocations"); + dedicatedAllocList.BuildStatsString(json); + } + json.EndObject(); + } + } + } + json.EndObject(); + + json.WriteString("CustomPools"); + json.BeginObject(); + { + VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex); + if (!m_Pools.IsEmpty()) + { + for (uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + bool displayType = true; + size_t index = 0; + for (VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool)) + { + VmaBlockVector& blockVector = pool->m_BlockVector; + if (blockVector.GetMemoryTypeIndex() == memTypeIndex) + { + if (displayType) + { + json.BeginString("Type "); + json.ContinueString(memTypeIndex); + json.EndString(); + json.BeginArray(); + displayType = false; + } + + json.BeginObject(); + { + json.WriteString("Name"); + json.BeginString(); + json.ContinueString((uint64_t)index++); + if (pool->GetName()) + { + json.ContinueString(" - "); + json.ContinueString(pool->GetName()); + } + json.EndString(); + + json.WriteString("PreferredBlockSize"); + json.WriteNumber(blockVector.GetPreferredBlockSize()); + + json.WriteString("Blocks"); + blockVector.PrintDetailedMap(json); + + json.WriteString("DedicatedAllocations"); + pool->m_DedicatedAllocations.BuildStatsString(json); + } + json.EndObject(); + } + } + + if (!displayType) + json.EndArray(); + } + } + } + json.EndObject(); +} +#endif // VMA_STATS_STRING_ENABLED +#endif // _VMA_ALLOCATOR_T_FUNCTIONS + + +#ifndef _VMA_PUBLIC_INTERFACE + +#ifdef VOLK_HEADER_VERSION + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaImportVulkanFunctionsFromVolk( + const VmaAllocatorCreateInfo* VMA_NOT_NULL pAllocatorCreateInfo, + VmaVulkanFunctions* VMA_NOT_NULL pDstVulkanFunctions) +{ + VMA_ASSERT(pAllocatorCreateInfo != VMA_NULL); + VMA_ASSERT(pAllocatorCreateInfo->instance != VK_NULL_HANDLE); + VMA_ASSERT(pAllocatorCreateInfo->device != VK_NULL_HANDLE); + + memset(pDstVulkanFunctions, 0, sizeof(*pDstVulkanFunctions)); + + VolkDeviceTable src = {}; + volkLoadDeviceTable(&src, pAllocatorCreateInfo->device); + +#define COPY_GLOBAL_TO_VMA_FUNC(volkName, vmaName) if(!pDstVulkanFunctions->vmaName) pDstVulkanFunctions->vmaName = volkName; +#define COPY_DEVICE_TO_VMA_FUNC(volkName, vmaName) if(!pDstVulkanFunctions->vmaName) pDstVulkanFunctions->vmaName = src.volkName; + + COPY_GLOBAL_TO_VMA_FUNC(vkGetInstanceProcAddr, vkGetInstanceProcAddr) + COPY_GLOBAL_TO_VMA_FUNC(vkGetDeviceProcAddr, vkGetDeviceProcAddr) + COPY_GLOBAL_TO_VMA_FUNC(vkGetPhysicalDeviceProperties, vkGetPhysicalDeviceProperties) + COPY_GLOBAL_TO_VMA_FUNC(vkGetPhysicalDeviceMemoryProperties, vkGetPhysicalDeviceMemoryProperties) + COPY_DEVICE_TO_VMA_FUNC(vkAllocateMemory, vkAllocateMemory) + COPY_DEVICE_TO_VMA_FUNC(vkFreeMemory, vkFreeMemory) + COPY_DEVICE_TO_VMA_FUNC(vkMapMemory, vkMapMemory) + COPY_DEVICE_TO_VMA_FUNC(vkUnmapMemory, vkUnmapMemory) + COPY_DEVICE_TO_VMA_FUNC(vkFlushMappedMemoryRanges, vkFlushMappedMemoryRanges) + COPY_DEVICE_TO_VMA_FUNC(vkInvalidateMappedMemoryRanges, vkInvalidateMappedMemoryRanges) + COPY_DEVICE_TO_VMA_FUNC(vkBindBufferMemory, vkBindBufferMemory) + COPY_DEVICE_TO_VMA_FUNC(vkBindImageMemory, vkBindImageMemory) + COPY_DEVICE_TO_VMA_FUNC(vkGetBufferMemoryRequirements, vkGetBufferMemoryRequirements) + 
COPY_DEVICE_TO_VMA_FUNC(vkGetImageMemoryRequirements, vkGetImageMemoryRequirements) + COPY_DEVICE_TO_VMA_FUNC(vkCreateBuffer, vkCreateBuffer) + COPY_DEVICE_TO_VMA_FUNC(vkDestroyBuffer, vkDestroyBuffer) + COPY_DEVICE_TO_VMA_FUNC(vkCreateImage, vkCreateImage) + COPY_DEVICE_TO_VMA_FUNC(vkDestroyImage, vkDestroyImage) + COPY_DEVICE_TO_VMA_FUNC(vkCmdCopyBuffer, vkCmdCopyBuffer) +#if VMA_VULKAN_VERSION >= 1001000 + if (pAllocatorCreateInfo->vulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + COPY_GLOBAL_TO_VMA_FUNC(vkGetPhysicalDeviceMemoryProperties2, vkGetPhysicalDeviceMemoryProperties2KHR) + COPY_DEVICE_TO_VMA_FUNC(vkGetBufferMemoryRequirements2, vkGetBufferMemoryRequirements2KHR) + COPY_DEVICE_TO_VMA_FUNC(vkGetImageMemoryRequirements2, vkGetImageMemoryRequirements2KHR) + COPY_DEVICE_TO_VMA_FUNC(vkBindBufferMemory2, vkBindBufferMemory2KHR) + COPY_DEVICE_TO_VMA_FUNC(vkBindImageMemory2, vkBindImageMemory2KHR) + } +#endif +#if VMA_VULKAN_VERSION >= 1003000 + if (pAllocatorCreateInfo->vulkanApiVersion >= VK_MAKE_VERSION(1, 3, 0)) + { + COPY_DEVICE_TO_VMA_FUNC(vkGetDeviceBufferMemoryRequirements, vkGetDeviceBufferMemoryRequirements) + COPY_DEVICE_TO_VMA_FUNC(vkGetDeviceImageMemoryRequirements, vkGetDeviceImageMemoryRequirements) + } +#endif +#if VMA_KHR_MAINTENANCE4 + if((pAllocatorCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE4_BIT) != 0) + { + COPY_DEVICE_TO_VMA_FUNC(vkGetDeviceBufferMemoryRequirementsKHR, vkGetDeviceBufferMemoryRequirements) + COPY_DEVICE_TO_VMA_FUNC(vkGetDeviceImageMemoryRequirementsKHR, vkGetDeviceImageMemoryRequirements) + } +#endif +#if VMA_DEDICATED_ALLOCATION + if ((pAllocatorCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0) + { + COPY_DEVICE_TO_VMA_FUNC(vkGetBufferMemoryRequirements2KHR, vkGetBufferMemoryRequirements2KHR) + COPY_DEVICE_TO_VMA_FUNC(vkGetImageMemoryRequirements2KHR, vkGetImageMemoryRequirements2KHR) + } +#endif +#if VMA_BIND_MEMORY2 + if ((pAllocatorCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0) + { + COPY_DEVICE_TO_VMA_FUNC(vkBindBufferMemory2KHR, vkBindBufferMemory2KHR) + COPY_DEVICE_TO_VMA_FUNC(vkBindImageMemory2KHR, vkBindImageMemory2KHR) + } +#endif +#if VMA_MEMORY_BUDGET + if ((pAllocatorCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0) + { + COPY_GLOBAL_TO_VMA_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, vkGetPhysicalDeviceMemoryProperties2KHR) + } +#endif +#if VMA_EXTERNAL_MEMORY_WIN32 + if ((pAllocatorCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_EXTERNAL_MEMORY_WIN32_BIT) != 0) + { + COPY_DEVICE_TO_VMA_FUNC(vkGetMemoryWin32HandleKHR, vkGetMemoryWin32HandleKHR) + } +#endif + +#undef COPY_DEVICE_TO_VMA_FUNC +#undef COPY_GLOBAL_TO_VMA_FUNC + + return VK_SUCCESS; +} + +#endif // #ifdef VOLK_HEADER_VERSION + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator( + const VmaAllocatorCreateInfo* pCreateInfo, + VmaAllocator* pAllocator) +{ + VMA_ASSERT(pCreateInfo && pAllocator); + VMA_ASSERT(pCreateInfo->vulkanApiVersion == 0 || + (VK_VERSION_MAJOR(pCreateInfo->vulkanApiVersion) == 1 && VK_VERSION_MINOR(pCreateInfo->vulkanApiVersion) <= 4)); + VMA_DEBUG_LOG("vmaCreateAllocator"); + *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo); + VkResult result = (*pAllocator)->Init(pCreateInfo); + if(result < 0) + { + vma_delete(pCreateInfo->pAllocationCallbacks, *pAllocator); + *pAllocator = VK_NULL_HANDLE; + } + return result; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator( + VmaAllocator allocator) +{ + if(allocator != VK_NULL_HANDLE) 
+ { + VMA_DEBUG_LOG("vmaDestroyAllocator"); + VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks; // Have to copy the callbacks when destroying. + vma_delete(&allocationCallbacks, allocator); + } +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo* pAllocatorInfo) +{ + VMA_ASSERT(allocator && pAllocatorInfo); + pAllocatorInfo->instance = allocator->m_hInstance; + pAllocatorInfo->physicalDevice = allocator->GetPhysicalDevice(); + pAllocatorInfo->device = allocator->m_hDevice; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties( + VmaAllocator allocator, + const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties) +{ + VMA_ASSERT(allocator && ppPhysicalDeviceProperties); + *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties( + VmaAllocator allocator, + const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties) +{ + VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties); + *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties( + VmaAllocator allocator, + uint32_t memoryTypeIndex, + VkMemoryPropertyFlags* pFlags) +{ + VMA_ASSERT(allocator && pFlags); + VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount()); + *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex( + VmaAllocator allocator, + uint32_t frameIndex) +{ + VMA_ASSERT(allocator); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->SetCurrentFrameIndex(frameIndex); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStatistics( + VmaAllocator allocator, + VmaTotalStatistics* pStats) +{ + VMA_ASSERT(allocator && pStats); + VMA_DEBUG_GLOBAL_MUTEX_LOCK + allocator->CalculateStatistics(pStats); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetHeapBudgets( + VmaAllocator allocator, + VmaBudget* pBudgets) +{ + VMA_ASSERT(allocator && pBudgets); + VMA_DEBUG_GLOBAL_MUTEX_LOCK + allocator->GetHeapBudgets(pBudgets, 0, allocator->GetMemoryHeapCount()); +} + +#if VMA_STATS_STRING_ENABLED + +VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString( + VmaAllocator allocator, + char** ppStatsString, + VkBool32 detailedMap) +{ + VMA_ASSERT(allocator && ppStatsString); + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + VmaStringBuilder sb(allocator->GetAllocationCallbacks()); + { + VmaBudget budgets[VK_MAX_MEMORY_HEAPS]; + allocator->GetHeapBudgets(budgets, 0, allocator->GetMemoryHeapCount()); + + VmaTotalStatistics stats; + allocator->CalculateStatistics(&stats); + + VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb); + json.BeginObject(); + { + json.WriteString("General"); + json.BeginObject(); + { + const VkPhysicalDeviceProperties& deviceProperties = allocator->m_PhysicalDeviceProperties; + const VkPhysicalDeviceMemoryProperties& memoryProperties = allocator->m_MemProps; + + json.WriteString("API"); + json.WriteString("Vulkan"); + + json.WriteString("apiVersion"); + json.BeginString(); + json.ContinueString(VK_VERSION_MAJOR(deviceProperties.apiVersion)); + json.ContinueString("."); + json.ContinueString(VK_VERSION_MINOR(deviceProperties.apiVersion)); + json.ContinueString("."); + json.ContinueString(VK_VERSION_PATCH(deviceProperties.apiVersion)); + json.EndString(); + + json.WriteString("GPU"); + json.WriteString(deviceProperties.deviceName); + json.WriteString("deviceType"); + 
json.WriteNumber(static_cast<uint32_t>(deviceProperties.deviceType));
+
+ json.WriteString("maxMemoryAllocationCount");
+ json.WriteNumber(deviceProperties.limits.maxMemoryAllocationCount);
+ json.WriteString("bufferImageGranularity");
+ json.WriteNumber(deviceProperties.limits.bufferImageGranularity);
+ json.WriteString("nonCoherentAtomSize");
+ json.WriteNumber(deviceProperties.limits.nonCoherentAtomSize);
+
+ json.WriteString("memoryHeapCount");
+ json.WriteNumber(memoryProperties.memoryHeapCount);
+ json.WriteString("memoryTypeCount");
+ json.WriteNumber(memoryProperties.memoryTypeCount);
+ }
+ json.EndObject();
+ }
+ {
+ json.WriteString("Total");
+ VmaPrintDetailedStatistics(json, stats.total);
+ }
+ {
+ json.WriteString("MemoryInfo");
+ json.BeginObject();
+ {
+ for (uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
+ {
+ json.BeginString("Heap ");
+ json.ContinueString(heapIndex);
+ json.EndString();
+ json.BeginObject();
+ {
+ const VkMemoryHeap& heapInfo = allocator->m_MemProps.memoryHeaps[heapIndex];
+ json.WriteString("Flags");
+ json.BeginArray(true);
+ {
+ if (heapInfo.flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT)
+ json.WriteString("DEVICE_LOCAL");
+ #if VMA_VULKAN_VERSION >= 1001000
+ if (heapInfo.flags & VK_MEMORY_HEAP_MULTI_INSTANCE_BIT)
+ json.WriteString("MULTI_INSTANCE");
+ #endif
+
+ VkMemoryHeapFlags flags = heapInfo.flags &
+ ~(VK_MEMORY_HEAP_DEVICE_LOCAL_BIT
+ #if VMA_VULKAN_VERSION >= 1001000
+ | VK_MEMORY_HEAP_MULTI_INSTANCE_BIT
+ #endif
+ );
+ if (flags != 0)
+ json.WriteNumber(flags);
+ }
+ json.EndArray();
+
+ json.WriteString("Size");
+ json.WriteNumber(heapInfo.size);
+
+ json.WriteString("Budget");
+ json.BeginObject();
+ {
+ json.WriteString("BudgetBytes");
+ json.WriteNumber(budgets[heapIndex].budget);
+ json.WriteString("UsageBytes");
+ json.WriteNumber(budgets[heapIndex].usage);
+ }
+ json.EndObject();
+
+ json.WriteString("Stats");
+ VmaPrintDetailedStatistics(json, stats.memoryHeap[heapIndex]);
+
+ json.WriteString("MemoryPools");
+ json.BeginObject();
+ {
+ for (uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
+ {
+ if (allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
+ {
+ json.BeginString("Type ");
+ json.ContinueString(typeIndex);
+ json.EndString();
+ json.BeginObject();
+ {
+ json.WriteString("Flags");
+ json.BeginArray(true);
+ {
+ VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
+ if (flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT)
+ json.WriteString("DEVICE_LOCAL");
+ if (flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
+ json.WriteString("HOST_VISIBLE");
+ if (flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
+ json.WriteString("HOST_COHERENT");
+ if (flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT)
+ json.WriteString("HOST_CACHED");
+ if (flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT)
+ json.WriteString("LAZILY_ALLOCATED");
+ #if VMA_VULKAN_VERSION >= 1001000
+ if (flags & VK_MEMORY_PROPERTY_PROTECTED_BIT)
+ json.WriteString("PROTECTED");
+ #endif
+ #if VK_AMD_device_coherent_memory
+ if (flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY)
+ json.WriteString("DEVICE_COHERENT_AMD");
+ if (flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)
+ json.WriteString("DEVICE_UNCACHED_AMD");
+ #endif
+
+ flags &= ~(VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT
+ #if VMA_VULKAN_VERSION >= 1001000
+ | VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT
+ #endif
+ #if VK_AMD_device_coherent_memory
+ | VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY
+ | 
VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY + #endif + | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT + | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT + | VK_MEMORY_PROPERTY_HOST_CACHED_BIT); + if (flags != 0) + json.WriteNumber(flags); + } + json.EndArray(); + + json.WriteString("Stats"); + VmaPrintDetailedStatistics(json, stats.memoryType[typeIndex]); + } + json.EndObject(); + } + } + + } + json.EndObject(); + } + json.EndObject(); + } + } + json.EndObject(); + } + + if (detailedMap == VK_TRUE) + allocator->PrintDetailedMap(json); + + json.EndObject(); + } + + *ppStatsString = VmaCreateStringCopy(allocator->GetAllocationCallbacks(), sb.GetData(), sb.GetLength()); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString( + VmaAllocator allocator, + char* pStatsString) +{ + if(pStatsString != VMA_NULL) + { + VMA_ASSERT(allocator); + VmaFreeString(allocator->GetAllocationCallbacks(), pStatsString); + } +} + +#endif // VMA_STATS_STRING_ENABLED /* -Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity. -Set to more than 1 for debugging purposes only. Must be power of two. +This function is not protected by any mutex because it just reads immutable data. */ -static const VkDeviceSize VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY = 1; +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex( + VmaAllocator allocator, + uint32_t memoryTypeBits, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + uint32_t* pMemoryTypeIndex) +{ + VMA_ASSERT(allocator != VK_NULL_HANDLE); + VMA_ASSERT(pAllocationCreateInfo != VMA_NULL); + VMA_ASSERT(pMemoryTypeIndex != VMA_NULL); + + return allocator->FindMemoryTypeIndex(memoryTypeBits, pAllocationCreateInfo, VmaBufferImageUsage::UNKNOWN, pMemoryTypeIndex); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo( + VmaAllocator allocator, + const VkBufferCreateInfo* pBufferCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + uint32_t* pMemoryTypeIndex) +{ + VMA_ASSERT(allocator != VK_NULL_HANDLE); + VMA_ASSERT(pBufferCreateInfo != VMA_NULL); + VMA_ASSERT(pAllocationCreateInfo != VMA_NULL); + VMA_ASSERT(pMemoryTypeIndex != VMA_NULL); + + const VkDevice hDev = allocator->m_hDevice; + const VmaVulkanFunctions* funcs = &allocator->GetVulkanFunctions(); + VkResult res = VK_SUCCESS; + +#if VMA_KHR_MAINTENANCE4 || VMA_VULKAN_VERSION >= 1003000 + if (funcs->vkGetDeviceBufferMemoryRequirements && + (allocator->m_UseKhrMaintenance4 || allocator->m_VulkanApiVersion >= VK_MAKE_VERSION(1, 3, 0))) + { + // Can query straight from VkBufferCreateInfo :) + VkDeviceBufferMemoryRequirementsKHR devBufMemReq = {VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR}; + devBufMemReq.pCreateInfo = pBufferCreateInfo; + + VkMemoryRequirements2 memReq = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2}; + (*funcs->vkGetDeviceBufferMemoryRequirements)(hDev, &devBufMemReq, &memReq); + + return allocator->FindMemoryTypeIndex( + memReq.memoryRequirements.memoryTypeBits, pAllocationCreateInfo, + VmaBufferImageUsage(*pBufferCreateInfo, allocator->m_UseKhrMaintenance5), pMemoryTypeIndex); + } +#endif // VMA_KHR_MAINTENANCE4 || VMA_VULKAN_VERSION >= 1003000 + + // Must create a dummy buffer to query :( + VkBuffer hBuffer = VK_NULL_HANDLE; + res = funcs->vkCreateBuffer( + hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer); + if(res == VK_SUCCESS) + { + VkMemoryRequirements memReq = {}; + funcs->vkGetBufferMemoryRequirements(hDev, hBuffer, &memReq); + + res = allocator->FindMemoryTypeIndex( + memReq.memoryTypeBits, pAllocationCreateInfo, + 
VmaBufferImageUsage(*pBufferCreateInfo, allocator->m_UseKhrMaintenance5), pMemoryTypeIndex); + + funcs->vkDestroyBuffer( + hDev, hBuffer, allocator->GetAllocationCallbacks()); + } + return res; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo( + VmaAllocator allocator, + const VkImageCreateInfo* pImageCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + uint32_t* pMemoryTypeIndex) +{ + VMA_ASSERT(allocator != VK_NULL_HANDLE); + VMA_ASSERT(pImageCreateInfo != VMA_NULL); + VMA_ASSERT(pAllocationCreateInfo != VMA_NULL); + VMA_ASSERT(pMemoryTypeIndex != VMA_NULL); + + const VkDevice hDev = allocator->m_hDevice; + const VmaVulkanFunctions* funcs = &allocator->GetVulkanFunctions(); + VkResult res = VK_SUCCESS; + +#if VMA_KHR_MAINTENANCE4 || VMA_VULKAN_VERSION >= 1003000 + if(funcs->vkGetDeviceImageMemoryRequirements && + (allocator->m_UseKhrMaintenance4 || allocator->m_VulkanApiVersion >= VK_MAKE_VERSION(1, 3, 0))) + { + // Can query straight from VkImageCreateInfo :) + VkDeviceImageMemoryRequirementsKHR devImgMemReq = {VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR}; + devImgMemReq.pCreateInfo = pImageCreateInfo; + VMA_ASSERT(pImageCreateInfo->tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT_COPY && (pImageCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT_COPY) == 0 && + "Cannot use this VkImageCreateInfo with vmaFindMemoryTypeIndexForImageInfo as I don't know what to pass as VkDeviceImageMemoryRequirements::planeAspect."); + + VkMemoryRequirements2 memReq = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2}; + (*funcs->vkGetDeviceImageMemoryRequirements)(hDev, &devImgMemReq, &memReq); + + return allocator->FindMemoryTypeIndex( + memReq.memoryRequirements.memoryTypeBits, pAllocationCreateInfo, + VmaBufferImageUsage(*pImageCreateInfo), pMemoryTypeIndex); + } +#endif // VMA_KHR_MAINTENANCE4 || VMA_VULKAN_VERSION >= 1003000 + + // Must create a dummy image to query :( + VkImage hImage = VK_NULL_HANDLE; + res = funcs->vkCreateImage( + hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage); + if(res == VK_SUCCESS) + { + VkMemoryRequirements memReq = {}; + funcs->vkGetImageMemoryRequirements(hDev, hImage, &memReq); -// Maximum size of a memory heap in Vulkan to consider it "small". -static const VkDeviceSize VMA_SMALL_HEAP_MAX_SIZE = 512 * 1024 * 1024; -// Default size of a block allocated as single VkDeviceMemory from a "large" heap. -static const VkDeviceSize VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE = 256 * 1024 * 1024; -// Default size of a block allocated as single VkDeviceMemory from a "small" heap. -static const VkDeviceSize VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE = 64 * 1024 * 1024; + res = allocator->FindMemoryTypeIndex( + memReq.memoryTypeBits, pAllocationCreateInfo, + VmaBufferImageUsage(*pImageCreateInfo), pMemoryTypeIndex); -/******************************************************************************* -END OF CONFIGURATION -*/ + funcs->vkDestroyImage( + hDev, hImage, allocator->GetAllocationCallbacks()); + } + return res; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool( + VmaAllocator allocator, + const VmaPoolCreateInfo* pCreateInfo, + VmaPool* pPool) +{ + VMA_ASSERT(allocator && pCreateInfo && pPool); + + VMA_DEBUG_LOG("vmaCreatePool"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK -static VkAllocationCallbacks VmaEmptyAllocationCallbacks = { - VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL }; + return allocator->CreatePool(pCreateInfo, pPool); +} -// Returns number of bits set to 1 in (v). 
-static inline uint32_t CountBitsSet(uint32_t v) +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool( + VmaAllocator allocator, + VmaPool pool) { - uint32_t c = v - ((v >> 1) & 0x55555555); - c = ((c >> 2) & 0x33333333) + (c & 0x33333333); - c = ((c >> 4) + c) & 0x0F0F0F0F; - c = ((c >> 8) + c) & 0x00FF00FF; - c = ((c >> 16) + c) & 0x0000FFFF; - return c; + VMA_ASSERT(allocator); + + if(pool == VK_NULL_HANDLE) + { + return; + } + + VMA_DEBUG_LOG("vmaDestroyPool"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->DestroyPool(pool); } -// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16. -// Use types like uint32_t, uint64_t as T. -template -static inline T VmaAlignUp(T val, T align) +VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStatistics( + VmaAllocator allocator, + VmaPool pool, + VmaStatistics* pPoolStats) { - return (val + align - 1) / align * align; + VMA_ASSERT(allocator && pool && pPoolStats); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->GetPoolStatistics(pool, pPoolStats); } -// Division with mathematical rounding to nearest number. -template -inline T VmaRoundDiv(T x, T y) +VMA_CALL_PRE void VMA_CALL_POST vmaCalculatePoolStatistics( + VmaAllocator allocator, + VmaPool pool, + VmaDetailedStatistics* pPoolStats) { - return (x + (y / (T)2)) / y; + VMA_ASSERT(allocator && pool && pPoolStats); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->CalculatePoolStatistics(pool, pPoolStats); } -/* -Returns true if two memory blocks occupy overlapping pages. -ResourceA must be in less memory offset than ResourceB. -Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)" -chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity". -*/ -static inline bool VmaBlocksOnSamePage( - VkDeviceSize resourceAOffset, - VkDeviceSize resourceASize, - VkDeviceSize resourceBOffset, - VkDeviceSize pageSize) +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool) { - VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0); - VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1; - VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1); - VkDeviceSize resourceBStart = resourceBOffset; - VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1); - return resourceAEndPage == resourceBStartPage; + VMA_ASSERT(allocator && pool); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + VMA_DEBUG_LOG("vmaCheckPoolCorruption"); + + return allocator->CheckPoolCorruption(pool); } -enum VmaSuballocationType +VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName( + VmaAllocator allocator, + VmaPool pool, + const char** ppName) { - VMA_SUBALLOCATION_TYPE_FREE = 0, - VMA_SUBALLOCATION_TYPE_UNKNOWN = 1, - VMA_SUBALLOCATION_TYPE_BUFFER = 2, - VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, - VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4, - VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5, - VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF -}; + VMA_ASSERT(allocator && pool && ppName); -/* -Returns true if given suballocation types could conflict and must respect -VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer -or linear image and another one is optimal image. If type is unknown, behave -conservatively. 
-*/ -static inline bool VmaIsBufferImageGranularityConflict( - VmaSuballocationType suballocType1, - VmaSuballocationType suballocType2) + VMA_DEBUG_LOG("vmaGetPoolName"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + *ppName = pool->GetName(); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName( + VmaAllocator allocator, + VmaPool pool, + const char* pName) { - if(suballocType1 > suballocType2) - VMA_SWAP(suballocType1, suballocType2); - - switch(suballocType1) + VMA_ASSERT(allocator && pool); + + VMA_DEBUG_LOG("vmaSetPoolName"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + pool->SetName(pName); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory( + VmaAllocator allocator, + const VkMemoryRequirements* pVkMemoryRequirements, + const VmaAllocationCreateInfo* pCreateInfo, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) +{ + VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation); + + VMA_DEBUG_LOG("vmaAllocateMemory"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + VkResult result = allocator->AllocateMemory( + *pVkMemoryRequirements, + false, // requiresDedicatedAllocation + false, // prefersDedicatedAllocation + VK_NULL_HANDLE, // dedicatedBuffer + VK_NULL_HANDLE, // dedicatedImage + VmaBufferImageUsage::UNKNOWN, // dedicatedBufferImageUsage + VMA_NULL, // pMemoryAllocateNext + *pCreateInfo, + VMA_SUBALLOCATION_TYPE_UNKNOWN, + 1, // allocationCount + pAllocation); + + if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS) { - case VMA_SUBALLOCATION_TYPE_FREE: - return false; - case VMA_SUBALLOCATION_TYPE_UNKNOWN: - return true; - case VMA_SUBALLOCATION_TYPE_BUFFER: - return - suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN || - suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL; - case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN: - return - suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN || - suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR || - suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL; - case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR: - return - suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL; - case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL: - return false; - default: - VMA_ASSERT(0); - return true; + allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); } + + return result; } -// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope). -struct VmaMutexLock +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateDedicatedMemory( + VmaAllocator allocator, + const VkMemoryRequirements* pVkMemoryRequirements, + const VmaAllocationCreateInfo* pCreateInfo, + void* pMemoryAllocateNext, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) { -public: - VmaMutexLock(VmaMutex& mutex) : m_Mutex(mutex) { mutex.Lock(); } - ~VmaMutexLock() { m_Mutex.Unlock(); } + VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation); - VmaMutexLock& operator=(const VmaMutexLock&) = delete; -private: - VmaMutex& m_Mutex; -}; + VMA_DEBUG_LOG("vmaAllocateDedicatedMemory"); -#if VMA_DEBUG_GLOBAL_MUTEX - static VmaMutex gDebugGlobalMutex; - #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex); -#else - #define VMA_DEBUG_GLOBAL_MUTEX_LOCK -#endif + VMA_DEBUG_GLOBAL_MUTEX_LOCK -// Minimum size of a free suballocation to register it in the free suballocation collection. 
-static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16; + VkResult result = allocator->AllocateMemory( + *pVkMemoryRequirements, + true, // requiresDedicatedAllocation + false, // prefersDedicatedAllocation + VK_NULL_HANDLE, // dedicatedBuffer + VK_NULL_HANDLE, // dedicatedImage + VmaBufferImageUsage::UNKNOWN, // dedicatedBufferImageUsage + pMemoryAllocateNext, + *pCreateInfo, + VMA_SUBALLOCATION_TYPE_UNKNOWN, + 1, // allocationCount + pAllocation); -/* -Performs binary search and returns iterator to first element that is greater or -equal to (key), according to comparison (cmp). + if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS) + { + allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); + } -Cmp should return true if first argument is less than second argument. + return result; +} -Returned value is the found element, if present in the collection or place where -new element with value (key) should be inserted. -*/ -template -static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp) +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages( + VmaAllocator allocator, + const VkMemoryRequirements* pVkMemoryRequirements, + const VmaAllocationCreateInfo* pCreateInfo, + size_t allocationCount, + VmaAllocation* pAllocations, + VmaAllocationInfo* pAllocationInfo) { - size_t down = 0, up = (end - beg); - while(down < up) - { - const size_t mid = (down + up) / 2; - if(cmp(*(beg+mid), key)) - down = mid + 1; - else - up = mid; - } - return beg + down; -} + if(allocationCount == 0) + { + return VK_SUCCESS; + } -//////////////////////////////////////////////////////////////////////////////// -// Memory allocation + VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations); + + VMA_DEBUG_LOG("vmaAllocateMemoryPages"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + VkResult result = allocator->AllocateMemory( + *pVkMemoryRequirements, + false, // requiresDedicatedAllocation + false, // prefersDedicatedAllocation + VK_NULL_HANDLE, // dedicatedBuffer + VK_NULL_HANDLE, // dedicatedImage + VmaBufferImageUsage::UNKNOWN, // dedicatedBufferImageUsage + VMA_NULL, // pMemoryAllocateNext + *pCreateInfo, + VMA_SUBALLOCATION_TYPE_UNKNOWN, + allocationCount, + pAllocations); + + if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS) + { + for(size_t i = 0; i < allocationCount; ++i) + { + allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i); + } + } + + return result; +} -static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment) +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer( + VmaAllocator allocator, + VkBuffer buffer, + const VmaAllocationCreateInfo* pCreateInfo, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) { - if((pAllocationCallbacks != VMA_NULL) && - (pAllocationCallbacks->pfnAllocation != VMA_NULL)) + VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation); + + VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + VkMemoryRequirements vkMemReq = {}; + bool requiresDedicatedAllocation = false; + bool prefersDedicatedAllocation = false; + allocator->GetBufferMemoryRequirements(buffer, vkMemReq, + requiresDedicatedAllocation, + prefersDedicatedAllocation); + + VkResult result = allocator->AllocateMemory( + vkMemReq, + requiresDedicatedAllocation, + prefersDedicatedAllocation, + buffer, // dedicatedBuffer + VK_NULL_HANDLE, // dedicatedImage + VmaBufferImageUsage::UNKNOWN, // 
dedicatedBufferImageUsage + VMA_NULL, // pMemoryAllocateNext + *pCreateInfo, + VMA_SUBALLOCATION_TYPE_BUFFER, + 1, // allocationCount + pAllocation); + + if(pAllocationInfo && result == VK_SUCCESS) { - return (*pAllocationCallbacks->pfnAllocation)( - pAllocationCallbacks->pUserData, - size, - alignment, - VK_SYSTEM_ALLOCATION_SCOPE_OBJECT); + allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); } - else + + return result; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage( + VmaAllocator allocator, + VkImage image, + const VmaAllocationCreateInfo* pCreateInfo, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) +{ + VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation); + + VMA_DEBUG_LOG("vmaAllocateMemoryForImage"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + VkMemoryRequirements vkMemReq = {}; + bool requiresDedicatedAllocation = false; + bool prefersDedicatedAllocation = false; + allocator->GetImageMemoryRequirements(image, vkMemReq, + requiresDedicatedAllocation, prefersDedicatedAllocation); + + VkResult result = allocator->AllocateMemory( + vkMemReq, + requiresDedicatedAllocation, + prefersDedicatedAllocation, + VK_NULL_HANDLE, // dedicatedBuffer + image, // dedicatedImage + VmaBufferImageUsage::UNKNOWN, // dedicatedBufferImageUsage + VMA_NULL, // pMemoryAllocateNext + *pCreateInfo, + VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN, + 1, // allocationCount + pAllocation); + + if(pAllocationInfo && result == VK_SUCCESS) { - return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment); + allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); } + + return result; } -static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr) +VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory( + VmaAllocator allocator, + VmaAllocation allocation) { - if((pAllocationCallbacks != VMA_NULL) && - (pAllocationCallbacks->pfnFree != VMA_NULL)) + VMA_ASSERT(allocator); + + if(allocation == VK_NULL_HANDLE) { - (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr); + return; } - else + + VMA_DEBUG_LOG("vmaFreeMemory"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->FreeMemory( + 1, // allocationCount + &allocation); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages( + VmaAllocator allocator, + size_t allocationCount, + const VmaAllocation* pAllocations) +{ + if(allocationCount == 0) { - VMA_SYSTEM_FREE(ptr); + return; } + + VMA_ASSERT(allocator); + + VMA_DEBUG_LOG("vmaFreeMemoryPages"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->FreeMemory(allocationCount, pAllocations); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo( + VmaAllocator allocator, + VmaAllocation allocation, + VmaAllocationInfo* pAllocationInfo) +{ + VMA_ASSERT(allocator && allocation && pAllocationInfo); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->GetAllocationInfo(allocation, pAllocationInfo); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo2( + VmaAllocator allocator, + VmaAllocation allocation, + VmaAllocationInfo2* pAllocationInfo) +{ + VMA_ASSERT(allocator && allocation && pAllocationInfo); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->GetAllocationInfo2(allocation, pAllocationInfo); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData( + VmaAllocator allocator, + VmaAllocation allocation, + void* pUserData) +{ + VMA_ASSERT(allocator && allocation); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocation->SetUserData(allocator, pUserData); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationName( + VmaAllocator VMA_NOT_NULL 
allocator, + VmaAllocation VMA_NOT_NULL allocation, + const char* VMA_NULLABLE pName) +{ + allocation->SetName(allocator, pName); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationMemoryProperties( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkMemoryPropertyFlags* VMA_NOT_NULL pFlags) +{ + VMA_ASSERT(allocator && allocation && pFlags); + const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex(); + *pFlags = allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory( + VmaAllocator allocator, + VmaAllocation allocation, + void** ppData) +{ + VMA_ASSERT(allocator && allocation && ppData); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->Map(allocation, ppData); } -template -static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks) +VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory( + VmaAllocator allocator, + VmaAllocation allocation) { - return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T)); + VMA_ASSERT(allocator && allocation); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->Unmap(allocation); } -template -static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count) +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation( + VmaAllocator allocator, + VmaAllocation allocation, + VkDeviceSize offset, + VkDeviceSize size) { - return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T)); -} + VMA_ASSERT(allocator && allocation); -#define vma_new(allocator, type) new(VmaAllocate(allocator))(type) + VMA_DEBUG_LOG("vmaFlushAllocation"); -#define vma_new_array(allocator, type, count) new(VmaAllocateArray((allocator), (count)))(type) + VMA_DEBUG_GLOBAL_MUTEX_LOCK -template -static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr) + return allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation( + VmaAllocator allocator, + VmaAllocation allocation, + VkDeviceSize offset, + VkDeviceSize size) { - ptr->~T(); - VmaFree(pAllocationCallbacks, ptr); + VMA_ASSERT(allocator && allocation); + + VMA_DEBUG_LOG("vmaInvalidateAllocation"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE); } -template -static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count) +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations( + VmaAllocator allocator, + uint32_t allocationCount, + const VmaAllocation* allocations, + const VkDeviceSize* offsets, + const VkDeviceSize* sizes) { - if(ptr != VMA_NULL) + VMA_ASSERT(allocator); + + if(allocationCount == 0) { - for(size_t i = count; i--; ) - ptr[i].~T(); - VmaFree(pAllocationCallbacks, ptr); + return VK_SUCCESS; } + + VMA_ASSERT(allocations); + + VMA_DEBUG_LOG("vmaFlushAllocations"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH); } -// STL-compatible allocator. 
-template -class VmaStlAllocator +VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations( + VmaAllocator allocator, + uint32_t allocationCount, + const VmaAllocation* allocations, + const VkDeviceSize* offsets, + const VkDeviceSize* sizes) { -public: - const VkAllocationCallbacks* const m_pCallbacks; - typedef T value_type; - - VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { } - template VmaStlAllocator(const VmaStlAllocator& src) : m_pCallbacks(src.m_pCallbacks) { } - - T* allocate(size_t n) { return VmaAllocateArray(m_pCallbacks, n); } - void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); } + VMA_ASSERT(allocator); - template - bool operator==(const VmaStlAllocator& rhs) const + if(allocationCount == 0) { - return m_pCallbacks == rhs.m_pCallbacks; - } - template - bool operator!=(const VmaStlAllocator& rhs) const - { - return m_pCallbacks != rhs.m_pCallbacks; + return VK_SUCCESS; } - VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete; -}; + VMA_ASSERT(allocations); -#if VMA_USE_STL_VECTOR + VMA_DEBUG_LOG("vmaInvalidateAllocations"); -#define VmaVector std::vector + VMA_DEBUG_GLOBAL_MUTEX_LOCK -template -static void VectorInsert(std::vector& vec, size_t index, const T& item) -{ - vec.insert(vec.begin() + index, item); + return allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE); } -template -static void VectorRemove(std::vector& vec, size_t index) +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCopyMemoryToAllocation( + VmaAllocator allocator, + const void* pSrcHostPointer, + VmaAllocation dstAllocation, + VkDeviceSize dstAllocationLocalOffset, + VkDeviceSize size) { - vec.erase(vec.begin() + index); -} - -#else // #if VMA_USE_STL_VECTOR + VMA_ASSERT(allocator && pSrcHostPointer && dstAllocation); -/* Class with interface compatible with subset of std::vector. -T must be POD because constructors and destructors are not called and memcpy is -used for these objects. */ -template -class VmaVector -{ -public: - VmaVector(const AllocatorT& allocator) : - m_Allocator(allocator), - m_pArray(VMA_NULL), - m_Count(0), - m_Capacity(0) + if(size == 0) { + return VK_SUCCESS; } - VmaVector(AllocatorT& allocator) : - m_Allocator(allocator), - m_pArray(VMA_NULL), - m_Count(0), - m_Capacity(0) - { - } - - VmaVector(size_t count, AllocatorT& allocator) : - m_Allocator(allocator), - m_pArray(count ? (T*)VmaAllocateArray(allocator->m_pCallbacks, count) : VMA_NULL), - m_Count(count), - m_Capacity(count) - { - } - - VmaVector(const VmaVector& src) : - m_Allocator(src.m_Allocator), - m_pArray(src.m_Count ? 
(T*)VmaAllocateArray(src->m_pCallbacks, src.m_Count) : VMA_NULL), - m_Count(src.m_Count), - m_Capacity(src.m_Count) - { - if(m_Count != 0) - memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T)); - } - - ~VmaVector() - { - VmaFree(m_Allocator.m_pCallbacks, m_pArray); - } + VMA_DEBUG_LOG("vmaCopyMemoryToAllocation"); - VmaVector& operator=(const VmaVector& rhs) - { - if(&rhs != this) - { - Resize(rhs.m_Count); - if(m_Count != 0) - memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T)); - } - return *this; - } - - bool empty() const { return m_Count == 0; } - size_t size() const { return m_Count; } - T* data() { return m_pArray; } - const T* data() const { return m_pArray; } - - T& operator[](size_t index) - { - VMA_HEAVY_ASSERT(index < m_Count); - return m_pArray[index]; - } - const T& operator[](size_t index) const - { - VMA_HEAVY_ASSERT(index < m_Count); - return m_pArray[index]; - } + VMA_DEBUG_GLOBAL_MUTEX_LOCK - T& front() - { - VMA_HEAVY_ASSERT(m_Count > 0); - return m_pArray[0]; - } - const T& front() const - { - VMA_HEAVY_ASSERT(m_Count > 0); - return m_pArray[0]; - } - T& back() - { - VMA_HEAVY_ASSERT(m_Count > 0); - return m_pArray[m_Count - 1]; - } - const T& back() const - { - VMA_HEAVY_ASSERT(m_Count > 0); - return m_pArray[m_Count - 1]; - } + return allocator->CopyMemoryToAllocation(pSrcHostPointer, dstAllocation, dstAllocationLocalOffset, size); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCopyAllocationToMemory( + VmaAllocator allocator, + VmaAllocation srcAllocation, + VkDeviceSize srcAllocationLocalOffset, + void* pDstHostPointer, + VkDeviceSize size) +{ + VMA_ASSERT(allocator && srcAllocation && pDstHostPointer); - void reserve(size_t newCapacity, bool freeMemory = false) + if(size == 0) { - newCapacity = VMA_MAX(newCapacity, m_Count); - - if((newCapacity < m_Capacity) && !freeMemory) - newCapacity = m_Capacity; - - if(newCapacity != m_Capacity) - { - T* const newArray = newCapacity ? VmaAllocateArray(m_Allocator, newCapacity) : VMA_NULL; - if(m_Count != 0) - memcpy(newArray, m_pArray, m_Count * sizeof(T)); - VmaFree(m_Allocator.m_pCallbacks, m_pArray); - m_Capacity = newCapacity; - m_pArray = newArray; - } + return VK_SUCCESS; } - void resize(size_t newCount, bool freeMemory = false) - { - size_t newCapacity = m_Capacity; - if(newCount > m_Capacity) - newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8)); - else if(freeMemory) - newCapacity = newCount; + VMA_DEBUG_LOG("vmaCopyAllocationToMemory"); - if(newCapacity != m_Capacity) - { - T* const newArray = newCapacity ? 
VmaAllocateArray(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL; - const size_t elementsToCopy = VMA_MIN(m_Count, newCount); - if(elementsToCopy != 0) - memcpy(newArray, m_pArray, elementsToCopy * sizeof(T)); - VmaFree(m_Allocator.m_pCallbacks, m_pArray); - m_Capacity = newCapacity; - m_pArray = newArray; - } + VMA_DEBUG_GLOBAL_MUTEX_LOCK - m_Count = newCount; - } + return allocator->CopyAllocationToMemory(srcAllocation, srcAllocationLocalOffset, pDstHostPointer, size); +} - void clear(bool freeMemory = false) - { - resize(0, freeMemory); - } +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption( + VmaAllocator allocator, + uint32_t memoryTypeBits) +{ + VMA_ASSERT(allocator); - void insert(size_t index, const T& src) - { - VMA_HEAVY_ASSERT(index <= m_Count); - const size_t oldCount = size(); - resize(oldCount + 1); - if(index < oldCount) - memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T)); - m_pArray[index] = src; - } + VMA_DEBUG_LOG("vmaCheckCorruption"); - void remove(size_t index) - { - VMA_HEAVY_ASSERT(index < m_Count); - const size_t oldCount = size(); - if(index < oldCount - 1) - memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T)); - resize(oldCount - 1); - } + VMA_DEBUG_GLOBAL_MUTEX_LOCK - void push_back(const T& src) - { - const size_t newIndex = size(); - resize(newIndex + 1); - m_pArray[newIndex] = src; - } + return allocator->CheckCorruption(memoryTypeBits); +} - void pop_back() - { - VMA_HEAVY_ASSERT(m_Count > 0); - resize(size() - 1); - } +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentation( + VmaAllocator allocator, + const VmaDefragmentationInfo* pInfo, + VmaDefragmentationContext* pContext) +{ + VMA_ASSERT(allocator && pInfo && pContext); - void push_front(const T& src) - { - insert(0, src); - } + VMA_DEBUG_LOG("vmaBeginDefragmentation"); - void pop_front() + if (pInfo->pool != VMA_NULL) { - VMA_HEAVY_ASSERT(m_Count > 0); - remove(0); + // Check if run on supported algorithms + if (pInfo->pool->m_BlockVector.GetAlgorithm() & VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) + return VK_ERROR_FEATURE_NOT_PRESENT; } - typedef T* iterator; - - iterator begin() { return m_pArray; } - iterator end() { return m_pArray + m_Count; } + VMA_DEBUG_GLOBAL_MUTEX_LOCK -private: - AllocatorT m_Allocator; - T* m_pArray; - size_t m_Count; - size_t m_Capacity; -}; + *pContext = vma_new(allocator, VmaDefragmentationContext_T)(allocator, *pInfo); + return VK_SUCCESS; +} -template -static void VectorInsert(VmaVector& vec, size_t index, const T& item) +VMA_CALL_PRE void VMA_CALL_POST vmaEndDefragmentation( + VmaAllocator allocator, + VmaDefragmentationContext context, + VmaDefragmentationStats* pStats) { - vec.insert(index, item); + VMA_ASSERT(allocator && context); + + VMA_DEBUG_LOG("vmaEndDefragmentation"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + if (pStats) + context->GetStats(*pStats); + vma_delete(allocator, context); } -template -static void VectorRemove(VmaVector& vec, size_t index) +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass( + VmaAllocator VMA_NOT_NULL allocator, + VmaDefragmentationContext VMA_NOT_NULL context, + VmaDefragmentationPassMoveInfo* VMA_NOT_NULL pPassInfo) { - vec.remove(index); + VMA_ASSERT(context && pPassInfo); + + VMA_DEBUG_LOG("vmaBeginDefragmentationPass"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return context->DefragmentPassBegin(*pPassInfo); } -#endif // #if VMA_USE_STL_VECTOR +VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass( + VmaAllocator VMA_NOT_NULL allocator, + 
VmaDefragmentationContext VMA_NOT_NULL context, + VmaDefragmentationPassMoveInfo* VMA_NOT_NULL pPassInfo) +{ + VMA_ASSERT(context && pPassInfo); -//////////////////////////////////////////////////////////////////////////////// -// class VmaPoolAllocator + VMA_DEBUG_LOG("vmaEndDefragmentationPass"); -/* -Allocator for objects of type T using a list of arrays (pools) to speed up -allocation. Number of elements that can be allocated is not bounded because -allocator can create multiple blocks. -*/ -template -class VmaPoolAllocator + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return context->DefragmentPassEnd(*pPassInfo); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory( + VmaAllocator allocator, + VmaAllocation allocation, + VkBuffer buffer) { -public: - VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock); - ~VmaPoolAllocator(); - void Clear(); - T* Alloc(); - void Free(T* ptr); + VMA_ASSERT(allocator && allocation && buffer); -private: - union Item - { - uint32_t NextFreeIndex; - T Value; - }; + VMA_DEBUG_LOG("vmaBindBufferMemory"); - struct ItemBlock - { - Item* pItems; - uint32_t FirstFreeIndex; - }; - - const VkAllocationCallbacks* m_pAllocationCallbacks; - size_t m_ItemsPerBlock; - VmaVector< ItemBlock, VmaStlAllocator > m_ItemBlocks; + VMA_DEBUG_GLOBAL_MUTEX_LOCK - ItemBlock& CreateNewBlock(); -}; + return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL); +} -template -VmaPoolAllocator::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) : - m_pAllocationCallbacks(pAllocationCallbacks), - m_ItemsPerBlock(itemsPerBlock), - m_ItemBlocks(VmaStlAllocator(pAllocationCallbacks)) +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2( + VmaAllocator allocator, + VmaAllocation allocation, + VkDeviceSize allocationLocalOffset, + VkBuffer buffer, + const void* pNext) { - VMA_ASSERT(itemsPerBlock > 0); + VMA_ASSERT(allocator && allocation && buffer); + + VMA_DEBUG_LOG("vmaBindBufferMemory2"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext); } -template -VmaPoolAllocator::~VmaPoolAllocator() +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory( + VmaAllocator allocator, + VmaAllocation allocation, + VkImage image) { - Clear(); + VMA_ASSERT(allocator && allocation && image); + + VMA_DEBUG_LOG("vmaBindImageMemory"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->BindImageMemory(allocation, 0, image, VMA_NULL); } -template -void VmaPoolAllocator::Clear() +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2( + VmaAllocator allocator, + VmaAllocation allocation, + VkDeviceSize allocationLocalOffset, + VkImage image, + const void* pNext) { - for(size_t i = m_ItemBlocks.size(); i--; ) - vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock); - m_ItemBlocks.clear(); + VMA_ASSERT(allocator && allocation && image); + + VMA_DEBUG_LOG("vmaBindImageMemory2"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext); } -template -T* VmaPoolAllocator::Alloc() +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer( + VmaAllocator allocator, + const VkBufferCreateInfo* pBufferCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + VkBuffer* pBuffer, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) { - for(size_t i = m_ItemBlocks.size(); i--; ) - { - ItemBlock& block = m_ItemBlocks[i]; - // This block has some free items: 
Use first one. - if(block.FirstFreeIndex != UINT32_MAX) - { - Item* const pItem = &block.pItems[block.FirstFreeIndex]; - block.FirstFreeIndex = pItem->NextFreeIndex; - return &pItem->Value; - } - } + VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation); + VMA_DEBUG_LOG("vmaCreateBuffer"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + + return allocator->CreateBuffer(pBufferCreateInfo, pAllocationCreateInfo, + VMA_NULL, // pMemoryAllocateNext + pBuffer, pAllocation, pAllocationInfo); - // No block has free item: Create new one and use it. - ItemBlock& newBlock = CreateNewBlock(); - Item* const pItem = &newBlock.pItems[0]; - newBlock.FirstFreeIndex = pItem->NextFreeIndex; - return &pItem->Value; } -template -void VmaPoolAllocator::Free(T* ptr) +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBufferWithAlignment( + VmaAllocator allocator, + const VkBufferCreateInfo* pBufferCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + VkDeviceSize minAlignment, + VkBuffer* pBuffer, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) { - // Search all memory blocks to find ptr. - for(size_t i = 0; i < m_ItemBlocks.size(); ++i) - { - ItemBlock& block = m_ItemBlocks[i]; - - // Casting to union. - Item* pItemPtr; - memcpy(&pItemPtr, &ptr, sizeof(pItemPtr)); - - // Check if pItemPtr is in address range of this block. - if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock)) - { - const uint32_t index = static_cast(pItemPtr - block.pItems); - pItemPtr->NextFreeIndex = block.FirstFreeIndex; - block.FirstFreeIndex = index; - return; - } - } - VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool."); + VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && VmaIsPow2(minAlignment) && pBuffer && pAllocation); + VMA_DEBUG_LOG("vmaCreateBufferWithAlignment"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + + VmaAllocationCreateInfo allocCreateInfoCopy = *pAllocationCreateInfo; + allocCreateInfoCopy.minAlignment = VMA_MAX(allocCreateInfoCopy.minAlignment, minAlignment); + + return allocator->CreateBuffer(pBufferCreateInfo, &allocCreateInfoCopy, + VMA_NULL, // pMemoryAllocateNext + pBuffer, pAllocation, pAllocationInfo); } -template -typename VmaPoolAllocator::ItemBlock& VmaPoolAllocator::CreateNewBlock() +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateDedicatedBuffer( + VmaAllocator allocator, + const VkBufferCreateInfo* pBufferCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + void* pMemoryAllocateNext, + VkBuffer* pBuffer, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) { - ItemBlock newBlock = { - vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 }; + VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation); + VMA_DEBUG_LOG("vmaCreateDedicatedBuffer"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; - m_ItemBlocks.push_back(newBlock); + VmaAllocationCreateInfo allocCreateInfoCopy = *pAllocationCreateInfo; + allocCreateInfoCopy.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; - // Setup singly-linked list of all free items in this block. 
- for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i) - newBlock.pItems[i].NextFreeIndex = i + 1; - newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX; - return m_ItemBlocks.back(); + return allocator->CreateBuffer(pBufferCreateInfo, &allocCreateInfoCopy, + pMemoryAllocateNext, // pMemoryAllocateNext + pBuffer, pAllocation, pAllocationInfo); } -//////////////////////////////////////////////////////////////////////////////// -// class VmaRawList, VmaList +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingBuffer( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo, + VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer) +{ + return vmaCreateAliasingBuffer2(allocator, allocation, 0, pBufferCreateInfo, pBuffer); +} -#if VMA_USE_STL_LIST +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingBuffer2( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkDeviceSize allocationLocalOffset, + const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo, + VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer) +{ + VMA_ASSERT(allocator && pBufferCreateInfo && pBuffer && allocation); + VMA_ASSERT(allocationLocalOffset + pBufferCreateInfo->size <= allocation->GetSize()); -#define VmaList std::list + VMA_DEBUG_LOG("vmaCreateAliasingBuffer2"); -#else // #if VMA_USE_STL_LIST + *pBuffer = VK_NULL_HANDLE; -template -struct VmaListItem -{ - VmaListItem* pPrev; - VmaListItem* pNext; - T Value; -}; + if (pBufferCreateInfo->size == 0) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + if ((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 && + !allocator->m_UseKhrBufferDeviceAddress) + { + VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used."); + return VK_ERROR_INITIALIZATION_FAILED; + } -// Doubly linked list. -template -class VmaRawList -{ -public: - typedef VmaListItem ItemType; + VMA_DEBUG_GLOBAL_MUTEX_LOCK - VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks); - ~VmaRawList(); - void Clear(); + // 1. Create VkBuffer. + VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)( + allocator->m_hDevice, + pBufferCreateInfo, + allocator->GetAllocationCallbacks(), + pBuffer); + if (res >= 0) + { + // 2. Bind buffer with memory. + res = allocator->BindBufferMemory(allocation, allocationLocalOffset, *pBuffer, VMA_NULL); + if (res >= 0) + { + return VK_SUCCESS; + } + (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks()); + } + return res; +} - size_t GetCount() const { return m_Count; } - bool IsEmpty() const { return m_Count == 0; } +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer( + VmaAllocator allocator, + VkBuffer buffer, + VmaAllocation allocation) +{ + VMA_ASSERT(allocator); - ItemType* Front() { return m_pFront; } - const ItemType* Front() const { return m_pFront; } - ItemType* Back() { return m_pBack; } - const ItemType* Back() const { return m_pBack; } + if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE) + { + return; + } - ItemType* PushBack(); - ItemType* PushFront(); - ItemType* PushBack(const T& value); - ItemType* PushFront(const T& value); - void PopBack(); - void PopFront(); - - // Item can be null - it means PushBack. - ItemType* InsertBefore(ItemType* pItem); - // Item can be null - it means PushFront. 
- ItemType* InsertAfter(ItemType* pItem); + VMA_DEBUG_LOG("vmaDestroyBuffer"); - ItemType* InsertBefore(ItemType* pItem, const T& value); - ItemType* InsertAfter(ItemType* pItem, const T& value); + VMA_DEBUG_GLOBAL_MUTEX_LOCK - void Remove(ItemType* pItem); + if(buffer != VK_NULL_HANDLE) + { + (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks()); + } -private: - const VkAllocationCallbacks* const m_pAllocationCallbacks; - VmaPoolAllocator m_ItemAllocator; - ItemType* m_pFront; - ItemType* m_pBack; - size_t m_Count; + if(allocation != VK_NULL_HANDLE) + { + allocator->FreeMemory( + 1, // allocationCount + &allocation); + } +} - // Declared not defined, to block copy constructor and assignment operator. - VmaRawList(const VmaRawList& src); - VmaRawList& operator=(const VmaRawList& rhs); -}; +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage( + VmaAllocator allocator, + const VkImageCreateInfo* pImageCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + VkImage* pImage, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) +{ + VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation); + VMA_ASSERT((pImageCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT_COPY) == 0 && + "vmaCreateImage() doesn't support disjoint multi-planar images. Please allocate memory for the planes using vmaAllocateMemory() and bind them using vmaBindImageMemory2()."); + VMA_DEBUG_LOG("vmaCreateImage"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; -template -VmaRawList::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) : - m_pAllocationCallbacks(pAllocationCallbacks), - m_ItemAllocator(pAllocationCallbacks, 128), - m_pFront(VMA_NULL), - m_pBack(VMA_NULL), - m_Count(0) + return allocator->CreateImage(pImageCreateInfo, pAllocationCreateInfo, + VMA_NULL, // pMemoryAllocateNext + pImage, pAllocation, pAllocationInfo); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateDedicatedImage( + VmaAllocator allocator, + const VkImageCreateInfo* pImageCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + void* pMemoryAllocateNext, + VkImage* pImage, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) { + VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation); + VMA_ASSERT((pImageCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT_COPY) == 0 && + "vmaCreateDedicatedImage() doesn't support disjoint multi-planar images. Please allocate memory for the planes using vmaAllocateMemory() and bind them using vmaBindImageMemory2()."); + VMA_DEBUG_LOG("vmaCreateDedicatedImage"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + + VmaAllocationCreateInfo allocCreateInfoCopy = *pAllocationCreateInfo; + allocCreateInfoCopy.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; + + return allocator->CreateImage(pImageCreateInfo, &allocCreateInfoCopy, + pMemoryAllocateNext, // pMemoryAllocateNext + pImage, pAllocation, pAllocationInfo); } -template -VmaRawList::~VmaRawList() +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingImage( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo, + VkImage VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pImage) { - // Intentionally not calling Clear, because that would be unnecessary - // computations to return all items to m_ItemAllocator as free. 
+ return vmaCreateAliasingImage2(allocator, allocation, 0, pImageCreateInfo, pImage); } -template -void VmaRawList::Clear() +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingImage2( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkDeviceSize allocationLocalOffset, + const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo, + VkImage VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pImage) { - if(IsEmpty() == false) + VMA_ASSERT(allocator && pImageCreateInfo && pImage && allocation); + + *pImage = VK_NULL_HANDLE; + + VMA_DEBUG_LOG("vmaCreateImage2"); + + if (pImageCreateInfo->extent.width == 0 || + pImageCreateInfo->extent.height == 0 || + pImageCreateInfo->extent.depth == 0 || + pImageCreateInfo->mipLevels == 0 || + pImageCreateInfo->arrayLayers == 0) { - ItemType* pItem = m_pBack; - while(pItem != VMA_NULL) + return VK_ERROR_INITIALIZATION_FAILED; + } + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + // 1. Create VkImage. + VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)( + allocator->m_hDevice, + pImageCreateInfo, + allocator->GetAllocationCallbacks(), + pImage); + if (res >= 0) + { + // 2. Bind image with memory. + res = allocator->BindImageMemory(allocation, allocationLocalOffset, *pImage, VMA_NULL); + if (res >= 0) { - ItemType* const pPrevItem = pItem->pPrev; - m_ItemAllocator.Free(pItem); - pItem = pPrevItem; + return VK_SUCCESS; } - m_pFront = VMA_NULL; - m_pBack = VMA_NULL; - m_Count = 0; + (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks()); } + return res; } -template -VmaListItem* VmaRawList::PushBack() +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage( + VmaAllocator VMA_NOT_NULL allocator, + VkImage VMA_NULLABLE_NON_DISPATCHABLE image, + VmaAllocation VMA_NULLABLE allocation) { - ItemType* const pNewItem = m_ItemAllocator.Alloc(); - pNewItem->pNext = VMA_NULL; - if(IsEmpty()) - { - pNewItem->pPrev = VMA_NULL; - m_pFront = pNewItem; - m_pBack = pNewItem; - m_Count = 1; - } - else + VMA_ASSERT(allocator); + + if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE) { - pNewItem->pPrev = m_pBack; - m_pBack->pNext = pNewItem; - m_pBack = pNewItem; - ++m_Count; + return; } - return pNewItem; -} -template -VmaListItem* VmaRawList::PushFront() -{ - ItemType* const pNewItem = m_ItemAllocator.Alloc(); - pNewItem->pPrev = VMA_NULL; - if(IsEmpty()) + VMA_DEBUG_LOG("vmaDestroyImage"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + if(image != VK_NULL_HANDLE) { - pNewItem->pNext = VMA_NULL; - m_pFront = pNewItem; - m_pBack = pNewItem; - m_Count = 1; + (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks()); } - else + if(allocation != VK_NULL_HANDLE) { - pNewItem->pNext = m_pFront; - m_pFront->pPrev = pNewItem; - m_pFront = pNewItem; - ++m_Count; + allocator->FreeMemory( + 1, // allocationCount + &allocation); } - return pNewItem; } -template -VmaListItem* VmaRawList::PushBack(const T& value) +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateVirtualBlock( + const VmaVirtualBlockCreateInfo* VMA_NOT_NULL pCreateInfo, + VmaVirtualBlock VMA_NULLABLE * VMA_NOT_NULL pVirtualBlock) { - ItemType* const pNewItem = PushBack(); - pNewItem->Value = value; - return pNewItem; + VMA_ASSERT(pCreateInfo && pVirtualBlock); + VMA_ASSERT(pCreateInfo->size > 0); + VMA_DEBUG_LOG("vmaCreateVirtualBlock"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + *pVirtualBlock = vma_new(pCreateInfo->pAllocationCallbacks, VmaVirtualBlock_T)(*pCreateInfo); + return VK_SUCCESS; + + /* + Code 
for the future if we ever need a separate Init() method that could fail: + + VkResult res = (*pVirtualBlock)->Init(); + if(res < 0) + { + vma_delete(pCreateInfo->pAllocationCallbacks, *pVirtualBlock); + *pVirtualBlock = VK_NULL_HANDLE; + } + return res; + */ } -template -VmaListItem* VmaRawList::PushFront(const T& value) +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyVirtualBlock(VmaVirtualBlock VMA_NULLABLE virtualBlock) { - ItemType* const pNewItem = PushFront(); - pNewItem->Value = value; - return pNewItem; + if(virtualBlock != VK_NULL_HANDLE) + { + VMA_DEBUG_LOG("vmaDestroyVirtualBlock"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + VkAllocationCallbacks allocationCallbacks = virtualBlock->m_AllocationCallbacks; // Have to copy the callbacks when destroying. + vma_delete(&allocationCallbacks, virtualBlock); + } } -template -void VmaRawList::PopBack() +VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaIsVirtualBlockEmpty(VmaVirtualBlock VMA_NOT_NULL virtualBlock) { - VMA_HEAVY_ASSERT(m_Count > 0); - ItemType* const pBackItem = m_pBack; - ItemType* const pPrevItem = pBackItem->pPrev; - if(pPrevItem != VMA_NULL) - pPrevItem->pNext = VMA_NULL; - m_pBack = pPrevItem; - m_ItemAllocator.Free(pBackItem); - --m_Count; + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE); + VMA_DEBUG_LOG("vmaIsVirtualBlockEmpty"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + return virtualBlock->IsEmpty() ? VK_TRUE : VK_FALSE; } -template -void VmaRawList::PopFront() +VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualAllocationInfo(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation, VmaVirtualAllocationInfo* VMA_NOT_NULL pVirtualAllocInfo) { - VMA_HEAVY_ASSERT(m_Count > 0); - ItemType* const pFrontItem = m_pFront; - ItemType* const pNextItem = pFrontItem->pNext; - if(pNextItem != VMA_NULL) - pNextItem->pPrev = VMA_NULL; - m_pFront = pNextItem; - m_ItemAllocator.Free(pFrontItem); - --m_Count; + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pVirtualAllocInfo != VMA_NULL); + VMA_DEBUG_LOG("vmaGetVirtualAllocationInfo"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + virtualBlock->GetAllocationInfo(allocation, *pVirtualAllocInfo); } -template -void VmaRawList::Remove(ItemType* pItem) +VMA_CALL_PRE VkResult VMA_CALL_POST vmaVirtualAllocate(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + const VmaVirtualAllocationCreateInfo* VMA_NOT_NULL pCreateInfo, VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pAllocation, + VkDeviceSize* VMA_NULLABLE pOffset) { - VMA_HEAVY_ASSERT(pItem != VMA_NULL); - VMA_HEAVY_ASSERT(m_Count > 0); - - if(pItem->pPrev != VMA_NULL) - pItem->pPrev->pNext = pItem->pNext; - else - { - VMA_HEAVY_ASSERT(m_pFront == pItem); - m_pFront = pItem->pNext; - } - - if(pItem->pNext != VMA_NULL) - pItem->pNext->pPrev = pItem->pPrev; - else - { - VMA_HEAVY_ASSERT(m_pBack == pItem); - m_pBack = pItem->pPrev; - } - - m_ItemAllocator.Free(pItem); - --m_Count; + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pCreateInfo != VMA_NULL && pAllocation != VMA_NULL); + VMA_DEBUG_LOG("vmaVirtualAllocate"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + return virtualBlock->Allocate(*pCreateInfo, *pAllocation, pOffset); } -template -VmaListItem* VmaRawList::InsertBefore(ItemType* pItem) +VMA_CALL_PRE void VMA_CALL_POST vmaVirtualFree(VmaVirtualBlock VMA_NOT_NULL virtualBlock, VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE allocation) { - if(pItem != VMA_NULL) + if(allocation != VK_NULL_HANDLE) { - ItemType* const prevItem = pItem->pPrev; - ItemType* const newItem = m_ItemAllocator.Alloc(); - newItem->pPrev = prevItem; - newItem->pNext 
= pItem; - pItem->pPrev = newItem; - if(prevItem != VMA_NULL) - prevItem->pNext = newItem; - else - { - VMA_HEAVY_ASSERT(m_pFront = pItem); - m_pFront = newItem; - } - ++m_Count; - return newItem; + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE); + VMA_DEBUG_LOG("vmaVirtualFree"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + virtualBlock->Free(allocation); } - else - return PushBack(); } -template -VmaListItem* VmaRawList::InsertAfter(ItemType* pItem) +VMA_CALL_PRE void VMA_CALL_POST vmaClearVirtualBlock(VmaVirtualBlock VMA_NOT_NULL virtualBlock) { - if(pItem != VMA_NULL) - { - ItemType* const nextItem = pItem->pNext; - ItemType* const newItem = m_ItemAllocator.Alloc(); - newItem->pNext = nextItem; - newItem->pPrev = pItem; - pItem->pNext = newItem; - if(nextItem != VMA_NULL) - nextItem->pPrev = newItem; - else - { - VMA_HEAVY_ASSERT(m_pBack = pItem); - m_pBack = newItem; - } - ++m_Count; - return newItem; - } - else - return PushFront(); + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE); + VMA_DEBUG_LOG("vmaClearVirtualBlock"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + virtualBlock->Clear(); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaSetVirtualAllocationUserData(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation, void* VMA_NULLABLE pUserData) +{ + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE); + VMA_DEBUG_LOG("vmaSetVirtualAllocationUserData"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + virtualBlock->SetAllocationUserData(allocation, pUserData); } -template -VmaListItem* VmaRawList::InsertBefore(ItemType* pItem, const T& value) +VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualBlockStatistics(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + VmaStatistics* VMA_NOT_NULL pStats) { - ItemType* const newItem = InsertBefore(pItem); - newItem->Value = value; - return newItem; + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pStats != VMA_NULL); + VMA_DEBUG_LOG("vmaGetVirtualBlockStatistics"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + virtualBlock->GetStatistics(*pStats); } -template -VmaListItem* VmaRawList::InsertAfter(ItemType* pItem, const T& value) +VMA_CALL_PRE void VMA_CALL_POST vmaCalculateVirtualBlockStatistics(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + VmaDetailedStatistics* VMA_NOT_NULL pStats) { - ItemType* const newItem = InsertAfter(pItem); - newItem->Value = value; - return newItem; + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pStats != VMA_NULL); + VMA_DEBUG_LOG("vmaCalculateVirtualBlockStatistics"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + virtualBlock->CalculateDetailedStatistics(*pStats); } -template -class VmaList +#if VMA_STATS_STRING_ENABLED + +VMA_CALL_PRE void VMA_CALL_POST vmaBuildVirtualBlockStatsString(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString, VkBool32 detailedMap) { -public: - class const_iterator; + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && ppStatsString != VMA_NULL); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + const VkAllocationCallbacks* allocationCallbacks = virtualBlock->GetAllocationCallbacks(); + VmaStringBuilder sb(allocationCallbacks); + virtualBlock->BuildStatsString(detailedMap != VK_FALSE, sb); + *ppStatsString = VmaCreateStringCopy(allocationCallbacks, sb.GetData(), sb.GetLength()); +} - class iterator +VMA_CALL_PRE void VMA_CALL_POST vmaFreeVirtualBlockStatsString(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + char* VMA_NULLABLE pStatsString) +{ + if(pStatsString != VMA_NULL) { - public: - iterator() : - m_pList(VMA_NULL), - m_pItem(VMA_NULL) - { - } + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; 
+ VmaFreeString(virtualBlock->GetAllocationCallbacks(), pStatsString); + } +} +#if VMA_EXTERNAL_MEMORY_WIN32 +VMA_CALL_PRE VkResult VMA_CALL_POST vmaGetMemoryWin32Handle(VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, HANDLE hTargetProcess, HANDLE* VMA_NOT_NULL pHandle) +{ + VMA_ASSERT(allocator && allocation && pHandle); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + return allocation->GetWin32Handle(allocator, VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, hTargetProcess, pHandle); +} +VMA_CALL_PRE VkResult VMA_CALL_POST vmaGetMemoryWin32Handle2(VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE hTargetProcess, HANDLE* VMA_NOT_NULL pHandle) +{ + VMA_ASSERT(allocator && allocation && pHandle); + VMA_ASSERT(handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR || + handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR || + handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR || + handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR || + handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR || + handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + return allocation->GetWin32Handle(allocator, handleType, hTargetProcess, pHandle); +} +#endif // VMA_EXTERNAL_MEMORY_WIN32 +#endif // VMA_STATS_STRING_ENABLED +#endif // _VMA_PUBLIC_INTERFACE +#endif // VMA_IMPLEMENTATION - T& operator*() const - { - VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); - return m_pItem->Value; - } - T* operator->() const - { - VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); - return &m_pItem->Value; - } +/** +\page faq Frequently asked questions - iterator& operator++() - { - VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); - m_pItem = m_pItem->pNext; - return *this; - } - iterator& operator--() - { - if(m_pItem != VMA_NULL) - m_pItem = m_pItem->pPrev; - else - { - VMA_HEAVY_ASSERT(!m_pList.IsEmpty()); - m_pItem = m_pList->Back(); - } - return *this; - } +What is VMA? + +Vulkan(R) Memory Allocator (VMA) is a software library for developers who use the Vulkan graphics API in their code. +It is written in C++. - iterator operator++(int) - { - iterator result = *this; - ++*this; - return result; - } - iterator operator--(int) - { - iterator result = *this; - --*this; - return result; - } +What is the license of VMA? - bool operator==(const iterator& rhs) const - { - VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); - return m_pItem == rhs.m_pItem; - } - bool operator!=(const iterator& rhs) const - { - VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); - return m_pItem != rhs.m_pItem; - } +VMA is licensed under MIT, which means it is open source and free software. - private: - VmaRawList* m_pList; - VmaListItem* m_pItem; +What is the purpose of VMA? - iterator(VmaRawList* pList, VmaListItem* pItem) : - m_pList(pList), - m_pItem(pItem) - { - } +VMA helps with handling one aspect of Vulkan usage, which is device memory management - +allocation of `VkDeviceMemory` objects, and creation of `VkBuffer` and `VkImage` objects. + +Do I need to use VMA? + +You don't need to, but it may be beneficial in many cases. +Vulkan is a complex and low-level API, so libraries like this that abstract certain aspects of the API +and bring them to a higher level are useful. +When developing any non-trivial Vulkan application, you likely need to use a memory allocator. +Using VMA can save time compared to implementing your own. + +When should I not use VMA? 
+
+While VMA is useful for most applications that use the Vulkan API, there are cases
+when it may be a better choice not to use it.
+For example, if the application is very simple, e.g. serving as a sample or a learning exercise
+to help you understand or teach others the basics of Vulkan,
+and it creates only a small number of buffers and images, then including VMA may be overkill.
+Developing your own memory allocator may also be a good learning exercise.
+
+What are the benefits of using VMA?
+
+-# VMA helps in choosing the optimal memory type for your resource (buffer or image).
+ In Vulkan, we have a two-level hierarchy of memory heaps and types with different flags,
+ and each device can expose a different set of those.
+ Implementing logic that would select the best memory type on each platform is a non-trivial task.
+ VMA does that, expecting only a high-level description of the intended usage of your resource.
+ For more information, see \subpage choosing_memory_type.
+-# VMA allocates large blocks of `VkDeviceMemory` and sub-allocates parts of them for your resources.
+ Allocating a new block of device memory may be a time-consuming operation.
+ Some platforms also have a limit on the maximum number of those blocks (`VkPhysicalDeviceLimits::maxMemoryAllocationCount`)
+ as low as 4096, so allocating a separate one for each resource is not an option.
+ Sub-allocating parts of a memory block requires implementing an allocation algorithm,
+ which is a non-trivial task.
+ VMA does that, using an advanced and efficient algorithm that works well in various use cases.
+-# VMA offers a simple API that allows creating buffers and textures within one function call.
+ In Vulkan, the creation of a resource is a multi-step process.
+ You need to create a `VkBuffer` or `VkImage`, ask it for memory requirements,
+ allocate a `VkDeviceMemory` object, and finally bind the resource to the memory block.
+ VMA does that automatically under a simple API within one function call: vmaCreateBuffer(), vmaCreateImage().
+
+The library does much more under the hood.
+For example, it respects limits like `bufferImageGranularity`, `nonCoherentAtomSize`,
+and `VkMemoryDedicatedRequirements` automatically, so you don't need to think about it.
+
+Which version should I pick?
+
+You can just pick [the latest version from the "master" branch](https://github.com/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator).
+It is kept in good shape most of the time, compiling and working correctly,
+with no compatibility-breaking changes and no unfinished code.
+
+If you want an even more stable version, you can pick
+[the latest official release](https://github.com/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator/releases).
+Current code from the master branch is occasionally tagged as a release,
+with the [CHANGELOG](https://github.com/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator/blob/master/CHANGELOG.md)
+carefully curated to enumerate all important changes since the previous version.
+
+The library uses [Semantic Versioning](https://semver.org/),
+which means versions that only differ in the patch number are forward and backward compatible
+(e.g., only fixing some bugs), while versions that differ in the minor number are backward compatible
+(e.g., only adding new functions to the API, but not removing or changing existing ones).
+
+How do I integrate it with my code?
+
+VMA is an STB-style single-header C++ library.
+
+You can pull the entire GitHub repository, e.g. using Git submodules.
+The repository contains ancillary files like the CMake script, Doxygen config file,
+sample application, test suite, and others.
+You can compile it as a library and link with your project.
+
+However, a simpler way is to take the single file "include/vk_mem_alloc.h" and include it in your project.
+This extensive file contains all you need: a copyright notice,
+declarations of the public library interface (API), its internal implementation,
+and even the documentation in the form of Doxygen-style comments.
+
+The "STB style" means not everything is implemented as inline functions in the header file.
+You need to extract the internal implementation using a special macro.
+This means that in every .cpp file where you need to use the library you should
+`#include "vk_mem_alloc.h"` to include its public interface,
+but additionally in exactly one .cpp file you should `#define VMA_IMPLEMENTATION`
+before this `#include` to enable its internal implementation.
+For more information, see [Project setup](@ref quick_start_project_setup).
+
+Does the library work with C or C++?
+
+The internal implementation of VMA is written in C++.
+It is distributed in source format, so you need a compiler supporting at least C++14 to build it.
+
+However, the public interface of the library is written in C - using only enums, structs, and global functions,
+in the same style as Vulkan, so you can use the library in C code.
+
+I am not a fan of modern C++. Can I still use it?
+
+Very likely yes.
+We acknowledge that many C++ developers, especially in the games industry,
+do not appreciate all the latest features that the language has to offer.
+
+- VMA doesn't throw or catch any C++ exceptions.
+ It reports errors by returning a `VkResult` value instead, just like Vulkan.
+ You can still use VMA if your project doesn't use exceptions, your code is not exception-safe,
+ or you disable exception handling in the compiler options.
+- VMA doesn't use C++ run-time type information like `typeid` or `dynamic_cast`,
+ so if you disable RTTI in the compiler options, you can still use the library.
+- VMA uses only a limited subset of the standard C and C++ library.
+ It doesn't use STL containers like `std::vector`, `map`, or `string`,
+ in either the public interface or the internal implementation.
+ It implements its own containers instead.
+- If you don't use the default heap memory allocator through `malloc/free` or `new/delete`
+ but implement your own allocator instead, you can pass it to VMA and
+ the library will use your functions for every dynamic heap allocation made internally,
+ as well as passing it further to Vulkan functions. For details, see [Custom host memory allocator](@ref custom_memory_allocator).
+
+Is it available for other programming languages?
+
+VMA is a C++ library with a C interface in a similar style to Vulkan.
+An object-oriented C++ wrapper or bindings to other programming languages are out of the scope of this project,
+but they are welcome as external projects.
+Some of them are listed in [README.md, "See also" section](https://github.com/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator?tab=readme-ov-file#see-also),
+including bindings to C++, Python, Rust, and Haskell.
+Before using any of them, please check if they are still maintained and updated to use a recent version of VMA.
+
+What platforms does it support?
+
+VMA relies only on Vulkan and some parts of the standard C and C++ library,
+so it supports any platform where a C++ compiler and Vulkan are available.
+It is developed mostly on Microsoft(R) Windows(R),
+but it has been successfully used on Linux(R), macOS, Android, and even FreeBSD and Raspberry Pi.
+
+Does it only work on AMD GPUs?
+
+No! While VMA is published by AMD, it works on any GPU that supports Vulkan,
+whether a discrete PC graphics card, integrated graphics in a processor, or a mobile SoC.
+It doesn't give AMD GPUs any advantage over other GPUs.
+
+What Vulkan versions and extensions are supported?
+
+VMA is updated to support the latest versions of Vulkan.
+It currently supports Vulkan up to 1.4.
+The library also supports older versions down to the first release of Vulkan 1.0.
+Requiring a higher minimum version would help simplify the code,
+but we acknowledge that developers on some platforms like Android still use older versions,
+so support is provided for all of them.
+
+Among the many extensions available for Vulkan, only a few interact with memory management.
+VMA can automatically take advantage of them. Some of them are:
+VK_EXT_memory_budget, VK_EXT_memory_priority, VK_KHR_external_memory_win32, and the VK_KHR_maintenance*
+extensions that were later promoted to newer versions of the core Vulkan API.
+To use them, it is your responsibility to check whether they are available on the current system and, if so,
+enable them while creating the Vulkan device object.
+You also need to pass the appropriate #VmaAllocatorCreateFlagBits to inform VMA that they are enabled.
+Then, the library will automatically take advantage of them.
+For more information and the full list of supported extensions, see [Enabling extensions](@ref quick_start_initialization_enabling_extensions).
+
+Does it support other graphics APIs, like Microsoft DirectX(R) 12?
+
+No, but we offer an equivalent library for DirectX 12:
+[D3D12 Memory Allocator](https://github.com/GPUOpen-LibrariesAndSDKs/D3D12MemoryAllocator).
+It uses the same core allocation algorithm.
+It also shares many features with VMA, like the support for custom pools and the virtual allocator.
+However, it is not identical in terms of the features supported.
+Its API also looks different, because while the interface of VMA is similar in style to Vulkan,
+the interface of D3D12MA is similar to DirectX 12.
+
+Is the library lightweight?
+
+It depends on how you define it.
+VMA is implemented with high-performance and real-time applications like video games in mind.
+The CPU performance overhead of using this library is low.
+It uses a high-quality allocation algorithm called Two-Level Segregated Fit (TLSF),
+which in most cases can find a free place for a new allocation in a few steps.
+The library also doesn't perform many CPU heap allocations.
+In many cases, an allocation happens with zero new CPU heap allocations performed by the library.
+Even the creation of a #VmaAllocation object doesn't typically involve a CPU heap allocation,
+because these objects are returned out of a dedicated memory pool.
+
+On the other hand, VMA needs some extra memory and extra time
+to maintain metadata about the occupied and free regions of the memory blocks,
+and the algorithms and data structures used must be generic enough to work well in most cases.
+If you develop your program for a very resource-constrained platform,
+a custom allocator simpler than VMA may be a better choice.
+
+Does it have documentation?
+
+Yes!
VMA comes with full documentation of all elements of the API (functions, structures, enums), +as well as many generic chapters that provide an introduction, +describe core concepts of the library, good practices, etc. +The entire documentation is written in form of code comments inside "vk_mem_alloc.h", in Doxygen format. +You can access it in multiple ways: + +- Browsable online: https://gpuopen-librariesandsdks.github.io/VulkanMemoryAllocator/html/ +- Local HTML pages available after you clone the repository and open file "docs/html/index.html". +- You can rebuild the documentation in HTML or some other format from the source code using Doxygen. + Configuration file "Doxyfile" is part of the repository. +- Finally, you can just read the comments preceding declarations of any public functions of the library. + +Is it a mature project? + +Yes! The library is in development since June 2017, has over 1000 commits, over 500 issue tickets +and pull requests (most of them resolved), and over 80 contributors. +It is distributed together with Vulkan SDK. +It is used by many software projects, including some large and popular ones like Qt or Blender, +as well as some AAA games. +According to the [LunarG 2024 Ecosystem Survey](https://www.lunarg.com/2024-ecosystem-survey-progress-report-released/), +it is used by over 50% of Vulkan developers. + +How can I contribute to the project? + +If you have an idea for improvement or a feature request, +you can go to [the library repository](https://github.com/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator) +and create an Issue ticket, describing your idea. +You can also implement it yourself by forking the repository, making changes to the code, +and creating a Pull request. + +If you want to ask a question, you can also create a ticket the same way. +Before doing this, please make sure you read the relevant part of the Vulkan specification and VMA documentation, +where you may find the answers to your question. + +If you want to report a suspected bug, you can also create a ticket the same way. +Before doing this, please put some effort into the investigation of whether the bug is really +in the library and not in your code or in the Vulkan implementation (the GPU driver) on your platform: + +- Enable Vulkan validation layer and make sure it is free from any errors. +- Make sure `VMA_ASSERT` is defined to an implementation that can report a failure and not ignore it. +- Try making your allocation using pure Vulkan functions rather than VMA and see if the bug persists. + +I found some compilation warnings. How can we fix them? + +Seeing compiler warnings may be annoying to some developers, +but it is a design decision to not fix all of them. +Due to the nature of the C++ language, certain preprocessor macros can make some variables unused, +function parameters unreferenced, or conditional expressions constant in some configurations. +The code of this library should not be bigger or more complicated just to silence these warnings. +It is recommended to disable such warnings instead. +For more information, see [Features not supported](@ref general_considerations_features_not_supported). + +However, if you observe a warning that is really dangerous, e.g., +about an implicit conversion from a larger to a smaller integer type, please report it and it will be fixed ASAP. + + +\page quick_start Quick start + +\section quick_start_project_setup Project setup + +Vulkan Memory Allocator comes in form of a "stb-style" single header file. 
+While you can pull the entire repository, e.g. as a Git submodule, and build it using the provided CMake script,
+you don't need to build it as a separate library project.
+You can add the file "vk_mem_alloc.h" directly to your project and commit it to your code repository next to your other source files.
+
+"Single header" doesn't mean that everything is contained in C/C++ declarations,
+as it tends to be in the case of inline functions or C++ templates.
+It means that the implementation is bundled with the interface in a single file and needs to be extracted using a preprocessor macro.
+If you don't do it properly, it will result in linker errors.
+
+To do it properly:
+
+-# Include the "vk_mem_alloc.h" file in each CPP file where you want to use the library.
+   This includes declarations of all members of the library.
+-# In exactly one CPP file, define the following macro before this include.
+   It also enables the internal definitions.
+
+\code
+#define VMA_IMPLEMENTATION
+#include "vk_mem_alloc.h"
+\endcode
+
+It may be a good idea to create a dedicated CPP file just for this purpose, e.g. "VmaUsage.cpp".
+
+This library includes header `<vulkan/vulkan.h>`, which in turn
+includes `<windows.h>` on Windows. If you need some specific macros defined
+before including these headers (like `WIN32_LEAN_AND_MEAN` or
+`WINVER` for Windows, `VK_USE_PLATFORM_WIN32_KHR` for Vulkan), you must define
+them before every `#include` of this library.
+It may be a good idea to create a dedicated header file for this purpose, e.g. "VmaUsage.h",
+that will be included in other source files instead of the VMA header directly.
- friend class VmaList;
- //friend class VmaList::const_iterator;
- };
+This library is written in C++, but has a C-compatible interface.
+Thus, you can include and use "vk_mem_alloc.h" in C or C++ code, but the full
+implementation with the `VMA_IMPLEMENTATION` macro must be compiled as C++, NOT as C.
+Some features of C++14 are used and required. Features of C++20 are used optionally when available.
+Some headers of the standard C and C++ library are used, but STL containers, RTTI, and C++ exceptions are not.
- class const_iterator
- {
- public:
- const_iterator() :
- m_pList(VMA_NULL),
- m_pItem(VMA_NULL)
- {
- }
- const_iterator(const iterator& src) :
- m_pList(src.m_pList),
- m_pItem(src.m_pItem)
- {
- }
-
- const T& operator*() const
- {
- VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- return m_pItem->Value;
- }
- const T* operator->() const
- {
- VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- return &m_pItem->Value;
- }
+\section quick_start_initialization Initialization
- const_iterator& operator++()
- {
- VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- m_pItem = m_pItem->pNext;
- return *this;
- }
- const_iterator& operator--()
- {
- if(m_pItem != VMA_NULL)
- m_pItem = m_pItem->pPrev;
- else
- {
- VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
- m_pItem = m_pList->Back();
- }
- return *this;
- }
+VMA offers a library interface in a style similar to Vulkan, with object handles like #VmaAllocation,
+structures describing parameters of objects to be created like #VmaAllocationCreateInfo,
+and error codes returned from functions using the `VkResult` type.
- const_iterator operator++(int)
- {
- const_iterator result = *this;
- ++*this;
- return result;
- }
- const_iterator operator--(int)
- {
- const_iterator result = *this;
- --*this;
- return result;
- }
+The first and main object that needs to be created is #VmaAllocator.
+It represents the initialization of the entire library.
+Only one such object should be created per `VkDevice`.
+You should create it at program startup, after `VkDevice` was created, and before any device memory allocation needs to be made.
+It must be destroyed before `VkDevice` is destroyed.
- bool operator==(const const_iterator& rhs) const
- {
- VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
- return m_pItem == rhs.m_pItem;
- }
- bool operator!=(const const_iterator& rhs) const
- {
- VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
- return m_pItem != rhs.m_pItem;
- }
-
- private:
- const_iterator(const VmaRawList* pList, const VmaListItem* pItem) :
- m_pList(pList),
- m_pItem(pItem)
- {
- }
+At program startup:
- const VmaRawList* m_pList;
- const VmaListItem* m_pItem;
+-# Initialize Vulkan to have `VkInstance`, `VkPhysicalDevice`, and `VkDevice` objects.
+-# Fill the VmaAllocatorCreateInfo structure and call vmaCreateAllocator() to create the #VmaAllocator object.
+
+Only the members `physicalDevice`, `device`, `instance` are required.
+However, you should inform the library which Vulkan version you use by setting
+VmaAllocatorCreateInfo::vulkanApiVersion and which extensions you enabled
+by setting VmaAllocatorCreateInfo::flags.
+Otherwise, VMA would use only features of Vulkan 1.0 core with no extensions.
+See below for details.
+
+\subsection quick_start_initialization_selecting_vulkan_version Selecting Vulkan version
+
+VMA supports Vulkan versions down to 1.0, for backward compatibility.
+If you want to use a higher version, you need to inform the library about it.
+This is a two-step process.
+
+Step 1: Compile time. By default, VMA compiles with code supporting the highest
+Vulkan version found in the included `<vulkan/vulkan.h>` that is also supported by the library.
+If this is OK, you don't need to do anything.
+However, if you want to compile VMA as if only some lower Vulkan version was available,
+define the macro `VMA_VULKAN_VERSION` before every `#include "vk_mem_alloc.h"`.
+It should have a decimal numeric value in the form ABBBCCC, where A = major, BBB = minor, CCC = patch Vulkan version.
+For example, to compile against Vulkan 1.2:
+
+\code
+#define VMA_VULKAN_VERSION 1002000 // Vulkan 1.2
+#include "vk_mem_alloc.h"
+\endcode
+
+Step 2: Runtime. Even when compiled with a higher Vulkan version available,
+VMA can use only features of a lower version, which is configurable during creation of the #VmaAllocator object.
+By default, only Vulkan 1.0 is used.
+To initialize the allocator with support for a higher Vulkan version, you need to set the member
+VmaAllocatorCreateInfo::vulkanApiVersion to an appropriate value, e.g. using constants like `VK_API_VERSION_1_2`.
+See the code sample below.
+
+\subsection quick_start_initialization_importing_vulkan_functions Importing Vulkan functions
+
+You may need to configure importing Vulkan functions. There are 4 ways to do this:
+
+-# **If you link with the Vulkan static library** (e.g. "vulkan-1.lib" on Windows):
+   - You don't need to do anything.
+   - VMA will use these, as the macro `VMA_STATIC_VULKAN_FUNCTIONS` is defined to 1 by default.
+-# **If you want VMA to fetch pointers to Vulkan functions dynamically** using `vkGetInstanceProcAddr`,
+   `vkGetDeviceProcAddr` (this is the option presented in the example below):
+   - Define `VMA_STATIC_VULKAN_FUNCTIONS` to 0, `VMA_DYNAMIC_VULKAN_FUNCTIONS` to 1.
+   - Provide pointers to these two functions via VmaVulkanFunctions::vkGetInstanceProcAddr,
+     VmaVulkanFunctions::vkGetDeviceProcAddr.
+   - The library will fetch pointers to all other functions it needs internally.
+-# **If you fetch pointers to all Vulkan functions in a custom way**: + - Define `VMA_STATIC_VULKAN_FUNCTIONS` and `VMA_DYNAMIC_VULKAN_FUNCTIONS` to 0. + - Pass these pointers via structure #VmaVulkanFunctions. +-# **If you use [volk library](https://github.com/zeux/volk)**: + - Define `VMA_STATIC_VULKAN_FUNCTIONS` and `VMA_DYNAMIC_VULKAN_FUNCTIONS` to 0. + - Use function vmaImportVulkanFunctionsFromVolk() to fill in the structure #VmaVulkanFunctions. + For more information, see the description of this function. + +\subsection quick_start_initialization_enabling_extensions Enabling extensions + +VMA can automatically use following Vulkan extensions. +If you found them available on the selected physical device and you enabled them +while creating `VkInstance` / `VkDevice` object, inform VMA about their availability +by setting appropriate flags in VmaAllocatorCreateInfo::flags. + +Vulkan extension | VMA flag +------------------------------|----------------------------------------------------- +VK_KHR_dedicated_allocation | #VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT +VK_KHR_bind_memory2 | #VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT +VK_KHR_maintenance4 | #VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE4_BIT +VK_KHR_maintenance5 | #VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE5_BIT +VK_EXT_memory_budget | #VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT +VK_KHR_buffer_device_address | #VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT +VK_EXT_memory_priority | #VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT +VK_AMD_device_coherent_memory | #VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT +VK_KHR_external_memory_win32 | #VMA_ALLOCATOR_CREATE_KHR_EXTERNAL_MEMORY_WIN32_BIT + +Example with fetching pointers to Vulkan functions dynamically: + +\code +#define VMA_STATIC_VULKAN_FUNCTIONS 0 +#define VMA_DYNAMIC_VULKAN_FUNCTIONS 1 +#include "vk_mem_alloc.h" + +... + +VmaVulkanFunctions vulkanFunctions = {}; +vulkanFunctions.vkGetInstanceProcAddr = &vkGetInstanceProcAddr; +vulkanFunctions.vkGetDeviceProcAddr = &vkGetDeviceProcAddr; + +VmaAllocatorCreateInfo allocatorCreateInfo = {}; +allocatorCreateInfo.flags = VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT; +allocatorCreateInfo.vulkanApiVersion = VK_API_VERSION_1_2; +allocatorCreateInfo.physicalDevice = physicalDevice; +allocatorCreateInfo.device = device; +allocatorCreateInfo.instance = instance; +allocatorCreateInfo.pVulkanFunctions = &vulkanFunctions; + +VmaAllocator allocator; +vmaCreateAllocator(&allocatorCreateInfo, &allocator); + +// Entire program... + +// At the end, don't forget to: +vmaDestroyAllocator(allocator); +\endcode + + +\subsection quick_start_initialization_other_config Other configuration options + +There are additional configuration options available through preprocessor macros that you can define +before including VMA header and through parameters passed in #VmaAllocatorCreateInfo. +They include a possibility to use your own callbacks for host memory allocations (`VkAllocationCallbacks`), +callbacks for device memory allocations (instead of `vkAllocateMemory`, `vkFreeMemory`), +or your custom `VMA_ASSERT` macro, among others. +For more information, see: @ref configuration. + + +\section quick_start_resource_allocation Resource allocation - friend class VmaList; - }; +When you want to create a buffer or image: - VmaList(AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { } - VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { } +-# Fill `VkBufferCreateInfo` / `VkImageCreateInfo` structure. 
+-# Fill VmaAllocationCreateInfo structure. +-# Call vmaCreateBuffer() / vmaCreateImage() to get `VkBuffer`/`VkImage` with memory + already allocated and bound to it, plus #VmaAllocation objects that represents its underlying memory. + +\code +VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +bufferInfo.size = 65536; +bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; + +VmaAllocationCreateInfo allocInfo = {}; +allocInfo.usage = VMA_MEMORY_USAGE_AUTO; + +VkBuffer buffer; +VmaAllocation allocation; +vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr); +\endcode + +Don't forget to destroy your buffer and allocation objects when no longer needed: + +\code +vmaDestroyBuffer(allocator, buffer, allocation); +\endcode + +If you need to map the buffer, you must set flag +#VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT +in VmaAllocationCreateInfo::flags. +There are many additional parameters that can control the choice of memory type to be used for the allocation +and other features. +For more information, see documentation chapters: @ref choosing_memory_type, @ref memory_mapping. + + +\page choosing_memory_type Choosing memory type + +Physical devices in Vulkan support various combinations of memory heaps and +types. Help with choosing correct and optimal memory type for your specific +resource is one of the key features of this library. You can use it by filling +appropriate members of VmaAllocationCreateInfo structure, as described below. +You can also combine multiple methods. + +-# If you just want to find memory type index that meets your requirements, you + can use function: vmaFindMemoryTypeIndexForBufferInfo(), + vmaFindMemoryTypeIndexForImageInfo(), vmaFindMemoryTypeIndex(). +-# If you want to allocate a region of device memory without association with any + specific image or buffer, you can use function vmaAllocateMemory(). Usage of + this function is not recommended and usually not needed. + vmaAllocateMemoryPages() function is also provided for creating multiple allocations at once, + which may be useful for sparse binding. +-# If you already have a buffer or an image created, you want to allocate memory + for it and then you will bind it yourself, you can use function + vmaAllocateMemoryForBuffer(), vmaAllocateMemoryForImage(). + For binding you should use functions: vmaBindBufferMemory(), vmaBindImageMemory() + or their extended versions: vmaBindBufferMemory2(), vmaBindImageMemory2(). +-# If you want to create a buffer or an image, allocate memory for it, and bind + them together, all in one call, you can use function vmaCreateBuffer(), + vmaCreateImage(). + This is the easiest and recommended way to use this library! + +When using 3. or 4., the library internally queries Vulkan for memory types +supported for that buffer or image (function `vkGetBufferMemoryRequirements()`) +and uses only one of these types. + +If no memory type can be found that meets all the requirements, these functions +return `VK_ERROR_FEATURE_NOT_PRESENT`. + +You can leave VmaAllocationCreateInfo structure completely filled with zeros. +It means no requirements are specified for memory type. +It is valid, although not very useful. + +\section choosing_memory_type_usage Usage + +The easiest way to specify memory requirements is to fill member +VmaAllocationCreateInfo::usage using one of the values of enum #VmaMemoryUsage. 
+It defines high level, common usage types. +Since version 3 of the library, it is recommended to use #VMA_MEMORY_USAGE_AUTO to let it select best memory type for your resource automatically. + +For example, if you want to create a uniform buffer that will be filled using +transfer only once or infrequently and then used for rendering every frame as a uniform buffer, you can +do it using following code. The buffer will most likely end up in a memory type with +`VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT` to be fast to access by the GPU device. + +\code +VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +bufferInfo.size = 65536; +bufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; + +VmaAllocationCreateInfo allocInfo = {}; +allocInfo.usage = VMA_MEMORY_USAGE_AUTO; + +VkBuffer buffer; +VmaAllocation allocation; +vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr); +\endcode + +If you have a preference for putting the resource in GPU (device) memory or CPU (host) memory +on systems with discrete graphics card that have the memories separate, you can use +#VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE or #VMA_MEMORY_USAGE_AUTO_PREFER_HOST. + +When using `VMA_MEMORY_USAGE_AUTO*` while you want to map the allocated memory, +you also need to specify one of the host access flags: +#VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT. +This will help the library decide about preferred memory type to ensure it has `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` +so you can map it. + +For example, a staging buffer that will be filled via mapped pointer and then +used as a source of transfer to the buffer described previously can be created like this. +It will likely end up in a memory type that is `HOST_VISIBLE` and `HOST_COHERENT` +but not `HOST_CACHED` (meaning uncached, write-combined) and not `DEVICE_LOCAL` (meaning system RAM). + +\code +VkBufferCreateInfo stagingBufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +stagingBufferInfo.size = 65536; +stagingBufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT; + +VmaAllocationCreateInfo stagingAllocInfo = {}; +stagingAllocInfo.usage = VMA_MEMORY_USAGE_AUTO; +stagingAllocInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT; + +VkBuffer stagingBuffer; +VmaAllocation stagingAllocation; +vmaCreateBuffer(allocator, &stagingBufferInfo, &stagingAllocInfo, &stagingBuffer, &stagingAllocation, nullptr); +\endcode + +For more examples of creating different kinds of resources, see chapter \ref usage_patterns. +See also: @ref memory_mapping. + +Usage values `VMA_MEMORY_USAGE_AUTO*` are legal to use only when the library knows +about the resource being created by having `VkBufferCreateInfo` / `VkImageCreateInfo` passed, +so they work with functions like: vmaCreateBuffer(), vmaCreateImage(), vmaFindMemoryTypeIndexForBufferInfo() etc. +If you allocate raw memory using function vmaAllocateMemory(), you have to use other means of selecting +memory type, as described below. + +\note +Old usage values (`VMA_MEMORY_USAGE_GPU_ONLY`, `VMA_MEMORY_USAGE_CPU_ONLY`, +`VMA_MEMORY_USAGE_CPU_TO_GPU`, `VMA_MEMORY_USAGE_GPU_TO_CPU`, `VMA_MEMORY_USAGE_CPU_COPY`) +are still available and work same way as in previous versions of the library +for backward compatibility, but they are deprecated. 
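+
+For example, as a minimal sketch: a buffer that previously used the deprecated `VMA_MEMORY_USAGE_CPU_TO_GPU`
+value and is written through a mapped pointer can typically be expressed with the new values like this
+(variable names here are only illustrative):
+
+\code
+// Old, deprecated style (still accepted for backward compatibility):
+VmaAllocationCreateInfo oldStyleAllocInfo = {};
+oldStyleAllocInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
+
+// Roughly equivalent new style, assuming the memory is written sequentially via a mapped pointer:
+VmaAllocationCreateInfo newStyleAllocInfo = {};
+newStyleAllocInfo.usage = VMA_MEMORY_USAGE_AUTO;
+newStyleAllocInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT;
+\endcode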
+ +\section choosing_memory_type_required_preferred_flags Required and preferred flags + +You can specify more detailed requirements by filling members +VmaAllocationCreateInfo::requiredFlags and VmaAllocationCreateInfo::preferredFlags +with a combination of bits from enum `VkMemoryPropertyFlags`. For example, +if you want to create a buffer that will be persistently mapped on host (so it +must be `HOST_VISIBLE`) and preferably will also be `HOST_COHERENT` and `HOST_CACHED`, +use following code: + +\code +VmaAllocationCreateInfo allocInfo = {}; +allocInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; +allocInfo.preferredFlags = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT; +allocInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT | VMA_ALLOCATION_CREATE_MAPPED_BIT; + +VkBuffer buffer; +VmaAllocation allocation; +vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr); +\endcode + +A memory type is chosen that has all the required flags and as many preferred +flags set as possible. + +Value passed in VmaAllocationCreateInfo::usage is internally converted to a set of required and preferred flags, +plus some extra "magic" (heuristics). + +\section choosing_memory_type_explicit_memory_types Explicit memory types + +If you inspected memory types available on the physical device and you have +a preference for memory types that you want to use, you can fill member +VmaAllocationCreateInfo::memoryTypeBits. It is a bit mask, where each bit set +means that a memory type with that index is allowed to be used for the +allocation. Special value 0, just like `UINT32_MAX`, means there are no +restrictions to memory type index. + +Please note that this member is NOT just a memory type index. +Still you can use it to choose just one, specific memory type. +For example, if you already determined that your buffer should be created in +memory type 2, use following code: + +\code +uint32_t memoryTypeIndex = 2; + +VmaAllocationCreateInfo allocInfo = {}; +allocInfo.memoryTypeBits = 1U << memoryTypeIndex; + +VkBuffer buffer; +VmaAllocation allocation; +vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr); +\endcode + +You can also use this parameter to exclude some memory types. +If you inspect memory heaps and types available on the current physical device and +you determine that for some reason you don't want to use a specific memory type for the allocation, +you can enable automatic memory type selection but exclude certain memory type or types +by setting all bits of `memoryTypeBits` to 1 except the ones you choose. + +\code +// ... +uint32_t excludedMemoryTypeIndex = 2; +VmaAllocationCreateInfo allocInfo = {}; +allocInfo.usage = VMA_MEMORY_USAGE_AUTO; +allocInfo.memoryTypeBits = ~(1U << excludedMemoryTypeIndex); +// ... +\endcode + + +\section choosing_memory_type_custom_memory_pools Custom memory pools + +If you allocate from custom memory pool, all the ways of specifying memory +requirements described above are not applicable and the aforementioned members +of VmaAllocationCreateInfo structure are ignored. Memory type is selected +explicitly when creating the pool and then used to make all the allocations from +that pool. For further details, see \ref custom_memory_pools. + +\section choosing_memory_type_dedicated_allocations Dedicated allocations + +Memory for allocations is reserved out of larger block of `VkDeviceMemory` +allocated from Vulkan internally. That is the main feature of this whole library. 
+You can still request a separate memory block to be created for an allocation, +just like you would do in a trivial solution without using any allocator. +In that case, a buffer or image is always bound to that memory at offset 0. +This is called a "dedicated allocation". +You can explicitly request it by using flag #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. +The library can also internally decide to use dedicated allocation in some cases, e.g.: + +- When the size of the allocation is large. +- When [VK_KHR_dedicated_allocation](@ref vk_khr_dedicated_allocation) extension is enabled + and it reports that dedicated allocation is required or recommended for the resource. +- When allocation of next big memory block fails due to not enough device memory, + but allocation with the exact requested size succeeds. + + +\page memory_mapping Memory mapping + +To "map memory" in Vulkan means to obtain a CPU pointer to `VkDeviceMemory`, +to be able to read from it or write to it in CPU code. +Mapping is possible only of memory allocated from a memory type that has +`VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` flag. +Functions `vkMapMemory()`, `vkUnmapMemory()` are designed for this purpose. +You can use them directly with memory allocated by this library, +but it is not recommended because of following issue: +Mapping the same `VkDeviceMemory` block multiple times is illegal - only one mapping at a time is allowed. +This includes mapping disjoint regions. Mapping is not reference-counted internally by Vulkan. +It is also not thread-safe. +Because of this, Vulkan Memory Allocator provides following facilities: + +\note If you want to be able to map an allocation, you need to specify one of the flags +#VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT +in VmaAllocationCreateInfo::flags. These flags are required for an allocation to be mappable +when using #VMA_MEMORY_USAGE_AUTO or other `VMA_MEMORY_USAGE_AUTO*` enum values. +For other usage values they are ignored and every such allocation made in `HOST_VISIBLE` memory type is mappable, +but these flags can still be used for consistency. + +\section memory_mapping_copy_functions Copy functions + +The easiest way to copy data from a host pointer to an allocation is to use convenience function vmaCopyMemoryToAllocation(). +It automatically maps the Vulkan memory temporarily (if not already mapped), performs `memcpy`, +and calls `vkFlushMappedMemoryRanges` (if required - if memory type is not `HOST_COHERENT`). + +It is also the safest one, because using `memcpy` avoids a risk of accidentally introducing memory reads +(e.g. by doing `pMappedVectors[i] += v`), which may be very slow on memory types that are not `HOST_CACHED`. + +\code +struct ConstantBuffer +{ + ... +}; +ConstantBuffer constantBufferData = ... 
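+// constantBufferData is assumed to already hold the data that will be copied to the allocation below.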
- bool empty() const { return m_RawList.IsEmpty(); } - size_t size() const { return m_RawList.GetCount(); } +VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +bufCreateInfo.size = sizeof(ConstantBuffer); +bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT; - iterator begin() { return iterator(&m_RawList, m_RawList.Front()); } - iterator end() { return iterator(&m_RawList, VMA_NULL); } +VmaAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; +allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT; - const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); } - const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); } +VkBuffer buf; +VmaAllocation alloc; +vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr); - void clear() { m_RawList.Clear(); } - void push_back(const T& value) { m_RawList.PushBack(value); } - void erase(iterator it) { m_RawList.Remove(it.m_pItem); } - iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); } +vmaCopyMemoryToAllocation(allocator, &constantBufferData, alloc, 0, sizeof(ConstantBuffer)); +\endcode -private: - VmaRawList m_RawList; +Copy in the other direction - from an allocation to a host pointer can be performed the same way using function vmaCopyAllocationToMemory(). + +\section memory_mapping_mapping_functions Mapping functions + +The library provides following functions for mapping of a specific allocation: vmaMapMemory(), vmaUnmapMemory(). +They are safer and more convenient to use than standard Vulkan functions. +You can map an allocation multiple times simultaneously - mapping is reference-counted internally. +You can also map different allocations simultaneously regardless of whether they use the same `VkDeviceMemory` block. +The way it is implemented is that the library always maps entire memory block, not just region of the allocation. +For further details, see description of vmaMapMemory() function. +Example: + +\code +// Having these objects initialized: +struct ConstantBuffer +{ + ... }; +ConstantBuffer constantBufferData = ... + +VmaAllocator allocator = ... +VkBuffer constantBuffer = ... +VmaAllocation constantBufferAllocation = ... + +// You can map and fill your buffer using following code: + +void* mappedData; +vmaMapMemory(allocator, constantBufferAllocation, &mappedData); +memcpy(mappedData, &constantBufferData, sizeof(constantBufferData)); +vmaUnmapMemory(allocator, constantBufferAllocation); +\endcode + +When mapping, you may see a warning from Vulkan validation layer similar to this one: + +Mapping an image with layout VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL can result in undefined behavior if this memory is used by the device. Only GENERAL or PREINITIALIZED should be used. + +It happens because the library maps entire `VkDeviceMemory` block, where different +types of images and buffers may end up together, especially on GPUs with unified memory like Intel. +You can safely ignore it if you are sure you access only memory of the intended +object that you wanted to map. + + +\section memory_mapping_persistently_mapped_memory Persistently mapped memory + +Keeping your memory persistently mapped is generally OK in Vulkan. +You don't need to unmap it before using its data on the GPU. 
+The library provides a special feature designed for that:
+Allocations made with the #VMA_ALLOCATION_CREATE_MAPPED_BIT flag set in
+VmaAllocationCreateInfo::flags stay mapped all the time,
+so you can just access their CPU pointer at any time
+without needing to call any "map" or "unmap" function.
+Example:
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = sizeof(ConstantBuffer);
+bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT |
+    VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+VkBuffer buf;
+VmaAllocation alloc;
+VmaAllocationInfo allocInfo;
+vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
+
+// Buffer is already mapped. You can access its memory.
+memcpy(allocInfo.pMappedData, &constantBufferData, sizeof(constantBufferData));
+\endcode
+
+\note #VMA_ALLOCATION_CREATE_MAPPED_BIT by itself doesn't guarantee that the allocation will end up
+in a mappable memory type.
+For this, you need to also specify #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or
+#VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT.
+#VMA_ALLOCATION_CREATE_MAPPED_BIT only guarantees that if the memory is `HOST_VISIBLE`, the allocation will be mapped on creation.
+For an example of how to make use of this fact, see section \ref usage_patterns_advanced_data_uploading.
+
+\section memory_mapping_cache_control Cache flush and invalidate
+
+Memory in Vulkan doesn't need to be unmapped before using it on the GPU,
+but unless a memory type has the `VK_MEMORY_PROPERTY_HOST_COHERENT_BIT` flag set,
+you need to manually **invalidate** the cache before reading from a mapped pointer
+and **flush** the cache after writing to it.
+Map/unmap operations don't do that automatically.
+Vulkan provides the following functions for this purpose: `vkFlushMappedMemoryRanges()`,
+`vkInvalidateMappedMemoryRanges()`, but this library provides more convenient
+functions that refer to a given allocation object: vmaFlushAllocation(),
+vmaInvalidateAllocation(),
+or multiple objects at once: vmaFlushAllocations(), vmaInvalidateAllocations().
+
+Regions of memory specified for flush/invalidate must be aligned to
+`VkPhysicalDeviceLimits::nonCoherentAtomSize`. This is automatically ensured by the library.
+In any memory type that is `HOST_VISIBLE` but not `HOST_COHERENT`, all allocations
+within blocks are aligned to this value, so their offsets are always a multiple of
+`nonCoherentAtomSize` and two different allocations never share the same "line" of this size.
+
+Also, Windows drivers from all 3 PC GPU vendors (AMD, Intel, NVIDIA)
+currently provide the `HOST_COHERENT` flag on all memory types that are
+`HOST_VISIBLE`, so on PC you may not need to bother.
+
+
+\page staying_within_budget Staying within budget
+
+When developing a graphics-intensive game or program, it is important to avoid allocating
+more GPU memory than is physically available. When the memory is over-committed,
+various bad things can happen, depending on the specific GPU, graphics driver, and
+operating system:
+
+- It may just work without any problems.
+- The application may slow down because some memory blocks are moved to system RAM
+  and the GPU has to access them through the PCI Express bus.
+- A new allocation may take a very long time to complete, even a few seconds, and possibly
+  freeze the entire system.
+- The new allocation may fail with `VK_ERROR_OUT_OF_DEVICE_MEMORY`. +- It may even result in GPU crash (TDR), observed as `VK_ERROR_DEVICE_LOST` + returned somewhere later. + +\section staying_within_budget_querying_for_budget Querying for budget + +To query for current memory usage and available budget, use function vmaGetHeapBudgets(). +Returned structure #VmaBudget contains quantities expressed in bytes, per Vulkan memory heap. + +Please note that this function returns different information and works faster than +vmaCalculateStatistics(). vmaGetHeapBudgets() can be called every frame or even before every +allocation, while vmaCalculateStatistics() is intended to be used rarely, +only to obtain statistical information, e.g. for debugging purposes. + +It is recommended to use VK_EXT_memory_budget device extension to obtain information +about the budget from Vulkan device. VMA is able to use this extension automatically. +When not enabled, the allocator behaves same way, but then it estimates current usage +and available budget based on its internal information and Vulkan memory heap sizes, +which may be less precise. In order to use this extension: + +1. Make sure extensions VK_EXT_memory_budget and VK_KHR_get_physical_device_properties2 + required by it are available and enable them. Please note that the first is a device + extension and the second is instance extension! +2. Use flag #VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT when creating #VmaAllocator object. +3. Make sure to call vmaSetCurrentFrameIndex() every frame. Budget is queried from + Vulkan inside of it to avoid overhead of querying it with every allocation. + +\section staying_within_budget_controlling_memory_usage Controlling memory usage + +There are many ways in which you can try to stay within the budget. + +First, when making new allocation requires allocating a new memory block, the library +tries not to exceed the budget automatically. If a block with default recommended size +(e.g. 256 MB) would go over budget, a smaller block is allocated, possibly even +dedicated memory for just this resource. + +If the size of the requested resource plus current memory usage is more than the +budget, by default the library still tries to create it, leaving it to the Vulkan +implementation whether the allocation succeeds or fails. You can change this behavior +by using #VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT flag. With it, the allocation is +not made if it would exceed the budget or if the budget is already exceeded. +VMA then tries to make the allocation from the next eligible Vulkan memory type. +If all of them fail, the call then fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY`. +Example usage pattern may be to pass the #VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT flag +when creating resources that are not essential for the application (e.g. the texture +of a specific object) and not to pass it when creating critically important resources +(e.g. render targets). + +On AMD graphics cards there is a custom vendor extension available: VK_AMD_memory_overallocation_behavior +that allows to control the behavior of the Vulkan implementation in out-of-memory cases - +whether it should fail with an error code or still allow the allocation. +Usage of this extension involves only passing extra structure on Vulkan device creation, +so it is out of scope of this library. + +Finally, you can also use #VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT flag to make sure +a new allocation is created only when it fits inside one of the existing memory blocks. 
+If it would require allocating a new block, it fails instead with `VK_ERROR_OUT_OF_DEVICE_MEMORY`.
+This also ensures that the function call is very fast because it never goes to Vulkan
+to obtain a new block.
+
+\note Creating \ref custom_memory_pools with VmaPoolCreateInfo::minBlockCount
+set to more than 0 will currently try to allocate memory blocks without checking whether they
+fit within budget.
+
+
+\page resource_aliasing Resource aliasing (overlap)
+
+New explicit graphics APIs (Vulkan and Direct3D 12), thanks to manual memory
+management, give an opportunity to alias (overlap) multiple resources in the
+same region of memory - a feature not available in the old APIs (Direct3D 11, OpenGL).
+It can be useful to save video memory, but it must be used with caution.
+
+For example, if you know the flow of your whole render frame in advance, you
+are going to use some intermediate textures or buffers only during a small range of render passes,
+and you know these ranges don't overlap in time, you can bind these resources to
+the same place in memory, even if they have completely different parameters (width, height, format, etc.).
+
+![Resource aliasing (overlap)](../gfx/Aliasing.png)
+
+Such a scenario is possible using VMA, but you need to create your images manually.
+Then you need to calculate the parameters of the allocation to be made using this formula:
+
+- allocation size = max(size of each image)
+- allocation alignment = max(alignment of each image)
+- allocation memoryTypeBits = bitwise AND(memoryTypeBits of each image)
+
+The following example shows two different images bound to the same place in memory,
+allocated to fit the largest of them.
+
+\code
+// A 512x512 texture to be sampled.
+VkImageCreateInfo img1CreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
+img1CreateInfo.imageType = VK_IMAGE_TYPE_2D;
+img1CreateInfo.extent.width = 512;
+img1CreateInfo.extent.height = 512;
+img1CreateInfo.extent.depth = 1;
+img1CreateInfo.mipLevels = 10;
+img1CreateInfo.arrayLayers = 1;
+img1CreateInfo.format = VK_FORMAT_R8G8B8A8_SRGB;
+img1CreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
+img1CreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+img1CreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
+img1CreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
+
+// A full-screen texture to be used as a color attachment.
+VkImageCreateInfo img2CreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO }; +img2CreateInfo.imageType = VK_IMAGE_TYPE_2D; +img2CreateInfo.extent.width = 1920; +img2CreateInfo.extent.height = 1080; +img2CreateInfo.extent.depth = 1; +img2CreateInfo.mipLevels = 1; +img2CreateInfo.arrayLayers = 1; +img2CreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM; +img2CreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL; +img2CreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; +img2CreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; +img2CreateInfo.samples = VK_SAMPLE_COUNT_1_BIT; + +VkImage img1; +res = vkCreateImage(device, &img1CreateInfo, nullptr, &img1); +VkImage img2; +res = vkCreateImage(device, &img2CreateInfo, nullptr, &img2); + +VkMemoryRequirements img1MemReq; +vkGetImageMemoryRequirements(device, img1, &img1MemReq); +VkMemoryRequirements img2MemReq; +vkGetImageMemoryRequirements(device, img2, &img2MemReq); + +VkMemoryRequirements finalMemReq = {}; +finalMemReq.size = std::max(img1MemReq.size, img2MemReq.size); +finalMemReq.alignment = std::max(img1MemReq.alignment, img2MemReq.alignment); +finalMemReq.memoryTypeBits = img1MemReq.memoryTypeBits & img2MemReq.memoryTypeBits; +// Validate if(finalMemReq.memoryTypeBits != 0) + +VmaAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; + +VmaAllocation alloc; +res = vmaAllocateMemory(allocator, &finalMemReq, &allocCreateInfo, &alloc, nullptr); + +res = vmaBindImageMemory(allocator, alloc, img1); +res = vmaBindImageMemory(allocator, alloc, img2); + +// You can use img1, img2 here, but not at the same time! + +vmaFreeMemory(allocator, alloc); +vkDestroyImage(allocator, img2, nullptr); +vkDestroyImage(allocator, img1, nullptr); +\endcode + +VMA also provides convenience functions that create a buffer or image and bind it to memory +represented by an existing #VmaAllocation: +vmaCreateAliasingBuffer(), vmaCreateAliasingBuffer2(), +vmaCreateAliasingImage(), vmaCreateAliasingImage2(). +Versions with "2" offer additional parameter `allocationLocalOffset`. + +Remember that using resources that alias in memory requires proper synchronization. +You need to issue a memory barrier to make sure commands that use `img1` and `img2` +don't overlap on GPU timeline. +You also need to treat a resource after aliasing as uninitialized - containing garbage data. +For example, if you use `img1` and then want to use `img2`, you need to issue +an image memory barrier for `img2` with `oldLayout` = `VK_IMAGE_LAYOUT_UNDEFINED`. + +Additional considerations: + +- Vulkan also allows to interpret contents of memory between aliasing resources consistently in some cases. +See chapter 11.8. "Memory Aliasing" of Vulkan specification or `VK_IMAGE_CREATE_ALIAS_BIT` flag. +- You can create more complex layout where different images and buffers are bound +at different offsets inside one large allocation. For example, one can imagine +a big texture used in some render passes, aliasing with a set of many small buffers +used between in some further passes. To bind a resource at non-zero offset in an allocation, +use vmaBindBufferMemory2() / vmaBindImageMemory2(). +- Before allocating memory for the resources you want to alias, check `memoryTypeBits` +returned in memory requirements of each resource to make sure the bits overlap. +Some GPUs may expose multiple memory types suitable e.g. 
only for buffers or +images with `COLOR_ATTACHMENT` usage, so the sets of memory types supported by your +resources may be disjoint. Aliasing them is not possible in that case. + + +\page custom_memory_pools Custom memory pools + +A memory pool contains a number of `VkDeviceMemory` blocks. +The library automatically creates and manages default pool for each memory type available on the device. +Default memory pool automatically grows in size. +Size of allocated blocks is also variable and managed automatically. +You are using default pools whenever you leave VmaAllocationCreateInfo::pool = null. + +You can create custom pool and allocate memory out of it. +It can be useful if you want to: + +- Keep certain kind of allocations separate from others. +- Enforce particular, fixed size of Vulkan memory blocks. +- Limit maximum amount of Vulkan memory allocated for that pool. +- Reserve minimum or fixed amount of Vulkan memory always preallocated for that pool. +- Use extra parameters for a set of your allocations that are available in #VmaPoolCreateInfo but not in + #VmaAllocationCreateInfo - e.g., custom minimum alignment, custom `pNext` chain. +- Perform defragmentation on a specific subset of your allocations. + +To use custom memory pools: + +-# Fill VmaPoolCreateInfo structure. +-# Call vmaCreatePool() to obtain #VmaPool handle. +-# When making an allocation, set VmaAllocationCreateInfo::pool to this handle. + You don't need to specify any other parameters of this structure, like `usage`. + +Example: + +\code +// Find memoryTypeIndex for the pool. +VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +sampleBufCreateInfo.size = 0x10000; // Doesn't matter. +sampleBufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; + +VmaAllocationCreateInfo sampleAllocCreateInfo = {}; +sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; + +uint32_t memTypeIndex; +VkResult res = vmaFindMemoryTypeIndexForBufferInfo(allocator, + &sampleBufCreateInfo, &sampleAllocCreateInfo, &memTypeIndex); +// Check res... + +// Create a pool that can have at most 2 blocks, 128 MiB each. +VmaPoolCreateInfo poolCreateInfo = {}; +poolCreateInfo.memoryTypeIndex = memTypeIndex; +poolCreateInfo.blockSize = 128ULL * 1024 * 1024; +poolCreateInfo.maxBlockCount = 2; + +VmaPool pool; +res = vmaCreatePool(allocator, &poolCreateInfo, &pool); +// Check res... + +// Allocate a buffer out of it. +VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +bufCreateInfo.size = 1024; +bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; + +VmaAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.pool = pool; + +VkBuffer buf; +VmaAllocation alloc; +res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr); +// Check res... +\endcode + +You have to free all allocations made from this pool before destroying it. + +\code +vmaDestroyBuffer(allocator, buf, alloc); +vmaDestroyPool(allocator, pool); +\endcode + +New versions of this library support creating dedicated allocations in custom pools. +It is supported only when VmaPoolCreateInfo::blockSize = 0. +To use this feature, set VmaAllocationCreateInfo::pool to the pointer to your custom pool and +VmaAllocationCreateInfo::flags to #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. + + +\section custom_memory_pools_MemTypeIndex Choosing memory type index + +When creating a pool, you must explicitly specify memory type index. 
+To find the one suitable for your buffers or images, you can use helper functions +vmaFindMemoryTypeIndexForBufferInfo(), vmaFindMemoryTypeIndexForImageInfo(). +You need to provide structures with example parameters of buffers or images +that you are going to create in that pool. + +\code +VkBufferCreateInfo exampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +exampleBufCreateInfo.size = 1024; // Doesn't matter +exampleBufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; + +VmaAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; + +uint32_t memTypeIndex; +vmaFindMemoryTypeIndexForBufferInfo(allocator, &exampleBufCreateInfo, &allocCreateInfo, &memTypeIndex); + +VmaPoolCreateInfo poolCreateInfo = {}; +poolCreateInfo.memoryTypeIndex = memTypeIndex; +// ... +\endcode + +When creating buffers/images allocated in that pool, provide following parameters: + +- `VkBufferCreateInfo`: Prefer to pass same parameters as above. + Otherwise you risk creating resources in a memory type that is not suitable for them, which may result in undefined behavior. + Using different `VK_BUFFER_USAGE_` flags may work, but you shouldn't create images in a pool intended for buffers + or the other way around. +- VmaAllocationCreateInfo: You don't need to pass same parameters. Fill only `pool` member. + Other members are ignored anyway. + + +\section custom_memory_pools_when_not_use When not to use custom pools + +Custom pools are commonly overused by VMA users. +While it may feel natural to keep some logical groups of resources separate in memory, +in most cases it does more harm than good. +Using custom pool shouldn't be your first choice. +Instead, please make all allocations from default pools first and only use custom pools +if you can prove and measure that it is beneficial in some way, +e.g. it results in lower memory usage, better performance, etc. -#endif // #if VMA_USE_STL_LIST +Using custom pools has disadvantages: -//////////////////////////////////////////////////////////////////////////////// -// class VmaMap +- Each pool has its own collection of `VkDeviceMemory` blocks. + Some of them may be partially or even completely empty. + Spreading allocations across multiple pools increases the amount of wasted (allocated but unbound) memory. +- You must manually choose specific memory type to be used by a custom pool (set as VmaPoolCreateInfo::memoryTypeIndex). + When using default pools, best memory type for each of your allocations can be selected automatically + using a carefully design algorithm that works across all kinds of GPUs. +- If an allocation from a custom pool at specific memory type fails, entire allocation operation returns failure. + When using default pools, VMA tries another compatible memory type. +- If you set VmaPoolCreateInfo::blockSize != 0, each memory block has the same size, + while default pools start from small blocks and only allocate next blocks larger and larger + up to the preferred block size. -#if VMA_USE_STL_UNORDERED_MAP +Many of the common concerns can be addressed in a different way than using custom pools: -#define VmaPair std::pair +- If you want to keep your allocations of certain size (small versus large) or certain lifetime (transient versus long lived) + separate, you likely don't need to. + VMA uses a high quality allocation algorithm that manages memory well in various cases. + Please measure and check if using custom pools provides a benefit. 
+- If you want to keep your images and buffers separate, you don't need to. + VMA respects `bufferImageGranularity` limit automatically. +- If you want to keep your mapped and not mapped allocations separate, you don't need to. + VMA respects `nonCoherentAtomSize` limit automatically. + It also maps only those `VkDeviceMemory` blocks that need to map any allocation. + It even tries to keep mappable and non-mappable allocations in separate blocks to minimize the amount of mapped memory. +- If you want to choose a custom size for the default memory block, you can set it globally instead + using VmaAllocatorCreateInfo::preferredLargeHeapBlockSize. +- If you want to select specific memory type for your allocation, + you can set VmaAllocationCreateInfo::memoryTypeBits to `(1U << myMemoryTypeIndex)` instead. +- If you need to create a buffer with certain minimum alignment, you can still do it + using default pools by specifying VmaAllocationCreateInfo::minAlignment. -#define VMA_MAP_TYPE(KeyT, ValueT) \ - std::unordered_map< KeyT, ValueT, std::hash, std::equal_to, VmaStlAllocator< std::pair > > -#else // #if VMA_USE_STL_UNORDERED_MAP +\section linear_algorithm Linear allocation algorithm -template -struct VmaPair -{ - T1 first; - T2 second; +Each Vulkan memory block managed by this library has accompanying metadata that +keeps track of used and unused regions. By default, the metadata structure and +algorithm tries to find best place for new allocations among free regions to +optimize memory usage. This way you can allocate and free objects in any order. - VmaPair() : first(), second() { } - VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { } -}; +![Default allocation algorithm](../gfx/Linear_allocator_1_algo_default.png) -/* Class compatible with subset of interface of std::unordered_map. -KeyT, ValueT must be POD because they will be stored in VmaVector. -*/ -template -class VmaMap -{ -public: - typedef VmaPair PairType; - typedef PairType* iterator; +Sometimes there is a need to use simpler, linear allocation algorithm. You can +create custom pool that uses such algorithm by adding flag +#VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT to VmaPoolCreateInfo::flags while creating +#VmaPool object. Then an alternative metadata management is used. It always +creates new allocations after last one and doesn't reuse free regions after +allocations freed in the middle. It results in better allocation performance and +less memory consumed by metadata. - VmaMap(VmaStlAllocator& allocator) : m_Vector(allocator) { } - VmaMap(const VmaStlAllocator& allocator) : m_Vector(allocator) { } +![Linear allocation algorithm](../gfx/Linear_allocator_2_algo_linear.png) - iterator begin() { return m_Vector.begin(); } - iterator end() { return m_Vector.end(); } +With this one flag, you can create a custom pool that can be used in many ways: +free-at-once, stack, double stack, and ring buffer. See below for details. +You don't need to specify explicitly which of these options you are going to use - it is detected automatically. - void insert(const PairType& pair); - iterator find(const KeyT& key); - void erase(iterator it); - -private: - VmaVector< PairType, VmaStlAllocator > m_Vector; -}; +\subsection linear_algorithm_free_at_once Free-at-once -#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap +In a pool that uses linear algorithm, you still need to free all the allocations +individually, e.g. by using vmaFreeMemory() or vmaDestroyBuffer(). You can free +them in any order. 
New allocations are always made after last one - free space +in the middle is not reused. However, when you release all the allocation and +the pool becomes empty, allocation starts from the beginning again. This way you +can use linear algorithm to speed up creation of allocations that you are going +to release all at once. -template -struct VmaPairFirstLess -{ - bool operator()(const VmaPair& lhs, const VmaPair& rhs) const - { - return lhs.first < rhs.first; - } - bool operator()(const VmaPair& lhs, const FirstT& rhsFirst) const - { - return lhs.first < rhsFirst; - } -}; +![Free-at-once](../gfx/Linear_allocator_3_free_at_once.png) -template -void VmaMap::insert(const PairType& pair) -{ - const size_t indexToInsert = VmaBinaryFindFirstNotLess( - m_Vector.data(), - m_Vector.data() + m_Vector.size(), - pair, - VmaPairFirstLess()) - m_Vector.data(); - VectorInsert(m_Vector, indexToInsert, pair); -} - -template -VmaPair* VmaMap::find(const KeyT& key) -{ - PairType* it = VmaBinaryFindFirstNotLess( - m_Vector.data(), - m_Vector.data() + m_Vector.size(), - key, - VmaPairFirstLess()); - if((it != m_Vector.end()) && (it->first == key)) - return it; - else - return m_Vector.end(); -} +This mode is also available for pools created with VmaPoolCreateInfo::maxBlockCount +value that allows multiple memory blocks. -template -void VmaMap::erase(iterator it) -{ - VectorRemove(m_Vector, it - m_Vector.begin()); -} +\subsection linear_algorithm_stack Stack -#endif // #if VMA_USE_STL_UNORDERED_MAP +When you free an allocation that was created last, its space can be reused. +Thanks to this, if you always release allocations in the order opposite to their +creation (LIFO - Last In First Out), you can achieve behavior of a stack. -/* -Represents a region of VmaAllocation that is either assigned and returned as -allocated memory block or free. -*/ -struct VmaSuballocation -{ - VkDeviceSize offset; - VkDeviceSize size; - VmaSuballocationType type; -}; +![Stack](../gfx/Linear_allocator_4_stack.png) -typedef VmaList< VmaSuballocation, VmaStlAllocator > VmaSuballocationList; +This mode is also available for pools created with VmaPoolCreateInfo::maxBlockCount +value that allows multiple memory blocks. -// Parameters of an allocation. -struct VmaAllocationRequest -{ - VmaSuballocationList::iterator freeSuballocationItem; - VkDeviceSize offset; -}; +\subsection linear_algorithm_double_stack Double stack -/* Single block of memory - VkDeviceMemory with all the data about its regions -assigned or free. */ -class VmaAllocation -{ -public: - VkDeviceMemory m_hMemory; - VkDeviceSize m_Size; - uint32_t m_FreeCount; - VkDeviceSize m_SumFreeSize; - VmaSuballocationList m_Suballocations; - // Suballocations that are free and have size greater than certain threshold. - // Sorted by size, ascending. - VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize; +The space reserved by a custom pool with linear algorithm may be used by two +stacks: - VmaAllocation(VmaAllocator hAllocator); +- First, default one, growing up from offset 0. +- Second, "upper" one, growing down from the end towards lower offsets. - ~VmaAllocation() - { - VMA_ASSERT(m_hMemory == VK_NULL_HANDLE); - } +To make allocation from the upper stack, add flag #VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT +to VmaAllocationCreateInfo::flags. - // Always call after construction. - void Init(VkDeviceMemory newMemory, VkDeviceSize newSize); - // Always call before destruction. 
- void Destroy(VmaAllocator allocator); - - // Validates all data structures inside this object. If not valid, returns false. - bool Validate() const; - - // Tries to find a place for suballocation with given parameters inside this allocation. - // If succeeded, fills pAllocationRequest and returns true. - // If failed, returns false. - bool CreateAllocationRequest( - VkDeviceSize bufferImageGranularity, - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - VmaSuballocationType allocType, - VmaAllocationRequest* pAllocationRequest); +![Double stack](../gfx/Linear_allocator_7_double_stack.png) - // Checks if requested suballocation with given parameters can be placed in given pFreeSuballocItem. - // If yes, fills pOffset and returns true. If no, returns false. - bool CheckAllocation( - VkDeviceSize bufferImageGranularity, - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - VmaSuballocationType allocType, - VmaSuballocationList::const_iterator freeSuballocItem, - VkDeviceSize* pOffset) const; - - // Returns true if this allocation is empty - contains only single free suballocation. - bool IsEmpty() const; +Double stack is available only in pools with one memory block - +VmaPoolCreateInfo::maxBlockCount must be 1. Otherwise behavior is undefined. - // Makes actual allocation based on request. Request must already be checked - // and valid. - void Alloc( - const VmaAllocationRequest& request, - VmaSuballocationType type, - VkDeviceSize allocSize); +When the two stacks' ends meet so there is not enough space between them for a +new allocation, such allocation fails with usual +`VK_ERROR_OUT_OF_DEVICE_MEMORY` error. - // Frees suballocation assigned to given memory region. - void Free(const VkMappedMemoryRange* pMemory); +\subsection linear_algorithm_ring_buffer Ring buffer -#if VMA_STATS_STRING_ENABLED - void PrintDetailedMap(class VmaStringBuilder& sb) const; -#endif +When you free some allocations from the beginning and there is not enough free space +for a new one at the end of a pool, allocator's "cursor" wraps around to the +beginning and starts allocation there. Thanks to this, if you always release +allocations in the same order as you created them (FIFO - First In First Out), +you can achieve behavior of a ring buffer / queue. -private: - // Given free suballocation, it merges it with following one, which must also be free. - void MergeFreeWithNext(VmaSuballocationList::iterator item); - // Releases given suballocation, making it free. Merges it with adjacent free - // suballocations if applicable. - void FreeSuballocation(VmaSuballocationList::iterator suballocItem); - // Given free suballocation, it inserts it into sorted list of - // m_FreeSuballocationsBySize if it's suitable. - void RegisterFreeSuballocation(VmaSuballocationList::iterator item); - // Given free suballocation, it removes it from sorted list of - // m_FreeSuballocationsBySize if it's suitable. - void UnregisterFreeSuballocation(VmaSuballocationList::iterator item); -}; +![Ring buffer](../gfx/Linear_allocator_5_ring_buffer.png) -// Allocation for an object that has its own private VkDeviceMemory. -struct VmaOwnAllocation -{ - VkDeviceMemory m_hMemory; - VkDeviceSize m_Size; - VmaSuballocationType m_Type; -}; +Ring buffer is available only in pools with one memory block - +VmaPoolCreateInfo::maxBlockCount must be 1. Otherwise behavior is undefined. + +\note \ref defragmentation is not supported in custom pools created with #VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT. 
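+
+Example: a minimal sketch of a pool configured for the linear algorithm, suitable for the stack,
+double-stack, and ring-buffer usage patterns described above. The `memTypeIndex` value is assumed to
+have been found earlier, e.g. with vmaFindMemoryTypeIndexForBufferInfo() as shown in the chapter above:
+
+\code
+VmaPoolCreateInfo poolCreateInfo = {};
+poolCreateInfo.memoryTypeIndex = memTypeIndex;
+poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
+poolCreateInfo.maxBlockCount = 1; // Required for double stack and ring buffer behavior.
+
+VmaPool pool;
+VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
+// Check res...
+
+// Allocations made from this pool use the linear algorithm automatically.
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.pool = pool;
+
+// Optionally, allocate from the "upper" stack of a double stack:
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
+\endcode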
+ + +\page defragmentation Defragmentation -struct VmaOwnAllocationMemoryHandleLess +Interleaved allocations and deallocations of many objects of varying size can +cause fragmentation over time, which can lead to a situation where the library is unable +to find a continuous range of free memory for a new allocation despite there is +enough free space, just scattered across many small free ranges between existing +allocations. + +To mitigate this problem, you can use defragmentation feature. +It doesn't happen automatically though and needs your cooperation, +because VMA is a low level library that only allocates memory. +It cannot recreate buffers and images in a new place as it doesn't remember the contents of `VkBufferCreateInfo` / `VkImageCreateInfo` structures. +It cannot copy their contents as it doesn't record any commands to a command buffer. + +Example: + +\code +VmaDefragmentationInfo defragInfo = {}; +defragInfo.pool = myPool; +defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FAST_BIT; + +VmaDefragmentationContext defragCtx; +VkResult res = vmaBeginDefragmentation(allocator, &defragInfo, &defragCtx); +// Check res... + +for(;;) { - bool operator()(const VmaOwnAllocation& lhs, const VmaOwnAllocation& rhs) const + VmaDefragmentationPassMoveInfo pass; + res = vmaBeginDefragmentationPass(allocator, defragCtx, &pass); + if(res == VK_SUCCESS) + break; + else if(res != VK_INCOMPLETE) + // Handle error... + + for(uint32_t i = 0; i < pass.moveCount; ++i) { - return lhs.m_hMemory < rhs.m_hMemory; + // Inspect pass.pMoves[i].srcAllocation, identify what buffer/image it represents. + VmaAllocationInfo allocInfo; + vmaGetAllocationInfo(allocator, pass.pMoves[i].srcAllocation, &allocInfo); + MyEngineResourceData* resData = (MyEngineResourceData*)allocInfo.pUserData; + + // Recreate and bind this buffer/image at: pass.pMoves[i].dstMemory, pass.pMoves[i].dstOffset. + VkImageCreateInfo imgCreateInfo = ... + VkImage newImg; + res = vkCreateImage(device, &imgCreateInfo, nullptr, &newImg); + // Check res... + res = vmaBindImageMemory(allocator, pass.pMoves[i].dstTmpAllocation, newImg); + // Check res... + + // Issue a vkCmdCopyBuffer/vkCmdCopyImage to copy its content to the new place. + vkCmdCopyImage(cmdBuf, resData->img, ..., newImg, ...); } - bool operator()(const VmaOwnAllocation& lhs, VkDeviceMemory rhsMem) const + + // Make sure the copy commands finished executing. + vkWaitForFences(...); + + // Destroy old buffers/images bound with pass.pMoves[i].srcAllocation. + for(uint32_t i = 0; i < pass.moveCount; ++i) { - return lhs.m_hMemory < rhsMem; + // ... + vkDestroyImage(device, resData->img, nullptr); } -}; -/* Sequence of VmaAllocation. Represents memory blocks allocated for a specific -Vulkan memory type. */ -struct VmaAllocationVector -{ - // Incrementally sorted by sumFreeSize, ascending. - VmaVector< VmaAllocation*, VmaStlAllocator > m_Allocations; + // Update appropriate descriptors to point to the new places... - VmaAllocationVector(VmaAllocator hAllocator); - ~VmaAllocationVector(); + res = vmaEndDefragmentationPass(allocator, defragCtx, &pass); + if(res == VK_SUCCESS) + break; + else if(res != VK_INCOMPLETE) + // Handle error... 
+}

-	bool IsEmpty() const { return m_Allocations.empty(); }

+vmaEndDefragmentation(allocator, defragCtx, nullptr);
+\endcode
+
+Although functions like vmaCreateBuffer(), vmaCreateImage(), vmaDestroyBuffer(), vmaDestroyImage()
+create/destroy an allocation and a buffer/image at once, these are just a shortcut for
+creating the resource, allocating memory, and binding them together.
+Defragmentation works on memory allocations only. You must handle the rest manually.
+Defragmentation is an iterative process that should repeat "passes" as long as related functions
+return `VK_INCOMPLETE` rather than `VK_SUCCESS`.
+In each pass:
+
+1. vmaBeginDefragmentationPass() function call:
+   - Calculates and returns the list of allocations to be moved in this pass.
+     Note this can be a time-consuming process.
+   - Reserves destination memory for them by creating temporary destination allocations
+     that you can query for their `VkDeviceMemory` + offset using vmaGetAllocationInfo().
+2. Inside the pass, **you should**:
+   - Inspect the returned list of allocations to be moved.
+   - Create new buffers/images and bind them at the returned destination temporary allocations.
+   - Copy data from source to destination resources if necessary.
+   - Destroy the source buffers/images, but NOT their allocations.
+3. vmaEndDefragmentationPass() function call:
+   - Frees the source memory reserved for the allocations that are moved.
+   - Modifies source #VmaAllocation objects that are moved to point to the destination reserved memory.
+   - Frees `VkDeviceMemory` blocks that became empty.
+
+Unlike in previous iterations of the defragmentation API, there is no list of "movable" allocations passed as a parameter.
+The defragmentation algorithm tries to move all suitable allocations.
+You can, however, refuse to move some of them inside a defragmentation pass by setting
+`pass.pMoves[i].operation` to #VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE.
+This is not recommended and may result in suboptimal packing of the allocations after defragmentation.
+If you cannot ensure that every allocation can be moved, it is better to keep the movable allocations separate in a custom pool.
+
+Inside a pass, for each allocation that should be moved:
+
+- You should copy its data from the source to the destination place by calling e.g. `vkCmdCopyBuffer()`, `vkCmdCopyImage()`.
+  - You need to make sure these commands have finished executing before destroying the source buffers/images and before calling vmaEndDefragmentationPass().
+- If a resource doesn't contain any meaningful data, e.g. it is a transient color attachment image to be cleared,
+  filled, and used temporarily in each rendering frame, you can just recreate this image
+  without copying its data.
+- If the resource is in `HOST_VISIBLE` and `HOST_CACHED` memory, you can copy its data on the CPU
+  using `memcpy()`.
+- If you cannot move the allocation, you can set `pass.pMoves[i].operation` to #VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE
+  (see the sketch below). This will cancel the move.
+  - vmaEndDefragmentationPass() will then free the destination memory,
+    not the source memory of the allocation, leaving it unchanged.
+- If you decide the allocation is unimportant and can be destroyed instead of moved (e.g. it wasn't used for a long time),
+  you can set `pass.pMoves[i].operation` to #VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY.
+  - vmaEndDefragmentationPass() will then free both source and destination memory, and will destroy the source #VmaAllocation object.
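+
+For illustration, a minimal sketch of refusing moves inside a pass. The `MyEngineResourceData` structure
+and its `isInUseByGpu` flag are hypothetical engine-side data, not part of VMA:
+
+\code
+for(uint32_t i = 0; i < pass.moveCount; ++i)
+{
+    VmaAllocationInfo allocInfo;
+    vmaGetAllocationInfo(allocator, pass.pMoves[i].srcAllocation, &allocInfo);
+    MyEngineResourceData* resData = (MyEngineResourceData*)allocInfo.pUserData;
+
+    if(resData->isInUseByGpu) // Hypothetical condition checked by your engine.
+    {
+        // Refuse to move this allocation in this pass. vmaEndDefragmentationPass() will
+        // free the reserved destination memory and leave the allocation unchanged.
+        pass.pMoves[i].operation = VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE;
+    }
+}
+\endcode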
+
+You can defragment a specific custom pool by setting VmaDefragmentationInfo::pool
+(like in the example above) or all the default pools by setting this member to null.
+
+Defragmentation is always performed in each pool separately.
+Allocations are never moved between different Vulkan memory types.
+The size of the destination memory reserved for a moved allocation is the same as the original one.
+Alignment of an allocation, as determined using `vkGetBufferMemoryRequirements()` etc., is also respected after defragmentation.
+Buffers/images should be recreated with the same `VkBufferCreateInfo` / `VkImageCreateInfo` parameters as the original ones.
+
+You can perform the defragmentation incrementally to limit the number of allocations and bytes to be moved
+in each pass, e.g. to call it in sync with render frames and avoid long hitches.
+See members: VmaDefragmentationInfo::maxBytesPerPass, VmaDefragmentationInfo::maxAllocationsPerPass.
+
+It is also safe to perform the defragmentation asynchronously to render frames and other Vulkan and VMA
+usage, possibly from multiple threads, with the exception that allocations
+returned in VmaDefragmentationPassMoveInfo::pMoves shouldn't be destroyed until the defragmentation pass is ended.
+
+Mapping is preserved on allocations that are moved during defragmentation.
+Whether mapped through #VMA_ALLOCATION_CREATE_MAPPED_BIT or vmaMapMemory(), the allocations
+are mapped at their new place. Of course, the pointer to the mapped data changes, so it needs to be queried
+using VmaAllocationInfo::pMappedData.
+
+\note Defragmentation is not supported in custom pools created with #VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT.
+
+
+\page statistics Statistics
+
+This library contains several functions that return information about its internal state,
+especially the amount of memory allocated from Vulkan.
+
+\section statistics_numeric_statistics Numeric statistics
+
+If you need to obtain basic statistics about memory usage per heap, together with current budget,
+you can call function vmaGetHeapBudgets() and inspect structure #VmaBudget.
+This is useful to keep track of memory usage and stay within budget
+(see also \ref staying_within_budget).
+Example:
+
+\code
+uint32_t heapIndex = ...
+
+VmaBudget budgets[VK_MAX_MEMORY_HEAPS];
+vmaGetHeapBudgets(allocator, budgets);
+
+printf("My heap currently has %u allocations taking %llu B,\n",
+    budgets[heapIndex].statistics.allocationCount,
+    budgets[heapIndex].statistics.allocationBytes);
+printf("allocated out of %u Vulkan device memory blocks taking %llu B,\n",
+    budgets[heapIndex].statistics.blockCount,
+    budgets[heapIndex].statistics.blockBytes);
+printf("Vulkan reports total usage %llu B with budget %llu B.\n",
+    budgets[heapIndex].usage,
+    budgets[heapIndex].budget);
+\endcode
+
+You can query for more detailed statistics per memory heap, type, and totals,
+including minimum and maximum allocation size and unused range size,
+by calling function vmaCalculateStatistics() and inspecting structure #VmaTotalStatistics
+(see the sketch at the end of this section).
+This function is slower though, as it has to traverse all the internal data structures,
+so it should be used only for debugging purposes.
+
+You can query for statistics of a custom pool using function vmaGetPoolStatistics()
+or vmaCalculatePoolStatistics().
+
+You can query for information about a specific allocation using function vmaGetAllocationInfo().
+It fills the structure #VmaAllocationInfo.
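+
+For example, a minimal sketch of logging library-wide totals with vmaCalculateStatistics() described above.
+The member names `total`, `statistics`, and `allocationSizeMax` of #VmaTotalStatistics / #VmaDetailedStatistics
+are assumptions here - check the structure definitions in the header:
+
+\code
+VmaTotalStatistics stats;
+vmaCalculateStatistics(allocator, &stats);
+
+printf("Total: %u blocks (%llu B), %u allocations (%llu B), largest allocation %llu B.\n",
+    stats.total.statistics.blockCount,
+    stats.total.statistics.blockBytes,
+    stats.total.statistics.allocationCount,
+    stats.total.statistics.allocationBytes,
+    stats.total.allocationSizeMax);
+\endcode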
+ +\section statistics_json_dump JSON dump - // Tries to free memory from any if its Allocations. - // Returns index of Allocation that the memory was freed from, or -1 if not found. - size_t Free(const VkMappedMemoryRange* pMemory); +You can dump internal state of the allocator to a string in JSON format using function vmaBuildStatsString(). +The result is guaranteed to be correct JSON. +It uses ANSI encoding. +Any strings provided by user (see [Allocation names](@ref allocation_names)) +are copied as-is and properly escaped for JSON, so if they use UTF-8, ISO-8859-2 or any other encoding, +this JSON string can be treated as using this encoding. +It must be freed using function vmaFreeStatsString(). - // Performs single step in sorting m_Allocations. They may not be fully sorted - // after this call. - void IncrementallySortAllocations(); - - // Adds statistics of this AllocationVector to pStats. - void AddStats(VmaStats* pStats, uint32_t memTypeIndex, uint32_t memHeapIndex) const; +The format of this JSON string is not part of official documentation of the library, +but it will not change in backward-incompatible way without increasing library major version number +and appropriate mention in changelog. -#if VMA_STATS_STRING_ENABLED - void PrintDetailedMap(class VmaStringBuilder& sb) const; -#endif +The JSON string contains all the data that can be obtained using vmaCalculateStatistics(). +It can also contain detailed map of allocated memory blocks and their regions - +free and occupied by allocations. +This allows e.g. to visualize the memory or assess fragmentation. -private: - VmaAllocator m_hAllocator; -}; -// Main allocator object. -struct VmaAllocator_T -{ - VkDevice m_hDevice; - bool m_AllocationCallbacksSpecified; - VkAllocationCallbacks m_AllocationCallbacks; - VkDeviceSize m_PreferredLargeHeapBlockSize; - VkDeviceSize m_PreferredSmallHeapBlockSize; +\page allocation_annotation Allocation names and user data - VkPhysicalDeviceProperties m_PhysicalDeviceProperties; - VkPhysicalDeviceMemoryProperties m_MemProps; +\section allocation_user_data Allocation user data - VmaAllocationVector* m_pAllocations[VK_MAX_MEMORY_TYPES]; - /* There can be at most one allocation that is completely empty - a - hysteresis to avoid pessimistic case of alternating creation and destruction - of a VkDeviceMemory. */ - bool m_HasEmptyAllocation[VK_MAX_MEMORY_TYPES]; - VmaMutex m_AllocationsMutex[VK_MAX_MEMORY_TYPES]; - - // Each vector is sorted by memory (handle value). - typedef VmaVector< VmaOwnAllocation, VmaStlAllocator > OwnAllocationVectorType; - OwnAllocationVectorType* m_pOwnAllocations[VK_MAX_MEMORY_TYPES]; - VmaMutex m_OwnAllocationsMutex[VK_MAX_MEMORY_TYPES]; - - // Sorted by first (VkBuffer handle value). - VMA_MAP_TYPE(VkBuffer, VkMappedMemoryRange) m_BufferToMemoryMap; - VmaMutex m_BufferToMemoryMapMutex; - // Sorted by first (VkImage handle value). - VMA_MAP_TYPE(VkImage, VkMappedMemoryRange) m_ImageToMemoryMap; - VmaMutex m_ImageToMemoryMapMutex; - - VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo); - ~VmaAllocator_T(); +You can annotate allocations with your own information, e.g. for debugging purposes. +To do that, fill VmaAllocationCreateInfo::pUserData field when creating +an allocation. It is an opaque `void*` pointer. You can use it e.g. as a pointer, +some handle, index, key, ordinal number or any other value that would associate +the allocation with your custom metadata. +It is useful to identify appropriate data structures in your engine given #VmaAllocation, +e.g. 
when doing \ref defragmentation. - const VkAllocationCallbacks* GetAllocationCallbacks() const - { - return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0; - } +\code +VkBufferCreateInfo bufCreateInfo = ... - VkDeviceSize GetPreferredBlockSize(uint32_t memTypeIndex) const; +MyBufferMetadata* pMetadata = CreateBufferMetadata(); - VkDeviceSize GetBufferImageGranularity() const - { - return VMA_MAX( - VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY, - m_PhysicalDeviceProperties.limits.bufferImageGranularity); - } +VmaAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; +allocCreateInfo.pUserData = pMetadata; - uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; } - uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; } +VkBuffer buffer; +VmaAllocation allocation; +vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buffer, &allocation, nullptr); +\endcode - // Main allocation function. - VkResult AllocateMemory( - const VkMemoryRequirements& vkMemReq, - const VmaMemoryRequirements& vmaMemReq, - VmaSuballocationType suballocType, - VkMappedMemoryRange* pMemory, - uint32_t* pMemoryTypeIndex); +The pointer may be later retrieved as VmaAllocationInfo::pUserData: - // Main deallocation function. - void FreeMemory(const VkMappedMemoryRange* pMemory); +\code +VmaAllocationInfo allocInfo; +vmaGetAllocationInfo(allocator, allocation, &allocInfo); +MyBufferMetadata* pMetadata = (MyBufferMetadata*)allocInfo.pUserData; +\endcode - void CalculateStats(VmaStats* pStats); +It can also be changed using function vmaSetAllocationUserData(). -#if VMA_STATS_STRING_ENABLED - void PrintDetailedMap(class VmaStringBuilder& sb); -#endif +Values of (non-zero) allocations' `pUserData` are printed in JSON report created by +vmaBuildStatsString() in hexadecimal form. -private: - VkPhysicalDevice m_PhysicalDevice; +\section allocation_names Allocation names + +An allocation can also carry a null-terminated string, giving a name to the allocation. +To set it, call vmaSetAllocationName(). +The library creates internal copy of the string, so the pointer you pass doesn't need +to be valid for whole lifetime of the allocation. You can free it after the call. + +\code +std::string imageName = "Texture: "; +imageName += fileName; +vmaSetAllocationName(allocator, allocation, imageName.c_str()); +\endcode + +The string can be later retrieved by inspecting VmaAllocationInfo::pName. +It is also printed in JSON report created by vmaBuildStatsString(). + +\note Setting string name to VMA allocation doesn't automatically set it to the Vulkan buffer or image created with it. +You must do it manually using an extension like VK_EXT_debug_utils, which is independent of this library. + + +\page virtual_allocator Virtual allocator + +As an extra feature, the core allocation algorithm of the library is exposed through a simple and convenient API of "virtual allocator". +It doesn't allocate any real GPU memory. It just keeps track of used and free regions of a "virtual block". +You can use it to allocate your own memory or other objects, even completely unrelated to Vulkan. +A common use case is sub-allocation of pieces of one large GPU buffer. + +\section virtual_allocator_creating_virtual_block Creating virtual block + +To use this functionality, there is no main "allocator" object. +You don't need to have #VmaAllocator object created. 
+All you need to do is to create a separate #VmaVirtualBlock object for each block of memory you want to be managed by the allocator: + +-# Fill in #VmaVirtualBlockCreateInfo structure. +-# Call vmaCreateVirtualBlock(). Get new #VmaVirtualBlock object. + +Example: + +\code +VmaVirtualBlockCreateInfo blockCreateInfo = {}; +blockCreateInfo.size = 1048576; // 1 MB + +VmaVirtualBlock block; +VkResult res = vmaCreateVirtualBlock(&blockCreateInfo, &block); +\endcode + +\section virtual_allocator_making_virtual_allocations Making virtual allocations + +#VmaVirtualBlock object contains internal data structure that keeps track of free and occupied regions +using the same code as the main Vulkan memory allocator. +Similarly to #VmaAllocation for standard GPU allocations, there is #VmaVirtualAllocation type +that represents an opaque handle to an allocation within the virtual block. + +In order to make such allocation: - VkResult AllocateMemoryOfType( - const VkMemoryRequirements& vkMemReq, - const VmaMemoryRequirements& vmaMemReq, - uint32_t memTypeIndex, - VmaSuballocationType suballocType, - VkMappedMemoryRange* pMemory); - - // Allocates and registers new VkDeviceMemory specifically for single allocation. - VkResult AllocateOwnMemory( - VkDeviceSize size, - VmaSuballocationType suballocType, - uint32_t memTypeIndex, - VkMappedMemoryRange* pMemory); +-# Fill in #VmaVirtualAllocationCreateInfo structure. +-# Call vmaVirtualAllocate(). Get new #VmaVirtualAllocation object that represents the allocation. + You can also receive `VkDeviceSize offset` that was assigned to the allocation. - // Tries to free pMemory as Own Memory. Returns true if found and freed. - bool FreeOwnMemory(const VkMappedMemoryRange* pMemory); -}; +Example: -//////////////////////////////////////////////////////////////////////////////// -// Memory allocation #2 after VmaAllocator_T definition +\code +VmaVirtualAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.size = 4096; // 4 KB -static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment) +VmaVirtualAllocation alloc; +VkDeviceSize offset; +res = vmaVirtualAllocate(block, &allocCreateInfo, &alloc, &offset); +if(res == VK_SUCCESS) { - return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment); + // Use the 4 KB of your memory starting at offset. } - -static void VmaFree(VmaAllocator hAllocator, void* ptr) +else { - VmaFree(&hAllocator->m_AllocationCallbacks, ptr); + // Allocation failed - no space for it could be found. Handle this error! } +\endcode -template -static T* VmaAllocate(VmaAllocator hAllocator) -{ - return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T)); -} +\section virtual_allocator_deallocation Deallocation -template -static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count) -{ - return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T)); -} +When no longer needed, an allocation can be freed by calling vmaVirtualFree(). +You can only pass to this function an allocation that was previously returned by vmaVirtualAllocate() +called for the same #VmaVirtualBlock. -template -static void vma_delete(VmaAllocator hAllocator, T* ptr) -{ - if(ptr != VMA_NULL) - { - ptr->~T(); - VmaFree(hAllocator, ptr); - } -} +When whole block is no longer needed, the block object can be released by calling vmaDestroyVirtualBlock(). +All allocations must be freed before the block is destroyed, which is checked internally by an assert. 
+However, if you don't want to call vmaVirtualFree() for each allocation, you can use vmaClearVirtualBlock() to free them all at once - +a feature not available in normal Vulkan memory allocator. Example: -template -static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count) -{ - if(ptr != VMA_NULL) - { - for(size_t i = count; i--; ) - ptr[i].~T(); - VmaFree(hAllocator, ptr); - } -} +\code +vmaVirtualFree(block, alloc); +vmaDestroyVirtualBlock(block); +\endcode -//////////////////////////////////////////////////////////////////////////////// -// VmaStringBuilder +\section virtual_allocator_allocation_parameters Allocation parameters -#if VMA_STATS_STRING_ENABLED +You can attach a custom pointer to each allocation by using vmaSetVirtualAllocationUserData(). +Its default value is null. +It can be used to store any data that needs to be associated with that allocation - e.g. an index, a handle, or a pointer to some +larger data structure containing more information. Example: -class VmaStringBuilder +\code +struct CustomAllocData { -public: - VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator(alloc->GetAllocationCallbacks())) { } - size_t GetLength() const { return m_Data.size(); } - const char* GetData() const { return m_Data.data(); } + std::string m_AllocName; +}; +CustomAllocData* allocData = new CustomAllocData(); +allocData->m_AllocName = "My allocation 1"; +vmaSetVirtualAllocationUserData(block, alloc, allocData); +\endcode - void Add(char ch) { m_Data.push_back(ch); } - void Add(const char* pStr); - void AddNewLine() { Add('\n'); } - void AddNumber(uint32_t num); - void AddNumber(uint64_t num); - void AddBool(bool b) { Add(b ? "true" : "false"); } - void AddNull() { Add("null"); } - void AddString(const char* pStr); +The pointer can later be fetched, along with allocation offset and size, by passing the allocation handle to function +vmaGetVirtualAllocationInfo() and inspecting returned structure #VmaVirtualAllocationInfo. +If you allocated a new object to be used as the custom pointer, don't forget to delete that object before freeing the allocation! +Example: -private: - VmaVector< char, VmaStlAllocator > m_Data; -}; +\code +VmaVirtualAllocationInfo allocInfo; +vmaGetVirtualAllocationInfo(block, alloc, &allocInfo); +delete (CustomAllocData*)allocInfo.pUserData; -void VmaStringBuilder::Add(const char* pStr) -{ - const size_t strLen = strlen(pStr); - if(strLen > 0) - { - const size_t oldCount = m_Data.size(); - m_Data.resize(oldCount + strLen); - memcpy(m_Data.data() + oldCount, pStr, strLen); - } -} +vmaVirtualFree(block, alloc); +\endcode -void VmaStringBuilder::AddNumber(uint32_t num) -{ - char buf[11]; - VmaUint32ToStr(buf, sizeof(buf), num); - Add(buf); -} +\section virtual_allocator_alignment_and_units Alignment and units -void VmaStringBuilder::AddNumber(uint64_t num) -{ - char buf[21]; - VmaUint64ToStr(buf, sizeof(buf), num); - Add(buf); -} +It feels natural to express sizes and offsets in bytes. +If an offset of an allocation needs to be aligned to a multiply of some number (e.g. 4 bytes), you can fill optional member +VmaVirtualAllocationCreateInfo::alignment to request it. 
Example: -void VmaStringBuilder::AddString(const char* pStr) -{ - Add('"'); - const size_t strLen = strlen(pStr); - for(size_t i = 0; i < strLen; ++i) - { - char ch = pStr[i]; - if(ch == '\'') - Add("\\\\"); - else if(ch == '"') - Add("\\\""); - else if(ch >= 32) - Add(ch); - else switch(ch) - { - case '\n': - Add("\\n"); - break; - case '\r': - Add("\\r"); - break; - case '\t': - Add("\\t"); - break; - default: - VMA_ASSERT(0 && "Character not currently supported."); - break; - } - } - Add('"'); -} +\code +VmaVirtualAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.size = 4096; // 4 KB +allocCreateInfo.alignment = 4; // Returned offset must be a multiply of 4 B -//////////////////////////////////////////////////////////////////////////////// +VmaVirtualAllocation alloc; +res = vmaVirtualAllocate(block, &allocCreateInfo, &alloc, nullptr); +\endcode -// Correspond to values of enum VmaSuballocationType. -static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = { - "FREE", - "UNKNOWN", - "BUFFER", - "IMAGE_UNKNOWN", - "IMAGE_LINEAR", - "IMAGE_OPTIMAL", -}; +Alignments of different allocations made from one block may vary. +However, if all alignments and sizes are always multiply of some size e.g. 4 B or `sizeof(MyDataStruct)`, +you can express all sizes, alignments, and offsets in multiples of that size instead of individual bytes. +It might be more convenient, but you need to make sure to use this new unit consistently in all the places: -static void VmaPrintStatInfo(VmaStringBuilder& sb, const VmaStatInfo& stat) -{ - sb.Add("{ \"Allocations\": "); - sb.AddNumber(stat.AllocationCount); - sb.Add(", \"Suballocations\": "); - sb.AddNumber(stat.SuballocationCount); - sb.Add(", \"UnusedRanges\": "); - sb.AddNumber(stat.UnusedRangeCount); - sb.Add(", \"UsedBytes\": "); - sb.AddNumber(stat.UsedBytes); - sb.Add(", \"UnusedBytes\": "); - sb.AddNumber(stat.UnusedBytes); - sb.Add(", \"SuballocationSize\": { \"Min\": "); - sb.AddNumber(stat.SuballocationSizeMin); - sb.Add(", \"Avg\": "); - sb.AddNumber(stat.SuballocationSizeAvg); - sb.Add(", \"Max\": "); - sb.AddNumber(stat.SuballocationSizeMax); - sb.Add(" }, \"UnusedRangeSize\": { \"Min\": "); - sb.AddNumber(stat.UnusedRangeSizeMin); - sb.Add(", \"Avg\": "); - sb.AddNumber(stat.UnusedRangeSizeAvg); - sb.Add(", \"Max\": "); - sb.AddNumber(stat.UnusedRangeSizeMax); - sb.Add(" } }"); -} - -#endif // #if VMA_STATS_STRING_ENABLED +- VmaVirtualBlockCreateInfo::size +- VmaVirtualAllocationCreateInfo::size and VmaVirtualAllocationCreateInfo::alignment +- Using offset returned by vmaVirtualAllocate() or in VmaVirtualAllocationInfo::offset -struct VmaSuballocationItemSizeLess -{ - bool operator()( - const VmaSuballocationList::iterator lhs, - const VmaSuballocationList::iterator rhs) const - { - return lhs->size < rhs->size; - } - bool operator()( - const VmaSuballocationList::iterator lhs, - VkDeviceSize rhsSize) const - { - return lhs->size < rhsSize; - } -}; +\section virtual_allocator_statistics Statistics -VmaAllocation::VmaAllocation(VmaAllocator hAllocator) : - m_hMemory(VK_NULL_HANDLE), - m_Size(0), - m_FreeCount(0), - m_SumFreeSize(0), - m_Suballocations(VmaStlAllocator(hAllocator->GetAllocationCallbacks())), - m_FreeSuballocationsBySize(VmaStlAllocator(hAllocator->GetAllocationCallbacks())) -{ -} +You can obtain statistics of a virtual block using vmaGetVirtualBlockStatistics() +(to get brief statistics that are fast to calculate) +or vmaCalculateVirtualBlockStatistics() (to get more detailed statistics, slower to calculate). 
+The functions fill structures #VmaStatistics, #VmaDetailedStatistics respectively - same as used by the normal Vulkan memory allocator. +Example: -void VmaAllocation::Init(VkDeviceMemory newMemory, VkDeviceSize newSize) -{ - VMA_ASSERT(m_hMemory == VK_NULL_HANDLE); +\code +VmaStatistics stats; +vmaGetVirtualBlockStatistics(block, &stats); +printf("My virtual block has %llu bytes used by %u virtual allocations\n", + stats.allocationBytes, stats.allocationCount); +\endcode - m_hMemory = newMemory; - m_Size = newSize; - m_FreeCount = 1; - m_SumFreeSize = newSize; +You can also request a full list of allocations and free regions as a string in JSON format by calling +vmaBuildVirtualBlockStatsString(). +Returned string must be later freed using vmaFreeVirtualBlockStatsString(). +The format of this string differs from the one returned by the main Vulkan allocator, but it is similar. - m_Suballocations.clear(); - m_FreeSuballocationsBySize.clear(); +\section virtual_allocator_additional_considerations Additional considerations - VmaSuballocation suballoc = {}; - suballoc.offset = 0; - suballoc.size = newSize; - suballoc.type = VMA_SUBALLOCATION_TYPE_FREE; +The "virtual allocator" functionality is implemented on a level of individual memory blocks. +Keeping track of a whole collection of blocks, allocating new ones when out of free space, +deleting empty ones, and deciding which one to try first for a new allocation must be implemented by the user. - m_Suballocations.push_back(suballoc); - VmaSuballocationList::iterator suballocItem = m_Suballocations.end(); - --suballocItem; - m_FreeSuballocationsBySize.push_back(suballocItem); -} +Alternative allocation algorithms are supported, just like in custom pools of the real GPU memory. +See enum #VmaVirtualBlockCreateFlagBits to learn how to specify them (e.g. #VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT). +You can find their description in chapter \ref custom_memory_pools. +Allocation strategies are also supported. +See enum #VmaVirtualAllocationCreateFlagBits to learn how to specify them (e.g. #VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT). -void VmaAllocation::Destroy(VmaAllocator allocator) -{ - VMA_ASSERT(m_hMemory != VK_NULL_HANDLE); - vkFreeMemory(allocator->m_hDevice, m_hMemory, allocator->GetAllocationCallbacks()); - m_hMemory = VK_NULL_HANDLE; -} +Following features are supported only by the allocator of the real GPU memory and not by virtual allocations: +buffer-image granularity, `VMA_DEBUG_MARGIN`, `VMA_MIN_ALIGNMENT`. -bool VmaAllocation::Validate() const -{ - if((m_hMemory == VK_NULL_HANDLE) || - (m_Size == 0) || - m_Suballocations.empty()) - { - return false; - } - - // Expected offset of new suballocation as calculates from previous ones. - VkDeviceSize calculatedOffset = 0; - // Expected number of free suballocations as calculated from traversing their list. - uint32_t calculatedFreeCount = 0; - // Expected sum size of free suballocations as calculated from traversing their list. - VkDeviceSize calculatedSumFreeSize = 0; - // Expected number of free suballocations that should be registered in - // m_FreeSuballocationsBySize calculated from traversing their list. - size_t freeSuballocationsToRegister = 0; - // True if previous visisted suballocation was free. 
- bool prevFree = false; - - for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin(); - suballocItem != m_Suballocations.cend(); - ++suballocItem) - { - const VmaSuballocation& subAlloc = *suballocItem; - - // Actual offset of this suballocation doesn't match expected one. - if(subAlloc.offset != calculatedOffset) - return false; - const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE); - // Two adjacent free suballocations are invalid. They should be merged. - if(prevFree && currFree) - return false; - prevFree = currFree; +\page debugging_memory_usage Debugging incorrect memory usage - if(currFree) - { - calculatedSumFreeSize += subAlloc.size; - ++calculatedFreeCount; - if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER) - ++freeSuballocationsToRegister; - } +If you suspect a bug with memory usage, like usage of uninitialized memory or +memory being overwritten out of bounds of an allocation, +you can use debug features of this library to verify this. - calculatedOffset += subAlloc.size; - } +\section debugging_memory_usage_initialization Memory initialization - // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't - // match expected one. - if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister) - return false; +If you experience a bug with incorrect and nondeterministic data in your program and you suspect uninitialized memory to be used, +you can enable automatic memory initialization to verify this. +To do it, define macro `VMA_DEBUG_INITIALIZE_ALLOCATIONS` to 1. - VkDeviceSize lastSize = 0; - for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i) - { - VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i]; - - // Only free suballocations can be registered in m_FreeSuballocationsBySize. - if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE) - return false; - // They must be sorted by size ascending. - if(suballocItem->size < lastSize) - return false; +\code +#define VMA_DEBUG_INITIALIZE_ALLOCATIONS 1 +#include "vk_mem_alloc.h" +\endcode - lastSize = suballocItem->size; - } +It makes memory of new allocations initialized to bit pattern `0xDCDCDCDC`. +Before an allocation is destroyed, its memory is filled with bit pattern `0xEFEFEFEF`. +Memory is automatically mapped and unmapped if necessary. - // Check if totals match calculacted values. - return - (calculatedOffset == m_Size) && - (calculatedSumFreeSize == m_SumFreeSize) && - (calculatedFreeCount == m_FreeCount); -} +If you find these values while debugging your program, good chances are that you incorrectly +read Vulkan memory that is allocated but not initialized, or already freed, respectively. -/* -How many suitable free suballocations to analyze before choosing best one. -- Set to 1 to use First-Fit algorithm - first suitable free suballocation will - be chosen. -- Set to UINT32_MAX to use Best-Fit/Worst-Fit algorithm - all suitable free - suballocations will be analized and best one will be chosen. -- Any other value is also acceptable. -*/ -//static const uint32_t MAX_SUITABLE_SUBALLOCATIONS_TO_CHECK = 8; +Memory initialization works only with memory types that are `HOST_VISIBLE` and with allocations that can be mapped. +It works also with dedicated allocations. 
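+
+For example, while debugging you can inspect a mapped allocation for the `0xDCDCDCDC` pattern to see
+whether your code ever wrote to it. A minimal sketch, assuming the allocation is host-visible and
+persistently mapped; the helper name is made up for illustration:
+
+\code
+// Returns true if the first bytes of the mapped allocation still hold the "uninitialized" fill pattern.
+static bool LooksUninitialized(const VmaAllocationInfo& allocInfo)
+{
+    const uint8_t* p = (const uint8_t*)allocInfo.pMappedData;
+    if(p == nullptr)
+        return false; // Not mapped - cannot inspect from the CPU.
+    const size_t bytesToCheck = (size_t)(allocInfo.size < 64 ? allocInfo.size : 64);
+    for(size_t i = 0; i < bytesToCheck; ++i)
+    {
+        if(p[i] != 0xDC)
+            return false;
+    }
+    return true;
+}
+\endcode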
-bool VmaAllocation::CreateAllocationRequest( - VkDeviceSize bufferImageGranularity, - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - VmaSuballocationType allocType, - VmaAllocationRequest* pAllocationRequest) -{ - VMA_ASSERT(allocSize > 0); - VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE); - VMA_ASSERT(pAllocationRequest != VMA_NULL); - VMA_HEAVY_ASSERT(Validate()); +\section debugging_memory_usage_margins Margins - // There is not enough total free space in this allocation to fullfill the request: Early return. - if(m_SumFreeSize < allocSize) - return false; +By default, allocations are laid out in memory blocks next to each other if possible +(considering required alignment, `bufferImageGranularity`, and `nonCoherentAtomSize`). - // Old brute-force algorithm, linearly searching suballocations. - /* - uint32_t suitableSuballocationsFound = 0; - for(VmaSuballocationList::iterator suballocItem = suballocations.Front(); - suballocItem != VMA_NULL && - suitableSuballocationsFound < MAX_SUITABLE_SUBALLOCATIONS_TO_CHECK; - suballocItem = suballocItem->Next) - { - if(suballocItem->Value.type == VMA_SUBALLOCATION_TYPE_FREE) - { - VkDeviceSize offset = 0, cost = 0; - if(CheckAllocation(bufferImageGranularity, allocSize, allocAlignment, allocType, suballocItem, &offset, &cost)) - { - ++suitableSuballocationsFound; - if(cost < costLimit) - { - pAllocationRequest->freeSuballocationItem = suballocItem; - pAllocationRequest->offset = offset; - pAllocationRequest->cost = cost; - if(cost == 0) - return true; - costLimit = cost; - betterSuballocationFound = true; - } - } - } - } - */ +![Allocations without margin](../gfx/Margins_1.png) - // New algorithm, efficiently searching freeSuballocationsBySize. - const size_t freeSuballocCount = m_FreeSuballocationsBySize.size(); - if(freeSuballocCount > 0) - { - if(VMA_BEST_FIT) - { - // Find first free suballocation with size not less than allocSize. - VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess( - m_FreeSuballocationsBySize.data(), - m_FreeSuballocationsBySize.data() + freeSuballocCount, - allocSize, - VmaSuballocationItemSizeLess()); - size_t index = it - m_FreeSuballocationsBySize.data(); - for(; index < freeSuballocCount; ++index) - { - VkDeviceSize offset = 0; - const VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[index]; - if(CheckAllocation(bufferImageGranularity, allocSize, allocAlignment, allocType, suballocItem, &offset)) - { - pAllocationRequest->freeSuballocationItem = suballocItem; - pAllocationRequest->offset = offset; - return true; - } - } - } - else - { - // Search staring from biggest suballocations. - for(size_t index = freeSuballocCount; index--; ) - { - VkDeviceSize offset = 0; - const VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[index]; - if(CheckAllocation(bufferImageGranularity, allocSize, allocAlignment, allocType, suballocItem, &offset)) - { - pAllocationRequest->freeSuballocationItem = suballocItem; - pAllocationRequest->offset = offset; - return true; - } - } - } - } +Define macro `VMA_DEBUG_MARGIN` to some non-zero value (e.g. 16) to enforce specified +number of bytes as a margin after every allocation. 
- return false; -} +\code +#define VMA_DEBUG_MARGIN 16 +#include "vk_mem_alloc.h" +\endcode -bool VmaAllocation::CheckAllocation( - VkDeviceSize bufferImageGranularity, - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - VmaSuballocationType allocType, - VmaSuballocationList::const_iterator freeSuballocItem, - VkDeviceSize* pOffset) const -{ - VMA_ASSERT(allocSize > 0); - VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE); - VMA_ASSERT(freeSuballocItem != m_Suballocations.cend()); - VMA_ASSERT(pOffset != VMA_NULL); +![Allocations with margin](../gfx/Margins_2.png) - const VmaSuballocation& suballoc = *freeSuballocItem; - VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE); +If your bug goes away after enabling margins, it means it may be caused by memory +being overwritten outside of allocation boundaries. It is not 100% certain though. +Change in application behavior may also be caused by different order and distribution +of allocations across memory blocks after margins are applied. - // Size of this suballocation is too small for this request: Early return. - if(suballoc.size < allocSize) - return false; +Margins work with all types of memory. - // Start from offset equal to beginning of this suballocation. - *pOffset = suballoc.offset; - - // Apply VMA_DEBUG_MARGIN at the beginning. - if((VMA_DEBUG_MARGIN > 0) && freeSuballocItem != m_Suballocations.cbegin()) - *pOffset += VMA_DEBUG_MARGIN; - - // Apply alignment. - const VkDeviceSize alignment = VMA_MAX(allocAlignment, VMA_DEBUG_ALIGNMENT); - *pOffset = VmaAlignUp(*pOffset, alignment); - - // Check previous suballocations for BufferImageGranularity conflicts. - // Make bigger alignment if necessary. - if(bufferImageGranularity > 1) - { - bool bufferImageGranularityConflict = false; - VmaSuballocationList::const_iterator prevSuballocItem = freeSuballocItem; - while(prevSuballocItem != m_Suballocations.cbegin()) - { - --prevSuballocItem; - const VmaSuballocation& prevSuballoc = *prevSuballocItem; - if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity)) - { - if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType)) - { - bufferImageGranularityConflict = true; - break; - } - } - else - // Already on previous page. - break; - } - if(bufferImageGranularityConflict) - *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity); - } - - // Calculate padding at the beginning based on current offset. - const VkDeviceSize paddingBegin = *pOffset - suballoc.offset; +Margin is applied only to allocations made out of memory blocks and not to dedicated +allocations, which have their own memory block of specific size. +It is thus not applied to allocations made using #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT flag +or those automatically decided to put into dedicated allocations, e.g. due to its +large size or recommended by VK_KHR_dedicated_allocation extension. - // Calculate required margin at the end if this is not last suballocation. - VmaSuballocationList::const_iterator next = freeSuballocItem; - ++next; - const VkDeviceSize requiredEndMargin = - (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0; +Margins appear in [JSON dump](@ref statistics_json_dump) as part of free space. - // Fail if requested size plus margin before and after is bigger than size of this suballocation. - if(paddingBegin + allocSize + requiredEndMargin > suballoc.size) - return false; +Note that enabling margins increases memory usage and fragmentation. 
- // Check next suballocations for BufferImageGranularity conflicts. - // If conflict exists, allocation cannot be made here. - if(bufferImageGranularity > 1) - { - VmaSuballocationList::const_iterator nextSuballocItem = freeSuballocItem; - ++nextSuballocItem; - while(nextSuballocItem != m_Suballocations.cend()) - { - const VmaSuballocation& nextSuballoc = *nextSuballocItem; - if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) - { - if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type)) - return false; - } - else - // Already on next page. - break; - ++nextSuballocItem; - } - } +Margins do not apply to \ref virtual_allocator. - // All tests passed: Success. pOffset is already filled. - return true; -} +\section debugging_memory_usage_corruption_detection Corruption detection -bool VmaAllocation::IsEmpty() const -{ - return (m_Suballocations.size() == 1) && (m_FreeCount == 1); -} +You can additionally define macro `VMA_DEBUG_DETECT_CORRUPTION` to 1 to enable validation +of contents of the margins. -void VmaAllocation::Alloc( - const VmaAllocationRequest& request, - VmaSuballocationType type, - VkDeviceSize allocSize) -{ - VMA_ASSERT(request.freeSuballocationItem != m_Suballocations.end()); - VmaSuballocation& suballoc = *request.freeSuballocationItem; - // Given suballocation is a free block. - VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE); - // Given offset is inside this suballocation. - VMA_ASSERT(request.offset >= suballoc.offset); - const VkDeviceSize paddingBegin = request.offset - suballoc.offset; - VMA_ASSERT(suballoc.size >= paddingBegin + allocSize); - const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize; - - // Unregister this free suballocation from m_FreeSuballocationsBySize and update - // it to become used. - UnregisterFreeSuballocation(request.freeSuballocationItem); - - suballoc.offset = request.offset; - suballoc.size = allocSize; - suballoc.type = type; - - // If there are any free bytes remaining at the end, insert new free suballocation after current one. - if(paddingEnd) - { - VmaSuballocation paddingSuballoc = {}; - paddingSuballoc.offset = request.offset + allocSize; - paddingSuballoc.size = paddingEnd; - paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE; - VmaSuballocationList::iterator next = request.freeSuballocationItem; - ++next; - const VmaSuballocationList::iterator paddingEndItem = - m_Suballocations.insert(next, paddingSuballoc); - RegisterFreeSuballocation(paddingEndItem); - } - - // If there are any free bytes remaining at the beginning, insert new free suballocation before current one. - if(paddingBegin) - { - VmaSuballocation paddingSuballoc = {}; - paddingSuballoc.offset = request.offset - paddingBegin; - paddingSuballoc.size = paddingBegin; - paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE; - const VmaSuballocationList::iterator paddingBeginItem = - m_Suballocations.insert(request.freeSuballocationItem, paddingSuballoc); - RegisterFreeSuballocation(paddingBeginItem); - } - - // Update totals. - m_FreeCount = m_FreeCount - 1; - if(paddingBegin > 0) - ++m_FreeCount; - if(paddingEnd > 0) - ++m_FreeCount; - m_SumFreeSize -= allocSize; -} - -void VmaAllocation::FreeSuballocation(VmaSuballocationList::iterator suballocItem) -{ - // Change this suballocation to be marked as free. - VmaSuballocation& suballoc = *suballocItem; - suballoc.type = VMA_SUBALLOCATION_TYPE_FREE; - - // Update totals. 
- ++m_FreeCount; - m_SumFreeSize += suballoc.size; +\code +#define VMA_DEBUG_MARGIN 16 +#define VMA_DEBUG_DETECT_CORRUPTION 1 +#include "vk_mem_alloc.h" +\endcode - // Merge with previous and/or next suballocation if it's also free. - bool mergeWithNext = false; - bool mergeWithPrev = false; - - VmaSuballocationList::iterator nextItem = suballocItem; - ++nextItem; - if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)) - mergeWithNext = true; +When this feature is enabled, number of bytes specified as `VMA_DEBUG_MARGIN` +(it must be multiply of 4) after every allocation is filled with a magic number. +This idea is also know as "canary". +Memory is automatically mapped and unmapped if necessary. - VmaSuballocationList::iterator prevItem = suballocItem; - if(suballocItem != m_Suballocations.begin()) - { - --prevItem; - if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE) - mergeWithPrev = true; - } +This number is validated automatically when the allocation is destroyed. +If it is not equal to the expected value, `VMA_ASSERT()` is executed. +It clearly means that either CPU or GPU overwritten the memory outside of boundaries of the allocation, +which indicates a serious bug. - if(mergeWithNext) - { - UnregisterFreeSuballocation(nextItem); - MergeFreeWithNext(suballocItem); - } +You can also explicitly request checking margins of all allocations in all memory blocks +that belong to specified memory types by using function vmaCheckCorruption(), +or in memory blocks that belong to specified custom pool, by using function +vmaCheckPoolCorruption(). - if(mergeWithPrev) - { - UnregisterFreeSuballocation(prevItem); - MergeFreeWithNext(prevItem); - RegisterFreeSuballocation(prevItem); - } - else - RegisterFreeSuballocation(suballocItem); -} +Margin validation (corruption detection) works only for memory types that are +`HOST_VISIBLE` and `HOST_COHERENT`. -void VmaAllocation::Free(const VkMappedMemoryRange* pMemory) -{ - // If suballocation to free has offset smaller than half of allocation size, search forward. - // Otherwise search backward. - const bool forwardDirection = pMemory->offset < (m_Size / 2); - if(forwardDirection) - { - for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin(); - suballocItem != m_Suballocations.end(); - ++suballocItem) - { - VmaSuballocation& suballoc = *suballocItem; - if(suballoc.offset == pMemory->offset) - { - FreeSuballocation(suballocItem); - VMA_HEAVY_ASSERT(Validate()); - return; - } - } - VMA_ASSERT(0 && "Not found!"); - } - else - { - for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin(); - suballocItem != m_Suballocations.end(); - ++suballocItem) - { - VmaSuballocation& suballoc = *suballocItem; - if(suballoc.offset == pMemory->offset) - { - FreeSuballocation(suballocItem); - VMA_HEAVY_ASSERT(Validate()); - return; - } - } - VMA_ASSERT(0 && "Not found!"); - } -} -#if VMA_STATS_STRING_ENABLED +\section debugging_memory_usage_leak_detection Leak detection features + +At allocation and allocator destruction time VMA checks for unfreed and unmapped blocks using +`VMA_ASSERT_LEAK()`. This macro defaults to an assertion, triggering a typically fatal error in Debug +builds, and doing nothing in Release builds. You can provide your own definition of `VMA_ASSERT_LEAK()` +to change this behavior. + +At memory block destruction time VMA lists out all unfreed allocations using the `VMA_LEAK_LOG_FORMAT()` +macro, which defaults to `VMA_DEBUG_LOG_FORMAT`, which in turn defaults to a no-op. 
+If you're having trouble with leaks - for example, the aforementioned assertion triggers, but you don't +quite know \em why -, overriding this macro to print out the the leaking blocks, combined with assigning +individual names to allocations using vmaSetAllocationName(), can greatly aid in fixing them. + +\page other_api_interop Interop with other graphics APIs + +VMA provides some features that help with interoperability with other graphics APIs, e.g. OpenGL, Direct3D 11, Direct3D 12. + +\section other_api_interop_exporting_memory Exporting memory + +On Windows, the VK_KHR_external_memory_win32 device extension allows exporting a Win32 `HANDLE` +of a `VkDeviceMemory` block, to be able to reference the memory on other Vulkan logical devices or instances, +in multiple processes, and/or in multiple APIs. +VMA offers support for it. + +\subsection other_api_interop_exporting_initialization Initialization + +1) Make sure the extension is defined in the code by including following header before including VMA: -void VmaAllocation::PrintDetailedMap(class VmaStringBuilder& sb) const -{ - sb.Add("{\n\t\t\t\"Bytes\": "); - sb.AddNumber(m_Size); - sb.Add(",\n\t\t\t\"FreeBytes\": "); - sb.AddNumber(m_SumFreeSize); - sb.Add(",\n\t\t\t\"Suballocations\": "); - sb.AddNumber(m_Suballocations.size()); - sb.Add(",\n\t\t\t\"FreeSuballocations\": "); - sb.AddNumber(m_FreeCount); - sb.Add(",\n\t\t\t\"SuballocationList\": ["); - - size_t i = 0; - for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin(); - suballocItem != m_Suballocations.cend(); - ++suballocItem, ++i) - { - if(i > 0) - sb.Add(",\n\t\t\t\t{ \"Type\": "); - else - sb.Add("\n\t\t\t\t{ \"Type\": "); - sb.AddString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]); - sb.Add(", \"Size\": "); - sb.AddNumber(suballocItem->size); - sb.Add(", \"Offset\": "); - sb.AddNumber(suballocItem->offset); - sb.Add(" }"); - } +\code +#include +\endcode - sb.Add("\n\t\t\t]\n\t\t}"); -} +2) Check if "VK_KHR_external_memory_win32" is available among device extensions. +Enable it when creating the `VkDevice` object. -#endif // #if VMA_STATS_STRING_ENABLED +3) Enable the usage of this extension in VMA by setting flag #VMA_ALLOCATOR_CREATE_KHR_EXTERNAL_MEMORY_WIN32_BIT +when calling vmaCreateAllocator(). -void VmaAllocation::MergeFreeWithNext(VmaSuballocationList::iterator item) -{ - VMA_ASSERT(item != m_Suballocations.end()); - VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE); - - VmaSuballocationList::iterator nextItem = item; - ++nextItem; - VMA_ASSERT(nextItem != m_Suballocations.end()); - VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE); +4) Make sure that VMA has access to the `vkGetMemoryWin32HandleKHR` function by either enabling `VMA_DYNAMIC_VULKAN_FUNCTIONS` macro +or setting VmaVulkanFunctions::vkGetMemoryWin32HandleKHR explicitly. +For more information, see \ref quick_start_initialization_importing_vulkan_functions. - item->size += nextItem->size; - --m_FreeCount; - m_Suballocations.erase(nextItem); -} +\subsection other_api_interop_exporting_preparations Preparations -void VmaAllocation::RegisterFreeSuballocation(VmaSuballocationList::iterator item) -{ - VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE); - VMA_ASSERT(item->size > 0); +You can find example usage among tests, in file "Tests.cpp", function `TestWin32Handles()`. 
- if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER) - { - if(m_FreeSuballocationsBySize.empty()) - m_FreeSuballocationsBySize.push_back(item); - else - { - VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess( - m_FreeSuballocationsBySize.data(), - m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(), - item, - VmaSuballocationItemSizeLess()); - size_t index = it - m_FreeSuballocationsBySize.data(); - VectorInsert(m_FreeSuballocationsBySize, index, item); - } - } -} +To use the extenion, buffers need to be created with `VkExternalMemoryBufferCreateInfoKHR` attached to their `pNext` chain, +and memory allocations need to be made with `VkExportMemoryAllocateInfoKHR` attached to their `pNext` chain. +To make use of them, you need to use \ref custom_memory_pools. Example: -void VmaAllocation::UnregisterFreeSuballocation(VmaSuballocationList::iterator item) -{ - VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE); - VMA_ASSERT(item->size > 0); +\code +constexpr VkExternalMemoryHandleTypeFlagsKHR handleType = + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR; - if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER) - { - VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess( - m_FreeSuballocationsBySize.data(), - m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(), - item, - VmaSuballocationItemSizeLess()); - for(size_t index = it - m_FreeSuballocationsBySize.data(); - index < m_FreeSuballocationsBySize.size(); - ++index) - { - if(m_FreeSuballocationsBySize[index] == item) - { - VectorRemove(m_FreeSuballocationsBySize, index); - return; - } - VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found."); - } - VMA_ASSERT(0 && "Not found."); - } -} +// Define an example buffer and allocation parameters. +VkExternalMemoryBufferCreateInfoKHR externalMemBufCreateInfo = { + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR, + nullptr, + handleType +}; +VkBufferCreateInfo exampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +exampleBufCreateInfo.size = 0x10000; // Doesn't matter here. +exampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; +exampleBufCreateInfo.pNext = &externalMemBufCreateInfo; + +VmaAllocationCreateInfo exampleAllocCreateInfo = {}; +exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; + +// Find memory type index to use for the custom pool. +uint32_t memTypeIndex; +VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_Allocator, + &exampleBufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex); +// Check res... + +// Create a custom pool. +constexpr static VkExportMemoryAllocateInfoKHR exportMemAllocInfo = { + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR, + nullptr, + handleType +}; +VmaPoolCreateInfo poolCreateInfo = {}; +poolCreateInfo.memoryTypeIndex = memTypeIndex; +poolCreateInfo.pMemoryAllocateNext = (void*)&exportMemAllocInfo; + +VmaPool pool; +res = vmaCreatePool(g_Allocator, &poolCreateInfo, &pool); +// Check res... + +// YOUR OTHER CODE COMES HERE.... + +// At the end, don't forget to destroy it! +vmaDestroyPool(g_Allocator, pool); +\endcode + +Note that the structure passed as VmaPoolCreateInfo::pMemoryAllocateNext must remain alive and unchanged +for the whole lifetime of the custom pool, because it will be used when the pool allocates a new device memory block. +No copy is made internally. This is why variable `exportMemAllocInfo` is defined as static. 
+ +If you want to export all memory allocated by VMA from certain memory types, +including dedicated allocations and allocations made from default pools, +an alternative solution is to fill in VmaAllocatorCreateInfo::pTypeExternalMemoryHandleTypes. +It should point to an array with `VkExternalMemoryHandleTypeFlagsKHR` to be automatically passed by the library +through `VkExportMemoryAllocateInfoKHR` on each allocation made from a specific memory type. +You should not mix these two methods in a way that allows to apply both to the same memory type. +Otherwise, `VkExportMemoryAllocateInfoKHR` structure would be attached twice to the `pNext` chain of `VkMemoryAllocateInfo`. + +\subsection other_api_interop_exporting_memory_allocation Memory allocation + +Finally, you can create a buffer with an allocation out of the custom pool. +The buffer should use same flags as the sample buffer used to find the memory type. +It should also specify `VkExternalMemoryBufferCreateInfoKHR` in its `pNext` chain. + +\code +VkExternalMemoryBufferCreateInfoKHR externalMemBufCreateInfo = { + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR, + nullptr, + handleType +}; +VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +bufCreateInfo.size = // Your desired buffer size. +bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; +bufCreateInfo.pNext = &externalMemBufCreateInfo; -static void InitStatInfo(VmaStatInfo& outInfo) -{ - memset(&outInfo, 0, sizeof(outInfo)); - outInfo.SuballocationSizeMin = UINT64_MAX; - outInfo.UnusedRangeSizeMin = UINT64_MAX; -} +VmaAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.pool = pool; // It is enough to set this one member. -static void CalcAllocationStatInfo(VmaStatInfo& outInfo, const VmaAllocation& alloc) -{ - outInfo.AllocationCount = 1; +VkBuffer buf; +VmaAllocation alloc; +res = vmaCreateBuffer(g_Allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr); +// Check res... - const uint32_t rangeCount = (uint32_t)alloc.m_Suballocations.size(); - outInfo.SuballocationCount = rangeCount - alloc.m_FreeCount; - outInfo.UnusedRangeCount = alloc.m_FreeCount; - - outInfo.UnusedBytes = alloc.m_SumFreeSize; - outInfo.UsedBytes = alloc.m_Size - outInfo.UnusedBytes; +// YOUR OTHER CODE COMES HERE.... - outInfo.SuballocationSizeMin = UINT64_MAX; - outInfo.SuballocationSizeMax = 0; - outInfo.UnusedRangeSizeMin = UINT64_MAX; - outInfo.UnusedRangeSizeMax = 0; +// At the end, don't forget to destroy it! +vmaDestroyBuffer(g_Allocator, buf, alloc); +\endcode - for(VmaSuballocationList::const_iterator suballocItem = alloc.m_Suballocations.cbegin(); - suballocItem != alloc.m_Suballocations.cend(); - ++suballocItem) - { - const VmaSuballocation& suballoc = *suballocItem; - if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) - { - outInfo.SuballocationSizeMin = VMA_MIN(outInfo.SuballocationSizeMin, suballoc.size); - outInfo.SuballocationSizeMax = VMA_MAX(outInfo.SuballocationSizeMax, suballoc.size); - } - else - { - outInfo.UnusedRangeSizeMin = VMA_MIN(outInfo.UnusedRangeSizeMin, suballoc.size); - outInfo.UnusedRangeSizeMax = VMA_MAX(outInfo.UnusedRangeSizeMax, suballoc.size); - } - } -} +If you need each allocation to have its own device memory block and start at offset 0, you can still do +by using #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT flag. It works also with custom pools. -// Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo. 
-static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo) -{ - inoutInfo.AllocationCount += srcInfo.AllocationCount; - inoutInfo.SuballocationCount += srcInfo.SuballocationCount; - inoutInfo.UnusedRangeCount += srcInfo.UnusedRangeCount; - inoutInfo.UsedBytes += srcInfo.UsedBytes; - inoutInfo.UnusedBytes += srcInfo.UnusedBytes; - inoutInfo.SuballocationSizeMin = VMA_MIN(inoutInfo.SuballocationSizeMin, srcInfo.SuballocationSizeMin); - inoutInfo.SuballocationSizeMax = VMA_MAX(inoutInfo.SuballocationSizeMax, srcInfo.SuballocationSizeMax); - inoutInfo.UnusedRangeSizeMin = VMA_MIN(inoutInfo.UnusedRangeSizeMin, srcInfo.UnusedRangeSizeMin); - inoutInfo.UnusedRangeSizeMax = VMA_MAX(inoutInfo.UnusedRangeSizeMax, srcInfo.UnusedRangeSizeMax); -} +Alternatively, you can use convenient functions vmaCreateDedicatedBuffer(), vmaCreateDedicatedImage() that +always allocate dedicated memory for the buffer/image created, and also allow specifying custom `pNext` chain +for the `VkMemoryAllocateInfo` structure. -static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo) -{ - inoutInfo.SuballocationSizeAvg = (inoutInfo.SuballocationCount > 0) ? - VmaRoundDiv(inoutInfo.UsedBytes, inoutInfo.SuballocationCount) : 0; - inoutInfo.UnusedRangeSizeAvg = (inoutInfo.UnusedRangeCount > 0) ? - VmaRoundDiv(inoutInfo.UnusedBytes, inoutInfo.UnusedRangeCount) : 0; -} +\subsection other_api_interop_exporting_exporting_win32_handle Exporting Win32 handle -VmaAllocationVector::VmaAllocationVector(VmaAllocator hAllocator) : - m_hAllocator(hAllocator), - m_Allocations(VmaStlAllocator(hAllocator->GetAllocationCallbacks())) -{ -} +After the allocation is created, you can acquire a Win32 `HANDLE` to the `VkDeviceMemory` block it belongs to. +VMA function vmaGetMemoryWin32Handle2() is a replacement of the Vulkan function `vkGetMemoryWin32HandleKHR`. -VmaAllocationVector::~VmaAllocationVector() -{ - for(size_t i = m_Allocations.size(); i--; ) - { - m_Allocations[i]->Destroy(m_hAllocator); - vma_delete(m_hAllocator, m_Allocations[i]); - } -} +\code +HANDLE handle; +res = vmaGetMemoryWin32Handle2(g_Allocator, alloc, handleType, nullptr, &handle); +// Check res... -size_t VmaAllocationVector::Free(const VkMappedMemoryRange* pMemory) -{ - for(uint32_t allocIndex = 0; allocIndex < m_Allocations.size(); ++allocIndex) - { - VmaAllocation* const pAlloc = m_Allocations[allocIndex]; - VMA_ASSERT(pAlloc); - if(pAlloc->m_hMemory == pMemory->memory) - { - pAlloc->Free(pMemory); - VMA_HEAVY_ASSERT(pAlloc->Validate()); - return allocIndex; - } - } +// YOUR OTHER CODE COMES HERE.... - return (size_t)-1; -} +// At the end, you must close the handle. +CloseHandle(handle); +\endcode -void VmaAllocationVector::IncrementallySortAllocations() -{ - // Bubble sort only until first swap. - for(size_t i = 1; i < m_Allocations.size(); ++i) - { - if(m_Allocations[i - 1]->m_SumFreeSize > m_Allocations[i]->m_SumFreeSize) - { - VMA_SWAP(m_Allocations[i - 1], m_Allocations[i]); - return; - } - } -} +Documentation of the VK_KHR_external_memory_win32 extension states that: -#if VMA_STATS_STRING_ENABLED +> If handleType is defined as an NT handle, vkGetMemoryWin32HandleKHR must be called no more than once for each valid unique combination of memory and handleType. 
-void VmaAllocationVector::PrintDetailedMap(class VmaStringBuilder& sb) const -{ - for(size_t i = 0; i < m_Allocations.size(); ++i) - { - if(i > 0) - sb.Add(",\n\t\t"); - else - sb.Add("\n\t\t"); - m_Allocations[i]->PrintDetailedMap(sb); - } -} +This is ensured automatically inside VMA. +If `VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT` is used as the handle type, +or other NT handle types, +the library fetches the handle on first use, remembers it internally, and closes it when the memory block or dedicated allocation is destroyed. +Every time you call vmaGetMemoryWin32Handle2(), VMA calls `DuplicateHandle` and returns a new handle that you need to close. +For further information, please check the documentation of this function. -#endif // #if VMA_STATS_STRING_ENABLED +\subsection other_api_interop_exporting_custom_alignment Custom alignment -void VmaAllocationVector::AddStats(VmaStats* pStats, uint32_t memTypeIndex, uint32_t memHeapIndex) const -{ - for(uint32_t allocIndex = 0; allocIndex < m_Allocations.size(); ++allocIndex) - { - const VmaAllocation* const pAlloc = m_Allocations[allocIndex]; - VMA_ASSERT(pAlloc); - VMA_HEAVY_ASSERT(pAlloc->Validate()); - VmaStatInfo allocationStatInfo; - CalcAllocationStatInfo(allocationStatInfo, *pAlloc); - VmaAddStatInfo(pStats->total, allocationStatInfo); - VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo); - VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo); - } -} +Buffers or images exported to a different API like OpenGL may require a different alignment, +higher than the one used by the library automatically, queried from functions like `vkGetBufferMemoryRequirements`. +To impose such alignment: -//////////////////////////////////////////////////////////////////////////////// -// VmaAllocator_T +You can create \ref custom_memory_pools for such allocations. +Set VmaPoolCreateInfo::minAllocationAlignment member to the minimum alignment required for each allocation +to be made out of this pool. +The alignment actually used will be the maximum of this member and the alignment returned for the specific buffer or image +from a function like `vkGetBufferMemoryRequirements`, which is called by VMA automatically. -VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) : - m_PhysicalDevice(pCreateInfo->physicalDevice), - m_hDevice(pCreateInfo->device), - m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL), - m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ? - *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks), - m_PreferredLargeHeapBlockSize(0), - m_PreferredSmallHeapBlockSize(0), - m_BufferToMemoryMap(VmaStlAllocator< VmaPair >(pCreateInfo->pAllocationCallbacks)), - m_ImageToMemoryMap(VmaStlAllocator< VmaPair >(pCreateInfo->pAllocationCallbacks)) -{ - VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device); +If you want to create a buffer/image/allocate memory with a specific minimum alignment out of default pools, +you can use VmaAllocationCreateInfo::minAlignment. 
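+For example, a minimal sketch of such a custom pool that guarantees a 4 KB minimum alignment; how
+`memTypeIndex` is found (e.g. with vmaFindMemoryTypeIndexForBufferInfo()) is assumed to follow the
+pattern shown earlier:
+
+\code
+VmaPoolCreateInfo poolCreateInfo = {};
+poolCreateInfo.memoryTypeIndex = memTypeIndex;
+// Every allocation made from this pool will be aligned to at least 4096 bytes.
+poolCreateInfo.minAllocationAlignment = 4096;
+
+VmaPool pool;
+VkResult res = vmaCreatePool(g_Allocator, &poolCreateInfo, &pool);
+// Check res...
+\endcode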
- memset(&m_MemProps, 0, sizeof(m_MemProps)); - memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties)); - - memset(&m_pAllocations, 0, sizeof(m_pAllocations)); - memset(&m_HasEmptyAllocation, 0, sizeof(m_HasEmptyAllocation)); - memset(&m_pOwnAllocations, 0, sizeof(m_pOwnAllocations)); +Note the problem of alignment affects only resources placed inside bigger `VkDeviceMemory` blocks and not dedicated +allocations, as these, by definition, always have alignment = 0 because the resource is bound to the beginning of its dedicated block. +You can ensure that an allocation is created as dedicated by using #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. +Contrary to Direct3D 12, Vulkan doesn't have a concept of alignment of the entire memory block passed on its allocation. - m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ? - pCreateInfo->preferredLargeHeapBlockSize : VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE; - m_PreferredSmallHeapBlockSize = (pCreateInfo->preferredSmallHeapBlockSize != 0) ? - pCreateInfo->preferredSmallHeapBlockSize : VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE; +\subsection other_api_interop_exporting_extended_allocation_information Extended allocation information - vkGetPhysicalDeviceProperties(m_PhysicalDevice, &m_PhysicalDeviceProperties); - vkGetPhysicalDeviceMemoryProperties(m_PhysicalDevice, &m_MemProps); +If you want to rely on VMA to allocate your buffers and images inside larger memory blocks, +but you need to know the size of the entire block and whether the allocation was made +with its own dedicated memory, use function vmaGetAllocationInfo2() to retrieve +extended allocation information in structure #VmaAllocationInfo2, which provides extra members: +`blockSize` and `dedicatedMemory`. - for(size_t i = 0; i < GetMemoryTypeCount(); ++i) - { - m_pAllocations[i] = vma_new(this, VmaAllocationVector)(this); - m_pOwnAllocations[i] = vma_new(this, OwnAllocationVectorType)(VmaStlAllocator(GetAllocationCallbacks())); - } -} +\section other_api_interop_importing_memory Importing memory -VmaAllocator_T::~VmaAllocator_T() -{ - for(VMA_MAP_TYPE(VkImage, VkMappedMemoryRange)::iterator it = m_ImageToMemoryMap.begin(); - it != m_ImageToMemoryMap.end(); - ++it) - { - vkDestroyImage(m_hDevice, it->first, GetAllocationCallbacks()); - } +Importing external memory requires attaching an extra structure like `VkImportMemoryWin32HandleInfoKHR` +to the `pNext` chain of `VkMemoryAllocateInfo` structure. +VMA offers support for it by providing functions that allocate memory, create a buffer or an image +always with a dedicated `VkDeviceMemory` block and accept custom `pNext` pointer: +vmaAllocateDedicatedMemory(), vmaCreateDedicatedBuffer(), vmaCreateDedicatedImage(). +Example: - for(VMA_MAP_TYPE(VkBuffer, VkMappedMemoryRange)::iterator it = m_BufferToMemoryMap.begin(); - it != m_BufferToMemoryMap.end(); - ++it) - { - vkDestroyBuffer(m_hDevice, it->first, GetAllocationCallbacks()); - } +\code +constexpr VkExternalMemoryHandleTypeFlagBits handleType = + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT; + +VkExternalMemoryBufferCreateInfoKHR externalMemBufCreateInfo = { + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR }; +externalMemBufCreateInfo.handleTypes = handleType; + +VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +bufCreateInfo.pNext = &externalMemBufCreateInfo; // !!! +bufCreateInfo.size = ... +bufCreateInfo.usage = ... 
+ +VkImportMemoryWin32HandleInfoKHR importInfo = { + VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR }; +importInfo.handleType = handleType; +importInfo.handle = myExternalHandleToImport; + +VmaAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; + +VkBuffer buf = VK_NULL_HANDLE; +VmaAllocation alloc = VK_NULL_HANDLE; +VkResult res = vmaCreateDedicatedBuffer(allocator, &bufCreateInfo, &allocCreateInfo, + &importInfo, // pMemoryAllocateNext !!! + &buf, &alloc, nullptr); +// Check res... +\endcode - for(uint32_t typeIndex = 0; typeIndex < GetMemoryTypeCount(); ++typeIndex) - { - OwnAllocationVectorType* pOwnAllocations = m_pOwnAllocations[typeIndex]; - VMA_ASSERT(pOwnAllocations); - for(size_t allocationIndex = 0; allocationIndex < pOwnAllocations->size(); ++allocationIndex) - { - const VmaOwnAllocation& ownAlloc = (*pOwnAllocations)[allocationIndex]; - vkFreeMemory(m_hDevice, ownAlloc.m_hMemory, GetAllocationCallbacks()); - } - } - for(size_t i = GetMemoryTypeCount(); i--; ) - { - vma_delete(this, m_pAllocations[i]); - vma_delete(this, m_pOwnAllocations[i]); - } -} -VkDeviceSize VmaAllocator_T::GetPreferredBlockSize(uint32_t memTypeIndex) const +\page usage_patterns Recommended usage patterns + +Vulkan gives great flexibility in memory allocation. +This chapter shows the most common patterns. + +See also slides from talk: +[Sawicki, Adam. Advanced Graphics Techniques Tutorial: Memory management in Vulkan and DX12. Game Developers Conference, 2018](https://www.gdcvault.com/play/1025458/Advanced-Graphics-Techniques-Tutorial-New) + + +\section usage_patterns_gpu_only GPU-only resource + +When: +Any resources that you frequently write and read on GPU, +e.g. images used as color attachments (aka "render targets"), depth-stencil attachments, +images/buffers used as storage image/buffer (aka "Unordered Access View (UAV)"). + +What to do: +Let the library select the optimal memory type, which will likely have `VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT`. + +\code +VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO }; +imgCreateInfo.imageType = VK_IMAGE_TYPE_2D; +imgCreateInfo.extent.width = 3840; +imgCreateInfo.extent.height = 2160; +imgCreateInfo.extent.depth = 1; +imgCreateInfo.mipLevels = 1; +imgCreateInfo.arrayLayers = 1; +imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM; +imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL; +imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; +imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; +imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT; + +VmaAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; +allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; +allocCreateInfo.priority = 1.0f; + +VkImage img; +VmaAllocation alloc; +vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo, &img, &alloc, nullptr); +\endcode + +Also consider: +Consider creating them as dedicated allocations using #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT, +especially if they are large or if you plan to destroy and recreate them with different sizes +e.g. when display resolution changes. +Prefer to create such resources first and all other GPU resources (like textures and vertex buffers) later. +When VK_EXT_memory_priority extension is enabled, it is also worth setting high priority to such allocation +to decrease chances to be evicted to system memory by the operating system. 
+
+\section usage_patterns_staging_copy_upload Staging copy for upload
+
+When:
+A "staging" buffer that you want to map and fill from CPU code, then use as a source of transfer
+to some GPU resource.
+
+What to do:
+Use flag #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT.
+Let the library select the optimal memory type, which will always have `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT`.
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 65536;
+bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT |
+    VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+VkBuffer buf;
+VmaAllocation alloc;
+VmaAllocationInfo allocInfo;
+vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
+
+...
+
+memcpy(allocInfo.pMappedData, myData, myDataSize);
+\endcode
+
+Also consider:
+You can map the allocation using vmaMapMemory() or you can create it as persistently mapped
+using #VMA_ALLOCATION_CREATE_MAPPED_BIT, as in the example above.
+
+
+\section usage_patterns_readback Readback
+
+When:
+Buffers for data written by or transferred from the GPU that you want to read back on the CPU,
+e.g. results of some computations.
+
+What to do:
+Use flag #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT.
+Let the library select the optimal memory type, which will always have `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT`
+and `VK_MEMORY_PROPERTY_HOST_CACHED_BIT`.
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 65536;
+bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT |
+    VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+VkBuffer buf;
+VmaAllocation alloc;
+VmaAllocationInfo allocInfo;
+vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
+
+...
+
+const float* downloadedData = (const float*)allocInfo.pMappedData;
+\endcode
+
+
+\section usage_patterns_advanced_data_uploading Advanced data uploading
+
+For resources that you frequently write on CPU via mapped pointer and
+frequently read on GPU e.g. as a uniform buffer (also called "dynamic"), multiple options are possible:
+
+-# Easiest solution is to have one copy of the resource in `HOST_VISIBLE` memory,
+   even if it means system RAM (not `DEVICE_LOCAL`) on systems with a discrete graphics card,
+   and make the device reach out to that resource directly.
+   - Reads performed by the device will then go through PCI Express bus.
+     The performance of this access may be limited, but it may be fine depending on the size
+     of this resource (whether it is small enough to quickly end up in GPU cache) and the sparsity
+     of access.
+-# On systems with unified memory (e.g. AMD APU or Intel integrated graphics, mobile chips),
+   a memory type may be available that is both `HOST_VISIBLE` (available for mapping) and `DEVICE_LOCAL`
+   (fast to access from the GPU). Then, it is likely the best choice for such type of resource.
+-# Systems with a discrete graphics card and separate video memory may or may not expose
+   a memory type that is both `HOST_VISIBLE` and `DEVICE_LOCAL`, also known as Base Address Register (BAR).
+ If they do, it represents a piece of VRAM (or entire VRAM, if ReBAR is enabled in the motherboard BIOS) + that is available to CPU for mapping. + - Writes performed by the host to that memory go through PCI Express bus. + The performance of these writes may be limited, but it may be fine, especially on PCIe 4.0, + as long as rules of using uncached and write-combined memory are followed - only sequential writes and no reads. +-# Finally, you may need or prefer to create a separate copy of the resource in `DEVICE_LOCAL` memory, + a separate "staging" copy in `HOST_VISIBLE` memory and perform an explicit transfer command between them. + +Thankfully, VMA offers an aid to create and use such resources in the the way optimal +for the current Vulkan device. To help the library make the best choice, +use flag #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT together with +#VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT. +It will then prefer a memory type that is both `DEVICE_LOCAL` and `HOST_VISIBLE` (integrated memory or BAR), +but if no such memory type is available or allocation from it fails +(PC graphics cards have only 256 MB of BAR by default, unless ReBAR is supported and enabled in BIOS), +it will fall back to `DEVICE_LOCAL` memory for fast GPU access. +It is then up to you to detect that the allocation ended up in a memory type that is not `HOST_VISIBLE`, +so you need to create another "staging" allocation and perform explicit transfers. + +\code +VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +bufCreateInfo.size = 65536; +bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; + +VmaAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; +allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | + VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT | + VMA_ALLOCATION_CREATE_MAPPED_BIT; + +VkBuffer buf; +VmaAllocation alloc; +VmaAllocationInfo allocInfo; +VkResult result = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo); +// Check result... + +VkMemoryPropertyFlags memPropFlags; +vmaGetAllocationMemoryProperties(allocator, alloc, &memPropFlags); + +if(memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) { - VkDeviceSize heapSize = m_MemProps.memoryHeaps[m_MemProps.memoryTypes[memTypeIndex].heapIndex].size; - return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ? - m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize; + // The Allocation ended up in a mappable memory. + // Calling vmaCopyMemoryToAllocation() does vmaMapMemory(), memcpy(), vmaUnmapMemory(), and vmaFlushAllocation(). + result = vmaCopyMemoryToAllocation(allocator, myData, alloc, 0, myDataSize); + // Check result... + + VkBufferMemoryBarrier bufMemBarrier = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER }; + bufMemBarrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT; + bufMemBarrier.dstAccessMask = VK_ACCESS_UNIFORM_READ_BIT; + bufMemBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + bufMemBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + bufMemBarrier.buffer = buf; + bufMemBarrier.offset = 0; + bufMemBarrier.size = VK_WHOLE_SIZE; + + // It's important to insert a buffer memory barrier here to ensure writing to the buffer has finished. 
+ vkCmdPipelineBarrier(cmdBuf, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, + 0, 0, nullptr, 1, &bufMemBarrier, 0, nullptr); } - -VkResult VmaAllocator_T::AllocateMemoryOfType( - const VkMemoryRequirements& vkMemReq, - const VmaMemoryRequirements& vmaMemReq, - uint32_t memTypeIndex, - VmaSuballocationType suballocType, - VkMappedMemoryRange* pMemory) +else { - VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size); + // Allocation ended up in a non-mappable memory - a transfer using a staging buffer is required. + VkBufferCreateInfo stagingBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; + stagingBufCreateInfo.size = 65536; + stagingBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT; + + VmaAllocationCreateInfo stagingAllocCreateInfo = {}; + stagingAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; + stagingAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | + VMA_ALLOCATION_CREATE_MAPPED_BIT; + + VkBuffer stagingBuf; + VmaAllocation stagingAlloc; + VmaAllocationInfo stagingAllocInfo; + result = vmaCreateBuffer(allocator, &stagingBufCreateInfo, &stagingAllocCreateInfo, + &stagingBuf, &stagingAlloc, &stagingAllocInfo); + // Check result... + + // Calling vmaCopyMemoryToAllocation() does vmaMapMemory(), memcpy(), vmaUnmapMemory(), and vmaFlushAllocation(). + result = vmaCopyMemoryToAllocation(allocator, myData, stagingAlloc, 0, myDataSize); + // Check result... + + VkBufferMemoryBarrier bufMemBarrier = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER }; + bufMemBarrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT; + bufMemBarrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT; + bufMemBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + bufMemBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + bufMemBarrier.buffer = stagingBuf; + bufMemBarrier.offset = 0; + bufMemBarrier.size = VK_WHOLE_SIZE; + + // Insert a buffer memory barrier to make sure writing to the staging buffer has finished. + vkCmdPipelineBarrier(cmdBuf, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, + 0, 0, nullptr, 1, &bufMemBarrier, 0, nullptr); + + VkBufferCopy bufCopy = { + 0, // srcOffset + 0, // dstOffset, + myDataSize, // size + }; - pMemory->sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE; - pMemory->pNext = VMA_NULL; - pMemory->size = vkMemReq.size; + vkCmdCopyBuffer(cmdBuf, stagingBuf, buf, 1, &bufCopy); - const VkDeviceSize preferredBlockSize = GetPreferredBlockSize(memTypeIndex); - // Heuristics: Allocate own memory if requested size if greater than half of preferred block size. 
- const bool ownMemory = - vmaMemReq.ownMemory || - VMA_DEBUG_ALWAYS_OWN_MEMORY || - ((vmaMemReq.neverAllocate == false) && (vkMemReq.size > preferredBlockSize / 2)); + VkBufferMemoryBarrier bufMemBarrier2 = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER }; + bufMemBarrier2.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT; + bufMemBarrier2.dstAccessMask = VK_ACCESS_UNIFORM_READ_BIT; // We created a uniform buffer + bufMemBarrier2.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + bufMemBarrier2.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + bufMemBarrier2.buffer = buf; + bufMemBarrier2.offset = 0; + bufMemBarrier2.size = VK_WHOLE_SIZE; - if(ownMemory) - { - if(vmaMemReq.neverAllocate) - return VK_ERROR_OUT_OF_DEVICE_MEMORY; - else - return AllocateOwnMemory(vkMemReq.size, suballocType, memTypeIndex, pMemory); - } - else - { - VmaMutexLock lock(m_AllocationsMutex[memTypeIndex]); - VmaAllocationVector* const allocationVector = m_pAllocations[memTypeIndex]; - VMA_ASSERT(allocationVector); + // Make sure copying from staging buffer to the actual buffer has finished by inserting a buffer memory barrier. + vkCmdPipelineBarrier(cmdBuf, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, + 0, 0, nullptr, 1, &bufMemBarrier2, 0, nullptr); +} +\endcode - // 1. Search existing allocations. - // Forward order - prefer blocks with smallest amount of free space. - for(size_t allocIndex = 0; allocIndex < allocationVector->m_Allocations.size(); ++allocIndex ) - { - VmaAllocation* const pAlloc = allocationVector->m_Allocations[allocIndex]; - VMA_ASSERT(pAlloc); - VmaAllocationRequest allocRequest = {}; - // Check if can allocate from pAlloc. - if(pAlloc->CreateAllocationRequest( - GetBufferImageGranularity(), - vkMemReq.size, - vkMemReq.alignment, - suballocType, - &allocRequest)) - { - // We no longer have an empty Allocation. - if(pAlloc->IsEmpty()) - m_HasEmptyAllocation[memTypeIndex] = false; - // Allocate from this pAlloc. - pAlloc->Alloc(allocRequest, suballocType, vkMemReq.size); - // Return VkDeviceMemory and offset (size already filled above). - pMemory->memory = pAlloc->m_hMemory; - pMemory->offset = allocRequest.offset; - VMA_HEAVY_ASSERT(pAlloc->Validate()); - VMA_DEBUG_LOG(" Returned from existing allocation #%u", (uint32_t)allocIndex); - return VK_SUCCESS; - } - } +\section usage_patterns_other_use_cases Other use cases - // 2. Create new Allocation. - if(vmaMemReq.neverAllocate) - { - VMA_DEBUG_LOG(" FAILED due to VmaMemoryRequirements::neverAllocate"); - return VK_ERROR_OUT_OF_DEVICE_MEMORY; - } - else - { - // Start with full preferredBlockSize. - VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO }; - allocInfo.memoryTypeIndex = memTypeIndex; - allocInfo.allocationSize = preferredBlockSize; - VkDeviceMemory mem = VK_NULL_HANDLE; - VkResult res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem); - if(res < 0) - { - // 3. Try half the size. - allocInfo.allocationSize /= 2; - if(allocInfo.allocationSize >= vkMemReq.size) - { - res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem); - if(res < 0) - { - // 4. Try quarter the size. - allocInfo.allocationSize /= 2; - if(allocInfo.allocationSize >= vkMemReq.size) - { - res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem); - } - } - } - } - if(res < 0) - { - // 5. Try OwnAlloc. 
- res = AllocateOwnMemory(vkMemReq.size, suballocType, memTypeIndex, pMemory); - if(res == VK_SUCCESS) - { - // Succeeded: AllocateOwnMemory function already filld pMemory, nothing more to do here. - VMA_DEBUG_LOG(" Allocated as OwnMemory"); - return VK_SUCCESS; - } - else - { - // Everything failed: Return error code. - VMA_DEBUG_LOG(" vkAllocateMemory FAILED"); - return res; - } - } +Here are some other, less obvious use cases and their recommended settings: - // New VkDeviceMemory successfully created. Create new Allocation for it. - VmaAllocation* const pAlloc = vma_new(this, VmaAllocation)(this); - pAlloc->Init(mem, allocInfo.allocationSize); +- An image that is used only as transfer source and destination, but it should stay on the device, + as it is used to temporarily store a copy of some texture, e.g. from the current to the next frame, + for temporal antialiasing or other temporal effects. + - Use `VkImageCreateInfo::usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT` + - Use VmaAllocationCreateInfo::usage = #VMA_MEMORY_USAGE_AUTO +- An image that is used only as transfer source and destination, but it should be placed + in the system RAM despite it doesn't need to be mapped, because it serves as a "swap" copy to evict + least recently used textures from VRAM. + - Use `VkImageCreateInfo::usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT` + - Use VmaAllocationCreateInfo::usage = #VMA_MEMORY_USAGE_AUTO_PREFER_HOST, + as VMA needs a hint here to differentiate from the previous case. +- A buffer that you want to map and write from the CPU, directly read from the GPU + (e.g. as a uniform or vertex buffer), but you have a clear preference to place it in device or + host memory due to its large size. + - Use `VkBufferCreateInfo::usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT` + - Use VmaAllocationCreateInfo::usage = #VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE or #VMA_MEMORY_USAGE_AUTO_PREFER_HOST + - Use VmaAllocationCreateInfo::flags = #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT - allocationVector->m_Allocations.push_back(pAlloc); - // Allocate from pAlloc. Because it is empty, allocRequest can be trivially filled. - VmaAllocationRequest allocRequest = {}; - allocRequest.freeSuballocationItem = pAlloc->m_Suballocations.begin(); - allocRequest.offset = 0; - pAlloc->Alloc(allocRequest, suballocType, vkMemReq.size); - pMemory->memory = mem; - pMemory->offset = allocRequest.offset; - VMA_HEAVY_ASSERT(pAlloc->Validate()); - VMA_DEBUG_LOG(" Created new allocation Size=%llu", allocInfo.allocationSize); - return VK_SUCCESS; - } - } -} +\page configuration Configuration -VkResult VmaAllocator_T::AllocateOwnMemory( - VkDeviceSize size, - VmaSuballocationType suballocType, - uint32_t memTypeIndex, - VkMappedMemoryRange* pMemory) -{ - VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO }; - allocInfo.memoryTypeIndex = memTypeIndex; - allocInfo.allocationSize = size; +Please check "CONFIGURATION SECTION" in the code to find macros that you can define +before each include of this file or change directly in this file to provide +your own implementation of basic facilities like assert, `min()` and `max()` functions, +mutex, atomic etc. - // Allocate VkDeviceMemory. 
- VmaOwnAllocation ownAlloc = {}; - ownAlloc.m_Size = size; - ownAlloc.m_Type = suballocType; - VkResult res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &ownAlloc.m_hMemory); - if(res < 0) - { - VMA_DEBUG_LOG(" vkAllocateMemory FAILED"); - return res; - } +For example, define `VMA_ASSERT(expr)` before including the library to provide +custom implementation of the assertion, compatible with your project. +By default it is defined to standard C `assert(expr)` in `_DEBUG` configuration +and empty otherwise. - // Register it in m_pOwnAllocations. - VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex]); - OwnAllocationVectorType* ownAllocations = m_pOwnAllocations[memTypeIndex]; - VMA_ASSERT(ownAllocations); - VmaOwnAllocation* const pOwnAllocationsBeg = ownAllocations->data(); - VmaOwnAllocation* const pOwnAllocationsEnd = pOwnAllocationsBeg + ownAllocations->size(); - const size_t indexToInsert = VmaBinaryFindFirstNotLess( - pOwnAllocationsBeg, - pOwnAllocationsEnd, - ownAlloc, - VmaOwnAllocationMemoryHandleLess()) - pOwnAllocationsBeg; - VectorInsert(*ownAllocations, indexToInsert, ownAlloc); +Similarly, you can define `VMA_LEAK_LOG_FORMAT` macro to enable printing of leaked (unfreed) allocations, +including their names and other parameters. Example: - // Return parameters of the allocation. - pMemory->sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE; - pMemory->pNext = VMA_NULL; - pMemory->memory = ownAlloc.m_hMemory; - pMemory->offset = 0; - pMemory->size = size; +\code +#define VMA_LEAK_LOG_FORMAT(format, ...) do { \ + printf((format), __VA_ARGS__); \ + printf("\n"); \ + } while(false) +\endcode - VMA_DEBUG_LOG(" Allocated OwnMemory MemoryTypeIndex=#%u", memTypeIndex); +\section config_Vulkan_functions Pointers to Vulkan functions - return VK_SUCCESS; -} +There are multiple ways to import pointers to Vulkan functions in the library. +In the simplest case you don't need to do anything. +If the compilation or linking of your program or the initialization of the #VmaAllocator +doesn't work for you, you can try to reconfigure it. -VkResult VmaAllocator_T::AllocateMemory( - const VkMemoryRequirements& vkMemReq, - const VmaMemoryRequirements& vmaMemReq, - VmaSuballocationType suballocType, - VkMappedMemoryRange* pMemory, - uint32_t* pMemoryTypeIndex) -{ - if(vmaMemReq.ownMemory && vmaMemReq.neverAllocate) - { - VMA_ASSERT(0 && "Specifying VmaMemoryRequirements::ownMemory && VmaMemoryRequirements::neverAllocate makes no sense."); - return VK_ERROR_OUT_OF_DEVICE_MEMORY; - } +First, the allocator tries to fetch pointers to Vulkan functions linked statically, +like this: - // Bit mask of memory Vulkan types acceptable for this allocation. - uint32_t memoryTypeBits = vkMemReq.memoryTypeBits; - uint32_t memTypeIndex = UINT32_MAX; - VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &vmaMemReq, &memTypeIndex); - if(res == VK_SUCCESS) - { - res = AllocateMemoryOfType(vkMemReq, vmaMemReq, memTypeIndex, suballocType, pMemory); - // Succeeded on first try. - if(res == VK_SUCCESS) - { - if(pMemoryTypeIndex != VMA_NULL) - *pMemoryTypeIndex = memTypeIndex; - return res; - } - // Allocation from this memory type failed. Try other compatible memory types. - else - { - for(;;) - { - // Remove old memTypeIndex from list of possibilities. - memoryTypeBits &= ~(1u << memTypeIndex); - // Find alternative memTypeIndex. 
- res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &vmaMemReq, &memTypeIndex); - if(res == VK_SUCCESS) - { - res = AllocateMemoryOfType(vkMemReq, vmaMemReq, memTypeIndex, suballocType, pMemory); - // Allocation from this alternative memory type succeeded. - if(res == VK_SUCCESS) - { - if(pMemoryTypeIndex != VMA_NULL) - *pMemoryTypeIndex = memTypeIndex; - return res; - } - // else: Allocation from this memory type failed. Try next one - next loop iteration. - } - // No other matching memory type index could be found. - else - // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once. - return VK_ERROR_OUT_OF_DEVICE_MEMORY; - } - } - } - // Can't find any single memory type maching requirements. res is VK_ERROR_FEATURE_NOT_PRESENT. - else - return res; -} +\code +m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory; +\endcode -void VmaAllocator_T::FreeMemory(const VkMappedMemoryRange* pMemory) -{ - uint32_t memTypeIndex = 0; - bool found = false; - VmaAllocation* allocationToDelete = VMA_NULL; - // Check all memory types because we don't know which one does pMemory come from. - for(; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) - { - VmaMutexLock lock(m_AllocationsMutex[memTypeIndex]); - VmaAllocationVector* const pAllocationVector = m_pAllocations[memTypeIndex]; - VMA_ASSERT(pAllocationVector); - // Try to free pMemory from pAllocationVector. - const size_t allocIndex = pAllocationVector->Free(pMemory); - if(allocIndex != (size_t)-1) - { - VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", memTypeIndex); - found = true; - VmaAllocation* const pAlloc = pAllocationVector->m_Allocations[allocIndex]; - VMA_ASSERT(pAlloc); - // pAlloc became empty after this deallocation. - if(pAlloc->IsEmpty()) - { - // Already has empty Allocation. We don't want to have two, so delete this one. - if(m_HasEmptyAllocation[memTypeIndex]) - { - allocationToDelete = pAlloc; - VectorRemove(pAllocationVector->m_Allocations, allocIndex); - break; - } - // We now have first empty Allocation. - else - m_HasEmptyAllocation[memTypeIndex] = true; - } - // Must be called after allocIndex is used, because later it may become invalid! - pAllocationVector->IncrementallySortAllocations(); - break; - } - } - if(found) - { - // Destruction of a free Allocation. Deferred until this point, outside of mutex - // lock, for performance reason. - if(allocationToDelete != VMA_NULL) - { - VMA_DEBUG_LOG(" Deleted empty allocation"); - allocationToDelete->Destroy(this); - vma_delete(this, allocationToDelete); - } - return; - } +If you want to disable this feature, set configuration macro: `#define VMA_STATIC_VULKAN_FUNCTIONS 0`. - // pMemory not found in allocations. Try free it as Own Memory. - if(FreeOwnMemory(pMemory)) - return; +Second, you can provide the pointers yourself by setting member VmaAllocatorCreateInfo::pVulkanFunctions. +You can fetch them e.g. using functions `vkGetInstanceProcAddr` and `vkGetDeviceProcAddr` or +by using a helper library like [volk](https://github.com/zeux/volk). - // pMemory not found as Own Memory either. - VMA_ASSERT(0 && "Not found. Trying to free memory not allocated using this allocator (or some other bug)."); -} +Third, VMA tries to fetch remaining pointers that are still null by calling +`vkGetInstanceProcAddr` and `vkGetDeviceProcAddr` on its own. +You need to only fill in VmaVulkanFunctions::vkGetInstanceProcAddr and VmaVulkanFunctions::vkGetDeviceProcAddr. +Other pointers will be fetched automatically. 
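+For example, a minimal sketch of providing the two entry points yourself (here taken from the statically
+linked loader; when using a helper library like volk, pass the pointers it loaded instead):
+
+\code
+VmaVulkanFunctions vulkanFunctions = {};
+vulkanFunctions.vkGetInstanceProcAddr = vkGetInstanceProcAddr;
+vulkanFunctions.vkGetDeviceProcAddr = vkGetDeviceProcAddr;
+
+VmaAllocatorCreateInfo allocatorCreateInfo = {};
+// ... fill physicalDevice, device, instance, etc. ...
+allocatorCreateInfo.pVulkanFunctions = &vulkanFunctions;
+
+VmaAllocator allocator;
+VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &allocator);
+// Check res...
+\endcode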
+If you want to disable this feature, set configuration macro: `#define VMA_DYNAMIC_VULKAN_FUNCTIONS 0`. -void VmaAllocator_T::CalculateStats(VmaStats* pStats) -{ - InitStatInfo(pStats->total); - for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) - InitStatInfo(pStats->memoryType[i]); - for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) - InitStatInfo(pStats->memoryHeap[i]); - - for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) - { - VmaMutexLock allocationsLock(m_AllocationsMutex[memTypeIndex]); - const uint32_t heapIndex = m_MemProps.memoryTypes[memTypeIndex].heapIndex; - const VmaAllocationVector* const allocVector = m_pAllocations[memTypeIndex]; - VMA_ASSERT(allocVector); - allocVector->AddStats(pStats, memTypeIndex, heapIndex); - } +Finally, all the function pointers required by the library (considering selected +Vulkan version and enabled extensions) are checked with `VMA_ASSERT` if they are not null. - VmaPostprocessCalcStatInfo(pStats->total); - for(size_t i = 0; i < GetMemoryTypeCount(); ++i) - VmaPostprocessCalcStatInfo(pStats->memoryType[i]); - for(size_t i = 0; i < GetMemoryHeapCount(); ++i) - VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]); -} -bool VmaAllocator_T::FreeOwnMemory(const VkMappedMemoryRange* pMemory) -{ - VkDeviceMemory vkMemory = VK_NULL_HANDLE; +\section custom_memory_allocator Custom host memory allocator - // Check all memory types because we don't know which one does pMemory come from. - for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) - { - VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex]); - OwnAllocationVectorType* const pOwnAllocations = m_pOwnAllocations[memTypeIndex]; - VMA_ASSERT(pOwnAllocations); - VmaOwnAllocation* const pOwnAllocationsBeg = pOwnAllocations->data(); - VmaOwnAllocation* const pOwnAllocationsEnd = pOwnAllocationsBeg + pOwnAllocations->size(); - VmaOwnAllocation* const pOwnAllocationIt = VmaBinaryFindFirstNotLess( - pOwnAllocationsBeg, - pOwnAllocationsEnd, - pMemory->memory, - VmaOwnAllocationMemoryHandleLess()); - if((pOwnAllocationIt != pOwnAllocationsEnd) && - (pOwnAllocationIt->m_hMemory == pMemory->memory)) - { - VMA_ASSERT(pMemory->size == pOwnAllocationIt->m_Size && pMemory->offset == 0); - vkMemory = pOwnAllocationIt->m_hMemory; - const size_t ownAllocationIndex = pOwnAllocationIt - pOwnAllocationsBeg; - VectorRemove(*pOwnAllocations, ownAllocationIndex); - VMA_DEBUG_LOG(" Freed OwnMemory MemoryTypeIndex=%u", memTypeIndex); - break; - } - } +If you use custom allocator for CPU memory rather than default operator `new` +and `delete` from C++, you can make this library using your allocator as well +by filling optional member VmaAllocatorCreateInfo::pAllocationCallbacks. These +functions will be passed to Vulkan, as well as used by the library itself to +make any CPU-side allocations. - // Found. Free VkDeviceMemory deferred until this point, outside of mutex lock, - // for performance reason. - if(vkMemory != VK_NULL_HANDLE) - { - vkFreeMemory(m_hDevice, vkMemory, GetAllocationCallbacks()); - return true; - } - else - return false; -} +\section allocation_callbacks Device memory allocation callbacks -#if VMA_STATS_STRING_ENABLED +The library makes calls to `vkAllocateMemory()` and `vkFreeMemory()` internally. +You can setup callbacks to be informed about these calls, e.g. for the purpose +of gathering some statistics. To do it, fill optional member +VmaAllocatorCreateInfo::pDeviceMemoryCallbacks. 
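+For example, a minimal sketch of such callbacks used for simple logging; the function names are
+illustrative only, and the signatures are assumed to match PFN_vmaAllocateDeviceMemoryFunction /
+PFN_vmaFreeDeviceMemoryFunction:
+
+\code
+static void VKAPI_PTR MyVmaAllocateCallback(VmaAllocator allocator, uint32_t memoryType,
+    VkDeviceMemory memory, VkDeviceSize size, void* pUserData)
+{
+    printf("vkAllocateMemory: type %u, %llu bytes\n", memoryType, (unsigned long long)size);
+}
+
+static void VKAPI_PTR MyVmaFreeCallback(VmaAllocator allocator, uint32_t memoryType,
+    VkDeviceMemory memory, VkDeviceSize size, void* pUserData)
+{
+    printf("vkFreeMemory: type %u, %llu bytes\n", memoryType, (unsigned long long)size);
+}
+
+VmaDeviceMemoryCallbacks deviceMemoryCallbacks = {};
+deviceMemoryCallbacks.pfnAllocate = MyVmaAllocateCallback;
+deviceMemoryCallbacks.pfnFree = MyVmaFreeCallback;
+
+VmaAllocatorCreateInfo allocatorCreateInfo = {};
+// ... fill the other members ...
+allocatorCreateInfo.pDeviceMemoryCallbacks = &deviceMemoryCallbacks;
+\endcode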
-void VmaAllocator_T::PrintDetailedMap(VmaStringBuilder& sb) -{ - bool ownAllocationsStarted = false; - for(size_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) - { - VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex]); - OwnAllocationVectorType* const pOwnAllocVector = m_pOwnAllocations[memTypeIndex]; - VMA_ASSERT(pOwnAllocVector); - if(pOwnAllocVector->empty() == false) - { - if(ownAllocationsStarted) - sb.Add(",\n\t\"Type "); - else - { - sb.Add(",\n\"OwnAllocations\": {\n\t\"Type "); - ownAllocationsStarted = true; - } - sb.AddNumber(memTypeIndex); - sb.Add("\": ["); +\section heap_memory_limit Device heap memory limit - for(size_t i = 0; i < pOwnAllocVector->size(); ++i) - { - const VmaOwnAllocation& ownAlloc = (*pOwnAllocVector)[i]; - if(i > 0) - sb.Add(",\n\t\t{ \"Size\": "); - else - sb.Add("\n\t\t{ \"Size\": "); - sb.AddNumber(ownAlloc.m_Size); - sb.Add(", \"Type\": "); - sb.AddString(VMA_SUBALLOCATION_TYPE_NAMES[ownAlloc.m_Type]); - sb.Add(" }"); - } +When device memory of certain heap runs out of free space, new allocations may +fail (returning error code) or they may succeed, silently pushing some existing_ +memory blocks from GPU VRAM to system RAM (which degrades performance). This +behavior is implementation-dependent - it depends on GPU vendor and graphics +driver. - sb.Add("\n\t]"); - } - } - if(ownAllocationsStarted) - sb.Add("\n}"); +On AMD cards it can be controlled while creating Vulkan device object by using +VK_AMD_memory_overallocation_behavior extension, if available. - { - bool allocationsStarted = false; - for(size_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) - { - VmaMutexLock globalAllocationsLock(m_AllocationsMutex[memTypeIndex]); - if(m_pAllocations[memTypeIndex]->IsEmpty() == false) - { - if(allocationsStarted) - sb.Add(",\n\t\"Type "); - else - { - sb.Add(",\n\"Allocations\": {\n\t\"Type "); - allocationsStarted = true; - } - sb.AddNumber(memTypeIndex); - sb.Add("\": ["); +Alternatively, if you want to test how your program behaves with limited amount of Vulkan device +memory available without switching your graphics card to one that really has +smaller VRAM, you can use a feature of this library intended for this purpose. +To do it, fill optional member VmaAllocatorCreateInfo::pHeapSizeLimit. + + + +\page vk_khr_dedicated_allocation VK_KHR_dedicated_allocation - m_pAllocations[memTypeIndex]->PrintDetailedMap(sb); +VK_KHR_dedicated_allocation is a Vulkan extension which can be used to improve +performance on some GPUs. It augments Vulkan API with possibility to query +driver whether it prefers particular buffer or image to have its own, dedicated +allocation (separate `VkDeviceMemory` block) for better efficiency - to be able +to do some internal optimizations. The extension is supported by this library. +It will be used automatically when enabled. - sb.Add("\n\t]"); - } - } - if(allocationsStarted) - sb.Add("\n}"); - } -} +It has been promoted to core Vulkan 1.1, so if you use eligible Vulkan version +and inform VMA about it by setting VmaAllocatorCreateInfo::vulkanApiVersion, +you are all set. 
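+For example, a minimal sketch of that case (the remaining members of `allocatorInfo` are assumed to be
+filled as usual):
+
+\code
+VmaAllocatorCreateInfo allocatorInfo = {};
+allocatorInfo.vulkanApiVersion = VK_API_VERSION_1_1;
+// ... fill physicalDevice, device, instance, etc. ...
+
+vmaCreateAllocator(&allocatorInfo, &allocator);
+\endcode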
-#endif // #if VMA_STATS_STRING_ENABLED +Otherwise, if you want to use it as an extension: -static VkResult AllocateMemoryForImage( - VmaAllocator allocator, - VkImage image, - const VmaMemoryRequirements* pMemoryRequirements, - VmaSuballocationType suballocType, - VkMappedMemoryRange* pMemory, - uint32_t* pMemoryTypeIndex) -{ - VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pMemoryRequirements && pMemory); - - VkMemoryRequirements vkMemReq = {}; - vkGetImageMemoryRequirements(allocator->m_hDevice, image, &vkMemReq); +1 . When creating Vulkan device, check if following 2 device extensions are +supported (call `vkEnumerateDeviceExtensionProperties()`). +If yes, enable them (fill `VkDeviceCreateInfo::ppEnabledExtensionNames`). - return allocator->AllocateMemory( - vkMemReq, - *pMemoryRequirements, - suballocType, - pMemory, - pMemoryTypeIndex); -} +- VK_KHR_get_memory_requirements2 +- VK_KHR_dedicated_allocation -//////////////////////////////////////////////////////////////////////////////// -// Public interface +If you enabled these extensions: -VkResult vmaCreateAllocator( - const VmaAllocatorCreateInfo* pCreateInfo, - VmaAllocator* pAllocator) -{ - VMA_ASSERT(pCreateInfo && pAllocator); - VMA_DEBUG_LOG("vmaCreateAllocator"); - *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo); - return VK_SUCCESS; -} +2 . Use #VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT flag when creating +your #VmaAllocator to inform the library that you enabled required extensions +and you want the library to use them. -void vmaDestroyAllocator( - VmaAllocator allocator) -{ - if(allocator != VK_NULL_HANDLE) - { - VMA_DEBUG_LOG("vmaDestroyAllocator"); - VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks; - vma_delete(&allocationCallbacks, allocator); - } -} +\code +allocatorInfo.flags |= VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT; -void vmaGetPhysicalDeviceProperties( - VmaAllocator allocator, - const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties) -{ - VMA_ASSERT(allocator && ppPhysicalDeviceProperties); - *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties; -} +vmaCreateAllocator(&allocatorInfo, &allocator); +\endcode -void vmaGetMemoryProperties( - VmaAllocator allocator, - const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties) -{ - VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties); - *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps; -} +That is all. The extension will be automatically used whenever you create a +buffer using vmaCreateBuffer() or image using vmaCreateImage(). -void vmaGetMemoryTypeProperties( - VmaAllocator allocator, - uint32_t memoryTypeIndex, - VkMemoryPropertyFlags* pFlags) -{ - VMA_ASSERT(allocator && pFlags); - VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount()); - *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags; -} +When using the extension together with Vulkan Validation Layer, you will receive +warnings like this: -void vmaCalculateStats( - VmaAllocator allocator, - VmaStats* pStats) -{ - VMA_ASSERT(allocator && pStats); - VMA_DEBUG_GLOBAL_MUTEX_LOCK - allocator->CalculateStats(pStats); -} +_vkBindBufferMemory(): Binding memory to buffer 0x33 but vkGetBufferMemoryRequirements() has not been called on that buffer._ -#if VMA_STATS_STRING_ENABLED +It is OK, you should just ignore it. 
It happens because you use function +`vkGetBufferMemoryRequirements2KHR()` instead of standard +`vkGetBufferMemoryRequirements()`, while the validation layer seems to be +unaware of it. -void vmaBuildStatsString( - VmaAllocator allocator, - char** ppStatsString, - VkBool32 detailedMap) -{ - VMA_ASSERT(allocator && ppStatsString); - VMA_DEBUG_GLOBAL_MUTEX_LOCK +To learn more about this extension, see: - VmaStringBuilder sb(allocator); - { - VmaStats stats; - allocator->CalculateStats(&stats); +- [VK_KHR_dedicated_allocation in Vulkan specification](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/chap50.html#VK_KHR_dedicated_allocation) +- [VK_KHR_dedicated_allocation unofficial manual](http://asawicki.info/articles/VK_KHR_dedicated_allocation.php5) - sb.Add("{\n\"Total\": "); - VmaPrintStatInfo(sb, stats.total); - - for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex) - { - sb.Add(",\n\"Heap "); - sb.AddNumber(heapIndex); - sb.Add("\": {\n\t\"Size\": "); - sb.AddNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size); - sb.Add(",\n\t\"Flags\": "); - if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0) - sb.AddString("DEVICE_LOCAL"); - else - sb.AddString(""); - if(stats.memoryHeap[heapIndex].AllocationCount > 0) - { - sb.Add(",\n\t\"Stats:\": "); - VmaPrintStatInfo(sb, stats.memoryHeap[heapIndex]); - } - for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex) - { - if(allocator->m_MemProps.memoryTypes[typeIndex].heapIndex == heapIndex) - { - sb.Add(",\n\t\"Type "); - sb.AddNumber(typeIndex); - sb.Add("\": {\n\t\t\"Flags\": \""); - VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags; - if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0) - sb.Add(" DEVICE_LOCAL"); - if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) - sb.Add(" HOST_VISIBLE"); - if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0) - sb.Add(" HOST_COHERENT"); - if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0) - sb.Add(" HOST_CACHED"); - if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0) - sb.Add(" LAZILY_ALLOCATED"); - sb.Add("\""); - if(stats.memoryType[typeIndex].AllocationCount > 0) - { - sb.Add(",\n\t\t\"Stats\": "); - VmaPrintStatInfo(sb, stats.memoryType[typeIndex]); - } - sb.Add("\n\t}"); - } - } - sb.Add("\n}"); - } - if(detailedMap == VK_TRUE) - allocator->PrintDetailedMap(sb); - sb.Add("\n}\n"); - } - const size_t len = sb.GetLength(); - char* const pChars = vma_new_array(allocator, char, len + 1); - if(len > 0) - memcpy(pChars, sb.GetData(), len); - pChars[len] = '\0'; - *ppStatsString = pChars; -} +\page vk_ext_memory_priority VK_EXT_memory_priority -void vmaFreeStatsString( - VmaAllocator allocator, - char* pStatsString) -{ - if(pStatsString != VMA_NULL) - { - VMA_ASSERT(allocator); - size_t len = strlen(pStatsString); - vma_delete_array(allocator, pStatsString, len + 1); - } -} +VK_EXT_memory_priority is a device extension that allows to pass additional "priority" +value to Vulkan memory allocations that the implementation may use prefer certain +buffers and images that are critical for performance to stay in device-local memory +in cases when the memory is over-subscribed, while some others may be moved to the system memory. -#endif // #if VMA_STATS_STRING_ENABLED +VMA offers convenient usage of this extension. 
+If you enable it, you can pass "priority" parameter when creating allocations or custom pools +and the library automatically passes the value to Vulkan using this extension. -/** This function is not protected by any mutex because it just reads immutable data. -*/ -VkResult vmaFindMemoryTypeIndex( - VmaAllocator allocator, - uint32_t memoryTypeBits, - const VmaMemoryRequirements* pMemoryRequirements, - uint32_t* pMemoryTypeIndex) -{ - VMA_ASSERT(allocator != VK_NULL_HANDLE); - VMA_ASSERT(pMemoryRequirements != VMA_NULL); - VMA_ASSERT(pMemoryTypeIndex != VMA_NULL); - - uint32_t requiredFlags = pMemoryRequirements->requiredFlags; - uint32_t preferredFlags = pMemoryRequirements->preferredFlags; - if(preferredFlags == 0) - preferredFlags = requiredFlags; - // preferredFlags, if not 0, must be subset of requiredFlags. - VMA_ASSERT((requiredFlags & ~preferredFlags) == 0); +If you want to use this extension in connection with VMA, follow these steps: - // Convert usage to requiredFlags and preferredFlags. - switch(pMemoryRequirements->usage) - { - case VMA_MEMORY_USAGE_UNKNOWN: - break; - case VMA_MEMORY_USAGE_GPU_ONLY: - preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; - break; - case VMA_MEMORY_USAGE_CPU_ONLY: - requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; - break; - case VMA_MEMORY_USAGE_CPU_TO_GPU: - requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; - preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; - break; - case VMA_MEMORY_USAGE_GPU_TO_CPU: - requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; - preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT; - break; - default: - break; - } +\section vk_ext_memory_priority_initialization Initialization - *pMemoryTypeIndex = UINT32_MAX; - uint32_t minCost = UINT32_MAX; - for(uint32_t memTypeIndex = 0, memTypeBit = 1; - memTypeIndex < allocator->GetMemoryTypeCount(); - ++memTypeIndex, memTypeBit <<= 1) - { - // This memory type is acceptable according to memoryTypeBits bitmask. - if((memTypeBit & memoryTypeBits) != 0) - { - const VkMemoryPropertyFlags currFlags = - allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags; - // This memory type contains requiredFlags. - if((requiredFlags & ~currFlags) == 0) - { - // Calculate cost as number of bits from preferredFlags not present in this memory type. - uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags); - // Remember memory type with lowest cost. - if(currCost < minCost) - { - *pMemoryTypeIndex = memTypeIndex; - if(currCost == 0) - return VK_SUCCESS; - minCost = currCost; - } - } - } - } - return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT; -} +1) Call `vkEnumerateDeviceExtensionProperties` for the physical device. +Check if the extension is supported - if returned array of `VkExtensionProperties` contains "VK_EXT_memory_priority". -VkResult vmaAllocateMemory( - VmaAllocator allocator, - const VkMemoryRequirements* pVkMemoryRequirements, - const VmaMemoryRequirements* pVmaMemoryRequirements, - VkMappedMemoryRange* pMemory, - uint32_t* pMemoryTypeIndex) -{ - VMA_ASSERT(allocator && pVkMemoryRequirements && pVmaMemoryRequirements && pMemory); +2) Call `vkGetPhysicalDeviceFeatures2` for the physical device instead of old `vkGetPhysicalDeviceFeatures`. +Attach additional structure `VkPhysicalDeviceMemoryPriorityFeaturesEXT` to `VkPhysicalDeviceFeatures2::pNext` to be returned. 
+Check if the device feature is really supported - check if `VkPhysicalDeviceMemoryPriorityFeaturesEXT::memoryPriority` is true. - VMA_DEBUG_LOG("vmaAllocateMemory"); +3) While creating device with `vkCreateDevice`, enable this extension - add "VK_EXT_memory_priority" +to the list passed as `VkDeviceCreateInfo::ppEnabledExtensionNames`. - VMA_DEBUG_GLOBAL_MUTEX_LOCK +4) While creating the device, also don't set `VkDeviceCreateInfo::pEnabledFeatures`. +Fill in `VkPhysicalDeviceFeatures2` structure instead and pass it as `VkDeviceCreateInfo::pNext`. +Enable this device feature - attach additional structure `VkPhysicalDeviceMemoryPriorityFeaturesEXT` to +`VkPhysicalDeviceFeatures2::pNext` chain and set its member `memoryPriority` to `VK_TRUE`. - return allocator->AllocateMemory( - *pVkMemoryRequirements, - *pVmaMemoryRequirements, - VMA_SUBALLOCATION_TYPE_UNKNOWN, - pMemory, - pMemoryTypeIndex); -} +5) While creating #VmaAllocator with vmaCreateAllocator() inform VMA that you +have enabled this extension and feature - add #VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT +to VmaAllocatorCreateInfo::flags. -VkResult vmaAllocateMemoryForBuffer( - VmaAllocator allocator, - VkBuffer buffer, - const VmaMemoryRequirements* pMemoryRequirements, - VkMappedMemoryRange* pMemory, - uint32_t* pMemoryTypeIndex) -{ - VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pMemoryRequirements && pMemory); +\section vk_ext_memory_priority_usage Usage - VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer"); +When using this extension, you should initialize following member: - VMA_DEBUG_GLOBAL_MUTEX_LOCK +- VmaAllocationCreateInfo::priority when creating a dedicated allocation with #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. +- VmaPoolCreateInfo::priority when creating a custom pool. - VkMemoryRequirements vkMemReq = {}; - vkGetBufferMemoryRequirements(allocator->m_hDevice, buffer, &vkMemReq); +It should be a floating-point value between `0.0f` and `1.0f`, where recommended default is `0.5F`. +Memory allocated with higher value can be treated by the Vulkan implementation as higher priority +and so it can have lower chances of being pushed out to system memory, experiencing degraded performance. - return allocator->AllocateMemory( - vkMemReq, - *pMemoryRequirements, - VMA_SUBALLOCATION_TYPE_BUFFER, - pMemory, - pMemoryTypeIndex); -} +It might be a good idea to create performance-critical resources like color-attachment or depth-stencil images +as dedicated and set high priority to them. 
For example: -VkResult vmaAllocateMemoryForImage( - VmaAllocator allocator, - VkImage image, - const VmaMemoryRequirements* pMemoryRequirements, - VkMappedMemoryRange* pMemory, - uint32_t* pMemoryTypeIndex) -{ - VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pMemoryRequirements); +\code +VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO }; +imgCreateInfo.imageType = VK_IMAGE_TYPE_2D; +imgCreateInfo.extent.width = 3840; +imgCreateInfo.extent.height = 2160; +imgCreateInfo.extent.depth = 1; +imgCreateInfo.mipLevels = 1; +imgCreateInfo.arrayLayers = 1; +imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM; +imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL; +imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; +imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; +imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT; - VMA_DEBUG_LOG("vmaAllocateMemoryForImage"); +VmaAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; +allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; +allocCreateInfo.priority = 1.0f; - VMA_DEBUG_GLOBAL_MUTEX_LOCK +VkImage img; +VmaAllocation alloc; +vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo, &img, &alloc, nullptr); +\endcode - return AllocateMemoryForImage( - allocator, - image, - pMemoryRequirements, - VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN, - pMemory, - pMemoryTypeIndex); -} +`priority` member is ignored in the following situations: -void vmaFreeMemory( - VmaAllocator allocator, - const VkMappedMemoryRange* pMemory) -{ - VMA_ASSERT(allocator && pMemory); +- Allocations created in custom pools: They inherit the priority, along with all other allocation parameters + from the parameters passed in #VmaPoolCreateInfo when the pool was created. +- Allocations created in default pools: They inherit the priority from the parameters + VMA used when creating default pools, which means `priority == 0.5F`. - VMA_DEBUG_LOG("vmaFreeMemory"); - VMA_DEBUG_GLOBAL_MUTEX_LOCK +\page vk_amd_device_coherent_memory VK_AMD_device_coherent_memory - allocator->FreeMemory(pMemory); -} +VK_AMD_device_coherent_memory is a device extension that enables access to +additional memory types with `VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD` and +`VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD` flag. It is useful mostly for +allocation of buffers intended for writing "breadcrumb markers" in between passes +or draw calls, which in turn are useful for debugging GPU crash/hang/TDR cases. -VkResult vmaMapMemory( - VmaAllocator allocator, - const VkMappedMemoryRange* pMemory, - void** ppData) -{ - VMA_ASSERT(allocator && pMemory && ppData); +When the extension is available but has not been enabled, Vulkan physical device +still exposes those memory types, but their usage is forbidden. VMA automatically +takes care of that - it returns `VK_ERROR_FEATURE_NOT_PRESENT` when an attempt +to allocate memory of such type is made. - VMA_DEBUG_GLOBAL_MUTEX_LOCK +If you want to use this extension in connection with VMA, follow these steps: - return vkMapMemory(allocator->m_hDevice, pMemory->memory, - pMemory->offset, pMemory->size, 0, ppData); -} +\section vk_amd_device_coherent_memory_initialization Initialization -void vmaUnmapMemory( - VmaAllocator allocator, - const VkMappedMemoryRange* pMemory) -{ - VMA_ASSERT(allocator && pMemory); +1) Call `vkEnumerateDeviceExtensionProperties` for the physical device. 
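+A minimal sketch of that enumeration (the `physicalDevice` handle and the variable names
+are assumptions of this example, not part of the library API):
+
+\code
+uint32_t extensionCount = 0;
+vkEnumerateDeviceExtensionProperties(physicalDevice, nullptr, &extensionCount, nullptr);
+
+std::vector<VkExtensionProperties> extensions(extensionCount);
+vkEnumerateDeviceExtensionProperties(physicalDevice, nullptr, &extensionCount,
+    extensions.data());
+// Search extensions[i].extensionName for "VK_AMD_device_coherent_memory" as described below.
+\endcode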
+Check if the extension is supported - if returned array of `VkExtensionProperties` contains "VK_AMD_device_coherent_memory". - VMA_DEBUG_GLOBAL_MUTEX_LOCK +2) Call `vkGetPhysicalDeviceFeatures2` for the physical device instead of old `vkGetPhysicalDeviceFeatures`. +Attach additional structure `VkPhysicalDeviceCoherentMemoryFeaturesAMD` to `VkPhysicalDeviceFeatures2::pNext` to be returned. +Check if the device feature is really supported - check if `VkPhysicalDeviceCoherentMemoryFeaturesAMD::deviceCoherentMemory` is true. - vkUnmapMemory(allocator->m_hDevice, pMemory->memory); -} +3) While creating device with `vkCreateDevice`, enable this extension - add "VK_AMD_device_coherent_memory" +to the list passed as `VkDeviceCreateInfo::ppEnabledExtensionNames`. -VkResult vmaCreateBuffer( - VmaAllocator allocator, - const VkBufferCreateInfo* pCreateInfo, - const VmaMemoryRequirements* pMemoryRequirements, - VkBuffer* pBuffer, - VkMappedMemoryRange* pMemory, - uint32_t* pMemoryTypeIndex) -{ - VMA_ASSERT(allocator && pCreateInfo && pMemoryRequirements); - - VMA_DEBUG_LOG("vmaCreateBuffer"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK +4) While creating the device, also don't set `VkDeviceCreateInfo::pEnabledFeatures`. +Fill in `VkPhysicalDeviceFeatures2` structure instead and pass it as `VkDeviceCreateInfo::pNext`. +Enable this device feature - attach additional structure `VkPhysicalDeviceCoherentMemoryFeaturesAMD` to +`VkPhysicalDeviceFeatures2::pNext` and set its member `deviceCoherentMemory` to `VK_TRUE`. - // 1. Create VkBuffer. - VkResult res = vkCreateBuffer(allocator->m_hDevice, pCreateInfo, allocator->GetAllocationCallbacks(), pBuffer); - if(res >= 0) - { - VkMappedMemoryRange mem = {}; +5) While creating #VmaAllocator with vmaCreateAllocator() inform VMA that you +have enabled this extension and feature - add #VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT +to VmaAllocatorCreateInfo::flags. - // 2. vkGetBufferMemoryRequirements. - VkMemoryRequirements vkMemReq = {}; - vkGetBufferMemoryRequirements(allocator->m_hDevice, *pBuffer, &vkMemReq); +\section vk_amd_device_coherent_memory_usage Usage - // 3. Allocate memory using allocator. - res = allocator->AllocateMemory( - vkMemReq, - *pMemoryRequirements, - VMA_SUBALLOCATION_TYPE_BUFFER, - &mem, - pMemoryTypeIndex); - if(res >= 0) - { - if(pMemory != VMA_NULL) - { - *pMemory = mem; - } - // 3. Bind buffer with memory. - res = vkBindBufferMemory(allocator->m_hDevice, *pBuffer, mem.memory, mem.offset); - if(res >= 0) - { - // All steps succeeded. - VmaMutexLock lock(allocator->m_BufferToMemoryMapMutex); - allocator->m_BufferToMemoryMap.insert(VmaPair(*pBuffer, mem)); - return VK_SUCCESS; - } - allocator->FreeMemory(&mem); - return res; - } - vkDestroyBuffer(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks()); - return res; - } - return res; -} +After following steps described above, you can create VMA allocations and custom pools +out of the special `DEVICE_COHERENT` and `DEVICE_UNCACHED` memory types on eligible +devices. There are multiple ways to do it, for example: -void vmaDestroyBuffer( - VmaAllocator allocator, - VkBuffer buffer) -{ - if(buffer != VK_NULL_HANDLE) - { - VMA_ASSERT(allocator); +- You can request or prefer to allocate out of such memory types by adding + `VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD` to VmaAllocationCreateInfo::requiredFlags + or VmaAllocationCreateInfo::preferredFlags. Those flags can be freely mixed with + other ways of \ref choosing_memory_type, like setting VmaAllocationCreateInfo::usage. 
+- If you manually found memory type index to use for this purpose, force allocation + from this specific index by setting VmaAllocationCreateInfo::memoryTypeBits `= 1U << index`. - VMA_DEBUG_LOG("vmaDestroyBuffer"); +\section vk_amd_device_coherent_memory_more_information More information - VMA_DEBUG_GLOBAL_MUTEX_LOCK +To learn more about this extension, see [VK_AMD_device_coherent_memory in Vulkan specification](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/VK_AMD_device_coherent_memory.html) - VkMappedMemoryRange mem = {}; - { - VmaMutexLock lock(allocator->m_BufferToMemoryMapMutex); - VMA_MAP_TYPE(VkBuffer, VkMappedMemoryRange)::iterator it = allocator->m_BufferToMemoryMap.find(buffer); - if(it == allocator->m_BufferToMemoryMap.end()) - { - VMA_ASSERT(0 && "Trying to destroy buffer that was not created using vmaCreateBuffer or already freed."); - return; - } - mem = it->second; - allocator->m_BufferToMemoryMap.erase(it); - } +Example use of this extension can be found in the code of the sample and test suite +accompanying this library. - vkDestroyBuffer(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks()); - - allocator->FreeMemory(&mem); - } -} -VkResult vmaCreateImage( - VmaAllocator allocator, - const VkImageCreateInfo* pCreateInfo, - const VmaMemoryRequirements* pMemoryRequirements, - VkImage* pImage, - VkMappedMemoryRange* pMemory, - uint32_t* pMemoryTypeIndex) -{ - VMA_ASSERT(allocator && pCreateInfo && pMemoryRequirements); +\page enabling_buffer_device_address Enabling buffer device address - VMA_DEBUG_LOG("vmaCreateImage"); +Device extension VK_KHR_buffer_device_address +allow to fetch raw GPU pointer to a buffer and pass it for usage in a shader code. +It has been promoted to core Vulkan 1.2. - VMA_DEBUG_GLOBAL_MUTEX_LOCK +If you want to use this feature in connection with VMA, follow these steps: - // 1. Create VkImage. - VkResult res = vkCreateImage(allocator->m_hDevice, pCreateInfo, allocator->GetAllocationCallbacks(), pImage); - if(res >= 0) - { - VkMappedMemoryRange mem = {}; - VmaSuballocationType suballocType = pCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ? - VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL : - VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR; - - // 2. Allocate memory using allocator. - res = AllocateMemoryForImage(allocator, *pImage, pMemoryRequirements, suballocType, &mem, pMemoryTypeIndex); - if(res >= 0) - { - if(pMemory != VMA_NULL) - *pMemory = mem; - // 3. Bind image with memory. - res = vkBindImageMemory(allocator->m_hDevice, *pImage, mem.memory, mem.offset); - if(res >= 0) - { - // All steps succeeded. - VmaMutexLock lock(allocator->m_ImageToMemoryMapMutex); - allocator->m_ImageToMemoryMap.insert(VmaPair(*pImage, mem)); - return VK_SUCCESS; - } - allocator->FreeMemory(&mem); - return res; - } - vkDestroyImage(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks()); - return res; - } - return res; -} +\section enabling_buffer_device_address_initialization Initialization -void vmaDestroyImage( - VmaAllocator allocator, - VkImage image) -{ - if(image != VK_NULL_HANDLE) - { - VMA_ASSERT(allocator); +1) (For Vulkan version < 1.2) Call `vkEnumerateDeviceExtensionProperties` for the physical device. +Check if the extension is supported - if returned array of `VkExtensionProperties` contains +"VK_KHR_buffer_device_address". - VMA_DEBUG_LOG("vmaDestroyImage"); +2) Call `vkGetPhysicalDeviceFeatures2` for the physical device instead of old `vkGetPhysicalDeviceFeatures`. 
+Attach additional structure `VkPhysicalDeviceBufferDeviceAddressFeatures*` to `VkPhysicalDeviceFeatures2::pNext` to be returned. +Check if the device feature is really supported - check if `VkPhysicalDeviceBufferDeviceAddressFeatures::bufferDeviceAddress` is true. - VMA_DEBUG_GLOBAL_MUTEX_LOCK +3) (For Vulkan version < 1.2) While creating device with `vkCreateDevice`, enable this extension - add +"VK_KHR_buffer_device_address" to the list passed as `VkDeviceCreateInfo::ppEnabledExtensionNames`. - VkMappedMemoryRange mem = {}; - { - VmaMutexLock lock(allocator->m_ImageToMemoryMapMutex); - VMA_MAP_TYPE(VkImage, VkMappedMemoryRange)::iterator it = allocator->m_ImageToMemoryMap.find(image); - if(it == allocator->m_ImageToMemoryMap.end()) - { - VMA_ASSERT(0 && "Trying to destroy buffer that was not created using vmaCreateBuffer or already freed."); - return; - } - mem = it->second; - allocator->m_ImageToMemoryMap.erase(it); - } +4) While creating the device, also don't set `VkDeviceCreateInfo::pEnabledFeatures`. +Fill in `VkPhysicalDeviceFeatures2` structure instead and pass it as `VkDeviceCreateInfo::pNext`. +Enable this device feature - attach additional structure `VkPhysicalDeviceBufferDeviceAddressFeatures*` to +`VkPhysicalDeviceFeatures2::pNext` and set its member `bufferDeviceAddress` to `VK_TRUE`. - vkDestroyImage(allocator->m_hDevice, image, allocator->GetAllocationCallbacks()); +5) While creating #VmaAllocator with vmaCreateAllocator() inform VMA that you +have enabled this feature - add #VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT +to VmaAllocatorCreateInfo::flags. - allocator->FreeMemory(&mem); - } -} +\section enabling_buffer_device_address_usage Usage -#endif // #ifdef VMA_IMPLEMENTATION +After following steps described above, you can create buffers with `VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT*` using VMA. +The library automatically adds `VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT*` to +allocated memory blocks wherever it might be needed. -#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H +Please note that the library supports only `VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT*`. +The second part of this functionality related to "capture and replay" is not supported, +as it is intended for usage in debugging tools like RenderDoc, not in everyday Vulkan usage. + +\section enabling_buffer_device_address_more_information More information + +To learn more about this extension, see [VK_KHR_buffer_device_address in Vulkan specification](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/chap46.html#VK_KHR_buffer_device_address) + +Example use of this extension can be found in the code of the sample and test suite +accompanying this library. + +\page general_considerations General considerations + +\section general_considerations_thread_safety Thread safety + +- The library has no global state, so separate #VmaAllocator objects can be used + independently. + There should be no need to create multiple such objects though - one per `VkDevice` is enough. +- By default, all calls to functions that take #VmaAllocator as first parameter + are safe to call from multiple threads simultaneously because they are + synchronized internally when needed. + This includes allocation and deallocation from default memory pool, as well as custom #VmaPool. +- When the allocator is created with #VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT + flag, calls to functions that take such #VmaAllocator object must be + synchronized externally. 
+- Access to a #VmaAllocation object must be externally synchronized. For example,
+  you must not call vmaGetAllocationInfo() and vmaMapMemory() from different
+  threads at the same time if you pass the same #VmaAllocation object to these
+  functions.
+- #VmaVirtualBlock is not safe to use from multiple threads simultaneously.
+
+\section general_considerations_versioning_and_compatibility Versioning and compatibility
+
+The library uses [**Semantic Versioning**](https://semver.org/),
+which means version numbers follow the convention Major.Minor.Patch (e.g. 2.3.0), where:
+
+- An incremented Patch version means a release is backward- and forward-compatible,
+  introducing only internal improvements, bug fixes, optimizations, etc.,
+  or changes that are out of scope of the official API described in this documentation.
+- An incremented Minor version means a release is backward-compatible,
+  so existing code that uses the library should continue to work, while some new
+  symbols could have been added: new structures, functions, new values in existing
+  enums and bit flags, new structure members, but not new function parameters.
+- An incremented Major version means a release could break some backward compatibility.
+
+All changes between official releases are documented in the file "CHANGELOG.md".
+
+\warning Backward compatibility is considered on the level of C++ source code, not binary linkage.
+Adding new members to existing structures is treated as backward compatible if initializing
+the new members to binary zero results in the old behavior.
+You should always fully initialize all library structures to zeros and not rely on their
+exact binary size.
+
+\section general_considerations_validation_layer_warnings Validation layer warnings
+
+When using this library, you may encounter the following types of warnings issued by
+the Vulkan validation layer. They don't necessarily indicate a bug, so you may simply
+need to ignore them.
+
+- *Mapping an image with layout VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL can result in undefined behavior if this memory is used by the device. Only GENERAL or PREINITIALIZED should be used.*
+  - It happens when you map a buffer or image, because the library maps the entire
+    `VkDeviceMemory` block, where different types of images and buffers may end
+    up together, especially on GPUs with unified memory like Intel.
+- *Non-linear image 0xebc91 is aliased with linear buffer 0xeb8e4 which may indicate a bug.*
+  - It may happen when you use [defragmentation](@ref defragmentation).
+
+\section general_considerations_allocation_algorithm Allocation algorithm
+
+The library uses the following algorithm for allocation, in order:
+
+-# Try to find a free range of memory in existing blocks.
+-# If that fails, try to create a new block of `VkDeviceMemory` with the preferred block size.
+-# If that fails, try to create such a block with size / 2, size / 4, size / 8.
+-# If that fails, try to allocate a separate `VkDeviceMemory` for this allocation,
+   just like when you use #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT.
+-# If that fails, choose another memory type that meets the requirements specified in
+   VmaAllocationCreateInfo and go back to point 1.
+-# If that fails, return `VK_ERROR_OUT_OF_DEVICE_MEMORY`.
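+For example, a single call such as the sketch below (the buffer size and usage flags are
+arbitrary values picked for illustration) walks through the whole sequence above and only
+reports failure once every fallback has been exhausted:
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 65536;
+bufCreateInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+
+VkBuffer buf;
+VmaAllocation alloc;
+VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
+if(res == VK_ERROR_OUT_OF_DEVICE_MEMORY)
+{
+    // Every step of the algorithm failed - the application has to free or shrink
+    // something before retrying.
+}
+\endcode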
+
+\section general_considerations_features_not_supported Features not supported
+
+Features deliberately excluded from the scope of this library:
+
+-# **Data transfer.** Uploading (streaming) and downloading data of buffers and images
+   between CPU and GPU memory, and the related synchronization, is the responsibility of the user.
+   Defining some "texture" object that would automatically stream its data from a
+   staging copy in CPU memory to GPU memory would be a feature of another,
+   higher-level library implemented on top of VMA.
+   VMA doesn't record any commands to a `VkCommandBuffer`. It just allocates memory.
+-# **Recreation of buffers and images.** Although the library has functions for
+   buffer and image creation (vmaCreateBuffer(), vmaCreateImage()), you need to
+   recreate these objects yourself after defragmentation. That is because the big
+   structures `VkBufferCreateInfo`, `VkImageCreateInfo` are not stored in the
+   #VmaAllocation object.
+-# **Handling CPU memory allocation failures.** When dynamically creating small C++
+   objects in CPU memory (not Vulkan memory), allocation failures are not checked
+   and handled gracefully, because that would complicate the code significantly and
+   is usually not needed in desktop PC applications anyway.
+   Success of an allocation is simply checked with an assert.
+-# **Code free of any compiler warnings.** Maintaining the library to compile and
+   work correctly on so many different platforms is hard enough. Being free of
+   any warnings, on any version of any compiler, is simply not feasible.
+   There are many preprocessor macros that make some variables unused, function parameters unreferenced,
+   or conditional expressions constant in some configurations.
+   The code of this library should not be made bigger or more complicated just to silence these warnings.
+   It is recommended to disable such warnings instead.
+-# This is a C++ library with a C interface. **Bindings or ports to any other programming language** are welcome as external projects but
+   are not going to be included in this repository.
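+To make the first point concrete: a typical upload stays entirely on the user's side.
+A sketch of it could look as follows (the `cmdBuf`, `deviceLocalBuf`, `srcData` and
+`dataSize` variables, as well as command buffer recording and submission, are
+assumptions of this example and are not provided by VMA):
+
+\code
+// Create a host-visible staging buffer and keep it persistently mapped.
+VkBufferCreateInfo stagingCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+stagingCreateInfo.size = dataSize;
+stagingCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+
+VmaAllocationCreateInfo stagingAllocCreateInfo = {};
+stagingAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+stagingAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT |
+    VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+VkBuffer stagingBuf;
+VmaAllocation stagingAlloc;
+VmaAllocationInfo stagingAllocInfo;
+vmaCreateBuffer(allocator, &stagingCreateInfo, &stagingAllocCreateInfo,
+    &stagingBuf, &stagingAlloc, &stagingAllocInfo);
+
+// VMA only provided the memory; filling it and recording the copy is user code.
+memcpy(stagingAllocInfo.pMappedData, srcData, dataSize);
+
+VkBufferCopy copyRegion = { 0, 0, dataSize };
+vkCmdCopyBuffer(cmdBuf, stagingBuf, deviceLocalBuf, 1, &copyRegion);
+\endcode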
+*/ diff --git a/deps/glslang b/deps/glslang new file mode 160000 index 00000000..dcf1aaa6 --- /dev/null +++ b/deps/glslang @@ -0,0 +1 @@ +Subproject commit dcf1aaa6fd7dc2081f17aa0a4f1590a76473d961 diff --git a/deps/glslang/.gitattributes b/deps/glslang/.gitattributes deleted file mode 100644 index 9ef576f6..00000000 --- a/deps/glslang/.gitattributes +++ /dev/null @@ -1,21 +0,0 @@ -# test files have a mix of lf/crlf, and that's a good thing, for testing, don't mess with it -# bash scripts need lines ending with lf, and that's correct for Windows too, e.g., under Cygwin -# (scripts often don't have a suffix) -* -text -*.sh text eof=lf -*.bash text eof=lf - -# txt files should be native and normalized -*.txt text - -# source code can be native and normalized, but simpler if lf everywhere; will try that way -*.h text eof=lf -*.hpp text eof=lf -*.c text eof=lf -*.cpp text eof=lf -*.y text eof=lf -*.out text eof=lf -*.conf text eof=lf -*.err text eof=lf -*.php text eof=lf -*.template text eof=lf diff --git a/deps/glslang/.gitignore b/deps/glslang/.gitignore deleted file mode 100644 index 153aa048..00000000 --- a/deps/glslang/.gitignore +++ /dev/null @@ -1,11 +0,0 @@ -*.o -*.a -*.so -*.exe -tags -glslang/MachineIndependent/glslang_tab.cpp -glslang/MachineIndependent/glslang_tab.cpp.h -build/ -Test/localResults/ -Test/multiThread.out -Test/singleThread.out diff --git a/deps/glslang/CMakeLists.txt b/deps/glslang/CMakeLists.txt deleted file mode 100644 index 21e92c00..00000000 --- a/deps/glslang/CMakeLists.txt +++ /dev/null @@ -1,92 +0,0 @@ -# increase to 3.1 once all major distributions -# include a version of CMake >= 3.1 -cmake_minimum_required(VERSION 2.8.12) -set_property(GLOBAL PROPERTY USE_FOLDERS ON) - -# Adhere to GNU filesystem layout conventions -include(GNUInstallDirs) - -option(ENABLE_AMD_EXTENSIONS "Enables support of AMD-specific extensions" ON) - -#Adjusted for Anvil: -option(ENABLE_GLSLANG_BINARIES "Builds glslangValidator and spirv-remap" OFF) - -option(ENABLE_NV_EXTENSIONS "Enables support of Nvidia-specific extensions" ON) -option(ENABLE_HLSL "Enables HLSL input support" ON) - -if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT AND WIN32) - set(CMAKE_INSTALL_PREFIX "install" CACHE STRING "..." FORCE) -endif() - -project(glslang) -# make testing optional -include(CTest) - -if(ENABLE_AMD_EXTENSIONS) - add_definitions(-DAMD_EXTENSIONS) -endif(ENABLE_AMD_EXTENSIONS) - -if(ENABLE_NV_EXTENSIONS) - add_definitions(-DNV_EXTENSIONS) -endif(ENABLE_NV_EXTENSIONS) - -if(ENABLE_HLSL) - add_definitions(-DENABLE_HLSL) -endif(ENABLE_HLSL) - -if(WIN32) - set(CMAKE_DEBUG_POSTFIX "d") - if(MSVC) - include(ChooseMSVCCRT.cmake) - endif(MSVC) - add_definitions(-DGLSLANG_OSINCLUDE_WIN32) -elseif(UNIX) - add_definitions(-DGLSLANG_OSINCLUDE_UNIX) -else(WIN32) - message("unknown platform") -endif(WIN32) - -if(${CMAKE_CXX_COMPILER_ID} MATCHES "GNU") - add_compile_options(-Wall -Wmaybe-uninitialized -Wuninitialized -Wunused -Wunused-local-typedefs - -Wunused-parameter -Wunused-value -Wunused-variable -Wunused-but-set-parameter -Wunused-but-set-variable) - add_compile_options(-Wno-reorder) # disable this from -Wall, since it happens all over. -elseif(${CMAKE_CXX_COMPILER_ID} MATCHES "Clang") - add_compile_options(-Wall -Wuninitialized -Wunused -Wunused-local-typedefs - -Wunused-parameter -Wunused-value -Wunused-variable) - add_compile_options(-Wno-reorder) # disable this from -Wall, since it happens all over. 
-endif() - -# Request C++11 -if(${CMAKE_VERSION} VERSION_LESS 3.1) - # CMake versions before 3.1 do not understand CMAKE_CXX_STANDARD - # remove this block once CMake >=3.1 has fixated in the ecosystem - add_compile_options(-std=c++11) -else() - set(CMAKE_CXX_STANDARD 11) - set(CMAKE_CXX_STANDARD_REQUIRED ON) - set(CMAKE_CXX_EXTENSIONS OFF) -endif() - -function(glslang_set_link_args TARGET) - # For MinGW compiles, statically link against the GCC and C++ runtimes. - # This avoids the need to ship those runtimes as DLLs. - if(WIN32 AND ${CMAKE_CXX_COMPILER_ID} MATCHES "GNU") - set_target_properties(${TARGET} PROPERTIES - LINK_FLAGS "-static -static-libgcc -static-libstdc++") - endif() -endfunction(glslang_set_link_args) - -# We depend on these for later projects, so they should come first. -add_subdirectory(External) - -add_subdirectory(glslang) -add_subdirectory(OGLCompilersDLL) -if(ENABLE_GLSLANG_BINARIES) - add_subdirectory(StandAlone) -endif() -add_subdirectory(SPIRV) -if(ENABLE_HLSL) - add_subdirectory(hlsl) -endif(ENABLE_HLSL) -#Disabled for Anvil: -#add_subdirectory(gtests) diff --git a/deps/glslang/ChooseMSVCCRT.cmake b/deps/glslang/ChooseMSVCCRT.cmake deleted file mode 100644 index 20978817..00000000 --- a/deps/glslang/ChooseMSVCCRT.cmake +++ /dev/null @@ -1,105 +0,0 @@ -# The macro choose_msvc_crt() takes a list of possible -# C runtimes to choose from, in the form of compiler flags, -# to present to the user. (MTd for /MTd, etc) -# -# The macro is invoked at the end of the file. -# -# CMake already sets CRT flags in the CMAKE_CXX_FLAGS_* and -# CMAKE_C_FLAGS_* variables by default. To let the user -# override that for each build type: -# 1. Detect which CRT is already selected, and reflect this in -# LLVM_USE_CRT_* so the user can have a better idea of what -# changes they're making. -# 2. Replace the flags in both variables with the new flag via a regex. -# 3. set() the variables back into the cache so the changes -# are user-visible. - -### Helper macros: ### -macro(make_crt_regex regex crts) - set(${regex} "") - foreach(crt ${${crts}}) - # Trying to match the beginning or end of the string with stuff - # like [ ^]+ didn't work, so use a bunch of parentheses instead. - set(${regex} "${${regex}}|(^| +)/${crt}($| +)") - endforeach(crt) - string(REGEX REPLACE "^\\|" "" ${regex} "${${regex}}") -endmacro(make_crt_regex) - -macro(get_current_crt crt_current regex flagsvar) - # Find the selected-by-CMake CRT for each build type, if any. - # Strip off the leading slash and any whitespace. - string(REGEX MATCH "${${regex}}" ${crt_current} "${${flagsvar}}") - string(REPLACE "/" " " ${crt_current} "${${crt_current}}") - string(STRIP "${${crt_current}}" ${crt_current}) -endmacro(get_current_crt) - -# Replaces or adds a flag to a variable. -# Expects 'flag' to be padded with spaces. -macro(set_flag_in_var flagsvar regex flag) - string(REGEX MATCH "${${regex}}" current_flag "${${flagsvar}}") - if("${current_flag}" STREQUAL "") - set(${flagsvar} "${${flagsvar}}${${flag}}") - else() - string(REGEX REPLACE "${${regex}}" "${${flag}}" ${flagsvar} "${${flagsvar}}") - endif() - string(STRIP "${${flagsvar}}" ${flagsvar}) - # Make sure this change gets reflected in the cache/gui. - # CMake requires the docstring parameter whenever set() touches the cache, - # so get the existing docstring and re-use that. 
- get_property(flagsvar_docs CACHE ${flagsvar} PROPERTY HELPSTRING) - set(${flagsvar} "${${flagsvar}}" CACHE STRING "${flagsvar_docs}" FORCE) -endmacro(set_flag_in_var) - - -macro(choose_msvc_crt MSVC_CRT) - if(LLVM_USE_CRT) - message(FATAL_ERROR - "LLVM_USE_CRT is deprecated. Use the CMAKE_BUILD_TYPE-specific -variables (LLVM_USE_CRT_DEBUG, etc) instead.") - endif() - - make_crt_regex(MSVC_CRT_REGEX ${MSVC_CRT}) - - foreach(build_type ${CMAKE_CONFIGURATION_TYPES} ${CMAKE_BUILD_TYPE}) - string(TOUPPER "${build_type}" build) - if (NOT LLVM_USE_CRT_${build}) - get_current_crt(LLVM_USE_CRT_${build} - MSVC_CRT_REGEX - CMAKE_CXX_FLAGS_${build}) - set(LLVM_USE_CRT_${build} - "${LLVM_USE_CRT_${build}}" - CACHE STRING "Specify VC++ CRT to use for ${build_type} configurations." - FORCE) - set_property(CACHE LLVM_USE_CRT_${build} - PROPERTY STRINGS ;${${MSVC_CRT}}) - endif(NOT LLVM_USE_CRT_${build}) - endforeach(build_type) - - foreach(build_type ${CMAKE_CONFIGURATION_TYPES} ${CMAKE_BUILD_TYPE}) - string(TOUPPER "${build_type}" build) - if ("${LLVM_USE_CRT_${build}}" STREQUAL "") - set(flag_string " ") - else() - set(flag_string " /${LLVM_USE_CRT_${build}} ") - list(FIND ${MSVC_CRT} ${LLVM_USE_CRT_${build}} idx) - if (idx LESS 0) - message(FATAL_ERROR - "Invalid value for LLVM_USE_CRT_${build}: ${LLVM_USE_CRT_${build}}. Valid options are one of: ${${MSVC_CRT}}") - endif (idx LESS 0) - message(STATUS "Using ${build_type} VC++ CRT: ${LLVM_USE_CRT_${build}}") - endif() - foreach(lang C CXX) - set_flag_in_var(CMAKE_${lang}_FLAGS_${build} MSVC_CRT_REGEX flag_string) - endforeach(lang) - endforeach(build_type) -endmacro(choose_msvc_crt MSVC_CRT) - - -# List of valid CRTs for MSVC -set(MSVC_CRT - MD - MDd - MT - MTd) - -choose_msvc_crt(MSVC_CRT) diff --git a/deps/glslang/External/CMakeLists.txt b/deps/glslang/External/CMakeLists.txt deleted file mode 100644 index 4f694ee7..00000000 --- a/deps/glslang/External/CMakeLists.txt +++ /dev/null @@ -1,35 +0,0 @@ -# Suppress all warnings from external projects. -set_property(DIRECTORY APPEND PROPERTY COMPILE_OPTIONS -w) - -if(BUILD_TESTING) - if(TARGET gmock) - message(STATUS "Google Mock already configured - use it") - elseif(IS_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/googletest) - # We need to make sure Google Test does not mess up with the - # global CRT settings on Windows. - if(WIN32) - set(gtest_force_shared_crt ON CACHE BOOL "" FORCE) - endif(WIN32) - add_subdirectory(googletest) - set(GTEST_TARGETS - gtest - gtest_main - gmock - gmock_main) - foreach(target ${GTEST_TARGETS}) - set_property(TARGET ${target} PROPERTY FOLDER gtest) - endforeach() - mark_as_advanced(gmock_build_tests - BUILD_GMOCK - BUILD_GTEST - BUILD_SHARED_LIBS - gtest_build_samples - gtest_build_tests - gtest_disable_pthreads - gtest_force_shared_crt - gtest_hide_internal_symbols) - else() - message(STATUS - "Google Mock was not found - tests based on that will not build") - endif() -endif() diff --git a/deps/glslang/LinuxDoAll.bash b/deps/glslang/LinuxDoAll.bash deleted file mode 100644 index 4bd39a33..00000000 --- a/deps/glslang/LinuxDoAll.bash +++ /dev/null @@ -1,34 +0,0 @@ -#! /bin/bash - -svn update - -rm -f StandAlone/glslangValidator -rm -f Test/glslangValidator -rm -f glslang/MachineIndependent/lib/libglslang.so -rm -f Install/Linux/libglslang.so -rm -f Install/Linux/glslangValidator - -cd StandAlone -make clean -cd ../glslang/MachineIndependent -make clean -cd ../.. 
- -# build the StandAlone app and all it's dependencies -make -C StandAlone - -# so we can find the shared library -#LD_LIBRARY_PATH=`pwd`/glslang/MachineIndependent/lib -#export LD_LIBRARY_PATH - -# install -cd Install/Linux -./install -cp glslangValidator ../../Test -LD_LIBRARY_PATH=/usr/local/lib -export LD_LIBRARY_PATH - -# run using test data -cd ../../Test -chmod +x runtests -./runtests diff --git a/deps/glslang/OGLCompilersDLL/CMakeLists.txt b/deps/glslang/OGLCompilersDLL/CMakeLists.txt deleted file mode 100644 index 9b9b0386..00000000 --- a/deps/glslang/OGLCompilersDLL/CMakeLists.txt +++ /dev/null @@ -1,14 +0,0 @@ -set(SOURCES InitializeDll.cpp InitializeDll.h) - -add_library(OGLCompiler STATIC ${SOURCES}) -set_property(TARGET OGLCompiler PROPERTY FOLDER glslang POSITION_INDEPENDENT_CODE ON) - -if(WIN32) - source_group("Source" FILES ${SOURCES}) -endif(WIN32) - -# Disabled for Anvil: -#>> -#install(TARGETS OGLCompiler -# ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}) -#<< diff --git a/deps/glslang/OGLCompilersDLL/InitializeDll.cpp b/deps/glslang/OGLCompilersDLL/InitializeDll.cpp deleted file mode 100644 index 2eb912c4..00000000 --- a/deps/glslang/OGLCompilersDLL/InitializeDll.cpp +++ /dev/null @@ -1,155 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#define SH_EXPORTING - -#include - -#include "InitializeDll.h" -#include "../glslang/Include/InitializeGlobals.h" - -#include "../glslang/Public/ShaderLang.h" - -namespace glslang { - -OS_TLSIndex ThreadInitializeIndex = OS_INVALID_TLS_INDEX; - -bool InitProcess() -{ - glslang::GetGlobalLock(); - - if (ThreadInitializeIndex != OS_INVALID_TLS_INDEX) { - // - // Function is re-entrant. - // - - glslang::ReleaseGlobalLock(); - return true; - } - - ThreadInitializeIndex = OS_AllocTLSIndex(); - - if (ThreadInitializeIndex == OS_INVALID_TLS_INDEX) { - assert(0 && "InitProcess(): Failed to allocate TLS area for init flag"); - - glslang::ReleaseGlobalLock(); - return false; - } - - if (! 
InitializePoolIndex()) { - assert(0 && "InitProcess(): Failed to initialize global pool"); - - glslang::ReleaseGlobalLock(); - return false; - } - - if (! InitThread()) { - assert(0 && "InitProcess(): Failed to initialize thread"); - - glslang::ReleaseGlobalLock(); - return false; - } - - glslang::ReleaseGlobalLock(); - return true; -} - - -bool InitThread() -{ - // - // This function is re-entrant - // - if (ThreadInitializeIndex == OS_INVALID_TLS_INDEX) { - assert(0 && "InitThread(): Process hasn't been initalised."); - return false; - } - - if (OS_GetTLSValue(ThreadInitializeIndex) != 0) - return true; - - InitializeMemoryPools(); - - if (! OS_SetTLSValue(ThreadInitializeIndex, (void *)1)) { - assert(0 && "InitThread(): Unable to set init flag."); - return false; - } - - return true; -} - - -bool DetachThread() -{ - bool success = true; - - if (ThreadInitializeIndex == OS_INVALID_TLS_INDEX) - return true; - - // - // Function is re-entrant and this thread may not have been initialized. - // - if (OS_GetTLSValue(ThreadInitializeIndex) != 0) { - if (!OS_SetTLSValue(ThreadInitializeIndex, (void *)0)) { - assert(0 && "DetachThread(): Unable to clear init flag."); - success = false; - } - - FreeGlobalPools(); - - } - - return success; -} - -bool DetachProcess() -{ - bool success = true; - - if (ThreadInitializeIndex == OS_INVALID_TLS_INDEX) - return true; - - ShFinalize(); - - success = DetachThread(); - - FreePoolIndex(); - - OS_FreeTLSIndex(ThreadInitializeIndex); - ThreadInitializeIndex = OS_INVALID_TLS_INDEX; - - return success; -} - -} // end namespace glslang diff --git a/deps/glslang/OGLCompilersDLL/InitializeDll.h b/deps/glslang/OGLCompilersDLL/InitializeDll.h deleted file mode 100644 index 60b2b159..00000000 --- a/deps/glslang/OGLCompilersDLL/InitializeDll.h +++ /dev/null @@ -1,49 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. 
-// -#ifndef __INITIALIZEDLL_H -#define __INITIALIZEDLL_H - -#include "../glslang/OSDependent/osinclude.h" - -namespace glslang { - -bool InitProcess(); -bool InitThread(); -bool DetachThread(); -bool DetachProcess(); - -} // end namespace glslang - -#endif // __INITIALIZEDLL_H - diff --git a/deps/glslang/README-spirv-remap.txt b/deps/glslang/README-spirv-remap.txt deleted file mode 100644 index 8e3259f8..00000000 --- a/deps/glslang/README-spirv-remap.txt +++ /dev/null @@ -1,137 +0,0 @@ - -VERSION --------------------------------------------------------------------------------- -spirv-remap 0.97 - -INTRO: --------------------------------------------------------------------------------- -spirv-remap is a utility to improve compression of SPIR-V binary files via -entropy reduction, plus optional stripping of debug information and -load/store optimization. It transforms SPIR-V to SPIR-V, remapping IDs. The -resulting modules have an increased ID range (IDs are not as tightly packed -around zero), but will compress better when multiple modules are compressed -together, since compressor's dictionary can find better cross module -commonality. - -Remapping is accomplished via canonicalization. Thus, modules can be -compressed one at a time with no loss of quality relative to operating on -many modules at once. The command line tool operates on multiple modules -only in the trivial repetition sense, for ease of use. The remapper API -only accepts a single module at a time. - -There are two modes of use: command line, and a C++11 API. Both are -described below. - -spirv-remap is currently in an alpha state. Although there are no known -remapping defects, it has only been exercised on one real world game shader -workload. - - -FEEDBACK --------------------------------------------------------------------------------- -Report defects, enhancements requests, code improvements, etc to: - spvremapper@lunarg.com - - -COMMAND LINE USAGE: --------------------------------------------------------------------------------- -Examples are given with a verbosity of one (-v), but more verbosity can be -had via -vv, -vvv, etc, or an integer parameter to --verbose, such as -"--verbose 4". With no verbosity, the command is silent and returns 0 on -success, and a positive integer error on failure. - -Pre-built binaries for several OSs are available. Examples presented are -for Linux. Command line arguments can be provided in any order. - -1. Basic ID remapping - -Perform ID remapping on all shaders in "*.spv", writing new files with -the same basenames to /tmp/out_dir. - - spirv-remap -v --map all --input *.spv --output /tmp/out_dir - -2. Perform all possible size reductions - - spirv-remap-linux-64 -v --do-everything --input *.spv --output /tmp/out_dir - -Note that --do-everything is a synonym for: - - --map all --dce all --opt all --strip all - -API USAGE: --------------------------------------------------------------------------------- - -The public interface to the remapper is defined in SPIRV/SPVRemapper.h as follows: - -namespace spv { - -class spirvbin_t -{ -public: - enum Options { ... 
}; - spirvbin_t(int verbose = 0); // construct - - // remap an existing binary in memory - void remap(std::vector& spv, std::uint32_t opts = DO_EVERYTHING); - - // Type for error/log handler functions - typedef std::function errorfn_t; - typedef std::function logfn_t; - - // Register error/log handling functions (can be c/c++ fn, lambda fn, or functor) - static void registerErrorHandler(errorfn_t handler) { errorHandler = handler; } - static void registerLogHandler(logfn_t handler) { logHandler = handler; } -}; - -} // namespace spv - -The class definition is in SPVRemapper.cpp. - -remap() accepts an std::vector of SPIR-V words, modifies them per the -request given in 'opts', and leaves the 'spv' container with the result. -It is safe to instantiate one spirvbin_t per thread and process a different -SPIR-V in each. - -The "opts" parameter to remap() accepts a bit mask of desired remapping -options. See REMAPPING AND OPTIMIZATION OPTIONS. - -On error, the function supplied to registerErrorHandler() will be invoked. -This can be a standard C/C++ function, a lambda function, or a functor. -The default handler simply calls exit(5); The error handler is a static -members, so need only be set up once, not once per spirvbin_t instance. - -Log messages are supplied to registerLogHandler(). By default, log -messages are eaten silently. The log handler is also a static member. - -BUILD DEPENDENCIES: --------------------------------------------------------------------------------- - 1. C++11 compatible compiler - 2. cmake - 3. glslang - - -BUILDING --------------------------------------------------------------------------------- -The standalone remapper is built along side glslangValidator through its -normal build process. - - -REMAPPING AND OPTIMIZATION OPTIONS --------------------------------------------------------------------------------- -API: - These are bits defined under spv::spirvbin_t::, and can be - bitwise or-ed together as desired. - - MAP_TYPES = canonicalize type IDs - MAP_NAMES = canonicalize named data - MAP_FUNCS = canonicalize function bodies - DCE_FUNCS = remove dead functions - DCE_VARS = remove dead variables - DCE_TYPES = remove dead types - OPT_LOADSTORE = optimize unneeded load/stores - MAP_ALL = (MAP_TYPES | MAP_NAMES | MAP_FUNCS) - DCE_ALL = (DCE_FUNCS | DCE_VARS | DCE_TYPES) - OPT_ALL = (OPT_LOADSTORE) - ALL_BUT_STRIP = (MAP_ALL | DCE_ALL | OPT_ALL) - DO_EVERYTHING = (STRIP | ALL_BUT_STRIP) - diff --git a/deps/glslang/README.md b/deps/glslang/README.md deleted file mode 100644 index c3444467..00000000 --- a/deps/glslang/README.md +++ /dev/null @@ -1,171 +0,0 @@ -Also see the Khronos landing page for glslang as a reference front end: - -https://www.khronos.org/opengles/sdk/tools/Reference-Compiler/ - -The above page includes where to get binaries, and is kept up to date -regarding the feature level of glslang. - -glslang -======= - -An OpenGL and OpenGL ES shader front end and validator. - -There are two components: - -1. A front-end library for programmatic parsing of GLSL/ESSL into an AST. - -2. A standalone wrapper, `glslangValidator`, that can be used as a shader - validation tool. - -How to add a feature protected by a version/extension/stage/profile: See the -comment in `glslang/MachineIndependent/Versions.cpp`. - -Things left to do: See `Todo.txt` - -Execution of Standalone Wrapper -------------------------------- - -To use the standalone binary form, execute `glslangValidator`, and it will print -a usage statement. 
Basic operation is to give it a file containing a shader, -and it will print out warnings/errors and optionally an AST. - -The applied stage-specific rules are based on the file extension: -* `.vert` for a vertex shader -* `.tesc` for a tessellation control shader -* `.tese` for a tessellation evaluation shader -* `.geom` for a geometry shader -* `.frag` for a fragment shader -* `.comp` for a compute shader - -There is also a non-shader extension -* `.conf` for a configuration file of limits, see usage statement for example - -Building --------- - -CMake: The currently maintained and preferred way of building is through CMake. -In MSVC, after running CMake, you may need to use the Configuration Manager to -check the INSTALL project. - -Programmatic Interfaces ------------------------ - -Another piece of software can programmatically translate shaders to an AST -using one of two different interfaces: -* A new C++ class-oriented interface, or -* The original C functional interface - -The `main()` in `StandAlone/StandAlone.cpp` shows examples using both styles. - -### C++ Class Interface (new, preferred) - -This interface is in roughly the last 1/3 of `ShaderLang.h`. It is in the -glslang namespace and contains the following. - -```cxx -const char* GetEsslVersionString(); -const char* GetGlslVersionString(); -bool InitializeProcess(); -void FinalizeProcess(); - -class TShader - bool parse(...); - void setStrings(...); - const char* getInfoLog(); - -class TProgram - void addShader(...); - bool link(...); - const char* getInfoLog(); - Reflection queries -``` - -See `ShaderLang.h` and the usage of it in `StandAlone/StandAlone.cpp` for more -details. - -### C Functional Interface (orginal) - -This interface is in roughly the first 2/3 of `ShaderLang.h`, and referred to -as the `Sh*()` interface, as all the entry points start `Sh`. - -The `Sh*()` interface takes a "compiler" call-back object, which it calls after -building call back that is passed the AST and can then execute a backend on it. - -The following is a simplified resulting run-time call stack: - -```c -ShCompile(shader, compiler) -> compiler(AST) -> -``` - -In practice, `ShCompile()` takes shader strings, default version, and -warning/error and other options for controling compilation. - -Testing -------- - -Test results should always be included with a pull request that modifies -functionality. There is a simple process for doing this, described here: - -`Test` is an active test directory that contains test input and a -subdirectory `baseResults` that contains the expected results of the -tests. Both the tests and `baseResults` are under source-code control. -Executing the script `./runtests` will generate current results in -the `localResults` directory and `diff` them against the `baseResults`. - -When you want to update the tracked test results, they need to be -copied from `localResults` to `baseResults`. This can be done by -the `bump` shell script. - -The list of files tested comes from `testlist`, and lists input shaders -in this directory, which must all be public for this to work. However, -you can add your own private list of tests, not tracked here, by using -`localtestlist` to list non-tracked tests. This is automatically read -by `runtests` and included in the `diff` and `bump` process. - -Basic Internal Operation ------------------------- - -* Initial lexical analysis is done by the preprocessor in - `MachineIndependent/Preprocessor`, and then refined by a GLSL scanner - in `MachineIndependent/Scan.cpp`. 
There is currently no use of flex. - -* Code is parsed using bison on `MachineIndependent/glslang.y` with the - aid of a symbol table and an AST. The symbol table is not passed on to - the back-end; the intermediate representation stands on its own. - The tree is built by the grammar productions, many of which are - offloaded into `ParseHelper.cpp`, and by `Intermediate.cpp`. - -* The intermediate representation is very high-level, and represented - as an in-memory tree. This serves to lose no information from the - original program, and to have efficient transfer of the result from - parsing to the back-end. In the AST, constants are propogated and - folded, and a very small amount of dead code is eliminated. - - To aid linking and reflection, the last top-level branch in the AST - lists all global symbols. - -* The primary algorithm of the back-end compiler is to traverse the - tree (high-level intermediate representation), and create an internal - object code representation. There is an example of how to do this - in `MachineIndependent/intermOut.cpp`. - -* Reduction of the tree to a linear byte-code style low-level intermediate - representation is likely a good way to generate fully optimized code. - -* There is currently some dead old-style linker-type code still lying around. - -* Memory pool: parsing uses types derived from C++ `std` types, using a - custom allocator that puts them in a memory pool. This makes allocation - of individual container/contents just few cycles and deallocation free. - This pool is popped after the AST is made and processed. - - The use is simple: if you are going to call `new`, there are three cases: - - - the object comes from the pool (its base class has the macro - `POOL_ALLOCATOR_NEW_DELETE` in it) and you do not have to call `delete` - - - it is a `TString`, in which case call `NewPoolTString()`, which gets - it from the pool, and there is no corresponding `delete` - - - the object does not come from the pool, and you have to do normal - C++ memory management of what you `new` diff --git a/deps/glslang/SPIRV/CMakeLists.txt b/deps/glslang/SPIRV/CMakeLists.txt deleted file mode 100644 index 246a2d7f..00000000 --- a/deps/glslang/SPIRV/CMakeLists.txt +++ /dev/null @@ -1,60 +0,0 @@ -set(SOURCES - GlslangToSpv.cpp - InReadableOrder.cpp - Logger.cpp - SpvBuilder.cpp - doc.cpp - disassemble.cpp) - -set(SPVREMAP_SOURCES - SPVRemapper.cpp - doc.cpp) - -set(HEADERS - bitutils.h - spirv.hpp - GLSL.std.450.h - GLSL.ext.KHR.h - GlslangToSpv.h - hex_float.h - Logger.h - SpvBuilder.h - spvIR.h - doc.h - disassemble.h) - -set(SPVREMAP_HEADERS - SPVRemapper.h - doc.h) - -if(ENABLE_AMD_EXTENSIONS) - list(APPEND - HEADERS - GLSL.ext.AMD.h) -endif(ENABLE_AMD_EXTENSIONS) - -if(ENABLE_NV_EXTENSIONS) - list(APPEND - HEADERS - GLSL.ext.NV.h) -endif(ENABLE_NV_EXTENSIONS) - -add_library(SPIRV STATIC ${SOURCES} ${HEADERS}) -set_property(TARGET SPIRV PROPERTY FOLDER glslang POSITION_INDEPENDENT_CODE ON) -target_link_libraries(SPIRV glslang) - -add_library(SPVRemapper STATIC ${SPVREMAP_SOURCES} ${SPVREMAP_HEADERS}) -set_property(TARGET SPVRemapper PROPERTY FOLDER glslang POSITION_INDEPENDENT_CODE ON) - -if(WIN32) - source_group("Source" FILES ${SOURCES} ${HEADERS}) - source_group("Source" FILES ${SPVREMAP_SOURCES} ${SPVREMAP_HEADERS}) -endif(WIN32) - -# Disabled for Anvil: -#>> -#install(TARGETS SPIRV SPVRemapper -# ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}) -# -#install(FILES ${HEADERS} ${SPVREMAP_HEADERS} DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/SPIRV/) -#<< diff --git 
a/deps/glslang/SPIRV/GLSL.ext.AMD.h b/deps/glslang/SPIRV/GLSL.ext.AMD.h deleted file mode 100644 index 5121ed99..00000000 --- a/deps/glslang/SPIRV/GLSL.ext.AMD.h +++ /dev/null @@ -1,109 +0,0 @@ -/* -** Copyright (c) 2014-2016 The Khronos Group Inc. -** -** Permission is hereby granted, free of charge, to any person obtaining a copy -** of this software and/or associated documentation files (the "Materials"), -** to deal in the Materials without restriction, including without limitation -** the rights to use, copy, modify, merge, publish, distribute, sublicense, -** and/or sell copies of the Materials, and to permit persons to whom the -** Materials are furnished to do so, subject to the following conditions: -** -** The above copyright notice and this permission notice shall be included in -** all copies or substantial portions of the Materials. -** -** MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS -** STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND -** HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ -** -** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -** OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL -** THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -** FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS -** IN THE MATERIALS. -*/ - -#ifndef GLSLextAMD_H -#define GLSLextAMD_H - -enum BuiltIn; -enum Capability; -enum Decoration; -enum Op; - -static const int GLSLextAMDVersion = 100; -static const int GLSLextAMDRevision = 5; - -// SPV_AMD_shader_ballot -static const char* const E_SPV_AMD_shader_ballot = "SPV_AMD_shader_ballot"; - -enum ShaderBallotAMD { - ShaderBallotBadAMD = 0, // Don't use - - SwizzleInvocationsAMD = 1, - SwizzleInvocationsMaskedAMD = 2, - WriteInvocationAMD = 3, - MbcntAMD = 4, - - ShaderBallotCountAMD -}; - -// SPV_AMD_shader_trinary_minmax -static const char* const E_SPV_AMD_shader_trinary_minmax = "SPV_AMD_shader_trinary_minmax"; - -enum ShaderTrinaryMinMaxAMD { - ShaderTrinaryMinMaxBadAMD = 0, // Don't use - - FMin3AMD = 1, - UMin3AMD = 2, - SMin3AMD = 3, - FMax3AMD = 4, - UMax3AMD = 5, - SMax3AMD = 6, - FMid3AMD = 7, - UMid3AMD = 8, - SMid3AMD = 9, - - ShaderTrinaryMinMaxCountAMD -}; - -// SPV_AMD_shader_explicit_vertex_parameter -static const char* const E_SPV_AMD_shader_explicit_vertex_parameter = "SPV_AMD_shader_explicit_vertex_parameter"; - -enum ShaderExplicitVertexParameterAMD { - ShaderExplicitVertexParameterBadAMD = 0, // Don't use - - InterpolateAtVertexAMD = 1, - - ShaderExplicitVertexParameterCountAMD -}; - -// SPV_AMD_gcn_shader -static const char* const E_SPV_AMD_gcn_shader = "SPV_AMD_gcn_shader"; - -enum GcnShaderAMD { - GcnShaderBadAMD = 0, // Don't use - - CubeFaceIndexAMD = 1, - CubeFaceCoordAMD = 2, - TimeAMD = 3, - - GcnShaderCountAMD -}; - -// SPV_AMD_gpu_shader_half_float -static const char* const E_SPV_AMD_gpu_shader_half_float = "SPV_AMD_gpu_shader_half_float"; - -// SPV_AMD_texture_gather_bias_lod -static const char* const E_SPV_AMD_texture_gather_bias_lod = "SPV_AMD_texture_gather_bias_lod"; - -// SPV_AMD_gpu_shader_int16 -static const char* const E_SPV_AMD_gpu_shader_int16 = "SPV_AMD_gpu_shader_int16"; - -// SPV_AMD_shader_image_load_store_lod -static const char* const 
E_SPV_AMD_shader_image_load_store_lod = "SPV_AMD_shader_image_load_store_lod"; - -static const Capability CapabilityImageReadWriteLodAMD = static_cast(5015); - -#endif // #ifndef GLSLextAMD_H diff --git a/deps/glslang/SPIRV/GLSL.ext.KHR.h b/deps/glslang/SPIRV/GLSL.ext.KHR.h deleted file mode 100644 index 2eb10ae6..00000000 --- a/deps/glslang/SPIRV/GLSL.ext.KHR.h +++ /dev/null @@ -1,48 +0,0 @@ -/* -** Copyright (c) 2014-2016 The Khronos Group Inc. -** -** Permission is hereby granted, free of charge, to any person obtaining a copy -** of this software and/or associated documentation files (the "Materials"), -** to deal in the Materials without restriction, including without limitation -** the rights to use, copy, modify, merge, publish, distribute, sublicense, -** and/or sell copies of the Materials, and to permit persons to whom the -** Materials are furnished to do so, subject to the following conditions: -** -** The above copyright notice and this permission notice shall be included in -** all copies or substantial portions of the Materials. -** -** MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS -** STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND -** HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ -** -** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -** OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL -** THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -** FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS -** IN THE MATERIALS. -*/ - -#ifndef GLSLextKHR_H -#define GLSLextKHR_H - -enum BuiltIn; -enum Op; -enum Capability; - -static const int GLSLextKHRVersion = 100; -static const int GLSLextKHRRevision = 2; - -static const char* const E_SPV_KHR_shader_ballot = "SPV_KHR_shader_ballot"; -static const char* const E_SPV_KHR_subgroup_vote = "SPV_KHR_subgroup_vote"; -static const char* const E_SPV_KHR_device_group = "SPV_KHR_device_group"; -static const char* const E_SPV_KHR_multiview = "SPV_KHR_multiview"; -static const char* const E_SPV_KHR_shader_draw_parameters = "SPV_KHR_shader_draw_parameters"; -static const char* const E_SPV_KHR_16bit_storage = "SPV_KHR_16bit_storage"; -static const char* const E_SPV_KHR_storage_buffer_storage_class = "SPV_KHR_storage_buffer_storage_class"; -static const char* const E_SPV_KHR_post_depth_coverage = "SPV_KHR_post_depth_coverage"; -static const char* const E_SPV_EXT_shader_stencil_export = "SPV_EXT_shader_stencil_export"; -static const char* const E_SPV_EXT_shader_viewport_index_layer = "SPV_EXT_shader_viewport_index_layer"; - -#endif // #ifndef GLSLextKHR_H diff --git a/deps/glslang/SPIRV/GLSL.ext.NV.h b/deps/glslang/SPIRV/GLSL.ext.NV.h deleted file mode 100644 index c01858be..00000000 --- a/deps/glslang/SPIRV/GLSL.ext.NV.h +++ /dev/null @@ -1,54 +0,0 @@ -/* -** Copyright (c) 2014-2017 The Khronos Group Inc. 
-** -** Permission is hereby granted, free of charge, to any person obtaining a copy -** of this software and/or associated documentation files (the "Materials"), -** to deal in the Materials without restriction, including without limitation -** the rights to use, copy, modify, merge, publish, distribute, sublicense, -** and/or sell copies of the Materials, and to permit persons to whom the -** Materials are furnished to do so, subject to the following conditions: -** -** The above copyright notice and this permission notice shall be included in -** all copies or substantial portions of the Materials. -** -** MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS -** STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND -** HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ -** -** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -** OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL -** THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -** FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS -** IN THE MATERIALS. -*/ - -#ifndef GLSLextNV_H -#define GLSLextNV_H - -enum BuiltIn; -enum Decoration; -enum Op; -enum Capability; - -static const int GLSLextNVVersion = 100; -static const int GLSLextNVRevision = 5; - -//SPV_NV_sample_mask_override_coverage -const char* const E_SPV_NV_sample_mask_override_coverage = "SPV_NV_sample_mask_override_coverage"; - -//SPV_NV_geometry_shader_passthrough -const char* const E_SPV_NV_geometry_shader_passthrough = "SPV_NV_geometry_shader_passthrough"; - -//SPV_NV_viewport_array2 -const char* const E_SPV_NV_viewport_array2 = "SPV_NV_viewport_array2"; -const char* const E_ARB_shader_viewport_layer_array = "SPV_ARB_shader_viewport_layer_array"; - -//SPV_NV_stereo_view_rendering -const char* const E_SPV_NV_stereo_view_rendering = "SPV_NV_stereo_view_rendering"; - -//SPV_NVX_multiview_per_view_attributes -const char* const E_SPV_NVX_multiview_per_view_attributes = "SPV_NVX_multiview_per_view_attributes"; - -#endif // #ifndef GLSLextNV_H \ No newline at end of file diff --git a/deps/glslang/SPIRV/GLSL.std.450.h b/deps/glslang/SPIRV/GLSL.std.450.h deleted file mode 100644 index df31092b..00000000 --- a/deps/glslang/SPIRV/GLSL.std.450.h +++ /dev/null @@ -1,131 +0,0 @@ -/* -** Copyright (c) 2014-2016 The Khronos Group Inc. -** -** Permission is hereby granted, free of charge, to any person obtaining a copy -** of this software and/or associated documentation files (the "Materials"), -** to deal in the Materials without restriction, including without limitation -** the rights to use, copy, modify, merge, publish, distribute, sublicense, -** and/or sell copies of the Materials, and to permit persons to whom the -** Materials are furnished to do so, subject to the following conditions: -** -** The above copyright notice and this permission notice shall be included in -** all copies or substantial portions of the Materials. -** -** MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS -** STANDARDS. 
THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND -** HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ -** -** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -** OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL -** THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -** FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS -** IN THE MATERIALS. -*/ - -#ifndef GLSLstd450_H -#define GLSLstd450_H - -static const int GLSLstd450Version = 100; -static const int GLSLstd450Revision = 1; - -enum GLSLstd450 { - GLSLstd450Bad = 0, // Don't use - - GLSLstd450Round = 1, - GLSLstd450RoundEven = 2, - GLSLstd450Trunc = 3, - GLSLstd450FAbs = 4, - GLSLstd450SAbs = 5, - GLSLstd450FSign = 6, - GLSLstd450SSign = 7, - GLSLstd450Floor = 8, - GLSLstd450Ceil = 9, - GLSLstd450Fract = 10, - - GLSLstd450Radians = 11, - GLSLstd450Degrees = 12, - GLSLstd450Sin = 13, - GLSLstd450Cos = 14, - GLSLstd450Tan = 15, - GLSLstd450Asin = 16, - GLSLstd450Acos = 17, - GLSLstd450Atan = 18, - GLSLstd450Sinh = 19, - GLSLstd450Cosh = 20, - GLSLstd450Tanh = 21, - GLSLstd450Asinh = 22, - GLSLstd450Acosh = 23, - GLSLstd450Atanh = 24, - GLSLstd450Atan2 = 25, - - GLSLstd450Pow = 26, - GLSLstd450Exp = 27, - GLSLstd450Log = 28, - GLSLstd450Exp2 = 29, - GLSLstd450Log2 = 30, - GLSLstd450Sqrt = 31, - GLSLstd450InverseSqrt = 32, - - GLSLstd450Determinant = 33, - GLSLstd450MatrixInverse = 34, - - GLSLstd450Modf = 35, // second operand needs an OpVariable to write to - GLSLstd450ModfStruct = 36, // no OpVariable operand - GLSLstd450FMin = 37, - GLSLstd450UMin = 38, - GLSLstd450SMin = 39, - GLSLstd450FMax = 40, - GLSLstd450UMax = 41, - GLSLstd450SMax = 42, - GLSLstd450FClamp = 43, - GLSLstd450UClamp = 44, - GLSLstd450SClamp = 45, - GLSLstd450FMix = 46, - GLSLstd450IMix = 47, // Reserved - GLSLstd450Step = 48, - GLSLstd450SmoothStep = 49, - - GLSLstd450Fma = 50, - GLSLstd450Frexp = 51, // second operand needs an OpVariable to write to - GLSLstd450FrexpStruct = 52, // no OpVariable operand - GLSLstd450Ldexp = 53, - - GLSLstd450PackSnorm4x8 = 54, - GLSLstd450PackUnorm4x8 = 55, - GLSLstd450PackSnorm2x16 = 56, - GLSLstd450PackUnorm2x16 = 57, - GLSLstd450PackHalf2x16 = 58, - GLSLstd450PackDouble2x32 = 59, - GLSLstd450UnpackSnorm2x16 = 60, - GLSLstd450UnpackUnorm2x16 = 61, - GLSLstd450UnpackHalf2x16 = 62, - GLSLstd450UnpackSnorm4x8 = 63, - GLSLstd450UnpackUnorm4x8 = 64, - GLSLstd450UnpackDouble2x32 = 65, - - GLSLstd450Length = 66, - GLSLstd450Distance = 67, - GLSLstd450Cross = 68, - GLSLstd450Normalize = 69, - GLSLstd450FaceForward = 70, - GLSLstd450Reflect = 71, - GLSLstd450Refract = 72, - - GLSLstd450FindILsb = 73, - GLSLstd450FindSMsb = 74, - GLSLstd450FindUMsb = 75, - - GLSLstd450InterpolateAtCentroid = 76, - GLSLstd450InterpolateAtSample = 77, - GLSLstd450InterpolateAtOffset = 78, - - GLSLstd450NMin = 79, - GLSLstd450NMax = 80, - GLSLstd450NClamp = 81, - - GLSLstd450Count -}; - -#endif // #ifndef GLSLstd450_H diff --git a/deps/glslang/SPIRV/GlslangToSpv.cpp b/deps/glslang/SPIRV/GlslangToSpv.cpp deleted file mode 100644 index b6c9705d..00000000 --- a/deps/glslang/SPIRV/GlslangToSpv.cpp +++ /dev/null @@ -1,5958 +0,0 @@ -// -// Copyright (C) 2014-2016 LunarG, Inc. -// Copyright (C) 2015-2016 Google, Inc. -// -// All rights reserved. 
-// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. - -// -// Visit the nodes in the glslang intermediate tree representation to -// translate them to SPIR-V. -// - -#include "spirv.hpp" -#include "GlslangToSpv.h" -#include "SpvBuilder.h" -namespace spv { - #include "GLSL.std.450.h" - #include "GLSL.ext.KHR.h" -#ifdef AMD_EXTENSIONS - #include "GLSL.ext.AMD.h" -#endif -#ifdef NV_EXTENSIONS - #include "GLSL.ext.NV.h" -#endif -} - -// Glslang includes -#include "../glslang/MachineIndependent/localintermediate.h" -#include "../glslang/MachineIndependent/SymbolTable.h" -#include "../glslang/Include/Common.h" -#include "../glslang/Include/revision.h" - -#include -#include -#include -#include -#include -#include -#include - -namespace { - -// For low-order part of the generator's magic number. Bump up -// when there is a change in the style (e.g., if SSA form changes, -// or a different instruction sequence to do something gets used). -const int GeneratorVersion = 1; - -namespace { -class SpecConstantOpModeGuard { -public: - SpecConstantOpModeGuard(spv::Builder* builder) - : builder_(builder) { - previous_flag_ = builder->isInSpecConstCodeGenMode(); - } - ~SpecConstantOpModeGuard() { - previous_flag_ ? builder_->setToSpecConstCodeGenMode() - : builder_->setToNormalCodeGenMode(); - } - void turnOnSpecConstantOpMode() { - builder_->setToSpecConstCodeGenMode(); - } - -private: - spv::Builder* builder_; - bool previous_flag_; -}; -} - -// -// The main holder of information for translating glslang to SPIR-V. -// -// Derives from the AST walking base class. 
-// -class TGlslangToSpvTraverser : public glslang::TIntermTraverser { -public: - TGlslangToSpvTraverser(const glslang::TIntermediate*, spv::SpvBuildLogger* logger, glslang::SpvOptions& options); - virtual ~TGlslangToSpvTraverser() { } - - bool visitAggregate(glslang::TVisit, glslang::TIntermAggregate*); - bool visitBinary(glslang::TVisit, glslang::TIntermBinary*); - void visitConstantUnion(glslang::TIntermConstantUnion*); - bool visitSelection(glslang::TVisit, glslang::TIntermSelection*); - bool visitSwitch(glslang::TVisit, glslang::TIntermSwitch*); - void visitSymbol(glslang::TIntermSymbol* symbol); - bool visitUnary(glslang::TVisit, glslang::TIntermUnary*); - bool visitLoop(glslang::TVisit, glslang::TIntermLoop*); - bool visitBranch(glslang::TVisit visit, glslang::TIntermBranch*); - - void finishSpv(); - void dumpSpv(std::vector& out); - -protected: - spv::Decoration TranslateInterpolationDecoration(const glslang::TQualifier& qualifier); - spv::Decoration TranslateAuxiliaryStorageDecoration(const glslang::TQualifier& qualifier); - spv::BuiltIn TranslateBuiltInDecoration(glslang::TBuiltInVariable, bool memberDeclaration); - spv::ImageFormat TranslateImageFormat(const glslang::TType& type); - spv::SelectionControlMask TranslateSelectionControl(glslang::TSelectionControl) const; - spv::LoopControlMask TranslateLoopControl(glslang::TLoopControl) const; - spv::StorageClass TranslateStorageClass(const glslang::TType&); - spv::Id createSpvVariable(const glslang::TIntermSymbol*); - spv::Id getSampledType(const glslang::TSampler&); - spv::Id getInvertedSwizzleType(const glslang::TIntermTyped&); - spv::Id createInvertedSwizzle(spv::Decoration precision, const glslang::TIntermTyped&, spv::Id parentResult); - void convertSwizzle(const glslang::TIntermAggregate&, std::vector& swizzle); - spv::Id convertGlslangToSpvType(const glslang::TType& type); - spv::Id convertGlslangToSpvType(const glslang::TType& type, glslang::TLayoutPacking, const glslang::TQualifier&); - bool filterMember(const glslang::TType& member); - spv::Id convertGlslangStructToSpvType(const glslang::TType&, const glslang::TTypeList* glslangStruct, - glslang::TLayoutPacking, const glslang::TQualifier&); - void decorateStructType(const glslang::TType&, const glslang::TTypeList* glslangStruct, glslang::TLayoutPacking, - const glslang::TQualifier&, spv::Id); - spv::Id makeArraySizeId(const glslang::TArraySizes&, int dim); - spv::Id accessChainLoad(const glslang::TType& type); - void accessChainStore(const glslang::TType& type, spv::Id rvalue); - void multiTypeStore(const glslang::TType&, spv::Id rValue); - glslang::TLayoutPacking getExplicitLayout(const glslang::TType& type) const; - int getArrayStride(const glslang::TType& arrayType, glslang::TLayoutPacking, glslang::TLayoutMatrix); - int getMatrixStride(const glslang::TType& matrixType, glslang::TLayoutPacking, glslang::TLayoutMatrix); - void updateMemberOffset(const glslang::TType& structType, const glslang::TType& memberType, int& currentOffset, int& nextOffset, glslang::TLayoutPacking, glslang::TLayoutMatrix); - void declareUseOfStructMember(const glslang::TTypeList& members, int glslangMember); - - bool isShaderEntryPoint(const glslang::TIntermAggregate* node); - void makeFunctions(const glslang::TIntermSequence&); - void makeGlobalInitializers(const glslang::TIntermSequence&); - void visitFunctions(const glslang::TIntermSequence&); - void handleFunctionEntry(const glslang::TIntermAggregate* node); - void translateArguments(const glslang::TIntermAggregate& node, std::vector& 
arguments); - void translateArguments(glslang::TIntermUnary& node, std::vector& arguments); - spv::Id createImageTextureFunctionCall(glslang::TIntermOperator* node); - spv::Id handleUserFunctionCall(const glslang::TIntermAggregate*); - - spv::Id createBinaryOperation(glslang::TOperator op, spv::Decoration precision, spv::Decoration noContraction, spv::Id typeId, spv::Id left, spv::Id right, glslang::TBasicType typeProxy, bool reduceComparison = true); - spv::Id createBinaryMatrixOperation(spv::Op, spv::Decoration precision, spv::Decoration noContraction, spv::Id typeId, spv::Id left, spv::Id right); - spv::Id createUnaryOperation(glslang::TOperator op, spv::Decoration precision, spv::Decoration noContraction, spv::Id typeId, spv::Id operand,glslang::TBasicType typeProxy); - spv::Id createUnaryMatrixOperation(spv::Op op, spv::Decoration precision, spv::Decoration noContraction, spv::Id typeId, spv::Id operand,glslang::TBasicType typeProxy); - spv::Id createConversion(glslang::TOperator op, spv::Decoration precision, spv::Decoration noContraction, spv::Id destTypeId, spv::Id operand, glslang::TBasicType typeProxy); - spv::Id makeSmearedConstant(spv::Id constant, int vectorSize); - spv::Id createAtomicOperation(glslang::TOperator op, spv::Decoration precision, spv::Id typeId, std::vector& operands, glslang::TBasicType typeProxy); - spv::Id createInvocationsOperation(glslang::TOperator op, spv::Id typeId, std::vector& operands, glslang::TBasicType typeProxy); - spv::Id CreateInvocationsVectorOperation(spv::Op op, spv::GroupOperation groupOperation, spv::Id typeId, std::vector& operands); - spv::Id createMiscOperation(glslang::TOperator op, spv::Decoration precision, spv::Id typeId, std::vector& operands, glslang::TBasicType typeProxy); - spv::Id createNoArgOperation(glslang::TOperator op, spv::Decoration precision, spv::Id typeId); - spv::Id getSymbolId(const glslang::TIntermSymbol* node); - void addDecoration(spv::Id id, spv::Decoration dec); - void addDecoration(spv::Id id, spv::Decoration dec, unsigned value); - void addMemberDecoration(spv::Id id, int member, spv::Decoration dec); - void addMemberDecoration(spv::Id id, int member, spv::Decoration dec, unsigned value); - spv::Id createSpvConstant(const glslang::TIntermTyped&); - spv::Id createSpvConstantFromConstUnionArray(const glslang::TType& type, const glslang::TConstUnionArray&, int& nextConst, bool specConstant); - bool isTrivialLeaf(const glslang::TIntermTyped* node); - bool isTrivial(const glslang::TIntermTyped* node); - spv::Id createShortCircuit(glslang::TOperator, glslang::TIntermTyped& left, glslang::TIntermTyped& right); - spv::Id getExtBuiltins(const char* name); - - glslang::SpvOptions& options; - spv::Function* shaderEntry; - spv::Function* currentFunction; - spv::Instruction* entryPoint; - int sequenceDepth; - - spv::SpvBuildLogger* logger; - - // There is a 1:1 mapping between a spv builder and a module; this is thread safe - spv::Builder builder; - bool inEntryPoint; - bool entryPointTerminated; - bool linkageOnly; // true when visiting the set of objects in the AST present only for establishing interface, whether or not they were statically used - std::set iOSet; // all input/output variables from either static use or declaration of interface - const glslang::TIntermediate* glslangIntermediate; - spv::Id stdBuiltins; - std::unordered_map extBuiltinMap; - - std::unordered_map symbolValues; - std::unordered_set rValueParameters; // set of formal function parameters passed as rValues, rather than a pointer - 
std::unordered_map<std::string, spv::Function*> functionMap; - std::unordered_map<const glslang::TTypeList*, spv::Id> structMap[glslang::ElpCount][glslang::ElmCount]; - std::unordered_map<const glslang::TTypeList*, std::vector<int> > memberRemapper; // for mapping glslang block indices to spv indices (e.g., due to hidden members) - std::stack<bool> breakForLoop; // false means break for switch -}; - -// -// Helper functions for translating glslang representations to SPIR-V enumerants. -// - -// Translate glslang profile to SPIR-V source language. -spv::SourceLanguage TranslateSourceLanguage(glslang::EShSource source, EProfile profile) -{ - switch (source) { - case glslang::EShSourceGlsl: - switch (profile) { - case ENoProfile: - case ECoreProfile: - case ECompatibilityProfile: - return spv::SourceLanguageGLSL; - case EEsProfile: - return spv::SourceLanguageESSL; - default: - return spv::SourceLanguageUnknown; - } - case glslang::EShSourceHlsl: - return spv::SourceLanguageHLSL; - default: - return spv::SourceLanguageUnknown; - } -} - -// Translate glslang language (stage) to SPIR-V execution model. -spv::ExecutionModel TranslateExecutionModel(EShLanguage stage) -{ - switch (stage) { - case EShLangVertex: return spv::ExecutionModelVertex; - case EShLangTessControl: return spv::ExecutionModelTessellationControl; - case EShLangTessEvaluation: return spv::ExecutionModelTessellationEvaluation; - case EShLangGeometry: return spv::ExecutionModelGeometry; - case EShLangFragment: return spv::ExecutionModelFragment; - case EShLangCompute: return spv::ExecutionModelGLCompute; - default: - assert(0); - return spv::ExecutionModelFragment; - } -} - -// Translate glslang sampler type to SPIR-V dimensionality. -spv::Dim TranslateDimensionality(const glslang::TSampler& sampler) -{ - switch (sampler.dim) { - case glslang::Esd1D: return spv::Dim1D; - case glslang::Esd2D: return spv::Dim2D; - case glslang::Esd3D: return spv::Dim3D; - case glslang::EsdCube: return spv::DimCube; - case glslang::EsdRect: return spv::DimRect; - case glslang::EsdBuffer: return spv::DimBuffer; - case glslang::EsdSubpass: return spv::DimSubpassData; - default: - assert(0); - return spv::Dim2D; - } -} - -// Translate glslang precision to SPIR-V precision decorations. -spv::Decoration TranslatePrecisionDecoration(glslang::TPrecisionQualifier glslangPrecision) -{ - switch (glslangPrecision) { - case glslang::EpqLow: return spv::DecorationRelaxedPrecision; - case glslang::EpqMedium: return spv::DecorationRelaxedPrecision; - default: - return spv::NoPrecision; - } -} - -// Translate glslang type to SPIR-V precision decorations. -spv::Decoration TranslatePrecisionDecoration(const glslang::TType& type) -{ - return TranslatePrecisionDecoration(type.getQualifier().precision); -} - -// Translate glslang type to SPIR-V block decorations. -spv::Decoration TranslateBlockDecoration(const glslang::TType& type, bool useStorageBuffer) -{ - if (type.getBasicType() == glslang::EbtBlock) { - switch (type.getQualifier().storage) { - case glslang::EvqUniform: return spv::DecorationBlock; - case glslang::EvqBuffer: return useStorageBuffer ? spv::DecorationBlock : spv::DecorationBufferBlock; - case glslang::EvqVaryingIn: return spv::DecorationBlock; - case glslang::EvqVaryingOut: return spv::DecorationBlock; - default: - assert(0); - break; - } - } - - return spv::DecorationMax; -} - -// Translate glslang type to SPIR-V memory decorations.
-void TranslateMemoryDecoration(const glslang::TQualifier& qualifier, std::vector& memory) -{ - if (qualifier.coherent) - memory.push_back(spv::DecorationCoherent); - if (qualifier.volatil) - memory.push_back(spv::DecorationVolatile); - if (qualifier.restrict) - memory.push_back(spv::DecorationRestrict); - if (qualifier.readonly) - memory.push_back(spv::DecorationNonWritable); - if (qualifier.writeonly) - memory.push_back(spv::DecorationNonReadable); -} - -// Translate glslang type to SPIR-V layout decorations. -spv::Decoration TranslateLayoutDecoration(const glslang::TType& type, glslang::TLayoutMatrix matrixLayout) -{ - if (type.isMatrix()) { - switch (matrixLayout) { - case glslang::ElmRowMajor: - return spv::DecorationRowMajor; - case glslang::ElmColumnMajor: - return spv::DecorationColMajor; - default: - // opaque layouts don't need a majorness - return spv::DecorationMax; - } - } else { - switch (type.getBasicType()) { - default: - return spv::DecorationMax; - break; - case glslang::EbtBlock: - switch (type.getQualifier().storage) { - case glslang::EvqUniform: - case glslang::EvqBuffer: - switch (type.getQualifier().layoutPacking) { - case glslang::ElpShared: return spv::DecorationGLSLShared; - case glslang::ElpPacked: return spv::DecorationGLSLPacked; - default: - return spv::DecorationMax; - } - case glslang::EvqVaryingIn: - case glslang::EvqVaryingOut: - assert(type.getQualifier().layoutPacking == glslang::ElpNone); - return spv::DecorationMax; - default: - assert(0); - return spv::DecorationMax; - } - } - } -} - -// Translate glslang type to SPIR-V interpolation decorations. -// Returns spv::DecorationMax when no decoration -// should be applied. -spv::Decoration TGlslangToSpvTraverser::TranslateInterpolationDecoration(const glslang::TQualifier& qualifier) -{ - if (qualifier.smooth) - // Smooth decoration doesn't exist in SPIR-V 1.0 - return spv::DecorationMax; - else if (qualifier.nopersp) - return spv::DecorationNoPerspective; - else if (qualifier.flat) - return spv::DecorationFlat; -#ifdef AMD_EXTENSIONS - else if (qualifier.explicitInterp) { - builder.addExtension(spv::E_SPV_AMD_shader_explicit_vertex_parameter); - return spv::DecorationExplicitInterpAMD; - } -#endif - else - return spv::DecorationMax; -} - -// Translate glslang type to SPIR-V auxiliary storage decorations. -// Returns spv::DecorationMax when no decoration -// should be applied. -spv::Decoration TGlslangToSpvTraverser::TranslateAuxiliaryStorageDecoration(const glslang::TQualifier& qualifier) -{ - if (qualifier.patch) - return spv::DecorationPatch; - else if (qualifier.centroid) - return spv::DecorationCentroid; - else if (qualifier.sample) { - builder.addCapability(spv::CapabilitySampleRateShading); - return spv::DecorationSample; - } else - return spv::DecorationMax; -} - -// If glslang type is invariant, return SPIR-V invariant decoration. -spv::Decoration TranslateInvariantDecoration(const glslang::TQualifier& qualifier) -{ - if (qualifier.invariant) - return spv::DecorationInvariant; - else - return spv::DecorationMax; -} - -// If glslang type is noContraction, return SPIR-V NoContraction decoration. -spv::Decoration TranslateNoContractionDecoration(const glslang::TQualifier& qualifier) -{ - if (qualifier.noContraction) - return spv::DecorationNoContraction; - else - return spv::DecorationMax; -} - -// Translate a glslang built-in variable to a SPIR-V built in decoration. Also generate -// associated capabilities when required. 
For some built-in variables, a capability -// is generated only when using the variable in an executable instruction, but not when -// just declaring a struct member variable with it. This is true for PointSize, -// ClipDistance, and CullDistance. -spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltInVariable builtIn, bool memberDeclaration) -{ - switch (builtIn) { - case glslang::EbvPointSize: - // Defer adding the capability until the built-in is actually used. - if (! memberDeclaration) { - switch (glslangIntermediate->getStage()) { - case EShLangGeometry: - builder.addCapability(spv::CapabilityGeometryPointSize); - break; - case EShLangTessControl: - case EShLangTessEvaluation: - builder.addCapability(spv::CapabilityTessellationPointSize); - break; - default: - break; - } - } - return spv::BuiltInPointSize; - - // These *Distance capabilities logically belong here, but if the member is declared and - // then never used, consumers of SPIR-V prefer the capability not be declared. - // They are now generated when used, rather than here when declared. - // Potentially, the specification should be more clear what the minimum - // use needed is to trigger the capability. - // - case glslang::EbvClipDistance: - if (!memberDeclaration) - builder.addCapability(spv::CapabilityClipDistance); - return spv::BuiltInClipDistance; - - case glslang::EbvCullDistance: - if (!memberDeclaration) - builder.addCapability(spv::CapabilityCullDistance); - return spv::BuiltInCullDistance; - - case glslang::EbvViewportIndex: - if (!memberDeclaration) { - builder.addCapability(spv::CapabilityMultiViewport); - if (glslangIntermediate->getStage() == EShLangVertex || - glslangIntermediate->getStage() == EShLangTessControl || - glslangIntermediate->getStage() == EShLangTessEvaluation) { - - builder.addExtension(spv::E_SPV_EXT_shader_viewport_index_layer); - builder.addCapability(spv::CapabilityShaderViewportIndexLayerEXT); - } - } - return spv::BuiltInViewportIndex; - - case glslang::EbvSampleId: - builder.addCapability(spv::CapabilitySampleRateShading); - return spv::BuiltInSampleId; - - case glslang::EbvSamplePosition: - builder.addCapability(spv::CapabilitySampleRateShading); - return spv::BuiltInSamplePosition; - - case glslang::EbvSampleMask: - builder.addCapability(spv::CapabilitySampleRateShading); - return spv::BuiltInSampleMask; - - case glslang::EbvLayer: - if (!memberDeclaration) { - builder.addCapability(spv::CapabilityGeometry); - if (glslangIntermediate->getStage() == EShLangVertex || - glslangIntermediate->getStage() == EShLangTessControl || - glslangIntermediate->getStage() == EShLangTessEvaluation) { - - builder.addExtension(spv::E_SPV_EXT_shader_viewport_index_layer); - builder.addCapability(spv::CapabilityShaderViewportIndexLayerEXT); - } - } - - return spv::BuiltInLayer; - - case glslang::EbvPosition: return spv::BuiltInPosition; - case glslang::EbvVertexId: return spv::BuiltInVertexId; - case glslang::EbvInstanceId: return spv::BuiltInInstanceId; - case glslang::EbvVertexIndex: return spv::BuiltInVertexIndex; - case glslang::EbvInstanceIndex: return spv::BuiltInInstanceIndex; - - case glslang::EbvBaseVertex: - builder.addExtension(spv::E_SPV_KHR_shader_draw_parameters); - builder.addCapability(spv::CapabilityDrawParameters); - return spv::BuiltInBaseVertex; - - case glslang::EbvBaseInstance: - builder.addExtension(spv::E_SPV_KHR_shader_draw_parameters); - builder.addCapability(spv::CapabilityDrawParameters); - return spv::BuiltInBaseInstance; - - case 
glslang::EbvDrawId: - builder.addExtension(spv::E_SPV_KHR_shader_draw_parameters); - builder.addCapability(spv::CapabilityDrawParameters); - return spv::BuiltInDrawIndex; - - case glslang::EbvPrimitiveId: - if (glslangIntermediate->getStage() == EShLangFragment) - builder.addCapability(spv::CapabilityGeometry); - return spv::BuiltInPrimitiveId; - - case glslang::EbvFragStencilRef: - builder.addExtension(spv::E_SPV_EXT_shader_stencil_export); - builder.addCapability(spv::CapabilityStencilExportEXT); - return spv::BuiltInFragStencilRefEXT; - - case glslang::EbvInvocationId: return spv::BuiltInInvocationId; - case glslang::EbvTessLevelInner: return spv::BuiltInTessLevelInner; - case glslang::EbvTessLevelOuter: return spv::BuiltInTessLevelOuter; - case glslang::EbvTessCoord: return spv::BuiltInTessCoord; - case glslang::EbvPatchVertices: return spv::BuiltInPatchVertices; - case glslang::EbvFragCoord: return spv::BuiltInFragCoord; - case glslang::EbvPointCoord: return spv::BuiltInPointCoord; - case glslang::EbvFace: return spv::BuiltInFrontFacing; - case glslang::EbvFragDepth: return spv::BuiltInFragDepth; - case glslang::EbvHelperInvocation: return spv::BuiltInHelperInvocation; - case glslang::EbvNumWorkGroups: return spv::BuiltInNumWorkgroups; - case glslang::EbvWorkGroupSize: return spv::BuiltInWorkgroupSize; - case glslang::EbvWorkGroupId: return spv::BuiltInWorkgroupId; - case glslang::EbvLocalInvocationId: return spv::BuiltInLocalInvocationId; - case glslang::EbvLocalInvocationIndex: return spv::BuiltInLocalInvocationIndex; - case glslang::EbvGlobalInvocationId: return spv::BuiltInGlobalInvocationId; - - case glslang::EbvSubGroupSize: - builder.addExtension(spv::E_SPV_KHR_shader_ballot); - builder.addCapability(spv::CapabilitySubgroupBallotKHR); - return spv::BuiltInSubgroupSize; - - case glslang::EbvSubGroupInvocation: - builder.addExtension(spv::E_SPV_KHR_shader_ballot); - builder.addCapability(spv::CapabilitySubgroupBallotKHR); - return spv::BuiltInSubgroupLocalInvocationId; - - case glslang::EbvSubGroupEqMask: - builder.addExtension(spv::E_SPV_KHR_shader_ballot); - builder.addCapability(spv::CapabilitySubgroupBallotKHR); - return spv::BuiltInSubgroupEqMaskKHR; - - case glslang::EbvSubGroupGeMask: - builder.addExtension(spv::E_SPV_KHR_shader_ballot); - builder.addCapability(spv::CapabilitySubgroupBallotKHR); - return spv::BuiltInSubgroupGeMaskKHR; - - case glslang::EbvSubGroupGtMask: - builder.addExtension(spv::E_SPV_KHR_shader_ballot); - builder.addCapability(spv::CapabilitySubgroupBallotKHR); - return spv::BuiltInSubgroupGtMaskKHR; - - case glslang::EbvSubGroupLeMask: - builder.addExtension(spv::E_SPV_KHR_shader_ballot); - builder.addCapability(spv::CapabilitySubgroupBallotKHR); - return spv::BuiltInSubgroupLeMaskKHR; - - case glslang::EbvSubGroupLtMask: - builder.addExtension(spv::E_SPV_KHR_shader_ballot); - builder.addCapability(spv::CapabilitySubgroupBallotKHR); - return spv::BuiltInSubgroupLtMaskKHR; - -#ifdef AMD_EXTENSIONS - case glslang::EbvBaryCoordNoPersp: - builder.addExtension(spv::E_SPV_AMD_shader_explicit_vertex_parameter); - return spv::BuiltInBaryCoordNoPerspAMD; - - case glslang::EbvBaryCoordNoPerspCentroid: - builder.addExtension(spv::E_SPV_AMD_shader_explicit_vertex_parameter); - return spv::BuiltInBaryCoordNoPerspCentroidAMD; - - case glslang::EbvBaryCoordNoPerspSample: - builder.addExtension(spv::E_SPV_AMD_shader_explicit_vertex_parameter); - return spv::BuiltInBaryCoordNoPerspSampleAMD; - - case glslang::EbvBaryCoordSmooth: - 
builder.addExtension(spv::E_SPV_AMD_shader_explicit_vertex_parameter); - return spv::BuiltInBaryCoordSmoothAMD; - - case glslang::EbvBaryCoordSmoothCentroid: - builder.addExtension(spv::E_SPV_AMD_shader_explicit_vertex_parameter); - return spv::BuiltInBaryCoordSmoothCentroidAMD; - - case glslang::EbvBaryCoordSmoothSample: - builder.addExtension(spv::E_SPV_AMD_shader_explicit_vertex_parameter); - return spv::BuiltInBaryCoordSmoothSampleAMD; - - case glslang::EbvBaryCoordPullModel: - builder.addExtension(spv::E_SPV_AMD_shader_explicit_vertex_parameter); - return spv::BuiltInBaryCoordPullModelAMD; -#endif - - case glslang::EbvDeviceIndex: - builder.addExtension(spv::E_SPV_KHR_device_group); - builder.addCapability(spv::CapabilityDeviceGroup); - return spv::BuiltInDeviceIndex; - - case glslang::EbvViewIndex: - builder.addExtension(spv::E_SPV_KHR_multiview); - builder.addCapability(spv::CapabilityMultiView); - return spv::BuiltInViewIndex; - -#ifdef NV_EXTENSIONS - case glslang::EbvViewportMaskNV: - if (!memberDeclaration) { - builder.addExtension(spv::E_SPV_NV_viewport_array2); - builder.addCapability(spv::CapabilityShaderViewportMaskNV); - } - return spv::BuiltInViewportMaskNV; - case glslang::EbvSecondaryPositionNV: - if (!memberDeclaration) { - builder.addExtension(spv::E_SPV_NV_stereo_view_rendering); - builder.addCapability(spv::CapabilityShaderStereoViewNV); - } - return spv::BuiltInSecondaryPositionNV; - case glslang::EbvSecondaryViewportMaskNV: - if (!memberDeclaration) { - builder.addExtension(spv::E_SPV_NV_stereo_view_rendering); - builder.addCapability(spv::CapabilityShaderStereoViewNV); - } - return spv::BuiltInSecondaryViewportMaskNV; - case glslang::EbvPositionPerViewNV: - if (!memberDeclaration) { - builder.addExtension(spv::E_SPV_NVX_multiview_per_view_attributes); - builder.addCapability(spv::CapabilityPerViewAttributesNV); - } - return spv::BuiltInPositionPerViewNV; - case glslang::EbvViewportMaskPerViewNV: - if (!memberDeclaration) { - builder.addExtension(spv::E_SPV_NVX_multiview_per_view_attributes); - builder.addCapability(spv::CapabilityPerViewAttributesNV); - } - return spv::BuiltInViewportMaskPerViewNV; -#endif - default: - return spv::BuiltInMax; - } -} - -// Translate glslang image layout format to SPIR-V image format. 
-spv::ImageFormat TGlslangToSpvTraverser::TranslateImageFormat(const glslang::TType& type) -{ - assert(type.getBasicType() == glslang::EbtSampler); - - // Check for capabilities - switch (type.getQualifier().layoutFormat) { - case glslang::ElfRg32f: - case glslang::ElfRg16f: - case glslang::ElfR11fG11fB10f: - case glslang::ElfR16f: - case glslang::ElfRgba16: - case glslang::ElfRgb10A2: - case glslang::ElfRg16: - case glslang::ElfRg8: - case glslang::ElfR16: - case glslang::ElfR8: - case glslang::ElfRgba16Snorm: - case glslang::ElfRg16Snorm: - case glslang::ElfRg8Snorm: - case glslang::ElfR16Snorm: - case glslang::ElfR8Snorm: - - case glslang::ElfRg32i: - case glslang::ElfRg16i: - case glslang::ElfRg8i: - case glslang::ElfR16i: - case glslang::ElfR8i: - - case glslang::ElfRgb10a2ui: - case glslang::ElfRg32ui: - case glslang::ElfRg16ui: - case glslang::ElfRg8ui: - case glslang::ElfR16ui: - case glslang::ElfR8ui: - builder.addCapability(spv::CapabilityStorageImageExtendedFormats); - break; - - default: - break; - } - - // do the translation - switch (type.getQualifier().layoutFormat) { - case glslang::ElfNone: return spv::ImageFormatUnknown; - case glslang::ElfRgba32f: return spv::ImageFormatRgba32f; - case glslang::ElfRgba16f: return spv::ImageFormatRgba16f; - case glslang::ElfR32f: return spv::ImageFormatR32f; - case glslang::ElfRgba8: return spv::ImageFormatRgba8; - case glslang::ElfRgba8Snorm: return spv::ImageFormatRgba8Snorm; - case glslang::ElfRg32f: return spv::ImageFormatRg32f; - case glslang::ElfRg16f: return spv::ImageFormatRg16f; - case glslang::ElfR11fG11fB10f: return spv::ImageFormatR11fG11fB10f; - case glslang::ElfR16f: return spv::ImageFormatR16f; - case glslang::ElfRgba16: return spv::ImageFormatRgba16; - case glslang::ElfRgb10A2: return spv::ImageFormatRgb10A2; - case glslang::ElfRg16: return spv::ImageFormatRg16; - case glslang::ElfRg8: return spv::ImageFormatRg8; - case glslang::ElfR16: return spv::ImageFormatR16; - case glslang::ElfR8: return spv::ImageFormatR8; - case glslang::ElfRgba16Snorm: return spv::ImageFormatRgba16Snorm; - case glslang::ElfRg16Snorm: return spv::ImageFormatRg16Snorm; - case glslang::ElfRg8Snorm: return spv::ImageFormatRg8Snorm; - case glslang::ElfR16Snorm: return spv::ImageFormatR16Snorm; - case glslang::ElfR8Snorm: return spv::ImageFormatR8Snorm; - case glslang::ElfRgba32i: return spv::ImageFormatRgba32i; - case glslang::ElfRgba16i: return spv::ImageFormatRgba16i; - case glslang::ElfRgba8i: return spv::ImageFormatRgba8i; - case glslang::ElfR32i: return spv::ImageFormatR32i; - case glslang::ElfRg32i: return spv::ImageFormatRg32i; - case glslang::ElfRg16i: return spv::ImageFormatRg16i; - case glslang::ElfRg8i: return spv::ImageFormatRg8i; - case glslang::ElfR16i: return spv::ImageFormatR16i; - case glslang::ElfR8i: return spv::ImageFormatR8i; - case glslang::ElfRgba32ui: return spv::ImageFormatRgba32ui; - case glslang::ElfRgba16ui: return spv::ImageFormatRgba16ui; - case glslang::ElfRgba8ui: return spv::ImageFormatRgba8ui; - case glslang::ElfR32ui: return spv::ImageFormatR32ui; - case glslang::ElfRg32ui: return spv::ImageFormatRg32ui; - case glslang::ElfRg16ui: return spv::ImageFormatRg16ui; - case glslang::ElfRgb10a2ui: return spv::ImageFormatRgb10a2ui; - case glslang::ElfRg8ui: return spv::ImageFormatRg8ui; - case glslang::ElfR16ui: return spv::ImageFormatR16ui; - case glslang::ElfR8ui: return spv::ImageFormatR8ui; - default: return spv::ImageFormatMax; - } -} - -spv::SelectionControlMask 
TGlslangToSpvTraverser::TranslateSelectionControl(glslang::TSelectionControl selectionControl) const -{ - switch (selectionControl) { - case glslang::ESelectionControlNone: return spv::SelectionControlMaskNone; - case glslang::ESelectionControlFlatten: return spv::SelectionControlFlattenMask; - case glslang::ESelectionControlDontFlatten: return spv::SelectionControlDontFlattenMask; - default: return spv::SelectionControlMaskNone; - } -} - -spv::LoopControlMask TGlslangToSpvTraverser::TranslateLoopControl(glslang::TLoopControl loopControl) const -{ - switch (loopControl) { - case glslang::ELoopControlNone: return spv::LoopControlMaskNone; - case glslang::ELoopControlUnroll: return spv::LoopControlUnrollMask; - case glslang::ELoopControlDontUnroll: return spv::LoopControlDontUnrollMask; - // TODO: DependencyInfinite - // TODO: DependencyLength - default: return spv::LoopControlMaskNone; - } -} - -// Translate glslang type to SPIR-V storage class. -spv::StorageClass TGlslangToSpvTraverser::TranslateStorageClass(const glslang::TType& type) -{ - if (type.getQualifier().isPipeInput()) - return spv::StorageClassInput; - else if (type.getQualifier().isPipeOutput()) - return spv::StorageClassOutput; - else if (type.getBasicType() == glslang::EbtAtomicUint) - return spv::StorageClassAtomicCounter; - else if (type.containsOpaque()) - return spv::StorageClassUniformConstant; - else if (glslangIntermediate->usingStorageBuffer() && type.getQualifier().storage == glslang::EvqBuffer) { - builder.addExtension(spv::E_SPV_KHR_storage_buffer_storage_class); - return spv::StorageClassStorageBuffer; - } else if (type.getQualifier().isUniformOrBuffer()) { - if (type.getQualifier().layoutPushConstant) - return spv::StorageClassPushConstant; - if (type.getBasicType() == glslang::EbtBlock) - return spv::StorageClassUniform; - else - return spv::StorageClassUniformConstant; - } else { - switch (type.getQualifier().storage) { - case glslang::EvqShared: return spv::StorageClassWorkgroup; break; - case glslang::EvqGlobal: return spv::StorageClassPrivate; - case glslang::EvqConstReadOnly: return spv::StorageClassFunction; - case glslang::EvqTemporary: return spv::StorageClassFunction; - default: - assert(0); - return spv::StorageClassFunction; - } - } -} - -// Return whether or not the given type is something that should be tied to a -// descriptor set. -bool IsDescriptorResource(const glslang::TType& type) -{ - // uniform and buffer blocks are included, unless it is a push_constant - if (type.getBasicType() == glslang::EbtBlock) - return type.getQualifier().isUniformOrBuffer() && ! type.getQualifier().layoutPushConstant; - - // non block... - // basically samplerXXX/subpass/sampler/texture are all included - // if they are the global-scope-class, not the function parameter - // (or local, if they ever exist) class. - if (type.getBasicType() == glslang::EbtSampler) - return type.getQualifier().isUniformOrBuffer(); - - // None of the above. 
- return false; -} - -void InheritQualifiers(glslang::TQualifier& child, const glslang::TQualifier& parent) -{ - if (child.layoutMatrix == glslang::ElmNone) - child.layoutMatrix = parent.layoutMatrix; - - if (parent.invariant) - child.invariant = true; - if (parent.nopersp) - child.nopersp = true; -#ifdef AMD_EXTENSIONS - if (parent.explicitInterp) - child.explicitInterp = true; -#endif - if (parent.flat) - child.flat = true; - if (parent.centroid) - child.centroid = true; - if (parent.patch) - child.patch = true; - if (parent.sample) - child.sample = true; - if (parent.coherent) - child.coherent = true; - if (parent.volatil) - child.volatil = true; - if (parent.restrict) - child.restrict = true; - if (parent.readonly) - child.readonly = true; - if (parent.writeonly) - child.writeonly = true; -} - -bool HasNonLayoutQualifiers(const glslang::TType& type, const glslang::TQualifier& qualifier) -{ - // This should list qualifiers that simultaneous satisfy: - // - struct members might inherit from a struct declaration - // (note that non-block structs don't explicitly inherit, - // only implicitly, meaning no decoration involved) - // - affect decorations on the struct members - // (note smooth does not, and expecting something like volatile - // to effect the whole object) - // - are not part of the offset/st430/etc or row/column-major layout - return qualifier.invariant || (qualifier.hasLocation() && type.getBasicType() == glslang::EbtBlock); -} - -// -// Implement the TGlslangToSpvTraverser class. -// - -TGlslangToSpvTraverser::TGlslangToSpvTraverser(const glslang::TIntermediate* glslangIntermediate, - spv::SpvBuildLogger* buildLogger, glslang::SpvOptions& options) - : TIntermTraverser(true, false, true), - options(options), - shaderEntry(nullptr), currentFunction(nullptr), - sequenceDepth(0), logger(buildLogger), - builder((glslang::GetKhronosToolId() << 16) | GeneratorVersion, logger), - inEntryPoint(false), entryPointTerminated(false), linkageOnly(false), - glslangIntermediate(glslangIntermediate) -{ - spv::ExecutionModel executionModel = TranslateExecutionModel(glslangIntermediate->getStage()); - - builder.clearAccessChain(); - builder.setSource(TranslateSourceLanguage(glslangIntermediate->getSource(), glslangIntermediate->getProfile()), - glslangIntermediate->getVersion()); - - if (options.generateDebugInfo) { - builder.setEmitOpLines(); - builder.setSourceFile(glslangIntermediate->getSourceFile()); - - // Set the source shader's text. If for SPV version 1.0, include - // a preamble in comments stating the OpModuleProcessed instructions. - // Otherwise, emit those as actual instructions. 
- std::string text; - const std::vector& processes = glslangIntermediate->getProcesses(); - for (int p = 0; p < (int)processes.size(); ++p) { - if (glslangIntermediate->getSpv().spv < 0x00010100) { - text.append("// OpModuleProcessed "); - text.append(processes[p]); - text.append("\n"); - } else - builder.addModuleProcessed(processes[p]); - } - if (glslangIntermediate->getSpv().spv < 0x00010100 && (int)processes.size() > 0) - text.append("#line 1\n"); - text.append(glslangIntermediate->getSourceText()); - builder.setSourceText(text); - } - stdBuiltins = builder.import("GLSL.std.450"); - builder.setMemoryModel(spv::AddressingModelLogical, spv::MemoryModelGLSL450); - shaderEntry = builder.makeEntryPoint(glslangIntermediate->getEntryPointName().c_str()); - entryPoint = builder.addEntryPoint(executionModel, shaderEntry, glslangIntermediate->getEntryPointName().c_str()); - - // Add the source extensions - const auto& sourceExtensions = glslangIntermediate->getRequestedExtensions(); - for (auto it = sourceExtensions.begin(); it != sourceExtensions.end(); ++it) - builder.addSourceExtension(it->c_str()); - - // Add the top-level modes for this shader. - - if (glslangIntermediate->getXfbMode()) { - builder.addCapability(spv::CapabilityTransformFeedback); - builder.addExecutionMode(shaderEntry, spv::ExecutionModeXfb); - } - - unsigned int mode; - switch (glslangIntermediate->getStage()) { - case EShLangVertex: - builder.addCapability(spv::CapabilityShader); - break; - - case EShLangTessEvaluation: - case EShLangTessControl: - builder.addCapability(spv::CapabilityTessellation); - - glslang::TLayoutGeometry primitive; - - if (glslangIntermediate->getStage() == EShLangTessControl) { - builder.addExecutionMode(shaderEntry, spv::ExecutionModeOutputVertices, glslangIntermediate->getVertices()); - primitive = glslangIntermediate->getOutputPrimitive(); - } else { - primitive = glslangIntermediate->getInputPrimitive(); - } - - switch (primitive) { - case glslang::ElgTriangles: mode = spv::ExecutionModeTriangles; break; - case glslang::ElgQuads: mode = spv::ExecutionModeQuads; break; - case glslang::ElgIsolines: mode = spv::ExecutionModeIsolines; break; - default: mode = spv::ExecutionModeMax; break; - } - if (mode != spv::ExecutionModeMax) - builder.addExecutionMode(shaderEntry, (spv::ExecutionMode)mode); - - switch (glslangIntermediate->getVertexSpacing()) { - case glslang::EvsEqual: mode = spv::ExecutionModeSpacingEqual; break; - case glslang::EvsFractionalEven: mode = spv::ExecutionModeSpacingFractionalEven; break; - case glslang::EvsFractionalOdd: mode = spv::ExecutionModeSpacingFractionalOdd; break; - default: mode = spv::ExecutionModeMax; break; - } - if (mode != spv::ExecutionModeMax) - builder.addExecutionMode(shaderEntry, (spv::ExecutionMode)mode); - - switch (glslangIntermediate->getVertexOrder()) { - case glslang::EvoCw: mode = spv::ExecutionModeVertexOrderCw; break; - case glslang::EvoCcw: mode = spv::ExecutionModeVertexOrderCcw; break; - default: mode = spv::ExecutionModeMax; break; - } - if (mode != spv::ExecutionModeMax) - builder.addExecutionMode(shaderEntry, (spv::ExecutionMode)mode); - - if (glslangIntermediate->getPointMode()) - builder.addExecutionMode(shaderEntry, spv::ExecutionModePointMode); - break; - - case EShLangGeometry: - builder.addCapability(spv::CapabilityGeometry); - switch (glslangIntermediate->getInputPrimitive()) { - case glslang::ElgPoints: mode = spv::ExecutionModeInputPoints; break; - case glslang::ElgLines: mode = spv::ExecutionModeInputLines; break; - case 
glslang::ElgLinesAdjacency: mode = spv::ExecutionModeInputLinesAdjacency; break; - case glslang::ElgTriangles: mode = spv::ExecutionModeTriangles; break; - case glslang::ElgTrianglesAdjacency: mode = spv::ExecutionModeInputTrianglesAdjacency; break; - default: mode = spv::ExecutionModeMax; break; - } - if (mode != spv::ExecutionModeMax) - builder.addExecutionMode(shaderEntry, (spv::ExecutionMode)mode); - - builder.addExecutionMode(shaderEntry, spv::ExecutionModeInvocations, glslangIntermediate->getInvocations()); - - switch (glslangIntermediate->getOutputPrimitive()) { - case glslang::ElgPoints: mode = spv::ExecutionModeOutputPoints; break; - case glslang::ElgLineStrip: mode = spv::ExecutionModeOutputLineStrip; break; - case glslang::ElgTriangleStrip: mode = spv::ExecutionModeOutputTriangleStrip; break; - default: mode = spv::ExecutionModeMax; break; - } - if (mode != spv::ExecutionModeMax) - builder.addExecutionMode(shaderEntry, (spv::ExecutionMode)mode); - builder.addExecutionMode(shaderEntry, spv::ExecutionModeOutputVertices, glslangIntermediate->getVertices()); - break; - - case EShLangFragment: - builder.addCapability(spv::CapabilityShader); - if (glslangIntermediate->getPixelCenterInteger()) - builder.addExecutionMode(shaderEntry, spv::ExecutionModePixelCenterInteger); - - if (glslangIntermediate->getOriginUpperLeft()) - builder.addExecutionMode(shaderEntry, spv::ExecutionModeOriginUpperLeft); - else - builder.addExecutionMode(shaderEntry, spv::ExecutionModeOriginLowerLeft); - - if (glslangIntermediate->getEarlyFragmentTests()) - builder.addExecutionMode(shaderEntry, spv::ExecutionModeEarlyFragmentTests); - - if (glslangIntermediate->getPostDepthCoverage()) { - builder.addCapability(spv::CapabilitySampleMaskPostDepthCoverage); - builder.addExecutionMode(shaderEntry, spv::ExecutionModePostDepthCoverage); - builder.addExtension(spv::E_SPV_KHR_post_depth_coverage); - } - - switch(glslangIntermediate->getDepth()) { - case glslang::EldGreater: mode = spv::ExecutionModeDepthGreater; break; - case glslang::EldLess: mode = spv::ExecutionModeDepthLess; break; - default: mode = spv::ExecutionModeMax; break; - } - if (mode != spv::ExecutionModeMax) - builder.addExecutionMode(shaderEntry, (spv::ExecutionMode)mode); - - if (glslangIntermediate->getDepth() != glslang::EldUnchanged && glslangIntermediate->isDepthReplacing()) - builder.addExecutionMode(shaderEntry, spv::ExecutionModeDepthReplacing); - break; - - case EShLangCompute: - builder.addCapability(spv::CapabilityShader); - builder.addExecutionMode(shaderEntry, spv::ExecutionModeLocalSize, glslangIntermediate->getLocalSize(0), - glslangIntermediate->getLocalSize(1), - glslangIntermediate->getLocalSize(2)); - break; - - default: - break; - } -} - -// Finish creating SPV, after the traversal is complete. -void TGlslangToSpvTraverser::finishSpv() -{ - if (! entryPointTerminated) { - builder.setBuildPoint(shaderEntry->getLastBlock()); - builder.leaveFunction(); - } - - // finish off the entry-point SPV instruction by adding the Input/Output - for (auto it = iOSet.cbegin(); it != iOSet.cend(); ++it) - entryPoint->addIdOperand(*it); - - builder.eliminateDeadDecorations(); -} - -// Write the SPV into 'out'. -void TGlslangToSpvTraverser::dumpSpv(std::vector& out) -{ - builder.dump(out); -} - -// -// Implement the traversal functions. -// -// Return true from interior nodes to have the external traversal -// continue on to children. Return false if children were -// already processed. 
-// - -// -// Symbols can turn into -// - uniform/input reads -// - output writes -// - complex lvalue base setups: foo.bar[3].... , where we see foo and start up an access chain -// - something simple that degenerates into the last bullet -// -void TGlslangToSpvTraverser::visitSymbol(glslang::TIntermSymbol* symbol) -{ - SpecConstantOpModeGuard spec_constant_op_mode_setter(&builder); - if (symbol->getType().getQualifier().isSpecConstant()) - spec_constant_op_mode_setter.turnOnSpecConstantOpMode(); - - // getSymbolId() will set up all the IO decorations on the first call. - // Formal function parameters were mapped during makeFunctions(). - spv::Id id = getSymbolId(symbol); - - // Include all "static use" and "linkage only" interface variables on the OpEntryPoint instruction - if (builder.isPointer(id)) { - spv::StorageClass sc = builder.getStorageClass(id); - if (sc == spv::StorageClassInput || sc == spv::StorageClassOutput) - iOSet.insert(id); - } - - // Only process non-linkage-only nodes for generating actual static uses - if (! linkageOnly || symbol->getQualifier().isSpecConstant()) { - // Prepare to generate code for the access - - // L-value chains will be computed left to right. We're on the symbol now, - // which is the left-most part of the access chain, so now is "clear" time, - // followed by setting the base. - builder.clearAccessChain(); - - // For now, we consider all user variables as being in memory, so they are pointers, - // except for - // A) R-Value arguments to a function, which are an intermediate object. - // See comments in handleUserFunctionCall(). - // B) Specialization constants (normal constants don't even come in as a variable), - // These are also pure R-values. - glslang::TQualifier qualifier = symbol->getQualifier(); - if (qualifier.isSpecConstant() || rValueParameters.find(symbol->getId()) != rValueParameters.end()) - builder.setAccessChainRValue(id); - else - builder.setAccessChainLValue(id); - } -} - -bool TGlslangToSpvTraverser::visitBinary(glslang::TVisit /* visit */, glslang::TIntermBinary* node) -{ - builder.setLine(node->getLoc().line); - - SpecConstantOpModeGuard spec_constant_op_mode_setter(&builder); - if (node->getType().getQualifier().isSpecConstant()) - spec_constant_op_mode_setter.turnOnSpecConstantOpMode(); - - // First, handle special cases - switch (node->getOp()) { - case glslang::EOpAssign: - case glslang::EOpAddAssign: - case glslang::EOpSubAssign: - case glslang::EOpMulAssign: - case glslang::EOpVectorTimesMatrixAssign: - case glslang::EOpVectorTimesScalarAssign: - case glslang::EOpMatrixTimesScalarAssign: - case glslang::EOpMatrixTimesMatrixAssign: - case glslang::EOpDivAssign: - case glslang::EOpModAssign: - case glslang::EOpAndAssign: - case glslang::EOpInclusiveOrAssign: - case glslang::EOpExclusiveOrAssign: - case glslang::EOpLeftShiftAssign: - case glslang::EOpRightShiftAssign: - // A bin-op assign "a += b" means the same thing as "a = a + b" - // where a is evaluated before b. For a simple assignment, GLSL - // says to evaluate the left before the right. So, always, left - // node then right node. 
- { - // get the left l-value, save it away - builder.clearAccessChain(); - node->getLeft()->traverse(this); - spv::Builder::AccessChain lValue = builder.getAccessChain(); - - // evaluate the right - builder.clearAccessChain(); - node->getRight()->traverse(this); - spv::Id rValue = accessChainLoad(node->getRight()->getType()); - - if (node->getOp() != glslang::EOpAssign) { - // the left is also an r-value - builder.setAccessChain(lValue); - spv::Id leftRValue = accessChainLoad(node->getLeft()->getType()); - - // do the operation - rValue = createBinaryOperation(node->getOp(), TranslatePrecisionDecoration(node->getOperationPrecision()), - TranslateNoContractionDecoration(node->getType().getQualifier()), - convertGlslangToSpvType(node->getType()), leftRValue, rValue, - node->getType().getBasicType()); - - // these all need their counterparts in createBinaryOperation() - assert(rValue != spv::NoResult); - } - - // store the result - builder.setAccessChain(lValue); - multiTypeStore(node->getType(), rValue); - - // assignments are expressions having an rValue after they are evaluated... - builder.clearAccessChain(); - builder.setAccessChainRValue(rValue); - } - return false; - case glslang::EOpIndexDirect: - case glslang::EOpIndexDirectStruct: - { - // Get the left part of the access chain. - node->getLeft()->traverse(this); - - // Add the next element in the chain - - const int glslangIndex = node->getRight()->getAsConstantUnion()->getConstArray()[0].getIConst(); - if (! node->getLeft()->getType().isArray() && - node->getLeft()->getType().isVector() && - node->getOp() == glslang::EOpIndexDirect) { - // This is essentially a hard-coded vector swizzle of size 1, - // so short circuit the access-chain stuff with a swizzle. - std::vector swizzle; - swizzle.push_back(glslangIndex); - builder.accessChainPushSwizzle(swizzle, convertGlslangToSpvType(node->getLeft()->getType())); - } else { - int spvIndex = glslangIndex; - if (node->getLeft()->getBasicType() == glslang::EbtBlock && - node->getOp() == glslang::EOpIndexDirectStruct) - { - // This may be, e.g., an anonymous block-member selection, which generally need - // index remapping due to hidden members in anonymous blocks. - std::vector& remapper = memberRemapper[node->getLeft()->getType().getStruct()]; - assert(remapper.size() > 0); - spvIndex = remapper[glslangIndex]; - } - - // normal case for indexing array or structure or block - builder.accessChainPush(builder.makeIntConstant(spvIndex)); - - // Add capabilities here for accessing PointSize and clip/cull distance. - // We have deferred generation of associated capabilities until now. - if (node->getLeft()->getType().isStruct() && ! node->getLeft()->getType().isArray()) - declareUseOfStructMember(*(node->getLeft()->getType().getStruct()), glslangIndex); - } - } - return false; - case glslang::EOpIndexIndirect: - { - // Structure or array or vector indirection. - // Will use native SPIR-V access-chain for struct and array indirection; - // matrices are arrays of vectors, so will also work for a matrix. - // Will use the access chain's 'component' for variable index into a vector. - - // This adapter is building access chains left to right. - // Set up the access chain to the left. 
- node->getLeft()->traverse(this); - - // save it so that computing the right side doesn't trash it - spv::Builder::AccessChain partial = builder.getAccessChain(); - - // compute the next index in the chain - builder.clearAccessChain(); - node->getRight()->traverse(this); - spv::Id index = accessChainLoad(node->getRight()->getType()); - - // restore the saved access chain - builder.setAccessChain(partial); - - if (! node->getLeft()->getType().isArray() && node->getLeft()->getType().isVector()) - builder.accessChainPushComponent(index, convertGlslangToSpvType(node->getLeft()->getType())); - else - builder.accessChainPush(index); - } - return false; - case glslang::EOpVectorSwizzle: - { - node->getLeft()->traverse(this); - std::vector swizzle; - convertSwizzle(*node->getRight()->getAsAggregate(), swizzle); - builder.accessChainPushSwizzle(swizzle, convertGlslangToSpvType(node->getLeft()->getType())); - } - return false; - case glslang::EOpMatrixSwizzle: - logger->missingFunctionality("matrix swizzle"); - return true; - case glslang::EOpLogicalOr: - case glslang::EOpLogicalAnd: - { - - // These may require short circuiting, but can sometimes be done as straight - // binary operations. The right operand must be short circuited if it has - // side effects, and should probably be if it is complex. - if (isTrivial(node->getRight()->getAsTyped())) - break; // handle below as a normal binary operation - // otherwise, we need to do dynamic short circuiting on the right operand - spv::Id result = createShortCircuit(node->getOp(), *node->getLeft()->getAsTyped(), *node->getRight()->getAsTyped()); - builder.clearAccessChain(); - builder.setAccessChainRValue(result); - } - return false; - default: - break; - } - - // Assume generic binary op... - - // get right operand - builder.clearAccessChain(); - node->getLeft()->traverse(this); - spv::Id left = accessChainLoad(node->getLeft()->getType()); - - // get left operand - builder.clearAccessChain(); - node->getRight()->traverse(this); - spv::Id right = accessChainLoad(node->getRight()->getType()); - - // get result - spv::Id result = createBinaryOperation(node->getOp(), TranslatePrecisionDecoration(node->getOperationPrecision()), - TranslateNoContractionDecoration(node->getType().getQualifier()), - convertGlslangToSpvType(node->getType()), left, right, - node->getLeft()->getType().getBasicType()); - - builder.clearAccessChain(); - if (! result) { - logger->missingFunctionality("unknown glslang binary operation"); - return true; // pick up a child as the place-holder result - } else { - builder.setAccessChainRValue(result); - return false; - } -} - -bool TGlslangToSpvTraverser::visitUnary(glslang::TVisit /* visit */, glslang::TIntermUnary* node) -{ - builder.setLine(node->getLoc().line); - - SpecConstantOpModeGuard spec_constant_op_mode_setter(&builder); - if (node->getType().getQualifier().isSpecConstant()) - spec_constant_op_mode_setter.turnOnSpecConstantOpMode(); - - spv::Id result = spv::NoResult; - - // try texturing first - result = createImageTextureFunctionCall(node); - if (result != spv::NoResult) { - builder.clearAccessChain(); - builder.setAccessChainRValue(result); - - return false; // done with this node - } - - // Non-texturing. - - if (node->getOp() == glslang::EOpArrayLength) { - // Quite special; won't want to evaluate the operand. - - // Normal .length() would have been constant folded by the front-end. - // So, this has to be block.lastMember.length(). - // SPV wants "block" and member number as the operands, go get them. 
- assert(node->getOperand()->getType().isRuntimeSizedArray()); - glslang::TIntermTyped* block = node->getOperand()->getAsBinaryNode()->getLeft(); - block->traverse(this); - unsigned int member = node->getOperand()->getAsBinaryNode()->getRight()->getAsConstantUnion()->getConstArray()[0].getUConst(); - spv::Id length = builder.createArrayLength(builder.accessChainGetLValue(), member); - - builder.clearAccessChain(); - builder.setAccessChainRValue(length); - - return false; - } - - // Start by evaluating the operand - - // Does it need a swizzle inversion? If so, evaluation is inverted; - // operate first on the swizzle base, then apply the swizzle. - spv::Id invertedType = spv::NoType; - auto resultType = [&invertedType, &node, this](){ return invertedType != spv::NoType ? invertedType : convertGlslangToSpvType(node->getType()); }; - if (node->getOp() == glslang::EOpInterpolateAtCentroid) - invertedType = getInvertedSwizzleType(*node->getOperand()); - - builder.clearAccessChain(); - if (invertedType != spv::NoType) - node->getOperand()->getAsBinaryNode()->getLeft()->traverse(this); - else - node->getOperand()->traverse(this); - - spv::Id operand = spv::NoResult; - - if (node->getOp() == glslang::EOpAtomicCounterIncrement || - node->getOp() == glslang::EOpAtomicCounterDecrement || - node->getOp() == glslang::EOpAtomicCounter || - node->getOp() == glslang::EOpInterpolateAtCentroid) - operand = builder.accessChainGetLValue(); // Special case l-value operands - else - operand = accessChainLoad(node->getOperand()->getType()); - - spv::Decoration precision = TranslatePrecisionDecoration(node->getOperationPrecision()); - spv::Decoration noContraction = TranslateNoContractionDecoration(node->getType().getQualifier()); - - // it could be a conversion - if (! result) - result = createConversion(node->getOp(), precision, noContraction, resultType(), operand, node->getOperand()->getBasicType()); - - // if not, then possibly an operation - if (! result) - result = createUnaryOperation(node->getOp(), precision, noContraction, resultType(), operand, node->getOperand()->getBasicType()); - - if (result) { - if (invertedType) - result = createInvertedSwizzle(precision, *node->getOperand(), result); - - builder.clearAccessChain(); - builder.setAccessChainRValue(result); - - return false; // done with this node - } - - // it must be a special case, check... 
- switch (node->getOp()) { - case glslang::EOpPostIncrement: - case glslang::EOpPostDecrement: - case glslang::EOpPreIncrement: - case glslang::EOpPreDecrement: - { - // we need the integer value "1" or the floating point "1.0" to add/subtract - spv::Id one = 0; - if (node->getBasicType() == glslang::EbtFloat) - one = builder.makeFloatConstant(1.0F); - else if (node->getBasicType() == glslang::EbtDouble) - one = builder.makeDoubleConstant(1.0); -#ifdef AMD_EXTENSIONS - else if (node->getBasicType() == glslang::EbtFloat16) - one = builder.makeFloat16Constant(1.0F); -#endif - else if (node->getBasicType() == glslang::EbtInt64 || node->getBasicType() == glslang::EbtUint64) - one = builder.makeInt64Constant(1); -#ifdef AMD_EXTENSIONS - else if (node->getBasicType() == glslang::EbtInt16 || node->getBasicType() == glslang::EbtUint16) - one = builder.makeInt16Constant(1); -#endif - else - one = builder.makeIntConstant(1); - glslang::TOperator op; - if (node->getOp() == glslang::EOpPreIncrement || - node->getOp() == glslang::EOpPostIncrement) - op = glslang::EOpAdd; - else - op = glslang::EOpSub; - - spv::Id result = createBinaryOperation(op, precision, - TranslateNoContractionDecoration(node->getType().getQualifier()), - convertGlslangToSpvType(node->getType()), operand, one, - node->getType().getBasicType()); - assert(result != spv::NoResult); - - // The result of operation is always stored, but conditionally the - // consumed result. The consumed result is always an r-value. - builder.accessChainStore(result); - builder.clearAccessChain(); - if (node->getOp() == glslang::EOpPreIncrement || - node->getOp() == glslang::EOpPreDecrement) - builder.setAccessChainRValue(result); - else - builder.setAccessChainRValue(operand); - } - - return false; - - case glslang::EOpEmitStreamVertex: - builder.createNoResultOp(spv::OpEmitStreamVertex, operand); - return false; - case glslang::EOpEndStreamPrimitive: - builder.createNoResultOp(spv::OpEndStreamPrimitive, operand); - return false; - - default: - logger->missingFunctionality("unknown glslang unary"); - return true; // pick up operand as placeholder result - } -} - -bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TIntermAggregate* node) -{ - SpecConstantOpModeGuard spec_constant_op_mode_setter(&builder); - if (node->getType().getQualifier().isSpecConstant()) - spec_constant_op_mode_setter.turnOnSpecConstantOpMode(); - - spv::Id result = spv::NoResult; - spv::Id invertedType = spv::NoType; // to use to override the natural type of the node - auto resultType = [&invertedType, &node, this](){ return invertedType != spv::NoType ? 
invertedType : convertGlslangToSpvType(node->getType()); }; - - // try texturing - result = createImageTextureFunctionCall(node); - if (result != spv::NoResult) { - builder.clearAccessChain(); - builder.setAccessChainRValue(result); - - return false; -#ifdef AMD_EXTENSIONS - } else if (node->getOp() == glslang::EOpImageStore || node->getOp() == glslang::EOpImageStoreLod) { -#else - } else if (node->getOp() == glslang::EOpImageStore) { -#endif - // "imageStore" is a special case, which has no result - return false; - } - - glslang::TOperator binOp = glslang::EOpNull; - bool reduceComparison = true; - bool isMatrix = false; - bool noReturnValue = false; - bool atomic = false; - - assert(node->getOp()); - - spv::Decoration precision = TranslatePrecisionDecoration(node->getOperationPrecision()); - - switch (node->getOp()) { - case glslang::EOpSequence: - { - if (preVisit) - ++sequenceDepth; - else - --sequenceDepth; - - if (sequenceDepth == 1) { - // If this is the parent node of all the functions, we want to see them - // early, so all call points have actual SPIR-V functions to reference. - // In all cases, still let the traverser visit the children for us. - makeFunctions(node->getAsAggregate()->getSequence()); - - // Also, we want all globals initializers to go into the beginning of the entry point, before - // anything else gets there, so visit out of order, doing them all now. - makeGlobalInitializers(node->getAsAggregate()->getSequence()); - - // Initializers are done, don't want to visit again, but functions and link objects need to be processed, - // so do them manually. - visitFunctions(node->getAsAggregate()->getSequence()); - - return false; - } - - return true; - } - case glslang::EOpLinkerObjects: - { - if (visit == glslang::EvPreVisit) - linkageOnly = true; - else - linkageOnly = false; - - return true; - } - case glslang::EOpComma: - { - // processing from left to right naturally leaves the right-most - // lying around in the access chain - glslang::TIntermSequence& glslangOperands = node->getSequence(); - for (int i = 0; i < (int)glslangOperands.size(); ++i) - glslangOperands[i]->traverse(this); - - return false; - } - case glslang::EOpFunction: - if (visit == glslang::EvPreVisit) { - if (isShaderEntryPoint(node)) { - inEntryPoint = true; - builder.setBuildPoint(shaderEntry->getLastBlock()); - currentFunction = shaderEntry; - } else { - handleFunctionEntry(node); - } - } else { - if (inEntryPoint) - entryPointTerminated = true; - builder.leaveFunction(); - inEntryPoint = false; - } - - return true; - case glslang::EOpParameters: - // Parameters will have been consumed by EOpFunction processing, but not - // the body, so we still visited the function node's children, making this - // child redundant. 
- return false; - case glslang::EOpFunctionCall: - { - builder.setLine(node->getLoc().line); - if (node->isUserDefined()) - result = handleUserFunctionCall(node); - // assert(result); // this can happen for bad shaders because the call graph completeness checking is not yet done - if (result) { - builder.clearAccessChain(); - builder.setAccessChainRValue(result); - } else - logger->missingFunctionality("missing user function; linker needs to catch that"); - - return false; - } - case glslang::EOpConstructMat2x2: - case glslang::EOpConstructMat2x3: - case glslang::EOpConstructMat2x4: - case glslang::EOpConstructMat3x2: - case glslang::EOpConstructMat3x3: - case glslang::EOpConstructMat3x4: - case glslang::EOpConstructMat4x2: - case glslang::EOpConstructMat4x3: - case glslang::EOpConstructMat4x4: - case glslang::EOpConstructDMat2x2: - case glslang::EOpConstructDMat2x3: - case glslang::EOpConstructDMat2x4: - case glslang::EOpConstructDMat3x2: - case glslang::EOpConstructDMat3x3: - case glslang::EOpConstructDMat3x4: - case glslang::EOpConstructDMat4x2: - case glslang::EOpConstructDMat4x3: - case glslang::EOpConstructDMat4x4: - case glslang::EOpConstructIMat2x2: - case glslang::EOpConstructIMat2x3: - case glslang::EOpConstructIMat2x4: - case glslang::EOpConstructIMat3x2: - case glslang::EOpConstructIMat3x3: - case glslang::EOpConstructIMat3x4: - case glslang::EOpConstructIMat4x2: - case glslang::EOpConstructIMat4x3: - case glslang::EOpConstructIMat4x4: - case glslang::EOpConstructUMat2x2: - case glslang::EOpConstructUMat2x3: - case glslang::EOpConstructUMat2x4: - case glslang::EOpConstructUMat3x2: - case glslang::EOpConstructUMat3x3: - case glslang::EOpConstructUMat3x4: - case glslang::EOpConstructUMat4x2: - case glslang::EOpConstructUMat4x3: - case glslang::EOpConstructUMat4x4: - case glslang::EOpConstructBMat2x2: - case glslang::EOpConstructBMat2x3: - case glslang::EOpConstructBMat2x4: - case glslang::EOpConstructBMat3x2: - case glslang::EOpConstructBMat3x3: - case glslang::EOpConstructBMat3x4: - case glslang::EOpConstructBMat4x2: - case glslang::EOpConstructBMat4x3: - case glslang::EOpConstructBMat4x4: -#ifdef AMD_EXTENSIONS - case glslang::EOpConstructF16Mat2x2: - case glslang::EOpConstructF16Mat2x3: - case glslang::EOpConstructF16Mat2x4: - case glslang::EOpConstructF16Mat3x2: - case glslang::EOpConstructF16Mat3x3: - case glslang::EOpConstructF16Mat3x4: - case glslang::EOpConstructF16Mat4x2: - case glslang::EOpConstructF16Mat4x3: - case glslang::EOpConstructF16Mat4x4: -#endif - isMatrix = true; - // fall through - case glslang::EOpConstructFloat: - case glslang::EOpConstructVec2: - case glslang::EOpConstructVec3: - case glslang::EOpConstructVec4: - case glslang::EOpConstructDouble: - case glslang::EOpConstructDVec2: - case glslang::EOpConstructDVec3: - case glslang::EOpConstructDVec4: -#ifdef AMD_EXTENSIONS - case glslang::EOpConstructFloat16: - case glslang::EOpConstructF16Vec2: - case glslang::EOpConstructF16Vec3: - case glslang::EOpConstructF16Vec4: -#endif - case glslang::EOpConstructBool: - case glslang::EOpConstructBVec2: - case glslang::EOpConstructBVec3: - case glslang::EOpConstructBVec4: - case glslang::EOpConstructInt: - case glslang::EOpConstructIVec2: - case glslang::EOpConstructIVec3: - case glslang::EOpConstructIVec4: - case glslang::EOpConstructUint: - case glslang::EOpConstructUVec2: - case glslang::EOpConstructUVec3: - case glslang::EOpConstructUVec4: - case glslang::EOpConstructInt64: - case glslang::EOpConstructI64Vec2: - case glslang::EOpConstructI64Vec3: - case 
glslang::EOpConstructI64Vec4: - case glslang::EOpConstructUint64: - case glslang::EOpConstructU64Vec2: - case glslang::EOpConstructU64Vec3: - case glslang::EOpConstructU64Vec4: -#ifdef AMD_EXTENSIONS - case glslang::EOpConstructInt16: - case glslang::EOpConstructI16Vec2: - case glslang::EOpConstructI16Vec3: - case glslang::EOpConstructI16Vec4: - case glslang::EOpConstructUint16: - case glslang::EOpConstructU16Vec2: - case glslang::EOpConstructU16Vec3: - case glslang::EOpConstructU16Vec4: -#endif - case glslang::EOpConstructStruct: - case glslang::EOpConstructTextureSampler: - { - builder.setLine(node->getLoc().line); - std::vector arguments; - translateArguments(*node, arguments); - spv::Id constructed; - if (node->getOp() == glslang::EOpConstructTextureSampler) - constructed = builder.createOp(spv::OpSampledImage, resultType(), arguments); - else if (node->getOp() == glslang::EOpConstructStruct || node->getType().isArray()) { - std::vector constituents; - for (int c = 0; c < (int)arguments.size(); ++c) - constituents.push_back(arguments[c]); - constructed = builder.createCompositeConstruct(resultType(), constituents); - } else if (isMatrix) - constructed = builder.createMatrixConstructor(precision, arguments, resultType()); - else - constructed = builder.createConstructor(precision, arguments, resultType()); - - builder.clearAccessChain(); - builder.setAccessChainRValue(constructed); - - return false; - } - - // These six are component-wise compares with component-wise results. - // Forward on to createBinaryOperation(), requesting a vector result. - case glslang::EOpLessThan: - case glslang::EOpGreaterThan: - case glslang::EOpLessThanEqual: - case glslang::EOpGreaterThanEqual: - case glslang::EOpVectorEqual: - case glslang::EOpVectorNotEqual: - { - // Map the operation to a binary - binOp = node->getOp(); - reduceComparison = false; - switch (node->getOp()) { - case glslang::EOpVectorEqual: binOp = glslang::EOpVectorEqual; break; - case glslang::EOpVectorNotEqual: binOp = glslang::EOpVectorNotEqual; break; - default: binOp = node->getOp(); break; - } - - break; - } - case glslang::EOpMul: - // component-wise matrix multiply - binOp = glslang::EOpMul; - break; - case glslang::EOpOuterProduct: - // two vectors multiplied to make a matrix - binOp = glslang::EOpOuterProduct; - break; - case glslang::EOpDot: - { - // for scalar dot product, use multiply - glslang::TIntermSequence& glslangOperands = node->getSequence(); - if (glslangOperands[0]->getAsTyped()->getVectorSize() == 1) - binOp = glslang::EOpMul; - break; - } - case glslang::EOpMod: - // when an aggregate, this is the floating-point mod built-in function, - // which can be emitted by the one in createBinaryOperation() - binOp = glslang::EOpMod; - break; - case glslang::EOpEmitVertex: - case glslang::EOpEndPrimitive: - case glslang::EOpBarrier: - case glslang::EOpMemoryBarrier: - case glslang::EOpMemoryBarrierAtomicCounter: - case glslang::EOpMemoryBarrierBuffer: - case glslang::EOpMemoryBarrierImage: - case glslang::EOpMemoryBarrierShared: - case glslang::EOpGroupMemoryBarrier: - case glslang::EOpAllMemoryBarrierWithGroupSync: - case glslang::EOpGroupMemoryBarrierWithGroupSync: - case glslang::EOpWorkgroupMemoryBarrier: - case glslang::EOpWorkgroupMemoryBarrierWithGroupSync: - noReturnValue = true; - // These all have 0 operands and will naturally finish up in the code below for 0 operands - break; - - case glslang::EOpAtomicAdd: - case glslang::EOpAtomicMin: - case glslang::EOpAtomicMax: - case glslang::EOpAtomicAnd: - case 
glslang::EOpAtomicOr: - case glslang::EOpAtomicXor: - case glslang::EOpAtomicExchange: - case glslang::EOpAtomicCompSwap: - atomic = true; - break; - - case glslang::EOpAtomicCounterAdd: - case glslang::EOpAtomicCounterSubtract: - case glslang::EOpAtomicCounterMin: - case glslang::EOpAtomicCounterMax: - case glslang::EOpAtomicCounterAnd: - case glslang::EOpAtomicCounterOr: - case glslang::EOpAtomicCounterXor: - case glslang::EOpAtomicCounterExchange: - case glslang::EOpAtomicCounterCompSwap: - builder.addExtension("SPV_KHR_shader_atomic_counter_ops"); - builder.addCapability(spv::CapabilityAtomicStorageOps); - atomic = true; - break; - - default: - break; - } - - // - // See if it maps to a regular operation. - // - if (binOp != glslang::EOpNull) { - glslang::TIntermTyped* left = node->getSequence()[0]->getAsTyped(); - glslang::TIntermTyped* right = node->getSequence()[1]->getAsTyped(); - assert(left && right); - - builder.clearAccessChain(); - left->traverse(this); - spv::Id leftId = accessChainLoad(left->getType()); - - builder.clearAccessChain(); - right->traverse(this); - spv::Id rightId = accessChainLoad(right->getType()); - - builder.setLine(node->getLoc().line); - result = createBinaryOperation(binOp, precision, TranslateNoContractionDecoration(node->getType().getQualifier()), - resultType(), leftId, rightId, - left->getType().getBasicType(), reduceComparison); - - // code above should only make binOp that exists in createBinaryOperation - assert(result != spv::NoResult); - builder.clearAccessChain(); - builder.setAccessChainRValue(result); - - return false; - } - - // - // Create the list of operands. - // - glslang::TIntermSequence& glslangOperands = node->getSequence(); - std::vector operands; - for (int arg = 0; arg < (int)glslangOperands.size(); ++arg) { - // special case l-value operands; there are just a few - bool lvalue = false; - switch (node->getOp()) { - case glslang::EOpFrexp: - case glslang::EOpModf: - if (arg == 1) - lvalue = true; - break; - case glslang::EOpInterpolateAtSample: - case glslang::EOpInterpolateAtOffset: -#ifdef AMD_EXTENSIONS - case glslang::EOpInterpolateAtVertex: -#endif - if (arg == 0) { - lvalue = true; - - // Does it need a swizzle inversion? If so, evaluation is inverted; - // operate first on the swizzle base, then apply the swizzle. 
- if (glslangOperands[0]->getAsOperator() && - glslangOperands[0]->getAsOperator()->getOp() == glslang::EOpVectorSwizzle) - invertedType = convertGlslangToSpvType(glslangOperands[0]->getAsBinaryNode()->getLeft()->getType()); - } - break; - case glslang::EOpAtomicAdd: - case glslang::EOpAtomicMin: - case glslang::EOpAtomicMax: - case glslang::EOpAtomicAnd: - case glslang::EOpAtomicOr: - case glslang::EOpAtomicXor: - case glslang::EOpAtomicExchange: - case glslang::EOpAtomicCompSwap: - case glslang::EOpAtomicCounterAdd: - case glslang::EOpAtomicCounterSubtract: - case glslang::EOpAtomicCounterMin: - case glslang::EOpAtomicCounterMax: - case glslang::EOpAtomicCounterAnd: - case glslang::EOpAtomicCounterOr: - case glslang::EOpAtomicCounterXor: - case glslang::EOpAtomicCounterExchange: - case glslang::EOpAtomicCounterCompSwap: - if (arg == 0) - lvalue = true; - break; - case glslang::EOpAddCarry: - case glslang::EOpSubBorrow: - if (arg == 2) - lvalue = true; - break; - case glslang::EOpUMulExtended: - case glslang::EOpIMulExtended: - if (arg >= 2) - lvalue = true; - break; - default: - break; - } - builder.clearAccessChain(); - if (invertedType != spv::NoType && arg == 0) - glslangOperands[0]->getAsBinaryNode()->getLeft()->traverse(this); - else - glslangOperands[arg]->traverse(this); - if (lvalue) - operands.push_back(builder.accessChainGetLValue()); - else { - builder.setLine(node->getLoc().line); - operands.push_back(accessChainLoad(glslangOperands[arg]->getAsTyped()->getType())); - } - } - - builder.setLine(node->getLoc().line); - if (atomic) { - // Handle all atomics - result = createAtomicOperation(node->getOp(), precision, resultType(), operands, node->getBasicType()); - } else { - // Pass through to generic operations. - switch (glslangOperands.size()) { - case 0: - result = createNoArgOperation(node->getOp(), precision, resultType()); - break; - case 1: - result = createUnaryOperation( - node->getOp(), precision, - TranslateNoContractionDecoration(node->getType().getQualifier()), - resultType(), operands.front(), - glslangOperands[0]->getAsTyped()->getBasicType()); - break; - default: - result = createMiscOperation(node->getOp(), precision, resultType(), operands, node->getBasicType()); - break; - } - if (invertedType) - result = createInvertedSwizzle(precision, *glslangOperands[0]->getAsBinaryNode(), result); - } - - if (noReturnValue) - return false; - - if (! result) { - logger->missingFunctionality("unknown glslang aggregate"); - return true; // pick up a child as a placeholder operand - } else { - builder.clearAccessChain(); - builder.setAccessChainRValue(result); - return false; - } -} - -// This path handles both if-then-else and ?: -// The if-then-else has a node type of void, while -// ?: has either a void or a non-void node type -// -// Leaving the result, when not void: -// GLSL only has r-values as the result of a :?, but -// if we have an l-value, that can be more efficient if it will -// become the base of a complex r-value expression, because the -// next layer copies r-values into memory to use the access-chain mechanism -bool TGlslangToSpvTraverser::visitSelection(glslang::TVisit /* visit */, glslang::TIntermSelection* node) -{ - // See if it simple and safe to generate OpSelect instead of using control flow. - // Crucially, side effects must be avoided, and there are performance trade-offs. - // Return true if good idea (and safe) for OpSelect, false otherwise. 
- const auto selectPolicy = [&]() -> bool { - if ((!node->getType().isScalar() && !node->getType().isVector()) || - node->getBasicType() == glslang::EbtVoid) - return false; - - if (node->getTrueBlock() == nullptr || - node->getFalseBlock() == nullptr) - return false; - - assert(node->getType() == node->getTrueBlock() ->getAsTyped()->getType() && - node->getType() == node->getFalseBlock()->getAsTyped()->getType()); - - // return true if a single operand to ? : is okay for OpSelect - const auto operandOkay = [](glslang::TIntermTyped* node) { - return node->getAsSymbolNode() || node->getType().getQualifier().isConstant(); - }; - - return operandOkay(node->getTrueBlock() ->getAsTyped()) && - operandOkay(node->getFalseBlock()->getAsTyped()); - }; - - // Emit OpSelect for this selection. - const auto handleAsOpSelect = [&]() { - node->getCondition()->traverse(this); - spv::Id condition = accessChainLoad(node->getCondition()->getType()); - node->getTrueBlock()->traverse(this); - spv::Id trueValue = accessChainLoad(node->getTrueBlock()->getAsTyped()->getType()); - node->getFalseBlock()->traverse(this); - spv::Id falseValue = accessChainLoad(node->getTrueBlock()->getAsTyped()->getType()); - - builder.setLine(node->getLoc().line); - - // smear condition to vector, if necessary (AST is always scalar) - if (builder.isVector(trueValue)) - condition = builder.smearScalar(spv::NoPrecision, condition, - builder.makeVectorType(builder.makeBoolType(), - builder.getNumComponents(trueValue))); - - spv::Id select = builder.createTriOp(spv::OpSelect, - convertGlslangToSpvType(node->getType()), condition, - trueValue, falseValue); - builder.clearAccessChain(); - builder.setAccessChainRValue(select); - }; - - // Try for OpSelect - - if (selectPolicy()) { - SpecConstantOpModeGuard spec_constant_op_mode_setter(&builder); - if (node->getType().getQualifier().isSpecConstant()) - spec_constant_op_mode_setter.turnOnSpecConstantOpMode(); - - handleAsOpSelect(); - return false; - } - - // Instead, emit control flow... - // Don't handle results as temporaries, because there will be two names - // and better to leave SSA to later passes. - spv::Id result = (node->getBasicType() == glslang::EbtVoid) - ? 
spv::NoResult - : builder.createVariable(spv::StorageClassFunction, convertGlslangToSpvType(node->getType())); - - // emit the condition before doing anything with selection - node->getCondition()->traverse(this); - - // Selection control: - const spv::SelectionControlMask control = TranslateSelectionControl(node->getSelectionControl()); - - // make an "if" based on the value created by the condition - spv::Builder::If ifBuilder(accessChainLoad(node->getCondition()->getType()), control, builder); - - // emit the "then" statement - if (node->getTrueBlock() != nullptr) { - node->getTrueBlock()->traverse(this); - if (result != spv::NoResult) - builder.createStore(accessChainLoad(node->getTrueBlock()->getAsTyped()->getType()), result); - } - - if (node->getFalseBlock() != nullptr) { - ifBuilder.makeBeginElse(); - // emit the "else" statement - node->getFalseBlock()->traverse(this); - if (result != spv::NoResult) - builder.createStore(accessChainLoad(node->getFalseBlock()->getAsTyped()->getType()), result); - } - - // finish off the control flow - ifBuilder.makeEndIf(); - - if (result != spv::NoResult) { - // GLSL only has r-values as the result of a :?, but - // if we have an l-value, that can be more efficient if it will - // become the base of a complex r-value expression, because the - // next layer copies r-values into memory to use the access-chain mechanism - builder.clearAccessChain(); - builder.setAccessChainLValue(result); - } - - return false; -} - -bool TGlslangToSpvTraverser::visitSwitch(glslang::TVisit /* visit */, glslang::TIntermSwitch* node) -{ - // emit and get the condition before doing anything with switch - node->getCondition()->traverse(this); - spv::Id selector = accessChainLoad(node->getCondition()->getAsTyped()->getType()); - - // Selection control: - const spv::SelectionControlMask control = TranslateSelectionControl(node->getSelectionControl()); - - // browse the children to sort out code segments - int defaultSegment = -1; - std::vector codeSegments; - glslang::TIntermSequence& sequence = node->getBody()->getSequence(); - std::vector caseValues; - std::vector valueIndexToSegment(sequence.size()); // note: probably not all are used, it is an overestimate - for (glslang::TIntermSequence::iterator c = sequence.begin(); c != sequence.end(); ++c) { - TIntermNode* child = *c; - if (child->getAsBranchNode() && child->getAsBranchNode()->getFlowOp() == glslang::EOpDefault) - defaultSegment = (int)codeSegments.size(); - else if (child->getAsBranchNode() && child->getAsBranchNode()->getFlowOp() == glslang::EOpCase) { - valueIndexToSegment[caseValues.size()] = (int)codeSegments.size(); - caseValues.push_back(child->getAsBranchNode()->getExpression()->getAsConstantUnion()->getConstArray()[0].getIConst()); - } else - codeSegments.push_back(child); - } - - // handle the case where the last code segment is missing, due to no code - // statements between the last case and the end of the switch statement - if ((caseValues.size() && (int)codeSegments.size() == valueIndexToSegment[caseValues.size() - 1]) || - (int)codeSegments.size() == defaultSegment) - codeSegments.push_back(nullptr); - - // make the switch statement - std::vector segmentBlocks; // returned, as the blocks allocated in the call - builder.makeSwitch(selector, control, (int)codeSegments.size(), caseValues, valueIndexToSegment, defaultSegment, segmentBlocks); - - // emit all the code in the segments - breakForLoop.push(false); - for (unsigned int s = 0; s < codeSegments.size(); ++s) { - 
builder.nextSwitchSegment(segmentBlocks, s); - if (codeSegments[s]) - codeSegments[s]->traverse(this); - else - builder.addSwitchBreak(); - } - breakForLoop.pop(); - - builder.endSwitch(segmentBlocks); - - return false; -} - -void TGlslangToSpvTraverser::visitConstantUnion(glslang::TIntermConstantUnion* node) -{ - int nextConst = 0; - spv::Id constant = createSpvConstantFromConstUnionArray(node->getType(), node->getConstArray(), nextConst, false); - - builder.clearAccessChain(); - builder.setAccessChainRValue(constant); -} - -bool TGlslangToSpvTraverser::visitLoop(glslang::TVisit /* visit */, glslang::TIntermLoop* node) -{ - auto blocks = builder.makeNewLoop(); - builder.createBranch(&blocks.head); - - // Loop control: - const spv::LoopControlMask control = TranslateLoopControl(node->getLoopControl()); - - // TODO: dependency length - - // Spec requires back edges to target header blocks, and every header block - // must dominate its merge block. Make a header block first to ensure these - // conditions are met. By definition, it will contain OpLoopMerge, followed - // by a block-ending branch. But we don't want to put any other body/test - // instructions in it, since the body/test may have arbitrary instructions, - // including merges of its own. - builder.setLine(node->getLoc().line); - builder.setBuildPoint(&blocks.head); - builder.createLoopMerge(&blocks.merge, &blocks.continue_target, control); - if (node->testFirst() && node->getTest()) { - spv::Block& test = builder.makeNewBlock(); - builder.createBranch(&test); - - builder.setBuildPoint(&test); - node->getTest()->traverse(this); - spv::Id condition = accessChainLoad(node->getTest()->getType()); - builder.createConditionalBranch(condition, &blocks.body, &blocks.merge); - - builder.setBuildPoint(&blocks.body); - breakForLoop.push(true); - if (node->getBody()) - node->getBody()->traverse(this); - builder.createBranch(&blocks.continue_target); - breakForLoop.pop(); - - builder.setBuildPoint(&blocks.continue_target); - if (node->getTerminal()) - node->getTerminal()->traverse(this); - builder.createBranch(&blocks.head); - } else { - builder.setLine(node->getLoc().line); - builder.createBranch(&blocks.body); - - breakForLoop.push(true); - builder.setBuildPoint(&blocks.body); - if (node->getBody()) - node->getBody()->traverse(this); - builder.createBranch(&blocks.continue_target); - breakForLoop.pop(); - - builder.setBuildPoint(&blocks.continue_target); - if (node->getTerminal()) - node->getTerminal()->traverse(this); - if (node->getTest()) { - node->getTest()->traverse(this); - spv::Id condition = - accessChainLoad(node->getTest()->getType()); - builder.createConditionalBranch(condition, &blocks.head, &blocks.merge); - } else { - // TODO: unless there was a break/return/discard instruction - // somewhere in the body, this is an infinite loop, so we should - // issue a warning. 
- builder.createBranch(&blocks.head); - } - } - builder.setBuildPoint(&blocks.merge); - builder.closeLoop(); - return false; -} - -bool TGlslangToSpvTraverser::visitBranch(glslang::TVisit /* visit */, glslang::TIntermBranch* node) -{ - if (node->getExpression()) - node->getExpression()->traverse(this); - - builder.setLine(node->getLoc().line); - - switch (node->getFlowOp()) { - case glslang::EOpKill: - builder.makeDiscard(); - break; - case glslang::EOpBreak: - if (breakForLoop.top()) - builder.createLoopExit(); - else - builder.addSwitchBreak(); - break; - case glslang::EOpContinue: - builder.createLoopContinue(); - break; - case glslang::EOpReturn: - if (node->getExpression()) { - const glslang::TType& glslangReturnType = node->getExpression()->getType(); - spv::Id returnId = accessChainLoad(glslangReturnType); - if (builder.getTypeId(returnId) != currentFunction->getReturnType()) { - builder.clearAccessChain(); - spv::Id copyId = builder.createVariable(spv::StorageClassFunction, currentFunction->getReturnType()); - builder.setAccessChainLValue(copyId); - multiTypeStore(glslangReturnType, returnId); - returnId = builder.createLoad(copyId); - } - builder.makeReturn(false, returnId); - } else - builder.makeReturn(false); - - builder.clearAccessChain(); - break; - - default: - assert(0); - break; - } - - return false; -} - -spv::Id TGlslangToSpvTraverser::createSpvVariable(const glslang::TIntermSymbol* node) -{ - // First, steer off constants, which are not SPIR-V variables, but - // can still have a mapping to a SPIR-V Id. - // This includes specialization constants. - if (node->getQualifier().isConstant()) { - return createSpvConstant(*node); - } - - // Now, handle actual variables - spv::StorageClass storageClass = TranslateStorageClass(node->getType()); - spv::Id spvType = convertGlslangToSpvType(node->getType()); - -#ifdef AMD_EXTENSIONS - const bool contains16BitType = node->getType().containsBasicType(glslang::EbtFloat16) || - node->getType().containsBasicType(glslang::EbtInt16) || - node->getType().containsBasicType(glslang::EbtUint16); - if (contains16BitType) { - if (storageClass == spv::StorageClassInput || storageClass == spv::StorageClassOutput) { - builder.addExtension(spv::E_SPV_KHR_16bit_storage); - builder.addCapability(spv::CapabilityStorageInputOutput16); - } else if (storageClass == spv::StorageClassPushConstant) { - builder.addExtension(spv::E_SPV_KHR_16bit_storage); - builder.addCapability(spv::CapabilityStoragePushConstant16); - } else if (storageClass == spv::StorageClassUniform) { - builder.addExtension(spv::E_SPV_KHR_16bit_storage); - builder.addCapability(spv::CapabilityStorageUniform16); - if (node->getType().getQualifier().storage == glslang::EvqBuffer) - builder.addCapability(spv::CapabilityStorageUniformBufferBlock16); - } - } -#endif - - const char* name = node->getName().c_str(); - if (glslang::IsAnonymous(name)) - name = ""; - - return builder.createVariable(storageClass, spvType, name); -} - -// Return type Id of the sampled type. -spv::Id TGlslangToSpvTraverser::getSampledType(const glslang::TSampler& sampler) -{ - switch (sampler.type) { - case glslang::EbtFloat: return builder.makeFloatType(32); - case glslang::EbtInt: return builder.makeIntType(32); - case glslang::EbtUint: return builder.makeUintType(32); - default: - assert(0); - return builder.makeFloatType(32); - } -} - -// If node is a swizzle operation, return the type that should be used if -// the swizzle base is first consumed by another operation, before the swizzle -// is applied. 
-spv::Id TGlslangToSpvTraverser::getInvertedSwizzleType(const glslang::TIntermTyped& node) -{ - if (node.getAsOperator() && - node.getAsOperator()->getOp() == glslang::EOpVectorSwizzle) - return convertGlslangToSpvType(node.getAsBinaryNode()->getLeft()->getType()); - else - return spv::NoType; -} - -// When inverting a swizzle with a parent op, this function -// will apply the swizzle operation to a completed parent operation. -spv::Id TGlslangToSpvTraverser::createInvertedSwizzle(spv::Decoration precision, const glslang::TIntermTyped& node, spv::Id parentResult) -{ - std::vector swizzle; - convertSwizzle(*node.getAsBinaryNode()->getRight()->getAsAggregate(), swizzle); - return builder.createRvalueSwizzle(precision, convertGlslangToSpvType(node.getType()), parentResult, swizzle); -} - -// Convert a glslang AST swizzle node to a swizzle vector for building SPIR-V. -void TGlslangToSpvTraverser::convertSwizzle(const glslang::TIntermAggregate& node, std::vector& swizzle) -{ - const glslang::TIntermSequence& swizzleSequence = node.getSequence(); - for (int i = 0; i < (int)swizzleSequence.size(); ++i) - swizzle.push_back(swizzleSequence[i]->getAsConstantUnion()->getConstArray()[0].getIConst()); -} - -// Convert from a glslang type to an SPV type, by calling into a -// recursive version of this function. This establishes the inherited -// layout state rooted from the top-level type. -spv::Id TGlslangToSpvTraverser::convertGlslangToSpvType(const glslang::TType& type) -{ - return convertGlslangToSpvType(type, getExplicitLayout(type), type.getQualifier()); -} - -// Do full recursive conversion of an arbitrary glslang type to a SPIR-V Id. -// explicitLayout can be kept the same throughout the hierarchical recursive walk. -// Mutually recursive with convertGlslangStructToSpvType(). -spv::Id TGlslangToSpvTraverser::convertGlslangToSpvType(const glslang::TType& type, glslang::TLayoutPacking explicitLayout, const glslang::TQualifier& qualifier) -{ - spv::Id spvType = spv::NoResult; - - switch (type.getBasicType()) { - case glslang::EbtVoid: - spvType = builder.makeVoidType(); - assert (! type.isArray()); - break; - case glslang::EbtFloat: - spvType = builder.makeFloatType(32); - break; - case glslang::EbtDouble: - spvType = builder.makeFloatType(64); - break; -#ifdef AMD_EXTENSIONS - case glslang::EbtFloat16: - builder.addExtension(spv::E_SPV_AMD_gpu_shader_half_float); - spvType = builder.makeFloatType(16); - break; -#endif - case glslang::EbtBool: - // "transparent" bool doesn't exist in SPIR-V. The GLSL convention is - // a 32-bit int where non-0 means true. 
- if (explicitLayout != glslang::ElpNone) - spvType = builder.makeUintType(32); - else - spvType = builder.makeBoolType(); - break; - case glslang::EbtInt: - spvType = builder.makeIntType(32); - break; - case glslang::EbtUint: - spvType = builder.makeUintType(32); - break; - case glslang::EbtInt64: - spvType = builder.makeIntType(64); - break; - case glslang::EbtUint64: - spvType = builder.makeUintType(64); - break; -#ifdef AMD_EXTENSIONS - case glslang::EbtInt16: - builder.addExtension(spv::E_SPV_AMD_gpu_shader_int16); - spvType = builder.makeIntType(16); - break; - case glslang::EbtUint16: - builder.addExtension(spv::E_SPV_AMD_gpu_shader_int16); - spvType = builder.makeUintType(16); - break; -#endif - case glslang::EbtAtomicUint: - builder.addCapability(spv::CapabilityAtomicStorage); - spvType = builder.makeUintType(32); - break; - case glslang::EbtSampler: - { - const glslang::TSampler& sampler = type.getSampler(); - if (sampler.sampler) { - // pure sampler - spvType = builder.makeSamplerType(); - } else { - // an image is present, make its type - spvType = builder.makeImageType(getSampledType(sampler), TranslateDimensionality(sampler), sampler.shadow, sampler.arrayed, sampler.ms, - sampler.image ? 2 : 1, TranslateImageFormat(type)); - if (sampler.combined) { - // already has both image and sampler, make the combined type - spvType = builder.makeSampledImageType(spvType); - } - } - } - break; - case glslang::EbtStruct: - case glslang::EbtBlock: - { - // If we've seen this struct type, return it - const glslang::TTypeList* glslangMembers = type.getStruct(); - - // Try to share structs for different layouts, but not yet for other - // kinds of qualification (primarily not yet including interpolant qualification). - if (! HasNonLayoutQualifiers(type, qualifier)) - spvType = structMap[explicitLayout][qualifier.layoutMatrix][glslangMembers]; - if (spvType != spv::NoResult) - break; - - // else, we haven't seen it... - if (type.getBasicType() == glslang::EbtBlock) - memberRemapper[glslangMembers].resize(glslangMembers->size()); - spvType = convertGlslangStructToSpvType(type, glslangMembers, explicitLayout, qualifier); - } - break; - default: - assert(0); - break; - } - - if (type.isMatrix()) - spvType = builder.makeMatrixType(spvType, type.getMatrixCols(), type.getMatrixRows()); - else { - // If this variable has a vector element count greater than 1, create a SPIR-V vector - if (type.getVectorSize() > 1) - spvType = builder.makeVectorType(spvType, type.getVectorSize()); - } - - if (type.isArray()) { - int stride = 0; // keep this 0 unless doing an explicit layout; 0 will mean no decoration, no stride - - // Do all but the outer dimension - if (type.getArraySizes()->getNumDims() > 1) { - // We need to decorate array strides for types needing explicit layout, except blocks. - if (explicitLayout != glslang::ElpNone && type.getBasicType() != glslang::EbtBlock) { - // Use a dummy glslang type for querying internal strides of - // arrays of arrays, but using just a one-dimensional array. - glslang::TType simpleArrayType(type, 0); // deference type of the array - while (simpleArrayType.getArraySizes().getNumDims() > 1) - simpleArrayType.getArraySizes().dereference(); - - // Will compute the higher-order strides here, rather than making a whole - // pile of types and doing repetitive recursion on their contents. 
- stride = getArrayStride(simpleArrayType, explicitLayout, qualifier.layoutMatrix); - } - - // make the arrays - for (int dim = type.getArraySizes()->getNumDims() - 1; dim > 0; --dim) { - spvType = builder.makeArrayType(spvType, makeArraySizeId(*type.getArraySizes(), dim), stride); - if (stride > 0) - builder.addDecoration(spvType, spv::DecorationArrayStride, stride); - stride *= type.getArraySizes()->getDimSize(dim); - } - } else { - // single-dimensional array, and don't yet have stride - - // We need to decorate array strides for types needing explicit layout, except blocks. - if (explicitLayout != glslang::ElpNone && type.getBasicType() != glslang::EbtBlock) - stride = getArrayStride(type, explicitLayout, qualifier.layoutMatrix); - } - - // Do the outer dimension, which might not be known for a runtime-sized array - if (type.isRuntimeSizedArray()) { - spvType = builder.makeRuntimeArray(spvType); - } else { - assert(type.getOuterArraySize() > 0); - spvType = builder.makeArrayType(spvType, makeArraySizeId(*type.getArraySizes(), 0), stride); - } - if (stride > 0) - builder.addDecoration(spvType, spv::DecorationArrayStride, stride); - } - - return spvType; -} - -// TODO: this functionality should exist at a higher level, in creating the AST -// -// Identify interface members that don't have their required extension turned on. -// -bool TGlslangToSpvTraverser::filterMember(const glslang::TType& member) -{ - auto& extensions = glslangIntermediate->getRequestedExtensions(); - - if (member.getFieldName() == "gl_ViewportMask" && - extensions.find("GL_NV_viewport_array2") == extensions.end()) - return true; - if (member.getFieldName() == "gl_SecondaryViewportMaskNV" && - extensions.find("GL_NV_stereo_view_rendering") == extensions.end()) - return true; - if (member.getFieldName() == "gl_SecondaryPositionNV" && - extensions.find("GL_NV_stereo_view_rendering") == extensions.end()) - return true; - if (member.getFieldName() == "gl_PositionPerViewNV" && - extensions.find("GL_NVX_multiview_per_view_attributes") == extensions.end()) - return true; - if (member.getFieldName() == "gl_ViewportMaskPerViewNV" && - extensions.find("GL_NVX_multiview_per_view_attributes") == extensions.end()) - return true; - if ((member.getFieldName() == "gl_ViewportIndex" || member.getFieldName() == "gl_Layer") && - extensions.find(glslang::E_GL_ARB_shader_viewport_layer_array) == extensions.end() && - extensions.find("GL_NV_viewport_array2") == extensions.end()) - return true; - - return false; -}; - -// Do full recursive conversion of a glslang structure (or block) type to a SPIR-V Id. -// explicitLayout can be kept the same throughout the hierarchical recursive walk. -// Mutually recursive with convertGlslangToSpvType(). 
-spv::Id TGlslangToSpvTraverser::convertGlslangStructToSpvType(const glslang::TType& type, - const glslang::TTypeList* glslangMembers, - glslang::TLayoutPacking explicitLayout, - const glslang::TQualifier& qualifier) -{ - // Create a vector of struct types for SPIR-V to consume - std::vector spvMembers; - int memberDelta = 0; // how much the member's index changes from glslang to SPIR-V, normally 0, except sometimes for blocks - for (int i = 0; i < (int)glslangMembers->size(); i++) { - glslang::TType& glslangMember = *(*glslangMembers)[i].type; - if (glslangMember.hiddenMember()) { - ++memberDelta; - if (type.getBasicType() == glslang::EbtBlock) - memberRemapper[glslangMembers][i] = -1; - } else { - if (type.getBasicType() == glslang::EbtBlock) { - memberRemapper[glslangMembers][i] = i - memberDelta; - if (filterMember(glslangMember)) - continue; - } - // modify just this child's view of the qualifier - glslang::TQualifier memberQualifier = glslangMember.getQualifier(); - InheritQualifiers(memberQualifier, qualifier); - - // manually inherit location - if (! memberQualifier.hasLocation() && qualifier.hasLocation()) - memberQualifier.layoutLocation = qualifier.layoutLocation; - - // recurse - spvMembers.push_back(convertGlslangToSpvType(glslangMember, explicitLayout, memberQualifier)); - } - } - - // Make the SPIR-V type - spv::Id spvType = builder.makeStructType(spvMembers, type.getTypeName().c_str()); - if (! HasNonLayoutQualifiers(type, qualifier)) - structMap[explicitLayout][qualifier.layoutMatrix][glslangMembers] = spvType; - - // Decorate it - decorateStructType(type, glslangMembers, explicitLayout, qualifier, spvType); - - return spvType; -} - -void TGlslangToSpvTraverser::decorateStructType(const glslang::TType& type, - const glslang::TTypeList* glslangMembers, - glslang::TLayoutPacking explicitLayout, - const glslang::TQualifier& qualifier, - spv::Id spvType) -{ - // Name and decorate the non-hidden members - int offset = -1; - int locationOffset = 0; // for use within the members of this struct - for (int i = 0; i < (int)glslangMembers->size(); i++) { - glslang::TType& glslangMember = *(*glslangMembers)[i].type; - int member = i; - if (type.getBasicType() == glslang::EbtBlock) { - member = memberRemapper[glslangMembers][i]; - if (filterMember(glslangMember)) - continue; - } - - // modify just this child's view of the qualifier - glslang::TQualifier memberQualifier = glslangMember.getQualifier(); - InheritQualifiers(memberQualifier, qualifier); - - // using -1 above to indicate a hidden member - if (member >= 0) { - builder.addMemberName(spvType, member, glslangMember.getFieldName().c_str()); - addMemberDecoration(spvType, member, TranslateLayoutDecoration(glslangMember, memberQualifier.layoutMatrix)); - addMemberDecoration(spvType, member, TranslatePrecisionDecoration(glslangMember)); - // Add interpolation and auxiliary storage decorations only to top-level members of Input and Output storage classes - if (type.getQualifier().storage == glslang::EvqVaryingIn || - type.getQualifier().storage == glslang::EvqVaryingOut) { - if (type.getBasicType() == glslang::EbtBlock || - glslangIntermediate->getSource() == glslang::EShSourceHlsl) { - addMemberDecoration(spvType, member, TranslateInterpolationDecoration(memberQualifier)); - addMemberDecoration(spvType, member, TranslateAuxiliaryStorageDecoration(memberQualifier)); - } - } - addMemberDecoration(spvType, member, TranslateInvariantDecoration(memberQualifier)); - - if (type.getBasicType() == glslang::EbtBlock && - qualifier.storage == 
glslang::EvqBuffer) { - // Add memory decorations only to top-level members of shader storage block - std::vector memory; - TranslateMemoryDecoration(memberQualifier, memory); - for (unsigned int i = 0; i < memory.size(); ++i) - addMemberDecoration(spvType, member, memory[i]); - } - - // Location assignment was already completed correctly by the front end, - // just track whether a member needs to be decorated. - // Ignore member locations if the container is an array, as that's - // ill-specified and decisions have been made to not allow this. - if (! type.isArray() && memberQualifier.hasLocation()) - builder.addMemberDecoration(spvType, member, spv::DecorationLocation, memberQualifier.layoutLocation); - - if (qualifier.hasLocation()) // track for upcoming inheritance - locationOffset += glslangIntermediate->computeTypeLocationSize(glslangMember); - - // component, XFB, others - if (glslangMember.getQualifier().hasComponent()) - builder.addMemberDecoration(spvType, member, spv::DecorationComponent, glslangMember.getQualifier().layoutComponent); - if (glslangMember.getQualifier().hasXfbOffset()) - builder.addMemberDecoration(spvType, member, spv::DecorationOffset, glslangMember.getQualifier().layoutXfbOffset); - else if (explicitLayout != glslang::ElpNone) { - // figure out what to do with offset, which is accumulating - int nextOffset; - updateMemberOffset(type, glslangMember, offset, nextOffset, explicitLayout, memberQualifier.layoutMatrix); - if (offset >= 0) - builder.addMemberDecoration(spvType, member, spv::DecorationOffset, offset); - offset = nextOffset; - } - - if (glslangMember.isMatrix() && explicitLayout != glslang::ElpNone) - builder.addMemberDecoration(spvType, member, spv::DecorationMatrixStride, getMatrixStride(glslangMember, explicitLayout, memberQualifier.layoutMatrix)); - - // built-in variable decorations - spv::BuiltIn builtIn = TranslateBuiltInDecoration(glslangMember.getQualifier().builtIn, true); - if (builtIn != spv::BuiltInMax) - addMemberDecoration(spvType, member, spv::DecorationBuiltIn, (int)builtIn); - -#ifdef NV_EXTENSIONS - if (builtIn == spv::BuiltInLayer) { - // SPV_NV_viewport_array2 extension - if (glslangMember.getQualifier().layoutViewportRelative){ - addMemberDecoration(spvType, member, (spv::Decoration)spv::DecorationViewportRelativeNV); - builder.addCapability(spv::CapabilityShaderViewportMaskNV); - builder.addExtension(spv::E_SPV_NV_viewport_array2); - } - if (glslangMember.getQualifier().layoutSecondaryViewportRelativeOffset != -2048){ - addMemberDecoration(spvType, member, (spv::Decoration)spv::DecorationSecondaryViewportRelativeNV, glslangMember.getQualifier().layoutSecondaryViewportRelativeOffset); - builder.addCapability(spv::CapabilityShaderStereoViewNV); - builder.addExtension(spv::E_SPV_NV_stereo_view_rendering); - } - } - if (glslangMember.getQualifier().layoutPassthrough) { - addMemberDecoration(spvType, member, (spv::Decoration)spv::DecorationPassthroughNV); - builder.addCapability(spv::CapabilityGeometryShaderPassthroughNV); - builder.addExtension(spv::E_SPV_NV_geometry_shader_passthrough); - } -#endif - } - } - - // Decorate the structure - addDecoration(spvType, TranslateLayoutDecoration(type, qualifier.layoutMatrix)); - addDecoration(spvType, TranslateBlockDecoration(type, glslangIntermediate->usingStorageBuffer())); - if (type.getQualifier().hasStream() && glslangIntermediate->isMultiStream()) { - builder.addCapability(spv::CapabilityGeometryStreams); - builder.addDecoration(spvType, spv::DecorationStream, 
type.getQualifier().layoutStream); - } - if (glslangIntermediate->getXfbMode()) { - builder.addCapability(spv::CapabilityTransformFeedback); - if (type.getQualifier().hasXfbStride()) - builder.addDecoration(spvType, spv::DecorationXfbStride, type.getQualifier().layoutXfbStride); - if (type.getQualifier().hasXfbBuffer()) - builder.addDecoration(spvType, spv::DecorationXfbBuffer, type.getQualifier().layoutXfbBuffer); - } -} - -// Turn the expression forming the array size into an id. -// This is not quite trivial, because of specialization constants. -// Sometimes, a raw constant is turned into an Id, and sometimes -// a specialization constant expression is. -spv::Id TGlslangToSpvTraverser::makeArraySizeId(const glslang::TArraySizes& arraySizes, int dim) -{ - // First, see if this is sized with a node, meaning a specialization constant: - glslang::TIntermTyped* specNode = arraySizes.getDimNode(dim); - if (specNode != nullptr) { - builder.clearAccessChain(); - specNode->traverse(this); - return accessChainLoad(specNode->getAsTyped()->getType()); - } - - // Otherwise, need a compile-time (front end) size, get it: - int size = arraySizes.getDimSize(dim); - assert(size > 0); - return builder.makeUintConstant(size); -} - -// Wrap the builder's accessChainLoad to: -// - localize handling of RelaxedPrecision -// - use the SPIR-V inferred type instead of another conversion of the glslang type -// (avoids unnecessary work and possible type punning for structures) -// - do conversion of concrete to abstract type -spv::Id TGlslangToSpvTraverser::accessChainLoad(const glslang::TType& type) -{ - spv::Id nominalTypeId = builder.accessChainGetInferredType(); - spv::Id loadedId = builder.accessChainLoad(TranslatePrecisionDecoration(type), nominalTypeId); - - // Need to convert to abstract types when necessary - if (type.getBasicType() == glslang::EbtBool) { - if (builder.isScalarType(nominalTypeId)) { - // Conversion for bool - spv::Id boolType = builder.makeBoolType(); - if (nominalTypeId != boolType) - loadedId = builder.createBinOp(spv::OpINotEqual, boolType, loadedId, builder.makeUintConstant(0)); - } else if (builder.isVectorType(nominalTypeId)) { - // Conversion for bvec - int vecSize = builder.getNumTypeComponents(nominalTypeId); - spv::Id bvecType = builder.makeVectorType(builder.makeBoolType(), vecSize); - if (nominalTypeId != bvecType) - loadedId = builder.createBinOp(spv::OpINotEqual, bvecType, loadedId, makeSmearedConstant(builder.makeUintConstant(0), vecSize)); - } - } - - return loadedId; -} - -// Wrap the builder's accessChainStore to: -// - do conversion of concrete to abstract type -// -// Implicitly uses the existing builder.accessChain as the storage target. 
-void TGlslangToSpvTraverser::accessChainStore(const glslang::TType& type, spv::Id rvalue) -{ - // Need to convert to abstract types when necessary - if (type.getBasicType() == glslang::EbtBool) { - spv::Id nominalTypeId = builder.accessChainGetInferredType(); - - if (builder.isScalarType(nominalTypeId)) { - // Conversion for bool - spv::Id boolType = builder.makeBoolType(); - if (nominalTypeId != boolType) { - // keep these outside arguments, for determinant order-of-evaluation - spv::Id one = builder.makeUintConstant(1); - spv::Id zero = builder.makeUintConstant(0); - rvalue = builder.createTriOp(spv::OpSelect, nominalTypeId, rvalue, one, zero); - } else if (builder.getTypeId(rvalue) != boolType) - rvalue = builder.createBinOp(spv::OpINotEqual, boolType, rvalue, builder.makeUintConstant(0)); - } else if (builder.isVectorType(nominalTypeId)) { - // Conversion for bvec - int vecSize = builder.getNumTypeComponents(nominalTypeId); - spv::Id bvecType = builder.makeVectorType(builder.makeBoolType(), vecSize); - if (nominalTypeId != bvecType) { - // keep these outside arguments, for determinant order-of-evaluation - spv::Id one = makeSmearedConstant(builder.makeUintConstant(1), vecSize); - spv::Id zero = makeSmearedConstant(builder.makeUintConstant(0), vecSize); - rvalue = builder.createTriOp(spv::OpSelect, nominalTypeId, rvalue, one, zero); - } else if (builder.getTypeId(rvalue) != bvecType) - rvalue = builder.createBinOp(spv::OpINotEqual, bvecType, rvalue, - makeSmearedConstant(builder.makeUintConstant(0), vecSize)); - } - } - - builder.accessChainStore(rvalue); -} - -// For storing when types match at the glslang level, but not might match at the -// SPIR-V level. -// -// This especially happens when a single glslang type expands to multiple -// SPIR-V types, like a struct that is used in a member-undecorated way as well -// as in a member-decorated way. -// -// NOTE: This function can handle any store request; if it's not special it -// simplifies to a simple OpStore. -// -// Implicitly uses the existing builder.accessChain as the storage target. -void TGlslangToSpvTraverser::multiTypeStore(const glslang::TType& type, spv::Id rValue) -{ - // we only do the complex path here if it's an aggregate - if (! type.isStruct() && ! type.isArray()) { - accessChainStore(type, rValue); - return; - } - - // and, it has to be a case of type aliasing - spv::Id rType = builder.getTypeId(rValue); - spv::Id lValue = builder.accessChainGetLValue(); - spv::Id lType = builder.getContainedTypeId(builder.getTypeId(lValue)); - if (lType == rType) { - accessChainStore(type, rValue); - return; - } - - // Recursively (as needed) copy an aggregate type to a different aggregate type, - // where the two types were the same type in GLSL. This requires member - // by member copy, recursively. - - // If an array, copy element by element. 
- if (type.isArray()) { - glslang::TType glslangElementType(type, 0); - spv::Id elementRType = builder.getContainedTypeId(rType); - for (int index = 0; index < type.getOuterArraySize(); ++index) { - // get the source member - spv::Id elementRValue = builder.createCompositeExtract(rValue, elementRType, index); - - // set up the target storage - builder.clearAccessChain(); - builder.setAccessChainLValue(lValue); - builder.accessChainPush(builder.makeIntConstant(index)); - - // store the member - multiTypeStore(glslangElementType, elementRValue); - } - } else { - assert(type.isStruct()); - - // loop over structure members - const glslang::TTypeList& members = *type.getStruct(); - for (int m = 0; m < (int)members.size(); ++m) { - const glslang::TType& glslangMemberType = *members[m].type; - - // get the source member - spv::Id memberRType = builder.getContainedTypeId(rType, m); - spv::Id memberRValue = builder.createCompositeExtract(rValue, memberRType, m); - - // set up the target storage - builder.clearAccessChain(); - builder.setAccessChainLValue(lValue); - builder.accessChainPush(builder.makeIntConstant(m)); - - // store the member - multiTypeStore(glslangMemberType, memberRValue); - } - } -} - -// Decide whether or not this type should be -// decorated with offsets and strides, and if so -// whether std140 or std430 rules should be applied. -glslang::TLayoutPacking TGlslangToSpvTraverser::getExplicitLayout(const glslang::TType& type) const -{ - // has to be a block - if (type.getBasicType() != glslang::EbtBlock) - return glslang::ElpNone; - - // has to be a uniform or buffer block - if (type.getQualifier().storage != glslang::EvqUniform && - type.getQualifier().storage != glslang::EvqBuffer) - return glslang::ElpNone; - - // return the layout to use - switch (type.getQualifier().layoutPacking) { - case glslang::ElpStd140: - case glslang::ElpStd430: - return type.getQualifier().layoutPacking; - default: - return glslang::ElpNone; - } -} - -// Given an array type, returns the integer stride required for that array -int TGlslangToSpvTraverser::getArrayStride(const glslang::TType& arrayType, glslang::TLayoutPacking explicitLayout, glslang::TLayoutMatrix matrixLayout) -{ - int size; - int stride; - glslangIntermediate->getBaseAlignment(arrayType, size, stride, explicitLayout == glslang::ElpStd140, matrixLayout == glslang::ElmRowMajor); - - return stride; -} - -// Given a matrix type, or array (of array) of matrixes type, returns the integer stride required for that matrix -// when used as a member of an interface block -int TGlslangToSpvTraverser::getMatrixStride(const glslang::TType& matrixType, glslang::TLayoutPacking explicitLayout, glslang::TLayoutMatrix matrixLayout) -{ - glslang::TType elementType; - elementType.shallowCopy(matrixType); - elementType.clearArraySizes(); - - int size; - int stride; - glslangIntermediate->getBaseAlignment(elementType, size, stride, explicitLayout == glslang::ElpStd140, matrixLayout == glslang::ElmRowMajor); - - return stride; -} - -// Given a member type of a struct, realign the current offset for it, and compute -// the next (not yet aligned) offset for the next member, which will get aligned -// on the next call. -// 'currentOffset' should be passed in already initialized, ready to modify, and reflecting -// the migration of data from nextOffset -> currentOffset. It should be -1 on the first call. -// -1 means a non-forced member offset (no decoration needed). 
-void TGlslangToSpvTraverser::updateMemberOffset(const glslang::TType& structType, const glslang::TType& memberType, int& currentOffset, int& nextOffset, - glslang::TLayoutPacking explicitLayout, glslang::TLayoutMatrix matrixLayout) -{ - // this will get a positive value when deemed necessary - nextOffset = -1; - - // override anything in currentOffset with user-set offset - if (memberType.getQualifier().hasOffset()) - currentOffset = memberType.getQualifier().layoutOffset; - - // It could be that current linker usage in glslang updated all the layoutOffset, - // in which case the following code does not matter. But, that's not quite right - // once cross-compilation unit GLSL validation is done, as the original user - // settings are needed in layoutOffset, and then the following will come into play. - - if (explicitLayout == glslang::ElpNone) { - if (! memberType.getQualifier().hasOffset()) - currentOffset = -1; - - return; - } - - // Getting this far means we need explicit offsets - if (currentOffset < 0) - currentOffset = 0; - - // Now, currentOffset is valid (either 0, or from a previous nextOffset), - // but possibly not yet correctly aligned. - - int memberSize; - int dummyStride; - int memberAlignment = glslangIntermediate->getBaseAlignment(memberType, memberSize, dummyStride, explicitLayout == glslang::ElpStd140, matrixLayout == glslang::ElmRowMajor); - - // Adjust alignment for HLSL rules - // TODO: make this consistent in early phases of code: - // adjusting this late means inconsistencies with earlier code, which for reflection is an issue - // Until reflection is brought in sync with these adjustments, don't apply to $Global, - // which is the most likely to rely on reflection, and least likely to rely implicit layouts - if (glslangIntermediate->usingHlslOFfsets() && - ! memberType.isArray() && memberType.isVector() && structType.getTypeName().compare("$Global") != 0) { - int dummySize; - int componentAlignment = glslangIntermediate->getBaseAlignmentScalar(memberType, dummySize); - if (componentAlignment <= 4) - memberAlignment = componentAlignment; - } - - // Bump up to member alignment - glslang::RoundToPow2(currentOffset, memberAlignment); - - // Bump up to vec4 if there is a bad straddle - if (glslangIntermediate->improperStraddle(memberType, memberSize, currentOffset)) - glslang::RoundToPow2(currentOffset, 16); - - nextOffset = currentOffset + memberSize; -} - -void TGlslangToSpvTraverser::declareUseOfStructMember(const glslang::TTypeList& members, int glslangMember) -{ - const glslang::TBuiltInVariable glslangBuiltIn = members[glslangMember].type->getQualifier().builtIn; - switch (glslangBuiltIn) - { - case glslang::EbvClipDistance: - case glslang::EbvCullDistance: - case glslang::EbvPointSize: -#ifdef NV_EXTENSIONS - case glslang::EbvLayer: - case glslang::EbvViewportIndex: - case glslang::EbvViewportMaskNV: - case glslang::EbvSecondaryPositionNV: - case glslang::EbvSecondaryViewportMaskNV: - case glslang::EbvPositionPerViewNV: - case glslang::EbvViewportMaskPerViewNV: -#endif - // Generate the associated capability. Delegate to TranslateBuiltInDecoration. - // Alternately, we could just call this for any glslang built-in, since the - // capability already guards against duplicates. - TranslateBuiltInDecoration(glslangBuiltIn, false); - break; - default: - // Capabilities were already generated when the struct was declared. 
-        break;
-    }
-}
-
-bool TGlslangToSpvTraverser::isShaderEntryPoint(const glslang::TIntermAggregate* node)
-{
-    return node->getName().compare(glslangIntermediate->getEntryPointMangledName().c_str()) == 0;
-}
-
-// Make all the functions, skeletally, without actually visiting their bodies.
-void TGlslangToSpvTraverser::makeFunctions(const glslang::TIntermSequence& glslFunctions)
-{
-    const auto getParamDecorations = [](std::vector<spv::Decoration>& decorations, const glslang::TType& type) {
-        spv::Decoration paramPrecision = TranslatePrecisionDecoration(type);
-        if (paramPrecision != spv::NoPrecision)
-            decorations.push_back(paramPrecision);
-        TranslateMemoryDecoration(type.getQualifier(), decorations);
-    };
-
-    for (int f = 0; f < (int)glslFunctions.size(); ++f) {
-        glslang::TIntermAggregate* glslFunction = glslFunctions[f]->getAsAggregate();
-        if (! glslFunction || glslFunction->getOp() != glslang::EOpFunction || isShaderEntryPoint(glslFunction))
-            continue;
-
-        // We're on a user function. Set up the basic interface for the function now,
-        // so that it's available to call. Translating the body will happen later.
-        //
-        // Typically (except for a "const in" parameter), an address will be passed to the
-        // function. What it is an address of varies:
-        //
-        // - "in" parameters not marked as "const" can be written to without modifying the calling
-        //   argument so that write needs to be to a copy, hence the address of a copy works.
-        //
-        // - "const in" parameters can just be the r-value, as no writes need occur.
-        //
-        // - "out" and "inout" arguments can't be done as pointers to the calling argument, because
-        //   GLSL has copy-in/copy-out semantics. They can be handled though with a pointer to a copy.
-
-        std::vector<spv::Id> paramTypes;
-        std::vector<std::vector<spv::Decoration>> paramDecorations; // list of decorations per parameter
-        glslang::TIntermSequence& parameters = glslFunction->getSequence()[0]->getAsAggregate()->getSequence();
-
-        bool implicitThis = (int)parameters.size() > 0 && parameters[0]->getAsSymbolNode()->getName() ==
-            glslangIntermediate->implicitThisName;
-
-        paramDecorations.resize(parameters.size());
-        for (int p = 0; p < (int)parameters.size(); ++p) {
-            const glslang::TType& paramType = parameters[p]->getAsTyped()->getType();
-            spv::Id typeId = convertGlslangToSpvType(paramType);
-            // can we pass by reference?
-            if (paramType.containsOpaque() || // sampler, etc.
- (paramType.getBasicType() == glslang::EbtBlock && - paramType.getQualifier().storage == glslang::EvqBuffer) || // SSBO - (p == 0 && implicitThis)) // implicit 'this' - typeId = builder.makePointer(TranslateStorageClass(paramType), typeId); - else if (paramType.getQualifier().storage != glslang::EvqConstReadOnly) - typeId = builder.makePointer(spv::StorageClassFunction, typeId); - else - rValueParameters.insert(parameters[p]->getAsSymbolNode()->getId()); - getParamDecorations(paramDecorations[p], paramType); - paramTypes.push_back(typeId); - } - - spv::Block* functionBlock; - spv::Function *function = builder.makeFunctionEntry(TranslatePrecisionDecoration(glslFunction->getType()), - convertGlslangToSpvType(glslFunction->getType()), - glslFunction->getName().c_str(), paramTypes, - paramDecorations, &functionBlock); - if (implicitThis) - function->setImplicitThis(); - - // Track function to emit/call later - functionMap[glslFunction->getName().c_str()] = function; - - // Set the parameter id's - for (int p = 0; p < (int)parameters.size(); ++p) { - symbolValues[parameters[p]->getAsSymbolNode()->getId()] = function->getParamId(p); - // give a name too - builder.addName(function->getParamId(p), parameters[p]->getAsSymbolNode()->getName().c_str()); - } - } -} - -// Process all the initializers, while skipping the functions and link objects -void TGlslangToSpvTraverser::makeGlobalInitializers(const glslang::TIntermSequence& initializers) -{ - builder.setBuildPoint(shaderEntry->getLastBlock()); - for (int i = 0; i < (int)initializers.size(); ++i) { - glslang::TIntermAggregate* initializer = initializers[i]->getAsAggregate(); - if (initializer && initializer->getOp() != glslang::EOpFunction && initializer->getOp() != glslang::EOpLinkerObjects) { - - // We're on a top-level node that's not a function. Treat as an initializer, whose - // code goes into the beginning of the entry point. - initializer->traverse(this); - } - } -} - -// Process all the functions, while skipping initializers. -void TGlslangToSpvTraverser::visitFunctions(const glslang::TIntermSequence& glslFunctions) -{ - for (int f = 0; f < (int)glslFunctions.size(); ++f) { - glslang::TIntermAggregate* node = glslFunctions[f]->getAsAggregate(); - if (node && (node->getOp() == glslang::EOpFunction || node->getOp() == glslang::EOpLinkerObjects)) - node->traverse(this); - } -} - -void TGlslangToSpvTraverser::handleFunctionEntry(const glslang::TIntermAggregate* node) -{ - // SPIR-V functions should already be in the functionMap from the prepass - // that called makeFunctions(). 
- currentFunction = functionMap[node->getName().c_str()]; - spv::Block* functionBlock = currentFunction->getEntryBlock(); - builder.setBuildPoint(functionBlock); -} - -void TGlslangToSpvTraverser::translateArguments(const glslang::TIntermAggregate& node, std::vector& arguments) -{ - const glslang::TIntermSequence& glslangArguments = node.getSequence(); - - glslang::TSampler sampler = {}; - bool cubeCompare = false; - if (node.isTexture() || node.isImage()) { - sampler = glslangArguments[0]->getAsTyped()->getType().getSampler(); - cubeCompare = sampler.dim == glslang::EsdCube && sampler.arrayed && sampler.shadow; - } - - for (int i = 0; i < (int)glslangArguments.size(); ++i) { - builder.clearAccessChain(); - glslangArguments[i]->traverse(this); - - // Special case l-value operands - bool lvalue = false; - switch (node.getOp()) { - case glslang::EOpImageAtomicAdd: - case glslang::EOpImageAtomicMin: - case glslang::EOpImageAtomicMax: - case glslang::EOpImageAtomicAnd: - case glslang::EOpImageAtomicOr: - case glslang::EOpImageAtomicXor: - case glslang::EOpImageAtomicExchange: - case glslang::EOpImageAtomicCompSwap: - if (i == 0) - lvalue = true; - break; - case glslang::EOpSparseImageLoad: - if ((sampler.ms && i == 3) || (! sampler.ms && i == 2)) - lvalue = true; - break; - case glslang::EOpSparseTexture: - if ((cubeCompare && i == 3) || (! cubeCompare && i == 2)) - lvalue = true; - break; - case glslang::EOpSparseTextureClamp: - if ((cubeCompare && i == 4) || (! cubeCompare && i == 3)) - lvalue = true; - break; - case glslang::EOpSparseTextureLod: - case glslang::EOpSparseTextureOffset: - if (i == 3) - lvalue = true; - break; - case glslang::EOpSparseTextureFetch: - if ((sampler.dim != glslang::EsdRect && i == 3) || (sampler.dim == glslang::EsdRect && i == 2)) - lvalue = true; - break; - case glslang::EOpSparseTextureFetchOffset: - if ((sampler.dim != glslang::EsdRect && i == 4) || (sampler.dim == glslang::EsdRect && i == 3)) - lvalue = true; - break; - case glslang::EOpSparseTextureLodOffset: - case glslang::EOpSparseTextureGrad: - case glslang::EOpSparseTextureOffsetClamp: - if (i == 4) - lvalue = true; - break; - case glslang::EOpSparseTextureGradOffset: - case glslang::EOpSparseTextureGradClamp: - if (i == 5) - lvalue = true; - break; - case glslang::EOpSparseTextureGradOffsetClamp: - if (i == 6) - lvalue = true; - break; - case glslang::EOpSparseTextureGather: - if ((sampler.shadow && i == 3) || (! sampler.shadow && i == 2)) - lvalue = true; - break; - case glslang::EOpSparseTextureGatherOffset: - case glslang::EOpSparseTextureGatherOffsets: - if ((sampler.shadow && i == 4) || (! 
sampler.shadow && i == 3)) - lvalue = true; - break; -#ifdef AMD_EXTENSIONS - case glslang::EOpSparseTextureGatherLod: - if (i == 3) - lvalue = true; - break; - case glslang::EOpSparseTextureGatherLodOffset: - case glslang::EOpSparseTextureGatherLodOffsets: - if (i == 4) - lvalue = true; - break; - case glslang::EOpSparseImageLoadLod: - if (i == 3) - lvalue = true; - break; -#endif - default: - break; - } - - if (lvalue) - arguments.push_back(builder.accessChainGetLValue()); - else - arguments.push_back(accessChainLoad(glslangArguments[i]->getAsTyped()->getType())); - } -} - -void TGlslangToSpvTraverser::translateArguments(glslang::TIntermUnary& node, std::vector& arguments) -{ - builder.clearAccessChain(); - node.getOperand()->traverse(this); - arguments.push_back(accessChainLoad(node.getOperand()->getType())); -} - -spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermOperator* node) -{ - if (! node->isImage() && ! node->isTexture()) - return spv::NoResult; - - builder.setLine(node->getLoc().line); - - auto resultType = [&node,this]{ return convertGlslangToSpvType(node->getType()); }; - - // Process a GLSL texturing op (will be SPV image) - const glslang::TSampler sampler = node->getAsAggregate() ? node->getAsAggregate()->getSequence()[0]->getAsTyped()->getType().getSampler() - : node->getAsUnaryNode()->getOperand()->getAsTyped()->getType().getSampler(); - std::vector arguments; - if (node->getAsAggregate()) - translateArguments(*node->getAsAggregate(), arguments); - else - translateArguments(*node->getAsUnaryNode(), arguments); - spv::Decoration precision = TranslatePrecisionDecoration(node->getOperationPrecision()); - - spv::Builder::TextureParameters params = { }; - params.sampler = arguments[0]; - - glslang::TCrackedTextureOp cracked; - node->crackTexture(sampler, cracked); - - const bool isUnsignedResult = - node->getType().getBasicType() == glslang::EbtUint64 || - node->getType().getBasicType() == glslang::EbtUint; - - // Check for queries - if (cracked.query) { - // OpImageQueryLod works on a sampled image, for other queries the image has to be extracted first - if (node->getOp() != glslang::EOpTextureQueryLod && builder.isSampledImage(params.sampler)) - params.sampler = builder.createUnaryOp(spv::OpImage, builder.getImageType(params.sampler), params.sampler); - - switch (node->getOp()) { - case glslang::EOpImageQuerySize: - case glslang::EOpTextureQuerySize: - if (arguments.size() > 1) { - params.lod = arguments[1]; - return builder.createTextureQueryCall(spv::OpImageQuerySizeLod, params, isUnsignedResult); - } else - return builder.createTextureQueryCall(spv::OpImageQuerySize, params, isUnsignedResult); - case glslang::EOpImageQuerySamples: - case glslang::EOpTextureQuerySamples: - return builder.createTextureQueryCall(spv::OpImageQuerySamples, params, isUnsignedResult); - case glslang::EOpTextureQueryLod: - params.coords = arguments[1]; - return builder.createTextureQueryCall(spv::OpImageQueryLod, params, isUnsignedResult); - case glslang::EOpTextureQueryLevels: - return builder.createTextureQueryCall(spv::OpImageQueryLevels, params, isUnsignedResult); - case glslang::EOpSparseTexelsResident: - return builder.createUnaryOp(spv::OpImageSparseTexelsResident, builder.makeBoolType(), arguments[0]); - default: - assert(0); - break; - } - } - - // Check for image functions other than queries - if (node->isImage()) { - std::vector operands; - auto opIt = arguments.begin(); - operands.push_back(*(opIt++)); - - // Handle subpass operations - // TODO: GLSL 
should change to have the "MS" only on the type rather than the - // built-in function. - if (cracked.subpass) { - // add on the (0,0) coordinate - spv::Id zero = builder.makeIntConstant(0); - std::vector comps; - comps.push_back(zero); - comps.push_back(zero); - operands.push_back(builder.makeCompositeConstant(builder.makeVectorType(builder.makeIntType(32), 2), comps)); - if (sampler.ms) { - operands.push_back(spv::ImageOperandsSampleMask); - operands.push_back(*(opIt++)); - } - return builder.createOp(spv::OpImageRead, resultType(), operands); - } - - operands.push_back(*(opIt++)); -#ifdef AMD_EXTENSIONS - if (node->getOp() == glslang::EOpImageLoad || node->getOp() == glslang::EOpImageLoadLod) { -#else - if (node->getOp() == glslang::EOpImageLoad) { -#endif - if (sampler.ms) { - operands.push_back(spv::ImageOperandsSampleMask); - operands.push_back(*opIt); -#ifdef AMD_EXTENSIONS - } else if (cracked.lod) { - builder.addExtension(spv::E_SPV_AMD_shader_image_load_store_lod); - builder.addCapability(spv::CapabilityImageReadWriteLodAMD); - - operands.push_back(spv::ImageOperandsLodMask); - operands.push_back(*opIt); -#endif - } - if (builder.getImageTypeFormat(builder.getImageType(operands.front())) == spv::ImageFormatUnknown) - builder.addCapability(spv::CapabilityStorageImageReadWithoutFormat); - return builder.createOp(spv::OpImageRead, resultType(), operands); -#ifdef AMD_EXTENSIONS - } else if (node->getOp() == glslang::EOpImageStore || node->getOp() == glslang::EOpImageStoreLod) { -#else - } else if (node->getOp() == glslang::EOpImageStore) { -#endif - if (sampler.ms) { - operands.push_back(*(opIt + 1)); - operands.push_back(spv::ImageOperandsSampleMask); - operands.push_back(*opIt); -#ifdef AMD_EXTENSIONS - } else if (cracked.lod) { - builder.addExtension(spv::E_SPV_AMD_shader_image_load_store_lod); - builder.addCapability(spv::CapabilityImageReadWriteLodAMD); - - operands.push_back(*(opIt + 1)); - operands.push_back(spv::ImageOperandsLodMask); - operands.push_back(*opIt); -#endif - } else - operands.push_back(*opIt); - builder.createNoResultOp(spv::OpImageWrite, operands); - if (builder.getImageTypeFormat(builder.getImageType(operands.front())) == spv::ImageFormatUnknown) - builder.addCapability(spv::CapabilityStorageImageWriteWithoutFormat); - return spv::NoResult; -#ifdef AMD_EXTENSIONS - } else if (node->getOp() == glslang::EOpSparseImageLoad || node->getOp() == glslang::EOpSparseImageLoadLod) { -#else - } else if (node->getOp() == glslang::EOpSparseImageLoad) { -#endif - builder.addCapability(spv::CapabilitySparseResidency); - if (builder.getImageTypeFormat(builder.getImageType(operands.front())) == spv::ImageFormatUnknown) - builder.addCapability(spv::CapabilityStorageImageReadWithoutFormat); - - if (sampler.ms) { - operands.push_back(spv::ImageOperandsSampleMask); - operands.push_back(*opIt++); -#ifdef AMD_EXTENSIONS - } else if (cracked.lod) { - builder.addExtension(spv::E_SPV_AMD_shader_image_load_store_lod); - builder.addCapability(spv::CapabilityImageReadWriteLodAMD); - - operands.push_back(spv::ImageOperandsLodMask); - operands.push_back(*opIt++); -#endif - } - - // Create the return type that was a special structure - spv::Id texelOut = *opIt; - spv::Id typeId0 = resultType(); - spv::Id typeId1 = builder.getDerefTypeId(texelOut); - spv::Id resultTypeId = builder.makeStructResultType(typeId0, typeId1); - - spv::Id resultId = builder.createOp(spv::OpImageSparseRead, resultTypeId, operands); - - // Decode the return type - 
builder.createStore(builder.createCompositeExtract(resultId, typeId1, 1), texelOut); - return builder.createCompositeExtract(resultId, typeId0, 0); - } else { - // Process image atomic operations - - // GLSL "IMAGE_PARAMS" will involve in constructing an image texel pointer and this pointer, - // as the first source operand, is required by SPIR-V atomic operations. - operands.push_back(sampler.ms ? *(opIt++) : builder.makeUintConstant(0)); // For non-MS, the value should be 0 - - spv::Id resultTypeId = builder.makePointer(spv::StorageClassImage, resultType()); - spv::Id pointer = builder.createOp(spv::OpImageTexelPointer, resultTypeId, operands); - - std::vector operands; - operands.push_back(pointer); - for (; opIt != arguments.end(); ++opIt) - operands.push_back(*opIt); - - return createAtomicOperation(node->getOp(), precision, resultType(), operands, node->getBasicType()); - } - } - - // Check for texture functions other than queries - bool sparse = node->isSparseTexture(); - bool cubeCompare = sampler.dim == glslang::EsdCube && sampler.arrayed && sampler.shadow; - - // check for bias argument - bool bias = false; -#ifdef AMD_EXTENSIONS - if (! cracked.lod && ! cracked.grad && ! cracked.fetch && ! cubeCompare) { -#else - if (! cracked.lod && ! cracked.gather && ! cracked.grad && ! cracked.fetch && ! cubeCompare) { -#endif - int nonBiasArgCount = 2; -#ifdef AMD_EXTENSIONS - if (cracked.gather) - ++nonBiasArgCount; // comp argument should be present when bias argument is present -#endif - if (cracked.offset) - ++nonBiasArgCount; -#ifdef AMD_EXTENSIONS - else if (cracked.offsets) - ++nonBiasArgCount; -#endif - if (cracked.grad) - nonBiasArgCount += 2; - if (cracked.lodClamp) - ++nonBiasArgCount; - if (sparse) - ++nonBiasArgCount; - - if ((int)arguments.size() > nonBiasArgCount) - bias = true; - } - - // See if the sampler param should really be just the SPV image part - if (cracked.fetch) { - // a fetch needs to have the image extracted first - if (builder.isSampledImage(params.sampler)) - params.sampler = builder.createUnaryOp(spv::OpImage, builder.getImageType(params.sampler), params.sampler); - } - -#ifdef AMD_EXTENSIONS - if (cracked.gather) { - const auto& sourceExtensions = glslangIntermediate->getRequestedExtensions(); - if (bias || cracked.lod || - sourceExtensions.find(glslang::E_GL_AMD_texture_gather_bias_lod) != sourceExtensions.end()) { - builder.addExtension(spv::E_SPV_AMD_texture_gather_bias_lod); - builder.addCapability(spv::CapabilityImageGatherBiasLodAMD); - } - } -#endif - - // set the rest of the arguments - - params.coords = arguments[1]; - int extraArgs = 0; - bool noImplicitLod = false; - - // sort out where Dref is coming from - if (cubeCompare) { - params.Dref = arguments[2]; - ++extraArgs; - } else if (sampler.shadow && cracked.gather) { - params.Dref = arguments[2]; - ++extraArgs; - } else if (sampler.shadow) { - std::vector indexes; - int dRefComp; - if (cracked.proj) - dRefComp = 2; // "The resulting 3rd component of P in the shadow forms is used as Dref" - else - dRefComp = builder.getNumComponents(params.coords) - 1; - indexes.push_back(dRefComp); - params.Dref = builder.createCompositeExtract(params.coords, builder.getScalarTypeId(builder.getTypeId(params.coords)), indexes); - } - - // lod - if (cracked.lod) { - params.lod = arguments[2 + extraArgs]; - ++extraArgs; - } else if (glslangIntermediate->getStage() != EShLangFragment) { - // we need to invent the default lod for an explicit lod instruction for a non-fragment stage - noImplicitLod = true; - } - - // 
multisample - if (sampler.ms) { - params.sample = arguments[2 + extraArgs]; // For MS, "sample" should be specified - ++extraArgs; - } - - // gradient - if (cracked.grad) { - params.gradX = arguments[2 + extraArgs]; - params.gradY = arguments[3 + extraArgs]; - extraArgs += 2; - } - - // offset and offsets - if (cracked.offset) { - params.offset = arguments[2 + extraArgs]; - ++extraArgs; - } else if (cracked.offsets) { - params.offsets = arguments[2 + extraArgs]; - ++extraArgs; - } - - // lod clamp - if (cracked.lodClamp) { - params.lodClamp = arguments[2 + extraArgs]; - ++extraArgs; - } - - // sparse - if (sparse) { - params.texelOut = arguments[2 + extraArgs]; - ++extraArgs; - } - - // gather component - if (cracked.gather && ! sampler.shadow) { - // default component is 0, if missing, otherwise an argument - if (2 + extraArgs < (int)arguments.size()) { - params.component = arguments[2 + extraArgs]; - ++extraArgs; - } else - params.component = builder.makeIntConstant(0); - } - - // bias - if (bias) { - params.bias = arguments[2 + extraArgs]; - ++extraArgs; - } - - // projective component (might not to move) - // GLSL: "The texture coordinates consumed from P, not including the last component of P, - // are divided by the last component of P." - // SPIR-V: "... (u [, v] [, w], q)... It may be a vector larger than needed, but all - // unused components will appear after all used components." - if (cracked.proj) { - int projSourceComp = builder.getNumComponents(params.coords) - 1; - int projTargetComp; - switch (sampler.dim) { - case glslang::Esd1D: projTargetComp = 1; break; - case glslang::Esd2D: projTargetComp = 2; break; - case glslang::EsdRect: projTargetComp = 2; break; - default: projTargetComp = projSourceComp; break; - } - // copy the projective coordinate if we have to - if (projTargetComp != projSourceComp) { - spv::Id projComp = builder.createCompositeExtract(params.coords, - builder.getScalarTypeId(builder.getTypeId(params.coords)), - projSourceComp); - params.coords = builder.createCompositeInsert(projComp, params.coords, - builder.getTypeId(params.coords), projTargetComp); - } - } - - return builder.createTextureCall(precision, resultType(), sparse, cracked.fetch, cracked.proj, cracked.gather, noImplicitLod, params); -} - -spv::Id TGlslangToSpvTraverser::handleUserFunctionCall(const glslang::TIntermAggregate* node) -{ - // Grab the function's pointer from the previously created function - spv::Function* function = functionMap[node->getName().c_str()]; - if (! function) - return 0; - - const glslang::TIntermSequence& glslangArgs = node->getSequence(); - const glslang::TQualifierList& qualifiers = node->getQualifierList(); - - // See comments in makeFunctions() for details about the semantics for parameter passing. - // - // These imply we need a four step process: - // 1. Evaluate the arguments - // 2. Allocate and make copies of in, out, and inout arguments - // 3. Make the call - // 4. Copy back the results - - // 1. 
Evaluate the arguments - std::vector lValues; - std::vector rValues; - std::vector argTypes; - for (int a = 0; a < (int)glslangArgs.size(); ++a) { - const glslang::TType& paramType = glslangArgs[a]->getAsTyped()->getType(); - // build l-value - builder.clearAccessChain(); - glslangArgs[a]->traverse(this); - argTypes.push_back(¶mType); - // keep outputs and opaque objects as l-values, evaluate input-only as r-values - if (qualifiers[a] != glslang::EvqConstReadOnly || paramType.containsOpaque()) { - // save l-value - lValues.push_back(builder.getAccessChain()); - } else { - // process r-value - rValues.push_back(accessChainLoad(*argTypes.back())); - } - } - - // 2. Allocate space for anything needing a copy, and if it's "in" or "inout" - // copy the original into that space. - // - // Also, build up the list of actual arguments to pass in for the call - int lValueCount = 0; - int rValueCount = 0; - std::vector spvArgs; - for (int a = 0; a < (int)glslangArgs.size(); ++a) { - const glslang::TType& paramType = glslangArgs[a]->getAsTyped()->getType(); - spv::Id arg; - if (paramType.containsOpaque() || - (paramType.getBasicType() == glslang::EbtBlock && qualifiers[a] == glslang::EvqBuffer) || - (a == 0 && function->hasImplicitThis())) { - builder.setAccessChain(lValues[lValueCount]); - arg = builder.accessChainGetLValue(); - ++lValueCount; - } else if (qualifiers[a] != glslang::EvqConstReadOnly) { - // need space to hold the copy - arg = builder.createVariable(spv::StorageClassFunction, convertGlslangToSpvType(paramType), "param"); - if (qualifiers[a] == glslang::EvqIn || qualifiers[a] == glslang::EvqInOut) { - // need to copy the input into output space - builder.setAccessChain(lValues[lValueCount]); - spv::Id copy = accessChainLoad(*argTypes[a]); - builder.clearAccessChain(); - builder.setAccessChainLValue(arg); - multiTypeStore(paramType, copy); - } - ++lValueCount; - } else { - arg = rValues[rValueCount]; - ++rValueCount; - } - spvArgs.push_back(arg); - } - - // 3. Make the call. - spv::Id result = builder.createFunctionCall(function, spvArgs); - builder.setPrecision(result, TranslatePrecisionDecoration(node->getType())); - - // 4. Copy back out an "out" arguments. - lValueCount = 0; - for (int a = 0; a < (int)glslangArgs.size(); ++a) { - const glslang::TType& paramType = glslangArgs[a]->getAsTyped()->getType(); - if (qualifiers[a] != glslang::EvqConstReadOnly) { - if (qualifiers[a] == glslang::EvqOut || qualifiers[a] == glslang::EvqInOut) { - spv::Id copy = builder.createLoad(spvArgs[a]); - builder.setAccessChain(lValues[lValueCount]); - multiTypeStore(paramType, copy); - } - ++lValueCount; - } - } - - return result; -} - -// Translate AST operation to SPV operation, already having SPV-based operands/types. 
-spv::Id TGlslangToSpvTraverser::createBinaryOperation(glslang::TOperator op, spv::Decoration precision, - spv::Decoration noContraction, - spv::Id typeId, spv::Id left, spv::Id right, - glslang::TBasicType typeProxy, bool reduceComparison) -{ -#ifdef AMD_EXTENSIONS - bool isUnsigned = typeProxy == glslang::EbtUint || typeProxy == glslang::EbtUint64 || typeProxy == glslang::EbtUint16; - bool isFloat = typeProxy == glslang::EbtFloat || typeProxy == glslang::EbtDouble || typeProxy == glslang::EbtFloat16; -#else - bool isUnsigned = typeProxy == glslang::EbtUint || typeProxy == glslang::EbtUint64; - bool isFloat = typeProxy == glslang::EbtFloat || typeProxy == glslang::EbtDouble; -#endif - bool isBool = typeProxy == glslang::EbtBool; - - spv::Op binOp = spv::OpNop; - bool needMatchingVectors = true; // for non-matrix ops, would a scalar need to smear to match a vector? - bool comparison = false; - - switch (op) { - case glslang::EOpAdd: - case glslang::EOpAddAssign: - if (isFloat) - binOp = spv::OpFAdd; - else - binOp = spv::OpIAdd; - break; - case glslang::EOpSub: - case glslang::EOpSubAssign: - if (isFloat) - binOp = spv::OpFSub; - else - binOp = spv::OpISub; - break; - case glslang::EOpMul: - case glslang::EOpMulAssign: - if (isFloat) - binOp = spv::OpFMul; - else - binOp = spv::OpIMul; - break; - case glslang::EOpVectorTimesScalar: - case glslang::EOpVectorTimesScalarAssign: - if (isFloat && (builder.isVector(left) || builder.isVector(right))) { - if (builder.isVector(right)) - std::swap(left, right); - assert(builder.isScalar(right)); - needMatchingVectors = false; - binOp = spv::OpVectorTimesScalar; - } else - binOp = spv::OpIMul; - break; - case glslang::EOpVectorTimesMatrix: - case glslang::EOpVectorTimesMatrixAssign: - binOp = spv::OpVectorTimesMatrix; - break; - case glslang::EOpMatrixTimesVector: - binOp = spv::OpMatrixTimesVector; - break; - case glslang::EOpMatrixTimesScalar: - case glslang::EOpMatrixTimesScalarAssign: - binOp = spv::OpMatrixTimesScalar; - break; - case glslang::EOpMatrixTimesMatrix: - case glslang::EOpMatrixTimesMatrixAssign: - binOp = spv::OpMatrixTimesMatrix; - break; - case glslang::EOpOuterProduct: - binOp = spv::OpOuterProduct; - needMatchingVectors = false; - break; - - case glslang::EOpDiv: - case glslang::EOpDivAssign: - if (isFloat) - binOp = spv::OpFDiv; - else if (isUnsigned) - binOp = spv::OpUDiv; - else - binOp = spv::OpSDiv; - break; - case glslang::EOpMod: - case glslang::EOpModAssign: - if (isFloat) - binOp = spv::OpFMod; - else if (isUnsigned) - binOp = spv::OpUMod; - else - binOp = spv::OpSMod; - break; - case glslang::EOpRightShift: - case glslang::EOpRightShiftAssign: - if (isUnsigned) - binOp = spv::OpShiftRightLogical; - else - binOp = spv::OpShiftRightArithmetic; - break; - case glslang::EOpLeftShift: - case glslang::EOpLeftShiftAssign: - binOp = spv::OpShiftLeftLogical; - break; - case glslang::EOpAnd: - case glslang::EOpAndAssign: - binOp = spv::OpBitwiseAnd; - break; - case glslang::EOpLogicalAnd: - needMatchingVectors = false; - binOp = spv::OpLogicalAnd; - break; - case glslang::EOpInclusiveOr: - case glslang::EOpInclusiveOrAssign: - binOp = spv::OpBitwiseOr; - break; - case glslang::EOpLogicalOr: - needMatchingVectors = false; - binOp = spv::OpLogicalOr; - break; - case glslang::EOpExclusiveOr: - case glslang::EOpExclusiveOrAssign: - binOp = spv::OpBitwiseXor; - break; - case glslang::EOpLogicalXor: - needMatchingVectors = false; - binOp = spv::OpLogicalNotEqual; - break; - - case glslang::EOpLessThan: - case glslang::EOpGreaterThan: 
- case glslang::EOpLessThanEqual: - case glslang::EOpGreaterThanEqual: - case glslang::EOpEqual: - case glslang::EOpNotEqual: - case glslang::EOpVectorEqual: - case glslang::EOpVectorNotEqual: - comparison = true; - break; - default: - break; - } - - // handle mapped binary operations (should be non-comparison) - if (binOp != spv::OpNop) { - assert(comparison == false); - if (builder.isMatrix(left) || builder.isMatrix(right)) - return createBinaryMatrixOperation(binOp, precision, noContraction, typeId, left, right); - - // No matrix involved; make both operands be the same number of components, if needed - if (needMatchingVectors) - builder.promoteScalar(precision, left, right); - - spv::Id result = builder.createBinOp(binOp, typeId, left, right); - addDecoration(result, noContraction); - return builder.setPrecision(result, precision); - } - - if (! comparison) - return 0; - - // Handle comparison instructions - - if (reduceComparison && (op == glslang::EOpEqual || op == glslang::EOpNotEqual) - && (builder.isVector(left) || builder.isMatrix(left) || builder.isAggregate(left))) - return builder.createCompositeCompare(precision, left, right, op == glslang::EOpEqual); - - switch (op) { - case glslang::EOpLessThan: - if (isFloat) - binOp = spv::OpFOrdLessThan; - else if (isUnsigned) - binOp = spv::OpULessThan; - else - binOp = spv::OpSLessThan; - break; - case glslang::EOpGreaterThan: - if (isFloat) - binOp = spv::OpFOrdGreaterThan; - else if (isUnsigned) - binOp = spv::OpUGreaterThan; - else - binOp = spv::OpSGreaterThan; - break; - case glslang::EOpLessThanEqual: - if (isFloat) - binOp = spv::OpFOrdLessThanEqual; - else if (isUnsigned) - binOp = spv::OpULessThanEqual; - else - binOp = spv::OpSLessThanEqual; - break; - case glslang::EOpGreaterThanEqual: - if (isFloat) - binOp = spv::OpFOrdGreaterThanEqual; - else if (isUnsigned) - binOp = spv::OpUGreaterThanEqual; - else - binOp = spv::OpSGreaterThanEqual; - break; - case glslang::EOpEqual: - case glslang::EOpVectorEqual: - if (isFloat) - binOp = spv::OpFOrdEqual; - else if (isBool) - binOp = spv::OpLogicalEqual; - else - binOp = spv::OpIEqual; - break; - case glslang::EOpNotEqual: - case glslang::EOpVectorNotEqual: - if (isFloat) - binOp = spv::OpFOrdNotEqual; - else if (isBool) - binOp = spv::OpLogicalNotEqual; - else - binOp = spv::OpINotEqual; - break; - default: - break; - } - - if (binOp != spv::OpNop) { - spv::Id result = builder.createBinOp(binOp, typeId, left, right); - addDecoration(result, noContraction); - return builder.setPrecision(result, precision); - } - - return 0; -} - -// -// Translate AST matrix operation to SPV operation, already having SPV-based operands/types. -// These can be any of: -// -// matrix * scalar -// scalar * matrix -// matrix * matrix linear algebraic -// matrix * vector -// vector * matrix -// matrix * matrix componentwise -// matrix op matrix op in {+, -, /} -// matrix op scalar op in {+, -, /} -// scalar op matrix op in {+, -, /} -// -spv::Id TGlslangToSpvTraverser::createBinaryMatrixOperation(spv::Op op, spv::Decoration precision, spv::Decoration noContraction, spv::Id typeId, spv::Id left, spv::Id right) -{ - bool firstClass = true; - - // First, handle first-class matrix operations (* and matrix/scalar) - switch (op) { - case spv::OpFDiv: - if (builder.isMatrix(left) && builder.isScalar(right)) { - // turn matrix / scalar into a multiply... 
- right = builder.createBinOp(spv::OpFDiv, builder.getTypeId(right), builder.makeFloatConstant(1.0F), right); - op = spv::OpMatrixTimesScalar; - } else - firstClass = false; - break; - case spv::OpMatrixTimesScalar: - if (builder.isMatrix(right)) - std::swap(left, right); - assert(builder.isScalar(right)); - break; - case spv::OpVectorTimesMatrix: - assert(builder.isVector(left)); - assert(builder.isMatrix(right)); - break; - case spv::OpMatrixTimesVector: - assert(builder.isMatrix(left)); - assert(builder.isVector(right)); - break; - case spv::OpMatrixTimesMatrix: - assert(builder.isMatrix(left)); - assert(builder.isMatrix(right)); - break; - default: - firstClass = false; - break; - } - - if (firstClass) { - spv::Id result = builder.createBinOp(op, typeId, left, right); - addDecoration(result, noContraction); - return builder.setPrecision(result, precision); - } - - // Handle component-wise +, -, *, %, and / for all combinations of type. - // The result type of all of them is the same type as the (a) matrix operand. - // The algorithm is to: - // - break the matrix(es) into vectors - // - smear any scalar to a vector - // - do vector operations - // - make a matrix out the vector results - switch (op) { - case spv::OpFAdd: - case spv::OpFSub: - case spv::OpFDiv: - case spv::OpFMod: - case spv::OpFMul: - { - // one time set up... - bool leftMat = builder.isMatrix(left); - bool rightMat = builder.isMatrix(right); - unsigned int numCols = leftMat ? builder.getNumColumns(left) : builder.getNumColumns(right); - int numRows = leftMat ? builder.getNumRows(left) : builder.getNumRows(right); - spv::Id scalarType = builder.getScalarTypeId(typeId); - spv::Id vecType = builder.makeVectorType(scalarType, numRows); - std::vector results; - spv::Id smearVec = spv::NoResult; - if (builder.isScalar(left)) - smearVec = builder.smearScalar(precision, left, vecType); - else if (builder.isScalar(right)) - smearVec = builder.smearScalar(precision, right, vecType); - - // do each vector op - for (unsigned int c = 0; c < numCols; ++c) { - std::vector indexes; - indexes.push_back(c); - spv::Id leftVec = leftMat ? builder.createCompositeExtract( left, vecType, indexes) : smearVec; - spv::Id rightVec = rightMat ? 
builder.createCompositeExtract(right, vecType, indexes) : smearVec; - spv::Id result = builder.createBinOp(op, vecType, leftVec, rightVec); - addDecoration(result, noContraction); - results.push_back(builder.setPrecision(result, precision)); - } - - // put the pieces together - return builder.setPrecision(builder.createCompositeConstruct(typeId, results), precision); - } - default: - assert(0); - return spv::NoResult; - } -} - -spv::Id TGlslangToSpvTraverser::createUnaryOperation(glslang::TOperator op, spv::Decoration precision, spv::Decoration noContraction, spv::Id typeId, spv::Id operand, glslang::TBasicType typeProxy) -{ - spv::Op unaryOp = spv::OpNop; - int extBuiltins = -1; - int libCall = -1; -#ifdef AMD_EXTENSIONS - bool isUnsigned = typeProxy == glslang::EbtUint || typeProxy == glslang::EbtUint64 || typeProxy == glslang::EbtUint16; - bool isFloat = typeProxy == glslang::EbtFloat || typeProxy == glslang::EbtDouble || typeProxy == glslang::EbtFloat16; -#else - bool isUnsigned = typeProxy == glslang::EbtUint || typeProxy == glslang::EbtUint64; - bool isFloat = typeProxy == glslang::EbtFloat || typeProxy == glslang::EbtDouble; -#endif - - switch (op) { - case glslang::EOpNegative: - if (isFloat) { - unaryOp = spv::OpFNegate; - if (builder.isMatrixType(typeId)) - return createUnaryMatrixOperation(unaryOp, precision, noContraction, typeId, operand, typeProxy); - } else - unaryOp = spv::OpSNegate; - break; - - case glslang::EOpLogicalNot: - case glslang::EOpVectorLogicalNot: - unaryOp = spv::OpLogicalNot; - break; - case glslang::EOpBitwiseNot: - unaryOp = spv::OpNot; - break; - - case glslang::EOpDeterminant: - libCall = spv::GLSLstd450Determinant; - break; - case glslang::EOpMatrixInverse: - libCall = spv::GLSLstd450MatrixInverse; - break; - case glslang::EOpTranspose: - unaryOp = spv::OpTranspose; - break; - - case glslang::EOpRadians: - libCall = spv::GLSLstd450Radians; - break; - case glslang::EOpDegrees: - libCall = spv::GLSLstd450Degrees; - break; - case glslang::EOpSin: - libCall = spv::GLSLstd450Sin; - break; - case glslang::EOpCos: - libCall = spv::GLSLstd450Cos; - break; - case glslang::EOpTan: - libCall = spv::GLSLstd450Tan; - break; - case glslang::EOpAcos: - libCall = spv::GLSLstd450Acos; - break; - case glslang::EOpAsin: - libCall = spv::GLSLstd450Asin; - break; - case glslang::EOpAtan: - libCall = spv::GLSLstd450Atan; - break; - - case glslang::EOpAcosh: - libCall = spv::GLSLstd450Acosh; - break; - case glslang::EOpAsinh: - libCall = spv::GLSLstd450Asinh; - break; - case glslang::EOpAtanh: - libCall = spv::GLSLstd450Atanh; - break; - case glslang::EOpTanh: - libCall = spv::GLSLstd450Tanh; - break; - case glslang::EOpCosh: - libCall = spv::GLSLstd450Cosh; - break; - case glslang::EOpSinh: - libCall = spv::GLSLstd450Sinh; - break; - - case glslang::EOpLength: - libCall = spv::GLSLstd450Length; - break; - case glslang::EOpNormalize: - libCall = spv::GLSLstd450Normalize; - break; - - case glslang::EOpExp: - libCall = spv::GLSLstd450Exp; - break; - case glslang::EOpLog: - libCall = spv::GLSLstd450Log; - break; - case glslang::EOpExp2: - libCall = spv::GLSLstd450Exp2; - break; - case glslang::EOpLog2: - libCall = spv::GLSLstd450Log2; - break; - case glslang::EOpSqrt: - libCall = spv::GLSLstd450Sqrt; - break; - case glslang::EOpInverseSqrt: - libCall = spv::GLSLstd450InverseSqrt; - break; - - case glslang::EOpFloor: - libCall = spv::GLSLstd450Floor; - break; - case glslang::EOpTrunc: - libCall = spv::GLSLstd450Trunc; - break; - case glslang::EOpRound: - libCall = 
spv::GLSLstd450Round; - break; - case glslang::EOpRoundEven: - libCall = spv::GLSLstd450RoundEven; - break; - case glslang::EOpCeil: - libCall = spv::GLSLstd450Ceil; - break; - case glslang::EOpFract: - libCall = spv::GLSLstd450Fract; - break; - - case glslang::EOpIsNan: - unaryOp = spv::OpIsNan; - break; - case glslang::EOpIsInf: - unaryOp = spv::OpIsInf; - break; - case glslang::EOpIsFinite: - unaryOp = spv::OpIsFinite; - break; - - case glslang::EOpFloatBitsToInt: - case glslang::EOpFloatBitsToUint: - case glslang::EOpIntBitsToFloat: - case glslang::EOpUintBitsToFloat: - case glslang::EOpDoubleBitsToInt64: - case glslang::EOpDoubleBitsToUint64: - case glslang::EOpInt64BitsToDouble: - case glslang::EOpUint64BitsToDouble: -#ifdef AMD_EXTENSIONS - case glslang::EOpFloat16BitsToInt16: - case glslang::EOpFloat16BitsToUint16: - case glslang::EOpInt16BitsToFloat16: - case glslang::EOpUint16BitsToFloat16: -#endif - unaryOp = spv::OpBitcast; - break; - - case glslang::EOpPackSnorm2x16: - libCall = spv::GLSLstd450PackSnorm2x16; - break; - case glslang::EOpUnpackSnorm2x16: - libCall = spv::GLSLstd450UnpackSnorm2x16; - break; - case glslang::EOpPackUnorm2x16: - libCall = spv::GLSLstd450PackUnorm2x16; - break; - case glslang::EOpUnpackUnorm2x16: - libCall = spv::GLSLstd450UnpackUnorm2x16; - break; - case glslang::EOpPackHalf2x16: - libCall = spv::GLSLstd450PackHalf2x16; - break; - case glslang::EOpUnpackHalf2x16: - libCall = spv::GLSLstd450UnpackHalf2x16; - break; - case glslang::EOpPackSnorm4x8: - libCall = spv::GLSLstd450PackSnorm4x8; - break; - case glslang::EOpUnpackSnorm4x8: - libCall = spv::GLSLstd450UnpackSnorm4x8; - break; - case glslang::EOpPackUnorm4x8: - libCall = spv::GLSLstd450PackUnorm4x8; - break; - case glslang::EOpUnpackUnorm4x8: - libCall = spv::GLSLstd450UnpackUnorm4x8; - break; - case glslang::EOpPackDouble2x32: - libCall = spv::GLSLstd450PackDouble2x32; - break; - case glslang::EOpUnpackDouble2x32: - libCall = spv::GLSLstd450UnpackDouble2x32; - break; - - case glslang::EOpPackInt2x32: - case glslang::EOpUnpackInt2x32: - case glslang::EOpPackUint2x32: - case glslang::EOpUnpackUint2x32: - unaryOp = spv::OpBitcast; - break; - -#ifdef AMD_EXTENSIONS - case glslang::EOpPackInt2x16: - case glslang::EOpUnpackInt2x16: - case glslang::EOpPackUint2x16: - case glslang::EOpUnpackUint2x16: - case glslang::EOpPackInt4x16: - case glslang::EOpUnpackInt4x16: - case glslang::EOpPackUint4x16: - case glslang::EOpUnpackUint4x16: - case glslang::EOpPackFloat2x16: - case glslang::EOpUnpackFloat2x16: - unaryOp = spv::OpBitcast; - break; -#endif - - case glslang::EOpDPdx: - unaryOp = spv::OpDPdx; - break; - case glslang::EOpDPdy: - unaryOp = spv::OpDPdy; - break; - case glslang::EOpFwidth: - unaryOp = spv::OpFwidth; - break; - case glslang::EOpDPdxFine: - builder.addCapability(spv::CapabilityDerivativeControl); - unaryOp = spv::OpDPdxFine; - break; - case glslang::EOpDPdyFine: - builder.addCapability(spv::CapabilityDerivativeControl); - unaryOp = spv::OpDPdyFine; - break; - case glslang::EOpFwidthFine: - builder.addCapability(spv::CapabilityDerivativeControl); - unaryOp = spv::OpFwidthFine; - break; - case glslang::EOpDPdxCoarse: - builder.addCapability(spv::CapabilityDerivativeControl); - unaryOp = spv::OpDPdxCoarse; - break; - case glslang::EOpDPdyCoarse: - builder.addCapability(spv::CapabilityDerivativeControl); - unaryOp = spv::OpDPdyCoarse; - break; - case glslang::EOpFwidthCoarse: - builder.addCapability(spv::CapabilityDerivativeControl); - unaryOp = spv::OpFwidthCoarse; - break; - case 
glslang::EOpInterpolateAtCentroid: - builder.addCapability(spv::CapabilityInterpolationFunction); - libCall = spv::GLSLstd450InterpolateAtCentroid; - break; - case glslang::EOpAny: - unaryOp = spv::OpAny; - break; - case glslang::EOpAll: - unaryOp = spv::OpAll; - break; - - case glslang::EOpAbs: - if (isFloat) - libCall = spv::GLSLstd450FAbs; - else - libCall = spv::GLSLstd450SAbs; - break; - case glslang::EOpSign: - if (isFloat) - libCall = spv::GLSLstd450FSign; - else - libCall = spv::GLSLstd450SSign; - break; - - case glslang::EOpAtomicCounterIncrement: - case glslang::EOpAtomicCounterDecrement: - case glslang::EOpAtomicCounter: - { - // Handle all of the atomics in one place, in createAtomicOperation() - std::vector operands; - operands.push_back(operand); - return createAtomicOperation(op, precision, typeId, operands, typeProxy); - } - - case glslang::EOpBitFieldReverse: - unaryOp = spv::OpBitReverse; - break; - case glslang::EOpBitCount: - unaryOp = spv::OpBitCount; - break; - case glslang::EOpFindLSB: - libCall = spv::GLSLstd450FindILsb; - break; - case glslang::EOpFindMSB: - if (isUnsigned) - libCall = spv::GLSLstd450FindUMsb; - else - libCall = spv::GLSLstd450FindSMsb; - break; - - case glslang::EOpBallot: - case glslang::EOpReadFirstInvocation: - case glslang::EOpAnyInvocation: - case glslang::EOpAllInvocations: - case glslang::EOpAllInvocationsEqual: -#ifdef AMD_EXTENSIONS - case glslang::EOpMinInvocations: - case glslang::EOpMaxInvocations: - case glslang::EOpAddInvocations: - case glslang::EOpMinInvocationsNonUniform: - case glslang::EOpMaxInvocationsNonUniform: - case glslang::EOpAddInvocationsNonUniform: - case glslang::EOpMinInvocationsInclusiveScan: - case glslang::EOpMaxInvocationsInclusiveScan: - case glslang::EOpAddInvocationsInclusiveScan: - case glslang::EOpMinInvocationsInclusiveScanNonUniform: - case glslang::EOpMaxInvocationsInclusiveScanNonUniform: - case glslang::EOpAddInvocationsInclusiveScanNonUniform: - case glslang::EOpMinInvocationsExclusiveScan: - case glslang::EOpMaxInvocationsExclusiveScan: - case glslang::EOpAddInvocationsExclusiveScan: - case glslang::EOpMinInvocationsExclusiveScanNonUniform: - case glslang::EOpMaxInvocationsExclusiveScanNonUniform: - case glslang::EOpAddInvocationsExclusiveScanNonUniform: -#endif - { - std::vector operands; - operands.push_back(operand); - return createInvocationsOperation(op, typeId, operands, typeProxy); - } - -#ifdef AMD_EXTENSIONS - case glslang::EOpMbcnt: - extBuiltins = getExtBuiltins(spv::E_SPV_AMD_shader_ballot); - libCall = spv::MbcntAMD; - break; - - case glslang::EOpCubeFaceIndex: - extBuiltins = getExtBuiltins(spv::E_SPV_AMD_gcn_shader); - libCall = spv::CubeFaceIndexAMD; - break; - - case glslang::EOpCubeFaceCoord: - extBuiltins = getExtBuiltins(spv::E_SPV_AMD_gcn_shader); - libCall = spv::CubeFaceCoordAMD; - break; -#endif - - default: - return 0; - } - - spv::Id id; - if (libCall >= 0) { - std::vector args; - args.push_back(operand); - id = builder.createBuiltinCall(typeId, extBuiltins >= 0 ? extBuiltins : stdBuiltins, libCall, args); - } else { - id = builder.createUnaryOp(unaryOp, typeId, operand); - } - - addDecoration(id, noContraction); - return builder.setPrecision(id, precision); -} - -// Create a unary operation on a matrix -spv::Id TGlslangToSpvTraverser::createUnaryMatrixOperation(spv::Op op, spv::Decoration precision, spv::Decoration noContraction, spv::Id typeId, spv::Id operand, glslang::TBasicType /* typeProxy */) -{ - // Handle unary operations vector by vector. 
- // The result type is the same type as the original type. - // The algorithm is to: - // - break the matrix into vectors - // - apply the operation to each vector - // - make a matrix out the vector results - - // get the types sorted out - int numCols = builder.getNumColumns(operand); - int numRows = builder.getNumRows(operand); - spv::Id srcVecType = builder.makeVectorType(builder.getScalarTypeId(builder.getTypeId(operand)), numRows); - spv::Id destVecType = builder.makeVectorType(builder.getScalarTypeId(typeId), numRows); - std::vector results; - - // do each vector op - for (int c = 0; c < numCols; ++c) { - std::vector indexes; - indexes.push_back(c); - spv::Id srcVec = builder.createCompositeExtract(operand, srcVecType, indexes); - spv::Id destVec = builder.createUnaryOp(op, destVecType, srcVec); - addDecoration(destVec, noContraction); - results.push_back(builder.setPrecision(destVec, precision)); - } - - // put the pieces together - return builder.setPrecision(builder.createCompositeConstruct(typeId, results), precision); -} - -spv::Id TGlslangToSpvTraverser::createConversion(glslang::TOperator op, spv::Decoration precision, spv::Decoration noContraction, spv::Id destType, spv::Id operand, glslang::TBasicType typeProxy) -{ - spv::Op convOp = spv::OpNop; - spv::Id zero = 0; - spv::Id one = 0; - spv::Id type = 0; - - int vectorSize = builder.isVectorType(destType) ? builder.getNumTypeComponents(destType) : 0; - - switch (op) { - case glslang::EOpConvIntToBool: - case glslang::EOpConvUintToBool: - case glslang::EOpConvInt64ToBool: - case glslang::EOpConvUint64ToBool: -#ifdef AMD_EXTENSIONS - case glslang::EOpConvInt16ToBool: - case glslang::EOpConvUint16ToBool: -#endif - if (op == glslang::EOpConvInt64ToBool || op == glslang::EOpConvUint64ToBool) - zero = builder.makeUint64Constant(0); -#ifdef AMD_EXTENSIONS - else if (op == glslang::EOpConvInt16ToBool || op == glslang::EOpConvUint16ToBool) - zero = builder.makeUint16Constant(0); -#endif - else - zero = builder.makeUintConstant(0); - zero = makeSmearedConstant(zero, vectorSize); - return builder.createBinOp(spv::OpINotEqual, destType, operand, zero); - - case glslang::EOpConvFloatToBool: - zero = builder.makeFloatConstant(0.0F); - zero = makeSmearedConstant(zero, vectorSize); - return builder.createBinOp(spv::OpFOrdNotEqual, destType, operand, zero); - - case glslang::EOpConvDoubleToBool: - zero = builder.makeDoubleConstant(0.0); - zero = makeSmearedConstant(zero, vectorSize); - return builder.createBinOp(spv::OpFOrdNotEqual, destType, operand, zero); - -#ifdef AMD_EXTENSIONS - case glslang::EOpConvFloat16ToBool: - zero = builder.makeFloat16Constant(0.0F); - zero = makeSmearedConstant(zero, vectorSize); - return builder.createBinOp(spv::OpFOrdNotEqual, destType, operand, zero); -#endif - - case glslang::EOpConvBoolToFloat: - convOp = spv::OpSelect; - zero = builder.makeFloatConstant(0.0F); - one = builder.makeFloatConstant(1.0F); - break; - - case glslang::EOpConvBoolToDouble: - convOp = spv::OpSelect; - zero = builder.makeDoubleConstant(0.0); - one = builder.makeDoubleConstant(1.0); - break; - -#ifdef AMD_EXTENSIONS - case glslang::EOpConvBoolToFloat16: - convOp = spv::OpSelect; - zero = builder.makeFloat16Constant(0.0F); - one = builder.makeFloat16Constant(1.0F); - break; -#endif - - case glslang::EOpConvBoolToInt: - case glslang::EOpConvBoolToInt64: -#ifdef AMD_EXTENSIONS - case glslang::EOpConvBoolToInt16: -#endif - if (op == glslang::EOpConvBoolToInt64) - zero = builder.makeInt64Constant(0); -#ifdef AMD_EXTENSIONS - else if (op 
== glslang::EOpConvBoolToInt16) - zero = builder.makeInt16Constant(0); -#endif - else - zero = builder.makeIntConstant(0); - - if (op == glslang::EOpConvBoolToInt64) - one = builder.makeInt64Constant(1); -#ifdef AMD_EXTENSIONS - else if (op == glslang::EOpConvBoolToInt16) - one = builder.makeInt16Constant(1); -#endif - else - one = builder.makeIntConstant(1); - - convOp = spv::OpSelect; - break; - - case glslang::EOpConvBoolToUint: - case glslang::EOpConvBoolToUint64: -#ifdef AMD_EXTENSIONS - case glslang::EOpConvBoolToUint16: -#endif - if (op == glslang::EOpConvBoolToUint64) - zero = builder.makeUint64Constant(0); -#ifdef AMD_EXTENSIONS - else if (op == glslang::EOpConvBoolToUint16) - zero = builder.makeUint16Constant(0); -#endif - else - zero = builder.makeUintConstant(0); - - if (op == glslang::EOpConvBoolToUint64) - one = builder.makeUint64Constant(1); -#ifdef AMD_EXTENSIONS - else if (op == glslang::EOpConvBoolToUint16) - one = builder.makeUint16Constant(1); -#endif - else - one = builder.makeUintConstant(1); - - convOp = spv::OpSelect; - break; - - case glslang::EOpConvIntToFloat: - case glslang::EOpConvIntToDouble: - case glslang::EOpConvInt64ToFloat: - case glslang::EOpConvInt64ToDouble: -#ifdef AMD_EXTENSIONS - case glslang::EOpConvInt16ToFloat: - case glslang::EOpConvInt16ToDouble: - case glslang::EOpConvInt16ToFloat16: - case glslang::EOpConvIntToFloat16: - case glslang::EOpConvInt64ToFloat16: -#endif - convOp = spv::OpConvertSToF; - break; - - case glslang::EOpConvUintToFloat: - case glslang::EOpConvUintToDouble: - case glslang::EOpConvUint64ToFloat: - case glslang::EOpConvUint64ToDouble: -#ifdef AMD_EXTENSIONS - case glslang::EOpConvUint16ToFloat: - case glslang::EOpConvUint16ToDouble: - case glslang::EOpConvUint16ToFloat16: - case glslang::EOpConvUintToFloat16: - case glslang::EOpConvUint64ToFloat16: -#endif - convOp = spv::OpConvertUToF; - break; - - case glslang::EOpConvDoubleToFloat: - case glslang::EOpConvFloatToDouble: -#ifdef AMD_EXTENSIONS - case glslang::EOpConvDoubleToFloat16: - case glslang::EOpConvFloat16ToDouble: - case glslang::EOpConvFloatToFloat16: - case glslang::EOpConvFloat16ToFloat: -#endif - convOp = spv::OpFConvert; - if (builder.isMatrixType(destType)) - return createUnaryMatrixOperation(convOp, precision, noContraction, destType, operand, typeProxy); - break; - - case glslang::EOpConvFloatToInt: - case glslang::EOpConvDoubleToInt: - case glslang::EOpConvFloatToInt64: - case glslang::EOpConvDoubleToInt64: -#ifdef AMD_EXTENSIONS - case glslang::EOpConvFloatToInt16: - case glslang::EOpConvDoubleToInt16: - case glslang::EOpConvFloat16ToInt16: - case glslang::EOpConvFloat16ToInt: - case glslang::EOpConvFloat16ToInt64: -#endif - convOp = spv::OpConvertFToS; - break; - - case glslang::EOpConvUintToInt: - case glslang::EOpConvIntToUint: - case glslang::EOpConvUint64ToInt64: - case glslang::EOpConvInt64ToUint64: -#ifdef AMD_EXTENSIONS - case glslang::EOpConvUint16ToInt16: - case glslang::EOpConvInt16ToUint16: -#endif - if (builder.isInSpecConstCodeGenMode()) { - // Build zero scalar or vector for OpIAdd. 
- if (op == glslang::EOpConvUint64ToInt64 || op == glslang::EOpConvInt64ToUint64) - zero = builder.makeUint64Constant(0); -#ifdef AMD_EXTENSIONS - else if (op == glslang::EOpConvUint16ToInt16 || op == glslang::EOpConvInt16ToUint16) - zero = builder.makeUint16Constant(0); -#endif - else - zero = builder.makeUintConstant(0); - - zero = makeSmearedConstant(zero, vectorSize); - // Use OpIAdd, instead of OpBitcast to do the conversion when - // generating for OpSpecConstantOp instruction. - return builder.createBinOp(spv::OpIAdd, destType, operand, zero); - } - // For normal run-time conversion instruction, use OpBitcast. - convOp = spv::OpBitcast; - break; - - case glslang::EOpConvFloatToUint: - case glslang::EOpConvDoubleToUint: - case glslang::EOpConvFloatToUint64: - case glslang::EOpConvDoubleToUint64: -#ifdef AMD_EXTENSIONS - case glslang::EOpConvFloatToUint16: - case glslang::EOpConvDoubleToUint16: - case glslang::EOpConvFloat16ToUint16: - case glslang::EOpConvFloat16ToUint: - case glslang::EOpConvFloat16ToUint64: -#endif - convOp = spv::OpConvertFToU; - break; - - case glslang::EOpConvIntToInt64: - case glslang::EOpConvInt64ToInt: -#ifdef AMD_EXTENSIONS - case glslang::EOpConvIntToInt16: - case glslang::EOpConvInt16ToInt: - case glslang::EOpConvInt64ToInt16: - case glslang::EOpConvInt16ToInt64: -#endif - convOp = spv::OpSConvert; - break; - - case glslang::EOpConvUintToUint64: - case glslang::EOpConvUint64ToUint: -#ifdef AMD_EXTENSIONS - case glslang::EOpConvUintToUint16: - case glslang::EOpConvUint16ToUint: - case glslang::EOpConvUint64ToUint16: - case glslang::EOpConvUint16ToUint64: -#endif - convOp = spv::OpUConvert; - break; - - case glslang::EOpConvIntToUint64: - case glslang::EOpConvInt64ToUint: - case glslang::EOpConvUint64ToInt: - case glslang::EOpConvUintToInt64: -#ifdef AMD_EXTENSIONS - case glslang::EOpConvInt16ToUint: - case glslang::EOpConvUintToInt16: - case glslang::EOpConvInt16ToUint64: - case glslang::EOpConvUint64ToInt16: - case glslang::EOpConvUint16ToInt: - case glslang::EOpConvIntToUint16: - case glslang::EOpConvUint16ToInt64: - case glslang::EOpConvInt64ToUint16: -#endif - // OpSConvert/OpUConvert + OpBitCast - switch (op) { - case glslang::EOpConvIntToUint64: -#ifdef AMD_EXTENSIONS - case glslang::EOpConvInt16ToUint64: -#endif - convOp = spv::OpSConvert; - type = builder.makeIntType(64); - break; - case glslang::EOpConvInt64ToUint: -#ifdef AMD_EXTENSIONS - case glslang::EOpConvInt16ToUint: -#endif - convOp = spv::OpSConvert; - type = builder.makeIntType(32); - break; - case glslang::EOpConvUint64ToInt: -#ifdef AMD_EXTENSIONS - case glslang::EOpConvUint16ToInt: -#endif - convOp = spv::OpUConvert; - type = builder.makeUintType(32); - break; - case glslang::EOpConvUintToInt64: -#ifdef AMD_EXTENSIONS - case glslang::EOpConvUint16ToInt64: -#endif - convOp = spv::OpUConvert; - type = builder.makeUintType(64); - break; -#ifdef AMD_EXTENSIONS - case glslang::EOpConvUintToInt16: - case glslang::EOpConvUint64ToInt16: - convOp = spv::OpUConvert; - type = builder.makeUintType(16); - break; - case glslang::EOpConvIntToUint16: - case glslang::EOpConvInt64ToUint16: - convOp = spv::OpSConvert; - type = builder.makeIntType(16); - break; -#endif - default: - assert(0); - break; - } - - if (vectorSize > 0) - type = builder.makeVectorType(type, vectorSize); - - operand = builder.createUnaryOp(convOp, type, operand); - - if (builder.isInSpecConstCodeGenMode()) { - // Build zero scalar or vector for OpIAdd. 
-#ifdef AMD_EXTENSIONS - if (op == glslang::EOpConvIntToUint64 || op == glslang::EOpConvUintToInt64 || - op == glslang::EOpConvInt16ToUint64 || op == glslang::EOpConvUint16ToInt64) - zero = builder.makeUint64Constant(0); - else if (op == glslang::EOpConvIntToUint16 || op == glslang::EOpConvUintToInt16 || - op == glslang::EOpConvInt64ToUint16 || op == glslang::EOpConvUint64ToInt16) - zero = builder.makeUint16Constant(0); - else - zero = builder.makeUintConstant(0); -#else - if (op == glslang::EOpConvIntToUint64 || op == glslang::EOpConvUintToInt64) - zero = builder.makeUint64Constant(0); - else - zero = builder.makeUintConstant(0); -#endif - - zero = makeSmearedConstant(zero, vectorSize); - // Use OpIAdd, instead of OpBitcast to do the conversion when - // generating for OpSpecConstantOp instruction. - return builder.createBinOp(spv::OpIAdd, destType, operand, zero); - } - // For normal run-time conversion instruction, use OpBitcast. - convOp = spv::OpBitcast; - break; - default: - break; - } - - spv::Id result = 0; - if (convOp == spv::OpNop) - return result; - - if (convOp == spv::OpSelect) { - zero = makeSmearedConstant(zero, vectorSize); - one = makeSmearedConstant(one, vectorSize); - result = builder.createTriOp(convOp, destType, operand, one, zero); - } else - result = builder.createUnaryOp(convOp, destType, operand); - - return builder.setPrecision(result, precision); -} - -spv::Id TGlslangToSpvTraverser::makeSmearedConstant(spv::Id constant, int vectorSize) -{ - if (vectorSize == 0) - return constant; - - spv::Id vectorTypeId = builder.makeVectorType(builder.getTypeId(constant), vectorSize); - std::vector components; - for (int c = 0; c < vectorSize; ++c) - components.push_back(constant); - return builder.makeCompositeConstant(vectorTypeId, components); -} - -// For glslang ops that map to SPV atomic opCodes -spv::Id TGlslangToSpvTraverser::createAtomicOperation(glslang::TOperator op, spv::Decoration /*precision*/, spv::Id typeId, std::vector& operands, glslang::TBasicType typeProxy) -{ - spv::Op opCode = spv::OpNop; - - switch (op) { - case glslang::EOpAtomicAdd: - case glslang::EOpImageAtomicAdd: - case glslang::EOpAtomicCounterAdd: - opCode = spv::OpAtomicIAdd; - break; - case glslang::EOpAtomicCounterSubtract: - opCode = spv::OpAtomicISub; - break; - case glslang::EOpAtomicMin: - case glslang::EOpImageAtomicMin: - case glslang::EOpAtomicCounterMin: - opCode = typeProxy == glslang::EbtUint ? spv::OpAtomicUMin : spv::OpAtomicSMin; - break; - case glslang::EOpAtomicMax: - case glslang::EOpImageAtomicMax: - case glslang::EOpAtomicCounterMax: - opCode = typeProxy == glslang::EbtUint ? 
spv::OpAtomicUMax : spv::OpAtomicSMax;
-        break;
-    case glslang::EOpAtomicAnd:
-    case glslang::EOpImageAtomicAnd:
-    case glslang::EOpAtomicCounterAnd:
-        opCode = spv::OpAtomicAnd;
-        break;
-    case glslang::EOpAtomicOr:
-    case glslang::EOpImageAtomicOr:
-    case glslang::EOpAtomicCounterOr:
-        opCode = spv::OpAtomicOr;
-        break;
-    case glslang::EOpAtomicXor:
-    case glslang::EOpImageAtomicXor:
-    case glslang::EOpAtomicCounterXor:
-        opCode = spv::OpAtomicXor;
-        break;
-    case glslang::EOpAtomicExchange:
-    case glslang::EOpImageAtomicExchange:
-    case glslang::EOpAtomicCounterExchange:
-        opCode = spv::OpAtomicExchange;
-        break;
-    case glslang::EOpAtomicCompSwap:
-    case glslang::EOpImageAtomicCompSwap:
-    case glslang::EOpAtomicCounterCompSwap:
-        opCode = spv::OpAtomicCompareExchange;
-        break;
-    case glslang::EOpAtomicCounterIncrement:
-        opCode = spv::OpAtomicIIncrement;
-        break;
-    case glslang::EOpAtomicCounterDecrement:
-        opCode = spv::OpAtomicIDecrement;
-        break;
-    case glslang::EOpAtomicCounter:
-        opCode = spv::OpAtomicLoad;
-        break;
-    default:
-        assert(0);
-        break;
-    }
-
-    // Sort out the operands
-    //  - mapping from glslang -> SPV
-    //  - there are extra SPV operands with no glslang source
-    //  - compare-exchange swaps the value and comparator
-    //  - compare-exchange has an extra memory semantics
-    std::vector spvAtomicOperands;  // hold the spv operands
-    auto opIt = operands.begin();   // walk the glslang operands
-    spvAtomicOperands.push_back(*(opIt++));
-    spvAtomicOperands.push_back(builder.makeUintConstant(spv::ScopeDevice));             // TBD: what is the correct scope?
-    spvAtomicOperands.push_back(builder.makeUintConstant(spv::MemorySemanticsMaskNone)); // TBD: what are the correct memory semantics?
-    if (opCode == spv::OpAtomicCompareExchange) {
-        // There are 2 memory semantics for compare-exchange. And the operand order of "comparator" and "new value" in GLSL
-        // differs from that in SPIR-V. Hence, special processing is required.
-        spvAtomicOperands.push_back(builder.makeUintConstant(spv::MemorySemanticsMaskNone));
-        spvAtomicOperands.push_back(*(opIt + 1));
-        spvAtomicOperands.push_back(*opIt);
-        opIt += 2;
-    }
-
-    // Add the rest of the operands, skipping any that were dealt with above.
-    for (; opIt != operands.end(); ++opIt)
-        spvAtomicOperands.push_back(*opIt);
-
-    return builder.createOp(opCode, typeId, spvAtomicOperands);
-}
-
-// Create group invocation operations.
-spv::Id TGlslangToSpvTraverser::createInvocationsOperation(glslang::TOperator op, spv::Id typeId, std::vector& operands, glslang::TBasicType typeProxy) -{ -#ifdef AMD_EXTENSIONS - bool isUnsigned = typeProxy == glslang::EbtUint || typeProxy == glslang::EbtUint64; - bool isFloat = typeProxy == glslang::EbtFloat || typeProxy == glslang::EbtDouble || typeProxy == glslang::EbtFloat16; -#endif - - spv::Op opCode = spv::OpNop; - std::vector spvGroupOperands; - spv::GroupOperation groupOperation = spv::GroupOperationMax; - - if (op == glslang::EOpBallot || op == glslang::EOpReadFirstInvocation || - op == glslang::EOpReadInvocation) { - builder.addExtension(spv::E_SPV_KHR_shader_ballot); - builder.addCapability(spv::CapabilitySubgroupBallotKHR); - } else if (op == glslang::EOpAnyInvocation || - op == glslang::EOpAllInvocations || - op == glslang::EOpAllInvocationsEqual) { - builder.addExtension(spv::E_SPV_KHR_subgroup_vote); - builder.addCapability(spv::CapabilitySubgroupVoteKHR); - } else { - builder.addCapability(spv::CapabilityGroups); -#ifdef AMD_EXTENSIONS - if (op == glslang::EOpMinInvocationsNonUniform || - op == glslang::EOpMaxInvocationsNonUniform || - op == glslang::EOpAddInvocationsNonUniform || - op == glslang::EOpMinInvocationsInclusiveScanNonUniform || - op == glslang::EOpMaxInvocationsInclusiveScanNonUniform || - op == glslang::EOpAddInvocationsInclusiveScanNonUniform || - op == glslang::EOpMinInvocationsExclusiveScanNonUniform || - op == glslang::EOpMaxInvocationsExclusiveScanNonUniform || - op == glslang::EOpAddInvocationsExclusiveScanNonUniform) - builder.addExtension(spv::E_SPV_AMD_shader_ballot); -#endif - - spvGroupOperands.push_back(builder.makeUintConstant(spv::ScopeSubgroup)); -#ifdef AMD_EXTENSIONS - switch (op) { - case glslang::EOpMinInvocations: - case glslang::EOpMaxInvocations: - case glslang::EOpAddInvocations: - case glslang::EOpMinInvocationsNonUniform: - case glslang::EOpMaxInvocationsNonUniform: - case glslang::EOpAddInvocationsNonUniform: - groupOperation = spv::GroupOperationReduce; - spvGroupOperands.push_back(groupOperation); - break; - case glslang::EOpMinInvocationsInclusiveScan: - case glslang::EOpMaxInvocationsInclusiveScan: - case glslang::EOpAddInvocationsInclusiveScan: - case glslang::EOpMinInvocationsInclusiveScanNonUniform: - case glslang::EOpMaxInvocationsInclusiveScanNonUniform: - case glslang::EOpAddInvocationsInclusiveScanNonUniform: - groupOperation = spv::GroupOperationInclusiveScan; - spvGroupOperands.push_back(groupOperation); - break; - case glslang::EOpMinInvocationsExclusiveScan: - case glslang::EOpMaxInvocationsExclusiveScan: - case glslang::EOpAddInvocationsExclusiveScan: - case glslang::EOpMinInvocationsExclusiveScanNonUniform: - case glslang::EOpMaxInvocationsExclusiveScanNonUniform: - case glslang::EOpAddInvocationsExclusiveScanNonUniform: - groupOperation = spv::GroupOperationExclusiveScan; - spvGroupOperands.push_back(groupOperation); - break; - default: - break; - } -#endif - } - - for (auto opIt = operands.begin(); opIt != operands.end(); ++opIt) - spvGroupOperands.push_back(*opIt); - - switch (op) { - case glslang::EOpAnyInvocation: - opCode = spv::OpSubgroupAnyKHR; - break; - case glslang::EOpAllInvocations: - opCode = spv::OpSubgroupAllKHR; - break; - case glslang::EOpAllInvocationsEqual: - opCode = spv::OpSubgroupAllEqualKHR; - break; - case glslang::EOpReadInvocation: - opCode = spv::OpSubgroupReadInvocationKHR; - if (builder.isVectorType(typeId)) - return CreateInvocationsVectorOperation(opCode, groupOperation, typeId, 
operands); - break; - case glslang::EOpReadFirstInvocation: - opCode = spv::OpSubgroupFirstInvocationKHR; - break; - case glslang::EOpBallot: - { - // NOTE: According to the spec, the result type of "OpSubgroupBallotKHR" must be a 4 component vector of 32 - // bit integer types. The GLSL built-in function "ballotARB()" assumes the maximum number of invocations in - // a subgroup is 64. Thus, we have to convert uvec4.xy to uint64_t as follow: - // - // result = Bitcast(SubgroupBallotKHR(Predicate).xy) - // - spv::Id uintType = builder.makeUintType(32); - spv::Id uvec4Type = builder.makeVectorType(uintType, 4); - spv::Id result = builder.createOp(spv::OpSubgroupBallotKHR, uvec4Type, spvGroupOperands); - - std::vector components; - components.push_back(builder.createCompositeExtract(result, uintType, 0)); - components.push_back(builder.createCompositeExtract(result, uintType, 1)); - - spv::Id uvec2Type = builder.makeVectorType(uintType, 2); - return builder.createUnaryOp(spv::OpBitcast, typeId, - builder.createCompositeConstruct(uvec2Type, components)); - } - -#ifdef AMD_EXTENSIONS - case glslang::EOpMinInvocations: - case glslang::EOpMaxInvocations: - case glslang::EOpAddInvocations: - case glslang::EOpMinInvocationsInclusiveScan: - case glslang::EOpMaxInvocationsInclusiveScan: - case glslang::EOpAddInvocationsInclusiveScan: - case glslang::EOpMinInvocationsExclusiveScan: - case glslang::EOpMaxInvocationsExclusiveScan: - case glslang::EOpAddInvocationsExclusiveScan: - if (op == glslang::EOpMinInvocations || - op == glslang::EOpMinInvocationsInclusiveScan || - op == glslang::EOpMinInvocationsExclusiveScan) { - if (isFloat) - opCode = spv::OpGroupFMin; - else { - if (isUnsigned) - opCode = spv::OpGroupUMin; - else - opCode = spv::OpGroupSMin; - } - } else if (op == glslang::EOpMaxInvocations || - op == glslang::EOpMaxInvocationsInclusiveScan || - op == glslang::EOpMaxInvocationsExclusiveScan) { - if (isFloat) - opCode = spv::OpGroupFMax; - else { - if (isUnsigned) - opCode = spv::OpGroupUMax; - else - opCode = spv::OpGroupSMax; - } - } else { - if (isFloat) - opCode = spv::OpGroupFAdd; - else - opCode = spv::OpGroupIAdd; - } - - if (builder.isVectorType(typeId)) - return CreateInvocationsVectorOperation(opCode, groupOperation, typeId, operands); - - break; - case glslang::EOpMinInvocationsNonUniform: - case glslang::EOpMaxInvocationsNonUniform: - case glslang::EOpAddInvocationsNonUniform: - case glslang::EOpMinInvocationsInclusiveScanNonUniform: - case glslang::EOpMaxInvocationsInclusiveScanNonUniform: - case glslang::EOpAddInvocationsInclusiveScanNonUniform: - case glslang::EOpMinInvocationsExclusiveScanNonUniform: - case glslang::EOpMaxInvocationsExclusiveScanNonUniform: - case glslang::EOpAddInvocationsExclusiveScanNonUniform: - if (op == glslang::EOpMinInvocationsNonUniform || - op == glslang::EOpMinInvocationsInclusiveScanNonUniform || - op == glslang::EOpMinInvocationsExclusiveScanNonUniform) { - if (isFloat) - opCode = spv::OpGroupFMinNonUniformAMD; - else { - if (isUnsigned) - opCode = spv::OpGroupUMinNonUniformAMD; - else - opCode = spv::OpGroupSMinNonUniformAMD; - } - } - else if (op == glslang::EOpMaxInvocationsNonUniform || - op == glslang::EOpMaxInvocationsInclusiveScanNonUniform || - op == glslang::EOpMaxInvocationsExclusiveScanNonUniform) { - if (isFloat) - opCode = spv::OpGroupFMaxNonUniformAMD; - else { - if (isUnsigned) - opCode = spv::OpGroupUMaxNonUniformAMD; - else - opCode = spv::OpGroupSMaxNonUniformAMD; - } - } - else { - if (isFloat) - opCode = 
spv::OpGroupFAddNonUniformAMD; - else - opCode = spv::OpGroupIAddNonUniformAMD; - } - - if (builder.isVectorType(typeId)) - return CreateInvocationsVectorOperation(opCode, groupOperation, typeId, operands); - - break; -#endif - default: - logger->missingFunctionality("invocation operation"); - return spv::NoResult; - } - - assert(opCode != spv::OpNop); - return builder.createOp(opCode, typeId, spvGroupOperands); -} - -// Create group invocation operations on a vector -spv::Id TGlslangToSpvTraverser::CreateInvocationsVectorOperation(spv::Op op, spv::GroupOperation groupOperation, spv::Id typeId, std::vector& operands) -{ -#ifdef AMD_EXTENSIONS - assert(op == spv::OpGroupFMin || op == spv::OpGroupUMin || op == spv::OpGroupSMin || - op == spv::OpGroupFMax || op == spv::OpGroupUMax || op == spv::OpGroupSMax || - op == spv::OpGroupFAdd || op == spv::OpGroupIAdd || op == spv::OpGroupBroadcast || - op == spv::OpSubgroupReadInvocationKHR || - op == spv::OpGroupFMinNonUniformAMD || op == spv::OpGroupUMinNonUniformAMD || op == spv::OpGroupSMinNonUniformAMD || - op == spv::OpGroupFMaxNonUniformAMD || op == spv::OpGroupUMaxNonUniformAMD || op == spv::OpGroupSMaxNonUniformAMD || - op == spv::OpGroupFAddNonUniformAMD || op == spv::OpGroupIAddNonUniformAMD); -#else - assert(op == spv::OpGroupFMin || op == spv::OpGroupUMin || op == spv::OpGroupSMin || - op == spv::OpGroupFMax || op == spv::OpGroupUMax || op == spv::OpGroupSMax || - op == spv::OpGroupFAdd || op == spv::OpGroupIAdd || op == spv::OpGroupBroadcast || - op == spv::OpSubgroupReadInvocationKHR); -#endif - - // Handle group invocation operations scalar by scalar. - // The result type is the same type as the original type. - // The algorithm is to: - // - break the vector into scalars - // - apply the operation to each scalar - // - make a vector out the scalar results - - // get the types sorted out - int numComponents = builder.getNumComponents(operands[0]); - spv::Id scalarType = builder.getScalarTypeId(builder.getTypeId(operands[0])); - std::vector results; - - // do each scalar op - for (int comp = 0; comp < numComponents; ++comp) { - std::vector indexes; - indexes.push_back(comp); - spv::Id scalar = builder.createCompositeExtract(operands[0], scalarType, indexes); - std::vector spvGroupOperands; - if (op == spv::OpSubgroupReadInvocationKHR) { - spvGroupOperands.push_back(scalar); - spvGroupOperands.push_back(operands[1]); - } else if (op == spv::OpGroupBroadcast) { - spvGroupOperands.push_back(builder.makeUintConstant(spv::ScopeSubgroup)); - spvGroupOperands.push_back(scalar); - spvGroupOperands.push_back(operands[1]); - } else { - spvGroupOperands.push_back(builder.makeUintConstant(spv::ScopeSubgroup)); - spvGroupOperands.push_back(groupOperation); - spvGroupOperands.push_back(scalar); - } - - results.push_back(builder.createOp(op, scalarType, spvGroupOperands)); - } - - // put the pieces together - return builder.createCompositeConstruct(typeId, results); -} - -spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv::Decoration precision, spv::Id typeId, std::vector& operands, glslang::TBasicType typeProxy) -{ -#ifdef AMD_EXTENSIONS - bool isUnsigned = typeProxy == glslang::EbtUint || typeProxy == glslang::EbtUint64 || typeProxy == glslang::EbtUint16; - bool isFloat = typeProxy == glslang::EbtFloat || typeProxy == glslang::EbtDouble || typeProxy == glslang::EbtFloat16; -#else - bool isUnsigned = typeProxy == glslang::EbtUint || typeProxy == glslang::EbtUint64; - bool isFloat = typeProxy == glslang::EbtFloat || 
typeProxy == glslang::EbtDouble; -#endif - - spv::Op opCode = spv::OpNop; - int extBuiltins = -1; - int libCall = -1; - size_t consumedOperands = operands.size(); - spv::Id typeId0 = 0; - if (consumedOperands > 0) - typeId0 = builder.getTypeId(operands[0]); - spv::Id typeId1 = 0; - if (consumedOperands > 1) - typeId1 = builder.getTypeId(operands[1]); - spv::Id frexpIntType = 0; - - switch (op) { - case glslang::EOpMin: - if (isFloat) - libCall = spv::GLSLstd450FMin; - else if (isUnsigned) - libCall = spv::GLSLstd450UMin; - else - libCall = spv::GLSLstd450SMin; - builder.promoteScalar(precision, operands.front(), operands.back()); - break; - case glslang::EOpModf: - libCall = spv::GLSLstd450Modf; - break; - case glslang::EOpMax: - if (isFloat) - libCall = spv::GLSLstd450FMax; - else if (isUnsigned) - libCall = spv::GLSLstd450UMax; - else - libCall = spv::GLSLstd450SMax; - builder.promoteScalar(precision, operands.front(), operands.back()); - break; - case glslang::EOpPow: - libCall = spv::GLSLstd450Pow; - break; - case glslang::EOpDot: - opCode = spv::OpDot; - break; - case glslang::EOpAtan: - libCall = spv::GLSLstd450Atan2; - break; - - case glslang::EOpClamp: - if (isFloat) - libCall = spv::GLSLstd450FClamp; - else if (isUnsigned) - libCall = spv::GLSLstd450UClamp; - else - libCall = spv::GLSLstd450SClamp; - builder.promoteScalar(precision, operands.front(), operands[1]); - builder.promoteScalar(precision, operands.front(), operands[2]); - break; - case glslang::EOpMix: - if (! builder.isBoolType(builder.getScalarTypeId(builder.getTypeId(operands.back())))) { - assert(isFloat); - libCall = spv::GLSLstd450FMix; - } else { - opCode = spv::OpSelect; - std::swap(operands.front(), operands.back()); - } - builder.promoteScalar(precision, operands.front(), operands.back()); - break; - case glslang::EOpStep: - libCall = spv::GLSLstd450Step; - builder.promoteScalar(precision, operands.front(), operands.back()); - break; - case glslang::EOpSmoothStep: - libCall = spv::GLSLstd450SmoothStep; - builder.promoteScalar(precision, operands[0], operands[2]); - builder.promoteScalar(precision, operands[1], operands[2]); - break; - - case glslang::EOpDistance: - libCall = spv::GLSLstd450Distance; - break; - case glslang::EOpCross: - libCall = spv::GLSLstd450Cross; - break; - case glslang::EOpFaceForward: - libCall = spv::GLSLstd450FaceForward; - break; - case glslang::EOpReflect: - libCall = spv::GLSLstd450Reflect; - break; - case glslang::EOpRefract: - libCall = spv::GLSLstd450Refract; - break; - case glslang::EOpInterpolateAtSample: - builder.addCapability(spv::CapabilityInterpolationFunction); - libCall = spv::GLSLstd450InterpolateAtSample; - break; - case glslang::EOpInterpolateAtOffset: - builder.addCapability(spv::CapabilityInterpolationFunction); - libCall = spv::GLSLstd450InterpolateAtOffset; - break; - case glslang::EOpAddCarry: - opCode = spv::OpIAddCarry; - typeId = builder.makeStructResultType(typeId0, typeId0); - consumedOperands = 2; - break; - case glslang::EOpSubBorrow: - opCode = spv::OpISubBorrow; - typeId = builder.makeStructResultType(typeId0, typeId0); - consumedOperands = 2; - break; - case glslang::EOpUMulExtended: - opCode = spv::OpUMulExtended; - typeId = builder.makeStructResultType(typeId0, typeId0); - consumedOperands = 2; - break; - case glslang::EOpIMulExtended: - opCode = spv::OpSMulExtended; - typeId = builder.makeStructResultType(typeId0, typeId0); - consumedOperands = 2; - break; - case glslang::EOpBitfieldExtract: - if (isUnsigned) - opCode = spv::OpBitFieldUExtract; - else 
- opCode = spv::OpBitFieldSExtract; - break; - case glslang::EOpBitfieldInsert: - opCode = spv::OpBitFieldInsert; - break; - - case glslang::EOpFma: - libCall = spv::GLSLstd450Fma; - break; - case glslang::EOpFrexp: - { - libCall = spv::GLSLstd450FrexpStruct; - assert(builder.isPointerType(typeId1)); - typeId1 = builder.getContainedTypeId(typeId1); -#ifdef AMD_EXTENSIONS - int width = builder.getScalarTypeWidth(typeId1); -#else - int width = 32; -#endif - if (builder.getNumComponents(operands[0]) == 1) - frexpIntType = builder.makeIntegerType(width, true); - else - frexpIntType = builder.makeVectorType(builder.makeIntegerType(width, true), builder.getNumComponents(operands[0])); - typeId = builder.makeStructResultType(typeId0, frexpIntType); - consumedOperands = 1; - } - break; - case glslang::EOpLdexp: - libCall = spv::GLSLstd450Ldexp; - break; - - case glslang::EOpReadInvocation: - return createInvocationsOperation(op, typeId, operands, typeProxy); - -#ifdef AMD_EXTENSIONS - case glslang::EOpSwizzleInvocations: - extBuiltins = getExtBuiltins(spv::E_SPV_AMD_shader_ballot); - libCall = spv::SwizzleInvocationsAMD; - break; - case glslang::EOpSwizzleInvocationsMasked: - extBuiltins = getExtBuiltins(spv::E_SPV_AMD_shader_ballot); - libCall = spv::SwizzleInvocationsMaskedAMD; - break; - case glslang::EOpWriteInvocation: - extBuiltins = getExtBuiltins(spv::E_SPV_AMD_shader_ballot); - libCall = spv::WriteInvocationAMD; - break; - - case glslang::EOpMin3: - extBuiltins = getExtBuiltins(spv::E_SPV_AMD_shader_trinary_minmax); - if (isFloat) - libCall = spv::FMin3AMD; - else { - if (isUnsigned) - libCall = spv::UMin3AMD; - else - libCall = spv::SMin3AMD; - } - break; - case glslang::EOpMax3: - extBuiltins = getExtBuiltins(spv::E_SPV_AMD_shader_trinary_minmax); - if (isFloat) - libCall = spv::FMax3AMD; - else { - if (isUnsigned) - libCall = spv::UMax3AMD; - else - libCall = spv::SMax3AMD; - } - break; - case glslang::EOpMid3: - extBuiltins = getExtBuiltins(spv::E_SPV_AMD_shader_trinary_minmax); - if (isFloat) - libCall = spv::FMid3AMD; - else { - if (isUnsigned) - libCall = spv::UMid3AMD; - else - libCall = spv::SMid3AMD; - } - break; - - case glslang::EOpInterpolateAtVertex: - extBuiltins = getExtBuiltins(spv::E_SPV_AMD_shader_explicit_vertex_parameter); - libCall = spv::InterpolateAtVertexAMD; - break; -#endif - - default: - return 0; - } - - spv::Id id = 0; - if (libCall >= 0) { - // Use an extended instruction from the standard library. - // Construct the call arguments, without modifying the original operands vector. - // We might need the remaining arguments, e.g. in the EOpFrexp case. - std::vector callArguments(operands.begin(), operands.begin() + consumedOperands); - id = builder.createBuiltinCall(typeId, extBuiltins >= 0 ? 
extBuiltins : stdBuiltins, libCall, callArguments); - } else { - switch (consumedOperands) { - case 0: - // should all be handled by visitAggregate and createNoArgOperation - assert(0); - return 0; - case 1: - // should all be handled by createUnaryOperation - assert(0); - return 0; - case 2: - id = builder.createBinOp(opCode, typeId, operands[0], operands[1]); - break; - default: - // anything 3 or over doesn't have l-value operands, so all should be consumed - assert(consumedOperands == operands.size()); - id = builder.createOp(opCode, typeId, operands); - break; - } - } - - // Decode the return types that were structures - switch (op) { - case glslang::EOpAddCarry: - case glslang::EOpSubBorrow: - builder.createStore(builder.createCompositeExtract(id, typeId0, 1), operands[2]); - id = builder.createCompositeExtract(id, typeId0, 0); - break; - case glslang::EOpUMulExtended: - case glslang::EOpIMulExtended: - builder.createStore(builder.createCompositeExtract(id, typeId0, 0), operands[3]); - builder.createStore(builder.createCompositeExtract(id, typeId0, 1), operands[2]); - break; - case glslang::EOpFrexp: - { - assert(operands.size() == 2); - if (builder.isFloatType(builder.getScalarTypeId(typeId1))) { - // "exp" is floating-point type (from HLSL intrinsic) - spv::Id member1 = builder.createCompositeExtract(id, frexpIntType, 1); - member1 = builder.createUnaryOp(spv::OpConvertSToF, typeId1, member1); - builder.createStore(member1, operands[1]); - } else - // "exp" is integer type (from GLSL built-in function) - builder.createStore(builder.createCompositeExtract(id, frexpIntType, 1), operands[1]); - id = builder.createCompositeExtract(id, typeId0, 0); - } - break; - default: - break; - } - - return builder.setPrecision(id, precision); -} - -// Intrinsics with no arguments (or no return value, and no precision). -spv::Id TGlslangToSpvTraverser::createNoArgOperation(glslang::TOperator op, spv::Decoration precision, spv::Id typeId) -{ - // TODO: get the barrier operands correct - - switch (op) { - case glslang::EOpEmitVertex: - builder.createNoResultOp(spv::OpEmitVertex); - return 0; - case glslang::EOpEndPrimitive: - builder.createNoResultOp(spv::OpEndPrimitive); - return 0; - case glslang::EOpBarrier: - builder.createControlBarrier(spv::ScopeWorkgroup, spv::ScopeDevice, spv::MemorySemanticsMaskNone); - return 0; - case glslang::EOpMemoryBarrier: - builder.createMemoryBarrier(spv::ScopeDevice, spv::MemorySemanticsAllMemory); - return 0; - case glslang::EOpMemoryBarrierAtomicCounter: - builder.createMemoryBarrier(spv::ScopeDevice, spv::MemorySemanticsAtomicCounterMemoryMask); - return 0; - case glslang::EOpMemoryBarrierBuffer: - builder.createMemoryBarrier(spv::ScopeDevice, spv::MemorySemanticsUniformMemoryMask); - return 0; - case glslang::EOpMemoryBarrierImage: - builder.createMemoryBarrier(spv::ScopeDevice, spv::MemorySemanticsImageMemoryMask); - return 0; - case glslang::EOpMemoryBarrierShared: - builder.createMemoryBarrier(spv::ScopeDevice, spv::MemorySemanticsWorkgroupMemoryMask); - return 0; - case glslang::EOpGroupMemoryBarrier: - builder.createMemoryBarrier(spv::ScopeDevice, spv::MemorySemanticsCrossWorkgroupMemoryMask); - return 0; - case glslang::EOpAllMemoryBarrierWithGroupSync: - // Control barrier with non-"None" semantic is also a memory barrier. - builder.createControlBarrier(spv::ScopeDevice, spv::ScopeDevice, spv::MemorySemanticsAllMemory); - return 0; - case glslang::EOpGroupMemoryBarrierWithGroupSync: - // Control barrier with non-"None" semantic is also a memory barrier. 
- builder.createControlBarrier(spv::ScopeDevice, spv::ScopeDevice, spv::MemorySemanticsCrossWorkgroupMemoryMask); - return 0; - case glslang::EOpWorkgroupMemoryBarrier: - builder.createMemoryBarrier(spv::ScopeWorkgroup, spv::MemorySemanticsWorkgroupMemoryMask); - return 0; - case glslang::EOpWorkgroupMemoryBarrierWithGroupSync: - // Control barrier with non-"None" semantic is also a memory barrier. - builder.createControlBarrier(spv::ScopeWorkgroup, spv::ScopeWorkgroup, spv::MemorySemanticsWorkgroupMemoryMask); - return 0; -#ifdef AMD_EXTENSIONS - case glslang::EOpTime: - { - std::vector args; // Dummy arguments - spv::Id id = builder.createBuiltinCall(typeId, getExtBuiltins(spv::E_SPV_AMD_gcn_shader), spv::TimeAMD, args); - return builder.setPrecision(id, precision); - } -#endif - default: - logger->missingFunctionality("unknown operation with no arguments"); - return 0; - } -} - -spv::Id TGlslangToSpvTraverser::getSymbolId(const glslang::TIntermSymbol* symbol) -{ - auto iter = symbolValues.find(symbol->getId()); - spv::Id id; - if (symbolValues.end() != iter) { - id = iter->second; - return id; - } - - // it was not found, create it - id = createSpvVariable(symbol); - symbolValues[symbol->getId()] = id; - - if (symbol->getBasicType() != glslang::EbtBlock) { - addDecoration(id, TranslatePrecisionDecoration(symbol->getType())); - addDecoration(id, TranslateInterpolationDecoration(symbol->getType().getQualifier())); - addDecoration(id, TranslateAuxiliaryStorageDecoration(symbol->getType().getQualifier())); - if (symbol->getType().getQualifier().hasSpecConstantId()) - addDecoration(id, spv::DecorationSpecId, symbol->getType().getQualifier().layoutSpecConstantId); - if (symbol->getQualifier().hasIndex()) - builder.addDecoration(id, spv::DecorationIndex, symbol->getQualifier().layoutIndex); - if (symbol->getQualifier().hasComponent()) - builder.addDecoration(id, spv::DecorationComponent, symbol->getQualifier().layoutComponent); - if (glslangIntermediate->getXfbMode()) { - builder.addCapability(spv::CapabilityTransformFeedback); - if (symbol->getQualifier().hasXfbStride()) - builder.addDecoration(id, spv::DecorationXfbStride, symbol->getQualifier().layoutXfbStride); - if (symbol->getQualifier().hasXfbBuffer()) - builder.addDecoration(id, spv::DecorationXfbBuffer, symbol->getQualifier().layoutXfbBuffer); - if (symbol->getQualifier().hasXfbOffset()) - builder.addDecoration(id, spv::DecorationOffset, symbol->getQualifier().layoutXfbOffset); - } - // atomic counters use this: - if (symbol->getQualifier().hasOffset()) - builder.addDecoration(id, spv::DecorationOffset, symbol->getQualifier().layoutOffset); - } - - if (symbol->getQualifier().hasLocation()) - builder.addDecoration(id, spv::DecorationLocation, symbol->getQualifier().layoutLocation); - addDecoration(id, TranslateInvariantDecoration(symbol->getType().getQualifier())); - if (symbol->getQualifier().hasStream() && glslangIntermediate->isMultiStream()) { - builder.addCapability(spv::CapabilityGeometryStreams); - builder.addDecoration(id, spv::DecorationStream, symbol->getQualifier().layoutStream); - } - if (symbol->getQualifier().hasSet()) - builder.addDecoration(id, spv::DecorationDescriptorSet, symbol->getQualifier().layoutSet); - else if (IsDescriptorResource(symbol->getType())) { - // default to 0 - builder.addDecoration(id, spv::DecorationDescriptorSet, 0); - } - if (symbol->getQualifier().hasBinding()) - builder.addDecoration(id, spv::DecorationBinding, symbol->getQualifier().layoutBinding); - if (symbol->getQualifier().hasAttachment()) 
- builder.addDecoration(id, spv::DecorationInputAttachmentIndex, symbol->getQualifier().layoutAttachment); - if (glslangIntermediate->getXfbMode()) { - builder.addCapability(spv::CapabilityTransformFeedback); - if (symbol->getQualifier().hasXfbStride()) - builder.addDecoration(id, spv::DecorationXfbStride, symbol->getQualifier().layoutXfbStride); - if (symbol->getQualifier().hasXfbBuffer()) - builder.addDecoration(id, spv::DecorationXfbBuffer, symbol->getQualifier().layoutXfbBuffer); - } - - if (symbol->getType().isImage()) { - std::vector memory; - TranslateMemoryDecoration(symbol->getType().getQualifier(), memory); - for (unsigned int i = 0; i < memory.size(); ++i) - addDecoration(id, memory[i]); - } - - // built-in variable decorations - spv::BuiltIn builtIn = TranslateBuiltInDecoration(symbol->getQualifier().builtIn, false); - if (builtIn != spv::BuiltInMax) - addDecoration(id, spv::DecorationBuiltIn, (int)builtIn); - -#ifdef NV_EXTENSIONS - if (builtIn == spv::BuiltInSampleMask) { - spv::Decoration decoration; - // GL_NV_sample_mask_override_coverage extension - if (glslangIntermediate->getLayoutOverrideCoverage()) - decoration = (spv::Decoration)spv::DecorationOverrideCoverageNV; - else - decoration = (spv::Decoration)spv::DecorationMax; - addDecoration(id, decoration); - if (decoration != spv::DecorationMax) { - builder.addExtension(spv::E_SPV_NV_sample_mask_override_coverage); - } - } - else if (builtIn == spv::BuiltInLayer) { - // SPV_NV_viewport_array2 extension - if (symbol->getQualifier().layoutViewportRelative) { - addDecoration(id, (spv::Decoration)spv::DecorationViewportRelativeNV); - builder.addCapability(spv::CapabilityShaderViewportMaskNV); - builder.addExtension(spv::E_SPV_NV_viewport_array2); - } - if (symbol->getQualifier().layoutSecondaryViewportRelativeOffset != -2048) { - addDecoration(id, (spv::Decoration)spv::DecorationSecondaryViewportRelativeNV, symbol->getQualifier().layoutSecondaryViewportRelativeOffset); - builder.addCapability(spv::CapabilityShaderStereoViewNV); - builder.addExtension(spv::E_SPV_NV_stereo_view_rendering); - } - } - - if (symbol->getQualifier().layoutPassthrough) { - addDecoration(id, spv::DecorationPassthroughNV); - builder.addCapability(spv::CapabilityGeometryShaderPassthroughNV); - builder.addExtension(spv::E_SPV_NV_geometry_shader_passthrough); - } -#endif - - return id; -} - -// If 'dec' is valid, add no-operand decoration to an object -void TGlslangToSpvTraverser::addDecoration(spv::Id id, spv::Decoration dec) -{ - if (dec != spv::DecorationMax) - builder.addDecoration(id, dec); -} - -// If 'dec' is valid, add a one-operand decoration to an object -void TGlslangToSpvTraverser::addDecoration(spv::Id id, spv::Decoration dec, unsigned value) -{ - if (dec != spv::DecorationMax) - builder.addDecoration(id, dec, value); -} - -// If 'dec' is valid, add a no-operand decoration to a struct member -void TGlslangToSpvTraverser::addMemberDecoration(spv::Id id, int member, spv::Decoration dec) -{ - if (dec != spv::DecorationMax) - builder.addMemberDecoration(id, (unsigned)member, dec); -} - -// If 'dec' is valid, add a one-operand decoration to a struct member -void TGlslangToSpvTraverser::addMemberDecoration(spv::Id id, int member, spv::Decoration dec, unsigned value) -{ - if (dec != spv::DecorationMax) - builder.addMemberDecoration(id, (unsigned)member, dec, value); -} - -// Make a full tree of instructions to build a SPIR-V specialization constant, -// or regular constant if possible. 
-//
-// TBD: this is not yet done, nor verified to be the best design, it does do the leaf symbols though
-//
-// Recursively walk the nodes. The nodes form a tree whose leaves are
-// regular constants, which themselves are trees that createSpvConstant()
-// recursively walks. So, this function walks the "top" of the tree:
-//  - emit specialization constant-building instructions for specConstant
-//  - when running into a non-spec-constant, switch to createSpvConstant()
-spv::Id TGlslangToSpvTraverser::createSpvConstant(const glslang::TIntermTyped& node)
-{
-    assert(node.getQualifier().isConstant());
-
-    // Handle front-end constants first (non-specialization constants).
-    if (! node.getQualifier().specConstant) {
-        // hand off to the non-spec-constant path
-        assert(node.getAsConstantUnion() != nullptr || node.getAsSymbolNode() != nullptr);
-        int nextConst = 0;
-        return createSpvConstantFromConstUnionArray(node.getType(), node.getAsConstantUnion() ? node.getAsConstantUnion()->getConstArray() : node.getAsSymbolNode()->getConstArray(),
-                                                    nextConst, false);
-    }
-
-    // We now know we have a specialization constant to build
-
-    // gl_WorkGroupSize is a special case until the front-end handles hierarchical specialization constants,
-    // even then, it's specialization ids are handled by special case syntax in GLSL: layout(local_size_x = ...
-    if (node.getType().getQualifier().builtIn == glslang::EbvWorkGroupSize) {
-        std::vector dimConstId;
-        for (int dim = 0; dim < 3; ++dim) {
-            bool specConst = (glslangIntermediate->getLocalSizeSpecId(dim) != glslang::TQualifier::layoutNotSet);
-            dimConstId.push_back(builder.makeUintConstant(glslangIntermediate->getLocalSize(dim), specConst));
-            if (specConst)
-                addDecoration(dimConstId.back(), spv::DecorationSpecId, glslangIntermediate->getLocalSizeSpecId(dim));
-        }
-        return builder.makeCompositeConstant(builder.makeVectorType(builder.makeUintType(32), 3), dimConstId, true);
-    }
-
-    // An AST node labelled as specialization constant should be a symbol node.
-    // Its initializer should either be a sub tree with constant nodes, or a constant union array.
-    if (auto* sn = node.getAsSymbolNode()) {
-        if (auto* sub_tree = sn->getConstSubtree()) {
-            // Traverse the constant constructor sub tree like generating normal run-time instructions.
-            // During the AST traversal, if the node is marked as 'specConstant', SpecConstantOpModeGuard
-            // will set the builder into spec constant op instruction generating mode.
-            sub_tree->traverse(this);
-            return accessChainLoad(sub_tree->getType());
-        } else if (auto* const_union_array = &sn->getConstArray()){
-            int nextConst = 0;
-            spv::Id id = createSpvConstantFromConstUnionArray(sn->getType(), *const_union_array, nextConst, true);
-            builder.addName(id, sn->getName().c_str());
-            return id;
-        }
-    }
-
-    // Neither a front-end constant node, nor a specialization constant node with constant union array or
-    // constant sub tree as initializer.
-    logger->missingFunctionality("Neither a front-end constant nor a spec constant.");
-    exit(1);
-    return spv::NoResult;
-}
-
-// Use 'consts' as the flattened glslang source of scalar constants to recursively
-// build the aggregate SPIR-V constant.
-//
-// If there are not enough elements present in 'consts', 0 will be substituted;
-// an empty 'consts' can be used to create a fully zeroed SPIR-V constant.
-// -spv::Id TGlslangToSpvTraverser::createSpvConstantFromConstUnionArray(const glslang::TType& glslangType, const glslang::TConstUnionArray& consts, int& nextConst, bool specConstant) -{ - // vector of constants for SPIR-V - std::vector spvConsts; - - // Type is used for struct and array constants - spv::Id typeId = convertGlslangToSpvType(glslangType); - - if (glslangType.isArray()) { - glslang::TType elementType(glslangType, 0); - for (int i = 0; i < glslangType.getOuterArraySize(); ++i) - spvConsts.push_back(createSpvConstantFromConstUnionArray(elementType, consts, nextConst, false)); - } else if (glslangType.isMatrix()) { - glslang::TType vectorType(glslangType, 0); - for (int col = 0; col < glslangType.getMatrixCols(); ++col) - spvConsts.push_back(createSpvConstantFromConstUnionArray(vectorType, consts, nextConst, false)); - } else if (glslangType.getStruct()) { - glslang::TVector::const_iterator iter; - for (iter = glslangType.getStruct()->begin(); iter != glslangType.getStruct()->end(); ++iter) - spvConsts.push_back(createSpvConstantFromConstUnionArray(*iter->type, consts, nextConst, false)); - } else if (glslangType.getVectorSize() > 1) { - for (unsigned int i = 0; i < (unsigned int)glslangType.getVectorSize(); ++i) { - bool zero = nextConst >= consts.size(); - switch (glslangType.getBasicType()) { - case glslang::EbtInt: - spvConsts.push_back(builder.makeIntConstant(zero ? 0 : consts[nextConst].getIConst())); - break; - case glslang::EbtUint: - spvConsts.push_back(builder.makeUintConstant(zero ? 0 : consts[nextConst].getUConst())); - break; - case glslang::EbtInt64: - spvConsts.push_back(builder.makeInt64Constant(zero ? 0 : consts[nextConst].getI64Const())); - break; - case glslang::EbtUint64: - spvConsts.push_back(builder.makeUint64Constant(zero ? 0 : consts[nextConst].getU64Const())); - break; -#ifdef AMD_EXTENSIONS - case glslang::EbtInt16: - spvConsts.push_back(builder.makeInt16Constant(zero ? 0 : (short)consts[nextConst].getIConst())); - break; - case glslang::EbtUint16: - spvConsts.push_back(builder.makeUint16Constant(zero ? 0 : (unsigned short)consts[nextConst].getUConst())); - break; -#endif - case glslang::EbtFloat: - spvConsts.push_back(builder.makeFloatConstant(zero ? 0.0F : (float)consts[nextConst].getDConst())); - break; - case glslang::EbtDouble: - spvConsts.push_back(builder.makeDoubleConstant(zero ? 0.0 : consts[nextConst].getDConst())); - break; -#ifdef AMD_EXTENSIONS - case glslang::EbtFloat16: - spvConsts.push_back(builder.makeFloat16Constant(zero ? 0.0F : (float)consts[nextConst].getDConst())); - break; -#endif - case glslang::EbtBool: - spvConsts.push_back(builder.makeBoolConstant(zero ? false : consts[nextConst].getBConst())); - break; - default: - assert(0); - break; - } - ++nextConst; - } - } else { - // we have a non-aggregate (scalar) constant - bool zero = nextConst >= consts.size(); - spv::Id scalar = 0; - switch (glslangType.getBasicType()) { - case glslang::EbtInt: - scalar = builder.makeIntConstant(zero ? 0 : consts[nextConst].getIConst(), specConstant); - break; - case glslang::EbtUint: - scalar = builder.makeUintConstant(zero ? 0 : consts[nextConst].getUConst(), specConstant); - break; - case glslang::EbtInt64: - scalar = builder.makeInt64Constant(zero ? 0 : consts[nextConst].getI64Const(), specConstant); - break; - case glslang::EbtUint64: - scalar = builder.makeUint64Constant(zero ? 0 : consts[nextConst].getU64Const(), specConstant); - break; -#ifdef AMD_EXTENSIONS - case glslang::EbtInt16: - scalar = builder.makeInt16Constant(zero ? 
0 : (short)consts[nextConst].getIConst(), specConstant); - break; - case glslang::EbtUint16: - scalar = builder.makeUint16Constant(zero ? 0 : (unsigned short)consts[nextConst].getUConst(), specConstant); - break; -#endif - case glslang::EbtFloat: - scalar = builder.makeFloatConstant(zero ? 0.0F : (float)consts[nextConst].getDConst(), specConstant); - break; - case glslang::EbtDouble: - scalar = builder.makeDoubleConstant(zero ? 0.0 : consts[nextConst].getDConst(), specConstant); - break; -#ifdef AMD_EXTENSIONS - case glslang::EbtFloat16: - scalar = builder.makeFloat16Constant(zero ? 0.0F : (float)consts[nextConst].getDConst(), specConstant); - break; -#endif - case glslang::EbtBool: - scalar = builder.makeBoolConstant(zero ? false : consts[nextConst].getBConst(), specConstant); - break; - default: - assert(0); - break; - } - ++nextConst; - return scalar; - } - - return builder.makeCompositeConstant(typeId, spvConsts); -} - -// Return true if the node is a constant or symbol whose reading has no -// non-trivial observable cost or effect. -bool TGlslangToSpvTraverser::isTrivialLeaf(const glslang::TIntermTyped* node) -{ - // don't know what this is - if (node == nullptr) - return false; - - // a constant is safe - if (node->getAsConstantUnion() != nullptr) - return true; - - // not a symbol means non-trivial - if (node->getAsSymbolNode() == nullptr) - return false; - - // a symbol, depends on what's being read - switch (node->getType().getQualifier().storage) { - case glslang::EvqTemporary: - case glslang::EvqGlobal: - case glslang::EvqIn: - case glslang::EvqInOut: - case glslang::EvqConst: - case glslang::EvqConstReadOnly: - case glslang::EvqUniform: - return true; - default: - return false; - } -} - -// A node is trivial if it is a single operation with no side effects. -// HLSL (and/or vectors) are always trivial, as it does not short circuit. -// Otherwise, error on the side of saying non-trivial. -// Return true if trivial. -bool TGlslangToSpvTraverser::isTrivial(const glslang::TIntermTyped* node) -{ - if (node == nullptr) - return false; - - // count non scalars as trivial, as well as anything coming from HLSL - if (! node->getType().isScalarOrVec1() || glslangIntermediate->getSource() == glslang::EShSourceHlsl) - return true; - - // symbols and constants are trivial - if (isTrivialLeaf(node)) - return true; - - // otherwise, it needs to be a simple operation or one or two leaf nodes - - // not a simple operation - const glslang::TIntermBinary* binaryNode = node->getAsBinaryNode(); - const glslang::TIntermUnary* unaryNode = node->getAsUnaryNode(); - if (binaryNode == nullptr && unaryNode == nullptr) - return false; - - // not on leaf nodes - if (binaryNode && (! isTrivialLeaf(binaryNode->getLeft()) || ! isTrivialLeaf(binaryNode->getRight()))) - return false; - - if (unaryNode && ! 
isTrivialLeaf(unaryNode->getOperand())) {
-        return false;
-    }
-
-    switch (node->getAsOperator()->getOp()) {
-    case glslang::EOpLogicalNot:
-    case glslang::EOpConvIntToBool:
-    case glslang::EOpConvUintToBool:
-    case glslang::EOpConvFloatToBool:
-    case glslang::EOpConvDoubleToBool:
-    case glslang::EOpEqual:
-    case glslang::EOpNotEqual:
-    case glslang::EOpLessThan:
-    case glslang::EOpGreaterThan:
-    case glslang::EOpLessThanEqual:
-    case glslang::EOpGreaterThanEqual:
-    case glslang::EOpIndexDirect:
-    case glslang::EOpIndexDirectStruct:
-    case glslang::EOpLogicalXor:
-    case glslang::EOpAny:
-    case glslang::EOpAll:
-        return true;
-    default:
-        return false;
-    }
-}
-
-// Emit short-circuiting code, where 'right' is never evaluated unless
-// the left side is true (for &&) or false (for ||).
-spv::Id TGlslangToSpvTraverser::createShortCircuit(glslang::TOperator op, glslang::TIntermTyped& left, glslang::TIntermTyped& right)
-{
-    spv::Id boolTypeId = builder.makeBoolType();
-
-    // emit left operand
-    builder.clearAccessChain();
-    left.traverse(this);
-    spv::Id leftId = accessChainLoad(left.getType());
-
-    // Operands to accumulate OpPhi operands
-    std::vector phiOperands;
-    // accumulate left operand's phi information
-    phiOperands.push_back(leftId);
-    phiOperands.push_back(builder.getBuildPoint()->getId());
-
-    // Make the two kinds of operation symmetric with a "!"
-    //   || => emit "if (! left) result = right"
-    //   && => emit "if ( left) result = right"
-    //
-    // TODO: this runtime "not" for || could be avoided by adding functionality
-    // to 'builder' to have an "else" without an "then"
-    if (op == glslang::EOpLogicalOr)
-        leftId = builder.createUnaryOp(spv::OpLogicalNot, boolTypeId, leftId);
-
-    // make an "if" based on the left value
-    spv::Builder::If ifBuilder(leftId, spv::SelectionControlMaskNone, builder);
-
-    // emit right operand as the "then" part of the "if"
-    builder.clearAccessChain();
-    right.traverse(this);
-    spv::Id rightId = accessChainLoad(right.getType());
-
-    // accumulate left operand's phi information
-    phiOperands.push_back(rightId);
-    phiOperands.push_back(builder.getBuildPoint()->getId());
-
-    // finish the "if"
-    ifBuilder.makeEndIf();
-
-    // phi together the two results
-    return builder.createOp(spv::OpPhi, boolTypeId, phiOperands);
-}
-
-// Return type Id of the imported set of extended instructions corresponds to the name.
-// Import this set if it has not been imported yet.
-spv::Id TGlslangToSpvTraverser::getExtBuiltins(const char* name) -{ - if (extBuiltinMap.find(name) != extBuiltinMap.end()) - return extBuiltinMap[name]; - else { - builder.addExtension(name); - spv::Id extBuiltins = builder.import(name); - extBuiltinMap[name] = extBuiltins; - return extBuiltins; - } -} - -}; // end anonymous namespace - -namespace glslang { - -void GetSpirvVersion(std::string& version) -{ - const int bufSize = 100; - char buf[bufSize]; - snprintf(buf, bufSize, "0x%08x, Revision %d", spv::Version, spv::Revision); - version = buf; -} - -// Write SPIR-V out to a binary file -void OutputSpvBin(const std::vector& spirv, const char* baseName) -{ - std::ofstream out; - out.open(baseName, std::ios::binary | std::ios::out); - if (out.fail()) - printf("ERROR: Failed to open file: %s\n", baseName); - for (int i = 0; i < (int)spirv.size(); ++i) { - unsigned int word = spirv[i]; - out.write((const char*)&word, 4); - } - out.close(); -} - -// Write SPIR-V out to a text file with 32-bit hexadecimal words -void OutputSpvHex(const std::vector& spirv, const char* baseName, const char* varName) -{ - std::ofstream out; - out.open(baseName, std::ios::binary | std::ios::out); - if (out.fail()) - printf("ERROR: Failed to open file: %s\n", baseName); - out << "\t// " GLSLANG_REVISION " " GLSLANG_DATE << std::endl; - if (varName != nullptr) { - out << "\t #pragma once" << std::endl; - out << "const uint32_t " << varName << "[] = {" << std::endl; - } - const int WORDS_PER_LINE = 8; - for (int i = 0; i < (int)spirv.size(); i += WORDS_PER_LINE) { - out << "\t"; - for (int j = 0; j < WORDS_PER_LINE && i + j < (int)spirv.size(); ++j) { - const unsigned int word = spirv[i + j]; - out << "0x" << std::hex << std::setw(8) << std::setfill('0') << word; - if (i + j + 1 < (int)spirv.size()) { - out << ","; - } - } - out << std::endl; - } - if (varName != nullptr) { - out << "};"; - } - out.close(); -} - -// -// Set up the glslang traversal -// -void GlslangToSpv(const glslang::TIntermediate& intermediate, std::vector& spirv, SpvOptions* options) -{ - spv::SpvBuildLogger logger; - GlslangToSpv(intermediate, spirv, &logger, options); -} - -void GlslangToSpv(const glslang::TIntermediate& intermediate, std::vector& spirv, - spv::SpvBuildLogger* logger, SpvOptions* options) -{ - TIntermNode* root = intermediate.getTreeRoot(); - - if (root == 0) - return; - - glslang::SpvOptions defaultOptions; - if (options == nullptr) - options = &defaultOptions; - - glslang::GetThreadPoolAllocator().push(); - - TGlslangToSpvTraverser it(&intermediate, logger, *options); - root->traverse(&it); - it.finishSpv(); - it.dumpSpv(spirv); - - glslang::GetThreadPoolAllocator().pop(); -} - -}; // end namespace glslang diff --git a/deps/glslang/SPIRV/GlslangToSpv.h b/deps/glslang/SPIRV/GlslangToSpv.h deleted file mode 100644 index 0dad4d21..00000000 --- a/deps/glslang/SPIRV/GlslangToSpv.h +++ /dev/null @@ -1,63 +0,0 @@ -// -// Copyright (C) 2014 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. 
Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. - -#pragma once - -#if _MSC_VER >= 1900 - #pragma warning(disable : 4464) // relative include path contains '..' -#endif - -#include "../glslang/Include/intermediate.h" - -#include -#include - -#include "Logger.h" - -namespace glslang { - -struct SpvOptions { - SpvOptions() : generateDebugInfo(false) { } - bool generateDebugInfo; -}; - -void GetSpirvVersion(std::string&); -void GlslangToSpv(const glslang::TIntermediate& intermediate, std::vector& spirv, - SpvOptions* options = nullptr); -void GlslangToSpv(const glslang::TIntermediate& intermediate, std::vector& spirv, - spv::SpvBuildLogger* logger, SpvOptions* options = nullptr); -void OutputSpvBin(const std::vector& spirv, const char* baseName); -void OutputSpvHex(const std::vector& spirv, const char* baseName, const char* varName); - -} diff --git a/deps/glslang/SPIRV/InReadableOrder.cpp b/deps/glslang/SPIRV/InReadableOrder.cpp deleted file mode 100644 index 52b29613..00000000 --- a/deps/glslang/SPIRV/InReadableOrder.cpp +++ /dev/null @@ -1,113 +0,0 @@ -// -// Copyright (C) 2016 Google, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE
-// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
-// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-// POSSIBILITY OF SUCH DAMAGE.
-
-// The SPIR-V spec requires code blocks to appear in an order satisfying the
-// dominator-tree direction (ie, dominator before the dominated). This is,
-// actually, easy to achieve: any pre-order CFG traversal algorithm will do it.
-// Because such algorithms visit a block only after traversing some path to it
-// from the root, they necessarily visit the block's idom first.
-//
-// But not every graph-traversal algorithm outputs blocks in an order that
-// appears logical to human readers. The problem is that unrelated branches may
-// be interspersed with each other, and merge blocks may come before some of the
-// branches being merged.
-//
-// A good, human-readable order of blocks may be achieved by performing
-// depth-first search but delaying merge nodes until after all their branches
-// have been visited. This is implemented below by the inReadableOrder()
-// function.
-
-#include "spvIR.h"
-
-#include
-#include
-
-using spv::Block;
-using spv::Id;
-
-namespace {
-// Traverses CFG in a readable order, invoking a pre-set callback on each block.
-// Use by calling visit() on the root block.
-class ReadableOrderTraverser {
-public:
-    explicit ReadableOrderTraverser(std::function callback) : callback_(callback) {}
-    // Visits the block if it hasn't been visited already and isn't currently
-    // being delayed. Invokes callback(block), then descends into its
-    // successors. Delays merge-block and continue-block processing until all
-    // the branches have been completed.
-    void visit(Block* block)
-    {
-        assert(block);
-        if (visited_.count(block) || delayed_.count(block))
-            return;
-        callback_(block);
-        visited_.insert(block);
-        Block* mergeBlock = nullptr;
-        Block* continueBlock = nullptr;
-        auto mergeInst = block->getMergeInstruction();
-        if (mergeInst) {
-            Id mergeId = mergeInst->getIdOperand(0);
-            mergeBlock = block->getParent().getParent().getInstruction(mergeId)->getBlock();
-            delayed_.insert(mergeBlock);
-            if (mergeInst->getOpCode() == spv::OpLoopMerge) {
-                Id continueId = mergeInst->getIdOperand(1);
-                continueBlock =
-                    block->getParent().getParent().getInstruction(continueId)->getBlock();
-                delayed_.insert(continueBlock);
-            }
-        }
-        const auto successors = block->getSuccessors();
-        for (auto it = successors.cbegin(); it != successors.cend(); ++it)
-            visit(*it);
-        if (continueBlock) {
-            delayed_.erase(continueBlock);
-            visit(continueBlock);
-        }
-        if (mergeBlock) {
-            delayed_.erase(mergeBlock);
-            visit(mergeBlock);
-        }
-    }
-
-private:
-    std::function callback_;
-    // Whether a block has already been visited or is being delayed.
-    std::unordered_set visited_, delayed_;
-};
-}
-
-void spv::inReadableOrder(Block* root, std::function callback)
-{
-    ReadableOrderTraverser(callback).visit(root);
-}
diff --git a/deps/glslang/SPIRV/Logger.cpp b/deps/glslang/SPIRV/Logger.cpp
deleted file mode 100644
index 48bd4e3a..00000000
--- a/deps/glslang/SPIRV/Logger.cpp
+++ /dev/null
@@ -1,68 +0,0 @@
-//
-// Copyright (C) 2016 Google, Inc.
-// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. - -#include "Logger.h" - -#include -#include -#include - -namespace spv { - -void SpvBuildLogger::tbdFunctionality(const std::string& f) -{ - if (std::find(std::begin(tbdFeatures), std::end(tbdFeatures), f) == std::end(tbdFeatures)) - tbdFeatures.push_back(f); -} - -void SpvBuildLogger::missingFunctionality(const std::string& f) -{ - if (std::find(std::begin(missingFeatures), std::end(missingFeatures), f) == std::end(missingFeatures)) - missingFeatures.push_back(f); -} - -std::string SpvBuildLogger::getAllMessages() const { - std::ostringstream messages; - for (auto it = tbdFeatures.cbegin(); it != tbdFeatures.cend(); ++it) - messages << "TBD functionality: " << *it << "\n"; - for (auto it = missingFeatures.cbegin(); it != missingFeatures.cend(); ++it) - messages << "Missing functionality: " << *it << "\n"; - for (auto it = warnings.cbegin(); it != warnings.cend(); ++it) - messages << "warning: " << *it << "\n"; - for (auto it = errors.cbegin(); it != errors.cend(); ++it) - messages << "error: " << *it << "\n"; - return messages.str(); -} - -} // end spv namespace diff --git a/deps/glslang/SPIRV/Logger.h b/deps/glslang/SPIRV/Logger.h deleted file mode 100644 index 2e4ddaf5..00000000 --- a/deps/glslang/SPIRV/Logger.h +++ /dev/null @@ -1,74 +0,0 @@ -// -// Copyright (C) 2016 Google, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. - -#ifndef GLSLANG_SPIRV_LOGGER_H -#define GLSLANG_SPIRV_LOGGER_H - -#include -#include - -namespace spv { - -// A class for holding all SPIR-V build status messages, including -// missing/TBD functionalities, warnings, and errors. -class SpvBuildLogger { -public: - SpvBuildLogger() {} - - // Registers a TBD functionality. - void tbdFunctionality(const std::string& f); - // Registers a missing functionality. - void missingFunctionality(const std::string& f); - - // Logs a warning. - void warning(const std::string& w) { warnings.push_back(w); } - // Logs an error. - void error(const std::string& e) { errors.push_back(e); } - - // Returns all messages accumulated in the order of: - // TBD functionalities, missing functionalities, warnings, errors. - std::string getAllMessages() const; - -private: - SpvBuildLogger(const SpvBuildLogger&); - - std::vector tbdFeatures; - std::vector missingFeatures; - std::vector warnings; - std::vector errors; -}; - -} // end spv namespace - -#endif // GLSLANG_SPIRV_LOGGER_H diff --git a/deps/glslang/SPIRV/SPVRemapper.cpp b/deps/glslang/SPIRV/SPVRemapper.cpp deleted file mode 100644 index e8031d02..00000000 --- a/deps/glslang/SPIRV/SPVRemapper.cpp +++ /dev/null @@ -1,1345 +0,0 @@ -// -// Copyright (C) 2015 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#include "SPVRemapper.h" -#include "doc.h" - -#if !defined (use_cpp11) -// ... not supported before C++11 -#else // defined (use_cpp11) - -#include -#include -#include "../glslang/Include/Common.h" - -namespace spv { - - // By default, just abort on error. Can be overridden via RegisterErrorHandler - spirvbin_t::errorfn_t spirvbin_t::errorHandler = [](const std::string&) { exit(5); }; - // By default, eat log messages. Can be overridden via RegisterLogHandler - spirvbin_t::logfn_t spirvbin_t::logHandler = [](const std::string&) { }; - - // This can be overridden to provide other message behavior if needed - void spirvbin_t::msg(int minVerbosity, int indent, const std::string& txt) const - { - if (verbose >= minVerbosity) - logHandler(std::string(indent, ' ') + txt); - } - - // hash opcode, with special handling for OpExtInst - std::uint32_t spirvbin_t::asOpCodeHash(unsigned word) - { - const spv::Op opCode = asOpCode(word); - - std::uint32_t offset = 0; - - switch (opCode) { - case spv::OpExtInst: - offset += asId(word + 4); break; - default: - break; - } - - return opCode * 19 + offset; // 19 = small prime - } - - spirvbin_t::range_t spirvbin_t::literalRange(spv::Op opCode) const - { - static const int maxCount = 1<<30; - - switch (opCode) { - case spv::OpTypeFloat: // fall through... - case spv::OpTypePointer: return range_t(2, 3); - case spv::OpTypeInt: return range_t(2, 4); - // TODO: case spv::OpTypeImage: - // TODO: case spv::OpTypeSampledImage: - case spv::OpTypeSampler: return range_t(3, 8); - case spv::OpTypeVector: // fall through - case spv::OpTypeMatrix: // ... - case spv::OpTypePipe: return range_t(3, 4); - case spv::OpConstant: return range_t(3, maxCount); - default: return range_t(0, 0); - } - } - - spirvbin_t::range_t spirvbin_t::typeRange(spv::Op opCode) const - { - static const int maxCount = 1<<30; - - if (isConstOp(opCode)) - return range_t(1, 2); - - switch (opCode) { - case spv::OpTypeVector: // fall through - case spv::OpTypeMatrix: // ... - case spv::OpTypeSampler: // ... - case spv::OpTypeArray: // ... - case spv::OpTypeRuntimeArray: // ... - case spv::OpTypePipe: return range_t(2, 3); - case spv::OpTypeStruct: // fall through - case spv::OpTypeFunction: return range_t(2, maxCount); - case spv::OpTypePointer: return range_t(3, 4); - default: return range_t(0, 0); - } - } - - spirvbin_t::range_t spirvbin_t::constRange(spv::Op opCode) const - { - static const int maxCount = 1<<30; - - switch (opCode) { - case spv::OpTypeArray: // fall through... - case spv::OpTypeRuntimeArray: return range_t(3, 4); - case spv::OpConstantComposite: return range_t(3, maxCount); - default: return range_t(0, 0); - } - } - - // Return the size of a type in 32-bit words. This currently only - // handles ints and floats, and is only invoked by queries which must be - // integer types. If ever needed, it can be generalized. 
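For reference, the word-size computation described in the comment above is just the type's bit width rounded up to whole 32-bit words. A minimal standalone sketch of that arithmetic, illustrative only and not part of this patch (the helper name is hypothetical):

#include <cassert>

static unsigned bitWidthToWords(unsigned bitWidth)
{
    return (bitWidth + 31) / 32; // round up to whole 32-bit words
}

int main()
{
    assert(bitWidthToWords(16) == 1);
    assert(bitWidthToWords(32) == 1);
    assert(bitWidthToWords(64) == 2);
}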
- unsigned spirvbin_t::typeSizeInWords(spv::Id id) const - { - const unsigned typeStart = idPos(id); - const spv::Op opCode = asOpCode(typeStart); - - switch (opCode) { - case spv::OpTypeInt: // fall through... - case spv::OpTypeFloat: return (spv[typeStart+2]+31)/32; - default: - return 0; - } - } - - // Looks up the type of a given const or variable ID, and - // returns its size in 32-bit words. - unsigned spirvbin_t::idTypeSizeInWords(spv::Id id) const - { - const auto tid_it = idTypeSizeMap.find(id); - if (tid_it == idTypeSizeMap.end()) - error("type size for ID not found"); - - return tid_it->second; - } - - // Is this an opcode we should remove when using --strip? - bool spirvbin_t::isStripOp(spv::Op opCode) const - { - switch (opCode) { - case spv::OpSource: - case spv::OpSourceExtension: - case spv::OpName: - case spv::OpMemberName: - case spv::OpLine: return true; - default: return false; - } - } - - // Return true if this opcode is flow control - bool spirvbin_t::isFlowCtrl(spv::Op opCode) const - { - switch (opCode) { - case spv::OpBranchConditional: - case spv::OpBranch: - case spv::OpSwitch: - case spv::OpLoopMerge: - case spv::OpSelectionMerge: - case spv::OpLabel: - case spv::OpFunction: - case spv::OpFunctionEnd: return true; - default: return false; - } - } - - // Return true if this opcode defines a type - bool spirvbin_t::isTypeOp(spv::Op opCode) const - { - switch (opCode) { - case spv::OpTypeVoid: - case spv::OpTypeBool: - case spv::OpTypeInt: - case spv::OpTypeFloat: - case spv::OpTypeVector: - case spv::OpTypeMatrix: - case spv::OpTypeImage: - case spv::OpTypeSampler: - case spv::OpTypeArray: - case spv::OpTypeRuntimeArray: - case spv::OpTypeStruct: - case spv::OpTypeOpaque: - case spv::OpTypePointer: - case spv::OpTypeFunction: - case spv::OpTypeEvent: - case spv::OpTypeDeviceEvent: - case spv::OpTypeReserveId: - case spv::OpTypeQueue: - case spv::OpTypeSampledImage: - case spv::OpTypePipe: return true; - default: return false; - } - } - - // Return true if this opcode defines a constant - bool spirvbin_t::isConstOp(spv::Op opCode) const - { - switch (opCode) { - case spv::OpConstantNull: error("unimplemented constant type"); - case spv::OpConstantSampler: error("unimplemented constant type"); - - case spv::OpConstantTrue: - case spv::OpConstantFalse: - case spv::OpConstantComposite: - case spv::OpConstant: return true; - default: return false; - } - } - - const auto inst_fn_nop = [](spv::Op, unsigned) { return false; }; - const auto op_fn_nop = [](spv::Id&) { }; - - // g++ doesn't like these defined in the class proper in an anonymous namespace. - // Dunno why. Also MSVC doesn't like the constexpr keyword. Also dunno why. - // Defining them externally seems to please both compilers, so, here they are. 
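The comment above refers to a plain C++ pattern: static const integral members are declared inside the class but defined out of line, which sidesteps the g++/MSVC quirks mentioned. A small illustration under those assumptions, not part of this patch:

#include <iostream>

struct Sentinels {
    static const int unmapped; // declaration only
    static const int unused;
};

// out-of-line definitions keep both compilers happy
const int Sentinels::unmapped = -10000;
const int Sentinels::unused   = -10001;

int main()
{
    std::cout << Sentinels::unmapped << " " << Sentinels::unused << "\n";
}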
- const spv::Id spirvbin_t::unmapped = spv::Id(-10000); - const spv::Id spirvbin_t::unused = spv::Id(-10001); - const int spirvbin_t::header_size = 5; - - spv::Id spirvbin_t::nextUnusedId(spv::Id id) - { - while (isNewIdMapped(id)) // search for an unused ID - ++id; - - return id; - } - - spv::Id spirvbin_t::localId(spv::Id id, spv::Id newId) - { - assert(id != spv::NoResult && newId != spv::NoResult); - - if (id >= idMapL.size()) - idMapL.resize(id+1, unused); - - if (newId != unmapped && newId != unused) { - if (isOldIdUnused(id)) - error(std::string("ID unused in module: ") + std::to_string(id)); - - if (!isOldIdUnmapped(id)) - error(std::string("ID already mapped: ") + std::to_string(id) + " -> " - + std::to_string(localId(id))); - - if (isNewIdMapped(newId)) - error(std::string("ID already used in module: ") + std::to_string(newId)); - - msg(4, 4, std::string("map: ") + std::to_string(id) + " -> " + std::to_string(newId)); - setMapped(newId); - largestNewId = std::max(largestNewId, newId); - } - - return idMapL[id] = newId; - } - - // Parse a literal string from the SPIR binary and return it as an std::string - // Due to C++11 RValue references, this doesn't copy the result string. - std::string spirvbin_t::literalString(unsigned word) const - { - std::string literal; - - literal.reserve(16); - - const char* bytes = reinterpret_cast(spv.data() + word); - - while (bytes && *bytes) - literal += *bytes++; - - return literal; - } - - void spirvbin_t::applyMap() - { - msg(3, 2, std::string("Applying map: ")); - - // Map local IDs through the ID map - process(inst_fn_nop, // ignore instructions - [this](spv::Id& id) { - id = localId(id); - assert(id != unused && id != unmapped); - } - ); - } - - // Find free IDs for anything we haven't mapped - void spirvbin_t::mapRemainder() - { - msg(3, 2, std::string("Remapping remainder: ")); - - spv::Id unusedId = 1; // can't use 0: that's NoResult - spirword_t maxBound = 0; - - for (spv::Id id = 0; id < idMapL.size(); ++id) { - if (isOldIdUnused(id)) - continue; - - // Find a new mapping for any used but unmapped IDs - if (isOldIdUnmapped(id)) - localId(id, unusedId = nextUnusedId(unusedId)); - - if (isOldIdUnmapped(id)) - error(std::string("old ID not mapped: ") + std::to_string(id)); - - // Track max bound - maxBound = std::max(maxBound, localId(id) + 1); - } - - bound(maxBound); // reset header ID bound to as big as it now needs to be - } - - // Mark debug instructions for stripping - void spirvbin_t::stripDebug() - { - // Strip instructions in the stripOp set: debug info. - process( - [&](spv::Op opCode, unsigned start) { - // remember opcodes we want to strip later - if (isStripOp(opCode)) - stripInst(start); - return true; - }, - op_fn_nop); - } - - // Mark instructions that refer to now-removed IDs for stripping - void spirvbin_t::stripDeadRefs() - { - process( - [&](spv::Op opCode, unsigned start) { - // strip opcodes pointing to removed data - switch (opCode) { - case spv::OpName: - case spv::OpMemberName: - case spv::OpDecorate: - case spv::OpMemberDecorate: - if (idPosR.find(asId(start+1)) == idPosR.end()) - stripInst(start); - break; - default: - break; // leave it alone - } - - return true; - }, - op_fn_nop); - - strip(); - } - - // Update local maps of ID, type, etc positions - void spirvbin_t::buildLocalMaps() - { - msg(2, 2, std::string("build local maps: ")); - - mapped.clear(); - idMapL.clear(); -// preserve nameMap, so we don't clear that. 
- fnPos.clear(); - fnCalls.clear(); - typeConstPos.clear(); - idPosR.clear(); - entryPoint = spv::NoResult; - largestNewId = 0; - - idMapL.resize(bound(), unused); - - int fnStart = 0; - spv::Id fnRes = spv::NoResult; - - // build local Id and name maps - process( - [&](spv::Op opCode, unsigned start) { - unsigned word = start+1; - spv::Id typeId = spv::NoResult; - - if (spv::InstructionDesc[opCode].hasType()) - typeId = asId(word++); - - // If there's a result ID, remember the size of its type - if (spv::InstructionDesc[opCode].hasResult()) { - const spv::Id resultId = asId(word++); - idPosR[resultId] = start; - - if (typeId != spv::NoResult) { - const unsigned idTypeSize = typeSizeInWords(typeId); - - if (idTypeSize != 0) - idTypeSizeMap[resultId] = idTypeSize; - } - } - - if (opCode == spv::Op::OpName) { - const spv::Id target = asId(start+1); - const std::string name = literalString(start+2); - nameMap[name] = target; - - } else if (opCode == spv::Op::OpFunctionCall) { - ++fnCalls[asId(start + 3)]; - } else if (opCode == spv::Op::OpEntryPoint) { - entryPoint = asId(start + 2); - } else if (opCode == spv::Op::OpFunction) { - if (fnStart != 0) - error("nested function found"); - fnStart = start; - fnRes = asId(start + 2); - } else if (opCode == spv::Op::OpFunctionEnd) { - assert(fnRes != spv::NoResult); - if (fnStart == 0) - error("function end without function start"); - fnPos[fnRes] = range_t(fnStart, start + asWordCount(start)); - fnStart = 0; - } else if (isConstOp(opCode)) { - assert(asId(start + 2) != spv::NoResult); - typeConstPos.insert(start); - } else if (isTypeOp(opCode)) { - assert(asId(start + 1) != spv::NoResult); - typeConstPos.insert(start); - } - - return false; - }, - - [this](spv::Id& id) { localId(id, unmapped); } - ); - } - - // Validate the SPIR header - void spirvbin_t::validate() const - { - msg(2, 2, std::string("validating: ")); - - if (spv.size() < header_size) - error("file too short: "); - - if (magic() != spv::MagicNumber) - error("bad magic number"); - - // field 1 = version - // field 2 = generator magic - // field 3 = result bound - - if (schemaNum() != 0) - error("bad schema, must be 0"); - } - - int spirvbin_t::processInstruction(unsigned word, instfn_t instFn, idfn_t idFn) - { - const auto instructionStart = word; - const unsigned wordCount = asWordCount(instructionStart); - const int nextInst = word++ + wordCount; - spv::Op opCode = asOpCode(instructionStart); - - if (nextInst > int(spv.size())) - error("spir instruction terminated too early"); - - // Base for computing number of operands; will be updated as more is learned - unsigned numOperands = wordCount - 1; - - if (instFn(opCode, instructionStart)) - return nextInst; - - // Read type and result ID from instruction desc table - if (spv::InstructionDesc[opCode].hasType()) { - idFn(asId(word++)); - --numOperands; - } - - if (spv::InstructionDesc[opCode].hasResult()) { - idFn(asId(word++)); - --numOperands; - } - - // Extended instructions: currently, assume everything is an ID. - // TODO: add whatever data we need for exceptions to that - if (opCode == spv::OpExtInst) { - word += 2; // instruction set, and instruction from set - numOperands -= 2; - - for (unsigned op=0; op < numOperands; ++op) - idFn(asId(word++)); // ID - - return nextInst; - } - - // Circular buffer so we can look back at previous unmapped values during the mapping pass. 
- static const unsigned idBufferSize = 4; - spv::Id idBuffer[idBufferSize]; - unsigned idBufferPos = 0; - - // Store IDs from instruction in our map - for (int op = 0; numOperands > 0; ++op, --numOperands) { - // SpecConstantOp is special: it includes the operands of another opcode which is - // given as a literal in the 3rd word. We will switch over to pretending that the - // opcode being processed is the literal opcode value of the SpecConstantOp. See the - // SPIRV spec for details. This way we will handle IDs and literals as appropriate for - // the embedded op. - if (opCode == spv::OpSpecConstantOp) { - if (op == 0) { - opCode = asOpCode(word++); // this is the opcode embedded in the SpecConstantOp. - --numOperands; - } - } - - switch (spv::InstructionDesc[opCode].operands.getClass(op)) { - case spv::OperandId: - case spv::OperandScope: - case spv::OperandMemorySemantics: - idBuffer[idBufferPos] = asId(word); - idBufferPos = (idBufferPos + 1) % idBufferSize; - idFn(asId(word++)); - break; - - case spv::OperandVariableIds: - for (unsigned i = 0; i < numOperands; ++i) - idFn(asId(word++)); - return nextInst; - - case spv::OperandVariableLiterals: - // for clarity - // if (opCode == spv::OpDecorate && asDecoration(word - 1) == spv::DecorationBuiltIn) { - // ++word; - // --numOperands; - // } - // word += numOperands; - return nextInst; - - case spv::OperandVariableLiteralId: { - if (opCode == OpSwitch) { - // word-2 is the position of the selector ID. OpSwitch Literals match its type. - // In case the IDs are currently being remapped, we get the word[-2] ID from - // the circular idBuffer. - const unsigned literalSizePos = (idBufferPos+idBufferSize-2) % idBufferSize; - const unsigned literalSize = idTypeSizeInWords(idBuffer[literalSizePos]); - const unsigned numLiteralIdPairs = (nextInst-word) / (1+literalSize); - - for (unsigned arg=0; arg instPos; - instPos.reserve(unsigned(spv.size()) / 16); // initial estimate; can grow if needed. - - // Build local table of instruction start positions - process( - [&](spv::Op, unsigned start) { instPos.push_back(start); return true; }, - op_fn_nop); - - // Window size for context-sensitive canonicalization values - // Empirical best size from a single data set. TODO: Would be a good tunable. - // We essentially perform a little convolution around each instruction, - // to capture the flavor of nearby code, to hopefully match to similar - // code in other modules. - static const unsigned windowSize = 2; - - for (unsigned entry = 0; entry < unsigned(instPos.size()); ++entry) { - const unsigned start = instPos[entry]; - const spv::Op opCode = asOpCode(start); - - if (opCode == spv::OpFunction) - fnId = asId(start + 2); - - if (opCode == spv::OpFunctionEnd) - fnId = spv::NoResult; - - if (fnId != spv::NoResult) { // if inside a function - if (spv::InstructionDesc[opCode].hasResult()) { - const unsigned word = start + (spv::InstructionDesc[opCode].hasType() ? 
2 : 1); - const spv::Id resId = asId(word); - std::uint32_t hashval = fnId * 17; // small prime - - for (unsigned i = entry-1; i >= entry-windowSize; --i) { - if (asOpCode(instPos[i]) == spv::OpFunction) - break; - hashval = hashval * 30103 + asOpCodeHash(instPos[i]); // 30103 = semiarbitrary prime - } - - for (unsigned i = entry; i <= entry + windowSize; ++i) { - if (asOpCode(instPos[i]) == spv::OpFunctionEnd) - break; - hashval = hashval * 30103 + asOpCodeHash(instPos[i]); // 30103 = semiarbitrary prime - } - - if (isOldIdUnmapped(resId)) - localId(resId, nextUnusedId(hashval % softTypeIdLimit + firstMappedID)); - } - } - } - - spv::Op thisOpCode(spv::OpNop); - std::unordered_map opCounter; - int idCounter(0); - fnId = spv::NoResult; - - process( - [&](spv::Op opCode, unsigned start) { - switch (opCode) { - case spv::OpFunction: - // Reset counters at each function - idCounter = 0; - opCounter.clear(); - fnId = asId(start + 2); - break; - - case spv::OpImageSampleImplicitLod: - case spv::OpImageSampleExplicitLod: - case spv::OpImageSampleDrefImplicitLod: - case spv::OpImageSampleDrefExplicitLod: - case spv::OpImageSampleProjImplicitLod: - case spv::OpImageSampleProjExplicitLod: - case spv::OpImageSampleProjDrefImplicitLod: - case spv::OpImageSampleProjDrefExplicitLod: - case spv::OpDot: - case spv::OpCompositeExtract: - case spv::OpCompositeInsert: - case spv::OpVectorShuffle: - case spv::OpLabel: - case spv::OpVariable: - - case spv::OpAccessChain: - case spv::OpLoad: - case spv::OpStore: - case spv::OpCompositeConstruct: - case spv::OpFunctionCall: - ++opCounter[opCode]; - idCounter = 0; - thisOpCode = opCode; - break; - default: - thisOpCode = spv::OpNop; - } - - return false; - }, - - [&](spv::Id& id) { - if (thisOpCode != spv::OpNop) { - ++idCounter; - const std::uint32_t hashval = opCounter[thisOpCode] * thisOpCode * 50047 + idCounter + fnId * 117; - - if (isOldIdUnmapped(id)) - localId(id, nextUnusedId(hashval % softTypeIdLimit + firstMappedID)); - } - }); - } - - // EXPERIMENTAL: forward IO and uniform load/stores into operands - // This produces invalid Schema-0 SPIRV - void spirvbin_t::forwardLoadStores() - { - idset_t fnLocalVars; // set of function local vars - idmap_t idMap; // Map of load result IDs to what they load - - // EXPERIMENTAL: Forward input and access chain loads into consumptions - process( - [&](spv::Op opCode, unsigned start) { - // Add inputs and uniforms to the map - if ((opCode == spv::OpVariable && asWordCount(start) == 4) && - (spv[start+3] == spv::StorageClassUniform || - spv[start+3] == spv::StorageClassUniformConstant || - spv[start+3] == spv::StorageClassInput)) - fnLocalVars.insert(asId(start+2)); - - if (opCode == spv::OpAccessChain && fnLocalVars.count(asId(start+3)) > 0) - fnLocalVars.insert(asId(start+2)); - - if (opCode == spv::OpLoad && fnLocalVars.count(asId(start+3)) > 0) { - idMap[asId(start+2)] = asId(start+3); - stripInst(start); - } - - return false; - }, - - [&](spv::Id& id) { if (idMap.find(id) != idMap.end()) id = idMap[id]; } - ); - - // EXPERIMENTAL: Implicit output stores - fnLocalVars.clear(); - idMap.clear(); - - process( - [&](spv::Op opCode, unsigned start) { - // Add inputs and uniforms to the map - if ((opCode == spv::OpVariable && asWordCount(start) == 4) && - (spv[start+3] == spv::StorageClassOutput)) - fnLocalVars.insert(asId(start+2)); - - if (opCode == spv::OpStore && fnLocalVars.count(asId(start+1)) > 0) { - idMap[asId(start+2)] = asId(start+1); - stripInst(start); - } - - return false; - }, - op_fn_nop); - - process( - 
inst_fn_nop, - [&](spv::Id& id) { if (idMap.find(id) != idMap.end()) id = idMap[id]; } - ); - - strip(); // strip out data we decided to eliminate - } - - // optimize loads and stores - void spirvbin_t::optLoadStore() - { - idset_t fnLocalVars; // candidates for removal (only locals) - idmap_t idMap; // Map of load result IDs to what they load - blockmap_t blockMap; // Map of IDs to blocks they first appear in - int blockNum = 0; // block count, to avoid crossing flow control - - // Find all the function local pointers stored at most once, and not via access chains - process( - [&](spv::Op opCode, unsigned start) { - const int wordCount = asWordCount(start); - - // Count blocks, so we can avoid crossing flow control - if (isFlowCtrl(opCode)) - ++blockNum; - - // Add local variables to the map - if ((opCode == spv::OpVariable && spv[start+3] == spv::StorageClassFunction && asWordCount(start) == 4)) { - fnLocalVars.insert(asId(start+2)); - return true; - } - - // Ignore process vars referenced via access chain - if ((opCode == spv::OpAccessChain || opCode == spv::OpInBoundsAccessChain) && fnLocalVars.count(asId(start+3)) > 0) { - fnLocalVars.erase(asId(start+3)); - idMap.erase(asId(start+3)); - return true; - } - - if (opCode == spv::OpLoad && fnLocalVars.count(asId(start+3)) > 0) { - const spv::Id varId = asId(start+3); - - // Avoid loads before stores - if (idMap.find(varId) == idMap.end()) { - fnLocalVars.erase(varId); - idMap.erase(varId); - } - - // don't do for volatile references - if (wordCount > 4 && (spv[start+4] & spv::MemoryAccessVolatileMask)) { - fnLocalVars.erase(varId); - idMap.erase(varId); - } - - // Handle flow control - if (blockMap.find(varId) == blockMap.end()) { - blockMap[varId] = blockNum; // track block we found it in. - } else if (blockMap[varId] != blockNum) { - fnLocalVars.erase(varId); // Ignore if crosses flow control - idMap.erase(varId); - } - - return true; - } - - if (opCode == spv::OpStore && fnLocalVars.count(asId(start+1)) > 0) { - const spv::Id varId = asId(start+1); - - if (idMap.find(varId) == idMap.end()) { - idMap[varId] = asId(start+2); - } else { - // Remove if it has more than one store to the same pointer - fnLocalVars.erase(varId); - idMap.erase(varId); - } - - // don't do for volatile references - if (wordCount > 3 && (spv[start+3] & spv::MemoryAccessVolatileMask)) { - fnLocalVars.erase(asId(start+3)); - idMap.erase(asId(start+3)); - } - - // Handle flow control - if (blockMap.find(varId) == blockMap.end()) { - blockMap[varId] = blockNum; // track block we found it in. - } else if (blockMap[varId] != blockNum) { - fnLocalVars.erase(varId); // Ignore if crosses flow control - idMap.erase(varId); - } - - return true; - } - - return false; - }, - - // If local var id used anywhere else, don't eliminate - [&](spv::Id& id) { - if (fnLocalVars.count(id) > 0) { - fnLocalVars.erase(id); - idMap.erase(id); - } - } - ); - - process( - [&](spv::Op opCode, unsigned start) { - if (opCode == spv::OpLoad && fnLocalVars.count(asId(start+3)) > 0) - idMap[asId(start+2)] = idMap[asId(start+3)]; - return false; - }, - op_fn_nop); - - // Chase replacements to their origins, in case there is a chain such as: - // 2 = store 1 - // 3 = load 2 - // 4 = store 3 - // 5 = load 4 - // We want to replace uses of 5 with 1. 
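The chain-chasing step the comment above describes can be sketched in isolation: every mapping is followed until it leaves the map, so a chain such as 5 -> 4 -> 3 -> 2 -> 1 collapses to 5 -> 1. A self-contained sketch of that idea (names hypothetical, not the patch's code):

#include <cassert>
#include <unordered_map>

using Id = unsigned;

static void collapseChains(std::unordered_map<Id, Id>& idMap)
{
    for (auto& pair : idMap) {
        Id id = pair.second;
        auto it = idMap.find(id);
        while (it != idMap.end()) {   // chase to the end of the chain
            id = it->second;
            it = idMap.find(id);
        }
        pair.second = id;             // record the final origin
    }
}

int main()
{
    std::unordered_map<Id, Id> idMap = { { 5, 4 }, { 4, 3 }, { 3, 2 }, { 2, 1 } };
    collapseChains(idMap);
    assert(idMap.at(5) == 1);
    assert(idMap.at(3) == 1);
}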
- for (const auto& idPair : idMap) { - spv::Id id = idPair.first; - while (idMap.find(id) != idMap.end()) // Chase to end of chain - id = idMap[id]; - - idMap[idPair.first] = id; // replace with final result - } - - // Remove the load/store/variables for the ones we've discovered - process( - [&](spv::Op opCode, unsigned start) { - if ((opCode == spv::OpLoad && fnLocalVars.count(asId(start+3)) > 0) || - (opCode == spv::OpStore && fnLocalVars.count(asId(start+1)) > 0) || - (opCode == spv::OpVariable && fnLocalVars.count(asId(start+2)) > 0)) { - - stripInst(start); - return true; - } - - return false; - }, - - [&](spv::Id& id) { - if (idMap.find(id) != idMap.end()) id = idMap[id]; - } - ); - - strip(); // strip out data we decided to eliminate - } - - // remove bodies of uncalled functions - void spirvbin_t::dceFuncs() - { - msg(3, 2, std::string("Removing Dead Functions: ")); - - // TODO: There are more efficient ways to do this. - bool changed = true; - - while (changed) { - changed = false; - - for (auto fn = fnPos.begin(); fn != fnPos.end(); ) { - if (fn->first == entryPoint) { // don't DCE away the entry point! - ++fn; - continue; - } - - const auto call_it = fnCalls.find(fn->first); - - if (call_it == fnCalls.end() || call_it->second == 0) { - changed = true; - stripRange.push_back(fn->second); - - // decrease counts of called functions - process( - [&](spv::Op opCode, unsigned start) { - if (opCode == spv::Op::OpFunctionCall) { - const auto call_it = fnCalls.find(asId(start + 3)); - if (call_it != fnCalls.end()) { - if (--call_it->second <= 0) - fnCalls.erase(call_it); - } - } - - return true; - }, - op_fn_nop, - fn->second.first, - fn->second.second); - - fn = fnPos.erase(fn); - } else ++fn; - } - } - } - - // remove unused function variables + decorations - void spirvbin_t::dceVars() - { - msg(3, 2, std::string("DCE Vars: ")); - - std::unordered_map varUseCount; - - // Count function variable use - process( - [&](spv::Op opCode, unsigned start) { - if (opCode == spv::OpVariable) { - ++varUseCount[asId(start+2)]; - return true; - } else if (opCode == spv::OpEntryPoint) { - const int wordCount = asWordCount(start); - for (int i = 4; i < wordCount; i++) { - ++varUseCount[asId(start+i)]; - } - return true; - } else - return false; - }, - - [&](spv::Id& id) { if (varUseCount[id]) ++varUseCount[id]; } - ); - - // Remove single-use function variables + associated decorations and names - process( - [&](spv::Op opCode, unsigned start) { - spv::Id id = spv::NoResult; - if (opCode == spv::OpVariable) - id = asId(start+2); - if (opCode == spv::OpDecorate || opCode == spv::OpName) - id = asId(start+1); - - if (id != spv::NoResult && varUseCount[id] == 1) - stripInst(start); - - return true; - }, - op_fn_nop); - } - - // remove unused types - void spirvbin_t::dceTypes() - { - std::vector isType(bound(), false); - - // for speed, make O(1) way to get to type query (map is log(n)) - for (const auto typeStart : typeConstPos) - isType[asTypeConstId(typeStart)] = true; - - std::unordered_map typeUseCount; - - // This is not the most efficient algorithm, but this is an offline tool, and - // it's easy to write this way. Can be improved opportunistically if needed. 
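The dead-type elimination below runs to a fixed point: it rescans, drops anything no longer referenced, and repeats, since removing one definition can orphan its operands. A simplified, generic sketch of that loop shape, assuming a toy id -> operand-list table rather than the real use counting here:

#include <cassert>
#include <map>
#include <set>
#include <vector>

using Id = int;

static std::set<Id> deadStrip(std::map<Id, std::vector<Id>> defs, const std::set<Id>& roots)
{
    std::set<Id> removed;
    bool changed = true;
    while (changed) {
        changed = false;
        std::map<Id, int> useCount;
        for (const auto& d : defs)
            for (Id ref : d.second)
                ++useCount[ref];
        for (auto it = defs.begin(); it != defs.end();) {
            if (roots.count(it->first) == 0 && useCount[it->first] == 0) {
                removed.insert(it->first);
                it = defs.erase(it);   // drop the unreferenced definition
                changed = true;        // its operands may now be unreferenced too
            } else {
                ++it;
            }
        }
    }
    return removed;
}

int main()
{
    // 3 is a root; 2 is only used by 1, and 1 is used by nothing.
    std::map<Id, std::vector<Id>> defs = { { 1, { 2 } }, { 2, {} }, { 3, {} } };
    std::set<Id> gone = deadStrip(defs, /*roots=*/{ 3 });
    assert(gone.count(1) && gone.count(2) && !gone.count(3));
}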
- bool changed = true; - while (changed) { - changed = false; - strip(); - typeUseCount.clear(); - - // Count total type usage - process(inst_fn_nop, - [&](spv::Id& id) { if (isType[id]) ++typeUseCount[id]; } - ); - - // Remove single reference types - for (const auto typeStart : typeConstPos) { - const spv::Id typeId = asTypeConstId(typeStart); - if (typeUseCount[typeId] == 1) { - changed = true; - --typeUseCount[typeId]; - stripInst(typeStart); - } - } - } - } - -#ifdef NOTDEF - bool spirvbin_t::matchType(const spirvbin_t::globaltypes_t& globalTypes, spv::Id lt, spv::Id gt) const - { - // Find the local type id "lt" and global type id "gt" - const auto lt_it = typeConstPosR.find(lt); - if (lt_it == typeConstPosR.end()) - return false; - - const auto typeStart = lt_it->second; - - // Search for entry in global table - const auto gtype = globalTypes.find(gt); - if (gtype == globalTypes.end()) - return false; - - const auto& gdata = gtype->second; - - // local wordcount and opcode - const int wordCount = asWordCount(typeStart); - const spv::Op opCode = asOpCode(typeStart); - - // no type match if opcodes don't match, or operand count doesn't match - if (opCode != opOpCode(gdata[0]) || wordCount != opWordCount(gdata[0])) - return false; - - const unsigned numOperands = wordCount - 2; // all types have a result - - const auto cmpIdRange = [&](range_t range) { - for (int x=range.first; xsecond; - } - - // Hash types to canonical values. This can return ID collisions (it's a bit - // inevitable): it's up to the caller to handle that gracefully. - std::uint32_t spirvbin_t::hashType(unsigned typeStart) const - { - const unsigned wordCount = asWordCount(typeStart); - const spv::Op opCode = asOpCode(typeStart); - - switch (opCode) { - case spv::OpTypeVoid: return 0; - case spv::OpTypeBool: return 1; - case spv::OpTypeInt: return 3 + (spv[typeStart+3]); - case spv::OpTypeFloat: return 5; - case spv::OpTypeVector: - return 6 + hashType(idPos(spv[typeStart+2])) * (spv[typeStart+3] - 1); - case spv::OpTypeMatrix: - return 30 + hashType(idPos(spv[typeStart+2])) * (spv[typeStart+3] - 1); - case spv::OpTypeImage: - return 120 + hashType(idPos(spv[typeStart+2])) + - spv[typeStart+3] + // dimensionality - spv[typeStart+4] * 8 * 16 + // depth - spv[typeStart+5] * 4 * 16 + // arrayed - spv[typeStart+6] * 2 * 16 + // multisampled - spv[typeStart+7] * 1 * 16; // format - case spv::OpTypeSampler: - return 500; - case spv::OpTypeSampledImage: - return 502; - case spv::OpTypeArray: - return 501 + hashType(idPos(spv[typeStart+2])) * spv[typeStart+3]; - case spv::OpTypeRuntimeArray: - return 5000 + hashType(idPos(spv[typeStart+2])); - case spv::OpTypeStruct: - { - std::uint32_t hash = 10000; - for (unsigned w=2; w < wordCount; ++w) - hash += w * hashType(idPos(spv[typeStart+w])); - return hash; - } - - case spv::OpTypeOpaque: return 6000 + spv[typeStart+2]; - case spv::OpTypePointer: return 100000 + hashType(idPos(spv[typeStart+3])); - case spv::OpTypeFunction: - { - std::uint32_t hash = 200000; - for (unsigned w=2; w < wordCount; ++w) - hash += w * hashType(idPos(spv[typeStart+w])); - return hash; - } - - case spv::OpTypeEvent: return 300000; - case spv::OpTypeDeviceEvent: return 300001; - case spv::OpTypeReserveId: return 300002; - case spv::OpTypeQueue: return 300003; - case spv::OpTypePipe: return 300004; - - case spv::OpConstantNull: return 300005; - case spv::OpConstantSampler: return 300006; - - case spv::OpConstantTrue: return 300007; - case spv::OpConstantFalse: return 300008; - case 
spv::OpConstantComposite: - { - std::uint32_t hash = 300011 + hashType(idPos(spv[typeStart+1])); - for (unsigned w=3; w < wordCount; ++w) - hash += w * hashType(idPos(spv[typeStart+w])); - return hash; - } - case spv::OpConstant: - { - std::uint32_t hash = 400011 + hashType(idPos(spv[typeStart+1])); - for (unsigned w=3; w < wordCount; ++w) - hash += w * spv[typeStart+w]; - return hash; - } - - default: - error("unknown type opcode"); - return 0; - } - } - - void spirvbin_t::mapTypeConst() - { - globaltypes_t globalTypeMap; - - msg(3, 2, std::string("Remapping Consts & Types: ")); - - static const std::uint32_t softTypeIdLimit = 3011; // small prime. TODO: get from options - static const std::uint32_t firstMappedID = 8; // offset into ID space - - for (auto& typeStart : typeConstPos) { - const spv::Id resId = asTypeConstId(typeStart); - const std::uint32_t hashval = hashType(typeStart); - - if (isOldIdUnmapped(resId)) - localId(resId, nextUnusedId(hashval % softTypeIdLimit + firstMappedID)); - } - } - - // Strip a single binary by removing ranges given in stripRange - void spirvbin_t::strip() - { - if (stripRange.empty()) // nothing to do - return; - - // Sort strip ranges in order of traversal - std::sort(stripRange.begin(), stripRange.end()); - - // Allocate a new binary big enough to hold old binary - // We'll step this iterator through the strip ranges as we go through the binary - auto strip_it = stripRange.begin(); - - int strippedPos = 0; - for (unsigned word = 0; word < unsigned(spv.size()); ++word) { - if (strip_it != stripRange.end() && word >= strip_it->second) - ++strip_it; - - if (strip_it == stripRange.end() || word < strip_it->first || word >= strip_it->second) - spv[strippedPos++] = spv[word]; - } - - spv.resize(strippedPos); - stripRange.clear(); - - buildLocalMaps(); - } - - // Strip a single binary by removing ranges given in stripRange - void spirvbin_t::remap(std::uint32_t opts) - { - options = opts; - - // Set up opcode tables from SpvDoc - spv::Parameterize(); - - validate(); // validate header - buildLocalMaps(); // build ID maps - - msg(3, 4, std::string("ID bound: ") + std::to_string(bound())); - - if (options & STRIP) stripDebug(); - strip(); // strip out data we decided to eliminate - if (options & OPT_LOADSTORE) optLoadStore(); - if (options & OPT_FWD_LS) forwardLoadStores(); - if (options & DCE_FUNCS) dceFuncs(); - if (options & DCE_VARS) dceVars(); - if (options & DCE_TYPES) dceTypes(); - - strip(); // strip out data we decided to eliminate - stripDeadRefs(); // remove references to things we DCEed - // after the last strip, we must clean any debug info referring to now-deleted data - - if (options & MAP_TYPES) mapTypeConst(); - if (options & MAP_NAMES) mapNames(); - if (options & MAP_FUNCS) mapFnBodies(); - - if (options & MAP_ALL) { - mapRemainder(); // map any unmapped IDs - applyMap(); // Now remap each shader to the new IDs we've come up with - } - } - - // remap from a memory image - void spirvbin_t::remap(std::vector& in_spv, std::uint32_t opts) - { - spv.swap(in_spv); - remap(opts); - spv.swap(in_spv); - } - -} // namespace SPV - -#endif // defined (use_cpp11) - diff --git a/deps/glslang/SPIRV/SPVRemapper.h b/deps/glslang/SPIRV/SPVRemapper.h deleted file mode 100644 index f9f369a3..00000000 --- a/deps/glslang/SPIRV/SPVRemapper.h +++ /dev/null @@ -1,296 +0,0 @@ -// -// Copyright (C) 2015 LunarG, Inc. -// -// All rights reserved. 
-// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#ifndef SPIRVREMAPPER_H -#define SPIRVREMAPPER_H - -#include -#include -#include - -namespace spv { - -// MSVC defines __cplusplus as an older value, even when it supports almost all of 11. -// We handle that here by making our own symbol. -#if __cplusplus >= 201103L || _MSC_VER >= 1700 -# define use_cpp11 1 -#endif - -class spirvbin_base_t -{ -public: - enum Options { - NONE = 0, - STRIP = (1<<0), - MAP_TYPES = (1<<1), - MAP_NAMES = (1<<2), - MAP_FUNCS = (1<<3), - DCE_FUNCS = (1<<4), - DCE_VARS = (1<<5), - DCE_TYPES = (1<<6), - OPT_LOADSTORE = (1<<7), - OPT_FWD_LS = (1<<8), // EXPERIMENTAL: PRODUCES INVALID SCHEMA-0 SPIRV - MAP_ALL = (MAP_TYPES | MAP_NAMES | MAP_FUNCS), - DCE_ALL = (DCE_FUNCS | DCE_VARS | DCE_TYPES), - OPT_ALL = (OPT_LOADSTORE), - - ALL_BUT_STRIP = (MAP_ALL | DCE_ALL | OPT_ALL), - DO_EVERYTHING = (STRIP | ALL_BUT_STRIP) - }; -}; - -} // namespace SPV - -#if !defined (use_cpp11) -#include -#include - -namespace spv { -class spirvbin_t : public spirvbin_base_t -{ -public: - spirvbin_t(int /*verbose = 0*/) { } - - void remap(std::vector& /*spv*/, unsigned int /*opts = 0*/) - { - printf("Tool not compiled for C++11, which is required for SPIR-V remapping.\n"); - exit(5); - } -}; - -} // namespace SPV - -#else // defined (use_cpp11) - -#include -#include -#include -#include -#include -#include -#include - -#include "spirv.hpp" -#include "spvIR.h" - -namespace spv { - -// class to hold SPIR-V binary data for remapping, DCE, and debug stripping -class spirvbin_t : public spirvbin_base_t -{ -public: - spirvbin_t(int verbose = 0) : entryPoint(spv::NoResult), largestNewId(0), verbose(verbose) { } - virtual ~spirvbin_t() { } - - // remap on an existing binary in memory - void remap(std::vector& spv, std::uint32_t opts = DO_EVERYTHING); - - // Type for error/log handler functions - typedef std::function errorfn_t; - typedef std::function logfn_t; - - // Register error/log handling functions (can be lambda fn / functor / etc) - static void registerErrorHandler(errorfn_t handler) 
{ errorHandler = handler; } - static void registerLogHandler(logfn_t handler) { logHandler = handler; } - -protected: - // This can be overridden to provide other message behavior if needed - virtual void msg(int minVerbosity, int indent, const std::string& txt) const; - -private: - // Local to global, or global to local ID map - typedef std::unordered_map idmap_t; - typedef std::unordered_set idset_t; - typedef std::unordered_map blockmap_t; - - void remap(std::uint32_t opts = DO_EVERYTHING); - - // Map of names to IDs - typedef std::unordered_map namemap_t; - - typedef std::uint32_t spirword_t; - - typedef std::pair range_t; - typedef std::function idfn_t; - typedef std::function instfn_t; - - // Special Values for ID map: - static const spv::Id unmapped; // unchanged from default value - static const spv::Id unused; // unused ID - static const int header_size; // SPIR header = 5 words - - class id_iterator_t; - - // For mapping type entries between different shaders - typedef std::vector typeentry_t; - typedef std::map globaltypes_t; - - // A set that preserves position order, and a reverse map - typedef std::set posmap_t; - typedef std::unordered_map posmap_rev_t; - - // Maps and ID to the size of its base type, if known. - typedef std::unordered_map typesize_map_t; - - // handle error - void error(const std::string& txt) const { errorHandler(txt); } - - bool isConstOp(spv::Op opCode) const; - bool isTypeOp(spv::Op opCode) const; - bool isStripOp(spv::Op opCode) const; - bool isFlowCtrl(spv::Op opCode) const; - range_t literalRange(spv::Op opCode) const; - range_t typeRange(spv::Op opCode) const; - range_t constRange(spv::Op opCode) const; - unsigned typeSizeInWords(spv::Id id) const; - unsigned idTypeSizeInWords(spv::Id id) const; - - spv::Id& asId(unsigned word) { return spv[word]; } - const spv::Id& asId(unsigned word) const { return spv[word]; } - spv::Op asOpCode(unsigned word) const { return opOpCode(spv[word]); } - std::uint32_t asOpCodeHash(unsigned word); - spv::Decoration asDecoration(unsigned word) const { return spv::Decoration(spv[word]); } - unsigned asWordCount(unsigned word) const { return opWordCount(spv[word]); } - spv::Id asTypeConstId(unsigned word) const { return asId(word + (isTypeOp(asOpCode(word)) ? 1 : 2)); } - unsigned idPos(spv::Id id) const; - - static unsigned opWordCount(spirword_t data) { return data >> spv::WordCountShift; } - static spv::Op opOpCode(spirword_t data) { return spv::Op(data & spv::OpCodeMask); } - - // Header access & set methods - spirword_t magic() const { return spv[0]; } // return magic number - spirword_t bound() const { return spv[3]; } // return Id bound from header - spirword_t bound(spirword_t b) { return spv[3] = b; }; - spirword_t genmagic() const { return spv[2]; } // generator magic - spirword_t genmagic(spirword_t m) { return spv[2] = m; } - spirword_t schemaNum() const { return spv[4]; } // schema number from header - - // Mapping fns: get - spv::Id localId(spv::Id id) const { return idMapL[id]; } - - // Mapping fns: set - inline spv::Id localId(spv::Id id, spv::Id newId); - void countIds(spv::Id id); - - // Return next unused new local ID. - // NOTE: boost::dynamic_bitset would be more efficient due to find_next(), - // which std::vector doens't have. 
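The note above is about having to scan linearly for the next free ID because std::vector has no find_next(). A small sketch of that scan over a plain std::vector<bool>, illustrative only:

#include <cassert>
#include <vector>

static unsigned nextClearBit(const std::vector<bool>& used, unsigned start)
{
    unsigned id = start;
    while (id < used.size() && used[id]) // linear scan: no find_next() available
        ++id;
    return id; // either a clear bit, or one past the end (everything beyond is free)
}

int main()
{
    std::vector<bool> used = { true, true, false, true };
    assert(nextClearBit(used, 0) == 2);
    assert(nextClearBit(used, 3) == 4); // ran off the end
}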
- inline spv::Id nextUnusedId(spv::Id id); - - void buildLocalMaps(); - std::string literalString(unsigned word) const; // Return literal as a std::string - int literalStringWords(const std::string& str) const { return (int(str.size())+4)/4; } - - bool isNewIdMapped(spv::Id newId) const { return isMapped(newId); } - bool isOldIdUnmapped(spv::Id oldId) const { return localId(oldId) == unmapped; } - bool isOldIdUnused(spv::Id oldId) const { return localId(oldId) == unused; } - bool isOldIdMapped(spv::Id oldId) const { return !isOldIdUnused(oldId) && !isOldIdUnmapped(oldId); } - bool isFunction(spv::Id oldId) const { return fnPos.find(oldId) != fnPos.end(); } - - // bool matchType(const globaltypes_t& globalTypes, spv::Id lt, spv::Id gt) const; - // spv::Id findType(const globaltypes_t& globalTypes, spv::Id lt) const; - std::uint32_t hashType(unsigned typeStart) const; - - spirvbin_t& process(instfn_t, idfn_t, unsigned begin = 0, unsigned end = 0); - int processInstruction(unsigned word, instfn_t, idfn_t); - - void validate() const; - void mapTypeConst(); - void mapFnBodies(); - void optLoadStore(); - void dceFuncs(); - void dceVars(); - void dceTypes(); - void mapNames(); - void foldIds(); // fold IDs to smallest space - void forwardLoadStores(); // load store forwarding (EXPERIMENTAL) - void offsetIds(); // create relative offset IDs - - void applyMap(); // remap per local name map - void mapRemainder(); // map any IDs we haven't touched yet - void stripDebug(); // strip all debug info - void stripDeadRefs(); // strips debug info for now-dead references after DCE - void strip(); // remove debug symbols - - std::vector spv; // SPIR words - - namemap_t nameMap; // ID names from OpName - - // Since we want to also do binary ops, we can't use std::vector. we could use - // boost::dynamic_bitset, but we're trying to avoid a boost dependency. - typedef std::uint64_t bits_t; - std::vector mapped; // which new IDs have been mapped - static const int mBits = sizeof(bits_t) * 4; - - bool isMapped(spv::Id id) const { return id < maxMappedId() && ((mapped[id/mBits] & (1LL<<(id%mBits))) != 0); } - void setMapped(spv::Id id) { resizeMapped(id); mapped[id/mBits] |= (1LL<<(id%mBits)); } - void resizeMapped(spv::Id id) { if (id >= maxMappedId()) mapped.resize(id/mBits+1, 0); } - size_t maxMappedId() const { return mapped.size() * mBits; } - - // Add a strip range for a given instruction starting at 'start' - // Note: avoiding brace initializers to please older versions os MSVC. - void stripInst(unsigned start) { stripRange.push_back(range_t(start, start + asWordCount(start))); } - - // Function start and end. use unordered_map because we'll have - // many fewer functions than IDs. - std::unordered_map fnPos; - - // Which functions are called, anywhere in the module, with a call count - std::unordered_map fnCalls; - - posmap_t typeConstPos; // word positions that define types & consts (ordered) - posmap_rev_t idPosR; // reverse map from IDs to positions - typesize_map_t idTypeSizeMap; // maps each ID to its type size, if known. 
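The isMapped()/setMapped() helpers above pack one flag per ID into wide words and test it with a shift and a mask. A self-contained sketch of the same trick, simplified to use all 64 bits of each word (the declarations above divide the word differently), not part of this patch:

#include <cassert>
#include <cstdint>
#include <vector>

struct IdBitmap {
    static const unsigned bitsPerWord = 64;
    std::vector<std::uint64_t> words;

    void set(unsigned id)
    {
        if (id / bitsPerWord >= words.size())
            words.resize(id / bitsPerWord + 1, 0);
        words[id / bitsPerWord] |= std::uint64_t(1) << (id % bitsPerWord);
    }
    bool test(unsigned id) const
    {
        return id / bitsPerWord < words.size() &&
               (words[id / bitsPerWord] & (std::uint64_t(1) << (id % bitsPerWord))) != 0;
    }
};

int main()
{
    IdBitmap mapped;
    mapped.set(3);
    mapped.set(130);
    assert(mapped.test(3) && mapped.test(130) && !mapped.test(64));
}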
- - std::vector idMapL; // ID {M}ap from {L}ocal to {G}lobal IDs - - spv::Id entryPoint; // module entry point - spv::Id largestNewId; // biggest new ID we have mapped anything to - - // Sections of the binary to strip, given as [begin,end) - std::vector stripRange; - - // processing options: - std::uint32_t options; - int verbose; // verbosity level - - static errorfn_t errorHandler; - static logfn_t logHandler; -}; - -} // namespace SPV - -#endif // defined (use_cpp11) -#endif // SPIRVREMAPPER_H diff --git a/deps/glslang/SPIRV/SpvBuilder.cpp b/deps/glslang/SPIRV/SpvBuilder.cpp deleted file mode 100644 index 289d59af..00000000 --- a/deps/glslang/SPIRV/SpvBuilder.cpp +++ /dev/null @@ -1,2652 +0,0 @@ -// -// Copyright (C) 2014-2015 LunarG, Inc. -// Copyright (C) 2015-2016 Google, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. - -// -// Helper for making SPIR-V IR. Generally, this is documented in the header -// SpvBuilder.h. -// - -#include -#include - -#include -#include - -#include "SpvBuilder.h" - -#ifdef AMD_EXTENSIONS - #include "hex_float.h" -#endif - -#ifndef _WIN32 - #include -#endif - -namespace spv { - -Builder::Builder(unsigned int magicNumber, SpvBuildLogger* buildLogger) : - source(SourceLanguageUnknown), - sourceVersion(0), - sourceFileStringId(NoResult), - currentLine(0), - emitOpLines(false), - addressModel(AddressingModelLogical), - memoryModel(MemoryModelGLSL450), - builderNumber(magicNumber), - buildPoint(0), - uniqueId(0), - entryPointFunction(0), - generatingOpCodeForSpecConst(false), - logger(buildLogger) -{ - clearAccessChain(); -} - -Builder::~Builder() -{ -} - -Id Builder::import(const char* name) -{ - Instruction* import = new Instruction(getUniqueId(), NoType, OpExtInstImport); - import->addStringOperand(name); - - imports.push_back(std::unique_ptr(import)); - return import->getResultId(); -} - -// Emit an OpLine if we've been asked to emit OpLines and the line number -// has changed since the last time, and is a valid line number. 
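The rule stated in the comment above is a simple debounce: emit a new OpLine only when line tracking is enabled, the number is valid (non-zero), and it differs from the last one emitted. A tiny sketch of that behavior with a stand-in for addLine(), illustrative only:

#include <iostream>

struct LineTracker {
    bool emitOpLines = true;
    int  currentLine = 0;

    void setLine(int lineNum)
    {
        if (lineNum != 0 && lineNum != currentLine) {
            currentLine = lineNum;
            if (emitOpLines)
                std::cout << "OpLine " << currentLine << "\n"; // stand-in for addLine()
        }
    }
};

int main()
{
    LineTracker t;
    t.setLine(10); // emits
    t.setLine(10); // suppressed: unchanged
    t.setLine(0);  // suppressed: invalid line number
    t.setLine(11); // emits
}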
-void Builder::setLine(int lineNum) -{ - if (lineNum != 0 && lineNum != currentLine) { - currentLine = lineNum; - if (emitOpLines) - addLine(sourceFileStringId, currentLine, 0); - } -} - -void Builder::addLine(Id fileName, int lineNum, int column) -{ - Instruction* line = new Instruction(OpLine); - line->addIdOperand(fileName); - line->addImmediateOperand(lineNum); - line->addImmediateOperand(column); - buildPoint->addInstruction(std::unique_ptr(line)); -} - -// For creating new groupedTypes (will return old type if the requested one was already made). -Id Builder::makeVoidType() -{ - Instruction* type; - if (groupedTypes[OpTypeVoid].size() == 0) { - type = new Instruction(getUniqueId(), NoType, OpTypeVoid); - groupedTypes[OpTypeVoid].push_back(type); - constantsTypesGlobals.push_back(std::unique_ptr(type)); - module.mapInstruction(type); - } else - type = groupedTypes[OpTypeVoid].back(); - - return type->getResultId(); -} - -Id Builder::makeBoolType() -{ - Instruction* type; - if (groupedTypes[OpTypeBool].size() == 0) { - type = new Instruction(getUniqueId(), NoType, OpTypeBool); - groupedTypes[OpTypeBool].push_back(type); - constantsTypesGlobals.push_back(std::unique_ptr(type)); - module.mapInstruction(type); - } else - type = groupedTypes[OpTypeBool].back(); - - return type->getResultId(); -} - -Id Builder::makeSamplerType() -{ - Instruction* type; - if (groupedTypes[OpTypeSampler].size() == 0) { - type = new Instruction(getUniqueId(), NoType, OpTypeSampler); - groupedTypes[OpTypeSampler].push_back(type); - constantsTypesGlobals.push_back(std::unique_ptr(type)); - module.mapInstruction(type); - } else - type = groupedTypes[OpTypeSampler].back(); - - return type->getResultId(); -} - -Id Builder::makePointer(StorageClass storageClass, Id pointee) -{ - // try to find it - Instruction* type; - for (int t = 0; t < (int)groupedTypes[OpTypePointer].size(); ++t) { - type = groupedTypes[OpTypePointer][t]; - if (type->getImmediateOperand(0) == (unsigned)storageClass && - type->getIdOperand(1) == pointee) - return type->getResultId(); - } - - // not found, make it - type = new Instruction(getUniqueId(), NoType, OpTypePointer); - type->addImmediateOperand(storageClass); - type->addIdOperand(pointee); - groupedTypes[OpTypePointer].push_back(type); - constantsTypesGlobals.push_back(std::unique_ptr(type)); - module.mapInstruction(type); - - return type->getResultId(); -} - -Id Builder::makeIntegerType(int width, bool hasSign) -{ - // try to find it - Instruction* type; - for (int t = 0; t < (int)groupedTypes[OpTypeInt].size(); ++t) { - type = groupedTypes[OpTypeInt][t]; - if (type->getImmediateOperand(0) == (unsigned)width && - type->getImmediateOperand(1) == (hasSign ? 1u : 0u)) - return type->getResultId(); - } - - // not found, make it - type = new Instruction(getUniqueId(), NoType, OpTypeInt); - type->addImmediateOperand(width); - type->addImmediateOperand(hasSign ? 
1 : 0); - groupedTypes[OpTypeInt].push_back(type); - constantsTypesGlobals.push_back(std::unique_ptr(type)); - module.mapInstruction(type); - - // deal with capabilities - switch (width) { - case 16: - addCapability(CapabilityInt16); - break; - case 64: - addCapability(CapabilityInt64); - break; - default: - break; - } - - return type->getResultId(); -} - -Id Builder::makeFloatType(int width) -{ - // try to find it - Instruction* type; - for (int t = 0; t < (int)groupedTypes[OpTypeFloat].size(); ++t) { - type = groupedTypes[OpTypeFloat][t]; - if (type->getImmediateOperand(0) == (unsigned)width) - return type->getResultId(); - } - - // not found, make it - type = new Instruction(getUniqueId(), NoType, OpTypeFloat); - type->addImmediateOperand(width); - groupedTypes[OpTypeFloat].push_back(type); - constantsTypesGlobals.push_back(std::unique_ptr(type)); - module.mapInstruction(type); - - // deal with capabilities - switch (width) { - case 16: - addCapability(CapabilityFloat16); - break; - case 64: - addCapability(CapabilityFloat64); - break; - default: - break; - } - - return type->getResultId(); -} - -// Make a struct without checking for duplication. -// See makeStructResultType() for non-decorated structs -// needed as the result of some instructions, which does -// check for duplicates. -Id Builder::makeStructType(const std::vector& members, const char* name) -{ - // Don't look for previous one, because in the general case, - // structs can be duplicated except for decorations. - - // not found, make it - Instruction* type = new Instruction(getUniqueId(), NoType, OpTypeStruct); - for (int op = 0; op < (int)members.size(); ++op) - type->addIdOperand(members[op]); - groupedTypes[OpTypeStruct].push_back(type); - constantsTypesGlobals.push_back(std::unique_ptr(type)); - module.mapInstruction(type); - addName(type->getResultId(), name); - - return type->getResultId(); -} - -// Make a struct for the simple results of several instructions, -// checking for duplication. 
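The make*Type() helpers, including makeStructResultType() below, all follow the same lookup-or-create pattern: scan the already-built types for an identical one and only mint a new id on a miss. A standalone sketch of that pattern with a toy two-member type cache (names hypothetical, not the builder's real data structures):

#include <cassert>
#include <utility>
#include <vector>

using Id = unsigned;

struct TypeCache {
    Id nextId = 1;
    std::vector<std::pair<std::pair<Id, Id>, Id>> pairTypes; // (member0, member1) -> type id

    Id makePairType(Id m0, Id m1)
    {
        for (const auto& entry : pairTypes)          // try to find it
            if (entry.first == std::make_pair(m0, m1))
                return entry.second;
        Id id = nextId++;                            // not found, make it
        pairTypes.push_back({ { m0, m1 }, id });
        return id;
    }
};

int main()
{
    TypeCache cache;
    Id a = cache.makePairType(7, 8);
    Id b = cache.makePairType(7, 8); // deduplicated: same id returned
    Id c = cache.makePairType(8, 7); // different member order: new type
    assert(a == b && a != c);
}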
-Id Builder::makeStructResultType(Id type0, Id type1) -{ - // try to find it - Instruction* type; - for (int t = 0; t < (int)groupedTypes[OpTypeStruct].size(); ++t) { - type = groupedTypes[OpTypeStruct][t]; - if (type->getNumOperands() != 2) - continue; - if (type->getIdOperand(0) != type0 || - type->getIdOperand(1) != type1) - continue; - return type->getResultId(); - } - - // not found, make it - std::vector members; - members.push_back(type0); - members.push_back(type1); - - return makeStructType(members, "ResType"); -} - -Id Builder::makeVectorType(Id component, int size) -{ - // try to find it - Instruction* type; - for (int t = 0; t < (int)groupedTypes[OpTypeVector].size(); ++t) { - type = groupedTypes[OpTypeVector][t]; - if (type->getIdOperand(0) == component && - type->getImmediateOperand(1) == (unsigned)size) - return type->getResultId(); - } - - // not found, make it - type = new Instruction(getUniqueId(), NoType, OpTypeVector); - type->addIdOperand(component); - type->addImmediateOperand(size); - groupedTypes[OpTypeVector].push_back(type); - constantsTypesGlobals.push_back(std::unique_ptr(type)); - module.mapInstruction(type); - - return type->getResultId(); -} - -Id Builder::makeMatrixType(Id component, int cols, int rows) -{ - assert(cols <= maxMatrixSize && rows <= maxMatrixSize); - - Id column = makeVectorType(component, rows); - - // try to find it - Instruction* type; - for (int t = 0; t < (int)groupedTypes[OpTypeMatrix].size(); ++t) { - type = groupedTypes[OpTypeMatrix][t]; - if (type->getIdOperand(0) == column && - type->getImmediateOperand(1) == (unsigned)cols) - return type->getResultId(); - } - - // not found, make it - type = new Instruction(getUniqueId(), NoType, OpTypeMatrix); - type->addIdOperand(column); - type->addImmediateOperand(cols); - groupedTypes[OpTypeMatrix].push_back(type); - constantsTypesGlobals.push_back(std::unique_ptr(type)); - module.mapInstruction(type); - - return type->getResultId(); -} - -// TODO: performance: track arrays per stride -// If a stride is supplied (non-zero) make an array. -// If no stride (0), reuse previous array types. 
[Remainder of this removed-file hunk: the rest of the bundled glslang SPIR-V Builder (spv::Builder) implementation is deleted, comprising the remaining type builders (makeArrayType, makeRuntimeArray, makeFunctionType, makeImageType, makeSampledImageType), type/constant queries (getDerefTypeId, getMostBasicTypeClass, getNumTypeConstituents, getScalarTypeId, getContainedTypeId, findScalarConstant, isConstantOpCode, isSpecConstantOpCode), scalar and composite constant builders (makeBoolConstant, makeIntConstant, makeInt64Constant, makeFloatConstant, makeDoubleConstant, makeFloat16Constant, findCompositeConstant, makeCompositeConstant), name and decoration helpers (addEntryPoint, addExecutionMode, addName, addMemberName, addDecoration, addMemberDecoration), function and instruction emitters (makeEntryPoint, makeFunctionEntry, makeReturn, leaveFunction, makeDiscard, createVariable, createUndefined, createStore, createLoad, createAccessChain, createArrayLength, createCompositeExtract/Insert, createVectorExtractDynamic/InsertDynamic, createNoResultOp, createControlBarrier, createMemoryBarrier, createUnaryOp, createBinOp, createTriOp, createOp, createSpecConstantOp, createFunctionCall), swizzle, constructor, and texture helpers (createRvalueSwizzle, createLvalueSwizzle, promoteScalar, smearScalar, createBuiltinCall, createTextureCall, createTextureQueryCall, createCompositeCompare, createCompositeConstruct, createConstructor, createMatrixConstructor), structured control flow (Builder::If, makeSwitch, addSwitchBreak, nextSwitchSegment, endSwitch, makeNewBlock, makeNewLoop, createLoopContinue, createLoopExit, closeLoop), the access-chain machinery (clearAccessChain, accessChainPushSwizzle, accessChainStore, accessChainLoad, accessChainGetLValue, accessChainGetInferredType, collapseAccessChain), eliminateDeadDecorations, and the SPIR-V binary dump() serializer.]
-void Builder::simplifyAccessChainSwizzle() -{ - // If the swizzle has fewer components than the vector, it is subsetting, and must stay - // to preserve that fact. - if (getNumTypeComponents(accessChain.preSwizzleBaseType) > (int)accessChain.swizzle.size()) - return; - - // if components are out of order, it is a swizzle - for (unsigned int i = 0; i < accessChain.swizzle.size(); ++i) { - if (i != accessChain.swizzle[i]) - return; - } - - // otherwise, there is no need to track this swizzle - accessChain.swizzle.clear(); - if (accessChain.component == NoResult) - accessChain.preSwizzleBaseType = NoType; -} - -// To the extent any swizzling can become part of the chain -// of accesses instead of a post operation, make it so. -// If 'dynamic' is true, include transferring a non-static component index, -// otherwise, only transfer static indexes. -// -// Also, Boolean vectors are likely to be special. While -// for external storage, they should only be integer types, -// function-local bool vectors could use sub-word indexing, -// so keep that as a separate Insert/Extract on a loaded vector. -void Builder::transferAccessChainSwizzle(bool dynamic) -{ - // too complex? - if (accessChain.swizzle.size() > 1) - return; - - // non existent? - if (accessChain.swizzle.size() == 0 && accessChain.component == NoResult) - return; - - // single component... - - // skip doing it for Boolean vectors - if (isBoolType(getContainedTypeId(accessChain.preSwizzleBaseType))) - return; - - if (accessChain.swizzle.size() == 1) { - // handle static component - accessChain.indexChain.push_back(makeUintConstant(accessChain.swizzle.front())); - accessChain.swizzle.clear(); - // note, the only valid remaining dynamic access would be to this one - // component, so don't bother even looking at accessChain.component - accessChain.preSwizzleBaseType = NoType; - accessChain.component = NoResult; - } else if (dynamic && accessChain.component != NoResult) { - // handle dynamic component - accessChain.indexChain.push_back(accessChain.component); - accessChain.preSwizzleBaseType = NoType; - accessChain.component = NoResult; - } -} - -// Utility method for creating a new block and setting the insert point to -// be in it. This is useful for flow-control operations that need a "dummy" -// block proceeding them (e.g. instructions after a discard, etc). 
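A note on the swizzle handling removed above: accessChainPushSwizzle composes a newly pushed swizzle through whatever swizzle is already pending, and simplifyAccessChainSwizzle then drops the result when it is a full identity. A minimal standalone sketch of that index arithmetic (the helper names here are illustrative, not part of glslang's API):

    #include <cassert>
    #include <vector>

    // Compose: apply 'next' on top of 'prev' ('prev' maps onto the base vector).
    std::vector<unsigned> composeSwizzle(const std::vector<unsigned>& prev,
                                         const std::vector<unsigned>& next)
    {
        std::vector<unsigned> result;
        for (unsigned c : next) {
            assert(c < prev.size());
            result.push_back(prev[c]);   // look the new component up through the old swizzle
        }
        return result;
    }

    // An identity swizzle covering the whole vector carries no information.
    bool isRedundantSwizzle(const std::vector<unsigned>& s, unsigned numComponents)
    {
        if (s.size() < numComponents)
            return false;                // subsetting: must be kept
        for (unsigned i = 0; i < s.size(); ++i)
            if (s[i] != i)
                return false;            // reordering: must be kept
        return true;
    }

    int main()
    {
        // v.zyxw.yx on a vec4: {2,1,0,3} followed by {1,0} collapses to {1,2}.
        std::vector<unsigned> composed = composeSwizzle({2, 1, 0, 3}, {1, 0});
        assert(composed == (std::vector<unsigned>{1, 2}));
        assert(isRedundantSwizzle({0, 1, 2, 3}, 4));    // v.xyzw on a vec4: droppable
        assert(!isRedundantSwizzle({0, 1}, 4));         // v.xy on a vec4: a real subset
        return 0;
    }

Subsetting swizzles (fewer components than the vector) and reordering swizzles survive, which matches the two early returns in simplifyAccessChainSwizzle.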
-void Builder::createAndSetNoPredecessorBlock(const char* /*name*/) -{ - Block* block = new Block(getUniqueId(), buildPoint->getParent()); - block->setUnreachable(); - buildPoint->getParent().addBlock(block); - setBuildPoint(block); - - // if (name) - // addName(block->getId(), name); -} - -// Comments in header -void Builder::createBranch(Block* block) -{ - Instruction* branch = new Instruction(OpBranch); - branch->addIdOperand(block->getId()); - buildPoint->addInstruction(std::unique_ptr(branch)); - block->addPredecessor(buildPoint); -} - -void Builder::createSelectionMerge(Block* mergeBlock, unsigned int control) -{ - Instruction* merge = new Instruction(OpSelectionMerge); - merge->addIdOperand(mergeBlock->getId()); - merge->addImmediateOperand(control); - buildPoint->addInstruction(std::unique_ptr(merge)); -} - -void Builder::createLoopMerge(Block* mergeBlock, Block* continueBlock, unsigned int control) -{ - Instruction* merge = new Instruction(OpLoopMerge); - merge->addIdOperand(mergeBlock->getId()); - merge->addIdOperand(continueBlock->getId()); - merge->addImmediateOperand(control); - buildPoint->addInstruction(std::unique_ptr(merge)); -} - -void Builder::createConditionalBranch(Id condition, Block* thenBlock, Block* elseBlock) -{ - Instruction* branch = new Instruction(OpBranchConditional); - branch->addIdOperand(condition); - branch->addIdOperand(thenBlock->getId()); - branch->addIdOperand(elseBlock->getId()); - buildPoint->addInstruction(std::unique_ptr(branch)); - thenBlock->addPredecessor(buildPoint); - elseBlock->addPredecessor(buildPoint); -} - -// OpSource -// [OpSourceContinued] -// ... -void Builder::dumpSourceInstructions(std::vector& out) const -{ - const int maxWordCount = 0xFFFF; - const int opSourceWordCount = 4; - const int nonNullBytesPerInstruction = 4 * (maxWordCount - opSourceWordCount) - 1; - - if (source != SourceLanguageUnknown) { - // OpSource Language Version File Source - Instruction sourceInst(NoResult, NoType, OpSource); - sourceInst.addImmediateOperand(source); - sourceInst.addImmediateOperand(sourceVersion); - // File operand - if (sourceFileStringId != NoResult) { - sourceInst.addIdOperand(sourceFileStringId); - // Source operand - if (sourceText.size() > 0) { - int nextByte = 0; - std::string subString; - while ((int)sourceText.size() - nextByte > 0) { - subString = sourceText.substr(nextByte, nonNullBytesPerInstruction); - if (nextByte == 0) { - // OpSource - sourceInst.addStringOperand(subString.c_str()); - sourceInst.dump(out); - } else { - // OpSourcContinued - Instruction sourceContinuedInst(OpSourceContinued); - sourceContinuedInst.addStringOperand(subString.c_str()); - sourceContinuedInst.dump(out); - } - nextByte += nonNullBytesPerInstruction; - } - } else - sourceInst.dump(out); - } else - sourceInst.dump(out); - } -} - -void Builder::dumpInstructions(std::vector& out, const std::vector >& instructions) const -{ - for (int i = 0; i < (int)instructions.size(); ++i) { - instructions[i]->dump(out); - } -} - -void Builder::dumpModuleProcesses(std::vector& out) const -{ - for (int i = 0; i < (int)moduleProcesses.size(); ++i) { - // TODO: switch this out for the 1.1 headers - const spv::Op OpModuleProcessed = (spv::Op)330; - Instruction moduleProcessed(OpModuleProcessed); - moduleProcessed.addStringOperand(moduleProcesses[i]); - moduleProcessed.dump(out); - } -} - -}; // end spv namespace diff --git a/deps/glslang/SPIRV/SpvBuilder.h b/deps/glslang/SPIRV/SpvBuilder.h deleted file mode 100644 index 59b1da2e..00000000 --- 
a/deps/glslang/SPIRV/SpvBuilder.h +++ /dev/null @@ -1,636 +0,0 @@ -// -// Copyright (C) 2014-2015 LunarG, Inc. -// Copyright (C) 2015-2016 Google, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. - -// -// "Builder" is an interface to fully build SPIR-V IR. Allocate one of -// these to build (a thread safe) internal SPIR-V representation (IR), -// and then dump it as a binary stream according to the SPIR-V specification. -// -// A Builder has a 1:1 relationship with a SPIR-V module. -// - -#pragma once -#ifndef SpvBuilder_H -#define SpvBuilder_H - -#include "Logger.h" -#include "spirv.hpp" -#include "spvIR.h" - -#include -#include -#include -#include -#include -#include - -namespace spv { - -class Builder { -public: - Builder(unsigned int userNumber, SpvBuildLogger* logger); - virtual ~Builder(); - - static const int maxMatrixSize = 4; - - void setSource(spv::SourceLanguage lang, int version) - { - source = lang; - sourceVersion = version; - } - void setSourceFile(const std::string& file) - { - Instruction* fileString = new Instruction(getUniqueId(), NoType, OpString); - fileString->addStringOperand(file.c_str()); - sourceFileStringId = fileString->getResultId(); - strings.push_back(std::unique_ptr(fileString)); - } - void setSourceText(const std::string& text) { sourceText = text; } - void addSourceExtension(const char* ext) { sourceExtensions.push_back(ext); } - void addModuleProcessed(const std::string& p) { moduleProcesses.push_back(p.c_str()); } - void setEmitOpLines() { emitOpLines = true; } - void addExtension(const char* ext) { extensions.insert(ext); } - Id import(const char*); - void setMemoryModel(spv::AddressingModel addr, spv::MemoryModel mem) - { - addressModel = addr; - memoryModel = mem; - } - - void addCapability(spv::Capability cap) { capabilities.insert(cap); } - - // To get a new for anything needing a new one. 
- Id getUniqueId() { return ++uniqueId; } - - // To get a set of new s, e.g., for a set of function parameters - Id getUniqueIds(int numIds) - { - Id id = uniqueId + 1; - uniqueId += numIds; - return id; - } - - // Log the current line, and if different than the last one, - // issue a new OpLine, using the current file name. - void setLine(int line); - // Low-level OpLine. See setLine() for a layered helper. - void addLine(Id fileName, int line, int column); - - // For creating new types (will return old type if the requested one was already made). - Id makeVoidType(); - Id makeBoolType(); - Id makePointer(StorageClass, Id type); - Id makeIntegerType(int width, bool hasSign); // generic - Id makeIntType(int width) { return makeIntegerType(width, true); } - Id makeUintType(int width) { return makeIntegerType(width, false); } - Id makeFloatType(int width); - Id makeStructType(const std::vector& members, const char*); - Id makeStructResultType(Id type0, Id type1); - Id makeVectorType(Id component, int size); - Id makeMatrixType(Id component, int cols, int rows); - Id makeArrayType(Id element, Id sizeId, int stride); // 0 stride means no stride decoration - Id makeRuntimeArray(Id element); - Id makeFunctionType(Id returnType, const std::vector& paramTypes); - Id makeImageType(Id sampledType, Dim, bool depth, bool arrayed, bool ms, unsigned sampled, ImageFormat format); - Id makeSamplerType(); - Id makeSampledImageType(Id imageType); - - // For querying about types. - Id getTypeId(Id resultId) const { return module.getTypeId(resultId); } - Id getDerefTypeId(Id resultId) const; - Op getOpCode(Id id) const { return module.getInstruction(id)->getOpCode(); } - Op getTypeClass(Id typeId) const { return getOpCode(typeId); } - Op getMostBasicTypeClass(Id typeId) const; - int getNumComponents(Id resultId) const { return getNumTypeComponents(getTypeId(resultId)); } - int getNumTypeConstituents(Id typeId) const; - int getNumTypeComponents(Id typeId) const { return getNumTypeConstituents(typeId); } - Id getScalarTypeId(Id typeId) const; - Id getContainedTypeId(Id typeId) const; - Id getContainedTypeId(Id typeId, int) const; - StorageClass getTypeStorageClass(Id typeId) const { return module.getStorageClass(typeId); } - ImageFormat getImageTypeFormat(Id typeId) const { return (ImageFormat)module.getInstruction(typeId)->getImmediateOperand(6); } - - bool isPointer(Id resultId) const { return isPointerType(getTypeId(resultId)); } - bool isScalar(Id resultId) const { return isScalarType(getTypeId(resultId)); } - bool isVector(Id resultId) const { return isVectorType(getTypeId(resultId)); } - bool isMatrix(Id resultId) const { return isMatrixType(getTypeId(resultId)); } - bool isAggregate(Id resultId) const { return isAggregateType(getTypeId(resultId)); } - bool isSampledImage(Id resultId) const { return isSampledImageType(getTypeId(resultId)); } - - bool isBoolType(Id typeId) const { return groupedTypes[OpTypeBool].size() > 0 && typeId == groupedTypes[OpTypeBool].back()->getResultId(); } - bool isIntType(Id typeId) const { return getTypeClass(typeId) == OpTypeInt && module.getInstruction(typeId)->getImmediateOperand(1) != 0; } - bool isUintType(Id typeId) const { return getTypeClass(typeId) == OpTypeInt && module.getInstruction(typeId)->getImmediateOperand(1) == 0; } - bool isFloatType(Id typeId) const { return getTypeClass(typeId) == OpTypeFloat; } - bool isPointerType(Id typeId) const { return getTypeClass(typeId) == OpTypePointer; } - bool isScalarType(Id typeId) const { return getTypeClass(typeId) == 
OpTypeFloat || getTypeClass(typeId) == OpTypeInt || getTypeClass(typeId) == OpTypeBool; } - bool isVectorType(Id typeId) const { return getTypeClass(typeId) == OpTypeVector; } - bool isMatrixType(Id typeId) const { return getTypeClass(typeId) == OpTypeMatrix; } - bool isStructType(Id typeId) const { return getTypeClass(typeId) == OpTypeStruct; } - bool isArrayType(Id typeId) const { return getTypeClass(typeId) == OpTypeArray; } - bool isAggregateType(Id typeId) const { return isArrayType(typeId) || isStructType(typeId); } - bool isImageType(Id typeId) const { return getTypeClass(typeId) == OpTypeImage; } - bool isSamplerType(Id typeId) const { return getTypeClass(typeId) == OpTypeSampler; } - bool isSampledImageType(Id typeId) const { return getTypeClass(typeId) == OpTypeSampledImage; } - - bool isConstantOpCode(Op opcode) const; - bool isSpecConstantOpCode(Op opcode) const; - bool isConstant(Id resultId) const { return isConstantOpCode(getOpCode(resultId)); } - bool isConstantScalar(Id resultId) const { return getOpCode(resultId) == OpConstant; } - bool isSpecConstant(Id resultId) const { return isSpecConstantOpCode(getOpCode(resultId)); } - unsigned int getConstantScalar(Id resultId) const { return module.getInstruction(resultId)->getImmediateOperand(0); } - StorageClass getStorageClass(Id resultId) const { return getTypeStorageClass(getTypeId(resultId)); } - - int getScalarTypeWidth(Id typeId) const - { - Id scalarTypeId = getScalarTypeId(typeId); - assert(getTypeClass(scalarTypeId) == OpTypeInt || getTypeClass(scalarTypeId) == OpTypeFloat); - return module.getInstruction(scalarTypeId)->getImmediateOperand(0); - } - - int getTypeNumColumns(Id typeId) const - { - assert(isMatrixType(typeId)); - return getNumTypeConstituents(typeId); - } - int getNumColumns(Id resultId) const { return getTypeNumColumns(getTypeId(resultId)); } - int getTypeNumRows(Id typeId) const - { - assert(isMatrixType(typeId)); - return getNumTypeComponents(getContainedTypeId(typeId)); - } - int getNumRows(Id resultId) const { return getTypeNumRows(getTypeId(resultId)); } - - Dim getTypeDimensionality(Id typeId) const - { - assert(isImageType(typeId)); - return (Dim)module.getInstruction(typeId)->getImmediateOperand(1); - } - Id getImageType(Id resultId) const - { - Id typeId = getTypeId(resultId); - assert(isImageType(typeId) || isSampledImageType(typeId)); - return isSampledImageType(typeId) ? module.getInstruction(typeId)->getIdOperand(0) : typeId; - } - bool isArrayedImageType(Id typeId) const - { - assert(isImageType(typeId)); - return module.getInstruction(typeId)->getImmediateOperand(3) != 0; - } - - // For making new constants (will return old constant if the requested one was already made). 
- Id makeBoolConstant(bool b, bool specConstant = false); - Id makeIntConstant(int i, bool specConstant = false) { return makeIntConstant(makeIntType(32), (unsigned)i, specConstant); } - Id makeUintConstant(unsigned u, bool specConstant = false) { return makeIntConstant(makeUintType(32), u, specConstant); } - Id makeInt64Constant(long long i, bool specConstant = false) { return makeInt64Constant(makeIntType(64), (unsigned long long)i, specConstant); } - Id makeUint64Constant(unsigned long long u, bool specConstant = false) { return makeInt64Constant(makeUintType(64), u, specConstant); } -#ifdef AMD_EXTENSIONS - Id makeInt16Constant(short i, bool specConstant = false) { return makeIntConstant(makeIntType(16), (unsigned)((unsigned short)i), specConstant); } - Id makeUint16Constant(unsigned short u, bool specConstant = false) { return makeIntConstant(makeUintType(16), (unsigned)u, specConstant); } -#endif - Id makeFloatConstant(float f, bool specConstant = false); - Id makeDoubleConstant(double d, bool specConstant = false); -#ifdef AMD_EXTENSIONS - Id makeFloat16Constant(float f16, bool specConstant = false); -#endif - - // Turn the array of constants into a proper spv constant of the requested type. - Id makeCompositeConstant(Id type, const std::vector& comps, bool specConst = false); - - // Methods for adding information outside the CFG. - Instruction* addEntryPoint(ExecutionModel, Function*, const char* name); - void addExecutionMode(Function*, ExecutionMode mode, int value1 = -1, int value2 = -1, int value3 = -1); - void addName(Id, const char* name); - void addMemberName(Id, int member, const char* name); - void addDecoration(Id, Decoration, int num = -1); - void addMemberDecoration(Id, unsigned int member, Decoration, int num = -1); - - // At the end of what block do the next create*() instructions go? - void setBuildPoint(Block* bp) { buildPoint = bp; } - Block* getBuildPoint() const { return buildPoint; } - - // Make the entry-point function. The returned pointer is only valid - // for the lifetime of this builder. - Function* makeEntryPoint(const char*); - - // Make a shader-style function, and create its entry block if entry is non-zero. - // Return the function, pass back the entry. - // The returned pointer is only valid for the lifetime of this builder. - Function* makeFunctionEntry(Decoration precision, Id returnType, const char* name, const std::vector& paramTypes, - const std::vector>& precisions, Block **entry = 0); - - // Create a return. An 'implicit' return is one not appearing in the source - // code. In the case of an implicit return, no post-return block is inserted. - void makeReturn(bool implicit, Id retVal = 0); - - // Generate all the code needed to finish up a function. - void leaveFunction(); - - // Create a discard. - void makeDiscard(); - - // Create a global or function local or IO variable. - Id createVariable(StorageClass, Id type, const char* name = 0); - - // Create an intermediate with an undefined value. 
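The 16-bit constant helpers above widen through an intermediate unsigned short, (unsigned)((unsigned short)i). That double cast is what keeps a negative short as a 16-bit pattern; a quick standalone check of the difference:

    #include <cassert>

    int main()
    {
        short i = -1;
        assert((unsigned)((unsigned short)i) == 0xFFFFu);   // zero-extends: only the low 16 bits survive
        assert((unsigned)i == 0xFFFFFFFFu);                 // a direct cast would sign-extend instead
        return 0;
    }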
- Id createUndefined(Id type); - - // Store into an Id and return the l-value - void createStore(Id rValue, Id lValue); - - // Load from an Id and return it - Id createLoad(Id lValue); - - // Create an OpAccessChain instruction - Id createAccessChain(StorageClass, Id base, const std::vector& offsets); - - // Create an OpArrayLength instruction - Id createArrayLength(Id base, unsigned int member); - - // Create an OpCompositeExtract instruction - Id createCompositeExtract(Id composite, Id typeId, unsigned index); - Id createCompositeExtract(Id composite, Id typeId, const std::vector& indexes); - Id createCompositeInsert(Id object, Id composite, Id typeId, unsigned index); - Id createCompositeInsert(Id object, Id composite, Id typeId, const std::vector& indexes); - - Id createVectorExtractDynamic(Id vector, Id typeId, Id componentIndex); - Id createVectorInsertDynamic(Id vector, Id typeId, Id component, Id componentIndex); - - void createNoResultOp(Op); - void createNoResultOp(Op, Id operand); - void createNoResultOp(Op, const std::vector& operands); - void createControlBarrier(Scope execution, Scope memory, MemorySemanticsMask); - void createMemoryBarrier(unsigned executionScope, unsigned memorySemantics); - Id createUnaryOp(Op, Id typeId, Id operand); - Id createBinOp(Op, Id typeId, Id operand1, Id operand2); - Id createTriOp(Op, Id typeId, Id operand1, Id operand2, Id operand3); - Id createOp(Op, Id typeId, const std::vector& operands); - Id createFunctionCall(spv::Function*, const std::vector&); - Id createSpecConstantOp(Op, Id typeId, const std::vector& operands, const std::vector& literals); - - // Take an rvalue (source) and a set of channels to extract from it to - // make a new rvalue, which is returned. - Id createRvalueSwizzle(Decoration precision, Id typeId, Id source, const std::vector& channels); - - // Take a copy of an lvalue (target) and a source of components, and set the - // source components into the lvalue where the 'channels' say to put them. - // An updated version of the target is returned. - // (No true lvalue or stores are used.) - Id createLvalueSwizzle(Id typeId, Id target, Id source, const std::vector& channels); - - // If both the id and precision are valid, the id - // gets tagged with the requested precision. - // The passed in id is always the returned id, to simplify use patterns. - Id setPrecision(Id id, Decoration precision) - { - if (precision != NoPrecision && id != NoResult) - addDecoration(id, precision); - - return id; - } - - // Can smear a scalar to a vector for the following forms: - // - promoteScalar(scalar, vector) // smear scalar to width of vector - // - promoteScalar(vector, scalar) // smear scalar to width of vector - // - promoteScalar(pointer, scalar) // smear scalar to width of what pointer points to - // - promoteScalar(scalar, scalar) // do nothing - // Other forms are not allowed. - // - // Generally, the type of 'scalar' does not need to be the same type as the components in 'vector'. - // The type of the created vector is a vector of components of the same type as the scalar. - // - // Note: One of the arguments will change, with the result coming back that way rather than - // through the return value. - void promoteScalar(Decoration precision, Id& left, Id& right); - - // Make a value by smearing the scalar to fill the type. - // vectorType should be the correct type for making a vector of scalarVal. - // (No conversions are done.) 
- Id smearScalar(Decoration precision, Id scalarVal, Id vectorType); - - // Create a call to a built-in function. - Id createBuiltinCall(Id resultType, Id builtins, int entryPoint, const std::vector& args); - - // List of parameters used to create a texture operation - struct TextureParameters { - Id sampler; - Id coords; - Id bias; - Id lod; - Id Dref; - Id offset; - Id offsets; - Id gradX; - Id gradY; - Id sample; - Id component; - Id texelOut; - Id lodClamp; - }; - - // Select the correct texture operation based on all inputs, and emit the correct instruction - Id createTextureCall(Decoration precision, Id resultType, bool sparse, bool fetch, bool proj, bool gather, bool noImplicit, const TextureParameters&); - - // Emit the OpTextureQuery* instruction that was passed in. - // Figure out the right return value and type, and return it. - Id createTextureQueryCall(Op, const TextureParameters&, bool isUnsignedResult); - - Id createSamplePositionCall(Decoration precision, Id, Id); - - Id createBitFieldExtractCall(Decoration precision, Id, Id, Id, bool isSigned); - Id createBitFieldInsertCall(Decoration precision, Id, Id, Id, Id); - - // Reduction comparison for composites: For equal and not-equal resulting in a scalar. - Id createCompositeCompare(Decoration precision, Id, Id, bool /* true if for equal, false if for not-equal */); - - // OpCompositeConstruct - Id createCompositeConstruct(Id typeId, const std::vector& constituents); - - // vector or scalar constructor - Id createConstructor(Decoration precision, const std::vector& sources, Id resultTypeId); - - // matrix constructor - Id createMatrixConstructor(Decoration precision, const std::vector& sources, Id constructee); - - // Helper to use for building nested control flow with if-then-else. - class If { - public: - If(Id condition, unsigned int ctrl, Builder& builder); - ~If() {} - - void makeBeginElse(); - void makeEndIf(); - - private: - If(const If&); - If& operator=(If&); - - Builder& builder; - Id condition; - unsigned int control; - Function* function; - Block* headerBlock; - Block* thenBlock; - Block* elseBlock; - Block* mergeBlock; - }; - - // Make a switch statement. A switch has 'numSegments' of pieces of code, not containing - // any case/default labels, all separated by one or more case/default labels. Each possible - // case value v is a jump to the caseValues[v] segment. The defaultSegment is also in this - // number space. How to compute the value is given by 'condition', as in switch(condition). - // - // The SPIR-V Builder will maintain the stack of post-switch merge blocks for nested switches. - // - // Use a defaultSegment < 0 if there is no default segment (to branch to post switch). - // - // Returns the right set of basic blocks to start each code segment with, so that the caller's - // recursion stack can hold the memory for it. - // - void makeSwitch(Id condition, unsigned int control, int numSegments, const std::vector& caseValues, - const std::vector& valueToSegment, int defaultSegment, std::vector& segmentBB); // return argument - - // Add a branch to the innermost switch's merge block. - void addSwitchBreak(); - - // Move to the next code segment, passing in the return argument in makeSwitch() - void nextSwitchSegment(std::vector& segmentBB, int segment); - - // Finish off the innermost switch. 
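The makeSwitch comment above is easier to follow with concrete arguments. For a GLSL switch (x) { case 0: A; case 2: B; default: C; } one plausible set of arguments looks like the following; this is purely an illustration of how the parameters relate to each other, not an actual call site from this code:

    #include <vector>

    int main()
    {
        // switch (x) { case 0: A; break; case 2: B; break; default: C; }
        int numSegments = 3;                          // segments A, B, C in source order
        std::vector<int> caseValues     = {0, 2};     // the literal case labels
        std::vector<int> valueToSegment = {0, 1};     // caseValues[0] -> segment A, caseValues[1] -> segment B
        int defaultSegment = 2;                       // default falls into segment C; pass < 0 for "no default"
        // builder.makeSwitch(conditionId, control, numSegments, caseValues,
        //                    valueToSegment, defaultSegment, segmentBB);  // sketch of the call shape only
        (void)numSegments; (void)caseValues; (void)valueToSegment; (void)defaultSegment;
        return 0;
    }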
- void endSwitch(std::vector& segmentBB); - - struct LoopBlocks { - LoopBlocks(Block& head, Block& body, Block& merge, Block& continue_target) : - head(head), body(body), merge(merge), continue_target(continue_target) { } - Block &head, &body, &merge, &continue_target; - private: - LoopBlocks(); - LoopBlocks& operator=(const LoopBlocks&); - }; - - // Start a new loop and prepare the builder to generate code for it. Until - // closeLoop() is called for this loop, createLoopContinue() and - // createLoopExit() will target its corresponding blocks. - LoopBlocks& makeNewLoop(); - - // Create a new block in the function containing the build point. Memory is - // owned by the function object. - Block& makeNewBlock(); - - // Add a branch to the continue_target of the current (innermost) loop. - void createLoopContinue(); - - // Add an exit (e.g. "break") from the innermost loop that we're currently - // in. - void createLoopExit(); - - // Close the innermost loop that you're in - void closeLoop(); - - // - // Access chain design for an R-Value vs. L-Value: - // - // There is a single access chain the builder is building at - // any particular time. Such a chain can be used to either to a load or - // a store, when desired. - // - // Expressions can be r-values, l-values, or both, or only r-values: - // a[b.c].d = .... // l-value - // ... = a[b.c].d; // r-value, that also looks like an l-value - // ++a[b.c].d; // r-value and l-value - // (x + y)[2]; // r-value only, can't possibly be l-value - // - // Computing an r-value means generating code. Hence, - // r-values should only be computed when they are needed, not speculatively. - // - // Computing an l-value means saving away information for later use in the compiler, - // no code is generated until the l-value is later dereferenced. It is okay - // to speculatively generate an l-value, just not okay to speculatively dereference it. - // - // The base of the access chain (the left-most variable or expression - // from which everything is based) can be set either as an l-value - // or as an r-value. Most efficient would be to set an l-value if one - // is available. If an expression was evaluated, the resulting r-value - // can be set as the chain base. - // - // The users of this single access chain can save and restore if they - // want to nest or manage multiple chains. 
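The design notes above close by saying that users of the single active access chain can save and restore it in order to nest or manage multiple chains. A minimal stand-in for that discipline, reusing the getAccessChain/setAccessChain/clearAccessChain names the header declares but none of its real machinery:

    #include <cassert>
    #include <vector>

    // Illustration-only stand-ins; not glslang's Builder or AccessChain.
    struct AccessChain {
        int base = 0;
        std::vector<int> indexChain;
    };

    struct MiniBuilder {
        AccessChain accessChain;
        AccessChain getAccessChain() const              { return accessChain; }
        void setAccessChain(const AccessChain& chain)   { accessChain = chain; }
        void clearAccessChain()                         { accessChain = AccessChain(); }
    };

    // Evaluating a nested sub-expression (say, the index in a[f(i)]) needs the
    // active chain for itself, so the caller saves it first and restores it after.
    int evaluateIndexSubexpression(MiniBuilder& b)
    {
        AccessChain saved = b.getAccessChain();   // save the caller's chain
        b.clearAccessChain();
        // ... the sub-expression would build and collapse its own chain here ...
        b.setAccessChain(saved);                  // restore before returning
        return 7;                                 // pretend the index evaluated to 7
    }

    int main()
    {
        MiniBuilder b;
        b.accessChain.base = 42;                          // chain currently points at 'a'
        int idx = evaluateIndexSubexpression(b);          // leaves the outer chain intact
        b.accessChain.indexChain.push_back(idx);          // now models a[7]
        assert(b.accessChain.base == 42 && b.accessChain.indexChain.size() == 1);
        return 0;
    }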
- // - - struct AccessChain { - Id base; // for l-values, pointer to the base object, for r-values, the base object - std::vector indexChain; - Id instr; // cache the instruction that generates this access chain - std::vector swizzle; // each std::vector element selects the next GLSL component number - Id component; // a dynamic component index, can coexist with a swizzle, done after the swizzle, NoResult if not present - Id preSwizzleBaseType; // dereferenced type, before swizzle or component is applied; NoType unless a swizzle or component is present - bool isRValue; // true if 'base' is an r-value, otherwise, base is an l-value - }; - - // - // the SPIR-V builder maintains a single active chain that - // the following methods operate on - // - - // for external save and restore - AccessChain getAccessChain() { return accessChain; } - void setAccessChain(AccessChain newChain) { accessChain = newChain; } - - // clear accessChain - void clearAccessChain(); - - // set new base as an l-value base - void setAccessChainLValue(Id lValue) - { - assert(isPointer(lValue)); - accessChain.base = lValue; - } - - // set new base value as an r-value - void setAccessChainRValue(Id rValue) - { - accessChain.isRValue = true; - accessChain.base = rValue; - } - - // push offset onto the end of the chain - void accessChainPush(Id offset) - { - accessChain.indexChain.push_back(offset); - } - - // push new swizzle onto the end of any existing swizzle, merging into a single swizzle - void accessChainPushSwizzle(std::vector& swizzle, Id preSwizzleBaseType); - - // push a variable component selection onto the access chain; supporting only one, so unsided - void accessChainPushComponent(Id component, Id preSwizzleBaseType) - { - accessChain.component = component; - if (accessChain.preSwizzleBaseType == NoType) - accessChain.preSwizzleBaseType = preSwizzleBaseType; - } - - // use accessChain and swizzle to store value - void accessChainStore(Id rvalue); - - // use accessChain and swizzle to load an r-value - Id accessChainLoad(Decoration precision, Id ResultType); - - // get the direct pointer for an l-value - Id accessChainGetLValue(); - - // Get the inferred SPIR-V type of the result of the current access chain, - // based on the type of the base and the chain of dereferences. - Id accessChainGetInferredType(); - - // Remove OpDecorate instructions whose operands are defined in unreachable - // blocks. - void eliminateDeadDecorations(); - void dump(std::vector&) const; - - void createBranch(Block* block); - void createConditionalBranch(Id condition, Block* thenBlock, Block* elseBlock); - void createLoopMerge(Block* mergeBlock, Block* continueBlock, unsigned int control); - - // Sets to generate opcode for specialization constants. - void setToSpecConstCodeGenMode() { generatingOpCodeForSpecConst = true; } - // Sets to generate opcode for non-specialization constants (normal mode). - void setToNormalCodeGenMode() { generatingOpCodeForSpecConst = false; } - // Check if the builder is generating code for spec constants. 
- bool isInSpecConstCodeGenMode() { return generatingOpCodeForSpecConst; } - - protected: - Id makeIntConstant(Id typeId, unsigned value, bool specConstant); - Id makeInt64Constant(Id typeId, unsigned long long value, bool specConstant); - Id findScalarConstant(Op typeClass, Op opcode, Id typeId, unsigned value) const; - Id findScalarConstant(Op typeClass, Op opcode, Id typeId, unsigned v1, unsigned v2) const; - Id findCompositeConstant(Op typeClass, const std::vector& comps) const; - Id collapseAccessChain(); - void transferAccessChainSwizzle(bool dynamic); - void simplifyAccessChainSwizzle(); - void createAndSetNoPredecessorBlock(const char*); - void createSelectionMerge(Block* mergeBlock, unsigned int control); - void dumpSourceInstructions(std::vector&) const; - void dumpInstructions(std::vector&, const std::vector >&) const; - void dumpModuleProcesses(std::vector&) const; - - SourceLanguage source; - int sourceVersion; - spv::Id sourceFileStringId; - std::string sourceText; - int currentLine; - bool emitOpLines; - std::set extensions; - std::vector sourceExtensions; - std::vector moduleProcesses; - AddressingModel addressModel; - MemoryModel memoryModel; - std::set capabilities; - int builderNumber; - Module module; - Block* buildPoint; - Id uniqueId; - Function* entryPointFunction; - bool generatingOpCodeForSpecConst; - AccessChain accessChain; - - // special blocks of instructions for output - std::vector > strings; - std::vector > imports; - std::vector > entryPoints; - std::vector > executionModes; - std::vector > names; - std::vector > lines; - std::vector > decorations; - std::vector > constantsTypesGlobals; - std::vector > externals; - std::vector > functions; - - // not output, internally used for quick & dirty canonical (unique) creation - std::vector groupedConstants[OpConstant]; // all types appear before OpConstant - std::vector groupedTypes[OpConstant]; - - // stack of switches - std::stack switchMerges; - - // Our loop stack. - std::stack loops; - - // The stream for outputting warnings and errors. - SpvBuildLogger* logger; -}; // end Builder class - -}; // end spv namespace - -#endif // SpvBuilder_H diff --git a/deps/glslang/SPIRV/bitutils.h b/deps/glslang/SPIRV/bitutils.h deleted file mode 100644 index 31288ab6..00000000 --- a/deps/glslang/SPIRV/bitutils.h +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright (c) 2015-2016 The Khronos Group Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef LIBSPIRV_UTIL_BITUTILS_H_ -#define LIBSPIRV_UTIL_BITUTILS_H_ - -#include -#include - -namespace spvutils { - -// Performs a bitwise copy of source to the destination type Dest. -template -Dest BitwiseCast(Src source) { - Dest dest; - static_assert(sizeof(source) == sizeof(dest), - "BitwiseCast: Source and destination must have the same size"); - std::memcpy(&dest, &source, sizeof(dest)); - return dest; -} - -// SetBits returns an integer of type with bits set -// for position through , counting from the least -// significant bit. 
In particular when Num == 0, no positions are set to 1. -// A static assert will be triggered if First + Num > sizeof(T) * 8, that is, -// a bit that will not fit in the underlying type is set. -template -struct SetBits { - static_assert(First < sizeof(T) * 8, - "Tried to set a bit that is shifted too far."); - const static T get = (T(1) << First) | SetBits::get; -}; - -template -struct SetBits { - const static T get = T(0); -}; - -// This is all compile-time so we can put our tests right here. -static_assert(SetBits::get == uint32_t(0x00000000), - "SetBits failed"); -static_assert(SetBits::get == uint32_t(0x00000001), - "SetBits failed"); -static_assert(SetBits::get == uint32_t(0x80000000), - "SetBits failed"); -static_assert(SetBits::get == uint32_t(0x00000006), - "SetBits failed"); -static_assert(SetBits::get == uint32_t(0xc0000000), - "SetBits failed"); -static_assert(SetBits::get == uint32_t(0x7FFFFFFF), - "SetBits failed"); -static_assert(SetBits::get == uint32_t(0xFFFFFFFF), - "SetBits failed"); -static_assert(SetBits::get == uint32_t(0xFFFF0000), - "SetBits failed"); - -static_assert(SetBits::get == uint64_t(0x0000000000000001LL), - "SetBits failed"); -static_assert(SetBits::get == uint64_t(0x8000000000000000LL), - "SetBits failed"); -static_assert(SetBits::get == uint64_t(0xc000000000000000LL), - "SetBits failed"); -static_assert(SetBits::get == uint64_t(0x0000000080000000LL), - "SetBits failed"); -static_assert(SetBits::get == uint64_t(0x00000000FFFF0000LL), - "SetBits failed"); - -} // namespace spvutils - -#endif // LIBSPIRV_UTIL_BITUTILS_H_ diff --git a/deps/glslang/SPIRV/disassemble.cpp b/deps/glslang/SPIRV/disassemble.cpp deleted file mode 100644 index c950a66a..00000000 --- a/deps/glslang/SPIRV/disassemble.cpp +++ /dev/null @@ -1,695 +0,0 @@ -// -// Copyright (C) 2014-2015 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. - -// -// Disassembler for SPIR-V. 
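The SetBits helper removed above computes, at compile time, a mask of Num consecutive bits starting at bit First. A self-contained sketch of that recursion, reconstructed to stay consistent with the surrounding static_asserts (so treat the exact parameter spelling as approximate), plus one extra usage check:

    #include <cstddef>
    #include <cstdint>

    // Compile-time mask with Num consecutive bits set, starting at bit First.
    template <typename T, size_t First = 0, size_t Num = 0>
    struct SetBits {
        static_assert(First < sizeof(T) * 8, "Tried to set a bit that is shifted too far.");
        static const T get = (T(1) << First) | SetBits<T, First + 1, Num - 1>::get;
    };

    template <typename T, size_t Last>
    struct SetBits<T, Last, 0> {        // base case: no bits left to set
        static const T get = T(0);
    };

    // Checks in the spirit of the originals, plus one new one.
    static_assert(SetBits<uint32_t, 1, 2>::get == 0x00000006u, "SetBits failed");
    static_assert(SetBits<uint32_t, 12, 4>::get == 0x0000F000u, "SetBits failed");
    static_assert(SetBits<uint64_t, 16, 16>::get == 0x00000000FFFF0000ull, "SetBits failed");

    int main() { return 0; }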
-// - -#include -#include -#include -#include -#include -#include -#include - -#include "disassemble.h" -#include "doc.h" - -namespace spv { - extern "C" { - // Include C-based headers that don't have a namespace - #include "GLSL.std.450.h" -#ifdef AMD_EXTENSIONS - #include "GLSL.ext.AMD.h" -#endif -#ifdef NV_EXTENSIONS - #include "GLSL.ext.NV.h" -#endif - } -} -const char* GlslStd450DebugNames[spv::GLSLstd450Count]; - -namespace spv { - -#ifdef AMD_EXTENSIONS -static const char* GLSLextAMDGetDebugNames(const char*, unsigned); -#endif - -#ifdef NV_EXTENSIONS -static const char* GLSLextNVGetDebugNames(const char*, unsigned); -#endif - -static void Kill(std::ostream& out, const char* message) -{ - out << std::endl << "Disassembly failed: " << message << std::endl; - exit(1); -} - -// used to identify the extended instruction library imported when printing -enum ExtInstSet { - GLSL450Inst, -#ifdef AMD_EXTENSIONS - GLSLextAMDInst, -#endif -#ifdef NV_EXTENSIONS - GLSLextNVInst, -#endif - OpenCLExtInst, -}; - -// Container class for a single instance of a SPIR-V stream, with methods for disassembly. -class SpirvStream { -public: - SpirvStream(std::ostream& out, const std::vector& stream) : out(out), stream(stream), word(0), nextNestedControl(0) { } - virtual ~SpirvStream() { } - - void validate(); - void processInstructions(); - -protected: - SpirvStream(const SpirvStream&); - SpirvStream& operator=(const SpirvStream&); - Op getOpCode(int id) const { return idInstruction[id] ? (Op)(stream[idInstruction[id]] & OpCodeMask) : OpNop; } - - // Output methods - void outputIndent(); - void formatId(Id id, std::stringstream&); - void outputResultId(Id id); - void outputTypeId(Id id); - void outputId(Id id); - void outputMask(OperandClass operandClass, unsigned mask); - void disassembleImmediates(int numOperands); - void disassembleIds(int numOperands); - int disassembleString(); - void disassembleInstruction(Id resultId, Id typeId, Op opCode, int numOperands); - - // Data - std::ostream& out; // where to write the disassembly - const std::vector& stream; // the actual word stream - int size; // the size of the word stream - int word; // the next word of the stream to read - - // map each to the instruction that created it - Id bound; - std::vector idInstruction; // the word offset into the stream where the instruction for result [id] starts; 0 if not yet seen (forward reference or function parameter) - - std::vector idDescriptor; // the best text string known for explaining the - - // schema - unsigned int schema; - - // stack of structured-merge points - std::stack nestedControl; - Id nextNestedControl; // need a slight delay for when we are nested -}; - -void SpirvStream::validate() -{ - size = (int)stream.size(); - if (size < 4) - Kill(out, "stream is too short"); - - // Magic number - if (stream[word++] != MagicNumber) { - out << "Bad magic number"; - return; - } - - // Version - out << "// Module Version " << std::hex << stream[word++] << std::endl; - - // Generator's magic number - out << "// Generated by (magic number): " << std::hex << stream[word++] << std::dec << std::endl; - - // Result bound - bound = stream[word++]; - idInstruction.resize(bound); - idDescriptor.resize(bound); - out << "// Id's are bound by " << bound << std::endl; - out << std::endl; - - // Reserved schema, must be 0 for now - schema = stream[word++]; - if (schema != 0) - Kill(out, "bad schema, must be 0"); -} - -// Loop over all the instructions, in order, processing each. 
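SpirvStream::validate() above consumes the fixed five-word module header before any instructions, and Builder::dump() earlier in this diff emits the same layout: magic, version, generator, id bound, reserved schema. For reference, a tiny sketch of that header (the version word shown is illustrative):

    #include <cassert>
    #include <cstdint>
    #include <vector>

    int main()
    {
        const uint32_t MagicNumber = 0x07230203;   // SPIR-V magic word

        std::vector<uint32_t> header = {
            MagicNumber,
            0x00010000,     // version (1.0 here, for illustration)
            0,              // generator's magic number
            42,             // id bound: every result id in the module is < 42
            0,              // reserved schema, must be 0
        };

        assert(header.size() >= 4 && header[0] == MagicNumber && header[4] == 0);
        return 0;
    }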
-// Boiler plate for each is handled here directly, the rest is dispatched. -void SpirvStream::processInstructions() -{ - // Instructions - while (word < size) { - int instructionStart = word; - - // Instruction wordCount and opcode - unsigned int firstWord = stream[word]; - unsigned wordCount = firstWord >> WordCountShift; - Op opCode = (Op)(firstWord & OpCodeMask); - int nextInst = word + wordCount; - ++word; - - // Presence of full instruction - if (nextInst > size) - Kill(out, "stream instruction terminated too early"); - - // Base for computing number of operands; will be updated as more is learned - unsigned numOperands = wordCount - 1; - - // Type - Id typeId = 0; - if (InstructionDesc[opCode].hasType()) { - typeId = stream[word++]; - --numOperands; - } - - // Result - Id resultId = 0; - if (InstructionDesc[opCode].hasResult()) { - resultId = stream[word++]; - --numOperands; - - // save instruction for future reference - idInstruction[resultId] = instructionStart; - } - - outputResultId(resultId); - outputTypeId(typeId); - outputIndent(); - - // Hand off the Op and all its operands - disassembleInstruction(resultId, typeId, opCode, numOperands); - if (word != nextInst) { - out << " ERROR, incorrect number of operands consumed. At " << word << " instead of " << nextInst << " instruction start was " << instructionStart; - word = nextInst; - } - out << std::endl; - } -} - -void SpirvStream::outputIndent() -{ - for (int i = 0; i < (int)nestedControl.size(); ++i) - out << " "; -} - -void SpirvStream::formatId(Id id, std::stringstream& idStream) -{ - if (id != 0) { - // On instructions with no IDs, this is called with "0", which does not - // have to be within ID bounds on null shaders. - if (id >= bound) - Kill(out, "Bad "); - - idStream << id; - if (idDescriptor[id].size() > 0) - idStream << "(" << idDescriptor[id] << ")"; - } -} - -void SpirvStream::outputResultId(Id id) -{ - const int width = 16; - std::stringstream idStream; - formatId(id, idStream); - out << std::setw(width) << std::right << idStream.str(); - if (id != 0) - out << ":"; - else - out << " "; - - if (nestedControl.size() && id == nestedControl.top()) - nestedControl.pop(); -} - -void SpirvStream::outputTypeId(Id id) -{ - const int width = 12; - std::stringstream idStream; - formatId(id, idStream); - out << std::setw(width) << std::right << idStream.str() << " "; -} - -void SpirvStream::outputId(Id id) -{ - if (id >= bound) - Kill(out, "Bad "); - - out << id; - if (idDescriptor[id].size() > 0) - out << "(" << idDescriptor[id] << ")"; -} - -void SpirvStream::outputMask(OperandClass operandClass, unsigned mask) -{ - if (mask == 0) - out << "None"; - else { - for (int m = 0; m < OperandClassParams[operandClass].ceiling; ++m) { - if (mask & (1 << m)) - out << OperandClassParams[operandClass].getName(m) << " "; - } - } -} - -void SpirvStream::disassembleImmediates(int numOperands) -{ - for (int i = 0; i < numOperands; ++i) { - out << stream[word++]; - if (i < numOperands - 1) - out << " "; - } -} - -void SpirvStream::disassembleIds(int numOperands) -{ - for (int i = 0; i < numOperands; ++i) { - outputId(stream[word++]); - if (i < numOperands - 1) - out << " "; - } -} - -// return the number of operands consumed by the string -int SpirvStream::disassembleString() -{ - int startWord = word; - - out << " \""; - - const char* wordString; - bool done = false; - do { - unsigned int content = stream[word]; - wordString = (const char*)&content; - for (int charCount = 0; charCount < 4; ++charCount) { - if (*wordString == 0) { - done 
= true; - break; - } - out << *(wordString++); - } - ++word; - } while (! done); - - out << "\""; - - return word - startWord; -} - -void SpirvStream::disassembleInstruction(Id resultId, Id /*typeId*/, Op opCode, int numOperands) -{ - // Process the opcode - - out << (OpcodeString(opCode) + 2); // leave out the "Op" - - if (opCode == OpLoopMerge || opCode == OpSelectionMerge) - nextNestedControl = stream[word]; - else if (opCode == OpBranchConditional || opCode == OpSwitch) { - if (nextNestedControl) { - nestedControl.push(nextNestedControl); - nextNestedControl = 0; - } - } else if (opCode == OpExtInstImport) { - idDescriptor[resultId] = (const char*)(&stream[word]); - } - else { - if (resultId != 0 && idDescriptor[resultId].size() == 0) { - switch (opCode) { - case OpTypeInt: - idDescriptor[resultId] = "int"; - break; - case OpTypeFloat: - idDescriptor[resultId] = "float"; - break; - case OpTypeBool: - idDescriptor[resultId] = "bool"; - break; - case OpTypeStruct: - idDescriptor[resultId] = "struct"; - break; - case OpTypePointer: - idDescriptor[resultId] = "ptr"; - break; - case OpTypeVector: - if (idDescriptor[stream[word]].size() > 0) - idDescriptor[resultId].append(idDescriptor[stream[word]].begin(), idDescriptor[stream[word]].begin() + 1); - idDescriptor[resultId].append("vec"); - switch (stream[word + 1]) { - case 2: idDescriptor[resultId].append("2"); break; - case 3: idDescriptor[resultId].append("3"); break; - case 4: idDescriptor[resultId].append("4"); break; - case 8: idDescriptor[resultId].append("8"); break; - case 16: idDescriptor[resultId].append("16"); break; - case 32: idDescriptor[resultId].append("32"); break; - default: break; - } - break; - default: - break; - } - } - } - - // Process the operands. Note, a new context-dependent set could be - // swapped in mid-traversal. - - // Handle images specially, so can put out helpful strings. - if (opCode == OpTypeImage) { - out << " "; - disassembleIds(1); - out << " " << DimensionString((Dim)stream[word++]); - out << (stream[word++] != 0 ? " depth" : ""); - out << (stream[word++] != 0 ? " array" : ""); - out << (stream[word++] != 0 ? 
" multi-sampled" : ""); - switch (stream[word++]) { - case 0: out << " runtime"; break; - case 1: out << " sampled"; break; - case 2: out << " nonsampled"; break; - } - out << " format:" << ImageFormatString((ImageFormat)stream[word++]); - - if (numOperands == 8) { - out << " " << AccessQualifierString(stream[word++]); - } - return; - } - - // Handle all the parameterized operands - for (int op = 0; op < InstructionDesc[opCode].operands.getNum() && numOperands > 0; ++op) { - out << " "; - OperandClass operandClass = InstructionDesc[opCode].operands.getClass(op); - switch (operandClass) { - case OperandId: - case OperandScope: - case OperandMemorySemantics: - disassembleIds(1); - --numOperands; - // Get names for printing "(XXX)" for readability, *after* this id - if (opCode == OpName) - idDescriptor[stream[word - 1]] = (const char*)(&stream[word]); - break; - case OperandVariableIds: - disassembleIds(numOperands); - return; - case OperandImageOperands: - outputMask(OperandImageOperands, stream[word++]); - --numOperands; - disassembleIds(numOperands); - return; - case OperandOptionalLiteral: - case OperandVariableLiterals: - if ((opCode == OpDecorate && stream[word - 1] == DecorationBuiltIn) || - (opCode == OpMemberDecorate && stream[word - 1] == DecorationBuiltIn)) { - out << BuiltInString(stream[word++]); - --numOperands; - ++op; - } - disassembleImmediates(numOperands); - return; - case OperandVariableIdLiteral: - while (numOperands > 0) { - out << std::endl; - outputResultId(0); - outputTypeId(0); - outputIndent(); - out << " Type "; - disassembleIds(1); - out << ", member "; - disassembleImmediates(1); - numOperands -= 2; - } - return; - case OperandVariableLiteralId: - while (numOperands > 0) { - out << std::endl; - outputResultId(0); - outputTypeId(0); - outputIndent(); - out << " case "; - disassembleImmediates(1); - out << ": "; - disassembleIds(1); - numOperands -= 2; - } - return; - case OperandLiteralNumber: - disassembleImmediates(1); - --numOperands; - if (opCode == OpExtInst) { - ExtInstSet extInstSet = GLSL450Inst; - const char* name = idDescriptor[stream[word - 2]].c_str(); - if (0 == memcmp("OpenCL", name, 6)) { - extInstSet = OpenCLExtInst; -#ifdef AMD_EXTENSIONS - } else if (strcmp(spv::E_SPV_AMD_shader_ballot, name) == 0 || - strcmp(spv::E_SPV_AMD_shader_trinary_minmax, name) == 0 || - strcmp(spv::E_SPV_AMD_shader_explicit_vertex_parameter, name) == 0 || - strcmp(spv::E_SPV_AMD_gcn_shader, name) == 0) { - extInstSet = GLSLextAMDInst; -#endif -#ifdef NV_EXTENSIONS - }else if (strcmp(spv::E_SPV_NV_sample_mask_override_coverage, name) == 0 || - strcmp(spv::E_SPV_NV_geometry_shader_passthrough, name) == 0 || - strcmp(spv::E_SPV_NV_viewport_array2, name) == 0 || - strcmp(spv::E_SPV_NVX_multiview_per_view_attributes, name) == 0) { - extInstSet = GLSLextNVInst; -#endif - } - unsigned entrypoint = stream[word - 1]; - if (extInstSet == GLSL450Inst) { - if (entrypoint < GLSLstd450Count) { - out << "(" << GlslStd450DebugNames[entrypoint] << ")"; - } -#ifdef AMD_EXTENSIONS - } else if (extInstSet == GLSLextAMDInst) { - out << "(" << GLSLextAMDGetDebugNames(name, entrypoint) << ")"; -#endif -#ifdef NV_EXTENSIONS - } - else if (extInstSet == GLSLextNVInst) { - out << "(" << GLSLextNVGetDebugNames(name, entrypoint) << ")"; -#endif - } - } - break; - case OperandOptionalLiteralString: - case OperandLiteralString: - numOperands -= disassembleString(); - break; - default: - assert(operandClass >= OperandSource && operandClass < OperandOpcode); - - if 
(OperandClassParams[operandClass].bitmask) - outputMask(operandClass, stream[word++]); - else - out << OperandClassParams[operandClass].getName(stream[word++]); - --numOperands; - - break; - } - } - - return; -} - -static void GLSLstd450GetDebugNames(const char** names) -{ - for (int i = 0; i < GLSLstd450Count; ++i) - names[i] = "Unknown"; - - names[GLSLstd450Round] = "Round"; - names[GLSLstd450RoundEven] = "RoundEven"; - names[GLSLstd450Trunc] = "Trunc"; - names[GLSLstd450FAbs] = "FAbs"; - names[GLSLstd450SAbs] = "SAbs"; - names[GLSLstd450FSign] = "FSign"; - names[GLSLstd450SSign] = "SSign"; - names[GLSLstd450Floor] = "Floor"; - names[GLSLstd450Ceil] = "Ceil"; - names[GLSLstd450Fract] = "Fract"; - names[GLSLstd450Radians] = "Radians"; - names[GLSLstd450Degrees] = "Degrees"; - names[GLSLstd450Sin] = "Sin"; - names[GLSLstd450Cos] = "Cos"; - names[GLSLstd450Tan] = "Tan"; - names[GLSLstd450Asin] = "Asin"; - names[GLSLstd450Acos] = "Acos"; - names[GLSLstd450Atan] = "Atan"; - names[GLSLstd450Sinh] = "Sinh"; - names[GLSLstd450Cosh] = "Cosh"; - names[GLSLstd450Tanh] = "Tanh"; - names[GLSLstd450Asinh] = "Asinh"; - names[GLSLstd450Acosh] = "Acosh"; - names[GLSLstd450Atanh] = "Atanh"; - names[GLSLstd450Atan2] = "Atan2"; - names[GLSLstd450Pow] = "Pow"; - names[GLSLstd450Exp] = "Exp"; - names[GLSLstd450Log] = "Log"; - names[GLSLstd450Exp2] = "Exp2"; - names[GLSLstd450Log2] = "Log2"; - names[GLSLstd450Sqrt] = "Sqrt"; - names[GLSLstd450InverseSqrt] = "InverseSqrt"; - names[GLSLstd450Determinant] = "Determinant"; - names[GLSLstd450MatrixInverse] = "MatrixInverse"; - names[GLSLstd450Modf] = "Modf"; - names[GLSLstd450ModfStruct] = "ModfStruct"; - names[GLSLstd450FMin] = "FMin"; - names[GLSLstd450SMin] = "SMin"; - names[GLSLstd450UMin] = "UMin"; - names[GLSLstd450FMax] = "FMax"; - names[GLSLstd450SMax] = "SMax"; - names[GLSLstd450UMax] = "UMax"; - names[GLSLstd450FClamp] = "FClamp"; - names[GLSLstd450SClamp] = "SClamp"; - names[GLSLstd450UClamp] = "UClamp"; - names[GLSLstd450FMix] = "FMix"; - names[GLSLstd450Step] = "Step"; - names[GLSLstd450SmoothStep] = "SmoothStep"; - names[GLSLstd450Fma] = "Fma"; - names[GLSLstd450Frexp] = "Frexp"; - names[GLSLstd450FrexpStruct] = "FrexpStruct"; - names[GLSLstd450Ldexp] = "Ldexp"; - names[GLSLstd450PackSnorm4x8] = "PackSnorm4x8"; - names[GLSLstd450PackUnorm4x8] = "PackUnorm4x8"; - names[GLSLstd450PackSnorm2x16] = "PackSnorm2x16"; - names[GLSLstd450PackUnorm2x16] = "PackUnorm2x16"; - names[GLSLstd450PackHalf2x16] = "PackHalf2x16"; - names[GLSLstd450PackDouble2x32] = "PackDouble2x32"; - names[GLSLstd450UnpackSnorm2x16] = "UnpackSnorm2x16"; - names[GLSLstd450UnpackUnorm2x16] = "UnpackUnorm2x16"; - names[GLSLstd450UnpackHalf2x16] = "UnpackHalf2x16"; - names[GLSLstd450UnpackSnorm4x8] = "UnpackSnorm4x8"; - names[GLSLstd450UnpackUnorm4x8] = "UnpackUnorm4x8"; - names[GLSLstd450UnpackDouble2x32] = "UnpackDouble2x32"; - names[GLSLstd450Length] = "Length"; - names[GLSLstd450Distance] = "Distance"; - names[GLSLstd450Cross] = "Cross"; - names[GLSLstd450Normalize] = "Normalize"; - names[GLSLstd450FaceForward] = "FaceForward"; - names[GLSLstd450Reflect] = "Reflect"; - names[GLSLstd450Refract] = "Refract"; - names[GLSLstd450FindILsb] = "FindILsb"; - names[GLSLstd450FindSMsb] = "FindSMsb"; - names[GLSLstd450FindUMsb] = "FindUMsb"; - names[GLSLstd450InterpolateAtCentroid] = "InterpolateAtCentroid"; - names[GLSLstd450InterpolateAtSample] = "InterpolateAtSample"; - names[GLSLstd450InterpolateAtOffset] = "InterpolateAtOffset"; -} - -#ifdef AMD_EXTENSIONS -static const char* 
GLSLextAMDGetDebugNames(const char* name, unsigned entrypoint) -{ - if (strcmp(name, spv::E_SPV_AMD_shader_ballot) == 0) { - switch (entrypoint) { - case SwizzleInvocationsAMD: return "SwizzleInvocationsAMD"; - case SwizzleInvocationsMaskedAMD: return "SwizzleInvocationsMaskedAMD"; - case WriteInvocationAMD: return "WriteInvocationAMD"; - case MbcntAMD: return "MbcntAMD"; - default: return "Bad"; - } - } else if (strcmp(name, spv::E_SPV_AMD_shader_trinary_minmax) == 0) { - switch (entrypoint) { - case FMin3AMD: return "FMin3AMD"; - case UMin3AMD: return "UMin3AMD"; - case SMin3AMD: return "SMin3AMD"; - case FMax3AMD: return "FMax3AMD"; - case UMax3AMD: return "UMax3AMD"; - case SMax3AMD: return "SMax3AMD"; - case FMid3AMD: return "FMid3AMD"; - case UMid3AMD: return "UMid3AMD"; - case SMid3AMD: return "SMid3AMD"; - default: return "Bad"; - } - } else if (strcmp(name, spv::E_SPV_AMD_shader_explicit_vertex_parameter) == 0) { - switch (entrypoint) { - case InterpolateAtVertexAMD: return "InterpolateAtVertexAMD"; - default: return "Bad"; - } - } - else if (strcmp(name, spv::E_SPV_AMD_gcn_shader) == 0) { - switch (entrypoint) { - case CubeFaceIndexAMD: return "CubeFaceIndexAMD"; - case CubeFaceCoordAMD: return "CubeFaceCoordAMD"; - case TimeAMD: return "TimeAMD"; - default: - break; - } - } - - return "Bad"; -} -#endif - - -#ifdef NV_EXTENSIONS -static const char* GLSLextNVGetDebugNames(const char* name, unsigned entrypoint) -{ - if (strcmp(name, spv::E_SPV_NV_sample_mask_override_coverage) == 0 || - strcmp(name, spv::E_SPV_NV_geometry_shader_passthrough) == 0 || - strcmp(name, spv::E_ARB_shader_viewport_layer_array) == 0 || - strcmp(name, spv::E_SPV_NV_viewport_array2) == 0 || - strcmp(spv::E_SPV_NVX_multiview_per_view_attributes, name) == 0) { - switch (entrypoint) { - case DecorationOverrideCoverageNV: return "OverrideCoverageNV"; - case DecorationPassthroughNV: return "PassthroughNV"; - case CapabilityGeometryShaderPassthroughNV: return "GeometryShaderPassthroughNV"; - case DecorationViewportRelativeNV: return "ViewportRelativeNV"; - case BuiltInViewportMaskNV: return "ViewportMaskNV"; - case CapabilityShaderViewportMaskNV: return "ShaderViewportMaskNV"; - case DecorationSecondaryViewportRelativeNV: return "SecondaryViewportRelativeNV"; - case BuiltInSecondaryPositionNV: return "SecondaryPositionNV"; - case BuiltInSecondaryViewportMaskNV: return "SecondaryViewportMaskNV"; - case CapabilityShaderStereoViewNV: return "ShaderStereoViewNV"; - case BuiltInPositionPerViewNV: return "PositionPerViewNV"; - case BuiltInViewportMaskPerViewNV: return "ViewportMaskPerViewNV"; - case CapabilityPerViewAttributesNV: return "PerViewAttributesNV"; - default: return "Bad"; - } - } - return "Bad"; -} -#endif - -void Disassemble(std::ostream& out, const std::vector& stream) -{ - SpirvStream SpirvStream(out, stream); - spv::Parameterize(); - GLSLstd450GetDebugNames(GlslStd450DebugNames); - SpirvStream.validate(); - SpirvStream.processInstructions(); -} - -}; // end namespace spv diff --git a/deps/glslang/SPIRV/disassemble.h b/deps/glslang/SPIRV/disassemble.h deleted file mode 100644 index 47cef65a..00000000 --- a/deps/glslang/SPIRV/disassemble.h +++ /dev/null @@ -1,52 +0,0 @@ -// -// Copyright (C) 2014-2015 LunarG, Inc. -// -// All rights reserved. 
-// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. - -// -// Disassembler for SPIR-V. -// - -#pragma once -#ifndef disassembler_H -#define disassembler_H - -#include -#include - -namespace spv { - - void Disassemble(std::ostream& out, const std::vector&); - -}; // end namespace spv - -#endif // disassembler_H diff --git a/deps/glslang/SPIRV/doc.cpp b/deps/glslang/SPIRV/doc.cpp deleted file mode 100644 index 2fe5d9ce..00000000 --- a/deps/glslang/SPIRV/doc.cpp +++ /dev/null @@ -1,2875 +0,0 @@ -// -// Copyright (C) 2014-2015 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. - -// -// 1) Programmatically fill in instruction/operand information. -// This can be used for disassembly, printing documentation, etc. -// -// 2) Print documentation from this parameterization. -// - -#include "doc.h" - -#include -#include -#include - -namespace spv { - extern "C" { - // Include C-based headers that don't have a namespace - #include "GLSL.ext.KHR.h" -#ifdef AMD_EXTENSIONS - #include "GLSL.ext.AMD.h" -#endif -#ifdef NV_EXTENSIONS - #include "GLSL.ext.NV.h" -#endif - } -} - -namespace spv { - -// -// Whole set of functions that translate enumerants to their text strings for -// the specification (or their sanitized versions for auto-generating the -// spirv headers. -// -// Also, the ceilings are declared next to these, to help keep them in sync. -// Ceilings should be -// - one more than the maximum value an enumerant takes on, for non-mask enumerants -// (for non-sparse enums, this is the number of enumerants) -// - the number of bits consumed by the set of masks -// (for non-sparse mask enums, this is the number of enumerants) -// - -const int SourceLanguageCeiling = 6; // HLSL todo: need official enumerant - -const char* SourceString(int source) -{ - switch (source) { - case 0: return "Unknown"; - case 1: return "ESSL"; - case 2: return "GLSL"; - case 3: return "OpenCL_C"; - case 4: return "OpenCL_CPP"; - case 5: return "HLSL"; - - case SourceLanguageCeiling: - default: return "Bad"; - } -} - -const int ExecutionModelCeiling = 7; - -const char* ExecutionModelString(int model) -{ - switch (model) { - case 0: return "Vertex"; - case 1: return "TessellationControl"; - case 2: return "TessellationEvaluation"; - case 3: return "Geometry"; - case 4: return "Fragment"; - case 5: return "GLCompute"; - case 6: return "Kernel"; - - case ExecutionModelCeiling: - default: return "Bad"; - } -} - -const int AddressingModelCeiling = 3; - -const char* AddressingString(int addr) -{ - switch (addr) { - case 0: return "Logical"; - case 1: return "Physical32"; - case 2: return "Physical64"; - - case AddressingModelCeiling: - default: return "Bad"; - } -} - -const int MemoryModelCeiling = 3; - -const char* MemoryString(int mem) -{ - switch (mem) { - case 0: return "Simple"; - case 1: return "GLSL450"; - case 2: return "OpenCL"; - - case MemoryModelCeiling: - default: return "Bad"; - } -} - -const int ExecutionModeCeiling = 33; - -const char* ExecutionModeString(int mode) -{ - switch (mode) { - case 0: return "Invocations"; - case 1: return "SpacingEqual"; - case 2: return "SpacingFractionalEven"; - case 3: return "SpacingFractionalOdd"; - case 4: return "VertexOrderCw"; - case 5: return "VertexOrderCcw"; - case 6: return "PixelCenterInteger"; - case 7: return "OriginUpperLeft"; - case 8: return "OriginLowerLeft"; - case 9: return "EarlyFragmentTests"; - case 10: return "PointMode"; - case 11: return "Xfb"; - case 12: return "DepthReplacing"; - case 13: return "Bad"; - case 14: return "DepthGreater"; - case 15: return "DepthLess"; - case 16: return 
"DepthUnchanged"; - case 17: return "LocalSize"; - case 18: return "LocalSizeHint"; - case 19: return "InputPoints"; - case 20: return "InputLines"; - case 21: return "InputLinesAdjacency"; - case 22: return "Triangles"; - case 23: return "InputTrianglesAdjacency"; - case 24: return "Quads"; - case 25: return "Isolines"; - case 26: return "OutputVertices"; - case 27: return "OutputPoints"; - case 28: return "OutputLineStrip"; - case 29: return "OutputTriangleStrip"; - case 30: return "VecTypeHint"; - case 31: return "ContractionOff"; - case 32: return "Bad"; - - case 4446: return "PostDepthCoverage"; - case ExecutionModeCeiling: - default: return "Bad"; - } -} - -const int StorageClassCeiling = 13; - -const char* StorageClassString(int StorageClass) -{ - switch (StorageClass) { - case 0: return "UniformConstant"; - case 1: return "Input"; - case 2: return "Uniform"; - case 3: return "Output"; - case 4: return "Workgroup"; - case 5: return "CrossWorkgroup"; - case 6: return "Private"; - case 7: return "Function"; - case 8: return "Generic"; - case 9: return "PushConstant"; - case 10: return "AtomicCounter"; - case 11: return "Image"; - case 12: return "StorageBuffer"; - - case StorageClassCeiling: - default: return "Bad"; - } -} - -const int DecorationCeiling = 45; - -const char* DecorationString(int decoration) -{ - switch (decoration) { - case 0: return "RelaxedPrecision"; - case 1: return "SpecId"; - case 2: return "Block"; - case 3: return "BufferBlock"; - case 4: return "RowMajor"; - case 5: return "ColMajor"; - case 6: return "ArrayStride"; - case 7: return "MatrixStride"; - case 8: return "GLSLShared"; - case 9: return "GLSLPacked"; - case 10: return "CPacked"; - case 11: return "BuiltIn"; - case 12: return "Bad"; - case 13: return "NoPerspective"; - case 14: return "Flat"; - case 15: return "Patch"; - case 16: return "Centroid"; - case 17: return "Sample"; - case 18: return "Invariant"; - case 19: return "Restrict"; - case 20: return "Aliased"; - case 21: return "Volatile"; - case 22: return "Constant"; - case 23: return "Coherent"; - case 24: return "NonWritable"; - case 25: return "NonReadable"; - case 26: return "Uniform"; - case 27: return "Bad"; - case 28: return "SaturatedConversion"; - case 29: return "Stream"; - case 30: return "Location"; - case 31: return "Component"; - case 32: return "Index"; - case 33: return "Binding"; - case 34: return "DescriptorSet"; - case 35: return "Offset"; - case 36: return "XfbBuffer"; - case 37: return "XfbStride"; - case 38: return "FuncParamAttr"; - case 39: return "FP Rounding Mode"; - case 40: return "FP Fast Math Mode"; - case 41: return "Linkage Attributes"; - case 42: return "NoContraction"; - case 43: return "InputAttachmentIndex"; - case 44: return "Alignment"; - - case DecorationCeiling: - default: return "Bad"; - -#ifdef AMD_EXTENSIONS - case 4999: return "ExplicitInterpAMD"; -#endif -#ifdef NV_EXTENSIONS - case 5248: return "OverrideCoverageNV"; - case 5250: return "PassthroughNV"; - case 5252: return "ViewportRelativeNV"; - case 5256: return "SecondaryViewportRelativeNV"; -#endif - } -} - -const int BuiltInCeiling = 44; - -const char* BuiltInString(int builtIn) -{ - switch (builtIn) { - case 0: return "Position"; - case 1: return "PointSize"; - case 2: return "Bad"; - case 3: return "ClipDistance"; - case 4: return "CullDistance"; - case 5: return "VertexId"; - case 6: return "InstanceId"; - case 7: return "PrimitiveId"; - case 8: return "InvocationId"; - case 9: return "Layer"; - case 10: return "ViewportIndex"; - case 11: 
return "TessLevelOuter"; - case 12: return "TessLevelInner"; - case 13: return "TessCoord"; - case 14: return "PatchVertices"; - case 15: return "FragCoord"; - case 16: return "PointCoord"; - case 17: return "FrontFacing"; - case 18: return "SampleId"; - case 19: return "SamplePosition"; - case 20: return "SampleMask"; - case 21: return "Bad"; - case 22: return "FragDepth"; - case 23: return "HelperInvocation"; - case 24: return "NumWorkgroups"; - case 25: return "WorkgroupSize"; - case 26: return "WorkgroupId"; - case 27: return "LocalInvocationId"; - case 28: return "GlobalInvocationId"; - case 29: return "LocalInvocationIndex"; - case 30: return "WorkDim"; - case 31: return "GlobalSize"; - case 32: return "EnqueuedWorkgroupSize"; - case 33: return "GlobalOffset"; - case 34: return "GlobalLinearId"; - case 35: return "Bad"; - case 36: return "SubgroupSize"; - case 37: return "SubgroupMaxSize"; - case 38: return "NumSubgroups"; - case 39: return "NumEnqueuedSubgroups"; - case 40: return "SubgroupId"; - case 41: return "SubgroupLocalInvocationId"; - case 42: return "VertexIndex"; // TBD: put next to VertexId? - case 43: return "InstanceIndex"; // TBD: put next to InstanceId? - - case 4416: return "SubgroupEqMaskKHR"; - case 4417: return "SubgroupGeMaskKHR"; - case 4418: return "SubgroupGtMaskKHR"; - case 4419: return "SubgroupLeMaskKHR"; - case 4420: return "SubgroupLtMaskKHR"; - case 4438: return "DeviceIndex"; - case 4440: return "ViewIndex"; - case 4424: return "BaseVertex"; - case 4425: return "BaseInstance"; - case 4426: return "DrawIndex"; - case 5014: return "FragStencilRefEXT"; - -#ifdef AMD_EXTENSIONS - case 4992: return "BaryCoordNoPerspAMD"; - case 4993: return "BaryCoordNoPerspCentroidAMD"; - case 4994: return "BaryCoordNoPerspSampleAMD"; - case 4995: return "BaryCoordSmoothAMD"; - case 4996: return "BaryCoordSmoothCentroidAMD"; - case 4997: return "BaryCoordSmoothSampleAMD"; - case 4998: return "BaryCoordPullModelAMD"; -#endif - -#ifdef NV_EXTENSIONS - case 5253: return "ViewportMaskNV"; - case 5257: return "SecondaryPositionNV"; - case 5258: return "SecondaryViewportMaskNV"; - case 5261: return "PositionPerViewNV"; - case 5262: return "ViewportMaskPerViewNV"; -#endif - - case BuiltInCeiling: - default: return "Bad"; - } -} - -const int DimensionCeiling = 7; - -const char* DimensionString(int dim) -{ - switch (dim) { - case 0: return "1D"; - case 1: return "2D"; - case 2: return "3D"; - case 3: return "Cube"; - case 4: return "Rect"; - case 5: return "Buffer"; - case 6: return "SubpassData"; - - case DimensionCeiling: - default: return "Bad"; - } -} - -const int SamplerAddressingModeCeiling = 5; - -const char* SamplerAddressingModeString(int mode) -{ - switch (mode) { - case 0: return "None"; - case 1: return "ClampToEdge"; - case 2: return "Clamp"; - case 3: return "Repeat"; - case 4: return "RepeatMirrored"; - - case SamplerAddressingModeCeiling: - default: return "Bad"; - } -} - -const int SamplerFilterModeCeiling = 2; - -const char* SamplerFilterModeString(int mode) -{ - switch (mode) { - case 0: return "Nearest"; - case 1: return "Linear"; - - case SamplerFilterModeCeiling: - default: return "Bad"; - } -} - -const int ImageFormatCeiling = 40; - -const char* ImageFormatString(int format) -{ - switch (format) { - case 0: return "Unknown"; - - // ES/Desktop float - case 1: return "Rgba32f"; - case 2: return "Rgba16f"; - case 3: return "R32f"; - case 4: return "Rgba8"; - case 5: return "Rgba8Snorm"; - - // Desktop float - case 6: return "Rg32f"; - case 7: return "Rg16f"; - 
case 8: return "R11fG11fB10f"; - case 9: return "R16f"; - case 10: return "Rgba16"; - case 11: return "Rgb10A2"; - case 12: return "Rg16"; - case 13: return "Rg8"; - case 14: return "R16"; - case 15: return "R8"; - case 16: return "Rgba16Snorm"; - case 17: return "Rg16Snorm"; - case 18: return "Rg8Snorm"; - case 19: return "R16Snorm"; - case 20: return "R8Snorm"; - - // ES/Desktop int - case 21: return "Rgba32i"; - case 22: return "Rgba16i"; - case 23: return "Rgba8i"; - case 24: return "R32i"; - - // Desktop int - case 25: return "Rg32i"; - case 26: return "Rg16i"; - case 27: return "Rg8i"; - case 28: return "R16i"; - case 29: return "R8i"; - - // ES/Desktop uint - case 30: return "Rgba32ui"; - case 31: return "Rgba16ui"; - case 32: return "Rgba8ui"; - case 33: return "R32ui"; - - // Desktop uint - case 34: return "Rgb10a2ui"; - case 35: return "Rg32ui"; - case 36: return "Rg16ui"; - case 37: return "Rg8ui"; - case 38: return "R16ui"; - case 39: return "R8ui"; - - case ImageFormatCeiling: - default: - return "Bad"; - } -} - -const int ImageChannelOrderCeiling = 19; - -const char* ImageChannelOrderString(int format) -{ - switch (format) { - case 0: return "R"; - case 1: return "A"; - case 2: return "RG"; - case 3: return "RA"; - case 4: return "RGB"; - case 5: return "RGBA"; - case 6: return "BGRA"; - case 7: return "ARGB"; - case 8: return "Intensity"; - case 9: return "Luminance"; - case 10: return "Rx"; - case 11: return "RGx"; - case 12: return "RGBx"; - case 13: return "Depth"; - case 14: return "DepthStencil"; - case 15: return "sRGB"; - case 16: return "sRGBx"; - case 17: return "sRGBA"; - case 18: return "sBGRA"; - - case ImageChannelOrderCeiling: - default: - return "Bad"; - } -} - -const int ImageChannelDataTypeCeiling = 17; - -const char* ImageChannelDataTypeString(int type) -{ - switch (type) - { - case 0: return "SnormInt8"; - case 1: return "SnormInt16"; - case 2: return "UnormInt8"; - case 3: return "UnormInt16"; - case 4: return "UnormShort565"; - case 5: return "UnormShort555"; - case 6: return "UnormInt101010"; - case 7: return "SignedInt8"; - case 8: return "SignedInt16"; - case 9: return "SignedInt32"; - case 10: return "UnsignedInt8"; - case 11: return "UnsignedInt16"; - case 12: return "UnsignedInt32"; - case 13: return "HalfFloat"; - case 14: return "Float"; - case 15: return "UnormInt24"; - case 16: return "UnormInt101010_2"; - - case ImageChannelDataTypeCeiling: - default: - return "Bad"; - } -} - -const int ImageOperandsCeiling = 8; - -const char* ImageOperandsString(int format) -{ - switch (format) { - case 0: return "Bias"; - case 1: return "Lod"; - case 2: return "Grad"; - case 3: return "ConstOffset"; - case 4: return "Offset"; - case 5: return "ConstOffsets"; - case 6: return "Sample"; - case 7: return "MinLod"; - - case ImageOperandsCeiling: - default: - return "Bad"; - } -} - -const int FPFastMathCeiling = 5; - -const char* FPFastMathString(int mode) -{ - switch (mode) { - case 0: return "NotNaN"; - case 1: return "NotInf"; - case 2: return "NSZ"; - case 3: return "AllowRecip"; - case 4: return "Fast"; - - case FPFastMathCeiling: - default: return "Bad"; - } -} - -const int FPRoundingModeCeiling = 4; - -const char* FPRoundingModeString(int mode) -{ - switch (mode) { - case 0: return "RTE"; - case 1: return "RTZ"; - case 2: return "RTP"; - case 3: return "RTN"; - - case FPRoundingModeCeiling: - default: return "Bad"; - } -} - -const int LinkageTypeCeiling = 2; - -const char* LinkageTypeString(int type) -{ - switch (type) { - case 0: return "Export"; - case 
1: return "Import"; - - case LinkageTypeCeiling: - default: return "Bad"; - } -} - -const int FuncParamAttrCeiling = 8; - -const char* FuncParamAttrString(int attr) -{ - switch (attr) { - case 0: return "Zext"; - case 1: return "Sext"; - case 2: return "ByVal"; - case 3: return "Sret"; - case 4: return "NoAlias"; - case 5: return "NoCapture"; - case 6: return "NoWrite"; - case 7: return "NoReadWrite"; - - case FuncParamAttrCeiling: - default: return "Bad"; - } -} - -const int AccessQualifierCeiling = 3; - -const char* AccessQualifierString(int attr) -{ - switch (attr) { - case 0: return "ReadOnly"; - case 1: return "WriteOnly"; - case 2: return "ReadWrite"; - - case AccessQualifierCeiling: - default: return "Bad"; - } -} - -const int SelectControlCeiling = 2; - -const char* SelectControlString(int cont) -{ - switch (cont) { - case 0: return "Flatten"; - case 1: return "DontFlatten"; - - case SelectControlCeiling: - default: return "Bad"; - } -} - -const int LoopControlCeiling = 4; - -const char* LoopControlString(int cont) -{ - switch (cont) { - case 0: return "Unroll"; - case 1: return "DontUnroll"; - case 2: return "DependencyInfinite"; - case 3: return "DependencyLength"; - - case LoopControlCeiling: - default: return "Bad"; - } -} - -const int FunctionControlCeiling = 4; - -const char* FunctionControlString(int cont) -{ - switch (cont) { - case 0: return "Inline"; - case 1: return "DontInline"; - case 2: return "Pure"; - case 3: return "Const"; - - case FunctionControlCeiling: - default: return "Bad"; - } -} - -const int MemorySemanticsCeiling = 12; - -const char* MemorySemanticsString(int mem) -{ - // Note: No bits set (None) means "Relaxed" - switch (mem) { - case 0: return "Bad"; // Note: this is a placeholder for 'Consume' - case 1: return "Acquire"; - case 2: return "Release"; - case 3: return "AcquireRelease"; - case 4: return "SequentiallyConsistent"; - case 5: return "Bad"; // Note: reserved for future expansion - case 6: return "UniformMemory"; - case 7: return "SubgroupMemory"; - case 8: return "WorkgroupMemory"; - case 9: return "CrossWorkgroupMemory"; - case 10: return "AtomicCounterMemory"; - case 11: return "ImageMemory"; - - case MemorySemanticsCeiling: - default: return "Bad"; - } -} - -const int MemoryAccessCeiling = 3; - -const char* MemoryAccessString(int mem) -{ - switch (mem) { - case 0: return "Volatile"; - case 1: return "Aligned"; - case 2: return "Nontemporal"; - - case MemoryAccessCeiling: - default: return "Bad"; - } -} - -const int ScopeCeiling = 5; - -const char* ScopeString(int mem) -{ - switch (mem) { - case 0: return "CrossDevice"; - case 1: return "Device"; - case 2: return "Workgroup"; - case 3: return "Subgroup"; - case 4: return "Invocation"; - - case ScopeCeiling: - default: return "Bad"; - } -} - -const int GroupOperationCeiling = 3; - -const char* GroupOperationString(int gop) -{ - - switch (gop) - { - case 0: return "Reduce"; - case 1: return "InclusiveScan"; - case 2: return "ExclusiveScan"; - - case GroupOperationCeiling: - default: return "Bad"; - } -} - -const int KernelEnqueueFlagsCeiling = 3; - -const char* KernelEnqueueFlagsString(int flag) -{ - switch (flag) - { - case 0: return "NoWait"; - case 1: return "WaitKernel"; - case 2: return "WaitWorkGroup"; - - case KernelEnqueueFlagsCeiling: - default: return "Bad"; - } -} - -const int KernelProfilingInfoCeiling = 1; - -const char* KernelProfilingInfoString(int info) -{ - switch (info) - { - case 0: return "CmdExecTime"; - - case KernelProfilingInfoCeiling: - default: return "Bad"; - } -} - 
-const int CapabilityCeiling = 58; - -const char* CapabilityString(int info) -{ - switch (info) - { - case 0: return "Matrix"; - case 1: return "Shader"; - case 2: return "Geometry"; - case 3: return "Tessellation"; - case 4: return "Addresses"; - case 5: return "Linkage"; - case 6: return "Kernel"; - case 7: return "Vector16"; - case 8: return "Float16Buffer"; - case 9: return "Float16"; - case 10: return "Float64"; - case 11: return "Int64"; - case 12: return "Int64Atomics"; - case 13: return "ImageBasic"; - case 14: return "ImageReadWrite"; - case 15: return "ImageMipmap"; - case 16: return "Bad"; - case 17: return "Pipes"; - case 18: return "Groups"; - case 19: return "DeviceEnqueue"; - case 20: return "LiteralSampler"; - case 21: return "AtomicStorage"; - case 22: return "Int16"; - case 23: return "TessellationPointSize"; - case 24: return "GeometryPointSize"; - case 25: return "ImageGatherExtended"; - case 26: return "Bad"; - case 27: return "StorageImageMultisample"; - case 28: return "UniformBufferArrayDynamicIndexing"; - case 29: return "SampledImageArrayDynamicIndexing"; - case 30: return "StorageBufferArrayDynamicIndexing"; - case 31: return "StorageImageArrayDynamicIndexing"; - case 32: return "ClipDistance"; - case 33: return "CullDistance"; - case 34: return "ImageCubeArray"; - case 35: return "SampleRateShading"; - case 36: return "ImageRect"; - case 37: return "SampledRect"; - case 38: return "GenericPointer"; - case 39: return "Int8"; - case 40: return "InputAttachment"; - case 41: return "SparseResidency"; - case 42: return "MinLod"; - case 43: return "Sampled1D"; - case 44: return "Image1D"; - case 45: return "SampledCubeArray"; - case 46: return "SampledBuffer"; - case 47: return "ImageBuffer"; - case 48: return "ImageMSArray"; - case 49: return "StorageImageExtendedFormats"; - case 50: return "ImageQuery"; - case 51: return "DerivativeControl"; - case 52: return "InterpolationFunction"; - case 53: return "TransformFeedback"; - case 54: return "GeometryStreams"; - case 55: return "StorageImageReadWithoutFormat"; - case 56: return "StorageImageWriteWithoutFormat"; - case 57: return "MultiViewport"; - - case 4423: return "SubgroupBallotKHR"; - case 4427: return "DrawParameters"; - case 4431: return "SubgroupVoteKHR"; - - case 4433: return "StorageUniformBufferBlock16"; - case 4434: return "StorageUniform16"; - case 4435: return "StoragePushConstant16"; - case 4436: return "StorageInputOutput16"; - - case 4437: return "DeviceGroup"; - case 4439: return "MultiView"; - - case 5013: return "StencilExportEXT"; - -#ifdef AMD_EXTENSIONS - case 5009: return "ImageGatherBiasLodAMD"; - case 5015: return "ImageReadWriteLodAMD"; -#endif - - case 4445: return "AtomicStorageOps"; - - case 4447: return "SampleMaskPostDepthCoverage"; -#ifdef NV_EXTENSIONS - case 5251: return "GeometryShaderPassthroughNV"; - case 5254: return "ShaderViewportIndexLayerNV"; - case 5255: return "ShaderViewportMaskNV"; - case 5259: return "ShaderStereoViewNV"; - case 5260: return "PerViewAttributesNV"; -#endif - - case CapabilityCeiling: - default: return "Bad"; - } -} - -const char* OpcodeString(int op) -{ - switch (op) { - case 0: return "OpNop"; - case 1: return "OpUndef"; - case 2: return "OpSourceContinued"; - case 3: return "OpSource"; - case 4: return "OpSourceExtension"; - case 5: return "OpName"; - case 6: return "OpMemberName"; - case 7: return "OpString"; - case 8: return "OpLine"; - case 9: return "Bad"; - case 10: return "OpExtension"; - case 11: return "OpExtInstImport"; - case 12: return 
"OpExtInst"; - case 13: return "Bad"; - case 14: return "OpMemoryModel"; - case 15: return "OpEntryPoint"; - case 16: return "OpExecutionMode"; - case 17: return "OpCapability"; - case 18: return "Bad"; - case 19: return "OpTypeVoid"; - case 20: return "OpTypeBool"; - case 21: return "OpTypeInt"; - case 22: return "OpTypeFloat"; - case 23: return "OpTypeVector"; - case 24: return "OpTypeMatrix"; - case 25: return "OpTypeImage"; - case 26: return "OpTypeSampler"; - case 27: return "OpTypeSampledImage"; - case 28: return "OpTypeArray"; - case 29: return "OpTypeRuntimeArray"; - case 30: return "OpTypeStruct"; - case 31: return "OpTypeOpaque"; - case 32: return "OpTypePointer"; - case 33: return "OpTypeFunction"; - case 34: return "OpTypeEvent"; - case 35: return "OpTypeDeviceEvent"; - case 36: return "OpTypeReserveId"; - case 37: return "OpTypeQueue"; - case 38: return "OpTypePipe"; - case 39: return "OpTypeForwardPointer"; - case 40: return "Bad"; - case 41: return "OpConstantTrue"; - case 42: return "OpConstantFalse"; - case 43: return "OpConstant"; - case 44: return "OpConstantComposite"; - case 45: return "OpConstantSampler"; - case 46: return "OpConstantNull"; - case 47: return "Bad"; - case 48: return "OpSpecConstantTrue"; - case 49: return "OpSpecConstantFalse"; - case 50: return "OpSpecConstant"; - case 51: return "OpSpecConstantComposite"; - case 52: return "OpSpecConstantOp"; - case 53: return "Bad"; - case 54: return "OpFunction"; - case 55: return "OpFunctionParameter"; - case 56: return "OpFunctionEnd"; - case 57: return "OpFunctionCall"; - case 58: return "Bad"; - case 59: return "OpVariable"; - case 60: return "OpImageTexelPointer"; - case 61: return "OpLoad"; - case 62: return "OpStore"; - case 63: return "OpCopyMemory"; - case 64: return "OpCopyMemorySized"; - case 65: return "OpAccessChain"; - case 66: return "OpInBoundsAccessChain"; - case 67: return "OpPtrAccessChain"; - case 68: return "OpArrayLength"; - case 69: return "OpGenericPtrMemSemantics"; - case 70: return "OpInBoundsPtrAccessChain"; - case 71: return "OpDecorate"; - case 72: return "OpMemberDecorate"; - case 73: return "OpDecorationGroup"; - case 74: return "OpGroupDecorate"; - case 75: return "OpGroupMemberDecorate"; - case 76: return "Bad"; - case 77: return "OpVectorExtractDynamic"; - case 78: return "OpVectorInsertDynamic"; - case 79: return "OpVectorShuffle"; - case 80: return "OpCompositeConstruct"; - case 81: return "OpCompositeExtract"; - case 82: return "OpCompositeInsert"; - case 83: return "OpCopyObject"; - case 84: return "OpTranspose"; - case 85: return "Bad"; - case 86: return "OpSampledImage"; - case 87: return "OpImageSampleImplicitLod"; - case 88: return "OpImageSampleExplicitLod"; - case 89: return "OpImageSampleDrefImplicitLod"; - case 90: return "OpImageSampleDrefExplicitLod"; - case 91: return "OpImageSampleProjImplicitLod"; - case 92: return "OpImageSampleProjExplicitLod"; - case 93: return "OpImageSampleProjDrefImplicitLod"; - case 94: return "OpImageSampleProjDrefExplicitLod"; - case 95: return "OpImageFetch"; - case 96: return "OpImageGather"; - case 97: return "OpImageDrefGather"; - case 98: return "OpImageRead"; - case 99: return "OpImageWrite"; - case 100: return "OpImage"; - case 101: return "OpImageQueryFormat"; - case 102: return "OpImageQueryOrder"; - case 103: return "OpImageQuerySizeLod"; - case 104: return "OpImageQuerySize"; - case 105: return "OpImageQueryLod"; - case 106: return "OpImageQueryLevels"; - case 107: return "OpImageQuerySamples"; - case 108: return "Bad"; - case 
109: return "OpConvertFToU"; - case 110: return "OpConvertFToS"; - case 111: return "OpConvertSToF"; - case 112: return "OpConvertUToF"; - case 113: return "OpUConvert"; - case 114: return "OpSConvert"; - case 115: return "OpFConvert"; - case 116: return "OpQuantizeToF16"; - case 117: return "OpConvertPtrToU"; - case 118: return "OpSatConvertSToU"; - case 119: return "OpSatConvertUToS"; - case 120: return "OpConvertUToPtr"; - case 121: return "OpPtrCastToGeneric"; - case 122: return "OpGenericCastToPtr"; - case 123: return "OpGenericCastToPtrExplicit"; - case 124: return "OpBitcast"; - case 125: return "Bad"; - case 126: return "OpSNegate"; - case 127: return "OpFNegate"; - case 128: return "OpIAdd"; - case 129: return "OpFAdd"; - case 130: return "OpISub"; - case 131: return "OpFSub"; - case 132: return "OpIMul"; - case 133: return "OpFMul"; - case 134: return "OpUDiv"; - case 135: return "OpSDiv"; - case 136: return "OpFDiv"; - case 137: return "OpUMod"; - case 138: return "OpSRem"; - case 139: return "OpSMod"; - case 140: return "OpFRem"; - case 141: return "OpFMod"; - case 142: return "OpVectorTimesScalar"; - case 143: return "OpMatrixTimesScalar"; - case 144: return "OpVectorTimesMatrix"; - case 145: return "OpMatrixTimesVector"; - case 146: return "OpMatrixTimesMatrix"; - case 147: return "OpOuterProduct"; - case 148: return "OpDot"; - case 149: return "OpIAddCarry"; - case 150: return "OpISubBorrow"; - case 151: return "OpUMulExtended"; - case 152: return "OpSMulExtended"; - case 153: return "Bad"; - case 154: return "OpAny"; - case 155: return "OpAll"; - case 156: return "OpIsNan"; - case 157: return "OpIsInf"; - case 158: return "OpIsFinite"; - case 159: return "OpIsNormal"; - case 160: return "OpSignBitSet"; - case 161: return "OpLessOrGreater"; - case 162: return "OpOrdered"; - case 163: return "OpUnordered"; - case 164: return "OpLogicalEqual"; - case 165: return "OpLogicalNotEqual"; - case 166: return "OpLogicalOr"; - case 167: return "OpLogicalAnd"; - case 168: return "OpLogicalNot"; - case 169: return "OpSelect"; - case 170: return "OpIEqual"; - case 171: return "OpINotEqual"; - case 172: return "OpUGreaterThan"; - case 173: return "OpSGreaterThan"; - case 174: return "OpUGreaterThanEqual"; - case 175: return "OpSGreaterThanEqual"; - case 176: return "OpULessThan"; - case 177: return "OpSLessThan"; - case 178: return "OpULessThanEqual"; - case 179: return "OpSLessThanEqual"; - case 180: return "OpFOrdEqual"; - case 181: return "OpFUnordEqual"; - case 182: return "OpFOrdNotEqual"; - case 183: return "OpFUnordNotEqual"; - case 184: return "OpFOrdLessThan"; - case 185: return "OpFUnordLessThan"; - case 186: return "OpFOrdGreaterThan"; - case 187: return "OpFUnordGreaterThan"; - case 188: return "OpFOrdLessThanEqual"; - case 189: return "OpFUnordLessThanEqual"; - case 190: return "OpFOrdGreaterThanEqual"; - case 191: return "OpFUnordGreaterThanEqual"; - case 192: return "Bad"; - case 193: return "Bad"; - case 194: return "OpShiftRightLogical"; - case 195: return "OpShiftRightArithmetic"; - case 196: return "OpShiftLeftLogical"; - case 197: return "OpBitwiseOr"; - case 198: return "OpBitwiseXor"; - case 199: return "OpBitwiseAnd"; - case 200: return "OpNot"; - case 201: return "OpBitFieldInsert"; - case 202: return "OpBitFieldSExtract"; - case 203: return "OpBitFieldUExtract"; - case 204: return "OpBitReverse"; - case 205: return "OpBitCount"; - case 206: return "Bad"; - case 207: return "OpDPdx"; - case 208: return "OpDPdy"; - case 209: return "OpFwidth"; - case 210: return 
"OpDPdxFine"; - case 211: return "OpDPdyFine"; - case 212: return "OpFwidthFine"; - case 213: return "OpDPdxCoarse"; - case 214: return "OpDPdyCoarse"; - case 215: return "OpFwidthCoarse"; - case 216: return "Bad"; - case 217: return "Bad"; - case 218: return "OpEmitVertex"; - case 219: return "OpEndPrimitive"; - case 220: return "OpEmitStreamVertex"; - case 221: return "OpEndStreamPrimitive"; - case 222: return "Bad"; - case 223: return "Bad"; - case 224: return "OpControlBarrier"; - case 225: return "OpMemoryBarrier"; - case 226: return "Bad"; - case 227: return "OpAtomicLoad"; - case 228: return "OpAtomicStore"; - case 229: return "OpAtomicExchange"; - case 230: return "OpAtomicCompareExchange"; - case 231: return "OpAtomicCompareExchangeWeak"; - case 232: return "OpAtomicIIncrement"; - case 233: return "OpAtomicIDecrement"; - case 234: return "OpAtomicIAdd"; - case 235: return "OpAtomicISub"; - case 236: return "OpAtomicSMin"; - case 237: return "OpAtomicUMin"; - case 238: return "OpAtomicSMax"; - case 239: return "OpAtomicUMax"; - case 240: return "OpAtomicAnd"; - case 241: return "OpAtomicOr"; - case 242: return "OpAtomicXor"; - case 243: return "Bad"; - case 244: return "Bad"; - case 245: return "OpPhi"; - case 246: return "OpLoopMerge"; - case 247: return "OpSelectionMerge"; - case 248: return "OpLabel"; - case 249: return "OpBranch"; - case 250: return "OpBranchConditional"; - case 251: return "OpSwitch"; - case 252: return "OpKill"; - case 253: return "OpReturn"; - case 254: return "OpReturnValue"; - case 255: return "OpUnreachable"; - case 256: return "OpLifetimeStart"; - case 257: return "OpLifetimeStop"; - case 258: return "Bad"; - case 259: return "OpGroupAsyncCopy"; - case 260: return "OpGroupWaitEvents"; - case 261: return "OpGroupAll"; - case 262: return "OpGroupAny"; - case 263: return "OpGroupBroadcast"; - case 264: return "OpGroupIAdd"; - case 265: return "OpGroupFAdd"; - case 266: return "OpGroupFMin"; - case 267: return "OpGroupUMin"; - case 268: return "OpGroupSMin"; - case 269: return "OpGroupFMax"; - case 270: return "OpGroupUMax"; - case 271: return "OpGroupSMax"; - case 272: return "Bad"; - case 273: return "Bad"; - case 274: return "OpReadPipe"; - case 275: return "OpWritePipe"; - case 276: return "OpReservedReadPipe"; - case 277: return "OpReservedWritePipe"; - case 278: return "OpReserveReadPipePackets"; - case 279: return "OpReserveWritePipePackets"; - case 280: return "OpCommitReadPipe"; - case 281: return "OpCommitWritePipe"; - case 282: return "OpIsValidReserveId"; - case 283: return "OpGetNumPipePackets"; - case 284: return "OpGetMaxPipePackets"; - case 285: return "OpGroupReserveReadPipePackets"; - case 286: return "OpGroupReserveWritePipePackets"; - case 287: return "OpGroupCommitReadPipe"; - case 288: return "OpGroupCommitWritePipe"; - case 289: return "Bad"; - case 290: return "Bad"; - case 291: return "OpEnqueueMarker"; - case 292: return "OpEnqueueKernel"; - case 293: return "OpGetKernelNDrangeSubGroupCount"; - case 294: return "OpGetKernelNDrangeMaxSubGroupSize"; - case 295: return "OpGetKernelWorkGroupSize"; - case 296: return "OpGetKernelPreferredWorkGroupSizeMultiple"; - case 297: return "OpRetainEvent"; - case 298: return "OpReleaseEvent"; - case 299: return "OpCreateUserEvent"; - case 300: return "OpIsValidEvent"; - case 301: return "OpSetUserEventStatus"; - case 302: return "OpCaptureEventProfilingInfo"; - case 303: return "OpGetDefaultQueue"; - case 304: return "OpBuildNDRange"; - case 305: return "OpImageSparseSampleImplicitLod"; - case 
306: return "OpImageSparseSampleExplicitLod"; - case 307: return "OpImageSparseSampleDrefImplicitLod"; - case 308: return "OpImageSparseSampleDrefExplicitLod"; - case 309: return "OpImageSparseSampleProjImplicitLod"; - case 310: return "OpImageSparseSampleProjExplicitLod"; - case 311: return "OpImageSparseSampleProjDrefImplicitLod"; - case 312: return "OpImageSparseSampleProjDrefExplicitLod"; - case 313: return "OpImageSparseFetch"; - case 314: return "OpImageSparseGather"; - case 315: return "OpImageSparseDrefGather"; - case 316: return "OpImageSparseTexelsResident"; - case 317: return "OpNoLine"; - case 318: return "OpAtomicFlagTestAndSet"; - case 319: return "OpAtomicFlagClear"; - case 320: return "OpImageSparseRead"; - - case 4421: return "OpSubgroupBallotKHR"; - case 4422: return "OpSubgroupFirstInvocationKHR"; - case 4428: return "OpSubgroupAllKHR"; - case 4429: return "OpSubgroupAnyKHR"; - case 4430: return "OpSubgroupAllEqualKHR"; - case 4432: return "OpSubgroupReadInvocationKHR"; - -#ifdef AMD_EXTENSIONS - case 5000: return "OpGroupIAddNonUniformAMD"; - case 5001: return "OpGroupFAddNonUniformAMD"; - case 5002: return "OpGroupFMinNonUniformAMD"; - case 5003: return "OpGroupUMinNonUniformAMD"; - case 5004: return "OpGroupSMinNonUniformAMD"; - case 5005: return "OpGroupFMaxNonUniformAMD"; - case 5006: return "OpGroupUMaxNonUniformAMD"; - case 5007: return "OpGroupSMaxNonUniformAMD"; -#endif - - case OpcodeCeiling: - default: - return "Bad"; - } -} - -// The set of objects that hold all the instruction/operand -// parameterization information. -InstructionParameters InstructionDesc[OpCodeMask + 1]; -OperandParameters ExecutionModeOperands[ExecutionModeCeiling]; -OperandParameters DecorationOperands[DecorationCeiling]; - -EnumDefinition OperandClassParams[OperandCount]; -EnumParameters ExecutionModelParams[ExecutionModelCeiling]; -EnumParameters AddressingParams[AddressingModelCeiling]; -EnumParameters MemoryParams[MemoryModelCeiling]; -EnumParameters ExecutionModeParams[ExecutionModeCeiling]; -EnumParameters StorageParams[StorageClassCeiling]; -EnumParameters SamplerAddressingModeParams[SamplerAddressingModeCeiling]; -EnumParameters SamplerFilterModeParams[SamplerFilterModeCeiling]; -EnumParameters ImageFormatParams[ImageFormatCeiling]; -EnumParameters ImageChannelOrderParams[ImageChannelOrderCeiling]; -EnumParameters ImageChannelDataTypeParams[ImageChannelDataTypeCeiling]; -EnumParameters ImageOperandsParams[ImageOperandsCeiling]; -EnumParameters FPFastMathParams[FPFastMathCeiling]; -EnumParameters FPRoundingModeParams[FPRoundingModeCeiling]; -EnumParameters LinkageTypeParams[LinkageTypeCeiling]; -EnumParameters DecorationParams[DecorationCeiling]; -EnumParameters BuiltInParams[BuiltInCeiling]; -EnumParameters DimensionalityParams[DimensionCeiling]; -EnumParameters FuncParamAttrParams[FuncParamAttrCeiling]; -EnumParameters AccessQualifierParams[AccessQualifierCeiling]; -EnumParameters GroupOperationParams[GroupOperationCeiling]; -EnumParameters LoopControlParams[FunctionControlCeiling]; -EnumParameters SelectionControlParams[SelectControlCeiling]; -EnumParameters FunctionControlParams[FunctionControlCeiling]; -EnumParameters MemorySemanticsParams[MemorySemanticsCeiling]; -EnumParameters MemoryAccessParams[MemoryAccessCeiling]; -EnumParameters ScopeParams[ScopeCeiling]; -EnumParameters KernelEnqueueFlagsParams[KernelEnqueueFlagsCeiling]; -EnumParameters KernelProfilingInfoParams[KernelProfilingInfoCeiling]; -EnumParameters CapabilityParams[CapabilityCeiling]; - -// Set up all the 
parameterizing descriptions of the opcodes, operands, etc. -void Parameterize() -{ - // only do this once. - static bool initialized = false; - if (initialized) - return; - initialized = true; - - // Exceptions to having a result and a resulting type . - // (Everything is initialized to have both). - - InstructionDesc[OpNop].setResultAndType(false, false); - InstructionDesc[OpSource].setResultAndType(false, false); - InstructionDesc[OpSourceContinued].setResultAndType(false, false); - InstructionDesc[OpSourceExtension].setResultAndType(false, false); - InstructionDesc[OpExtension].setResultAndType(false, false); - InstructionDesc[OpExtInstImport].setResultAndType(true, false); - InstructionDesc[OpCapability].setResultAndType(false, false); - InstructionDesc[OpMemoryModel].setResultAndType(false, false); - InstructionDesc[OpEntryPoint].setResultAndType(false, false); - InstructionDesc[OpExecutionMode].setResultAndType(false, false); - InstructionDesc[OpTypeVoid].setResultAndType(true, false); - InstructionDesc[OpTypeBool].setResultAndType(true, false); - InstructionDesc[OpTypeInt].setResultAndType(true, false); - InstructionDesc[OpTypeFloat].setResultAndType(true, false); - InstructionDesc[OpTypeVector].setResultAndType(true, false); - InstructionDesc[OpTypeMatrix].setResultAndType(true, false); - InstructionDesc[OpTypeImage].setResultAndType(true, false); - InstructionDesc[OpTypeSampler].setResultAndType(true, false); - InstructionDesc[OpTypeSampledImage].setResultAndType(true, false); - InstructionDesc[OpTypeArray].setResultAndType(true, false); - InstructionDesc[OpTypeRuntimeArray].setResultAndType(true, false); - InstructionDesc[OpTypeStruct].setResultAndType(true, false); - InstructionDesc[OpTypeOpaque].setResultAndType(true, false); - InstructionDesc[OpTypePointer].setResultAndType(true, false); - InstructionDesc[OpTypeForwardPointer].setResultAndType(false, false); - InstructionDesc[OpTypeFunction].setResultAndType(true, false); - InstructionDesc[OpTypeEvent].setResultAndType(true, false); - InstructionDesc[OpTypeDeviceEvent].setResultAndType(true, false); - InstructionDesc[OpTypeReserveId].setResultAndType(true, false); - InstructionDesc[OpTypeQueue].setResultAndType(true, false); - InstructionDesc[OpTypePipe].setResultAndType(true, false); - InstructionDesc[OpFunctionEnd].setResultAndType(false, false); - InstructionDesc[OpStore].setResultAndType(false, false); - InstructionDesc[OpImageWrite].setResultAndType(false, false); - InstructionDesc[OpDecorationGroup].setResultAndType(true, false); - InstructionDesc[OpDecorate].setResultAndType(false, false); - InstructionDesc[OpMemberDecorate].setResultAndType(false, false); - InstructionDesc[OpGroupDecorate].setResultAndType(false, false); - InstructionDesc[OpGroupMemberDecorate].setResultAndType(false, false); - InstructionDesc[OpName].setResultAndType(false, false); - InstructionDesc[OpMemberName].setResultAndType(false, false); - InstructionDesc[OpString].setResultAndType(true, false); - InstructionDesc[OpLine].setResultAndType(false, false); - InstructionDesc[OpNoLine].setResultAndType(false, false); - InstructionDesc[OpCopyMemory].setResultAndType(false, false); - InstructionDesc[OpCopyMemorySized].setResultAndType(false, false); - InstructionDesc[OpEmitVertex].setResultAndType(false, false); - InstructionDesc[OpEndPrimitive].setResultAndType(false, false); - InstructionDesc[OpEmitStreamVertex].setResultAndType(false, false); - InstructionDesc[OpEndStreamPrimitive].setResultAndType(false, false); - 
InstructionDesc[OpControlBarrier].setResultAndType(false, false); - InstructionDesc[OpMemoryBarrier].setResultAndType(false, false); - InstructionDesc[OpAtomicStore].setResultAndType(false, false); - InstructionDesc[OpLoopMerge].setResultAndType(false, false); - InstructionDesc[OpSelectionMerge].setResultAndType(false, false); - InstructionDesc[OpLabel].setResultAndType(true, false); - InstructionDesc[OpBranch].setResultAndType(false, false); - InstructionDesc[OpBranchConditional].setResultAndType(false, false); - InstructionDesc[OpSwitch].setResultAndType(false, false); - InstructionDesc[OpKill].setResultAndType(false, false); - InstructionDesc[OpReturn].setResultAndType(false, false); - InstructionDesc[OpReturnValue].setResultAndType(false, false); - InstructionDesc[OpUnreachable].setResultAndType(false, false); - InstructionDesc[OpLifetimeStart].setResultAndType(false, false); - InstructionDesc[OpLifetimeStop].setResultAndType(false, false); - InstructionDesc[OpCommitReadPipe].setResultAndType(false, false); - InstructionDesc[OpCommitWritePipe].setResultAndType(false, false); - InstructionDesc[OpGroupCommitWritePipe].setResultAndType(false, false); - InstructionDesc[OpGroupCommitReadPipe].setResultAndType(false, false); - InstructionDesc[OpCaptureEventProfilingInfo].setResultAndType(false, false); - InstructionDesc[OpSetUserEventStatus].setResultAndType(false, false); - InstructionDesc[OpRetainEvent].setResultAndType(false, false); - InstructionDesc[OpReleaseEvent].setResultAndType(false, false); - InstructionDesc[OpGroupWaitEvents].setResultAndType(false, false); - InstructionDesc[OpAtomicFlagClear].setResultAndType(false, false); - - // Specific additional context-dependent operands - - ExecutionModeOperands[ExecutionModeInvocations].push(OperandLiteralNumber, "'Number of <>'"); - - ExecutionModeOperands[ExecutionModeLocalSize].push(OperandLiteralNumber, "'x size'"); - ExecutionModeOperands[ExecutionModeLocalSize].push(OperandLiteralNumber, "'y size'"); - ExecutionModeOperands[ExecutionModeLocalSize].push(OperandLiteralNumber, "'z size'"); - - ExecutionModeOperands[ExecutionModeLocalSizeHint].push(OperandLiteralNumber, "'x size'"); - ExecutionModeOperands[ExecutionModeLocalSizeHint].push(OperandLiteralNumber, "'y size'"); - ExecutionModeOperands[ExecutionModeLocalSizeHint].push(OperandLiteralNumber, "'z size'"); - - ExecutionModeOperands[ExecutionModeOutputVertices].push(OperandLiteralNumber, "'Vertex count'"); - ExecutionModeOperands[ExecutionModeVecTypeHint].push(OperandLiteralNumber, "'Vector type'"); - - DecorationOperands[DecorationStream].push(OperandLiteralNumber, "'Stream Number'"); - DecorationOperands[DecorationLocation].push(OperandLiteralNumber, "'Location'"); - DecorationOperands[DecorationComponent].push(OperandLiteralNumber, "'Component'"); - DecorationOperands[DecorationIndex].push(OperandLiteralNumber, "'Index'"); - DecorationOperands[DecorationBinding].push(OperandLiteralNumber, "'Binding Point'"); - DecorationOperands[DecorationDescriptorSet].push(OperandLiteralNumber, "'Descriptor Set'"); - DecorationOperands[DecorationOffset].push(OperandLiteralNumber, "'Byte Offset'"); - DecorationOperands[DecorationXfbBuffer].push(OperandLiteralNumber, "'XFB Buffer Number'"); - DecorationOperands[DecorationXfbStride].push(OperandLiteralNumber, "'XFB Stride'"); - DecorationOperands[DecorationArrayStride].push(OperandLiteralNumber, "'Array Stride'"); - DecorationOperands[DecorationMatrixStride].push(OperandLiteralNumber, "'Matrix Stride'"); - 
DecorationOperands[DecorationBuiltIn].push(OperandLiteralNumber, "See <>"); - DecorationOperands[DecorationFPRoundingMode].push(OperandFPRoundingMode, "'Floating-Point Rounding Mode'"); - DecorationOperands[DecorationFPFastMathMode].push(OperandFPFastMath, "'Fast-Math Mode'"); - DecorationOperands[DecorationLinkageAttributes].push(OperandLiteralString, "'Name'"); - DecorationOperands[DecorationLinkageAttributes].push(OperandLinkageType, "'Linkage Type'"); - DecorationOperands[DecorationFuncParamAttr].push(OperandFuncParamAttr, "'Function Parameter Attribute'"); - DecorationOperands[DecorationSpecId].push(OperandLiteralNumber, "'Specialization Constant ID'"); - DecorationOperands[DecorationInputAttachmentIndex].push(OperandLiteralNumber, "'Attachment Index'"); - DecorationOperands[DecorationAlignment].push(OperandLiteralNumber, "'Alignment'"); - - OperandClassParams[OperandSource].set(SourceLanguageCeiling, SourceString, 0); - OperandClassParams[OperandExecutionModel].set(ExecutionModelCeiling, ExecutionModelString, ExecutionModelParams); - OperandClassParams[OperandAddressing].set(AddressingModelCeiling, AddressingString, AddressingParams); - OperandClassParams[OperandMemory].set(MemoryModelCeiling, MemoryString, MemoryParams); - OperandClassParams[OperandExecutionMode].set(ExecutionModeCeiling, ExecutionModeString, ExecutionModeParams); - OperandClassParams[OperandExecutionMode].setOperands(ExecutionModeOperands); - OperandClassParams[OperandStorage].set(StorageClassCeiling, StorageClassString, StorageParams); - OperandClassParams[OperandDimensionality].set(DimensionCeiling, DimensionString, DimensionalityParams); - OperandClassParams[OperandSamplerAddressingMode].set(SamplerAddressingModeCeiling, SamplerAddressingModeString, SamplerAddressingModeParams); - OperandClassParams[OperandSamplerFilterMode].set(SamplerFilterModeCeiling, SamplerFilterModeString, SamplerFilterModeParams); - OperandClassParams[OperandSamplerImageFormat].set(ImageFormatCeiling, ImageFormatString, ImageFormatParams); - OperandClassParams[OperandImageChannelOrder].set(ImageChannelOrderCeiling, ImageChannelOrderString, ImageChannelOrderParams); - OperandClassParams[OperandImageChannelDataType].set(ImageChannelDataTypeCeiling, ImageChannelDataTypeString, ImageChannelDataTypeParams); - OperandClassParams[OperandImageOperands].set(ImageOperandsCeiling, ImageOperandsString, ImageOperandsParams, true); - OperandClassParams[OperandFPFastMath].set(FPFastMathCeiling, FPFastMathString, FPFastMathParams, true); - OperandClassParams[OperandFPRoundingMode].set(FPRoundingModeCeiling, FPRoundingModeString, FPRoundingModeParams); - OperandClassParams[OperandLinkageType].set(LinkageTypeCeiling, LinkageTypeString, LinkageTypeParams); - OperandClassParams[OperandFuncParamAttr].set(FuncParamAttrCeiling, FuncParamAttrString, FuncParamAttrParams); - OperandClassParams[OperandAccessQualifier].set(AccessQualifierCeiling, AccessQualifierString, AccessQualifierParams); - OperandClassParams[OperandDecoration].set(DecorationCeiling, DecorationString, DecorationParams); - OperandClassParams[OperandDecoration].setOperands(DecorationOperands); - OperandClassParams[OperandBuiltIn].set(BuiltInCeiling, BuiltInString, BuiltInParams); - OperandClassParams[OperandSelect].set(SelectControlCeiling, SelectControlString, SelectionControlParams, true); - OperandClassParams[OperandLoop].set(LoopControlCeiling, LoopControlString, LoopControlParams, true); - OperandClassParams[OperandFunction].set(FunctionControlCeiling, FunctionControlString, 
FunctionControlParams, true); - OperandClassParams[OperandMemorySemantics].set(MemorySemanticsCeiling, MemorySemanticsString, MemorySemanticsParams, true); - OperandClassParams[OperandMemoryAccess].set(MemoryAccessCeiling, MemoryAccessString, MemoryAccessParams, true); - OperandClassParams[OperandScope].set(ScopeCeiling, ScopeString, ScopeParams); - OperandClassParams[OperandGroupOperation].set(GroupOperationCeiling, GroupOperationString, GroupOperationParams); - OperandClassParams[OperandKernelEnqueueFlags].set(KernelEnqueueFlagsCeiling, KernelEnqueueFlagsString, KernelEnqueueFlagsParams); - OperandClassParams[OperandKernelProfilingInfo].set(KernelProfilingInfoCeiling, KernelProfilingInfoString, KernelProfilingInfoParams, true); - OperandClassParams[OperandCapability].set(CapabilityCeiling, CapabilityString, CapabilityParams); - OperandClassParams[OperandOpcode].set(OpcodeCeiling, OpcodeString, 0); - - CapabilityParams[CapabilityShader].caps.push_back(CapabilityMatrix); - CapabilityParams[CapabilityGeometry].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityTessellation].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityVector16].caps.push_back(CapabilityKernel); - CapabilityParams[CapabilityFloat16Buffer].caps.push_back(CapabilityKernel); - CapabilityParams[CapabilityInt64Atomics].caps.push_back(CapabilityInt64); - CapabilityParams[CapabilityImageBasic].caps.push_back(CapabilityKernel); - CapabilityParams[CapabilityImageReadWrite].caps.push_back(CapabilityImageBasic); - CapabilityParams[CapabilityImageMipmap].caps.push_back(CapabilityImageBasic); - CapabilityParams[CapabilityPipes].caps.push_back(CapabilityKernel); - CapabilityParams[CapabilityDeviceEnqueue].caps.push_back(CapabilityKernel); - CapabilityParams[CapabilityLiteralSampler].caps.push_back(CapabilityKernel); - CapabilityParams[CapabilityAtomicStorage].caps.push_back(CapabilityShader); - CapabilityParams[CapabilitySampleRateShading].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityTessellationPointSize].caps.push_back(CapabilityTessellation); - CapabilityParams[CapabilityGeometryPointSize].caps.push_back(CapabilityGeometry); - CapabilityParams[CapabilityImageGatherExtended].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityStorageImageExtendedFormats].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityStorageImageMultisample].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityUniformBufferArrayDynamicIndexing].caps.push_back(CapabilityShader); - CapabilityParams[CapabilitySampledImageArrayDynamicIndexing].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityStorageBufferArrayDynamicIndexing].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityStorageImageArrayDynamicIndexing].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityClipDistance].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityCullDistance].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityGenericPointer].caps.push_back(CapabilityAddresses); - CapabilityParams[CapabilityInt8].caps.push_back(CapabilityKernel); - CapabilityParams[CapabilityInputAttachment].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityMinLod].caps.push_back(CapabilityShader); - CapabilityParams[CapabilitySparseResidency].caps.push_back(CapabilityShader); - CapabilityParams[CapabilitySampled1D].caps.push_back(CapabilityShader); - CapabilityParams[CapabilitySampledRect].caps.push_back(CapabilityShader); - 
CapabilityParams[CapabilitySampledBuffer].caps.push_back(CapabilityShader); - CapabilityParams[CapabilitySampledCubeArray].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityImageMSArray].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityImage1D].caps.push_back(CapabilitySampled1D); - CapabilityParams[CapabilityImageRect].caps.push_back(CapabilitySampledRect); - CapabilityParams[CapabilityImageBuffer].caps.push_back(CapabilitySampledBuffer); - CapabilityParams[CapabilityImageCubeArray].caps.push_back(CapabilitySampledCubeArray); - CapabilityParams[CapabilityImageQuery].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityDerivativeControl].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityInterpolationFunction].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityTransformFeedback].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityGeometryStreams].caps.push_back(CapabilityGeometry); - CapabilityParams[CapabilityStorageImageReadWithoutFormat].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityStorageImageWriteWithoutFormat].caps.push_back(CapabilityShader); - CapabilityParams[CapabilityMultiViewport].caps.push_back(CapabilityGeometry); - - AddressingParams[AddressingModelPhysical32].caps.push_back(CapabilityAddresses); - AddressingParams[AddressingModelPhysical64].caps.push_back(CapabilityAddresses); - - MemoryParams[MemoryModelSimple].caps.push_back(CapabilityShader); - MemoryParams[MemoryModelGLSL450].caps.push_back(CapabilityShader); - MemoryParams[MemoryModelOpenCL].caps.push_back(CapabilityKernel); - - MemorySemanticsParams[MemorySemanticsUniformMemoryShift].caps.push_back(CapabilityShader); - MemorySemanticsParams[MemorySemanticsAtomicCounterMemoryShift].caps.push_back(CapabilityAtomicStorage); - - ExecutionModelParams[ExecutionModelVertex].caps.push_back(CapabilityShader); - ExecutionModelParams[ExecutionModelTessellationControl].caps.push_back(CapabilityTessellation); - ExecutionModelParams[ExecutionModelTessellationEvaluation].caps.push_back(CapabilityTessellation); - ExecutionModelParams[ExecutionModelGeometry].caps.push_back(CapabilityGeometry); - ExecutionModelParams[ExecutionModelFragment].caps.push_back(CapabilityShader); - ExecutionModelParams[ExecutionModelGLCompute].caps.push_back(CapabilityShader); - ExecutionModelParams[ExecutionModelKernel].caps.push_back(CapabilityKernel); - - // Storage capabilites - StorageParams[StorageClassInput].caps.push_back(CapabilityShader); - StorageParams[StorageClassUniform].caps.push_back(CapabilityShader); - StorageParams[StorageClassOutput].caps.push_back(CapabilityShader); - StorageParams[StorageClassPrivate].caps.push_back(CapabilityShader); - StorageParams[StorageClassGeneric].caps.push_back(CapabilityKernel); - StorageParams[StorageClassAtomicCounter].caps.push_back(CapabilityAtomicStorage); - StorageParams[StorageClassPushConstant].caps.push_back(CapabilityShader); - - // Sampler Filter & Addressing mode capabilities - SamplerAddressingModeParams[SamplerAddressingModeNone].caps.push_back(CapabilityKernel); - SamplerAddressingModeParams[SamplerAddressingModeClampToEdge].caps.push_back(CapabilityKernel); - SamplerAddressingModeParams[SamplerAddressingModeClamp].caps.push_back(CapabilityKernel); - SamplerAddressingModeParams[SamplerAddressingModeRepeat].caps.push_back(CapabilityKernel); - SamplerAddressingModeParams[SamplerAddressingModeRepeatMirrored].caps.push_back(CapabilityKernel); - - 
SamplerFilterModeParams[SamplerFilterModeNearest].caps.push_back(CapabilityKernel); - SamplerFilterModeParams[SamplerFilterModeLinear].caps.push_back(CapabilityKernel); - - // image format capabilities - - // ES/Desktop float - ImageFormatParams[ImageFormatRgba32f].caps.push_back(CapabilityShader); - ImageFormatParams[ImageFormatRgba16f].caps.push_back(CapabilityShader); - ImageFormatParams[ImageFormatR32f].caps.push_back(CapabilityShader); - ImageFormatParams[ImageFormatRgba8].caps.push_back(CapabilityShader); - ImageFormatParams[ImageFormatRgba8Snorm].caps.push_back(CapabilityShader); - - // Desktop float - ImageFormatParams[ImageFormatRg32f].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatRg16f].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatR11fG11fB10f].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatR16f].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatRgba16].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatRgb10A2].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatRg16].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatRg8].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatR16].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatR8].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatRgba16Snorm].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatRg16Snorm].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatRg8Snorm].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatR16Snorm].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatR8Snorm].caps.push_back(CapabilityStorageImageExtendedFormats); - - // ES/Desktop int - ImageFormatParams[ImageFormatRgba32i].caps.push_back(CapabilityShader); - ImageFormatParams[ImageFormatRgba16i].caps.push_back(CapabilityShader); - ImageFormatParams[ImageFormatRgba8i].caps.push_back(CapabilityShader); - ImageFormatParams[ImageFormatR32i].caps.push_back(CapabilityShader); - - // Desktop int - ImageFormatParams[ImageFormatRg32i].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatRg16i].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatRg8i].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatR16i].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatR8i].caps.push_back(CapabilityStorageImageExtendedFormats); - - // ES/Desktop uint - ImageFormatParams[ImageFormatRgba32ui].caps.push_back(CapabilityShader); - ImageFormatParams[ImageFormatRgba16ui].caps.push_back(CapabilityShader); - ImageFormatParams[ImageFormatRgba8ui].caps.push_back(CapabilityShader); - ImageFormatParams[ImageFormatR32ui].caps.push_back(CapabilityShader); - - // Desktop uint - ImageFormatParams[ImageFormatRgb10a2ui].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatRg32ui].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatRg16ui].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatRg8ui].caps.push_back(CapabilityStorageImageExtendedFormats); - 
ImageFormatParams[ImageFormatR16ui].caps.push_back(CapabilityStorageImageExtendedFormats); - ImageFormatParams[ImageFormatR8ui].caps.push_back(CapabilityStorageImageExtendedFormats); - - // image channel order capabilities - for (int i = 0; i < ImageChannelOrderCeiling; ++i) { - ImageChannelOrderParams[i].caps.push_back(CapabilityKernel); - } - - // image channel type capabilities - for (int i = 0; i < ImageChannelDataTypeCeiling; ++i) { - ImageChannelDataTypeParams[i].caps.push_back(CapabilityKernel); - } - - // image lookup operands - ImageOperandsParams[ImageOperandsBiasShift].caps.push_back(CapabilityShader); - ImageOperandsParams[ImageOperandsOffsetShift].caps.push_back(CapabilityImageGatherExtended); - ImageOperandsParams[ImageOperandsMinLodShift].caps.push_back(CapabilityMinLod); - - // fast math flags capabilities - for (int i = 0; i < FPFastMathCeiling; ++i) { - FPFastMathParams[i].caps.push_back(CapabilityKernel); - } - - // fp rounding mode capabilities - for (int i = 0; i < FPRoundingModeCeiling; ++i) { - FPRoundingModeParams[i].caps.push_back(CapabilityKernel); - } - - // linkage types - for (int i = 0; i < LinkageTypeCeiling; ++i) { - LinkageTypeParams[i].caps.push_back(CapabilityLinkage); - } - - // function argument types - for (int i = 0; i < FuncParamAttrCeiling; ++i) { - FuncParamAttrParams[i].caps.push_back(CapabilityKernel); - } - - // function argument types - for (int i = 0; i < AccessQualifierCeiling; ++i) { - AccessQualifierParams[i].caps.push_back(CapabilityKernel); - } - - ExecutionModeParams[ExecutionModeInvocations].caps.push_back(CapabilityGeometry); - ExecutionModeParams[ExecutionModeSpacingEqual].caps.push_back(CapabilityTessellation); - ExecutionModeParams[ExecutionModeSpacingFractionalEven].caps.push_back(CapabilityTessellation); - ExecutionModeParams[ExecutionModeSpacingFractionalOdd].caps.push_back(CapabilityTessellation); - ExecutionModeParams[ExecutionModeVertexOrderCw].caps.push_back(CapabilityTessellation); - ExecutionModeParams[ExecutionModeVertexOrderCcw].caps.push_back(CapabilityTessellation); - ExecutionModeParams[ExecutionModePixelCenterInteger].caps.push_back(CapabilityShader); - ExecutionModeParams[ExecutionModeOriginUpperLeft].caps.push_back(CapabilityShader); - ExecutionModeParams[ExecutionModeOriginLowerLeft].caps.push_back(CapabilityShader); - ExecutionModeParams[ExecutionModeEarlyFragmentTests].caps.push_back(CapabilityShader); - ExecutionModeParams[ExecutionModePointMode].caps.push_back(CapabilityTessellation); - ExecutionModeParams[ExecutionModeXfb].caps.push_back(CapabilityTransformFeedback); - ExecutionModeParams[ExecutionModeDepthReplacing].caps.push_back(CapabilityShader); - ExecutionModeParams[ExecutionModeDepthGreater].caps.push_back(CapabilityShader); - ExecutionModeParams[ExecutionModeDepthLess].caps.push_back(CapabilityShader); - ExecutionModeParams[ExecutionModeDepthUnchanged].caps.push_back(CapabilityShader); - ExecutionModeParams[ExecutionModeLocalSizeHint].caps.push_back(CapabilityKernel); - ExecutionModeParams[ExecutionModeInputPoints].caps.push_back(CapabilityGeometry); - ExecutionModeParams[ExecutionModeInputLines].caps.push_back(CapabilityGeometry); - ExecutionModeParams[ExecutionModeInputLinesAdjacency].caps.push_back(CapabilityGeometry); - ExecutionModeParams[ExecutionModeTriangles].caps.push_back(CapabilityGeometry); - ExecutionModeParams[ExecutionModeTriangles].caps.push_back(CapabilityTessellation); - ExecutionModeParams[ExecutionModeInputTrianglesAdjacency].caps.push_back(CapabilityGeometry); - 
ExecutionModeParams[ExecutionModeQuads].caps.push_back(CapabilityTessellation); - ExecutionModeParams[ExecutionModeIsolines].caps.push_back(CapabilityTessellation); - ExecutionModeParams[ExecutionModeOutputVertices].caps.push_back(CapabilityGeometry); - ExecutionModeParams[ExecutionModeOutputVertices].caps.push_back(CapabilityTessellation); - ExecutionModeParams[ExecutionModeOutputPoints].caps.push_back(CapabilityGeometry); - ExecutionModeParams[ExecutionModeOutputLineStrip].caps.push_back(CapabilityGeometry); - ExecutionModeParams[ExecutionModeOutputTriangleStrip].caps.push_back(CapabilityGeometry); - ExecutionModeParams[ExecutionModeVecTypeHint].caps.push_back(CapabilityKernel); - ExecutionModeParams[ExecutionModeContractionOff].caps.push_back(CapabilityKernel); - - DecorationParams[DecorationRelaxedPrecision].caps.push_back(CapabilityShader); - DecorationParams[DecorationBlock].caps.push_back(CapabilityShader); - DecorationParams[DecorationBufferBlock].caps.push_back(CapabilityShader); - DecorationParams[DecorationRowMajor].caps.push_back(CapabilityMatrix); - DecorationParams[DecorationColMajor].caps.push_back(CapabilityMatrix); - DecorationParams[DecorationGLSLShared].caps.push_back(CapabilityShader); - DecorationParams[DecorationGLSLPacked].caps.push_back(CapabilityShader); - DecorationParams[DecorationNoPerspective].caps.push_back(CapabilityShader); - DecorationParams[DecorationFlat].caps.push_back(CapabilityShader); - DecorationParams[DecorationPatch].caps.push_back(CapabilityTessellation); - DecorationParams[DecorationCentroid].caps.push_back(CapabilityShader); - DecorationParams[DecorationSample].caps.push_back(CapabilitySampleRateShading); - DecorationParams[DecorationInvariant].caps.push_back(CapabilityShader); - DecorationParams[DecorationConstant].caps.push_back(CapabilityKernel); - DecorationParams[DecorationUniform].caps.push_back(CapabilityShader); - DecorationParams[DecorationCPacked].caps.push_back(CapabilityKernel); - DecorationParams[DecorationSaturatedConversion].caps.push_back(CapabilityKernel); - DecorationParams[DecorationStream].caps.push_back(CapabilityGeometryStreams); - DecorationParams[DecorationLocation].caps.push_back(CapabilityShader); - DecorationParams[DecorationComponent].caps.push_back(CapabilityShader); - DecorationParams[DecorationOffset].caps.push_back(CapabilityShader); - DecorationParams[DecorationIndex].caps.push_back(CapabilityShader); - DecorationParams[DecorationBinding].caps.push_back(CapabilityShader); - DecorationParams[DecorationDescriptorSet].caps.push_back(CapabilityShader); - DecorationParams[DecorationXfbBuffer].caps.push_back(CapabilityTransformFeedback); - DecorationParams[DecorationXfbStride].caps.push_back(CapabilityTransformFeedback); - DecorationParams[DecorationArrayStride].caps.push_back(CapabilityShader); - DecorationParams[DecorationMatrixStride].caps.push_back(CapabilityMatrix); - DecorationParams[DecorationFuncParamAttr].caps.push_back(CapabilityKernel); - DecorationParams[DecorationFPRoundingMode].caps.push_back(CapabilityKernel); - DecorationParams[DecorationFPFastMathMode].caps.push_back(CapabilityKernel); - DecorationParams[DecorationLinkageAttributes].caps.push_back(CapabilityLinkage); - DecorationParams[DecorationSpecId].caps.push_back(CapabilityShader); - DecorationParams[DecorationNoContraction].caps.push_back(CapabilityShader); - DecorationParams[DecorationInputAttachmentIndex].caps.push_back(CapabilityInputAttachment); - DecorationParams[DecorationAlignment].caps.push_back(CapabilityKernel); - - 
BuiltInParams[BuiltInPosition].caps.push_back(CapabilityShader); - BuiltInParams[BuiltInPointSize].caps.push_back(CapabilityShader); - BuiltInParams[BuiltInClipDistance].caps.push_back(CapabilityClipDistance); - BuiltInParams[BuiltInCullDistance].caps.push_back(CapabilityCullDistance); - - BuiltInParams[BuiltInVertexId].caps.push_back(CapabilityShader); - BuiltInParams[BuiltInVertexId].desc = "Vertex ID, which takes on values 0, 1, 2, . . . ."; - - BuiltInParams[BuiltInInstanceId].caps.push_back(CapabilityShader); - BuiltInParams[BuiltInInstanceId].desc = "Instance ID, which takes on values 0, 1, 2, . . . ."; - - BuiltInParams[BuiltInVertexIndex].caps.push_back(CapabilityShader); - BuiltInParams[BuiltInVertexIndex].desc = "Vertex index, which takes on values base, base+1, base+2, . . . ."; - - BuiltInParams[BuiltInInstanceIndex].caps.push_back(CapabilityShader); - BuiltInParams[BuiltInInstanceIndex].desc = "Instance index, which takes on values base, base+1, base+2, . . . ."; - - BuiltInParams[BuiltInPrimitiveId].caps.push_back(CapabilityGeometry); - BuiltInParams[BuiltInPrimitiveId].caps.push_back(CapabilityTessellation); - BuiltInParams[BuiltInInvocationId].caps.push_back(CapabilityGeometry); - BuiltInParams[BuiltInInvocationId].caps.push_back(CapabilityTessellation); - BuiltInParams[BuiltInLayer].caps.push_back(CapabilityGeometry); - BuiltInParams[BuiltInViewportIndex].caps.push_back(CapabilityMultiViewport); - BuiltInParams[BuiltInTessLevelOuter].caps.push_back(CapabilityTessellation); - BuiltInParams[BuiltInTessLevelInner].caps.push_back(CapabilityTessellation); - BuiltInParams[BuiltInTessCoord].caps.push_back(CapabilityTessellation); - BuiltInParams[BuiltInPatchVertices].caps.push_back(CapabilityTessellation); - BuiltInParams[BuiltInFragCoord].caps.push_back(CapabilityShader); - BuiltInParams[BuiltInPointCoord].caps.push_back(CapabilityShader); - BuiltInParams[BuiltInFrontFacing].caps.push_back(CapabilityShader); - BuiltInParams[BuiltInSampleId].caps.push_back(CapabilitySampleRateShading); - BuiltInParams[BuiltInSamplePosition].caps.push_back(CapabilitySampleRateShading); - BuiltInParams[BuiltInSampleMask].caps.push_back(CapabilitySampleRateShading); - BuiltInParams[BuiltInFragDepth].caps.push_back(CapabilityShader); - BuiltInParams[BuiltInHelperInvocation].caps.push_back(CapabilityShader); - BuiltInParams[BuiltInWorkDim].caps.push_back(CapabilityKernel); - BuiltInParams[BuiltInGlobalSize].caps.push_back(CapabilityKernel); - BuiltInParams[BuiltInEnqueuedWorkgroupSize].caps.push_back(CapabilityKernel); - BuiltInParams[BuiltInGlobalOffset].caps.push_back(CapabilityKernel); - BuiltInParams[BuiltInGlobalLinearId].caps.push_back(CapabilityKernel); - - BuiltInParams[BuiltInSubgroupSize].caps.push_back(CapabilityKernel); - BuiltInParams[BuiltInSubgroupMaxSize].caps.push_back(CapabilityKernel); - BuiltInParams[BuiltInNumSubgroups].caps.push_back(CapabilityKernel); - BuiltInParams[BuiltInNumEnqueuedSubgroups].caps.push_back(CapabilityKernel); - BuiltInParams[BuiltInSubgroupId].caps.push_back(CapabilityKernel); - BuiltInParams[BuiltInSubgroupLocalInvocationId].caps.push_back(CapabilityKernel); - - DimensionalityParams[Dim1D].caps.push_back(CapabilitySampled1D); - DimensionalityParams[DimCube].caps.push_back(CapabilityShader); - DimensionalityParams[DimRect].caps.push_back(CapabilitySampledRect); - DimensionalityParams[DimBuffer].caps.push_back(CapabilitySampledBuffer); - DimensionalityParams[DimSubpassData].caps.push_back(CapabilityInputAttachment); - - // Group Operations - for (int i = 0; i 
< GroupOperationCeiling; ++i) { - GroupOperationParams[i].caps.push_back(CapabilityKernel); - } - - // Enqueue flags - for (int i = 0; i < KernelEnqueueFlagsCeiling; ++i) { - KernelEnqueueFlagsParams[i].caps.push_back(CapabilityKernel); - } - - // Profiling info - KernelProfilingInfoParams[0].caps.push_back(CapabilityKernel); - - // set name of operator, an initial set of style operands, and the description - - InstructionDesc[OpSource].operands.push(OperandSource, ""); - InstructionDesc[OpSource].operands.push(OperandLiteralNumber, "'Version'"); - InstructionDesc[OpSource].operands.push(OperandId, "'File'", true); - InstructionDesc[OpSource].operands.push(OperandLiteralString, "'Source'", true); - - InstructionDesc[OpSourceContinued].operands.push(OperandLiteralString, "'Continued Source'"); - - InstructionDesc[OpSourceExtension].operands.push(OperandLiteralString, "'Extension'"); - - InstructionDesc[OpName].operands.push(OperandId, "'Target'"); - InstructionDesc[OpName].operands.push(OperandLiteralString, "'Name'"); - - InstructionDesc[OpMemberName].operands.push(OperandId, "'Type'"); - InstructionDesc[OpMemberName].operands.push(OperandLiteralNumber, "'Member'"); - InstructionDesc[OpMemberName].operands.push(OperandLiteralString, "'Name'"); - - InstructionDesc[OpString].operands.push(OperandLiteralString, "'String'"); - - InstructionDesc[OpLine].operands.push(OperandId, "'File'"); - InstructionDesc[OpLine].operands.push(OperandLiteralNumber, "'Line'"); - InstructionDesc[OpLine].operands.push(OperandLiteralNumber, "'Column'"); - - InstructionDesc[OpExtension].operands.push(OperandLiteralString, "'Name'"); - - InstructionDesc[OpExtInstImport].operands.push(OperandLiteralString, "'Name'"); - - InstructionDesc[OpCapability].operands.push(OperandCapability, "'Capability'"); - - InstructionDesc[OpMemoryModel].operands.push(OperandAddressing, ""); - InstructionDesc[OpMemoryModel].operands.push(OperandMemory, ""); - - InstructionDesc[OpEntryPoint].operands.push(OperandExecutionModel, ""); - InstructionDesc[OpEntryPoint].operands.push(OperandId, "'Entry Point'"); - InstructionDesc[OpEntryPoint].operands.push(OperandLiteralString, "'Name'"); - InstructionDesc[OpEntryPoint].operands.push(OperandVariableIds, "'Interface'"); - - InstructionDesc[OpExecutionMode].operands.push(OperandId, "'Entry Point'"); - InstructionDesc[OpExecutionMode].operands.push(OperandExecutionMode, "'Mode'"); - InstructionDesc[OpExecutionMode].operands.push(OperandOptionalLiteral, "See <>"); - - InstructionDesc[OpTypeInt].operands.push(OperandLiteralNumber, "'Width'"); - InstructionDesc[OpTypeInt].operands.push(OperandLiteralNumber, "'Signedness'"); - - InstructionDesc[OpTypeFloat].operands.push(OperandLiteralNumber, "'Width'"); - - InstructionDesc[OpTypeVector].operands.push(OperandId, "'Component Type'"); - InstructionDesc[OpTypeVector].operands.push(OperandLiteralNumber, "'Component Count'"); - - InstructionDesc[OpTypeMatrix].capabilities.push_back(CapabilityMatrix); - InstructionDesc[OpTypeMatrix].operands.push(OperandId, "'Column Type'"); - InstructionDesc[OpTypeMatrix].operands.push(OperandLiteralNumber, "'Column Count'"); - - InstructionDesc[OpTypeImage].operands.push(OperandId, "'Sampled Type'"); - InstructionDesc[OpTypeImage].operands.push(OperandDimensionality, ""); - InstructionDesc[OpTypeImage].operands.push(OperandLiteralNumber, "'Depth'"); - InstructionDesc[OpTypeImage].operands.push(OperandLiteralNumber, "'Arrayed'"); - InstructionDesc[OpTypeImage].operands.push(OperandLiteralNumber, "'MS'"); - 
InstructionDesc[OpTypeImage].operands.push(OperandLiteralNumber, "'Sampled'"); - InstructionDesc[OpTypeImage].operands.push(OperandSamplerImageFormat, ""); - InstructionDesc[OpTypeImage].operands.push(OperandAccessQualifier, "", true); - - InstructionDesc[OpTypeSampledImage].operands.push(OperandId, "'Image Type'"); - - InstructionDesc[OpTypeArray].operands.push(OperandId, "'Element Type'"); - InstructionDesc[OpTypeArray].operands.push(OperandId, "'Length'"); - - InstructionDesc[OpTypeRuntimeArray].capabilities.push_back(CapabilityShader); - InstructionDesc[OpTypeRuntimeArray].operands.push(OperandId, "'Element Type'"); - - InstructionDesc[OpTypeStruct].operands.push(OperandVariableIds, "'Member 0 type', +\n'member 1 type', +\n..."); - - InstructionDesc[OpTypeOpaque].capabilities.push_back(CapabilityKernel); - InstructionDesc[OpTypeOpaque].operands.push(OperandLiteralString, "The name of the opaque type."); - - InstructionDesc[OpTypePointer].operands.push(OperandStorage, ""); - InstructionDesc[OpTypePointer].operands.push(OperandId, "'Type'"); - - InstructionDesc[OpTypeForwardPointer].capabilities.push_back(CapabilityAddresses); - InstructionDesc[OpTypeForwardPointer].operands.push(OperandId, "'Pointer Type'"); - InstructionDesc[OpTypeForwardPointer].operands.push(OperandStorage, ""); - - InstructionDesc[OpTypeEvent].capabilities.push_back(CapabilityKernel); - - InstructionDesc[OpTypeDeviceEvent].capabilities.push_back(CapabilityDeviceEnqueue); - - InstructionDesc[OpTypeReserveId].capabilities.push_back(CapabilityPipes); - - InstructionDesc[OpTypeQueue].capabilities.push_back(CapabilityDeviceEnqueue); - - InstructionDesc[OpTypePipe].operands.push(OperandAccessQualifier, "'Qualifier'"); - InstructionDesc[OpTypePipe].capabilities.push_back(CapabilityPipes); - - InstructionDesc[OpTypeFunction].operands.push(OperandId, "'Return Type'"); - InstructionDesc[OpTypeFunction].operands.push(OperandVariableIds, "'Parameter 0 Type', +\n'Parameter 1 Type', +\n..."); - - InstructionDesc[OpConstant].operands.push(OperandVariableLiterals, "'Value'"); - - InstructionDesc[OpConstantComposite].operands.push(OperandVariableIds, "'Constituents'"); - - InstructionDesc[OpConstantSampler].capabilities.push_back(CapabilityLiteralSampler); - InstructionDesc[OpConstantSampler].operands.push(OperandSamplerAddressingMode, ""); - InstructionDesc[OpConstantSampler].operands.push(OperandLiteralNumber, "'Param'"); - InstructionDesc[OpConstantSampler].operands.push(OperandSamplerFilterMode, ""); - - InstructionDesc[OpSpecConstant].operands.push(OperandVariableLiterals, "'Value'"); - - InstructionDesc[OpSpecConstantComposite].operands.push(OperandVariableIds, "'Constituents'"); - - InstructionDesc[OpSpecConstantOp].operands.push(OperandLiteralNumber, "'Opcode'"); - InstructionDesc[OpSpecConstantOp].operands.push(OperandVariableIds, "'Operands'"); - - InstructionDesc[OpVariable].operands.push(OperandStorage, ""); - InstructionDesc[OpVariable].operands.push(OperandId, "'Initializer'", true); - - InstructionDesc[OpFunction].operands.push(OperandFunction, ""); - InstructionDesc[OpFunction].operands.push(OperandId, "'Function Type'"); - - InstructionDesc[OpFunctionCall].operands.push(OperandId, "'Function'"); - InstructionDesc[OpFunctionCall].operands.push(OperandVariableIds, "'Argument 0', +\n'Argument 1', +\n..."); - - InstructionDesc[OpExtInst].operands.push(OperandId, "'Set'"); - InstructionDesc[OpExtInst].operands.push(OperandLiteralNumber, "'Instruction'"); - InstructionDesc[OpExtInst].operands.push(OperandVariableIds, 
"'Operand 1', +\n'Operand 2', +\n..."); - - InstructionDesc[OpLoad].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpLoad].operands.push(OperandMemoryAccess, "", true); - - InstructionDesc[OpStore].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpStore].operands.push(OperandId, "'Object'"); - InstructionDesc[OpStore].operands.push(OperandMemoryAccess, "", true); - - InstructionDesc[OpPhi].operands.push(OperandVariableIds, "'Variable, Parent, ...'"); - - InstructionDesc[OpDecorate].operands.push(OperandId, "'Target'"); - InstructionDesc[OpDecorate].operands.push(OperandDecoration, ""); - InstructionDesc[OpDecorate].operands.push(OperandVariableLiterals, "See <>."); - - InstructionDesc[OpMemberDecorate].operands.push(OperandId, "'Structure Type'"); - InstructionDesc[OpMemberDecorate].operands.push(OperandLiteralNumber, "'Member'"); - InstructionDesc[OpMemberDecorate].operands.push(OperandDecoration, ""); - InstructionDesc[OpMemberDecorate].operands.push(OperandVariableLiterals, "See <>."); - - InstructionDesc[OpGroupDecorate].operands.push(OperandId, "'Decoration Group'"); - InstructionDesc[OpGroupDecorate].operands.push(OperandVariableIds, "'Targets'"); - - InstructionDesc[OpGroupMemberDecorate].operands.push(OperandId, "'Decoration Group'"); - InstructionDesc[OpGroupMemberDecorate].operands.push(OperandVariableIdLiteral, "'Targets'"); - - InstructionDesc[OpVectorExtractDynamic].operands.push(OperandId, "'Vector'"); - InstructionDesc[OpVectorExtractDynamic].operands.push(OperandId, "'Index'"); - - InstructionDesc[OpVectorInsertDynamic].operands.push(OperandId, "'Vector'"); - InstructionDesc[OpVectorInsertDynamic].operands.push(OperandId, "'Component'"); - InstructionDesc[OpVectorInsertDynamic].operands.push(OperandId, "'Index'"); - - InstructionDesc[OpVectorShuffle].operands.push(OperandId, "'Vector 1'"); - InstructionDesc[OpVectorShuffle].operands.push(OperandId, "'Vector 2'"); - InstructionDesc[OpVectorShuffle].operands.push(OperandVariableLiterals, "'Components'"); - - InstructionDesc[OpCompositeConstruct].operands.push(OperandVariableIds, "'Constituents'"); - - InstructionDesc[OpCompositeExtract].operands.push(OperandId, "'Composite'"); - InstructionDesc[OpCompositeExtract].operands.push(OperandVariableLiterals, "'Indexes'"); - - InstructionDesc[OpCompositeInsert].operands.push(OperandId, "'Object'"); - InstructionDesc[OpCompositeInsert].operands.push(OperandId, "'Composite'"); - InstructionDesc[OpCompositeInsert].operands.push(OperandVariableLiterals, "'Indexes'"); - - InstructionDesc[OpCopyObject].operands.push(OperandId, "'Operand'"); - - InstructionDesc[OpCopyMemory].operands.push(OperandId, "'Target'"); - InstructionDesc[OpCopyMemory].operands.push(OperandId, "'Source'"); - InstructionDesc[OpCopyMemory].operands.push(OperandMemoryAccess, "", true); - - InstructionDesc[OpCopyMemorySized].operands.push(OperandId, "'Target'"); - InstructionDesc[OpCopyMemorySized].operands.push(OperandId, "'Source'"); - InstructionDesc[OpCopyMemorySized].operands.push(OperandId, "'Size'"); - InstructionDesc[OpCopyMemorySized].operands.push(OperandMemoryAccess, "", true); - - InstructionDesc[OpCopyMemorySized].capabilities.push_back(CapabilityAddresses); - - InstructionDesc[OpSampledImage].operands.push(OperandId, "'Image'"); - InstructionDesc[OpSampledImage].operands.push(OperandId, "'Sampler'"); - - InstructionDesc[OpImage].operands.push(OperandId, "'Sampled Image'"); - - InstructionDesc[OpImageRead].operands.push(OperandId, "'Image'"); - 
InstructionDesc[OpImageRead].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageRead].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageRead].operands.push(OperandVariableIds, "", true); - - InstructionDesc[OpImageWrite].operands.push(OperandId, "'Image'"); - InstructionDesc[OpImageWrite].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageWrite].operands.push(OperandId, "'Texel'"); - InstructionDesc[OpImageWrite].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageWrite].operands.push(OperandVariableIds, "", true); - - InstructionDesc[OpImageSampleImplicitLod].operands.push(OperandId, "'Sampled Image'"); - InstructionDesc[OpImageSampleImplicitLod].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSampleImplicitLod].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSampleImplicitLod].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageSampleImplicitLod].capabilities.push_back(CapabilityShader); - - InstructionDesc[OpImageSampleExplicitLod].operands.push(OperandId, "'Sampled Image'"); - InstructionDesc[OpImageSampleExplicitLod].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSampleExplicitLod].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSampleExplicitLod].operands.push(OperandVariableIds, "", true); - - InstructionDesc[OpImageSampleDrefImplicitLod].operands.push(OperandId, "'Sampled Image'"); - InstructionDesc[OpImageSampleDrefImplicitLod].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSampleDrefImplicitLod].operands.push(OperandId, "'D~ref~'"); - InstructionDesc[OpImageSampleDrefImplicitLod].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSampleDrefImplicitLod].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageSampleDrefImplicitLod].capabilities.push_back(CapabilityShader); - - InstructionDesc[OpImageSampleDrefExplicitLod].operands.push(OperandId, "'Sampled Image'"); - InstructionDesc[OpImageSampleDrefExplicitLod].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSampleDrefExplicitLod].operands.push(OperandId, "'D~ref~'"); - InstructionDesc[OpImageSampleDrefExplicitLod].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSampleDrefExplicitLod].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageSampleDrefExplicitLod].capabilities.push_back(CapabilityShader); - - InstructionDesc[OpImageSampleProjImplicitLod].operands.push(OperandId, "'Sampled Image'"); - InstructionDesc[OpImageSampleProjImplicitLod].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSampleProjImplicitLod].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSampleProjImplicitLod].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageSampleProjImplicitLod].capabilities.push_back(CapabilityShader); - - InstructionDesc[OpImageSampleProjExplicitLod].operands.push(OperandId, "'Sampled Image'"); - InstructionDesc[OpImageSampleProjExplicitLod].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSampleProjExplicitLod].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSampleProjExplicitLod].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageSampleProjExplicitLod].capabilities.push_back(CapabilityShader); - - InstructionDesc[OpImageSampleProjDrefImplicitLod].operands.push(OperandId, "'Sampled Image'"); - 
InstructionDesc[OpImageSampleProjDrefImplicitLod].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSampleProjDrefImplicitLod].operands.push(OperandId, "'D~ref~'"); - InstructionDesc[OpImageSampleProjDrefImplicitLod].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSampleProjDrefImplicitLod].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageSampleProjDrefImplicitLod].capabilities.push_back(CapabilityShader); - - InstructionDesc[OpImageSampleProjDrefExplicitLod].operands.push(OperandId, "'Sampled Image'"); - InstructionDesc[OpImageSampleProjDrefExplicitLod].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSampleProjDrefExplicitLod].operands.push(OperandId, "'D~ref~'"); - InstructionDesc[OpImageSampleProjDrefExplicitLod].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSampleProjDrefExplicitLod].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageSampleProjDrefExplicitLod].capabilities.push_back(CapabilityShader); - - InstructionDesc[OpImageFetch].operands.push(OperandId, "'Image'"); - InstructionDesc[OpImageFetch].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageFetch].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageFetch].operands.push(OperandVariableIds, "", true); - - InstructionDesc[OpImageGather].operands.push(OperandId, "'Sampled Image'"); - InstructionDesc[OpImageGather].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageGather].operands.push(OperandId, "'Component'"); - InstructionDesc[OpImageGather].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageGather].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageGather].capabilities.push_back(CapabilityShader); - - InstructionDesc[OpImageDrefGather].operands.push(OperandId, "'Sampled Image'"); - InstructionDesc[OpImageDrefGather].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageDrefGather].operands.push(OperandId, "'D~ref~'"); - InstructionDesc[OpImageDrefGather].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageDrefGather].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageDrefGather].capabilities.push_back(CapabilityShader); - - InstructionDesc[OpImageSparseSampleImplicitLod].operands.push(OperandId, "'Sampled Image'"); - InstructionDesc[OpImageSparseSampleImplicitLod].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSparseSampleImplicitLod].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSparseSampleImplicitLod].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageSparseSampleImplicitLod].capabilities.push_back(CapabilitySparseResidency); - - InstructionDesc[OpImageSparseSampleExplicitLod].operands.push(OperandId, "'Sampled Image'"); - InstructionDesc[OpImageSparseSampleExplicitLod].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSparseSampleExplicitLod].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSparseSampleExplicitLod].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageSparseSampleExplicitLod].capabilities.push_back(CapabilitySparseResidency); - - InstructionDesc[OpImageSparseSampleDrefImplicitLod].operands.push(OperandId, "'Sampled Image'"); - InstructionDesc[OpImageSparseSampleDrefImplicitLod].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSparseSampleDrefImplicitLod].operands.push(OperandId, "'D~ref~'"); - 
InstructionDesc[OpImageSparseSampleDrefImplicitLod].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSparseSampleDrefImplicitLod].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageSparseSampleDrefImplicitLod].capabilities.push_back(CapabilitySparseResidency); - - InstructionDesc[OpImageSparseSampleDrefExplicitLod].operands.push(OperandId, "'Sampled Image'"); - InstructionDesc[OpImageSparseSampleDrefExplicitLod].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSparseSampleDrefExplicitLod].operands.push(OperandId, "'D~ref~'"); - InstructionDesc[OpImageSparseSampleDrefExplicitLod].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSparseSampleDrefExplicitLod].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageSparseSampleDrefExplicitLod].capabilities.push_back(CapabilitySparseResidency); - - InstructionDesc[OpImageSparseSampleProjImplicitLod].operands.push(OperandId, "'Sampled Image'"); - InstructionDesc[OpImageSparseSampleProjImplicitLod].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSparseSampleProjImplicitLod].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSparseSampleProjImplicitLod].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageSparseSampleProjImplicitLod].capabilities.push_back(CapabilitySparseResidency); - - InstructionDesc[OpImageSparseSampleProjExplicitLod].operands.push(OperandId, "'Sampled Image'"); - InstructionDesc[OpImageSparseSampleProjExplicitLod].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSparseSampleProjExplicitLod].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSparseSampleProjExplicitLod].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageSparseSampleProjExplicitLod].capabilities.push_back(CapabilitySparseResidency); - - InstructionDesc[OpImageSparseSampleProjDrefImplicitLod].operands.push(OperandId, "'Sampled Image'"); - InstructionDesc[OpImageSparseSampleProjDrefImplicitLod].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSparseSampleProjDrefImplicitLod].operands.push(OperandId, "'D~ref~'"); - InstructionDesc[OpImageSparseSampleProjDrefImplicitLod].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSparseSampleProjDrefImplicitLod].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageSparseSampleProjDrefImplicitLod].capabilities.push_back(CapabilitySparseResidency); - - InstructionDesc[OpImageSparseSampleProjDrefExplicitLod].operands.push(OperandId, "'Sampled Image'"); - InstructionDesc[OpImageSparseSampleProjDrefExplicitLod].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSparseSampleProjDrefExplicitLod].operands.push(OperandId, "'D~ref~'"); - InstructionDesc[OpImageSparseSampleProjDrefExplicitLod].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSparseSampleProjDrefExplicitLod].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageSparseSampleProjDrefExplicitLod].capabilities.push_back(CapabilitySparseResidency); - - InstructionDesc[OpImageSparseFetch].operands.push(OperandId, "'Image'"); - InstructionDesc[OpImageSparseFetch].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSparseFetch].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSparseFetch].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageSparseFetch].capabilities.push_back(CapabilitySparseResidency); - - 
InstructionDesc[OpImageSparseGather].operands.push(OperandId, "'Sampled Image'"); - InstructionDesc[OpImageSparseGather].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSparseGather].operands.push(OperandId, "'Component'"); - InstructionDesc[OpImageSparseGather].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSparseGather].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageSparseGather].capabilities.push_back(CapabilitySparseResidency); - - InstructionDesc[OpImageSparseDrefGather].operands.push(OperandId, "'Sampled Image'"); - InstructionDesc[OpImageSparseDrefGather].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSparseDrefGather].operands.push(OperandId, "'D~ref~'"); - InstructionDesc[OpImageSparseDrefGather].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSparseDrefGather].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageSparseDrefGather].capabilities.push_back(CapabilitySparseResidency); - - InstructionDesc[OpImageSparseRead].operands.push(OperandId, "'Image'"); - InstructionDesc[OpImageSparseRead].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageSparseRead].operands.push(OperandImageOperands, "", true); - InstructionDesc[OpImageSparseRead].operands.push(OperandVariableIds, "", true); - InstructionDesc[OpImageSparseRead].capabilities.push_back(CapabilitySparseResidency); - - InstructionDesc[OpImageSparseTexelsResident].operands.push(OperandId, "'Resident Code'"); - InstructionDesc[OpImageSparseTexelsResident].capabilities.push_back(CapabilitySparseResidency); - - InstructionDesc[OpImageQuerySizeLod].operands.push(OperandId, "'Image'"); - InstructionDesc[OpImageQuerySizeLod].operands.push(OperandId, "'Level of Detail'"); - InstructionDesc[OpImageQuerySizeLod].capabilities.push_back(CapabilityKernel); - InstructionDesc[OpImageQuerySizeLod].capabilities.push_back(CapabilityImageQuery); - - InstructionDesc[OpImageQuerySize].operands.push(OperandId, "'Image'"); - InstructionDesc[OpImageQuerySize].capabilities.push_back(CapabilityKernel); - InstructionDesc[OpImageQuerySize].capabilities.push_back(CapabilityImageQuery); - - InstructionDesc[OpImageQueryLod].operands.push(OperandId, "'Image'"); - InstructionDesc[OpImageQueryLod].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageQueryLod].capabilities.push_back(CapabilityImageQuery); - - InstructionDesc[OpImageQueryLevels].operands.push(OperandId, "'Image'"); - InstructionDesc[OpImageQueryLevels].capabilities.push_back(CapabilityKernel); - InstructionDesc[OpImageQueryLevels].capabilities.push_back(CapabilityImageQuery); - - InstructionDesc[OpImageQuerySamples].operands.push(OperandId, "'Image'"); - InstructionDesc[OpImageQuerySamples].capabilities.push_back(CapabilityKernel); - InstructionDesc[OpImageQuerySamples].capabilities.push_back(CapabilityImageQuery); - - InstructionDesc[OpImageQueryFormat].operands.push(OperandId, "'Image'"); - InstructionDesc[OpImageQueryFormat].capabilities.push_back(CapabilityKernel); - - InstructionDesc[OpImageQueryOrder].operands.push(OperandId, "'Image'"); - InstructionDesc[OpImageQueryOrder].capabilities.push_back(CapabilityKernel); - - InstructionDesc[OpAccessChain].operands.push(OperandId, "'Base'"); - InstructionDesc[OpAccessChain].operands.push(OperandVariableIds, "'Indexes'"); - - InstructionDesc[OpInBoundsAccessChain].operands.push(OperandId, "'Base'"); - InstructionDesc[OpInBoundsAccessChain].operands.push(OperandVariableIds, "'Indexes'"); - - 
InstructionDesc[OpPtrAccessChain].operands.push(OperandId, "'Base'"); - InstructionDesc[OpPtrAccessChain].operands.push(OperandId, "'Element'"); - InstructionDesc[OpPtrAccessChain].operands.push(OperandVariableIds, "'Indexes'"); - InstructionDesc[OpPtrAccessChain].capabilities.push_back(CapabilityAddresses); - - InstructionDesc[OpInBoundsPtrAccessChain].operands.push(OperandId, "'Base'"); - InstructionDesc[OpInBoundsPtrAccessChain].operands.push(OperandId, "'Element'"); - InstructionDesc[OpInBoundsPtrAccessChain].operands.push(OperandVariableIds, "'Indexes'"); - InstructionDesc[OpInBoundsPtrAccessChain].capabilities.push_back(CapabilityAddresses); - - InstructionDesc[OpSNegate].operands.push(OperandId, "'Operand'"); - - InstructionDesc[OpFNegate].operands.push(OperandId, "'Operand'"); - - InstructionDesc[OpNot].operands.push(OperandId, "'Operand'"); - - InstructionDesc[OpAny].operands.push(OperandId, "'Vector'"); - - InstructionDesc[OpAll].operands.push(OperandId, "'Vector'"); - - InstructionDesc[OpConvertFToU].operands.push(OperandId, "'Float Value'"); - - InstructionDesc[OpConvertFToS].operands.push(OperandId, "'Float Value'"); - - InstructionDesc[OpConvertSToF].operands.push(OperandId, "'Signed Value'"); - - InstructionDesc[OpConvertUToF].operands.push(OperandId, "'Unsigned Value'"); - - InstructionDesc[OpUConvert].operands.push(OperandId, "'Unsigned Value'"); - - InstructionDesc[OpSConvert].operands.push(OperandId, "'Signed Value'"); - - InstructionDesc[OpFConvert].operands.push(OperandId, "'Float Value'"); - - InstructionDesc[OpSatConvertSToU].operands.push(OperandId, "'Signed Value'"); - InstructionDesc[OpSatConvertSToU].capabilities.push_back(CapabilityKernel); - - InstructionDesc[OpSatConvertUToS].operands.push(OperandId, "'Unsigned Value'"); - InstructionDesc[OpSatConvertUToS].capabilities.push_back(CapabilityKernel); - - InstructionDesc[OpConvertPtrToU].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpConvertPtrToU].capabilities.push_back(CapabilityAddresses); - - InstructionDesc[OpConvertUToPtr].operands.push(OperandId, "'Integer Value'"); - InstructionDesc[OpConvertUToPtr].capabilities.push_back(CapabilityAddresses); - - InstructionDesc[OpPtrCastToGeneric].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpPtrCastToGeneric].capabilities.push_back(CapabilityKernel); - - InstructionDesc[OpGenericCastToPtr].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpGenericCastToPtr].capabilities.push_back(CapabilityKernel); - - InstructionDesc[OpGenericCastToPtrExplicit].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpGenericCastToPtrExplicit].operands.push(OperandStorage, "'Storage'"); - InstructionDesc[OpGenericCastToPtrExplicit].capabilities.push_back(CapabilityKernel); - - InstructionDesc[OpGenericPtrMemSemantics].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpGenericPtrMemSemantics].capabilities.push_back(CapabilityKernel); - - InstructionDesc[OpBitcast].operands.push(OperandId, "'Operand'"); - - InstructionDesc[OpQuantizeToF16].operands.push(OperandId, "'Value'"); - - InstructionDesc[OpTranspose].capabilities.push_back(CapabilityMatrix); - InstructionDesc[OpTranspose].operands.push(OperandId, "'Matrix'"); - - InstructionDesc[OpIsNan].operands.push(OperandId, "'x'"); - - InstructionDesc[OpIsInf].operands.push(OperandId, "'x'"); - - InstructionDesc[OpIsFinite].capabilities.push_back(CapabilityKernel); - InstructionDesc[OpIsFinite].operands.push(OperandId, "'x'"); - - InstructionDesc[OpIsNormal].capabilities.push_back(CapabilityKernel); 
- InstructionDesc[OpIsNormal].operands.push(OperandId, "'x'"); - - InstructionDesc[OpSignBitSet].capabilities.push_back(CapabilityKernel); - InstructionDesc[OpSignBitSet].operands.push(OperandId, "'x'"); - - InstructionDesc[OpLessOrGreater].capabilities.push_back(CapabilityKernel); - InstructionDesc[OpLessOrGreater].operands.push(OperandId, "'x'"); - InstructionDesc[OpLessOrGreater].operands.push(OperandId, "'y'"); - - InstructionDesc[OpOrdered].capabilities.push_back(CapabilityKernel); - InstructionDesc[OpOrdered].operands.push(OperandId, "'x'"); - InstructionDesc[OpOrdered].operands.push(OperandId, "'y'"); - - InstructionDesc[OpUnordered].capabilities.push_back(CapabilityKernel); - InstructionDesc[OpUnordered].operands.push(OperandId, "'x'"); - InstructionDesc[OpUnordered].operands.push(OperandId, "'y'"); - - InstructionDesc[OpArrayLength].operands.push(OperandId, "'Structure'"); - InstructionDesc[OpArrayLength].operands.push(OperandLiteralNumber, "'Array member'"); - InstructionDesc[OpArrayLength].capabilities.push_back(CapabilityShader); - - InstructionDesc[OpIAdd].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpIAdd].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpFAdd].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpFAdd].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpISub].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpISub].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpFSub].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpFSub].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpIMul].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpIMul].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpFMul].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpFMul].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpUDiv].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpUDiv].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpSDiv].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpSDiv].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpFDiv].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpFDiv].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpUMod].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpUMod].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpSRem].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpSRem].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpSMod].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpSMod].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpFRem].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpFRem].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpFMod].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpFMod].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpVectorTimesScalar].operands.push(OperandId, "'Vector'"); - InstructionDesc[OpVectorTimesScalar].operands.push(OperandId, "'Scalar'"); - - InstructionDesc[OpMatrixTimesScalar].capabilities.push_back(CapabilityMatrix); - InstructionDesc[OpMatrixTimesScalar].operands.push(OperandId, "'Matrix'"); - InstructionDesc[OpMatrixTimesScalar].operands.push(OperandId, "'Scalar'"); - - InstructionDesc[OpVectorTimesMatrix].capabilities.push_back(CapabilityMatrix); - InstructionDesc[OpVectorTimesMatrix].operands.push(OperandId, "'Vector'"); - 
InstructionDesc[OpVectorTimesMatrix].operands.push(OperandId, "'Matrix'"); - - InstructionDesc[OpMatrixTimesVector].capabilities.push_back(CapabilityMatrix); - InstructionDesc[OpMatrixTimesVector].operands.push(OperandId, "'Matrix'"); - InstructionDesc[OpMatrixTimesVector].operands.push(OperandId, "'Vector'"); - - InstructionDesc[OpMatrixTimesMatrix].capabilities.push_back(CapabilityMatrix); - InstructionDesc[OpMatrixTimesMatrix].operands.push(OperandId, "'LeftMatrix'"); - InstructionDesc[OpMatrixTimesMatrix].operands.push(OperandId, "'RightMatrix'"); - - InstructionDesc[OpOuterProduct].capabilities.push_back(CapabilityMatrix); - InstructionDesc[OpOuterProduct].operands.push(OperandId, "'Vector 1'"); - InstructionDesc[OpOuterProduct].operands.push(OperandId, "'Vector 2'"); - - InstructionDesc[OpDot].operands.push(OperandId, "'Vector 1'"); - InstructionDesc[OpDot].operands.push(OperandId, "'Vector 2'"); - - InstructionDesc[OpIAddCarry].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpIAddCarry].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpISubBorrow].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpISubBorrow].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpUMulExtended].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpUMulExtended].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpSMulExtended].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpSMulExtended].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpShiftRightLogical].operands.push(OperandId, "'Base'"); - InstructionDesc[OpShiftRightLogical].operands.push(OperandId, "'Shift'"); - - InstructionDesc[OpShiftRightArithmetic].operands.push(OperandId, "'Base'"); - InstructionDesc[OpShiftRightArithmetic].operands.push(OperandId, "'Shift'"); - - InstructionDesc[OpShiftLeftLogical].operands.push(OperandId, "'Base'"); - InstructionDesc[OpShiftLeftLogical].operands.push(OperandId, "'Shift'"); - - InstructionDesc[OpLogicalOr].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpLogicalOr].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpLogicalAnd].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpLogicalAnd].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpLogicalEqual].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpLogicalEqual].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpLogicalNotEqual].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpLogicalNotEqual].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpLogicalNot].operands.push(OperandId, "'Operand'"); - - InstructionDesc[OpBitwiseOr].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpBitwiseOr].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpBitwiseXor].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpBitwiseXor].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpBitwiseAnd].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpBitwiseAnd].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpBitFieldInsert].capabilities.push_back(CapabilityShader); - InstructionDesc[OpBitFieldInsert].operands.push(OperandId, "'Base'"); - InstructionDesc[OpBitFieldInsert].operands.push(OperandId, "'Insert'"); - InstructionDesc[OpBitFieldInsert].operands.push(OperandId, "'Offset'"); - InstructionDesc[OpBitFieldInsert].operands.push(OperandId, "'Count'"); - - InstructionDesc[OpBitFieldSExtract].capabilities.push_back(CapabilityShader); - 
InstructionDesc[OpBitFieldSExtract].operands.push(OperandId, "'Base'"); - InstructionDesc[OpBitFieldSExtract].operands.push(OperandId, "'Offset'"); - InstructionDesc[OpBitFieldSExtract].operands.push(OperandId, "'Count'"); - - InstructionDesc[OpBitFieldUExtract].capabilities.push_back(CapabilityShader); - InstructionDesc[OpBitFieldUExtract].operands.push(OperandId, "'Base'"); - InstructionDesc[OpBitFieldUExtract].operands.push(OperandId, "'Offset'"); - InstructionDesc[OpBitFieldUExtract].operands.push(OperandId, "'Count'"); - - InstructionDesc[OpBitReverse].capabilities.push_back(CapabilityShader); - InstructionDesc[OpBitReverse].operands.push(OperandId, "'Base'"); - - InstructionDesc[OpBitCount].operands.push(OperandId, "'Base'"); - - InstructionDesc[OpSelect].operands.push(OperandId, "'Condition'"); - InstructionDesc[OpSelect].operands.push(OperandId, "'Object 1'"); - InstructionDesc[OpSelect].operands.push(OperandId, "'Object 2'"); - - InstructionDesc[OpIEqual].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpIEqual].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpFOrdEqual].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpFOrdEqual].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpFUnordEqual].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpFUnordEqual].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpINotEqual].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpINotEqual].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpFOrdNotEqual].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpFOrdNotEqual].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpFUnordNotEqual].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpFUnordNotEqual].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpULessThan].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpULessThan].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpSLessThan].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpSLessThan].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpFOrdLessThan].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpFOrdLessThan].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpFUnordLessThan].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpFUnordLessThan].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpUGreaterThan].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpUGreaterThan].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpSGreaterThan].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpSGreaterThan].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpFOrdGreaterThan].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpFOrdGreaterThan].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpFUnordGreaterThan].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpFUnordGreaterThan].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpULessThanEqual].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpULessThanEqual].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpSLessThanEqual].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpSLessThanEqual].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpFOrdLessThanEqual].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpFOrdLessThanEqual].operands.push(OperandId, "'Operand 2'"); - - 
InstructionDesc[OpFUnordLessThanEqual].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpFUnordLessThanEqual].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpUGreaterThanEqual].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpUGreaterThanEqual].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpSGreaterThanEqual].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpSGreaterThanEqual].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpFOrdGreaterThanEqual].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpFOrdGreaterThanEqual].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpFUnordGreaterThanEqual].operands.push(OperandId, "'Operand 1'"); - InstructionDesc[OpFUnordGreaterThanEqual].operands.push(OperandId, "'Operand 2'"); - - InstructionDesc[OpDPdx].capabilities.push_back(CapabilityShader); - InstructionDesc[OpDPdx].operands.push(OperandId, "'P'"); - - InstructionDesc[OpDPdy].capabilities.push_back(CapabilityShader); - InstructionDesc[OpDPdy].operands.push(OperandId, "'P'"); - - InstructionDesc[OpFwidth].capabilities.push_back(CapabilityShader); - InstructionDesc[OpFwidth].operands.push(OperandId, "'P'"); - - InstructionDesc[OpDPdxFine].capabilities.push_back(CapabilityDerivativeControl); - InstructionDesc[OpDPdxFine].operands.push(OperandId, "'P'"); - - InstructionDesc[OpDPdyFine].capabilities.push_back(CapabilityDerivativeControl); - InstructionDesc[OpDPdyFine].operands.push(OperandId, "'P'"); - - InstructionDesc[OpFwidthFine].capabilities.push_back(CapabilityDerivativeControl); - InstructionDesc[OpFwidthFine].operands.push(OperandId, "'P'"); - - InstructionDesc[OpDPdxCoarse].capabilities.push_back(CapabilityDerivativeControl); - InstructionDesc[OpDPdxCoarse].operands.push(OperandId, "'P'"); - - InstructionDesc[OpDPdyCoarse].capabilities.push_back(CapabilityDerivativeControl); - InstructionDesc[OpDPdyCoarse].operands.push(OperandId, "'P'"); - - InstructionDesc[OpFwidthCoarse].capabilities.push_back(CapabilityDerivativeControl); - InstructionDesc[OpFwidthCoarse].operands.push(OperandId, "'P'"); - - InstructionDesc[OpEmitVertex].capabilities.push_back(CapabilityGeometry); - - InstructionDesc[OpEndPrimitive].capabilities.push_back(CapabilityGeometry); - - InstructionDesc[OpEmitStreamVertex].operands.push(OperandId, "'Stream'"); - InstructionDesc[OpEmitStreamVertex].capabilities.push_back(CapabilityGeometryStreams); - - InstructionDesc[OpEndStreamPrimitive].operands.push(OperandId, "'Stream'"); - InstructionDesc[OpEndStreamPrimitive].capabilities.push_back(CapabilityGeometryStreams); - - InstructionDesc[OpControlBarrier].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpControlBarrier].operands.push(OperandScope, "'Memory'"); - InstructionDesc[OpControlBarrier].operands.push(OperandMemorySemantics, "'Semantics'"); - - InstructionDesc[OpMemoryBarrier].operands.push(OperandScope, "'Memory'"); - InstructionDesc[OpMemoryBarrier].operands.push(OperandMemorySemantics, "'Semantics'"); - - InstructionDesc[OpImageTexelPointer].operands.push(OperandId, "'Image'"); - InstructionDesc[OpImageTexelPointer].operands.push(OperandId, "'Coordinate'"); - InstructionDesc[OpImageTexelPointer].operands.push(OperandId, "'Sample'"); - - InstructionDesc[OpAtomicLoad].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpAtomicLoad].operands.push(OperandScope, "'Scope'"); - InstructionDesc[OpAtomicLoad].operands.push(OperandMemorySemantics, "'Semantics'"); - - 
InstructionDesc[OpAtomicStore].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpAtomicStore].operands.push(OperandScope, "'Scope'"); - InstructionDesc[OpAtomicStore].operands.push(OperandMemorySemantics, "'Semantics'"); - InstructionDesc[OpAtomicStore].operands.push(OperandId, "'Value'"); - - InstructionDesc[OpAtomicExchange].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpAtomicExchange].operands.push(OperandScope, "'Scope'"); - InstructionDesc[OpAtomicExchange].operands.push(OperandMemorySemantics, "'Semantics'"); - InstructionDesc[OpAtomicExchange].operands.push(OperandId, "'Value'"); - - InstructionDesc[OpAtomicCompareExchange].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpAtomicCompareExchange].operands.push(OperandScope, "'Scope'"); - InstructionDesc[OpAtomicCompareExchange].operands.push(OperandMemorySemantics, "'Equal'"); - InstructionDesc[OpAtomicCompareExchange].operands.push(OperandMemorySemantics, "'Unequal'"); - InstructionDesc[OpAtomicCompareExchange].operands.push(OperandId, "'Value'"); - InstructionDesc[OpAtomicCompareExchange].operands.push(OperandId, "'Comparator'"); - - InstructionDesc[OpAtomicCompareExchangeWeak].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpAtomicCompareExchangeWeak].operands.push(OperandScope, "'Scope'"); - InstructionDesc[OpAtomicCompareExchangeWeak].operands.push(OperandMemorySemantics, "'Equal'"); - InstructionDesc[OpAtomicCompareExchangeWeak].operands.push(OperandMemorySemantics, "'Unequal'"); - InstructionDesc[OpAtomicCompareExchangeWeak].operands.push(OperandId, "'Value'"); - InstructionDesc[OpAtomicCompareExchangeWeak].operands.push(OperandId, "'Comparator'"); - InstructionDesc[OpAtomicCompareExchangeWeak].capabilities.push_back(CapabilityKernel); - - InstructionDesc[OpAtomicIIncrement].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpAtomicIIncrement].operands.push(OperandScope, "'Scope'"); - InstructionDesc[OpAtomicIIncrement].operands.push(OperandMemorySemantics, "'Semantics'"); - - InstructionDesc[OpAtomicIDecrement].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpAtomicIDecrement].operands.push(OperandScope, "'Scope'"); - InstructionDesc[OpAtomicIDecrement].operands.push(OperandMemorySemantics, "'Semantics'"); - - InstructionDesc[OpAtomicIAdd].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpAtomicIAdd].operands.push(OperandScope, "'Scope'"); - InstructionDesc[OpAtomicIAdd].operands.push(OperandMemorySemantics, "'Semantics'"); - InstructionDesc[OpAtomicIAdd].operands.push(OperandId, "'Value'"); - - InstructionDesc[OpAtomicISub].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpAtomicISub].operands.push(OperandScope, "'Scope'"); - InstructionDesc[OpAtomicISub].operands.push(OperandMemorySemantics, "'Semantics'"); - InstructionDesc[OpAtomicISub].operands.push(OperandId, "'Value'"); - - InstructionDesc[OpAtomicUMin].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpAtomicUMin].operands.push(OperandScope, "'Scope'"); - InstructionDesc[OpAtomicUMin].operands.push(OperandMemorySemantics, "'Semantics'"); - InstructionDesc[OpAtomicUMin].operands.push(OperandId, "'Value'"); - - InstructionDesc[OpAtomicUMax].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpAtomicUMax].operands.push(OperandScope, "'Scope'"); - InstructionDesc[OpAtomicUMax].operands.push(OperandMemorySemantics, "'Semantics'"); - InstructionDesc[OpAtomicUMax].operands.push(OperandId, "'Value'"); - - InstructionDesc[OpAtomicSMin].operands.push(OperandId, "'Pointer'"); - 
InstructionDesc[OpAtomicSMin].operands.push(OperandScope, "'Scope'"); - InstructionDesc[OpAtomicSMin].operands.push(OperandMemorySemantics, "'Semantics'"); - InstructionDesc[OpAtomicSMin].operands.push(OperandId, "'Value'"); - - InstructionDesc[OpAtomicSMax].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpAtomicSMax].operands.push(OperandScope, "'Scope'"); - InstructionDesc[OpAtomicSMax].operands.push(OperandMemorySemantics, "'Semantics'"); - InstructionDesc[OpAtomicSMax].operands.push(OperandId, "'Value'"); - - InstructionDesc[OpAtomicAnd].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpAtomicAnd].operands.push(OperandScope, "'Scope'"); - InstructionDesc[OpAtomicAnd].operands.push(OperandMemorySemantics, "'Semantics'"); - InstructionDesc[OpAtomicAnd].operands.push(OperandId, "'Value'"); - - InstructionDesc[OpAtomicOr].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpAtomicOr].operands.push(OperandScope, "'Scope'"); - InstructionDesc[OpAtomicOr].operands.push(OperandMemorySemantics, "'Semantics'"); - InstructionDesc[OpAtomicOr].operands.push(OperandId, "'Value'"); - - InstructionDesc[OpAtomicXor].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpAtomicXor].operands.push(OperandScope, "'Scope'"); - InstructionDesc[OpAtomicXor].operands.push(OperandMemorySemantics, "'Semantics'"); - InstructionDesc[OpAtomicXor].operands.push(OperandId, "'Value'"); - - InstructionDesc[OpAtomicFlagTestAndSet].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpAtomicFlagTestAndSet].operands.push(OperandScope, "'Scope'"); - InstructionDesc[OpAtomicFlagTestAndSet].operands.push(OperandMemorySemantics, "'Semantics'"); - InstructionDesc[OpAtomicFlagTestAndSet].capabilities.push_back(CapabilityKernel); - - InstructionDesc[OpAtomicFlagClear].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpAtomicFlagClear].operands.push(OperandScope, "'Scope'"); - InstructionDesc[OpAtomicFlagClear].operands.push(OperandMemorySemantics, "'Semantics'"); - InstructionDesc[OpAtomicFlagClear].capabilities.push_back(CapabilityKernel); - - InstructionDesc[OpLoopMerge].operands.push(OperandId, "'Merge Block'"); - InstructionDesc[OpLoopMerge].operands.push(OperandId, "'Continue Target'"); - InstructionDesc[OpLoopMerge].operands.push(OperandLoop, ""); - - InstructionDesc[OpSelectionMerge].operands.push(OperandId, "'Merge Block'"); - InstructionDesc[OpSelectionMerge].operands.push(OperandSelect, ""); - - InstructionDesc[OpBranch].operands.push(OperandId, "'Target Label'"); - - InstructionDesc[OpBranchConditional].operands.push(OperandId, "'Condition'"); - InstructionDesc[OpBranchConditional].operands.push(OperandId, "'True Label'"); - InstructionDesc[OpBranchConditional].operands.push(OperandId, "'False Label'"); - InstructionDesc[OpBranchConditional].operands.push(OperandVariableLiterals, "'Branch weights'"); - - InstructionDesc[OpSwitch].operands.push(OperandId, "'Selector'"); - InstructionDesc[OpSwitch].operands.push(OperandId, "'Default'"); - InstructionDesc[OpSwitch].operands.push(OperandVariableLiteralId, "'Target'"); - - InstructionDesc[OpKill].capabilities.push_back(CapabilityShader); - - InstructionDesc[OpReturnValue].operands.push(OperandId, "'Value'"); - - InstructionDesc[OpLifetimeStart].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpLifetimeStart].operands.push(OperandLiteralNumber, "'Size'"); - InstructionDesc[OpLifetimeStart].capabilities.push_back(CapabilityKernel); - - InstructionDesc[OpLifetimeStop].operands.push(OperandId, "'Pointer'"); - 
InstructionDesc[OpLifetimeStop].operands.push(OperandLiteralNumber, "'Size'"); - InstructionDesc[OpLifetimeStop].capabilities.push_back(CapabilityKernel); - - InstructionDesc[OpGroupAsyncCopy].capabilities.push_back(CapabilityKernel); - InstructionDesc[OpGroupAsyncCopy].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupAsyncCopy].operands.push(OperandId, "'Destination'"); - InstructionDesc[OpGroupAsyncCopy].operands.push(OperandId, "'Source'"); - InstructionDesc[OpGroupAsyncCopy].operands.push(OperandId, "'Num Elements'"); - InstructionDesc[OpGroupAsyncCopy].operands.push(OperandId, "'Stride'"); - InstructionDesc[OpGroupAsyncCopy].operands.push(OperandId, "'Event'"); - - InstructionDesc[OpGroupWaitEvents].capabilities.push_back(CapabilityKernel); - InstructionDesc[OpGroupWaitEvents].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupWaitEvents].operands.push(OperandId, "'Num Events'"); - InstructionDesc[OpGroupWaitEvents].operands.push(OperandId, "'Events List'"); - - InstructionDesc[OpGroupAll].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpGroupAll].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupAll].operands.push(OperandId, "'Predicate'"); - - InstructionDesc[OpGroupAny].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpGroupAny].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupAny].operands.push(OperandId, "'Predicate'"); - - InstructionDesc[OpGroupBroadcast].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpGroupBroadcast].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupBroadcast].operands.push(OperandId, "'Value'"); - InstructionDesc[OpGroupBroadcast].operands.push(OperandId, "'LocalId'"); - - InstructionDesc[OpGroupIAdd].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpGroupIAdd].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupIAdd].operands.push(OperandGroupOperation, "'Operation'"); - InstructionDesc[OpGroupIAdd].operands.push(OperandId, "'X'"); - - InstructionDesc[OpGroupFAdd].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpGroupFAdd].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupFAdd].operands.push(OperandGroupOperation, "'Operation'"); - InstructionDesc[OpGroupFAdd].operands.push(OperandId, "'X'"); - - InstructionDesc[OpGroupUMin].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpGroupUMin].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupUMin].operands.push(OperandGroupOperation, "'Operation'"); - InstructionDesc[OpGroupUMin].operands.push(OperandId, "'X'"); - - InstructionDesc[OpGroupSMin].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpGroupSMin].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupSMin].operands.push(OperandGroupOperation, "'Operation'"); - InstructionDesc[OpGroupSMin].operands.push(OperandId, "X"); - - InstructionDesc[OpGroupFMin].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpGroupFMin].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupFMin].operands.push(OperandGroupOperation, "'Operation'"); - InstructionDesc[OpGroupFMin].operands.push(OperandId, "X"); - - InstructionDesc[OpGroupUMax].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpGroupUMax].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupUMax].operands.push(OperandGroupOperation, "'Operation'"); - InstructionDesc[OpGroupUMax].operands.push(OperandId, "X"); - - 
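Besides operands, many of the entries above and below also record the SPIR-V capabilities an instruction depends on via capabilities.push_back(...). A hedged sketch of the kind of check that list makes possible follows; the helper name and the `declared` set are illustrative only, not part of this patch or of glslang's API:

    #include <set>
    #include "doc.h"

    // Returns true if every capability required by 'opcode' has been declared by the module.
    bool capabilitiesSatisfied(spv::Op opcode, const std::set<spv::Capability>& declared)
    {
        for (spv::Capability required : spv::InstructionDesc[opcode].capabilities)
            if (declared.count(required) == 0)
                return false;

        return true;
    }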
InstructionDesc[OpGroupSMax].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpGroupSMax].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupSMax].operands.push(OperandGroupOperation, "'Operation'"); - InstructionDesc[OpGroupSMax].operands.push(OperandId, "X"); - - InstructionDesc[OpGroupFMax].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpGroupFMax].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupFMax].operands.push(OperandGroupOperation, "'Operation'"); - InstructionDesc[OpGroupFMax].operands.push(OperandId, "X"); - - InstructionDesc[OpReadPipe].capabilities.push_back(CapabilityPipes); - InstructionDesc[OpReadPipe].operands.push(OperandId, "'Pipe'"); - InstructionDesc[OpReadPipe].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpReadPipe].operands.push(OperandId, "'Packet Size'"); - InstructionDesc[OpReadPipe].operands.push(OperandId, "'Packet Alignment'"); - - InstructionDesc[OpWritePipe].capabilities.push_back(CapabilityPipes); - InstructionDesc[OpWritePipe].operands.push(OperandId, "'Pipe'"); - InstructionDesc[OpWritePipe].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpWritePipe].operands.push(OperandId, "'Packet Size'"); - InstructionDesc[OpWritePipe].operands.push(OperandId, "'Packet Alignment'"); - - InstructionDesc[OpReservedReadPipe].capabilities.push_back(CapabilityPipes); - InstructionDesc[OpReservedReadPipe].operands.push(OperandId, "'Pipe'"); - InstructionDesc[OpReservedReadPipe].operands.push(OperandId, "'Reserve Id'"); - InstructionDesc[OpReservedReadPipe].operands.push(OperandId, "'Index'"); - InstructionDesc[OpReservedReadPipe].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpReservedReadPipe].operands.push(OperandId, "'Packet Size'"); - InstructionDesc[OpReservedReadPipe].operands.push(OperandId, "'Packet Alignment'"); - - InstructionDesc[OpReservedWritePipe].capabilities.push_back(CapabilityPipes); - InstructionDesc[OpReservedWritePipe].operands.push(OperandId, "'Pipe'"); - InstructionDesc[OpReservedWritePipe].operands.push(OperandId, "'Reserve Id'"); - InstructionDesc[OpReservedWritePipe].operands.push(OperandId, "'Index'"); - InstructionDesc[OpReservedWritePipe].operands.push(OperandId, "'Pointer'"); - InstructionDesc[OpReservedWritePipe].operands.push(OperandId, "'Packet Size'"); - InstructionDesc[OpReservedWritePipe].operands.push(OperandId, "'Packet Alignment'"); - - InstructionDesc[OpReserveReadPipePackets].capabilities.push_back(CapabilityPipes); - InstructionDesc[OpReserveReadPipePackets].operands.push(OperandId, "'Pipe'"); - InstructionDesc[OpReserveReadPipePackets].operands.push(OperandId, "'Num Packets'"); - InstructionDesc[OpReserveReadPipePackets].operands.push(OperandId, "'Packet Size'"); - InstructionDesc[OpReserveReadPipePackets].operands.push(OperandId, "'Packet Alignment'"); - - InstructionDesc[OpReserveWritePipePackets].capabilities.push_back(CapabilityPipes); - InstructionDesc[OpReserveWritePipePackets].operands.push(OperandId, "'Pipe'"); - InstructionDesc[OpReserveWritePipePackets].operands.push(OperandId, "'Num Packets'"); - InstructionDesc[OpReserveWritePipePackets].operands.push(OperandId, "'Packet Size'"); - InstructionDesc[OpReserveWritePipePackets].operands.push(OperandId, "'Packet Alignment'"); - - InstructionDesc[OpCommitReadPipe].capabilities.push_back(CapabilityPipes); - InstructionDesc[OpCommitReadPipe].operands.push(OperandId, "'Pipe'"); - InstructionDesc[OpCommitReadPipe].operands.push(OperandId, "'Reserve Id'"); - 
InstructionDesc[OpCommitReadPipe].operands.push(OperandId, "'Packet Size'"); - InstructionDesc[OpCommitReadPipe].operands.push(OperandId, "'Packet Alignment'"); - - InstructionDesc[OpCommitWritePipe].capabilities.push_back(CapabilityPipes); - InstructionDesc[OpCommitWritePipe].operands.push(OperandId, "'Pipe'"); - InstructionDesc[OpCommitWritePipe].operands.push(OperandId, "'Reserve Id'"); - InstructionDesc[OpCommitWritePipe].operands.push(OperandId, "'Packet Size'"); - InstructionDesc[OpCommitWritePipe].operands.push(OperandId, "'Packet Alignment'"); - - InstructionDesc[OpIsValidReserveId].capabilities.push_back(CapabilityPipes); - InstructionDesc[OpIsValidReserveId].operands.push(OperandId, "'Reserve Id'"); - - InstructionDesc[OpGetNumPipePackets].capabilities.push_back(CapabilityPipes); - InstructionDesc[OpGetNumPipePackets].operands.push(OperandId, "'Pipe'"); - InstructionDesc[OpGetNumPipePackets].operands.push(OperandId, "'Packet Size'"); - InstructionDesc[OpGetNumPipePackets].operands.push(OperandId, "'Packet Alignment'"); - - InstructionDesc[OpGetMaxPipePackets].capabilities.push_back(CapabilityPipes); - InstructionDesc[OpGetMaxPipePackets].operands.push(OperandId, "'Pipe'"); - InstructionDesc[OpGetMaxPipePackets].operands.push(OperandId, "'Packet Size'"); - InstructionDesc[OpGetMaxPipePackets].operands.push(OperandId, "'Packet Alignment'"); - - InstructionDesc[OpGroupReserveReadPipePackets].capabilities.push_back(CapabilityPipes); - InstructionDesc[OpGroupReserveReadPipePackets].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupReserveReadPipePackets].operands.push(OperandId, "'Pipe'"); - InstructionDesc[OpGroupReserveReadPipePackets].operands.push(OperandId, "'Num Packets'"); - InstructionDesc[OpGroupReserveReadPipePackets].operands.push(OperandId, "'Packet Size'"); - InstructionDesc[OpGroupReserveReadPipePackets].operands.push(OperandId, "'Packet Alignment'"); - - InstructionDesc[OpGroupReserveWritePipePackets].capabilities.push_back(CapabilityPipes); - InstructionDesc[OpGroupReserveWritePipePackets].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupReserveWritePipePackets].operands.push(OperandId, "'Pipe'"); - InstructionDesc[OpGroupReserveWritePipePackets].operands.push(OperandId, "'Num Packets'"); - InstructionDesc[OpGroupReserveWritePipePackets].operands.push(OperandId, "'Packet Size'"); - InstructionDesc[OpGroupReserveWritePipePackets].operands.push(OperandId, "'Packet Alignment'"); - - InstructionDesc[OpGroupCommitReadPipe].capabilities.push_back(CapabilityPipes); - InstructionDesc[OpGroupCommitReadPipe].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupCommitReadPipe].operands.push(OperandId, "'Pipe'"); - InstructionDesc[OpGroupCommitReadPipe].operands.push(OperandId, "'Reserve Id'"); - InstructionDesc[OpGroupCommitReadPipe].operands.push(OperandId, "'Packet Size'"); - InstructionDesc[OpGroupCommitReadPipe].operands.push(OperandId, "'Packet Alignment'"); - - InstructionDesc[OpGroupCommitWritePipe].capabilities.push_back(CapabilityPipes); - InstructionDesc[OpGroupCommitWritePipe].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupCommitWritePipe].operands.push(OperandId, "'Pipe'"); - InstructionDesc[OpGroupCommitWritePipe].operands.push(OperandId, "'Reserve Id'"); - InstructionDesc[OpGroupCommitWritePipe].operands.push(OperandId, "'Packet Size'"); - InstructionDesc[OpGroupCommitWritePipe].operands.push(OperandId, "'Packet Alignment'"); - - 
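The deleted doc.h further below pairs this table with name-lookup helpers (OpcodeString, CapabilityString, and the other *String functions) for turning raw enumerant values back into readable text. A small illustrative sketch combining the two, again assuming only the declarations from that header; the function name is made up for the example:

    #include <cstdio>
    #include "doc.h"

    // Prints "<opcode name>: N operands" for one instruction.
    void printOpcodeSummary(spv::Op opcode)
    {
        std::printf("%s: %d operands\n",
                    spv::OpcodeString(static_cast<int>(opcode)),
                    spv::InstructionDesc[opcode].operands.getNum());
    }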
InstructionDesc[OpBuildNDRange].capabilities.push_back(CapabilityDeviceEnqueue); - InstructionDesc[OpBuildNDRange].operands.push(OperandId, "'GlobalWorkSize'"); - InstructionDesc[OpBuildNDRange].operands.push(OperandId, "'LocalWorkSize'"); - InstructionDesc[OpBuildNDRange].operands.push(OperandId, "'GlobalWorkOffset'"); - - InstructionDesc[OpGetDefaultQueue].capabilities.push_back(CapabilityDeviceEnqueue); - - InstructionDesc[OpCaptureEventProfilingInfo].capabilities.push_back(CapabilityDeviceEnqueue); - - InstructionDesc[OpCaptureEventProfilingInfo].operands.push(OperandId, "'Event'"); - InstructionDesc[OpCaptureEventProfilingInfo].operands.push(OperandId, "'Profiling Info'"); - InstructionDesc[OpCaptureEventProfilingInfo].operands.push(OperandId, "'Value'"); - - InstructionDesc[OpSetUserEventStatus].capabilities.push_back(CapabilityDeviceEnqueue); - - InstructionDesc[OpSetUserEventStatus].operands.push(OperandId, "'Event'"); - InstructionDesc[OpSetUserEventStatus].operands.push(OperandId, "'Status'"); - - InstructionDesc[OpIsValidEvent].capabilities.push_back(CapabilityDeviceEnqueue); - InstructionDesc[OpIsValidEvent].operands.push(OperandId, "'Event'"); - - InstructionDesc[OpCreateUserEvent].capabilities.push_back(CapabilityDeviceEnqueue); - - InstructionDesc[OpRetainEvent].capabilities.push_back(CapabilityDeviceEnqueue); - InstructionDesc[OpRetainEvent].operands.push(OperandId, "'Event'"); - - InstructionDesc[OpReleaseEvent].capabilities.push_back(CapabilityDeviceEnqueue); - InstructionDesc[OpReleaseEvent].operands.push(OperandId, "'Event'"); - - InstructionDesc[OpGetKernelWorkGroupSize].capabilities.push_back(CapabilityDeviceEnqueue); - InstructionDesc[OpGetKernelWorkGroupSize].operands.push(OperandId, "'Invoke'"); - InstructionDesc[OpGetKernelWorkGroupSize].operands.push(OperandId, "'Param'"); - InstructionDesc[OpGetKernelWorkGroupSize].operands.push(OperandId, "'Param Size'"); - InstructionDesc[OpGetKernelWorkGroupSize].operands.push(OperandId, "'Param Align'"); - - InstructionDesc[OpGetKernelPreferredWorkGroupSizeMultiple].capabilities.push_back(CapabilityDeviceEnqueue); - InstructionDesc[OpGetKernelPreferredWorkGroupSizeMultiple].operands.push(OperandId, "'Invoke'"); - InstructionDesc[OpGetKernelPreferredWorkGroupSizeMultiple].operands.push(OperandId, "'Param'"); - InstructionDesc[OpGetKernelPreferredWorkGroupSizeMultiple].operands.push(OperandId, "'Param Size'"); - InstructionDesc[OpGetKernelPreferredWorkGroupSizeMultiple].operands.push(OperandId, "'Param Align'"); - - InstructionDesc[OpGetKernelNDrangeSubGroupCount].capabilities.push_back(CapabilityDeviceEnqueue); - InstructionDesc[OpGetKernelNDrangeSubGroupCount].operands.push(OperandId, "'ND Range'"); - InstructionDesc[OpGetKernelNDrangeSubGroupCount].operands.push(OperandId, "'Invoke'"); - InstructionDesc[OpGetKernelNDrangeSubGroupCount].operands.push(OperandId, "'Param'"); - InstructionDesc[OpGetKernelNDrangeSubGroupCount].operands.push(OperandId, "'Param Size'"); - InstructionDesc[OpGetKernelNDrangeSubGroupCount].operands.push(OperandId, "'Param Align'"); - - InstructionDesc[OpGetKernelNDrangeMaxSubGroupSize].capabilities.push_back(CapabilityDeviceEnqueue); - InstructionDesc[OpGetKernelNDrangeMaxSubGroupSize].operands.push(OperandId, "'ND Range'"); - InstructionDesc[OpGetKernelNDrangeMaxSubGroupSize].operands.push(OperandId, "'Invoke'"); - InstructionDesc[OpGetKernelNDrangeMaxSubGroupSize].operands.push(OperandId, "'Param'"); - InstructionDesc[OpGetKernelNDrangeMaxSubGroupSize].operands.push(OperandId, "'Param Size'"); - 
InstructionDesc[OpGetKernelNDrangeMaxSubGroupSize].operands.push(OperandId, "'Param Align'"); - - InstructionDesc[OpEnqueueKernel].capabilities.push_back(CapabilityDeviceEnqueue); - InstructionDesc[OpEnqueueKernel].operands.push(OperandId, "'Queue'"); - InstructionDesc[OpEnqueueKernel].operands.push(OperandId, "'Flags'"); - InstructionDesc[OpEnqueueKernel].operands.push(OperandId, "'ND Range'"); - InstructionDesc[OpEnqueueKernel].operands.push(OperandId, "'Num Events'"); - InstructionDesc[OpEnqueueKernel].operands.push(OperandId, "'Wait Events'"); - InstructionDesc[OpEnqueueKernel].operands.push(OperandId, "'Ret Event'"); - InstructionDesc[OpEnqueueKernel].operands.push(OperandId, "'Invoke'"); - InstructionDesc[OpEnqueueKernel].operands.push(OperandId, "'Param'"); - InstructionDesc[OpEnqueueKernel].operands.push(OperandId, "'Param Size'"); - InstructionDesc[OpEnqueueKernel].operands.push(OperandId, "'Param Align'"); - InstructionDesc[OpEnqueueKernel].operands.push(OperandVariableIds, "'Local Size'"); - - InstructionDesc[OpEnqueueMarker].capabilities.push_back(CapabilityDeviceEnqueue); - InstructionDesc[OpEnqueueMarker].operands.push(OperandId, "'Queue'"); - InstructionDesc[OpEnqueueMarker].operands.push(OperandId, "'Num Events'"); - InstructionDesc[OpEnqueueMarker].operands.push(OperandId, "'Wait Events'"); - InstructionDesc[OpEnqueueMarker].operands.push(OperandId, "'Ret Event'"); - - InstructionDesc[OpSubgroupBallotKHR].operands.push(OperandId, "'Predicate'"); - - InstructionDesc[OpSubgroupFirstInvocationKHR].operands.push(OperandId, "'Value'"); - - InstructionDesc[OpSubgroupAnyKHR].capabilities.push_back(CapabilitySubgroupVoteKHR); - InstructionDesc[OpSubgroupAnyKHR].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpSubgroupAnyKHR].operands.push(OperandId, "'Predicate'"); - - InstructionDesc[OpSubgroupAllKHR].capabilities.push_back(CapabilitySubgroupVoteKHR); - InstructionDesc[OpSubgroupAllKHR].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpSubgroupAllKHR].operands.push(OperandId, "'Predicate'"); - - InstructionDesc[OpSubgroupAllEqualKHR].capabilities.push_back(CapabilitySubgroupVoteKHR); - InstructionDesc[OpSubgroupAllEqualKHR].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpSubgroupAllEqualKHR].operands.push(OperandId, "'Predicate'"); - - InstructionDesc[OpSubgroupReadInvocationKHR].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpSubgroupReadInvocationKHR].operands.push(OperandId, "'Value'"); - InstructionDesc[OpSubgroupReadInvocationKHR].operands.push(OperandId, "'Index'"); - -#ifdef AMD_EXTENSIONS - InstructionDesc[OpGroupIAddNonUniformAMD].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpGroupIAddNonUniformAMD].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupIAddNonUniformAMD].operands.push(OperandGroupOperation, "'Operation'"); - InstructionDesc[OpGroupIAddNonUniformAMD].operands.push(OperandId, "'X'"); - - InstructionDesc[OpGroupFAddNonUniformAMD].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpGroupFAddNonUniformAMD].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupFAddNonUniformAMD].operands.push(OperandGroupOperation, "'Operation'"); - InstructionDesc[OpGroupFAddNonUniformAMD].operands.push(OperandId, "'X'"); - - InstructionDesc[OpGroupUMinNonUniformAMD].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpGroupUMinNonUniformAMD].operands.push(OperandScope, "'Execution'"); - 
InstructionDesc[OpGroupUMinNonUniformAMD].operands.push(OperandGroupOperation, "'Operation'"); - InstructionDesc[OpGroupUMinNonUniformAMD].operands.push(OperandId, "'X'"); - - InstructionDesc[OpGroupSMinNonUniformAMD].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpGroupSMinNonUniformAMD].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupSMinNonUniformAMD].operands.push(OperandGroupOperation, "'Operation'"); - InstructionDesc[OpGroupSMinNonUniformAMD].operands.push(OperandId, "X"); - - InstructionDesc[OpGroupFMinNonUniformAMD].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpGroupFMinNonUniformAMD].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupFMinNonUniformAMD].operands.push(OperandGroupOperation, "'Operation'"); - InstructionDesc[OpGroupFMinNonUniformAMD].operands.push(OperandId, "X"); - - InstructionDesc[OpGroupUMaxNonUniformAMD].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpGroupUMaxNonUniformAMD].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupUMaxNonUniformAMD].operands.push(OperandGroupOperation, "'Operation'"); - InstructionDesc[OpGroupUMaxNonUniformAMD].operands.push(OperandId, "X"); - - InstructionDesc[OpGroupSMaxNonUniformAMD].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpGroupSMaxNonUniformAMD].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupSMaxNonUniformAMD].operands.push(OperandGroupOperation, "'Operation'"); - InstructionDesc[OpGroupSMaxNonUniformAMD].operands.push(OperandId, "X"); - - InstructionDesc[OpGroupFMaxNonUniformAMD].capabilities.push_back(CapabilityGroups); - InstructionDesc[OpGroupFMaxNonUniformAMD].operands.push(OperandScope, "'Execution'"); - InstructionDesc[OpGroupFMaxNonUniformAMD].operands.push(OperandGroupOperation, "'Operation'"); - InstructionDesc[OpGroupFMaxNonUniformAMD].operands.push(OperandId, "X"); -#endif -} - -}; // end spv namespace diff --git a/deps/glslang/SPIRV/doc.h b/deps/glslang/SPIRV/doc.h deleted file mode 100644 index 710ca1a5..00000000 --- a/deps/glslang/SPIRV/doc.h +++ /dev/null @@ -1,262 +0,0 @@ -// -// Copyright (C) 2014-2015 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. - -// -// Parameterize the SPIR-V enumerants. -// - -#pragma once - -#include "spirv.hpp" - -#include - -namespace spv { - -// Fill in all the parameters -void Parameterize(); - -// Return the English names of all the enums. -const char* SourceString(int); -const char* AddressingString(int); -const char* MemoryString(int); -const char* ExecutionModelString(int); -const char* ExecutionModeString(int); -const char* StorageClassString(int); -const char* DecorationString(int); -const char* BuiltInString(int); -const char* DimensionString(int); -const char* SelectControlString(int); -const char* LoopControlString(int); -const char* FunctionControlString(int); -const char* SamplerAddressingModeString(int); -const char* SamplerFilterModeString(int); -const char* ImageFormatString(int); -const char* ImageChannelOrderString(int); -const char* ImageChannelTypeString(int); -const char* ImageChannelDataTypeString(int type); -const char* ImageOperandsString(int format); -const char* ImageOperands(int); -const char* FPFastMathString(int); -const char* FPRoundingModeString(int); -const char* LinkageTypeString(int); -const char* FuncParamAttrString(int); -const char* AccessQualifierString(int); -const char* MemorySemanticsString(int); -const char* MemoryAccessString(int); -const char* ExecutionScopeString(int); -const char* GroupOperationString(int); -const char* KernelEnqueueFlagsString(int); -const char* KernelProfilingInfoString(int); -const char* CapabilityString(int); -const char* OpcodeString(int); -const char* ScopeString(int mem); - -// For grouping opcodes into subsections -enum OpcodeClass { - OpClassMisc, - OpClassDebug, - OpClassAnnotate, - OpClassExtension, - OpClassMode, - OpClassType, - OpClassConstant, - OpClassMemory, - OpClassFunction, - OpClassImage, - OpClassConvert, - OpClassComposite, - OpClassArithmetic, - OpClassBit, - OpClassRelationalLogical, - OpClassDerivative, - OpClassFlowControl, - OpClassAtomic, - OpClassPrimitive, - OpClassBarrier, - OpClassGroup, - OpClassDeviceSideEnqueue, - OpClassPipe, - - OpClassCount, - OpClassMissing // all instructions start out as missing -}; - -// For parameterizing operands. 
-enum OperandClass { - OperandNone, - OperandId, - OperandVariableIds, - OperandOptionalLiteral, - OperandOptionalLiteralString, - OperandVariableLiterals, - OperandVariableIdLiteral, - OperandVariableLiteralId, - OperandLiteralNumber, - OperandLiteralString, - OperandSource, - OperandExecutionModel, - OperandAddressing, - OperandMemory, - OperandExecutionMode, - OperandStorage, - OperandDimensionality, - OperandSamplerAddressingMode, - OperandSamplerFilterMode, - OperandSamplerImageFormat, - OperandImageChannelOrder, - OperandImageChannelDataType, - OperandImageOperands, - OperandFPFastMath, - OperandFPRoundingMode, - OperandLinkageType, - OperandAccessQualifier, - OperandFuncParamAttr, - OperandDecoration, - OperandBuiltIn, - OperandSelect, - OperandLoop, - OperandFunction, - OperandMemorySemantics, - OperandMemoryAccess, - OperandScope, - OperandGroupOperation, - OperandKernelEnqueueFlags, - OperandKernelProfilingInfo, - OperandCapability, - - OperandOpcode, - - OperandCount -}; - -// Any specific enum can have a set of capabilities that allow it: -typedef std::vector EnumCaps; - -// Parameterize a set of operands with their OperandClass(es) and descriptions. -class OperandParameters { -public: - OperandParameters() { } - void push(OperandClass oc, const char* d, bool opt = false) - { - opClass.push_back(oc); - desc.push_back(d); - optional.push_back(opt); - } - void setOptional(); - OperandClass getClass(int op) const { return opClass[op]; } - const char* getDesc(int op) const { return desc[op]; } - bool isOptional(int op) const { return optional[op]; } - int getNum() const { return (int)opClass.size(); } - -protected: - std::vector opClass; - std::vector desc; - std::vector optional; -}; - -// Parameterize an enumerant -class EnumParameters { -public: - EnumParameters() : desc(0) { } - EnumCaps caps; - const char* desc; -}; - -// Parameterize a set of enumerants that form an enum -class EnumDefinition : public EnumParameters { -public: - EnumDefinition() : - ceiling(0), bitmask(false), getName(0), enumParams(0), operandParams(0) { } - void set(int ceil, const char* (*name)(int), EnumParameters* ep, bool mask = false) - { - ceiling = ceil; - getName = name; - bitmask = mask; - enumParams = ep; - } - void setOperands(OperandParameters* op) { operandParams = op; } - int ceiling; // ceiling of enumerants - bool bitmask; // true if these enumerants combine into a bitmask - const char* (*getName)(int); // a function that returns the name for each enumerant value (or shift) - EnumParameters* enumParams; // parameters for each individual enumerant - OperandParameters* operandParams; // sets of operands -}; - -// Parameterize an instruction's logical format, including its known set of operands, -// per OperandParameters above. -class InstructionParameters { -public: - InstructionParameters() : - opDesc("TBD"), - opClass(OpClassMissing), - typePresent(true), // most normal, only exceptions have to be spelled out - resultPresent(true) // most normal, only exceptions have to be spelled out - { } - - void setResultAndType(bool r, bool t) - { - resultPresent = r; - typePresent = t; - } - - bool hasResult() const { return resultPresent != 0; } - bool hasType() const { return typePresent != 0; } - - const char* opDesc; - EnumCaps capabilities; - OpcodeClass opClass; - OperandParameters operands; - -protected: - int typePresent : 1; - int resultPresent : 1; -}; - -const int OpcodeCeiling = 321; - -// The set of objects that hold all the instruction/operand -// parameterization information. 
-extern InstructionParameters InstructionDesc[]; - -// These hold definitions of the enumerants used for operands -extern EnumDefinition OperandClassParams[]; - -const char* GetOperandDesc(OperandClass operand); -void PrintImmediateRow(int imm, const char* name, const EnumParameters* enumParams, bool caps, bool hex = false); -const char* AccessQualifierString(int attr); - -void PrintOperands(const OperandParameters& operands, int reservedOperands); - -}; // end namespace spv diff --git a/deps/glslang/SPIRV/hex_float.h b/deps/glslang/SPIRV/hex_float.h deleted file mode 100644 index 905b21a4..00000000 --- a/deps/glslang/SPIRV/hex_float.h +++ /dev/null @@ -1,1078 +0,0 @@ -// Copyright (c) 2015-2016 The Khronos Group Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef LIBSPIRV_UTIL_HEX_FLOAT_H_ -#define LIBSPIRV_UTIL_HEX_FLOAT_H_ - -#include -#include -#include -#include -#include -#include -#include - -#if defined(_MSC_VER) && _MSC_VER < 1800 -namespace std { -bool isnan(double f) -{ - return ::_isnan(f) != 0; -} -bool isinf(double f) -{ - return ::_finite(f) == 0; -} -} -#endif - -#include "bitutils.h" - -namespace spvutils { - -class Float16 { - public: - Float16(uint16_t v) : val(v) {} - Float16() {} - static bool isNan(const Float16& val) { - return ((val.val & 0x7C00) == 0x7C00) && ((val.val & 0x3FF) != 0); - } - // Returns true if the given value is any kind of infinity. - static bool isInfinity(const Float16& val) { - return ((val.val & 0x7C00) == 0x7C00) && ((val.val & 0x3FF) == 0); - } - Float16(const Float16& other) { val = other.val; } - uint16_t get_value() const { return val; } - - // Returns the maximum normal value. - static Float16 max() { return Float16(0x7bff); } - // Returns the lowest normal value. - static Float16 lowest() { return Float16(0xfbff); } - - private: - uint16_t val; -}; - -// To specialize this type, you must override uint_type to define -// an unsigned integer that can fit your floating point type. -// You must also add a isNan function that returns true if -// a value is Nan. -template -struct FloatProxyTraits { - typedef void uint_type; -}; - -template <> -struct FloatProxyTraits { - typedef uint32_t uint_type; - static bool isNan(float f) { return std::isnan(f); } - // Returns true if the given value is any kind of infinity. - static bool isInfinity(float f) { return std::isinf(f); } - // Returns the maximum normal value. - static float max() { return std::numeric_limits::max(); } - // Returns the lowest normal value. - static float lowest() { return std::numeric_limits::lowest(); } -}; - -template <> -struct FloatProxyTraits { - typedef uint64_t uint_type; - static bool isNan(double f) { return std::isnan(f); } - // Returns true if the given value is any kind of infinity. - static bool isInfinity(double f) { return std::isinf(f); } - // Returns the maximum normal value. - static double max() { return std::numeric_limits::max(); } - // Returns the lowest normal value. 
- static double lowest() { return std::numeric_limits::lowest(); } -}; - -template <> -struct FloatProxyTraits { - typedef uint16_t uint_type; - static bool isNan(Float16 f) { return Float16::isNan(f); } - // Returns true if the given value is any kind of infinity. - static bool isInfinity(Float16 f) { return Float16::isInfinity(f); } - // Returns the maximum normal value. - static Float16 max() { return Float16::max(); } - // Returns the lowest normal value. - static Float16 lowest() { return Float16::lowest(); } -}; - -// Since copying a floating point number (especially if it is NaN) -// does not guarantee that bits are preserved, this class lets us -// store the type and use it as a float when necessary. -template -class FloatProxy { - public: - typedef typename FloatProxyTraits::uint_type uint_type; - - // Since this is to act similar to the normal floats, - // do not initialize the data by default. - FloatProxy() {} - - // Intentionally non-explicit. This is a proxy type so - // implicit conversions allow us to use it more transparently. - FloatProxy(T val) { data_ = BitwiseCast(val); } - - // Intentionally non-explicit. This is a proxy type so - // implicit conversions allow us to use it more transparently. - FloatProxy(uint_type val) { data_ = val; } - - // This is helpful to have and is guaranteed not to stomp bits. - FloatProxy operator-() const { - return static_cast(data_ ^ - (uint_type(0x1) << (sizeof(T) * 8 - 1))); - } - - // Returns the data as a floating point value. - T getAsFloat() const { return BitwiseCast(data_); } - - // Returns the raw data. - uint_type data() const { return data_; } - - // Returns true if the value represents any type of NaN. - bool isNan() { return FloatProxyTraits::isNan(getAsFloat()); } - // Returns true if the value represents any type of infinity. - bool isInfinity() { return FloatProxyTraits::isInfinity(getAsFloat()); } - - // Returns the maximum normal value. - static FloatProxy max() { - return FloatProxy(FloatProxyTraits::max()); - } - // Returns the lowest normal value. - static FloatProxy lowest() { - return FloatProxy(FloatProxyTraits::lowest()); - } - - private: - uint_type data_; -}; - -template -bool operator==(const FloatProxy& first, const FloatProxy& second) { - return first.data() == second.data(); -} - -// Reads a FloatProxy value as a normal float from a stream. -template -std::istream& operator>>(std::istream& is, FloatProxy& value) { - T float_val; - is >> float_val; - value = FloatProxy(float_val); - return is; -} - -// This is an example traits. It is not meant to be used in practice, but will -// be the default for any non-specialized type. -template -struct HexFloatTraits { - // Integer type that can store this hex-float. - typedef void uint_type; - // Signed integer type that can store this hex-float. - typedef void int_type; - // The numerical type that this HexFloat represents. - typedef void underlying_type; - // The type needed to construct the underlying type. - typedef void native_type; - // The number of bits that are actually relevant in the uint_type. - // This allows us to deal with, for example, 24-bit values in a 32-bit - // integer. - static const uint32_t num_used_bits = 0; - // Number of bits that represent the exponent. - static const uint32_t num_exponent_bits = 0; - // Number of bits that represent the fractional part. - static const uint32_t num_fraction_bits = 0; - // The bias of the exponent. (How much we need to subtract from the stored - // value to get the correct value.) 
- static const uint32_t exponent_bias = 0; -}; - -// Traits for IEEE float. -// 1 sign bit, 8 exponent bits, 23 fractional bits. -template <> -struct HexFloatTraits> { - typedef uint32_t uint_type; - typedef int32_t int_type; - typedef FloatProxy underlying_type; - typedef float native_type; - static const uint_type num_used_bits = 32; - static const uint_type num_exponent_bits = 8; - static const uint_type num_fraction_bits = 23; - static const uint_type exponent_bias = 127; -}; - -// Traits for IEEE double. -// 1 sign bit, 11 exponent bits, 52 fractional bits. -template <> -struct HexFloatTraits> { - typedef uint64_t uint_type; - typedef int64_t int_type; - typedef FloatProxy underlying_type; - typedef double native_type; - static const uint_type num_used_bits = 64; - static const uint_type num_exponent_bits = 11; - static const uint_type num_fraction_bits = 52; - static const uint_type exponent_bias = 1023; -}; - -// Traits for IEEE half. -// 1 sign bit, 5 exponent bits, 10 fractional bits. -template <> -struct HexFloatTraits> { - typedef uint16_t uint_type; - typedef int16_t int_type; - typedef uint16_t underlying_type; - typedef uint16_t native_type; - static const uint_type num_used_bits = 16; - static const uint_type num_exponent_bits = 5; - static const uint_type num_fraction_bits = 10; - static const uint_type exponent_bias = 15; -}; - -enum round_direction { - kRoundToZero, - kRoundToNearestEven, - kRoundToPositiveInfinity, - kRoundToNegativeInfinity -}; - -// Template class that houses a floating pointer number. -// It exposes a number of constants based on the provided traits to -// assist in interpreting the bits of the value. -template > -class HexFloat { - public: - typedef typename Traits::uint_type uint_type; - typedef typename Traits::int_type int_type; - typedef typename Traits::underlying_type underlying_type; - typedef typename Traits::native_type native_type; - - explicit HexFloat(T f) : value_(f) {} - - T value() const { return value_; } - void set_value(T f) { value_ = f; } - - // These are all written like this because it is convenient to have - // compile-time constants for all of these values. - - // Pass-through values to save typing. - static const uint32_t num_used_bits = Traits::num_used_bits; - static const uint32_t exponent_bias = Traits::exponent_bias; - static const uint32_t num_exponent_bits = Traits::num_exponent_bits; - static const uint32_t num_fraction_bits = Traits::num_fraction_bits; - - // Number of bits to shift left to set the highest relevant bit. - static const uint32_t top_bit_left_shift = num_used_bits - 1; - // How many nibbles (hex characters) the fractional part takes up. - static const uint32_t fraction_nibbles = (num_fraction_bits + 3) / 4; - // If the fractional part does not fit evenly into a hex character (4-bits) - // then we have to left-shift to get rid of leading 0s. This is the amount - // we have to shift (might be 0). - static const uint32_t num_overflow_bits = - fraction_nibbles * 4 - num_fraction_bits; - - // The representation of the fraction, not the actual bits. This - // includes the leading bit that is usually implicit. - static const uint_type fraction_represent_mask = - spvutils::SetBits::get; - - // The topmost bit in the nibble-aligned fraction. - static const uint_type fraction_top_bit = - uint_type(1) << (num_fraction_bits + num_overflow_bits - 1); - - // The least significant bit in the exponent, which is also the bit - // immediately to the left of the significand. 
- static const uint_type first_exponent_bit = uint_type(1) - << (num_fraction_bits); - - // The mask for the encoded fraction. It does not include the - // implicit bit. - static const uint_type fraction_encode_mask = - spvutils::SetBits::get; - - // The bit that is used as a sign. - static const uint_type sign_mask = uint_type(1) << top_bit_left_shift; - - // The bits that represent the exponent. - static const uint_type exponent_mask = - spvutils::SetBits::get; - - // How far left the exponent is shifted. - static const uint32_t exponent_left_shift = num_fraction_bits; - - // How far from the right edge the fraction is shifted. - static const uint32_t fraction_right_shift = - static_cast(sizeof(uint_type) * 8) - num_fraction_bits; - - // The maximum representable unbiased exponent. - static const int_type max_exponent = - (exponent_mask >> num_fraction_bits) - exponent_bias; - // The minimum representable exponent for normalized numbers. - static const int_type min_exponent = -static_cast(exponent_bias); - - // Returns the bits associated with the value. - uint_type getBits() const { return spvutils::BitwiseCast(value_); } - - // Returns the bits associated with the value, without the leading sign bit. - uint_type getUnsignedBits() const { - return static_cast(spvutils::BitwiseCast(value_) & - ~sign_mask); - } - - // Returns the bits associated with the exponent, shifted to start at the - // lsb of the type. - const uint_type getExponentBits() const { - return static_cast((getBits() & exponent_mask) >> - num_fraction_bits); - } - - // Returns the exponent in unbiased form. This is the exponent in the - // human-friendly form. - const int_type getUnbiasedExponent() const { - return static_cast(getExponentBits() - exponent_bias); - } - - // Returns just the significand bits from the value. - const uint_type getSignificandBits() const { - return getBits() & fraction_encode_mask; - } - - // If the number was normalized, returns the unbiased exponent. - // If the number was denormal, normalize the exponent first. - const int_type getUnbiasedNormalizedExponent() const { - if ((getBits() & ~sign_mask) == 0) { // special case if everything is 0 - return 0; - } - int_type exp = getUnbiasedExponent(); - if (exp == min_exponent) { // We are in denorm land. - uint_type significand_bits = getSignificandBits(); - while ((significand_bits & (first_exponent_bit >> 1)) == 0) { - significand_bits = static_cast(significand_bits << 1); - exp = static_cast(exp - 1); - } - significand_bits &= fraction_encode_mask; - } - return exp; - } - - // Returns the signficand after it has been normalized. - const uint_type getNormalizedSignificand() const { - int_type unbiased_exponent = getUnbiasedNormalizedExponent(); - uint_type significand = getSignificandBits(); - for (int_type i = unbiased_exponent; i <= min_exponent; ++i) { - significand = static_cast(significand << 1); - } - significand &= fraction_encode_mask; - return significand; - } - - // Returns true if this number represents a negative value. - bool isNegative() const { return (getBits() & sign_mask) != 0; } - - // Sets this HexFloat from the individual components. - // Note this assumes EVERY significand is normalized, and has an implicit - // leading one. This means that the only way that this method will set 0, - // is if you set a number so denormalized that it underflows. - // Do not use this method with raw bits extracted from a subnormal number, - // since subnormals do not have an implicit leading 1 in the significand. 
- // The significand is also expected to be in the - // lowest-most num_fraction_bits of the uint_type. - // The exponent is expected to be unbiased, meaning an exponent of - // 0 actually means 0. - // If underflow_round_up is set, then on underflow, if a number is non-0 - // and would underflow, we round up to the smallest denorm. - void setFromSignUnbiasedExponentAndNormalizedSignificand( - bool negative, int_type exponent, uint_type significand, - bool round_denorm_up) { - bool significand_is_zero = significand == 0; - - if (exponent <= min_exponent) { - // If this was denormalized, then we have to shift the bit on, meaning - // the significand is not zero. - significand_is_zero = false; - significand |= first_exponent_bit; - significand = static_cast(significand >> 1); - } - - while (exponent < min_exponent) { - significand = static_cast(significand >> 1); - ++exponent; - } - - if (exponent == min_exponent) { - if (significand == 0 && !significand_is_zero && round_denorm_up) { - significand = static_cast(0x1); - } - } - - uint_type new_value = 0; - if (negative) { - new_value = static_cast(new_value | sign_mask); - } - exponent = static_cast(exponent + exponent_bias); - assert(exponent >= 0); - - // put it all together - exponent = static_cast((exponent << exponent_left_shift) & - exponent_mask); - significand = static_cast(significand & fraction_encode_mask); - new_value = static_cast(new_value | (exponent | significand)); - value_ = BitwiseCast(new_value); - } - - // Increments the significand of this number by the given amount. - // If this would spill the significand into the implicit bit, - // carry is set to true and the significand is shifted to fit into - // the correct location, otherwise carry is set to false. - // All significands and to_increment are assumed to be within the bounds - // for a valid significand. - static uint_type incrementSignificand(uint_type significand, - uint_type to_increment, bool* carry) { - significand = static_cast(significand + to_increment); - *carry = false; - if (significand & first_exponent_bit) { - *carry = true; - // The implicit 1-bit will have carried, so we should zero-out the - // top bit and shift back. - significand = static_cast(significand & ~first_exponent_bit); - significand = static_cast(significand >> 1); - } - return significand; - } - - // These exist because MSVC throws warnings on negative right-shifts - // even if they are not going to be executed. Eg: - // constant_number < 0? 0: constant_number - // These convert the negative left-shifts into right shifts. - - template - uint_type negatable_left_shift(int_type N, uint_type val) - { - if(N >= 0) - return val << N; - - return val >> -N; - } - - template - uint_type negatable_right_shift(int_type N, uint_type val) - { - if(N >= 0) - return val >> N; - - return val << -N; - } - - // Returns the significand, rounded to fit in a significand in - // other_T. This is shifted so that the most significant - // bit of the rounded number lines up with the most significant bit - // of the returned significand. - template - typename other_T::uint_type getRoundedNormalizedSignificand( - round_direction dir, bool* carry_bit) { - typedef typename other_T::uint_type other_uint_type; - static const int_type num_throwaway_bits = - static_cast(num_fraction_bits) - - static_cast(other_T::num_fraction_bits); - - static const uint_type last_significant_bit = - (num_throwaway_bits < 0) - ? 
0 - : negatable_left_shift(num_throwaway_bits, 1u); - static const uint_type first_rounded_bit = - (num_throwaway_bits < 1) - ? 0 - : negatable_left_shift(num_throwaway_bits - 1, 1u); - - static const uint_type throwaway_mask_bits = - num_throwaway_bits > 0 ? num_throwaway_bits : 0; - static const uint_type throwaway_mask = - spvutils::SetBits::get; - - *carry_bit = false; - other_uint_type out_val = 0; - uint_type significand = getNormalizedSignificand(); - // If we are up-casting, then we just have to shift to the right location. - if (num_throwaway_bits <= 0) { - out_val = static_cast(significand); - uint_type shift_amount = static_cast(-num_throwaway_bits); - out_val = static_cast(out_val << shift_amount); - return out_val; - } - - // If every non-representable bit is 0, then we don't have any casting to - // do. - if ((significand & throwaway_mask) == 0) { - return static_cast( - negatable_right_shift(num_throwaway_bits, significand)); - } - - bool round_away_from_zero = false; - // We actually have to narrow the significand here, so we have to follow the - // rounding rules. - switch (dir) { - case kRoundToZero: - break; - case kRoundToPositiveInfinity: - round_away_from_zero = !isNegative(); - break; - case kRoundToNegativeInfinity: - round_away_from_zero = isNegative(); - break; - case kRoundToNearestEven: - // Have to round down, round bit is 0 - if ((first_rounded_bit & significand) == 0) { - break; - } - if (((significand & throwaway_mask) & ~first_rounded_bit) != 0) { - // If any subsequent bit of the rounded portion is non-0 then we round - // up. - round_away_from_zero = true; - break; - } - // We are exactly half-way between 2 numbers, pick even. - if ((significand & last_significant_bit) != 0) { - // 1 for our last bit, round up. - round_away_from_zero = true; - break; - } - break; - } - - if (round_away_from_zero) { - return static_cast( - negatable_right_shift(num_throwaway_bits, incrementSignificand( - significand, last_significant_bit, carry_bit))); - } else { - return static_cast( - negatable_right_shift(num_throwaway_bits, significand)); - } - } - - // Casts this value to another HexFloat. If the cast is widening, - // then round_dir is ignored. If the cast is narrowing, then - // the result is rounded in the direction specified. - // This number will retain Nan and Inf values. - // It will also saturate to Inf if the number overflows, and - // underflow to (0 or min depending on rounding) if the number underflows. - template - void castTo(other_T& other, round_direction round_dir) { - other = other_T(static_cast(0)); - bool negate = isNegative(); - if (getUnsignedBits() == 0) { - if (negate) { - other.set_value(-other.value()); - } - return; - } - uint_type significand = getSignificandBits(); - bool carried = false; - typename other_T::uint_type rounded_significand = - getRoundedNormalizedSignificand(round_dir, &carried); - - int_type exponent = getUnbiasedExponent(); - if (exponent == min_exponent) { - // If we are denormal, normalize the exponent, so that we can encode - // easily. 
- exponent = static_cast(exponent + 1); - for (uint_type check_bit = first_exponent_bit >> 1; check_bit != 0; - check_bit = static_cast(check_bit >> 1)) { - exponent = static_cast(exponent - 1); - if (check_bit & significand) break; - } - } - - bool is_nan = - (getBits() & exponent_mask) == exponent_mask && significand != 0; - bool is_inf = - !is_nan && - ((exponent + carried) > static_cast(other_T::exponent_bias) || - (significand == 0 && (getBits() & exponent_mask) == exponent_mask)); - - // If we are Nan or Inf we should pass that through. - if (is_inf) { - other.set_value(BitwiseCast( - static_cast( - (negate ? other_T::sign_mask : 0) | other_T::exponent_mask))); - return; - } - if (is_nan) { - typename other_T::uint_type shifted_significand; - shifted_significand = static_cast( - negatable_left_shift( - static_cast(other_T::num_fraction_bits) - - static_cast(num_fraction_bits), significand)); - - // We are some sort of Nan. We try to keep the bit-pattern of the Nan - // as close as possible. If we had to shift off bits so we are 0, then we - // just set the last bit. - other.set_value(BitwiseCast( - static_cast( - (negate ? other_T::sign_mask : 0) | other_T::exponent_mask | - (shifted_significand == 0 ? 0x1 : shifted_significand)))); - return; - } - - bool round_underflow_up = - isNegative() ? round_dir == kRoundToNegativeInfinity - : round_dir == kRoundToPositiveInfinity; - typedef typename other_T::int_type other_int_type; - // setFromSignUnbiasedExponentAndNormalizedSignificand will - // zero out any underflowing value (but retain the sign). - other.setFromSignUnbiasedExponentAndNormalizedSignificand( - negate, static_cast(exponent), rounded_significand, - round_underflow_up); - return; - } - - private: - T value_; - - static_assert(num_used_bits == - Traits::num_exponent_bits + Traits::num_fraction_bits + 1, - "The number of bits do not fit"); - static_assert(sizeof(T) == sizeof(uint_type), "The type sizes do not match"); -}; - -// Returns 4 bits represented by the hex character. -inline uint8_t get_nibble_from_character(int character) { - const char* dec = "0123456789"; - const char* lower = "abcdef"; - const char* upper = "ABCDEF"; - const char* p = nullptr; - if ((p = strchr(dec, character))) { - return static_cast(p - dec); - } else if ((p = strchr(lower, character))) { - return static_cast(p - lower + 0xa); - } else if ((p = strchr(upper, character))) { - return static_cast(p - upper + 0xa); - } - - assert(false && "This was called with a non-hex character"); - return 0; -} - -// Outputs the given HexFloat to the stream. -template -std::ostream& operator<<(std::ostream& os, const HexFloat& value) { - typedef HexFloat HF; - typedef typename HF::uint_type uint_type; - typedef typename HF::int_type int_type; - - static_assert(HF::num_used_bits != 0, - "num_used_bits must be non-zero for a valid float"); - static_assert(HF::num_exponent_bits != 0, - "num_exponent_bits must be non-zero for a valid float"); - static_assert(HF::num_fraction_bits != 0, - "num_fractin_bits must be non-zero for a valid float"); - - const uint_type bits = spvutils::BitwiseCast(value.value()); - const char* const sign = (bits & HF::sign_mask) ? 
"-" : ""; - const uint_type exponent = static_cast( - (bits & HF::exponent_mask) >> HF::num_fraction_bits); - - uint_type fraction = static_cast((bits & HF::fraction_encode_mask) - << HF::num_overflow_bits); - - const bool is_zero = exponent == 0 && fraction == 0; - const bool is_denorm = exponent == 0 && !is_zero; - - // exponent contains the biased exponent we have to convert it back into - // the normal range. - int_type int_exponent = static_cast(exponent - HF::exponent_bias); - // If the number is all zeros, then we actually have to NOT shift the - // exponent. - int_exponent = is_zero ? 0 : int_exponent; - - // If we are denorm, then start shifting, and decreasing the exponent until - // our leading bit is 1. - - if (is_denorm) { - while ((fraction & HF::fraction_top_bit) == 0) { - fraction = static_cast(fraction << 1); - int_exponent = static_cast(int_exponent - 1); - } - // Since this is denormalized, we have to consume the leading 1 since it - // will end up being implicit. - fraction = static_cast(fraction << 1); // eat the leading 1 - fraction &= HF::fraction_represent_mask; - } - - uint_type fraction_nibbles = HF::fraction_nibbles; - // We do not have to display any trailing 0s, since this represents the - // fractional part. - while (fraction_nibbles > 0 && (fraction & 0xF) == 0) { - // Shift off any trailing values; - fraction = static_cast(fraction >> 4); - --fraction_nibbles; - } - - const auto saved_flags = os.flags(); - const auto saved_fill = os.fill(); - - os << sign << "0x" << (is_zero ? '0' : '1'); - if (fraction_nibbles) { - // Make sure to keep the leading 0s in place, since this is the fractional - // part. - os << "." << std::setw(static_cast(fraction_nibbles)) - << std::setfill('0') << std::hex << fraction; - } - os << "p" << std::dec << (int_exponent >= 0 ? "+" : "") << int_exponent; - - os.flags(saved_flags); - os.fill(saved_fill); - - return os; -} - -// Returns true if negate_value is true and the next character on the -// input stream is a plus or minus sign. In that case we also set the fail bit -// on the stream and set the value to the zero value for its type. -template -inline bool RejectParseDueToLeadingSign(std::istream& is, bool negate_value, - HexFloat& value) { - if (negate_value) { - auto next_char = is.peek(); - if (next_char == '-' || next_char == '+') { - // Fail the parse. Emulate standard behaviour by setting the value to - // the zero value, and set the fail bit on the stream. - value = HexFloat(typename HexFloat::uint_type(0)); - is.setstate(std::ios_base::failbit); - return true; - } - } - return false; -} - -// Parses a floating point number from the given stream and stores it into the -// value parameter. -// If negate_value is true then the number may not have a leading minus or -// plus, and if it successfully parses, then the number is negated before -// being stored into the value parameter. -// If the value cannot be correctly parsed or overflows the target floating -// point type, then set the fail bit on the stream. -// TODO(dneto): Promise C++11 standard behavior in how the value is set in -// the error case, but only after all target platforms implement it correctly. -// In particular, the Microsoft C++ runtime appears to be out of spec. 
-template -inline std::istream& ParseNormalFloat(std::istream& is, bool negate_value, - HexFloat& value) { - if (RejectParseDueToLeadingSign(is, negate_value, value)) { - return is; - } - T val; - is >> val; - if (negate_value) { - val = -val; - } - value.set_value(val); - // In the failure case, map -0.0 to 0.0. - if (is.fail() && value.getUnsignedBits() == 0u) { - value = HexFloat(typename HexFloat::uint_type(0)); - } - if (val.isInfinity()) { - // Fail the parse. Emulate standard behaviour by setting the value to - // the closest normal value, and set the fail bit on the stream. - value.set_value((value.isNegative() | negate_value) ? T::lowest() - : T::max()); - is.setstate(std::ios_base::failbit); - } - return is; -} - -// Specialization of ParseNormalFloat for FloatProxy values. -// This will parse the float as it were a 32-bit floating point number, -// and then round it down to fit into a Float16 value. -// The number is rounded towards zero. -// If negate_value is true then the number may not have a leading minus or -// plus, and if it successfully parses, then the number is negated before -// being stored into the value parameter. -// If the value cannot be correctly parsed or overflows the target floating -// point type, then set the fail bit on the stream. -// TODO(dneto): Promise C++11 standard behavior in how the value is set in -// the error case, but only after all target platforms implement it correctly. -// In particular, the Microsoft C++ runtime appears to be out of spec. -template <> -inline std::istream& -ParseNormalFloat, HexFloatTraits>>( - std::istream& is, bool negate_value, - HexFloat, HexFloatTraits>>& value) { - // First parse as a 32-bit float. - HexFloat> float_val(0.0f); - ParseNormalFloat(is, negate_value, float_val); - - // Then convert to 16-bit float, saturating at infinities, and - // rounding toward zero. - float_val.castTo(value, kRoundToZero); - - // Overflow on 16-bit behaves the same as for 32- and 64-bit: set the - // fail bit and set the lowest or highest value. - if (Float16::isInfinity(value.value().getAsFloat())) { - value.set_value(value.isNegative() ? Float16::lowest() : Float16::max()); - is.setstate(std::ios_base::failbit); - } - return is; -} - -// Reads a HexFloat from the given stream. -// If the float is not encoded as a hex-float then it will be parsed -// as a regular float. -// This may fail if your stream does not support at least one unget. -// Nan values can be encoded with "0x1.p+exponent_bias". -// This would normally overflow a float and round to -// infinity but this special pattern is the exact representation for a NaN, -// and therefore is actually encoded as the correct NaN. To encode inf, -// either 0x0p+exponent_bias can be specified or any exponent greater than -// exponent_bias. -// Examples using IEEE 32-bit float encoding. -// 0x1.0p+128 (+inf) -// -0x1.0p-128 (-inf) -// -// 0x1.1p+128 (+Nan) -// -0x1.1p+128 (-Nan) -// -// 0x1p+129 (+inf) -// -0x1p+129 (-inf) -template -std::istream& operator>>(std::istream& is, HexFloat& value) { - using HF = HexFloat; - using uint_type = typename HF::uint_type; - using int_type = typename HF::int_type; - - value.set_value(static_cast(0.f)); - - if (is.flags() & std::ios::skipws) { - // If the user wants to skip whitespace , then we should obey that. 
- while (std::isspace(is.peek())) { - is.get(); - } - } - - auto next_char = is.peek(); - bool negate_value = false; - - if (next_char != '-' && next_char != '0') { - return ParseNormalFloat(is, negate_value, value); - } - - if (next_char == '-') { - negate_value = true; - is.get(); - next_char = is.peek(); - } - - if (next_char == '0') { - is.get(); // We may have to unget this. - auto maybe_hex_start = is.peek(); - if (maybe_hex_start != 'x' && maybe_hex_start != 'X') { - is.unget(); - return ParseNormalFloat(is, negate_value, value); - } else { - is.get(); // Throw away the 'x'; - } - } else { - return ParseNormalFloat(is, negate_value, value); - } - - // This "looks" like a hex-float so treat it as one. - bool seen_p = false; - bool seen_dot = false; - uint_type fraction_index = 0; - - uint_type fraction = 0; - int_type exponent = HF::exponent_bias; - - // Strip off leading zeros so we don't have to special-case them later. - while ((next_char = is.peek()) == '0') { - is.get(); - } - - bool is_denorm = - true; // Assume denorm "representation" until we hear otherwise. - // NB: This does not mean the value is actually denorm, - // it just means that it was written 0. - bool bits_written = false; // Stays false until we write a bit. - while (!seen_p && !seen_dot) { - // Handle characters that are left of the fractional part. - if (next_char == '.') { - seen_dot = true; - } else if (next_char == 'p') { - seen_p = true; - } else if (::isxdigit(next_char)) { - // We know this is not denormalized since we have stripped all leading - // zeroes and we are not a ".". - is_denorm = false; - int number = get_nibble_from_character(next_char); - for (int i = 0; i < 4; ++i, number <<= 1) { - uint_type write_bit = (number & 0x8) ? 0x1 : 0x0; - if (bits_written) { - // If we are here the bits represented belong in the fractional - // part of the float, and we have to adjust the exponent accordingly. - fraction = static_cast( - fraction | - static_cast( - write_bit << (HF::top_bit_left_shift - fraction_index++))); - exponent = static_cast(exponent + 1); - } - bits_written |= write_bit != 0; - } - } else { - // We have not found our exponent yet, so we have to fail. - is.setstate(std::ios::failbit); - return is; - } - is.get(); - next_char = is.peek(); - } - bits_written = false; - while (seen_dot && !seen_p) { - // Handle only fractional parts now. - if (next_char == 'p') { - seen_p = true; - } else if (::isxdigit(next_char)) { - int number = get_nibble_from_character(next_char); - for (int i = 0; i < 4; ++i, number <<= 1) { - uint_type write_bit = (number & 0x8) ? 0x01 : 0x00; - bits_written |= write_bit != 0; - if (is_denorm && !bits_written) { - // Handle modifying the exponent here this way we can handle - // an arbitrary number of hex values without overflowing our - // integer. - exponent = static_cast(exponent - 1); - } else { - fraction = static_cast( - fraction | - static_cast( - write_bit << (HF::top_bit_left_shift - fraction_index++))); - } - } - } else { - // We still have not found our 'p' exponent yet, so this is not a valid - // hex-float. - is.setstate(std::ios::failbit); - return is; - } - is.get(); - next_char = is.peek(); - } - - bool seen_sign = false; - int8_t exponent_sign = 1; - int_type written_exponent = 0; - while (true) { - if ((next_char == '-' || next_char == '+')) { - if (seen_sign) { - is.setstate(std::ios::failbit); - return is; - } - seen_sign = true; - exponent_sign = (next_char == '-') ? 
-1 : 1; - } else if (::isdigit(next_char)) { - // Hex-floats express their exponent as decimal. - written_exponent = static_cast(written_exponent * 10); - written_exponent = - static_cast(written_exponent + (next_char - '0')); - } else { - break; - } - is.get(); - next_char = is.peek(); - } - - written_exponent = static_cast(written_exponent * exponent_sign); - exponent = static_cast(exponent + written_exponent); - - bool is_zero = is_denorm && (fraction == 0); - if (is_denorm && !is_zero) { - fraction = static_cast(fraction << 1); - exponent = static_cast(exponent - 1); - } else if (is_zero) { - exponent = 0; - } - - if (exponent <= 0 && !is_zero) { - fraction = static_cast(fraction >> 1); - fraction |= static_cast(1) << HF::top_bit_left_shift; - } - - fraction = (fraction >> HF::fraction_right_shift) & HF::fraction_encode_mask; - - const int_type max_exponent = - SetBits::get; - - // Handle actual denorm numbers - while (exponent < 0 && !is_zero) { - fraction = static_cast(fraction >> 1); - exponent = static_cast(exponent + 1); - - fraction &= HF::fraction_encode_mask; - if (fraction == 0) { - // We have underflowed our fraction. We should clamp to zero. - is_zero = true; - exponent = 0; - } - } - - // We have overflowed so we should be inf/-inf. - if (exponent > max_exponent) { - exponent = max_exponent; - fraction = 0; - } - - uint_type output_bits = static_cast( - static_cast(negate_value ? 1 : 0) << HF::top_bit_left_shift); - output_bits |= fraction; - - uint_type shifted_exponent = static_cast( - static_cast(exponent << HF::exponent_left_shift) & - HF::exponent_mask); - output_bits |= shifted_exponent; - - T output_float = spvutils::BitwiseCast(output_bits); - value.set_value(output_float); - - return is; -} - -// Writes a FloatProxy value to a stream. -// Zero and normal numbers are printed in the usual notation, but with -// enough digits to fully reproduce the value. Other values (subnormal, -// NaN, and infinity) are printed as a hex float. -template -std::ostream& operator<<(std::ostream& os, const FloatProxy& value) { - auto float_val = value.getAsFloat(); - switch (std::fpclassify(float_val)) { - case FP_ZERO: - case FP_NORMAL: { - auto saved_precision = os.precision(); - os.precision(std::numeric_limits::digits10); - os << float_val; - os.precision(saved_precision); - } break; - default: - os << HexFloat>(value); - break; - } - return os; -} - -template <> -inline std::ostream& operator<<(std::ostream& os, - const FloatProxy& value) { - os << HexFloat>(value); - return os; -} -} - -#endif // LIBSPIRV_UTIL_HEX_FLOAT_H_ diff --git a/deps/glslang/SPIRV/spirv.hpp b/deps/glslang/SPIRV/spirv.hpp deleted file mode 100644 index 8bddf7e5..00000000 --- a/deps/glslang/SPIRV/spirv.hpp +++ /dev/null @@ -1,981 +0,0 @@ -// Copyright (c) 2014-2017 The Khronos Group Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and/or associated documentation files (the "Materials"), -// to deal in the Materials without restriction, including without limitation -// the rights to use, copy, modify, merge, publish, distribute, sublicense, -// and/or sell copies of the Materials, and to permit persons to whom the -// Materials are furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Materials. -// -// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS -// STANDARDS. 
THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND -// HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ -// -// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL -// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -// FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS -// IN THE MATERIALS. - -// This header is automatically generated by the same tool that creates -// the Binary Section of the SPIR-V specification. - -// Enumeration tokens for SPIR-V, in various styles: -// C, C++, C++11, JSON, Lua, Python -// -// - C will have tokens with a "Spv" prefix, e.g.: SpvSourceLanguageGLSL -// - C++ will have tokens in the "spv" name space, e.g.: spv::SourceLanguageGLSL -// - C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL -// - Lua will use tables, e.g.: spv.SourceLanguage.GLSL -// - Python will use dictionaries, e.g.: spv['SourceLanguage']['GLSL'] -// -// Some tokens act like mask values, which can be OR'd together, -// while others are mutually exclusive. The mask-like ones have -// "Mask" in their name, and a parallel enum that has the shift -// amount (1 << x) for each corresponding enumerant. - -#ifndef spirv_HPP -#define spirv_HPP - -namespace spv { - -typedef unsigned int Id; - -#define SPV_VERSION 0x10000 -#define SPV_REVISION 12 - -static const unsigned int MagicNumber = 0x07230203; -static const unsigned int Version = 0x00010000; -static const unsigned int Revision = 12; -static const unsigned int OpCodeMask = 0xffff; -static const unsigned int WordCountShift = 16; - -enum SourceLanguage { - SourceLanguageUnknown = 0, - SourceLanguageESSL = 1, - SourceLanguageGLSL = 2, - SourceLanguageOpenCL_C = 3, - SourceLanguageOpenCL_CPP = 4, - SourceLanguageHLSL = 5, - SourceLanguageMax = 0x7fffffff, -}; - -enum ExecutionModel { - ExecutionModelVertex = 0, - ExecutionModelTessellationControl = 1, - ExecutionModelTessellationEvaluation = 2, - ExecutionModelGeometry = 3, - ExecutionModelFragment = 4, - ExecutionModelGLCompute = 5, - ExecutionModelKernel = 6, - ExecutionModelMax = 0x7fffffff, -}; - -enum AddressingModel { - AddressingModelLogical = 0, - AddressingModelPhysical32 = 1, - AddressingModelPhysical64 = 2, - AddressingModelMax = 0x7fffffff, -}; - -enum MemoryModel { - MemoryModelSimple = 0, - MemoryModelGLSL450 = 1, - MemoryModelOpenCL = 2, - MemoryModelMax = 0x7fffffff, -}; - -enum ExecutionMode { - ExecutionModeInvocations = 0, - ExecutionModeSpacingEqual = 1, - ExecutionModeSpacingFractionalEven = 2, - ExecutionModeSpacingFractionalOdd = 3, - ExecutionModeVertexOrderCw = 4, - ExecutionModeVertexOrderCcw = 5, - ExecutionModePixelCenterInteger = 6, - ExecutionModeOriginUpperLeft = 7, - ExecutionModeOriginLowerLeft = 8, - ExecutionModeEarlyFragmentTests = 9, - ExecutionModePointMode = 10, - ExecutionModeXfb = 11, - ExecutionModeDepthReplacing = 12, - ExecutionModeDepthGreater = 14, - ExecutionModeDepthLess = 15, - ExecutionModeDepthUnchanged = 16, - ExecutionModeLocalSize = 17, - ExecutionModeLocalSizeHint = 18, - ExecutionModeInputPoints = 19, - ExecutionModeInputLines = 20, - ExecutionModeInputLinesAdjacency = 21, - ExecutionModeTriangles = 22, - ExecutionModeInputTrianglesAdjacency = 23, - ExecutionModeQuads = 24, - 
ExecutionModeIsolines = 25, - ExecutionModeOutputVertices = 26, - ExecutionModeOutputPoints = 27, - ExecutionModeOutputLineStrip = 28, - ExecutionModeOutputTriangleStrip = 29, - ExecutionModeVecTypeHint = 30, - ExecutionModeContractionOff = 31, - ExecutionModePostDepthCoverage = 4446, - ExecutionModeMax = 0x7fffffff, -}; - -enum StorageClass { - StorageClassUniformConstant = 0, - StorageClassInput = 1, - StorageClassUniform = 2, - StorageClassOutput = 3, - StorageClassWorkgroup = 4, - StorageClassCrossWorkgroup = 5, - StorageClassPrivate = 6, - StorageClassFunction = 7, - StorageClassGeneric = 8, - StorageClassPushConstant = 9, - StorageClassAtomicCounter = 10, - StorageClassImage = 11, - StorageClassStorageBuffer = 12, - StorageClassMax = 0x7fffffff, -}; - -enum Dim { - Dim1D = 0, - Dim2D = 1, - Dim3D = 2, - DimCube = 3, - DimRect = 4, - DimBuffer = 5, - DimSubpassData = 6, - DimMax = 0x7fffffff, -}; - -enum SamplerAddressingMode { - SamplerAddressingModeNone = 0, - SamplerAddressingModeClampToEdge = 1, - SamplerAddressingModeClamp = 2, - SamplerAddressingModeRepeat = 3, - SamplerAddressingModeRepeatMirrored = 4, - SamplerAddressingModeMax = 0x7fffffff, -}; - -enum SamplerFilterMode { - SamplerFilterModeNearest = 0, - SamplerFilterModeLinear = 1, - SamplerFilterModeMax = 0x7fffffff, -}; - -enum ImageFormat { - ImageFormatUnknown = 0, - ImageFormatRgba32f = 1, - ImageFormatRgba16f = 2, - ImageFormatR32f = 3, - ImageFormatRgba8 = 4, - ImageFormatRgba8Snorm = 5, - ImageFormatRg32f = 6, - ImageFormatRg16f = 7, - ImageFormatR11fG11fB10f = 8, - ImageFormatR16f = 9, - ImageFormatRgba16 = 10, - ImageFormatRgb10A2 = 11, - ImageFormatRg16 = 12, - ImageFormatRg8 = 13, - ImageFormatR16 = 14, - ImageFormatR8 = 15, - ImageFormatRgba16Snorm = 16, - ImageFormatRg16Snorm = 17, - ImageFormatRg8Snorm = 18, - ImageFormatR16Snorm = 19, - ImageFormatR8Snorm = 20, - ImageFormatRgba32i = 21, - ImageFormatRgba16i = 22, - ImageFormatRgba8i = 23, - ImageFormatR32i = 24, - ImageFormatRg32i = 25, - ImageFormatRg16i = 26, - ImageFormatRg8i = 27, - ImageFormatR16i = 28, - ImageFormatR8i = 29, - ImageFormatRgba32ui = 30, - ImageFormatRgba16ui = 31, - ImageFormatRgba8ui = 32, - ImageFormatR32ui = 33, - ImageFormatRgb10a2ui = 34, - ImageFormatRg32ui = 35, - ImageFormatRg16ui = 36, - ImageFormatRg8ui = 37, - ImageFormatR16ui = 38, - ImageFormatR8ui = 39, - ImageFormatMax = 0x7fffffff, -}; - -enum ImageChannelOrder { - ImageChannelOrderR = 0, - ImageChannelOrderA = 1, - ImageChannelOrderRG = 2, - ImageChannelOrderRA = 3, - ImageChannelOrderRGB = 4, - ImageChannelOrderRGBA = 5, - ImageChannelOrderBGRA = 6, - ImageChannelOrderARGB = 7, - ImageChannelOrderIntensity = 8, - ImageChannelOrderLuminance = 9, - ImageChannelOrderRx = 10, - ImageChannelOrderRGx = 11, - ImageChannelOrderRGBx = 12, - ImageChannelOrderDepth = 13, - ImageChannelOrderDepthStencil = 14, - ImageChannelOrdersRGB = 15, - ImageChannelOrdersRGBx = 16, - ImageChannelOrdersRGBA = 17, - ImageChannelOrdersBGRA = 18, - ImageChannelOrderABGR = 19, - ImageChannelOrderMax = 0x7fffffff, -}; - -enum ImageChannelDataType { - ImageChannelDataTypeSnormInt8 = 0, - ImageChannelDataTypeSnormInt16 = 1, - ImageChannelDataTypeUnormInt8 = 2, - ImageChannelDataTypeUnormInt16 = 3, - ImageChannelDataTypeUnormShort565 = 4, - ImageChannelDataTypeUnormShort555 = 5, - ImageChannelDataTypeUnormInt101010 = 6, - ImageChannelDataTypeSignedInt8 = 7, - ImageChannelDataTypeSignedInt16 = 8, - ImageChannelDataTypeSignedInt32 = 9, - ImageChannelDataTypeUnsignedInt8 = 10, - 
ImageChannelDataTypeUnsignedInt16 = 11, - ImageChannelDataTypeUnsignedInt32 = 12, - ImageChannelDataTypeHalfFloat = 13, - ImageChannelDataTypeFloat = 14, - ImageChannelDataTypeUnormInt24 = 15, - ImageChannelDataTypeUnormInt101010_2 = 16, - ImageChannelDataTypeMax = 0x7fffffff, -}; - -enum ImageOperandsShift { - ImageOperandsBiasShift = 0, - ImageOperandsLodShift = 1, - ImageOperandsGradShift = 2, - ImageOperandsConstOffsetShift = 3, - ImageOperandsOffsetShift = 4, - ImageOperandsConstOffsetsShift = 5, - ImageOperandsSampleShift = 6, - ImageOperandsMinLodShift = 7, - ImageOperandsMax = 0x7fffffff, -}; - -enum ImageOperandsMask { - ImageOperandsMaskNone = 0, - ImageOperandsBiasMask = 0x00000001, - ImageOperandsLodMask = 0x00000002, - ImageOperandsGradMask = 0x00000004, - ImageOperandsConstOffsetMask = 0x00000008, - ImageOperandsOffsetMask = 0x00000010, - ImageOperandsConstOffsetsMask = 0x00000020, - ImageOperandsSampleMask = 0x00000040, - ImageOperandsMinLodMask = 0x00000080, -}; - -enum FPFastMathModeShift { - FPFastMathModeNotNaNShift = 0, - FPFastMathModeNotInfShift = 1, - FPFastMathModeNSZShift = 2, - FPFastMathModeAllowRecipShift = 3, - FPFastMathModeFastShift = 4, - FPFastMathModeMax = 0x7fffffff, -}; - -enum FPFastMathModeMask { - FPFastMathModeMaskNone = 0, - FPFastMathModeNotNaNMask = 0x00000001, - FPFastMathModeNotInfMask = 0x00000002, - FPFastMathModeNSZMask = 0x00000004, - FPFastMathModeAllowRecipMask = 0x00000008, - FPFastMathModeFastMask = 0x00000010, -}; - -enum FPRoundingMode { - FPRoundingModeRTE = 0, - FPRoundingModeRTZ = 1, - FPRoundingModeRTP = 2, - FPRoundingModeRTN = 3, - FPRoundingModeMax = 0x7fffffff, -}; - -enum LinkageType { - LinkageTypeExport = 0, - LinkageTypeImport = 1, - LinkageTypeMax = 0x7fffffff, -}; - -enum AccessQualifier { - AccessQualifierReadOnly = 0, - AccessQualifierWriteOnly = 1, - AccessQualifierReadWrite = 2, - AccessQualifierMax = 0x7fffffff, -}; - -enum FunctionParameterAttribute { - FunctionParameterAttributeZext = 0, - FunctionParameterAttributeSext = 1, - FunctionParameterAttributeByVal = 2, - FunctionParameterAttributeSret = 3, - FunctionParameterAttributeNoAlias = 4, - FunctionParameterAttributeNoCapture = 5, - FunctionParameterAttributeNoWrite = 6, - FunctionParameterAttributeNoReadWrite = 7, - FunctionParameterAttributeMax = 0x7fffffff, -}; - -enum Decoration { - DecorationRelaxedPrecision = 0, - DecorationSpecId = 1, - DecorationBlock = 2, - DecorationBufferBlock = 3, - DecorationRowMajor = 4, - DecorationColMajor = 5, - DecorationArrayStride = 6, - DecorationMatrixStride = 7, - DecorationGLSLShared = 8, - DecorationGLSLPacked = 9, - DecorationCPacked = 10, - DecorationBuiltIn = 11, - DecorationNoPerspective = 13, - DecorationFlat = 14, - DecorationPatch = 15, - DecorationCentroid = 16, - DecorationSample = 17, - DecorationInvariant = 18, - DecorationRestrict = 19, - DecorationAliased = 20, - DecorationVolatile = 21, - DecorationConstant = 22, - DecorationCoherent = 23, - DecorationNonWritable = 24, - DecorationNonReadable = 25, - DecorationUniform = 26, - DecorationSaturatedConversion = 28, - DecorationStream = 29, - DecorationLocation = 30, - DecorationComponent = 31, - DecorationIndex = 32, - DecorationBinding = 33, - DecorationDescriptorSet = 34, - DecorationOffset = 35, - DecorationXfbBuffer = 36, - DecorationXfbStride = 37, - DecorationFuncParamAttr = 38, - DecorationFPRoundingMode = 39, - DecorationFPFastMathMode = 40, - DecorationLinkageAttributes = 41, - DecorationNoContraction = 42, - DecorationInputAttachmentIndex = 43, - 
DecorationAlignment = 44, - DecorationExplicitInterpAMD = 4999, - DecorationOverrideCoverageNV = 5248, - DecorationPassthroughNV = 5250, - DecorationViewportRelativeNV = 5252, - DecorationSecondaryViewportRelativeNV = 5256, - DecorationMax = 0x7fffffff, -}; - -enum BuiltIn { - BuiltInPosition = 0, - BuiltInPointSize = 1, - BuiltInClipDistance = 3, - BuiltInCullDistance = 4, - BuiltInVertexId = 5, - BuiltInInstanceId = 6, - BuiltInPrimitiveId = 7, - BuiltInInvocationId = 8, - BuiltInLayer = 9, - BuiltInViewportIndex = 10, - BuiltInTessLevelOuter = 11, - BuiltInTessLevelInner = 12, - BuiltInTessCoord = 13, - BuiltInPatchVertices = 14, - BuiltInFragCoord = 15, - BuiltInPointCoord = 16, - BuiltInFrontFacing = 17, - BuiltInSampleId = 18, - BuiltInSamplePosition = 19, - BuiltInSampleMask = 20, - BuiltInFragDepth = 22, - BuiltInHelperInvocation = 23, - BuiltInNumWorkgroups = 24, - BuiltInWorkgroupSize = 25, - BuiltInWorkgroupId = 26, - BuiltInLocalInvocationId = 27, - BuiltInGlobalInvocationId = 28, - BuiltInLocalInvocationIndex = 29, - BuiltInWorkDim = 30, - BuiltInGlobalSize = 31, - BuiltInEnqueuedWorkgroupSize = 32, - BuiltInGlobalOffset = 33, - BuiltInGlobalLinearId = 34, - BuiltInSubgroupSize = 36, - BuiltInSubgroupMaxSize = 37, - BuiltInNumSubgroups = 38, - BuiltInNumEnqueuedSubgroups = 39, - BuiltInSubgroupId = 40, - BuiltInSubgroupLocalInvocationId = 41, - BuiltInVertexIndex = 42, - BuiltInInstanceIndex = 43, - BuiltInSubgroupEqMaskKHR = 4416, - BuiltInSubgroupGeMaskKHR = 4417, - BuiltInSubgroupGtMaskKHR = 4418, - BuiltInSubgroupLeMaskKHR = 4419, - BuiltInSubgroupLtMaskKHR = 4420, - BuiltInBaseVertex = 4424, - BuiltInBaseInstance = 4425, - BuiltInDrawIndex = 4426, - BuiltInDeviceIndex = 4438, - BuiltInViewIndex = 4440, - BuiltInBaryCoordNoPerspAMD = 4992, - BuiltInBaryCoordNoPerspCentroidAMD = 4993, - BuiltInBaryCoordNoPerspSampleAMD = 4994, - BuiltInBaryCoordSmoothAMD = 4995, - BuiltInBaryCoordSmoothCentroidAMD = 4996, - BuiltInBaryCoordSmoothSampleAMD = 4997, - BuiltInBaryCoordPullModelAMD = 4998, - BuiltInFragStencilRefEXT = 5014, - BuiltInViewportMaskNV = 5253, - BuiltInSecondaryPositionNV = 5257, - BuiltInSecondaryViewportMaskNV = 5258, - BuiltInPositionPerViewNV = 5261, - BuiltInViewportMaskPerViewNV = 5262, - BuiltInMax = 0x7fffffff, -}; - -enum SelectionControlShift { - SelectionControlFlattenShift = 0, - SelectionControlDontFlattenShift = 1, - SelectionControlMax = 0x7fffffff, -}; - -enum SelectionControlMask { - SelectionControlMaskNone = 0, - SelectionControlFlattenMask = 0x00000001, - SelectionControlDontFlattenMask = 0x00000002, -}; - -enum LoopControlShift { - LoopControlUnrollShift = 0, - LoopControlDontUnrollShift = 1, - LoopControlMax = 0x7fffffff, -}; - -enum LoopControlMask { - LoopControlMaskNone = 0, - LoopControlUnrollMask = 0x00000001, - LoopControlDontUnrollMask = 0x00000002, -}; - -enum FunctionControlShift { - FunctionControlInlineShift = 0, - FunctionControlDontInlineShift = 1, - FunctionControlPureShift = 2, - FunctionControlConstShift = 3, - FunctionControlMax = 0x7fffffff, -}; - -enum FunctionControlMask { - FunctionControlMaskNone = 0, - FunctionControlInlineMask = 0x00000001, - FunctionControlDontInlineMask = 0x00000002, - FunctionControlPureMask = 0x00000004, - FunctionControlConstMask = 0x00000008, -}; - -enum MemorySemanticsShift { - MemorySemanticsAcquireShift = 1, - MemorySemanticsReleaseShift = 2, - MemorySemanticsAcquireReleaseShift = 3, - MemorySemanticsSequentiallyConsistentShift = 4, - MemorySemanticsUniformMemoryShift = 6, - 
MemorySemanticsSubgroupMemoryShift = 7, - MemorySemanticsWorkgroupMemoryShift = 8, - MemorySemanticsCrossWorkgroupMemoryShift = 9, - MemorySemanticsAtomicCounterMemoryShift = 10, - MemorySemanticsImageMemoryShift = 11, - MemorySemanticsMax = 0x7fffffff, -}; - -enum MemorySemanticsMask { - MemorySemanticsMaskNone = 0, - MemorySemanticsAcquireMask = 0x00000002, - MemorySemanticsReleaseMask = 0x00000004, - MemorySemanticsAcquireReleaseMask = 0x00000008, - MemorySemanticsSequentiallyConsistentMask = 0x00000010, - MemorySemanticsUniformMemoryMask = 0x00000040, - MemorySemanticsSubgroupMemoryMask = 0x00000080, - MemorySemanticsWorkgroupMemoryMask = 0x00000100, - MemorySemanticsCrossWorkgroupMemoryMask = 0x00000200, - MemorySemanticsAtomicCounterMemoryMask = 0x00000400, - MemorySemanticsImageMemoryMask = 0x00000800, -}; - -enum MemoryAccessShift { - MemoryAccessVolatileShift = 0, - MemoryAccessAlignedShift = 1, - MemoryAccessNontemporalShift = 2, - MemoryAccessMax = 0x7fffffff, -}; - -enum MemoryAccessMask { - MemoryAccessMaskNone = 0, - MemoryAccessVolatileMask = 0x00000001, - MemoryAccessAlignedMask = 0x00000002, - MemoryAccessNontemporalMask = 0x00000004, -}; - -enum Scope { - ScopeCrossDevice = 0, - ScopeDevice = 1, - ScopeWorkgroup = 2, - ScopeSubgroup = 3, - ScopeInvocation = 4, - ScopeMax = 0x7fffffff, -}; - -enum GroupOperation { - GroupOperationReduce = 0, - GroupOperationInclusiveScan = 1, - GroupOperationExclusiveScan = 2, - GroupOperationMax = 0x7fffffff, -}; - -enum KernelEnqueueFlags { - KernelEnqueueFlagsNoWait = 0, - KernelEnqueueFlagsWaitKernel = 1, - KernelEnqueueFlagsWaitWorkGroup = 2, - KernelEnqueueFlagsMax = 0x7fffffff, -}; - -enum KernelProfilingInfoShift { - KernelProfilingInfoCmdExecTimeShift = 0, - KernelProfilingInfoMax = 0x7fffffff, -}; - -enum KernelProfilingInfoMask { - KernelProfilingInfoMaskNone = 0, - KernelProfilingInfoCmdExecTimeMask = 0x00000001, -}; - -enum Capability { - CapabilityMatrix = 0, - CapabilityShader = 1, - CapabilityGeometry = 2, - CapabilityTessellation = 3, - CapabilityAddresses = 4, - CapabilityLinkage = 5, - CapabilityKernel = 6, - CapabilityVector16 = 7, - CapabilityFloat16Buffer = 8, - CapabilityFloat16 = 9, - CapabilityFloat64 = 10, - CapabilityInt64 = 11, - CapabilityInt64Atomics = 12, - CapabilityImageBasic = 13, - CapabilityImageReadWrite = 14, - CapabilityImageMipmap = 15, - CapabilityPipes = 17, - CapabilityGroups = 18, - CapabilityDeviceEnqueue = 19, - CapabilityLiteralSampler = 20, - CapabilityAtomicStorage = 21, - CapabilityInt16 = 22, - CapabilityTessellationPointSize = 23, - CapabilityGeometryPointSize = 24, - CapabilityImageGatherExtended = 25, - CapabilityStorageImageMultisample = 27, - CapabilityUniformBufferArrayDynamicIndexing = 28, - CapabilitySampledImageArrayDynamicIndexing = 29, - CapabilityStorageBufferArrayDynamicIndexing = 30, - CapabilityStorageImageArrayDynamicIndexing = 31, - CapabilityClipDistance = 32, - CapabilityCullDistance = 33, - CapabilityImageCubeArray = 34, - CapabilitySampleRateShading = 35, - CapabilityImageRect = 36, - CapabilitySampledRect = 37, - CapabilityGenericPointer = 38, - CapabilityInt8 = 39, - CapabilityInputAttachment = 40, - CapabilitySparseResidency = 41, - CapabilityMinLod = 42, - CapabilitySampled1D = 43, - CapabilityImage1D = 44, - CapabilitySampledCubeArray = 45, - CapabilitySampledBuffer = 46, - CapabilityImageBuffer = 47, - CapabilityImageMSArray = 48, - CapabilityStorageImageExtendedFormats = 49, - CapabilityImageQuery = 50, - CapabilityDerivativeControl = 51, - 
CapabilityInterpolationFunction = 52, - CapabilityTransformFeedback = 53, - CapabilityGeometryStreams = 54, - CapabilityStorageImageReadWithoutFormat = 55, - CapabilityStorageImageWriteWithoutFormat = 56, - CapabilityMultiViewport = 57, - CapabilitySubgroupBallotKHR = 4423, - CapabilityDrawParameters = 4427, - CapabilitySubgroupVoteKHR = 4431, - CapabilityStorageBuffer16BitAccess = 4433, - CapabilityStorageUniformBufferBlock16 = 4433, - CapabilityStorageUniform16 = 4434, - CapabilityUniformAndStorageBuffer16BitAccess = 4434, - CapabilityStoragePushConstant16 = 4435, - CapabilityStorageInputOutput16 = 4436, - CapabilityDeviceGroup = 4437, - CapabilityMultiView = 4439, - CapabilityVariablePointersStorageBuffer = 4441, - CapabilityVariablePointers = 4442, - CapabilityAtomicStorageOps = 4445, - CapabilitySampleMaskPostDepthCoverage = 4447, - CapabilityImageGatherBiasLodAMD = 5009, - CapabilityStencilExportEXT = 5013, - CapabilitySampleMaskOverrideCoverageNV = 5249, - CapabilityGeometryShaderPassthroughNV = 5251, - CapabilityShaderViewportIndexLayerEXT = 5254, - CapabilityShaderViewportIndexLayerNV = 5254, - CapabilityShaderViewportMaskNV = 5255, - CapabilityShaderStereoViewNV = 5259, - CapabilityPerViewAttributesNV = 5260, - CapabilityMax = 0x7fffffff, -}; - -enum Op { - OpNop = 0, - OpUndef = 1, - OpSourceContinued = 2, - OpSource = 3, - OpSourceExtension = 4, - OpName = 5, - OpMemberName = 6, - OpString = 7, - OpLine = 8, - OpExtension = 10, - OpExtInstImport = 11, - OpExtInst = 12, - OpMemoryModel = 14, - OpEntryPoint = 15, - OpExecutionMode = 16, - OpCapability = 17, - OpTypeVoid = 19, - OpTypeBool = 20, - OpTypeInt = 21, - OpTypeFloat = 22, - OpTypeVector = 23, - OpTypeMatrix = 24, - OpTypeImage = 25, - OpTypeSampler = 26, - OpTypeSampledImage = 27, - OpTypeArray = 28, - OpTypeRuntimeArray = 29, - OpTypeStruct = 30, - OpTypeOpaque = 31, - OpTypePointer = 32, - OpTypeFunction = 33, - OpTypeEvent = 34, - OpTypeDeviceEvent = 35, - OpTypeReserveId = 36, - OpTypeQueue = 37, - OpTypePipe = 38, - OpTypeForwardPointer = 39, - OpConstantTrue = 41, - OpConstantFalse = 42, - OpConstant = 43, - OpConstantComposite = 44, - OpConstantSampler = 45, - OpConstantNull = 46, - OpSpecConstantTrue = 48, - OpSpecConstantFalse = 49, - OpSpecConstant = 50, - OpSpecConstantComposite = 51, - OpSpecConstantOp = 52, - OpFunction = 54, - OpFunctionParameter = 55, - OpFunctionEnd = 56, - OpFunctionCall = 57, - OpVariable = 59, - OpImageTexelPointer = 60, - OpLoad = 61, - OpStore = 62, - OpCopyMemory = 63, - OpCopyMemorySized = 64, - OpAccessChain = 65, - OpInBoundsAccessChain = 66, - OpPtrAccessChain = 67, - OpArrayLength = 68, - OpGenericPtrMemSemantics = 69, - OpInBoundsPtrAccessChain = 70, - OpDecorate = 71, - OpMemberDecorate = 72, - OpDecorationGroup = 73, - OpGroupDecorate = 74, - OpGroupMemberDecorate = 75, - OpVectorExtractDynamic = 77, - OpVectorInsertDynamic = 78, - OpVectorShuffle = 79, - OpCompositeConstruct = 80, - OpCompositeExtract = 81, - OpCompositeInsert = 82, - OpCopyObject = 83, - OpTranspose = 84, - OpSampledImage = 86, - OpImageSampleImplicitLod = 87, - OpImageSampleExplicitLod = 88, - OpImageSampleDrefImplicitLod = 89, - OpImageSampleDrefExplicitLod = 90, - OpImageSampleProjImplicitLod = 91, - OpImageSampleProjExplicitLod = 92, - OpImageSampleProjDrefImplicitLod = 93, - OpImageSampleProjDrefExplicitLod = 94, - OpImageFetch = 95, - OpImageGather = 96, - OpImageDrefGather = 97, - OpImageRead = 98, - OpImageWrite = 99, - OpImage = 100, - OpImageQueryFormat = 101, - OpImageQueryOrder = 102, - 
OpImageQuerySizeLod = 103, - OpImageQuerySize = 104, - OpImageQueryLod = 105, - OpImageQueryLevels = 106, - OpImageQuerySamples = 107, - OpConvertFToU = 109, - OpConvertFToS = 110, - OpConvertSToF = 111, - OpConvertUToF = 112, - OpUConvert = 113, - OpSConvert = 114, - OpFConvert = 115, - OpQuantizeToF16 = 116, - OpConvertPtrToU = 117, - OpSatConvertSToU = 118, - OpSatConvertUToS = 119, - OpConvertUToPtr = 120, - OpPtrCastToGeneric = 121, - OpGenericCastToPtr = 122, - OpGenericCastToPtrExplicit = 123, - OpBitcast = 124, - OpSNegate = 126, - OpFNegate = 127, - OpIAdd = 128, - OpFAdd = 129, - OpISub = 130, - OpFSub = 131, - OpIMul = 132, - OpFMul = 133, - OpUDiv = 134, - OpSDiv = 135, - OpFDiv = 136, - OpUMod = 137, - OpSRem = 138, - OpSMod = 139, - OpFRem = 140, - OpFMod = 141, - OpVectorTimesScalar = 142, - OpMatrixTimesScalar = 143, - OpVectorTimesMatrix = 144, - OpMatrixTimesVector = 145, - OpMatrixTimesMatrix = 146, - OpOuterProduct = 147, - OpDot = 148, - OpIAddCarry = 149, - OpISubBorrow = 150, - OpUMulExtended = 151, - OpSMulExtended = 152, - OpAny = 154, - OpAll = 155, - OpIsNan = 156, - OpIsInf = 157, - OpIsFinite = 158, - OpIsNormal = 159, - OpSignBitSet = 160, - OpLessOrGreater = 161, - OpOrdered = 162, - OpUnordered = 163, - OpLogicalEqual = 164, - OpLogicalNotEqual = 165, - OpLogicalOr = 166, - OpLogicalAnd = 167, - OpLogicalNot = 168, - OpSelect = 169, - OpIEqual = 170, - OpINotEqual = 171, - OpUGreaterThan = 172, - OpSGreaterThan = 173, - OpUGreaterThanEqual = 174, - OpSGreaterThanEqual = 175, - OpULessThan = 176, - OpSLessThan = 177, - OpULessThanEqual = 178, - OpSLessThanEqual = 179, - OpFOrdEqual = 180, - OpFUnordEqual = 181, - OpFOrdNotEqual = 182, - OpFUnordNotEqual = 183, - OpFOrdLessThan = 184, - OpFUnordLessThan = 185, - OpFOrdGreaterThan = 186, - OpFUnordGreaterThan = 187, - OpFOrdLessThanEqual = 188, - OpFUnordLessThanEqual = 189, - OpFOrdGreaterThanEqual = 190, - OpFUnordGreaterThanEqual = 191, - OpShiftRightLogical = 194, - OpShiftRightArithmetic = 195, - OpShiftLeftLogical = 196, - OpBitwiseOr = 197, - OpBitwiseXor = 198, - OpBitwiseAnd = 199, - OpNot = 200, - OpBitFieldInsert = 201, - OpBitFieldSExtract = 202, - OpBitFieldUExtract = 203, - OpBitReverse = 204, - OpBitCount = 205, - OpDPdx = 207, - OpDPdy = 208, - OpFwidth = 209, - OpDPdxFine = 210, - OpDPdyFine = 211, - OpFwidthFine = 212, - OpDPdxCoarse = 213, - OpDPdyCoarse = 214, - OpFwidthCoarse = 215, - OpEmitVertex = 218, - OpEndPrimitive = 219, - OpEmitStreamVertex = 220, - OpEndStreamPrimitive = 221, - OpControlBarrier = 224, - OpMemoryBarrier = 225, - OpAtomicLoad = 227, - OpAtomicStore = 228, - OpAtomicExchange = 229, - OpAtomicCompareExchange = 230, - OpAtomicCompareExchangeWeak = 231, - OpAtomicIIncrement = 232, - OpAtomicIDecrement = 233, - OpAtomicIAdd = 234, - OpAtomicISub = 235, - OpAtomicSMin = 236, - OpAtomicUMin = 237, - OpAtomicSMax = 238, - OpAtomicUMax = 239, - OpAtomicAnd = 240, - OpAtomicOr = 241, - OpAtomicXor = 242, - OpPhi = 245, - OpLoopMerge = 246, - OpSelectionMerge = 247, - OpLabel = 248, - OpBranch = 249, - OpBranchConditional = 250, - OpSwitch = 251, - OpKill = 252, - OpReturn = 253, - OpReturnValue = 254, - OpUnreachable = 255, - OpLifetimeStart = 256, - OpLifetimeStop = 257, - OpGroupAsyncCopy = 259, - OpGroupWaitEvents = 260, - OpGroupAll = 261, - OpGroupAny = 262, - OpGroupBroadcast = 263, - OpGroupIAdd = 264, - OpGroupFAdd = 265, - OpGroupFMin = 266, - OpGroupUMin = 267, - OpGroupSMin = 268, - OpGroupFMax = 269, - OpGroupUMax = 270, - OpGroupSMax = 271, - OpReadPipe = 274, - 
OpWritePipe = 275, - OpReservedReadPipe = 276, - OpReservedWritePipe = 277, - OpReserveReadPipePackets = 278, - OpReserveWritePipePackets = 279, - OpCommitReadPipe = 280, - OpCommitWritePipe = 281, - OpIsValidReserveId = 282, - OpGetNumPipePackets = 283, - OpGetMaxPipePackets = 284, - OpGroupReserveReadPipePackets = 285, - OpGroupReserveWritePipePackets = 286, - OpGroupCommitReadPipe = 287, - OpGroupCommitWritePipe = 288, - OpEnqueueMarker = 291, - OpEnqueueKernel = 292, - OpGetKernelNDrangeSubGroupCount = 293, - OpGetKernelNDrangeMaxSubGroupSize = 294, - OpGetKernelWorkGroupSize = 295, - OpGetKernelPreferredWorkGroupSizeMultiple = 296, - OpRetainEvent = 297, - OpReleaseEvent = 298, - OpCreateUserEvent = 299, - OpIsValidEvent = 300, - OpSetUserEventStatus = 301, - OpCaptureEventProfilingInfo = 302, - OpGetDefaultQueue = 303, - OpBuildNDRange = 304, - OpImageSparseSampleImplicitLod = 305, - OpImageSparseSampleExplicitLod = 306, - OpImageSparseSampleDrefImplicitLod = 307, - OpImageSparseSampleDrefExplicitLod = 308, - OpImageSparseSampleProjImplicitLod = 309, - OpImageSparseSampleProjExplicitLod = 310, - OpImageSparseSampleProjDrefImplicitLod = 311, - OpImageSparseSampleProjDrefExplicitLod = 312, - OpImageSparseFetch = 313, - OpImageSparseGather = 314, - OpImageSparseDrefGather = 315, - OpImageSparseTexelsResident = 316, - OpNoLine = 317, - OpAtomicFlagTestAndSet = 318, - OpAtomicFlagClear = 319, - OpImageSparseRead = 320, - OpSubgroupBallotKHR = 4421, - OpSubgroupFirstInvocationKHR = 4422, - OpSubgroupAllKHR = 4428, - OpSubgroupAnyKHR = 4429, - OpSubgroupAllEqualKHR = 4430, - OpSubgroupReadInvocationKHR = 4432, - OpGroupIAddNonUniformAMD = 5000, - OpGroupFAddNonUniformAMD = 5001, - OpGroupFMinNonUniformAMD = 5002, - OpGroupUMinNonUniformAMD = 5003, - OpGroupSMinNonUniformAMD = 5004, - OpGroupFMaxNonUniformAMD = 5005, - OpGroupUMaxNonUniformAMD = 5006, - OpGroupSMaxNonUniformAMD = 5007, - OpMax = 0x7fffffff, -}; - -// Overload operator| for mask bit combining - -inline ImageOperandsMask operator|(ImageOperandsMask a, ImageOperandsMask b) { return ImageOperandsMask(unsigned(a) | unsigned(b)); } -inline FPFastMathModeMask operator|(FPFastMathModeMask a, FPFastMathModeMask b) { return FPFastMathModeMask(unsigned(a) | unsigned(b)); } -inline SelectionControlMask operator|(SelectionControlMask a, SelectionControlMask b) { return SelectionControlMask(unsigned(a) | unsigned(b)); } -inline LoopControlMask operator|(LoopControlMask a, LoopControlMask b) { return LoopControlMask(unsigned(a) | unsigned(b)); } -inline FunctionControlMask operator|(FunctionControlMask a, FunctionControlMask b) { return FunctionControlMask(unsigned(a) | unsigned(b)); } -inline MemorySemanticsMask operator|(MemorySemanticsMask a, MemorySemanticsMask b) { return MemorySemanticsMask(unsigned(a) | unsigned(b)); } -inline MemoryAccessMask operator|(MemoryAccessMask a, MemoryAccessMask b) { return MemoryAccessMask(unsigned(a) | unsigned(b)); } -inline KernelProfilingInfoMask operator|(KernelProfilingInfoMask a, KernelProfilingInfoMask b) { return KernelProfilingInfoMask(unsigned(a) | unsigned(b)); } - -} // end namespace spv - -#endif // #ifndef spirv_HPP - diff --git a/deps/glslang/SPIRV/spvIR.h b/deps/glslang/SPIRV/spvIR.h deleted file mode 100644 index 087a53f2..00000000 --- a/deps/glslang/SPIRV/spvIR.h +++ /dev/null @@ -1,402 +0,0 @@ -// -// Copyright (C) 2014 LunarG, Inc. -// -// All rights reserved. 
-// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. - -// SPIRV-IR -// -// Simple in-memory representation (IR) of SPIRV. Just for holding -// Each function's CFG of blocks. Has this hierarchy: -// - Module, which is a list of -// - Function, which is a list of -// - Block, which is a list of -// - Instruction -// - -#pragma once -#ifndef spvIR_H -#define spvIR_H - -#include "spirv.hpp" - -#include -#include -#include -#include -#include -#include - -namespace spv { - -class Block; -class Function; -class Module; - -const Id NoResult = 0; -const Id NoType = 0; - -const Decoration NoPrecision = DecorationMax; -const MemorySemanticsMask MemorySemanticsAllMemory = - (MemorySemanticsMask)(MemorySemanticsSequentiallyConsistentMask | - MemorySemanticsUniformMemoryMask | - MemorySemanticsSubgroupMemoryMask | - MemorySemanticsWorkgroupMemoryMask | - MemorySemanticsCrossWorkgroupMemoryMask | - MemorySemanticsAtomicCounterMemoryMask | - MemorySemanticsImageMemoryMask); - -// -// SPIR-V IR instruction. 
-// - -class Instruction { -public: - Instruction(Id resultId, Id typeId, Op opCode) : resultId(resultId), typeId(typeId), opCode(opCode), block(nullptr) { } - explicit Instruction(Op opCode) : resultId(NoResult), typeId(NoType), opCode(opCode), block(nullptr) { } - virtual ~Instruction() {} - void addIdOperand(Id id) { operands.push_back(id); } - void addImmediateOperand(unsigned int immediate) { operands.push_back(immediate); } - void addStringOperand(const char* str) - { - unsigned int word; - char* wordString = (char*)&word; - char* wordPtr = wordString; - int charCount = 0; - char c; - do { - c = *(str++); - *(wordPtr++) = c; - ++charCount; - if (charCount == 4) { - addImmediateOperand(word); - wordPtr = wordString; - charCount = 0; - } - } while (c != 0); - - // deal with partial last word - if (charCount > 0) { - // pad with 0s - for (; charCount < 4; ++charCount) - *(wordPtr++) = 0; - addImmediateOperand(word); - } - } - void setBlock(Block* b) { block = b; } - Block* getBlock() const { return block; } - Op getOpCode() const { return opCode; } - int getNumOperands() const { return (int)operands.size(); } - Id getResultId() const { return resultId; } - Id getTypeId() const { return typeId; } - Id getIdOperand(int op) const { return operands[op]; } - unsigned int getImmediateOperand(int op) const { return operands[op]; } - - // Write out the binary form. - void dump(std::vector& out) const - { - // Compute the wordCount - unsigned int wordCount = 1; - if (typeId) - ++wordCount; - if (resultId) - ++wordCount; - wordCount += (unsigned int)operands.size(); - - // Write out the beginning of the instruction - out.push_back(((wordCount) << WordCountShift) | opCode); - if (typeId) - out.push_back(typeId); - if (resultId) - out.push_back(resultId); - - // Write out the operands - for (int op = 0; op < (int)operands.size(); ++op) - out.push_back(operands[op]); - } - -protected: - Instruction(const Instruction&); - Id resultId; - Id typeId; - Op opCode; - std::vector operands; - Block* block; -}; - -// -// SPIR-V IR block. -// - -class Block { -public: - Block(Id id, Function& parent); - virtual ~Block() - { - } - - Id getId() { return instructions.front()->getResultId(); } - - Function& getParent() const { return parent; } - void addInstruction(std::unique_ptr inst); - void addPredecessor(Block* pred) { predecessors.push_back(pred); pred->successors.push_back(this);} - void addLocalVariable(std::unique_ptr inst) { localVariables.push_back(std::move(inst)); } - const std::vector& getPredecessors() const { return predecessors; } - const std::vector& getSuccessors() const { return successors; } - const std::vector >& getInstructions() const { - return instructions; - } - void setUnreachable() { unreachable = true; } - bool isUnreachable() const { return unreachable; } - // Returns the block's merge instruction, if one exists (otherwise null). 
- const Instruction* getMergeInstruction() const { - if (instructions.size() < 2) return nullptr; - const Instruction* nextToLast = (instructions.cend() - 2)->get(); - switch (nextToLast->getOpCode()) { - case OpSelectionMerge: - case OpLoopMerge: - return nextToLast; - default: - return nullptr; - } - return nullptr; - } - - bool isTerminated() const - { - switch (instructions.back()->getOpCode()) { - case OpBranch: - case OpBranchConditional: - case OpSwitch: - case OpKill: - case OpReturn: - case OpReturnValue: - return true; - default: - return false; - } - } - - void dump(std::vector& out) const - { - instructions[0]->dump(out); - for (int i = 0; i < (int)localVariables.size(); ++i) - localVariables[i]->dump(out); - for (int i = 1; i < (int)instructions.size(); ++i) - instructions[i]->dump(out); - } - -protected: - Block(const Block&); - Block& operator=(Block&); - - // To enforce keeping parent and ownership in sync: - friend Function; - - std::vector > instructions; - std::vector predecessors, successors; - std::vector > localVariables; - Function& parent; - - // track whether this block is known to be uncreachable (not necessarily - // true for all unreachable blocks, but should be set at least - // for the extraneous ones introduced by the builder). - bool unreachable; -}; - -// Traverses the control-flow graph rooted at root in an order suited for -// readable code generation. Invokes callback at every node in the traversal -// order. -void inReadableOrder(Block* root, std::function callback); - -// -// SPIR-V IR Function. -// - -class Function { -public: - Function(Id id, Id resultType, Id functionType, Id firstParam, Module& parent); - virtual ~Function() - { - for (int i = 0; i < (int)parameterInstructions.size(); ++i) - delete parameterInstructions[i]; - - for (int i = 0; i < (int)blocks.size(); ++i) - delete blocks[i]; - } - Id getId() const { return functionInstruction.getResultId(); } - Id getParamId(int p) { return parameterInstructions[p]->getResultId(); } - - void addBlock(Block* block) { blocks.push_back(block); } - void removeBlock(Block* block) - { - auto found = find(blocks.begin(), blocks.end(), block); - assert(found != blocks.end()); - blocks.erase(found); - delete block; - } - - Module& getParent() const { return parent; } - Block* getEntryBlock() const { return blocks.front(); } - Block* getLastBlock() const { return blocks.back(); } - const std::vector& getBlocks() const { return blocks; } - void addLocalVariable(std::unique_ptr inst); - Id getReturnType() const { return functionInstruction.getTypeId(); } - - void setImplicitThis() { implicitThis = true; } - bool hasImplicitThis() const { return implicitThis; } - - void dump(std::vector& out) const - { - // OpFunction - functionInstruction.dump(out); - - // OpFunctionParameter - for (int p = 0; p < (int)parameterInstructions.size(); ++p) - parameterInstructions[p]->dump(out); - - // Blocks - inReadableOrder(blocks[0], [&out](const Block* b) { b->dump(out); }); - Instruction end(0, 0, OpFunctionEnd); - end.dump(out); - } - -protected: - Function(const Function&); - Function& operator=(Function&); - - Module& parent; - Instruction functionInstruction; - std::vector parameterInstructions; - std::vector blocks; - bool implicitThis; // true if this is a member function expecting to be passed a 'this' as the first argument -}; - -// -// SPIR-V IR Module. 
-// - -class Module { -public: - Module() {} - virtual ~Module() - { - // TODO delete things - } - - void addFunction(Function *fun) { functions.push_back(fun); } - - void mapInstruction(Instruction *instruction) - { - spv::Id resultId = instruction->getResultId(); - // map the instruction's result id - if (resultId >= idToInstruction.size()) - idToInstruction.resize(resultId + 16); - idToInstruction[resultId] = instruction; - } - - Instruction* getInstruction(Id id) const { return idToInstruction[id]; } - const std::vector& getFunctions() const { return functions; } - spv::Id getTypeId(Id resultId) const { return idToInstruction[resultId]->getTypeId(); } - StorageClass getStorageClass(Id typeId) const - { - assert(idToInstruction[typeId]->getOpCode() == spv::OpTypePointer); - return (StorageClass)idToInstruction[typeId]->getImmediateOperand(0); - } - - void dump(std::vector& out) const - { - for (int f = 0; f < (int)functions.size(); ++f) - functions[f]->dump(out); - } - -protected: - Module(const Module&); - std::vector functions; - - // map from result id to instruction having that result id - std::vector idToInstruction; - - // map from a result id to its type id -}; - -// -// Implementation (it's here due to circular type definitions). -// - -// Add both -// - the OpFunction instruction -// - all the OpFunctionParameter instructions -__inline Function::Function(Id id, Id resultType, Id functionType, Id firstParamId, Module& parent) - : parent(parent), functionInstruction(id, resultType, OpFunction), implicitThis(false) -{ - // OpFunction - functionInstruction.addImmediateOperand(FunctionControlMaskNone); - functionInstruction.addIdOperand(functionType); - parent.mapInstruction(&functionInstruction); - parent.addFunction(this); - - // OpFunctionParameter - Instruction* typeInst = parent.getInstruction(functionType); - int numParams = typeInst->getNumOperands() - 1; - for (int p = 0; p < numParams; ++p) { - Instruction* param = new Instruction(firstParamId + p, typeInst->getIdOperand(p + 1), OpFunctionParameter); - parent.mapInstruction(param); - parameterInstructions.push_back(param); - } -} - -__inline void Function::addLocalVariable(std::unique_ptr inst) -{ - Instruction* raw_instruction = inst.get(); - blocks[0]->addLocalVariable(std::move(inst)); - parent.mapInstruction(raw_instruction); -} - -__inline Block::Block(Id id, Function& parent) : parent(parent), unreachable(false) -{ - instructions.push_back(std::unique_ptr(new Instruction(id, NoType, OpLabel))); - instructions.back()->setBlock(this); - parent.getParent().mapInstruction(instructions.back().get()); -} - -__inline void Block::addInstruction(std::unique_ptr inst) -{ - Instruction* raw_instruction = inst.get(); - instructions.push_back(std::move(inst)); - raw_instruction->setBlock(this); - if (raw_instruction->getResultId()) - parent.getParent().mapInstruction(raw_instruction); -} - -}; // end spv namespace - -#endif // spvIR_H diff --git a/deps/glslang/SetupLinux.sh b/deps/glslang/SetupLinux.sh deleted file mode 100644 index 2a6ff6a4..00000000 --- a/deps/glslang/SetupLinux.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -rm -rf build -mkdir build -pushd build -cmake .. -cmake .. 
-make -j 2 -make install -install/bin/glslangValidator -i ../Test/sample.vert ../Test/sample.frag -popd diff --git a/deps/glslang/glslang/CMakeLists.txt b/deps/glslang/glslang/CMakeLists.txt deleted file mode 100644 index 8d4615c8..00000000 --- a/deps/glslang/glslang/CMakeLists.txt +++ /dev/null @@ -1,104 +0,0 @@ -if(WIN32) - add_subdirectory(OSDependent/Windows) -elseif(UNIX) - add_subdirectory(OSDependent/Unix) -else(WIN32) - message("unknown platform") -endif(WIN32) - -set(SOURCES - MachineIndependent/glslang.y - MachineIndependent/glslang_tab.cpp - MachineIndependent/Constant.cpp - MachineIndependent/iomapper.cpp - MachineIndependent/InfoSink.cpp - MachineIndependent/Initialize.cpp - MachineIndependent/IntermTraverse.cpp - MachineIndependent/Intermediate.cpp - MachineIndependent/ParseContextBase.cpp - MachineIndependent/ParseHelper.cpp - MachineIndependent/PoolAlloc.cpp - MachineIndependent/RemoveTree.cpp - MachineIndependent/Scan.cpp - MachineIndependent/ShaderLang.cpp - MachineIndependent/SymbolTable.cpp - MachineIndependent/Versions.cpp - MachineIndependent/intermOut.cpp - MachineIndependent/limits.cpp - MachineIndependent/linkValidate.cpp - MachineIndependent/parseConst.cpp - MachineIndependent/reflection.cpp - MachineIndependent/preprocessor/Pp.cpp - MachineIndependent/preprocessor/PpAtom.cpp - MachineIndependent/preprocessor/PpContext.cpp - MachineIndependent/preprocessor/PpScanner.cpp - MachineIndependent/preprocessor/PpTokens.cpp - MachineIndependent/propagateNoContraction.cpp - GenericCodeGen/CodeGen.cpp - GenericCodeGen/Link.cpp) - -set(HEADERS - Public/ShaderLang.h - Include/arrays.h - Include/BaseTypes.h - Include/Common.h - Include/ConstantUnion.h - Include/InfoSink.h - Include/InitializeGlobals.h - Include/intermediate.h - Include/PoolAlloc.h - Include/ResourceLimits.h - Include/revision.h - Include/ShHandle.h - Include/Types.h - MachineIndependent/glslang_tab.cpp.h - MachineIndependent/gl_types.h - MachineIndependent/Initialize.h - MachineIndependent/iomapper.h - MachineIndependent/LiveTraverser.h - MachineIndependent/localintermediate.h - MachineIndependent/ParseHelper.h - MachineIndependent/reflection.h - MachineIndependent/RemoveTree.h - MachineIndependent/Scan.h - MachineIndependent/ScanContext.h - MachineIndependent/SymbolTable.h - MachineIndependent/Versions.h - MachineIndependent/parseVersions.h - MachineIndependent/propagateNoContraction.h - MachineIndependent/preprocessor/PpContext.h - MachineIndependent/preprocessor/PpTokens.h) - -# This might be useful for making grammar changes: -# -# find_package(BISON) -# add_custom_command(OUTPUT ${CMAKE_CURRENT_SOURCE_DIR}/MachineIndependent/glslang_tab.cpp ${CMAKE_CURRENT_SOURCE_DIR}/MachineIndependent/glslang_tab.cpp.h -# COMMAND ${BISON_EXECUTABLE} --defines=${CMAKE_CURRENT_SOURCE_DIR}/MachineIndependent/glslang_tab.cpp.h -t ${CMAKE_CURRENT_SOURCE_DIR}/MachineIndependent/glslang.y -o ${CMAKE_CURRENT_SOURCE_DIR}/MachineIndependent/glslang_tab.cpp -# MAIN_DEPENDENCY MachineIndependent/glslang.y -# WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}) -# set(BISON_GLSLParser_OUTPUT_SOURCE ${CMAKE_CURRENT_SOURCE_DIR}/MachineIndependent/glslang_tab.cpp) - -add_library(glslang STATIC ${BISON_GLSLParser_OUTPUT_SOURCE} ${SOURCES} ${HEADERS}) -set_property(TARGET glslang PROPERTY FOLDER glslang POSITION_INDEPENDENT_CODE ON) -target_link_libraries(glslang OGLCompiler OSDependent) -if(ENABLE_HLSL) - target_link_libraries(glslang HLSL) -endif() - -if(WIN32) - source_group("Public" REGULAR_EXPRESSION "Public/*") - 
source_group("MachineIndependent" REGULAR_EXPRESSION "MachineIndependent/[^/]*") - source_group("Include" REGULAR_EXPRESSION "Include/[^/]*") - source_group("GenericCodeGen" REGULAR_EXPRESSION "GenericCodeGen/*") - source_group("MachineIndependent\\Preprocessor" REGULAR_EXPRESSION "MachineIndependent/preprocessor/*") -endif(WIN32) - -#>> Disabled for Anvil -#install(TARGETS glslang -# ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}) -# -#foreach(file ${HEADERS}) -# get_filename_component(dir ${file} DIRECTORY) -# install(FILES ${file} DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/glslang/${dir}) -#endforeach() -#<< \ No newline at end of file diff --git a/deps/glslang/glslang/GenericCodeGen/CodeGen.cpp b/deps/glslang/glslang/GenericCodeGen/CodeGen.cpp deleted file mode 100644 index b3c7226d..00000000 --- a/deps/glslang/glslang/GenericCodeGen/CodeGen.cpp +++ /dev/null @@ -1,76 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#include "../Include/Common.h" -#include "../Include/ShHandle.h" -#include "../MachineIndependent/Versions.h" - -// -// Here is where real machine specific high-level data would be defined. -// -class TGenericCompiler : public TCompiler { -public: - TGenericCompiler(EShLanguage l, int dOptions) : TCompiler(l, infoSink), debugOptions(dOptions) { } - virtual bool compile(TIntermNode* root, int version = 0, EProfile profile = ENoProfile); - TInfoSink infoSink; - int debugOptions; -}; - -// -// This function must be provided to create the actual -// compile object used by higher level code. It returns -// a subclass of TCompiler. 
-// -TCompiler* ConstructCompiler(EShLanguage language, int debugOptions) -{ - return new TGenericCompiler(language, debugOptions); -} - -// -// Delete the compiler made by ConstructCompiler -// -void DeleteCompiler(TCompiler* compiler) -{ - delete compiler; -} - -// -// Generate code from the given parse tree -// -bool TGenericCompiler::compile(TIntermNode* /*root*/, int /*version*/, EProfile /*profile*/) -{ - haveValidObjectCode = true; - - return haveValidObjectCode; -} diff --git a/deps/glslang/glslang/GenericCodeGen/Link.cpp b/deps/glslang/glslang/GenericCodeGen/Link.cpp deleted file mode 100644 index c38db0f6..00000000 --- a/deps/glslang/glslang/GenericCodeGen/Link.cpp +++ /dev/null @@ -1,91 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -// -// The top level algorithms for linking multiple -// shaders together. -// -#include "../Include/Common.h" -#include "../Include/ShHandle.h" - -// -// Actual link object, derived from the shader handle base classes. -// -class TGenericLinker : public TLinker { -public: - TGenericLinker(EShExecutable e, int dOptions) : TLinker(e, infoSink), debugOptions(dOptions) { } - bool link(TCompilerList&, TUniformMap*) { return true; } - void getAttributeBindings(ShBindingTable const **) const { } - TInfoSink infoSink; - int debugOptions; -}; - -// -// The internal view of a uniform/float object exchanged with the driver. 
-// -class TUniformLinkedMap : public TUniformMap { -public: - TUniformLinkedMap() { } - virtual int getLocation(const char*) { return 0; } -}; - -TShHandleBase* ConstructLinker(EShExecutable executable, int debugOptions) -{ - return new TGenericLinker(executable, debugOptions); -} - -void DeleteLinker(TShHandleBase* linker) -{ - delete linker; -} - -TUniformMap* ConstructUniformMap() -{ - return new TUniformLinkedMap(); -} - -void DeleteUniformMap(TUniformMap* map) -{ - delete map; -} - -TShHandleBase* ConstructBindings() -{ - return 0; -} - -void DeleteBindingList(TShHandleBase* bindingList) -{ - delete bindingList; -} diff --git a/deps/glslang/glslang/Include/BaseTypes.h b/deps/glslang/glslang/Include/BaseTypes.h deleted file mode 100644 index 08fb623a..00000000 --- a/deps/glslang/glslang/Include/BaseTypes.h +++ /dev/null @@ -1,383 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2012-2013 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#ifndef _BASICTYPES_INCLUDED_ -#define _BASICTYPES_INCLUDED_ - -namespace glslang { - -// -// Basic type. Arrays, vectors, sampler details, etc., are orthogonal to this. -// -enum TBasicType { - EbtVoid, - EbtFloat, - EbtDouble, -#ifdef AMD_EXTENSIONS - EbtFloat16, -#endif - EbtInt, - EbtUint, - EbtInt64, - EbtUint64, -#ifdef AMD_EXTENSIONS - EbtInt16, - EbtUint16, -#endif - EbtBool, - EbtAtomicUint, - EbtSampler, - EbtStruct, - EbtBlock, - - // HLSL types that live only temporarily. - EbtString, - - EbtNumTypes -}; - -// -// Storage qualifiers. Should align with different kinds of storage or -// resource or GLSL storage qualifier. Expansion is deprecated. -// -// N.B.: You probably DON'T want to add anything here, but rather just add it -// to the built-in variables. See the comment above TBuiltInVariable. -// -// A new built-in variable will normally be an existing qualifier, like 'in', 'out', etc. -// DO NOT follow the design pattern of, say EvqInstanceId, etc. 
-// -enum TStorageQualifier { - EvqTemporary, // For temporaries (within a function), read/write - EvqGlobal, // For globals read/write - EvqConst, // User-defined constant values, will be semantically constant and constant folded - EvqVaryingIn, // pipeline input, read only, also supercategory for all built-ins not included in this enum (see TBuiltInVariable) - EvqVaryingOut, // pipeline output, read/write, also supercategory for all built-ins not included in this enum (see TBuiltInVariable) - EvqUniform, // read only, shared with app - EvqBuffer, // read/write, shared with app - EvqShared, // compute shader's read/write 'shared' qualifier - - // parameters - EvqIn, // also, for 'in' in the grammar before we know if it's a pipeline input or an 'in' parameter - EvqOut, // also, for 'out' in the grammar before we know if it's a pipeline output or an 'out' parameter - EvqInOut, - EvqConstReadOnly, // input; also other read-only types having neither a constant value nor constant-value semantics - - // built-ins read by vertex shader - EvqVertexId, - EvqInstanceId, - - // built-ins written by vertex shader - EvqPosition, - EvqPointSize, - EvqClipVertex, - - // built-ins read by fragment shader - EvqFace, - EvqFragCoord, - EvqPointCoord, - - // built-ins written by fragment shader - EvqFragColor, - EvqFragDepth, - - // end of list - EvqLast -}; - -// -// Subcategories of the TStorageQualifier, simply to give a direct mapping -// between built-in variable names and an numerical value (the enum). -// -// For backward compatibility, there is some redundancy between the -// TStorageQualifier and these. Existing members should both be maintained accurately. -// However, any new built-in variable (and any existing non-redundant one) -// must follow the pattern that the specific built-in is here, and only its -// general qualifier is in TStorageQualifier. -// -// Something like gl_Position, which is sometimes 'in' and sometimes 'out' -// shows up as two different built-in variables in a single stage, but -// only has a single enum in TBuiltInVariable, so both the -// TStorageQualifier and the TBuitinVariable are needed to distinguish -// between them. 
-// -enum TBuiltInVariable { - EbvNone, - EbvNumWorkGroups, - EbvWorkGroupSize, - EbvWorkGroupId, - EbvLocalInvocationId, - EbvGlobalInvocationId, - EbvLocalInvocationIndex, - EbvSubGroupSize, - EbvSubGroupInvocation, - EbvSubGroupEqMask, - EbvSubGroupGeMask, - EbvSubGroupGtMask, - EbvSubGroupLeMask, - EbvSubGroupLtMask, - EbvVertexId, - EbvInstanceId, - EbvVertexIndex, - EbvInstanceIndex, - EbvBaseVertex, - EbvBaseInstance, - EbvDrawId, - EbvPosition, - EbvPointSize, - EbvClipVertex, - EbvClipDistance, - EbvCullDistance, - EbvNormal, - EbvVertex, - EbvMultiTexCoord0, - EbvMultiTexCoord1, - EbvMultiTexCoord2, - EbvMultiTexCoord3, - EbvMultiTexCoord4, - EbvMultiTexCoord5, - EbvMultiTexCoord6, - EbvMultiTexCoord7, - EbvFrontColor, - EbvBackColor, - EbvFrontSecondaryColor, - EbvBackSecondaryColor, - EbvTexCoord, - EbvFogFragCoord, - EbvInvocationId, - EbvPrimitiveId, - EbvLayer, - EbvViewportIndex, - EbvPatchVertices, - EbvTessLevelOuter, - EbvTessLevelInner, - EbvBoundingBox, - EbvTessCoord, - EbvColor, - EbvSecondaryColor, - EbvFace, - EbvFragCoord, - EbvPointCoord, - EbvFragColor, - EbvFragData, - EbvFragDepth, - EbvFragStencilRef, - EbvSampleId, - EbvSamplePosition, - EbvSampleMask, - EbvHelperInvocation, -#ifdef AMD_EXTENSIONS - EbvBaryCoordNoPersp, - EbvBaryCoordNoPerspCentroid, - EbvBaryCoordNoPerspSample, - EbvBaryCoordSmooth, - EbvBaryCoordSmoothCentroid, - EbvBaryCoordSmoothSample, - EbvBaryCoordPullModel, -#endif - - EbvViewIndex, - EbvDeviceIndex, - -#ifdef NV_EXTENSIONS - EbvViewportMaskNV, - EbvSecondaryPositionNV, - EbvSecondaryViewportMaskNV, - EbvPositionPerViewNV, - EbvViewportMaskPerViewNV, -#endif - - // HLSL built-ins that live only temporarily, until they get remapped - // to one of the above. - EbvFragDepthGreater, - EbvFragDepthLesser, - EbvGsOutputStream, - EbvOutputPatch, - EbvInputPatch, - - // structbuffer types - EbvAppendConsume, // no need to differentiate append and consume - EbvRWStructuredBuffer, - EbvStructuredBuffer, - EbvByteAddressBuffer, - EbvRWByteAddressBuffer, - - EbvLast -}; - -// These will show up in error messages -__inline const char* GetStorageQualifierString(TStorageQualifier q) -{ - switch (q) { - case EvqTemporary: return "temp"; break; - case EvqGlobal: return "global"; break; - case EvqConst: return "const"; break; - case EvqConstReadOnly: return "const (read only)"; break; - case EvqVaryingIn: return "in"; break; - case EvqVaryingOut: return "out"; break; - case EvqUniform: return "uniform"; break; - case EvqBuffer: return "buffer"; break; - case EvqShared: return "shared"; break; - case EvqIn: return "in"; break; - case EvqOut: return "out"; break; - case EvqInOut: return "inout"; break; - case EvqVertexId: return "gl_VertexId"; break; - case EvqInstanceId: return "gl_InstanceId"; break; - case EvqPosition: return "gl_Position"; break; - case EvqPointSize: return "gl_PointSize"; break; - case EvqClipVertex: return "gl_ClipVertex"; break; - case EvqFace: return "gl_FrontFacing"; break; - case EvqFragCoord: return "gl_FragCoord"; break; - case EvqPointCoord: return "gl_PointCoord"; break; - case EvqFragColor: return "fragColor"; break; - case EvqFragDepth: return "gl_FragDepth"; break; - default: return "unknown qualifier"; - } -} - -__inline const char* GetBuiltInVariableString(TBuiltInVariable v) -{ - switch (v) { - case EbvNone: return ""; - case EbvNumWorkGroups: return "NumWorkGroups"; - case EbvWorkGroupSize: return "WorkGroupSize"; - case EbvWorkGroupId: return "WorkGroupID"; - case EbvLocalInvocationId: return "LocalInvocationID"; - 
case EbvGlobalInvocationId: return "GlobalInvocationID"; - case EbvLocalInvocationIndex: return "LocalInvocationIndex"; - case EbvSubGroupSize: return "SubGroupSize"; - case EbvSubGroupInvocation: return "SubGroupInvocation"; - case EbvSubGroupEqMask: return "SubGroupEqMask"; - case EbvSubGroupGeMask: return "SubGroupGeMask"; - case EbvSubGroupGtMask: return "SubGroupGtMask"; - case EbvSubGroupLeMask: return "SubGroupLeMask"; - case EbvSubGroupLtMask: return "SubGroupLtMask"; - case EbvVertexId: return "VertexId"; - case EbvInstanceId: return "InstanceId"; - case EbvVertexIndex: return "VertexIndex"; - case EbvInstanceIndex: return "InstanceIndex"; - case EbvBaseVertex: return "BaseVertex"; - case EbvBaseInstance: return "BaseInstance"; - case EbvDrawId: return "DrawId"; - case EbvPosition: return "Position"; - case EbvPointSize: return "PointSize"; - case EbvClipVertex: return "ClipVertex"; - case EbvClipDistance: return "ClipDistance"; - case EbvCullDistance: return "CullDistance"; - case EbvNormal: return "Normal"; - case EbvVertex: return "Vertex"; - case EbvMultiTexCoord0: return "MultiTexCoord0"; - case EbvMultiTexCoord1: return "MultiTexCoord1"; - case EbvMultiTexCoord2: return "MultiTexCoord2"; - case EbvMultiTexCoord3: return "MultiTexCoord3"; - case EbvMultiTexCoord4: return "MultiTexCoord4"; - case EbvMultiTexCoord5: return "MultiTexCoord5"; - case EbvMultiTexCoord6: return "MultiTexCoord6"; - case EbvMultiTexCoord7: return "MultiTexCoord7"; - case EbvFrontColor: return "FrontColor"; - case EbvBackColor: return "BackColor"; - case EbvFrontSecondaryColor: return "FrontSecondaryColor"; - case EbvBackSecondaryColor: return "BackSecondaryColor"; - case EbvTexCoord: return "TexCoord"; - case EbvFogFragCoord: return "FogFragCoord"; - case EbvInvocationId: return "InvocationID"; - case EbvPrimitiveId: return "PrimitiveID"; - case EbvLayer: return "Layer"; - case EbvViewportIndex: return "ViewportIndex"; - case EbvPatchVertices: return "PatchVertices"; - case EbvTessLevelOuter: return "TessLevelOuter"; - case EbvTessLevelInner: return "TessLevelInner"; - case EbvBoundingBox: return "BoundingBox"; - case EbvTessCoord: return "TessCoord"; - case EbvColor: return "Color"; - case EbvSecondaryColor: return "SecondaryColor"; - case EbvFace: return "Face"; - case EbvFragCoord: return "FragCoord"; - case EbvPointCoord: return "PointCoord"; - case EbvFragColor: return "FragColor"; - case EbvFragData: return "FragData"; - case EbvFragDepth: return "FragDepth"; - case EbvFragStencilRef: return "FragStencilRef"; - case EbvSampleId: return "SampleId"; - case EbvSamplePosition: return "SamplePosition"; - case EbvSampleMask: return "SampleMaskIn"; - case EbvHelperInvocation: return "HelperInvocation"; -#ifdef AMD_EXTENSIONS - case EbvBaryCoordNoPersp: return "BaryCoordNoPersp"; - case EbvBaryCoordNoPerspCentroid: return "BaryCoordNoPerspCentroid"; - case EbvBaryCoordNoPerspSample: return "BaryCoordNoPerspSample"; - case EbvBaryCoordSmooth: return "BaryCoordSmooth"; - case EbvBaryCoordSmoothCentroid: return "BaryCoordSmoothCentroid"; - case EbvBaryCoordSmoothSample: return "BaryCoordSmoothSample"; - case EbvBaryCoordPullModel: return "BaryCoordPullModel"; -#endif - - case EbvViewIndex: return "ViewIndex"; - case EbvDeviceIndex: return "DeviceIndex"; - -#ifdef NV_EXTENSIONS - case EbvViewportMaskNV: return "ViewportMaskNV"; - case EbvSecondaryPositionNV: return "SecondaryPositionNV"; - case EbvSecondaryViewportMaskNV: return "SecondaryViewportMaskNV"; - case EbvPositionPerViewNV: return 
"PositionPerViewNV"; - case EbvViewportMaskPerViewNV: return "ViewportMaskPerViewNV"; -#endif - default: return "unknown built-in variable"; - } -} - -// In this enum, order matters; users can assume higher precision is a bigger value -// and EpqNone is 0. -enum TPrecisionQualifier { - EpqNone = 0, - EpqLow, - EpqMedium, - EpqHigh -}; - -__inline const char* GetPrecisionQualifierString(TPrecisionQualifier p) -{ - switch(p) { - case EpqNone: return ""; break; - case EpqLow: return "lowp"; break; - case EpqMedium: return "mediump"; break; - case EpqHigh: return "highp"; break; - default: return "unknown precision qualifier"; - } -} - -} // end namespace glslang - -#endif // _BASICTYPES_INCLUDED_ diff --git a/deps/glslang/glslang/Include/Common.h b/deps/glslang/glslang/Include/Common.h deleted file mode 100644 index 08201548..00000000 --- a/deps/glslang/glslang/Include/Common.h +++ /dev/null @@ -1,266 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2012-2013 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. 
-// - -#ifndef _COMMON_INCLUDED_ -#define _COMMON_INCLUDED_ - -#if (defined(_MSC_VER) && _MSC_VER < 1900 /*vs2015*/) || defined MINGW_HAS_SECURE_API - #include - #define snprintf sprintf_s - #define safe_vsprintf(buf,max,format,args) vsnprintf_s((buf), (max), (max), (format), (args)) -#elif defined (solaris) - #define safe_vsprintf(buf,max,format,args) vsnprintf((buf), (max), (format), (args)) - #include - #define UINT_PTR uintptr_t -#else - #define safe_vsprintf(buf,max,format,args) vsnprintf((buf), (max), (format), (args)) - #include - #define UINT_PTR uintptr_t -#endif - -#if defined(__ANDROID__) || _MSC_VER < 1700 -#include <sstream> -namespace std { -template <typename T> -std::string to_string(const T& val) { - std::ostringstream os; - os << val; - return os.str(); -} -} -#endif - -#if defined(_MSC_VER) && _MSC_VER < 1800 -inline long long int strtoll (const char* str, char** endptr, int base) -{ - return _strtoi64(str, endptr, base); -} -inline unsigned long long int strtoull (const char* str, char** endptr, int base) -{ - return _strtoui64(str, endptr, base); -} -inline long long int atoll (const char* str) -{ - return strtoll(str, NULL, 10); -} -#endif - -/* windows only pragma */ -#ifdef _MSC_VER - #pragma warning(disable : 4786) // Don't warn about too long identifiers - #pragma warning(disable : 4514) // unused inline method - #pragma warning(disable : 4201) // nameless union -#endif - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include "PoolAlloc.h" - -// -// Put POOL_ALLOCATOR_NEW_DELETE in base classes to make them use this scheme. -// -#define POOL_ALLOCATOR_NEW_DELETE(A) \ - void* operator new(size_t s) { return (A).allocate(s); } \ - void* operator new(size_t, void *_Where) { return (_Where); } \ - void operator delete(void*) { } \ - void operator delete(void *, void *) { } \ - void* operator new[](size_t s) { return (A).allocate(s); } \ - void* operator new[](size_t, void *_Where) { return (_Where); } \ - void operator delete[](void*) { } \ - void operator delete[](void *, void *) { } - -namespace glslang { - - // - // Pool version of string. - // - typedef pool_allocator<char> TStringAllocator; - typedef std::basic_string<char, std::char_traits<char>, TStringAllocator> TString; - -} // end namespace glslang - -// Repackage the std::hash for use by unordered map/set with a TString key. 
-namespace std { - - template<> struct hash<glslang::TString> { - std::size_t operator()(const glslang::TString& s) const - { - const unsigned _FNV_offset_basis = 2166136261U; - const unsigned _FNV_prime = 16777619U; - unsigned _Val = _FNV_offset_basis; - size_t _Count = s.size(); - const char* _First = s.c_str(); - for (size_t _Next = 0; _Next < _Count; ++_Next) - { - _Val ^= (unsigned)_First[_Next]; - _Val *= _FNV_prime; - } - - return _Val; - } - }; -} - -namespace glslang { - -inline TString* NewPoolTString(const char* s) -{ - void* memory = GetThreadPoolAllocator().allocate(sizeof(TString)); - return new(memory) TString(s); -} - -template<class T> inline T* NewPoolObject(T) -{ - return new(GetThreadPoolAllocator().allocate(sizeof(T))) T; -} - -template<class T> inline T* NewPoolObject(T, int instances) -{ - return new(GetThreadPoolAllocator().allocate(instances * sizeof(T))) T[instances]; -} - -// -// Pool allocator versions of vectors, lists, and maps -// -template <class T> class TVector : public std::vector<T, pool_allocator<T> > { -public: - POOL_ALLOCATOR_NEW_DELETE(GetThreadPoolAllocator()) - - typedef typename std::vector<T, pool_allocator<T> >::size_type size_type; - TVector() : std::vector<T, pool_allocator<T> >() {} - TVector(const pool_allocator<T>& a) : std::vector<T, pool_allocator<T> >(a) {} - TVector(size_type i) : std::vector<T, pool_allocator<T> >(i) {} - TVector(size_type i, const T& val) : std::vector<T, pool_allocator<T> >(i, val) {} -}; - -template <class T> class TList : public std::list<T, pool_allocator<T> > { -}; - -template <class K, class D, class CMP = std::less<K> > -class TMap : public std::map<K, D, CMP, pool_allocator<std::pair<K const, D> > > { -}; - -template <class K, class D, class HASH = std::hash<K>, class PRED = std::equal_to<K> > -class TUnorderedMap : public std::unordered_map<K, D, HASH, PRED, pool_allocator<std::pair<K const, D> > > { -}; - -// -// Persistent string memory. Should only be used for strings that survive -// across compiles/links. -// -typedef std::basic_string<char> TPersistString; - -// -// templatized min and max functions. -// -template <class T> T Min(const T a, const T b) { return a < b ? a : b; } -template <class T> T Max(const T a, const T b) { return a > b ? a : b; } - -// -// Create a TString object from an integer. -// -#if defined _MSC_VER || defined MINGW_HAS_SECURE_API -inline const TString String(const int i, const int base = 10) -{ - char text[16]; // 32 bit ints are at most 10 digits in base 10 - _itoa_s(i, text, sizeof(text), base); - return text; -} -#else -inline const TString String(const int i, const int /*base*/ = 10) -{ - char text[16]; // 32 bit ints are at most 10 digits in base 10 - - // we assume base 10 for all cases - snprintf(text, sizeof(text), "%d", i); - - return text; -} -#endif - -struct TSourceLoc { - void init() { name = nullptr; string = 0; line = 0; column = 0; } - // Returns the name if it exists. Otherwise, returns the string number. - std::string getStringNameOrNum(bool quoteStringName = true) const - { - if (name != nullptr) - return quoteStringName ? ("\"" + std::string(name) + "\"") : name; - return std::to_string((long long)string); - } - const char* name; // descriptive name for this string - int string; - int line; - int column; -}; - -typedef TMap<TString, TString> TPragmaTable; - -const int MaxTokenLength = 1024; - -template <class T> bool IsPow2(T powerOf2) -{ - if (powerOf2 <= 0) - return false; - - return (powerOf2 & (powerOf2 - 1)) == 0; -} - -// Round number up to a multiple of the given powerOf2, which is not -// a power, just a number that must be a power of 2. -template <class T> void RoundToPow2(T& number, int powerOf2) -{ - assert(IsPow2(powerOf2)); - number = (number + powerOf2 - 1) & ~(powerOf2 - 1); -} - -template <class T> bool IsMultipleOfPow2(T number, int powerOf2) -{ - assert(IsPow2(powerOf2)); - return ! 
(number & (powerOf2 - 1)); -} - -} // end namespace glslang - -#endif // _COMMON_INCLUDED_ diff --git a/deps/glslang/glslang/Include/ConstantUnion.h b/deps/glslang/glslang/Include/ConstantUnion.h deleted file mode 100644 index f66a7ff5..00000000 --- a/deps/glslang/glslang/Include/ConstantUnion.h +++ /dev/null @@ -1,625 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2013 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. 
-// - -#ifndef _CONSTANT_UNION_INCLUDED_ -#define _CONSTANT_UNION_INCLUDED_ - -namespace glslang { - -class TConstUnion { -public: - POOL_ALLOCATOR_NEW_DELETE(GetThreadPoolAllocator()) - - TConstUnion() : iConst(0), type(EbtInt) { } - - void setIConst(int i) - { - iConst = i; - type = EbtInt; - } - - void setUConst(unsigned int u) - { - uConst = u; - type = EbtUint; - } - - void setI64Const(long long i64) - { - i64Const = i64; - type = EbtInt64; - } - - void setU64Const(unsigned long long u64) - { - u64Const = u64; - type = EbtUint64; - } - - void setDConst(double d) - { - dConst = d; - type = EbtDouble; - } - - void setBConst(bool b) - { - bConst = b; - type = EbtBool; - } - - void setSConst(const TString* s) - { - sConst = s; - type = EbtString; - } - - int getIConst() const { return iConst; } - unsigned int getUConst() const { return uConst; } - long long getI64Const() const { return i64Const; } - unsigned long long getU64Const() const { return u64Const; } - double getDConst() const { return dConst; } - bool getBConst() const { return bConst; } - const TString* getSConst() const { return sConst; } - - bool operator==(const int i) const - { - if (i == iConst) - return true; - - return false; - } - - bool operator==(const unsigned int u) const - { - if (u == uConst) - return true; - - return false; - } - - bool operator==(const long long i64) const - { - if (i64 == i64Const) - return true; - - return false; - } - - bool operator==(const unsigned long long u64) const - { - if (u64 == u64Const) - return true; - - return false; - } - - bool operator==(const double d) const - { - if (d == dConst) - return true; - - return false; - } - - bool operator==(const bool b) const - { - if (b == bConst) - return true; - - return false; - } - - bool operator==(const TConstUnion& constant) const - { - if (constant.type != type) - return false; - - switch (type) { - case EbtInt: - if (constant.iConst == iConst) - return true; - - break; - case EbtUint: - if (constant.uConst == uConst) - return true; - - break; - case EbtInt64: - if (constant.i64Const == i64Const) - return true; - - break; - case EbtUint64: - if (constant.u64Const == u64Const) - return true; - - break; - case EbtDouble: - if (constant.dConst == dConst) - return true; - - break; - case EbtBool: - if (constant.bConst == bConst) - return true; - - break; - default: - assert(false && "Default missing"); - } - - return false; - } - - bool operator!=(const int i) const - { - return !operator==(i); - } - - bool operator!=(const unsigned int u) const - { - return !operator==(u); - } - - bool operator!=(const long long i) const - { - return !operator==(i); - } - - bool operator!=(const unsigned long long u) const - { - return !operator==(u); - } - - bool operator!=(const float f) const - { - return !operator==(f); - } - - bool operator!=(const bool b) const - { - return !operator==(b); - } - - bool operator!=(const TConstUnion& constant) const - { - return !operator==(constant); - } - - bool operator>(const TConstUnion& constant) const - { - assert(type == constant.type); - switch (type) { - case EbtInt: - if (iConst > constant.iConst) - return true; - - return false; - case EbtUint: - if (uConst > constant.uConst) - return true; - - return false; - case EbtInt64: - if (i64Const > constant.i64Const) - return true; - - return false; - case EbtUint64: - if (u64Const > constant.u64Const) - return true; - - return false; - case EbtDouble: - if (dConst > constant.dConst) - return true; - - return false; - default: - assert(false && "Default missing"); 
- return false; - } - } - - bool operator<(const TConstUnion& constant) const - { - assert(type == constant.type); - switch (type) { - case EbtInt: - if (iConst < constant.iConst) - return true; - - return false; - case EbtUint: - if (uConst < constant.uConst) - return true; - - return false; - case EbtInt64: - if (i64Const < constant.i64Const) - return true; - - return false; - case EbtUint64: - if (u64Const < constant.u64Const) - return true; - - return false; - case EbtDouble: - if (dConst < constant.dConst) - return true; - - return false; - default: - assert(false && "Default missing"); - return false; - } - } - - TConstUnion operator+(const TConstUnion& constant) const - { - TConstUnion returnValue; - assert(type == constant.type); - switch (type) { - case EbtInt: returnValue.setIConst(iConst + constant.iConst); break; - case EbtInt64: returnValue.setI64Const(i64Const + constant.i64Const); break; - case EbtUint: returnValue.setUConst(uConst + constant.uConst); break; - case EbtUint64: returnValue.setU64Const(u64Const + constant.u64Const); break; - case EbtDouble: returnValue.setDConst(dConst + constant.dConst); break; - default: assert(false && "Default missing"); - } - - return returnValue; - } - - TConstUnion operator-(const TConstUnion& constant) const - { - TConstUnion returnValue; - assert(type == constant.type); - switch (type) { - case EbtInt: returnValue.setIConst(iConst - constant.iConst); break; - case EbtInt64: returnValue.setI64Const(i64Const - constant.i64Const); break; - case EbtUint: returnValue.setUConst(uConst - constant.uConst); break; - case EbtUint64: returnValue.setU64Const(u64Const - constant.u64Const); break; - case EbtDouble: returnValue.setDConst(dConst - constant.dConst); break; - default: assert(false && "Default missing"); - } - - return returnValue; - } - - TConstUnion operator*(const TConstUnion& constant) const - { - TConstUnion returnValue; - assert(type == constant.type); - switch (type) { - case EbtInt: returnValue.setIConst(iConst * constant.iConst); break; - case EbtInt64: returnValue.setI64Const(i64Const * constant.i64Const); break; - case EbtUint: returnValue.setUConst(uConst * constant.uConst); break; - case EbtUint64: returnValue.setU64Const(u64Const * constant.u64Const); break; - case EbtDouble: returnValue.setDConst(dConst * constant.dConst); break; - default: assert(false && "Default missing"); - } - - return returnValue; - } - - TConstUnion operator%(const TConstUnion& constant) const - { - TConstUnion returnValue; - assert(type == constant.type); - switch (type) { - case EbtInt: returnValue.setIConst(iConst % constant.iConst); break; - case EbtInt64: returnValue.setI64Const(i64Const % constant.i64Const); break; - case EbtUint: returnValue.setUConst(uConst % constant.uConst); break; - case EbtUint64: returnValue.setU64Const(u64Const % constant.u64Const); break; - default: assert(false && "Default missing"); - } - - return returnValue; - } - - TConstUnion operator>>(const TConstUnion& constant) const - { - TConstUnion returnValue; - switch (type) { - case EbtInt: - switch (constant.type) { - case EbtInt: returnValue.setIConst(iConst >> constant.iConst); break; - case EbtUint: returnValue.setIConst(iConst >> constant.uConst); break; - case EbtInt64: returnValue.setIConst(iConst >> constant.i64Const); break; - case EbtUint64: returnValue.setIConst(iConst >> constant.u64Const); break; - default: assert(false && "Default missing"); - } - break; - case EbtUint: - switch (constant.type) { - case EbtInt: returnValue.setUConst(uConst >> 
constant.iConst); break; - case EbtUint: returnValue.setUConst(uConst >> constant.uConst); break; - case EbtInt64: returnValue.setUConst(uConst >> constant.i64Const); break; - case EbtUint64: returnValue.setUConst(uConst >> constant.u64Const); break; - default: assert(false && "Default missing"); - } - break; - case EbtInt64: - switch (constant.type) { - case EbtInt: returnValue.setI64Const(i64Const >> constant.iConst); break; - case EbtUint: returnValue.setI64Const(i64Const >> constant.uConst); break; - case EbtInt64: returnValue.setI64Const(i64Const >> constant.i64Const); break; - case EbtUint64: returnValue.setI64Const(i64Const >> constant.u64Const); break; - default: assert(false && "Default missing"); - } - break; - case EbtUint64: - switch (constant.type) { - case EbtInt: returnValue.setU64Const(u64Const >> constant.iConst); break; - case EbtUint: returnValue.setU64Const(u64Const >> constant.uConst); break; - case EbtInt64: returnValue.setU64Const(u64Const >> constant.i64Const); break; - case EbtUint64: returnValue.setU64Const(u64Const >> constant.u64Const); break; - default: assert(false && "Default missing"); - } - break; - default: assert(false && "Default missing"); - } - - return returnValue; - } - - TConstUnion operator<<(const TConstUnion& constant) const - { - TConstUnion returnValue; - switch (type) { - case EbtInt: - switch (constant.type) { - case EbtInt: returnValue.setIConst(iConst << constant.iConst); break; - case EbtUint: returnValue.setIConst(iConst << constant.uConst); break; - case EbtInt64: returnValue.setIConst(iConst << constant.i64Const); break; - case EbtUint64: returnValue.setIConst(iConst << constant.u64Const); break; - default: assert(false && "Default missing"); - } - break; - case EbtUint: - switch (constant.type) { - case EbtInt: returnValue.setUConst(uConst << constant.iConst); break; - case EbtUint: returnValue.setUConst(uConst << constant.uConst); break; - case EbtInt64: returnValue.setUConst(uConst << constant.i64Const); break; - case EbtUint64: returnValue.setUConst(uConst << constant.u64Const); break; - default: assert(false && "Default missing"); - } - break; - case EbtInt64: - switch (constant.type) { - case EbtInt: returnValue.setI64Const(i64Const << constant.iConst); break; - case EbtUint: returnValue.setI64Const(i64Const << constant.uConst); break; - case EbtInt64: returnValue.setI64Const(i64Const << constant.i64Const); break; - case EbtUint64: returnValue.setI64Const(i64Const << constant.u64Const); break; - default: assert(false && "Default missing"); - } - break; - case EbtUint64: - switch (constant.type) { - case EbtInt: returnValue.setU64Const(u64Const << constant.iConst); break; - case EbtUint: returnValue.setU64Const(u64Const << constant.uConst); break; - case EbtInt64: returnValue.setU64Const(u64Const << constant.i64Const); break; - case EbtUint64: returnValue.setU64Const(u64Const << constant.u64Const); break; - default: assert(false && "Default missing"); - } - break; - default: assert(false && "Default missing"); - } - - return returnValue; - } - - TConstUnion operator&(const TConstUnion& constant) const - { - TConstUnion returnValue; - assert(type == constant.type); - switch (type) { - case EbtInt: returnValue.setIConst(iConst & constant.iConst); break; - case EbtUint: returnValue.setUConst(uConst & constant.uConst); break; - case EbtInt64: returnValue.setI64Const(i64Const & constant.i64Const); break; - case EbtUint64: returnValue.setU64Const(u64Const & constant.u64Const); break; - default: assert(false && "Default missing"); - } - - 
return returnValue; - } - - TConstUnion operator|(const TConstUnion& constant) const - { - TConstUnion returnValue; - assert(type == constant.type); - switch (type) { - case EbtInt: returnValue.setIConst(iConst | constant.iConst); break; - case EbtUint: returnValue.setUConst(uConst | constant.uConst); break; - case EbtInt64: returnValue.setI64Const(i64Const | constant.i64Const); break; - case EbtUint64: returnValue.setU64Const(u64Const | constant.u64Const); break; - default: assert(false && "Default missing"); - } - - return returnValue; - } - - TConstUnion operator^(const TConstUnion& constant) const - { - TConstUnion returnValue; - assert(type == constant.type); - switch (type) { - case EbtInt: returnValue.setIConst(iConst ^ constant.iConst); break; - case EbtUint: returnValue.setUConst(uConst ^ constant.uConst); break; - case EbtInt64: returnValue.setI64Const(i64Const ^ constant.i64Const); break; - case EbtUint64: returnValue.setU64Const(u64Const ^ constant.u64Const); break; - default: assert(false && "Default missing"); - } - - return returnValue; - } - - TConstUnion operator~() const - { - TConstUnion returnValue; - switch (type) { - case EbtInt: returnValue.setIConst(~iConst); break; - case EbtUint: returnValue.setUConst(~uConst); break; - case EbtInt64: returnValue.setI64Const(~i64Const); break; - case EbtUint64: returnValue.setU64Const(~u64Const); break; - default: assert(false && "Default missing"); - } - - return returnValue; - } - - TConstUnion operator&&(const TConstUnion& constant) const - { - TConstUnion returnValue; - assert(type == constant.type); - switch (type) { - case EbtBool: returnValue.setBConst(bConst && constant.bConst); break; - default: assert(false && "Default missing"); - } - - return returnValue; - } - - TConstUnion operator||(const TConstUnion& constant) const - { - TConstUnion returnValue; - assert(type == constant.type); - switch (type) { - case EbtBool: returnValue.setBConst(bConst || constant.bConst); break; - default: assert(false && "Default missing"); - } - - return returnValue; - } - - TBasicType getType() const { return type; } - -private: - union { - int iConst; // used for ivec, scalar ints - unsigned int uConst; // used for uvec, scalar uints - long long i64Const; // used for i64vec, scalar int64s - unsigned long long u64Const; // used for u64vec, scalar uint64s - bool bConst; // used for bvec, scalar bools - double dConst; // used for vec, dvec, mat, dmat, scalar floats and doubles - const TString* sConst; // string constant - }; - - TBasicType type; -}; - -// Encapsulate having a pointer to an array of TConstUnion, -// which only needs to be allocated if its size is going to be -// bigger than 0. -// -// One convenience is being able to use [] to go inside the array, instead -// of C++ assuming it as an array of pointers to vectors. -// -// General usage is that the size is known up front, and it is -// created once with the proper size. 
-// -class TConstUnionArray { -public: - POOL_ALLOCATOR_NEW_DELETE(GetThreadPoolAllocator()) - - TConstUnionArray() : unionArray(nullptr) { } - virtual ~TConstUnionArray() { } - - explicit TConstUnionArray(int size) - { - if (size == 0) - unionArray = nullptr; - else - unionArray = new TConstUnionVector(size); - } - TConstUnionArray(const TConstUnionArray& a) : unionArray(a.unionArray) { } - TConstUnionArray(const TConstUnionArray& a, int start, int size) - { - unionArray = new TConstUnionVector(size); - for (int i = 0; i < size; ++i) - (*unionArray)[i] = a[start + i]; - } - - // Use this constructor for a smear operation - TConstUnionArray(int size, const TConstUnion& val) - { - unionArray = new TConstUnionVector(size, val); - } - - int size() const { return unionArray ? (int)unionArray->size() : 0; } - TConstUnion& operator[](size_t index) { return (*unionArray)[index]; } - const TConstUnion& operator[](size_t index) const { return (*unionArray)[index]; } - bool operator==(const TConstUnionArray& rhs) const - { - // this includes the case that both are unallocated - if (unionArray == rhs.unionArray) - return true; - - if (! unionArray || ! rhs.unionArray) - return false; - - if (! unionArray || ! rhs.unionArray) - return false; - - return *unionArray == *rhs.unionArray; - } - bool operator!=(const TConstUnionArray& rhs) const { return ! operator==(rhs); } - - double dot(const TConstUnionArray& rhs) - { - assert(rhs.unionArray->size() == unionArray->size()); - double sum = 0.0; - - for (size_t comp = 0; comp < unionArray->size(); ++comp) - sum += (*this)[comp].getDConst() * rhs[comp].getDConst(); - - return sum; - } - - bool empty() const { return unionArray == nullptr; } - -protected: - typedef TVector TConstUnionVector; - TConstUnionVector* unionArray; -}; - -} // end namespace glslang - -#endif // _CONSTANT_UNION_INCLUDED_ diff --git a/deps/glslang/glslang/Include/InfoSink.h b/deps/glslang/glslang/Include/InfoSink.h deleted file mode 100644 index dceb603c..00000000 --- a/deps/glslang/glslang/Include/InfoSink.h +++ /dev/null @@ -1,144 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#ifndef _INFOSINK_INCLUDED_ -#define _INFOSINK_INCLUDED_ - -#include "../Include/Common.h" -#include - -namespace glslang { - -// -// TPrefixType is used to centralize how info log messages start. -// See below. -// -enum TPrefixType { - EPrefixNone, - EPrefixWarning, - EPrefixError, - EPrefixInternalError, - EPrefixUnimplemented, - EPrefixNote -}; - -enum TOutputStream { - ENull = 0, - EDebugger = 0x01, - EStdOut = 0x02, - EString = 0x04, -}; -// -// Encapsulate info logs for all objects that have them. -// -// The methods are a general set of tools for getting a variety of -// messages and types inserted into the log. -// -class TInfoSinkBase { -public: - TInfoSinkBase() : outputStream(4) {} - void erase() { sink.erase(); } - TInfoSinkBase& operator<<(const TPersistString& t) { append(t); return *this; } - TInfoSinkBase& operator<<(char c) { append(1, c); return *this; } - TInfoSinkBase& operator<<(const char* s) { append(s); return *this; } - TInfoSinkBase& operator<<(int n) { append(String(n)); return *this; } - TInfoSinkBase& operator<<(unsigned int n) { append(String(n)); return *this; } - TInfoSinkBase& operator<<(float n) { const int size = 40; char buf[size]; - snprintf(buf, size, (fabs(n) > 1e-8 && fabs(n) < 1e8) || n == 0.0f ? 
"%f" : "%g", n); - append(buf); - return *this; } - TInfoSinkBase& operator+(const TPersistString& t) { append(t); return *this; } - TInfoSinkBase& operator+(const TString& t) { append(t); return *this; } - TInfoSinkBase& operator<<(const TString& t) { append(t); return *this; } - TInfoSinkBase& operator+(const char* s) { append(s); return *this; } - const char* c_str() const { return sink.c_str(); } - void prefix(TPrefixType message) { - switch(message) { - case EPrefixNone: break; - case EPrefixWarning: append("WARNING: "); break; - case EPrefixError: append("ERROR: "); break; - case EPrefixInternalError: append("INTERNAL ERROR: "); break; - case EPrefixUnimplemented: append("UNIMPLEMENTED: "); break; - case EPrefixNote: append("NOTE: "); break; - default: append("UNKNOWN ERROR: "); break; - } - } - void location(const TSourceLoc& loc) { - const int maxSize = 24; - char locText[maxSize]; - snprintf(locText, maxSize, ":%d", loc.line); - append(loc.getStringNameOrNum(false).c_str()); - append(locText); - append(": "); - } - void message(TPrefixType message, const char* s) { - prefix(message); - append(s); - append("\n"); - } - void message(TPrefixType message, const char* s, const TSourceLoc& loc) { - prefix(message); - location(loc); - append(s); - append("\n"); - } - - void setOutputStream(int output = 4) - { - outputStream = output; - } - -protected: - void append(const char* s); - - void append(int count, char c); - void append(const TPersistString& t); - void append(const TString& t); - - void checkMem(size_t growth) { if (sink.capacity() < sink.size() + growth + 2) - sink.reserve(sink.capacity() + sink.capacity() / 2); } - void appendToStream(const char* s); - TPersistString sink; - int outputStream; -}; - -} // end namespace glslang - -class TInfoSink { -public: - glslang::TInfoSinkBase info; - glslang::TInfoSinkBase debug; -}; - -#endif // _INFOSINK_INCLUDED_ diff --git a/deps/glslang/glslang/Include/InitializeGlobals.h b/deps/glslang/glslang/Include/InitializeGlobals.h deleted file mode 100644 index 4cf2dca7..00000000 --- a/deps/glslang/glslang/Include/InitializeGlobals.h +++ /dev/null @@ -1,47 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#ifndef __INITIALIZE_GLOBALS_INCLUDED_ -#define __INITIALIZE_GLOBALS_INCLUDED_ - -namespace glslang { - -void InitializeMemoryPools(); -void FreeGlobalPools(); -bool InitializePoolIndex(); -void FreePoolIndex(); - -} // end namespace glslang - -#endif // __INITIALIZE_GLOBALS_INCLUDED_ diff --git a/deps/glslang/glslang/Include/PoolAlloc.h b/deps/glslang/glslang/Include/PoolAlloc.h deleted file mode 100644 index 69bacb15..00000000 --- a/deps/glslang/glslang/Include/PoolAlloc.h +++ /dev/null @@ -1,324 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2012-2013 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#ifndef _POOLALLOC_INCLUDED_ -#define _POOLALLOC_INCLUDED_ - -#ifdef _DEBUG -# define GUARD_BLOCKS // define to enable guard block sanity checking -#endif - -// -// This header defines an allocator that can be used to efficiently -// allocate a large number of small requests for heap memory, with the -// intention that they are not individually deallocated, but rather -// collectively deallocated at one time. -// -// This simultaneously -// -// * Makes each individual allocation much more efficient; the -// typical allocation is trivial. -// * Completely avoids the cost of doing individual deallocation. -// * Saves the trouble of tracking down and plugging a large class of leaks. -// -// Individual classes can use this allocator by supplying their own -// new and delete methods. 
-// -// STL containers can use this allocator by using the pool_allocator -// class as the allocator (second) template argument. -// - -#include -#include -#include - -namespace glslang { - -// If we are using guard blocks, we must track each individual -// allocation. If we aren't using guard blocks, these -// never get instantiated, so won't have any impact. -// - -class TAllocation { -public: - TAllocation(size_t size, unsigned char* mem, TAllocation* prev = 0) : - size(size), mem(mem), prevAlloc(prev) { - // Allocations are bracketed: - // [allocationHeader][initialGuardBlock][userData][finalGuardBlock] - // This would be cleaner with if (guardBlockSize)..., but that - // makes the compiler print warnings about 0 length memsets, - // even with the if() protecting them. -# ifdef GUARD_BLOCKS - memset(preGuard(), guardBlockBeginVal, guardBlockSize); - memset(data(), userDataFill, size); - memset(postGuard(), guardBlockEndVal, guardBlockSize); -# endif - } - - void check() const { - checkGuardBlock(preGuard(), guardBlockBeginVal, "before"); - checkGuardBlock(postGuard(), guardBlockEndVal, "after"); - } - - void checkAllocList() const; - - // Return total size needed to accommodate user buffer of 'size', - // plus our tracking data. - inline static size_t allocationSize(size_t size) { - return size + 2 * guardBlockSize + headerSize(); - } - - // Offset from surrounding buffer to get to user data buffer. - inline static unsigned char* offsetAllocation(unsigned char* m) { - return m + guardBlockSize + headerSize(); - } - -private: - void checkGuardBlock(unsigned char* blockMem, unsigned char val, const char* locText) const; - - // Find offsets to pre and post guard blocks, and user data buffer - unsigned char* preGuard() const { return mem + headerSize(); } - unsigned char* data() const { return preGuard() + guardBlockSize; } - unsigned char* postGuard() const { return data() + size; } - - size_t size; // size of the user data area - unsigned char* mem; // beginning of our allocation (pts to header) - TAllocation* prevAlloc; // prior allocation in the chain - - const static unsigned char guardBlockBeginVal; - const static unsigned char guardBlockEndVal; - const static unsigned char userDataFill; - - const static size_t guardBlockSize; -# ifdef GUARD_BLOCKS - inline static size_t headerSize() { return sizeof(TAllocation); } -# else - inline static size_t headerSize() { return 0; } -# endif -}; - -// -// There are several stacks. One is to track the pushing and popping -// of the user, and not yet implemented. The others are simply a -// repositories of free pages or used pages. -// -// Page stacks are linked together with a simple header at the beginning -// of each allocation obtained from the underlying OS. Multi-page allocations -// are returned to the OS. Individual page allocations are kept for future -// re-use. -// -// The "page size" used is not, nor must it match, the underlying OS -// page size. But, having it be about that size or equal to a set of -// pages is likely most optimal. -// -class TPoolAllocator { -public: - TPoolAllocator(int growthIncrement = 8*1024, int allocationAlignment = 16); - - // - // Don't call the destructor just to free up the memory, call pop() - // - ~TPoolAllocator(); - - // - // Call push() to establish a new place to pop memory too. Does not - // have to be called to get things started. 
- // - void push(); - - // - // Call pop() to free all memory allocated since the last call to push(), - // or if no last call to push, frees all memory since first allocation. - // - void pop(); - - // - // Call popAll() to free all memory allocated. - // - void popAll(); - - // - // Call allocate() to actually acquire memory. Returns 0 if no memory - // available, otherwise a properly aligned pointer to 'numBytes' of memory. - // - void* allocate(size_t numBytes); - - // - // There is no deallocate. The point of this class is that - // deallocation can be skipped by the user of it, as the model - // of use is to simultaneously deallocate everything at once - // by calling pop(), and to not have to solve memory leak problems. - // - -protected: - friend struct tHeader; - - struct tHeader { - tHeader(tHeader* nextPage, size_t pageCount) : -#ifdef GUARD_BLOCKS - lastAllocation(0), -#endif - nextPage(nextPage), pageCount(pageCount) { } - - ~tHeader() { -#ifdef GUARD_BLOCKS - if (lastAllocation) - lastAllocation->checkAllocList(); -#endif - } - -#ifdef GUARD_BLOCKS - TAllocation* lastAllocation; -#endif - tHeader* nextPage; - size_t pageCount; - }; - - struct tAllocState { - size_t offset; - tHeader* page; - }; - typedef std::vector<tAllocState> tAllocStack; - - // Track allocations if and only if we're using guard blocks -#ifndef GUARD_BLOCKS - void* initializeAllocation(tHeader*, unsigned char* memory, size_t) { -#else - void* initializeAllocation(tHeader* block, unsigned char* memory, size_t numBytes) { - new(memory) TAllocation(numBytes, memory, block->lastAllocation); - block->lastAllocation = reinterpret_cast<TAllocation*>(memory); -#endif - - // This is optimized entirely away if GUARD_BLOCKS is not defined. - return TAllocation::offsetAllocation(memory); - } - - size_t pageSize; // granularity of allocation from the OS - size_t alignment; // all returned allocations will be aligned at - // this granularity, which will be a power of 2 - size_t alignmentMask; - size_t headerSkip; // amount of memory to skip to make room for the - // header (basically, size of header, rounded - // up to make it aligned - size_t currentPageOffset; // next offset in top of inUseList to allocate from - tHeader* freeList; // list of popped memory - tHeader* inUseList; // list of all memory currently being used - tAllocStack stack; // stack of where to allocate from, to partition pool - - int numCalls; // just an interesting statistic - size_t totalBytes; // just an interesting statistic -private: - TPoolAllocator& operator=(const TPoolAllocator&); // don't allow assignment operator - TPoolAllocator(const TPoolAllocator&); // don't allow default copy constructor -}; - -// -// There could potentially be many pools with pops happening at -// different times. But a simple use is to have a global pop -// with everyone using the same global allocator. -// -typedef TPoolAllocator* PoolAllocatorPointer; -extern TPoolAllocator& GetThreadPoolAllocator(); - -struct TThreadMemoryPools -{ - TPoolAllocator* threadPoolAllocator; -}; - -void SetThreadPoolAllocator(TPoolAllocator& poolAllocator); - -// -// This STL compatible allocator is intended to be used as the allocator -// parameter to templatized STL containers, like vector and map. -// -// It will use the pools for allocation, and not -// do any deallocation, but will still do destruction. 
-// -template<class T> -class pool_allocator { -public: - typedef size_t size_type; - typedef ptrdiff_t difference_type; - typedef T *pointer; - typedef const T *const_pointer; - typedef T& reference; - typedef const T& const_reference; - typedef T value_type; - template<class Other> - struct rebind { - typedef pool_allocator<Other> other; - }; - pointer address(reference x) const { return &x; } - const_pointer address(const_reference x) const { return &x; } - - pool_allocator() : allocator(GetThreadPoolAllocator()) { } - pool_allocator(TPoolAllocator& a) : allocator(a) { } - pool_allocator(const pool_allocator& p) : allocator(p.allocator) { } - - template<class Other> - pool_allocator(const pool_allocator<Other>& p) : allocator(p.getAllocator()) { } - - pointer allocate(size_type n) { - return reinterpret_cast<pointer>(getAllocator().allocate(n * sizeof(T))); } - pointer allocate(size_type n, const void*) { - return reinterpret_cast<pointer>(getAllocator().allocate(n * sizeof(T))); } - - void deallocate(void*, size_type) { } - void deallocate(pointer, size_type) { } - - pointer _Charalloc(size_t n) { - return reinterpret_cast<pointer>(getAllocator().allocate(n)); } - - void construct(pointer p, const T& val) { new ((void *)p) T(val); } - void destroy(pointer p) { p->T::~T(); } - - bool operator==(const pool_allocator& rhs) const { return &getAllocator() == &rhs.getAllocator(); } - bool operator!=(const pool_allocator& rhs) const { return &getAllocator() != &rhs.getAllocator(); } - - size_type max_size() const { return static_cast<size_type>(-1) / sizeof(T); } - size_type max_size(int size) const { return static_cast<size_type>(-1) / size; } - - void setAllocator(TPoolAllocator* a) { allocator = *a; } - TPoolAllocator& getAllocator() const { return allocator; } - -protected: - pool_allocator& operator=(const pool_allocator&) { return *this; } - TPoolAllocator& allocator; -}; - -} // end namespace glslang - -#endif // _POOLALLOC_INCLUDED_ diff --git a/deps/glslang/glslang/Include/ResourceLimits.h b/deps/glslang/glslang/Include/ResourceLimits.h deleted file mode 100644 index 0d07b8c8..00000000 --- a/deps/glslang/glslang/Include/ResourceLimits.h +++ /dev/null @@ -1,140 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2013 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#ifndef _RESOURCE_LIMITS_INCLUDED_ -#define _RESOURCE_LIMITS_INCLUDED_ - -struct TLimits { - bool nonInductiveForLoops; - bool whileLoops; - bool doWhileLoops; - bool generalUniformIndexing; - bool generalAttributeMatrixVectorIndexing; - bool generalVaryingIndexing; - bool generalSamplerIndexing; - bool generalVariableIndexing; - bool generalConstantMatrixVectorIndexing; -}; - -struct TBuiltInResource { - int maxLights; - int maxClipPlanes; - int maxTextureUnits; - int maxTextureCoords; - int maxVertexAttribs; - int maxVertexUniformComponents; - int maxVaryingFloats; - int maxVertexTextureImageUnits; - int maxCombinedTextureImageUnits; - int maxTextureImageUnits; - int maxFragmentUniformComponents; - int maxDrawBuffers; - int maxVertexUniformVectors; - int maxVaryingVectors; - int maxFragmentUniformVectors; - int maxVertexOutputVectors; - int maxFragmentInputVectors; - int minProgramTexelOffset; - int maxProgramTexelOffset; - int maxClipDistances; - int maxComputeWorkGroupCountX; - int maxComputeWorkGroupCountY; - int maxComputeWorkGroupCountZ; - int maxComputeWorkGroupSizeX; - int maxComputeWorkGroupSizeY; - int maxComputeWorkGroupSizeZ; - int maxComputeUniformComponents; - int maxComputeTextureImageUnits; - int maxComputeImageUniforms; - int maxComputeAtomicCounters; - int maxComputeAtomicCounterBuffers; - int maxVaryingComponents; - int maxVertexOutputComponents; - int maxGeometryInputComponents; - int maxGeometryOutputComponents; - int maxFragmentInputComponents; - int maxImageUnits; - int maxCombinedImageUnitsAndFragmentOutputs; - int maxCombinedShaderOutputResources; - int maxImageSamples; - int maxVertexImageUniforms; - int maxTessControlImageUniforms; - int maxTessEvaluationImageUniforms; - int maxGeometryImageUniforms; - int maxFragmentImageUniforms; - int maxCombinedImageUniforms; - int maxGeometryTextureImageUnits; - int maxGeometryOutputVertices; - int maxGeometryTotalOutputComponents; - int maxGeometryUniformComponents; - int maxGeometryVaryingComponents; - int maxTessControlInputComponents; - int maxTessControlOutputComponents; - int maxTessControlTextureImageUnits; - int maxTessControlUniformComponents; - int maxTessControlTotalOutputComponents; - int maxTessEvaluationInputComponents; - int maxTessEvaluationOutputComponents; - int maxTessEvaluationTextureImageUnits; - int maxTessEvaluationUniformComponents; - int maxTessPatchComponents; - int maxPatchVertices; - int maxTessGenLevel; - int maxViewports; - int maxVertexAtomicCounters; - int maxTessControlAtomicCounters; - int maxTessEvaluationAtomicCounters; - int maxGeometryAtomicCounters; - int maxFragmentAtomicCounters; - int maxCombinedAtomicCounters; - int maxAtomicCounterBindings; - int maxVertexAtomicCounterBuffers; - int maxTessControlAtomicCounterBuffers; - int maxTessEvaluationAtomicCounterBuffers; - int maxGeometryAtomicCounterBuffers; - int maxFragmentAtomicCounterBuffers; - int maxCombinedAtomicCounterBuffers; - int maxAtomicCounterBufferSize; - int 
maxTransformFeedbackBuffers; - int maxTransformFeedbackInterleavedComponents; - int maxCullDistances; - int maxCombinedClipAndCullDistances; - int maxSamples; - - TLimits limits; -}; - -#endif // _RESOURCE_LIMITS_INCLUDED_ diff --git a/deps/glslang/glslang/Include/ShHandle.h b/deps/glslang/glslang/Include/ShHandle.h deleted file mode 100644 index 64ba6d63..00000000 --- a/deps/glslang/glslang/Include/ShHandle.h +++ /dev/null @@ -1,173 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#ifndef _SHHANDLE_INCLUDED_ -#define _SHHANDLE_INCLUDED_ - -// -// Machine independent part of the compiler private objects -// sent as ShHandle to the driver. -// -// This should not be included by driver code. -// - -#define SH_EXPORTING -#include "../Public/ShaderLang.h" -#include "../MachineIndependent/Versions.h" -#include "InfoSink.h" - -class TCompiler; -class TLinker; -class TUniformMap; - -// -// The base class used to back handles returned to the driver. -// -class TShHandleBase { -public: - TShHandleBase() { } - virtual ~TShHandleBase() { } - virtual TCompiler* getAsCompiler() { return 0; } - virtual TLinker* getAsLinker() { return 0; } - virtual TUniformMap* getAsUniformMap() { return 0; } -}; - -// -// The base class for the machine dependent linker to derive from -// for managing where uniforms live. -// -class TUniformMap : public TShHandleBase { -public: - TUniformMap() { } - virtual ~TUniformMap() { } - virtual TUniformMap* getAsUniformMap() { return this; } - virtual int getLocation(const char* name) = 0; - virtual TInfoSink& getInfoSink() { return infoSink; } - TInfoSink infoSink; -}; - -class TIntermNode; - -// -// The base class for the machine dependent compiler to derive from -// for managing object code from the compile. 
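The TBuiltInResource structure deleted above is the table of implementation limits a caller fills in before handing work to the compile entry points (which are not shown in this diff). A hedged sketch of that usage follows; the helper name is invented and the values are arbitrary placeholders, not recommended defaults.

    #include "glslang/Include/ResourceLimits.h"

    TBuiltInResource makeResources()
    {
        TBuiltInResource r = {};                 // zero-initialize every limit
        r.maxDrawBuffers                 = 8;
        r.maxComputeWorkGroupSizeX       = 1024;
        r.maxCombinedTextureImageUnits   = 80;
        r.limits.whileLoops              = true;
        r.limits.generalVariableIndexing = true;
        return r;
    }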
-// -class TCompiler : public TShHandleBase { -public: - TCompiler(EShLanguage l, TInfoSink& sink) : infoSink(sink) , language(l), haveValidObjectCode(false) { } - virtual ~TCompiler() { } - EShLanguage getLanguage() { return language; } - virtual TInfoSink& getInfoSink() { return infoSink; } - - virtual bool compile(TIntermNode* root, int version = 0, EProfile profile = ENoProfile) = 0; - - virtual TCompiler* getAsCompiler() { return this; } - virtual bool linkable() { return haveValidObjectCode; } - - TInfoSink& infoSink; -protected: - TCompiler& operator=(TCompiler&); - - EShLanguage language; - bool haveValidObjectCode; -}; - -// -// Link operations are based on a list of compile results... -// -typedef glslang::TVector TCompilerList; -typedef glslang::TVector THandleList; - -// -// The base class for the machine dependent linker to derive from -// to manage the resulting executable. -// - -class TLinker : public TShHandleBase { -public: - TLinker(EShExecutable e, TInfoSink& iSink) : - infoSink(iSink), - executable(e), - haveReturnableObjectCode(false), - appAttributeBindings(0), - fixedAttributeBindings(0), - excludedAttributes(0), - excludedCount(0), - uniformBindings(0) { } - virtual TLinker* getAsLinker() { return this; } - virtual ~TLinker() { } - virtual bool link(TCompilerList&, TUniformMap*) = 0; - virtual bool link(THandleList&) { return false; } - virtual void setAppAttributeBindings(const ShBindingTable* t) { appAttributeBindings = t; } - virtual void setFixedAttributeBindings(const ShBindingTable* t) { fixedAttributeBindings = t; } - virtual void getAttributeBindings(ShBindingTable const **t) const = 0; - virtual void setExcludedAttributes(const int* attributes, int count) { excludedAttributes = attributes; excludedCount = count; } - virtual ShBindingTable* getUniformBindings() const { return uniformBindings; } - virtual const void* getObjectCode() const { return 0; } // a real compiler would be returning object code here - virtual TInfoSink& getInfoSink() { return infoSink; } - TInfoSink& infoSink; -protected: - TLinker& operator=(TLinker&); - EShExecutable executable; - bool haveReturnableObjectCode; // true when objectCode is acceptable to send to driver - - const ShBindingTable* appAttributeBindings; - const ShBindingTable* fixedAttributeBindings; - const int* excludedAttributes; - int excludedCount; - ShBindingTable* uniformBindings; // created by the linker -}; - -// -// This is the interface between the machine independent code -// and the machine dependent code. -// -// The machine dependent code should derive from the classes -// above. Then Construct*() and Delete*() will create and -// destroy the machine dependent objects, which contain the -// above machine independent information. -// -TCompiler* ConstructCompiler(EShLanguage, int); - -TShHandleBase* ConstructLinker(EShExecutable, int); -TShHandleBase* ConstructBindings(); -void DeleteLinker(TShHandleBase*); -void DeleteBindingList(TShHandleBase* bindingList); - -TUniformMap* ConstructUniformMap(); -void DeleteCompiler(TCompiler*); - -void DeleteUniformMap(TUniformMap*); - -#endif // _SHHANDLE_INCLUDED_ diff --git a/deps/glslang/glslang/Include/Types.h b/deps/glslang/glslang/Include/Types.h deleted file mode 100644 index b5b91f52..00000000 --- a/deps/glslang/glslang/Include/Types.h +++ /dev/null @@ -1,1918 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2012-2016 LunarG, Inc. -// Copyright (C) 2015-2016 Google, Inc. -// -// All rights reserved. 
-// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#ifndef _TYPES_INCLUDED -#define _TYPES_INCLUDED - -#include "../Include/Common.h" -#include "../Include/BaseTypes.h" -#include "../Public/ShaderLang.h" -#include "arrays.h" - -#include - -namespace glslang { - -const int GlslangMaxTypeLength = 200; // TODO: need to print block/struct one member per line, so this can stay bounded - -const char* const AnonymousPrefix = "anon@"; // for something like a block whose members can be directly accessed -inline bool IsAnonymous(const TString& name) -{ - return name.compare(0, 5, AnonymousPrefix) == 0; -} - -// -// Details within a sampler type -// -enum TSamplerDim { - EsdNone, - Esd1D, - Esd2D, - Esd3D, - EsdCube, - EsdRect, - EsdBuffer, - EsdSubpass, // goes only with non-sampled image (image is true) - EsdNumDims -}; - -struct TSampler { // misnomer now; includes images, textures without sampler, and textures with sampler - TBasicType type : 8; // type returned by sampler - TSamplerDim dim : 8; - bool arrayed : 1; - bool shadow : 1; - bool ms : 1; - bool image : 1; // image, combined should be false - bool combined : 1; // true means texture is combined with a sampler, false means texture with no sampler - bool sampler : 1; // true means a pure sampler, other fields should be clear() - bool external : 1; // GL_OES_EGL_image_external - unsigned int vectorSize : 3; // vector return type size. - - // Some languages support structures as sample results. Storing the whole structure in the - // TSampler is too large, so there is an index to a separate table. - static const unsigned structReturnIndexBits = 4; // number of index bits to use. - static const unsigned structReturnSlots = (1< TTypeList; - -typedef TVector TIdentifierList; - -// -// Following are a series of helper enums for managing layouts and qualifiers, -// used for TPublicType, TType, others. 
-// - -enum TLayoutPacking { - ElpNone, - ElpShared, // default, but different than saying nothing - ElpStd140, - ElpStd430, - ElpPacked, - ElpCount // If expanding, see bitfield width below -}; - -enum TLayoutMatrix { - ElmNone, - ElmRowMajor, - ElmColumnMajor, // default, but different than saying nothing - ElmCount // If expanding, see bitfield width below -}; - -// Union of geometry shader and tessellation shader geometry types. -// They don't go into TType, but rather have current state per shader or -// active parser type (TPublicType). -enum TLayoutGeometry { - ElgNone, - ElgPoints, - ElgLines, - ElgLinesAdjacency, - ElgLineStrip, - ElgTriangles, - ElgTrianglesAdjacency, - ElgTriangleStrip, - ElgQuads, - ElgIsolines, -}; - -enum TVertexSpacing { - EvsNone, - EvsEqual, - EvsFractionalEven, - EvsFractionalOdd -}; - -enum TVertexOrder { - EvoNone, - EvoCw, - EvoCcw -}; - -// Note: order matters, as type of format is done by comparison. -enum TLayoutFormat { - ElfNone, - - // Float image - ElfRgba32f, - ElfRgba16f, - ElfR32f, - ElfRgba8, - ElfRgba8Snorm, - - ElfEsFloatGuard, // to help with comparisons - - ElfRg32f, - ElfRg16f, - ElfR11fG11fB10f, - ElfR16f, - ElfRgba16, - ElfRgb10A2, - ElfRg16, - ElfRg8, - ElfR16, - ElfR8, - ElfRgba16Snorm, - ElfRg16Snorm, - ElfRg8Snorm, - ElfR16Snorm, - ElfR8Snorm, - - ElfFloatGuard, // to help with comparisons - - // Int image - ElfRgba32i, - ElfRgba16i, - ElfRgba8i, - ElfR32i, - - ElfEsIntGuard, // to help with comparisons - - ElfRg32i, - ElfRg16i, - ElfRg8i, - ElfR16i, - ElfR8i, - - ElfIntGuard, // to help with comparisons - - // Uint image - ElfRgba32ui, - ElfRgba16ui, - ElfRgba8ui, - ElfR32ui, - - ElfEsUintGuard, // to help with comparisons - - ElfRg32ui, - ElfRg16ui, - ElfRgb10a2ui, - ElfRg8ui, - ElfR16ui, - ElfR8ui, - - ElfCount -}; - -enum TLayoutDepth { - EldNone, - EldAny, - EldGreater, - EldLess, - EldUnchanged, - - EldCount -}; - -enum TBlendEquationShift { - // No 'EBlendNone': - // These are used as bit-shift amounts. A mask of such shifts will have type 'int', - // and in that space, 0 means no bits set, or none. In this enum, 0 means (1 << 0), a bit is set. - EBlendMultiply, - EBlendScreen, - EBlendOverlay, - EBlendDarken, - EBlendLighten, - EBlendColordodge, - EBlendColorburn, - EBlendHardlight, - EBlendSoftlight, - EBlendDifference, - EBlendExclusion, - EBlendHslHue, - EBlendHslSaturation, - EBlendHslColor, - EBlendHslLuminosity, - EBlendAllEquations, - - EBlendCount -}; - -class TQualifier { -public: - static const int layoutNotSet = -1; - - void clear() - { - precision = EpqNone; - invariant = false; - noContraction = false; - makeTemporary(); - declaredBuiltIn = EbvNone; - } - - // drop qualifiers that don't belong in a temporary variable - void makeTemporary() - { - semanticName = nullptr; - storage = EvqTemporary; - builtIn = EbvNone; - clearInterstage(); - clearMemory(); - specConstant = false; - clearLayout(); - } - - void clearInterstage() - { - clearInterpolation(); - patch = false; - sample = false; - } - - void clearInterpolation() - { - centroid = false; - smooth = false; - flat = false; - nopersp = false; -#ifdef AMD_EXTENSIONS - explicitInterp = false; -#endif - } - - void clearMemory() - { - coherent = false; - volatil = false; - restrict = false; - readonly = false; - writeonly = false; - } - - // Drop just the storage qualification, which perhaps should - // never be done, as it is fundamentally inconsistent, but need to - // explore what downstream consumers need. 
- // E.g., in a deference, it is an inconsistency between: - // A) partially dereferenced resource is still in the storage class it started in - // B) partially dereferenced resource is a new temporary object - // If A, then nothing should change, if B, then everything should change, but this is half way. - void makePartialTemporary() - { - storage = EvqTemporary; - specConstant = false; - } - - const char* semanticName; - TStorageQualifier storage : 6; - TBuiltInVariable builtIn : 8; - TBuiltInVariable declaredBuiltIn : 8; - TPrecisionQualifier precision : 3; - bool invariant : 1; // require canonical treatment for cross-shader invariance - bool noContraction: 1; // prevent contraction and reassociation, e.g., for 'precise' keyword, and expressions it affects - bool centroid : 1; - bool smooth : 1; - bool flat : 1; - bool nopersp : 1; -#ifdef AMD_EXTENSIONS - bool explicitInterp : 1; -#endif - bool patch : 1; - bool sample : 1; - bool coherent : 1; - bool volatil : 1; - bool restrict : 1; - bool readonly : 1; - bool writeonly : 1; - bool specConstant : 1; // having a constant_id is not sufficient: expressions have no id, but are still specConstant - - bool isMemory() const - { - return coherent || volatil || restrict || readonly || writeonly; - } - bool isInterpolation() const - { -#ifdef AMD_EXTENSIONS - return flat || smooth || nopersp || explicitInterp; -#else - return flat || smooth || nopersp; -#endif - } - bool isAuxiliary() const - { - return centroid || patch || sample; - } - - bool isPipeInput() const - { - switch (storage) { - case EvqVaryingIn: - case EvqFragCoord: - case EvqPointCoord: - case EvqFace: - case EvqVertexId: - case EvqInstanceId: - return true; - default: - return false; - } - } - - bool isPipeOutput() const - { - switch (storage) { - case EvqPosition: - case EvqPointSize: - case EvqClipVertex: - case EvqVaryingOut: - case EvqFragColor: - case EvqFragDepth: - return true; - default: - return false; - } - } - - bool isParamInput() const - { - switch (storage) { - case EvqIn: - case EvqInOut: - case EvqConstReadOnly: - return true; - default: - return false; - } - } - - bool isParamOutput() const - { - switch (storage) { - case EvqOut: - case EvqInOut: - return true; - default: - return false; - } - } - - bool isUniformOrBuffer() const - { - switch (storage) { - case EvqUniform: - case EvqBuffer: - return true; - default: - return false; - } - } - - bool isIo() const - { - switch (storage) { - case EvqUniform: - case EvqBuffer: - case EvqVaryingIn: - case EvqFragCoord: - case EvqPointCoord: - case EvqFace: - case EvqVertexId: - case EvqInstanceId: - case EvqPosition: - case EvqPointSize: - case EvqClipVertex: - case EvqVaryingOut: - case EvqFragColor: - case EvqFragDepth: - return true; - default: - return false; - } - } - - // True if this type of IO is supposed to be arrayed with extra level for per-vertex data - bool isArrayedIo(EShLanguage language) const - { - switch (language) { - case EShLangGeometry: - return isPipeInput(); - case EShLangTessControl: - return ! patch && (isPipeInput() || isPipeOutput()); - case EShLangTessEvaluation: - return ! 
patch && isPipeInput(); - default: - return false; - } - } - - // Implementing an embedded layout-qualifier class here, since C++ can't have a real class bitfield - void clearLayout() // all layout - { - clearUniformLayout(); - - layoutPushConstant = false; -#ifdef NV_EXTENSIONS - layoutPassthrough = false; - layoutViewportRelative = false; - // -2048 as the default value indicating layoutSecondaryViewportRelative is not set - layoutSecondaryViewportRelativeOffset = -2048; -#endif - - clearInterstageLayout(); - - layoutSpecConstantId = layoutSpecConstantIdEnd; - - layoutFormat = ElfNone; - } - void clearInterstageLayout() - { - layoutLocation = layoutLocationEnd; - layoutComponent = layoutComponentEnd; - layoutIndex = layoutIndexEnd; - clearStreamLayout(); - clearXfbLayout(); - } - void clearStreamLayout() - { - layoutStream = layoutStreamEnd; - } - void clearXfbLayout() - { - layoutXfbBuffer = layoutXfbBufferEnd; - layoutXfbStride = layoutXfbStrideEnd; - layoutXfbOffset = layoutXfbOffsetEnd; - } - - bool hasLayout() const - { - return hasUniformLayout() || - hasAnyLocation() || - hasStream() || - hasXfb() || - hasFormat() || - layoutPushConstant; - } - TLayoutMatrix layoutMatrix : 3; - TLayoutPacking layoutPacking : 4; - int layoutOffset; - int layoutAlign; - - unsigned int layoutLocation :12; - static const unsigned int layoutLocationEnd = 0xFFF; - - unsigned int layoutComponent : 3; - static const unsigned int layoutComponentEnd = 4; - - unsigned int layoutSet : 7; - static const unsigned int layoutSetEnd = 0x3F; - - unsigned int layoutBinding : 16; - static const unsigned int layoutBindingEnd = 0xFFFF; - - unsigned int layoutIndex : 8; - static const unsigned int layoutIndexEnd = 0xFF; - - unsigned int layoutStream : 8; - static const unsigned int layoutStreamEnd = 0xFF; - - unsigned int layoutXfbBuffer : 4; - static const unsigned int layoutXfbBufferEnd = 0xF; - - unsigned int layoutXfbStride : 10; - static const unsigned int layoutXfbStrideEnd = 0x3FF; - - unsigned int layoutXfbOffset : 10; - static const unsigned int layoutXfbOffsetEnd = 0x3FF; - - unsigned int layoutAttachment : 8; // for input_attachment_index - static const unsigned int layoutAttachmentEnd = 0XFF; - - unsigned int layoutSpecConstantId : 11; - static const unsigned int layoutSpecConstantIdEnd = 0x7FF; - - TLayoutFormat layoutFormat : 8; - - bool layoutPushConstant; - -#ifdef NV_EXTENSIONS - bool layoutPassthrough; - bool layoutViewportRelative; - int layoutSecondaryViewportRelativeOffset; -#endif - - bool hasUniformLayout() const - { - return hasMatrix() || - hasPacking() || - hasOffset() || - hasBinding() || - hasSet() || - hasAlign(); - } - void clearUniformLayout() // only uniform specific - { - layoutMatrix = ElmNone; - layoutPacking = ElpNone; - layoutOffset = layoutNotSet; - layoutAlign = layoutNotSet; - - layoutSet = layoutSetEnd; - layoutBinding = layoutBindingEnd; - layoutAttachment = layoutAttachmentEnd; - } - - bool hasMatrix() const - { - return layoutMatrix != ElmNone; - } - bool hasPacking() const - { - return layoutPacking != ElpNone; - } - bool hasOffset() const - { - return layoutOffset != layoutNotSet; - } - bool hasAlign() const - { - return layoutAlign != layoutNotSet; - } - bool hasAnyLocation() const - { - return hasLocation() || - hasComponent() || - hasIndex(); - } - bool hasLocation() const - { - return layoutLocation != layoutLocationEnd; - } - bool hasComponent() const - { - return layoutComponent != layoutComponentEnd; - } - bool hasIndex() const - { - return layoutIndex != 
layoutIndexEnd; - } - bool hasSet() const - { - return layoutSet != layoutSetEnd; - } - bool hasBinding() const - { - return layoutBinding != layoutBindingEnd; - } - bool hasStream() const - { - return layoutStream != layoutStreamEnd; - } - bool hasFormat() const - { - return layoutFormat != ElfNone; - } - bool hasXfb() const - { - return hasXfbBuffer() || - hasXfbStride() || - hasXfbOffset(); - } - bool hasXfbBuffer() const - { - return layoutXfbBuffer != layoutXfbBufferEnd; - } - bool hasXfbStride() const - { - return layoutXfbStride != layoutXfbStrideEnd; - } - bool hasXfbOffset() const - { - return layoutXfbOffset != layoutXfbOffsetEnd; - } - bool hasAttachment() const - { - return layoutAttachment != layoutAttachmentEnd; - } - bool hasSpecConstantId() const - { - // Not the same thing as being a specialization constant, this - // is just whether or not it was declared with an ID. - return layoutSpecConstantId != layoutSpecConstantIdEnd; - } - bool isSpecConstant() const - { - // True if type is a specialization constant, whether or not it - // had a specialization-constant ID, and false if it is not a - // true front-end constant. - return specConstant; - } - bool isFrontEndConstant() const - { - // True if the front-end knows the final constant value. - // This allows front-end constant folding. - return storage == EvqConst && ! specConstant; - } - bool isConstant() const - { - // True if is either kind of constant; specialization or regular. - return isFrontEndConstant() || isSpecConstant(); - } - void makeSpecConstant() - { - storage = EvqConst; - specConstant = true; - } - static const char* getLayoutPackingString(TLayoutPacking packing) - { - switch (packing) { - case ElpPacked: return "packed"; - case ElpShared: return "shared"; - case ElpStd140: return "std140"; - case ElpStd430: return "std430"; - default: return "none"; - } - } - static const char* getLayoutMatrixString(TLayoutMatrix m) - { - switch (m) { - case ElmColumnMajor: return "column_major"; - case ElmRowMajor: return "row_major"; - default: return "none"; - } - } - static const char* getLayoutFormatString(TLayoutFormat f) - { - switch (f) { - case ElfRgba32f: return "rgba32f"; - case ElfRgba16f: return "rgba16f"; - case ElfRg32f: return "rg32f"; - case ElfRg16f: return "rg16f"; - case ElfR11fG11fB10f: return "r11f_g11f_b10f"; - case ElfR32f: return "r32f"; - case ElfR16f: return "r16f"; - case ElfRgba16: return "rgba16"; - case ElfRgb10A2: return "rgb10_a2"; - case ElfRgba8: return "rgba8"; - case ElfRg16: return "rg16"; - case ElfRg8: return "rg8"; - case ElfR16: return "r16"; - case ElfR8: return "r8"; - case ElfRgba16Snorm: return "rgba16_snorm"; - case ElfRgba8Snorm: return "rgba8_snorm"; - case ElfRg16Snorm: return "rg16_snorm"; - case ElfRg8Snorm: return "rg8_snorm"; - case ElfR16Snorm: return "r16_snorm"; - case ElfR8Snorm: return "r8_snorm"; - - case ElfRgba32i: return "rgba32i"; - case ElfRgba16i: return "rgba16i"; - case ElfRgba8i: return "rgba8i"; - case ElfRg32i: return "rg32i"; - case ElfRg16i: return "rg16i"; - case ElfRg8i: return "rg8i"; - case ElfR32i: return "r32i"; - case ElfR16i: return "r16i"; - case ElfR8i: return "r8i"; - - case ElfRgba32ui: return "rgba32ui"; - case ElfRgba16ui: return "rgba16ui"; - case ElfRgba8ui: return "rgba8ui"; - case ElfRg32ui: return "rg32ui"; - case ElfRg16ui: return "rg16ui"; - case ElfRgb10a2ui: return "rgb10_a2ui"; - case ElfRg8ui: return "rg8ui"; - case ElfR32ui: return "r32ui"; - case ElfR16ui: return "r16ui"; - case ElfR8ui: return "r8ui"; - default: return 
"none"; - } - } - static const char* getLayoutDepthString(TLayoutDepth d) - { - switch (d) { - case EldAny: return "depth_any"; - case EldGreater: return "depth_greater"; - case EldLess: return "depth_less"; - case EldUnchanged: return "depth_unchanged"; - default: return "none"; - } - } - static const char* getBlendEquationString(TBlendEquationShift e) - { - switch (e) { - case EBlendMultiply: return "blend_support_multiply"; - case EBlendScreen: return "blend_support_screen"; - case EBlendOverlay: return "blend_support_overlay"; - case EBlendDarken: return "blend_support_darken"; - case EBlendLighten: return "blend_support_lighten"; - case EBlendColordodge: return "blend_support_colordodge"; - case EBlendColorburn: return "blend_support_colorburn"; - case EBlendHardlight: return "blend_support_hardlight"; - case EBlendSoftlight: return "blend_support_softlight"; - case EBlendDifference: return "blend_support_difference"; - case EBlendExclusion: return "blend_support_exclusion"; - case EBlendHslHue: return "blend_support_hsl_hue"; - case EBlendHslSaturation: return "blend_support_hsl_saturation"; - case EBlendHslColor: return "blend_support_hsl_color"; - case EBlendHslLuminosity: return "blend_support_hsl_luminosity"; - case EBlendAllEquations: return "blend_support_all_equations"; - default: return "unknown"; - } - } - static const char* getGeometryString(TLayoutGeometry geometry) - { - switch (geometry) { - case ElgPoints: return "points"; - case ElgLines: return "lines"; - case ElgLinesAdjacency: return "lines_adjacency"; - case ElgLineStrip: return "line_strip"; - case ElgTriangles: return "triangles"; - case ElgTrianglesAdjacency: return "triangles_adjacency"; - case ElgTriangleStrip: return "triangle_strip"; - case ElgQuads: return "quads"; - case ElgIsolines: return "isolines"; - default: return "none"; - } - } - static const char* getVertexSpacingString(TVertexSpacing spacing) - { - switch (spacing) { - case EvsEqual: return "equal_spacing"; - case EvsFractionalEven: return "fractional_even_spacing"; - case EvsFractionalOdd: return "fractional_odd_spacing"; - default: return "none"; - } - } - static const char* getVertexOrderString(TVertexOrder order) - { - switch (order) { - case EvoCw: return "cw"; - case EvoCcw: return "ccw"; - default: return "none"; - } - } - static int mapGeometryToSize(TLayoutGeometry geometry) - { - switch (geometry) { - case ElgPoints: return 1; - case ElgLines: return 2; - case ElgLinesAdjacency: return 4; - case ElgTriangles: return 3; - case ElgTrianglesAdjacency: return 6; - default: return 0; - } - } -}; - -// Qualifiers that don't need to be keep per object. They have shader scope, not object scope. -// So, they will not be part of TType, TQualifier, etc. 
-struct TShaderQualifiers { - TLayoutGeometry geometry; // geometry/tessellation shader in/out primitives - bool pixelCenterInteger; // fragment shader - bool originUpperLeft; // fragment shader - int invocations; - int vertices; // both for tessellation "vertices" and geometry "max_vertices" - TVertexSpacing spacing; - TVertexOrder order; - bool pointMode; - int localSize[3]; // compute shader - int localSizeSpecId[3]; // compute shader specialization id for gl_WorkGroupSize - bool earlyFragmentTests; // fragment input - bool postDepthCoverage; // fragment input - TLayoutDepth layoutDepth; - bool blendEquation; // true if any blend equation was specified - int numViews; // multiview extenstions - -#ifdef NV_EXTENSIONS - bool layoutOverrideCoverage; // true if layout override_coverage set -#endif - - void init() - { - geometry = ElgNone; - originUpperLeft = false; - pixelCenterInteger = false; - invocations = TQualifier::layoutNotSet; - vertices = TQualifier::layoutNotSet; - spacing = EvsNone; - order = EvoNone; - pointMode = false; - localSize[0] = 1; - localSize[1] = 1; - localSize[2] = 1; - localSizeSpecId[0] = TQualifier::layoutNotSet; - localSizeSpecId[1] = TQualifier::layoutNotSet; - localSizeSpecId[2] = TQualifier::layoutNotSet; - earlyFragmentTests = false; - postDepthCoverage = false; - layoutDepth = EldNone; - blendEquation = false; - numViews = TQualifier::layoutNotSet; -#ifdef NV_EXTENSIONS - layoutOverrideCoverage = false; -#endif - } - - // Merge in characteristics from the 'src' qualifier. They can override when - // set, but never erase when not set. - void merge(const TShaderQualifiers& src) - { - if (src.geometry != ElgNone) - geometry = src.geometry; - if (src.pixelCenterInteger) - pixelCenterInteger = src.pixelCenterInteger; - if (src.originUpperLeft) - originUpperLeft = src.originUpperLeft; - if (src.invocations != TQualifier::layoutNotSet) - invocations = src.invocations; - if (src.vertices != TQualifier::layoutNotSet) - vertices = src.vertices; - if (src.spacing != EvsNone) - spacing = src.spacing; - if (src.order != EvoNone) - order = src.order; - if (src.pointMode) - pointMode = true; - for (int i = 0; i < 3; ++i) { - if (src.localSize[i] > 1) - localSize[i] = src.localSize[i]; - } - for (int i = 0; i < 3; ++i) { - if (src.localSizeSpecId[i] != TQualifier::layoutNotSet) - localSizeSpecId[i] = src.localSizeSpecId[i]; - } - if (src.earlyFragmentTests) - earlyFragmentTests = true; - if (src.postDepthCoverage) - postDepthCoverage = true; - if (src.layoutDepth) - layoutDepth = src.layoutDepth; - if (src.blendEquation) - blendEquation = src.blendEquation; - if (src.numViews != TQualifier::layoutNotSet) - numViews = src.numViews; -#ifdef NV_EXTENSIONS - if (src.layoutOverrideCoverage) - layoutOverrideCoverage = src.layoutOverrideCoverage; -#endif - } -}; - -// -// TPublicType is just temporarily used while parsing and not quite the same -// information kept per node in TType. Due to the bison stack, it can't have -// types that it thinks have non-trivial constructors. It should -// just be used while recognizing the grammar, not anything else. -// Once enough is known about the situation, the proper information -// moved into a TType, or the parse context, etc. 
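The merge() rule in TShaderQualifiers above ("override when set, never erase when not set") is easiest to see with a small example. Illustrative only, assuming the Types.h removed above; the wrapper function name is invented.

    void sketchMerge()
    {
        glslang::TShaderQualifiers dst, src;
        dst.init();
        src.init();
        src.vertices  = 3;      // only fields actually set in src...
        src.pointMode = true;
        dst.merge(src);         // ...override dst; fields src left at their
                                // init() defaults never erase existing dst state
    }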
-// -class TPublicType { -public: - TBasicType basicType; - TSampler sampler; - TQualifier qualifier; - TShaderQualifiers shaderQualifiers; - int vectorSize : 4; - int matrixCols : 4; - int matrixRows : 4; - TArraySizes* arraySizes; - const TType* userDef; - TSourceLoc loc; - - void initType(const TSourceLoc& l) - { - basicType = EbtVoid; - vectorSize = 1; - matrixRows = 0; - matrixCols = 0; - arraySizes = nullptr; - userDef = nullptr; - loc = l; - } - - void initQualifiers(bool global = false) - { - qualifier.clear(); - if (global) - qualifier.storage = EvqGlobal; - } - - void init(const TSourceLoc& l, bool global = false) - { - initType(l); - sampler.clear(); - initQualifiers(global); - shaderQualifiers.init(); - } - - void setVector(int s) - { - matrixRows = 0; - matrixCols = 0; - vectorSize = s; - } - - void setMatrix(int c, int r) - { - matrixRows = r; - matrixCols = c; - vectorSize = 0; - } - - bool isScalar() const - { - return matrixCols == 0 && vectorSize == 1 && arraySizes == nullptr && userDef == nullptr; - } - - // "Image" is a superset of "Subpass" - bool isImage() const { return basicType == EbtSampler && sampler.isImage(); } - bool isSubpass() const { return basicType == EbtSampler && sampler.isSubpass(); } -}; - -// -// Base class for things that have a type. -// -class TType { -public: - POOL_ALLOCATOR_NEW_DELETE(GetThreadPoolAllocator()) - - // for "empty" type (no args) or simple scalar/vector/matrix - explicit TType(TBasicType t = EbtVoid, TStorageQualifier q = EvqTemporary, int vs = 1, int mc = 0, int mr = 0, - bool isVector = false) : - basicType(t), vectorSize(vs), matrixCols(mc), matrixRows(mr), vector1(isVector && vs == 1), - arraySizes(nullptr), structure(nullptr), fieldName(nullptr), typeName(nullptr) - { - sampler.clear(); - qualifier.clear(); - qualifier.storage = q; - } - // for explicit precision qualifier - TType(TBasicType t, TStorageQualifier q, TPrecisionQualifier p, int vs = 1, int mc = 0, int mr = 0, - bool isVector = false) : - basicType(t), vectorSize(vs), matrixCols(mc), matrixRows(mr), vector1(isVector && vs == 1), - arraySizes(nullptr), structure(nullptr), fieldName(nullptr), typeName(nullptr) - { - sampler.clear(); - qualifier.clear(); - qualifier.storage = q; - qualifier.precision = p; - assert(p >= EpqNone && p <= EpqHigh); - } - // for turning a TPublicType into a TType, using a shallow copy - explicit TType(const TPublicType& p) : - basicType(p.basicType), - vectorSize(p.vectorSize), matrixCols(p.matrixCols), matrixRows(p.matrixRows), vector1(false), - arraySizes(p.arraySizes), structure(nullptr), fieldName(nullptr), typeName(nullptr) - { - if (basicType == EbtSampler) - sampler = p.sampler; - else - sampler.clear(); - qualifier = p.qualifier; - if (p.userDef) { - structure = p.userDef->getWritableStruct(); // public type is short-lived; there are no sharing issues - typeName = NewPoolTString(p.userDef->getTypeName().c_str()); - } - } - // for construction of sampler types - TType(const TSampler& sampler, TStorageQualifier q = EvqUniform, TArraySizes* as = nullptr) : - basicType(EbtSampler), vectorSize(1), matrixCols(0), matrixRows(0), vector1(false), - arraySizes(as), structure(nullptr), fieldName(nullptr), typeName(nullptr), - sampler(sampler) - { - qualifier.clear(); - qualifier.storage = q; - } - // to efficiently make a dereferenced type - // without ever duplicating the outer structure that will be thrown away - // and using only shallow copy - TType(const TType& type, int derefIndex, bool rowMajor = false) - { - if (type.isArray()) { - 
shallowCopy(type); - if (type.getArraySizes()->getNumDims() == 1) { - arraySizes = nullptr; - } else { - // want our own copy of the array, so we can edit it - arraySizes = new TArraySizes; - arraySizes->copyDereferenced(*type.arraySizes); - } - } else if (type.basicType == EbtStruct || type.basicType == EbtBlock) { - // do a structure dereference - const TTypeList& memberList = *type.getStruct(); - shallowCopy(*memberList[derefIndex].type); - return; - } else { - // do a vector/matrix dereference - shallowCopy(type); - if (matrixCols > 0) { - // dereference from matrix to vector - if (rowMajor) - vectorSize = matrixCols; - else - vectorSize = matrixRows; - matrixCols = 0; - matrixRows = 0; - if (vectorSize == 1) - vector1 = true; - } else if (isVector()) { - // dereference from vector to scalar - vectorSize = 1; - vector1 = false; - } - } - } - // for making structures, ... - TType(TTypeList* userDef, const TString& n) : - basicType(EbtStruct), vectorSize(1), matrixCols(0), matrixRows(0), vector1(false), - arraySizes(nullptr), structure(userDef), fieldName(nullptr) - { - sampler.clear(); - qualifier.clear(); - typeName = NewPoolTString(n.c_str()); - } - // For interface blocks - TType(TTypeList* userDef, const TString& n, const TQualifier& q) : - basicType(EbtBlock), vectorSize(1), matrixCols(0), matrixRows(0), vector1(false), - qualifier(q), arraySizes(nullptr), structure(userDef), fieldName(nullptr) - { - sampler.clear(); - typeName = NewPoolTString(n.c_str()); - } - virtual ~TType() {} - - // Not for use across pool pops; it will cause multiple instances of TType to point to the same information. - // This only works if that information (like a structure's list of types) does not change and - // the instances are sharing the same pool. - void shallowCopy(const TType& copyOf) - { - basicType = copyOf.basicType; - sampler = copyOf.sampler; - qualifier = copyOf.qualifier; - vectorSize = copyOf.vectorSize; - matrixCols = copyOf.matrixCols; - matrixRows = copyOf.matrixRows; - vector1 = copyOf.vector1; - arraySizes = copyOf.arraySizes; // copying the pointer only, not the contents - structure = copyOf.structure; - fieldName = copyOf.fieldName; - typeName = copyOf.typeName; - } - - // Make complete copy of the whole type graph rooted at 'copyOf'. - void deepCopy(const TType& copyOf) - { - TMap copied; // to enable copying a type graph as a graph, not a tree - deepCopy(copyOf, copied); - } - - // Recursively make temporary - void makeTemporary() - { - getQualifier().makeTemporary(); - - if (isStruct()) - for (unsigned int i = 0; i < structure->size(); ++i) - (*structure)[i].type->makeTemporary(); - } - - TType* clone() const - { - TType *newType = new TType(); - newType->deepCopy(*this); - - return newType; - } - - void makeVector() { vector1 = true; } - - // Merge type from parent, where a parentType is at the beginning of a declaration, - // establishing some characteristics for all subsequent names, while this type - // is on the individual names. 
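The comments above distinguish shallow copies (pointer sharing, unsafe across pool pops) from clone()/deepCopy() (duplicating the whole type graph while preserving internal sharing). A hedged sketch, assuming the Types.h removed above and a thread pool allocator already installed as in the PoolAlloc sketch earlier; the function name is invented.

    void sketchCopies(const glslang::TType& original)
    {
        glslang::TType alias;
        alias.shallowCopy(original);        // shares arraySizes/structure/typeName pointers

        glslang::TType* independent = original.clone();  // deepCopy(): duplicates the type
                                                         // graph, keeping sharing inside it
        (void)independent;                  // pool-allocated; reclaimed wholesale at pop()
    }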
- void mergeType(const TPublicType& parentType) - { - // arrayness is currently the only child aspect that has to be preserved - basicType = parentType.basicType; - vectorSize = parentType.vectorSize; - matrixCols = parentType.matrixCols; - matrixRows = parentType.matrixRows; - vector1 = false; // TPublicType is only GLSL which so far has no vec1 - qualifier = parentType.qualifier; - sampler = parentType.sampler; - if (parentType.arraySizes) - newArraySizes(*parentType.arraySizes); - if (parentType.userDef) { - structure = parentType.userDef->getWritableStruct(); - setTypeName(parentType.userDef->getTypeName()); - } - } - - virtual void hideMember() { basicType = EbtVoid; vectorSize = 1; } - virtual bool hiddenMember() const { return basicType == EbtVoid; } - - virtual void setTypeName(const TString& n) { typeName = NewPoolTString(n.c_str()); } - virtual void setFieldName(const TString& n) { fieldName = NewPoolTString(n.c_str()); } - virtual const TString& getTypeName() const - { - assert(typeName); - return *typeName; - } - - virtual const TString& getFieldName() const - { - assert(fieldName); - return *fieldName; - } - - virtual TBasicType getBasicType() const { return basicType; } - virtual const TSampler& getSampler() const { return sampler; } - virtual TSampler& getSampler() { return sampler; } - - virtual TQualifier& getQualifier() { return qualifier; } - virtual const TQualifier& getQualifier() const { return qualifier; } - - virtual int getVectorSize() const { return vectorSize; } // returns 1 for either scalar or vector of size 1, valid for both - virtual int getMatrixCols() const { return matrixCols; } - virtual int getMatrixRows() const { return matrixRows; } - virtual int getOuterArraySize() const { return arraySizes->getOuterSize(); } - virtual TIntermTyped* getOuterArrayNode() const { return arraySizes->getOuterNode(); } - virtual int getCumulativeArraySize() const { return arraySizes->getCumulativeSize(); } - virtual bool isArrayOfArrays() const { return arraySizes != nullptr && arraySizes->getNumDims() > 1; } - virtual int getImplicitArraySize() const { return arraySizes->getImplicitSize(); } - virtual const TArraySizes* getArraySizes() const { return arraySizes; } - virtual TArraySizes& getArraySizes() { assert(arraySizes != nullptr); return *arraySizes; } - - virtual bool isScalar() const { return ! isVector() && ! isMatrix() && ! isStruct() && ! isArray(); } - virtual bool isScalarOrVec1() const { return isScalar() || vector1; } - virtual bool isVector() const { return vectorSize > 1 || vector1; } - virtual bool isMatrix() const { return matrixCols ? 
true : false; } - virtual bool isArray() const { return arraySizes != nullptr; } - virtual bool isExplicitlySizedArray() const { return isArray() && getOuterArraySize() != UnsizedArraySize; } - virtual bool isImplicitlySizedArray() const { return isArray() && getOuterArraySize() == UnsizedArraySize && qualifier.storage != EvqBuffer; } - virtual bool isRuntimeSizedArray() const { return isArray() && getOuterArraySize() == UnsizedArraySize && qualifier.storage == EvqBuffer; } - virtual bool isStruct() const { return structure != nullptr; } -#ifdef AMD_EXTENSIONS - virtual bool isFloatingDomain() const { return basicType == EbtFloat || basicType == EbtDouble || basicType == EbtFloat16; } -#else - virtual bool isFloatingDomain() const { return basicType == EbtFloat || basicType == EbtDouble; } -#endif - virtual bool isIntegerDomain() const - { - switch (basicType) { - case EbtInt: - case EbtUint: - case EbtInt64: - case EbtUint64: -#ifdef AMD_EXTENSIONS - case EbtInt16: - case EbtUint16: -#endif - case EbtAtomicUint: - return true; - default: - break; - } - return false; - } - virtual bool isOpaque() const { return basicType == EbtSampler || basicType == EbtAtomicUint; } - virtual bool isBuiltIn() const { return getQualifier().builtIn != EbvNone; } - - // "Image" is a superset of "Subpass" - virtual bool isImage() const { return basicType == EbtSampler && getSampler().isImage(); } - virtual bool isSubpass() const { return basicType == EbtSampler && getSampler().isSubpass(); } - - // return true if this type contains any subtype which satisfies the given predicate. - template - bool contains(P predicate) const - { - if (predicate(this)) - return true; - - const auto hasa = [predicate](const TTypeLoc& tl) { return tl.type->contains(predicate); }; - - return structure && std::any_of(structure->begin(), structure->end(), hasa); - } - - // Recursively checks if the type contains the given basic type - virtual bool containsBasicType(TBasicType checkType) const - { - return contains([checkType](const TType* t) { return t->basicType == checkType; } ); - } - - // Recursively check the structure for any arrays, needed for some error checks - virtual bool containsArray() const - { - return contains([](const TType* t) { return t->isArray(); } ); - } - - // Check the structure for any structures, needed for some error checks - virtual bool containsStructure() const - { - return contains([this](const TType* t) { return t != this && t->isStruct(); } ); - } - - // Recursively check the structure for any implicitly-sized arrays, needed for triggering a copyUp(). 
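The contains() helper above turns recursive type-graph queries into one-liners, and the named variants (containsBasicType, containsArray, containsStructure, ...) are thin wrappers over it. A sketch of an ad-hoc query written the same way (illustrative only; the function name is invented):

    bool usesDoublePrecision(const glslang::TType& type)
    {
        // Walks the type and, for structs/blocks, every nested member type.
        return type.contains([](const glslang::TType* t) {
            return t->getBasicType() == glslang::EbtDouble;
        });
    }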
- virtual bool containsImplicitlySizedArray() const - { - return contains([](const TType* t) { return t->isImplicitlySizedArray(); } ); - } - - virtual bool containsOpaque() const - { - return contains([](const TType* t) { return t->isOpaque(); } ); - } - - // Recursively checks if the type contains a built-in variable - virtual bool containsBuiltIn() const - { - return contains([](const TType* t) { return t->isBuiltIn(); } ); - } - - virtual bool containsNonOpaque() const - { - const auto nonOpaque = [](const TType* t) { - switch (t->basicType) { - case EbtVoid: - case EbtFloat: - case EbtDouble: -#ifdef AMD_EXTENSIONS - case EbtFloat16: -#endif - case EbtInt: - case EbtUint: - case EbtInt64: - case EbtUint64: -#ifdef AMD_EXTENSIONS - case EbtInt16: - case EbtUint16: -#endif - case EbtBool: - return true; - default: - return false; - } - }; - - return contains(nonOpaque); - } - - virtual bool containsSpecializationSize() const - { - return contains([](const TType* t) { return t->isArray() && t->arraySizes->isOuterSpecialization(); } ); - } - - // Array editing methods. Array descriptors can be shared across - // type instances. This allows all uses of the same array - // to be updated at once. E.g., all nodes can be explicitly sized - // by tracking and correcting one implicit size. Or, all nodes - // can get the explicit size on a redeclaration that gives size. - // - // N.B.: Don't share with the shared symbol tables (symbols are - // marked as isReadOnly(). Such symbols with arrays that will be - // edited need to copyUp() on first use, so that - // A) the edits don't effect the shared symbol table, and - // B) the edits are shared across all users. - void updateArraySizes(const TType& type) - { - // For when we may already be sharing existing array descriptors, - // keeping the pointers the same, just updating the contents. - assert(arraySizes != nullptr); - assert(type.arraySizes != nullptr); - *arraySizes = *type.arraySizes; - } - void newArraySizes(const TArraySizes& s) - { - // For setting a fresh new set of array sizes, not yet worrying about sharing. - arraySizes = new TArraySizes; - *arraySizes = s; - } - void clearArraySizes() - { - arraySizes = 0; - } - void addArrayOuterSizes(const TArraySizes& s) - { - if (arraySizes == nullptr) - newArraySizes(s); - else - arraySizes->addOuterSizes(s); - } - void changeOuterArraySize(int s) { arraySizes->changeOuterSize(s); } - void setImplicitArraySize(int s) { arraySizes->setImplicitSize(s); } - - // Recursively make the implicit array size the explicit array size, through the type tree. 
- void adoptImplicitArraySizes() - { - if (isImplicitlySizedArray()) - changeOuterArraySize(getImplicitArraySize()); - if (isStruct()) { - for (int i = 0; i < (int)structure->size(); ++i) - (*structure)[i].type->adoptImplicitArraySizes(); - } - } - - const char* getBasicString() const - { - return TType::getBasicString(basicType); - } - - static const char* getBasicString(TBasicType t) - { - switch (t) { - case EbtVoid: return "void"; - case EbtFloat: return "float"; - case EbtDouble: return "double"; -#ifdef AMD_EXTENSIONS - case EbtFloat16: return "float16_t"; -#endif - case EbtInt: return "int"; - case EbtUint: return "uint"; - case EbtInt64: return "int64_t"; - case EbtUint64: return "uint64_t"; -#ifdef AMD_EXTENSIONS - case EbtInt16: return "int16_t"; - case EbtUint16: return "uint16_t"; -#endif - case EbtBool: return "bool"; - case EbtAtomicUint: return "atomic_uint"; - case EbtSampler: return "sampler/image"; - case EbtStruct: return "structure"; - case EbtBlock: return "block"; - default: return "unknown type"; - } - } - - TString getCompleteString() const - { - TString typeString; - - const auto appendStr = [&](const char* s) { typeString.append(s); }; - const auto appendUint = [&](unsigned int u) { typeString.append(std::to_string(u).c_str()); }; - const auto appendInt = [&](int i) { typeString.append(std::to_string(i).c_str()); }; - - if (qualifier.hasLayout()) { - // To reduce noise, skip this if the only layout is an xfb_buffer - // with no triggering xfb_offset. - TQualifier noXfbBuffer = qualifier; - noXfbBuffer.layoutXfbBuffer = TQualifier::layoutXfbBufferEnd; - if (noXfbBuffer.hasLayout()) { - appendStr("layout("); - if (qualifier.hasAnyLocation()) { - appendStr(" location="); - appendUint(qualifier.layoutLocation); - if (qualifier.hasComponent()) { - appendStr(" component="); - appendUint(qualifier.layoutComponent); - } - if (qualifier.hasIndex()) { - appendStr(" index="); - appendUint(qualifier.layoutIndex); - } - } - if (qualifier.hasSet()) { - appendStr(" set="); - appendUint(qualifier.layoutSet); - } - if (qualifier.hasBinding()) { - appendStr(" binding="); - appendUint(qualifier.layoutBinding); - } - if (qualifier.hasStream()) { - appendStr(" stream="); - appendUint(qualifier.layoutStream); - } - if (qualifier.hasMatrix()) { - appendStr(" "); - appendStr(TQualifier::getLayoutMatrixString(qualifier.layoutMatrix)); - } - if (qualifier.hasPacking()) { - appendStr(" "); - appendStr(TQualifier::getLayoutPackingString(qualifier.layoutPacking)); - } - if (qualifier.hasOffset()) { - appendStr(" offset="); - appendInt(qualifier.layoutOffset); - } - if (qualifier.hasAlign()) { - appendStr(" align="); - appendInt(qualifier.layoutAlign); - } - if (qualifier.hasFormat()) { - appendStr(" "); - appendStr(TQualifier::getLayoutFormatString(qualifier.layoutFormat)); - } - if (qualifier.hasXfbBuffer() && qualifier.hasXfbOffset()) { - appendStr(" xfb_buffer="); - appendUint(qualifier.layoutXfbBuffer); - } - if (qualifier.hasXfbOffset()) { - appendStr(" xfb_offset="); - appendUint(qualifier.layoutXfbOffset); - } - if (qualifier.hasXfbStride()) { - appendStr(" xfb_stride="); - appendUint(qualifier.layoutXfbStride); - } - if (qualifier.hasAttachment()) { - appendStr(" input_attachment_index="); - appendUint(qualifier.layoutAttachment); - } - if (qualifier.hasSpecConstantId()) { - appendStr(" constant_id="); - appendUint(qualifier.layoutSpecConstantId); - } - if (qualifier.layoutPushConstant) - appendStr(" push_constant"); - -#ifdef NV_EXTENSIONS - if (qualifier.layoutPassthrough) - 
appendStr(" passthrough"); - if (qualifier.layoutViewportRelative) - appendStr(" layoutViewportRelative"); - if (qualifier.layoutSecondaryViewportRelativeOffset != -2048) { - appendStr(" layoutSecondaryViewportRelativeOffset="); - appendInt(qualifier.layoutSecondaryViewportRelativeOffset); - } -#endif - - appendStr(")"); - } - } - - if (qualifier.invariant) - appendStr(" invariant"); - if (qualifier.noContraction) - appendStr(" noContraction"); - if (qualifier.centroid) - appendStr(" centroid"); - if (qualifier.smooth) - appendStr(" smooth"); - if (qualifier.flat) - appendStr(" flat"); - if (qualifier.nopersp) - appendStr(" noperspective"); -#ifdef AMD_EXTENSIONS - if (qualifier.explicitInterp) - appendStr(" __explicitInterpAMD"); -#endif - if (qualifier.patch) - appendStr(" patch"); - if (qualifier.sample) - appendStr(" sample"); - if (qualifier.coherent) - appendStr(" coherent"); - if (qualifier.volatil) - appendStr(" volatile"); - if (qualifier.restrict) - appendStr(" restrict"); - if (qualifier.readonly) - appendStr(" readonly"); - if (qualifier.writeonly) - appendStr(" writeonly"); - if (qualifier.specConstant) - appendStr(" specialization-constant"); - appendStr(" "); - appendStr(getStorageQualifierString()); - if (isArray()) { - for(int i = 0; i < (int)arraySizes->getNumDims(); ++i) { - int size = arraySizes->getDimSize(i); - if (size == 0) - appendStr(" implicitly-sized array of"); - else { - appendStr(" "); - appendInt(arraySizes->getDimSize(i)); - appendStr("-element array of"); - } - } - } - if (qualifier.precision != EpqNone) { - appendStr(" "); - appendStr(getPrecisionQualifierString()); - } - if (isMatrix()) { - appendStr(" "); - appendInt(matrixCols); - appendStr("X"); - appendInt(matrixRows); - appendStr(" matrix of"); - } else if (isVector()) { - appendStr(" "); - appendInt(vectorSize); - appendStr("-component vector of"); - } - - appendStr(" "); - typeString.append(getBasicTypeString()); - - if (qualifier.builtIn != EbvNone) { - appendStr(" "); - appendStr(getBuiltInVariableString()); - } - - // Add struct/block members - if (structure) { - appendStr("{"); - for (size_t i = 0; i < structure->size(); ++i) { - if (! 
(*structure)[i].type->hiddenMember()) { - typeString.append((*structure)[i].type->getCompleteString()); - typeString.append(" "); - typeString.append((*structure)[i].type->getFieldName()); - if (i < structure->size() - 1) - appendStr(", "); - } - } - appendStr("}"); - } - - return typeString; - } - - TString getBasicTypeString() const - { - if (basicType == EbtSampler) - return sampler.getString(); - else - return getBasicString(); - } - - const char* getStorageQualifierString() const { return GetStorageQualifierString(qualifier.storage); } - const char* getBuiltInVariableString() const { return GetBuiltInVariableString(qualifier.builtIn); } - const char* getPrecisionQualifierString() const { return GetPrecisionQualifierString(qualifier.precision); } - const TTypeList* getStruct() const { return structure; } - void setStruct(TTypeList* s) { structure = s; } - TTypeList* getWritableStruct() const { return structure; } // This should only be used when known to not be sharing with other threads - - int computeNumComponents() const - { - int components = 0; - - if (getBasicType() == EbtStruct || getBasicType() == EbtBlock) { - for (TTypeList::const_iterator tl = getStruct()->begin(); tl != getStruct()->end(); tl++) - components += ((*tl).type)->computeNumComponents(); - } else if (matrixCols) - components = matrixCols * matrixRows; - else - components = vectorSize; - - if (arraySizes != nullptr) { - components *= arraySizes->getCumulativeSize(); - } - - return components; - } - - // append this type's mangled name to the passed in 'name' - void appendMangledName(TString& name) const - { - buildMangledName(name); - name += ';' ; - } - - // Do two structure types match? They could be declared independently, - // in different places, but still might satisfy the definition of matching. - // From the spec: - // - // "Structures must have the same name, sequence of type names, and - // type definitions, and member names to be considered the same type. - // This rule applies recursively for nested or embedded types." 
- // - bool sameStructType(const TType& right) const - { - // Most commonly, they are both nullptr, or the same pointer to the same actual structure - if (structure == right.structure) - return true; - - // Both being nullptr was caught above, now they both have to be structures of the same number of elements - if (structure == nullptr || right.structure == nullptr || - structure->size() != right.structure->size()) - return false; - - // Structure names have to match - if (*typeName != *right.typeName) - return false; - - // Compare the names and types of all the members, which have to match - for (unsigned int i = 0; i < structure->size(); ++i) { - if ((*structure)[i].type->getFieldName() != (*right.structure)[i].type->getFieldName()) - return false; - - if (*(*structure)[i].type != *(*right.structure)[i].type) - return false; - } - - return true; - } - - // See if two types match, in all aspects except arrayness - bool sameElementType(const TType& right) const - { - return basicType == right.basicType && sameElementShape(right); - } - - // See if two type's arrayness match - bool sameArrayness(const TType& right) const - { - return ((arraySizes == nullptr && right.arraySizes == nullptr) || - (arraySizes != nullptr && right.arraySizes != nullptr && *arraySizes == *right.arraySizes)); - } - - // See if two type's arrayness match in everything except their outer dimension - bool sameInnerArrayness(const TType& right) const - { - assert(arraySizes != nullptr && right.arraySizes != nullptr); - return arraySizes->sameInnerArrayness(*right.arraySizes); - } - - // See if two type's elements match in all ways except basic type - bool sameElementShape(const TType& right) const - { - return sampler == right.sampler && - vectorSize == right.vectorSize && - matrixCols == right.matrixCols && - matrixRows == right.matrixRows && - vector1 == right.vector1 && - sameStructType(right); - } - - // See if two types match in all ways (just the actual type, not qualification) - bool operator==(const TType& right) const - { - return sameElementType(right) && sameArrayness(right); - } - - bool operator!=(const TType& right) const - { - return ! operator==(right); - } - -protected: - // Require consumer to pick between deep copy and shallow copy. - TType(const TType& type); - TType& operator=(const TType& type); - - // Recursively copy a type graph, while preserving the graph-like - // quality. That is, don't make more than one copy of a structure that - // gets reused multiple times in the type graph. - void deepCopy(const TType& copyOf, TMap& copiedMap) - { - shallowCopy(copyOf); - - if (copyOf.arraySizes) { - arraySizes = new TArraySizes; - *arraySizes = *copyOf.arraySizes; - } - - if (copyOf.structure) { - auto prevCopy = copiedMap.find(copyOf.structure); - if (prevCopy != copiedMap.end()) - structure = prevCopy->second; - else { - structure = new TTypeList; - copiedMap[copyOf.structure] = structure; - for (unsigned int i = 0; i < copyOf.structure->size(); ++i) { - TTypeLoc typeLoc; - typeLoc.loc = (*copyOf.structure)[i].loc; - typeLoc.type = new TType(); - typeLoc.type->deepCopy(*(*copyOf.structure)[i].type, copiedMap); - structure->push_back(typeLoc); - } - } - } - - if (copyOf.fieldName) - fieldName = NewPoolTString(copyOf.fieldName->c_str()); - if (copyOf.typeName) - typeName = NewPoolTString(copyOf.typeName->c_str()); - } - - - void buildMangledName(TString&) const; - - TBasicType basicType : 8; - int vectorSize : 4; // 1 means either scalar or 1-component vector; see vector1 to disambiguate. 
- int matrixCols : 4; - int matrixRows : 4; - bool vector1 : 1; // Backward-compatible tracking of a 1-component vector distinguished from a scalar. - // GLSL 4.5 never has a 1-component vector; so this will always be false until such - // functionality is added. - // HLSL does have a 1-component vectors, so this will be true to disambiguate - // from a scalar. - TQualifier qualifier; - - TArraySizes* arraySizes; // nullptr unless an array; can be shared across types - TTypeList* structure; // nullptr unless this is a struct; can be shared across types - TString *fieldName; // for structure field names - TString *typeName; // for structure type name - TSampler sampler; -}; - -} // end namespace glslang - -#endif // _TYPES_INCLUDED_ diff --git a/deps/glslang/glslang/Include/arrays.h b/deps/glslang/glslang/Include/arrays.h deleted file mode 100644 index bc21c6c5..00000000 --- a/deps/glslang/glslang/Include/arrays.h +++ /dev/null @@ -1,320 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2012-2013 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -// -// Implement types for tracking GLSL arrays, arrays of arrays, etc. -// - -#ifndef _ARRAYS_INCLUDED -#define _ARRAYS_INCLUDED - -namespace glslang { - -// This is used to mean there is no size yet (unsized), it is waiting to get a size from somewhere else. -const int UnsizedArraySize = 0; - -class TIntermTyped; -extern bool SameSpecializationConstants(TIntermTyped*, TIntermTyped*); - -// Specialization constants need both a nominal size and a node that defines -// the specialization constant being used. Array types are the same when their -// size and specialization constant nodes are the same. 
-struct TArraySize { - unsigned int size; - TIntermTyped* node; // nullptr means no specialization constant node - bool operator==(const TArraySize& rhs) const - { - if (size != rhs.size) - return false; - if (node == nullptr || rhs.node == nullptr) - return node == rhs.node; - - return SameSpecializationConstants(node, rhs.node); - } -}; - -// -// TSmallArrayVector is used as the container for the set of sizes in TArraySizes. -// It has generic-container semantics, while TArraySizes has array-of-array semantics. -// That is, TSmallArrayVector should be more focused on mechanism and TArraySizes on policy. -// -struct TSmallArrayVector { - // - // TODO: memory: TSmallArrayVector is intended to be smaller. - // Almost all arrays could be handled by two sizes each fitting - // in 16 bits, needing a real vector only in the cases where there - // are more than 3 sizes or a size needing more than 16 bits. - // - POOL_ALLOCATOR_NEW_DELETE(GetThreadPoolAllocator()) - - TSmallArrayVector() : sizes(nullptr) { } - virtual ~TSmallArrayVector() { dealloc(); } - - // For breaking into two non-shared copies, independently modifiable. - TSmallArrayVector& operator=(const TSmallArrayVector& from) - { - if (from.sizes == nullptr) - sizes = nullptr; - else { - alloc(); - *sizes = *from.sizes; - } - - return *this; - } - - int size() const - { - if (sizes == nullptr) - return 0; - return (int)sizes->size(); - } - - unsigned int frontSize() const - { - assert(sizes != nullptr && sizes->size() > 0); - return sizes->front().size; - } - - TIntermTyped* frontNode() const - { - assert(sizes != nullptr && sizes->size() > 0); - return sizes->front().node; - } - - void changeFront(unsigned int s) - { - assert(sizes != nullptr); - // this should only happen for implicitly sized arrays, not specialization constants - assert(sizes->front().node == nullptr); - sizes->front().size = s; - } - - void push_back(unsigned int e, TIntermTyped* n) - { - alloc(); - TArraySize pair = { e, n }; - sizes->push_back(pair); - } - - void push_front(const TSmallArrayVector& newDims) - { - alloc(); - sizes->insert(sizes->begin(), newDims.sizes->begin(), newDims.sizes->end()); - } - - void pop_front() - { - assert(sizes != nullptr && sizes->size() > 0); - if (sizes->size() == 1) - dealloc(); - else - sizes->erase(sizes->begin()); - } - - // 'this' should currently not be holding anything, and copyNonFront - // will make it hold a copy of all but the first element of rhs. - // (This would be useful for making a type that is dereferenced by - // one dimension.) - void copyNonFront(const TSmallArrayVector& rhs) - { - assert(sizes == nullptr); - if (rhs.size() > 1) { - alloc(); - sizes->insert(sizes->begin(), rhs.sizes->begin() + 1, rhs.sizes->end()); - } - } - - unsigned int getDimSize(int i) const - { - assert(sizes != nullptr && (int)sizes->size() > i); - return (*sizes)[i].size; - } - - void setDimSize(int i, unsigned int size) const - { - assert(sizes != nullptr && (int)sizes->size() > i); - assert((*sizes)[i].node == nullptr); - (*sizes)[i].size = size; - } - - TIntermTyped* getDimNode(int i) const - { - assert(sizes != nullptr && (int)sizes->size() > i); - return (*sizes)[i].node; - } - - bool operator==(const TSmallArrayVector& rhs) const - { - if (sizes == nullptr && rhs.sizes == nullptr) - return true; - if (sizes == nullptr || rhs.sizes == nullptr) - return false; - return *sizes == *rhs.sizes; - } - bool operator!=(const TSmallArrayVector& rhs) const { return ! 
operator==(rhs); } - -protected: - TSmallArrayVector(const TSmallArrayVector&); - - void alloc() - { - if (sizes == nullptr) - sizes = new TVector; - } - void dealloc() - { - delete sizes; - sizes = nullptr; - } - - TVector* sizes; // will either hold such a pointer, or in the future, hold the two array sizes -}; - -// -// Represent an array, or array of arrays, to arbitrary depth. This is not -// done through a hierarchy of types in a type tree, rather all contiguous arrayness -// in the type hierarchy is localized into this single cumulative object. -// -// The arrayness in TTtype is a pointer, so that it can be non-allocated and zero -// for the vast majority of types that are non-array types. -// -// Order Policy: these are all identical: -// - left to right order within a contiguous set of ...[..][..][..]... in the source language -// - index order 0, 1, 2, ... within the 'sizes' member below -// - outer-most to inner-most -// -struct TArraySizes { - POOL_ALLOCATOR_NEW_DELETE(GetThreadPoolAllocator()) - - TArraySizes() : implicitArraySize(1) { } - - // For breaking into two non-shared copies, independently modifiable. - TArraySizes& operator=(const TArraySizes& from) - { - implicitArraySize = from.implicitArraySize; - sizes = from.sizes; - - return *this; - } - - // translate from array-of-array semantics to container semantics - int getNumDims() const { return sizes.size(); } - int getDimSize(int dim) const { return sizes.getDimSize(dim); } - TIntermTyped* getDimNode(int dim) const { return sizes.getDimNode(dim); } - void setDimSize(int dim, int size) { sizes.setDimSize(dim, size); } - int getOuterSize() const { return sizes.frontSize(); } - TIntermTyped* getOuterNode() const { return sizes.frontNode(); } - int getCumulativeSize() const - { - int size = 1; - for (int d = 0; d < sizes.size(); ++d) { - // this only makes sense in paths that have a known array size - assert(sizes.getDimSize(d) != UnsizedArraySize); - size *= sizes.getDimSize(d); - } - return size; - } - void addInnerSize() { addInnerSize((unsigned)UnsizedArraySize); } - void addInnerSize(int s) { addInnerSize((unsigned)s, nullptr); } - void addInnerSize(int s, TIntermTyped* n) { sizes.push_back((unsigned)s, n); } - void addInnerSize(TArraySize pair) { sizes.push_back(pair.size, pair.node); } - void changeOuterSize(int s) { sizes.changeFront((unsigned)s); } - int getImplicitSize() const { return (int)implicitArraySize; } - void setImplicitSize(int s) { implicitArraySize = s; } - bool isInnerImplicit() const - { - for (int d = 1; d < sizes.size(); ++d) { - if (sizes.getDimSize(d) == (unsigned)UnsizedArraySize) - return true; - } - - return false; - } - bool isInnerSpecialization() const - { - for (int d = 1; d < sizes.size(); ++d) { - if (sizes.getDimNode(d) != nullptr) - return true; - } - - return false; - } - bool isOuterSpecialization() - { - return sizes.getDimNode(0) != nullptr; - } - - bool isImplicit() const { return getOuterSize() == UnsizedArraySize || isInnerImplicit(); } - void addOuterSizes(const TArraySizes& s) { sizes.push_front(s.sizes); } - void dereference() { sizes.pop_front(); } - void copyDereferenced(const TArraySizes& rhs) - { - assert(sizes.size() == 0); - if (rhs.sizes.size() > 1) - sizes.copyNonFront(rhs.sizes); - } - - bool sameInnerArrayness(const TArraySizes& rhs) const - { - if (sizes.size() != rhs.sizes.size()) - return false; - - for (int d = 1; d < sizes.size(); ++d) { - if (sizes.getDimSize(d) != rhs.sizes.getDimSize(d) || - sizes.getDimNode(d) != rhs.sizes.getDimNode(d)) - return false; 
- } - - return true; - } - - bool operator==(const TArraySizes& rhs) { return sizes == rhs.sizes; } - bool operator!=(const TArraySizes& rhs) { return sizes != rhs.sizes; } - -protected: - TSmallArrayVector sizes; - - TArraySizes(const TArraySizes&); - - // for tracking maximum referenced index, before an explicit size is given - // applies only to the outer-most dimension - int implicitArraySize; -}; - -} // end namespace glslang - -#endif // _ARRAYS_INCLUDED_ diff --git a/deps/glslang/glslang/Include/intermediate.h b/deps/glslang/glslang/Include/intermediate.h deleted file mode 100644 index 6149b19b..00000000 --- a/deps/glslang/glslang/Include/intermediate.h +++ /dev/null @@ -1,1453 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2012-2016 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -// -// Definition of the in-memory high-level intermediate representation -// of shaders. This is a tree that parser creates. -// -// Nodes in the tree are defined as a hierarchy of classes derived from -// TIntermNode. Each is a node in a tree. There is no preset branching factor; -// each node can have it's own type of list of children. -// - -#ifndef __INTERMEDIATE_H -#define __INTERMEDIATE_H - -#if _MSC_VER >= 1900 - #pragma warning(disable : 4464) // relative include path contains '..' - #pragma warning(disable : 5026) // 'glslang::TIntermUnary': move constructor was implicitly defined as deleted -#endif - -#include "../Include/Common.h" -#include "../Include/Types.h" -#include "../Include/ConstantUnion.h" - -namespace glslang { - -class TIntermediate; - -// -// Operators used by the high-level (parse tree) representation. -// -enum TOperator { - EOpNull, // if in a node, should only mean a node is still being built - EOpSequence, // denotes a list of statements, or parameters, etc. 
- EOpLinkerObjects, // for aggregate node of objects the linker may need, if not reference by the rest of the AST - EOpFunctionCall, - EOpFunction, // For function definition - EOpParameters, // an aggregate listing the parameters to a function - - // - // Unary operators - // - - EOpNegative, - EOpLogicalNot, - EOpVectorLogicalNot, - EOpBitwiseNot, - - EOpPostIncrement, - EOpPostDecrement, - EOpPreIncrement, - EOpPreDecrement, - - EOpConvIntToBool, - EOpConvUintToBool, - EOpConvFloatToBool, - EOpConvDoubleToBool, - EOpConvInt64ToBool, - EOpConvUint64ToBool, - EOpConvBoolToFloat, - EOpConvIntToFloat, - EOpConvUintToFloat, - EOpConvDoubleToFloat, - EOpConvInt64ToFloat, - EOpConvUint64ToFloat, - EOpConvUintToInt, - EOpConvFloatToInt, - EOpConvBoolToInt, - EOpConvDoubleToInt, - EOpConvInt64ToInt, - EOpConvUint64ToInt, - EOpConvIntToUint, - EOpConvFloatToUint, - EOpConvBoolToUint, - EOpConvDoubleToUint, - EOpConvInt64ToUint, - EOpConvUint64ToUint, - EOpConvIntToDouble, - EOpConvUintToDouble, - EOpConvFloatToDouble, - EOpConvBoolToDouble, - EOpConvInt64ToDouble, - EOpConvUint64ToDouble, - EOpConvBoolToInt64, - EOpConvIntToInt64, - EOpConvUintToInt64, - EOpConvFloatToInt64, - EOpConvDoubleToInt64, - EOpConvUint64ToInt64, - EOpConvBoolToUint64, - EOpConvIntToUint64, - EOpConvUintToUint64, - EOpConvFloatToUint64, - EOpConvDoubleToUint64, - EOpConvInt64ToUint64, -#ifdef AMD_EXTENSIONS - EOpConvBoolToFloat16, - EOpConvIntToFloat16, - EOpConvUintToFloat16, - EOpConvFloatToFloat16, - EOpConvDoubleToFloat16, - EOpConvInt64ToFloat16, - EOpConvUint64ToFloat16, - EOpConvFloat16ToBool, - EOpConvFloat16ToInt, - EOpConvFloat16ToUint, - EOpConvFloat16ToFloat, - EOpConvFloat16ToDouble, - EOpConvFloat16ToInt64, - EOpConvFloat16ToUint64, - - EOpConvBoolToInt16, - EOpConvIntToInt16, - EOpConvUintToInt16, - EOpConvFloatToInt16, - EOpConvDoubleToInt16, - EOpConvFloat16ToInt16, - EOpConvInt64ToInt16, - EOpConvUint64ToInt16, - EOpConvUint16ToInt16, - EOpConvInt16ToBool, - EOpConvInt16ToInt, - EOpConvInt16ToUint, - EOpConvInt16ToFloat, - EOpConvInt16ToDouble, - EOpConvInt16ToFloat16, - EOpConvInt16ToInt64, - EOpConvInt16ToUint64, - - EOpConvBoolToUint16, - EOpConvIntToUint16, - EOpConvUintToUint16, - EOpConvFloatToUint16, - EOpConvDoubleToUint16, - EOpConvFloat16ToUint16, - EOpConvInt64ToUint16, - EOpConvUint64ToUint16, - EOpConvInt16ToUint16, - EOpConvUint16ToBool, - EOpConvUint16ToInt, - EOpConvUint16ToUint, - EOpConvUint16ToFloat, - EOpConvUint16ToDouble, - EOpConvUint16ToFloat16, - EOpConvUint16ToInt64, - EOpConvUint16ToUint64, -#endif - - // - // binary operations - // - - EOpAdd, - EOpSub, - EOpMul, - EOpDiv, - EOpMod, - EOpRightShift, - EOpLeftShift, - EOpAnd, - EOpInclusiveOr, - EOpExclusiveOr, - EOpEqual, - EOpNotEqual, - EOpVectorEqual, - EOpVectorNotEqual, - EOpLessThan, - EOpGreaterThan, - EOpLessThanEqual, - EOpGreaterThanEqual, - EOpComma, - - EOpVectorTimesScalar, - EOpVectorTimesMatrix, - EOpMatrixTimesVector, - EOpMatrixTimesScalar, - - EOpLogicalOr, - EOpLogicalXor, - EOpLogicalAnd, - - EOpIndexDirect, - EOpIndexIndirect, - EOpIndexDirectStruct, - - EOpVectorSwizzle, - - EOpMethod, - EOpScoping, - - // - // Built-in functions mapped to operators - // - - EOpRadians, - EOpDegrees, - EOpSin, - EOpCos, - EOpTan, - EOpAsin, - EOpAcos, - EOpAtan, - EOpSinh, - EOpCosh, - EOpTanh, - EOpAsinh, - EOpAcosh, - EOpAtanh, - - EOpPow, - EOpExp, - EOpLog, - EOpExp2, - EOpLog2, - EOpSqrt, - EOpInverseSqrt, - - EOpAbs, - EOpSign, - EOpFloor, - EOpTrunc, - EOpRound, - EOpRoundEven, - EOpCeil, - EOpFract, - EOpModf, - 
EOpMin, - EOpMax, - EOpClamp, - EOpMix, - EOpStep, - EOpSmoothStep, - - EOpIsNan, - EOpIsInf, - - EOpFma, - - EOpFrexp, - EOpLdexp, - - EOpFloatBitsToInt, - EOpFloatBitsToUint, - EOpIntBitsToFloat, - EOpUintBitsToFloat, - EOpDoubleBitsToInt64, - EOpDoubleBitsToUint64, - EOpInt64BitsToDouble, - EOpUint64BitsToDouble, -#ifdef AMD_EXTENSIONS - EOpFloat16BitsToInt16, - EOpFloat16BitsToUint16, - EOpInt16BitsToFloat16, - EOpUint16BitsToFloat16, -#endif - EOpPackSnorm2x16, - EOpUnpackSnorm2x16, - EOpPackUnorm2x16, - EOpUnpackUnorm2x16, - EOpPackSnorm4x8, - EOpUnpackSnorm4x8, - EOpPackUnorm4x8, - EOpUnpackUnorm4x8, - EOpPackHalf2x16, - EOpUnpackHalf2x16, - EOpPackDouble2x32, - EOpUnpackDouble2x32, - EOpPackInt2x32, - EOpUnpackInt2x32, - EOpPackUint2x32, - EOpUnpackUint2x32, -#ifdef AMD_EXTENSIONS - EOpPackFloat2x16, - EOpUnpackFloat2x16, - EOpPackInt2x16, - EOpUnpackInt2x16, - EOpPackUint2x16, - EOpUnpackUint2x16, - EOpPackInt4x16, - EOpUnpackInt4x16, - EOpPackUint4x16, - EOpUnpackUint4x16, -#endif - - EOpLength, - EOpDistance, - EOpDot, - EOpCross, - EOpNormalize, - EOpFaceForward, - EOpReflect, - EOpRefract, - -#ifdef AMD_EXTENSIONS - EOpMin3, - EOpMax3, - EOpMid3, -#endif - - EOpDPdx, // Fragment only - EOpDPdy, // Fragment only - EOpFwidth, // Fragment only - EOpDPdxFine, // Fragment only - EOpDPdyFine, // Fragment only - EOpFwidthFine, // Fragment only - EOpDPdxCoarse, // Fragment only - EOpDPdyCoarse, // Fragment only - EOpFwidthCoarse, // Fragment only - - EOpInterpolateAtCentroid, // Fragment only - EOpInterpolateAtSample, // Fragment only - EOpInterpolateAtOffset, // Fragment only - -#ifdef AMD_EXTENSIONS - EOpInterpolateAtVertex, -#endif - - EOpMatrixTimesMatrix, - EOpOuterProduct, - EOpDeterminant, - EOpMatrixInverse, - EOpTranspose, - - EOpFtransform, - - EOpNoise, - - EOpEmitVertex, // geometry only - EOpEndPrimitive, // geometry only - EOpEmitStreamVertex, // geometry only - EOpEndStreamPrimitive, // geometry only - - EOpBarrier, - EOpMemoryBarrier, - EOpMemoryBarrierAtomicCounter, - EOpMemoryBarrierBuffer, - EOpMemoryBarrierImage, - EOpMemoryBarrierShared, // compute only - EOpGroupMemoryBarrier, // compute only - - EOpBallot, - EOpReadInvocation, - EOpReadFirstInvocation, - - EOpAnyInvocation, - EOpAllInvocations, - EOpAllInvocationsEqual, - -#ifdef AMD_EXTENSIONS - EOpMinInvocations, - EOpMaxInvocations, - EOpAddInvocations, - EOpMinInvocationsNonUniform, - EOpMaxInvocationsNonUniform, - EOpAddInvocationsNonUniform, - EOpMinInvocationsInclusiveScan, - EOpMaxInvocationsInclusiveScan, - EOpAddInvocationsInclusiveScan, - EOpMinInvocationsInclusiveScanNonUniform, - EOpMaxInvocationsInclusiveScanNonUniform, - EOpAddInvocationsInclusiveScanNonUniform, - EOpMinInvocationsExclusiveScan, - EOpMaxInvocationsExclusiveScan, - EOpAddInvocationsExclusiveScan, - EOpMinInvocationsExclusiveScanNonUniform, - EOpMaxInvocationsExclusiveScanNonUniform, - EOpAddInvocationsExclusiveScanNonUniform, - EOpSwizzleInvocations, - EOpSwizzleInvocationsMasked, - EOpWriteInvocation, - EOpMbcnt, - - EOpCubeFaceIndex, - EOpCubeFaceCoord, - EOpTime, -#endif - - EOpAtomicAdd, - EOpAtomicMin, - EOpAtomicMax, - EOpAtomicAnd, - EOpAtomicOr, - EOpAtomicXor, - EOpAtomicExchange, - EOpAtomicCompSwap, - - EOpAtomicCounterIncrement, - EOpAtomicCounterDecrement, - EOpAtomicCounter, - EOpAtomicCounterAdd, - EOpAtomicCounterSubtract, - EOpAtomicCounterMin, - EOpAtomicCounterMax, - EOpAtomicCounterAnd, - EOpAtomicCounterOr, - EOpAtomicCounterXor, - EOpAtomicCounterExchange, - EOpAtomicCounterCompSwap, - - EOpAny, - EOpAll, - - 
// - // Branch - // - - EOpKill, // Fragment only - EOpReturn, - EOpBreak, - EOpContinue, - EOpCase, - EOpDefault, - - // - // Constructors - // - - EOpConstructGuardStart, - EOpConstructInt, // these first scalar forms also identify what implicit conversion is needed - EOpConstructUint, - EOpConstructInt64, - EOpConstructUint64, -#ifdef AMD_EXTENSIONS - EOpConstructInt16, - EOpConstructUint16, -#endif - EOpConstructBool, - EOpConstructFloat, - EOpConstructDouble, -#ifdef AMD_EXTENSIONS - EOpConstructFloat16, -#endif - EOpConstructVec2, - EOpConstructVec3, - EOpConstructVec4, - EOpConstructDVec2, - EOpConstructDVec3, - EOpConstructDVec4, -#ifdef AMD_EXTENSIONS - EOpConstructF16Vec2, - EOpConstructF16Vec3, - EOpConstructF16Vec4, -#endif - EOpConstructBVec2, - EOpConstructBVec3, - EOpConstructBVec4, - EOpConstructIVec2, - EOpConstructIVec3, - EOpConstructIVec4, - EOpConstructUVec2, - EOpConstructUVec3, - EOpConstructUVec4, - EOpConstructI64Vec2, - EOpConstructI64Vec3, - EOpConstructI64Vec4, - EOpConstructU64Vec2, - EOpConstructU64Vec3, - EOpConstructU64Vec4, -#ifdef AMD_EXTENSIONS - EOpConstructI16Vec2, - EOpConstructI16Vec3, - EOpConstructI16Vec4, - EOpConstructU16Vec2, - EOpConstructU16Vec3, - EOpConstructU16Vec4, -#endif - EOpConstructMat2x2, - EOpConstructMat2x3, - EOpConstructMat2x4, - EOpConstructMat3x2, - EOpConstructMat3x3, - EOpConstructMat3x4, - EOpConstructMat4x2, - EOpConstructMat4x3, - EOpConstructMat4x4, - EOpConstructDMat2x2, - EOpConstructDMat2x3, - EOpConstructDMat2x4, - EOpConstructDMat3x2, - EOpConstructDMat3x3, - EOpConstructDMat3x4, - EOpConstructDMat4x2, - EOpConstructDMat4x3, - EOpConstructDMat4x4, - EOpConstructIMat2x2, - EOpConstructIMat2x3, - EOpConstructIMat2x4, - EOpConstructIMat3x2, - EOpConstructIMat3x3, - EOpConstructIMat3x4, - EOpConstructIMat4x2, - EOpConstructIMat4x3, - EOpConstructIMat4x4, - EOpConstructUMat2x2, - EOpConstructUMat2x3, - EOpConstructUMat2x4, - EOpConstructUMat3x2, - EOpConstructUMat3x3, - EOpConstructUMat3x4, - EOpConstructUMat4x2, - EOpConstructUMat4x3, - EOpConstructUMat4x4, - EOpConstructBMat2x2, - EOpConstructBMat2x3, - EOpConstructBMat2x4, - EOpConstructBMat3x2, - EOpConstructBMat3x3, - EOpConstructBMat3x4, - EOpConstructBMat4x2, - EOpConstructBMat4x3, - EOpConstructBMat4x4, -#ifdef AMD_EXTENSIONS - EOpConstructF16Mat2x2, - EOpConstructF16Mat2x3, - EOpConstructF16Mat2x4, - EOpConstructF16Mat3x2, - EOpConstructF16Mat3x3, - EOpConstructF16Mat3x4, - EOpConstructF16Mat4x2, - EOpConstructF16Mat4x3, - EOpConstructF16Mat4x4, -#endif - EOpConstructStruct, - EOpConstructTextureSampler, - EOpConstructGuardEnd, - - // - // moves - // - - EOpAssign, - EOpAddAssign, - EOpSubAssign, - EOpMulAssign, - EOpVectorTimesMatrixAssign, - EOpVectorTimesScalarAssign, - EOpMatrixTimesScalarAssign, - EOpMatrixTimesMatrixAssign, - EOpDivAssign, - EOpModAssign, - EOpAndAssign, - EOpInclusiveOrAssign, - EOpExclusiveOrAssign, - EOpLeftShiftAssign, - EOpRightShiftAssign, - - // - // Array operators - // - - EOpArrayLength, // "Array" distinguishes from length(v) built-in function, but it applies to vectors and matrices as well. 
- - // - // Image operations - // - - EOpImageGuardBegin, - - EOpImageQuerySize, - EOpImageQuerySamples, - EOpImageLoad, - EOpImageStore, -#ifdef AMD_EXTENSIONS - EOpImageLoadLod, - EOpImageStoreLod, -#endif - EOpImageAtomicAdd, - EOpImageAtomicMin, - EOpImageAtomicMax, - EOpImageAtomicAnd, - EOpImageAtomicOr, - EOpImageAtomicXor, - EOpImageAtomicExchange, - EOpImageAtomicCompSwap, - - EOpSubpassLoad, - EOpSubpassLoadMS, - EOpSparseImageLoad, -#ifdef AMD_EXTENSIONS - EOpSparseImageLoadLod, -#endif - - EOpImageGuardEnd, - - // - // Texture operations - // - - EOpTextureGuardBegin, - - EOpTextureQuerySize, - EOpTextureQueryLod, - EOpTextureQueryLevels, - EOpTextureQuerySamples, - - EOpSamplingGuardBegin, - - EOpTexture, - EOpTextureProj, - EOpTextureLod, - EOpTextureOffset, - EOpTextureFetch, - EOpTextureFetchOffset, - EOpTextureProjOffset, - EOpTextureLodOffset, - EOpTextureProjLod, - EOpTextureProjLodOffset, - EOpTextureGrad, - EOpTextureGradOffset, - EOpTextureProjGrad, - EOpTextureProjGradOffset, - EOpTextureGather, - EOpTextureGatherOffset, - EOpTextureGatherOffsets, - EOpTextureClamp, - EOpTextureOffsetClamp, - EOpTextureGradClamp, - EOpTextureGradOffsetClamp, -#ifdef AMD_EXTENSIONS - EOpTextureGatherLod, - EOpTextureGatherLodOffset, - EOpTextureGatherLodOffsets, -#endif - - EOpSparseTextureGuardBegin, - - EOpSparseTexture, - EOpSparseTextureLod, - EOpSparseTextureOffset, - EOpSparseTextureFetch, - EOpSparseTextureFetchOffset, - EOpSparseTextureLodOffset, - EOpSparseTextureGrad, - EOpSparseTextureGradOffset, - EOpSparseTextureGather, - EOpSparseTextureGatherOffset, - EOpSparseTextureGatherOffsets, - EOpSparseTexelsResident, - EOpSparseTextureClamp, - EOpSparseTextureOffsetClamp, - EOpSparseTextureGradClamp, - EOpSparseTextureGradOffsetClamp, -#ifdef AMD_EXTENSIONS - EOpSparseTextureGatherLod, - EOpSparseTextureGatherLodOffset, - EOpSparseTextureGatherLodOffsets, -#endif - - EOpSparseTextureGuardEnd, - EOpSamplingGuardEnd, - EOpTextureGuardEnd, - - // - // Integer operations - // - - EOpAddCarry, - EOpSubBorrow, - EOpUMulExtended, - EOpIMulExtended, - EOpBitfieldExtract, - EOpBitfieldInsert, - EOpBitFieldReverse, - EOpBitCount, - EOpFindLSB, - EOpFindMSB, - - // - // HLSL operations - // - - EOpClip, // discard if input value < 0 - EOpIsFinite, - EOpLog10, // base 10 log - EOpRcp, // 1/x - EOpSaturate, // clamp from 0 to 1 - EOpSinCos, // sin and cos in out parameters - EOpGenMul, // mul(x,y) on any of mat/vec/scalars - EOpDst, // x = 1, y=src0.y * src1.y, z=src0.z, w=src1.w - EOpInterlockedAdd, // atomic ops, but uses [optional] out arg instead of return - EOpInterlockedAnd, // ... - EOpInterlockedCompareExchange, // ... - EOpInterlockedCompareStore, // ... - EOpInterlockedExchange, // ... - EOpInterlockedMax, // ... - EOpInterlockedMin, // ... - EOpInterlockedOr, // ... - EOpInterlockedXor, // ... - EOpAllMemoryBarrierWithGroupSync, // memory barriers without non-hlsl AST equivalents - EOpGroupMemoryBarrierWithGroupSync, // ... - EOpWorkgroupMemoryBarrier, // ... - EOpWorkgroupMemoryBarrierWithGroupSync, // ... 
- EOpEvaluateAttributeSnapped, // InterpolateAtOffset with int position on 16x16 grid - EOpF32tof16, // HLSL conversion: half of a PackHalf2x16 - EOpF16tof32, // HLSL conversion: half of an UnpackHalf2x16 - EOpLit, // HLSL lighting coefficient vector - EOpTextureBias, // HLSL texture bias: will be lowered to EOpTexture - EOpAsDouble, // slightly different from EOpUint64BitsToDouble - EOpD3DCOLORtoUBYTE4, // convert and swizzle 4-component color to UBYTE4 range - - EOpMethodSample, // Texture object methods. These are translated to existing - EOpMethodSampleBias, // AST methods, and exist to represent HLSL semantics until that - EOpMethodSampleCmp, // translation is performed. See HlslParseContext::decomposeSampleMethods(). - EOpMethodSampleCmpLevelZero, // ... - EOpMethodSampleGrad, // ... - EOpMethodSampleLevel, // ... - EOpMethodLoad, // ... - EOpMethodGetDimensions, // ... - EOpMethodGetSamplePosition, // ... - EOpMethodGather, // ... - EOpMethodCalculateLevelOfDetail, // ... - EOpMethodCalculateLevelOfDetailUnclamped, // ... - - // Load already defined above for textures - EOpMethodLoad2, // Structure buffer object methods. These are translated to existing - EOpMethodLoad3, // AST methods, and exist to represent HLSL semantics until that - EOpMethodLoad4, // translation is performed. See HlslParseContext::decomposeSampleMethods(). - EOpMethodStore, // ... - EOpMethodStore2, // ... - EOpMethodStore3, // ... - EOpMethodStore4, // ... - EOpMethodIncrementCounter, // ... - EOpMethodDecrementCounter, // ... - // EOpMethodAppend is defined for geo shaders below - EOpMethodConsume, - - // SM5 texture methods - EOpMethodGatherRed, // These are covered under the above EOpMethodSample comment about - EOpMethodGatherGreen, // translation to existing AST opcodes. They exist temporarily - EOpMethodGatherBlue, // because HLSL arguments are slightly different. - EOpMethodGatherAlpha, // ... - EOpMethodGatherCmp, // ... - EOpMethodGatherCmpRed, // ... - EOpMethodGatherCmpGreen, // ... - EOpMethodGatherCmpBlue, // ... - EOpMethodGatherCmpAlpha, // ... - - // geometry methods - EOpMethodAppend, // Geometry shader methods - EOpMethodRestartStrip, // ... - - // matrix - EOpMatrixSwizzle, // select multiple matrix components (non-column) -}; - -class TIntermTraverser; -class TIntermOperator; -class TIntermAggregate; -class TIntermUnary; -class TIntermBinary; -class TIntermConstantUnion; -class TIntermSelection; -class TIntermSwitch; -class TIntermBranch; -class TIntermTyped; -class TIntermMethod; -class TIntermSymbol; - -} // end namespace glslang - -// -// Base class for the tree nodes -// -// (Put outside the glslang namespace, as it's used as part of the external interface.) 
-// -class TIntermNode { -public: - POOL_ALLOCATOR_NEW_DELETE(glslang::GetThreadPoolAllocator()) - - TIntermNode() { loc.init(); } - virtual const glslang::TSourceLoc& getLoc() const { return loc; } - virtual void setLoc(const glslang::TSourceLoc& l) { loc = l; } - virtual void traverse(glslang::TIntermTraverser*) = 0; - virtual glslang::TIntermTyped* getAsTyped() { return 0; } - virtual glslang::TIntermOperator* getAsOperator() { return 0; } - virtual glslang::TIntermConstantUnion* getAsConstantUnion() { return 0; } - virtual glslang::TIntermAggregate* getAsAggregate() { return 0; } - virtual glslang::TIntermUnary* getAsUnaryNode() { return 0; } - virtual glslang::TIntermBinary* getAsBinaryNode() { return 0; } - virtual glslang::TIntermSelection* getAsSelectionNode() { return 0; } - virtual glslang::TIntermSwitch* getAsSwitchNode() { return 0; } - virtual glslang::TIntermMethod* getAsMethodNode() { return 0; } - virtual glslang::TIntermSymbol* getAsSymbolNode() { return 0; } - virtual glslang::TIntermBranch* getAsBranchNode() { return 0; } - - virtual const glslang::TIntermTyped* getAsTyped() const { return 0; } - virtual const glslang::TIntermOperator* getAsOperator() const { return 0; } - virtual const glslang::TIntermConstantUnion* getAsConstantUnion() const { return 0; } - virtual const glslang::TIntermAggregate* getAsAggregate() const { return 0; } - virtual const glslang::TIntermUnary* getAsUnaryNode() const { return 0; } - virtual const glslang::TIntermBinary* getAsBinaryNode() const { return 0; } - virtual const glslang::TIntermSelection* getAsSelectionNode() const { return 0; } - virtual const glslang::TIntermSwitch* getAsSwitchNode() const { return 0; } - virtual const glslang::TIntermMethod* getAsMethodNode() const { return 0; } - virtual const glslang::TIntermSymbol* getAsSymbolNode() const { return 0; } - virtual const glslang::TIntermBranch* getAsBranchNode() const { return 0; } - virtual ~TIntermNode() { } - -protected: - TIntermNode(const TIntermNode&); - TIntermNode& operator=(const TIntermNode&); - glslang::TSourceLoc loc; -}; - -namespace glslang { - -// -// This is just to help yacc. -// -struct TIntermNodePair { - TIntermNode* node1; - TIntermNode* node2; -}; - -// -// Intermediate class for nodes that have a type. 
-// -class TIntermTyped : public TIntermNode { -public: - TIntermTyped(const TType& t) { type.shallowCopy(t); } - TIntermTyped(TBasicType basicType) { TType bt(basicType); type.shallowCopy(bt); } - virtual TIntermTyped* getAsTyped() { return this; } - virtual const TIntermTyped* getAsTyped() const { return this; } - virtual void setType(const TType& t) { type.shallowCopy(t); } - virtual const TType& getType() const { return type; } - virtual TType& getWritableType() { return type; } - - virtual TBasicType getBasicType() const { return type.getBasicType(); } - virtual TQualifier& getQualifier() { return type.getQualifier(); } - virtual const TQualifier& getQualifier() const { return type.getQualifier(); } - virtual void propagatePrecision(TPrecisionQualifier); - virtual int getVectorSize() const { return type.getVectorSize(); } - virtual int getMatrixCols() const { return type.getMatrixCols(); } - virtual int getMatrixRows() const { return type.getMatrixRows(); } - virtual bool isMatrix() const { return type.isMatrix(); } - virtual bool isArray() const { return type.isArray(); } - virtual bool isVector() const { return type.isVector(); } - virtual bool isScalar() const { return type.isScalar(); } - virtual bool isStruct() const { return type.isStruct(); } - TString getCompleteString() const { return type.getCompleteString(); } - -protected: - TIntermTyped& operator=(const TIntermTyped&); - TType type; -}; - -// -// Selection control hints -// -enum TSelectionControl { - ESelectionControlNone, - ESelectionControlFlatten, - ESelectionControlDontFlatten, -}; - -// -// Loop control hints -// -enum TLoopControl { - ELoopControlNone, - ELoopControlUnroll, - ELoopControlDontUnroll, -}; - -// -// Handle for, do-while, and while loops. -// -class TIntermLoop : public TIntermNode { -public: - TIntermLoop(TIntermNode* aBody, TIntermTyped* aTest, TIntermTyped* aTerminal, bool testFirst) : - body(aBody), - test(aTest), - terminal(aTerminal), - first(testFirst), - control(ELoopControlNone) - { } - - virtual void traverse(TIntermTraverser*); - TIntermNode* getBody() const { return body; } - TIntermTyped* getTest() const { return test; } - TIntermTyped* getTerminal() const { return terminal; } - bool testFirst() const { return first; } - - void setLoopControl(TLoopControl c) { control = c; } - TLoopControl getLoopControl() const { return control; } - -protected: - TIntermNode* body; // code to loop over - TIntermTyped* test; // exit condition associated with loop, could be 0 for 'for' loops - TIntermTyped* terminal; // exists for for-loops - bool first; // true for while and for, not for do-while - TLoopControl control; // loop control hint -}; - -// -// Handle case, break, continue, return, and kill. -// -class TIntermBranch : public TIntermNode { -public: - TIntermBranch(TOperator op, TIntermTyped* e) : - flowOp(op), - expression(e) { } - virtual TIntermBranch* getAsBranchNode() { return this; } - virtual const TIntermBranch* getAsBranchNode() const { return this; } - virtual void traverse(TIntermTraverser*); - TOperator getFlowOp() const { return flowOp; } - TIntermTyped* getExpression() const { return expression; } -protected: - TOperator flowOp; - TIntermTyped* expression; -}; - -// -// Represent method names before seeing their calling signature -// or resolving them to operations. Just an expression as the base object -// and a textural name. 
-// -class TIntermMethod : public TIntermTyped { -public: - TIntermMethod(TIntermTyped* o, const TType& t, const TString& m) : TIntermTyped(t), object(o), method(m) { } - virtual TIntermMethod* getAsMethodNode() { return this; } - virtual const TIntermMethod* getAsMethodNode() const { return this; } - virtual const TString& getMethodName() const { return method; } - virtual TIntermTyped* getObject() const { return object; } - virtual void traverse(TIntermTraverser*); -protected: - TIntermTyped* object; - TString method; -}; - -// -// Nodes that correspond to symbols or constants in the source code. -// -class TIntermSymbol : public TIntermTyped { -public: - // if symbol is initialized as symbol(sym), the memory comes from the pool allocator of sym. If sym comes from - // per process threadPoolAllocator, then it causes increased memory usage per compile - // it is essential to use "symbol = sym" to assign to symbol - TIntermSymbol(int i, const TString& n, const TType& t) - : TIntermTyped(t), id(i), -#ifdef ENABLE_HLSL - flattenSubset(-1), -#endif - constSubtree(nullptr) - { name = n; } - virtual int getId() const { return id; } - virtual const TString& getName() const { return name; } - virtual void traverse(TIntermTraverser*); - virtual TIntermSymbol* getAsSymbolNode() { return this; } - virtual const TIntermSymbol* getAsSymbolNode() const { return this; } - void setConstArray(const TConstUnionArray& c) { constArray = c; } - const TConstUnionArray& getConstArray() const { return constArray; } - void setConstSubtree(TIntermTyped* subtree) { constSubtree = subtree; } - TIntermTyped* getConstSubtree() const { return constSubtree; } -#ifdef ENABLE_HLSL - void setFlattenSubset(int subset) { flattenSubset = subset; } - int getFlattenSubset() const { return flattenSubset; } // -1 means full object -#endif - -protected: - int id; // the unique id of the symbol this node represents -#ifdef ENABLE_HLSL - int flattenSubset; // how deeply the flattened object rooted at id has been dereferenced -#endif - TString name; // the name of the symbol this node represents - TConstUnionArray constArray; // if the symbol is a front-end compile-time constant, this is its value - TIntermTyped* constSubtree; -}; - -class TIntermConstantUnion : public TIntermTyped { -public: - TIntermConstantUnion(const TConstUnionArray& ua, const TType& t) : TIntermTyped(t), constArray(ua), literal(false) { } - const TConstUnionArray& getConstArray() const { return constArray; } - virtual TIntermConstantUnion* getAsConstantUnion() { return this; } - virtual const TIntermConstantUnion* getAsConstantUnion() const { return this; } - virtual void traverse(TIntermTraverser*); - virtual TIntermTyped* fold(TOperator, const TIntermTyped*) const; - virtual TIntermTyped* fold(TOperator, const TType&) const; - void setLiteral() { literal = true; } - void setExpression() { literal = false; } - bool isLiteral() const { return literal; } - -protected: - TIntermConstantUnion& operator=(const TIntermConstantUnion&); - - const TConstUnionArray constArray; - bool literal; // true if node represents a literal in the source code -}; - -// Represent the independent aspects of a texturing TOperator -struct TCrackedTextureOp { - bool query; - bool proj; - bool lod; - bool fetch; - bool offset; - bool offsets; - bool gather; - bool grad; - bool subpass; - bool lodClamp; -}; - -// -// Intermediate class for node types that hold operators. 
-// -class TIntermOperator : public TIntermTyped { -public: - virtual TIntermOperator* getAsOperator() { return this; } - virtual const TIntermOperator* getAsOperator() const { return this; } - TOperator getOp() const { return op; } - void setOp(TOperator newOp) { op = newOp; } - bool modifiesState() const; - bool isConstructor() const; - bool isTexture() const { return op > EOpTextureGuardBegin && op < EOpTextureGuardEnd; } - bool isSampling() const { return op > EOpSamplingGuardBegin && op < EOpSamplingGuardEnd; } - bool isImage() const { return op > EOpImageGuardBegin && op < EOpImageGuardEnd; } - bool isSparseTexture() const { return op > EOpSparseTextureGuardBegin && op < EOpSparseTextureGuardEnd; } - bool isSparseImage() const { return op == EOpSparseImageLoad; } - - void setOperationPrecision(TPrecisionQualifier p) { operationPrecision = p; } - TPrecisionQualifier getOperationPrecision() const { return operationPrecision != EpqNone ? - operationPrecision : - type.getQualifier().precision; } - TString getCompleteString() const - { - TString cs = type.getCompleteString(); - if (getOperationPrecision() != type.getQualifier().precision) { - cs += ", operation at "; - cs += GetPrecisionQualifierString(getOperationPrecision()); - } - - return cs; - } - - // Crack the op into the individual dimensions of texturing operation. - void crackTexture(TSampler sampler, TCrackedTextureOp& cracked) const - { - cracked.query = false; - cracked.proj = false; - cracked.lod = false; - cracked.fetch = false; - cracked.offset = false; - cracked.offsets = false; - cracked.gather = false; - cracked.grad = false; - cracked.subpass = false; - cracked.lodClamp = false; - - switch (op) { - case EOpImageQuerySize: - case EOpImageQuerySamples: - case EOpTextureQuerySize: - case EOpTextureQueryLod: - case EOpTextureQueryLevels: - case EOpTextureQuerySamples: - case EOpSparseTexelsResident: - cracked.query = true; - break; - case EOpTexture: - case EOpSparseTexture: - break; - case EOpTextureClamp: - case EOpSparseTextureClamp: - cracked.lodClamp = true; - break; - case EOpTextureProj: - cracked.proj = true; - break; - case EOpTextureLod: - case EOpSparseTextureLod: - cracked.lod = true; - break; - case EOpTextureOffset: - case EOpSparseTextureOffset: - cracked.offset = true; - break; - case EOpTextureOffsetClamp: - case EOpSparseTextureOffsetClamp: - cracked.offset = true; - cracked.lodClamp = true; - break; - case EOpTextureFetch: - case EOpSparseTextureFetch: - cracked.fetch = true; - if (sampler.dim == Esd1D || (sampler.dim == Esd2D && ! sampler.ms) || sampler.dim == Esd3D) - cracked.lod = true; - break; - case EOpTextureFetchOffset: - case EOpSparseTextureFetchOffset: - cracked.fetch = true; - cracked.offset = true; - if (sampler.dim == Esd1D || (sampler.dim == Esd2D && ! 
sampler.ms) || sampler.dim == Esd3D) - cracked.lod = true; - break; - case EOpTextureProjOffset: - cracked.offset = true; - cracked.proj = true; - break; - case EOpTextureLodOffset: - case EOpSparseTextureLodOffset: - cracked.offset = true; - cracked.lod = true; - break; - case EOpTextureProjLod: - cracked.lod = true; - cracked.proj = true; - break; - case EOpTextureProjLodOffset: - cracked.offset = true; - cracked.lod = true; - cracked.proj = true; - break; - case EOpTextureGrad: - case EOpSparseTextureGrad: - cracked.grad = true; - break; - case EOpTextureGradClamp: - case EOpSparseTextureGradClamp: - cracked.grad = true; - cracked.lodClamp = true; - break; - case EOpTextureGradOffset: - case EOpSparseTextureGradOffset: - cracked.grad = true; - cracked.offset = true; - break; - case EOpTextureProjGrad: - cracked.grad = true; - cracked.proj = true; - break; - case EOpTextureProjGradOffset: - cracked.grad = true; - cracked.offset = true; - cracked.proj = true; - break; - case EOpTextureGradOffsetClamp: - case EOpSparseTextureGradOffsetClamp: - cracked.grad = true; - cracked.offset = true; - cracked.lodClamp = true; - break; - case EOpTextureGather: - case EOpSparseTextureGather: - cracked.gather = true; - break; - case EOpTextureGatherOffset: - case EOpSparseTextureGatherOffset: - cracked.gather = true; - cracked.offset = true; - break; - case EOpTextureGatherOffsets: - case EOpSparseTextureGatherOffsets: - cracked.gather = true; - cracked.offsets = true; - break; -#ifdef AMD_EXTENSIONS - case EOpTextureGatherLod: - case EOpSparseTextureGatherLod: - cracked.gather = true; - cracked.lod = true; - break; - case EOpTextureGatherLodOffset: - case EOpSparseTextureGatherLodOffset: - cracked.gather = true; - cracked.offset = true; - cracked.lod = true; - break; - case EOpTextureGatherLodOffsets: - case EOpSparseTextureGatherLodOffsets: - cracked.gather = true; - cracked.offsets = true; - cracked.lod = true; - break; - case EOpImageLoadLod: - case EOpImageStoreLod: - case EOpSparseImageLoadLod: - cracked.lod = true; - break; -#endif - case EOpSubpassLoad: - case EOpSubpassLoadMS: - cracked.subpass = true; - break; - default: - break; - } - } - -protected: - TIntermOperator(TOperator o) : TIntermTyped(EbtFloat), op(o), operationPrecision(EpqNone) {} - TIntermOperator(TOperator o, TType& t) : TIntermTyped(t), op(o), operationPrecision(EpqNone) {} - TOperator op; - // The result precision is in the inherited TType, and is usually meant to be both - // the operation precision and the result precision. However, some more complex things, - // like built-in function calls, distinguish between the two, in which case non-EqpNone - // 'operationPrecision' overrides the result precision as far as operation precision - // is concerned. - TPrecisionQualifier operationPrecision; -}; - -// -// Nodes for all the basic binary math operators. -// -class TIntermBinary : public TIntermOperator { -public: - TIntermBinary(TOperator o) : TIntermOperator(o) {} - virtual void traverse(TIntermTraverser*); - virtual void setLeft(TIntermTyped* n) { left = n; } - virtual void setRight(TIntermTyped* n) { right = n; } - virtual TIntermTyped* getLeft() const { return left; } - virtual TIntermTyped* getRight() const { return right; } - virtual TIntermBinary* getAsBinaryNode() { return this; } - virtual const TIntermBinary* getAsBinaryNode() const { return this; } - virtual void updatePrecision(); -protected: - TIntermTyped* left; - TIntermTyped* right; -}; - -// -// Nodes for unary math operators. 
-// -class TIntermUnary : public TIntermOperator { -public: - TIntermUnary(TOperator o, TType& t) : TIntermOperator(o, t), operand(0) {} - TIntermUnary(TOperator o) : TIntermOperator(o), operand(0) {} - virtual void traverse(TIntermTraverser*); - virtual void setOperand(TIntermTyped* o) { operand = o; } - virtual TIntermTyped* getOperand() { return operand; } - virtual const TIntermTyped* getOperand() const { return operand; } - virtual TIntermUnary* getAsUnaryNode() { return this; } - virtual const TIntermUnary* getAsUnaryNode() const { return this; } - virtual void updatePrecision(); -protected: - TIntermTyped* operand; -}; - -typedef TVector TIntermSequence; -typedef TVector TQualifierList; -// -// Nodes that operate on an arbitrary sized set of children. -// -class TIntermAggregate : public TIntermOperator { -public: - TIntermAggregate() : TIntermOperator(EOpNull), userDefined(false), pragmaTable(0) { } - TIntermAggregate(TOperator o) : TIntermOperator(o), pragmaTable(0) { } - ~TIntermAggregate() { delete pragmaTable; } - virtual TIntermAggregate* getAsAggregate() { return this; } - virtual const TIntermAggregate* getAsAggregate() const { return this; } - virtual void setOperator(TOperator o) { op = o; } - virtual TIntermSequence& getSequence() { return sequence; } - virtual const TIntermSequence& getSequence() const { return sequence; } - virtual void setName(const TString& n) { name = n; } - virtual const TString& getName() const { return name; } - virtual void traverse(TIntermTraverser*); - virtual void setUserDefined() { userDefined = true; } - virtual bool isUserDefined() { return userDefined; } - virtual TQualifierList& getQualifierList() { return qualifier; } - virtual const TQualifierList& getQualifierList() const { return qualifier; } - void setOptimize(bool o) { optimize = o; } - void setDebug(bool d) { debug = d; } - bool getOptimize() const { return optimize; } - bool getDebug() const { return debug; } - void addToPragmaTable(const TPragmaTable& pTable); - const TPragmaTable& getPragmaTable() const { return *pragmaTable; } -protected: - TIntermAggregate(const TIntermAggregate&); // disallow copy constructor - TIntermAggregate& operator=(const TIntermAggregate&); // disallow assignment operator - TIntermSequence sequence; - TQualifierList qualifier; - TString name; - bool userDefined; // used for user defined function names - bool optimize; - bool debug; - TPragmaTable* pragmaTable; -}; - -// -// For if tests. 
-// -class TIntermSelection : public TIntermTyped { -public: - TIntermSelection(TIntermTyped* cond, TIntermNode* trueB, TIntermNode* falseB) : - TIntermTyped(EbtVoid), condition(cond), trueBlock(trueB), falseBlock(falseB), control(ESelectionControlNone) {} - TIntermSelection(TIntermTyped* cond, TIntermNode* trueB, TIntermNode* falseB, const TType& type) : - TIntermTyped(type), condition(cond), trueBlock(trueB), falseBlock(falseB), control(ESelectionControlNone) {} - virtual void traverse(TIntermTraverser*); - virtual TIntermTyped* getCondition() const { return condition; } - virtual TIntermNode* getTrueBlock() const { return trueBlock; } - virtual TIntermNode* getFalseBlock() const { return falseBlock; } - virtual TIntermSelection* getAsSelectionNode() { return this; } - virtual const TIntermSelection* getAsSelectionNode() const { return this; } - void setSelectionControl(TSelectionControl c) { control = c; } - TSelectionControl getSelectionControl() const { return control; } -protected: - TIntermTyped* condition; - TIntermNode* trueBlock; - TIntermNode* falseBlock; - TSelectionControl control; // selection control hint -}; - -// -// For switch statements. Designed use is that a switch will have sequence of nodes -// that are either case/default nodes or a *single* node that represents all the code -// in between (if any) consecutive case/defaults. So, a traversal need only deal with -// 0 or 1 nodes per case/default statement. -// -class TIntermSwitch : public TIntermNode { -public: - TIntermSwitch(TIntermTyped* cond, TIntermAggregate* b) : condition(cond), body(b), control(ESelectionControlNone) { } - virtual void traverse(TIntermTraverser*); - virtual TIntermNode* getCondition() const { return condition; } - virtual TIntermAggregate* getBody() const { return body; } - virtual TIntermSwitch* getAsSwitchNode() { return this; } - virtual const TIntermSwitch* getAsSwitchNode() const { return this; } - void setSelectionControl(TSelectionControl c) { control = c; } - TSelectionControl getSelectionControl() const { return control; } -protected: - TIntermTyped* condition; - TIntermAggregate* body; - TSelectionControl control; // selection control hint -}; - -enum TVisit -{ - EvPreVisit, - EvInVisit, - EvPostVisit -}; - -// -// For traversing the tree. User should derive from this, -// put their traversal specific data in it, and then pass -// it to a Traverse method. -// -// When using this, just fill in the methods for nodes you want visited. -// Return false from a pre-visit to skip visiting that node's subtree. -// -// Explicitly set postVisit to true if you want post visiting, otherwise, -// filled in methods will only be called at pre-visit time (before processing -// the subtree). Similarly for inVisit for in-order visiting of nodes with -// multiple children. -// -// If you only want post-visits, explicitly turn off preVisit (and inVisit) -// and turn on postVisit. -// -// In general, for the visit*() methods, return true from interior nodes -// to have the traversal continue on to children. -// -// If you process children yourself, or don't want them processed, return false. 
-// -class TIntermTraverser { -public: - POOL_ALLOCATOR_NEW_DELETE(glslang::GetThreadPoolAllocator()) - TIntermTraverser(bool preVisit = true, bool inVisit = false, bool postVisit = false, bool rightToLeft = false) : - preVisit(preVisit), - inVisit(inVisit), - postVisit(postVisit), - rightToLeft(rightToLeft), - depth(0), - maxDepth(0) { } - virtual ~TIntermTraverser() { } - - virtual void visitSymbol(TIntermSymbol*) { } - virtual void visitConstantUnion(TIntermConstantUnion*) { } - virtual bool visitBinary(TVisit, TIntermBinary*) { return true; } - virtual bool visitUnary(TVisit, TIntermUnary*) { return true; } - virtual bool visitSelection(TVisit, TIntermSelection*) { return true; } - virtual bool visitAggregate(TVisit, TIntermAggregate*) { return true; } - virtual bool visitLoop(TVisit, TIntermLoop*) { return true; } - virtual bool visitBranch(TVisit, TIntermBranch*) { return true; } - virtual bool visitSwitch(TVisit, TIntermSwitch*) { return true; } - - int getMaxDepth() const { return maxDepth; } - - void incrementDepth(TIntermNode *current) - { - depth++; - maxDepth = (std::max)(maxDepth, depth); - path.push_back(current); - } - - void decrementDepth() - { - depth--; - path.pop_back(); - } - - TIntermNode *getParentNode() - { - return path.size() == 0 ? NULL : path.back(); - } - - const bool preVisit; - const bool inVisit; - const bool postVisit; - const bool rightToLeft; - -protected: - TIntermTraverser& operator=(TIntermTraverser&); - - int depth; - int maxDepth; - - // All the nodes from root to the current node's parent during traversing. - TVector path; -}; - -// KHR_vulkan_glsl says "Two arrays sized with specialization constants are the same type only if -// sized with the same symbol, involving no operations" -inline bool SameSpecializationConstants(TIntermTyped* node1, TIntermTyped* node2) -{ - return node1->getAsSymbolNode() && node2->getAsSymbolNode() && - node1->getAsSymbolNode()->getId() == node2->getAsSymbolNode()->getId(); -} - -} // end namespace glslang - -#endif // __INTERMEDIATE_H diff --git a/deps/glslang/glslang/Include/revision.h b/deps/glslang/glslang/Include/revision.h deleted file mode 100644 index 218f8b67..00000000 --- a/deps/glslang/glslang/Include/revision.h +++ /dev/null @@ -1,6 +0,0 @@ -// This header is generated by the make-revision script. -// For the version, it uses the latest git tag followed by the number of commits. -// For the date, it uses the current date (when then script is run). - -#define GLSLANG_REVISION "Overload400-PrecQual.2000" -#define GLSLANG_DATE "12-Apr-2017" diff --git a/deps/glslang/glslang/Include/revision.template b/deps/glslang/glslang/Include/revision.template deleted file mode 100644 index 4a16beeb..00000000 --- a/deps/glslang/glslang/Include/revision.template +++ /dev/null @@ -1,13 +0,0 @@ -// The file revision.h should be updated to the latest version, somehow, on -// check-in, if glslang has changed. -// -// revision.template is the source for revision.h when using SubWCRev as the -// method of updating revision.h. You don't have to do it this way, the -// requirement is only that revision.h gets updated. -// -// revision.h is under source control so that not all consumers of glslang -// source have to figure out how to create revision.h just to get a build -// going. However, if it is not updated, it can be a version behind. 
- -#define GLSLANG_REVISION "$WCREV$" -#define GLSLANG_DATE "$WCDATE$" diff --git a/deps/glslang/glslang/MachineIndependent/Constant.cpp b/deps/glslang/glslang/MachineIndependent/Constant.cpp deleted file mode 100644 index 625b8e94..00000000 --- a/deps/glslang/glslang/MachineIndependent/Constant.cpp +++ /dev/null @@ -1,1029 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2012-2013 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#include "localintermediate.h" -#include -#include -#include - -namespace { - -using namespace glslang; - -typedef union { - double d; - int i[2]; -} DoubleIntUnion; - -// Some helper functions - -bool isNan(double x) -{ - DoubleIntUnion u; - // tough to find a platform independent library function, do it directly - u.d = x; - int bitPatternL = u.i[0]; - int bitPatternH = u.i[1]; - return (bitPatternH & 0x7ff80000) == 0x7ff80000 && - ((bitPatternH & 0xFFFFF) != 0 || bitPatternL != 0); -} - -bool isInf(double x) -{ - DoubleIntUnion u; - // tough to find a platform independent library function, do it directly - u.d = x; - int bitPatternL = u.i[0]; - int bitPatternH = u.i[1]; - return (bitPatternH & 0x7ff00000) == 0x7ff00000 && - (bitPatternH & 0xFFFFF) == 0 && bitPatternL == 0; -} - -const double pi = 3.1415926535897932384626433832795; - -} // end anonymous namespace - - -namespace glslang { - -// -// The fold functions see if an operation on a constant can be done in place, -// without generating run-time code. -// -// Returns the node to keep using, which may or may not be the node passed in. -// -// Note: As of version 1.2, all constant operations must be folded. It is -// not opportunistic, but rather a semantic requirement. -// - -// -// Do folding between a pair of nodes. -// 'this' is the left-hand operand and 'rightConstantNode' is the right-hand operand. -// -// Returns a new node representing the result. 
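The fold() implementation that follows handles operand-size mismatches by smearing a scalar constant across the width of the vector operand (the "vec4(2,3,4,5) + 1.2" case mentioned in its comments) and then folding component-wise. A standalone illustration of that behaviour, using plain std::vector<double> in place of TConstUnionArray, is sketched here; it is not code from this file.

#include <cassert>
#include <cstddef>
#include <vector>

// Fold an addition of two constant operands, smearing a lone scalar first.
std::vector<double> foldAddSketch(std::vector<double> lhs, std::vector<double> rhs)
{
    if (lhs.size() == 1 && rhs.size() > 1) { const double s = lhs[0]; lhs.assign(rhs.size(), s); }
    if (rhs.size() == 1 && lhs.size() > 1) { const double s = rhs[0]; rhs.assign(lhs.size(), s); }
    assert(lhs.size() == rhs.size());

    std::vector<double> out(lhs.size());
    for (std::size_t i = 0; i < out.size(); ++i)
        out[i] = lhs[i] + rhs[i];            // component-wise, as in the EOpAdd case below
    return out;
}

// foldAddSketch({2, 3, 4, 5}, {1.2}) yields {3.2, 4.2, 5.2, 6.2}.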
-// -TIntermTyped* TIntermConstantUnion::fold(TOperator op, const TIntermTyped* rightConstantNode) const -{ - // For most cases, the return type matches the argument type, so set that - // up and just code to exceptions below. - TType returnType; - returnType.shallowCopy(getType()); - - // - // A pair of nodes is to be folded together - // - - const TIntermConstantUnion *rightNode = rightConstantNode->getAsConstantUnion(); - TConstUnionArray leftUnionArray = getConstArray(); - TConstUnionArray rightUnionArray = rightNode->getConstArray(); - - // Figure out the size of the result - int newComps; - int constComps; - switch(op) { - case EOpMatrixTimesMatrix: - newComps = rightNode->getMatrixCols() * getMatrixRows(); - break; - case EOpMatrixTimesVector: - newComps = getMatrixRows(); - break; - case EOpVectorTimesMatrix: - newComps = rightNode->getMatrixCols(); - break; - default: - newComps = getType().computeNumComponents(); - constComps = rightConstantNode->getType().computeNumComponents(); - if (constComps == 1 && newComps > 1) { - // for a case like vec4 f = vec4(2,3,4,5) + 1.2; - TConstUnionArray smearedArray(newComps, rightNode->getConstArray()[0]); - rightUnionArray = smearedArray; - } else if (constComps > 1 && newComps == 1) { - // for a case like vec4 f = 1.2 + vec4(2,3,4,5); - newComps = constComps; - rightUnionArray = rightNode->getConstArray(); - TConstUnionArray smearedArray(newComps, getConstArray()[0]); - leftUnionArray = smearedArray; - returnType.shallowCopy(rightNode->getType()); - } - break; - } - - TConstUnionArray newConstArray(newComps); - TType constBool(EbtBool, EvqConst); - - switch(op) { - case EOpAdd: - for (int i = 0; i < newComps; i++) - newConstArray[i] = leftUnionArray[i] + rightUnionArray[i]; - break; - case EOpSub: - for (int i = 0; i < newComps; i++) - newConstArray[i] = leftUnionArray[i] - rightUnionArray[i]; - break; - - case EOpMul: - case EOpVectorTimesScalar: - case EOpMatrixTimesScalar: - for (int i = 0; i < newComps; i++) - newConstArray[i] = leftUnionArray[i] * rightUnionArray[i]; - break; - case EOpMatrixTimesMatrix: - for (int row = 0; row < getMatrixRows(); row++) { - for (int column = 0; column < rightNode->getMatrixCols(); column++) { - double sum = 0.0f; - for (int i = 0; i < rightNode->getMatrixRows(); i++) - sum += leftUnionArray[i * getMatrixRows() + row].getDConst() * rightUnionArray[column * rightNode->getMatrixRows() + i].getDConst(); - newConstArray[column * getMatrixRows() + row].setDConst(sum); - } - } - returnType.shallowCopy(TType(getType().getBasicType(), EvqConst, 0, rightNode->getMatrixCols(), getMatrixRows())); - break; - case EOpDiv: - for (int i = 0; i < newComps; i++) { - switch (getType().getBasicType()) { - case EbtDouble: - case EbtFloat: -#ifdef AMD_EXTENSIONS - case EbtFloat16: -#endif - newConstArray[i].setDConst(leftUnionArray[i].getDConst() / rightUnionArray[i].getDConst()); - break; - - case EbtInt: - if (rightUnionArray[i] == 0) - newConstArray[i].setIConst(0x7FFFFFFF); - else if (rightUnionArray[i].getIConst() == -1 && leftUnionArray[i].getIConst() == (int)0x80000000) - newConstArray[i].setIConst(0x80000000); - else - newConstArray[i].setIConst(leftUnionArray[i].getIConst() / rightUnionArray[i].getIConst()); - break; - - case EbtUint: - if (rightUnionArray[i] == 0) { - newConstArray[i].setUConst(0xFFFFFFFFu); - } else - newConstArray[i].setUConst(leftUnionArray[i].getUConst() / rightUnionArray[i].getUConst()); - break; - - case EbtInt64: - if (rightUnionArray[i] == 0) - 
newConstArray[i].setI64Const(0x7FFFFFFFFFFFFFFFll); - else if (rightUnionArray[i].getI64Const() == -1 && leftUnionArray[i].getI64Const() == (long long)0x8000000000000000) - newConstArray[i].setI64Const(0x8000000000000000); - else - newConstArray[i].setI64Const(leftUnionArray[i].getI64Const() / rightUnionArray[i].getI64Const()); - break; - - case EbtUint64: - if (rightUnionArray[i] == 0) { - newConstArray[i].setU64Const(0xFFFFFFFFFFFFFFFFull); - } else - newConstArray[i].setU64Const(leftUnionArray[i].getU64Const() / rightUnionArray[i].getU64Const()); - break; -#ifdef AMD_EXTENSIONS - case EbtInt16: - if (rightUnionArray[i] == 0) - newConstArray[i].setIConst(0x7FFF); - else if (rightUnionArray[i].getIConst() == -1 && leftUnionArray[i].getIConst() == (int)0x8000) - newConstArray[i].setIConst(0x8000); - else - newConstArray[i].setIConst(leftUnionArray[i].getIConst() / rightUnionArray[i].getIConst()); - break; - - case EbtUint16: - if (rightUnionArray[i] == 0) { - newConstArray[i].setUConst(0xFFFFu); - } else - newConstArray[i].setUConst(leftUnionArray[i].getUConst() / rightUnionArray[i].getUConst()); - break; -#endif - default: - return 0; - } - } - break; - - case EOpMatrixTimesVector: - for (int i = 0; i < getMatrixRows(); i++) { - double sum = 0.0f; - for (int j = 0; j < rightNode->getVectorSize(); j++) { - sum += leftUnionArray[j*getMatrixRows() + i].getDConst() * rightUnionArray[j].getDConst(); - } - newConstArray[i].setDConst(sum); - } - - returnType.shallowCopy(TType(getBasicType(), EvqConst, getMatrixRows())); - break; - - case EOpVectorTimesMatrix: - for (int i = 0; i < rightNode->getMatrixCols(); i++) { - double sum = 0.0f; - for (int j = 0; j < getVectorSize(); j++) - sum += leftUnionArray[j].getDConst() * rightUnionArray[i*rightNode->getMatrixRows() + j].getDConst(); - newConstArray[i].setDConst(sum); - } - - returnType.shallowCopy(TType(getBasicType(), EvqConst, rightNode->getMatrixCols())); - break; - - case EOpMod: - for (int i = 0; i < newComps; i++) { - if (rightUnionArray[i] == 0) - newConstArray[i] = leftUnionArray[i]; - else - newConstArray[i] = leftUnionArray[i] % rightUnionArray[i]; - } - break; - - case EOpRightShift: - for (int i = 0; i < newComps; i++) - newConstArray[i] = leftUnionArray[i] >> rightUnionArray[i]; - break; - - case EOpLeftShift: - for (int i = 0; i < newComps; i++) - newConstArray[i] = leftUnionArray[i] << rightUnionArray[i]; - break; - - case EOpAnd: - for (int i = 0; i < newComps; i++) - newConstArray[i] = leftUnionArray[i] & rightUnionArray[i]; - break; - case EOpInclusiveOr: - for (int i = 0; i < newComps; i++) - newConstArray[i] = leftUnionArray[i] | rightUnionArray[i]; - break; - case EOpExclusiveOr: - for (int i = 0; i < newComps; i++) - newConstArray[i] = leftUnionArray[i] ^ rightUnionArray[i]; - break; - - case EOpLogicalAnd: // this code is written for possible future use, will not get executed currently - for (int i = 0; i < newComps; i++) - newConstArray[i] = leftUnionArray[i] && rightUnionArray[i]; - break; - - case EOpLogicalOr: // this code is written for possible future use, will not get executed currently - for (int i = 0; i < newComps; i++) - newConstArray[i] = leftUnionArray[i] || rightUnionArray[i]; - break; - - case EOpLogicalXor: - for (int i = 0; i < newComps; i++) { - switch (getType().getBasicType()) { - case EbtBool: newConstArray[i].setBConst((leftUnionArray[i] == rightUnionArray[i]) ? 
false : true); break; - default: assert(false && "Default missing"); - } - } - break; - - case EOpLessThan: - newConstArray[0].setBConst(leftUnionArray[0] < rightUnionArray[0]); - returnType.shallowCopy(constBool); - break; - case EOpGreaterThan: - newConstArray[0].setBConst(leftUnionArray[0] > rightUnionArray[0]); - returnType.shallowCopy(constBool); - break; - case EOpLessThanEqual: - newConstArray[0].setBConst(! (leftUnionArray[0] > rightUnionArray[0])); - returnType.shallowCopy(constBool); - break; - case EOpGreaterThanEqual: - newConstArray[0].setBConst(! (leftUnionArray[0] < rightUnionArray[0])); - returnType.shallowCopy(constBool); - break; - case EOpEqual: - newConstArray[0].setBConst(rightNode->getConstArray() == leftUnionArray); - returnType.shallowCopy(constBool); - break; - case EOpNotEqual: - newConstArray[0].setBConst(rightNode->getConstArray() != leftUnionArray); - returnType.shallowCopy(constBool); - break; - - default: - return 0; - } - - TIntermConstantUnion *newNode = new TIntermConstantUnion(newConstArray, returnType); - newNode->setLoc(getLoc()); - - return newNode; -} - -// -// Do single unary node folding -// -// Returns a new node representing the result. -// -TIntermTyped* TIntermConstantUnion::fold(TOperator op, const TType& returnType) const -{ - // First, size the result, which is mostly the same as the argument's size, - // but not always, and classify what is componentwise. - // Also, eliminate cases that can't be compile-time constant. - int resultSize; - bool componentWise = true; - - int objectSize = getType().computeNumComponents(); - switch (op) { - case EOpDeterminant: - case EOpAny: - case EOpAll: - case EOpLength: - componentWise = false; - resultSize = 1; - break; - - case EOpEmitStreamVertex: - case EOpEndStreamPrimitive: - // These don't actually fold - return 0; - - case EOpPackSnorm2x16: - case EOpPackUnorm2x16: - case EOpPackHalf2x16: - componentWise = false; - resultSize = 1; - break; - - case EOpUnpackSnorm2x16: - case EOpUnpackUnorm2x16: - case EOpUnpackHalf2x16: - componentWise = false; - resultSize = 2; - break; - - case EOpNormalize: - componentWise = false; - resultSize = objectSize; - break; - - default: - resultSize = objectSize; - break; - } - - // Set up for processing - TConstUnionArray newConstArray(resultSize); - const TConstUnionArray& unionArray = getConstArray(); - - // Process non-component-wise operations - switch (op) { - case EOpLength: - case EOpNormalize: - { - double sum = 0; - for (int i = 0; i < objectSize; i++) - sum += unionArray[i].getDConst() * unionArray[i].getDConst(); - double length = sqrt(sum); - if (op == EOpLength) - newConstArray[0].setDConst(length); - else { - for (int i = 0; i < objectSize; i++) - newConstArray[i].setDConst(unionArray[i].getDConst() / length); - } - break; - } - - case EOpAny: - { - bool result = false; - for (int i = 0; i < objectSize; i++) { - if (unionArray[i].getBConst()) - result = true; - } - newConstArray[0].setBConst(result); - break; - } - case EOpAll: - { - bool result = true; - for (int i = 0; i < objectSize; i++) { - if (! 
unionArray[i].getBConst()) - result = false; - } - newConstArray[0].setBConst(result); - break; - } - - // TODO: 3.0 Functionality: unary constant folding: the rest of the ops have to be fleshed out - - case EOpPackSnorm2x16: - case EOpPackUnorm2x16: - case EOpPackHalf2x16: - - case EOpUnpackSnorm2x16: - case EOpUnpackUnorm2x16: - case EOpUnpackHalf2x16: - - case EOpDeterminant: - case EOpMatrixInverse: - case EOpTranspose: - return 0; - - default: - assert(componentWise); - break; - } - - // Turn off the componentwise loop - if (! componentWise) - objectSize = 0; - - // Process component-wise operations - for (int i = 0; i < objectSize; i++) { - switch (op) { - case EOpNegative: - switch (getType().getBasicType()) { - case EbtDouble: -#ifdef AMD_EXTENSIONS - case EbtFloat16: -#endif - case EbtFloat: newConstArray[i].setDConst(-unionArray[i].getDConst()); break; -#ifdef AMD_EXTENSIONS - case EbtInt16: -#endif - case EbtInt: newConstArray[i].setIConst(-unionArray[i].getIConst()); break; -#ifdef AMD_EXTENSIONS - case EbtUint16: -#endif - case EbtUint: newConstArray[i].setUConst(static_cast(-static_cast(unionArray[i].getUConst()))); break; - case EbtInt64: newConstArray[i].setI64Const(-unionArray[i].getI64Const()); break; - case EbtUint64: newConstArray[i].setU64Const(static_cast(-static_cast(unionArray[i].getU64Const()))); break; - default: - return 0; - } - break; - case EOpLogicalNot: - case EOpVectorLogicalNot: - switch (getType().getBasicType()) { - case EbtBool: newConstArray[i].setBConst(!unionArray[i].getBConst()); break; - default: - return 0; - } - break; - case EOpBitwiseNot: - newConstArray[i] = ~unionArray[i]; - break; - case EOpRadians: - newConstArray[i].setDConst(unionArray[i].getDConst() * pi / 180.0); - break; - case EOpDegrees: - newConstArray[i].setDConst(unionArray[i].getDConst() * 180.0 / pi); - break; - case EOpSin: - newConstArray[i].setDConst(sin(unionArray[i].getDConst())); - break; - case EOpCos: - newConstArray[i].setDConst(cos(unionArray[i].getDConst())); - break; - case EOpTan: - newConstArray[i].setDConst(tan(unionArray[i].getDConst())); - break; - case EOpAsin: - newConstArray[i].setDConst(asin(unionArray[i].getDConst())); - break; - case EOpAcos: - newConstArray[i].setDConst(acos(unionArray[i].getDConst())); - break; - case EOpAtan: - newConstArray[i].setDConst(atan(unionArray[i].getDConst())); - break; - - case EOpDPdx: - case EOpDPdy: - case EOpFwidth: - case EOpDPdxFine: - case EOpDPdyFine: - case EOpFwidthFine: - case EOpDPdxCoarse: - case EOpDPdyCoarse: - case EOpFwidthCoarse: - // The derivatives are all mandated to create a constant 0. 
- newConstArray[i].setDConst(0.0); - break; - - case EOpExp: - newConstArray[i].setDConst(exp(unionArray[i].getDConst())); - break; - case EOpLog: - newConstArray[i].setDConst(log(unionArray[i].getDConst())); - break; - case EOpExp2: - { - const double inv_log2_e = 0.69314718055994530941723212145818; - newConstArray[i].setDConst(exp(unionArray[i].getDConst() * inv_log2_e)); - break; - } - case EOpLog2: - { - const double log2_e = 1.4426950408889634073599246810019; - newConstArray[i].setDConst(log2_e * log(unionArray[i].getDConst())); - break; - } - case EOpSqrt: - newConstArray[i].setDConst(sqrt(unionArray[i].getDConst())); - break; - case EOpInverseSqrt: - newConstArray[i].setDConst(1.0 / sqrt(unionArray[i].getDConst())); - break; - - case EOpAbs: - if (unionArray[i].getType() == EbtDouble) - newConstArray[i].setDConst(fabs(unionArray[i].getDConst())); - else if (unionArray[i].getType() == EbtInt) - newConstArray[i].setIConst(abs(unionArray[i].getIConst())); - else - newConstArray[i] = unionArray[i]; - break; - case EOpSign: - #define SIGN(X) (X == 0 ? 0 : (X < 0 ? -1 : 1)) - if (unionArray[i].getType() == EbtDouble) - newConstArray[i].setDConst(SIGN(unionArray[i].getDConst())); - else - newConstArray[i].setIConst(SIGN(unionArray[i].getIConst())); - break; - case EOpFloor: - newConstArray[i].setDConst(floor(unionArray[i].getDConst())); - break; - case EOpTrunc: - if (unionArray[i].getDConst() > 0) - newConstArray[i].setDConst(floor(unionArray[i].getDConst())); - else - newConstArray[i].setDConst(ceil(unionArray[i].getDConst())); - break; - case EOpRound: - newConstArray[i].setDConst(floor(0.5 + unionArray[i].getDConst())); - break; - case EOpRoundEven: - { - double flr = floor(unionArray[i].getDConst()); - bool even = flr / 2.0 == floor(flr / 2.0); - double rounded = even ? ceil(unionArray[i].getDConst() - 0.5) : floor(unionArray[i].getDConst() + 0.5); - newConstArray[i].setDConst(rounded); - break; - } - case EOpCeil: - newConstArray[i].setDConst(ceil(unionArray[i].getDConst())); - break; - case EOpFract: - { - double x = unionArray[i].getDConst(); - newConstArray[i].setDConst(x - floor(x)); - break; - } - - case EOpIsNan: - { - newConstArray[i].setBConst(isNan(unionArray[i].getDConst())); - break; - } - case EOpIsInf: - { - newConstArray[i].setBConst(isInf(unionArray[i].getDConst())); - break; - } - - // TODO: 3.0 Functionality: unary constant folding: the rest of the ops have to be fleshed out - - case EOpSinh: - case EOpCosh: - case EOpTanh: - case EOpAsinh: - case EOpAcosh: - case EOpAtanh: - - case EOpFloatBitsToInt: - case EOpFloatBitsToUint: - case EOpIntBitsToFloat: - case EOpUintBitsToFloat: - case EOpDoubleBitsToInt64: - case EOpDoubleBitsToUint64: - case EOpInt64BitsToDouble: - case EOpUint64BitsToDouble: -#ifdef AMD_EXTENSIONS - case EOpFloat16BitsToInt16: - case EOpFloat16BitsToUint16: - case EOpInt16BitsToFloat16: - case EOpUint16BitsToFloat16: -#endif - - default: - return 0; - } - } - - TIntermConstantUnion *newNode = new TIntermConstantUnion(newConstArray, returnType); - newNode->getWritableType().getQualifier().storage = EvqConst; - newNode->setLoc(getLoc()); - - return newNode; -} - -// -// Do constant folding for an aggregate node that has all its children -// as constants and an operator that requires constant folding. -// -TIntermTyped* TIntermediate::fold(TIntermAggregate* aggrNode) -{ - if (aggrNode == nullptr) - return aggrNode; - - if (! 
areAllChildConst(aggrNode)) - return aggrNode; - - if (aggrNode->isConstructor()) - return foldConstructor(aggrNode); - - TIntermSequence& children = aggrNode->getSequence(); - - // First, see if this is an operation to constant fold, kick out if not, - // see what size the result is if so. - - bool componentwise = false; // will also say componentwise if a scalar argument gets repeated to make per-component results - int objectSize; - switch (aggrNode->getOp()) { - case EOpAtan: - case EOpPow: - case EOpMin: - case EOpMax: - case EOpMix: - case EOpClamp: - case EOpLessThan: - case EOpGreaterThan: - case EOpLessThanEqual: - case EOpGreaterThanEqual: - case EOpVectorEqual: - case EOpVectorNotEqual: - componentwise = true; - objectSize = children[0]->getAsConstantUnion()->getType().computeNumComponents(); - break; - case EOpCross: - case EOpReflect: - case EOpRefract: - case EOpFaceForward: - objectSize = children[0]->getAsConstantUnion()->getType().computeNumComponents(); - break; - case EOpDistance: - case EOpDot: - objectSize = 1; - break; - case EOpOuterProduct: - objectSize = children[0]->getAsTyped()->getType().getVectorSize() * - children[1]->getAsTyped()->getType().getVectorSize(); - break; - case EOpStep: - componentwise = true; - objectSize = std::max(children[0]->getAsTyped()->getType().getVectorSize(), - children[1]->getAsTyped()->getType().getVectorSize()); - break; - case EOpSmoothStep: - componentwise = true; - objectSize = std::max(children[0]->getAsTyped()->getType().getVectorSize(), - children[2]->getAsTyped()->getType().getVectorSize()); - break; - default: - return aggrNode; - } - TConstUnionArray newConstArray(objectSize); - - TVector childConstUnions; - for (unsigned int arg = 0; arg < children.size(); ++arg) - childConstUnions.push_back(children[arg]->getAsConstantUnion()->getConstArray()); - - // Second, do the actual folding - - bool isFloatingPoint = children[0]->getAsTyped()->getBasicType() == EbtFloat || -#ifdef AMD_EXTENSIONS - children[0]->getAsTyped()->getBasicType() == EbtFloat16 || -#endif - children[0]->getAsTyped()->getBasicType() == EbtDouble; - bool isSigned = children[0]->getAsTyped()->getBasicType() == EbtInt || -#ifdef AMD_EXTENSIONS - children[0]->getAsTyped()->getBasicType() == EbtInt16 || -#endif - children[0]->getAsTyped()->getBasicType() == EbtInt64; - bool isInt64 = children[0]->getAsTyped()->getBasicType() == EbtInt64 || - children[0]->getAsTyped()->getBasicType() == EbtUint64; - if (componentwise) { - for (int comp = 0; comp < objectSize; comp++) { - - // some arguments are scalars instead of matching vectors; simulate a smear - int arg0comp = std::min(comp, children[0]->getAsTyped()->getType().getVectorSize() - 1); - int arg1comp = 0; - if (children.size() > 1) - arg1comp = std::min(comp, children[1]->getAsTyped()->getType().getVectorSize() - 1); - int arg2comp = 0; - if (children.size() > 2) - arg2comp = std::min(comp, children[2]->getAsTyped()->getType().getVectorSize() - 1); - - switch (aggrNode->getOp()) { - case EOpAtan: - newConstArray[comp].setDConst(atan2(childConstUnions[0][arg0comp].getDConst(), childConstUnions[1][arg1comp].getDConst())); - break; - case EOpPow: - newConstArray[comp].setDConst(pow(childConstUnions[0][arg0comp].getDConst(), childConstUnions[1][arg1comp].getDConst())); - break; - case EOpMin: - if (isFloatingPoint) - newConstArray[comp].setDConst(std::min(childConstUnions[0][arg0comp].getDConst(), childConstUnions[1][arg1comp].getDConst())); - else if (isSigned) { - if (isInt64) - 
newConstArray[comp].setI64Const(std::min(childConstUnions[0][arg0comp].getI64Const(), childConstUnions[1][arg1comp].getI64Const())); - else - newConstArray[comp].setIConst(std::min(childConstUnions[0][arg0comp].getIConst(), childConstUnions[1][arg1comp].getIConst())); - } else { - if (isInt64) - newConstArray[comp].setU64Const(std::min(childConstUnions[0][arg0comp].getU64Const(), childConstUnions[1][arg1comp].getU64Const())); - else - newConstArray[comp].setUConst(std::min(childConstUnions[0][arg0comp].getUConst(), childConstUnions[1][arg1comp].getUConst())); - } - break; - case EOpMax: - if (isFloatingPoint) - newConstArray[comp].setDConst(std::max(childConstUnions[0][arg0comp].getDConst(), childConstUnions[1][arg1comp].getDConst())); - else if (isSigned) { - if (isInt64) - newConstArray[comp].setI64Const(std::max(childConstUnions[0][arg0comp].getI64Const(), childConstUnions[1][arg1comp].getI64Const())); - else - newConstArray[comp].setIConst(std::max(childConstUnions[0][arg0comp].getIConst(), childConstUnions[1][arg1comp].getIConst())); - } else { - if (isInt64) - newConstArray[comp].setU64Const(std::max(childConstUnions[0][arg0comp].getU64Const(), childConstUnions[1][arg1comp].getU64Const())); - else - newConstArray[comp].setUConst(std::max(childConstUnions[0][arg0comp].getUConst(), childConstUnions[1][arg1comp].getUConst())); - } - break; - case EOpClamp: - if (isFloatingPoint) - newConstArray[comp].setDConst(std::min(std::max(childConstUnions[0][arg0comp].getDConst(), childConstUnions[1][arg1comp].getDConst()), - childConstUnions[2][arg2comp].getDConst())); - else if (isSigned) { - if (isInt64) - newConstArray[comp].setI64Const(std::min(std::max(childConstUnions[0][arg0comp].getI64Const(), childConstUnions[1][arg1comp].getI64Const()), - childConstUnions[2][arg2comp].getI64Const())); - else - newConstArray[comp].setIConst(std::min(std::max(childConstUnions[0][arg0comp].getIConst(), childConstUnions[1][arg1comp].getIConst()), - childConstUnions[2][arg2comp].getIConst())); - } else { - if (isInt64) - newConstArray[comp].setU64Const(std::min(std::max(childConstUnions[0][arg0comp].getU64Const(), childConstUnions[1][arg1comp].getU64Const()), - childConstUnions[2][arg2comp].getU64Const())); - else - newConstArray[comp].setUConst(std::min(std::max(childConstUnions[0][arg0comp].getUConst(), childConstUnions[1][arg1comp].getUConst()), - childConstUnions[2][arg2comp].getUConst())); - } - break; - case EOpLessThan: - newConstArray[comp].setBConst(childConstUnions[0][arg0comp] < childConstUnions[1][arg1comp]); - break; - case EOpGreaterThan: - newConstArray[comp].setBConst(childConstUnions[0][arg0comp] > childConstUnions[1][arg1comp]); - break; - case EOpLessThanEqual: - newConstArray[comp].setBConst(! (childConstUnions[0][arg0comp] > childConstUnions[1][arg1comp])); - break; - case EOpGreaterThanEqual: - newConstArray[comp].setBConst(! (childConstUnions[0][arg0comp] < childConstUnions[1][arg1comp])); - break; - case EOpVectorEqual: - newConstArray[comp].setBConst(childConstUnions[0][arg0comp] == childConstUnions[1][arg1comp]); - break; - case EOpVectorNotEqual: - newConstArray[comp].setBConst(childConstUnions[0][arg0comp] != childConstUnions[1][arg1comp]); - break; - case EOpMix: - if (children[2]->getAsTyped()->getBasicType() == EbtBool) - newConstArray[comp].setDConst(childConstUnions[2][arg2comp].getBConst() ? 
childConstUnions[1][arg1comp].getDConst() : - childConstUnions[0][arg0comp].getDConst()); - else - newConstArray[comp].setDConst(childConstUnions[0][arg0comp].getDConst() * (1.0 - childConstUnions[2][arg2comp].getDConst()) + - childConstUnions[1][arg1comp].getDConst() * childConstUnions[2][arg2comp].getDConst()); - break; - case EOpStep: - newConstArray[comp].setDConst(childConstUnions[1][arg1comp].getDConst() < childConstUnions[0][arg0comp].getDConst() ? 0.0 : 1.0); - break; - case EOpSmoothStep: - { - double t = (childConstUnions[2][arg2comp].getDConst() - childConstUnions[0][arg0comp].getDConst()) / - (childConstUnions[1][arg1comp].getDConst() - childConstUnions[0][arg0comp].getDConst()); - if (t < 0.0) - t = 0.0; - if (t > 1.0) - t = 1.0; - newConstArray[comp].setDConst(t * t * (3.0 - 2.0 * t)); - break; - } - default: - return aggrNode; - } - } - } else { - // Non-componentwise... - - int numComps = children[0]->getAsConstantUnion()->getType().computeNumComponents(); - double dot; - - switch (aggrNode->getOp()) { - case EOpDistance: - { - double sum = 0.0; - for (int comp = 0; comp < numComps; ++comp) { - double diff = childConstUnions[1][comp].getDConst() - childConstUnions[0][comp].getDConst(); - sum += diff * diff; - } - newConstArray[0].setDConst(sqrt(sum)); - break; - } - case EOpDot: - newConstArray[0].setDConst(childConstUnions[0].dot(childConstUnions[1])); - break; - case EOpCross: - newConstArray[0] = childConstUnions[0][1] * childConstUnions[1][2] - childConstUnions[0][2] * childConstUnions[1][1]; - newConstArray[1] = childConstUnions[0][2] * childConstUnions[1][0] - childConstUnions[0][0] * childConstUnions[1][2]; - newConstArray[2] = childConstUnions[0][0] * childConstUnions[1][1] - childConstUnions[0][1] * childConstUnions[1][0]; - break; - case EOpFaceForward: - // If dot(Nref, I) < 0 return N, otherwise return -N: Arguments are (N, I, Nref). - dot = childConstUnions[1].dot(childConstUnions[2]); - for (int comp = 0; comp < numComps; ++comp) { - if (dot < 0.0) - newConstArray[comp] = childConstUnions[0][comp]; - else - newConstArray[comp].setDConst(-childConstUnions[0][comp].getDConst()); - } - break; - case EOpReflect: - // I - 2 * dot(N, I) * N: Arguments are (I, N). - dot = childConstUnions[0].dot(childConstUnions[1]); - dot *= 2.0; - for (int comp = 0; comp < numComps; ++comp) - newConstArray[comp].setDConst(childConstUnions[0][comp].getDConst() - dot * childConstUnions[1][comp].getDConst()); - break; - case EOpRefract: - { - // Arguments are (I, N, eta). 
- // k = 1.0 - eta * eta * (1.0 - dot(N, I) * dot(N, I)) - // if (k < 0.0) - // return dvec(0.0) - // else - // return eta * I - (eta * dot(N, I) + sqrt(k)) * N - dot = childConstUnions[0].dot(childConstUnions[1]); - double eta = childConstUnions[2][0].getDConst(); - double k = 1.0 - eta * eta * (1.0 - dot * dot); - if (k < 0.0) { - for (int comp = 0; comp < numComps; ++comp) - newConstArray[comp].setDConst(0.0); - } else { - for (int comp = 0; comp < numComps; ++comp) - newConstArray[comp].setDConst(eta * childConstUnions[0][comp].getDConst() - (eta * dot + sqrt(k)) * childConstUnions[1][comp].getDConst()); - } - break; - } - case EOpOuterProduct: - { - int numRows = numComps; - int numCols = children[1]->getAsConstantUnion()->getType().computeNumComponents(); - for (int row = 0; row < numRows; ++row) - for (int col = 0; col < numCols; ++col) - newConstArray[col * numRows + row] = childConstUnions[0][row] * childConstUnions[1][col]; - break; - } - default: - return aggrNode; - } - } - - TIntermConstantUnion *newNode = new TIntermConstantUnion(newConstArray, aggrNode->getType()); - newNode->getWritableType().getQualifier().storage = EvqConst; - newNode->setLoc(aggrNode->getLoc()); - - return newNode; -} - -bool TIntermediate::areAllChildConst(TIntermAggregate* aggrNode) -{ - bool allConstant = true; - - // check if all the child nodes are constants so that they can be inserted into - // the parent node - if (aggrNode) { - TIntermSequence& childSequenceVector = aggrNode->getSequence(); - for (TIntermSequence::iterator p = childSequenceVector.begin(); - p != childSequenceVector.end(); p++) { - if (!(*p)->getAsTyped()->getAsConstantUnion()) - return false; - } - } - - return allConstant; -} - -TIntermTyped* TIntermediate::foldConstructor(TIntermAggregate* aggrNode) -{ - bool error = false; - - TConstUnionArray unionArray(aggrNode->getType().computeNumComponents()); - if (aggrNode->getSequence().size() == 1) - error = parseConstTree(aggrNode, unionArray, aggrNode->getOp(), aggrNode->getType(), true); - else - error = parseConstTree(aggrNode, unionArray, aggrNode->getOp(), aggrNode->getType()); - - if (error) - return aggrNode; - - return addConstantUnion(unionArray, aggrNode->getType(), aggrNode->getLoc()); -} - -// -// Constant folding of a bracket (array-style) dereference or struct-like dot -// dereference. Can handle anything except a multi-character swizzle, though -// all swizzles may go to foldSwizzle(). -// -TIntermTyped* TIntermediate::foldDereference(TIntermTyped* node, int index, const TSourceLoc& loc) -{ - TType dereferencedType(node->getType(), index); - dereferencedType.getQualifier().storage = EvqConst; - TIntermTyped* result = 0; - int size = dereferencedType.computeNumComponents(); - - // arrays, vectors, matrices, all use simple multiplicative math - // while structures need to add up heterogeneous members - int start; - if (node->isArray() || ! node->isStruct()) - start = size * index; - else { - // it is a structure - assert(node->isStruct()); - start = 0; - for (int i = 0; i < index; ++i) - start += (*node->getType().getStruct())[i].type->computeNumComponents(); - } - - result = addConstantUnion(TConstUnionArray(node->getAsConstantUnion()->getConstArray(), start, size), node->getType(), loc); - - if (result == 0) - result = node; - else - result->setType(dereferencedType); - - return result; -} - -// -// Make a constant vector node or constant scalar node, representing a given -// constant vector and constant swizzle into it. 
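The foldSwizzle() routine that follows implements the last piece of compile-time dereferencing: selecting components of an already-constant vector, so that something like "const vec3 v = vec3(1.0, 2.0, 3.0); const vec2 w = v.zx;" needs no run-time code. Below is a standalone sketch of that selection step, not taken from this file, again using plain std::vector in place of TConstUnionArray.

#include <cstddef>
#include <vector>

// Select components of a constant vector according to swizzle selectors.
std::vector<double> foldSwizzleSketch(const std::vector<double>& source,
                                      const std::vector<int>& selectors)
{
    std::vector<double> result(selectors.size());
    for (std::size_t i = 0; i < selectors.size(); ++i)
        result[i] = source[selectors[i]];    // mirrors constArray[i] = unionArray[selectors[i]]
    return result;
}

// foldSwizzleSketch({1.0, 2.0, 3.0}, {2, 0}) yields {3.0, 1.0}   // the ".zx" swizzle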
-// -TIntermTyped* TIntermediate::foldSwizzle(TIntermTyped* node, TSwizzleSelectors& selectors, const TSourceLoc& loc) -{ - const TConstUnionArray& unionArray = node->getAsConstantUnion()->getConstArray(); - TConstUnionArray constArray(selectors.size()); - - for (int i = 0; i < selectors.size(); i++) - constArray[i] = unionArray[selectors[i]]; - - TIntermTyped* result = addConstantUnion(constArray, node->getType(), loc); - - if (result == 0) - result = node; - else - result->setType(TType(node->getBasicType(), EvqConst, selectors.size())); - - return result; -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/InfoSink.cpp b/deps/glslang/glslang/MachineIndependent/InfoSink.cpp deleted file mode 100644 index d00c4225..00000000 --- a/deps/glslang/glslang/MachineIndependent/InfoSink.cpp +++ /dev/null @@ -1,113 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. 
-// - -#include "../Include/InfoSink.h" - -#include - -namespace glslang { - -void TInfoSinkBase::append(const char* s) -{ - if (outputStream & EString) { - if (s == nullptr) - sink.append("(null)"); - else { - checkMem(strlen(s)); - sink.append(s); - } - } - -//#ifdef _WIN32 -// if (outputStream & EDebugger) -// OutputDebugString(s); -//#endif - - if (outputStream & EStdOut) - fprintf(stdout, "%s", s); -} - -void TInfoSinkBase::append(int count, char c) -{ - if (outputStream & EString) { - checkMem(count); - sink.append(count, c); - } - -//#ifdef _WIN32 -// if (outputStream & EDebugger) { -// char str[2]; -// str[0] = c; -// str[1] = '\0'; -// OutputDebugString(str); -// } -//#endif - - if (outputStream & EStdOut) - fprintf(stdout, "%c", c); -} - -void TInfoSinkBase::append(const TPersistString& t) -{ - if (outputStream & EString) { - checkMem(t.size()); - sink.append(t); - } - -//#ifdef _WIN32 -// if (outputStream & EDebugger) -// OutputDebugString(t.c_str()); -//#endif - - if (outputStream & EStdOut) - fprintf(stdout, "%s", t.c_str()); -} - -void TInfoSinkBase::append(const TString& t) -{ - if (outputStream & EString) { - checkMem(t.size()); - sink.append(t.c_str()); - } - -//#ifdef _WIN32 -// if (outputStream & EDebugger) -// OutputDebugString(t.c_str()); -//#endif - - if (outputStream & EStdOut) - fprintf(stdout, "%s", t.c_str()); -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/Initialize.cpp b/deps/glslang/glslang/MachineIndependent/Initialize.cpp deleted file mode 100644 index 6e6660e8..00000000 --- a/deps/glslang/glslang/MachineIndependent/Initialize.cpp +++ /dev/null @@ -1,6314 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2012-2016 LunarG, Inc. -// Copyright (C) 2015-2016 Google, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. 
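The TInfoSinkBase::append overloads above all follow one pattern: a single call fans out to whichever sinks are enabled in the outputStream bitmask, accumulating into a string and/or echoing to stdout. A compact restatement of that pattern, written as a self-contained sketch (names and flag values here are illustrative, not glslang's):

#include <cstdio>
#include <string>

// Bit flags for the enabled sinks.
enum MiniOutputStream { EMiniString = 0x1, EMiniStdOut = 0x2 };

struct MiniInfoSink {
    int outputStream = EMiniString;           // bitmask of enabled sinks
    std::string sink;                         // accumulated log text

    void append(const char* s)
    {
        const char* text = (s != nullptr) ? s : "(null)";
        if (outputStream & EMiniString)
            sink += text;                     // keep for later retrieval
        if (outputStream & EMiniStdOut)
            std::fprintf(stdout, "%s", text); // echo immediately
    }
};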
-// - -// -// Create strings that declare built-in definitions, add built-ins programmatically -// that cannot be expressed in the strings, and establish mappings between -// built-in functions and operators. -// -// Where to put a built-in: -// TBuiltIns::initialize(version,profile) context-independent textual built-ins; add them to the right string -// TBuiltIns::initialize(resources,...) context-dependent textual built-ins; add them to the right string -// TBuiltIns::identifyBuiltIns(...,symbolTable) context-independent programmatic additions/mappings to the symbol table, -// including identifying what extensions are needed if a version does not allow a symbol -// TBuiltIns::identifyBuiltIns(...,symbolTable, resources) context-dependent programmatic additions/mappings to the symbol table, -// including identifying what extensions are needed if a version does not allow a symbol -// - -#include "../Include/intermediate.h" -#include "Initialize.h" - -namespace glslang { - -// TODO: ARB_Compatability: do full extension support -const bool ARBCompatibility = true; - -const bool ForwardCompatibility = false; - -// change this back to false if depending on textual spellings of texturing calls when consuming the AST -// Using PureOperatorBuiltins=false is deprecated. -bool PureOperatorBuiltins = true; - -inline bool IncludeLegacy(int version, EProfile profile, const SpvVersion& spvVersion) -{ - return profile != EEsProfile && (version <= 130 || (spvVersion.spv == 0 && ARBCompatibility) || profile == ECompatibilityProfile); -} - -// Construct TBuiltInParseables base class. This can be used for language-common constructs. -TBuiltInParseables::TBuiltInParseables() -{ -} - -// Destroy TBuiltInParseables. -TBuiltInParseables::~TBuiltInParseables() -{ -} - -TBuiltIns::TBuiltIns() -{ - // Set up textual representations for making all the permutations - // of texturing/imaging functions. - prefixes[EbtFloat] = ""; - prefixes[EbtInt] = "i"; - prefixes[EbtUint] = "u"; - postfixes[2] = "2"; - postfixes[3] = "3"; - postfixes[4] = "4"; - - // Map from symbolic class of texturing dimension to numeric dimensions. - dimMap[Esd1D] = 1; - dimMap[Esd2D] = 2; - dimMap[EsdRect] = 2; - dimMap[Esd3D] = 3; - dimMap[EsdCube] = 3; - dimMap[EsdBuffer] = 1; - dimMap[EsdSubpass] = 2; // potientially unused for now -} - -TBuiltIns::~TBuiltIns() -{ -} - - -// -// Add all context-independent built-in functions and variables that are present -// for the given version and profile. Share common ones across stages, otherwise -// make stage-specific entries. -// -// Most built-ins variables can be added as simple text strings. Some need to -// be added programmatically, which is done later in IdentifyBuiltIns() below. -// -void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvVersion) -{ - //============================================================================ - // - // Prototypes for built-in functions seen by both vertex and fragment shaders. - // - //============================================================================ - - // - // Angle and Trigonometric Functions. 
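The prototype blocks that follow, starting with the angle and trigonometric functions, are plain declaration strings appended to commonBuiltins and later consumed when the built-in symbol table is populated; the prefixes/postfixes tables filled in by the TBuiltIns constructor above serve the same role for the texturing/imaging permutations that are generated programmatically. The sketch below illustrates the general idea of stamping out such strings per type; it is not the generator used in this file.

#include <string>
#include <vector>

// Compose a GLSL vector type name from a component prefix and a width,
// e.g. ("", 4) -> "vec4", ("i", 3) -> "ivec3", ("u", 2) -> "uvec2".
std::string makeVectorTypeName(const std::string& componentPrefix, int width)
{
    return componentPrefix + "vec" + std::to_string(width);
}

// Stamp out abs() prototypes for the float and signed-int vector flavours.
std::vector<std::string> makeAbsPrototypes()
{
    std::vector<std::string> protos;
    const char* prefixes[] = { "", "i" };
    for (const char* p : prefixes)
        for (int width = 2; width <= 4; ++width) {
            const std::string t = makeVectorTypeName(p, width);
            protos.push_back(t + " abs(" + t + " x);");
        }
    return protos;
}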
- // - commonBuiltins.append( - "float radians(float degrees);" - "vec2 radians(vec2 degrees);" - "vec3 radians(vec3 degrees);" - "vec4 radians(vec4 degrees);" - - "float degrees(float radians);" - "vec2 degrees(vec2 radians);" - "vec3 degrees(vec3 radians);" - "vec4 degrees(vec4 radians);" - - "float sin(float angle);" - "vec2 sin(vec2 angle);" - "vec3 sin(vec3 angle);" - "vec4 sin(vec4 angle);" - - "float cos(float angle);" - "vec2 cos(vec2 angle);" - "vec3 cos(vec3 angle);" - "vec4 cos(vec4 angle);" - - "float tan(float angle);" - "vec2 tan(vec2 angle);" - "vec3 tan(vec3 angle);" - "vec4 tan(vec4 angle);" - - "float asin(float x);" - "vec2 asin(vec2 x);" - "vec3 asin(vec3 x);" - "vec4 asin(vec4 x);" - - "float acos(float x);" - "vec2 acos(vec2 x);" - "vec3 acos(vec3 x);" - "vec4 acos(vec4 x);" - - "float atan(float y, float x);" - "vec2 atan(vec2 y, vec2 x);" - "vec3 atan(vec3 y, vec3 x);" - "vec4 atan(vec4 y, vec4 x);" - - "float atan(float y_over_x);" - "vec2 atan(vec2 y_over_x);" - "vec3 atan(vec3 y_over_x);" - "vec4 atan(vec4 y_over_x);" - - "\n"); - - if (version >= 130) { - commonBuiltins.append( - "float sinh(float angle);" - "vec2 sinh(vec2 angle);" - "vec3 sinh(vec3 angle);" - "vec4 sinh(vec4 angle);" - - "float cosh(float angle);" - "vec2 cosh(vec2 angle);" - "vec3 cosh(vec3 angle);" - "vec4 cosh(vec4 angle);" - - "float tanh(float angle);" - "vec2 tanh(vec2 angle);" - "vec3 tanh(vec3 angle);" - "vec4 tanh(vec4 angle);" - - "float asinh(float x);" - "vec2 asinh(vec2 x);" - "vec3 asinh(vec3 x);" - "vec4 asinh(vec4 x);" - - "float acosh(float x);" - "vec2 acosh(vec2 x);" - "vec3 acosh(vec3 x);" - "vec4 acosh(vec4 x);" - - "float atanh(float y_over_x);" - "vec2 atanh(vec2 y_over_x);" - "vec3 atanh(vec3 y_over_x);" - "vec4 atanh(vec4 y_over_x);" - - "\n"); - } - - // - // Exponential Functions. - // - commonBuiltins.append( - "float pow(float x, float y);" - "vec2 pow(vec2 x, vec2 y);" - "vec3 pow(vec3 x, vec3 y);" - "vec4 pow(vec4 x, vec4 y);" - - "float exp(float x);" - "vec2 exp(vec2 x);" - "vec3 exp(vec3 x);" - "vec4 exp(vec4 x);" - - "float log(float x);" - "vec2 log(vec2 x);" - "vec3 log(vec3 x);" - "vec4 log(vec4 x);" - - "float exp2(float x);" - "vec2 exp2(vec2 x);" - "vec3 exp2(vec3 x);" - "vec4 exp2(vec4 x);" - - "float log2(float x);" - "vec2 log2(vec2 x);" - "vec3 log2(vec3 x);" - "vec4 log2(vec4 x);" - - "float sqrt(float x);" - "vec2 sqrt(vec2 x);" - "vec3 sqrt(vec3 x);" - "vec4 sqrt(vec4 x);" - - "float inversesqrt(float x);" - "vec2 inversesqrt(vec2 x);" - "vec3 inversesqrt(vec3 x);" - "vec4 inversesqrt(vec4 x);" - - "\n"); - - // - // Common Functions. 
- // - commonBuiltins.append( - "float abs(float x);" - "vec2 abs(vec2 x);" - "vec3 abs(vec3 x);" - "vec4 abs(vec4 x);" - - "float sign(float x);" - "vec2 sign(vec2 x);" - "vec3 sign(vec3 x);" - "vec4 sign(vec4 x);" - - "float floor(float x);" - "vec2 floor(vec2 x);" - "vec3 floor(vec3 x);" - "vec4 floor(vec4 x);" - - "float ceil(float x);" - "vec2 ceil(vec2 x);" - "vec3 ceil(vec3 x);" - "vec4 ceil(vec4 x);" - - "float fract(float x);" - "vec2 fract(vec2 x);" - "vec3 fract(vec3 x);" - "vec4 fract(vec4 x);" - - "float mod(float x, float y);" - "vec2 mod(vec2 x, float y);" - "vec3 mod(vec3 x, float y);" - "vec4 mod(vec4 x, float y);" - "vec2 mod(vec2 x, vec2 y);" - "vec3 mod(vec3 x, vec3 y);" - "vec4 mod(vec4 x, vec4 y);" - - "float min(float x, float y);" - "vec2 min(vec2 x, float y);" - "vec3 min(vec3 x, float y);" - "vec4 min(vec4 x, float y);" - "vec2 min(vec2 x, vec2 y);" - "vec3 min(vec3 x, vec3 y);" - "vec4 min(vec4 x, vec4 y);" - - "float max(float x, float y);" - "vec2 max(vec2 x, float y);" - "vec3 max(vec3 x, float y);" - "vec4 max(vec4 x, float y);" - "vec2 max(vec2 x, vec2 y);" - "vec3 max(vec3 x, vec3 y);" - "vec4 max(vec4 x, vec4 y);" - - "float clamp(float x, float minVal, float maxVal);" - "vec2 clamp(vec2 x, float minVal, float maxVal);" - "vec3 clamp(vec3 x, float minVal, float maxVal);" - "vec4 clamp(vec4 x, float minVal, float maxVal);" - "vec2 clamp(vec2 x, vec2 minVal, vec2 maxVal);" - "vec3 clamp(vec3 x, vec3 minVal, vec3 maxVal);" - "vec4 clamp(vec4 x, vec4 minVal, vec4 maxVal);" - - "float mix(float x, float y, float a);" - "vec2 mix(vec2 x, vec2 y, float a);" - "vec3 mix(vec3 x, vec3 y, float a);" - "vec4 mix(vec4 x, vec4 y, float a);" - "vec2 mix(vec2 x, vec2 y, vec2 a);" - "vec3 mix(vec3 x, vec3 y, vec3 a);" - "vec4 mix(vec4 x, vec4 y, vec4 a);" - - "float step(float edge, float x);" - "vec2 step(vec2 edge, vec2 x);" - "vec3 step(vec3 edge, vec3 x);" - "vec4 step(vec4 edge, vec4 x);" - "vec2 step(float edge, vec2 x);" - "vec3 step(float edge, vec3 x);" - "vec4 step(float edge, vec4 x);" - - "float smoothstep(float edge0, float edge1, float x);" - "vec2 smoothstep(vec2 edge0, vec2 edge1, vec2 x);" - "vec3 smoothstep(vec3 edge0, vec3 edge1, vec3 x);" - "vec4 smoothstep(vec4 edge0, vec4 edge1, vec4 x);" - "vec2 smoothstep(float edge0, float edge1, vec2 x);" - "vec3 smoothstep(float edge0, float edge1, vec3 x);" - "vec4 smoothstep(float edge0, float edge1, vec4 x);" - - "\n"); - - if (version >= 130) { - commonBuiltins.append( - " int abs( int x);" - "ivec2 abs(ivec2 x);" - "ivec3 abs(ivec3 x);" - "ivec4 abs(ivec4 x);" - - " int sign( int x);" - "ivec2 sign(ivec2 x);" - "ivec3 sign(ivec3 x);" - "ivec4 sign(ivec4 x);" - - "float trunc(float x);" - "vec2 trunc(vec2 x);" - "vec3 trunc(vec3 x);" - "vec4 trunc(vec4 x);" - - "float round(float x);" - "vec2 round(vec2 x);" - "vec3 round(vec3 x);" - "vec4 round(vec4 x);" - - "float roundEven(float x);" - "vec2 roundEven(vec2 x);" - "vec3 roundEven(vec3 x);" - "vec4 roundEven(vec4 x);" - - "float modf(float, out float);" - "vec2 modf(vec2, out vec2 );" - "vec3 modf(vec3, out vec3 );" - "vec4 modf(vec4, out vec4 );" - - " int min(int x, int y);" - "ivec2 min(ivec2 x, int y);" - "ivec3 min(ivec3 x, int y);" - "ivec4 min(ivec4 x, int y);" - "ivec2 min(ivec2 x, ivec2 y);" - "ivec3 min(ivec3 x, ivec3 y);" - "ivec4 min(ivec4 x, ivec4 y);" - - " uint min(uint x, uint y);" - "uvec2 min(uvec2 x, uint y);" - "uvec3 min(uvec3 x, uint y);" - "uvec4 min(uvec4 x, uint y);" - "uvec2 min(uvec2 x, uvec2 y);" - "uvec3 min(uvec3 x, uvec3 y);" - 
"uvec4 min(uvec4 x, uvec4 y);" - - " int max(int x, int y);" - "ivec2 max(ivec2 x, int y);" - "ivec3 max(ivec3 x, int y);" - "ivec4 max(ivec4 x, int y);" - "ivec2 max(ivec2 x, ivec2 y);" - "ivec3 max(ivec3 x, ivec3 y);" - "ivec4 max(ivec4 x, ivec4 y);" - - " uint max(uint x, uint y);" - "uvec2 max(uvec2 x, uint y);" - "uvec3 max(uvec3 x, uint y);" - "uvec4 max(uvec4 x, uint y);" - "uvec2 max(uvec2 x, uvec2 y);" - "uvec3 max(uvec3 x, uvec3 y);" - "uvec4 max(uvec4 x, uvec4 y);" - - "int clamp(int x, int minVal, int maxVal);" - "ivec2 clamp(ivec2 x, int minVal, int maxVal);" - "ivec3 clamp(ivec3 x, int minVal, int maxVal);" - "ivec4 clamp(ivec4 x, int minVal, int maxVal);" - "ivec2 clamp(ivec2 x, ivec2 minVal, ivec2 maxVal);" - "ivec3 clamp(ivec3 x, ivec3 minVal, ivec3 maxVal);" - "ivec4 clamp(ivec4 x, ivec4 minVal, ivec4 maxVal);" - - "uint clamp(uint x, uint minVal, uint maxVal);" - "uvec2 clamp(uvec2 x, uint minVal, uint maxVal);" - "uvec3 clamp(uvec3 x, uint minVal, uint maxVal);" - "uvec4 clamp(uvec4 x, uint minVal, uint maxVal);" - "uvec2 clamp(uvec2 x, uvec2 minVal, uvec2 maxVal);" - "uvec3 clamp(uvec3 x, uvec3 minVal, uvec3 maxVal);" - "uvec4 clamp(uvec4 x, uvec4 minVal, uvec4 maxVal);" - - "float mix(float x, float y, bool a);" - "vec2 mix(vec2 x, vec2 y, bvec2 a);" - "vec3 mix(vec3 x, vec3 y, bvec3 a);" - "vec4 mix(vec4 x, vec4 y, bvec4 a);" - - "bool isnan(float x);" - "bvec2 isnan(vec2 x);" - "bvec3 isnan(vec3 x);" - "bvec4 isnan(vec4 x);" - - "bool isinf(float x);" - "bvec2 isinf(vec2 x);" - "bvec3 isinf(vec3 x);" - "bvec4 isinf(vec4 x);" - - "\n"); - } - - // - // double functions added to desktop 4.00, but not fma, frexp, ldexp, or pack/unpack - // - if (profile != EEsProfile && version >= 400) { - commonBuiltins.append( - - "double sqrt(double);" - "dvec2 sqrt(dvec2);" - "dvec3 sqrt(dvec3);" - "dvec4 sqrt(dvec4);" - - "double inversesqrt(double);" - "dvec2 inversesqrt(dvec2);" - "dvec3 inversesqrt(dvec3);" - "dvec4 inversesqrt(dvec4);" - - "double abs(double);" - "dvec2 abs(dvec2);" - "dvec3 abs(dvec3);" - "dvec4 abs(dvec4);" - - "double sign(double);" - "dvec2 sign(dvec2);" - "dvec3 sign(dvec3);" - "dvec4 sign(dvec4);" - - "double floor(double);" - "dvec2 floor(dvec2);" - "dvec3 floor(dvec3);" - "dvec4 floor(dvec4);" - - "double trunc(double);" - "dvec2 trunc(dvec2);" - "dvec3 trunc(dvec3);" - "dvec4 trunc(dvec4);" - - "double round(double);" - "dvec2 round(dvec2);" - "dvec3 round(dvec3);" - "dvec4 round(dvec4);" - - "double roundEven(double);" - "dvec2 roundEven(dvec2);" - "dvec3 roundEven(dvec3);" - "dvec4 roundEven(dvec4);" - - "double ceil(double);" - "dvec2 ceil(dvec2);" - "dvec3 ceil(dvec3);" - "dvec4 ceil(dvec4);" - - "double fract(double);" - "dvec2 fract(dvec2);" - "dvec3 fract(dvec3);" - "dvec4 fract(dvec4);" - - "double mod(double, double);" - "dvec2 mod(dvec2 , double);" - "dvec3 mod(dvec3 , double);" - "dvec4 mod(dvec4 , double);" - "dvec2 mod(dvec2 , dvec2);" - "dvec3 mod(dvec3 , dvec3);" - "dvec4 mod(dvec4 , dvec4);" - - "double modf(double, out double);" - "dvec2 modf(dvec2, out dvec2);" - "dvec3 modf(dvec3, out dvec3);" - "dvec4 modf(dvec4, out dvec4);" - - "double min(double, double);" - "dvec2 min(dvec2, double);" - "dvec3 min(dvec3, double);" - "dvec4 min(dvec4, double);" - "dvec2 min(dvec2, dvec2);" - "dvec3 min(dvec3, dvec3);" - "dvec4 min(dvec4, dvec4);" - - "double max(double, double);" - "dvec2 max(dvec2 , double);" - "dvec3 max(dvec3 , double);" - "dvec4 max(dvec4 , double);" - "dvec2 max(dvec2 , dvec2);" - "dvec3 max(dvec3 , dvec3);" - "dvec4 
max(dvec4 , dvec4);" - - "double clamp(double, double, double);" - "dvec2 clamp(dvec2 , double, double);" - "dvec3 clamp(dvec3 , double, double);" - "dvec4 clamp(dvec4 , double, double);" - "dvec2 clamp(dvec2 , dvec2 , dvec2);" - "dvec3 clamp(dvec3 , dvec3 , dvec3);" - "dvec4 clamp(dvec4 , dvec4 , dvec4);" - - "double mix(double, double, double);" - "dvec2 mix(dvec2, dvec2, double);" - "dvec3 mix(dvec3, dvec3, double);" - "dvec4 mix(dvec4, dvec4, double);" - "dvec2 mix(dvec2, dvec2, dvec2);" - "dvec3 mix(dvec3, dvec3, dvec3);" - "dvec4 mix(dvec4, dvec4, dvec4);" - "double mix(double, double, bool);" - "dvec2 mix(dvec2, dvec2, bvec2);" - "dvec3 mix(dvec3, dvec3, bvec3);" - "dvec4 mix(dvec4, dvec4, bvec4);" - - "double step(double, double);" - "dvec2 step(dvec2 , dvec2);" - "dvec3 step(dvec3 , dvec3);" - "dvec4 step(dvec4 , dvec4);" - "dvec2 step(double, dvec2);" - "dvec3 step(double, dvec3);" - "dvec4 step(double, dvec4);" - - "double smoothstep(double, double, double);" - "dvec2 smoothstep(dvec2 , dvec2 , dvec2);" - "dvec3 smoothstep(dvec3 , dvec3 , dvec3);" - "dvec4 smoothstep(dvec4 , dvec4 , dvec4);" - "dvec2 smoothstep(double, double, dvec2);" - "dvec3 smoothstep(double, double, dvec3);" - "dvec4 smoothstep(double, double, dvec4);" - - "bool isnan(double);" - "bvec2 isnan(dvec2);" - "bvec3 isnan(dvec3);" - "bvec4 isnan(dvec4);" - - "bool isinf(double);" - "bvec2 isinf(dvec2);" - "bvec3 isinf(dvec3);" - "bvec4 isinf(dvec4);" - - "double length(double);" - "double length(dvec2);" - "double length(dvec3);" - "double length(dvec4);" - - "double distance(double, double);" - "double distance(dvec2 , dvec2);" - "double distance(dvec3 , dvec3);" - "double distance(dvec4 , dvec4);" - - "double dot(double, double);" - "double dot(dvec2 , dvec2);" - "double dot(dvec3 , dvec3);" - "double dot(dvec4 , dvec4);" - - "dvec3 cross(dvec3, dvec3);" - - "double normalize(double);" - "dvec2 normalize(dvec2);" - "dvec3 normalize(dvec3);" - "dvec4 normalize(dvec4);" - - "double faceforward(double, double, double);" - "dvec2 faceforward(dvec2, dvec2, dvec2);" - "dvec3 faceforward(dvec3, dvec3, dvec3);" - "dvec4 faceforward(dvec4, dvec4, dvec4);" - - "double reflect(double, double);" - "dvec2 reflect(dvec2 , dvec2 );" - "dvec3 reflect(dvec3 , dvec3 );" - "dvec4 reflect(dvec4 , dvec4 );" - - "double refract(double, double, double);" - "dvec2 refract(dvec2 , dvec2 , double);" - "dvec3 refract(dvec3 , dvec3 , double);" - "dvec4 refract(dvec4 , dvec4 , double);" - - "dmat2 matrixCompMult(dmat2, dmat2);" - "dmat3 matrixCompMult(dmat3, dmat3);" - "dmat4 matrixCompMult(dmat4, dmat4);" - "dmat2x3 matrixCompMult(dmat2x3, dmat2x3);" - "dmat2x4 matrixCompMult(dmat2x4, dmat2x4);" - "dmat3x2 matrixCompMult(dmat3x2, dmat3x2);" - "dmat3x4 matrixCompMult(dmat3x4, dmat3x4);" - "dmat4x2 matrixCompMult(dmat4x2, dmat4x2);" - "dmat4x3 matrixCompMult(dmat4x3, dmat4x3);" - - "dmat2 outerProduct(dvec2, dvec2);" - "dmat3 outerProduct(dvec3, dvec3);" - "dmat4 outerProduct(dvec4, dvec4);" - "dmat2x3 outerProduct(dvec3, dvec2);" - "dmat3x2 outerProduct(dvec2, dvec3);" - "dmat2x4 outerProduct(dvec4, dvec2);" - "dmat4x2 outerProduct(dvec2, dvec4);" - "dmat3x4 outerProduct(dvec4, dvec3);" - "dmat4x3 outerProduct(dvec3, dvec4);" - - "dmat2 transpose(dmat2);" - "dmat3 transpose(dmat3);" - "dmat4 transpose(dmat4);" - "dmat2x3 transpose(dmat3x2);" - "dmat3x2 transpose(dmat2x3);" - "dmat2x4 transpose(dmat4x2);" - "dmat4x2 transpose(dmat2x4);" - "dmat3x4 transpose(dmat4x3);" - "dmat4x3 transpose(dmat3x4);" - - "double determinant(dmat2);" - 
"double determinant(dmat3);" - "double determinant(dmat4);" - - "dmat2 inverse(dmat2);" - "dmat3 inverse(dmat3);" - "dmat4 inverse(dmat4);" - - "bvec2 lessThan(dvec2, dvec2);" - "bvec3 lessThan(dvec3, dvec3);" - "bvec4 lessThan(dvec4, dvec4);" - - "bvec2 lessThanEqual(dvec2, dvec2);" - "bvec3 lessThanEqual(dvec3, dvec3);" - "bvec4 lessThanEqual(dvec4, dvec4);" - - "bvec2 greaterThan(dvec2, dvec2);" - "bvec3 greaterThan(dvec3, dvec3);" - "bvec4 greaterThan(dvec4, dvec4);" - - "bvec2 greaterThanEqual(dvec2, dvec2);" - "bvec3 greaterThanEqual(dvec3, dvec3);" - "bvec4 greaterThanEqual(dvec4, dvec4);" - - "bvec2 equal(dvec2, dvec2);" - "bvec3 equal(dvec3, dvec3);" - "bvec4 equal(dvec4, dvec4);" - - "bvec2 notEqual(dvec2, dvec2);" - "bvec3 notEqual(dvec3, dvec3);" - "bvec4 notEqual(dvec4, dvec4);" - - "\n"); - } - - if (profile != EEsProfile && version >= 450) { - commonBuiltins.append( - - "int64_t abs(int64_t);" - "i64vec2 abs(i64vec2);" - "i64vec3 abs(i64vec3);" - "i64vec4 abs(i64vec4);" - - "int64_t sign(int64_t);" - "i64vec2 sign(i64vec2);" - "i64vec3 sign(i64vec3);" - "i64vec4 sign(i64vec4);" - - "int64_t min(int64_t, int64_t);" - "i64vec2 min(i64vec2, int64_t);" - "i64vec3 min(i64vec3, int64_t);" - "i64vec4 min(i64vec4, int64_t);" - "i64vec2 min(i64vec2, i64vec2);" - "i64vec3 min(i64vec3, i64vec3);" - "i64vec4 min(i64vec4, i64vec4);" - "uint64_t min(uint64_t, uint64_t);" - "u64vec2 min(u64vec2, uint64_t);" - "u64vec3 min(u64vec3, uint64_t);" - "u64vec4 min(u64vec4, uint64_t);" - "u64vec2 min(u64vec2, u64vec2);" - "u64vec3 min(u64vec3, u64vec3);" - "u64vec4 min(u64vec4, u64vec4);" - - "int64_t max(int64_t, int64_t);" - "i64vec2 max(i64vec2, int64_t);" - "i64vec3 max(i64vec3, int64_t);" - "i64vec4 max(i64vec4, int64_t);" - "i64vec2 max(i64vec2, i64vec2);" - "i64vec3 max(i64vec3, i64vec3);" - "i64vec4 max(i64vec4, i64vec4);" - "uint64_t max(uint64_t, uint64_t);" - "u64vec2 max(u64vec2, uint64_t);" - "u64vec3 max(u64vec3, uint64_t);" - "u64vec4 max(u64vec4, uint64_t);" - "u64vec2 max(u64vec2, u64vec2);" - "u64vec3 max(u64vec3, u64vec3);" - "u64vec4 max(u64vec4, u64vec4);" - - "int64_t clamp(int64_t, int64_t, int64_t);" - "i64vec2 clamp(i64vec2, int64_t, int64_t);" - "i64vec3 clamp(i64vec3, int64_t, int64_t);" - "i64vec4 clamp(i64vec4, int64_t, int64_t);" - "i64vec2 clamp(i64vec2, i64vec2, i64vec2);" - "i64vec3 clamp(i64vec3, i64vec3, i64vec3);" - "i64vec4 clamp(i64vec4, i64vec4, i64vec4);" - "uint64_t clamp(uint64_t, uint64_t, uint64_t);" - "u64vec2 clamp(u64vec2, uint64_t, uint64_t);" - "u64vec3 clamp(u64vec3, uint64_t, uint64_t);" - "u64vec4 clamp(u64vec4, uint64_t, uint64_t);" - "u64vec2 clamp(u64vec2, u64vec2, u64vec2);" - "u64vec3 clamp(u64vec3, u64vec3, u64vec3);" - "u64vec4 clamp(u64vec4, u64vec4, u64vec4);" - - "int64_t mix(int64_t, int64_t, bool);" - "i64vec2 mix(i64vec2, i64vec2, bvec2);" - "i64vec3 mix(i64vec3, i64vec3, bvec3);" - "i64vec4 mix(i64vec4, i64vec4, bvec4);" - "uint64_t mix(uint64_t, uint64_t, bool);" - "u64vec2 mix(u64vec2, u64vec2, bvec2);" - "u64vec3 mix(u64vec3, u64vec3, bvec3);" - "u64vec4 mix(u64vec4, u64vec4, bvec4);" - - "int64_t doubleBitsToInt64(double);" - "i64vec2 doubleBitsToInt64(dvec2);" - "i64vec3 doubleBitsToInt64(dvec3);" - "i64vec4 doubleBitsToInt64(dvec4);" - - "uint64_t doubleBitsToUint64(double);" - "u64vec2 doubleBitsToUint64(dvec2);" - "u64vec3 doubleBitsToUint64(dvec3);" - "u64vec4 doubleBitsToUint64(dvec4);" - - "double int64BitsToDouble(int64_t);" - "dvec2 int64BitsToDouble(i64vec2);" - "dvec3 int64BitsToDouble(i64vec3);" - "dvec4 
int64BitsToDouble(i64vec4);" - - "double uint64BitsToDouble(uint64_t);" - "dvec2 uint64BitsToDouble(u64vec2);" - "dvec3 uint64BitsToDouble(u64vec3);" - "dvec4 uint64BitsToDouble(u64vec4);" - - "int64_t packInt2x32(ivec2);" - "uint64_t packUint2x32(uvec2);" - "ivec2 unpackInt2x32(int64_t);" - "uvec2 unpackUint2x32(uint64_t);" - - "bvec2 lessThan(i64vec2, i64vec2);" - "bvec3 lessThan(i64vec3, i64vec3);" - "bvec4 lessThan(i64vec4, i64vec4);" - "bvec2 lessThan(u64vec2, u64vec2);" - "bvec3 lessThan(u64vec3, u64vec3);" - "bvec4 lessThan(u64vec4, u64vec4);" - - "bvec2 lessThanEqual(i64vec2, i64vec2);" - "bvec3 lessThanEqual(i64vec3, i64vec3);" - "bvec4 lessThanEqual(i64vec4, i64vec4);" - "bvec2 lessThanEqual(u64vec2, u64vec2);" - "bvec3 lessThanEqual(u64vec3, u64vec3);" - "bvec4 lessThanEqual(u64vec4, u64vec4);" - - "bvec2 greaterThan(i64vec2, i64vec2);" - "bvec3 greaterThan(i64vec3, i64vec3);" - "bvec4 greaterThan(i64vec4, i64vec4);" - "bvec2 greaterThan(u64vec2, u64vec2);" - "bvec3 greaterThan(u64vec3, u64vec3);" - "bvec4 greaterThan(u64vec4, u64vec4);" - - "bvec2 greaterThanEqual(i64vec2, i64vec2);" - "bvec3 greaterThanEqual(i64vec3, i64vec3);" - "bvec4 greaterThanEqual(i64vec4, i64vec4);" - "bvec2 greaterThanEqual(u64vec2, u64vec2);" - "bvec3 greaterThanEqual(u64vec3, u64vec3);" - "bvec4 greaterThanEqual(u64vec4, u64vec4);" - - "bvec2 equal(i64vec2, i64vec2);" - "bvec3 equal(i64vec3, i64vec3);" - "bvec4 equal(i64vec4, i64vec4);" - "bvec2 equal(u64vec2, u64vec2);" - "bvec3 equal(u64vec3, u64vec3);" - "bvec4 equal(u64vec4, u64vec4);" - - "bvec2 notEqual(i64vec2, i64vec2);" - "bvec3 notEqual(i64vec3, i64vec3);" - "bvec4 notEqual(i64vec4, i64vec4);" - "bvec2 notEqual(u64vec2, u64vec2);" - "bvec3 notEqual(u64vec3, u64vec3);" - "bvec4 notEqual(u64vec4, u64vec4);" - -#ifdef AMD_EXTENSIONS - "int findLSB(int64_t);" - "ivec2 findLSB(i64vec2);" - "ivec3 findLSB(i64vec3);" - "ivec4 findLSB(i64vec4);" - - "int findLSB(uint64_t);" - "ivec2 findLSB(u64vec2);" - "ivec3 findLSB(u64vec3);" - "ivec4 findLSB(u64vec4);" - - "int findMSB(int64_t);" - "ivec2 findMSB(i64vec2);" - "ivec3 findMSB(i64vec3);" - "ivec4 findMSB(i64vec4);" - - "int findMSB(uint64_t);" - "ivec2 findMSB(u64vec2);" - "ivec3 findMSB(u64vec3);" - "ivec4 findMSB(u64vec4);" -#endif - "\n" - ); - } - -#ifdef AMD_EXTENSIONS - // GL_AMD_shader_trinary_minmax - if (profile != EEsProfile && version >= 430) { - commonBuiltins.append( - "float min3(float, float, float);" - "vec2 min3(vec2, vec2, vec2);" - "vec3 min3(vec3, vec3, vec3);" - "vec4 min3(vec4, vec4, vec4);" - - "int min3(int, int, int);" - "ivec2 min3(ivec2, ivec2, ivec2);" - "ivec3 min3(ivec3, ivec3, ivec3);" - "ivec4 min3(ivec4, ivec4, ivec4);" - - "uint min3(uint, uint, uint);" - "uvec2 min3(uvec2, uvec2, uvec2);" - "uvec3 min3(uvec3, uvec3, uvec3);" - "uvec4 min3(uvec4, uvec4, uvec4);" - - "float max3(float, float, float);" - "vec2 max3(vec2, vec2, vec2);" - "vec3 max3(vec3, vec3, vec3);" - "vec4 max3(vec4, vec4, vec4);" - - "int max3(int, int, int);" - "ivec2 max3(ivec2, ivec2, ivec2);" - "ivec3 max3(ivec3, ivec3, ivec3);" - "ivec4 max3(ivec4, ivec4, ivec4);" - - "uint max3(uint, uint, uint);" - "uvec2 max3(uvec2, uvec2, uvec2);" - "uvec3 max3(uvec3, uvec3, uvec3);" - "uvec4 max3(uvec4, uvec4, uvec4);" - - "float mid3(float, float, float);" - "vec2 mid3(vec2, vec2, vec2);" - "vec3 mid3(vec3, vec3, vec3);" - "vec4 mid3(vec4, vec4, vec4);" - - "int mid3(int, int, int);" - "ivec2 mid3(ivec2, ivec2, ivec2);" - "ivec3 mid3(ivec3, ivec3, ivec3);" - "ivec4 mid3(ivec4, ivec4, ivec4);" - - "uint 
mid3(uint, uint, uint);" - "uvec2 mid3(uvec2, uvec2, uvec2);" - "uvec3 mid3(uvec3, uvec3, uvec3);" - "uvec4 mid3(uvec4, uvec4, uvec4);" - - "float16_t min3(float16_t, float16_t, float16_t);" - "f16vec2 min3(f16vec2, f16vec2, f16vec2);" - "f16vec3 min3(f16vec3, f16vec3, f16vec3);" - "f16vec4 min3(f16vec4, f16vec4, f16vec4);" - - "float16_t max3(float16_t, float16_t, float16_t);" - "f16vec2 max3(f16vec2, f16vec2, f16vec2);" - "f16vec3 max3(f16vec3, f16vec3, f16vec3);" - "f16vec4 max3(f16vec4, f16vec4, f16vec4);" - - "float16_t mid3(float16_t, float16_t, float16_t);" - "f16vec2 mid3(f16vec2, f16vec2, f16vec2);" - "f16vec3 mid3(f16vec3, f16vec3, f16vec3);" - "f16vec4 mid3(f16vec4, f16vec4, f16vec4);" - - "\n" - ); - } -#endif - - if ((profile == EEsProfile && version >= 310) || - (profile != EEsProfile && version >= 430)) { - commonBuiltins.append( - "uint atomicAdd(coherent volatile inout uint, uint);" - " int atomicAdd(coherent volatile inout int, int);" - - "uint atomicMin(coherent volatile inout uint, uint);" - " int atomicMin(coherent volatile inout int, int);" - - "uint atomicMax(coherent volatile inout uint, uint);" - " int atomicMax(coherent volatile inout int, int);" - - "uint atomicAnd(coherent volatile inout uint, uint);" - " int atomicAnd(coherent volatile inout int, int);" - - "uint atomicOr (coherent volatile inout uint, uint);" - " int atomicOr (coherent volatile inout int, int);" - - "uint atomicXor(coherent volatile inout uint, uint);" - " int atomicXor(coherent volatile inout int, int);" - - "uint atomicExchange(coherent volatile inout uint, uint);" - " int atomicExchange(coherent volatile inout int, int);" - - "uint atomicCompSwap(coherent volatile inout uint, uint, uint);" - " int atomicCompSwap(coherent volatile inout int, int, int);" - - "\n"); - } - - if ((profile == EEsProfile && version >= 310) || - (profile != EEsProfile && version >= 450)) { - commonBuiltins.append( - "int mix(int x, int y, bool a);" - "ivec2 mix(ivec2 x, ivec2 y, bvec2 a);" - "ivec3 mix(ivec3 x, ivec3 y, bvec3 a);" - "ivec4 mix(ivec4 x, ivec4 y, bvec4 a);" - - "uint mix(uint x, uint y, bool a);" - "uvec2 mix(uvec2 x, uvec2 y, bvec2 a);" - "uvec3 mix(uvec3 x, uvec3 y, bvec3 a);" - "uvec4 mix(uvec4 x, uvec4 y, bvec4 a);" - - "bool mix(bool x, bool y, bool a);" - "bvec2 mix(bvec2 x, bvec2 y, bvec2 a);" - "bvec3 mix(bvec3 x, bvec3 y, bvec3 a);" - "bvec4 mix(bvec4 x, bvec4 y, bvec4 a);" - - "\n"); - } - - if ((profile == EEsProfile && version >= 300) || - (profile != EEsProfile && version >= 330)) { - commonBuiltins.append( - "int floatBitsToInt(highp float value);" - "ivec2 floatBitsToInt(highp vec2 value);" - "ivec3 floatBitsToInt(highp vec3 value);" - "ivec4 floatBitsToInt(highp vec4 value);" - - "uint floatBitsToUint(highp float value);" - "uvec2 floatBitsToUint(highp vec2 value);" - "uvec3 floatBitsToUint(highp vec3 value);" - "uvec4 floatBitsToUint(highp vec4 value);" - - "float intBitsToFloat(highp int value);" - "vec2 intBitsToFloat(highp ivec2 value);" - "vec3 intBitsToFloat(highp ivec3 value);" - "vec4 intBitsToFloat(highp ivec4 value);" - - "float uintBitsToFloat(highp uint value);" - "vec2 uintBitsToFloat(highp uvec2 value);" - "vec3 uintBitsToFloat(highp uvec3 value);" - "vec4 uintBitsToFloat(highp uvec4 value);" - - "\n"); - } - - if ((profile != EEsProfile && version >= 400) || - (profile == EEsProfile && version >= 310)) { // GL_OES_gpu_shader5 - - commonBuiltins.append( - "float fma(float, float, float );" - "vec2 fma(vec2, vec2, vec2 );" - "vec3 fma(vec3, vec3, vec3 );" - "vec4 
fma(vec4, vec4, vec4 );" - "\n"); - - if (profile != EEsProfile) { - commonBuiltins.append( - "double fma(double, double, double);" - "dvec2 fma(dvec2, dvec2, dvec2 );" - "dvec3 fma(dvec3, dvec3, dvec3 );" - "dvec4 fma(dvec4, dvec4, dvec4 );" - "\n"); - } - } - - if ((profile == EEsProfile && version >= 310) || - (profile != EEsProfile && version >= 400)) { - commonBuiltins.append( - "float frexp(highp float, out highp int);" - "vec2 frexp(highp vec2, out highp ivec2);" - "vec3 frexp(highp vec3, out highp ivec3);" - "vec4 frexp(highp vec4, out highp ivec4);" - - "float ldexp(highp float, highp int);" - "vec2 ldexp(highp vec2, highp ivec2);" - "vec3 ldexp(highp vec3, highp ivec3);" - "vec4 ldexp(highp vec4, highp ivec4);" - - "\n"); - } - - if (profile != EEsProfile && version >= 400) { - commonBuiltins.append( - "double frexp(double, out int);" - "dvec2 frexp( dvec2, out ivec2);" - "dvec3 frexp( dvec3, out ivec3);" - "dvec4 frexp( dvec4, out ivec4);" - - "double ldexp(double, int);" - "dvec2 ldexp( dvec2, ivec2);" - "dvec3 ldexp( dvec3, ivec3);" - "dvec4 ldexp( dvec4, ivec4);" - - "double packDouble2x32(uvec2);" - "uvec2 unpackDouble2x32(double);" - - "\n"); - } - - if ((profile == EEsProfile && version >= 300) || - (profile != EEsProfile && version >= 400)) { - commonBuiltins.append( - "highp uint packUnorm2x16(vec2);" - "vec2 unpackUnorm2x16(highp uint);" - "\n"); - } - - if ((profile == EEsProfile && version >= 300) || - (profile != EEsProfile && version >= 420)) { - commonBuiltins.append( - "highp uint packSnorm2x16(vec2);" - " vec2 unpackSnorm2x16(highp uint);" - "highp uint packHalf2x16(vec2);" - "\n"); - } - - if (profile == EEsProfile && version >= 300) { - commonBuiltins.append( - "mediump vec2 unpackHalf2x16(highp uint);" - "\n"); - } else if (profile != EEsProfile && version >= 420) { - commonBuiltins.append( - " vec2 unpackHalf2x16(highp uint);" - "\n"); - } - - if ((profile == EEsProfile && version >= 310) || - (profile != EEsProfile && version >= 400)) { - commonBuiltins.append( - "highp uint packSnorm4x8(vec4);" - "highp uint packUnorm4x8(vec4);" - "\n"); - } - - if (profile == EEsProfile && version >= 310) { - commonBuiltins.append( - "mediump vec4 unpackSnorm4x8(highp uint);" - "mediump vec4 unpackUnorm4x8(highp uint);" - "\n"); - } else if (profile != EEsProfile && version >= 400) { - commonBuiltins.append( - "vec4 unpackSnorm4x8(highp uint);" - "vec4 unpackUnorm4x8(highp uint);" - "\n"); - } - - // - // Geometric Functions. 
- // - commonBuiltins.append( - "float length(float x);" - "float length(vec2 x);" - "float length(vec3 x);" - "float length(vec4 x);" - - "float distance(float p0, float p1);" - "float distance(vec2 p0, vec2 p1);" - "float distance(vec3 p0, vec3 p1);" - "float distance(vec4 p0, vec4 p1);" - - "float dot(float x, float y);" - "float dot(vec2 x, vec2 y);" - "float dot(vec3 x, vec3 y);" - "float dot(vec4 x, vec4 y);" - - "vec3 cross(vec3 x, vec3 y);" - "float normalize(float x);" - "vec2 normalize(vec2 x);" - "vec3 normalize(vec3 x);" - "vec4 normalize(vec4 x);" - - "float faceforward(float N, float I, float Nref);" - "vec2 faceforward(vec2 N, vec2 I, vec2 Nref);" - "vec3 faceforward(vec3 N, vec3 I, vec3 Nref);" - "vec4 faceforward(vec4 N, vec4 I, vec4 Nref);" - - "float reflect(float I, float N);" - "vec2 reflect(vec2 I, vec2 N);" - "vec3 reflect(vec3 I, vec3 N);" - "vec4 reflect(vec4 I, vec4 N);" - - "float refract(float I, float N, float eta);" - "vec2 refract(vec2 I, vec2 N, float eta);" - "vec3 refract(vec3 I, vec3 N, float eta);" - "vec4 refract(vec4 I, vec4 N, float eta);" - - "\n"); - - // - // Matrix Functions. - // - commonBuiltins.append( - "mat2 matrixCompMult(mat2 x, mat2 y);" - "mat3 matrixCompMult(mat3 x, mat3 y);" - "mat4 matrixCompMult(mat4 x, mat4 y);" - - "\n"); - - // 120 is correct for both ES and desktop - if (version >= 120) { - commonBuiltins.append( - "mat2 outerProduct(vec2 c, vec2 r);" - "mat3 outerProduct(vec3 c, vec3 r);" - "mat4 outerProduct(vec4 c, vec4 r);" - "mat2x3 outerProduct(vec3 c, vec2 r);" - "mat3x2 outerProduct(vec2 c, vec3 r);" - "mat2x4 outerProduct(vec4 c, vec2 r);" - "mat4x2 outerProduct(vec2 c, vec4 r);" - "mat3x4 outerProduct(vec4 c, vec3 r);" - "mat4x3 outerProduct(vec3 c, vec4 r);" - - "mat2 transpose(mat2 m);" - "mat3 transpose(mat3 m);" - "mat4 transpose(mat4 m);" - "mat2x3 transpose(mat3x2 m);" - "mat3x2 transpose(mat2x3 m);" - "mat2x4 transpose(mat4x2 m);" - "mat4x2 transpose(mat2x4 m);" - "mat3x4 transpose(mat4x3 m);" - "mat4x3 transpose(mat3x4 m);" - - "mat2x3 matrixCompMult(mat2x3, mat2x3);" - "mat2x4 matrixCompMult(mat2x4, mat2x4);" - "mat3x2 matrixCompMult(mat3x2, mat3x2);" - "mat3x4 matrixCompMult(mat3x4, mat3x4);" - "mat4x2 matrixCompMult(mat4x2, mat4x2);" - "mat4x3 matrixCompMult(mat4x3, mat4x3);" - - "\n"); - - // 150 is correct for both ES and desktop - if (version >= 150) { - commonBuiltins.append( - "float determinant(mat2 m);" - "float determinant(mat3 m);" - "float determinant(mat4 m);" - - "mat2 inverse(mat2 m);" - "mat3 inverse(mat3 m);" - "mat4 inverse(mat4 m);" - - "\n"); - } - } - - // - // Vector relational functions. 
- // - commonBuiltins.append( - "bvec2 lessThan(vec2 x, vec2 y);" - "bvec3 lessThan(vec3 x, vec3 y);" - "bvec4 lessThan(vec4 x, vec4 y);" - - "bvec2 lessThan(ivec2 x, ivec2 y);" - "bvec3 lessThan(ivec3 x, ivec3 y);" - "bvec4 lessThan(ivec4 x, ivec4 y);" - - "bvec2 lessThanEqual(vec2 x, vec2 y);" - "bvec3 lessThanEqual(vec3 x, vec3 y);" - "bvec4 lessThanEqual(vec4 x, vec4 y);" - - "bvec2 lessThanEqual(ivec2 x, ivec2 y);" - "bvec3 lessThanEqual(ivec3 x, ivec3 y);" - "bvec4 lessThanEqual(ivec4 x, ivec4 y);" - - "bvec2 greaterThan(vec2 x, vec2 y);" - "bvec3 greaterThan(vec3 x, vec3 y);" - "bvec4 greaterThan(vec4 x, vec4 y);" - - "bvec2 greaterThan(ivec2 x, ivec2 y);" - "bvec3 greaterThan(ivec3 x, ivec3 y);" - "bvec4 greaterThan(ivec4 x, ivec4 y);" - - "bvec2 greaterThanEqual(vec2 x, vec2 y);" - "bvec3 greaterThanEqual(vec3 x, vec3 y);" - "bvec4 greaterThanEqual(vec4 x, vec4 y);" - - "bvec2 greaterThanEqual(ivec2 x, ivec2 y);" - "bvec3 greaterThanEqual(ivec3 x, ivec3 y);" - "bvec4 greaterThanEqual(ivec4 x, ivec4 y);" - - "bvec2 equal(vec2 x, vec2 y);" - "bvec3 equal(vec3 x, vec3 y);" - "bvec4 equal(vec4 x, vec4 y);" - - "bvec2 equal(ivec2 x, ivec2 y);" - "bvec3 equal(ivec3 x, ivec3 y);" - "bvec4 equal(ivec4 x, ivec4 y);" - - "bvec2 equal(bvec2 x, bvec2 y);" - "bvec3 equal(bvec3 x, bvec3 y);" - "bvec4 equal(bvec4 x, bvec4 y);" - - "bvec2 notEqual(vec2 x, vec2 y);" - "bvec3 notEqual(vec3 x, vec3 y);" - "bvec4 notEqual(vec4 x, vec4 y);" - - "bvec2 notEqual(ivec2 x, ivec2 y);" - "bvec3 notEqual(ivec3 x, ivec3 y);" - "bvec4 notEqual(ivec4 x, ivec4 y);" - - "bvec2 notEqual(bvec2 x, bvec2 y);" - "bvec3 notEqual(bvec3 x, bvec3 y);" - "bvec4 notEqual(bvec4 x, bvec4 y);" - - "bool any(bvec2 x);" - "bool any(bvec3 x);" - "bool any(bvec4 x);" - - "bool all(bvec2 x);" - "bool all(bvec3 x);" - "bool all(bvec4 x);" - - "bvec2 not(bvec2 x);" - "bvec3 not(bvec3 x);" - "bvec4 not(bvec4 x);" - - "\n"); - - if (version >= 130) { - commonBuiltins.append( - "bvec2 lessThan(uvec2 x, uvec2 y);" - "bvec3 lessThan(uvec3 x, uvec3 y);" - "bvec4 lessThan(uvec4 x, uvec4 y);" - - "bvec2 lessThanEqual(uvec2 x, uvec2 y);" - "bvec3 lessThanEqual(uvec3 x, uvec3 y);" - "bvec4 lessThanEqual(uvec4 x, uvec4 y);" - - "bvec2 greaterThan(uvec2 x, uvec2 y);" - "bvec3 greaterThan(uvec3 x, uvec3 y);" - "bvec4 greaterThan(uvec4 x, uvec4 y);" - - "bvec2 greaterThanEqual(uvec2 x, uvec2 y);" - "bvec3 greaterThanEqual(uvec3 x, uvec3 y);" - "bvec4 greaterThanEqual(uvec4 x, uvec4 y);" - - "bvec2 equal(uvec2 x, uvec2 y);" - "bvec3 equal(uvec3 x, uvec3 y);" - "bvec4 equal(uvec4 x, uvec4 y);" - - "bvec2 notEqual(uvec2 x, uvec2 y);" - "bvec3 notEqual(uvec3 x, uvec3 y);" - "bvec4 notEqual(uvec4 x, uvec4 y);" - - "\n"); - } - - // - // Original-style texture functions existing in all stages. - // (Per-stage functions below.) 
- // - if ((profile == EEsProfile && version == 100) || - profile == ECompatibilityProfile || - (profile == ECoreProfile && version < 420) || - profile == ENoProfile) { - if (spvVersion.spv == 0) { - commonBuiltins.append( - "vec4 texture2D(sampler2D, vec2);" - - "vec4 texture2DProj(sampler2D, vec3);" - "vec4 texture2DProj(sampler2D, vec4);" - - "vec4 texture3D(sampler3D, vec3);" // OES_texture_3D, but caught by keyword check - "vec4 texture3DProj(sampler3D, vec4);" // OES_texture_3D, but caught by keyword check - - "vec4 textureCube(samplerCube, vec3);" - - "\n"); - } - } - - if ( profile == ECompatibilityProfile || - (profile == ECoreProfile && version < 420) || - profile == ENoProfile) { - if (spvVersion.spv == 0) { - commonBuiltins.append( - "vec4 texture1D(sampler1D, float);" - - "vec4 texture1DProj(sampler1D, vec2);" - "vec4 texture1DProj(sampler1D, vec4);" - - "vec4 shadow1D(sampler1DShadow, vec3);" - "vec4 shadow2D(sampler2DShadow, vec3);" - "vec4 shadow1DProj(sampler1DShadow, vec4);" - "vec4 shadow2DProj(sampler2DShadow, vec4);" - - "vec4 texture2DRect(sampler2DRect, vec2);" // GL_ARB_texture_rectangle, caught by keyword check - "vec4 texture2DRectProj(sampler2DRect, vec3);" // GL_ARB_texture_rectangle, caught by keyword check - "vec4 texture2DRectProj(sampler2DRect, vec4);" // GL_ARB_texture_rectangle, caught by keyword check - "vec4 shadow2DRect(sampler2DRectShadow, vec3);" // GL_ARB_texture_rectangle, caught by keyword check - "vec4 shadow2DRectProj(sampler2DRectShadow, vec4);" // GL_ARB_texture_rectangle, caught by keyword check - - "\n"); - } - } - - if (profile == EEsProfile) { - if (spvVersion.spv == 0) { - commonBuiltins.append( - "vec4 texture2D(samplerExternalOES, vec2 coord);" // GL_OES_EGL_image_external, caught by keyword check - "vec4 texture2DProj(samplerExternalOES, vec3);" // GL_OES_EGL_image_external, caught by keyword check - "vec4 texture2DProj(samplerExternalOES, vec4);" // GL_OES_EGL_image_external, caught by keyword check - "vec4 texture2DGradEXT(sampler2D, vec2, vec2, vec2);" // GL_EXT_shader_texture_lod - "vec4 texture2DProjGradEXT(sampler2D, vec3, vec2, vec2);" // GL_EXT_shader_texture_lod - "vec4 texture2DProjGradEXT(sampler2D, vec4, vec2, vec2);" // GL_EXT_shader_texture_lod - "vec4 textureCubeGradEXT(samplerCube, vec3, vec3, vec3);" // GL_EXT_shader_texture_lod - - "float shadow2DEXT(sampler2DShadow, vec3);" // GL_EXT_shadow_samplers - "float shadow2DProjEXT(sampler2DShadow, vec4);" // GL_EXT_shadow_samplers - - "\n"); - } - } - - // - // Noise functions. - // - if (profile != EEsProfile) { - commonBuiltins.append( - "float noise1(float x);" - "float noise1(vec2 x);" - "float noise1(vec3 x);" - "float noise1(vec4 x);" - - "vec2 noise2(float x);" - "vec2 noise2(vec2 x);" - "vec2 noise2(vec3 x);" - "vec2 noise2(vec4 x);" - - "vec3 noise3(float x);" - "vec3 noise3(vec2 x);" - "vec3 noise3(vec3 x);" - "vec3 noise3(vec4 x);" - - "vec4 noise4(float x);" - "vec4 noise4(vec2 x);" - "vec4 noise4(vec3 x);" - "vec4 noise4(vec4 x);" - - "\n"); - } - - if (spvVersion.vulkan == 0) { - // - // Atomic counter functions. 
- // - if ((profile != EEsProfile && version >= 300) || - (profile == EEsProfile && version >= 310)) { - commonBuiltins.append( - "uint atomicCounterIncrement(atomic_uint);" - "uint atomicCounterDecrement(atomic_uint);" - "uint atomicCounter(atomic_uint);" - - "\n"); - } - if (profile != EEsProfile && version >= 460) { - commonBuiltins.append( - "uint atomicCounterAdd(atomic_uint, uint);" - "uint atomicCounterSubtract(atomic_uint, uint);" - "uint atomicCounterMin(atomic_uint, uint);" - "uint atomicCounterMax(atomic_uint, uint);" - "uint atomicCounterAnd(atomic_uint, uint);" - "uint atomicCounterOr(atomic_uint, uint);" - "uint atomicCounterXor(atomic_uint, uint);" - "uint atomicCounterExchange(atomic_uint, uint);" - "uint atomicCounterCompSwap(atomic_uint, uint, uint);" - - "\n"); - } - } - - // Bitfield - if ((profile == EEsProfile && version >= 310) || - (profile != EEsProfile && version >= 400)) { - commonBuiltins.append( - " int bitfieldExtract( int, int, int);" - "ivec2 bitfieldExtract(ivec2, int, int);" - "ivec3 bitfieldExtract(ivec3, int, int);" - "ivec4 bitfieldExtract(ivec4, int, int);" - - " uint bitfieldExtract( uint, int, int);" - "uvec2 bitfieldExtract(uvec2, int, int);" - "uvec3 bitfieldExtract(uvec3, int, int);" - "uvec4 bitfieldExtract(uvec4, int, int);" - - " int bitfieldInsert( int base, int, int, int);" - "ivec2 bitfieldInsert(ivec2 base, ivec2, int, int);" - "ivec3 bitfieldInsert(ivec3 base, ivec3, int, int);" - "ivec4 bitfieldInsert(ivec4 base, ivec4, int, int);" - - " uint bitfieldInsert( uint base, uint, int, int);" - "uvec2 bitfieldInsert(uvec2 base, uvec2, int, int);" - "uvec3 bitfieldInsert(uvec3 base, uvec3, int, int);" - "uvec4 bitfieldInsert(uvec4 base, uvec4, int, int);" - - "\n"); - } - - if (profile != EEsProfile && version >= 400) { - commonBuiltins.append( - " int findLSB( int);" - "ivec2 findLSB(ivec2);" - "ivec3 findLSB(ivec3);" - "ivec4 findLSB(ivec4);" - - " int findLSB( uint);" - "ivec2 findLSB(uvec2);" - "ivec3 findLSB(uvec3);" - "ivec4 findLSB(uvec4);" - - "\n"); - } else if (profile == EEsProfile && version >= 310) { - commonBuiltins.append( - "lowp int findLSB( int);" - "lowp ivec2 findLSB(ivec2);" - "lowp ivec3 findLSB(ivec3);" - "lowp ivec4 findLSB(ivec4);" - - "lowp int findLSB( uint);" - "lowp ivec2 findLSB(uvec2);" - "lowp ivec3 findLSB(uvec3);" - "lowp ivec4 findLSB(uvec4);" - - "\n"); - } - - if (profile != EEsProfile && version >= 400) { - commonBuiltins.append( - " int bitCount( int);" - "ivec2 bitCount(ivec2);" - "ivec3 bitCount(ivec3);" - "ivec4 bitCount(ivec4);" - - " int bitCount( uint);" - "ivec2 bitCount(uvec2);" - "ivec3 bitCount(uvec3);" - "ivec4 bitCount(uvec4);" - - " int findMSB(highp int);" - "ivec2 findMSB(highp ivec2);" - "ivec3 findMSB(highp ivec3);" - "ivec4 findMSB(highp ivec4);" - - " int findMSB(highp uint);" - "ivec2 findMSB(highp uvec2);" - "ivec3 findMSB(highp uvec3);" - "ivec4 findMSB(highp uvec4);" - - "\n"); - } - - if ((profile == EEsProfile && version >= 310) || - (profile != EEsProfile && version >= 400)) { - commonBuiltins.append( - " uint uaddCarry(highp uint, highp uint, out lowp uint carry);" - "uvec2 uaddCarry(highp uvec2, highp uvec2, out lowp uvec2 carry);" - "uvec3 uaddCarry(highp uvec3, highp uvec3, out lowp uvec3 carry);" - "uvec4 uaddCarry(highp uvec4, highp uvec4, out lowp uvec4 carry);" - - " uint usubBorrow(highp uint, highp uint, out lowp uint borrow);" - "uvec2 usubBorrow(highp uvec2, highp uvec2, out lowp uvec2 borrow);" - "uvec3 usubBorrow(highp uvec3, highp uvec3, out lowp uvec3 borrow);" - 
"uvec4 usubBorrow(highp uvec4, highp uvec4, out lowp uvec4 borrow);" - - "void umulExtended(highp uint, highp uint, out highp uint, out highp uint lsb);" - "void umulExtended(highp uvec2, highp uvec2, out highp uvec2, out highp uvec2 lsb);" - "void umulExtended(highp uvec3, highp uvec3, out highp uvec3, out highp uvec3 lsb);" - "void umulExtended(highp uvec4, highp uvec4, out highp uvec4, out highp uvec4 lsb);" - - "void imulExtended(highp int, highp int, out highp int, out highp int lsb);" - "void imulExtended(highp ivec2, highp ivec2, out highp ivec2, out highp ivec2 lsb);" - "void imulExtended(highp ivec3, highp ivec3, out highp ivec3, out highp ivec3 lsb);" - "void imulExtended(highp ivec4, highp ivec4, out highp ivec4, out highp ivec4 lsb);" - - " int bitfieldReverse(highp int);" - "ivec2 bitfieldReverse(highp ivec2);" - "ivec3 bitfieldReverse(highp ivec3);" - "ivec4 bitfieldReverse(highp ivec4);" - - " uint bitfieldReverse(highp uint);" - "uvec2 bitfieldReverse(highp uvec2);" - "uvec3 bitfieldReverse(highp uvec3);" - "uvec4 bitfieldReverse(highp uvec4);" - - "\n"); - } - - if (profile == EEsProfile && version >= 310) { - commonBuiltins.append( - "lowp int bitCount( int);" - "lowp ivec2 bitCount(ivec2);" - "lowp ivec3 bitCount(ivec3);" - "lowp ivec4 bitCount(ivec4);" - - "lowp int bitCount( uint);" - "lowp ivec2 bitCount(uvec2);" - "lowp ivec3 bitCount(uvec3);" - "lowp ivec4 bitCount(uvec4);" - - "lowp int findMSB(highp int);" - "lowp ivec2 findMSB(highp ivec2);" - "lowp ivec3 findMSB(highp ivec3);" - "lowp ivec4 findMSB(highp ivec4);" - - "lowp int findMSB(highp uint);" - "lowp ivec2 findMSB(highp uvec2);" - "lowp ivec3 findMSB(highp uvec3);" - "lowp ivec4 findMSB(highp uvec4);" - - "\n"); - } - - // GL_ARB_shader_ballot - if (profile != EEsProfile && version >= 450) { - commonBuiltins.append( - "uint64_t ballotARB(bool);" - - "float readInvocationARB(float, uint);" - "vec2 readInvocationARB(vec2, uint);" - "vec3 readInvocationARB(vec3, uint);" - "vec4 readInvocationARB(vec4, uint);" - - "int readInvocationARB(int, uint);" - "ivec2 readInvocationARB(ivec2, uint);" - "ivec3 readInvocationARB(ivec3, uint);" - "ivec4 readInvocationARB(ivec4, uint);" - - "uint readInvocationARB(uint, uint);" - "uvec2 readInvocationARB(uvec2, uint);" - "uvec3 readInvocationARB(uvec3, uint);" - "uvec4 readInvocationARB(uvec4, uint);" - - "float readFirstInvocationARB(float);" - "vec2 readFirstInvocationARB(vec2);" - "vec3 readFirstInvocationARB(vec3);" - "vec4 readFirstInvocationARB(vec4);" - - "int readFirstInvocationARB(int);" - "ivec2 readFirstInvocationARB(ivec2);" - "ivec3 readFirstInvocationARB(ivec3);" - "ivec4 readFirstInvocationARB(ivec4);" - - "uint readFirstInvocationARB(uint);" - "uvec2 readFirstInvocationARB(uvec2);" - "uvec3 readFirstInvocationARB(uvec3);" - "uvec4 readFirstInvocationARB(uvec4);" - - "\n"); - } - - // GL_ARB_shader_group_vote - if (profile != EEsProfile && version >= 430) { - commonBuiltins.append( - "bool anyInvocationARB(bool);" - "bool allInvocationsARB(bool);" - "bool allInvocationsEqualARB(bool);" - - "\n"); - } - - if (profile != EEsProfile && version >= 460) { - commonBuiltins.append( - "bool anyInvocation(bool);" - "bool allInvocations(bool);" - "bool allInvocationsEqual(bool);" - - "\n"); - } - -#ifdef AMD_EXTENSIONS - // GL_AMD_shader_ballot - if (profile != EEsProfile && version >= 450) { - commonBuiltins.append( - "float minInvocationsAMD(float);" - "vec2 minInvocationsAMD(vec2);" - "vec3 minInvocationsAMD(vec3);" - "vec4 minInvocationsAMD(vec4);" - - "int 
minInvocationsAMD(int);" - "ivec2 minInvocationsAMD(ivec2);" - "ivec3 minInvocationsAMD(ivec3);" - "ivec4 minInvocationsAMD(ivec4);" - - "uint minInvocationsAMD(uint);" - "uvec2 minInvocationsAMD(uvec2);" - "uvec3 minInvocationsAMD(uvec3);" - "uvec4 minInvocationsAMD(uvec4);" - - "double minInvocationsAMD(double);" - "dvec2 minInvocationsAMD(dvec2);" - "dvec3 minInvocationsAMD(dvec3);" - "dvec4 minInvocationsAMD(dvec4);" - - "int64_t minInvocationsAMD(int64_t);" - "i64vec2 minInvocationsAMD(i64vec2);" - "i64vec3 minInvocationsAMD(i64vec3);" - "i64vec4 minInvocationsAMD(i64vec4);" - - "uint64_t minInvocationsAMD(uint64_t);" - "u64vec2 minInvocationsAMD(u64vec2);" - "u64vec3 minInvocationsAMD(u64vec3);" - "u64vec4 minInvocationsAMD(u64vec4);" - - "float16_t minInvocationsAMD(float16_t);" - "f16vec2 minInvocationsAMD(f16vec2);" - "f16vec3 minInvocationsAMD(f16vec3);" - "f16vec4 minInvocationsAMD(f16vec4);" - - "float minInvocationsInclusiveScanAMD(float);" - "vec2 minInvocationsInclusiveScanAMD(vec2);" - "vec3 minInvocationsInclusiveScanAMD(vec3);" - "vec4 minInvocationsInclusiveScanAMD(vec4);" - - "int minInvocationsInclusiveScanAMD(int);" - "ivec2 minInvocationsInclusiveScanAMD(ivec2);" - "ivec3 minInvocationsInclusiveScanAMD(ivec3);" - "ivec4 minInvocationsInclusiveScanAMD(ivec4);" - - "uint minInvocationsInclusiveScanAMD(uint);" - "uvec2 minInvocationsInclusiveScanAMD(uvec2);" - "uvec3 minInvocationsInclusiveScanAMD(uvec3);" - "uvec4 minInvocationsInclusiveScanAMD(uvec4);" - - "double minInvocationsInclusiveScanAMD(double);" - "dvec2 minInvocationsInclusiveScanAMD(dvec2);" - "dvec3 minInvocationsInclusiveScanAMD(dvec3);" - "dvec4 minInvocationsInclusiveScanAMD(dvec4);" - - "int64_t minInvocationsInclusiveScanAMD(int64_t);" - "i64vec2 minInvocationsInclusiveScanAMD(i64vec2);" - "i64vec3 minInvocationsInclusiveScanAMD(i64vec3);" - "i64vec4 minInvocationsInclusiveScanAMD(i64vec4);" - - "uint64_t minInvocationsInclusiveScanAMD(uint64_t);" - "u64vec2 minInvocationsInclusiveScanAMD(u64vec2);" - "u64vec3 minInvocationsInclusiveScanAMD(u64vec3);" - "u64vec4 minInvocationsInclusiveScanAMD(u64vec4);" - - "float16_t minInvocationsInclusiveScanAMD(float16_t);" - "f16vec2 minInvocationsInclusiveScanAMD(f16vec2);" - "f16vec3 minInvocationsInclusiveScanAMD(f16vec3);" - "f16vec4 minInvocationsInclusiveScanAMD(f16vec4);" - - "float minInvocationsExclusiveScanAMD(float);" - "vec2 minInvocationsExclusiveScanAMD(vec2);" - "vec3 minInvocationsExclusiveScanAMD(vec3);" - "vec4 minInvocationsExclusiveScanAMD(vec4);" - - "int minInvocationsExclusiveScanAMD(int);" - "ivec2 minInvocationsExclusiveScanAMD(ivec2);" - "ivec3 minInvocationsExclusiveScanAMD(ivec3);" - "ivec4 minInvocationsExclusiveScanAMD(ivec4);" - - "uint minInvocationsExclusiveScanAMD(uint);" - "uvec2 minInvocationsExclusiveScanAMD(uvec2);" - "uvec3 minInvocationsExclusiveScanAMD(uvec3);" - "uvec4 minInvocationsExclusiveScanAMD(uvec4);" - - "double minInvocationsExclusiveScanAMD(double);" - "dvec2 minInvocationsExclusiveScanAMD(dvec2);" - "dvec3 minInvocationsExclusiveScanAMD(dvec3);" - "dvec4 minInvocationsExclusiveScanAMD(dvec4);" - - "int64_t minInvocationsExclusiveScanAMD(int64_t);" - "i64vec2 minInvocationsExclusiveScanAMD(i64vec2);" - "i64vec3 minInvocationsExclusiveScanAMD(i64vec3);" - "i64vec4 minInvocationsExclusiveScanAMD(i64vec4);" - - "uint64_t minInvocationsExclusiveScanAMD(uint64_t);" - "u64vec2 minInvocationsExclusiveScanAMD(u64vec2);" - "u64vec3 minInvocationsExclusiveScanAMD(u64vec3);" - "u64vec4 
minInvocationsExclusiveScanAMD(u64vec4);" - - "float16_t minInvocationsExclusiveScanAMD(float16_t);" - "f16vec2 minInvocationsExclusiveScanAMD(f16vec2);" - "f16vec3 minInvocationsExclusiveScanAMD(f16vec3);" - "f16vec4 minInvocationsExclusiveScanAMD(f16vec4);" - - "float maxInvocationsAMD(float);" - "vec2 maxInvocationsAMD(vec2);" - "vec3 maxInvocationsAMD(vec3);" - "vec4 maxInvocationsAMD(vec4);" - - "int maxInvocationsAMD(int);" - "ivec2 maxInvocationsAMD(ivec2);" - "ivec3 maxInvocationsAMD(ivec3);" - "ivec4 maxInvocationsAMD(ivec4);" - - "uint maxInvocationsAMD(uint);" - "uvec2 maxInvocationsAMD(uvec2);" - "uvec3 maxInvocationsAMD(uvec3);" - "uvec4 maxInvocationsAMD(uvec4);" - - "double maxInvocationsAMD(double);" - "dvec2 maxInvocationsAMD(dvec2);" - "dvec3 maxInvocationsAMD(dvec3);" - "dvec4 maxInvocationsAMD(dvec4);" - - "int64_t maxInvocationsAMD(int64_t);" - "i64vec2 maxInvocationsAMD(i64vec2);" - "i64vec3 maxInvocationsAMD(i64vec3);" - "i64vec4 maxInvocationsAMD(i64vec4);" - - "uint64_t maxInvocationsAMD(uint64_t);" - "u64vec2 maxInvocationsAMD(u64vec2);" - "u64vec3 maxInvocationsAMD(u64vec3);" - "u64vec4 maxInvocationsAMD(u64vec4);" - - "float16_t maxInvocationsAMD(float16_t);" - "f16vec2 maxInvocationsAMD(f16vec2);" - "f16vec3 maxInvocationsAMD(f16vec3);" - "f16vec4 maxInvocationsAMD(f16vec4);" - - "float maxInvocationsInclusiveScanAMD(float);" - "vec2 maxInvocationsInclusiveScanAMD(vec2);" - "vec3 maxInvocationsInclusiveScanAMD(vec3);" - "vec4 maxInvocationsInclusiveScanAMD(vec4);" - - "int maxInvocationsInclusiveScanAMD(int);" - "ivec2 maxInvocationsInclusiveScanAMD(ivec2);" - "ivec3 maxInvocationsInclusiveScanAMD(ivec3);" - "ivec4 maxInvocationsInclusiveScanAMD(ivec4);" - - "uint maxInvocationsInclusiveScanAMD(uint);" - "uvec2 maxInvocationsInclusiveScanAMD(uvec2);" - "uvec3 maxInvocationsInclusiveScanAMD(uvec3);" - "uvec4 maxInvocationsInclusiveScanAMD(uvec4);" - - "double maxInvocationsInclusiveScanAMD(double);" - "dvec2 maxInvocationsInclusiveScanAMD(dvec2);" - "dvec3 maxInvocationsInclusiveScanAMD(dvec3);" - "dvec4 maxInvocationsInclusiveScanAMD(dvec4);" - - "int64_t maxInvocationsInclusiveScanAMD(int64_t);" - "i64vec2 maxInvocationsInclusiveScanAMD(i64vec2);" - "i64vec3 maxInvocationsInclusiveScanAMD(i64vec3);" - "i64vec4 maxInvocationsInclusiveScanAMD(i64vec4);" - - "uint64_t maxInvocationsInclusiveScanAMD(uint64_t);" - "u64vec2 maxInvocationsInclusiveScanAMD(u64vec2);" - "u64vec3 maxInvocationsInclusiveScanAMD(u64vec3);" - "u64vec4 maxInvocationsInclusiveScanAMD(u64vec4);" - - "float16_t maxInvocationsInclusiveScanAMD(float16_t);" - "f16vec2 maxInvocationsInclusiveScanAMD(f16vec2);" - "f16vec3 maxInvocationsInclusiveScanAMD(f16vec3);" - "f16vec4 maxInvocationsInclusiveScanAMD(f16vec4);" - - "float maxInvocationsExclusiveScanAMD(float);" - "vec2 maxInvocationsExclusiveScanAMD(vec2);" - "vec3 maxInvocationsExclusiveScanAMD(vec3);" - "vec4 maxInvocationsExclusiveScanAMD(vec4);" - - "int maxInvocationsExclusiveScanAMD(int);" - "ivec2 maxInvocationsExclusiveScanAMD(ivec2);" - "ivec3 maxInvocationsExclusiveScanAMD(ivec3);" - "ivec4 maxInvocationsExclusiveScanAMD(ivec4);" - - "uint maxInvocationsExclusiveScanAMD(uint);" - "uvec2 maxInvocationsExclusiveScanAMD(uvec2);" - "uvec3 maxInvocationsExclusiveScanAMD(uvec3);" - "uvec4 maxInvocationsExclusiveScanAMD(uvec4);" - - "double maxInvocationsExclusiveScanAMD(double);" - "dvec2 maxInvocationsExclusiveScanAMD(dvec2);" - "dvec3 maxInvocationsExclusiveScanAMD(dvec3);" - "dvec4 maxInvocationsExclusiveScanAMD(dvec4);" - - "int64_t 
maxInvocationsExclusiveScanAMD(int64_t);" - "i64vec2 maxInvocationsExclusiveScanAMD(i64vec2);" - "i64vec3 maxInvocationsExclusiveScanAMD(i64vec3);" - "i64vec4 maxInvocationsExclusiveScanAMD(i64vec4);" - - "uint64_t maxInvocationsExclusiveScanAMD(uint64_t);" - "u64vec2 maxInvocationsExclusiveScanAMD(u64vec2);" - "u64vec3 maxInvocationsExclusiveScanAMD(u64vec3);" - "u64vec4 maxInvocationsExclusiveScanAMD(u64vec4);" - - "float16_t maxInvocationsExclusiveScanAMD(float16_t);" - "f16vec2 maxInvocationsExclusiveScanAMD(f16vec2);" - "f16vec3 maxInvocationsExclusiveScanAMD(f16vec3);" - "f16vec4 maxInvocationsExclusiveScanAMD(f16vec4);" - - "float addInvocationsAMD(float);" - "vec2 addInvocationsAMD(vec2);" - "vec3 addInvocationsAMD(vec3);" - "vec4 addInvocationsAMD(vec4);" - - "int addInvocationsAMD(int);" - "ivec2 addInvocationsAMD(ivec2);" - "ivec3 addInvocationsAMD(ivec3);" - "ivec4 addInvocationsAMD(ivec4);" - - "uint addInvocationsAMD(uint);" - "uvec2 addInvocationsAMD(uvec2);" - "uvec3 addInvocationsAMD(uvec3);" - "uvec4 addInvocationsAMD(uvec4);" - - "double addInvocationsAMD(double);" - "dvec2 addInvocationsAMD(dvec2);" - "dvec3 addInvocationsAMD(dvec3);" - "dvec4 addInvocationsAMD(dvec4);" - - "int64_t addInvocationsAMD(int64_t);" - "i64vec2 addInvocationsAMD(i64vec2);" - "i64vec3 addInvocationsAMD(i64vec3);" - "i64vec4 addInvocationsAMD(i64vec4);" - - "uint64_t addInvocationsAMD(uint64_t);" - "u64vec2 addInvocationsAMD(u64vec2);" - "u64vec3 addInvocationsAMD(u64vec3);" - "u64vec4 addInvocationsAMD(u64vec4);" - - "float16_t addInvocationsAMD(float16_t);" - "f16vec2 addInvocationsAMD(f16vec2);" - "f16vec3 addInvocationsAMD(f16vec3);" - "f16vec4 addInvocationsAMD(f16vec4);" - - "float addInvocationsInclusiveScanAMD(float);" - "vec2 addInvocationsInclusiveScanAMD(vec2);" - "vec3 addInvocationsInclusiveScanAMD(vec3);" - "vec4 addInvocationsInclusiveScanAMD(vec4);" - - "int addInvocationsInclusiveScanAMD(int);" - "ivec2 addInvocationsInclusiveScanAMD(ivec2);" - "ivec3 addInvocationsInclusiveScanAMD(ivec3);" - "ivec4 addInvocationsInclusiveScanAMD(ivec4);" - - "uint addInvocationsInclusiveScanAMD(uint);" - "uvec2 addInvocationsInclusiveScanAMD(uvec2);" - "uvec3 addInvocationsInclusiveScanAMD(uvec3);" - "uvec4 addInvocationsInclusiveScanAMD(uvec4);" - - "double addInvocationsInclusiveScanAMD(double);" - "dvec2 addInvocationsInclusiveScanAMD(dvec2);" - "dvec3 addInvocationsInclusiveScanAMD(dvec3);" - "dvec4 addInvocationsInclusiveScanAMD(dvec4);" - - "int64_t addInvocationsInclusiveScanAMD(int64_t);" - "i64vec2 addInvocationsInclusiveScanAMD(i64vec2);" - "i64vec3 addInvocationsInclusiveScanAMD(i64vec3);" - "i64vec4 addInvocationsInclusiveScanAMD(i64vec4);" - - "uint64_t addInvocationsInclusiveScanAMD(uint64_t);" - "u64vec2 addInvocationsInclusiveScanAMD(u64vec2);" - "u64vec3 addInvocationsInclusiveScanAMD(u64vec3);" - "u64vec4 addInvocationsInclusiveScanAMD(u64vec4);" - - "float16_t addInvocationsInclusiveScanAMD(float16_t);" - "f16vec2 addInvocationsInclusiveScanAMD(f16vec2);" - "f16vec3 addInvocationsInclusiveScanAMD(f16vec3);" - "f16vec4 addInvocationsInclusiveScanAMD(f16vec4);" - - "float addInvocationsExclusiveScanAMD(float);" - "vec2 addInvocationsExclusiveScanAMD(vec2);" - "vec3 addInvocationsExclusiveScanAMD(vec3);" - "vec4 addInvocationsExclusiveScanAMD(vec4);" - - "int addInvocationsExclusiveScanAMD(int);" - "ivec2 addInvocationsExclusiveScanAMD(ivec2);" - "ivec3 addInvocationsExclusiveScanAMD(ivec3);" - "ivec4 addInvocationsExclusiveScanAMD(ivec4);" - - "uint 
addInvocationsExclusiveScanAMD(uint);" - "uvec2 addInvocationsExclusiveScanAMD(uvec2);" - "uvec3 addInvocationsExclusiveScanAMD(uvec3);" - "uvec4 addInvocationsExclusiveScanAMD(uvec4);" - - "double addInvocationsExclusiveScanAMD(double);" - "dvec2 addInvocationsExclusiveScanAMD(dvec2);" - "dvec3 addInvocationsExclusiveScanAMD(dvec3);" - "dvec4 addInvocationsExclusiveScanAMD(dvec4);" - - "int64_t addInvocationsExclusiveScanAMD(int64_t);" - "i64vec2 addInvocationsExclusiveScanAMD(i64vec2);" - "i64vec3 addInvocationsExclusiveScanAMD(i64vec3);" - "i64vec4 addInvocationsExclusiveScanAMD(i64vec4);" - - "uint64_t addInvocationsExclusiveScanAMD(uint64_t);" - "u64vec2 addInvocationsExclusiveScanAMD(u64vec2);" - "u64vec3 addInvocationsExclusiveScanAMD(u64vec3);" - "u64vec4 addInvocationsExclusiveScanAMD(u64vec4);" - - "float16_t addInvocationsExclusiveScanAMD(float16_t);" - "f16vec2 addInvocationsExclusiveScanAMD(f16vec2);" - "f16vec3 addInvocationsExclusiveScanAMD(f16vec3);" - "f16vec4 addInvocationsExclusiveScanAMD(f16vec4);" - - "float minInvocationsNonUniformAMD(float);" - "vec2 minInvocationsNonUniformAMD(vec2);" - "vec3 minInvocationsNonUniformAMD(vec3);" - "vec4 minInvocationsNonUniformAMD(vec4);" - - "int minInvocationsNonUniformAMD(int);" - "ivec2 minInvocationsNonUniformAMD(ivec2);" - "ivec3 minInvocationsNonUniformAMD(ivec3);" - "ivec4 minInvocationsNonUniformAMD(ivec4);" - - "uint minInvocationsNonUniformAMD(uint);" - "uvec2 minInvocationsNonUniformAMD(uvec2);" - "uvec3 minInvocationsNonUniformAMD(uvec3);" - "uvec4 minInvocationsNonUniformAMD(uvec4);" - - "double minInvocationsNonUniformAMD(double);" - "dvec2 minInvocationsNonUniformAMD(dvec2);" - "dvec3 minInvocationsNonUniformAMD(dvec3);" - "dvec4 minInvocationsNonUniformAMD(dvec4);" - - "int64_t minInvocationsNonUniformAMD(int64_t);" - "i64vec2 minInvocationsNonUniformAMD(i64vec2);" - "i64vec3 minInvocationsNonUniformAMD(i64vec3);" - "i64vec4 minInvocationsNonUniformAMD(i64vec4);" - - "uint64_t minInvocationsNonUniformAMD(uint64_t);" - "u64vec2 minInvocationsNonUniformAMD(u64vec2);" - "u64vec3 minInvocationsNonUniformAMD(u64vec3);" - "u64vec4 minInvocationsNonUniformAMD(u64vec4);" - - "float16_t minInvocationsNonUniformAMD(float16_t);" - "f16vec2 minInvocationsNonUniformAMD(f16vec2);" - "f16vec3 minInvocationsNonUniformAMD(f16vec3);" - "f16vec4 minInvocationsNonUniformAMD(f16vec4);" - - "float minInvocationsInclusiveScanNonUniformAMD(float);" - "vec2 minInvocationsInclusiveScanNonUniformAMD(vec2);" - "vec3 minInvocationsInclusiveScanNonUniformAMD(vec3);" - "vec4 minInvocationsInclusiveScanNonUniformAMD(vec4);" - - "int minInvocationsInclusiveScanNonUniformAMD(int);" - "ivec2 minInvocationsInclusiveScanNonUniformAMD(ivec2);" - "ivec3 minInvocationsInclusiveScanNonUniformAMD(ivec3);" - "ivec4 minInvocationsInclusiveScanNonUniformAMD(ivec4);" - - "uint minInvocationsInclusiveScanNonUniformAMD(uint);" - "uvec2 minInvocationsInclusiveScanNonUniformAMD(uvec2);" - "uvec3 minInvocationsInclusiveScanNonUniformAMD(uvec3);" - "uvec4 minInvocationsInclusiveScanNonUniformAMD(uvec4);" - - "double minInvocationsInclusiveScanNonUniformAMD(double);" - "dvec2 minInvocationsInclusiveScanNonUniformAMD(dvec2);" - "dvec3 minInvocationsInclusiveScanNonUniformAMD(dvec3);" - "dvec4 minInvocationsInclusiveScanNonUniformAMD(dvec4);" - - "int64_t minInvocationsInclusiveScanNonUniformAMD(int64_t);" - "i64vec2 minInvocationsInclusiveScanNonUniformAMD(i64vec2);" - "i64vec3 minInvocationsInclusiveScanNonUniformAMD(i64vec3);" - "i64vec4 
minInvocationsInclusiveScanNonUniformAMD(i64vec4);" - - "uint64_t minInvocationsInclusiveScanNonUniformAMD(uint64_t);" - "u64vec2 minInvocationsInclusiveScanNonUniformAMD(u64vec2);" - "u64vec3 minInvocationsInclusiveScanNonUniformAMD(u64vec3);" - "u64vec4 minInvocationsInclusiveScanNonUniformAMD(u64vec4);" - - "float16_t minInvocationsInclusiveScanNonUniformAMD(float16_t);" - "f16vec2 minInvocationsInclusiveScanNonUniformAMD(f16vec2);" - "f16vec3 minInvocationsInclusiveScanNonUniformAMD(f16vec3);" - "f16vec4 minInvocationsInclusiveScanNonUniformAMD(f16vec4);" - - "float minInvocationsExclusiveScanNonUniformAMD(float);" - "vec2 minInvocationsExclusiveScanNonUniformAMD(vec2);" - "vec3 minInvocationsExclusiveScanNonUniformAMD(vec3);" - "vec4 minInvocationsExclusiveScanNonUniformAMD(vec4);" - - "int minInvocationsExclusiveScanNonUniformAMD(int);" - "ivec2 minInvocationsExclusiveScanNonUniformAMD(ivec2);" - "ivec3 minInvocationsExclusiveScanNonUniformAMD(ivec3);" - "ivec4 minInvocationsExclusiveScanNonUniformAMD(ivec4);" - - "uint minInvocationsExclusiveScanNonUniformAMD(uint);" - "uvec2 minInvocationsExclusiveScanNonUniformAMD(uvec2);" - "uvec3 minInvocationsExclusiveScanNonUniformAMD(uvec3);" - "uvec4 minInvocationsExclusiveScanNonUniformAMD(uvec4);" - - "double minInvocationsExclusiveScanNonUniformAMD(double);" - "dvec2 minInvocationsExclusiveScanNonUniformAMD(dvec2);" - "dvec3 minInvocationsExclusiveScanNonUniformAMD(dvec3);" - "dvec4 minInvocationsExclusiveScanNonUniformAMD(dvec4);" - - "int64_t minInvocationsExclusiveScanNonUniformAMD(int64_t);" - "i64vec2 minInvocationsExclusiveScanNonUniformAMD(i64vec2);" - "i64vec3 minInvocationsExclusiveScanNonUniformAMD(i64vec3);" - "i64vec4 minInvocationsExclusiveScanNonUniformAMD(i64vec4);" - - "uint64_t minInvocationsExclusiveScanNonUniformAMD(uint64_t);" - "u64vec2 minInvocationsExclusiveScanNonUniformAMD(u64vec2);" - "u64vec3 minInvocationsExclusiveScanNonUniformAMD(u64vec3);" - "u64vec4 minInvocationsExclusiveScanNonUniformAMD(u64vec4);" - - "float16_t minInvocationsExclusiveScanNonUniformAMD(float16_t);" - "f16vec2 minInvocationsExclusiveScanNonUniformAMD(f16vec2);" - "f16vec3 minInvocationsExclusiveScanNonUniformAMD(f16vec3);" - "f16vec4 minInvocationsExclusiveScanNonUniformAMD(f16vec4);" - - "float maxInvocationsNonUniformAMD(float);" - "vec2 maxInvocationsNonUniformAMD(vec2);" - "vec3 maxInvocationsNonUniformAMD(vec3);" - "vec4 maxInvocationsNonUniformAMD(vec4);" - - "int maxInvocationsNonUniformAMD(int);" - "ivec2 maxInvocationsNonUniformAMD(ivec2);" - "ivec3 maxInvocationsNonUniformAMD(ivec3);" - "ivec4 maxInvocationsNonUniformAMD(ivec4);" - - "uint maxInvocationsNonUniformAMD(uint);" - "uvec2 maxInvocationsNonUniformAMD(uvec2);" - "uvec3 maxInvocationsNonUniformAMD(uvec3);" - "uvec4 maxInvocationsNonUniformAMD(uvec4);" - - "double maxInvocationsNonUniformAMD(double);" - "dvec2 maxInvocationsNonUniformAMD(dvec2);" - "dvec3 maxInvocationsNonUniformAMD(dvec3);" - "dvec4 maxInvocationsNonUniformAMD(dvec4);" - - "int64_t maxInvocationsNonUniformAMD(int64_t);" - "i64vec2 maxInvocationsNonUniformAMD(i64vec2);" - "i64vec3 maxInvocationsNonUniformAMD(i64vec3);" - "i64vec4 maxInvocationsNonUniformAMD(i64vec4);" - - "uint64_t maxInvocationsNonUniformAMD(uint64_t);" - "u64vec2 maxInvocationsNonUniformAMD(u64vec2);" - "u64vec3 maxInvocationsNonUniformAMD(u64vec3);" - "u64vec4 maxInvocationsNonUniformAMD(u64vec4);" - - "float16_t maxInvocationsNonUniformAMD(float16_t);" - "f16vec2 maxInvocationsNonUniformAMD(f16vec2);" - "f16vec3 
maxInvocationsNonUniformAMD(f16vec3);" - "f16vec4 maxInvocationsNonUniformAMD(f16vec4);" - - "float maxInvocationsInclusiveScanNonUniformAMD(float);" - "vec2 maxInvocationsInclusiveScanNonUniformAMD(vec2);" - "vec3 maxInvocationsInclusiveScanNonUniformAMD(vec3);" - "vec4 maxInvocationsInclusiveScanNonUniformAMD(vec4);" - - "int maxInvocationsInclusiveScanNonUniformAMD(int);" - "ivec2 maxInvocationsInclusiveScanNonUniformAMD(ivec2);" - "ivec3 maxInvocationsInclusiveScanNonUniformAMD(ivec3);" - "ivec4 maxInvocationsInclusiveScanNonUniformAMD(ivec4);" - - "uint maxInvocationsInclusiveScanNonUniformAMD(uint);" - "uvec2 maxInvocationsInclusiveScanNonUniformAMD(uvec2);" - "uvec3 maxInvocationsInclusiveScanNonUniformAMD(uvec3);" - "uvec4 maxInvocationsInclusiveScanNonUniformAMD(uvec4);" - - "double maxInvocationsInclusiveScanNonUniformAMD(double);" - "dvec2 maxInvocationsInclusiveScanNonUniformAMD(dvec2);" - "dvec3 maxInvocationsInclusiveScanNonUniformAMD(dvec3);" - "dvec4 maxInvocationsInclusiveScanNonUniformAMD(dvec4);" - - "int64_t maxInvocationsInclusiveScanNonUniformAMD(int64_t);" - "i64vec2 maxInvocationsInclusiveScanNonUniformAMD(i64vec2);" - "i64vec3 maxInvocationsInclusiveScanNonUniformAMD(i64vec3);" - "i64vec4 maxInvocationsInclusiveScanNonUniformAMD(i64vec4);" - - "uint64_t maxInvocationsInclusiveScanNonUniformAMD(uint64_t);" - "u64vec2 maxInvocationsInclusiveScanNonUniformAMD(u64vec2);" - "u64vec3 maxInvocationsInclusiveScanNonUniformAMD(u64vec3);" - "u64vec4 maxInvocationsInclusiveScanNonUniformAMD(u64vec4);" - - "float16_t maxInvocationsInclusiveScanNonUniformAMD(float16_t);" - "f16vec2 maxInvocationsInclusiveScanNonUniformAMD(f16vec2);" - "f16vec3 maxInvocationsInclusiveScanNonUniformAMD(f16vec3);" - "f16vec4 maxInvocationsInclusiveScanNonUniformAMD(f16vec4);" - - "float maxInvocationsExclusiveScanNonUniformAMD(float);" - "vec2 maxInvocationsExclusiveScanNonUniformAMD(vec2);" - "vec3 maxInvocationsExclusiveScanNonUniformAMD(vec3);" - "vec4 maxInvocationsExclusiveScanNonUniformAMD(vec4);" - - "int maxInvocationsExclusiveScanNonUniformAMD(int);" - "ivec2 maxInvocationsExclusiveScanNonUniformAMD(ivec2);" - "ivec3 maxInvocationsExclusiveScanNonUniformAMD(ivec3);" - "ivec4 maxInvocationsExclusiveScanNonUniformAMD(ivec4);" - - "uint maxInvocationsExclusiveScanNonUniformAMD(uint);" - "uvec2 maxInvocationsExclusiveScanNonUniformAMD(uvec2);" - "uvec3 maxInvocationsExclusiveScanNonUniformAMD(uvec3);" - "uvec4 maxInvocationsExclusiveScanNonUniformAMD(uvec4);" - - "double maxInvocationsExclusiveScanNonUniformAMD(double);" - "dvec2 maxInvocationsExclusiveScanNonUniformAMD(dvec2);" - "dvec3 maxInvocationsExclusiveScanNonUniformAMD(dvec3);" - "dvec4 maxInvocationsExclusiveScanNonUniformAMD(dvec4);" - - "int64_t maxInvocationsExclusiveScanNonUniformAMD(int64_t);" - "i64vec2 maxInvocationsExclusiveScanNonUniformAMD(i64vec2);" - "i64vec3 maxInvocationsExclusiveScanNonUniformAMD(i64vec3);" - "i64vec4 maxInvocationsExclusiveScanNonUniformAMD(i64vec4);" - - "uint64_t maxInvocationsExclusiveScanNonUniformAMD(uint64_t);" - "u64vec2 maxInvocationsExclusiveScanNonUniformAMD(u64vec2);" - "u64vec3 maxInvocationsExclusiveScanNonUniformAMD(u64vec3);" - "u64vec4 maxInvocationsExclusiveScanNonUniformAMD(u64vec4);" - - "float16_t maxInvocationsExclusiveScanNonUniformAMD(float16_t);" - "f16vec2 maxInvocationsExclusiveScanNonUniformAMD(f16vec2);" - "f16vec3 maxInvocationsExclusiveScanNonUniformAMD(f16vec3);" - "f16vec4 maxInvocationsExclusiveScanNonUniformAMD(f16vec4);" - - "float 
addInvocationsNonUniformAMD(float);" - "vec2 addInvocationsNonUniformAMD(vec2);" - "vec3 addInvocationsNonUniformAMD(vec3);" - "vec4 addInvocationsNonUniformAMD(vec4);" - - "int addInvocationsNonUniformAMD(int);" - "ivec2 addInvocationsNonUniformAMD(ivec2);" - "ivec3 addInvocationsNonUniformAMD(ivec3);" - "ivec4 addInvocationsNonUniformAMD(ivec4);" - - "uint addInvocationsNonUniformAMD(uint);" - "uvec2 addInvocationsNonUniformAMD(uvec2);" - "uvec3 addInvocationsNonUniformAMD(uvec3);" - "uvec4 addInvocationsNonUniformAMD(uvec4);" - - "double addInvocationsNonUniformAMD(double);" - "dvec2 addInvocationsNonUniformAMD(dvec2);" - "dvec3 addInvocationsNonUniformAMD(dvec3);" - "dvec4 addInvocationsNonUniformAMD(dvec4);" - - "int64_t addInvocationsNonUniformAMD(int64_t);" - "i64vec2 addInvocationsNonUniformAMD(i64vec2);" - "i64vec3 addInvocationsNonUniformAMD(i64vec3);" - "i64vec4 addInvocationsNonUniformAMD(i64vec4);" - - "uint64_t addInvocationsNonUniformAMD(uint64_t);" - "u64vec2 addInvocationsNonUniformAMD(u64vec2);" - "u64vec3 addInvocationsNonUniformAMD(u64vec3);" - "u64vec4 addInvocationsNonUniformAMD(u64vec4);" - - "float16_t addInvocationsNonUniformAMD(float16_t);" - "f16vec2 addInvocationsNonUniformAMD(f16vec2);" - "f16vec3 addInvocationsNonUniformAMD(f16vec3);" - "f16vec4 addInvocationsNonUniformAMD(f16vec4);" - - "float addInvocationsInclusiveScanNonUniformAMD(float);" - "vec2 addInvocationsInclusiveScanNonUniformAMD(vec2);" - "vec3 addInvocationsInclusiveScanNonUniformAMD(vec3);" - "vec4 addInvocationsInclusiveScanNonUniformAMD(vec4);" - - "int addInvocationsInclusiveScanNonUniformAMD(int);" - "ivec2 addInvocationsInclusiveScanNonUniformAMD(ivec2);" - "ivec3 addInvocationsInclusiveScanNonUniformAMD(ivec3);" - "ivec4 addInvocationsInclusiveScanNonUniformAMD(ivec4);" - - "uint addInvocationsInclusiveScanNonUniformAMD(uint);" - "uvec2 addInvocationsInclusiveScanNonUniformAMD(uvec2);" - "uvec3 addInvocationsInclusiveScanNonUniformAMD(uvec3);" - "uvec4 addInvocationsInclusiveScanNonUniformAMD(uvec4);" - - "double addInvocationsInclusiveScanNonUniformAMD(double);" - "dvec2 addInvocationsInclusiveScanNonUniformAMD(dvec2);" - "dvec3 addInvocationsInclusiveScanNonUniformAMD(dvec3);" - "dvec4 addInvocationsInclusiveScanNonUniformAMD(dvec4);" - - "int64_t addInvocationsInclusiveScanNonUniformAMD(int64_t);" - "i64vec2 addInvocationsInclusiveScanNonUniformAMD(i64vec2);" - "i64vec3 addInvocationsInclusiveScanNonUniformAMD(i64vec3);" - "i64vec4 addInvocationsInclusiveScanNonUniformAMD(i64vec4);" - - "uint64_t addInvocationsInclusiveScanNonUniformAMD(uint64_t);" - "u64vec2 addInvocationsInclusiveScanNonUniformAMD(u64vec2);" - "u64vec3 addInvocationsInclusiveScanNonUniformAMD(u64vec3);" - "u64vec4 addInvocationsInclusiveScanNonUniformAMD(u64vec4);" - - "float16_t addInvocationsInclusiveScanNonUniformAMD(float16_t);" - "f16vec2 addInvocationsInclusiveScanNonUniformAMD(f16vec2);" - "f16vec3 addInvocationsInclusiveScanNonUniformAMD(f16vec3);" - "f16vec4 addInvocationsInclusiveScanNonUniformAMD(f16vec4);" - - "float addInvocationsExclusiveScanNonUniformAMD(float);" - "vec2 addInvocationsExclusiveScanNonUniformAMD(vec2);" - "vec3 addInvocationsExclusiveScanNonUniformAMD(vec3);" - "vec4 addInvocationsExclusiveScanNonUniformAMD(vec4);" - - "int addInvocationsExclusiveScanNonUniformAMD(int);" - "ivec2 addInvocationsExclusiveScanNonUniformAMD(ivec2);" - "ivec3 addInvocationsExclusiveScanNonUniformAMD(ivec3);" - "ivec4 addInvocationsExclusiveScanNonUniformAMD(ivec4);" - - "uint 
addInvocationsExclusiveScanNonUniformAMD(uint);" - "uvec2 addInvocationsExclusiveScanNonUniformAMD(uvec2);" - "uvec3 addInvocationsExclusiveScanNonUniformAMD(uvec3);" - "uvec4 addInvocationsExclusiveScanNonUniformAMD(uvec4);" - - "double addInvocationsExclusiveScanNonUniformAMD(double);" - "dvec2 addInvocationsExclusiveScanNonUniformAMD(dvec2);" - "dvec3 addInvocationsExclusiveScanNonUniformAMD(dvec3);" - "dvec4 addInvocationsExclusiveScanNonUniformAMD(dvec4);" - - "int64_t addInvocationsExclusiveScanNonUniformAMD(int64_t);" - "i64vec2 addInvocationsExclusiveScanNonUniformAMD(i64vec2);" - "i64vec3 addInvocationsExclusiveScanNonUniformAMD(i64vec3);" - "i64vec4 addInvocationsExclusiveScanNonUniformAMD(i64vec4);" - - "uint64_t addInvocationsExclusiveScanNonUniformAMD(uint64_t);" - "u64vec2 addInvocationsExclusiveScanNonUniformAMD(u64vec2);" - "u64vec3 addInvocationsExclusiveScanNonUniformAMD(u64vec3);" - "u64vec4 addInvocationsExclusiveScanNonUniformAMD(u64vec4);" - - "float16_t addInvocationsExclusiveScanNonUniformAMD(float16_t);" - "f16vec2 addInvocationsExclusiveScanNonUniformAMD(f16vec2);" - "f16vec3 addInvocationsExclusiveScanNonUniformAMD(f16vec3);" - "f16vec4 addInvocationsExclusiveScanNonUniformAMD(f16vec4);" - - "float swizzleInvocationsAMD(float, uvec4);" - "vec2 swizzleInvocationsAMD(vec2, uvec4);" - "vec3 swizzleInvocationsAMD(vec3, uvec4);" - "vec4 swizzleInvocationsAMD(vec4, uvec4);" - - "int swizzleInvocationsAMD(int, uvec4);" - "ivec2 swizzleInvocationsAMD(ivec2, uvec4);" - "ivec3 swizzleInvocationsAMD(ivec3, uvec4);" - "ivec4 swizzleInvocationsAMD(ivec4, uvec4);" - - "uint swizzleInvocationsAMD(uint, uvec4);" - "uvec2 swizzleInvocationsAMD(uvec2, uvec4);" - "uvec3 swizzleInvocationsAMD(uvec3, uvec4);" - "uvec4 swizzleInvocationsAMD(uvec4, uvec4);" - - "float swizzleInvocationsMaskedAMD(float, uvec3);" - "vec2 swizzleInvocationsMaskedAMD(vec2, uvec3);" - "vec3 swizzleInvocationsMaskedAMD(vec3, uvec3);" - "vec4 swizzleInvocationsMaskedAMD(vec4, uvec3);" - - "int swizzleInvocationsMaskedAMD(int, uvec3);" - "ivec2 swizzleInvocationsMaskedAMD(ivec2, uvec3);" - "ivec3 swizzleInvocationsMaskedAMD(ivec3, uvec3);" - "ivec4 swizzleInvocationsMaskedAMD(ivec4, uvec3);" - - "uint swizzleInvocationsMaskedAMD(uint, uvec3);" - "uvec2 swizzleInvocationsMaskedAMD(uvec2, uvec3);" - "uvec3 swizzleInvocationsMaskedAMD(uvec3, uvec3);" - "uvec4 swizzleInvocationsMaskedAMD(uvec4, uvec3);" - - "float writeInvocationAMD(float, float, uint);" - "vec2 writeInvocationAMD(vec2, vec2, uint);" - "vec3 writeInvocationAMD(vec3, vec3, uint);" - "vec4 writeInvocationAMD(vec4, vec4, uint);" - - "int writeInvocationAMD(int, int, uint);" - "ivec2 writeInvocationAMD(ivec2, ivec2, uint);" - "ivec3 writeInvocationAMD(ivec3, ivec3, uint);" - "ivec4 writeInvocationAMD(ivec4, ivec4, uint);" - - "uint writeInvocationAMD(uint, uint, uint);" - "uvec2 writeInvocationAMD(uvec2, uvec2, uint);" - "uvec3 writeInvocationAMD(uvec3, uvec3, uint);" - "uvec4 writeInvocationAMD(uvec4, uvec4, uint);" - - "uint mbcntAMD(uint64_t);" - - "\n"); - } - - // GL_AMD_gcn_shader - if (profile != EEsProfile && version >= 450) { - commonBuiltins.append( - "float cubeFaceIndexAMD(vec3);" - "vec2 cubeFaceCoordAMD(vec3);" - "uint64_t timeAMD();" - - "\n"); - } - - // GL_AMD_gpu_shader_half_float - if (profile != EEsProfile && version >= 450) { - commonBuiltins.append( - "float16_t radians(float16_t);" - "f16vec2 radians(f16vec2);" - "f16vec3 radians(f16vec3);" - "f16vec4 radians(f16vec4);" - - "float16_t degrees(float16_t);" - "f16vec2 
degrees(f16vec2);" - "f16vec3 degrees(f16vec3);" - "f16vec4 degrees(f16vec4);" - - "float16_t sin(float16_t);" - "f16vec2 sin(f16vec2);" - "f16vec3 sin(f16vec3);" - "f16vec4 sin(f16vec4);" - - "float16_t cos(float16_t);" - "f16vec2 cos(f16vec2);" - "f16vec3 cos(f16vec3);" - "f16vec4 cos(f16vec4);" - - "float16_t tan(float16_t);" - "f16vec2 tan(f16vec2);" - "f16vec3 tan(f16vec3);" - "f16vec4 tan(f16vec4);" - - "float16_t asin(float16_t);" - "f16vec2 asin(f16vec2);" - "f16vec3 asin(f16vec3);" - "f16vec4 asin(f16vec4);" - - "float16_t acos(float16_t);" - "f16vec2 acos(f16vec2);" - "f16vec3 acos(f16vec3);" - "f16vec4 acos(f16vec4);" - - "float16_t atan(float16_t, float16_t);" - "f16vec2 atan(f16vec2, f16vec2);" - "f16vec3 atan(f16vec3, f16vec3);" - "f16vec4 atan(f16vec4, f16vec4);" - - "float16_t atan(float16_t);" - "f16vec2 atan(f16vec2);" - "f16vec3 atan(f16vec3);" - "f16vec4 atan(f16vec4);" - - "float16_t sinh(float16_t);" - "f16vec2 sinh(f16vec2);" - "f16vec3 sinh(f16vec3);" - "f16vec4 sinh(f16vec4);" - - "float16_t cosh(float16_t);" - "f16vec2 cosh(f16vec2);" - "f16vec3 cosh(f16vec3);" - "f16vec4 cosh(f16vec4);" - - "float16_t tanh(float16_t);" - "f16vec2 tanh(f16vec2);" - "f16vec3 tanh(f16vec3);" - "f16vec4 tanh(f16vec4);" - - "float16_t asinh(float16_t);" - "f16vec2 asinh(f16vec2);" - "f16vec3 asinh(f16vec3);" - "f16vec4 asinh(f16vec4);" - - "float16_t acosh(float16_t);" - "f16vec2 acosh(f16vec2);" - "f16vec3 acosh(f16vec3);" - "f16vec4 acosh(f16vec4);" - - "float16_t atanh(float16_t);" - "f16vec2 atanh(f16vec2);" - "f16vec3 atanh(f16vec3);" - "f16vec4 atanh(f16vec4);" - - "float16_t pow(float16_t, float16_t);" - "f16vec2 pow(f16vec2, f16vec2);" - "f16vec3 pow(f16vec3, f16vec3);" - "f16vec4 pow(f16vec4, f16vec4);" - - "float16_t exp(float16_t);" - "f16vec2 exp(f16vec2);" - "f16vec3 exp(f16vec3);" - "f16vec4 exp(f16vec4);" - - "float16_t log(float16_t);" - "f16vec2 log(f16vec2);" - "f16vec3 log(f16vec3);" - "f16vec4 log(f16vec4);" - - "float16_t exp2(float16_t);" - "f16vec2 exp2(f16vec2);" - "f16vec3 exp2(f16vec3);" - "f16vec4 exp2(f16vec4);" - - "float16_t log2(float16_t);" - "f16vec2 log2(f16vec2);" - "f16vec3 log2(f16vec3);" - "f16vec4 log2(f16vec4);" - - "float16_t sqrt(float16_t);" - "f16vec2 sqrt(f16vec2);" - "f16vec3 sqrt(f16vec3);" - "f16vec4 sqrt(f16vec4);" - - "float16_t inversesqrt(float16_t);" - "f16vec2 inversesqrt(f16vec2);" - "f16vec3 inversesqrt(f16vec3);" - "f16vec4 inversesqrt(f16vec4);" - - "float16_t abs(float16_t);" - "f16vec2 abs(f16vec2);" - "f16vec3 abs(f16vec3);" - "f16vec4 abs(f16vec4);" - - "float16_t sign(float16_t);" - "f16vec2 sign(f16vec2);" - "f16vec3 sign(f16vec3);" - "f16vec4 sign(f16vec4);" - - "float16_t floor(float16_t);" - "f16vec2 floor(f16vec2);" - "f16vec3 floor(f16vec3);" - "f16vec4 floor(f16vec4);" - - "float16_t trunc(float16_t);" - "f16vec2 trunc(f16vec2);" - "f16vec3 trunc(f16vec3);" - "f16vec4 trunc(f16vec4);" - - "float16_t round(float16_t);" - "f16vec2 round(f16vec2);" - "f16vec3 round(f16vec3);" - "f16vec4 round(f16vec4);" - - "float16_t roundEven(float16_t);" - "f16vec2 roundEven(f16vec2);" - "f16vec3 roundEven(f16vec3);" - "f16vec4 roundEven(f16vec4);" - - "float16_t ceil(float16_t);" - "f16vec2 ceil(f16vec2);" - "f16vec3 ceil(f16vec3);" - "f16vec4 ceil(f16vec4);" - - "float16_t fract(float16_t);" - "f16vec2 fract(f16vec2);" - "f16vec3 fract(f16vec3);" - "f16vec4 fract(f16vec4);" - - "float16_t mod(float16_t, float16_t);" - "f16vec2 mod(f16vec2, float16_t);" - "f16vec3 mod(f16vec3, float16_t);" - "f16vec4 mod(f16vec4, float16_t);" - 
"f16vec2 mod(f16vec2, f16vec2);" - "f16vec3 mod(f16vec3, f16vec3);" - "f16vec4 mod(f16vec4, f16vec4);" - - "float16_t modf(float16_t, out float16_t);" - "f16vec2 modf(f16vec2, out f16vec2);" - "f16vec3 modf(f16vec3, out f16vec3);" - "f16vec4 modf(f16vec4, out f16vec4);" - - "float16_t min(float16_t, float16_t);" - "f16vec2 min(f16vec2, float16_t);" - "f16vec3 min(f16vec3, float16_t);" - "f16vec4 min(f16vec4, float16_t);" - "f16vec2 min(f16vec2, f16vec2);" - "f16vec3 min(f16vec3, f16vec3);" - "f16vec4 min(f16vec4, f16vec4);" - - "float16_t max(float16_t, float16_t);" - "f16vec2 max(f16vec2, float16_t);" - "f16vec3 max(f16vec3, float16_t);" - "f16vec4 max(f16vec4, float16_t);" - "f16vec2 max(f16vec2, f16vec2);" - "f16vec3 max(f16vec3, f16vec3);" - "f16vec4 max(f16vec4, f16vec4);" - - "float16_t clamp(float16_t, float16_t, float16_t);" - "f16vec2 clamp(f16vec2, float16_t, float16_t);" - "f16vec3 clamp(f16vec3, float16_t, float16_t);" - "f16vec4 clamp(f16vec4, float16_t, float16_t);" - "f16vec2 clamp(f16vec2, f16vec2, f16vec2);" - "f16vec3 clamp(f16vec3, f16vec3, f16vec3);" - "f16vec4 clamp(f16vec4, f16vec4, f16vec4);" - - "float16_t mix(float16_t, float16_t, float16_t);" - "f16vec2 mix(f16vec2, f16vec2, float16_t);" - "f16vec3 mix(f16vec3, f16vec3, float16_t);" - "f16vec4 mix(f16vec4, f16vec4, float16_t);" - "f16vec2 mix(f16vec2, f16vec2, f16vec2);" - "f16vec3 mix(f16vec3, f16vec3, f16vec3);" - "f16vec4 mix(f16vec4, f16vec4, f16vec4);" - "float16_t mix(float16_t, float16_t, bool);" - "f16vec2 mix(f16vec2, f16vec2, bvec2);" - "f16vec3 mix(f16vec3, f16vec3, bvec3);" - "f16vec4 mix(f16vec4, f16vec4, bvec4);" - - "float16_t step(float16_t, float16_t);" - "f16vec2 step(f16vec2, f16vec2);" - "f16vec3 step(f16vec3, f16vec3);" - "f16vec4 step(f16vec4, f16vec4);" - "f16vec2 step(float16_t, f16vec2);" - "f16vec3 step(float16_t, f16vec3);" - "f16vec4 step(float16_t, f16vec4);" - - "float16_t smoothstep(float16_t, float16_t, float16_t);" - "f16vec2 smoothstep(f16vec2, f16vec2, f16vec2);" - "f16vec3 smoothstep(f16vec3, f16vec3, f16vec3);" - "f16vec4 smoothstep(f16vec4, f16vec4, f16vec4);" - "f16vec2 smoothstep(float16_t, float16_t, f16vec2);" - "f16vec3 smoothstep(float16_t, float16_t, f16vec3);" - "f16vec4 smoothstep(float16_t, float16_t, f16vec4);" - - "bool isnan(float16_t);" - "bvec2 isnan(f16vec2);" - "bvec3 isnan(f16vec3);" - "bvec4 isnan(f16vec4);" - - "bool isinf(float16_t);" - "bvec2 isinf(f16vec2);" - "bvec3 isinf(f16vec3);" - "bvec4 isinf(f16vec4);" - - "float16_t fma(float16_t, float16_t, float16_t);" - "f16vec2 fma(f16vec2, f16vec2, f16vec2);" - "f16vec3 fma(f16vec3, f16vec3, f16vec3);" - "f16vec4 fma(f16vec4, f16vec4, f16vec4);" - - "float16_t frexp(float16_t, out int);" - "f16vec2 frexp(f16vec2, out ivec2);" - "f16vec3 frexp(f16vec3, out ivec3);" - "f16vec4 frexp(f16vec4, out ivec4);" - - "float16_t ldexp(float16_t, in int);" - "f16vec2 ldexp(f16vec2, in ivec2);" - "f16vec3 ldexp(f16vec3, in ivec3);" - "f16vec4 ldexp(f16vec4, in ivec4);" - - "uint packFloat2x16(f16vec2);" - "f16vec2 unpackFloat2x16(uint);" - - "float16_t length(float16_t);" - "float16_t length(f16vec2);" - "float16_t length(f16vec3);" - "float16_t length(f16vec4);" - - "float16_t distance(float16_t, float16_t);" - "float16_t distance(f16vec2, f16vec2);" - "float16_t distance(f16vec3, f16vec3);" - "float16_t distance(f16vec4, f16vec4);" - - "float16_t dot(float16_t, float16_t);" - "float16_t dot(f16vec2, f16vec2);" - "float16_t dot(f16vec3, f16vec3);" - "float16_t dot(f16vec4, f16vec4);" - - "f16vec3 cross(f16vec3, 
f16vec3);" - - "float16_t normalize(float16_t);" - "f16vec2 normalize(f16vec2);" - "f16vec3 normalize(f16vec3);" - "f16vec4 normalize(f16vec4);" - - "float16_t faceforward(float16_t, float16_t, float16_t);" - "f16vec2 faceforward(f16vec2, f16vec2, f16vec2);" - "f16vec3 faceforward(f16vec3, f16vec3, f16vec3);" - "f16vec4 faceforward(f16vec4, f16vec4, f16vec4);" - - "float16_t reflect(float16_t, float16_t);" - "f16vec2 reflect(f16vec2, f16vec2);" - "f16vec3 reflect(f16vec3, f16vec3);" - "f16vec4 reflect(f16vec4, f16vec4);" - - "float16_t refract(float16_t, float16_t, float16_t);" - "f16vec2 refract(f16vec2, f16vec2, float16_t);" - "f16vec3 refract(f16vec3, f16vec3, float16_t);" - "f16vec4 refract(f16vec4, f16vec4, float16_t);" - - "f16mat2 matrixCompMult(f16mat2, f16mat2);" - "f16mat3 matrixCompMult(f16mat3, f16mat3);" - "f16mat4 matrixCompMult(f16mat4, f16mat4);" - "f16mat2x3 matrixCompMult(f16mat2x3, f16mat2x3);" - "f16mat2x4 matrixCompMult(f16mat2x4, f16mat2x4);" - "f16mat3x2 matrixCompMult(f16mat3x2, f16mat3x2);" - "f16mat3x4 matrixCompMult(f16mat3x4, f16mat3x4);" - "f16mat4x2 matrixCompMult(f16mat4x2, f16mat4x2);" - "f16mat4x3 matrixCompMult(f16mat4x3, f16mat4x3);" - - "f16mat2 outerProduct(f16vec2, f16vec2);" - "f16mat3 outerProduct(f16vec3, f16vec3);" - "f16mat4 outerProduct(f16vec4, f16vec4);" - "f16mat2x3 outerProduct(f16vec3, f16vec2);" - "f16mat3x2 outerProduct(f16vec2, f16vec3);" - "f16mat2x4 outerProduct(f16vec4, f16vec2);" - "f16mat4x2 outerProduct(f16vec2, f16vec4);" - "f16mat3x4 outerProduct(f16vec4, f16vec3);" - "f16mat4x3 outerProduct(f16vec3, f16vec4);" - - "f16mat2 transpose(f16mat2);" - "f16mat3 transpose(f16mat3);" - "f16mat4 transpose(f16mat4);" - "f16mat2x3 transpose(f16mat3x2);" - "f16mat3x2 transpose(f16mat2x3);" - "f16mat2x4 transpose(f16mat4x2);" - "f16mat4x2 transpose(f16mat2x4);" - "f16mat3x4 transpose(f16mat4x3);" - "f16mat4x3 transpose(f16mat3x4);" - - "float16_t determinant(f16mat2);" - "float16_t determinant(f16mat3);" - "float16_t determinant(f16mat4);" - - "f16mat2 inverse(f16mat2);" - "f16mat3 inverse(f16mat3);" - "f16mat4 inverse(f16mat4);" - - "bvec2 lessThan(f16vec2, f16vec2);" - "bvec3 lessThan(f16vec3, f16vec3);" - "bvec4 lessThan(f16vec4, f16vec4);" - - "bvec2 lessThanEqual(f16vec2, f16vec2);" - "bvec3 lessThanEqual(f16vec3, f16vec3);" - "bvec4 lessThanEqual(f16vec4, f16vec4);" - - "bvec2 greaterThan(f16vec2, f16vec2);" - "bvec3 greaterThan(f16vec3, f16vec3);" - "bvec4 greaterThan(f16vec4, f16vec4);" - - "bvec2 greaterThanEqual(f16vec2, f16vec2);" - "bvec3 greaterThanEqual(f16vec3, f16vec3);" - "bvec4 greaterThanEqual(f16vec4, f16vec4);" - - "bvec2 equal(f16vec2, f16vec2);" - "bvec3 equal(f16vec3, f16vec3);" - "bvec4 equal(f16vec4, f16vec4);" - - "bvec2 notEqual(f16vec2, f16vec2);" - "bvec3 notEqual(f16vec3, f16vec3);" - "bvec4 notEqual(f16vec4, f16vec4);" - - "\n"); - } - - // GL_AMD_gpu_shader_int16 - if (profile != EEsProfile && version >= 450) { - commonBuiltins.append( - "int16_t abs(int16_t);" - "i16vec2 abs(i16vec2);" - "i16vec3 abs(i16vec3);" - "i16vec4 abs(i16vec4);" - - "int16_t sign(int16_t);" - "i16vec2 sign(i16vec2);" - "i16vec3 sign(i16vec3);" - "i16vec4 sign(i16vec4);" - - "int16_t min(int16_t, int16_t);" - "i16vec2 min(i16vec2, int16_t);" - "i16vec3 min(i16vec3, int16_t);" - "i16vec4 min(i16vec4, int16_t);" - "i16vec2 min(i16vec2, i16vec2);" - "i16vec3 min(i16vec3, i16vec3);" - "i16vec4 min(i16vec4, i16vec4);" - "uint16_t min(uint16_t, uint16_t);" - "u16vec2 min(u16vec2, uint16_t);" - "u16vec3 min(u16vec3, uint16_t);" - "u16vec4 
min(u16vec4, uint16_t);" - "u16vec2 min(u16vec2, u16vec2);" - "u16vec3 min(u16vec3, u16vec3);" - "u16vec4 min(u16vec4, u16vec4);" - - "int16_t max(int16_t, int16_t);" - "i16vec2 max(i16vec2, int16_t);" - "i16vec3 max(i16vec3, int16_t);" - "i16vec4 max(i16vec4, int16_t);" - "i16vec2 max(i16vec2, i16vec2);" - "i16vec3 max(i16vec3, i16vec3);" - "i16vec4 max(i16vec4, i16vec4);" - "uint16_t max(uint16_t, uint16_t);" - "u16vec2 max(u16vec2, uint16_t);" - "u16vec3 max(u16vec3, uint16_t);" - "u16vec4 max(u16vec4, uint16_t);" - "u16vec2 max(u16vec2, u16vec2);" - "u16vec3 max(u16vec3, u16vec3);" - "u16vec4 max(u16vec4, u16vec4);" - - "int16_t clamp(int16_t, int16_t, int16_t);" - "i16vec2 clamp(i16vec2, int16_t, int16_t);" - "i16vec3 clamp(i16vec3, int16_t, int16_t);" - "i16vec4 clamp(i16vec4, int16_t, int16_t);" - "i16vec2 clamp(i16vec2, i16vec2, i16vec2);" - "i16vec3 clamp(i16vec3, i16vec3, i16vec3);" - "i16vec4 clamp(i16vec4, i16vec4, i16vec4);" - "uint16_t clamp(uint16_t, uint16_t, uint16_t);" - "u16vec2 clamp(u16vec2, uint16_t, uint16_t);" - "u16vec3 clamp(u16vec3, uint16_t, uint16_t);" - "u16vec4 clamp(u16vec4, uint16_t, uint16_t);" - "u16vec2 clamp(u16vec2, u16vec2, u16vec2);" - "u16vec3 clamp(u16vec3, u16vec3, u16vec3);" - "u16vec4 clamp(u16vec4, u16vec4, u16vec4);" - - "int16_t mix(int16_t, int16_t, bool);" - "i16vec2 mix(i16vec2, i16vec2, bvec2);" - "i16vec3 mix(i16vec3, i16vec3, bvec3);" - "i16vec4 mix(i16vec4, i16vec4, bvec4);" - "uint16_t mix(uint16_t, uint16_t, bool);" - "u16vec2 mix(u16vec2, u16vec2, bvec2);" - "u16vec3 mix(u16vec3, u16vec3, bvec3);" - "u16vec4 mix(u16vec4, u16vec4, bvec4);" - - "float16_t frexp(float16_t, out int16_t);" - "f16vec2 frexp(f16vec2, out i16vec2);" - "f16vec3 frexp(f16vec3, out i16vec3);" - "f16vec4 frexp(f16vec4, out i16vec4);" - - "float16_t ldexp(float16_t, int16_t);" - "f16vec2 ldexp(f16vec2, i16vec2);" - "f16vec3 ldexp(f16vec3, i16vec3);" - "f16vec4 ldexp(f16vec4, i16vec4);" - - "int16_t float16BitsToInt16(float16_t);" - "i16vec2 float16BitsToInt16(f16vec2);" - "i16vec3 float16BitsToInt16(f16vec3);" - "i16vec4 float16BitsToInt16(f16vec4);" - - "uint16_t float16BitsToUint16(float16_t);" - "u16vec2 float16BitsToUint16(f16vec2);" - "u16vec3 float16BitsToUint16(f16vec3);" - "u16vec4 float16BitsToUint16(f16vec4);" - - "float16_t int16BitsToFloat16(int16_t);" - "f16vec2 int16BitsToFloat16(i16vec2);" - "f16vec3 int16BitsToFloat16(i16vec3);" - "f16vec4 int16BitsToFloat16(i16vec4);" - - "float16_t uint16BitsToFloat16(uint16_t);" - "f16vec2 uint16BitsToFloat16(u16vec2);" - "f16vec3 uint16BitsToFloat16(u16vec3);" - "f16vec4 uint16BitsToFloat16(u16vec4);" - - "int packInt2x16(i16vec2);" - "uint packUint2x16(u16vec2);" - "int64_t packInt4x16(i16vec4);" - "uint64_t packUint4x16(u16vec4);" - "i16vec2 unpackInt2x16(int);" - "u16vec2 unpackUint2x16(uint);" - "i16vec4 unpackInt4x16(int64_t);" - "u16vec4 unpackUint4x16(uint64_t);" - - "bvec2 lessThan(i16vec2, i16vec2);" - "bvec3 lessThan(i16vec3, i16vec3);" - "bvec4 lessThan(i16vec4, i16vec4);" - "bvec2 lessThan(u16vec2, u16vec2);" - "bvec3 lessThan(u16vec3, u16vec3);" - "bvec4 lessThan(u16vec4, u16vec4);" - - "bvec2 lessThanEqual(i16vec2, i16vec2);" - "bvec3 lessThanEqual(i16vec3, i16vec3);" - "bvec4 lessThanEqual(i16vec4, i16vec4);" - "bvec2 lessThanEqual(u16vec2, u16vec2);" - "bvec3 lessThanEqual(u16vec3, u16vec3);" - "bvec4 lessThanEqual(u16vec4, u16vec4);" - - "bvec2 greaterThan(i16vec2, i16vec2);" - "bvec3 greaterThan(i16vec3, i16vec3);" - "bvec4 greaterThan(i16vec4, i16vec4);" - "bvec2 greaterThan(u16vec2, 
u16vec2);" - "bvec3 greaterThan(u16vec3, u16vec3);" - "bvec4 greaterThan(u16vec4, u16vec4);" - - "bvec2 greaterThanEqual(i16vec2, i16vec2);" - "bvec3 greaterThanEqual(i16vec3, i16vec3);" - "bvec4 greaterThanEqual(i16vec4, i16vec4);" - "bvec2 greaterThanEqual(u16vec2, u16vec2);" - "bvec3 greaterThanEqual(u16vec3, u16vec3);" - "bvec4 greaterThanEqual(u16vec4, u16vec4);" - - "bvec2 equal(i16vec2, i16vec2);" - "bvec3 equal(i16vec3, i16vec3);" - "bvec4 equal(i16vec4, i16vec4);" - "bvec2 equal(u16vec2, u16vec2);" - "bvec3 equal(u16vec3, u16vec3);" - "bvec4 equal(u16vec4, u16vec4);" - - "bvec2 notEqual(i16vec2, i16vec2);" - "bvec3 notEqual(i16vec3, i16vec3);" - "bvec4 notEqual(i16vec4, i16vec4);" - "bvec2 notEqual(u16vec2, u16vec2);" - "bvec3 notEqual(u16vec3, u16vec3);" - "bvec4 notEqual(u16vec4, u16vec4);" - - "\n"); - } -#endif - - //============================================================================ - // - // Prototypes for built-in functions seen by vertex shaders only. - // (Except legacy lod functions, where it depends which release they are - // vertex only.) - // - //============================================================================ - - // - // Geometric Functions. - // - if (IncludeLegacy(version, profile, spvVersion)) - stageBuiltins[EShLangVertex].append("vec4 ftransform();"); - - // - // Original-style texture Functions with lod. - // - TString* s; - if (version == 100) - s = &stageBuiltins[EShLangVertex]; - else - s = &commonBuiltins; - if ((profile == EEsProfile && version == 100) || - profile == ECompatibilityProfile || - (profile == ECoreProfile && version < 420) || - profile == ENoProfile) { - if (spvVersion.spv == 0) { - s->append( - "vec4 texture2DLod(sampler2D, vec2, float);" // GL_ARB_shader_texture_lod - "vec4 texture2DProjLod(sampler2D, vec3, float);" // GL_ARB_shader_texture_lod - "vec4 texture2DProjLod(sampler2D, vec4, float);" // GL_ARB_shader_texture_lod - "vec4 texture3DLod(sampler3D, vec3, float);" // GL_ARB_shader_texture_lod // OES_texture_3D, but caught by keyword check - "vec4 texture3DProjLod(sampler3D, vec4, float);" // GL_ARB_shader_texture_lod // OES_texture_3D, but caught by keyword check - "vec4 textureCubeLod(samplerCube, vec3, float);" // GL_ARB_shader_texture_lod - - "\n"); - } - } - if ( profile == ECompatibilityProfile || - (profile == ECoreProfile && version < 420) || - profile == ENoProfile) { - if (spvVersion.spv == 0) { - s->append( - "vec4 texture1DLod(sampler1D, float, float);" // GL_ARB_shader_texture_lod - "vec4 texture1DProjLod(sampler1D, vec2, float);" // GL_ARB_shader_texture_lod - "vec4 texture1DProjLod(sampler1D, vec4, float);" // GL_ARB_shader_texture_lod - "vec4 shadow1DLod(sampler1DShadow, vec3, float);" // GL_ARB_shader_texture_lod - "vec4 shadow2DLod(sampler2DShadow, vec3, float);" // GL_ARB_shader_texture_lod - "vec4 shadow1DProjLod(sampler1DShadow, vec4, float);" // GL_ARB_shader_texture_lod - "vec4 shadow2DProjLod(sampler2DShadow, vec4, float);" // GL_ARB_shader_texture_lod - - "vec4 texture1DGradARB(sampler1D, float, float, float);" // GL_ARB_shader_texture_lod - "vec4 texture1DProjGradARB(sampler1D, vec2, float, float);" // GL_ARB_shader_texture_lod - "vec4 texture1DProjGradARB(sampler1D, vec4, float, float);" // GL_ARB_shader_texture_lod - "vec4 texture2DGradARB(sampler2D, vec2, vec2, vec2);" // GL_ARB_shader_texture_lod - "vec4 texture2DProjGradARB(sampler2D, vec3, vec2, vec2);" // GL_ARB_shader_texture_lod - "vec4 texture2DProjGradARB(sampler2D, vec4, vec2, vec2);" // GL_ARB_shader_texture_lod - "vec4 
texture3DGradARB(sampler3D, vec3, vec3, vec3);" // GL_ARB_shader_texture_lod - "vec4 texture3DProjGradARB(sampler3D, vec4, vec3, vec3);" // GL_ARB_shader_texture_lod - "vec4 textureCubeGradARB(samplerCube, vec3, vec3, vec3);" // GL_ARB_shader_texture_lod - "vec4 shadow1DGradARB(sampler1DShadow, vec3, float, float);" // GL_ARB_shader_texture_lod - "vec4 shadow1DProjGradARB( sampler1DShadow, vec4, float, float);" // GL_ARB_shader_texture_lod - "vec4 shadow2DGradARB(sampler2DShadow, vec3, vec2, vec2);" // GL_ARB_shader_texture_lod - "vec4 shadow2DProjGradARB( sampler2DShadow, vec4, vec2, vec2);" // GL_ARB_shader_texture_lod - "vec4 texture2DRectGradARB(sampler2DRect, vec2, vec2, vec2);" // GL_ARB_shader_texture_lod - "vec4 texture2DRectProjGradARB( sampler2DRect, vec3, vec2, vec2);" // GL_ARB_shader_texture_lod - "vec4 texture2DRectProjGradARB( sampler2DRect, vec4, vec2, vec2);" // GL_ARB_shader_texture_lod - "vec4 shadow2DRectGradARB( sampler2DRectShadow, vec3, vec2, vec2);" // GL_ARB_shader_texture_lod - "vec4 shadow2DRectProjGradARB(sampler2DRectShadow, vec4, vec2, vec2);" // GL_ARB_shader_texture_lod - - "\n"); - } - } - - if ((profile != EEsProfile && version >= 150) || - (profile == EEsProfile && version >= 310)) { - //============================================================================ - // - // Prototypes for built-in functions seen by geometry shaders only. - // - //============================================================================ - - if (profile != EEsProfile && version >= 400) { - stageBuiltins[EShLangGeometry].append( - "void EmitStreamVertex(int);" - "void EndStreamPrimitive(int);" - ); - } - stageBuiltins[EShLangGeometry].append( - "void EmitVertex();" - "void EndPrimitive();" - "\n"); - } - - //============================================================================ - // - // Prototypes for all control functions. - // - //============================================================================ - bool esBarrier = (profile == EEsProfile && version >= 310); - if ((profile != EEsProfile && version >= 150) || esBarrier) - stageBuiltins[EShLangTessControl].append( - "void barrier();" - ); - if ((profile != EEsProfile && version >= 420) || esBarrier) - stageBuiltins[EShLangCompute].append( - "void barrier();" - ); - if ((profile != EEsProfile && version >= 130) || esBarrier) - commonBuiltins.append( - "void memoryBarrier();" - ); - if ((profile != EEsProfile && version >= 420) || esBarrier) { - commonBuiltins.append( - "void memoryBarrierAtomicCounter();" - "void memoryBarrierBuffer();" - "void memoryBarrierImage();" - ); - stageBuiltins[EShLangCompute].append( - "void memoryBarrierShared();" - "void groupMemoryBarrier();" - ); - } - - //============================================================================ - // - // Prototypes for built-in functions seen by fragment shaders only. - // - //============================================================================ - - // - // Original-style texture Functions with bias. 
- // - if (spvVersion.spv == 0 && (profile != EEsProfile || version == 100)) { - stageBuiltins[EShLangFragment].append( - "vec4 texture2D(sampler2D, vec2, float);" - "vec4 texture2DProj(sampler2D, vec3, float);" - "vec4 texture2DProj(sampler2D, vec4, float);" - "vec4 texture3D(sampler3D, vec3, float);" // OES_texture_3D - "vec4 texture3DProj(sampler3D, vec4, float);" // OES_texture_3D - "vec4 textureCube(samplerCube, vec3, float);" - - "\n"); - } - if (spvVersion.spv == 0 && (profile != EEsProfile && version > 100)) { - stageBuiltins[EShLangFragment].append( - "vec4 texture1D(sampler1D, float, float);" - "vec4 texture1DProj(sampler1D, vec2, float);" - "vec4 texture1DProj(sampler1D, vec4, float);" - "vec4 shadow1D(sampler1DShadow, vec3, float);" - "vec4 shadow2D(sampler2DShadow, vec3, float);" - "vec4 shadow1DProj(sampler1DShadow, vec4, float);" - "vec4 shadow2DProj(sampler2DShadow, vec4, float);" - - "\n"); - } - if (spvVersion.spv == 0 && profile == EEsProfile) { - stageBuiltins[EShLangFragment].append( - "vec4 texture2DLodEXT(sampler2D, vec2, float);" // GL_EXT_shader_texture_lod - "vec4 texture2DProjLodEXT(sampler2D, vec3, float);" // GL_EXT_shader_texture_lod - "vec4 texture2DProjLodEXT(sampler2D, vec4, float);" // GL_EXT_shader_texture_lod - "vec4 textureCubeLodEXT(samplerCube, vec3, float);" // GL_EXT_shader_texture_lod - - "\n"); - } - - stageBuiltins[EShLangFragment].append( - "float dFdx(float p);" - "vec2 dFdx(vec2 p);" - "vec3 dFdx(vec3 p);" - "vec4 dFdx(vec4 p);" - - "float dFdy(float p);" - "vec2 dFdy(vec2 p);" - "vec3 dFdy(vec3 p);" - "vec4 dFdy(vec4 p);" - - "float fwidth(float p);" - "vec2 fwidth(vec2 p);" - "vec3 fwidth(vec3 p);" - "vec4 fwidth(vec4 p);" - - "\n"); - - // GL_ARB_derivative_control - if (profile != EEsProfile && version >= 400) { - stageBuiltins[EShLangFragment].append( - "float dFdxFine(float p);" - "vec2 dFdxFine(vec2 p);" - "vec3 dFdxFine(vec3 p);" - "vec4 dFdxFine(vec4 p);" - - "float dFdyFine(float p);" - "vec2 dFdyFine(vec2 p);" - "vec3 dFdyFine(vec3 p);" - "vec4 dFdyFine(vec4 p);" - - "float fwidthFine(float p);" - "vec2 fwidthFine(vec2 p);" - "vec3 fwidthFine(vec3 p);" - "vec4 fwidthFine(vec4 p);" - - "\n"); - - stageBuiltins[EShLangFragment].append( - "float dFdxCoarse(float p);" - "vec2 dFdxCoarse(vec2 p);" - "vec3 dFdxCoarse(vec3 p);" - "vec4 dFdxCoarse(vec4 p);" - - "float dFdyCoarse(float p);" - "vec2 dFdyCoarse(vec2 p);" - "vec3 dFdyCoarse(vec3 p);" - "vec4 dFdyCoarse(vec4 p);" - - "float fwidthCoarse(float p);" - "vec2 fwidthCoarse(vec2 p);" - "vec3 fwidthCoarse(vec3 p);" - "vec4 fwidthCoarse(vec4 p);" - - "\n"); - } - - // GL_OES_shader_multisample_interpolation - if ((profile == EEsProfile && version >= 310) || - (profile != EEsProfile && version >= 400)) { - stageBuiltins[EShLangFragment].append( - "float interpolateAtCentroid(float);" - "vec2 interpolateAtCentroid(vec2);" - "vec3 interpolateAtCentroid(vec3);" - "vec4 interpolateAtCentroid(vec4);" - - "float interpolateAtSample(float, int);" - "vec2 interpolateAtSample(vec2, int);" - "vec3 interpolateAtSample(vec3, int);" - "vec4 interpolateAtSample(vec4, int);" - - "float interpolateAtOffset(float, vec2);" - "vec2 interpolateAtOffset(vec2, vec2);" - "vec3 interpolateAtOffset(vec3, vec2);" - "vec4 interpolateAtOffset(vec4, vec2);" - - "\n"); - } - -#ifdef AMD_EXTENSIONS - // GL_AMD_shader_explicit_vertex_parameter - if (profile != EEsProfile && version >= 450) { - stageBuiltins[EShLangFragment].append( - "float interpolateAtVertexAMD(float, uint);" - "vec2 interpolateAtVertexAMD(vec2, 
uint);" - "vec3 interpolateAtVertexAMD(vec3, uint);" - "vec4 interpolateAtVertexAMD(vec4, uint);" - - "int interpolateAtVertexAMD(int, uint);" - "ivec2 interpolateAtVertexAMD(ivec2, uint);" - "ivec3 interpolateAtVertexAMD(ivec3, uint);" - "ivec4 interpolateAtVertexAMD(ivec4, uint);" - - "uint interpolateAtVertexAMD(uint, uint);" - "uvec2 interpolateAtVertexAMD(uvec2, uint);" - "uvec3 interpolateAtVertexAMD(uvec3, uint);" - "uvec4 interpolateAtVertexAMD(uvec4, uint);" - - "float16_t interpolateAtVertexAMD(float16_t, uint);" - "f16vec2 interpolateAtVertexAMD(f16vec2, uint);" - "f16vec3 interpolateAtVertexAMD(f16vec3, uint);" - "f16vec4 interpolateAtVertexAMD(f16vec4, uint);" - - "\n"); - } - - // GL_AMD_gpu_shader_half_float - if (profile != EEsProfile && version >= 450) { - stageBuiltins[EShLangFragment].append( - "float16_t dFdx(float16_t);" - "f16vec2 dFdx(f16vec2);" - "f16vec3 dFdx(f16vec3);" - "f16vec4 dFdx(f16vec4);" - - "float16_t dFdy(float16_t);" - "f16vec2 dFdy(f16vec2);" - "f16vec3 dFdy(f16vec3);" - "f16vec4 dFdy(f16vec4);" - - "float16_t dFdxFine(float16_t);" - "f16vec2 dFdxFine(f16vec2);" - "f16vec3 dFdxFine(f16vec3);" - "f16vec4 dFdxFine(f16vec4);" - - "float16_t dFdyFine(float16_t);" - "f16vec2 dFdyFine(f16vec2);" - "f16vec3 dFdyFine(f16vec3);" - "f16vec4 dFdyFine(f16vec4);" - - "float16_t dFdxCoarse(float16_t);" - "f16vec2 dFdxCoarse(f16vec2);" - "f16vec3 dFdxCoarse(f16vec3);" - "f16vec4 dFdxCoarse(f16vec4);" - - "float16_t dFdyCoarse(float16_t);" - "f16vec2 dFdyCoarse(f16vec2);" - "f16vec3 dFdyCoarse(f16vec3);" - "f16vec4 dFdyCoarse(f16vec4);" - - "float16_t fwidth(float16_t);" - "f16vec2 fwidth(f16vec2);" - "f16vec3 fwidth(f16vec3);" - "f16vec4 fwidth(f16vec4);" - - "float16_t fwidthFine(float16_t);" - "f16vec2 fwidthFine(f16vec2);" - "f16vec3 fwidthFine(f16vec3);" - "f16vec4 fwidthFine(f16vec4);" - - "float16_t fwidthCoarse(float16_t);" - "f16vec2 fwidthCoarse(f16vec2);" - "f16vec3 fwidthCoarse(f16vec3);" - "f16vec4 fwidthCoarse(f16vec4);" - - "float16_t interpolateAtCentroid(float16_t);" - "f16vec2 interpolateAtCentroid(f16vec2);" - "f16vec3 interpolateAtCentroid(f16vec3);" - "f16vec4 interpolateAtCentroid(f16vec4);" - - "float16_t interpolateAtSample(float16_t, int);" - "f16vec2 interpolateAtSample(f16vec2, int);" - "f16vec3 interpolateAtSample(f16vec3, int);" - "f16vec4 interpolateAtSample(f16vec4, int);" - - "float16_t interpolateAtOffset(float16_t, f16vec2);" - "f16vec2 interpolateAtOffset(f16vec2, f16vec2);" - "f16vec3 interpolateAtOffset(f16vec3, f16vec2);" - "f16vec4 interpolateAtOffset(f16vec4, f16vec2);" - - "\n"); - } -#endif - - //============================================================================ - // - // Standard Uniforms - // - //============================================================================ - - // - // Depth range in window coordinates, p. 33 - // - if (spvVersion.spv == 0) { - commonBuiltins.append( - "struct gl_DepthRangeParameters {" - ); - if (profile == EEsProfile) { - commonBuiltins.append( - "highp float near;" // n - "highp float far;" // f - "highp float diff;" // f - n - ); - } else { - commonBuiltins.append( - "float near;" // n - "float far;" // f - "float diff;" // f - n - ); - } - - commonBuiltins.append( - "};" - "uniform gl_DepthRangeParameters gl_DepthRange;" - "\n"); - } - - if (spvVersion.spv == 0 && IncludeLegacy(version, profile, spvVersion)) { - // - // Matrix state. p. 31, 32, 37, 39, 40. 
- // - commonBuiltins.append( - "uniform mat4 gl_ModelViewMatrix;" - "uniform mat4 gl_ProjectionMatrix;" - "uniform mat4 gl_ModelViewProjectionMatrix;" - - // - // Derived matrix state that provides inverse and transposed versions - // of the matrices above. - // - "uniform mat3 gl_NormalMatrix;" - - "uniform mat4 gl_ModelViewMatrixInverse;" - "uniform mat4 gl_ProjectionMatrixInverse;" - "uniform mat4 gl_ModelViewProjectionMatrixInverse;" - - "uniform mat4 gl_ModelViewMatrixTranspose;" - "uniform mat4 gl_ProjectionMatrixTranspose;" - "uniform mat4 gl_ModelViewProjectionMatrixTranspose;" - - "uniform mat4 gl_ModelViewMatrixInverseTranspose;" - "uniform mat4 gl_ProjectionMatrixInverseTranspose;" - "uniform mat4 gl_ModelViewProjectionMatrixInverseTranspose;" - - // - // Normal scaling p. 39. - // - "uniform float gl_NormalScale;" - - // - // Point Size, p. 66, 67. - // - "struct gl_PointParameters {" - "float size;" - "float sizeMin;" - "float sizeMax;" - "float fadeThresholdSize;" - "float distanceConstantAttenuation;" - "float distanceLinearAttenuation;" - "float distanceQuadraticAttenuation;" - "};" - - "uniform gl_PointParameters gl_Point;" - - // - // Material State p. 50, 55. - // - "struct gl_MaterialParameters {" - "vec4 emission;" // Ecm - "vec4 ambient;" // Acm - "vec4 diffuse;" // Dcm - "vec4 specular;" // Scm - "float shininess;" // Srm - "};" - "uniform gl_MaterialParameters gl_FrontMaterial;" - "uniform gl_MaterialParameters gl_BackMaterial;" - - // - // Light State p 50, 53, 55. - // - "struct gl_LightSourceParameters {" - "vec4 ambient;" // Acli - "vec4 diffuse;" // Dcli - "vec4 specular;" // Scli - "vec4 position;" // Ppli - "vec4 halfVector;" // Derived: Hi - "vec3 spotDirection;" // Sdli - "float spotExponent;" // Srli - "float spotCutoff;" // Crli - // (range: [0.0,90.0], 180.0) - "float spotCosCutoff;" // Derived: cos(Crli) - // (range: [1.0,0.0],-1.0) - "float constantAttenuation;" // K0 - "float linearAttenuation;" // K1 - "float quadraticAttenuation;"// K2 - "};" - - "struct gl_LightModelParameters {" - "vec4 ambient;" // Acs - "};" - - "uniform gl_LightModelParameters gl_LightModel;" - - // - // Derived state from products of light and material. - // - "struct gl_LightModelProducts {" - "vec4 sceneColor;" // Derived. Ecm + Acm * Acs - "};" - - "uniform gl_LightModelProducts gl_FrontLightModelProduct;" - "uniform gl_LightModelProducts gl_BackLightModelProduct;" - - "struct gl_LightProducts {" - "vec4 ambient;" // Acm * Acli - "vec4 diffuse;" // Dcm * Dcli - "vec4 specular;" // Scm * Scli - "};" - - // - // Fog p. 161 - // - "struct gl_FogParameters {" - "vec4 color;" - "float density;" - "float start;" - "float end;" - "float scale;" // 1 / (gl_FogEnd - gl_FogStart) - "};" - - "uniform gl_FogParameters gl_Fog;" - - "\n"); - } - - //============================================================================ - // - // Define the interface to the compute shader. 
- // - //============================================================================ - - if ((profile != EEsProfile && version >= 420) || - (profile == EEsProfile && version >= 310)) { - stageBuiltins[EShLangCompute].append( - "in highp uvec3 gl_NumWorkGroups;" - "const highp uvec3 gl_WorkGroupSize = uvec3(1,1,1);" - - "in highp uvec3 gl_WorkGroupID;" - "in highp uvec3 gl_LocalInvocationID;" - - "in highp uvec3 gl_GlobalInvocationID;" - "in highp uint gl_LocalInvocationIndex;" - - "\n"); - } - - if ((profile != EEsProfile && version >= 140) || - (profile == EEsProfile && version >= 310)) { - stageBuiltins[EShLangCompute].append( - "in highp int gl_DeviceIndex;" // GL_EXT_device_group - "\n"); - } - - //============================================================================ - // - // Define the interface to the vertex shader. - // - //============================================================================ - - if (profile != EEsProfile) { - if (version < 130) { - stageBuiltins[EShLangVertex].append( - "attribute vec4 gl_Color;" - "attribute vec4 gl_SecondaryColor;" - "attribute vec3 gl_Normal;" - "attribute vec4 gl_Vertex;" - "attribute vec4 gl_MultiTexCoord0;" - "attribute vec4 gl_MultiTexCoord1;" - "attribute vec4 gl_MultiTexCoord2;" - "attribute vec4 gl_MultiTexCoord3;" - "attribute vec4 gl_MultiTexCoord4;" - "attribute vec4 gl_MultiTexCoord5;" - "attribute vec4 gl_MultiTexCoord6;" - "attribute vec4 gl_MultiTexCoord7;" - "attribute float gl_FogCoord;" - "\n"); - } else if (IncludeLegacy(version, profile, spvVersion)) { - stageBuiltins[EShLangVertex].append( - "in vec4 gl_Color;" - "in vec4 gl_SecondaryColor;" - "in vec3 gl_Normal;" - "in vec4 gl_Vertex;" - "in vec4 gl_MultiTexCoord0;" - "in vec4 gl_MultiTexCoord1;" - "in vec4 gl_MultiTexCoord2;" - "in vec4 gl_MultiTexCoord3;" - "in vec4 gl_MultiTexCoord4;" - "in vec4 gl_MultiTexCoord5;" - "in vec4 gl_MultiTexCoord6;" - "in vec4 gl_MultiTexCoord7;" - "in float gl_FogCoord;" - "\n"); - } - - if (version < 150) { - if (version < 130) { - stageBuiltins[EShLangVertex].append( - " vec4 gl_ClipVertex;" // needs qualifier fixed later - "varying vec4 gl_FrontColor;" - "varying vec4 gl_BackColor;" - "varying vec4 gl_FrontSecondaryColor;" - "varying vec4 gl_BackSecondaryColor;" - "varying vec4 gl_TexCoord[];" - "varying float gl_FogFragCoord;" - "\n"); - } else if (IncludeLegacy(version, profile, spvVersion)) { - stageBuiltins[EShLangVertex].append( - " vec4 gl_ClipVertex;" // needs qualifier fixed later - "out vec4 gl_FrontColor;" - "out vec4 gl_BackColor;" - "out vec4 gl_FrontSecondaryColor;" - "out vec4 gl_BackSecondaryColor;" - "out vec4 gl_TexCoord[];" - "out float gl_FogFragCoord;" - "\n"); - } - stageBuiltins[EShLangVertex].append( - "vec4 gl_Position;" // needs qualifier fixed later - "float gl_PointSize;" // needs qualifier fixed later - ); - - if (version == 130 || version == 140) - stageBuiltins[EShLangVertex].append( - "out float gl_ClipDistance[];" - ); - } else { - // version >= 150 - stageBuiltins[EShLangVertex].append( - "out gl_PerVertex {" - "vec4 gl_Position;" // needs qualifier fixed later - "float gl_PointSize;" // needs qualifier fixed later - "float gl_ClipDistance[];" - ); - if (IncludeLegacy(version, profile, spvVersion)) - stageBuiltins[EShLangVertex].append( - "vec4 gl_ClipVertex;" // needs qualifier fixed later - "vec4 gl_FrontColor;" - "vec4 gl_BackColor;" - "vec4 gl_FrontSecondaryColor;" - "vec4 gl_BackSecondaryColor;" - "vec4 gl_TexCoord[];" - "float gl_FogFragCoord;" - ); - if (version >= 450) - 
stageBuiltins[EShLangVertex].append( - "float gl_CullDistance[];" - ); - stageBuiltins[EShLangVertex].append( - "};" - "\n"); - } - if (version >= 130 && spvVersion.vulkan == 0) - stageBuiltins[EShLangVertex].append( - "int gl_VertexID;" // needs qualifier fixed later - ); - if (version >= 140 && spvVersion.vulkan == 0) - stageBuiltins[EShLangVertex].append( - "int gl_InstanceID;" // needs qualifier fixed later - ); - if (spvVersion.vulkan >= 100 && version >= 140) - stageBuiltins[EShLangVertex].append( - "in int gl_VertexIndex;" - "in int gl_InstanceIndex;" - ); - if (version >= 440) { - stageBuiltins[EShLangVertex].append( - "in int gl_BaseVertexARB;" - "in int gl_BaseInstanceARB;" - "in int gl_DrawIDARB;" - ); - } - if (version >= 460) { - stageBuiltins[EShLangVertex].append( - "in int gl_BaseVertex;" - "in int gl_BaseInstance;" - "in int gl_DrawID;" - ); - } - -#ifdef NV_EXTENSIONS - if (version >= 450) - stageBuiltins[EShLangVertex].append( - "out int gl_ViewportIndex;" - "out int gl_Layer;" - "out int gl_ViewportMask[];" // GL_NV_viewport_array2 - "out int gl_SecondaryViewportMaskNV[];" // GL_NV_stereo_view_rendering - "out vec4 gl_SecondaryPositionNV;" // GL_NV_stereo_view_rendering - "out vec4 gl_PositionPerViewNV[];" // GL_NVX_multiview_per_view_attributes - "out int gl_ViewportMaskPerViewNV[];" // GL_NVX_multiview_per_view_attributes - ); -#endif - - } else { - // ES profile - if (version == 100) { - stageBuiltins[EShLangVertex].append( - "highp vec4 gl_Position;" // needs qualifier fixed later - "mediump float gl_PointSize;" // needs qualifier fixed later - ); - } else { - if (spvVersion.vulkan == 0) - stageBuiltins[EShLangVertex].append( - "in highp int gl_VertexID;" // needs qualifier fixed later - "in highp int gl_InstanceID;" // needs qualifier fixed later - ); - if (spvVersion.vulkan >= 100) - stageBuiltins[EShLangVertex].append( - "in highp int gl_VertexIndex;" - "in highp int gl_InstanceIndex;" - ); - if (version < 310) - stageBuiltins[EShLangVertex].append( - "highp vec4 gl_Position;" // needs qualifier fixed later - "highp float gl_PointSize;" // needs qualifier fixed later - ); - else - stageBuiltins[EShLangVertex].append( - "out gl_PerVertex {" - "highp vec4 gl_Position;" // needs qualifier fixed later - "highp float gl_PointSize;" // needs qualifier fixed later - "};" - ); - } - } - - if ((profile != EEsProfile && version >= 140) || - (profile == EEsProfile && version >= 310)) { - stageBuiltins[EShLangVertex].append( - "in highp int gl_DeviceIndex;" // GL_EXT_device_group - "in highp int gl_ViewIndex;" // GL_EXT_multiview - "\n"); - } - - if (version >= 300 /* both ES and non-ES */) { - stageBuiltins[EShLangVertex].append( - "in highp uint gl_ViewID_OVR;" // GL_OVR_multiview, GL_OVR_multiview2 - "\n"); - } - - - //============================================================================ - // - // Define the interface to the geometry shader. 
- // - //============================================================================ - - if (profile == ECoreProfile || profile == ECompatibilityProfile) { - stageBuiltins[EShLangGeometry].append( - "in gl_PerVertex {" - "vec4 gl_Position;" - "float gl_PointSize;" - "float gl_ClipDistance[];" - ); - if (profile == ECompatibilityProfile) - stageBuiltins[EShLangGeometry].append( - "vec4 gl_ClipVertex;" - "vec4 gl_FrontColor;" - "vec4 gl_BackColor;" - "vec4 gl_FrontSecondaryColor;" - "vec4 gl_BackSecondaryColor;" - "vec4 gl_TexCoord[];" - "float gl_FogFragCoord;" - ); - if (version >= 450) - stageBuiltins[EShLangGeometry].append( - "float gl_CullDistance[];" -#ifdef NV_EXTENSIONS - "vec4 gl_SecondaryPositionNV;" // GL_NV_stereo_view_rendering - "vec4 gl_PositionPerViewNV[];" // GL_NVX_multiview_per_view_attributes -#endif - ); - stageBuiltins[EShLangGeometry].append( - "} gl_in[];" - - "in int gl_PrimitiveIDIn;" - "out gl_PerVertex {" - "vec4 gl_Position;" - "float gl_PointSize;" - "float gl_ClipDistance[];" - "\n"); - if (profile == ECompatibilityProfile && version >= 400) - stageBuiltins[EShLangGeometry].append( - "vec4 gl_ClipVertex;" - "vec4 gl_FrontColor;" - "vec4 gl_BackColor;" - "vec4 gl_FrontSecondaryColor;" - "vec4 gl_BackSecondaryColor;" - "vec4 gl_TexCoord[];" - "float gl_FogFragCoord;" - ); - if (version >= 450) - stageBuiltins[EShLangGeometry].append( - "float gl_CullDistance[];" - ); - stageBuiltins[EShLangGeometry].append( - "};" - - "out int gl_PrimitiveID;" - "out int gl_Layer;"); - - if (profile == ECompatibilityProfile && version < 400) - stageBuiltins[EShLangGeometry].append( - "out vec4 gl_ClipVertex;" - ); - - if (version >= 400) - stageBuiltins[EShLangGeometry].append( - "in int gl_InvocationID;" - ); - // GL_ARB_viewport_array - if (version >= 150) - stageBuiltins[EShLangGeometry].append( - "out int gl_ViewportIndex;" - ); - -#ifdef NV_EXTENSIONS - if (version >= 450) - stageBuiltins[EShLangGeometry].append( - "out int gl_ViewportMask[];" // GL_NV_viewport_array2 - "out int gl_SecondaryViewportMaskNV[];" // GL_NV_stereo_view_rendering - "out vec4 gl_SecondaryPositionNV;" // GL_NV_stereo_view_rendering - "out vec4 gl_PositionPerViewNV[];" // GL_NVX_multiview_per_view_attributes - "out int gl_ViewportMaskPerViewNV[];" // GL_NVX_multiview_per_view_attributes - ); -#endif - - stageBuiltins[EShLangGeometry].append("\n"); - } else if (profile == EEsProfile && version >= 310) { - stageBuiltins[EShLangGeometry].append( - "in gl_PerVertex {" - "highp vec4 gl_Position;" - "highp float gl_PointSize;" - "} gl_in[];" - "\n" - "in highp int gl_PrimitiveIDIn;" - "in highp int gl_InvocationID;" - "\n" - "out gl_PerVertex {" - "highp vec4 gl_Position;" - "highp float gl_PointSize;" - "};" - "\n" - "out highp int gl_PrimitiveID;" - "out highp int gl_Layer;" - "\n" - ); - } - - if ((profile != EEsProfile && version >= 140) || - (profile == EEsProfile && version >= 310)) { - stageBuiltins[EShLangGeometry].append( - "in highp int gl_DeviceIndex;" // GL_EXT_device_group - "in highp int gl_ViewIndex;" // GL_EXT_multiview - "\n"); - } - - //============================================================================ - // - // Define the interface to the tessellation control shader. - // - //============================================================================ - - if (profile != EEsProfile && version >= 150) { - // Note: "in gl_PerVertex {...} gl_in[gl_MaxPatchVertices];" is declared in initialize() below, - // as it depends on the resource sizing of gl_MaxPatchVertices. 
- - stageBuiltins[EShLangTessControl].append( - "in int gl_PatchVerticesIn;" - "in int gl_PrimitiveID;" - "in int gl_InvocationID;" - - "out gl_PerVertex {" - "vec4 gl_Position;" - "float gl_PointSize;" - "float gl_ClipDistance[];" - ); - if (profile == ECompatibilityProfile) - stageBuiltins[EShLangTessControl].append( - "vec4 gl_ClipVertex;" - "vec4 gl_FrontColor;" - "vec4 gl_BackColor;" - "vec4 gl_FrontSecondaryColor;" - "vec4 gl_BackSecondaryColor;" - "vec4 gl_TexCoord[];" - "float gl_FogFragCoord;" - ); - if (version >= 450) - stageBuiltins[EShLangTessControl].append( - "float gl_CullDistance[];" -#ifdef NV_EXTENSIONS - "int gl_ViewportIndex;" - "int gl_Layer;" - "int gl_ViewportMask[];" // GL_NV_viewport_array2 - "vec4 gl_SecondaryPositionNV;" // GL_NV_stereo_view_rendering - "int gl_SecondaryViewportMaskNV[];" // GL_NV_stereo_view_rendering - "vec4 gl_PositionPerViewNV[];" // GL_NVX_multiview_per_view_attributes - "int gl_ViewportMaskPerViewNV[];" // GL_NVX_multiview_per_view_attributes -#endif - ); - stageBuiltins[EShLangTessControl].append( - "} gl_out[];" - - "patch out float gl_TessLevelOuter[4];" - "patch out float gl_TessLevelInner[2];" - "\n"); - } else { - // Note: "in gl_PerVertex {...} gl_in[gl_MaxPatchVertices];" is declared in initialize() below, - // as it depends on the resource sizing of gl_MaxPatchVertices. - - stageBuiltins[EShLangTessControl].append( - "in highp int gl_PatchVerticesIn;" - "in highp int gl_PrimitiveID;" - "in highp int gl_InvocationID;" - - "out gl_PerVertex {" - "highp vec4 gl_Position;" - "highp float gl_PointSize;" - ); - stageBuiltins[EShLangTessControl].append( - "} gl_out[];" - - "patch out highp float gl_TessLevelOuter[4];" - "patch out highp float gl_TessLevelInner[2];" - "patch out highp vec4 gl_BoundingBoxOES[2];" - "\n"); - } - - if ((profile != EEsProfile && version >= 140) || - (profile == EEsProfile && version >= 310)) { - stageBuiltins[EShLangTessControl].append( - "in highp int gl_DeviceIndex;" // GL_EXT_device_group - "in highp int gl_ViewIndex;" // GL_EXT_multiview - "\n"); - } - - //============================================================================ - // - // Define the interface to the tessellation evaluation shader. - // - //============================================================================ - - if (profile != EEsProfile && version >= 150) { - // Note: "in gl_PerVertex {...} gl_in[gl_MaxPatchVertices];" is declared in initialize() below, - // as it depends on the resource sizing of gl_MaxPatchVertices. 
- - stageBuiltins[EShLangTessEvaluation].append( - "in int gl_PatchVerticesIn;" - "in int gl_PrimitiveID;" - "in vec3 gl_TessCoord;" - - "patch in float gl_TessLevelOuter[4];" - "patch in float gl_TessLevelInner[2];" - - "out gl_PerVertex {" - "vec4 gl_Position;" - "float gl_PointSize;" - "float gl_ClipDistance[];" - ); - if (version >= 400 && profile == ECompatibilityProfile) - stageBuiltins[EShLangTessEvaluation].append( - "vec4 gl_ClipVertex;" - "vec4 gl_FrontColor;" - "vec4 gl_BackColor;" - "vec4 gl_FrontSecondaryColor;" - "vec4 gl_BackSecondaryColor;" - "vec4 gl_TexCoord[];" - "float gl_FogFragCoord;" - ); - if (version >= 450) - stageBuiltins[EShLangTessEvaluation].append( - "float gl_CullDistance[];" - ); - stageBuiltins[EShLangTessEvaluation].append( - "};" - "\n"); - -#ifdef NV_EXTENSIONS - if (version >= 450) - stageBuiltins[EShLangTessEvaluation].append( - "out int gl_ViewportIndex;" - "out int gl_Layer;" - "out int gl_ViewportMask[];" // GL_NV_viewport_array2 - "out vec4 gl_SecondaryPositionNV;" // GL_NV_stereo_view_rendering - "out int gl_SecondaryViewportMaskNV[];" // GL_NV_stereo_view_rendering - "out vec4 gl_PositionPerViewNV[];" // GL_NVX_multiview_per_view_attributes - "out int gl_ViewportMaskPerViewNV[];" // GL_NVX_multiview_per_view_attributes - ); -#endif - - } else if (profile == EEsProfile && version >= 310) { - // Note: "in gl_PerVertex {...} gl_in[gl_MaxPatchVertices];" is declared in initialize() below, - // as it depends on the resource sizing of gl_MaxPatchVertices. - - stageBuiltins[EShLangTessEvaluation].append( - "in highp int gl_PatchVerticesIn;" - "in highp int gl_PrimitiveID;" - "in highp vec3 gl_TessCoord;" - - "patch in highp float gl_TessLevelOuter[4];" - "patch in highp float gl_TessLevelInner[2];" - - "out gl_PerVertex {" - "highp vec4 gl_Position;" - "highp float gl_PointSize;" - ); - stageBuiltins[EShLangTessEvaluation].append( - "};" - "\n"); - } - - if ((profile != EEsProfile && version >= 140) || - (profile == EEsProfile && version >= 310)) { - stageBuiltins[EShLangTessEvaluation].append( - "in highp int gl_DeviceIndex;" // GL_EXT_device_group - "in highp int gl_ViewIndex;" // GL_EXT_multiview - "\n"); - } - - //============================================================================ - // - // Define the interface to the fragment shader. - // - //============================================================================ - - if (profile != EEsProfile) { - - stageBuiltins[EShLangFragment].append( - "vec4 gl_FragCoord;" // needs qualifier fixed later - "bool gl_FrontFacing;" // needs qualifier fixed later - "float gl_FragDepth;" // needs qualifier fixed later - ); - if (version >= 120) - stageBuiltins[EShLangFragment].append( - "vec2 gl_PointCoord;" // needs qualifier fixed later - ); - if (version >= 140) - stageBuiltins[EShLangFragment].append( - "out int gl_FragStencilRefARB;" - ); - if (IncludeLegacy(version, profile, spvVersion) || (! 
ForwardCompatibility && version < 420)) - stageBuiltins[EShLangFragment].append( - "vec4 gl_FragColor;" // needs qualifier fixed later - ); - - if (version < 130) { - stageBuiltins[EShLangFragment].append( - "varying vec4 gl_Color;" - "varying vec4 gl_SecondaryColor;" - "varying vec4 gl_TexCoord[];" - "varying float gl_FogFragCoord;" - ); - } else { - stageBuiltins[EShLangFragment].append( - "in float gl_ClipDistance[];" - ); - - if (IncludeLegacy(version, profile, spvVersion)) { - if (version < 150) - stageBuiltins[EShLangFragment].append( - "in float gl_FogFragCoord;" - "in vec4 gl_TexCoord[];" - "in vec4 gl_Color;" - "in vec4 gl_SecondaryColor;" - ); - else - stageBuiltins[EShLangFragment].append( - "in gl_PerFragment {" - "in float gl_FogFragCoord;" - "in vec4 gl_TexCoord[];" - "in vec4 gl_Color;" - "in vec4 gl_SecondaryColor;" - "};" - ); - } - } - - if (version >= 150) - stageBuiltins[EShLangFragment].append( - "flat in int gl_PrimitiveID;" - ); - - if (version >= 400) { - stageBuiltins[EShLangFragment].append( - "flat in int gl_SampleID;" - " in vec2 gl_SamplePosition;" - "flat in int gl_SampleMaskIn[];" - " out int gl_SampleMask[];" - ); - if (spvVersion.spv == 0) - stageBuiltins[EShLangFragment].append( - "uniform int gl_NumSamples;" - ); - } - - if (version >= 430) - stageBuiltins[EShLangFragment].append( - "flat in int gl_Layer;" - "flat in int gl_ViewportIndex;" - ); - - if (version >= 450) - stageBuiltins[EShLangFragment].append( - "in float gl_CullDistance[];" - "bool gl_HelperInvocation;" // needs qualifier fixed later - ); - -#ifdef AMD_EXTENSIONS - if (version >= 450) - stageBuiltins[EShLangFragment].append( - "in vec2 gl_BaryCoordNoPerspAMD;" - "in vec2 gl_BaryCoordNoPerspCentroidAMD;" - "in vec2 gl_BaryCoordNoPerspSampleAMD;" - "in vec2 gl_BaryCoordSmoothAMD;" - "in vec2 gl_BaryCoordSmoothCentroidAMD;" - "in vec2 gl_BaryCoordSmoothSampleAMD;" - "in vec3 gl_BaryCoordPullModelAMD;" - ); -#endif - } else { - // ES profile - - if (version == 100) { - stageBuiltins[EShLangFragment].append( - "mediump vec4 gl_FragCoord;" // needs qualifier fixed later - " bool gl_FrontFacing;" // needs qualifier fixed later - "mediump vec4 gl_FragColor;" // needs qualifier fixed later - "mediump vec2 gl_PointCoord;" // needs qualifier fixed later - ); - } - if (version >= 300) { - stageBuiltins[EShLangFragment].append( - "highp vec4 gl_FragCoord;" // needs qualifier fixed later - " bool gl_FrontFacing;" // needs qualifier fixed later - "mediump vec2 gl_PointCoord;" // needs qualifier fixed later - "highp float gl_FragDepth;" // needs qualifier fixed later - ); - } - if (version >= 310) { - stageBuiltins[EShLangFragment].append( - "bool gl_HelperInvocation;" // needs qualifier fixed later - "flat in highp int gl_PrimitiveID;" // needs qualifier fixed later - "flat in highp int gl_Layer;" // needs qualifier fixed later - ); - - stageBuiltins[EShLangFragment].append( // GL_OES_sample_variables - "flat in lowp int gl_SampleID;" - " in mediump vec2 gl_SamplePosition;" - "flat in highp int gl_SampleMaskIn[];" - " out highp int gl_SampleMask[];" - ); - if (spvVersion.spv == 0) - stageBuiltins[EShLangFragment].append( // GL_OES_sample_variables - "uniform lowp int gl_NumSamples;" - ); - } - stageBuiltins[EShLangFragment].append( - "highp float gl_FragDepthEXT;" // GL_EXT_frag_depth - ); - } - stageBuiltins[EShLangFragment].append("\n"); - - if (version >= 130) - add2ndGenerationSamplingImaging(version, profile, spvVersion); - - // GL_ARB_shader_ballot - if (profile != EEsProfile && version >= 450) { - 
commonBuiltins.append( - "uniform uint gl_SubGroupSizeARB;" - - "in uint gl_SubGroupInvocationARB;" - "in uint64_t gl_SubGroupEqMaskARB;" - "in uint64_t gl_SubGroupGeMaskARB;" - "in uint64_t gl_SubGroupGtMaskARB;" - "in uint64_t gl_SubGroupLeMaskARB;" - "in uint64_t gl_SubGroupLtMaskARB;" - - "\n"); - } - - if ((profile != EEsProfile && version >= 140) || - (profile == EEsProfile && version >= 310)) { - stageBuiltins[EShLangFragment].append( - "flat in highp int gl_DeviceIndex;" // GL_EXT_device_group - "flat in highp int gl_ViewIndex;" // GL_EXT_multiview - "\n"); - } - - if (version >= 300 /* both ES and non-ES */) { - stageBuiltins[EShLangFragment].append( - "flat in highp uint gl_ViewID_OVR;" // GL_OVR_multiview, GL_OVR_multiview2 - "\n"); - } - - // printf("%s\n", commonBuiltins.c_str()); - // printf("%s\n", stageBuiltins[EShLangFragment].c_str()); -} - -// -// Helper function for initialize(), to add the second set of names for texturing, -// when adding context-independent built-in functions. -// -void TBuiltIns::add2ndGenerationSamplingImaging(int version, EProfile profile, const SpvVersion& spvVersion) -{ - // - // In this function proper, enumerate the types, then calls the next set of functions - // to enumerate all the uses for that type. - // - - TBasicType bTypes[3] = { EbtFloat, EbtInt, EbtUint }; - bool skipBuffer = (profile == EEsProfile && version < 310) || (profile != EEsProfile && version < 140); - bool skipCubeArrayed = (profile == EEsProfile && version < 310) || (profile != EEsProfile && version < 130); - - // enumerate all the types - for (int image = 0; image <= 1; ++image) { // loop over "bool" image vs sampler - - for (int shadow = 0; shadow <= 1; ++shadow) { // loop over "bool" shadow or not - for (int ms = 0; ms <=1; ++ms) { - if ((ms || image) && shadow) - continue; - if (ms && profile != EEsProfile && version < 150) - continue; - if (ms && image && profile == EEsProfile) - continue; - if (ms && profile == EEsProfile && version < 310) - continue; - - for (int arrayed = 0; arrayed <= 1; ++arrayed) { // loop over "bool" arrayed or not - for (int dim = Esd1D; dim < EsdNumDims; ++dim) { // 1D, 2D, ..., buffer - if (dim == EsdSubpass && spvVersion.vulkan == 0) - continue; - if (dim == EsdSubpass && (image || shadow || arrayed)) - continue; - if ((dim == Esd1D || dim == EsdRect) && profile == EEsProfile) - continue; - if (dim != Esd2D && dim != EsdSubpass && ms) - continue; - if ((dim == Esd3D || dim == EsdRect) && arrayed) - continue; - if (dim == Esd3D && shadow) - continue; - if (dim == EsdCube && arrayed && skipCubeArrayed) - continue; - if (dim == EsdBuffer && skipBuffer) - continue; - if (dim == EsdBuffer && (shadow || arrayed || ms)) - continue; - if (ms && arrayed && profile == EEsProfile && version < 310) - continue; - - for (int bType = 0; bType < 3; ++bType) { // float, int, uint results - - if (shadow && bType > 0) - continue; - - if (dim == EsdRect && version < 140 && bType > 0) - continue; - - // - // Now, make all the function prototypes for the type we just built... - // - - TSampler sampler; - if (dim == EsdSubpass) { - sampler.setSubpass(bTypes[bType], ms ? true : false); - } else if (image) { - sampler.setImage(bTypes[bType], (TSamplerDim)dim, arrayed ? true : false, - shadow ? true : false, - ms ? true : false); - } else { - sampler.set(bTypes[bType], (TSamplerDim)dim, arrayed ? true : false, - shadow ? true : false, - ms ? 
true : false); - } - - TString typeName = sampler.getString(); - - if (dim == EsdSubpass) { - addSubpassSampling(sampler, typeName, version, profile); - continue; - } - - addQueryFunctions(sampler, typeName, version, profile); - - if (image) - addImageFunctions(sampler, typeName, version, profile); - else { - addSamplingFunctions(sampler, typeName, version, profile); - addGatherFunctions(sampler, typeName, version, profile); - - if (spvVersion.vulkan > 0 && sampler.dim == EsdBuffer && sampler.isCombined()) { - // Vulkan wants a textureBuffer to allow texelFetch() -- - // a sampled image with no sampler. - // So, add sampling functions for both the - // samplerBuffer and textureBuffer types. - sampler.setTexture(sampler.type, sampler.dim, sampler.arrayed, sampler.shadow, - sampler.ms); - TString textureTypeName = sampler.getString(); - addSamplingFunctions(sampler, textureTypeName, version, profile); - } - } - } - } - } - } - } - } - - // - // sparseTexelsResidentARB() - // - - if (profile != EEsProfile && version >= 450) { - commonBuiltins.append("bool sparseTexelsResidentARB(int code);\n"); - } -} - -// -// Helper function for add2ndGenerationSamplingImaging(), -// when adding context-independent built-in functions. -// -// Add all the query functions for the given type. -// -void TBuiltIns::addQueryFunctions(TSampler sampler, const TString& typeName, int version, EProfile profile) -{ - if (sampler.image && ((profile == EEsProfile && version < 310) || (profile != EEsProfile && version < 430))) - return; - - // - // textureSize() and imageSize() - // - - int sizeDims = dimMap[sampler.dim] + (sampler.arrayed ? 1 : 0) - (sampler.dim == EsdCube ? 1 : 0); - if (profile == EEsProfile) - commonBuiltins.append("highp "); - if (sizeDims == 1) - commonBuiltins.append("int"); - else { - commonBuiltins.append("ivec"); - commonBuiltins.append(postfixes[sizeDims]); - } - if (sampler.image) - commonBuiltins.append(" imageSize(readonly writeonly volatile coherent "); - else - commonBuiltins.append(" textureSize("); - commonBuiltins.append(typeName); - if (! sampler.image && sampler.dim != EsdRect && sampler.dim != EsdBuffer && ! sampler.ms) - commonBuiltins.append(",int);\n"); - else - commonBuiltins.append(");\n"); - - // - // textureSamples() and imageSamples() - // - - // GL_ARB_shader_texture_image_samples - // TODO: spec issue? there are no memory qualifiers; how to query a writeonly/readonly image, etc? - if (profile != EEsProfile && version >= 430 && sampler.ms) { - commonBuiltins.append("int "); - if (sampler.image) - commonBuiltins.append("imageSamples(readonly writeonly volatile coherent "); - else - commonBuiltins.append("textureSamples("); - commonBuiltins.append(typeName); - commonBuiltins.append(");\n"); - } - - // - // textureQueryLod(), fragment stage only - // - - if (profile != EEsProfile && version >= 400 && ! sampler.image && sampler.dim != EsdRect && ! sampler.ms && sampler.dim != EsdBuffer) { - stageBuiltins[EShLangFragment].append("vec2 textureQueryLod("); - stageBuiltins[EShLangFragment].append(typeName); - if (dimMap[sampler.dim] == 1) - stageBuiltins[EShLangFragment].append(", float"); - else { - stageBuiltins[EShLangFragment].append(", vec"); - stageBuiltins[EShLangFragment].append(postfixes[dimMap[sampler.dim]]); - } - stageBuiltins[EShLangFragment].append(");\n"); - } - - // - // textureQueryLevels() - // - - if (profile != EEsProfile && version >= 430 && ! sampler.image && sampler.dim != EsdRect && ! 
sampler.ms && sampler.dim != EsdBuffer) { - commonBuiltins.append("int textureQueryLevels("); - commonBuiltins.append(typeName); - commonBuiltins.append(");\n"); - } -} - -// -// Helper function for add2ndGenerationSamplingImaging(), -// when adding context-independent built-in functions. -// -// Add all the image access functions for the given type. -// -void TBuiltIns::addImageFunctions(TSampler sampler, const TString& typeName, int version, EProfile profile) -{ - int dims = dimMap[sampler.dim]; - // most things with an array add a dimension, except for cubemaps - if (sampler.arrayed && sampler.dim != EsdCube) - ++dims; - - TString imageParams = typeName; - if (dims == 1) - imageParams.append(", int"); - else { - imageParams.append(", ivec"); - imageParams.append(postfixes[dims]); - } - if (sampler.ms) - imageParams.append(", int"); - - if (profile == EEsProfile) - commonBuiltins.append("highp "); - commonBuiltins.append(prefixes[sampler.type]); - commonBuiltins.append("vec4 imageLoad(readonly volatile coherent "); - commonBuiltins.append(imageParams); - commonBuiltins.append(");\n"); - - commonBuiltins.append("void imageStore(writeonly volatile coherent "); - commonBuiltins.append(imageParams); - commonBuiltins.append(", "); - commonBuiltins.append(prefixes[sampler.type]); - commonBuiltins.append("vec4);\n"); - - if (sampler.dim != Esd1D && sampler.dim != EsdBuffer && profile != EEsProfile && version >= 450) { - commonBuiltins.append("int sparseImageLoadARB(readonly volatile coherent "); - commonBuiltins.append(imageParams); - commonBuiltins.append(", out "); - commonBuiltins.append(prefixes[sampler.type]); - commonBuiltins.append("vec4"); - commonBuiltins.append(");\n"); - } - - if ( profile != EEsProfile || - (profile == EEsProfile && version >= 310)) { - if (sampler.type == EbtInt || sampler.type == EbtUint) { - const char* dataType = sampler.type == EbtInt ? "highp int" : "highp uint"; - - const int numBuiltins = 7; - - static const char* atomicFunc[numBuiltins] = { - " imageAtomicAdd(volatile coherent ", - " imageAtomicMin(volatile coherent ", - " imageAtomicMax(volatile coherent ", - " imageAtomicAnd(volatile coherent ", - " imageAtomicOr(volatile coherent ", - " imageAtomicXor(volatile coherent ", - " imageAtomicExchange(volatile coherent " - }; - - for (size_t i = 0; i < numBuiltins; ++i) { - commonBuiltins.append(dataType); - commonBuiltins.append(atomicFunc[i]); - commonBuiltins.append(imageParams); - commonBuiltins.append(", "); - commonBuiltins.append(dataType); - commonBuiltins.append(");\n"); - } - - commonBuiltins.append(dataType); - commonBuiltins.append(" imageAtomicCompSwap(volatile coherent "); - commonBuiltins.append(imageParams); - commonBuiltins.append(", "); - commonBuiltins.append(dataType); - commonBuiltins.append(", "); - commonBuiltins.append(dataType); - commonBuiltins.append(");\n"); - } else { - // not int or uint - // GL_ARB_ES3_1_compatibility - // TODO: spec issue: are there restrictions on the kind of layout() that can be used? what about dropping memory qualifiers? 
- if ((profile != EEsProfile && version >= 450) || - (profile == EEsProfile && version >= 310)) { - commonBuiltins.append("float imageAtomicExchange(volatile coherent "); - commonBuiltins.append(imageParams); - commonBuiltins.append(", float);\n"); - } - } - } - -#ifdef AMD_EXTENSIONS - if (sampler.dim == EsdRect || sampler.dim == EsdBuffer || sampler.shadow || sampler.ms) - return; - - if (profile == EEsProfile || version < 450) - return; - - TString imageLodParams = typeName; - if (dims == 1) - imageLodParams.append(", int"); - else { - imageLodParams.append(", ivec"); - imageLodParams.append(postfixes[dims]); - } - imageLodParams.append(", int"); - - commonBuiltins.append(prefixes[sampler.type]); - commonBuiltins.append("vec4 imageLoadLodAMD(readonly volatile coherent "); - commonBuiltins.append(imageLodParams); - commonBuiltins.append(");\n"); - - commonBuiltins.append("void imageStoreLodAMD(writeonly volatile coherent "); - commonBuiltins.append(imageLodParams); - commonBuiltins.append(", "); - commonBuiltins.append(prefixes[sampler.type]); - commonBuiltins.append("vec4);\n"); - - if (sampler.dim != Esd1D) { - commonBuiltins.append("int sparseImageLoadLodAMD(readonly volatile coherent "); - commonBuiltins.append(imageLodParams); - commonBuiltins.append(", out "); - commonBuiltins.append(prefixes[sampler.type]); - commonBuiltins.append("vec4"); - commonBuiltins.append(");\n"); - } -#endif -} - -// -// Helper function for initialize(), -// when adding context-independent built-in functions. -// -// Add all the subpass access functions for the given type. -// -void TBuiltIns::addSubpassSampling(TSampler sampler, const TString& typeName, int /*version*/, EProfile /*profile*/) -{ - stageBuiltins[EShLangFragment].append(prefixes[sampler.type]); - stageBuiltins[EShLangFragment].append("vec4 subpassLoad"); - stageBuiltins[EShLangFragment].append("("); - stageBuiltins[EShLangFragment].append(typeName.c_str()); - if (sampler.ms) - stageBuiltins[EShLangFragment].append(", int"); - stageBuiltins[EShLangFragment].append(");\n"); -} - -// -// Helper function for add2ndGenerationSamplingImaging(), -// when adding context-independent built-in functions. -// -// Add all the texture lookup functions for the given type. 
-// -void TBuiltIns::addSamplingFunctions(TSampler sampler, const TString& typeName, int version, EProfile profile) -{ - // - // texturing - // - for (int proj = 0; proj <= 1; ++proj) { // loop over "bool" projective or not - - if (proj && (sampler.dim == EsdCube || sampler.dim == EsdBuffer || sampler.arrayed || sampler.ms)) - continue; - - for (int lod = 0; lod <= 1; ++lod) { - - if (lod && (sampler.dim == EsdBuffer || sampler.dim == EsdRect || sampler.ms)) - continue; - if (lod && sampler.dim == Esd2D && sampler.arrayed && sampler.shadow) - continue; - if (lod && sampler.dim == EsdCube && sampler.shadow) - continue; - - for (int bias = 0; bias <= 1; ++bias) { - - if (bias && (lod || sampler.ms)) - continue; - if (bias && sampler.dim == Esd2D && sampler.shadow && sampler.arrayed) - continue; - if (bias && (sampler.dim == EsdRect || sampler.dim == EsdBuffer)) - continue; - - for (int offset = 0; offset <= 1; ++offset) { // loop over "bool" offset or not - - if (proj + offset + bias + lod > 3) - continue; - if (offset && (sampler.dim == EsdCube || sampler.dim == EsdBuffer || sampler.ms)) - continue; - - for (int fetch = 0; fetch <= 1; ++fetch) { // loop over "bool" fetch or not - - if (proj + offset + fetch + bias + lod > 3) - continue; - if (fetch && (lod || bias)) - continue; - if (fetch && (sampler.shadow || sampler.dim == EsdCube)) - continue; - if (fetch == 0 && (sampler.ms || sampler.dim == EsdBuffer)) - continue; - - for (int grad = 0; grad <= 1; ++grad) { // loop over "bool" grad or not - - if (grad && (lod || bias || sampler.ms)) - continue; - if (grad && sampler.dim == EsdBuffer) - continue; - if (proj + offset + fetch + grad + bias + lod > 3) - continue; - - for (int extraProj = 0; extraProj <= 1; ++extraProj) { - bool compare = false; - int totalDims = dimMap[sampler.dim] + (sampler.arrayed ? 1 : 0); - // skip dummy unused second component for 1D non-array shadows - if (sampler.shadow && totalDims < 2) - totalDims = 2; - totalDims += (sampler.shadow ? 1 : 0) + proj; - if (totalDims > 4 && sampler.shadow) { - compare = true; - totalDims = 4; - } - assert(totalDims <= 4); - - if (extraProj && ! 
proj) - continue; - if (extraProj && (sampler.dim == Esd3D || sampler.shadow)) - continue; - - for (int lodClamp = 0; lodClamp <= 1 ;++lodClamp) { // loop over "bool" lod clamp - - if (lodClamp && (profile == EEsProfile || version < 450)) - continue; - if (lodClamp && (proj || lod || fetch)) - continue; - - for (int sparse = 0; sparse <= 1; ++sparse) { // loop over "bool" sparse or not - - if (sparse && (profile == EEsProfile || version < 450)) - continue; - // Sparse sampling is not for 1D/1D array texture, buffer texture, and projective texture - if (sparse && (sampler.dim == Esd1D || sampler.dim == EsdBuffer || proj)) - continue; - - TString s; - - // return type - if (sparse) - s.append("int "); - else { - if (sampler.shadow) - s.append("float "); - else { - s.append(prefixes[sampler.type]); - s.append("vec4 "); - } - } - - // name - if (sparse) { - if (fetch) - s.append("sparseTexel"); - else - s.append("sparseTexture"); - } else { - if (fetch) - s.append("texel"); - else - s.append("texture"); - } - if (proj) - s.append("Proj"); - if (lod) - s.append("Lod"); - if (grad) - s.append("Grad"); - if (fetch) - s.append("Fetch"); - if (offset) - s.append("Offset"); - if (lodClamp) - s.append("Clamp"); - if (lodClamp || sparse) - s.append("ARB"); - s.append("("); - - // sampler type - s.append(typeName); - - // P coordinate - if (extraProj) - s.append(",vec4"); - else { - s.append(","); - TBasicType t = fetch ? EbtInt : EbtFloat; - if (totalDims == 1) - s.append(TType::getBasicString(t)); - else { - s.append(prefixes[t]); - s.append("vec"); - s.append(postfixes[totalDims]); - } - } - - if (bias && compare) - continue; - - // non-optional lod argument (lod that's not driven by lod loop) or sample - if ((fetch && sampler.dim != EsdBuffer && sampler.dim != EsdRect && !sampler.ms) || - (sampler.ms && fetch)) - s.append(",int"); - - // non-optional lod - if (lod) - s.append(",float"); - - // gradient arguments - if (grad) { - if (dimMap[sampler.dim] == 1) - s.append(",float,float"); - else { - s.append(",vec"); - s.append(postfixes[dimMap[sampler.dim]]); - s.append(",vec"); - s.append(postfixes[dimMap[sampler.dim]]); - } - } - - // offset - if (offset) { - if (dimMap[sampler.dim] == 1) - s.append(",int"); - else { - s.append(",ivec"); - s.append(postfixes[dimMap[sampler.dim]]); - } - } - - // non-optional compare - if (compare) - s.append(",float"); - - // lod clamp - if (lodClamp) - s.append(",float"); - - // texel out (for sparse texture) - if (sparse) { - s.append(",out "); - if (sampler.shadow) - s.append("float "); - else { - s.append(prefixes[sampler.type]); - s.append("vec4 "); - } - } - - // optional bias - if (bias) - s.append(",float"); - - s.append(");\n"); - - // Add to the per-language set of built-ins - - if (bias || lodClamp) - stageBuiltins[EShLangFragment].append(s); - else - commonBuiltins.append(s); - } - } - } - } - } - } - } - } - } -} - -// -// Helper function for add2ndGenerationSamplingImaging(), -// when adding context-independent built-in functions. -// -// Add all the texture gather functions for the given type. 
-// -void TBuiltIns::addGatherFunctions(TSampler sampler, const TString& typeName, int version, EProfile profile) -{ - switch (sampler.dim) { - case Esd2D: - case EsdRect: - case EsdCube: - break; - default: - return; - } - - if (sampler.ms) - return; - - if (version < 140 && sampler.dim == EsdRect && sampler.type != EbtFloat) - return; - - for (int offset = 0; offset < 3; ++offset) { // loop over three forms of offset in the call name: none, Offset, and Offsets - - for (int comp = 0; comp < 2; ++comp) { // loop over presence of comp argument - - if (comp > 0 && sampler.shadow) - continue; - - if (offset > 0 && sampler.dim == EsdCube) - continue; - - for (int sparse = 0; sparse <= 1; ++sparse) { // loop over "bool" sparse or not - if (sparse && (profile == EEsProfile || version < 450)) - continue; - - TString s; - - // return type - if (sparse) - s.append("int "); - else { - s.append(prefixes[sampler.type]); - s.append("vec4 "); - } - - // name - if (sparse) - s.append("sparseTextureGather"); - else - s.append("textureGather"); - switch (offset) { - case 1: - s.append("Offset"); - break; - case 2: - s.append("Offsets"); - default: - break; - } - - if (sparse) - s.append("ARB"); - s.append("("); - - // sampler type argument - s.append(typeName); - - // P coordinate argument - s.append(",vec"); - int totalDims = dimMap[sampler.dim] + (sampler.arrayed ? 1 : 0); - s.append(postfixes[totalDims]); - - // refZ argument - if (sampler.shadow) - s.append(",float"); - - // offset argument - if (offset > 0) { - s.append(",ivec2"); - if (offset == 2) - s.append("[4]"); - } - - // texel out (for sparse texture) - if (sparse) { - s.append(",out "); - s.append(prefixes[sampler.type]); - s.append("vec4 "); - } - - // comp argument - if (comp) - s.append(",int"); - - s.append(");\n"); - commonBuiltins.append(s); - } - } - } - -#ifdef AMD_EXTENSIONS - if (sampler.dim == EsdRect || sampler.shadow) - return; - - if (profile == EEsProfile || version < 450) - return; - - for (int bias = 0; bias < 2; ++bias) { // loop over presence of bias argument - - for (int lod = 0; lod < 2; ++lod) { // loop over presence of lod argument - - if ((lod && bias) || (lod == 0 && bias == 0)) - continue; - - for (int offset = 0; offset < 3; ++offset) { // loop over three forms of offset in the call name: none, Offset, and Offsets - - for (int comp = 0; comp < 2; ++comp) { // loop over presence of comp argument - - if (comp == 0 && bias) - continue; - - if (offset > 0 && sampler.dim == EsdCube) - continue; - - for (int sparse = 0; sparse <= 1; ++sparse) { // loop over "bool" sparse or not - if (sparse && (profile == EEsProfile || version < 450)) - continue; - - TString s; - - // return type - if (sparse) - s.append("int "); - else { - s.append(prefixes[sampler.type]); - s.append("vec4 "); - } - - // name - if (sparse) - s.append("sparseTextureGather"); - else - s.append("textureGather"); - - if (lod) - s.append("Lod"); - - switch (offset) { - case 1: - s.append("Offset"); - break; - case 2: - s.append("Offsets"); - default: - break; - } - - if (lod) - s.append("AMD"); - else if (sparse) - s.append("ARB"); - - s.append("("); - - // sampler type argument - s.append(typeName); - - // P coordinate argument - s.append(",vec"); - int totalDims = dimMap[sampler.dim] + (sampler.arrayed ? 
1 : 0); - s.append(postfixes[totalDims]); - - // lod argument - if (lod) - s.append(",float"); - - // offset argument - if (offset > 0) { - s.append(",ivec2"); - if (offset == 2) - s.append("[4]"); - } - - // texel out (for sparse texture) - if (sparse) { - s.append(",out "); - s.append(prefixes[sampler.type]); - s.append("vec4 "); - } - - // comp argument - if (comp) - s.append(",int"); - - // bias argument - if (bias) - s.append(",float"); - - s.append(");\n"); - if (bias) - stageBuiltins[EShLangFragment].append(s); - else - commonBuiltins.append(s); - } - } - } - } - } -#endif -} - -// -// Add context-dependent built-in functions and variables that are present -// for the given version and profile. All the results are put into just the -// commonBuiltins, because it is called for just a specific stage. So, -// add stage-specific entries to the commonBuiltins, and only if that stage -// was requested. -// -void TBuiltIns::initialize(const TBuiltInResource &resources, int version, EProfile profile, const SpvVersion& spvVersion, EShLanguage language) -{ - // - // Initialize the context-dependent (resource-dependent) built-in strings for parsing. - // - - //============================================================================ - // - // Standard Uniforms - // - //============================================================================ - - TString& s = commonBuiltins; - const int maxSize = 80; - char builtInConstant[maxSize]; - - // - // Build string of implementation dependent constants. - // - - if (profile == EEsProfile) { - snprintf(builtInConstant, maxSize, "const mediump int gl_MaxVertexAttribs = %d;", resources.maxVertexAttribs); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const mediump int gl_MaxVertexUniformVectors = %d;", resources.maxVertexUniformVectors); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const mediump int gl_MaxVertexTextureImageUnits = %d;", resources.maxVertexTextureImageUnits); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const mediump int gl_MaxCombinedTextureImageUnits = %d;", resources.maxCombinedTextureImageUnits); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const mediump int gl_MaxTextureImageUnits = %d;", resources.maxTextureImageUnits); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const mediump int gl_MaxFragmentUniformVectors = %d;", resources.maxFragmentUniformVectors); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const mediump int gl_MaxDrawBuffers = %d;", resources.maxDrawBuffers); - s.append(builtInConstant); - - if (version == 100) { - snprintf(builtInConstant, maxSize, "const mediump int gl_MaxVaryingVectors = %d;", resources.maxVaryingVectors); - s.append(builtInConstant); - } else { - snprintf(builtInConstant, maxSize, "const mediump int gl_MaxVertexOutputVectors = %d;", resources.maxVertexOutputVectors); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const mediump int gl_MaxFragmentInputVectors = %d;", resources.maxFragmentInputVectors); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const mediump int gl_MinProgramTexelOffset = %d;", resources.minProgramTexelOffset); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const mediump int gl_MaxProgramTexelOffset = %d;", resources.maxProgramTexelOffset); - s.append(builtInConstant); - } - - if (version >= 310) { - // geometry - - snprintf(builtInConstant, maxSize, "const int 
gl_MaxGeometryInputComponents = %d;", resources.maxGeometryInputComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxGeometryOutputComponents = %d;", resources.maxGeometryOutputComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxGeometryImageUniforms = %d;", resources.maxGeometryImageUniforms); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxGeometryTextureImageUnits = %d;", resources.maxGeometryTextureImageUnits); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxGeometryOutputVertices = %d;", resources.maxGeometryOutputVertices); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxGeometryTotalOutputComponents = %d;", resources.maxGeometryTotalOutputComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxGeometryUniformComponents = %d;", resources.maxGeometryUniformComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxGeometryAtomicCounters = %d;", resources.maxGeometryAtomicCounters); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxGeometryAtomicCounterBuffers = %d;", resources.maxGeometryAtomicCounterBuffers); - s.append(builtInConstant); - - // tessellation - - snprintf(builtInConstant, maxSize, "const int gl_MaxTessControlInputComponents = %d;", resources.maxTessControlInputComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessControlOutputComponents = %d;", resources.maxTessControlOutputComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessControlTextureImageUnits = %d;", resources.maxTessControlTextureImageUnits); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessControlUniformComponents = %d;", resources.maxTessControlUniformComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessControlTotalOutputComponents = %d;", resources.maxTessControlTotalOutputComponents); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const int gl_MaxTessEvaluationInputComponents = %d;", resources.maxTessEvaluationInputComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessEvaluationOutputComponents = %d;", resources.maxTessEvaluationOutputComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessEvaluationTextureImageUnits = %d;", resources.maxTessEvaluationTextureImageUnits); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessEvaluationUniformComponents = %d;", resources.maxTessEvaluationUniformComponents); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const int gl_MaxTessPatchComponents = %d;", resources.maxTessPatchComponents); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const int gl_MaxPatchVertices = %d;", resources.maxPatchVertices); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessGenLevel = %d;", resources.maxTessGenLevel); - s.append(builtInConstant); - - // this is here instead of with the others in initialize(version, profile) due to the dependence on gl_MaxPatchVertices - if (language == EShLangTessControl || language == EShLangTessEvaluation) { - s.append( - "in gl_PerVertex {" - "highp vec4 
gl_Position;" - "highp float gl_PointSize;" -#ifdef NV_EXTENSIONS - "highp vec4 gl_SecondaryPositionNV;" // GL_NV_stereo_view_rendering - "highp vec4 gl_PositionPerViewNV[];" // GL_NVX_multiview_per_view_attributes -#endif - "} gl_in[gl_MaxPatchVertices];" - "\n"); - } - } - - } else { - // non-ES profile - - snprintf(builtInConstant, maxSize, "const int gl_MaxVertexAttribs = %d;", resources.maxVertexAttribs); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const int gl_MaxVertexTextureImageUnits = %d;", resources.maxVertexTextureImageUnits); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const int gl_MaxCombinedTextureImageUnits = %d;", resources.maxCombinedTextureImageUnits); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const int gl_MaxTextureImageUnits = %d;", resources.maxTextureImageUnits); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const int gl_MaxDrawBuffers = %d;", resources.maxDrawBuffers); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const int gl_MaxLights = %d;", resources.maxLights); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const int gl_MaxClipPlanes = %d;", resources.maxClipPlanes); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const int gl_MaxTextureUnits = %d;", resources.maxTextureUnits); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const int gl_MaxTextureCoords = %d;", resources.maxTextureCoords); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const int gl_MaxVertexUniformComponents = %d;", resources.maxVertexUniformComponents); - s.append(builtInConstant); - - if (version < 150 || ARBCompatibility) { - snprintf(builtInConstant, maxSize, "const int gl_MaxVaryingFloats = %d;", resources.maxVaryingFloats); - s.append(builtInConstant); - } - - snprintf(builtInConstant, maxSize, "const int gl_MaxFragmentUniformComponents = %d;", resources.maxFragmentUniformComponents); - s.append(builtInConstant); - - if (spvVersion.spv == 0 && IncludeLegacy(version, profile, spvVersion)) { - // - // OpenGL'uniform' state. Page numbers are in reference to version - // 1.4 of the OpenGL specification. - // - - // - // Matrix state. p. 31, 32, 37, 39, 40. - // - s.append("uniform mat4 gl_TextureMatrix[gl_MaxTextureCoords];" - - // - // Derived matrix state that provides inverse and transposed versions - // of the matrices above. - // - "uniform mat4 gl_TextureMatrixInverse[gl_MaxTextureCoords];" - - "uniform mat4 gl_TextureMatrixTranspose[gl_MaxTextureCoords];" - - "uniform mat4 gl_TextureMatrixInverseTranspose[gl_MaxTextureCoords];" - - // - // Clip planes p. 42. - // - "uniform vec4 gl_ClipPlane[gl_MaxClipPlanes];" - - // - // Light State p 50, 53, 55. - // - "uniform gl_LightSourceParameters gl_LightSource[gl_MaxLights];" - - // - // Derived state from products of light. - // - "uniform gl_LightProducts gl_FrontLightProduct[gl_MaxLights];" - "uniform gl_LightProducts gl_BackLightProduct[gl_MaxLights];" - - // - // Texture Environment and Generation, p. 152, p. 40-42. 
- // - "uniform vec4 gl_TextureEnvColor[gl_MaxTextureImageUnits];" - "uniform vec4 gl_EyePlaneS[gl_MaxTextureCoords];" - "uniform vec4 gl_EyePlaneT[gl_MaxTextureCoords];" - "uniform vec4 gl_EyePlaneR[gl_MaxTextureCoords];" - "uniform vec4 gl_EyePlaneQ[gl_MaxTextureCoords];" - "uniform vec4 gl_ObjectPlaneS[gl_MaxTextureCoords];" - "uniform vec4 gl_ObjectPlaneT[gl_MaxTextureCoords];" - "uniform vec4 gl_ObjectPlaneR[gl_MaxTextureCoords];" - "uniform vec4 gl_ObjectPlaneQ[gl_MaxTextureCoords];"); - } - - if (version >= 130) { - snprintf(builtInConstant, maxSize, "const int gl_MaxClipDistances = %d;", resources.maxClipDistances); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxVaryingComponents = %d;", resources.maxVaryingComponents); - s.append(builtInConstant); - - // GL_ARB_shading_language_420pack - snprintf(builtInConstant, maxSize, "const mediump int gl_MinProgramTexelOffset = %d;", resources.minProgramTexelOffset); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const mediump int gl_MaxProgramTexelOffset = %d;", resources.maxProgramTexelOffset); - s.append(builtInConstant); - } - - // geometry - if (version >= 150) { - snprintf(builtInConstant, maxSize, "const int gl_MaxGeometryInputComponents = %d;", resources.maxGeometryInputComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxGeometryOutputComponents = %d;", resources.maxGeometryOutputComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxGeometryTextureImageUnits = %d;", resources.maxGeometryTextureImageUnits); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxGeometryOutputVertices = %d;", resources.maxGeometryOutputVertices); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxGeometryTotalOutputComponents = %d;", resources.maxGeometryTotalOutputComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxGeometryUniformComponents = %d;", resources.maxGeometryUniformComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxGeometryVaryingComponents = %d;", resources.maxGeometryVaryingComponents); - s.append(builtInConstant); - - } - - if (version >= 150) { - snprintf(builtInConstant, maxSize, "const int gl_MaxVertexOutputComponents = %d;", resources.maxVertexOutputComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxFragmentInputComponents = %d;", resources.maxFragmentInputComponents); - s.append(builtInConstant); - } - - // tessellation - if (version >= 150) { - snprintf(builtInConstant, maxSize, "const int gl_MaxTessControlInputComponents = %d;", resources.maxTessControlInputComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessControlOutputComponents = %d;", resources.maxTessControlOutputComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessControlTextureImageUnits = %d;", resources.maxTessControlTextureImageUnits); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessControlUniformComponents = %d;", resources.maxTessControlUniformComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessControlTotalOutputComponents = %d;", resources.maxTessControlTotalOutputComponents); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const int 
gl_MaxTessEvaluationInputComponents = %d;", resources.maxTessEvaluationInputComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessEvaluationOutputComponents = %d;", resources.maxTessEvaluationOutputComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessEvaluationTextureImageUnits = %d;", resources.maxTessEvaluationTextureImageUnits); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessEvaluationUniformComponents = %d;", resources.maxTessEvaluationUniformComponents); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const int gl_MaxTessPatchComponents = %d;", resources.maxTessPatchComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessGenLevel = %d;", resources.maxTessGenLevel); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxPatchVertices = %d;", resources.maxPatchVertices); - s.append(builtInConstant); - - // this is here instead of with the others in initialize(version, profile) due to the dependence on gl_MaxPatchVertices - if (language == EShLangTessControl || language == EShLangTessEvaluation) { - s.append( - "in gl_PerVertex {" - "vec4 gl_Position;" - "float gl_PointSize;" - "float gl_ClipDistance[];" - ); - if (profile == ECompatibilityProfile) - s.append( - "vec4 gl_ClipVertex;" - "vec4 gl_FrontColor;" - "vec4 gl_BackColor;" - "vec4 gl_FrontSecondaryColor;" - "vec4 gl_BackSecondaryColor;" - "vec4 gl_TexCoord[];" - "float gl_FogFragCoord;" - ); - if (profile != EEsProfile && version >= 450) - s.append( - "float gl_CullDistance[];" -#ifdef NV_EXTENSIONS - "vec4 gl_SecondaryPositionNV;" // GL_NV_stereo_view_rendering - "vec4 gl_PositionPerViewNV[];" // GL_NVX_multiview_per_view_attributes -#endif - ); - s.append( - "} gl_in[gl_MaxPatchVertices];" - "\n"); - } - } - - if (version >= 150) { - snprintf(builtInConstant, maxSize, "const int gl_MaxViewports = %d;", resources.maxViewports); - s.append(builtInConstant); - } - - // images - if (version >= 130) { - snprintf(builtInConstant, maxSize, "const int gl_MaxCombinedImageUnitsAndFragmentOutputs = %d;", resources.maxCombinedImageUnitsAndFragmentOutputs); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxImageSamples = %d;", resources.maxImageSamples); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessControlImageUniforms = %d;", resources.maxTessControlImageUniforms); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessEvaluationImageUniforms = %d;", resources.maxTessEvaluationImageUniforms); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxGeometryImageUniforms = %d;", resources.maxGeometryImageUniforms); - s.append(builtInConstant); - } - - // enhanced layouts - if (version >= 430) { - snprintf(builtInConstant, maxSize, "const int gl_MaxTransformFeedbackBuffers = %d;", resources.maxTransformFeedbackBuffers); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTransformFeedbackInterleavedComponents = %d;", resources.maxTransformFeedbackInterleavedComponents); - s.append(builtInConstant); - } - } - - // images (some in compute below) - if ((profile == EEsProfile && version >= 310) || - (profile != EEsProfile && version >= 130)) { - snprintf(builtInConstant, maxSize, "const int gl_MaxImageUnits = %d;", resources.maxImageUnits); - 
s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxCombinedShaderOutputResources = %d;", resources.maxCombinedShaderOutputResources); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxVertexImageUniforms = %d;", resources.maxVertexImageUniforms); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxFragmentImageUniforms = %d;", resources.maxFragmentImageUniforms); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxCombinedImageUniforms = %d;", resources.maxCombinedImageUniforms); - s.append(builtInConstant); - } - - // atomic counters (some in compute below) - if ((profile == EEsProfile && version >= 310) || - (profile != EEsProfile && version >= 420)) { - snprintf(builtInConstant, maxSize, "const int gl_MaxVertexAtomicCounters = %d;", resources. maxVertexAtomicCounters); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxFragmentAtomicCounters = %d;", resources. maxFragmentAtomicCounters); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxCombinedAtomicCounters = %d;", resources. maxCombinedAtomicCounters); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxAtomicCounterBindings = %d;", resources. maxAtomicCounterBindings); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxVertexAtomicCounterBuffers = %d;", resources. maxVertexAtomicCounterBuffers); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxFragmentAtomicCounterBuffers = %d;", resources. maxFragmentAtomicCounterBuffers); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxCombinedAtomicCounterBuffers = %d;", resources. maxCombinedAtomicCounterBuffers); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxAtomicCounterBufferSize = %d;", resources. maxAtomicCounterBufferSize); - s.append(builtInConstant); - } - if (profile != EEsProfile && version >= 420) { - snprintf(builtInConstant, maxSize, "const int gl_MaxTessControlAtomicCounters = %d;", resources. maxTessControlAtomicCounters); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessEvaluationAtomicCounters = %d;", resources. maxTessEvaluationAtomicCounters); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxGeometryAtomicCounters = %d;", resources. maxGeometryAtomicCounters); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessControlAtomicCounterBuffers = %d;", resources. maxTessControlAtomicCounterBuffers); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxTessEvaluationAtomicCounterBuffers = %d;", resources. maxTessEvaluationAtomicCounterBuffers); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxGeometryAtomicCounterBuffers = %d;", resources. 
maxGeometryAtomicCounterBuffers); - s.append(builtInConstant); - - s.append("\n"); - } - - // compute - if ((profile == EEsProfile && version >= 310) || (profile != EEsProfile && version >= 420)) { - snprintf(builtInConstant, maxSize, "const ivec3 gl_MaxComputeWorkGroupCount = ivec3(%d,%d,%d);", resources.maxComputeWorkGroupCountX, - resources.maxComputeWorkGroupCountY, - resources.maxComputeWorkGroupCountZ); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const ivec3 gl_MaxComputeWorkGroupSize = ivec3(%d,%d,%d);", resources.maxComputeWorkGroupSizeX, - resources.maxComputeWorkGroupSizeY, - resources.maxComputeWorkGroupSizeZ); - s.append(builtInConstant); - - snprintf(builtInConstant, maxSize, "const int gl_MaxComputeUniformComponents = %d;", resources.maxComputeUniformComponents); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxComputeTextureImageUnits = %d;", resources.maxComputeTextureImageUnits); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxComputeImageUniforms = %d;", resources.maxComputeImageUniforms); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxComputeAtomicCounters = %d;", resources.maxComputeAtomicCounters); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxComputeAtomicCounterBuffers = %d;", resources.maxComputeAtomicCounterBuffers); - s.append(builtInConstant); - - s.append("\n"); - } - - // GL_ARB_cull_distance - if (profile != EEsProfile && version >= 450) { - snprintf(builtInConstant, maxSize, "const int gl_MaxCullDistances = %d;", resources.maxCullDistances); - s.append(builtInConstant); - snprintf(builtInConstant, maxSize, "const int gl_MaxCombinedClipAndCullDistances = %d;", resources.maxCombinedClipAndCullDistances); - s.append(builtInConstant); - } - - // GL_ARB_ES3_1_compatibility - if ((profile != EEsProfile && version >= 450) || - (profile == EEsProfile && version >= 310)) { - snprintf(builtInConstant, maxSize, "const int gl_MaxSamples = %d;", resources.maxSamples); - s.append(builtInConstant); - } - -#ifdef AMD_EXTENSIONS - // GL_AMD_gcn_shader - if (profile != EEsProfile && version >= 450) { - snprintf(builtInConstant, maxSize, "const int gl_SIMDGroupSizeAMD = 64;"); - s.append(builtInConstant); - } -#endif - - s.append("\n"); -} - -// -// To support special built-ins that have a special qualifier that cannot be declared textually -// in a shader, like gl_Position. -// -// This lets the type of the built-in be declared textually, and then have just its qualifier be -// updated afterward. -// -// Safe to call even if name is not present. -// -// Only use this for built-in variables that have a special qualifier in TStorageQualifier. -// New built-in variables should use a generic (textually declarable) qualifier in -// TStoraregQualifier and only call BuiltInVariable(). -// -static void SpecialQualifier(const char* name, TStorageQualifier qualifier, TBuiltInVariable builtIn, TSymbolTable& symbolTable) -{ - TSymbol* symbol = symbolTable.find(name); - if (symbol) { - TQualifier& symQualifier = symbol->getWritableType().getQualifier(); - symQualifier.storage = qualifier; - symQualifier.builtIn = builtIn; - } -} - -// -// To tag built-in variables with their TBuiltInVariable enum. Use this when the -// normal declaration text already gets the qualifier right, and all that's needed -// is setting the builtIn field. This should be the normal way for all new -// built-in variables. 
-// -// If SpecialQualifier() was called, this does not need to be called. -// -// Safe to call even if name is not present. -// -static void BuiltInVariable(const char* name, TBuiltInVariable builtIn, TSymbolTable& symbolTable) -{ - TSymbol* symbol = symbolTable.find(name); - if (! symbol) - return; - - TQualifier& symQualifier = symbol->getWritableType().getQualifier(); - symQualifier.builtIn = builtIn; -} - -// -// For built-in variables inside a named block. -// SpecialQualifier() won't ever go inside a block; their member's qualifier come -// from the qualification of the block. -// -// See comments above for other detail. -// -static void BuiltInVariable(const char* blockName, const char* name, TBuiltInVariable builtIn, TSymbolTable& symbolTable) -{ - TSymbol* symbol = symbolTable.find(blockName); - if (! symbol) - return; - - TTypeList& structure = *symbol->getWritableType().getWritableStruct(); - for (int i = 0; i < (int)structure.size(); ++i) { - if (structure[i].type->getFieldName().compare(name) == 0) { - structure[i].type->getQualifier().builtIn = builtIn; - return; - } - } -} - -// -// Finish adding/processing context-independent built-in symbols. -// 1) Programmatically add symbols that could not be added by simple text strings above. -// 2) Map built-in functions to operators, for those that will turn into an operation node -// instead of remaining a function call. -// 3) Tag extension-related symbols added to their base version with their extensions, so -// that if an early version has the extension turned off, there is an error reported on use. -// -void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion& spvVersion, EShLanguage language, TSymbolTable& symbolTable) -{ - // - // Tag built-in variables and functions with additional qualifier and extension information - // that cannot be declared with the text strings. - // - - // N.B.: a symbol should only be tagged once, and this function is called multiple times, once - // per stage that's used for this profile. 
So - // - generally, stick common ones in the fragment stage to ensure they are tagged exactly once - // - for ES, which has different precisions for different stages, the coarsest-grained tagging - // for a built-in used in many stages needs to be once for the fragment stage and once for - // the vertex stage - - switch(language) { - case EShLangVertex: - if (profile != EEsProfile) { - if (version >= 440) { - symbolTable.setVariableExtensions("gl_BaseVertexARB", 1, &E_GL_ARB_shader_draw_parameters); - symbolTable.setVariableExtensions("gl_BaseInstanceARB", 1, &E_GL_ARB_shader_draw_parameters); - symbolTable.setVariableExtensions("gl_DrawIDARB", 1, &E_GL_ARB_shader_draw_parameters); - BuiltInVariable("gl_BaseVertexARB", EbvBaseVertex, symbolTable); - BuiltInVariable("gl_BaseInstanceARB", EbvBaseInstance, symbolTable); - BuiltInVariable("gl_DrawIDARB", EbvDrawId, symbolTable); - } - if (version >= 460) { - BuiltInVariable("gl_BaseVertex", EbvBaseVertex, symbolTable); - BuiltInVariable("gl_BaseInstance", EbvBaseInstance, symbolTable); - BuiltInVariable("gl_DrawID", EbvDrawId, symbolTable); - } - symbolTable.setVariableExtensions("gl_SubGroupSizeARB", 1, &E_GL_ARB_shader_ballot); - symbolTable.setVariableExtensions("gl_SubGroupInvocationARB", 1, &E_GL_ARB_shader_ballot); - symbolTable.setVariableExtensions("gl_SubGroupEqMaskARB", 1, &E_GL_ARB_shader_ballot); - symbolTable.setVariableExtensions("gl_SubGroupGeMaskARB", 1, &E_GL_ARB_shader_ballot); - symbolTable.setVariableExtensions("gl_SubGroupGtMaskARB", 1, &E_GL_ARB_shader_ballot); - symbolTable.setVariableExtensions("gl_SubGroupLeMaskARB", 1, &E_GL_ARB_shader_ballot); - symbolTable.setVariableExtensions("gl_SubGroupLtMaskARB", 1, &E_GL_ARB_shader_ballot); - - symbolTable.setFunctionExtensions("ballotARB", 1, &E_GL_ARB_shader_ballot); - symbolTable.setFunctionExtensions("readInvocationARB", 1, &E_GL_ARB_shader_ballot); - symbolTable.setFunctionExtensions("readFirstInvocationARB", 1, &E_GL_ARB_shader_ballot); - - BuiltInVariable("gl_SubGroupInvocationARB", EbvSubGroupInvocation, symbolTable); - BuiltInVariable("gl_SubGroupEqMaskARB", EbvSubGroupEqMask, symbolTable); - BuiltInVariable("gl_SubGroupGeMaskARB", EbvSubGroupGeMask, symbolTable); - BuiltInVariable("gl_SubGroupGtMaskARB", EbvSubGroupGtMask, symbolTable); - BuiltInVariable("gl_SubGroupLeMaskARB", EbvSubGroupLeMask, symbolTable); - BuiltInVariable("gl_SubGroupLtMaskARB", EbvSubGroupLtMask, symbolTable); - - if (spvVersion.vulkan >= 100) - // Treat "gl_SubGroupSizeARB" as shader input instead of uniform for Vulkan - SpecialQualifier("gl_SubGroupSizeARB", EvqVaryingIn, EbvSubGroupSize, symbolTable); - - if (version >= 430) { - symbolTable.setFunctionExtensions("anyInvocationARB", 1, &E_GL_ARB_shader_group_vote); - symbolTable.setFunctionExtensions("allInvocationsARB", 1, &E_GL_ARB_shader_group_vote); - symbolTable.setFunctionExtensions("allInvocationsEqualARB", 1, &E_GL_ARB_shader_group_vote); - } - } - -#ifdef AMD_EXTENSIONS - if (profile != EEsProfile) { - symbolTable.setFunctionExtensions("minInvocationsAMD", 1, &E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("maxInvocationsAMD", 1, &E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("addInvocationsAMD", 1, &E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("minInvocationsNonUniformAMD", 1, &E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("maxInvocationsNonUniformAMD", 1, &E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("addInvocationsNonUniformAMD", 1, 
&E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("swizzleInvocationsAMD", 1, &E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("swizzleInvocationsWithPatternAMD", 1, &E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("writeInvocationAMD", 1, &E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("mbcntAMD", 1, &E_GL_AMD_shader_ballot); - - symbolTable.setFunctionExtensions("minInvocationsInclusiveScanAMD", 1, &E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("maxInvocationsInclusiveScanAMD", 1, &E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("addInvocationsInclusiveScanAMD", 1, &E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("minInvocationsInclusiveScanNonUniformAMD", 1, &E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("maxInvocationsInclusiveScanNonUniformAMD", 1, &E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("addInvocationsInclusiveScanNonUniformAMD", 1, &E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("minInvocationsExclusiveScanAMD", 1, &E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("maxInvocationsExclusiveScanAMD", 1, &E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("addInvocationsExclusiveScanAMD", 1, &E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("minInvocationsExclusiveScanNonUniformAMD", 1, &E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("maxInvocationsExclusiveScanNonUniformAMD", 1, &E_GL_AMD_shader_ballot); - symbolTable.setFunctionExtensions("addInvocationsExclusiveScanNonUniformAMD", 1, &E_GL_AMD_shader_ballot); - } - - if (profile != EEsProfile) { - symbolTable.setFunctionExtensions("min3", 1, &E_GL_AMD_shader_trinary_minmax); - symbolTable.setFunctionExtensions("max3", 1, &E_GL_AMD_shader_trinary_minmax); - symbolTable.setFunctionExtensions("mid3", 1, &E_GL_AMD_shader_trinary_minmax); - } - - if (profile != EEsProfile) { - symbolTable.setFunctionExtensions("cubeFaceIndexAMD", 1, &E_GL_AMD_gcn_shader); - symbolTable.setFunctionExtensions("cubeFaceCoordAMD", 1, &E_GL_AMD_gcn_shader); - symbolTable.setFunctionExtensions("timeAMD", 1, &E_GL_AMD_gcn_shader); - } -#endif - - // Compatibility variables, vertex only - if (spvVersion.spv == 0) { - BuiltInVariable("gl_Color", EbvColor, symbolTable); - BuiltInVariable("gl_SecondaryColor", EbvSecondaryColor, symbolTable); - BuiltInVariable("gl_Normal", EbvNormal, symbolTable); - BuiltInVariable("gl_Vertex", EbvVertex, symbolTable); - BuiltInVariable("gl_MultiTexCoord0", EbvMultiTexCoord0, symbolTable); - BuiltInVariable("gl_MultiTexCoord1", EbvMultiTexCoord1, symbolTable); - BuiltInVariable("gl_MultiTexCoord2", EbvMultiTexCoord2, symbolTable); - BuiltInVariable("gl_MultiTexCoord3", EbvMultiTexCoord3, symbolTable); - BuiltInVariable("gl_MultiTexCoord4", EbvMultiTexCoord4, symbolTable); - BuiltInVariable("gl_MultiTexCoord5", EbvMultiTexCoord5, symbolTable); - BuiltInVariable("gl_MultiTexCoord6", EbvMultiTexCoord6, symbolTable); - BuiltInVariable("gl_MultiTexCoord7", EbvMultiTexCoord7, symbolTable); - BuiltInVariable("gl_FogCoord", EbvFogFragCoord, symbolTable); - } - - if (profile == EEsProfile) { - if (spvVersion.spv == 0) { - symbolTable.setFunctionExtensions("texture2DGradEXT", 1, &E_GL_EXT_shader_texture_lod); - symbolTable.setFunctionExtensions("texture2DProjGradEXT", 1, &E_GL_EXT_shader_texture_lod); - symbolTable.setFunctionExtensions("textureCubeGradEXT", 1, &E_GL_EXT_shader_texture_lod); - if (version == 310) - 
symbolTable.setFunctionExtensions("textureGatherOffsets", Num_AEP_gpu_shader5, AEP_gpu_shader5); - } - if (version == 310) - symbolTable.setFunctionExtensions("fma", Num_AEP_gpu_shader5, AEP_gpu_shader5); - } - - if (profile == EEsProfile && version < 320) { - symbolTable.setFunctionExtensions("imageAtomicAdd", 1, &E_GL_OES_shader_image_atomic); - symbolTable.setFunctionExtensions("imageAtomicMin", 1, &E_GL_OES_shader_image_atomic); - symbolTable.setFunctionExtensions("imageAtomicMax", 1, &E_GL_OES_shader_image_atomic); - symbolTable.setFunctionExtensions("imageAtomicAnd", 1, &E_GL_OES_shader_image_atomic); - symbolTable.setFunctionExtensions("imageAtomicOr", 1, &E_GL_OES_shader_image_atomic); - symbolTable.setFunctionExtensions("imageAtomicXor", 1, &E_GL_OES_shader_image_atomic); - symbolTable.setFunctionExtensions("imageAtomicExchange", 1, &E_GL_OES_shader_image_atomic); - symbolTable.setFunctionExtensions("imageAtomicCompSwap", 1, &E_GL_OES_shader_image_atomic); - } - - if (spvVersion.vulkan == 0) { - SpecialQualifier("gl_VertexID", EvqVertexId, EbvVertexId, symbolTable); - SpecialQualifier("gl_InstanceID", EvqInstanceId, EbvInstanceId, symbolTable); - } - - if (spvVersion.vulkan >= 100) { - BuiltInVariable("gl_VertexIndex", EbvVertexIndex, symbolTable); - BuiltInVariable("gl_InstanceIndex", EbvInstanceIndex, symbolTable); - } - - if (version >= 300 /* both ES and non-ES */) { - symbolTable.setVariableExtensions("gl_ViewID_OVR", Num_OVR_multiview_EXTs, OVR_multiview_EXTs); - BuiltInVariable("gl_ViewID_OVR", EbvViewIndex, symbolTable); - } - - if (profile == EEsProfile) { - symbolTable.setFunctionExtensions("shadow2DEXT", 1, &E_GL_EXT_shadow_samplers); - symbolTable.setFunctionExtensions("shadow2DProjEXT", 1, &E_GL_EXT_shadow_samplers); - } - - // Fall through - - case EShLangTessControl: - if (profile == EEsProfile && version >= 310) { - BuiltInVariable("gl_BoundingBoxOES", EbvBoundingBox, symbolTable); - if (version < 320) - symbolTable.setVariableExtensions("gl_BoundingBoxOES", Num_AEP_primitive_bounding_box, - AEP_primitive_bounding_box); - } - - // Fall through - - case EShLangTessEvaluation: - case EShLangGeometry: - SpecialQualifier("gl_Position", EvqPosition, EbvPosition, symbolTable); - SpecialQualifier("gl_PointSize", EvqPointSize, EbvPointSize, symbolTable); - SpecialQualifier("gl_ClipVertex", EvqClipVertex, EbvClipVertex, symbolTable); - - BuiltInVariable("gl_in", "gl_Position", EbvPosition, symbolTable); - BuiltInVariable("gl_in", "gl_PointSize", EbvPointSize, symbolTable); - BuiltInVariable("gl_in", "gl_ClipDistance", EbvClipDistance, symbolTable); - BuiltInVariable("gl_in", "gl_CullDistance", EbvCullDistance, symbolTable); - - BuiltInVariable("gl_out", "gl_Position", EbvPosition, symbolTable); - BuiltInVariable("gl_out", "gl_PointSize", EbvPointSize, symbolTable); - BuiltInVariable("gl_out", "gl_ClipDistance", EbvClipDistance, symbolTable); - BuiltInVariable("gl_out", "gl_CullDistance", EbvCullDistance, symbolTable); - - BuiltInVariable("gl_ClipDistance", EbvClipDistance, symbolTable); - BuiltInVariable("gl_CullDistance", EbvCullDistance, symbolTable); - BuiltInVariable("gl_PrimitiveIDIn", EbvPrimitiveId, symbolTable); - BuiltInVariable("gl_PrimitiveID", EbvPrimitiveId, symbolTable); - BuiltInVariable("gl_InvocationID", EbvInvocationId, symbolTable); - BuiltInVariable("gl_Layer", EbvLayer, symbolTable); - BuiltInVariable("gl_ViewportIndex", EbvViewportIndex, symbolTable); - -#ifdef NV_EXTENSIONS - if (language != EShLangGeometry) { - 
symbolTable.setVariableExtensions("gl_Layer", Num_viewportEXTs, viewportEXTs); - symbolTable.setVariableExtensions("gl_ViewportIndex", Num_viewportEXTs, viewportEXTs); - } - symbolTable.setVariableExtensions("gl_ViewportMask", 1, &E_GL_NV_viewport_array2); - symbolTable.setVariableExtensions("gl_SecondaryPositionNV", 1, &E_GL_NV_stereo_view_rendering); - symbolTable.setVariableExtensions("gl_SecondaryViewportMaskNV", 1, &E_GL_NV_stereo_view_rendering); - symbolTable.setVariableExtensions("gl_PositionPerViewNV", 1, &E_GL_NVX_multiview_per_view_attributes); - symbolTable.setVariableExtensions("gl_ViewportMaskPerViewNV", 1, &E_GL_NVX_multiview_per_view_attributes); - - BuiltInVariable("gl_ViewportMask", EbvViewportMaskNV, symbolTable); - BuiltInVariable("gl_SecondaryPositionNV", EbvSecondaryPositionNV, symbolTable); - BuiltInVariable("gl_SecondaryViewportMaskNV", EbvSecondaryViewportMaskNV, symbolTable); - BuiltInVariable("gl_PositionPerViewNV", EbvPositionPerViewNV, symbolTable); - BuiltInVariable("gl_ViewportMaskPerViewNV", EbvViewportMaskPerViewNV, symbolTable); - - if (language != EShLangVertex) { - BuiltInVariable("gl_in", "gl_SecondaryPositionNV", EbvSecondaryPositionNV, symbolTable); - BuiltInVariable("gl_in", "gl_PositionPerViewNV", EbvPositionPerViewNV, symbolTable); - } - BuiltInVariable("gl_out", "gl_Layer", EbvLayer, symbolTable); - BuiltInVariable("gl_out", "gl_ViewportIndex", EbvViewportIndex, symbolTable); - BuiltInVariable("gl_out", "gl_ViewportMask", EbvViewportMaskNV, symbolTable); - BuiltInVariable("gl_out", "gl_SecondaryPositionNV", EbvSecondaryPositionNV, symbolTable); - BuiltInVariable("gl_out", "gl_SecondaryViewportMaskNV", EbvSecondaryViewportMaskNV, symbolTable); - BuiltInVariable("gl_out", "gl_PositionPerViewNV", EbvPositionPerViewNV, symbolTable); - BuiltInVariable("gl_out", "gl_ViewportMaskPerViewNV", EbvViewportMaskPerViewNV, symbolTable); -#endif - - BuiltInVariable("gl_PatchVerticesIn", EbvPatchVertices, symbolTable); - BuiltInVariable("gl_TessLevelOuter", EbvTessLevelOuter, symbolTable); - BuiltInVariable("gl_TessLevelInner", EbvTessLevelInner, symbolTable); - BuiltInVariable("gl_TessCoord", EbvTessCoord, symbolTable); - - if (version < 410) - symbolTable.setVariableExtensions("gl_ViewportIndex", 1, &E_GL_ARB_viewport_array); - - // Compatibility variables - - BuiltInVariable("gl_in", "gl_ClipVertex", EbvClipVertex, symbolTable); - BuiltInVariable("gl_in", "gl_FrontColor", EbvFrontColor, symbolTable); - BuiltInVariable("gl_in", "gl_BackColor", EbvBackColor, symbolTable); - BuiltInVariable("gl_in", "gl_FrontSecondaryColor", EbvFrontSecondaryColor, symbolTable); - BuiltInVariable("gl_in", "gl_BackSecondaryColor", EbvBackSecondaryColor, symbolTable); - BuiltInVariable("gl_in", "gl_TexCoord", EbvTexCoord, symbolTable); - BuiltInVariable("gl_in", "gl_FogFragCoord", EbvFogFragCoord, symbolTable); - - BuiltInVariable("gl_out", "gl_ClipVertex", EbvClipVertex, symbolTable); - BuiltInVariable("gl_out", "gl_FrontColor", EbvFrontColor, symbolTable); - BuiltInVariable("gl_out", "gl_BackColor", EbvBackColor, symbolTable); - BuiltInVariable("gl_out", "gl_FrontSecondaryColor", EbvFrontSecondaryColor, symbolTable); - BuiltInVariable("gl_out", "gl_BackSecondaryColor", EbvBackSecondaryColor, symbolTable); - BuiltInVariable("gl_out", "gl_TexCoord", EbvTexCoord, symbolTable); - BuiltInVariable("gl_out", "gl_FogFragCoord", EbvFogFragCoord, symbolTable); - - BuiltInVariable("gl_ClipVertex", EbvClipVertex, symbolTable); - BuiltInVariable("gl_FrontColor", EbvFrontColor, symbolTable); - 
BuiltInVariable("gl_BackColor", EbvBackColor, symbolTable); - BuiltInVariable("gl_FrontSecondaryColor", EbvFrontSecondaryColor, symbolTable); - BuiltInVariable("gl_BackSecondaryColor", EbvBackSecondaryColor, symbolTable); - BuiltInVariable("gl_TexCoord", EbvTexCoord, symbolTable); - BuiltInVariable("gl_FogFragCoord", EbvFogFragCoord, symbolTable); - - // gl_PointSize, when it needs to be tied to an extension, is always a member of a block. - // (Sometimes with an instance name, sometimes anonymous). - // However, the current automatic extension scheme does not work per block member, - // so for now check when parsing. - // - // if (profile == EEsProfile) { - // if (language == EShLangGeometry) - // symbolTable.setVariableExtensions("gl_PointSize", Num_AEP_geometry_point_size, AEP_geometry_point_size); - // else if (language == EShLangTessEvaluation || language == EShLangTessControl) - // symbolTable.setVariableExtensions("gl_PointSize", Num_AEP_tessellation_point_size, AEP_tessellation_point_size); - //} - - if ((profile != EEsProfile && version >= 140) || - (profile == EEsProfile && version >= 310)) { - symbolTable.setVariableExtensions("gl_DeviceIndex", 1, &E_GL_EXT_device_group); - BuiltInVariable("gl_DeviceIndex", EbvDeviceIndex, symbolTable); - symbolTable.setVariableExtensions("gl_ViewIndex", 1, &E_GL_EXT_multiview); - BuiltInVariable("gl_ViewIndex", EbvViewIndex, symbolTable); - } - - break; - - case EShLangFragment: - SpecialQualifier("gl_FrontFacing", EvqFace, EbvFace, symbolTable); - SpecialQualifier("gl_FragCoord", EvqFragCoord, EbvFragCoord, symbolTable); - SpecialQualifier("gl_PointCoord", EvqPointCoord, EbvPointCoord, symbolTable); - if (spvVersion.spv == 0) - SpecialQualifier("gl_FragColor", EvqFragColor, EbvFragColor, symbolTable); - else { - TSymbol* symbol = symbolTable.find("gl_FragColor"); - if (symbol) { - symbol->getWritableType().getQualifier().storage = EvqVaryingOut; - symbol->getWritableType().getQualifier().layoutLocation = 0; - } - } - SpecialQualifier("gl_FragDepth", EvqFragDepth, EbvFragDepth, symbolTable); - SpecialQualifier("gl_FragDepthEXT", EvqFragDepth, EbvFragDepth, symbolTable); - SpecialQualifier("gl_HelperInvocation", EvqVaryingIn, EbvHelperInvocation, symbolTable); - - BuiltInVariable("gl_ClipDistance", EbvClipDistance, symbolTable); - BuiltInVariable("gl_CullDistance", EbvCullDistance, symbolTable); - BuiltInVariable("gl_PrimitiveID", EbvPrimitiveId, symbolTable); - - if (profile != EEsProfile && version >= 140) { - symbolTable.setVariableExtensions("gl_FragStencilRefARB", 1, &E_GL_ARB_shader_stencil_export); - BuiltInVariable("gl_FragStencilRefARB", EbvFragStencilRef, symbolTable); - } - - if ((profile != EEsProfile && version >= 400) || - (profile == EEsProfile && version >= 310)) { - BuiltInVariable("gl_SampleID", EbvSampleId, symbolTable); - BuiltInVariable("gl_SamplePosition", EbvSamplePosition, symbolTable); - BuiltInVariable("gl_SampleMaskIn", EbvSampleMask, symbolTable); - BuiltInVariable("gl_SampleMask", EbvSampleMask, symbolTable); - if (profile == EEsProfile && version < 320) { - symbolTable.setVariableExtensions("gl_SampleID", 1, &E_GL_OES_sample_variables); - symbolTable.setVariableExtensions("gl_SamplePosition", 1, &E_GL_OES_sample_variables); - symbolTable.setVariableExtensions("gl_SampleMaskIn", 1, &E_GL_OES_sample_variables); - symbolTable.setVariableExtensions("gl_SampleMask", 1, &E_GL_OES_sample_variables); - symbolTable.setVariableExtensions("gl_NumSamples", 1, &E_GL_OES_sample_variables); - } - } - - BuiltInVariable("gl_Layer", 
EbvLayer, symbolTable); - BuiltInVariable("gl_ViewportIndex", EbvViewportIndex, symbolTable); - - // Compatibility variables - - BuiltInVariable("gl_in", "gl_FogFragCoord", EbvFogFragCoord, symbolTable); - BuiltInVariable("gl_in", "gl_TexCoord", EbvTexCoord, symbolTable); - BuiltInVariable("gl_in", "gl_Color", EbvColor, symbolTable); - BuiltInVariable("gl_in", "gl_SecondaryColor", EbvSecondaryColor, symbolTable); - - BuiltInVariable("gl_FogFragCoord", EbvFogFragCoord, symbolTable); - BuiltInVariable("gl_TexCoord", EbvTexCoord, symbolTable); - BuiltInVariable("gl_Color", EbvColor, symbolTable); - BuiltInVariable("gl_SecondaryColor", EbvSecondaryColor, symbolTable); - - // built-in functions - - if (profile == EEsProfile) { - if (spvVersion.spv == 0) { - symbolTable.setFunctionExtensions("texture2DLodEXT", 1, &E_GL_EXT_shader_texture_lod); - symbolTable.setFunctionExtensions("texture2DProjLodEXT", 1, &E_GL_EXT_shader_texture_lod); - symbolTable.setFunctionExtensions("textureCubeLodEXT", 1, &E_GL_EXT_shader_texture_lod); - symbolTable.setFunctionExtensions("texture2DGradEXT", 1, &E_GL_EXT_shader_texture_lod); - symbolTable.setFunctionExtensions("texture2DProjGradEXT", 1, &E_GL_EXT_shader_texture_lod); - symbolTable.setFunctionExtensions("textureCubeGradEXT", 1, &E_GL_EXT_shader_texture_lod); - if (version < 320) - symbolTable.setFunctionExtensions("textureGatherOffsets", Num_AEP_gpu_shader5, AEP_gpu_shader5); - } - if (version == 100) { - symbolTable.setFunctionExtensions("dFdx", 1, &E_GL_OES_standard_derivatives); - symbolTable.setFunctionExtensions("dFdy", 1, &E_GL_OES_standard_derivatives); - symbolTable.setFunctionExtensions("fwidth", 1, &E_GL_OES_standard_derivatives); - } - if (version == 310) { - symbolTable.setFunctionExtensions("fma", Num_AEP_gpu_shader5, AEP_gpu_shader5); - symbolTable.setFunctionExtensions("interpolateAtCentroid", 1, &E_GL_OES_shader_multisample_interpolation); - symbolTable.setFunctionExtensions("interpolateAtSample", 1, &E_GL_OES_shader_multisample_interpolation); - symbolTable.setFunctionExtensions("interpolateAtOffset", 1, &E_GL_OES_shader_multisample_interpolation); - } - } else if (version < 130) { - if (spvVersion.spv == 0) { - symbolTable.setFunctionExtensions("texture1DLod", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("texture2DLod", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("texture3DLod", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("textureCubeLod", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("texture1DProjLod", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("texture2DProjLod", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("texture3DProjLod", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("shadow1DLod", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("shadow2DLod", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("shadow1DProjLod", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("shadow2DProjLod", 1, &E_GL_ARB_shader_texture_lod); - } - } - - // E_GL_ARB_shader_texture_lod functions usable only with the extension enabled - if (profile != EEsProfile && spvVersion.spv == 0) { - symbolTable.setFunctionExtensions("texture1DGradARB", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("texture1DProjGradARB", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("texture2DGradARB", 1, 
&E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("texture2DProjGradARB", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("texture3DGradARB", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("texture3DProjGradARB", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("textureCubeGradARB", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("shadow1DGradARB", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("shadow1DProjGradARB", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("shadow2DGradARB", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("shadow2DProjGradARB", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("texture2DRectGradARB", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("texture2DRectProjGradARB", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("shadow2DRectGradARB", 1, &E_GL_ARB_shader_texture_lod); - symbolTable.setFunctionExtensions("shadow2DRectProjGradARB", 1, &E_GL_ARB_shader_texture_lod); - } - - // E_GL_ARB_shader_image_load_store - if (profile != EEsProfile && version < 420) - symbolTable.setFunctionExtensions("memoryBarrier", 1, &E_GL_ARB_shader_image_load_store); - // All the image access functions are protected by checks on the type of the first argument. - - // E_GL_ARB_shader_atomic_counters - if (profile != EEsProfile && version < 420) { - symbolTable.setFunctionExtensions("atomicCounterIncrement", 1, &E_GL_ARB_shader_atomic_counters); - symbolTable.setFunctionExtensions("atomicCounterDecrement", 1, &E_GL_ARB_shader_atomic_counters); - symbolTable.setFunctionExtensions("atomicCounter" , 1, &E_GL_ARB_shader_atomic_counters); - } - - // E_GL_ARB_derivative_control - if (profile != EEsProfile && version < 450) { - symbolTable.setFunctionExtensions("dFdxFine", 1, &E_GL_ARB_derivative_control); - symbolTable.setFunctionExtensions("dFdyFine", 1, &E_GL_ARB_derivative_control); - symbolTable.setFunctionExtensions("fwidthFine", 1, &E_GL_ARB_derivative_control); - symbolTable.setFunctionExtensions("dFdxCoarse", 1, &E_GL_ARB_derivative_control); - symbolTable.setFunctionExtensions("dFdyCoarse", 1, &E_GL_ARB_derivative_control); - symbolTable.setFunctionExtensions("fwidthCoarse", 1, &E_GL_ARB_derivative_control); - } - - // E_GL_ARB_sparse_texture2 - if (profile != EEsProfile) - { - symbolTable.setFunctionExtensions("sparseTextureARB", 1, &E_GL_ARB_sparse_texture2); - symbolTable.setFunctionExtensions("sparseTextureLodARB", 1, &E_GL_ARB_sparse_texture2); - symbolTable.setFunctionExtensions("sparseTextureOffsetARB", 1, &E_GL_ARB_sparse_texture2); - symbolTable.setFunctionExtensions("sparseTexelFetchARB", 1, &E_GL_ARB_sparse_texture2); - symbolTable.setFunctionExtensions("sparseTexelFetchOffsetARB", 1, &E_GL_ARB_sparse_texture2); - symbolTable.setFunctionExtensions("sparseTextureLodOffsetARB", 1, &E_GL_ARB_sparse_texture2); - symbolTable.setFunctionExtensions("sparseTextureGradARB", 1, &E_GL_ARB_sparse_texture2); - symbolTable.setFunctionExtensions("sparseTextureGradOffsetARB", 1, &E_GL_ARB_sparse_texture2); - symbolTable.setFunctionExtensions("sparseTextureGatherARB", 1, &E_GL_ARB_sparse_texture2); - symbolTable.setFunctionExtensions("sparseTextureGatherOffsetARB", 1, &E_GL_ARB_sparse_texture2); - symbolTable.setFunctionExtensions("sparseTextureGatherOffsetsARB", 1, &E_GL_ARB_sparse_texture2); - symbolTable.setFunctionExtensions("sparseImageLoadARB", 1, 
&E_GL_ARB_sparse_texture2); - symbolTable.setFunctionExtensions("sparseTexelsResident", 1, &E_GL_ARB_sparse_texture2); - } - - // E_GL_ARB_sparse_texture_clamp - if (profile != EEsProfile) - { - symbolTable.setFunctionExtensions("sparseTextureClampARB", 1, &E_GL_ARB_sparse_texture_clamp); - symbolTable.setFunctionExtensions("sparseTextureOffsetClampARB", 1, &E_GL_ARB_sparse_texture_clamp); - symbolTable.setFunctionExtensions("sparseTextureGradClampARB", 1, &E_GL_ARB_sparse_texture_clamp); - symbolTable.setFunctionExtensions("sparseTextureGradOffsetClampARB", 1, &E_GL_ARB_sparse_texture_clamp); - symbolTable.setFunctionExtensions("textureClampARB", 1, &E_GL_ARB_sparse_texture_clamp); - symbolTable.setFunctionExtensions("textureOffsetClampARB", 1, &E_GL_ARB_sparse_texture_clamp); - symbolTable.setFunctionExtensions("textureGradClampARB", 1, &E_GL_ARB_sparse_texture_clamp); - symbolTable.setFunctionExtensions("textureGradOffsetClampARB", 1, &E_GL_ARB_sparse_texture_clamp); - } - -#ifdef AMD_EXTENSIONS - // E_GL_AMD_shader_explicit_vertex_parameter - if (profile != EEsProfile) { - symbolTable.setVariableExtensions("gl_BaryCoordNoPerspAMD", 1, &E_GL_AMD_shader_explicit_vertex_parameter); - symbolTable.setVariableExtensions("gl_BaryCoordNoPerspCentroidAMD", 1, &E_GL_AMD_shader_explicit_vertex_parameter); - symbolTable.setVariableExtensions("gl_BaryCoordNoPerspSampleAMD", 1, &E_GL_AMD_shader_explicit_vertex_parameter); - symbolTable.setVariableExtensions("gl_BaryCoordSmoothAMD", 1, &E_GL_AMD_shader_explicit_vertex_parameter); - symbolTable.setVariableExtensions("gl_BaryCoordSmoothCentroidAMD", 1, &E_GL_AMD_shader_explicit_vertex_parameter); - symbolTable.setVariableExtensions("gl_BaryCoordSmoothSampleAMD", 1, &E_GL_AMD_shader_explicit_vertex_parameter); - symbolTable.setVariableExtensions("gl_BaryCoordPullModelAMD", 1, &E_GL_AMD_shader_explicit_vertex_parameter); - - symbolTable.setFunctionExtensions("interpolateAtVertexAMD", 1, &E_GL_AMD_shader_explicit_vertex_parameter); - - BuiltInVariable("gl_BaryCoordNoPerspAMD", EbvBaryCoordNoPersp, symbolTable); - BuiltInVariable("gl_BaryCoordNoPerspCentroidAMD", EbvBaryCoordNoPerspCentroid, symbolTable); - BuiltInVariable("gl_BaryCoordNoPerspSampleAMD", EbvBaryCoordNoPerspSample, symbolTable); - BuiltInVariable("gl_BaryCoordSmoothAMD", EbvBaryCoordSmooth, symbolTable); - BuiltInVariable("gl_BaryCoordSmoothCentroidAMD", EbvBaryCoordSmoothCentroid, symbolTable); - BuiltInVariable("gl_BaryCoordSmoothSampleAMD", EbvBaryCoordSmoothSample, symbolTable); - BuiltInVariable("gl_BaryCoordPullModelAMD", EbvBaryCoordPullModel, symbolTable); - } - - // E_GL_AMD_texture_gather_bias_lod - if (profile != EEsProfile) { - symbolTable.setFunctionExtensions("textureGatherLodAMD", 1, &E_GL_AMD_texture_gather_bias_lod); - symbolTable.setFunctionExtensions("textureGatherLodOffsetAMD", 1, &E_GL_AMD_texture_gather_bias_lod); - symbolTable.setFunctionExtensions("textureGatherLodOffsetsAMD", 1, &E_GL_AMD_texture_gather_bias_lod); - symbolTable.setFunctionExtensions("sparseTextureGatherLodAMD", 1, &E_GL_AMD_texture_gather_bias_lod); - symbolTable.setFunctionExtensions("sparseTextureGatherLodOffsetAMD", 1, &E_GL_AMD_texture_gather_bias_lod); - symbolTable.setFunctionExtensions("sparseTextureGatherLodOffsetsAMD", 1, &E_GL_AMD_texture_gather_bias_lod); - } - - // E_GL_AMD_shader_image_load_store_lod - if (profile != EEsProfile) { - symbolTable.setFunctionExtensions("imageLoadLodAMD", 1, &E_GL_AMD_shader_image_load_store_lod); - symbolTable.setFunctionExtensions("imageStoreLodAMD", 1, 
&E_GL_AMD_shader_image_load_store_lod); - symbolTable.setFunctionExtensions("sparseImageLoadLodAMD", 1, &E_GL_AMD_shader_image_load_store_lod); - } -#endif - - symbolTable.setVariableExtensions("gl_FragDepthEXT", 1, &E_GL_EXT_frag_depth); - - if (profile == EEsProfile && version < 320) { - symbolTable.setVariableExtensions("gl_PrimitiveID", Num_AEP_geometry_shader, AEP_geometry_shader); - symbolTable.setVariableExtensions("gl_Layer", Num_AEP_geometry_shader, AEP_geometry_shader); - } - - if (profile == EEsProfile && version < 320) { - symbolTable.setFunctionExtensions("imageAtomicAdd", 1, &E_GL_OES_shader_image_atomic); - symbolTable.setFunctionExtensions("imageAtomicMin", 1, &E_GL_OES_shader_image_atomic); - symbolTable.setFunctionExtensions("imageAtomicMax", 1, &E_GL_OES_shader_image_atomic); - symbolTable.setFunctionExtensions("imageAtomicAnd", 1, &E_GL_OES_shader_image_atomic); - symbolTable.setFunctionExtensions("imageAtomicOr", 1, &E_GL_OES_shader_image_atomic); - symbolTable.setFunctionExtensions("imageAtomicXor", 1, &E_GL_OES_shader_image_atomic); - symbolTable.setFunctionExtensions("imageAtomicExchange", 1, &E_GL_OES_shader_image_atomic); - symbolTable.setFunctionExtensions("imageAtomicCompSwap", 1, &E_GL_OES_shader_image_atomic); - } - - symbolTable.setVariableExtensions("gl_DeviceIndex", 1, &E_GL_EXT_device_group); - BuiltInVariable("gl_DeviceIndex", EbvDeviceIndex, symbolTable); - symbolTable.setVariableExtensions("gl_ViewIndex", 1, &E_GL_EXT_multiview); - BuiltInVariable("gl_ViewIndex", EbvViewIndex, symbolTable); - if (version >= 300 /* both ES and non-ES */) { - symbolTable.setVariableExtensions("gl_ViewID_OVR", Num_OVR_multiview_EXTs, OVR_multiview_EXTs); - BuiltInVariable("gl_ViewID_OVR", EbvViewIndex, symbolTable); - } - - if (profile == EEsProfile) { - symbolTable.setFunctionExtensions("shadow2DEXT", 1, &E_GL_EXT_shadow_samplers); - symbolTable.setFunctionExtensions("shadow2DProjEXT", 1, &E_GL_EXT_shadow_samplers); - } - break; - - case EShLangCompute: - BuiltInVariable("gl_NumWorkGroups", EbvNumWorkGroups, symbolTable); - BuiltInVariable("gl_WorkGroupSize", EbvWorkGroupSize, symbolTable); - BuiltInVariable("gl_WorkGroupID", EbvWorkGroupId, symbolTable); - BuiltInVariable("gl_LocalInvocationID", EbvLocalInvocationId, symbolTable); - BuiltInVariable("gl_GlobalInvocationID", EbvGlobalInvocationId, symbolTable); - BuiltInVariable("gl_LocalInvocationIndex", EbvLocalInvocationIndex, symbolTable); - - if (profile != EEsProfile && version < 430) { - symbolTable.setVariableExtensions("gl_NumWorkGroups", 1, &E_GL_ARB_compute_shader); - symbolTable.setVariableExtensions("gl_WorkGroupSize", 1, &E_GL_ARB_compute_shader); - symbolTable.setVariableExtensions("gl_WorkGroupID", 1, &E_GL_ARB_compute_shader); - symbolTable.setVariableExtensions("gl_LocalInvocationID", 1, &E_GL_ARB_compute_shader); - symbolTable.setVariableExtensions("gl_GlobalInvocationID", 1, &E_GL_ARB_compute_shader); - symbolTable.setVariableExtensions("gl_LocalInvocationIndex", 1, &E_GL_ARB_compute_shader); - - symbolTable.setVariableExtensions("gl_MaxComputeWorkGroupCount", 1, &E_GL_ARB_compute_shader); - symbolTable.setVariableExtensions("gl_MaxComputeWorkGroupSize", 1, &E_GL_ARB_compute_shader); - symbolTable.setVariableExtensions("gl_MaxComputeUniformComponents", 1, &E_GL_ARB_compute_shader); - symbolTable.setVariableExtensions("gl_MaxComputeTextureImageUnits", 1, &E_GL_ARB_compute_shader); - symbolTable.setVariableExtensions("gl_MaxComputeImageUniforms", 1, &E_GL_ARB_compute_shader); - 
symbolTable.setVariableExtensions("gl_MaxComputeAtomicCounters", 1, &E_GL_ARB_compute_shader); - symbolTable.setVariableExtensions("gl_MaxComputeAtomicCounterBuffers", 1, &E_GL_ARB_compute_shader); - - symbolTable.setFunctionExtensions("barrier", 1, &E_GL_ARB_compute_shader); - symbolTable.setFunctionExtensions("memoryBarrierAtomicCounter", 1, &E_GL_ARB_compute_shader); - symbolTable.setFunctionExtensions("memoryBarrierBuffer", 1, &E_GL_ARB_compute_shader); - symbolTable.setFunctionExtensions("memoryBarrierImage", 1, &E_GL_ARB_compute_shader); - symbolTable.setFunctionExtensions("memoryBarrierShared", 1, &E_GL_ARB_compute_shader); - symbolTable.setFunctionExtensions("groupMemoryBarrier", 1, &E_GL_ARB_compute_shader); - } - - if ((profile != EEsProfile && version >= 140) || - (profile == EEsProfile && version >= 310)) { - symbolTable.setVariableExtensions("gl_DeviceIndex", 1, &E_GL_EXT_device_group); - BuiltInVariable("gl_DeviceIndex", EbvDeviceIndex, symbolTable); - symbolTable.setVariableExtensions("gl_ViewIndex", 1, &E_GL_EXT_multiview); - BuiltInVariable("gl_ViewIndex", EbvViewIndex, symbolTable); - } - - break; - - default: - assert(false && "Language not supported"); - break; - } - - // - // Next, identify which built-ins have a mapping to an operator. - // If PureOperatorBuiltins is false, those that are not identified as such are - // expected to be resolved through a library of functions, versus as - // operations. - // - symbolTable.relateToOperator("not", EOpVectorLogicalNot); - - symbolTable.relateToOperator("matrixCompMult", EOpMul); - // 120 and 150 are correct for both ES and desktop - if (version >= 120) { - symbolTable.relateToOperator("outerProduct", EOpOuterProduct); - symbolTable.relateToOperator("transpose", EOpTranspose); - if (version >= 150) { - symbolTable.relateToOperator("determinant", EOpDeterminant); - symbolTable.relateToOperator("inverse", EOpMatrixInverse); - } - } - - symbolTable.relateToOperator("mod", EOpMod); - symbolTable.relateToOperator("modf", EOpModf); - - symbolTable.relateToOperator("equal", EOpVectorEqual); - symbolTable.relateToOperator("notEqual", EOpVectorNotEqual); - symbolTable.relateToOperator("lessThan", EOpLessThan); - symbolTable.relateToOperator("greaterThan", EOpGreaterThan); - symbolTable.relateToOperator("lessThanEqual", EOpLessThanEqual); - symbolTable.relateToOperator("greaterThanEqual", EOpGreaterThanEqual); - - symbolTable.relateToOperator("radians", EOpRadians); - symbolTable.relateToOperator("degrees", EOpDegrees); - symbolTable.relateToOperator("sin", EOpSin); - symbolTable.relateToOperator("cos", EOpCos); - symbolTable.relateToOperator("tan", EOpTan); - symbolTable.relateToOperator("asin", EOpAsin); - symbolTable.relateToOperator("acos", EOpAcos); - symbolTable.relateToOperator("atan", EOpAtan); - symbolTable.relateToOperator("sinh", EOpSinh); - symbolTable.relateToOperator("cosh", EOpCosh); - symbolTable.relateToOperator("tanh", EOpTanh); - symbolTable.relateToOperator("asinh", EOpAsinh); - symbolTable.relateToOperator("acosh", EOpAcosh); - symbolTable.relateToOperator("atanh", EOpAtanh); - - symbolTable.relateToOperator("pow", EOpPow); - symbolTable.relateToOperator("exp2", EOpExp2); - symbolTable.relateToOperator("log", EOpLog); - symbolTable.relateToOperator("exp", EOpExp); - symbolTable.relateToOperator("log2", EOpLog2); - symbolTable.relateToOperator("sqrt", EOpSqrt); - symbolTable.relateToOperator("inversesqrt", EOpInverseSqrt); - - symbolTable.relateToOperator("abs", EOpAbs); - symbolTable.relateToOperator("sign", 
EOpSign); - symbolTable.relateToOperator("floor", EOpFloor); - symbolTable.relateToOperator("trunc", EOpTrunc); - symbolTable.relateToOperator("round", EOpRound); - symbolTable.relateToOperator("roundEven", EOpRoundEven); - symbolTable.relateToOperator("ceil", EOpCeil); - symbolTable.relateToOperator("fract", EOpFract); - symbolTable.relateToOperator("min", EOpMin); - symbolTable.relateToOperator("max", EOpMax); - symbolTable.relateToOperator("clamp", EOpClamp); - symbolTable.relateToOperator("mix", EOpMix); - symbolTable.relateToOperator("step", EOpStep); - symbolTable.relateToOperator("smoothstep", EOpSmoothStep); - - symbolTable.relateToOperator("isnan", EOpIsNan); - symbolTable.relateToOperator("isinf", EOpIsInf); - - symbolTable.relateToOperator("floatBitsToInt", EOpFloatBitsToInt); - symbolTable.relateToOperator("floatBitsToUint", EOpFloatBitsToUint); - symbolTable.relateToOperator("intBitsToFloat", EOpIntBitsToFloat); - symbolTable.relateToOperator("uintBitsToFloat", EOpUintBitsToFloat); - symbolTable.relateToOperator("doubleBitsToInt64", EOpDoubleBitsToInt64); - symbolTable.relateToOperator("doubleBitsToUint64", EOpDoubleBitsToUint64); - symbolTable.relateToOperator("int64BitsToDouble", EOpInt64BitsToDouble); - symbolTable.relateToOperator("uint64BitsToDouble", EOpUint64BitsToDouble); -#ifdef AMD_EXTENSIONS - symbolTable.relateToOperator("float16BitsToInt16", EOpFloat16BitsToInt16); - symbolTable.relateToOperator("float16BitsToUint16", EOpFloat16BitsToUint16); - symbolTable.relateToOperator("int16BitsToFloat16", EOpInt16BitsToFloat16); - symbolTable.relateToOperator("uint16BitsToFloat16", EOpUint16BitsToFloat16); -#endif - - symbolTable.relateToOperator("packSnorm2x16", EOpPackSnorm2x16); - symbolTable.relateToOperator("unpackSnorm2x16", EOpUnpackSnorm2x16); - symbolTable.relateToOperator("packUnorm2x16", EOpPackUnorm2x16); - symbolTable.relateToOperator("unpackUnorm2x16", EOpUnpackUnorm2x16); - - symbolTable.relateToOperator("packSnorm4x8", EOpPackSnorm4x8); - symbolTable.relateToOperator("unpackSnorm4x8", EOpUnpackSnorm4x8); - symbolTable.relateToOperator("packUnorm4x8", EOpPackUnorm4x8); - symbolTable.relateToOperator("unpackUnorm4x8", EOpUnpackUnorm4x8); - - symbolTable.relateToOperator("packDouble2x32", EOpPackDouble2x32); - symbolTable.relateToOperator("unpackDouble2x32", EOpUnpackDouble2x32); - - symbolTable.relateToOperator("packHalf2x16", EOpPackHalf2x16); - symbolTable.relateToOperator("unpackHalf2x16", EOpUnpackHalf2x16); - - symbolTable.relateToOperator("packInt2x32", EOpPackInt2x32); - symbolTable.relateToOperator("unpackInt2x32", EOpUnpackInt2x32); - symbolTable.relateToOperator("packUint2x32", EOpPackUint2x32); - symbolTable.relateToOperator("unpackUint2x32", EOpUnpackUint2x32); - -#ifdef AMD_EXTENSIONS - symbolTable.relateToOperator("packInt2x16", EOpPackInt2x16); - symbolTable.relateToOperator("unpackInt2x16", EOpUnpackInt2x16); - symbolTable.relateToOperator("packUint2x16", EOpPackUint2x16); - symbolTable.relateToOperator("unpackUint2x16", EOpUnpackUint2x16); - - symbolTable.relateToOperator("packInt4x16", EOpPackInt4x16); - symbolTable.relateToOperator("unpackInt4x16", EOpUnpackInt4x16); - symbolTable.relateToOperator("packUint4x16", EOpPackUint4x16); - symbolTable.relateToOperator("unpackUint4x16", EOpUnpackUint4x16); - - symbolTable.relateToOperator("packFloat2x16", EOpPackFloat2x16); - symbolTable.relateToOperator("unpackFloat2x16", EOpUnpackFloat2x16); -#endif - - symbolTable.relateToOperator("length", EOpLength); - symbolTable.relateToOperator("distance", 
EOpDistance); - symbolTable.relateToOperator("dot", EOpDot); - symbolTable.relateToOperator("cross", EOpCross); - symbolTable.relateToOperator("normalize", EOpNormalize); - symbolTable.relateToOperator("faceforward", EOpFaceForward); - symbolTable.relateToOperator("reflect", EOpReflect); - symbolTable.relateToOperator("refract", EOpRefract); - - symbolTable.relateToOperator("any", EOpAny); - symbolTable.relateToOperator("all", EOpAll); - - symbolTable.relateToOperator("barrier", EOpBarrier); - symbolTable.relateToOperator("memoryBarrier", EOpMemoryBarrier); - symbolTable.relateToOperator("memoryBarrierAtomicCounter", EOpMemoryBarrierAtomicCounter); - symbolTable.relateToOperator("memoryBarrierBuffer", EOpMemoryBarrierBuffer); - symbolTable.relateToOperator("memoryBarrierImage", EOpMemoryBarrierImage); - - symbolTable.relateToOperator("atomicAdd", EOpAtomicAdd); - symbolTable.relateToOperator("atomicMin", EOpAtomicMin); - symbolTable.relateToOperator("atomicMax", EOpAtomicMax); - symbolTable.relateToOperator("atomicAnd", EOpAtomicAnd); - symbolTable.relateToOperator("atomicOr", EOpAtomicOr); - symbolTable.relateToOperator("atomicXor", EOpAtomicXor); - symbolTable.relateToOperator("atomicExchange", EOpAtomicExchange); - symbolTable.relateToOperator("atomicCompSwap", EOpAtomicCompSwap); - - symbolTable.relateToOperator("atomicCounterIncrement", EOpAtomicCounterIncrement); - symbolTable.relateToOperator("atomicCounterDecrement", EOpAtomicCounterDecrement); - symbolTable.relateToOperator("atomicCounter", EOpAtomicCounter); - - if (profile != EEsProfile && version >= 460) { - symbolTable.relateToOperator("atomicCounterAdd", EOpAtomicCounterAdd); - symbolTable.relateToOperator("atomicCounterSubtract", EOpAtomicCounterSubtract); - symbolTable.relateToOperator("atomicCounterMin", EOpAtomicCounterMin); - symbolTable.relateToOperator("atomicCounterMax", EOpAtomicCounterMax); - symbolTable.relateToOperator("atomicCounterAnd", EOpAtomicCounterAnd); - symbolTable.relateToOperator("atomicCounterOr", EOpAtomicCounterOr); - symbolTable.relateToOperator("atomicCounterXor", EOpAtomicCounterXor); - symbolTable.relateToOperator("atomicCounterExchange", EOpAtomicCounterExchange); - symbolTable.relateToOperator("atomicCounterCompSwap", EOpAtomicCounterCompSwap); - } - - symbolTable.relateToOperator("fma", EOpFma); - symbolTable.relateToOperator("frexp", EOpFrexp); - symbolTable.relateToOperator("ldexp", EOpLdexp); - symbolTable.relateToOperator("uaddCarry", EOpAddCarry); - symbolTable.relateToOperator("usubBorrow", EOpSubBorrow); - symbolTable.relateToOperator("umulExtended", EOpUMulExtended); - symbolTable.relateToOperator("imulExtended", EOpIMulExtended); - symbolTable.relateToOperator("bitfieldExtract", EOpBitfieldExtract); - symbolTable.relateToOperator("bitfieldInsert", EOpBitfieldInsert); - symbolTable.relateToOperator("bitfieldReverse", EOpBitFieldReverse); - symbolTable.relateToOperator("bitCount", EOpBitCount); - symbolTable.relateToOperator("findLSB", EOpFindLSB); - symbolTable.relateToOperator("findMSB", EOpFindMSB); - - if (PureOperatorBuiltins) { - symbolTable.relateToOperator("imageSize", EOpImageQuerySize); - symbolTable.relateToOperator("imageSamples", EOpImageQuerySamples); - symbolTable.relateToOperator("imageLoad", EOpImageLoad); - symbolTable.relateToOperator("imageStore", EOpImageStore); - symbolTable.relateToOperator("imageAtomicAdd", EOpImageAtomicAdd); - symbolTable.relateToOperator("imageAtomicMin", EOpImageAtomicMin); - symbolTable.relateToOperator("imageAtomicMax", EOpImageAtomicMax); 
- symbolTable.relateToOperator("imageAtomicAnd", EOpImageAtomicAnd); - symbolTable.relateToOperator("imageAtomicOr", EOpImageAtomicOr); - symbolTable.relateToOperator("imageAtomicXor", EOpImageAtomicXor); - symbolTable.relateToOperator("imageAtomicExchange", EOpImageAtomicExchange); - symbolTable.relateToOperator("imageAtomicCompSwap", EOpImageAtomicCompSwap); - - symbolTable.relateToOperator("subpassLoad", EOpSubpassLoad); - symbolTable.relateToOperator("subpassLoadMS", EOpSubpassLoadMS); - - symbolTable.relateToOperator("textureSize", EOpTextureQuerySize); - symbolTable.relateToOperator("textureQueryLod", EOpTextureQueryLod); - symbolTable.relateToOperator("textureQueryLevels", EOpTextureQueryLevels); - symbolTable.relateToOperator("textureSamples", EOpTextureQuerySamples); - symbolTable.relateToOperator("texture", EOpTexture); - symbolTable.relateToOperator("textureProj", EOpTextureProj); - symbolTable.relateToOperator("textureLod", EOpTextureLod); - symbolTable.relateToOperator("textureOffset", EOpTextureOffset); - symbolTable.relateToOperator("texelFetch", EOpTextureFetch); - symbolTable.relateToOperator("texelFetchOffset", EOpTextureFetchOffset); - symbolTable.relateToOperator("textureProjOffset", EOpTextureProjOffset); - symbolTable.relateToOperator("textureLodOffset", EOpTextureLodOffset); - symbolTable.relateToOperator("textureProjLod", EOpTextureProjLod); - symbolTable.relateToOperator("textureProjLodOffset", EOpTextureProjLodOffset); - symbolTable.relateToOperator("textureGrad", EOpTextureGrad); - symbolTable.relateToOperator("textureGradOffset", EOpTextureGradOffset); - symbolTable.relateToOperator("textureProjGrad", EOpTextureProjGrad); - symbolTable.relateToOperator("textureProjGradOffset", EOpTextureProjGradOffset); - symbolTable.relateToOperator("textureGather", EOpTextureGather); - symbolTable.relateToOperator("textureGatherOffset", EOpTextureGatherOffset); - symbolTable.relateToOperator("textureGatherOffsets", EOpTextureGatherOffsets); - - symbolTable.relateToOperator("noise1", EOpNoise); - symbolTable.relateToOperator("noise2", EOpNoise); - symbolTable.relateToOperator("noise3", EOpNoise); - symbolTable.relateToOperator("noise4", EOpNoise); - - if (spvVersion.spv == 0 && (IncludeLegacy(version, profile, spvVersion) || - (profile == EEsProfile && version == 100))) { - symbolTable.relateToOperator("ftransform", EOpFtransform); - - symbolTable.relateToOperator("texture1D", EOpTexture); - symbolTable.relateToOperator("texture1DGradARB", EOpTextureGrad); - symbolTable.relateToOperator("texture1DProj", EOpTextureProj); - symbolTable.relateToOperator("texture1DProjGradARB", EOpTextureProjGrad); - symbolTable.relateToOperator("texture1DLod", EOpTextureLod); - symbolTable.relateToOperator("texture1DProjLod", EOpTextureProjLod); - - symbolTable.relateToOperator("texture2DRect", EOpTexture); - symbolTable.relateToOperator("texture2DRectProj", EOpTextureProj); - symbolTable.relateToOperator("texture2DRectGradARB", EOpTextureGrad); - symbolTable.relateToOperator("texture2DRectProjGradARB", EOpTextureProjGrad); - symbolTable.relateToOperator("shadow2DRect", EOpTexture); - symbolTable.relateToOperator("shadow2DRectProj", EOpTextureProj); - symbolTable.relateToOperator("shadow2DRectGradARB", EOpTextureGrad); - symbolTable.relateToOperator("shadow2DRectProjGradARB", EOpTextureProjGrad); - - symbolTable.relateToOperator("texture2D", EOpTexture); - symbolTable.relateToOperator("texture2DProj", EOpTextureProj); - symbolTable.relateToOperator("texture2DGradEXT", EOpTextureGrad); - 
symbolTable.relateToOperator("texture2DGradARB", EOpTextureGrad); - symbolTable.relateToOperator("texture2DProjGradEXT", EOpTextureProjGrad); - symbolTable.relateToOperator("texture2DProjGradARB", EOpTextureProjGrad); - symbolTable.relateToOperator("texture2DLod", EOpTextureLod); - symbolTable.relateToOperator("texture2DLodEXT", EOpTextureLod); - symbolTable.relateToOperator("texture2DProjLod", EOpTextureProjLod); - symbolTable.relateToOperator("texture2DProjLodEXT", EOpTextureProjLod); - - symbolTable.relateToOperator("texture3D", EOpTexture); - symbolTable.relateToOperator("texture3DGradARB", EOpTextureGrad); - symbolTable.relateToOperator("texture3DProj", EOpTextureProj); - symbolTable.relateToOperator("texture3DProjGradARB", EOpTextureProjGrad); - symbolTable.relateToOperator("texture3DLod", EOpTextureLod); - symbolTable.relateToOperator("texture3DProjLod", EOpTextureProjLod); - symbolTable.relateToOperator("textureCube", EOpTexture); - symbolTable.relateToOperator("textureCubeGradEXT", EOpTextureGrad); - symbolTable.relateToOperator("textureCubeGradARB", EOpTextureGrad); - symbolTable.relateToOperator("textureCubeLod", EOpTextureLod); - symbolTable.relateToOperator("textureCubeLodEXT", EOpTextureLod); - symbolTable.relateToOperator("shadow1D", EOpTexture); - symbolTable.relateToOperator("shadow1DGradARB", EOpTextureGrad); - symbolTable.relateToOperator("shadow2D", EOpTexture); - symbolTable.relateToOperator("shadow2DGradARB", EOpTextureGrad); - symbolTable.relateToOperator("shadow1DProj", EOpTextureProj); - symbolTable.relateToOperator("shadow2DProj", EOpTextureProj); - symbolTable.relateToOperator("shadow1DProjGradARB", EOpTextureProjGrad); - symbolTable.relateToOperator("shadow2DProjGradARB", EOpTextureProjGrad); - symbolTable.relateToOperator("shadow1DLod", EOpTextureLod); - symbolTable.relateToOperator("shadow2DLod", EOpTextureLod); - symbolTable.relateToOperator("shadow1DProjLod", EOpTextureProjLod); - symbolTable.relateToOperator("shadow2DProjLod", EOpTextureProjLod); - } - - if (profile != EEsProfile) { - symbolTable.relateToOperator("sparseTextureARB", EOpSparseTexture); - symbolTable.relateToOperator("sparseTextureLodARB", EOpSparseTextureLod); - symbolTable.relateToOperator("sparseTextureOffsetARB", EOpSparseTextureOffset); - symbolTable.relateToOperator("sparseTexelFetchARB", EOpSparseTextureFetch); - symbolTable.relateToOperator("sparseTexelFetchOffsetARB", EOpSparseTextureFetchOffset); - symbolTable.relateToOperator("sparseTextureLodOffsetARB", EOpSparseTextureLodOffset); - symbolTable.relateToOperator("sparseTextureGradARB", EOpSparseTextureGrad); - symbolTable.relateToOperator("sparseTextureGradOffsetARB", EOpSparseTextureGradOffset); - symbolTable.relateToOperator("sparseTextureGatherARB", EOpSparseTextureGather); - symbolTable.relateToOperator("sparseTextureGatherOffsetARB", EOpSparseTextureGatherOffset); - symbolTable.relateToOperator("sparseTextureGatherOffsetsARB", EOpSparseTextureGatherOffsets); - symbolTable.relateToOperator("sparseImageLoadARB", EOpSparseImageLoad); - symbolTable.relateToOperator("sparseTexelsResidentARB", EOpSparseTexelsResident); - - symbolTable.relateToOperator("sparseTextureClampARB", EOpSparseTextureClamp); - symbolTable.relateToOperator("sparseTextureOffsetClampARB", EOpSparseTextureOffsetClamp); - symbolTable.relateToOperator("sparseTextureGradClampARB", EOpSparseTextureGradClamp); - symbolTable.relateToOperator("sparseTextureGradOffsetClampARB", EOpSparseTextureGradOffsetClamp); - symbolTable.relateToOperator("textureClampARB", 
EOpTextureClamp); - symbolTable.relateToOperator("textureOffsetClampARB", EOpTextureOffsetClamp); - symbolTable.relateToOperator("textureGradClampARB", EOpTextureGradClamp); - symbolTable.relateToOperator("textureGradOffsetClampARB", EOpTextureGradOffsetClamp); - - symbolTable.relateToOperator("ballotARB", EOpBallot); - symbolTable.relateToOperator("readInvocationARB", EOpReadInvocation); - symbolTable.relateToOperator("readFirstInvocationARB", EOpReadFirstInvocation); - - if (version >= 430) { - symbolTable.relateToOperator("anyInvocationARB", EOpAnyInvocation); - symbolTable.relateToOperator("allInvocationsARB", EOpAllInvocations); - symbolTable.relateToOperator("allInvocationsEqualARB", EOpAllInvocationsEqual); - } - if (version >= 460) { - symbolTable.relateToOperator("anyInvocation", EOpAnyInvocation); - symbolTable.relateToOperator("allInvocations", EOpAllInvocations); - symbolTable.relateToOperator("allInvocationsEqual", EOpAllInvocationsEqual); - } -#ifdef AMD_EXTENSIONS - symbolTable.relateToOperator("minInvocationsAMD", EOpMinInvocations); - symbolTable.relateToOperator("maxInvocationsAMD", EOpMaxInvocations); - symbolTable.relateToOperator("addInvocationsAMD", EOpAddInvocations); - symbolTable.relateToOperator("minInvocationsNonUniformAMD", EOpMinInvocationsNonUniform); - symbolTable.relateToOperator("maxInvocationsNonUniformAMD", EOpMaxInvocationsNonUniform); - symbolTable.relateToOperator("addInvocationsNonUniformAMD", EOpAddInvocationsNonUniform); - symbolTable.relateToOperator("minInvocationsInclusiveScanAMD", EOpMinInvocationsInclusiveScan); - symbolTable.relateToOperator("maxInvocationsInclusiveScanAMD", EOpMaxInvocationsInclusiveScan); - symbolTable.relateToOperator("addInvocationsInclusiveScanAMD", EOpAddInvocationsInclusiveScan); - symbolTable.relateToOperator("minInvocationsInclusiveScanNonUniformAMD", EOpMinInvocationsInclusiveScanNonUniform); - symbolTable.relateToOperator("maxInvocationsInclusiveScanNonUniformAMD", EOpMaxInvocationsInclusiveScanNonUniform); - symbolTable.relateToOperator("addInvocationsInclusiveScanNonUniformAMD", EOpAddInvocationsInclusiveScanNonUniform); - symbolTable.relateToOperator("minInvocationsExclusiveScanAMD", EOpMinInvocationsExclusiveScan); - symbolTable.relateToOperator("maxInvocationsExclusiveScanAMD", EOpMaxInvocationsExclusiveScan); - symbolTable.relateToOperator("addInvocationsExclusiveScanAMD", EOpAddInvocationsExclusiveScan); - symbolTable.relateToOperator("minInvocationsExclusiveScanNonUniformAMD", EOpMinInvocationsExclusiveScanNonUniform); - symbolTable.relateToOperator("maxInvocationsExclusiveScanNonUniformAMD", EOpMaxInvocationsExclusiveScanNonUniform); - symbolTable.relateToOperator("addInvocationsExclusiveScanNonUniformAMD", EOpAddInvocationsExclusiveScanNonUniform); - symbolTable.relateToOperator("swizzleInvocationsAMD", EOpSwizzleInvocations); - symbolTable.relateToOperator("swizzleInvocationsMaskedAMD", EOpSwizzleInvocationsMasked); - symbolTable.relateToOperator("writeInvocationAMD", EOpWriteInvocation); - symbolTable.relateToOperator("mbcntAMD", EOpMbcnt); - - symbolTable.relateToOperator("min3", EOpMin3); - symbolTable.relateToOperator("max3", EOpMax3); - symbolTable.relateToOperator("mid3", EOpMid3); - - symbolTable.relateToOperator("cubeFaceIndexAMD", EOpCubeFaceIndex); - symbolTable.relateToOperator("cubeFaceCoordAMD", EOpCubeFaceCoord); - symbolTable.relateToOperator("timeAMD", EOpTime); - - symbolTable.relateToOperator("textureGatherLodAMD", EOpTextureGatherLod); - 
symbolTable.relateToOperator("textureGatherLodOffsetAMD", EOpTextureGatherLodOffset); - symbolTable.relateToOperator("textureGatherLodOffsetsAMD", EOpTextureGatherLodOffsets); - symbolTable.relateToOperator("sparseTextureGatherLodAMD", EOpSparseTextureGatherLod); - symbolTable.relateToOperator("sparseTextureGatherLodOffsetAMD", EOpSparseTextureGatherLodOffset); - symbolTable.relateToOperator("sparseTextureGatherLodOffsetsAMD", EOpSparseTextureGatherLodOffsets); - - symbolTable.relateToOperator("imageLoadLodAMD", EOpImageLoadLod); - symbolTable.relateToOperator("imageStoreLodAMD", EOpImageStoreLod); - symbolTable.relateToOperator("sparseImageLoadLodAMD", EOpSparseImageLoadLod); -#endif - } - if (profile == EEsProfile) { - symbolTable.relateToOperator("shadow2DEXT", EOpTexture); - symbolTable.relateToOperator("shadow2DProjEXT", EOpTextureProj); - } - } - - switch(language) { - case EShLangVertex: - break; - - case EShLangTessControl: - case EShLangTessEvaluation: - break; - - case EShLangGeometry: - symbolTable.relateToOperator("EmitStreamVertex", EOpEmitStreamVertex); - symbolTable.relateToOperator("EndStreamPrimitive", EOpEndStreamPrimitive); - symbolTable.relateToOperator("EmitVertex", EOpEmitVertex); - symbolTable.relateToOperator("EndPrimitive", EOpEndPrimitive); - break; - - case EShLangFragment: - symbolTable.relateToOperator("dFdx", EOpDPdx); - symbolTable.relateToOperator("dFdy", EOpDPdy); - symbolTable.relateToOperator("fwidth", EOpFwidth); - if (profile != EEsProfile && version >= 400) { - symbolTable.relateToOperator("dFdxFine", EOpDPdxFine); - symbolTable.relateToOperator("dFdyFine", EOpDPdyFine); - symbolTable.relateToOperator("fwidthFine", EOpFwidthFine); - symbolTable.relateToOperator("dFdxCoarse", EOpDPdxCoarse); - symbolTable.relateToOperator("dFdyCoarse", EOpDPdyCoarse); - symbolTable.relateToOperator("fwidthCoarse", EOpFwidthCoarse); - } - symbolTable.relateToOperator("interpolateAtCentroid", EOpInterpolateAtCentroid); - symbolTable.relateToOperator("interpolateAtSample", EOpInterpolateAtSample); - symbolTable.relateToOperator("interpolateAtOffset", EOpInterpolateAtOffset); - -#ifdef AMD_EXTENSIONS - if (profile != EEsProfile) - symbolTable.relateToOperator("interpolateAtVertexAMD", EOpInterpolateAtVertex); - break; -#endif - - case EShLangCompute: - symbolTable.relateToOperator("memoryBarrierShared", EOpMemoryBarrierShared); - symbolTable.relateToOperator("groupMemoryBarrier", EOpGroupMemoryBarrier); - break; - - default: - assert(false && "Language not supported"); - } -} - -// -// Add context-dependent (resource-specific) built-ins not handled by the above. These -// would be ones that need to be programmatically added because they cannot -// be added by simple text strings. For these, also -// 1) Map built-in functions to operators, for those that will turn into an operation node -// instead of remaining a function call. -// 2) Tag extension-related symbols added to their base version with their extensions, so -// that if an early version has the extension turned off, there is an error reported on use. 
-// -void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion& spvVersion, EShLanguage language, TSymbolTable& symbolTable, const TBuiltInResource &resources) -{ - if (profile != EEsProfile && version >= 430 && version < 440) { - symbolTable.setVariableExtensions("gl_MaxTransformFeedbackBuffers", 1, &E_GL_ARB_enhanced_layouts); - symbolTable.setVariableExtensions("gl_MaxTransformFeedbackInterleavedComponents", 1, &E_GL_ARB_enhanced_layouts); - } - if (profile != EEsProfile && version >= 130 && version < 420) { - symbolTable.setVariableExtensions("gl_MinProgramTexelOffset", 1, &E_GL_ARB_shading_language_420pack); - symbolTable.setVariableExtensions("gl_MaxProgramTexelOffset", 1, &E_GL_ARB_shading_language_420pack); - } - if (profile != EEsProfile && version >= 150 && version < 410) - symbolTable.setVariableExtensions("gl_MaxViewports", 1, &E_GL_ARB_viewport_array); - - switch(language) { - case EShLangFragment: - // Set up gl_FragData based on current array size. - if (version == 100 || IncludeLegacy(version, profile, spvVersion) || (! ForwardCompatibility && profile != EEsProfile && version < 420)) { - TPrecisionQualifier pq = profile == EEsProfile ? EpqMedium : EpqNone; - TType fragData(EbtFloat, EvqFragColor, pq, 4); - TArraySizes& arraySizes = *new TArraySizes; - arraySizes.addInnerSize(resources.maxDrawBuffers); - fragData.newArraySizes(arraySizes); - symbolTable.insert(*new TVariable(NewPoolTString("gl_FragData"), fragData)); - SpecialQualifier("gl_FragData", EvqFragColor, EbvFragData, symbolTable); - } - break; - - case EShLangTessControl: - case EShLangTessEvaluation: - // Because of the context-dependent array size (gl_MaxPatchVertices), - // these variables were added later than the others and need to be mapped now. - - // standard members - BuiltInVariable("gl_in", "gl_Position", EbvPosition, symbolTable); - BuiltInVariable("gl_in", "gl_PointSize", EbvPointSize, symbolTable); - BuiltInVariable("gl_in", "gl_ClipDistance", EbvClipDistance, symbolTable); - BuiltInVariable("gl_in", "gl_CullDistance", EbvCullDistance, symbolTable); - - // compatibility members - BuiltInVariable("gl_in", "gl_ClipVertex", EbvClipVertex, symbolTable); - BuiltInVariable("gl_in", "gl_FrontColor", EbvFrontColor, symbolTable); - BuiltInVariable("gl_in", "gl_BackColor", EbvBackColor, symbolTable); - BuiltInVariable("gl_in", "gl_FrontSecondaryColor", EbvFrontSecondaryColor, symbolTable); - BuiltInVariable("gl_in", "gl_BackSecondaryColor", EbvBackSecondaryColor, symbolTable); - BuiltInVariable("gl_in", "gl_TexCoord", EbvTexCoord, symbolTable); - BuiltInVariable("gl_in", "gl_FogFragCoord", EbvFogFragCoord, symbolTable); - break; - - default: - break; - } -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/Initialize.h b/deps/glslang/glslang/MachineIndependent/Initialize.h deleted file mode 100644 index 6b54c4da..00000000 --- a/deps/glslang/glslang/MachineIndependent/Initialize.h +++ /dev/null @@ -1,112 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2013-2016 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#ifndef _INITIALIZE_INCLUDED_ -#define _INITIALIZE_INCLUDED_ - -#include "../Include/ResourceLimits.h" -#include "../Include/Common.h" -#include "../Include/ShHandle.h" -#include "SymbolTable.h" -#include "Versions.h" - -namespace glslang { - -// -// This is made to hold parseable strings for almost all the built-in -// functions and variables for one specific combination of version -// and profile. (Some still need to be added programmatically.) -// This is a base class for language-specific derivations, which -// can be used for language independent builtins. -// -// The strings are organized by -// commonBuiltins: intersection of all stages' built-ins, processed just once -// stageBuiltins[]: anything a stage needs that's not in commonBuiltins -// -class TBuiltInParseables { -public: - POOL_ALLOCATOR_NEW_DELETE(GetThreadPoolAllocator()) - TBuiltInParseables(); - virtual ~TBuiltInParseables(); - virtual void initialize(int version, EProfile, const SpvVersion& spvVersion) = 0; - virtual void initialize(const TBuiltInResource& resources, int version, EProfile, const SpvVersion& spvVersion, EShLanguage) = 0; - virtual const TString& getCommonString() const { return commonBuiltins; } - virtual const TString& getStageString(EShLanguage language) const { return stageBuiltins[language]; } - - virtual void identifyBuiltIns(int version, EProfile profile, const SpvVersion& spvVersion, EShLanguage language, TSymbolTable& symbolTable) = 0; - - virtual void identifyBuiltIns(int version, EProfile profile, const SpvVersion& spvVersion, EShLanguage language, TSymbolTable& symbolTable, const TBuiltInResource &resources) = 0; - -protected: - TString commonBuiltins; - TString stageBuiltins[EShLangCount]; -}; - -// -// This is a GLSL specific derivation of TBuiltInParseables. To present a stable -// interface and match other similar code, it is called TBuiltIns, rather -// than TBuiltInParseablesGlsl. 
-// -class TBuiltIns : public TBuiltInParseables { -public: - POOL_ALLOCATOR_NEW_DELETE(GetThreadPoolAllocator()) - TBuiltIns(); - virtual ~TBuiltIns(); - void initialize(int version, EProfile, const SpvVersion& spvVersion); - void initialize(const TBuiltInResource& resources, int version, EProfile, const SpvVersion& spvVersion, EShLanguage); - - void identifyBuiltIns(int version, EProfile profile, const SpvVersion& spvVersion, EShLanguage language, TSymbolTable& symbolTable); - - void identifyBuiltIns(int version, EProfile profile, const SpvVersion& spvVersion, EShLanguage language, TSymbolTable& symbolTable, const TBuiltInResource &resources); - -protected: - void add2ndGenerationSamplingImaging(int version, EProfile profile, const SpvVersion& spvVersion); - void addSubpassSampling(TSampler, const TString& typeName, int version, EProfile profile); - void addQueryFunctions(TSampler, const TString& typeName, int version, EProfile profile); - void addImageFunctions(TSampler, const TString& typeName, int version, EProfile profile); - void addSamplingFunctions(TSampler, const TString& typeName, int version, EProfile profile); - void addGatherFunctions(TSampler, const TString& typeName, int version, EProfile profile); - - // Helpers for making textual representations of the permutations - // of texturing/imaging functions. - const char* postfixes[5]; - const char* prefixes[EbtNumTypes]; - int dimMap[EsdNumDims]; -}; - -} // end namespace glslang - -#endif // _INITIALIZE_INCLUDED_ diff --git a/deps/glslang/glslang/MachineIndependent/IntermTraverse.cpp b/deps/glslang/glslang/MachineIndependent/IntermTraverse.cpp deleted file mode 100644 index f46010b7..00000000 --- a/deps/glslang/glslang/MachineIndependent/IntermTraverse.cpp +++ /dev/null @@ -1,302 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2013 LunarG, Inc. -// Copyright (c) 2002-2010 The ANGLE Project Authors. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. 
-// - -#include "../Include/intermediate.h" - -namespace glslang { - -// -// Traverse the intermediate representation tree, and -// call a node type specific function for each node. -// Done recursively through the member function Traverse(). -// Node types can be skipped if their function to call is 0, -// but their subtree will still be traversed. -// Nodes with children can have their whole subtree skipped -// if preVisit is turned on and the type specific function -// returns false. -// -// preVisit, postVisit, and rightToLeft control what order -// nodes are visited in. -// - -// -// Traversal functions for terminals are straightforward.... -// -void TIntermMethod::traverse(TIntermTraverser*) -{ - // Tree should always resolve all methods as a non-method. -} - -void TIntermSymbol::traverse(TIntermTraverser *it) -{ - it->visitSymbol(this); -} - -void TIntermConstantUnion::traverse(TIntermTraverser *it) -{ - it->visitConstantUnion(this); -} - -// -// Traverse a binary node. -// -void TIntermBinary::traverse(TIntermTraverser *it) -{ - bool visit = true; - - // - // visit the node before children if pre-visiting. - // - if (it->preVisit) - visit = it->visitBinary(EvPreVisit, this); - - // - // Visit the children, in the right order. - // - if (visit) { - it->incrementDepth(this); - - if (it->rightToLeft) { - if (right) - right->traverse(it); - - if (it->inVisit) - visit = it->visitBinary(EvInVisit, this); - - if (visit && left) - left->traverse(it); - } else { - if (left) - left->traverse(it); - - if (it->inVisit) - visit = it->visitBinary(EvInVisit, this); - - if (visit && right) - right->traverse(it); - } - - it->decrementDepth(); - } - - // - // Visit the node after the children, if requested and the traversal - // hasn't been canceled yet. - // - if (visit && it->postVisit) - it->visitBinary(EvPostVisit, this); -} - -// -// Traverse a unary node. Same comments in binary node apply here. -// -void TIntermUnary::traverse(TIntermTraverser *it) -{ - bool visit = true; - - if (it->preVisit) - visit = it->visitUnary(EvPreVisit, this); - - if (visit) { - it->incrementDepth(this); - operand->traverse(it); - it->decrementDepth(); - } - - if (visit && it->postVisit) - it->visitUnary(EvPostVisit, this); -} - -// -// Traverse an aggregate node. Same comments in binary node apply here. -// -void TIntermAggregate::traverse(TIntermTraverser *it) -{ - bool visit = true; - - if (it->preVisit) - visit = it->visitAggregate(EvPreVisit, this); - - if (visit) { - it->incrementDepth(this); - - if (it->rightToLeft) { - for (TIntermSequence::reverse_iterator sit = sequence.rbegin(); sit != sequence.rend(); sit++) { - (*sit)->traverse(it); - - if (visit && it->inVisit) { - if (*sit != sequence.front()) - visit = it->visitAggregate(EvInVisit, this); - } - } - } else { - for (TIntermSequence::iterator sit = sequence.begin(); sit != sequence.end(); sit++) { - (*sit)->traverse(it); - - if (visit && it->inVisit) { - if (*sit != sequence.back()) - visit = it->visitAggregate(EvInVisit, this); - } - } - } - - it->decrementDepth(); - } - - if (visit && it->postVisit) - it->visitAggregate(EvPostVisit, this); -} - -// -// Traverse a selection node. Same comments in binary node apply here. 
-// -void TIntermSelection::traverse(TIntermTraverser *it) -{ - bool visit = true; - - if (it->preVisit) - visit = it->visitSelection(EvPreVisit, this); - - if (visit) { - it->incrementDepth(this); - if (it->rightToLeft) { - if (falseBlock) - falseBlock->traverse(it); - if (trueBlock) - trueBlock->traverse(it); - condition->traverse(it); - } else { - condition->traverse(it); - if (trueBlock) - trueBlock->traverse(it); - if (falseBlock) - falseBlock->traverse(it); - } - it->decrementDepth(); - } - - if (visit && it->postVisit) - it->visitSelection(EvPostVisit, this); -} - -// -// Traverse a loop node. Same comments in binary node apply here. -// -void TIntermLoop::traverse(TIntermTraverser *it) -{ - bool visit = true; - - if (it->preVisit) - visit = it->visitLoop(EvPreVisit, this); - - if (visit) { - it->incrementDepth(this); - - if (it->rightToLeft) { - if (terminal) - terminal->traverse(it); - - if (body) - body->traverse(it); - - if (test) - test->traverse(it); - } else { - if (test) - test->traverse(it); - - if (body) - body->traverse(it); - - if (terminal) - terminal->traverse(it); - } - - it->decrementDepth(); - } - - if (visit && it->postVisit) - it->visitLoop(EvPostVisit, this); -} - -// -// Traverse a branch node. Same comments in binary node apply here. -// -void TIntermBranch::traverse(TIntermTraverser *it) -{ - bool visit = true; - - if (it->preVisit) - visit = it->visitBranch(EvPreVisit, this); - - if (visit && expression) { - it->incrementDepth(this); - expression->traverse(it); - it->decrementDepth(); - } - - if (visit && it->postVisit) - it->visitBranch(EvPostVisit, this); -} - -// -// Traverse a switch node. -// -void TIntermSwitch::traverse(TIntermTraverser* it) -{ - bool visit = true; - - if (it->preVisit) - visit = it->visitSwitch(EvPreVisit, this); - - if (visit) { - it->incrementDepth(this); - if (it->rightToLeft) { - body->traverse(it); - condition->traverse(it); - } else { - condition->traverse(it); - body->traverse(it); - } - it->decrementDepth(); - } - - if (visit && it->postVisit) - it->visitSwitch(EvPostVisit, this); -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/Intermediate.cpp b/deps/glslang/glslang/MachineIndependent/Intermediate.cpp deleted file mode 100644 index 04a45e03..00000000 --- a/deps/glslang/glslang/MachineIndependent/Intermediate.cpp +++ /dev/null @@ -1,3225 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2012-2015 LunarG, Inc. -// Copyright (C) 2015-2016 Google, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -// -// Build the intermediate representation. -// - -#include "localintermediate.h" -#include "RemoveTree.h" -#include "SymbolTable.h" -#include "propagateNoContraction.h" - -#include -#include - -namespace glslang { - -//////////////////////////////////////////////////////////////////////////// -// -// First set of functions are to help build the intermediate representation. -// These functions are not member functions of the nodes. -// They are called from parser productions. -// -///////////////////////////////////////////////////////////////////////////// - -// -// Add a terminal node for an identifier in an expression. -// -// Returns the added node. -// - -TIntermSymbol* TIntermediate::addSymbol(int id, const TString& name, const TType& type, const TConstUnionArray& constArray, - TIntermTyped* constSubtree, const TSourceLoc& loc) -{ - TIntermSymbol* node = new TIntermSymbol(id, name, type); - node->setLoc(loc); - node->setConstArray(constArray); - node->setConstSubtree(constSubtree); - - return node; -} - -TIntermSymbol* TIntermediate::addSymbol(const TIntermSymbol& intermSymbol) -{ - return addSymbol(intermSymbol.getId(), - intermSymbol.getName(), - intermSymbol.getType(), - intermSymbol.getConstArray(), - intermSymbol.getConstSubtree(), - intermSymbol.getLoc()); -} - -TIntermSymbol* TIntermediate::addSymbol(const TVariable& variable) -{ - glslang::TSourceLoc loc; // just a null location - loc.init(); - - return addSymbol(variable, loc); -} - -TIntermSymbol* TIntermediate::addSymbol(const TVariable& variable, const TSourceLoc& loc) -{ - return addSymbol(variable.getUniqueId(), variable.getName(), variable.getType(), variable.getConstArray(), variable.getConstSubtree(), loc); -} - -TIntermSymbol* TIntermediate::addSymbol(const TType& type, const TSourceLoc& loc) -{ - TConstUnionArray unionArray; // just a null constant - - return addSymbol(0, "", type, unionArray, nullptr, loc); -} - -// -// Connect two nodes with a new parent that does a binary operation on the nodes. -// -// Returns the added node. -// -// Returns nullptr if the working conversions and promotions could not be found. -// -TIntermTyped* TIntermediate::addBinaryMath(TOperator op, TIntermTyped* left, TIntermTyped* right, TSourceLoc loc) -{ - // No operations work on blocks - if (left->getType().getBasicType() == EbtBlock || right->getType().getBasicType() == EbtBlock) - return nullptr; - - // Try converting the children's base types to compatible types. - TIntermTyped* child = addConversion(op, left->getType(), right); - if (child) - right = child; - else { - child = addConversion(op, right->getType(), left); - if (child) - left = child; - else - return nullptr; - } - - // Convert the children's type shape to be compatible. - addBiShapeConversion(op, left, right); - if (left == nullptr || right == nullptr) - return nullptr; - - // - // Need a new node holding things together. Make - // one and promote it to the right type. 
- // - TIntermBinary* node = addBinaryNode(op, left, right, loc); - if (! promote(node)) - return nullptr; - - node->updatePrecision(); - - // - // If they are both (non-specialization) constants, they must be folded. - // (Unless it's the sequence (comma) operator, but that's handled in addComma().) - // - TIntermConstantUnion *leftTempConstant = node->getLeft()->getAsConstantUnion(); - TIntermConstantUnion *rightTempConstant = node->getRight()->getAsConstantUnion(); - if (leftTempConstant && rightTempConstant) { - TIntermTyped* folded = leftTempConstant->fold(node->getOp(), rightTempConstant); - if (folded) - return folded; - } - - // If can propagate spec-constantness and if the operation is an allowed - // specialization-constant operation, make a spec-constant. - if (specConstantPropagates(*node->getLeft(), *node->getRight()) && isSpecializationOperation(*node)) - node->getWritableType().getQualifier().makeSpecConstant(); - - return node; -} - -// -// Low level: add binary node (no promotions or other argument modifications) -// -TIntermBinary* TIntermediate::addBinaryNode(TOperator op, TIntermTyped* left, TIntermTyped* right, TSourceLoc loc) const -{ - // build the node - TIntermBinary* node = new TIntermBinary(op); - if (loc.line == 0) - loc = left->getLoc(); - node->setLoc(loc); - node->setLeft(left); - node->setRight(right); - - return node; -} - -// -// like non-type form, but sets node's type. -// -TIntermBinary* TIntermediate::addBinaryNode(TOperator op, TIntermTyped* left, TIntermTyped* right, TSourceLoc loc, const TType& type) const -{ - TIntermBinary* node = addBinaryNode(op, left, right, loc); - node->setType(type); - return node; -} - -// -// Low level: add unary node (no promotions or other argument modifications) -// -TIntermUnary* TIntermediate::addUnaryNode(TOperator op, TIntermTyped* child, TSourceLoc loc) const -{ - TIntermUnary* node = new TIntermUnary(op); - if (loc.line == 0) - loc = child->getLoc(); - node->setLoc(loc); - node->setOperand(child); - - return node; -} - -// -// like non-type form, but sets node's type. -// -TIntermUnary* TIntermediate::addUnaryNode(TOperator op, TIntermTyped* child, TSourceLoc loc, const TType& type) const -{ - TIntermUnary* node = addUnaryNode(op, child, loc); - node->setType(type); - return node; -} - -// -// Connect two nodes through an assignment. -// -// Returns the added node. -// -// Returns nullptr if the 'right' type could not be converted to match the 'left' type, -// or the resulting operation cannot be properly promoted. -// -TIntermTyped* TIntermediate::addAssign(TOperator op, TIntermTyped* left, TIntermTyped* right, TSourceLoc loc) -{ - // No block assignment - if (left->getType().getBasicType() == EbtBlock || right->getType().getBasicType() == EbtBlock) - return nullptr; - - // - // Like adding binary math, except the conversion can only go - // from right to left. - // - - // convert base types, nullptr return means not possible - right = addConversion(op, left->getType(), right); - if (right == nullptr) - return nullptr; - - // convert shape - right = addUniShapeConversion(op, left->getType(), right); - - // build the node - TIntermBinary* node = addBinaryNode(op, left, right, loc); - - if (! promote(node)) - return nullptr; - - node->updatePrecision(); - - return node; -} - -// -// Connect two nodes through an index operator, where the left node is the base -// of an array or struct, and the right node is a direct or indirect offset. -// -// Returns the added node. 
-// The caller should set the type of the returned node. -// -TIntermTyped* TIntermediate::addIndex(TOperator op, TIntermTyped* base, TIntermTyped* index, TSourceLoc loc) -{ - // caller should set the type - return addBinaryNode(op, base, index, loc); -} - -// -// Add one node as the parent of another that it operates on. -// -// Returns the added node. -// -TIntermTyped* TIntermediate::addUnaryMath(TOperator op, TIntermTyped* child, TSourceLoc loc) -{ - if (child == 0) - return nullptr; - - if (child->getType().getBasicType() == EbtBlock) - return nullptr; - - switch (op) { - case EOpLogicalNot: - if (source == EShSourceHlsl) { - break; // HLSL can promote logical not - } - - if (child->getType().getBasicType() != EbtBool || child->getType().isMatrix() || child->getType().isArray() || child->getType().isVector()) { - return nullptr; - } - break; - - case EOpPostIncrement: - case EOpPreIncrement: - case EOpPostDecrement: - case EOpPreDecrement: - case EOpNegative: - if (child->getType().getBasicType() == EbtStruct || child->getType().isArray()) - return nullptr; - default: break; // some compilers want this - } - - // - // Do we need to promote the operand? - // - TBasicType newType = EbtVoid; - switch (op) { - case EOpConstructInt: newType = EbtInt; break; - case EOpConstructUint: newType = EbtUint; break; - case EOpConstructInt64: newType = EbtInt64; break; - case EOpConstructUint64: newType = EbtUint64; break; -#ifdef AMD_EXTENSIONS - case EOpConstructInt16: newType = EbtInt16; break; - case EOpConstructUint16: newType = EbtUint16; break; -#endif - case EOpConstructBool: newType = EbtBool; break; - case EOpConstructFloat: newType = EbtFloat; break; - case EOpConstructDouble: newType = EbtDouble; break; -#ifdef AMD_EXTENSIONS - case EOpConstructFloat16: newType = EbtFloat16; break; -#endif - default: break; // some compilers want this - } - - if (newType != EbtVoid) { - child = addConversion(op, TType(newType, EvqTemporary, child->getVectorSize(), - child->getMatrixCols(), - child->getMatrixRows(), - child->isVector()), - child); - if (child == nullptr) - return nullptr; - } - - // - // For constructors, we are now done, it was all in the conversion. - // TODO: but, did this bypass constant folding? - // - switch (op) { - case EOpConstructInt: - case EOpConstructUint: - case EOpConstructInt64: - case EOpConstructUint64: -#ifdef AMD_EXTENSIONS - case EOpConstructInt16: - case EOpConstructUint16: -#endif - case EOpConstructBool: - case EOpConstructFloat: - case EOpConstructDouble: -#ifdef AMD_EXTENSIONS - case EOpConstructFloat16: -#endif - return child; - default: break; // some compilers want this - } - - // - // Make a new node for the operator. - // - TIntermUnary* node = addUnaryNode(op, child, loc); - - if (! promote(node)) - return nullptr; - - node->updatePrecision(); - - // If it's a (non-specialization) constant, it must be folded. - if (node->getOperand()->getAsConstantUnion()) - return node->getOperand()->getAsConstantUnion()->fold(op, node->getType()); - - // If it's a specialization constant, the result is too, - // if the operation is allowed for specialization constants. - if (node->getOperand()->getType().getQualifier().isSpecConstant() && isSpecializationOperation(*node)) - node->getWritableType().getQualifier().makeSpecConstant(); - - return node; -} - -TIntermTyped* TIntermediate::addBuiltInFunctionCall(const TSourceLoc& loc, TOperator op, bool unary, TIntermNode* childNode, const TType& returnType) -{ - if (unary) { - // - // Treat it like a unary operator. 
- // addUnaryMath() should get the type correct on its own; - // including constness (which would differ from the prototype). - // - TIntermTyped* child = childNode->getAsTyped(); - if (child == nullptr) - return nullptr; - - if (child->getAsConstantUnion()) { - TIntermTyped* folded = child->getAsConstantUnion()->fold(op, returnType); - if (folded) - return folded; - } - - return addUnaryNode(op, child, child->getLoc(), returnType); - } else { - // setAggregateOperater() calls fold() for constant folding - TIntermTyped* node = setAggregateOperator(childNode, op, returnType, loc); - - return node; - } -} - -// -// This is the safe way to change the operator on an aggregate, as it -// does lots of error checking and fixing. Especially for establishing -// a function call's operation on it's set of parameters. Sequences -// of instructions are also aggregates, but they just directly set -// their operator to EOpSequence. -// -// Returns an aggregate node, which could be the one passed in if -// it was already an aggregate. -// -TIntermTyped* TIntermediate::setAggregateOperator(TIntermNode* node, TOperator op, const TType& type, TSourceLoc loc) -{ - TIntermAggregate* aggNode; - - // - // Make sure we have an aggregate. If not turn it into one. - // - if (node) { - aggNode = node->getAsAggregate(); - if (aggNode == nullptr || aggNode->getOp() != EOpNull) { - // - // Make an aggregate containing this node. - // - aggNode = new TIntermAggregate(); - aggNode->getSequence().push_back(node); - if (loc.line == 0) - loc = node->getLoc(); - } - } else - aggNode = new TIntermAggregate(); - - // - // Set the operator. - // - aggNode->setOperator(op); - if (loc.line != 0) - aggNode->setLoc(loc); - - aggNode->setType(type); - - return fold(aggNode); -} - -// -// Convert the node's type to the given type, as allowed by the operation involved: 'op'. -// For implicit conversions, 'op' is not the requested conversion, it is the explicit -// operation requiring the implicit conversion. -// -// Returns a node representing the conversion, which could be the same -// node passed in if no conversion was needed. -// -// Generally, this is focused on basic type conversion, not shape conversion. -// See addShapeConversion(). -// -// Return nullptr if a conversion can't be done. -// -TIntermTyped* TIntermediate::addConversion(TOperator op, const TType& type, TIntermTyped* node) const -{ - // - // Does the base type even allow the operation? - // - switch (node->getBasicType()) { - case EbtVoid: - return nullptr; - case EbtAtomicUint: - case EbtSampler: - // opaque types can be passed to functions - if (op == EOpFunction) - break; - - // HLSL can assign samplers directly (no constructor) - if (source == EShSourceHlsl && node->getBasicType() == EbtSampler) - break; - - // samplers can get assigned via a sampler constructor - // (well, not yet, but code in the rest of this function is ready for it) - if (node->getBasicType() == EbtSampler && op == EOpAssign && - node->getAsOperator() != nullptr && node->getAsOperator()->getOp() == EOpConstructTextureSampler) - break; - - // otherwise, opaque types can't even be operated on, let alone converted - return nullptr; - default: - break; - } - - // Otherwise, if types are identical, no problem - if (type == node->getType()) - return node; - - // If one's a structure, then no conversions. - if (type.isStruct() || node->isStruct()) - return nullptr; - - // If one's an array, then no conversions. 
- if (type.isArray() || node->getType().isArray()) - return nullptr; - - // Note: callers are responsible for other aspects of shape, - // like vector and matrix sizes. - - TBasicType promoteTo; - - switch (op) { - // - // Explicit conversions (unary operations) - // - case EOpConstructBool: - promoteTo = EbtBool; - break; - case EOpConstructFloat: - promoteTo = EbtFloat; - break; - case EOpConstructDouble: - promoteTo = EbtDouble; - break; -#ifdef AMD_EXTENSIONS - case EOpConstructFloat16: - promoteTo = EbtFloat16; - break; -#endif - case EOpConstructInt: - promoteTo = EbtInt; - break; - case EOpConstructUint: - promoteTo = EbtUint; - break; - case EOpConstructInt64: - promoteTo = EbtInt64; - break; - case EOpConstructUint64: - promoteTo = EbtUint64; - break; -#ifdef AMD_EXTENSIONS - case EOpConstructInt16: - promoteTo = EbtInt16; - break; - case EOpConstructUint16: - promoteTo = EbtUint16; - break; -#endif - - // - // List all the binary ops that can implicitly convert one operand to the other's type; - // This implements the 'policy' for implicit type conversion. - // - case EOpLessThan: - case EOpGreaterThan: - case EOpLessThanEqual: - case EOpGreaterThanEqual: - case EOpEqual: - case EOpNotEqual: - - case EOpAdd: - case EOpSub: - case EOpMul: - case EOpDiv: - case EOpMod: - - case EOpVectorTimesScalar: - case EOpVectorTimesMatrix: - case EOpMatrixTimesVector: - case EOpMatrixTimesScalar: - - case EOpAnd: - case EOpInclusiveOr: - case EOpExclusiveOr: - case EOpAndAssign: - case EOpInclusiveOrAssign: - case EOpExclusiveOrAssign: - case EOpLogicalNot: - case EOpLogicalAnd: - case EOpLogicalOr: - case EOpLogicalXor: - - case EOpFunctionCall: - case EOpReturn: - case EOpAssign: - case EOpAddAssign: - case EOpSubAssign: - case EOpMulAssign: - case EOpVectorTimesScalarAssign: - case EOpMatrixTimesScalarAssign: - case EOpDivAssign: - case EOpModAssign: - - case EOpAtan: - case EOpClamp: - case EOpCross: - case EOpDistance: - case EOpDot: - case EOpDst: - case EOpFaceForward: - case EOpFma: - case EOpFrexp: - case EOpLdexp: - case EOpMix: - case EOpLit: - case EOpMax: - case EOpMin: - case EOpModf: - case EOpPow: - case EOpReflect: - case EOpRefract: - case EOpSmoothStep: - case EOpStep: - - case EOpSequence: - case EOpConstructStruct: - - if (type.getBasicType() == node->getType().getBasicType()) - return node; - - if (canImplicitlyPromote(node->getType().getBasicType(), type.getBasicType(), op)) - promoteTo = type.getBasicType(); - else - return nullptr; - - break; - - // Shifts can have mixed types as long as they are integer, without converting. - // It's the left operand's type that determines the resulting type, so no issue - // with assign shift ops either. 
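// A minimal sketch of how a caller is expected to lean on the conversion policy in
// addConversion() above; 'intermediate' and 'intNode' are assumed names, only the
// addConversion() call itself is taken from this file. For desktop GLSL (profile and
// version permitting) an int operand may be implicitly promoted to float, so the call
// below yields an EOpConvIntToFloat unary node (or a promoted constant when the
// operand is a constant union); a nullptr return signals a disallowed conversion.
TIntermTyped* promoteIntToFloat(TIntermediate& intermediate, TIntermTyped* intNode)
{
    const TType floatType(EbtFloat, EvqTemporary);
    return intermediate.addConversion(EOpAssign, floatType, intNode);
}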
- case EOpLeftShift: - case EOpRightShift: - case EOpLeftShiftAssign: - case EOpRightShiftAssign: - if ((type.getBasicType() == EbtInt || - type.getBasicType() == EbtUint || -#ifdef AMD_EXTENSIONS - type.getBasicType() == EbtInt16 || - type.getBasicType() == EbtUint16 || -#endif - type.getBasicType() == EbtInt64 || - type.getBasicType() == EbtUint64) && - (node->getType().getBasicType() == EbtInt || - node->getType().getBasicType() == EbtUint || -#ifdef AMD_EXTENSIONS - node->getType().getBasicType() == EbtInt16 || - node->getType().getBasicType() == EbtUint16 || -#endif - node->getType().getBasicType() == EbtInt64 || - node->getType().getBasicType() == EbtUint64)) - - return node; - else if (source == EShSourceHlsl && node->getType().getBasicType() == EbtBool) { - promoteTo = type.getBasicType(); - break; - } else - return nullptr; - - default: - // default is to require a match; all exceptions should have case statements above - - if (type.getBasicType() == node->getType().getBasicType()) - return node; - else - return nullptr; - } - - if (node->getAsConstantUnion()) - return promoteConstantUnion(promoteTo, node->getAsConstantUnion()); - - // - // Add a new newNode for the conversion. - // - TIntermUnary* newNode = nullptr; - - TOperator newOp = EOpNull; - - // This is 'mechanism' here, it does any conversion told. The policy comes - // from the shader or the above code. - switch (promoteTo) { - case EbtDouble: - switch (node->getBasicType()) { - case EbtInt: newOp = EOpConvIntToDouble; break; - case EbtUint: newOp = EOpConvUintToDouble; break; - case EbtBool: newOp = EOpConvBoolToDouble; break; - case EbtFloat: newOp = EOpConvFloatToDouble; break; -#ifdef AMD_EXTENSIONS - case EbtFloat16: newOp = EOpConvFloat16ToDouble; break; -#endif - case EbtInt64: newOp = EOpConvInt64ToDouble; break; - case EbtUint64: newOp = EOpConvUint64ToDouble; break; -#ifdef AMD_EXTENSIONS - case EbtInt16: newOp = EOpConvInt16ToDouble; break; - case EbtUint16: newOp = EOpConvUint16ToDouble; break; -#endif - default: - return nullptr; - } - break; - case EbtFloat: - switch (node->getBasicType()) { - case EbtInt: newOp = EOpConvIntToFloat; break; - case EbtUint: newOp = EOpConvUintToFloat; break; - case EbtBool: newOp = EOpConvBoolToFloat; break; - case EbtDouble: newOp = EOpConvDoubleToFloat; break; -#ifdef AMD_EXTENSIONS - case EbtFloat16: newOp = EOpConvFloat16ToFloat; break; -#endif - case EbtInt64: newOp = EOpConvInt64ToFloat; break; - case EbtUint64: newOp = EOpConvUint64ToFloat; break; -#ifdef AMD_EXTENSIONS - case EbtInt16: newOp = EOpConvInt16ToFloat; break; - case EbtUint16: newOp = EOpConvUint16ToFloat; break; -#endif - default: - return nullptr; - } - break; -#ifdef AMD_EXTENSIONS - case EbtFloat16: - switch (node->getBasicType()) { - case EbtInt: newOp = EOpConvIntToFloat16; break; - case EbtUint: newOp = EOpConvUintToFloat16; break; - case EbtBool: newOp = EOpConvBoolToFloat16; break; - case EbtFloat: newOp = EOpConvFloatToFloat16; break; - case EbtDouble: newOp = EOpConvDoubleToFloat16; break; - case EbtInt64: newOp = EOpConvInt64ToFloat16; break; - case EbtUint64: newOp = EOpConvUint64ToFloat16; break; - case EbtInt16: newOp = EOpConvInt16ToFloat16; break; - case EbtUint16: newOp = EOpConvUint16ToFloat16; break; - default: - return nullptr; - } - break; -#endif - case EbtBool: - switch (node->getBasicType()) { - case EbtInt: newOp = EOpConvIntToBool; break; - case EbtUint: newOp = EOpConvUintToBool; break; - case EbtFloat: newOp = EOpConvFloatToBool; break; - case EbtDouble: newOp = 
EOpConvDoubleToBool; break; -#ifdef AMD_EXTENSIONS - case EbtFloat16: newOp = EOpConvFloat16ToBool; break; -#endif - case EbtInt64: newOp = EOpConvInt64ToBool; break; - case EbtUint64: newOp = EOpConvUint64ToBool; break; -#ifdef AMD_EXTENSIONS - case EbtInt16: newOp = EOpConvInt16ToBool; break; - case EbtUint16: newOp = EOpConvUint16ToBool; break; -#endif - default: - return nullptr; - } - break; - case EbtInt: - switch (node->getBasicType()) { - case EbtUint: newOp = EOpConvUintToInt; break; - case EbtBool: newOp = EOpConvBoolToInt; break; - case EbtFloat: newOp = EOpConvFloatToInt; break; - case EbtDouble: newOp = EOpConvDoubleToInt; break; -#ifdef AMD_EXTENSIONS - case EbtFloat16: newOp = EOpConvFloat16ToInt; break; -#endif - case EbtInt64: newOp = EOpConvInt64ToInt; break; - case EbtUint64: newOp = EOpConvUint64ToInt; break; -#ifdef AMD_EXTENSIONS - case EbtInt16: newOp = EOpConvInt16ToInt; break; - case EbtUint16: newOp = EOpConvUint16ToInt; break; -#endif - default: - return nullptr; - } - break; - case EbtUint: - switch (node->getBasicType()) { - case EbtInt: newOp = EOpConvIntToUint; break; - case EbtBool: newOp = EOpConvBoolToUint; break; - case EbtFloat: newOp = EOpConvFloatToUint; break; - case EbtDouble: newOp = EOpConvDoubleToUint; break; -#ifdef AMD_EXTENSIONS - case EbtFloat16: newOp = EOpConvFloat16ToUint; break; -#endif - case EbtInt64: newOp = EOpConvInt64ToUint; break; - case EbtUint64: newOp = EOpConvUint64ToUint; break; -#ifdef AMD_EXTENSIONS - case EbtInt16: newOp = EOpConvInt16ToUint; break; - case EbtUint16: newOp = EOpConvUint16ToUint; break; -#endif - default: - return nullptr; - } - break; - case EbtInt64: - switch (node->getBasicType()) { - case EbtInt: newOp = EOpConvIntToInt64; break; - case EbtUint: newOp = EOpConvUintToInt64; break; - case EbtBool: newOp = EOpConvBoolToInt64; break; - case EbtFloat: newOp = EOpConvFloatToInt64; break; - case EbtDouble: newOp = EOpConvDoubleToInt64; break; -#ifdef AMD_EXTENSIONS - case EbtFloat16: newOp = EOpConvFloat16ToInt64; break; -#endif - case EbtUint64: newOp = EOpConvUint64ToInt64; break; -#ifdef AMD_EXTENSIONS - case EbtInt16: newOp = EOpConvInt16ToInt64; break; - case EbtUint16: newOp = EOpConvUint16ToInt64; break; -#endif - default: - return nullptr; - } - break; - case EbtUint64: - switch (node->getBasicType()) { - case EbtInt: newOp = EOpConvIntToUint64; break; - case EbtUint: newOp = EOpConvUintToUint64; break; - case EbtBool: newOp = EOpConvBoolToUint64; break; - case EbtFloat: newOp = EOpConvFloatToUint64; break; - case EbtDouble: newOp = EOpConvDoubleToUint64; break; -#ifdef AMD_EXTENSIONS - case EbtFloat16: newOp = EOpConvFloat16ToUint64; break; -#endif - case EbtInt64: newOp = EOpConvInt64ToUint64; break; -#ifdef AMD_EXTENSIONS - case EbtInt16: newOp = EOpConvInt16ToUint64; break; - case EbtUint16: newOp = EOpConvUint16ToUint64; break; -#endif - default: - return nullptr; - } - break; -#ifdef AMD_EXTENSIONS - case EbtInt16: - switch (node->getBasicType()) { - case EbtInt: newOp = EOpConvIntToInt16; break; - case EbtUint: newOp = EOpConvUintToInt16; break; - case EbtBool: newOp = EOpConvBoolToInt16; break; - case EbtFloat: newOp = EOpConvFloatToInt16; break; - case EbtDouble: newOp = EOpConvDoubleToInt16; break; - case EbtFloat16: newOp = EOpConvFloat16ToInt16; break; - case EbtInt64: newOp = EOpConvInt64ToInt16; break; - case EbtUint64: newOp = EOpConvUint64ToInt16; break; - case EbtUint16: newOp = EOpConvUint16ToInt16; break; - default: - return nullptr; - } - break; - case EbtUint16: - switch 
(node->getBasicType()) { - case EbtInt: newOp = EOpConvIntToUint16; break; - case EbtUint: newOp = EOpConvUintToUint16; break; - case EbtBool: newOp = EOpConvBoolToUint16; break; - case EbtFloat: newOp = EOpConvFloatToUint16; break; - case EbtDouble: newOp = EOpConvDoubleToUint16; break; - case EbtFloat16: newOp = EOpConvFloat16ToUint16; break; - case EbtInt64: newOp = EOpConvInt64ToUint16; break; - case EbtUint64: newOp = EOpConvUint64ToUint16; break; - case EbtInt16: newOp = EOpConvInt16ToUint16; break; - default: - return nullptr; - } - break; -#endif - default: - return nullptr; - } - - TType newType(promoteTo, EvqTemporary, node->getVectorSize(), node->getMatrixCols(), node->getMatrixRows()); - newNode = addUnaryNode(newOp, node, node->getLoc(), newType); - - // TODO: it seems that some unary folding operations should occur here, but are not - - // Propagate specialization-constant-ness, if allowed - if (node->getType().getQualifier().isSpecConstant() && isSpecializationOperation(*newNode)) - newNode->getWritableType().getQualifier().makeSpecConstant(); - - return newNode; -} - -// Convert the node's shape of type for the given type, as allowed by the -// operation involved: 'op'. This is for situations where there is only one -// direction to consider doing the shape conversion. -// -// This implements policy, it call addShapeConversion() for the mechanism. -// -// Generally, the AST represents allowed GLSL shapes, so this isn't needed -// for GLSL. Bad shapes are caught in conversion or promotion. -// -// Return 'node' if no conversion was done. Promotion handles final shape -// checking. -// -TIntermTyped* TIntermediate::addUniShapeConversion(TOperator op, const TType& type, TIntermTyped* node) -{ - // some source languages don't do this - switch (source) { - case EShSourceHlsl: - break; - case EShSourceGlsl: - default: - return node; - } - - // some operations don't do this - switch (op) { - case EOpFunctionCall: - case EOpReturn: - break; - - case EOpMulAssign: - // want to support vector *= scalar native ops in AST and lower, not smear, similarly for - // matrix *= scalar, etc. - - case EOpAddAssign: - case EOpSubAssign: - case EOpDivAssign: - case EOpAndAssign: - case EOpInclusiveOrAssign: - case EOpExclusiveOrAssign: - case EOpRightShiftAssign: - case EOpLeftShiftAssign: - if (node->getVectorSize() == 1) - return node; - break; - - case EOpAssign: - break; - - case EOpMix: - break; - - default: - return node; - } - - return addShapeConversion(type, node); -} - -// Convert the nodes' shapes to be compatible for the operation 'op'. -// -// This implements policy, it call addShapeConversion() for the mechanism. -// -// Generally, the AST represents allowed GLSL shapes, so this isn't needed -// for GLSL. Bad shapes are caught in conversion or promotion. 
-// -void TIntermediate::addBiShapeConversion(TOperator op, TIntermTyped*& lhsNode, TIntermTyped*& rhsNode) -{ - // some source languages don't do this - switch (source) { - case EShSourceHlsl: - break; - case EShSourceGlsl: - default: - return; - } - - // some operations don't do this - // 'break' will mean attempt bidirectional conversion - switch (op) { - case EOpMulAssign: - case EOpAssign: - case EOpAddAssign: - case EOpSubAssign: - case EOpDivAssign: - case EOpAndAssign: - case EOpInclusiveOrAssign: - case EOpExclusiveOrAssign: - case EOpRightShiftAssign: - case EOpLeftShiftAssign: - // switch to unidirectional conversion (the lhs can't change) - rhsNode = addUniShapeConversion(op, lhsNode->getType(), rhsNode); - return; - - case EOpAdd: - case EOpSub: - case EOpMul: - case EOpDiv: - // want to support vector * scalar native ops in AST and lower, not smear, similarly for - // matrix * vector, etc. - if (lhsNode->getVectorSize() == 1 || rhsNode->getVectorSize() == 1) - return; - break; - - case EOpRightShift: - case EOpLeftShift: - // can natively support the right operand being a scalar and the left a vector, - // but not the reverse - if (rhsNode->getVectorSize() == 1) - return; - break; - - case EOpLessThan: - case EOpGreaterThan: - case EOpLessThanEqual: - case EOpGreaterThanEqual: - - case EOpEqual: - case EOpNotEqual: - - case EOpLogicalAnd: - case EOpLogicalOr: - case EOpLogicalXor: - - case EOpAnd: - case EOpInclusiveOr: - case EOpExclusiveOr: - - case EOpMix: - break; - - default: - return; - } - - // Do bidirectional conversions - if (lhsNode->getType().isScalarOrVec1() || rhsNode->getType().isScalarOrVec1()) { - if (lhsNode->getType().isScalarOrVec1()) - lhsNode = addShapeConversion(rhsNode->getType(), lhsNode); - else - rhsNode = addShapeConversion(lhsNode->getType(), rhsNode); - } - lhsNode = addShapeConversion(rhsNode->getType(), lhsNode); - rhsNode = addShapeConversion(lhsNode->getType(), rhsNode); -} - -// Convert the node's shape of type for the given type. It's not necessarily -// an error if they are different and not converted, as some operations accept -// mixed types. Promotion will do final shape checking. -// -// If there is a chance of two nodes, with conversions possible in each direction, -// the policy for what to ask for must be in the caller; this will do what is asked. -// -// Return 'node' if no conversion was done. Promotion handles final shape -// checking. -// -TIntermTyped* TIntermediate::addShapeConversion(const TType& type, TIntermTyped* node) -{ - // no conversion needed - if (node->getType() == type) - return node; - - // structures and arrays don't change shape, either to or from - if (node->getType().isStruct() || node->getType().isArray() || - type.isStruct() || type.isArray()) - return node; - - // The new node that handles the conversion - TOperator constructorOp = mapTypeToConstructorOp(type); - - // HLSL has custom semantics for scalar->mat shape conversions. - if (source == EShSourceHlsl) { - if (node->getType().isScalarOrVec1() && type.isMatrix()) { - - // HLSL semantics: the scalar (or vec1) is replicated to every component of the matrix. Left to its - // own devices, the constructor from a scalar would populate the diagonal. This forces replication - // to every matrix element. - - // Note that if the node is complex (e.g, a function call), we don't want to duplicate it here - // repeatedly, so we copy it to a temp, then use the temp. 
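// For concreteness: with these semantics an HLSL initialization such as
//     float2x2 m = 0.5;
// ends up with all four components equal to 0.5, whereas the plain constructor path
// (the GLSL-style mat2(0.5)) would only populate the diagonal; the replication below
// is what forces the former behaviour.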
-            const int matSize = type.getMatrixRows() * type.getMatrixCols();
-            TIntermAggregate* rhsAggregate = new TIntermAggregate();
-
-            const bool isSimple = (node->getAsSymbolNode() != nullptr) || (node->getAsConstantUnion() != nullptr);
-
-            if (!isSimple) {
-                assert(0); // TODO: use node replicator service when available.
-            }
-
-            for (int x=0; x<matSize; ++x)
-                rhsAggregate->getSequence().push_back(node);
-
-            return setAggregateOperator(rhsAggregate, constructorOp, type, node->getLoc());
-        }
-    }
-
-    // scalar -> vector or vec1 -> vector or
-    // vector -> scalar or
-    // bigger vector -> smaller vector
-    if ((node->getType().isScalarOrVec1() && type.isVector()) ||
-        (node->getType().isVector() && type.isScalar()) ||
-        (node->isVector() && type.isVector() && node->getVectorSize() > type.getVectorSize()))
-        return setAggregateOperator(makeAggregate(node), constructorOp, type, node->getLoc());
-
-    return node;
-}
-
-//
-// See if the 'from' type is allowed to be implicitly converted to the
-// 'to' type. This is not about vector/array/struct, only about basic type.
-//
-bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperator op) const
-{
-    if (profile == EEsProfile || version == 110)
-        return false;
-
-    if (from == to)
-        return true;
-
-    // TODO: Move more policies into language-specific handlers.
-    // Some languages allow more general (or potentially, more specific) conversions under some conditions.
-    if (source == EShSourceHlsl) {
-        const bool fromConvertable = (from == EbtFloat || from == EbtDouble || from == EbtInt || from == EbtUint || from == EbtBool);
-        const bool toConvertable = (to == EbtFloat || to == EbtDouble || to == EbtInt || to == EbtUint || to == EbtBool);
-
-        if (fromConvertable && toConvertable) {
-            switch (op) {
-            case EOpAndAssign:               // assignments can perform arbitrary conversions
-            case EOpInclusiveOrAssign:       // ...
-            case EOpExclusiveOrAssign:       // ...
-            case EOpAssign:                  // ...
-            case EOpAddAssign:               // ...
-            case EOpSubAssign:               // ...
-            case EOpMulAssign:               // ...
-            case EOpVectorTimesScalarAssign: // ...
-            case EOpMatrixTimesScalarAssign: // ...
-            case EOpDivAssign:               // ...
-            case EOpModAssign:               // ...
- case EOpReturn: // function returns can also perform arbitrary conversions - case EOpFunctionCall: // conversion of a calling parameter - case EOpLogicalNot: - case EOpLogicalAnd: - case EOpLogicalOr: - case EOpLogicalXor: - case EOpConstructStruct: - return true; - default: - break; - } - } - } - - switch (to) { - case EbtDouble: - switch (from) { - case EbtInt: - case EbtUint: - case EbtInt64: - case EbtUint64: -#ifdef AMD_EXTENSIONS - case EbtInt16: - case EbtUint16: -#endif - case EbtFloat: - case EbtDouble: -#ifdef AMD_EXTENSIONS - case EbtFloat16: -#endif - return true; - default: - return false; - } - case EbtFloat: - switch (from) { - case EbtInt: - case EbtUint: -#ifdef AMD_EXTENSIONS - case EbtInt16: - case EbtUint16: -#endif - case EbtFloat: -#ifdef AMD_EXTENSIONS - case EbtFloat16: -#endif - return true; - case EbtBool: - return (source == EShSourceHlsl); - default: - return false; - } - case EbtUint: - switch (from) { - case EbtInt: - return version >= 400 || (source == EShSourceHlsl); - case EbtUint: -#ifdef AMD_EXTENSIONS - case EbtInt16: - case EbtUint16: -#endif - return true; - case EbtBool: - return (source == EShSourceHlsl); - default: - return false; - } - case EbtInt: - switch (from) { - case EbtInt: -#ifdef AMD_EXTENSIONS - case EbtInt16: -#endif - return true; - case EbtBool: - return (source == EShSourceHlsl); - default: - return false; - } - case EbtUint64: - switch (from) { - case EbtInt: - case EbtUint: - case EbtInt64: - case EbtUint64: -#ifdef AMD_EXTENSIONS - case EbtInt16: - case EbtUint16: -#endif - return true; - default: - return false; - } - case EbtInt64: - switch (from) { - case EbtInt: - case EbtInt64: -#ifdef AMD_EXTENSIONS - case EbtInt16: -#endif - return true; - default: - return false; - } -#ifdef AMD_EXTENSIONS - case EbtFloat16: - switch (from) { - case EbtInt16: - case EbtUint16: - case EbtFloat16: - return true; - default: - return false; - } - case EbtUint16: - switch (from) { - case EbtInt16: - case EbtUint16: - return true; - default: - return false; - } -#endif - default: - return false; - } -} - -// -// Given a type, find what operation would fully construct it. 
-// -TOperator TIntermediate::mapTypeToConstructorOp(const TType& type) const -{ - TOperator op = EOpNull; - - switch (type.getBasicType()) { - case EbtStruct: - op = EOpConstructStruct; - break; - case EbtSampler: - if (type.getSampler().combined) - op = EOpConstructTextureSampler; - break; - case EbtFloat: - if (type.isMatrix()) { - switch (type.getMatrixCols()) { - case 2: - switch (type.getMatrixRows()) { - case 2: op = EOpConstructMat2x2; break; - case 3: op = EOpConstructMat2x3; break; - case 4: op = EOpConstructMat2x4; break; - default: break; // some compilers want this - } - break; - case 3: - switch (type.getMatrixRows()) { - case 2: op = EOpConstructMat3x2; break; - case 3: op = EOpConstructMat3x3; break; - case 4: op = EOpConstructMat3x4; break; - default: break; // some compilers want this - } - break; - case 4: - switch (type.getMatrixRows()) { - case 2: op = EOpConstructMat4x2; break; - case 3: op = EOpConstructMat4x3; break; - case 4: op = EOpConstructMat4x4; break; - default: break; // some compilers want this - } - break; - default: break; // some compilers want this - } - } else { - switch(type.getVectorSize()) { - case 1: op = EOpConstructFloat; break; - case 2: op = EOpConstructVec2; break; - case 3: op = EOpConstructVec3; break; - case 4: op = EOpConstructVec4; break; - default: break; // some compilers want this - } - } - break; - case EbtDouble: - if (type.getMatrixCols()) { - switch (type.getMatrixCols()) { - case 2: - switch (type.getMatrixRows()) { - case 2: op = EOpConstructDMat2x2; break; - case 3: op = EOpConstructDMat2x3; break; - case 4: op = EOpConstructDMat2x4; break; - default: break; // some compilers want this - } - break; - case 3: - switch (type.getMatrixRows()) { - case 2: op = EOpConstructDMat3x2; break; - case 3: op = EOpConstructDMat3x3; break; - case 4: op = EOpConstructDMat3x4; break; - default: break; // some compilers want this - } - break; - case 4: - switch (type.getMatrixRows()) { - case 2: op = EOpConstructDMat4x2; break; - case 3: op = EOpConstructDMat4x3; break; - case 4: op = EOpConstructDMat4x4; break; - default: break; // some compilers want this - } - break; - } - } else { - switch(type.getVectorSize()) { - case 1: op = EOpConstructDouble; break; - case 2: op = EOpConstructDVec2; break; - case 3: op = EOpConstructDVec3; break; - case 4: op = EOpConstructDVec4; break; - default: break; // some compilers want this - } - } - break; -#ifdef AMD_EXTENSIONS - case EbtFloat16: - if (type.getMatrixCols()) { - switch (type.getMatrixCols()) { - case 2: - switch (type.getMatrixRows()) { - case 2: op = EOpConstructF16Mat2x2; break; - case 3: op = EOpConstructF16Mat2x3; break; - case 4: op = EOpConstructF16Mat2x4; break; - default: break; // some compilers want this - } - break; - case 3: - switch (type.getMatrixRows()) { - case 2: op = EOpConstructF16Mat3x2; break; - case 3: op = EOpConstructF16Mat3x3; break; - case 4: op = EOpConstructF16Mat3x4; break; - default: break; // some compilers want this - } - break; - case 4: - switch (type.getMatrixRows()) { - case 2: op = EOpConstructF16Mat4x2; break; - case 3: op = EOpConstructF16Mat4x3; break; - case 4: op = EOpConstructF16Mat4x4; break; - default: break; // some compilers want this - } - break; - } - } - else { - switch (type.getVectorSize()) { - case 1: op = EOpConstructFloat16; break; - case 2: op = EOpConstructF16Vec2; break; - case 3: op = EOpConstructF16Vec3; break; - case 4: op = EOpConstructF16Vec4; break; - default: break; // some compilers want this - } - } - break; -#endif - case 
EbtInt: - if (type.getMatrixCols()) { - switch (type.getMatrixCols()) { - case 2: - switch (type.getMatrixRows()) { - case 2: op = EOpConstructIMat2x2; break; - case 3: op = EOpConstructIMat2x3; break; - case 4: op = EOpConstructIMat2x4; break; - default: break; // some compilers want this - } - break; - case 3: - switch (type.getMatrixRows()) { - case 2: op = EOpConstructIMat3x2; break; - case 3: op = EOpConstructIMat3x3; break; - case 4: op = EOpConstructIMat3x4; break; - default: break; // some compilers want this - } - break; - case 4: - switch (type.getMatrixRows()) { - case 2: op = EOpConstructIMat4x2; break; - case 3: op = EOpConstructIMat4x3; break; - case 4: op = EOpConstructIMat4x4; break; - default: break; // some compilers want this - } - break; - } - } else { - switch(type.getVectorSize()) { - case 1: op = EOpConstructInt; break; - case 2: op = EOpConstructIVec2; break; - case 3: op = EOpConstructIVec3; break; - case 4: op = EOpConstructIVec4; break; - default: break; // some compilers want this - } - } - break; - case EbtUint: - if (type.getMatrixCols()) { - switch (type.getMatrixCols()) { - case 2: - switch (type.getMatrixRows()) { - case 2: op = EOpConstructUMat2x2; break; - case 3: op = EOpConstructUMat2x3; break; - case 4: op = EOpConstructUMat2x4; break; - default: break; // some compilers want this - } - break; - case 3: - switch (type.getMatrixRows()) { - case 2: op = EOpConstructUMat3x2; break; - case 3: op = EOpConstructUMat3x3; break; - case 4: op = EOpConstructUMat3x4; break; - default: break; // some compilers want this - } - break; - case 4: - switch (type.getMatrixRows()) { - case 2: op = EOpConstructUMat4x2; break; - case 3: op = EOpConstructUMat4x3; break; - case 4: op = EOpConstructUMat4x4; break; - default: break; // some compilers want this - } - break; - } - } else { - switch(type.getVectorSize()) { - case 1: op = EOpConstructUint; break; - case 2: op = EOpConstructUVec2; break; - case 3: op = EOpConstructUVec3; break; - case 4: op = EOpConstructUVec4; break; - default: break; // some compilers want this - } - } - break; - case EbtInt64: - switch(type.getVectorSize()) { - case 1: op = EOpConstructInt64; break; - case 2: op = EOpConstructI64Vec2; break; - case 3: op = EOpConstructI64Vec3; break; - case 4: op = EOpConstructI64Vec4; break; - default: break; // some compilers want this - } - break; - case EbtUint64: - switch(type.getVectorSize()) { - case 1: op = EOpConstructUint64; break; - case 2: op = EOpConstructU64Vec2; break; - case 3: op = EOpConstructU64Vec3; break; - case 4: op = EOpConstructU64Vec4; break; - default: break; // some compilers want this - } - break; -#ifdef AMD_EXTENSIONS - case EbtInt16: - switch(type.getVectorSize()) { - case 1: op = EOpConstructInt16; break; - case 2: op = EOpConstructI16Vec2; break; - case 3: op = EOpConstructI16Vec3; break; - case 4: op = EOpConstructI16Vec4; break; - default: break; // some compilers want this - } - break; - case EbtUint16: - switch(type.getVectorSize()) { - case 1: op = EOpConstructUint16; break; - case 2: op = EOpConstructU16Vec2; break; - case 3: op = EOpConstructU16Vec3; break; - case 4: op = EOpConstructU16Vec4; break; - default: break; // some compilers want this - } - break; -#endif - case EbtBool: - if (type.getMatrixCols()) { - switch (type.getMatrixCols()) { - case 2: - switch (type.getMatrixRows()) { - case 2: op = EOpConstructBMat2x2; break; - case 3: op = EOpConstructBMat2x3; break; - case 4: op = EOpConstructBMat2x4; break; - default: break; // some compilers want this - } - break; 
- case 3: - switch (type.getMatrixRows()) { - case 2: op = EOpConstructBMat3x2; break; - case 3: op = EOpConstructBMat3x3; break; - case 4: op = EOpConstructBMat3x4; break; - default: break; // some compilers want this - } - break; - case 4: - switch (type.getMatrixRows()) { - case 2: op = EOpConstructBMat4x2; break; - case 3: op = EOpConstructBMat4x3; break; - case 4: op = EOpConstructBMat4x4; break; - default: break; // some compilers want this - } - break; - } - } else { - switch(type.getVectorSize()) { - case 1: op = EOpConstructBool; break; - case 2: op = EOpConstructBVec2; break; - case 3: op = EOpConstructBVec3; break; - case 4: op = EOpConstructBVec4; break; - default: break; // some compilers want this - } - } - break; - default: - break; - } - - return op; -} - -// -// Safe way to combine two nodes into an aggregate. Works with null pointers, -// a node that's not a aggregate yet, etc. -// -// Returns the resulting aggregate, unless nullptr was passed in for -// both existing nodes. -// -TIntermAggregate* TIntermediate::growAggregate(TIntermNode* left, TIntermNode* right) -{ - if (left == nullptr && right == nullptr) - return nullptr; - - TIntermAggregate* aggNode = nullptr; - if (left != nullptr) - aggNode = left->getAsAggregate(); - if (aggNode == nullptr || aggNode->getOp() != EOpNull) { - aggNode = new TIntermAggregate; - if (left != nullptr) - aggNode->getSequence().push_back(left); - } - - if (right != nullptr) - aggNode->getSequence().push_back(right); - - return aggNode; -} - -TIntermAggregate* TIntermediate::growAggregate(TIntermNode* left, TIntermNode* right, const TSourceLoc& loc) -{ - TIntermAggregate* aggNode = growAggregate(left, right); - if (aggNode) - aggNode->setLoc(loc); - - return aggNode; -} - -// -// Turn an existing node into an aggregate. -// -// Returns an aggregate, unless nullptr was passed in for the existing node. -// -TIntermAggregate* TIntermediate::makeAggregate(TIntermNode* node) -{ - if (node == nullptr) - return nullptr; - - TIntermAggregate* aggNode = new TIntermAggregate; - aggNode->getSequence().push_back(node); - aggNode->setLoc(node->getLoc()); - - return aggNode; -} - -TIntermAggregate* TIntermediate::makeAggregate(TIntermNode* node, const TSourceLoc& loc) -{ - if (node == nullptr) - return nullptr; - - TIntermAggregate* aggNode = new TIntermAggregate; - aggNode->getSequence().push_back(node); - aggNode->setLoc(loc); - - return aggNode; -} - -// -// Make an aggregate with an empty sequence. -// -TIntermAggregate* TIntermediate::makeAggregate(const TSourceLoc& loc) -{ - TIntermAggregate* aggNode = new TIntermAggregate; - aggNode->setLoc(loc); - - return aggNode; -} - -// -// For "if" test nodes. There are three children; a condition, -// a true path, and a false path. The two paths are in the -// nodePair. -// -// Returns the selection node created. -// -TIntermTyped* TIntermediate::addSelection(TIntermTyped* cond, TIntermNodePair nodePair, const TSourceLoc& loc, TSelectionControl control) -{ - // - // Don't prune the false path for compile-time constants; it's needed - // for static access analysis. - // - - TIntermSelection* node = new TIntermSelection(cond, nodePair.node1, nodePair.node2); - node->setLoc(loc); - node->setSelectionControl(control); - - return node; -} - -TIntermTyped* TIntermediate::addComma(TIntermTyped* left, TIntermTyped* right, const TSourceLoc& loc) -{ - // However, the lowest precedence operators of the sequence operator ( , ) and the assignment operators - // ... 
are not included in the operators that can create a constant expression. - // - // if (left->getType().getQualifier().storage == EvqConst && - // right->getType().getQualifier().storage == EvqConst) { - - // return right; - //} - - TIntermTyped *commaAggregate = growAggregate(left, right, loc); - commaAggregate->getAsAggregate()->setOperator(EOpComma); - commaAggregate->setType(right->getType()); - commaAggregate->getWritableType().getQualifier().makeTemporary(); - - return commaAggregate; -} - -TIntermTyped* TIntermediate::addMethod(TIntermTyped* object, const TType& type, const TString* name, const TSourceLoc& loc) -{ - TIntermMethod* method = new TIntermMethod(object, type, *name); - method->setLoc(loc); - - return method; -} - -// -// For "?:" test nodes. There are three children; a condition, -// a true path, and a false path. The two paths are specified -// as separate parameters. For vector 'cond', the true and false -// are not paths, but vectors to mix. -// -// Specialization constant operations include -// - The ternary operator ( ? : ) -// -// Returns the selection node created, or nullptr if one could not be. -// -TIntermTyped* TIntermediate::addSelection(TIntermTyped* cond, TIntermTyped* trueBlock, TIntermTyped* falseBlock, const TSourceLoc& loc, TSelectionControl control) -{ - // If it's void, go to the if-then-else selection() - if (trueBlock->getBasicType() == EbtVoid && falseBlock->getBasicType() == EbtVoid) { - TIntermNodePair pair = { trueBlock, falseBlock }; - return addSelection(cond, pair, loc, control); - } - - // - // Get compatible types. - // - TIntermTyped* child = addConversion(EOpSequence, trueBlock->getType(), falseBlock); - if (child) - falseBlock = child; - else { - child = addConversion(EOpSequence, falseBlock->getType(), trueBlock); - if (child) - trueBlock = child; - else - return nullptr; - } - - // Handle a vector condition as a mix - if (!cond->getType().isScalarOrVec1()) { - TType targetVectorType(trueBlock->getType().getBasicType(), EvqTemporary, - cond->getType().getVectorSize()); - // smear true/false operands as needed - trueBlock = addUniShapeConversion(EOpMix, targetVectorType, trueBlock); - falseBlock = addUniShapeConversion(EOpMix, targetVectorType, falseBlock); - - // After conversion, types have to match. - if (falseBlock->getType() != trueBlock->getType()) - return nullptr; - - // make the mix operation - TIntermAggregate* mix = makeAggregate(loc); - mix = growAggregate(mix, falseBlock); - mix = growAggregate(mix, trueBlock); - mix = growAggregate(mix, cond); - mix->setType(targetVectorType); - mix->setOp(EOpMix); - - return mix; - } - - // Now have a scalar condition... - - // Convert true and false expressions to matching types - addBiShapeConversion(EOpMix, trueBlock, falseBlock); - - // After conversion, types have to match. - if (falseBlock->getType() != trueBlock->getType()) - return nullptr; - - // Eliminate the selection when the condition is a scalar and all operands are constant. - if (cond->getAsConstantUnion() && trueBlock->getAsConstantUnion() && falseBlock->getAsConstantUnion()) { - if (cond->getAsConstantUnion()->getConstArray()[0].getBConst()) - return trueBlock; - else - return falseBlock; - } - - // - // Make a selection node. 
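// Before the non-constant path below builds a TIntermSelection, note that the
// all-constant case just above folds the expression away entirely. A minimal usage
// sketch: 'intermediate', 'loc' and 'control' are assumed to come from the surrounding
// front end, everything else is the API shown in this file. Because the condition and
// both arms are (non-spec) constant scalars, addSelection() simply returns 'trueNode'.
TIntermTyped* foldedSelect(TIntermediate& intermediate, const TSourceLoc& loc, TSelectionControl control)
{
    TIntermTyped* cond      = intermediate.addConstantUnion(true, loc, false);
    TIntermTyped* trueNode  = intermediate.addConstantUnion(1, loc, false);
    TIntermTyped* falseNode = intermediate.addConstantUnion(0, loc, false);
    return intermediate.addSelection(cond, trueNode, falseNode, loc, control);
}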
- // - TIntermSelection* node = new TIntermSelection(cond, trueBlock, falseBlock, trueBlock->getType()); - node->setLoc(loc); - node->getQualifier().precision = std::max(trueBlock->getQualifier().precision, falseBlock->getQualifier().precision); - - if ((cond->getQualifier().isConstant() && specConstantPropagates(*trueBlock, *falseBlock)) || - (cond->getQualifier().isSpecConstant() && trueBlock->getQualifier().isConstant() && - falseBlock->getQualifier().isConstant())) - node->getQualifier().makeSpecConstant(); - else - node->getQualifier().makeTemporary(); - - return node; -} - -// -// Constant terminal nodes. Has a union that contains bool, float or int constants -// -// Returns the constant union node created. -// - -TIntermConstantUnion* TIntermediate::addConstantUnion(const TConstUnionArray& unionArray, const TType& t, const TSourceLoc& loc, bool literal) const -{ - TIntermConstantUnion* node = new TIntermConstantUnion(unionArray, t); - node->getQualifier().storage = EvqConst; - node->setLoc(loc); - if (literal) - node->setLiteral(); - - return node; -} - -TIntermConstantUnion* TIntermediate::addConstantUnion(int i, const TSourceLoc& loc, bool literal) const -{ - TConstUnionArray unionArray(1); - unionArray[0].setIConst(i); - - return addConstantUnion(unionArray, TType(EbtInt, EvqConst), loc, literal); -} - -TIntermConstantUnion* TIntermediate::addConstantUnion(unsigned int u, const TSourceLoc& loc, bool literal) const -{ - TConstUnionArray unionArray(1); - unionArray[0].setUConst(u); - - return addConstantUnion(unionArray, TType(EbtUint, EvqConst), loc, literal); -} - -TIntermConstantUnion* TIntermediate::addConstantUnion(long long i64, const TSourceLoc& loc, bool literal) const -{ - TConstUnionArray unionArray(1); - unionArray[0].setI64Const(i64); - - return addConstantUnion(unionArray, TType(EbtInt64, EvqConst), loc, literal); -} - -TIntermConstantUnion* TIntermediate::addConstantUnion(unsigned long long u64, const TSourceLoc& loc, bool literal) const -{ - TConstUnionArray unionArray(1); - unionArray[0].setU64Const(u64); - - return addConstantUnion(unionArray, TType(EbtUint64, EvqConst), loc, literal); -} - -#ifdef AMD_EXTENSIONS -TIntermConstantUnion* TIntermediate::addConstantUnion(short i16, const TSourceLoc& loc, bool literal) const -{ - TConstUnionArray unionArray(1); - unionArray[0].setIConst(i16); - - return addConstantUnion(unionArray, TType(EbtInt16, EvqConst), loc, literal); -} - -TIntermConstantUnion* TIntermediate::addConstantUnion(unsigned short u16, const TSourceLoc& loc, bool literal) const -{ - TConstUnionArray unionArray(1); - unionArray[0].setUConst(u16); - - return addConstantUnion(unionArray, TType(EbtUint16, EvqConst), loc, literal); -} -#endif - -TIntermConstantUnion* TIntermediate::addConstantUnion(bool b, const TSourceLoc& loc, bool literal) const -{ - TConstUnionArray unionArray(1); - unionArray[0].setBConst(b); - - return addConstantUnion(unionArray, TType(EbtBool, EvqConst), loc, literal); -} - -TIntermConstantUnion* TIntermediate::addConstantUnion(double d, TBasicType baseType, const TSourceLoc& loc, bool literal) const -{ -#ifdef AMD_EXTENSIONS - assert(baseType == EbtFloat || baseType == EbtDouble || baseType == EbtFloat16); -#else - assert(baseType == EbtFloat || baseType == EbtDouble); -#endif - - TConstUnionArray unionArray(1); - unionArray[0].setDConst(d); - - return addConstantUnion(unionArray, TType(baseType, EvqConst), loc, literal); -} - -TIntermConstantUnion* TIntermediate::addConstantUnion(const TString* s, const TSourceLoc& loc, bool 
literal) const
-{
-    TConstUnionArray unionArray(1);
-    unionArray[0].setSConst(s);
-
-    return addConstantUnion(unionArray, TType(EbtString, EvqConst), loc, literal);
-}
-
-// Put vector swizzle selectors onto the given sequence
-void TIntermediate::pushSelector(TIntermSequence& sequence, const TVectorSelector& selector, const TSourceLoc& loc)
-{
-    TIntermConstantUnion* constIntNode = addConstantUnion(selector, loc);
-    sequence.push_back(constIntNode);
-}
-
-// Put matrix swizzle selectors onto the given sequence
-void TIntermediate::pushSelector(TIntermSequence& sequence, const TMatrixSelector& selector, const TSourceLoc& loc)
-{
-    TIntermConstantUnion* constIntNode = addConstantUnion(selector.coord1, loc);
-    sequence.push_back(constIntNode);
-    constIntNode = addConstantUnion(selector.coord2, loc);
-    sequence.push_back(constIntNode);
-}
-
-// Make an aggregate node that has a sequence of all selectors.
-template TIntermTyped* TIntermediate::addSwizzle<TVectorSelector>(TSwizzleSelectors<TVectorSelector>& selector, const TSourceLoc& loc);
-template TIntermTyped* TIntermediate::addSwizzle<TMatrixSelector>(TSwizzleSelectors<TMatrixSelector>& selector, const TSourceLoc& loc);
-template<class selectorType>
-TIntermTyped* TIntermediate::addSwizzle(TSwizzleSelectors<selectorType>& selector, const TSourceLoc& loc)
-{
-    TIntermAggregate* node = new TIntermAggregate(EOpSequence);
-
-    node->setLoc(loc);
-    TIntermSequence &sequenceVector = node->getSequence();
-
-    for (int i = 0; i < selector.size(); i++)
-        pushSelector(sequenceVector, selector[i], loc);
-
-    return node;
-}
-
-//
-// Follow the left branches down to the root of an l-value
-// expression (just "." and []).
-//
-// Return the base of the l-value (where following indexing quits working).
-// Return nullptr if a chain following dereferences cannot be followed.
-//
-// 'swizzleOkay' says whether or not it is okay to consider a swizzle
-// a valid part of the dereference chain.
-//
-const TIntermTyped* TIntermediate::findLValueBase(const TIntermTyped* node, bool swizzleOkay)
-{
-    do {
-        const TIntermBinary* binary = node->getAsBinaryNode();
-        if (binary == nullptr)
-            return node;
-        TOperator op = binary->getOp();
-        if (op != EOpIndexDirect && op != EOpIndexIndirect && op != EOpIndexDirectStruct && op != EOpVectorSwizzle && op != EOpMatrixSwizzle)
-            return nullptr;
-        if (! swizzleOkay) {
-            if (op == EOpVectorSwizzle || op == EOpMatrixSwizzle)
-                return nullptr;
-            if ((op == EOpIndexDirect || op == EOpIndexIndirect) &&
-                (binary->getLeft()->getType().isVector() || binary->getLeft()->getType().isScalar()) &&
-                ! binary->getLeft()->getType().isArray())
-                return nullptr;
-        }
-        node = node->getAsBinaryNode()->getLeft();
-    } while (true);
-}
-
-//
-// Create while and do-while loop nodes.
-//
-TIntermLoop* TIntermediate::addLoop(TIntermNode* body, TIntermTyped* test, TIntermTyped* terminal, bool testFirst, const TSourceLoc& loc, TLoopControl control)
-{
-    TIntermLoop* node = new TIntermLoop(body, test, terminal, testFirst);
-    node->setLoc(loc);
-    node->setLoopControl(control);
-
-    return node;
-}
-
-//
-// Create a for-loop sequence.
-// -TIntermAggregate* TIntermediate::addForLoop(TIntermNode* body, TIntermNode* initializer, TIntermTyped* test, TIntermTyped* terminal, bool testFirst, const TSourceLoc& loc, TLoopControl control) -{ - TIntermLoop* node = new TIntermLoop(body, test, terminal, testFirst); - node->setLoc(loc); - node->setLoopControl(control); - - // make a sequence of the initializer and statement, but try to reuse the - // aggregate already created for whatever is in the initializer, if there is one - TIntermAggregate* loopSequence = (initializer == nullptr || - initializer->getAsAggregate() == nullptr) ? makeAggregate(initializer, loc) - : initializer->getAsAggregate(); - if (loopSequence != nullptr && loopSequence->getOp() == EOpSequence) - loopSequence->setOp(EOpNull); - loopSequence = growAggregate(loopSequence, node); - loopSequence->setOperator(EOpSequence); - - return loopSequence; -} - -// -// Add branches. -// -TIntermBranch* TIntermediate::addBranch(TOperator branchOp, const TSourceLoc& loc) -{ - return addBranch(branchOp, nullptr, loc); -} - -TIntermBranch* TIntermediate::addBranch(TOperator branchOp, TIntermTyped* expression, const TSourceLoc& loc) -{ - TIntermBranch* node = new TIntermBranch(branchOp, expression); - node->setLoc(loc); - - return node; -} - -// -// This is to be executed after the final root is put on top by the parsing -// process. -// -bool TIntermediate::postProcess(TIntermNode* root, EShLanguage /*language*/) -{ - if (root == nullptr) - return true; - - // Finish off the top-level sequence - TIntermAggregate* aggRoot = root->getAsAggregate(); - if (aggRoot && aggRoot->getOp() == EOpNull) - aggRoot->setOperator(EOpSequence); - - // Propagate 'noContraction' label in backward from 'precise' variables. - glslang::PropagateNoContraction(*this); - - switch (textureSamplerTransformMode) { - case EShTexSampTransKeep: - break; - case EShTexSampTransUpgradeTextureRemoveSampler: - performTextureUpgradeAndSamplerRemovalTransformation(root); - break; - } - - return true; -} - -void TIntermediate::addSymbolLinkageNodes(TIntermAggregate*& linkage, EShLanguage language, TSymbolTable& symbolTable) -{ - // Add top-level nodes for declarations that must be checked cross - // compilation unit by a linker, yet might not have been referenced - // by the AST. - // - // Almost entirely, translation of symbols is driven by what's present - // in the AST traversal, not by translating the symbol table. - // - // However, there are some special cases: - // - From the specification: "Special built-in inputs gl_VertexID and - // gl_InstanceID are also considered active vertex attributes." - // - Linker-based type mismatch error reporting needs to see all - // uniforms/ins/outs variables and blocks. - // - ftransform() can make gl_Vertex and gl_ModelViewProjectionMatrix active. 
- // - - // if (ftransformUsed) { - // TODO: 1.1 lowering functionality: track ftransform() usage - // addSymbolLinkageNode(root, symbolTable, "gl_Vertex"); - // addSymbolLinkageNode(root, symbolTable, "gl_ModelViewProjectionMatrix"); - //} - - if (language == EShLangVertex) { - // the names won't be found in the symbol table unless the versions are right, - // so version logic does not need to be repeated here - addSymbolLinkageNode(linkage, symbolTable, "gl_VertexID"); - addSymbolLinkageNode(linkage, symbolTable, "gl_InstanceID"); - } - - // Add a child to the root node for the linker objects - linkage->setOperator(EOpLinkerObjects); - treeRoot = growAggregate(treeRoot, linkage); -} - -// -// Add the given name or symbol to the list of nodes at the end of the tree used -// for link-time checking and external linkage. -// - -void TIntermediate::addSymbolLinkageNode(TIntermAggregate*& linkage, TSymbolTable& symbolTable, const TString& name) -{ - TSymbol* symbol = symbolTable.find(name); - if (symbol) - addSymbolLinkageNode(linkage, *symbol->getAsVariable()); -} - -void TIntermediate::addSymbolLinkageNode(TIntermAggregate*& linkage, const TSymbol& symbol) -{ - const TVariable* variable = symbol.getAsVariable(); - if (! variable) { - // This must be a member of an anonymous block, and we need to add the whole block - const TAnonMember* anon = symbol.getAsAnonMember(); - variable = &anon->getAnonContainer(); - } - TIntermSymbol* node = addSymbol(*variable); - linkage = growAggregate(linkage, node); -} - -// -// Add a caller->callee relationship to the call graph. -// Assumes the strings are unique per signature. -// -void TIntermediate::addToCallGraph(TInfoSink& /*infoSink*/, const TString& caller, const TString& callee) -{ - // Duplicates are okay, but faster to not keep them, and they come grouped by caller, - // as long as new ones are push on the same end we check on for duplicates - for (TGraph::const_iterator call = callGraph.begin(); call != callGraph.end(); ++call) { - if (call->caller != caller) - break; - if (call->callee == callee) - return; - } - - callGraph.push_front(TCall(caller, callee)); -} - -// -// This deletes the tree. -// -void TIntermediate::removeTree() -{ - if (treeRoot) - RemoveAllTreeNodes(treeRoot); -} - -// -// Implement the part of KHR_vulkan_glsl that lists the set of operations -// that can result in a specialization constant operation. -// -// "5.x Specialization Constant Operations" -// -// Only some operations discussed in this section may be applied to a -// specialization constant and still yield a result that is as -// specialization constant. The operations allowed are listed below. -// When a specialization constant is operated on with one of these -// operators and with another constant or specialization constant, the -// result is implicitly a specialization constant. 
-// -// - int(), uint(), and bool() constructors for type conversions -// from any of the following types to any of the following types: -// * int -// * uint -// * bool -// - vector versions of the above conversion constructors -// - allowed implicit conversions of the above -// - swizzles (e.g., foo.yx) -// - The following when applied to integer or unsigned integer types: -// * unary negative ( - ) -// * binary operations ( + , - , * , / , % ) -// * shift ( <<, >> ) -// * bitwise operations ( & , | , ^ ) -// - The following when applied to integer or unsigned integer scalar types: -// * comparison ( == , != , > , >= , < , <= ) -// - The following when applied to the Boolean scalar type: -// * not ( ! ) -// * logical operations ( && , || , ^^ ) -// * comparison ( == , != )" -// -// This function just handles binary and unary nodes. Construction -// rules are handled in construction paths that are not covered by the unary -// and binary paths, while required conversions will still show up here -// as unary converters in the from a construction operator. -// -bool TIntermediate::isSpecializationOperation(const TIntermOperator& node) const -{ - // The operations resulting in floating point are quite limited - // (However, some floating-point operations result in bool, like ">", - // so are handled later.) - if (node.getType().isFloatingDomain()) { - switch (node.getOp()) { - case EOpIndexDirect: - case EOpIndexIndirect: - case EOpIndexDirectStruct: - case EOpVectorSwizzle: - case EOpConvFloatToDouble: - case EOpConvDoubleToFloat: -#ifdef AMD_EXTENSIONS - case EOpConvFloat16ToFloat: - case EOpConvFloatToFloat16: - case EOpConvFloat16ToDouble: - case EOpConvDoubleToFloat16: -#endif - return true; - default: - return false; - } - } - - // Check for floating-point arguments - if (const TIntermBinary* bin = node.getAsBinaryNode()) - if (bin->getLeft() ->getType().isFloatingDomain() || - bin->getRight()->getType().isFloatingDomain()) - return false; - - // So, for now, we can assume everything left is non-floating-point... 
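// In practice these rules are consumed by the node builders earlier in this file:
// addBinaryMath() and addUnaryMath() call isSpecializationOperation() and, when it
// returns true and the operands are themselves (spec-)constants, mark the result with
// makeSpecConstant(). So an expression such as 'a + b' on two spec-constant ints
// remains a specialization constant, while anything in the floating-point domain
// (beyond the few conversions listed above) yields an ordinary temporary instead.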
- - // Now check for integer/bool-based operations - switch (node.getOp()) { - - // dereference/swizzle - case EOpIndexDirect: - case EOpIndexIndirect: - case EOpIndexDirectStruct: - case EOpVectorSwizzle: - - // conversion constructors - case EOpConvIntToBool: - case EOpConvUintToBool: - case EOpConvUintToInt: - case EOpConvBoolToInt: - case EOpConvIntToUint: - case EOpConvBoolToUint: - case EOpConvInt64ToBool: - case EOpConvBoolToInt64: - case EOpConvUint64ToBool: - case EOpConvBoolToUint64: - case EOpConvInt64ToInt: - case EOpConvIntToInt64: - case EOpConvUint64ToUint: - case EOpConvUintToUint64: - case EOpConvInt64ToUint64: - case EOpConvUint64ToInt64: - case EOpConvInt64ToUint: - case EOpConvUintToInt64: - case EOpConvUint64ToInt: - case EOpConvIntToUint64: -#ifdef AMD_EXTENSIONS - case EOpConvInt16ToBool: - case EOpConvBoolToInt16: - case EOpConvInt16ToInt: - case EOpConvIntToInt16: - case EOpConvInt16ToUint: - case EOpConvUintToInt16: - case EOpConvInt16ToInt64: - case EOpConvInt64ToInt16: - case EOpConvInt16ToUint64: - case EOpConvUint64ToInt16: - case EOpConvUint16ToBool: - case EOpConvBoolToUint16: - case EOpConvUint16ToInt: - case EOpConvIntToUint16: - case EOpConvUint16ToUint: - case EOpConvUintToUint16: - case EOpConvUint16ToInt64: - case EOpConvInt64ToUint16: - case EOpConvUint16ToUint64: - case EOpConvUint64ToUint16: - case EOpConvInt16ToUint16: - case EOpConvUint16ToInt16: -#endif - - // unary operations - case EOpNegative: - case EOpLogicalNot: - case EOpBitwiseNot: - - // binary operations - case EOpAdd: - case EOpSub: - case EOpMul: - case EOpVectorTimesScalar: - case EOpDiv: - case EOpMod: - case EOpRightShift: - case EOpLeftShift: - case EOpAnd: - case EOpInclusiveOr: - case EOpExclusiveOr: - case EOpLogicalOr: - case EOpLogicalXor: - case EOpLogicalAnd: - case EOpEqual: - case EOpNotEqual: - case EOpLessThan: - case EOpGreaterThan: - case EOpLessThanEqual: - case EOpGreaterThanEqual: - return true; - default: - return false; - } -} - -//////////////////////////////////////////////////////////////// -// -// Member functions of the nodes used for building the tree. -// -//////////////////////////////////////////////////////////////// - -// -// Say whether or not an operation node changes the value of a variable. -// -// Returns true if state is modified. -// -bool TIntermOperator::modifiesState() const -{ - switch (op) { - case EOpPostIncrement: - case EOpPostDecrement: - case EOpPreIncrement: - case EOpPreDecrement: - case EOpAssign: - case EOpAddAssign: - case EOpSubAssign: - case EOpMulAssign: - case EOpVectorTimesMatrixAssign: - case EOpVectorTimesScalarAssign: - case EOpMatrixTimesScalarAssign: - case EOpMatrixTimesMatrixAssign: - case EOpDivAssign: - case EOpModAssign: - case EOpAndAssign: - case EOpInclusiveOrAssign: - case EOpExclusiveOrAssign: - case EOpLeftShiftAssign: - case EOpRightShiftAssign: - return true; - default: - return false; - } -} - -// -// returns true if the operator is for one of the constructors -// -bool TIntermOperator::isConstructor() const -{ - return op > EOpConstructGuardStart && op < EOpConstructGuardEnd; -} - -// -// Make sure the type of an operator is appropriate for its -// combination of operation and operand type. This will invoke -// promoteUnary, promoteBinary, etc as needed. -// -// Returns false if nothing makes sense. 
-// -bool TIntermediate::promote(TIntermOperator* node) -{ - if (node == nullptr) - return false; - - if (node->getAsUnaryNode()) - return promoteUnary(*node->getAsUnaryNode()); - - if (node->getAsBinaryNode()) - return promoteBinary(*node->getAsBinaryNode()); - - if (node->getAsAggregate()) - return promoteAggregate(*node->getAsAggregate()); - - return false; -} - -// -// See TIntermediate::promote -// -bool TIntermediate::promoteUnary(TIntermUnary& node) -{ - const TOperator op = node.getOp(); - TIntermTyped* operand = node.getOperand(); - - switch (op) { - case EOpLogicalNot: - // Convert operand to a boolean type - if (operand->getBasicType() != EbtBool) { - // Add constructor to boolean type. If that fails, we can't do it, so return false. - TIntermTyped* converted = convertToBasicType(op, EbtBool, operand); - if (converted == nullptr) - return false; - - // Use the result of converting the node to a bool. - node.setOperand(operand = converted); // also updates stack variable - } - break; - case EOpBitwiseNot: - if (operand->getBasicType() != EbtInt && - operand->getBasicType() != EbtUint && -#ifdef AMD_EXTENSIONS - operand->getBasicType() != EbtInt16 && - operand->getBasicType() != EbtUint16 && -#endif - operand->getBasicType() != EbtInt64 && - operand->getBasicType() != EbtUint64) - - return false; - break; - case EOpNegative: - case EOpPostIncrement: - case EOpPostDecrement: - case EOpPreIncrement: - case EOpPreDecrement: - if (operand->getBasicType() != EbtInt && - operand->getBasicType() != EbtUint && - operand->getBasicType() != EbtInt64 && - operand->getBasicType() != EbtUint64 && -#ifdef AMD_EXTENSIONS - operand->getBasicType() != EbtInt16 && - operand->getBasicType() != EbtUint16 && -#endif - operand->getBasicType() != EbtFloat && -#ifdef AMD_EXTENSIONS - operand->getBasicType() != EbtFloat16 && -#endif - operand->getBasicType() != EbtDouble) - - return false; - break; - - default: - if (operand->getBasicType() != EbtFloat) - - return false; - } - - node.setType(operand->getType()); - node.getWritableType().getQualifier().makeTemporary(); - - return true; -} - -void TIntermUnary::updatePrecision() -{ -#ifdef AMD_EXTENSIONS - if (getBasicType() == EbtInt || getBasicType() == EbtUint || getBasicType() == EbtFloat || getBasicType() == EbtFloat16) { -#else - if (getBasicType() == EbtInt || getBasicType() == EbtUint || getBasicType() == EbtFloat) { -#endif - if (operand->getQualifier().precision > getQualifier().precision) - getQualifier().precision = operand->getQualifier().precision; - } -} - -// If it is not already, convert this node to the given basic type. -TIntermTyped* TIntermediate::convertToBasicType(TOperator op, TBasicType basicType, TIntermTyped* node) const -{ - if (node == nullptr) - return nullptr; - - // It's already this basic type: nothing needs to be done, so use the node directly. - if (node->getBasicType() == basicType) - return node; - - const TType& type = node->getType(); - const TType newType(basicType, type.getQualifier().storage, - type.getVectorSize(), type.getMatrixCols(), type.getMatrixRows(), type.isVector()); - - // Add constructor to the right vectorness of the right type. If that fails, we can't do it, so return nullptr. - return addConversion(op, newType, node); -} - -// -// See TIntermediate::promote -// -bool TIntermediate::promoteBinary(TIntermBinary& node) -{ - TOperator op = node.getOp(); - TIntermTyped* left = node.getLeft(); - TIntermTyped* right = node.getRight(); - - // Arrays and structures have to be exact matches. 
- if ((left->isArray() || right->isArray() || left->getBasicType() == EbtStruct || right->getBasicType() == EbtStruct) - && left->getType() != right->getType()) - return false; - - // Base assumption: just make the type the same as the left - // operand. Only deviations from this will be coded. - node.setType(left->getType()); - node.getWritableType().getQualifier().clear(); - - // Composite and opaque types don't having pending operator changes, e.g., - // array, structure, and samplers. Just establish final type and correctness. - if (left->isArray() || left->getBasicType() == EbtStruct || left->getBasicType() == EbtSampler) { - switch (op) { - case EOpEqual: - case EOpNotEqual: - if (left->getBasicType() == EbtSampler) { - // can't compare samplers - return false; - } else { - // Promote to conditional - node.setType(TType(EbtBool)); - } - - return true; - - case EOpAssign: - // Keep type from above - - return true; - - default: - return false; - } - } - - // - // We now have only scalars, vectors, and matrices to worry about. - // - - // HLSL implicitly promotes bool -> int for numeric operations. - // (Implicit conversions to make the operands match each other's types were already done.) - if (getSource() == EShSourceHlsl && - (left->getBasicType() == EbtBool || right->getBasicType() == EbtBool)) { - switch (op) { - case EOpLessThan: - case EOpGreaterThan: - case EOpLessThanEqual: - case EOpGreaterThanEqual: - - case EOpRightShift: - case EOpLeftShift: - - case EOpMod: - - case EOpAnd: - case EOpInclusiveOr: - case EOpExclusiveOr: - - case EOpAdd: - case EOpSub: - case EOpDiv: - case EOpMul: - left = addConversion(op, TType(EbtInt, EvqTemporary, left->getVectorSize()), left); - right = addConversion(op, TType(EbtInt, EvqTemporary, right->getVectorSize()), right); - if (left == nullptr || right == nullptr) - return false; - node.setLeft(left); - node.setRight(right); - break; - - default: - break; - } - } - - // Do general type checks against individual operands (comparing left and right is coming up, checking mixed shapes after that) - switch (op) { - case EOpLessThan: - case EOpGreaterThan: - case EOpLessThanEqual: - case EOpGreaterThanEqual: - // Relational comparisons need numeric types and will promote to scalar Boolean. - if (left->getBasicType() == EbtBool) - return false; - - node.setType(TType(EbtBool, EvqTemporary, left->getVectorSize())); - break; - - case EOpEqual: - case EOpNotEqual: - if (getSource() == EShSourceHlsl) { - const int resultWidth = std::max(left->getVectorSize(), right->getVectorSize()); - - // In HLSL, == or != on vectors means component-wise comparison. - if (resultWidth > 1) { - op = (op == EOpEqual) ? EOpVectorEqual : EOpVectorNotEqual; - node.setOp(op); - } - - node.setType(TType(EbtBool, EvqTemporary, resultWidth)); - } else { - // All the above comparisons result in a bool (but not the vector compares) - node.setType(TType(EbtBool)); - } - break; - - case EOpLogicalAnd: - case EOpLogicalOr: - case EOpLogicalXor: - if (getSource() == EShSourceHlsl) { - TIntermTyped* convertedL = convertToBasicType(op, EbtBool, left); - TIntermTyped* convertedR = convertToBasicType(op, EbtBool, right); - if (convertedL == nullptr || convertedR == nullptr) - return false; - node.setLeft(left = convertedL); // also updates stack variable - node.setRight(right = convertedR); // also updates stack variable - } else { - // logical ops operate only on scalar Booleans and will promote to scalar Boolean. 
- if (left->getBasicType() != EbtBool || left->isVector() || left->isMatrix()) - return false; - } - - node.setType(TType(EbtBool, EvqTemporary, left->getVectorSize())); - - break; - - case EOpRightShift: - case EOpLeftShift: - case EOpRightShiftAssign: - case EOpLeftShiftAssign: - - case EOpMod: - case EOpModAssign: - - case EOpAnd: - case EOpInclusiveOr: - case EOpExclusiveOr: - case EOpAndAssign: - case EOpInclusiveOrAssign: - case EOpExclusiveOrAssign: - if (getSource() == EShSourceHlsl) - break; - - // Check for integer-only operands. - if ((left->getBasicType() != EbtInt && left->getBasicType() != EbtUint && -#ifdef AMD_EXTENSIONS - left->getBasicType() != EbtInt16 && left->getBasicType() != EbtUint16 && -#endif - left->getBasicType() != EbtInt64 && left->getBasicType() != EbtUint64) || - (right->getBasicType() != EbtInt && right->getBasicType() != EbtUint && -#ifdef AMD_EXTENSIONS - right->getBasicType() != EbtInt16 && right->getBasicType() != EbtUint16 && -#endif - right->getBasicType() != EbtInt64 && right->getBasicType() != EbtUint64)) - return false; - if (left->isMatrix() || right->isMatrix()) - return false; - - break; - - case EOpAdd: - case EOpSub: - case EOpDiv: - case EOpMul: - case EOpAddAssign: - case EOpSubAssign: - case EOpMulAssign: - case EOpDivAssign: - // check for non-Boolean operands - if (left->getBasicType() == EbtBool || right->getBasicType() == EbtBool) - return false; - - default: - break; - } - - // Compare left and right, and finish with the cases where the operand types must match - switch (op) { - case EOpLessThan: - case EOpGreaterThan: - case EOpLessThanEqual: - case EOpGreaterThanEqual: - - case EOpEqual: - case EOpNotEqual: - case EOpVectorEqual: - case EOpVectorNotEqual: - - case EOpLogicalAnd: - case EOpLogicalOr: - case EOpLogicalXor: - return left->getType() == right->getType(); - - case EOpMod: - case EOpModAssign: - - case EOpAnd: - case EOpInclusiveOr: - case EOpExclusiveOr: - case EOpAndAssign: - case EOpInclusiveOrAssign: - case EOpExclusiveOrAssign: - - case EOpAdd: - case EOpSub: - case EOpDiv: - - case EOpAddAssign: - case EOpSubAssign: - case EOpDivAssign: - // Quick out in case the types do match - if (left->getType() == right->getType()) - return true; - - // Fall through - - case EOpMul: - case EOpMulAssign: - // At least the basic type has to match - if (left->getBasicType() != right->getBasicType()) - return false; - - default: - break; - } - - // Finish handling the case, for all ops, where both operands are scalars. - if (left->isScalar() && right->isScalar()) - return true; - - // Finish handling the case, for all ops, where there are two vectors of different sizes - if (left->isVector() && right->isVector() && left->getVectorSize() != right->getVectorSize() && right->getVectorSize() > 1) - return false; - - // - // We now have a mix of scalars, vectors, or matrices, for non-relational operations. - // - - // Can these two operands be combined, what is the resulting type? 
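// --- Editorial note: illustrative sketch, not part of the original diff -------
// The EOpMul handling below maps every scalar/vector/matrix pairing onto a
// dedicated opcode and result shape. A minimal standalone restatement of those
// shape rules, using a hypothetical Shape helper instead of glslang's TType:
#include <cstdio>
#include <optional>
#include <string>

struct Shape {
    int vecSize = 1;          // 1 means scalar
    int cols = 0, rows = 0;   // cols/rows == 0 means "not a matrix"
    bool isMatrix() const { return cols > 0; }
    bool isVector() const { return !isMatrix() && vecSize > 1; }
};

// Returns the opcode promoteBinary() would select for left * right, or nothing
// when the shapes cannot be combined.
std::optional<std::string> promoteMul(const Shape& l, const Shape& r)
{
    if (!l.isMatrix() && r.isMatrix()) {
        if (!l.isVector()) return "MatrixTimesScalar";
        if (l.vecSize != r.rows) return std::nullopt;   // vec * mat needs size == rows
        return "VectorTimesMatrix";                     // result has r.cols components
    }
    if (l.isMatrix() && !r.isMatrix()) {
        if (!r.isVector()) return "MatrixTimesScalar";
        if (l.cols != r.vecSize) return std::nullopt;   // mat * vec needs cols == size
        return "MatrixTimesVector";                     // result has l.rows components
    }
    if (l.isMatrix() && r.isMatrix()) {
        if (l.cols != r.rows) return std::nullopt;
        return "MatrixTimesMatrix";                     // result is l.rows x r.cols
    }
    if (l.isVector() && r.isVector()) return "Mul";     // component-wise product
    if (l.isVector() || r.isVector()) return "VectorTimesScalar";
    return "Mul";                                       // plain scalar multiply
}

int main()
{
    Shape vec4{4, 0, 0}, vec3{3, 0, 0}, mat4{1, 4, 4};
    std::printf("%s\n", promoteMul(vec4, mat4)->c_str());            // VectorTimesMatrix
    std::printf("%d\n", promoteMul(vec3, mat4).has_value() ? 1 : 0); // 0: 3 components vs 4 rows
}
// -------------------------------------------------------------------------------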
- TBasicType basicType = left->getBasicType(); - switch (op) { - case EOpMul: - if (!left->isMatrix() && right->isMatrix()) { - if (left->isVector()) { - if (left->getVectorSize() != right->getMatrixRows()) - return false; - node.setOp(op = EOpVectorTimesMatrix); - node.setType(TType(basicType, EvqTemporary, right->getMatrixCols())); - } else { - node.setOp(op = EOpMatrixTimesScalar); - node.setType(TType(basicType, EvqTemporary, 0, right->getMatrixCols(), right->getMatrixRows())); - } - } else if (left->isMatrix() && !right->isMatrix()) { - if (right->isVector()) { - if (left->getMatrixCols() != right->getVectorSize()) - return false; - node.setOp(op = EOpMatrixTimesVector); - node.setType(TType(basicType, EvqTemporary, left->getMatrixRows())); - } else { - node.setOp(op = EOpMatrixTimesScalar); - } - } else if (left->isMatrix() && right->isMatrix()) { - if (left->getMatrixCols() != right->getMatrixRows()) - return false; - node.setOp(op = EOpMatrixTimesMatrix); - node.setType(TType(basicType, EvqTemporary, 0, right->getMatrixCols(), left->getMatrixRows())); - } else if (! left->isMatrix() && ! right->isMatrix()) { - if (left->isVector() && right->isVector()) { - ; // leave as component product - } else if (left->isVector() || right->isVector()) { - node.setOp(op = EOpVectorTimesScalar); - if (right->isVector()) - node.setType(TType(basicType, EvqTemporary, right->getVectorSize())); - } - } else { - return false; - } - break; - case EOpMulAssign: - if (! left->isMatrix() && right->isMatrix()) { - if (left->isVector()) { - if (left->getVectorSize() != right->getMatrixRows() || left->getVectorSize() != right->getMatrixCols()) - return false; - node.setOp(op = EOpVectorTimesMatrixAssign); - } else { - return false; - } - } else if (left->isMatrix() && !right->isMatrix()) { - if (right->isVector()) { - return false; - } else { - node.setOp(op = EOpMatrixTimesScalarAssign); - } - } else if (left->isMatrix() && right->isMatrix()) { - if (left->getMatrixCols() != left->getMatrixRows() || left->getMatrixCols() != right->getMatrixCols() || left->getMatrixCols() != right->getMatrixRows()) - return false; - node.setOp(op = EOpMatrixTimesMatrixAssign); - } else if (!left->isMatrix() && !right->isMatrix()) { - if (left->isVector() && right->isVector()) { - // leave as component product - } else if (left->isVector() || right->isVector()) { - if (! left->isVector()) - return false; - node.setOp(op = EOpVectorTimesScalarAssign); - } - } else { - return false; - } - break; - - case EOpRightShift: - case EOpLeftShift: - case EOpRightShiftAssign: - case EOpLeftShiftAssign: - if (right->isVector() && (! 
left->isVector() || right->getVectorSize() != left->getVectorSize())) - return false; - break; - - case EOpAssign: - if (left->getVectorSize() != right->getVectorSize() || left->getMatrixCols() != right->getMatrixCols() || left->getMatrixRows() != right->getMatrixRows()) - return false; - // fall through - - case EOpAdd: - case EOpSub: - case EOpDiv: - case EOpMod: - case EOpAnd: - case EOpInclusiveOr: - case EOpExclusiveOr: - case EOpAddAssign: - case EOpSubAssign: - case EOpDivAssign: - case EOpModAssign: - case EOpAndAssign: - case EOpInclusiveOrAssign: - case EOpExclusiveOrAssign: - - if ((left->isMatrix() && right->isVector()) || - (left->isVector() && right->isMatrix()) || - left->getBasicType() != right->getBasicType()) - return false; - if (left->isMatrix() && right->isMatrix() && (left->getMatrixCols() != right->getMatrixCols() || left->getMatrixRows() != right->getMatrixRows())) - return false; - if (left->isVector() && right->isVector() && left->getVectorSize() != right->getVectorSize()) - return false; - if (right->isVector() || right->isMatrix()) { - node.getWritableType().shallowCopy(right->getType()); - node.getWritableType().getQualifier().makeTemporary(); - } - break; - - default: - return false; - } - - // - // One more check for assignment. - // - switch (op) { - // The resulting type has to match the left operand. - case EOpAssign: - case EOpAddAssign: - case EOpSubAssign: - case EOpMulAssign: - case EOpDivAssign: - case EOpModAssign: - case EOpAndAssign: - case EOpInclusiveOrAssign: - case EOpExclusiveOrAssign: - case EOpLeftShiftAssign: - case EOpRightShiftAssign: - if (node.getType() != left->getType()) - return false; - break; - default: - break; - } - - return true; -} - -// -// See TIntermediate::promote -// -bool TIntermediate::promoteAggregate(TIntermAggregate& node) -{ - TOperator op = node.getOp(); - TIntermSequence& args = node.getSequence(); - const int numArgs = static_cast(args.size()); - - // Presently, only hlsl does intrinsic promotions. - if (getSource() != EShSourceHlsl) - return true; - - // set of opcodes that can be promoted in this manner. - switch (op) { - case EOpAtan: - case EOpClamp: - case EOpCross: - case EOpDistance: - case EOpDot: - case EOpDst: - case EOpFaceForward: - // case EOpFindMSB: TODO: - // case EOpFindLSB: TODO: - case EOpFma: - case EOpMod: - case EOpFrexp: - case EOpLdexp: - case EOpMix: - case EOpLit: - case EOpMax: - case EOpMin: - case EOpModf: - // case EOpGenMul: TODO: - case EOpPow: - case EOpReflect: - case EOpRefract: - // case EOpSinCos: TODO: - case EOpSmoothStep: - case EOpStep: - break; - default: - return true; - } - - // TODO: array and struct behavior - - // Try converting all nodes to the given node's type - TIntermSequence convertedArgs(numArgs, nullptr); - - // Try to convert all types to the nonConvArg type. - for (int nonConvArg = 0; nonConvArg < numArgs; ++nonConvArg) { - // Try converting all args to this arg's type - for (int convArg = 0; convArg < numArgs; ++convArg) { - convertedArgs[convArg] = addConversion(op, args[nonConvArg]->getAsTyped()->getType(), - args[convArg]->getAsTyped()); - } - - // If we successfully converted all the args, use the result. 
- if (std::all_of(convertedArgs.begin(), convertedArgs.end(), - [](const TIntermNode* node) { return node != nullptr; })) { - - std::swap(args, convertedArgs); - return true; - } - } - - return false; -} - -void TIntermBinary::updatePrecision() -{ -#ifdef AMD_EXTENSIONS - if (getBasicType() == EbtInt || getBasicType() == EbtUint || getBasicType() == EbtFloat || getBasicType() == EbtFloat16) { -#else - if (getBasicType() == EbtInt || getBasicType() == EbtUint || getBasicType() == EbtFloat) { -#endif - getQualifier().precision = std::max(right->getQualifier().precision, left->getQualifier().precision); - if (getQualifier().precision != EpqNone) { - left->propagatePrecision(getQualifier().precision); - right->propagatePrecision(getQualifier().precision); - } - } -} - -void TIntermTyped::propagatePrecision(TPrecisionQualifier newPrecision) -{ -#ifdef AMD_EXTENSIONS - if (getQualifier().precision != EpqNone || (getBasicType() != EbtInt && getBasicType() != EbtUint && getBasicType() != EbtFloat && getBasicType() != EbtFloat16)) -#else - if (getQualifier().precision != EpqNone || (getBasicType() != EbtInt && getBasicType() != EbtUint && getBasicType() != EbtFloat)) -#endif - return; - - getQualifier().precision = newPrecision; - - TIntermBinary* binaryNode = getAsBinaryNode(); - if (binaryNode) { - binaryNode->getLeft()->propagatePrecision(newPrecision); - binaryNode->getRight()->propagatePrecision(newPrecision); - - return; - } - - TIntermUnary* unaryNode = getAsUnaryNode(); - if (unaryNode) { - unaryNode->getOperand()->propagatePrecision(newPrecision); - - return; - } - - TIntermAggregate* aggregateNode = getAsAggregate(); - if (aggregateNode) { - TIntermSequence operands = aggregateNode->getSequence(); - for (unsigned int i = 0; i < operands.size(); ++i) { - TIntermTyped* typedNode = operands[i]->getAsTyped(); - if (! 
typedNode) - break; - typedNode->propagatePrecision(newPrecision); - } - - return; - } - - TIntermSelection* selectionNode = getAsSelectionNode(); - if (selectionNode) { - TIntermTyped* typedNode = selectionNode->getTrueBlock()->getAsTyped(); - if (typedNode) { - typedNode->propagatePrecision(newPrecision); - typedNode = selectionNode->getFalseBlock()->getAsTyped(); - if (typedNode) - typedNode->propagatePrecision(newPrecision); - } - - return; - } -} - -TIntermTyped* TIntermediate::promoteConstantUnion(TBasicType promoteTo, TIntermConstantUnion* node) const -{ - const TConstUnionArray& rightUnionArray = node->getConstArray(); - int size = node->getType().computeNumComponents(); - - TConstUnionArray leftUnionArray(size); - - for (int i=0; i < size; i++) { - switch (promoteTo) { - case EbtFloat: - switch (node->getType().getBasicType()) { - case EbtInt: - leftUnionArray[i].setDConst(static_cast(rightUnionArray[i].getIConst())); - break; - case EbtUint: - leftUnionArray[i].setDConst(static_cast(rightUnionArray[i].getUConst())); - break; - case EbtInt64: - leftUnionArray[i].setDConst(static_cast(rightUnionArray[i].getI64Const())); - break; - case EbtUint64: - leftUnionArray[i].setDConst(static_cast(rightUnionArray[i].getU64Const())); - break; - case EbtBool: - leftUnionArray[i].setDConst(static_cast(rightUnionArray[i].getBConst())); - break; - case EbtFloat: - case EbtDouble: -#ifdef AMD_EXTENSIONS - case EbtFloat16: -#endif - leftUnionArray[i] = rightUnionArray[i]; - break; - default: - return node; - } - break; - case EbtDouble: - switch (node->getType().getBasicType()) { - case EbtInt: - leftUnionArray[i].setDConst(static_cast(rightUnionArray[i].getIConst())); - break; - case EbtUint: - leftUnionArray[i].setDConst(static_cast(rightUnionArray[i].getUConst())); - break; - case EbtInt64: - leftUnionArray[i].setDConst(static_cast(rightUnionArray[i].getI64Const())); - break; - case EbtUint64: - leftUnionArray[i].setDConst(static_cast(rightUnionArray[i].getU64Const())); - break; - case EbtBool: - leftUnionArray[i].setDConst(static_cast(rightUnionArray[i].getBConst())); - break; - case EbtFloat: - case EbtDouble: -#ifdef AMD_EXTENSIONS - case EbtFloat16: -#endif - leftUnionArray[i] = rightUnionArray[i]; - break; - default: - return node; - } - break; -#ifdef AMD_EXTENSIONS - case EbtFloat16: - switch (node->getType().getBasicType()) { - case EbtInt: - leftUnionArray[i].setDConst(static_cast(rightUnionArray[i].getIConst())); - break; - case EbtUint: - leftUnionArray[i].setDConst(static_cast(rightUnionArray[i].getUConst())); - break; - case EbtInt64: - leftUnionArray[i].setDConst(static_cast(rightUnionArray[i].getI64Const())); - break; - case EbtUint64: - leftUnionArray[i].setDConst(static_cast(rightUnionArray[i].getU64Const())); - break; - case EbtBool: - leftUnionArray[i].setDConst(static_cast(rightUnionArray[i].getBConst())); - break; - case EbtFloat: - case EbtDouble: - case EbtFloat16: - leftUnionArray[i] = rightUnionArray[i]; - break; - default: - return node; - } - break; -#endif - case EbtInt: - switch (node->getType().getBasicType()) { - case EbtInt: - leftUnionArray[i] = rightUnionArray[i]; - break; - case EbtUint: - leftUnionArray[i].setIConst(static_cast(rightUnionArray[i].getUConst())); - break; - case EbtInt64: - leftUnionArray[i].setIConst(static_cast(rightUnionArray[i].getI64Const())); - break; - case EbtUint64: - leftUnionArray[i].setIConst(static_cast(rightUnionArray[i].getU64Const())); - break; - case EbtBool: - 
leftUnionArray[i].setIConst(static_cast(rightUnionArray[i].getBConst())); - break; - case EbtFloat: - case EbtDouble: -#ifdef AMD_EXTENSIONS - case EbtFloat16: -#endif - leftUnionArray[i].setIConst(static_cast(rightUnionArray[i].getDConst())); - break; - default: - return node; - } - break; - case EbtUint: - switch (node->getType().getBasicType()) { - case EbtInt: - leftUnionArray[i].setUConst(static_cast(rightUnionArray[i].getIConst())); - break; - case EbtUint: - leftUnionArray[i] = rightUnionArray[i]; - break; - case EbtInt64: - leftUnionArray[i].setUConst(static_cast(rightUnionArray[i].getI64Const())); - break; - case EbtUint64: - leftUnionArray[i].setUConst(static_cast(rightUnionArray[i].getU64Const())); - break; - case EbtBool: - leftUnionArray[i].setUConst(static_cast(rightUnionArray[i].getBConst())); - break; - case EbtFloat: - case EbtDouble: -#ifdef AMD_EXTENSIONS - case EbtFloat16: -#endif - leftUnionArray[i].setUConst(static_cast(rightUnionArray[i].getDConst())); - break; - default: - return node; - } - break; - case EbtBool: - switch (node->getType().getBasicType()) { - case EbtInt: - leftUnionArray[i].setBConst(rightUnionArray[i].getIConst() != 0); - break; - case EbtUint: - leftUnionArray[i].setBConst(rightUnionArray[i].getUConst() != 0); - break; - case EbtInt64: - leftUnionArray[i].setBConst(rightUnionArray[i].getI64Const() != 0); - break; - case EbtUint64: - leftUnionArray[i].setBConst(rightUnionArray[i].getU64Const() != 0); - break; - case EbtBool: - leftUnionArray[i] = rightUnionArray[i]; - break; - case EbtFloat: - case EbtDouble: -#ifdef AMD_EXTENSIONS - case EbtFloat16: -#endif - leftUnionArray[i].setBConst(rightUnionArray[i].getDConst() != 0.0); - break; - default: - return node; - } - break; - case EbtInt64: - switch (node->getType().getBasicType()) { - case EbtInt: - leftUnionArray[i].setI64Const(static_cast(rightUnionArray[i].getIConst())); - break; - case EbtUint: - leftUnionArray[i].setI64Const(static_cast(rightUnionArray[i].getUConst())); - break; - case EbtInt64: - leftUnionArray[i] = rightUnionArray[i]; - break; - case EbtUint64: - leftUnionArray[i].setI64Const(static_cast(rightUnionArray[i].getU64Const())); - break; - case EbtBool: - leftUnionArray[i].setI64Const(static_cast(rightUnionArray[i].getBConst())); - break; - case EbtFloat: - case EbtDouble: -#ifdef AMD_EXTENSIONS - case EbtFloat16: -#endif - leftUnionArray[i].setI64Const(static_cast(rightUnionArray[i].getDConst())); - break; - default: - return node; - } - break; - case EbtUint64: - switch (node->getType().getBasicType()) { - case EbtInt: - leftUnionArray[i].setU64Const(static_cast(rightUnionArray[i].getIConst())); - break; - case EbtUint: - leftUnionArray[i].setU64Const(static_cast(rightUnionArray[i].getUConst())); - break; - case EbtInt64: - leftUnionArray[i].setU64Const(static_cast(rightUnionArray[i].getI64Const())); - break; - case EbtUint64: - leftUnionArray[i] = rightUnionArray[i]; - break; - case EbtBool: - leftUnionArray[i].setU64Const(static_cast(rightUnionArray[i].getBConst())); - break; - case EbtFloat: - case EbtDouble: -#ifdef AMD_EXTENSIONS - case EbtFloat16: -#endif - leftUnionArray[i].setU64Const(static_cast(rightUnionArray[i].getDConst())); - break; - default: - return node; - } - break; - default: - return node; - } - } - - const TType& t = node->getType(); - - return addConstantUnion(leftUnionArray, TType(promoteTo, t.getQualifier().storage, t.getVectorSize(), t.getMatrixCols(), t.getMatrixRows()), - node->getLoc()); -} - -void TIntermAggregate::addToPragmaTable(const 
TPragmaTable& pTable) -{ - assert(!pragmaTable); - pragmaTable = new TPragmaTable(); - *pragmaTable = pTable; -} - -// If either node is a specialization constant, while the other is -// a constant (or specialization constant), the result is still -// a specialization constant. -bool TIntermediate::specConstantPropagates(const TIntermTyped& node1, const TIntermTyped& node2) -{ - return (node1.getType().getQualifier().isSpecConstant() && node2.getType().getQualifier().isConstant()) || - (node2.getType().getQualifier().isSpecConstant() && node1.getType().getQualifier().isConstant()); -} - -struct TextureUpgradeAndSamplerRemovalTransform : public TIntermTraverser { - void visitSymbol(TIntermSymbol* symbol) override { - if (symbol->getBasicType() == EbtSampler && symbol->getType().getSampler().isTexture()) { - symbol->getWritableType().getSampler().combined = true; - } - } - bool visitAggregate(TVisit, TIntermAggregate* ag) override { - using namespace std; - TIntermSequence& seq = ag->getSequence(); - // remove pure sampler variables - TIntermSequence::iterator newEnd = remove_if(seq.begin(), seq.end(), [](TIntermNode* node) { - TIntermSymbol* symbol = node->getAsSymbolNode(); - if (!symbol) - return false; - - return (symbol->getBasicType() == EbtSampler && symbol->getType().getSampler().isPureSampler()); - }); - seq.erase(newEnd, seq.end()); - // replace constructors with sampler/textures - for_each(seq.begin(), seq.end(), [](TIntermNode*& node) { - TIntermAggregate *constructor = node->getAsAggregate(); - if (constructor && constructor->getOp() == EOpConstructTextureSampler) { - if (!constructor->getSequence().empty()) - node = constructor->getSequence()[0]; - } - }); - return true; - } -}; - -void TIntermediate::performTextureUpgradeAndSamplerRemovalTransformation(TIntermNode* root) -{ - TextureUpgradeAndSamplerRemovalTransform transform; - root->traverse(&transform); -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/LiveTraverser.h b/deps/glslang/glslang/MachineIndependent/LiveTraverser.h deleted file mode 100644 index 7333bc96..00000000 --- a/deps/glslang/glslang/MachineIndependent/LiveTraverser.h +++ /dev/null @@ -1,138 +0,0 @@ -// -// Copyright (C) 2016 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#pragma once - -#include "../Include/Common.h" -#include "reflection.h" -#include "localintermediate.h" - -#include "gl_types.h" - -#include -#include - -namespace glslang { - -// -// The traverser: mostly pass through, except -// - processing function-call nodes to push live functions onto the stack of functions to process -// - processing selection nodes to trim semantically dead code -// -// This is in the glslang namespace directly so it can be a friend of TReflection. -// This can be derived from to implement reflection database traversers or -// binding mappers: anything that wants to traverse the live subset of the tree. -// - -class TLiveTraverser : public TIntermTraverser { -public: - TLiveTraverser(const TIntermediate& i, bool traverseAll = false, - bool preVisit = true, bool inVisit = false, bool postVisit = false) : - TIntermTraverser(preVisit, inVisit, postVisit), - intermediate(i), traverseAll(traverseAll) - { } - - // - // Given a function name, find its subroot in the tree, and push it onto the stack of - // functions left to process. - // - void pushFunction(const TString& name) - { - TIntermSequence& globals = intermediate.getTreeRoot()->getAsAggregate()->getSequence(); - for (unsigned int f = 0; f < globals.size(); ++f) { - TIntermAggregate* candidate = globals[f]->getAsAggregate(); - if (candidate && candidate->getOp() == EOpFunction && candidate->getName() == name) { - functions.push_back(candidate); - break; - } - } - } - - typedef std::list TFunctionStack; - TFunctionStack functions; - -protected: - // To catch which function calls are not dead, and hence which functions must be visited. - virtual bool visitAggregate(TVisit, TIntermAggregate* node) - { - if (!traverseAll) - if (node->getOp() == EOpFunctionCall) - addFunctionCall(node); - - return true; // traverse this subtree - } - - // To prune semantically dead paths. - virtual bool visitSelection(TVisit /* visit */, TIntermSelection* node) - { - if (traverseAll) - return true; // traverse all code - - TIntermConstantUnion* constant = node->getCondition()->getAsConstantUnion(); - if (constant) { - // cull the path that is dead - if (constant->getConstArray()[0].getBConst() == true && node->getTrueBlock()) - node->getTrueBlock()->traverse(this); - if (constant->getConstArray()[0].getBConst() == false && node->getFalseBlock()) - node->getFalseBlock()->traverse(this); - - return false; // don't traverse any more, we did it all above - } else - return true; // traverse the whole subtree - } - - // Track live functions as well as uniforms, so that we don't visit dead functions - // and only visit each function once. 
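// --- Editorial note: illustrative sketch, not part of the original diff -------
// Two details of the traverser above are easy to miss: a called function is
// queued only the first time it is seen (liveFunctions doubles as a visited
// set), and an `if` whose condition already folded to a constant only has its
// live branch walked. A toy standalone version of that control flow, with a
// hypothetical Node type standing in for TIntermAggregate/TIntermSelection:
#include <iostream>
#include <memory>
#include <string>
#include <unordered_set>
#include <vector>

struct Node {
    std::string call;                     // non-empty -> a call to this function
    const bool* constCondition = nullptr; // non-null  -> an `if` with a constant condition
    std::vector<std::unique_ptr<Node>> trueBlock, falseBlock, children;
};

void traverseLive(const Node& n, std::unordered_set<std::string>& liveFunctions)
{
    if (!n.call.empty() && liveFunctions.insert(n.call).second)
        std::cout << "queue function: " << n.call << "\n";  // push onto the work list once

    if (n.constCondition) {                                  // cull the dead branch
        const auto& taken = *n.constCondition ? n.trueBlock : n.falseBlock;
        for (const auto& c : taken)
            traverseLive(*c, liveFunctions);
        return;                                              // never look at the other side
    }
    for (const auto& c : n.children)
        traverseLive(*c, liveFunctions);
}

int main()
{
    static const bool kFalse = false;
    auto deadIf = std::make_unique<Node>();
    deadIf->constCondition = &kFalse;
    auto a = std::make_unique<Node>(); a->call = "helperA";  // statically unreachable
    auto b = std::make_unique<Node>(); b->call = "helperB";
    deadIf->trueBlock.push_back(std::move(a));
    deadIf->falseBlock.push_back(std::move(b));

    Node root;
    root.children.push_back(std::move(deadIf));
    std::unordered_set<std::string> live;
    traverseLive(root, live);                                // prints only: queue function: helperB
}
// -------------------------------------------------------------------------------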
- void addFunctionCall(TIntermAggregate* call) - { - // // just use the map to ensure we process each function at most once - if (liveFunctions.find(call->getName()) == liveFunctions.end()) { - liveFunctions.insert(call->getName()); - pushFunction(call->getName()); - } - } - - const TIntermediate& intermediate; - typedef std::unordered_set TLiveFunctions; - TLiveFunctions liveFunctions; - bool traverseAll; - -private: - // prevent copy & copy construct - TLiveTraverser(TLiveTraverser&); - TLiveTraverser& operator=(TLiveTraverser&); -}; - -} // namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/ParseContextBase.cpp b/deps/glslang/glslang/MachineIndependent/ParseContextBase.cpp deleted file mode 100644 index 44fc0b47..00000000 --- a/deps/glslang/glslang/MachineIndependent/ParseContextBase.cpp +++ /dev/null @@ -1,583 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2016 Google, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -// Implement the TParseContextBase class. - -#include - -#include "ParseHelper.h" - -extern int yyparse(glslang::TParseContext*); - -namespace glslang { - -// -// Used to output syntax, parsing, and semantic errors. -// - -void TParseContextBase::outputMessage(const TSourceLoc& loc, const char* szReason, - const char* szToken, - const char* szExtraInfoFormat, - TPrefixType prefix, va_list args) -{ - const int maxSize = MaxTokenLength + 200; - char szExtraInfo[maxSize]; - - safe_vsprintf(szExtraInfo, maxSize, szExtraInfoFormat, args); - - infoSink.info.prefix(prefix); - infoSink.info.location(loc); - infoSink.info << "'" << szToken << "' : " << szReason << " " << szExtraInfo << "\n"; - - if (prefix == EPrefixError) { - ++numErrors; - } -} - -void C_DECL TParseContextBase::error(const TSourceLoc& loc, const char* szReason, const char* szToken, - const char* szExtraInfoFormat, ...) 
-{ - if (messages & EShMsgOnlyPreprocessor) - return; - va_list args; - va_start(args, szExtraInfoFormat); - outputMessage(loc, szReason, szToken, szExtraInfoFormat, EPrefixError, args); - va_end(args); - - if ((messages & EShMsgCascadingErrors) == 0) - currentScanner->setEndOfInput(); -} - -void C_DECL TParseContextBase::warn(const TSourceLoc& loc, const char* szReason, const char* szToken, - const char* szExtraInfoFormat, ...) -{ - if (suppressWarnings()) - return; - va_list args; - va_start(args, szExtraInfoFormat); - outputMessage(loc, szReason, szToken, szExtraInfoFormat, EPrefixWarning, args); - va_end(args); -} - -void C_DECL TParseContextBase::ppError(const TSourceLoc& loc, const char* szReason, const char* szToken, - const char* szExtraInfoFormat, ...) -{ - va_list args; - va_start(args, szExtraInfoFormat); - outputMessage(loc, szReason, szToken, szExtraInfoFormat, EPrefixError, args); - va_end(args); - - if ((messages & EShMsgCascadingErrors) == 0) - currentScanner->setEndOfInput(); -} - -void C_DECL TParseContextBase::ppWarn(const TSourceLoc& loc, const char* szReason, const char* szToken, - const char* szExtraInfoFormat, ...) -{ - va_list args; - va_start(args, szExtraInfoFormat); - outputMessage(loc, szReason, szToken, szExtraInfoFormat, EPrefixWarning, args); - va_end(args); -} - -// -// Both test and if necessary, spit out an error, to see if the node is really -// an l-value that can be operated on this way. -// -// Returns true if there was an error. -// -bool TParseContextBase::lValueErrorCheck(const TSourceLoc& loc, const char* op, TIntermTyped* node) -{ - TIntermBinary* binaryNode = node->getAsBinaryNode(); - - if (binaryNode) { - switch(binaryNode->getOp()) { - case EOpIndexDirect: - case EOpIndexIndirect: // fall through - case EOpIndexDirectStruct: // fall through - case EOpVectorSwizzle: - case EOpMatrixSwizzle: - return lValueErrorCheck(loc, op, binaryNode->getLeft()); - default: - break; - } - error(loc, " l-value required", op, "", ""); - - return true; - } - - const char* symbol = nullptr; - TIntermSymbol* symNode = node->getAsSymbolNode(); - if (symNode != nullptr) - symbol = symNode->getName().c_str(); - - const char* message = nullptr; - switch (node->getQualifier().storage) { - case EvqConst: message = "can't modify a const"; break; - case EvqConstReadOnly: message = "can't modify a const"; break; - case EvqUniform: message = "can't modify a uniform"; break; - case EvqBuffer: - if (node->getQualifier().readonly) - message = "can't modify a readonly buffer"; - break; - - default: - // - // Type that can't be written to? - // - switch (node->getBasicType()) { - case EbtSampler: - message = "can't modify a sampler"; - break; - case EbtAtomicUint: - message = "can't modify an atomic_uint"; - break; - case EbtVoid: - message = "can't modify void"; - break; - default: - break; - } - } - - if (message == nullptr && binaryNode == nullptr && symNode == nullptr) { - error(loc, " l-value required", op, "", ""); - - return true; - } - - // - // Everything else is okay, no error. - // - if (message == nullptr) - return false; - - // - // If we get here, we have an error and a message. - // - if (symNode) - error(loc, " l-value required", op, "\"%s\" (%s)", symbol, message); - else - error(loc, " l-value required", op, "(%s)", message); - - return true; -} - -// Test for and give an error if the node can't be read from. -void TParseContextBase::rValueErrorCheck(const TSourceLoc& loc, const char* op, TIntermTyped* node) -{ - if (! 
node) - return; - - TIntermBinary* binaryNode = node->getAsBinaryNode(); - if (binaryNode) { - switch(binaryNode->getOp()) { - case EOpIndexDirect: - case EOpIndexIndirect: - case EOpIndexDirectStruct: - case EOpVectorSwizzle: - case EOpMatrixSwizzle: - rValueErrorCheck(loc, op, binaryNode->getLeft()); - default: - break; - } - - return; - } - - TIntermSymbol* symNode = node->getAsSymbolNode(); - if (symNode && symNode->getQualifier().writeonly) - error(loc, "can't read from writeonly object: ", op, symNode->getName().c_str()); -} - -// Add 'symbol' to the list of deferred linkage symbols, which -// are later processed in finish(), at which point the symbol -// must still be valid. -// It is okay if the symbol's type will be subsequently edited; -// the modifications will be tracked. -void TParseContextBase::trackLinkage(TSymbol& symbol) -{ - if (!parsingBuiltins) - linkageSymbols.push_back(&symbol); -} - -// Make a shared symbol have a non-shared version that can be edited by the current -// compile, such that editing its type will not change the shared version and will -// effect all nodes already sharing it (non-shallow type), -// or adopting its full type after being edited (shallow type). -void TParseContextBase::makeEditable(TSymbol*& symbol) -{ - // copyUp() does a deep copy of the type. - symbol = symbolTable.copyUp(symbol); - - // Save it (deferred, so it can be edited first) in the AST for linker use. - if (symbol) - trackLinkage(*symbol); -} - -// Return a writable version of the variable 'name'. -// -// Return nullptr if 'name' is not found. This should mean -// something is seriously wrong (e.g., compiler asking self for -// built-in that doesn't exist). -TVariable* TParseContextBase::getEditableVariable(const char* name) -{ - bool builtIn; - TSymbol* symbol = symbolTable.find(name, &builtIn); - - assert(symbol != nullptr); - if (symbol == nullptr) - return nullptr; - - if (builtIn) - makeEditable(symbol); - - return symbol->getAsVariable(); -} - -// Select the best matching function for 'call' from 'candidateList'. -// -// Assumptions -// -// There is no exact match, so a selection algorithm needs to run. That is, the -// language-specific handler should check for exact match first, to -// decide what to do, before calling this selector. -// -// Input -// -// * list of candidate signatures to select from -// * the call -// * a predicate function convertible(from, to) that says whether or not type -// 'from' can implicitly convert to type 'to' (it includes the case of what -// the calling language would consider a matching type with no conversion -// needed) -// * a predicate function better(from1, from2, to1, to2) that says whether or -// not a conversion from <-> to2 is considered better than a conversion -// from <-> to1 (both in and out directions need testing, as declared by the -// formal parameter) -// -// Output -// -// * best matching candidate (or none, if no viable candidates found) -// * whether there was a tie for the best match (ambiguous overload selection, -// caller's choice for how to report) -// -const TFunction* TParseContextBase::selectFunction( - const TVector candidateList, - const TFunction& call, - std::function convertible, - std::function better, - /* output */ bool& tie) -{ -// -// Operation -// -// 1. 
Prune the input list of candidates down to a list of viable candidates, -// where each viable candidate has -// -// * at least as many parameters as there are calling arguments, with any -// remaining parameters being optional or having default values -// * each parameter is true under convertible(A, B), where A is the calling -// type for in and B is the formal type, and in addition, for out B is the -// calling type and A is the formal type -// -// 2. If there are no viable candidates, return with no match. -// -// 3. If there is only one viable candidate, it is the best match. -// -// 4. If there are multiple viable candidates, select the first viable candidate -// as the incumbent. Compare the incumbent to the next viable candidate, and if -// that candidate is better (bullets below), make it the incumbent. Repeat, with -// a linear walk through the viable candidate list. The final incumbent will be -// returned as the best match. A viable candidate is better than the incumbent if -// -// * it has a function argument with a better(...) conversion than the incumbent, -// for all directions needed by in and out -// * the incumbent has no argument with a better(...) conversion then the -// candidate, for either in or out (as needed) -// -// 5. Check for ambiguity by comparing the best match against all other viable -// candidates. If any other viable candidate has a function argument with a -// better(...) conversion than the best candidate (for either in or out -// directions), return that there was a tie for best. -// - - tie = false; - - // 1. prune to viable... - TVector viableCandidates; - for (auto it = candidateList.begin(); it != candidateList.end(); ++it) { - const TFunction& candidate = *(*it); - - // to even be a potential match, number of arguments must be >= the number of - // fixed (non-default) parameters, and <= the total (including parameter with defaults). - if (call.getParamCount() < candidate.getFixedParamCount() || - call.getParamCount() > candidate.getParamCount()) - continue; - - // see if arguments are convertible - bool viable = true; - - // The call can have fewer parameters than the candidate, if some have defaults. - const int paramCount = std::min(call.getParamCount(), candidate.getParamCount()); - for (int param = 0; param < paramCount; ++param) { - if (candidate[param].type->getQualifier().isParamInput()) { - if (! convertible(*call[param].type, *candidate[param].type, candidate.getBuiltInOp(), param)) { - viable = false; - break; - } - } - if (candidate[param].type->getQualifier().isParamOutput()) { - if (! convertible(*candidate[param].type, *call[param].type, candidate.getBuiltInOp(), param)) { - viable = false; - break; - } - } - } - - if (viable) - viableCandidates.push_back(&candidate); - } - - // 2. none viable... - if (viableCandidates.size() == 0) - return nullptr; - - // 3. only one viable... - if (viableCandidates.size() == 1) - return viableCandidates.front(); - - // 4. find best... 
- const auto betterParam = [&call, &better](const TFunction& can1, const TFunction& can2) -> bool { - // is call -> can2 better than call -> can1 for any parameter - bool hasBetterParam = false; - for (int param = 0; param < call.getParamCount(); ++param) { - if (better(*call[param].type, *can1[param].type, *can2[param].type)) { - hasBetterParam = true; - break; - } - } - return hasBetterParam; - }; - - const auto equivalentParams = [&call, &better](const TFunction& can1, const TFunction& can2) -> bool { - // is call -> can2 equivalent to call -> can1 for all the call parameters? - for (int param = 0; param < call.getParamCount(); ++param) { - if (better(*call[param].type, *can1[param].type, *can2[param].type) || - better(*call[param].type, *can2[param].type, *can1[param].type)) - return false; - } - return true; - }; - - const TFunction* incumbent = viableCandidates.front(); - for (auto it = viableCandidates.begin() + 1; it != viableCandidates.end(); ++it) { - const TFunction& candidate = *(*it); - if (betterParam(*incumbent, candidate) && ! betterParam(candidate, *incumbent)) - incumbent = &candidate; - } - - // 5. ambiguity... - for (auto it = viableCandidates.begin(); it != viableCandidates.end(); ++it) { - if (incumbent == *it) - continue; - const TFunction& candidate = *(*it); - - // In the case of default parameters, it may have an identical initial set, which is - // also ambiguous - if (betterParam(*incumbent, candidate) || equivalentParams(*incumbent, candidate)) - tie = true; - } - - return incumbent; -} - -// -// Look at a '.' field selector string and change it into numerical selectors -// for a vector or scalar. -// -// Always return some form of swizzle, so the result is always usable. -// -void TParseContextBase::parseSwizzleSelector(const TSourceLoc& loc, const TString& compString, int vecSize, - TSwizzleSelectors& selector) -{ - // Too long? - if (compString.size() > MaxSwizzleSelectors) - error(loc, "vector swizzle too long", compString.c_str(), ""); - - // Use this to test that all swizzle characters are from the same swizzle-namespace-set - enum { - exyzw, - ergba, - estpq, - } fieldSet[MaxSwizzleSelectors]; - - // Decode the swizzle string. - int size = std::min(MaxSwizzleSelectors, (int)compString.size()); - for (int i = 0; i < size; ++i) { - switch (compString[i]) { - case 'x': - selector.push_back(0); - fieldSet[i] = exyzw; - break; - case 'r': - selector.push_back(0); - fieldSet[i] = ergba; - break; - case 's': - selector.push_back(0); - fieldSet[i] = estpq; - break; - - case 'y': - selector.push_back(1); - fieldSet[i] = exyzw; - break; - case 'g': - selector.push_back(1); - fieldSet[i] = ergba; - break; - case 't': - selector.push_back(1); - fieldSet[i] = estpq; - break; - - case 'z': - selector.push_back(2); - fieldSet[i] = exyzw; - break; - case 'b': - selector.push_back(2); - fieldSet[i] = ergba; - break; - case 'p': - selector.push_back(2); - fieldSet[i] = estpq; - break; - - case 'w': - selector.push_back(3); - fieldSet[i] = exyzw; - break; - case 'a': - selector.push_back(3); - fieldSet[i] = ergba; - break; - case 'q': - selector.push_back(3); - fieldSet[i] = estpq; - break; - - default: - error(loc, "unknown swizzle selection", compString.c_str(), ""); - break; - } - } - - // Additional error checking. 
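// --- Editorial note: illustrative sketch, not part of the original diff -------
// The decode loop above maps each selector character to a component index and
// records which of the three naming sets (xyzw / rgba / stpq) it came from; the
// error checks that follow reject unknown characters, mixed sets, and components
// past the vector's size. A condensed standalone sketch of the same decoding,
// simplified to return an empty result instead of issuing parse errors:
#include <iostream>
#include <string>
#include <vector>

std::vector<int> decodeSwizzle(const std::string& swizzle, int vecSize)
{
    static const std::string sets[3] = { "xyzw", "rgba", "stpq" };
    std::vector<int> selectors;
    int usedSet = -1;
    if (swizzle.size() > 4)
        return {};                                     // vector swizzle too long
    for (char c : swizzle) {
        bool found = false;
        for (int s = 0; s < 3 && !found; ++s) {
            const size_t pos = sets[s].find(c);
            if (pos == std::string::npos)
                continue;
            if (usedSet != -1 && usedSet != s)
                return {};                             // selectors not from the same set
            if (static_cast<int>(pos) >= vecSize)
                return {};                             // selection out of range
            usedSet = s;
            selectors.push_back(static_cast<int>(pos));
            found = true;
        }
        if (!found)
            return {};                                 // unknown swizzle selection
    }
    return selectors;
}

int main()
{
    for (int i : decodeSwizzle("yzx", 3)) std::cout << i << ' ';   // prints: 1 2 0
    std::cout << '\n' << decodeSwizzle("xg", 4).size() << '\n';    // mixed sets -> 0
}
// -------------------------------------------------------------------------------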
- for (int i = 0; i < selector.size(); ++i) { - if (selector[i] >= vecSize) { - error(loc, "vector swizzle selection out of range", compString.c_str(), ""); - selector.resize(i); - break; - } - - if (i > 0 && fieldSet[i] != fieldSet[i-1]) { - error(loc, "vector swizzle selectors not from the same set", compString.c_str(), ""); - selector.resize(i); - break; - } - } - - // Ensure it is valid. - if (selector.size() == 0) - selector.push_back(0); -} - -// -// Make the passed-in variable information become a member of the -// global uniform block. If this doesn't exist yet, make it. -// -void TParseContextBase::growGlobalUniformBlock(const TSourceLoc& loc, TType& memberType, const TString& memberName, TTypeList* typeList) -{ - // Make the global block, if not yet made. - if (globalUniformBlock == nullptr) { - TQualifier blockQualifier; - blockQualifier.clear(); - blockQualifier.storage = EvqUniform; - TType blockType(new TTypeList, *NewPoolTString(getGlobalUniformBlockName()), blockQualifier); - setUniformBlockDefaults(blockType); - globalUniformBlock = new TVariable(NewPoolTString(""), blockType, true); - firstNewMember = 0; - } - - // Add the requested member as a member to the global block. - TType* type = new TType; - type->shallowCopy(memberType); - type->setFieldName(memberName); - if (typeList) - type->setStruct(typeList); - TTypeLoc typeLoc = {type, loc}; - globalUniformBlock->getType().getWritableStruct()->push_back(typeLoc); - - // Insert into the symbol table. - if (firstNewMember == 0) { - // This is the first request; we need a normal symbol table insert - if (symbolTable.insert(*globalUniformBlock)) - trackLinkage(*globalUniformBlock); - else - error(loc, "failed to insert the global constant buffer", "uniform", ""); - } else { - // This is a follow-on request; we need to amend the first insert - symbolTable.amend(*globalUniformBlock, firstNewMember); - } - - ++firstNewMember; -} - -void TParseContextBase::finish() -{ - if (parsingBuiltins) - return; - - // Transfer the linkage symbols to AST nodes - TIntermAggregate* linkage = new TIntermAggregate; - for (auto i = linkageSymbols.begin(); i != linkageSymbols.end(); ++i) - intermediate.addSymbolLinkageNode(linkage, **i); - intermediate.addSymbolLinkageNodes(linkage, getLanguage(), symbolTable); -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/ParseHelper.cpp b/deps/glslang/glslang/MachineIndependent/ParseHelper.cpp deleted file mode 100644 index 6decfea7..00000000 --- a/deps/glslang/glslang/MachineIndependent/ParseHelper.cpp +++ /dev/null @@ -1,6474 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2012-2015 LunarG, Inc. -// Copyright (C) 2015-2016 Google, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#include "ParseHelper.h" -#include "Scan.h" - -#include "../OSDependent/osinclude.h" -#include - -#include "preprocessor/PpContext.h" - -extern int yyparse(glslang::TParseContext*); - -namespace glslang { - -TParseContext::TParseContext(TSymbolTable& symbolTable, TIntermediate& interm, bool parsingBuiltins, - int version, EProfile profile, const SpvVersion& spvVersion, EShLanguage language, - TInfoSink& infoSink, bool forwardCompatible, EShMessages messages) : - TParseContextBase(symbolTable, interm, parsingBuiltins, version, profile, spvVersion, language, - infoSink, forwardCompatible, messages), - inMain(false), - blockName(nullptr), - limits(resources.limits), - atomicUintOffsets(nullptr), anyIndexLimits(false) -{ - // decide whether precision qualifiers should be ignored or respected - if (profile == EEsProfile || spvVersion.vulkan > 0) { - precisionManager.respectPrecisionQualifiers(); - if (! parsingBuiltins && language == EShLangFragment && profile != EEsProfile && spvVersion.vulkan > 0) - precisionManager.warnAboutDefaults(); - } - - setPrecisionDefaults(); - - globalUniformDefaults.clear(); - globalUniformDefaults.layoutMatrix = ElmColumnMajor; - globalUniformDefaults.layoutPacking = spvVersion.spv != 0 ? ElpStd140 : ElpShared; - - globalBufferDefaults.clear(); - globalBufferDefaults.layoutMatrix = ElmColumnMajor; - globalBufferDefaults.layoutPacking = spvVersion.spv != 0 ? ElpStd430 : ElpShared; - - globalInputDefaults.clear(); - globalOutputDefaults.clear(); - - // "Shaders in the transform - // feedback capturing mode have an initial global default of - // layout(xfb_buffer = 0) out;" - if (language == EShLangVertex || - language == EShLangTessControl || - language == EShLangTessEvaluation || - language == EShLangGeometry) - globalOutputDefaults.layoutXfbBuffer = 0; - - if (language == EShLangGeometry) - globalOutputDefaults.layoutStream = 0; -} - -TParseContext::~TParseContext() -{ - delete [] atomicUintOffsets; -} - -// Set up all default precisions as needed by the current environment. -// Intended just as a TParseContext constructor helper. -void TParseContext::setPrecisionDefaults() -{ - // Set all precision defaults to EpqNone, which is correct for all types - // when not obeying precision qualifiers, and correct for types that don't - // have defaults (thus getting an error on use) when obeying precision - // qualifiers. 
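// --- Editorial note: simplified sketch, not part of the original diff ---------
// The loops below first clear every default to EpqNone and only then install
// real defaults when precision qualifiers are being obeyed. For the ordinary
// numeric types the rules collapse to a small lookup (sampler, atomic_uint and
// the built-in-parsing special case are omitted here):
#include <iostream>

enum class Precision { None, Medium, High };

Precision defaultNumericPrecision(bool obeyQualifiers, bool esProfile, bool fragmentStage, bool isFloat)
{
    if (!obeyQualifiers)
        return Precision::None;             // qualifiers ignored: no defaults needed
    if (esProfile && fragmentStage)
        return isFloat ? Precision::None    // ES fragment float has no default, must be qualified
                       : Precision::Medium; // ES fragment int/uint default to mediump
    return Precision::High;                 // everything else defaults to highp
}

int main()
{
    std::cout << (defaultNumericPrecision(true, true, true, true)   == Precision::None) << '\n'; // 1
    std::cout << (defaultNumericPrecision(true, false, false, true) == Precision::High) << '\n'; // 1
}
// -------------------------------------------------------------------------------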
- - for (int type = 0; type < EbtNumTypes; ++type) - defaultPrecision[type] = EpqNone; - - for (int type = 0; type < maxSamplerIndex; ++type) - defaultSamplerPrecision[type] = EpqNone; - - // replace with real precision defaults for those that have them - if (obeyPrecisionQualifiers()) { - if (profile == EEsProfile) { - // Most don't have defaults, a few default to lowp. - TSampler sampler; - sampler.set(EbtFloat, Esd2D); - defaultSamplerPrecision[computeSamplerTypeIndex(sampler)] = EpqLow; - sampler.set(EbtFloat, EsdCube); - defaultSamplerPrecision[computeSamplerTypeIndex(sampler)] = EpqLow; - sampler.set(EbtFloat, Esd2D); - sampler.external = true; - defaultSamplerPrecision[computeSamplerTypeIndex(sampler)] = EpqLow; - } else { - // Non-ES profile - // All default to highp. - for (int type = 0; type < maxSamplerIndex; ++type) - defaultSamplerPrecision[type] = EpqHigh; - } - - // If we are parsing built-in computational variables/functions, it is meaningful to record - // whether the built-in has no precision qualifier, as that ambiguity - // is used to resolve the precision from the supplied arguments/operands instead. - // So, we don't actually want to replace EpqNone with a default precision for built-ins. - if (! parsingBuiltins) { - if (profile == EEsProfile && language == EShLangFragment) { - defaultPrecision[EbtInt] = EpqMedium; - defaultPrecision[EbtUint] = EpqMedium; - } else { - defaultPrecision[EbtInt] = EpqHigh; - defaultPrecision[EbtUint] = EpqHigh; - defaultPrecision[EbtFloat] = EpqHigh; - } - } - - defaultPrecision[EbtSampler] = EpqLow; - defaultPrecision[EbtAtomicUint] = EpqHigh; - } -} - -void TParseContext::setLimits(const TBuiltInResource& r) -{ - resources = r; - - anyIndexLimits = ! limits.generalAttributeMatrixVectorIndexing || - ! limits.generalConstantMatrixVectorIndexing || - ! limits.generalSamplerIndexing || - ! limits.generalUniformIndexing || - ! limits.generalVariableIndexing || - ! limits.generalVaryingIndexing; - - intermediate.setLimits(resources); - - // "Each binding point tracks its own current default offset for - // inheritance of subsequent variables using the same binding. The initial state of compilation is that all - // binding points have an offset of 0." - atomicUintOffsets = new int[resources.maxAtomicCounterBindings]; - for (int b = 0; b < resources.maxAtomicCounterBindings; ++b) - atomicUintOffsets[b] = 0; -} - -// -// Parse an array of strings using yyparse, going through the -// preprocessor to tokenize the shader strings, then through -// the GLSL scanner. -// -// Returns true for successful acceptance of the shader, false if any errors. -// -bool TParseContext::parseShaderStrings(TPpContext& ppContext, TInputScanner& input, bool versionWillBeError) -{ - currentScanner = &input; - ppContext.setInput(input, versionWillBeError); - yyparse(this); - - finish(); - - return numErrors == 0; -} - -// This is called from bison when it has a parse (syntax) error -// Note though that to stop cascading errors, we set EOF, which -// will usually cause a syntax error, so be more accurate that -// compilation is terminating. -void TParseContext::parserError(const char* s) -{ - if (! 
getScanner()->atEndOfInput() || numErrors == 0) - error(getCurrentLoc(), "", "", s, ""); - else - error(getCurrentLoc(), "compilation terminated", "", ""); -} - -void TParseContext::handlePragma(const TSourceLoc& loc, const TVector& tokens) -{ - if (pragmaCallback) - pragmaCallback(loc.line, tokens); - - if (tokens.size() == 0) - return; - - if (tokens[0].compare("optimize") == 0) { - if (tokens.size() != 4) { - error(loc, "optimize pragma syntax is incorrect", "#pragma", ""); - return; - } - - if (tokens[1].compare("(") != 0) { - error(loc, "\"(\" expected after 'optimize' keyword", "#pragma", ""); - return; - } - - if (tokens[2].compare("on") == 0) - contextPragma.optimize = true; - else if (tokens[2].compare("off") == 0) - contextPragma.optimize = false; - else { - error(loc, "\"on\" or \"off\" expected after '(' for 'optimize' pragma", "#pragma", ""); - return; - } - - if (tokens[3].compare(")") != 0) { - error(loc, "\")\" expected to end 'optimize' pragma", "#pragma", ""); - return; - } - } else if (tokens[0].compare("debug") == 0) { - if (tokens.size() != 4) { - error(loc, "debug pragma syntax is incorrect", "#pragma", ""); - return; - } - - if (tokens[1].compare("(") != 0) { - error(loc, "\"(\" expected after 'debug' keyword", "#pragma", ""); - return; - } - - if (tokens[2].compare("on") == 0) - contextPragma.debug = true; - else if (tokens[2].compare("off") == 0) - contextPragma.debug = false; - else { - error(loc, "\"on\" or \"off\" expected after '(' for 'debug' pragma", "#pragma", ""); - return; - } - - if (tokens[3].compare(")") != 0) { - error(loc, "\")\" expected to end 'debug' pragma", "#pragma", ""); - return; - } - } else if (spvVersion.spv > 0 && tokens[0].compare("use_storage_buffer") == 0) { - if (tokens.size() != 1) - error(loc, "extra tokens", "#pragma", ""); - intermediate.setUseStorageBuffer(); - } -} - -// -// Handle seeing a variable identifier in the grammar. -// -TIntermTyped* TParseContext::handleVariable(const TSourceLoc& loc, TSymbol* symbol, const TString* string) -{ - TIntermTyped* node = nullptr; - - // Error check for requiring specific extensions present. - if (symbol && symbol->getNumExtensions()) - requireExtensions(loc, symbol->getNumExtensions(), symbol->getExtensions(), symbol->getName().c_str()); - - if (symbol && symbol->isReadOnly()) { - // All shared things containing an implicitly sized array must be copied up - // on first use, so that all future references will share its array structure, - // so that editing the implicit size will effect all nodes consuming it, - // and so that editing the implicit size won't change the shared one. - // - // If this is a variable or a block, check it and all it contains, but if this - // is a member of an anonymous block, check the whole block, as the whole block - // will need to be copied up if it contains an implicitly-sized array. - if (symbol->getType().containsImplicitlySizedArray() || - (symbol->getAsAnonMember() && - symbol->getAsAnonMember()->getAnonContainer().getType().containsImplicitlySizedArray())) - makeEditable(symbol); - } - - const TVariable* variable; - const TAnonMember* anon = symbol ? symbol->getAsAnonMember() : nullptr; - if (anon) { - // It was a member of an anonymous container. - - // The "getNumExtensions()" mechanism above doesn't yet work for block members - blockMemberExtensionCheck(loc, nullptr, *string); - - // Create a subtree for its dereference. 
- variable = anon->getAnonContainer().getAsVariable(); - TIntermTyped* container = intermediate.addSymbol(*variable, loc); - TIntermTyped* constNode = intermediate.addConstantUnion(anon->getMemberNumber(), loc); - node = intermediate.addIndex(EOpIndexDirectStruct, container, constNode, loc); - - node->setType(*(*variable->getType().getStruct())[anon->getMemberNumber()].type); - if (node->getType().hiddenMember()) - error(loc, "member of nameless block was not redeclared", string->c_str(), ""); - } else { - // Not a member of an anonymous container. - - // The symbol table search was done in the lexical phase. - // See if it was a variable. - variable = symbol ? symbol->getAsVariable() : nullptr; - if (variable) { - if ((variable->getType().getBasicType() == EbtBlock || - variable->getType().getBasicType() == EbtStruct) && variable->getType().getStruct() == nullptr) { - error(loc, "cannot be used (maybe an instance name is needed)", string->c_str(), ""); - variable = nullptr; - } - } else { - if (symbol) - error(loc, "variable name expected", string->c_str(), ""); - } - - // Recovery, if it wasn't found or was not a variable. - if (! variable) - variable = new TVariable(string, TType(EbtVoid)); - - if (variable->getType().getQualifier().isFrontEndConstant()) - node = intermediate.addConstantUnion(variable->getConstArray(), variable->getType(), loc); - else - node = intermediate.addSymbol(*variable, loc); - } - - if (variable->getType().getQualifier().isIo()) - intermediate.addIoAccessed(*string); - - return node; -} - -// -// Handle seeing a base[index] dereference in the grammar. -// -TIntermTyped* TParseContext::handleBracketDereference(const TSourceLoc& loc, TIntermTyped* base, TIntermTyped* index) -{ - TIntermTyped* result = nullptr; - - int indexValue = 0; - if (index->getQualifier().isFrontEndConstant()) - indexValue = index->getAsConstantUnion()->getConstArray()[0].getIConst(); - - variableCheck(base); - if (! base->isArray() && ! base->isMatrix() && ! base->isVector()) { - if (base->getAsSymbolNode()) - error(loc, " left of '[' is not of type array, matrix, or vector ", base->getAsSymbolNode()->getName().c_str(), ""); - else - error(loc, " left of '[' is not of type array, matrix, or vector ", "expression", ""); - } else if (base->getType().getQualifier().isFrontEndConstant() && index->getQualifier().isFrontEndConstant()) { - // both base and index are front-end constants - checkIndex(loc, base->getType(), indexValue); - return intermediate.foldDereference(base, indexValue, loc); - } else { - // at least one of base and index is not a front-end constant variable... 
- - if (base->getAsSymbolNode() && isIoResizeArray(base->getType())) - handleIoResizeArrayAccess(loc, base); - - if (index->getQualifier().isFrontEndConstant()) { - if (base->getType().isImplicitlySizedArray()) - updateImplicitArraySize(loc, base, indexValue); - else - checkIndex(loc, base->getType(), indexValue); - result = intermediate.addIndex(EOpIndexDirect, base, index, loc); - } else { - if (base->getType().isImplicitlySizedArray()) { - if (base->getAsSymbolNode() && isIoResizeArray(base->getType())) - error(loc, "", "[", "array must be sized by a redeclaration or layout qualifier before being indexed with a variable"); - else - error(loc, "", "[", "array must be redeclared with a size before being indexed with a variable"); - } - if (base->getBasicType() == EbtBlock) { - if (base->getQualifier().storage == EvqBuffer) - requireProfile(base->getLoc(), ~EEsProfile, "variable indexing buffer block array"); - else if (base->getQualifier().storage == EvqUniform) - profileRequires(base->getLoc(), EEsProfile, 320, Num_AEP_gpu_shader5, AEP_gpu_shader5, - "variable indexing uniform block array"); - else { - // input/output blocks either don't exist or can be variable indexed - } - } else if (language == EShLangFragment && base->getQualifier().isPipeOutput()) - requireProfile(base->getLoc(), ~EEsProfile, "variable indexing fragment shader output array"); - else if (base->getBasicType() == EbtSampler && version >= 130) { - const char* explanation = "variable indexing sampler array"; - requireProfile(base->getLoc(), EEsProfile | ECoreProfile | ECompatibilityProfile, explanation); - profileRequires(base->getLoc(), EEsProfile, 320, Num_AEP_gpu_shader5, AEP_gpu_shader5, explanation); - profileRequires(base->getLoc(), ECoreProfile | ECompatibilityProfile, 400, nullptr, explanation); - } - - result = intermediate.addIndex(EOpIndexIndirect, base, index, loc); - } - } - - if (result == nullptr) { - // Insert dummy error-recovery result - result = intermediate.addConstantUnion(0.0, EbtFloat, loc); - } else { - // Insert valid dereferenced result - TType newType(base->getType(), 0); // dereferenced type - if (base->getType().getQualifier().isConstant() && index->getQualifier().isConstant()) { - newType.getQualifier().storage = EvqConst; - // If base or index is a specialization constant, the result should also be a specialization constant. 
- if (base->getType().getQualifier().isSpecConstant() || index->getQualifier().isSpecConstant()) { - newType.getQualifier().makeSpecConstant(); - } - } else { - newType.getQualifier().makePartialTemporary(); - } - result->setType(newType); - - if (anyIndexLimits) - handleIndexLimits(loc, base, index); - } - - return result; -} - -void TParseContext::checkIndex(const TSourceLoc& loc, const TType& type, int& index) -{ - if (index < 0) { - error(loc, "", "[", "index out of range '%d'", index); - index = 0; - } else if (type.isArray()) { - if (type.isExplicitlySizedArray() && index >= type.getOuterArraySize()) { - error(loc, "", "[", "array index out of range '%d'", index); - index = type.getOuterArraySize() - 1; - } - } else if (type.isVector()) { - if (index >= type.getVectorSize()) { - error(loc, "", "[", "vector index out of range '%d'", index); - index = type.getVectorSize() - 1; - } - } else if (type.isMatrix()) { - if (index >= type.getMatrixCols()) { - error(loc, "", "[", "matrix index out of range '%d'", index); - index = type.getMatrixCols() - 1; - } - } -} - -// for ES 2.0 (version 100) limitations for almost all index operations except vertex-shader uniforms -void TParseContext::handleIndexLimits(const TSourceLoc& /*loc*/, TIntermTyped* base, TIntermTyped* index) -{ - if ((! limits.generalSamplerIndexing && base->getBasicType() == EbtSampler) || - (! limits.generalUniformIndexing && base->getQualifier().isUniformOrBuffer() && language != EShLangVertex) || - (! limits.generalAttributeMatrixVectorIndexing && base->getQualifier().isPipeInput() && language == EShLangVertex && (base->getType().isMatrix() || base->getType().isVector())) || - (! limits.generalConstantMatrixVectorIndexing && base->getAsConstantUnion()) || - (! limits.generalVariableIndexing && ! base->getType().getQualifier().isUniformOrBuffer() && - ! base->getType().getQualifier().isPipeInput() && - ! base->getType().getQualifier().isPipeOutput() && - ! base->getType().getQualifier().isConstant()) || - (! limits.generalVaryingIndexing && (base->getType().getQualifier().isPipeInput() || - base->getType().getQualifier().isPipeOutput()))) { - // it's too early to know what the inductive variables are, save it for post processing - needsIndexLimitationChecking.push_back(index); - } -} - -// Make a shared symbol have a non-shared version that can be edited by the current -// compile, such that editing its type will not change the shared version and will -// effect all nodes sharing it. -void TParseContext::makeEditable(TSymbol*& symbol) -{ - TParseContextBase::makeEditable(symbol); - - // See if it's tied to IO resizing - if (isIoResizeArray(symbol->getType())) - ioArraySymbolResizeList.push_back(symbol); -} - -// Return true if this is a geometry shader input array or tessellation control output array. -bool TParseContext::isIoResizeArray(const TType& type) const -{ - return type.isArray() && - ((language == EShLangGeometry && type.getQualifier().storage == EvqVaryingIn) || - (language == EShLangTessControl && type.getQualifier().storage == EvqVaryingOut && ! type.getQualifier().patch)); -} - -// If an array is not isIoResizeArray() but is an io array, make sure it has the right size -void TParseContext::fixIoArraySize(const TSourceLoc& loc, TType& type) -{ - if (! type.isArray() || type.getQualifier().patch || symbolTable.atBuiltInLevel()) - return; - - assert(! 
isIoResizeArray(type)); - - if (type.getQualifier().storage != EvqVaryingIn || type.getQualifier().patch) - return; - - if (language == EShLangTessControl || language == EShLangTessEvaluation) { - if (type.getOuterArraySize() != resources.maxPatchVertices) { - if (type.isExplicitlySizedArray()) - error(loc, "tessellation input array size must be gl_MaxPatchVertices or implicitly sized", "[]", ""); - type.changeOuterArraySize(resources.maxPatchVertices); - } - } -} - -// Issue any errors if the non-array object is missing arrayness WRT -// shader I/O that has array requirements. -// All arrayness checking is handled in array paths, this is for -void TParseContext::ioArrayCheck(const TSourceLoc& loc, const TType& type, const TString& identifier) -{ - if (! type.isArray() && ! symbolTable.atBuiltInLevel()) { - if (type.getQualifier().isArrayedIo(language) -#ifdef NV_EXTENSIONS - && !type.getQualifier().layoutPassthrough -#endif - ) - error(loc, "type must be an array:", type.getStorageQualifierString(), identifier.c_str()); - } -} - -// Handle a dereference of a geometry shader input array or tessellation control output array. -// See ioArraySymbolResizeList comment in ParseHelper.h. -// -void TParseContext::handleIoResizeArrayAccess(const TSourceLoc& /*loc*/, TIntermTyped* base) -{ - TIntermSymbol* symbolNode = base->getAsSymbolNode(); - assert(symbolNode); - if (! symbolNode) - return; - - // fix array size, if it can be fixed and needs to be fixed (will allow variable indexing) - if (symbolNode->getType().isImplicitlySizedArray()) { - int newSize = getIoArrayImplicitSize(); - if (newSize > 0) - symbolNode->getWritableType().changeOuterArraySize(newSize); - } -} - -// If there has been an input primitive declaration (geometry shader) or an output -// number of vertices declaration(tessellation shader), make sure all input array types -// match it in size. Types come either from nodes in the AST or symbols in the -// symbol table. -// -// Types without an array size will be given one. -// Types already having a size that is wrong will get an error. -// -void TParseContext::checkIoArraysConsistency(const TSourceLoc& loc, bool tailOnly) -{ - int requiredSize = getIoArrayImplicitSize(); - if (requiredSize == 0) - return; - - const char* feature; - if (language == EShLangGeometry) - feature = TQualifier::getGeometryString(intermediate.getInputPrimitive()); - else if (language == EShLangTessControl) - feature = "vertices"; - else - feature = "unknown"; - - if (tailOnly) { - checkIoArrayConsistency(loc, requiredSize, feature, ioArraySymbolResizeList.back()->getWritableType(), ioArraySymbolResizeList.back()->getName()); - return; - } - - for (size_t i = 0; i < ioArraySymbolResizeList.size(); ++i) - checkIoArrayConsistency(loc, requiredSize, feature, ioArraySymbolResizeList[i]->getWritableType(), ioArraySymbolResizeList[i]->getName()); -} - -int TParseContext::getIoArrayImplicitSize() const -{ - if (language == EShLangGeometry) - return TQualifier::mapGeometryToSize(intermediate.getInputPrimitive()); - else if (language == EShLangTessControl) - return intermediate.getVertices() != TQualifier::layoutNotSet ? 
intermediate.getVertices() : 0; - else - return 0; -} - -void TParseContext::checkIoArrayConsistency(const TSourceLoc& loc, int requiredSize, const char* feature, TType& type, const TString& name) -{ - if (type.isImplicitlySizedArray()) - type.changeOuterArraySize(requiredSize); - else if (type.getOuterArraySize() != requiredSize) { - if (language == EShLangGeometry) - error(loc, "inconsistent input primitive for array size of", feature, name.c_str()); - else if (language == EShLangTessControl) - error(loc, "inconsistent output number of vertices for array size of", feature, name.c_str()); - else - assert(0); - } -} - -// Handle seeing a binary node with a math operation. -// Returns nullptr if not semantically allowed. -TIntermTyped* TParseContext::handleBinaryMath(const TSourceLoc& loc, const char* str, TOperator op, TIntermTyped* left, TIntermTyped* right) -{ - rValueErrorCheck(loc, str, left->getAsTyped()); - rValueErrorCheck(loc, str, right->getAsTyped()); - - bool allowed = true; - switch (op) { - // TODO: Bring more source language-specific checks up from intermediate.cpp - // to the specific parse helpers for that source language. - case EOpLessThan: - case EOpGreaterThan: - case EOpLessThanEqual: - case EOpGreaterThanEqual: - if (! left->isScalar() || ! right->isScalar()) - allowed = false; - break; - default: - break; - } - - TIntermTyped* result = nullptr; - if (allowed) - result = intermediate.addBinaryMath(op, left, right, loc); - - if (result == nullptr) - binaryOpError(loc, str, left->getCompleteString(), right->getCompleteString()); - - return result; -} - -// Handle seeing a unary node with a math operation. -TIntermTyped* TParseContext::handleUnaryMath(const TSourceLoc& loc, const char* str, TOperator op, TIntermTyped* childNode) -{ - rValueErrorCheck(loc, str, childNode); - - TIntermTyped* result = intermediate.addUnaryMath(op, childNode, loc); - - if (result) - return result; - else - unaryOpError(loc, str, childNode->getCompleteString()); - - return childNode; -} - -// -// Handle seeing a base.field dereference in the grammar. -// -TIntermTyped* TParseContext::handleDotDereference(const TSourceLoc& loc, TIntermTyped* base, const TString& field) -{ - variableCheck(base); - - // - // .length() can't be resolved until we later see the function-calling syntax. - // Save away the name in the AST for now. Processing is completed in - // handleLengthMethod(). - // - if (field == "length") { - if (base->isArray()) { - profileRequires(loc, ENoProfile, 120, E_GL_3DL_array_objects, ".length"); - profileRequires(loc, EEsProfile, 300, nullptr, ".length"); - } else if (base->isVector() || base->isMatrix()) { - const char* feature = ".length() on vectors and matrices"; - requireProfile(loc, ~EEsProfile, feature); - profileRequires(loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, feature); - } else { - error(loc, "does not operate on this type:", field.c_str(), base->getType().getCompleteString().c_str()); - - return base; - } - - return intermediate.addMethod(base, TType(EbtInt), &field, loc); - } - - // It's not .length() if we get to here. - - if (base->isArray()) { - error(loc, "cannot apply to an array:", ".", field.c_str()); - - return base; - } - - // It's neither an array nor .length() if we get here, - // leaving swizzles and struct/block dereferences. 
- - TIntermTyped* result = base; - if (base->isVector() || base->isScalar()) { - if (base->isScalar()) { - const char* dotFeature = "scalar swizzle"; - requireProfile(loc, ~EEsProfile, dotFeature); - profileRequires(loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, dotFeature); - } - - TSwizzleSelectors<TVectorSelector> selectors; - parseSwizzleSelector(loc, field, base->getVectorSize(), selectors); - - if (base->isScalar()) { - if (selectors.size() == 1) - return result; - else { - TType type(base->getBasicType(), EvqTemporary, selectors.size()); - // Swizzle operations propagate specialization-constantness - if (base->getQualifier().isSpecConstant()) - type.getQualifier().makeSpecConstant(); - return addConstructor(loc, base, type); - } - } - - if (base->getType().getQualifier().isFrontEndConstant()) - result = intermediate.foldSwizzle(base, selectors, loc); - else { - if (selectors.size() == 1) { - TIntermTyped* index = intermediate.addConstantUnion(selectors[0], loc); - result = intermediate.addIndex(EOpIndexDirect, base, index, loc); - result->setType(TType(base->getBasicType(), EvqTemporary, base->getType().getQualifier().precision)); - } else { - TIntermTyped* index = intermediate.addSwizzle(selectors, loc); - result = intermediate.addIndex(EOpVectorSwizzle, base, index, loc); - result->setType(TType(base->getBasicType(), EvqTemporary, base->getType().getQualifier().precision, selectors.size())); - } - // Swizzle operations propagate specialization-constantness - if (base->getType().getQualifier().isSpecConstant()) - result->getWritableType().getQualifier().makeSpecConstant(); - } - } else if (base->getBasicType() == EbtStruct || base->getBasicType() == EbtBlock) { - const TTypeList* fields = base->getType().getStruct(); - bool fieldFound = false; - int member; - for (member = 0; member < (int)fields->size(); ++member) { - if ((*fields)[member].type->getFieldName() == field) { - fieldFound = true; - break; - } - } - if (fieldFound) { - if (base->getType().getQualifier().isFrontEndConstant()) - result = intermediate.foldDereference(base, member, loc); - else { - blockMemberExtensionCheck(loc, base, field); - TIntermTyped* index = intermediate.addConstantUnion(member, loc); - result = intermediate.addIndex(EOpIndexDirectStruct, base, index, loc); - result->setType(*(*fields)[member].type); - } - } else - error(loc, "no such field in structure", field.c_str(), ""); - } else - error(loc, "does not apply to this type:", field.c_str(), base->getType().getCompleteString().c_str()); - - // Propagate noContraction up the dereference chain - if (base->getQualifier().noContraction) - result->getWritableType().getQualifier().noContraction = true; - - return result; -} - -void TParseContext::blockMemberExtensionCheck(const TSourceLoc& loc, const TIntermTyped* /*base*/, const TString& field) -{ - if (profile == EEsProfile && field == "gl_PointSize") { - if (language == EShLangGeometry) - requireExtensions(loc, Num_AEP_geometry_point_size, AEP_geometry_point_size, "gl_PointSize"); - else if (language == EShLangTessControl || language == EShLangTessEvaluation) - requireExtensions(loc, Num_AEP_tessellation_point_size, AEP_tessellation_point_size, "gl_PointSize"); - } -} - -// -// Handle seeing a function declarator in the grammar. This is the precursor -// to recognizing a function prototype or function definition. -// -TFunction* TParseContext::handleFunctionDeclarator(const TSourceLoc& loc, TFunction& function, bool prototype) -{ - // ES can't declare prototypes inside functions - if (! 
symbolTable.atGlobalLevel()) - requireProfile(loc, ~EEsProfile, "local function declaration"); - - // - // Multiple declarations of the same function name are allowed. - // - // If this is a definition, the definition production code will check for redefinitions - // (we don't know at this point if it's a definition or not). - // - // Redeclarations (full signature match) are allowed. But, return types and parameter qualifiers must also match. - // - except ES 100, which only allows a single prototype - // - // ES 100 does not allow redefining, but does allow overloading of built-in functions. - // ES 300 does not allow redefining or overloading of built-in functions. - // - bool builtIn; - TSymbol* symbol = symbolTable.find(function.getMangledName(), &builtIn); - if (symbol && symbol->getAsFunction() && builtIn) - requireProfile(loc, ~EEsProfile, "redefinition of built-in function"); - const TFunction* prevDec = symbol ? symbol->getAsFunction() : 0; - if (prevDec) { - if (prevDec->isPrototyped() && prototype) - profileRequires(loc, EEsProfile, 300, nullptr, "multiple prototypes for same function"); - if (prevDec->getType() != function.getType()) - error(loc, "overloaded functions must have the same return type", function.getType().getBasicTypeString().c_str(), ""); - for (int i = 0; i < prevDec->getParamCount(); ++i) { - if ((*prevDec)[i].type->getQualifier().storage != function[i].type->getQualifier().storage) - error(loc, "overloaded functions must have the same parameter storage qualifiers for argument", function[i].type->getStorageQualifierString(), "%d", i+1); - - if ((*prevDec)[i].type->getQualifier().precision != function[i].type->getQualifier().precision) - error(loc, "overloaded functions must have the same parameter precision qualifiers for argument", function[i].type->getPrecisionQualifierString(), "%d", i+1); - } - } - - arrayObjectCheck(loc, function.getType(), "array in function return type"); - - if (prototype) { - // All built-in functions are defined, even though they don't have a body. - // Count their prototype as a definition instead. - if (symbolTable.atBuiltInLevel()) - function.setDefined(); - else { - if (prevDec && ! builtIn) - symbol->getAsFunction()->setPrototyped(); // need a writable one, but like having prevDec as a const - function.setPrototyped(); - } - } - - // This insert won't actually insert it if it's a duplicate signature, but it will still check for - // other forms of name collisions. - if (! symbolTable.insert(function)) - error(loc, "function name is redeclaration of existing name", function.getName().c_str(), ""); - - // - // If this is a redeclaration, it could also be a definition, - // in which case, we need to use the parameter names from this one, and not the one that's - // being redeclared. So, pass back this declaration, not the one in the symbol table. - // - return &function; -} - -// -// Handle seeing the function prototype in front of a function definition in the grammar. -// The body is handled after this function returns. -// -TIntermAggregate* TParseContext::handleFunctionDefinition(const TSourceLoc& loc, TFunction& function) -{ - currentCaller = function.getMangledName(); - TSymbol* symbol = symbolTable.find(function.getMangledName()); - TFunction* prevDec = symbol ? symbol->getAsFunction() : nullptr; - - if (! prevDec) - error(loc, "can't find function", function.getName().c_str(), ""); - // Note: 'prevDec' could be 'function' if this is the first time we've seen function - // as it would have just been put in the symbol table. 
Otherwise, we're looking up - // an earlier occurrence. - - if (prevDec && prevDec->isDefined()) { - // Then this function already has a body. - error(loc, "function already has a body", function.getName().c_str(), ""); - } - if (prevDec && ! prevDec->isDefined()) { - prevDec->setDefined(); - - // Remember the return type for later checking for RETURN statements. - currentFunctionType = &(prevDec->getType()); - } else - currentFunctionType = new TType(EbtVoid); - functionReturnsValue = false; - - // Check for entry point - if (function.getName().compare(intermediate.getEntryPointName().c_str()) == 0) { - intermediate.setEntryPointMangledName(function.getMangledName().c_str()); - intermediate.incrementEntryPointCount(); - inMain = true; - } else - inMain = false; - - // - // Raise error message if main function takes any parameters or returns anything other than void - // - if (inMain) { - if (function.getParamCount() > 0) - error(loc, "function cannot take any parameter(s)", function.getName().c_str(), ""); - if (function.getType().getBasicType() != EbtVoid) - error(loc, "", function.getType().getBasicTypeString().c_str(), "entry point cannot return a value"); - } - - // - // New symbol table scope for body of function plus its arguments - // - symbolTable.push(); - - // - // Insert parameters into the symbol table. - // If the parameter has no name, it's not an error, just don't insert it - // (could be used for unused args). - // - // Also, accumulate the list of parameters into the HIL, so lower level code - // knows where to find parameters. - // - TIntermAggregate* paramNodes = new TIntermAggregate; - for (int i = 0; i < function.getParamCount(); i++) { - TParameter& param = function[i]; - if (param.name != nullptr) { - TVariable *variable = new TVariable(param.name, *param.type); - - // Insert the parameters with name in the symbol table. - if (! symbolTable.insert(*variable)) - error(loc, "redefinition", variable->getName().c_str(), ""); - else { - // Transfer ownership of name pointer to symbol table. - param.name = nullptr; - - // Add the parameter to the HIL - paramNodes = intermediate.growAggregate(paramNodes, - intermediate.addSymbol(*variable, loc), - loc); - } - } else - paramNodes = intermediate.growAggregate(paramNodes, intermediate.addSymbol(*param.type, loc), loc); - } - intermediate.setAggregateOperator(paramNodes, EOpParameters, TType(EbtVoid), loc); - loopNestingLevel = 0; - statementNestingLevel = 0; - controlFlowNestingLevel = 0; - postEntryPointReturn = false; - - return paramNodes; -} - -// -// Handle seeing function call syntax in the grammar, which could be any of -// - .length() method -// - constructor -// - a call to a built-in function mapped to an operator -// - a call to a built-in function that will remain a function call (e.g., texturing) -// - user function -// - subroutine call (not implemented yet) -// -TIntermTyped* TParseContext::handleFunctionCall(const TSourceLoc& loc, TFunction* function, TIntermNode* arguments) -{ - TIntermTyped* result = nullptr; - - if (function->getBuiltInOp() == EOpArrayLength) - result = handleLengthMethod(loc, function, arguments); - else if (function->getBuiltInOp() != EOpNull) { - // - // Then this should be a constructor. - // Don't go through the symbol table for constructors. - // Their parameters will be verified algorithmically. - // - TType type(EbtVoid); // use this to get the type back - if (! 
constructorError(loc, arguments, *function, function->getBuiltInOp(), type)) { - // - // It's a constructor, of type 'type'. - // - result = addConstructor(loc, arguments, type); - if (result == nullptr) - error(loc, "cannot construct with these arguments", type.getCompleteString().c_str(), ""); - } - } else { - // - // Find it in the symbol table. - // - const TFunction* fnCandidate; - bool builtIn; - fnCandidate = findFunction(loc, *function, builtIn); - if (fnCandidate) { - // This is a declared function that might map to - // - a built-in operator, - // - a built-in function not mapped to an operator, or - // - a user function. - - // Error check for a function requiring specific extensions present. - if (builtIn && fnCandidate->getNumExtensions()) - requireExtensions(loc, fnCandidate->getNumExtensions(), fnCandidate->getExtensions(), fnCandidate->getName().c_str()); - - if (arguments) { - // Make sure qualifications work for these arguments. - TIntermAggregate* aggregate = arguments->getAsAggregate(); - for (int i = 0; i < fnCandidate->getParamCount(); ++i) { - // At this early point there is a slight ambiguity between whether an aggregate 'arguments' - // is the single argument itself or its children are the arguments. Only one argument - // means take 'arguments' itself as the one argument. - TIntermNode* arg = fnCandidate->getParamCount() == 1 ? arguments : (aggregate ? aggregate->getSequence()[i] : arguments); - TQualifier& formalQualifier = (*fnCandidate)[i].type->getQualifier(); - if (formalQualifier.isParamOutput()) { - if (lValueErrorCheck(arguments->getLoc(), "assign", arg->getAsTyped())) - error(arguments->getLoc(), "Non-L-value cannot be passed for 'out' or 'inout' parameters.", "out", ""); - } - TQualifier& argQualifier = arg->getAsTyped()->getQualifier(); - if (argQualifier.isMemory()) { - const char* message = "argument cannot drop memory qualifier when passed to formal parameter"; - if (argQualifier.volatil && ! formalQualifier.volatil) - error(arguments->getLoc(), message, "volatile", ""); - if (argQualifier.coherent && ! formalQualifier.coherent) - error(arguments->getLoc(), message, "coherent", ""); - if (argQualifier.readonly && ! formalQualifier.readonly) - error(arguments->getLoc(), message, "readonly", ""); - if (argQualifier.writeonly && ! formalQualifier.writeonly) - error(arguments->getLoc(), message, "writeonly", ""); - } - // TODO 4.5 functionality: A shader will fail to compile - // if the value passed to the memargument of an atomic memory function does not correspond to a buffer or - // shared variable. It is acceptable to pass an element of an array or a single component of a vector to the - // memargument of an atomic memory function, as long as the underlying array or vector is a buffer or - // shared variable. - } - - // Convert 'in' arguments - addInputArgumentConversions(*fnCandidate, arguments); // arguments may be modified if it's just a single argument node - } - - if (builtIn && fnCandidate->getBuiltInOp() != EOpNull) { - // A function call mapped to a built-in operation. - result = handleBuiltInFunctionCall(loc, *arguments, *fnCandidate); - } else { - // This is a function call not mapped to built-in operator. - // It could still be a built-in function, but only if PureOperatorBuiltins == false. 
- result = intermediate.setAggregateOperator(arguments, EOpFunctionCall, fnCandidate->getType(), loc); - TIntermAggregate* call = result->getAsAggregate(); - call->setName(fnCandidate->getMangledName()); - - // this is how we know whether the given function is a built-in function or a user-defined function - // if builtIn == false, it's a userDefined -> could be an overloaded built-in function also - // if builtIn == true, it's definitely a built-in function with EOpNull - if (! builtIn) { - call->setUserDefined(); - if (symbolTable.atGlobalLevel()) { - requireProfile(loc, ~EEsProfile, "calling user function from global scope"); - intermediate.addToCallGraph(infoSink, "main(", fnCandidate->getMangledName()); - } else - intermediate.addToCallGraph(infoSink, currentCaller, fnCandidate->getMangledName()); - } - - if (builtIn) - nonOpBuiltInCheck(loc, *fnCandidate, *call); - else - userFunctionCallCheck(loc, *call); - } - - // Convert 'out' arguments. If it was a constant folded built-in, it won't be an aggregate anymore. - // Built-ins with a single argument aren't called with an aggregate, but they also don't have an output. - // Also, build the qualifier list for user function calls, which are always called with an aggregate. - if (result->getAsAggregate()) { - TQualifierList& qualifierList = result->getAsAggregate()->getQualifierList(); - for (int i = 0; i < fnCandidate->getParamCount(); ++i) { - TStorageQualifier qual = (*fnCandidate)[i].type->getQualifier().storage; - qualifierList.push_back(qual); - } - result = addOutputArgumentConversions(*fnCandidate, *result->getAsAggregate()); - } - } - } - - // generic error recovery - // TODO: simplification: localize all the error recoveries that look like this, and taking type into account to reduce cascades - if (result == nullptr) - result = intermediate.addConstantUnion(0.0, EbtFloat, loc); - - return result; -} - -TIntermTyped* TParseContext::handleBuiltInFunctionCall(TSourceLoc loc, TIntermNode& arguments, - const TFunction& function) -{ - checkLocation(loc, function.getBuiltInOp()); - TIntermTyped *result = intermediate.addBuiltInFunctionCall(loc, function.getBuiltInOp(), - function.getParamCount() == 1, - &arguments, function.getType()); - if (obeyPrecisionQualifiers()) - computeBuiltinPrecisions(*result, function); - - if (result == nullptr) { - error(arguments.getLoc(), " wrong operand type", "Internal Error", - "built in unary operator function. Type: %s", - static_cast<TIntermTyped*>(&arguments)->getCompleteString().c_str()); - } else if (result->getAsOperator()) - builtInOpCheck(loc, function, *result->getAsOperator()); - - return result; -} - -// "The operation of a built-in function can have a different precision -// qualification than the precision qualification of the resulting value. -// These two precision qualifications are established as follows. -// -// The precision qualification of the operation of a built-in function is -// based on the precision qualification of its input arguments and formal -// parameters: When a formal parameter specifies a precision qualifier, -// that is used, otherwise, the precision qualification of the calling -// argument is used. The highest precision of these will be the precision -// qualification of the operation of the built-in function. Generally, -// this is applied across all arguments to a built-in function, with the -// exceptions being: -// - bitfieldExtract and bitfieldInsert ignore the 'offset' and 'bits' -// arguments. -// - interpolateAt* functions only look at the 'interpolant' argument. 
-// -// The precision qualification of the result of a built-in function is -// determined in one of the following ways: -// -// - For the texture sampling, image load, and image store functions, -// the precision of the return type matches the precision of the -// sampler type -// -// Otherwise: -// -// - For prototypes that do not specify a resulting precision qualifier, -// the precision will be the same as the precision of the operation. -// -// - For prototypes that do specify a resulting precision qualifier, -// the specified precision qualifier is the precision qualification of -// the result." -// -void TParseContext::computeBuiltinPrecisions(TIntermTyped& node, const TFunction& function) -{ - TPrecisionQualifier operationPrecision = EpqNone; - TPrecisionQualifier resultPrecision = EpqNone; - - TIntermOperator* opNode = node.getAsOperator(); - if (opNode == nullptr) - return; - - if (TIntermUnary* unaryNode = node.getAsUnaryNode()) { - operationPrecision = std::max(function[0].type->getQualifier().precision, - unaryNode->getOperand()->getType().getQualifier().precision); - if (function.getType().getBasicType() != EbtBool) - resultPrecision = function.getType().getQualifier().precision == EpqNone ? - operationPrecision : - function.getType().getQualifier().precision; - } else if (TIntermAggregate* agg = node.getAsAggregate()) { - TIntermSequence& sequence = agg->getSequence(); - unsigned int numArgs = (unsigned int)sequence.size(); - switch (agg->getOp()) { - case EOpBitfieldExtract: - numArgs = 1; - break; - case EOpBitfieldInsert: - numArgs = 2; - break; - case EOpInterpolateAtCentroid: - case EOpInterpolateAtOffset: - case EOpInterpolateAtSample: - numArgs = 1; - break; - default: - break; - } - // find the maximum precision from the arguments and parameters - for (unsigned int arg = 0; arg < numArgs; ++arg) { - operationPrecision = std::max(operationPrecision, sequence[arg]->getAsTyped()->getQualifier().precision); - operationPrecision = std::max(operationPrecision, function[arg].type->getQualifier().precision); - } - // compute the result precision -#ifdef AMD_EXTENSIONS - if (agg->isSampling() || - agg->getOp() == EOpImageLoad || agg->getOp() == EOpImageStore || - agg->getOp() == EOpImageLoadLod || agg->getOp() == EOpImageStoreLod) -#else - if (agg->isSampling() || agg->getOp() == EOpImageLoad || agg->getOp() == EOpImageStore) -#endif - resultPrecision = sequence[0]->getAsTyped()->getQualifier().precision; - else if (function.getType().getBasicType() != EbtBool) - resultPrecision = function.getType().getQualifier().precision == EpqNone ? - operationPrecision : - function.getType().getQualifier().precision; - } - - // Propagate precision through this node and its children. 
That algorithm stops - // when a precision is found, so start by clearing this subroot precision - opNode->getQualifier().precision = EpqNone; - if (operationPrecision != EpqNone) { - opNode->propagatePrecision(operationPrecision); - opNode->setOperationPrecision(operationPrecision); - } - // Now, set the result precision, which might not match - opNode->getQualifier().precision = resultPrecision; -} - -TIntermNode* TParseContext::handleReturnValue(const TSourceLoc& loc, TIntermTyped* value) -{ - functionReturnsValue = true; - if (currentFunctionType->getBasicType() == EbtVoid) { - error(loc, "void function cannot return a value", "return", ""); - return intermediate.addBranch(EOpReturn, loc); - } else if (*currentFunctionType != value->getType()) { - TIntermTyped* converted = intermediate.addConversion(EOpReturn, *currentFunctionType, value); - if (converted) { - if (*currentFunctionType != converted->getType()) - error(loc, "cannot convert return value to function return type", "return", ""); - if (version < 420) - warn(loc, "type conversion on return values was not explicitly allowed until version 420", "return", ""); - return intermediate.addBranch(EOpReturn, converted, loc); - } else { - error(loc, "type does not match, or is not convertible to, the function's return type", "return", ""); - return intermediate.addBranch(EOpReturn, value, loc); - } - } else - return intermediate.addBranch(EOpReturn, value, loc); -} - -// See if the operation is being done in an illegal location. -void TParseContext::checkLocation(const TSourceLoc& loc, TOperator op) -{ - switch (op) { - case EOpBarrier: - if (language == EShLangTessControl) { - if (controlFlowNestingLevel > 0) - error(loc, "tessellation control barrier() cannot be placed within flow control", "", ""); - if (! inMain) - error(loc, "tessellation control barrier() must be in main()", "", ""); - else if (postEntryPointReturn) - error(loc, "tessellation control barrier() cannot be placed after a return from main()", "", ""); - } - break; - default: - break; - } -} - -// Finish processing object.length(). This started earlier in handleDotDereference(), where -// the ".length" part was recognized and semantically checked, and finished here where the -// function syntax "()" is recognized. -// -// Return resulting tree node. -TIntermTyped* TParseContext::handleLengthMethod(const TSourceLoc& loc, TFunction* function, TIntermNode* intermNode) -{ - int length = 0; - - if (function->getParamCount() > 0) - error(loc, "method does not accept any arguments", function->getName().c_str(), ""); - else { - const TType& type = intermNode->getAsTyped()->getType(); - if (type.isArray()) { - if (type.isRuntimeSizedArray()) { - // Create a unary op and let the back end handle it - return intermediate.addBuiltInFunctionCall(loc, EOpArrayLength, true, intermNode, TType(EbtInt)); - } else if (type.isImplicitlySizedArray()) { - if (intermNode->getAsSymbolNode() && isIoResizeArray(type)) { - // We could be between a layout declaration that gives a built-in io array implicit size and - // a user redeclaration of that array, meaning we have to substitute its implicit size here - // without actually redeclaring the array. (It is an error to use a member before the - // redeclaration, but not an error to use the array name itself.) 
- const TString& name = intermNode->getAsSymbolNode()->getName(); - if (name == "gl_in" || name == "gl_out") - length = getIoArrayImplicitSize(); - } - if (length == 0) { - if (intermNode->getAsSymbolNode() && isIoResizeArray(type)) - error(loc, "", function->getName().c_str(), "array must first be sized by a redeclaration or layout qualifier"); - else - error(loc, "", function->getName().c_str(), "array must be declared with a size before using this method"); - } - } else if (type.getOuterArrayNode()) { - // If the array's outer size is specified by an intermediate node, it means the array's length - // was specified by a specialization constant. In such a case, we should return the node of the - // specialization constants to represent the length. - return type.getOuterArrayNode(); - } else - length = type.getOuterArraySize(); - } else if (type.isMatrix()) - length = type.getMatrixCols(); - else if (type.isVector()) - length = type.getVectorSize(); - else { - // we should not get here, because earlier semantic checking should have prevented this path - error(loc, ".length()", "unexpected use of .length()", ""); - } - } - - if (length == 0) - length = 1; - - return intermediate.addConstantUnion(length, loc); -} - -// -// Add any needed implicit conversions for function-call arguments to input parameters. -// -void TParseContext::addInputArgumentConversions(const TFunction& function, TIntermNode*& arguments) const -{ - TIntermAggregate* aggregate = arguments->getAsAggregate(); - - // Process each argument's conversion - for (int i = 0; i < function.getParamCount(); ++i) { - // At this early point there is a slight ambiguity between whether an aggregate 'arguments' - // is the single argument itself or its children are the arguments. Only one argument - // means take 'arguments' itself as the one argument. - TIntermTyped* arg = function.getParamCount() == 1 ? arguments->getAsTyped() : (aggregate ? aggregate->getSequence()[i]->getAsTyped() : arguments->getAsTyped()); - if (*function[i].type != arg->getType()) { - if (function[i].type->getQualifier().isParamInput()) { - // In-qualified arguments just need an extra node added above the argument to - // convert to the correct type. - arg = intermediate.addConversion(EOpFunctionCall, *function[i].type, arg); - if (arg) { - if (function.getParamCount() == 1) - arguments = arg; - else { - if (aggregate) - aggregate->getSequence()[i] = arg; - else - arguments = arg; - } - } - } - } - } -} - -// -// Add any needed implicit output conversions for function-call arguments. This -// can require a new tree topology, complicated further by whether the function -// has a return value. -// -// Returns a node of a subtree that evaluates to the return value of the function. -// -TIntermTyped* TParseContext::addOutputArgumentConversions(const TFunction& function, TIntermAggregate& intermNode) const -{ - TIntermSequence& arguments = intermNode.getSequence(); - - // Will there be any output conversions? - bool outputConversions = false; - for (int i = 0; i < function.getParamCount(); ++i) { - if (*function[i].type != arguments[i]->getAsTyped()->getType() && function[i].type->getQualifier().isParamOutput()) { - outputConversions = true; - break; - } - } - - if (! outputConversions) - return &intermNode; - - // Setup for the new tree, if needed: - // - // Output conversions need a different tree topology. 
- // Out-qualified arguments need a temporary of the correct type, with the call - // followed by an assignment of the temporary to the original argument: - // void: function(arg, ...) -> ( function(tempArg, ...), arg = tempArg, ...) - // ret = function(arg, ...) -> ret = (tempRet = function(tempArg, ...), arg = tempArg, ..., tempRet) - // Where the "tempArg" type needs no conversion as an argument, but will convert on assignment. - TIntermTyped* conversionTree = nullptr; - TVariable* tempRet = nullptr; - if (intermNode.getBasicType() != EbtVoid) { - // do the "tempRet = function(...), " bit from above - tempRet = makeInternalVariable("tempReturn", intermNode.getType()); - TIntermSymbol* tempRetNode = intermediate.addSymbol(*tempRet, intermNode.getLoc()); - conversionTree = intermediate.addAssign(EOpAssign, tempRetNode, &intermNode, intermNode.getLoc()); - } else - conversionTree = &intermNode; - - conversionTree = intermediate.makeAggregate(conversionTree); - - // Process each argument's conversion - for (int i = 0; i < function.getParamCount(); ++i) { - if (*function[i].type != arguments[i]->getAsTyped()->getType()) { - if (function[i].type->getQualifier().isParamOutput()) { - // Out-qualified arguments need to use the topology set up above. - // do the " ...(tempArg, ...), arg = tempArg" bit from above - TVariable* tempArg = makeInternalVariable("tempArg", *function[i].type); - tempArg->getWritableType().getQualifier().makeTemporary(); - TIntermSymbol* tempArgNode = intermediate.addSymbol(*tempArg, intermNode.getLoc()); - TIntermTyped* tempAssign = intermediate.addAssign(EOpAssign, arguments[i]->getAsTyped(), tempArgNode, arguments[i]->getLoc()); - conversionTree = intermediate.growAggregate(conversionTree, tempAssign, arguments[i]->getLoc()); - // replace the argument with another node for the same tempArg variable - arguments[i] = intermediate.addSymbol(*tempArg, intermNode.getLoc()); - } - } - } - - // Finalize the tree topology (see bigger comment above). - if (tempRet) { - // do the "..., tempRet" bit from above - TIntermSymbol* tempRetNode = intermediate.addSymbol(*tempRet, intermNode.getLoc()); - conversionTree = intermediate.growAggregate(conversionTree, tempRetNode, intermNode.getLoc()); - } - conversionTree = intermediate.setAggregateOperator(conversionTree, EOpComma, intermNode.getType(), intermNode.getLoc()); - - return conversionTree; -} - -// -// Do additional checking of built-in function calls that is not caught -// by normal semantic checks on argument type, extension tagging, etc. -// -// Assumes there has been a semantically correct match to a built-in function prototype. -// -void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCandidate, TIntermOperator& callNode) -{ - // Set up convenience accessors to the argument(s). There is almost always - // multiple arguments for the cases below, but when there might be one, - // check the unaryArg first. 
- const TIntermSequence* argp = nullptr; // confusing to use [] syntax on a pointer, so this is to help get a reference - const TIntermTyped* unaryArg = nullptr; - const TIntermTyped* arg0 = nullptr; - if (callNode.getAsAggregate()) { - argp = &callNode.getAsAggregate()->getSequence(); - if (argp->size() > 0) - arg0 = (*argp)[0]->getAsTyped(); - } else { - assert(callNode.getAsUnaryNode()); - unaryArg = callNode.getAsUnaryNode()->getOperand(); - arg0 = unaryArg; - } - - switch (callNode.getOp()) { - case EOpTextureGather: - case EOpTextureGatherOffset: - case EOpTextureGatherOffsets: - { - // Figure out which variants are allowed by what extensions, - // and what arguments must be constant for which situations. - - TString featureString = fnCandidate.getName() + "(...)"; - const char* feature = featureString.c_str(); - profileRequires(loc, EEsProfile, 310, nullptr, feature); - int compArg = -1; // track which argument, if any, is the constant component argument - switch (callNode.getOp()) { - case EOpTextureGather: - // More than two arguments needs gpu_shader5, and rectangular or shadow needs gpu_shader5, - // otherwise, need GL_ARB_texture_gather. - if (fnCandidate.getParamCount() > 2 || fnCandidate[0].type->getSampler().dim == EsdRect || fnCandidate[0].type->getSampler().shadow) { - profileRequires(loc, ~EEsProfile, 400, E_GL_ARB_gpu_shader5, feature); - if (! fnCandidate[0].type->getSampler().shadow) - compArg = 2; - } else - profileRequires(loc, ~EEsProfile, 400, E_GL_ARB_texture_gather, feature); - break; - case EOpTextureGatherOffset: - // GL_ARB_texture_gather is good enough for 2D non-shadow textures with no component argument - if (fnCandidate[0].type->getSampler().dim == Esd2D && ! fnCandidate[0].type->getSampler().shadow && fnCandidate.getParamCount() == 3) - profileRequires(loc, ~EEsProfile, 400, E_GL_ARB_texture_gather, feature); - else - profileRequires(loc, ~EEsProfile, 400, E_GL_ARB_gpu_shader5, feature); - if (! (*argp)[fnCandidate[0].type->getSampler().shadow ? 3 : 2]->getAsConstantUnion()) - profileRequires(loc, EEsProfile, 320, Num_AEP_gpu_shader5, AEP_gpu_shader5, - "non-constant offset argument"); - if (! fnCandidate[0].type->getSampler().shadow) - compArg = 3; - break; - case EOpTextureGatherOffsets: - profileRequires(loc, ~EEsProfile, 400, E_GL_ARB_gpu_shader5, feature); - if (! fnCandidate[0].type->getSampler().shadow) - compArg = 3; - // check for constant offsets - if (! (*argp)[fnCandidate[0].type->getSampler().shadow ? 
3 : 2]->getAsConstantUnion()) - error(loc, "must be a compile-time constant:", feature, "offsets argument"); - break; - default: - break; - } - - if (compArg > 0 && compArg < fnCandidate.getParamCount()) { - if ((*argp)[compArg]->getAsConstantUnion()) { - int value = (*argp)[compArg]->getAsConstantUnion()->getConstArray()[0].getIConst(); - if (value < 0 || value > 3) - error(loc, "must be 0, 1, 2, or 3:", feature, "component argument"); - } else - error(loc, "must be a compile-time constant:", feature, "component argument"); - } - -#ifdef AMD_EXTENSIONS - bool bias = false; - if (callNode.getOp() == EOpTextureGather) - bias = fnCandidate.getParamCount() > 3; - else if (callNode.getOp() == EOpTextureGatherOffset || - callNode.getOp() == EOpTextureGatherOffsets) - bias = fnCandidate.getParamCount() > 4; - - if (bias) { - TString biasFeatureString = fnCandidate.getName() + "with bias argument"; - const char* feature = biasFeatureString.c_str(); - profileRequires(loc, ~EEsProfile, 450, nullptr, feature); - requireExtensions(loc, 1, &E_GL_AMD_texture_gather_bias_lod, feature); - } -#endif - - break; - } - -#ifdef AMD_EXTENSIONS - case EOpSparseTextureGather: - case EOpSparseTextureGatherOffset: - case EOpSparseTextureGatherOffsets: - { - bool bias = false; - if (callNode.getOp() == EOpSparseTextureGather) - bias = fnCandidate.getParamCount() > 4; - else if (callNode.getOp() == EOpSparseTextureGatherOffset || - callNode.getOp() == EOpSparseTextureGatherOffsets) - bias = fnCandidate.getParamCount() > 5; - - if (bias) { - TString featureString = fnCandidate.getName() + "with bias argument"; - const char* feature = featureString.c_str(); - profileRequires(loc, ~EEsProfile, 450, nullptr, feature); - requireExtensions(loc, 1, &E_GL_AMD_texture_gather_bias_lod, feature); - } - - break; - } - - case EOpSparseTextureGatherLod: - case EOpSparseTextureGatherLodOffset: - case EOpSparseTextureGatherLodOffsets: - { - requireExtensions(loc, 1, &E_GL_ARB_sparse_texture2, fnCandidate.getName().c_str()); - break; - } -#endif - - case EOpTextureOffset: - case EOpTextureFetchOffset: - case EOpTextureProjOffset: - case EOpTextureLodOffset: - case EOpTextureProjLodOffset: - case EOpTextureGradOffset: - case EOpTextureProjGradOffset: - { - // Handle texture-offset limits checking - // Pick which argument has to hold constant offsets - int arg = -1; - switch (callNode.getOp()) { - case EOpTextureOffset: arg = 2; break; - case EOpTextureFetchOffset: arg = (arg0->getType().getSampler().dim != EsdRect) ? 3 : 2; break; - case EOpTextureProjOffset: arg = 2; break; - case EOpTextureLodOffset: arg = 3; break; - case EOpTextureProjLodOffset: arg = 3; break; - case EOpTextureGradOffset: arg = 4; break; - case EOpTextureProjGradOffset: arg = 4; break; - default: - assert(0); - break; - } - - if (arg > 0) { - if (! 
(*argp)[arg]->getAsConstantUnion()) - error(loc, "argument must be compile-time constant", "texel offset", ""); - else { - const TType& type = (*argp)[arg]->getAsTyped()->getType(); - for (int c = 0; c < type.getVectorSize(); ++c) { - int offset = (*argp)[arg]->getAsConstantUnion()->getConstArray()[c].getIConst(); - if (offset > resources.maxProgramTexelOffset || offset < resources.minProgramTexelOffset) - error(loc, "value is out of range:", "texel offset", "[gl_MinProgramTexelOffset, gl_MaxProgramTexelOffset]"); - } - } - } - - break; - } - - case EOpTextureQuerySamples: - case EOpImageQuerySamples: - // GL_ARB_shader_texture_image_samples - profileRequires(loc, ~EEsProfile, 450, E_GL_ARB_shader_texture_image_samples, "textureSamples and imageSamples"); - break; - - case EOpImageAtomicAdd: - case EOpImageAtomicMin: - case EOpImageAtomicMax: - case EOpImageAtomicAnd: - case EOpImageAtomicOr: - case EOpImageAtomicXor: - case EOpImageAtomicExchange: - case EOpImageAtomicCompSwap: - { - // Make sure the image types have the correct layout() format and correct argument types - const TType& imageType = arg0->getType(); - if (imageType.getSampler().type == EbtInt || imageType.getSampler().type == EbtUint) { - if (imageType.getQualifier().layoutFormat != ElfR32i && imageType.getQualifier().layoutFormat != ElfR32ui) - error(loc, "only supported on image with format r32i or r32ui", fnCandidate.getName().c_str(), ""); - } else { - if (fnCandidate.getName().compare(0, 19, "imageAtomicExchange") != 0) - error(loc, "only supported on integer images", fnCandidate.getName().c_str(), ""); - else if (imageType.getQualifier().layoutFormat != ElfR32f && profile == EEsProfile) - error(loc, "only supported on image with format r32f", fnCandidate.getName().c_str(), ""); - } - - break; - } - - case EOpInterpolateAtCentroid: - case EOpInterpolateAtSample: - case EOpInterpolateAtOffset: - // Make sure the first argument is an interpolant, or an array element of an interpolant - if (arg0->getType().getQualifier().storage != EvqVaryingIn) { - // It might still be an array element. - // - // We could check more, but the semantics of the first argument are already met; the - // only way to turn an array into a float/vec* is array dereference and swizzle. - // - // ES and desktop 4.3 and earlier: swizzles may not be used - // desktop 4.4 and later: swizzles may be used - bool swizzleOkay = (profile != EEsProfile) && (version >= 440); - const TIntermTyped* base = TIntermediate::findLValueBase(arg0, swizzleOkay); - if (base == nullptr || base->getType().getQualifier().storage != EvqVaryingIn) - error(loc, "first argument must be an interpolant, or interpolant-array element", fnCandidate.getName().c_str(), ""); - } - break; - - case EOpEmitStreamVertex: - case EOpEndStreamPrimitive: - intermediate.setMultiStream(); - break; - - default: - break; - } -} - -extern bool PureOperatorBuiltins; - -// Deprecated! Use PureOperatorBuiltins == true instead, in which case this -// functionality is handled in builtInOpCheck() instead of here. -// -// Do additional checking of built-in function calls that were not mapped -// to built-in operations (e.g., texturing functions). -// -// Assumes there has been a semantically correct match to a built-in function. 
-// -void TParseContext::nonOpBuiltInCheck(const TSourceLoc& loc, const TFunction& fnCandidate, TIntermAggregate& callNode) -{ - // Further maintenance of this function is deprecated, because the "correct" - // future-oriented design is to not have to do string compares on function names. - - // If PureOperatorBuiltins == true, then all built-ins should be mapped - // to a TOperator, and this function would then never get called. - - assert(PureOperatorBuiltins == false); - - // built-in texturing functions get their return value precision from the precision of the sampler - if (fnCandidate.getType().getQualifier().precision == EpqNone && - fnCandidate.getParamCount() > 0 && fnCandidate[0].type->getBasicType() == EbtSampler) - callNode.getQualifier().precision = callNode.getSequence()[0]->getAsTyped()->getQualifier().precision; - - if (fnCandidate.getName().compare(0, 7, "texture") == 0) { - if (fnCandidate.getName().compare(0, 13, "textureGather") == 0) { - TString featureString = fnCandidate.getName() + "(...)"; - const char* feature = featureString.c_str(); - profileRequires(loc, EEsProfile, 310, nullptr, feature); - - int compArg = -1; // track which argument, if any, is the constant component argument - if (fnCandidate.getName().compare("textureGatherOffset") == 0) { - // GL_ARB_texture_gather is good enough for 2D non-shadow textures with no component argument - if (fnCandidate[0].type->getSampler().dim == Esd2D && ! fnCandidate[0].type->getSampler().shadow && fnCandidate.getParamCount() == 3) - profileRequires(loc, ~EEsProfile, 400, E_GL_ARB_texture_gather, feature); - else - profileRequires(loc, ~EEsProfile, 400, E_GL_ARB_gpu_shader5, feature); - int offsetArg = fnCandidate[0].type->getSampler().shadow ? 3 : 2; - if (! callNode.getSequence()[offsetArg]->getAsConstantUnion()) - profileRequires(loc, EEsProfile, 320, Num_AEP_gpu_shader5, AEP_gpu_shader5, - "non-constant offset argument"); - if (! fnCandidate[0].type->getSampler().shadow) - compArg = 3; - } else if (fnCandidate.getName().compare("textureGatherOffsets") == 0) { - profileRequires(loc, ~EEsProfile, 400, E_GL_ARB_gpu_shader5, feature); - if (! fnCandidate[0].type->getSampler().shadow) - compArg = 3; - // check for constant offsets - int offsetArg = fnCandidate[0].type->getSampler().shadow ? 3 : 2; - if (! callNode.getSequence()[offsetArg]->getAsConstantUnion()) - error(loc, "must be a compile-time constant:", feature, "offsets argument"); - } else if (fnCandidate.getName().compare("textureGather") == 0) { - // More than two arguments needs gpu_shader5, and rectangular or shadow needs gpu_shader5, - // otherwise, need GL_ARB_texture_gather. - if (fnCandidate.getParamCount() > 2 || fnCandidate[0].type->getSampler().dim == EsdRect || fnCandidate[0].type->getSampler().shadow) { - profileRequires(loc, ~EEsProfile, 400, E_GL_ARB_gpu_shader5, feature); - if (! fnCandidate[0].type->getSampler().shadow) - compArg = 2; - } else - profileRequires(loc, ~EEsProfile, 400, E_GL_ARB_texture_gather, feature); - } - - if (compArg > 0 && compArg < fnCandidate.getParamCount()) { - if (callNode.getSequence()[compArg]->getAsConstantUnion()) { - int value = callNode.getSequence()[compArg]->getAsConstantUnion()->getConstArray()[0].getIConst(); - if (value < 0 || value > 3) - error(loc, "must be 0, 1, 2, or 3:", feature, "component argument"); - } else - error(loc, "must be a compile-time constant:", feature, "component argument"); - } - } else { - // this is only for functions not starting "textureGather"... 
- if (fnCandidate.getName().find("Offset") != TString::npos) { - - // Handle texture-offset limits checking - int arg = -1; - if (fnCandidate.getName().compare("textureOffset") == 0) - arg = 2; - else if (fnCandidate.getName().compare("texelFetchOffset") == 0) - arg = 3; - else if (fnCandidate.getName().compare("textureProjOffset") == 0) - arg = 2; - else if (fnCandidate.getName().compare("textureLodOffset") == 0) - arg = 3; - else if (fnCandidate.getName().compare("textureProjLodOffset") == 0) - arg = 3; - else if (fnCandidate.getName().compare("textureGradOffset") == 0) - arg = 4; - else if (fnCandidate.getName().compare("textureProjGradOffset") == 0) - arg = 4; - - if (arg > 0) { - if (! callNode.getSequence()[arg]->getAsConstantUnion()) - error(loc, "argument must be compile-time constant", "texel offset", ""); - else { - const TType& type = callNode.getSequence()[arg]->getAsTyped()->getType(); - for (int c = 0; c < type.getVectorSize(); ++c) { - int offset = callNode.getSequence()[arg]->getAsConstantUnion()->getConstArray()[c].getIConst(); - if (offset > resources.maxProgramTexelOffset || offset < resources.minProgramTexelOffset) - error(loc, "value is out of range:", "texel offset", "[gl_MinProgramTexelOffset, gl_MaxProgramTexelOffset]"); - } - } - } - } - } - } - - // GL_ARB_shader_texture_image_samples - if (fnCandidate.getName().compare(0, 14, "textureSamples") == 0 || fnCandidate.getName().compare(0, 12, "imageSamples") == 0) - profileRequires(loc, ~EEsProfile, 450, E_GL_ARB_shader_texture_image_samples, "textureSamples and imageSamples"); - - if (fnCandidate.getName().compare(0, 11, "imageAtomic") == 0) { - const TType& imageType = callNode.getSequence()[0]->getAsTyped()->getType(); - if (imageType.getSampler().type == EbtInt || imageType.getSampler().type == EbtUint) { - if (imageType.getQualifier().layoutFormat != ElfR32i && imageType.getQualifier().layoutFormat != ElfR32ui) - error(loc, "only supported on image with format r32i or r32ui", fnCandidate.getName().c_str(), ""); - } else { - if (fnCandidate.getName().compare(0, 19, "imageAtomicExchange") != 0) - error(loc, "only supported on integer images", fnCandidate.getName().c_str(), ""); - else if (imageType.getQualifier().layoutFormat != ElfR32f && profile == EEsProfile) - error(loc, "only supported on image with format r32f", fnCandidate.getName().c_str(), ""); - } - } -} - -// -// Do any extra checking for a user function call. -// -void TParseContext::userFunctionCallCheck(const TSourceLoc& loc, TIntermAggregate& callNode) -{ - TIntermSequence& arguments = callNode.getSequence(); - - for (int i = 0; i < (int)arguments.size(); ++i) - samplerConstructorLocationCheck(loc, "call argument", arguments[i]); -} - -// -// Emit an error if this is a sampler constructor -// -void TParseContext::samplerConstructorLocationCheck(const TSourceLoc& loc, const char* token, TIntermNode* node) -{ - if (node->getAsOperator() && node->getAsOperator()->getOp() == EOpConstructTextureSampler) - error(loc, "sampler constructor must appear at point of use", token, ""); -} - -// -// Handle seeing a built-in constructor in a grammar production. 
-// -TFunction* TParseContext::handleConstructorCall(const TSourceLoc& loc, const TPublicType& publicType) -{ - TType type(publicType); - type.getQualifier().precision = EpqNone; - - if (type.isArray()) { - profileRequires(loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed constructor"); - profileRequires(loc, EEsProfile, 300, nullptr, "arrayed constructor"); - } - - TOperator op = intermediate.mapTypeToConstructorOp(type); - - if (op == EOpNull) { - error(loc, "cannot construct this type", type.getBasicString(), ""); - op = EOpConstructFloat; - TType errorType(EbtFloat); - type.shallowCopy(errorType); - } - - TString empty(""); - - return new TFunction(&empty, type, op); -} - -// Handle seeing a precision qualifier in the grammar. -void TParseContext::handlePrecisionQualifier(const TSourceLoc& /*loc*/, TQualifier& qualifier, TPrecisionQualifier precision) -{ - if (obeyPrecisionQualifiers()) - qualifier.precision = precision; -} - -// Check for messages to give on seeing a precision qualifier used in a -// declaration in the grammar. -void TParseContext::checkPrecisionQualifier(const TSourceLoc& loc, TPrecisionQualifier) -{ - if (precisionManager.shouldWarnAboutDefaults()) { - warn(loc, "all default precisions are highp; use precision statements to quiet warning, e.g.:\n" - " \"precision mediump int; precision highp float;\"", "", ""); - precisionManager.defaultWarningGiven(); - } -} - -// -// Same error message for all places assignments don't work. -// -void TParseContext::assignError(const TSourceLoc& loc, const char* op, TString left, TString right) -{ - error(loc, "", op, "cannot convert from '%s' to '%s'", - right.c_str(), left.c_str()); -} - -// -// Same error message for all places unary operations don't work. -// -void TParseContext::unaryOpError(const TSourceLoc& loc, const char* op, TString operand) -{ - error(loc, " wrong operand type", op, - "no operation '%s' exists that takes an operand of type %s (or there is no acceptable conversion)", - op, operand.c_str()); -} - -// -// Same error message for all binary operations don't work. -// -void TParseContext::binaryOpError(const TSourceLoc& loc, const char* op, TString left, TString right) -{ - error(loc, " wrong operand types:", op, - "no operation '%s' exists that takes a left-hand operand of type '%s' and " - "a right operand of type '%s' (or there is no acceptable conversion)", - op, left.c_str(), right.c_str()); -} - -// -// A basic type of EbtVoid is a key that the name string was seen in the source, but -// it was not found as a variable in the symbol table. If so, give the error -// message and insert a dummy variable in the symbol table to prevent future errors. -// -void TParseContext::variableCheck(TIntermTyped*& nodePtr) -{ - TIntermSymbol* symbol = nodePtr->getAsSymbolNode(); - if (! 
symbol) - return; - - if (symbol->getType().getBasicType() == EbtVoid) { - const char *extraInfoFormat = ""; - if (spvVersion.vulkan != 0 && symbol->getName() == "gl_VertexID") { - extraInfoFormat = "(Did you mean gl_VertexIndex?)"; - } else if (spvVersion.vulkan != 0 && symbol->getName() == "gl_InstanceID") { - extraInfoFormat = "(Did you mean gl_InstanceIndex?)"; - } - error(symbol->getLoc(), "undeclared identifier", symbol->getName().c_str(), extraInfoFormat); - - // Add to symbol table to prevent future error messages on the same name - if (symbol->getName().size() > 0) { - TVariable* fakeVariable = new TVariable(&symbol->getName(), TType(EbtFloat)); - symbolTable.insert(*fakeVariable); - - // substitute a symbol node for this new variable - nodePtr = intermediate.addSymbol(*fakeVariable, symbol->getLoc()); - } - } else { - switch (symbol->getQualifier().storage) { - case EvqPointCoord: - profileRequires(symbol->getLoc(), ENoProfile, 120, nullptr, "gl_PointCoord"); - break; - default: break; // some compilers want this - } - } -} - -// -// Both test and if necessary, spit out an error, to see if the node is really -// an l-value that can be operated on this way. -// -// Returns true if there was an error. -// -bool TParseContext::lValueErrorCheck(const TSourceLoc& loc, const char* op, TIntermTyped* node) -{ - TIntermBinary* binaryNode = node->getAsBinaryNode(); - - if (binaryNode) { - bool errorReturn = false; - - switch(binaryNode->getOp()) { - case EOpIndexDirect: - case EOpIndexIndirect: - // ... tessellation control shader ... - // If a per-vertex output variable is used as an l-value, it is a - // compile-time or link-time error if the expression indicating the - // vertex index is not the identifier gl_InvocationID. - if (language == EShLangTessControl) { - const TType& leftType = binaryNode->getLeft()->getType(); - if (leftType.getQualifier().storage == EvqVaryingOut && ! leftType.getQualifier().patch && binaryNode->getLeft()->getAsSymbolNode()) { - // we have a per-vertex output - const TIntermSymbol* rightSymbol = binaryNode->getRight()->getAsSymbolNode(); - if (! 
rightSymbol || rightSymbol->getQualifier().builtIn != EbvInvocationId) - error(loc, "tessellation-control per-vertex output l-value must be indexed with gl_InvocationID", "[]", ""); - } - } - - break; // left node is checked by base class - case EOpIndexDirectStruct: - break; // left node is checked by base class - case EOpVectorSwizzle: - errorReturn = lValueErrorCheck(loc, op, binaryNode->getLeft()); - if (!errorReturn) { - int offset[4] = {0,0,0,0}; - - TIntermTyped* rightNode = binaryNode->getRight(); - TIntermAggregate *aggrNode = rightNode->getAsAggregate(); - - for (TIntermSequence::iterator p = aggrNode->getSequence().begin(); - p != aggrNode->getSequence().end(); p++) { - int value = (*p)->getAsTyped()->getAsConstantUnion()->getConstArray()[0].getIConst(); - offset[value]++; - if (offset[value] > 1) { - error(loc, " l-value of swizzle cannot have duplicate components", op, "", ""); - - return true; - } - } - } - - return errorReturn; - default: - break; - } - - if (errorReturn) { - error(loc, " l-value required", op, "", ""); - return true; - } - } - - // Let the base class check errors - if (TParseContextBase::lValueErrorCheck(loc, op, node)) - return true; - - const char* symbol = nullptr; - TIntermSymbol* symNode = node->getAsSymbolNode(); - if (symNode != nullptr) - symbol = symNode->getName().c_str(); - - const char* message = nullptr; - switch (node->getQualifier().storage) { - case EvqVaryingIn: message = "can't modify shader input"; break; - case EvqInstanceId: message = "can't modify gl_InstanceID"; break; - case EvqVertexId: message = "can't modify gl_VertexID"; break; - case EvqFace: message = "can't modify gl_FrontFace"; break; - case EvqFragCoord: message = "can't modify gl_FragCoord"; break; - case EvqPointCoord: message = "can't modify gl_PointCoord"; break; - case EvqFragDepth: - intermediate.setDepthReplacing(); - // "In addition, it is an error to statically write to gl_FragDepth in the fragment shader." - if (profile == EEsProfile && intermediate.getEarlyFragmentTests()) - message = "can't modify gl_FragDepth if using early_fragment_tests"; - break; - - default: - break; - } - - if (message == nullptr && binaryNode == nullptr && symNode == nullptr) { - error(loc, " l-value required", op, "", ""); - - return true; - } - - // - // Everything else is okay, no error. - // - if (message == nullptr) - return false; - - // - // If we get here, we have an error and a message. - // - if (symNode) - error(loc, " l-value required", op, "\"%s\" (%s)", symbol, message); - else - error(loc, " l-value required", op, "(%s)", message); - - return true; -} - -// Test for and give an error if the node can't be read from. -void TParseContext::rValueErrorCheck(const TSourceLoc& loc, const char* op, TIntermTyped* node) -{ - // Let the base class check errors - TParseContextBase::rValueErrorCheck(loc, op, node); - -#ifdef AMD_EXTENSIONS - TIntermSymbol* symNode = node->getAsSymbolNode(); - if (!(symNode && symNode->getQualifier().writeonly)) // base class checks - if (symNode && symNode->getQualifier().explicitInterp) - error(loc, "can't read from explicitly-interpolated object: ", op, symNode->getName().c_str()); -#endif -} - -// -// Both test, and if necessary spit out an error, to see if the node is really -// a constant. -// -void TParseContext::constantValueCheck(TIntermTyped* node, const char* token) -{ - if (! 
node->getQualifier().isConstant()) - error(node->getLoc(), "constant expression required", token, ""); -} - -// -// Both test, and if necessary spit out an error, to see if the node is really -// an integer. -// -void TParseContext::integerCheck(const TIntermTyped* node, const char* token) -{ - if ((node->getBasicType() == EbtInt || node->getBasicType() == EbtUint) && node->isScalar()) - return; - - error(node->getLoc(), "scalar integer expression required", token, ""); -} - -// -// Both test, and if necessary spit out an error, to see if we are currently -// globally scoped. -// -void TParseContext::globalCheck(const TSourceLoc& loc, const char* token) -{ - if (! symbolTable.atGlobalLevel()) - error(loc, "not allowed in nested scope", token, ""); -} - -// -// Reserved errors for GLSL. -// -void TParseContext::reservedErrorCheck(const TSourceLoc& loc, const TString& identifier) -{ - // "Identifiers starting with "gl_" are reserved for use by OpenGL, and may not be - // declared in a shader; this results in a compile-time error." - if (! symbolTable.atBuiltInLevel()) { - if (builtInName(identifier)) - error(loc, "identifiers starting with \"gl_\" are reserved", identifier.c_str(), ""); - - // "__" are not supposed to be an error. ES 310 (and desktop) added the clarification: - // "In addition, all identifiers containing two consecutive underscores (__) are - // reserved; using such a name does not itself result in an error, but may result - // in undefined behavior." - // however, before that, ES tests required an error. - if (identifier.find("__") != TString::npos) { - if (profile == EEsProfile && version <= 300) - error(loc, "identifiers containing consecutive underscores (\"__\") are reserved, and an error if version <= 300", identifier.c_str(), ""); - else - warn(loc, "identifiers containing consecutive underscores (\"__\") are reserved", identifier.c_str(), ""); - } - } -} - -// -// Reserved errors for the preprocessor. -// -void TParseContext::reservedPpErrorCheck(const TSourceLoc& loc, const char* identifier, const char* op) -{ - // "__" are not supposed to be an error. ES 310 (and desktop) added the clarification: - // "All macro names containing two consecutive underscores ( __ ) are reserved; - // defining such a name does not itself result in an error, but may result in - // undefined behavior. All macro names prefixed with "GL_" ("GL" followed by a - // single underscore) are also reserved, and defining such a name results in a - // compile-time error." - // however, before that, ES tests required an error. - if (strncmp(identifier, "GL_", 3) == 0) - ppError(loc, "names beginning with \"GL_\" can't be (un)defined:", op, identifier); - else if (strncmp(identifier, "defined", 8) == 0) - ppError(loc, "\"defined\" can't be (un)defined:", op, identifier); - else if (strstr(identifier, "__") != 0) { - if (profile == EEsProfile && version >= 300 && - (strcmp(identifier, "__LINE__") == 0 || - strcmp(identifier, "__FILE__") == 0 || - strcmp(identifier, "__VERSION__") == 0)) - ppError(loc, "predefined names can't be (un)defined:", op, identifier); - else { - if (profile == EEsProfile && version <= 300) - ppError(loc, "names containing consecutive underscores are reserved, and an error if version <= 300:", op, identifier); - else - ppWarn(loc, "names containing consecutive underscores are reserved:", op, identifier); - } - } -} - -// -// See if this version/profile allows use of the line-continuation character '\'. -// -// Returns true if a line continuation should be done. 
-// -bool TParseContext::lineContinuationCheck(const TSourceLoc& loc, bool endOfComment) -{ - const char* message = "line continuation"; - - bool lineContinuationAllowed = (profile == EEsProfile && version >= 300) || - (profile != EEsProfile && (version >= 420 || extensionTurnedOn(E_GL_ARB_shading_language_420pack))); - - if (endOfComment) { - if (lineContinuationAllowed) - warn(loc, "used at end of comment; the following line is still part of the comment", message, ""); - else - warn(loc, "used at end of comment, but this version does not provide line continuation", message, ""); - - return lineContinuationAllowed; - } - - if (relaxedErrors()) { - if (! lineContinuationAllowed) - warn(loc, "not allowed in this version", message, ""); - return true; - } else { - profileRequires(loc, EEsProfile, 300, nullptr, message); - profileRequires(loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, message); - } - - return lineContinuationAllowed; -} - -bool TParseContext::builtInName(const TString& identifier) -{ - return identifier.compare(0, 3, "gl_") == 0; -} - -// -// Make sure there is enough data and not too many arguments provided to the -// constructor to build something of the type of the constructor. Also returns -// the type of the constructor. -// -// Part of establishing type is establishing specialization-constness. -// We don't yet know "top down" whether type is a specialization constant, -// but a const constructor can becomes a specialization constant if any of -// its children are, subject to KHR_vulkan_glsl rules: -// -// - int(), uint(), and bool() constructors for type conversions -// from any of the following types to any of the following types: -// * int -// * uint -// * bool -// - vector versions of the above conversion constructors -// -// Returns true if there was an error in construction. -// -bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, TFunction& function, TOperator op, TType& type) -{ - type.shallowCopy(function.getType()); - - bool constructingMatrix = false; - switch(op) { - case EOpConstructTextureSampler: - return constructorTextureSamplerError(loc, function); - case EOpConstructMat2x2: - case EOpConstructMat2x3: - case EOpConstructMat2x4: - case EOpConstructMat3x2: - case EOpConstructMat3x3: - case EOpConstructMat3x4: - case EOpConstructMat4x2: - case EOpConstructMat4x3: - case EOpConstructMat4x4: - case EOpConstructDMat2x2: - case EOpConstructDMat2x3: - case EOpConstructDMat2x4: - case EOpConstructDMat3x2: - case EOpConstructDMat3x3: - case EOpConstructDMat3x4: - case EOpConstructDMat4x2: - case EOpConstructDMat4x3: - case EOpConstructDMat4x4: -#ifdef AMD_EXTENSIONS - case EOpConstructF16Mat2x2: - case EOpConstructF16Mat2x3: - case EOpConstructF16Mat2x4: - case EOpConstructF16Mat3x2: - case EOpConstructF16Mat3x3: - case EOpConstructF16Mat3x4: - case EOpConstructF16Mat4x2: - case EOpConstructF16Mat4x3: - case EOpConstructF16Mat4x4: -#endif - constructingMatrix = true; - break; - default: - break; - } - - // - // Walk the arguments for first-pass checks and collection of information. - // - - int size = 0; - bool constType = true; - bool specConstType = false; // value is only valid if constType is true - bool full = false; - bool overFull = false; - bool matrixInMatrix = false; - bool arrayArg = false; - bool floatArgument = false; - for (int arg = 0; arg < function.getParamCount(); ++arg) { - if (function[arg].type->isArray()) { - if (! 
function[arg].type->isExplicitlySizedArray()) { - // Can't construct from an unsized array. - error(loc, "array argument must be sized", "constructor", ""); - return true; - } - arrayArg = true; - } - if (constructingMatrix && function[arg].type->isMatrix()) - matrixInMatrix = true; - - // 'full' will go to true when enough args have been seen. If we loop - // again, there is an extra argument. - if (full) { - // For vectors and matrices, it's okay to have too many components - // available, but not okay to have unused arguments. - overFull = true; - } - - size += function[arg].type->computeNumComponents(); - if (op != EOpConstructStruct && ! type.isArray() && size >= type.computeNumComponents()) - full = true; - - if (! function[arg].type->getQualifier().isConstant()) - constType = false; - if (function[arg].type->getQualifier().isSpecConstant()) - specConstType = true; - if (function[arg].type->isFloatingDomain()) - floatArgument = true; - } - - // inherit constness from children - if (constType) { - bool makeSpecConst; - // Finish pinning down spec-const semantics - if (specConstType) { - switch (op) { - case EOpConstructInt: - case EOpConstructUint: - case EOpConstructInt64: - case EOpConstructUint64: -#ifdef AMD_EXTENSIONS - case EOpConstructInt16: - case EOpConstructUint16: -#endif - case EOpConstructBool: - case EOpConstructBVec2: - case EOpConstructBVec3: - case EOpConstructBVec4: - case EOpConstructIVec2: - case EOpConstructIVec3: - case EOpConstructIVec4: - case EOpConstructUVec2: - case EOpConstructUVec3: - case EOpConstructUVec4: - case EOpConstructI64Vec2: - case EOpConstructI64Vec3: - case EOpConstructI64Vec4: - case EOpConstructU64Vec2: - case EOpConstructU64Vec3: - case EOpConstructU64Vec4: -#ifdef AMD_EXTENSIONS - case EOpConstructI16Vec2: - case EOpConstructI16Vec3: - case EOpConstructI16Vec4: - case EOpConstructU16Vec2: - case EOpConstructU16Vec3: - case EOpConstructU16Vec4: -#endif - // This was the list of valid ones, if they aren't converting from float - // and aren't making an array. - makeSpecConst = ! floatArgument && ! type.isArray(); - break; - default: - // anything else wasn't white-listed in the spec as a conversion - makeSpecConst = false; - break; - } - } else - makeSpecConst = false; - - if (makeSpecConst) - type.getQualifier().makeSpecConstant(); - else if (specConstType) - type.getQualifier().makeTemporary(); - else - type.getQualifier().storage = EvqConst; - } - - if (type.isArray()) { - if (function.getParamCount() == 0) { - error(loc, "array constructor must have at least one argument", "constructor", ""); - return true; - } - - if (type.isImplicitlySizedArray()) { - // auto adapt the constructor type to the number of arguments - type.changeOuterArraySize(function.getParamCount()); - } else if (type.getOuterArraySize() != function.getParamCount()) { - error(loc, "array constructor needs one argument per array element", "constructor", ""); - return true; - } - - if (type.isArrayOfArrays()) { - // Types have to match, but we're still making the type. - // Finish making the type, and the comparison is done later - // when checking for conversion. - TArraySizes& arraySizes = type.getArraySizes(); - - // At least the dimensionalities have to match. - if (! 
function[0].type->isArray() || arraySizes.getNumDims() != function[0].type->getArraySizes().getNumDims() + 1) { - error(loc, "array constructor argument not correct type to construct array element", "constructor", ""); - return true; - } - - if (arraySizes.isInnerImplicit()) { - // "Arrays of arrays ..., and the size for any dimension is optional" - // That means we need to adopt (from the first argument) the other array sizes into the type. - for (int d = 1; d < arraySizes.getNumDims(); ++d) { - if (arraySizes.getDimSize(d) == UnsizedArraySize) { - arraySizes.setDimSize(d, function[0].type->getArraySizes().getDimSize(d - 1)); - } - } - } - } - } - - if (arrayArg && op != EOpConstructStruct && ! type.isArrayOfArrays()) { - error(loc, "constructing non-array constituent from array argument", "constructor", ""); - return true; - } - - if (matrixInMatrix && ! type.isArray()) { - profileRequires(loc, ENoProfile, 120, nullptr, "constructing matrix from matrix"); - - // "If a matrix argument is given to a matrix constructor, - // it is a compile-time error to have any other arguments." - if (function.getParamCount() != 1) - error(loc, "matrix constructed from matrix can only have one argument", "constructor", ""); - return false; - } - - if (overFull) { - error(loc, "too many arguments", "constructor", ""); - return true; - } - - if (op == EOpConstructStruct && ! type.isArray() && (int)type.getStruct()->size() != function.getParamCount()) { - error(loc, "Number of constructor parameters does not match the number of structure fields", "constructor", ""); - return true; - } - - if ((op != EOpConstructStruct && size != 1 && size < type.computeNumComponents()) || - (op == EOpConstructStruct && size < type.computeNumComponents())) { - error(loc, "not enough data provided for construction", "constructor", ""); - return true; - } - - TIntermTyped* typed = node->getAsTyped(); - if (typed == nullptr) { - error(loc, "constructor argument does not have a type", "constructor", ""); - return true; - } - if (op != EOpConstructStruct && typed->getBasicType() == EbtSampler) { - error(loc, "cannot convert a sampler", "constructor", ""); - return true; - } - if (op != EOpConstructStruct && typed->getBasicType() == EbtAtomicUint) { - error(loc, "cannot convert an atomic_uint", "constructor", ""); - return true; - } - if (typed->getBasicType() == EbtVoid) { - error(loc, "cannot convert a void", "constructor", ""); - return true; - } - - return false; -} - -// Verify all the correct semantics for constructing a combined texture/sampler. -// Return true if the semantics are incorrect. 
-bool TParseContext::constructorTextureSamplerError(const TSourceLoc& loc, const TFunction& function) -{ - TString constructorName = function.getType().getBasicTypeString(); // TODO: performance: should not be making copy; interface needs to change - const char* token = constructorName.c_str(); - - // exactly two arguments needed - if (function.getParamCount() != 2) { - error(loc, "sampler-constructor requires two arguments", token, ""); - return true; - } - - // For now, not allowing arrayed constructors, the rest of this function - // is set up to allow them, if this test is removed: - if (function.getType().isArray()) { - error(loc, "sampler-constructor cannot make an array of samplers", token, ""); - return true; - } - - // first argument - // * the constructor's first argument must be a texture type - // * the dimensionality (1D, 2D, 3D, Cube, Rect, Buffer, MS, and Array) - // of the texture type must match that of the constructed sampler type - // (that is, the suffixes of the type of the first argument and the - // type of the constructor will be spelled the same way) - if (function[0].type->getBasicType() != EbtSampler || - ! function[0].type->getSampler().isTexture() || - function[0].type->isArray()) { - error(loc, "sampler-constructor first argument must be a scalar textureXXX type", token, ""); - return true; - } - // simulate the first argument's impact on the result type, so it can be compared with the encapsulated operator!=() - TSampler texture = function.getType().getSampler(); - texture.combined = false; - texture.shadow = false; - if (texture != function[0].type->getSampler()) { - error(loc, "sampler-constructor first argument must match type and dimensionality of constructor type", token, ""); - return true; - } - - // second argument - // * the constructor's second argument must be a scalar of type - // *sampler* or *samplerShadow* - // * the presence or absence of depth comparison (Shadow) must match - // between the constructed sampler type and the type of the second argument - if ( function[1].type->getBasicType() != EbtSampler || - ! 
function[1].type->getSampler().isPureSampler() || - function[1].type->isArray()) { - error(loc, "sampler-constructor second argument must be a scalar type 'sampler'", token, ""); - return true; - } - if (function.getType().getSampler().shadow != function[1].type->getSampler().shadow) { - error(loc, "sampler-constructor second argument presence of shadow must match constructor presence of shadow", token, ""); - return true; - } - - return false; -} - -// Checks to see if a void variable has been declared and raise an error message for such a case -// -// returns true in case of an error -// -bool TParseContext::voidErrorCheck(const TSourceLoc& loc, const TString& identifier, const TBasicType basicType) -{ - if (basicType == EbtVoid) { - error(loc, "illegal use of type 'void'", identifier.c_str(), ""); - return true; - } - - return false; -} - -// Checks to see if the node (for the expression) contains a scalar boolean expression or not -void TParseContext::boolCheck(const TSourceLoc& loc, const TIntermTyped* type) -{ - if (type->getBasicType() != EbtBool || type->isArray() || type->isMatrix() || type->isVector()) - error(loc, "boolean expression expected", "", ""); -} - -// This function checks to see if the node (for the expression) contains a scalar boolean expression or not -void TParseContext::boolCheck(const TSourceLoc& loc, const TPublicType& pType) -{ - if (pType.basicType != EbtBool || pType.arraySizes || pType.matrixCols > 1 || (pType.vectorSize > 1)) - error(loc, "boolean expression expected", "", ""); -} - -void TParseContext::samplerCheck(const TSourceLoc& loc, const TType& type, const TString& identifier, TIntermTyped* /*initializer*/) -{ - if (type.getQualifier().storage == EvqUniform) - return; - - if (type.getBasicType() == EbtStruct && containsFieldWithBasicType(type, EbtSampler)) - error(loc, "non-uniform struct contains a sampler or image:", type.getBasicTypeString().c_str(), identifier.c_str()); - else if (type.getBasicType() == EbtSampler && type.getQualifier().storage != EvqUniform) { - // non-uniform sampler - // not yet: okay if it has an initializer - // if (! 
initializer) - error(loc, "sampler/image types can only be used in uniform variables or function parameters:", type.getBasicTypeString().c_str(), identifier.c_str()); - } -} - -void TParseContext::atomicUintCheck(const TSourceLoc& loc, const TType& type, const TString& identifier) -{ - if (type.getQualifier().storage == EvqUniform) - return; - - if (type.getBasicType() == EbtStruct && containsFieldWithBasicType(type, EbtAtomicUint)) - error(loc, "non-uniform struct contains an atomic_uint:", type.getBasicTypeString().c_str(), identifier.c_str()); - else if (type.getBasicType() == EbtAtomicUint && type.getQualifier().storage != EvqUniform) - error(loc, "atomic_uints can only be used in uniform variables or function parameters:", type.getBasicTypeString().c_str(), identifier.c_str()); -} - -void TParseContext::transparentOpaqueCheck(const TSourceLoc& loc, const TType& type, const TString& identifier) -{ - if (parsingBuiltins) - return; - - if (type.getQualifier().storage != EvqUniform) - return; - - if (type.containsNonOpaque()) { - // Vulkan doesn't allow transparent uniforms outside of blocks - if (spvVersion.vulkan > 0) - vulkanRemoved(loc, "non-opaque uniforms outside a block"); - // OpenGL wants locations on these (unless they are getting automapped) - if (spvVersion.openGl > 0 && !type.getQualifier().hasLocation() && !intermediate.getAutoMapLocations()) - error(loc, "non-opaque uniform variables need a layout(location=L)", identifier.c_str(), ""); - } -} - -// -// Check/fix just a full qualifier (no variables or types yet, but qualifier is complete) at global level. -// -void TParseContext::globalQualifierFixCheck(const TSourceLoc& loc, TQualifier& qualifier) -{ - // move from parameter/unknown qualifiers to pipeline in/out qualifiers - switch (qualifier.storage) { - case EvqIn: - profileRequires(loc, ENoProfile, 130, nullptr, "in for stage inputs"); - profileRequires(loc, EEsProfile, 300, nullptr, "in for stage inputs"); - qualifier.storage = EvqVaryingIn; - break; - case EvqOut: - profileRequires(loc, ENoProfile, 130, nullptr, "out for stage outputs"); - profileRequires(loc, EEsProfile, 300, nullptr, "out for stage outputs"); - qualifier.storage = EvqVaryingOut; - break; - case EvqInOut: - qualifier.storage = EvqVaryingIn; - error(loc, "cannot use 'inout' at global scope", "", ""); - break; - default: - break; - } - - invariantCheck(loc, qualifier); -} - -// -// Check a full qualifier and type (no variable yet) at global level. -// -void TParseContext::globalQualifierTypeCheck(const TSourceLoc& loc, const TQualifier& qualifier, const TPublicType& publicType) -{ - if (! symbolTable.atGlobalLevel()) - return; - - if (qualifier.isMemory() && ! 
publicType.isImage() && publicType.qualifier.storage != EvqBuffer) - error(loc, "memory qualifiers cannot be used on this type", "", ""); - - if (qualifier.storage == EvqBuffer && publicType.basicType != EbtBlock) - error(loc, "buffers can be declared only as blocks", "buffer", ""); - - if (qualifier.storage != EvqVaryingIn && qualifier.storage != EvqVaryingOut) - return; - - if (publicType.shaderQualifiers.blendEquation) - error(loc, "can only be applied to a standalone 'out'", "blend equation", ""); - - // now, knowing it is a shader in/out, do all the in/out semantic checks - - if (publicType.basicType == EbtBool) { - error(loc, "cannot be bool", GetStorageQualifierString(qualifier.storage), ""); - return; - } - - if (publicType.basicType == EbtInt || publicType.basicType == EbtUint || -#ifdef AMD_EXTENSIONS - publicType.basicType == EbtInt16 || publicType.basicType == EbtUint16 || -#endif - publicType.basicType == EbtInt64 || publicType.basicType == EbtUint64 || - publicType.basicType == EbtDouble) - profileRequires(loc, EEsProfile, 300, nullptr, "shader input/output"); - -#ifdef AMD_EXTENSIONS - if (! qualifier.flat && ! qualifier.explicitInterp) { -#else - if (!qualifier.flat) { -#endif - if (publicType.basicType == EbtInt || publicType.basicType == EbtUint || -#ifdef AMD_EXTENSIONS - publicType.basicType == EbtInt16 || publicType.basicType == EbtUint16 || -#endif - publicType.basicType == EbtInt64 || publicType.basicType == EbtUint64 || - publicType.basicType == EbtDouble || - (publicType.userDef && (publicType.userDef->containsBasicType(EbtInt) || - publicType.userDef->containsBasicType(EbtUint) || - publicType.userDef->containsBasicType(EbtInt64) || - publicType.userDef->containsBasicType(EbtUint64) || - publicType.userDef->containsBasicType(EbtDouble)))) { - if (qualifier.storage == EvqVaryingIn && language == EShLangFragment) - error(loc, "must be qualified as flat", TType::getBasicString(publicType.basicType), GetStorageQualifierString(qualifier.storage)); - else if (qualifier.storage == EvqVaryingOut && language == EShLangVertex && version == 300) - error(loc, "must be qualified as flat", TType::getBasicString(publicType.basicType), GetStorageQualifierString(qualifier.storage)); - } - } - - if (qualifier.patch && qualifier.isInterpolation()) - error(loc, "cannot use interpolation qualifiers with patch", "patch", ""); - - if (qualifier.storage == EvqVaryingIn) { - switch (language) { - case EShLangVertex: - if (publicType.basicType == EbtStruct) { - error(loc, "cannot be a structure or array", GetStorageQualifierString(qualifier.storage), ""); - return; - } - if (publicType.arraySizes) { - requireProfile(loc, ~EEsProfile, "vertex input arrays"); - profileRequires(loc, ENoProfile, 150, nullptr, "vertex input arrays"); - } - if (publicType.basicType == EbtDouble) - profileRequires(loc, ~EEsProfile, 410, nullptr, "vertex-shader `double` type input"); - if (qualifier.isAuxiliary() || qualifier.isInterpolation() || qualifier.isMemory() || qualifier.invariant) - error(loc, "vertex input cannot be further qualified", "", ""); - break; - - case EShLangTessControl: - if (qualifier.patch) - error(loc, "can only use on output in tessellation-control shader", "patch", ""); - break; - - case EShLangTessEvaluation: - break; - - case EShLangGeometry: - break; - - case EShLangFragment: - if (publicType.userDef) { - profileRequires(loc, EEsProfile, 300, nullptr, "fragment-shader struct input"); - profileRequires(loc, ~EEsProfile, 150, nullptr, "fragment-shader struct input"); - if 
(publicType.userDef->containsStructure()) - requireProfile(loc, ~EEsProfile, "fragment-shader struct input containing structure"); - if (publicType.userDef->containsArray()) - requireProfile(loc, ~EEsProfile, "fragment-shader struct input containing an array"); - } - break; - - case EShLangCompute: - if (! symbolTable.atBuiltInLevel()) - error(loc, "global storage input qualifier cannot be used in a compute shader", "in", ""); - break; - - default: - break; - } - } else { - // qualifier.storage == EvqVaryingOut - switch (language) { - case EShLangVertex: - if (publicType.userDef) { - profileRequires(loc, EEsProfile, 300, nullptr, "vertex-shader struct output"); - profileRequires(loc, ~EEsProfile, 150, nullptr, "vertex-shader struct output"); - if (publicType.userDef->containsStructure()) - requireProfile(loc, ~EEsProfile, "vertex-shader struct output containing structure"); - if (publicType.userDef->containsArray()) - requireProfile(loc, ~EEsProfile, "vertex-shader struct output containing an array"); - } - - break; - - case EShLangTessControl: - break; - - case EShLangTessEvaluation: - if (qualifier.patch) - error(loc, "can only use on input in tessellation-evaluation shader", "patch", ""); - break; - - case EShLangGeometry: - break; - - case EShLangFragment: - profileRequires(loc, EEsProfile, 300, nullptr, "fragment shader output"); - if (publicType.basicType == EbtStruct) { - error(loc, "cannot be a structure", GetStorageQualifierString(qualifier.storage), ""); - return; - } - if (publicType.matrixRows > 0) { - error(loc, "cannot be a matrix", GetStorageQualifierString(qualifier.storage), ""); - return; - } - if (qualifier.isAuxiliary()) - error(loc, "can't use auxiliary qualifier on a fragment output", "centroid/sample/patch", ""); - if (qualifier.isInterpolation()) - error(loc, "can't use interpolation qualifier on a fragment output", "flat/smooth/noperspective", ""); - if (publicType.basicType == EbtDouble) - error(loc, "cannot contain a double", GetStorageQualifierString(qualifier.storage), ""); - break; - - case EShLangCompute: - error(loc, "global storage output qualifier cannot be used in a compute shader", "out", ""); - break; - - default: - break; - } - } -} - -// -// Merge characteristics of the 'src' qualifier into the 'dst'. -// If there is duplication, issue error messages, unless 'force' -// is specified, which means to just override default settings. -// -// Also, when force is false, it will be assumed that 'src' follows -// 'dst', for the purpose of error checking order for versions -// that require specific orderings of qualifiers. -// -void TParseContext::mergeQualifiers(const TSourceLoc& loc, TQualifier& dst, const TQualifier& src, bool force) -{ - // Multiple auxiliary qualifiers (mostly done later by 'individual qualifiers') - if (src.isAuxiliary() && dst.isAuxiliary()) - error(loc, "can only have one auxiliary qualifier (centroid, patch, and sample)", "", ""); - - // Multiple interpolation qualifiers (mostly done later by 'individual qualifiers') - if (src.isInterpolation() && dst.isInterpolation()) -#ifdef AMD_EXTENSIONS - error(loc, "can only have one interpolation qualifier (flat, smooth, noperspective, __explicitInterpAMD)", "", ""); -#else - error(loc, "can only have one interpolation qualifier (flat, smooth, noperspective)", "", ""); -#endif - - // Ordering - if (! force && ((profile != EEsProfile && version < 420) || - (profile == EEsProfile && version < 310)) - && ! 
extensionTurnedOn(E_GL_ARB_shading_language_420pack)) { - // non-function parameters - if (src.noContraction && (dst.invariant || dst.isInterpolation() || dst.isAuxiliary() || dst.storage != EvqTemporary || dst.precision != EpqNone)) - error(loc, "precise qualifier must appear first", "", ""); - if (src.invariant && (dst.isInterpolation() || dst.isAuxiliary() || dst.storage != EvqTemporary || dst.precision != EpqNone)) - error(loc, "invariant qualifier must appear before interpolation, storage, and precision qualifiers ", "", ""); - else if (src.isInterpolation() && (dst.isAuxiliary() || dst.storage != EvqTemporary || dst.precision != EpqNone)) - error(loc, "interpolation qualifiers must appear before storage and precision qualifiers", "", ""); - else if (src.isAuxiliary() && (dst.storage != EvqTemporary || dst.precision != EpqNone)) - error(loc, "Auxiliary qualifiers (centroid, patch, and sample) must appear before storage and precision qualifiers", "", ""); - else if (src.storage != EvqTemporary && (dst.precision != EpqNone)) - error(loc, "precision qualifier must appear as last qualifier", "", ""); - - // function parameters - if (src.noContraction && (dst.storage == EvqConst || dst.storage == EvqIn || dst.storage == EvqOut)) - error(loc, "precise qualifier must appear first", "", ""); - if (src.storage == EvqConst && (dst.storage == EvqIn || dst.storage == EvqOut)) - error(loc, "in/out must appear before const", "", ""); - } - - // Storage qualification - if (dst.storage == EvqTemporary || dst.storage == EvqGlobal) - dst.storage = src.storage; - else if ((dst.storage == EvqIn && src.storage == EvqOut) || - (dst.storage == EvqOut && src.storage == EvqIn)) - dst.storage = EvqInOut; - else if ((dst.storage == EvqIn && src.storage == EvqConst) || - (dst.storage == EvqConst && src.storage == EvqIn)) - dst.storage = EvqConstReadOnly; - else if (src.storage != EvqTemporary && - src.storage != EvqGlobal) - error(loc, "too many storage qualifiers", GetStorageQualifierString(src.storage), ""); - - // Precision qualifiers - if (! 
force && src.precision != EpqNone && dst.precision != EpqNone) - error(loc, "only one precision qualifier allowed", GetPrecisionQualifierString(src.precision), ""); - if (dst.precision == EpqNone || (force && src.precision != EpqNone)) - dst.precision = src.precision; - - // Layout qualifiers - mergeObjectLayoutQualifiers(dst, src, false); - - // individual qualifiers - bool repeated = false; - #define MERGE_SINGLETON(field) repeated |= dst.field && src.field; dst.field |= src.field; - MERGE_SINGLETON(invariant); - MERGE_SINGLETON(noContraction); - MERGE_SINGLETON(centroid); - MERGE_SINGLETON(smooth); - MERGE_SINGLETON(flat); - MERGE_SINGLETON(nopersp); -#ifdef AMD_EXTENSIONS - MERGE_SINGLETON(explicitInterp); -#endif - MERGE_SINGLETON(patch); - MERGE_SINGLETON(sample); - MERGE_SINGLETON(coherent); - MERGE_SINGLETON(volatil); - MERGE_SINGLETON(restrict); - MERGE_SINGLETON(readonly); - MERGE_SINGLETON(writeonly); - MERGE_SINGLETON(specConstant); - - if (repeated) - error(loc, "replicated qualifiers", "", ""); -} - -void TParseContext::setDefaultPrecision(const TSourceLoc& loc, TPublicType& publicType, TPrecisionQualifier qualifier) -{ - TBasicType basicType = publicType.basicType; - - if (basicType == EbtSampler) { - defaultSamplerPrecision[computeSamplerTypeIndex(publicType.sampler)] = qualifier; - - return; // all is well - } - - if (basicType == EbtInt || basicType == EbtFloat) { - if (publicType.isScalar()) { - defaultPrecision[basicType] = qualifier; - if (basicType == EbtInt) { - defaultPrecision[EbtUint] = qualifier; - precisionManager.explicitIntDefaultSeen(); - } else - precisionManager.explicitFloatDefaultSeen(); - - return; // all is well - } - } - - if (basicType == EbtAtomicUint) { - if (qualifier != EpqHigh) - error(loc, "can only apply highp to atomic_uint", "precision", ""); - - return; - } - - error(loc, "cannot apply precision statement to this type; use 'float', 'int' or a sampler type", TType::getBasicString(basicType), ""); -} - -// used to flatten the sampler type space into a single dimension -// correlates with the declaration of defaultSamplerPrecision[] -int TParseContext::computeSamplerTypeIndex(TSampler& sampler) -{ - int arrayIndex = sampler.arrayed ? 1 : 0; - int shadowIndex = sampler.shadow ? 1 : 0; - int externalIndex = sampler.external? 1 : 0; - int imageIndex = sampler.image ? 1 : 0; - int msIndex = sampler.ms ? 1 : 0; - - int flattened = EsdNumDims * (EbtNumTypes * (2 * (2 * (2 * (2 * arrayIndex + msIndex) + imageIndex) + shadowIndex) + - externalIndex) + sampler.type) + sampler.dim; - assert(flattened < maxSamplerIndex); - - return flattened; -} - -TPrecisionQualifier TParseContext::getDefaultPrecision(TPublicType& publicType) -{ - if (publicType.basicType == EbtSampler) - return defaultSamplerPrecision[computeSamplerTypeIndex(publicType.sampler)]; - else - return defaultPrecision[publicType.basicType]; -} - -void TParseContext::precisionQualifierCheck(const TSourceLoc& loc, TBasicType baseType, TQualifier& qualifier) -{ - // Built-in symbols are allowed some ambiguous precisions, to be pinned down - // later by context. - if (! 
obeyPrecisionQualifiers() || parsingBuiltins) - return; - - if (baseType == EbtAtomicUint && qualifier.precision != EpqNone && qualifier.precision != EpqHigh) - error(loc, "atomic counters can only be highp", "atomic_uint", ""); - - if (baseType == EbtFloat || baseType == EbtUint || baseType == EbtInt || baseType == EbtSampler || baseType == EbtAtomicUint) { - if (qualifier.precision == EpqNone) { - if (relaxedErrors()) - warn(loc, "type requires declaration of default precision qualifier", TType::getBasicString(baseType), "substituting 'mediump'"); - else - error(loc, "type requires declaration of default precision qualifier", TType::getBasicString(baseType), ""); - qualifier.precision = EpqMedium; - defaultPrecision[baseType] = EpqMedium; - } - } else if (qualifier.precision != EpqNone) - error(loc, "type cannot have precision qualifier", TType::getBasicString(baseType), ""); -} - -void TParseContext::parameterTypeCheck(const TSourceLoc& loc, TStorageQualifier qualifier, const TType& type) -{ - if ((qualifier == EvqOut || qualifier == EvqInOut) && type.isOpaque()) - error(loc, "samplers and atomic_uints cannot be output parameters", type.getBasicTypeString().c_str(), ""); -} - -bool TParseContext::containsFieldWithBasicType(const TType& type, TBasicType basicType) -{ - if (type.getBasicType() == basicType) - return true; - - if (type.getBasicType() == EbtStruct) { - const TTypeList& structure = *type.getStruct(); - for (unsigned int i = 0; i < structure.size(); ++i) { - if (containsFieldWithBasicType(*structure[i].type, basicType)) - return true; - } - } - - return false; -} - -// -// Do size checking for an array type's size. -// -void TParseContext::arraySizeCheck(const TSourceLoc& loc, TIntermTyped* expr, TArraySize& sizePair) -{ - bool isConst = false; - sizePair.node = nullptr; - - int size = 1; - - TIntermConstantUnion* constant = expr->getAsConstantUnion(); - if (constant) { - // handle true (non-specialization) constant - size = constant->getConstArray()[0].getIConst(); - isConst = true; - } else { - // see if it's a specialization constant instead - if (expr->getQualifier().isSpecConstant()) { - isConst = true; - sizePair.node = expr; - TIntermSymbol* symbol = expr->getAsSymbolNode(); - if (symbol && symbol->getConstArray().size() > 0) - size = symbol->getConstArray()[0].getIConst(); - } - } - - sizePair.size = size; - - if (! isConst || (expr->getBasicType() != EbtInt && expr->getBasicType() != EbtUint)) { - error(loc, "array size must be a constant integer expression", "", ""); - return; - } - - if (size <= 0) { - error(loc, "array size must be a positive integer", "", ""); - return; - } -} - -// -// See if this qualifier can be an array. -// -// Returns true if there is an error. -// -bool TParseContext::arrayQualifierError(const TSourceLoc& loc, const TQualifier& qualifier) -{ - if (qualifier.storage == EvqConst) { - profileRequires(loc, ENoProfile, 120, E_GL_3DL_array_objects, "const array"); - profileRequires(loc, EEsProfile, 300, nullptr, "const array"); - } - - if (qualifier.storage == EvqVaryingIn && language == EShLangVertex) { - requireProfile(loc, ~EEsProfile, "vertex input arrays"); - profileRequires(loc, ENoProfile, 150, nullptr, "vertex input arrays"); - } - - return false; -} - -// -// See if this qualifier and type combination can be an array. -// Assumes arrayQualifierError() was also called to catch the type-invariant tests. -// -// Returns true if there is an error. 
-// -bool TParseContext::arrayError(const TSourceLoc& loc, const TType& type) -{ - if (type.getQualifier().storage == EvqVaryingOut && language == EShLangVertex) { - if (type.isArrayOfArrays()) - requireProfile(loc, ~EEsProfile, "vertex-shader array-of-array output"); - else if (type.isStruct()) - requireProfile(loc, ~EEsProfile, "vertex-shader array-of-struct output"); - } - if (type.getQualifier().storage == EvqVaryingIn && language == EShLangFragment) { - if (type.isArrayOfArrays()) - requireProfile(loc, ~EEsProfile, "fragment-shader array-of-array input"); - else if (type.isStruct()) - requireProfile(loc, ~EEsProfile, "fragment-shader array-of-struct input"); - } - if (type.getQualifier().storage == EvqVaryingOut && language == EShLangFragment) { - if (type.isArrayOfArrays()) - requireProfile(loc, ~EEsProfile, "fragment-shader array-of-array output"); - } - - return false; -} - -// -// Require array to be completely sized -// -void TParseContext::arraySizeRequiredCheck(const TSourceLoc& loc, const TArraySizes& arraySizes) -{ - if (arraySizes.isImplicit()) - error(loc, "array size required", "", ""); -} - -void TParseContext::structArrayCheck(const TSourceLoc& /*loc*/, const TType& type) -{ - const TTypeList& structure = *type.getStruct(); - for (int m = 0; m < (int)structure.size(); ++m) { - const TType& member = *structure[m].type; - if (member.isArray()) - arraySizeRequiredCheck(structure[m].loc, *member.getArraySizes()); - } -} - -void TParseContext::arraySizesCheck(const TSourceLoc& loc, const TQualifier& qualifier, const TArraySizes* arraySizes, bool initializer, bool lastMember) -{ - assert(arraySizes); - - // always allow special built-in ins/outs sized to topologies - if (parsingBuiltins) - return; - - // always allow an initializer to set any unknown array sizes - if (initializer) - return; - - // No environment allows any non-outer-dimension to be implicitly sized - if (arraySizes->isInnerImplicit()) - error(loc, "only outermost dimension of an array of arrays can be implicitly sized", "[]", ""); - - if (arraySizes->isInnerSpecialization()) - error(loc, "only outermost dimension of an array of arrays can be a specialization constant", "[]", ""); - - // desktop always allows outer-dimension-unsized variable arrays, - if (profile != EEsProfile) - return; - - // for ES, if size isn't coming from an initializer, it has to be explicitly declared now, - // with very few exceptions - - // last member of ssbo block exception: - if (qualifier.storage == EvqBuffer && lastMember) - return; - - // implicitly-sized io exceptions: - switch (language) { - case EShLangGeometry: - if (qualifier.storage == EvqVaryingIn) - if ((profile == EEsProfile && version >= 320) || - extensionsTurnedOn(Num_AEP_geometry_shader, AEP_geometry_shader)) - return; - break; - case EShLangTessControl: - if ( qualifier.storage == EvqVaryingIn || - (qualifier.storage == EvqVaryingOut && ! qualifier.patch)) - if ((profile == EEsProfile && version >= 320) || - extensionsTurnedOn(Num_AEP_tessellation_shader, AEP_tessellation_shader)) - return; - break; - case EShLangTessEvaluation: - if ((qualifier.storage == EvqVaryingIn && ! 
qualifier.patch) || - qualifier.storage == EvqVaryingOut) - if ((profile == EEsProfile && version >= 320) || - extensionsTurnedOn(Num_AEP_tessellation_shader, AEP_tessellation_shader)) - return; - break; - default: - break; - } - - arraySizeRequiredCheck(loc, *arraySizes); -} - -void TParseContext::arrayOfArrayVersionCheck(const TSourceLoc& loc) -{ - const char* feature = "arrays of arrays"; - - requireProfile(loc, EEsProfile | ECoreProfile | ECompatibilityProfile, feature); - profileRequires(loc, EEsProfile, 310, nullptr, feature); - profileRequires(loc, ECoreProfile | ECompatibilityProfile, 430, nullptr, feature); -} - -void TParseContext::arrayDimCheck(const TSourceLoc& loc, const TArraySizes* sizes1, const TArraySizes* sizes2) -{ - if ((sizes1 && sizes2) || - (sizes1 && sizes1->getNumDims() > 1) || - (sizes2 && sizes2->getNumDims() > 1)) - arrayOfArrayVersionCheck(loc); -} - -void TParseContext::arrayDimCheck(const TSourceLoc& loc, const TType* type, const TArraySizes* sizes2) -{ - // skip checking for multiple dimensions on the type; it was caught earlier - if ((type && type->isArray() && sizes2) || - (sizes2 && sizes2->getNumDims() > 1)) - arrayOfArrayVersionCheck(loc); -} - -// Merge array dimensions listed in 'sizes' onto the type's array dimensions. -// -// From the spec: "vec4[2] a[3]; // size-3 array of size-2 array of vec4" -// -// That means, the 'sizes' go in front of the 'type' as outermost sizes. -// 'type' is the type part of the declaration (to the left) -// 'sizes' is the arrayness tagged on the identifier (to the right) -// -void TParseContext::arrayDimMerge(TType& type, const TArraySizes* sizes) -{ - if (sizes) - type.addArrayOuterSizes(*sizes); -} - -// -// Do all the semantic checking for declaring or redeclaring an array, with and -// without a size, and make the right changes to the symbol table. -// -void TParseContext::declareArray(const TSourceLoc& loc, const TString& identifier, const TType& type, TSymbol*& symbol) -{ - if (symbol == nullptr) { - bool currentScope; - symbol = symbolTable.find(identifier, nullptr, ¤tScope); - - if (symbol && builtInName(identifier) && ! symbolTable.atBuiltInLevel()) { - // bad shader (errors already reported) trying to redeclare a built-in name as an array - symbol = nullptr; - return; - } - if (symbol == nullptr || ! currentScope) { - // - // Successfully process a new definition. - // (Redeclarations have to take place at the same scope; otherwise they are hiding declarations) - // - symbol = new TVariable(&identifier, type); - symbolTable.insert(*symbol); - if (symbolTable.atGlobalLevel()) - trackLinkage(*symbol); - - if (! symbolTable.atBuiltInLevel()) { - if (isIoResizeArray(type)) { - ioArraySymbolResizeList.push_back(symbol); - checkIoArraysConsistency(loc, true); - } else - fixIoArraySize(loc, symbol->getWritableType()); - } - - return; - } - if (symbol->getAsAnonMember()) { - error(loc, "cannot redeclare a user-block member array", identifier.c_str(), ""); - symbol = nullptr; - return; - } - } - - // - // Process a redeclaration. - // - - if (symbol == nullptr) { - error(loc, "array variable name expected", identifier.c_str(), ""); - return; - } - - // redeclareBuiltinVariable() should have already done the copyUp() - TType& existingType = symbol->getWritableType(); - - if (! existingType.isArray()) { - error(loc, "redeclaring non-array as array", identifier.c_str(), ""); - return; - } - - if (! 
existingType.sameElementType(type)) { - error(loc, "redeclaration of array with a different element type", identifier.c_str(), ""); - return; - } - - if (! existingType.sameInnerArrayness(type)) { - error(loc, "redeclaration of array with a different array dimensions or sizes", identifier.c_str(), ""); - return; - } - - if (existingType.isExplicitlySizedArray()) { - // be more leniant for input arrays to geometry shaders and tessellation control outputs, where the redeclaration is the same size - if (! (isIoResizeArray(type) && existingType.getOuterArraySize() == type.getOuterArraySize())) - error(loc, "redeclaration of array with size", identifier.c_str(), ""); - return; - } - - arrayLimitCheck(loc, identifier, type.getOuterArraySize()); - - existingType.updateArraySizes(type); - - if (isIoResizeArray(type)) - checkIoArraysConsistency(loc); -} - -void TParseContext::updateImplicitArraySize(const TSourceLoc& loc, TIntermNode *node, int index) -{ - // maybe there is nothing to do... - TIntermTyped* typedNode = node->getAsTyped(); - if (typedNode->getType().getImplicitArraySize() > index) - return; - - // something to do... - - // Figure out what symbol to lookup, as we will use its type to edit for the size change, - // as that type will be shared through shallow copies for future references. - TSymbol* symbol = nullptr; - int blockIndex = -1; - const TString* lookupName = nullptr; - if (node->getAsSymbolNode()) - lookupName = &node->getAsSymbolNode()->getName(); - else if (node->getAsBinaryNode()) { - const TIntermBinary* deref = node->getAsBinaryNode(); - // This has to be the result of a block dereference, unless it's bad shader code - // If it's a uniform block, then an error will be issued elsewhere, but - // return early now to avoid crashing later in this function. - if (deref->getLeft()->getBasicType() != EbtBlock || - deref->getLeft()->getType().getQualifier().storage == EvqUniform || - deref->getRight()->getAsConstantUnion() == nullptr) - return; - - const TIntermTyped* left = deref->getLeft(); - const TIntermTyped* right = deref->getRight(); - - if (left->getAsBinaryNode()) { - left = left->getAsBinaryNode()->getLeft(); // Block array access - assert(left->isArray()); - } - - if (! left->getAsSymbolNode()) - return; - - blockIndex = right->getAsConstantUnion()->getConstArray()[0].getIConst(); - - lookupName = &left->getAsSymbolNode()->getName(); - if (IsAnonymous(*lookupName)) - lookupName = &(*left->getType().getStruct())[blockIndex].type->getFieldName(); - } - - // Lookup the symbol, should only fail if shader code is incorrect - symbol = symbolTable.find(*lookupName); - if (symbol == nullptr) - return; - - if (symbol->getAsFunction()) { - error(loc, "array variable name expected", symbol->getName().c_str(), ""); - return; - } - - if (symbol->getType().isStruct() && blockIndex != -1) - (*symbol->getWritableType().getStruct())[blockIndex].type->setImplicitArraySize(index + 1); - else - symbol->getWritableType().setImplicitArraySize(index + 1); -} - -// Returns true if the first argument to the #line directive is the line number for the next line. -// -// Desktop, pre-version 3.30: "After processing this directive -// (including its new-line), the implementation will behave as if it is compiling at line number line+1 and -// source string number source-string-number." 
-// -// Desktop, version 3.30 and later, and ES: "After processing this directive -// (including its new-line), the implementation will behave as if it is compiling at line number line and -// source string number source-string-number. -bool TParseContext::lineDirectiveShouldSetNextLine() const -{ - return profile == EEsProfile || version >= 330; -} - -// -// Enforce non-initializer type/qualifier rules. -// -void TParseContext::nonInitConstCheck(const TSourceLoc& loc, TString& identifier, TType& type) -{ - // - // Make the qualifier make sense, given that there is not an initializer. - // - if (type.getQualifier().storage == EvqConst || - type.getQualifier().storage == EvqConstReadOnly) { - type.getQualifier().makeTemporary(); - error(loc, "variables with qualifier 'const' must be initialized", identifier.c_str(), ""); - } -} - -// -// See if the identifier is a built-in symbol that can be redeclared, and if so, -// copy the symbol table's read-only built-in variable to the current -// global level, where it can be modified based on the passed in type. -// -// Returns nullptr if no redeclaration took place; meaning a normal declaration still -// needs to occur for it, not necessarily an error. -// -// Returns a redeclared and type-modified variable if a redeclarated occurred. -// -TSymbol* TParseContext::redeclareBuiltinVariable(const TSourceLoc& loc, const TString& identifier, - const TQualifier& qualifier, const TShaderQualifiers& publicType) -{ - if (! builtInName(identifier) || symbolTable.atBuiltInLevel() || ! symbolTable.atGlobalLevel()) - return nullptr; - - bool nonEsRedecls = (profile != EEsProfile && (version >= 130 || identifier == "gl_TexCoord")); - bool esRedecls = (profile == EEsProfile && - (version >= 320 || extensionsTurnedOn(Num_AEP_shader_io_blocks, AEP_shader_io_blocks))); - if (! esRedecls && ! nonEsRedecls) - return nullptr; - - // Special case when using GL_ARB_separate_shader_objects - bool ssoPre150 = false; // means the only reason this variable is redeclared is due to this combination - if (profile != EEsProfile && version <= 140 && extensionTurnedOn(E_GL_ARB_separate_shader_objects)) { - if (identifier == "gl_Position" || - identifier == "gl_PointSize" || - identifier == "gl_ClipVertex" || - identifier == "gl_FogFragCoord") - ssoPre150 = true; - } - - // Potentially redeclaring a built-in variable... - - if (ssoPre150 || - (identifier == "gl_FragDepth" && ((nonEsRedecls && version >= 420) || esRedecls)) || - (identifier == "gl_FragCoord" && ((nonEsRedecls && version >= 150) || esRedecls)) || - identifier == "gl_ClipDistance" || - identifier == "gl_CullDistance" || - identifier == "gl_FrontColor" || - identifier == "gl_BackColor" || - identifier == "gl_FrontSecondaryColor" || - identifier == "gl_BackSecondaryColor" || - identifier == "gl_SecondaryColor" || - (identifier == "gl_Color" && language == EShLangFragment) || -#ifdef NV_EXTENSIONS - identifier == "gl_SampleMask" || - identifier == "gl_Layer" || -#endif - identifier == "gl_TexCoord") { - - // Find the existing symbol, if any. - bool builtIn; - TSymbol* symbol = symbolTable.find(identifier, &builtIn); - - // If the symbol was not found, this must be a version/profile/stage - // that doesn't have it. - if (! symbol) - return nullptr; - - // If it wasn't at a built-in level, then it's already been redeclared; - // that is, this is a redeclaration of a redeclaration; reuse that initial - // redeclaration. Otherwise, make the new one. 
- if (builtIn) - makeEditable(symbol); - - // Now, modify the type of the copy, as per the type of the current redeclaration. - - TQualifier& symbolQualifier = symbol->getWritableType().getQualifier(); - if (ssoPre150) { - if (intermediate.inIoAccessed(identifier)) - error(loc, "cannot redeclare after use", identifier.c_str(), ""); - if (qualifier.hasLayout()) - error(loc, "cannot apply layout qualifier to", "redeclaration", symbol->getName().c_str()); - if (qualifier.isMemory() || qualifier.isAuxiliary() || (language == EShLangVertex && qualifier.storage != EvqVaryingOut) || - (language == EShLangFragment && qualifier.storage != EvqVaryingIn)) - error(loc, "cannot change storage, memory, or auxiliary qualification of", "redeclaration", symbol->getName().c_str()); - if (! qualifier.smooth) - error(loc, "cannot change interpolation qualification of", "redeclaration", symbol->getName().c_str()); - } else if (identifier == "gl_FrontColor" || - identifier == "gl_BackColor" || - identifier == "gl_FrontSecondaryColor" || - identifier == "gl_BackSecondaryColor" || - identifier == "gl_SecondaryColor" || - identifier == "gl_Color") { - symbolQualifier.flat = qualifier.flat; - symbolQualifier.smooth = qualifier.smooth; - symbolQualifier.nopersp = qualifier.nopersp; - if (qualifier.hasLayout()) - error(loc, "cannot apply layout qualifier to", "redeclaration", symbol->getName().c_str()); - if (qualifier.isMemory() || qualifier.isAuxiliary() || symbol->getType().getQualifier().storage != qualifier.storage) - error(loc, "cannot change storage, memory, or auxiliary qualification of", "redeclaration", symbol->getName().c_str()); - } else if (identifier == "gl_TexCoord" || - identifier == "gl_ClipDistance" || - identifier == "gl_CullDistance") { - if (qualifier.hasLayout() || qualifier.isMemory() || qualifier.isAuxiliary() || - qualifier.nopersp != symbolQualifier.nopersp || qualifier.flat != symbolQualifier.flat || - symbolQualifier.storage != qualifier.storage) - error(loc, "cannot change qualification of", "redeclaration", symbol->getName().c_str()); - } else if (identifier == "gl_FragCoord") { - if (intermediate.inIoAccessed("gl_FragCoord")) - error(loc, "cannot redeclare after use", "gl_FragCoord", ""); - if (qualifier.nopersp != symbolQualifier.nopersp || qualifier.flat != symbolQualifier.flat || - qualifier.isMemory() || qualifier.isAuxiliary()) - error(loc, "can only change layout qualification of", "redeclaration", symbol->getName().c_str()); - if (qualifier.storage != EvqVaryingIn) - error(loc, "cannot change input storage qualification of", "redeclaration", symbol->getName().c_str()); - if (! 
builtIn && (publicType.pixelCenterInteger != intermediate.getPixelCenterInteger() || - publicType.originUpperLeft != intermediate.getOriginUpperLeft())) - error(loc, "cannot redeclare with different qualification:", "redeclaration", symbol->getName().c_str()); - if (publicType.pixelCenterInteger) - intermediate.setPixelCenterInteger(); - if (publicType.originUpperLeft) - intermediate.setOriginUpperLeft(); - } else if (identifier == "gl_FragDepth") { - if (qualifier.nopersp != symbolQualifier.nopersp || qualifier.flat != symbolQualifier.flat || - qualifier.isMemory() || qualifier.isAuxiliary()) - error(loc, "can only change layout qualification of", "redeclaration", symbol->getName().c_str()); - if (qualifier.storage != EvqVaryingOut) - error(loc, "cannot change output storage qualification of", "redeclaration", symbol->getName().c_str()); - if (publicType.layoutDepth != EldNone) { - if (intermediate.inIoAccessed("gl_FragDepth")) - error(loc, "cannot redeclare after use", "gl_FragDepth", ""); - if (! intermediate.setDepth(publicType.layoutDepth)) - error(loc, "all redeclarations must use the same depth layout on", "redeclaration", symbol->getName().c_str()); - } - } -#ifdef NV_EXTENSIONS - else if (identifier == "gl_SampleMask") { - if (!publicType.layoutOverrideCoverage) { - error(loc, "redeclaration only allowed for override_coverage layout", "redeclaration", symbol->getName().c_str()); - } - intermediate.setLayoutOverrideCoverage(); - } - else if (identifier == "gl_Layer") { - if (!qualifier.layoutViewportRelative && qualifier.layoutSecondaryViewportRelativeOffset == -2048) - error(loc, "redeclaration only allowed for viewport_relative or secondary_view_offset layout", "redeclaration", symbol->getName().c_str()); - symbolQualifier.layoutViewportRelative = qualifier.layoutViewportRelative; - symbolQualifier.layoutSecondaryViewportRelativeOffset = qualifier.layoutSecondaryViewportRelativeOffset; - } -#endif - - // TODO: semantics quality: separate smooth from nothing declared, then use IsInterpolation for several tests above - - return symbol; - } - - return nullptr; -} - -// -// Either redeclare the requested block, or give an error message why it can't be done. -// -// TODO: functionality: explicitly sizing members of redeclared blocks is not giving them an explicit size -void TParseContext::redeclareBuiltinBlock(const TSourceLoc& loc, TTypeList& newTypeList, const TString& blockName, const TString* instanceName, TArraySizes* arraySizes) -{ - const char* feature = "built-in block redeclaration"; - profileRequires(loc, EEsProfile, 320, Num_AEP_shader_io_blocks, AEP_shader_io_blocks, feature); - profileRequires(loc, ~EEsProfile, 410, E_GL_ARB_separate_shader_objects, feature); - - if (blockName != "gl_PerVertex" && blockName != "gl_PerFragment") { - error(loc, "cannot redeclare block: ", "block declaration", blockName.c_str()); - return; - } - - // Redeclaring a built-in block... - - if (instanceName && ! builtInName(*instanceName)) { - error(loc, "cannot redeclare a built-in block with a user name", instanceName->c_str(), ""); - return; - } - - // Blocks with instance names are easy to find, lookup the instance name, - // Anonymous blocks need to be found via a member. - bool builtIn; - TSymbol* block; - if (instanceName) - block = symbolTable.find(*instanceName, &builtIn); - else - block = symbolTable.find(newTypeList.front().type->getFieldName(), &builtIn); - - // If the block was not found, this must be a version/profile/stage - // that doesn't have it, or the instance name is wrong. 
- const char* errorName = instanceName ? instanceName->c_str() : newTypeList.front().type->getFieldName().c_str(); - if (! block) { - error(loc, "no declaration found for redeclaration", errorName, ""); - return; - } - // Built-in blocks cannot be redeclared more than once, which if happened, - // we'd be finding the already redeclared one here, rather than the built in. - if (! builtIn) { - error(loc, "can only redeclare a built-in block once, and before any use", blockName.c_str(), ""); - return; - } - - // Copy the block to make a writable version, to insert into the block table after editing. - block = symbolTable.copyUpDeferredInsert(block); - - if (block->getType().getBasicType() != EbtBlock) { - error(loc, "cannot redeclare a non block as a block", errorName, ""); - return; - } - - // Edit and error check the container against the redeclaration - // - remove unused members - // - ensure remaining qualifiers/types match - TType& type = block->getWritableType(); - -#ifdef NV_EXTENSIONS - // if gl_PerVertex is redeclared for the purpose of passing through "gl_Position" - // for passthrough purpose, the redclared block should have the same qualifers as - // the current one - if (currentBlockQualifier.layoutPassthrough) - { - type.getQualifier().layoutPassthrough = currentBlockQualifier.layoutPassthrough; - type.getQualifier().storage = currentBlockQualifier.storage; - type.getQualifier().layoutStream = currentBlockQualifier.layoutStream; - type.getQualifier().layoutXfbBuffer = currentBlockQualifier.layoutXfbBuffer; - } -#endif - - TTypeList::iterator member = type.getWritableStruct()->begin(); - size_t numOriginalMembersFound = 0; - while (member != type.getStruct()->end()) { - // look for match - bool found = false; - TTypeList::const_iterator newMember; - TSourceLoc memberLoc; - memberLoc.init(); - for (newMember = newTypeList.begin(); newMember != newTypeList.end(); ++newMember) { - if (member->type->getFieldName() == newMember->type->getFieldName()) { - found = true; - memberLoc = newMember->loc; - break; - } - } - - if (found) { - ++numOriginalMembersFound; - // - ensure match between redeclared members' types - // - check for things that can't be changed - // - update things that can be changed - TType& oldType = *member->type; - const TType& newType = *newMember->type; - if (! newType.sameElementType(oldType)) - error(memberLoc, "cannot redeclare block member with a different type", member->type->getFieldName().c_str(), ""); - if (oldType.isArray() != newType.isArray()) - error(memberLoc, "cannot change arrayness of redeclared block member", member->type->getFieldName().c_str(), ""); - else if (! 
oldType.sameArrayness(newType) && oldType.isExplicitlySizedArray()) - error(memberLoc, "cannot change array size of redeclared block member", member->type->getFieldName().c_str(), ""); - else if (newType.isArray()) - arrayLimitCheck(loc, member->type->getFieldName(), newType.getOuterArraySize()); - if (newType.getQualifier().isMemory()) - error(memberLoc, "cannot add memory qualifier to redeclared block member", member->type->getFieldName().c_str(), ""); - if (newType.getQualifier().hasLayout()) - error(memberLoc, "cannot add layout to redeclared block member", member->type->getFieldName().c_str(), ""); - if (newType.getQualifier().patch) - error(memberLoc, "cannot add patch to redeclared block member", member->type->getFieldName().c_str(), ""); - oldType.getQualifier().centroid = newType.getQualifier().centroid; - oldType.getQualifier().sample = newType.getQualifier().sample; - oldType.getQualifier().invariant = newType.getQualifier().invariant; - oldType.getQualifier().noContraction = newType.getQualifier().noContraction; - oldType.getQualifier().smooth = newType.getQualifier().smooth; - oldType.getQualifier().flat = newType.getQualifier().flat; - oldType.getQualifier().nopersp = newType.getQualifier().nopersp; - -#ifdef NV_EXTENSIONS - if (member->type->getFieldName() == "gl_Layer") { - if (!newType.getQualifier().layoutViewportRelative && newType.getQualifier().layoutSecondaryViewportRelativeOffset == -2048) - error(loc, "redeclaration only allowed for viewport_relative or secondary_view_offset layout", "redeclaration", member->type->getFieldName().c_str()); - oldType.getQualifier().layoutViewportRelative = newType.getQualifier().layoutViewportRelative; - oldType.getQualifier().layoutSecondaryViewportRelativeOffset = newType.getQualifier().layoutSecondaryViewportRelativeOffset; - } -#endif - if (oldType.isImplicitlySizedArray() && newType.isExplicitlySizedArray()) - oldType.changeOuterArraySize(newType.getOuterArraySize()); - - // go to next member - ++member; - } else { - // For missing members of anonymous blocks that have been redeclared, - // hide the original (shared) declaration. - // Instance-named blocks can just have the member removed. - if (instanceName) - member = type.getWritableStruct()->erase(member); - else { - member->type->hideMember(); - ++member; - } - } - } - - if (numOriginalMembersFound < newTypeList.size()) - error(loc, "block redeclaration has extra members", blockName.c_str(), ""); - if (type.isArray() != (arraySizes != nullptr)) - error(loc, "cannot change arrayness of redeclared block", blockName.c_str(), ""); - else if (type.isArray()) { - if (type.isExplicitlySizedArray() && arraySizes->getOuterSize() == UnsizedArraySize) - error(loc, "block already declared with size, can't redeclare as implicitly-sized", blockName.c_str(), ""); - else if (type.isExplicitlySizedArray() && type.getArraySizes() != *arraySizes) - error(loc, "cannot change array size of redeclared block", blockName.c_str(), ""); - else if (type.isImplicitlySizedArray() && arraySizes->getOuterSize() != UnsizedArraySize) - type.changeOuterArraySize(arraySizes->getOuterSize()); - } - - symbolTable.insert(*block); - - // Check for general layout qualifier errors - layoutObjectCheck(loc, *block); - - // Tracking for implicit sizing of array - if (isIoResizeArray(block->getType())) { - ioArraySymbolResizeList.push_back(block); - checkIoArraysConsistency(loc, true); - } else if (block->getType().isArray()) - fixIoArraySize(loc, block->getWritableType()); - - // Save it in the AST for linker use. 
- trackLinkage(*block); -} - -void TParseContext::paramCheckFix(const TSourceLoc& loc, const TStorageQualifier& qualifier, TType& type) -{ - switch (qualifier) { - case EvqConst: - case EvqConstReadOnly: - type.getQualifier().storage = EvqConstReadOnly; - break; - case EvqIn: - case EvqOut: - case EvqInOut: - type.getQualifier().storage = qualifier; - break; - case EvqGlobal: - case EvqTemporary: - type.getQualifier().storage = EvqIn; - break; - default: - type.getQualifier().storage = EvqIn; - error(loc, "storage qualifier not allowed on function parameter", GetStorageQualifierString(qualifier), ""); - break; - } -} - -void TParseContext::paramCheckFix(const TSourceLoc& loc, const TQualifier& qualifier, TType& type) -{ - if (qualifier.isMemory()) { - type.getQualifier().volatil = qualifier.volatil; - type.getQualifier().coherent = qualifier.coherent; - type.getQualifier().readonly = qualifier.readonly; - type.getQualifier().writeonly = qualifier.writeonly; - type.getQualifier().restrict = qualifier.restrict; - } - - if (qualifier.isAuxiliary() || - qualifier.isInterpolation()) - error(loc, "cannot use auxiliary or interpolation qualifiers on a function parameter", "", ""); - if (qualifier.hasLayout()) - error(loc, "cannot use layout qualifiers on a function parameter", "", ""); - if (qualifier.invariant) - error(loc, "cannot use invariant qualifier on a function parameter", "", ""); - if (qualifier.noContraction) { - if (qualifier.isParamOutput()) - type.getQualifier().noContraction = true; - else - warn(loc, "qualifier has no effect on non-output parameters", "precise", ""); - } - - paramCheckFix(loc, qualifier.storage, type); -} - -void TParseContext::nestedBlockCheck(const TSourceLoc& loc) -{ - if (structNestingLevel > 0) - error(loc, "cannot nest a block definition inside a structure or block", "", ""); - ++structNestingLevel; -} - -void TParseContext::nestedStructCheck(const TSourceLoc& loc) -{ - if (structNestingLevel > 0) - error(loc, "cannot nest a structure definition inside a structure or block", "", ""); - ++structNestingLevel; -} - -void TParseContext::arrayObjectCheck(const TSourceLoc& loc, const TType& type, const char* op) -{ - // Some versions don't allow comparing arrays or structures containing arrays - if (type.containsArray()) { - profileRequires(loc, ENoProfile, 120, E_GL_3DL_array_objects, op); - profileRequires(loc, EEsProfile, 300, nullptr, op); - } -} - -void TParseContext::opaqueCheck(const TSourceLoc& loc, const TType& type, const char* op) -{ - if (containsFieldWithBasicType(type, EbtSampler)) - error(loc, "can't use with samplers or structs containing samplers", op, ""); -} - -void TParseContext::specializationCheck(const TSourceLoc& loc, const TType& type, const char* op) -{ - if (type.containsSpecializationSize()) - error(loc, "can't use with types containing arrays sized with a specialization constant", op, ""); -} - -void TParseContext::structTypeCheck(const TSourceLoc& /*loc*/, TPublicType& publicType) -{ - const TTypeList& typeList = *publicType.userDef->getStruct(); - - // fix and check for member storage qualifiers and types that don't belong within a structure - for (unsigned int member = 0; member < typeList.size(); ++member) { - TQualifier& memberQualifier = typeList[member].type->getQualifier(); - const TSourceLoc& memberLoc = typeList[member].loc; - if (memberQualifier.isAuxiliary() || - memberQualifier.isInterpolation() || - (memberQualifier.storage != EvqTemporary && memberQualifier.storage != EvqGlobal)) - error(memberLoc, "cannot use storage 
or interpolation qualifiers on structure members", typeList[member].type->getFieldName().c_str(), ""); - if (memberQualifier.isMemory()) - error(memberLoc, "cannot use memory qualifiers on structure members", typeList[member].type->getFieldName().c_str(), ""); - if (memberQualifier.hasLayout()) { - error(memberLoc, "cannot use layout qualifiers on structure members", typeList[member].type->getFieldName().c_str(), ""); - memberQualifier.clearLayout(); - } - if (memberQualifier.invariant) - error(memberLoc, "cannot use invariant qualifier on structure members", typeList[member].type->getFieldName().c_str(), ""); - } -} - -// -// See if this loop satisfies the limitations for ES 2.0 (version 100) for loops in Appendex A: -// -// "The loop index has type int or float. -// -// "The for statement has the form: -// for ( init-declaration ; condition ; expression ) -// init-declaration has the form: type-specifier identifier = constant-expression -// condition has the form: loop-index relational_operator constant-expression -// where relational_operator is one of: > >= < <= == or != -// expression [sic] has one of the following forms: -// loop-index++ -// loop-index-- -// loop-index += constant-expression -// loop-index -= constant-expression -// -// The body is handled in an AST traversal. -// -void TParseContext::inductiveLoopCheck(const TSourceLoc& loc, TIntermNode* init, TIntermLoop* loop) -{ - // loop index init must exist and be a declaration, which shows up in the AST as an aggregate of size 1 of the declaration - bool badInit = false; - if (! init || ! init->getAsAggregate() || init->getAsAggregate()->getSequence().size() != 1) - badInit = true; - TIntermBinary* binaryInit = 0; - if (! badInit) { - // get the declaration assignment - binaryInit = init->getAsAggregate()->getSequence()[0]->getAsBinaryNode(); - if (! binaryInit) - badInit = true; - } - if (badInit) { - error(loc, "inductive-loop init-declaration requires the form \"type-specifier loop-index = constant-expression\"", "limitations", ""); - return; - } - - // loop index must be type int or float - if (! binaryInit->getType().isScalar() || (binaryInit->getBasicType() != EbtInt && binaryInit->getBasicType() != EbtFloat)) { - error(loc, "inductive loop requires a scalar 'int' or 'float' loop index", "limitations", ""); - return; - } - - // init is the form "loop-index = constant" - if (binaryInit->getOp() != EOpAssign || ! binaryInit->getLeft()->getAsSymbolNode() || ! binaryInit->getRight()->getAsConstantUnion()) { - error(loc, "inductive-loop init-declaration requires the form \"type-specifier loop-index = constant-expression\"", "limitations", ""); - return; - } - - // get the unique id of the loop index - int loopIndex = binaryInit->getLeft()->getAsSymbolNode()->getId(); - inductiveLoopIds.insert(loopIndex); - - // condition's form must be "loop-index relational-operator constant-expression" - bool badCond = ! loop->getTest(); - if (! badCond) { - TIntermBinary* binaryCond = loop->getTest()->getAsBinaryNode(); - badCond = ! binaryCond; - if (! badCond) { - switch (binaryCond->getOp()) { - case EOpGreaterThan: - case EOpGreaterThanEqual: - case EOpLessThan: - case EOpLessThanEqual: - case EOpEqual: - case EOpNotEqual: - break; - default: - badCond = true; - } - } - if (binaryCond && (! binaryCond->getLeft()->getAsSymbolNode() || - binaryCond->getLeft()->getAsSymbolNode()->getId() != loopIndex || - ! 
binaryCond->getRight()->getAsConstantUnion())) - badCond = true; - } - if (badCond) { - error(loc, "inductive-loop condition requires the form \"loop-index constant-expression\"", "limitations", ""); - return; - } - - // loop-index++ - // loop-index-- - // loop-index += constant-expression - // loop-index -= constant-expression - bool badTerminal = ! loop->getTerminal(); - if (! badTerminal) { - TIntermUnary* unaryTerminal = loop->getTerminal()->getAsUnaryNode(); - TIntermBinary* binaryTerminal = loop->getTerminal()->getAsBinaryNode(); - if (unaryTerminal || binaryTerminal) { - switch(loop->getTerminal()->getAsOperator()->getOp()) { - case EOpPostDecrement: - case EOpPostIncrement: - case EOpAddAssign: - case EOpSubAssign: - break; - default: - badTerminal = true; - } - } else - badTerminal = true; - if (binaryTerminal && (! binaryTerminal->getLeft()->getAsSymbolNode() || - binaryTerminal->getLeft()->getAsSymbolNode()->getId() != loopIndex || - ! binaryTerminal->getRight()->getAsConstantUnion())) - badTerminal = true; - if (unaryTerminal && (! unaryTerminal->getOperand()->getAsSymbolNode() || - unaryTerminal->getOperand()->getAsSymbolNode()->getId() != loopIndex)) - badTerminal = true; - } - if (badTerminal) { - error(loc, "inductive-loop termination requires the form \"loop-index++, loop-index--, loop-index += constant-expression, or loop-index -= constant-expression\"", "limitations", ""); - return; - } - - // the body - inductiveLoopBodyCheck(loop->getBody(), loopIndex, symbolTable); -} - -// Do limit checks for built-in arrays. -void TParseContext::arrayLimitCheck(const TSourceLoc& loc, const TString& identifier, int size) -{ - if (identifier.compare("gl_TexCoord") == 0) - limitCheck(loc, size, "gl_MaxTextureCoords", "gl_TexCoord array size"); - else if (identifier.compare("gl_ClipDistance") == 0) - limitCheck(loc, size, "gl_MaxClipDistances", "gl_ClipDistance array size"); - else if (identifier.compare("gl_CullDistance") == 0) - limitCheck(loc, size, "gl_MaxCullDistances", "gl_CullDistance array size"); -} - -// See if the provided value is less than or equal to the symbol indicated by limit, -// which should be a constant in the symbol table. -void TParseContext::limitCheck(const TSourceLoc& loc, int value, const char* limit, const char* feature) -{ - TSymbol* symbol = symbolTable.find(limit); - assert(symbol->getAsVariable()); - const TConstUnionArray& constArray = symbol->getAsVariable()->getConstArray(); - assert(! constArray.empty()); - if (value > constArray[0].getIConst()) - error(loc, "must be less than or equal to", feature, "%s (%d)", limit, constArray[0].getIConst()); -} - -// -// Do any additional error checking, etc., once we know the parsing is done. -// -void TParseContext::finish() -{ - TParseContextBase::finish(); - - if (parsingBuiltins) - return; - - // Check on array indexes for ES 2.0 (version 100) limitations. - for (size_t i = 0; i < needsIndexLimitationChecking.size(); ++i) - constantIndexExpressionCheck(needsIndexLimitationChecking[i]); - - // Check for stages that are enabled by extension. - // Can't do this at the beginning, it is chicken and egg to add a stage by - // extension. - // Stage-specific features were correctly tested for already, this is just - // about the stage itself. 
- switch (language) { - case EShLangGeometry: - if (profile == EEsProfile && version == 310) - requireExtensions(getCurrentLoc(), Num_AEP_geometry_shader, AEP_geometry_shader, "geometry shaders"); - break; - case EShLangTessControl: - case EShLangTessEvaluation: - if (profile == EEsProfile && version == 310) - requireExtensions(getCurrentLoc(), Num_AEP_tessellation_shader, AEP_tessellation_shader, "tessellation shaders"); - else if (profile != EEsProfile && version < 400) - requireExtensions(getCurrentLoc(), 1, &E_GL_ARB_tessellation_shader, "tessellation shaders"); - break; - case EShLangCompute: - if (profile != EEsProfile && version < 430) - requireExtensions(getCurrentLoc(), 1, &E_GL_ARB_compute_shader, "compute shaders"); - break; - default: - break; - } -} - -// -// Layout qualifier stuff. -// - -// Put the id's layout qualification into the public type, for qualifiers not having a number set. -// This is before we know any type information for error checking. -void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publicType, TString& id) -{ - std::transform(id.begin(), id.end(), id.begin(), ::tolower); - - if (id == TQualifier::getLayoutMatrixString(ElmColumnMajor)) { - publicType.qualifier.layoutMatrix = ElmColumnMajor; - return; - } - if (id == TQualifier::getLayoutMatrixString(ElmRowMajor)) { - publicType.qualifier.layoutMatrix = ElmRowMajor; - return; - } - if (id == TQualifier::getLayoutPackingString(ElpPacked)) { - if (spvVersion.spv != 0) - spvRemoved(loc, "packed"); - publicType.qualifier.layoutPacking = ElpPacked; - return; - } - if (id == TQualifier::getLayoutPackingString(ElpShared)) { - if (spvVersion.spv != 0) - spvRemoved(loc, "shared"); - publicType.qualifier.layoutPacking = ElpShared; - return; - } - if (id == TQualifier::getLayoutPackingString(ElpStd140)) { - publicType.qualifier.layoutPacking = ElpStd140; - return; - } - if (id == TQualifier::getLayoutPackingString(ElpStd430)) { - requireProfile(loc, EEsProfile | ECoreProfile | ECompatibilityProfile, "std430"); - profileRequires(loc, ECoreProfile | ECompatibilityProfile, 430, nullptr, "std430"); - profileRequires(loc, EEsProfile, 310, nullptr, "std430"); - publicType.qualifier.layoutPacking = ElpStd430; - return; - } - // TODO: compile-time performance: may need to stop doing linear searches - for (TLayoutFormat format = (TLayoutFormat)(ElfNone + 1); format < ElfCount; format = (TLayoutFormat)(format + 1)) { - if (id == TQualifier::getLayoutFormatString(format)) { - if ((format > ElfEsFloatGuard && format < ElfFloatGuard) || - (format > ElfEsIntGuard && format < ElfIntGuard) || - (format > ElfEsUintGuard && format < ElfCount)) - requireProfile(loc, ENoProfile | ECoreProfile | ECompatibilityProfile, "image load-store format"); - profileRequires(loc, ENoProfile | ECoreProfile | ECompatibilityProfile, 420, E_GL_ARB_shader_image_load_store, "image load store"); - profileRequires(loc, EEsProfile, 310, E_GL_ARB_shader_image_load_store, "image load store"); - publicType.qualifier.layoutFormat = format; - return; - } - } - if (id == "push_constant") { - requireVulkan(loc, "push_constant"); - publicType.qualifier.layoutPushConstant = true; - return; - } - if (language == EShLangGeometry || language == EShLangTessEvaluation) { - if (id == TQualifier::getGeometryString(ElgTriangles)) { - publicType.shaderQualifiers.geometry = ElgTriangles; - return; - } - if (language == EShLangGeometry) { - if (id == TQualifier::getGeometryString(ElgPoints)) { - publicType.shaderQualifiers.geometry = ElgPoints; - return; 
- } - if (id == TQualifier::getGeometryString(ElgLineStrip)) { - publicType.shaderQualifiers.geometry = ElgLineStrip; - return; - } - if (id == TQualifier::getGeometryString(ElgLines)) { - publicType.shaderQualifiers.geometry = ElgLines; - return; - } - if (id == TQualifier::getGeometryString(ElgLinesAdjacency)) { - publicType.shaderQualifiers.geometry = ElgLinesAdjacency; - return; - } - if (id == TQualifier::getGeometryString(ElgTrianglesAdjacency)) { - publicType.shaderQualifiers.geometry = ElgTrianglesAdjacency; - return; - } - if (id == TQualifier::getGeometryString(ElgTriangleStrip)) { - publicType.shaderQualifiers.geometry = ElgTriangleStrip; - return; - } -#ifdef NV_EXTENSIONS - if (id == "passthrough") { - requireExtensions(loc, 1, &E_SPV_NV_geometry_shader_passthrough, "geometry shader passthrough"); - publicType.qualifier.layoutPassthrough = true; - intermediate.setGeoPassthroughEXT(); - return; - } -#endif - } else { - assert(language == EShLangTessEvaluation); - - // input primitive - if (id == TQualifier::getGeometryString(ElgTriangles)) { - publicType.shaderQualifiers.geometry = ElgTriangles; - return; - } - if (id == TQualifier::getGeometryString(ElgQuads)) { - publicType.shaderQualifiers.geometry = ElgQuads; - return; - } - if (id == TQualifier::getGeometryString(ElgIsolines)) { - publicType.shaderQualifiers.geometry = ElgIsolines; - return; - } - - // vertex spacing - if (id == TQualifier::getVertexSpacingString(EvsEqual)) { - publicType.shaderQualifiers.spacing = EvsEqual; - return; - } - if (id == TQualifier::getVertexSpacingString(EvsFractionalEven)) { - publicType.shaderQualifiers.spacing = EvsFractionalEven; - return; - } - if (id == TQualifier::getVertexSpacingString(EvsFractionalOdd)) { - publicType.shaderQualifiers.spacing = EvsFractionalOdd; - return; - } - - // triangle order - if (id == TQualifier::getVertexOrderString(EvoCw)) { - publicType.shaderQualifiers.order = EvoCw; - return; - } - if (id == TQualifier::getVertexOrderString(EvoCcw)) { - publicType.shaderQualifiers.order = EvoCcw; - return; - } - - // point mode - if (id == "point_mode") { - publicType.shaderQualifiers.pointMode = true; - return; - } - } - } - if (language == EShLangFragment) { - if (id == "origin_upper_left") { - requireProfile(loc, ECoreProfile | ECompatibilityProfile, "origin_upper_left"); - publicType.shaderQualifiers.originUpperLeft = true; - return; - } - if (id == "pixel_center_integer") { - requireProfile(loc, ECoreProfile | ECompatibilityProfile, "pixel_center_integer"); - publicType.shaderQualifiers.pixelCenterInteger = true; - return; - } - if (id == "early_fragment_tests") { - profileRequires(loc, ENoProfile | ECoreProfile | ECompatibilityProfile, 420, E_GL_ARB_shader_image_load_store, "early_fragment_tests"); - profileRequires(loc, EEsProfile, 310, nullptr, "early_fragment_tests"); - publicType.shaderQualifiers.earlyFragmentTests = true; - return; - } - if (id == "post_depth_coverage") { - requireExtensions(loc, Num_post_depth_coverageEXTs, post_depth_coverageEXTs, "post depth coverage"); - if (extensionTurnedOn(E_GL_ARB_post_depth_coverage)) { - publicType.shaderQualifiers.earlyFragmentTests = true; - } - publicType.shaderQualifiers.postDepthCoverage = true; - return; - } - for (TLayoutDepth depth = (TLayoutDepth)(EldNone + 1); depth < EldCount; depth = (TLayoutDepth)(depth+1)) { - if (id == TQualifier::getLayoutDepthString(depth)) { - requireProfile(loc, ECoreProfile | ECompatibilityProfile, "depth layout qualifier"); - profileRequires(loc, ECoreProfile | 
ECompatibilityProfile, 420, nullptr, "depth layout qualifier"); - publicType.shaderQualifiers.layoutDepth = depth; - return; - } - } - if (id.compare(0, 13, "blend_support") == 0) { - bool found = false; - for (TBlendEquationShift be = (TBlendEquationShift)0; be < EBlendCount; be = (TBlendEquationShift)(be + 1)) { - if (id == TQualifier::getBlendEquationString(be)) { - profileRequires(loc, EEsProfile, 320, E_GL_KHR_blend_equation_advanced, "blend equation"); - profileRequires(loc, ~EEsProfile, 0, E_GL_KHR_blend_equation_advanced, "blend equation"); - intermediate.addBlendEquation(be); - publicType.shaderQualifiers.blendEquation = true; - found = true; - break; - } - } - if (! found) - error(loc, "unknown blend equation", "blend_support", ""); - return; - } -#ifdef NV_EXTENSIONS - if (id == "override_coverage") { - requireExtensions(loc, 1, &E_GL_NV_sample_mask_override_coverage, "sample mask override coverage"); - publicType.shaderQualifiers.layoutOverrideCoverage = true; - return; - } - } - if (language == EShLangVertex || - language == EShLangTessControl || - language == EShLangTessEvaluation || - language == EShLangGeometry ) { - if (id == "viewport_relative") { - requireExtensions(loc, 1, &E_GL_NV_viewport_array2, "view port array2"); - publicType.qualifier.layoutViewportRelative = true; - return; - } - } -#else - } -#endif - error(loc, "unrecognized layout identifier, or qualifier requires assignment (e.g., binding = 4)", id.c_str(), ""); -} - -// Put the id's layout qualifier value into the public type, for qualifiers having a number set. -// This is before we know any type information for error checking. -void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publicType, TString& id, const TIntermTyped* node) -{ - const char* feature = "layout-id value"; - const char* nonLiteralFeature = "non-literal layout-id value"; - - integerCheck(node, feature); - const TIntermConstantUnion* constUnion = node->getAsConstantUnion(); - int value; - if (constUnion) { - value = constUnion->getConstArray()[0].getIConst(); - if (! constUnion->isLiteral()) { - requireProfile(loc, ECoreProfile | ECompatibilityProfile, nonLiteralFeature); - profileRequires(loc, ECoreProfile | ECompatibilityProfile, 440, E_GL_ARB_enhanced_layouts, nonLiteralFeature); - } - } else { - // grammar should have give out the error message - value = 0; - } - - if (value < 0) { - error(loc, "cannot be negative", feature, ""); - return; - } - - std::transform(id.begin(), id.end(), id.begin(), ::tolower); - - if (id == "offset") { - // "offset" can be for either - // - uniform offsets - // - atomic_uint offsets - const char* feature = "offset"; - if (spvVersion.spv == 0) { - requireProfile(loc, EEsProfile | ECoreProfile | ECompatibilityProfile, feature); - const char* exts[2] = { E_GL_ARB_enhanced_layouts, E_GL_ARB_shader_atomic_counters }; - profileRequires(loc, ECoreProfile | ECompatibilityProfile, 420, 2, exts, feature); - profileRequires(loc, EEsProfile, 310, nullptr, feature); - } - publicType.qualifier.layoutOffset = value; - return; - } else if (id == "align") { - const char* feature = "uniform buffer-member align"; - if (spvVersion.spv == 0) { - requireProfile(loc, ECoreProfile | ECompatibilityProfile, feature); - profileRequires(loc, ECoreProfile | ECompatibilityProfile, 440, E_GL_ARB_enhanced_layouts, feature); - } - // "The specified alignment must be a power of 2, or a compile-time error results." - if (! 
IsPow2(value)) - error(loc, "must be a power of 2", "align", ""); - else - publicType.qualifier.layoutAlign = value; - return; - } else if (id == "location") { - profileRequires(loc, EEsProfile, 300, nullptr, "location"); - const char* exts[2] = { E_GL_ARB_separate_shader_objects, E_GL_ARB_explicit_attrib_location }; - profileRequires(loc, ~EEsProfile, 330, 2, exts, "location"); - if ((unsigned int)value >= TQualifier::layoutLocationEnd) - error(loc, "location is too large", id.c_str(), ""); - else - publicType.qualifier.layoutLocation = value; - return; - } else if (id == "set") { - if ((unsigned int)value >= TQualifier::layoutSetEnd) - error(loc, "set is too large", id.c_str(), ""); - else - publicType.qualifier.layoutSet = value; - if (value != 0) - requireVulkan(loc, "descriptor set"); - return; - } else if (id == "binding") { - profileRequires(loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, "binding"); - profileRequires(loc, EEsProfile, 310, nullptr, "binding"); - if ((unsigned int)value >= TQualifier::layoutBindingEnd) - error(loc, "binding is too large", id.c_str(), ""); - else - publicType.qualifier.layoutBinding = value; - return; - } else if (id == "component") { - requireProfile(loc, ECoreProfile | ECompatibilityProfile, "component"); - profileRequires(loc, ECoreProfile | ECompatibilityProfile, 440, E_GL_ARB_enhanced_layouts, "component"); - if ((unsigned)value >= TQualifier::layoutComponentEnd) - error(loc, "component is too large", id.c_str(), ""); - else - publicType.qualifier.layoutComponent = value; - return; - } else if (id.compare(0, 4, "xfb_") == 0) { - // "Any shader making any static use (after preprocessing) of any of these - // *xfb_* qualifiers will cause the shader to be in a transform feedback - // capturing mode and hence responsible for describing the transform feedback - // setup." - intermediate.setXfbMode(); - const char* feature = "transform feedback qualifier"; - requireStage(loc, (EShLanguageMask)(EShLangVertexMask | EShLangGeometryMask | EShLangTessControlMask | EShLangTessEvaluationMask), feature); - requireProfile(loc, ECoreProfile | ECompatibilityProfile, feature); - profileRequires(loc, ECoreProfile | ECompatibilityProfile, 440, E_GL_ARB_enhanced_layouts, feature); - if (id == "xfb_buffer") { - // "It is a compile-time error to specify an *xfb_buffer* that is greater than - // the implementation-dependent constant gl_MaxTransformFeedbackBuffers." - if (value >= resources.maxTransformFeedbackBuffers) - error(loc, "buffer is too large:", id.c_str(), "gl_MaxTransformFeedbackBuffers is %d", resources.maxTransformFeedbackBuffers); - if (value >= (int)TQualifier::layoutXfbBufferEnd) - error(loc, "buffer is too large:", id.c_str(), "internal max is %d", TQualifier::layoutXfbBufferEnd-1); - else - publicType.qualifier.layoutXfbBuffer = value; - return; - } else if (id == "xfb_offset") { - if (value >= (int)TQualifier::layoutXfbOffsetEnd) - error(loc, "offset is too large:", id.c_str(), "internal max is %d", TQualifier::layoutXfbOffsetEnd-1); - else - publicType.qualifier.layoutXfbOffset = value; - return; - } else if (id == "xfb_stride") { - // "The resulting stride (implicit or explicit), when divided by 4, must be less than or equal to the - // implementation-dependent constant gl_MaxTransformFeedbackInterleavedComponents." 
- if (value > 4 * resources.maxTransformFeedbackInterleavedComponents) - error(loc, "1/4 stride is too large:", id.c_str(), "gl_MaxTransformFeedbackInterleavedComponents is %d", resources.maxTransformFeedbackInterleavedComponents); - else if (value >= (int)TQualifier::layoutXfbStrideEnd) - error(loc, "stride is too large:", id.c_str(), "internal max is %d", TQualifier::layoutXfbStrideEnd-1); - if (value < (int)TQualifier::layoutXfbStrideEnd) - publicType.qualifier.layoutXfbStride = value; - return; - } - } - - if (id == "input_attachment_index") { - requireVulkan(loc, "input_attachment_index"); - if (value >= (int)TQualifier::layoutAttachmentEnd) - error(loc, "attachment index is too large", id.c_str(), ""); - else - publicType.qualifier.layoutAttachment = value; - return; - } - if (id == "constant_id") { - requireSpv(loc, "constant_id"); - if (value >= (int)TQualifier::layoutSpecConstantIdEnd) { - error(loc, "specialization-constant id is too large", id.c_str(), ""); - } else { - publicType.qualifier.layoutSpecConstantId = value; - publicType.qualifier.specConstant = true; - if (! intermediate.addUsedConstantId(value)) - error(loc, "specialization-constant id already used", id.c_str(), ""); - } - return; - } - if (id == "num_views") { - requireExtensions(loc, Num_OVR_multiview_EXTs, OVR_multiview_EXTs, "num_views"); - publicType.shaderQualifiers.numViews = value; - return; - } - -#if NV_EXTENSIONS - if (language == EShLangVertex || - language == EShLangTessControl || - language == EShLangTessEvaluation || - language == EShLangGeometry) { - if (id == "secondary_view_offset") { - requireExtensions(loc, 1, &E_GL_NV_stereo_view_rendering, "stereo view rendering"); - publicType.qualifier.layoutSecondaryViewportRelativeOffset = value; - return; - } - } -#endif - - switch (language) { - case EShLangVertex: - break; - - case EShLangTessControl: - if (id == "vertices") { - if (value == 0) - error(loc, "must be greater than 0", "vertices", ""); - else - publicType.shaderQualifiers.vertices = value; - return; - } - break; - - case EShLangTessEvaluation: - break; - - case EShLangGeometry: - if (id == "invocations") { - profileRequires(loc, ECompatibilityProfile | ECoreProfile, 400, nullptr, "invocations"); - if (value == 0) - error(loc, "must be at least 1", "invocations", ""); - else - publicType.shaderQualifiers.invocations = value; - return; - } - if (id == "max_vertices") { - publicType.shaderQualifiers.vertices = value; - if (value > resources.maxGeometryOutputVertices) - error(loc, "too large, must be less than gl_MaxGeometryOutputVertices", "max_vertices", ""); - return; - } - if (id == "stream") { - requireProfile(loc, ~EEsProfile, "selecting output stream"); - publicType.qualifier.layoutStream = value; - if (value > 0) - intermediate.setMultiStream(); - return; - } - break; - - case EShLangFragment: - if (id == "index") { - requireProfile(loc, ECompatibilityProfile | ECoreProfile, "index layout qualifier on fragment output"); - const char* exts[2] = { E_GL_ARB_separate_shader_objects, E_GL_ARB_explicit_attrib_location }; - profileRequires(loc, ECompatibilityProfile | ECoreProfile, 330, 2, exts, "index layout qualifier on fragment output"); - - // "It is also a compile-time error if a fragment shader sets a layout index to less than 0 or greater than 1." 
- if (value < 0 || value > 1) { - value = 0; - error(loc, "value must be 0 or 1", "index", ""); - } - - publicType.qualifier.layoutIndex = value; - return; - } - break; - - case EShLangCompute: - if (id.compare(0, 11, "local_size_") == 0) { - profileRequires(loc, EEsProfile, 310, 0, "gl_WorkGroupSize"); - profileRequires(loc, ~EEsProfile, 430, E_GL_ARB_compute_shader, "gl_WorkGroupSize"); - if (id.size() == 12 && value == 0) { - error(loc, "must be at least 1", id.c_str(), ""); - return; - } - if (id == "local_size_x") { - publicType.shaderQualifiers.localSize[0] = value; - return; - } - if (id == "local_size_y") { - publicType.shaderQualifiers.localSize[1] = value; - return; - } - if (id == "local_size_z") { - publicType.shaderQualifiers.localSize[2] = value; - return; - } - if (spvVersion.spv != 0) { - if (id == "local_size_x_id") { - publicType.shaderQualifiers.localSizeSpecId[0] = value; - return; - } - if (id == "local_size_y_id") { - publicType.shaderQualifiers.localSizeSpecId[1] = value; - return; - } - if (id == "local_size_z_id") { - publicType.shaderQualifiers.localSizeSpecId[2] = value; - return; - } - } - } - break; - - default: - break; - } - - error(loc, "there is no such layout identifier for this stage taking an assigned value", id.c_str(), ""); -} - -// Merge any layout qualifier information from src into dst, leaving everything else in dst alone -// -// "More than one layout qualifier may appear in a single declaration. -// Additionally, the same layout-qualifier-name can occur multiple times -// within a layout qualifier or across multiple layout qualifiers in the -// same declaration. When the same layout-qualifier-name occurs -// multiple times, in a single declaration, the last occurrence overrides -// the former occurrence(s). Further, if such a layout-qualifier-name -// will effect subsequent declarations or other observable behavior, it -// is only the last occurrence that will have any effect, behaving as if -// the earlier occurrence(s) within the declaration are not present. -// This is also true for overriding layout-qualifier-names, where one -// overrides the other (e.g., row_major vs. column_major); only the last -// occurrence has any effect." -void TParseContext::mergeObjectLayoutQualifiers(TQualifier& dst, const TQualifier& src, bool inheritOnly) -{ - if (src.hasMatrix()) - dst.layoutMatrix = src.layoutMatrix; - if (src.hasPacking()) - dst.layoutPacking = src.layoutPacking; - - if (src.hasStream()) - dst.layoutStream = src.layoutStream; - - if (src.hasFormat()) - dst.layoutFormat = src.layoutFormat; - - if (src.hasXfbBuffer()) - dst.layoutXfbBuffer = src.layoutXfbBuffer; - - if (src.hasAlign()) - dst.layoutAlign = src.layoutAlign; - - if (! 
inheritOnly) { - if (src.hasLocation()) - dst.layoutLocation = src.layoutLocation; - if (src.hasComponent()) - dst.layoutComponent = src.layoutComponent; - if (src.hasIndex()) - dst.layoutIndex = src.layoutIndex; - - if (src.hasOffset()) - dst.layoutOffset = src.layoutOffset; - - if (src.hasSet()) - dst.layoutSet = src.layoutSet; - if (src.layoutBinding != TQualifier::layoutBindingEnd) - dst.layoutBinding = src.layoutBinding; - - if (src.hasXfbStride()) - dst.layoutXfbStride = src.layoutXfbStride; - if (src.hasXfbOffset()) - dst.layoutXfbOffset = src.layoutXfbOffset; - if (src.hasAttachment()) - dst.layoutAttachment = src.layoutAttachment; - if (src.hasSpecConstantId()) - dst.layoutSpecConstantId = src.layoutSpecConstantId; - - if (src.layoutPushConstant) - dst.layoutPushConstant = true; - -#ifdef NV_EXTENSIONS - if (src.layoutPassthrough) - dst.layoutPassthrough = true; - if (src.layoutViewportRelative) - dst.layoutViewportRelative = true; - if (src.layoutSecondaryViewportRelativeOffset != -2048) - dst.layoutSecondaryViewportRelativeOffset = src.layoutSecondaryViewportRelativeOffset; -#endif - } -} - -// Do error layout error checking given a full variable/block declaration. -void TParseContext::layoutObjectCheck(const TSourceLoc& loc, const TSymbol& symbol) -{ - const TType& type = symbol.getType(); - const TQualifier& qualifier = type.getQualifier(); - - // first, cross check WRT to just the type - layoutTypeCheck(loc, type); - - // now, any remaining error checking based on the object itself - - if (qualifier.hasAnyLocation()) { - switch (qualifier.storage) { - case EvqUniform: - case EvqBuffer: - if (symbol.getAsVariable() == nullptr) - error(loc, "can only be used on variable declaration", "location", ""); - break; - default: - break; - } - } - - // user-variable location check, which are required for SPIR-V in/out: - // - variables have it directly, - // - blocks have it on each member (already enforced), so check first one - if (spvVersion.spv > 0 && !parsingBuiltins && qualifier.builtIn == EbvNone && - !qualifier.hasLocation() && !intermediate.getAutoMapLocations()) { - - switch (qualifier.storage) { - case EvqVaryingIn: - case EvqVaryingOut: - if (type.getBasicType() != EbtBlock || - (!(*type.getStruct())[0].type->getQualifier().hasLocation() && - (*type.getStruct())[0].type->getQualifier().builtIn == EbvNone)) - error(loc, "SPIR-V requires location for user input/output", "location", ""); - break; - default: - break; - } - } - - // Check packing and matrix - if (qualifier.hasUniformLayout()) { - switch (qualifier.storage) { - case EvqUniform: - case EvqBuffer: - if (type.getBasicType() != EbtBlock) { - if (qualifier.hasMatrix()) - error(loc, "cannot specify matrix layout on a variable declaration", "layout", ""); - if (qualifier.hasPacking()) - error(loc, "cannot specify packing on a variable declaration", "layout", ""); - // "The offset qualifier can only be used on block members of blocks..." - if (qualifier.hasOffset() && type.getBasicType() != EbtAtomicUint) - error(loc, "cannot specify on a variable declaration", "offset", ""); - // "The align qualifier can only be used on blocks or block members..." 
- if (qualifier.hasAlign()) - error(loc, "cannot specify on a variable declaration", "align", ""); - if (qualifier.layoutPushConstant) - error(loc, "can only specify on a uniform block", "push_constant", ""); - } - break; - default: - // these were already filtered by layoutTypeCheck() (or its callees) - break; - } - } -} - -// "For some blocks declared as arrays, the location can only be applied at the block level: -// When a block is declared as an array where additional locations are needed for each member -// for each block array element, it is a compile-time error to specify locations on the block -// members. That is, when locations would be under specified by applying them on block members, -// they are not allowed on block members. For arrayed interfaces (those generally having an -// extra level of arrayness due to interface expansion), the outer array is stripped before -// applying this rule." -void TParseContext::layoutMemberLocationArrayCheck(const TSourceLoc& loc, bool memberWithLocation, TArraySizes* arraySizes) -{ - if (memberWithLocation && arraySizes != nullptr) { - if (arraySizes->getNumDims() > (currentBlockQualifier.isArrayedIo(language) ? 1 : 0)) - error(loc, "cannot use in a block array where new locations are needed for each block element", - "location", ""); - } -} - -// Do layout error checking with respect to a type. -void TParseContext::layoutTypeCheck(const TSourceLoc& loc, const TType& type) -{ - const TQualifier& qualifier = type.getQualifier(); - - // first, intra-layout qualifier-only error checking - layoutQualifierCheck(loc, qualifier); - - // now, error checking combining type and qualifier - - if (qualifier.hasAnyLocation()) { - if (qualifier.hasLocation()) { - if (qualifier.storage == EvqVaryingOut && language == EShLangFragment) { - if (qualifier.layoutLocation >= (unsigned int)resources.maxDrawBuffers) - error(loc, "too large for fragment output", "location", ""); - } - } - if (qualifier.hasComponent()) { - // "It is a compile-time error if this sequence of components gets larger than 3." - if (qualifier.layoutComponent + type.getVectorSize() * (type.getBasicType() == EbtDouble ? 2 : 1) > 4) - error(loc, "type overflows the available 4 components", "component", ""); - - // "It is a compile-time error to apply the component qualifier to a matrix, a structure, a block, or an array containing any of these." - if (type.isMatrix() || type.getBasicType() == EbtBlock || type.getBasicType() == EbtStruct) - error(loc, "cannot apply to a matrix, structure, or block", "component", ""); - - // " It is a compile-time error to use component 1 or 3 as the beginning of a double or dvec2." - if (type.getBasicType() == EbtDouble) - if (qualifier.layoutComponent & 1) - error(loc, "doubles cannot start on an odd-numbered component", "component", ""); - } - - switch (qualifier.storage) { - case EvqVaryingIn: - case EvqVaryingOut: - if (type.getBasicType() == EbtBlock) - profileRequires(loc, ECoreProfile | ECompatibilityProfile, 440, E_GL_ARB_enhanced_layouts, "location qualifier on in/out block"); - break; - case EvqUniform: - case EvqBuffer: - break; - default: - error(loc, "can only apply to uniform, buffer, in, or out storage qualifiers", "location", ""); - break; - } - - bool typeCollision; - int repeated = intermediate.addUsedLocation(qualifier, type, typeCollision); - if (repeated >= 0 && ! typeCollision) - error(loc, "overlapping use of location", "location", "%d", repeated); - // "fragment-shader outputs ... 
if two variables are placed within the same - // location, they must have the same underlying type (floating-point or integer)" - if (typeCollision && language == EShLangFragment && qualifier.isPipeOutput()) - error(loc, "fragment outputs sharing the same location must be the same basic type", "location", "%d", repeated); - } - - if (qualifier.hasXfbOffset() && qualifier.hasXfbBuffer()) { - int repeated = intermediate.addXfbBufferOffset(type); - if (repeated >= 0) - error(loc, "overlapping offsets at", "xfb_offset", "offset %d in buffer %d", repeated, qualifier.layoutXfbBuffer); - - // "The offset must be a multiple of the size of the first component of the first - // qualified variable or block member, or a compile-time error results. Further, if applied to an aggregate - // containing a double, the offset must also be a multiple of 8..." - if (type.containsBasicType(EbtDouble) && ! IsMultipleOfPow2(qualifier.layoutXfbOffset, 8)) - error(loc, "type contains double; xfb_offset must be a multiple of 8", "xfb_offset", ""); -#ifdef AMD_EXTENSIONS - // ..., if applied to an aggregate containing a float16_t, the offset must also be a multiple of 2..." - else if (type.containsBasicType(EbtFloat16) && !IsMultipleOfPow2(qualifier.layoutXfbOffset, 2)) - error(loc, "type contains half float; xfb_offset must be a multiple of 2", "xfb_offset", ""); -#endif - else if (! IsMultipleOfPow2(qualifier.layoutXfbOffset, 4)) - error(loc, "must be a multiple of size of first component", "xfb_offset", ""); - } - - if (qualifier.hasXfbStride() && qualifier.hasXfbBuffer()) { - if (! intermediate.setXfbBufferStride(qualifier.layoutXfbBuffer, qualifier.layoutXfbStride)) - error(loc, "all stride settings must match for xfb buffer", "xfb_stride", "%d", qualifier.layoutXfbBuffer); - } - - if (qualifier.hasBinding()) { - // Binding checking, from the spec: - // - // "If the binding point for any uniform or shader storage block instance is less than zero, or greater than or - // equal to the implementation-dependent maximum number of uniform buffer bindings, a compile-time - // error will occur. When the binding identifier is used with a uniform or shader storage block instanced as - // an array of size N, all elements of the array from binding through binding + N - 1 must be within this - // range." - // - if (! type.isOpaque() && type.getBasicType() != EbtBlock) - error(loc, "requires block, or sampler/image, or atomic-counter type", "binding", ""); - if (type.getBasicType() == EbtSampler) { - int lastBinding = qualifier.layoutBinding; - if (type.isArray()) { - if (type.isImplicitlySizedArray()) { - lastBinding += 1; - warn(loc, "assuming array size of one for compile-time checking of binding numbers for implicitly-sized array", "[]", ""); - } else - lastBinding += type.getCumulativeArraySize(); - } - if (lastBinding >= resources.maxCombinedTextureImageUnits) - error(loc, "sampler binding not less than gl_MaxCombinedTextureImageUnits", "binding", type.isArray() ? "(using array)" : ""); - } - if (type.getBasicType() == EbtAtomicUint) { - if (qualifier.layoutBinding >= (unsigned int)resources.maxAtomicCounterBindings) { - error(loc, "atomic_uint binding is too large; see gl_MaxAtomicCounterBindings", "binding", ""); - return; - } - } - } - - // atomic_uint - if (type.getBasicType() == EbtAtomicUint) { - if (! type.getQualifier().hasBinding()) - error(loc, "layout(binding=X) is required", "atomic_uint", ""); - } - - // "The offset qualifier can only be used on block members of blocks..." 
- if (qualifier.hasOffset()) { - if (type.getBasicType() == EbtBlock) - error(loc, "only applies to block members, not blocks", "offset", ""); - } - - // Image format - if (qualifier.hasFormat()) { - if (! type.isImage()) - error(loc, "only apply to images", TQualifier::getLayoutFormatString(qualifier.layoutFormat), ""); - else { - if (type.getSampler().type == EbtFloat && qualifier.layoutFormat > ElfFloatGuard) - error(loc, "does not apply to floating point images", TQualifier::getLayoutFormatString(qualifier.layoutFormat), ""); - if (type.getSampler().type == EbtInt && (qualifier.layoutFormat < ElfFloatGuard || qualifier.layoutFormat > ElfIntGuard)) - error(loc, "does not apply to signed integer images", TQualifier::getLayoutFormatString(qualifier.layoutFormat), ""); - if (type.getSampler().type == EbtUint && qualifier.layoutFormat < ElfIntGuard) - error(loc, "does not apply to unsigned integer images", TQualifier::getLayoutFormatString(qualifier.layoutFormat), ""); - - if (profile == EEsProfile) { - // "Except for image variables qualified with the format qualifiers r32f, r32i, and r32ui, image variables must - // specify either memory qualifier readonly or the memory qualifier writeonly." - if (! (qualifier.layoutFormat == ElfR32f || qualifier.layoutFormat == ElfR32i || qualifier.layoutFormat == ElfR32ui)) { - if (! qualifier.readonly && ! qualifier.writeonly) - error(loc, "format requires readonly or writeonly memory qualifier", TQualifier::getLayoutFormatString(qualifier.layoutFormat), ""); - } - } - } - } else if (type.isImage() && ! qualifier.writeonly) { - const char *explanation = "image variables not declared 'writeonly' and without a format layout qualifier"; - requireProfile(loc, ECoreProfile | ECompatibilityProfile, explanation); - profileRequires(loc, ECoreProfile | ECompatibilityProfile, 0, E_GL_EXT_shader_image_load_formatted, explanation); - } - - if (qualifier.layoutPushConstant && type.getBasicType() != EbtBlock) - error(loc, "can only be used with a block", "push_constant", ""); - - // input attachment - if (type.isSubpass()) { - if (! qualifier.hasAttachment()) - error(loc, "requires an input_attachment_index layout qualifier", "subpass", ""); - } else { - if (qualifier.hasAttachment()) - error(loc, "can only be used with a subpass", "input_attachment_index", ""); - } - - // specialization-constant id - if (qualifier.hasSpecConstantId()) { - if (type.getQualifier().storage != EvqConst) - error(loc, "can only be applied to 'const'-qualified scalar", "constant_id", ""); - if (! type.isScalar()) - error(loc, "can only be applied to a scalar", "constant_id", ""); - switch (type.getBasicType()) - { - case EbtInt: - case EbtUint: - case EbtInt64: - case EbtUint64: -#ifdef AMD_EXTENSIONS - case EbtInt16: - case EbtUint16: -#endif - case EbtBool: - case EbtFloat: - case EbtDouble: -#ifdef AMD_EXTENSIONS - case EbtFloat16: -#endif - break; - default: - error(loc, "cannot be applied to this type", "constant_id", ""); - break; - } - } -} - -// Do layout error checking that can be done within a layout qualifier proper, not needing to know -// if there are blocks, atomic counters, variables, etc. -void TParseContext::layoutQualifierCheck(const TSourceLoc& loc, const TQualifier& qualifier) -{ - if (qualifier.storage == EvqShared && qualifier.hasLayout()) - error(loc, "cannot apply layout qualifiers to a shared variable", "shared", ""); - - // "It is a compile-time error to use *component* without also specifying the location qualifier (order does not matter)." 
- if (qualifier.hasComponent() && ! qualifier.hasLocation()) - error(loc, "must specify 'location' to use 'component'", "component", ""); - - if (qualifier.hasAnyLocation()) { - - // "As with input layout qualifiers, all shaders except compute shaders - // allow *location* layout qualifiers on output variable declarations, - // output block declarations, and output block member declarations." - - switch (qualifier.storage) { - case EvqVaryingIn: - { - const char* feature = "location qualifier on input"; - if (profile == EEsProfile && version < 310) - requireStage(loc, EShLangVertex, feature); - else - requireStage(loc, (EShLanguageMask)~EShLangComputeMask, feature); - if (language == EShLangVertex) { - const char* exts[2] = { E_GL_ARB_separate_shader_objects, E_GL_ARB_explicit_attrib_location }; - profileRequires(loc, ~EEsProfile, 330, 2, exts, feature); - profileRequires(loc, EEsProfile, 300, nullptr, feature); - } else { - profileRequires(loc, ~EEsProfile, 410, E_GL_ARB_separate_shader_objects, feature); - profileRequires(loc, EEsProfile, 310, nullptr, feature); - } - break; - } - case EvqVaryingOut: - { - const char* feature = "location qualifier on output"; - if (profile == EEsProfile && version < 310) - requireStage(loc, EShLangFragment, feature); - else - requireStage(loc, (EShLanguageMask)~EShLangComputeMask, feature); - if (language == EShLangFragment) { - const char* exts[2] = { E_GL_ARB_separate_shader_objects, E_GL_ARB_explicit_attrib_location }; - profileRequires(loc, ~EEsProfile, 330, 2, exts, feature); - profileRequires(loc, EEsProfile, 300, nullptr, feature); - } else { - profileRequires(loc, ~EEsProfile, 410, E_GL_ARB_separate_shader_objects, feature); - profileRequires(loc, EEsProfile, 310, nullptr, feature); - } - break; - } - case EvqUniform: - case EvqBuffer: - { - const char* feature = "location qualifier on uniform or buffer"; - requireProfile(loc, EEsProfile | ECoreProfile | ECompatibilityProfile, feature); - profileRequires(loc, ECoreProfile | ECompatibilityProfile, 430, nullptr, feature); - profileRequires(loc, EEsProfile, 310, nullptr, feature); - break; - } - default: - break; - } - if (qualifier.hasIndex()) { - if (qualifier.storage != EvqVaryingOut) - error(loc, "can only be used on an output", "index", ""); - if (! qualifier.hasLocation()) - error(loc, "can only be used with an explicit location", "index", ""); - } - } - - if (qualifier.hasBinding()) { - if (! qualifier.isUniformOrBuffer()) - error(loc, "requires uniform or buffer storage qualifier", "binding", ""); - } - if (qualifier.hasStream()) { - if (qualifier.storage != EvqVaryingOut) - error(loc, "can only be used on an output", "stream", ""); - } - if (qualifier.hasXfb()) { - if (qualifier.storage != EvqVaryingOut) - error(loc, "can only be used on an output", "xfb layout qualifier", ""); - } - if (qualifier.hasUniformLayout()) { - if (! 
qualifier.isUniformOrBuffer()) { - if (qualifier.hasMatrix() || qualifier.hasPacking()) - error(loc, "matrix or packing qualifiers can only be used on a uniform or buffer", "layout", ""); - if (qualifier.hasOffset() || qualifier.hasAlign()) - error(loc, "offset/align can only be used on a uniform or buffer", "layout", ""); - } - } - if (qualifier.layoutPushConstant) { - if (qualifier.storage != EvqUniform) - error(loc, "can only be used with a uniform", "push_constant", ""); - if (qualifier.hasSet()) - error(loc, "cannot be used with push_constant", "set", ""); - } -} - -// For places that can't have shader-level layout qualifiers -void TParseContext::checkNoShaderLayouts(const TSourceLoc& loc, const TShaderQualifiers& shaderQualifiers) -{ - const char* message = "can only apply to a standalone qualifier"; - - if (shaderQualifiers.geometry != ElgNone) - error(loc, message, TQualifier::getGeometryString(shaderQualifiers.geometry), ""); - if (shaderQualifiers.spacing != EvsNone) - error(loc, message, TQualifier::getVertexSpacingString(shaderQualifiers.spacing), ""); - if (shaderQualifiers.order != EvoNone) - error(loc, message, TQualifier::getVertexOrderString(shaderQualifiers.order), ""); - if (shaderQualifiers.pointMode) - error(loc, message, "point_mode", ""); - if (shaderQualifiers.invocations != TQualifier::layoutNotSet) - error(loc, message, "invocations", ""); - if (shaderQualifiers.earlyFragmentTests) - error(loc, message, "early_fragment_tests", ""); - if (shaderQualifiers.postDepthCoverage) - error(loc, message, "post_depth_coverage", ""); - for (int i = 0; i < 3; ++i) { - if (shaderQualifiers.localSize[i] > 1) - error(loc, message, "local_size", ""); - if (shaderQualifiers.localSizeSpecId[i] != TQualifier::layoutNotSet) - error(loc, message, "local_size id", ""); - } - if (shaderQualifiers.vertices != TQualifier::layoutNotSet) { - if (language == EShLangGeometry) - error(loc, message, "max_vertices", ""); - else if (language == EShLangTessControl) - error(loc, message, "vertices", ""); - else - assert(0); - } - if (shaderQualifiers.blendEquation) - error(loc, message, "blend equation", ""); - if (shaderQualifiers.numViews != TQualifier::layoutNotSet) - error(loc, message, "num_views", ""); -} - -// Correct and/or advance an object's offset layout qualifier. -void TParseContext::fixOffset(const TSourceLoc& loc, TSymbol& symbol) -{ - const TQualifier& qualifier = symbol.getType().getQualifier(); - if (symbol.getType().getBasicType() == EbtAtomicUint) { - if (qualifier.hasBinding() && (int)qualifier.layoutBinding < resources.maxAtomicCounterBindings) { - - // Set the offset - int offset; - if (qualifier.hasOffset()) - offset = qualifier.layoutOffset; - else - offset = atomicUintOffsets[qualifier.layoutBinding]; - symbol.getWritableType().getQualifier().layoutOffset = offset; - - // Check for overlap - int numOffsets = 4; - if (symbol.getType().isArray()) { - if (symbol.getType().isExplicitlySizedArray()) - numOffsets *= symbol.getType().getCumulativeArraySize(); - else { - // "It is a compile-time error to declare an unsized array of atomic_uint." 
- error(loc, "array must be explicitly sized", "atomic_uint", ""); - } - } - int repeated = intermediate.addUsedOffsets(qualifier.layoutBinding, offset, numOffsets); - if (repeated >= 0) - error(loc, "atomic counters sharing the same offset:", "offset", "%d", repeated); - - // Bump the default offset - atomicUintOffsets[qualifier.layoutBinding] = offset + numOffsets; - } - } -} - -// -// Look up a function name in the symbol table, and make sure it is a function. -// -// Return the function symbol if found, otherwise nullptr. -// -const TFunction* TParseContext::findFunction(const TSourceLoc& loc, const TFunction& call, bool& builtIn) -{ - const TFunction* function = nullptr; - - if (symbolTable.isFunctionNameVariable(call.getName())) { - error(loc, "can't use function syntax on variable", call.getName().c_str(), ""); - return nullptr; - } - - if (profile == EEsProfile || version < 120) - function = findFunctionExact(loc, call, builtIn); - else if (version < 400) - function = findFunction120(loc, call, builtIn); - else - function = findFunction400(loc, call, builtIn); - - return function; -} - -// Function finding algorithm for ES and desktop 110. -const TFunction* TParseContext::findFunctionExact(const TSourceLoc& loc, const TFunction& call, bool& builtIn) -{ - TSymbol* symbol = symbolTable.find(call.getMangledName(), &builtIn); - if (symbol == nullptr) { - error(loc, "no matching overloaded function found", call.getName().c_str(), ""); - - return nullptr; - } - - return symbol->getAsFunction(); -} - -// Function finding algorithm for desktop versions 120 through 330. -const TFunction* TParseContext::findFunction120(const TSourceLoc& loc, const TFunction& call, bool& builtIn) -{ - // first, look for an exact match - TSymbol* symbol = symbolTable.find(call.getMangledName(), &builtIn); - if (symbol) - return symbol->getAsFunction(); - - // exact match not found, look through a list of overloaded functions of the same name - - // "If no exact match is found, then [implicit conversions] will be applied to find a match. Mismatched types - // on input parameters (in or inout or default) must have a conversion from the calling argument type to the - // formal parameter type. Mismatched types on output parameters (out or inout) must have a conversion - // from the formal parameter type to the calling argument type. When argument conversions are used to find - // a match, it is a semantic error if there are multiple ways to apply these conversions to make the call match - // more than one function." - - const TFunction* candidate = nullptr; - TVector candidateList; - symbolTable.findFunctionNameList(call.getMangledName(), candidateList, builtIn); - - for (auto it = candidateList.begin(); it != candidateList.end(); ++it) { - const TFunction& function = *(*it); - - // to even be a potential match, number of arguments has to match - if (call.getParamCount() != function.getParamCount()) - continue; - - bool possibleMatch = true; - for (int i = 0; i < function.getParamCount(); ++i) { - // same types is easy - if (*function[i].type == *call[i].type) - continue; - - // We have a mismatch in type, see if it is implicitly convertible - - if (function[i].type->isArray() || call[i].type->isArray() || - ! function[i].type->sameElementShape(*call[i].type)) - possibleMatch = false; - else { - // do direction-specific checks for conversion of basic type - if (function[i].type->getQualifier().isParamInput()) { - if (! 
intermediate.canImplicitlyPromote(call[i].type->getBasicType(), function[i].type->getBasicType())) - possibleMatch = false; - } - if (function[i].type->getQualifier().isParamOutput()) { - if (! intermediate.canImplicitlyPromote(function[i].type->getBasicType(), call[i].type->getBasicType())) - possibleMatch = false; - } - } - if (! possibleMatch) - break; - } - if (possibleMatch) { - if (candidate) { - // our second match, meaning ambiguity - error(loc, "ambiguous function signature match: multiple signatures match under implicit type conversion", call.getName().c_str(), ""); - } else - candidate = &function; - } - } - - if (candidate == nullptr) - error(loc, "no matching overloaded function found", call.getName().c_str(), ""); - - return candidate; -} - -// Function finding algorithm for desktop version 400 and above. -// -// "When function calls are resolved, an exact type match for all the arguments -// is sought. If an exact match is found, all other functions are ignored, and -// the exact match is used. If no exact match is found, then the implicit -// conversions in section 4.1.10 Implicit Conversions will be applied to find -// a match. Mismatched types on input parameters (in or inout or default) must -// have a conversion from the calling argument type to the formal parameter type. -// Mismatched types on output parameters (out or inout) must have a conversion -// from the formal parameter type to the calling argument type. -// -// "If implicit conversions can be used to find more than one matching function, -// a single best-matching function is sought. To determine a best match, the -// conversions between calling argument and formal parameter types are compared -// for each function argument and pair of matching functions. After these -// comparisons are performed, each pair of matching functions are compared. -// A function declaration A is considered a better match than function -// declaration B if -// -// * for at least one function argument, the conversion for that argument in A -// is better than the corresponding conversion in B; and -// * there is no function argument for which the conversion in B is better than -// the corresponding conversion in A. -// -// "If a single function declaration is considered a better match than every -// other matching function declaration, it will be used. Otherwise, a -// compile-time semantic error for an ambiguous overloaded function call occurs. -// -// "To determine whether the conversion for a single argument in one match is -// better than that for another match, the following rules are applied, in order: -// -// 1. An exact match is better than a match involving any implicit conversion. -// 2. A match involving an implicit conversion from float to double is better -// than a match involving any other implicit conversion. -// 3. A match involving an implicit conversion from either int or uint to float -// is better than a match involving an implicit conversion from either int -// or uint to double. -// -// "If none of the rules above apply to a particular pair of conversions, neither -// conversion is considered better than the other." 
-// -const TFunction* TParseContext::findFunction400(const TSourceLoc& loc, const TFunction& call, bool& builtIn) -{ - // first, look for an exact match - TSymbol* symbol = symbolTable.find(call.getMangledName(), &builtIn); - if (symbol) - return symbol->getAsFunction(); - - // no exact match, use the generic selector, parameterized by the GLSL rules - - // create list of candidates to send - TVector candidateList; - symbolTable.findFunctionNameList(call.getMangledName(), candidateList, builtIn); - - // can 'from' convert to 'to'? - const auto convertible = [this](const TType& from, const TType& to, TOperator, int) -> bool { - if (from == to) - return true; - if (from.isArray() || to.isArray() || ! from.sameElementShape(to)) - return false; - return intermediate.canImplicitlyPromote(from.getBasicType(), to.getBasicType()); - }; - - // Is 'to2' a better conversion than 'to1'? - // Ties should not be considered as better. - // Assumes 'convertible' already said true. - const auto better = [](const TType& from, const TType& to1, const TType& to2) -> bool { - // 1. exact match - if (from == to2) - return from != to1; - if (from == to1) - return false; - - // 2. float -> double is better - if (from.getBasicType() == EbtFloat) { - if (to2.getBasicType() == EbtDouble && to1.getBasicType() != EbtDouble) - return true; - } - - // 3. -> float is better than -> double - return to2.getBasicType() == EbtFloat && to1.getBasicType() == EbtDouble; - }; - - // for ambiguity reporting - bool tie = false; - - // send to the generic selector - const TFunction* bestMatch = selectFunction(candidateList, call, convertible, better, tie); - - if (bestMatch == nullptr) - error(loc, "no matching overloaded function found", call.getName().c_str(), ""); - else if (tie) - error(loc, "ambiguous best function under implicit type conversion", call.getName().c_str(), ""); - - return bestMatch; -} - -// When a declaration includes a type, but not a variable name, it can be -// to establish defaults. -void TParseContext::declareTypeDefaults(const TSourceLoc& loc, const TPublicType& publicType) -{ - if (publicType.basicType == EbtAtomicUint && publicType.qualifier.hasBinding() && publicType.qualifier.hasOffset()) { - if (publicType.qualifier.layoutBinding >= (unsigned int)resources.maxAtomicCounterBindings) { - error(loc, "atomic_uint binding is too large", "binding", ""); - return; - } - atomicUintOffsets[publicType.qualifier.layoutBinding] = publicType.qualifier.layoutOffset; - return; - } - - if (publicType.qualifier.hasLayout()) - warn(loc, "useless application of layout qualifier", "layout", ""); -} - -// -// Do everything necessary to handle a variable (non-block) declaration. -// Either redeclaring a variable, or making a new one, updating the symbol -// table, and all error checking. -// -// Returns a subtree node that computes an initializer, if needed. -// Returns nullptr if there is no code to execute for initialization. 
-// -// 'publicType' is the type part of the declaration (to the left) -// 'arraySizes' is the arrayness tagged on the identifier (to the right) -// -TIntermNode* TParseContext::declareVariable(const TSourceLoc& loc, TString& identifier, const TPublicType& publicType, TArraySizes* arraySizes, TIntermTyped* initializer) -{ - TType type(publicType); // shallow copy; 'type' shares the arrayness and structure definition with 'publicType' - if (type.isImplicitlySizedArray()) { - // Because "int[] a = int[2](...), b = int[3](...)" makes two arrays a and b - // of different sizes, for this case sharing the shallow copy of arrayness - // with the publicType oversubscribes it, so get a deep copy of the arrayness. - type.newArraySizes(*publicType.arraySizes); - } - - if (voidErrorCheck(loc, identifier, type.getBasicType())) - return nullptr; - - if (initializer) - rValueErrorCheck(loc, "initializer", initializer); - else - nonInitConstCheck(loc, identifier, type); - - samplerCheck(loc, type, identifier, initializer); - atomicUintCheck(loc, type, identifier); - transparentOpaqueCheck(loc, type, identifier); - - if (identifier != "gl_FragCoord" && (publicType.shaderQualifiers.originUpperLeft || publicType.shaderQualifiers.pixelCenterInteger)) - error(loc, "can only apply origin_upper_left and pixel_center_origin to gl_FragCoord", "layout qualifier", ""); - if (identifier != "gl_FragDepth" && publicType.shaderQualifiers.layoutDepth != EldNone) - error(loc, "can only apply depth layout to gl_FragDepth", "layout qualifier", ""); - - // Check for redeclaration of built-ins and/or attempting to declare a reserved name - TSymbol* symbol = redeclareBuiltinVariable(loc, identifier, type.getQualifier(), publicType.shaderQualifiers); - if (symbol == nullptr) - reservedErrorCheck(loc, identifier); - - inheritGlobalDefaults(type.getQualifier()); - - // Declare the variable - if (arraySizes || type.isArray()) { - // Arrayness is potentially coming both from the type and from the - // variable: "int[] a[];" or just one or the other. - // Merge it all to the type, so all arrayness is part of the type. - arrayDimCheck(loc, &type, arraySizes); - arrayDimMerge(type, arraySizes); - - // Check that implicit sizing is only where allowed. - arraySizesCheck(loc, type.getQualifier(), &type.getArraySizes(), initializer != nullptr, false); - - if (! arrayQualifierError(loc, type.getQualifier()) && ! arrayError(loc, type)) - declareArray(loc, identifier, type, symbol); - - if (initializer) { - profileRequires(loc, ENoProfile, 120, E_GL_3DL_array_objects, "initializer"); - profileRequires(loc, EEsProfile, 300, nullptr, "initializer"); - } - } else { - // non-array case - if (symbol == nullptr) - symbol = declareNonArray(loc, identifier, type); - else if (type != symbol->getType()) - error(loc, "cannot change the type of", "redeclaration", symbol->getName().c_str()); - } - - if (symbol == nullptr) - return nullptr; - - // Deal with initializer - TIntermNode* initNode = nullptr; - if (symbol != nullptr && initializer) { - TVariable* variable = symbol->getAsVariable(); - if (! variable) { - error(loc, "initializer requires a variable, not a member", identifier.c_str(), ""); - return nullptr; - } - initNode = executeInitializer(loc, initializer, variable); - } - - // look for errors in layout qualifier use - layoutObjectCheck(loc, *symbol); - - // fix up - fixOffset(loc, *symbol); - - return initNode; -} - -// Pick up global defaults from the provide global defaults into dst. 
-void TParseContext::inheritGlobalDefaults(TQualifier& dst) const -{ - if (dst.storage == EvqVaryingOut) { - if (! dst.hasStream() && language == EShLangGeometry) - dst.layoutStream = globalOutputDefaults.layoutStream; - if (! dst.hasXfbBuffer()) - dst.layoutXfbBuffer = globalOutputDefaults.layoutXfbBuffer; - } -} - -// -// Make an internal-only variable whose name is for debug purposes only -// and won't be searched for. Callers will only use the return value to use -// the variable, not the name to look it up. It is okay if the name -// is the same as other names; there won't be any conflict. -// -TVariable* TParseContext::makeInternalVariable(const char* name, const TType& type) const -{ - TString* nameString = NewPoolTString(name); - TVariable* variable = new TVariable(nameString, type); - symbolTable.makeInternalVariable(*variable); - - return variable; -} - -// -// Declare a non-array variable, the main point being there is no redeclaration -// for resizing allowed. -// -// Return the successfully declared variable. -// -TVariable* TParseContext::declareNonArray(const TSourceLoc& loc, const TString& identifier, const TType& type) -{ - // make a new variable - TVariable* variable = new TVariable(&identifier, type); - - ioArrayCheck(loc, type, identifier); - - // add variable to symbol table - if (symbolTable.insert(*variable)) { - if (symbolTable.atGlobalLevel()) - trackLinkage(*variable); - return variable; - } - - error(loc, "redefinition", variable->getName().c_str(), ""); - return nullptr; -} - -// -// Handle all types of initializers from the grammar. -// -// Returning nullptr just means there is no code to execute to handle the -// initializer, which will, for example, be the case for constant initializers. -// -TIntermNode* TParseContext::executeInitializer(const TSourceLoc& loc, TIntermTyped* initializer, TVariable* variable) -{ - // - // Identifier must be of type constant, a global, or a temporary, and - // starting at version 120, desktop allows uniforms to have initializers. - // - TStorageQualifier qualifier = variable->getType().getQualifier().storage; - if (! (qualifier == EvqTemporary || qualifier == EvqGlobal || qualifier == EvqConst || - (qualifier == EvqUniform && profile != EEsProfile && version >= 120))) { - error(loc, " cannot initialize this type of qualifier ", variable->getType().getStorageQualifierString(), ""); - return nullptr; - } - arrayObjectCheck(loc, variable->getType(), "array initializer"); - - // - // If the initializer was from braces { ... }, we convert the whole subtree to a - // constructor-style subtree, allowing the rest of the code to operate - // identically for both kinds of initializers. - // - // Type can't be deduced from the initializer list, so a skeletal type to - // follow has to be passed in. Constness and specialization-constness - // should be deduced bottom up, not dictated by the skeletal type. - // - TType skeletalType; - skeletalType.shallowCopy(variable->getType()); - skeletalType.getQualifier().makeTemporary(); - initializer = convertInitializerList(loc, skeletalType, initializer); - if (! 
initializer) { - // error recovery; don't leave const without constant values - if (qualifier == EvqConst) - variable->getWritableType().getQualifier().makeTemporary(); - return nullptr; - } - - // Fix outer arrayness if variable is unsized, getting size from the initializer - if (initializer->getType().isExplicitlySizedArray() && - variable->getType().isImplicitlySizedArray()) - variable->getWritableType().changeOuterArraySize(initializer->getType().getOuterArraySize()); - - // Inner arrayness can also get set by an initializer - if (initializer->getType().isArrayOfArrays() && variable->getType().isArrayOfArrays() && - initializer->getType().getArraySizes()->getNumDims() == - variable->getType().getArraySizes()->getNumDims()) { - // adopt unsized sizes from the initializer's sizes - for (int d = 1; d < variable->getType().getArraySizes()->getNumDims(); ++d) { - if (variable->getType().getArraySizes()->getDimSize(d) == UnsizedArraySize) - variable->getWritableType().getArraySizes().setDimSize(d, initializer->getType().getArraySizes()->getDimSize(d)); - } - } - - // Uniforms require a compile-time constant initializer - if (qualifier == EvqUniform && ! initializer->getType().getQualifier().isFrontEndConstant()) { - error(loc, "uniform initializers must be constant", "=", "'%s'", variable->getType().getCompleteString().c_str()); - variable->getWritableType().getQualifier().makeTemporary(); - return nullptr; - } - // Global consts require a constant initializer (specialization constant is okay) - if (qualifier == EvqConst && symbolTable.atGlobalLevel() && ! initializer->getType().getQualifier().isConstant()) { - error(loc, "global const initializers must be constant", "=", "'%s'", variable->getType().getCompleteString().c_str()); - variable->getWritableType().getQualifier().makeTemporary(); - return nullptr; - } - - // Const variables require a constant initializer, depending on version - if (qualifier == EvqConst) { - if (! initializer->getType().getQualifier().isConstant()) { - const char* initFeature = "non-constant initializer"; - requireProfile(loc, ~EEsProfile, initFeature); - profileRequires(loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, initFeature); - variable->getWritableType().getQualifier().storage = EvqConstReadOnly; - qualifier = EvqConstReadOnly; - } - } else { - // Non-const global variables in ES need a const initializer. - // - // "In declarations of global variables with no storage qualifier or with a const - // qualifier any initializer must be a constant expression." - if (symbolTable.atGlobalLevel() && ! initializer->getType().getQualifier().isConstant()) { - const char* initFeature = "non-constant global initializer (needs GL_EXT_shader_non_constant_global_initializers)"; - if (profile == EEsProfile) { - if (relaxedErrors() && ! extensionTurnedOn(E_GL_EXT_shader_non_constant_global_initializers)) - warn(loc, "not allowed in this version", initFeature, ""); - else - profileRequires(loc, EEsProfile, 0, E_GL_EXT_shader_non_constant_global_initializers, initFeature); - } - } - } - - if (qualifier == EvqConst || qualifier == EvqUniform) { - // Compile-time tagging of the variable with its constant value... - - initializer = intermediate.addConversion(EOpAssign, variable->getType(), initializer); - if (! initializer || ! 
initializer->getType().getQualifier().isConstant() || variable->getType() != initializer->getType()) { - error(loc, "non-matching or non-convertible constant type for const initializer", - variable->getType().getStorageQualifierString(), ""); - variable->getWritableType().getQualifier().makeTemporary(); - return nullptr; - } - - // We either have a folded constant in getAsConstantUnion, or we have to use - // the initializer's subtree in the AST to represent the computation of a - // specialization constant. - assert(initializer->getAsConstantUnion() || initializer->getType().getQualifier().isSpecConstant()); - if (initializer->getAsConstantUnion()) - variable->setConstArray(initializer->getAsConstantUnion()->getConstArray()); - else { - // It's a specialization constant. - variable->getWritableType().getQualifier().makeSpecConstant(); - - // Keep the subtree that computes the specialization constant with the variable. - // Later, a symbol node will adopt the subtree from the variable. - variable->setConstSubtree(initializer); - } - } else { - // normal assigning of a value to a variable... - specializationCheck(loc, initializer->getType(), "initializer"); - TIntermSymbol* intermSymbol = intermediate.addSymbol(*variable, loc); - TIntermTyped* initNode = intermediate.addAssign(EOpAssign, intermSymbol, initializer, loc); - if (! initNode) - assignError(loc, "=", intermSymbol->getCompleteString(), initializer->getCompleteString()); - - return initNode; - } - - return nullptr; -} - -// -// Reprocess any initializer-list (the "{ ... }" syntax) parts of the -// initializer. -// -// Need to hierarchically assign correct types and implicit -// conversions. Will do this mimicking the same process used for -// creating a constructor-style initializer, ensuring we get the -// same form. However, it has to in parallel walk the 'type' -// passed in, as type cannot be deduced from an initializer list. -// -TIntermTyped* TParseContext::convertInitializerList(const TSourceLoc& loc, const TType& type, TIntermTyped* initializer) -{ - // Will operate recursively. Once a subtree is found that is constructor style, - // everything below it is already good: Only the "top part" of the initializer - // can be an initializer list, where "top part" can extend for several (or all) levels. - - // see if we have bottomed out in the tree within the initializer-list part - TIntermAggregate* initList = initializer->getAsAggregate(); - if (! initList || initList->getOp() != EOpNull) - return initializer; - - // Of the initializer-list set of nodes, need to process bottom up, - // so recurse deep, then process on the way up. - - // Go down the tree here... - if (type.isArray()) { - // The type's array might be unsized, which could be okay, so base sizes on the size of the aggregate. - // Later on, initializer execution code will deal with array size logic. 
- TType arrayType; - arrayType.shallowCopy(type); // sharing struct stuff is fine - arrayType.newArraySizes(*type.getArraySizes()); // but get a fresh copy of the array information, to edit below - - // edit array sizes to fill in unsized dimensions - arrayType.changeOuterArraySize((int)initList->getSequence().size()); - TIntermTyped* firstInit = initList->getSequence()[0]->getAsTyped(); - if (arrayType.isArrayOfArrays() && firstInit->getType().isArray() && - arrayType.getArraySizes().getNumDims() == firstInit->getType().getArraySizes()->getNumDims() + 1) { - for (int d = 1; d < arrayType.getArraySizes().getNumDims(); ++d) { - if (arrayType.getArraySizes().getDimSize(d) == UnsizedArraySize) - arrayType.getArraySizes().setDimSize(d, firstInit->getType().getArraySizes()->getDimSize(d - 1)); - } - } - - TType elementType(arrayType, 0); // dereferenced type - for (size_t i = 0; i < initList->getSequence().size(); ++i) { - initList->getSequence()[i] = convertInitializerList(loc, elementType, initList->getSequence()[i]->getAsTyped()); - if (initList->getSequence()[i] == nullptr) - return nullptr; - } - - return addConstructor(loc, initList, arrayType); - } else if (type.isStruct()) { - if (type.getStruct()->size() != initList->getSequence().size()) { - error(loc, "wrong number of structure members", "initializer list", ""); - return nullptr; - } - for (size_t i = 0; i < type.getStruct()->size(); ++i) { - initList->getSequence()[i] = convertInitializerList(loc, *(*type.getStruct())[i].type, initList->getSequence()[i]->getAsTyped()); - if (initList->getSequence()[i] == nullptr) - return nullptr; - } - } else if (type.isMatrix()) { - if (type.getMatrixCols() != (int)initList->getSequence().size()) { - error(loc, "wrong number of matrix columns:", "initializer list", type.getCompleteString().c_str()); - return nullptr; - } - TType vectorType(type, 0); // dereferenced type - for (int i = 0; i < type.getMatrixCols(); ++i) { - initList->getSequence()[i] = convertInitializerList(loc, vectorType, initList->getSequence()[i]->getAsTyped()); - if (initList->getSequence()[i] == nullptr) - return nullptr; - } - } else if (type.isVector()) { - if (type.getVectorSize() != (int)initList->getSequence().size()) { - error(loc, "wrong vector size (or rows in a matrix column):", "initializer list", type.getCompleteString().c_str()); - return nullptr; - } - } else { - error(loc, "unexpected initializer-list type:", "initializer list", type.getCompleteString().c_str()); - return nullptr; - } - - // Now that the subtree is processed, process this node as if the - // initializer list is a set of arguments to a constructor. - TIntermNode* emulatedConstructorArguments; - if (initList->getSequence().size() == 1) - emulatedConstructorArguments = initList->getSequence()[0]; - else - emulatedConstructorArguments = initList; - return addConstructor(loc, emulatedConstructorArguments, type); -} - -// -// Test for the correctness of the parameters passed to various constructor functions -// and also convert them to the right data type, if allowed and required. -// -// 'node' is what to construct from. -// 'type' is what type to construct. -// -// Returns nullptr for an error or the constructed node (aggregate or typed) for no error. 
-// -TIntermTyped* TParseContext::addConstructor(const TSourceLoc& loc, TIntermNode* node, const TType& type) -{ - if (node == nullptr || node->getAsTyped() == nullptr) - return nullptr; - rValueErrorCheck(loc, "constructor", node->getAsTyped()); - - TIntermAggregate* aggrNode = node->getAsAggregate(); - TOperator op = intermediate.mapTypeToConstructorOp(type); - - // Combined texture-sampler constructors are completely semantic checked - // in constructorTextureSamplerError() - if (op == EOpConstructTextureSampler) - return intermediate.setAggregateOperator(aggrNode, op, type, loc); - - TTypeList::const_iterator memberTypes; - if (op == EOpConstructStruct) - memberTypes = type.getStruct()->begin(); - - TType elementType; - if (type.isArray()) { - TType dereferenced(type, 0); - elementType.shallowCopy(dereferenced); - } else - elementType.shallowCopy(type); - - bool singleArg; - if (aggrNode) { - if (aggrNode->getOp() != EOpNull || aggrNode->getSequence().size() == 1) - singleArg = true; - else - singleArg = false; - } else - singleArg = true; - - TIntermTyped *newNode; - if (singleArg) { - // If structure constructor or array constructor is being called - // for only one parameter inside the structure, we need to call constructAggregate function once. - if (type.isArray()) - newNode = constructAggregate(node, elementType, 1, node->getLoc()); - else if (op == EOpConstructStruct) - newNode = constructAggregate(node, *(*memberTypes).type, 1, node->getLoc()); - else - newNode = constructBuiltIn(type, op, node->getAsTyped(), node->getLoc(), false); - - if (newNode && (type.isArray() || op == EOpConstructStruct)) - newNode = intermediate.setAggregateOperator(newNode, EOpConstructStruct, type, loc); - - return newNode; - } - - // - // Handle list of arguments. - // - TIntermSequence &sequenceVector = aggrNode->getSequence(); // Stores the information about the parameter to the constructor - // if the structure constructor contains more than one parameter, then construct - // each parameter - - int paramCount = 0; // keeps track of the constructor parameter number being checked - - // for each parameter to the constructor call, check to see if the right type is passed or convert them - // to the right type if possible (and allowed). - // for structure constructors, just check if the right type is passed, no conversion is allowed. - for (TIntermSequence::iterator p = sequenceVector.begin(); - p != sequenceVector.end(); p++, paramCount++) { - if (type.isArray()) - newNode = constructAggregate(*p, elementType, paramCount+1, node->getLoc()); - else if (op == EOpConstructStruct) - newNode = constructAggregate(*p, *(memberTypes[paramCount]).type, paramCount+1, node->getLoc()); - else - newNode = constructBuiltIn(type, op, (*p)->getAsTyped(), node->getLoc(), true); - - if (newNode) - *p = newNode; - else - return nullptr; - } - - return intermediate.setAggregateOperator(aggrNode, op, type, loc); -} - -// Function for constructor implementation. Calls addUnaryMath with appropriate EOp value -// for the parameter to the constructor (passed to this function). Essentially, it converts -// the parameter types correctly. If a constructor expects an int (like ivec2) and is passed a -// float, then float is converted to int. -// -// Returns nullptr for an error or the constructed node. 
-// -TIntermTyped* TParseContext::constructBuiltIn(const TType& type, TOperator op, TIntermTyped* node, const TSourceLoc& loc, bool subset) -{ - TIntermTyped* newNode; - TOperator basicOp; - - // - // First, convert types as needed. - // - switch (op) { - case EOpConstructVec2: - case EOpConstructVec3: - case EOpConstructVec4: - case EOpConstructMat2x2: - case EOpConstructMat2x3: - case EOpConstructMat2x4: - case EOpConstructMat3x2: - case EOpConstructMat3x3: - case EOpConstructMat3x4: - case EOpConstructMat4x2: - case EOpConstructMat4x3: - case EOpConstructMat4x4: - case EOpConstructFloat: - basicOp = EOpConstructFloat; - break; - - case EOpConstructDVec2: - case EOpConstructDVec3: - case EOpConstructDVec4: - case EOpConstructDMat2x2: - case EOpConstructDMat2x3: - case EOpConstructDMat2x4: - case EOpConstructDMat3x2: - case EOpConstructDMat3x3: - case EOpConstructDMat3x4: - case EOpConstructDMat4x2: - case EOpConstructDMat4x3: - case EOpConstructDMat4x4: - case EOpConstructDouble: - basicOp = EOpConstructDouble; - break; - -#ifdef AMD_EXTENSIONS - case EOpConstructF16Vec2: - case EOpConstructF16Vec3: - case EOpConstructF16Vec4: - case EOpConstructF16Mat2x2: - case EOpConstructF16Mat2x3: - case EOpConstructF16Mat2x4: - case EOpConstructF16Mat3x2: - case EOpConstructF16Mat3x3: - case EOpConstructF16Mat3x4: - case EOpConstructF16Mat4x2: - case EOpConstructF16Mat4x3: - case EOpConstructF16Mat4x4: - case EOpConstructFloat16: - basicOp = EOpConstructFloat16; - break; -#endif - - case EOpConstructIVec2: - case EOpConstructIVec3: - case EOpConstructIVec4: - case EOpConstructInt: - basicOp = EOpConstructInt; - break; - - case EOpConstructUVec2: - case EOpConstructUVec3: - case EOpConstructUVec4: - case EOpConstructUint: - basicOp = EOpConstructUint; - break; - - case EOpConstructI64Vec2: - case EOpConstructI64Vec3: - case EOpConstructI64Vec4: - case EOpConstructInt64: - basicOp = EOpConstructInt64; - break; - - case EOpConstructU64Vec2: - case EOpConstructU64Vec3: - case EOpConstructU64Vec4: - case EOpConstructUint64: - basicOp = EOpConstructUint64; - break; - -#ifdef AMD_EXTENSIONS - case EOpConstructI16Vec2: - case EOpConstructI16Vec3: - case EOpConstructI16Vec4: - case EOpConstructInt16: - basicOp = EOpConstructInt16; - break; - - case EOpConstructU16Vec2: - case EOpConstructU16Vec3: - case EOpConstructU16Vec4: - case EOpConstructUint16: - basicOp = EOpConstructUint16; - break; -#endif - - case EOpConstructBVec2: - case EOpConstructBVec3: - case EOpConstructBVec4: - case EOpConstructBool: - basicOp = EOpConstructBool; - break; - - default: - error(loc, "unsupported construction", "", ""); - - return nullptr; - } - newNode = intermediate.addUnaryMath(basicOp, node, node->getLoc()); - if (newNode == nullptr) { - error(loc, "can't convert", "constructor", ""); - return nullptr; - } - - // - // Now, if there still isn't an operation to do the construction, and we need one, add one. - // - - // Otherwise, skip out early. - if (subset || (newNode != node && newNode->getType() == type)) - return newNode; - - // setAggregateOperator will insert a new node for the constructor, as needed. - return intermediate.setAggregateOperator(newNode, op, type, loc); -} - -// This function tests for the type of the parameters to the structure or array constructor. Raises -// an error message if the expected type does not match the parameter passed to the constructor. -// -// Returns nullptr for an error or the input node itself if the expected and the given parameter types match. 
-// -TIntermTyped* TParseContext::constructAggregate(TIntermNode* node, const TType& type, int paramCount, const TSourceLoc& loc) -{ - TIntermTyped* converted = intermediate.addConversion(EOpConstructStruct, type, node->getAsTyped()); - if (! converted || converted->getType() != type) { - error(loc, "", "constructor", "cannot convert parameter %d from '%s' to '%s'", paramCount, - node->getAsTyped()->getType().getCompleteString().c_str(), type.getCompleteString().c_str()); - - return nullptr; - } - - return converted; -} - -// -// Do everything needed to add an interface block. -// -void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, const TString* instanceName, TArraySizes* arraySizes) -{ - blockStageIoCheck(loc, currentBlockQualifier); - blockQualifierCheck(loc, currentBlockQualifier, instanceName != nullptr); - if (arraySizes) { - arraySizesCheck(loc, currentBlockQualifier, arraySizes, false, false); - arrayDimCheck(loc, arraySizes, 0); - if (arraySizes->getNumDims() > 1) - requireProfile(loc, ~EEsProfile, "array-of-array of block"); - } - - // fix and check for member storage qualifiers and types that don't belong within a block - for (unsigned int member = 0; member < typeList.size(); ++member) { - TType& memberType = *typeList[member].type; - TQualifier& memberQualifier = memberType.getQualifier(); - const TSourceLoc& memberLoc = typeList[member].loc; - globalQualifierFixCheck(memberLoc, memberQualifier); - if (memberQualifier.storage != EvqTemporary && memberQualifier.storage != EvqGlobal && memberQualifier.storage != currentBlockQualifier.storage) - error(memberLoc, "member storage qualifier cannot contradict block storage qualifier", memberType.getFieldName().c_str(), ""); - memberQualifier.storage = currentBlockQualifier.storage; - if ((currentBlockQualifier.storage == EvqUniform || currentBlockQualifier.storage == EvqBuffer) && (memberQualifier.isInterpolation() || memberQualifier.isAuxiliary())) - error(memberLoc, "member of uniform or buffer block cannot have an auxiliary or interpolation qualifier", memberType.getFieldName().c_str(), ""); - if (memberType.isArray()) - arraySizesCheck(memberLoc, currentBlockQualifier, &memberType.getArraySizes(), false, member == typeList.size() - 1); - if (memberQualifier.hasOffset()) { - if (spvVersion.spv == 0) { - requireProfile(memberLoc, ~EEsProfile, "offset on block member"); - profileRequires(memberLoc, ~EEsProfile, 440, E_GL_ARB_enhanced_layouts, "offset on block member"); - } - } - - if (memberType.containsOpaque()) - error(memberLoc, "member of block cannot be or contain a sampler, image, or atomic_uint type", typeList[member].type->getFieldName().c_str(), ""); - } - - // This might be a redeclaration of a built-in block. If so, redeclareBuiltinBlock() will - // do all the rest. - if (! symbolTable.atBuiltInLevel() && builtInName(*blockName)) { - redeclareBuiltinBlock(loc, typeList, *blockName, instanceName, arraySizes); - return; - } - - // Not a redeclaration of a built-in; check that all names are user names. 
- reservedErrorCheck(loc, *blockName); - if (instanceName) - reservedErrorCheck(loc, *instanceName); - for (unsigned int member = 0; member < typeList.size(); ++member) - reservedErrorCheck(typeList[member].loc, typeList[member].type->getFieldName()); - - // Make default block qualification, and adjust the member qualifications - - TQualifier defaultQualification; - switch (currentBlockQualifier.storage) { - case EvqUniform: defaultQualification = globalUniformDefaults; break; - case EvqBuffer: defaultQualification = globalBufferDefaults; break; - case EvqVaryingIn: defaultQualification = globalInputDefaults; break; - case EvqVaryingOut: defaultQualification = globalOutputDefaults; break; - default: defaultQualification.clear(); break; - } - - // Special case for "push_constant uniform", which has a default of std430, - // contrary to normal uniform defaults, and can't have a default tracked for it. - if (currentBlockQualifier.layoutPushConstant && !currentBlockQualifier.hasPacking()) - currentBlockQualifier.layoutPacking = ElpStd430; - - // fix and check for member layout qualifiers - - mergeObjectLayoutQualifiers(defaultQualification, currentBlockQualifier, true); - - // "The align qualifier can only be used on blocks or block members, and only for blocks declared with std140 or std430 layouts." - if (currentBlockQualifier.hasAlign()) { - if (defaultQualification.layoutPacking != ElpStd140 && defaultQualification.layoutPacking != ElpStd430) { - error(loc, "can only be used with std140 or std430 layout packing", "align", ""); - defaultQualification.layoutAlign = -1; - } - } - - bool memberWithLocation = false; - bool memberWithoutLocation = false; - for (unsigned int member = 0; member < typeList.size(); ++member) { - TQualifier& memberQualifier = typeList[member].type->getQualifier(); - const TSourceLoc& memberLoc = typeList[member].loc; - if (memberQualifier.hasStream()) { - if (defaultQualification.layoutStream != memberQualifier.layoutStream) - error(memberLoc, "member cannot contradict block", "stream", ""); - } - - // "This includes a block's inheritance of the - // current global default buffer, a block member's inheritance of the block's - // buffer, and the requirement that any *xfb_buffer* declared on a block - // member must match the buffer inherited from the block." - if (memberQualifier.hasXfbBuffer()) { - if (defaultQualification.layoutXfbBuffer != memberQualifier.layoutXfbBuffer) - error(memberLoc, "member cannot contradict block (or what block inherited from global)", "xfb_buffer", ""); - } - - if (memberQualifier.hasPacking()) - error(memberLoc, "member of block cannot have a packing layout qualifier", typeList[member].type->getFieldName().c_str(), ""); - if (memberQualifier.hasLocation()) { - const char* feature = "location on block member"; - switch (currentBlockQualifier.storage) { - case EvqVaryingIn: - case EvqVaryingOut: - requireProfile(memberLoc, ECoreProfile | ECompatibilityProfile | EEsProfile, feature); - profileRequires(memberLoc, ECoreProfile | ECompatibilityProfile, 440, E_GL_ARB_enhanced_layouts, feature); - profileRequires(memberLoc, EEsProfile, 320, Num_AEP_shader_io_blocks, AEP_shader_io_blocks, feature); - memberWithLocation = true; - break; - default: - error(memberLoc, "can only use in an in/out block", feature, ""); - break; - } - } else - memberWithoutLocation = true; - - // "The offset qualifier can only be used on block members of blocks declared with std140 or std430 layouts." 
- // "The align qualifier can only be used on blocks or block members, and only for blocks declared with std140 or std430 layouts." - if (memberQualifier.hasAlign() || memberQualifier.hasOffset()) { - if (defaultQualification.layoutPacking != ElpStd140 && defaultQualification.layoutPacking != ElpStd430) - error(memberLoc, "can only be used with std140 or std430 layout packing", "offset/align", ""); - } - - TQualifier newMemberQualification = defaultQualification; - mergeQualifiers(memberLoc, newMemberQualification, memberQualifier, false); - memberQualifier = newMemberQualification; - } - - layoutMemberLocationArrayCheck(loc, memberWithLocation, arraySizes); - - // Process the members - fixBlockLocations(loc, currentBlockQualifier, typeList, memberWithLocation, memberWithoutLocation); - fixBlockXfbOffsets(currentBlockQualifier, typeList); - fixBlockUniformOffsets(currentBlockQualifier, typeList); - for (unsigned int member = 0; member < typeList.size(); ++member) - layoutTypeCheck(typeList[member].loc, *typeList[member].type); - - // reverse merge, so that currentBlockQualifier now has all layout information - // (can't use defaultQualification directly, it's missing other non-layout-default-class qualifiers) - mergeObjectLayoutQualifiers(currentBlockQualifier, defaultQualification, true); - - // - // Build and add the interface block as a new type named 'blockName' - // - - TType blockType(&typeList, *blockName, currentBlockQualifier); - if (arraySizes) - blockType.newArraySizes(*arraySizes); - else - ioArrayCheck(loc, blockType, instanceName ? *instanceName : *blockName); - - // - // Don't make a user-defined type out of block name; that will cause an error - // if the same block name gets reused in a different interface. - // - // "Block names have no other use within a shader - // beyond interface matching; it is a compile-time error to use a block name at global scope for anything - // other than as a block name (e.g., use of a block name for a global variable name or function name is - // currently reserved)." - // - // Use the symbol table to prevent normal reuse of the block's name, as a variable entry, - // whose type is EbtBlock, but without all the structure; that will come from the type - // the instances point to. - // - TType blockNameType(EbtBlock, blockType.getQualifier().storage); - TVariable* blockNameVar = new TVariable(blockName, blockNameType); - if (! symbolTable.insert(*blockNameVar)) { - TSymbol* existingName = symbolTable.find(*blockName); - if (existingName->getType().getBasicType() == EbtBlock) { - if (existingName->getType().getQualifier().storage == blockType.getQualifier().storage) { - error(loc, "Cannot reuse block name within the same interface:", blockName->c_str(), blockType.getStorageQualifierString()); - return; - } - } else { - error(loc, "block name cannot redefine a non-block name", blockName->c_str(), ""); - return; - } - } - - // Add the variable, as anonymous or named instanceName. - // Make an anonymous variable if no name was provided. - if (! instanceName) - instanceName = NewPoolTString(""); - - TVariable& variable = *new TVariable(instanceName, blockType); - if (! 
symbolTable.insert(variable)) { - if (*instanceName == "") - error(loc, "nameless block contains a member that already has a name at global scope", blockName->c_str(), ""); - else - error(loc, "block instance name redefinition", variable.getName().c_str(), ""); - - return; - } - - // Check for general layout qualifier errors - layoutObjectCheck(loc, variable); - - // fix up - if (isIoResizeArray(blockType)) { - ioArraySymbolResizeList.push_back(&variable); - checkIoArraysConsistency(loc, true); - } else - fixIoArraySize(loc, variable.getWritableType()); - - // Save it in the AST for linker use. - trackLinkage(variable); -} - -// Do all block-declaration checking regarding the combination of in/out/uniform/buffer -// with a particular stage. -void TParseContext::blockStageIoCheck(const TSourceLoc& loc, const TQualifier& qualifier) -{ - switch (qualifier.storage) { - case EvqUniform: - profileRequires(loc, EEsProfile, 300, nullptr, "uniform block"); - profileRequires(loc, ENoProfile, 140, nullptr, "uniform block"); - if (currentBlockQualifier.layoutPacking == ElpStd430 && ! currentBlockQualifier.layoutPushConstant) - error(loc, "requires the 'buffer' storage qualifier", "std430", ""); - break; - case EvqBuffer: - requireProfile(loc, EEsProfile | ECoreProfile | ECompatibilityProfile, "buffer block"); - profileRequires(loc, ECoreProfile | ECompatibilityProfile, 430, nullptr, "buffer block"); - profileRequires(loc, EEsProfile, 310, nullptr, "buffer block"); - break; - case EvqVaryingIn: - profileRequires(loc, ~EEsProfile, 150, E_GL_ARB_separate_shader_objects, "input block"); - // It is a compile-time error to have an input block in a vertex shader or an output block in a fragment shader - // "Compute shaders do not permit user-defined input variables..." - requireStage(loc, (EShLanguageMask)(EShLangTessControlMask|EShLangTessEvaluationMask|EShLangGeometryMask|EShLangFragmentMask), "input block"); - if (language == EShLangFragment) - profileRequires(loc, EEsProfile, 320, Num_AEP_shader_io_blocks, AEP_shader_io_blocks, "fragment input block"); - break; - case EvqVaryingOut: - profileRequires(loc, ~EEsProfile, 150, E_GL_ARB_separate_shader_objects, "output block"); - requireStage(loc, (EShLanguageMask)(EShLangVertexMask|EShLangTessControlMask|EShLangTessEvaluationMask|EShLangGeometryMask), "output block"); - // ES 310 can have a block before shader_io is turned on, so skip this test for built-ins - if (language == EShLangVertex && ! parsingBuiltins) - profileRequires(loc, EEsProfile, 320, Num_AEP_shader_io_blocks, AEP_shader_io_blocks, "vertex output block"); - break; - default: - error(loc, "only uniform, buffer, in, or out blocks are supported", blockName->c_str(), ""); - break; - } -} - -// Do all block-declaration checking regarding its qualifiers. -void TParseContext::blockQualifierCheck(const TSourceLoc& loc, const TQualifier& qualifier, bool /*instanceName*/) -{ - // The 4.5 specification says: - // - // interface-block : - // layout-qualifieropt interface-qualifier block-name { member-list } instance-nameopt ; - // - // interface-qualifier : - // in - // out - // patch in - // patch out - // uniform - // buffer - // - // Note however memory qualifiers aren't included, yet the specification also says - // - // "...memory qualifiers may also be used in the declaration of shader storage blocks..." 
- - if (qualifier.isInterpolation()) - error(loc, "cannot use interpolation qualifiers on an interface block", "flat/smooth/noperspective", ""); - if (qualifier.centroid) - error(loc, "cannot use centroid qualifier on an interface block", "centroid", ""); - if (qualifier.sample) - error(loc, "cannot use sample qualifier on an interface block", "sample", ""); - if (qualifier.invariant) - error(loc, "cannot use invariant qualifier on an interface block", "invariant", ""); - if (qualifier.layoutPushConstant) - intermediate.addPushConstantCount(); -} - -// -// "For a block, this process applies to the entire block, or until the first member -// is reached that has a location layout qualifier. When a block member is declared with a location -// qualifier, its location comes from that qualifier: The member's location qualifier overrides the block-level -// declaration. Subsequent members are again assigned consecutive locations, based on the newest location, -// until the next member declared with a location qualifier. The values used for locations do not have to be -// declared in increasing order." -void TParseContext::fixBlockLocations(const TSourceLoc& loc, TQualifier& qualifier, TTypeList& typeList, bool memberWithLocation, bool memberWithoutLocation) -{ - // "If a block has no block-level location layout qualifier, it is required that either all or none of its members - // have a location layout qualifier, or a compile-time error results." - if (! qualifier.hasLocation() && memberWithLocation && memberWithoutLocation) - error(loc, "either the block needs a location, or all members need a location, or no members have a location", "location", ""); - else { - if (memberWithLocation) { - // remove any block-level location and make it per *every* member - int nextLocation = 0; // by the rule above, initial value is not relevant - if (qualifier.hasAnyLocation()) { - nextLocation = qualifier.layoutLocation; - qualifier.layoutLocation = TQualifier::layoutLocationEnd; - if (qualifier.hasComponent()) { - // "It is a compile-time error to apply the *component* qualifier to a ... block" - error(loc, "cannot apply to a block", "component", ""); - } - if (qualifier.hasIndex()) { - error(loc, "cannot apply to a block", "index", ""); - } - } - for (unsigned int member = 0; member < typeList.size(); ++member) { - TQualifier& memberQualifier = typeList[member].type->getQualifier(); - const TSourceLoc& memberLoc = typeList[member].loc; - if (! memberQualifier.hasLocation()) { - if (nextLocation >= (int)TQualifier::layoutLocationEnd) - error(memberLoc, "location is too large", "location", ""); - memberQualifier.layoutLocation = nextLocation; - memberQualifier.layoutComponent = TQualifier::layoutComponentEnd; - } - nextLocation = memberQualifier.layoutLocation + intermediate.computeTypeLocationSize(*typeList[member].type); - } - } - } -} - -void TParseContext::fixBlockXfbOffsets(TQualifier& qualifier, TTypeList& typeList) -{ - // "If a block is qualified with xfb_offset, all its - // members are assigned transform feedback buffer offsets. If a block is not qualified with xfb_offset, any - // members of that block not qualified with an xfb_offset will not be assigned transform feedback buffer - // offsets." - - if (! qualifier.hasXfbBuffer() || ! 
qualifier.hasXfbOffset()) - return; - - int nextOffset = qualifier.layoutXfbOffset; - for (unsigned int member = 0; member < typeList.size(); ++member) { - TQualifier& memberQualifier = typeList[member].type->getQualifier(); - bool containsDouble = false; - int memberSize = intermediate.computeTypeXfbSize(*typeList[member].type, containsDouble); - // see if we need to auto-assign an offset to this member - if (! memberQualifier.hasXfbOffset()) { - // "if applied to an aggregate containing a double, the offset must also be a multiple of 8" - if (containsDouble) - RoundToPow2(nextOffset, 8); - memberQualifier.layoutXfbOffset = nextOffset; - } else - nextOffset = memberQualifier.layoutXfbOffset; - nextOffset += memberSize; - } - - // The above gave all block members an offset, so we can take it off the block now, - // which will avoid double counting the offset usage. - qualifier.layoutXfbOffset = TQualifier::layoutXfbOffsetEnd; -} - -// Calculate and save the offset of each block member, using the recursively -// defined block offset rules and the user-provided offset and align. -// -// Also, compute and save the total size of the block. For the block's size, arrayness -// is not taken into account, as each element is backed by a separate buffer. -// -void TParseContext::fixBlockUniformOffsets(TQualifier& qualifier, TTypeList& typeList) -{ - if (! qualifier.isUniformOrBuffer()) - return; - if (qualifier.layoutPacking != ElpStd140 && qualifier.layoutPacking != ElpStd430) - return; - - int offset = 0; - int memberSize; - for (unsigned int member = 0; member < typeList.size(); ++member) { - TQualifier& memberQualifier = typeList[member].type->getQualifier(); - const TSourceLoc& memberLoc = typeList[member].loc; - - // "When align is applied to an array, it effects only the start of the array, not the array's internal stride." - - // modify just the children's view of matrix layout, if there is one for this member - TLayoutMatrix subMatrixLayout = typeList[member].type->getQualifier().layoutMatrix; - int dummyStride; - int memberAlignment = intermediate.getBaseAlignment(*typeList[member].type, memberSize, dummyStride, qualifier.layoutPacking == ElpStd140, - subMatrixLayout != ElmNone ? subMatrixLayout == ElmRowMajor : qualifier.layoutMatrix == ElmRowMajor); - if (memberQualifier.hasOffset()) { - // "The specified offset must be a multiple - // of the base alignment of the type of the block member it qualifies, or a compile-time error results." - if (! IsMultipleOfPow2(memberQualifier.layoutOffset, memberAlignment)) - error(memberLoc, "must be a multiple of the member's alignment", "offset", ""); - - // GLSL: "It is a compile-time error to specify an offset that is smaller than the offset of the previous - // member in the block or that lies within the previous member of the block" - if (spvVersion.spv == 0) { - if (memberQualifier.layoutOffset < offset) - error(memberLoc, "cannot lie in previous members", "offset", ""); - - // "The offset qualifier forces the qualified member to start at or after the specified - // integral-constant expression, which will be its byte offset from the beginning of the buffer. - // "The actual offset of a member is computed as - // follows: If offset was declared, start with that offset, otherwise start with the next available offset." - offset = std::max(offset, memberQualifier.layoutOffset); - } else { - // TODO: Vulkan: "It is a compile-time error to have any offset, explicit or assigned, - // that lies within another member of the block." 
- - offset = memberQualifier.layoutOffset; - } - } - - // "The actual alignment of a member will be the greater of the specified align alignment and the standard - // (e.g., std140) base alignment for the member's type." - if (memberQualifier.hasAlign()) - memberAlignment = std::max(memberAlignment, memberQualifier.layoutAlign); - - // "If the resulting offset is not a multiple of the actual alignment, - // increase it to the first offset that is a multiple of - // the actual alignment." - RoundToPow2(offset, memberAlignment); - typeList[member].type->getQualifier().layoutOffset = offset; - offset += memberSize; - } -} - -// For an identifier that is already declared, add more qualification to it. -void TParseContext::addQualifierToExisting(const TSourceLoc& loc, TQualifier qualifier, const TString& identifier) -{ - TSymbol* symbol = symbolTable.find(identifier); - if (! symbol) { - error(loc, "identifier not previously declared", identifier.c_str(), ""); - return; - } - if (symbol->getAsFunction()) { - error(loc, "cannot re-qualify a function name", identifier.c_str(), ""); - return; - } - - if (qualifier.isAuxiliary() || - qualifier.isMemory() || - qualifier.isInterpolation() || - qualifier.hasLayout() || - qualifier.storage != EvqTemporary || - qualifier.precision != EpqNone) { - error(loc, "cannot add storage, auxiliary, memory, interpolation, layout, or precision qualifier to an existing variable", identifier.c_str(), ""); - return; - } - - // For read-only built-ins, add a new symbol for holding the modified qualifier. - // This will bring up an entire block, if a block type has to be modified (e.g., gl_Position inside a block) - if (symbol->isReadOnly()) - symbol = symbolTable.copyUp(symbol); - - if (qualifier.invariant) { - if (intermediate.inIoAccessed(identifier)) - error(loc, "cannot change qualification after use", "invariant", ""); - symbol->getWritableType().getQualifier().invariant = true; - invariantCheck(loc, symbol->getType().getQualifier()); - } else if (qualifier.noContraction) { - if (intermediate.inIoAccessed(identifier)) - error(loc, "cannot change qualification after use", "precise", ""); - symbol->getWritableType().getQualifier().noContraction = true; - } else if (qualifier.specConstant) { - symbol->getWritableType().getQualifier().makeSpecConstant(); - if (qualifier.hasSpecConstantId()) - symbol->getWritableType().getQualifier().layoutSpecConstantId = qualifier.layoutSpecConstantId; - } else - warn(loc, "unknown requalification", "", ""); -} - -void TParseContext::addQualifierToExisting(const TSourceLoc& loc, TQualifier qualifier, TIdentifierList& identifiers) -{ - for (unsigned int i = 0; i < identifiers.size(); ++i) - addQualifierToExisting(loc, qualifier, *identifiers[i]); -} - -// Make sure 'invariant' isn't being applied to a non-allowed object. -void TParseContext::invariantCheck(const TSourceLoc& loc, const TQualifier& qualifier) -{ - if (! qualifier.invariant) - return; - - bool pipeOut = qualifier.isPipeOutput(); - bool pipeIn = qualifier.isPipeInput(); - if (version >= 300 || (profile != EEsProfile && version >= 420)) { - if (! pipeOut) - error(loc, "can only apply to an output", "invariant", ""); - } else { - if ((language == EShLangVertex && pipeIn) || (! pipeOut && ! pipeIn)) - error(loc, "can only apply to an output, or to an input in a non-vertex stage\n", "invariant", ""); - } -} - -// -// Updating default qualifier for the case of a declaration with just a qualifier, -// no type, block, or identifier. 
-// -void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, const TPublicType& publicType) -{ - if (publicType.shaderQualifiers.vertices != TQualifier::layoutNotSet) { - assert(language == EShLangTessControl || language == EShLangGeometry); - const char* id = (language == EShLangTessControl) ? "vertices" : "max_vertices"; - - if (publicType.qualifier.storage != EvqVaryingOut) - error(loc, "can only apply to 'out'", id, ""); - if (! intermediate.setVertices(publicType.shaderQualifiers.vertices)) - error(loc, "cannot change previously set layout value", id, ""); - - if (language == EShLangTessControl) - checkIoArraysConsistency(loc); - } - if (publicType.shaderQualifiers.invocations != TQualifier::layoutNotSet) { - if (publicType.qualifier.storage != EvqVaryingIn) - error(loc, "can only apply to 'in'", "invocations", ""); - if (! intermediate.setInvocations(publicType.shaderQualifiers.invocations)) - error(loc, "cannot change previously set layout value", "invocations", ""); - } - if (publicType.shaderQualifiers.geometry != ElgNone) { - if (publicType.qualifier.storage == EvqVaryingIn) { - switch (publicType.shaderQualifiers.geometry) { - case ElgPoints: - case ElgLines: - case ElgLinesAdjacency: - case ElgTriangles: - case ElgTrianglesAdjacency: - case ElgQuads: - case ElgIsolines: - if (intermediate.setInputPrimitive(publicType.shaderQualifiers.geometry)) { - if (language == EShLangGeometry) - checkIoArraysConsistency(loc); - } else - error(loc, "cannot change previously set input primitive", TQualifier::getGeometryString(publicType.shaderQualifiers.geometry), ""); - break; - default: - error(loc, "cannot apply to input", TQualifier::getGeometryString(publicType.shaderQualifiers.geometry), ""); - } - } else if (publicType.qualifier.storage == EvqVaryingOut) { - switch (publicType.shaderQualifiers.geometry) { - case ElgPoints: - case ElgLineStrip: - case ElgTriangleStrip: - if (! intermediate.setOutputPrimitive(publicType.shaderQualifiers.geometry)) - error(loc, "cannot change previously set output primitive", TQualifier::getGeometryString(publicType.shaderQualifiers.geometry), ""); - break; - default: - error(loc, "cannot apply to 'out'", TQualifier::getGeometryString(publicType.shaderQualifiers.geometry), ""); - } - } else - error(loc, "cannot apply to:", TQualifier::getGeometryString(publicType.shaderQualifiers.geometry), GetStorageQualifierString(publicType.qualifier.storage)); - } - if (publicType.shaderQualifiers.spacing != EvsNone) { - if (publicType.qualifier.storage == EvqVaryingIn) { - if (! intermediate.setVertexSpacing(publicType.shaderQualifiers.spacing)) - error(loc, "cannot change previously set vertex spacing", TQualifier::getVertexSpacingString(publicType.shaderQualifiers.spacing), ""); - } else - error(loc, "can only apply to 'in'", TQualifier::getVertexSpacingString(publicType.shaderQualifiers.spacing), ""); - } - if (publicType.shaderQualifiers.order != EvoNone) { - if (publicType.qualifier.storage == EvqVaryingIn) { - if (! 
intermediate.setVertexOrder(publicType.shaderQualifiers.order)) - error(loc, "cannot change previously set vertex order", TQualifier::getVertexOrderString(publicType.shaderQualifiers.order), ""); - } else - error(loc, "can only apply to 'in'", TQualifier::getVertexOrderString(publicType.shaderQualifiers.order), ""); - } - if (publicType.shaderQualifiers.pointMode) { - if (publicType.qualifier.storage == EvqVaryingIn) - intermediate.setPointMode(); - else - error(loc, "can only apply to 'in'", "point_mode", ""); - } - for (int i = 0; i < 3; ++i) { - if (publicType.shaderQualifiers.localSize[i] > 1) { - if (publicType.qualifier.storage == EvqVaryingIn) { - if (! intermediate.setLocalSize(i, publicType.shaderQualifiers.localSize[i])) - error(loc, "cannot change previously set size", "local_size", ""); - else { - int max = 0; - switch (i) { - case 0: max = resources.maxComputeWorkGroupSizeX; break; - case 1: max = resources.maxComputeWorkGroupSizeY; break; - case 2: max = resources.maxComputeWorkGroupSizeZ; break; - default: break; - } - if (intermediate.getLocalSize(i) > (unsigned int)max) - error(loc, "too large; see gl_MaxComputeWorkGroupSize", "local_size", ""); - - // Fix the existing constant gl_WorkGroupSize with this new information. - TVariable* workGroupSize = getEditableVariable("gl_WorkGroupSize"); - if (workGroupSize != nullptr) - workGroupSize->getWritableConstArray()[i].setUConst(intermediate.getLocalSize(i)); - } - } else - error(loc, "can only apply to 'in'", "local_size", ""); - } - if (publicType.shaderQualifiers.localSizeSpecId[i] != TQualifier::layoutNotSet) { - if (publicType.qualifier.storage == EvqVaryingIn) { - if (! intermediate.setLocalSizeSpecId(i, publicType.shaderQualifiers.localSizeSpecId[i])) - error(loc, "cannot change previously set size", "local_size", ""); - } else - error(loc, "can only apply to 'in'", "local_size id", ""); - // Set the workgroup built-in variable as a specialization constant - TVariable* workGroupSize = getEditableVariable("gl_WorkGroupSize"); - if (workGroupSize != nullptr) - workGroupSize->getWritableType().getQualifier().specConstant = true; - } - } - if (publicType.shaderQualifiers.earlyFragmentTests) { - if (publicType.qualifier.storage == EvqVaryingIn) - intermediate.setEarlyFragmentTests(); - else - error(loc, "can only apply to 'in'", "early_fragment_tests", ""); - } - if (publicType.shaderQualifiers.postDepthCoverage) { - if (publicType.qualifier.storage == EvqVaryingIn) - intermediate.setPostDepthCoverage(); - else - error(loc, "can only apply to 'in'", "post_coverage_coverage", ""); - } - if (publicType.shaderQualifiers.blendEquation) { - if (publicType.qualifier.storage != EvqVaryingOut) - error(loc, "can only apply to 'out'", "blend equation", ""); - } - - const TQualifier& qualifier = publicType.qualifier; - - if (qualifier.isAuxiliary() || - qualifier.isMemory() || - qualifier.isInterpolation() || - qualifier.precision != EpqNone) - error(loc, "cannot use auxiliary, memory, interpolation, or precision qualifier in a default qualifier declaration (declaration with no type)", "qualifier", ""); - // "The offset qualifier can only be used on block members of blocks..." - // "The align qualifier can only be used on blocks or block members..." 
- if (qualifier.hasOffset() || - qualifier.hasAlign()) - error(loc, "cannot use offset or align qualifiers in a default qualifier declaration (declaration with no type)", "layout qualifier", ""); - - layoutQualifierCheck(loc, qualifier); - - switch (qualifier.storage) { - case EvqUniform: - if (qualifier.hasMatrix()) - globalUniformDefaults.layoutMatrix = qualifier.layoutMatrix; - if (qualifier.hasPacking()) - globalUniformDefaults.layoutPacking = qualifier.layoutPacking; - break; - case EvqBuffer: - if (qualifier.hasMatrix()) - globalBufferDefaults.layoutMatrix = qualifier.layoutMatrix; - if (qualifier.hasPacking()) - globalBufferDefaults.layoutPacking = qualifier.layoutPacking; - break; - case EvqVaryingIn: - break; - case EvqVaryingOut: - if (qualifier.hasStream()) - globalOutputDefaults.layoutStream = qualifier.layoutStream; - if (qualifier.hasXfbBuffer()) - globalOutputDefaults.layoutXfbBuffer = qualifier.layoutXfbBuffer; - if (globalOutputDefaults.hasXfbBuffer() && qualifier.hasXfbStride()) { - if (! intermediate.setXfbBufferStride(globalOutputDefaults.layoutXfbBuffer, qualifier.layoutXfbStride)) - error(loc, "all stride settings must match for xfb buffer", "xfb_stride", "%d", qualifier.layoutXfbBuffer); - } - break; - default: - error(loc, "default qualifier requires 'uniform', 'buffer', 'in', or 'out' storage qualification", "", ""); - return; - } - - if (qualifier.hasBinding()) - error(loc, "cannot declare a default, include a type or full declaration", "binding", ""); - if (qualifier.hasAnyLocation()) - error(loc, "cannot declare a default, use a full declaration", "location/component/index", ""); - if (qualifier.hasXfbOffset()) - error(loc, "cannot declare a default, use a full declaration", "xfb_offset", ""); - if (qualifier.layoutPushConstant) - error(loc, "cannot declare a default, can only be used on a block", "push_constant", ""); - if (qualifier.hasSpecConstantId()) - error(loc, "cannot declare a default, can only be used on a scalar", "constant_id", ""); -} - -// -// Take the sequence of statements that has been built up since the last case/default, -// put it on the list of top-level nodes for the current (inner-most) switch statement, -// and follow that by the case/default we are on now. (See switch topology comment on -// TIntermSwitch.) 
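// Illustrative example (not taken from the original source): for a shader fragment such as
//
//     switch (x) {
//     case 0: a();
//     case 1:
//     default: b(); break;
//     }
//
// the sequence accumulated on switchSequenceStack.back() ends up as
//
//     [case 0] [{ a(); }] [case 1] [default] [{ b(); break; }]
//
// i.e. branch nodes interleaved with at most one statement aggregate between labels,
// which is the shape the duplicate-label check in the function below walks through.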
-// -void TParseContext::wrapupSwitchSubsequence(TIntermAggregate* statements, TIntermNode* branchNode) -{ - TIntermSequence* switchSequence = switchSequenceStack.back(); - - if (statements) { - if (switchSequence->size() == 0) - error(statements->getLoc(), "cannot have statements before first case/default label", "switch", ""); - statements->setOperator(EOpSequence); - switchSequence->push_back(statements); - } - if (branchNode) { - // check all previous cases for the same label (or both are 'default') - for (unsigned int s = 0; s < switchSequence->size(); ++s) { - TIntermBranch* prevBranch = (*switchSequence)[s]->getAsBranchNode(); - if (prevBranch) { - TIntermTyped* prevExpression = prevBranch->getExpression(); - TIntermTyped* newExpression = branchNode->getAsBranchNode()->getExpression(); - if (prevExpression == nullptr && newExpression == nullptr) - error(branchNode->getLoc(), "duplicate label", "default", ""); - else if (prevExpression != nullptr && - newExpression != nullptr && - prevExpression->getAsConstantUnion() && - newExpression->getAsConstantUnion() && - prevExpression->getAsConstantUnion()->getConstArray()[0].getIConst() == - newExpression->getAsConstantUnion()->getConstArray()[0].getIConst()) - error(branchNode->getLoc(), "duplicated value", "case", ""); - } - } - switchSequence->push_back(branchNode); - } -} - -// -// Turn the top-level node sequence built up of wrapupSwitchSubsequence9) -// into a switch node. -// -TIntermNode* TParseContext::addSwitch(const TSourceLoc& loc, TIntermTyped* expression, TIntermAggregate* lastStatements) -{ - profileRequires(loc, EEsProfile, 300, nullptr, "switch statements"); - profileRequires(loc, ENoProfile, 130, nullptr, "switch statements"); - - wrapupSwitchSubsequence(lastStatements, nullptr); - - if (expression == nullptr || - (expression->getBasicType() != EbtInt && expression->getBasicType() != EbtUint) || - expression->getType().isArray() || expression->getType().isMatrix() || expression->getType().isVector()) - error(loc, "condition must be a scalar integer expression", "switch", ""); - - // If there is nothing to do, drop the switch but still execute the expression - TIntermSequence* switchSequence = switchSequenceStack.back(); - if (switchSequence->size() == 0) - return expression; - - if (lastStatements == nullptr) { - // This was originally an ERRROR, because early versions of the specification said - // "it is an error to have no statement between a label and the end of the switch statement." - // The specifications were updated to remove this (being ill-defined what a "statement" was), - // so, this became a warning. However, 3.0 tests still check for the error. - if (profile == EEsProfile && version <= 300 && ! 
relaxedErrors()) - error(loc, "last case/default label not followed by statements", "switch", ""); - else - warn(loc, "last case/default label not followed by statements", "switch", ""); - - // emulate a break for error recovery - lastStatements = intermediate.makeAggregate(intermediate.addBranch(EOpBreak, loc)); - lastStatements->setOperator(EOpSequence); - switchSequence->push_back(lastStatements); - } - - TIntermAggregate* body = new TIntermAggregate(EOpSequence); - body->getSequence() = *switchSequenceStack.back(); - body->setLoc(loc); - - TIntermSwitch* switchNode = new TIntermSwitch(expression, body); - switchNode->setLoc(loc); - - return switchNode; -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/ParseHelper.h b/deps/glslang/glslang/MachineIndependent/ParseHelper.h deleted file mode 100644 index d04607a0..00000000 --- a/deps/glslang/glslang/MachineIndependent/ParseHelper.h +++ /dev/null @@ -1,470 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2012-2013 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -// -// This header defines a two-level parse-helper hierarchy, derived from -// TParseVersions: -// - TParseContextBase: sharable across multiple parsers -// - TParseContext: GLSL specific helper -// - -#ifndef _PARSER_HELPER_INCLUDED_ -#define _PARSER_HELPER_INCLUDED_ - -#include "parseVersions.h" -#include "../Include/ShHandle.h" -#include "SymbolTable.h" -#include "localintermediate.h" -#include "Scan.h" -#include -#include - -namespace glslang { - -struct TPragma { - TPragma(bool o, bool d) : optimize(o), debug(d) { } - bool optimize; - bool debug; - TPragmaTable pragmaTable; -}; - -class TScanContext; -class TPpContext; - -typedef std::set TIdSetType; - -// -// Sharable code (as well as what's in TParseVersions) across -// parse helpers. 
-// -class TParseContextBase : public TParseVersions { -public: - TParseContextBase(TSymbolTable& symbolTable, TIntermediate& interm, bool parsingBuiltins, int version, - EProfile profile, const SpvVersion& spvVersion, EShLanguage language, - TInfoSink& infoSink, bool forwardCompatible, EShMessages messages) - : TParseVersions(interm, version, profile, spvVersion, language, infoSink, forwardCompatible, messages), - scopeMangler("::"), - symbolTable(symbolTable), - statementNestingLevel(0), loopNestingLevel(0), structNestingLevel(0), controlFlowNestingLevel(0), - postEntryPointReturn(false), - contextPragma(true, false), - parsingBuiltins(parsingBuiltins), scanContext(nullptr), ppContext(nullptr), - limits(resources.limits), - globalUniformBlock(nullptr) - { } - virtual ~TParseContextBase() { } - - virtual void C_DECL error(const TSourceLoc&, const char* szReason, const char* szToken, - const char* szExtraInfoFormat, ...); - virtual void C_DECL warn(const TSourceLoc&, const char* szReason, const char* szToken, - const char* szExtraInfoFormat, ...); - virtual void C_DECL ppError(const TSourceLoc&, const char* szReason, const char* szToken, - const char* szExtraInfoFormat, ...); - virtual void C_DECL ppWarn(const TSourceLoc&, const char* szReason, const char* szToken, - const char* szExtraInfoFormat, ...); - - virtual void setLimits(const TBuiltInResource&) = 0; - - EShLanguage getLanguage() const { return language; } - void setScanContext(TScanContext* c) { scanContext = c; } - TScanContext* getScanContext() const { return scanContext; } - void setPpContext(TPpContext* c) { ppContext = c; } - TPpContext* getPpContext() const { return ppContext; } - - virtual void setLineCallback(const std::function& func) { lineCallback = func; } - virtual void setExtensionCallback(const std::function& func) { extensionCallback = func; } - virtual void setVersionCallback(const std::function& func) { versionCallback = func; } - virtual void setPragmaCallback(const std::function&)>& func) { pragmaCallback = func; } - virtual void setErrorCallback(const std::function& func) { errorCallback = func; } - - virtual void reservedPpErrorCheck(const TSourceLoc&, const char* name, const char* op) = 0; - virtual bool lineContinuationCheck(const TSourceLoc&, bool endOfComment) = 0; - virtual bool lineDirectiveShouldSetNextLine() const = 0; - virtual void handlePragma(const TSourceLoc&, const TVector&) = 0; - - virtual bool parseShaderStrings(TPpContext&, TInputScanner& input, bool versionWillBeError = false) = 0; - - virtual void notifyVersion(int line, int version, const char* type_string) - { - if (versionCallback) - versionCallback(line, version, type_string); - } - virtual void notifyErrorDirective(int line, const char* error_message) - { - if (errorCallback) - errorCallback(line, error_message); - } - virtual void notifyLineDirective(int curLineNo, int newLineNo, bool hasSource, int sourceNum, const char* sourceName) - { - if (lineCallback) - lineCallback(curLineNo, newLineNo, hasSource, sourceNum, sourceName); - } - virtual void notifyExtensionDirective(int line, const char* extension, const char* behavior) - { - if (extensionCallback) - extensionCallback(line, extension, behavior); - } - - // Manage the global uniform block (default uniforms in GLSL, $Global in HLSL) - virtual void growGlobalUniformBlock(const TSourceLoc&, TType&, const TString& memberName, TTypeList* typeList = nullptr); - - virtual bool lValueErrorCheck(const TSourceLoc&, const char* op, TIntermTyped*); - virtual void rValueErrorCheck(const 
TSourceLoc&, const char* op, TIntermTyped*); - - const char* const scopeMangler; - - // Basic parsing state, easily accessible to the grammar - - TSymbolTable& symbolTable; // symbol table that goes with the current language, version, and profile - int statementNestingLevel; // 0 if outside all flow control or compound statements - int loopNestingLevel; // 0 if outside all loops - int structNestingLevel; // 0 if outside blocks and structures - int controlFlowNestingLevel; // 0 if outside all flow control - const TType* currentFunctionType; // the return type of the function that's currently being parsed - bool functionReturnsValue; // true if a non-void function has a return - // if inside a function, true if the function is the entry point and this is after a return statement - bool postEntryPointReturn; - // case, node, case, case, node, ...; ensure only one node between cases; stack of them for nesting - TList switchSequenceStack; - // the statementNestingLevel the current switch statement is at, which must match the level of its case statements - TList switchLevel; - struct TPragma contextPragma; - -protected: - TParseContextBase(TParseContextBase&); - TParseContextBase& operator=(TParseContextBase&); - - const bool parsingBuiltins; // true if parsing built-in symbols/functions - TVector linkageSymbols; // these need to be transferred to 'linkage', after all editing is done - TScanContext* scanContext; - TPpContext* ppContext; - TBuiltInResource resources; - TLimits& limits; - - // These, if set, will be called when a line, pragma ... is preprocessed. - // They will be called with any parameters to the original directive. - std::function lineCallback; - std::function&)> pragmaCallback; - std::function versionCallback; - std::function extensionCallback; - std::function errorCallback; - - // see implementation for detail - const TFunction* selectFunction(const TVector, const TFunction&, - std::function, - std::function, - /* output */ bool& tie); - - virtual void parseSwizzleSelector(const TSourceLoc&, const TString&, int size, - TSwizzleSelectors&); - - // Manage the global uniform block (default uniforms in GLSL, $Global in HLSL) - TVariable* globalUniformBlock; // the actual block, inserted into the symbol table - int firstNewMember; // the index of the first member not yet inserted into the symbol table - // override this to set the language-specific name - virtual const char* getGlobalUniformBlockName() const { return ""; } - virtual void setUniformBlockDefaults(TType&) const { } - virtual void finalizeGlobalUniformBlockLayout(TVariable&) { } - virtual void outputMessage(const TSourceLoc&, const char* szReason, const char* szToken, - const char* szExtraInfoFormat, TPrefixType prefix, - va_list args); - virtual void trackLinkage(TSymbol& symbol); - virtual void makeEditable(TSymbol*&); - virtual TVariable* getEditableVariable(const char* name); - virtual void finish(); -}; - -// -// Manage the state for when to respect precision qualifiers and when to warn about -// the defaults being different than might be expected. 
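// Usage sketch (an assumed calling pattern, not taken from this file): a parse context
// for an ES shader might drive the class declared below roughly as follows:
//
//     TPrecisionManager pm;
//     pm.respectPrecisionQualifiers();            // ES profiles obey precision qualifiers
//     pm.warnAboutDefaults();                     // arm the "defaults differ" warning
//     pm.explicitIntDefaultSeen();                // saw "precision highp int;"
//     pm.explicitFloatDefaultSeen();              // saw "precision mediump float;"
//     bool warn = pm.shouldWarnAboutDefaults();   // false: both defaults were explicit
//
// The warning is only cleared once both the int and the float defaults have been set
// explicitly by the shader.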
-// -class TPrecisionManager { -public: - TPrecisionManager() : obey(false), warn(false), explicitIntDefault(false), explicitFloatDefault(false){ } - virtual ~TPrecisionManager() {} - - void respectPrecisionQualifiers() { obey = true; } - bool respectingPrecisionQualifiers() const { return obey; } - bool shouldWarnAboutDefaults() const { return warn; } - void defaultWarningGiven() { warn = false; } - void warnAboutDefaults() { warn = true; } - void explicitIntDefaultSeen() - { - explicitIntDefault = true; - if (explicitFloatDefault) - warn = false; - } - void explicitFloatDefaultSeen() - { - explicitFloatDefault = true; - if (explicitIntDefault) - warn = false; - } - -protected: - bool obey; // respect precision qualifiers - bool warn; // need to give a warning about the defaults - bool explicitIntDefault; // user set the default for int/uint - bool explicitFloatDefault; // user set the default for float -}; - -// -// GLSL-specific parse helper. Should have GLSL in the name, but that's -// too big of a change for comparing branches at the moment, and perhaps -// impacts downstream consumers as well. -// -class TParseContext : public TParseContextBase { -public: - TParseContext(TSymbolTable&, TIntermediate&, bool parsingBuiltins, int version, EProfile, const SpvVersion& spvVersion, EShLanguage, TInfoSink&, - bool forwardCompatible = false, EShMessages messages = EShMsgDefault); - virtual ~TParseContext(); - - bool obeyPrecisionQualifiers() const { return precisionManager.respectingPrecisionQualifiers(); }; - void setPrecisionDefaults(); - - void setLimits(const TBuiltInResource&) override; - bool parseShaderStrings(TPpContext&, TInputScanner& input, bool versionWillBeError = false) override; - void parserError(const char* s); // for bison's yyerror - - void reservedErrorCheck(const TSourceLoc&, const TString&); - void reservedPpErrorCheck(const TSourceLoc&, const char* name, const char* op) override; - bool lineContinuationCheck(const TSourceLoc&, bool endOfComment) override; - bool lineDirectiveShouldSetNextLine() const override; - bool builtInName(const TString&); - - void handlePragma(const TSourceLoc&, const TVector&) override; - TIntermTyped* handleVariable(const TSourceLoc&, TSymbol* symbol, const TString* string); - TIntermTyped* handleBracketDereference(const TSourceLoc&, TIntermTyped* base, TIntermTyped* index); - void checkIndex(const TSourceLoc&, const TType&, int& index); - void handleIndexLimits(const TSourceLoc&, TIntermTyped* base, TIntermTyped* index); - - void makeEditable(TSymbol*&) override; - bool isIoResizeArray(const TType&) const; - void fixIoArraySize(const TSourceLoc&, TType&); - void ioArrayCheck(const TSourceLoc&, const TType&, const TString& identifier); - void handleIoResizeArrayAccess(const TSourceLoc&, TIntermTyped* base); - void checkIoArraysConsistency(const TSourceLoc&, bool tailOnly = false); - int getIoArrayImplicitSize() const; - void checkIoArrayConsistency(const TSourceLoc&, int requiredSize, const char* feature, TType&, const TString&); - - TIntermTyped* handleBinaryMath(const TSourceLoc&, const char* str, TOperator op, TIntermTyped* left, TIntermTyped* right); - TIntermTyped* handleUnaryMath(const TSourceLoc&, const char* str, TOperator op, TIntermTyped* childNode); - TIntermTyped* handleDotDereference(const TSourceLoc&, TIntermTyped* base, const TString& field); - void blockMemberExtensionCheck(const TSourceLoc&, const TIntermTyped* base, const TString& field); - TFunction* handleFunctionDeclarator(const TSourceLoc&, TFunction& function, bool 
prototype); - TIntermAggregate* handleFunctionDefinition(const TSourceLoc&, TFunction&); - TIntermTyped* handleFunctionCall(const TSourceLoc&, TFunction*, TIntermNode*); - TIntermTyped* handleBuiltInFunctionCall(TSourceLoc, TIntermNode& arguments, const TFunction& function); - void computeBuiltinPrecisions(TIntermTyped&, const TFunction&); - TIntermNode* handleReturnValue(const TSourceLoc&, TIntermTyped*); - void checkLocation(const TSourceLoc&, TOperator); - TIntermTyped* handleLengthMethod(const TSourceLoc&, TFunction*, TIntermNode*); - void addInputArgumentConversions(const TFunction&, TIntermNode*&) const; - TIntermTyped* addOutputArgumentConversions(const TFunction&, TIntermAggregate&) const; - void builtInOpCheck(const TSourceLoc&, const TFunction&, TIntermOperator&); - void nonOpBuiltInCheck(const TSourceLoc&, const TFunction&, TIntermAggregate&); - void userFunctionCallCheck(const TSourceLoc&, TIntermAggregate&); - void samplerConstructorLocationCheck(const TSourceLoc&, const char* token, TIntermNode*); - TFunction* handleConstructorCall(const TSourceLoc&, const TPublicType&); - void handlePrecisionQualifier(const TSourceLoc&, TQualifier&, TPrecisionQualifier); - void checkPrecisionQualifier(const TSourceLoc&, TPrecisionQualifier); - - void assignError(const TSourceLoc&, const char* op, TString left, TString right); - void unaryOpError(const TSourceLoc&, const char* op, TString operand); - void binaryOpError(const TSourceLoc&, const char* op, TString left, TString right); - void variableCheck(TIntermTyped*& nodePtr); - bool lValueErrorCheck(const TSourceLoc&, const char* op, TIntermTyped*) override; - void rValueErrorCheck(const TSourceLoc&, const char* op, TIntermTyped*) override; - void constantValueCheck(TIntermTyped* node, const char* token); - void integerCheck(const TIntermTyped* node, const char* token); - void globalCheck(const TSourceLoc&, const char* token); - bool constructorError(const TSourceLoc&, TIntermNode*, TFunction&, TOperator, TType&); - bool constructorTextureSamplerError(const TSourceLoc&, const TFunction&); - void arraySizeCheck(const TSourceLoc&, TIntermTyped* expr, TArraySize&); - bool arrayQualifierError(const TSourceLoc&, const TQualifier&); - bool arrayError(const TSourceLoc&, const TType&); - void arraySizeRequiredCheck(const TSourceLoc&, const TArraySizes&); - void structArrayCheck(const TSourceLoc&, const TType& structure); - void arraySizesCheck(const TSourceLoc&, const TQualifier&, const TArraySizes*, bool initializer, bool lastMember); - void arrayOfArrayVersionCheck(const TSourceLoc&); - void arrayDimCheck(const TSourceLoc&, const TArraySizes* sizes1, const TArraySizes* sizes2); - void arrayDimCheck(const TSourceLoc&, const TType*, const TArraySizes*); - void arrayDimMerge(TType& type, const TArraySizes* sizes); - bool voidErrorCheck(const TSourceLoc&, const TString&, TBasicType); - void boolCheck(const TSourceLoc&, const TIntermTyped*); - void boolCheck(const TSourceLoc&, const TPublicType&); - void samplerCheck(const TSourceLoc&, const TType&, const TString& identifier, TIntermTyped* initializer); - void atomicUintCheck(const TSourceLoc&, const TType&, const TString& identifier); - void transparentOpaqueCheck(const TSourceLoc&, const TType&, const TString& identifier); - void globalQualifierFixCheck(const TSourceLoc&, TQualifier&); - void globalQualifierTypeCheck(const TSourceLoc&, const TQualifier&, const TPublicType&); - bool structQualifierErrorCheck(const TSourceLoc&, const TPublicType& pType); - void mergeQualifiers(const TSourceLoc&, 
TQualifier& dst, const TQualifier& src, bool force); - void setDefaultPrecision(const TSourceLoc&, TPublicType&, TPrecisionQualifier); - int computeSamplerTypeIndex(TSampler&); - TPrecisionQualifier getDefaultPrecision(TPublicType&); - void precisionQualifierCheck(const TSourceLoc&, TBasicType, TQualifier&); - void parameterTypeCheck(const TSourceLoc&, TStorageQualifier qualifier, const TType& type); - bool containsFieldWithBasicType(const TType& type ,TBasicType basicType); - TSymbol* redeclareBuiltinVariable(const TSourceLoc&, const TString&, const TQualifier&, const TShaderQualifiers&); - void redeclareBuiltinBlock(const TSourceLoc&, TTypeList& typeList, const TString& blockName, const TString* instanceName, TArraySizes* arraySizes); - void paramCheckFix(const TSourceLoc&, const TStorageQualifier&, TType& type); - void paramCheckFix(const TSourceLoc&, const TQualifier&, TType& type); - void nestedBlockCheck(const TSourceLoc&); - void nestedStructCheck(const TSourceLoc&); - void arrayObjectCheck(const TSourceLoc&, const TType&, const char* op); - void opaqueCheck(const TSourceLoc&, const TType&, const char* op); - void specializationCheck(const TSourceLoc&, const TType&, const char* op); - void structTypeCheck(const TSourceLoc&, TPublicType&); - void inductiveLoopCheck(const TSourceLoc&, TIntermNode* init, TIntermLoop* loop); - void arrayLimitCheck(const TSourceLoc&, const TString&, int size); - void limitCheck(const TSourceLoc&, int value, const char* limit, const char* feature); - - void inductiveLoopBodyCheck(TIntermNode*, int loopIndexId, TSymbolTable&); - void constantIndexExpressionCheck(TIntermNode*); - - void setLayoutQualifier(const TSourceLoc&, TPublicType&, TString&); - void setLayoutQualifier(const TSourceLoc&, TPublicType&, TString&, const TIntermTyped*); - void mergeObjectLayoutQualifiers(TQualifier& dest, const TQualifier& src, bool inheritOnly); - void layoutObjectCheck(const TSourceLoc&, const TSymbol&); - void layoutMemberLocationArrayCheck(const TSourceLoc&, bool memberWithLocation, TArraySizes* arraySizes); - void layoutTypeCheck(const TSourceLoc&, const TType&); - void layoutQualifierCheck(const TSourceLoc&, const TQualifier&); - void checkNoShaderLayouts(const TSourceLoc&, const TShaderQualifiers&); - void fixOffset(const TSourceLoc&, TSymbol&); - - const TFunction* findFunction(const TSourceLoc& loc, const TFunction& call, bool& builtIn); - const TFunction* findFunctionExact(const TSourceLoc& loc, const TFunction& call, bool& builtIn); - const TFunction* findFunction120(const TSourceLoc& loc, const TFunction& call, bool& builtIn); - const TFunction* findFunction400(const TSourceLoc& loc, const TFunction& call, bool& builtIn); - void declareTypeDefaults(const TSourceLoc&, const TPublicType&); - TIntermNode* declareVariable(const TSourceLoc&, TString& identifier, const TPublicType&, TArraySizes* typeArray = 0, TIntermTyped* initializer = 0); - TIntermTyped* addConstructor(const TSourceLoc&, TIntermNode*, const TType&); - TIntermTyped* constructAggregate(TIntermNode*, const TType&, int, const TSourceLoc&); - TIntermTyped* constructBuiltIn(const TType&, TOperator, TIntermTyped*, const TSourceLoc&, bool subset); - void declareBlock(const TSourceLoc&, TTypeList& typeList, const TString* instanceName = 0, TArraySizes* arraySizes = 0); - void blockStageIoCheck(const TSourceLoc&, const TQualifier&); - void blockQualifierCheck(const TSourceLoc&, const TQualifier&, bool instanceName); - void fixBlockLocations(const TSourceLoc&, TQualifier&, TTypeList&, bool 
memberWithLocation, bool memberWithoutLocation); - void fixBlockXfbOffsets(TQualifier&, TTypeList&); - void fixBlockUniformOffsets(TQualifier&, TTypeList&); - void addQualifierToExisting(const TSourceLoc&, TQualifier, const TString& identifier); - void addQualifierToExisting(const TSourceLoc&, TQualifier, TIdentifierList&); - void invariantCheck(const TSourceLoc&, const TQualifier&); - void updateStandaloneQualifierDefaults(const TSourceLoc&, const TPublicType&); - void wrapupSwitchSubsequence(TIntermAggregate* statements, TIntermNode* branchNode); - TIntermNode* addSwitch(const TSourceLoc&, TIntermTyped* expression, TIntermAggregate* body); - - void updateImplicitArraySize(const TSourceLoc&, TIntermNode*, int index); - -protected: - void nonInitConstCheck(const TSourceLoc&, TString& identifier, TType& type); - void inheritGlobalDefaults(TQualifier& dst) const; - TVariable* makeInternalVariable(const char* name, const TType&) const; - TVariable* declareNonArray(const TSourceLoc&, const TString& identifier, const TType&); - void declareArray(const TSourceLoc&, const TString& identifier, const TType&, TSymbol*&); - TIntermNode* executeInitializer(const TSourceLoc&, TIntermTyped* initializer, TVariable* variable); - TIntermTyped* convertInitializerList(const TSourceLoc&, const TType&, TIntermTyped* initializer); - void finish() override; - -public: - // - // Generally, bison productions, the scanner, and the PP need read/write access to these; just give them direct access - // - - // Current state of parsing - bool inMain; // if inside a function, true if the function is main - const TString* blockName; - TQualifier currentBlockQualifier; - TPrecisionQualifier defaultPrecision[EbtNumTypes]; - TBuiltInResource resources; - TLimits& limits; - -protected: - TParseContext(TParseContext&); - TParseContext& operator=(TParseContext&); - - static const int maxSamplerIndex = EsdNumDims * (EbtNumTypes * (2 * 2 * 2 * 2 * 2)); // see computeSamplerTypeIndex() - TPrecisionQualifier defaultSamplerPrecision[maxSamplerIndex]; - TPrecisionManager precisionManager; - TQualifier globalBufferDefaults; - TQualifier globalUniformDefaults; - TQualifier globalInputDefaults; - TQualifier globalOutputDefaults; - int* atomicUintOffsets; // to become an array of the right size to hold an offset per binding point - TString currentCaller; // name of last function body entered (not valid when at global scope) - TIdSetType inductiveLoopIds; - bool anyIndexLimits; - TVector needsIndexLimitationChecking; - - // - // Geometry shader input arrays: - // - array sizing is based on input primitive and/or explicit size - // - // Tessellation control output arrays: - // - array sizing is based on output layout(vertices=...) 
and/or explicit size - // - // Both: - // - array sizing is retroactive - // - built-in block redeclarations interact with this - // - // Design: - // - use a per-context "resize-list", a list of symbols whose array sizes - // can be fixed - // - // - the resize-list starts empty at beginning of user-shader compilation, it does - // not have built-ins in it - // - // - on built-in array use: copyUp() symbol and add it to the resize-list - // - // - on user array declaration: add it to the resize-list - // - // - on block redeclaration: copyUp() symbol and add it to the resize-list - // * note, that appropriately gives an error if redeclaring a block that - // was already used and hence already copied-up - // - // - on seeing a layout declaration that sizes the array, fix everything in the - // resize-list, giving errors for mismatch - // - // - on seeing an array size declaration, give errors on mismatch between it and previous - // array-sizing declarations - // - TVector ioArraySymbolResizeList; -}; - -} // end namespace glslang - -#endif // _PARSER_HELPER_INCLUDED_ diff --git a/deps/glslang/glslang/MachineIndependent/PoolAlloc.cpp b/deps/glslang/glslang/MachineIndependent/PoolAlloc.cpp deleted file mode 100644 index 4007c386..00000000 --- a/deps/glslang/glslang/MachineIndependent/PoolAlloc.cpp +++ /dev/null @@ -1,345 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. 
-// - -#include "../Include/Common.h" -#include "../Include/PoolAlloc.h" - -#include "../Include/InitializeGlobals.h" -#include "../OSDependent/osinclude.h" - -namespace glslang { - -OS_TLSIndex PoolIndex; - -void InitializeMemoryPools() -{ - TThreadMemoryPools* pools = static_cast(OS_GetTLSValue(PoolIndex)); - if (pools) - return; - - TPoolAllocator *threadPoolAllocator = new TPoolAllocator(); - - TThreadMemoryPools* threadData = new TThreadMemoryPools(); - - threadData->threadPoolAllocator = threadPoolAllocator; - - OS_SetTLSValue(PoolIndex, threadData); -} - -void FreeGlobalPools() -{ - // Release the allocated memory for this thread. - TThreadMemoryPools* globalPools = static_cast(OS_GetTLSValue(PoolIndex)); - if (! globalPools) - return; - - GetThreadPoolAllocator().popAll(); - delete &GetThreadPoolAllocator(); - delete globalPools; -} - -bool InitializePoolIndex() -{ - // Allocate a TLS index. - if ((PoolIndex = OS_AllocTLSIndex()) == OS_INVALID_TLS_INDEX) - return false; - - return true; -} - -void FreePoolIndex() -{ - // Release the TLS index. - OS_FreeTLSIndex(PoolIndex); -} - -TPoolAllocator& GetThreadPoolAllocator() -{ - TThreadMemoryPools* threadData = static_cast(OS_GetTLSValue(PoolIndex)); - - return *threadData->threadPoolAllocator; -} - -void SetThreadPoolAllocator(TPoolAllocator& poolAllocator) -{ - TThreadMemoryPools* threadData = static_cast(OS_GetTLSValue(PoolIndex)); - - threadData->threadPoolAllocator = &poolAllocator; -} - -// -// Implement the functionality of the TPoolAllocator class, which -// is documented in PoolAlloc.h. -// -TPoolAllocator::TPoolAllocator(int growthIncrement, int allocationAlignment) : - pageSize(growthIncrement), - alignment(allocationAlignment), - freeList(nullptr), - inUseList(nullptr), - numCalls(0) -{ - // - // Don't allow page sizes we know are smaller than all common - // OS page sizes. - // - if (pageSize < 4*1024) - pageSize = 4*1024; - - // - // A large currentPageOffset indicates a new page needs to - // be obtained to allocate memory. - // - currentPageOffset = pageSize; - - // - // Adjust alignment to be at least pointer aligned and - // power of 2. - // - size_t minAlign = sizeof(void*); - alignment &= ~(minAlign - 1); - if (alignment < minAlign) - alignment = minAlign; - size_t a = 1; - while (a < alignment) - a <<= 1; - alignment = a; - alignmentMask = a - 1; - - // - // Align header skip - // - headerSkip = minAlign; - if (headerSkip < sizeof(tHeader)) { - headerSkip = (sizeof(tHeader) + alignmentMask) & ~alignmentMask; - } - - push(); -} - -TPoolAllocator::~TPoolAllocator() -{ - while (inUseList) { - tHeader* next = inUseList->nextPage; - inUseList->~tHeader(); - delete [] reinterpret_cast(inUseList); - inUseList = next; - } - - // - // Always delete the free list memory - it can't be being - // (correctly) referenced, whether the pool allocator was - // global or not. We should not check the guard blocks - // here, because we did it already when the block was - // placed into the free list. 
- // - while (freeList) { - tHeader* next = freeList->nextPage; - delete [] reinterpret_cast(freeList); - freeList = next; - } -} - -const unsigned char TAllocation::guardBlockBeginVal = 0xfb; -const unsigned char TAllocation::guardBlockEndVal = 0xfe; -const unsigned char TAllocation::userDataFill = 0xcd; - -# ifdef GUARD_BLOCKS - const size_t TAllocation::guardBlockSize = 16; -# else - const size_t TAllocation::guardBlockSize = 0; -# endif - -// -// Check a single guard block for damage -// -#ifdef GUARD_BLOCKS -void TAllocation::checkGuardBlock(unsigned char* blockMem, unsigned char val, const char* locText) const -#else -void TAllocation::checkGuardBlock(unsigned char*, unsigned char, const char*) const -#endif -{ -#ifdef GUARD_BLOCKS - for (size_t x = 0; x < guardBlockSize; x++) { - if (blockMem[x] != val) { - const int maxSize = 80; - char assertMsg[maxSize]; - - // We don't print the assert message. It's here just to be helpful. - snprintf(assertMsg, maxSize, "PoolAlloc: Damage %s %zu byte allocation at 0x%p\n", - locText, size, data()); - assert(0 && "PoolAlloc: Damage in guard block"); - } - } -#else - assert(guardBlockSize == 0); -#endif -} - -void TPoolAllocator::push() -{ - tAllocState state = { currentPageOffset, inUseList }; - - stack.push_back(state); - - // - // Indicate there is no current page to allocate from. - // - currentPageOffset = pageSize; -} - -// -// Do a mass-deallocation of all the individual allocations -// that have occurred since the last push(), or since the -// last pop(), or since the object's creation. -// -// The deallocated pages are saved for future allocations. -// -void TPoolAllocator::pop() -{ - if (stack.size() < 1) - return; - - tHeader* page = stack.back().page; - currentPageOffset = stack.back().offset; - - while (inUseList != page) { - // invoke destructor to free allocation list - inUseList->~tHeader(); - - tHeader* nextInUse = inUseList->nextPage; - if (inUseList->pageCount > 1) - delete [] reinterpret_cast(inUseList); - else { - inUseList->nextPage = freeList; - freeList = inUseList; - } - inUseList = nextInUse; - } - - stack.pop_back(); -} - -// -// Do a mass-deallocation of all the individual allocations -// that have occurred. -// -void TPoolAllocator::popAll() -{ - while (stack.size() > 0) - pop(); -} - -void* TPoolAllocator::allocate(size_t numBytes) -{ - // If we are using guard blocks, all allocations are bracketed by - // them: [guardblock][allocation][guardblock]. numBytes is how - // much memory the caller asked for. allocationSize is the total - // size including guard blocks. In release build, - // guardBlockSize=0 and this all gets optimized away. - size_t allocationSize = TAllocation::allocationSize(numBytes); - - // - // Just keep some interesting statistics. - // - ++numCalls; - totalBytes += numBytes; - - // - // Do the allocation, most likely case first, for efficiency. - // This step could be moved to be inline sometime. - // - if (currentPageOffset + allocationSize <= pageSize) { - // - // Safe to allocate from currentPageOffset. - // - unsigned char* memory = reinterpret_cast(inUseList) + currentPageOffset; - currentPageOffset += allocationSize; - currentPageOffset = (currentPageOffset + alignmentMask) & ~alignmentMask; - - return initializeAllocation(inUseList, memory, numBytes); - } - - if (allocationSize + headerSkip > pageSize) { - // - // Do a multi-page allocation. Don't mix these with the others. - // The OS is efficient and allocating and free-ing multiple pages. 
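// Layout sketch for the multi-page path below (illustration only): the block is one
// contiguous ::new char[] allocation, headed by a placement-new'd tHeader, with the
// caller's bytes starting headerSkip bytes in:
//
//     [ tHeader | padding up to headerSkip | numBytes of user data ... ]
//
// The page count recorded in the header is a ceiling division over the page size:
//
//     size_t pageCount = (numBytesToAlloc + pageSize - 1) / pageSize;
//
// e.g. numBytesToAlloc of 9000 with a 4096-byte pageSize gives pageCount == 3.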
- // - size_t numBytesToAlloc = allocationSize + headerSkip; - tHeader* memory = reinterpret_cast(::new char[numBytesToAlloc]); - if (memory == 0) - return 0; - - // Use placement-new to initialize header - new(memory) tHeader(inUseList, (numBytesToAlloc + pageSize - 1) / pageSize); - inUseList = memory; - - currentPageOffset = pageSize; // make next allocation come from a new page - - // No guard blocks for multi-page allocations (yet) - return reinterpret_cast(reinterpret_cast(memory) + headerSkip); - } - - // - // Need a simple page to allocate from. - // - tHeader* memory; - if (freeList) { - memory = freeList; - freeList = freeList->nextPage; - } else { - memory = reinterpret_cast(::new char[pageSize]); - if (memory == 0) - return 0; - } - - // Use placement-new to initialize header - new(memory) tHeader(inUseList, 1); - inUseList = memory; - - unsigned char* ret = reinterpret_cast(inUseList) + headerSkip; - currentPageOffset = (headerSkip + allocationSize + alignmentMask) & ~alignmentMask; - - return initializeAllocation(inUseList, ret, numBytes); -} - -// -// Check all allocations in a list for damage by calling check on each. -// -void TAllocation::checkAllocList() const -{ - for (const TAllocation* alloc = this; alloc != 0; alloc = alloc->prevAlloc) - alloc->check(); -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/RemoveTree.cpp b/deps/glslang/glslang/MachineIndependent/RemoveTree.cpp deleted file mode 100644 index 1d33bfd2..00000000 --- a/deps/glslang/glslang/MachineIndependent/RemoveTree.cpp +++ /dev/null @@ -1,118 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2013 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#include "../Include/intermediate.h" -#include "RemoveTree.h" - -namespace glslang { - -// -// Code to recursively delete the intermediate tree. 
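// The traverser defined below is constructed as TIntermTraverser(false, false, true, false),
// i.e. post-visit only, so each node is visited (and deleted) only after all of its
// children have been processed. A minimal usage sketch (illustration only):
//
//     TIntermNode* root = ...;             // a fully built intermediate tree
//     glslang::RemoveAllTreeNodes(root);   // frees every node, children first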
-// -struct TRemoveTraverser : TIntermTraverser { - TRemoveTraverser() : TIntermTraverser(false, false, true, false) {} - - virtual void visitSymbol(TIntermSymbol* node) - { - delete node; - } - - virtual bool visitBinary(TVisit /* visit*/ , TIntermBinary* node) - { - delete node; - - return true; - } - - virtual bool visitUnary(TVisit /* visit */, TIntermUnary* node) - { - delete node; - - return true; - } - - virtual bool visitAggregate(TVisit /* visit*/ , TIntermAggregate* node) - { - delete node; - - return true; - } - - virtual bool visitSelection(TVisit /* visit*/ , TIntermSelection* node) - { - delete node; - - return true; - } - - virtual bool visitSwitch(TVisit /* visit*/ , TIntermSwitch* node) - { - delete node; - - return true; - } - - virtual void visitConstantUnion(TIntermConstantUnion* node) - { - delete node; - } - - virtual bool visitLoop(TVisit /* visit*/ , TIntermLoop* node) - { - delete node; - - return true; - } - - virtual bool visitBranch(TVisit /* visit*/ , TIntermBranch* node) - { - delete node; - - return true; - } -}; - -// -// Entry point. -// -void RemoveAllTreeNodes(TIntermNode* root) -{ - TRemoveTraverser it; - - root->traverse(&it); -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/RemoveTree.h b/deps/glslang/glslang/MachineIndependent/RemoveTree.h deleted file mode 100644 index 1ed01562..00000000 --- a/deps/glslang/glslang/MachineIndependent/RemoveTree.h +++ /dev/null @@ -1,41 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#pragma once - -namespace glslang { - -void RemoveAllTreeNodes(TIntermNode*); - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/Scan.cpp b/deps/glslang/glslang/MachineIndependent/Scan.cpp deleted file mode 100644 index 25122eec..00000000 --- a/deps/glslang/glslang/MachineIndependent/Scan.cpp +++ /dev/null @@ -1,1439 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2013 LunarG, Inc. 
-// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -// -// GLSL scanning, leveraging the scanning done by the preprocessor. -// - -#include -#include -#include - -#include "../Include/Types.h" -#include "SymbolTable.h" -#include "ParseHelper.h" -#include "glslang_tab.cpp.h" -#include "ScanContext.h" -#include "Scan.h" - -// preprocessor includes -#include "preprocessor/PpContext.h" -#include "preprocessor/PpTokens.h" - -// Required to avoid missing prototype warnings for some compilers -int yylex(YYSTYPE*, glslang::TParseContext&); - -namespace glslang { - -// read past any white space -void TInputScanner::consumeWhiteSpace(bool& foundNonSpaceTab) -{ - int c = peek(); // don't accidentally consume anything other than whitespace - while (c == ' ' || c == '\t' || c == '\r' || c == '\n') { - if (c == '\r' || c == '\n') - foundNonSpaceTab = true; - get(); - c = peek(); - } -} - -// return true if a comment was actually consumed -bool TInputScanner::consumeComment() -{ - if (peek() != '/') - return false; - - get(); // consume the '/' - int c = peek(); - if (c == '/') { - - // a '//' style comment - get(); // consume the second '/' - c = get(); - do { - while (c != EndOfInput && c != '\\' && c != '\r' && c != '\n') - c = get(); - - if (c == EndOfInput || c == '\r' || c == '\n') { - while (c == '\r' || c == '\n') - c = get(); - - // we reached the end of the comment - break; - } else { - // it's a '\', so we need to keep going, after skipping what's escaped - - // read the skipped character - c = get(); - - // if it's a two-character newline, skip both characters - if (c == '\r' && peek() == '\n') - get(); - c = get(); - } - } while (true); - - // put back the last non-comment character - if (c != EndOfInput) - unget(); - - return true; - } else if (c == '*') { - - // a '/*' style comment - get(); // consume the '*' - c = get(); - do { - while (c != EndOfInput && c != '*') - c = get(); - if (c == '*') { - c = get(); - if (c == '/') - break; // end of comment - // not end of comment - } else // end of input - 
break; - } while (true); - - return true; - } else { - // it's not a comment, put the '/' back - unget(); - - return false; - } -} - -// skip whitespace, then skip a comment, rinse, repeat -void TInputScanner::consumeWhitespaceComment(bool& foundNonSpaceTab) -{ - do { - consumeWhiteSpace(foundNonSpaceTab); - - // if not starting a comment now, then done - int c = peek(); - if (c != '/' || c == EndOfInput) - return; - - // skip potential comment - foundNonSpaceTab = true; - if (! consumeComment()) - return; - - } while (true); -} - -// Returns true if there was non-white space (e.g., a comment, newline) before the #version -// or no #version was found; otherwise, returns false. There is no error case, it always -// succeeds, but will leave version == 0 if no #version was found. -// -// Sets notFirstToken based on whether tokens (beyond white space and comments) -// appeared before the #version. -// -// N.B. does not attempt to leave input in any particular known state. The assumption -// is that scanning will start anew, following the rules for the chosen version/profile, -// and with a corresponding parsing context. -// -bool TInputScanner::scanVersion(int& version, EProfile& profile, bool& notFirstToken) -{ - // This function doesn't have to get all the semantics correct, - // just find the #version if there is a correct one present. - // The preprocessor will have the responsibility of getting all the semantics right. - - bool versionNotFirst = false; // means not first WRT comments and white space, nothing more - notFirstToken = false; // means not first WRT to real tokens - version = 0; // means not found - profile = ENoProfile; - - bool foundNonSpaceTab = false; - bool lookingInMiddle = false; - int c; - do { - if (lookingInMiddle) { - notFirstToken = true; - // make forward progress by finishing off the current line plus extra new lines - if (peek() == '\n' || peek() == '\r') { - while (peek() == '\n' || peek() == '\r') - get(); - } else - do { - c = get(); - } while (c != EndOfInput && c != '\n' && c != '\r'); - while (peek() == '\n' || peek() == '\r') - get(); - if (peek() == EndOfInput) - return true; - } - lookingInMiddle = true; - - // Nominal start, skipping the desktop allowed comments and white space, but tracking if - // something else was found for ES: - consumeWhitespaceComment(foundNonSpaceTab); - if (foundNonSpaceTab) - versionNotFirst = true; - - // "#" - if (get() != '#') { - versionNotFirst = true; - continue; - } - - // whitespace - do { - c = get(); - } while (c == ' ' || c == '\t'); - - // "version" - if ( c != 'v' || - get() != 'e' || - get() != 'r' || - get() != 's' || - get() != 'i' || - get() != 'o' || - get() != 'n') { - versionNotFirst = true; - continue; - } - - // whitespace - do { - c = get(); - } while (c == ' ' || c == '\t'); - - // version number - while (c >= '0' && c <= '9') { - version = 10 * version + (c - '0'); - c = get(); - } - if (version == 0) { - versionNotFirst = true; - continue; - } - - // whitespace - while (c == ' ' || c == '\t') - c = get(); - - // profile - const int maxProfileLength = 13; // not including any 0 - char profileString[maxProfileLength]; - int profileLength; - for (profileLength = 0; profileLength < maxProfileLength; ++profileLength) { - if (c == EndOfInput || c == ' ' || c == '\t' || c == '\n' || c == '\r') - break; - profileString[profileLength] = (char)c; - c = get(); - } - if (c != EndOfInput && c != ' ' && c != '\t' && c != '\n' && c != '\r') { - versionNotFirst = true; - continue; - } - - if (profileLength == 2 && 
strncmp(profileString, "es", profileLength) == 0) - profile = EEsProfile; - else if (profileLength == 4 && strncmp(profileString, "core", profileLength) == 0) - profile = ECoreProfile; - else if (profileLength == 13 && strncmp(profileString, "compatibility", profileLength) == 0) - profile = ECompatibilityProfile; - - return versionNotFirst; - } while (true); -} - -// Fill this in when doing glslang-level scanning, to hand back to the parser. -class TParserToken { -public: - explicit TParserToken(YYSTYPE& b) : sType(b) { } - - YYSTYPE& sType; -protected: - TParserToken(TParserToken&); - TParserToken& operator=(TParserToken&); -}; - -} // end namespace glslang - -// This is the function the glslang parser (i.e., bison) calls to get its next token -int yylex(YYSTYPE* glslangTokenDesc, glslang::TParseContext& parseContext) -{ - glslang::TParserToken token(*glslangTokenDesc); - - return parseContext.getScanContext()->tokenize(parseContext.getPpContext(), token); -} - -namespace { - -struct str_eq -{ - bool operator()(const char* lhs, const char* rhs) const - { - return strcmp(lhs, rhs) == 0; - } -}; - -struct str_hash -{ - size_t operator()(const char* str) const - { - // djb2 - unsigned long hash = 5381; - int c; - - while ((c = *str++) != 0) - hash = ((hash << 5) + hash) + c; - - return hash; - } -}; - -// A single global usable by all threads, by all versions, by all languages. -// After a single process-level initialization, this is read only and thread safe -std::unordered_map* KeywordMap = nullptr; -std::unordered_set* ReservedSet = nullptr; - -}; - -namespace glslang { - -void TScanContext::fillInKeywordMap() -{ - if (KeywordMap != nullptr) { - // this is really an error, as this should called only once per process - // but, the only risk is if two threads called simultaneously - return; - } - KeywordMap = new std::unordered_map; - - (*KeywordMap)["const"] = CONST; - (*KeywordMap)["uniform"] = UNIFORM; - (*KeywordMap)["in"] = IN; - (*KeywordMap)["out"] = OUT; - (*KeywordMap)["inout"] = INOUT; - (*KeywordMap)["struct"] = STRUCT; - (*KeywordMap)["break"] = BREAK; - (*KeywordMap)["continue"] = CONTINUE; - (*KeywordMap)["do"] = DO; - (*KeywordMap)["for"] = FOR; - (*KeywordMap)["while"] = WHILE; - (*KeywordMap)["switch"] = SWITCH; - (*KeywordMap)["case"] = CASE; - (*KeywordMap)["default"] = DEFAULT; - (*KeywordMap)["if"] = IF; - (*KeywordMap)["else"] = ELSE; - (*KeywordMap)["discard"] = DISCARD; - (*KeywordMap)["return"] = RETURN; - (*KeywordMap)["void"] = VOID; - (*KeywordMap)["bool"] = BOOL; - (*KeywordMap)["float"] = FLOAT; - (*KeywordMap)["int"] = INT; - (*KeywordMap)["bvec2"] = BVEC2; - (*KeywordMap)["bvec3"] = BVEC3; - (*KeywordMap)["bvec4"] = BVEC4; - (*KeywordMap)["vec2"] = VEC2; - (*KeywordMap)["vec3"] = VEC3; - (*KeywordMap)["vec4"] = VEC4; - (*KeywordMap)["ivec2"] = IVEC2; - (*KeywordMap)["ivec3"] = IVEC3; - (*KeywordMap)["ivec4"] = IVEC4; - (*KeywordMap)["mat2"] = MAT2; - (*KeywordMap)["mat3"] = MAT3; - (*KeywordMap)["mat4"] = MAT4; - (*KeywordMap)["true"] = BOOLCONSTANT; - (*KeywordMap)["false"] = BOOLCONSTANT; - (*KeywordMap)["attribute"] = ATTRIBUTE; - (*KeywordMap)["varying"] = VARYING; - (*KeywordMap)["buffer"] = BUFFER; - (*KeywordMap)["coherent"] = COHERENT; - (*KeywordMap)["restrict"] = RESTRICT; - (*KeywordMap)["readonly"] = READONLY; - (*KeywordMap)["writeonly"] = WRITEONLY; - (*KeywordMap)["atomic_uint"] = ATOMIC_UINT; - (*KeywordMap)["volatile"] = VOLATILE; - (*KeywordMap)["layout"] = LAYOUT; - (*KeywordMap)["shared"] = SHARED; - (*KeywordMap)["patch"] = PATCH; - 
(*KeywordMap)["sample"] = SAMPLE; - (*KeywordMap)["subroutine"] = SUBROUTINE; - (*KeywordMap)["highp"] = HIGH_PRECISION; - (*KeywordMap)["mediump"] = MEDIUM_PRECISION; - (*KeywordMap)["lowp"] = LOW_PRECISION; - (*KeywordMap)["precision"] = PRECISION; - (*KeywordMap)["mat2x2"] = MAT2X2; - (*KeywordMap)["mat2x3"] = MAT2X3; - (*KeywordMap)["mat2x4"] = MAT2X4; - (*KeywordMap)["mat3x2"] = MAT3X2; - (*KeywordMap)["mat3x3"] = MAT3X3; - (*KeywordMap)["mat3x4"] = MAT3X4; - (*KeywordMap)["mat4x2"] = MAT4X2; - (*KeywordMap)["mat4x3"] = MAT4X3; - (*KeywordMap)["mat4x4"] = MAT4X4; - (*KeywordMap)["dmat2"] = DMAT2; - (*KeywordMap)["dmat3"] = DMAT3; - (*KeywordMap)["dmat4"] = DMAT4; - (*KeywordMap)["dmat2x2"] = DMAT2X2; - (*KeywordMap)["dmat2x3"] = DMAT2X3; - (*KeywordMap)["dmat2x4"] = DMAT2X4; - (*KeywordMap)["dmat3x2"] = DMAT3X2; - (*KeywordMap)["dmat3x3"] = DMAT3X3; - (*KeywordMap)["dmat3x4"] = DMAT3X4; - (*KeywordMap)["dmat4x2"] = DMAT4X2; - (*KeywordMap)["dmat4x3"] = DMAT4X3; - (*KeywordMap)["dmat4x4"] = DMAT4X4; - (*KeywordMap)["image1D"] = IMAGE1D; - (*KeywordMap)["iimage1D"] = IIMAGE1D; - (*KeywordMap)["uimage1D"] = UIMAGE1D; - (*KeywordMap)["image2D"] = IMAGE2D; - (*KeywordMap)["iimage2D"] = IIMAGE2D; - (*KeywordMap)["uimage2D"] = UIMAGE2D; - (*KeywordMap)["image3D"] = IMAGE3D; - (*KeywordMap)["iimage3D"] = IIMAGE3D; - (*KeywordMap)["uimage3D"] = UIMAGE3D; - (*KeywordMap)["image2DRect"] = IMAGE2DRECT; - (*KeywordMap)["iimage2DRect"] = IIMAGE2DRECT; - (*KeywordMap)["uimage2DRect"] = UIMAGE2DRECT; - (*KeywordMap)["imageCube"] = IMAGECUBE; - (*KeywordMap)["iimageCube"] = IIMAGECUBE; - (*KeywordMap)["uimageCube"] = UIMAGECUBE; - (*KeywordMap)["imageBuffer"] = IMAGEBUFFER; - (*KeywordMap)["iimageBuffer"] = IIMAGEBUFFER; - (*KeywordMap)["uimageBuffer"] = UIMAGEBUFFER; - (*KeywordMap)["image1DArray"] = IMAGE1DARRAY; - (*KeywordMap)["iimage1DArray"] = IIMAGE1DARRAY; - (*KeywordMap)["uimage1DArray"] = UIMAGE1DARRAY; - (*KeywordMap)["image2DArray"] = IMAGE2DARRAY; - (*KeywordMap)["iimage2DArray"] = IIMAGE2DARRAY; - (*KeywordMap)["uimage2DArray"] = UIMAGE2DARRAY; - (*KeywordMap)["imageCubeArray"] = IMAGECUBEARRAY; - (*KeywordMap)["iimageCubeArray"] = IIMAGECUBEARRAY; - (*KeywordMap)["uimageCubeArray"] = UIMAGECUBEARRAY; - (*KeywordMap)["image2DMS"] = IMAGE2DMS; - (*KeywordMap)["iimage2DMS"] = IIMAGE2DMS; - (*KeywordMap)["uimage2DMS"] = UIMAGE2DMS; - (*KeywordMap)["image2DMSArray"] = IMAGE2DMSARRAY; - (*KeywordMap)["iimage2DMSArray"] = IIMAGE2DMSARRAY; - (*KeywordMap)["uimage2DMSArray"] = UIMAGE2DMSARRAY; - (*KeywordMap)["double"] = DOUBLE; - (*KeywordMap)["dvec2"] = DVEC2; - (*KeywordMap)["dvec3"] = DVEC3; - (*KeywordMap)["dvec4"] = DVEC4; - (*KeywordMap)["uint"] = UINT; - (*KeywordMap)["uvec2"] = UVEC2; - (*KeywordMap)["uvec3"] = UVEC3; - (*KeywordMap)["uvec4"] = UVEC4; - - (*KeywordMap)["int64_t"] = INT64_T; - (*KeywordMap)["uint64_t"] = UINT64_T; - (*KeywordMap)["i64vec2"] = I64VEC2; - (*KeywordMap)["i64vec3"] = I64VEC3; - (*KeywordMap)["i64vec4"] = I64VEC4; - (*KeywordMap)["u64vec2"] = U64VEC2; - (*KeywordMap)["u64vec3"] = U64VEC3; - (*KeywordMap)["u64vec4"] = U64VEC4; - -#ifdef AMD_EXTENSIONS - (*KeywordMap)["int16_t"] = INT16_T; - (*KeywordMap)["uint16_t"] = UINT16_T; - (*KeywordMap)["i16vec2"] = I16VEC2; - (*KeywordMap)["i16vec3"] = I16VEC3; - (*KeywordMap)["i16vec4"] = I16VEC4; - (*KeywordMap)["u16vec2"] = U16VEC2; - (*KeywordMap)["u16vec3"] = U16VEC3; - (*KeywordMap)["u16vec4"] = U16VEC4; - - (*KeywordMap)["float16_t"] = FLOAT16_T; - (*KeywordMap)["f16vec2"] = F16VEC2; - (*KeywordMap)["f16vec3"] = 
F16VEC3; - (*KeywordMap)["f16vec4"] = F16VEC4; - (*KeywordMap)["f16mat2"] = F16MAT2; - (*KeywordMap)["f16mat3"] = F16MAT3; - (*KeywordMap)["f16mat4"] = F16MAT4; - (*KeywordMap)["f16mat2x2"] = F16MAT2X2; - (*KeywordMap)["f16mat2x3"] = F16MAT2X3; - (*KeywordMap)["f16mat2x4"] = F16MAT2X4; - (*KeywordMap)["f16mat3x2"] = F16MAT3X2; - (*KeywordMap)["f16mat3x3"] = F16MAT3X3; - (*KeywordMap)["f16mat3x4"] = F16MAT3X4; - (*KeywordMap)["f16mat4x2"] = F16MAT4X2; - (*KeywordMap)["f16mat4x3"] = F16MAT4X3; - (*KeywordMap)["f16mat4x4"] = F16MAT4X4; -#endif - - (*KeywordMap)["sampler2D"] = SAMPLER2D; - (*KeywordMap)["samplerCube"] = SAMPLERCUBE; - (*KeywordMap)["samplerCubeArray"] = SAMPLERCUBEARRAY; - (*KeywordMap)["samplerCubeArrayShadow"] = SAMPLERCUBEARRAYSHADOW; - (*KeywordMap)["isamplerCubeArray"] = ISAMPLERCUBEARRAY; - (*KeywordMap)["usamplerCubeArray"] = USAMPLERCUBEARRAY; - (*KeywordMap)["sampler1DArrayShadow"] = SAMPLER1DARRAYSHADOW; - (*KeywordMap)["isampler1DArray"] = ISAMPLER1DARRAY; - (*KeywordMap)["usampler1D"] = USAMPLER1D; - (*KeywordMap)["isampler1D"] = ISAMPLER1D; - (*KeywordMap)["usampler1DArray"] = USAMPLER1DARRAY; - (*KeywordMap)["samplerBuffer"] = SAMPLERBUFFER; - (*KeywordMap)["samplerCubeShadow"] = SAMPLERCUBESHADOW; - (*KeywordMap)["sampler2DArray"] = SAMPLER2DARRAY; - (*KeywordMap)["sampler2DArrayShadow"] = SAMPLER2DARRAYSHADOW; - (*KeywordMap)["isampler2D"] = ISAMPLER2D; - (*KeywordMap)["isampler3D"] = ISAMPLER3D; - (*KeywordMap)["isamplerCube"] = ISAMPLERCUBE; - (*KeywordMap)["isampler2DArray"] = ISAMPLER2DARRAY; - (*KeywordMap)["usampler2D"] = USAMPLER2D; - (*KeywordMap)["usampler3D"] = USAMPLER3D; - (*KeywordMap)["usamplerCube"] = USAMPLERCUBE; - (*KeywordMap)["usampler2DArray"] = USAMPLER2DARRAY; - (*KeywordMap)["isampler2DRect"] = ISAMPLER2DRECT; - (*KeywordMap)["usampler2DRect"] = USAMPLER2DRECT; - (*KeywordMap)["isamplerBuffer"] = ISAMPLERBUFFER; - (*KeywordMap)["usamplerBuffer"] = USAMPLERBUFFER; - (*KeywordMap)["sampler2DMS"] = SAMPLER2DMS; - (*KeywordMap)["isampler2DMS"] = ISAMPLER2DMS; - (*KeywordMap)["usampler2DMS"] = USAMPLER2DMS; - (*KeywordMap)["sampler2DMSArray"] = SAMPLER2DMSARRAY; - (*KeywordMap)["isampler2DMSArray"] = ISAMPLER2DMSARRAY; - (*KeywordMap)["usampler2DMSArray"] = USAMPLER2DMSARRAY; - (*KeywordMap)["sampler1D"] = SAMPLER1D; - (*KeywordMap)["sampler1DShadow"] = SAMPLER1DSHADOW; - (*KeywordMap)["sampler3D"] = SAMPLER3D; - (*KeywordMap)["sampler2DShadow"] = SAMPLER2DSHADOW; - (*KeywordMap)["sampler2DRect"] = SAMPLER2DRECT; - (*KeywordMap)["sampler2DRectShadow"] = SAMPLER2DRECTSHADOW; - (*KeywordMap)["sampler1DArray"] = SAMPLER1DARRAY; - - (*KeywordMap)["samplerExternalOES"] = SAMPLEREXTERNALOES; // GL_OES_EGL_image_external - - (*KeywordMap)["sampler"] = SAMPLER; - (*KeywordMap)["samplerShadow"] = SAMPLERSHADOW; - - (*KeywordMap)["texture2D"] = TEXTURE2D; - (*KeywordMap)["textureCube"] = TEXTURECUBE; - (*KeywordMap)["textureCubeArray"] = TEXTURECUBEARRAY; - (*KeywordMap)["itextureCubeArray"] = ITEXTURECUBEARRAY; - (*KeywordMap)["utextureCubeArray"] = UTEXTURECUBEARRAY; - (*KeywordMap)["itexture1DArray"] = ITEXTURE1DARRAY; - (*KeywordMap)["utexture1D"] = UTEXTURE1D; - (*KeywordMap)["itexture1D"] = ITEXTURE1D; - (*KeywordMap)["utexture1DArray"] = UTEXTURE1DARRAY; - (*KeywordMap)["textureBuffer"] = TEXTUREBUFFER; - (*KeywordMap)["texture2DArray"] = TEXTURE2DARRAY; - (*KeywordMap)["itexture2D"] = ITEXTURE2D; - (*KeywordMap)["itexture3D"] = ITEXTURE3D; - (*KeywordMap)["itextureCube"] = ITEXTURECUBE; - (*KeywordMap)["itexture2DArray"] = ITEXTURE2DARRAY; - 
(*KeywordMap)["utexture2D"] = UTEXTURE2D; - (*KeywordMap)["utexture3D"] = UTEXTURE3D; - (*KeywordMap)["utextureCube"] = UTEXTURECUBE; - (*KeywordMap)["utexture2DArray"] = UTEXTURE2DARRAY; - (*KeywordMap)["itexture2DRect"] = ITEXTURE2DRECT; - (*KeywordMap)["utexture2DRect"] = UTEXTURE2DRECT; - (*KeywordMap)["itextureBuffer"] = ITEXTUREBUFFER; - (*KeywordMap)["utextureBuffer"] = UTEXTUREBUFFER; - (*KeywordMap)["texture2DMS"] = TEXTURE2DMS; - (*KeywordMap)["itexture2DMS"] = ITEXTURE2DMS; - (*KeywordMap)["utexture2DMS"] = UTEXTURE2DMS; - (*KeywordMap)["texture2DMSArray"] = TEXTURE2DMSARRAY; - (*KeywordMap)["itexture2DMSArray"] = ITEXTURE2DMSARRAY; - (*KeywordMap)["utexture2DMSArray"] = UTEXTURE2DMSARRAY; - (*KeywordMap)["texture1D"] = TEXTURE1D; - (*KeywordMap)["texture3D"] = TEXTURE3D; - (*KeywordMap)["texture2DRect"] = TEXTURE2DRECT; - (*KeywordMap)["texture1DArray"] = TEXTURE1DARRAY; - - (*KeywordMap)["subpassInput"] = SUBPASSINPUT; - (*KeywordMap)["subpassInputMS"] = SUBPASSINPUTMS; - (*KeywordMap)["isubpassInput"] = ISUBPASSINPUT; - (*KeywordMap)["isubpassInputMS"] = ISUBPASSINPUTMS; - (*KeywordMap)["usubpassInput"] = USUBPASSINPUT; - (*KeywordMap)["usubpassInputMS"] = USUBPASSINPUTMS; - - (*KeywordMap)["noperspective"] = NOPERSPECTIVE; - (*KeywordMap)["smooth"] = SMOOTH; - (*KeywordMap)["flat"] = FLAT; -#ifdef AMD_EXTENSIONS - (*KeywordMap)["__explicitInterpAMD"] = __EXPLICITINTERPAMD; -#endif - (*KeywordMap)["centroid"] = CENTROID; - (*KeywordMap)["precise"] = PRECISE; - (*KeywordMap)["invariant"] = INVARIANT; - (*KeywordMap)["packed"] = PACKED; - (*KeywordMap)["resource"] = RESOURCE; - (*KeywordMap)["superp"] = SUPERP; - - ReservedSet = new std::unordered_set; - - ReservedSet->insert("common"); - ReservedSet->insert("partition"); - ReservedSet->insert("active"); - ReservedSet->insert("asm"); - ReservedSet->insert("class"); - ReservedSet->insert("union"); - ReservedSet->insert("enum"); - ReservedSet->insert("typedef"); - ReservedSet->insert("template"); - ReservedSet->insert("this"); - ReservedSet->insert("goto"); - ReservedSet->insert("inline"); - ReservedSet->insert("noinline"); - ReservedSet->insert("public"); - ReservedSet->insert("static"); - ReservedSet->insert("extern"); - ReservedSet->insert("external"); - ReservedSet->insert("interface"); - ReservedSet->insert("long"); - ReservedSet->insert("short"); - ReservedSet->insert("half"); - ReservedSet->insert("fixed"); - ReservedSet->insert("unsigned"); - ReservedSet->insert("input"); - ReservedSet->insert("output"); - ReservedSet->insert("hvec2"); - ReservedSet->insert("hvec3"); - ReservedSet->insert("hvec4"); - ReservedSet->insert("fvec2"); - ReservedSet->insert("fvec3"); - ReservedSet->insert("fvec4"); - ReservedSet->insert("sampler3DRect"); - ReservedSet->insert("filter"); - ReservedSet->insert("sizeof"); - ReservedSet->insert("cast"); - ReservedSet->insert("namespace"); - ReservedSet->insert("using"); -} - -void TScanContext::deleteKeywordMap() -{ - delete KeywordMap; - KeywordMap = nullptr; - delete ReservedSet; - ReservedSet = nullptr; -} - -// Called by yylex to get the next token. -// Returning 0 implies end of input. 
-int TScanContext::tokenize(TPpContext* pp, TParserToken& token) -{ - do { - parserToken = &token; - TPpToken ppToken; - int token = pp->tokenize(ppToken); - if (token == EndOfInput) - return 0; - - tokenText = ppToken.name; - loc = ppToken.loc; - parserToken->sType.lex.loc = loc; - switch (token) { - case ';': afterType = false; return SEMICOLON; - case ',': afterType = false; return COMMA; - case ':': return COLON; - case '=': afterType = false; return EQUAL; - case '(': afterType = false; return LEFT_PAREN; - case ')': afterType = false; return RIGHT_PAREN; - case '.': field = true; return DOT; - case '!': return BANG; - case '-': return DASH; - case '~': return TILDE; - case '+': return PLUS; - case '*': return STAR; - case '/': return SLASH; - case '%': return PERCENT; - case '<': return LEFT_ANGLE; - case '>': return RIGHT_ANGLE; - case '|': return VERTICAL_BAR; - case '^': return CARET; - case '&': return AMPERSAND; - case '?': return QUESTION; - case '[': return LEFT_BRACKET; - case ']': return RIGHT_BRACKET; - case '{': return LEFT_BRACE; - case '}': return RIGHT_BRACE; - case '\\': - parseContext.error(loc, "illegal use of escape character", "\\", ""); - break; - - case PPAtomAddAssign: return ADD_ASSIGN; - case PPAtomSubAssign: return SUB_ASSIGN; - case PPAtomMulAssign: return MUL_ASSIGN; - case PPAtomDivAssign: return DIV_ASSIGN; - case PPAtomModAssign: return MOD_ASSIGN; - - case PpAtomRight: return RIGHT_OP; - case PpAtomLeft: return LEFT_OP; - - case PpAtomRightAssign: return RIGHT_ASSIGN; - case PpAtomLeftAssign: return LEFT_ASSIGN; - case PpAtomAndAssign: return AND_ASSIGN; - case PpAtomOrAssign: return OR_ASSIGN; - case PpAtomXorAssign: return XOR_ASSIGN; - - case PpAtomAnd: return AND_OP; - case PpAtomOr: return OR_OP; - case PpAtomXor: return XOR_OP; - - case PpAtomEQ: return EQ_OP; - case PpAtomGE: return GE_OP; - case PpAtomNE: return NE_OP; - case PpAtomLE: return LE_OP; - - case PpAtomDecrement: return DEC_OP; - case PpAtomIncrement: return INC_OP; - - case PpAtomColonColon: - parseContext.error(loc, "not supported", "::", ""); - break; - - case PpAtomConstInt: parserToken->sType.lex.i = ppToken.ival; return INTCONSTANT; - case PpAtomConstUint: parserToken->sType.lex.i = ppToken.ival; return UINTCONSTANT; - case PpAtomConstInt64: parserToken->sType.lex.i64 = ppToken.i64val; return INT64CONSTANT; - case PpAtomConstUint64: parserToken->sType.lex.i64 = ppToken.i64val; return UINT64CONSTANT; -#ifdef AMD_EXTENSIONS - case PpAtomConstInt16: parserToken->sType.lex.i = ppToken.ival; return INT16CONSTANT; - case PpAtomConstUint16: parserToken->sType.lex.i = ppToken.ival; return UINT16CONSTANT; -#endif - case PpAtomConstFloat: parserToken->sType.lex.d = ppToken.dval; return FLOATCONSTANT; - case PpAtomConstDouble: parserToken->sType.lex.d = ppToken.dval; return DOUBLECONSTANT; -#ifdef AMD_EXTENSIONS - case PpAtomConstFloat16: parserToken->sType.lex.d = ppToken.dval; return FLOAT16CONSTANT; -#endif - case PpAtomIdentifier: - { - int token = tokenizeIdentifier(); - field = false; - return token; - } - - case EndOfInput: return 0; - - default: - char buf[2]; - buf[0] = (char)token; - buf[1] = 0; - parseContext.error(loc, "unexpected token", buf, ""); - break; - } - } while (true); -} - -int TScanContext::tokenizeIdentifier() -{ - if (ReservedSet->find(tokenText) != ReservedSet->end()) - return reservedWord(); - - auto it = KeywordMap->find(tokenText); - if (it == KeywordMap->end()) { - // Should have an identifier of some sort - return identifierOrType(); - } - keyword = 
it->second; - - switch (keyword) { - case CONST: - case UNIFORM: - case IN: - case OUT: - case INOUT: - case STRUCT: - case BREAK: - case CONTINUE: - case DO: - case FOR: - case WHILE: - case IF: - case ELSE: - case DISCARD: - case RETURN: - case CASE: - return keyword; - - case SWITCH: - case DEFAULT: - if ((parseContext.profile == EEsProfile && parseContext.version < 300) || - (parseContext.profile != EEsProfile && parseContext.version < 130)) - reservedWord(); - return keyword; - - case VOID: - case BOOL: - case FLOAT: - case INT: - case BVEC2: - case BVEC3: - case BVEC4: - case VEC2: - case VEC3: - case VEC4: - case IVEC2: - case IVEC3: - case IVEC4: - case MAT2: - case MAT3: - case MAT4: - case SAMPLER2D: - case SAMPLERCUBE: - afterType = true; - return keyword; - - case BOOLCONSTANT: - if (strcmp("true", tokenText) == 0) - parserToken->sType.lex.b = true; - else - parserToken->sType.lex.b = false; - return keyword; - - case ATTRIBUTE: - case VARYING: - if (parseContext.profile == EEsProfile && parseContext.version >= 300) - reservedWord(); - return keyword; - - case BUFFER: - if ((parseContext.profile == EEsProfile && parseContext.version < 310) || - (parseContext.profile != EEsProfile && parseContext.version < 430)) - return identifierOrType(); - return keyword; - - case ATOMIC_UINT: - if ((parseContext.profile == EEsProfile && parseContext.version >= 310) || - parseContext.extensionTurnedOn(E_GL_ARB_shader_atomic_counters)) - return keyword; - return es30ReservedFromGLSL(420); - - case COHERENT: - case RESTRICT: - case READONLY: - case WRITEONLY: - if (parseContext.profile == EEsProfile && parseContext.version >= 310) - return keyword; - return es30ReservedFromGLSL(parseContext.extensionTurnedOn(E_GL_ARB_shader_image_load_store) ? 130 : 420); - - case VOLATILE: - if (parseContext.profile == EEsProfile && parseContext.version >= 310) - return keyword; - if (! parseContext.symbolTable.atBuiltInLevel() && (parseContext.profile == EEsProfile || (parseContext.version < 420 && ! parseContext.extensionTurnedOn(E_GL_ARB_shader_image_load_store)))) - reservedWord(); - return keyword; - - case LAYOUT: - { - const int numLayoutExts = 2; - const char* layoutExts[numLayoutExts] = { E_GL_ARB_shading_language_420pack, - E_GL_ARB_explicit_attrib_location }; - if ((parseContext.profile == EEsProfile && parseContext.version < 300) || - (parseContext.profile != EEsProfile && parseContext.version < 140 && - ! 
parseContext.extensionsTurnedOn(numLayoutExts, layoutExts))) - return identifierOrType(); - return keyword; - } - case SHARED: - if ((parseContext.profile == EEsProfile && parseContext.version < 300) || - (parseContext.profile != EEsProfile && parseContext.version < 140)) - return identifierOrType(); - return keyword; - - case PATCH: - if (parseContext.symbolTable.atBuiltInLevel() || - (parseContext.profile == EEsProfile && - (parseContext.version >= 320 || - parseContext.extensionsTurnedOn(Num_AEP_tessellation_shader, AEP_tessellation_shader))) || - (parseContext.profile != EEsProfile && parseContext.extensionTurnedOn(E_GL_ARB_tessellation_shader))) - return keyword; - - return es30ReservedFromGLSL(400); - - case SAMPLE: - if ((parseContext.profile == EEsProfile && parseContext.version >= 320) || - parseContext.extensionsTurnedOn(1, &E_GL_OES_shader_multisample_interpolation)) - return keyword; - return es30ReservedFromGLSL(400); - - case SUBROUTINE: - return es30ReservedFromGLSL(400); - - case HIGH_PRECISION: - case MEDIUM_PRECISION: - case LOW_PRECISION: - case PRECISION: - return precisionKeyword(); - - case MAT2X2: - case MAT2X3: - case MAT2X4: - case MAT3X2: - case MAT3X3: - case MAT3X4: - case MAT4X2: - case MAT4X3: - case MAT4X4: - return matNxM(); - - case DMAT2: - case DMAT3: - case DMAT4: - case DMAT2X2: - case DMAT2X3: - case DMAT2X4: - case DMAT3X2: - case DMAT3X3: - case DMAT3X4: - case DMAT4X2: - case DMAT4X3: - case DMAT4X4: - return dMat(); - - case IMAGE1D: - case IIMAGE1D: - case UIMAGE1D: - case IMAGE1DARRAY: - case IIMAGE1DARRAY: - case UIMAGE1DARRAY: - case IMAGE2DRECT: - case IIMAGE2DRECT: - case UIMAGE2DRECT: - afterType = true; - return firstGenerationImage(false); - - case IMAGEBUFFER: - case IIMAGEBUFFER: - case UIMAGEBUFFER: - afterType = true; - if ((parseContext.profile == EEsProfile && parseContext.version >= 320) || - parseContext.extensionsTurnedOn(Num_AEP_texture_buffer, AEP_texture_buffer)) - return keyword; - return firstGenerationImage(false); - - case IMAGE2D: - case IIMAGE2D: - case UIMAGE2D: - case IMAGE3D: - case IIMAGE3D: - case UIMAGE3D: - case IMAGECUBE: - case IIMAGECUBE: - case UIMAGECUBE: - case IMAGE2DARRAY: - case IIMAGE2DARRAY: - case UIMAGE2DARRAY: - afterType = true; - return firstGenerationImage(true); - - case IMAGECUBEARRAY: - case IIMAGECUBEARRAY: - case UIMAGECUBEARRAY: - afterType = true; - if ((parseContext.profile == EEsProfile && parseContext.version >= 320) || - parseContext.extensionsTurnedOn(Num_AEP_texture_cube_map_array, AEP_texture_cube_map_array)) - return keyword; - return secondGenerationImage(); - - case IMAGE2DMS: - case IIMAGE2DMS: - case UIMAGE2DMS: - case IMAGE2DMSARRAY: - case IIMAGE2DMSARRAY: - case UIMAGE2DMSARRAY: - afterType = true; - return secondGenerationImage(); - - case DOUBLE: - case DVEC2: - case DVEC3: - case DVEC4: - afterType = true; - if (parseContext.profile == EEsProfile || parseContext.version < 400) - reservedWord(); - return keyword; - - case INT64_T: - case UINT64_T: - case I64VEC2: - case I64VEC3: - case I64VEC4: - case U64VEC2: - case U64VEC3: - case U64VEC4: - afterType = true; - if (parseContext.symbolTable.atBuiltInLevel() || - (parseContext.extensionTurnedOn(E_GL_ARB_gpu_shader_int64) && - parseContext.profile != EEsProfile && parseContext.version >= 450)) - return keyword; - return identifierOrType(); - -#ifdef AMD_EXTENSIONS - case INT16_T: - case UINT16_T: - case I16VEC2: - case I16VEC3: - case I16VEC4: - case U16VEC2: - case U16VEC3: - case U16VEC4: - afterType = true; - if 
(parseContext.symbolTable.atBuiltInLevel() || - (parseContext.extensionTurnedOn(E_GL_AMD_gpu_shader_int16) && - parseContext.profile != EEsProfile && parseContext.version >= 450)) - return keyword; - return identifierOrType(); - - case FLOAT16_T: - case F16VEC2: - case F16VEC3: - case F16VEC4: - case F16MAT2: - case F16MAT3: - case F16MAT4: - case F16MAT2X2: - case F16MAT2X3: - case F16MAT2X4: - case F16MAT3X2: - case F16MAT3X3: - case F16MAT3X4: - case F16MAT4X2: - case F16MAT4X3: - case F16MAT4X4: - afterType = true; - if (parseContext.symbolTable.atBuiltInLevel() || - (parseContext.extensionTurnedOn(E_GL_AMD_gpu_shader_half_float) && - parseContext.profile != EEsProfile && parseContext.version >= 450)) - return keyword; - return identifierOrType(); -#endif - - case SAMPLERCUBEARRAY: - case SAMPLERCUBEARRAYSHADOW: - case ISAMPLERCUBEARRAY: - case USAMPLERCUBEARRAY: - afterType = true; - if ((parseContext.profile == EEsProfile && parseContext.version >= 320) || - parseContext.extensionsTurnedOn(Num_AEP_texture_cube_map_array, AEP_texture_cube_map_array)) - return keyword; - if (parseContext.profile == EEsProfile || (parseContext.version < 400 && ! parseContext.extensionTurnedOn(E_GL_ARB_texture_cube_map_array))) - reservedWord(); - return keyword; - - case ISAMPLER1D: - case ISAMPLER1DARRAY: - case SAMPLER1DARRAYSHADOW: - case USAMPLER1D: - case USAMPLER1DARRAY: - afterType = true; - return es30ReservedFromGLSL(130); - - case UINT: - case UVEC2: - case UVEC3: - case UVEC4: - case SAMPLERCUBESHADOW: - case SAMPLER2DARRAY: - case SAMPLER2DARRAYSHADOW: - case ISAMPLER2D: - case ISAMPLER3D: - case ISAMPLERCUBE: - case ISAMPLER2DARRAY: - case USAMPLER2D: - case USAMPLER3D: - case USAMPLERCUBE: - case USAMPLER2DARRAY: - afterType = true; - return nonreservedKeyword(300, 130); - - case ISAMPLER2DRECT: - case USAMPLER2DRECT: - afterType = true; - return es30ReservedFromGLSL(140); - - case SAMPLERBUFFER: - afterType = true; - if ((parseContext.profile == EEsProfile && parseContext.version >= 320) || - parseContext.extensionsTurnedOn(Num_AEP_texture_buffer, AEP_texture_buffer)) - return keyword; - return es30ReservedFromGLSL(130); - - case ISAMPLERBUFFER: - case USAMPLERBUFFER: - afterType = true; - if ((parseContext.profile == EEsProfile && parseContext.version >= 320) || - parseContext.extensionsTurnedOn(Num_AEP_texture_buffer, AEP_texture_buffer)) - return keyword; - return es30ReservedFromGLSL(140); - - case SAMPLER2DMS: - case ISAMPLER2DMS: - case USAMPLER2DMS: - afterType = true; - if (parseContext.profile == EEsProfile && parseContext.version >= 310) - return keyword; - return es30ReservedFromGLSL(150); - - case SAMPLER2DMSARRAY: - case ISAMPLER2DMSARRAY: - case USAMPLER2DMSARRAY: - afterType = true; - if ((parseContext.profile == EEsProfile && parseContext.version >= 320) || - parseContext.extensionsTurnedOn(1, &E_GL_OES_texture_storage_multisample_2d_array)) - return keyword; - return es30ReservedFromGLSL(150); - - case SAMPLER1D: - case SAMPLER1DSHADOW: - afterType = true; - if (parseContext.profile == EEsProfile) - reservedWord(); - return keyword; - - case SAMPLER3D: - afterType = true; - if (parseContext.profile == EEsProfile && parseContext.version < 300) { - if (! 
parseContext.extensionTurnedOn(E_GL_OES_texture_3D)) - reservedWord(); - } - return keyword; - - case SAMPLER2DSHADOW: - afterType = true; - if (parseContext.profile == EEsProfile && parseContext.version < 300) { - if (!parseContext.extensionTurnedOn(E_GL_EXT_shadow_samplers)) - reservedWord(); - } - return keyword; - - case SAMPLER2DRECT: - case SAMPLER2DRECTSHADOW: - afterType = true; - if (parseContext.profile == EEsProfile) - reservedWord(); - else if (parseContext.version < 140 && ! parseContext.symbolTable.atBuiltInLevel() && ! parseContext.extensionTurnedOn(E_GL_ARB_texture_rectangle)) { - if (parseContext.relaxedErrors()) - parseContext.requireExtensions(loc, 1, &E_GL_ARB_texture_rectangle, "texture-rectangle sampler keyword"); - else - reservedWord(); - } - return keyword; - - case SAMPLER1DARRAY: - afterType = true; - if (parseContext.profile == EEsProfile && parseContext.version == 300) - reservedWord(); - else if ((parseContext.profile == EEsProfile && parseContext.version < 300) || - (parseContext.profile != EEsProfile && parseContext.version < 130)) - return identifierOrType(); - return keyword; - - case SAMPLEREXTERNALOES: - afterType = true; - if (parseContext.symbolTable.atBuiltInLevel() || parseContext.extensionTurnedOn(E_GL_OES_EGL_image_external)) - return keyword; - return identifierOrType(); - - case TEXTURE2D: - case TEXTURECUBE: - case TEXTURECUBEARRAY: - case ITEXTURECUBEARRAY: - case UTEXTURECUBEARRAY: - case ITEXTURE1DARRAY: - case UTEXTURE1D: - case ITEXTURE1D: - case UTEXTURE1DARRAY: - case TEXTUREBUFFER: - case TEXTURE2DARRAY: - case ITEXTURE2D: - case ITEXTURE3D: - case ITEXTURECUBE: - case ITEXTURE2DARRAY: - case UTEXTURE2D: - case UTEXTURE3D: - case UTEXTURECUBE: - case UTEXTURE2DARRAY: - case ITEXTURE2DRECT: - case UTEXTURE2DRECT: - case ITEXTUREBUFFER: - case UTEXTUREBUFFER: - case TEXTURE2DMS: - case ITEXTURE2DMS: - case UTEXTURE2DMS: - case TEXTURE2DMSARRAY: - case ITEXTURE2DMSARRAY: - case UTEXTURE2DMSARRAY: - case TEXTURE1D: - case TEXTURE3D: - case TEXTURE2DRECT: - case TEXTURE1DARRAY: - case SAMPLER: - case SAMPLERSHADOW: - if (parseContext.spvVersion.vulkan >= 100) - return keyword; - else - return identifierOrType(); - - case SUBPASSINPUT: - case SUBPASSINPUTMS: - case ISUBPASSINPUT: - case ISUBPASSINPUTMS: - case USUBPASSINPUT: - case USUBPASSINPUTMS: - if (parseContext.spvVersion.vulkan >= 100) - return keyword; - else - return identifierOrType(); - - case NOPERSPECTIVE: - return es30ReservedFromGLSL(130); - - case SMOOTH: - if ((parseContext.profile == EEsProfile && parseContext.version < 300) || - (parseContext.profile != EEsProfile && parseContext.version < 130)) - return identifierOrType(); - return keyword; - -#ifdef AMD_EXTENSIONS - case __EXPLICITINTERPAMD: - if (parseContext.profile != EEsProfile && parseContext.version >= 450 && - parseContext.extensionTurnedOn(E_GL_AMD_shader_explicit_vertex_parameter)) - return keyword; - return identifierOrType(); -#endif - - case FLAT: - if (parseContext.profile == EEsProfile && parseContext.version < 300) - reservedWord(); - else if (parseContext.profile != EEsProfile && parseContext.version < 130) - return identifierOrType(); - return keyword; - - case CENTROID: - if (parseContext.version < 120) - return identifierOrType(); - return keyword; - - case PRECISE: - if ((parseContext.profile == EEsProfile && - (parseContext.version >= 320 || parseContext.extensionsTurnedOn(Num_AEP_gpu_shader5, AEP_gpu_shader5))) || - (parseContext.profile != EEsProfile && parseContext.version >= 400)) - return keyword; 
- if (parseContext.profile == EEsProfile && parseContext.version == 310) { - reservedWord(); - return keyword; - } - return identifierOrType(); - - case INVARIANT: - if (parseContext.profile != EEsProfile && parseContext.version < 120) - return identifierOrType(); - return keyword; - - case PACKED: - if ((parseContext.profile == EEsProfile && parseContext.version < 300) || - (parseContext.profile != EEsProfile && parseContext.version < 330)) - return reservedWord(); - return identifierOrType(); - - case RESOURCE: - { - bool reserved = (parseContext.profile == EEsProfile && parseContext.version >= 300) || - (parseContext.profile != EEsProfile && parseContext.version >= 420); - return identifierOrReserved(reserved); - } - case SUPERP: - { - bool reserved = parseContext.profile == EEsProfile || parseContext.version >= 130; - return identifierOrReserved(reserved); - } - - default: - parseContext.infoSink.info.message(EPrefixInternalError, "Unknown glslang keyword", loc); - return 0; - } -} - -int TScanContext::identifierOrType() -{ - parserToken->sType.lex.string = NewPoolTString(tokenText); - if (field) - return IDENTIFIER; - - parserToken->sType.lex.symbol = parseContext.symbolTable.find(*parserToken->sType.lex.string); - if (afterType == false && parserToken->sType.lex.symbol) { - if (const TVariable* variable = parserToken->sType.lex.symbol->getAsVariable()) { - if (variable->isUserType()) { - afterType = true; - - return TYPE_NAME; - } - } - } - - return IDENTIFIER; -} - -// Give an error for use of a reserved symbol. -// However, allow built-in declarations to use reserved words, to allow -// extension support before the extension is enabled. -int TScanContext::reservedWord() -{ - if (! parseContext.symbolTable.atBuiltInLevel()) - parseContext.error(loc, "Reserved word.", tokenText, "", ""); - - return 0; -} - -int TScanContext::identifierOrReserved(bool reserved) -{ - if (reserved) { - reservedWord(); - - return 0; - } - - if (parseContext.forwardCompatible) - parseContext.warn(loc, "using future reserved keyword", tokenText, ""); - - return identifierOrType(); -} - -// For keywords that suddenly showed up on non-ES (not previously reserved) -// but then got reserved by ES 3.0. -int TScanContext::es30ReservedFromGLSL(int version) -{ - if (parseContext.symbolTable.atBuiltInLevel()) - return keyword; - - if ((parseContext.profile == EEsProfile && parseContext.version < 300) || - (parseContext.profile != EEsProfile && parseContext.version < version)) { - if (parseContext.forwardCompatible) - parseContext.warn(loc, "future reserved word in ES 300 and keyword in GLSL", tokenText, ""); - - return identifierOrType(); - } else if (parseContext.profile == EEsProfile && parseContext.version >= 300) - reservedWord(); - - return keyword; -} - -// For a keyword that was never reserved, until it suddenly -// showed up, both in an es version and a non-ES version. 
-int TScanContext::nonreservedKeyword(int esVersion, int nonEsVersion) -{ - if ((parseContext.profile == EEsProfile && parseContext.version < esVersion) || - (parseContext.profile != EEsProfile && parseContext.version < nonEsVersion)) { - if (parseContext.forwardCompatible) - parseContext.warn(loc, "using future keyword", tokenText, ""); - - return identifierOrType(); - } - - return keyword; -} - -int TScanContext::precisionKeyword() -{ - if (parseContext.profile == EEsProfile || parseContext.version >= 130) - return keyword; - - if (parseContext.forwardCompatible) - parseContext.warn(loc, "using ES precision qualifier keyword", tokenText, ""); - - return identifierOrType(); -} - -int TScanContext::matNxM() -{ - afterType = true; - - if (parseContext.version > 110) - return keyword; - - if (parseContext.forwardCompatible) - parseContext.warn(loc, "using future non-square matrix type keyword", tokenText, ""); - - return identifierOrType(); -} - -int TScanContext::dMat() -{ - afterType = true; - - if (parseContext.profile == EEsProfile && parseContext.version >= 300) { - reservedWord(); - - return keyword; - } - - if (parseContext.profile != EEsProfile && parseContext.version >= 400) - return keyword; - - if (parseContext.forwardCompatible) - parseContext.warn(loc, "using future type keyword", tokenText, ""); - - return identifierOrType(); -} - -int TScanContext::firstGenerationImage(bool inEs310) -{ - if (parseContext.symbolTable.atBuiltInLevel() || - (parseContext.profile != EEsProfile && (parseContext.version >= 420 || parseContext.extensionTurnedOn(E_GL_ARB_shader_image_load_store))) || - (inEs310 && parseContext.profile == EEsProfile && parseContext.version >= 310)) - return keyword; - - if ((parseContext.profile == EEsProfile && parseContext.version >= 300) || - (parseContext.profile != EEsProfile && parseContext.version >= 130)) { - reservedWord(); - - return keyword; - } - - if (parseContext.forwardCompatible) - parseContext.warn(loc, "using future type keyword", tokenText, ""); - - return identifierOrType(); -} - -int TScanContext::secondGenerationImage() -{ - if (parseContext.profile == EEsProfile && parseContext.version >= 310) { - reservedWord(); - return keyword; - } - - if (parseContext.symbolTable.atBuiltInLevel() || - (parseContext.profile != EEsProfile && - (parseContext.version >= 420 || parseContext.extensionTurnedOn(E_GL_ARB_shader_image_load_store)))) - return keyword; - - if (parseContext.forwardCompatible) - parseContext.warn(loc, "using future type keyword", tokenText, ""); - - return identifierOrType(); -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/Scan.h b/deps/glslang/glslang/MachineIndependent/Scan.h deleted file mode 100644 index 9b8f2d45..00000000 --- a/deps/glslang/glslang/MachineIndependent/Scan.h +++ /dev/null @@ -1,275 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2013 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. 
nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
-// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
-// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
-// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-// POSSIBILITY OF SUCH DAMAGE.
-//
-#ifndef _GLSLANG_SCAN_INCLUDED_
-#define _GLSLANG_SCAN_INCLUDED_
-
-#include "Versions.h"
-
-namespace glslang {
-
-// Use a global end-of-input character, so no translation is needed across
-// layers of encapsulation. Characters are all 8 bit, and positive, so there is
-// no aliasing of character 255 onto -1, for example.
-const int EndOfInput = -1;
-
-//
-// A character scanner that seamlessly, on read-only strings, reads across an
-// array of strings without assuming null termination.
-//
-class TInputScanner {
-public:
-    TInputScanner(int n, const char* const s[], size_t L[], const char* const* names = nullptr, int b = 0, int f = 0, bool single = false) :
-        numSources(n),
-        sources(reinterpret_cast<const unsigned char* const *>(s)), // up to this point, common usage is "char*", but now we need positive 8-bit characters
-        lengths(L), currentSource(0), currentChar(0), stringBias(b), finale(f), singleLogical(single), endOfFileReached(false)
-    {
-        loc = new TSourceLoc[numSources];
-        for (int i = 0; i < numSources; ++i) {
-            loc[i].init();
-        }
-        if (names != nullptr) {
-            for (int i = 0; i < numSources; ++i)
-                loc[i].name = names[i];
-        }
-        loc[currentSource].string = -stringBias;
-        loc[currentSource].line = 1;
-        loc[currentSource].column = 0;
-        logicalSourceLoc.string = 0;
-        logicalSourceLoc.line = 1;
-        logicalSourceLoc.column = 0;
-        logicalSourceLoc.name = loc[0].name;
-    }
-
-    virtual ~TInputScanner()
-    {
-        delete [] loc;
-    }
-
-    // retrieve the next character and advance one character
-    int get()
-    {
-        int ret = peek();
-        if (ret == EndOfInput)
-            return ret;
-        ++loc[currentSource].column;
-        ++logicalSourceLoc.column;
-        if (ret == '\n') {
-            ++loc[currentSource].line;
-            ++logicalSourceLoc.line;
-            logicalSourceLoc.column = 0;
-            loc[currentSource].column = 0;
-        }
-        advance();
-
-        return ret;
-    }
-
-    // retrieve the next character, no advance
-    int peek()
-    {
-        if (currentSource >= numSources) {
-            endOfFileReached = true;
-            return EndOfInput;
-        }
-        // Make sure we do not read off the end of a string.
-        // N.B. Sources can have a length of 0.
-        int sourceToRead = currentSource;
-        size_t charToRead = currentChar;
-        while(charToRead >= lengths[sourceToRead]) {
-            charToRead = 0;
-            sourceToRead += 1;
-            if (sourceToRead >= numSources) {
-                return EndOfInput;
-            }
-        }
-
-        // Here, we care about making negative valued characters positive
-        return sources[sourceToRead][charToRead];
-    }
-
-    // go back one character
-    void unget()
-    {
-        // Do not roll back once we've reached the end of the file.
- if (endOfFileReached) - return; - - if (currentChar > 0) { - --currentChar; - --loc[currentSource].column; - --logicalSourceLoc.column; - if (loc[currentSource].column < 0) { - // We've moved back past a new line. Find the - // previous newline (or start of the file) to compute - // the column count on the now current line. - size_t chIndex = currentChar; - while (chIndex > 0) { - if (sources[currentSource][chIndex] == '\n') { - break; - } - --chIndex; - } - logicalSourceLoc.column = (int)(currentChar - chIndex); - loc[currentSource].column = (int)(currentChar - chIndex); - } - } else { - do { - --currentSource; - } while (currentSource > 0 && lengths[currentSource] == 0); - if (lengths[currentSource] == 0) { - // set to 0 if we've backed up to the start of an empty string - currentChar = 0; - } else - currentChar = lengths[currentSource] - 1; - } - if (peek() == '\n') { - --loc[currentSource].line; - --logicalSourceLoc.line; - } - } - - // for #line override - void setLine(int newLine) - { - logicalSourceLoc.line = newLine; - loc[getLastValidSourceIndex()].line = newLine; - } - - // for #line override in filename based parsing - void setFile(const char* filename) - { - logicalSourceLoc.name = filename; - loc[getLastValidSourceIndex()].name = filename; - } - - void setFile(const char* filename, int i) - { - if (i == getLastValidSourceIndex()) { - logicalSourceLoc.name = filename; - } - loc[i].name = filename; - } - - void setString(int newString) - { - logicalSourceLoc.string = newString; - loc[getLastValidSourceIndex()].string = newString; - logicalSourceLoc.name = nullptr; - loc[getLastValidSourceIndex()].name = nullptr; - } - - // for #include content indentation - void setColumn(int col) - { - logicalSourceLoc.column = col; - loc[getLastValidSourceIndex()].column = col; - } - - void setEndOfInput() - { - endOfFileReached = true; - currentSource = numSources; - } - - bool atEndOfInput() const { return endOfFileReached; } - - const TSourceLoc& getSourceLoc() const - { - if (singleLogical) { - return logicalSourceLoc; - } else { - return loc[std::max(0, std::min(currentSource, numSources - finale - 1))]; - } - } - // Returns the index (starting from 0) of the most recent valid source string we are reading from. - int getLastValidSourceIndex() const { return std::min(currentSource, numSources - 1); } - - void consumeWhiteSpace(bool& foundNonSpaceTab); - bool consumeComment(); - void consumeWhitespaceComment(bool& foundNonSpaceTab); - bool scanVersion(int& version, EProfile& profile, bool& notFirstToken); - -protected: - - // advance one character - void advance() - { - ++currentChar; - if (currentChar >= lengths[currentSource]) { - ++currentSource; - if (currentSource < numSources) { - loc[currentSource].string = loc[currentSource - 1].string + 1; - loc[currentSource].line = 1; - loc[currentSource].column = 0; - } - while (currentSource < numSources && lengths[currentSource] == 0) { - ++currentSource; - if (currentSource < numSources) { - loc[currentSource].string = loc[currentSource - 1].string + 1; - loc[currentSource].line = 1; - loc[currentSource].column = 0; - } - } - currentChar = 0; - } - } - - int numSources; // number of strings in source - const unsigned char* const *sources; // array of strings; must be converted to positive values on use, to avoid aliasing with -1 as EndOfInput - const size_t *lengths; // length of each string - int currentSource; - size_t currentChar; - - // This is for reporting what string/line an error occurred on, and can be overridden by #line. 
- // It remembers the last state of each source string as it is left for the next one, so unget() - // can restore that state. - TSourceLoc* loc; // an array - - int stringBias; // the first string that is the user's string number 0 - int finale; // number of internal strings after user's last string - - TSourceLoc logicalSourceLoc; - bool singleLogical; // treats the strings as a single logical string. - // locations will be reported from the first string. - - // Set to true once peek() returns EndOfFile, so that we won't roll back - // once we've reached EndOfFile. - bool endOfFileReached; -}; - -} // end namespace glslang - -#endif // _GLSLANG_SCAN_INCLUDED_ diff --git a/deps/glslang/glslang/MachineIndependent/ScanContext.h b/deps/glslang/glslang/MachineIndependent/ScanContext.h deleted file mode 100644 index 608ae067..00000000 --- a/deps/glslang/glslang/MachineIndependent/ScanContext.h +++ /dev/null @@ -1,88 +0,0 @@ -// -// Copyright (C) 2013 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -// -// This holds context specific to the GLSL scanner, which -// sits between the preprocessor scanner and parser. 
-// - -#pragma once - -#include "ParseHelper.h" - -namespace glslang { - -class TPpContext; -class TPpToken; -class TParserToken; - -class TScanContext { -public: - explicit TScanContext(TParseContextBase& pc) : parseContext(pc), afterType(false), field(false) { } - virtual ~TScanContext() { } - - static void fillInKeywordMap(); - static void deleteKeywordMap(); - - int tokenize(TPpContext*, TParserToken&); - -protected: - TScanContext(TScanContext&); - TScanContext& operator=(TScanContext&); - - int tokenizeIdentifier(); - int identifierOrType(); - int reservedWord(); - int identifierOrReserved(bool reserved); - int es30ReservedFromGLSL(int version); - int nonreservedKeyword(int esVersion, int nonEsVersion); - int precisionKeyword(); - int matNxM(); - int dMat(); - int firstGenerationImage(bool inEs310); - int secondGenerationImage(); - - TParseContextBase& parseContext; - bool afterType; // true if we've recognized a type, so can only be looking for an identifier - bool field; // true if we're on a field, right after a '.' - TSourceLoc loc; - TParserToken* parserToken; - TPpToken* ppToken; - - const char* tokenText; - int keyword; -}; - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/ShaderLang.cpp b/deps/glslang/glslang/MachineIndependent/ShaderLang.cpp deleted file mode 100644 index 2a904f45..00000000 --- a/deps/glslang/glslang/MachineIndependent/ShaderLang.cpp +++ /dev/null @@ -1,1936 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2013-2016 LunarG, Inc. -// Copyright (C) 2015-2017 Google, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -// -// Implement the top-level of interface to the compiler/linker, -// as defined in ShaderLang.h -// This is the platform independent interface between an OGL driver -// and the shading language compiler/linker. 
-//
-#include <cstring>
-#include <iostream>
-#include <sstream>
-#include <memory>
-#include "SymbolTable.h"
-#include "ParseHelper.h"
-#include "Scan.h"
-#include "ScanContext.h"
-
-#ifdef ENABLE_HLSL
-#include "../../hlsl/hlslParseHelper.h"
-#include "../../hlsl/hlslParseables.h"
-#include "../../hlsl/hlslScanContext.h"
-#endif
-
-#include "../Include/ShHandle.h"
-#include "../../OGLCompilersDLL/InitializeDll.h"
-
-#include "preprocessor/PpContext.h"
-
-#define SH_EXPORTING
-#include "../Public/ShaderLang.h"
-#include "reflection.h"
-#include "iomapper.h"
-#include "Initialize.h"
-
-namespace { // anonymous namespace for file-local functions and symbols
-
-using namespace glslang;
-
-// Create a language specific version of parseables.
-TBuiltInParseables* CreateBuiltInParseables(TInfoSink& infoSink, EShSource source)
-{
-    switch (source) {
-    case EShSourceGlsl: return new TBuiltIns(); // GLSL builtIns
-#ifdef ENABLE_HLSL
-    case EShSourceHlsl: return new TBuiltInParseablesHlsl(); // HLSL intrinsics
-#endif
-
-    default:
-        infoSink.info.message(EPrefixInternalError, "Unable to determine source language");
-        return nullptr;
-    }
-}
-
-// Create a language specific version of a parse context.
-TParseContextBase* CreateParseContext(TSymbolTable& symbolTable, TIntermediate& intermediate,
-                                      int version, EProfile profile, EShSource source,
-                                      EShLanguage language, TInfoSink& infoSink,
-                                      SpvVersion spvVersion, bool forwardCompatible, EShMessages messages,
-                                      bool parsingBuiltIns, const std::string sourceEntryPointName = "")
-{
-#ifndef ENABLE_HLSL
-    (void)sourceEntryPointName; // Unused argument.
-#endif
-
-    switch (source) {
-    case EShSourceGlsl:
-        intermediate.setEntryPointName("main");
-        return new TParseContext(symbolTable, intermediate, parsingBuiltIns, version, profile, spvVersion,
-                                 language, infoSink, forwardCompatible, messages);
-
-#ifdef ENABLE_HLSL
-    case EShSourceHlsl:
-        return new HlslParseContext(symbolTable, intermediate, parsingBuiltIns, version, profile, spvVersion,
-                                    language, infoSink, sourceEntryPointName.c_str(), forwardCompatible, messages);
-#endif
-    default:
-        infoSink.info.message(EPrefixInternalError, "Unable to determine source language");
-        return nullptr;
-    }
-}
-
-// Local mapping functions for making arrays of symbol tables....
-
-const int VersionCount = 17; // index range in MapVersionToIndex
-
-int MapVersionToIndex(int version)
-{
-    int index = 0;
-
-    switch (version) {
-    case 100: index = 0; break;
-    case 110: index = 1; break;
-    case 120: index = 2; break;
-    case 130: index = 3; break;
-    case 140: index = 4; break;
-    case 150: index = 5; break;
-    case 300: index = 6; break;
-    case 330: index = 7; break;
-    case 400: index = 8; break;
-    case 410: index = 9; break;
-    case 420: index = 10; break;
-    case 430: index = 11; break;
-    case 440: index = 12; break;
-    case 310: index = 13; break;
-    case 450: index = 14; break;
-    case 500: index = 0; break; // HLSL
-    case 320: index = 15; break;
-    case 460: index = 16; break;
-    default: assert(0); break;
-    }
-
-    assert(index < VersionCount);
-
-    return index;
-}
-
-const int SpvVersionCount = 3; // index range in MapSpvVersionToIndex
-
-int MapSpvVersionToIndex(const SpvVersion& spvVersion)
-{
-    int index = 0;
-
-    if (spvVersion.openGl > 0)
-        index = 1;
-    else if (spvVersion.vulkan > 0)
-        index = 2;
-
-    assert(index < SpvVersionCount);
-
-    return index;
-}
-
-const int ProfileCount = 4; // index range in MapProfileToIndex
-
-int MapProfileToIndex(EProfile profile)
-{
-    int index = 0;
-
-    switch (profile) {
-    case ENoProfile: index = 0; break;
-    case ECoreProfile: index = 1; break;
-    case ECompatibilityProfile: index = 2; break;
-    case EEsProfile: index = 3; break;
-    default: break;
-    }
-
-    assert(index < ProfileCount);
-
-    return index;
-}
-
-const int SourceCount = 2;
-
-int MapSourceToIndex(EShSource source)
-{
-    int index = 0;
-
-    switch (source) {
-    case EShSourceGlsl: index = 0; break;
-    case EShSourceHlsl: index = 1; break;
-    default: break;
-    }
-
-    assert(index < SourceCount);
-
-    return index;
-}
-
-// only one of these needed for non-ES; ES needs 2 for different precision defaults of built-ins
-enum EPrecisionClass {
-    EPcGeneral,
-    EPcFragment,
-    EPcCount
-};
-
-// A process-global symbol table per version per profile for built-ins common
-// to multiple stages (languages), and a process-global symbol table per version
-// per profile per stage for built-ins unique to each stage. They will be sparsely
-// populated, so they will only be generated as needed.
-//
-// Each has a different set of built-ins, and we want to preserve that from
-// compile to compile.
-//
-TSymbolTable* CommonSymbolTable[VersionCount][SpvVersionCount][ProfileCount][SourceCount][EPcCount] = {};
-TSymbolTable* SharedSymbolTables[VersionCount][SpvVersionCount][ProfileCount][SourceCount][EShLangCount] = {};
-
-TPoolAllocator* PerProcessGPA = 0;
-
-//
-// Parse and add to the given symbol table the content of the given shader string.
-//
-bool InitializeSymbolTable(const TString& builtIns, int version, EProfile profile, const SpvVersion& spvVersion, EShLanguage language,
-                           EShSource source, TInfoSink& infoSink, TSymbolTable& symbolTable)
-{
-    TIntermediate intermediate(language, version, profile);
-
-    intermediate.setSource(source);
-
-    std::unique_ptr<TParseContextBase> parseContext(CreateParseContext(symbolTable, intermediate, version, profile, source,
-                                                                       language, infoSink, spvVersion, true, EShMsgDefault,
-                                                                       true));
-
-    TShader::ForbidIncluder includer;
-    TPpContext ppContext(*parseContext, "", includer);
-    TScanContext scanContext(*parseContext);
-    parseContext->setScanContext(&scanContext);
-    parseContext->setPpContext(&ppContext);
-
-    //
-    // Push the symbol table to give it an initial scope.
This - // push should not have a corresponding pop, so that built-ins - // are preserved, and the test for an empty table fails. - // - - symbolTable.push(); - - const char* builtInShaders[2]; - size_t builtInLengths[2]; - builtInShaders[0] = builtIns.c_str(); - builtInLengths[0] = builtIns.size(); - - if (builtInLengths[0] == 0) - return true; - - TInputScanner input(1, builtInShaders, builtInLengths); - if (! parseContext->parseShaderStrings(ppContext, input) != 0) { - infoSink.info.message(EPrefixInternalError, "Unable to parse built-ins"); - printf("Unable to parse built-ins\n%s\n", infoSink.info.c_str()); - printf("%s\n", builtInShaders[0]); - - return false; - } - - return true; -} - -int CommonIndex(EProfile profile, EShLanguage language) -{ - return (profile == EEsProfile && language == EShLangFragment) ? EPcFragment : EPcGeneral; -} - -// -// To initialize per-stage shared tables, with the common table already complete. -// -void InitializeStageSymbolTable(TBuiltInParseables& builtInParseables, int version, EProfile profile, const SpvVersion& spvVersion, - EShLanguage language, EShSource source, TInfoSink& infoSink, TSymbolTable** commonTable, - TSymbolTable** symbolTables) -{ - (*symbolTables[language]).adoptLevels(*commonTable[CommonIndex(profile, language)]); - InitializeSymbolTable(builtInParseables.getStageString(language), version, profile, spvVersion, language, source, - infoSink, *symbolTables[language]); - builtInParseables.identifyBuiltIns(version, profile, spvVersion, language, *symbolTables[language]); - if (profile == EEsProfile && version >= 300) - (*symbolTables[language]).setNoBuiltInRedeclarations(); - if (version == 110) - (*symbolTables[language]).setSeparateNameSpaces(); -} - -// -// Initialize the full set of shareable symbol tables; -// The common (cross-stage) and those shareable per-stage. 
-// -bool InitializeSymbolTables(TInfoSink& infoSink, TSymbolTable** commonTable, TSymbolTable** symbolTables, int version, EProfile profile, const SpvVersion& spvVersion, EShSource source) -{ - std::unique_ptr builtInParseables(CreateBuiltInParseables(infoSink, source)); - - if (builtInParseables == nullptr) - return false; - - builtInParseables->initialize(version, profile, spvVersion); - - // do the common tables - InitializeSymbolTable(builtInParseables->getCommonString(), version, profile, spvVersion, EShLangVertex, source, - infoSink, *commonTable[EPcGeneral]); - if (profile == EEsProfile) - InitializeSymbolTable(builtInParseables->getCommonString(), version, profile, spvVersion, EShLangFragment, source, - infoSink, *commonTable[EPcFragment]); - - // do the per-stage tables - - // always have vertex and fragment - InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangVertex, source, - infoSink, commonTable, symbolTables); - InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangFragment, source, - infoSink, commonTable, symbolTables); - - // check for tessellation - if ((profile != EEsProfile && version >= 150) || - (profile == EEsProfile && version >= 310)) { - InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangTessControl, source, - infoSink, commonTable, symbolTables); - InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangTessEvaluation, source, - infoSink, commonTable, symbolTables); - } - - // check for geometry - if ((profile != EEsProfile && version >= 150) || - (profile == EEsProfile && version >= 310)) - InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangGeometry, source, - infoSink, commonTable, symbolTables); - - // check for compute - if ((profile != EEsProfile && version >= 420) || - (profile == EEsProfile && version >= 310)) - InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangCompute, source, - infoSink, commonTable, symbolTables); - - return true; -} - -bool AddContextSpecificSymbols(const TBuiltInResource* resources, TInfoSink& infoSink, TSymbolTable& symbolTable, int version, - EProfile profile, const SpvVersion& spvVersion, EShLanguage language, EShSource source) -{ - std::unique_ptr builtInParseables(CreateBuiltInParseables(infoSink, source)); - - if (builtInParseables == nullptr) - return false; - - builtInParseables->initialize(*resources, version, profile, spvVersion, language); - InitializeSymbolTable(builtInParseables->getCommonString(), version, profile, spvVersion, language, source, infoSink, symbolTable); - builtInParseables->identifyBuiltIns(version, profile, spvVersion, language, symbolTable, *resources); - - return true; -} - -// -// To do this on the fly, we want to leave the current state of our thread's -// pool allocator intact, so: -// - Switch to a new pool for parsing the built-ins -// - Do the parsing, which builds the symbol table, using the new pool -// - Switch to the process-global pool to save a copy the resulting symbol table -// - Free up the new pool used to parse the built-ins -// - Switch back to the original thread's pool -// -// This only gets done the first time any thread needs a particular symbol table -// (lazy evaluation). 
-// -void SetupBuiltinSymbolTable(int version, EProfile profile, const SpvVersion& spvVersion, EShSource source) -{ - TInfoSink infoSink; - - // Make sure only one thread tries to do this at a time - glslang::GetGlobalLock(); - - // See if it's already been done for this version/profile combination - int versionIndex = MapVersionToIndex(version); - int spvVersionIndex = MapSpvVersionToIndex(spvVersion); - int profileIndex = MapProfileToIndex(profile); - int sourceIndex = MapSourceToIndex(source); - if (CommonSymbolTable[versionIndex][spvVersionIndex][profileIndex][sourceIndex][EPcGeneral]) { - glslang::ReleaseGlobalLock(); - - return; - } - - // Switch to a new pool - TPoolAllocator& previousAllocator = GetThreadPoolAllocator(); - TPoolAllocator* builtInPoolAllocator = new TPoolAllocator(); - SetThreadPoolAllocator(*builtInPoolAllocator); - - // Dynamically allocate the local symbol tables so we can control when they are deallocated WRT when the pool is popped. - TSymbolTable* commonTable[EPcCount]; - TSymbolTable* stageTables[EShLangCount]; - for (int precClass = 0; precClass < EPcCount; ++precClass) - commonTable[precClass] = new TSymbolTable; - for (int stage = 0; stage < EShLangCount; ++stage) - stageTables[stage] = new TSymbolTable; - - // Generate the local symbol tables using the new pool - InitializeSymbolTables(infoSink, commonTable, stageTables, version, profile, spvVersion, source); - - // Switch to the process-global pool - SetThreadPoolAllocator(*PerProcessGPA); - - // Copy the local symbol tables from the new pool to the global tables using the process-global pool - for (int precClass = 0; precClass < EPcCount; ++precClass) { - if (! commonTable[precClass]->isEmpty()) { - CommonSymbolTable[versionIndex][spvVersionIndex][profileIndex][sourceIndex][precClass] = new TSymbolTable; - CommonSymbolTable[versionIndex][spvVersionIndex][profileIndex][sourceIndex][precClass]->copyTable(*commonTable[precClass]); - CommonSymbolTable[versionIndex][spvVersionIndex][profileIndex][sourceIndex][precClass]->readOnly(); - } - } - for (int stage = 0; stage < EShLangCount; ++stage) { - if (! stageTables[stage]->isEmpty()) { - SharedSymbolTables[versionIndex][spvVersionIndex][profileIndex][sourceIndex][stage] = new TSymbolTable; - SharedSymbolTables[versionIndex][spvVersionIndex][profileIndex][sourceIndex][stage]->adoptLevels(*CommonSymbolTable - [versionIndex][spvVersionIndex][profileIndex][sourceIndex][CommonIndex(profile, (EShLanguage)stage)]); - SharedSymbolTables[versionIndex][spvVersionIndex][profileIndex][sourceIndex][stage]->copyTable(*stageTables[stage]); - SharedSymbolTables[versionIndex][spvVersionIndex][profileIndex][sourceIndex][stage]->readOnly(); - } - } - - // Clean up the local tables before deleting the pool they used. - for (int precClass = 0; precClass < EPcCount; ++precClass) - delete commonTable[precClass]; - for (int stage = 0; stage < EShLangCount; ++stage) - delete stageTables[stage]; - - delete builtInPoolAllocator; - SetThreadPoolAllocator(previousAllocator); - - glslang::ReleaseGlobalLock(); -} - -// Return true if the shader was correctly specified for version/profile/stage. 
-bool DeduceVersionProfile(TInfoSink& infoSink, EShLanguage stage, bool versionNotFirst, int defaultVersion, - EShSource source, int& version, EProfile& profile, const SpvVersion& spvVersion) -{ - const int FirstProfileVersion = 150; - bool correct = true; - - if (source == EShSourceHlsl) { - version = 500; // shader model; currently a characteristic of glslang, not the input - profile = ECoreProfile; // allow doubles in prototype parsing - return correct; - } - - // Get a version... - if (version == 0) { - version = defaultVersion; - // infoSink.info.message(EPrefixWarning, "#version: statement missing; use #version on first line of shader"); - } - - // Get a good profile... - if (profile == ENoProfile) { - if (version == 300 || version == 310 || version == 320) { - correct = false; - infoSink.info.message(EPrefixError, "#version: versions 300, 310, and 320 require specifying the 'es' profile"); - profile = EEsProfile; - } else if (version == 100) - profile = EEsProfile; - else if (version >= FirstProfileVersion) - profile = ECoreProfile; - else - profile = ENoProfile; - } else { - // a profile was provided... - if (version < 150) { - correct = false; - infoSink.info.message(EPrefixError, "#version: versions before 150 do not allow a profile token"); - if (version == 100) - profile = EEsProfile; - else - profile = ENoProfile; - } else if (version == 300 || version == 310 || version == 320) { - if (profile != EEsProfile) { - correct = false; - infoSink.info.message(EPrefixError, "#version: versions 300, 310, and 320 support only the es profile"); - } - profile = EEsProfile; - } else { - if (profile == EEsProfile) { - correct = false; - infoSink.info.message(EPrefixError, "#version: only version 300, 310, and 320 support the es profile"); - if (version >= FirstProfileVersion) - profile = ECoreProfile; - else - profile = ENoProfile; - } - // else: typical desktop case... e.g., "#version 410 core" - } - } - - // Fix version... - switch (version) { - // ES versions - case 100: break; - case 300: break; - case 310: break; - case 320: break; - - // desktop versions - case 110: break; - case 120: break; - case 130: break; - case 140: break; - case 150: break; - case 330: break; - case 400: break; - case 410: break; - case 420: break; - case 430: break; - case 440: break; - case 450: break; - case 460: break; - - // unknown version - default: - correct = false; - infoSink.info.message(EPrefixError, "version not supported"); - if (profile == EEsProfile) - version = 310; - else { - version = 450; - profile = ECoreProfile; - } - break; - } - - // Correct for stage type... - switch (stage) { - case EShLangGeometry: - if ((profile == EEsProfile && version < 310) || - (profile != EEsProfile && version < 150)) { - correct = false; - infoSink.info.message(EPrefixError, "#version: geometry shaders require es profile with version 310 or non-es profile with version 150 or above"); - version = (profile == EEsProfile) ? 310 : 150; - if (profile == EEsProfile || profile == ENoProfile) - profile = ECoreProfile; - } - break; - case EShLangTessControl: - case EShLangTessEvaluation: - if ((profile == EEsProfile && version < 310) || - (profile != EEsProfile && version < 150)) { - correct = false; - infoSink.info.message(EPrefixError, "#version: tessellation shaders require es profile with version 310 or non-es profile with version 150 or above"); - version = (profile == EEsProfile) ? 
310 : 400; // 150 supports the extension, correction is to 400 which does not - if (profile == EEsProfile || profile == ENoProfile) - profile = ECoreProfile; - } - break; - case EShLangCompute: - if ((profile == EEsProfile && version < 310) || - (profile != EEsProfile && version < 420)) { - correct = false; - infoSink.info.message(EPrefixError, "#version: compute shaders require es profile with version 310 or above, or non-es profile with version 420 or above"); - version = profile == EEsProfile ? 310 : 420; - } - break; - default: - break; - } - - if (profile == EEsProfile && version >= 300 && versionNotFirst) { - correct = false; - infoSink.info.message(EPrefixError, "#version: statement must appear first in es-profile shader; before comments or newlines"); - } - - // Check for SPIR-V compatibility - if (spvVersion.spv != 0) { - switch (profile) { - case EEsProfile: - if (spvVersion.vulkan >= 100 && version < 310) { - correct = false; - infoSink.info.message(EPrefixError, "#version: ES shaders for Vulkan SPIR-V require version 310 or higher"); - version = 310; - } - if (spvVersion.openGl >= 100) { - correct = false; - infoSink.info.message(EPrefixError, "#version: ES shaders for OpenGL SPIR-V are not supported"); - version = 310; - } - break; - case ECompatibilityProfile: - infoSink.info.message(EPrefixError, "#version: compilation for SPIR-V does not support the compatibility profile"); - break; - default: - if (spvVersion.vulkan >= 100 && version < 140) { - correct = false; - infoSink.info.message(EPrefixError, "#version: Desktop shaders for Vulkan SPIR-V require version 140 or higher"); - version = 140; - } - if (spvVersion.openGl >= 100 && version < 330) { - correct = false; - infoSink.info.message(EPrefixError, "#version: Desktop shaders for OpenGL SPIR-V require version 330 or higher"); - version = 330; - } - break; - } - } - - return correct; -} - -// There are multiple paths in for setting environment stuff. -// TEnvironment takes precedence, for what it sets, so sort all this out. -// Ideally, the internal code could be made to use TEnvironment, but for -// now, translate it to the historically used parameters. -void TranslateEnvironment(const TEnvironment* environment, EShMessages& messages, EShSource& source, - EShLanguage& stage, SpvVersion& spvVersion) -{ - // Set up environmental defaults, first ignoring 'environment'. - if (messages & EShMsgSpvRules) - spvVersion.spv = 0x00010000; - if (messages & EShMsgVulkanRules) { - spvVersion.vulkan = 100; - spvVersion.vulkanGlsl = 100; - } else if (spvVersion.spv != 0) - spvVersion.openGl = 100; - - // Now, override, based on any content set in 'environment'. - // 'environment' must be cleared to ESh*None settings when items - // are not being set. 
- if (environment != nullptr) { - // input language - if (environment->input.languageFamily != EShSourceNone) { - stage = environment->input.stage; - switch (environment->input.dialect) { - case EShClientNone: - break; - case EShClientVulkan: - spvVersion.vulkanGlsl = environment->input.dialectVersion; - break; - case EShClientOpenGL: - spvVersion.openGl = environment->input.dialectVersion; - break; - } - switch (environment->input.languageFamily) { - case EShSourceNone: - break; - case EShSourceGlsl: - source = EShSourceGlsl; - messages = static_cast(messages & ~EShMsgReadHlsl); - break; - case EShSourceHlsl: - source = EShSourceHlsl; - messages = static_cast(messages | EShMsgReadHlsl); - break; - } - } - - // client - switch (environment->client.client) { - case EShClientVulkan: - spvVersion.vulkan = environment->client.version; - break; - default: - break; - } - - // generated code - switch (environment->target.language) { - case EshTargetSpv: - spvVersion.spv = environment->target.version; - break; - default: - break; - } - } -} - -// Most processes are recorded when set in the intermediate representation, -// These are the few that are not. -void RecordProcesses(TIntermediate& intermediate, EShMessages messages, const std::string& sourceEntryPointName) -{ - if ((messages & EShMsgRelaxedErrors) != 0) - intermediate.addProcess("relaxed-errors"); - if ((messages & EShMsgSuppressWarnings) != 0) - intermediate.addProcess("suppress-warnings"); - if ((messages & EShMsgKeepUncalled) != 0) - intermediate.addProcess("keep-uncalled"); - if (sourceEntryPointName.size() > 0) { - intermediate.addProcess("source-entrypoint"); - intermediate.addProcessArgument(sourceEntryPointName); - } -} - -// This is the common setup and cleanup code for PreprocessDeferred and -// CompileDeferred. -// It takes any callable with a signature of -// bool (TParseContextBase& parseContext, TPpContext& ppContext, -// TInputScanner& input, bool versionWillBeError, -// TSymbolTable& , TIntermediate& , -// EShOptimizationLevel , EShMessages ); -// Which returns false if a failure was detected and true otherwise. -// -template -bool ProcessDeferred( - TCompiler* compiler, - const char* const shaderStrings[], - const int numStrings, - const int* inputLengths, - const char* const stringNames[], - const char* customPreamble, - const EShOptimizationLevel optLevel, - const TBuiltInResource* resources, - int defaultVersion, // use 100 for ES environment, 110 for desktop; this is the GLSL version, not SPIR-V or Vulkan - EProfile defaultProfile, - // set version/profile to defaultVersion/defaultProfile regardless of the #version - // directive in the source code - bool forceDefaultVersionAndProfile, - bool forwardCompatible, // give errors for use of deprecated features - EShMessages messages, // warnings/errors/AST; things to print out - TIntermediate& intermediate, // returned tree, etc. - ProcessingContext& processingContext, - bool requireNonempty, - TShader::Includer& includer, - const std::string sourceEntryPointName = "", - const TEnvironment* environment = nullptr) // optional way of fully setting all versions, overriding the above -{ - if (! InitThread()) - return false; - - // This must be undone (.pop()) by the caller, after it finishes consuming the created tree. - GetThreadPoolAllocator().push(); - - if (numStrings == 0) - return true; - - // Move to length-based strings, rather than null-terminated strings. 
- // Also, add strings to include the preamble and to ensure the shader is not null, - // which lets the grammar accept what was a null (post preprocessing) shader. - // - // Shader will look like - // string 0: system preamble - // string 1: custom preamble - // string 2...numStrings+1: user's shader - // string numStrings+2: "int;" - const int numPre = 2; - const int numPost = requireNonempty? 1 : 0; - const int numTotal = numPre + numStrings + numPost; - size_t* lengths = new size_t[numTotal]; - const char** strings = new const char*[numTotal]; - const char** names = new const char*[numTotal]; - for (int s = 0; s < numStrings; ++s) { - strings[s + numPre] = shaderStrings[s]; - if (inputLengths == nullptr || inputLengths[s] < 0) - lengths[s + numPre] = strlen(shaderStrings[s]); - else - lengths[s + numPre] = inputLengths[s]; - } - if (stringNames != nullptr) { - for (int s = 0; s < numStrings; ++s) - names[s + numPre] = stringNames[s]; - } else { - for (int s = 0; s < numStrings; ++s) - names[s + numPre] = nullptr; - } - - // Get all the stages, languages, clients, and other environment - // stuff sorted out. - EShSource source = (messages & EShMsgReadHlsl) != 0 ? EShSourceHlsl : EShSourceGlsl; - SpvVersion spvVersion; - EShLanguage stage = compiler->getLanguage(); - TranslateEnvironment(environment, messages, source, stage, spvVersion); - - // First, without using the preprocessor or parser, find the #version, so we know what - // symbol tables, processing rules, etc. to set up. This does not need the extra strings - // outlined above, just the user shader, after the system and user preambles. - glslang::TInputScanner userInput(numStrings, &strings[numPre], &lengths[numPre]); - int version = 0; - EProfile profile = ENoProfile; - bool versionNotFirstToken = false; - bool versionNotFirst = (source == EShSourceHlsl) - ? true - : userInput.scanVersion(version, profile, versionNotFirstToken); - bool versionNotFound = version == 0; - if (forceDefaultVersionAndProfile && source == EShSourceGlsl) { - if (! (messages & EShMsgSuppressWarnings) && ! versionNotFound && - (version != defaultVersion || profile != defaultProfile)) { - compiler->infoSink.info << "Warning, (version, profile) forced to be (" - << defaultVersion << ", " << ProfileName(defaultProfile) - << "), while in source code it is (" - << version << ", " << ProfileName(profile) << ")\n"; - } - - if (versionNotFound) { - versionNotFirstToken = false; - versionNotFirst = false; - versionNotFound = false; - } - version = defaultVersion; - profile = defaultProfile; - } - - bool goodVersion = DeduceVersionProfile(compiler->infoSink, stage, - versionNotFirst, defaultVersion, source, version, profile, spvVersion); - bool versionWillBeError = (versionNotFound || (profile == EEsProfile && version >= 300 && versionNotFirst)); - bool warnVersionNotFirst = false; - if (! 
versionWillBeError && versionNotFirstToken) { - if (messages & EShMsgRelaxedErrors) - warnVersionNotFirst = true; - else - versionWillBeError = true; - } - - intermediate.setSource(source); - intermediate.setVersion(version); - intermediate.setProfile(profile); - intermediate.setSpv(spvVersion); - RecordProcesses(intermediate, messages, sourceEntryPointName); - if (spvVersion.vulkan >= 100) - intermediate.setOriginUpperLeft(); - if ((messages & EShMsgHlslOffsets) || source == EShSourceHlsl) - intermediate.setHlslOffsets(); - if (messages & EShMsgDebugInfo) { - intermediate.setSourceFile(names[numPre]); - for (int s = 0; s < numStrings; ++s) - intermediate.addSourceText(strings[numPre + s]); - } - SetupBuiltinSymbolTable(version, profile, spvVersion, source); - - TSymbolTable* cachedTable = SharedSymbolTables[MapVersionToIndex(version)] - [MapSpvVersionToIndex(spvVersion)] - [MapProfileToIndex(profile)] - [MapSourceToIndex(source)] - [stage]; - - // Dynamically allocate the symbol table so we can control when it is deallocated WRT the pool. - TSymbolTable* symbolTableMemory = new TSymbolTable; - TSymbolTable& symbolTable = *symbolTableMemory; - if (cachedTable) - symbolTable.adoptLevels(*cachedTable); - - // Add built-in symbols that are potentially context dependent; - // they get popped again further down. - if (! AddContextSpecificSymbols(resources, compiler->infoSink, symbolTable, version, profile, spvVersion, - stage, source)) - return false; - - // - // Now we can process the full shader under proper symbols and rules. - // - - TParseContextBase* parseContext = CreateParseContext(symbolTable, intermediate, version, profile, source, - stage, compiler->infoSink, - spvVersion, forwardCompatible, messages, false, sourceEntryPointName); - - TPpContext ppContext(*parseContext, names[numPre] ? names[numPre] : "", includer); - - // only GLSL (bison triggered, really) needs an externally set scan context - glslang::TScanContext scanContext(*parseContext); - if (source == EShSourceGlsl) - parseContext->setScanContext(&scanContext); - - parseContext->setPpContext(&ppContext); - parseContext->setLimits(*resources); - if (! goodVersion) - parseContext->addError(); - if (warnVersionNotFirst) { - TSourceLoc loc; - loc.init(); - parseContext->warn(loc, "Illegal to have non-comment, non-whitespace tokens before #version", "#version", ""); - } - - parseContext->initializeExtensionBehavior(); - - // Fill in the strings as outlined above. - std::string preamble; - parseContext->getPreamble(preamble); - strings[0] = preamble.c_str(); - lengths[0] = strlen(strings[0]); - names[0] = nullptr; - strings[1] = customPreamble; - lengths[1] = strlen(strings[1]); - names[1] = nullptr; - assert(2 == numPre); - if (requireNonempty) { - const int postIndex = numStrings + numPre; - strings[postIndex] = "\n int;"; - lengths[postIndex] = strlen(strings[numStrings + numPre]); - names[postIndex] = nullptr; - } - TInputScanner fullInput(numStrings + numPre + numPost, strings, lengths, names, numPre, numPost); - - // Push a new symbol allocation scope that will get used for the shader's globals. - symbolTable.push(); - - bool success = processingContext(*parseContext, ppContext, fullInput, - versionWillBeError, symbolTable, - intermediate, optLevel, messages); - - // Clean up the symbol table. The AST is self-sufficient now. 
- delete symbolTableMemory; - - delete parseContext; - delete [] lengths; - delete [] strings; - delete [] names; - - return success; -} - -// Responsible for keeping track of the most recent source string and line in -// the preprocessor and outputting newlines appropriately if the source string -// or line changes. -class SourceLineSynchronizer { -public: - SourceLineSynchronizer(const std::function& lastSourceIndex, - std::stringstream* output) - : getLastSourceIndex(lastSourceIndex), output(output), lastSource(-1), lastLine(0) {} -// SourceLineSynchronizer(const SourceLineSynchronizer&) = delete; -// SourceLineSynchronizer& operator=(const SourceLineSynchronizer&) = delete; - - // Sets the internally tracked source string index to that of the most - // recently read token. If we switched to a new source string, returns - // true and inserts a newline. Otherwise, returns false and outputs nothing. - bool syncToMostRecentString() { - if (getLastSourceIndex() != lastSource) { - // After switching to a new source string, we need to reset lastLine - // because line number resets every time a new source string is - // used. We also need to output a newline to separate the output - // from the previous source string (if there is one). - if (lastSource != -1 || lastLine != 0) - *output << std::endl; - lastSource = getLastSourceIndex(); - lastLine = -1; - return true; - } - return false; - } - - // Calls syncToMostRecentString() and then sets the internally tracked line - // number to tokenLine. If we switched to a new line, returns true and inserts - // newlines appropriately. Otherwise, returns false and outputs nothing. - bool syncToLine(int tokenLine) { - syncToMostRecentString(); - const bool newLineStarted = lastLine < tokenLine; - for (; lastLine < tokenLine; ++lastLine) { - if (lastLine > 0) *output << std::endl; - } - return newLineStarted; - } - - // Sets the internally tracked line number to newLineNum. - void setLineNum(int newLineNum) { lastLine = newLineNum; } - -private: - SourceLineSynchronizer& operator=(const SourceLineSynchronizer&); - - // A function for getting the index of the last valid source string we've - // read tokens from. - const std::function getLastSourceIndex; - // output stream for newlines. - std::stringstream* output; - // lastSource is the source string index (starting from 0) of the last token - // processed. It is tracked in order for newlines to be inserted when a new - // source string starts. -1 means we haven't started processing any source - // string. - int lastSource; - // lastLine is the line number (starting from 1) of the last token processed. - // It is tracked in order for newlines to be inserted when a token appears - // on a new line. 0 means we haven't started processing any line in the - // current source string. - int lastLine; -}; - -// DoPreprocessing is a valid ProcessingContext template argument, -// which only performs the preprocessing step of compilation. -// It places the result in the "string" argument to its constructor. -struct DoPreprocessing { - explicit DoPreprocessing(std::string* string): outputString(string) {} - bool operator()(TParseContextBase& parseContext, TPpContext& ppContext, - TInputScanner& input, bool versionWillBeError, - TSymbolTable&, TIntermediate&, - EShOptimizationLevel, EShMessages) - { - // This is a list of tokens that do not require a space before or after. 
- static const std::string unNeededSpaceTokens = ";()[]"; - static const std::string noSpaceBeforeTokens = ","; - glslang::TPpToken ppToken; - - parseContext.setScanner(&input); - ppContext.setInput(input, versionWillBeError); - - std::stringstream outputStream; - SourceLineSynchronizer lineSync( - std::bind(&TInputScanner::getLastValidSourceIndex, &input), &outputStream); - - parseContext.setExtensionCallback([&lineSync, &outputStream]( - int line, const char* extension, const char* behavior) { - lineSync.syncToLine(line); - outputStream << "#extension " << extension << " : " << behavior; - }); - - parseContext.setLineCallback([&lineSync, &outputStream, &parseContext]( - int curLineNum, int newLineNum, bool hasSource, int sourceNum, const char* sourceName) { - // SourceNum is the number of the source-string that is being parsed. - lineSync.syncToLine(curLineNum); - outputStream << "#line " << newLineNum; - if (hasSource) { - outputStream << " "; - if (sourceName != nullptr) { - outputStream << "\"" << sourceName << "\""; - } else { - outputStream << sourceNum; - } - } - if (parseContext.lineDirectiveShouldSetNextLine()) { - // newLineNum is the new line number for the line following the #line - // directive. So the new line number for the current line is - newLineNum -= 1; - } - outputStream << std::endl; - // And we are at the next line of the #line directive now. - lineSync.setLineNum(newLineNum + 1); - }); - - parseContext.setVersionCallback( - [&lineSync, &outputStream](int line, int version, const char* str) { - lineSync.syncToLine(line); - outputStream << "#version " << version; - if (str) { - outputStream << " " << str; - } - }); - - parseContext.setPragmaCallback([&lineSync, &outputStream]( - int line, const glslang::TVector& ops) { - lineSync.syncToLine(line); - outputStream << "#pragma "; - for(size_t i = 0; i < ops.size(); ++i) { - outputStream << ops[i]; - } - }); - - parseContext.setErrorCallback([&lineSync, &outputStream]( - int line, const char* errorMessage) { - lineSync.syncToLine(line); - outputStream << "#error " << errorMessage; - }); - - int lastToken = EndOfInput; // lastToken records the last token processed. - do { - int token = ppContext.tokenize(ppToken); - if (token == EndOfInput) - break; - - bool isNewString = lineSync.syncToMostRecentString(); - bool isNewLine = lineSync.syncToLine(ppToken.loc.line); - - if (isNewLine) { - // Don't emit whitespace onto empty lines. - // Copy any whitespace characters at the start of a line - // from the input to the output. - outputStream << std::string(ppToken.loc.column - 1, ' '); - } - - // Output a space in between tokens, but not at the start of a line, - // and also not around special tokens. This helps with readability - // and consistency. - if (!isNewString && !isNewLine && lastToken != EndOfInput && - (unNeededSpaceTokens.find((char)token) == std::string::npos) && - (unNeededSpaceTokens.find((char)lastToken) == std::string::npos) && - (noSpaceBeforeTokens.find((char)token) == std::string::npos)) { - outputStream << " "; - } - lastToken = token; - outputStream << ppToken.name; - } while (true); - outputStream << std::endl; - *outputString = outputStream.str(); - - bool success = true; - if (parseContext.getNumErrors() > 0) { - success = false; - parseContext.infoSink.info.prefix(EPrefixError); - parseContext.infoSink.info << parseContext.getNumErrors() << " compilation errors. 
No code generated.\n\n"; - } - return success; - } - std::string* outputString; -}; - -// DoFullParse is a valid ProcessingConext template argument for fully -// parsing the shader. It populates the "intermediate" with the AST. -struct DoFullParse{ - bool operator()(TParseContextBase& parseContext, TPpContext& ppContext, - TInputScanner& fullInput, bool versionWillBeError, - TSymbolTable&, TIntermediate& intermediate, - EShOptimizationLevel optLevel, EShMessages messages) - { - bool success = true; - // Parse the full shader. - if (! parseContext.parseShaderStrings(ppContext, fullInput, versionWillBeError)) - success = false; - - if (success && intermediate.getTreeRoot()) { - if (optLevel == EShOptNoGeneration) - parseContext.infoSink.info.message(EPrefixNone, "No errors. No code generation or linking was requested."); - else - success = intermediate.postProcess(intermediate.getTreeRoot(), parseContext.getLanguage()); - } else if (! success) { - parseContext.infoSink.info.prefix(EPrefixError); - parseContext.infoSink.info << parseContext.getNumErrors() << " compilation errors. No code generated.\n\n"; - } - - if (messages & EShMsgAST) - intermediate.output(parseContext.infoSink, true); - - return success; - } -}; - -// Take a single compilation unit, and run the preprocessor on it. -// Return: True if there were no issues found in preprocessing, -// False if during preprocessing any unknown version, pragmas or -// extensions were found. -bool PreprocessDeferred( - TCompiler* compiler, - const char* const shaderStrings[], - const int numStrings, - const int* inputLengths, - const char* const stringNames[], - const char* preamble, - const EShOptimizationLevel optLevel, - const TBuiltInResource* resources, - int defaultVersion, // use 100 for ES environment, 110 for desktop - EProfile defaultProfile, - bool forceDefaultVersionAndProfile, - bool forwardCompatible, // give errors for use of deprecated features - EShMessages messages, // warnings/errors/AST; things to print out - TShader::Includer& includer, - TIntermediate& intermediate, // returned tree, etc. - std::string* outputString) -{ - DoPreprocessing parser(outputString); - return ProcessDeferred(compiler, shaderStrings, numStrings, inputLengths, stringNames, - preamble, optLevel, resources, defaultVersion, - defaultProfile, forceDefaultVersionAndProfile, - forwardCompatible, messages, intermediate, parser, - false, includer); -} - -// -// do a partial compile on the given strings for a single compilation unit -// for a potential deferred link into a single stage (and deferred full compile of that -// stage through machine-dependent compilation). -// -// all preprocessing, parsing, semantic checks, etc. for a single compilation unit -// are done here. -// -// return: the tree and other information is filled into the intermediate argument, -// and true is returned by the function for success. -// -bool CompileDeferred( - TCompiler* compiler, - const char* const shaderStrings[], - const int numStrings, - const int* inputLengths, - const char* const stringNames[], - const char* preamble, - const EShOptimizationLevel optLevel, - const TBuiltInResource* resources, - int defaultVersion, // use 100 for ES environment, 110 for desktop - EProfile defaultProfile, - bool forceDefaultVersionAndProfile, - bool forwardCompatible, // give errors for use of deprecated features - EShMessages messages, // warnings/errors/AST; things to print out - TIntermediate& intermediate,// returned tree, etc. 
- TShader::Includer& includer, - const std::string sourceEntryPointName = "", - TEnvironment* environment = nullptr) -{ - DoFullParse parser; - return ProcessDeferred(compiler, shaderStrings, numStrings, inputLengths, stringNames, - preamble, optLevel, resources, defaultVersion, - defaultProfile, forceDefaultVersionAndProfile, - forwardCompatible, messages, intermediate, parser, - true, includer, sourceEntryPointName, environment); -} - -} // end anonymous namespace for local functions - -// -// ShInitialize() should be called exactly once per process, not per thread. -// -int ShInitialize() -{ - glslang::InitGlobalLock(); - - if (! InitProcess()) - return 0; - - if (! PerProcessGPA) - PerProcessGPA = new TPoolAllocator(); - - glslang::TScanContext::fillInKeywordMap(); -#ifdef ENABLE_HLSL - glslang::HlslScanContext::fillInKeywordMap(); -#endif - - return 1; -} - -// -// Driver calls these to create and destroy compiler/linker -// objects. -// - -ShHandle ShConstructCompiler(const EShLanguage language, int debugOptions) -{ - if (!InitThread()) - return 0; - - TShHandleBase* base = static_cast(ConstructCompiler(language, debugOptions)); - - return reinterpret_cast(base); -} - -ShHandle ShConstructLinker(const EShExecutable executable, int debugOptions) -{ - if (!InitThread()) - return 0; - - TShHandleBase* base = static_cast(ConstructLinker(executable, debugOptions)); - - return reinterpret_cast(base); -} - -ShHandle ShConstructUniformMap() -{ - if (!InitThread()) - return 0; - - TShHandleBase* base = static_cast(ConstructUniformMap()); - - return reinterpret_cast(base); -} - -void ShDestruct(ShHandle handle) -{ - if (handle == 0) - return; - - TShHandleBase* base = static_cast(handle); - - if (base->getAsCompiler()) - DeleteCompiler(base->getAsCompiler()); - else if (base->getAsLinker()) - DeleteLinker(base->getAsLinker()); - else if (base->getAsUniformMap()) - DeleteUniformMap(base->getAsUniformMap()); -} - -// -// Cleanup symbol tables -// -int __fastcall ShFinalize() -{ - for (int version = 0; version < VersionCount; ++version) { - for (int spvVersion = 0; spvVersion < SpvVersionCount; ++spvVersion) { - for (int p = 0; p < ProfileCount; ++p) { - for (int source = 0; source < SourceCount; ++source) { - for (int stage = 0; stage < EShLangCount; ++stage) { - delete SharedSymbolTables[version][spvVersion][p][source][stage]; - SharedSymbolTables[version][spvVersion][p][source][stage] = 0; - } - } - } - } - } - - for (int version = 0; version < VersionCount; ++version) { - for (int spvVersion = 0; spvVersion < SpvVersionCount; ++spvVersion) { - for (int p = 0; p < ProfileCount; ++p) { - for (int source = 0; source < SourceCount; ++source) { - for (int pc = 0; pc < EPcCount; ++pc) { - delete CommonSymbolTable[version][spvVersion][p][source][pc]; - CommonSymbolTable[version][spvVersion][p][source][pc] = 0; - } - } - } - } - } - - if (PerProcessGPA) { - PerProcessGPA->popAll(); - delete PerProcessGPA; - PerProcessGPA = 0; - } - - glslang::TScanContext::deleteKeywordMap(); -#ifdef ENABLE_HLSL - glslang::HlslScanContext::deleteKeywordMap(); -#endif - - return 1; -} - -// -// Do a full compile on the given strings for a single compilation unit -// forming a complete stage. The result of the machine dependent compilation -// is left in the provided compile object. -// -// Return: The return value is really boolean, indicating -// success (1) or failure (0). 
-// -int ShCompile( - const ShHandle handle, - const char* const shaderStrings[], - const int numStrings, - const int* inputLengths, - const EShOptimizationLevel optLevel, - const TBuiltInResource* resources, - int /*debugOptions*/, - int defaultVersion, // use 100 for ES environment, 110 for desktop - bool forwardCompatible, // give errors for use of deprecated features - EShMessages messages // warnings/errors/AST; things to print out - ) -{ - // Map the generic handle to the C++ object - if (handle == 0) - return 0; - - TShHandleBase* base = reinterpret_cast(handle); - TCompiler* compiler = base->getAsCompiler(); - if (compiler == 0) - return 0; - - compiler->infoSink.info.erase(); - compiler->infoSink.debug.erase(); - - TIntermediate intermediate(compiler->getLanguage()); - TShader::ForbidIncluder includer; - bool success = CompileDeferred(compiler, shaderStrings, numStrings, inputLengths, nullptr, - "", optLevel, resources, defaultVersion, ENoProfile, false, - forwardCompatible, messages, intermediate, includer); - - // - // Call the machine dependent compiler - // - if (success && intermediate.getTreeRoot() && optLevel != EShOptNoGeneration) - success = compiler->compile(intermediate.getTreeRoot(), intermediate.getVersion(), intermediate.getProfile()); - - intermediate.removeTree(); - - // Throw away all the temporary memory used by the compilation process. - // The push was done in the CompileDeferred() call above. - GetThreadPoolAllocator().pop(); - - return success ? 1 : 0; -} - -// -// Link the given compile objects. -// -// Return: The return value of is really boolean, indicating -// success or failure. -// -int ShLinkExt( - const ShHandle linkHandle, - const ShHandle compHandles[], - const int numHandles) -{ - if (linkHandle == 0 || numHandles == 0) - return 0; - - THandleList cObjects; - - for (int i = 0; i < numHandles; ++i) { - if (compHandles[i] == 0) - return 0; - TShHandleBase* base = reinterpret_cast(compHandles[i]); - if (base->getAsLinker()) { - cObjects.push_back(base->getAsLinker()); - } - if (base->getAsCompiler()) - cObjects.push_back(base->getAsCompiler()); - - if (cObjects[i] == 0) - return 0; - } - - TShHandleBase* base = reinterpret_cast(linkHandle); - TLinker* linker = static_cast(base->getAsLinker()); - - if (linker == 0) - return 0; - - linker->infoSink.info.erase(); - - for (int i = 0; i < numHandles; ++i) { - if (cObjects[i]->getAsCompiler()) { - if (! cObjects[i]->getAsCompiler()->linkable()) { - linker->infoSink.info.message(EPrefixError, "Not all shaders have valid object code."); - return 0; - } - } - } - - bool ret = linker->link(cObjects); - - return ret ? 1 : 0; -} - -// -// ShSetEncrpytionMethod is a place-holder for specifying -// how source code is encrypted. -// -void ShSetEncryptionMethod(ShHandle handle) -{ - if (handle == 0) - return; -} - -// -// Return any compiler/linker/uniformmap log of messages for the application. -// -const char* ShGetInfoLog(const ShHandle handle) -{ - if (!InitThread()) - return 0; - - if (handle == 0) - return 0; - - TShHandleBase* base = static_cast(handle); - TInfoSink* infoSink; - - if (base->getAsCompiler()) - infoSink = &(base->getAsCompiler()->getInfoSink()); - else if (base->getAsLinker()) - infoSink = &(base->getAsLinker()->getInfoSink()); - else - return 0; - - infoSink->info << infoSink->debug.c_str(); - return infoSink->info.c_str(); -} - -// -// Return the resulting binary code from the link process. Structure -// is machine dependent. 
-// -const void* ShGetExecutable(const ShHandle handle) -{ - if (!InitThread()) - return 0; - - if (handle == 0) - return 0; - - TShHandleBase* base = reinterpret_cast(handle); - - TLinker* linker = static_cast(base->getAsLinker()); - if (linker == 0) - return 0; - - return linker->getObjectCode(); -} - -// -// Let the linker know where the application said it's attributes are bound. -// The linker does not use these values, they are remapped by the ICD or -// hardware. It just needs them to know what's aliased. -// -// Return: The return value of is really boolean, indicating -// success or failure. -// -int ShSetVirtualAttributeBindings(const ShHandle handle, const ShBindingTable* table) -{ - if (!InitThread()) - return 0; - - if (handle == 0) - return 0; - - TShHandleBase* base = reinterpret_cast(handle); - TLinker* linker = static_cast(base->getAsLinker()); - - if (linker == 0) - return 0; - - linker->setAppAttributeBindings(table); - - return 1; -} - -// -// Let the linker know where the predefined attributes have to live. -// -int ShSetFixedAttributeBindings(const ShHandle handle, const ShBindingTable* table) -{ - if (!InitThread()) - return 0; - - if (handle == 0) - return 0; - - TShHandleBase* base = reinterpret_cast(handle); - TLinker* linker = static_cast(base->getAsLinker()); - - if (linker == 0) - return 0; - - linker->setFixedAttributeBindings(table); - return 1; -} - -// -// Some attribute locations are off-limits to the linker... -// -int ShExcludeAttributes(const ShHandle handle, int *attributes, int count) -{ - if (!InitThread()) - return 0; - - if (handle == 0) - return 0; - - TShHandleBase* base = reinterpret_cast(handle); - TLinker* linker = static_cast(base->getAsLinker()); - if (linker == 0) - return 0; - - linker->setExcludedAttributes(attributes, count); - - return 1; -} - -// -// Return the index for OpenGL to use for knowing where a uniform lives. -// -// Return: The return value of is really boolean, indicating -// success or failure. -// -int ShGetUniformLocation(const ShHandle handle, const char* name) -{ - if (!InitThread()) - return 0; - - if (handle == 0) - return -1; - - TShHandleBase* base = reinterpret_cast(handle); - TUniformMap* uniformMap= base->getAsUniformMap(); - if (uniformMap == 0) - return -1; - - return uniformMap->getLocation(name); -} - -//////////////////////////////////////////////////////////////////////////////////////////// -// -// Deferred-Lowering C++ Interface -// ----------------------------------- -// -// Below is a new alternate C++ interface that might potentially replace the above -// opaque handle-based interface. -// -// See more detailed comment in ShaderLang.h -// - -namespace glslang { - -#include "../Include/revision.h" - -const char* GetEsslVersionString() -{ - return "OpenGL ES GLSL 3.00 glslang LunarG Khronos." GLSLANG_REVISION " " GLSLANG_DATE; -} - -const char* GetGlslVersionString() -{ - return "4.20 glslang LunarG Khronos." 
GLSLANG_REVISION " " GLSLANG_DATE; -} - -int GetKhronosToolId() -{ - return 8; -} - -bool InitializeProcess() -{ - return ShInitialize() != 0; -} - -void FinalizeProcess() -{ - ShFinalize(); -} - -class TDeferredCompiler : public TCompiler { -public: - TDeferredCompiler(EShLanguage s, TInfoSink& i) : TCompiler(s, i) { } - virtual bool compile(TIntermNode*, int = 0, EProfile = ENoProfile) { return true; } -}; - -TShader::TShader(EShLanguage s) - : pool(0), stage(s), lengths(nullptr), stringNames(nullptr), preamble("") -{ - infoSink = new TInfoSink; - compiler = new TDeferredCompiler(stage, *infoSink); - intermediate = new TIntermediate(s); - - // clear environment (avoid constructors in them for use in a C interface) - environment.input.languageFamily = EShSourceNone; - environment.input.dialect = EShClientNone; - environment.client.client = EShClientNone; - environment.target.language = EShTargetNone; -} - -TShader::~TShader() -{ - delete infoSink; - delete compiler; - delete intermediate; - delete pool; -} - -void TShader::setStrings(const char* const* s, int n) -{ - strings = s; - numStrings = n; - lengths = nullptr; -} - -void TShader::setStringsWithLengths(const char* const* s, const int* l, int n) -{ - strings = s; - numStrings = n; - lengths = l; -} - -void TShader::setStringsWithLengthsAndNames( - const char* const* s, const int* l, const char* const* names, int n) -{ - strings = s; - numStrings = n; - lengths = l; - stringNames = names; -} - -void TShader::setEntryPoint(const char* entryPoint) -{ - intermediate->setEntryPointName(entryPoint); -} - -void TShader::setSourceEntryPoint(const char* name) -{ - sourceEntryPointName = name; -} - -void TShader::addProcesses(const std::vector& p) -{ - intermediate->addProcesses(p); -} - -// Set binding base for sampler types -void TShader::setShiftSamplerBinding(unsigned int base) { intermediate->setShiftSamplerBinding(base); } -// Set binding base for texture types (SRV) -void TShader::setShiftTextureBinding(unsigned int base) { intermediate->setShiftTextureBinding(base); } -// Set binding base for image types -void TShader::setShiftImageBinding(unsigned int base) { intermediate->setShiftImageBinding(base); } -// Set binding base for uniform buffer objects (CBV) -void TShader::setShiftUboBinding(unsigned int base) { intermediate->setShiftUboBinding(base); } -// Synonym for setShiftUboBinding, to match HLSL language. 
-void TShader::setShiftCbufferBinding(unsigned int base) { intermediate->setShiftUboBinding(base); } -// Set binding base for UAV (unordered access view) -void TShader::setShiftUavBinding(unsigned int base) { intermediate->setShiftUavBinding(base); } -// Set binding base for SSBOs -void TShader::setShiftSsboBinding(unsigned int base) { intermediate->setShiftSsboBinding(base); } -// Enables binding automapping using TIoMapper -void TShader::setAutoMapBindings(bool map) { intermediate->setAutoMapBindings(map); } -// Fragile: currently within one stage: simple auto-assignment of location -void TShader::setAutoMapLocations(bool map) { intermediate->setAutoMapLocations(map); } -// See comment above TDefaultHlslIoMapper in iomapper.cpp: -void TShader::setHlslIoMapping(bool hlslIoMap) { intermediate->setHlslIoMapping(hlslIoMap); } -void TShader::setFlattenUniformArrays(bool flatten) { intermediate->setFlattenUniformArrays(flatten); } -void TShader::setNoStorageFormat(bool useUnknownFormat) { intermediate->setNoStorageFormat(useUnknownFormat); } -void TShader::setResourceSetBinding(const std::vector& base) { intermediate->setResourceSetBinding(base); } -void TShader::setTextureSamplerTransformMode(EShTextureSamplerTransformMode mode) { intermediate->setTextureSamplerTransformMode(mode); } - -// -// Turn the shader strings into a parse tree in the TIntermediate. -// -// Returns true for success. -// -bool TShader::parse(const TBuiltInResource* builtInResources, int defaultVersion, EProfile defaultProfile, bool forceDefaultVersionAndProfile, - bool forwardCompatible, EShMessages messages, Includer& includer) -{ - if (! InitThread()) - return false; - - pool = new TPoolAllocator(); - SetThreadPoolAllocator(*pool); - if (! preamble) - preamble = ""; - - return CompileDeferred(compiler, strings, numStrings, lengths, stringNames, - preamble, EShOptNone, builtInResources, defaultVersion, - defaultProfile, forceDefaultVersionAndProfile, - forwardCompatible, messages, *intermediate, includer, sourceEntryPointName, - &environment); -} - -// Fill in a string with the result of preprocessing ShaderStrings -// Returns true if all extensions, pragmas and version strings were valid. -bool TShader::preprocess(const TBuiltInResource* builtInResources, - int defaultVersion, EProfile defaultProfile, - bool forceDefaultVersionAndProfile, - bool forwardCompatible, EShMessages message, - std::string* output_string, - Includer& includer) -{ - if (! InitThread()) - return false; - - pool = new TPoolAllocator(); - SetThreadPoolAllocator(*pool); - if (! preamble) - preamble = ""; - - return PreprocessDeferred(compiler, strings, numStrings, lengths, stringNames, preamble, - EShOptNone, builtInResources, defaultVersion, - defaultProfile, forceDefaultVersionAndProfile, - forwardCompatible, message, includer, *intermediate, output_string); -} - -const char* TShader::getInfoLog() -{ - return infoSink->info.c_str(); -} - -const char* TShader::getInfoDebugLog() -{ - return infoSink->debug.c_str(); -} - -TProgram::TProgram() : pool(0), reflection(0), ioMapper(nullptr), linked(false) -{ - infoSink = new TInfoSink; - for (int s = 0; s < EShLangCount; ++s) { - intermediate[s] = 0; - newedIntermediate[s] = false; - } -} - -TProgram::~TProgram() -{ - delete ioMapper; - delete infoSink; - delete reflection; - - for (int s = 0; s < EShLangCount; ++s) - if (newedIntermediate[s]) - delete intermediate[s]; - - delete pool; -} - -// -// Merge the compilation units within each stage into a single TIntermediate. 
-// All starting compilation units need to be the result of calling TShader::parse(). -// -// Return true for success. -// -bool TProgram::link(EShMessages messages) -{ - if (linked) - return false; - linked = true; - - bool error = false; - - pool = new TPoolAllocator(); - SetThreadPoolAllocator(*pool); - - for (int s = 0; s < EShLangCount; ++s) { - if (! linkStage((EShLanguage)s, messages)) - error = true; - } - - // TODO: Link: cross-stage error checking - - return ! error; -} - -// -// Merge the compilation units within the given stage into a single TIntermediate. -// -// Return true for success. -// -bool TProgram::linkStage(EShLanguage stage, EShMessages messages) -{ - if (stages[stage].size() == 0) - return true; - - int numEsShaders = 0, numNonEsShaders = 0; - for (auto it = stages[stage].begin(); it != stages[stage].end(); ++it) { - if ((*it)->intermediate->getProfile() == EEsProfile) { - numEsShaders++; - } else { - numNonEsShaders++; - } - } - - if (numEsShaders > 0 && numNonEsShaders > 0) { - infoSink->info.message(EPrefixError, "Cannot mix ES profile with non-ES profile shaders"); - return false; - } else if (numEsShaders > 1) { - infoSink->info.message(EPrefixError, "Cannot attach multiple ES shaders of the same type to a single program"); - return false; - } - - // - // Be efficient for the common single compilation unit per stage case, - // reusing it's TIntermediate instead of merging into a new one. - // - TIntermediate *firstIntermediate = stages[stage].front()->intermediate; - if (stages[stage].size() == 1) - intermediate[stage] = firstIntermediate; - else { - intermediate[stage] = new TIntermediate(stage, - firstIntermediate->getVersion(), - firstIntermediate->getProfile()); - - - // The new TIntermediate must use the same origin as the original TIntermediates. - // Otherwise linking will fail due to different coordinate systems. - if (firstIntermediate->getOriginUpperLeft()) { - intermediate[stage]->setOriginUpperLeft(); - } - intermediate[stage]->setSpv(firstIntermediate->getSpv()); - - newedIntermediate[stage] = true; - } - - if (messages & EShMsgAST) - infoSink->info << "\nLinked " << StageName(stage) << " stage:\n\n"; - - if (stages[stage].size() > 1) { - std::list::const_iterator it; - for (it = stages[stage].begin(); it != stages[stage].end(); ++it) - intermediate[stage]->merge(*infoSink, *(*it)->intermediate); - } - - intermediate[stage]->finalCheck(*infoSink, (messages & EShMsgKeepUncalled) != 0); - - if (messages & EShMsgAST) - intermediate[stage]->output(*infoSink, true); - - return intermediate[stage]->getNumErrors() == 0; -} - -const char* TProgram::getInfoLog() -{ - return infoSink->info.c_str(); -} - -const char* TProgram::getInfoDebugLog() -{ - return infoSink->debug.c_str(); -} - -// -// Reflection implementation. -// - -bool TProgram::buildReflection() -{ - if (! linked || reflection) - return false; - - reflection = new TReflection; - - for (int s = 0; s < EShLangCount; ++s) { - if (intermediate[s]) { - if (! 
reflection->addStage((EShLanguage)s, *intermediate[s])) - return false; - } - } - - return true; -} - -int TProgram::getNumLiveUniformVariables() const { return reflection->getNumUniforms(); } -int TProgram::getNumLiveUniformBlocks() const { return reflection->getNumUniformBlocks(); } -const char* TProgram::getUniformName(int index) const { return reflection->getUniform(index).name.c_str(); } -const char* TProgram::getUniformBlockName(int index) const { return reflection->getUniformBlock(index).name.c_str(); } -int TProgram::getUniformBlockSize(int index) const { return reflection->getUniformBlock(index).size; } -int TProgram::getUniformIndex(const char* name) const { return reflection->getIndex(name); } -int TProgram::getUniformBinding(int index) const { return reflection->getUniform(index).getBinding(); } -int TProgram::getUniformBlockIndex(int index) const { return reflection->getUniform(index).index; } -int TProgram::getUniformBlockCounterIndex(int index) const { return reflection->getUniformBlock(index).counterIndex; } -int TProgram::getUniformType(int index) const { return reflection->getUniform(index).glDefineType; } -int TProgram::getUniformBufferOffset(int index) const { return reflection->getUniform(index).offset; } -int TProgram::getUniformArraySize(int index) const { return reflection->getUniform(index).size; } -int TProgram::getNumLiveAttributes() const { return reflection->getNumAttributes(); } -const char* TProgram::getAttributeName(int index) const { return reflection->getAttribute(index).name.c_str(); } -int TProgram::getAttributeType(int index) const { return reflection->getAttribute(index).glDefineType; } -const TType* TProgram::getAttributeTType(int index) const { return reflection->getAttribute(index).getType(); } -const TType* TProgram::getUniformTType(int index) const { return reflection->getUniform(index).getType(); } -const TType* TProgram::getUniformBlockTType(int index) const { return reflection->getUniformBlock(index).getType(); } -unsigned TProgram::getLocalSize(int dim) const { return reflection->getLocalSize(dim); } - -void TProgram::dumpReflection() { reflection->dump(); } - -// -// I/O mapping implementation. -// -bool TProgram::mapIO(TIoMapResolver* resolver) -{ - if (! linked || ioMapper) - return false; - - ioMapper = new TIoMapper; - - for (int s = 0; s < EShLangCount; ++s) { - if (intermediate[s]) { - if (! ioMapper->addStage((EShLanguage)s, *intermediate[s], *infoSink, resolver)) - return false; - } - } - - return true; -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/SymbolTable.cpp b/deps/glslang/glslang/MachineIndependent/SymbolTable.cpp deleted file mode 100644 index 233033e7..00000000 --- a/deps/glslang/glslang/MachineIndependent/SymbolTable.cpp +++ /dev/null @@ -1,383 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2012-2013 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -// -// Symbol table for parsing. Most functionality and main ideas -// are documented in the header file. -// - -#include "SymbolTable.h" - -namespace glslang { - -// -// TType helper function needs a place to live. -// - -// -// Recursively generate mangled names. -// -void TType::buildMangledName(TString& mangledName) const -{ - if (isMatrix()) - mangledName += 'm'; - else if (isVector()) - mangledName += 'v'; - - switch (basicType) { - case EbtFloat: mangledName += 'f'; break; - case EbtDouble: mangledName += 'd'; break; -#ifdef AMD_EXTENSIONS - case EbtFloat16: mangledName += "f16"; break; -#endif - case EbtInt: mangledName += 'i'; break; - case EbtUint: mangledName += 'u'; break; - case EbtInt64: mangledName += "i64"; break; - case EbtUint64: mangledName += "u64"; break; -#ifdef AMD_EXTENSIONS - case EbtInt16: mangledName += "i16"; break; - case EbtUint16: mangledName += "u16"; break; -#endif - case EbtBool: mangledName += 'b'; break; - case EbtAtomicUint: mangledName += "au"; break; - case EbtSampler: - switch (sampler.type) { - case EbtInt: mangledName += "i"; break; - case EbtUint: mangledName += "u"; break; - default: break; // some compilers want this - } - if (sampler.image) - mangledName += "I"; // a normal image - else if (sampler.sampler) - mangledName += "p"; // a "pure" sampler - else if (!sampler.combined) - mangledName += "t"; // a "pure" texture - else - mangledName += "s"; // traditional combined sampler - if (sampler.arrayed) - mangledName += "A"; - if (sampler.shadow) - mangledName += "S"; - if (sampler.external) - mangledName += "E"; - switch (sampler.dim) { - case Esd1D: mangledName += "1"; break; - case Esd2D: mangledName += "2"; break; - case Esd3D: mangledName += "3"; break; - case EsdCube: mangledName += "C"; break; - case EsdRect: mangledName += "R2"; break; - case EsdBuffer: mangledName += "B"; break; - case EsdSubpass: mangledName += "P"; break; - default: break; // some compilers want this - } - - if (sampler.hasReturnStruct()) { - // Name mangle for sampler return struct uses struct table index. - mangledName += "-tx-struct"; - - char text[16]; // plenty enough space for the small integers. 
- snprintf(text, sizeof(text), "%d-", sampler.structReturnIndex); - mangledName += text; - } else { - switch (sampler.getVectorSize()) { - case 1: mangledName += "1"; break; - case 2: mangledName += "2"; break; - case 3: mangledName += "3"; break; - case 4: break; // default to prior name mangle behavior - } - } - - if (sampler.ms) - mangledName += "M"; - break; - case EbtStruct: - case EbtBlock: - if (basicType == EbtStruct) - mangledName += "struct-"; - else - mangledName += "block-"; - if (typeName) - mangledName += *typeName; - for (unsigned int i = 0; i < structure->size(); ++i) { - mangledName += '-'; - (*structure)[i].type->buildMangledName(mangledName); - } - default: - break; - } - - if (getVectorSize() > 0) - mangledName += static_cast('0' + getVectorSize()); - else { - mangledName += static_cast('0' + getMatrixCols()); - mangledName += static_cast('0' + getMatrixRows()); - } - - if (arraySizes) { - const int maxSize = 11; - char buf[maxSize]; - for (int i = 0; i < arraySizes->getNumDims(); ++i) { - if (arraySizes->getDimNode(i)) { - if (arraySizes->getDimNode(i)->getAsSymbolNode()) - snprintf(buf, maxSize, "s%d", arraySizes->getDimNode(i)->getAsSymbolNode()->getId()); - else - snprintf(buf, maxSize, "s%p", arraySizes->getDimNode(i)); - } else - snprintf(buf, maxSize, "%d", arraySizes->getDimSize(i)); - mangledName += '['; - mangledName += buf; - mangledName += ']'; - } - } -} - -// -// Dump functions. -// - -void TVariable::dump(TInfoSink& infoSink) const -{ - infoSink.debug << getName().c_str() << ": " << type.getStorageQualifierString() << " " << type.getBasicTypeString(); - if (type.isArray()) { - infoSink.debug << "[0]"; - } - infoSink.debug << "\n"; -} - -void TFunction::dump(TInfoSink& infoSink) const -{ - infoSink.debug << getName().c_str() << ": " << returnType.getBasicTypeString() << " " << getMangledName().c_str() << "\n"; -} - -void TAnonMember::dump(TInfoSink& TInfoSink) const -{ - TInfoSink.debug << "anonymous member " << getMemberNumber() << " of " << getAnonContainer().getName().c_str() << "\n"; -} - -void TSymbolTableLevel::dump(TInfoSink &infoSink) const -{ - tLevel::const_iterator it; - for (it = level.begin(); it != level.end(); ++it) - (*it).second->dump(infoSink); -} - -void TSymbolTable::dump(TInfoSink &infoSink) const -{ - for (int level = currentLevel(); level >= 0; --level) { - infoSink.debug << "LEVEL " << level << "\n"; - table[level]->dump(infoSink); - } -} - -// -// Functions have buried pointers to delete. -// -TFunction::~TFunction() -{ - for (TParamList::iterator i = parameters.begin(); i != parameters.end(); ++i) - delete (*i).type; -} - -// -// Symbol table levels are a map of pointers to symbols that have to be deleted. -// -TSymbolTableLevel::~TSymbolTableLevel() -{ - for (tLevel::iterator it = level.begin(); it != level.end(); ++it) - delete (*it).second; - - delete [] defaultPrecision; -} - -// -// Change all function entries in the table with the non-mangled name -// to be related to the provided built-in operation. 
-// -void TSymbolTableLevel::relateToOperator(const char* name, TOperator op) -{ - tLevel::const_iterator candidate = level.lower_bound(name); - while (candidate != level.end()) { - const TString& candidateName = (*candidate).first; - TString::size_type parenAt = candidateName.find_first_of('('); - if (parenAt != candidateName.npos && candidateName.compare(0, parenAt, name) == 0) { - TFunction* function = (*candidate).second->getAsFunction(); - function->relateToOperator(op); - } else - break; - ++candidate; - } -} - -// Make all function overloads of the given name require an extension(s). -// Should only be used for a version/profile that actually needs the extension(s). -void TSymbolTableLevel::setFunctionExtensions(const char* name, int num, const char* const extensions[]) -{ - tLevel::const_iterator candidate = level.lower_bound(name); - while (candidate != level.end()) { - const TString& candidateName = (*candidate).first; - TString::size_type parenAt = candidateName.find_first_of('('); - if (parenAt != candidateName.npos && candidateName.compare(0, parenAt, name) == 0) { - TSymbol* symbol = candidate->second; - symbol->setExtensions(num, extensions); - } else - break; - ++candidate; - } -} - -// -// Make all symbols in this table level read only. -// -void TSymbolTableLevel::readOnly() -{ - for (tLevel::iterator it = level.begin(); it != level.end(); ++it) - (*it).second->makeReadOnly(); -} - -// -// Copy a symbol, but the copy is writable; call readOnly() afterward if that's not desired. -// -TSymbol::TSymbol(const TSymbol& copyOf) -{ - name = NewPoolTString(copyOf.name->c_str()); - uniqueId = copyOf.uniqueId; - writable = true; -} - -TVariable::TVariable(const TVariable& copyOf) : TSymbol(copyOf) -{ - type.deepCopy(copyOf.type); - userType = copyOf.userType; - numExtensions = 0; - extensions = 0; - if (copyOf.numExtensions != 0) - setExtensions(copyOf.numExtensions, copyOf.extensions); - - if (! copyOf.constArray.empty()) { - assert(! copyOf.type.isStruct()); - TConstUnionArray newArray(copyOf.constArray, 0, copyOf.constArray.size()); - constArray = newArray; - } - - // don't support specialization-constant subtrees in cloned tables - constSubtree = nullptr; -} - -TVariable* TVariable::clone() const -{ - TVariable *variable = new TVariable(*this); - - return variable; -} - -TFunction::TFunction(const TFunction& copyOf) : TSymbol(copyOf) -{ - for (unsigned int i = 0; i < copyOf.parameters.size(); ++i) { - TParameter param; - parameters.push_back(param); - parameters.back().copyParam(copyOf.parameters[i]); - } - - numExtensions = 0; - extensions = 0; - if (copyOf.extensions != 0) - setExtensions(copyOf.numExtensions, copyOf.extensions); - returnType.deepCopy(copyOf.returnType); - mangledName = copyOf.mangledName; - op = copyOf.op; - defined = copyOf.defined; - prototyped = copyOf.prototyped; - implicitThis = copyOf.implicitThis; - illegalImplicitThis = copyOf.illegalImplicitThis; - defaultParamCount = copyOf.defaultParamCount; -} - -TFunction* TFunction::clone() const -{ - TFunction *function = new TFunction(*this); - - return function; -} - -TAnonMember* TAnonMember::clone() const -{ - // Anonymous members of a given block should be cloned at a higher level, - // where they can all be assured to still end up pointing to a single - // copy of the original container. 
- assert(0); - - return 0; -} - -TSymbolTableLevel* TSymbolTableLevel::clone() const -{ - TSymbolTableLevel *symTableLevel = new TSymbolTableLevel(); - symTableLevel->anonId = anonId; - symTableLevel->thisLevel = thisLevel; - std::vector containerCopied(anonId, false); - tLevel::const_iterator iter; - for (iter = level.begin(); iter != level.end(); ++iter) { - const TAnonMember* anon = iter->second->getAsAnonMember(); - if (anon) { - // Insert all the anonymous members of this same container at once, - // avoid inserting the other members in the future, once this has been done, - // allowing them to all be part of the same new container. - if (! containerCopied[anon->getAnonId()]) { - TVariable* container = anon->getAnonContainer().clone(); - container->changeName(NewPoolTString("")); - // insert the whole container - symTableLevel->insert(*container, false); - containerCopied[anon->getAnonId()] = true; - } - } else - symTableLevel->insert(*iter->second->clone(), false); - } - - return symTableLevel; -} - -void TSymbolTable::copyTable(const TSymbolTable& copyOf) -{ - assert(adoptedLevels == copyOf.adoptedLevels); - - uniqueId = copyOf.uniqueId; - noBuiltInRedeclarations = copyOf.noBuiltInRedeclarations; - separateNameSpaces = copyOf.separateNameSpaces; - for (unsigned int i = copyOf.adoptedLevels; i < copyOf.table.size(); ++i) - table.push_back(copyOf.table[i]->clone()); -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/SymbolTable.h b/deps/glslang/glslang/MachineIndependent/SymbolTable.h deleted file mode 100644 index f928b7ae..00000000 --- a/deps/glslang/glslang/MachineIndependent/SymbolTable.h +++ /dev/null @@ -1,825 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2013 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#ifndef _SYMBOL_TABLE_INCLUDED_ -#define _SYMBOL_TABLE_INCLUDED_ - -// -// Symbol table for parsing. 
Has these design characteristics: -// -// * Same symbol table can be used to compile many shaders, to preserve -// effort of creating and loading with the large numbers of built-in -// symbols. -// -// --> This requires a copy mechanism, so initial pools used to create -// the shared information can be popped. Done through "clone" -// methods. -// -// * Name mangling will be used to give each function a unique name -// so that symbol table lookups are never ambiguous. This allows -// a simpler symbol table structure. -// -// * Pushing and popping of scope, so symbol table will really be a stack -// of symbol tables. Searched from the top, with new inserts going into -// the top. -// -// * Constants: Compile time constant symbols will keep their values -// in the symbol table. The parser can substitute constants at parse -// time, including doing constant folding and constant propagation. -// -// * No temporaries: Temporaries made from operations (+, --, .xy, etc.) -// are tracked in the intermediate representation, not the symbol table. -// - -#include "../Include/Common.h" -#include "../Include/intermediate.h" -#include "../Include/InfoSink.h" - -namespace glslang { - -// -// Symbol base class. (Can build functions or variables out of these...) -// - -class TVariable; -class TFunction; -class TAnonMember; - -class TSymbol { -public: - POOL_ALLOCATOR_NEW_DELETE(GetThreadPoolAllocator()) - explicit TSymbol(const TString *n) : name(n), numExtensions(0), extensions(0), writable(true) { } - virtual TSymbol* clone() const = 0; - virtual ~TSymbol() { } // rely on all symbol owned memory coming from the pool - - virtual const TString& getName() const { return *name; } - virtual void changeName(const TString* newName) { name = newName; } - virtual void addPrefix(const char* prefix) - { - TString newName(prefix); - newName.append(*name); - changeName(NewPoolTString(newName.c_str())); - } - virtual const TString& getMangledName() const { return getName(); } - virtual TFunction* getAsFunction() { return 0; } - virtual const TFunction* getAsFunction() const { return 0; } - virtual TVariable* getAsVariable() { return 0; } - virtual const TVariable* getAsVariable() const { return 0; } - virtual const TAnonMember* getAsAnonMember() const { return 0; } - virtual const TType& getType() const = 0; - virtual TType& getWritableType() = 0; - virtual void setUniqueId(int id) { uniqueId = id; } - virtual int getUniqueId() const { return uniqueId; } - virtual void setExtensions(int num, const char* const exts[]) - { - assert(extensions == 0); - assert(num > 0); - numExtensions = num; - extensions = NewPoolObject(exts[0], num); - for (int e = 0; e < num; ++e) - extensions[e] = exts[e]; - } - virtual int getNumExtensions() const { return numExtensions; } - virtual const char** getExtensions() const { return extensions; } - virtual void dump(TInfoSink &infoSink) const = 0; - - virtual bool isReadOnly() const { return ! writable; } - virtual void makeReadOnly() { writable = false; } - -protected: - explicit TSymbol(const TSymbol&); - TSymbol& operator=(const TSymbol&); - - const TString *name; - unsigned int uniqueId; // For cross-scope comparing during code generation - - // For tracking what extensions must be present - // (don't use if correct version/profile is present). 
- int numExtensions; - const char** extensions; // an array of pointers to existing constant char strings - - // - // N.B.: Non-const functions that will be generally used should assert on this, - // to avoid overwriting shared symbol-table information. - // - bool writable; -}; - -// -// Variable class, meaning a symbol that's not a function. -// -// There could be a separate class hierarchy for Constant variables; -// Only one of int, bool, or float, (or none) is correct for -// any particular use, but it's easy to do this way, and doesn't -// seem worth having separate classes, and "getConst" can't simply return -// different values for different types polymorphically, so this is -// just simple and pragmatic. -// -class TVariable : public TSymbol { -public: - TVariable(const TString *name, const TType& t, bool uT = false ) - : TSymbol(name), - userType(uT), - constSubtree(nullptr), - anonId(-1) { type.shallowCopy(t); } - virtual TVariable* clone() const; - virtual ~TVariable() { } - - virtual TVariable* getAsVariable() { return this; } - virtual const TVariable* getAsVariable() const { return this; } - virtual const TType& getType() const { return type; } - virtual TType& getWritableType() { assert(writable); return type; } - virtual bool isUserType() const { return userType; } - virtual const TConstUnionArray& getConstArray() const { return constArray; } - virtual TConstUnionArray& getWritableConstArray() { assert(writable); return constArray; } - virtual void setConstArray(const TConstUnionArray& array) { constArray = array; } - virtual void setConstSubtree(TIntermTyped* subtree) { constSubtree = subtree; } - virtual TIntermTyped* getConstSubtree() const { return constSubtree; } - virtual void setAnonId(int i) { anonId = i; } - virtual int getAnonId() const { return anonId; } - - virtual void dump(TInfoSink &infoSink) const; - -protected: - explicit TVariable(const TVariable&); - TVariable& operator=(const TVariable&); - - TType type; - bool userType; - // we are assuming that Pool Allocator will free the memory allocated to unionArray - // when this object is destroyed - - // TODO: these two should be a union - // A variable could be a compile-time constant, or a specialization - // constant, or neither, but never both. - TConstUnionArray constArray; // for compile-time constant value - TIntermTyped* constSubtree; // for specialization constant computation - int anonId; // the ID used for anonymous blocks: TODO: see if uniqueId could serve a dual purpose -}; - -// -// The function sub-class of symbols and the parser will need to -// share this definition of a function parameter. -// -struct TParameter { - TString *name; - TType* type; - TIntermTyped* defaultValue; - void copyParam(const TParameter& param) - { - if (param.name) - name = NewPoolTString(param.name->c_str()); - else - name = 0; - type = param.type->clone(); - defaultValue = param.defaultValue; - } - TBuiltInVariable getDeclaredBuiltIn() const { return type->getQualifier().declaredBuiltIn; } -}; - -// -// The function sub-class of a symbol. 
-// -class TFunction : public TSymbol { -public: - explicit TFunction(TOperator o) : - TSymbol(0), - op(o), - defined(false), prototyped(false), implicitThis(false), illegalImplicitThis(false), defaultParamCount(0) { } - TFunction(const TString *name, const TType& retType, TOperator tOp = EOpNull) : - TSymbol(name), - mangledName(*name + '('), - op(tOp), - defined(false), prototyped(false), implicitThis(false), illegalImplicitThis(false), defaultParamCount(0) - { - returnType.shallowCopy(retType); - declaredBuiltIn = retType.getQualifier().builtIn; - } - virtual TFunction* clone() const override; - virtual ~TFunction(); - - virtual TFunction* getAsFunction() override { return this; } - virtual const TFunction* getAsFunction() const override { return this; } - - // Install 'p' as the (non-'this') last parameter. - // Non-'this' parameters are reflected in both the list of parameters and the - // mangled name. - virtual void addParameter(TParameter& p) - { - assert(writable); - parameters.push_back(p); - p.type->appendMangledName(mangledName); - - if (p.defaultValue != nullptr) - defaultParamCount++; - } - - // Install 'this' as the first parameter. - // 'this' is reflected in the list of parameters, but not the mangled name. - virtual void addThisParameter(TType& type, const char* name) - { - TParameter p = { NewPoolTString(name), new TType, nullptr }; - p.type->shallowCopy(type); - parameters.insert(parameters.begin(), p); - } - - virtual void addPrefix(const char* prefix) override - { - TSymbol::addPrefix(prefix); - mangledName.insert(0, prefix); - } - - virtual void removePrefix(const TString& prefix) - { - assert(mangledName.compare(0, prefix.size(), prefix) == 0); - mangledName.erase(0, prefix.size()); - } - - virtual const TString& getMangledName() const override { return mangledName; } - virtual const TType& getType() const override { return returnType; } - virtual TBuiltInVariable getDeclaredBuiltInType() const { return declaredBuiltIn; } - virtual TType& getWritableType() override { return returnType; } - virtual void relateToOperator(TOperator o) { assert(writable); op = o; } - virtual TOperator getBuiltInOp() const { return op; } - virtual void setDefined() { assert(writable); defined = true; } - virtual bool isDefined() const { return defined; } - virtual void setPrototyped() { assert(writable); prototyped = true; } - virtual bool isPrototyped() const { return prototyped; } - virtual void setImplicitThis() { assert(writable); implicitThis = true; } - virtual bool hasImplicitThis() const { return implicitThis; } - virtual void setIllegalImplicitThis() { assert(writable); illegalImplicitThis = true; } - virtual bool hasIllegalImplicitThis() const { return illegalImplicitThis; } - - // Return total number of parameters - virtual int getParamCount() const { return static_cast(parameters.size()); } - // Return number of parameters with default values. 
- virtual int getDefaultParamCount() const { return defaultParamCount; } - // Return number of fixed parameters (without default values) - virtual int getFixedParamCount() const { return getParamCount() - getDefaultParamCount(); } - - virtual TParameter& operator[](int i) { assert(writable); return parameters[i]; } - virtual const TParameter& operator[](int i) const { return parameters[i]; } - - virtual void dump(TInfoSink &infoSink) const override; - -protected: - explicit TFunction(const TFunction&); - TFunction& operator=(const TFunction&); - - typedef TVector TParamList; - TParamList parameters; - TType returnType; - TBuiltInVariable declaredBuiltIn; - - TString mangledName; - TOperator op; - bool defined; - bool prototyped; - bool implicitThis; // True if this function is allowed to see all members of 'this' - bool illegalImplicitThis; // True if this function is not supposed to have access to dynamic members of 'this', - // even if it finds member variables in the symbol table. - // This is important for a static member function that has member variables in scope, - // but is not allowed to use them, or see hidden symbols instead. - int defaultParamCount; -}; - -// -// Members of anonymous blocks are a kind of TSymbol. They are not hidden in -// the symbol table behind a container; rather they are visible and point to -// their anonymous container. (The anonymous container is found through the -// member, not the other way around.) -// -class TAnonMember : public TSymbol { -public: - TAnonMember(const TString* n, unsigned int m, const TVariable& a, int an) : TSymbol(n), anonContainer(a), memberNumber(m), anonId(an) { } - virtual TAnonMember* clone() const; - virtual ~TAnonMember() { } - - virtual const TAnonMember* getAsAnonMember() const { return this; } - virtual const TVariable& getAnonContainer() const { return anonContainer; } - virtual unsigned int getMemberNumber() const { return memberNumber; } - - virtual const TType& getType() const - { - const TTypeList& types = *anonContainer.getType().getStruct(); - return *types[memberNumber].type; - } - - virtual TType& getWritableType() - { - assert(writable); - const TTypeList& types = *anonContainer.getType().getStruct(); - return *types[memberNumber].type; - } - - virtual int getAnonId() const { return anonId; } - virtual void dump(TInfoSink &infoSink) const; - -protected: - explicit TAnonMember(const TAnonMember&); - TAnonMember& operator=(const TAnonMember&); - - const TVariable& anonContainer; - unsigned int memberNumber; - int anonId; -}; - -class TSymbolTableLevel { -public: - POOL_ALLOCATOR_NEW_DELETE(GetThreadPoolAllocator()) - TSymbolTableLevel() : defaultPrecision(0), anonId(0), thisLevel(false) { } - ~TSymbolTableLevel(); - - bool insert(TSymbol& symbol, bool separateNameSpaces) - { - // - // returning true means symbol was added to the table with no semantic errors - // - const TString& name = symbol.getName(); - if (name == "") { - symbol.getAsVariable()->setAnonId(anonId++); - // An empty name means an anonymous container, exposing its members to the external scope. - // Give it a name and insert its members in the symbol table, pointing to the container. 
- char buf[20]; - snprintf(buf, 20, "%s%d", AnonymousPrefix, symbol.getAsVariable()->getAnonId()); - symbol.changeName(NewPoolTString(buf)); - - return insertAnonymousMembers(symbol, 0); - } else { - // Check for redefinition errors: - // - STL itself will tell us if there is a direct name collision, with name mangling, at this level - // - additionally, check for function-redefining-variable name collisions - const TString& insertName = symbol.getMangledName(); - if (symbol.getAsFunction()) { - // make sure there isn't a variable of this name - if (! separateNameSpaces && level.find(name) != level.end()) - return false; - - // insert, and whatever happens is okay - level.insert(tLevelPair(insertName, &symbol)); - - return true; - } else - return level.insert(tLevelPair(insertName, &symbol)).second; - } - } - - // Add more members to an already inserted aggregate object - bool amend(TSymbol& symbol, int firstNewMember) - { - // See insert() for comments on basic explanation of insert. - // This operates similarly, but more simply. - // Only supporting amend of anonymous blocks so far. - if (IsAnonymous(symbol.getName())) - return insertAnonymousMembers(symbol, firstNewMember); - else - return false; - } - - bool insertAnonymousMembers(TSymbol& symbol, int firstMember) - { - const TTypeList& types = *symbol.getAsVariable()->getType().getStruct(); - for (unsigned int m = firstMember; m < types.size(); ++m) { - TAnonMember* member = new TAnonMember(&types[m].type->getFieldName(), m, *symbol.getAsVariable(), symbol.getAsVariable()->getAnonId()); - if (! level.insert(tLevelPair(member->getMangledName(), member)).second) - return false; - } - - return true; - } - - TSymbol* find(const TString& name) const - { - tLevel::const_iterator it = level.find(name); - if (it == level.end()) - return 0; - else - return (*it).second; - } - - void findFunctionNameList(const TString& name, TVector& list) - { - size_t parenAt = name.find_first_of('('); - TString base(name, 0, parenAt + 1); - - tLevel::const_iterator begin = level.lower_bound(base); - base[parenAt] = ')'; // assume ')' is lexically after '(' - tLevel::const_iterator end = level.upper_bound(base); - for (tLevel::const_iterator it = begin; it != end; ++it) - list.push_back(it->second->getAsFunction()); - } - - // See if there is already a function in the table having the given non-function-style name. - bool hasFunctionName(const TString& name) const - { - tLevel::const_iterator candidate = level.lower_bound(name); - if (candidate != level.end()) { - const TString& candidateName = (*candidate).first; - TString::size_type parenAt = candidateName.find_first_of('('); - if (parenAt != candidateName.npos && candidateName.compare(0, parenAt, name) == 0) - - return true; - } - - return false; - } - - // See if there is a variable at this level having the given non-function-style name. - // Return true if name is found, and set variable to true if the name was a variable. 
- bool findFunctionVariableName(const TString& name, bool& variable) const - { - tLevel::const_iterator candidate = level.lower_bound(name); - if (candidate != level.end()) { - const TString& candidateName = (*candidate).first; - TString::size_type parenAt = candidateName.find_first_of('('); - if (parenAt == candidateName.npos) { - // not a mangled name - if (candidateName == name) { - // found a variable name match - variable = true; - return true; - } - } else { - // a mangled name - if (candidateName.compare(0, parenAt, name) == 0) { - // found a function name match - variable = false; - return true; - } - } - } - - return false; - } - - // Use this to do a lazy 'push' of precision defaults the first time - // a precision statement is seen in a new scope. Leave it at 0 for - // when no push was needed. Thus, it is not the current defaults, - // it is what to restore the defaults to when popping a level. - void setPreviousDefaultPrecisions(const TPrecisionQualifier *p) - { - // can call multiple times at one scope, will only latch on first call, - // as we're tracking the previous scope's values, not the current values - if (defaultPrecision != 0) - return; - - defaultPrecision = new TPrecisionQualifier[EbtNumTypes]; - for (int t = 0; t < EbtNumTypes; ++t) - defaultPrecision[t] = p[t]; - } - - void getPreviousDefaultPrecisions(TPrecisionQualifier *p) - { - // can be called for table level pops that didn't set the - // defaults - if (defaultPrecision == 0 || p == 0) - return; - - for (int t = 0; t < EbtNumTypes; ++t) - p[t] = defaultPrecision[t]; - } - - void relateToOperator(const char* name, TOperator op); - void setFunctionExtensions(const char* name, int num, const char* const extensions[]); - void dump(TInfoSink &infoSink) const; - TSymbolTableLevel* clone() const; - void readOnly(); - - void setThisLevel() { thisLevel = true; } - bool isThisLevel() const { return thisLevel; } - -protected: - explicit TSymbolTableLevel(TSymbolTableLevel&); - TSymbolTableLevel& operator=(TSymbolTableLevel&); - - typedef std::map, pool_allocator > > tLevel; - typedef const tLevel::value_type tLevelPair; - typedef std::pair tInsertResult; - - tLevel level; // named mappings - TPrecisionQualifier *defaultPrecision; - int anonId; - bool thisLevel; // True if this level of the symbol table is a structure scope containing member function - // that are supposed to see anonymous access to member variables. -}; - -class TSymbolTable { -public: - TSymbolTable() : uniqueId(0), noBuiltInRedeclarations(false), separateNameSpaces(false), adoptedLevels(0) - { - // - // This symbol table cannot be used until push() is called. 
- // - } - ~TSymbolTable() - { - // this can be called explicitly; safest to code it so it can be called multiple times - - // don't deallocate levels passed in from elsewhere - while (table.size() > adoptedLevels) - pop(0); - } - - void adoptLevels(TSymbolTable& symTable) - { - for (unsigned int level = 0; level < symTable.table.size(); ++level) { - table.push_back(symTable.table[level]); - ++adoptedLevels; - } - uniqueId = symTable.uniqueId; - noBuiltInRedeclarations = symTable.noBuiltInRedeclarations; - separateNameSpaces = symTable.separateNameSpaces; - } - - // - // While level adopting is generic, the methods below enact a the following - // convention for levels: - // 0: common built-ins shared across all stages, all compiles, only one copy for all symbol tables - // 1: per-stage built-ins, shared across all compiles, but a different copy per stage - // 2: built-ins specific to a compile, like resources that are context-dependent, or redeclared built-ins - // 3: user-shader globals - // -protected: - static const int globalLevel = 3; - bool isSharedLevel(int level) { return level <= 1; } // exclude all per-compile levels - bool isBuiltInLevel(int level) { return level <= 2; } // exclude user globals - bool isGlobalLevel(int level) { return level <= globalLevel; } // include user globals -public: - bool isEmpty() { return table.size() == 0; } - bool atBuiltInLevel() { return isBuiltInLevel(currentLevel()); } - bool atGlobalLevel() { return isGlobalLevel(currentLevel()); } - - void setNoBuiltInRedeclarations() { noBuiltInRedeclarations = true; } - void setSeparateNameSpaces() { separateNameSpaces = true; } - - void push() - { - table.push_back(new TSymbolTableLevel); - } - - // Make a new symbol-table level to represent the scope introduced by a structure - // containing member functions, such that the member functions can find anonymous - // references to member variables. - // - // 'thisSymbol' should have a name of "" to trigger anonymous structure-member - // symbol finds. - void pushThis(TSymbol& thisSymbol) - { - assert(thisSymbol.getName().size() == 0); - table.push_back(new TSymbolTableLevel); - table.back()->setThisLevel(); - insert(thisSymbol); - } - - void pop(TPrecisionQualifier *p) - { - table[currentLevel()]->getPreviousDefaultPrecisions(p); - delete table.back(); - table.pop_back(); - } - - // - // Insert a visible symbol into the symbol table so it can - // be found later by name. - // - // Returns false if the was a name collision. - // - bool insert(TSymbol& symbol) - { - symbol.setUniqueId(++uniqueId); - - // make sure there isn't a function of this variable name - if (! separateNameSpaces && ! symbol.getAsFunction() && table[currentLevel()]->hasFunctionName(symbol.getName())) - return false; - - // check for not overloading or redefining a built-in function - if (noBuiltInRedeclarations) { - if (atGlobalLevel() && currentLevel() > 0) { - if (table[0]->hasFunctionName(symbol.getName())) - return false; - if (currentLevel() > 1 && table[1]->hasFunctionName(symbol.getName())) - return false; - } - } - - return table[currentLevel()]->insert(symbol, separateNameSpaces); - } - - // Add more members to an already inserted aggregate object - bool amend(TSymbol& symbol, int firstNewMember) - { - // See insert() for comments on basic explanation of insert. - // This operates similarly, but more simply. 
- return table[currentLevel()]->amend(symbol, firstNewMember); - } - - // - // To allocate an internal temporary, which will need to be uniquely - // identified by the consumer of the AST, but never need to - // found by doing a symbol table search by name, hence allowed an - // arbitrary name in the symbol with no worry of collision. - // - void makeInternalVariable(TSymbol& symbol) - { - symbol.setUniqueId(++uniqueId); - } - - // - // Copy a variable or anonymous member's structure from a shared level so that - // it can be added (soon after return) to the symbol table where it can be - // modified without impacting other users of the shared table. - // - TSymbol* copyUpDeferredInsert(TSymbol* shared) - { - if (shared->getAsVariable()) { - TSymbol* copy = shared->clone(); - copy->setUniqueId(shared->getUniqueId()); - return copy; - } else { - const TAnonMember* anon = shared->getAsAnonMember(); - assert(anon); - TVariable* container = anon->getAnonContainer().clone(); - container->changeName(NewPoolTString("")); - container->setUniqueId(anon->getAnonContainer().getUniqueId()); - return container; - } - } - - TSymbol* copyUp(TSymbol* shared) - { - TSymbol* copy = copyUpDeferredInsert(shared); - table[globalLevel]->insert(*copy, separateNameSpaces); - if (shared->getAsVariable()) - return copy; - else { - // return the copy of the anonymous member - return table[globalLevel]->find(shared->getName()); - } - } - - // Normal find of a symbol, that can optionally say whether the symbol was found - // at a built-in level or the current top-scope level. - TSymbol* find(const TString& name, bool* builtIn = 0, bool* currentScope = 0, int* thisDepthP = 0) - { - int level = currentLevel(); - TSymbol* symbol; - int thisDepth = 0; - do { - if (table[level]->isThisLevel()) - ++thisDepth; - symbol = table[level]->find(name); - --level; - } while (symbol == nullptr && level >= 0); - level++; - if (builtIn) - *builtIn = isBuiltInLevel(level); - if (currentScope) - *currentScope = isGlobalLevel(currentLevel()) || level == currentLevel(); // consider shared levels as "current scope" WRT user globals - if (thisDepthP != nullptr) { - if (! table[level]->isThisLevel()) - thisDepth = 0; - *thisDepthP = thisDepth; - } - - return symbol; - } - - // Find of a symbol that returns how many layers deep of nested - // structures-with-member-functions ('this' scopes) deep the symbol was - // found in. - TSymbol* find(const TString& name, int& thisDepth) - { - int level = currentLevel(); - TSymbol* symbol; - thisDepth = 0; - do { - if (table[level]->isThisLevel()) - ++thisDepth; - symbol = table[level]->find(name); - --level; - } while (symbol == 0 && level >= 0); - - if (! table[level + 1]->isThisLevel()) - thisDepth = 0; - - return symbol; - } - - bool isFunctionNameVariable(const TString& name) const - { - if (separateNameSpaces) - return false; - - int level = currentLevel(); - do { - bool variable; - bool found = table[level]->findFunctionVariableName(name, variable); - if (found) - return variable; - --level; - } while (level >= 0); - - return false; - } - - void findFunctionNameList(const TString& name, TVector& list, bool& builtIn) - { - // For user levels, return the set found in the first scope with a match - builtIn = false; - int level = currentLevel(); - do { - table[level]->findFunctionNameList(name, list); - --level; - } while (list.empty() && level >= globalLevel); - - if (! 
list.empty()) - return; - - // Gather across all built-in levels; they don't hide each other - builtIn = true; - do { - table[level]->findFunctionNameList(name, list); - --level; - } while (level >= 0); - } - - void relateToOperator(const char* name, TOperator op) - { - for (unsigned int level = 0; level < table.size(); ++level) - table[level]->relateToOperator(name, op); - } - - void setFunctionExtensions(const char* name, int num, const char* const extensions[]) - { - for (unsigned int level = 0; level < table.size(); ++level) - table[level]->setFunctionExtensions(name, num, extensions); - } - - void setVariableExtensions(const char* name, int num, const char* const extensions[]) - { - TSymbol* symbol = find(TString(name)); - if (symbol) - symbol->setExtensions(num, extensions); - } - - int getMaxSymbolId() { return uniqueId; } - void dump(TInfoSink &infoSink) const; - void copyTable(const TSymbolTable& copyOf); - - void setPreviousDefaultPrecisions(TPrecisionQualifier *p) { table[currentLevel()]->setPreviousDefaultPrecisions(p); } - - void readOnly() - { - for (unsigned int level = 0; level < table.size(); ++level) - table[level]->readOnly(); - } - -protected: - TSymbolTable(TSymbolTable&); - TSymbolTable& operator=(TSymbolTableLevel&); - - int currentLevel() const { return static_cast(table.size()) - 1; } - - std::vector table; - int uniqueId; // for unique identification in code generation - bool noBuiltInRedeclarations; - bool separateNameSpaces; - unsigned int adoptedLevels; -}; - -} // end namespace glslang - -#endif // _SYMBOL_TABLE_INCLUDED_ diff --git a/deps/glslang/glslang/MachineIndependent/Versions.cpp b/deps/glslang/glslang/MachineIndependent/Versions.cpp deleted file mode 100644 index 531aa159..00000000 --- a/deps/glslang/glslang/MachineIndependent/Versions.cpp +++ /dev/null @@ -1,797 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2012-2013 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. 
-// - -// -// Help manage multiple profiles, versions, extensions etc. -// -// These don't return error codes, as the presumption is parsing will -// always continue as if the tested feature were enabled, and thus there -// is no error recovery needed. -// - -// -// HOW TO add a feature enabled by an extension. -// -// To add a new hypothetical "Feature F" to the front end, where an extension -// "XXX_extension_X" can be used to enable the feature, do the following. -// -// OVERVIEW: Specific features are what are error-checked for, not -// extensions: A specific Feature F might be enabled by an extension, or a -// particular version in a particular profile, or a stage, or combinations, etc. -// -// The basic mechanism is to use the following to "declare" all the things that -// enable/disable Feature F, in a code path that implements Feature F: -// -// requireProfile() -// profileRequires() -// requireStage() -// checkDeprecated() -// requireNotRemoved() -// requireExtensions() -// -// Typically, only the first two calls are needed. They go into a code path that -// implements Feature F, and will log the proper error/warning messages. Parsing -// will then always continue as if the tested feature was enabled. -// -// There is typically no if-testing or conditional parsing, just insertion of the calls above. -// However, if symbols specific to the extension are added (step 5), they will -// only be added under tests that the minimum version and profile are present. -// -// 1) Add a symbol name for the extension string at the bottom of Versions.h: -// -// const char* const XXX_extension_X = "XXX_extension_X"; -// -// 2) Add extension initialization to TParseVersions::initializeExtensionBehavior(), -// the first function below: -// -// extensionBehavior[XXX_extension_X] = EBhDisable; -// -// 3) Add any preprocessor directives etc. in the next function, TParseVersions::getPreamble(): -// -// "#define XXX_extension_X 1\n" -// -// The new-line is important, as that ends preprocess tokens. -// -// 4) Insert a profile check in the feature's path (unless all profiles support the feature, -// for some version level). That is, call requireProfile() to constrain the profiles, e.g.: -// -// // ... in a path specific to Feature F... -// requireProfile(loc, -// ECoreProfile | ECompatibilityProfile, -// "Feature F"); -// -// 5) For each profile that supports the feature, insert version/extension checks: -// -// The mostly likely scenario is that Feature F can only be used with a -// particular profile if XXX_extension_X is present or the version is -// high enough that the core specification already incorporated it. -// -// // following the requireProfile() call... -// profileRequires(loc, -// ECoreProfile | ECompatibilityProfile, -// 420, // 0 if no version incorporated the feature into the core spec. -// XXX_extension_X, // can be a list of extensions that all add the feature -// "Feature F Description"); -// -// This allows the feature if either A) one of the extensions is enabled or -// B) the version is high enough. If no version yet incorporates the feature -// into core, pass in 0. -// -// This can be called multiple times, if different profiles support the -// feature starting at different version numbers or with different -// extensions. -// -// This must be called for each profile allowed by the initial call to requireProfile(). -// -// Profiles are all masks, which can be "or"-ed together. 
-// -// ENoProfile -// ECoreProfile -// ECompatibilityProfile -// EEsProfile -// -// The ENoProfile profile is only for desktop, before profiles showed up in version 150; -// All other #version with no profile default to either es or core, and so have profiles. -// -// You can select all but a particular profile using ~. The following basically means "desktop": -// -// ~EEsProfile -// -// 6) If built-in symbols are added by the extension, add them in Initialize.cpp: Their use -// will be automatically error checked against the extensions enabled at that moment. -// see the comment at the top of Initialize.cpp for where to put them. Establish them at -// the earliest release that supports the extension. Then, tag them with the -// set of extensions that both enable them and are necessary, given the version of the symbol -// table. (There is a different symbol table for each version.) -// - -#include "parseVersions.h" -#include "localintermediate.h" - -namespace glslang { - -// -// Initialize all extensions, almost always to 'disable', as once their features -// are incorporated into a core version, their features are supported through allowing that -// core version, not through a pseudo-enablement of the extension. -// -void TParseVersions::initializeExtensionBehavior() -{ - extensionBehavior[E_GL_OES_texture_3D] = EBhDisable; - extensionBehavior[E_GL_OES_standard_derivatives] = EBhDisable; - extensionBehavior[E_GL_EXT_frag_depth] = EBhDisable; - extensionBehavior[E_GL_OES_EGL_image_external] = EBhDisable; - extensionBehavior[E_GL_EXT_shader_texture_lod] = EBhDisable; - extensionBehavior[E_GL_EXT_shadow_samplers] = EBhDisable; - extensionBehavior[E_GL_ARB_texture_rectangle] = EBhDisable; - extensionBehavior[E_GL_3DL_array_objects] = EBhDisable; - extensionBehavior[E_GL_ARB_shading_language_420pack] = EBhDisable; - extensionBehavior[E_GL_ARB_texture_gather] = EBhDisable; - extensionBehavior[E_GL_ARB_gpu_shader5] = EBhDisablePartial; - extensionBehavior[E_GL_ARB_separate_shader_objects] = EBhDisable; - extensionBehavior[E_GL_ARB_compute_shader] = EBhDisable; - extensionBehavior[E_GL_ARB_tessellation_shader] = EBhDisable; - extensionBehavior[E_GL_ARB_enhanced_layouts] = EBhDisable; - extensionBehavior[E_GL_ARB_texture_cube_map_array] = EBhDisable; - extensionBehavior[E_GL_ARB_shader_texture_lod] = EBhDisable; - extensionBehavior[E_GL_ARB_explicit_attrib_location] = EBhDisable; - extensionBehavior[E_GL_ARB_shader_image_load_store] = EBhDisable; - extensionBehavior[E_GL_ARB_shader_atomic_counters] = EBhDisable; - extensionBehavior[E_GL_ARB_shader_draw_parameters] = EBhDisable; - extensionBehavior[E_GL_ARB_shader_group_vote] = EBhDisable; - extensionBehavior[E_GL_ARB_derivative_control] = EBhDisable; - extensionBehavior[E_GL_ARB_shader_texture_image_samples] = EBhDisable; - extensionBehavior[E_GL_ARB_viewport_array] = EBhDisable; - extensionBehavior[E_GL_ARB_gpu_shader_int64] = EBhDisable; - extensionBehavior[E_GL_ARB_shader_ballot] = EBhDisable; - extensionBehavior[E_GL_ARB_sparse_texture2] = EBhDisable; - extensionBehavior[E_GL_ARB_sparse_texture_clamp] = EBhDisable; - extensionBehavior[E_GL_ARB_shader_stencil_export] = EBhDisable; -// extensionBehavior[E_GL_ARB_cull_distance] = EBhDisable; // present for 4.5, but need extension control over block members - extensionBehavior[E_GL_ARB_post_depth_coverage] = EBhDisable; - extensionBehavior[E_GL_ARB_shader_viewport_layer_array] = EBhDisable; - - extensionBehavior[E_GL_EXT_shader_non_constant_global_initializers] = EBhDisable; - 
extensionBehavior[E_GL_EXT_shader_image_load_formatted] = EBhDisable; - extensionBehavior[E_GL_EXT_post_depth_coverage] = EBhDisable; - - // #line and #include - extensionBehavior[E_GL_GOOGLE_cpp_style_line_directive] = EBhDisable; - extensionBehavior[E_GL_GOOGLE_include_directive] = EBhDisable; - -#ifdef AMD_EXTENSIONS - extensionBehavior[E_GL_AMD_shader_ballot] = EBhDisable; - extensionBehavior[E_GL_AMD_shader_trinary_minmax] = EBhDisable; - extensionBehavior[E_GL_AMD_shader_explicit_vertex_parameter] = EBhDisable; - extensionBehavior[E_GL_AMD_gcn_shader] = EBhDisable; - extensionBehavior[E_GL_AMD_gpu_shader_half_float] = EBhDisable; - extensionBehavior[E_GL_AMD_texture_gather_bias_lod] = EBhDisable; - extensionBehavior[E_GL_AMD_gpu_shader_int16] = EBhDisable; - extensionBehavior[E_GL_AMD_shader_image_load_store_lod] = EBhDisable; -#endif - -#ifdef NV_EXTENSIONS - extensionBehavior[E_GL_NV_sample_mask_override_coverage] = EBhDisable; - extensionBehavior[E_SPV_NV_geometry_shader_passthrough] = EBhDisable; - extensionBehavior[E_GL_NV_viewport_array2] = EBhDisable; - extensionBehavior[E_GL_NV_stereo_view_rendering] = EBhDisable; - extensionBehavior[E_GL_NVX_multiview_per_view_attributes] = EBhDisable; -#endif - - // AEP - extensionBehavior[E_GL_ANDROID_extension_pack_es31a] = EBhDisable; - extensionBehavior[E_GL_KHR_blend_equation_advanced] = EBhDisable; - extensionBehavior[E_GL_OES_sample_variables] = EBhDisable; - extensionBehavior[E_GL_OES_shader_image_atomic] = EBhDisable; - extensionBehavior[E_GL_OES_shader_multisample_interpolation] = EBhDisable; - extensionBehavior[E_GL_OES_texture_storage_multisample_2d_array] = EBhDisable; - extensionBehavior[E_GL_EXT_geometry_shader] = EBhDisable; - extensionBehavior[E_GL_EXT_geometry_point_size] = EBhDisable; - extensionBehavior[E_GL_EXT_gpu_shader5] = EBhDisable; - extensionBehavior[E_GL_EXT_primitive_bounding_box] = EBhDisable; - extensionBehavior[E_GL_EXT_shader_io_blocks] = EBhDisable; - extensionBehavior[E_GL_EXT_tessellation_shader] = EBhDisable; - extensionBehavior[E_GL_EXT_tessellation_point_size] = EBhDisable; - extensionBehavior[E_GL_EXT_texture_buffer] = EBhDisable; - extensionBehavior[E_GL_EXT_texture_cube_map_array] = EBhDisable; - - // OES matching AEP - extensionBehavior[E_GL_OES_geometry_shader] = EBhDisable; - extensionBehavior[E_GL_OES_geometry_point_size] = EBhDisable; - extensionBehavior[E_GL_OES_gpu_shader5] = EBhDisable; - extensionBehavior[E_GL_OES_primitive_bounding_box] = EBhDisable; - extensionBehavior[E_GL_OES_shader_io_blocks] = EBhDisable; - extensionBehavior[E_GL_OES_tessellation_shader] = EBhDisable; - extensionBehavior[E_GL_OES_tessellation_point_size] = EBhDisable; - extensionBehavior[E_GL_OES_texture_buffer] = EBhDisable; - extensionBehavior[E_GL_OES_texture_cube_map_array] = EBhDisable; - - // EXT extensions - extensionBehavior[E_GL_EXT_device_group] = EBhDisable; - extensionBehavior[E_GL_EXT_multiview] = EBhDisable; - - // OVR extensions - extensionBehavior[E_GL_OVR_multiview] = EBhDisable; - extensionBehavior[E_GL_OVR_multiview2] = EBhDisable; -} - -// Get code that is not part of a shared symbol table, is specific to this shader, -// or needed by the preprocessor (which does not use a shared symbol table). 
-void TParseVersions::getPreamble(std::string& preamble) -{ - if (profile == EEsProfile) { - preamble = - "#define GL_ES 1\n" - "#define GL_FRAGMENT_PRECISION_HIGH 1\n" - "#define GL_OES_texture_3D 1\n" - "#define GL_OES_standard_derivatives 1\n" - "#define GL_EXT_frag_depth 1\n" - "#define GL_OES_EGL_image_external 1\n" - "#define GL_EXT_shader_texture_lod 1\n" - "#define GL_EXT_shadow_samplers 1\n" - - // AEP - "#define GL_ANDROID_extension_pack_es31a 1\n" - "#define GL_KHR_blend_equation_advanced 1\n" - "#define GL_OES_sample_variables 1\n" - "#define GL_OES_shader_image_atomic 1\n" - "#define GL_OES_shader_multisample_interpolation 1\n" - "#define GL_OES_texture_storage_multisample_2d_array 1\n" - "#define GL_EXT_geometry_shader 1\n" - "#define GL_EXT_geometry_point_size 1\n" - "#define GL_EXT_gpu_shader5 1\n" - "#define GL_EXT_primitive_bounding_box 1\n" - "#define GL_EXT_shader_io_blocks 1\n" - "#define GL_EXT_tessellation_shader 1\n" - "#define GL_EXT_tessellation_point_size 1\n" - "#define GL_EXT_texture_buffer 1\n" - "#define GL_EXT_texture_cube_map_array 1\n" - - // OES matching AEP - "#define GL_OES_geometry_shader 1\n" - "#define GL_OES_geometry_point_size 1\n" - "#define GL_OES_gpu_shader5 1\n" - "#define GL_OES_primitive_bounding_box 1\n" - "#define GL_OES_shader_io_blocks 1\n" - "#define GL_OES_tessellation_shader 1\n" - "#define GL_OES_tessellation_point_size 1\n" - "#define GL_OES_texture_buffer 1\n" - "#define GL_OES_texture_cube_map_array 1\n" - "#define GL_EXT_shader_non_constant_global_initializers 1\n" - ; - } else { - preamble = - "#define GL_FRAGMENT_PRECISION_HIGH 1\n" - "#define GL_ARB_texture_rectangle 1\n" - "#define GL_ARB_shading_language_420pack 1\n" - "#define GL_ARB_texture_gather 1\n" - "#define GL_ARB_gpu_shader5 1\n" - "#define GL_ARB_separate_shader_objects 1\n" - "#define GL_ARB_compute_shader 1\n" - "#define GL_ARB_tessellation_shader 1\n" - "#define GL_ARB_enhanced_layouts 1\n" - "#define GL_ARB_texture_cube_map_array 1\n" - "#define GL_ARB_shader_texture_lod 1\n" - "#define GL_ARB_explicit_attrib_location 1\n" - "#define GL_ARB_shader_image_load_store 1\n" - "#define GL_ARB_shader_atomic_counters 1\n" - "#define GL_ARB_shader_draw_parameters 1\n" - "#define GL_ARB_shader_group_vote 1\n" - "#define GL_ARB_derivative_control 1\n" - "#define GL_ARB_shader_texture_image_samples 1\n" - "#define GL_ARB_viewport_array 1\n" - "#define GL_ARB_gpu_shader_int64 1\n" - "#define GL_ARB_shader_ballot 1\n" - "#define GL_ARB_sparse_texture2 1\n" - "#define GL_ARB_sparse_texture_clamp 1\n" - "#define GL_ARB_shader_stencil_export 1\n" -// "#define GL_ARB_cull_distance 1\n" // present for 4.5, but need extension control over block members - "#define GL_ARB_post_depth_coverage 1\n" - "#define GL_EXT_shader_non_constant_global_initializers 1\n" - "#define GL_EXT_shader_image_load_formatted 1\n" - "#define GL_EXT_post_depth_coverage 1\n" - -#ifdef AMD_EXTENSIONS - "#define GL_AMD_shader_ballot 1\n" - "#define GL_AMD_shader_trinary_minmax 1\n" - "#define GL_AMD_shader_explicit_vertex_parameter 1\n" - "#define GL_AMD_gcn_shader 1\n" - "#define GL_AMD_gpu_shader_half_float 1\n" - "#define GL_AMD_texture_gather_bias_lod 1\n" - "#define GL_AMD_gpu_shader_int16 1\n" - "#define GL_AMD_shader_image_load_store_lod 1\n" -#endif - -#ifdef NV_EXTENSIONS - "#define GL_NV_sample_mask_override_coverage 1\n" - "#define GL_NV_geometry_shader_passthrough 1\n" - "#define GL_NV_viewport_array2 1\n" -#endif - ; - - if (version >= 150) { - // define GL_core_profile and 
GL_compatibility_profile - preamble += "#define GL_core_profile 1\n"; - - if (profile == ECompatibilityProfile) - preamble += "#define GL_compatibility_profile 1\n"; - } - } - - if ((profile != EEsProfile && version >= 140) || - (profile == EEsProfile && version >= 310)) { - preamble += - "#define GL_EXT_device_group 1\n" - "#define GL_EXT_multiview 1\n" - ; - } - - if (version >= 300 /* both ES and non-ES */) { - preamble += - "#define GL_OVR_multiview 1\n" - "#define GL_OVR_multiview2 1\n" - ; - } - - // #line and #include - preamble += - "#define GL_GOOGLE_cpp_style_line_directive 1\n" - "#define GL_GOOGLE_include_directive 1\n" - ; - - // #define VULKAN XXXX - const int numberBufSize = 12; - char numberBuf[numberBufSize]; - if (spvVersion.vulkanGlsl > 0) { - preamble += "#define VULKAN "; - snprintf(numberBuf, numberBufSize, "%d", spvVersion.vulkanGlsl); - preamble += numberBuf; - preamble += "\n"; - } - // #define GL_SPIRV XXXX - if (spvVersion.openGl > 0) { - preamble += "#define GL_SPIRV "; - snprintf(numberBuf, numberBufSize, "%d", spvVersion.openGl); - preamble += numberBuf; - preamble += "\n"; - } - -} - -// -// When to use requireProfile(): -// -// Use if only some profiles support a feature. However, if within a profile the feature -// is version or extension specific, follow this call with calls to profileRequires(). -// -// Operation: If the current profile is not one of the profileMask, -// give an error message. -// -void TParseVersions::requireProfile(const TSourceLoc& loc, int profileMask, const char* featureDesc) -{ - if (! (profile & profileMask)) - error(loc, "not supported with this profile:", featureDesc, ProfileName(profile)); -} - -// -// Map from stage enum to externally readable text name. -// -const char* StageName(EShLanguage stage) -{ - switch(stage) { - case EShLangVertex: return "vertex"; - case EShLangTessControl: return "tessellation control"; - case EShLangTessEvaluation: return "tessellation evaluation"; - case EShLangGeometry: return "geometry"; - case EShLangFragment: return "fragment"; - case EShLangCompute: return "compute"; - default: return "unknown stage"; - } -} - -// -// When to use profileRequires(): -// -// If a set of profiles have the same requirements for what version or extensions -// are needed to support a feature. -// -// It must be called for each profile that needs protection. Use requireProfile() first -// to reduce that set of profiles. -// -// Operation: Will issue warnings/errors based on the current profile, version, and extension -// behaviors. It only checks extensions when the current profile is one of the profileMask. -// -// A minVersion of 0 means no version of the profileMask support this in core, -// the extension must be present. -// - -// entry point that takes multiple extensions -void TParseVersions::profileRequires(const TSourceLoc& loc, int profileMask, int minVersion, int numExtensions, const char* const extensions[], const char* featureDesc) -{ - if (profile & profileMask) { - bool okay = false; - if (minVersion > 0 && version >= minVersion) - okay = true; - for (int i = 0; i < numExtensions; ++i) { - switch (getExtensionBehavior(extensions[i])) { - case EBhWarn: - infoSink.info.message(EPrefixWarning, ("extension " + TString(extensions[i]) + " is being used for " + featureDesc).c_str(), loc); - // fall through - case EBhRequire: - case EBhEnable: - okay = true; - break; - default: break; // some compilers want this - } - } - - if (! 
okay) - error(loc, "not supported for this version or the enabled extensions", featureDesc, ""); - } -} - -// entry point for the above that takes a single extension -void TParseVersions::profileRequires(const TSourceLoc& loc, int profileMask, int minVersion, const char* extension, const char* featureDesc) -{ - profileRequires(loc, profileMask, minVersion, extension ? 1 : 0, &extension, featureDesc); -} - -// -// When to use requireStage() -// -// If only some stages support a feature. -// -// Operation: If the current stage is not present, give an error message. -// -void TParseVersions::requireStage(const TSourceLoc& loc, EShLanguageMask languageMask, const char* featureDesc) -{ - if (((1 << language) & languageMask) == 0) - error(loc, "not supported in this stage:", featureDesc, StageName(language)); -} - -// If only one stage supports a feature, this can be called. But, all supporting stages -// must be specified with one call. -void TParseVersions::requireStage(const TSourceLoc& loc, EShLanguage stage, const char* featureDesc) -{ - requireStage(loc, static_cast(1 << stage), featureDesc); -} - -// -// Within a set of profiles, see if a feature is deprecated and give an error or warning based on whether -// a future compatibility context is being use. -// -void TParseVersions::checkDeprecated(const TSourceLoc& loc, int profileMask, int depVersion, const char* featureDesc) -{ - if (profile & profileMask) { - if (version >= depVersion) { - if (forwardCompatible) - error(loc, "deprecated, may be removed in future release", featureDesc, ""); - else if (! suppressWarnings()) - infoSink.info.message(EPrefixWarning, (TString(featureDesc) + " deprecated in version " + - String(depVersion) + "; may be removed in future release").c_str(), loc); - } - } -} - -// -// Within a set of profiles, see if a feature has now been removed and if so, give an error. -// The version argument is the first version no longer having the feature. -// -void TParseVersions::requireNotRemoved(const TSourceLoc& loc, int profileMask, int removedVersion, const char* featureDesc) -{ - if (profile & profileMask) { - if (version >= removedVersion) { - const int maxSize = 60; - char buf[maxSize]; - snprintf(buf, maxSize, "%s profile; removed in version %d", ProfileName(profile), removedVersion); - error(loc, "no longer supported in", featureDesc, buf); - } - } -} - -void TParseVersions::unimplemented(const TSourceLoc& loc, const char* featureDesc) -{ - error(loc, "feature not yet implemented", featureDesc, ""); -} - -// Returns true if at least one of the extensions in the extensions parameter is requested. Otherwise, returns false. -// Warns appropriately if the requested behavior of an extension is "warn". 
-bool TParseVersions::checkExtensionsRequested(const TSourceLoc& loc, int numExtensions, const char* const extensions[], const char* featureDesc) -{ - // First, see if any of the extensions are enabled - for (int i = 0; i < numExtensions; ++i) { - TExtensionBehavior behavior = getExtensionBehavior(extensions[i]); - if (behavior == EBhEnable || behavior == EBhRequire) - return true; - } - - // See if any extensions want to give a warning on use; give warnings for all such extensions - bool warned = false; - for (int i = 0; i < numExtensions; ++i) { - TExtensionBehavior behavior = getExtensionBehavior(extensions[i]); - if (behavior == EBhDisable && relaxedErrors()) { - infoSink.info.message(EPrefixWarning, "The following extension must be enabled to use this feature:", loc); - behavior = EBhWarn; - } - if (behavior == EBhWarn) { - infoSink.info.message(EPrefixWarning, ("extension " + TString(extensions[i]) + " is being used for " + featureDesc).c_str(), loc); - warned = true; - } - } - if (warned) - return true; - return false; -} - -// -// Use when there are no profile/version to check, it's just an error if one of the -// extensions is not present. -// -void TParseVersions::requireExtensions(const TSourceLoc& loc, int numExtensions, const char* const extensions[], const char* featureDesc) -{ - if (checkExtensionsRequested(loc, numExtensions, extensions, featureDesc)) return; - - // If we get this far, give errors explaining what extensions are needed - if (numExtensions == 1) - error(loc, "required extension not requested:", featureDesc, extensions[0]); - else { - error(loc, "required extension not requested:", featureDesc, "Possible extensions include:"); - for (int i = 0; i < numExtensions; ++i) - infoSink.info.message(EPrefixNone, extensions[i]); - } -} - -// -// Use by preprocessor when there are no profile/version to check, it's just an error if one of the -// extensions is not present. -// -void TParseVersions::ppRequireExtensions(const TSourceLoc& loc, int numExtensions, const char* const extensions[], const char* featureDesc) -{ - if (checkExtensionsRequested(loc, numExtensions, extensions, featureDesc)) return; - - // If we get this far, give errors explaining what extensions are needed - if (numExtensions == 1) - ppError(loc, "required extension not requested:", featureDesc, extensions[0]); - else { - ppError(loc, "required extension not requested:", featureDesc, "Possible extensions include:"); - for (int i = 0; i < numExtensions; ++i) - infoSink.info.message(EPrefixNone, extensions[i]); - } -} - -TExtensionBehavior TParseVersions::getExtensionBehavior(const char* extension) -{ - auto iter = extensionBehavior.find(TString(extension)); - if (iter == extensionBehavior.end()) - return EBhMissing; - else - return iter->second; -} - -// Returns true if the given extension is set to enable, require, or warn. -bool TParseVersions::extensionTurnedOn(const char* const extension) -{ - switch (getExtensionBehavior(extension)) { - case EBhEnable: - case EBhRequire: - case EBhWarn: - return true; - default: - break; - } - return false; -} -// See if any of the extensions are set to enable, require, or warn. -bool TParseVersions::extensionsTurnedOn(int numExtensions, const char* const extensions[]) -{ - for (int i = 0; i < numExtensions; ++i) { - if (extensionTurnedOn(extensions[i])) return true; - } - return false; -} - -// -// Change the current state of an extension's behavior. 
-// -void TParseVersions::updateExtensionBehavior(int line, const char* extension, const char* behaviorString) -{ - // Translate from text string of extension's behavior to an enum. - TExtensionBehavior behavior = EBhDisable; - if (! strcmp("require", behaviorString)) - behavior = EBhRequire; - else if (! strcmp("enable", behaviorString)) - behavior = EBhEnable; - else if (! strcmp("disable", behaviorString)) - behavior = EBhDisable; - else if (! strcmp("warn", behaviorString)) - behavior = EBhWarn; - else { - error(getCurrentLoc(), "behavior not supported:", "#extension", behaviorString); - return; - } - - // update the requested extension - updateExtensionBehavior(extension, behavior); - - // see if need to propagate to implicitly modified things - if (strcmp(extension, "GL_ANDROID_extension_pack_es31a") == 0) { - // to everything in AEP - updateExtensionBehavior(line, "GL_KHR_blend_equation_advanced", behaviorString); - updateExtensionBehavior(line, "GL_OES_sample_variables", behaviorString); - updateExtensionBehavior(line, "GL_OES_shader_image_atomic", behaviorString); - updateExtensionBehavior(line, "GL_OES_shader_multisample_interpolation", behaviorString); - updateExtensionBehavior(line, "GL_OES_texture_storage_multisample_2d_array", behaviorString); - updateExtensionBehavior(line, "GL_EXT_geometry_shader", behaviorString); - updateExtensionBehavior(line, "GL_EXT_gpu_shader5", behaviorString); - updateExtensionBehavior(line, "GL_EXT_primitive_bounding_box", behaviorString); - updateExtensionBehavior(line, "GL_EXT_shader_io_blocks", behaviorString); - updateExtensionBehavior(line, "GL_EXT_tessellation_shader", behaviorString); - updateExtensionBehavior(line, "GL_EXT_texture_buffer", behaviorString); - updateExtensionBehavior(line, "GL_EXT_texture_cube_map_array", behaviorString); - } - // geometry to io_blocks - else if (strcmp(extension, "GL_EXT_geometry_shader") == 0) - updateExtensionBehavior(line, "GL_EXT_shader_io_blocks", behaviorString); - else if (strcmp(extension, "GL_OES_geometry_shader") == 0) - updateExtensionBehavior(line, "GL_OES_shader_io_blocks", behaviorString); - // tessellation to io_blocks - else if (strcmp(extension, "GL_EXT_tessellation_shader") == 0) - updateExtensionBehavior(line, "GL_EXT_shader_io_blocks", behaviorString); - else if (strcmp(extension, "GL_OES_tessellation_shader") == 0) - updateExtensionBehavior(line, "GL_OES_shader_io_blocks", behaviorString); - else if (strcmp(extension, "GL_GOOGLE_include_directive") == 0) - updateExtensionBehavior(line, "GL_GOOGLE_cpp_style_line_directive", behaviorString); -} - -void TParseVersions::updateExtensionBehavior(const char* extension, TExtensionBehavior behavior) -{ - // Update the current behavior - if (strcmp(extension, "all") == 0) { - // special case for the 'all' extension; apply it to every extension present - if (behavior == EBhRequire || behavior == EBhEnable) { - error(getCurrentLoc(), "extension 'all' cannot have 'require' or 'enable' behavior", "#extension", ""); - return; - } else { - for (auto iter = extensionBehavior.begin(); iter != extensionBehavior.end(); ++iter) - iter->second = behavior; - } - } else { - // Do the update for this single extension - auto iter = extensionBehavior.find(TString(extension)); - if (iter == extensionBehavior.end()) { - switch (behavior) { - case EBhRequire: - error(getCurrentLoc(), "extension not supported:", "#extension", extension); - break; - case EBhEnable: - case EBhWarn: - case EBhDisable: - warn(getCurrentLoc(), "extension not supported:", "#extension", 
extension); - break; - default: - assert(0 && "unexpected behavior"); - } - - return; - } else { - if (iter->second == EBhDisablePartial) - warn(getCurrentLoc(), "extension is only partially supported:", "#extension", extension); - if (behavior == EBhEnable || behavior == EBhRequire) - intermediate.addRequestedExtension(extension); - iter->second = behavior; - } - } -} - -// Call for any operation needing full GLSL integer data-type support. -void TParseVersions::fullIntegerCheck(const TSourceLoc& loc, const char* op) -{ - profileRequires(loc, ENoProfile, 130, nullptr, op); - profileRequires(loc, EEsProfile, 300, nullptr, op); -} - -// Call for any operation needing GLSL double data-type support. -void TParseVersions::doubleCheck(const TSourceLoc& loc, const char* op) -{ - requireProfile(loc, ECoreProfile | ECompatibilityProfile, op); - profileRequires(loc, ECoreProfile, 400, nullptr, op); - profileRequires(loc, ECompatibilityProfile, 400, nullptr, op); -} - -#ifdef AMD_EXTENSIONS -// Call for any operation needing GLSL 16-bit integer data-type support. -void TParseVersions::int16Check(const TSourceLoc& loc, const char* op, bool builtIn) -{ - if (! builtIn) { - requireExtensions(loc, 1, &E_GL_AMD_gpu_shader_int16, "shader int16"); - requireProfile(loc, ECoreProfile | ECompatibilityProfile, op); - profileRequires(loc, ECoreProfile, 450, nullptr, op); - profileRequires(loc, ECompatibilityProfile, 450, nullptr, op); - } -} - -// Call for any operation needing GLSL float16 data-type support. -void TParseVersions::float16Check(const TSourceLoc& loc, const char* op, bool builtIn) -{ - if (! builtIn) { - requireExtensions(loc, 1, &E_GL_AMD_gpu_shader_half_float, "shader half float"); - requireProfile(loc, ECoreProfile | ECompatibilityProfile, op); - profileRequires(loc, ECoreProfile, 450, nullptr, op); - profileRequires(loc, ECompatibilityProfile, 450, nullptr, op); - } -} -#endif - -// Call for any operation needing GLSL 64-bit integer data-type support. -void TParseVersions::int64Check(const TSourceLoc& loc, const char* op, bool builtIn) -{ - if (! builtIn) { - requireExtensions(loc, 1, &E_GL_ARB_gpu_shader_int64, "shader int64"); - requireProfile(loc, ECoreProfile | ECompatibilityProfile, op); - profileRequires(loc, ECoreProfile, 450, nullptr, op); - profileRequires(loc, ECompatibilityProfile, 450, nullptr, op); - } -} - -// Call for any operation removed because SPIR-V is in use. -void TParseVersions::spvRemoved(const TSourceLoc& loc, const char* op) -{ - if (spvVersion.spv != 0) - error(loc, "not allowed when generating SPIR-V", op, ""); -} - -// Call for any operation removed because Vulkan SPIR-V is being generated. -void TParseVersions::vulkanRemoved(const TSourceLoc& loc, const char* op) -{ - if (spvVersion.vulkan >= 100) - error(loc, "not allowed when using GLSL for Vulkan", op, ""); -} - -// Call for any operation that requires Vulkan. -void TParseVersions::requireVulkan(const TSourceLoc& loc, const char* op) -{ - if (spvVersion.vulkan == 0) - error(loc, "only allowed when using GLSL for Vulkan", op, ""); -} - -// Call for any operation that requires SPIR-V. 
-void TParseVersions::requireSpv(const TSourceLoc& loc, const char* op) -{ - if (spvVersion.spv == 0) - error(loc, "only allowed when generating SPIR-V", op, ""); -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/Versions.h b/deps/glslang/glslang/MachineIndependent/Versions.h deleted file mode 100644 index 78a9b286..00000000 --- a/deps/glslang/glslang/MachineIndependent/Versions.h +++ /dev/null @@ -1,249 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2012-2013 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// -#ifndef _VERSIONS_INCLUDED_ -#define _VERSIONS_INCLUDED_ - -// -// Help manage multiple profiles, versions, extensions etc. -// - -// -// Profiles are set up for masking operations, so queries can be done on multiple -// profiles at the same time. -// -// Don't maintain an ordinal set of enums (0,1,2,3...) to avoid all possible -// defects from mixing the two different forms. -// -typedef enum { - EBadProfile = 0, - ENoProfile = (1 << 0), // only for desktop, before profiles showed up - ECoreProfile = (1 << 1), - ECompatibilityProfile = (1 << 2), - EEsProfile = (1 << 3) -} EProfile; - -namespace glslang { - -// -// Map from profile enum to externally readable text name. -// -inline const char* ProfileName(EProfile profile) -{ - switch (profile) { - case ENoProfile: return "none"; - case ECoreProfile: return "core"; - case ECompatibilityProfile: return "compatibility"; - case EEsProfile: return "es"; - default: return "unknown profile"; - } -} - -// -// What source rules, validation rules, target language, etc. are needed -// desired for SPIR-V? -// -// 0 means a target or rule set is not enabled (ignore rules from that entity). -// Non-0 means to apply semantic rules arising from that version of its rule set. -// The union of all requested rule sets will be applied. 
-// -struct SpvVersion { - SpvVersion() : spv(0), vulkanGlsl(0), vulkan(0), openGl(0) {} - unsigned int spv; // the version of SPIR-V to target, as defined by "word 1" of the SPIR-V binary header - int vulkanGlsl; // the version of GLSL semantics for Vulkan, from GL_KHR_vulkan_glsl, for "#define VULKAN XXX" - int vulkan; // the version of Vulkan, for which SPIR-V execution environment rules to use (100 means 1.0) - int openGl; // the version of GLSL semantics for OpenGL, from GL_ARB_gl_spirv, for "#define GL_SPIRV XXX" -}; - -// -// The behaviors from the GLSL "#extension extension_name : behavior" -// -typedef enum { - EBhMissing = 0, - EBhRequire, - EBhEnable, - EBhWarn, - EBhDisable, - EBhDisablePartial // use as initial state of an extension that is only partially implemented -} TExtensionBehavior; - -// -// Symbolic names for extensions. Strings may be directly used when calling the -// functions, but better to have the compiler do spelling checks. -// -const char* const E_GL_OES_texture_3D = "GL_OES_texture_3D"; -const char* const E_GL_OES_standard_derivatives = "GL_OES_standard_derivatives"; -const char* const E_GL_EXT_frag_depth = "GL_EXT_frag_depth"; -const char* const E_GL_OES_EGL_image_external = "GL_OES_EGL_image_external"; -const char* const E_GL_EXT_shader_texture_lod = "GL_EXT_shader_texture_lod"; -const char* const E_GL_EXT_shadow_samplers = "GL_EXT_shadow_samplers"; - -const char* const E_GL_ARB_texture_rectangle = "GL_ARB_texture_rectangle"; -const char* const E_GL_3DL_array_objects = "GL_3DL_array_objects"; -const char* const E_GL_ARB_shading_language_420pack = "GL_ARB_shading_language_420pack"; -const char* const E_GL_ARB_texture_gather = "GL_ARB_texture_gather"; -const char* const E_GL_ARB_gpu_shader5 = "GL_ARB_gpu_shader5"; -const char* const E_GL_ARB_separate_shader_objects = "GL_ARB_separate_shader_objects"; -const char* const E_GL_ARB_compute_shader = "GL_ARB_compute_shader"; -const char* const E_GL_ARB_tessellation_shader = "GL_ARB_tessellation_shader"; -const char* const E_GL_ARB_enhanced_layouts = "GL_ARB_enhanced_layouts"; -const char* const E_GL_ARB_texture_cube_map_array = "GL_ARB_texture_cube_map_array"; -const char* const E_GL_ARB_shader_texture_lod = "GL_ARB_shader_texture_lod"; -const char* const E_GL_ARB_explicit_attrib_location = "GL_ARB_explicit_attrib_location"; -const char* const E_GL_ARB_shader_image_load_store = "GL_ARB_shader_image_load_store"; -const char* const E_GL_ARB_shader_atomic_counters = "GL_ARB_shader_atomic_counters"; -const char* const E_GL_ARB_shader_draw_parameters = "GL_ARB_shader_draw_parameters"; -const char* const E_GL_ARB_shader_group_vote = "GL_ARB_shader_group_vote"; -const char* const E_GL_ARB_derivative_control = "GL_ARB_derivative_control"; -const char* const E_GL_ARB_shader_texture_image_samples = "GL_ARB_shader_texture_image_samples"; -const char* const E_GL_ARB_viewport_array = "GL_ARB_viewport_array"; -const char* const E_GL_ARB_gpu_shader_int64 = "GL_ARB_gpu_shader_int64"; -const char* const E_GL_ARB_shader_ballot = "GL_ARB_shader_ballot"; -const char* const E_GL_ARB_sparse_texture2 = "GL_ARB_sparse_texture2"; -const char* const E_GL_ARB_sparse_texture_clamp = "GL_ARB_sparse_texture_clamp"; -const char* const E_GL_ARB_shader_stencil_export = "GL_ARB_shader_stencil_export"; -// const char* const E_GL_ARB_cull_distance = "GL_ARB_cull_distance"; // present for 4.5, but need extension control over block members -const char* const E_GL_ARB_post_depth_coverage = "GL_ARB_post_depth_coverage"; -const char* const 
E_GL_ARB_shader_viewport_layer_array = "GL_ARB_shader_viewport_layer_array"; - -const char* const E_GL_EXT_shader_non_constant_global_initializers = "GL_EXT_shader_non_constant_global_initializers"; -const char* const E_GL_EXT_shader_image_load_formatted = "GL_EXT_shader_image_load_formatted"; - -// EXT extensions -const char* const E_GL_EXT_device_group = "GL_EXT_device_group"; -const char* const E_GL_EXT_multiview = "GL_EXT_multiview"; -const char* const E_GL_EXT_post_depth_coverage = "GL_EXT_post_depth_coverage"; - -// Arrays of extensions for the above viewportEXTs duplications - -const char* const post_depth_coverageEXTs[] = { E_GL_ARB_post_depth_coverage, E_GL_EXT_post_depth_coverage }; -const int Num_post_depth_coverageEXTs = sizeof(post_depth_coverageEXTs) / sizeof(post_depth_coverageEXTs[0]); - -// OVR extensions -const char* const E_GL_OVR_multiview = "GL_OVR_multiview"; -const char* const E_GL_OVR_multiview2 = "GL_OVR_multiview2"; - -const char* const OVR_multiview_EXTs[] = { E_GL_OVR_multiview, E_GL_OVR_multiview2 }; -const int Num_OVR_multiview_EXTs = sizeof(OVR_multiview_EXTs) / sizeof(OVR_multiview_EXTs[0]); - -// #line and #include -const char* const E_GL_GOOGLE_cpp_style_line_directive = "GL_GOOGLE_cpp_style_line_directive"; -const char* const E_GL_GOOGLE_include_directive = "GL_GOOGLE_include_directive"; - -#ifdef AMD_EXTENSIONS -const char* const E_GL_AMD_shader_ballot = "GL_AMD_shader_ballot"; -const char* const E_GL_AMD_shader_trinary_minmax = "GL_AMD_shader_trinary_minmax"; -const char* const E_GL_AMD_shader_explicit_vertex_parameter = "GL_AMD_shader_explicit_vertex_parameter"; -const char* const E_GL_AMD_gcn_shader = "GL_AMD_gcn_shader"; -const char* const E_GL_AMD_gpu_shader_half_float = "GL_AMD_gpu_shader_half_float"; -const char* const E_GL_AMD_texture_gather_bias_lod = "GL_AMD_texture_gather_bias_lod"; -const char* const E_GL_AMD_gpu_shader_int16 = "GL_AMD_gpu_shader_int16"; -const char* const E_GL_AMD_shader_image_load_store_lod = "GL_AMD_shader_image_load_store_lod"; -#endif - -#ifdef NV_EXTENSIONS - -const char* const E_GL_NV_sample_mask_override_coverage = "GL_NV_sample_mask_override_coverage"; -const char* const E_SPV_NV_geometry_shader_passthrough = "GL_NV_geometry_shader_passthrough"; -const char* const E_GL_NV_viewport_array2 = "GL_NV_viewport_array2"; -const char* const E_GL_NV_stereo_view_rendering = "GL_NV_stereo_view_rendering"; -const char* const E_GL_NVX_multiview_per_view_attributes = "GL_NVX_multiview_per_view_attributes"; - -// Arrays of extensions for the above viewportEXTs duplications - -const char* const viewportEXTs[] = { E_GL_ARB_shader_viewport_layer_array, E_GL_NV_viewport_array2 }; -const int Num_viewportEXTs = sizeof(viewportEXTs) / sizeof(viewportEXTs[0]); -#endif - -// AEP -const char* const E_GL_ANDROID_extension_pack_es31a = "GL_ANDROID_extension_pack_es31a"; -const char* const E_GL_KHR_blend_equation_advanced = "GL_KHR_blend_equation_advanced"; -const char* const E_GL_OES_sample_variables = "GL_OES_sample_variables"; -const char* const E_GL_OES_shader_image_atomic = "GL_OES_shader_image_atomic"; -const char* const E_GL_OES_shader_multisample_interpolation = "GL_OES_shader_multisample_interpolation"; -const char* const E_GL_OES_texture_storage_multisample_2d_array = "GL_OES_texture_storage_multisample_2d_array"; -const char* const E_GL_EXT_geometry_shader = "GL_EXT_geometry_shader"; -const char* const E_GL_EXT_geometry_point_size = "GL_EXT_geometry_point_size"; -const char* const E_GL_EXT_gpu_shader5 = "GL_EXT_gpu_shader5"; -const 
char* const E_GL_EXT_primitive_bounding_box = "GL_EXT_primitive_bounding_box"; -const char* const E_GL_EXT_shader_io_blocks = "GL_EXT_shader_io_blocks"; -const char* const E_GL_EXT_tessellation_shader = "GL_EXT_tessellation_shader"; -const char* const E_GL_EXT_tessellation_point_size = "GL_EXT_tessellation_point_size"; -const char* const E_GL_EXT_texture_buffer = "GL_EXT_texture_buffer"; -const char* const E_GL_EXT_texture_cube_map_array = "GL_EXT_texture_cube_map_array"; - -// OES matching AEP -const char* const E_GL_OES_geometry_shader = "GL_OES_geometry_shader"; -const char* const E_GL_OES_geometry_point_size = "GL_OES_geometry_point_size"; -const char* const E_GL_OES_gpu_shader5 = "GL_OES_gpu_shader5"; -const char* const E_GL_OES_primitive_bounding_box = "GL_OES_primitive_bounding_box"; -const char* const E_GL_OES_shader_io_blocks = "GL_OES_shader_io_blocks"; -const char* const E_GL_OES_tessellation_shader = "GL_OES_tessellation_shader"; -const char* const E_GL_OES_tessellation_point_size = "GL_OES_tessellation_point_size"; -const char* const E_GL_OES_texture_buffer = "GL_OES_texture_buffer"; -const char* const E_GL_OES_texture_cube_map_array = "GL_OES_texture_cube_map_array"; - -// Arrays of extensions for the above AEP duplications - -const char* const AEP_geometry_shader[] = { E_GL_EXT_geometry_shader, E_GL_OES_geometry_shader }; -const int Num_AEP_geometry_shader = sizeof(AEP_geometry_shader)/sizeof(AEP_geometry_shader[0]); - -const char* const AEP_geometry_point_size[] = { E_GL_EXT_geometry_point_size, E_GL_OES_geometry_point_size }; -const int Num_AEP_geometry_point_size = sizeof(AEP_geometry_point_size)/sizeof(AEP_geometry_point_size[0]); - -const char* const AEP_gpu_shader5[] = { E_GL_EXT_gpu_shader5, E_GL_OES_gpu_shader5 }; -const int Num_AEP_gpu_shader5 = sizeof(AEP_gpu_shader5)/sizeof(AEP_gpu_shader5[0]); - -const char* const AEP_primitive_bounding_box[] = { E_GL_EXT_primitive_bounding_box, E_GL_OES_primitive_bounding_box }; -const int Num_AEP_primitive_bounding_box = sizeof(AEP_primitive_bounding_box)/sizeof(AEP_primitive_bounding_box[0]); - -const char* const AEP_shader_io_blocks[] = { E_GL_EXT_shader_io_blocks, E_GL_OES_shader_io_blocks }; -const int Num_AEP_shader_io_blocks = sizeof(AEP_shader_io_blocks)/sizeof(AEP_shader_io_blocks[0]); - -const char* const AEP_tessellation_shader[] = { E_GL_EXT_tessellation_shader, E_GL_OES_tessellation_shader }; -const int Num_AEP_tessellation_shader = sizeof(AEP_tessellation_shader)/sizeof(AEP_tessellation_shader[0]); - -const char* const AEP_tessellation_point_size[] = { E_GL_EXT_tessellation_point_size, E_GL_OES_tessellation_point_size }; -const int Num_AEP_tessellation_point_size = sizeof(AEP_tessellation_point_size)/sizeof(AEP_tessellation_point_size[0]); - -const char* const AEP_texture_buffer[] = { E_GL_EXT_texture_buffer, E_GL_OES_texture_buffer }; -const int Num_AEP_texture_buffer = sizeof(AEP_texture_buffer)/sizeof(AEP_texture_buffer[0]); - -const char* const AEP_texture_cube_map_array[] = { E_GL_EXT_texture_cube_map_array, E_GL_OES_texture_cube_map_array }; -const int Num_AEP_texture_cube_map_array = sizeof(AEP_texture_cube_map_array)/sizeof(AEP_texture_cube_map_array[0]); - -} // end namespace glslang - -#endif // _VERSIONS_INCLUDED_ diff --git a/deps/glslang/glslang/MachineIndependent/gl_types.h b/deps/glslang/glslang/MachineIndependent/gl_types.h deleted file mode 100644 index ae00ae57..00000000 --- a/deps/glslang/glslang/MachineIndependent/gl_types.h +++ /dev/null @@ -1,180 +0,0 @@ -/* -** Copyright (c) 2013 The Khronos 
Group Inc. -** -** Permission is hereby granted, free of charge, to any person obtaining a -** copy of this software and/or associated documentation files (the -** "Materials"), to deal in the Materials without restriction, including -** without limitation the rights to use, copy, modify, merge, publish, -** distribute, sublicense, and/or sell copies of the Materials, and to -** permit persons to whom the Materials are furnished to do so, subject to -** the following conditions: -** -** The above copyright notice and this permission notice shall be included -** in all copies or substantial portions of the Materials. -** -** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. -*/ - -#pragma once - -#define GL_FLOAT 0x1406 -#define GL_FLOAT_VEC2 0x8B50 -#define GL_FLOAT_VEC3 0x8B51 -#define GL_FLOAT_VEC4 0x8B52 - -#define GL_DOUBLE 0x140A -#define GL_DOUBLE_VEC2 0x8FFC -#define GL_DOUBLE_VEC3 0x8FFD -#define GL_DOUBLE_VEC4 0x8FFE - -#define GL_INT 0x1404 -#define GL_INT_VEC2 0x8B53 -#define GL_INT_VEC3 0x8B54 -#define GL_INT_VEC4 0x8B55 - -#define GL_UNSIGNED_INT 0x1405 -#define GL_UNSIGNED_INT_VEC2 0x8DC6 -#define GL_UNSIGNED_INT_VEC3 0x8DC7 -#define GL_UNSIGNED_INT_VEC4 0x8DC8 - -#define GL_INT64_ARB 0x140E -#define GL_INT64_VEC2_ARB 0x8FE9 -#define GL_INT64_VEC3_ARB 0x8FEA -#define GL_INT64_VEC4_ARB 0x8FEB - -#define GL_UNSIGNED_INT64_ARB 0x140F -#define GL_UNSIGNED_INT64_VEC2_ARB 0x8FE5 -#define GL_UNSIGNED_INT64_VEC3_ARB 0x8FE6 -#define GL_UNSIGNED_INT64_VEC4_ARB 0x8FE7 - -#define GL_BOOL 0x8B56 -#define GL_BOOL_VEC2 0x8B57 -#define GL_BOOL_VEC3 0x8B58 -#define GL_BOOL_VEC4 0x8B59 - -#define GL_FLOAT_MAT2 0x8B5A -#define GL_FLOAT_MAT3 0x8B5B -#define GL_FLOAT_MAT4 0x8B5C -#define GL_FLOAT_MAT2x3 0x8B65 -#define GL_FLOAT_MAT2x4 0x8B66 -#define GL_FLOAT_MAT3x2 0x8B67 -#define GL_FLOAT_MAT3x4 0x8B68 -#define GL_FLOAT_MAT4x2 0x8B69 -#define GL_FLOAT_MAT4x3 0x8B6A - -#define GL_DOUBLE_MAT2 0x8F46 -#define GL_DOUBLE_MAT3 0x8F47 -#define GL_DOUBLE_MAT4 0x8F48 -#define GL_DOUBLE_MAT2x3 0x8F49 -#define GL_DOUBLE_MAT2x4 0x8F4A -#define GL_DOUBLE_MAT3x2 0x8F4B -#define GL_DOUBLE_MAT3x4 0x8F4C -#define GL_DOUBLE_MAT4x2 0x8F4D -#define GL_DOUBLE_MAT4x3 0x8F4E - -#ifdef AMD_EXTENSIONS -// Those constants are borrowed from extension NV_gpu_shader5 -#define GL_FLOAT16_NV 0x8FF8 -#define GL_FLOAT16_VEC2_NV 0x8FF9 -#define GL_FLOAT16_VEC3_NV 0x8FFA -#define GL_FLOAT16_VEC4_NV 0x8FFB - -#define GL_FLOAT16_MAT2_AMD 0x91C5 -#define GL_FLOAT16_MAT3_AMD 0x91C6 -#define GL_FLOAT16_MAT4_AMD 0x91C7 -#define GL_FLOAT16_MAT2x3_AMD 0x91C8 -#define GL_FLOAT16_MAT2x4_AMD 0x91C9 -#define GL_FLOAT16_MAT3x2_AMD 0x91CA -#define GL_FLOAT16_MAT3x4_AMD 0x91CB -#define GL_FLOAT16_MAT4x2_AMD 0x91CC -#define GL_FLOAT16_MAT4x3_AMD 0x91CD -#endif - -#define GL_SAMPLER_1D 0x8B5D -#define GL_SAMPLER_2D 0x8B5E -#define GL_SAMPLER_3D 0x8B5F -#define GL_SAMPLER_CUBE 0x8B60 -#define GL_SAMPLER_BUFFER 0x8DC2 -#define GL_SAMPLER_1D_ARRAY 0x8DC0 -#define GL_SAMPLER_2D_ARRAY 0x8DC1 -#define GL_SAMPLER_1D_ARRAY_SHADOW 0x8DC3 -#define GL_SAMPLER_2D_ARRAY_SHADOW 0x8DC4 -#define GL_SAMPLER_CUBE_SHADOW 0x8DC5 
-#define GL_SAMPLER_1D_SHADOW 0x8B61 -#define GL_SAMPLER_2D_SHADOW 0x8B62 -#define GL_SAMPLER_2D_RECT 0x8B63 -#define GL_SAMPLER_2D_RECT_SHADOW 0x8B64 -#define GL_SAMPLER_2D_MULTISAMPLE 0x9108 -#define GL_SAMPLER_2D_MULTISAMPLE_ARRAY 0x910B -#define GL_SAMPLER_CUBE_MAP_ARRAY 0x900C -#define GL_SAMPLER_CUBE_MAP_ARRAY_SHADOW 0x900D -#define GL_SAMPLER_CUBE_MAP_ARRAY_ARB 0x900C -#define GL_SAMPLER_CUBE_MAP_ARRAY_SHADOW_ARB 0x900D - -#define GL_INT_SAMPLER_1D 0x8DC9 -#define GL_INT_SAMPLER_2D 0x8DCA -#define GL_INT_SAMPLER_3D 0x8DCB -#define GL_INT_SAMPLER_CUBE 0x8DCC -#define GL_INT_SAMPLER_1D_ARRAY 0x8DCE -#define GL_INT_SAMPLER_2D_ARRAY 0x8DCF -#define GL_INT_SAMPLER_2D_RECT 0x8DCD -#define GL_INT_SAMPLER_BUFFER 0x8DD0 -#define GL_INT_SAMPLER_2D_MULTISAMPLE 0x9109 -#define GL_INT_SAMPLER_2D_MULTISAMPLE_ARRAY 0x910C -#define GL_INT_SAMPLER_CUBE_MAP_ARRAY 0x900E -#define GL_INT_SAMPLER_CUBE_MAP_ARRAY_ARB 0x900E - -#define GL_UNSIGNED_INT_SAMPLER_1D 0x8DD1 -#define GL_UNSIGNED_INT_SAMPLER_2D 0x8DD2 -#define GL_UNSIGNED_INT_SAMPLER_3D 0x8DD3 -#define GL_UNSIGNED_INT_SAMPLER_CUBE 0x8DD4 -#define GL_UNSIGNED_INT_SAMPLER_1D_ARRAY 0x8DD6 -#define GL_UNSIGNED_INT_SAMPLER_2D_ARRAY 0x8DD7 -#define GL_UNSIGNED_INT_SAMPLER_2D_RECT 0x8DD5 -#define GL_UNSIGNED_INT_SAMPLER_BUFFER 0x8DD8 -#define GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE_ARRAY 0x910D -#define GL_UNSIGNED_INT_SAMPLER_CUBE_MAP_ARRAY 0x900F -#define GL_UNSIGNED_INT_SAMPLER_CUBE_MAP_ARRAY_ARB 0x900F -#define GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE 0x910A - -#define GL_IMAGE_1D 0x904C -#define GL_IMAGE_2D 0x904D -#define GL_IMAGE_3D 0x904E -#define GL_IMAGE_2D_RECT 0x904F -#define GL_IMAGE_CUBE 0x9050 -#define GL_IMAGE_BUFFER 0x9051 -#define GL_IMAGE_1D_ARRAY 0x9052 -#define GL_IMAGE_2D_ARRAY 0x9053 -#define GL_IMAGE_CUBE_MAP_ARRAY 0x9054 -#define GL_IMAGE_2D_MULTISAMPLE 0x9055 -#define GL_IMAGE_2D_MULTISAMPLE_ARRAY 0x9056 -#define GL_INT_IMAGE_1D 0x9057 -#define GL_INT_IMAGE_2D 0x9058 -#define GL_INT_IMAGE_3D 0x9059 -#define GL_INT_IMAGE_2D_RECT 0x905A -#define GL_INT_IMAGE_CUBE 0x905B -#define GL_INT_IMAGE_BUFFER 0x905C -#define GL_INT_IMAGE_1D_ARRAY 0x905D -#define GL_INT_IMAGE_2D_ARRAY 0x905E -#define GL_INT_IMAGE_CUBE_MAP_ARRAY 0x905F -#define GL_INT_IMAGE_2D_MULTISAMPLE 0x9060 -#define GL_INT_IMAGE_2D_MULTISAMPLE_ARRAY 0x9061 -#define GL_UNSIGNED_INT_IMAGE_1D 0x9062 -#define GL_UNSIGNED_INT_IMAGE_2D 0x9063 -#define GL_UNSIGNED_INT_IMAGE_3D 0x9064 -#define GL_UNSIGNED_INT_IMAGE_2D_RECT 0x9065 -#define GL_UNSIGNED_INT_IMAGE_CUBE 0x9066 -#define GL_UNSIGNED_INT_IMAGE_BUFFER 0x9067 -#define GL_UNSIGNED_INT_IMAGE_1D_ARRAY 0x9068 -#define GL_UNSIGNED_INT_IMAGE_2D_ARRAY 0x9069 -#define GL_UNSIGNED_INT_IMAGE_CUBE_MAP_ARRAY 0x906A -#define GL_UNSIGNED_INT_IMAGE_2D_MULTISAMPLE 0x906B -#define GL_UNSIGNED_INT_IMAGE_2D_MULTISAMPLE_ARRAY 0x906C - -#define GL_UNSIGNED_INT_ATOMIC_COUNTER 0x92DB diff --git a/deps/glslang/glslang/MachineIndependent/glslang.y b/deps/glslang/glslang/MachineIndependent/glslang.y deleted file mode 100644 index cd6df7b0..00000000 --- a/deps/glslang/glslang/MachineIndependent/glslang.y +++ /dev/null @@ -1,2918 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2012-2013 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -/** - * This is bison grammar and productions for parsing all versions of the - * GLSL shading languages. - */ -%{ - -/* Based on: -ANSI C Yacc grammar - -In 1985, Jeff Lee published his Yacc grammar (which is accompanied by a -matching Lex specification) for the April 30, 1985 draft version of the -ANSI C standard. Tom Stockfisch reposted it to net.sources in 1987; that -original, as mentioned in the answer to question 17.25 of the comp.lang.c -FAQ, can be ftp'ed from ftp.uu.net, file usenet/net.sources/ansi.c.grammar.Z. - -I intend to keep this version as close to the current C Standard grammar as -possible; please let me know if you discover discrepancies. 
- -Jutta Degener, 1995 -*/ - -#include "SymbolTable.h" -#include "ParseHelper.h" -#include "../Public/ShaderLang.h" - -using namespace glslang; - -%} - -%define parse.error verbose - -%union { - struct { - glslang::TSourceLoc loc; - union { - glslang::TString *string; - int i; - unsigned int u; - long long i64; - unsigned long long u64; - bool b; - double d; - }; - glslang::TSymbol* symbol; - } lex; - struct { - glslang::TSourceLoc loc; - glslang::TOperator op; - union { - TIntermNode* intermNode; - glslang::TIntermNodePair nodePair; - glslang::TIntermTyped* intermTypedNode; - }; - union { - glslang::TPublicType type; - glslang::TFunction* function; - glslang::TParameter param; - glslang::TTypeLoc typeLine; - glslang::TTypeList* typeList; - glslang::TArraySizes* arraySizes; - glslang::TIdentifierList* identifierList; - }; - } interm; -} - -%{ - -/* windows only pragma */ -#ifdef _MSC_VER - #pragma warning(disable : 4065) - #pragma warning(disable : 4127) - #pragma warning(disable : 4244) -#endif - -#define parseContext (*pParseContext) -#define yyerror(context, msg) context->parserError(msg) - -extern int yylex(YYSTYPE*, TParseContext&); - -%} - -%parse-param {glslang::TParseContext* pParseContext} -%lex-param {parseContext} -%pure-parser // enable thread safety -%expect 1 // One shift reduce conflict because of if | else - -%token ATTRIBUTE VARYING -%token CONST BOOL FLOAT DOUBLE INT UINT INT64_T UINT64_T INT16_T UINT16_T FLOAT16_T -%token BREAK CONTINUE DO ELSE FOR IF DISCARD RETURN SWITCH CASE DEFAULT SUBROUTINE -%token BVEC2 BVEC3 BVEC4 IVEC2 IVEC3 IVEC4 I64VEC2 I64VEC3 I64VEC4 UVEC2 UVEC3 UVEC4 U64VEC2 U64VEC3 U64VEC4 VEC2 VEC3 VEC4 -%token MAT2 MAT3 MAT4 CENTROID IN OUT INOUT -%token UNIFORM PATCH SAMPLE BUFFER SHARED -%token COHERENT VOLATILE RESTRICT READONLY WRITEONLY -%token DVEC2 DVEC3 DVEC4 DMAT2 DMAT3 DMAT4 -%token F16VEC2 F16VEC3 F16VEC4 F16MAT2 F16MAT3 F16MAT4 -%token I16VEC2 I16VEC3 I16VEC4 U16VEC2 U16VEC3 U16VEC4 -%token NOPERSPECTIVE FLAT SMOOTH LAYOUT __EXPLICITINTERPAMD - -%token MAT2X2 MAT2X3 MAT2X4 -%token MAT3X2 MAT3X3 MAT3X4 -%token MAT4X2 MAT4X3 MAT4X4 -%token DMAT2X2 DMAT2X3 DMAT2X4 -%token DMAT3X2 DMAT3X3 DMAT3X4 -%token DMAT4X2 DMAT4X3 DMAT4X4 -%token F16MAT2X2 F16MAT2X3 F16MAT2X4 -%token F16MAT3X2 F16MAT3X3 F16MAT3X4 -%token F16MAT4X2 F16MAT4X3 F16MAT4X4 -%token ATOMIC_UINT - -// combined image/sampler -%token SAMPLER1D SAMPLER2D SAMPLER3D SAMPLERCUBE SAMPLER1DSHADOW SAMPLER2DSHADOW -%token SAMPLERCUBESHADOW SAMPLER1DARRAY SAMPLER2DARRAY SAMPLER1DARRAYSHADOW -%token SAMPLER2DARRAYSHADOW ISAMPLER1D ISAMPLER2D ISAMPLER3D ISAMPLERCUBE -%token ISAMPLER1DARRAY ISAMPLER2DARRAY USAMPLER1D USAMPLER2D USAMPLER3D -%token USAMPLERCUBE USAMPLER1DARRAY USAMPLER2DARRAY -%token SAMPLER2DRECT SAMPLER2DRECTSHADOW ISAMPLER2DRECT USAMPLER2DRECT -%token SAMPLERBUFFER ISAMPLERBUFFER USAMPLERBUFFER -%token SAMPLERCUBEARRAY SAMPLERCUBEARRAYSHADOW -%token ISAMPLERCUBEARRAY USAMPLERCUBEARRAY -%token SAMPLER2DMS ISAMPLER2DMS USAMPLER2DMS -%token SAMPLER2DMSARRAY ISAMPLER2DMSARRAY USAMPLER2DMSARRAY -%token SAMPLEREXTERNALOES - -// pure sampler -%token SAMPLER SAMPLERSHADOW - -// texture without sampler -%token TEXTURE1D TEXTURE2D TEXTURE3D TEXTURECUBE -%token TEXTURE1DARRAY TEXTURE2DARRAY -%token ITEXTURE1D ITEXTURE2D ITEXTURE3D ITEXTURECUBE -%token ITEXTURE1DARRAY ITEXTURE2DARRAY UTEXTURE1D UTEXTURE2D UTEXTURE3D -%token UTEXTURECUBE UTEXTURE1DARRAY UTEXTURE2DARRAY -%token TEXTURE2DRECT ITEXTURE2DRECT UTEXTURE2DRECT -%token TEXTUREBUFFER ITEXTUREBUFFER UTEXTUREBUFFER -%token 
TEXTURECUBEARRAY ITEXTURECUBEARRAY UTEXTURECUBEARRAY -%token TEXTURE2DMS ITEXTURE2DMS UTEXTURE2DMS -%token TEXTURE2DMSARRAY ITEXTURE2DMSARRAY UTEXTURE2DMSARRAY - -// input attachments -%token SUBPASSINPUT SUBPASSINPUTMS ISUBPASSINPUT ISUBPASSINPUTMS USUBPASSINPUT USUBPASSINPUTMS - -%token IMAGE1D IIMAGE1D UIMAGE1D IMAGE2D IIMAGE2D -%token UIMAGE2D IMAGE3D IIMAGE3D UIMAGE3D -%token IMAGE2DRECT IIMAGE2DRECT UIMAGE2DRECT -%token IMAGECUBE IIMAGECUBE UIMAGECUBE -%token IMAGEBUFFER IIMAGEBUFFER UIMAGEBUFFER -%token IMAGE1DARRAY IIMAGE1DARRAY UIMAGE1DARRAY -%token IMAGE2DARRAY IIMAGE2DARRAY UIMAGE2DARRAY -%token IMAGECUBEARRAY IIMAGECUBEARRAY UIMAGECUBEARRAY -%token IMAGE2DMS IIMAGE2DMS UIMAGE2DMS -%token IMAGE2DMSARRAY IIMAGE2DMSARRAY UIMAGE2DMSARRAY - -%token STRUCT VOID WHILE - -%token IDENTIFIER TYPE_NAME -%token FLOATCONSTANT DOUBLECONSTANT INTCONSTANT UINTCONSTANT INT64CONSTANT UINT64CONSTANT INT16CONSTANT UINT16CONSTANT BOOLCONSTANT FLOAT16CONSTANT -%token LEFT_OP RIGHT_OP -%token INC_OP DEC_OP LE_OP GE_OP EQ_OP NE_OP -%token AND_OP OR_OP XOR_OP MUL_ASSIGN DIV_ASSIGN ADD_ASSIGN -%token MOD_ASSIGN LEFT_ASSIGN RIGHT_ASSIGN AND_ASSIGN XOR_ASSIGN OR_ASSIGN -%token SUB_ASSIGN - -%token LEFT_PAREN RIGHT_PAREN LEFT_BRACKET RIGHT_BRACKET LEFT_BRACE RIGHT_BRACE DOT -%token COMMA COLON EQUAL SEMICOLON BANG DASH TILDE PLUS STAR SLASH PERCENT -%token LEFT_ANGLE RIGHT_ANGLE VERTICAL_BAR CARET AMPERSAND QUESTION - -%token INVARIANT PRECISE -%token HIGH_PRECISION MEDIUM_PRECISION LOW_PRECISION PRECISION - -%token PACKED RESOURCE SUPERP - -%type assignment_operator unary_operator -%type variable_identifier primary_expression postfix_expression -%type expression integer_expression assignment_expression -%type unary_expression multiplicative_expression additive_expression -%type relational_expression equality_expression -%type conditional_expression constant_expression -%type logical_or_expression logical_xor_expression logical_and_expression -%type shift_expression and_expression exclusive_or_expression inclusive_or_expression -%type function_call initializer initializer_list condition conditionopt - -%type translation_unit function_definition -%type statement simple_statement -%type statement_list switch_statement_list compound_statement -%type declaration_statement selection_statement expression_statement -%type switch_statement case_label -%type declaration external_declaration -%type for_init_statement compound_statement_no_new_scope -%type selection_rest_statement for_rest_statement -%type iteration_statement jump_statement statement_no_new_scope statement_scoped -%type single_declaration init_declarator_list - -%type parameter_declaration parameter_declarator parameter_type_specifier - -%type array_specifier -%type precise_qualifier invariant_qualifier interpolation_qualifier storage_qualifier precision_qualifier -%type layout_qualifier layout_qualifier_id_list layout_qualifier_id - -%type type_qualifier fully_specified_type type_specifier -%type single_type_qualifier -%type type_specifier_nonarray -%type struct_specifier -%type struct_declarator -%type struct_declarator_list struct_declaration struct_declaration_list type_name_list -%type block_structure -%type function_header function_declarator -%type function_header_with_parameters -%type function_call_header_with_parameters function_call_header_no_parameters function_call_generic function_prototype -%type function_call_or_method function_identifier function_call_header - -%type identifier_list - -%start translation_unit -%% - 
-variable_identifier - : IDENTIFIER { - $$ = parseContext.handleVariable($1.loc, $1.symbol, $1.string); - } - ; - -primary_expression - : variable_identifier { - $$ = $1; - } - | INTCONSTANT { - $$ = parseContext.intermediate.addConstantUnion($1.i, $1.loc, true); - } - | UINTCONSTANT { - parseContext.fullIntegerCheck($1.loc, "unsigned literal"); - $$ = parseContext.intermediate.addConstantUnion($1.u, $1.loc, true); - } - | INT64CONSTANT { - parseContext.int64Check($1.loc, "64-bit integer literal"); - $$ = parseContext.intermediate.addConstantUnion($1.i64, $1.loc, true); - } - | UINT64CONSTANT { - parseContext.int64Check($1.loc, "64-bit unsigned integer literal"); - $$ = parseContext.intermediate.addConstantUnion($1.u64, $1.loc, true); - } - | INT16CONSTANT { -#ifdef AMD_EXTENSIONS - parseContext.int16Check($1.loc, "16-bit integer literal"); - $$ = parseContext.intermediate.addConstantUnion((short)$1.i, $1.loc, true); -#endif - } - | UINT16CONSTANT { -#ifdef AMD_EXTENSIONS - parseContext.int16Check($1.loc, "16-bit unsigned integer literal"); - $$ = parseContext.intermediate.addConstantUnion((unsigned short)$1.u, $1.loc, true); -#endif - } - | FLOATCONSTANT { - $$ = parseContext.intermediate.addConstantUnion($1.d, EbtFloat, $1.loc, true); - } - | DOUBLECONSTANT { - parseContext.doubleCheck($1.loc, "double literal"); - $$ = parseContext.intermediate.addConstantUnion($1.d, EbtDouble, $1.loc, true); - } - | FLOAT16CONSTANT { -#ifdef AMD_EXTENSIONS - parseContext.float16Check($1.loc, "half float literal"); - $$ = parseContext.intermediate.addConstantUnion($1.d, EbtFloat16, $1.loc, true); -#endif - } - | BOOLCONSTANT { - $$ = parseContext.intermediate.addConstantUnion($1.b, $1.loc, true); - } - | LEFT_PAREN expression RIGHT_PAREN { - $$ = $2; - if ($$->getAsConstantUnion()) - $$->getAsConstantUnion()->setExpression(); - } - ; - -postfix_expression - : primary_expression { - $$ = $1; - } - | postfix_expression LEFT_BRACKET integer_expression RIGHT_BRACKET { - $$ = parseContext.handleBracketDereference($2.loc, $1, $3); - } - | function_call { - $$ = $1; - } - | postfix_expression DOT IDENTIFIER { - $$ = parseContext.handleDotDereference($3.loc, $1, *$3.string); - } - | postfix_expression INC_OP { - parseContext.variableCheck($1); - parseContext.lValueErrorCheck($2.loc, "++", $1); - $$ = parseContext.handleUnaryMath($2.loc, "++", EOpPostIncrement, $1); - } - | postfix_expression DEC_OP { - parseContext.variableCheck($1); - parseContext.lValueErrorCheck($2.loc, "--", $1); - $$ = parseContext.handleUnaryMath($2.loc, "--", EOpPostDecrement, $1); - } - ; - -integer_expression - : expression { - parseContext.integerCheck($1, "[]"); - $$ = $1; - } - ; - -function_call - : function_call_or_method { - $$ = parseContext.handleFunctionCall($1.loc, $1.function, $1.intermNode); - delete $1.function; - } - ; - -function_call_or_method - : function_call_generic { - $$ = $1; - } - ; - -function_call_generic - : function_call_header_with_parameters RIGHT_PAREN { - $$ = $1; - $$.loc = $2.loc; - } - | function_call_header_no_parameters RIGHT_PAREN { - $$ = $1; - $$.loc = $2.loc; - } - ; - -function_call_header_no_parameters - : function_call_header VOID { - $$ = $1; - } - | function_call_header { - $$ = $1; - } - ; - -function_call_header_with_parameters - : function_call_header assignment_expression { - TParameter param = { 0, new TType }; - param.type->shallowCopy($2->getType()); - $1.function->addParameter(param); - $$.function = $1.function; - $$.intermNode = $2; - } - | function_call_header_with_parameters COMMA 
assignment_expression { - TParameter param = { 0, new TType }; - param.type->shallowCopy($3->getType()); - $1.function->addParameter(param); - $$.function = $1.function; - $$.intermNode = parseContext.intermediate.growAggregate($1.intermNode, $3, $2.loc); - } - ; - -function_call_header - : function_identifier LEFT_PAREN { - $$ = $1; - } - ; - -// Grammar Note: Constructors look like functions, but are recognized as types. - -function_identifier - : type_specifier { - // Constructor - $$.intermNode = 0; - $$.function = parseContext.handleConstructorCall($1.loc, $1); - } - | postfix_expression { - // - // Should be a method or subroutine call, but we haven't recognized the arguments yet. - // - $$.function = 0; - $$.intermNode = 0; - - TIntermMethod* method = $1->getAsMethodNode(); - if (method) { - $$.function = new TFunction(&method->getMethodName(), TType(EbtInt), EOpArrayLength); - $$.intermNode = method->getObject(); - } else { - TIntermSymbol* symbol = $1->getAsSymbolNode(); - if (symbol) { - parseContext.reservedErrorCheck(symbol->getLoc(), symbol->getName()); - TFunction *function = new TFunction(&symbol->getName(), TType(EbtVoid)); - $$.function = function; - } else - parseContext.error($1->getLoc(), "function call, method, or subroutine call expected", "", ""); - } - - if ($$.function == 0) { - // error recover - TString empty(""); - $$.function = new TFunction(&empty, TType(EbtVoid), EOpNull); - } - } - ; - -unary_expression - : postfix_expression { - parseContext.variableCheck($1); - $$ = $1; - if (TIntermMethod* method = $1->getAsMethodNode()) - parseContext.error($1->getLoc(), "incomplete method syntax", method->getMethodName().c_str(), ""); - } - | INC_OP unary_expression { - parseContext.lValueErrorCheck($1.loc, "++", $2); - $$ = parseContext.handleUnaryMath($1.loc, "++", EOpPreIncrement, $2); - } - | DEC_OP unary_expression { - parseContext.lValueErrorCheck($1.loc, "--", $2); - $$ = parseContext.handleUnaryMath($1.loc, "--", EOpPreDecrement, $2); - } - | unary_operator unary_expression { - if ($1.op != EOpNull) { - char errorOp[2] = {0, 0}; - switch($1.op) { - case EOpNegative: errorOp[0] = '-'; break; - case EOpLogicalNot: errorOp[0] = '!'; break; - case EOpBitwiseNot: errorOp[0] = '~'; break; - default: break; // some compilers want this - } - $$ = parseContext.handleUnaryMath($1.loc, errorOp, $1.op, $2); - } else { - $$ = $2; - if ($$->getAsConstantUnion()) - $$->getAsConstantUnion()->setExpression(); - } - } - ; -// Grammar Note: No traditional style type casts. - -unary_operator - : PLUS { $$.loc = $1.loc; $$.op = EOpNull; } - | DASH { $$.loc = $1.loc; $$.op = EOpNegative; } - | BANG { $$.loc = $1.loc; $$.op = EOpLogicalNot; } - | TILDE { $$.loc = $1.loc; $$.op = EOpBitwiseNot; - parseContext.fullIntegerCheck($1.loc, "bitwise not"); } - ; -// Grammar Note: No '*' or '&' unary ops. Pointers are not supported. 
- -multiplicative_expression - : unary_expression { $$ = $1; } - | multiplicative_expression STAR unary_expression { - $$ = parseContext.handleBinaryMath($2.loc, "*", EOpMul, $1, $3); - if ($$ == 0) - $$ = $1; - } - | multiplicative_expression SLASH unary_expression { - $$ = parseContext.handleBinaryMath($2.loc, "/", EOpDiv, $1, $3); - if ($$ == 0) - $$ = $1; - } - | multiplicative_expression PERCENT unary_expression { - parseContext.fullIntegerCheck($2.loc, "%"); - $$ = parseContext.handleBinaryMath($2.loc, "%", EOpMod, $1, $3); - if ($$ == 0) - $$ = $1; - } - ; - -additive_expression - : multiplicative_expression { $$ = $1; } - | additive_expression PLUS multiplicative_expression { - $$ = parseContext.handleBinaryMath($2.loc, "+", EOpAdd, $1, $3); - if ($$ == 0) - $$ = $1; - } - | additive_expression DASH multiplicative_expression { - $$ = parseContext.handleBinaryMath($2.loc, "-", EOpSub, $1, $3); - if ($$ == 0) - $$ = $1; - } - ; - -shift_expression - : additive_expression { $$ = $1; } - | shift_expression LEFT_OP additive_expression { - parseContext.fullIntegerCheck($2.loc, "bit shift left"); - $$ = parseContext.handleBinaryMath($2.loc, "<<", EOpLeftShift, $1, $3); - if ($$ == 0) - $$ = $1; - } - | shift_expression RIGHT_OP additive_expression { - parseContext.fullIntegerCheck($2.loc, "bit shift right"); - $$ = parseContext.handleBinaryMath($2.loc, ">>", EOpRightShift, $1, $3); - if ($$ == 0) - $$ = $1; - } - ; - -relational_expression - : shift_expression { $$ = $1; } - | relational_expression LEFT_ANGLE shift_expression { - $$ = parseContext.handleBinaryMath($2.loc, "<", EOpLessThan, $1, $3); - if ($$ == 0) - $$ = parseContext.intermediate.addConstantUnion(false, $2.loc); - } - | relational_expression RIGHT_ANGLE shift_expression { - $$ = parseContext.handleBinaryMath($2.loc, ">", EOpGreaterThan, $1, $3); - if ($$ == 0) - $$ = parseContext.intermediate.addConstantUnion(false, $2.loc); - } - | relational_expression LE_OP shift_expression { - $$ = parseContext.handleBinaryMath($2.loc, "<=", EOpLessThanEqual, $1, $3); - if ($$ == 0) - $$ = parseContext.intermediate.addConstantUnion(false, $2.loc); - } - | relational_expression GE_OP shift_expression { - $$ = parseContext.handleBinaryMath($2.loc, ">=", EOpGreaterThanEqual, $1, $3); - if ($$ == 0) - $$ = parseContext.intermediate.addConstantUnion(false, $2.loc); - } - ; - -equality_expression - : relational_expression { $$ = $1; } - | equality_expression EQ_OP relational_expression { - parseContext.arrayObjectCheck($2.loc, $1->getType(), "array comparison"); - parseContext.opaqueCheck($2.loc, $1->getType(), "=="); - parseContext.specializationCheck($2.loc, $1->getType(), "=="); - $$ = parseContext.handleBinaryMath($2.loc, "==", EOpEqual, $1, $3); - if ($$ == 0) - $$ = parseContext.intermediate.addConstantUnion(false, $2.loc); - } - | equality_expression NE_OP relational_expression { - parseContext.arrayObjectCheck($2.loc, $1->getType(), "array comparison"); - parseContext.opaqueCheck($2.loc, $1->getType(), "!="); - parseContext.specializationCheck($2.loc, $1->getType(), "!="); - $$ = parseContext.handleBinaryMath($2.loc, "!=", EOpNotEqual, $1, $3); - if ($$ == 0) - $$ = parseContext.intermediate.addConstantUnion(false, $2.loc); - } - ; - -and_expression - : equality_expression { $$ = $1; } - | and_expression AMPERSAND equality_expression { - parseContext.fullIntegerCheck($2.loc, "bitwise and"); - $$ = parseContext.handleBinaryMath($2.loc, "&", EOpAnd, $1, $3); - if ($$ == 0) - $$ = $1; - } - ; - -exclusive_or_expression - : 
and_expression { $$ = $1; } - | exclusive_or_expression CARET and_expression { - parseContext.fullIntegerCheck($2.loc, "bitwise exclusive or"); - $$ = parseContext.handleBinaryMath($2.loc, "^", EOpExclusiveOr, $1, $3); - if ($$ == 0) - $$ = $1; - } - ; - -inclusive_or_expression - : exclusive_or_expression { $$ = $1; } - | inclusive_or_expression VERTICAL_BAR exclusive_or_expression { - parseContext.fullIntegerCheck($2.loc, "bitwise inclusive or"); - $$ = parseContext.handleBinaryMath($2.loc, "|", EOpInclusiveOr, $1, $3); - if ($$ == 0) - $$ = $1; - } - ; - -logical_and_expression - : inclusive_or_expression { $$ = $1; } - | logical_and_expression AND_OP inclusive_or_expression { - $$ = parseContext.handleBinaryMath($2.loc, "&&", EOpLogicalAnd, $1, $3); - if ($$ == 0) - $$ = parseContext.intermediate.addConstantUnion(false, $2.loc); - } - ; - -logical_xor_expression - : logical_and_expression { $$ = $1; } - | logical_xor_expression XOR_OP logical_and_expression { - $$ = parseContext.handleBinaryMath($2.loc, "^^", EOpLogicalXor, $1, $3); - if ($$ == 0) - $$ = parseContext.intermediate.addConstantUnion(false, $2.loc); - } - ; - -logical_or_expression - : logical_xor_expression { $$ = $1; } - | logical_or_expression OR_OP logical_xor_expression { - $$ = parseContext.handleBinaryMath($2.loc, "||", EOpLogicalOr, $1, $3); - if ($$ == 0) - $$ = parseContext.intermediate.addConstantUnion(false, $2.loc); - } - ; - -conditional_expression - : logical_or_expression { $$ = $1; } - | logical_or_expression QUESTION { - ++parseContext.controlFlowNestingLevel; - } - expression COLON assignment_expression { - --parseContext.controlFlowNestingLevel; - parseContext.boolCheck($2.loc, $1); - parseContext.rValueErrorCheck($2.loc, "?", $1); - parseContext.rValueErrorCheck($5.loc, ":", $4); - parseContext.rValueErrorCheck($5.loc, ":", $6); - $$ = parseContext.intermediate.addSelection($1, $4, $6, $2.loc); - if ($$ == 0) { - parseContext.binaryOpError($2.loc, ":", $4->getCompleteString(), $6->getCompleteString()); - $$ = $6; - } - } - ; - -assignment_expression - : conditional_expression { $$ = $1; } - | unary_expression assignment_operator assignment_expression { - parseContext.arrayObjectCheck($2.loc, $1->getType(), "array assignment"); - parseContext.opaqueCheck($2.loc, $1->getType(), "="); - parseContext.specializationCheck($2.loc, $1->getType(), "="); - parseContext.lValueErrorCheck($2.loc, "assign", $1); - parseContext.rValueErrorCheck($2.loc, "assign", $3); - $$ = parseContext.intermediate.addAssign($2.op, $1, $3, $2.loc); - if ($$ == 0) { - parseContext.assignError($2.loc, "assign", $1->getCompleteString(), $3->getCompleteString()); - $$ = $1; - } - } - ; - -assignment_operator - : EQUAL { - $$.loc = $1.loc; - $$.op = EOpAssign; - } - | MUL_ASSIGN { - $$.loc = $1.loc; - $$.op = EOpMulAssign; - } - | DIV_ASSIGN { - $$.loc = $1.loc; - $$.op = EOpDivAssign; - } - | MOD_ASSIGN { - parseContext.fullIntegerCheck($1.loc, "%="); - $$.loc = $1.loc; - $$.op = EOpModAssign; - } - | ADD_ASSIGN { - $$.loc = $1.loc; - $$.op = EOpAddAssign; - } - | SUB_ASSIGN { - $$.loc = $1.loc; - $$.op = EOpSubAssign; - } - | LEFT_ASSIGN { - parseContext.fullIntegerCheck($1.loc, "bit-shift left assign"); - $$.loc = $1.loc; $$.op = EOpLeftShiftAssign; - } - | RIGHT_ASSIGN { - parseContext.fullIntegerCheck($1.loc, "bit-shift right assign"); - $$.loc = $1.loc; $$.op = EOpRightShiftAssign; - } - | AND_ASSIGN { - parseContext.fullIntegerCheck($1.loc, "bitwise-and assign"); - $$.loc = $1.loc; $$.op = EOpAndAssign; - } - | XOR_ASSIGN { - 
parseContext.fullIntegerCheck($1.loc, "bitwise-xor assign"); - $$.loc = $1.loc; $$.op = EOpExclusiveOrAssign; - } - | OR_ASSIGN { - parseContext.fullIntegerCheck($1.loc, "bitwise-or assign"); - $$.loc = $1.loc; $$.op = EOpInclusiveOrAssign; - } - ; - -expression - : assignment_expression { - $$ = $1; - } - | expression COMMA assignment_expression { - parseContext.samplerConstructorLocationCheck($2.loc, ",", $3); - $$ = parseContext.intermediate.addComma($1, $3, $2.loc); - if ($$ == 0) { - parseContext.binaryOpError($2.loc, ",", $1->getCompleteString(), $3->getCompleteString()); - $$ = $3; - } - } - ; - -constant_expression - : conditional_expression { - parseContext.constantValueCheck($1, ""); - $$ = $1; - } - ; - -declaration - : function_prototype SEMICOLON { - parseContext.handleFunctionDeclarator($1.loc, *$1.function, true /* prototype */); - $$ = 0; - // TODO: 4.0 functionality: subroutines: make the identifier a user type for this signature - } - | init_declarator_list SEMICOLON { - if ($1.intermNode && $1.intermNode->getAsAggregate()) - $1.intermNode->getAsAggregate()->setOperator(EOpSequence); - $$ = $1.intermNode; - } - | PRECISION precision_qualifier type_specifier SEMICOLON { - parseContext.profileRequires($1.loc, ENoProfile, 130, 0, "precision statement"); - - // lazy setting of the previous scope's defaults, has effect only the first time it is called in a particular scope - parseContext.symbolTable.setPreviousDefaultPrecisions(&parseContext.defaultPrecision[0]); - parseContext.setDefaultPrecision($1.loc, $3, $2.qualifier.precision); - $$ = 0; - } - | block_structure SEMICOLON { - parseContext.declareBlock($1.loc, *$1.typeList); - $$ = 0; - } - | block_structure IDENTIFIER SEMICOLON { - parseContext.declareBlock($1.loc, *$1.typeList, $2.string); - $$ = 0; - } - | block_structure IDENTIFIER array_specifier SEMICOLON { - parseContext.declareBlock($1.loc, *$1.typeList, $2.string, $3.arraySizes); - $$ = 0; - } - | type_qualifier SEMICOLON { - parseContext.globalQualifierFixCheck($1.loc, $1.qualifier); - parseContext.updateStandaloneQualifierDefaults($1.loc, $1); - $$ = 0; - } - | type_qualifier IDENTIFIER SEMICOLON { - parseContext.checkNoShaderLayouts($1.loc, $1.shaderQualifiers); - parseContext.addQualifierToExisting($1.loc, $1.qualifier, *$2.string); - $$ = 0; - } - | type_qualifier IDENTIFIER identifier_list SEMICOLON { - parseContext.checkNoShaderLayouts($1.loc, $1.shaderQualifiers); - $3->push_back($2.string); - parseContext.addQualifierToExisting($1.loc, $1.qualifier, *$3); - $$ = 0; - } - ; - -block_structure - : type_qualifier IDENTIFIER LEFT_BRACE { parseContext.nestedBlockCheck($1.loc); } struct_declaration_list RIGHT_BRACE { - --parseContext.structNestingLevel; - parseContext.blockName = $2.string; - parseContext.globalQualifierFixCheck($1.loc, $1.qualifier); - parseContext.checkNoShaderLayouts($1.loc, $1.shaderQualifiers); - parseContext.currentBlockQualifier = $1.qualifier; - $$.loc = $1.loc; - $$.typeList = $5; - } - -identifier_list - : COMMA IDENTIFIER { - $$ = new TIdentifierList; - $$->push_back($2.string); - } - | identifier_list COMMA IDENTIFIER { - $$ = $1; - $$->push_back($3.string); - } - ; - -function_prototype - : function_declarator RIGHT_PAREN { - $$.function = $1; - $$.loc = $2.loc; - } - ; - -function_declarator - : function_header { - $$ = $1; - } - | function_header_with_parameters { - $$ = $1; - } - ; - - -function_header_with_parameters - : function_header parameter_declaration { - // Add the parameter - $$ = $1; - if 
($2.param.type->getBasicType() != EbtVoid) - $1->addParameter($2.param); - else - delete $2.param.type; - } - | function_header_with_parameters COMMA parameter_declaration { - // - // Only first parameter of one-parameter functions can be void - // The check for named parameters not being void is done in parameter_declarator - // - if ($3.param.type->getBasicType() == EbtVoid) { - // - // This parameter > first is void - // - parseContext.error($2.loc, "cannot be an argument type except for '(void)'", "void", ""); - delete $3.param.type; - } else { - // Add the parameter - $$ = $1; - $1->addParameter($3.param); - } - } - ; - -function_header - : fully_specified_type IDENTIFIER LEFT_PAREN { - if ($1.qualifier.storage != EvqGlobal && $1.qualifier.storage != EvqTemporary) { - parseContext.error($2.loc, "no qualifiers allowed for function return", - GetStorageQualifierString($1.qualifier.storage), ""); - } - if ($1.arraySizes) - parseContext.arraySizeRequiredCheck($1.loc, *$1.arraySizes); - - // Add the function as a prototype after parsing it (we do not support recursion) - TFunction *function; - TType type($1); - function = new TFunction($2.string, type); - $$ = function; - } - ; - -parameter_declarator - // Type + name - : type_specifier IDENTIFIER { - if ($1.arraySizes) { - parseContext.profileRequires($1.loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type"); - parseContext.profileRequires($1.loc, EEsProfile, 300, 0, "arrayed type"); - parseContext.arraySizeRequiredCheck($1.loc, *$1.arraySizes); - } - if ($1.basicType == EbtVoid) { - parseContext.error($2.loc, "illegal use of type 'void'", $2.string->c_str(), ""); - } - parseContext.reservedErrorCheck($2.loc, *$2.string); - - TParameter param = {$2.string, new TType($1)}; - $$.loc = $2.loc; - $$.param = param; - } - | type_specifier IDENTIFIER array_specifier { - if ($1.arraySizes) { - parseContext.profileRequires($1.loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type"); - parseContext.profileRequires($1.loc, EEsProfile, 300, 0, "arrayed type"); - parseContext.arraySizeRequiredCheck($1.loc, *$1.arraySizes); - } - parseContext.arrayDimCheck($2.loc, $1.arraySizes, $3.arraySizes); - - parseContext.arraySizeRequiredCheck($3.loc, *$3.arraySizes); - parseContext.reservedErrorCheck($2.loc, *$2.string); - - $1.arraySizes = $3.arraySizes; - - TParameter param = { $2.string, new TType($1)}; - $$.loc = $2.loc; - $$.param = param; - } - ; - -parameter_declaration - // - // With name - // - : type_qualifier parameter_declarator { - $$ = $2; - if ($1.qualifier.precision != EpqNone) - $$.param.type->getQualifier().precision = $1.qualifier.precision; - parseContext.precisionQualifierCheck($$.loc, $$.param.type->getBasicType(), $$.param.type->getQualifier()); - - parseContext.checkNoShaderLayouts($1.loc, $1.shaderQualifiers); - parseContext.parameterTypeCheck($2.loc, $1.qualifier.storage, *$$.param.type); - parseContext.paramCheckFix($1.loc, $1.qualifier, *$$.param.type); - - } - | parameter_declarator { - $$ = $1; - - parseContext.parameterTypeCheck($1.loc, EvqIn, *$1.param.type); - parseContext.paramCheckFix($1.loc, EvqTemporary, *$$.param.type); - parseContext.precisionQualifierCheck($$.loc, $$.param.type->getBasicType(), $$.param.type->getQualifier()); - } - // - // Without name - // - | type_qualifier parameter_type_specifier { - $$ = $2; - if ($1.qualifier.precision != EpqNone) - $$.param.type->getQualifier().precision = $1.qualifier.precision; - parseContext.precisionQualifierCheck($1.loc, $$.param.type->getBasicType(), 
$$.param.type->getQualifier()); - - parseContext.checkNoShaderLayouts($1.loc, $1.shaderQualifiers); - parseContext.parameterTypeCheck($2.loc, $1.qualifier.storage, *$$.param.type); - parseContext.paramCheckFix($1.loc, $1.qualifier, *$$.param.type); - } - | parameter_type_specifier { - $$ = $1; - - parseContext.parameterTypeCheck($1.loc, EvqIn, *$1.param.type); - parseContext.paramCheckFix($1.loc, EvqTemporary, *$$.param.type); - parseContext.precisionQualifierCheck($$.loc, $$.param.type->getBasicType(), $$.param.type->getQualifier()); - } - ; - -parameter_type_specifier - : type_specifier { - TParameter param = { 0, new TType($1) }; - $$.param = param; - if ($1.arraySizes) - parseContext.arraySizeRequiredCheck($1.loc, *$1.arraySizes); - } - ; - -init_declarator_list - : single_declaration { - $$ = $1; - } - | init_declarator_list COMMA IDENTIFIER { - $$ = $1; - parseContext.declareVariable($3.loc, *$3.string, $1.type); - } - | init_declarator_list COMMA IDENTIFIER array_specifier { - $$ = $1; - parseContext.declareVariable($3.loc, *$3.string, $1.type, $4.arraySizes); - } - | init_declarator_list COMMA IDENTIFIER array_specifier EQUAL initializer { - $$.type = $1.type; - TIntermNode* initNode = parseContext.declareVariable($3.loc, *$3.string, $1.type, $4.arraySizes, $6); - $$.intermNode = parseContext.intermediate.growAggregate($1.intermNode, initNode, $5.loc); - } - | init_declarator_list COMMA IDENTIFIER EQUAL initializer { - $$.type = $1.type; - TIntermNode* initNode = parseContext.declareVariable($3.loc, *$3.string, $1.type, 0, $5); - $$.intermNode = parseContext.intermediate.growAggregate($1.intermNode, initNode, $4.loc); - } - ; - -single_declaration - : fully_specified_type { - $$.type = $1; - $$.intermNode = 0; - parseContext.declareTypeDefaults($$.loc, $$.type); - } - | fully_specified_type IDENTIFIER { - $$.type = $1; - $$.intermNode = 0; - parseContext.declareVariable($2.loc, *$2.string, $1); - } - | fully_specified_type IDENTIFIER array_specifier { - $$.type = $1; - $$.intermNode = 0; - parseContext.declareVariable($2.loc, *$2.string, $1, $3.arraySizes); - } - | fully_specified_type IDENTIFIER array_specifier EQUAL initializer { - $$.type = $1; - TIntermNode* initNode = parseContext.declareVariable($2.loc, *$2.string, $1, $3.arraySizes, $5); - $$.intermNode = parseContext.intermediate.growAggregate(0, initNode, $4.loc); - } - | fully_specified_type IDENTIFIER EQUAL initializer { - $$.type = $1; - TIntermNode* initNode = parseContext.declareVariable($2.loc, *$2.string, $1, 0, $4); - $$.intermNode = parseContext.intermediate.growAggregate(0, initNode, $3.loc); - } - -// Grammar Note: No 'enum', or 'typedef'. 
- -fully_specified_type - : type_specifier { - $$ = $1; - - parseContext.globalQualifierTypeCheck($1.loc, $1.qualifier, $$); - if ($1.arraySizes) { - parseContext.profileRequires($1.loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type"); - parseContext.profileRequires($1.loc, EEsProfile, 300, 0, "arrayed type"); - } - - parseContext.precisionQualifierCheck($$.loc, $$.basicType, $$.qualifier); - } - | type_qualifier type_specifier { - parseContext.globalQualifierFixCheck($1.loc, $1.qualifier); - parseContext.globalQualifierTypeCheck($1.loc, $1.qualifier, $2); - - if ($2.arraySizes) { - parseContext.profileRequires($2.loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type"); - parseContext.profileRequires($2.loc, EEsProfile, 300, 0, "arrayed type"); - } - - if ($2.arraySizes && parseContext.arrayQualifierError($2.loc, $1.qualifier)) - $2.arraySizes = 0; - - parseContext.checkNoShaderLayouts($2.loc, $1.shaderQualifiers); - $2.shaderQualifiers.merge($1.shaderQualifiers); - parseContext.mergeQualifiers($2.loc, $2.qualifier, $1.qualifier, true); - parseContext.precisionQualifierCheck($2.loc, $2.basicType, $2.qualifier); - - $$ = $2; - - if (! $$.qualifier.isInterpolation() && - ((parseContext.language == EShLangVertex && $$.qualifier.storage == EvqVaryingOut) || - (parseContext.language == EShLangFragment && $$.qualifier.storage == EvqVaryingIn))) - $$.qualifier.smooth = true; - } - ; - -invariant_qualifier - : INVARIANT { - parseContext.globalCheck($1.loc, "invariant"); - parseContext.profileRequires($$.loc, ENoProfile, 120, 0, "invariant"); - $$.init($1.loc); - $$.qualifier.invariant = true; - } - ; - -interpolation_qualifier - : SMOOTH { - parseContext.globalCheck($1.loc, "smooth"); - parseContext.profileRequires($1.loc, ENoProfile, 130, 0, "smooth"); - parseContext.profileRequires($1.loc, EEsProfile, 300, 0, "smooth"); - $$.init($1.loc); - $$.qualifier.smooth = true; - } - | FLAT { - parseContext.globalCheck($1.loc, "flat"); - parseContext.profileRequires($1.loc, ENoProfile, 130, 0, "flat"); - parseContext.profileRequires($1.loc, EEsProfile, 300, 0, "flat"); - $$.init($1.loc); - $$.qualifier.flat = true; - } - | NOPERSPECTIVE { - parseContext.globalCheck($1.loc, "noperspective"); - parseContext.requireProfile($1.loc, ~EEsProfile, "noperspective"); - parseContext.profileRequires($1.loc, ENoProfile, 130, 0, "noperspective"); - $$.init($1.loc); - $$.qualifier.nopersp = true; - } - | __EXPLICITINTERPAMD { -#ifdef AMD_EXTENSIONS - parseContext.globalCheck($1.loc, "__explicitInterpAMD"); - parseContext.profileRequires($1.loc, ECoreProfile, 450, E_GL_AMD_shader_explicit_vertex_parameter, "explicit interpolation"); - parseContext.profileRequires($1.loc, ECompatibilityProfile, 450, E_GL_AMD_shader_explicit_vertex_parameter, "explicit interpolation"); - $$.init($1.loc); - $$.qualifier.explicitInterp = true; -#endif - } - ; - -layout_qualifier - : LAYOUT LEFT_PAREN layout_qualifier_id_list RIGHT_PAREN { - $$ = $3; - } - ; - -layout_qualifier_id_list - : layout_qualifier_id { - $$ = $1; - } - | layout_qualifier_id_list COMMA layout_qualifier_id { - $$ = $1; - $$.shaderQualifiers.merge($3.shaderQualifiers); - parseContext.mergeObjectLayoutQualifiers($$.qualifier, $3.qualifier, false); - } - -layout_qualifier_id - : IDENTIFIER { - $$.init($1.loc); - parseContext.setLayoutQualifier($1.loc, $$, *$1.string); - } - | IDENTIFIER EQUAL constant_expression { - $$.init($1.loc); - parseContext.setLayoutQualifier($1.loc, $$, *$1.string, $3); - } - | SHARED { // because "shared" is both an identifier and 
a keyword - $$.init($1.loc); - TString strShared("shared"); - parseContext.setLayoutQualifier($1.loc, $$, strShared); - } - ; - -precise_qualifier - : PRECISE { - parseContext.profileRequires($$.loc, ECoreProfile | ECompatibilityProfile, 400, E_GL_ARB_gpu_shader5, "precise"); - parseContext.profileRequires($1.loc, EEsProfile, 320, Num_AEP_gpu_shader5, AEP_gpu_shader5, "precise"); - $$.init($1.loc); - $$.qualifier.noContraction = true; - } - ; - -type_qualifier - : single_type_qualifier { - $$ = $1; - } - | type_qualifier single_type_qualifier { - $$ = $1; - if ($$.basicType == EbtVoid) - $$.basicType = $2.basicType; - - $$.shaderQualifiers.merge($2.shaderQualifiers); - parseContext.mergeQualifiers($$.loc, $$.qualifier, $2.qualifier, false); - } - ; - -single_type_qualifier - : storage_qualifier { - $$ = $1; - } - | layout_qualifier { - $$ = $1; - } - | precision_qualifier { - parseContext.checkPrecisionQualifier($1.loc, $1.qualifier.precision); - $$ = $1; - } - | interpolation_qualifier { - // allow inheritance of storage qualifier from block declaration - $$ = $1; - } - | invariant_qualifier { - // allow inheritance of storage qualifier from block declaration - $$ = $1; - } - | precise_qualifier { - // allow inheritance of storage qualifier from block declaration - $$ = $1; - } - ; - -storage_qualifier - : CONST { - $$.init($1.loc); - $$.qualifier.storage = EvqConst; // will later turn into EvqConstReadOnly, if the initializer is not constant - } - | ATTRIBUTE { - parseContext.requireStage($1.loc, EShLangVertex, "attribute"); - parseContext.checkDeprecated($1.loc, ECoreProfile, 130, "attribute"); - parseContext.checkDeprecated($1.loc, ENoProfile, 130, "attribute"); - parseContext.requireNotRemoved($1.loc, ECoreProfile, 420, "attribute"); - parseContext.requireNotRemoved($1.loc, EEsProfile, 300, "attribute"); - - parseContext.globalCheck($1.loc, "attribute"); - - $$.init($1.loc); - $$.qualifier.storage = EvqVaryingIn; - } - | VARYING { - parseContext.checkDeprecated($1.loc, ENoProfile, 130, "varying"); - parseContext.checkDeprecated($1.loc, ECoreProfile, 130, "varying"); - parseContext.requireNotRemoved($1.loc, ECoreProfile, 420, "varying"); - parseContext.requireNotRemoved($1.loc, EEsProfile, 300, "varying"); - - parseContext.globalCheck($1.loc, "varying"); - - $$.init($1.loc); - if (parseContext.language == EShLangVertex) - $$.qualifier.storage = EvqVaryingOut; - else - $$.qualifier.storage = EvqVaryingIn; - } - | INOUT { - parseContext.globalCheck($1.loc, "inout"); - $$.init($1.loc); - $$.qualifier.storage = EvqInOut; - } - | IN { - parseContext.globalCheck($1.loc, "in"); - $$.init($1.loc); - // whether this is a parameter "in" or a pipeline "in" will get sorted out a bit later - $$.qualifier.storage = EvqIn; - } - | OUT { - parseContext.globalCheck($1.loc, "out"); - $$.init($1.loc); - // whether this is a parameter "out" or a pipeline "out" will get sorted out a bit later - $$.qualifier.storage = EvqOut; - } - | CENTROID { - parseContext.profileRequires($1.loc, ENoProfile, 120, 0, "centroid"); - parseContext.profileRequires($1.loc, EEsProfile, 300, 0, "centroid"); - parseContext.globalCheck($1.loc, "centroid"); - $$.init($1.loc); - $$.qualifier.centroid = true; - } - | PATCH { - parseContext.globalCheck($1.loc, "patch"); - parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangTessControlMask | EShLangTessEvaluationMask), "patch"); - $$.init($1.loc); - $$.qualifier.patch = true; - } - | SAMPLE { - parseContext.globalCheck($1.loc, "sample"); - $$.init($1.loc); - $$.qualifier.sample 
= true; - } - | UNIFORM { - parseContext.globalCheck($1.loc, "uniform"); - $$.init($1.loc); - $$.qualifier.storage = EvqUniform; - } - | BUFFER { - parseContext.globalCheck($1.loc, "buffer"); - $$.init($1.loc); - $$.qualifier.storage = EvqBuffer; - } - | SHARED { - parseContext.globalCheck($1.loc, "shared"); - parseContext.profileRequires($1.loc, ECoreProfile | ECompatibilityProfile, 430, E_GL_ARB_compute_shader, "shared"); - parseContext.profileRequires($1.loc, EEsProfile, 310, 0, "shared"); - parseContext.requireStage($1.loc, EShLangCompute, "shared"); - $$.init($1.loc); - $$.qualifier.storage = EvqShared; - } - | COHERENT { - $$.init($1.loc); - $$.qualifier.coherent = true; - } - | VOLATILE { - $$.init($1.loc); - $$.qualifier.volatil = true; - } - | RESTRICT { - $$.init($1.loc); - $$.qualifier.restrict = true; - } - | READONLY { - $$.init($1.loc); - $$.qualifier.readonly = true; - } - | WRITEONLY { - $$.init($1.loc); - $$.qualifier.writeonly = true; - } - | SUBROUTINE { - parseContext.spvRemoved($1.loc, "subroutine"); - parseContext.globalCheck($1.loc, "subroutine"); - parseContext.unimplemented($1.loc, "subroutine"); - $$.init($1.loc); - } - | SUBROUTINE LEFT_PAREN type_name_list RIGHT_PAREN { - parseContext.spvRemoved($1.loc, "subroutine"); - parseContext.globalCheck($1.loc, "subroutine"); - parseContext.unimplemented($1.loc, "subroutine"); - $$.init($1.loc); - } - ; - -type_name_list - : IDENTIFIER { - // TODO - } - | type_name_list COMMA IDENTIFIER { - // TODO: 4.0 semantics: subroutines - // 1) make sure each identifier is a type declared earlier with SUBROUTINE - // 2) save all of the identifiers for future comparison with the declared function - } - ; - -type_specifier - : type_specifier_nonarray { - $$ = $1; - $$.qualifier.precision = parseContext.getDefaultPrecision($$); - } - | type_specifier_nonarray array_specifier { - parseContext.arrayDimCheck($2.loc, $2.arraySizes, 0); - $$ = $1; - $$.qualifier.precision = parseContext.getDefaultPrecision($$); - $$.arraySizes = $2.arraySizes; - } - ; - -array_specifier - : LEFT_BRACKET RIGHT_BRACKET { - $$.loc = $1.loc; - $$.arraySizes = new TArraySizes; - $$.arraySizes->addInnerSize(); - } - | LEFT_BRACKET conditional_expression RIGHT_BRACKET { - $$.loc = $1.loc; - $$.arraySizes = new TArraySizes; - - TArraySize size; - parseContext.arraySizeCheck($2->getLoc(), $2, size); - $$.arraySizes->addInnerSize(size); - } - | array_specifier LEFT_BRACKET RIGHT_BRACKET { - $$ = $1; - $$.arraySizes->addInnerSize(); - } - | array_specifier LEFT_BRACKET conditional_expression RIGHT_BRACKET { - $$ = $1; - - TArraySize size; - parseContext.arraySizeCheck($3->getLoc(), $3, size); - $$.arraySizes->addInnerSize(size); - } - ; - -type_specifier_nonarray - : VOID { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtVoid; - } - | FLOAT { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat; - } - | DOUBLE { - parseContext.doubleCheck($1.loc, "double"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtDouble; - } - | FLOAT16_T { -#ifdef AMD_EXTENSIONS - parseContext.float16Check($1.loc, "half float", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat16; -#endif - } - | INT { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtInt; - } - | UINT { - parseContext.fullIntegerCheck($1.loc, "unsigned integer"); - $$.init($1.loc, 
parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtUint; - } - | INT64_T { - parseContext.int64Check($1.loc, "64-bit integer", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtInt64; - } - | UINT64_T { - parseContext.int64Check($1.loc, "64-bit unsigned integer", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtUint64; - } - | INT16_T { -#ifdef AMD_EXTENSIONS - parseContext.int16Check($1.loc, "16-bit integer", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtInt16; -#endif - } - | UINT16_T { -#ifdef AMD_EXTENSIONS - parseContext.int16Check($1.loc, "16-bit unsigned integer", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtUint16; -#endif - } - | BOOL { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtBool; - } - | VEC2 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat; - $$.setVector(2); - } - | VEC3 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat; - $$.setVector(3); - } - | VEC4 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat; - $$.setVector(4); - } - | DVEC2 { - parseContext.doubleCheck($1.loc, "double vector"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtDouble; - $$.setVector(2); - } - | DVEC3 { - parseContext.doubleCheck($1.loc, "double vector"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtDouble; - $$.setVector(3); - } - | DVEC4 { - parseContext.doubleCheck($1.loc, "double vector"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtDouble; - $$.setVector(4); - } - | F16VEC2 { -#ifdef AMD_EXTENSIONS - parseContext.float16Check($1.loc, "half float vector", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat16; - $$.setVector(2); -#endif - } - | F16VEC3 { -#ifdef AMD_EXTENSIONS - parseContext.float16Check($1.loc, "half float vector", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat16; - $$.setVector(3); -#endif - } - | F16VEC4 { -#ifdef AMD_EXTENSIONS - parseContext.float16Check($1.loc, "half float vector", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat16; - $$.setVector(4); -#endif - } - | BVEC2 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtBool; - $$.setVector(2); - } - | BVEC3 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtBool; - $$.setVector(3); - } - | BVEC4 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtBool; - $$.setVector(4); - } - | IVEC2 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtInt; - $$.setVector(2); - } - | IVEC3 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtInt; - $$.setVector(3); - } - | IVEC4 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtInt; - $$.setVector(4); - } - | I64VEC2 { - parseContext.int64Check($1.loc, "64-bit integer vector", 
parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtInt64; - $$.setVector(2); - } - | I64VEC3 { - parseContext.int64Check($1.loc, "64-bit integer vector", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtInt64; - $$.setVector(3); - } - | I64VEC4 { - parseContext.int64Check($1.loc, "64-bit integer vector", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtInt64; - $$.setVector(4); - } - | I16VEC2 { -#ifdef AMD_EXTENSIONS - parseContext.int16Check($1.loc, "16-bit integer vector", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtInt16; - $$.setVector(2); -#endif - } - | I16VEC3 { -#ifdef AMD_EXTENSIONS - parseContext.int16Check($1.loc, "16-bit integer vector", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtInt16; - $$.setVector(3); -#endif - } - | I16VEC4 { -#ifdef AMD_EXTENSIONS - parseContext.int16Check($1.loc, "16-bit integer vector", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtInt16; - $$.setVector(4); -#endif - } - | UVEC2 { - parseContext.fullIntegerCheck($1.loc, "unsigned integer vector"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtUint; - $$.setVector(2); - } - | UVEC3 { - parseContext.fullIntegerCheck($1.loc, "unsigned integer vector"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtUint; - $$.setVector(3); - } - | UVEC4 { - parseContext.fullIntegerCheck($1.loc, "unsigned integer vector"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtUint; - $$.setVector(4); - } - | U64VEC2 { - parseContext.int64Check($1.loc, "64-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtUint64; - $$.setVector(2); - } - | U64VEC3 { - parseContext.int64Check($1.loc, "64-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtUint64; - $$.setVector(3); - } - | U64VEC4 { - parseContext.int64Check($1.loc, "64-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtUint64; - $$.setVector(4); - } - | U16VEC2 { -#ifdef AMD_EXTENSIONS - parseContext.int16Check($1.loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtUint16; - $$.setVector(2); -#endif - } - | U16VEC3 { -#ifdef AMD_EXTENSIONS - parseContext.int16Check($1.loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtUint16; - $$.setVector(3); -#endif - } - | U16VEC4 { -#ifdef AMD_EXTENSIONS - parseContext.int16Check($1.loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtUint16; - $$.setVector(4); -#endif - } - | MAT2 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - 
$$.basicType = EbtFloat; - $$.setMatrix(2, 2); - } - | MAT3 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat; - $$.setMatrix(3, 3); - } - | MAT4 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat; - $$.setMatrix(4, 4); - } - | MAT2X2 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat; - $$.setMatrix(2, 2); - } - | MAT2X3 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat; - $$.setMatrix(2, 3); - } - | MAT2X4 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat; - $$.setMatrix(2, 4); - } - | MAT3X2 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat; - $$.setMatrix(3, 2); - } - | MAT3X3 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat; - $$.setMatrix(3, 3); - } - | MAT3X4 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat; - $$.setMatrix(3, 4); - } - | MAT4X2 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat; - $$.setMatrix(4, 2); - } - | MAT4X3 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat; - $$.setMatrix(4, 3); - } - | MAT4X4 { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat; - $$.setMatrix(4, 4); - } - | DMAT2 { - parseContext.doubleCheck($1.loc, "double matrix"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtDouble; - $$.setMatrix(2, 2); - } - | DMAT3 { - parseContext.doubleCheck($1.loc, "double matrix"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtDouble; - $$.setMatrix(3, 3); - } - | DMAT4 { - parseContext.doubleCheck($1.loc, "double matrix"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtDouble; - $$.setMatrix(4, 4); - } - | DMAT2X2 { - parseContext.doubleCheck($1.loc, "double matrix"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtDouble; - $$.setMatrix(2, 2); - } - | DMAT2X3 { - parseContext.doubleCheck($1.loc, "double matrix"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtDouble; - $$.setMatrix(2, 3); - } - | DMAT2X4 { - parseContext.doubleCheck($1.loc, "double matrix"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtDouble; - $$.setMatrix(2, 4); - } - | DMAT3X2 { - parseContext.doubleCheck($1.loc, "double matrix"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtDouble; - $$.setMatrix(3, 2); - } - | DMAT3X3 { - parseContext.doubleCheck($1.loc, "double matrix"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtDouble; - $$.setMatrix(3, 3); - } - | DMAT3X4 { - parseContext.doubleCheck($1.loc, "double matrix"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtDouble; - $$.setMatrix(3, 4); - } - | DMAT4X2 { - parseContext.doubleCheck($1.loc, "double matrix"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtDouble; - $$.setMatrix(4, 2); - } - | DMAT4X3 { - parseContext.doubleCheck($1.loc, "double matrix"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtDouble; - $$.setMatrix(4, 3); - } - | DMAT4X4 { - parseContext.doubleCheck($1.loc, "double matrix"); - $$.init($1.loc, 
parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtDouble; - $$.setMatrix(4, 4); - } - | F16MAT2 { -#ifdef AMD_EXTENSIONS - parseContext.float16Check($1.loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat16; - $$.setMatrix(2, 2); -#endif - } - | F16MAT3 { -#ifdef AMD_EXTENSIONS - parseContext.float16Check($1.loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat16; - $$.setMatrix(3, 3); -#endif - } - | F16MAT4 { -#ifdef AMD_EXTENSIONS - parseContext.float16Check($1.loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat16; - $$.setMatrix(4, 4); -#endif - } - | F16MAT2X2 { -#ifdef AMD_EXTENSIONS - parseContext.float16Check($1.loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat16; - $$.setMatrix(2, 2); -#endif - } - | F16MAT2X3 { -#ifdef AMD_EXTENSIONS - parseContext.float16Check($1.loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat16; - $$.setMatrix(2, 3); -#endif - } - | F16MAT2X4 { -#ifdef AMD_EXTENSIONS - parseContext.float16Check($1.loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat16; - $$.setMatrix(2, 4); -#endif - } - | F16MAT3X2 { -#ifdef AMD_EXTENSIONS - parseContext.float16Check($1.loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat16; - $$.setMatrix(3, 2); -#endif - } - | F16MAT3X3 { -#ifdef AMD_EXTENSIONS - parseContext.float16Check($1.loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat16; - $$.setMatrix(3, 3); -#endif - } - | F16MAT3X4 { -#ifdef AMD_EXTENSIONS - parseContext.float16Check($1.loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat16; - $$.setMatrix(3, 4); -#endif - } - | F16MAT4X2 { -#ifdef AMD_EXTENSIONS - parseContext.float16Check($1.loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat16; - $$.setMatrix(4, 2); -#endif - } - | F16MAT4X3 { -#ifdef AMD_EXTENSIONS - parseContext.float16Check($1.loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat16; - $$.setMatrix(4, 3); -#endif - } - | F16MAT4X4 { -#ifdef AMD_EXTENSIONS - parseContext.float16Check($1.loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtFloat16; - $$.setMatrix(4, 4); -#endif - } - | ATOMIC_UINT { - parseContext.vulkanRemoved($1.loc, "atomic counter types"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtAtomicUint; - } - | SAMPLER1D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - 
$$.sampler.set(EbtFloat, Esd1D); - } - | SAMPLER2D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtFloat, Esd2D); - } - | SAMPLER3D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtFloat, Esd3D); - } - | SAMPLERCUBE { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtFloat, EsdCube); - } - | SAMPLER1DSHADOW { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtFloat, Esd1D, false, true); - } - | SAMPLER2DSHADOW { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtFloat, Esd2D, false, true); - } - | SAMPLERCUBESHADOW { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtFloat, EsdCube, false, true); - } - | SAMPLER1DARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtFloat, Esd1D, true); - } - | SAMPLER2DARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtFloat, Esd2D, true); - } - | SAMPLER1DARRAYSHADOW { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtFloat, Esd1D, true, true); - } - | SAMPLER2DARRAYSHADOW { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtFloat, Esd2D, true, true); - } - | SAMPLERCUBEARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtFloat, EsdCube, true); - } - | SAMPLERCUBEARRAYSHADOW { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtFloat, EsdCube, true, true); - } - | ISAMPLER1D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtInt, Esd1D); - } - | ISAMPLER2D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtInt, Esd2D); - } - | ISAMPLER3D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtInt, Esd3D); - } - | ISAMPLERCUBE { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtInt, EsdCube); - } - | ISAMPLER1DARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtInt, Esd1D, true); - } - | ISAMPLER2DARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtInt, Esd2D, true); - } - | ISAMPLERCUBEARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtInt, EsdCube, true); - } - | USAMPLER1D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtUint, Esd1D); - } - | USAMPLER2D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtUint, Esd2D); - } - | USAMPLER3D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtUint, Esd3D); - } - | USAMPLERCUBE { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - 
$$.sampler.set(EbtUint, EsdCube); - } - | USAMPLER1DARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtUint, Esd1D, true); - } - | USAMPLER2DARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtUint, Esd2D, true); - } - | USAMPLERCUBEARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtUint, EsdCube, true); - } - | SAMPLER2DRECT { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtFloat, EsdRect); - } - | SAMPLER2DRECTSHADOW { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtFloat, EsdRect, false, true); - } - | ISAMPLER2DRECT { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtInt, EsdRect); - } - | USAMPLER2DRECT { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtUint, EsdRect); - } - | SAMPLERBUFFER { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtFloat, EsdBuffer); - } - | ISAMPLERBUFFER { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtInt, EsdBuffer); - } - | USAMPLERBUFFER { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtUint, EsdBuffer); - } - | SAMPLER2DMS { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtFloat, Esd2D, false, false, true); - } - | ISAMPLER2DMS { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtInt, Esd2D, false, false, true); - } - | USAMPLER2DMS { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtUint, Esd2D, false, false, true); - } - | SAMPLER2DMSARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtFloat, Esd2D, true, false, true); - } - | ISAMPLER2DMSARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtInt, Esd2D, true, false, true); - } - | USAMPLER2DMSARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtUint, Esd2D, true, false, true); - } - | SAMPLER { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setPureSampler(false); - } - | SAMPLERSHADOW { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setPureSampler(true); - } - | TEXTURE1D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtFloat, Esd1D); - } - | TEXTURE2D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtFloat, Esd2D); - } - | TEXTURE3D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtFloat, Esd3D); - } - | TEXTURECUBE { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtFloat, EsdCube); - } - | TEXTURE1DARRAY { - $$.init($1.loc, 
parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtFloat, Esd1D, true); - } - | TEXTURE2DARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtFloat, Esd2D, true); - } - | TEXTURECUBEARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtFloat, EsdCube, true); - } - | ITEXTURE1D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtInt, Esd1D); - } - | ITEXTURE2D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtInt, Esd2D); - } - | ITEXTURE3D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtInt, Esd3D); - } - | ITEXTURECUBE { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtInt, EsdCube); - } - | ITEXTURE1DARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtInt, Esd1D, true); - } - | ITEXTURE2DARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtInt, Esd2D, true); - } - | ITEXTURECUBEARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtInt, EsdCube, true); - } - | UTEXTURE1D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtUint, Esd1D); - } - | UTEXTURE2D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtUint, Esd2D); - } - | UTEXTURE3D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtUint, Esd3D); - } - | UTEXTURECUBE { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtUint, EsdCube); - } - | UTEXTURE1DARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtUint, Esd1D, true); - } - | UTEXTURE2DARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtUint, Esd2D, true); - } - | UTEXTURECUBEARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtUint, EsdCube, true); - } - | TEXTURE2DRECT { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtFloat, EsdRect); - } - | ITEXTURE2DRECT { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtInt, EsdRect); - } - | UTEXTURE2DRECT { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtUint, EsdRect); - } - | TEXTUREBUFFER { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtFloat, EsdBuffer); - } - | ITEXTUREBUFFER { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtInt, EsdBuffer); - } - | UTEXTUREBUFFER { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = 
EbtSampler; - $$.sampler.setTexture(EbtUint, EsdBuffer); - } - | TEXTURE2DMS { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtFloat, Esd2D, false, false, true); - } - | ITEXTURE2DMS { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtInt, Esd2D, false, false, true); - } - | UTEXTURE2DMS { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtUint, Esd2D, false, false, true); - } - | TEXTURE2DMSARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtFloat, Esd2D, true, false, true); - } - | ITEXTURE2DMSARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtInt, Esd2D, true, false, true); - } - | UTEXTURE2DMSARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setTexture(EbtUint, Esd2D, true, false, true); - } - | IMAGE1D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtFloat, Esd1D); - } - | IIMAGE1D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtInt, Esd1D); - } - | UIMAGE1D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtUint, Esd1D); - } - | IMAGE2D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtFloat, Esd2D); - } - | IIMAGE2D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtInt, Esd2D); - } - | UIMAGE2D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtUint, Esd2D); - } - | IMAGE3D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtFloat, Esd3D); - } - | IIMAGE3D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtInt, Esd3D); - } - | UIMAGE3D { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtUint, Esd3D); - } - | IMAGE2DRECT { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtFloat, EsdRect); - } - | IIMAGE2DRECT { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtInt, EsdRect); - } - | UIMAGE2DRECT { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtUint, EsdRect); - } - | IMAGECUBE { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtFloat, EsdCube); - } - | IIMAGECUBE { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtInt, EsdCube); - } - | UIMAGECUBE { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtUint, EsdCube); - } - | IMAGEBUFFER { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtFloat, EsdBuffer); - } - | IIMAGEBUFFER { - $$.init($1.loc, 
parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtInt, EsdBuffer); - } - | UIMAGEBUFFER { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtUint, EsdBuffer); - } - | IMAGE1DARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtFloat, Esd1D, true); - } - | IIMAGE1DARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtInt, Esd1D, true); - } - | UIMAGE1DARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtUint, Esd1D, true); - } - | IMAGE2DARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtFloat, Esd2D, true); - } - | IIMAGE2DARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtInt, Esd2D, true); - } - | UIMAGE2DARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtUint, Esd2D, true); - } - | IMAGECUBEARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtFloat, EsdCube, true); - } - | IIMAGECUBEARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtInt, EsdCube, true); - } - | UIMAGECUBEARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtUint, EsdCube, true); - } - | IMAGE2DMS { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtFloat, Esd2D, false, false, true); - } - | IIMAGE2DMS { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtInt, Esd2D, false, false, true); - } - | UIMAGE2DMS { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtUint, Esd2D, false, false, true); - } - | IMAGE2DMSARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtFloat, Esd2D, true, false, true); - } - | IIMAGE2DMSARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtInt, Esd2D, true, false, true); - } - | UIMAGE2DMSARRAY { - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setImage(EbtUint, Esd2D, true, false, true); - } - | SAMPLEREXTERNALOES { // GL_OES_EGL_image_external - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.set(EbtFloat, Esd2D); - $$.sampler.external = true; - } - | SUBPASSINPUT { - parseContext.requireStage($1.loc, EShLangFragment, "subpass input"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setSubpass(EbtFloat); - } - | SUBPASSINPUTMS { - parseContext.requireStage($1.loc, EShLangFragment, "subpass input"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setSubpass(EbtFloat, true); - } - | ISUBPASSINPUT { - parseContext.requireStage($1.loc, EShLangFragment, "subpass input"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); 
- $$.basicType = EbtSampler; - $$.sampler.setSubpass(EbtInt); - } - | ISUBPASSINPUTMS { - parseContext.requireStage($1.loc, EShLangFragment, "subpass input"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setSubpass(EbtInt, true); - } - | USUBPASSINPUT { - parseContext.requireStage($1.loc, EShLangFragment, "subpass input"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setSubpass(EbtUint); - } - | USUBPASSINPUTMS { - parseContext.requireStage($1.loc, EShLangFragment, "subpass input"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtSampler; - $$.sampler.setSubpass(EbtUint, true); - } - | struct_specifier { - $$ = $1; - $$.qualifier.storage = parseContext.symbolTable.atGlobalLevel() ? EvqGlobal : EvqTemporary; - parseContext.structTypeCheck($$.loc, $$); - } - | TYPE_NAME { - // - // This is for user defined type names. The lexical phase looked up the - // type. - // - if (const TVariable* variable = ($1.symbol)->getAsVariable()) { - const TType& structure = variable->getType(); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - $$.basicType = EbtStruct; - $$.userDef = &structure; - } else - parseContext.error($1.loc, "expected type name", $1.string->c_str(), ""); - } - ; - -precision_qualifier - : HIGH_PRECISION { - parseContext.profileRequires($1.loc, ENoProfile, 130, 0, "highp precision qualifier"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - parseContext.handlePrecisionQualifier($1.loc, $$.qualifier, EpqHigh); - } - | MEDIUM_PRECISION { - parseContext.profileRequires($1.loc, ENoProfile, 130, 0, "mediump precision qualifier"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - parseContext.handlePrecisionQualifier($1.loc, $$.qualifier, EpqMedium); - } - | LOW_PRECISION { - parseContext.profileRequires($1.loc, ENoProfile, 130, 0, "lowp precision qualifier"); - $$.init($1.loc, parseContext.symbolTable.atGlobalLevel()); - parseContext.handlePrecisionQualifier($1.loc, $$.qualifier, EpqLow); - } - ; - -struct_specifier - : STRUCT IDENTIFIER LEFT_BRACE { parseContext.nestedStructCheck($1.loc); } struct_declaration_list RIGHT_BRACE { - TType* structure = new TType($5, *$2.string); - parseContext.structArrayCheck($2.loc, *structure); - TVariable* userTypeDef = new TVariable($2.string, *structure, true); - if (! 
parseContext.symbolTable.insert(*userTypeDef)) - parseContext.error($2.loc, "redefinition", $2.string->c_str(), "struct"); - $$.init($1.loc); - $$.basicType = EbtStruct; - $$.userDef = structure; - --parseContext.structNestingLevel; - } - | STRUCT LEFT_BRACE { parseContext.nestedStructCheck($1.loc); } struct_declaration_list RIGHT_BRACE { - TType* structure = new TType($4, TString("")); - $$.init($1.loc); - $$.basicType = EbtStruct; - $$.userDef = structure; - --parseContext.structNestingLevel; - } - ; - -struct_declaration_list - : struct_declaration { - $$ = $1; - } - | struct_declaration_list struct_declaration { - $$ = $1; - for (unsigned int i = 0; i < $2->size(); ++i) { - for (unsigned int j = 0; j < $$->size(); ++j) { - if ((*$$)[j].type->getFieldName() == (*$2)[i].type->getFieldName()) - parseContext.error((*$2)[i].loc, "duplicate member name:", "", (*$2)[i].type->getFieldName().c_str()); - } - $$->push_back((*$2)[i]); - } - } - ; - -struct_declaration - : type_specifier struct_declarator_list SEMICOLON { - if ($1.arraySizes) { - parseContext.profileRequires($1.loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type"); - parseContext.profileRequires($1.loc, EEsProfile, 300, 0, "arrayed type"); - if (parseContext.profile == EEsProfile) - parseContext.arraySizeRequiredCheck($1.loc, *$1.arraySizes); - } - - $$ = $2; - - parseContext.voidErrorCheck($1.loc, (*$2)[0].type->getFieldName(), $1.basicType); - parseContext.precisionQualifierCheck($1.loc, $1.basicType, $1.qualifier); - - for (unsigned int i = 0; i < $$->size(); ++i) { - parseContext.arrayDimCheck($1.loc, (*$$)[i].type, $1.arraySizes); - (*$$)[i].type->mergeType($1); - } - } - | type_qualifier type_specifier struct_declarator_list SEMICOLON { - parseContext.globalQualifierFixCheck($1.loc, $1.qualifier); - if ($2.arraySizes) { - parseContext.profileRequires($2.loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type"); - parseContext.profileRequires($2.loc, EEsProfile, 300, 0, "arrayed type"); - if (parseContext.profile == EEsProfile) - parseContext.arraySizeRequiredCheck($2.loc, *$2.arraySizes); - } - - $$ = $3; - - parseContext.checkNoShaderLayouts($1.loc, $1.shaderQualifiers); - parseContext.voidErrorCheck($2.loc, (*$3)[0].type->getFieldName(), $2.basicType); - parseContext.mergeQualifiers($2.loc, $2.qualifier, $1.qualifier, true); - parseContext.precisionQualifierCheck($2.loc, $2.basicType, $2.qualifier); - - for (unsigned int i = 0; i < $$->size(); ++i) { - parseContext.arrayDimCheck($1.loc, (*$$)[i].type, $2.arraySizes); - (*$$)[i].type->mergeType($2); - } - } - ; - -struct_declarator_list - : struct_declarator { - $$ = new TTypeList; - $$->push_back($1); - } - | struct_declarator_list COMMA struct_declarator { - $$->push_back($3); - } - ; - -struct_declarator - : IDENTIFIER { - $$.type = new TType(EbtVoid); - $$.loc = $1.loc; - $$.type->setFieldName(*$1.string); - } - | IDENTIFIER array_specifier { - parseContext.arrayDimCheck($1.loc, $2.arraySizes, 0); - - $$.type = new TType(EbtVoid); - $$.loc = $1.loc; - $$.type->setFieldName(*$1.string); - $$.type->newArraySizes(*$2.arraySizes); - } - ; - -initializer - : assignment_expression { - $$ = $1; - } - | LEFT_BRACE initializer_list RIGHT_BRACE { - const char* initFeature = "{ } style initializers"; - parseContext.requireProfile($1.loc, ~EEsProfile, initFeature); - parseContext.profileRequires($1.loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, initFeature); - $$ = $2; - } - | LEFT_BRACE initializer_list COMMA RIGHT_BRACE { - const char* initFeature = "{ } 
style initializers"; - parseContext.requireProfile($1.loc, ~EEsProfile, initFeature); - parseContext.profileRequires($1.loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, initFeature); - $$ = $2; - } - ; - -initializer_list - : initializer { - $$ = parseContext.intermediate.growAggregate(0, $1, $1->getLoc()); - } - | initializer_list COMMA initializer { - $$ = parseContext.intermediate.growAggregate($1, $3); - } - ; - -declaration_statement - : declaration { $$ = $1; } - ; - -statement - : compound_statement { $$ = $1; } - | simple_statement { $$ = $1; } - ; - -// Grammar Note: labeled statements for switch statements only; 'goto' is not supported. - -simple_statement - : declaration_statement { $$ = $1; } - | expression_statement { $$ = $1; } - | selection_statement { $$ = $1; } - | switch_statement { $$ = $1; } - | case_label { $$ = $1; } - | iteration_statement { $$ = $1; } - | jump_statement { $$ = $1; } - ; - -compound_statement - : LEFT_BRACE RIGHT_BRACE { $$ = 0; } - | LEFT_BRACE { - parseContext.symbolTable.push(); - ++parseContext.statementNestingLevel; - } - statement_list { - parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]); - --parseContext.statementNestingLevel; - } - RIGHT_BRACE { - if ($3 && $3->getAsAggregate()) - $3->getAsAggregate()->setOperator(EOpSequence); - $$ = $3; - } - ; - -statement_no_new_scope - : compound_statement_no_new_scope { $$ = $1; } - | simple_statement { $$ = $1; } - ; - -statement_scoped - : { - ++parseContext.controlFlowNestingLevel; - } - compound_statement { - --parseContext.controlFlowNestingLevel; - $$ = $2; - } - | { - parseContext.symbolTable.push(); - ++parseContext.statementNestingLevel; - ++parseContext.controlFlowNestingLevel; - } - simple_statement { - parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]); - --parseContext.statementNestingLevel; - --parseContext.controlFlowNestingLevel; - $$ = $2; - } - -compound_statement_no_new_scope - // Statement that doesn't create a new scope, for selection_statement, iteration_statement - : LEFT_BRACE RIGHT_BRACE { - $$ = 0; - } - | LEFT_BRACE statement_list RIGHT_BRACE { - if ($2 && $2->getAsAggregate()) - $2->getAsAggregate()->setOperator(EOpSequence); - $$ = $2; - } - ; - -statement_list - : statement { - $$ = parseContext.intermediate.makeAggregate($1); - if ($1 && $1->getAsBranchNode() && ($1->getAsBranchNode()->getFlowOp() == EOpCase || - $1->getAsBranchNode()->getFlowOp() == EOpDefault)) { - parseContext.wrapupSwitchSubsequence(0, $1); - $$ = 0; // start a fresh subsequence for what's after this case - } - } - | statement_list statement { - if ($2 && $2->getAsBranchNode() && ($2->getAsBranchNode()->getFlowOp() == EOpCase || - $2->getAsBranchNode()->getFlowOp() == EOpDefault)) { - parseContext.wrapupSwitchSubsequence($1 ? 
$1->getAsAggregate() : 0, $2); - $$ = 0; // start a fresh subsequence for what's after this case - } else - $$ = parseContext.intermediate.growAggregate($1, $2); - } - ; - -expression_statement - : SEMICOLON { $$ = 0; } - | expression SEMICOLON { $$ = static_cast($1); } - ; - -selection_statement - : IF LEFT_PAREN expression RIGHT_PAREN selection_rest_statement { - parseContext.boolCheck($1.loc, $3); - $$ = parseContext.intermediate.addSelection($3, $5, $1.loc); - } - ; - -selection_rest_statement - : statement_scoped ELSE statement_scoped { - $$.node1 = $1; - $$.node2 = $3; - } - | statement_scoped { - $$.node1 = $1; - $$.node2 = 0; - } - ; - -condition - // In 1996 c++ draft, conditions can include single declarations - : expression { - $$ = $1; - parseContext.boolCheck($1->getLoc(), $1); - } - | fully_specified_type IDENTIFIER EQUAL initializer { - parseContext.boolCheck($2.loc, $1); - - TType type($1); - TIntermNode* initNode = parseContext.declareVariable($2.loc, *$2.string, $1, 0, $4); - if (initNode) - $$ = initNode->getAsTyped(); - else - $$ = 0; - } - ; - -switch_statement - : SWITCH LEFT_PAREN expression RIGHT_PAREN { - // start new switch sequence on the switch stack - ++parseContext.controlFlowNestingLevel; - ++parseContext.statementNestingLevel; - parseContext.switchSequenceStack.push_back(new TIntermSequence); - parseContext.switchLevel.push_back(parseContext.statementNestingLevel); - parseContext.symbolTable.push(); - } - LEFT_BRACE switch_statement_list RIGHT_BRACE { - $$ = parseContext.addSwitch($1.loc, $3, $7 ? $7->getAsAggregate() : 0); - delete parseContext.switchSequenceStack.back(); - parseContext.switchSequenceStack.pop_back(); - parseContext.switchLevel.pop_back(); - parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]); - --parseContext.statementNestingLevel; - --parseContext.controlFlowNestingLevel; - } - ; - -switch_statement_list - : /* nothing */ { - $$ = 0; - } - | statement_list { - $$ = $1; - } - ; - -case_label - : CASE expression COLON { - $$ = 0; - if (parseContext.switchLevel.size() == 0) - parseContext.error($1.loc, "cannot appear outside switch statement", "case", ""); - else if (parseContext.switchLevel.back() != parseContext.statementNestingLevel) - parseContext.error($1.loc, "cannot be nested inside control flow", "case", ""); - else { - parseContext.constantValueCheck($2, "case"); - parseContext.integerCheck($2, "case"); - $$ = parseContext.intermediate.addBranch(EOpCase, $2, $1.loc); - } - } - | DEFAULT COLON { - $$ = 0; - if (parseContext.switchLevel.size() == 0) - parseContext.error($1.loc, "cannot appear outside switch statement", "default", ""); - else if (parseContext.switchLevel.back() != parseContext.statementNestingLevel) - parseContext.error($1.loc, "cannot be nested inside control flow", "default", ""); - else - $$ = parseContext.intermediate.addBranch(EOpDefault, $1.loc); - } - ; - -iteration_statement - : WHILE LEFT_PAREN { - if (! 
parseContext.limits.whileLoops) - parseContext.error($1.loc, "while loops not available", "limitation", ""); - parseContext.symbolTable.push(); - ++parseContext.loopNestingLevel; - ++parseContext.statementNestingLevel; - ++parseContext.controlFlowNestingLevel; - } - condition RIGHT_PAREN statement_no_new_scope { - parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]); - $$ = parseContext.intermediate.addLoop($6, $4, 0, true, $1.loc); - --parseContext.loopNestingLevel; - --parseContext.statementNestingLevel; - --parseContext.controlFlowNestingLevel; - } - | DO { - ++parseContext.loopNestingLevel; - ++parseContext.statementNestingLevel; - ++parseContext.controlFlowNestingLevel; - } - statement WHILE LEFT_PAREN expression RIGHT_PAREN SEMICOLON { - if (! parseContext.limits.whileLoops) - parseContext.error($1.loc, "do-while loops not available", "limitation", ""); - - parseContext.boolCheck($8.loc, $6); - - $$ = parseContext.intermediate.addLoop($3, $6, 0, false, $4.loc); - --parseContext.loopNestingLevel; - --parseContext.statementNestingLevel; - --parseContext.controlFlowNestingLevel; - } - | FOR LEFT_PAREN { - parseContext.symbolTable.push(); - ++parseContext.loopNestingLevel; - ++parseContext.statementNestingLevel; - ++parseContext.controlFlowNestingLevel; - } - for_init_statement for_rest_statement RIGHT_PAREN statement_no_new_scope { - parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]); - $$ = parseContext.intermediate.makeAggregate($4, $2.loc); - TIntermLoop* forLoop = parseContext.intermediate.addLoop($7, reinterpret_cast($5.node1), reinterpret_cast($5.node2), true, $1.loc); - if (! parseContext.limits.nonInductiveForLoops) - parseContext.inductiveLoopCheck($1.loc, $4, forLoop); - $$ = parseContext.intermediate.growAggregate($$, forLoop, $1.loc); - $$->getAsAggregate()->setOperator(EOpSequence); - --parseContext.loopNestingLevel; - --parseContext.statementNestingLevel; - --parseContext.controlFlowNestingLevel; - } - ; - -for_init_statement - : expression_statement { - $$ = $1; - } - | declaration_statement { - $$ = $1; - } - ; - -conditionopt - : condition { - $$ = $1; - } - | /* May be null */ { - $$ = 0; - } - ; - -for_rest_statement - : conditionopt SEMICOLON { - $$.node1 = $1; - $$.node2 = 0; - } - | conditionopt SEMICOLON expression { - $$.node1 = $1; - $$.node2 = $3; - } - ; - -jump_statement - : CONTINUE SEMICOLON { - if (parseContext.loopNestingLevel <= 0) - parseContext.error($1.loc, "continue statement only allowed in loops", "", ""); - $$ = parseContext.intermediate.addBranch(EOpContinue, $1.loc); - } - | BREAK SEMICOLON { - if (parseContext.loopNestingLevel + parseContext.switchSequenceStack.size() <= 0) - parseContext.error($1.loc, "break statement only allowed in switch and loops", "", ""); - $$ = parseContext.intermediate.addBranch(EOpBreak, $1.loc); - } - | RETURN SEMICOLON { - $$ = parseContext.intermediate.addBranch(EOpReturn, $1.loc); - if (parseContext.currentFunctionType->getBasicType() != EbtVoid) - parseContext.error($1.loc, "non-void function must return a value", "return", ""); - if (parseContext.inMain) - parseContext.postEntryPointReturn = true; - } - | RETURN expression SEMICOLON { - $$ = parseContext.handleReturnValue($1.loc, $2); - } - | DISCARD SEMICOLON { - parseContext.requireStage($1.loc, EShLangFragment, "discard"); - $$ = parseContext.intermediate.addBranch(EOpKill, $1.loc); - } - ; - -// Grammar Note: No 'goto'. Gotos are not supported. 
- -translation_unit - : external_declaration { - $$ = $1; - parseContext.intermediate.setTreeRoot($$); - } - | translation_unit external_declaration { - if ($2 != nullptr) { - $$ = parseContext.intermediate.growAggregate($1, $2); - parseContext.intermediate.setTreeRoot($$); - } - } - ; - -external_declaration - : function_definition { - $$ = $1; - } - | declaration { - $$ = $1; - } - | SEMICOLON { - parseContext.requireProfile($1.loc, ~EEsProfile, "extraneous semicolon"); - parseContext.profileRequires($1.loc, ~EEsProfile, 460, nullptr, "extraneous semicolon"); - $$ = nullptr; - } - ; - -function_definition - : function_prototype { - $1.function = parseContext.handleFunctionDeclarator($1.loc, *$1.function, false /* not prototype */); - $1.intermNode = parseContext.handleFunctionDefinition($1.loc, *$1.function); - } - compound_statement_no_new_scope { - // May be best done as post process phase on intermediate code - if (parseContext.currentFunctionType->getBasicType() != EbtVoid && ! parseContext.functionReturnsValue) - parseContext.error($1.loc, "function does not return a value:", "", $1.function->getName().c_str()); - parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]); - $$ = parseContext.intermediate.growAggregate($1.intermNode, $3); - parseContext.intermediate.setAggregateOperator($$, EOpFunction, $1.function->getType(), $1.loc); - $$->getAsAggregate()->setName($1.function->getMangledName().c_str()); - - // store the pragma information for debug and optimize and other vendor specific - // information. This information can be queried from the parse tree - $$->getAsAggregate()->setOptimize(parseContext.contextPragma.optimize); - $$->getAsAggregate()->setDebug(parseContext.contextPragma.debug); - $$->getAsAggregate()->addToPragmaTable(parseContext.contextPragma.pragmaTable); - } - ; - -%% diff --git a/deps/glslang/glslang/MachineIndependent/glslang_tab.cpp b/deps/glslang/glslang/MachineIndependent/glslang_tab.cpp deleted file mode 100644 index c6bae9ff..00000000 --- a/deps/glslang/glslang/MachineIndependent/glslang_tab.cpp +++ /dev/null @@ -1,8229 +0,0 @@ -/* A Bison parser, made by GNU Bison 3.0.4. */ - -/* Bison implementation for Yacc-like parsers in C - - Copyright (C) 1984, 1989-1990, 2000-2015 Free Software Foundation, Inc. - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . */ - -/* As a special exception, you may create a larger work that contains - part or all of the Bison parser skeleton and distribute that work - under terms of your choice, so long as that work isn't itself a - parser generator using the skeleton or a modified version thereof - as a parser skeleton. Alternatively, if you modify or redistribute - the parser skeleton itself, you may (at your option) remove this - special exception, which will cause the skeleton and the resulting - Bison output files to be licensed under the GNU General Public - License without this special exception. 
- - This special exception was added by the Free Software Foundation in - version 2.2 of Bison. */ - -/* C LALR(1) parser skeleton written by Richard Stallman, by - simplifying the original so-called "semantic" parser. */ - -/* All symbols defined below should begin with yy or YY, to avoid - infringing on user name space. This should be done even for local - variables, as they might otherwise be expanded by user macros. - There are some unavoidable exceptions within include files to - define necessary library symbols; they are noted "INFRINGES ON - USER NAME SPACE" below. */ - -/* Identify Bison output. */ -#define YYBISON 1 - -/* Bison version. */ -#define YYBISON_VERSION "3.0.4" - -/* Skeleton name. */ -#define YYSKELETON_NAME "yacc.c" - -/* Pure parsers. */ -#define YYPURE 1 - -/* Push parsers. */ -#define YYPUSH 0 - -/* Pull parsers. */ -#define YYPULL 1 - - - - -/* Copy the first part of user declarations. */ -#line 41 "MachineIndependent/glslang.y" /* yacc.c:339 */ - - -/* Based on: -ANSI C Yacc grammar - -In 1985, Jeff Lee published his Yacc grammar (which is accompanied by a -matching Lex specification) for the April 30, 1985 draft version of the -ANSI C standard. Tom Stockfisch reposted it to net.sources in 1987; that -original, as mentioned in the answer to question 17.25 of the comp.lang.c -FAQ, can be ftp'ed from ftp.uu.net, file usenet/net.sources/ansi.c.grammar.Z. - -I intend to keep this version as close to the current C Standard grammar as -possible; please let me know if you discover discrepancies. - -Jutta Degener, 1995 -*/ - -#include "SymbolTable.h" -#include "ParseHelper.h" -#include "../Public/ShaderLang.h" - -using namespace glslang; - - -#line 91 "MachineIndependent/glslang_tab.cpp" /* yacc.c:339 */ - -# ifndef YY_NULLPTR -# if defined __cplusplus && 201103L <= __cplusplus -# define YY_NULLPTR nullptr -# else -# define YY_NULLPTR 0 -# endif -# endif - -/* Enabling verbose error messages. */ -#ifdef YYERROR_VERBOSE -# undef YYERROR_VERBOSE -# define YYERROR_VERBOSE 1 -#else -# define YYERROR_VERBOSE 1 -#endif - -/* In a future release of Bison, this section will be replaced - by #include "glslang_tab.cpp.h". */ -#ifndef YY_YY_MACHINEINDEPENDENT_GLSLANG_TAB_CPP_H_INCLUDED -# define YY_YY_MACHINEINDEPENDENT_GLSLANG_TAB_CPP_H_INCLUDED -/* Debug traces. */ -#ifndef YYDEBUG -# define YYDEBUG 1 -#endif -#if YYDEBUG -extern int yydebug; -#endif - -/* Token type. 
*/ -#ifndef YYTOKENTYPE -# define YYTOKENTYPE - enum yytokentype - { - ATTRIBUTE = 258, - VARYING = 259, - CONST = 260, - BOOL = 261, - FLOAT = 262, - DOUBLE = 263, - INT = 264, - UINT = 265, - INT64_T = 266, - UINT64_T = 267, - INT16_T = 268, - UINT16_T = 269, - FLOAT16_T = 270, - BREAK = 271, - CONTINUE = 272, - DO = 273, - ELSE = 274, - FOR = 275, - IF = 276, - DISCARD = 277, - RETURN = 278, - SWITCH = 279, - CASE = 280, - DEFAULT = 281, - SUBROUTINE = 282, - BVEC2 = 283, - BVEC3 = 284, - BVEC4 = 285, - IVEC2 = 286, - IVEC3 = 287, - IVEC4 = 288, - I64VEC2 = 289, - I64VEC3 = 290, - I64VEC4 = 291, - UVEC2 = 292, - UVEC3 = 293, - UVEC4 = 294, - U64VEC2 = 295, - U64VEC3 = 296, - U64VEC4 = 297, - VEC2 = 298, - VEC3 = 299, - VEC4 = 300, - MAT2 = 301, - MAT3 = 302, - MAT4 = 303, - CENTROID = 304, - IN = 305, - OUT = 306, - INOUT = 307, - UNIFORM = 308, - PATCH = 309, - SAMPLE = 310, - BUFFER = 311, - SHARED = 312, - COHERENT = 313, - VOLATILE = 314, - RESTRICT = 315, - READONLY = 316, - WRITEONLY = 317, - DVEC2 = 318, - DVEC3 = 319, - DVEC4 = 320, - DMAT2 = 321, - DMAT3 = 322, - DMAT4 = 323, - F16VEC2 = 324, - F16VEC3 = 325, - F16VEC4 = 326, - F16MAT2 = 327, - F16MAT3 = 328, - F16MAT4 = 329, - I16VEC2 = 330, - I16VEC3 = 331, - I16VEC4 = 332, - U16VEC2 = 333, - U16VEC3 = 334, - U16VEC4 = 335, - NOPERSPECTIVE = 336, - FLAT = 337, - SMOOTH = 338, - LAYOUT = 339, - __EXPLICITINTERPAMD = 340, - MAT2X2 = 341, - MAT2X3 = 342, - MAT2X4 = 343, - MAT3X2 = 344, - MAT3X3 = 345, - MAT3X4 = 346, - MAT4X2 = 347, - MAT4X3 = 348, - MAT4X4 = 349, - DMAT2X2 = 350, - DMAT2X3 = 351, - DMAT2X4 = 352, - DMAT3X2 = 353, - DMAT3X3 = 354, - DMAT3X4 = 355, - DMAT4X2 = 356, - DMAT4X3 = 357, - DMAT4X4 = 358, - F16MAT2X2 = 359, - F16MAT2X3 = 360, - F16MAT2X4 = 361, - F16MAT3X2 = 362, - F16MAT3X3 = 363, - F16MAT3X4 = 364, - F16MAT4X2 = 365, - F16MAT4X3 = 366, - F16MAT4X4 = 367, - ATOMIC_UINT = 368, - SAMPLER1D = 369, - SAMPLER2D = 370, - SAMPLER3D = 371, - SAMPLERCUBE = 372, - SAMPLER1DSHADOW = 373, - SAMPLER2DSHADOW = 374, - SAMPLERCUBESHADOW = 375, - SAMPLER1DARRAY = 376, - SAMPLER2DARRAY = 377, - SAMPLER1DARRAYSHADOW = 378, - SAMPLER2DARRAYSHADOW = 379, - ISAMPLER1D = 380, - ISAMPLER2D = 381, - ISAMPLER3D = 382, - ISAMPLERCUBE = 383, - ISAMPLER1DARRAY = 384, - ISAMPLER2DARRAY = 385, - USAMPLER1D = 386, - USAMPLER2D = 387, - USAMPLER3D = 388, - USAMPLERCUBE = 389, - USAMPLER1DARRAY = 390, - USAMPLER2DARRAY = 391, - SAMPLER2DRECT = 392, - SAMPLER2DRECTSHADOW = 393, - ISAMPLER2DRECT = 394, - USAMPLER2DRECT = 395, - SAMPLERBUFFER = 396, - ISAMPLERBUFFER = 397, - USAMPLERBUFFER = 398, - SAMPLERCUBEARRAY = 399, - SAMPLERCUBEARRAYSHADOW = 400, - ISAMPLERCUBEARRAY = 401, - USAMPLERCUBEARRAY = 402, - SAMPLER2DMS = 403, - ISAMPLER2DMS = 404, - USAMPLER2DMS = 405, - SAMPLER2DMSARRAY = 406, - ISAMPLER2DMSARRAY = 407, - USAMPLER2DMSARRAY = 408, - SAMPLEREXTERNALOES = 409, - SAMPLER = 410, - SAMPLERSHADOW = 411, - TEXTURE1D = 412, - TEXTURE2D = 413, - TEXTURE3D = 414, - TEXTURECUBE = 415, - TEXTURE1DARRAY = 416, - TEXTURE2DARRAY = 417, - ITEXTURE1D = 418, - ITEXTURE2D = 419, - ITEXTURE3D = 420, - ITEXTURECUBE = 421, - ITEXTURE1DARRAY = 422, - ITEXTURE2DARRAY = 423, - UTEXTURE1D = 424, - UTEXTURE2D = 425, - UTEXTURE3D = 426, - UTEXTURECUBE = 427, - UTEXTURE1DARRAY = 428, - UTEXTURE2DARRAY = 429, - TEXTURE2DRECT = 430, - ITEXTURE2DRECT = 431, - UTEXTURE2DRECT = 432, - TEXTUREBUFFER = 433, - ITEXTUREBUFFER = 434, - UTEXTUREBUFFER = 435, - TEXTURECUBEARRAY = 436, - ITEXTURECUBEARRAY = 437, - UTEXTURECUBEARRAY = 438, - TEXTURE2DMS = 439, - 
ITEXTURE2DMS = 440, - UTEXTURE2DMS = 441, - TEXTURE2DMSARRAY = 442, - ITEXTURE2DMSARRAY = 443, - UTEXTURE2DMSARRAY = 444, - SUBPASSINPUT = 445, - SUBPASSINPUTMS = 446, - ISUBPASSINPUT = 447, - ISUBPASSINPUTMS = 448, - USUBPASSINPUT = 449, - USUBPASSINPUTMS = 450, - IMAGE1D = 451, - IIMAGE1D = 452, - UIMAGE1D = 453, - IMAGE2D = 454, - IIMAGE2D = 455, - UIMAGE2D = 456, - IMAGE3D = 457, - IIMAGE3D = 458, - UIMAGE3D = 459, - IMAGE2DRECT = 460, - IIMAGE2DRECT = 461, - UIMAGE2DRECT = 462, - IMAGECUBE = 463, - IIMAGECUBE = 464, - UIMAGECUBE = 465, - IMAGEBUFFER = 466, - IIMAGEBUFFER = 467, - UIMAGEBUFFER = 468, - IMAGE1DARRAY = 469, - IIMAGE1DARRAY = 470, - UIMAGE1DARRAY = 471, - IMAGE2DARRAY = 472, - IIMAGE2DARRAY = 473, - UIMAGE2DARRAY = 474, - IMAGECUBEARRAY = 475, - IIMAGECUBEARRAY = 476, - UIMAGECUBEARRAY = 477, - IMAGE2DMS = 478, - IIMAGE2DMS = 479, - UIMAGE2DMS = 480, - IMAGE2DMSARRAY = 481, - IIMAGE2DMSARRAY = 482, - UIMAGE2DMSARRAY = 483, - STRUCT = 484, - VOID = 485, - WHILE = 486, - IDENTIFIER = 487, - TYPE_NAME = 488, - FLOATCONSTANT = 489, - DOUBLECONSTANT = 490, - INTCONSTANT = 491, - UINTCONSTANT = 492, - INT64CONSTANT = 493, - UINT64CONSTANT = 494, - INT16CONSTANT = 495, - UINT16CONSTANT = 496, - BOOLCONSTANT = 497, - FLOAT16CONSTANT = 498, - LEFT_OP = 499, - RIGHT_OP = 500, - INC_OP = 501, - DEC_OP = 502, - LE_OP = 503, - GE_OP = 504, - EQ_OP = 505, - NE_OP = 506, - AND_OP = 507, - OR_OP = 508, - XOR_OP = 509, - MUL_ASSIGN = 510, - DIV_ASSIGN = 511, - ADD_ASSIGN = 512, - MOD_ASSIGN = 513, - LEFT_ASSIGN = 514, - RIGHT_ASSIGN = 515, - AND_ASSIGN = 516, - XOR_ASSIGN = 517, - OR_ASSIGN = 518, - SUB_ASSIGN = 519, - LEFT_PAREN = 520, - RIGHT_PAREN = 521, - LEFT_BRACKET = 522, - RIGHT_BRACKET = 523, - LEFT_BRACE = 524, - RIGHT_BRACE = 525, - DOT = 526, - COMMA = 527, - COLON = 528, - EQUAL = 529, - SEMICOLON = 530, - BANG = 531, - DASH = 532, - TILDE = 533, - PLUS = 534, - STAR = 535, - SLASH = 536, - PERCENT = 537, - LEFT_ANGLE = 538, - RIGHT_ANGLE = 539, - VERTICAL_BAR = 540, - CARET = 541, - AMPERSAND = 542, - QUESTION = 543, - INVARIANT = 544, - PRECISE = 545, - HIGH_PRECISION = 546, - MEDIUM_PRECISION = 547, - LOW_PRECISION = 548, - PRECISION = 549, - PACKED = 550, - RESOURCE = 551, - SUPERP = 552 - }; -#endif - -/* Value type. */ -#if ! defined YYSTYPE && ! defined YYSTYPE_IS_DECLARED - -union YYSTYPE -{ -#line 68 "MachineIndependent/glslang.y" /* yacc.c:355 */ - - struct { - glslang::TSourceLoc loc; - union { - glslang::TString *string; - int i; - unsigned int u; - long long i64; - unsigned long long u64; - bool b; - double d; - }; - glslang::TSymbol* symbol; - } lex; - struct { - glslang::TSourceLoc loc; - glslang::TOperator op; - union { - TIntermNode* intermNode; - glslang::TIntermNodePair nodePair; - glslang::TIntermTyped* intermTypedNode; - }; - union { - glslang::TPublicType type; - glslang::TFunction* function; - glslang::TParameter param; - glslang::TTypeLoc typeLine; - glslang::TTypeList* typeList; - glslang::TArraySizes* arraySizes; - glslang::TIdentifierList* identifierList; - }; - } interm; - -#line 463 "MachineIndependent/glslang_tab.cpp" /* yacc.c:355 */ -}; - -typedef union YYSTYPE YYSTYPE; -# define YYSTYPE_IS_TRIVIAL 1 -# define YYSTYPE_IS_DECLARED 1 -#endif - - - -int yyparse (glslang::TParseContext* pParseContext); - -#endif /* !YY_YY_MACHINEINDEPENDENT_GLSLANG_TAB_CPP_H_INCLUDED */ - -/* Copy the second part of user declarations. 
*/ -#line 102 "MachineIndependent/glslang.y" /* yacc.c:358 */ - - -/* windows only pragma */ -#ifdef _MSC_VER - #pragma warning(disable : 4065) - #pragma warning(disable : 4127) - #pragma warning(disable : 4244) -#endif - -#define parseContext (*pParseContext) -#define yyerror(context, msg) context->parserError(msg) - -extern int yylex(YYSTYPE*, TParseContext&); - - -#line 494 "MachineIndependent/glslang_tab.cpp" /* yacc.c:358 */ - -#ifdef short -# undef short -#endif - -#ifdef YYTYPE_UINT8 -typedef YYTYPE_UINT8 yytype_uint8; -#else -typedef unsigned char yytype_uint8; -#endif - -#ifdef YYTYPE_INT8 -typedef YYTYPE_INT8 yytype_int8; -#else -typedef signed char yytype_int8; -#endif - -#ifdef YYTYPE_UINT16 -typedef YYTYPE_UINT16 yytype_uint16; -#else -typedef unsigned short int yytype_uint16; -#endif - -#ifdef YYTYPE_INT16 -typedef YYTYPE_INT16 yytype_int16; -#else -typedef short int yytype_int16; -#endif - -#ifndef YYSIZE_T -# ifdef __SIZE_TYPE__ -# define YYSIZE_T __SIZE_TYPE__ -# elif defined size_t -# define YYSIZE_T size_t -# elif ! defined YYSIZE_T -# include /* INFRINGES ON USER NAME SPACE */ -# define YYSIZE_T size_t -# else -# define YYSIZE_T unsigned int -# endif -#endif - -#define YYSIZE_MAXIMUM ((YYSIZE_T) -1) - -#ifndef YY_ -# if defined YYENABLE_NLS && YYENABLE_NLS -# if ENABLE_NLS -# include /* INFRINGES ON USER NAME SPACE */ -# define YY_(Msgid) dgettext ("bison-runtime", Msgid) -# endif -# endif -# ifndef YY_ -# define YY_(Msgid) Msgid -# endif -#endif - -#ifndef YY_ATTRIBUTE -# if (defined __GNUC__ \ - && (2 < __GNUC__ || (__GNUC__ == 2 && 96 <= __GNUC_MINOR__))) \ - || defined __SUNPRO_C && 0x5110 <= __SUNPRO_C -# define YY_ATTRIBUTE(Spec) __attribute__(Spec) -# else -# define YY_ATTRIBUTE(Spec) /* empty */ -# endif -#endif - -#ifndef YY_ATTRIBUTE_PURE -# define YY_ATTRIBUTE_PURE YY_ATTRIBUTE ((__pure__)) -#endif - -#ifndef YY_ATTRIBUTE_UNUSED -# define YY_ATTRIBUTE_UNUSED YY_ATTRIBUTE ((__unused__)) -#endif - -#if !defined _Noreturn \ - && (!defined __STDC_VERSION__ || __STDC_VERSION__ < 201112) -# if defined _MSC_VER && 1200 <= _MSC_VER -# define _Noreturn __declspec (noreturn) -# else -# define _Noreturn YY_ATTRIBUTE ((__noreturn__)) -# endif -#endif - -/* Suppress unused-variable warnings by "using" E. */ -#if ! defined lint || defined __GNUC__ -# define YYUSE(E) ((void) (E)) -#else -# define YYUSE(E) /* empty */ -#endif - -#if defined __GNUC__ && 407 <= __GNUC__ * 100 + __GNUC_MINOR__ -/* Suppress an incorrect diagnostic about yylval being uninitialized. */ -# define YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN \ - _Pragma ("GCC diagnostic push") \ - _Pragma ("GCC diagnostic ignored \"-Wuninitialized\"")\ - _Pragma ("GCC diagnostic ignored \"-Wmaybe-uninitialized\"") -# define YY_IGNORE_MAYBE_UNINITIALIZED_END \ - _Pragma ("GCC diagnostic pop") -#else -# define YY_INITIAL_VALUE(Value) Value -#endif -#ifndef YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN -# define YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN -# define YY_IGNORE_MAYBE_UNINITIALIZED_END -#endif -#ifndef YY_INITIAL_VALUE -# define YY_INITIAL_VALUE(Value) /* Nothing. */ -#endif - - -#if ! defined yyoverflow || YYERROR_VERBOSE - -/* The parser invokes alloca or malloc; define the necessary symbols. 
*/ - -# ifdef YYSTACK_USE_ALLOCA -# if YYSTACK_USE_ALLOCA -# ifdef __GNUC__ -# define YYSTACK_ALLOC __builtin_alloca -# elif defined __BUILTIN_VA_ARG_INCR -# include /* INFRINGES ON USER NAME SPACE */ -# elif defined _AIX -# define YYSTACK_ALLOC __alloca -# elif defined _MSC_VER -# include /* INFRINGES ON USER NAME SPACE */ -# define alloca _alloca -# else -# define YYSTACK_ALLOC alloca -# if ! defined _ALLOCA_H && ! defined EXIT_SUCCESS -# include /* INFRINGES ON USER NAME SPACE */ - /* Use EXIT_SUCCESS as a witness for stdlib.h. */ -# ifndef EXIT_SUCCESS -# define EXIT_SUCCESS 0 -# endif -# endif -# endif -# endif -# endif - -# ifdef YYSTACK_ALLOC - /* Pacify GCC's 'empty if-body' warning. */ -# define YYSTACK_FREE(Ptr) do { /* empty */; } while (0) -# ifndef YYSTACK_ALLOC_MAXIMUM - /* The OS might guarantee only one guard page at the bottom of the stack, - and a page size can be as small as 4096 bytes. So we cannot safely - invoke alloca (N) if N exceeds 4096. Use a slightly smaller number - to allow for a few compiler-allocated temporary stack slots. */ -# define YYSTACK_ALLOC_MAXIMUM 4032 /* reasonable circa 2006 */ -# endif -# else -# define YYSTACK_ALLOC YYMALLOC -# define YYSTACK_FREE YYFREE -# ifndef YYSTACK_ALLOC_MAXIMUM -# define YYSTACK_ALLOC_MAXIMUM YYSIZE_MAXIMUM -# endif -# if (defined __cplusplus && ! defined EXIT_SUCCESS \ - && ! ((defined YYMALLOC || defined malloc) \ - && (defined YYFREE || defined free))) -# include /* INFRINGES ON USER NAME SPACE */ -# ifndef EXIT_SUCCESS -# define EXIT_SUCCESS 0 -# endif -# endif -# ifndef YYMALLOC -# define YYMALLOC malloc -# if ! defined malloc && ! defined EXIT_SUCCESS -void *malloc (YYSIZE_T); /* INFRINGES ON USER NAME SPACE */ -# endif -# endif -# ifndef YYFREE -# define YYFREE free -# if ! defined free && ! defined EXIT_SUCCESS -void free (void *); /* INFRINGES ON USER NAME SPACE */ -# endif -# endif -# endif -#endif /* ! defined yyoverflow || YYERROR_VERBOSE */ - - -#if (! defined yyoverflow \ - && (! defined __cplusplus \ - || (defined YYSTYPE_IS_TRIVIAL && YYSTYPE_IS_TRIVIAL))) - -/* A type that is properly aligned for any stack member. */ -union yyalloc -{ - yytype_int16 yyss_alloc; - YYSTYPE yyvs_alloc; -}; - -/* The size of the maximum gap between one aligned stack and the next. */ -# define YYSTACK_GAP_MAXIMUM (sizeof (union yyalloc) - 1) - -/* The size of an array large to enough to hold all stacks, each with - N elements. */ -# define YYSTACK_BYTES(N) \ - ((N) * (sizeof (yytype_int16) + sizeof (YYSTYPE)) \ - + YYSTACK_GAP_MAXIMUM) - -# define YYCOPY_NEEDED 1 - -/* Relocate STACK from its old location to the new one. The - local variables YYSIZE and YYSTACKSIZE give the old and new number of - elements in the stack, and YYPTR gives the new location of the - stack. Advance YYPTR to a properly aligned location for the next - stack. */ -# define YYSTACK_RELOCATE(Stack_alloc, Stack) \ - do \ - { \ - YYSIZE_T yynewbytes; \ - YYCOPY (&yyptr->Stack_alloc, Stack, yysize); \ - Stack = &yyptr->Stack_alloc; \ - yynewbytes = yystacksize * sizeof (*Stack) + YYSTACK_GAP_MAXIMUM; \ - yyptr += yynewbytes / sizeof (*yyptr); \ - } \ - while (0) - -#endif - -#if defined YYCOPY_NEEDED && YYCOPY_NEEDED -/* Copy COUNT objects from SRC to DST. The source and destination do - not overlap. 
*/ -# ifndef YYCOPY -# if defined __GNUC__ && 1 < __GNUC__ -# define YYCOPY(Dst, Src, Count) \ - __builtin_memcpy (Dst, Src, (Count) * sizeof (*(Src))) -# else -# define YYCOPY(Dst, Src, Count) \ - do \ - { \ - YYSIZE_T yyi; \ - for (yyi = 0; yyi < (Count); yyi++) \ - (Dst)[yyi] = (Src)[yyi]; \ - } \ - while (0) -# endif -# endif -#endif /* !YYCOPY_NEEDED */ - -/* YYFINAL -- State number of the termination state. */ -#define YYFINAL 274 -/* YYLAST -- Last index in YYTABLE. */ -#define YYLAST 6614 - -/* YYNTOKENS -- Number of terminals. */ -#define YYNTOKENS 298 -/* YYNNTS -- Number of nonterminals. */ -#define YYNNTS 100 -/* YYNRULES -- Number of rules. */ -#define YYNRULES 450 -/* YYNSTATES -- Number of states. */ -#define YYNSTATES 582 - -/* YYTRANSLATE[YYX] -- Symbol number corresponding to YYX as returned - by yylex, with out-of-bounds checking. */ -#define YYUNDEFTOK 2 -#define YYMAXUTOK 552 - -#define YYTRANSLATE(YYX) \ - ((unsigned int) (YYX) <= YYMAXUTOK ? yytranslate[YYX] : YYUNDEFTOK) - -/* YYTRANSLATE[TOKEN-NUM] -- Symbol number corresponding to TOKEN-NUM - as returned by yylex, without out-of-bounds checking. */ -static const yytype_uint16 yytranslate[] = -{ - 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 1, 2, 3, 4, - 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, - 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, - 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, - 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, - 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, - 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, - 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, - 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, - 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, - 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, - 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, - 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, - 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, - 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, - 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, - 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, - 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, - 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, - 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, - 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, - 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, - 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, - 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, - 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, - 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, - 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, - 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, - 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, - 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, - 295, 296, 297 -}; - -#if YYDEBUG - /* YYRLINE[YYN] -- Source line where rule number YYN was defined. 
*/ -static const yytype_uint16 yyrline[] = -{ - 0, 253, 253, 259, 262, 265, 269, 273, 277, 283, - 289, 292, 296, 302, 305, 313, 316, 319, 322, 325, - 330, 338, 345, 352, 358, 362, 369, 372, 378, 385, - 395, 403, 408, 438, 444, 448, 452, 472, 473, 474, - 475, 481, 482, 487, 492, 501, 502, 507, 515, 516, - 522, 531, 532, 537, 542, 547, 555, 556, 564, 575, - 576, 585, 586, 595, 596, 605, 606, 614, 615, 623, - 624, 632, 633, 633, 651, 652, 667, 671, 675, 679, - 684, 688, 692, 696, 700, 704, 708, 715, 718, 729, - 736, 741, 746, 754, 758, 762, 766, 771, 776, 785, - 785, 796, 800, 807, 814, 817, 824, 832, 852, 870, - 885, 908, 919, 929, 939, 949, 958, 961, 965, 969, - 974, 982, 987, 992, 997, 1002, 1011, 1022, 1049, 1058, - 1065, 1072, 1079, 1091, 1097, 1100, 1107, 1111, 1115, 1123, - 1132, 1135, 1146, 1149, 1152, 1156, 1160, 1164, 1171, 1175, - 1187, 1201, 1206, 1212, 1218, 1225, 1231, 1236, 1241, 1246, - 1254, 1258, 1262, 1266, 1270, 1274, 1280, 1289, 1292, 1300, - 1304, 1313, 1318, 1326, 1330, 1340, 1344, 1348, 1353, 1360, - 1364, 1369, 1374, 1379, 1386, 1393, 1397, 1402, 1407, 1412, - 1418, 1424, 1430, 1438, 1446, 1454, 1459, 1464, 1469, 1474, - 1479, 1484, 1490, 1496, 1502, 1510, 1518, 1526, 1532, 1538, - 1544, 1550, 1556, 1562, 1570, 1578, 1586, 1591, 1596, 1601, - 1606, 1611, 1616, 1621, 1626, 1631, 1636, 1641, 1646, 1652, - 1658, 1664, 1670, 1676, 1682, 1688, 1694, 1700, 1706, 1712, - 1718, 1726, 1734, 1742, 1750, 1758, 1766, 1774, 1782, 1790, - 1798, 1806, 1814, 1819, 1824, 1829, 1834, 1839, 1844, 1849, - 1854, 1859, 1864, 1869, 1874, 1879, 1884, 1889, 1894, 1899, - 1904, 1909, 1914, 1919, 1924, 1929, 1934, 1939, 1944, 1949, - 1954, 1959, 1964, 1969, 1974, 1979, 1984, 1989, 1994, 1999, - 2004, 2009, 2014, 2019, 2024, 2029, 2034, 2039, 2044, 2049, - 2054, 2059, 2064, 2069, 2074, 2079, 2084, 2089, 2094, 2099, - 2104, 2109, 2114, 2119, 2124, 2129, 2134, 2139, 2144, 2149, - 2154, 2159, 2164, 2169, 2174, 2179, 2184, 2189, 2194, 2199, - 2204, 2209, 2214, 2219, 2224, 2229, 2234, 2239, 2244, 2249, - 2254, 2259, 2264, 2269, 2274, 2279, 2284, 2289, 2294, 2299, - 2304, 2309, 2314, 2319, 2324, 2329, 2334, 2339, 2344, 2349, - 2354, 2359, 2365, 2371, 2377, 2383, 2389, 2395, 2401, 2406, - 2422, 2427, 2432, 2440, 2440, 2451, 2451, 2461, 2464, 2477, - 2495, 2519, 2523, 2529, 2534, 2545, 2548, 2554, 2563, 2566, - 2572, 2576, 2577, 2583, 2584, 2585, 2586, 2587, 2588, 2589, - 2593, 2594, 2598, 2594, 2610, 2611, 2615, 2615, 2622, 2622, - 2636, 2639, 2647, 2655, 2666, 2667, 2671, 2678, 2682, 2690, - 2694, 2707, 2707, 2727, 2730, 2736, 2748, 2760, 2760, 2775, - 2775, 2791, 2791, 2812, 2815, 2821, 2824, 2830, 2834, 2841, - 2846, 2851, 2858, 2861, 2870, 2874, 2883, 2886, 2889, 2897, - 2897 -}; -#endif - -#if YYDEBUG || YYERROR_VERBOSE || 1 -/* YYTNAME[SYMBOL-NUM] -- String name of the symbol SYMBOL-NUM. - First, the terminals, then, starting at YYNTOKENS, nonterminals. 
*/ -static const char *const yytname[] = -{ - "$end", "error", "$undefined", "ATTRIBUTE", "VARYING", "CONST", "BOOL", - "FLOAT", "DOUBLE", "INT", "UINT", "INT64_T", "UINT64_T", "INT16_T", - "UINT16_T", "FLOAT16_T", "BREAK", "CONTINUE", "DO", "ELSE", "FOR", "IF", - "DISCARD", "RETURN", "SWITCH", "CASE", "DEFAULT", "SUBROUTINE", "BVEC2", - "BVEC3", "BVEC4", "IVEC2", "IVEC3", "IVEC4", "I64VEC2", "I64VEC3", - "I64VEC4", "UVEC2", "UVEC3", "UVEC4", "U64VEC2", "U64VEC3", "U64VEC4", - "VEC2", "VEC3", "VEC4", "MAT2", "MAT3", "MAT4", "CENTROID", "IN", "OUT", - "INOUT", "UNIFORM", "PATCH", "SAMPLE", "BUFFER", "SHARED", "COHERENT", - "VOLATILE", "RESTRICT", "READONLY", "WRITEONLY", "DVEC2", "DVEC3", - "DVEC4", "DMAT2", "DMAT3", "DMAT4", "F16VEC2", "F16VEC3", "F16VEC4", - "F16MAT2", "F16MAT3", "F16MAT4", "I16VEC2", "I16VEC3", "I16VEC4", - "U16VEC2", "U16VEC3", "U16VEC4", "NOPERSPECTIVE", "FLAT", "SMOOTH", - "LAYOUT", "__EXPLICITINTERPAMD", "MAT2X2", "MAT2X3", "MAT2X4", "MAT3X2", - "MAT3X3", "MAT3X4", "MAT4X2", "MAT4X3", "MAT4X4", "DMAT2X2", "DMAT2X3", - "DMAT2X4", "DMAT3X2", "DMAT3X3", "DMAT3X4", "DMAT4X2", "DMAT4X3", - "DMAT4X4", "F16MAT2X2", "F16MAT2X3", "F16MAT2X4", "F16MAT3X2", - "F16MAT3X3", "F16MAT3X4", "F16MAT4X2", "F16MAT4X3", "F16MAT4X4", - "ATOMIC_UINT", "SAMPLER1D", "SAMPLER2D", "SAMPLER3D", "SAMPLERCUBE", - "SAMPLER1DSHADOW", "SAMPLER2DSHADOW", "SAMPLERCUBESHADOW", - "SAMPLER1DARRAY", "SAMPLER2DARRAY", "SAMPLER1DARRAYSHADOW", - "SAMPLER2DARRAYSHADOW", "ISAMPLER1D", "ISAMPLER2D", "ISAMPLER3D", - "ISAMPLERCUBE", "ISAMPLER1DARRAY", "ISAMPLER2DARRAY", "USAMPLER1D", - "USAMPLER2D", "USAMPLER3D", "USAMPLERCUBE", "USAMPLER1DARRAY", - "USAMPLER2DARRAY", "SAMPLER2DRECT", "SAMPLER2DRECTSHADOW", - "ISAMPLER2DRECT", "USAMPLER2DRECT", "SAMPLERBUFFER", "ISAMPLERBUFFER", - "USAMPLERBUFFER", "SAMPLERCUBEARRAY", "SAMPLERCUBEARRAYSHADOW", - "ISAMPLERCUBEARRAY", "USAMPLERCUBEARRAY", "SAMPLER2DMS", "ISAMPLER2DMS", - "USAMPLER2DMS", "SAMPLER2DMSARRAY", "ISAMPLER2DMSARRAY", - "USAMPLER2DMSARRAY", "SAMPLEREXTERNALOES", "SAMPLER", "SAMPLERSHADOW", - "TEXTURE1D", "TEXTURE2D", "TEXTURE3D", "TEXTURECUBE", "TEXTURE1DARRAY", - "TEXTURE2DARRAY", "ITEXTURE1D", "ITEXTURE2D", "ITEXTURE3D", - "ITEXTURECUBE", "ITEXTURE1DARRAY", "ITEXTURE2DARRAY", "UTEXTURE1D", - "UTEXTURE2D", "UTEXTURE3D", "UTEXTURECUBE", "UTEXTURE1DARRAY", - "UTEXTURE2DARRAY", "TEXTURE2DRECT", "ITEXTURE2DRECT", "UTEXTURE2DRECT", - "TEXTUREBUFFER", "ITEXTUREBUFFER", "UTEXTUREBUFFER", "TEXTURECUBEARRAY", - "ITEXTURECUBEARRAY", "UTEXTURECUBEARRAY", "TEXTURE2DMS", "ITEXTURE2DMS", - "UTEXTURE2DMS", "TEXTURE2DMSARRAY", "ITEXTURE2DMSARRAY", - "UTEXTURE2DMSARRAY", "SUBPASSINPUT", "SUBPASSINPUTMS", "ISUBPASSINPUT", - "ISUBPASSINPUTMS", "USUBPASSINPUT", "USUBPASSINPUTMS", "IMAGE1D", - "IIMAGE1D", "UIMAGE1D", "IMAGE2D", "IIMAGE2D", "UIMAGE2D", "IMAGE3D", - "IIMAGE3D", "UIMAGE3D", "IMAGE2DRECT", "IIMAGE2DRECT", "UIMAGE2DRECT", - "IMAGECUBE", "IIMAGECUBE", "UIMAGECUBE", "IMAGEBUFFER", "IIMAGEBUFFER", - "UIMAGEBUFFER", "IMAGE1DARRAY", "IIMAGE1DARRAY", "UIMAGE1DARRAY", - "IMAGE2DARRAY", "IIMAGE2DARRAY", "UIMAGE2DARRAY", "IMAGECUBEARRAY", - "IIMAGECUBEARRAY", "UIMAGECUBEARRAY", "IMAGE2DMS", "IIMAGE2DMS", - "UIMAGE2DMS", "IMAGE2DMSARRAY", "IIMAGE2DMSARRAY", "UIMAGE2DMSARRAY", - "STRUCT", "VOID", "WHILE", "IDENTIFIER", "TYPE_NAME", "FLOATCONSTANT", - "DOUBLECONSTANT", "INTCONSTANT", "UINTCONSTANT", "INT64CONSTANT", - "UINT64CONSTANT", "INT16CONSTANT", "UINT16CONSTANT", "BOOLCONSTANT", - "FLOAT16CONSTANT", "LEFT_OP", "RIGHT_OP", "INC_OP", "DEC_OP", "LE_OP", - "GE_OP", 
"EQ_OP", "NE_OP", "AND_OP", "OR_OP", "XOR_OP", "MUL_ASSIGN", - "DIV_ASSIGN", "ADD_ASSIGN", "MOD_ASSIGN", "LEFT_ASSIGN", "RIGHT_ASSIGN", - "AND_ASSIGN", "XOR_ASSIGN", "OR_ASSIGN", "SUB_ASSIGN", "LEFT_PAREN", - "RIGHT_PAREN", "LEFT_BRACKET", "RIGHT_BRACKET", "LEFT_BRACE", - "RIGHT_BRACE", "DOT", "COMMA", "COLON", "EQUAL", "SEMICOLON", "BANG", - "DASH", "TILDE", "PLUS", "STAR", "SLASH", "PERCENT", "LEFT_ANGLE", - "RIGHT_ANGLE", "VERTICAL_BAR", "CARET", "AMPERSAND", "QUESTION", - "INVARIANT", "PRECISE", "HIGH_PRECISION", "MEDIUM_PRECISION", - "LOW_PRECISION", "PRECISION", "PACKED", "RESOURCE", "SUPERP", "$accept", - "variable_identifier", "primary_expression", "postfix_expression", - "integer_expression", "function_call", "function_call_or_method", - "function_call_generic", "function_call_header_no_parameters", - "function_call_header_with_parameters", "function_call_header", - "function_identifier", "unary_expression", "unary_operator", - "multiplicative_expression", "additive_expression", "shift_expression", - "relational_expression", "equality_expression", "and_expression", - "exclusive_or_expression", "inclusive_or_expression", - "logical_and_expression", "logical_xor_expression", - "logical_or_expression", "conditional_expression", "$@1", - "assignment_expression", "assignment_operator", "expression", - "constant_expression", "declaration", "block_structure", "$@2", - "identifier_list", "function_prototype", "function_declarator", - "function_header_with_parameters", "function_header", - "parameter_declarator", "parameter_declaration", - "parameter_type_specifier", "init_declarator_list", "single_declaration", - "fully_specified_type", "invariant_qualifier", "interpolation_qualifier", - "layout_qualifier", "layout_qualifier_id_list", "layout_qualifier_id", - "precise_qualifier", "type_qualifier", "single_type_qualifier", - "storage_qualifier", "type_name_list", "type_specifier", - "array_specifier", "type_specifier_nonarray", "precision_qualifier", - "struct_specifier", "$@3", "$@4", "struct_declaration_list", - "struct_declaration", "struct_declarator_list", "struct_declarator", - "initializer", "initializer_list", "declaration_statement", "statement", - "simple_statement", "compound_statement", "$@5", "$@6", - "statement_no_new_scope", "statement_scoped", "$@7", "$@8", - "compound_statement_no_new_scope", "statement_list", - "expression_statement", "selection_statement", - "selection_rest_statement", "condition", "switch_statement", "$@9", - "switch_statement_list", "case_label", "iteration_statement", "$@10", - "$@11", "$@12", "for_init_statement", "conditionopt", - "for_rest_statement", "jump_statement", "translation_unit", - "external_declaration", "function_definition", "$@13", YY_NULLPTR -}; -#endif - -# ifdef YYPRINT -/* YYTOKNUM[NUM] -- (External) token number corresponding to the - (internal) symbol number NUM (which must be that of a token). 
*/ -static const yytype_uint16 yytoknum[] = -{ - 0, 256, 257, 258, 259, 260, 261, 262, 263, 264, - 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, - 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, - 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, - 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, - 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, - 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, - 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, - 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, - 345, 346, 347, 348, 349, 350, 351, 352, 353, 354, - 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, - 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, - 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, - 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, - 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, - 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, - 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, - 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, - 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, - 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, - 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, - 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, - 475, 476, 477, 478, 479, 480, 481, 482, 483, 484, - 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, - 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, - 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, - 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, - 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, - 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, - 545, 546, 547, 548, 549, 550, 551, 552 -}; -# endif - -#define YYPACT_NINF -525 - -#define yypact_value_is_default(Yystate) \ - (!!((Yystate) == (-525))) - -#define YYTABLE_NINF -407 - -#define yytable_value_is_error(Yytable_value) \ - 0 - - /* YYPACT[STATE-NUM] -- Index in YYTABLE of the portion describing - STATE-NUM. 
*/ -static const yytype_int16 yypact[] = -{ - 2619, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -243, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -228, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -215, -525, -525, -525, - -525, -525, -525, -525, -525, -157, -525, -216, -218, -205, - -141, 4260, -165, -525, -94, -525, -525, -525, -525, 3183, - -525, -525, -525, -117, -525, -525, 575, -525, -525, -80, - -48, -114, -525, 6381, -242, -525, -525, -113, -525, 4260, - -525, -525, -525, 4260, -75, -74, -525, -235, -190, -525, - -525, -525, 4765, -108, -525, -525, -525, -186, -525, -112, - -178, -525, -525, 4260, -115, -525, -226, 867, -525, -525, - -525, -525, -117, -229, -525, 5039, -224, -525, -71, -525, - -158, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, 5861, 5861, 5861, -525, -525, -525, -525, -525, - -525, -525, -223, -525, -525, -525, -102, -177, 6121, -100, - -525, 5861, -204, -171, -132, -221, -199, -124, -120, -111, - -84, -83, -233, -98, -525, 5313, -525, -60, 5861, -525, - -48, 4260, 4260, -59, 3456, -525, -525, -525, -99, -97, - -525, -90, -88, -96, 5587, -85, 5861, -92, -79, -81, - -525, -525, -191, -525, -525, -153, -525, -218, -78, -525, - -525, -525, -525, 1159, -525, -525, -525, -525, -525, -525, - -108, 5039, -193, 5039, -525, -525, 5039, 4260, -525, -47, - -525, -525, -525, -176, -525, -525, 5861, -42, -525, -525, - 5861, -73, -525, -525, -525, 5861, 5861, 5861, 5861, 5861, - 5861, 5861, 5861, 5861, 5861, 5861, 5861, 5861, 5861, 5861, - 5861, 5861, 5861, 5861, -525, -525, -525, -76, -525, -525, - -525, -525, 3724, -59, -117, -152, -525, -525, -525, -525, - -525, 1451, -525, 5861, -525, -525, -143, 5861, -180, -525, - -525, -525, 1451, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, 5861, 5861, -525, -525, -525, -525, - 5039, -525, -133, -525, 3992, -525, -525, -72, -77, -525, - -525, -525, -525, -525, -204, -204, -171, -171, -132, -132, - -132, -132, -221, -221, -199, -124, -120, -111, -84, -83, - 5861, -525, -525, -142, -108, -59, -525, -37, 2327, -175, - -525, -163, -525, 2892, 1451, -525, -525, -525, -525, 4491, - -525, -525, -129, -525, -525, -68, -525, -525, 2892, -70, - -525, -77, -32, 4260, -63, -66, -525, -525, 5861, 5861, - -525, -69, -61, 188, -58, 2035, -525, -56, -57, 1743, - -525, -525, -161, 5861, 1743, -70, -525, -525, 1451, 5039, - -525, -525, -525, -67, 
-77, -525, -525, 1451, -54, -525, - -525, -525 -}; - - /* YYDEFACT[STATE-NUM] -- Default reduction number in state STATE-NUM. - Performed when YYTABLE does not specify something else to do. Zero - means the default is an error. */ -static const yytype_uint16 yydefact[] = -{ - 0, 149, 150, 148, 185, 176, 177, 179, 180, 181, - 182, 183, 184, 178, 165, 195, 196, 197, 198, 199, - 200, 201, 202, 203, 207, 208, 209, 210, 211, 212, - 186, 187, 188, 216, 217, 218, 154, 152, 153, 151, - 157, 155, 156, 158, 159, 160, 161, 162, 163, 164, - 189, 190, 191, 228, 229, 230, 192, 193, 194, 240, - 241, 242, 204, 205, 206, 213, 214, 215, 131, 130, - 129, 0, 132, 219, 220, 221, 222, 223, 224, 225, - 226, 227, 231, 232, 233, 234, 235, 236, 237, 238, - 239, 243, 244, 245, 246, 247, 248, 249, 250, 251, - 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, - 262, 263, 266, 267, 268, 269, 270, 271, 273, 274, - 275, 276, 277, 278, 280, 281, 282, 283, 284, 285, - 286, 264, 265, 272, 279, 287, 288, 289, 290, 291, - 292, 361, 293, 294, 295, 296, 297, 298, 299, 300, - 302, 303, 304, 305, 306, 307, 309, 310, 311, 312, - 313, 314, 316, 317, 318, 319, 320, 321, 301, 308, - 315, 322, 323, 324, 325, 326, 327, 362, 363, 364, - 365, 366, 367, 328, 329, 330, 331, 332, 333, 334, - 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, - 345, 346, 347, 348, 349, 350, 351, 352, 353, 354, - 355, 356, 357, 358, 359, 360, 0, 175, 369, 448, - 128, 139, 370, 371, 372, 0, 447, 0, 449, 0, - 105, 104, 0, 116, 121, 146, 145, 143, 147, 0, - 140, 142, 126, 169, 144, 368, 0, 444, 446, 0, - 0, 0, 375, 0, 0, 93, 90, 0, 103, 0, - 112, 106, 114, 0, 115, 0, 91, 122, 0, 96, - 141, 127, 0, 170, 1, 445, 167, 0, 138, 136, - 0, 134, 373, 0, 0, 94, 0, 0, 450, 107, - 111, 113, 109, 117, 108, 0, 123, 99, 0, 97, - 0, 2, 10, 11, 4, 5, 6, 7, 8, 9, - 13, 12, 0, 0, 0, 171, 39, 38, 40, 37, - 3, 15, 33, 17, 22, 23, 0, 0, 27, 0, - 41, 0, 45, 48, 51, 56, 59, 61, 63, 65, - 67, 69, 71, 0, 31, 0, 166, 0, 0, 133, - 0, 0, 0, 0, 0, 377, 92, 95, 0, 0, - 429, 0, 0, 0, 0, 0, 0, 0, 0, 401, - 410, 414, 41, 74, 87, 0, 390, 0, 126, 393, - 412, 392, 391, 0, 394, 395, 396, 397, 398, 399, - 110, 0, 118, 0, 385, 125, 0, 0, 101, 0, - 98, 34, 35, 0, 19, 20, 0, 0, 25, 24, - 0, 175, 28, 30, 36, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 72, 172, 173, 0, 168, 89, - 137, 135, 0, 0, 383, 0, 381, 376, 378, 440, - 439, 0, 431, 0, 443, 441, 0, 0, 0, 426, - 427, 400, 0, 77, 78, 80, 79, 82, 83, 84, - 85, 86, 81, 76, 0, 0, 415, 411, 413, 120, - 0, 388, 0, 124, 0, 102, 14, 0, 21, 18, - 29, 42, 43, 44, 47, 46, 49, 50, 54, 55, - 52, 53, 57, 58, 60, 62, 64, 66, 68, 70, - 0, 174, 374, 0, 384, 0, 379, 0, 0, 0, - 442, 0, 425, 0, 402, 75, 88, 119, 386, 0, - 100, 16, 0, 380, 382, 0, 434, 433, 436, 408, - 421, 419, 0, 0, 0, 0, 387, 389, 0, 0, - 435, 0, 0, 418, 0, 0, 416, 0, 0, 0, - 403, 73, 0, 437, 0, 408, 407, 409, 423, 0, - 405, 428, 404, 0, 438, 432, 417, 424, 0, 420, - 430, 422 -}; - - /* YYPGOTO[NTERM-NUM]. 
*/ -static const yytype_int16 yypgoto[] = -{ - -525, -525, -525, -525, -525, -525, -525, -525, -525, -525, - -525, -525, -103, -525, -278, -274, -297, -273, -214, -211, - -210, -212, -209, -208, -525, -261, -525, -292, -525, -308, - -525, 4, -525, -525, -525, 5, -525, -525, -525, -41, - -38, -39, -525, -525, -504, -525, -525, -525, -525, -123, - -525, -230, -237, -525, -525, 0, -246, -525, 1, -525, - -525, -525, -337, -342, -207, -286, -378, -525, -285, -376, - -524, -322, -525, -525, -330, -327, -525, -525, -22, -452, - -275, -525, -525, -298, -525, -525, -525, -525, -525, -525, - -525, -525, -525, -525, -525, -525, -525, -2, -525, -525 -}; - - /* YYDEFGOTO[NTERM-NUM]. */ -static const yytype_int16 yydefgoto[] = -{ - -1, 320, 321, 322, 487, 323, 324, 325, 326, 327, - 328, 329, 372, 331, 332, 333, 334, 335, 336, 337, - 338, 339, 340, 341, 342, 373, 510, 374, 474, 375, - 440, 376, 227, 397, 300, 377, 229, 230, 231, 260, - 261, 262, 232, 233, 234, 235, 236, 237, 280, 281, - 238, 239, 240, 241, 277, 344, 273, 243, 244, 245, - 351, 283, 354, 355, 445, 446, 395, 482, 379, 380, - 381, 382, 462, 545, 571, 553, 554, 555, 572, 383, - 384, 385, 556, 544, 386, 557, 578, 387, 388, 523, - 451, 518, 538, 551, 552, 389, 246, 247, 248, 257 -}; - - /* YYTABLE[YYPACT[STATE-NUM]] -- What to do in state STATE-NUM. If - positive, shift that token. If negative, reduce the rule whose - number is the opposite. If YYTABLE_NINF, syntax error. */ -static const yytype_int16 yytable[] = -{ - 242, 263, 270, 394, 226, 228, 403, 478, 286, 278, - 524, 343, 448, 479, 442, 481, 254, 251, 483, 542, - 433, 296, 249, 404, 405, 272, 270, 422, 423, 263, - 294, 567, 272, 285, 542, 570, 412, 250, 272, 295, - 570, 345, -32, 345, 406, 391, 390, 392, 407, 357, - 396, 426, 427, 352, 252, 434, 456, 256, 458, 255, - 484, 258, 424, 425, 463, 464, 465, 466, 467, 468, - 469, 470, 471, 472, 345, 517, 415, 416, 417, 297, - 346, 480, 298, 473, 437, 299, 347, 439, 349, 409, - 486, 539, 475, 522, 350, 410, 475, 475, 488, 394, - 448, 394, 527, 540, 394, 573, 418, 265, 419, 475, - 266, 475, 420, 421, 399, 270, 577, 400, 490, 475, - 515, 352, 476, 516, 352, 498, 499, 500, 501, 475, - 515, 259, 520, 533, 222, 223, 224, 528, 267, 529, - 494, 495, 448, 475, 548, 519, 496, 497, 478, 521, - 272, 547, 276, 502, 503, 282, 287, 292, 293, 345, - 356, 398, 348, 428, 408, 413, 429, 352, 431, 330, - 435, 432, 438, 444, 430, 452, 449, 453, 450, 454, - 457, 459, 525, 526, 279, 485, 460, -31, 394, 461, - 489, 579, 511, -26, 535, 475, 531, 549, 514, -406, - 558, 478, 532, 559, 560, 564, 563, 565, 580, 401, - 402, 369, 352, 568, 504, 541, 581, 569, 505, 507, - 506, 289, 290, 508, 291, 509, 253, 441, 414, 534, - 541, 264, 566, 536, 575, 288, 513, 394, 576, 271, - 550, 562, 330, 537, 275, 330, 242, 0, 0, 0, - 226, 228, 0, 284, 352, 574, 561, 0, 0, 264, - 0, 0, 0, 264, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 394, 0, 0, - 0, 0, 0, 353, 0, 0, 0, 378, 0, 0, - 0, 0, 0, 543, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 270, 0, 543, 0, - 0, 0, 491, 492, 493, 330, 330, 330, 330, 330, - 330, 330, 330, 330, 330, 330, 330, 330, 330, 330, - 330, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 353, 443, 0, 353, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 378, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 353, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 353, 0, 0, 0, 0, 0, 0, 0, - 0, 378, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 378, 0, 0, 0, 
0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 353, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 378, 0, - 0, 0, 0, 378, 378, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 378, 0, - 0, 0, 0, 271, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 378, 0, 0, 0, 378, - 0, 0, 0, 0, 378, 0, 0, 0, 378, 0, - 0, 0, 0, 0, 0, 274, 0, 378, 1, 2, - 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, - 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 14, 15, 16, 17, 18, 19, 20, 21, - 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, - 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, - 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, - 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, - 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, - 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, - 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, - 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, - 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, - 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, - 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, - 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, - 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, - 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, - 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, - 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, - 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, - 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, - 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, - 212, 213, 214, 215, 216, 217, 0, 0, 218, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 219, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 220, 221, 222, 223, 224, 225, - 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, - 11, 12, 13, 358, 359, 360, 0, 361, 362, 363, - 364, 365, 366, 367, 14, 15, 16, 17, 18, 19, - 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, - 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, - 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, - 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, - 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, - 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, - 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, - 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, - 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, - 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, - 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, - 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, - 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, - 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, - 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, - 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, - 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, - 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, - 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, - 210, 211, 212, 213, 214, 215, 216, 217, 368, 301, - 218, 302, 303, 304, 305, 306, 307, 308, 309, 310, - 311, 0, 0, 312, 313, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 314, 0, 0, 0, 369, 370, 0, 0, - 0, 0, 371, 316, 317, 318, 319, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 220, 221, 222, 223, - 224, 225, 1, 2, 3, 4, 5, 6, 7, 8, - 9, 10, 11, 12, 13, 358, 359, 360, 0, 361, - 362, 363, 364, 365, 366, 367, 14, 15, 16, 17, - 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, - 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, - 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, - 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, - 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, - 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, - 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, - 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, - 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, - 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, - 
118, 119, 120, 121, 122, 123, 124, 125, 126, 127, - 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, - 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, - 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, - 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, - 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, - 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, - 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, - 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, - 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, - 368, 301, 218, 302, 303, 304, 305, 306, 307, 308, - 309, 310, 311, 0, 0, 312, 313, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 314, 0, 0, 0, 369, 477, - 0, 0, 0, 0, 371, 316, 317, 318, 319, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 220, 221, - 222, 223, 224, 225, 1, 2, 3, 4, 5, 6, - 7, 8, 9, 10, 11, 12, 13, 358, 359, 360, - 0, 361, 362, 363, 364, 365, 366, 367, 14, 15, - 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, - 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, - 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, - 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, - 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, - 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, - 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, - 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, - 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, - 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, - 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, - 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, - 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, - 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, - 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, - 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, - 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, - 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, - 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, - 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, - 216, 217, 368, 301, 218, 302, 303, 304, 305, 306, - 307, 308, 309, 310, 311, 0, 0, 312, 313, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 314, 0, 0, 0, - 369, 0, 0, 0, 0, 0, 371, 316, 317, 318, - 319, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 220, 221, 222, 223, 224, 225, 1, 2, 3, 4, - 5, 6, 7, 8, 9, 10, 11, 12, 13, 358, - 359, 360, 0, 361, 362, 363, 364, 365, 366, 367, - 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, - 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, - 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, - 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, - 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, - 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, - 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, - 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, - 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, - 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, - 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, - 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, - 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, - 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, - 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, - 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, - 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, - 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, - 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, - 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, - 214, 215, 216, 217, 368, 301, 218, 302, 303, 304, - 305, 306, 307, 308, 309, 310, 311, 0, 0, 312, - 313, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 314, 0, - 0, 0, 287, 0, 0, 0, 0, 0, 371, 316, - 317, 318, 319, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 220, 221, 222, 223, 224, 225, 1, 2, - 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, - 13, 358, 359, 360, 0, 361, 362, 363, 364, 365, - 366, 
367, 14, 15, 16, 17, 18, 19, 20, 21, - 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, - 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, - 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, - 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, - 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, - 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, - 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, - 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, - 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, - 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, - 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, - 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, - 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, - 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, - 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, - 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, - 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, - 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, - 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, - 212, 213, 214, 215, 216, 217, 368, 301, 218, 302, - 303, 304, 305, 306, 307, 308, 309, 310, 311, 0, - 0, 312, 313, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 314, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 371, 316, 317, 318, 319, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 220, 221, 222, 223, 224, 225, - 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, - 11, 12, 13, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 14, 15, 16, 17, 18, 19, - 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, - 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, - 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, - 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, - 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, - 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, - 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, - 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, - 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, - 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, - 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, - 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, - 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, - 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, - 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, - 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, - 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, - 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, - 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, - 210, 211, 212, 213, 214, 215, 216, 217, 0, 301, - 218, 302, 303, 304, 305, 306, 307, 308, 309, 310, - 311, 0, 0, 312, 313, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 314, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 371, 316, 317, 318, 319, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 220, 221, 222, 223, - 224, 225, 1, 2, 3, 4, 5, 6, 7, 8, - 9, 10, 11, 12, 13, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 14, 15, 16, 17, - 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, - 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, - 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, - 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, - 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, - 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, - 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, - 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, - 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, - 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, - 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, - 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, - 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, - 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, - 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, - 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, - 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, - 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, - 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, - 
208, 209, 210, 211, 212, 213, 214, 215, 216, 217, - 0, 0, 218, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 219, 1, 2, 3, 4, 5, - 6, 7, 8, 9, 10, 11, 12, 13, 220, 221, - 222, 223, 224, 225, 0, 0, 0, 0, 0, 14, - 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, - 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, - 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, - 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, - 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, - 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, - 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, - 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, - 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, - 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, - 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, - 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, - 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, - 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, - 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, - 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, - 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, - 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, - 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, - 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, - 215, 216, 217, 0, 301, 218, 302, 303, 304, 305, - 306, 307, 308, 309, 310, 311, 0, 0, 312, 313, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 314, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 316, 317, - 318, 319, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 220, 221, 222, 223, 224, 1, 2, 3, 4, - 5, 6, 7, 8, 9, 10, 11, 12, 13, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, - 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, - 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, - 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, - 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, - 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, - 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, - 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, - 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, - 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, - 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, - 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, - 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, - 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, - 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, - 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, - 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, - 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, - 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, - 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, - 214, 215, 216, 217, 0, 268, 218, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 269, 1, - 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, - 12, 13, 220, 221, 222, 223, 224, 0, 0, 0, - 0, 0, 0, 14, 15, 16, 17, 18, 19, 20, - 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, - 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, - 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, - 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, - 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, - 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, - 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, - 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, - 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, - 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, - 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, - 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, - 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, - 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, - 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, - 171, 172, 
173, 174, 175, 176, 177, 178, 179, 180, - 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, - 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, - 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, - 211, 212, 213, 214, 215, 216, 217, 0, 0, 218, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 447, 1, 2, 3, - 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, - 0, 0, 0, 0, 0, 220, 221, 222, 223, 224, - 0, 14, 15, 16, 17, 18, 19, 20, 21, 22, - 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, - 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, - 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, - 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, - 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, - 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, - 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, - 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, - 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, - 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, - 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, - 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, - 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, - 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, - 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, - 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, - 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, - 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, - 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, - 213, 214, 215, 216, 217, 0, 0, 218, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 512, 1, 2, 3, 4, 5, - 6, 7, 8, 9, 10, 11, 12, 13, 0, 0, - 0, 0, 0, 220, 221, 222, 223, 224, 0, 14, - 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, - 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, - 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, - 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, - 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, - 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, - 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, - 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, - 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, - 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, - 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, - 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, - 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, - 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, - 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, - 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, - 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, - 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, - 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, - 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, - 215, 216, 217, 0, 0, 218, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 530, 1, 2, 3, 4, 5, 6, 7, - 8, 9, 10, 11, 12, 13, 0, 0, 0, 0, - 0, 220, 221, 222, 223, 224, 0, 14, 15, 16, - 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, - 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, - 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, - 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, - 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, - 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, - 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, - 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, - 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, - 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, - 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, - 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, - 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, - 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, - 157, 158, 159, 160, 161, 162, 163, 164, 165, 
166, - 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, - 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, - 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, - 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, - 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, - 217, 0, 0, 218, 0, 0, 0, 4, 5, 6, - 7, 8, 9, 10, 11, 12, 13, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, - 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, - 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 220, - 221, 222, 223, 224, 50, 51, 52, 53, 54, 55, - 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, - 66, 67, 0, 0, 0, 0, 0, 73, 74, 75, - 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, - 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, - 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, - 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, - 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, - 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, - 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, - 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, - 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, - 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, - 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, - 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, - 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, - 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, - 216, 217, 0, 301, 218, 302, 303, 304, 305, 306, - 307, 308, 309, 310, 311, 0, 0, 312, 313, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 314, 0, 0, 0, - 393, 546, 0, 0, 0, 0, 0, 316, 317, 318, - 319, 4, 5, 6, 7, 8, 9, 10, 11, 12, - 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 15, 16, 17, 18, 19, 20, 21, - 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, - 32, 33, 34, 35, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 50, 51, - 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, - 62, 63, 64, 65, 66, 67, 0, 0, 0, 0, - 0, 73, 74, 75, 76, 77, 78, 79, 80, 81, - 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, - 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, - 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, - 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, - 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, - 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, - 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, - 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, - 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, - 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, - 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, - 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, - 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, - 212, 213, 214, 215, 216, 217, 0, 301, 218, 302, - 303, 304, 305, 306, 307, 308, 309, 310, 311, 0, - 0, 312, 313, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 314, 0, 0, 315, 0, 0, 0, 0, 0, 0, - 0, 316, 317, 318, 319, 4, 5, 6, 7, 8, - 9, 10, 11, 12, 13, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 15, 16, 17, - 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, - 28, 29, 30, 31, 32, 33, 34, 35, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 50, 51, 52, 53, 54, 55, 56, 57, - 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, - 0, 0, 0, 0, 0, 73, 74, 75, 76, 77, - 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, - 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, - 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, - 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, - 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, - 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, - 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, - 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, - 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, - 168, 169, 170, 171, 172, 173, 174, 
175, 176, 177, - 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, - 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, - 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, - 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, - 0, 301, 218, 302, 303, 304, 305, 306, 307, 308, - 309, 310, 311, 0, 0, 312, 313, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 314, 0, 0, 0, 393, 0, - 0, 0, 0, 0, 0, 316, 317, 318, 319, 4, - 5, 6, 7, 8, 9, 10, 11, 12, 13, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 15, 16, 17, 18, 19, 20, 21, 22, 23, - 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, - 34, 35, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 50, 51, 52, 53, - 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, - 64, 65, 66, 67, 0, 0, 0, 0, 0, 73, - 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, - 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, - 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, - 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, - 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, - 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, - 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, - 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, - 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, - 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, - 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, - 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, - 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, - 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, - 214, 215, 216, 217, 0, 301, 218, 302, 303, 304, - 305, 306, 307, 308, 309, 310, 311, 0, 0, 312, - 313, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 314, 0, - 0, 436, 0, 0, 0, 0, 0, 0, 0, 316, - 317, 318, 319, 4, 5, 6, 7, 8, 9, 10, - 11, 12, 13, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 15, 16, 17, 18, 19, - 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, - 30, 31, 32, 33, 34, 35, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, - 60, 61, 62, 63, 64, 65, 66, 67, 0, 0, - 0, 0, 0, 73, 74, 75, 76, 77, 78, 79, - 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, - 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, - 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, - 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, - 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, - 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, - 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, - 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, - 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, - 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, - 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, - 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, - 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, - 210, 211, 212, 213, 214, 215, 216, 217, 0, 301, - 218, 302, 303, 304, 305, 306, 307, 308, 309, 310, - 311, 0, 0, 312, 313, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 314, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 455, 316, 317, 318, 319, 4, 5, 6, - 7, 8, 9, 10, 11, 12, 13, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, - 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, - 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 50, 51, 52, 53, 54, 55, - 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, - 66, 67, 0, 0, 0, 0, 0, 73, 74, 75, - 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, - 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, - 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, - 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, - 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, - 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, - 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, - 146, 147, 148, 149, 150, 151, 152, 
153, 154, 155, - 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, - 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, - 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, - 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, - 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, - 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, - 216, 217, 0, 301, 218, 302, 303, 304, 305, 306, - 307, 308, 309, 310, 311, 0, 0, 312, 313, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 314, 4, 5, 6, - 7, 8, 9, 10, 11, 12, 13, 316, 317, 318, - 319, 0, 0, 0, 0, 0, 0, 0, 0, 15, - 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, - 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 50, 51, 52, 53, 54, 55, - 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, - 66, 67, 0, 0, 0, 0, 0, 73, 74, 75, - 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, - 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, - 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, - 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, - 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, - 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, - 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, - 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, - 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, - 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, - 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, - 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, - 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, - 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, - 216, 411, 0, 301, 218, 302, 303, 304, 305, 306, - 307, 308, 309, 310, 311, 0, 0, 312, 313, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 314, 4, 5, 6, - 7, 8, 9, 10, 11, 12, 13, 316, 317, 318, - 319, 0, 0, 0, 0, 0, 0, 0, 0, 15, - 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, - 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 50, 51, 52, 53, 54, 55, - 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, - 66, 67, 0, 0, 0, 0, 0, 73, 74, 75, - 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, - 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, - 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, - 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, - 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, - 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, - 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, - 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, - 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, - 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, - 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, - 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, - 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, - 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, - 216, 217, 0, 0, 218 -}; - -static const yytype_int16 yycheck[] = -{ - 0, 231, 239, 295, 0, 0, 314, 383, 254, 57, - 462, 272, 354, 391, 351, 393, 232, 232, 396, 523, - 253, 267, 265, 246, 247, 267, 263, 248, 249, 259, - 265, 555, 267, 275, 538, 559, 328, 265, 267, 274, - 564, 267, 265, 267, 267, 274, 292, 293, 271, 275, - 274, 250, 251, 283, 269, 288, 364, 275, 366, 275, - 397, 266, 283, 284, 255, 256, 257, 258, 259, 260, - 261, 262, 263, 264, 267, 451, 280, 281, 282, 269, - 266, 274, 272, 274, 345, 275, 272, 348, 266, 266, - 266, 266, 272, 273, 272, 272, 272, 272, 406, 391, - 442, 393, 480, 266, 396, 266, 277, 272, 279, 272, - 275, 272, 244, 245, 272, 352, 568, 275, 410, 272, - 272, 351, 275, 275, 354, 422, 423, 424, 425, 272, - 272, 272, 275, 275, 291, 292, 293, 270, 232, 272, - 418, 419, 484, 272, 273, 453, 420, 421, 524, 457, - 267, 529, 232, 426, 427, 269, 
269, 232, 232, 267, - 275, 232, 274, 287, 266, 265, 286, 397, 252, 272, - 268, 254, 232, 232, 285, 265, 275, 265, 275, 275, - 265, 273, 474, 475, 232, 232, 265, 265, 480, 270, - 232, 569, 268, 266, 231, 272, 268, 265, 444, 269, - 232, 577, 510, 266, 270, 266, 275, 19, 275, 312, - 313, 269, 442, 269, 428, 523, 270, 274, 429, 431, - 430, 259, 263, 432, 263, 433, 225, 350, 331, 515, - 538, 231, 554, 518, 564, 257, 443, 529, 565, 239, - 538, 549, 345, 518, 246, 348, 246, -1, -1, -1, - 246, 246, -1, 253, 484, 563, 548, -1, -1, 259, - -1, -1, -1, 263, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, 569, -1, -1, - -1, -1, -1, 283, -1, -1, -1, 287, -1, -1, - -1, -1, -1, 523, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 543, -1, 538, -1, - -1, -1, 415, 416, 417, 418, 419, 420, 421, 422, - 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, - 433, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, 351, 352, -1, 354, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, 383, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, 397, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, 442, -1, -1, -1, -1, -1, -1, -1, - -1, 451, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, 462, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 484, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, 518, -1, - -1, -1, -1, 523, 524, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, 538, -1, - -1, -1, -1, 543, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, 555, -1, -1, -1, 559, - -1, -1, -1, -1, 564, -1, -1, -1, 568, -1, - -1, -1, -1, -1, -1, 0, -1, 577, 3, 4, - 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, - 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, 27, 28, 29, 30, 31, 32, 33, 34, - 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, - 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, - 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, - 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, - 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, - 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, - 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, - 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, - 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, - 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, - 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, - 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, - 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, - 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, - 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, - 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, - 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, - 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, - 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, - 225, 226, 227, 228, 229, 230, -1, -1, 233, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - 275, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 289, 290, 291, 292, 293, 294, - 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, - 13, 14, 15, 16, 17, 18, -1, 20, 21, 22, - 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, - 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, - 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, - 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, - 63, 64, 65, 66, 67, 68, 69, 70, 71, 
72, - 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, - 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, - 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, - 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, - 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, - 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, - 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, - 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, - 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, - 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, - 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, - 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, - 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, - 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, - 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, - 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, - 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, - 243, -1, -1, 246, 247, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, 265, -1, -1, -1, 269, 270, -1, -1, - -1, -1, 275, 276, 277, 278, 279, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 289, 290, 291, 292, - 293, 294, 3, 4, 5, 6, 7, 8, 9, 10, - 11, 12, 13, 14, 15, 16, 17, 18, -1, 20, - 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, - 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, - 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, - 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, - 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, - 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, - 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, - 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, - 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, - 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, - 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, - 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, - 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, - 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, - 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, - 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, - 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, - 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, - 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, - 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, - 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, - 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, - 241, 242, 243, -1, -1, 246, 247, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 265, -1, -1, -1, 269, 270, - -1, -1, -1, -1, 275, 276, 277, 278, 279, -1, - -1, -1, -1, -1, -1, -1, -1, -1, 289, 290, - 291, 292, 293, 294, 3, 4, 5, 6, 7, 8, - 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, - -1, 20, 21, 22, 23, 24, 25, 26, 27, 28, - 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, - 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, - 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, - 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, - 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, - 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, - 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, - 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, - 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, - 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, - 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, - 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, - 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, - 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, - 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, - 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, - 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, - 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, - 229, 
230, 231, 232, 233, 234, 235, 236, 237, 238, - 239, 240, 241, 242, 243, -1, -1, 246, 247, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 265, -1, -1, -1, - 269, -1, -1, -1, -1, -1, 275, 276, 277, 278, - 279, -1, -1, -1, -1, -1, -1, -1, -1, -1, - 289, 290, 291, 292, 293, 294, 3, 4, 5, 6, - 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, - 17, 18, -1, 20, 21, 22, 23, 24, 25, 26, - 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, - 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, - 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, - 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, - 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, - 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, - 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, - 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, - 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, - 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, - 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, - 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, - 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, - 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, - 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, - 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, - 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, - 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, - 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, - 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, - 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, - 237, 238, 239, 240, 241, 242, 243, -1, -1, 246, - 247, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, 265, -1, - -1, -1, 269, -1, -1, -1, -1, -1, 275, 276, - 277, 278, 279, -1, -1, -1, -1, -1, -1, -1, - -1, -1, 289, 290, 291, 292, 293, 294, 3, 4, - 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, - 15, 16, 17, 18, -1, 20, 21, 22, 23, 24, - 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, - 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, - 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, - 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, - 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, - 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, - 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, - 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, - 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, - 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, - 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, - 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, - 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, - 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, - 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, - 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, - 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, - 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, - 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, - 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, - 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, - 235, 236, 237, 238, 239, 240, 241, 242, 243, -1, - -1, 246, 247, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - 265, -1, -1, -1, -1, -1, -1, -1, -1, -1, - 275, 276, 277, 278, 279, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 289, 290, 291, 292, 293, 294, - 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, - 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 27, 28, 29, 30, 31, 32, - 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, - 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, - 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, - 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, - 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, - 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, - 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, - 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, - 113, 114, 115, 116, 117, 
118, 119, 120, 121, 122, - 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, - 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, - 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, - 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, - 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, - 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, - 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, - 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, - 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, - 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, - 223, 224, 225, 226, 227, 228, 229, 230, -1, 232, - 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, - 243, -1, -1, 246, 247, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, 265, -1, -1, -1, -1, -1, -1, -1, - -1, -1, 275, 276, 277, 278, 279, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 289, 290, 291, 292, - 293, 294, 3, 4, 5, 6, 7, 8, 9, 10, - 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 27, 28, 29, 30, - 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, - 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, - 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, - 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, - 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, - 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, - 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, - 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, - 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, - 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, - 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, - 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, - 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, - 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, - 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, - 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, - 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, - 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, - 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, - 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, - -1, -1, 233, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 275, 3, 4, 5, 6, 7, - 8, 9, 10, 11, 12, 13, 14, 15, 289, 290, - 291, 292, 293, 294, -1, -1, -1, -1, -1, 27, - 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, - 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, - 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, - 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, - 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, - 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, - 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, - 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, - 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, - 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, - 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, - 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, - 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, - 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, - 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, - 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, - 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, - 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, - 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, - 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, - 228, 229, 230, -1, 232, 233, 234, 235, 236, 237, - 238, 239, 240, 241, 242, 243, -1, -1, 246, 247, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, 265, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, 276, 277, - 278, 279, -1, -1, -1, -1, -1, -1, -1, -1, - -1, 289, 290, 291, 292, 293, 3, 4, 5, 6, - 7, 8, 9, 
10, 11, 12, 13, 14, 15, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, - 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, - 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, - 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, - 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, - 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, - 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, - 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, - 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, - 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, - 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, - 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, - 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, - 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, - 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, - 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, - 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, - 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, - 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, - 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, - 227, 228, 229, 230, -1, 232, 233, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, 275, 3, - 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, - 14, 15, 289, 290, 291, 292, 293, -1, -1, -1, - -1, -1, -1, 27, 28, 29, 30, 31, 32, 33, - 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, - 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, - 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, - 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, - 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, - 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, - 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, - 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, - 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, - 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, - 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, - 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, - 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, - 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, - 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, - 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, - 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, - 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, - 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, - 224, 225, 226, 227, 228, 229, 230, -1, -1, 233, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 270, 3, 4, 5, - 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, - -1, -1, -1, -1, -1, 289, 290, 291, 292, 293, - -1, 27, 28, 29, 30, 31, 32, 33, 34, 35, - 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, - 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, - 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, - 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, - 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, - 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, - 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, - 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, - 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, - 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, - 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, - 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, - 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, - 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, - 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, - 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, - 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, - 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, - 216, 217, 218, 219, 220, 221, 222, 
223, 224, 225, - 226, 227, 228, 229, 230, -1, -1, 233, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 270, 3, 4, 5, 6, 7, - 8, 9, 10, 11, 12, 13, 14, 15, -1, -1, - -1, -1, -1, 289, 290, 291, 292, 293, -1, 27, - 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, - 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, - 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, - 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, - 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, - 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, - 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, - 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, - 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, - 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, - 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, - 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, - 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, - 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, - 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, - 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, - 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, - 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, - 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, - 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, - 228, 229, 230, -1, -1, 233, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, 270, 3, 4, 5, 6, 7, 8, 9, - 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, - -1, 289, 290, 291, 292, 293, -1, 27, 28, 29, - 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, - 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, - 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, - 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, - 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, - 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, - 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, - 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, - 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, - 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, - 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, - 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, - 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, - 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, - 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, - 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, - 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, - 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, - 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, - 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, - 230, -1, -1, 233, -1, -1, -1, 6, 7, 8, - 9, 10, 11, 12, 13, 14, 15, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, 28, - 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, - -1, -1, -1, -1, -1, -1, -1, -1, -1, 289, - 290, 291, 292, 293, 63, 64, 65, 66, 67, 68, - 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, - 79, 80, -1, -1, -1, -1, -1, 86, 87, 88, - 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, - 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, - 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, - 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, - 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, - 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, - 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, - 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, - 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, - 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, - 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, - 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, - 209, 210, 
211, 212, 213, 214, 215, 216, 217, 218, - 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, - 229, 230, -1, 232, 233, 234, 235, 236, 237, 238, - 239, 240, 241, 242, 243, -1, -1, 246, 247, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 265, -1, -1, -1, - 269, 270, -1, -1, -1, -1, -1, 276, 277, 278, - 279, 6, 7, 8, 9, 10, 11, 12, 13, 14, - 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, 28, 29, 30, 31, 32, 33, 34, - 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, - 45, 46, 47, 48, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, 63, 64, - 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, - 75, 76, 77, 78, 79, 80, -1, -1, -1, -1, - -1, 86, 87, 88, 89, 90, 91, 92, 93, 94, - 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, - 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, - 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, - 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, - 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, - 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, - 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, - 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, - 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, - 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, - 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, - 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, - 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, - 225, 226, 227, 228, 229, 230, -1, 232, 233, 234, - 235, 236, 237, 238, 239, 240, 241, 242, 243, -1, - -1, 246, 247, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - 265, -1, -1, 268, -1, -1, -1, -1, -1, -1, - -1, 276, 277, 278, 279, 6, 7, 8, 9, 10, - 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, 28, 29, 30, - 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, - 41, 42, 43, 44, 45, 46, 47, 48, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, 63, 64, 65, 66, 67, 68, 69, 70, - 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, - -1, -1, -1, -1, -1, 86, 87, 88, 89, 90, - 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, - 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, - 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, - 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, - 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, - 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, - 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, - 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, - 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, - 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, - 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, - 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, - 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, - 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, - -1, 232, 233, 234, 235, 236, 237, 238, 239, 240, - 241, 242, 243, -1, -1, 246, 247, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 265, -1, -1, -1, 269, -1, - -1, -1, -1, -1, -1, 276, 277, 278, 279, 6, - 7, 8, 9, 10, 11, 12, 13, 14, 15, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, 28, 29, 30, 31, 32, 33, 34, 35, 36, - 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, - 47, 48, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 63, 64, 65, 66, - 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, - 77, 78, 79, 80, -1, -1, -1, -1, -1, 86, - 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, - 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, - 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, - 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, - 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, - 137, 138, 139, 140, 141, 142, 143, 144, 145, 
146, - 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, - 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, - 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, - 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, - 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, - 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, - 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, - 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, - 227, 228, 229, 230, -1, 232, 233, 234, 235, 236, - 237, 238, 239, 240, 241, 242, 243, -1, -1, 246, - 247, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, 265, -1, - -1, 268, -1, -1, -1, -1, -1, -1, -1, 276, - 277, 278, 279, 6, 7, 8, 9, 10, 11, 12, - 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, 28, 29, 30, 31, 32, - 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, - 43, 44, 45, 46, 47, 48, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, - 73, 74, 75, 76, 77, 78, 79, 80, -1, -1, - -1, -1, -1, 86, 87, 88, 89, 90, 91, 92, - 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, - 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, - 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, - 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, - 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, - 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, - 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, - 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, - 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, - 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, - 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, - 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, - 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, - 223, 224, 225, 226, 227, 228, 229, 230, -1, 232, - 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, - 243, -1, -1, 246, 247, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, 265, -1, -1, -1, -1, -1, -1, -1, - -1, -1, 275, 276, 277, 278, 279, 6, 7, 8, - 9, 10, 11, 12, 13, 14, 15, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, 28, - 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 63, 64, 65, 66, 67, 68, - 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, - 79, 80, -1, -1, -1, -1, -1, 86, 87, 88, - 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, - 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, - 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, - 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, - 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, - 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, - 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, - 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, - 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, - 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, - 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, - 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, - 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, - 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, - 229, 230, -1, 232, 233, 234, 235, 236, 237, 238, - 239, 240, 241, 242, 243, -1, -1, 246, 247, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 265, 6, 7, 8, - 9, 10, 11, 12, 13, 14, 15, 276, 277, 278, - 279, -1, -1, -1, -1, -1, -1, -1, -1, 28, - 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 63, 64, 65, 66, 67, 68, - 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, - 79, 80, -1, -1, -1, -1, -1, 86, 87, 88, - 89, 
90, 91, 92, 93, 94, 95, 96, 97, 98, - 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, - 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, - 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, - 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, - 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, - 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, - 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, - 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, - 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, - 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, - 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, - 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, - 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, - 229, 230, -1, 232, 233, 234, 235, 236, 237, 238, - 239, 240, 241, 242, 243, -1, -1, 246, 247, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, 265, 6, 7, 8, - 9, 10, 11, 12, 13, 14, 15, 276, 277, 278, - 279, -1, -1, -1, -1, -1, -1, -1, -1, 28, - 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, 63, 64, 65, 66, 67, 68, - 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, - 79, 80, -1, -1, -1, -1, -1, 86, 87, 88, - 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, - 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, - 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, - 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, - 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, - 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, - 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, - 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, - 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, - 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, - 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, - 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, - 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, - 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, - 229, 230, -1, -1, 233 -}; - - /* YYSTOS[STATE-NUM] -- The (internal number of the) accessing - symbol of state STATE-NUM. 
*/ -static const yytype_uint16 yystos[] = -{ - 0, 3, 4, 5, 6, 7, 8, 9, 10, 11, - 12, 13, 14, 15, 27, 28, 29, 30, 31, 32, - 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, - 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, - 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, - 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, - 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, - 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, - 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, - 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, - 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, - 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, - 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, - 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, - 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, - 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, - 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, - 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, - 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, - 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, - 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, - 223, 224, 225, 226, 227, 228, 229, 230, 233, 275, - 289, 290, 291, 292, 293, 294, 329, 330, 333, 334, - 335, 336, 340, 341, 342, 343, 344, 345, 348, 349, - 350, 351, 353, 355, 356, 357, 394, 395, 396, 265, - 265, 232, 269, 356, 232, 275, 275, 397, 266, 272, - 337, 338, 339, 349, 353, 272, 275, 232, 232, 275, - 350, 353, 267, 354, 0, 395, 232, 352, 57, 232, - 346, 347, 269, 359, 353, 275, 354, 269, 376, 338, - 337, 339, 232, 232, 265, 274, 354, 269, 272, 275, - 332, 232, 234, 235, 236, 237, 238, 239, 240, 241, - 242, 243, 246, 247, 265, 268, 276, 277, 278, 279, - 299, 300, 301, 303, 304, 305, 306, 307, 308, 309, - 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, - 320, 321, 322, 323, 353, 267, 266, 272, 274, 266, - 272, 358, 349, 353, 360, 361, 275, 275, 16, 17, - 18, 20, 21, 22, 23, 24, 25, 26, 231, 269, - 270, 275, 310, 323, 325, 327, 329, 333, 353, 366, - 367, 368, 369, 377, 378, 379, 382, 385, 386, 393, - 354, 274, 354, 269, 325, 364, 274, 331, 232, 272, - 275, 310, 310, 327, 246, 247, 267, 271, 266, 266, - 272, 230, 325, 265, 310, 280, 281, 282, 277, 279, - 244, 245, 248, 249, 283, 284, 250, 251, 287, 286, - 285, 252, 254, 253, 288, 268, 268, 323, 232, 323, - 328, 347, 360, 353, 232, 362, 363, 270, 361, 275, - 275, 388, 265, 265, 275, 275, 327, 265, 327, 273, - 265, 270, 370, 255, 256, 257, 258, 259, 260, 261, - 262, 263, 264, 274, 326, 272, 275, 270, 367, 364, - 274, 364, 365, 364, 360, 232, 266, 302, 327, 232, - 325, 310, 310, 310, 312, 312, 313, 313, 314, 314, - 314, 314, 315, 315, 316, 317, 318, 319, 320, 321, - 324, 268, 270, 362, 354, 272, 275, 367, 389, 327, - 275, 327, 273, 387, 377, 325, 325, 364, 270, 272, - 270, 268, 327, 275, 363, 231, 366, 378, 390, 266, - 266, 327, 342, 349, 381, 371, 270, 364, 273, 265, - 381, 391, 392, 373, 374, 375, 380, 383, 232, 266, - 270, 325, 327, 275, 266, 19, 369, 368, 269, 274, - 368, 372, 376, 266, 327, 372, 373, 377, 384, 364, - 275, 270 -}; - - /* YYR1[YYN] -- Symbol number of symbol that rule YYN derives. 
*/ -static const yytype_uint16 yyr1[] = -{ - 0, 298, 299, 300, 300, 300, 300, 300, 300, 300, - 300, 300, 300, 300, 300, 301, 301, 301, 301, 301, - 301, 302, 303, 304, 305, 305, 306, 306, 307, 307, - 308, 309, 309, 310, 310, 310, 310, 311, 311, 311, - 311, 312, 312, 312, 312, 313, 313, 313, 314, 314, - 314, 315, 315, 315, 315, 315, 316, 316, 316, 317, - 317, 318, 318, 319, 319, 320, 320, 321, 321, 322, - 322, 323, 324, 323, 325, 325, 326, 326, 326, 326, - 326, 326, 326, 326, 326, 326, 326, 327, 327, 328, - 329, 329, 329, 329, 329, 329, 329, 329, 329, 331, - 330, 332, 332, 333, 334, 334, 335, 335, 336, 337, - 337, 338, 338, 338, 338, 339, 340, 340, 340, 340, - 340, 341, 341, 341, 341, 341, 342, 342, 343, 344, - 344, 344, 344, 345, 346, 346, 347, 347, 347, 348, - 349, 349, 350, 350, 350, 350, 350, 350, 351, 351, - 351, 351, 351, 351, 351, 351, 351, 351, 351, 351, - 351, 351, 351, 351, 351, 351, 351, 352, 352, 353, - 353, 354, 354, 354, 354, 355, 355, 355, 355, 355, - 355, 355, 355, 355, 355, 355, 355, 355, 355, 355, - 355, 355, 355, 355, 355, 355, 355, 355, 355, 355, - 355, 355, 355, 355, 355, 355, 355, 355, 355, 355, - 355, 355, 355, 355, 355, 355, 355, 355, 355, 355, - 355, 355, 355, 355, 355, 355, 355, 355, 355, 355, - 355, 355, 355, 355, 355, 355, 355, 355, 355, 355, - 355, 355, 355, 355, 355, 355, 355, 355, 355, 355, - 355, 355, 355, 355, 355, 355, 355, 355, 355, 355, - 355, 355, 355, 355, 355, 355, 355, 355, 355, 355, - 355, 355, 355, 355, 355, 355, 355, 355, 355, 355, - 355, 355, 355, 355, 355, 355, 355, 355, 355, 355, - 355, 355, 355, 355, 355, 355, 355, 355, 355, 355, - 355, 355, 355, 355, 355, 355, 355, 355, 355, 355, - 355, 355, 355, 355, 355, 355, 355, 355, 355, 355, - 355, 355, 355, 355, 355, 355, 355, 355, 355, 355, - 355, 355, 355, 355, 355, 355, 355, 355, 355, 355, - 355, 355, 355, 355, 355, 355, 355, 355, 355, 355, - 355, 355, 355, 355, 355, 355, 355, 355, 355, 355, - 355, 355, 355, 355, 355, 355, 355, 355, 355, 355, - 356, 356, 356, 358, 357, 359, 357, 360, 360, 361, - 361, 362, 362, 363, 363, 364, 364, 364, 365, 365, - 366, 367, 367, 368, 368, 368, 368, 368, 368, 368, - 369, 370, 371, 369, 372, 372, 374, 373, 375, 373, - 376, 376, 377, 377, 378, 378, 379, 380, 380, 381, - 381, 383, 382, 384, 384, 385, 385, 387, 386, 388, - 386, 389, 386, 390, 390, 391, 391, 392, 392, 393, - 393, 393, 393, 393, 394, 394, 395, 395, 395, 397, - 396 -}; - - /* YYR2[YYN] -- Number of symbols on the right hand side of rule YYN. 
*/ -static const yytype_uint8 yyr2[] = -{ - 0, 2, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 3, 1, 4, 1, 3, 2, - 2, 1, 1, 1, 2, 2, 2, 1, 2, 3, - 2, 1, 1, 1, 2, 2, 2, 1, 1, 1, - 1, 1, 3, 3, 3, 1, 3, 3, 1, 3, - 3, 1, 3, 3, 3, 3, 1, 3, 3, 1, - 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, - 3, 1, 0, 6, 1, 3, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, - 2, 2, 4, 2, 3, 4, 2, 3, 4, 0, - 6, 2, 3, 2, 1, 1, 2, 3, 3, 2, - 3, 2, 1, 2, 1, 1, 1, 3, 4, 6, - 5, 1, 2, 3, 5, 4, 1, 2, 1, 1, - 1, 1, 1, 4, 1, 3, 1, 3, 1, 1, - 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 4, 1, 3, 1, - 2, 2, 3, 3, 4, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 0, 6, 0, 5, 1, 2, 3, - 4, 1, 3, 1, 2, 1, 3, 4, 1, 3, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 2, 0, 0, 5, 1, 1, 0, 2, 0, 2, - 2, 3, 1, 2, 1, 2, 5, 3, 1, 1, - 4, 0, 8, 0, 1, 3, 2, 0, 6, 0, - 8, 0, 7, 1, 1, 1, 0, 2, 3, 2, - 2, 2, 3, 2, 1, 2, 1, 1, 1, 0, - 3 -}; - - -#define yyerrok (yyerrstatus = 0) -#define yyclearin (yychar = YYEMPTY) -#define YYEMPTY (-2) -#define YYEOF 0 - -#define YYACCEPT goto yyacceptlab -#define YYABORT goto yyabortlab -#define YYERROR goto yyerrorlab - - -#define YYRECOVERING() (!!yyerrstatus) - -#define YYBACKUP(Token, Value) \ -do \ - if (yychar == YYEMPTY) \ - { \ - yychar = (Token); \ - yylval = (Value); \ - YYPOPSTACK (yylen); \ - yystate = *yyssp; \ - goto yybackup; \ - } \ - else \ - { \ - yyerror (pParseContext, YY_("syntax error: cannot back up")); \ - YYERROR; \ - } \ -while (0) - -/* Error token number */ -#define YYTERROR 1 -#define YYERRCODE 256 - - - -/* Enable debugging if requested. */ -#if YYDEBUG - -# ifndef YYFPRINTF -# include /* INFRINGES ON USER NAME SPACE */ -# define YYFPRINTF fprintf -# endif - -# define YYDPRINTF(Args) \ -do { \ - if (yydebug) \ - YYFPRINTF Args; \ -} while (0) - -/* This macro is provided for backward compatibility. */ -#ifndef YY_LOCATION_PRINT -# define YY_LOCATION_PRINT(File, Loc) ((void) 0) -#endif - - -# define YY_SYMBOL_PRINT(Title, Type, Value, Location) \ -do { \ - if (yydebug) \ - { \ - YYFPRINTF (stderr, "%s ", Title); \ - yy_symbol_print (stderr, \ - Type, Value, pParseContext); \ - YYFPRINTF (stderr, "\n"); \ - } \ -} while (0) - - -/*----------------------------------------. -| Print this symbol's value on YYOUTPUT. | -`----------------------------------------*/ - -static void -yy_symbol_value_print (FILE *yyoutput, int yytype, YYSTYPE const * const yyvaluep, glslang::TParseContext* pParseContext) -{ - FILE *yyo = yyoutput; - YYUSE (yyo); - YYUSE (pParseContext); - if (!yyvaluep) - return; -# ifdef YYPRINT - if (yytype < YYNTOKENS) - YYPRINT (yyoutput, yytoknum[yytype], *yyvaluep); -# endif - YYUSE (yytype); -} - - -/*--------------------------------. -| Print this symbol on YYOUTPUT. 
| -`--------------------------------*/ - -static void -yy_symbol_print (FILE *yyoutput, int yytype, YYSTYPE const * const yyvaluep, glslang::TParseContext* pParseContext) -{ - YYFPRINTF (yyoutput, "%s %s (", - yytype < YYNTOKENS ? "token" : "nterm", yytname[yytype]); - - yy_symbol_value_print (yyoutput, yytype, yyvaluep, pParseContext); - YYFPRINTF (yyoutput, ")"); -} - -/*------------------------------------------------------------------. -| yy_stack_print -- Print the state stack from its BOTTOM up to its | -| TOP (included). | -`------------------------------------------------------------------*/ - -static void -yy_stack_print (yytype_int16 *yybottom, yytype_int16 *yytop) -{ - YYFPRINTF (stderr, "Stack now"); - for (; yybottom <= yytop; yybottom++) - { - int yybot = *yybottom; - YYFPRINTF (stderr, " %d", yybot); - } - YYFPRINTF (stderr, "\n"); -} - -# define YY_STACK_PRINT(Bottom, Top) \ -do { \ - if (yydebug) \ - yy_stack_print ((Bottom), (Top)); \ -} while (0) - - -/*------------------------------------------------. -| Report that the YYRULE is going to be reduced. | -`------------------------------------------------*/ - -static void -yy_reduce_print (yytype_int16 *yyssp, YYSTYPE *yyvsp, int yyrule, glslang::TParseContext* pParseContext) -{ - unsigned long int yylno = yyrline[yyrule]; - int yynrhs = yyr2[yyrule]; - int yyi; - YYFPRINTF (stderr, "Reducing stack by rule %d (line %lu):\n", - yyrule - 1, yylno); - /* The symbols being reduced. */ - for (yyi = 0; yyi < yynrhs; yyi++) - { - YYFPRINTF (stderr, " $%d = ", yyi + 1); - yy_symbol_print (stderr, - yystos[yyssp[yyi + 1 - yynrhs]], - &(yyvsp[(yyi + 1) - (yynrhs)]) - , pParseContext); - YYFPRINTF (stderr, "\n"); - } -} - -# define YY_REDUCE_PRINT(Rule) \ -do { \ - if (yydebug) \ - yy_reduce_print (yyssp, yyvsp, Rule, pParseContext); \ -} while (0) - -/* Nonzero means print parse trace. It is left uninitialized so that - multiple parsers can coexist. */ -int yydebug; -#else /* !YYDEBUG */ -# define YYDPRINTF(Args) -# define YY_SYMBOL_PRINT(Title, Type, Value, Location) -# define YY_STACK_PRINT(Bottom, Top) -# define YY_REDUCE_PRINT(Rule) -#endif /* !YYDEBUG */ - - -/* YYINITDEPTH -- initial size of the parser's stacks. */ -#ifndef YYINITDEPTH -# define YYINITDEPTH 200 -#endif - -/* YYMAXDEPTH -- maximum size the stacks can grow to (effective only - if the built-in stack extension method is used). - - Do not make this value too large; the results are undefined if - YYSTACK_ALLOC_MAXIMUM < YYSTACK_BYTES (YYMAXDEPTH) - evaluated with infinite-precision integer arithmetic. */ - -#ifndef YYMAXDEPTH -# define YYMAXDEPTH 10000 -#endif - - -#if YYERROR_VERBOSE - -# ifndef yystrlen -# if defined __GLIBC__ && defined _STRING_H -# define yystrlen strlen -# else -/* Return the length of YYSTR. */ -static YYSIZE_T -yystrlen (const char *yystr) -{ - YYSIZE_T yylen; - for (yylen = 0; yystr[yylen]; yylen++) - continue; - return yylen; -} -# endif -# endif - -# ifndef yystpcpy -# if defined __GLIBC__ && defined _STRING_H && defined _GNU_SOURCE -# define yystpcpy stpcpy -# else -/* Copy YYSRC to YYDEST, returning the address of the terminating '\0' in - YYDEST. */ -static char * -yystpcpy (char *yydest, const char *yysrc) -{ - char *yyd = yydest; - const char *yys = yysrc; - - while ((*yyd++ = *yys++) != '\0') - continue; - - return yyd - 1; -} -# endif -# endif - -# ifndef yytnamerr -/* Copy to YYRES the contents of YYSTR after stripping away unnecessary - quotes and backslashes, so that it's suitable for yyerror. 
The - heuristic is that double-quoting is unnecessary unless the string - contains an apostrophe, a comma, or backslash (other than - backslash-backslash). YYSTR is taken from yytname. If YYRES is - null, do not copy; instead, return the length of what the result - would have been. */ -static YYSIZE_T -yytnamerr (char *yyres, const char *yystr) -{ - if (*yystr == '"') - { - YYSIZE_T yyn = 0; - char const *yyp = yystr; - - for (;;) - switch (*++yyp) - { - case '\'': - case ',': - goto do_not_strip_quotes; - - case '\\': - if (*++yyp != '\\') - goto do_not_strip_quotes; - /* Fall through. */ - default: - if (yyres) - yyres[yyn] = *yyp; - yyn++; - break; - - case '"': - if (yyres) - yyres[yyn] = '\0'; - return yyn; - } - do_not_strip_quotes: ; - } - - if (! yyres) - return yystrlen (yystr); - - return yystpcpy (yyres, yystr) - yyres; -} -# endif - -/* Copy into *YYMSG, which is of size *YYMSG_ALLOC, an error message - about the unexpected token YYTOKEN for the state stack whose top is - YYSSP. - - Return 0 if *YYMSG was successfully written. Return 1 if *YYMSG is - not large enough to hold the message. In that case, also set - *YYMSG_ALLOC to the required number of bytes. Return 2 if the - required number of bytes is too large to store. */ -static int -yysyntax_error (YYSIZE_T *yymsg_alloc, char **yymsg, - yytype_int16 *yyssp, int yytoken) -{ - YYSIZE_T yysize0 = yytnamerr (YY_NULLPTR, yytname[yytoken]); - YYSIZE_T yysize = yysize0; - enum { YYERROR_VERBOSE_ARGS_MAXIMUM = 5 }; - /* Internationalized format string. */ - const char *yyformat = YY_NULLPTR; - /* Arguments of yyformat. */ - char const *yyarg[YYERROR_VERBOSE_ARGS_MAXIMUM]; - /* Number of reported tokens (one for the "unexpected", one per - "expected"). */ - int yycount = 0; - - /* There are many possibilities here to consider: - - If this state is a consistent state with a default action, then - the only way this function was invoked is if the default action - is an error action. In that case, don't check for expected - tokens because there are none. - - The only way there can be no lookahead present (in yychar) is if - this state is a consistent state with a default action. Thus, - detecting the absence of a lookahead is sufficient to determine - that there is no unexpected or expected token to report. In that - case, just report a simple "syntax error". - - Don't assume there isn't a lookahead just because this state is a - consistent state with a default action. There might have been a - previous inconsistent state, consistent state with a non-default - action, or user semantic action that manipulated yychar. - - Of course, the expected token list depends on states to have - correct lookahead information, and it depends on the parser not - to perform extra reductions after fetching a lookahead from the - scanner and before detecting a syntax error. Thus, state merging - (from LALR or IELR) and default reductions corrupt the expected - token list. However, the list is correct for canonical LR with - one exception: it will still contain any token that will not be - accepted due to an error action in a later state. - */ - if (yytoken != YYEMPTY) - { - int yyn = yypact[*yyssp]; - yyarg[yycount++] = yytname[yytoken]; - if (!yypact_value_is_default (yyn)) - { - /* Start YYX at -YYN if negative to avoid negative indexes in - YYCHECK. In other words, skip the first -YYN actions for - this state because they are default actions. */ - int yyxbegin = yyn < 0 ? -yyn : 0; - /* Stay within bounds of both yycheck and yytname. 
*/ - int yychecklim = YYLAST - yyn + 1; - int yyxend = yychecklim < YYNTOKENS ? yychecklim : YYNTOKENS; - int yyx; - - for (yyx = yyxbegin; yyx < yyxend; ++yyx) - if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR - && !yytable_value_is_error (yytable[yyx + yyn])) - { - if (yycount == YYERROR_VERBOSE_ARGS_MAXIMUM) - { - yycount = 1; - yysize = yysize0; - break; - } - yyarg[yycount++] = yytname[yyx]; - { - YYSIZE_T yysize1 = yysize + yytnamerr (YY_NULLPTR, yytname[yyx]); - if (! (yysize <= yysize1 - && yysize1 <= YYSTACK_ALLOC_MAXIMUM)) - return 2; - yysize = yysize1; - } - } - } - } - - switch (yycount) - { -# define YYCASE_(N, S) \ - case N: \ - yyformat = S; \ - break - YYCASE_(0, YY_("syntax error")); - YYCASE_(1, YY_("syntax error, unexpected %s")); - YYCASE_(2, YY_("syntax error, unexpected %s, expecting %s")); - YYCASE_(3, YY_("syntax error, unexpected %s, expecting %s or %s")); - YYCASE_(4, YY_("syntax error, unexpected %s, expecting %s or %s or %s")); - YYCASE_(5, YY_("syntax error, unexpected %s, expecting %s or %s or %s or %s")); -# undef YYCASE_ - } - - { - YYSIZE_T yysize1 = yysize + yystrlen (yyformat); - if (! (yysize <= yysize1 && yysize1 <= YYSTACK_ALLOC_MAXIMUM)) - return 2; - yysize = yysize1; - } - - if (*yymsg_alloc < yysize) - { - *yymsg_alloc = 2 * yysize; - if (! (yysize <= *yymsg_alloc - && *yymsg_alloc <= YYSTACK_ALLOC_MAXIMUM)) - *yymsg_alloc = YYSTACK_ALLOC_MAXIMUM; - return 1; - } - - /* Avoid sprintf, as that infringes on the user's name space. - Don't have undefined behavior even if the translation - produced a string with the wrong number of "%s"s. */ - { - char *yyp = *yymsg; - int yyi = 0; - while ((*yyp = *yyformat) != '\0') - if (*yyp == '%' && yyformat[1] == 's' && yyi < yycount) - { - yyp += yytnamerr (yyp, yyarg[yyi++]); - yyformat += 2; - } - else - { - yyp++; - yyformat++; - } - } - return 0; -} -#endif /* YYERROR_VERBOSE */ - -/*-----------------------------------------------. -| Release the memory associated to this symbol. | -`-----------------------------------------------*/ - -static void -yydestruct (const char *yymsg, int yytype, YYSTYPE *yyvaluep, glslang::TParseContext* pParseContext) -{ - YYUSE (yyvaluep); - YYUSE (pParseContext); - if (!yymsg) - yymsg = "Deleting"; - YY_SYMBOL_PRINT (yymsg, yytype, yyvaluep, yylocationp); - - YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN - YYUSE (yytype); - YY_IGNORE_MAYBE_UNINITIALIZED_END -} - - - - -/*----------. -| yyparse. | -`----------*/ - -int -yyparse (glslang::TParseContext* pParseContext) -{ -/* The lookahead symbol. */ -int yychar; - - -/* The semantic value of the lookahead symbol. */ -/* Default value used for initialization, for pacifying older GCCs - or non-GCC compilers. */ -YY_INITIAL_VALUE (static YYSTYPE yyval_default;) -YYSTYPE yylval YY_INITIAL_VALUE (= yyval_default); - - /* Number of syntax errors so far. */ - int yynerrs; - - int yystate; - /* Number of tokens to shift before error messages enabled. */ - int yyerrstatus; - - /* The stacks and their tools: - 'yyss': related to states. - 'yyvs': related to semantic values. - - Refer to the stacks through separate pointers, to allow yyoverflow - to reallocate them elsewhere. */ - - /* The state stack. */ - yytype_int16 yyssa[YYINITDEPTH]; - yytype_int16 *yyss; - yytype_int16 *yyssp; - - /* The semantic value stack. */ - YYSTYPE yyvsa[YYINITDEPTH]; - YYSTYPE *yyvs; - YYSTYPE *yyvsp; - - YYSIZE_T yystacksize; - - int yyn; - int yyresult; - /* Lookahead token as an internal (translated) token number. 
*/ - int yytoken = 0; - /* The variables used to return semantic value and location from the - action routines. */ - YYSTYPE yyval; - -#if YYERROR_VERBOSE - /* Buffer for error messages, and its allocated size. */ - char yymsgbuf[128]; - char *yymsg = yymsgbuf; - YYSIZE_T yymsg_alloc = sizeof yymsgbuf; -#endif - -#define YYPOPSTACK(N) (yyvsp -= (N), yyssp -= (N)) - - /* The number of symbols on the RHS of the reduced rule. - Keep to zero when no symbol should be popped. */ - int yylen = 0; - - yyssp = yyss = yyssa; - yyvsp = yyvs = yyvsa; - yystacksize = YYINITDEPTH; - - YYDPRINTF ((stderr, "Starting parse\n")); - - yystate = 0; - yyerrstatus = 0; - yynerrs = 0; - yychar = YYEMPTY; /* Cause a token to be read. */ - goto yysetstate; - -/*------------------------------------------------------------. -| yynewstate -- Push a new state, which is found in yystate. | -`------------------------------------------------------------*/ - yynewstate: - /* In all cases, when you get here, the value and location stacks - have just been pushed. So pushing a state here evens the stacks. */ - yyssp++; - - yysetstate: - *yyssp = yystate; - - if (yyss + yystacksize - 1 <= yyssp) - { - /* Get the current used size of the three stacks, in elements. */ - YYSIZE_T yysize = yyssp - yyss + 1; - -#ifdef yyoverflow - { - /* Give user a chance to reallocate the stack. Use copies of - these so that the &'s don't force the real ones into - memory. */ - YYSTYPE *yyvs1 = yyvs; - yytype_int16 *yyss1 = yyss; - - /* Each stack pointer address is followed by the size of the - data in use in that stack, in bytes. This used to be a - conditional around just the two extra args, but that might - be undefined if yyoverflow is a macro. */ - yyoverflow (YY_("memory exhausted"), - &yyss1, yysize * sizeof (*yyssp), - &yyvs1, yysize * sizeof (*yyvsp), - &yystacksize); - - yyss = yyss1; - yyvs = yyvs1; - } -#else /* no yyoverflow */ -# ifndef YYSTACK_RELOCATE - goto yyexhaustedlab; -# else - /* Extend the stack our own way. */ - if (YYMAXDEPTH <= yystacksize) - goto yyexhaustedlab; - yystacksize *= 2; - if (YYMAXDEPTH < yystacksize) - yystacksize = YYMAXDEPTH; - - { - yytype_int16 *yyss1 = yyss; - union yyalloc *yyptr = - (union yyalloc *) YYSTACK_ALLOC (YYSTACK_BYTES (yystacksize)); - if (! yyptr) - goto yyexhaustedlab; - YYSTACK_RELOCATE (yyss_alloc, yyss); - YYSTACK_RELOCATE (yyvs_alloc, yyvs); -# undef YYSTACK_RELOCATE - if (yyss1 != yyssa) - YYSTACK_FREE (yyss1); - } -# endif -#endif /* no yyoverflow */ - - yyssp = yyss + yysize - 1; - yyvsp = yyvs + yysize - 1; - - YYDPRINTF ((stderr, "Stack size increased to %lu\n", - (unsigned long int) yystacksize)); - - if (yyss + yystacksize - 1 <= yyssp) - YYABORT; - } - - YYDPRINTF ((stderr, "Entering state %d\n", yystate)); - - if (yystate == YYFINAL) - YYACCEPT; - - goto yybackup; - -/*-----------. -| yybackup. | -`-----------*/ -yybackup: - - /* Do appropriate processing given the current state. Read a - lookahead token if we need one and don't already have one. */ - - /* First try to decide what to do without reference to lookahead token. */ - yyn = yypact[yystate]; - if (yypact_value_is_default (yyn)) - goto yydefault; - - /* Not known => get a lookahead token if don't already have one. */ - - /* YYCHAR is either YYEMPTY or YYEOF or a valid lookahead symbol. 
*/ - if (yychar == YYEMPTY) - { - YYDPRINTF ((stderr, "Reading a token: ")); - yychar = yylex (&yylval, parseContext); - } - - if (yychar <= YYEOF) - { - yychar = yytoken = YYEOF; - YYDPRINTF ((stderr, "Now at end of input.\n")); - } - else - { - yytoken = YYTRANSLATE (yychar); - YY_SYMBOL_PRINT ("Next token is", yytoken, &yylval, &yylloc); - } - - /* If the proper action on seeing token YYTOKEN is to reduce or to - detect an error, take that action. */ - yyn += yytoken; - if (yyn < 0 || YYLAST < yyn || yycheck[yyn] != yytoken) - goto yydefault; - yyn = yytable[yyn]; - if (yyn <= 0) - { - if (yytable_value_is_error (yyn)) - goto yyerrlab; - yyn = -yyn; - goto yyreduce; - } - - /* Count tokens shifted since error; after three, turn off error - status. */ - if (yyerrstatus) - yyerrstatus--; - - /* Shift the lookahead token. */ - YY_SYMBOL_PRINT ("Shifting", yytoken, &yylval, &yylloc); - - /* Discard the shifted token. */ - yychar = YYEMPTY; - - yystate = yyn; - YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN - *++yyvsp = yylval; - YY_IGNORE_MAYBE_UNINITIALIZED_END - - goto yynewstate; - - -/*-----------------------------------------------------------. -| yydefault -- do the default action for the current state. | -`-----------------------------------------------------------*/ -yydefault: - yyn = yydefact[yystate]; - if (yyn == 0) - goto yyerrlab; - goto yyreduce; - - -/*-----------------------------. -| yyreduce -- Do a reduction. | -`-----------------------------*/ -yyreduce: - /* yyn is the number of a rule to reduce with. */ - yylen = yyr2[yyn]; - - /* If YYLEN is nonzero, implement the default value of the action: - '$$ = $1'. - - Otherwise, the following line sets YYVAL to garbage. - This behavior is undocumented and Bison - users should not rely upon it. Assigning to YYVAL - unconditionally makes the parser a bit smaller, and it avoids a - GCC warning that YYVAL may be used uninitialized. 
*/ - yyval = yyvsp[1-yylen]; - - - YY_REDUCE_PRINT (yyn); - switch (yyn) - { - case 2: -#line 253 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.handleVariable((yyvsp[0].lex).loc, (yyvsp[0].lex).symbol, (yyvsp[0].lex).string); - } -#line 3365 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 3: -#line 259 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); - } -#line 3373 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 4: -#line 262 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).i, (yyvsp[0].lex).loc, true); - } -#line 3381 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 5: -#line 265 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned literal"); - (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).u, (yyvsp[0].lex).loc, true); - } -#line 3390 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 6: -#line 269 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer literal"); - (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).i64, (yyvsp[0].lex).loc, true); - } -#line 3399 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 7: -#line 273 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer literal"); - (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).u64, (yyvsp[0].lex).loc, true); - } -#line 3408 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 8: -#line 277 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.int16Check((yyvsp[0].lex).loc, "16-bit integer literal"); - (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((short)(yyvsp[0].lex).i, (yyvsp[0].lex).loc, true); -#endif - } -#line 3419 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 9: -#line 283 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.int16Check((yyvsp[0].lex).loc, "16-bit unsigned integer literal"); - (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((unsigned short)(yyvsp[0].lex).u, (yyvsp[0].lex).loc, true); -#endif - } -#line 3430 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 10: -#line 289 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).d, EbtFloat, (yyvsp[0].lex).loc, true); - } -#line 3438 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 11: -#line 292 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.doubleCheck((yyvsp[0].lex).loc, "double literal"); - (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).d, EbtDouble, (yyvsp[0].lex).loc, true); - } -#line 3447 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 12: -#line 296 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.float16Check((yyvsp[0].lex).loc, "half float literal"); - 
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).d, EbtFloat16, (yyvsp[0].lex).loc, true); -#endif - } -#line 3458 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 13: -#line 302 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).b, (yyvsp[0].lex).loc, true); - } -#line 3466 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 14: -#line 305 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = (yyvsp[-1].interm.intermTypedNode); - if ((yyval.interm.intermTypedNode)->getAsConstantUnion()) - (yyval.interm.intermTypedNode)->getAsConstantUnion()->setExpression(); - } -#line 3476 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 15: -#line 313 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); - } -#line 3484 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 16: -#line 316 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.handleBracketDereference((yyvsp[-2].lex).loc, (yyvsp[-3].interm.intermTypedNode), (yyvsp[-1].interm.intermTypedNode)); - } -#line 3492 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 17: -#line 319 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); - } -#line 3500 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 18: -#line 322 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.handleDotDereference((yyvsp[0].lex).loc, (yyvsp[-2].interm.intermTypedNode), *(yyvsp[0].lex).string); - } -#line 3508 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 19: -#line 325 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.variableCheck((yyvsp[-1].interm.intermTypedNode)); - parseContext.lValueErrorCheck((yyvsp[0].lex).loc, "++", (yyvsp[-1].interm.intermTypedNode)); - (yyval.interm.intermTypedNode) = parseContext.handleUnaryMath((yyvsp[0].lex).loc, "++", EOpPostIncrement, (yyvsp[-1].interm.intermTypedNode)); - } -#line 3518 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 20: -#line 330 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.variableCheck((yyvsp[-1].interm.intermTypedNode)); - parseContext.lValueErrorCheck((yyvsp[0].lex).loc, "--", (yyvsp[-1].interm.intermTypedNode)); - (yyval.interm.intermTypedNode) = parseContext.handleUnaryMath((yyvsp[0].lex).loc, "--", EOpPostDecrement, (yyvsp[-1].interm.intermTypedNode)); - } -#line 3528 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 21: -#line 338 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.integerCheck((yyvsp[0].interm.intermTypedNode), "[]"); - (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); - } -#line 3537 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 22: -#line 345 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.handleFunctionCall((yyvsp[0].interm).loc, (yyvsp[0].interm).function, (yyvsp[0].interm).intermNode); - delete (yyvsp[0].interm).function; - } -#line 3546 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 23: -#line 352 
"MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[0].interm); - } -#line 3554 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 24: -#line 358 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[-1].interm); - (yyval.interm).loc = (yyvsp[0].lex).loc; - } -#line 3563 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 25: -#line 362 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[-1].interm); - (yyval.interm).loc = (yyvsp[0].lex).loc; - } -#line 3572 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 26: -#line 369 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[-1].interm); - } -#line 3580 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 27: -#line 372 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[0].interm); - } -#line 3588 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 28: -#line 378 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - TParameter param = { 0, new TType }; - param.type->shallowCopy((yyvsp[0].interm.intermTypedNode)->getType()); - (yyvsp[-1].interm).function->addParameter(param); - (yyval.interm).function = (yyvsp[-1].interm).function; - (yyval.interm).intermNode = (yyvsp[0].interm.intermTypedNode); - } -#line 3600 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 29: -#line 385 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - TParameter param = { 0, new TType }; - param.type->shallowCopy((yyvsp[0].interm.intermTypedNode)->getType()); - (yyvsp[-2].interm).function->addParameter(param); - (yyval.interm).function = (yyvsp[-2].interm).function; - (yyval.interm).intermNode = parseContext.intermediate.growAggregate((yyvsp[-2].interm).intermNode, (yyvsp[0].interm.intermTypedNode), (yyvsp[-1].lex).loc); - } -#line 3612 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 30: -#line 395 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[-1].interm); - } -#line 3620 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 31: -#line 403 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - // Constructor - (yyval.interm).intermNode = 0; - (yyval.interm).function = parseContext.handleConstructorCall((yyvsp[0].interm.type).loc, (yyvsp[0].interm.type)); - } -#line 3630 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 32: -#line 408 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - // - // Should be a method or subroutine call, but we haven't recognized the arguments yet. 
- // - (yyval.interm).function = 0; - (yyval.interm).intermNode = 0; - - TIntermMethod* method = (yyvsp[0].interm.intermTypedNode)->getAsMethodNode(); - if (method) { - (yyval.interm).function = new TFunction(&method->getMethodName(), TType(EbtInt), EOpArrayLength); - (yyval.interm).intermNode = method->getObject(); - } else { - TIntermSymbol* symbol = (yyvsp[0].interm.intermTypedNode)->getAsSymbolNode(); - if (symbol) { - parseContext.reservedErrorCheck(symbol->getLoc(), symbol->getName()); - TFunction *function = new TFunction(&symbol->getName(), TType(EbtVoid)); - (yyval.interm).function = function; - } else - parseContext.error((yyvsp[0].interm.intermTypedNode)->getLoc(), "function call, method, or subroutine call expected", "", ""); - } - - if ((yyval.interm).function == 0) { - // error recover - TString empty(""); - (yyval.interm).function = new TFunction(&empty, TType(EbtVoid), EOpNull); - } - } -#line 3662 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 33: -#line 438 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.variableCheck((yyvsp[0].interm.intermTypedNode)); - (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); - if (TIntermMethod* method = (yyvsp[0].interm.intermTypedNode)->getAsMethodNode()) - parseContext.error((yyvsp[0].interm.intermTypedNode)->getLoc(), "incomplete method syntax", method->getMethodName().c_str(), ""); - } -#line 3673 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 34: -#line 444 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.lValueErrorCheck((yyvsp[-1].lex).loc, "++", (yyvsp[0].interm.intermTypedNode)); - (yyval.interm.intermTypedNode) = parseContext.handleUnaryMath((yyvsp[-1].lex).loc, "++", EOpPreIncrement, (yyvsp[0].interm.intermTypedNode)); - } -#line 3682 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 35: -#line 448 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.lValueErrorCheck((yyvsp[-1].lex).loc, "--", (yyvsp[0].interm.intermTypedNode)); - (yyval.interm.intermTypedNode) = parseContext.handleUnaryMath((yyvsp[-1].lex).loc, "--", EOpPreDecrement, (yyvsp[0].interm.intermTypedNode)); - } -#line 3691 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 36: -#line 452 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - if ((yyvsp[-1].interm).op != EOpNull) { - char errorOp[2] = {0, 0}; - switch((yyvsp[-1].interm).op) { - case EOpNegative: errorOp[0] = '-'; break; - case EOpLogicalNot: errorOp[0] = '!'; break; - case EOpBitwiseNot: errorOp[0] = '~'; break; - default: break; // some compilers want this - } - (yyval.interm.intermTypedNode) = parseContext.handleUnaryMath((yyvsp[-1].interm).loc, errorOp, (yyvsp[-1].interm).op, (yyvsp[0].interm.intermTypedNode)); - } else { - (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); - if ((yyval.interm.intermTypedNode)->getAsConstantUnion()) - (yyval.interm.intermTypedNode)->getAsConstantUnion()->setExpression(); - } - } -#line 3712 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 37: -#line 472 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpNull; } -#line 3718 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 38: -#line 473 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpNegative; } -#line 3724 "MachineIndependent/glslang_tab.cpp" /* 
yacc.c:1646 */ - break; - - case 39: -#line 474 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpLogicalNot; } -#line 3730 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 40: -#line 475 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpBitwiseNot; - parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bitwise not"); } -#line 3737 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 41: -#line 481 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 3743 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 42: -#line 482 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "*", EOpMul, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - if ((yyval.interm.intermTypedNode) == 0) - (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); - } -#line 3753 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 43: -#line 487 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "/", EOpDiv, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - if ((yyval.interm.intermTypedNode) == 0) - (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); - } -#line 3763 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 44: -#line 492 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.fullIntegerCheck((yyvsp[-1].lex).loc, "%"); - (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "%", EOpMod, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - if ((yyval.interm.intermTypedNode) == 0) - (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); - } -#line 3774 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 45: -#line 501 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 3780 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 46: -#line 502 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "+", EOpAdd, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - if ((yyval.interm.intermTypedNode) == 0) - (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); - } -#line 3790 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 47: -#line 507 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "-", EOpSub, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - if ((yyval.interm.intermTypedNode) == 0) - (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); - } -#line 3800 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 48: -#line 515 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 3806 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 49: -#line 516 "MachineIndependent/glslang.y" /* 
yacc.c:1646 */ - { - parseContext.fullIntegerCheck((yyvsp[-1].lex).loc, "bit shift left"); - (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "<<", EOpLeftShift, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - if ((yyval.interm.intermTypedNode) == 0) - (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); - } -#line 3817 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 50: -#line 522 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.fullIntegerCheck((yyvsp[-1].lex).loc, "bit shift right"); - (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, ">>", EOpRightShift, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - if ((yyval.interm.intermTypedNode) == 0) - (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); - } -#line 3828 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 51: -#line 531 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 3834 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 52: -#line 532 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "<", EOpLessThan, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - if ((yyval.interm.intermTypedNode) == 0) - (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc); - } -#line 3844 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 53: -#line 537 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, ">", EOpGreaterThan, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - if ((yyval.interm.intermTypedNode) == 0) - (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc); - } -#line 3854 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 54: -#line 542 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "<=", EOpLessThanEqual, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - if ((yyval.interm.intermTypedNode) == 0) - (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc); - } -#line 3864 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 55: -#line 547 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, ">=", EOpGreaterThanEqual, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - if ((yyval.interm.intermTypedNode) == 0) - (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc); - } -#line 3874 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 56: -#line 555 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 3880 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 57: -#line 556 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.arrayObjectCheck((yyvsp[-1].lex).loc, 
(yyvsp[-2].interm.intermTypedNode)->getType(), "array comparison"); - parseContext.opaqueCheck((yyvsp[-1].lex).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "=="); - parseContext.specializationCheck((yyvsp[-1].lex).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "=="); - (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "==", EOpEqual, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - if ((yyval.interm.intermTypedNode) == 0) - (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc); - } -#line 3893 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 58: -#line 564 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.arrayObjectCheck((yyvsp[-1].lex).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "array comparison"); - parseContext.opaqueCheck((yyvsp[-1].lex).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "!="); - parseContext.specializationCheck((yyvsp[-1].lex).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "!="); - (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "!=", EOpNotEqual, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - if ((yyval.interm.intermTypedNode) == 0) - (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc); - } -#line 3906 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 59: -#line 575 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 3912 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 60: -#line 576 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.fullIntegerCheck((yyvsp[-1].lex).loc, "bitwise and"); - (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "&", EOpAnd, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - if ((yyval.interm.intermTypedNode) == 0) - (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); - } -#line 3923 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 61: -#line 585 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 3929 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 62: -#line 586 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.fullIntegerCheck((yyvsp[-1].lex).loc, "bitwise exclusive or"); - (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "^", EOpExclusiveOr, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - if ((yyval.interm.intermTypedNode) == 0) - (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); - } -#line 3940 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 63: -#line 595 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 3946 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 64: -#line 596 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.fullIntegerCheck((yyvsp[-1].lex).loc, "bitwise inclusive or"); - (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "|", EOpInclusiveOr, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - if 
((yyval.interm.intermTypedNode) == 0) - (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); - } -#line 3957 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 65: -#line 605 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 3963 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 66: -#line 606 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "&&", EOpLogicalAnd, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - if ((yyval.interm.intermTypedNode) == 0) - (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc); - } -#line 3973 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 67: -#line 614 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 3979 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 68: -#line 615 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "^^", EOpLogicalXor, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - if ((yyval.interm.intermTypedNode) == 0) - (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc); - } -#line 3989 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 69: -#line 623 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 3995 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 70: -#line 624 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "||", EOpLogicalOr, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - if ((yyval.interm.intermTypedNode) == 0) - (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc); - } -#line 4005 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 71: -#line 632 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 4011 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 72: -#line 633 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - ++parseContext.controlFlowNestingLevel; - } -#line 4019 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 73: -#line 636 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - --parseContext.controlFlowNestingLevel; - parseContext.boolCheck((yyvsp[-4].lex).loc, (yyvsp[-5].interm.intermTypedNode)); - parseContext.rValueErrorCheck((yyvsp[-4].lex).loc, "?", (yyvsp[-5].interm.intermTypedNode)); - parseContext.rValueErrorCheck((yyvsp[-1].lex).loc, ":", (yyvsp[-2].interm.intermTypedNode)); - parseContext.rValueErrorCheck((yyvsp[-1].lex).loc, ":", (yyvsp[0].interm.intermTypedNode)); - (yyval.interm.intermTypedNode) = parseContext.intermediate.addSelection((yyvsp[-5].interm.intermTypedNode), (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode), (yyvsp[-4].lex).loc); - if ((yyval.interm.intermTypedNode) == 0) { - parseContext.binaryOpError((yyvsp[-4].lex).loc, ":", 
(yyvsp[-2].interm.intermTypedNode)->getCompleteString(), (yyvsp[0].interm.intermTypedNode)->getCompleteString()); - (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); - } - } -#line 4036 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 74: -#line 651 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); } -#line 4042 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 75: -#line 652 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.arrayObjectCheck((yyvsp[-1].interm).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "array assignment"); - parseContext.opaqueCheck((yyvsp[-1].interm).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "="); - parseContext.specializationCheck((yyvsp[-1].interm).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "="); - parseContext.lValueErrorCheck((yyvsp[-1].interm).loc, "assign", (yyvsp[-2].interm.intermTypedNode)); - parseContext.rValueErrorCheck((yyvsp[-1].interm).loc, "assign", (yyvsp[0].interm.intermTypedNode)); - (yyval.interm.intermTypedNode) = parseContext.intermediate.addAssign((yyvsp[-1].interm).op, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode), (yyvsp[-1].interm).loc); - if ((yyval.interm.intermTypedNode) == 0) { - parseContext.assignError((yyvsp[-1].interm).loc, "assign", (yyvsp[-2].interm.intermTypedNode)->getCompleteString(), (yyvsp[0].interm.intermTypedNode)->getCompleteString()); - (yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode); - } - } -#line 4059 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 76: -#line 667 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm).loc = (yyvsp[0].lex).loc; - (yyval.interm).op = EOpAssign; - } -#line 4068 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 77: -#line 671 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm).loc = (yyvsp[0].lex).loc; - (yyval.interm).op = EOpMulAssign; - } -#line 4077 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 78: -#line 675 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm).loc = (yyvsp[0].lex).loc; - (yyval.interm).op = EOpDivAssign; - } -#line 4086 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 79: -#line 679 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "%="); - (yyval.interm).loc = (yyvsp[0].lex).loc; - (yyval.interm).op = EOpModAssign; - } -#line 4096 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 80: -#line 684 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm).loc = (yyvsp[0].lex).loc; - (yyval.interm).op = EOpAddAssign; - } -#line 4105 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 81: -#line 688 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm).loc = (yyvsp[0].lex).loc; - (yyval.interm).op = EOpSubAssign; - } -#line 4114 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 82: -#line 692 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bit-shift left assign"); - (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpLeftShiftAssign; - } -#line 4123 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 83: -#line 696 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - 
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bit-shift right assign"); - (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpRightShiftAssign; - } -#line 4132 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 84: -#line 700 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bitwise-and assign"); - (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpAndAssign; - } -#line 4141 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 85: -#line 704 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bitwise-xor assign"); - (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpExclusiveOrAssign; - } -#line 4150 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 86: -#line 708 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bitwise-or assign"); - (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpInclusiveOrAssign; - } -#line 4159 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 87: -#line 715 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); - } -#line 4167 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 88: -#line 718 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.samplerConstructorLocationCheck((yyvsp[-1].lex).loc, ",", (yyvsp[0].interm.intermTypedNode)); - (yyval.interm.intermTypedNode) = parseContext.intermediate.addComma((yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode), (yyvsp[-1].lex).loc); - if ((yyval.interm.intermTypedNode) == 0) { - parseContext.binaryOpError((yyvsp[-1].lex).loc, ",", (yyvsp[-2].interm.intermTypedNode)->getCompleteString(), (yyvsp[0].interm.intermTypedNode)->getCompleteString()); - (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); - } - } -#line 4180 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 89: -#line 729 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.constantValueCheck((yyvsp[0].interm.intermTypedNode), ""); - (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); - } -#line 4189 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 90: -#line 736 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.handleFunctionDeclarator((yyvsp[-1].interm).loc, *(yyvsp[-1].interm).function, true /* prototype */); - (yyval.interm.intermNode) = 0; - // TODO: 4.0 functionality: subroutines: make the identifier a user type for this signature - } -#line 4199 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 91: -#line 741 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - if ((yyvsp[-1].interm).intermNode && (yyvsp[-1].interm).intermNode->getAsAggregate()) - (yyvsp[-1].interm).intermNode->getAsAggregate()->setOperator(EOpSequence); - (yyval.interm.intermNode) = (yyvsp[-1].interm).intermNode; - } -#line 4209 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 92: -#line 746 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.profileRequires((yyvsp[-3].lex).loc, ENoProfile, 130, 0, "precision statement"); - - // lazy setting of the previous scope's defaults, has effect only the first time it is called in a particular scope - 
parseContext.symbolTable.setPreviousDefaultPrecisions(&parseContext.defaultPrecision[0]); - parseContext.setDefaultPrecision((yyvsp[-3].lex).loc, (yyvsp[-1].interm.type), (yyvsp[-2].interm.type).qualifier.precision); - (yyval.interm.intermNode) = 0; - } -#line 4222 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 93: -#line 754 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.declareBlock((yyvsp[-1].interm).loc, *(yyvsp[-1].interm).typeList); - (yyval.interm.intermNode) = 0; - } -#line 4231 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 94: -#line 758 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.declareBlock((yyvsp[-2].interm).loc, *(yyvsp[-2].interm).typeList, (yyvsp[-1].lex).string); - (yyval.interm.intermNode) = 0; - } -#line 4240 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 95: -#line 762 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.declareBlock((yyvsp[-3].interm).loc, *(yyvsp[-3].interm).typeList, (yyvsp[-2].lex).string, (yyvsp[-1].interm).arraySizes); - (yyval.interm.intermNode) = 0; - } -#line 4249 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 96: -#line 766 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.globalQualifierFixCheck((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type).qualifier); - parseContext.updateStandaloneQualifierDefaults((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type)); - (yyval.interm.intermNode) = 0; - } -#line 4259 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 97: -#line 771 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.checkNoShaderLayouts((yyvsp[-2].interm.type).loc, (yyvsp[-2].interm.type).shaderQualifiers); - parseContext.addQualifierToExisting((yyvsp[-2].interm.type).loc, (yyvsp[-2].interm.type).qualifier, *(yyvsp[-1].lex).string); - (yyval.interm.intermNode) = 0; - } -#line 4269 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 98: -#line 776 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.checkNoShaderLayouts((yyvsp[-3].interm.type).loc, (yyvsp[-3].interm.type).shaderQualifiers); - (yyvsp[-1].interm.identifierList)->push_back((yyvsp[-2].lex).string); - parseContext.addQualifierToExisting((yyvsp[-3].interm.type).loc, (yyvsp[-3].interm.type).qualifier, *(yyvsp[-1].interm.identifierList)); - (yyval.interm.intermNode) = 0; - } -#line 4280 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 99: -#line 785 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { parseContext.nestedBlockCheck((yyvsp[-2].interm.type).loc); } -#line 4286 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 100: -#line 785 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - --parseContext.structNestingLevel; - parseContext.blockName = (yyvsp[-4].lex).string; - parseContext.globalQualifierFixCheck((yyvsp[-5].interm.type).loc, (yyvsp[-5].interm.type).qualifier); - parseContext.checkNoShaderLayouts((yyvsp[-5].interm.type).loc, (yyvsp[-5].interm.type).shaderQualifiers); - parseContext.currentBlockQualifier = (yyvsp[-5].interm.type).qualifier; - (yyval.interm).loc = (yyvsp[-5].interm.type).loc; - (yyval.interm).typeList = (yyvsp[-1].interm.typeList); - } -#line 4300 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 101: -#line 796 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.identifierList) = new TIdentifierList; - 
(yyval.interm.identifierList)->push_back((yyvsp[0].lex).string); - } -#line 4309 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 102: -#line 800 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.identifierList) = (yyvsp[-2].interm.identifierList); - (yyval.interm.identifierList)->push_back((yyvsp[0].lex).string); - } -#line 4318 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 103: -#line 807 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm).function = (yyvsp[-1].interm.function); - (yyval.interm).loc = (yyvsp[0].lex).loc; - } -#line 4327 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 104: -#line 814 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.function) = (yyvsp[0].interm.function); - } -#line 4335 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 105: -#line 817 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.function) = (yyvsp[0].interm.function); - } -#line 4343 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 106: -#line 824 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - // Add the parameter - (yyval.interm.function) = (yyvsp[-1].interm.function); - if ((yyvsp[0].interm).param.type->getBasicType() != EbtVoid) - (yyvsp[-1].interm.function)->addParameter((yyvsp[0].interm).param); - else - delete (yyvsp[0].interm).param.type; - } -#line 4356 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 107: -#line 832 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - // - // Only first parameter of one-parameter functions can be void - // The check for named parameters not being void is done in parameter_declarator - // - if ((yyvsp[0].interm).param.type->getBasicType() == EbtVoid) { - // - // This parameter > first is void - // - parseContext.error((yyvsp[-1].lex).loc, "cannot be an argument type except for '(void)'", "void", ""); - delete (yyvsp[0].interm).param.type; - } else { - // Add the parameter - (yyval.interm.function) = (yyvsp[-2].interm.function); - (yyvsp[-2].interm.function)->addParameter((yyvsp[0].interm).param); - } - } -#line 4378 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 108: -#line 852 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - if ((yyvsp[-2].interm.type).qualifier.storage != EvqGlobal && (yyvsp[-2].interm.type).qualifier.storage != EvqTemporary) { - parseContext.error((yyvsp[-1].lex).loc, "no qualifiers allowed for function return", - GetStorageQualifierString((yyvsp[-2].interm.type).qualifier.storage), ""); - } - if ((yyvsp[-2].interm.type).arraySizes) - parseContext.arraySizeRequiredCheck((yyvsp[-2].interm.type).loc, *(yyvsp[-2].interm.type).arraySizes); - - // Add the function as a prototype after parsing it (we do not support recursion) - TFunction *function; - TType type((yyvsp[-2].interm.type)); - function = new TFunction((yyvsp[-1].lex).string, type); - (yyval.interm.function) = function; - } -#line 4397 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 109: -#line 870 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - if ((yyvsp[-1].interm.type).arraySizes) { - parseContext.profileRequires((yyvsp[-1].interm.type).loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type"); - parseContext.profileRequires((yyvsp[-1].interm.type).loc, EEsProfile, 300, 0, "arrayed type"); - parseContext.arraySizeRequiredCheck((yyvsp[-1].interm.type).loc, 
*(yyvsp[-1].interm.type).arraySizes); - } - if ((yyvsp[-1].interm.type).basicType == EbtVoid) { - parseContext.error((yyvsp[0].lex).loc, "illegal use of type 'void'", (yyvsp[0].lex).string->c_str(), ""); - } - parseContext.reservedErrorCheck((yyvsp[0].lex).loc, *(yyvsp[0].lex).string); - - TParameter param = {(yyvsp[0].lex).string, new TType((yyvsp[-1].interm.type))}; - (yyval.interm).loc = (yyvsp[0].lex).loc; - (yyval.interm).param = param; - } -#line 4417 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 110: -#line 885 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - if ((yyvsp[-2].interm.type).arraySizes) { - parseContext.profileRequires((yyvsp[-2].interm.type).loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type"); - parseContext.profileRequires((yyvsp[-2].interm.type).loc, EEsProfile, 300, 0, "arrayed type"); - parseContext.arraySizeRequiredCheck((yyvsp[-2].interm.type).loc, *(yyvsp[-2].interm.type).arraySizes); - } - parseContext.arrayDimCheck((yyvsp[-1].lex).loc, (yyvsp[-2].interm.type).arraySizes, (yyvsp[0].interm).arraySizes); - - parseContext.arraySizeRequiredCheck((yyvsp[0].interm).loc, *(yyvsp[0].interm).arraySizes); - parseContext.reservedErrorCheck((yyvsp[-1].lex).loc, *(yyvsp[-1].lex).string); - - (yyvsp[-2].interm.type).arraySizes = (yyvsp[0].interm).arraySizes; - - TParameter param = { (yyvsp[-1].lex).string, new TType((yyvsp[-2].interm.type))}; - (yyval.interm).loc = (yyvsp[-1].lex).loc; - (yyval.interm).param = param; - } -#line 4439 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 111: -#line 908 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[0].interm); - if ((yyvsp[-1].interm.type).qualifier.precision != EpqNone) - (yyval.interm).param.type->getQualifier().precision = (yyvsp[-1].interm.type).qualifier.precision; - parseContext.precisionQualifierCheck((yyval.interm).loc, (yyval.interm).param.type->getBasicType(), (yyval.interm).param.type->getQualifier()); - - parseContext.checkNoShaderLayouts((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type).shaderQualifiers); - parseContext.parameterTypeCheck((yyvsp[0].interm).loc, (yyvsp[-1].interm.type).qualifier.storage, *(yyval.interm).param.type); - parseContext.paramCheckFix((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type).qualifier, *(yyval.interm).param.type); - - } -#line 4455 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 112: -#line 919 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[0].interm); - - parseContext.parameterTypeCheck((yyvsp[0].interm).loc, EvqIn, *(yyvsp[0].interm).param.type); - parseContext.paramCheckFix((yyvsp[0].interm).loc, EvqTemporary, *(yyval.interm).param.type); - parseContext.precisionQualifierCheck((yyval.interm).loc, (yyval.interm).param.type->getBasicType(), (yyval.interm).param.type->getQualifier()); - } -#line 4467 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 113: -#line 929 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[0].interm); - if ((yyvsp[-1].interm.type).qualifier.precision != EpqNone) - (yyval.interm).param.type->getQualifier().precision = (yyvsp[-1].interm.type).qualifier.precision; - parseContext.precisionQualifierCheck((yyvsp[-1].interm.type).loc, (yyval.interm).param.type->getBasicType(), (yyval.interm).param.type->getQualifier()); - - parseContext.checkNoShaderLayouts((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type).shaderQualifiers); - 
parseContext.parameterTypeCheck((yyvsp[0].interm).loc, (yyvsp[-1].interm.type).qualifier.storage, *(yyval.interm).param.type); - parseContext.paramCheckFix((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type).qualifier, *(yyval.interm).param.type); - } -#line 4482 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 114: -#line 939 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[0].interm); - - parseContext.parameterTypeCheck((yyvsp[0].interm).loc, EvqIn, *(yyvsp[0].interm).param.type); - parseContext.paramCheckFix((yyvsp[0].interm).loc, EvqTemporary, *(yyval.interm).param.type); - parseContext.precisionQualifierCheck((yyval.interm).loc, (yyval.interm).param.type->getBasicType(), (yyval.interm).param.type->getQualifier()); - } -#line 4494 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 115: -#line 949 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - TParameter param = { 0, new TType((yyvsp[0].interm.type)) }; - (yyval.interm).param = param; - if ((yyvsp[0].interm.type).arraySizes) - parseContext.arraySizeRequiredCheck((yyvsp[0].interm.type).loc, *(yyvsp[0].interm.type).arraySizes); - } -#line 4505 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 116: -#line 958 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[0].interm); - } -#line 4513 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 117: -#line 961 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[-2].interm); - parseContext.declareVariable((yyvsp[0].lex).loc, *(yyvsp[0].lex).string, (yyvsp[-2].interm).type); - } -#line 4522 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 118: -#line 965 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[-3].interm); - parseContext.declareVariable((yyvsp[-1].lex).loc, *(yyvsp[-1].lex).string, (yyvsp[-3].interm).type, (yyvsp[0].interm).arraySizes); - } -#line 4531 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 119: -#line 969 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm).type = (yyvsp[-5].interm).type; - TIntermNode* initNode = parseContext.declareVariable((yyvsp[-3].lex).loc, *(yyvsp[-3].lex).string, (yyvsp[-5].interm).type, (yyvsp[-2].interm).arraySizes, (yyvsp[0].interm.intermTypedNode)); - (yyval.interm).intermNode = parseContext.intermediate.growAggregate((yyvsp[-5].interm).intermNode, initNode, (yyvsp[-1].lex).loc); - } -#line 4541 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 120: -#line 974 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm).type = (yyvsp[-4].interm).type; - TIntermNode* initNode = parseContext.declareVariable((yyvsp[-2].lex).loc, *(yyvsp[-2].lex).string, (yyvsp[-4].interm).type, 0, (yyvsp[0].interm.intermTypedNode)); - (yyval.interm).intermNode = parseContext.intermediate.growAggregate((yyvsp[-4].interm).intermNode, initNode, (yyvsp[-1].lex).loc); - } -#line 4551 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 121: -#line 982 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm).type = (yyvsp[0].interm.type); - (yyval.interm).intermNode = 0; - parseContext.declareTypeDefaults((yyval.interm).loc, (yyval.interm).type); - } -#line 4561 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 122: -#line 987 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm).type = 
(yyvsp[-1].interm.type); - (yyval.interm).intermNode = 0; - parseContext.declareVariable((yyvsp[0].lex).loc, *(yyvsp[0].lex).string, (yyvsp[-1].interm.type)); - } -#line 4571 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 123: -#line 992 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm).type = (yyvsp[-2].interm.type); - (yyval.interm).intermNode = 0; - parseContext.declareVariable((yyvsp[-1].lex).loc, *(yyvsp[-1].lex).string, (yyvsp[-2].interm.type), (yyvsp[0].interm).arraySizes); - } -#line 4581 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 124: -#line 997 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm).type = (yyvsp[-4].interm.type); - TIntermNode* initNode = parseContext.declareVariable((yyvsp[-3].lex).loc, *(yyvsp[-3].lex).string, (yyvsp[-4].interm.type), (yyvsp[-2].interm).arraySizes, (yyvsp[0].interm.intermTypedNode)); - (yyval.interm).intermNode = parseContext.intermediate.growAggregate(0, initNode, (yyvsp[-1].lex).loc); - } -#line 4591 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 125: -#line 1002 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm).type = (yyvsp[-3].interm.type); - TIntermNode* initNode = parseContext.declareVariable((yyvsp[-2].lex).loc, *(yyvsp[-2].lex).string, (yyvsp[-3].interm.type), 0, (yyvsp[0].interm.intermTypedNode)); - (yyval.interm).intermNode = parseContext.intermediate.growAggregate(0, initNode, (yyvsp[-1].lex).loc); - } -#line 4601 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 126: -#line 1011 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type) = (yyvsp[0].interm.type); - - parseContext.globalQualifierTypeCheck((yyvsp[0].interm.type).loc, (yyvsp[0].interm.type).qualifier, (yyval.interm.type)); - if ((yyvsp[0].interm.type).arraySizes) { - parseContext.profileRequires((yyvsp[0].interm.type).loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type"); - parseContext.profileRequires((yyvsp[0].interm.type).loc, EEsProfile, 300, 0, "arrayed type"); - } - - parseContext.precisionQualifierCheck((yyval.interm.type).loc, (yyval.interm.type).basicType, (yyval.interm.type).qualifier); - } -#line 4617 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 127: -#line 1022 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.globalQualifierFixCheck((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type).qualifier); - parseContext.globalQualifierTypeCheck((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type).qualifier, (yyvsp[0].interm.type)); - - if ((yyvsp[0].interm.type).arraySizes) { - parseContext.profileRequires((yyvsp[0].interm.type).loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type"); - parseContext.profileRequires((yyvsp[0].interm.type).loc, EEsProfile, 300, 0, "arrayed type"); - } - - if ((yyvsp[0].interm.type).arraySizes && parseContext.arrayQualifierError((yyvsp[0].interm.type).loc, (yyvsp[-1].interm.type).qualifier)) - (yyvsp[0].interm.type).arraySizes = 0; - - parseContext.checkNoShaderLayouts((yyvsp[0].interm.type).loc, (yyvsp[-1].interm.type).shaderQualifiers); - (yyvsp[0].interm.type).shaderQualifiers.merge((yyvsp[-1].interm.type).shaderQualifiers); - parseContext.mergeQualifiers((yyvsp[0].interm.type).loc, (yyvsp[0].interm.type).qualifier, (yyvsp[-1].interm.type).qualifier, true); - parseContext.precisionQualifierCheck((yyvsp[0].interm.type).loc, (yyvsp[0].interm.type).basicType, (yyvsp[0].interm.type).qualifier); - - 
(yyval.interm.type) = (yyvsp[0].interm.type); - - if (! (yyval.interm.type).qualifier.isInterpolation() && - ((parseContext.language == EShLangVertex && (yyval.interm.type).qualifier.storage == EvqVaryingOut) || - (parseContext.language == EShLangFragment && (yyval.interm.type).qualifier.storage == EvqVaryingIn))) - (yyval.interm.type).qualifier.smooth = true; - } -#line 4646 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 128: -#line 1049 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.globalCheck((yyvsp[0].lex).loc, "invariant"); - parseContext.profileRequires((yyval.interm.type).loc, ENoProfile, 120, 0, "invariant"); - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.invariant = true; - } -#line 4657 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 129: -#line 1058 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.globalCheck((yyvsp[0].lex).loc, "smooth"); - parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "smooth"); - parseContext.profileRequires((yyvsp[0].lex).loc, EEsProfile, 300, 0, "smooth"); - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.smooth = true; - } -#line 4669 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 130: -#line 1065 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.globalCheck((yyvsp[0].lex).loc, "flat"); - parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "flat"); - parseContext.profileRequires((yyvsp[0].lex).loc, EEsProfile, 300, 0, "flat"); - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.flat = true; - } -#line 4681 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 131: -#line 1072 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.globalCheck((yyvsp[0].lex).loc, "noperspective"); - parseContext.requireProfile((yyvsp[0].lex).loc, ~EEsProfile, "noperspective"); - parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "noperspective"); - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.nopersp = true; - } -#line 4693 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 132: -#line 1079 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.globalCheck((yyvsp[0].lex).loc, "__explicitInterpAMD"); - parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 450, E_GL_AMD_shader_explicit_vertex_parameter, "explicit interpolation"); - parseContext.profileRequires((yyvsp[0].lex).loc, ECompatibilityProfile, 450, E_GL_AMD_shader_explicit_vertex_parameter, "explicit interpolation"); - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.explicitInterp = true; -#endif - } -#line 4707 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 133: -#line 1091 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type) = (yyvsp[-1].interm.type); - } -#line 4715 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 134: -#line 1097 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type) = (yyvsp[0].interm.type); - } -#line 4723 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 135: -#line 1100 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type) = (yyvsp[-2].interm.type); - 
(yyval.interm.type).shaderQualifiers.merge((yyvsp[0].interm.type).shaderQualifiers); - parseContext.mergeObjectLayoutQualifiers((yyval.interm.type).qualifier, (yyvsp[0].interm.type).qualifier, false); - } -#line 4733 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 136: -#line 1107 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc); - parseContext.setLayoutQualifier((yyvsp[0].lex).loc, (yyval.interm.type), *(yyvsp[0].lex).string); - } -#line 4742 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 137: -#line 1111 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[-2].lex).loc); - parseContext.setLayoutQualifier((yyvsp[-2].lex).loc, (yyval.interm.type), *(yyvsp[-2].lex).string, (yyvsp[0].interm.intermTypedNode)); - } -#line 4751 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 138: -#line 1115 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { // because "shared" is both an identifier and a keyword - (yyval.interm.type).init((yyvsp[0].lex).loc); - TString strShared("shared"); - parseContext.setLayoutQualifier((yyvsp[0].lex).loc, (yyval.interm.type), strShared); - } -#line 4761 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 139: -#line 1123 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.profileRequires((yyval.interm.type).loc, ECoreProfile | ECompatibilityProfile, 400, E_GL_ARB_gpu_shader5, "precise"); - parseContext.profileRequires((yyvsp[0].lex).loc, EEsProfile, 320, Num_AEP_gpu_shader5, AEP_gpu_shader5, "precise"); - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.noContraction = true; - } -#line 4772 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 140: -#line 1132 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type) = (yyvsp[0].interm.type); - } -#line 4780 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 141: -#line 1135 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type) = (yyvsp[-1].interm.type); - if ((yyval.interm.type).basicType == EbtVoid) - (yyval.interm.type).basicType = (yyvsp[0].interm.type).basicType; - - (yyval.interm.type).shaderQualifiers.merge((yyvsp[0].interm.type).shaderQualifiers); - parseContext.mergeQualifiers((yyval.interm.type).loc, (yyval.interm.type).qualifier, (yyvsp[0].interm.type).qualifier, false); - } -#line 4793 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 142: -#line 1146 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type) = (yyvsp[0].interm.type); - } -#line 4801 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 143: -#line 1149 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type) = (yyvsp[0].interm.type); - } -#line 4809 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 144: -#line 1152 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.checkPrecisionQualifier((yyvsp[0].interm.type).loc, (yyvsp[0].interm.type).qualifier.precision); - (yyval.interm.type) = (yyvsp[0].interm.type); - } -#line 4818 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 145: -#line 1156 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - // allow inheritance of storage qualifier from block declaration - (yyval.interm.type) = (yyvsp[0].interm.type); - } -#line 4827 "MachineIndependent/glslang_tab.cpp" 
/* yacc.c:1646 */ - break; - - case 146: -#line 1160 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - // allow inheritance of storage qualifier from block declaration - (yyval.interm.type) = (yyvsp[0].interm.type); - } -#line 4836 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 147: -#line 1164 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - // allow inheritance of storage qualifier from block declaration - (yyval.interm.type) = (yyvsp[0].interm.type); - } -#line 4845 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 148: -#line 1171 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.storage = EvqConst; // will later turn into EvqConstReadOnly, if the initializer is not constant - } -#line 4854 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 149: -#line 1175 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.requireStage((yyvsp[0].lex).loc, EShLangVertex, "attribute"); - parseContext.checkDeprecated((yyvsp[0].lex).loc, ECoreProfile, 130, "attribute"); - parseContext.checkDeprecated((yyvsp[0].lex).loc, ENoProfile, 130, "attribute"); - parseContext.requireNotRemoved((yyvsp[0].lex).loc, ECoreProfile, 420, "attribute"); - parseContext.requireNotRemoved((yyvsp[0].lex).loc, EEsProfile, 300, "attribute"); - - parseContext.globalCheck((yyvsp[0].lex).loc, "attribute"); - - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.storage = EvqVaryingIn; - } -#line 4871 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 150: -#line 1187 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.checkDeprecated((yyvsp[0].lex).loc, ENoProfile, 130, "varying"); - parseContext.checkDeprecated((yyvsp[0].lex).loc, ECoreProfile, 130, "varying"); - parseContext.requireNotRemoved((yyvsp[0].lex).loc, ECoreProfile, 420, "varying"); - parseContext.requireNotRemoved((yyvsp[0].lex).loc, EEsProfile, 300, "varying"); - - parseContext.globalCheck((yyvsp[0].lex).loc, "varying"); - - (yyval.interm.type).init((yyvsp[0].lex).loc); - if (parseContext.language == EShLangVertex) - (yyval.interm.type).qualifier.storage = EvqVaryingOut; - else - (yyval.interm.type).qualifier.storage = EvqVaryingIn; - } -#line 4890 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 151: -#line 1201 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.globalCheck((yyvsp[0].lex).loc, "inout"); - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.storage = EvqInOut; - } -#line 4900 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 152: -#line 1206 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.globalCheck((yyvsp[0].lex).loc, "in"); - (yyval.interm.type).init((yyvsp[0].lex).loc); - // whether this is a parameter "in" or a pipeline "in" will get sorted out a bit later - (yyval.interm.type).qualifier.storage = EvqIn; - } -#line 4911 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 153: -#line 1212 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.globalCheck((yyvsp[0].lex).loc, "out"); - (yyval.interm.type).init((yyvsp[0].lex).loc); - // whether this is a parameter "out" or a pipeline "out" will get sorted out a bit later - (yyval.interm.type).qualifier.storage = EvqOut; - } -#line 4922 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 154: -#line 
1218 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 120, 0, "centroid"); - parseContext.profileRequires((yyvsp[0].lex).loc, EEsProfile, 300, 0, "centroid"); - parseContext.globalCheck((yyvsp[0].lex).loc, "centroid"); - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.centroid = true; - } -#line 4934 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 155: -#line 1225 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.globalCheck((yyvsp[0].lex).loc, "patch"); - parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangTessControlMask | EShLangTessEvaluationMask), "patch"); - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.patch = true; - } -#line 4945 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 156: -#line 1231 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.globalCheck((yyvsp[0].lex).loc, "sample"); - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.sample = true; - } -#line 4955 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 157: -#line 1236 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.globalCheck((yyvsp[0].lex).loc, "uniform"); - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.storage = EvqUniform; - } -#line 4965 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 158: -#line 1241 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.globalCheck((yyvsp[0].lex).loc, "buffer"); - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.storage = EvqBuffer; - } -#line 4975 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 159: -#line 1246 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.globalCheck((yyvsp[0].lex).loc, "shared"); - parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, 430, E_GL_ARB_compute_shader, "shared"); - parseContext.profileRequires((yyvsp[0].lex).loc, EEsProfile, 310, 0, "shared"); - parseContext.requireStage((yyvsp[0].lex).loc, EShLangCompute, "shared"); - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.storage = EvqShared; - } -#line 4988 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 160: -#line 1254 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.coherent = true; - } -#line 4997 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 161: -#line 1258 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.volatil = true; - } -#line 5006 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 162: -#line 1262 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.restrict = true; - } -#line 5015 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 163: -#line 1266 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.readonly = true; - } -#line 5024 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 164: -#line 1270 "MachineIndependent/glslang.y" /* yacc.c:1646 */ 
- { - (yyval.interm.type).init((yyvsp[0].lex).loc); - (yyval.interm.type).qualifier.writeonly = true; - } -#line 5033 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 165: -#line 1274 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.spvRemoved((yyvsp[0].lex).loc, "subroutine"); - parseContext.globalCheck((yyvsp[0].lex).loc, "subroutine"); - parseContext.unimplemented((yyvsp[0].lex).loc, "subroutine"); - (yyval.interm.type).init((yyvsp[0].lex).loc); - } -#line 5044 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 166: -#line 1280 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.spvRemoved((yyvsp[-3].lex).loc, "subroutine"); - parseContext.globalCheck((yyvsp[-3].lex).loc, "subroutine"); - parseContext.unimplemented((yyvsp[-3].lex).loc, "subroutine"); - (yyval.interm.type).init((yyvsp[-3].lex).loc); - } -#line 5055 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 167: -#line 1289 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - // TODO - } -#line 5063 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 168: -#line 1292 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - // TODO: 4.0 semantics: subroutines - // 1) make sure each identifier is a type declared earlier with SUBROUTINE - // 2) save all of the identifiers for future comparison with the declared function - } -#line 5073 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 169: -#line 1300 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type) = (yyvsp[0].interm.type); - (yyval.interm.type).qualifier.precision = parseContext.getDefaultPrecision((yyval.interm.type)); - } -#line 5082 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 170: -#line 1304 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.arrayDimCheck((yyvsp[0].interm).loc, (yyvsp[0].interm).arraySizes, 0); - (yyval.interm.type) = (yyvsp[-1].interm.type); - (yyval.interm.type).qualifier.precision = parseContext.getDefaultPrecision((yyval.interm.type)); - (yyval.interm.type).arraySizes = (yyvsp[0].interm).arraySizes; - } -#line 5093 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 171: -#line 1313 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm).loc = (yyvsp[-1].lex).loc; - (yyval.interm).arraySizes = new TArraySizes; - (yyval.interm).arraySizes->addInnerSize(); - } -#line 5103 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 172: -#line 1318 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm).loc = (yyvsp[-2].lex).loc; - (yyval.interm).arraySizes = new TArraySizes; - - TArraySize size; - parseContext.arraySizeCheck((yyvsp[-1].interm.intermTypedNode)->getLoc(), (yyvsp[-1].interm.intermTypedNode), size); - (yyval.interm).arraySizes->addInnerSize(size); - } -#line 5116 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 173: -#line 1326 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[-2].interm); - (yyval.interm).arraySizes->addInnerSize(); - } -#line 5125 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 174: -#line 1330 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm) = (yyvsp[-3].interm); - - TArraySize size; - parseContext.arraySizeCheck((yyvsp[-1].interm.intermTypedNode)->getLoc(), (yyvsp[-1].interm.intermTypedNode), size); - (yyval.interm).arraySizes->addInnerSize(size); - } 
-#line 5137 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 175: -#line 1340 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtVoid; - } -#line 5146 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 176: -#line 1344 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - } -#line 5155 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 177: -#line 1348 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.doubleCheck((yyvsp[0].lex).loc, "double"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - } -#line 5165 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 178: -#line 1353 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.float16Check((yyvsp[0].lex).loc, "half float", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; -#endif - } -#line 5177 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 179: -#line 1360 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt; - } -#line 5186 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 180: -#line 1364 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtUint; - } -#line 5196 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 181: -#line 1369 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt64; - } -#line 5206 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 182: -#line 1374 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtUint64; - } -#line 5216 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 183: -#line 1379 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.int16Check((yyvsp[0].lex).loc, "16-bit integer", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt16; -#endif - } -#line 5228 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 184: -#line 1386 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.int16Check((yyvsp[0].lex).loc, "16-bit unsigned integer", 
parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtUint16; -#endif - } -#line 5240 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 185: -#line 1393 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtBool; - } -#line 5249 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 186: -#line 1397 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setVector(2); - } -#line 5259 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 187: -#line 1402 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setVector(3); - } -#line 5269 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 188: -#line 1407 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setVector(4); - } -#line 5279 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 189: -#line 1412 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.doubleCheck((yyvsp[0].lex).loc, "double vector"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setVector(2); - } -#line 5290 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 190: -#line 1418 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.doubleCheck((yyvsp[0].lex).loc, "double vector"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setVector(3); - } -#line 5301 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 191: -#line 1424 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.doubleCheck((yyvsp[0].lex).loc, "double vector"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setVector(4); - } -#line 5312 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 192: -#line 1430 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.float16Check((yyvsp[0].lex).loc, "half float vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setVector(2); -#endif - } -#line 5325 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 193: -#line 1438 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.float16Check((yyvsp[0].lex).loc, "half float vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = 
EbtFloat16; - (yyval.interm.type).setVector(3); -#endif - } -#line 5338 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 194: -#line 1446 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.float16Check((yyvsp[0].lex).loc, "half float vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setVector(4); -#endif - } -#line 5351 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 195: -#line 1454 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtBool; - (yyval.interm.type).setVector(2); - } -#line 5361 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 196: -#line 1459 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtBool; - (yyval.interm.type).setVector(3); - } -#line 5371 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 197: -#line 1464 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtBool; - (yyval.interm.type).setVector(4); - } -#line 5381 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 198: -#line 1469 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt; - (yyval.interm.type).setVector(2); - } -#line 5391 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 199: -#line 1474 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt; - (yyval.interm.type).setVector(3); - } -#line 5401 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 200: -#line 1479 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt; - (yyval.interm.type).setVector(4); - } -#line 5411 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 201: -#line 1484 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt64; - (yyval.interm.type).setVector(2); - } -#line 5422 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 202: -#line 1490 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt64; - (yyval.interm.type).setVector(3); - } -#line 5433 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 203: -#line 1496 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - 
parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt64; - (yyval.interm.type).setVector(4); - } -#line 5444 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 204: -#line 1502 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.int16Check((yyvsp[0].lex).loc, "16-bit integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt16; - (yyval.interm.type).setVector(2); -#endif - } -#line 5457 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 205: -#line 1510 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.int16Check((yyvsp[0].lex).loc, "16-bit integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt16; - (yyval.interm.type).setVector(3); -#endif - } -#line 5470 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 206: -#line 1518 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.int16Check((yyvsp[0].lex).loc, "16-bit integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtInt16; - (yyval.interm.type).setVector(4); -#endif - } -#line 5483 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 207: -#line 1526 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer vector"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtUint; - (yyval.interm.type).setVector(2); - } -#line 5494 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 208: -#line 1532 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer vector"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtUint; - (yyval.interm.type).setVector(3); - } -#line 5505 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 209: -#line 1538 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer vector"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtUint; - (yyval.interm.type).setVector(4); - } -#line 5516 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 210: -#line 1544 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtUint64; - (yyval.interm.type).setVector(2); - } -#line 5527 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 211: -#line 1550 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - 
parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtUint64; - (yyval.interm.type).setVector(3); - } -#line 5538 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 212: -#line 1556 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtUint64; - (yyval.interm.type).setVector(4); - } -#line 5549 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 213: -#line 1562 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.int16Check((yyvsp[0].lex).loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtUint16; - (yyval.interm.type).setVector(2); -#endif - } -#line 5562 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 214: -#line 1570 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.int16Check((yyvsp[0].lex).loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtUint16; - (yyval.interm.type).setVector(3); -#endif - } -#line 5575 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 215: -#line 1578 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.int16Check((yyvsp[0].lex).loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtUint16; - (yyval.interm.type).setVector(4); -#endif - } -#line 5588 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 216: -#line 1586 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setMatrix(2, 2); - } -#line 5598 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 217: -#line 1591 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setMatrix(3, 3); - } -#line 5608 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 218: -#line 1596 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setMatrix(4, 4); - } -#line 5618 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 219: -#line 1601 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setMatrix(2, 2); - } -#line 5628 
"MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 220: -#line 1606 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setMatrix(2, 3); - } -#line 5638 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 221: -#line 1611 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setMatrix(2, 4); - } -#line 5648 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 222: -#line 1616 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setMatrix(3, 2); - } -#line 5658 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 223: -#line 1621 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setMatrix(3, 3); - } -#line 5668 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 224: -#line 1626 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setMatrix(3, 4); - } -#line 5678 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 225: -#line 1631 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setMatrix(4, 2); - } -#line 5688 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 226: -#line 1636 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setMatrix(4, 3); - } -#line 5698 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 227: -#line 1641 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat; - (yyval.interm.type).setMatrix(4, 4); - } -#line 5708 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 228: -#line 1646 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(2, 2); - } -#line 5719 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 229: -#line 1652 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(3, 3); - } -#line 5730 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 230: -#line 1658 
"MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(4, 4); - } -#line 5741 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 231: -#line 1664 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(2, 2); - } -#line 5752 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 232: -#line 1670 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(2, 3); - } -#line 5763 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 233: -#line 1676 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(2, 4); - } -#line 5774 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 234: -#line 1682 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(3, 2); - } -#line 5785 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 235: -#line 1688 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(3, 3); - } -#line 5796 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 236: -#line 1694 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(3, 4); - } -#line 5807 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 237: -#line 1700 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(4, 2); - } -#line 5818 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 238: -#line 1706 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(4, 3); - } -#line 5829 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - 
break; - - case 239: -#line 1712 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtDouble; - (yyval.interm.type).setMatrix(4, 4); - } -#line 5840 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 240: -#line 1718 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(2, 2); -#endif - } -#line 5853 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 241: -#line 1726 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(3, 3); -#endif - } -#line 5866 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 242: -#line 1734 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(4, 4); -#endif - } -#line 5879 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 243: -#line 1742 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(2, 2); -#endif - } -#line 5892 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 244: -#line 1750 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(2, 3); -#endif - } -#line 5905 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 245: -#line 1758 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(2, 4); -#endif - } -#line 5918 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 246: -#line 1766 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, 
parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(3, 2); -#endif - } -#line 5931 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 247: -#line 1774 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(3, 3); -#endif - } -#line 5944 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 248: -#line 1782 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(3, 4); -#endif - } -#line 5957 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 249: -#line 1790 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(4, 2); -#endif - } -#line 5970 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 250: -#line 1798 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(4, 3); -#endif - } -#line 5983 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 251: -#line 1806 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { -#ifdef AMD_EXTENSIONS - parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel()); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtFloat16; - (yyval.interm.type).setMatrix(4, 4); -#endif - } -#line 5996 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 252: -#line 1814 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.vulkanRemoved((yyvsp[0].lex).loc, "atomic counter types"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtAtomicUint; - } -#line 6006 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 253: -#line 1819 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtFloat, Esd1D); - } -#line 6016 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 254: -#line 1824 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - 
(yyval.interm.type).sampler.set(EbtFloat, Esd2D); - } -#line 6026 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 255: -#line 1829 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtFloat, Esd3D); - } -#line 6036 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 256: -#line 1834 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtFloat, EsdCube); - } -#line 6046 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 257: -#line 1839 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtFloat, Esd1D, false, true); - } -#line 6056 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 258: -#line 1844 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtFloat, Esd2D, false, true); - } -#line 6066 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 259: -#line 1849 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtFloat, EsdCube, false, true); - } -#line 6076 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 260: -#line 1854 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtFloat, Esd1D, true); - } -#line 6086 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 261: -#line 1859 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtFloat, Esd2D, true); - } -#line 6096 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 262: -#line 1864 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtFloat, Esd1D, true, true); - } -#line 6106 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 263: -#line 1869 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtFloat, Esd2D, true, true); - } -#line 6116 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 264: -#line 1874 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - 
(yyval.interm.type).sampler.set(EbtFloat, EsdCube, true); - } -#line 6126 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 265: -#line 1879 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtFloat, EsdCube, true, true); - } -#line 6136 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 266: -#line 1884 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtInt, Esd1D); - } -#line 6146 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 267: -#line 1889 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtInt, Esd2D); - } -#line 6156 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 268: -#line 1894 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtInt, Esd3D); - } -#line 6166 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 269: -#line 1899 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtInt, EsdCube); - } -#line 6176 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 270: -#line 1904 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtInt, Esd1D, true); - } -#line 6186 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 271: -#line 1909 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtInt, Esd2D, true); - } -#line 6196 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 272: -#line 1914 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtInt, EsdCube, true); - } -#line 6206 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 273: -#line 1919 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtUint, Esd1D); - } -#line 6216 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 274: -#line 1924 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtUint, Esd2D); - } 
-#line 6226 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 275: -#line 1929 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtUint, Esd3D); - } -#line 6236 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 276: -#line 1934 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtUint, EsdCube); - } -#line 6246 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 277: -#line 1939 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtUint, Esd1D, true); - } -#line 6256 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 278: -#line 1944 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtUint, Esd2D, true); - } -#line 6266 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 279: -#line 1949 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtUint, EsdCube, true); - } -#line 6276 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 280: -#line 1954 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtFloat, EsdRect); - } -#line 6286 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 281: -#line 1959 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtFloat, EsdRect, false, true); - } -#line 6296 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 282: -#line 1964 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtInt, EsdRect); - } -#line 6306 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 283: -#line 1969 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtUint, EsdRect); - } -#line 6316 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 284: -#line 1974 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtFloat, EsdBuffer); - } -#line 6326 
"MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 285: -#line 1979 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtInt, EsdBuffer); - } -#line 6336 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 286: -#line 1984 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtUint, EsdBuffer); - } -#line 6346 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 287: -#line 1989 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtFloat, Esd2D, false, false, true); - } -#line 6356 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 288: -#line 1994 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtInt, Esd2D, false, false, true); - } -#line 6366 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 289: -#line 1999 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtUint, Esd2D, false, false, true); - } -#line 6376 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 290: -#line 2004 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtFloat, Esd2D, true, false, true); - } -#line 6386 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 291: -#line 2009 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtInt, Esd2D, true, false, true); - } -#line 6396 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 292: -#line 2014 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtUint, Esd2D, true, false, true); - } -#line 6406 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 293: -#line 2019 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setPureSampler(false); - } -#line 6416 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 294: -#line 2024 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - 
(yyval.interm.type).sampler.setPureSampler(true); - } -#line 6426 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 295: -#line 2029 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtFloat, Esd1D); - } -#line 6436 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 296: -#line 2034 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtFloat, Esd2D); - } -#line 6446 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 297: -#line 2039 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtFloat, Esd3D); - } -#line 6456 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 298: -#line 2044 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtFloat, EsdCube); - } -#line 6466 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 299: -#line 2049 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtFloat, Esd1D, true); - } -#line 6476 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 300: -#line 2054 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtFloat, Esd2D, true); - } -#line 6486 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 301: -#line 2059 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtFloat, EsdCube, true); - } -#line 6496 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 302: -#line 2064 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtInt, Esd1D); - } -#line 6506 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 303: -#line 2069 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtInt, Esd2D); - } -#line 6516 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 304: -#line 2074 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - 
(yyval.interm.type).sampler.setTexture(EbtInt, Esd3D); - } -#line 6526 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 305: -#line 2079 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtInt, EsdCube); - } -#line 6536 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 306: -#line 2084 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtInt, Esd1D, true); - } -#line 6546 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 307: -#line 2089 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtInt, Esd2D, true); - } -#line 6556 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 308: -#line 2094 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtInt, EsdCube, true); - } -#line 6566 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 309: -#line 2099 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtUint, Esd1D); - } -#line 6576 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 310: -#line 2104 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtUint, Esd2D); - } -#line 6586 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 311: -#line 2109 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtUint, Esd3D); - } -#line 6596 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 312: -#line 2114 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtUint, EsdCube); - } -#line 6606 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 313: -#line 2119 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtUint, Esd1D, true); - } -#line 6616 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 314: -#line 2124 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - 
(yyval.interm.type).sampler.setTexture(EbtUint, Esd2D, true); - } -#line 6626 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 315: -#line 2129 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtUint, EsdCube, true); - } -#line 6636 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 316: -#line 2134 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtFloat, EsdRect); - } -#line 6646 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 317: -#line 2139 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtInt, EsdRect); - } -#line 6656 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 318: -#line 2144 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtUint, EsdRect); - } -#line 6666 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 319: -#line 2149 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtFloat, EsdBuffer); - } -#line 6676 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 320: -#line 2154 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtInt, EsdBuffer); - } -#line 6686 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 321: -#line 2159 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtUint, EsdBuffer); - } -#line 6696 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 322: -#line 2164 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtFloat, Esd2D, false, false, true); - } -#line 6706 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 323: -#line 2169 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtInt, Esd2D, false, false, true); - } -#line 6716 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 324: -#line 2174 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); 
- (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtUint, Esd2D, false, false, true); - } -#line 6726 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 325: -#line 2179 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtFloat, Esd2D, true, false, true); - } -#line 6736 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 326: -#line 2184 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtInt, Esd2D, true, false, true); - } -#line 6746 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 327: -#line 2189 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setTexture(EbtUint, Esd2D, true, false, true); - } -#line 6756 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 328: -#line 2194 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtFloat, Esd1D); - } -#line 6766 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 329: -#line 2199 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtInt, Esd1D); - } -#line 6776 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 330: -#line 2204 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtUint, Esd1D); - } -#line 6786 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 331: -#line 2209 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtFloat, Esd2D); - } -#line 6796 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 332: -#line 2214 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtInt, Esd2D); - } -#line 6806 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 333: -#line 2219 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtUint, Esd2D); - } -#line 6816 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 334: -#line 2224 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, 
parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtFloat, Esd3D); - } -#line 6826 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 335: -#line 2229 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtInt, Esd3D); - } -#line 6836 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 336: -#line 2234 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtUint, Esd3D); - } -#line 6846 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 337: -#line 2239 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtFloat, EsdRect); - } -#line 6856 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 338: -#line 2244 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtInt, EsdRect); - } -#line 6866 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 339: -#line 2249 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtUint, EsdRect); - } -#line 6876 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 340: -#line 2254 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtFloat, EsdCube); - } -#line 6886 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 341: -#line 2259 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtInt, EsdCube); - } -#line 6896 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 342: -#line 2264 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtUint, EsdCube); - } -#line 6906 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 343: -#line 2269 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtFloat, EsdBuffer); - } -#line 6916 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 344: -#line 2274 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, 
parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtInt, EsdBuffer); - } -#line 6926 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 345: -#line 2279 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtUint, EsdBuffer); - } -#line 6936 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 346: -#line 2284 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtFloat, Esd1D, true); - } -#line 6946 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 347: -#line 2289 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtInt, Esd1D, true); - } -#line 6956 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 348: -#line 2294 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtUint, Esd1D, true); - } -#line 6966 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 349: -#line 2299 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtFloat, Esd2D, true); - } -#line 6976 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 350: -#line 2304 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtInt, Esd2D, true); - } -#line 6986 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 351: -#line 2309 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtUint, Esd2D, true); - } -#line 6996 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 352: -#line 2314 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtFloat, EsdCube, true); - } -#line 7006 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 353: -#line 2319 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtInt, EsdCube, true); - } -#line 7016 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 354: -#line 2324 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - 
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtUint, EsdCube, true); - } -#line 7026 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 355: -#line 2329 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtFloat, Esd2D, false, false, true); - } -#line 7036 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 356: -#line 2334 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtInt, Esd2D, false, false, true); - } -#line 7046 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 357: -#line 2339 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtUint, Esd2D, false, false, true); - } -#line 7056 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 358: -#line 2344 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtFloat, Esd2D, true, false, true); - } -#line 7066 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 359: -#line 2349 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtInt, Esd2D, true, false, true); - } -#line 7076 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 360: -#line 2354 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setImage(EbtUint, Esd2D, true, false, true); - } -#line 7086 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 361: -#line 2359 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { // GL_OES_EGL_image_external - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.set(EbtFloat, Esd2D); - (yyval.interm.type).sampler.external = true; - } -#line 7097 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 362: -#line 2365 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setSubpass(EbtFloat); - } -#line 7108 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 363: -#line 2371 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input"); - 
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setSubpass(EbtFloat, true); - } -#line 7119 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 364: -#line 2377 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setSubpass(EbtInt); - } -#line 7130 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 365: -#line 2383 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setSubpass(EbtInt, true); - } -#line 7141 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 366: -#line 2389 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setSubpass(EbtUint); - } -#line 7152 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 367: -#line 2395 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtSampler; - (yyval.interm.type).sampler.setSubpass(EbtUint, true); - } -#line 7163 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 368: -#line 2401 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.type) = (yyvsp[0].interm.type); - (yyval.interm.type).qualifier.storage = parseContext.symbolTable.atGlobalLevel() ? EvqGlobal : EvqTemporary; - parseContext.structTypeCheck((yyval.interm.type).loc, (yyval.interm.type)); - } -#line 7173 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 369: -#line 2406 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - // - // This is for user defined type names. The lexical phase looked up the - // type. 
- // - if (const TVariable* variable = ((yyvsp[0].lex).symbol)->getAsVariable()) { - const TType& structure = variable->getType(); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - (yyval.interm.type).basicType = EbtStruct; - (yyval.interm.type).userDef = &structure; - } else - parseContext.error((yyvsp[0].lex).loc, "expected type name", (yyvsp[0].lex).string->c_str(), ""); - } -#line 7191 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 370: -#line 2422 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "highp precision qualifier"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - parseContext.handlePrecisionQualifier((yyvsp[0].lex).loc, (yyval.interm.type).qualifier, EpqHigh); - } -#line 7201 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 371: -#line 2427 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "mediump precision qualifier"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - parseContext.handlePrecisionQualifier((yyvsp[0].lex).loc, (yyval.interm.type).qualifier, EpqMedium); - } -#line 7211 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 372: -#line 2432 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "lowp precision qualifier"); - (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel()); - parseContext.handlePrecisionQualifier((yyvsp[0].lex).loc, (yyval.interm.type).qualifier, EpqLow); - } -#line 7221 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 373: -#line 2440 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { parseContext.nestedStructCheck((yyvsp[-2].lex).loc); } -#line 7227 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 374: -#line 2440 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - TType* structure = new TType((yyvsp[-1].interm.typeList), *(yyvsp[-4].lex).string); - parseContext.structArrayCheck((yyvsp[-4].lex).loc, *structure); - TVariable* userTypeDef = new TVariable((yyvsp[-4].lex).string, *structure, true); - if (! 
parseContext.symbolTable.insert(*userTypeDef)) - parseContext.error((yyvsp[-4].lex).loc, "redefinition", (yyvsp[-4].lex).string->c_str(), "struct"); - (yyval.interm.type).init((yyvsp[-5].lex).loc); - (yyval.interm.type).basicType = EbtStruct; - (yyval.interm.type).userDef = structure; - --parseContext.structNestingLevel; - } -#line 7243 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 375: -#line 2451 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { parseContext.nestedStructCheck((yyvsp[-1].lex).loc); } -#line 7249 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 376: -#line 2451 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - TType* structure = new TType((yyvsp[-1].interm.typeList), TString("")); - (yyval.interm.type).init((yyvsp[-4].lex).loc); - (yyval.interm.type).basicType = EbtStruct; - (yyval.interm.type).userDef = structure; - --parseContext.structNestingLevel; - } -#line 7261 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 377: -#line 2461 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.typeList) = (yyvsp[0].interm.typeList); - } -#line 7269 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 378: -#line 2464 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.typeList) = (yyvsp[-1].interm.typeList); - for (unsigned int i = 0; i < (yyvsp[0].interm.typeList)->size(); ++i) { - for (unsigned int j = 0; j < (yyval.interm.typeList)->size(); ++j) { - if ((*(yyval.interm.typeList))[j].type->getFieldName() == (*(yyvsp[0].interm.typeList))[i].type->getFieldName()) - parseContext.error((*(yyvsp[0].interm.typeList))[i].loc, "duplicate member name:", "", (*(yyvsp[0].interm.typeList))[i].type->getFieldName().c_str()); - } - (yyval.interm.typeList)->push_back((*(yyvsp[0].interm.typeList))[i]); - } - } -#line 7284 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 379: -#line 2477 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - if ((yyvsp[-2].interm.type).arraySizes) { - parseContext.profileRequires((yyvsp[-2].interm.type).loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type"); - parseContext.profileRequires((yyvsp[-2].interm.type).loc, EEsProfile, 300, 0, "arrayed type"); - if (parseContext.profile == EEsProfile) - parseContext.arraySizeRequiredCheck((yyvsp[-2].interm.type).loc, *(yyvsp[-2].interm.type).arraySizes); - } - - (yyval.interm.typeList) = (yyvsp[-1].interm.typeList); - - parseContext.voidErrorCheck((yyvsp[-2].interm.type).loc, (*(yyvsp[-1].interm.typeList))[0].type->getFieldName(), (yyvsp[-2].interm.type).basicType); - parseContext.precisionQualifierCheck((yyvsp[-2].interm.type).loc, (yyvsp[-2].interm.type).basicType, (yyvsp[-2].interm.type).qualifier); - - for (unsigned int i = 0; i < (yyval.interm.typeList)->size(); ++i) { - parseContext.arrayDimCheck((yyvsp[-2].interm.type).loc, (*(yyval.interm.typeList))[i].type, (yyvsp[-2].interm.type).arraySizes); - (*(yyval.interm.typeList))[i].type->mergeType((yyvsp[-2].interm.type)); - } - } -#line 7307 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 380: -#line 2495 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.globalQualifierFixCheck((yyvsp[-3].interm.type).loc, (yyvsp[-3].interm.type).qualifier); - if ((yyvsp[-2].interm.type).arraySizes) { - parseContext.profileRequires((yyvsp[-2].interm.type).loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type"); - parseContext.profileRequires((yyvsp[-2].interm.type).loc, 
EEsProfile, 300, 0, "arrayed type"); - if (parseContext.profile == EEsProfile) - parseContext.arraySizeRequiredCheck((yyvsp[-2].interm.type).loc, *(yyvsp[-2].interm.type).arraySizes); - } - - (yyval.interm.typeList) = (yyvsp[-1].interm.typeList); - - parseContext.checkNoShaderLayouts((yyvsp[-3].interm.type).loc, (yyvsp[-3].interm.type).shaderQualifiers); - parseContext.voidErrorCheck((yyvsp[-2].interm.type).loc, (*(yyvsp[-1].interm.typeList))[0].type->getFieldName(), (yyvsp[-2].interm.type).basicType); - parseContext.mergeQualifiers((yyvsp[-2].interm.type).loc, (yyvsp[-2].interm.type).qualifier, (yyvsp[-3].interm.type).qualifier, true); - parseContext.precisionQualifierCheck((yyvsp[-2].interm.type).loc, (yyvsp[-2].interm.type).basicType, (yyvsp[-2].interm.type).qualifier); - - for (unsigned int i = 0; i < (yyval.interm.typeList)->size(); ++i) { - parseContext.arrayDimCheck((yyvsp[-3].interm.type).loc, (*(yyval.interm.typeList))[i].type, (yyvsp[-2].interm.type).arraySizes); - (*(yyval.interm.typeList))[i].type->mergeType((yyvsp[-2].interm.type)); - } - } -#line 7333 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 381: -#line 2519 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.typeList) = new TTypeList; - (yyval.interm.typeList)->push_back((yyvsp[0].interm.typeLine)); - } -#line 7342 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 382: -#line 2523 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.typeList)->push_back((yyvsp[0].interm.typeLine)); - } -#line 7350 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 383: -#line 2529 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.typeLine).type = new TType(EbtVoid); - (yyval.interm.typeLine).loc = (yyvsp[0].lex).loc; - (yyval.interm.typeLine).type->setFieldName(*(yyvsp[0].lex).string); - } -#line 7360 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 384: -#line 2534 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.arrayDimCheck((yyvsp[-1].lex).loc, (yyvsp[0].interm).arraySizes, 0); - - (yyval.interm.typeLine).type = new TType(EbtVoid); - (yyval.interm.typeLine).loc = (yyvsp[-1].lex).loc; - (yyval.interm.typeLine).type->setFieldName(*(yyvsp[-1].lex).string); - (yyval.interm.typeLine).type->newArraySizes(*(yyvsp[0].interm).arraySizes); - } -#line 7373 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 385: -#line 2545 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); - } -#line 7381 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 386: -#line 2548 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - const char* initFeature = "{ } style initializers"; - parseContext.requireProfile((yyvsp[-2].lex).loc, ~EEsProfile, initFeature); - parseContext.profileRequires((yyvsp[-2].lex).loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, initFeature); - (yyval.interm.intermTypedNode) = (yyvsp[-1].interm.intermTypedNode); - } -#line 7392 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 387: -#line 2554 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - const char* initFeature = "{ } style initializers"; - parseContext.requireProfile((yyvsp[-3].lex).loc, ~EEsProfile, initFeature); - parseContext.profileRequires((yyvsp[-3].lex).loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, initFeature); - (yyval.interm.intermTypedNode) = 
(yyvsp[-2].interm.intermTypedNode); - } -#line 7403 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 388: -#line 2563 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.intermediate.growAggregate(0, (yyvsp[0].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)->getLoc()); - } -#line 7411 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 389: -#line 2566 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = parseContext.intermediate.growAggregate((yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)); - } -#line 7419 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 390: -#line 2572 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 7425 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 391: -#line 2576 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 7431 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 392: -#line 2577 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 7437 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 393: -#line 2583 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 7443 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 394: -#line 2584 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 7449 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 395: -#line 2585 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 7455 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 396: -#line 2586 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 7461 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 397: -#line 2587 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 7467 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 398: -#line 2588 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 7473 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 399: -#line 2589 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 7479 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 400: -#line 2593 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = 0; } -#line 7485 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 401: -#line 2594 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.symbolTable.push(); - ++parseContext.statementNestingLevel; - } -#line 7494 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 402: -#line 2598 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]); - --parseContext.statementNestingLevel; - } -#line 7503 "MachineIndependent/glslang_tab.cpp" /* 
yacc.c:1646 */ - break; - - case 403: -#line 2602 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - if ((yyvsp[-2].interm.intermNode) && (yyvsp[-2].interm.intermNode)->getAsAggregate()) - (yyvsp[-2].interm.intermNode)->getAsAggregate()->setOperator(EOpSequence); - (yyval.interm.intermNode) = (yyvsp[-2].interm.intermNode); - } -#line 7513 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 404: -#line 2610 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 7519 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 405: -#line 2611 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); } -#line 7525 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 406: -#line 2615 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - ++parseContext.controlFlowNestingLevel; - } -#line 7533 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 407: -#line 2618 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - --parseContext.controlFlowNestingLevel; - (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); - } -#line 7542 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 408: -#line 2622 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.symbolTable.push(); - ++parseContext.statementNestingLevel; - ++parseContext.controlFlowNestingLevel; - } -#line 7552 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 409: -#line 2627 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]); - --parseContext.statementNestingLevel; - --parseContext.controlFlowNestingLevel; - (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); - } -#line 7563 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 410: -#line 2636 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermNode) = 0; - } -#line 7571 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 411: -#line 2639 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - if ((yyvsp[-1].interm.intermNode) && (yyvsp[-1].interm.intermNode)->getAsAggregate()) - (yyvsp[-1].interm.intermNode)->getAsAggregate()->setOperator(EOpSequence); - (yyval.interm.intermNode) = (yyvsp[-1].interm.intermNode); - } -#line 7581 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 412: -#line 2647 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermNode) = parseContext.intermediate.makeAggregate((yyvsp[0].interm.intermNode)); - if ((yyvsp[0].interm.intermNode) && (yyvsp[0].interm.intermNode)->getAsBranchNode() && ((yyvsp[0].interm.intermNode)->getAsBranchNode()->getFlowOp() == EOpCase || - (yyvsp[0].interm.intermNode)->getAsBranchNode()->getFlowOp() == EOpDefault)) { - parseContext.wrapupSwitchSubsequence(0, (yyvsp[0].interm.intermNode)); - (yyval.interm.intermNode) = 0; // start a fresh subsequence for what's after this case - } - } -#line 7594 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 413: -#line 2655 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - if ((yyvsp[0].interm.intermNode) && (yyvsp[0].interm.intermNode)->getAsBranchNode() && ((yyvsp[0].interm.intermNode)->getAsBranchNode()->getFlowOp() == EOpCase || - (yyvsp[0].interm.intermNode)->getAsBranchNode()->getFlowOp() == EOpDefault)) { - 
parseContext.wrapupSwitchSubsequence((yyvsp[-1].interm.intermNode) ? (yyvsp[-1].interm.intermNode)->getAsAggregate() : 0, (yyvsp[0].interm.intermNode)); - (yyval.interm.intermNode) = 0; // start a fresh subsequence for what's after this case - } else - (yyval.interm.intermNode) = parseContext.intermediate.growAggregate((yyvsp[-1].interm.intermNode), (yyvsp[0].interm.intermNode)); - } -#line 7607 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 414: -#line 2666 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = 0; } -#line 7613 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 415: -#line 2667 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { (yyval.interm.intermNode) = static_cast((yyvsp[-1].interm.intermTypedNode)); } -#line 7619 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 416: -#line 2671 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.boolCheck((yyvsp[-4].lex).loc, (yyvsp[-2].interm.intermTypedNode)); - (yyval.interm.intermNode) = parseContext.intermediate.addSelection((yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.nodePair), (yyvsp[-4].lex).loc); - } -#line 7628 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 417: -#line 2678 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.nodePair).node1 = (yyvsp[-2].interm.intermNode); - (yyval.interm.nodePair).node2 = (yyvsp[0].interm.intermNode); - } -#line 7637 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 418: -#line 2682 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.nodePair).node1 = (yyvsp[0].interm.intermNode); - (yyval.interm.nodePair).node2 = 0; - } -#line 7646 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 419: -#line 2690 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); - parseContext.boolCheck((yyvsp[0].interm.intermTypedNode)->getLoc(), (yyvsp[0].interm.intermTypedNode)); - } -#line 7655 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 420: -#line 2694 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.boolCheck((yyvsp[-2].lex).loc, (yyvsp[-3].interm.type)); - - TType type((yyvsp[-3].interm.type)); - TIntermNode* initNode = parseContext.declareVariable((yyvsp[-2].lex).loc, *(yyvsp[-2].lex).string, (yyvsp[-3].interm.type), 0, (yyvsp[0].interm.intermTypedNode)); - if (initNode) - (yyval.interm.intermTypedNode) = initNode->getAsTyped(); - else - (yyval.interm.intermTypedNode) = 0; - } -#line 7670 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 421: -#line 2707 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - // start new switch sequence on the switch stack - ++parseContext.controlFlowNestingLevel; - ++parseContext.statementNestingLevel; - parseContext.switchSequenceStack.push_back(new TIntermSequence); - parseContext.switchLevel.push_back(parseContext.statementNestingLevel); - parseContext.symbolTable.push(); - } -#line 7683 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 422: -#line 2715 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermNode) = parseContext.addSwitch((yyvsp[-7].lex).loc, (yyvsp[-5].interm.intermTypedNode), (yyvsp[-1].interm.intermNode) ? 
(yyvsp[-1].interm.intermNode)->getAsAggregate() : 0); - delete parseContext.switchSequenceStack.back(); - parseContext.switchSequenceStack.pop_back(); - parseContext.switchLevel.pop_back(); - parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]); - --parseContext.statementNestingLevel; - --parseContext.controlFlowNestingLevel; - } -#line 7697 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 423: -#line 2727 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermNode) = 0; - } -#line 7705 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 424: -#line 2730 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); - } -#line 7713 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 425: -#line 2736 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermNode) = 0; - if (parseContext.switchLevel.size() == 0) - parseContext.error((yyvsp[-2].lex).loc, "cannot appear outside switch statement", "case", ""); - else if (parseContext.switchLevel.back() != parseContext.statementNestingLevel) - parseContext.error((yyvsp[-2].lex).loc, "cannot be nested inside control flow", "case", ""); - else { - parseContext.constantValueCheck((yyvsp[-1].interm.intermTypedNode), "case"); - parseContext.integerCheck((yyvsp[-1].interm.intermTypedNode), "case"); - (yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpCase, (yyvsp[-1].interm.intermTypedNode), (yyvsp[-2].lex).loc); - } - } -#line 7730 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 426: -#line 2748 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermNode) = 0; - if (parseContext.switchLevel.size() == 0) - parseContext.error((yyvsp[-1].lex).loc, "cannot appear outside switch statement", "default", ""); - else if (parseContext.switchLevel.back() != parseContext.statementNestingLevel) - parseContext.error((yyvsp[-1].lex).loc, "cannot be nested inside control flow", "default", ""); - else - (yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpDefault, (yyvsp[-1].lex).loc); - } -#line 7744 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 427: -#line 2760 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - if (! 
parseContext.limits.whileLoops) - parseContext.error((yyvsp[-1].lex).loc, "while loops not available", "limitation", ""); - parseContext.symbolTable.push(); - ++parseContext.loopNestingLevel; - ++parseContext.statementNestingLevel; - ++parseContext.controlFlowNestingLevel; - } -#line 7757 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 428: -#line 2768 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]); - (yyval.interm.intermNode) = parseContext.intermediate.addLoop((yyvsp[0].interm.intermNode), (yyvsp[-2].interm.intermTypedNode), 0, true, (yyvsp[-5].lex).loc); - --parseContext.loopNestingLevel; - --parseContext.statementNestingLevel; - --parseContext.controlFlowNestingLevel; - } -#line 7769 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 429: -#line 2775 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - ++parseContext.loopNestingLevel; - ++parseContext.statementNestingLevel; - ++parseContext.controlFlowNestingLevel; - } -#line 7779 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 430: -#line 2780 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - if (! parseContext.limits.whileLoops) - parseContext.error((yyvsp[-7].lex).loc, "do-while loops not available", "limitation", ""); - - parseContext.boolCheck((yyvsp[0].lex).loc, (yyvsp[-2].interm.intermTypedNode)); - - (yyval.interm.intermNode) = parseContext.intermediate.addLoop((yyvsp[-5].interm.intermNode), (yyvsp[-2].interm.intermTypedNode), 0, false, (yyvsp[-4].lex).loc); - --parseContext.loopNestingLevel; - --parseContext.statementNestingLevel; - --parseContext.controlFlowNestingLevel; - } -#line 7795 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 431: -#line 2791 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.symbolTable.push(); - ++parseContext.loopNestingLevel; - ++parseContext.statementNestingLevel; - ++parseContext.controlFlowNestingLevel; - } -#line 7806 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 432: -#line 2797 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]); - (yyval.interm.intermNode) = parseContext.intermediate.makeAggregate((yyvsp[-3].interm.intermNode), (yyvsp[-5].lex).loc); - TIntermLoop* forLoop = parseContext.intermediate.addLoop((yyvsp[0].interm.intermNode), reinterpret_cast((yyvsp[-2].interm.nodePair).node1), reinterpret_cast((yyvsp[-2].interm.nodePair).node2), true, (yyvsp[-6].lex).loc); - if (! 
parseContext.limits.nonInductiveForLoops) - parseContext.inductiveLoopCheck((yyvsp[-6].lex).loc, (yyvsp[-3].interm.intermNode), forLoop); - (yyval.interm.intermNode) = parseContext.intermediate.growAggregate((yyval.interm.intermNode), forLoop, (yyvsp[-6].lex).loc); - (yyval.interm.intermNode)->getAsAggregate()->setOperator(EOpSequence); - --parseContext.loopNestingLevel; - --parseContext.statementNestingLevel; - --parseContext.controlFlowNestingLevel; - } -#line 7823 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 433: -#line 2812 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); - } -#line 7831 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 434: -#line 2815 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); - } -#line 7839 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 435: -#line 2821 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); - } -#line 7847 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 436: -#line 2824 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermTypedNode) = 0; - } -#line 7855 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 437: -#line 2830 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.nodePair).node1 = (yyvsp[-1].interm.intermTypedNode); - (yyval.interm.nodePair).node2 = 0; - } -#line 7864 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 438: -#line 2834 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.nodePair).node1 = (yyvsp[-2].interm.intermTypedNode); - (yyval.interm.nodePair).node2 = (yyvsp[0].interm.intermTypedNode); - } -#line 7873 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 439: -#line 2841 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - if (parseContext.loopNestingLevel <= 0) - parseContext.error((yyvsp[-1].lex).loc, "continue statement only allowed in loops", "", ""); - (yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpContinue, (yyvsp[-1].lex).loc); - } -#line 7883 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 440: -#line 2846 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - if (parseContext.loopNestingLevel + parseContext.switchSequenceStack.size() <= 0) - parseContext.error((yyvsp[-1].lex).loc, "break statement only allowed in switch and loops", "", ""); - (yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpBreak, (yyvsp[-1].lex).loc); - } -#line 7893 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 441: -#line 2851 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpReturn, (yyvsp[-1].lex).loc); - if (parseContext.currentFunctionType->getBasicType() != EbtVoid) - parseContext.error((yyvsp[-1].lex).loc, "non-void function must return a value", "return", ""); - if (parseContext.inMain) - parseContext.postEntryPointReturn = true; - } -#line 7905 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 442: -#line 2858 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermNode) = parseContext.handleReturnValue((yyvsp[-2].lex).loc, (yyvsp[-1].interm.intermTypedNode)); - } -#line 7913 "MachineIndependent/glslang_tab.cpp" /* 
yacc.c:1646 */ - break; - - case 443: -#line 2861 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.requireStage((yyvsp[-1].lex).loc, EShLangFragment, "discard"); - (yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpKill, (yyvsp[-1].lex).loc); - } -#line 7922 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 444: -#line 2870 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); - parseContext.intermediate.setTreeRoot((yyval.interm.intermNode)); - } -#line 7931 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 445: -#line 2874 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - if ((yyvsp[0].interm.intermNode) != nullptr) { - (yyval.interm.intermNode) = parseContext.intermediate.growAggregate((yyvsp[-1].interm.intermNode), (yyvsp[0].interm.intermNode)); - parseContext.intermediate.setTreeRoot((yyval.interm.intermNode)); - } - } -#line 7942 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 446: -#line 2883 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); - } -#line 7950 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 447: -#line 2886 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); - } -#line 7958 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 448: -#line 2889 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - parseContext.requireProfile((yyvsp[0].lex).loc, ~EEsProfile, "extraneous semicolon"); - parseContext.profileRequires((yyvsp[0].lex).loc, ~EEsProfile, 460, nullptr, "extraneous semicolon"); - (yyval.interm.intermNode) = nullptr; - } -#line 7968 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 449: -#line 2897 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - (yyvsp[0].interm).function = parseContext.handleFunctionDeclarator((yyvsp[0].interm).loc, *(yyvsp[0].interm).function, false /* not prototype */); - (yyvsp[0].interm).intermNode = parseContext.handleFunctionDefinition((yyvsp[0].interm).loc, *(yyvsp[0].interm).function); - } -#line 7977 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - case 450: -#line 2901 "MachineIndependent/glslang.y" /* yacc.c:1646 */ - { - // May be best done as post process phase on intermediate code - if (parseContext.currentFunctionType->getBasicType() != EbtVoid && ! parseContext.functionReturnsValue) - parseContext.error((yyvsp[-2].interm).loc, "function does not return a value:", "", (yyvsp[-2].interm).function->getName().c_str()); - parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]); - (yyval.interm.intermNode) = parseContext.intermediate.growAggregate((yyvsp[-2].interm).intermNode, (yyvsp[0].interm.intermNode)); - parseContext.intermediate.setAggregateOperator((yyval.interm.intermNode), EOpFunction, (yyvsp[-2].interm).function->getType(), (yyvsp[-2].interm).loc); - (yyval.interm.intermNode)->getAsAggregate()->setName((yyvsp[-2].interm).function->getMangledName().c_str()); - - // store the pragma information for debug and optimize and other vendor specific - // information. 
This information can be queried from the parse tree - (yyval.interm.intermNode)->getAsAggregate()->setOptimize(parseContext.contextPragma.optimize); - (yyval.interm.intermNode)->getAsAggregate()->setDebug(parseContext.contextPragma.debug); - (yyval.interm.intermNode)->getAsAggregate()->addToPragmaTable(parseContext.contextPragma.pragmaTable); - } -#line 7997 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - break; - - -#line 8001 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */ - default: break; - } - /* User semantic actions sometimes alter yychar, and that requires - that yytoken be updated with the new translation. We take the - approach of translating immediately before every use of yytoken. - One alternative is translating here after every semantic action, - but that translation would be missed if the semantic action invokes - YYABORT, YYACCEPT, or YYERROR immediately after altering yychar or - if it invokes YYBACKUP. In the case of YYABORT or YYACCEPT, an - incorrect destructor might then be invoked immediately. In the - case of YYERROR or YYBACKUP, subsequent parser actions might lead - to an incorrect destructor call or verbose syntax error message - before the lookahead is translated. */ - YY_SYMBOL_PRINT ("-> $$ =", yyr1[yyn], &yyval, &yyloc); - - YYPOPSTACK (yylen); - yylen = 0; - YY_STACK_PRINT (yyss, yyssp); - - *++yyvsp = yyval; - - /* Now 'shift' the result of the reduction. Determine what state - that goes to, based on the state we popped back to and the rule - number reduced by. */ - - yyn = yyr1[yyn]; - - yystate = yypgoto[yyn - YYNTOKENS] + *yyssp; - if (0 <= yystate && yystate <= YYLAST && yycheck[yystate] == *yyssp) - yystate = yytable[yystate]; - else - yystate = yydefgoto[yyn - YYNTOKENS]; - - goto yynewstate; - - -/*--------------------------------------. -| yyerrlab -- here on detecting error. | -`--------------------------------------*/ -yyerrlab: - /* Make sure we have latest lookahead translation. See comments at - user semantic actions for why this is necessary. */ - yytoken = yychar == YYEMPTY ? YYEMPTY : YYTRANSLATE (yychar); - - /* If not already recovering from an error, report this error. */ - if (!yyerrstatus) - { - ++yynerrs; -#if ! YYERROR_VERBOSE - yyerror (pParseContext, YY_("syntax error")); -#else -# define YYSYNTAX_ERROR yysyntax_error (&yymsg_alloc, &yymsg, \ - yyssp, yytoken) - { - char const *yymsgp = YY_("syntax error"); - int yysyntax_error_status; - yysyntax_error_status = YYSYNTAX_ERROR; - if (yysyntax_error_status == 0) - yymsgp = yymsg; - else if (yysyntax_error_status == 1) - { - if (yymsg != yymsgbuf) - YYSTACK_FREE (yymsg); - yymsg = (char *) YYSTACK_ALLOC (yymsg_alloc); - if (!yymsg) - { - yymsg = yymsgbuf; - yymsg_alloc = sizeof yymsgbuf; - yysyntax_error_status = 2; - } - else - { - yysyntax_error_status = YYSYNTAX_ERROR; - yymsgp = yymsg; - } - } - yyerror (pParseContext, yymsgp); - if (yysyntax_error_status == 2) - goto yyexhaustedlab; - } -# undef YYSYNTAX_ERROR -#endif - } - - - - if (yyerrstatus == 3) - { - /* If just tried and failed to reuse lookahead token after an - error, discard it. */ - - if (yychar <= YYEOF) - { - /* Return failure if at end of input. */ - if (yychar == YYEOF) - YYABORT; - } - else - { - yydestruct ("Error: discarding", - yytoken, &yylval, pParseContext); - yychar = YYEMPTY; - } - } - - /* Else will try to reuse lookahead token after shifting the error - token. */ - goto yyerrlab1; - - -/*---------------------------------------------------. 
-| yyerrorlab -- error raised explicitly by YYERROR. | -`---------------------------------------------------*/ -yyerrorlab: - - /* Pacify compilers like GCC when the user code never invokes - YYERROR and the label yyerrorlab therefore never appears in user - code. */ - if (/*CONSTCOND*/ 0) - goto yyerrorlab; - - /* Do not reclaim the symbols of the rule whose action triggered - this YYERROR. */ - YYPOPSTACK (yylen); - yylen = 0; - YY_STACK_PRINT (yyss, yyssp); - yystate = *yyssp; - goto yyerrlab1; - - -/*-------------------------------------------------------------. -| yyerrlab1 -- common code for both syntax error and YYERROR. | -`-------------------------------------------------------------*/ -yyerrlab1: - yyerrstatus = 3; /* Each real token shifted decrements this. */ - - for (;;) - { - yyn = yypact[yystate]; - if (!yypact_value_is_default (yyn)) - { - yyn += YYTERROR; - if (0 <= yyn && yyn <= YYLAST && yycheck[yyn] == YYTERROR) - { - yyn = yytable[yyn]; - if (0 < yyn) - break; - } - } - - /* Pop the current state because it cannot handle the error token. */ - if (yyssp == yyss) - YYABORT; - - - yydestruct ("Error: popping", - yystos[yystate], yyvsp, pParseContext); - YYPOPSTACK (1); - yystate = *yyssp; - YY_STACK_PRINT (yyss, yyssp); - } - - YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN - *++yyvsp = yylval; - YY_IGNORE_MAYBE_UNINITIALIZED_END - - - /* Shift the error token. */ - YY_SYMBOL_PRINT ("Shifting", yystos[yyn], yyvsp, yylsp); - - yystate = yyn; - goto yynewstate; - - -/*-------------------------------------. -| yyacceptlab -- YYACCEPT comes here. | -`-------------------------------------*/ -yyacceptlab: - yyresult = 0; - goto yyreturn; - -/*-----------------------------------. -| yyabortlab -- YYABORT comes here. | -`-----------------------------------*/ -yyabortlab: - yyresult = 1; - goto yyreturn; - -#if !defined yyoverflow || YYERROR_VERBOSE -/*-------------------------------------------------. -| yyexhaustedlab -- memory exhaustion comes here. | -`-------------------------------------------------*/ -yyexhaustedlab: - yyerror (pParseContext, YY_("memory exhausted")); - yyresult = 2; - /* Fall through. */ -#endif - -yyreturn: - if (yychar != YYEMPTY) - { - /* Make sure we have latest lookahead translation. See comments at - user semantic actions for why this is necessary. */ - yytoken = YYTRANSLATE (yychar); - yydestruct ("Cleanup: discarding lookahead", - yytoken, &yylval, pParseContext); - } - /* Do not reclaim the symbols of the rule whose action triggered - this YYABORT or YYACCEPT. */ - YYPOPSTACK (yylen); - YY_STACK_PRINT (yyss, yyssp); - while (yyssp != yyss) - { - yydestruct ("Cleanup: popping", - yystos[*yyssp], yyvsp, pParseContext); - YYPOPSTACK (1); - } -#ifndef yyoverflow - if (yyss != yyssa) - YYSTACK_FREE (yyss); -#endif -#if YYERROR_VERBOSE - if (yymsg != yymsgbuf) - YYSTACK_FREE (yymsg); -#endif - return yyresult; -} -#line 2918 "MachineIndependent/glslang.y" /* yacc.c:1906 */ - diff --git a/deps/glslang/glslang/MachineIndependent/glslang_tab.cpp.h b/deps/glslang/glslang/MachineIndependent/glslang_tab.cpp.h deleted file mode 100644 index b8e78d6c..00000000 --- a/deps/glslang/glslang/MachineIndependent/glslang_tab.cpp.h +++ /dev/null @@ -1,397 +0,0 @@ -/* A Bison parser, made by GNU Bison 3.0.4. */ - -/* Bison interface for Yacc-like parsers in C - - Copyright (C) 1984, 1989-1990, 2000-2015 Free Software Foundation, Inc. 
- - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . */ - -/* As a special exception, you may create a larger work that contains - part or all of the Bison parser skeleton and distribute that work - under terms of your choice, so long as that work isn't itself a - parser generator using the skeleton or a modified version thereof - as a parser skeleton. Alternatively, if you modify or redistribute - the parser skeleton itself, you may (at your option) remove this - special exception, which will cause the skeleton and the resulting - Bison output files to be licensed under the GNU General Public - License without this special exception. - - This special exception was added by the Free Software Foundation in - version 2.2 of Bison. */ - -#ifndef YY_YY_MACHINEINDEPENDENT_GLSLANG_TAB_CPP_H_INCLUDED -# define YY_YY_MACHINEINDEPENDENT_GLSLANG_TAB_CPP_H_INCLUDED -/* Debug traces. */ -#ifndef YYDEBUG -# define YYDEBUG 1 -#endif -#if YYDEBUG -extern int yydebug; -#endif - -/* Token type. */ -#ifndef YYTOKENTYPE -# define YYTOKENTYPE - enum yytokentype - { - ATTRIBUTE = 258, - VARYING = 259, - CONST = 260, - BOOL = 261, - FLOAT = 262, - DOUBLE = 263, - INT = 264, - UINT = 265, - INT64_T = 266, - UINT64_T = 267, - INT16_T = 268, - UINT16_T = 269, - FLOAT16_T = 270, - BREAK = 271, - CONTINUE = 272, - DO = 273, - ELSE = 274, - FOR = 275, - IF = 276, - DISCARD = 277, - RETURN = 278, - SWITCH = 279, - CASE = 280, - DEFAULT = 281, - SUBROUTINE = 282, - BVEC2 = 283, - BVEC3 = 284, - BVEC4 = 285, - IVEC2 = 286, - IVEC3 = 287, - IVEC4 = 288, - I64VEC2 = 289, - I64VEC3 = 290, - I64VEC4 = 291, - UVEC2 = 292, - UVEC3 = 293, - UVEC4 = 294, - U64VEC2 = 295, - U64VEC3 = 296, - U64VEC4 = 297, - VEC2 = 298, - VEC3 = 299, - VEC4 = 300, - MAT2 = 301, - MAT3 = 302, - MAT4 = 303, - CENTROID = 304, - IN = 305, - OUT = 306, - INOUT = 307, - UNIFORM = 308, - PATCH = 309, - SAMPLE = 310, - BUFFER = 311, - SHARED = 312, - COHERENT = 313, - VOLATILE = 314, - RESTRICT = 315, - READONLY = 316, - WRITEONLY = 317, - DVEC2 = 318, - DVEC3 = 319, - DVEC4 = 320, - DMAT2 = 321, - DMAT3 = 322, - DMAT4 = 323, - F16VEC2 = 324, - F16VEC3 = 325, - F16VEC4 = 326, - F16MAT2 = 327, - F16MAT3 = 328, - F16MAT4 = 329, - I16VEC2 = 330, - I16VEC3 = 331, - I16VEC4 = 332, - U16VEC2 = 333, - U16VEC3 = 334, - U16VEC4 = 335, - NOPERSPECTIVE = 336, - FLAT = 337, - SMOOTH = 338, - LAYOUT = 339, - __EXPLICITINTERPAMD = 340, - MAT2X2 = 341, - MAT2X3 = 342, - MAT2X4 = 343, - MAT3X2 = 344, - MAT3X3 = 345, - MAT3X4 = 346, - MAT4X2 = 347, - MAT4X3 = 348, - MAT4X4 = 349, - DMAT2X2 = 350, - DMAT2X3 = 351, - DMAT2X4 = 352, - DMAT3X2 = 353, - DMAT3X3 = 354, - DMAT3X4 = 355, - DMAT4X2 = 356, - DMAT4X3 = 357, - DMAT4X4 = 358, - F16MAT2X2 = 359, - F16MAT2X3 = 360, - F16MAT2X4 = 361, - F16MAT3X2 = 362, - F16MAT3X3 = 363, - F16MAT3X4 = 364, - F16MAT4X2 = 365, - F16MAT4X3 = 366, - F16MAT4X4 = 367, - ATOMIC_UINT = 368, - SAMPLER1D = 369, - SAMPLER2D = 370, - SAMPLER3D = 371, - SAMPLERCUBE = 372, - SAMPLER1DSHADOW = 373, - 
SAMPLER2DSHADOW = 374, - SAMPLERCUBESHADOW = 375, - SAMPLER1DARRAY = 376, - SAMPLER2DARRAY = 377, - SAMPLER1DARRAYSHADOW = 378, - SAMPLER2DARRAYSHADOW = 379, - ISAMPLER1D = 380, - ISAMPLER2D = 381, - ISAMPLER3D = 382, - ISAMPLERCUBE = 383, - ISAMPLER1DARRAY = 384, - ISAMPLER2DARRAY = 385, - USAMPLER1D = 386, - USAMPLER2D = 387, - USAMPLER3D = 388, - USAMPLERCUBE = 389, - USAMPLER1DARRAY = 390, - USAMPLER2DARRAY = 391, - SAMPLER2DRECT = 392, - SAMPLER2DRECTSHADOW = 393, - ISAMPLER2DRECT = 394, - USAMPLER2DRECT = 395, - SAMPLERBUFFER = 396, - ISAMPLERBUFFER = 397, - USAMPLERBUFFER = 398, - SAMPLERCUBEARRAY = 399, - SAMPLERCUBEARRAYSHADOW = 400, - ISAMPLERCUBEARRAY = 401, - USAMPLERCUBEARRAY = 402, - SAMPLER2DMS = 403, - ISAMPLER2DMS = 404, - USAMPLER2DMS = 405, - SAMPLER2DMSARRAY = 406, - ISAMPLER2DMSARRAY = 407, - USAMPLER2DMSARRAY = 408, - SAMPLEREXTERNALOES = 409, - SAMPLER = 410, - SAMPLERSHADOW = 411, - TEXTURE1D = 412, - TEXTURE2D = 413, - TEXTURE3D = 414, - TEXTURECUBE = 415, - TEXTURE1DARRAY = 416, - TEXTURE2DARRAY = 417, - ITEXTURE1D = 418, - ITEXTURE2D = 419, - ITEXTURE3D = 420, - ITEXTURECUBE = 421, - ITEXTURE1DARRAY = 422, - ITEXTURE2DARRAY = 423, - UTEXTURE1D = 424, - UTEXTURE2D = 425, - UTEXTURE3D = 426, - UTEXTURECUBE = 427, - UTEXTURE1DARRAY = 428, - UTEXTURE2DARRAY = 429, - TEXTURE2DRECT = 430, - ITEXTURE2DRECT = 431, - UTEXTURE2DRECT = 432, - TEXTUREBUFFER = 433, - ITEXTUREBUFFER = 434, - UTEXTUREBUFFER = 435, - TEXTURECUBEARRAY = 436, - ITEXTURECUBEARRAY = 437, - UTEXTURECUBEARRAY = 438, - TEXTURE2DMS = 439, - ITEXTURE2DMS = 440, - UTEXTURE2DMS = 441, - TEXTURE2DMSARRAY = 442, - ITEXTURE2DMSARRAY = 443, - UTEXTURE2DMSARRAY = 444, - SUBPASSINPUT = 445, - SUBPASSINPUTMS = 446, - ISUBPASSINPUT = 447, - ISUBPASSINPUTMS = 448, - USUBPASSINPUT = 449, - USUBPASSINPUTMS = 450, - IMAGE1D = 451, - IIMAGE1D = 452, - UIMAGE1D = 453, - IMAGE2D = 454, - IIMAGE2D = 455, - UIMAGE2D = 456, - IMAGE3D = 457, - IIMAGE3D = 458, - UIMAGE3D = 459, - IMAGE2DRECT = 460, - IIMAGE2DRECT = 461, - UIMAGE2DRECT = 462, - IMAGECUBE = 463, - IIMAGECUBE = 464, - UIMAGECUBE = 465, - IMAGEBUFFER = 466, - IIMAGEBUFFER = 467, - UIMAGEBUFFER = 468, - IMAGE1DARRAY = 469, - IIMAGE1DARRAY = 470, - UIMAGE1DARRAY = 471, - IMAGE2DARRAY = 472, - IIMAGE2DARRAY = 473, - UIMAGE2DARRAY = 474, - IMAGECUBEARRAY = 475, - IIMAGECUBEARRAY = 476, - UIMAGECUBEARRAY = 477, - IMAGE2DMS = 478, - IIMAGE2DMS = 479, - UIMAGE2DMS = 480, - IMAGE2DMSARRAY = 481, - IIMAGE2DMSARRAY = 482, - UIMAGE2DMSARRAY = 483, - STRUCT = 484, - VOID = 485, - WHILE = 486, - IDENTIFIER = 487, - TYPE_NAME = 488, - FLOATCONSTANT = 489, - DOUBLECONSTANT = 490, - INTCONSTANT = 491, - UINTCONSTANT = 492, - INT64CONSTANT = 493, - UINT64CONSTANT = 494, - INT16CONSTANT = 495, - UINT16CONSTANT = 496, - BOOLCONSTANT = 497, - FLOAT16CONSTANT = 498, - LEFT_OP = 499, - RIGHT_OP = 500, - INC_OP = 501, - DEC_OP = 502, - LE_OP = 503, - GE_OP = 504, - EQ_OP = 505, - NE_OP = 506, - AND_OP = 507, - OR_OP = 508, - XOR_OP = 509, - MUL_ASSIGN = 510, - DIV_ASSIGN = 511, - ADD_ASSIGN = 512, - MOD_ASSIGN = 513, - LEFT_ASSIGN = 514, - RIGHT_ASSIGN = 515, - AND_ASSIGN = 516, - XOR_ASSIGN = 517, - OR_ASSIGN = 518, - SUB_ASSIGN = 519, - LEFT_PAREN = 520, - RIGHT_PAREN = 521, - LEFT_BRACKET = 522, - RIGHT_BRACKET = 523, - LEFT_BRACE = 524, - RIGHT_BRACE = 525, - DOT = 526, - COMMA = 527, - COLON = 528, - EQUAL = 529, - SEMICOLON = 530, - BANG = 531, - DASH = 532, - TILDE = 533, - PLUS = 534, - STAR = 535, - SLASH = 536, - PERCENT = 537, - LEFT_ANGLE = 538, - RIGHT_ANGLE = 539, - 
VERTICAL_BAR = 540, - CARET = 541, - AMPERSAND = 542, - QUESTION = 543, - INVARIANT = 544, - PRECISE = 545, - HIGH_PRECISION = 546, - MEDIUM_PRECISION = 547, - LOW_PRECISION = 548, - PRECISION = 549, - PACKED = 550, - RESOURCE = 551, - SUPERP = 552 - }; -#endif - -/* Value type. */ -#if ! defined YYSTYPE && ! defined YYSTYPE_IS_DECLARED - -union YYSTYPE -{ -#line 68 "MachineIndependent/glslang.y" /* yacc.c:1909 */ - - struct { - glslang::TSourceLoc loc; - union { - glslang::TString *string; - int i; - unsigned int u; - long long i64; - unsigned long long u64; - bool b; - double d; - }; - glslang::TSymbol* symbol; - } lex; - struct { - glslang::TSourceLoc loc; - glslang::TOperator op; - union { - TIntermNode* intermNode; - glslang::TIntermNodePair nodePair; - glslang::TIntermTyped* intermTypedNode; - }; - union { - glslang::TPublicType type; - glslang::TFunction* function; - glslang::TParameter param; - glslang::TTypeLoc typeLine; - glslang::TTypeList* typeList; - glslang::TArraySizes* arraySizes; - glslang::TIdentifierList* identifierList; - }; - } interm; - -#line 386 "MachineIndependent/glslang_tab.cpp.h" /* yacc.c:1909 */ -}; - -typedef union YYSTYPE YYSTYPE; -# define YYSTYPE_IS_TRIVIAL 1 -# define YYSTYPE_IS_DECLARED 1 -#endif - - - -int yyparse (glslang::TParseContext* pParseContext); - -#endif /* !YY_YY_MACHINEINDEPENDENT_GLSLANG_TAB_CPP_H_INCLUDED */ diff --git a/deps/glslang/glslang/MachineIndependent/intermOut.cpp b/deps/glslang/glslang/MachineIndependent/intermOut.cpp deleted file mode 100644 index 30240fcc..00000000 --- a/deps/glslang/glslang/MachineIndependent/intermOut.cpp +++ /dev/null @@ -1,1144 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2012-2016 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. 
-// - -#include "localintermediate.h" -#include "../Include/InfoSink.h" - -#ifdef _MSC_VER -#include -#else -#include -#endif - -namespace { - -bool IsInfinity(double x) { -#ifdef _MSC_VER - switch (_fpclass(x)) { - case _FPCLASS_NINF: - case _FPCLASS_PINF: - return true; - default: - return false; - } -#else - return std::isinf(x); -#endif -} - -bool IsNan(double x) { -#ifdef _MSC_VER - switch (_fpclass(x)) { - case _FPCLASS_SNAN: - case _FPCLASS_QNAN: - return true; - default: - return false; - } -#else - return std::isnan(x); -#endif -} - -} - -namespace glslang { - -// -// Two purposes: -// 1. Show an example of how to iterate tree. Functions can -// also directly call Traverse() on children themselves to -// have finer grained control over the process than shown here. -// See the last function for how to get started. -// 2. Print out a text based description of the tree. -// - -// -// Use this class to carry along data from node to node in -// the traversal -// -class TOutputTraverser : public TIntermTraverser { -public: - TOutputTraverser(TInfoSink& i) : infoSink(i) { } - - virtual bool visitBinary(TVisit, TIntermBinary* node); - virtual bool visitUnary(TVisit, TIntermUnary* node); - virtual bool visitAggregate(TVisit, TIntermAggregate* node); - virtual bool visitSelection(TVisit, TIntermSelection* node); - virtual void visitConstantUnion(TIntermConstantUnion* node); - virtual void visitSymbol(TIntermSymbol* node); - virtual bool visitLoop(TVisit, TIntermLoop* node); - virtual bool visitBranch(TVisit, TIntermBranch* node); - virtual bool visitSwitch(TVisit, TIntermSwitch* node); - - TInfoSink& infoSink; -protected: - TOutputTraverser(TOutputTraverser&); - TOutputTraverser& operator=(TOutputTraverser&); -}; - -// -// Helper functions for printing, not part of traversing. -// - -static void OutputTreeText(TInfoSink& infoSink, const TIntermNode* node, const int depth) -{ - int i; - - infoSink.debug << node->getLoc().string << ":"; - if (node->getLoc().line) - infoSink.debug << node->getLoc().line; - else - infoSink.debug << "? "; - - for (i = 0; i < depth; ++i) - infoSink.debug << " "; -} - -// -// The rest of the file are the traversal functions. The last one -// is the one that starts the traversal. -// -// Return true from interior nodes to have the external traversal -// continue on to children. If you process children yourself, -// return false. 
-// - -bool TOutputTraverser::visitBinary(TVisit /* visit */, TIntermBinary* node) -{ - TInfoSink& out = infoSink; - - OutputTreeText(out, node, depth); - - switch (node->getOp()) { - case EOpAssign: out.debug << "move second child to first child"; break; - case EOpAddAssign: out.debug << "add second child into first child"; break; - case EOpSubAssign: out.debug << "subtract second child into first child"; break; - case EOpMulAssign: out.debug << "multiply second child into first child"; break; - case EOpVectorTimesMatrixAssign: out.debug << "matrix mult second child into first child"; break; - case EOpVectorTimesScalarAssign: out.debug << "vector scale second child into first child"; break; - case EOpMatrixTimesScalarAssign: out.debug << "matrix scale second child into first child"; break; - case EOpMatrixTimesMatrixAssign: out.debug << "matrix mult second child into first child"; break; - case EOpDivAssign: out.debug << "divide second child into first child"; break; - case EOpModAssign: out.debug << "mod second child into first child"; break; - case EOpAndAssign: out.debug << "and second child into first child"; break; - case EOpInclusiveOrAssign: out.debug << "or second child into first child"; break; - case EOpExclusiveOrAssign: out.debug << "exclusive or second child into first child"; break; - case EOpLeftShiftAssign: out.debug << "left shift second child into first child"; break; - case EOpRightShiftAssign: out.debug << "right shift second child into first child"; break; - - case EOpIndexDirect: out.debug << "direct index"; break; - case EOpIndexIndirect: out.debug << "indirect index"; break; - case EOpIndexDirectStruct: - out.debug << (*node->getLeft()->getType().getStruct())[node->getRight()->getAsConstantUnion()->getConstArray()[0].getIConst()].type->getFieldName(); - out.debug << ": direct index for structure"; break; - case EOpVectorSwizzle: out.debug << "vector swizzle"; break; - case EOpMatrixSwizzle: out.debug << "matrix swizzle"; break; - - case EOpAdd: out.debug << "add"; break; - case EOpSub: out.debug << "subtract"; break; - case EOpMul: out.debug << "component-wise multiply"; break; - case EOpDiv: out.debug << "divide"; break; - case EOpMod: out.debug << "mod"; break; - case EOpRightShift: out.debug << "right-shift"; break; - case EOpLeftShift: out.debug << "left-shift"; break; - case EOpAnd: out.debug << "bitwise and"; break; - case EOpInclusiveOr: out.debug << "inclusive-or"; break; - case EOpExclusiveOr: out.debug << "exclusive-or"; break; - case EOpEqual: out.debug << "Compare Equal"; break; - case EOpNotEqual: out.debug << "Compare Not Equal"; break; - case EOpLessThan: out.debug << "Compare Less Than"; break; - case EOpGreaterThan: out.debug << "Compare Greater Than"; break; - case EOpLessThanEqual: out.debug << "Compare Less Than or Equal"; break; - case EOpGreaterThanEqual: out.debug << "Compare Greater Than or Equal"; break; - case EOpVectorEqual: out.debug << "Equal"; break; - case EOpVectorNotEqual: out.debug << "NotEqual"; break; - - case EOpVectorTimesScalar: out.debug << "vector-scale"; break; - case EOpVectorTimesMatrix: out.debug << "vector-times-matrix"; break; - case EOpMatrixTimesVector: out.debug << "matrix-times-vector"; break; - case EOpMatrixTimesScalar: out.debug << "matrix-scale"; break; - case EOpMatrixTimesMatrix: out.debug << "matrix-multiply"; break; - - case EOpLogicalOr: out.debug << "logical-or"; break; - case EOpLogicalXor: out.debug << "logical-xor"; break; - case EOpLogicalAnd: out.debug << "logical-and"; break; - default: out.debug << 
""; - } - - out.debug << " (" << node->getCompleteString() << ")"; - - out.debug << "\n"; - - return true; -} - -bool TOutputTraverser::visitUnary(TVisit /* visit */, TIntermUnary* node) -{ - TInfoSink& out = infoSink; - - OutputTreeText(out, node, depth); - - switch (node->getOp()) { - case EOpNegative: out.debug << "Negate value"; break; - case EOpVectorLogicalNot: - case EOpLogicalNot: out.debug << "Negate conditional"; break; - case EOpBitwiseNot: out.debug << "Bitwise not"; break; - - case EOpPostIncrement: out.debug << "Post-Increment"; break; - case EOpPostDecrement: out.debug << "Post-Decrement"; break; - case EOpPreIncrement: out.debug << "Pre-Increment"; break; - case EOpPreDecrement: out.debug << "Pre-Decrement"; break; - - case EOpConvIntToBool: out.debug << "Convert int to bool"; break; - case EOpConvUintToBool: out.debug << "Convert uint to bool"; break; - case EOpConvFloatToBool: out.debug << "Convert float to bool"; break; - case EOpConvDoubleToBool: out.debug << "Convert double to bool"; break; - case EOpConvInt64ToBool: out.debug << "Convert int64 to bool"; break; - case EOpConvUint64ToBool: out.debug << "Convert uint64 to bool"; break; - case EOpConvIntToFloat: out.debug << "Convert int to float"; break; - case EOpConvUintToFloat: out.debug << "Convert uint to float"; break; - case EOpConvDoubleToFloat: out.debug << "Convert double to float"; break; - case EOpConvInt64ToFloat: out.debug << "Convert int64 to float"; break; - case EOpConvUint64ToFloat: out.debug << "Convert uint64 to float"; break; - case EOpConvBoolToFloat: out.debug << "Convert bool to float"; break; - case EOpConvUintToInt: out.debug << "Convert uint to int"; break; - case EOpConvFloatToInt: out.debug << "Convert float to int"; break; - case EOpConvDoubleToInt: out.debug << "Convert double to int"; break; - case EOpConvBoolToInt: out.debug << "Convert bool to int"; break; - case EOpConvInt64ToInt: out.debug << "Convert int64 to int"; break; - case EOpConvUint64ToInt: out.debug << "Convert uint64 to int"; break; - case EOpConvIntToUint: out.debug << "Convert int to uint"; break; - case EOpConvFloatToUint: out.debug << "Convert float to uint"; break; - case EOpConvDoubleToUint: out.debug << "Convert double to uint"; break; - case EOpConvBoolToUint: out.debug << "Convert bool to uint"; break; - case EOpConvInt64ToUint: out.debug << "Convert int64 to uint"; break; - case EOpConvUint64ToUint: out.debug << "Convert uint64 to uint"; break; - case EOpConvIntToDouble: out.debug << "Convert int to double"; break; - case EOpConvUintToDouble: out.debug << "Convert uint to double"; break; - case EOpConvFloatToDouble: out.debug << "Convert float to double"; break; - case EOpConvBoolToDouble: out.debug << "Convert bool to double"; break; - case EOpConvInt64ToDouble: out.debug << "Convert int64 to double"; break; - case EOpConvUint64ToDouble: out.debug << "Convert uint64 to double"; break; - case EOpConvBoolToInt64: out.debug << "Convert bool to int64"; break; - case EOpConvIntToInt64: out.debug << "Convert int to int64"; break; - case EOpConvUintToInt64: out.debug << "Convert uint to int64"; break; - case EOpConvFloatToInt64: out.debug << "Convert float to int64"; break; - case EOpConvDoubleToInt64: out.debug << "Convert double to int64"; break; - case EOpConvUint64ToInt64: out.debug << "Convert uint64 to int64"; break; - case EOpConvBoolToUint64: out.debug << "Convert bool to uint64"; break; - case EOpConvIntToUint64: out.debug << "Convert int to uint64"; break; - case EOpConvUintToUint64: out.debug << "Convert uint 
to uint64"; break; - case EOpConvFloatToUint64: out.debug << "Convert float to uint64"; break; - case EOpConvDoubleToUint64: out.debug << "Convert double to uint64"; break; - case EOpConvInt64ToUint64: out.debug << "Convert uint64 to uint64"; break; - - case EOpRadians: out.debug << "radians"; break; - case EOpDegrees: out.debug << "degrees"; break; - case EOpSin: out.debug << "sine"; break; - case EOpCos: out.debug << "cosine"; break; - case EOpTan: out.debug << "tangent"; break; - case EOpAsin: out.debug << "arc sine"; break; - case EOpAcos: out.debug << "arc cosine"; break; - case EOpAtan: out.debug << "arc tangent"; break; - case EOpSinh: out.debug << "hyp. sine"; break; - case EOpCosh: out.debug << "hyp. cosine"; break; - case EOpTanh: out.debug << "hyp. tangent"; break; - case EOpAsinh: out.debug << "arc hyp. sine"; break; - case EOpAcosh: out.debug << "arc hyp. cosine"; break; - case EOpAtanh: out.debug << "arc hyp. tangent"; break; - - case EOpExp: out.debug << "exp"; break; - case EOpLog: out.debug << "log"; break; - case EOpExp2: out.debug << "exp2"; break; - case EOpLog2: out.debug << "log2"; break; - case EOpSqrt: out.debug << "sqrt"; break; - case EOpInverseSqrt: out.debug << "inverse sqrt"; break; - - case EOpAbs: out.debug << "Absolute value"; break; - case EOpSign: out.debug << "Sign"; break; - case EOpFloor: out.debug << "Floor"; break; - case EOpTrunc: out.debug << "trunc"; break; - case EOpRound: out.debug << "round"; break; - case EOpRoundEven: out.debug << "roundEven"; break; - case EOpCeil: out.debug << "Ceiling"; break; - case EOpFract: out.debug << "Fraction"; break; - - case EOpIsNan: out.debug << "isnan"; break; - case EOpIsInf: out.debug << "isinf"; break; - - case EOpFloatBitsToInt: out.debug << "floatBitsToInt"; break; - case EOpFloatBitsToUint:out.debug << "floatBitsToUint"; break; - case EOpIntBitsToFloat: out.debug << "intBitsToFloat"; break; - case EOpUintBitsToFloat:out.debug << "uintBitsToFloat"; break; - case EOpDoubleBitsToInt64: out.debug << "doubleBitsToInt64"; break; - case EOpDoubleBitsToUint64: out.debug << "doubleBitsToUint64"; break; - case EOpInt64BitsToDouble: out.debug << "int64BitsToDouble"; break; - case EOpUint64BitsToDouble: out.debug << "uint64BitsToDouble"; break; -#ifdef AMD_EXTENSIONS - case EOpFloat16BitsToInt16: out.debug << "float16BitsToInt16"; break; - case EOpFloat16BitsToUint16: out.debug << "float16BitsToUint16"; break; - case EOpInt16BitsToFloat16: out.debug << "int16BitsToFloat16"; break; - case EOpUint16BitsToFloat16: out.debug << "uint16BitsToFloat16"; break; -#endif - - case EOpPackSnorm2x16: out.debug << "packSnorm2x16"; break; - case EOpUnpackSnorm2x16:out.debug << "unpackSnorm2x16"; break; - case EOpPackUnorm2x16: out.debug << "packUnorm2x16"; break; - case EOpUnpackUnorm2x16:out.debug << "unpackUnorm2x16"; break; - case EOpPackHalf2x16: out.debug << "packHalf2x16"; break; - case EOpUnpackHalf2x16: out.debug << "unpackHalf2x16"; break; - - case EOpPackSnorm4x8: out.debug << "PackSnorm4x8"; break; - case EOpUnpackSnorm4x8: out.debug << "UnpackSnorm4x8"; break; - case EOpPackUnorm4x8: out.debug << "PackUnorm4x8"; break; - case EOpUnpackUnorm4x8: out.debug << "UnpackUnorm4x8"; break; - case EOpPackDouble2x32: out.debug << "PackDouble2x32"; break; - case EOpUnpackDouble2x32: out.debug << "UnpackDouble2x32"; break; - - case EOpPackInt2x32: out.debug << "packInt2x32"; break; - case EOpUnpackInt2x32: out.debug << "unpackInt2x32"; break; - case EOpPackUint2x32: out.debug << "packUint2x32"; break; - case EOpUnpackUint2x32: 
out.debug << "unpackUint2x32"; break; - -#ifdef AMD_EXTENSIONS - case EOpPackInt2x16: out.debug << "packInt2x16"; break; - case EOpUnpackInt2x16: out.debug << "unpackInt2x16"; break; - case EOpPackUint2x16: out.debug << "packUint2x16"; break; - case EOpUnpackUint2x16: out.debug << "unpackUint2x16"; break; - - case EOpPackInt4x16: out.debug << "packInt4x16"; break; - case EOpUnpackInt4x16: out.debug << "unpackInt4x16"; break; - case EOpPackUint4x16: out.debug << "packUint4x16"; break; - case EOpUnpackUint4x16: out.debug << "unpackUint4x16"; break; - - case EOpPackFloat2x16: out.debug << "packFloat2x16"; break; - case EOpUnpackFloat2x16: out.debug << "unpackFloat2x16"; break; -#endif - - case EOpLength: out.debug << "length"; break; - case EOpNormalize: out.debug << "normalize"; break; - case EOpDPdx: out.debug << "dPdx"; break; - case EOpDPdy: out.debug << "dPdy"; break; - case EOpFwidth: out.debug << "fwidth"; break; - case EOpDPdxFine: out.debug << "dPdxFine"; break; - case EOpDPdyFine: out.debug << "dPdyFine"; break; - case EOpFwidthFine: out.debug << "fwidthFine"; break; - case EOpDPdxCoarse: out.debug << "dPdxCoarse"; break; - case EOpDPdyCoarse: out.debug << "dPdyCoarse"; break; - case EOpFwidthCoarse: out.debug << "fwidthCoarse"; break; - - case EOpInterpolateAtCentroid: out.debug << "interpolateAtCentroid"; break; - - case EOpDeterminant: out.debug << "determinant"; break; - case EOpMatrixInverse: out.debug << "inverse"; break; - case EOpTranspose: out.debug << "transpose"; break; - - case EOpAny: out.debug << "any"; break; - case EOpAll: out.debug << "all"; break; - - case EOpArrayLength: out.debug << "array length"; break; - - case EOpEmitStreamVertex: out.debug << "EmitStreamVertex"; break; - case EOpEndStreamPrimitive: out.debug << "EndStreamPrimitive"; break; - - case EOpAtomicCounterIncrement: out.debug << "AtomicCounterIncrement";break; - case EOpAtomicCounterDecrement: out.debug << "AtomicCounterDecrement";break; - case EOpAtomicCounter: out.debug << "AtomicCounter"; break; - - case EOpTextureQuerySize: out.debug << "textureSize"; break; - case EOpTextureQueryLod: out.debug << "textureQueryLod"; break; - case EOpTextureQueryLevels: out.debug << "textureQueryLevels"; break; - case EOpTextureQuerySamples: out.debug << "textureSamples"; break; - case EOpImageQuerySize: out.debug << "imageQuerySize"; break; - case EOpImageQuerySamples: out.debug << "imageQuerySamples"; break; - case EOpImageLoad: out.debug << "imageLoad"; break; - - case EOpBitFieldReverse: out.debug << "bitFieldReverse"; break; - case EOpBitCount: out.debug << "bitCount"; break; - case EOpFindLSB: out.debug << "findLSB"; break; - case EOpFindMSB: out.debug << "findMSB"; break; - - case EOpNoise: out.debug << "noise"; break; - - case EOpBallot: out.debug << "ballot"; break; - case EOpReadFirstInvocation: out.debug << "readFirstInvocation"; break; - - case EOpAnyInvocation: out.debug << "anyInvocation"; break; - case EOpAllInvocations: out.debug << "allInvocations"; break; - case EOpAllInvocationsEqual: out.debug << "allInvocationsEqual"; break; - - case EOpClip: out.debug << "clip"; break; - case EOpIsFinite: out.debug << "isfinite"; break; - case EOpLog10: out.debug << "log10"; break; - case EOpRcp: out.debug << "rcp"; break; - case EOpSaturate: out.debug << "saturate"; break; - - case EOpSparseTexelsResident: out.debug << "sparseTexelsResident"; break; - -#ifdef AMD_EXTENSIONS - case EOpMinInvocations: out.debug << "minInvocations"; break; - case EOpMaxInvocations: out.debug << "maxInvocations"; break; - case 
EOpAddInvocations: out.debug << "addInvocations"; break; - case EOpMinInvocationsNonUniform: out.debug << "minInvocationsNonUniform"; break; - case EOpMaxInvocationsNonUniform: out.debug << "maxInvocationsNonUniform"; break; - case EOpAddInvocationsNonUniform: out.debug << "addInvocationsNonUniform"; break; - - case EOpMinInvocationsInclusiveScan: out.debug << "minInvocationsInclusiveScan"; break; - case EOpMaxInvocationsInclusiveScan: out.debug << "maxInvocationsInclusiveScan"; break; - case EOpAddInvocationsInclusiveScan: out.debug << "addInvocationsInclusiveScan"; break; - case EOpMinInvocationsInclusiveScanNonUniform: out.debug << "minInvocationsInclusiveScanNonUniform"; break; - case EOpMaxInvocationsInclusiveScanNonUniform: out.debug << "maxInvocationsInclusiveScanNonUniform"; break; - case EOpAddInvocationsInclusiveScanNonUniform: out.debug << "addInvocationsInclusiveScanNonUniform"; break; - - case EOpMinInvocationsExclusiveScan: out.debug << "minInvocationsExclusiveScan"; break; - case EOpMaxInvocationsExclusiveScan: out.debug << "maxInvocationsExclusiveScan"; break; - case EOpAddInvocationsExclusiveScan: out.debug << "addInvocationsExclusiveScan"; break; - case EOpMinInvocationsExclusiveScanNonUniform: out.debug << "minInvocationsExclusiveScanNonUniform"; break; - case EOpMaxInvocationsExclusiveScanNonUniform: out.debug << "maxInvocationsExclusiveScanNonUniform"; break; - case EOpAddInvocationsExclusiveScanNonUniform: out.debug << "addInvocationsExclusiveScanNonUniform"; break; - - case EOpMbcnt: out.debug << "mbcnt"; break; - - case EOpCubeFaceIndex: out.debug << "cubeFaceIndex"; break; - case EOpCubeFaceCoord: out.debug << "cubeFaceCoord"; break; - - case EOpConvBoolToFloat16: out.debug << "Convert bool to float16"; break; - case EOpConvIntToFloat16: out.debug << "Convert int to float16"; break; - case EOpConvUintToFloat16: out.debug << "Convert uint to float16"; break; - case EOpConvFloatToFloat16: out.debug << "Convert float to float16"; break; - case EOpConvDoubleToFloat16: out.debug << "Convert double to float16"; break; - case EOpConvInt64ToFloat16: out.debug << "Convert int64 to float16"; break; - case EOpConvUint64ToFloat16: out.debug << "Convert uint64 to float16"; break; - case EOpConvFloat16ToBool: out.debug << "Convert float16 to bool"; break; - case EOpConvFloat16ToInt: out.debug << "Convert float16 to int"; break; - case EOpConvFloat16ToUint: out.debug << "Convert float16 to uint"; break; - case EOpConvFloat16ToFloat: out.debug << "Convert float16 to float"; break; - case EOpConvFloat16ToDouble: out.debug << "Convert float16 to double"; break; - case EOpConvFloat16ToInt64: out.debug << "Convert float16 to int64"; break; - case EOpConvFloat16ToUint64: out.debug << "Convert float16 to uint64"; break; - - case EOpConvBoolToInt16: out.debug << "Convert bool to int16"; break; - case EOpConvIntToInt16: out.debug << "Convert int to int16"; break; - case EOpConvUintToInt16: out.debug << "Convert uint to int16"; break; - case EOpConvFloatToInt16: out.debug << "Convert float to int16"; break; - case EOpConvDoubleToInt16: out.debug << "Convert double to int16"; break; - case EOpConvFloat16ToInt16: out.debug << "Convert float16 to int16"; break; - case EOpConvInt64ToInt16: out.debug << "Convert int64 to int16"; break; - case EOpConvUint64ToInt16: out.debug << "Convert uint64 to int16"; break; - case EOpConvUint16ToInt16: out.debug << "Convert uint16 to int16"; break; - case EOpConvInt16ToBool: out.debug << "Convert int16 to bool"; break; - case EOpConvInt16ToInt: out.debug << 
"Convert int16 to int"; break; - case EOpConvInt16ToUint: out.debug << "Convert int16 to uint"; break; - case EOpConvInt16ToFloat: out.debug << "Convert int16 to float"; break; - case EOpConvInt16ToDouble: out.debug << "Convert int16 to double"; break; - case EOpConvInt16ToFloat16: out.debug << "Convert int16 to float16"; break; - case EOpConvInt16ToInt64: out.debug << "Convert int16 to int64"; break; - case EOpConvInt16ToUint64: out.debug << "Convert int16 to uint64"; break; - - case EOpConvBoolToUint16: out.debug << "Convert bool to uint16"; break; - case EOpConvIntToUint16: out.debug << "Convert int to uint16"; break; - case EOpConvUintToUint16: out.debug << "Convert uint to uint16"; break; - case EOpConvFloatToUint16: out.debug << "Convert float to uint16"; break; - case EOpConvDoubleToUint16: out.debug << "Convert double to uint16"; break; - case EOpConvFloat16ToUint16: out.debug << "Convert float16 to uint16"; break; - case EOpConvInt64ToUint16: out.debug << "Convert int64 to uint16"; break; - case EOpConvUint64ToUint16: out.debug << "Convert uint64 to uint16"; break; - case EOpConvInt16ToUint16: out.debug << "Convert int16 to uint16"; break; - case EOpConvUint16ToBool: out.debug << "Convert uint16 to bool"; break; - case EOpConvUint16ToInt: out.debug << "Convert uint16 to int"; break; - case EOpConvUint16ToUint: out.debug << "Convert uint16 to uint"; break; - case EOpConvUint16ToFloat: out.debug << "Convert uint16 to float"; break; - case EOpConvUint16ToDouble: out.debug << "Convert uint16 to double"; break; - case EOpConvUint16ToFloat16: out.debug << "Convert uint16 to float16"; break; - case EOpConvUint16ToInt64: out.debug << "Convert uint16 to int64"; break; - case EOpConvUint16ToUint64: out.debug << "Convert uint16 to uint64"; break; -#endif - - default: out.debug.message(EPrefixError, "Bad unary op"); - } - - out.debug << " (" << node->getCompleteString() << ")"; - - out.debug << "\n"; - - return true; -} - -bool TOutputTraverser::visitAggregate(TVisit /* visit */, TIntermAggregate* node) -{ - TInfoSink& out = infoSink; - - if (node->getOp() == EOpNull) { - out.debug.message(EPrefixError, "node is still EOpNull!"); - return true; - } - - OutputTreeText(out, node, depth); - - switch (node->getOp()) { - case EOpSequence: out.debug << "Sequence\n"; return true; - case EOpLinkerObjects: out.debug << "Linker Objects\n"; return true; - case EOpComma: out.debug << "Comma"; break; - case EOpFunction: out.debug << "Function Definition: " << node->getName(); break; - case EOpFunctionCall: out.debug << "Function Call: " << node->getName(); break; - case EOpParameters: out.debug << "Function Parameters: "; break; - - case EOpConstructFloat: out.debug << "Construct float"; break; - case EOpConstructDouble:out.debug << "Construct double"; break; - - case EOpConstructVec2: out.debug << "Construct vec2"; break; - case EOpConstructVec3: out.debug << "Construct vec3"; break; - case EOpConstructVec4: out.debug << "Construct vec4"; break; - case EOpConstructDVec2: out.debug << "Construct dvec2"; break; - case EOpConstructDVec3: out.debug << "Construct dvec3"; break; - case EOpConstructDVec4: out.debug << "Construct dvec4"; break; - case EOpConstructBool: out.debug << "Construct bool"; break; - case EOpConstructBVec2: out.debug << "Construct bvec2"; break; - case EOpConstructBVec3: out.debug << "Construct bvec3"; break; - case EOpConstructBVec4: out.debug << "Construct bvec4"; break; - case EOpConstructInt: out.debug << "Construct int"; break; - case EOpConstructIVec2: out.debug << "Construct 
ivec2"; break; - case EOpConstructIVec3: out.debug << "Construct ivec3"; break; - case EOpConstructIVec4: out.debug << "Construct ivec4"; break; - case EOpConstructUint: out.debug << "Construct uint"; break; - case EOpConstructUVec2: out.debug << "Construct uvec2"; break; - case EOpConstructUVec3: out.debug << "Construct uvec3"; break; - case EOpConstructUVec4: out.debug << "Construct uvec4"; break; - case EOpConstructInt64: out.debug << "Construct int64_t"; break; - case EOpConstructI64Vec2: out.debug << "Construct i64vec2"; break; - case EOpConstructI64Vec3: out.debug << "Construct i64vec3"; break; - case EOpConstructI64Vec4: out.debug << "Construct i64vec4"; break; - case EOpConstructUint64: out.debug << "Construct uint64_t"; break; - case EOpConstructU64Vec2: out.debug << "Construct u64vec2"; break; - case EOpConstructU64Vec3: out.debug << "Construct u64vec3"; break; - case EOpConstructU64Vec4: out.debug << "Construct u64vec4"; break; -#ifdef AMD_EXTENSIONS - case EOpConstructInt16: out.debug << "Construct int16_t"; break; - case EOpConstructI16Vec2: out.debug << "Construct i16vec2"; break; - case EOpConstructI16Vec3: out.debug << "Construct i16vec3"; break; - case EOpConstructI16Vec4: out.debug << "Construct i16vec4"; break; - case EOpConstructUint16: out.debug << "Construct uint16_t"; break; - case EOpConstructU16Vec2: out.debug << "Construct u16vec2"; break; - case EOpConstructU16Vec3: out.debug << "Construct u16vec3"; break; - case EOpConstructU16Vec4: out.debug << "Construct u16vec4"; break; -#endif - case EOpConstructMat2x2: out.debug << "Construct mat2"; break; - case EOpConstructMat2x3: out.debug << "Construct mat2x3"; break; - case EOpConstructMat2x4: out.debug << "Construct mat2x4"; break; - case EOpConstructMat3x2: out.debug << "Construct mat3x2"; break; - case EOpConstructMat3x3: out.debug << "Construct mat3"; break; - case EOpConstructMat3x4: out.debug << "Construct mat3x4"; break; - case EOpConstructMat4x2: out.debug << "Construct mat4x2"; break; - case EOpConstructMat4x3: out.debug << "Construct mat4x3"; break; - case EOpConstructMat4x4: out.debug << "Construct mat4"; break; - case EOpConstructDMat2x2: out.debug << "Construct dmat2"; break; - case EOpConstructDMat2x3: out.debug << "Construct dmat2x3"; break; - case EOpConstructDMat2x4: out.debug << "Construct dmat2x4"; break; - case EOpConstructDMat3x2: out.debug << "Construct dmat3x2"; break; - case EOpConstructDMat3x3: out.debug << "Construct dmat3"; break; - case EOpConstructDMat3x4: out.debug << "Construct dmat3x4"; break; - case EOpConstructDMat4x2: out.debug << "Construct dmat4x2"; break; - case EOpConstructDMat4x3: out.debug << "Construct dmat4x3"; break; - case EOpConstructDMat4x4: out.debug << "Construct dmat4"; break; - case EOpConstructIMat2x2: out.debug << "Construct imat2"; break; - case EOpConstructIMat2x3: out.debug << "Construct imat2x3"; break; - case EOpConstructIMat2x4: out.debug << "Construct imat2x4"; break; - case EOpConstructIMat3x2: out.debug << "Construct imat3x2"; break; - case EOpConstructIMat3x3: out.debug << "Construct imat3"; break; - case EOpConstructIMat3x4: out.debug << "Construct imat3x4"; break; - case EOpConstructIMat4x2: out.debug << "Construct imat4x2"; break; - case EOpConstructIMat4x3: out.debug << "Construct imat4x3"; break; - case EOpConstructIMat4x4: out.debug << "Construct imat4"; break; - case EOpConstructUMat2x2: out.debug << "Construct umat2"; break; - case EOpConstructUMat2x3: out.debug << "Construct umat2x3"; break; - case EOpConstructUMat2x4: out.debug << "Construct 
umat2x4"; break; - case EOpConstructUMat3x2: out.debug << "Construct umat3x2"; break; - case EOpConstructUMat3x3: out.debug << "Construct umat3"; break; - case EOpConstructUMat3x4: out.debug << "Construct umat3x4"; break; - case EOpConstructUMat4x2: out.debug << "Construct umat4x2"; break; - case EOpConstructUMat4x3: out.debug << "Construct umat4x3"; break; - case EOpConstructUMat4x4: out.debug << "Construct umat4"; break; - case EOpConstructBMat2x2: out.debug << "Construct bmat2"; break; - case EOpConstructBMat2x3: out.debug << "Construct bmat2x3"; break; - case EOpConstructBMat2x4: out.debug << "Construct bmat2x4"; break; - case EOpConstructBMat3x2: out.debug << "Construct bmat3x2"; break; - case EOpConstructBMat3x3: out.debug << "Construct bmat3"; break; - case EOpConstructBMat3x4: out.debug << "Construct bmat3x4"; break; - case EOpConstructBMat4x2: out.debug << "Construct bmat4x2"; break; - case EOpConstructBMat4x3: out.debug << "Construct bmat4x3"; break; - case EOpConstructBMat4x4: out.debug << "Construct bmat4"; break; -#ifdef AMD_EXTENSIONS - case EOpConstructFloat16: out.debug << "Construct float16_t"; break; - case EOpConstructF16Vec2: out.debug << "Construct f16vec2"; break; - case EOpConstructF16Vec3: out.debug << "Construct f16vec3"; break; - case EOpConstructF16Vec4: out.debug << "Construct f16vec4"; break; - case EOpConstructF16Mat2x2: out.debug << "Construct f16mat2"; break; - case EOpConstructF16Mat2x3: out.debug << "Construct f16mat2x3"; break; - case EOpConstructF16Mat2x4: out.debug << "Construct f16mat2x4"; break; - case EOpConstructF16Mat3x2: out.debug << "Construct f16mat3x2"; break; - case EOpConstructF16Mat3x3: out.debug << "Construct f16mat3"; break; - case EOpConstructF16Mat3x4: out.debug << "Construct f16mat3x4"; break; - case EOpConstructF16Mat4x2: out.debug << "Construct f16mat4x2"; break; - case EOpConstructF16Mat4x3: out.debug << "Construct f16mat4x3"; break; - case EOpConstructF16Mat4x4: out.debug << "Construct f16mat4"; break; -#endif - case EOpConstructStruct: out.debug << "Construct structure"; break; - case EOpConstructTextureSampler: out.debug << "Construct combined texture-sampler"; break; - - case EOpLessThan: out.debug << "Compare Less Than"; break; - case EOpGreaterThan: out.debug << "Compare Greater Than"; break; - case EOpLessThanEqual: out.debug << "Compare Less Than or Equal"; break; - case EOpGreaterThanEqual: out.debug << "Compare Greater Than or Equal"; break; - case EOpVectorEqual: out.debug << "Equal"; break; - case EOpVectorNotEqual: out.debug << "NotEqual"; break; - - case EOpMod: out.debug << "mod"; break; - case EOpModf: out.debug << "modf"; break; - case EOpPow: out.debug << "pow"; break; - - case EOpAtan: out.debug << "arc tangent"; break; - - case EOpMin: out.debug << "min"; break; - case EOpMax: out.debug << "max"; break; - case EOpClamp: out.debug << "clamp"; break; - case EOpMix: out.debug << "mix"; break; - case EOpStep: out.debug << "step"; break; - case EOpSmoothStep: out.debug << "smoothstep"; break; - - case EOpDistance: out.debug << "distance"; break; - case EOpDot: out.debug << "dot-product"; break; - case EOpCross: out.debug << "cross-product"; break; - case EOpFaceForward: out.debug << "face-forward"; break; - case EOpReflect: out.debug << "reflect"; break; - case EOpRefract: out.debug << "refract"; break; - case EOpMul: out.debug << "component-wise multiply"; break; - case EOpOuterProduct: out.debug << "outer product"; break; - - case EOpEmitVertex: out.debug << "EmitVertex"; break; - case EOpEndPrimitive: out.debug << 
"EndPrimitive"; break; - - case EOpBarrier: out.debug << "Barrier"; break; - case EOpMemoryBarrier: out.debug << "MemoryBarrier"; break; - case EOpMemoryBarrierAtomicCounter: out.debug << "MemoryBarrierAtomicCounter"; break; - case EOpMemoryBarrierBuffer: out.debug << "MemoryBarrierBuffer"; break; - case EOpMemoryBarrierImage: out.debug << "MemoryBarrierImage"; break; - case EOpMemoryBarrierShared: out.debug << "MemoryBarrierShared"; break; - case EOpGroupMemoryBarrier: out.debug << "GroupMemoryBarrier"; break; - - case EOpReadInvocation: out.debug << "readInvocation"; break; - -#ifdef AMD_EXTENSIONS - case EOpSwizzleInvocations: out.debug << "swizzleInvocations"; break; - case EOpSwizzleInvocationsMasked: out.debug << "swizzleInvocationsMasked"; break; - case EOpWriteInvocation: out.debug << "writeInvocation"; break; - - case EOpMin3: out.debug << "min3"; break; - case EOpMax3: out.debug << "max3"; break; - case EOpMid3: out.debug << "mid3"; break; - - case EOpTime: out.debug << "time"; break; -#endif - - case EOpAtomicAdd: out.debug << "AtomicAdd"; break; - case EOpAtomicMin: out.debug << "AtomicMin"; break; - case EOpAtomicMax: out.debug << "AtomicMax"; break; - case EOpAtomicAnd: out.debug << "AtomicAnd"; break; - case EOpAtomicOr: out.debug << "AtomicOr"; break; - case EOpAtomicXor: out.debug << "AtomicXor"; break; - case EOpAtomicExchange: out.debug << "AtomicExchange"; break; - case EOpAtomicCompSwap: out.debug << "AtomicCompSwap"; break; - - case EOpAtomicCounterAdd: out.debug << "AtomicCounterAdd"; break; - case EOpAtomicCounterSubtract: out.debug << "AtomicCounterSubtract"; break; - case EOpAtomicCounterMin: out.debug << "AtomicCounterMin"; break; - case EOpAtomicCounterMax: out.debug << "AtomicCounterMax"; break; - case EOpAtomicCounterAnd: out.debug << "AtomicCounterAnd"; break; - case EOpAtomicCounterOr: out.debug << "AtomicCounterOr"; break; - case EOpAtomicCounterXor: out.debug << "AtomicCounterXor"; break; - case EOpAtomicCounterExchange: out.debug << "AtomicCounterExchange"; break; - case EOpAtomicCounterCompSwap: out.debug << "AtomicCounterCompSwap"; break; - - case EOpImageQuerySize: out.debug << "imageQuerySize"; break; - case EOpImageQuerySamples: out.debug << "imageQuerySamples"; break; - case EOpImageLoad: out.debug << "imageLoad"; break; - case EOpImageStore: out.debug << "imageStore"; break; - case EOpImageAtomicAdd: out.debug << "imageAtomicAdd"; break; - case EOpImageAtomicMin: out.debug << "imageAtomicMin"; break; - case EOpImageAtomicMax: out.debug << "imageAtomicMax"; break; - case EOpImageAtomicAnd: out.debug << "imageAtomicAnd"; break; - case EOpImageAtomicOr: out.debug << "imageAtomicOr"; break; - case EOpImageAtomicXor: out.debug << "imageAtomicXor"; break; - case EOpImageAtomicExchange: out.debug << "imageAtomicExchange"; break; - case EOpImageAtomicCompSwap: out.debug << "imageAtomicCompSwap"; break; -#ifdef AMD_EXTENSIONS - case EOpImageLoadLod: out.debug << "imageLoadLod"; break; - case EOpImageStoreLod: out.debug << "imageStoreLod"; break; -#endif - - case EOpTextureQuerySize: out.debug << "textureSize"; break; - case EOpTextureQueryLod: out.debug << "textureQueryLod"; break; - case EOpTextureQueryLevels: out.debug << "textureQueryLevels"; break; - case EOpTextureQuerySamples: out.debug << "textureSamples"; break; - case EOpTexture: out.debug << "texture"; break; - case EOpTextureProj: out.debug << "textureProj"; break; - case EOpTextureLod: out.debug << "textureLod"; break; - case EOpTextureOffset: out.debug << "textureOffset"; break; - case 
EOpTextureFetch: out.debug << "textureFetch"; break; - case EOpTextureFetchOffset: out.debug << "textureFetchOffset"; break; - case EOpTextureProjOffset: out.debug << "textureProjOffset"; break; - case EOpTextureLodOffset: out.debug << "textureLodOffset"; break; - case EOpTextureProjLod: out.debug << "textureProjLod"; break; - case EOpTextureProjLodOffset: out.debug << "textureProjLodOffset"; break; - case EOpTextureGrad: out.debug << "textureGrad"; break; - case EOpTextureGradOffset: out.debug << "textureGradOffset"; break; - case EOpTextureProjGrad: out.debug << "textureProjGrad"; break; - case EOpTextureProjGradOffset: out.debug << "textureProjGradOffset"; break; - case EOpTextureGather: out.debug << "textureGather"; break; - case EOpTextureGatherOffset: out.debug << "textureGatherOffset"; break; - case EOpTextureGatherOffsets: out.debug << "textureGatherOffsets"; break; - case EOpTextureClamp: out.debug << "textureClamp"; break; - case EOpTextureOffsetClamp: out.debug << "textureOffsetClamp"; break; - case EOpTextureGradClamp: out.debug << "textureGradClamp"; break; - case EOpTextureGradOffsetClamp: out.debug << "textureGradOffsetClamp"; break; -#ifdef AMD_EXTENSIONS - case EOpTextureGatherLod: out.debug << "textureGatherLod"; break; - case EOpTextureGatherLodOffset: out.debug << "textureGatherLodOffset"; break; - case EOpTextureGatherLodOffsets: out.debug << "textureGatherLodOffsets"; break; -#endif - - case EOpSparseTexture: out.debug << "sparseTexture"; break; - case EOpSparseTextureOffset: out.debug << "sparseTextureOffset"; break; - case EOpSparseTextureLod: out.debug << "sparseTextureLod"; break; - case EOpSparseTextureLodOffset: out.debug << "sparseTextureLodOffset"; break; - case EOpSparseTextureFetch: out.debug << "sparseTexelFetch"; break; - case EOpSparseTextureFetchOffset: out.debug << "sparseTexelFetchOffset"; break; - case EOpSparseTextureGrad: out.debug << "sparseTextureGrad"; break; - case EOpSparseTextureGradOffset: out.debug << "sparseTextureGradOffset"; break; - case EOpSparseTextureGather: out.debug << "sparseTextureGather"; break; - case EOpSparseTextureGatherOffset: out.debug << "sparseTextureGatherOffset"; break; - case EOpSparseTextureGatherOffsets: out.debug << "sparseTextureGatherOffsets"; break; - case EOpSparseImageLoad: out.debug << "sparseImageLoad"; break; - case EOpSparseTextureClamp: out.debug << "sparseTextureClamp"; break; - case EOpSparseTextureOffsetClamp: out.debug << "sparseTextureOffsetClamp"; break; - case EOpSparseTextureGradClamp: out.debug << "sparseTextureGradClamp"; break; - case EOpSparseTextureGradOffsetClamp: out.debug << "sparseTextureGradOffsetClam"; break; -#ifdef AMD_EXTENSIONS - case EOpSparseTextureGatherLod: out.debug << "sparseTextureGatherLod"; break; - case EOpSparseTextureGatherLodOffset: out.debug << "sparseTextureGatherLodOffset"; break; - case EOpSparseTextureGatherLodOffsets: out.debug << "sparseTextureGatherLodOffsets"; break; - case EOpSparseImageLoadLod: out.debug << "sparseImageLoadLod"; break; -#endif - - case EOpAddCarry: out.debug << "addCarry"; break; - case EOpSubBorrow: out.debug << "subBorrow"; break; - case EOpUMulExtended: out.debug << "uMulExtended"; break; - case EOpIMulExtended: out.debug << "iMulExtended"; break; - case EOpBitfieldExtract: out.debug << "bitfieldExtract"; break; - case EOpBitfieldInsert: out.debug << "bitfieldInsert"; break; - - case EOpFma: out.debug << "fma"; break; - case EOpFrexp: out.debug << "frexp"; break; - case EOpLdexp: out.debug << "ldexp"; break; - - case EOpInterpolateAtSample: 
out.debug << "interpolateAtSample"; break; - case EOpInterpolateAtOffset: out.debug << "interpolateAtOffset"; break; -#ifdef AMD_EXTENSIONS - case EOpInterpolateAtVertex: out.debug << "interpolateAtVertex"; break; -#endif - - case EOpSinCos: out.debug << "sincos"; break; - case EOpGenMul: out.debug << "mul"; break; - - case EOpAllMemoryBarrierWithGroupSync: out.debug << "AllMemoryBarrierWithGroupSync"; break; - case EOpGroupMemoryBarrierWithGroupSync: out.debug << "GroupMemoryBarrierWithGroupSync"; break; - case EOpWorkgroupMemoryBarrier: out.debug << "WorkgroupMemoryBarrier"; break; - case EOpWorkgroupMemoryBarrierWithGroupSync: out.debug << "WorkgroupMemoryBarrierWithGroupSync"; break; - - default: out.debug.message(EPrefixError, "Bad aggregation op"); - } - - if (node->getOp() != EOpSequence && node->getOp() != EOpParameters) - out.debug << " (" << node->getCompleteString() << ")"; - - out.debug << "\n"; - - return true; -} - -bool TOutputTraverser::visitSelection(TVisit /* visit */, TIntermSelection* node) -{ - TInfoSink& out = infoSink; - - OutputTreeText(out, node, depth); - - out.debug << "Test condition and select"; - out.debug << " (" << node->getCompleteString() << ")\n"; - - ++depth; - - OutputTreeText(out, node, depth); - out.debug << "Condition\n"; - node->getCondition()->traverse(this); - - OutputTreeText(out, node, depth); - if (node->getTrueBlock()) { - out.debug << "true case\n"; - node->getTrueBlock()->traverse(this); - } else - out.debug << "true case is null\n"; - - if (node->getFalseBlock()) { - OutputTreeText(out, node, depth); - out.debug << "false case\n"; - node->getFalseBlock()->traverse(this); - } - - --depth; - - return false; -} - -static void OutputConstantUnion(TInfoSink& out, const TIntermTyped* node, const TConstUnionArray& constUnion, int depth) -{ - int size = node->getType().computeNumComponents(); - - for (int i = 0; i < size; i++) { - OutputTreeText(out, node, depth); - switch (constUnion[i].getType()) { - case EbtBool: - if (constUnion[i].getBConst()) - out.debug << "true"; - else - out.debug << "false"; - - out.debug << " (" << "const bool" << ")"; - - out.debug << "\n"; - break; - case EbtFloat: - case EbtDouble: -#ifdef AMD_EXTENSIONS - case EbtFloat16: -#endif - { - const double value = constUnion[i].getDConst(); - // Print infinities and NaNs in a portable way. 
- if (IsInfinity(value)) { - if (value < 0) - out.debug << "-1.#INF\n"; - else - out.debug << "+1.#INF\n"; - } else if (IsNan(value)) - out.debug << "1.#IND\n"; - else { - const int maxSize = 300; - char buf[maxSize]; - snprintf(buf, maxSize, "%f", value); - - out.debug << buf << "\n"; - } - } - break; - case EbtInt: - { - const int maxSize = 300; - char buf[maxSize]; - snprintf(buf, maxSize, "%d (%s)", constUnion[i].getIConst(), "const int"); - - out.debug << buf << "\n"; - } - break; - case EbtUint: - { - const int maxSize = 300; - char buf[maxSize]; - snprintf(buf, maxSize, "%u (%s)", constUnion[i].getUConst(), "const uint"); - - out.debug << buf << "\n"; - } - break; - case EbtInt64: - { - const int maxSize = 300; - char buf[maxSize]; - snprintf(buf, maxSize, "%lld (%s)", constUnion[i].getI64Const(), "const int64_t"); - - out.debug << buf << "\n"; - } - break; - case EbtUint64: - { - const int maxSize = 300; - char buf[maxSize]; - snprintf(buf, maxSize, "%llu (%s)", constUnion[i].getU64Const(), "const uint64_t"); - - out.debug << buf << "\n"; - } - break; -#ifdef AMD_EXTENSIONS - case EbtInt16: - { - const int maxSize = 300; - char buf[maxSize]; - snprintf(buf, maxSize, "%d (%s)", constUnion[i].getIConst(), "const int16_t"); - - out.debug << buf << "\n"; - } - break; - case EbtUint16: - { - const int maxSize = 300; - char buf[maxSize]; - snprintf(buf, maxSize, "%u (%s)", constUnion[i].getUConst(), "const uint16_t"); - - out.debug << buf << "\n"; - } - break; -#endif - default: - out.info.message(EPrefixInternalError, "Unknown constant", node->getLoc()); - break; - } - } -} - -void TOutputTraverser::visitConstantUnion(TIntermConstantUnion* node) -{ - OutputTreeText(infoSink, node, depth); - infoSink.debug << "Constant:\n"; - - OutputConstantUnion(infoSink, node, node->getConstArray(), depth + 1); -} - -void TOutputTraverser::visitSymbol(TIntermSymbol* node) -{ - OutputTreeText(infoSink, node, depth); - - infoSink.debug << "'" << node->getName() << "' (" << node->getCompleteString() << ")\n"; - - if (! node->getConstArray().empty()) - OutputConstantUnion(infoSink, node, node->getConstArray(), depth + 1); - else if (node->getConstSubtree()) { - incrementDepth(node); - node->getConstSubtree()->traverse(this); - decrementDepth(); - } -} - -bool TOutputTraverser::visitLoop(TVisit /* visit */, TIntermLoop* node) -{ - TInfoSink& out = infoSink; - - OutputTreeText(out, node, depth); - - out.debug << "Loop with condition "; - if (! 
node->testFirst()) - out.debug << "not "; - out.debug << "tested first\n"; - - ++depth; - - OutputTreeText(infoSink, node, depth); - if (node->getTest()) { - out.debug << "Loop Condition\n"; - node->getTest()->traverse(this); - } else - out.debug << "No loop condition\n"; - - OutputTreeText(infoSink, node, depth); - if (node->getBody()) { - out.debug << "Loop Body\n"; - node->getBody()->traverse(this); - } else - out.debug << "No loop body\n"; - - if (node->getTerminal()) { - OutputTreeText(infoSink, node, depth); - out.debug << "Loop Terminal Expression\n"; - node->getTerminal()->traverse(this); - } - - --depth; - - return false; -} - -bool TOutputTraverser::visitBranch(TVisit /* visit*/, TIntermBranch* node) -{ - TInfoSink& out = infoSink; - - OutputTreeText(out, node, depth); - - switch (node->getFlowOp()) { - case EOpKill: out.debug << "Branch: Kill"; break; - case EOpBreak: out.debug << "Branch: Break"; break; - case EOpContinue: out.debug << "Branch: Continue"; break; - case EOpReturn: out.debug << "Branch: Return"; break; - case EOpCase: out.debug << "case: "; break; - case EOpDefault: out.debug << "default: "; break; - default: out.debug << "Branch: Unknown Branch"; break; - } - - if (node->getExpression()) { - out.debug << " with expression\n"; - ++depth; - node->getExpression()->traverse(this); - --depth; - } else - out.debug << "\n"; - - return false; -} - -bool TOutputTraverser::visitSwitch(TVisit /* visit */, TIntermSwitch* node) -{ - TInfoSink& out = infoSink; - - OutputTreeText(out, node, depth); - out.debug << "switch\n"; - - OutputTreeText(out, node, depth); - out.debug << "condition\n"; - ++depth; - node->getCondition()->traverse(this); - - --depth; - OutputTreeText(out, node, depth); - out.debug << "body\n"; - ++depth; - node->getBody()->traverse(this); - - --depth; - - return false; -} - -// -// This function is the one to call externally to start the traversal. -// Individual functions can be initialized to 0 to skip processing of that -// type of node. It's children will still be processed. 
-// -void TIntermediate::output(TInfoSink& infoSink, bool tree) -{ - infoSink.debug << "Shader version: " << version << "\n"; - if (requestedExtensions.size() > 0) { - for (auto extIt = requestedExtensions.begin(); extIt != requestedExtensions.end(); ++extIt) - infoSink.debug << "Requested " << *extIt << "\n"; - } - - if (xfbMode) - infoSink.debug << "in xfb mode\n"; - - switch (language) { - case EShLangVertex: - break; - - case EShLangTessControl: - infoSink.debug << "vertices = " << vertices << "\n"; - - if (inputPrimitive != ElgNone) - infoSink.debug << "input primitive = " << TQualifier::getGeometryString(inputPrimitive) << "\n"; - if (vertexSpacing != EvsNone) - infoSink.debug << "vertex spacing = " << TQualifier::getVertexSpacingString(vertexSpacing) << "\n"; - if (vertexOrder != EvoNone) - infoSink.debug << "triangle order = " << TQualifier::getVertexOrderString(vertexOrder) << "\n"; - break; - - case EShLangTessEvaluation: - infoSink.debug << "input primitive = " << TQualifier::getGeometryString(inputPrimitive) << "\n"; - infoSink.debug << "vertex spacing = " << TQualifier::getVertexSpacingString(vertexSpacing) << "\n"; - infoSink.debug << "triangle order = " << TQualifier::getVertexOrderString(vertexOrder) << "\n"; - if (pointMode) - infoSink.debug << "using point mode\n"; - break; - - case EShLangGeometry: - infoSink.debug << "invocations = " << invocations << "\n"; - infoSink.debug << "max_vertices = " << vertices << "\n"; - infoSink.debug << "input primitive = " << TQualifier::getGeometryString(inputPrimitive) << "\n"; - infoSink.debug << "output primitive = " << TQualifier::getGeometryString(outputPrimitive) << "\n"; - break; - - case EShLangFragment: - if (pixelCenterInteger) - infoSink.debug << "gl_FragCoord pixel center is integer\n"; - if (originUpperLeft) - infoSink.debug << "gl_FragCoord origin is upper left\n"; - if (earlyFragmentTests) - infoSink.debug << "using early_fragment_tests\n"; - if (postDepthCoverage) - infoSink.debug << "using post_depth_coverage\n"; - if (depthLayout != EldNone) - infoSink.debug << "using " << TQualifier::getLayoutDepthString(depthLayout) << "\n"; - if (blendEquations != 0) { - infoSink.debug << "using"; - // blendEquations is a mask, decode it - for (TBlendEquationShift be = (TBlendEquationShift)0; be < EBlendCount; be = (TBlendEquationShift)(be + 1)) { - if (blendEquations & (1 << be)) - infoSink.debug << " " << TQualifier::getBlendEquationString(be); - } - infoSink.debug << "\n"; - } - break; - - case EShLangCompute: - infoSink.debug << "local_size = (" << localSize[0] << ", " << localSize[1] << ", " << localSize[2] << ")\n"; - { - if (localSizeSpecId[0] != TQualifier::layoutNotSet || - localSizeSpecId[1] != TQualifier::layoutNotSet || - localSizeSpecId[2] != TQualifier::layoutNotSet) { - infoSink.debug << "local_size ids = (" << - localSizeSpecId[0] << ", " << - localSizeSpecId[1] << ", " << - localSizeSpecId[2] << ")\n"; - } - } - break; - - default: - break; - } - - if (treeRoot == 0 || ! tree) - return; - - TOutputTraverser it(infoSink); - - treeRoot->traverse(&it); -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/iomapper.cpp b/deps/glslang/glslang/MachineIndependent/iomapper.cpp deleted file mode 100644 index 59db1ade..00000000 --- a/deps/glslang/glslang/MachineIndependent/iomapper.cpp +++ /dev/null @@ -1,775 +0,0 @@ -// -// Copyright (C) 2016-2017 LunarG, Inc. -// -// All rights reserved. 
-// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#include "../Include/Common.h" -#include "../Include/InfoSink.h" -#include "iomapper.h" -#include "LiveTraverser.h" -#include "localintermediate.h" - -#include "gl_types.h" - -#include -#include - -// -// Map IO bindings. -// -// High-level algorithm for one stage: -// -// 1. Traverse all code (live+dead) to find the explicitly provided bindings. -// -// 2. Traverse (just) the live code to determine which non-provided bindings -// require auto-numbering. We do not auto-number dead ones. -// -// 3. Traverse all the code to apply the bindings: -// a. explicitly given bindings are offset according to their type -// b. implicit live bindings are auto-numbered into the holes, using -// any open binding slot. -// c. implicit dead bindings are left un-bound. -// - - -namespace glslang { - -struct TVarEntryInfo -{ - int id; - TIntermSymbol* symbol; - bool live; - int newBinding; - int newSet; - int newLocation; - int newComponent; - int newIndex; - - struct TOrderById - { - inline bool operator()(const TVarEntryInfo& l, const TVarEntryInfo& r) - { - return l.id < r.id; - } - }; - - struct TOrderByPriority - { - // ordering: - // 1) has both binding and set - // 2) has binding but no set - // 3) has no binding but set - // 4) has no binding and no set - inline bool operator()(const TVarEntryInfo& l, const TVarEntryInfo& r) - { - const TQualifier& lq = l.symbol->getQualifier(); - const TQualifier& rq = r.symbol->getQualifier(); - - // simple rules: - // has binding gives 2 points - // has set gives 1 point - // who has the most points is more important. - int lPoints = (lq.hasBinding() ? 2 : 0) + (lq.hasSet() ? 1 : 0); - int rPoints = (rq.hasBinding() ? 2 : 0) + (rq.hasSet() ? 
1 : 0); - - if (lPoints == rPoints) - return l.id < r.id; - return lPoints > rPoints; - } - }; -}; - - - -typedef std::vector TVarLiveMap; - -class TVarGatherTraverser : public TLiveTraverser -{ -public: - TVarGatherTraverser(const TIntermediate& i, bool traverseDeadCode, TVarLiveMap& inList, TVarLiveMap& outList, TVarLiveMap& uniformList) - : TLiveTraverser(i, traverseDeadCode, true, true, false) - , inputList(inList) - , outputList(outList) - , uniformList(uniformList) - { - } - - - virtual void visitSymbol(TIntermSymbol* base) - { - TVarLiveMap* target = nullptr; - if (base->getQualifier().storage == EvqVaryingIn) - target = &inputList; - else if (base->getQualifier().storage == EvqVaryingOut) - target = &outputList; - else if (base->getQualifier().isUniformOrBuffer()) - target = &uniformList; - - if (target) { - TVarEntryInfo ent = { base->getId(), base, !traverseAll }; - TVarLiveMap::iterator at = std::lower_bound(target->begin(), target->end(), ent, TVarEntryInfo::TOrderById()); - if (at != target->end() && at->id == ent.id) - at->live = at->live || !traverseAll; // update live state - else - target->insert(at, ent); - } - } - -private: - TVarLiveMap& inputList; - TVarLiveMap& outputList; - TVarLiveMap& uniformList; -}; - -class TVarSetTraverser : public TLiveTraverser -{ -public: - TVarSetTraverser(const TIntermediate& i, const TVarLiveMap& inList, const TVarLiveMap& outList, const TVarLiveMap& uniformList) - : TLiveTraverser(i, true, true, true, false) - , inputList(inList) - , outputList(outList) - , uniformList(uniformList) - { - } - - - virtual void visitSymbol(TIntermSymbol* base) - { - const TVarLiveMap* source; - if (base->getQualifier().storage == EvqVaryingIn) - source = &inputList; - else if (base->getQualifier().storage == EvqVaryingOut) - source = &outputList; - else if (base->getQualifier().isUniformOrBuffer()) - source = &uniformList; - else - return; - - TVarEntryInfo ent = { base->getId() }; - TVarLiveMap::const_iterator at = std::lower_bound(source->begin(), source->end(), ent, TVarEntryInfo::TOrderById()); - if (at == source->end()) - return; - - if (at->id != ent.id) - return; - - if (at->newBinding != -1) - base->getWritableType().getQualifier().layoutBinding = at->newBinding; - if (at->newSet != -1) - base->getWritableType().getQualifier().layoutSet = at->newSet; - if (at->newLocation != -1) - base->getWritableType().getQualifier().layoutLocation = at->newLocation; - if (at->newComponent != -1) - base->getWritableType().getQualifier().layoutComponent = at->newComponent; - if (at->newIndex != -1) - base->getWritableType().getQualifier().layoutIndex = at->newIndex; - } - - private: - const TVarLiveMap& inputList; - const TVarLiveMap& outputList; - const TVarLiveMap& uniformList; -}; - -struct TNotifyUniformAdaptor -{ - EShLanguage stage; - TIoMapResolver& resolver; - inline TNotifyUniformAdaptor(EShLanguage s, TIoMapResolver& r) - : stage(s) - , resolver(r) - { - } - inline void operator()(TVarEntryInfo& ent) - { - resolver.notifyBinding(stage, ent.symbol->getName().c_str(), ent.symbol->getType(), ent.live); - } -private: - TNotifyUniformAdaptor& operator=(TNotifyUniformAdaptor&); -}; - -struct TNotifyInOutAdaptor -{ - EShLanguage stage; - TIoMapResolver& resolver; - inline TNotifyInOutAdaptor(EShLanguage s, TIoMapResolver& r) - : stage(s) - , resolver(r) - { - } - inline void operator()(TVarEntryInfo& ent) - { - resolver.notifyInOut(stage, ent.symbol->getName().c_str(), ent.symbol->getType(), ent.live); - } -private: - TNotifyInOutAdaptor& 
operator=(TNotifyInOutAdaptor&); -}; - -struct TResolverUniformAdaptor -{ - TResolverUniformAdaptor(EShLanguage s, TIoMapResolver& r, TInfoSink& i, bool& e, TIntermediate& interm) - : stage(s) - , resolver(r) - , infoSink(i) - , error(e) - , intermediate(interm) - { - } - - inline void operator()(TVarEntryInfo& ent) - { - ent.newLocation = -1; - ent.newComponent = -1; - ent.newBinding = -1; - ent.newSet = -1; - ent.newIndex = -1; - const bool isValid = resolver.validateBinding(stage, ent.symbol->getName().c_str(), ent.symbol->getType(), - ent.live); - if (isValid) { - ent.newBinding = resolver.resolveBinding(stage, ent.symbol->getName().c_str(), ent.symbol->getType(), - ent.live); - ent.newSet = resolver.resolveSet(stage, ent.symbol->getName().c_str(), ent.symbol->getType(), ent.live); - ent.newLocation = resolver.resolveUniformLocation(stage, ent.symbol->getName().c_str(), - ent.symbol->getType(), ent.live); - - if (ent.newBinding != -1) { - if (ent.newBinding >= int(TQualifier::layoutBindingEnd)) { - TString err = "mapped binding out of range: " + ent.symbol->getName(); - - infoSink.info.message(EPrefixInternalError, err.c_str()); - error = true; - } - } - if (ent.newSet != -1) { - if (ent.newSet >= int(TQualifier::layoutSetEnd)) { - TString err = "mapped set out of range: " + ent.symbol->getName(); - - infoSink.info.message(EPrefixInternalError, err.c_str()); - error = true; - } - } - } else { - TString errorMsg = "Invalid binding: " + ent.symbol->getName(); - infoSink.info.message(EPrefixInternalError, errorMsg.c_str()); - error = true; - } - } - - EShLanguage stage; - TIoMapResolver& resolver; - TInfoSink& infoSink; - bool& error; - TIntermediate& intermediate; - -private: - TResolverUniformAdaptor& operator=(TResolverUniformAdaptor&); -}; - -struct TResolverInOutAdaptor -{ - TResolverInOutAdaptor(EShLanguage s, TIoMapResolver& r, TInfoSink& i, bool& e, TIntermediate& interm) - : stage(s) - , resolver(r) - , infoSink(i) - , error(e) - , intermediate(interm) - { - } - - inline void operator()(TVarEntryInfo& ent) - { - ent.newLocation = -1; - ent.newComponent = -1; - ent.newBinding = -1; - ent.newSet = -1; - ent.newIndex = -1; - const bool isValid = resolver.validateInOut(stage, - ent.symbol->getName().c_str(), - ent.symbol->getType(), - ent.live); - if (isValid) { - ent.newLocation = resolver.resolveInOutLocation(stage, - ent.symbol->getName().c_str(), - ent.symbol->getType(), - ent.live); - ent.newComponent = resolver.resolveInOutComponent(stage, - ent.symbol->getName().c_str(), - ent.symbol->getType(), - ent.live); - ent.newIndex = resolver.resolveInOutIndex(stage, - ent.symbol->getName().c_str(), - ent.symbol->getType(), - ent.live); - } else { - TString errorMsg = "Invalid shader In/Out variable semantic: "; - errorMsg += ent.symbol->getType().getQualifier().semanticName; - infoSink.info.message(EPrefixInternalError, errorMsg.c_str()); - error = true; - } - } - - EShLanguage stage; - TIoMapResolver& resolver; - TInfoSink& infoSink; - bool& error; - TIntermediate& intermediate; - -private: - TResolverInOutAdaptor& operator=(TResolverInOutAdaptor&); -}; - -// Base class for shared TIoMapResolver services, used by several derivations. 
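The base resolver defined next keeps, per descriptor set, a sorted vector of binding slots that are already taken, and either records an explicitly requested slot or hands out the first free slot at or above a per-resource base offset. A self-contained sketch of that bookkeeping, assuming the sorted-vector semantics of the findSlot/reserveSlot/getFreeSlot helpers below (class and method names here are illustrative, not glslang's):

    #include <algorithm>
    #include <unordered_map>
    #include <vector>

    // Tracks which binding slots are taken in each descriptor set and finds
    // free ones, mirroring the sorted-vector scheme used by the resolver.
    class SlotTracker {
    public:
        // Record 'slot' as used in 'set'. Aliasing is tolerated: reserving
        // an already-recorded slot is a no-op.
        int reserve(int set, int slot) {
            auto& used = slots[set];
            auto at = std::lower_bound(used.begin(), used.end(), slot);
            if (at == used.end() || *at != slot)
                used.insert(at, slot);
            return slot;
        }

        // Return the first unused slot at or above 'base' in 'set',
        // reserving it as a side effect.
        int getFree(int set, int base) {
            auto& used = slots[set];
            auto at = std::lower_bound(used.begin(), used.end(), base);
            // Walk the used slots in lockstep with the candidate value; the
            // first mismatch is a hole we can claim.
            for (; at != used.end() && *at == base; ++at)
                ++base;
            return reserve(set, base);
        }

    private:
        std::unordered_map<int, std::vector<int>> slots;
    };

With this scheme, an unbound but live resource receives the first hole at or above the base binding for its resource class, which is what the auto-mapping branches of resolveBinding in the derived resolvers rely on.
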
-struct TDefaultIoResolverBase : public glslang::TIoMapResolver -{ - int baseSamplerBinding; - int baseTextureBinding; - int baseImageBinding; - int baseUboBinding; - int baseSsboBinding; - int baseUavBinding; - std::vector baseResourceSetBinding; - bool doAutoBindingMapping; - bool doAutoLocationMapping; - int nextUniformLocation; - typedef std::vector TSlotSet; - typedef std::unordered_map TSlotSetMap; - TSlotSetMap slots; - - TSlotSet::iterator findSlot(int set, int slot) - { - return std::lower_bound(slots[set].begin(), slots[set].end(), slot); - } - - bool checkEmpty(int set, int slot) - { - TSlotSet::iterator at = findSlot(set, slot); - return !(at != slots[set].end() && *at == slot); - } - - int reserveSlot(int set, int slot) - { - TSlotSet::iterator at = findSlot(set, slot); - - // tolerate aliasing, by not double-recording aliases - // (policy about appropriateness of the alias is higher up) - if (at == slots[set].end() || *at != slot) - slots[set].insert(at, slot); - - return slot; - } - - int getFreeSlot(int set, int base) - { - TSlotSet::iterator at = findSlot(set, base); - if (at == slots[set].end()) - return reserveSlot(set, base); - - // look in locksteps, if they not match, then there is a free slot - for (; at != slots[set].end(); ++at, ++base) - if (*at != base) - break; - return reserveSlot(set, base); - } - - virtual bool validateBinding(EShLanguage /*stage*/, const char* /*name*/, const glslang::TType& type, bool /*is_live*/) override = 0; - - virtual int resolveBinding(EShLanguage /*stage*/, const char* /*name*/, const glslang::TType& type, bool is_live) override = 0; - - int resolveSet(EShLanguage /*stage*/, const char* /*name*/, const glslang::TType& type, bool /*is_live*/) override - { - if (type.getQualifier().hasSet()) - return type.getQualifier().layoutSet; - - // If a command line or API option requested a single descriptor set, use that (if not overrided by spaceN) - if (baseResourceSetBinding.size() == 1) - return atoi(baseResourceSetBinding[0].c_str()); - - return 0; - } - int resolveUniformLocation(EShLanguage /*stage*/, const char* /*name*/, const glslang::TType& type, bool is_live) override - { - // kick out of not doing this - if (!doAutoLocationMapping) - return -1; - - // no locations added if already present, a built-in variable, a block, or an opaque - if (type.getQualifier().hasLocation() || type.isBuiltIn() || - type.getBasicType() == EbtBlock || type.containsOpaque()) - return -1; - - // no locations on blocks of built-in variables - if (type.isStruct()) { - if (type.getStruct()->size() < 1) - return -1; - if ((*type.getStruct())[0].type->isBuiltIn()) - return -1; - } - - return nextUniformLocation++; - } - bool validateInOut(EShLanguage /*stage*/, const char* /*name*/, const TType& /*type*/, bool /*is_live*/) override - { - return true; - } - int resolveInOutLocation(EShLanguage /*stage*/, const char* /*name*/, const TType& type, bool /*is_live*/) override - { - // kick out of not doing this - if (!doAutoLocationMapping) - return -1; - - // no locations added if already present, or a built-in variable - if (type.getQualifier().hasLocation() || type.isBuiltIn()) - return -1; - - // no locations on blocks of built-in variables - if (type.isStruct()) { - if (type.getStruct()->size() < 1) - return -1; - if ((*type.getStruct())[0].type->isBuiltIn()) - return -1; - } - - // Placeholder. 
- // TODO: It would be nice to flesh this out using - // intermediate->computeTypeLocationSize(type), or functions that call it like - // intermediate->addUsedLocation() - // These in turn would want the intermediate, which is not available here, but - // is available in many places, and a lot of copying from it could be saved if - // it were just available. - return 0; - } - int resolveInOutComponent(EShLanguage /*stage*/, const char* /*name*/, const TType& /*type*/, bool /*is_live*/) override - { - return -1; - } - int resolveInOutIndex(EShLanguage /*stage*/, const char* /*name*/, const TType& /*type*/, bool /*is_live*/) override - { - return -1; - } - - void notifyBinding(EShLanguage, const char* /*name*/, const TType&, bool /*is_live*/) override {} - void notifyInOut(EShLanguage, const char* /*name*/, const TType&, bool /*is_live*/) override {} - void endNotifications(EShLanguage) override {} - void beginNotifications(EShLanguage) override {} - void beginResolve(EShLanguage) override {} - void endResolve(EShLanguage) override {} - -protected: - static int getLayoutSet(const glslang::TType& type) { - if (type.getQualifier().hasSet()) - return type.getQualifier().layoutSet; - else - return 0; - } - - static bool isSamplerType(const glslang::TType& type) { - return type.getBasicType() == glslang::EbtSampler && type.getSampler().isPureSampler(); - } - - static bool isTextureType(const glslang::TType& type) { - return type.getBasicType() == glslang::EbtSampler && type.getSampler().isTexture(); - } - - static bool isUboType(const glslang::TType& type) { - return type.getQualifier().storage == EvqUniform; - } -}; - -/* - * Basic implementation of glslang::TIoMapResolver that replaces the - * previous offset behavior. - * It does the same, uses the offsets for the corresponding uniform - * types. Also respects the EOptionAutoMapBindings flag and binds - * them if needed. 
- */ -/* - * Default resolver - */ -struct TDefaultIoResolver : public TDefaultIoResolverBase -{ - bool validateBinding(EShLanguage /*stage*/, const char* /*name*/, const glslang::TType& /*type*/, bool /*is_live*/) override - { - return true; - } - - int resolveBinding(EShLanguage /*stage*/, const char* /*name*/, const glslang::TType& type, bool is_live) override - { - const int set = getLayoutSet(type); - - if (type.getQualifier().hasBinding()) { - if (isImageType(type)) - return reserveSlot(set, baseImageBinding + type.getQualifier().layoutBinding); - - if (isTextureType(type)) - return reserveSlot(set, baseTextureBinding + type.getQualifier().layoutBinding); - - if (isSsboType(type)) - return reserveSlot(set, baseSsboBinding + type.getQualifier().layoutBinding); - - if (isSamplerType(type)) - return reserveSlot(set, baseSamplerBinding + type.getQualifier().layoutBinding); - - if (isUboType(type)) - return reserveSlot(set, baseUboBinding + type.getQualifier().layoutBinding); - } else if (is_live && doAutoBindingMapping) { - // find free slot, the caller did make sure it passes all vars with binding - // first and now all are passed that do not have a binding and needs one - - if (isImageType(type)) - return getFreeSlot(set, baseImageBinding); - - if (isTextureType(type)) - return getFreeSlot(set, baseTextureBinding); - - if (isSsboType(type)) - return getFreeSlot(set, baseSsboBinding); - - if (isSamplerType(type)) - return getFreeSlot(set, baseSamplerBinding); - - if (isUboType(type)) - return getFreeSlot(set, baseUboBinding); - } - - return -1; - } - -protected: - static bool isImageType(const glslang::TType& type) { - return type.getBasicType() == glslang::EbtSampler && type.getSampler().isImage(); - } - - static bool isSsboType(const glslang::TType& type) { - return type.getQualifier().storage == EvqBuffer; - } -}; - -/******************************************************************************** -The following IO resolver maps types in HLSL register space, as follows: - -t – for shader resource views (SRV) - TEXTURE1D - TEXTURE1DARRAY - TEXTURE2D - TEXTURE2DARRAY - TEXTURE3D - TEXTURECUBE - TEXTURECUBEARRAY - TEXTURE2DMS - TEXTURE2DMSARRAY - STRUCTUREDBUFFER - BYTEADDRESSBUFFER - BUFFER - TBUFFER - -s – for samplers - SAMPLER - SAMPLER1D - SAMPLER2D - SAMPLER3D - SAMPLERCUBE - SAMPLERSTATE - SAMPLERCOMPARISONSTATE - -u – for unordered access views (UAV) - RWBYTEADDRESSBUFFER - RWSTRUCTUREDBUFFER - APPENDSTRUCTUREDBUFFER - CONSUMESTRUCTUREDBUFFER - RWBUFFER - RWTEXTURE1D - RWTEXTURE1DARRAY - RWTEXTURE2D - RWTEXTURE2DARRAY - RWTEXTURE3D - -b – for constant buffer views (CBV) - CBUFFER - CONSTANTBUFFER - ********************************************************************************/ -struct TDefaultHlslIoResolver : public TDefaultIoResolverBase -{ - bool validateBinding(EShLanguage /*stage*/, const char* /*name*/, const glslang::TType& /*type*/, bool /*is_live*/) override - { - return true; - } - - int resolveBinding(EShLanguage /*stage*/, const char* /*name*/, const glslang::TType& type, bool is_live) override - { - const int set = getLayoutSet(type); - - if (type.getQualifier().hasBinding()) { - if (isUavType(type)) - return reserveSlot(set, baseUavBinding + type.getQualifier().layoutBinding); - - if (isSrvType(type)) - return reserveSlot(set, baseTextureBinding + type.getQualifier().layoutBinding); - - if (isSamplerType(type)) - return reserveSlot(set, baseSamplerBinding + type.getQualifier().layoutBinding); - - if (isUboType(type)) - return reserveSlot(set, baseUboBinding + 
type.getQualifier().layoutBinding); - } else if (is_live && doAutoBindingMapping) { - // find free slot, the caller did make sure it passes all vars with binding - // first and now all are passed that do not have a binding and needs one - - if (isUavType(type)) - return getFreeSlot(set, baseUavBinding); - - if (isSrvType(type)) - return getFreeSlot(set, baseTextureBinding); - - if (isSamplerType(type)) - return getFreeSlot(set, baseSamplerBinding); - - if (isUboType(type)) - return getFreeSlot(set, baseUboBinding); - } - - return -1; - } - -protected: - // Return true if this is a SRV (shader resource view) type: - static bool isSrvType(const glslang::TType& type) { - return isTextureType(type) || type.getQualifier().storage == EvqBuffer; - } - - // Return true if this is a UAV (unordered access view) type: - static bool isUavType(const glslang::TType& type) { - if (type.getQualifier().readonly) - return false; - - return (type.getBasicType() == glslang::EbtSampler && type.getSampler().isImage()) || - (type.getQualifier().storage == EvqBuffer); - } -}; - - -// Map I/O variables to provided offsets, and make bindings for -// unbound but live variables. -// -// Returns false if the input is too malformed to do this. -bool TIoMapper::addStage(EShLanguage stage, TIntermediate &intermediate, TInfoSink &infoSink, TIoMapResolver *resolver) -{ - // Trivial return if there is nothing to do. - if (intermediate.getShiftSamplerBinding() == 0 && - intermediate.getShiftTextureBinding() == 0 && - intermediate.getShiftImageBinding() == 0 && - intermediate.getShiftUboBinding() == 0 && - intermediate.getShiftSsboBinding() == 0 && - intermediate.getShiftUavBinding() == 0 && - intermediate.getResourceSetBinding().empty() && - intermediate.getAutoMapBindings() == false && - intermediate.getAutoMapLocations() == false && - resolver == nullptr) - return true; - - if (intermediate.getNumEntryPoints() != 1 || intermediate.isRecursive()) - return false; - - TIntermNode* root = intermediate.getTreeRoot(); - if (root == nullptr) - return false; - - // if no resolver is provided, use the default resolver with the given shifts and auto map settings - TDefaultIoResolver defaultResolver; - TDefaultHlslIoResolver defaultHlslResolver; - - if (resolver == nullptr) { - TDefaultIoResolverBase* resolverBase; - - // TODO: use a passed in IO mapper for this - if (intermediate.usingHlslIoMapping()) - resolverBase = &defaultHlslResolver; - else - resolverBase = &defaultResolver; - - resolverBase->baseSamplerBinding = intermediate.getShiftSamplerBinding(); - resolverBase->baseTextureBinding = intermediate.getShiftTextureBinding(); - resolverBase->baseImageBinding = intermediate.getShiftImageBinding(); - resolverBase->baseUboBinding = intermediate.getShiftUboBinding(); - resolverBase->baseSsboBinding = intermediate.getShiftSsboBinding(); - resolverBase->baseUavBinding = intermediate.getShiftUavBinding(); - resolverBase->baseResourceSetBinding = intermediate.getResourceSetBinding(); - resolverBase->doAutoBindingMapping = intermediate.getAutoMapBindings(); - resolverBase->doAutoLocationMapping = intermediate.getAutoMapLocations(); - resolverBase->nextUniformLocation = 0; - - resolver = resolverBase; - } - - TVarLiveMap inVarMap, outVarMap, uniformVarMap; - TVarGatherTraverser iter_binding_all(intermediate, true, inVarMap, outVarMap, uniformVarMap); - TVarGatherTraverser iter_binding_live(intermediate, false, inVarMap, outVarMap, uniformVarMap); - - root->traverse(&iter_binding_all); - 
iter_binding_live.pushFunction(intermediate.getEntryPointMangledName().c_str()); - - while (!iter_binding_live.functions.empty()) { - TIntermNode* function = iter_binding_live.functions.back(); - iter_binding_live.functions.pop_back(); - function->traverse(&iter_binding_live); - } - - // sort entries by priority. see TVarEntryInfo::TOrderByPriority for info. - std::sort(uniformVarMap.begin(), uniformVarMap.end(), TVarEntryInfo::TOrderByPriority()); - - bool hadError = false; - TNotifyInOutAdaptor inOutNotify(stage, *resolver); - TNotifyUniformAdaptor uniformNotify(stage, *resolver); - TResolverUniformAdaptor uniformResolve(stage, *resolver, infoSink, hadError, intermediate); - TResolverInOutAdaptor inOutResolve(stage, *resolver, infoSink, hadError, intermediate); - resolver->beginNotifications(stage); - std::for_each(inVarMap.begin(), inVarMap.end(), inOutNotify); - std::for_each(outVarMap.begin(), outVarMap.end(), inOutNotify); - std::for_each(uniformVarMap.begin(), uniformVarMap.end(), uniformNotify); - resolver->endNotifications(stage); - resolver->beginResolve(stage); - std::for_each(inVarMap.begin(), inVarMap.end(), inOutResolve); - std::for_each(outVarMap.begin(), outVarMap.end(), inOutResolve); - std::for_each(uniformVarMap.begin(), uniformVarMap.end(), uniformResolve); - resolver->endResolve(stage); - - if (!hadError) { - // sort by id again, so we can use lower bound to find entries - std::sort(uniformVarMap.begin(), uniformVarMap.end(), TVarEntryInfo::TOrderById()); - TVarSetTraverser iter_iomap(intermediate, inVarMap, outVarMap, uniformVarMap); - root->traverse(&iter_iomap); - } - - return !hadError; -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/iomapper.h b/deps/glslang/glslang/MachineIndependent/iomapper.h deleted file mode 100644 index 5e0d4391..00000000 --- a/deps/glslang/glslang/MachineIndependent/iomapper.h +++ /dev/null @@ -1,63 +0,0 @@ -// -// Copyright (C) 2016 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. 
-// - -#ifndef _IOMAPPER_INCLUDED -#define _IOMAPPER_INCLUDED - -#include "../Public/ShaderLang.h" - -// -// A reflection database and its interface, consistent with the OpenGL API reflection queries. -// - -class TInfoSink; - -namespace glslang { - -class TIntermediate; - -// I/O mapper -class TIoMapper { -public: - TIoMapper() {} - virtual ~TIoMapper() {} - - // grow the reflection stage by stage - bool addStage(EShLanguage, TIntermediate&, TInfoSink&, TIoMapResolver*); -}; - -} // end namespace glslang - -#endif // _IOMAPPER_INCLUDED diff --git a/deps/glslang/glslang/MachineIndependent/limits.cpp b/deps/glslang/glslang/MachineIndependent/limits.cpp deleted file mode 100644 index 64d191b4..00000000 --- a/deps/glslang/glslang/MachineIndependent/limits.cpp +++ /dev/null @@ -1,198 +0,0 @@ -// -// Copyright (C) 2013 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -// -// Do sub tree walks for -// 1) inductive loop bodies to see if the inductive variable is modified -// 2) array-index expressions to see if they are "constant-index-expression" -// -// These are per Appendix A of ES 2.0: -// -// "Within the body of the loop, the loop index is not statically assigned to nor is it used as the -// argument to a function out or inout parameter." -// -// "The following are constant-index-expressions: -// - Constant expressions -// - Loop indices as defined in section 4 -// - Expressions composed of both of the above" -// -// N.B.: assuming the last rule excludes function calls -// - -#include "ParseHelper.h" - -namespace glslang { - -// -// The inductive loop-body traverser. -// -// Just look at things that might modify the loop index. 
-// - -class TInductiveTraverser : public TIntermTraverser { -public: - TInductiveTraverser(int id, TSymbolTable& st) - : loopId(id), symbolTable(st), bad(false) { } - - virtual bool visitBinary(TVisit, TIntermBinary* node); - virtual bool visitUnary(TVisit, TIntermUnary* node); - virtual bool visitAggregate(TVisit, TIntermAggregate* node); - - int loopId; // unique ID of the symbol that's the loop inductive variable - TSymbolTable& symbolTable; - bool bad; - TSourceLoc badLoc; - -protected: - TInductiveTraverser(TInductiveTraverser&); - TInductiveTraverser& operator=(TInductiveTraverser&); -}; - -// check binary operations for those modifying the loop index -bool TInductiveTraverser::visitBinary(TVisit /* visit */, TIntermBinary* node) -{ - if (node->modifiesState() && node->getLeft()->getAsSymbolNode() && - node->getLeft()->getAsSymbolNode()->getId() == loopId) { - bad = true; - badLoc = node->getLoc(); - } - - return true; -} - -// check unary operations for those modifying the loop index -bool TInductiveTraverser::visitUnary(TVisit /* visit */, TIntermUnary* node) -{ - if (node->modifiesState() && node->getOperand()->getAsSymbolNode() && - node->getOperand()->getAsSymbolNode()->getId() == loopId) { - bad = true; - badLoc = node->getLoc(); - } - - return true; -} - -// check function calls for arguments modifying the loop index -bool TInductiveTraverser::visitAggregate(TVisit /* visit */, TIntermAggregate* node) -{ - if (node->getOp() == EOpFunctionCall) { - // see if an out or inout argument is the loop index - const TIntermSequence& args = node->getSequence(); - for (int i = 0; i < (int)args.size(); ++i) { - if (args[i]->getAsSymbolNode() && args[i]->getAsSymbolNode()->getId() == loopId) { - TSymbol* function = symbolTable.find(node->getName()); - const TType* type = (*function->getAsFunction())[i].type; - if (type->getQualifier().storage == EvqOut || - type->getQualifier().storage == EvqInOut) { - bad = true; - badLoc = node->getLoc(); - } - } - } - } - - return true; -} - -// -// External function to call for loop check. -// -void TParseContext::inductiveLoopBodyCheck(TIntermNode* body, int loopId, TSymbolTable& symbolTable) -{ - TInductiveTraverser it(loopId, symbolTable); - - if (body == nullptr) - return; - - body->traverse(&it); - - if (it.bad) - error(it.badLoc, "inductive loop index modified", "limitations", ""); -} - -// -// The "constant-index-expression" tranverser. -// -// Just look at things that can form an index. -// - -class TIndexTraverser : public TIntermTraverser { -public: - TIndexTraverser(const TIdSetType& ids) : inductiveLoopIds(ids), bad(false) { } - virtual void visitSymbol(TIntermSymbol* symbol); - virtual bool visitAggregate(TVisit, TIntermAggregate* node); - const TIdSetType& inductiveLoopIds; - bool bad; - TSourceLoc badLoc; - -protected: - TIndexTraverser(TIndexTraverser&); - TIndexTraverser& operator=(TIndexTraverser&); -}; - -// make sure symbols are inductive-loop indexes -void TIndexTraverser::visitSymbol(TIntermSymbol* symbol) -{ - if (inductiveLoopIds.find(symbol->getId()) == inductiveLoopIds.end()) { - bad = true; - badLoc = symbol->getLoc(); - } -} - -// check for function calls, assuming they are bad; spec. doesn't really say -bool TIndexTraverser::visitAggregate(TVisit /* visit */, TIntermAggregate* node) -{ - if (node->getOp() == EOpFunctionCall) { - bad = true; - badLoc = node->getLoc(); - } - - return true; -} - -// -// External function to call for loop check. 
-// -void TParseContext::constantIndexExpressionCheck(TIntermNode* index) -{ - TIndexTraverser it(inductiveLoopIds); - - index->traverse(&it); - - if (it.bad) - error(it.badLoc, "Non-constant-index-expression", "limitations", ""); -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/linkValidate.cpp b/deps/glslang/glslang/MachineIndependent/linkValidate.cpp deleted file mode 100644 index 9ca1557b..00000000 --- a/deps/glslang/glslang/MachineIndependent/linkValidate.cpp +++ /dev/null @@ -1,1242 +0,0 @@ -// -// Copyright (C) 2013 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -// -// Do link-time merging and validation of intermediate representations. -// -// Basic model is that during compilation, each compilation unit (shader) is -// compiled into one TIntermediate instance. Then, at link time, multiple -// units for the same stage can be merged together, which can generate errors. -// Then, after all merging, a single instance of TIntermediate represents -// the whole stage. A final error check can be done on the resulting stage, -// even if no merging was done (i.e., the stage was only one compilation unit). -// - -#include "localintermediate.h" -#include "../Include/InfoSink.h" - -namespace glslang { - -// -// Link-time error emitter. -// -void TIntermediate::error(TInfoSink& infoSink, const char* message) -{ - infoSink.info.prefix(EPrefixError); - infoSink.info << "Linking " << StageName(language) << " stage: " << message << "\n"; - - ++numErrors; -} - -// Link-time warning. -void TIntermediate::warn(TInfoSink& infoSink, const char* message) -{ - infoSink.info.prefix(EPrefixWarning); - infoSink.info << "Linking " << StageName(language) << " stage: " << message << "\n"; -} - -// TODO: 4.4 offset/align: "Two blocks linked together in the same program with the same block -// name must have the exact same set of members qualified with offset and their integral-constant -// expression values must be the same, or a link-time error results." 
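The inductive-loop and constant-index traversers deleted above enforce the ES 2.0 Appendix A limitations by walking the loop body and every indexing expression, flagging any write to the loop index (including passing it to an out/inout parameter). As a rough illustration of that first check, here is a self-contained sketch over a hypothetical toy tree type; Node and its fields are stand-ins, not glslang's TIntermNode:

```cpp
// Illustrative sketch only; Node is a hypothetical stand-in for the glslang AST,
// showing the shape of the "inductive loop index is not modified" rule.
#include <iostream>
#include <string>
#include <vector>

struct Node {
    enum Kind { Symbol, Assign, Call } kind;
    std::string name;            // symbol name, or callee name for Call nodes
    std::vector<Node> children;  // assignment target/value, or call arguments
};

// True if any node in 'n' assigns to the loop index, or passes it to a call
// (this toy version is pessimistic about out/inout arguments, like the spec note).
bool modifiesLoopIndex(const Node& n, const std::string& loopIndex)
{
    if (n.kind == Node::Assign && !n.children.empty() &&
        n.children[0].kind == Node::Symbol && n.children[0].name == loopIndex)
        return true;
    if (n.kind == Node::Call)
        for (const Node& arg : n.children)
            if (arg.kind == Node::Symbol && arg.name == loopIndex)
                return true;
    for (const Node& child : n.children)
        if (modifiesLoopIndex(child, loopIndex))
            return true;
    return false;
}

int main()
{
    // Loop body of: for (int i = 0; i < 4; ++i) { i = 2; } -- the write to 'i' is flagged.
    Node body{Node::Assign, "", {{Node::Symbol, "i", {}}, {Node::Symbol, "2", {}}}};
    std::cout << (modifiesLoopIndex(body, "i") ? "inductive loop index modified\n" : "ok\n");
}
```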
- -// -// Merge the information from 'unit' into 'this' -// -void TIntermediate::merge(TInfoSink& infoSink, TIntermediate& unit) -{ - if (source == EShSourceNone) - source = unit.source; - - if (source != unit.source) - error(infoSink, "can't link compilation units from different source languages"); - - if (unit.getNumEntryPoints() > 0) { - if (getNumEntryPoints() > 0) - error(infoSink, "can't handle multiple entry points per stage"); - else { - entryPointName = unit.getEntryPointName(); - entryPointMangledName = unit.getEntryPointMangledName(); - } - } - numEntryPoints += unit.getNumEntryPoints(); - numErrors += unit.getNumErrors(); - numPushConstants += unit.numPushConstants; - callGraph.insert(callGraph.end(), unit.callGraph.begin(), unit.callGraph.end()); - - if (originUpperLeft != unit.originUpperLeft || pixelCenterInteger != unit.pixelCenterInteger) - error(infoSink, "gl_FragCoord redeclarations must match across shaders"); - - if (! earlyFragmentTests) - earlyFragmentTests = unit.earlyFragmentTests; - - if (!postDepthCoverage) - postDepthCoverage = unit.postDepthCoverage; - - if (depthLayout == EldNone) - depthLayout = unit.depthLayout; - else if (depthLayout != unit.depthLayout) - error(infoSink, "Contradictory depth layouts"); - - blendEquations |= unit.blendEquations; - - if (inputPrimitive == ElgNone) - inputPrimitive = unit.inputPrimitive; - else if (inputPrimitive != unit.inputPrimitive) - error(infoSink, "Contradictory input layout primitives"); - - if (outputPrimitive == ElgNone) - outputPrimitive = unit.outputPrimitive; - else if (outputPrimitive != unit.outputPrimitive) - error(infoSink, "Contradictory output layout primitives"); - - if (vertices == TQualifier::layoutNotSet) - vertices = unit.vertices; - else if (vertices != unit.vertices) { - if (language == EShLangGeometry) - error(infoSink, "Contradictory layout max_vertices values"); - else if (language == EShLangTessControl) - error(infoSink, "Contradictory layout vertices values"); - else - assert(0); - } - - if (vertexSpacing == EvsNone) - vertexSpacing = unit.vertexSpacing; - else if (vertexSpacing != unit.vertexSpacing) - error(infoSink, "Contradictory input vertex spacing"); - - if (vertexOrder == EvoNone) - vertexOrder = unit.vertexOrder; - else if (vertexOrder != unit.vertexOrder) - error(infoSink, "Contradictory triangle ordering"); - - if (unit.pointMode) - pointMode = true; - - for (int i = 0; i < 3; ++i) { - if (localSize[i] > 1) - localSize[i] = unit.localSize[i]; - else if (localSize[i] != unit.localSize[i]) - error(infoSink, "Contradictory local size"); - - if (localSizeSpecId[i] != TQualifier::layoutNotSet) - localSizeSpecId[i] = unit.localSizeSpecId[i]; - else if (localSizeSpecId[i] != unit.localSizeSpecId[i]) - error(infoSink, "Contradictory local size specialization ids"); - } - - if (unit.xfbMode) - xfbMode = true; - for (size_t b = 0; b < xfbBuffers.size(); ++b) { - if (xfbBuffers[b].stride == TQualifier::layoutXfbStrideEnd) - xfbBuffers[b].stride = unit.xfbBuffers[b].stride; - else if (xfbBuffers[b].stride != unit.xfbBuffers[b].stride) - error(infoSink, "Contradictory xfb_stride"); - xfbBuffers[b].implicitStride = std::max(xfbBuffers[b].implicitStride, unit.xfbBuffers[b].implicitStride); - if (unit.xfbBuffers[b].containsDouble) - xfbBuffers[b].containsDouble = true; - // TODO: 4.4 link: enhanced layouts: compare ranges - } - - if (unit.treeRoot == 0) - return; - - if (treeRoot == 0) { - treeRoot = unit.treeRoot; - version = unit.version; - requestedExtensions = unit.requestedExtensions; - return; 
- } - - // Getting this far means we have two existing trees to merge... - - version = std::max(version, unit.version); - requestedExtensions.insert(unit.requestedExtensions.begin(), unit.requestedExtensions.end()); - - // Get the top-level globals of each unit - TIntermSequence& globals = treeRoot->getAsAggregate()->getSequence(); - TIntermSequence& unitGlobals = unit.treeRoot->getAsAggregate()->getSequence(); - - // Get the linker-object lists - TIntermSequence& linkerObjects = findLinkerObjects(); - TIntermSequence& unitLinkerObjects = unit.findLinkerObjects(); - - mergeBodies(infoSink, globals, unitGlobals); - mergeLinkerObjects(infoSink, linkerObjects, unitLinkerObjects); - - ioAccessed.insert(unit.ioAccessed.begin(), unit.ioAccessed.end()); -} - -// -// Merge the function bodies and global-level initializers from unitGlobals into globals. -// Will error check duplication of function bodies for the same signature. -// -void TIntermediate::mergeBodies(TInfoSink& infoSink, TIntermSequence& globals, const TIntermSequence& unitGlobals) -{ - // TODO: link-time performance: Processing in alphabetical order will be faster - - // Error check the global objects, not including the linker objects - for (unsigned int child = 0; child < globals.size() - 1; ++child) { - for (unsigned int unitChild = 0; unitChild < unitGlobals.size() - 1; ++unitChild) { - TIntermAggregate* body = globals[child]->getAsAggregate(); - TIntermAggregate* unitBody = unitGlobals[unitChild]->getAsAggregate(); - if (body && unitBody && body->getOp() == EOpFunction && unitBody->getOp() == EOpFunction && body->getName() == unitBody->getName()) { - error(infoSink, "Multiple function bodies in multiple compilation units for the same signature in the same stage:"); - infoSink.info << " " << globals[child]->getAsAggregate()->getName() << "\n"; - } - } - } - - // Merge the global objects, just in front of the linker objects - globals.insert(globals.end() - 1, unitGlobals.begin(), unitGlobals.end() - 1); -} - -// -// Merge the linker objects from unitLinkerObjects into linkerObjects. -// Duplication is expected and filtered out, but contradictions are an error. -// -void TIntermediate::mergeLinkerObjects(TInfoSink& infoSink, TIntermSequence& linkerObjects, const TIntermSequence& unitLinkerObjects) -{ - // Error check and merge the linker objects (duplicates should not be created) - std::size_t initialNumLinkerObjects = linkerObjects.size(); - for (unsigned int unitLinkObj = 0; unitLinkObj < unitLinkerObjects.size(); ++unitLinkObj) { - bool merge = true; - for (std::size_t linkObj = 0; linkObj < initialNumLinkerObjects; ++linkObj) { - TIntermSymbol* symbol = linkerObjects[linkObj]->getAsSymbolNode(); - TIntermSymbol* unitSymbol = unitLinkerObjects[unitLinkObj]->getAsSymbolNode(); - assert(symbol && unitSymbol); - if (symbol->getName() == unitSymbol->getName()) { - // filter out copy - merge = false; - - // but if one has an initializer and the other does not, update - // the initializer - if (symbol->getConstArray().empty() && ! unitSymbol->getConstArray().empty()) - symbol->setConstArray(unitSymbol->getConstArray()); - - // Similarly for binding - if (! symbol->getQualifier().hasBinding() && unitSymbol->getQualifier().hasBinding()) - symbol->getQualifier().layoutBinding = unitSymbol->getQualifier().layoutBinding; - - // Update implicit array sizes - mergeImplicitArraySizes(symbol->getWritableType(), unitSymbol->getType()); - - // Check for consistent types/qualification/initializers etc. 
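mergeLinkerObjects above filters duplicate linker objects by name and, when only one compilation unit supplies an initializer or a binding, adopts it on the surviving copy before running the full error check. A minimal sketch of that merge policy, using plain standard-library types; the LinkerObject struct is hypothetical and stands in for glslang's TIntermSymbol:

```cpp
// Minimal sketch of the merge-by-name policy; not the glslang API.
#include <algorithm>
#include <optional>
#include <string>
#include <vector>

struct LinkerObject {
    std::string name;
    std::optional<int> binding;       // layout(binding = N), if declared
    std::optional<int> initializer;   // stand-in for a constant initializer
};

void mergeLinkerObjects(std::vector<LinkerObject>& dst, const std::vector<LinkerObject>& unit)
{
    const std::size_t initialCount = dst.size();
    for (const LinkerObject& u : unit) {
        auto match = std::find_if(dst.begin(), dst.begin() + initialCount,
                                  [&](const LinkerObject& d) { return d.name == u.name; });
        if (match == dst.begin() + initialCount) {
            dst.push_back(u);                          // new object: append after the originals
            continue;
        }
        if (!match->binding)     match->binding     = u.binding;      // adopt missing binding
        if (!match->initializer) match->initializer = u.initializer;  // adopt missing initializer
        // a full implementation would also run type/qualifier consistency checks here
    }
}
```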
- mergeErrorCheck(infoSink, *symbol, *unitSymbol, false); - } - } - if (merge) - linkerObjects.push_back(unitLinkerObjects[unitLinkObj]); - } -} - -// TODO 4.5 link functionality: cull distance array size checking - -// Recursively merge the implicit array sizes through the objects' respective type trees. -void TIntermediate::mergeImplicitArraySizes(TType& type, const TType& unitType) -{ - if (type.isImplicitlySizedArray() && unitType.isArray()) { - int newImplicitArraySize = unitType.isImplicitlySizedArray() ? unitType.getImplicitArraySize() : unitType.getOuterArraySize(); - if (newImplicitArraySize > type.getImplicitArraySize ()) - type.setImplicitArraySize(newImplicitArraySize); - } - - // Type mismatches are caught and reported after this, just be careful for now. - if (! type.isStruct() || ! unitType.isStruct() || type.getStruct()->size() != unitType.getStruct()->size()) - return; - - for (int i = 0; i < (int)type.getStruct()->size(); ++i) - mergeImplicitArraySizes(*(*type.getStruct())[i].type, *(*unitType.getStruct())[i].type); -} - -// -// Compare two global objects from two compilation units and see if they match -// well enough. Rules can be different for intra- vs. cross-stage matching. -// -// This function only does one of intra- or cross-stage matching per call. -// -void TIntermediate::mergeErrorCheck(TInfoSink& infoSink, const TIntermSymbol& symbol, const TIntermSymbol& unitSymbol, bool crossStage) -{ - bool writeTypeComparison = false; - - // Types have to match - if (symbol.getType() != unitSymbol.getType()) { - error(infoSink, "Types must match:"); - writeTypeComparison = true; - } - - // Qualifiers have to (almost) match - - // Storage... - if (symbol.getQualifier().storage != unitSymbol.getQualifier().storage) { - error(infoSink, "Storage qualifiers must match:"); - writeTypeComparison = true; - } - - // Precision... - if (symbol.getQualifier().precision != unitSymbol.getQualifier().precision) { - error(infoSink, "Precision qualifiers must match:"); - writeTypeComparison = true; - } - - // Invariance... - if (! crossStage && symbol.getQualifier().invariant != unitSymbol.getQualifier().invariant) { - error(infoSink, "Presence of invariant qualifier must match:"); - writeTypeComparison = true; - } - - // Precise... - if (! crossStage && symbol.getQualifier().noContraction != unitSymbol.getQualifier().noContraction) { - error(infoSink, "Presence of precise qualifier must match:"); - writeTypeComparison = true; - } - - // Auxiliary and interpolation... - if (symbol.getQualifier().centroid != unitSymbol.getQualifier().centroid || - symbol.getQualifier().smooth != unitSymbol.getQualifier().smooth || - symbol.getQualifier().flat != unitSymbol.getQualifier().flat || - symbol.getQualifier().sample != unitSymbol.getQualifier().sample || - symbol.getQualifier().patch != unitSymbol.getQualifier().patch || - symbol.getQualifier().nopersp != unitSymbol.getQualifier().nopersp) { - error(infoSink, "Interpolation and auxiliary storage qualifiers must match:"); - writeTypeComparison = true; - } - - // Memory... - if (symbol.getQualifier().coherent != unitSymbol.getQualifier().coherent || - symbol.getQualifier().volatil != unitSymbol.getQualifier().volatil || - symbol.getQualifier().restrict != unitSymbol.getQualifier().restrict || - symbol.getQualifier().readonly != unitSymbol.getQualifier().readonly || - symbol.getQualifier().writeonly != unitSymbol.getQualifier().writeonly) { - error(infoSink, "Memory qualifiers must match:"); - writeTypeComparison = true; - } - - // Layouts... 
- // TODO: 4.4 enhanced layouts: Generalize to include offset/align: current spec - // requires separate user-supplied offset from actual computed offset, but - // current implementation only has one offset. - if (symbol.getQualifier().layoutMatrix != unitSymbol.getQualifier().layoutMatrix || - symbol.getQualifier().layoutPacking != unitSymbol.getQualifier().layoutPacking || - symbol.getQualifier().layoutLocation != unitSymbol.getQualifier().layoutLocation || - symbol.getQualifier().layoutComponent != unitSymbol.getQualifier().layoutComponent || - symbol.getQualifier().layoutIndex != unitSymbol.getQualifier().layoutIndex || - symbol.getQualifier().layoutBinding != unitSymbol.getQualifier().layoutBinding || - (symbol.getQualifier().hasBinding() && (symbol.getQualifier().layoutOffset != unitSymbol.getQualifier().layoutOffset))) { - error(infoSink, "Layout qualification must match:"); - writeTypeComparison = true; - } - - // Initializers have to match, if both are present, and if we don't already know the types don't match - if (! writeTypeComparison) { - if (! symbol.getConstArray().empty() && ! unitSymbol.getConstArray().empty()) { - if (symbol.getConstArray() != unitSymbol.getConstArray()) { - error(infoSink, "Initializers must match:"); - infoSink.info << " " << symbol.getName() << "\n"; - } - } - } - - if (writeTypeComparison) - infoSink.info << " " << symbol.getName() << ": \"" << symbol.getType().getCompleteString() << "\" versus \"" << - unitSymbol.getType().getCompleteString() << "\"\n"; -} - -// -// Do final link-time error checking of a complete (merged) intermediate representation. -// (Much error checking was done during merging). -// -// Also, lock in defaults of things not set, including array sizes. -// -void TIntermediate::finalCheck(TInfoSink& infoSink, bool keepUncalled) -{ - if (getTreeRoot() == nullptr) - return; - - if (numEntryPoints < 1) { - if (source == EShSourceGlsl) - error(infoSink, "Missing entry point: Each stage requires one entry point"); - else - warn(infoSink, "Entry point not found"); - } - - if (numPushConstants > 1) - error(infoSink, "Only one push_constant block is allowed per stage"); - - // recursion and missing body checking - checkCallGraphCycles(infoSink); - checkCallGraphBodies(infoSink, keepUncalled); - - // overlap/alias/missing I/O, etc. - inOutLocationCheck(infoSink); - - // invocations - if (invocations == TQualifier::layoutNotSet) - invocations = 1; - - if (inIoAccessed("gl_ClipDistance") && inIoAccessed("gl_ClipVertex")) - error(infoSink, "Can only use one of gl_ClipDistance or gl_ClipVertex (gl_ClipDistance is preferred)"); - if (inIoAccessed("gl_CullDistance") && inIoAccessed("gl_ClipVertex")) - error(infoSink, "Can only use one of gl_CullDistance or gl_ClipVertex (gl_ClipDistance is preferred)"); - - if (userOutputUsed() && (inIoAccessed("gl_FragColor") || inIoAccessed("gl_FragData"))) - error(infoSink, "Cannot use gl_FragColor or gl_FragData when using user-defined outputs"); - if (inIoAccessed("gl_FragColor") && inIoAccessed("gl_FragData")) - error(infoSink, "Cannot use both gl_FragColor and gl_FragData"); - - for (size_t b = 0; b < xfbBuffers.size(); ++b) { - if (xfbBuffers[b].containsDouble) - RoundToPow2(xfbBuffers[b].implicitStride, 8); - - // "It is a compile-time or link-time error to have - // any xfb_offset that overflows xfb_stride, whether stated on declarations before or after the xfb_stride, or - // in different compilation units. 
While xfb_stride can be declared multiple times for the same buffer, it is a - // compile-time or link-time error to have different values specified for the stride for the same buffer." - if (xfbBuffers[b].stride != TQualifier::layoutXfbStrideEnd && xfbBuffers[b].implicitStride > xfbBuffers[b].stride) { - error(infoSink, "xfb_stride is too small to hold all buffer entries:"); - infoSink.info.prefix(EPrefixError); - infoSink.info << " xfb_buffer " << (unsigned int)b << ", xfb_stride " << xfbBuffers[b].stride << ", minimum stride needed: " << xfbBuffers[b].implicitStride << "\n"; - } - if (xfbBuffers[b].stride == TQualifier::layoutXfbStrideEnd) - xfbBuffers[b].stride = xfbBuffers[b].implicitStride; - - // "If the buffer is capturing any - // outputs with double-precision components, the stride must be a multiple of 8, otherwise it must be a - // multiple of 4, or a compile-time or link-time error results." - if (xfbBuffers[b].containsDouble && ! IsMultipleOfPow2(xfbBuffers[b].stride, 8)) { - error(infoSink, "xfb_stride must be multiple of 8 for buffer holding a double:"); - infoSink.info.prefix(EPrefixError); - infoSink.info << " xfb_buffer " << (unsigned int)b << ", xfb_stride " << xfbBuffers[b].stride << "\n"; - } else if (! IsMultipleOfPow2(xfbBuffers[b].stride, 4)) { - error(infoSink, "xfb_stride must be multiple of 4:"); - infoSink.info.prefix(EPrefixError); - infoSink.info << " xfb_buffer " << (unsigned int)b << ", xfb_stride " << xfbBuffers[b].stride << "\n"; - } - - // "The resulting stride (implicit or explicit), when divided by 4, must be less than or equal to the - // implementation-dependent constant gl_MaxTransformFeedbackInterleavedComponents." - if (xfbBuffers[b].stride > (unsigned int)(4 * resources.maxTransformFeedbackInterleavedComponents)) { - error(infoSink, "xfb_stride is too large:"); - infoSink.info.prefix(EPrefixError); - infoSink.info << " xfb_buffer " << (unsigned int)b << ", components (1/4 stride) needed are " << xfbBuffers[b].stride/4 << ", gl_MaxTransformFeedbackInterleavedComponents is " << resources.maxTransformFeedbackInterleavedComponents << "\n"; - } - } - - switch (language) { - case EShLangVertex: - break; - case EShLangTessControl: - if (vertices == TQualifier::layoutNotSet) - error(infoSink, "At least one shader must specify an output layout(vertices=...)"); - break; - case EShLangTessEvaluation: - if (source == EShSourceGlsl) { - if (inputPrimitive == ElgNone) - error(infoSink, "At least one shader must specify an input layout primitive"); - if (vertexSpacing == EvsNone) - vertexSpacing = EvsEqual; - if (vertexOrder == EvoNone) - vertexOrder = EvoCcw; - } - break; - case EShLangGeometry: - if (inputPrimitive == ElgNone) - error(infoSink, "At least one shader must specify an input layout primitive"); - if (outputPrimitive == ElgNone -#ifdef NV_EXTENSIONS - && !getGeoPassthroughEXT() -#endif - ) - error(infoSink, "At least one shader must specify an output layout primitive"); - if (vertices == TQualifier::layoutNotSet -#ifdef NV_EXTENSIONS - && !getGeoPassthroughEXT() -#endif - ) - error(infoSink, "At least one shader must specify a layout(max_vertices = value)"); - break; - case EShLangFragment: - // for GL_ARB_post_depth_coverage, EarlyFragmentTest is set automatically in - // ParseHelper.cpp. 
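The xfb_stride validation above reduces to four rules from the quoted spec text: the stride must cover every captured byte, be a multiple of 8 when the buffer captures doubles (otherwise a multiple of 4), and stay small enough that stride/4 does not exceed gl_MaxTransformFeedbackInterleavedComponents. A compact restatement of just those rules, with illustrative names rather than glslang's API:

```cpp
// Hedged sketch of the xfb_stride rules quoted above; not glslang code.
#include <string>

std::string validateXfbStride(unsigned stride, unsigned implicitStride,
                              bool containsDouble, unsigned maxInterleavedComponents)
{
    if (stride < implicitStride)
        return "xfb_stride is too small to hold all buffer entries";
    if (containsDouble && stride % 8 != 0)
        return "xfb_stride must be multiple of 8 for buffer holding a double";
    if (stride % 4 != 0)
        return "xfb_stride must be multiple of 4";
    if (stride / 4 > maxInterleavedComponents)
        return "xfb_stride is too large";
    return {};  // empty string: stride is acceptable
}
```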
So if we reach here, this must be GL_EXT_post_depth_coverage - // requiring explicit early_fragment_tests - if (getPostDepthCoverage() && !getEarlyFragmentTests()) - error(infoSink, "post_depth_coverage requires early_fragment_tests"); - break; - case EShLangCompute: - break; - default: - error(infoSink, "Unknown Stage."); - break; - } - - // Process the tree for any node-specific work. - class TFinalLinkTraverser : public TIntermTraverser { - public: - TFinalLinkTraverser() { } - virtual ~TFinalLinkTraverser() { } - - virtual void visitSymbol(TIntermSymbol* symbol) - { - // Implicitly size arrays. - symbol->getWritableType().adoptImplicitArraySizes(); - } - } finalLinkTraverser; - - treeRoot->traverse(&finalLinkTraverser); -} - -// -// See if the call graph contains any static recursion, which is disallowed -// by the specification. -// -void TIntermediate::checkCallGraphCycles(TInfoSink& infoSink) -{ - // Clear fields we'll use for this. - for (TGraph::iterator call = callGraph.begin(); call != callGraph.end(); ++call) { - call->visited = false; - call->currentPath = false; - call->errorGiven = false; - } - - // - // Loop, looking for a new connected subgraph. One subgraph is handled per loop iteration. - // - - TCall* newRoot; - do { - // See if we have unvisited parts of the graph. - newRoot = 0; - for (TGraph::iterator call = callGraph.begin(); call != callGraph.end(); ++call) { - if (! call->visited) { - newRoot = &(*call); - break; - } - } - - // If not, we are done. - if (! newRoot) - break; - - // Otherwise, we found a new subgraph, process it: - // See what all can be reached by this new root, and if any of - // that is recursive. This is done by depth-first traversals, seeing - // if a new call is found that was already in the currentPath (a back edge), - // thereby detecting recursion. - std::list stack; - newRoot->currentPath = true; // currentPath will be true iff it is on the stack - stack.push_back(newRoot); - while (! stack.empty()) { - // get a caller - TCall* call = stack.back(); - - // Add to the stack just one callee. - // This algorithm always terminates, because only !visited and !currentPath causes a push - // and all pushes change currentPath to true, and all pops change visited to true. - TGraph::iterator child = callGraph.begin(); - for (; child != callGraph.end(); ++child) { - - // If we already visited this node, its whole subgraph has already been processed, so skip it. - if (child->visited) - continue; - - if (call->callee == child->caller) { - if (child->currentPath) { - // Then, we found a back edge - if (! child->errorGiven) { - error(infoSink, "Recursion detected:"); - infoSink.info << " " << call->callee << " calling " << child->callee << "\n"; - child->errorGiven = true; - recursive = true; - } - } else { - child->currentPath = true; - stack.push_back(&(*child)); - break; - } - } - } - if (child == callGraph.end()) { - // no more callees, we bottomed out, never look at this node again - stack.back()->currentPath = false; - stack.back()->visited = true; - stack.pop_back(); - } - } // end while, meaning nothing left to process in this subtree - - } while (newRoot); // redundant loop check; should always exit via the 'break' above -} - -// -// See which functions are reachable from the entry point and which have bodies. -// Reachable ones with missing bodies are errors. -// Unreachable bodies are dead code. -// -void TIntermediate::checkCallGraphBodies(TInfoSink& infoSink, bool keepUncalled) -{ - // Clear fields we'll use for this. 
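checkCallGraphCycles above walks each connected subgraph depth-first, keeping a currentPath flag per node and reporting a back edge (a call to something already on the path) as static recursion. The same idea in a self-contained form over a plain (caller, callee) edge list; the std:: containers here are only illustrative, not glslang's TGraph:

```cpp
// Illustrative back-edge detection over a call graph; not glslang's TGraph types.
#include <iostream>
#include <map>
#include <set>
#include <string>

bool hasStaticRecursion(const std::string& node,
                        const std::multimap<std::string, std::string>& edges,
                        std::set<std::string>& onPath, std::set<std::string>& visited)
{
    if (onPath.count(node))  return true;    // back edge: 'node' is already on the current path
    if (visited.count(node)) return false;   // subgraph already fully processed
    onPath.insert(node);
    auto range = edges.equal_range(node);
    for (auto it = range.first; it != range.second; ++it)
        if (hasStaticRecursion(it->second, edges, onPath, visited))
            return true;
    onPath.erase(node);
    visited.insert(node);
    return false;
}

int main()
{
    std::multimap<std::string, std::string> callGraph{{"main", "a"}, {"a", "b"}, {"b", "a"}};
    std::set<std::string> onPath, visited;
    std::cout << (hasStaticRecursion("main", callGraph, onPath, visited)
                      ? "Recursion detected\n" : "ok\n");
}
```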
- for (TGraph::iterator call = callGraph.begin(); call != callGraph.end(); ++call) { - call->visited = false; - call->calleeBodyPosition = -1; - } - - // The top level of the AST includes function definitions (bodies). - // Compare these to function calls in the call graph. - // We'll end up knowing which have bodies, and if so, - // how to map the call-graph node to the location in the AST. - TIntermSequence &functionSequence = getTreeRoot()->getAsAggregate()->getSequence(); - std::vector reachable(functionSequence.size(), true); // so that non-functions are reachable - for (int f = 0; f < (int)functionSequence.size(); ++f) { - glslang::TIntermAggregate* node = functionSequence[f]->getAsAggregate(); - if (node && (node->getOp() == glslang::EOpFunction)) { - if (node->getName().compare(getEntryPointMangledName().c_str()) != 0) - reachable[f] = false; // so that function bodies are unreachable, until proven otherwise - for (TGraph::iterator call = callGraph.begin(); call != callGraph.end(); ++call) { - if (call->callee == node->getName()) - call->calleeBodyPosition = f; - } - } - } - - // Start call-graph traversal by visiting the entry point nodes. - for (TGraph::iterator call = callGraph.begin(); call != callGraph.end(); ++call) { - if (call->caller.compare(getEntryPointMangledName().c_str()) == 0) - call->visited = true; - } - - // Propagate 'visited' through the call-graph to every part of the graph it - // can reach (seeded with the entry-point setting above). - bool changed; - do { - changed = false; - for (auto call1 = callGraph.begin(); call1 != callGraph.end(); ++call1) { - if (call1->visited) { - for (TGraph::iterator call2 = callGraph.begin(); call2 != callGraph.end(); ++call2) { - if (! call2->visited) { - if (call1->callee == call2->caller) { - changed = true; - call2->visited = true; - } - } - } - } - } - } while (changed); - - // Any call-graph node set to visited but without a callee body is an error. - for (TGraph::iterator call = callGraph.begin(); call != callGraph.end(); ++call) { - if (call->visited) { - if (call->calleeBodyPosition == -1) { - error(infoSink, "No function definition (body) found: "); - infoSink.info << " " << call->callee << "\n"; - } else - reachable[call->calleeBodyPosition] = true; - } - } - - // Bodies in the AST not reached by the call graph are dead; - // clear them out, since they can't be reached and also can't - // be translated further due to possibility of being ill defined. - if (! keepUncalled) { - for (int f = 0; f < (int)functionSequence.size(); ++f) { - if (! 
reachable[f]) - functionSequence[f] = nullptr; - } - functionSequence.erase(std::remove(functionSequence.begin(), functionSequence.end(), nullptr), functionSequence.end()); - } -} - -// -// Satisfy rules for location qualifiers on inputs and outputs -// -void TIntermediate::inOutLocationCheck(TInfoSink& infoSink) -{ - // ES 3.0 requires all outputs to have location qualifiers if there is more than one output - bool fragOutWithNoLocation = false; - int numFragOut = 0; - - // TODO: linker functionality: location collision checking - - TIntermSequence& linkObjects = findLinkerObjects(); - for (size_t i = 0; i < linkObjects.size(); ++i) { - const TType& type = linkObjects[i]->getAsTyped()->getType(); - const TQualifier& qualifier = type.getQualifier(); - if (language == EShLangFragment) { - if (qualifier.storage == EvqVaryingOut && qualifier.builtIn == EbvNone) { - ++numFragOut; - if (!qualifier.hasAnyLocation()) - fragOutWithNoLocation = true; - } - } - } - - if (profile == EEsProfile) { - if (numFragOut > 1 && fragOutWithNoLocation) - error(infoSink, "when more than one fragment shader output, all must have location qualifiers"); - } -} - -TIntermSequence& TIntermediate::findLinkerObjects() const -{ - // Get the top-level globals - TIntermSequence& globals = treeRoot->getAsAggregate()->getSequence(); - - // Get the last member of the sequences, expected to be the linker-object lists - assert(globals.back()->getAsAggregate()->getOp() == EOpLinkerObjects); - - return globals.back()->getAsAggregate()->getSequence(); -} - -// See if a variable was both a user-declared output and used. -// Note: the spec discusses writing to one, but this looks at read or write, which -// is more useful, and perhaps the spec should be changed to reflect that. -bool TIntermediate::userOutputUsed() const -{ - const TIntermSequence& linkerObjects = findLinkerObjects(); - - bool found = false; - for (size_t i = 0; i < linkerObjects.size(); ++i) { - const TIntermSymbol& symbolNode = *linkerObjects[i]->getAsSymbolNode(); - if (symbolNode.getQualifier().storage == EvqVaryingOut && - symbolNode.getName().compare(0, 3, "gl_") != 0 && - inIoAccessed(symbolNode.getName())) { - found = true; - break; - } - } - - return found; -} - -// Accumulate locations used for inputs, outputs, and uniforms, and check for collisions -// as the accumulation is done. -// -// Returns < 0 if no collision, >= 0 if collision and the value returned is a colliding value. -// -// typeCollision is set to true if there is no direct collision, but the types in the same location -// are different. -// -int TIntermediate::addUsedLocation(const TQualifier& qualifier, const TType& type, bool& typeCollision) -{ - typeCollision = false; - - int set; - if (qualifier.isPipeInput()) - set = 0; - else if (qualifier.isPipeOutput()) - set = 1; - else if (qualifier.storage == EvqUniform) - set = 2; - else if (qualifier.storage == EvqBuffer) - set = 3; - else - return -1; - - int size; - if (qualifier.isUniformOrBuffer()) { - if (type.isExplicitlySizedArray()) - size = type.getCumulativeArraySize(); - else - size = 1; - } else { - // Strip off the outer array dimension for those having an extra one. - if (type.isArray() && qualifier.isArrayedIo(language)) { - TType elementType(type, 0); - size = computeTypeLocationSize(elementType); - } else - size = computeTypeLocationSize(type); - } - - // Locations, and components within locations. - // - // Almost always, dealing with components means a single location is involved. - // The exception is a dvec3. 
From the spec: - // - // "A dvec3 will consume all four components of the first location and components 0 and 1 of - // the second location. This leaves components 2 and 3 available for other component-qualified - // declarations." - // - // That means, without ever mentioning a component, a component range - // for a different location gets specified, if it's not a vertex shader input. (!) - // (A vertex shader input will show using only one location, even for a dvec3/4.) - // - // So, for the case of dvec3, we need two independent ioRanges. - - int collision = -1; // no collision - if (size == 2 && type.getBasicType() == EbtDouble && type.getVectorSize() == 3 && - (qualifier.isPipeInput() || qualifier.isPipeOutput())) { - // Dealing with dvec3 in/out split across two locations. - // Need two io-ranges. - // The case where the dvec3 doesn't start at component 0 was previously caught as overflow. - - // First range: - TRange locationRange(qualifier.layoutLocation, qualifier.layoutLocation); - TRange componentRange(0, 3); - TIoRange range(locationRange, componentRange, type.getBasicType(), 0); - - // check for collisions - collision = checkLocationRange(set, range, type, typeCollision); - if (collision < 0) { - usedIo[set].push_back(range); - - // Second range: - TRange locationRange2(qualifier.layoutLocation + 1, qualifier.layoutLocation + 1); - TRange componentRange2(0, 1); - TIoRange range2(locationRange2, componentRange2, type.getBasicType(), 0); - - // check for collisions - collision = checkLocationRange(set, range2, type, typeCollision); - if (collision < 0) - usedIo[set].push_back(range2); - } - } else { - // Not a dvec3 in/out split across two locations, generic path. - // Need a single IO-range block. - - TRange locationRange(qualifier.layoutLocation, qualifier.layoutLocation + size - 1); - TRange componentRange(0, 3); - if (qualifier.hasComponent() || type.getVectorSize() > 0) { - int consumedComponents = type.getVectorSize() * (type.getBasicType() == EbtDouble ? 2 : 1); - if (qualifier.hasComponent()) - componentRange.start = qualifier.layoutComponent; - componentRange.last = componentRange.start + consumedComponents - 1; - } - - // combine location and component ranges - TIoRange range(locationRange, componentRange, type.getBasicType(), qualifier.hasIndex() ? qualifier.layoutIndex : 0); - - // check for collisions, except for vertex inputs on desktop - if (! (profile != EEsProfile && language == EShLangVertex && qualifier.isPipeInput())) - collision = checkLocationRange(set, range, type, typeCollision); - - if (collision < 0) - usedIo[set].push_back(range); - } - - return collision; -} - -// Compare a new (the passed in) 'range' against the existing set, and see -// if there are any collisions. -// -// Returns < 0 if no collision, >= 0 if collision and the value returned is a colliding value. -// -int TIntermediate::checkLocationRange(int set, const TIoRange& range, const TType& type, bool& typeCollision) -{ - for (size_t r = 0; r < usedIo[set].size(); ++r) { - if (range.overlap(usedIo[set][r])) { - // there is a collision; pick one - return std::max(range.location.start, usedIo[set][r].location.start); - } else if (range.location.overlap(usedIo[set][r].location) && type.getBasicType() != usedIo[set][r].basicType) { - // aliased-type mismatch - typeCollision = true; - return std::max(range.location.start, usedIo[set][r].location.start); - } - } - - return -1; // no collision -} - -// Accumulate bindings and offsets, and check for collisions -// as the accumulation is done. 
-// -// Returns < 0 if no collision, >= 0 if collision and the value returned is a colliding value. -// -int TIntermediate::addUsedOffsets(int binding, int offset, int numOffsets) -{ - TRange bindingRange(binding, binding); - TRange offsetRange(offset, offset + numOffsets - 1); - TOffsetRange range(bindingRange, offsetRange); - - // check for collisions, except for vertex inputs on desktop - for (size_t r = 0; r < usedAtomics.size(); ++r) { - if (range.overlap(usedAtomics[r])) { - // there is a collision; pick one - return std::max(offset, usedAtomics[r].offset.start); - } - } - - usedAtomics.push_back(range); - - return -1; // no collision -} - -// Accumulate used constant_id values. -// -// Return false is one was already used. -bool TIntermediate::addUsedConstantId(int id) -{ - if (usedConstantId.find(id) != usedConstantId.end()) - return false; - - usedConstantId.insert(id); - - return true; -} - -// Recursively figure out how many locations are used up by an input or output type. -// Return the size of type, as measured by "locations". -int TIntermediate::computeTypeLocationSize(const TType& type) const -{ - // "If the declared input is an array of size n and each element takes m locations, it will be assigned m * n - // consecutive locations..." - if (type.isArray()) { - // TODO: perf: this can be flattened by using getCumulativeArraySize(), and a deref that discards all arrayness - TType elementType(type, 0); - if (type.isImplicitlySizedArray()) { - // TODO: are there valid cases of having an implicitly-sized array with a location? If so, running this code too early. - return computeTypeLocationSize(elementType); - } else - return type.getOuterArraySize() * computeTypeLocationSize(elementType); - } - - // "The locations consumed by block and structure members are determined by applying the rules above - // recursively..." - if (type.isStruct()) { - int size = 0; - for (int member = 0; member < (int)type.getStruct()->size(); ++member) { - TType memberType(type, member); - size += computeTypeLocationSize(memberType); - } - return size; - } - - // ES: "If a shader input is any scalar or vector type, it will consume a single location." - - // Desktop: "If a vertex shader input is any scalar or vector type, it will consume a single location. If a non-vertex - // shader input is a scalar or vector type other than dvec3 or dvec4, it will consume a single location, while - // types dvec3 or dvec4 will consume two consecutive locations. Inputs of type double and dvec2 will - // consume only a single location, in all stages." - if (type.isScalar()) - return 1; - if (type.isVector()) { - if (language == EShLangVertex && type.getQualifier().isPipeInput()) - return 1; - if (type.getBasicType() == EbtDouble && type.getVectorSize() > 2) - return 2; - else - return 1; - } - - // "If the declared input is an n x m single- or double-precision matrix, ... - // The number of locations assigned for each matrix will be the same as - // for an n-element array of m-component vectors..." - if (type.isMatrix()) { - TType columnType(type, 0); - return type.getMatrixCols() * computeTypeLocationSize(columnType); - } - - assert(0); - return 1; -} - -// Accumulate xfb buffer ranges and check for collisions as the accumulation is done. -// -// Returns < 0 if no collision, >= 0 if collision and the value returned is a colliding value. 
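computeTypeLocationSize applies the quoted location rules recursively: arrays multiply by the element count, matrices count as arrays of column vectors, structures sum their members, and on desktop a non-vertex-input dvec3/dvec4 takes two locations while every other scalar or vector takes one. A simplified restatement with a hypothetical Type struct (glslang's TType carries far more state); compiles as C++14 or later:

```cpp
// Simplified sketch of the location-counting rules quoted above; Type is hypothetical.
struct Type {
    bool isDouble   = false;
    int  vectorSize = 1;   // 1 means scalar
    int  matrixCols = 0;   // 0 means not a matrix
    int  arraySize  = 0;   // 0 means not an array
};

constexpr int locationSize(Type t, bool vertexShaderInput)
{
    if (t.arraySize > 0) {                       // array of n elements: n * element size
        Type element = t; element.arraySize = 0;
        return t.arraySize * locationSize(element, vertexShaderInput);
    }
    if (t.matrixCols > 0) {                      // matrix: like an array of column vectors
        Type column = t; column.matrixCols = 0;
        return t.matrixCols * locationSize(column, vertexShaderInput);
    }
    if (!vertexShaderInput && t.isDouble && t.vectorSize > 2)
        return 2;                                // dvec3/dvec4 span two locations
    return 1;                                    // scalars and other vectors: one location
}

// Example: a dvec3[2] fragment-shader input consumes 2 * 2 = 4 locations.
static_assert(locationSize(Type{true, 3, 0, 2}, false) == 4, "dvec3[2] spans four locations");
```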
-// -int TIntermediate::addXfbBufferOffset(const TType& type) -{ - const TQualifier& qualifier = type.getQualifier(); - - assert(qualifier.hasXfbOffset() && qualifier.hasXfbBuffer()); - TXfbBuffer& buffer = xfbBuffers[qualifier.layoutXfbBuffer]; - - // compute the range - unsigned int size = computeTypeXfbSize(type, buffer.containsDouble); - buffer.implicitStride = std::max(buffer.implicitStride, qualifier.layoutXfbOffset + size); - TRange range(qualifier.layoutXfbOffset, qualifier.layoutXfbOffset + size - 1); - - // check for collisions - for (size_t r = 0; r < buffer.ranges.size(); ++r) { - if (range.overlap(buffer.ranges[r])) { - // there is a collision; pick an example to return - return std::max(range.start, buffer.ranges[r].start); - } - } - - buffer.ranges.push_back(range); - - return -1; // no collision -} - -// Recursively figure out how many bytes of xfb buffer are used by the given type. -// Return the size of type, in bytes. -// Sets containsDouble to true if the type contains a double. -// N.B. Caller must set containsDouble to false before calling. -unsigned int TIntermediate::computeTypeXfbSize(const TType& type, bool& containsDouble) const -{ - // "...if applied to an aggregate containing a double, the offset must also be a multiple of 8, - // and the space taken in the buffer will be a multiple of 8. - // ...within the qualified entity, subsequent components are each - // assigned, in order, to the next available offset aligned to a multiple of - // that component's size. Aggregate types are flattened down to the component - // level to get this sequence of components." - - if (type.isArray()) { - // TODO: perf: this can be flattened by using getCumulativeArraySize(), and a deref that discards all arrayness - assert(type.isExplicitlySizedArray()); - TType elementType(type, 0); - return type.getOuterArraySize() * computeTypeXfbSize(elementType, containsDouble); - } - - if (type.isStruct()) { - unsigned int size = 0; - bool structContainsDouble = false; - for (int member = 0; member < (int)type.getStruct()->size(); ++member) { - TType memberType(type, member); - // "... if applied to - // an aggregate containing a double, the offset must also be a multiple of 8, - // and the space taken in the buffer will be a multiple of 8." - bool memberContainsDouble = false; - int memberSize = computeTypeXfbSize(memberType, memberContainsDouble); - if (memberContainsDouble) { - structContainsDouble = true; - RoundToPow2(size, 8); - } - size += memberSize; - } - - if (structContainsDouble) { - containsDouble = true; - RoundToPow2(size, 8); - } - return size; - } - - int numComponents; - if (type.isScalar()) - numComponents = 1; - else if (type.isVector()) - numComponents = type.getVectorSize(); - else if (type.isMatrix()) - numComponents = type.getMatrixCols() * type.getMatrixRows(); - else { - assert(0); - numComponents = 1; - } - - if (type.getBasicType() == EbtDouble) { - containsDouble = true; - return 8 * numComponents; - } else - return 4 * numComponents; -} - -const int baseAlignmentVec4Std140 = 16; - -// Return the size and alignment of a component of the given type. -// The size is returned in the 'size' parameter -// Return value is the alignment.. 
-int TIntermediate::getBaseAlignmentScalar(const TType& type, int& size) -{ - switch (type.getBasicType()) { - case EbtInt64: - case EbtUint64: - case EbtDouble: size = 8; return 8; -#ifdef AMD_EXTENSIONS - case EbtInt16: - case EbtUint16: - case EbtFloat16: size = 2; return 2; -#endif - default: size = 4; return 4; - } -} - -// Implement base-alignment and size rules from section 7.6.2.2 Standard Uniform Block Layout -// Operates recursively. -// -// If std140 is true, it does the rounding up to vec4 size required by std140, -// otherwise it does not, yielding std430 rules. -// -// The size is returned in the 'size' parameter -// -// The stride is only non-0 for arrays or matrices, and is the stride of the -// top-level object nested within the type. E.g., for an array of matrices, -// it is the distances needed between matrices, despite the rules saying the -// stride comes from the flattening down to vectors. -// -// Return value is the alignment of the type. -int TIntermediate::getBaseAlignment(const TType& type, int& size, int& stride, bool std140, bool rowMajor) -{ - int alignment; - - // When using the std140 storage layout, structures will be laid out in buffer - // storage with its members stored in monotonically increasing order based on their - // location in the declaration. A structure and each structure member have a base - // offset and a base alignment, from which an aligned offset is computed by rounding - // the base offset up to a multiple of the base alignment. The base offset of the first - // member of a structure is taken from the aligned offset of the structure itself. The - // base offset of all other structure members is derived by taking the offset of the - // last basic machine unit consumed by the previous member and adding one. Each - // structure member is stored in memory at its aligned offset. The members of a top- - // level uniform block are laid out in buffer storage by treating the uniform block as - // a structure with a base offset of zero. - // - // 1. If the member is a scalar consuming N basic machine units, the base alignment is N. - // - // 2. If the member is a two- or four-component vector with components consuming N basic - // machine units, the base alignment is 2N or 4N, respectively. - // - // 3. If the member is a three-component vector with components consuming N - // basic machine units, the base alignment is 4N. - // - // 4. If the member is an array of scalars or vectors, the base alignment and array - // stride are set to match the base alignment of a single array element, according - // to rules (1), (2), and (3), and rounded up to the base alignment of a vec4. The - // array may have padding at the end; the base offset of the member following - // the array is rounded up to the next multiple of the base alignment. - // - // 5. If the member is a column-major matrix with C columns and R rows, the - // matrix is stored identically to an array of C column vectors with R - // components each, according to rule (4). - // - // 6. If the member is an array of S column-major matrices with C columns and - // R rows, the matrix is stored identically to a row of S X C column vectors - // with R components each, according to rule (4). - // - // 7. If the member is a row-major matrix with C columns and R rows, the matrix - // is stored identically to an array of R row vectors with C components each, - // according to rule (4). - // - // 8. 
If the member is an array of S row-major matrices with C columns and R - // rows, the matrix is stored identically to a row of S X R row vectors with C - // components each, according to rule (4). - // - // 9. If the member is a structure, the base alignment of the structure is N , where - // N is the largest base alignment value of any of its members, and rounded - // up to the base alignment of a vec4. The individual members of this substructure - // are then assigned offsets by applying this set of rules recursively, - // where the base offset of the first member of the sub-structure is equal to the - // aligned offset of the structure. The structure may have padding at the end; - // the base offset of the member following the sub-structure is rounded up to - // the next multiple of the base alignment of the structure. - // - // 10. If the member is an array of S structures, the S elements of the array are laid - // out in order, according to rule (9). - // - // Assuming, for rule 10: The stride is the same as the size of an element. - - stride = 0; - int dummyStride; - - // rules 4, 6, 8, and 10 - if (type.isArray()) { - // TODO: perf: this might be flattened by using getCumulativeArraySize(), and a deref that discards all arrayness - TType derefType(type, 0); - alignment = getBaseAlignment(derefType, size, dummyStride, std140, rowMajor); - if (std140) - alignment = std::max(baseAlignmentVec4Std140, alignment); - RoundToPow2(size, alignment); - stride = size; // uses full matrix size for stride of an array of matrices (not quite what rule 6/8, but what's expected) - // uses the assumption for rule 10 in the comment above - size = stride * type.getOuterArraySize(); - return alignment; - } - - // rule 9 - if (type.getBasicType() == EbtStruct) { - const TTypeList& memberList = *type.getStruct(); - - size = 0; - int maxAlignment = std140 ? baseAlignmentVec4Std140 : 0; - for (size_t m = 0; m < memberList.size(); ++m) { - int memberSize; - // modify just the children's view of matrix layout, if there is one for this member - TLayoutMatrix subMatrixLayout = memberList[m].type->getQualifier().layoutMatrix; - int memberAlignment = getBaseAlignment(*memberList[m].type, memberSize, dummyStride, std140, - (subMatrixLayout != ElmNone) ? (subMatrixLayout == ElmRowMajor) : rowMajor); - maxAlignment = std::max(maxAlignment, memberAlignment); - RoundToPow2(size, memberAlignment); - size += memberSize; - } - - // The structure may have padding at the end; the base offset of - // the member following the sub-structure is rounded up to the next - // multiple of the base alignment of the structure. 
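As a worked example of rules 1, 3, 4 and the trailing-padding part of rule 9 listed above, consider layout(std140) uniform Block { float a; vec3 b; float c[2]; }. The computation below applies the quoted rules by hand; it is an illustration only, not a call into getBaseAlignment():

```cpp
// Hand-applied std140 rules for: layout(std140) uniform Block { float a; vec3 b; float c[2]; };
#include <cstdio>

static unsigned roundUp(unsigned value, unsigned alignment)
{
    return (value + alignment - 1) / alignment * alignment;
}

int main()
{
    unsigned offset = 0;

    unsigned a = roundUp(offset, 4);   // rule 1: float scalar, base alignment N = 4
    offset = a + 4;

    unsigned b = roundUp(offset, 16);  // rule 3: vec3, base alignment 4N = 16 (size stays 12)
    offset = b + 12;

    unsigned c = roundUp(offset, 16);  // rule 4: float[2], element alignment rounded up to vec4,
    offset = c + 2 * 16;               //         so the array stride is 16 per element

    unsigned blockSize = roundUp(offset, 16);  // tail padding up to the block's base alignment

    std::printf("a=%u b=%u c[0]=%u size=%u\n", a, b, c, blockSize);  // a=0 b=16 c[0]=32 size=64
}
```

The vec3 lands at offset 16 rather than 4 because of rule 3, and each float array element occupies a full 16-byte slot because of the vec4 round-up in rule 4, which is why std430 (which drops that round-up) packs the same block more tightly.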
- RoundToPow2(size, maxAlignment); - - return maxAlignment; - } - - // rule 1 - if (type.isScalar()) - return getBaseAlignmentScalar(type, size); - - // rules 2 and 3 - if (type.isVector()) { - int scalarAlign = getBaseAlignmentScalar(type, size); - switch (type.getVectorSize()) { - case 2: - size *= 2; - return 2 * scalarAlign; - default: - size *= type.getVectorSize(); - return 4 * scalarAlign; - } - } - - // rules 5 and 7 - if (type.isMatrix()) { - // rule 5: deref to row, not to column, meaning the size of vector is num columns instead of num rows - TType derefType(type, 0, rowMajor); - - alignment = getBaseAlignment(derefType, size, dummyStride, std140, rowMajor); - if (std140) - alignment = std::max(baseAlignmentVec4Std140, alignment); - RoundToPow2(size, alignment); - stride = size; // use intra-matrix stride for stride of a just a matrix - if (rowMajor) - size = stride * type.getMatrixRows(); - else - size = stride * type.getMatrixCols(); - - return alignment; - } - - assert(0); // all cases should be covered above - size = baseAlignmentVec4Std140; - return baseAlignmentVec4Std140; -} - -// To aid the basic HLSL rule about crossing vec4 boundaries. -bool TIntermediate::improperStraddle(const TType& type, int size, int offset) -{ - if (! type.isVector() || type.isArray()) - return false; - - return size <= 16 ? offset / 16 != (offset + size - 1) / 16 - : offset % 16 != 0; -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/localintermediate.h b/deps/glslang/glslang/MachineIndependent/localintermediate.h deleted file mode 100644 index 9f6e3da2..00000000 --- a/deps/glslang/glslang/MachineIndependent/localintermediate.h +++ /dev/null @@ -1,717 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2016 LunarG, Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. 
-// - -#ifndef _LOCAL_INTERMEDIATE_INCLUDED_ -#define _LOCAL_INTERMEDIATE_INCLUDED_ - -#include "../Include/intermediate.h" -#include "../Public/ShaderLang.h" -#include "Versions.h" - -#include -#include - -class TInfoSink; - -namespace glslang { - -struct TMatrixSelector { - int coord1; // stay agnostic about column/row; this is parse order - int coord2; -}; - -typedef int TVectorSelector; - -const int MaxSwizzleSelectors = 4; - -template -class TSwizzleSelectors { -public: - TSwizzleSelectors() : size_(0) { } - - void push_back(selectorType comp) - { - if (size_ < MaxSwizzleSelectors) - components[size_++] = comp; - } - void resize(int s) - { - assert(s <= size_); - size_ = s; - } - int size() const { return size_; } - selectorType operator[](int i) const - { - assert(i < MaxSwizzleSelectors); - return components[i]; - } - -private: - int size_; - selectorType components[MaxSwizzleSelectors]; -}; - -// -// Some helper structures for TIntermediate. Their contents are encapsulated -// by TIntermediate. -// - -// Used for call-graph algorithms for detecting recursion, missing bodies, and dead bodies. -// A "call" is a pair: . -// There can be duplicates. General assumption is the list is small. -struct TCall { - TCall(const TString& pCaller, const TString& pCallee) : caller(pCaller), callee(pCallee) { } - TString caller; - TString callee; - bool visited; - bool currentPath; - bool errorGiven; - int calleeBodyPosition; -}; - -// A generic 1-D range. -struct TRange { - TRange(int start, int last) : start(start), last(last) { } - bool overlap(const TRange& rhs) const - { - return last >= rhs.start && start <= rhs.last; - } - int start; - int last; -}; - -// An IO range is a 3-D rectangle; the set of (location, component, index) triples all lying -// within the same location range, component range, and index value. Locations don't alias unless -// all other dimensions of their range overlap. -struct TIoRange { - TIoRange(TRange location, TRange component, TBasicType basicType, int index) - : location(location), component(component), basicType(basicType), index(index) { } - bool overlap(const TIoRange& rhs) const - { - return location.overlap(rhs.location) && component.overlap(rhs.component) && index == rhs.index; - } - TRange location; - TRange component; - TBasicType basicType; - int index; -}; - -// An offset range is a 2-D rectangle; the set of (binding, offset) pairs all lying -// within the same binding and offset range. -struct TOffsetRange { - TOffsetRange(TRange binding, TRange offset) - : binding(binding), offset(offset) { } - bool overlap(const TOffsetRange& rhs) const - { - return binding.overlap(rhs.binding) && offset.overlap(rhs.offset); - } - TRange binding; - TRange offset; -}; - -// Things that need to be tracked per xfb buffer. -struct TXfbBuffer { - TXfbBuffer() : stride(TQualifier::layoutXfbStrideEnd), implicitStride(0), containsDouble(false) { } - std::vector ranges; // byte offsets that have already been assigned - unsigned int stride; - unsigned int implicitStride; - bool containsDouble; -}; - -// Track a set of strings describing how the module was processed. -// Using the form: -// process arg0 arg1 arg2 ... -// process arg0 arg1 arg2 ... 
-// where everything is textual, and there can be zero or more arguments -class TProcesses { -public: - TProcesses() {} - ~TProcesses() {} - - void addProcess(const char* process) - { - processes.push_back(process); - } - void addProcess(const std::string& process) - { - processes.push_back(process); - } - void addArgument(int arg) - { - processes.back().append(" "); - std::string argString = std::to_string(arg); - processes.back().append(argString); - } - void addArgument(const char* arg) - { - processes.back().append(" "); - processes.back().append(arg); - } - void addArgument(const std::string& arg) - { - processes.back().append(" "); - processes.back().append(arg); - } - void addIfNonZero(const char* process, int value) - { - if (value != 0) { - addProcess(process); - addArgument(value); - } - } - - const std::vector& getProcesses() const { return processes; } - -private: - std::vector processes; -}; - -class TSymbolTable; -class TSymbol; -class TVariable; - -// -// Set of helper functions to help parse and build the tree. -// -class TIntermediate { -public: - explicit TIntermediate(EShLanguage l, int v = 0, EProfile p = ENoProfile) : - implicitThisName("@this"), - language(l), source(EShSourceNone), profile(p), version(v), treeRoot(0), - numEntryPoints(0), numErrors(0), numPushConstants(0), recursive(false), - invocations(TQualifier::layoutNotSet), vertices(TQualifier::layoutNotSet), - inputPrimitive(ElgNone), outputPrimitive(ElgNone), - pixelCenterInteger(false), originUpperLeft(false), - vertexSpacing(EvsNone), vertexOrder(EvoNone), pointMode(false), earlyFragmentTests(false), - postDepthCoverage(false), depthLayout(EldNone), depthReplacing(false), - blendEquations(0), xfbMode(false), multiStream(false), -#ifdef NV_EXTENSIONS - layoutOverrideCoverage(false), - geoPassthroughEXT(false), -#endif - shiftSamplerBinding(0), - shiftTextureBinding(0), - shiftImageBinding(0), - shiftUboBinding(0), - shiftSsboBinding(0), - shiftUavBinding(0), - autoMapBindings(false), - autoMapLocations(false), - flattenUniformArrays(false), - useUnknownFormat(false), - hlslOffsets(false), - useStorageBuffer(false), - hlslIoMapping(false), - textureSamplerTransformMode(EShTexSampTransKeep) - { - localSize[0] = 1; - localSize[1] = 1; - localSize[2] = 1; - localSizeSpecId[0] = TQualifier::layoutNotSet; - localSizeSpecId[1] = TQualifier::layoutNotSet; - localSizeSpecId[2] = TQualifier::layoutNotSet; - xfbBuffers.resize(TQualifier::layoutXfbBufferEnd); - } - void setLimits(const TBuiltInResource& r) { resources = r; } - - bool postProcess(TIntermNode*, EShLanguage); - void output(TInfoSink&, bool tree); - void removeTree(); - - void setSource(EShSource s) { source = s; } - EShSource getSource() const { return source; } - void setEntryPointName(const char* ep) - { - entryPointName = ep; - processes.addProcess("entry-point"); - processes.addArgument(entryPointName); - } - void setEntryPointMangledName(const char* ep) { entryPointMangledName = ep; } - const std::string& getEntryPointName() const { return entryPointName; } - const std::string& getEntryPointMangledName() const { return entryPointMangledName; } - - void setShiftSamplerBinding(unsigned int shift) - { - shiftSamplerBinding = shift; - processes.addIfNonZero("shift-sampler-binding", shift); - } - unsigned int getShiftSamplerBinding() const { return shiftSamplerBinding; } - void setShiftTextureBinding(unsigned int shift) - { - shiftTextureBinding = shift; - processes.addIfNonZero("shift-texture-binding", shift); - } - unsigned int getShiftTextureBinding() 
const { return shiftTextureBinding; } - void setShiftImageBinding(unsigned int shift) - { - shiftImageBinding = shift; - processes.addIfNonZero("shift-image-binding", shift); - } - unsigned int getShiftImageBinding() const { return shiftImageBinding; } - void setShiftUboBinding(unsigned int shift) - { - shiftUboBinding = shift; - processes.addIfNonZero("shift-UBO-binding", shift); - } - unsigned int getShiftUboBinding() const { return shiftUboBinding; } - void setShiftSsboBinding(unsigned int shift) - { - shiftSsboBinding = shift; - processes.addIfNonZero("shift-ssbo-binding", shift); - } - unsigned int getShiftSsboBinding() const { return shiftSsboBinding; } - void setShiftUavBinding(unsigned int shift) - { - shiftUavBinding = shift; - processes.addIfNonZero("shift-uav-binding", shift); - } - unsigned int getShiftUavBinding() const { return shiftUavBinding; } - void setResourceSetBinding(const std::vector& shift) - { - resourceSetBinding = shift; - if (shift.size() > 0) { - processes.addProcess("resource-set-binding"); - for (int s = 0; s < (int)shift.size(); ++s) - processes.addArgument(shift[s]); - } - } - const std::vector& getResourceSetBinding() const { return resourceSetBinding; } - void setAutoMapBindings(bool map) - { - autoMapBindings = map; - if (autoMapBindings) - processes.addProcess("auto-map-bindings"); - } - bool getAutoMapBindings() const { return autoMapBindings; } - void setAutoMapLocations(bool map) - { - autoMapLocations = map; - if (autoMapLocations) - processes.addProcess("auto-map-locations"); - } - bool getAutoMapLocations() const { return autoMapLocations; } - void setFlattenUniformArrays(bool flatten) - { - flattenUniformArrays = flatten; - if (flattenUniformArrays) - processes.addProcess("flatten-uniform-arrays"); - } - bool getFlattenUniformArrays() const { return flattenUniformArrays; } - void setNoStorageFormat(bool b) - { - useUnknownFormat = b; - if (useUnknownFormat) - processes.addProcess("no-storage-format"); - } - bool getNoStorageFormat() const { return useUnknownFormat; } - void setHlslOffsets() - { - hlslOffsets = true; - if (hlslOffsets) - processes.addProcess("hlsl-offsets"); - } - bool usingHlslOFfsets() const { return hlslOffsets; } - void setUseStorageBuffer() - { - useStorageBuffer = true; - processes.addProcess("use-storage-buffer"); - } - bool usingStorageBuffer() const { return useStorageBuffer; } - void setHlslIoMapping(bool b) - { - hlslIoMapping = b; - if (hlslIoMapping) - processes.addProcess("hlsl-iomap"); - } - bool usingHlslIoMapping() { return hlslIoMapping; } - - void setTextureSamplerTransformMode(EShTextureSamplerTransformMode mode) { textureSamplerTransformMode = mode; } - - void setVersion(int v) { version = v; } - int getVersion() const { return version; } - void setProfile(EProfile p) { profile = p; } - EProfile getProfile() const { return profile; } - void setSpv(const SpvVersion& s) - { - spvVersion = s; - - // client processes - if (spvVersion.vulkan > 0) - processes.addProcess("client vulkan100"); - if (spvVersion.openGl > 0) - processes.addProcess("client opengl100"); - - // target-environment processes - if (spvVersion.vulkan == 100) - processes.addProcess("target-env vulkan1.0"); - else if (spvVersion.vulkan > 0) - processes.addProcess("target-env vulkanUnknown"); - if (spvVersion.openGl > 0) - processes.addProcess("target-env opengl"); - } - const SpvVersion& getSpv() const { return spvVersion; } - EShLanguage getStage() const { return language; } - void addRequestedExtension(const char* extension) { 
requestedExtensions.insert(extension); } - const std::set& getRequestedExtensions() const { return requestedExtensions; } - - void setTreeRoot(TIntermNode* r) { treeRoot = r; } - TIntermNode* getTreeRoot() const { return treeRoot; } - void incrementEntryPointCount() { ++numEntryPoints; } - int getNumEntryPoints() const { return numEntryPoints; } - int getNumErrors() const { return numErrors; } - void addPushConstantCount() { ++numPushConstants; } - bool isRecursive() const { return recursive; } - - TIntermSymbol* addSymbol(const TVariable&); - TIntermSymbol* addSymbol(const TVariable&, const TSourceLoc&); - TIntermSymbol* addSymbol(const TType&, const TSourceLoc&); - TIntermSymbol* addSymbol(const TIntermSymbol&); - TIntermTyped* addConversion(TOperator, const TType&, TIntermTyped*) const; - TIntermTyped* addUniShapeConversion(TOperator, const TType&, TIntermTyped*); - void addBiShapeConversion(TOperator, TIntermTyped*& lhsNode, TIntermTyped*& rhsNode); - TIntermTyped* addShapeConversion(const TType&, TIntermTyped*); - TIntermTyped* addBinaryMath(TOperator, TIntermTyped* left, TIntermTyped* right, TSourceLoc); - TIntermTyped* addAssign(TOperator op, TIntermTyped* left, TIntermTyped* right, TSourceLoc); - TIntermTyped* addIndex(TOperator op, TIntermTyped* base, TIntermTyped* index, TSourceLoc); - TIntermTyped* addUnaryMath(TOperator, TIntermTyped* child, TSourceLoc); - TIntermTyped* addBuiltInFunctionCall(const TSourceLoc& line, TOperator, bool unary, TIntermNode*, const TType& returnType); - bool canImplicitlyPromote(TBasicType from, TBasicType to, TOperator op = EOpNull) const; - TOperator mapTypeToConstructorOp(const TType&) const; - TIntermAggregate* growAggregate(TIntermNode* left, TIntermNode* right); - TIntermAggregate* growAggregate(TIntermNode* left, TIntermNode* right, const TSourceLoc&); - TIntermAggregate* makeAggregate(TIntermNode* node); - TIntermAggregate* makeAggregate(TIntermNode* node, const TSourceLoc&); - TIntermAggregate* makeAggregate(const TSourceLoc&); - TIntermTyped* setAggregateOperator(TIntermNode*, TOperator, const TType& type, TSourceLoc); - bool areAllChildConst(TIntermAggregate* aggrNode); - TIntermTyped* addSelection(TIntermTyped* cond, TIntermNodePair code, const TSourceLoc&, TSelectionControl = ESelectionControlNone); - TIntermTyped* addSelection(TIntermTyped* cond, TIntermTyped* trueBlock, TIntermTyped* falseBlock, const TSourceLoc&, TSelectionControl = ESelectionControlNone); - TIntermTyped* addComma(TIntermTyped* left, TIntermTyped* right, const TSourceLoc&); - TIntermTyped* addMethod(TIntermTyped*, const TType&, const TString*, const TSourceLoc&); - TIntermConstantUnion* addConstantUnion(const TConstUnionArray&, const TType&, const TSourceLoc&, bool literal = false) const; - TIntermConstantUnion* addConstantUnion(int, const TSourceLoc&, bool literal = false) const; - TIntermConstantUnion* addConstantUnion(unsigned int, const TSourceLoc&, bool literal = false) const; - TIntermConstantUnion* addConstantUnion(long long, const TSourceLoc&, bool literal = false) const; - TIntermConstantUnion* addConstantUnion(unsigned long long, const TSourceLoc&, bool literal = false) const; -#ifdef AMD_EXTENSIONS - TIntermConstantUnion* addConstantUnion(short, const TSourceLoc&, bool literal = false) const; - TIntermConstantUnion* addConstantUnion(unsigned short, const TSourceLoc&, bool literal = false) const; - -#endif - TIntermConstantUnion* addConstantUnion(bool, const TSourceLoc&, bool literal = false) const; - TIntermConstantUnion* addConstantUnion(double, TBasicType, 
const TSourceLoc&, bool literal = false) const; - TIntermConstantUnion* addConstantUnion(const TString*, const TSourceLoc&, bool literal = false) const; - TIntermTyped* promoteConstantUnion(TBasicType, TIntermConstantUnion*) const; - bool parseConstTree(TIntermNode*, TConstUnionArray, TOperator, const TType&, bool singleConstantParam = false); - TIntermLoop* addLoop(TIntermNode*, TIntermTyped*, TIntermTyped*, bool testFirst, const TSourceLoc&, TLoopControl = ELoopControlNone); - TIntermAggregate* addForLoop(TIntermNode*, TIntermNode*, TIntermTyped*, TIntermTyped*, bool testFirst, const TSourceLoc&, TLoopControl = ELoopControlNone); - TIntermBranch* addBranch(TOperator, const TSourceLoc&); - TIntermBranch* addBranch(TOperator, TIntermTyped*, const TSourceLoc&); - template TIntermTyped* addSwizzle(TSwizzleSelectors&, const TSourceLoc&); - - // Low level functions to add nodes (no conversions or other higher level transformations) - // If a type is provided, the node's type will be set to it. - TIntermBinary* addBinaryNode(TOperator op, TIntermTyped* left, TIntermTyped* right, TSourceLoc) const; - TIntermBinary* addBinaryNode(TOperator op, TIntermTyped* left, TIntermTyped* right, TSourceLoc, const TType&) const; - TIntermUnary* addUnaryNode(TOperator op, TIntermTyped* child, TSourceLoc) const; - TIntermUnary* addUnaryNode(TOperator op, TIntermTyped* child, TSourceLoc, const TType&) const; - - // Add conversion from node's type to given basic type. - TIntermTyped* convertToBasicType(TOperator op, TBasicType basicType, TIntermTyped* node) const; - - // Constant folding (in Constant.cpp) - TIntermTyped* fold(TIntermAggregate* aggrNode); - TIntermTyped* foldConstructor(TIntermAggregate* aggrNode); - TIntermTyped* foldDereference(TIntermTyped* node, int index, const TSourceLoc&); - TIntermTyped* foldSwizzle(TIntermTyped* node, TSwizzleSelectors& fields, const TSourceLoc&); - - // Tree ops - static const TIntermTyped* findLValueBase(const TIntermTyped*, bool swizzleOkay); - - // Linkage related - void addSymbolLinkageNodes(TIntermAggregate*& linkage, EShLanguage, TSymbolTable&); - void addSymbolLinkageNode(TIntermAggregate*& linkage, const TSymbol&); - - bool setInvocations(int i) - { - if (invocations != TQualifier::layoutNotSet) - return invocations == i; - invocations = i; - return true; - } - int getInvocations() const { return invocations; } - bool setVertices(int m) - { - if (vertices != TQualifier::layoutNotSet) - return vertices == m; - vertices = m; - return true; - } - int getVertices() const { return vertices; } - bool setInputPrimitive(TLayoutGeometry p) - { - if (inputPrimitive != ElgNone) - return inputPrimitive == p; - inputPrimitive = p; - return true; - } - TLayoutGeometry getInputPrimitive() const { return inputPrimitive; } - bool setVertexSpacing(TVertexSpacing s) - { - if (vertexSpacing != EvsNone) - return vertexSpacing == s; - vertexSpacing = s; - return true; - } - TVertexSpacing getVertexSpacing() const { return vertexSpacing; } - bool setVertexOrder(TVertexOrder o) - { - if (vertexOrder != EvoNone) - return vertexOrder == o; - vertexOrder = o; - return true; - } - TVertexOrder getVertexOrder() const { return vertexOrder; } - void setPointMode() { pointMode = true; } - bool getPointMode() const { return pointMode; } - - bool setLocalSize(int dim, int size) - { - if (localSize[dim] > 1) - return size == localSize[dim]; - localSize[dim] = size; - return true; - } - unsigned int getLocalSize(int dim) const { return localSize[dim]; } - - bool setLocalSizeSpecId(int dim, int id) 
- { - if (localSizeSpecId[dim] != TQualifier::layoutNotSet) - return id == localSizeSpecId[dim]; - localSizeSpecId[dim] = id; - return true; - } - int getLocalSizeSpecId(int dim) const { return localSizeSpecId[dim]; } - - void setXfbMode() { xfbMode = true; } - bool getXfbMode() const { return xfbMode; } - void setMultiStream() { multiStream = true; } - bool isMultiStream() const { return multiStream; } - bool setOutputPrimitive(TLayoutGeometry p) - { - if (outputPrimitive != ElgNone) - return outputPrimitive == p; - outputPrimitive = p; - return true; - } - TLayoutGeometry getOutputPrimitive() const { return outputPrimitive; } - void setOriginUpperLeft() { originUpperLeft = true; } - bool getOriginUpperLeft() const { return originUpperLeft; } - void setPixelCenterInteger() { pixelCenterInteger = true; } - bool getPixelCenterInteger() const { return pixelCenterInteger; } - void setEarlyFragmentTests() { earlyFragmentTests = true; } - bool getEarlyFragmentTests() const { return earlyFragmentTests; } - void setPostDepthCoverage() { postDepthCoverage = true; } - bool getPostDepthCoverage() const { return postDepthCoverage; } - bool setDepth(TLayoutDepth d) - { - if (depthLayout != EldNone) - return depthLayout == d; - depthLayout = d; - return true; - } - TLayoutDepth getDepth() const { return depthLayout; } - void setDepthReplacing() { depthReplacing = true; } - bool isDepthReplacing() const { return depthReplacing; } - - void addBlendEquation(TBlendEquationShift b) { blendEquations |= (1 << b); } - unsigned int getBlendEquations() const { return blendEquations; } - - void addToCallGraph(TInfoSink&, const TString& caller, const TString& callee); - void merge(TInfoSink&, TIntermediate&); - void finalCheck(TInfoSink&, bool keepUncalled); - - void addIoAccessed(const TString& name) { ioAccessed.insert(name); } - bool inIoAccessed(const TString& name) const { return ioAccessed.find(name) != ioAccessed.end(); } - - int addUsedLocation(const TQualifier&, const TType&, bool& typeCollision); - int checkLocationRange(int set, const TIoRange& range, const TType&, bool& typeCollision); - int addUsedOffsets(int binding, int offset, int numOffsets); - bool addUsedConstantId(int id); - int computeTypeLocationSize(const TType&) const; - - bool setXfbBufferStride(int buffer, unsigned stride) - { - if (xfbBuffers[buffer].stride != TQualifier::layoutXfbStrideEnd) - return xfbBuffers[buffer].stride == stride; - xfbBuffers[buffer].stride = stride; - return true; - } - int addXfbBufferOffset(const TType&); - unsigned int computeTypeXfbSize(const TType&, bool& containsDouble) const; - static int getBaseAlignmentScalar(const TType&, int& size); - static int getBaseAlignment(const TType&, int& size, int& stride, bool std140, bool rowMajor); - static bool improperStraddle(const TType& type, int size, int offset); - bool promote(TIntermOperator*); - -#ifdef NV_EXTENSIONS - void setLayoutOverrideCoverage() { layoutOverrideCoverage = true; } - bool getLayoutOverrideCoverage() const { return layoutOverrideCoverage; } - void setGeoPassthroughEXT() { geoPassthroughEXT = true; } - bool getGeoPassthroughEXT() const { return geoPassthroughEXT; } -#endif - - const char* addSemanticName(const TString& name) - { - return semanticNameSet.insert(name).first->c_str(); - } - - void setSourceFile(const char* file) { sourceFile = file; } - const std::string& getSourceFile() const { return sourceFile; } - void addSourceText(const char* text) { sourceText = sourceText + text; } - const std::string& getSourceText() const { return 
sourceText; } - void addProcesses(const std::vector& p) { - for (int i = 0; i < (int)p.size(); ++i) - processes.addProcess(p[i]); - } - void addProcess(const std::string& process) { processes.addProcess(process); } - void addProcessArgument(const std::string& arg) { processes.addArgument(arg); } - const std::vector& getProcesses() const { return processes.getProcesses(); } - - const char* const implicitThisName; - -protected: - TIntermSymbol* addSymbol(int Id, const TString&, const TType&, const TConstUnionArray&, TIntermTyped* subtree, const TSourceLoc&); - void error(TInfoSink& infoSink, const char*); - void warn(TInfoSink& infoSink, const char*); - void mergeBodies(TInfoSink&, TIntermSequence& globals, const TIntermSequence& unitGlobals); - void mergeLinkerObjects(TInfoSink&, TIntermSequence& linkerObjects, const TIntermSequence& unitLinkerObjects); - void mergeImplicitArraySizes(TType&, const TType&); - void mergeErrorCheck(TInfoSink&, const TIntermSymbol&, const TIntermSymbol&, bool crossStage); - void checkCallGraphCycles(TInfoSink&); - void checkCallGraphBodies(TInfoSink&, bool keepUncalled); - void inOutLocationCheck(TInfoSink&); - TIntermSequence& findLinkerObjects() const; - bool userOutputUsed() const; - bool isSpecializationOperation(const TIntermOperator&) const; - bool promoteUnary(TIntermUnary&); - bool promoteBinary(TIntermBinary&); - void addSymbolLinkageNode(TIntermAggregate*& linkage, TSymbolTable&, const TString&); - bool promoteAggregate(TIntermAggregate&); - void pushSelector(TIntermSequence&, const TVectorSelector&, const TSourceLoc&); - void pushSelector(TIntermSequence&, const TMatrixSelector&, const TSourceLoc&); - bool specConstantPropagates(const TIntermTyped&, const TIntermTyped&); - void performTextureUpgradeAndSamplerRemovalTransformation(TIntermNode* root); - - const EShLanguage language; // stage, known at construction time - EShSource source; // source language, known a bit later - std::string entryPointName; - std::string entryPointMangledName; - - EProfile profile; - int version; - SpvVersion spvVersion; - TIntermNode* treeRoot; - std::set requestedExtensions; // cumulation of all enabled or required extensions; not connected to what subset of the shader used them - TBuiltInResource resources; - int numEntryPoints; - int numErrors; - int numPushConstants; - bool recursive; - int invocations; - int vertices; - TLayoutGeometry inputPrimitive; - TLayoutGeometry outputPrimitive; - bool pixelCenterInteger; - bool originUpperLeft; - TVertexSpacing vertexSpacing; - TVertexOrder vertexOrder; - bool pointMode; - int localSize[3]; - int localSizeSpecId[3]; - bool earlyFragmentTests; - bool postDepthCoverage; - TLayoutDepth depthLayout; - bool depthReplacing; - int blendEquations; // an 'or'ing of masks of shifts of TBlendEquationShift - bool xfbMode; - bool multiStream; - -#ifdef NV_EXTENSIONS - bool layoutOverrideCoverage; - bool geoPassthroughEXT; -#endif - - unsigned int shiftSamplerBinding; - unsigned int shiftTextureBinding; - unsigned int shiftImageBinding; - unsigned int shiftUboBinding; - unsigned int shiftSsboBinding; - unsigned int shiftUavBinding; - std::vector resourceSetBinding; - bool autoMapBindings; - bool autoMapLocations; - bool flattenUniformArrays; - bool useUnknownFormat; - bool hlslOffsets; - bool useStorageBuffer; - bool hlslIoMapping; - - typedef std::list TGraph; - TGraph callGraph; - - std::set ioAccessed; // set of names of statically read/written I/O that might need extra checking - std::vector usedIo[4]; // sets of used locations, one 
for each of in, out, uniform, and buffers - std::vector usedAtomics; // sets of bindings used by atomic counters - std::vector xfbBuffers; // all the data we need to track per xfb buffer - std::unordered_set usedConstantId; // specialization constant ids used - std::set semanticNameSet; - - EShTextureSamplerTransformMode textureSamplerTransformMode; - - // source code of shader, useful as part of debug information - std::string sourceFile; - std::string sourceText; - - // for OpModuleProcessed, or equivalent - TProcesses processes; - -private: - void operator=(TIntermediate&); // prevent assignments -}; - -} // end namespace glslang - -#endif // _LOCAL_INTERMEDIATE_INCLUDED_ diff --git a/deps/glslang/glslang/MachineIndependent/parseConst.cpp b/deps/glslang/glslang/MachineIndependent/parseConst.cpp deleted file mode 100644 index 1a8e6d99..00000000 --- a/deps/glslang/glslang/MachineIndependent/parseConst.cpp +++ /dev/null @@ -1,204 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -// -// Traverse a tree of constants to create a single folded constant. -// It should only be used when the whole tree is known to be constant. 
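The localintermediate.h removals above include the TProcesses list ("for OpModuleProcessed, or equivalent"), which the various set*() methods fill with space-separated "name argument" strings describing how the compile was configured. A minimal sketch of that accumulation pattern, assuming plain std:: containers rather than glslang's own types (ProcessList and the sample entries below are illustrative, not the library API):

#include <iostream>
#include <string>
#include <vector>

class ProcessList {
public:
    void addProcess(const std::string& process) { processes.push_back(process); }
    void addArgument(const std::string& arg)    { processes.back() += " " + arg; }
    void addArgument(int arg)                   { processes.back() += " " + std::to_string(arg); }

    // Record "name value" only when the value is non-zero, as the deleted
    // addIfNonZero() does for the shift-*-binding options.
    void addIfNonZero(const std::string& process, int value)
    {
        if (value != 0) {
            addProcess(process);
            addArgument(value);
        }
    }

    const std::vector<std::string>& getProcesses() const { return processes; }

private:
    std::vector<std::string> processes;
};

int main()
{
    ProcessList p;
    p.addProcess("entry-point");
    p.addArgument("main");
    p.addIfNonZero("shift-sampler-binding", 0);   // value is zero, nothing recorded
    p.addIfNonZero("shift-UBO-binding", 16);      // recorded as "shift-UBO-binding 16"

    for (const auto& s : p.getProcesses())
        std::cout << s << '\n';                   // entry-point main
                                                  // shift-UBO-binding 16
}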
-// - -#include "ParseHelper.h" - -namespace glslang { - -class TConstTraverser : public TIntermTraverser { -public: - TConstTraverser(const TConstUnionArray& cUnion, bool singleConstParam, TOperator constructType, const TType& t) - : unionArray(cUnion), type(t), - constructorType(constructType), singleConstantParam(singleConstParam), error(false), isMatrix(false), - matrixCols(0), matrixRows(0) { index = 0; tOp = EOpNull; } - - virtual void visitConstantUnion(TIntermConstantUnion* node); - virtual bool visitAggregate(TVisit, TIntermAggregate* node); - - int index; - TConstUnionArray unionArray; - TOperator tOp; - const TType& type; - TOperator constructorType; - bool singleConstantParam; - bool error; - int size; // size of the constructor ( 4 for vec4) - bool isMatrix; - int matrixCols; - int matrixRows; - -protected: - TConstTraverser(TConstTraverser&); - TConstTraverser& operator=(TConstTraverser&); -}; - -bool TConstTraverser::visitAggregate(TVisit /* visit */, TIntermAggregate* node) -{ - if (! node->isConstructor() && node->getOp() != EOpComma) { - error = true; - - return false; - } - - bool flag = node->getSequence().size() == 1 && node->getSequence()[0]->getAsTyped()->getAsConstantUnion(); - if (flag) { - singleConstantParam = true; - constructorType = node->getOp(); - size = node->getType().computeNumComponents(); - - if (node->getType().isMatrix()) { - isMatrix = true; - matrixCols = node->getType().getMatrixCols(); - matrixRows = node->getType().getMatrixRows(); - } - } - - for (TIntermSequence::iterator p = node->getSequence().begin(); - p != node->getSequence().end(); p++) { - - if (node->getOp() == EOpComma) - index = 0; - - (*p)->traverse(this); - } - if (flag) - { - singleConstantParam = false; - constructorType = EOpNull; - size = 0; - isMatrix = false; - matrixCols = 0; - matrixRows = 0; - } - - return false; -} - -void TConstTraverser::visitConstantUnion(TIntermConstantUnion* node) -{ - TConstUnionArray leftUnionArray(unionArray); - int instanceSize = type.computeNumComponents(); - - if (index >= instanceSize) - return; - - if (! singleConstantParam) { - int rightUnionSize = node->getType().computeNumComponents(); - - const TConstUnionArray& rightUnionArray = node->getConstArray(); - for (int i = 0; i < rightUnionSize; i++) { - if (index >= instanceSize) - return; - leftUnionArray[index] = rightUnionArray[i]; - - index++; - } - } else { - int endIndex = index + size; - const TConstUnionArray& rightUnionArray = node->getConstArray(); - if (! isMatrix) { - int count = 0; - int nodeComps = node->getType().computeNumComponents(); - for (int i = index; i < endIndex; i++) { - if (i >= instanceSize) - return; - - leftUnionArray[i] = rightUnionArray[count]; - - (index)++; - - if (nodeComps > 1) - count++; - } - } else { - // constructing a matrix, but from what? - if (node->isMatrix()) { - // Matrix from a matrix; this has the outer matrix, node is the argument matrix. - // Traverse the outer, potentially bigger matrix, fill in missing pieces with the - // identity matrix. 
- for (int c = 0; c < matrixCols; ++c) { - for (int r = 0; r < matrixRows; ++r) { - int targetOffset = index + c * matrixRows + r; - if (r < node->getType().getMatrixRows() && c < node->getType().getMatrixCols()) { - int srcOffset = c * node->getType().getMatrixRows() + r; - leftUnionArray[targetOffset] = rightUnionArray[srcOffset]; - } else if (r == c) - leftUnionArray[targetOffset].setDConst(1.0); - else - leftUnionArray[targetOffset].setDConst(0.0); - } - } - } else { - // matrix from vector - int count = 0; - const int startIndex = index; - int nodeComps = node->getType().computeNumComponents(); - for (int i = startIndex; i < endIndex; i++) { - if (i >= instanceSize) - return; - if (i == startIndex || (i - startIndex) % (matrixRows + 1) == 0 ) - leftUnionArray[i] = rightUnionArray[count]; - else - leftUnionArray[i].setDConst(0.0); - - index++; - - if (nodeComps > 1) - count++; - } - } - } - } -} - -bool TIntermediate::parseConstTree(TIntermNode* root, TConstUnionArray unionArray, TOperator constructorType, const TType& t, bool singleConstantParam) -{ - if (root == 0) - return false; - - TConstTraverser it(unionArray, singleConstantParam, constructorType, t); - - root->traverse(&it); - if (it.error) - return true; - else - return false; -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/parseVersions.h b/deps/glslang/glslang/MachineIndependent/parseVersions.h deleted file mode 100644 index 5f26b437..00000000 --- a/deps/glslang/glslang/MachineIndependent/parseVersions.h +++ /dev/null @@ -1,140 +0,0 @@ -// -// Copyright (C) 2016 Google, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -// This is implemented in Versions.cpp - -#ifndef _PARSE_VERSIONS_INCLUDED_ -#define _PARSE_VERSIONS_INCLUDED_ - -#include "../Public/ShaderLang.h" -#include "../Include/InfoSink.h" -#include "Scan.h" - -#include - -namespace glslang { - -// -// Base class for parse helpers. -// This just has version-related information and checking. 
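The matrix-from-matrix branch in the traverser above fills a larger constant matrix from a smaller argument matrix: overlapping components are copied and everything else comes from the identity matrix. A self-contained sketch of that fill, assuming column-major storage as in the original indexing (offset = column * rows + row); the function and types here are illustrative stand-ins, not glslang's:

#include <iostream>
#include <vector>

std::vector<double> constructMatrix(const std::vector<double>& src, int srcCols, int srcRows,
                                    int dstCols, int dstRows)
{
    std::vector<double> dst(dstCols * dstRows);
    for (int c = 0; c < dstCols; ++c)
        for (int r = 0; r < dstRows; ++r) {
            double v;
            if (r < srcRows && c < srcCols)
                v = src[c * srcRows + r];     // inside the argument matrix: copy
            else
                v = (r == c) ? 1.0 : 0.0;     // outside it: identity
            dst[c * dstRows + r] = v;
        }
    return dst;
}

int main()
{
    // Build a mat3 from a mat2 holding {1,2,3,4} (column major).
    auto m = constructMatrix({1, 2, 3, 4}, 2, 2, 3, 3);
    for (int r = 0; r < 3; ++r) {             // print row by row
        for (int c = 0; c < 3; ++c)
            std::cout << m[c * 3 + r] << ' ';
        std::cout << '\n';
    }
    // 1 3 0
    // 2 4 0
    // 0 0 1
}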
-// This class should be sufficient for preprocessing. -// -class TParseVersions { -public: - TParseVersions(TIntermediate& interm, int version, EProfile profile, - const SpvVersion& spvVersion, EShLanguage language, TInfoSink& infoSink, - bool forwardCompatible, EShMessages messages) - : infoSink(infoSink), version(version), profile(profile), language(language), - spvVersion(spvVersion), forwardCompatible(forwardCompatible), - intermediate(interm), messages(messages), numErrors(0), currentScanner(0) { } - virtual ~TParseVersions() { } - virtual void initializeExtensionBehavior(); - virtual void requireProfile(const TSourceLoc&, int queryProfiles, const char* featureDesc); - virtual void profileRequires(const TSourceLoc&, int queryProfiles, int minVersion, int numExtensions, const char* const extensions[], const char* featureDesc); - virtual void profileRequires(const TSourceLoc&, int queryProfiles, int minVersion, const char* const extension, const char* featureDesc); - virtual void requireStage(const TSourceLoc&, EShLanguageMask, const char* featureDesc); - virtual void requireStage(const TSourceLoc&, EShLanguage, const char* featureDesc); - virtual void checkDeprecated(const TSourceLoc&, int queryProfiles, int depVersion, const char* featureDesc); - virtual void requireNotRemoved(const TSourceLoc&, int queryProfiles, int removedVersion, const char* featureDesc); - virtual void unimplemented(const TSourceLoc&, const char* featureDesc); - virtual void requireExtensions(const TSourceLoc&, int numExtensions, const char* const extensions[], const char* featureDesc); - virtual void ppRequireExtensions(const TSourceLoc&, int numExtensions, const char* const extensions[], const char* featureDesc); - virtual TExtensionBehavior getExtensionBehavior(const char*); - virtual bool extensionTurnedOn(const char* const extension); - virtual bool extensionsTurnedOn(int numExtensions, const char* const extensions[]); - virtual void updateExtensionBehavior(int line, const char* const extension, const char* behavior); - virtual void fullIntegerCheck(const TSourceLoc&, const char* op); - virtual void doubleCheck(const TSourceLoc&, const char* op); -#ifdef AMD_EXTENSIONS - virtual void int16Check(const TSourceLoc& loc, const char* op, bool builtIn = false); - virtual void float16Check(const TSourceLoc&, const char* op, bool builtIn = false); -#endif - virtual void int64Check(const TSourceLoc&, const char* op, bool builtIn = false); - virtual void spvRemoved(const TSourceLoc&, const char* op); - virtual void vulkanRemoved(const TSourceLoc&, const char* op); - virtual void requireVulkan(const TSourceLoc&, const char* op); - virtual void requireSpv(const TSourceLoc&, const char* op); - virtual bool checkExtensionsRequested(const TSourceLoc&, int numExtensions, const char* const extensions[], const char* featureDesc); - virtual void updateExtensionBehavior(const char* const extension, TExtensionBehavior); - - virtual void C_DECL error(const TSourceLoc&, const char* szReason, const char* szToken, - const char* szExtraInfoFormat, ...) = 0; - virtual void C_DECL warn(const TSourceLoc&, const char* szReason, const char* szToken, - const char* szExtraInfoFormat, ...) = 0; - virtual void C_DECL ppError(const TSourceLoc&, const char* szReason, const char* szToken, - const char* szExtraInfoFormat, ...) = 0; - virtual void C_DECL ppWarn(const TSourceLoc&, const char* szReason, const char* szToken, - const char* szExtraInfoFormat, ...) 
= 0; - - void addError() { ++numErrors; } - int getNumErrors() const { return numErrors; } - - void setScanner(TInputScanner* scanner) { currentScanner = scanner; } - TInputScanner* getScanner() const { return currentScanner; } - const TSourceLoc& getCurrentLoc() const { return currentScanner->getSourceLoc(); } - void setCurrentLine(int line) { currentScanner->setLine(line); } - void setCurrentColumn(int col) { currentScanner->setColumn(col); } - void setCurrentSourceName(const char* name) { currentScanner->setFile(name); } - void setCurrentString(int string) { currentScanner->setString(string); } - - void getPreamble(std::string&); - bool relaxedErrors() const { return (messages & EShMsgRelaxedErrors) != 0; } - bool suppressWarnings() const { return (messages & EShMsgSuppressWarnings) != 0; } - bool isReadingHLSL() const { return (messages & EShMsgReadHlsl) == EShMsgReadHlsl; } - - TInfoSink& infoSink; - - // compilation mode - int version; // version, updated by #version in the shader - EProfile profile; // the declared profile in the shader (core by default) - EShLanguage language; // really the stage - SpvVersion spvVersion; - bool forwardCompatible; // true if errors are to be given for use of deprecated features - TIntermediate& intermediate; // helper for making and hooking up pieces of the parse tree - -protected: - TMap extensionBehavior; // for each extension string, what its current behavior is set to - EShMessages messages; // errors/warnings/rule-sets - int numErrors; // number of compile-time errors encountered - TInputScanner* currentScanner; - -private: - explicit TParseVersions(const TParseVersions&); - TParseVersions& operator=(const TParseVersions&); -}; - -} // end namespace glslang - -#endif // _PARSE_VERSIONS_INCLUDED_ diff --git a/deps/glslang/glslang/MachineIndependent/preprocessor/Pp.cpp b/deps/glslang/glslang/MachineIndependent/preprocessor/Pp.cpp deleted file mode 100644 index ecda34cd..00000000 --- a/deps/glslang/glslang/MachineIndependent/preprocessor/Pp.cpp +++ /dev/null @@ -1,1266 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2013 LunarG, Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// -/****************************************************************************\ -Copyright (c) 2002, NVIDIA Corporation. - -NVIDIA Corporation("NVIDIA") supplies this software to you in -consideration of your agreement to the following terms, and your use, -installation, modification or redistribution of this NVIDIA software -constitutes acceptance of these terms. If you do not agree with these -terms, please do not use, install, modify or redistribute this NVIDIA -software. - -In consideration of your agreement to abide by the following terms, and -subject to these terms, NVIDIA grants you a personal, non-exclusive -license, under NVIDIA's copyrights in this original NVIDIA software (the -"NVIDIA Software"), to use, reproduce, modify and redistribute the -NVIDIA Software, with or without modifications, in source and/or binary -forms; provided that if you redistribute the NVIDIA Software, you must -retain the copyright notice of NVIDIA, this notice and the following -text and disclaimers in all such redistributions of the NVIDIA Software. -Neither the name, trademarks, service marks nor logos of NVIDIA -Corporation may be used to endorse or promote products derived from the -NVIDIA Software without specific prior written permission from NVIDIA. -Except as expressly stated in this notice, no other rights or licenses -express or implied, are granted by NVIDIA herein, including but not -limited to any patent rights that may be infringed by your derivative -works or by other works in which the NVIDIA Software may be -incorporated. No hardware is licensed hereunder. - -THE NVIDIA SOFTWARE IS BEING PROVIDED ON AN "AS IS" BASIS, WITHOUT -WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, -INCLUDING WITHOUT LIMITATION, WARRANTIES OR CONDITIONS OF TITLE, -NON-INFRINGEMENT, MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR -ITS USE AND OPERATION EITHER ALONE OR IN COMBINATION WITH OTHER -PRODUCTS. - -IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY SPECIAL, INDIRECT, -INCIDENTAL, EXEMPLARY, CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, LOST PROFITS; PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF -USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) OR ARISING IN ANY WAY -OUT OF THE USE, REPRODUCTION, MODIFICATION AND/OR DISTRIBUTION OF THE -NVIDIA SOFTWARE, HOWEVER CAUSED AND WHETHER UNDER THEORY OF CONTRACT, -TORT (INCLUDING NEGLIGENCE), STRICT LIABILITY OR OTHERWISE, EVEN IF -NVIDIA HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-\****************************************************************************/ - -#ifndef _CRT_SECURE_NO_WARNINGS -#define _CRT_SECURE_NO_WARNINGS -#endif - -#include -#include -#include -#include - -#include "PpContext.h" -#include "PpTokens.h" - -namespace glslang { - -// Handle #define -int TPpContext::CPPdefine(TPpToken* ppToken) -{ - MacroSymbol mac; - - // get the macro name - int token = scanToken(ppToken); - if (token != PpAtomIdentifier) { - parseContext.ppError(ppToken->loc, "must be followed by macro name", "#define", ""); - return token; - } - if (ppToken->loc.string >= 0) { - // We are in user code; check for reserved name use: - parseContext.reservedPpErrorCheck(ppToken->loc, ppToken->name, "#define"); - } - - // save the macro name - const int defAtom = atomStrings.getAddAtom(ppToken->name); - - // gather parameters to the macro, between (...) - token = scanToken(ppToken); - if (token == '(' && ! ppToken->space) { - mac.emptyArgs = 1; - do { - token = scanToken(ppToken); - if (mac.args.size() == 0 && token == ')') - break; - if (token != PpAtomIdentifier) { - parseContext.ppError(ppToken->loc, "bad argument", "#define", ""); - - return token; - } - mac.emptyArgs = 0; - const int argAtom = atomStrings.getAddAtom(ppToken->name); - - // check for duplication of parameter name - bool duplicate = false; - for (size_t a = 0; a < mac.args.size(); ++a) { - if (mac.args[a] == argAtom) { - parseContext.ppError(ppToken->loc, "duplicate macro parameter", "#define", ""); - duplicate = true; - break; - } - } - if (! duplicate) - mac.args.push_back(argAtom); - token = scanToken(ppToken); - } while (token == ','); - if (token != ')') { - parseContext.ppError(ppToken->loc, "missing parenthesis", "#define", ""); - - return token; - } - - token = scanToken(ppToken); - } - - // record the definition of the macro - TSourceLoc defineLoc = ppToken->loc; // because ppToken is going to go to the next line before we report errors - while (token != '\n' && token != EndOfInput) { - mac.body.putToken(token, ppToken); - token = scanToken(ppToken); - if (token != '\n' && ppToken->space) - mac.body.putToken(' ', ppToken); - } - - // check for duplicate definition - MacroSymbol* existing = lookupMacroDef(defAtom); - if (existing != nullptr) { - if (! existing->undef) { - // Already defined -- need to make sure they are identical: - // "Two replacement lists are identical if and only if the preprocessing tokens in both have the same number, - // ordering, spelling, and white-space separation, where all white-space separations are considered identical." 
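The redefinition check that follows implements the rule quoted above: a macro may be redefined only when its parameter list and its replacement list match token for token. A standalone sketch of that comparison (Macro and identicalRedefinition are illustrative stand-ins, not the TPpContext API):

#include <iostream>
#include <string>
#include <vector>

struct Macro {
    std::vector<std::string> args;   // parameter names
    std::vector<std::string> body;   // replacement-list tokens
};

// Legal redefinition: same parameters and the same replacement tokens.
bool identicalRedefinition(const Macro& oldDef, const Macro& newDef)
{
    return oldDef.args == newDef.args && oldDef.body == newDef.body;
}

int main()
{
    Macro a{{"x"}, {"(", "x", "*", "x", ")"}};
    Macro b{{"x"}, {"(", "x", "*", "x", ")"}};
    Macro c{{"y"}, {"(", "y", "*", "y", ")"}};   // same meaning, different spelling

    std::cout << identicalRedefinition(a, b) << '\n';   // 1: identical, allowed
    std::cout << identicalRedefinition(a, c) << '\n';   // 0: reported as redefined
}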
- if (existing->args.size() != mac.args.size() || existing->emptyArgs != mac.emptyArgs) - parseContext.ppError(defineLoc, "Macro redefined; different number of arguments:", "#define", atomStrings.getString(defAtom)); - else { - if (existing->args != mac.args) - parseContext.ppError(defineLoc, "Macro redefined; different argument names:", "#define", atomStrings.getString(defAtom)); - existing->body.reset(); - mac.body.reset(); - int newToken; - do { - int oldToken; - TPpToken oldPpToken; - TPpToken newPpToken; - oldToken = existing->body.getToken(parseContext, &oldPpToken); - newToken = mac.body.getToken(parseContext, &newPpToken); - if (oldToken != newToken || oldPpToken != newPpToken) { - parseContext.ppError(defineLoc, "Macro redefined; different substitutions:", "#define", atomStrings.getString(defAtom)); - break; - } - } while (newToken > 0); - } - } - *existing = mac; - } else - addMacroDef(defAtom, mac); - - return '\n'; -} - -// Handle #undef -int TPpContext::CPPundef(TPpToken* ppToken) -{ - int token = scanToken(ppToken); - if (token != PpAtomIdentifier) { - parseContext.ppError(ppToken->loc, "must be followed by macro name", "#undef", ""); - - return token; - } - - parseContext.reservedPpErrorCheck(ppToken->loc, ppToken->name, "#undef"); - - MacroSymbol* macro = lookupMacroDef(atomStrings.getAtom(ppToken->name)); - if (macro != nullptr) - macro->undef = 1; - token = scanToken(ppToken); - if (token != '\n') - parseContext.ppError(ppToken->loc, "can only be followed by a single macro name", "#undef", ""); - - return token; -} - -// Handle #else -/* Skip forward to appropriate spot. This is used both -** to skip to a #endif after seeing an #else, AND to skip to a #else, -** #elif, or #endif after a #if/#ifdef/#ifndef/#elif test was false. -*/ -int TPpContext::CPPelse(int matchelse, TPpToken* ppToken) -{ - int depth = 0; - int token = scanToken(ppToken); - - while (token != EndOfInput) { - if (token != '#') { - while (token != '\n' && token != EndOfInput) - token = scanToken(ppToken); - - if (token == EndOfInput) - return token; - - token = scanToken(ppToken); - continue; - } - - if ((token = scanToken(ppToken)) != PpAtomIdentifier) - continue; - - int nextAtom = atomStrings.getAtom(ppToken->name); - if (nextAtom == PpAtomIf || nextAtom == PpAtomIfdef || nextAtom == PpAtomIfndef) { - depth++; - ifdepth++; - elsetracker++; - } else if (nextAtom == PpAtomEndif) { - token = extraTokenCheck(nextAtom, ppToken, scanToken(ppToken)); - elseSeen[elsetracker] = false; - --elsetracker; - if (depth == 0) { - // found the #endif we are looking for - if (ifdepth) - --ifdepth; - break; - } - --depth; - --ifdepth; - } else if (matchelse && depth == 0) { - if (nextAtom == PpAtomElse) { - elseSeen[elsetracker] = true; - token = extraTokenCheck(nextAtom, ppToken, scanToken(ppToken)); - // found the #else we are looking for - break; - } else if (nextAtom == PpAtomElif) { - if (elseSeen[elsetracker]) - parseContext.ppError(ppToken->loc, "#elif after #else", "#elif", ""); - /* we decrement ifdepth here, because CPPif will increment - * it and we really want to leave it alone */ - if (ifdepth) { - --ifdepth; - elseSeen[elsetracker] = false; - --elsetracker; - } - - return CPPif(ppToken); - } - } else if (nextAtom == PpAtomElse) { - if (elseSeen[elsetracker]) - parseContext.ppError(ppToken->loc, "#else after #else", "#else", ""); - else - elseSeen[elsetracker] = true; - token = extraTokenCheck(nextAtom, ppToken, scanToken(ppToken)); - } else if (nextAtom == PpAtomElif) { - if (elseSeen[elsetracker]) - 
parseContext.ppError(ppToken->loc, "#elif after #else", "#elif", ""); - } - } - - return token; -} - -// Call when there should be no more tokens left on a line. -int TPpContext::extraTokenCheck(int contextAtom, TPpToken* ppToken, int token) -{ - if (token != '\n' && token != EndOfInput) { - static const char* message = "unexpected tokens following directive"; - - const char* label; - if (contextAtom == PpAtomElse) - label = "#else"; - else if (contextAtom == PpAtomElif) - label = "#elif"; - else if (contextAtom == PpAtomEndif) - label = "#endif"; - else if (contextAtom == PpAtomIf) - label = "#if"; - else if (contextAtom == PpAtomLine) - label = "#line"; - else - label = ""; - - if (parseContext.relaxedErrors()) - parseContext.ppWarn(ppToken->loc, message, label, ""); - else - parseContext.ppError(ppToken->loc, message, label, ""); - - while (token != '\n' && token != EndOfInput) - token = scanToken(ppToken); - } - - return token; -} - -enum eval_prec { - MIN_PRECEDENCE, - COND, LOGOR, LOGAND, OR, XOR, AND, EQUAL, RELATION, SHIFT, ADD, MUL, UNARY, - MAX_PRECEDENCE -}; - -namespace { - - int op_logor(int a, int b) { return a || b; } - int op_logand(int a, int b) { return a && b; } - int op_or(int a, int b) { return a | b; } - int op_xor(int a, int b) { return a ^ b; } - int op_and(int a, int b) { return a & b; } - int op_eq(int a, int b) { return a == b; } - int op_ne(int a, int b) { return a != b; } - int op_ge(int a, int b) { return a >= b; } - int op_le(int a, int b) { return a <= b; } - int op_gt(int a, int b) { return a > b; } - int op_lt(int a, int b) { return a < b; } - int op_shl(int a, int b) { return a << b; } - int op_shr(int a, int b) { return a >> b; } - int op_add(int a, int b) { return a + b; } - int op_sub(int a, int b) { return a - b; } - int op_mul(int a, int b) { return a * b; } - int op_div(int a, int b) { return a / b; } - int op_mod(int a, int b) { return a % b; } - int op_pos(int a) { return a; } - int op_neg(int a) { return -a; } - int op_cmpl(int a) { return ~a; } - int op_not(int a) { return !a; } - -}; - -struct TBinop { - int token, precedence, (*op)(int, int); -} binop[] = { - { PpAtomOr, LOGOR, op_logor }, - { PpAtomAnd, LOGAND, op_logand }, - { '|', OR, op_or }, - { '^', XOR, op_xor }, - { '&', AND, op_and }, - { PpAtomEQ, EQUAL, op_eq }, - { PpAtomNE, EQUAL, op_ne }, - { '>', RELATION, op_gt }, - { PpAtomGE, RELATION, op_ge }, - { '<', RELATION, op_lt }, - { PpAtomLE, RELATION, op_le }, - { PpAtomLeft, SHIFT, op_shl }, - { PpAtomRight, SHIFT, op_shr }, - { '+', ADD, op_add }, - { '-', ADD, op_sub }, - { '*', MUL, op_mul }, - { '/', MUL, op_div }, - { '%', MUL, op_mod }, -}; - -struct TUnop { - int token, (*op)(int); -} unop[] = { - { '+', op_pos }, - { '-', op_neg }, - { '~', op_cmpl }, - { '!', op_not }, -}; - -#define NUM_ELEMENTS(A) (sizeof(A) / sizeof(A[0])) - -int TPpContext::eval(int token, int precedence, bool shortCircuit, int& res, bool& err, TPpToken* ppToken) -{ - TSourceLoc loc = ppToken->loc; // because we sometimes read the newline before reporting the error - if (token == PpAtomIdentifier) { - if (strcmp("defined", ppToken->name) == 0) { - if (! 
parseContext.isReadingHLSL() && isMacroInput()) { - if (parseContext.relaxedErrors()) - parseContext.ppWarn(ppToken->loc, "nonportable when expanded from macros for preprocessor expression", - "defined", ""); - else - parseContext.ppError(ppToken->loc, "cannot use in preprocessor expression when expanded from macros", - "defined", ""); - } - bool needclose = 0; - token = scanToken(ppToken); - if (token == '(') { - needclose = true; - token = scanToken(ppToken); - } - if (token != PpAtomIdentifier) { - parseContext.ppError(loc, "incorrect directive, expected identifier", "preprocessor evaluation", ""); - err = true; - res = 0; - - return token; - } - - MacroSymbol* macro = lookupMacroDef(atomStrings.getAtom(ppToken->name)); - res = macro != nullptr ? !macro->undef : 0; - token = scanToken(ppToken); - if (needclose) { - if (token != ')') { - parseContext.ppError(loc, "expected ')'", "preprocessor evaluation", ""); - err = true; - res = 0; - - return token; - } - token = scanToken(ppToken); - } - } else { - token = evalToToken(token, shortCircuit, res, err, ppToken); - return eval(token, precedence, shortCircuit, res, err, ppToken); - } - } else if (token == PpAtomConstInt) { - res = ppToken->ival; - token = scanToken(ppToken); - } else if (token == '(') { - token = scanToken(ppToken); - token = eval(token, MIN_PRECEDENCE, shortCircuit, res, err, ppToken); - if (! err) { - if (token != ')') { - parseContext.ppError(loc, "expected ')'", "preprocessor evaluation", ""); - err = true; - res = 0; - - return token; - } - token = scanToken(ppToken); - } - } else { - int op = NUM_ELEMENTS(unop) - 1; - for (; op >= 0; op--) { - if (unop[op].token == token) - break; - } - if (op >= 0) { - token = scanToken(ppToken); - token = eval(token, UNARY, shortCircuit, res, err, ppToken); - res = unop[op].op(res); - } else { - parseContext.ppError(loc, "bad expression", "preprocessor evaluation", ""); - err = true; - res = 0; - - return token; - } - } - - token = evalToToken(token, shortCircuit, res, err, ppToken); - - // Perform evaluation of binary operation, if there is one, otherwise we are done. - while (! err) { - if (token == ')' || token == '\n') - break; - int op; - for (op = NUM_ELEMENTS(binop) - 1; op >= 0; op--) { - if (binop[op].token == token) - break; - } - if (op < 0 || binop[op].precedence <= precedence) - break; - int leftSide = res; - - // Setup short-circuiting, needed for ES, unless already in a short circuit. - // (Once in a short-circuit, can't turn off again, until that whole subexpression is done. - if (! shortCircuit) { - if ((token == PpAtomOr && leftSide == 1) || - (token == PpAtomAnd && leftSide == 0)) - shortCircuit = true; - } - - token = scanToken(ppToken); - token = eval(token, binop[op].precedence, shortCircuit, res, err, ppToken); - - if (binop[op].op == op_div || binop[op].op == op_mod) { - if (res == 0) { - parseContext.ppError(loc, "division by 0", "preprocessor evaluation", ""); - res = 1; - } - } - res = binop[op].op(leftSide, res); - } - - return token; -} - -// Expand macros, skipping empty expansions, to get to the first real token in those expansions. 
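The eval() routine deleted above is a precedence-climbing evaluator driven by the binop table: it keeps folding binary operators while the next operator binds more tightly than the caller's precedence, short-circuits || and &&, and recovers from division by zero. A compact sketch of the same folding scheme over a plain character string (the token machinery and short-circuiting are left out; this parser is illustrative, not the preprocessor's):

#include <cctype>
#include <iostream>
#include <string>

struct Parser {
    std::string s;
    size_t i = 0;

    char peek()
    {
        while (i < s.size() && std::isspace((unsigned char)s[i]))
            ++i;
        return i < s.size() ? s[i] : '\0';
    }

    static int precedence(char op)
    {
        if (op == '+' || op == '-') return 1;
        if (op == '*' || op == '/') return 2;
        return 0;                                 // not a binary operator: stop folding
    }

    int primary()
    {
        char c = peek();
        if (c == '(') { ++i; int v = expr(0); peek(); ++i; return v; }  // '(' expr ')'
        if (c == '-') { ++i; return -primary(); }                        // unary minus
        int v = 0;
        while (i < s.size() && std::isdigit((unsigned char)s[i]))
            v = v * 10 + (s[i++] - '0');
        return v;
    }

    // Fold while the next operator binds tighter than minPrec, as eval() does.
    int expr(int minPrec)
    {
        int left = primary();
        while (precedence(peek()) > minPrec) {
            char op = s[i++];
            int right = expr(precedence(op));
            if (op == '/' && right == 0) right = 1;   // mirror the division-by-0 recovery
            left = (op == '+') ? left + right
                 : (op == '-') ? left - right
                 : (op == '*') ? left * right
                 :               left / right;
        }
        return left;
    }
};

int main()
{
    Parser p{"2 + 3 * (4 - 1)"};
    std::cout << p.expr(0) << '\n';   // 11
}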
-int TPpContext::evalToToken(int token, bool shortCircuit, int& res, bool& err, TPpToken* ppToken) -{ - while (token == PpAtomIdentifier && strcmp("defined", ppToken->name) != 0) { - int macroReturn = MacroExpand(ppToken, true, false); - if (macroReturn == 0) { - parseContext.ppError(ppToken->loc, "can't evaluate expression", "preprocessor evaluation", ""); - err = true; - res = 0; - token = scanToken(ppToken); - break; - } - if (macroReturn == -1) { - if (! shortCircuit && parseContext.profile == EEsProfile) { - const char* message = "undefined macro in expression not allowed in es profile"; - if (parseContext.relaxedErrors()) - parseContext.ppWarn(ppToken->loc, message, "preprocessor evaluation", ppToken->name); - else - parseContext.ppError(ppToken->loc, message, "preprocessor evaluation", ppToken->name); - } - } - token = scanToken(ppToken); - } - - return token; -} - -// Handle #if -int TPpContext::CPPif(TPpToken* ppToken) -{ - int token = scanToken(ppToken); - elsetracker++; - ifdepth++; - if (ifdepth > maxIfNesting) { - parseContext.ppError(ppToken->loc, "maximum nesting depth exceeded", "#if", ""); - return 0; - } - int res = 0; - bool err = false; - token = eval(token, MIN_PRECEDENCE, false, res, err, ppToken); - token = extraTokenCheck(PpAtomIf, ppToken, token); - if (!res && !err) - token = CPPelse(1, ppToken); - - return token; -} - -// Handle #ifdef -int TPpContext::CPPifdef(int defined, TPpToken* ppToken) -{ - int token = scanToken(ppToken); - if (++ifdepth > maxIfNesting) { - parseContext.ppError(ppToken->loc, "maximum nesting depth exceeded", "#ifdef", ""); - return 0; - } - elsetracker++; - if (token != PpAtomIdentifier) { - if (defined) - parseContext.ppError(ppToken->loc, "must be followed by macro name", "#ifdef", ""); - else - parseContext.ppError(ppToken->loc, "must be followed by macro name", "#ifndef", ""); - } else { - MacroSymbol* macro = lookupMacroDef(atomStrings.getAtom(ppToken->name)); - token = scanToken(ppToken); - if (token != '\n') { - parseContext.ppError(ppToken->loc, "unexpected tokens following #ifdef directive - expected a newline", "#ifdef", ""); - while (token != '\n' && token != EndOfInput) - token = scanToken(ppToken); - } - if (((macro != nullptr && !macro->undef) ? 1 : 0) != defined) - token = CPPelse(1, ppToken); - } - - return token; -} - -// Handle #include ... -// TODO: Handle macro expansions for the header name -int TPpContext::CPPinclude(TPpToken* ppToken) -{ - const TSourceLoc directiveLoc = ppToken->loc; - bool startWithLocalSearch = true; // to additionally include the extra "" paths - int token = scanToken(ppToken); - - // handle -style #include - if (token == '<') { - startWithLocalSearch = false; - token = scanHeaderName(ppToken, '>'); - } - // otherwise ppToken already has the header name and it was "header-name" style - - if (token != PpAtomConstString) { - parseContext.ppError(directiveLoc, "must be followed by a header name", "#include", ""); - return token; - } - - // Make a copy of the name because it will be overwritten by the next token scan. 
- const std::string filename = ppToken->name; - - // See if the directive was well formed - token = scanToken(ppToken); - if (token != '\n') { - if (token == EndOfInput) - parseContext.ppError(ppToken->loc, "expected newline after header name:", "#include", "%s", filename.c_str()); - else - parseContext.ppError(ppToken->loc, "extra content after header name:", "#include", "%s", filename.c_str()); - return token; - } - - // Process well-formed directive - - // Find the inclusion, first look in "Local" ("") paths, if requested, - // otherwise, only search the "System" (<>) paths. - TShader::Includer::IncludeResult* res = nullptr; - if (startWithLocalSearch) - res = includer.includeLocal(filename.c_str(), currentSourceFile.c_str(), includeStack.size() + 1); - if (res == nullptr || res->headerName.empty()) { - includer.releaseInclude(res); - res = includer.includeSystem(filename.c_str(), currentSourceFile.c_str(), includeStack.size() + 1); - } - - // Process the results - if (res != nullptr && !res->headerName.empty()) { - if (res->headerData != nullptr && res->headerLength > 0) { - // path for processing one or more tokens from an included header, hand off 'res' - const bool forNextLine = parseContext.lineDirectiveShouldSetNextLine(); - std::ostringstream prologue; - std::ostringstream epilogue; - prologue << "#line " << forNextLine << " " << "\"" << res->headerName << "\"\n"; - epilogue << (res->headerData[res->headerLength - 1] == '\n'? "" : "\n") << - "#line " << directiveLoc.line + forNextLine << " " << directiveLoc.getStringNameOrNum() << "\n"; - pushInput(new TokenizableIncludeFile(directiveLoc, prologue.str(), res, epilogue.str(), this)); - // There's no "current" location anymore. - parseContext.setCurrentColumn(0); - } else { - // things are okay, but there is nothing to process - includer.releaseInclude(res); - } - } else { - // error path, clean up - std::string message = - res != nullptr ? std::string(res->headerData, res->headerLength) - : std::string("Could not process include directive"); - parseContext.ppError(directiveLoc, message.c_str(), "#include", "for header name: %s", filename.c_str()); - includer.releaseInclude(res); - } - - return token; -} - -// Handle #line -int TPpContext::CPPline(TPpToken* ppToken) -{ - // "#line must have, after macro substitution, one of the following forms: - // "#line line - // "#line line source-string-number" - - int token = scanToken(ppToken); - const TSourceLoc directiveLoc = ppToken->loc; - if (token == '\n') { - parseContext.ppError(ppToken->loc, "must by followed by an integral literal", "#line", ""); - return token; - } - - int lineRes = 0; // Line number after macro expansion. - int lineToken = 0; - bool hasFile = false; - int fileRes = 0; // Source file number after macro expansion. - const char* sourceName = nullptr; // Optional source file name. - bool lineErr = false; - bool fileErr = false; - token = eval(token, MIN_PRECEDENCE, false, lineRes, lineErr, ppToken); - if (! lineErr) { - lineToken = lineRes; - if (token == '\n') - ++lineRes; - - if (parseContext.lineDirectiveShouldSetNextLine()) - --lineRes; - parseContext.setCurrentLine(lineRes); - - if (token != '\n') { - if (token == PpAtomConstString) { - parseContext.ppRequireExtensions(directiveLoc, 1, &E_GL_GOOGLE_cpp_style_line_directive, "filename-based #line"); - // We need to save a copy of the string instead of pointing - // to the name field of the token since the name field - // will likely be overwritten by the next token scan. 
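CPPinclude(), deleted above, resolves ""-style headers through the includer's "local" search first and falls back to the "system" (<>) search, while <>-style headers go straight to the system search. A hedged sketch of that lookup order against an ordinary filesystem; resolveInclude, the directory lists, and the file names are all made up for illustration and are not the TShader::Includer interface:

#include <fstream>
#include <iostream>
#include <iterator>
#include <optional>
#include <string>
#include <vector>

// Read a file if it exists; an empty optional means "not found here".
std::optional<std::string> tryRead(const std::string& path)
{
    std::ifstream f(path);
    if (!f)
        return std::nullopt;
    return std::string(std::istreambuf_iterator<char>(f), std::istreambuf_iterator<char>());
}

// "header" style (quoted == true): local directories first, then system ones.
// <header> style (quoted == false): system directories only.
std::optional<std::string> resolveInclude(const std::string& name, bool quoted,
                                          const std::vector<std::string>& localDirs,
                                          const std::vector<std::string>& systemDirs)
{
    if (quoted)
        for (const auto& d : localDirs)
            if (auto text = tryRead(d + "/" + name))
                return text;
    for (const auto& d : systemDirs)
        if (auto text = tryRead(d + "/" + name))
            return text;
    return std::nullopt;                          // caller reports the #include failure
}

int main()
{
    auto text = resolveInclude("common.glsl", /*quoted=*/true, {"."}, {"/usr/include"});
    std::cout << (text ? "found" : "not found") << '\n';
}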
- sourceName = atomStrings.getString(atomStrings.getAddAtom(ppToken->name)); - parseContext.setCurrentSourceName(sourceName); - hasFile = true; - token = scanToken(ppToken); - } else { - token = eval(token, MIN_PRECEDENCE, false, fileRes, fileErr, ppToken); - if (! fileErr) { - parseContext.setCurrentString(fileRes); - hasFile = true; - } - } - } - } - if (!fileErr && !lineErr) { - parseContext.notifyLineDirective(directiveLoc.line, lineToken, hasFile, fileRes, sourceName); - } - token = extraTokenCheck(PpAtomLine, ppToken, token); - - return token; -} - -// Handle #error -int TPpContext::CPPerror(TPpToken* ppToken) -{ - int token = scanToken(ppToken); - std::string message; - TSourceLoc loc = ppToken->loc; - - while (token != '\n' && token != EndOfInput) { - if (token == PpAtomConstInt || token == PpAtomConstUint || - token == PpAtomConstInt64 || token == PpAtomConstUint64 || -#ifdef AMD_EXTENSIONS - token == PpAtomConstInt16 || token == PpAtomConstUint16 || - token == PpAtomConstFloat16 || -#endif - token == PpAtomConstFloat || token == PpAtomConstDouble) { - message.append(ppToken->name); - } else if (token == PpAtomIdentifier || token == PpAtomConstString) { - message.append(ppToken->name); - } else { - message.append(atomStrings.getString(token)); - } - message.append(" "); - token = scanToken(ppToken); - } - parseContext.notifyErrorDirective(loc.line, message.c_str()); - // store this msg into the shader's information log..set the Compile Error flag!!!! - parseContext.ppError(loc, message.c_str(), "#error", ""); - - return '\n'; -} - -// Handle #pragma -int TPpContext::CPPpragma(TPpToken* ppToken) -{ - char SrcStrName[2]; - TVector tokens; - - TSourceLoc loc = ppToken->loc; // because we go to the next line before processing - int token = scanToken(ppToken); - while (token != '\n' && token != EndOfInput) { - switch (token) { - case PpAtomIdentifier: - case PpAtomConstInt: - case PpAtomConstUint: - case PpAtomConstInt64: - case PpAtomConstUint64: -#ifdef AMD_EXTENSIONS - case PpAtomConstInt16: - case PpAtomConstUint16: -#endif - case PpAtomConstFloat: - case PpAtomConstDouble: -#ifdef AMD_EXTENSIONS - case PpAtomConstFloat16: -#endif - tokens.push_back(ppToken->name); - break; - default: - SrcStrName[0] = (char)token; - SrcStrName[1] = '\0'; - tokens.push_back(SrcStrName); - } - token = scanToken(ppToken); - } - - if (token == EndOfInput) - parseContext.ppError(loc, "directive must end with a newline", "#pragma", ""); - else - parseContext.handlePragma(loc, tokens); - - return token; -} - -// #version: This is just for error checking: the version and profile are decided before preprocessing starts -int TPpContext::CPPversion(TPpToken* ppToken) -{ - int token = scanToken(ppToken); - - if (errorOnVersion || versionSeen) { - if (parseContext.isReadingHLSL()) - parseContext.ppError(ppToken->loc, "invalid preprocessor command", "#version", ""); - else - parseContext.ppError(ppToken->loc, "must occur first in shader", "#version", ""); - } - versionSeen = true; - - if (token == '\n') { - parseContext.ppError(ppToken->loc, "must be followed by version number", "#version", ""); - - return token; - } - - if (token != PpAtomConstInt) - parseContext.ppError(ppToken->loc, "must be followed by version number", "#version", ""); - - ppToken->ival = atoi(ppToken->name); - int versionNumber = ppToken->ival; - int line = ppToken->loc.line; - token = scanToken(ppToken); - - if (token == '\n') { - parseContext.notifyVersion(line, versionNumber, nullptr); - return token; - } else { - int profileAtom = 
atomStrings.getAtom(ppToken->name); - if (profileAtom != PpAtomCore && - profileAtom != PpAtomCompatibility && - profileAtom != PpAtomEs) - parseContext.ppError(ppToken->loc, "bad profile name; use es, core, or compatibility", "#version", ""); - parseContext.notifyVersion(line, versionNumber, ppToken->name); - token = scanToken(ppToken); - - if (token == '\n') - return token; - else - parseContext.ppError(ppToken->loc, "bad tokens following profile -- expected newline", "#version", ""); - } - - return token; -} - -// Handle #extension -int TPpContext::CPPextension(TPpToken* ppToken) -{ - int line = ppToken->loc.line; - int token = scanToken(ppToken); - char extensionName[MaxTokenLength + 1]; - - if (token=='\n') { - parseContext.ppError(ppToken->loc, "extension name not specified", "#extension", ""); - return token; - } - - if (token != PpAtomIdentifier) - parseContext.ppError(ppToken->loc, "extension name expected", "#extension", ""); - - assert(strlen(ppToken->name) <= MaxTokenLength); - strcpy(extensionName, ppToken->name); - - token = scanToken(ppToken); - if (token != ':') { - parseContext.ppError(ppToken->loc, "':' missing after extension name", "#extension", ""); - return token; - } - - token = scanToken(ppToken); - if (token != PpAtomIdentifier) { - parseContext.ppError(ppToken->loc, "behavior for extension not specified", "#extension", ""); - return token; - } - - parseContext.updateExtensionBehavior(line, extensionName, ppToken->name); - parseContext.notifyExtensionDirective(line, extensionName, ppToken->name); - - token = scanToken(ppToken); - if (token == '\n') - return token; - else - parseContext.ppError(ppToken->loc, "extra tokens -- expected newline", "#extension",""); - - return token; -} - -int TPpContext::readCPPline(TPpToken* ppToken) -{ - int token = scanToken(ppToken); - - if (token == PpAtomIdentifier) { - switch (atomStrings.getAtom(ppToken->name)) { - case PpAtomDefine: - token = CPPdefine(ppToken); - break; - case PpAtomElse: - if (elsetracker[elseSeen]) - parseContext.ppError(ppToken->loc, "#else after #else", "#else", ""); - elsetracker[elseSeen] = true; - if (! ifdepth) - parseContext.ppError(ppToken->loc, "mismatched statements", "#else", ""); - token = extraTokenCheck(PpAtomElse, ppToken, scanToken(ppToken)); - token = CPPelse(0, ppToken); - break; - case PpAtomElif: - if (! ifdepth) - parseContext.ppError(ppToken->loc, "mismatched statements", "#elif", ""); - if (elseSeen[elsetracker]) - parseContext.ppError(ppToken->loc, "#elif after #else", "#elif", ""); - // this token is really a dont care, but we still need to eat the tokens - token = scanToken(ppToken); - while (token != '\n' && token != EndOfInput) - token = scanToken(ppToken); - token = CPPelse(0, ppToken); - break; - case PpAtomEndif: - if (! 
ifdepth) - parseContext.ppError(ppToken->loc, "mismatched statements", "#endif", ""); - else { - elseSeen[elsetracker] = false; - --elsetracker; - --ifdepth; - } - token = extraTokenCheck(PpAtomEndif, ppToken, scanToken(ppToken)); - break; - case PpAtomIf: - token = CPPif(ppToken); - break; - case PpAtomIfdef: - token = CPPifdef(1, ppToken); - break; - case PpAtomIfndef: - token = CPPifdef(0, ppToken); - break; - case PpAtomInclude: - if(!parseContext.isReadingHLSL()) { - parseContext.ppRequireExtensions(ppToken->loc, 1, &E_GL_GOOGLE_include_directive, "#include"); - } - token = CPPinclude(ppToken); - break; - case PpAtomLine: - token = CPPline(ppToken); - break; - case PpAtomPragma: - token = CPPpragma(ppToken); - break; - case PpAtomUndef: - token = CPPundef(ppToken); - break; - case PpAtomError: - token = CPPerror(ppToken); - break; - case PpAtomVersion: - token = CPPversion(ppToken); - break; - case PpAtomExtension: - token = CPPextension(ppToken); - break; - default: - parseContext.ppError(ppToken->loc, "invalid directive:", "#", ppToken->name); - break; - } - } else if (token != '\n' && token != EndOfInput) - parseContext.ppError(ppToken->loc, "invalid directive", "#", ""); - - while (token != '\n' && token != EndOfInput) - token = scanToken(ppToken); - - return token; -} - -// Context-dependent parsing of a #include . -// Assumes no macro expansions etc. are being done; the name is just on the current input. -// Always creates a name and returns PpAtomicConstString, unless we run out of input. -int TPpContext::scanHeaderName(TPpToken* ppToken, char delimit) -{ - bool tooLong = false; - - if (inputStack.empty()) - return EndOfInput; - - int len = 0; - ppToken->name[0] = '\0'; - do { - int ch = inputStack.back()->getch(); - - // done yet? - if (ch == delimit) { - ppToken->name[len] = '\0'; - if (tooLong) - parseContext.ppError(ppToken->loc, "header name too long", "", ""); - return PpAtomConstString; - } else if (ch == EndOfInput) - return EndOfInput; - - // found a character to expand the name with - if (len < MaxTokenLength) - ppToken->name[len++] = (char)ch; - else - tooLong = true; - } while (true); -} - -// Macro-expand a macro argument 'arg' to create 'expandedArg'. -// Does not replace 'arg'. -// Returns nullptr if no expanded argument is created. -TPpContext::TokenStream* TPpContext::PrescanMacroArg(TokenStream& arg, TPpToken* ppToken, bool newLineOkay) -{ - // expand the argument - TokenStream* expandedArg = new TokenStream; - pushInput(new tMarkerInput(this)); - pushTokenStreamInput(arg); - int token; - while ((token = scanToken(ppToken)) != tMarkerInput::marker && token != EndOfInput) { - token = tokenPaste(token, *ppToken); - if (token == tMarkerInput::marker || token == EndOfInput) - break; - if (token == PpAtomIdentifier && MacroExpand(ppToken, false, newLineOkay) != 0) - continue; - expandedArg->putToken(token, ppToken); - } - - if (token == EndOfInput) { - // MacroExpand ate the marker, so had bad input, recover - delete expandedArg; - expandedArg = nullptr; - } else { - // remove the marker - popInput(); - } - - return expandedArg; -} - -// -// Return the next token for a macro expansion, handling macro arguments, -// whose semantics are dependent on being adjacent to ##. 
-// -int TPpContext::tMacroInput::scan(TPpToken* ppToken) -{ - int token; - do { - token = mac->body.getToken(pp->parseContext, ppToken); - } while (token == ' '); // handle white space in macro - - // Hash operators basically turn off a round of macro substitution - // (the round done on the argument before the round done on the RHS of the - // macro definition): - // - // "A parameter in the replacement list, unless preceded by a # or ## - // preprocessing token or followed by a ## preprocessing token (see below), - // is replaced by the corresponding argument after all macros contained - // therein have been expanded." - // - // "If, in the replacement list, a parameter is immediately preceded or - // followed by a ## preprocessing token, the parameter is replaced by the - // corresponding argument's preprocessing token sequence." - - bool pasting = false; - if (postpaste) { - // don't expand next token - pasting = true; - postpaste = false; - } - - if (prepaste) { - // already know we should be on a ##, verify - assert(token == PpAtomPaste); - prepaste = false; - postpaste = true; - } - - // see if are preceding a ## - if (mac->body.peekUntokenizedPasting()) { - prepaste = true; - pasting = true; - } - - // HLSL does expand macros before concatenation - if (pasting && pp->parseContext.isReadingHLSL()) - pasting = false; - - // TODO: preprocessor: properly handle whitespace (or lack of it) between tokens when expanding - if (token == PpAtomIdentifier) { - int i; - for (i = (int)mac->args.size() - 1; i >= 0; i--) - if (strcmp(pp->atomStrings.getString(mac->args[i]), ppToken->name) == 0) - break; - if (i >= 0) { - TokenStream* arg = expandedArgs[i]; - if (arg == nullptr || pasting) - arg = args[i]; - pp->pushTokenStreamInput(*arg, prepaste); - - return pp->scanToken(ppToken); - } - } - - if (token == EndOfInput) - mac->busy = 0; - - return token; -} - -// return a textual zero, for scanning a macro that was never defined -int TPpContext::tZeroInput::scan(TPpToken* ppToken) -{ - if (done) - return EndOfInput; - - strcpy(ppToken->name, "0"); - ppToken->ival = 0; - ppToken->space = false; - done = true; - - return PpAtomConstInt; -} - -// -// Check a token to see if it is a macro that should be expanded. -// If it is, and defined, push a tInput that will produce the appropriate expansion -// and return 1. -// If it is, but undefined, and expandUndef is requested, push a tInput that will -// expand to 0 and return -1. -// Otherwise, return 0 to indicate no expansion, which is not necessarily an error. 
-// -int TPpContext::MacroExpand(TPpToken* ppToken, bool expandUndef, bool newLineOkay) -{ - ppToken->space = false; - int macroAtom = atomStrings.getAtom(ppToken->name); - switch (macroAtom) { - case PpAtomLineMacro: - ppToken->ival = parseContext.getCurrentLoc().line; - snprintf(ppToken->name, sizeof(ppToken->name), "%d", ppToken->ival); - UngetToken(PpAtomConstInt, ppToken); - return 1; - - case PpAtomFileMacro: { - if (parseContext.getCurrentLoc().name) - parseContext.ppRequireExtensions(ppToken->loc, 1, &E_GL_GOOGLE_cpp_style_line_directive, "filename-based __FILE__"); - ppToken->ival = parseContext.getCurrentLoc().string; - snprintf(ppToken->name, sizeof(ppToken->name), "%s", ppToken->loc.getStringNameOrNum().c_str()); - UngetToken(PpAtomConstInt, ppToken); - return 1; - } - - case PpAtomVersionMacro: - ppToken->ival = parseContext.version; - snprintf(ppToken->name, sizeof(ppToken->name), "%d", ppToken->ival); - UngetToken(PpAtomConstInt, ppToken); - return 1; - - default: - break; - } - - MacroSymbol* macro = macroAtom == 0 ? nullptr : lookupMacroDef(macroAtom); - int token; - int depth = 0; - - // no recursive expansions - if (macro != nullptr && macro->busy) - return 0; - - // not expanding undefined macros - if ((macro == nullptr || macro->undef) && ! expandUndef) - return 0; - - // 0 is the value of an undefined macro - if ((macro == nullptr || macro->undef) && expandUndef) { - pushInput(new tZeroInput(this)); - return -1; - } - - tMacroInput *in = new tMacroInput(this); - - TSourceLoc loc = ppToken->loc; // in case we go to the next line before discovering the error - in->mac = macro; - if (macro->args.size() > 0 || macro->emptyArgs) { - token = scanToken(ppToken); - if (newLineOkay) { - while (token == '\n') - token = scanToken(ppToken); - } - if (token != '(') { - parseContext.ppError(loc, "expected '(' following", "macro expansion", atomStrings.getString(macroAtom)); - UngetToken(token, ppToken); - delete in; - return 0; - } - in->args.resize(in->mac->args.size()); - for (size_t i = 0; i < in->mac->args.size(); i++) - in->args[i] = new TokenStream; - in->expandedArgs.resize(in->mac->args.size()); - for (size_t i = 0; i < in->mac->args.size(); i++) - in->expandedArgs[i] = nullptr; - size_t arg = 0; - bool tokenRecorded = false; - do { - depth = 0; - while (1) { - token = scanToken(ppToken); - if (token == EndOfInput || token == tMarkerInput::marker) { - parseContext.ppError(loc, "End of input in macro", "macro expansion", atomStrings.getString(macroAtom)); - delete in; - return 0; - } - if (token == '\n') { - if (! 
newLineOkay) { - parseContext.ppError(loc, "End of line in macro substitution:", "macro expansion", atomStrings.getString(macroAtom)); - delete in; - return 0; - } - continue; - } - if (token == '#') { - parseContext.ppError(ppToken->loc, "unexpected '#'", "macro expansion", atomStrings.getString(macroAtom)); - delete in; - return 0; - } - if (in->mac->args.size() == 0 && token != ')') - break; - if (depth == 0 && (token == ',' || token == ')')) - break; - if (token == '(') - depth++; - if (token == ')') - depth--; - in->args[arg]->putToken(token, ppToken); - tokenRecorded = true; - } - if (token == ')') { - if (in->mac->args.size() == 1 && tokenRecorded == 0) - break; - arg++; - break; - } - arg++; - } while (arg < in->mac->args.size()); - - if (arg < in->mac->args.size()) - parseContext.ppError(loc, "Too few args in Macro", "macro expansion", atomStrings.getString(macroAtom)); - else if (token != ')') { - depth=0; - while (token != EndOfInput && (depth > 0 || token != ')')) { - if (token == ')') - depth--; - token = scanToken(ppToken); - if (token == '(') - depth++; - } - - if (token == EndOfInput) { - parseContext.ppError(loc, "End of input in macro", "macro expansion", atomStrings.getString(macroAtom)); - delete in; - return 0; - } - parseContext.ppError(loc, "Too many args in macro", "macro expansion", atomStrings.getString(macroAtom)); - } - - // We need both expanded and non-expanded forms of the argument, for whether or - // not token pasting will be applied later when the argument is consumed next to ##. - for (size_t i = 0; i < in->mac->args.size(); i++) - in->expandedArgs[i] = PrescanMacroArg(*in->args[i], ppToken, newLineOkay); - } - - pushInput(in); - macro->busy = 1; - macro->body.reset(); - - return 1; -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/preprocessor/PpAtom.cpp b/deps/glslang/glslang/MachineIndependent/preprocessor/PpAtom.cpp deleted file mode 100644 index 06c2333e..00000000 --- a/deps/glslang/glslang/MachineIndependent/preprocessor/PpAtom.cpp +++ /dev/null @@ -1,181 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2013 LunarG, Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// -/****************************************************************************\ -Copyright (c) 2002, NVIDIA Corporation. - -NVIDIA Corporation("NVIDIA") supplies this software to you in -consideration of your agreement to the following terms, and your use, -installation, modification or redistribution of this NVIDIA software -constitutes acceptance of these terms. If you do not agree with these -terms, please do not use, install, modify or redistribute this NVIDIA -software. - -In consideration of your agreement to abide by the following terms, and -subject to these terms, NVIDIA grants you a personal, non-exclusive -license, under NVIDIA's copyrights in this original NVIDIA software (the -"NVIDIA Software"), to use, reproduce, modify and redistribute the -NVIDIA Software, with or without modifications, in source and/or binary -forms; provided that if you redistribute the NVIDIA Software, you must -retain the copyright notice of NVIDIA, this notice and the following -text and disclaimers in all such redistributions of the NVIDIA Software. -Neither the name, trademarks, service marks nor logos of NVIDIA -Corporation may be used to endorse or promote products derived from the -NVIDIA Software without specific prior written permission from NVIDIA. -Except as expressly stated in this notice, no other rights or licenses -express or implied, are granted by NVIDIA herein, including but not -limited to any patent rights that may be infringed by your derivative -works or by other works in which the NVIDIA Software may be -incorporated. No hardware is licensed hereunder. - -THE NVIDIA SOFTWARE IS BEING PROVIDED ON AN "AS IS" BASIS, WITHOUT -WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, -INCLUDING WITHOUT LIMITATION, WARRANTIES OR CONDITIONS OF TITLE, -NON-INFRINGEMENT, MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR -ITS USE AND OPERATION EITHER ALONE OR IN COMBINATION WITH OTHER -PRODUCTS. - -IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY SPECIAL, INDIRECT, -INCIDENTAL, EXEMPLARY, CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, LOST PROFITS; PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF -USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) OR ARISING IN ANY WAY -OUT OF THE USE, REPRODUCTION, MODIFICATION AND/OR DISTRIBUTION OF THE -NVIDIA SOFTWARE, HOWEVER CAUSED AND WHETHER UNDER THEORY OF CONTRACT, -TORT (INCLUDING NEGLIGENCE), STRICT LIABILITY OR OTHERWISE, EVEN IF -NVIDIA HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-\****************************************************************************/ - -#ifndef _CRT_SECURE_NO_WARNINGS -#define _CRT_SECURE_NO_WARNINGS -#endif - -#include -#include -#include - -#include "PpContext.h" -#include "PpTokens.h" - -namespace { - -using namespace glslang; - -const struct { - int val; - const char* str; -} tokens[] = { - - { PPAtomAddAssign, "+=" }, - { PPAtomSubAssign, "-=" }, - { PPAtomMulAssign, "*=" }, - { PPAtomDivAssign, "/=" }, - { PPAtomModAssign, "%=" }, - - { PpAtomRight, ">>" }, - { PpAtomLeft, "<<" }, - { PpAtomAnd, "&&" }, - { PpAtomOr, "||" }, - { PpAtomXor, "^^" }, - - { PpAtomRightAssign, ">>=" }, - { PpAtomLeftAssign, "<<=" }, - { PpAtomAndAssign, "&=" }, - { PpAtomOrAssign, "|=" }, - { PpAtomXorAssign, "^=" }, - - { PpAtomEQ, "==" }, - { PpAtomNE, "!=" }, - { PpAtomGE, ">=" }, - { PpAtomLE, "<=" }, - - { PpAtomDecrement, "--" }, - { PpAtomIncrement, "++" }, - - { PpAtomColonColon, "::" }, - - { PpAtomDefine, "define" }, - { PpAtomUndef, "undef" }, - { PpAtomIf, "if" }, - { PpAtomElif, "elif" }, - { PpAtomElse, "else" }, - { PpAtomEndif, "endif" }, - { PpAtomIfdef, "ifdef" }, - { PpAtomIfndef, "ifndef" }, - { PpAtomLine, "line" }, - { PpAtomPragma, "pragma" }, - { PpAtomError, "error" }, - - { PpAtomVersion, "version" }, - { PpAtomCore, "core" }, - { PpAtomCompatibility, "compatibility" }, - { PpAtomEs, "es" }, - { PpAtomExtension, "extension" }, - - { PpAtomLineMacro, "__LINE__" }, - { PpAtomFileMacro, "__FILE__" }, - { PpAtomVersionMacro, "__VERSION__" }, - - { PpAtomInclude, "include" }, -}; - -} // end anonymous namespace - -namespace glslang { - -// -// Initialize the atom table. -// -TStringAtomMap::TStringAtomMap() -{ - badToken.assign(""); - - // Add single character tokens to the atom table: - const char* s = "~!%^&*()-+=|,.<>/?;:[]{}#\\"; - char t[2]; - - t[1] = '\0'; - while (*s) { - t[0] = *s; - addAtomFixed(t, s[0]); - s++; - } - - // Add multiple character scanner tokens : - for (size_t ii = 0; ii < sizeof(tokens)/sizeof(tokens[0]); ii++) - addAtomFixed(tokens[ii].str, tokens[ii].val); - - nextAtom = PpAtomLast; -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/preprocessor/PpContext.cpp b/deps/glslang/glslang/MachineIndependent/preprocessor/PpContext.cpp deleted file mode 100644 index 6a2e05fe..00000000 --- a/deps/glslang/glslang/MachineIndependent/preprocessor/PpContext.cpp +++ /dev/null @@ -1,115 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2013 LunarG, Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// -/****************************************************************************\ -Copyright (c) 2002, NVIDIA Corporation. - -NVIDIA Corporation("NVIDIA") supplies this software to you in -consideration of your agreement to the following terms, and your use, -installation, modification or redistribution of this NVIDIA software -constitutes acceptance of these terms. If you do not agree with these -terms, please do not use, install, modify or redistribute this NVIDIA -software. - -In consideration of your agreement to abide by the following terms, and -subject to these terms, NVIDIA grants you a personal, non-exclusive -license, under NVIDIA's copyrights in this original NVIDIA software (the -"NVIDIA Software"), to use, reproduce, modify and redistribute the -NVIDIA Software, with or without modifications, in source and/or binary -forms; provided that if you redistribute the NVIDIA Software, you must -retain the copyright notice of NVIDIA, this notice and the following -text and disclaimers in all such redistributions of the NVIDIA Software. -Neither the name, trademarks, service marks nor logos of NVIDIA -Corporation may be used to endorse or promote products derived from the -NVIDIA Software without specific prior written permission from NVIDIA. -Except as expressly stated in this notice, no other rights or licenses -express or implied, are granted by NVIDIA herein, including but not -limited to any patent rights that may be infringed by your derivative -works or by other works in which the NVIDIA Software may be -incorporated. No hardware is licensed hereunder. - -THE NVIDIA SOFTWARE IS BEING PROVIDED ON AN "AS IS" BASIS, WITHOUT -WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, -INCLUDING WITHOUT LIMITATION, WARRANTIES OR CONDITIONS OF TITLE, -NON-INFRINGEMENT, MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR -ITS USE AND OPERATION EITHER ALONE OR IN COMBINATION WITH OTHER -PRODUCTS. - -IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY SPECIAL, INDIRECT, -INCIDENTAL, EXEMPLARY, CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, LOST PROFITS; PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF -USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) OR ARISING IN ANY WAY -OUT OF THE USE, REPRODUCTION, MODIFICATION AND/OR DISTRIBUTION OF THE -NVIDIA SOFTWARE, HOWEVER CAUSED AND WHETHER UNDER THEORY OF CONTRACT, -TORT (INCLUDING NEGLIGENCE), STRICT LIABILITY OR OTHERWISE, EVEN IF -NVIDIA HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-\****************************************************************************/ - -#include - -#include "PpContext.h" - -namespace glslang { - -TPpContext::TPpContext(TParseContextBase& pc, const std::string& rootFileName, TShader::Includer& inclr) : - preamble(0), strings(0), previous_token('\n'), parseContext(pc), includer(inclr), inComment(false), - rootFileName(rootFileName), - currentSourceFile(rootFileName) -{ - ifdepth = 0; - for (elsetracker = 0; elsetracker < maxIfNesting; elsetracker++) - elseSeen[elsetracker] = false; - elsetracker = 0; -} - -TPpContext::~TPpContext() -{ - delete [] preamble; - - // free up the inputStack - while (! inputStack.empty()) - popInput(); -} - -void TPpContext::setInput(TInputScanner& input, bool versionWillBeError) -{ - assert(inputStack.size() == 0); - - pushInput(new tStringInput(this, input)); - - errorOnVersion = versionWillBeError; - versionSeen = false; -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/preprocessor/PpContext.h b/deps/glslang/glslang/MachineIndependent/preprocessor/PpContext.h deleted file mode 100644 index de48e279..00000000 --- a/deps/glslang/glslang/MachineIndependent/preprocessor/PpContext.h +++ /dev/null @@ -1,622 +0,0 @@ -// -// Copyright (C) 2013 LunarG, Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// -/****************************************************************************\ -Copyright (c) 2002, NVIDIA Corporation. - -NVIDIA Corporation("NVIDIA") supplies this software to you in -consideration of your agreement to the following terms, and your use, -installation, modification or redistribution of this NVIDIA software -constitutes acceptance of these terms. If you do not agree with these -terms, please do not use, install, modify or redistribute this NVIDIA -software. 
- -In consideration of your agreement to abide by the following terms, and -subject to these terms, NVIDIA grants you a personal, non-exclusive -license, under NVIDIA's copyrights in this original NVIDIA software (the -"NVIDIA Software"), to use, reproduce, modify and redistribute the -NVIDIA Software, with or without modifications, in source and/or binary -forms; provided that if you redistribute the NVIDIA Software, you must -retain the copyright notice of NVIDIA, this notice and the following -text and disclaimers in all such redistributions of the NVIDIA Software. -Neither the name, trademarks, service marks nor logos of NVIDIA -Corporation may be used to endorse or promote products derived from the -NVIDIA Software without specific prior written permission from NVIDIA. -Except as expressly stated in this notice, no other rights or licenses -express or implied, are granted by NVIDIA herein, including but not -limited to any patent rights that may be infringed by your derivative -works or by other works in which the NVIDIA Software may be -incorporated. No hardware is licensed hereunder. - -THE NVIDIA SOFTWARE IS BEING PROVIDED ON AN "AS IS" BASIS, WITHOUT -WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, -INCLUDING WITHOUT LIMITATION, WARRANTIES OR CONDITIONS OF TITLE, -NON-INFRINGEMENT, MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR -ITS USE AND OPERATION EITHER ALONE OR IN COMBINATION WITH OTHER -PRODUCTS. - -IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY SPECIAL, INDIRECT, -INCIDENTAL, EXEMPLARY, CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, LOST PROFITS; PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF -USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) OR ARISING IN ANY WAY -OUT OF THE USE, REPRODUCTION, MODIFICATION AND/OR DISTRIBUTION OF THE -NVIDIA SOFTWARE, HOWEVER CAUSED AND WHETHER UNDER THEORY OF CONTRACT, -TORT (INCLUDING NEGLIGENCE), STRICT LIABILITY OR OTHERWISE, EVEN IF -NVIDIA HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -\****************************************************************************/ - -#ifndef PPCONTEXT_H -#define PPCONTEXT_H - -#include -#include - -#include "../ParseHelper.h" - -/* windows only pragma */ -#ifdef _MSC_VER - #pragma warning(disable : 4127) -#endif - -namespace glslang { - -class TPpToken { -public: - TPpToken() : space(false), i64val(0) - { - loc.init(); - name[0] = 0; - } - - // This is used for comparing macro definitions, so checks what is relevant for that. - bool operator==(const TPpToken& right) - { - return space == right.space && - ival == right.ival && dval == right.dval && i64val == right.i64val && - strncmp(name, right.name, MaxTokenLength) == 0; - } - bool operator!=(const TPpToken& right) { return ! operator==(right); } - - TSourceLoc loc; - bool space; // true if a space (for white space or a removed comment) should also be recognized, in front of the token returned - - union { - int ival; - double dval; - long long i64val; - }; - - char name[MaxTokenLength + 1]; -}; - -class TStringAtomMap { -// -// Implementation is in PpAtom.cpp -// -// Maintain a bi-directional mapping between relevant preprocessor strings and -// "atoms" which a unique integers (small, contiguous, not hash-like) per string. -// -public: - TStringAtomMap(); - - // Map string -> atom. - // Return 0 if no existing string. - int getAtom(const char* s) const - { - auto it = atomMap.find(s); - return it == atomMap.end() ? 0 : it->second; - } - - // Map a new or existing string -> atom, inventing a new atom if necessary. 
- int getAddAtom(const char* s) - { - int atom = getAtom(s); - if (atom == 0) { - atom = nextAtom++; - addAtomFixed(s, atom); - } - return atom; - } - - // Map atom -> string. - const char* getString(int atom) const { return stringMap[atom]->c_str(); } - -protected: - TStringAtomMap(TStringAtomMap&); - TStringAtomMap& operator=(TStringAtomMap&); - - TUnorderedMap atomMap; - TVector stringMap; // these point into the TString in atomMap - int nextAtom; - - // Bad source characters can lead to bad atoms, so gracefully handle those by - // pre-filling the table with them (to avoid if tests later). - TString badToken; - - // Add bi-directional mappings: - // - string -> atom - // - atom -> string - void addAtomFixed(const char* s, int atom) - { - auto it = atomMap.insert(std::pair(s, atom)).first; - if (stringMap.size() < (size_t)atom + 1) - stringMap.resize(atom + 100, &badToken); - stringMap[atom] = &it->first; - } -}; - -class TInputScanner; - -// This class is the result of turning a huge pile of C code communicating through globals -// into a class. This was done to allowing instancing to attain thread safety. -// Don't expect too much in terms of OO design. -class TPpContext { -public: - TPpContext(TParseContextBase&, const std::string& rootFileName, TShader::Includer&); - virtual ~TPpContext(); - - void setPreamble(const char* preamble, size_t length); - - int tokenize(TPpToken& ppToken); - int tokenPaste(int token, TPpToken&); - - class tInput { - public: - tInput(TPpContext* p) : done(false), pp(p) { } - virtual ~tInput() { } - - virtual int scan(TPpToken*) = 0; - virtual int getch() = 0; - virtual void ungetch() = 0; - virtual bool peekPasting() { return false; } // true when about to see ## - virtual bool endOfReplacementList() { return false; } // true when at the end of a macro replacement list (RHS of #define) - virtual bool isMacroInput() { return false; } - - // Will be called when we start reading tokens from this instance - virtual void notifyActivated() {} - // Will be called when we do not read tokens from this instance anymore - virtual void notifyDeleted() {} - protected: - bool done; - TPpContext* pp; - }; - - void setInput(TInputScanner& input, bool versionWillBeError); - - void pushInput(tInput* in) - { - inputStack.push_back(in); - in->notifyActivated(); - } - void popInput() - { - inputStack.back()->notifyDeleted(); - delete inputStack.back(); - inputStack.pop_back(); - } - - // - // From PpTokens.cpp - // - - class TokenStream { - public: - TokenStream() : current(0) { } - - void putToken(int token, TPpToken* ppToken); - int getToken(TParseContextBase&, TPpToken*); - bool atEnd() { return current >= data.size(); } - bool peekTokenizedPasting(bool lastTokenPastes); - bool peekUntokenizedPasting(); - void reset() { current = 0; } - - protected: - void putSubtoken(char); - int getSubtoken(); - void ungetSubtoken(); - - TVector data; - size_t current; - }; - - // - // From Pp.cpp - // - - struct MacroSymbol { - MacroSymbol() : emptyArgs(0), busy(0), undef(0) { } - TVector args; - TokenStream body; - unsigned emptyArgs : 1; - unsigned busy : 1; - unsigned undef : 1; - }; - - typedef TMap TSymbolMap; - TSymbolMap macroDefs; // map atoms to macro definitions - MacroSymbol* lookupMacroDef(int atom) - { - auto existingMacroIt = macroDefs.find(atom); - return (existingMacroIt == macroDefs.end()) ? 
nullptr : &(existingMacroIt->second); - } - void addMacroDef(int atom, MacroSymbol& macroDef) { macroDefs[atom] = macroDef; } - -protected: - TPpContext(TPpContext&); - TPpContext& operator=(TPpContext&); - - TStringAtomMap atomStrings; - char* preamble; // string to parse, all before line 1 of string 0, it is 0 if no preamble - int preambleLength; - char** strings; // official strings of shader, starting a string 0 line 1 - size_t* lengths; - int numStrings; // how many official strings there are - int currentString; // which string we're currently parsing (-1 for preamble) - - // Scanner data: - int previous_token; - TParseContextBase& parseContext; - - // Get the next token from *stack* of input sources, popping input sources - // that are out of tokens, down until an input source is found that has a token. - // Return EndOfInput when there are no more tokens to be found by doing this. - int scanToken(TPpToken* ppToken) - { - int token = EndOfInput; - - while (! inputStack.empty()) { - token = inputStack.back()->scan(ppToken); - if (token != EndOfInput || inputStack.empty()) - break; - popInput(); - } - - return token; - } - int getChar() { return inputStack.back()->getch(); } - void ungetChar() { inputStack.back()->ungetch(); } - bool peekPasting() { return !inputStack.empty() && inputStack.back()->peekPasting(); } - bool endOfReplacementList() { return inputStack.empty() || inputStack.back()->endOfReplacementList(); } - bool isMacroInput() { return inputStack.size() > 0 && inputStack.back()->isMacroInput(); } - - static const int maxIfNesting = 64; - - int ifdepth; // current #if-#else-#endif nesting in the cpp.c file (pre-processor) - bool elseSeen[maxIfNesting]; // Keep a track of whether an else has been seen at a particular depth - int elsetracker; // #if-#else and #endif constructs...Counter. - - class tMacroInput : public tInput { - public: - tMacroInput(TPpContext* pp) : tInput(pp), prepaste(false), postpaste(false) { } - virtual ~tMacroInput() - { - for (size_t i = 0; i < args.size(); ++i) - delete args[i]; - for (size_t i = 0; i < expandedArgs.size(); ++i) - delete expandedArgs[i]; - } - - virtual int scan(TPpToken*) override; - virtual int getch() override { assert(0); return EndOfInput; } - virtual void ungetch() override { assert(0); } - bool peekPasting() override { return prepaste; } - bool endOfReplacementList() override { return mac->body.atEnd(); } - bool isMacroInput() override { return true; } - - MacroSymbol *mac; - TVector args; - TVector expandedArgs; - - protected: - bool prepaste; // true if we are just before ## - bool postpaste; // true if we are right after ## - }; - - class tMarkerInput : public tInput { - public: - tMarkerInput(TPpContext* pp) : tInput(pp) { } - virtual int scan(TPpToken*) override - { - if (done) - return EndOfInput; - done = true; - - return marker; - } - virtual int getch() override { assert(0); return EndOfInput; } - virtual void ungetch() override { assert(0); } - static const int marker = -3; - }; - - class tZeroInput : public tInput { - public: - tZeroInput(TPpContext* pp) : tInput(pp) { } - virtual int scan(TPpToken*) override; - virtual int getch() override { assert(0); return EndOfInput; } - virtual void ungetch() override { assert(0); } - }; - - std::vector inputStack; - bool errorOnVersion; - bool versionSeen; - - // - // from Pp.cpp - // - - // Used to obtain #include content. 
- TShader::Includer& includer; - - int CPPdefine(TPpToken * ppToken); - int CPPundef(TPpToken * ppToken); - int CPPelse(int matchelse, TPpToken * ppToken); - int extraTokenCheck(int atom, TPpToken* ppToken, int token); - int eval(int token, int precedence, bool shortCircuit, int& res, bool& err, TPpToken * ppToken); - int evalToToken(int token, bool shortCircuit, int& res, bool& err, TPpToken * ppToken); - int CPPif (TPpToken * ppToken); - int CPPifdef(int defined, TPpToken * ppToken); - int CPPinclude(TPpToken * ppToken); - int CPPline(TPpToken * ppToken); - int CPPerror(TPpToken * ppToken); - int CPPpragma(TPpToken * ppToken); - int CPPversion(TPpToken * ppToken); - int CPPextension(TPpToken * ppToken); - int readCPPline(TPpToken * ppToken); - int scanHeaderName(TPpToken* ppToken, char delimit); - TokenStream* PrescanMacroArg(TokenStream&, TPpToken*, bool newLineOkay); - int MacroExpand(TPpToken* ppToken, bool expandUndef, bool newLineOkay); - - // - // From PpTokens.cpp - // - void pushTokenStreamInput(TokenStream&, bool pasting = false); - void UngetToken(int token, TPpToken*); - - class tTokenInput : public tInput { - public: - tTokenInput(TPpContext* pp, TokenStream* t, bool prepasting) : tInput(pp), tokens(t), lastTokenPastes(prepasting) { } - virtual int scan(TPpToken *ppToken) override { return tokens->getToken(pp->parseContext, ppToken); } - virtual int getch() override { assert(0); return EndOfInput; } - virtual void ungetch() override { assert(0); } - virtual bool peekPasting() override { return tokens->peekTokenizedPasting(lastTokenPastes); } - protected: - TokenStream* tokens; - bool lastTokenPastes; // true if the last token in the input is to be pasted, rather than consumed as a token - }; - - class tUngotTokenInput : public tInput { - public: - tUngotTokenInput(TPpContext* pp, int t, TPpToken* p) : tInput(pp), token(t), lval(*p) { } - virtual int scan(TPpToken *) override; - virtual int getch() override { assert(0); return EndOfInput; } - virtual void ungetch() override { assert(0); } - protected: - int token; - TPpToken lval; - }; - - // - // From PpScanner.cpp - // - class tStringInput : public tInput { - public: - tStringInput(TPpContext* pp, TInputScanner& i) : tInput(pp), input(&i) { } - virtual int scan(TPpToken*) override; - - // Scanner used to get source stream characters. - // - Escaped newlines are handled here, invisibly to the caller. - // - All forms of newline are handled, and turned into just a '\n'. - int getch() override - { - int ch = input->get(); - - if (ch == '\\') { - // Move past escaped newlines, as many as sequentially exist - do { - if (input->peek() == '\r' || input->peek() == '\n') { - bool allowed = pp->parseContext.lineContinuationCheck(input->getSourceLoc(), pp->inComment); - if (! allowed && pp->inComment) - return '\\'; - - // escape one newline now - ch = input->get(); - int nextch = input->get(); - if (ch == '\r' && nextch == '\n') - ch = input->get(); - else - ch = nextch; - } else - return '\\'; - } while (ch == '\\'); - } - - // handle any non-escaped newline - if (ch == '\r' || ch == '\n') { - if (ch == '\r' && input->peek() == '\n') - input->get(); - return '\n'; - } - - return ch; - } - - // Scanner used to backup the source stream characters. Newlines are - // handled here, invisibly to the caller, meaning have to undo exactly - // what getch() above does (e.g., don't leave things in the middle of a - // sequence of escaped newlines). 
- void ungetch() override - { - input->unget(); - - do { - int ch = input->peek(); - if (ch == '\r' || ch == '\n') { - if (ch == '\n') { - // correct for two-character newline - input->unget(); - if (input->peek() != '\r') - input->get(); - } - // now in front of a complete newline, move past an escape character - input->unget(); - if (input->peek() == '\\') - input->unget(); - else { - input->get(); - break; - } - } else - break; - } while (true); - } - - protected: - TInputScanner* input; - }; - - // Holds a reference to included file data, as well as a - // prologue and an epilogue string. This can be scanned using the tInput - // interface and acts as a single source string. - class TokenizableIncludeFile : public tInput { - public: - // Copies prologue and epilogue. The includedFile must remain valid - // until this TokenizableIncludeFile is no longer used. - TokenizableIncludeFile(const TSourceLoc& startLoc, - const std::string& prologue, - TShader::Includer::IncludeResult* includedFile, - const std::string& epilogue, - TPpContext* pp) - : tInput(pp), - prologue_(prologue), - epilogue_(epilogue), - includedFile_(includedFile), - scanner(3, strings, lengths, names, 0, 0, true), - prevScanner(nullptr), - stringInput(pp, scanner) - { - strings[0] = prologue_.data(); - strings[1] = includedFile_->headerData; - strings[2] = epilogue_.data(); - - lengths[0] = prologue_.size(); - lengths[1] = includedFile_->headerLength; - lengths[2] = epilogue_.size(); - - scanner.setLine(startLoc.line); - scanner.setString(startLoc.string); - - scanner.setFile(startLoc.name, 0); - scanner.setFile(startLoc.name, 1); - scanner.setFile(startLoc.name, 2); - } - - // tInput methods: - int scan(TPpToken* t) override { return stringInput.scan(t); } - int getch() override { return stringInput.getch(); } - void ungetch() override { stringInput.ungetch(); } - - void notifyActivated() override - { - prevScanner = pp->parseContext.getScanner(); - pp->parseContext.setScanner(&scanner); - pp->push_include(includedFile_); - } - - void notifyDeleted() override - { - pp->parseContext.setScanner(prevScanner); - pp->pop_include(); - } - - private: - TokenizableIncludeFile& operator=(const TokenizableIncludeFile&); - - // Stores the prologue for this string. - const std::string prologue_; - - // Stores the epilogue for this string. - const std::string epilogue_; - - // Points to the IncludeResult that this TokenizableIncludeFile represents. - TShader::Includer::IncludeResult* includedFile_; - - // Will point to prologue_, includedFile_->headerData and epilogue_ - // This is passed to scanner constructor. - // These do not own the storage and it must remain valid until this - // object has been destroyed. - const char* strings[3]; - // Length of str_, passed to scanner constructor. - size_t lengths[3]; - // String names - const char* names[3]; - // Scans over str_. - TInputScanner scanner; - // The previous effective scanner before the scanner in this instance - // has been activated. - TInputScanner* prevScanner; - // Delegate object implementing the tInput interface. 
- tStringInput stringInput; - }; - - int ScanFromString(char* s); - void missingEndifCheck(); - int lFloatConst(int len, int ch, TPpToken* ppToken); - int characterLiteral(TPpToken* ppToken); - - void push_include(TShader::Includer::IncludeResult* result) - { - currentSourceFile = result->headerName; - includeStack.push(result); - } - - void pop_include() - { - TShader::Includer::IncludeResult* include = includeStack.top(); - includeStack.pop(); - includer.releaseInclude(include); - if (includeStack.empty()) { - currentSourceFile = rootFileName; - } else { - currentSourceFile = includeStack.top()->headerName; - } - } - - bool inComment; - std::string rootFileName; - std::stack includeStack; - std::string currentSourceFile; -}; - -} // end namespace glslang - -#endif // PPCONTEXT_H diff --git a/deps/glslang/glslang/MachineIndependent/preprocessor/PpScanner.cpp b/deps/glslang/glslang/MachineIndependent/preprocessor/PpScanner.cpp deleted file mode 100644 index fa01549d..00000000 --- a/deps/glslang/glslang/MachineIndependent/preprocessor/PpScanner.cpp +++ /dev/null @@ -1,1079 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2013 LunarG, Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// -/****************************************************************************\ -Copyright (c) 2002, NVIDIA Corporation. - -NVIDIA Corporation("NVIDIA") supplies this software to you in -consideration of your agreement to the following terms, and your use, -installation, modification or redistribution of this NVIDIA software -constitutes acceptance of these terms. If you do not agree with these -terms, please do not use, install, modify or redistribute this NVIDIA -software. 
- -In consideration of your agreement to abide by the following terms, and -subject to these terms, NVIDIA grants you a personal, non-exclusive -license, under NVIDIA's copyrights in this original NVIDIA software (the -"NVIDIA Software"), to use, reproduce, modify and redistribute the -NVIDIA Software, with or without modifications, in source and/or binary -forms; provided that if you redistribute the NVIDIA Software, you must -retain the copyright notice of NVIDIA, this notice and the following -text and disclaimers in all such redistributions of the NVIDIA Software. -Neither the name, trademarks, service marks nor logos of NVIDIA -Corporation may be used to endorse or promote products derived from the -NVIDIA Software without specific prior written permission from NVIDIA. -Except as expressly stated in this notice, no other rights or licenses -express or implied, are granted by NVIDIA herein, including but not -limited to any patent rights that may be infringed by your derivative -works or by other works in which the NVIDIA Software may be -incorporated. No hardware is licensed hereunder. - -THE NVIDIA SOFTWARE IS BEING PROVIDED ON AN "AS IS" BASIS, WITHOUT -WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, -INCLUDING WITHOUT LIMITATION, WARRANTIES OR CONDITIONS OF TITLE, -NON-INFRINGEMENT, MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR -ITS USE AND OPERATION EITHER ALONE OR IN COMBINATION WITH OTHER -PRODUCTS. - -IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY SPECIAL, INDIRECT, -INCIDENTAL, EXEMPLARY, CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, LOST PROFITS; PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF -USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) OR ARISING IN ANY WAY -OUT OF THE USE, REPRODUCTION, MODIFICATION AND/OR DISTRIBUTION OF THE -NVIDIA SOFTWARE, HOWEVER CAUSED AND WHETHER UNDER THEORY OF CONTRACT, -TORT (INCLUDING NEGLIGENCE), STRICT LIABILITY OR OTHERWISE, EVEN IF -NVIDIA HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -\****************************************************************************/ - -#ifndef _CRT_SECURE_NO_WARNINGS -#define _CRT_SECURE_NO_WARNINGS -#endif - -#include -#include - -#include "PpContext.h" -#include "PpTokens.h" -#include "../Scan.h" - -namespace glslang { - -/////////////////////////////////////////////////////////////////////////////////////////////// -/////////////////////////////////// Floating point constants: ///////////////////////////////// -/////////////////////////////////////////////////////////////////////////////////////////////// - -/* -* lFloatConst() - Scan a single- or double-precision floating point constant. Assumes that the scanner -* has seen at least one digit, followed by either a decimal '.' or the -* letter 'e', or a precision ending (e.g., F or LF). 
-*/ - -int TPpContext::lFloatConst(int len, int ch, TPpToken* ppToken) -{ - bool HasDecimalOrExponent = false; - int isDouble = 0; - bool generateFloat16 = false; - bool acceptFloat16 = parseContext.intermediate.getSource() == EShSourceHlsl; - bool isFloat16 = false; - bool requireHF = false; -#ifdef AMD_EXTENSIONS - if (parseContext.extensionTurnedOn(E_GL_AMD_gpu_shader_half_float)) { - acceptFloat16 = true; - generateFloat16 = true; - requireHF = true; - } -#endif - - const auto saveName = [&](int ch) { - if (len <= MaxTokenLength) - ppToken->name[len++] = static_cast(ch); - }; - - // Decimal: - - if (ch == '.') { - HasDecimalOrExponent = true; - saveName(ch); - ch = getChar(); - - // 1.#INF or -1.#INF - if (ch == '#') { - if ((len < 2) || - (len == 2 && ppToken->name[0] != '1') || - (len == 3 && ppToken->name[1] != '1' && !(ppToken->name[0] == '-' || ppToken->name[0] == '+')) || - (len > 3)) - parseContext.ppError(ppToken->loc, "unexpected use of", "#", ""); - else { - // we have 1.# or -1.# or +1.#, check for 'INF' - if ((ch = getChar()) != 'I' || - (ch = getChar()) != 'N' || - (ch = getChar()) != 'F') - parseContext.ppError(ppToken->loc, "expected 'INF'", "#", ""); - else { - // we have [+-].#INF, and we are targeting IEEE 754, so wrap it up: - saveName('I'); - saveName('N'); - saveName('F'); - ppToken->name[len] = '\0'; - if (ppToken->name[0] == '-') - ppToken->i64val = 0xfff0000000000000; // -Infinity - else - ppToken->i64val = 0x7ff0000000000000; // +Infinity - return PpAtomConstFloat; - } - } - } - - while (ch >= '0' && ch <= '9') { - saveName(ch); - ch = getChar(); - } - } - - // Exponent: - - if (ch == 'e' || ch == 'E') { - HasDecimalOrExponent = true; - saveName(ch); - ch = getChar(); - if (ch == '+' || ch == '-') { - saveName(ch); - ch = getChar(); - } - if (ch >= '0' && ch <= '9') { - while (ch >= '0' && ch <= '9') { - saveName(ch); - ch = getChar(); - } - } else { - parseContext.ppError(ppToken->loc, "bad character in float exponent", "", ""); - } - } - - // Suffix: - - if (ch == 'l' || ch == 'L') { - parseContext.doubleCheck(ppToken->loc, "double floating-point suffix"); - if (! HasDecimalOrExponent) - parseContext.ppError(ppToken->loc, "float literal needs a decimal point or exponent", "", ""); - int ch2 = getChar(); - if (ch2 != 'f' && ch2 != 'F') { - ungetChar(); - ungetChar(); - } else { - saveName(ch); - saveName(ch2); - isDouble = 1; - } - } else if (acceptFloat16 && (ch == 'h' || ch == 'H')) { -#ifdef AMD_EXTENSIONS - if (generateFloat16) - parseContext.float16Check(ppToken->loc, "half floating-point suffix"); -#endif - if (!HasDecimalOrExponent) - parseContext.ppError(ppToken->loc, "float literal needs a decimal point or exponent", "", ""); - if (requireHF) { - int ch2 = getChar(); - if (ch2 != 'f' && ch2 != 'F') { - ungetChar(); - ungetChar(); - } else { - saveName(ch); - saveName(ch2); - isFloat16 = generateFloat16; - } - } else { - saveName(ch); - isFloat16 = generateFloat16; - } - } else if (ch == 'f' || ch == 'F') { - parseContext.profileRequires(ppToken->loc, EEsProfile, 300, nullptr, "floating-point suffix"); - if (! parseContext.relaxedErrors()) - parseContext.profileRequires(ppToken->loc, ~EEsProfile, 120, nullptr, "floating-point suffix"); - if (! HasDecimalOrExponent) - parseContext.ppError(ppToken->loc, "float literal needs a decimal point or exponent", "", ""); - saveName(ch); - } else - ungetChar(); - - // Patch up the name, length, etc. 
- - if (len > MaxTokenLength) { - len = MaxTokenLength; - parseContext.ppError(ppToken->loc, "float literal too long", "", ""); - } - ppToken->name[len] = '\0'; - - // Get the numerical value - ppToken->dval = strtod(ppToken->name, nullptr); - - // Return the right token type - if (isDouble) - return PpAtomConstDouble; - else if (isFloat16) - return PpAtomConstFloat16; - else - return PpAtomConstFloat; -} - -// Recognize a character literal. -// -// The first ' has already been accepted, read the rest, through the closing '. -// -// Always returns PpAtomConstInt. -// -int TPpContext::characterLiteral(TPpToken* ppToken) -{ - ppToken->name[0] = 0; - ppToken->ival = 0; - - if (parseContext.intermediate.getSource() != EShSourceHlsl) { - // illegal, except in macro definition, for which case we report the character - return '\''; - } - - int ch = getChar(); - switch (ch) { - case '\'': - // As empty sequence: '' - parseContext.ppError(ppToken->loc, "unexpected", "\'", ""); - return PpAtomConstInt; - case '\\': - // As escape sequence: '\XXX' - switch (ch = getChar()) { - case 'a': - ppToken->ival = 7; - break; - case 'b': - ppToken->ival = 8; - break; - case 't': - ppToken->ival = 9; - break; - case 'n': - ppToken->ival = 10; - break; - case 'v': - ppToken->ival = 11; - break; - case 'f': - ppToken->ival = 12; - break; - case 'r': - ppToken->ival = 13; - break; - case 'x': - case '0': - parseContext.ppError(ppToken->loc, "octal and hex sequences not supported", "\\", ""); - break; - default: - // This catches '\'', '\"', '\?', etc. - // Also, things like '\C' mean the same thing as 'C' - // (after the above cases are filtered out). - ppToken->ival = ch; - break; - } - break; - default: - ppToken->ival = ch; - break; - } - ppToken->name[0] = (char)ppToken->ival; - ppToken->name[1] = '\0'; - ch = getChar(); - if (ch != '\'') { - parseContext.ppError(ppToken->loc, "expected", "\'", ""); - // Look ahead for a closing ' - do { - ch = getChar(); - } while (ch != '\'' && ch != EndOfInput && ch != '\n'); - } - - return PpAtomConstInt; -} - -// -// Scanner used to tokenize source stream. -// -int TPpContext::tStringInput::scan(TPpToken* ppToken) -{ - int AlreadyComplained = 0; - int len = 0; - int ch = 0; - int ii = 0; - unsigned long long ival = 0; - bool enableInt64 = pp->parseContext.version >= 450 && pp->parseContext.extensionTurnedOn(E_GL_ARB_gpu_shader_int64); -#ifdef AMD_EXTENSIONS - bool enableInt16 = pp->parseContext.version >= 450 && pp->parseContext.extensionTurnedOn(E_GL_AMD_gpu_shader_int16); -#endif - bool acceptHalf = pp->parseContext.intermediate.getSource() == EShSourceHlsl; -#ifdef AMD_EXTENSIONS - if (pp->parseContext.extensionTurnedOn(E_GL_AMD_gpu_shader_half_float)) - acceptHalf = true; -#endif - - const auto floatingPointChar = [&](int ch) { return ch == '.' 
|| ch == 'e' || ch == 'E' || - ch == 'f' || ch == 'F' || - (acceptHalf && (ch == 'h' || ch == 'H')); }; - - ppToken->ival = 0; - ppToken->i64val = 0; - ppToken->space = false; - ch = getch(); - for (;;) { - while (ch == ' ' || ch == '\t') { - ppToken->space = true; - ch = getch(); - } - - ppToken->loc = pp->parseContext.getCurrentLoc(); - len = 0; - switch (ch) { - default: - // Single character token, including EndOfInput, '#' and '\' (escaped newlines are handled at a lower level, so this is just a '\' token) - if (ch > PpAtomMaxSingle) - ch = PpAtomBadToken; - return ch; - - case 'A': case 'B': case 'C': case 'D': case 'E': - case 'F': case 'G': case 'H': case 'I': case 'J': - case 'K': case 'L': case 'M': case 'N': case 'O': - case 'P': case 'Q': case 'R': case 'S': case 'T': - case 'U': case 'V': case 'W': case 'X': case 'Y': - case 'Z': case '_': - case 'a': case 'b': case 'c': case 'd': case 'e': - case 'f': case 'g': case 'h': case 'i': case 'j': - case 'k': case 'l': case 'm': case 'n': case 'o': - case 'p': case 'q': case 'r': case 's': case 't': - case 'u': case 'v': case 'w': case 'x': case 'y': - case 'z': - do { - if (len < MaxTokenLength) { - ppToken->name[len++] = (char)ch; - ch = getch(); - } else { - if (! AlreadyComplained) { - pp->parseContext.ppError(ppToken->loc, "name too long", "", ""); - AlreadyComplained = 1; - } - ch = getch(); - } - } while ((ch >= 'a' && ch <= 'z') || - (ch >= 'A' && ch <= 'Z') || - (ch >= '0' && ch <= '9') || - ch == '_'); - - // line continuation with no token before or after makes len == 0, and need to start over skipping white space, etc. - if (len == 0) - continue; - - ppToken->name[len] = '\0'; - ungetch(); - return PpAtomIdentifier; - case '0': - ppToken->name[len++] = (char)ch; - ch = getch(); - if (ch == 'x' || ch == 'X') { - // must be hexadecimal - - bool isUnsigned = false; - bool isInt64 = false; -#ifdef AMD_EXTENSIONS - bool isInt16 = false; -#endif - ppToken->name[len++] = (char)ch; - ch = getch(); - if ((ch >= '0' && ch <= '9') || - (ch >= 'A' && ch <= 'F') || - (ch >= 'a' && ch <= 'f')) { - - ival = 0; - do { - if (ival <= 0x0fffffffu || (enableInt64 && ival <= 0x0fffffffffffffffull)) { - ppToken->name[len++] = (char)ch; - if (ch >= '0' && ch <= '9') { - ii = ch - '0'; - } else if (ch >= 'A' && ch <= 'F') { - ii = ch - 'A' + 10; - } else if (ch >= 'a' && ch <= 'f') { - ii = ch - 'a' + 10; - } else - pp->parseContext.ppError(ppToken->loc, "bad digit in hexadecimal literal", "", ""); - ival = (ival << 4) | ii; - } else { - if (! 
AlreadyComplained) { - pp->parseContext.ppError(ppToken->loc, "hexadecimal literal too big", "", ""); - AlreadyComplained = 1; - } - ival = 0xffffffffffffffffull; - } - ch = getch(); - } while ((ch >= '0' && ch <= '9') || - (ch >= 'A' && ch <= 'F') || - (ch >= 'a' && ch <= 'f')); - } else { - pp->parseContext.ppError(ppToken->loc, "bad digit in hexadecimal literal", "", ""); - } - if (ch == 'u' || ch == 'U') { - if (len < MaxTokenLength) - ppToken->name[len++] = (char)ch; - isUnsigned = true; - - if (enableInt64) { - int nextCh = getch(); - if ((ch == 'u' && nextCh == 'l') || (ch == 'U' && nextCh == 'L')) { - if (len < MaxTokenLength) - ppToken->name[len++] = (char)nextCh; - isInt64 = true; - } else - ungetch(); - } - -#ifdef AMD_EXTENSIONS - if (enableInt16) { - int nextCh = getch(); - if ((ch == 'u' && nextCh == 's') || (ch == 'U' && nextCh == 'S')) { - if (len < MaxTokenLength) - ppToken->name[len++] = (char)nextCh; - isInt16 = true; - } else - ungetch(); - } -#endif - } else if (enableInt64 && (ch == 'l' || ch == 'L')) { - if (len < MaxTokenLength) - ppToken->name[len++] = (char)ch; - isInt64 = true; -#ifdef AMD_EXTENSIONS - } else if (enableInt16 && (ch == 's' || ch == 'S')) { - if (len < MaxTokenLength) - ppToken->name[len++] = (char)ch; - isInt16 = true; -#endif - } else - ungetch(); - ppToken->name[len] = '\0'; - - if (isInt64) { - ppToken->i64val = ival; - return isUnsigned ? PpAtomConstUint64 : PpAtomConstInt64; -#ifdef AMD_EXTENSIONS - } else if (isInt16) { - ppToken->ival = (int)ival; - return isUnsigned ? PpAtomConstUint16 : PpAtomConstInt16; -#endif - } else { - ppToken->ival = (int)ival; - return isUnsigned ? PpAtomConstUint : PpAtomConstInt; - } - } else { - // could be octal integer or floating point, speculative pursue octal until it must be floating point - - bool isUnsigned = false; - bool isInt64 = false; -#ifdef AMD_EXTENSIONS - bool isInt16 = false; -#endif - bool octalOverflow = false; - bool nonOctal = false; - ival = 0; - - // see how much octal-like stuff we can read - while (ch >= '0' && ch <= '7') { - if (len < MaxTokenLength) - ppToken->name[len++] = (char)ch; - else if (! AlreadyComplained) { - pp->parseContext.ppError(ppToken->loc, "numeric literal too long", "", ""); - AlreadyComplained = 1; - } - if (ival <= 0x1fffffffu || (enableInt64 && ival <= 0x1fffffffffffffffull)) { - ii = ch - '0'; - ival = (ival << 3) | ii; - } else - octalOverflow = true; - ch = getch(); - } - - // could be part of a float... - if (ch == '8' || ch == '9') { - nonOctal = true; - do { - if (len < MaxTokenLength) - ppToken->name[len++] = (char)ch; - else if (! AlreadyComplained) { - pp->parseContext.ppError(ppToken->loc, "numeric literal too long", "", ""); - AlreadyComplained = 1; - } - ch = getch(); - } while (ch >= '0' && ch <= '9'); - } - if (floatingPointChar(ch)) - return pp->lFloatConst(len, ch, ppToken); - - // wasn't a float, so must be octal... 
- if (nonOctal) - pp->parseContext.ppError(ppToken->loc, "octal literal digit too large", "", ""); - - if (ch == 'u' || ch == 'U') { - if (len < MaxTokenLength) - ppToken->name[len++] = (char)ch; - isUnsigned = true; - - if (enableInt64) { - int nextCh = getch(); - if ((ch == 'u' && nextCh == 'l') || (ch == 'U' && nextCh == 'L')) { - if (len < MaxTokenLength) - ppToken->name[len++] = (char)nextCh; - isInt64 = true; - } else - ungetch(); - } - -#ifdef AMD_EXTENSIONS - if (enableInt16) { - int nextCh = getch(); - if ((ch == 'u' && nextCh == 's') || (ch == 'U' && nextCh == 'S')) { - if (len < MaxTokenLength) - ppToken->name[len++] = (char)nextCh; - isInt16 = true; - } else - ungetch(); - } -#endif - } else if (enableInt64 && (ch == 'l' || ch == 'L')) { - if (len < MaxTokenLength) - ppToken->name[len++] = (char)ch; - isInt64 = true; -#ifdef AMD_EXTENSIONS - } else if (enableInt16 && (ch == 's' || ch == 'S')) { - if (len < MaxTokenLength) - ppToken->name[len++] = (char)ch; - isInt16 = true; -#endif - } else - ungetch(); - ppToken->name[len] = '\0'; - - if (octalOverflow) - pp->parseContext.ppError(ppToken->loc, "octal literal too big", "", ""); - - if (isInt64) { - ppToken->i64val = ival; - return isUnsigned ? PpAtomConstUint64 : PpAtomConstInt64; -#ifdef AMD_EXTENSIONS - } else if (isInt16) { - ppToken->ival = (int)ival; - return isUnsigned ? PpAtomConstUint16 : PpAtomConstInt16; -#endif - } else { - ppToken->ival = (int)ival; - return isUnsigned ? PpAtomConstUint : PpAtomConstInt; - } - } - break; - case '1': case '2': case '3': case '4': - case '5': case '6': case '7': case '8': case '9': - // can't be hexadecimal or octal, is either decimal or floating point - - do { - if (len < MaxTokenLength) - ppToken->name[len++] = (char)ch; - else if (! AlreadyComplained) { - pp->parseContext.ppError(ppToken->loc, "numeric literal too long", "", ""); - AlreadyComplained = 1; - } - ch = getch(); - } while (ch >= '0' && ch <= '9'); - if (floatingPointChar(ch)) - return pp->lFloatConst(len, ch, ppToken); - else { - // Finish handling signed and unsigned integers - int numericLen = len; - bool isUnsigned = false; - bool isInt64 = false; -#ifdef AMD_EXTENSIONS - bool isInt16 = false; -#endif - if (ch == 'u' || ch == 'U') { - if (len < MaxTokenLength) - ppToken->name[len++] = (char)ch; - isUnsigned = true; - - if (enableInt64) { - int nextCh = getch(); - if ((ch == 'u' && nextCh == 'l') || (ch == 'U' && nextCh == 'L')) { - if (len < MaxTokenLength) - ppToken->name[len++] = (char)nextCh; - isInt64 = true; - } else - ungetch(); - } - -#ifdef AMD_EXTENSIONS - if (enableInt16) { - int nextCh = getch(); - if ((ch == 'u' && nextCh == 's') || (ch == 'U' && nextCh == 'S')) { - if (len < MaxTokenLength) - ppToken->name[len++] = (char)nextCh; - isInt16 = true; - } else - ungetch(); - } -#endif - } else if (enableInt64 && (ch == 'l' || ch == 'L')) { - if (len < MaxTokenLength) - ppToken->name[len++] = (char)ch; - isInt64 = true; -#ifdef AMD_EXTENSIONS - } else if (enableInt16 && (ch == 's' || ch == 'S')) { - if (len < MaxTokenLength) - ppToken->name[len++] = (char)ch; - isInt16 = true; -#endif - } else - ungetch(); - - ppToken->name[len] = '\0'; - ival = 0; - const unsigned oneTenthMaxInt = 0xFFFFFFFFu / 10; - const unsigned remainderMaxInt = 0xFFFFFFFFu - 10 * oneTenthMaxInt; - const unsigned long long oneTenthMaxInt64 = 0xFFFFFFFFFFFFFFFFull / 10; - const unsigned long long remainderMaxInt64 = 0xFFFFFFFFFFFFFFFFull - 10 * oneTenthMaxInt64; -#ifdef AMD_EXTENSIONS - const unsigned short oneTenthMaxInt16 = 0xFFFFu / 10; 
- const unsigned short remainderMaxInt16 = 0xFFFFu - 10 * oneTenthMaxInt16; -#endif - for (int i = 0; i < numericLen; i++) { - ch = ppToken->name[i] - '0'; - bool overflow = false; - if (isInt64) - overflow = (ival > oneTenthMaxInt64 || (ival == oneTenthMaxInt64 && (unsigned long long)ch > remainderMaxInt64)); -#ifdef AMD_EXTENSIONS - else if (isInt16) - overflow = (ival > oneTenthMaxInt16 || (ival == oneTenthMaxInt16 && (unsigned short)ch > remainderMaxInt16)); -#endif - else - overflow = (ival > oneTenthMaxInt || (ival == oneTenthMaxInt && (unsigned)ch > remainderMaxInt)); - if (overflow) { - pp->parseContext.ppError(ppToken->loc, "numeric literal too big", "", ""); - ival = 0xFFFFFFFFFFFFFFFFull; - break; - } else - ival = ival * 10 + ch; - } - - if (isInt64) { - ppToken->i64val = ival; - return isUnsigned ? PpAtomConstUint64 : PpAtomConstInt64; -#ifdef AMD_EXTENSIONS - } else if (isInt16) { - ppToken->ival = (int)ival; - return isUnsigned ? PpAtomConstUint16 : PpAtomConstInt16; -#endif - } else { - ppToken->ival = (int)ival; - return isUnsigned ? PpAtomConstUint : PpAtomConstInt; - } - } - break; - case '-': - ch = getch(); - if (ch == '-') { - return PpAtomDecrement; - } else if (ch == '=') { - return PPAtomSubAssign; - } else { - ungetch(); - return '-'; - } - case '+': - ch = getch(); - if (ch == '+') { - return PpAtomIncrement; - } else if (ch == '=') { - return PPAtomAddAssign; - } else { - ungetch(); - return '+'; - } - case '*': - ch = getch(); - if (ch == '=') { - return PPAtomMulAssign; - } else { - ungetch(); - return '*'; - } - case '%': - ch = getch(); - if (ch == '=') { - return PPAtomModAssign; - } else { - ungetch(); - return '%'; - } - case '^': - ch = getch(); - if (ch == '^') { - return PpAtomXor; - } else { - if (ch == '=') - return PpAtomXorAssign; - else{ - ungetch(); - return '^'; - } - } - - case '=': - ch = getch(); - if (ch == '=') { - return PpAtomEQ; - } else { - ungetch(); - return '='; - } - case '!': - ch = getch(); - if (ch == '=') { - return PpAtomNE; - } else { - ungetch(); - return '!'; - } - case '|': - ch = getch(); - if (ch == '|') { - return PpAtomOr; - } else if (ch == '=') { - return PpAtomOrAssign; - } else { - ungetch(); - return '|'; - } - case '&': - ch = getch(); - if (ch == '&') { - return PpAtomAnd; - } else if (ch == '=') { - return PpAtomAndAssign; - } else { - ungetch(); - return '&'; - } - case '<': - ch = getch(); - if (ch == '<') { - ch = getch(); - if (ch == '=') - return PpAtomLeftAssign; - else { - ungetch(); - return PpAtomLeft; - } - } else if (ch == '=') { - return PpAtomLE; - } else { - ungetch(); - return '<'; - } - case '>': - ch = getch(); - if (ch == '>') { - ch = getch(); - if (ch == '=') - return PpAtomRightAssign; - else { - ungetch(); - return PpAtomRight; - } - } else if (ch == '=') { - return PpAtomGE; - } else { - ungetch(); - return '>'; - } - case '.': - ch = getch(); - if (ch >= '0' && ch <= '9') { - ungetch(); - return pp->lFloatConst(0, '.', ppToken); - } else { - ungetch(); - return '.'; - } - case '/': - ch = getch(); - if (ch == '/') { - pp->inComment = true; - do { - ch = getch(); - } while (ch != '\n' && ch != EndOfInput); - ppToken->space = true; - pp->inComment = false; - - return ch; - } else if (ch == '*') { - ch = getch(); - do { - while (ch != '*') { - if (ch == EndOfInput) { - pp->parseContext.ppError(ppToken->loc, "End of input in comment", "comment", ""); - return ch; - } - ch = getch(); - } - ch = getch(); - if (ch == EndOfInput) { - pp->parseContext.ppError(ppToken->loc, "End of input in 
comment", "comment", ""); - return ch; - } - } while (ch != '/'); - ppToken->space = true; - // loop again to get the next token... - break; - } else if (ch == '=') { - return PPAtomDivAssign; - } else { - ungetch(); - return '/'; - } - break; - case '\'': - return pp->characterLiteral(ppToken); - case '"': - // TODO: If this gets enhanced to handle escape sequences, or - // anything that is different than what #include needs, then - // #include needs to use scanHeaderName() for this. - ch = getch(); - while (ch != '"' && ch != '\n' && ch != EndOfInput) { - if (len < MaxTokenLength) { - ppToken->name[len] = (char)ch; - len++; - ch = getch(); - } else - break; - }; - ppToken->name[len] = '\0'; - if (ch != '"') { - ungetch(); - pp->parseContext.ppError(ppToken->loc, "End of line in string", "string", ""); - } - return PpAtomConstString; - case ':': - ch = getch(); - if (ch == ':') - return PpAtomColonColon; - ungetch(); - return ':'; - } - - ch = getch(); - } -} - -// -// The main functional entry point into the preprocessor, which will -// scan the source strings to figure out and return the next processing token. -// -// Return the token, or EndOfInput when no more tokens. -// -int TPpContext::tokenize(TPpToken& ppToken) -{ - for(;;) { - int token = scanToken(&ppToken); - - // Handle token-pasting logic - token = tokenPaste(token, ppToken); - - if (token == EndOfInput) { - missingEndifCheck(); - return EndOfInput; - } - if (token == '#') { - if (previous_token == '\n') { - token = readCPPline(&ppToken); - if (token == EndOfInput) { - missingEndifCheck(); - return EndOfInput; - } - continue; - } else { - parseContext.ppError(ppToken.loc, "preprocessor directive cannot be preceded by another token", "#", ""); - return EndOfInput; - } - } - previous_token = token; - - if (token == '\n') - continue; - - // expand macros - if (token == PpAtomIdentifier && MacroExpand(&ppToken, false, true) != 0) - continue; - - switch (token) { - case PpAtomIdentifier: - case PpAtomConstInt: - case PpAtomConstUint: - case PpAtomConstFloat: - case PpAtomConstInt64: - case PpAtomConstUint64: -#ifdef AMD_EXTENSIONS - case PpAtomConstInt16: - case PpAtomConstUint16: -#endif - case PpAtomConstDouble: -#ifdef AMD_EXTENSIONS - case PpAtomConstFloat16: -#endif - if (ppToken.name[0] == '\0') - continue; - break; - case PpAtomConstString: - if (parseContext.intermediate.getSource() != EShSourceHlsl) { - // HLSL allows string literals. - parseContext.ppError(ppToken.loc, "string literals not supported", "\"\"", ""); - continue; - } - break; - case '\'': - parseContext.ppError(ppToken.loc, "character literals not supported", "\'", ""); - continue; - default: - strcpy(ppToken.name, atomStrings.getString(token)); - break; - } - - return token; - } -} - -// -// Do all token-pasting related combining of two pasted tokens when getting a -// stream of tokens from a replacement list. Degenerates to no processing if a -// replacement list is not the source of the token stream. 
-// -int TPpContext::tokenPaste(int token, TPpToken& ppToken) -{ - // starting with ## is illegal, skip to next token - if (token == PpAtomPaste) { - parseContext.ppError(ppToken.loc, "unexpected location", "##", ""); - return scanToken(&ppToken); - } - - int resultToken = token; // "foo" pasted with "35" is an identifier, not a number - - // ## can be chained, process all in the chain at once - while (peekPasting()) { - TPpToken pastedPpToken; - - // next token has to be ## - token = scanToken(&pastedPpToken); - assert(token == PpAtomPaste); - - // This covers end of macro expansion - if (endOfReplacementList()) { - parseContext.ppError(ppToken.loc, "unexpected location; end of replacement list", "##", ""); - break; - } - - // get the token after the ## - token = scanToken(&pastedPpToken); - - // This covers end of argument expansion - if (token == tMarkerInput::marker) { - parseContext.ppError(ppToken.loc, "unexpected location; end of argument", "##", ""); - break; - } - - // get the token text - switch (resultToken) { - case PpAtomIdentifier: - // already have the correct text in token.names - break; - case '=': - case '!': - case '-': - case '~': - case '+': - case '*': - case '/': - case '%': - case '<': - case '>': - case '|': - case '^': - case '&': - case PpAtomRight: - case PpAtomLeft: - case PpAtomAnd: - case PpAtomOr: - case PpAtomXor: - strcpy(ppToken.name, atomStrings.getString(resultToken)); - strcpy(pastedPpToken.name, atomStrings.getString(token)); - break; - default: - parseContext.ppError(ppToken.loc, "not supported for these tokens", "##", ""); - return resultToken; - } - - // combine the tokens - if (strlen(ppToken.name) + strlen(pastedPpToken.name) > MaxTokenLength) { - parseContext.ppError(ppToken.loc, "combined tokens are too long", "##", ""); - return resultToken; - } - strncat(ppToken.name, pastedPpToken.name, MaxTokenLength - strlen(ppToken.name)); - - // correct the kind of token we are making, if needed (identifiers stay identifiers) - if (resultToken != PpAtomIdentifier) { - int newToken = atomStrings.getAtom(ppToken.name); - if (newToken > 0) - resultToken = newToken; - else - parseContext.ppError(ppToken.loc, "combined token is invalid", "##", ""); - } - } - - return resultToken; -} - -// Checks if we've seen balanced #if...#endif -void TPpContext::missingEndifCheck() -{ - if (ifdepth > 0) - parseContext.ppError(parseContext.getCurrentLoc(), "missing #endif", "", ""); -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/preprocessor/PpTokens.cpp b/deps/glslang/glslang/MachineIndependent/preprocessor/PpTokens.cpp deleted file mode 100644 index bc145e25..00000000 --- a/deps/glslang/glslang/MachineIndependent/preprocessor/PpTokens.cpp +++ /dev/null @@ -1,365 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2013 LunarG, Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. 
nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// -/****************************************************************************\ -Copyright (c) 2002, NVIDIA Corporation. - -NVIDIA Corporation("NVIDIA") supplies this software to you in -consideration of your agreement to the following terms, and your use, -installation, modification or redistribution of this NVIDIA software -constitutes acceptance of these terms. If you do not agree with these -terms, please do not use, install, modify or redistribute this NVIDIA -software. - -In consideration of your agreement to abide by the following terms, and -subject to these terms, NVIDIA grants you a personal, non-exclusive -license, under NVIDIA's copyrights in this original NVIDIA software (the -"NVIDIA Software"), to use, reproduce, modify and redistribute the -NVIDIA Software, with or without modifications, in source and/or binary -forms; provided that if you redistribute the NVIDIA Software, you must -retain the copyright notice of NVIDIA, this notice and the following -text and disclaimers in all such redistributions of the NVIDIA Software. -Neither the name, trademarks, service marks nor logos of NVIDIA -Corporation may be used to endorse or promote products derived from the -NVIDIA Software without specific prior written permission from NVIDIA. -Except as expressly stated in this notice, no other rights or licenses -express or implied, are granted by NVIDIA herein, including but not -limited to any patent rights that may be infringed by your derivative -works or by other works in which the NVIDIA Software may be -incorporated. No hardware is licensed hereunder. - -THE NVIDIA SOFTWARE IS BEING PROVIDED ON AN "AS IS" BASIS, WITHOUT -WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, -INCLUDING WITHOUT LIMITATION, WARRANTIES OR CONDITIONS OF TITLE, -NON-INFRINGEMENT, MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR -ITS USE AND OPERATION EITHER ALONE OR IN COMBINATION WITH OTHER -PRODUCTS. - -IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY SPECIAL, INDIRECT, -INCIDENTAL, EXEMPLARY, CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, LOST PROFITS; PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF -USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) OR ARISING IN ANY WAY -OUT OF THE USE, REPRODUCTION, MODIFICATION AND/OR DISTRIBUTION OF THE -NVIDIA SOFTWARE, HOWEVER CAUSED AND WHETHER UNDER THEORY OF CONTRACT, -TORT (INCLUDING NEGLIGENCE), STRICT LIABILITY OR OTHERWISE, EVEN IF -NVIDIA HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-\****************************************************************************/ - -// -// For recording and playing back the stream of tokens in a macro definition. -// - -#ifndef _CRT_SECURE_NO_WARNINGS -#define _CRT_SECURE_NO_WARNINGS -#endif -#if (defined(_MSC_VER) && _MSC_VER < 1900 /*vs2015*/) -#define snprintf sprintf_s -#endif - -#include -#include -#include -#include - -#include "PpContext.h" -#include "PpTokens.h" - -namespace glslang { - -// push onto back of stream -void TPpContext::TokenStream::putSubtoken(char subtoken) -{ - data.push_back(static_cast(subtoken)); -} - -// get the next token in stream -int TPpContext::TokenStream::getSubtoken() -{ - if (current < data.size()) - return data[current++]; - else - return EndOfInput; -} - -// back up one position in the stream -void TPpContext::TokenStream::ungetSubtoken() -{ - if (current > 0) - --current; -} - -// Add a complete token (including backing string) to the end of a list -// for later playback. -void TPpContext::TokenStream::putToken(int token, TPpToken* ppToken) -{ - const char* s; - char* str = NULL; - - assert((token & ~0xff) == 0); - putSubtoken(static_cast(token)); - - switch (token) { - case PpAtomIdentifier: - case PpAtomConstString: - s = ppToken->name; - while (*s) - putSubtoken(*s++); - putSubtoken(0); - break; - case PpAtomConstInt: - case PpAtomConstUint: - case PpAtomConstInt64: - case PpAtomConstUint64: -#ifdef AMD_EXTENSIONS - case PpAtomConstInt16: - case PpAtomConstUint16: -#endif - case PpAtomConstFloat: - case PpAtomConstDouble: -#ifdef AMD_EXTENSIONS - case PpAtomConstFloat16: -#endif - str = ppToken->name; - while (*str) { - putSubtoken(*str); - str++; - } - putSubtoken(0); - break; - default: - break; - } -} - -// Read the next token from a token stream. -// (Not the source stream, but a stream used to hold a tokenized macro). 
-int TPpContext::TokenStream::getToken(TParseContextBase& parseContext, TPpToken *ppToken) -{ - int len; - int ch; - - int subtoken = getSubtoken(); - ppToken->loc = parseContext.getCurrentLoc(); - switch (subtoken) { - case '#': - // Check for ##, unless the current # is the last character - if (current < data.size()) { - if (getSubtoken() == '#') { - parseContext.requireProfile(ppToken->loc, ~EEsProfile, "token pasting (##)"); - parseContext.profileRequires(ppToken->loc, ~EEsProfile, 130, 0, "token pasting (##)"); - subtoken = PpAtomPaste; - } else - ungetSubtoken(); - } - break; - case PpAtomConstString: - case PpAtomIdentifier: - case PpAtomConstFloat: - case PpAtomConstDouble: -#ifdef AMD_EXTENSIONS - case PpAtomConstFloat16: -#endif - case PpAtomConstInt: - case PpAtomConstUint: - case PpAtomConstInt64: - case PpAtomConstUint64: -#ifdef AMD_EXTENSIONS - case PpAtomConstInt16: - case PpAtomConstUint16: -#endif - len = 0; - ch = getSubtoken(); - while (ch != 0 && ch != EndOfInput) { - if (len < MaxTokenLength) { - ppToken->name[len] = (char)ch; - len++; - ch = getSubtoken(); - } else { - parseContext.error(ppToken->loc, "token too long", "", ""); - break; - } - } - ppToken->name[len] = 0; - - switch (subtoken) { - case PpAtomIdentifier: - break; - case PpAtomConstString: - break; - case PpAtomConstFloat: - case PpAtomConstDouble: -#ifdef AMD_EXTENSIONS - case PpAtomConstFloat16: -#endif - ppToken->dval = atof(ppToken->name); - break; - case PpAtomConstInt: -#ifdef AMD_EXTENSIONS - case PpAtomConstInt16: -#endif - if (len > 0 && ppToken->name[0] == '0') { - if (len > 1 && (ppToken->name[1] == 'x' || ppToken->name[1] == 'X')) - ppToken->ival = (int)strtol(ppToken->name, 0, 16); - else - ppToken->ival = (int)strtol(ppToken->name, 0, 8); - } else - ppToken->ival = atoi(ppToken->name); - break; - case PpAtomConstUint: -#ifdef AMD_EXTENSIONS - case PpAtomConstUint16: -#endif - if (len > 0 && ppToken->name[0] == '0') { - if (len > 1 && (ppToken->name[1] == 'x' || ppToken->name[1] == 'X')) - ppToken->ival = (int)strtoul(ppToken->name, 0, 16); - else - ppToken->ival = (int)strtoul(ppToken->name, 0, 8); - } else - ppToken->ival = (int)strtoul(ppToken->name, 0, 10); - break; - case PpAtomConstInt64: - if (len > 0 && ppToken->name[0] == '0') { - if (len > 1 && (ppToken->name[1] == 'x' || ppToken->name[1] == 'X')) - ppToken->i64val = strtoll(ppToken->name, nullptr, 16); - else - ppToken->i64val = strtoll(ppToken->name, nullptr, 8); - } else - ppToken->i64val = atoll(ppToken->name); - break; - case PpAtomConstUint64: - if (len > 0 && ppToken->name[0] == '0') { - if (len > 1 && (ppToken->name[1] == 'x' || ppToken->name[1] == 'X')) - ppToken->i64val = (long long)strtoull(ppToken->name, nullptr, 16); - else - ppToken->i64val = (long long)strtoull(ppToken->name, nullptr, 8); - } else - ppToken->i64val = (long long)strtoull(ppToken->name, 0, 10); - break; - } - } - - return subtoken; -} - -// We are pasting if -// 1. we are preceding a pasting operator within this stream -// or -// 2. the entire macro is preceding a pasting operator (lastTokenPastes) -// and we are also on the last token -bool TPpContext::TokenStream::peekTokenizedPasting(bool lastTokenPastes) -{ - // 1. preceding ##? - - size_t savePos = current; - int subtoken; - // skip white space - do { - subtoken = getSubtoken(); - } while (subtoken == ' '); - current = savePos; - if (subtoken == PpAtomPaste) - return true; - - // 2. last token and we've been told after this there will be a ## - - if (! 
lastTokenPastes) - return false; - // Getting here means the last token will be pasted, after this - - // Are we at the last non-whitespace token? - savePos = current; - bool moreTokens = false; - do { - subtoken = getSubtoken(); - if (subtoken == EndOfInput) - break; - if (subtoken != ' ') { - moreTokens = true; - break; - } - } while (true); - current = savePos; - - return !moreTokens; -} - -// See if the next non-white-space tokens are two consecutive # -bool TPpContext::TokenStream::peekUntokenizedPasting() -{ - // don't return early, have to restore this - size_t savePos = current; - - // skip white-space - int subtoken; - do { - subtoken = getSubtoken(); - } while (subtoken == ' '); - - // check for ## - bool pasting = false; - if (subtoken == '#') { - subtoken = getSubtoken(); - if (subtoken == '#') - pasting = true; - } - - current = savePos; - - return pasting; -} - -void TPpContext::pushTokenStreamInput(TokenStream& ts, bool prepasting) -{ - pushInput(new tTokenInput(this, &ts, prepasting)); - ts.reset(); -} - -int TPpContext::tUngotTokenInput::scan(TPpToken* ppToken) -{ - if (done) - return EndOfInput; - - int ret = token; - *ppToken = lval; - done = true; - - return ret; -} - -void TPpContext::UngetToken(int token, TPpToken* ppToken) -{ - pushInput(new tUngotTokenInput(this, token, ppToken)); -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/preprocessor/PpTokens.h b/deps/glslang/glslang/MachineIndependent/preprocessor/PpTokens.h deleted file mode 100644 index d56df576..00000000 --- a/deps/glslang/glslang/MachineIndependent/preprocessor/PpTokens.h +++ /dev/null @@ -1,181 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// -/****************************************************************************\ -Copyright (c) 2002, NVIDIA Corporation. 
- -NVIDIA Corporation("NVIDIA") supplies this software to you in -consideration of your agreement to the following terms, and your use, -installation, modification or redistribution of this NVIDIA software -constitutes acceptance of these terms. If you do not agree with these -terms, please do not use, install, modify or redistribute this NVIDIA -software. - -In consideration of your agreement to abide by the following terms, and -subject to these terms, NVIDIA grants you a personal, non-exclusive -license, under NVIDIA's copyrights in this original NVIDIA software (the -"NVIDIA Software"), to use, reproduce, modify and redistribute the -NVIDIA Software, with or without modifications, in source and/or binary -forms; provided that if you redistribute the NVIDIA Software, you must -retain the copyright notice of NVIDIA, this notice and the following -text and disclaimers in all such redistributions of the NVIDIA Software. -Neither the name, trademarks, service marks nor logos of NVIDIA -Corporation may be used to endorse or promote products derived from the -NVIDIA Software without specific prior written permission from NVIDIA. -Except as expressly stated in this notice, no other rights or licenses -express or implied, are granted by NVIDIA herein, including but not -limited to any patent rights that may be infringed by your derivative -works or by other works in which the NVIDIA Software may be -incorporated. No hardware is licensed hereunder. - -THE NVIDIA SOFTWARE IS BEING PROVIDED ON AN "AS IS" BASIS, WITHOUT -WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, -INCLUDING WITHOUT LIMITATION, WARRANTIES OR CONDITIONS OF TITLE, -NON-INFRINGEMENT, MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR -ITS USE AND OPERATION EITHER ALONE OR IN COMBINATION WITH OTHER -PRODUCTS. - -IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY SPECIAL, INDIRECT, -INCIDENTAL, EXEMPLARY, CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, LOST PROFITS; PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF -USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) OR ARISING IN ANY WAY -OUT OF THE USE, REPRODUCTION, MODIFICATION AND/OR DISTRIBUTION OF THE -NVIDIA SOFTWARE, HOWEVER CAUSED AND WHETHER UNDER THEORY OF CONTRACT, -TORT (INCLUDING NEGLIGENCE), STRICT LIABILITY OR OTHERWISE, EVEN IF -NVIDIA HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-\****************************************************************************/ - -#ifndef PARSER_H -#define PARSER_H - -namespace glslang { - -// Multi-character tokens -enum EFixedAtoms { - // single character tokens get their own char value as their token; start here for multi-character tokens - PpAtomMaxSingle = 127, - - // replace bad character tokens with this, to avoid accidental aliasing with the below - PpAtomBadToken, - - // Operators - - PPAtomAddAssign, - PPAtomSubAssign, - PPAtomMulAssign, - PPAtomDivAssign, - PPAtomModAssign, - - PpAtomRight, - PpAtomLeft, - - PpAtomRightAssign, - PpAtomLeftAssign, - PpAtomAndAssign, - PpAtomOrAssign, - PpAtomXorAssign, - - PpAtomAnd, - PpAtomOr, - PpAtomXor, - - PpAtomEQ, - PpAtomNE, - PpAtomGE, - PpAtomLE, - - PpAtomDecrement, - PpAtomIncrement, - - PpAtomColonColon, - - PpAtomPaste, - - // Constants - - PpAtomConstInt, - PpAtomConstUint, - PpAtomConstInt64, - PpAtomConstUint64, -#ifdef AMD_EXTENSIONS - PpAtomConstInt16, - PpAtomConstUint16, -#endif - PpAtomConstFloat, - PpAtomConstDouble, - PpAtomConstFloat16, - PpAtomConstString, - - // Identifiers - PpAtomIdentifier, - - // preprocessor "keywords" - - PpAtomDefine, - PpAtomUndef, - - PpAtomIf, - PpAtomIfdef, - PpAtomIfndef, - PpAtomElse, - PpAtomElif, - PpAtomEndif, - - PpAtomLine, - PpAtomPragma, - PpAtomError, - - // #version ... - PpAtomVersion, - PpAtomCore, - PpAtomCompatibility, - PpAtomEs, - - // #extension - PpAtomExtension, - - // __LINE__, __FILE__, __VERSION__ - - PpAtomLineMacro, - PpAtomFileMacro, - PpAtomVersionMacro, - - // #include - PpAtomInclude, - - PpAtomLast, -}; - -} // end namespace glslang - -#endif /* not PARSER_H */ diff --git a/deps/glslang/glslang/MachineIndependent/propagateNoContraction.cpp b/deps/glslang/glslang/MachineIndependent/propagateNoContraction.cpp deleted file mode 100644 index ae95688a..00000000 --- a/deps/glslang/glslang/MachineIndependent/propagateNoContraction.cpp +++ /dev/null @@ -1,866 +0,0 @@ -// -// Copyright (C) 2015-2016 Google, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. - -// -// Visit the nodes in the glslang intermediate tree representation to -// propagate the 'noContraction' qualifier. -// - -#include "propagateNoContraction.h" - -#include <cstdlib> -#include <string> -#include <tuple> -#include <unordered_map> -#include <unordered_set> - -#include "localintermediate.h" -namespace { - -// Use a string to hold the access chain information, as in most cases the -// access chain is short and may contain only one element, which is the symbol -// ID. -// Example: struct {float a; float b;} s; -// Object s.a will be represented with: <symbol ID of s>/0 -// Object s.b will be represented with: <symbol ID of s>/1 -// Object s will be represented with: <symbol ID of s> -// For members of vector, matrix and arrays, they will be represented with the -// same symbol ID of their container symbol objects. This is because their -// preciseness is always the same as their container symbol objects. -typedef std::string ObjectAccessChain; - -// The delimiter used in the ObjectAccessChain string to separate symbol ID and -// different level of struct indices. -const char ObjectAccesschainDelimiter = '/'; - -// Mapping from Symbol IDs of symbol nodes, to their defining operation -// nodes. -typedef std::unordered_multimap<ObjectAccessChain, glslang::TIntermNode*> NodeMapping; -// Mapping from object nodes to their access chain info string. -typedef std::unordered_map<glslang::TIntermTyped*, ObjectAccessChain> AccessChainMapping; - -// Set of object IDs. -typedef std::unordered_set<ObjectAccessChain> ObjectAccesschainSet; -// Set of return branch nodes. -typedef std::unordered_set<glslang::TIntermBranch*> ReturnBranchNodeSet; - -// A helper function to tell whether a node is 'noContraction'. Returns true if -// the node has 'noContraction' qualifier, otherwise false. -bool isPreciseObjectNode(glslang::TIntermTyped* node) -{ - return node->getType().getQualifier().noContraction; -} - -// Returns true if the opcode is a dereferencing one. -bool isDereferenceOperation(glslang::TOperator op) -{ - switch (op) { - case glslang::EOpIndexDirect: - case glslang::EOpIndexDirectStruct: - case glslang::EOpIndexIndirect: - case glslang::EOpVectorSwizzle: - case glslang::EOpMatrixSwizzle: - return true; - default: - return false; - } -} - -// Returns true if the opcode leads to an assignment operation. -bool isAssignOperation(glslang::TOperator op) -{ - switch (op) { - case glslang::EOpAssign: - case glslang::EOpAddAssign: - case glslang::EOpSubAssign: - case glslang::EOpMulAssign: - case glslang::EOpVectorTimesMatrixAssign: - case glslang::EOpVectorTimesScalarAssign: - case glslang::EOpMatrixTimesScalarAssign: - case glslang::EOpMatrixTimesMatrixAssign: - case glslang::EOpDivAssign: - case glslang::EOpModAssign: - case glslang::EOpAndAssign: - case glslang::EOpLeftShiftAssign: - case glslang::EOpRightShiftAssign: - case glslang::EOpInclusiveOrAssign: - case glslang::EOpExclusiveOrAssign: - - case glslang::EOpPostIncrement: - case glslang::EOpPostDecrement: - case glslang::EOpPreIncrement: - case glslang::EOpPreDecrement: - return true; - default: - return false; - } -} - -// A helper function to get the unsigned int from a given constant union node. 
-// Note the node should only hold a uint scalar. -unsigned getStructIndexFromConstantUnion(glslang::TIntermTyped* node) -{ - assert(node->getAsConstantUnion() && node->getAsConstantUnion()->isScalar()); - unsigned struct_dereference_index = node->getAsConstantUnion()->getConstArray()[0].getUConst(); - return struct_dereference_index; -} - -// A helper function to generate symbol_label. -ObjectAccessChain generateSymbolLabel(glslang::TIntermSymbol* node) -{ - ObjectAccessChain symbol_id = - std::to_string(node->getId()) + "(" + node->getName().c_str() + ")"; - return symbol_id; -} - -// Returns true if the operation is an arithmetic operation and valid for -// the 'NoContraction' decoration. -bool isArithmeticOperation(glslang::TOperator op) -{ - switch (op) { - case glslang::EOpAddAssign: - case glslang::EOpSubAssign: - case glslang::EOpMulAssign: - case glslang::EOpVectorTimesMatrixAssign: - case glslang::EOpVectorTimesScalarAssign: - case glslang::EOpMatrixTimesScalarAssign: - case glslang::EOpMatrixTimesMatrixAssign: - case glslang::EOpDivAssign: - case glslang::EOpModAssign: - - case glslang::EOpNegative: - - case glslang::EOpAdd: - case glslang::EOpSub: - case glslang::EOpMul: - case glslang::EOpDiv: - case glslang::EOpMod: - - case glslang::EOpVectorTimesScalar: - case glslang::EOpVectorTimesMatrix: - case glslang::EOpMatrixTimesVector: - case glslang::EOpMatrixTimesScalar: - case glslang::EOpMatrixTimesMatrix: - - case glslang::EOpDot: - - case glslang::EOpPostIncrement: - case glslang::EOpPostDecrement: - case glslang::EOpPreIncrement: - case glslang::EOpPreDecrement: - return true; - default: - return false; - } -} - -// A helper class to help manage the populating_initial_no_contraction_ flag. -template <typename T> class StateSettingGuard { -public: - StateSettingGuard(T* state_ptr, T new_state_value) - : state_ptr_(state_ptr), previous_state_(*state_ptr) - { - *state_ptr = new_state_value; - } - StateSettingGuard(T* state_ptr) : state_ptr_(state_ptr), previous_state_(*state_ptr) {} - void setState(T new_state_value) { *state_ptr_ = new_state_value; } - ~StateSettingGuard() { *state_ptr_ = previous_state_; } - -private: - T* state_ptr_; - T previous_state_; -}; - -// A helper function to get the front element from a given ObjectAccessChain -ObjectAccessChain getFrontElement(const ObjectAccessChain& chain) -{ - size_t pos_delimiter = chain.find(ObjectAccesschainDelimiter); - return pos_delimiter == std::string::npos ? chain : chain.substr(0, pos_delimiter); -} - -// A helper function to get the access chain starting from the second element. -ObjectAccessChain subAccessChainFromSecondElement(const ObjectAccessChain& chain) -{ - size_t pos_delimiter = chain.find(ObjectAccesschainDelimiter); - return pos_delimiter == std::string::npos ? "" : chain.substr(pos_delimiter + 1); -} - -// A helper function to get the access chain after removing a given prefix. -ObjectAccessChain getSubAccessChainAfterPrefix(const ObjectAccessChain& chain, - const ObjectAccessChain& prefix) -{ - size_t pos = chain.find(prefix); - if (pos != 0) - return chain; - return chain.substr(prefix.length() + sizeof(ObjectAccesschainDelimiter)); -} - -// -// A traverser which traverses the whole AST and populates: -// 1) A mapping from symbol nodes' IDs to their defining operation nodes. -// 2) A set of access chains of the initial precise object nodes. 
-// -class TSymbolDefinitionCollectingTraverser : public glslang::TIntermTraverser { -public: - TSymbolDefinitionCollectingTraverser(NodeMapping* symbol_definition_mapping, - AccessChainMapping* accesschain_mapping, - ObjectAccesschainSet* precise_objects, - ReturnBranchNodeSet* precise_return_nodes); - - bool visitUnary(glslang::TVisit, glslang::TIntermUnary*) override; - bool visitBinary(glslang::TVisit, glslang::TIntermBinary*) override; - void visitSymbol(glslang::TIntermSymbol*) override; - bool visitAggregate(glslang::TVisit, glslang::TIntermAggregate*) override; - bool visitBranch(glslang::TVisit, glslang::TIntermBranch*) override; - -protected: - TSymbolDefinitionCollectingTraverser& operator=(const TSymbolDefinitionCollectingTraverser&); - - // The mapping from symbol node IDs to their defining nodes. This should be - // populated along traversing the AST. - NodeMapping& symbol_definition_mapping_; - // The set of symbol node IDs for precise symbol nodes, the ones marked as - // 'noContraction'. - ObjectAccesschainSet& precise_objects_; - // The set of precise return nodes. - ReturnBranchNodeSet& precise_return_nodes_; - // A temporary cache of the symbol node whose defining node is to be found - // currently along traversing the AST. - ObjectAccessChain current_object_; - // A map from object node to its access chain. This traverser stores - // the built access chains into this map for each object node it has - // visited. - AccessChainMapping& accesschain_mapping_; - // The pointer to the Function Definition node, so we can get the - // preciseness of the return expression from it when we traverse the - // return branch node. - glslang::TIntermAggregate* current_function_definition_node_; -}; - -TSymbolDefinitionCollectingTraverser::TSymbolDefinitionCollectingTraverser( - NodeMapping* symbol_definition_mapping, AccessChainMapping* accesschain_mapping, - ObjectAccesschainSet* precise_objects, - std::unordered_set* precise_return_nodes) - : TIntermTraverser(true, false, false), symbol_definition_mapping_(*symbol_definition_mapping), - precise_objects_(*precise_objects), precise_return_nodes_(*precise_return_nodes), - current_object_(), accesschain_mapping_(*accesschain_mapping), - current_function_definition_node_(nullptr) {} - -// Visits a symbol node, set the current_object_ to the -// current node symbol ID, and record a mapping from this node to the current -// current_object_, which is the just obtained symbol -// ID. -void TSymbolDefinitionCollectingTraverser::visitSymbol(glslang::TIntermSymbol* node) -{ - current_object_ = generateSymbolLabel(node); - accesschain_mapping_[node] = current_object_; -} - -// Visits an aggregate node, traverses all of its children. -bool TSymbolDefinitionCollectingTraverser::visitAggregate(glslang::TVisit, - glslang::TIntermAggregate* node) -{ - // This aggregate node might be a function definition node, in which case we need to - // cache this node, so we can get the preciseness information of the return value - // of this function later. - StateSettingGuard current_function_definition_node_setting_guard( - ¤t_function_definition_node_); - if (node->getOp() == glslang::EOpFunction) { - // This is function definition node, we need to cache this node so that we can - // get the preciseness of the return value later. - current_function_definition_node_setting_guard.setState(node); - } - // Traverse the items in the sequence. 
- glslang::TIntermSequence& seq = node->getSequence(); - for (int i = 0; i < (int)seq.size(); ++i) { - current_object_.clear(); - seq[i]->traverse(this); - } - return false; -} - -bool TSymbolDefinitionCollectingTraverser::visitBranch(glslang::TVisit, - glslang::TIntermBranch* node) -{ - if (node->getFlowOp() == glslang::EOpReturn && node->getExpression() && - current_function_definition_node_ && - current_function_definition_node_->getType().getQualifier().noContraction) { - // This node is a return node with an expression, and its function has a - // precise return value. We need to find the involved objects in its - // expression and add them to the set of initial precise objects. - precise_return_nodes_.insert(node); - node->getExpression()->traverse(this); - } - return false; -} - -// Visits a unary node. This might be an implicit assignment like i++, i--. etc. -bool TSymbolDefinitionCollectingTraverser::visitUnary(glslang::TVisit /* visit */, - glslang::TIntermUnary* node) -{ - current_object_.clear(); - node->getOperand()->traverse(this); - if (isAssignOperation(node->getOp())) { - // We should always be able to get an access chain of the operand node. - assert(!current_object_.empty()); - - // If the operand node object is 'precise', we collect its access chain - // for the initial set of 'precise' objects. - if (isPreciseObjectNode(node->getOperand())) { - // The operand node is an 'precise' object node, add its - // access chain to the set of 'precise' objects. This is to collect - // the initial set of 'precise' objects. - precise_objects_.insert(current_object_); - } - // Gets the symbol ID from the object's access chain. - ObjectAccessChain id_symbol = getFrontElement(current_object_); - // Add a mapping from the symbol ID to this assignment operation node. - symbol_definition_mapping_.insert(std::make_pair(id_symbol, node)); - } - // A unary node is not a dereference node, so we clear the access chain which - // is under construction. - current_object_.clear(); - return false; -} - -// Visits a binary node and updates the mapping from symbol IDs to the definition -// nodes. Also collects the access chains for the initial precise objects. -bool TSymbolDefinitionCollectingTraverser::visitBinary(glslang::TVisit /* visit */, - glslang::TIntermBinary* node) -{ - // Traverses the left node to build the access chain info for the object. - current_object_.clear(); - node->getLeft()->traverse(this); - - if (isAssignOperation(node->getOp())) { - // We should always be able to get an access chain for the left node. - assert(!current_object_.empty()); - - // If the left node object is 'precise', it is an initial precise object - // specified in the shader source. Adds it to the initial work list to - // process later. - if (isPreciseObjectNode(node->getLeft())) { - // The left node is an 'precise' object node, add its access chain to - // the set of 'precise' objects. This is to collect the initial set - // of 'precise' objects. - precise_objects_.insert(current_object_); - } - // Gets the symbol ID from the object access chain, which should be the - // first element recorded in the access chain. - ObjectAccessChain id_symbol = getFrontElement(current_object_); - // Adds a mapping from the symbol ID to this assignment operation node. - symbol_definition_mapping_.insert(std::make_pair(id_symbol, node)); - - // Traverses the right node, there may be other 'assignment' - // operations in the right. 
- current_object_.clear(); - node->getRight()->traverse(this); - - } else if (isDereferenceOperation(node->getOp())) { - // The left node (parent node) is a struct type object. We need to - // record the access chain information of the current node into its - // object id. - if (node->getOp() == glslang::EOpIndexDirectStruct) { - unsigned struct_dereference_index = getStructIndexFromConstantUnion(node->getRight()); - current_object_.push_back(ObjectAccesschainDelimiter); - current_object_.append(std::to_string(struct_dereference_index)); - } - accesschain_mapping_[node] = current_object_; - - // For a dereference node, there is no need to traverse the right child - // node as the right node should always be an integer type object. - - } else { - // For other binary nodes, still traverse the right node. - current_object_.clear(); - node->getRight()->traverse(this); - } - return false; -} - -// Traverses the AST and returns a tuple of four members: -// 1) a mapping from symbol IDs to the definition nodes (aka. assignment nodes) of these symbols. -// 2) a mapping from object nodes in the AST to the access chains of these objects. -// 3) a set of access chains of precise objects. -// 4) a set of return nodes with precise expressions. -std::tuple -getSymbolToDefinitionMappingAndPreciseSymbolIDs(const glslang::TIntermediate& intermediate) -{ - auto result_tuple = std::make_tuple(NodeMapping(), AccessChainMapping(), ObjectAccesschainSet(), - ReturnBranchNodeSet()); - - TIntermNode* root = intermediate.getTreeRoot(); - if (root == 0) - return result_tuple; - - NodeMapping& symbol_definition_mapping = std::get<0>(result_tuple); - AccessChainMapping& accesschain_mapping = std::get<1>(result_tuple); - ObjectAccesschainSet& precise_objects = std::get<2>(result_tuple); - ReturnBranchNodeSet& precise_return_nodes = std::get<3>(result_tuple); - - // Traverses the AST and populate the results. - TSymbolDefinitionCollectingTraverser collector(&symbol_definition_mapping, &accesschain_mapping, - &precise_objects, &precise_return_nodes); - root->traverse(&collector); - - return result_tuple; -} - -// -// A traverser that determine whether the left node (or operand node for unary -// node) of an assignment node is 'precise', containing 'precise' or not, -// according to the access chain a given precise object which share the same -// symbol as the left node. -// -// Post-orderly traverses the left node subtree of an binary assignment node and: -// -// 1) Propagates the 'precise' from the left object nodes to this object node. -// -// 2) Builds object access chain along the traversal, and also compares with -// the access chain of the given 'precise' object along with the traversal to -// tell if the node to be defined is 'precise' or not. -// -class TNoContractionAssigneeCheckingTraverser : public glslang::TIntermTraverser { - - enum DecisionStatus { - // The object node to be assigned to may contain 'precise' objects and also not 'precise' objects. - Mixed = 0, - // The object node to be assigned to is either a 'precise' object or a struct objects whose members are all 'precise'. - Precise = 1, - // The object node to be assigned to is not a 'precise' object. - NotPreicse = 2, - }; - -public: - TNoContractionAssigneeCheckingTraverser(const AccessChainMapping& accesschain_mapping) - : TIntermTraverser(true, false, false), accesschain_mapping_(accesschain_mapping), - precise_object_(nullptr) {} - - // Checks the preciseness of a given assignment node with a precise object - // represented as access chain. 
The precise object shares the same symbol - // with the assignee of the given assignment node. Return a tuple of two: - // - // 1) The preciseness of the assignee node of this assignment node. True - // if the assignee contains 'precise' objects or is 'precise', false if - // the assignee is not 'precise' according to the access chain of the given - // precise object. - // - // 2) The incremental access chain from the assignee node to its nested - // 'precise' object, according to the access chain of the given precise - // object. This incremental access chain can be empty, which means the - // assignee is 'precise'. Otherwise it shows the path to the nested - // precise object. - std::tuple - getPrecisenessAndRemainedAccessChain(glslang::TIntermOperator* node, - const ObjectAccessChain& precise_object) - { - assert(isAssignOperation(node->getOp())); - precise_object_ = &precise_object; - ObjectAccessChain assignee_object; - if (glslang::TIntermBinary* BN = node->getAsBinaryNode()) { - // This is a binary assignment node, we need to check the - // preciseness of the left node. - assert(accesschain_mapping_.count(BN->getLeft())); - // The left node (assignee node) is an object node, traverse the - // node to let the 'precise' of nesting objects being transfered to - // nested objects. - BN->getLeft()->traverse(this); - // After traversing the left node, if the left node is 'precise', - // we can conclude this assignment should propagate 'precise'. - if (isPreciseObjectNode(BN->getLeft())) { - return make_tuple(true, ObjectAccessChain()); - } - // If the preciseness of the left node (assignee node) can not - // be determined by now, we need to compare the access chain string - // of the assignee object with the given precise object. - assignee_object = accesschain_mapping_.at(BN->getLeft()); - - } else if (glslang::TIntermUnary* UN = node->getAsUnaryNode()) { - // This is a unary assignment node, we need to check the - // preciseness of the operand node. For unary assignment node, the - // operand node should always be an object node. - assert(accesschain_mapping_.count(UN->getOperand())); - // Traverse the operand node to let the 'precise' being propagated - // from lower nodes to upper nodes. - UN->getOperand()->traverse(this); - // After traversing the operand node, if the operand node is - // 'precise', this assignment should propagate 'precise'. - if (isPreciseObjectNode(UN->getOperand())) { - return make_tuple(true, ObjectAccessChain()); - } - // If the preciseness of the operand node (assignee node) can not - // be determined by now, we need to compare the access chain string - // of the assignee object with the given precise object. - assignee_object = accesschain_mapping_.at(UN->getOperand()); - } else { - // Not a binary or unary node, should not happen. - assert(false); - } - - // Compare the access chain string of the assignee node with the given - // precise object to determine if this assignment should propagate - // 'precise'. - if (assignee_object.find(precise_object) == 0) { - // The access chain string of the given precise object is a prefix - // of assignee's access chain string. The assignee should be - // 'precise'. - return make_tuple(true, ObjectAccessChain()); - } else if (precise_object.find(assignee_object) == 0) { - // The assignee's access chain string is a prefix of the given - // precise object, the assignee object contains 'precise' object, - // and we need to pass the remained access chain to the object nodes - // in the right. 
- return make_tuple(true, getSubAccessChainAfterPrefix(precise_object, assignee_object)); - } else { - // The access chain strings do not match, the assignee object can - // not be labeled as 'precise' according to the given precise - // object. - return make_tuple(false, ObjectAccessChain()); - } - } - -protected: - TNoContractionAssigneeCheckingTraverser& operator=(const TNoContractionAssigneeCheckingTraverser&); - - bool visitBinary(glslang::TVisit, glslang::TIntermBinary* node) override; - void visitSymbol(glslang::TIntermSymbol* node) override; - - // A map from object nodes to their access chain string (used as object ID). - const AccessChainMapping& accesschain_mapping_; - // A given precise object, represented in it access chain string. This - // precise object is used to be compared with the assignee node to tell if - // the assignee node is 'precise', contains 'precise' object or not - // 'precise'. - const ObjectAccessChain* precise_object_; -}; - -// Visits a binary node. If the node is an object node, it must be a dereference -// node. In such cases, if the left node is 'precise', this node should also be -// 'precise'. -bool TNoContractionAssigneeCheckingTraverser::visitBinary(glslang::TVisit, - glslang::TIntermBinary* node) -{ - // Traverses the left so that we transfer the 'precise' from nesting object - // to its nested object. - node->getLeft()->traverse(this); - // If this binary node is an object node, we should have it in the - // accesschain_mapping_. - if (accesschain_mapping_.count(node)) { - // A binary object node must be a dereference node. - assert(isDereferenceOperation(node->getOp())); - // If the left node is 'precise', this node should also be precise, - // otherwise, compare with the given precise_object_. If the - // access chain of this node matches with the given precise_object_, - // this node should be marked as 'precise'. - if (isPreciseObjectNode(node->getLeft())) { - node->getWritableType().getQualifier().noContraction = true; - } else if (accesschain_mapping_.at(node) == *precise_object_) { - node->getWritableType().getQualifier().noContraction = true; - } - } - return false; -} - -// Visits a symbol node, if the symbol node ID (its access chain string) matches -// with the given precise object, this node should be 'precise'. -void TNoContractionAssigneeCheckingTraverser::visitSymbol(glslang::TIntermSymbol* node) -{ - // A symbol node should always be an object node, and should have been added - // to the map from object nodes to their access chain strings. - assert(accesschain_mapping_.count(node)); - if (accesschain_mapping_.at(node) == *precise_object_) { - node->getWritableType().getQualifier().noContraction = true; - } -} - -// -// A traverser that only traverses the right side of binary assignment nodes -// and the operand node of unary assignment nodes. -// -// 1) Marks arithmetic operations as 'NoContraction'. -// -// 2) Find the object which should be marked as 'precise' in the right and -// update the 'precise' object work list. 
-// -class TNoContractionPropagator : public glslang::TIntermTraverser { -public: - TNoContractionPropagator(ObjectAccesschainSet* precise_objects, - const AccessChainMapping& accesschain_mapping) - : TIntermTraverser(true, false, false), - precise_objects_(*precise_objects), added_precise_object_ids_(), - remained_accesschain_(), accesschain_mapping_(accesschain_mapping) {} - - // Propagates 'precise' in the right nodes of a given assignment node with - // access chain record from the assignee node to a 'precise' object it - // contains. - void - propagateNoContractionInOneExpression(glslang::TIntermTyped* defining_node, - const ObjectAccessChain& assignee_remained_accesschain) - { - remained_accesschain_ = assignee_remained_accesschain; - if (glslang::TIntermBinary* BN = defining_node->getAsBinaryNode()) { - assert(isAssignOperation(BN->getOp())); - BN->getRight()->traverse(this); - if (isArithmeticOperation(BN->getOp())) { - BN->getWritableType().getQualifier().noContraction = true; - } - } else if (glslang::TIntermUnary* UN = defining_node->getAsUnaryNode()) { - assert(isAssignOperation(UN->getOp())); - UN->getOperand()->traverse(this); - if (isArithmeticOperation(UN->getOp())) { - UN->getWritableType().getQualifier().noContraction = true; - } - } - } - - // Propagates 'precise' in a given precise return node. - void propagateNoContractionInReturnNode(glslang::TIntermBranch* return_node) - { - remained_accesschain_ = ""; - assert(return_node->getFlowOp() == glslang::EOpReturn && return_node->getExpression()); - return_node->getExpression()->traverse(this); - } - -protected: - TNoContractionPropagator& operator=(const TNoContractionPropagator&); - - // Visits an aggregate node. The node can be a initializer list, in which - // case we need to find the 'precise' or 'precise' containing object node - // with the access chain record. In other cases, just need to traverse all - // the children nodes. - bool visitAggregate(glslang::TVisit, glslang::TIntermAggregate* node) override - { - if (!remained_accesschain_.empty() && node->getOp() == glslang::EOpConstructStruct) { - // This is a struct initializer node, and the remained - // access chain is not empty, we need to refer to the - // assignee_remained_access_chain_ to find the nested - // 'precise' object. And we don't need to visit other nodes in this - // aggregate node. - - // Gets the struct dereference index that leads to 'precise' object. - ObjectAccessChain precise_accesschain_index_str = - getFrontElement(remained_accesschain_); - unsigned precise_accesschain_index = (unsigned)strtoul(precise_accesschain_index_str.c_str(), nullptr, 10); - // Gets the node pointed by the access chain index extracted before. - glslang::TIntermTyped* potential_precise_node = - node->getSequence()[precise_accesschain_index]->getAsTyped(); - assert(potential_precise_node); - // Pop the front access chain index from the path, and visit the nested node. - { - ObjectAccessChain next_level_accesschain = - subAccessChainFromSecondElement(remained_accesschain_); - StateSettingGuard setup_remained_accesschain_for_next_level( - &remained_accesschain_, next_level_accesschain); - potential_precise_node->traverse(this); - } - return false; - } - return true; - } - - // Visits a binary node. A binary node can be an object node, e.g. a dereference node. - // As only the top object nodes in the right side of an assignment needs to be visited - // and added to 'precise' work list, this traverser won't visit the children nodes of - // an object node. 
If the binary node does not represent an object node, it should - // go on to traverse its children nodes and if it is an arithmetic operation node, this - // operation should be marked as 'noContraction'. - bool visitBinary(glslang::TVisit, glslang::TIntermBinary* node) override - { - if (isDereferenceOperation(node->getOp())) { - // This binary node is an object node. Need to update the precise - // object set with the access chain of this node + remained - // access chain . - ObjectAccessChain new_precise_accesschain = accesschain_mapping_.at(node); - if (remained_accesschain_.empty()) { - node->getWritableType().getQualifier().noContraction = true; - } else { - new_precise_accesschain += ObjectAccesschainDelimiter + remained_accesschain_; - } - // Cache the access chain as added precise object, so we won't add the - // same object to the work list again. - if (!added_precise_object_ids_.count(new_precise_accesschain)) { - precise_objects_.insert(new_precise_accesschain); - added_precise_object_ids_.insert(new_precise_accesschain); - } - // Only the upper-most object nodes should be visited, so do not - // visit children of this object node. - return false; - } - // If this is an arithmetic operation, marks this node as 'noContraction'. - if (isArithmeticOperation(node->getOp()) && node->getBasicType() != glslang::EbtInt) { - node->getWritableType().getQualifier().noContraction = true; - } - // As this node is not an object node, need to traverse the children nodes. - return true; - } - - // Visits a unary node. A unary node can not be an object node. If the operation - // is an arithmetic operation, need to mark this node as 'noContraction'. - bool visitUnary(glslang::TVisit /* visit */, glslang::TIntermUnary* node) override - { - // If this is an arithmetic operation, marks this with 'noContraction' - if (isArithmeticOperation(node->getOp())) { - node->getWritableType().getQualifier().noContraction = true; - } - return true; - } - - // Visits a symbol node. A symbol node is always an object node. So we - // should always be able to find its in our collected mapping from object - // nodes to access chains. As an object node, a symbol node can be either - // 'precise' or containing 'precise' objects according to unused - // access chain information we have when we visit this node. - void visitSymbol(glslang::TIntermSymbol* node) override - { - // Symbol nodes are object nodes and should always have an - // access chain collected before matches with it. - assert(accesschain_mapping_.count(node)); - ObjectAccessChain new_precise_accesschain = accesschain_mapping_.at(node); - // If the unused access chain is empty, this symbol node should be - // marked as 'precise'. Otherwise, the unused access chain should be - // appended to the symbol ID to build a new access chain which points to - // the nested 'precise' object in this symbol object. - if (remained_accesschain_.empty()) { - node->getWritableType().getQualifier().noContraction = true; - } else { - new_precise_accesschain += ObjectAccesschainDelimiter + remained_accesschain_; - } - // Add the new 'precise' access chain to the work list and make sure we - // don't visit it again. - if (!added_precise_object_ids_.count(new_precise_accesschain)) { - precise_objects_.insert(new_precise_accesschain); - added_precise_object_ids_.insert(new_precise_accesschain); - } - } - - // A set of precise objects, represented as access chains. - ObjectAccesschainSet& precise_objects_; - // Visited symbol nodes, should not revisit these nodes. 
- ObjectAccesschainSet added_precise_object_ids_; - // The left node of an assignment operation might be an parent of 'precise' objects. - // This means the left node might not be an 'precise' object node, but it may contains - // 'precise' qualifier which should be propagated to the corresponding child node in - // the right. So we need the path from the left node to its nested 'precise' node to - // tell us how to find the corresponding 'precise' node in the right. - ObjectAccessChain remained_accesschain_; - // A map from node pointers to their access chains. - const AccessChainMapping& accesschain_mapping_; -}; -} - -namespace glslang { - -void PropagateNoContraction(const glslang::TIntermediate& intermediate) -{ - // First, traverses the AST, records symbols with their defining operations - // and collects the initial set of precise symbols (symbol nodes that marked - // as 'noContraction') and precise return nodes. - auto mappings_and_precise_objects = - getSymbolToDefinitionMappingAndPreciseSymbolIDs(intermediate); - - // The mapping of symbol node IDs to their defining nodes. This enables us - // to get the defining node directly from a given symbol ID without - // traversing the tree again. - NodeMapping& symbol_definition_mapping = std::get<0>(mappings_and_precise_objects); - - // The mapping of object nodes to their access chains recorded. - AccessChainMapping& accesschain_mapping = std::get<1>(mappings_and_precise_objects); - - // The initial set of 'precise' objects which are represented as the - // access chain toward them. - ObjectAccesschainSet& precise_object_accesschains = std::get<2>(mappings_and_precise_objects); - - // The set of 'precise' return nodes. - ReturnBranchNodeSet& precise_return_nodes = std::get<3>(mappings_and_precise_objects); - - // Second, uses the initial set of precise objects as a work list, pops an - // access chain, extract the symbol ID from it. Then: - // 1) Check the assignee object, see if it is 'precise' object node or - // contains 'precise' object. Obtain the incremental access chain from the - // assignee node to its nested 'precise' node (if any). - // 2) If the assignee object node is 'precise' or it contains 'precise' - // objects, traverses the right side of the assignment operation - // expression to mark arithmetic operations as 'noContration' and update - // 'precise' access chain work list with new found object nodes. - // Repeat above steps until the work list is empty. - TNoContractionAssigneeCheckingTraverser checker(accesschain_mapping); - TNoContractionPropagator propagator(&precise_object_accesschains, accesschain_mapping); - - // We have two initial precise work lists to handle: - // 1) precise return nodes - // 2) precise object access chains - // We should process the precise return nodes first and the involved - // objects in the return expression should be added to the precise object - // access chain set. - while (!precise_return_nodes.empty()) { - glslang::TIntermBranch* precise_return_node = *precise_return_nodes.begin(); - propagator.propagateNoContractionInReturnNode(precise_return_node); - precise_return_nodes.erase(precise_return_node); - } - - while (!precise_object_accesschains.empty()) { - // Get the access chain of a precise object from the work list. - ObjectAccessChain precise_object_accesschain = *precise_object_accesschains.begin(); - // Get the symbol id from the access chain. - ObjectAccessChain symbol_id = getFrontElement(precise_object_accesschain); - // Get all the defining nodes of that symbol ID. 
- std::pair range = - symbol_definition_mapping.equal_range(symbol_id); - // Visits all the assignment nodes of that symbol ID and - // 1) Check if the assignee node is 'precise' or contains 'precise' - // objects. - // 2) Propagate the 'precise' to the top layer object nodes - // in the right side of the assignment operation, update the 'precise' - // work list with new access chains representing the new 'precise' - // objects, and mark arithmetic operations as 'noContraction'. - for (NodeMapping::iterator defining_node_iter = range.first; - defining_node_iter != range.second; defining_node_iter++) { - TIntermOperator* defining_node = defining_node_iter->second; - // Check the assignee node. - auto checker_result = checker.getPrecisenessAndRemainedAccessChain( - defining_node, precise_object_accesschain); - bool& contain_precise = std::get<0>(checker_result); - ObjectAccessChain& remained_accesschain = std::get<1>(checker_result); - // If the assignee node is 'precise' or contains 'precise', propagate the - // 'precise' to the right. Otherwise just skip this assignment node. - if (contain_precise) { - propagator.propagateNoContractionInOneExpression(defining_node, - remained_accesschain); - } - } - // Remove the last processed 'precise' object from the work list. - precise_object_accesschains.erase(precise_object_accesschain); - } -} -}; diff --git a/deps/glslang/glslang/MachineIndependent/propagateNoContraction.h b/deps/glslang/glslang/MachineIndependent/propagateNoContraction.h deleted file mode 100644 index 8521ad7d..00000000 --- a/deps/glslang/glslang/MachineIndependent/propagateNoContraction.h +++ /dev/null @@ -1,55 +0,0 @@ -// -// Copyright (C) 2015-2016 Google, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. - -// -// Visit the nodes in the glslang intermediate tree representation to -// propagate 'noContraction' qualifier. 
-// - -#pragma once - -#include "../Include/intermediate.h" - -namespace glslang { - -// Propagates the 'precise' qualifier for objects (objects marked with -// 'noContraction' qualifier) from the shader source specified 'precise' -// variables to all the involved objects, and add 'noContraction' qualifier for -// the involved arithmetic operations. -// Note that the same qualifier: 'noContraction' is used in both object nodes -// and arithmetic operation nodes, but has different meaning. For object nodes, -// 'noContraction' means the object is 'precise'; and for arithmetic operation -// nodes, it means the operation should not be contracted. -void PropagateNoContraction(const glslang::TIntermediate& intermediate); -}; diff --git a/deps/glslang/glslang/MachineIndependent/reflection.cpp b/deps/glslang/glslang/MachineIndependent/reflection.cpp deleted file mode 100644 index 50fb6027..00000000 --- a/deps/glslang/glslang/MachineIndependent/reflection.cpp +++ /dev/null @@ -1,793 +0,0 @@ -// -// Copyright (C) 2013-2016 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#include "../Include/Common.h" -#include "reflection.h" -#include "LiveTraverser.h" -#include "localintermediate.h" - -#include "gl_types.h" - -// -// Grow the reflection database through a friend traverser class of TReflection and a -// collection of functions to do a liveness traversal that note what uniforms are used -// in semantically non-dead code. -// -// Can be used multiple times, once per stage, to grow a program reflection. -// -// High-level algorithm for one stage: -// -// 1. Put the entry point on the list of live functions. -// -// 2. Traverse any live function, while skipping if-tests with a compile-time constant -// condition of false, and while adding any encountered function calls to the live -// function list. -// -// Repeat until the live function list is empty. -// -// 3. Add any encountered uniform variables and blocks to the reflection database. 
-// -// Can be attempted with a failed link, but will return false if recursion had been detected, or -// there wasn't exactly one entry point. -// - -namespace glslang { - -// -// The traverser: mostly pass through, except -// - processing binary nodes to see if they are dereferences of an aggregates to track -// - processing symbol nodes to see if they are non-aggregate objects to track -// -// This ignores semantically dead code by using TLiveTraverser. -// -// This is in the glslang namespace directly so it can be a friend of TReflection. -// - -class TReflectionTraverser : public TLiveTraverser { -public: - TReflectionTraverser(const TIntermediate& i, TReflection& r) : - TLiveTraverser(i), reflection(r) { } - - virtual bool visitBinary(TVisit, TIntermBinary* node); - virtual void visitSymbol(TIntermSymbol* base); - - // Add a simple reference to a uniform variable to the uniform database, no dereference involved. - // However, no dereference doesn't mean simple... it could be a complex aggregate. - void addUniform(const TIntermSymbol& base) - { - if (processedDerefs.find(&base) == processedDerefs.end()) { - processedDerefs.insert(&base); - - // Use a degenerate (empty) set of dereferences to immediately put as at the end of - // the dereference change expected by blowUpActiveAggregate. - TList derefs; - blowUpActiveAggregate(base.getType(), base.getName(), derefs, derefs.end(), -1, -1, 0); - } - } - - void addAttribute(const TIntermSymbol& base) - { - if (processedDerefs.find(&base) == processedDerefs.end()) { - processedDerefs.insert(&base); - - const TString &name = base.getName(); - const TType &type = base.getType(); - - TReflection::TNameToIndex::const_iterator it = reflection.nameToIndex.find(name); - if (it == reflection.nameToIndex.end()) { - reflection.nameToIndex[name] = (int)reflection.indexToAttribute.size(); - reflection.indexToAttribute.push_back(TObjectReflection(name, type, 0, mapToGlType(type), 0, 0)); - } - } - } - - // Lookup or calculate the offset of a block member, using the recursively - // defined block offset rules. - int getOffset(const TType& type, int index) - { - const TTypeList& memberList = *type.getStruct(); - - // Don't calculate offset if one is present, it could be user supplied - // and different than what would be calculated. That is, this is faster, - // but not just an optimization. - if (memberList[index].type->getQualifier().hasOffset()) - return memberList[index].type->getQualifier().layoutOffset; - - int memberSize; - int dummyStride; - int offset = 0; - for (int m = 0; m <= index; ++m) { - // modify just the children's view of matrix layout, if there is one for this member - TLayoutMatrix subMatrixLayout = memberList[m].type->getQualifier().layoutMatrix; - int memberAlignment = intermediate.getBaseAlignment(*memberList[m].type, memberSize, dummyStride, - type.getQualifier().layoutPacking == ElpStd140, - subMatrixLayout != ElmNone - ? subMatrixLayout == ElmRowMajor - : type.getQualifier().layoutMatrix == ElmRowMajor); - RoundToPow2(offset, memberAlignment); - if (m < index) - offset += memberSize; - } - - return offset; - } - - // Calculate the block data size. - // Block arrayness is not taken into account, each element is backed by a separate buffer. 
- int getBlockSize(const TType& blockType) - { - const TTypeList& memberList = *blockType.getStruct(); - int lastIndex = (int)memberList.size() - 1; - int lastOffset = getOffset(blockType, lastIndex); - - int lastMemberSize; - int dummyStride; - intermediate.getBaseAlignment(*memberList[lastIndex].type, lastMemberSize, dummyStride, - blockType.getQualifier().layoutPacking == ElpStd140, - blockType.getQualifier().layoutMatrix == ElmRowMajor); - - return lastOffset + lastMemberSize; - } - - // Traverse the provided deref chain, including the base, and - // - build a full reflection-granularity name, array size, etc. entry out of it, if it goes down to that granularity - // - recursively expand any variable array index in the middle of that traversal - // - recursively expand what's left at the end if the deref chain did not reach down to reflection granularity - // - // arraySize tracks, just for the final dereference in the chain, if there was a specific known size. - // A value of 0 for arraySize will mean to use the full array's size. - void blowUpActiveAggregate(const TType& baseType, const TString& baseName, const TList& derefs, - TList::const_iterator deref, int offset, int blockIndex, int arraySize) - { - // process the part of the dereference chain that was explicit in the shader - TString name = baseName; - const TType* terminalType = &baseType; - for (; deref != derefs.end(); ++deref) { - TIntermBinary* visitNode = *deref; - terminalType = &visitNode->getType(); - int index; - switch (visitNode->getOp()) { - case EOpIndexIndirect: - // Visit all the indices of this array, and for each one add on the remaining dereferencing - for (int i = 0; i < std::max(visitNode->getLeft()->getType().getOuterArraySize(), 1); ++i) { - TString newBaseName = name; - if (baseType.getBasicType() != EbtBlock) - newBaseName.append(TString("[") + String(i) + "]"); - TList::const_iterator nextDeref = deref; - ++nextDeref; - TType derefType(*terminalType, 0); - blowUpActiveAggregate(derefType, newBaseName, derefs, nextDeref, offset, blockIndex, arraySize); - } - - // it was all completed in the recursive calls above - return; - case EOpIndexDirect: - index = visitNode->getRight()->getAsConstantUnion()->getConstArray()[0].getIConst(); - if (baseType.getBasicType() != EbtBlock) - name.append(TString("[") + String(index) + "]"); - break; - case EOpIndexDirectStruct: - index = visitNode->getRight()->getAsConstantUnion()->getConstArray()[0].getIConst(); - if (offset >= 0) - offset += getOffset(visitNode->getLeft()->getType(), index); - if (name.size() > 0) - name.append("."); - name.append((*visitNode->getLeft()->getType().getStruct())[index].type->getFieldName()); - break; - default: - break; - } - } - - // if the terminalType is still too coarse a granularity, this is still an aggregate to expand, expand it... - if (! 
isReflectionGranularity(*terminalType)) { - if (terminalType->isArray()) { - // Visit all the indices of this array, and for each one, - // fully explode the remaining aggregate to dereference - for (int i = 0; i < std::max(terminalType->getOuterArraySize(), 1); ++i) { - TString newBaseName = name; - newBaseName.append(TString("[") + String(i) + "]"); - TType derefType(*terminalType, 0); - blowUpActiveAggregate(derefType, newBaseName, derefs, derefs.end(), offset, blockIndex, 0); - } - } else { - // Visit all members of this aggregate, and for each one, - // fully explode the remaining aggregate to dereference - const TTypeList& typeList = *terminalType->getStruct(); - for (int i = 0; i < (int)typeList.size(); ++i) { - TString newBaseName = name; - newBaseName.append(TString(".") + typeList[i].type->getFieldName()); - TType derefType(*terminalType, i); - blowUpActiveAggregate(derefType, newBaseName, derefs, derefs.end(), offset, blockIndex, 0); - } - } - - // it was all completed in the recursive calls above - return; - } - - // Finally, add a full string to the reflection database, and update the array size if necessary. - // If the dereferenced entity to record is an array, compute the size and update the maximum size. - - // there might not be a final array dereference, it could have been copied as an array object - if (arraySize == 0) - arraySize = mapToGlArraySize(*terminalType); - - TReflection::TNameToIndex::const_iterator it = reflection.nameToIndex.find(name); - if (it == reflection.nameToIndex.end()) { - reflection.nameToIndex[name] = (int)reflection.indexToUniform.size(); - reflection.indexToUniform.push_back(TObjectReflection(name, *terminalType, offset, - mapToGlType(*terminalType), - arraySize, blockIndex)); - } else if (arraySize > 1) { - int& reflectedArraySize = reflection.indexToUniform[it->second].size; - reflectedArraySize = std::max(arraySize, reflectedArraySize); - } - } - - // Add a uniform dereference where blocks/struct/arrays are involved in the access. - // Handles the situation where the left node is at the correct or too coarse a - // granularity for reflection. (That is, further dereferences up the tree will be - // skipped.) Earlier dereferences, down the tree, will be handled - // at the same time, and logged to prevent reprocessing as the tree is traversed. - // - // Note: Other things like the following must be caught elsewhere: - // - a simple non-array, non-struct variable (no dereference even conceivable) - // - an aggregrate consumed en masse, without a dereference - // - // So, this code is for cases like - // - a struct/block dereferencing a member (whether the member is array or not) - // - an array of struct - // - structs/arrays containing the above - // - void addDereferencedUniform(TIntermBinary* topNode) - { - // See if too fine-grained to process (wait to get further down the tree) - const TType& leftType = topNode->getLeft()->getType(); - if ((leftType.isVector() || leftType.isMatrix()) && ! leftType.isArray()) - return; - - // We have an array or structure or block dereference, see if it's a uniform - // based dereference (if not, skip it). - TIntermSymbol* base = findBase(topNode); - if (! base || ! 
base->getQualifier().isUniformOrBuffer()) - return; - - // See if we've already processed this (e.g., in the middle of something - // we did earlier), and if so skip it - if (processedDerefs.find(topNode) != processedDerefs.end()) - return; - - // Process this uniform dereference - - int offset = -1; - int blockIndex = -1; - bool anonymous = false; - - // See if we need to record the block itself - bool block = base->getBasicType() == EbtBlock; - if (block) { - offset = 0; - anonymous = IsAnonymous(base->getName()); - - const TString& blockName = base->getType().getTypeName(); - - if (base->getType().isArray()) { - TType derefType(base->getType(), 0); - - assert(! anonymous); - for (int e = 0; e < base->getType().getCumulativeArraySize(); ++e) - blockIndex = addBlockName(blockName + "[" + String(e) + "]", derefType, - getBlockSize(base->getType())); - } else - blockIndex = addBlockName(blockName, base->getType(), getBlockSize(base->getType())); - } - - // Process the dereference chain, backward, accumulating the pieces for later forward traversal. - // If the topNode is a reflection-granularity-array dereference, don't include that last dereference. - TList derefs; - for (TIntermBinary* visitNode = topNode; visitNode; visitNode = visitNode->getLeft()->getAsBinaryNode()) { - if (isReflectionGranularity(visitNode->getLeft()->getType())) - continue; - - derefs.push_front(visitNode); - processedDerefs.insert(visitNode); - } - processedDerefs.insert(base); - - // See if we have a specific array size to stick to while enumerating the explosion of the aggregate - int arraySize = 0; - if (isReflectionGranularity(topNode->getLeft()->getType()) && topNode->getLeft()->isArray()) { - if (topNode->getOp() == EOpIndexDirect) - arraySize = topNode->getRight()->getAsConstantUnion()->getConstArray()[0].getIConst() + 1; - } - - // Put the dereference chain together, forward - TString baseName; - if (! anonymous) { - if (block) - baseName = base->getType().getTypeName(); - else - baseName = base->getName(); - } - blowUpActiveAggregate(base->getType(), baseName, derefs, derefs.begin(), offset, blockIndex, arraySize); - } - - int addBlockName(const TString& name, const TType& type, int size) - { - int blockIndex; - TReflection::TNameToIndex::const_iterator it = reflection.nameToIndex.find(name); - if (reflection.nameToIndex.find(name) == reflection.nameToIndex.end()) { - blockIndex = (int)reflection.indexToUniformBlock.size(); - reflection.nameToIndex[name] = blockIndex; - reflection.indexToUniformBlock.push_back(TObjectReflection(name, type, -1, -1, size, -1)); - } else - blockIndex = it->second; - - return blockIndex; - } - - // Are we at a level in a dereference chain at which individual active uniform queries are made? - bool isReflectionGranularity(const TType& type) - { - return type.getBasicType() != EbtBlock && type.getBasicType() != EbtStruct; - } - - // For a binary operation indexing into an aggregate, chase down the base of the aggregate. - // Return 0 if the topology does not fit this situation. - TIntermSymbol* findBase(const TIntermBinary* node) - { - TIntermSymbol *base = node->getLeft()->getAsSymbolNode(); - if (base) - return base; - TIntermBinary* left = node->getLeft()->getAsBinaryNode(); - if (! left) - return nullptr; - - return findBase(left); - } - - // - // Translate a glslang sampler type into the GL API #define number. - // - int mapSamplerToGlType(TSampler sampler) - { - if (! sampler.image) { - // a sampler... 
- switch (sampler.type) { - case EbtFloat: - switch ((int)sampler.dim) { - case Esd1D: - switch ((int)sampler.shadow) { - case false: return sampler.arrayed ? GL_SAMPLER_1D_ARRAY : GL_SAMPLER_1D; - case true: return sampler.arrayed ? GL_SAMPLER_1D_ARRAY_SHADOW : GL_SAMPLER_1D_SHADOW; - } - case Esd2D: - switch ((int)sampler.ms) { - case false: - switch ((int)sampler.shadow) { - case false: return sampler.arrayed ? GL_SAMPLER_2D_ARRAY : GL_SAMPLER_2D; - case true: return sampler.arrayed ? GL_SAMPLER_2D_ARRAY_SHADOW : GL_SAMPLER_2D_SHADOW; - } - case true: return sampler.arrayed ? GL_SAMPLER_2D_MULTISAMPLE_ARRAY : GL_SAMPLER_2D_MULTISAMPLE; - } - case Esd3D: - return GL_SAMPLER_3D; - case EsdCube: - switch ((int)sampler.shadow) { - case false: return sampler.arrayed ? GL_SAMPLER_CUBE_MAP_ARRAY : GL_SAMPLER_CUBE; - case true: return sampler.arrayed ? GL_SAMPLER_CUBE_MAP_ARRAY_SHADOW : GL_SAMPLER_CUBE_SHADOW; - } - case EsdRect: - return sampler.shadow ? GL_SAMPLER_2D_RECT_SHADOW : GL_SAMPLER_2D_RECT; - case EsdBuffer: - return GL_SAMPLER_BUFFER; - } - case EbtInt: - switch ((int)sampler.dim) { - case Esd1D: - return sampler.arrayed ? GL_INT_SAMPLER_1D_ARRAY : GL_INT_SAMPLER_1D; - case Esd2D: - switch ((int)sampler.ms) { - case false: return sampler.arrayed ? GL_INT_SAMPLER_2D_ARRAY : GL_INT_SAMPLER_2D; - case true: return sampler.arrayed ? GL_INT_SAMPLER_2D_MULTISAMPLE_ARRAY - : GL_INT_SAMPLER_2D_MULTISAMPLE; - } - case Esd3D: - return GL_INT_SAMPLER_3D; - case EsdCube: - return sampler.arrayed ? GL_INT_SAMPLER_CUBE_MAP_ARRAY : GL_INT_SAMPLER_CUBE; - case EsdRect: - return GL_INT_SAMPLER_2D_RECT; - case EsdBuffer: - return GL_INT_SAMPLER_BUFFER; - } - case EbtUint: - switch ((int)sampler.dim) { - case Esd1D: - return sampler.arrayed ? GL_UNSIGNED_INT_SAMPLER_1D_ARRAY : GL_UNSIGNED_INT_SAMPLER_1D; - case Esd2D: - switch ((int)sampler.ms) { - case false: return sampler.arrayed ? GL_UNSIGNED_INT_SAMPLER_2D_ARRAY : GL_UNSIGNED_INT_SAMPLER_2D; - case true: return sampler.arrayed ? GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE_ARRAY - : GL_UNSIGNED_INT_SAMPLER_2D_MULTISAMPLE; - } - case Esd3D: - return GL_UNSIGNED_INT_SAMPLER_3D; - case EsdCube: - return sampler.arrayed ? GL_UNSIGNED_INT_SAMPLER_CUBE_MAP_ARRAY : GL_UNSIGNED_INT_SAMPLER_CUBE; - case EsdRect: - return GL_UNSIGNED_INT_SAMPLER_2D_RECT; - case EsdBuffer: - return GL_UNSIGNED_INT_SAMPLER_BUFFER; - } - default: - return 0; - } - } else { - // an image... - switch (sampler.type) { - case EbtFloat: - switch ((int)sampler.dim) { - case Esd1D: - return sampler.arrayed ? GL_IMAGE_1D_ARRAY : GL_IMAGE_1D; - case Esd2D: - switch ((int)sampler.ms) { - case false: return sampler.arrayed ? GL_IMAGE_2D_ARRAY : GL_IMAGE_2D; - case true: return sampler.arrayed ? GL_IMAGE_2D_MULTISAMPLE_ARRAY : GL_IMAGE_2D_MULTISAMPLE; - } - case Esd3D: - return GL_IMAGE_3D; - case EsdCube: - return sampler.arrayed ? GL_IMAGE_CUBE_MAP_ARRAY : GL_IMAGE_CUBE; - case EsdRect: - return GL_IMAGE_2D_RECT; - case EsdBuffer: - return GL_IMAGE_BUFFER; - } - case EbtInt: - switch ((int)sampler.dim) { - case Esd1D: - return sampler.arrayed ? GL_INT_IMAGE_1D_ARRAY : GL_INT_IMAGE_1D; - case Esd2D: - switch ((int)sampler.ms) { - case false: return sampler.arrayed ? GL_INT_IMAGE_2D_ARRAY : GL_INT_IMAGE_2D; - case true: return sampler.arrayed ? GL_INT_IMAGE_2D_MULTISAMPLE_ARRAY : GL_INT_IMAGE_2D_MULTISAMPLE; - } - case Esd3D: - return GL_INT_IMAGE_3D; - case EsdCube: - return sampler.arrayed ? 
GL_INT_IMAGE_CUBE_MAP_ARRAY : GL_INT_IMAGE_CUBE; - case EsdRect: - return GL_INT_IMAGE_2D_RECT; - case EsdBuffer: - return GL_INT_IMAGE_BUFFER; - } - case EbtUint: - switch ((int)sampler.dim) { - case Esd1D: - return sampler.arrayed ? GL_UNSIGNED_INT_IMAGE_1D_ARRAY : GL_UNSIGNED_INT_IMAGE_1D; - case Esd2D: - switch ((int)sampler.ms) { - case false: return sampler.arrayed ? GL_UNSIGNED_INT_IMAGE_2D_ARRAY : GL_UNSIGNED_INT_IMAGE_2D; - case true: return sampler.arrayed ? GL_UNSIGNED_INT_IMAGE_2D_MULTISAMPLE_ARRAY - : GL_UNSIGNED_INT_IMAGE_2D_MULTISAMPLE; - } - case Esd3D: - return GL_UNSIGNED_INT_IMAGE_3D; - case EsdCube: - return sampler.arrayed ? GL_UNSIGNED_INT_IMAGE_CUBE_MAP_ARRAY : GL_UNSIGNED_INT_IMAGE_CUBE; - case EsdRect: - return GL_UNSIGNED_INT_IMAGE_2D_RECT; - case EsdBuffer: - return GL_UNSIGNED_INT_IMAGE_BUFFER; - } - default: - return 0; - } - } - } - - // - // Translate a glslang type into the GL API #define number. - // Ignores arrayness. - // - int mapToGlType(const TType& type) - { - switch (type.getBasicType()) { - case EbtSampler: - return mapSamplerToGlType(type.getSampler()); - case EbtStruct: - case EbtBlock: - case EbtVoid: - return 0; - default: - break; - } - - if (type.isVector()) { - int offset = type.getVectorSize() - 2; - switch (type.getBasicType()) { - case EbtFloat: return GL_FLOAT_VEC2 + offset; - case EbtDouble: return GL_DOUBLE_VEC2 + offset; -#ifdef AMD_EXTENSIONS - case EbtFloat16: return GL_FLOAT16_VEC2_NV + offset; -#endif - case EbtInt: return GL_INT_VEC2 + offset; - case EbtUint: return GL_UNSIGNED_INT_VEC2 + offset; - case EbtInt64: return GL_INT64_ARB + offset; - case EbtUint64: return GL_UNSIGNED_INT64_ARB + offset; - case EbtBool: return GL_BOOL_VEC2 + offset; - case EbtAtomicUint: return GL_UNSIGNED_INT_ATOMIC_COUNTER + offset; - default: return 0; - } - } - if (type.isMatrix()) { - switch (type.getBasicType()) { - case EbtFloat: - switch (type.getMatrixCols()) { - case 2: - switch (type.getMatrixRows()) { - case 2: return GL_FLOAT_MAT2; - case 3: return GL_FLOAT_MAT2x3; - case 4: return GL_FLOAT_MAT2x4; - default: return 0; - } - case 3: - switch (type.getMatrixRows()) { - case 2: return GL_FLOAT_MAT3x2; - case 3: return GL_FLOAT_MAT3; - case 4: return GL_FLOAT_MAT3x4; - default: return 0; - } - case 4: - switch (type.getMatrixRows()) { - case 2: return GL_FLOAT_MAT4x2; - case 3: return GL_FLOAT_MAT4x3; - case 4: return GL_FLOAT_MAT4; - default: return 0; - } - } - case EbtDouble: - switch (type.getMatrixCols()) { - case 2: - switch (type.getMatrixRows()) { - case 2: return GL_DOUBLE_MAT2; - case 3: return GL_DOUBLE_MAT2x3; - case 4: return GL_DOUBLE_MAT2x4; - default: return 0; - } - case 3: - switch (type.getMatrixRows()) { - case 2: return GL_DOUBLE_MAT3x2; - case 3: return GL_DOUBLE_MAT3; - case 4: return GL_DOUBLE_MAT3x4; - default: return 0; - } - case 4: - switch (type.getMatrixRows()) { - case 2: return GL_DOUBLE_MAT4x2; - case 3: return GL_DOUBLE_MAT4x3; - case 4: return GL_DOUBLE_MAT4; - default: return 0; - } - } -#ifdef AMD_EXTENSIONS - case EbtFloat16: - switch (type.getMatrixCols()) { - case 2: - switch (type.getMatrixRows()) { - case 2: return GL_FLOAT16_MAT2_AMD; - case 3: return GL_FLOAT16_MAT2x3_AMD; - case 4: return GL_FLOAT16_MAT2x4_AMD; - default: return 0; - } - case 3: - switch (type.getMatrixRows()) { - case 2: return GL_FLOAT16_MAT3x2_AMD; - case 3: return GL_FLOAT16_MAT3_AMD; - case 4: return GL_FLOAT16_MAT3x4_AMD; - default: return 0; - } - case 4: - switch (type.getMatrixRows()) { - case 2: return 
GL_FLOAT16_MAT4x2_AMD; - case 3: return GL_FLOAT16_MAT4x3_AMD; - case 4: return GL_FLOAT16_MAT4_AMD; - default: return 0; - } - } -#endif - default: - return 0; - } - } - if (type.getVectorSize() == 1) { - switch (type.getBasicType()) { - case EbtFloat: return GL_FLOAT; - case EbtDouble: return GL_DOUBLE; -#ifdef AMD_EXTENSIONS - case EbtFloat16: return GL_FLOAT16_NV; -#endif - case EbtInt: return GL_INT; - case EbtUint: return GL_UNSIGNED_INT; - case EbtInt64: return GL_INT64_ARB; - case EbtUint64: return GL_UNSIGNED_INT64_ARB; - case EbtBool: return GL_BOOL; - case EbtAtomicUint: return GL_UNSIGNED_INT_ATOMIC_COUNTER; - default: return 0; - } - } - - return 0; - } - - int mapToGlArraySize(const TType& type) - { - return type.isArray() ? type.getOuterArraySize() : 1; - } - - TReflection& reflection; - std::set processedDerefs; - -protected: - TReflectionTraverser(TReflectionTraverser&); - TReflectionTraverser& operator=(TReflectionTraverser&); -}; - -// -// Implement the traversal functions of interest. -// - -// To catch dereferenced aggregates that must be reflected. -// This catches them at the highest level possible in the tree. -bool TReflectionTraverser::visitBinary(TVisit /* visit */, TIntermBinary* node) -{ - switch (node->getOp()) { - case EOpIndexDirect: - case EOpIndexIndirect: - case EOpIndexDirectStruct: - addDereferencedUniform(node); - break; - default: - break; - } - - // still need to visit everything below, which could contain sub-expressions - // containing different uniforms - return true; -} - -// To reflect non-dereferenced objects. -void TReflectionTraverser::visitSymbol(TIntermSymbol* base) -{ - if (base->getQualifier().storage == EvqUniform) - addUniform(*base); - - if (intermediate.getStage() == EShLangVertex && base->getQualifier().isPipeInput()) - addAttribute(*base); -} - -// -// Implement TReflection methods. -// - -// Track any required attribute reflection, such as compute shader numthreads. -// -void TReflection::buildAttributeReflection(EShLanguage stage, const TIntermediate& intermediate) -{ - if (stage == EShLangCompute) { - // Remember thread dimensions - for (int dim=0; dim<3; ++dim) - localSize[dim] = intermediate.getLocalSize(dim); - } -} - -// build counter block index associations for buffers -void TReflection::buildCounterIndices() -{ - // search for ones that have counters - for (int i = 0; i < int(indexToUniformBlock.size()); ++i) { - const TString counterName(indexToUniformBlock[i].name + "@count"); - const int index = getIndex(counterName); - - if (index >= 0) - indexToUniformBlock[i].counterIndex = index; - } -} - -// Merge live symbols from 'intermediate' into the existing reflection database. -// -// Returns false if the input is too malformed to do this. -bool TReflection::addStage(EShLanguage stage, const TIntermediate& intermediate) -{ - if (intermediate.getTreeRoot() == nullptr || - intermediate.getNumEntryPoints() != 1 || - intermediate.isRecursive()) - return false; - - buildAttributeReflection(stage, intermediate); - - TReflectionTraverser it(intermediate, *this); - - // put the entry point on the list of functions to process - it.pushFunction(intermediate.getEntryPointMangledName().c_str()); - - // process all the functions - while (! 
it.functions.empty()) { - TIntermNode* function = it.functions.back(); - it.functions.pop_back(); - function->traverse(&it); - } - - buildCounterIndices(); - - return true; -} - -void TReflection::dump() -{ - printf("Uniform reflection:\n"); - for (size_t i = 0; i < indexToUniform.size(); ++i) - indexToUniform[i].dump(); - printf("\n"); - - printf("Uniform block reflection:\n"); - for (size_t i = 0; i < indexToUniformBlock.size(); ++i) - indexToUniformBlock[i].dump(); - printf("\n"); - - printf("Vertex attribute reflection:\n"); - for (size_t i = 0; i < indexToAttribute.size(); ++i) - indexToAttribute[i].dump(); - printf("\n"); - - if (getLocalSize(0) > 1) { - static const char* axis[] = { "X", "Y", "Z" }; - - for (int dim=0; dim<3; ++dim) - if (getLocalSize(dim) > 1) - printf("Local size %s: %d\n", axis[dim], getLocalSize(dim)); - - printf("\n"); - } - - // printf("Live names\n"); - // for (TNameToIndex::const_iterator it = nameToIndex.begin(); it != nameToIndex.end(); ++it) - // printf("%s: %d\n", it->first.c_str(), it->second); - // printf("\n"); -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/MachineIndependent/reflection.h b/deps/glslang/glslang/MachineIndependent/reflection.h deleted file mode 100644 index fedfbe8d..00000000 --- a/deps/glslang/glslang/MachineIndependent/reflection.h +++ /dev/null @@ -1,177 +0,0 @@ -// -// Copyright (C) 2013-2016 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#ifndef _REFLECTION_INCLUDED -#define _REFLECTION_INCLUDED - -#include "../Public/ShaderLang.h" -#include "../Include/Types.h" - -#include -#include - -// -// A reflection database and its interface, consistent with the OpenGL API reflection queries. 
-// - -namespace glslang { - -class TIntermediate; -class TIntermAggregate; -class TReflectionTraverser; - -// Data needed for just a single object at the granularity exchanged by the reflection API -class TObjectReflection { -public: - TObjectReflection(const TString& pName, const TType& pType, int pOffset, int pGLDefineType, int pSize, int pIndex) : - name(pName), offset(pOffset), - glDefineType(pGLDefineType), size(pSize), index(pIndex), counterIndex(-1), type(pType.clone()) { } - - const TType* const getType() const { return type; } - int getBinding() const - { - if (type == nullptr || !type->getQualifier().hasBinding()) - return -1; - return type->getQualifier().layoutBinding; - } - void dump() const - { - printf("%s: offset %d, type %x, size %d, index %d, binding %d", - name.c_str(), offset, glDefineType, size, index, getBinding() ); - - if (counterIndex != -1) - printf(", counter %d", counterIndex); - - printf("\n"); - } - static TObjectReflection badReflection() { return TObjectReflection(); } - - TString name; - int offset; - int glDefineType; - int size; // data size in bytes for a block, array size for a (non-block) object that's an array - int index; - int counterIndex; - -protected: - TObjectReflection() : offset(-1), glDefineType(-1), size(-1), index(-1), type(nullptr) { } - - const TType* type; -}; - -// The full reflection database -class TReflection { -public: - TReflection() : badReflection(TObjectReflection::badReflection()) - { - for (int dim=0; dim<3; ++dim) - localSize[dim] = 0; - } - - virtual ~TReflection() {} - - // grow the reflection stage by stage - bool addStage(EShLanguage, const TIntermediate&); - - // for mapping a uniform index to a uniform object's description - int getNumUniforms() { return (int)indexToUniform.size(); } - const TObjectReflection& getUniform(int i) const - { - if (i >= 0 && i < (int)indexToUniform.size()) - return indexToUniform[i]; - else - return badReflection; - } - - // for mapping a block index to the block's description - int getNumUniformBlocks() const { return (int)indexToUniformBlock.size(); } - const TObjectReflection& getUniformBlock(int i) const - { - if (i >= 0 && i < (int)indexToUniformBlock.size()) - return indexToUniformBlock[i]; - else - return badReflection; - } - - // for mapping an attribute index to the attribute's description - int getNumAttributes() { return (int)indexToAttribute.size(); } - const TObjectReflection& getAttribute(int i) const - { - if (i >= 0 && i < (int)indexToAttribute.size()) - return indexToAttribute[i]; - else - return badReflection; - } - - // for mapping any name to its index (block names, uniform names and attribute names) - int getIndex(const char* name) const - { - TNameToIndex::const_iterator it = nameToIndex.find(name); - if (it == nameToIndex.end()) - return -1; - else - return it->second; - } - - // see getIndex(const char*) - int getIndex(const TString& name) const { return getIndex(name.c_str()); } - - // Thread local size - unsigned getLocalSize(int dim) const { return dim <= 2 ? 
localSize[dim] : 0; } - - void dump(); - -protected: - friend class glslang::TReflectionTraverser; - - void buildCounterIndices(); - void buildAttributeReflection(EShLanguage, const TIntermediate&); - - // Need a TString hash: typedef std::unordered_map TNameToIndex; - typedef std::map TNameToIndex; - typedef std::vector TMapIndexToReflection; - - TObjectReflection badReflection; // return for queries of -1 or generally out of range; has expected descriptions with in it for this - TNameToIndex nameToIndex; // maps names to indexes; can hold all types of data: uniform/buffer and which function names have been processed - TMapIndexToReflection indexToUniform; - TMapIndexToReflection indexToUniformBlock; - TMapIndexToReflection indexToAttribute; - - unsigned int localSize[3]; -}; - -} // end namespace glslang - -#endif // _REFLECTION_INCLUDED diff --git a/deps/glslang/glslang/OSDependent/Unix/CMakeLists.txt b/deps/glslang/glslang/OSDependent/Unix/CMakeLists.txt deleted file mode 100644 index 1bf49e12..00000000 --- a/deps/glslang/glslang/OSDependent/Unix/CMakeLists.txt +++ /dev/null @@ -1,8 +0,0 @@ -add_library(OSDependent STATIC ossource.cpp ../osinclude.h) -set_property(TARGET OSDependent PROPERTY FOLDER glslang) -set_property(TARGET OSDependent PROPERTY POSITION_INDEPENDENT_CODE ON) - -if(ENABLE_GLSLANG_INSTALL) - install(TARGETS OSDependent - ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}) -endif(ENABLE_GLSLANG_INSTALL) diff --git a/deps/glslang/glslang/OSDependent/Unix/ossource.cpp b/deps/glslang/glslang/OSDependent/Unix/ossource.cpp deleted file mode 100644 index 24b77e16..00000000 --- a/deps/glslang/glslang/OSDependent/Unix/ossource.cpp +++ /dev/null @@ -1,191 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. 
-// - -// -// This file contains the Linux-specific functions -// -#include "../osinclude.h" -#include "../../../OGLCompilersDLL/InitializeDll.h" - -#include -#include -#include -#include -#include - -namespace glslang { - -// -// Thread cleanup -// - -// -// Wrapper for Linux call to DetachThread. This is required as pthread_cleanup_push() expects -// the cleanup routine to return void. -// -static void DetachThreadLinux(void *) -{ - DetachThread(); -} - -// -// Registers cleanup handler, sets cancel type and state, and executes the thread specific -// cleanup handler. This function will be called in the Standalone.cpp for regression -// testing. When OpenGL applications are run with the driver code, Linux OS does the -// thread cleanup. -// -void OS_CleanupThreadData(void) -{ -#ifdef __ANDROID__ - DetachThreadLinux(NULL); -#else - int old_cancel_state, old_cancel_type; - void *cleanupArg = NULL; - - // - // Set thread cancel state and push cleanup handler. - // - pthread_setcancelstate(PTHREAD_CANCEL_ENABLE, &old_cancel_state); - pthread_cleanup_push(DetachThreadLinux, (void *) cleanupArg); - - // - // Put the thread in deferred cancellation mode. - // - pthread_setcanceltype(PTHREAD_CANCEL_DEFERRED, &old_cancel_type); - - // - // Pop cleanup handler and execute it prior to unregistering the cleanup handler. - // - pthread_cleanup_pop(1); - - // - // Restore the thread's previous cancellation mode. - // - pthread_setcanceltype(old_cancel_state, NULL); -#endif -} - -// -// Thread Local Storage Operations -// -inline OS_TLSIndex PthreadKeyToTLSIndex(pthread_key_t key) -{ - return (OS_TLSIndex)((uintptr_t)key + 1); -} - -inline pthread_key_t TLSIndexToPthreadKey(OS_TLSIndex nIndex) -{ - return (pthread_key_t)((uintptr_t)nIndex - 1); -} - -OS_TLSIndex OS_AllocTLSIndex() -{ - pthread_key_t pPoolIndex; - - // - // Create global pool key. - // - if ((pthread_key_create(&pPoolIndex, NULL)) != 0) { - assert(0 && "OS_AllocTLSIndex(): Unable to allocate Thread Local Storage"); - return OS_INVALID_TLS_INDEX; - } - else - return PthreadKeyToTLSIndex(pPoolIndex); -} - -bool OS_SetTLSValue(OS_TLSIndex nIndex, void *lpvValue) -{ - if (nIndex == OS_INVALID_TLS_INDEX) { - assert(0 && "OS_SetTLSValue(): Invalid TLS Index"); - return false; - } - - if (pthread_setspecific(TLSIndexToPthreadKey(nIndex), lpvValue) == 0) - return true; - else - return false; -} - -void* OS_GetTLSValue(OS_TLSIndex nIndex) -{ - // - // This function should return 0 if nIndex is invalid. - // - assert(nIndex != OS_INVALID_TLS_INDEX); - return pthread_getspecific(TLSIndexToPthreadKey(nIndex)); -} - -bool OS_FreeTLSIndex(OS_TLSIndex nIndex) -{ - if (nIndex == OS_INVALID_TLS_INDEX) { - assert(0 && "OS_SetTLSValue(): Invalid TLS Index"); - return false; - } - - // - // Delete the global pool key. 
- // - if (pthread_key_delete(TLSIndexToPthreadKey(nIndex)) == 0) - return true; - else - return false; -} - -namespace { - pthread_mutex_t gMutex; -} - -void InitGlobalLock() -{ - pthread_mutexattr_t mutexattr; - pthread_mutexattr_init(&mutexattr); - pthread_mutexattr_settype(&mutexattr, PTHREAD_MUTEX_RECURSIVE); - pthread_mutex_init(&gMutex, &mutexattr); -} - -void GetGlobalLock() -{ - pthread_mutex_lock(&gMutex); -} - -void ReleaseGlobalLock() -{ - pthread_mutex_unlock(&gMutex); -} - -void OS_DumpMemoryCounters() -{ -} - -} // end namespace glslang diff --git a/deps/glslang/glslang/OSDependent/Windows/CMakeLists.txt b/deps/glslang/glslang/OSDependent/Windows/CMakeLists.txt deleted file mode 100644 index f257418a..00000000 --- a/deps/glslang/glslang/OSDependent/Windows/CMakeLists.txt +++ /dev/null @@ -1,20 +0,0 @@ -set(SOURCES ossource.cpp ../osinclude.h) - -add_library(OSDependent STATIC ${SOURCES}) -set_property(TARGET OSDependent PROPERTY FOLDER glslang) -set_property(TARGET OSDependent PROPERTY POSITION_INDEPENDENT_CODE ON) - -# MinGW GCC complains about function pointer casts to void*. -# Turn that off with -fpermissive. -if(${CMAKE_CXX_COMPILER_ID} MATCHES "GNU") - target_compile_options(OSDependent PRIVATE -fpermissive) -endif() - -if(WIN32) - source_group("Source" FILES ${SOURCES}) -endif(WIN32) - -if(ENABLE_GLSLANG_INSTALL) - install(TARGETS OSDependent - ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}) -endif(ENABLE_GLSLANG_INSTALL) diff --git a/deps/glslang/glslang/OSDependent/Windows/main.cpp b/deps/glslang/glslang/OSDependent/Windows/main.cpp deleted file mode 100644 index 0bcde7b6..00000000 --- a/deps/glslang/glslang/OSDependent/Windows/main.cpp +++ /dev/null @@ -1,74 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. 
-// - -#include "InitializeDll.h" - -#define STRICT -#define VC_EXTRALEAN 1 -#include -#include - -BOOL WINAPI DllMain(HINSTANCE hinstDLL, DWORD fdwReason, LPVOID lpvReserved) -{ - switch (fdwReason) - { - case DLL_PROCESS_ATTACH: - - if (! glslang::InitProcess()) - return FALSE; - break; - case DLL_THREAD_ATTACH: - - if (! glslang::InitThread()) - return FALSE; - break; - - case DLL_THREAD_DETACH: - - if (! glslang::DetachThread()) - return FALSE; - break; - - case DLL_PROCESS_DETACH: - - glslang::DetachProcess(); - break; - - default: - assert(0 && "DllMain(): Reason for calling DLL Main is unknown"); - return FALSE; - } - - return TRUE; -} diff --git a/deps/glslang/glslang/OSDependent/Windows/ossource.cpp b/deps/glslang/glslang/OSDependent/Windows/ossource.cpp deleted file mode 100644 index 870840c5..00000000 --- a/deps/glslang/glslang/OSDependent/Windows/ossource.cpp +++ /dev/null @@ -1,147 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#include "../osinclude.h" - -#define STRICT -#define VC_EXTRALEAN 1 -#include -#include -#include -#include -#include -#include - -// -// This file contains the Window-OS-specific functions -// - -#if !(defined(_WIN32) || defined(_WIN64)) -#error Trying to build a windows specific file in a non windows build. 
-#endif - -namespace glslang { - -inline OS_TLSIndex ToGenericTLSIndex (DWORD handle) -{ - return (OS_TLSIndex)((uintptr_t)handle + 1); -} - -inline DWORD ToNativeTLSIndex (OS_TLSIndex nIndex) -{ - return (DWORD)((uintptr_t)nIndex - 1); -} - -// -// Thread Local Storage Operations -// -OS_TLSIndex OS_AllocTLSIndex() -{ - DWORD dwIndex = TlsAlloc(); - if (dwIndex == TLS_OUT_OF_INDEXES) { - assert(0 && "OS_AllocTLSIndex(): Unable to allocate Thread Local Storage"); - return OS_INVALID_TLS_INDEX; - } - - return ToGenericTLSIndex(dwIndex); -} - -bool OS_SetTLSValue(OS_TLSIndex nIndex, void *lpvValue) -{ - if (nIndex == OS_INVALID_TLS_INDEX) { - assert(0 && "OS_SetTLSValue(): Invalid TLS Index"); - return false; - } - - if (TlsSetValue(ToNativeTLSIndex(nIndex), lpvValue)) - return true; - else - return false; -} - -void* OS_GetTLSValue(OS_TLSIndex nIndex) -{ - assert(nIndex != OS_INVALID_TLS_INDEX); - return TlsGetValue(ToNativeTLSIndex(nIndex)); -} - -bool OS_FreeTLSIndex(OS_TLSIndex nIndex) -{ - if (nIndex == OS_INVALID_TLS_INDEX) { - assert(0 && "OS_SetTLSValue(): Invalid TLS Index"); - return false; - } - - if (TlsFree(ToNativeTLSIndex(nIndex))) - return true; - else - return false; -} - -HANDLE GlobalLock; - -void InitGlobalLock() -{ - GlobalLock = CreateMutex(0, false, 0); -} - -void GetGlobalLock() -{ - WaitForSingleObject(GlobalLock, INFINITE); -} - -void ReleaseGlobalLock() -{ - ReleaseMutex(GlobalLock); -} - -unsigned int __stdcall EnterGenericThread (void* entry) -{ - return ((TThreadEntrypoint)entry)(0); -} - -//#define DUMP_COUNTERS - -void OS_DumpMemoryCounters() -{ -#ifdef DUMP_COUNTERS - PROCESS_MEMORY_COUNTERS counters; - GetProcessMemoryInfo(GetCurrentProcess(), &counters, sizeof(counters)); - printf("Working set size: %d\n", counters.WorkingSetSize); -#else - printf("Recompile with DUMP_COUNTERS defined to see counters.\n"); -#endif -} - -} // namespace glslang diff --git a/deps/glslang/glslang/OSDependent/osinclude.h b/deps/glslang/glslang/OSDependent/osinclude.h deleted file mode 100644 index 218abe4f..00000000 --- a/deps/glslang/glslang/OSDependent/osinclude.h +++ /dev/null @@ -1,63 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#ifndef __OSINCLUDE_H -#define __OSINCLUDE_H - -namespace glslang { - -// -// Thread Local Storage Operations -// -typedef void* OS_TLSIndex; -#define OS_INVALID_TLS_INDEX ((void*)0) - -OS_TLSIndex OS_AllocTLSIndex(); -bool OS_SetTLSValue(OS_TLSIndex nIndex, void *lpvValue); -bool OS_FreeTLSIndex(OS_TLSIndex nIndex); -void* OS_GetTLSValue(OS_TLSIndex nIndex); - -void InitGlobalLock(); -void GetGlobalLock(); -void ReleaseGlobalLock(); - -typedef unsigned int (*TThreadEntrypoint)(void*); - -void OS_CleanupThreadData(void); - -void OS_DumpMemoryCounters(); - -} // end namespace glslang - -#endif // __OSINCLUDE_H diff --git a/deps/glslang/glslang/Public/ShaderLang.h b/deps/glslang/glslang/Public/ShaderLang.h deleted file mode 100644 index b36e384f..00000000 --- a/deps/glslang/glslang/Public/ShaderLang.h +++ /dev/null @@ -1,667 +0,0 @@ -// -// Copyright (C) 2002-2005 3Dlabs Inc. Ltd. -// Copyright (C) 2013-2016 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. 
-// -#ifndef _COMPILER_INTERFACE_INCLUDED_ -#define _COMPILER_INTERFACE_INCLUDED_ - -#include "../Include/ResourceLimits.h" -#include "../MachineIndependent/Versions.h" - -#include -#include - -#ifdef _WIN32 -#define C_DECL __cdecl -//#ifdef SH_EXPORTING -// #define SH_IMPORT_EXPORT __declspec(dllexport) -//#else -// #define SH_IMPORT_EXPORT __declspec(dllimport) -//#endif -#define SH_IMPORT_EXPORT -#else -#define SH_IMPORT_EXPORT -#ifndef __fastcall -#define __fastcall -#endif -#define C_DECL -#endif - -// -// This is the platform independent interface between an OGL driver -// and the shading language compiler/linker. -// - -#ifdef __cplusplus - extern "C" { -#endif - -// -// Driver must call this first, once, before doing any other -// compiler/linker operations. -// -// (Call once per process, not once per thread.) -// -SH_IMPORT_EXPORT int ShInitialize(); - -// -// Driver should call this at process shutdown. -// -SH_IMPORT_EXPORT int __fastcall ShFinalize(); - -// -// Types of languages the compiler can consume. -// -typedef enum { - EShLangVertex, - EShLangTessControl, - EShLangTessEvaluation, - EShLangGeometry, - EShLangFragment, - EShLangCompute, - EShLangCount, -} EShLanguage; // would be better as stage, but this is ancient now - -typedef enum { - EShLangVertexMask = (1 << EShLangVertex), - EShLangTessControlMask = (1 << EShLangTessControl), - EShLangTessEvaluationMask = (1 << EShLangTessEvaluation), - EShLangGeometryMask = (1 << EShLangGeometry), - EShLangFragmentMask = (1 << EShLangFragment), - EShLangComputeMask = (1 << EShLangCompute), -} EShLanguageMask; - -namespace glslang { - -class TType; - -typedef enum { - EShSourceNone, - EShSourceGlsl, - EShSourceHlsl, -} EShSource; // if EShLanguage were EShStage, this could be EShLanguage instead - -typedef enum { - EShClientNone, - EShClientVulkan, - EShClientOpenGL, -} EShClient; - -typedef enum { - EShTargetNone, - EshTargetSpv, -} EShTargetLanguage; - -struct TInputLanguage { - EShSource languageFamily; // redundant information with other input, this one overrides when not EShSourceNone - EShLanguage stage; // redundant information with other input, this one overrides when not EShSourceNone - EShClient dialect; - int dialectVersion; // version of client's language definition, not the client (when not EShClientNone) -}; - -struct TClient { - EShClient client; - int version; // version of client itself (not the client's input dialect) -}; - -struct TTarget { - EShTargetLanguage language; - unsigned int version; // the version to target, if SPIR-V, defined by "word 1" of the SPIR-V binary header -}; - -// All source/client/target versions and settings. -// Can override previous methods of setting, when items are set here. -// Expected to grow, as more are added, rather than growing parameter lists. -struct TEnvironment { - TInputLanguage input; // definition of the input language - TClient client; // what client is the overall compilation being done for? - TTarget target; // what to generate -}; - -const char* StageName(EShLanguage); - -} // end namespace glslang - -// -// Types of output the linker will create. -// -typedef enum { - EShExVertexFragment, - EShExFragment -} EShExecutable; - -// -// Optimization level for the compiler. -// -typedef enum { - EShOptNoGeneration, - EShOptNone, - EShOptSimple, // Optimizations that can be done quickly - EShOptFull, // Optimizations that will take more time -} EShOptimizationLevel; - -// -// Texture and Sampler transformation mode. 
-// -typedef enum { - EShTexSampTransKeep, // keep textures and samplers as is (default) - EShTexSampTransUpgradeTextureRemoveSampler, // change texture w/o embeded sampler into sampled texture and throw away all samplers -} EShTextureSamplerTransformMode; - -// -// Message choices for what errors and warnings are given. -// -enum EShMessages { - EShMsgDefault = 0, // default is to give all required errors and extra warnings - EShMsgRelaxedErrors = (1 << 0), // be liberal in accepting input - EShMsgSuppressWarnings = (1 << 1), // suppress all warnings, except those required by the specification - EShMsgAST = (1 << 2), // print the AST intermediate representation - EShMsgSpvRules = (1 << 3), // issue messages for SPIR-V generation - EShMsgVulkanRules = (1 << 4), // issue messages for Vulkan-requirements of GLSL for SPIR-V - EShMsgOnlyPreprocessor = (1 << 5), // only print out errors produced by the preprocessor - EShMsgReadHlsl = (1 << 6), // use HLSL parsing rules and semantics - EShMsgCascadingErrors = (1 << 7), // get cascading errors; risks error-recovery issues, instead of an early exit - EShMsgKeepUncalled = (1 << 8), // for testing, don't eliminate uncalled functions - EShMsgHlslOffsets = (1 << 9), // allow block offsets to follow HLSL rules instead of GLSL rules - EShMsgDebugInfo = (1 << 10), // save debug information -}; - -// -// Build a table for bindings. This can be used for locating -// attributes, uniforms, globals, etc., as needed. -// -typedef struct { - const char* name; - int binding; -} ShBinding; - -typedef struct { - int numBindings; - ShBinding* bindings; // array of bindings -} ShBindingTable; - -// -// ShHandle held by but opaque to the driver. It is allocated, -// managed, and de-allocated by the compiler/linker. It's contents -// are defined by and used by the compiler and linker. For example, -// symbol table information and object code passed from the compiler -// to the linker can be stored where ShHandle points. -// -// If handle creation fails, 0 will be returned. -// -typedef void* ShHandle; - -// -// Driver calls these to create and destroy compiler/linker -// objects. -// -SH_IMPORT_EXPORT ShHandle ShConstructCompiler(const EShLanguage, int debugOptions); // one per shader -SH_IMPORT_EXPORT ShHandle ShConstructLinker(const EShExecutable, int debugOptions); // one per shader pair -SH_IMPORT_EXPORT ShHandle ShConstructUniformMap(); // one per uniform namespace (currently entire program object) -SH_IMPORT_EXPORT void ShDestruct(ShHandle); - -// -// The return value of ShCompile is boolean, non-zero indicating -// success. -// -// The info-log should be written by ShCompile into -// ShHandle, so it can answer future queries. -// -SH_IMPORT_EXPORT int ShCompile( - const ShHandle, - const char* const shaderStrings[], - const int numStrings, - const int* lengths, - const EShOptimizationLevel, - const TBuiltInResource *resources, - int debugOptions, - int defaultVersion = 110, // use 100 for ES environment, overridden by #version in shader - bool forwardCompatible = false, // give errors for use of deprecated features - EShMessages messages = EShMsgDefault // warnings and errors - ); - -SH_IMPORT_EXPORT int ShLinkExt( - const ShHandle, // linker object - const ShHandle h[], // compiler objects to link together - const int numHandles); - -// -// ShSetEncrpytionMethod is a place-holder for specifying -// how source code is encrypted. 
-// -SH_IMPORT_EXPORT void ShSetEncryptionMethod(ShHandle); - -// -// All the following return 0 if the information is not -// available in the object passed down, or the object is bad. -// -SH_IMPORT_EXPORT const char* ShGetInfoLog(const ShHandle); -SH_IMPORT_EXPORT const void* ShGetExecutable(const ShHandle); -SH_IMPORT_EXPORT int ShSetVirtualAttributeBindings(const ShHandle, const ShBindingTable*); // to detect user aliasing -SH_IMPORT_EXPORT int ShSetFixedAttributeBindings(const ShHandle, const ShBindingTable*); // to force any physical mappings -// -// Tell the linker to never assign a vertex attribute to this list of physical attributes -// -SH_IMPORT_EXPORT int ShExcludeAttributes(const ShHandle, int *attributes, int count); - -// -// Returns the location ID of the named uniform. -// Returns -1 if error. -// -SH_IMPORT_EXPORT int ShGetUniformLocation(const ShHandle uniformMap, const char* name); - -#ifdef __cplusplus - } // end extern "C" -#endif - -//////////////////////////////////////////////////////////////////////////////////////////// -// -// Deferred-Lowering C++ Interface -// ----------------------------------- -// -// Below is a new alternate C++ interface that might potentially replace the above -// opaque handle-based interface. -// -// The below is further designed to handle multiple compilation units per stage, where -// the intermediate results, including the parse tree, are preserved until link time, -// rather than the above interface which is designed to have each compilation unit -// lowered at compile time. In the above model, linking occurs on the lowered results, -// whereas in this model intra-stage linking can occur at the parse tree -// (treeRoot in TIntermediate) level, and then a full stage can be lowered. -// - -#include -#include -#include - -class TCompiler; -class TInfoSink; - -namespace glslang { - -const char* GetEsslVersionString(); -const char* GetGlslVersionString(); -int GetKhronosToolId(); - -class TIntermediate; -class TProgram; -class TPoolAllocator; - -// Call this exactly once per process before using anything else -bool InitializeProcess(); - -// Call once per process to tear down everything -void FinalizeProcess(); - -// Make one TShader per shader that you will link into a program. Then provide -// the shader through setStrings() or setStringsWithLengths(), then call parse(), -// then query the info logs. -// Optionally use setPreamble() to set a special shader string that will be -// processed before all others but won't affect the validity of #version. -// -// N.B.: Does not yet support having the same TShader instance being linked into -// multiple programs. -// -// N.B.: Destruct a linked program *before* destructing the shaders linked into it. 
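// [Editor's note, not part of the deleted glslang source] The comment block above describes the
// intended per-shader usage of this C++ interface. Below is a minimal sketch of that flow using
// only names declared in this header; the GLSL source, the zeroed resource limits, and the
// client/SPIR-V version numbers passed to the setEnv*() calls are placeholder assumptions, not
// values mandated by the header.

bool parseVertexShaderSketch(const char* glslSource)       // hypothetical helper, for illustration only
{
    glslang::InitializeProcess();                           // shown for completeness; a real app calls this once, at startup

    glslang::TShader shader(EShLangVertex);
    shader.setStrings(&glslSource, 1);
    shader.setEnvInput(glslang::EShSourceGlsl, EShLangVertex, glslang::EShClientVulkan, 100);
    shader.setEnvClient(glslang::EShClientVulkan, 100);     // client version: placeholder assumption
    shader.setEnvTarget(glslang::EshTargetSpv, 0x10000);    // "word 1" of a SPIR-V 1.0 binary header

    const TBuiltInResource resources = {};                  // placeholder; real code supplies device limits
    const bool ok = shader.parse(&resources, 110, false, EShMsgDefault);
    // On failure, shader.getInfoLog() / shader.getInfoDebugLog() describe the errors.

    glslang::FinalizeProcess();                             // likewise once per process, at shutdown
    return ok;
}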
-// -class TShader { -public: - explicit TShader(EShLanguage); - virtual ~TShader(); - void setStrings(const char* const* s, int n); - void setStringsWithLengths(const char* const* s, const int* l, int n); - void setStringsWithLengthsAndNames( - const char* const* s, const int* l, const char* const* names, int n); - void setPreamble(const char* s) { preamble = s; } - void setEntryPoint(const char* entryPoint); - void setSourceEntryPoint(const char* sourceEntryPointName); - void addProcesses(const std::vector&); - void setShiftSamplerBinding(unsigned int base); - void setShiftTextureBinding(unsigned int base); - void setShiftImageBinding(unsigned int base); - void setShiftUboBinding(unsigned int base); - void setShiftUavBinding(unsigned int base); - void setShiftCbufferBinding(unsigned int base); // synonym for setShiftUboBinding - void setShiftSsboBinding(unsigned int base); - void setResourceSetBinding(const std::vector& base); - void setAutoMapBindings(bool map); - void setAutoMapLocations(bool map); - void setHlslIoMapping(bool hlslIoMap); - void setFlattenUniformArrays(bool flatten); - void setNoStorageFormat(bool useUnknownFormat); - void setTextureSamplerTransformMode(EShTextureSamplerTransformMode mode); - - // For setting up the environment (initialized in the constructor): - void setEnvInput(EShSource lang, EShLanguage envStage, EShClient client, int version) - { - environment.input.languageFamily = lang; - environment.input.stage = envStage; - environment.input.dialect = client; - environment.input.dialectVersion = version; - } - void setEnvClient(EShClient client, int version) - { - environment.client.client = client; - environment.client.version = version; - } - void setEnvTarget(EShTargetLanguage lang, unsigned int version) - { - environment.target.language = lang; - environment.target.version = version; - } - - // Interface to #include handlers. - // - // To support #include, a client of Glslang does the following: - // 1. Call setStringsWithNames to set the source strings and associated - // names. For example, the names could be the names of the files - // containing the shader sources. - // 2. Call parse with an Includer. - // - // When the Glslang parser encounters an #include directive, it calls - // the Includer's include method with the requested include name - // together with the current string name. The returned IncludeResult - // contains the fully resolved name of the included source, together - // with the source text that should replace the #include directive - // in the source stream. After parsing that source, Glslang will - // release the IncludeResult object. - class Includer { - public: - // An IncludeResult contains the resolved name and content of a source - // inclusion. - struct IncludeResult { - IncludeResult(const std::string& headerName, const char* const headerData, const size_t headerLength, void* userData) : - headerName(headerName), headerData(headerData), headerLength(headerLength), userData(userData) { } - // For a successful inclusion, the fully resolved name of the requested - // include. For example, in a file system-based includer, full resolution - // should convert a relative path name into an absolute path name. - // For a failed inclusion, this is an empty string. - const std::string headerName; - // The content and byte length of the requested inclusion. The - // Includer producing this IncludeResult retains ownership of the - // storage. - // For a failed inclusion, the header - // field points to a string containing error details. 
- const char* const headerData; - const size_t headerLength; - // Include resolver's context. - void* userData; - protected: - IncludeResult& operator=(const IncludeResult&); - IncludeResult(); - }; - - // For both include methods below: - // - // Resolves an inclusion request by name, current source name, - // and include depth. - // On success, returns an IncludeResult containing the resolved name - // and content of the include. - // On failure, returns a nullptr, or an IncludeResult - // with an empty string for the headerName and error details in the - // header field. - // The Includer retains ownership of the contents - // of the returned IncludeResult value, and those contents must - // remain valid until the releaseInclude method is called on that - // IncludeResult object. - // - // Note "local" vs. "system" is not an "either/or": "local" is an - // extra thing to do over "system". Both might get called, as per - // the C++ specification. - - // For the "system" or <>-style includes; search the "system" paths. - virtual IncludeResult* includeSystem(const char* /*headerName*/, - const char* /*includerName*/, - size_t /*inclusionDepth*/) { return nullptr; } - - // For the "local"-only aspect of a "" include. Should not search in the - // "system" paths, because on returning a failure, the parser will - // call includeSystem() to look in the "system" locations. - virtual IncludeResult* includeLocal(const char* /*headerName*/, - const char* /*includerName*/, - size_t /*inclusionDepth*/) { return nullptr; } - - // Signals that the parser will no longer use the contents of the - // specified IncludeResult. - virtual void releaseInclude(IncludeResult*) = 0; - virtual ~Includer() {} - }; - - // Fail all Includer searches - class ForbidIncluder : public Includer { - public: - virtual void releaseInclude(IncludeResult*) override { } - }; - - bool parse(const TBuiltInResource*, int defaultVersion, EProfile defaultProfile, bool forceDefaultVersionAndProfile, - bool forwardCompatible, EShMessages, Includer&); - - bool parse(const TBuiltInResource* res, int defaultVersion, EProfile defaultProfile, bool forceDefaultVersionAndProfile, - bool forwardCompatible, EShMessages messages) - { - TShader::ForbidIncluder includer; - return parse(res, defaultVersion, defaultProfile, forceDefaultVersionAndProfile, forwardCompatible, messages, includer); - } - - // Equivalent to parse() without a default profile and without forcing defaults. 
- bool parse(const TBuiltInResource* builtInResources, int defaultVersion, bool forwardCompatible, EShMessages messages) - { - return parse(builtInResources, defaultVersion, ENoProfile, false, forwardCompatible, messages); - } - - bool parse(const TBuiltInResource* builtInResources, int defaultVersion, bool forwardCompatible, EShMessages messages, - Includer& includer) - { - return parse(builtInResources, defaultVersion, ENoProfile, false, forwardCompatible, messages, includer); - } - - bool preprocess(const TBuiltInResource* builtInResources, - int defaultVersion, EProfile defaultProfile, bool forceDefaultVersionAndProfile, - bool forwardCompatible, EShMessages message, std::string* outputString, - Includer& includer); - - const char* getInfoLog(); - const char* getInfoDebugLog(); - - EShLanguage getStage() const { return stage; } - -protected: - TPoolAllocator* pool; - EShLanguage stage; - TCompiler* compiler; - TIntermediate* intermediate; - TInfoSink* infoSink; - // strings and lengths follow the standard for glShaderSource: - // strings is an array of numStrings pointers to string data. - // lengths can be null, but if not it is an array of numStrings - // integers containing the length of the associated strings. - // if lengths is null or lengths[n] < 0 the associated strings[n] is - // assumed to be null-terminated. - // stringNames is the optional names for all the strings. If stringNames - // is null, then none of the strings has name. If a certain element in - // stringNames is null, then the corresponding string does not have name. - const char* const* strings; - const int* lengths; - const char* const* stringNames; - const char* preamble; - int numStrings; - - // a function in the source string can be renamed FROM this TO the name given in setEntryPoint. - std::string sourceEntryPointName; - - TEnvironment environment; - - friend class TProgram; - -private: - TShader& operator=(TShader&); -}; - -class TReflection; -class TIoMapper; - -// Allows to customize the binding layout after linking. -// All used uniform variables will invoke at least validateBinding. -// If validateBinding returned true then the other resolveBinding, -// resolveSet, and resolveLocation are invoked to resolve the binding -// and descriptor set index respectively. -// -// Invocations happen in a particular order: -// 1) all shader inputs -// 2) all shader outputs -// 3) all uniforms with binding and set already defined -// 4) all uniforms with binding but no set defined -// 5) all uniforms with set but no binding defined -// 6) all uniforms with no binding and no set defined -// -// mapIO will use this resolver in two phases. The first -// phase is a notification phase, calling the corresponging -// notifiy callbacks, this phase ends with a call to endNotifications. -// Phase two starts directly after the call to endNotifications -// and calls all other callbacks to validate and to get the -// bindings, sets, locations, component and color indices. -// -// NOTE: that still limit checks are applied to bindings and sets -// and may result in an error. -class TIoMapResolver -{ -public: - virtual ~TIoMapResolver() {} - - // Should return true if the resulting/current binding would be okay. - // Basic idea is to do aliasing binding checks with this. - virtual bool validateBinding(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0; - // Should return a value >= 0 if the current binding should be overridden. - // Return -1 if the current binding (including no binding) should be kept. 
- virtual int resolveBinding(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0; - // Should return a value >= 0 if the current set should be overridden. - // Return -1 if the current set (including no set) should be kept. - virtual int resolveSet(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0; - // Should return a value >= 0 if the current location should be overridden. - // Return -1 if the current location (including no location) should be kept. - virtual int resolveUniformLocation(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0; - // Should return true if the resulting/current setup would be okay. - // Basic idea is to do aliasing checks and reject invalid semantic names. - virtual bool validateInOut(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0; - // Should return a value >= 0 if the current location should be overridden. - // Return -1 if the current location (including no location) should be kept. - virtual int resolveInOutLocation(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0; - // Should return a value >= 0 if the current component index should be overridden. - // Return -1 if the current component index (including no index) should be kept. - virtual int resolveInOutComponent(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0; - // Should return a value >= 0 if the current color index should be overridden. - // Return -1 if the current color index (including no index) should be kept. - virtual int resolveInOutIndex(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0; - // Notification of a uniform variable - virtual void notifyBinding(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0; - // Notification of a in or out variable - virtual void notifyInOut(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0; - // Called by mapIO when it has finished the notify pass - virtual void endNotifications(EShLanguage stage) = 0; - // Called by mapIO when it starts its notify pass for the given stage - virtual void beginNotifications(EShLanguage stage) = 0; - // Called by mipIO when it starts its resolve pass for the given stage - virtual void beginResolve(EShLanguage stage) = 0; - // Called by mapIO when it has finished the resolve pass - virtual void endResolve(EShLanguage stage) = 0; -}; - -// Make one TProgram per set of shaders that will get linked together. Add all -// the shaders that are to be linked together. After calling shader.parse() -// for all shaders, call link(). -// -// N.B.: Destruct a linked program *before* destructing the shaders linked into it. 
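// [Editor's note, not part of the deleted glslang source] A short sketch of the linking step the
// comment above describes. 'vs' and 'fs' are assumed to be TShader objects whose parse() calls
// have already succeeded; the message flags are illustrative.

bool linkStagesSketch(glslang::TShader& vs, glslang::TShader& fs)  // hypothetical helper
{
    glslang::TProgram program;          // local, so it is destroyed before the shaders it links (see the N.B. above)
    program.addShader(&vs);
    program.addShader(&fs);

    if (!program.link(EShMsgDefault))
        return false;                   // program.getInfoLog() explains why linking failed

    program.buildReflection();          // optional: liveness analysis and index mapping
    return program.getIntermediate(EShLangVertex) != nullptr;
}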
-// -class TProgram { -public: - TProgram(); - virtual ~TProgram(); - void addShader(TShader* shader) { stages[shader->stage].push_back(shader); } - - // Link Validation interface - bool link(EShMessages); - const char* getInfoLog(); - const char* getInfoDebugLog(); - - TIntermediate* getIntermediate(EShLanguage stage) const { return intermediate[stage]; } - - // Reflection Interface - bool buildReflection(); // call first, to do liveness analysis, index mapping, etc.; returns false on failure - int getNumLiveUniformVariables() const; // can be used for glGetProgramiv(GL_ACTIVE_UNIFORMS) - int getNumLiveUniformBlocks() const; // can be used for glGetProgramiv(GL_ACTIVE_UNIFORM_BLOCKS) - const char* getUniformName(int index) const; // can be used for "name" part of glGetActiveUniform() - const char* getUniformBlockName(int blockIndex) const; // can be used for glGetActiveUniformBlockName() - int getUniformBlockSize(int blockIndex) const; // can be used for glGetActiveUniformBlockiv(UNIFORM_BLOCK_DATA_SIZE) - int getUniformIndex(const char* name) const; // can be used for glGetUniformIndices() - int getUniformBinding(int index) const; // returns the binding number - int getUniformBlockIndex(int index) const; // can be used for glGetActiveUniformsiv(GL_UNIFORM_BLOCK_INDEX) - int getUniformBlockCounterIndex(int index) const; // returns block index of associated counter. - int getUniformType(int index) const; // can be used for glGetActiveUniformsiv(GL_UNIFORM_TYPE) - int getUniformBufferOffset(int index) const; // can be used for glGetActiveUniformsiv(GL_UNIFORM_OFFSET) - int getUniformArraySize(int index) const; // can be used for glGetActiveUniformsiv(GL_UNIFORM_SIZE) - int getNumLiveAttributes() const; // can be used for glGetProgramiv(GL_ACTIVE_ATTRIBUTES) - unsigned getLocalSize(int dim) const; // return dim'th local size - const char *getAttributeName(int index) const; // can be used for glGetActiveAttrib() - int getAttributeType(int index) const; // can be used for glGetActiveAttrib() - const TType* getUniformTType(int index) const; // returns a TType* - const TType* getUniformBlockTType(int index) const; // returns a TType* - const TType* getAttributeTType(int index) const; // returns a TType* - - void dumpReflection(); - - // I/O mapping: apply base offsets and map live unbound variables - // If resolver is not provided it uses the previous approach - // and respects auto assignment and offsets. 
- bool mapIO(TIoMapResolver* resolver = NULL); - -protected: - bool linkStage(EShLanguage, EShMessages); - - TPoolAllocator* pool; - std::list stages[EShLangCount]; - TIntermediate* intermediate[EShLangCount]; - bool newedIntermediate[EShLangCount]; // track which intermediate were "new" versus reusing a singleton unit in a stage - TInfoSink* infoSink; - TReflection* reflection; - TIoMapper* ioMapper; - bool linked; - -private: - TProgram(TProgram&); - TProgram& operator=(TProgram&); -}; - -} // end namespace glslang - -#endif // _COMPILER_INTERFACE_INCLUDED_ diff --git a/deps/glslang/glslang/updateGrammar b/deps/glslang/glslang/updateGrammar deleted file mode 100644 index f8fa81da..00000000 --- a/deps/glslang/glslang/updateGrammar +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash - -bison --defines=MachineIndependent/glslang_tab.cpp.h -t MachineIndependent/glslang.y -o MachineIndependent/glslang_tab.cpp diff --git a/deps/glslang/hlsl/CMakeLists.txt b/deps/glslang/hlsl/CMakeLists.txt deleted file mode 100644 index 3160dbff..00000000 --- a/deps/glslang/hlsl/CMakeLists.txt +++ /dev/null @@ -1,25 +0,0 @@ -set(SOURCES - hlslAttributes.cpp - hlslParseHelper.cpp - hlslScanContext.cpp - hlslOpMap.cpp - hlslTokenStream.cpp - hlslGrammar.cpp - hlslParseables.cpp) - - set(HEADERS - hlslAttributes.h - hlslParseHelper.h - hlslTokens.h - hlslScanContext.h - hlslOpMap.h - hlslTokenStream.h - hlslGrammar.h - hlslParseables.h) - -add_library(HLSL STATIC ${SOURCES} ${HEADERS}) -set_property(TARGET HLSL PROPERTY FOLDER hlsl) - -if(WIN32) - source_group("Source" FILES ${SOURCES} ${HEADERS}) -endif(WIN32) diff --git a/deps/glslang/hlsl/hlslAttributes.cpp b/deps/glslang/hlsl/hlslAttributes.cpp deleted file mode 100644 index 14f7a7bd..00000000 --- a/deps/glslang/hlsl/hlslAttributes.cpp +++ /dev/null @@ -1,118 +0,0 @@ -// -// Copyright (C) 2016 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of Google, Inc., nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. 
-// - -#include "hlslAttributes.h" -#include -#include - -namespace glslang { - // Map the given string to an attribute enum from TAttributeType, - // or EatNone if invalid. - TAttributeType TAttributeMap::attributeFromName(const TString& name) - { - // These are case insensitive. - TString lowername(name); - std::transform(lowername.begin(), lowername.end(), lowername.begin(), ::tolower); - - if (lowername == "allow_uav_condition") - return EatAllow_uav_condition; - else if (lowername == "branch") - return EatBranch; - else if (lowername == "call") - return EatCall; - else if (lowername == "domain") - return EatDomain; - else if (lowername == "earlydepthstencil") - return EatEarlyDepthStencil; - else if (lowername == "fastopt") - return EatFastOpt; - else if (lowername == "flatten") - return EatFlatten; - else if (lowername == "forcecase") - return EatForceCase; - else if (lowername == "instance") - return EatInstance; - else if (lowername == "maxtessfactor") - return EatMaxTessFactor; - else if (lowername == "maxvertexcount") - return EatMaxVertexCount; - else if (lowername == "numthreads") - return EatNumThreads; - else if (lowername == "outputcontrolpoints") - return EatOutputControlPoints; - else if (lowername == "outputtopology") - return EatOutputTopology; - else if (lowername == "partitioning") - return EatPartitioning; - else if (lowername == "patchconstantfunc") - return EatPatchConstantFunc; - else if (lowername == "unroll") - return EatUnroll; - else if (lowername == "loop") - return EatLoop; - else - return EatNone; - } - - // Look up entry, inserting if it's not there, and if name is a valid attribute name - // as known by attributeFromName. - TAttributeType TAttributeMap::setAttribute(const TString* name, TIntermAggregate* value) - { - if (name == nullptr) - return EatNone; - - const TAttributeType attr = attributeFromName(*name); - - if (attr != EatNone) - attributes[attr] = value; - - return attr; - } - - // Look up entry (const version), and return aggregate node. This cannot change the map. - const TIntermAggregate* TAttributeMap::operator[](TAttributeType attr) const - { - const auto entry = attributes.find(attr); - - return (entry == attributes.end()) ? nullptr : entry->second; - } - - // True if entry exists in map (even if value is nullptr) - bool TAttributeMap::contains(TAttributeType attr) const - { - return attributes.find(attr) != attributes.end(); - } - -} // end namespace glslang diff --git a/deps/glslang/hlsl/hlslAttributes.h b/deps/glslang/hlsl/hlslAttributes.h deleted file mode 100644 index b32a53cb..00000000 --- a/deps/glslang/hlsl/hlslAttributes.h +++ /dev/null @@ -1,112 +0,0 @@ -// -// Copyright (C) 2016 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of Google, Inc., nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#ifndef HLSLATTRIBUTES_H_ -#define HLSLATTRIBUTES_H_ - -#include -#include -#include "hlslScanContext.h" -#include "../glslang/Include/Common.h" - -namespace glslang { - enum TAttributeType { - EatNone, - EatAllow_uav_condition, - EatBranch, - EatCall, - EatDomain, - EatEarlyDepthStencil, - EatFastOpt, - EatFlatten, - EatForceCase, - EatInstance, - EatMaxTessFactor, - EatNumThreads, - EatMaxVertexCount, - EatOutputControlPoints, - EatOutputTopology, - EatPartitioning, - EatPatchConstantFunc, - EatPatchSize, - EatUnroll, - EatLoop, - }; -} - -namespace std { - // Allow use of TAttributeType enum in hash_map without calling code having to cast. - template <> struct hash { - std::size_t operator()(glslang::TAttributeType attr) const { - return std::hash()(int(attr)); - } - }; -} // end namespace std - -namespace glslang { - class TIntermAggregate; - - class TAttributeMap { - public: - // Search for and potentially add the attribute into the map. Return the - // attribute type enum for it, if found, else EatNone. - TAttributeType setAttribute(const TString* name, TIntermAggregate* value); - - // Const lookup: search for (but do not modify) the attribute in the map. - const TIntermAggregate* operator[](TAttributeType) const; - - // True if entry exists in map (even if value is nullptr) - bool contains(TAttributeType) const; - - protected: - // Find an attribute enum given its name. - static TAttributeType attributeFromName(const TString&); - - std::unordered_map attributes; - }; - - class TFunctionDeclarator { - public: - TFunctionDeclarator() : function(nullptr), body(nullptr) { } - TSourceLoc loc; - TFunction* function; - TAttributeMap attributes; - TVector* body; - }; - -} // end namespace glslang - - -#endif // HLSLATTRIBUTES_H_ diff --git a/deps/glslang/hlsl/hlslGrammar.cpp b/deps/glslang/hlsl/hlslGrammar.cpp deleted file mode 100644 index 2f687c5a..00000000 --- a/deps/glslang/hlsl/hlslGrammar.cpp +++ /dev/null @@ -1,3888 +0,0 @@ -// -// Copyright (C) 2016 Google, Inc. -// Copyright (C) 2016 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. 
-// -// Neither the name of Google, Inc., nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -// -// This is a set of mutually recursive methods implementing the HLSL grammar. -// Generally, each returns -// - through an argument: a type specifically appropriate to which rule it -// recognized -// - through the return value: true/false to indicate whether or not it -// recognized its rule -// -// As much as possible, only grammar recognition should happen in this file, -// with all other work being farmed out to hlslParseHelper.cpp, which in turn -// will build the AST. -// -// The next token, yet to be "accepted" is always sitting in 'token'. -// When a method says it accepts a rule, that means all tokens involved -// in the rule will have been consumed, and none left in 'token'. -// - -#include "hlslTokens.h" -#include "hlslGrammar.h" -#include "hlslAttributes.h" - -namespace glslang { - -// Root entry point to this recursive decent parser. -// Return true if compilation unit was successfully accepted. -bool HlslGrammar::parse() -{ - advanceToken(); - return acceptCompilationUnit(); -} - -void HlslGrammar::expected(const char* syntax) -{ - parseContext.error(token.loc, "Expected", syntax, ""); -} - -void HlslGrammar::unimplemented(const char* error) -{ - parseContext.error(token.loc, "Unimplemented", error, ""); -} - -// IDENTIFIER -// THIS -// type that can be used as IDENTIFIER -// -// Only process the next token if it is an identifier. -// Return true if it was an identifier. -bool HlslGrammar::acceptIdentifier(HlslToken& idToken) -{ - // IDENTIFIER - if (peekTokenClass(EHTokIdentifier)) { - idToken = token; - advanceToken(); - return true; - } - - // THIS - // -> maps to the IDENTIFIER spelled with the internal special name for 'this' - if (peekTokenClass(EHTokThis)) { - idToken = token; - advanceToken(); - idToken.tokenClass = EHTokIdentifier; - idToken.string = NewPoolTString(intermediate.implicitThisName); - return true; - } - - // type that can be used as IDENTIFIER - - // Even though "sample", "bool", "float", etc keywords (for types, interpolation modifiers), - // they ARE still accepted as identifiers. This is not a dense space: e.g, "void" is not a - // valid identifier, nor is "linear". This code special cases the known instances of this, so - // e.g, "int sample;" or "float float;" is accepted. Other cases can be added here if needed. 
- - const char* idString = getTypeString(peek()); - if (idString == nullptr) - return false; - - token.string = NewPoolTString(idString); - token.tokenClass = EHTokIdentifier; - idToken = token; - typeIdentifiers = true; - - advanceToken(); - - return true; -} - -// compilationUnit -// : declaration_list EOF -// -bool HlslGrammar::acceptCompilationUnit() -{ - TIntermNode* unitNode = nullptr; - - if (! acceptDeclarationList(unitNode)) - return false; - - if (! peekTokenClass(EHTokNone)) - return false; - - // set root of AST - if (unitNode && !unitNode->getAsAggregate()) - unitNode = intermediate.growAggregate(nullptr, unitNode); - intermediate.setTreeRoot(unitNode); - - return true; -} - -// Recognize the following, but with the extra condition that it can be -// successfully terminated by EOF or '}'. -// -// declaration_list -// : list of declaration_or_semicolon followed by EOF or RIGHT_BRACE -// -// declaration_or_semicolon -// : declaration -// : SEMICOLON -// -bool HlslGrammar::acceptDeclarationList(TIntermNode*& nodeList) -{ - do { - // HLSL allows extra semicolons between global declarations - do { } while (acceptTokenClass(EHTokSemicolon)); - - // EOF or RIGHT_BRACE - if (peekTokenClass(EHTokNone) || peekTokenClass(EHTokRightBrace)) - return true; - - // declaration - if (! acceptDeclaration(nodeList)) - return false; - } while (true); - - return true; -} - -// sampler_state -// : LEFT_BRACE [sampler_state_assignment ... ] RIGHT_BRACE -// -// sampler_state_assignment -// : sampler_state_identifier EQUAL value SEMICOLON -// -// sampler_state_identifier -// : ADDRESSU -// | ADDRESSV -// | ADDRESSW -// | BORDERCOLOR -// | FILTER -// | MAXANISOTROPY -// | MAXLOD -// | MINLOD -// | MIPLODBIAS -// -bool HlslGrammar::acceptSamplerState() -{ - // TODO: this should be genericized to accept a list of valid tokens and - // return token/value pairs. Presently it is specific to texture values. - - if (! acceptTokenClass(EHTokLeftBrace)) - return true; - - parseContext.warn(token.loc, "unimplemented", "immediate sampler state", ""); - - do { - // read state name - HlslToken state; - if (! acceptIdentifier(state)) - break; // end of list - - // FXC accepts any case - TString stateName = *state.string; - std::transform(stateName.begin(), stateName.end(), stateName.begin(), ::tolower); - - if (! acceptTokenClass(EHTokAssign)) { - expected("assign"); - return false; - } - - if (stateName == "minlod" || stateName == "maxlod") { - if (! peekTokenClass(EHTokIntConstant)) { - expected("integer"); - return false; - } - - TIntermTyped* lod = nullptr; - if (! acceptLiteral(lod)) // should never fail, since we just looked for an integer - return false; - } else if (stateName == "maxanisotropy") { - if (! peekTokenClass(EHTokIntConstant)) { - expected("integer"); - return false; - } - - TIntermTyped* maxAnisotropy = nullptr; - if (! acceptLiteral(maxAnisotropy)) // should never fail, since we just looked for an integer - return false; - } else if (stateName == "filter") { - HlslToken filterMode; - if (! acceptIdentifier(filterMode)) { - expected("filter mode"); - return false; - } - } else if (stateName == "addressu" || stateName == "addressv" || stateName == "addressw") { - HlslToken addrMode; - if (! acceptIdentifier(addrMode)) { - expected("texture address mode"); - return false; - } - } else if (stateName == "miplodbias") { - TIntermTyped* lodBias = nullptr; - if (! 
acceptLiteral(lodBias)) { - expected("lod bias"); - return false; - } - } else if (stateName == "bordercolor") { - return false; - } else { - expected("texture state"); - return false; - } - - // SEMICOLON - if (! acceptTokenClass(EHTokSemicolon)) { - expected("semicolon"); - return false; - } - } while (true); - - if (! acceptTokenClass(EHTokRightBrace)) - return false; - - return true; -} - -// sampler_declaration_dx9 -// : SAMPLER identifier EQUAL sampler_type sampler_state -// -bool HlslGrammar::acceptSamplerDeclarationDX9(TType& /*type*/) -{ - if (! acceptTokenClass(EHTokSampler)) - return false; - - // TODO: remove this when DX9 style declarations are implemented. - unimplemented("Direct3D 9 sampler declaration"); - - // read sampler name - HlslToken name; - if (! acceptIdentifier(name)) { - expected("sampler name"); - return false; - } - - if (! acceptTokenClass(EHTokAssign)) { - expected("="); - return false; - } - - return false; -} - -// declaration -// : sampler_declaration_dx9 post_decls SEMICOLON -// | fully_specified_type // for cbuffer/tbuffer -// | fully_specified_type declarator_list SEMICOLON // for non cbuffer/tbuffer -// | fully_specified_type identifier function_parameters post_decls compound_statement // function definition -// | fully_specified_type identifier sampler_state post_decls compound_statement // sampler definition -// | typedef declaration -// | NAMESPACE IDENTIFIER LEFT_BRACE declaration_list RIGHT_BRACE -// -// declarator_list -// : declarator COMMA declarator COMMA declarator... // zero or more declarators -// -// declarator -// : identifier array_specifier post_decls -// | identifier array_specifier post_decls EQUAL assignment_expression -// | identifier function_parameters post_decls // function prototype -// -// Parsing has to go pretty far in to know whether it's a variable, prototype, or -// function definition, so the implementation below doesn't perfectly divide up the grammar -// as above. (The 'identifier' in the first item in init_declarator list is the -// same as 'identifier' for function declarations.) -// -// This can generate more than one subtree, one per initializer or a function body. -// All initializer subtrees are put in their own aggregate node, making one top-level -// node for all the initializers. Each function created is a top-level node to grow -// into the passed-in nodeList. -// -// If 'nodeList' is passed in as non-null, it must an aggregate to extend for -// each top-level node the declaration creates. Otherwise, if only one top-level -// node in generated here, that is want is returned in nodeList. 
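// [Editor's note, not part of the deleted glslang source] The comment above points out that the
// parser must read well into a declaration before it can tell a variable, a prototype, and a
// function definition apart. The strings below are illustrative HLSL inputs only, one per shape:

static const char* const kDeclaratorExamples[] = {
    "float4 gColor = float4(1, 0, 0, 1);",                               // variable with initializer
    "float4 main(float4 pos : POSITION);",                               // function prototype
    "float4 main(float4 pos : POSITION) : SV_Position { return pos; }",  // function definition
};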
-// -bool HlslGrammar::acceptDeclaration(TIntermNode*& nodeList) -{ - // NAMESPACE IDENTIFIER LEFT_BRACE declaration_list RIGHT_BRACE - if (acceptTokenClass(EHTokNamespace)) { - HlslToken namespaceToken; - if (!acceptIdentifier(namespaceToken)) { - expected("namespace name"); - return false; - } - parseContext.pushNamespace(*namespaceToken.string); - if (!acceptTokenClass(EHTokLeftBrace)) { - expected("{"); - return false; - } - if (!acceptDeclarationList(nodeList)) { - expected("declaration list"); - return false; - } - if (!acceptTokenClass(EHTokRightBrace)) { - expected("}"); - return false; - } - parseContext.popNamespace(); - return true; - } - - bool declarator_list = false; // true when processing comma separation - - // attributes - TFunctionDeclarator declarator; - acceptAttributes(declarator.attributes); - - // typedef - bool typedefDecl = acceptTokenClass(EHTokTypedef); - - TType declaredType; - - // DX9 sampler declaration use a different syntax - // DX9 shaders need to run through HLSL compiler (fxc) via a back compat mode, it isn't going to - // be possible to simultaneously compile D3D10+ style shaders and DX9 shaders. If we want to compile DX9 - // HLSL shaders, this will have to be a master level switch - // As such, the sampler keyword in D3D10+ turns into an automatic sampler type, and is commonly used - // For that reason, this line is commented out - // if (acceptSamplerDeclarationDX9(declaredType)) - // return true; - - bool forbidDeclarators = (peekTokenClass(EHTokCBuffer) || peekTokenClass(EHTokTBuffer)); - // fully_specified_type - if (! acceptFullySpecifiedType(declaredType, nodeList)) - return false; - - // cbuffer and tbuffer end with the closing '}'. - // No semicolon is included. - if (forbidDeclarators) - return true; - - // declarator_list - // : declarator - // : identifier - HlslToken idToken; - TIntermAggregate* initializers = nullptr; - while (acceptIdentifier(idToken)) { - const TString *fullName = idToken.string; - if (parseContext.symbolTable.atGlobalLevel()) - parseContext.getFullNamespaceName(fullName); - if (peekTokenClass(EHTokLeftParen)) { - // looks like function parameters - - // Potentially rename shader entry point function. No-op most of the time. - parseContext.renameShaderFunction(fullName); - - // function_parameters - declarator.function = new TFunction(fullName, declaredType); - if (!acceptFunctionParameters(*declarator.function)) { - expected("function parameter list"); - return false; - } - - // post_decls - acceptPostDecls(declarator.function->getWritableType().getQualifier()); - - // compound_statement (function body definition) or just a prototype? - declarator.loc = token.loc; - if (peekTokenClass(EHTokLeftBrace)) { - if (declarator_list) - parseContext.error(idToken.loc, "function body can't be in a declarator list", "{", ""); - if (typedefDecl) - parseContext.error(idToken.loc, "function body can't be in a typedef", "{", ""); - return acceptFunctionDefinition(declarator, nodeList, nullptr); - } else { - if (typedefDecl) - parseContext.error(idToken.loc, "function typedefs not implemented", "{", ""); - parseContext.handleFunctionDeclarator(declarator.loc, *declarator.function, true); - } - } else { - // A variable declaration. Fix the storage qualifier if it's a global. 
- if (declaredType.getQualifier().storage == EvqTemporary && parseContext.symbolTable.atGlobalLevel()) - declaredType.getQualifier().storage = EvqUniform; - - // We can handle multiple variables per type declaration, so - // the number of types can expand when arrayness is different. - TType variableType; - variableType.shallowCopy(declaredType); - - // recognize array_specifier - TArraySizes* arraySizes = nullptr; - acceptArraySpecifier(arraySizes); - - // Fix arrayness in the variableType - if (declaredType.isImplicitlySizedArray()) { - // Because "int[] a = int[2](...), b = int[3](...)" makes two arrays a and b - // of different sizes, for this case sharing the shallow copy of arrayness - // with the parseType oversubscribes it, so get a deep copy of the arrayness. - variableType.newArraySizes(declaredType.getArraySizes()); - } - if (arraySizes || variableType.isArray()) { - // In the most general case, arrayness is potentially coming both from the - // declared type and from the variable: "int[] a[];" or just one or the other. - // Merge it all to the variableType, so all arrayness is part of the variableType. - parseContext.arrayDimMerge(variableType, arraySizes); - } - - // samplers accept immediate sampler state - if (variableType.getBasicType() == EbtSampler) { - if (! acceptSamplerState()) - return false; - } - - // post_decls - acceptPostDecls(variableType.getQualifier()); - - // EQUAL assignment_expression - TIntermTyped* expressionNode = nullptr; - if (acceptTokenClass(EHTokAssign)) { - if (typedefDecl) - parseContext.error(idToken.loc, "can't have an initializer", "typedef", ""); - if (! acceptAssignmentExpression(expressionNode)) { - expected("initializer"); - return false; - } - } - - // TODO: things scoped within an annotation need their own name space; - // TODO: strings are not yet handled. - if (variableType.getBasicType() != EbtString && parseContext.getAnnotationNestingLevel() == 0) { - if (typedefDecl) - parseContext.declareTypedef(idToken.loc, *fullName, variableType); - else if (variableType.getBasicType() == EbtBlock) { - parseContext.declareBlock(idToken.loc, variableType, fullName, - variableType.isArray() ? &variableType.getArraySizes() : nullptr); - parseContext.declareStructBufferCounter(idToken.loc, variableType, *fullName); - } else { - if (variableType.getQualifier().storage == EvqUniform && ! variableType.containsOpaque()) { - // this isn't really an individual variable, but a member of the $Global buffer - parseContext.growGlobalUniformBlock(idToken.loc, variableType, *fullName); - } else { - // Declare the variable and add any initializer code to the AST. - // The top-level node is always made into an aggregate, as that's - // historically how the AST has been. - initializers = intermediate.growAggregate(initializers, - parseContext.declareVariable(idToken.loc, *fullName, variableType, expressionNode), - idToken.loc); - } - } - } - } - - // COMMA - if (acceptTokenClass(EHTokComma)) - declarator_list = true; - } - - // The top-level initializer node is a sequence. - if (initializers != nullptr) - initializers->setOperator(EOpSequence); - - // Add the initializers' aggregate to the nodeList we were handed. - if (nodeList) - nodeList = intermediate.growAggregate(nodeList, initializers); - else - nodeList = initializers; - - // SEMICOLON - if (! acceptTokenClass(EHTokSemicolon)) { - // This may have been a false detection of what appeared to be a declaration, but - // was actually an assignment such as "float = 4", where "float" is an identifier. 
- // We put the token back to let further parsing happen for cases where that may - // happen. This errors on the side of caution, and mostly triggers the error. - if (peek() == EHTokAssign || peek() == EHTokLeftBracket || peek() == EHTokDot || peek() == EHTokComma) { - recedeToken(); - return false; - } else { - expected(";"); - return false; - } - } - - return true; -} - -// control_declaration -// : fully_specified_type identifier EQUAL expression -// -bool HlslGrammar::acceptControlDeclaration(TIntermNode*& node) -{ - node = nullptr; - - // fully_specified_type - TType type; - if (! acceptFullySpecifiedType(type)) - return false; - - // filter out type casts - if (peekTokenClass(EHTokLeftParen)) { - recedeToken(); - return false; - } - - // identifier - HlslToken idToken; - if (! acceptIdentifier(idToken)) { - expected("identifier"); - return false; - } - - // EQUAL - TIntermTyped* expressionNode = nullptr; - if (! acceptTokenClass(EHTokAssign)) { - expected("="); - return false; - } - - // expression - if (! acceptExpression(expressionNode)) { - expected("initializer"); - return false; - } - - node = parseContext.declareVariable(idToken.loc, *idToken.string, type, expressionNode); - - return true; -} - -// fully_specified_type -// : type_specifier -// | type_qualifier type_specifier -// -bool HlslGrammar::acceptFullySpecifiedType(TType& type) -{ - TIntermNode* nodeList = nullptr; - return acceptFullySpecifiedType(type, nodeList); -} -bool HlslGrammar::acceptFullySpecifiedType(TType& type, TIntermNode*& nodeList) -{ - // type_qualifier - TQualifier qualifier; - qualifier.clear(); - if (! acceptQualifier(qualifier)) - return false; - TSourceLoc loc = token.loc; - - // type_specifier - if (! acceptType(type, nodeList)) { - // If this is not a type, we may have inadvertently gone down a wrong path - // by parsing "sample", which can be treated like either an identifier or a - // qualifier. Back it out, if we did. - if (qualifier.sample) - recedeToken(); - - return false; - } - if (type.getBasicType() == EbtBlock) { - // the type was a block, which set some parts of the qualifier - parseContext.mergeQualifiers(type.getQualifier(), qualifier); - // further, it can create an anonymous instance of the block - if (peek() != EHTokIdentifier) - parseContext.declareBlock(loc, type); - } else { - // Some qualifiers are set when parsing the type. Merge those with - // whatever comes from acceptQualifier. - assert(qualifier.layoutFormat == ElfNone); - - qualifier.layoutFormat = type.getQualifier().layoutFormat; - qualifier.precision = type.getQualifier().precision; - - if (type.getQualifier().storage == EvqOut || - type.getQualifier().storage == EvqBuffer) { - qualifier.storage = type.getQualifier().storage; - qualifier.readonly = type.getQualifier().readonly; - } - - if (type.isBuiltIn()) - qualifier.builtIn = type.getQualifier().builtIn; - - type.getQualifier() = qualifier; - } - - return true; -} - -// type_qualifier -// : qualifier qualifier ... -// -// Zero or more of these, so this can't return false. -// -bool HlslGrammar::acceptQualifier(TQualifier& qualifier) -{ - do { - switch (peek()) { - case EHTokStatic: - qualifier.storage = parseContext.symbolTable.atGlobalLevel() ? EvqGlobal : EvqTemporary; - break; - case EHTokExtern: - // TODO: no meaning in glslang? 
- break; - case EHTokShared: - // TODO: hint - break; - case EHTokGroupShared: - qualifier.storage = EvqShared; - break; - case EHTokUniform: - qualifier.storage = EvqUniform; - break; - case EHTokConst: - qualifier.storage = EvqConst; - break; - case EHTokVolatile: - qualifier.volatil = true; - break; - case EHTokLinear: - qualifier.smooth = true; - break; - case EHTokCentroid: - qualifier.centroid = true; - break; - case EHTokNointerpolation: - qualifier.flat = true; - break; - case EHTokNoperspective: - qualifier.nopersp = true; - break; - case EHTokSample: - qualifier.sample = true; - break; - case EHTokRowMajor: - qualifier.layoutMatrix = ElmColumnMajor; - break; - case EHTokColumnMajor: - qualifier.layoutMatrix = ElmRowMajor; - break; - case EHTokPrecise: - qualifier.noContraction = true; - break; - case EHTokIn: - qualifier.storage = (qualifier.storage == EvqOut) ? EvqInOut : EvqIn; - break; - case EHTokOut: - qualifier.storage = (qualifier.storage == EvqIn) ? EvqInOut : EvqOut; - break; - case EHTokInOut: - qualifier.storage = EvqInOut; - break; - case EHTokLayout: - if (! acceptLayoutQualifierList(qualifier)) - return false; - continue; - case EHTokGloballyCoherent: - qualifier.coherent = true; - break; - case EHTokInline: - // TODO: map this to SPIR-V function control - break; - - // GS geometries: these are specified on stage input variables, and are an error (not verified here) - // for output variables. - case EHTokPoint: - qualifier.storage = EvqIn; - if (!parseContext.handleInputGeometry(token.loc, ElgPoints)) - return false; - break; - case EHTokLine: - qualifier.storage = EvqIn; - if (!parseContext.handleInputGeometry(token.loc, ElgLines)) - return false; - break; - case EHTokTriangle: - qualifier.storage = EvqIn; - if (!parseContext.handleInputGeometry(token.loc, ElgTriangles)) - return false; - break; - case EHTokLineAdj: - qualifier.storage = EvqIn; - if (!parseContext.handleInputGeometry(token.loc, ElgLinesAdjacency)) - return false; - break; - case EHTokTriangleAdj: - qualifier.storage = EvqIn; - if (!parseContext.handleInputGeometry(token.loc, ElgTrianglesAdjacency)) - return false; - break; - - default: - return true; - } - advanceToken(); - } while (true); -} - -// layout_qualifier_list -// : LAYOUT LEFT_PAREN layout_qualifier COMMA layout_qualifier ... RIGHT_PAREN -// -// layout_qualifier -// : identifier -// | identifier EQUAL expression -// -// Zero or more of these, so this can't return false. -// -bool HlslGrammar::acceptLayoutQualifierList(TQualifier& qualifier) -{ - if (! acceptTokenClass(EHTokLayout)) - return false; - - // LEFT_PAREN - if (! acceptTokenClass(EHTokLeftParen)) - return false; - - do { - // identifier - HlslToken idToken; - if (! acceptIdentifier(idToken)) - break; - - // EQUAL expression - if (acceptTokenClass(EHTokAssign)) { - TIntermTyped* expr; - if (! acceptConditionalExpression(expr)) { - expected("expression"); - return false; - } - parseContext.setLayoutQualifier(idToken.loc, qualifier, *idToken.string, expr); - } else - parseContext.setLayoutQualifier(idToken.loc, qualifier, *idToken.string); - - // COMMA - if (! acceptTokenClass(EHTokComma)) - break; - } while (true); - - // RIGHT_PAREN - if (! 
acceptTokenClass(EHTokRightParen)) { - expected(")"); - return false; - } - - return true; -} - -// template_type -// : FLOAT -// | DOUBLE -// | INT -// | DWORD -// | UINT -// | BOOL -// -bool HlslGrammar::acceptTemplateVecMatBasicType(TBasicType& basicType) -{ - switch (peek()) { - case EHTokFloat: - basicType = EbtFloat; - break; - case EHTokDouble: - basicType = EbtDouble; - break; - case EHTokInt: - case EHTokDword: - basicType = EbtInt; - break; - case EHTokUint: - basicType = EbtUint; - break; - case EHTokBool: - basicType = EbtBool; - break; - default: - return false; - } - - advanceToken(); - - return true; -} - -// vector_template_type -// : VECTOR -// | VECTOR LEFT_ANGLE template_type COMMA integer_literal RIGHT_ANGLE -// -bool HlslGrammar::acceptVectorTemplateType(TType& type) -{ - if (! acceptTokenClass(EHTokVector)) - return false; - - if (! acceptTokenClass(EHTokLeftAngle)) { - // in HLSL, 'vector' alone means float4. - new(&type) TType(EbtFloat, EvqTemporary, 4); - return true; - } - - TBasicType basicType; - if (! acceptTemplateVecMatBasicType(basicType)) { - expected("scalar type"); - return false; - } - - // COMMA - if (! acceptTokenClass(EHTokComma)) { - expected(","); - return false; - } - - // integer - if (! peekTokenClass(EHTokIntConstant)) { - expected("literal integer"); - return false; - } - - TIntermTyped* vecSize; - if (! acceptLiteral(vecSize)) - return false; - - const int vecSizeI = vecSize->getAsConstantUnion()->getConstArray()[0].getIConst(); - - new(&type) TType(basicType, EvqTemporary, vecSizeI); - - if (vecSizeI == 1) - type.makeVector(); - - if (!acceptTokenClass(EHTokRightAngle)) { - expected("right angle bracket"); - return false; - } - - return true; -} - -// matrix_template_type -// : MATRIX -// | MATRIX LEFT_ANGLE template_type COMMA integer_literal COMMA integer_literal RIGHT_ANGLE -// -bool HlslGrammar::acceptMatrixTemplateType(TType& type) -{ - if (! acceptTokenClass(EHTokMatrix)) - return false; - - if (! acceptTokenClass(EHTokLeftAngle)) { - // in HLSL, 'matrix' alone means float4x4. - new(&type) TType(EbtFloat, EvqTemporary, 0, 4, 4); - return true; - } - - TBasicType basicType; - if (! acceptTemplateVecMatBasicType(basicType)) { - expected("scalar type"); - return false; - } - - // COMMA - if (! acceptTokenClass(EHTokComma)) { - expected(","); - return false; - } - - // integer rows - if (! peekTokenClass(EHTokIntConstant)) { - expected("literal integer"); - return false; - } - - TIntermTyped* rows; - if (! acceptLiteral(rows)) - return false; - - // COMMA - if (! acceptTokenClass(EHTokComma)) { - expected(","); - return false; - } - - // integer cols - if (! peekTokenClass(EHTokIntConstant)) { - expected("literal integer"); - return false; - } - - TIntermTyped* cols; - if (! 
acceptLiteral(cols)) - return false; - - new(&type) TType(basicType, EvqTemporary, 0, - rows->getAsConstantUnion()->getConstArray()[0].getIConst(), - cols->getAsConstantUnion()->getConstArray()[0].getIConst()); - - if (!acceptTokenClass(EHTokRightAngle)) { - expected("right angle bracket"); - return false; - } - - return true; -} - -// layout_geometry -// : LINESTREAM -// | POINTSTREAM -// | TRIANGLESTREAM -// -bool HlslGrammar::acceptOutputPrimitiveGeometry(TLayoutGeometry& geometry) -{ - // read geometry type - const EHlslTokenClass geometryType = peek(); - - switch (geometryType) { - case EHTokPointStream: geometry = ElgPoints; break; - case EHTokLineStream: geometry = ElgLineStrip; break; - case EHTokTriangleStream: geometry = ElgTriangleStrip; break; - default: - return false; // not a layout geometry - } - - advanceToken(); // consume the layout keyword - return true; -} - -// tessellation_decl_type -// : INPUTPATCH -// | OUTPUTPATCH -// -bool HlslGrammar::acceptTessellationDeclType(TBuiltInVariable& patchType) -{ - // read geometry type - const EHlslTokenClass tessType = peek(); - - switch (tessType) { - case EHTokInputPatch: patchType = EbvInputPatch; break; - case EHTokOutputPatch: patchType = EbvOutputPatch; break; - default: - return false; // not a tessellation decl - } - - advanceToken(); // consume the keyword - return true; -} - -// tessellation_patch_template_type -// : tessellation_decl_type LEFT_ANGLE type comma integer_literal RIGHT_ANGLE -// -bool HlslGrammar::acceptTessellationPatchTemplateType(TType& type) -{ - TBuiltInVariable patchType; - - if (! acceptTessellationDeclType(patchType)) - return false; - - if (! acceptTokenClass(EHTokLeftAngle)) - return false; - - if (! acceptType(type)) { - expected("tessellation patch type"); - return false; - } - - if (! acceptTokenClass(EHTokComma)) - return false; - - // integer size - if (! peekTokenClass(EHTokIntConstant)) { - expected("literal integer"); - return false; - } - - TIntermTyped* size; - if (! acceptLiteral(size)) - return false; - - TArraySizes* arraySizes = new TArraySizes; - arraySizes->addInnerSize(size->getAsConstantUnion()->getConstArray()[0].getIConst()); - type.newArraySizes(*arraySizes); - type.getQualifier().builtIn = patchType; - - if (! acceptTokenClass(EHTokRightAngle)) { - expected("right angle bracket"); - return false; - } - - return true; -} - -// stream_out_template_type -// : output_primitive_geometry_type LEFT_ANGLE type RIGHT_ANGLE -// -bool HlslGrammar::acceptStreamOutTemplateType(TType& type, TLayoutGeometry& geometry) -{ - geometry = ElgNone; - - if (! acceptOutputPrimitiveGeometry(geometry)) - return false; - - if (! acceptTokenClass(EHTokLeftAngle)) - return false; - - if (! acceptType(type)) { - expected("stream output type"); - return false; - } - - type.getQualifier().storage = EvqOut; - type.getQualifier().builtIn = EbvGsOutputStream; - - if (! acceptTokenClass(EHTokRightAngle)) { - expected("right angle bracket"); - return false; - } - - return true; -} - -// annotations -// : LEFT_ANGLE declaration SEMI_COLON ... declaration SEMICOLON RIGHT_ANGLE -// -bool HlslGrammar::acceptAnnotations(TQualifier&) -{ - if (! acceptTokenClass(EHTokLeftAngle)) - return false; - - // note that we are nesting a name space - parseContext.nestAnnotations(); - - // declaration SEMI_COLON ... 
declaration SEMICOLON RIGHT_ANGLE - do { - // eat any extra SEMI_COLON; don't know if the grammar calls for this or not - while (acceptTokenClass(EHTokSemicolon)) - ; - - if (acceptTokenClass(EHTokRightAngle)) - break; - - // declaration - TIntermNode* node = nullptr; - if (! acceptDeclaration(node)) { - expected("declaration in annotation"); - return false; - } - } while (true); - - parseContext.unnestAnnotations(); - return true; -} - -// sampler_type -// : SAMPLER -// | SAMPLER1D -// | SAMPLER2D -// | SAMPLER3D -// | SAMPLERCUBE -// | SAMPLERSTATE -// | SAMPLERCOMPARISONSTATE -bool HlslGrammar::acceptSamplerType(TType& type) -{ - // read sampler type - const EHlslTokenClass samplerType = peek(); - - // TODO: for DX9 - // TSamplerDim dim = EsdNone; - - bool isShadow = false; - - switch (samplerType) { - case EHTokSampler: break; - case EHTokSampler1d: /*dim = Esd1D*/; break; - case EHTokSampler2d: /*dim = Esd2D*/; break; - case EHTokSampler3d: /*dim = Esd3D*/; break; - case EHTokSamplerCube: /*dim = EsdCube*/; break; - case EHTokSamplerState: break; - case EHTokSamplerComparisonState: isShadow = true; break; - default: - return false; // not a sampler declaration - } - - advanceToken(); // consume the sampler type keyword - - TArraySizes* arraySizes = nullptr; // TODO: array - - TSampler sampler; - sampler.setPureSampler(isShadow); - - type.shallowCopy(TType(sampler, EvqUniform, arraySizes)); - - return true; -} - -// texture_type -// | BUFFER -// | TEXTURE1D -// | TEXTURE1DARRAY -// | TEXTURE2D -// | TEXTURE2DARRAY -// | TEXTURE3D -// | TEXTURECUBE -// | TEXTURECUBEARRAY -// | TEXTURE2DMS -// | TEXTURE2DMSARRAY -// | RWBUFFER -// | RWTEXTURE1D -// | RWTEXTURE1DARRAY -// | RWTEXTURE2D -// | RWTEXTURE2DARRAY -// | RWTEXTURE3D - -bool HlslGrammar::acceptTextureType(TType& type) -{ - const EHlslTokenClass textureType = peek(); - - TSamplerDim dim = EsdNone; - bool array = false; - bool ms = false; - bool image = false; - bool combined = true; - - switch (textureType) { - case EHTokBuffer: dim = EsdBuffer; combined = false; break; - case EHTokTexture1d: dim = Esd1D; break; - case EHTokTexture1darray: dim = Esd1D; array = true; break; - case EHTokTexture2d: dim = Esd2D; break; - case EHTokTexture2darray: dim = Esd2D; array = true; break; - case EHTokTexture3d: dim = Esd3D; break; - case EHTokTextureCube: dim = EsdCube; break; - case EHTokTextureCubearray: dim = EsdCube; array = true; break; - case EHTokTexture2DMS: dim = Esd2D; ms = true; break; - case EHTokTexture2DMSarray: dim = Esd2D; array = true; ms = true; break; - case EHTokRWBuffer: dim = EsdBuffer; image=true; break; - case EHTokRWTexture1d: dim = Esd1D; array=false; image=true; break; - case EHTokRWTexture1darray: dim = Esd1D; array=true; image=true; break; - case EHTokRWTexture2d: dim = Esd2D; array=false; image=true; break; - case EHTokRWTexture2darray: dim = Esd2D; array=true; image=true; break; - case EHTokRWTexture3d: dim = Esd3D; array=false; image=true; break; - default: - return false; // not a texture declaration - } - - advanceToken(); // consume the texture object keyword - - TType txType(EbtFloat, EvqUniform, 4); // default type is float4 - - TIntermTyped* msCount = nullptr; - - // texture type: required for multisample types and RWBuffer/RWTextures! - if (acceptTokenClass(EHTokLeftAngle)) { - if (! 
acceptType(txType)) { - expected("scalar or vector type"); - return false; - } - - const TBasicType basicRetType = txType.getBasicType() ; - - switch (basicRetType) { - case EbtFloat: - case EbtUint: - case EbtInt: - case EbtStruct: - break; - default: - unimplemented("basic type in texture"); - return false; - } - - // Buffers can handle small mats if they fit in 4 components - if (dim == EsdBuffer && txType.isMatrix()) { - if ((txType.getMatrixCols() * txType.getMatrixRows()) > 4) { - expected("components < 4 in matrix buffer type"); - return false; - } - - // TODO: except we don't handle it yet... - unimplemented("matrix type in buffer"); - return false; - } - - if (!txType.isScalar() && !txType.isVector() && !txType.isStruct()) { - expected("scalar, vector, or struct type"); - return false; - } - - if (ms && acceptTokenClass(EHTokComma)) { - // read sample count for multisample types, if given - if (! peekTokenClass(EHTokIntConstant)) { - expected("multisample count"); - return false; - } - - if (! acceptLiteral(msCount)) // should never fail, since we just found an integer - return false; - } - - if (! acceptTokenClass(EHTokRightAngle)) { - expected("right angle bracket"); - return false; - } - } else if (ms) { - expected("texture type for multisample"); - return false; - } else if (image) { - expected("type for RWTexture/RWBuffer"); - return false; - } - - TArraySizes* arraySizes = nullptr; - const bool shadow = false; // declared on the sampler - - TSampler sampler; - TLayoutFormat format = ElfNone; - - // Buffer, RWBuffer and RWTexture (images) require a TLayoutFormat. We handle only a limit set. - if (image || dim == EsdBuffer) - format = parseContext.getLayoutFromTxType(token.loc, txType); - - const TBasicType txBasicType = txType.isStruct() ? (*txType.getStruct())[0].type->getBasicType() - : txType.getBasicType(); - - // Non-image Buffers are combined - if (dim == EsdBuffer && !image) { - sampler.set(txType.getBasicType(), dim, array); - } else { - // DX10 textures are separated. TODO: DX9. - if (image) { - sampler.setImage(txBasicType, dim, array, shadow, ms); - } else { - sampler.setTexture(txBasicType, dim, array, shadow, ms); - } - } - - // Remember the declared return type. Function returns false on error. - if (!parseContext.setTextureReturnType(sampler, txType, token.loc)) - return false; - - // Force uncombined, if necessary - if (!combined) - sampler.combined = false; - - type.shallowCopy(TType(sampler, EvqUniform, arraySizes)); - type.getQualifier().layoutFormat = format; - - return true; -} - -// If token is for a type, update 'type' with the type information, -// and return true and advance. -// Otherwise, return false, and don't advance -bool HlslGrammar::acceptType(TType& type) -{ - TIntermNode* nodeList = nullptr; - return acceptType(type, nodeList); -} -bool HlslGrammar::acceptType(TType& type, TIntermNode*& nodeList) -{ - // Basic types for min* types, broken out here in case of future - // changes, e.g, to use native halfs. - static const TBasicType min16float_bt = EbtFloat; - static const TBasicType min10float_bt = EbtFloat; - static const TBasicType half_bt = EbtFloat; - static const TBasicType min16int_bt = EbtInt; - static const TBasicType min12int_bt = EbtInt; - static const TBasicType min16uint_bt = EbtUint; - - // Some types might have turned into identifiers. Take the hit for checking - // when this has happened. 
- if (typeIdentifiers) { - const char* identifierString = getTypeString(peek()); - if (identifierString != nullptr) { - TString name = identifierString; - // if it's an identifier, it's not a type - if (parseContext.symbolTable.find(name) != nullptr) - return false; - } - } - - switch (peek()) { - case EHTokVector: - return acceptVectorTemplateType(type); - break; - - case EHTokMatrix: - return acceptMatrixTemplateType(type); - break; - - case EHTokPointStream: // fall through - case EHTokLineStream: // ... - case EHTokTriangleStream: // ... - { - TLayoutGeometry geometry; - if (! acceptStreamOutTemplateType(type, geometry)) - return false; - - if (! parseContext.handleOutputGeometry(token.loc, geometry)) - return false; - - return true; - } - - case EHTokInputPatch: // fall through - case EHTokOutputPatch: // ... - { - if (! acceptTessellationPatchTemplateType(type)) - return false; - - return true; - } - - case EHTokSampler: // fall through - case EHTokSampler1d: // ... - case EHTokSampler2d: // ... - case EHTokSampler3d: // ... - case EHTokSamplerCube: // ... - case EHTokSamplerState: // ... - case EHTokSamplerComparisonState: // ... - return acceptSamplerType(type); - break; - - case EHTokBuffer: // fall through - case EHTokTexture1d: // ... - case EHTokTexture1darray: // ... - case EHTokTexture2d: // ... - case EHTokTexture2darray: // ... - case EHTokTexture3d: // ... - case EHTokTextureCube: // ... - case EHTokTextureCubearray: // ... - case EHTokTexture2DMS: // ... - case EHTokTexture2DMSarray: // ... - case EHTokRWTexture1d: // ... - case EHTokRWTexture1darray: // ... - case EHTokRWTexture2d: // ... - case EHTokRWTexture2darray: // ... - case EHTokRWTexture3d: // ... - case EHTokRWBuffer: // ... - return acceptTextureType(type); - break; - - case EHTokAppendStructuredBuffer: - case EHTokByteAddressBuffer: - case EHTokConsumeStructuredBuffer: - case EHTokRWByteAddressBuffer: - case EHTokRWStructuredBuffer: - case EHTokStructuredBuffer: - return acceptStructBufferType(type); - break; - - case EHTokConstantBuffer: - return acceptConstantBufferType(type); - - case EHTokClass: - case EHTokStruct: - case EHTokCBuffer: - case EHTokTBuffer: - return acceptStruct(type, nodeList); - - case EHTokIdentifier: - // An identifier could be for a user-defined type. - // Note we cache the symbol table lookup, to save for a later rule - // when this is not a type. 
- if (parseContext.lookupUserType(*token.string, type) != nullptr) { - advanceToken(); - return true; - } else - return false; - - case EHTokVoid: - new(&type) TType(EbtVoid); - break; - - case EHTokString: - new(&type) TType(EbtString); - break; - - case EHTokFloat: - new(&type) TType(EbtFloat); - break; - case EHTokFloat1: - new(&type) TType(EbtFloat); - type.makeVector(); - break; - case EHTokFloat2: - new(&type) TType(EbtFloat, EvqTemporary, 2); - break; - case EHTokFloat3: - new(&type) TType(EbtFloat, EvqTemporary, 3); - break; - case EHTokFloat4: - new(&type) TType(EbtFloat, EvqTemporary, 4); - break; - - case EHTokDouble: - new(&type) TType(EbtDouble); - break; - case EHTokDouble1: - new(&type) TType(EbtDouble); - type.makeVector(); - break; - case EHTokDouble2: - new(&type) TType(EbtDouble, EvqTemporary, 2); - break; - case EHTokDouble3: - new(&type) TType(EbtDouble, EvqTemporary, 3); - break; - case EHTokDouble4: - new(&type) TType(EbtDouble, EvqTemporary, 4); - break; - - case EHTokInt: - case EHTokDword: - new(&type) TType(EbtInt); - break; - case EHTokInt1: - new(&type) TType(EbtInt); - type.makeVector(); - break; - case EHTokInt2: - new(&type) TType(EbtInt, EvqTemporary, 2); - break; - case EHTokInt3: - new(&type) TType(EbtInt, EvqTemporary, 3); - break; - case EHTokInt4: - new(&type) TType(EbtInt, EvqTemporary, 4); - break; - - case EHTokUint: - new(&type) TType(EbtUint); - break; - case EHTokUint1: - new(&type) TType(EbtUint); - type.makeVector(); - break; - case EHTokUint2: - new(&type) TType(EbtUint, EvqTemporary, 2); - break; - case EHTokUint3: - new(&type) TType(EbtUint, EvqTemporary, 3); - break; - case EHTokUint4: - new(&type) TType(EbtUint, EvqTemporary, 4); - break; - - case EHTokBool: - new(&type) TType(EbtBool); - break; - case EHTokBool1: - new(&type) TType(EbtBool); - type.makeVector(); - break; - case EHTokBool2: - new(&type) TType(EbtBool, EvqTemporary, 2); - break; - case EHTokBool3: - new(&type) TType(EbtBool, EvqTemporary, 3); - break; - case EHTokBool4: - new(&type) TType(EbtBool, EvqTemporary, 4); - break; - - case EHTokHalf: - new(&type) TType(half_bt, EvqTemporary); - break; - case EHTokHalf1: - new(&type) TType(half_bt, EvqTemporary); - type.makeVector(); - break; - case EHTokHalf2: - new(&type) TType(half_bt, EvqTemporary, 2); - break; - case EHTokHalf3: - new(&type) TType(half_bt, EvqTemporary, 3); - break; - case EHTokHalf4: - new(&type) TType(half_bt, EvqTemporary, 4); - break; - - case EHTokMin16float: - new(&type) TType(min16float_bt, EvqTemporary, EpqMedium); - break; - case EHTokMin16float1: - new(&type) TType(min16float_bt, EvqTemporary, EpqMedium); - type.makeVector(); - break; - case EHTokMin16float2: - new(&type) TType(min16float_bt, EvqTemporary, EpqMedium, 2); - break; - case EHTokMin16float3: - new(&type) TType(min16float_bt, EvqTemporary, EpqMedium, 3); - break; - case EHTokMin16float4: - new(&type) TType(min16float_bt, EvqTemporary, EpqMedium, 4); - break; - - case EHTokMin10float: - new(&type) TType(min10float_bt, EvqTemporary, EpqMedium); - break; - case EHTokMin10float1: - new(&type) TType(min10float_bt, EvqTemporary, EpqMedium); - type.makeVector(); - break; - case EHTokMin10float2: - new(&type) TType(min10float_bt, EvqTemporary, EpqMedium, 2); - break; - case EHTokMin10float3: - new(&type) TType(min10float_bt, EvqTemporary, EpqMedium, 3); - break; - case EHTokMin10float4: - new(&type) TType(min10float_bt, EvqTemporary, EpqMedium, 4); - break; - - case EHTokMin16int: - new(&type) TType(min16int_bt, EvqTemporary, EpqMedium); - break; 
- case EHTokMin16int1: - new(&type) TType(min16int_bt, EvqTemporary, EpqMedium); - type.makeVector(); - break; - case EHTokMin16int2: - new(&type) TType(min16int_bt, EvqTemporary, EpqMedium, 2); - break; - case EHTokMin16int3: - new(&type) TType(min16int_bt, EvqTemporary, EpqMedium, 3); - break; - case EHTokMin16int4: - new(&type) TType(min16int_bt, EvqTemporary, EpqMedium, 4); - break; - - case EHTokMin12int: - new(&type) TType(min12int_bt, EvqTemporary, EpqMedium); - break; - case EHTokMin12int1: - new(&type) TType(min12int_bt, EvqTemporary, EpqMedium); - type.makeVector(); - break; - case EHTokMin12int2: - new(&type) TType(min12int_bt, EvqTemporary, EpqMedium, 2); - break; - case EHTokMin12int3: - new(&type) TType(min12int_bt, EvqTemporary, EpqMedium, 3); - break; - case EHTokMin12int4: - new(&type) TType(min12int_bt, EvqTemporary, EpqMedium, 4); - break; - - case EHTokMin16uint: - new(&type) TType(min16uint_bt, EvqTemporary, EpqMedium); - break; - case EHTokMin16uint1: - new(&type) TType(min16uint_bt, EvqTemporary, EpqMedium); - type.makeVector(); - break; - case EHTokMin16uint2: - new(&type) TType(min16uint_bt, EvqTemporary, EpqMedium, 2); - break; - case EHTokMin16uint3: - new(&type) TType(min16uint_bt, EvqTemporary, EpqMedium, 3); - break; - case EHTokMin16uint4: - new(&type) TType(min16uint_bt, EvqTemporary, EpqMedium, 4); - break; - - case EHTokInt1x1: - new(&type) TType(EbtInt, EvqTemporary, 0, 1, 1); - break; - case EHTokInt1x2: - new(&type) TType(EbtInt, EvqTemporary, 0, 1, 2); - break; - case EHTokInt1x3: - new(&type) TType(EbtInt, EvqTemporary, 0, 1, 3); - break; - case EHTokInt1x4: - new(&type) TType(EbtInt, EvqTemporary, 0, 1, 4); - break; - case EHTokInt2x1: - new(&type) TType(EbtInt, EvqTemporary, 0, 2, 1); - break; - case EHTokInt2x2: - new(&type) TType(EbtInt, EvqTemporary, 0, 2, 2); - break; - case EHTokInt2x3: - new(&type) TType(EbtInt, EvqTemporary, 0, 2, 3); - break; - case EHTokInt2x4: - new(&type) TType(EbtInt, EvqTemporary, 0, 2, 4); - break; - case EHTokInt3x1: - new(&type) TType(EbtInt, EvqTemporary, 0, 3, 1); - break; - case EHTokInt3x2: - new(&type) TType(EbtInt, EvqTemporary, 0, 3, 2); - break; - case EHTokInt3x3: - new(&type) TType(EbtInt, EvqTemporary, 0, 3, 3); - break; - case EHTokInt3x4: - new(&type) TType(EbtInt, EvqTemporary, 0, 3, 4); - break; - case EHTokInt4x1: - new(&type) TType(EbtInt, EvqTemporary, 0, 4, 1); - break; - case EHTokInt4x2: - new(&type) TType(EbtInt, EvqTemporary, 0, 4, 2); - break; - case EHTokInt4x3: - new(&type) TType(EbtInt, EvqTemporary, 0, 4, 3); - break; - case EHTokInt4x4: - new(&type) TType(EbtInt, EvqTemporary, 0, 4, 4); - break; - - case EHTokUint1x1: - new(&type) TType(EbtUint, EvqTemporary, 0, 1, 1); - break; - case EHTokUint1x2: - new(&type) TType(EbtUint, EvqTemporary, 0, 1, 2); - break; - case EHTokUint1x3: - new(&type) TType(EbtUint, EvqTemporary, 0, 1, 3); - break; - case EHTokUint1x4: - new(&type) TType(EbtUint, EvqTemporary, 0, 1, 4); - break; - case EHTokUint2x1: - new(&type) TType(EbtUint, EvqTemporary, 0, 2, 1); - break; - case EHTokUint2x2: - new(&type) TType(EbtUint, EvqTemporary, 0, 2, 2); - break; - case EHTokUint2x3: - new(&type) TType(EbtUint, EvqTemporary, 0, 2, 3); - break; - case EHTokUint2x4: - new(&type) TType(EbtUint, EvqTemporary, 0, 2, 4); - break; - case EHTokUint3x1: - new(&type) TType(EbtUint, EvqTemporary, 0, 3, 1); - break; - case EHTokUint3x2: - new(&type) TType(EbtUint, EvqTemporary, 0, 3, 2); - break; - case EHTokUint3x3: - new(&type) TType(EbtUint, EvqTemporary, 0, 3, 3); - break; - case 
EHTokUint3x4: - new(&type) TType(EbtUint, EvqTemporary, 0, 3, 4); - break; - case EHTokUint4x1: - new(&type) TType(EbtUint, EvqTemporary, 0, 4, 1); - break; - case EHTokUint4x2: - new(&type) TType(EbtUint, EvqTemporary, 0, 4, 2); - break; - case EHTokUint4x3: - new(&type) TType(EbtUint, EvqTemporary, 0, 4, 3); - break; - case EHTokUint4x4: - new(&type) TType(EbtUint, EvqTemporary, 0, 4, 4); - break; - - case EHTokBool1x1: - new(&type) TType(EbtBool, EvqTemporary, 0, 1, 1); - break; - case EHTokBool1x2: - new(&type) TType(EbtBool, EvqTemporary, 0, 1, 2); - break; - case EHTokBool1x3: - new(&type) TType(EbtBool, EvqTemporary, 0, 1, 3); - break; - case EHTokBool1x4: - new(&type) TType(EbtBool, EvqTemporary, 0, 1, 4); - break; - case EHTokBool2x1: - new(&type) TType(EbtBool, EvqTemporary, 0, 2, 1); - break; - case EHTokBool2x2: - new(&type) TType(EbtBool, EvqTemporary, 0, 2, 2); - break; - case EHTokBool2x3: - new(&type) TType(EbtBool, EvqTemporary, 0, 2, 3); - break; - case EHTokBool2x4: - new(&type) TType(EbtBool, EvqTemporary, 0, 2, 4); - break; - case EHTokBool3x1: - new(&type) TType(EbtBool, EvqTemporary, 0, 3, 1); - break; - case EHTokBool3x2: - new(&type) TType(EbtBool, EvqTemporary, 0, 3, 2); - break; - case EHTokBool3x3: - new(&type) TType(EbtBool, EvqTemporary, 0, 3, 3); - break; - case EHTokBool3x4: - new(&type) TType(EbtBool, EvqTemporary, 0, 3, 4); - break; - case EHTokBool4x1: - new(&type) TType(EbtBool, EvqTemporary, 0, 4, 1); - break; - case EHTokBool4x2: - new(&type) TType(EbtBool, EvqTemporary, 0, 4, 2); - break; - case EHTokBool4x3: - new(&type) TType(EbtBool, EvqTemporary, 0, 4, 3); - break; - case EHTokBool4x4: - new(&type) TType(EbtBool, EvqTemporary, 0, 4, 4); - break; - - case EHTokFloat1x1: - new(&type) TType(EbtFloat, EvqTemporary, 0, 1, 1); - break; - case EHTokFloat1x2: - new(&type) TType(EbtFloat, EvqTemporary, 0, 1, 2); - break; - case EHTokFloat1x3: - new(&type) TType(EbtFloat, EvqTemporary, 0, 1, 3); - break; - case EHTokFloat1x4: - new(&type) TType(EbtFloat, EvqTemporary, 0, 1, 4); - break; - case EHTokFloat2x1: - new(&type) TType(EbtFloat, EvqTemporary, 0, 2, 1); - break; - case EHTokFloat2x2: - new(&type) TType(EbtFloat, EvqTemporary, 0, 2, 2); - break; - case EHTokFloat2x3: - new(&type) TType(EbtFloat, EvqTemporary, 0, 2, 3); - break; - case EHTokFloat2x4: - new(&type) TType(EbtFloat, EvqTemporary, 0, 2, 4); - break; - case EHTokFloat3x1: - new(&type) TType(EbtFloat, EvqTemporary, 0, 3, 1); - break; - case EHTokFloat3x2: - new(&type) TType(EbtFloat, EvqTemporary, 0, 3, 2); - break; - case EHTokFloat3x3: - new(&type) TType(EbtFloat, EvqTemporary, 0, 3, 3); - break; - case EHTokFloat3x4: - new(&type) TType(EbtFloat, EvqTemporary, 0, 3, 4); - break; - case EHTokFloat4x1: - new(&type) TType(EbtFloat, EvqTemporary, 0, 4, 1); - break; - case EHTokFloat4x2: - new(&type) TType(EbtFloat, EvqTemporary, 0, 4, 2); - break; - case EHTokFloat4x3: - new(&type) TType(EbtFloat, EvqTemporary, 0, 4, 3); - break; - case EHTokFloat4x4: - new(&type) TType(EbtFloat, EvqTemporary, 0, 4, 4); - break; - - case EHTokHalf1x1: - new(&type) TType(half_bt, EvqTemporary, 0, 1, 1); - break; - case EHTokHalf1x2: - new(&type) TType(half_bt, EvqTemporary, 0, 1, 2); - break; - case EHTokHalf1x3: - new(&type) TType(half_bt, EvqTemporary, 0, 1, 3); - break; - case EHTokHalf1x4: - new(&type) TType(half_bt, EvqTemporary, 0, 1, 4); - break; - case EHTokHalf2x1: - new(&type) TType(half_bt, EvqTemporary, 0, 2, 1); - break; - case EHTokHalf2x2: - new(&type) TType(half_bt, EvqTemporary, 0, 2, 2); - 
break; - case EHTokHalf2x3: - new(&type) TType(half_bt, EvqTemporary, 0, 2, 3); - break; - case EHTokHalf2x4: - new(&type) TType(half_bt, EvqTemporary, 0, 2, 4); - break; - case EHTokHalf3x1: - new(&type) TType(half_bt, EvqTemporary, 0, 3, 1); - break; - case EHTokHalf3x2: - new(&type) TType(half_bt, EvqTemporary, 0, 3, 2); - break; - case EHTokHalf3x3: - new(&type) TType(half_bt, EvqTemporary, 0, 3, 3); - break; - case EHTokHalf3x4: - new(&type) TType(half_bt, EvqTemporary, 0, 3, 4); - break; - case EHTokHalf4x1: - new(&type) TType(half_bt, EvqTemporary, 0, 4, 1); - break; - case EHTokHalf4x2: - new(&type) TType(half_bt, EvqTemporary, 0, 4, 2); - break; - case EHTokHalf4x3: - new(&type) TType(half_bt, EvqTemporary, 0, 4, 3); - break; - case EHTokHalf4x4: - new(&type) TType(half_bt, EvqTemporary, 0, 4, 4); - break; - - case EHTokDouble1x1: - new(&type) TType(EbtDouble, EvqTemporary, 0, 1, 1); - break; - case EHTokDouble1x2: - new(&type) TType(EbtDouble, EvqTemporary, 0, 1, 2); - break; - case EHTokDouble1x3: - new(&type) TType(EbtDouble, EvqTemporary, 0, 1, 3); - break; - case EHTokDouble1x4: - new(&type) TType(EbtDouble, EvqTemporary, 0, 1, 4); - break; - case EHTokDouble2x1: - new(&type) TType(EbtDouble, EvqTemporary, 0, 2, 1); - break; - case EHTokDouble2x2: - new(&type) TType(EbtDouble, EvqTemporary, 0, 2, 2); - break; - case EHTokDouble2x3: - new(&type) TType(EbtDouble, EvqTemporary, 0, 2, 3); - break; - case EHTokDouble2x4: - new(&type) TType(EbtDouble, EvqTemporary, 0, 2, 4); - break; - case EHTokDouble3x1: - new(&type) TType(EbtDouble, EvqTemporary, 0, 3, 1); - break; - case EHTokDouble3x2: - new(&type) TType(EbtDouble, EvqTemporary, 0, 3, 2); - break; - case EHTokDouble3x3: - new(&type) TType(EbtDouble, EvqTemporary, 0, 3, 3); - break; - case EHTokDouble3x4: - new(&type) TType(EbtDouble, EvqTemporary, 0, 3, 4); - break; - case EHTokDouble4x1: - new(&type) TType(EbtDouble, EvqTemporary, 0, 4, 1); - break; - case EHTokDouble4x2: - new(&type) TType(EbtDouble, EvqTemporary, 0, 4, 2); - break; - case EHTokDouble4x3: - new(&type) TType(EbtDouble, EvqTemporary, 0, 4, 3); - break; - case EHTokDouble4x4: - new(&type) TType(EbtDouble, EvqTemporary, 0, 4, 4); - break; - - default: - return false; - } - - advanceToken(); - - return true; -} - -// struct -// : struct_type IDENTIFIER post_decls LEFT_BRACE struct_declaration_list RIGHT_BRACE -// | struct_type post_decls LEFT_BRACE struct_declaration_list RIGHT_BRACE -// | struct_type IDENTIFIER // use of previously declared struct type -// -// struct_type -// : STRUCT -// | CLASS -// | CBUFFER -// | TBUFFER -// -bool HlslGrammar::acceptStruct(TType& type, TIntermNode*& nodeList) -{ - // This storage qualifier will tell us whether it's an AST - // block type or just a generic structure type. - TStorageQualifier storageQualifier = EvqTemporary; - bool readonly = false; - - if (acceptTokenClass(EHTokCBuffer)) { - // CBUFFER - storageQualifier = EvqUniform; - } else if (acceptTokenClass(EHTokTBuffer)) { - // TBUFFER - storageQualifier = EvqBuffer; - readonly = true; - } else if (! acceptTokenClass(EHTokClass) && ! 
acceptTokenClass(EHTokStruct)) { - // Neither CLASS nor STRUCT - return false; - } - - // Now known to be one of CBUFFER, TBUFFER, CLASS, or STRUCT - - // IDENTIFIER - TString structName = ""; - if (peekTokenClass(EHTokIdentifier)) { - structName = *token.string; - advanceToken(); - } - - // post_decls - TQualifier postDeclQualifier; - postDeclQualifier.clear(); - bool postDeclsFound = acceptPostDecls(postDeclQualifier); - - // LEFT_BRACE, or - // struct_type IDENTIFIER - if (! acceptTokenClass(EHTokLeftBrace)) { - if (structName.size() > 0 && !postDeclsFound && parseContext.lookupUserType(structName, type) != nullptr) { - // struct_type IDENTIFIER - return true; - } else { - expected("{"); - return false; - } - } - - - // struct_declaration_list - TTypeList* typeList; - // Save each member function so they can be processed after we have a fully formed 'this'. - TVector functionDeclarators; - - parseContext.pushNamespace(structName); - bool acceptedList = acceptStructDeclarationList(typeList, nodeList, functionDeclarators); - parseContext.popNamespace(); - - if (! acceptedList) { - expected("struct member declarations"); - return false; - } - - // RIGHT_BRACE - if (! acceptTokenClass(EHTokRightBrace)) { - expected("}"); - return false; - } - - // create the user-defined type - if (storageQualifier == EvqTemporary) - new(&type) TType(typeList, structName); - else { - postDeclQualifier.storage = storageQualifier; - postDeclQualifier.readonly = readonly; - new(&type) TType(typeList, structName, postDeclQualifier); // sets EbtBlock - } - - parseContext.declareStruct(token.loc, structName, type); - - // For member functions: now that we know the type of 'this', go back and - // - add their implicit argument with 'this' (not to the mangling, just the argument list) - // - parse the functions, their tokens were saved for deferred parsing (now) - for (int b = 0; b < (int)functionDeclarators.size(); ++b) { - // update signature - if (functionDeclarators[b].function->hasImplicitThis()) - functionDeclarators[b].function->addThisParameter(type, intermediate.implicitThisName); - } - - // All member functions get parsed inside the class/struct namespace and with the - // class/struct members in a symbol-table level. - parseContext.pushNamespace(structName); - parseContext.pushThisScope(type, functionDeclarators); - bool deferredSuccess = true; - for (int b = 0; b < (int)functionDeclarators.size() && deferredSuccess; ++b) { - // parse body - pushTokenStream(functionDeclarators[b].body); - if (! acceptFunctionBody(functionDeclarators[b], nodeList)) - deferredSuccess = false; - popTokenStream(); - } - parseContext.popThisScope(); - parseContext.popNamespace(); - - return deferredSuccess; -} - -// constantbuffer -// : CONSTANTBUFFER LEFT_ANGLE type RIGHT_ANGLE -bool HlslGrammar::acceptConstantBufferType(TType& type) -{ - if (! acceptTokenClass(EHTokConstantBuffer)) - return false; - - if (! acceptTokenClass(EHTokLeftAngle)) { - expected("left angle bracket"); - return false; - } - - TType templateType; - if (! acceptType(templateType)) { - expected("type"); - return false; - } - - if (! 
acceptTokenClass(EHTokRightAngle)) { - expected("right angle bracket"); - return false; - } - - TQualifier postDeclQualifier; - postDeclQualifier.clear(); - postDeclQualifier.storage = EvqUniform; - - if (templateType.isStruct()) { - // Make a block from the type parsed as the template argument - TTypeList* typeList = templateType.getWritableStruct(); - new(&type) TType(typeList, "", postDeclQualifier); // sets EbtBlock - - type.getQualifier().storage = EvqUniform; - - return true; - } else { - parseContext.error(token.loc, "non-structure type in ConstantBuffer", "", ""); - return false; - } -} - -// struct_buffer -// : APPENDSTRUCTUREDBUFFER -// | BYTEADDRESSBUFFER -// | CONSUMESTRUCTUREDBUFFER -// | RWBYTEADDRESSBUFFER -// | RWSTRUCTUREDBUFFER -// | STRUCTUREDBUFFER -bool HlslGrammar::acceptStructBufferType(TType& type) -{ - const EHlslTokenClass structBuffType = peek(); - - // TODO: globallycoherent - bool hasTemplateType = true; - bool readonly = false; - - TStorageQualifier storage = EvqBuffer; - TBuiltInVariable builtinType = EbvNone; - - switch (structBuffType) { - case EHTokAppendStructuredBuffer: - builtinType = EbvAppendConsume; - break; - case EHTokByteAddressBuffer: - hasTemplateType = false; - readonly = true; - builtinType = EbvByteAddressBuffer; - break; - case EHTokConsumeStructuredBuffer: - builtinType = EbvAppendConsume; - break; - case EHTokRWByteAddressBuffer: - hasTemplateType = false; - builtinType = EbvRWByteAddressBuffer; - break; - case EHTokRWStructuredBuffer: - builtinType = EbvRWStructuredBuffer; - break; - case EHTokStructuredBuffer: - builtinType = EbvStructuredBuffer; - readonly = true; - break; - default: - return false; // not a structure buffer type - } - - advanceToken(); // consume the structure keyword - - // type on which this StructedBuffer is templatized. E.g, StructedBuffer ==> MyStruct - TType* templateType = new TType; - - if (hasTemplateType) { - if (! acceptTokenClass(EHTokLeftAngle)) { - expected("left angle bracket"); - return false; - } - - if (! acceptType(*templateType)) { - expected("type"); - return false; - } - if (! acceptTokenClass(EHTokRightAngle)) { - expected("right angle bracket"); - return false; - } - } else { - // byte address buffers have no explicit type. - TType uintType(EbtUint, storage); - templateType->shallowCopy(uintType); - } - - // Create an unsized array out of that type. - // TODO: does this work if it's already an array type? - TArraySizes unsizedArray; - unsizedArray.addInnerSize(UnsizedArraySize); - templateType->newArraySizes(unsizedArray); - templateType->getQualifier().storage = storage; - - // field name is canonical for all structbuffers - templateType->setFieldName("@data"); - - TTypeList* blockStruct = new TTypeList; - TTypeLoc member = { templateType, token.loc }; - blockStruct->push_back(member); - - // This is the type of the buffer block (SSBO) - TType blockType(blockStruct, "", templateType->getQualifier()); - - blockType.getQualifier().storage = storage; - blockType.getQualifier().readonly = readonly; - blockType.getQualifier().builtIn = builtinType; - - // We may have created an equivalent type before, in which case we should use its - // deep structure. - parseContext.shareStructBufferType(blockType); - - type.shallowCopy(blockType); - - return true; -} - -// struct_declaration_list -// : struct_declaration SEMI_COLON struct_declaration SEMI_COLON ... -// -// struct_declaration -// : fully_specified_type struct_declarator COMMA struct_declarator ... 
-// | fully_specified_type IDENTIFIER function_parameters post_decls compound_statement // member-function definition -// -// struct_declarator -// : IDENTIFIER post_decls -// | IDENTIFIER array_specifier post_decls -// | IDENTIFIER function_parameters post_decls // member-function prototype -// -bool HlslGrammar::acceptStructDeclarationList(TTypeList*& typeList, TIntermNode*& nodeList, - TVector& declarators) -{ - typeList = new TTypeList(); - HlslToken idToken; - - do { - // success on seeing the RIGHT_BRACE coming up - if (peekTokenClass(EHTokRightBrace)) - break; - - // struct_declaration - - bool declarator_list = false; - - // fully_specified_type - TType memberType; - if (! acceptFullySpecifiedType(memberType, nodeList)) { - expected("member type"); - return false; - } - - // struct_declarator COMMA struct_declarator ... - bool functionDefinitionAccepted = false; - do { - if (! acceptIdentifier(idToken)) { - expected("member name"); - return false; - } - - if (peekTokenClass(EHTokLeftParen)) { - // function_parameters - if (!declarator_list) { - declarators.resize(declarators.size() + 1); - // request a token stream for deferred processing - functionDefinitionAccepted = acceptMemberFunctionDefinition(nodeList, memberType, *idToken.string, - declarators.back()); - if (functionDefinitionAccepted) - break; - } - expected("member-function definition"); - return false; - } else { - // add it to the list of members - TTypeLoc member = { new TType(EbtVoid), token.loc }; - member.type->shallowCopy(memberType); - member.type->setFieldName(*idToken.string); - typeList->push_back(member); - - // array_specifier - TArraySizes* arraySizes = nullptr; - acceptArraySpecifier(arraySizes); - if (arraySizes) - typeList->back().type->newArraySizes(*arraySizes); - - acceptPostDecls(member.type->getQualifier()); - - // EQUAL assignment_expression - if (acceptTokenClass(EHTokAssign)) { - parseContext.warn(idToken.loc, "struct-member initializers ignored", "typedef", ""); - TIntermTyped* expressionNode = nullptr; - if (! acceptAssignmentExpression(expressionNode)) { - expected("initializer"); - return false; - } - } - } - // success on seeing the SEMICOLON coming up - if (peekTokenClass(EHTokSemicolon)) - break; - - // COMMA - if (acceptTokenClass(EHTokComma)) - declarator_list = true; - else { - expected(","); - return false; - } - - } while (true); - - // SEMI_COLON - if (! functionDefinitionAccepted && ! acceptTokenClass(EHTokSemicolon)) { - expected(";"); - return false; - } - - } while (true); - - return true; -} - -// member_function_definition -// | function_parameters post_decls compound_statement -// -// Expects type to have EvqGlobal for a static member and -// EvqTemporary for non-static member. 
-bool HlslGrammar::acceptMemberFunctionDefinition(TIntermNode*& nodeList, const TType& type, const TString& memberName, - TFunctionDeclarator& declarator) -{ - bool accepted = false; - - const TString* functionName = &memberName; - parseContext.getFullNamespaceName(functionName); - declarator.function = new TFunction(functionName, type); - if (type.getQualifier().storage == EvqTemporary) - declarator.function->setImplicitThis(); - else - declarator.function->setIllegalImplicitThis(); - - // function_parameters - if (acceptFunctionParameters(*declarator.function)) { - // post_decls - acceptPostDecls(declarator.function->getWritableType().getQualifier()); - - // compound_statement (function body definition) - if (peekTokenClass(EHTokLeftBrace)) { - declarator.loc = token.loc; - declarator.body = new TVector; - accepted = acceptFunctionDefinition(declarator, nodeList, declarator.body); - } - } else - expected("function parameter list"); - - return accepted; -} - -// function_parameters -// : LEFT_PAREN parameter_declaration COMMA parameter_declaration ... RIGHT_PAREN -// | LEFT_PAREN VOID RIGHT_PAREN -// -bool HlslGrammar::acceptFunctionParameters(TFunction& function) -{ - // LEFT_PAREN - if (! acceptTokenClass(EHTokLeftParen)) - return false; - - // VOID RIGHT_PAREN - if (! acceptTokenClass(EHTokVoid)) { - do { - // parameter_declaration - if (! acceptParameterDeclaration(function)) - break; - - // COMMA - if (! acceptTokenClass(EHTokComma)) - break; - } while (true); - } - - // RIGHT_PAREN - if (! acceptTokenClass(EHTokRightParen)) { - expected(")"); - return false; - } - - return true; -} - -// default_parameter_declaration -// : EQUAL conditional_expression -// : EQUAL initializer -bool HlslGrammar::acceptDefaultParameterDeclaration(const TType& type, TIntermTyped*& node) -{ - node = nullptr; - - // Valid not to have a default_parameter_declaration - if (!acceptTokenClass(EHTokAssign)) - return true; - - if (!acceptConditionalExpression(node)) { - if (!acceptInitializer(node)) - return false; - - // For initializer lists, we have to const-fold into a constructor for the type, so build - // that. - TFunction* constructor = parseContext.makeConstructorCall(token.loc, type); - if (constructor == nullptr) // cannot construct - return false; - - TIntermTyped* arguments = nullptr; - for (int i = 0; i < int(node->getAsAggregate()->getSequence().size()); i++) - parseContext.handleFunctionArgument(constructor, arguments, node->getAsAggregate()->getSequence()[i]->getAsTyped()); - - node = parseContext.handleFunctionCall(token.loc, constructor, node); - } - - // If this is simply a constant, we can use it directly. - if (node->getAsConstantUnion()) - return true; - - // Otherwise, it has to be const-foldable. - TIntermTyped* origNode = node; - - node = intermediate.fold(node->getAsAggregate()); - - if (node != nullptr && origNode != node) - return true; - - parseContext.error(token.loc, "invalid default parameter value", "", ""); - - return false; -} - -// parameter_declaration -// : fully_specified_type post_decls [ = default_parameter_declaration ] -// | fully_specified_type identifier array_specifier post_decls [ = default_parameter_declaration ] -// -bool HlslGrammar::acceptParameterDeclaration(TFunction& function) -{ - // fully_specified_type - TType* type = new TType; - if (! 
acceptFullySpecifiedType(*type)) - return false; - - // identifier - HlslToken idToken; - acceptIdentifier(idToken); - - // array_specifier - TArraySizes* arraySizes = nullptr; - acceptArraySpecifier(arraySizes); - if (arraySizes) { - if (arraySizes->isImplicit()) { - parseContext.error(token.loc, "function parameter array cannot be implicitly sized", "", ""); - return false; - } - - type->newArraySizes(*arraySizes); - } - - // post_decls - acceptPostDecls(type->getQualifier()); - - TIntermTyped* defaultValue; - if (!acceptDefaultParameterDeclaration(*type, defaultValue)) - return false; - - parseContext.paramFix(*type); - - // If any prior parameters have default values, all the parameters after that must as well. - if (defaultValue == nullptr && function.getDefaultParamCount() > 0) { - parseContext.error(idToken.loc, "invalid parameter after default value parameters", idToken.string->c_str(), ""); - return false; - } - - TParameter param = { idToken.string, type, defaultValue }; - function.addParameter(param); - - return true; -} - -// Do the work to create the function definition in addition to -// parsing the body (compound_statement). -// -// If 'deferredTokens' are passed in, just get the token stream, -// don't process. -// -bool HlslGrammar::acceptFunctionDefinition(TFunctionDeclarator& declarator, TIntermNode*& nodeList, - TVector* deferredTokens) -{ - parseContext.handleFunctionDeclarator(declarator.loc, *declarator.function, false /* not prototype */); - - if (deferredTokens) - return captureBlockTokens(*deferredTokens); - else - return acceptFunctionBody(declarator, nodeList); -} - -bool HlslGrammar::acceptFunctionBody(TFunctionDeclarator& declarator, TIntermNode*& nodeList) -{ - // we might get back an entry-point - TIntermNode* entryPointNode = nullptr; - - // This does a pushScope() - TIntermNode* functionNode = parseContext.handleFunctionDefinition(declarator.loc, *declarator.function, - declarator.attributes, entryPointNode); - - // compound_statement - TIntermNode* functionBody = nullptr; - if (! acceptCompoundStatement(functionBody)) - return false; - - // this does a popScope() - parseContext.handleFunctionBody(declarator.loc, *declarator.function, functionBody, functionNode); - - // Hook up the 1 or 2 function definitions. - nodeList = intermediate.growAggregate(nodeList, functionNode); - nodeList = intermediate.growAggregate(nodeList, entryPointNode); - - return true; -} - -// Accept an expression with parenthesis around it, where -// the parenthesis ARE NOT expression parenthesis, but the -// syntactically required ones like in "if ( expression )". -// -// Also accepts a declaration expression; "if (int a = expression)". -// -// Note this one is not set up to be speculative; as it gives -// errors if not found. -// -bool HlslGrammar::acceptParenExpression(TIntermTyped*& expression) -{ - // LEFT_PAREN - if (! acceptTokenClass(EHTokLeftParen)) - expected("("); - - bool decl = false; - TIntermNode* declNode = nullptr; - decl = acceptControlDeclaration(declNode); - if (decl) { - if (declNode == nullptr || declNode->getAsTyped() == nullptr) { - expected("initialized declaration"); - return false; - } else - expression = declNode->getAsTyped(); - } else { - // no declaration - if (! acceptExpression(expression)) { - expected("expression"); - return false; - } - } - - // RIGHT_PAREN - if (! acceptTokenClass(EHTokRightParen)) - expected(")"); - - return true; -} - -// The top-level full expression recognizer. 
-// -// expression -// : assignment_expression COMMA assignment_expression COMMA assignment_expression ... -// -bool HlslGrammar::acceptExpression(TIntermTyped*& node) -{ - node = nullptr; - - // assignment_expression - if (! acceptAssignmentExpression(node)) - return false; - - if (! peekTokenClass(EHTokComma)) - return true; - - do { - // ... COMMA - TSourceLoc loc = token.loc; - advanceToken(); - - // ... assignment_expression - TIntermTyped* rightNode = nullptr; - if (! acceptAssignmentExpression(rightNode)) { - expected("assignment expression"); - return false; - } - - node = intermediate.addComma(node, rightNode, loc); - - if (! peekTokenClass(EHTokComma)) - return true; - } while (true); -} - -// initializer -// : LEFT_BRACE RIGHT_BRACE -// | LEFT_BRACE initializer_list RIGHT_BRACE -// -// initializer_list -// : assignment_expression COMMA assignment_expression COMMA ... -// -bool HlslGrammar::acceptInitializer(TIntermTyped*& node) -{ - // LEFT_BRACE - if (! acceptTokenClass(EHTokLeftBrace)) - return false; - - // RIGHT_BRACE - TSourceLoc loc = token.loc; - if (acceptTokenClass(EHTokRightBrace)) { - // a zero-length initializer list - node = intermediate.makeAggregate(loc); - return true; - } - - // initializer_list - node = nullptr; - do { - // assignment_expression - TIntermTyped* expr; - if (! acceptAssignmentExpression(expr)) { - expected("assignment expression in initializer list"); - return false; - } - - const bool firstNode = (node == nullptr); - - node = intermediate.growAggregate(node, expr, loc); - - // If every sub-node in the list has qualifier EvqConst, the returned node becomes - // EvqConst. Otherwise, it becomes EvqTemporary. That doesn't happen with e.g. - // EvqIn or EvqPosition, since the collection isn't EvqPosition if all the members are. - if (firstNode && expr->getQualifier().storage == EvqConst) - node->getQualifier().storage = EvqConst; - else if (expr->getQualifier().storage != EvqConst) - node->getQualifier().storage = EvqTemporary; - - // COMMA - if (acceptTokenClass(EHTokComma)) { - if (acceptTokenClass(EHTokRightBrace)) // allow trailing comma - return true; - continue; - } - - // RIGHT_BRACE - if (acceptTokenClass(EHTokRightBrace)) - return true; - - expected(", or }"); - return false; - } while (true); -} - -// Accept an assignment expression, where assignment operations -// associate right-to-left. That is, it is implicit, for example -// -// a op (b op (c op d)) -// -// assigment_expression -// : initializer -// | conditional_expression -// | conditional_expression assign_op conditional_expression assign_op conditional_expression ... -// -bool HlslGrammar::acceptAssignmentExpression(TIntermTyped*& node) -{ - // initializer - if (peekTokenClass(EHTokLeftBrace)) { - if (acceptInitializer(node)) - return true; - - expected("initializer"); - return false; - } - - // conditional_expression - if (! acceptConditionalExpression(node)) - return false; - - // assignment operation? - TOperator assignOp = HlslOpMap::assignment(peek()); - if (assignOp == EOpNull) - return true; - - // assign_op - TSourceLoc loc = token.loc; - advanceToken(); - - // conditional_expression assign_op conditional_expression ... - // Done by recursing this function, which automatically - // gets the right-to-left associativity. - TIntermTyped* rightNode = nullptr; - if (! 
acceptAssignmentExpression(rightNode)) { - expected("assignment expression"); - return false; - } - - node = parseContext.handleAssign(loc, assignOp, node, rightNode); - node = parseContext.handleLvalue(loc, "assign", node); - - if (node == nullptr) { - parseContext.error(loc, "could not create assignment", "", ""); - return false; - } - - if (! peekTokenClass(EHTokComma)) - return true; - - return true; -} - -// Accept a conditional expression, which associates right-to-left, -// accomplished by the "true" expression calling down to lower -// precedence levels than this level. -// -// conditional_expression -// : binary_expression -// | binary_expression QUESTION expression COLON assignment_expression -// -bool HlslGrammar::acceptConditionalExpression(TIntermTyped*& node) -{ - // binary_expression - if (! acceptBinaryExpression(node, PlLogicalOr)) - return false; - - if (! acceptTokenClass(EHTokQuestion)) - return true; - - node = parseContext.convertConditionalExpression(token.loc, node, false); - if (node == nullptr) - return false; - - ++parseContext.controlFlowNestingLevel; // this only needs to work right if no errors - - TIntermTyped* trueNode = nullptr; - if (! acceptExpression(trueNode)) { - expected("expression after ?"); - return false; - } - TSourceLoc loc = token.loc; - - if (! acceptTokenClass(EHTokColon)) { - expected(":"); - return false; - } - - TIntermTyped* falseNode = nullptr; - if (! acceptAssignmentExpression(falseNode)) { - expected("expression after :"); - return false; - } - - --parseContext.controlFlowNestingLevel; - - node = intermediate.addSelection(node, trueNode, falseNode, loc); - - return true; -} - -// Accept a binary expression, for binary operations that -// associate left-to-right. This is, it is implicit, for example -// -// ((a op b) op c) op d -// -// binary_expression -// : expression op expression op expression ... -// -// where 'expression' is the next higher level in precedence. -// -bool HlslGrammar::acceptBinaryExpression(TIntermTyped*& node, PrecedenceLevel precedenceLevel) -{ - if (precedenceLevel > PlMul) - return acceptUnaryExpression(node); - - // assignment_expression - if (! acceptBinaryExpression(node, (PrecedenceLevel)(precedenceLevel + 1))) - return false; - - do { - TOperator op = HlslOpMap::binary(peek()); - PrecedenceLevel tokenLevel = HlslOpMap::precedenceLevel(op); - if (tokenLevel < precedenceLevel) - return true; - - // ... op - TSourceLoc loc = token.loc; - advanceToken(); - - // ... expression - TIntermTyped* rightNode = nullptr; - if (! acceptBinaryExpression(rightNode, (PrecedenceLevel)(precedenceLevel + 1))) { - expected("expression"); - return false; - } - - node = intermediate.addBinaryMath(op, node, rightNode, loc); - if (node == nullptr) { - parseContext.error(loc, "Could not perform requested binary operation", "", ""); - return false; - } - } while (true); -} - -// unary_expression -// : (type) unary_expression -// | + unary_expression -// | - unary_expression -// | ! unary_expression -// | ~ unary_expression -// | ++ unary_expression -// | -- unary_expression -// | postfix_expression -// -bool HlslGrammar::acceptUnaryExpression(TIntermTyped*& node) -{ - // (type) unary_expression - // Have to look two steps ahead, because this could be, e.g., a - // postfix_expression instead, since that also starts with at "(". 
- if (acceptTokenClass(EHTokLeftParen)) { - TType castType; - if (acceptType(castType)) { - // recognize any array_specifier as part of the type - TArraySizes* arraySizes = nullptr; - acceptArraySpecifier(arraySizes); - if (arraySizes != nullptr) - castType.newArraySizes(*arraySizes); - TSourceLoc loc = token.loc; - if (acceptTokenClass(EHTokRightParen)) { - // We've matched "(type)" now, get the expression to cast - if (! acceptUnaryExpression(node)) - return false; - - // Hook it up like a constructor - TFunction* constructorFunction = parseContext.makeConstructorCall(loc, castType); - if (constructorFunction == nullptr) { - expected("type that can be constructed"); - return false; - } - TIntermTyped* arguments = nullptr; - parseContext.handleFunctionArgument(constructorFunction, arguments, node); - node = parseContext.handleFunctionCall(loc, constructorFunction, arguments); - - return true; - } else { - // This could be a parenthesized constructor, ala (int(3)), and we just accepted - // the '(int' part. We must back up twice. - recedeToken(); - recedeToken(); - - // Note, there are no array constructors like - // (float[2](...)) - if (arraySizes != nullptr) - parseContext.error(loc, "parenthesized array constructor not allowed", "([]())", "", ""); - } - } else { - // This isn't a type cast, but it still started "(", so if it is a - // unary expression, it can only be a postfix_expression, so try that. - // Back it up first. - recedeToken(); - return acceptPostfixExpression(node); - } - } - - // peek for "op unary_expression" - TOperator unaryOp = HlslOpMap::preUnary(peek()); - - // postfix_expression (if no unary operator) - if (unaryOp == EOpNull) - return acceptPostfixExpression(node); - - // op unary_expression - TSourceLoc loc = token.loc; - advanceToken(); - if (! acceptUnaryExpression(node)) - return false; - - // + is a no-op - if (unaryOp == EOpAdd) - return true; - - node = intermediate.addUnaryMath(unaryOp, node, loc); - - // These unary ops require lvalues - if (unaryOp == EOpPreIncrement || unaryOp == EOpPreDecrement) - node = parseContext.handleLvalue(loc, "unary operator", node); - - return node != nullptr; -} - -// postfix_expression -// : LEFT_PAREN expression RIGHT_PAREN -// | literal -// | constructor -// | IDENTIFIER [ COLONCOLON IDENTIFIER [ COLONCOLON IDENTIFIER ... ] ] -// | function_call -// | postfix_expression LEFT_BRACKET integer_expression RIGHT_BRACKET -// | postfix_expression DOT IDENTIFIER -// | postfix_expression DOT IDENTIFIER arguments -// | postfix_expression arguments -// | postfix_expression INC_OP -// | postfix_expression DEC_OP -// -bool HlslGrammar::acceptPostfixExpression(TIntermTyped*& node) -{ - // Not implemented as self-recursive: - // The logical "right recursion" is done with a loop at the end - - // idToken will pick up either a variable or a function name in a function call - HlslToken idToken; - - // Find something before the postfix operations, as they can't operate - // on nothing. So, no "return true", they fall through, only "return false". - if (acceptTokenClass(EHTokLeftParen)) { - // LEFT_PAREN expression RIGHT_PAREN - if (! acceptExpression(node)) { - expected("expression"); - return false; - } - if (! 
acceptTokenClass(EHTokRightParen)) { - expected(")"); - return false; - } - } else if (acceptLiteral(node)) { - // literal (nothing else to do yet) - } else if (acceptConstructor(node)) { - // constructor (nothing else to do yet) - } else if (acceptIdentifier(idToken)) { - // user-type, namespace name, variable, or function name - TString* fullName = idToken.string; - while (acceptTokenClass(EHTokColonColon)) { - // user-type or namespace name - fullName = NewPoolTString(fullName->c_str()); - fullName->append(parseContext.scopeMangler); - if (acceptIdentifier(idToken)) - fullName->append(*idToken.string); - else { - expected("identifier after ::"); - return false; - } - } - if (! peekTokenClass(EHTokLeftParen)) { - node = parseContext.handleVariable(idToken.loc, fullName); - } else if (acceptFunctionCall(idToken.loc, *fullName, node, nullptr)) { - // function_call (nothing else to do yet) - } else { - expected("function call arguments"); - return false; - } - } else { - // nothing found, can't post operate - return false; - } - - // Something was found, chain as many postfix operations as exist. - do { - TSourceLoc loc = token.loc; - TOperator postOp = HlslOpMap::postUnary(peek()); - - // Consume only a valid post-unary operator, otherwise we are done. - switch (postOp) { - case EOpIndexDirectStruct: - case EOpIndexIndirect: - case EOpPostIncrement: - case EOpPostDecrement: - case EOpScoping: - advanceToken(); - break; - default: - return true; - } - - // We have a valid post-unary operator, process it. - switch (postOp) { - case EOpScoping: - case EOpIndexDirectStruct: - { - // DOT IDENTIFIER - // includes swizzles, member variables, and member functions - HlslToken field; - if (! acceptIdentifier(field)) { - expected("swizzle or member"); - return false; - } - - if (peekTokenClass(EHTokLeftParen)) { - // member function - TIntermTyped* thisNode = node; - - // arguments - if (! acceptFunctionCall(field.loc, *field.string, node, thisNode)) { - expected("function parameters"); - return false; - } - } else - node = parseContext.handleDotDereference(field.loc, node, *field.string); - - break; - } - case EOpIndexIndirect: - { - // LEFT_BRACKET integer_expression RIGHT_BRACKET - TIntermTyped* indexNode = nullptr; - if (! acceptExpression(indexNode) || - ! peekTokenClass(EHTokRightBracket)) { - expected("expression followed by ']'"); - return false; - } - advanceToken(); - node = parseContext.handleBracketDereference(indexNode->getLoc(), node, indexNode); - if (node == nullptr) - return false; - break; - } - case EOpPostIncrement: - // INC_OP - // fall through - case EOpPostDecrement: - // DEC_OP - node = intermediate.addUnaryMath(postOp, node, loc); - node = parseContext.handleLvalue(loc, "unary operator", node); - break; - default: - assert(0); - break; - } - } while (true); -} - -// constructor -// : type argument_list -// -bool HlslGrammar::acceptConstructor(TIntermTyped*& node) -{ - // type - TType type; - if (acceptType(type)) { - TFunction* constructorFunction = parseContext.makeConstructorCall(token.loc, type); - if (constructorFunction == nullptr) - return false; - - // arguments - TIntermTyped* arguments = nullptr; - if (! acceptArguments(constructorFunction, arguments)) { - // It's possible this is a type keyword used as an identifier. Put the token back - // for later use. 
- recedeToken(); - return false; - } - - // hook it up - node = parseContext.handleFunctionCall(arguments->getLoc(), constructorFunction, arguments); - - return true; - } - - return false; -} - -// The function_call identifier was already recognized, and passed in as idToken. -// -// function_call -// : [idToken] arguments -// -bool HlslGrammar::acceptFunctionCall(const TSourceLoc& loc, TString& name, TIntermTyped*& node, TIntermTyped* baseObject) -{ - // name - TString* functionName = nullptr; - if (baseObject == nullptr) { - functionName = &name; - } else if (parseContext.isBuiltInMethod(loc, baseObject, name)) { - // Built-in methods are not in the symbol table as methods, but as global functions - // taking an explicit 'this' as the first argument. - functionName = NewPoolTString(BUILTIN_PREFIX); - functionName->append(name); - } else { - if (! baseObject->getType().isStruct()) { - expected("structure"); - return false; - } - functionName = NewPoolTString(""); - functionName->append(baseObject->getType().getTypeName()); - parseContext.addScopeMangler(*functionName); - functionName->append(name); - } - - // function - TFunction* function = new TFunction(functionName, TType(EbtVoid)); - - // arguments - TIntermTyped* arguments = nullptr; - if (baseObject != nullptr) { - // Non-static member functions have an implicit first argument of the base object. - parseContext.handleFunctionArgument(function, arguments, baseObject); - } - if (! acceptArguments(function, arguments)) - return false; - - // call - node = parseContext.handleFunctionCall(loc, function, arguments); - - return true; -} - -// arguments -// : LEFT_PAREN expression COMMA expression COMMA ... RIGHT_PAREN -// -// The arguments are pushed onto the 'function' argument list and -// onto the 'arguments' aggregate. -// -bool HlslGrammar::acceptArguments(TFunction* function, TIntermTyped*& arguments) -{ - // LEFT_PAREN - if (! acceptTokenClass(EHTokLeftParen)) - return false; - - // RIGHT_PAREN - if (acceptTokenClass(EHTokRightParen)) - return true; - - // must now be at least one expression... - do { - // expression - TIntermTyped* arg; - if (! acceptAssignmentExpression(arg)) - return false; - - // hook it up - parseContext.handleFunctionArgument(function, arguments, arg); - - // COMMA - if (! acceptTokenClass(EHTokComma)) - break; - } while (true); - - // RIGHT_PAREN - if (! 
acceptTokenClass(EHTokRightParen)) { - expected(")"); - return false; - } - - return true; -} - -bool HlslGrammar::acceptLiteral(TIntermTyped*& node) -{ - switch (token.tokenClass) { - case EHTokIntConstant: - node = intermediate.addConstantUnion(token.i, token.loc, true); - break; - case EHTokUintConstant: - node = intermediate.addConstantUnion(token.u, token.loc, true); - break; - case EHTokFloatConstant: - node = intermediate.addConstantUnion(token.d, EbtFloat, token.loc, true); - break; - case EHTokDoubleConstant: - node = intermediate.addConstantUnion(token.d, EbtDouble, token.loc, true); - break; - case EHTokBoolConstant: - node = intermediate.addConstantUnion(token.b, token.loc, true); - break; - case EHTokStringConstant: - node = intermediate.addConstantUnion(token.string, token.loc, true); - break; - - default: - return false; - } - - advanceToken(); - - return true; -} - -// simple_statement -// : SEMICOLON -// | declaration_statement -// | expression SEMICOLON -// -bool HlslGrammar::acceptSimpleStatement(TIntermNode*& statement) -{ - // SEMICOLON - if (acceptTokenClass(EHTokSemicolon)) - return true; - - // declaration - if (acceptDeclaration(statement)) - return true; - - // expression - TIntermTyped* node; - if (acceptExpression(node)) - statement = node; - else - return false; - - // SEMICOLON (following an expression) - if (acceptTokenClass(EHTokSemicolon)) - return true; - else { - expected(";"); - return false; - } -} - -// compound_statement -// : LEFT_CURLY statement statement ... RIGHT_CURLY -// -bool HlslGrammar::acceptCompoundStatement(TIntermNode*& retStatement) -{ - TIntermAggregate* compoundStatement = nullptr; - - // LEFT_CURLY - if (! acceptTokenClass(EHTokLeftBrace)) - return false; - - // statement statement ... - TIntermNode* statement = nullptr; - while (acceptStatement(statement)) { - TIntermBranch* branch = statement ? 
statement->getAsBranchNode() : nullptr; - if (branch != nullptr && (branch->getFlowOp() == EOpCase || - branch->getFlowOp() == EOpDefault)) { - // hook up individual subsequences within a switch statement - parseContext.wrapupSwitchSubsequence(compoundStatement, statement); - compoundStatement = nullptr; - } else { - // hook it up to the growing compound statement - compoundStatement = intermediate.growAggregate(compoundStatement, statement); - } - } - if (compoundStatement) - compoundStatement->setOperator(EOpSequence); - - retStatement = compoundStatement; - - // RIGHT_CURLY - return acceptTokenClass(EHTokRightBrace); -} - -bool HlslGrammar::acceptScopedStatement(TIntermNode*& statement) -{ - parseContext.pushScope(); - bool result = acceptStatement(statement); - parseContext.popScope(); - - return result; -} - -bool HlslGrammar::acceptScopedCompoundStatement(TIntermNode*& statement) -{ - parseContext.pushScope(); - bool result = acceptCompoundStatement(statement); - parseContext.popScope(); - - return result; -} - -// statement -// : attributes attributed_statement -// -// attributed_statement -// : compound_statement -// | simple_statement -// | selection_statement -// | switch_statement -// | case_label -// | default_label -// | iteration_statement -// | jump_statement -// -bool HlslGrammar::acceptStatement(TIntermNode*& statement) -{ - statement = nullptr; - - // attributes - TAttributeMap attributes; - acceptAttributes(attributes); - - // attributed_statement - switch (peek()) { - case EHTokLeftBrace: - return acceptScopedCompoundStatement(statement); - - case EHTokIf: - return acceptSelectionStatement(statement, attributes); - - case EHTokSwitch: - return acceptSwitchStatement(statement, attributes); - - case EHTokFor: - case EHTokDo: - case EHTokWhile: - return acceptIterationStatement(statement, attributes); - - case EHTokContinue: - case EHTokBreak: - case EHTokDiscard: - case EHTokReturn: - return acceptJumpStatement(statement); - - case EHTokCase: - return acceptCaseLabel(statement); - case EHTokDefault: - return acceptDefaultLabel(statement); - - case EHTokRightBrace: - // Performance: not strictly necessary, but stops a bunch of hunting early, - // and is how sequences of statements end. - return false; - - default: - return acceptSimpleStatement(statement); - } - - return true; -} - -// attributes -// : list of zero or more of: LEFT_BRACKET attribute RIGHT_BRACKET -// -// attribute: -// : UNROLL -// | UNROLL LEFT_PAREN literal RIGHT_PAREN -// | FASTOPT -// | ALLOW_UAV_CONDITION -// | BRANCH -// | FLATTEN -// | FORCECASE -// | CALL -// | DOMAIN -// | EARLYDEPTHSTENCIL -// | INSTANCE -// | MAXTESSFACTOR -// | OUTPUTCONTROLPOINTS -// | OUTPUTTOPOLOGY -// | PARTITIONING -// | PATCHCONSTANTFUNC -// | NUMTHREADS LEFT_PAREN x_size, y_size,z z_size RIGHT_PAREN -// -void HlslGrammar::acceptAttributes(TAttributeMap& attributes) -{ - // For now, accept the [ XXX(X) ] syntax, but drop all but - // numthreads, which is used to set the CS local size. - // TODO: subset to correct set? Pass on? - do { - HlslToken idToken; - - // LEFT_BRACKET? - if (! acceptTokenClass(EHTokLeftBracket)) - return; - - // attribute - if (acceptIdentifier(idToken)) { - // 'idToken.string' is the attribute - } else if (! peekTokenClass(EHTokRightBracket)) { - expected("identifier"); - advanceToken(); - } - - TIntermAggregate* expressions = nullptr; - - // (x, ...) 
- if (acceptTokenClass(EHTokLeftParen)) { - expressions = new TIntermAggregate; - - TIntermTyped* node; - bool expectingExpression = false; - - while (acceptAssignmentExpression(node)) { - expectingExpression = false; - expressions->getSequence().push_back(node); - if (acceptTokenClass(EHTokComma)) - expectingExpression = true; - } - - // 'expressions' is an aggregate with the expressions in it - if (! acceptTokenClass(EHTokRightParen)) - expected(")"); - - // Error for partial or missing expression - if (expectingExpression || expressions->getSequence().empty()) - expected("expression"); - } - - // RIGHT_BRACKET - if (!acceptTokenClass(EHTokRightBracket)) { - expected("]"); - return; - } - - // Add any values we found into the attribute map. This accepts - // (and ignores) values not mapping to a known TAttributeType; - attributes.setAttribute(idToken.string, expressions); - } while (true); -} - -// selection_statement -// : IF LEFT_PAREN expression RIGHT_PAREN statement -// : IF LEFT_PAREN expression RIGHT_PAREN statement ELSE statement -// -bool HlslGrammar::acceptSelectionStatement(TIntermNode*& statement, const TAttributeMap& attributes) -{ - TSourceLoc loc = token.loc; - - const TSelectionControl control = parseContext.handleSelectionControl(attributes); - - // IF - if (! acceptTokenClass(EHTokIf)) - return false; - - // so that something declared in the condition is scoped to the lifetimes - // of the then-else statements - parseContext.pushScope(); - - // LEFT_PAREN expression RIGHT_PAREN - TIntermTyped* condition; - if (! acceptParenExpression(condition)) - return false; - condition = parseContext.convertConditionalExpression(loc, condition); - if (condition == nullptr) - return false; - - // create the child statements - TIntermNodePair thenElse = { nullptr, nullptr }; - - ++parseContext.controlFlowNestingLevel; // this only needs to work right if no errors - - // then statement - if (! acceptScopedStatement(thenElse.node1)) { - expected("then statement"); - return false; - } - - // ELSE - if (acceptTokenClass(EHTokElse)) { - // else statement - if (! acceptScopedStatement(thenElse.node2)) { - expected("else statement"); - return false; - } - } - - // Put the pieces together - statement = intermediate.addSelection(condition, thenElse, loc, control); - parseContext.popScope(); - --parseContext.controlFlowNestingLevel; - - return true; -} - -// switch_statement -// : SWITCH LEFT_PAREN expression RIGHT_PAREN compound_statement -// -bool HlslGrammar::acceptSwitchStatement(TIntermNode*& statement, const TAttributeMap& attributes) -{ - // SWITCH - TSourceLoc loc = token.loc; - - const TSelectionControl control = parseContext.handleSelectionControl(attributes); - - if (! acceptTokenClass(EHTokSwitch)) - return false; - - // LEFT_PAREN expression RIGHT_PAREN - parseContext.pushScope(); - TIntermTyped* switchExpression; - if (! acceptParenExpression(switchExpression)) { - parseContext.popScope(); - return false; - } - - // compound_statement - parseContext.pushSwitchSequence(new TIntermSequence); - - ++parseContext.controlFlowNestingLevel; - bool statementOkay = acceptCompoundStatement(statement); - --parseContext.controlFlowNestingLevel; - - if (statementOkay) - statement = parseContext.addSwitch(loc, switchExpression, statement ? 
statement->getAsAggregate() : nullptr, control); - - parseContext.popSwitchSequence(); - parseContext.popScope(); - - return statementOkay; -} - -// iteration_statement -// : WHILE LEFT_PAREN condition RIGHT_PAREN statement -// | DO LEFT_BRACE statement RIGHT_BRACE WHILE LEFT_PAREN expression RIGHT_PAREN SEMICOLON -// | FOR LEFT_PAREN for_init_statement for_rest_statement RIGHT_PAREN statement -// -// Non-speculative, only call if it needs to be found; WHILE or DO or FOR already seen. -bool HlslGrammar::acceptIterationStatement(TIntermNode*& statement, const TAttributeMap& attributes) -{ - TSourceLoc loc = token.loc; - TIntermTyped* condition = nullptr; - - EHlslTokenClass loop = peek(); - assert(loop == EHTokDo || loop == EHTokFor || loop == EHTokWhile); - - // WHILE or DO or FOR - advanceToken(); - - const TLoopControl control = parseContext.handleLoopControl(attributes); - - switch (loop) { - case EHTokWhile: - // so that something declared in the condition is scoped to the lifetime - // of the while sub-statement - parseContext.pushScope(); // this only needs to work right if no errors - parseContext.nestLooping(); - ++parseContext.controlFlowNestingLevel; - - // LEFT_PAREN condition RIGHT_PAREN - if (! acceptParenExpression(condition)) - return false; - condition = parseContext.convertConditionalExpression(loc, condition); - if (condition == nullptr) - return false; - - // statement - if (! acceptScopedStatement(statement)) { - expected("while sub-statement"); - return false; - } - - parseContext.unnestLooping(); - parseContext.popScope(); - --parseContext.controlFlowNestingLevel; - - statement = intermediate.addLoop(statement, condition, nullptr, true, loc, control); - - return true; - - case EHTokDo: - parseContext.nestLooping(); // this only needs to work right if no errors - ++parseContext.controlFlowNestingLevel; - - // statement - if (! acceptScopedStatement(statement)) { - expected("do sub-statement"); - return false; - } - - // WHILE - if (! acceptTokenClass(EHTokWhile)) { - expected("while"); - return false; - } - - // LEFT_PAREN condition RIGHT_PAREN - TIntermTyped* condition; - if (! acceptParenExpression(condition)) - return false; - condition = parseContext.convertConditionalExpression(loc, condition); - if (condition == nullptr) - return false; - - if (! acceptTokenClass(EHTokSemicolon)) - expected(";"); - - parseContext.unnestLooping(); - --parseContext.controlFlowNestingLevel; - - statement = intermediate.addLoop(statement, condition, 0, false, loc, control); - - return true; - - case EHTokFor: - { - // LEFT_PAREN - if (! acceptTokenClass(EHTokLeftParen)) - expected("("); - - // so that something declared in the condition is scoped to the lifetime - // of the for sub-statement - parseContext.pushScope(); - - // initializer - TIntermNode* initNode = nullptr; - if (! acceptSimpleStatement(initNode)) - expected("for-loop initializer statement"); - - parseContext.nestLooping(); // this only needs to work right if no errors - ++parseContext.controlFlowNestingLevel; - - // condition SEMI_COLON - acceptExpression(condition); - if (! acceptTokenClass(EHTokSemicolon)) - expected(";"); - if (condition != nullptr) { - condition = parseContext.convertConditionalExpression(loc, condition); - if (condition == nullptr) - return false; - } - - // iterator SEMI_COLON - TIntermTyped* iterator = nullptr; - acceptExpression(iterator); - if (! acceptTokenClass(EHTokRightParen)) - expected(")"); - - // statement - if (! 
acceptScopedStatement(statement)) { - expected("for sub-statement"); - return false; - } - - statement = intermediate.addForLoop(statement, initNode, condition, iterator, true, loc, control); - - parseContext.popScope(); - parseContext.unnestLooping(); - --parseContext.controlFlowNestingLevel; - - return true; - } - - default: - return false; - } -} - -// jump_statement -// : CONTINUE SEMICOLON -// | BREAK SEMICOLON -// | DISCARD SEMICOLON -// | RETURN SEMICOLON -// | RETURN expression SEMICOLON -// -bool HlslGrammar::acceptJumpStatement(TIntermNode*& statement) -{ - EHlslTokenClass jump = peek(); - switch (jump) { - case EHTokContinue: - case EHTokBreak: - case EHTokDiscard: - case EHTokReturn: - advanceToken(); - break; - default: - // not something we handle in this function - return false; - } - - switch (jump) { - case EHTokContinue: - statement = intermediate.addBranch(EOpContinue, token.loc); - break; - case EHTokBreak: - statement = intermediate.addBranch(EOpBreak, token.loc); - break; - case EHTokDiscard: - statement = intermediate.addBranch(EOpKill, token.loc); - break; - - case EHTokReturn: - { - // expression - TIntermTyped* node; - if (acceptExpression(node)) { - // hook it up - statement = parseContext.handleReturnValue(token.loc, node); - } else - statement = intermediate.addBranch(EOpReturn, token.loc); - break; - } - - default: - assert(0); - return false; - } - - // SEMICOLON - if (! acceptTokenClass(EHTokSemicolon)) - expected(";"); - - return true; -} - -// case_label -// : CASE expression COLON -// -bool HlslGrammar::acceptCaseLabel(TIntermNode*& statement) -{ - TSourceLoc loc = token.loc; - if (! acceptTokenClass(EHTokCase)) - return false; - - TIntermTyped* expression; - if (! acceptExpression(expression)) { - expected("case expression"); - return false; - } - - if (! acceptTokenClass(EHTokColon)) { - expected(":"); - return false; - } - - statement = parseContext.intermediate.addBranch(EOpCase, expression, loc); - - return true; -} - -// default_label -// : DEFAULT COLON -// -bool HlslGrammar::acceptDefaultLabel(TIntermNode*& statement) -{ - TSourceLoc loc = token.loc; - if (! acceptTokenClass(EHTokDefault)) - return false; - - if (! acceptTokenClass(EHTokColon)) { - expected(":"); - return false; - } - - statement = parseContext.intermediate.addBranch(EOpDefault, loc); - - return true; -} - -// array_specifier -// : LEFT_BRACKET integer_expression RGHT_BRACKET ... // optional -// : LEFT_BRACKET RGHT_BRACKET // optional -// -void HlslGrammar::acceptArraySpecifier(TArraySizes*& arraySizes) -{ - arraySizes = nullptr; - - // Early-out if there aren't any array dimensions - if (!peekTokenClass(EHTokLeftBracket)) - return; - - // If we get here, we have at least one array dimension. This will track the sizes we find. - arraySizes = new TArraySizes; - - // Collect each array dimension. - while (acceptTokenClass(EHTokLeftBracket)) { - TSourceLoc loc = token.loc; - TIntermTyped* sizeExpr = nullptr; - - // Array sizing expression is optional. If omitted, array will be later sized by initializer list. - const bool hasArraySize = acceptAssignmentExpression(sizeExpr); - - if (! acceptTokenClass(EHTokRightBracket)) { - expected("]"); - return; - } - - if (hasArraySize) { - TArraySize arraySize; - parseContext.arraySizeCheck(loc, sizeExpr, arraySize); - arraySizes->addInnerSize(arraySize); - } else { - arraySizes->addInnerSize(0); // sized by initializers. 
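// A toy illustration of the bookkeeping acceptArraySpecifier() above performs:
// repeated "[ size ]" groups are collected in order, and an empty "[]" records 0
// so the dimension can later be deduced from an initializer list. The routine
// below only handles literal digits and uses hypothetical names; it is a sketch,
// not glslang code.
#include <cctype>
#include <cstdio>
#include <vector>

// Parse "[3][2]" -> {3, 2}; "[]" -> {0}. Stops at the first malformed group.
static std::vector<int> parseArraySizes(const char* s)
{
    std::vector<int> sizes;
    while (*s == '[') {
        ++s;                                   // consume '['
        int size = 0;
        bool sawDigit = false;
        while (std::isdigit(static_cast<unsigned char>(*s))) {
            size = size * 10 + (*s - '0');
            sawDigit = true;
            ++s;
        }
        if (*s != ']')
            break;                             // malformed, like expected("]")
        ++s;                                   // consume ']'
        sizes.push_back(sawDigit ? size : 0);  // 0 == "sized by initializer"
    }
    return sizes;
}

int main()
{
    for (int d : parseArraySizes("[4][2]"))
        std::printf("%d ", d);                 // prints: 4 2
    std::printf("\n");
    return 0;
}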
- } - } -} - -// post_decls -// : COLON semantic // optional -// COLON PACKOFFSET LEFT_PAREN c[Subcomponent][.component] RIGHT_PAREN // optional -// COLON REGISTER LEFT_PAREN [shader_profile,] Type#[subcomp]opt (COMMA SPACEN)opt RIGHT_PAREN // optional -// COLON LAYOUT layout_qualifier_list -// annotations // optional -// -// Return true if any tokens were accepted. That is, -// false can be returned on successfully recognizing nothing, -// not necessarily meaning bad syntax. -// -bool HlslGrammar::acceptPostDecls(TQualifier& qualifier) -{ - bool found = false; - - do { - // COLON - if (acceptTokenClass(EHTokColon)) { - found = true; - HlslToken idToken; - if (peekTokenClass(EHTokLayout)) - acceptLayoutQualifierList(qualifier); - else if (acceptTokenClass(EHTokPackOffset)) { - // PACKOFFSET LEFT_PAREN c[Subcomponent][.component] RIGHT_PAREN - if (! acceptTokenClass(EHTokLeftParen)) { - expected("("); - return false; - } - HlslToken locationToken; - if (! acceptIdentifier(locationToken)) { - expected("c[subcomponent][.component]"); - return false; - } - HlslToken componentToken; - if (acceptTokenClass(EHTokDot)) { - if (! acceptIdentifier(componentToken)) { - expected("component"); - return false; - } - } - if (! acceptTokenClass(EHTokRightParen)) { - expected(")"); - break; - } - parseContext.handlePackOffset(locationToken.loc, qualifier, *locationToken.string, componentToken.string); - } else if (! acceptIdentifier(idToken)) { - expected("layout, semantic, packoffset, or register"); - return false; - } else if (*idToken.string == "register") { - // REGISTER LEFT_PAREN [shader_profile,] Type#[subcomp]opt (COMMA SPACEN)opt RIGHT_PAREN - // LEFT_PAREN - if (! acceptTokenClass(EHTokLeftParen)) { - expected("("); - return false; - } - HlslToken registerDesc; // for Type# - HlslToken profile; - if (! acceptIdentifier(registerDesc)) { - expected("register number description"); - return false; - } - if (registerDesc.string->size() > 1 && !isdigit((*registerDesc.string)[1]) && - acceptTokenClass(EHTokComma)) { - // Then we didn't really see the registerDesc yet, it was - // actually the profile. Adjust... - profile = registerDesc; - if (! acceptIdentifier(registerDesc)) { - expected("register number description"); - return false; - } - } - int subComponent = 0; - if (acceptTokenClass(EHTokLeftBracket)) { - // LEFT_BRACKET subcomponent RIGHT_BRACKET - if (! peekTokenClass(EHTokIntConstant)) { - expected("literal integer"); - return false; - } - subComponent = token.i; - advanceToken(); - if (! acceptTokenClass(EHTokRightBracket)) { - expected("]"); - break; - } - } - // (COMMA SPACEN)opt - HlslToken spaceDesc; - if (acceptTokenClass(EHTokComma)) { - if (! acceptIdentifier(spaceDesc)) { - expected ("space identifier"); - return false; - } - } - // RIGHT_PAREN - if (! 
acceptTokenClass(EHTokRightParen)) { - expected(")"); - break; - } - parseContext.handleRegister(registerDesc.loc, qualifier, profile.string, *registerDesc.string, subComponent, spaceDesc.string); - } else { - // semantic, in idToken.string - TString semanticUpperCase = *idToken.string; - std::transform(semanticUpperCase.begin(), semanticUpperCase.end(), semanticUpperCase.begin(), ::toupper); - parseContext.handleSemantic(idToken.loc, qualifier, mapSemantic(semanticUpperCase.c_str()), semanticUpperCase); - } - } else if (peekTokenClass(EHTokLeftAngle)) { - found = true; - acceptAnnotations(qualifier); - } else - break; - - } while (true); - - return found; -} - -// -// Get the stream of tokens from the scanner, but skip all syntactic/semantic -// processing. -// -bool HlslGrammar::captureBlockTokens(TVector& tokens) -{ - if (! peekTokenClass(EHTokLeftBrace)) - return false; - - int braceCount = 0; - - do { - switch (peek()) { - case EHTokLeftBrace: - ++braceCount; - break; - case EHTokRightBrace: - --braceCount; - break; - case EHTokNone: - // End of input before balance { } is bad... - return false; - default: - break; - } - - tokens.push_back(token); - advanceToken(); - } while (braceCount > 0); - - return true; -} - -// Return a string for just the types that can also be declared as an identifier. -const char* HlslGrammar::getTypeString(EHlslTokenClass tokenClass) const -{ - switch (tokenClass) { - case EHTokSample: return "sample"; - case EHTokHalf: return "half"; - case EHTokHalf1x1: return "half1x1"; - case EHTokHalf1x2: return "half1x2"; - case EHTokHalf1x3: return "half1x3"; - case EHTokHalf1x4: return "half1x4"; - case EHTokHalf2x1: return "half2x1"; - case EHTokHalf2x2: return "half2x2"; - case EHTokHalf2x3: return "half2x3"; - case EHTokHalf2x4: return "half2x4"; - case EHTokHalf3x1: return "half3x1"; - case EHTokHalf3x2: return "half3x2"; - case EHTokHalf3x3: return "half3x3"; - case EHTokHalf3x4: return "half3x4"; - case EHTokHalf4x1: return "half4x1"; - case EHTokHalf4x2: return "half4x2"; - case EHTokHalf4x3: return "half4x3"; - case EHTokHalf4x4: return "half4x4"; - case EHTokBool: return "bool"; - case EHTokFloat: return "float"; - case EHTokDouble: return "double"; - case EHTokInt: return "int"; - case EHTokUint: return "uint"; - case EHTokMin16float: return "min16float"; - case EHTokMin10float: return "min10float"; - case EHTokMin16int: return "min16int"; - case EHTokMin12int: return "min12int"; - default: - return nullptr; - } -} - -} // end namespace glslang diff --git a/deps/glslang/hlsl/hlslGrammar.h b/deps/glslang/hlsl/hlslGrammar.h deleted file mode 100644 index ded8e966..00000000 --- a/deps/glslang/hlsl/hlslGrammar.h +++ /dev/null @@ -1,139 +0,0 @@ -// -// Copyright (C) 2016 Google, Inc. -// Copyright (C) 2016 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of Google, Inc., nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#ifndef HLSLGRAMMAR_H_ -#define HLSLGRAMMAR_H_ - -#include "hlslParseHelper.h" -#include "hlslOpMap.h" -#include "hlslTokenStream.h" - -namespace glslang { - - class TAttributeMap; - class TFunctionDeclarator; - - // Should just be the grammar aspect of HLSL. - // Described in more detail in hlslGrammar.cpp. - - class HlslGrammar : public HlslTokenStream { - public: - HlslGrammar(HlslScanContext& scanner, HlslParseContext& parseContext) - : HlslTokenStream(scanner), parseContext(parseContext), intermediate(parseContext.intermediate), - typeIdentifiers(false) { } - virtual ~HlslGrammar() { } - - bool parse(); - - protected: - HlslGrammar(); - HlslGrammar& operator=(const HlslGrammar&); - - void expected(const char*); - void unimplemented(const char*); - bool acceptIdentifier(HlslToken&); - bool acceptCompilationUnit(); - bool acceptDeclarationList(TIntermNode*&); - bool acceptDeclaration(TIntermNode*&); - bool acceptControlDeclaration(TIntermNode*& node); - bool acceptSamplerDeclarationDX9(TType&); - bool acceptSamplerState(); - bool acceptFullySpecifiedType(TType&); - bool acceptFullySpecifiedType(TType&, TIntermNode*& nodeList); - bool acceptQualifier(TQualifier&); - bool acceptLayoutQualifierList(TQualifier&); - bool acceptType(TType&); - bool acceptType(TType&, TIntermNode*& nodeList); - bool acceptTemplateVecMatBasicType(TBasicType&); - bool acceptVectorTemplateType(TType&); - bool acceptMatrixTemplateType(TType&); - bool acceptTessellationDeclType(TBuiltInVariable&); - bool acceptTessellationPatchTemplateType(TType&); - bool acceptStreamOutTemplateType(TType&, TLayoutGeometry&); - bool acceptOutputPrimitiveGeometry(TLayoutGeometry&); - bool acceptAnnotations(TQualifier&); - bool acceptSamplerType(TType&); - bool acceptTextureType(TType&); - bool acceptStructBufferType(TType&); - bool acceptConstantBufferType(TType&); - bool acceptStruct(TType&, TIntermNode*& nodeList); - bool acceptStructDeclarationList(TTypeList*&, TIntermNode*& nodeList, TVector&); - bool acceptMemberFunctionDefinition(TIntermNode*& nodeList, const TType&, const TString& memberName, - TFunctionDeclarator&); - bool acceptFunctionParameters(TFunction&); - bool acceptParameterDeclaration(TFunction&); - bool acceptFunctionDefinition(TFunctionDeclarator&, TIntermNode*& nodeList, TVector* deferredTokens); - bool acceptFunctionBody(TFunctionDeclarator& declarator, TIntermNode*& nodeList); - bool acceptParenExpression(TIntermTyped*&); - bool acceptExpression(TIntermTyped*&); - bool acceptInitializer(TIntermTyped*&); - bool acceptAssignmentExpression(TIntermTyped*&); - bool acceptConditionalExpression(TIntermTyped*&); - bool acceptBinaryExpression(TIntermTyped*&, PrecedenceLevel); - bool 
acceptUnaryExpression(TIntermTyped*&); - bool acceptPostfixExpression(TIntermTyped*&); - bool acceptConstructor(TIntermTyped*&); - bool acceptFunctionCall(const TSourceLoc&, TString& name, TIntermTyped*&, TIntermTyped* objectBase); - bool acceptArguments(TFunction*, TIntermTyped*&); - bool acceptLiteral(TIntermTyped*&); - bool acceptSimpleStatement(TIntermNode*&); - bool acceptCompoundStatement(TIntermNode*&); - bool acceptScopedStatement(TIntermNode*&); - bool acceptScopedCompoundStatement(TIntermNode*&); - bool acceptStatement(TIntermNode*&); - bool acceptNestedStatement(TIntermNode*&); - void acceptAttributes(TAttributeMap&); - bool acceptSelectionStatement(TIntermNode*&, const TAttributeMap&); - bool acceptSwitchStatement(TIntermNode*&, const TAttributeMap&); - bool acceptIterationStatement(TIntermNode*&, const TAttributeMap&); - bool acceptJumpStatement(TIntermNode*&); - bool acceptCaseLabel(TIntermNode*&); - bool acceptDefaultLabel(TIntermNode*&); - void acceptArraySpecifier(TArraySizes*&); - bool acceptPostDecls(TQualifier&); - bool acceptDefaultParameterDeclaration(const TType&, TIntermTyped*&); - - bool captureBlockTokens(TVector& tokens); - const char* getTypeString(EHlslTokenClass tokenClass) const; - - HlslParseContext& parseContext; // state of parsing and helper functions for building the intermediate - TIntermediate& intermediate; // the final product, the intermediate representation, includes the AST - bool typeIdentifiers; // shader uses some types as identifiers - }; - -} // end namespace glslang - -#endif // HLSLGRAMMAR_H_ diff --git a/deps/glslang/hlsl/hlslOpMap.cpp b/deps/glslang/hlsl/hlslOpMap.cpp deleted file mode 100644 index ebe6fbd9..00000000 --- a/deps/glslang/hlsl/hlslOpMap.cpp +++ /dev/null @@ -1,173 +0,0 @@ -// -// Copyright (C) 2016 Google, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of Google, Inc., nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -// Map from physical token form (e.g. '-') to logical operator -// form (e.g., binary subtract or unary negate). 
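// The tables in this file are position-dependent: the same physical token can name
// different logical operators ('-' is unary negate or binary subtract, "++" is pre-
// or post-increment), which is why the grammar consults separate assignment/binary/
// preUnary/postUnary maps depending on whether it is about to read an operand or has
// just finished one. A minimal illustration of that idea follows; all names here
// (Op, classify) are hypothetical and not glslang API.
#include <cstdio>

namespace opmap_sketch {

enum class Op { None, Negate, Subtract, PreIncrement, PostIncrement };

// expectingOperand is true in prefix position (start of an expression or right after
// another operator) and false immediately after a complete operand.
static Op classify(const char* tok, bool expectingOperand)
{
    if (tok[0] == '-' && tok[1] == '\0')
        return expectingOperand ? Op::Negate : Op::Subtract;
    if (tok[0] == '+' && tok[1] == '+' && tok[2] == '\0')
        return expectingOperand ? Op::PreIncrement : Op::PostIncrement;
    return Op::None;
}

}  // namespace opmap_sketch

int main()
{
    // In "-a - b", the first '-' arrives while an operand is still expected (negate);
    // the second arrives after 'a' has been parsed (binary subtract).
    std::printf("%d %d\n",
                static_cast<int>(opmap_sketch::classify("-", true)),    // Negate
                static_cast<int>(opmap_sketch::classify("-", false)));  // Subtract
    return 0;
}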
- -#include "hlslOpMap.h" - -namespace glslang { - -// Map parsing tokens that could be assignments into assignment operators. -TOperator HlslOpMap::assignment(EHlslTokenClass op) -{ - switch (op) { - case EHTokAssign: return EOpAssign; - case EHTokMulAssign: return EOpMulAssign; - case EHTokDivAssign: return EOpDivAssign; - case EHTokAddAssign: return EOpAddAssign; - case EHTokModAssign: return EOpModAssign; - case EHTokLeftAssign: return EOpLeftShiftAssign; - case EHTokRightAssign: return EOpRightShiftAssign; - case EHTokAndAssign: return EOpAndAssign; - case EHTokXorAssign: return EOpExclusiveOrAssign; - case EHTokOrAssign: return EOpInclusiveOrAssign; - case EHTokSubAssign: return EOpSubAssign; - - default: - return EOpNull; - } -} - -// Map parsing tokens that could be binary operations into binary operators. -TOperator HlslOpMap::binary(EHlslTokenClass op) -{ - switch (op) { - case EHTokPlus: return EOpAdd; - case EHTokDash: return EOpSub; - case EHTokStar: return EOpMul; - case EHTokSlash: return EOpDiv; - case EHTokPercent: return EOpMod; - case EHTokRightOp: return EOpRightShift; - case EHTokLeftOp: return EOpLeftShift; - case EHTokAmpersand: return EOpAnd; - case EHTokVerticalBar: return EOpInclusiveOr; - case EHTokCaret: return EOpExclusiveOr; - case EHTokEqOp: return EOpEqual; - case EHTokNeOp: return EOpNotEqual; - case EHTokLeftAngle: return EOpLessThan; - case EHTokRightAngle: return EOpGreaterThan; - case EHTokLeOp: return EOpLessThanEqual; - case EHTokGeOp: return EOpGreaterThanEqual; - case EHTokOrOp: return EOpLogicalOr; - case EHTokXorOp: return EOpLogicalXor; - case EHTokAndOp: return EOpLogicalAnd; - - default: - return EOpNull; - } -} - -// Map parsing tokens that could be unary operations into unary operators. -// These are just the ones that can appear in front of its operand. -TOperator HlslOpMap::preUnary(EHlslTokenClass op) -{ - switch (op) { - case EHTokPlus: return EOpAdd; // means no-op, but still a unary op was present - case EHTokDash: return EOpNegative; - case EHTokBang: return EOpLogicalNot; - case EHTokTilde: return EOpBitwiseNot; - - case EHTokIncOp: return EOpPreIncrement; - case EHTokDecOp: return EOpPreDecrement; - - default: return EOpNull; // means not a pre-unary op - } -} - -// Map parsing tokens that could be unary operations into unary operators. -// These are just the ones that can appear behind its operand. -TOperator HlslOpMap::postUnary(EHlslTokenClass op) -{ - switch (op) { - case EHTokDot: return EOpIndexDirectStruct; - case EHTokLeftBracket: return EOpIndexIndirect; - - case EHTokIncOp: return EOpPostIncrement; - case EHTokDecOp: return EOpPostDecrement; - - case EHTokColonColon: return EOpScoping; - - default: return EOpNull; // means not a post-unary op - } -} - -// Map operators into their level of precedence. 
-PrecedenceLevel HlslOpMap::precedenceLevel(TOperator op) -{ - switch (op) { - case EOpLogicalOr: - return PlLogicalOr; - case EOpLogicalXor: - return PlLogicalXor; - case EOpLogicalAnd: - return PlLogicalAnd; - - case EOpInclusiveOr: - return PlBitwiseOr; - case EOpExclusiveOr: - return PlBitwiseXor; - case EOpAnd: - return PlBitwiseAnd; - - case EOpEqual: - case EOpNotEqual: - return PlEquality; - - case EOpLessThan: - case EOpGreaterThan: - case EOpLessThanEqual: - case EOpGreaterThanEqual: - return PlRelational; - - case EOpRightShift: - case EOpLeftShift: - return PlShift; - - case EOpAdd: - case EOpSub: - return PlAdd; - - case EOpMul: - case EOpDiv: - case EOpMod: - return PlMul; - - default: - return PlBad; - } -} - -} // end namespace glslang diff --git a/deps/glslang/hlsl/hlslOpMap.h b/deps/glslang/hlsl/hlslOpMap.h deleted file mode 100644 index 4e783f3f..00000000 --- a/deps/glslang/hlsl/hlslOpMap.h +++ /dev/null @@ -1,69 +0,0 @@ -// -// Copyright (C) 2016 Google, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of Google, Inc., nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#ifndef HLSLOPMAP_H_ -#define HLSLOPMAP_H_ - -#include "hlslScanContext.h" - -namespace glslang { - - enum PrecedenceLevel { - PlBad, - PlLogicalOr, - PlLogicalXor, - PlLogicalAnd, - PlBitwiseOr, - PlBitwiseXor, - PlBitwiseAnd, - PlEquality, - PlRelational, - PlShift, - PlAdd, - PlMul - }; - - class HlslOpMap { - public: - static TOperator assignment(EHlslTokenClass op); - static TOperator binary(EHlslTokenClass op); - static TOperator preUnary(EHlslTokenClass op); - static TOperator postUnary(EHlslTokenClass op); - static PrecedenceLevel precedenceLevel(TOperator); - }; - -} // end namespace glslang - -#endif // HLSLOPMAP_H_ diff --git a/deps/glslang/hlsl/hlslParseHelper.cpp b/deps/glslang/hlsl/hlslParseHelper.cpp deleted file mode 100644 index 929bf419..00000000 --- a/deps/glslang/hlsl/hlslParseHelper.cpp +++ /dev/null @@ -1,9390 +0,0 @@ -// -// Copyright (C) 2016 Google, Inc. -// Copyright (C) 2016 LunarG, Inc. -// -// All rights reserved. 
-// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#include "hlslParseHelper.h" -#include "hlslScanContext.h" -#include "hlslGrammar.h" -#include "hlslAttributes.h" - -#include "../glslang/MachineIndependent/Scan.h" -#include "../glslang/MachineIndependent/preprocessor/PpContext.h" - -#include "../glslang/OSDependent/osinclude.h" - -#include -#include -#include -#include -#include - -namespace glslang { - -HlslParseContext::HlslParseContext(TSymbolTable& symbolTable, TIntermediate& interm, bool parsingBuiltins, - int version, EProfile profile, const SpvVersion& spvVersion, EShLanguage language, - TInfoSink& infoSink, - const TString sourceEntryPointName, - bool forwardCompatible, EShMessages messages) : - TParseContextBase(symbolTable, interm, parsingBuiltins, version, profile, spvVersion, language, infoSink, - forwardCompatible, messages), - annotationNestingLevel(0), - inputPatch(nullptr), - nextInLocation(0), nextOutLocation(0), - sourceEntryPointName(sourceEntryPointName), - entryPointFunction(nullptr), - entryPointFunctionBody(nullptr), - gsStreamOutput(nullptr), - clipDistanceVariable(nullptr), - cullDistanceVariable(nullptr) -{ - globalUniformDefaults.clear(); - globalUniformDefaults.layoutMatrix = ElmRowMajor; - globalUniformDefaults.layoutPacking = ElpStd140; - - globalBufferDefaults.clear(); - globalBufferDefaults.layoutMatrix = ElmRowMajor; - globalBufferDefaults.layoutPacking = ElpStd430; - - globalInputDefaults.clear(); - globalOutputDefaults.clear(); - - clipSemanticNSize.fill(0); - cullSemanticNSize.fill(0); - - // "Shaders in the transform - // feedback capturing mode have an initial global default of - // layout(xfb_buffer = 0) out;" - if (language == EShLangVertex || - language == EShLangTessControl || - language == EShLangTessEvaluation || - language == EShLangGeometry) - globalOutputDefaults.layoutXfbBuffer = 0; - - if (language == EShLangGeometry) - globalOutputDefaults.layoutStream = 0; - - if (spvVersion.spv == 0 || spvVersion.vulkan == 0) - infoSink.info << "ERROR: HLSL currently only supported when requesting 
SPIR-V for Vulkan.\n"; -} - -HlslParseContext::~HlslParseContext() -{ -} - -void HlslParseContext::initializeExtensionBehavior() -{ - TParseContextBase::initializeExtensionBehavior(); - - // HLSL allows #line by default. - extensionBehavior[E_GL_GOOGLE_cpp_style_line_directive] = EBhEnable; -} - -void HlslParseContext::setLimits(const TBuiltInResource& r) -{ - resources = r; - intermediate.setLimits(resources); -} - -// -// Parse an array of strings using the parser in HlslRules. -// -// Returns true for successful acceptance of the shader, false if any errors. -// -bool HlslParseContext::parseShaderStrings(TPpContext& ppContext, TInputScanner& input, bool versionWillBeError) -{ - currentScanner = &input; - ppContext.setInput(input, versionWillBeError); - - HlslScanContext scanContext(*this, ppContext); - HlslGrammar grammar(scanContext, *this); - if (!grammar.parse()) { - // Print a message formated such that if you click on the message it will take you right to - // the line through most UIs. - const glslang::TSourceLoc& sourceLoc = input.getSourceLoc(); - infoSink.info << sourceLoc.name << "(" << sourceLoc.line << "): error at column " << sourceLoc.column - << ", HLSL parsing failed.\n"; - ++numErrors; - return false; - } - - finish(); - - return numErrors == 0; -} - -// -// Return true if this l-value node should be converted in some manner. -// For instance: turning a load aggregate into a store in an l-value. -// -bool HlslParseContext::shouldConvertLValue(const TIntermNode* node) const -{ - if (node == nullptr || node->getAsTyped() == nullptr) - return false; - - const TIntermAggregate* lhsAsAggregate = node->getAsAggregate(); - const TIntermBinary* lhsAsBinary = node->getAsBinaryNode(); - - // If it's a swizzled/indexed aggregate, look at the left node instead. - if (lhsAsBinary != nullptr && - (lhsAsBinary->getOp() == EOpVectorSwizzle || lhsAsBinary->getOp() == EOpIndexDirect)) - lhsAsAggregate = lhsAsBinary->getLeft()->getAsAggregate(); - if (lhsAsAggregate != nullptr && lhsAsAggregate->getOp() == EOpImageLoad) - return true; - - // If it's a syntactic write to a sampler, we will use that to establish - // a compile-time alias. - if (node->getAsTyped()->getBasicType() == EbtSampler) - return true; - - return false; -} - -void HlslParseContext::growGlobalUniformBlock(const TSourceLoc& loc, TType& memberType, const TString& memberName, - TTypeList* newTypeList) -{ - newTypeList = nullptr; - correctUniform(memberType.getQualifier()); - if (memberType.isStruct()) { - auto it = ioTypeMap.find(memberType.getStruct()); - if (it != ioTypeMap.end() && it->second.uniform) - newTypeList = it->second.uniform; - } - TParseContextBase::growGlobalUniformBlock(loc, memberType, memberName, newTypeList); -} - -// -// Return a TLayoutFormat corresponding to the given texture type. -// -TLayoutFormat HlslParseContext::getLayoutFromTxType(const TSourceLoc& loc, const TType& txType) -{ - if (txType.isStruct()) { - // TODO: implement. - error(loc, "unimplemented: structure type in image or buffer", "", ""); - return ElfNone; - } - - const int components = txType.getVectorSize(); - const TBasicType txBasicType = txType.getBasicType(); - - const auto selectFormat = [this,&components](TLayoutFormat v1, TLayoutFormat v2, TLayoutFormat v4) -> TLayoutFormat { - if (intermediate.getNoStorageFormat()) - return ElfNone; - - return components == 1 ? v1 : - components == 2 ? 
v2 : v4; - }; - - switch (txBasicType) { - case EbtFloat: return selectFormat(ElfR32f, ElfRg32f, ElfRgba32f); - case EbtInt: return selectFormat(ElfR32i, ElfRg32i, ElfRgba32i); - case EbtUint: return selectFormat(ElfR32ui, ElfRg32ui, ElfRgba32ui); - default: - error(loc, "unknown basic type in image format", "", ""); - return ElfNone; - } -} - -// -// Both test and if necessary, spit out an error, to see if the node is really -// an l-value that can be operated on this way. -// -// Returns true if there was an error. -// -bool HlslParseContext::lValueErrorCheck(const TSourceLoc& loc, const char* op, TIntermTyped* node) -{ - if (shouldConvertLValue(node)) { - // if we're writing to a texture, it must be an RW form. - - TIntermAggregate* lhsAsAggregate = node->getAsAggregate(); - TIntermTyped* object = lhsAsAggregate->getSequence()[0]->getAsTyped(); - - if (!object->getType().getSampler().isImage()) { - error(loc, "operator[] on a non-RW texture must be an r-value", "", ""); - return true; - } - } - - // Let the base class check errors - return TParseContextBase::lValueErrorCheck(loc, op, node); -} - -// -// This function handles l-value conversions and verifications. It uses, but is not synonymous -// with lValueErrorCheck. That function accepts an l-value directly, while this one must be -// given the surrounding tree - e.g, with an assignment, so we can convert the assign into a -// series of other image operations. -// -// Most things are passed through unmodified, except for error checking. -// -TIntermTyped* HlslParseContext::handleLvalue(const TSourceLoc& loc, const char* op, TIntermTyped*& node) -{ - if (node == nullptr) - return nullptr; - - TIntermBinary* nodeAsBinary = node->getAsBinaryNode(); - TIntermUnary* nodeAsUnary = node->getAsUnaryNode(); - TIntermAggregate* sequence = nullptr; - - TIntermTyped* lhs = nodeAsUnary ? nodeAsUnary->getOperand() : - nodeAsBinary ? nodeAsBinary->getLeft() : - nullptr; - - // Early bail out if there is no conversion to apply - if (!shouldConvertLValue(lhs)) { - if (lhs != nullptr) - if (lValueErrorCheck(loc, op, lhs)) - return nullptr; - return node; - } - - // *** If we get here, we're going to apply some conversion to an l-value. - - // Spin off sampler aliasing - if (node->getAsTyped()->getBasicType() == EbtSampler) - return handleSamplerLvalue(loc, op, node); - - // Helper to create a load. - const auto makeLoad = [&](TIntermSymbol* rhsTmp, TIntermTyped* object, TIntermTyped* coord, const TType& derefType) { - TIntermAggregate* loadOp = new TIntermAggregate(EOpImageLoad); - loadOp->setLoc(loc); - loadOp->getSequence().push_back(object); - loadOp->getSequence().push_back(intermediate.addSymbol(*coord->getAsSymbolNode())); - loadOp->setType(derefType); - - sequence = intermediate.growAggregate(sequence, - intermediate.addAssign(EOpAssign, rhsTmp, loadOp, loc), - loc); - }; - - // Helper to create a store. - const auto makeStore = [&](TIntermTyped* object, TIntermTyped* coord, TIntermSymbol* rhsTmp) { - TIntermAggregate* storeOp = new TIntermAggregate(EOpImageStore); - storeOp->getSequence().push_back(object); - storeOp->getSequence().push_back(coord); - storeOp->getSequence().push_back(intermediate.addSymbol(*rhsTmp)); - storeOp->setLoc(loc); - storeOp->setType(TType(EbtVoid)); - - sequence = intermediate.growAggregate(sequence, storeOp); - }; - - // Helper to create an assign. 
- const auto makeBinary = [&](TOperator op, TIntermTyped* lhs, TIntermTyped* rhs) { - sequence = intermediate.growAggregate(sequence, - intermediate.addBinaryNode(op, lhs, rhs, loc, lhs->getType()), - loc); - }; - - // Helper to complete sequence by adding trailing variable, so we evaluate to the right value. - const auto finishSequence = [&](TIntermSymbol* rhsTmp, const TType& derefType) -> TIntermAggregate* { - // Add a trailing use of the temp, so the sequence returns the proper value. - sequence = intermediate.growAggregate(sequence, intermediate.addSymbol(*rhsTmp)); - sequence->setOperator(EOpSequence); - sequence->setLoc(loc); - sequence->setType(derefType); - - return sequence; - }; - - // Helper to add unary op - const auto makeUnary = [&](TOperator op, TIntermSymbol* rhsTmp) { - sequence = intermediate.growAggregate(sequence, - intermediate.addUnaryNode(op, intermediate.addSymbol(*rhsTmp), loc, - rhsTmp->getType()), - loc); - }; - - // Return true if swizzle or index writes all components of the given variable. - const auto writesAllComponents = [&](TIntermSymbol* var, TIntermBinary* swizzle) -> bool { - if (swizzle == nullptr) // not a swizzle or index - return true; - - // Track which components are being set. - std::array compIsSet; - compIsSet.fill(false); - - const TIntermConstantUnion* asConst = swizzle->getRight()->getAsConstantUnion(); - const TIntermAggregate* asAggregate = swizzle->getRight()->getAsAggregate(); - - // This could be either a direct index, or a swizzle. - if (asConst) { - compIsSet[asConst->getConstArray()[0].getIConst()] = true; - } else if (asAggregate) { - const TIntermSequence& seq = asAggregate->getSequence(); - for (int comp=0; compgetAsConstantUnion()->getConstArray()[0].getIConst()] = true; - } else { - assert(0); - } - - // Return true if all components are being set by the index or swizzle - return std::all_of(compIsSet.begin(), compIsSet.begin() + var->getType().getVectorSize(), - [](bool isSet) { return isSet; } ); - }; - - // Create swizzle matching input swizzle - const auto addSwizzle = [&](TIntermSymbol* var, TIntermBinary* swizzle) -> TIntermTyped* { - if (swizzle) - return intermediate.addBinaryNode(swizzle->getOp(), var, swizzle->getRight(), loc, swizzle->getType()); - else - return var; - }; - - TIntermBinary* lhsAsBinary = lhs->getAsBinaryNode(); - TIntermAggregate* lhsAsAggregate = lhs->getAsAggregate(); - bool lhsIsSwizzle = false; - - // If it's a swizzled L-value, remember the swizzle, and use the LHS. 
- if (lhsAsBinary != nullptr && (lhsAsBinary->getOp() == EOpVectorSwizzle || lhsAsBinary->getOp() == EOpIndexDirect)) { - lhsAsAggregate = lhsAsBinary->getLeft()->getAsAggregate(); - lhsIsSwizzle = true; - } - - TIntermTyped* object = lhsAsAggregate->getSequence()[0]->getAsTyped(); - TIntermTyped* coord = lhsAsAggregate->getSequence()[1]->getAsTyped(); - - const TSampler& texSampler = object->getType().getSampler(); - - TType objDerefType; - getTextureReturnType(texSampler, objDerefType); - - if (nodeAsBinary) { - TIntermTyped* rhs = nodeAsBinary->getRight(); - const TOperator assignOp = nodeAsBinary->getOp(); - - bool isModifyOp = false; - - switch (assignOp) { - case EOpAddAssign: - case EOpSubAssign: - case EOpMulAssign: - case EOpVectorTimesMatrixAssign: - case EOpVectorTimesScalarAssign: - case EOpMatrixTimesScalarAssign: - case EOpMatrixTimesMatrixAssign: - case EOpDivAssign: - case EOpModAssign: - case EOpAndAssign: - case EOpInclusiveOrAssign: - case EOpExclusiveOrAssign: - case EOpLeftShiftAssign: - case EOpRightShiftAssign: - isModifyOp = true; - // fall through... - case EOpAssign: - { - // Since this is an lvalue, we'll convert an image load to a sequence like this - // (to still provide the value): - // OpSequence - // OpImageStore(object, lhs, rhs) - // rhs - // But if it's not a simple symbol RHS (say, a fn call), we don't want to duplicate the RHS, - // so we'll convert instead to this: - // OpSequence - // rhsTmp = rhs - // OpImageStore(object, coord, rhsTmp) - // rhsTmp - // If this is a read-modify-write op, like +=, we issue: - // OpSequence - // coordtmp = load's param1 - // rhsTmp = OpImageLoad(object, coordTmp) - // rhsTmp op= rhs - // OpImageStore(object, coordTmp, rhsTmp) - // rhsTmp - // - // If the lvalue is swizzled, we apply that when writing the temp variable, like so: - // ... - // rhsTmp.some_swizzle = ... - // For partial writes, an error is generated. - - TIntermSymbol* rhsTmp = rhs->getAsSymbolNode(); - TIntermTyped* coordTmp = coord; - - if (rhsTmp == nullptr || isModifyOp || lhsIsSwizzle) { - rhsTmp = makeInternalVariableNode(loc, "storeTemp", objDerefType); - - // Partial updates not yet supported - if (!writesAllComponents(rhsTmp, lhsAsBinary)) { - error(loc, "unimplemented: partial image updates", "", ""); - } - - // Assign storeTemp = rhs - if (isModifyOp) { - // We have to make a temp var for the coordinate, to avoid evaluating it twice. - coordTmp = makeInternalVariableNode(loc, "coordTemp", coord->getType()); - makeBinary(EOpAssign, coordTmp, coord); // coordtmp = load[param1] - makeLoad(rhsTmp, object, coordTmp, objDerefType); // rhsTmp = OpImageLoad(object, coordTmp) - } - - // rhsTmp op= rhs. 
- makeBinary(assignOp, addSwizzle(intermediate.addSymbol(*rhsTmp), lhsAsBinary), rhs); - } - - makeStore(object, coordTmp, rhsTmp); // add a store - return finishSequence(rhsTmp, objDerefType); // return rhsTmp from sequence - } - - default: - break; - } - } - - if (nodeAsUnary) { - const TOperator assignOp = nodeAsUnary->getOp(); - - switch (assignOp) { - case EOpPreIncrement: - case EOpPreDecrement: - { - // We turn this into: - // OpSequence - // coordtmp = load's param1 - // rhsTmp = OpImageLoad(object, coordTmp) - // rhsTmp op - // OpImageStore(object, coordTmp, rhsTmp) - // rhsTmp - - TIntermSymbol* rhsTmp = makeInternalVariableNode(loc, "storeTemp", objDerefType); - TIntermTyped* coordTmp = makeInternalVariableNode(loc, "coordTemp", coord->getType()); - - makeBinary(EOpAssign, coordTmp, coord); // coordtmp = load[param1] - makeLoad(rhsTmp, object, coordTmp, objDerefType); // rhsTmp = OpImageLoad(object, coordTmp) - makeUnary(assignOp, rhsTmp); // op rhsTmp - makeStore(object, coordTmp, rhsTmp); // OpImageStore(object, coordTmp, rhsTmp) - return finishSequence(rhsTmp, objDerefType); // return rhsTmp from sequence - } - - case EOpPostIncrement: - case EOpPostDecrement: - { - // We turn this into: - // OpSequence - // coordtmp = load's param1 - // rhsTmp1 = OpImageLoad(object, coordTmp) - // rhsTmp2 = rhsTmp1 - // rhsTmp2 op - // OpImageStore(object, coordTmp, rhsTmp2) - // rhsTmp1 (pre-op value) - TIntermSymbol* rhsTmp1 = makeInternalVariableNode(loc, "storeTempPre", objDerefType); - TIntermSymbol* rhsTmp2 = makeInternalVariableNode(loc, "storeTempPost", objDerefType); - TIntermTyped* coordTmp = makeInternalVariableNode(loc, "coordTemp", coord->getType()); - - makeBinary(EOpAssign, coordTmp, coord); // coordtmp = load[param1] - makeLoad(rhsTmp1, object, coordTmp, objDerefType); // rhsTmp1 = OpImageLoad(object, coordTmp) - makeBinary(EOpAssign, rhsTmp2, rhsTmp1); // rhsTmp2 = rhsTmp1 - makeUnary(assignOp, rhsTmp2); // rhsTmp op - makeStore(object, coordTmp, rhsTmp2); // OpImageStore(object, coordTmp, rhsTmp2) - return finishSequence(rhsTmp1, objDerefType); // return rhsTmp from sequence - } - - default: - break; - } - } - - if (lhs) - if (lValueErrorCheck(loc, op, lhs)) - return nullptr; - - return node; -} - -// Deal with sampler aliasing: turning assignments into aliases -// Return a placeholder node for higher-level code that think assignments must -// generate code. -TIntermTyped* HlslParseContext::handleSamplerLvalue(const TSourceLoc& loc, const char* op, TIntermTyped*& node) -{ - // Can only alias an assignment: "s1 = s2" - TIntermBinary* binary = node->getAsBinaryNode(); - if (binary == nullptr || node->getAsOperator()->getOp() != EOpAssign || - binary->getLeft()->getAsSymbolNode() == nullptr || - binary->getRight()->getAsSymbolNode() == nullptr) { - error(loc, "can't modify sampler", op, ""); - return node; - } - - if (controlFlowNestingLevel > 0) - warn(loc, "sampler or image aliased under control flow; consumption must be in same path", op, ""); - - TIntermTyped* set = setOpaqueLvalue(binary->getLeft(), binary->getRight()); - if (set == nullptr) - warn(loc, "could not create alias for sampler", op, ""); - else - node = set; - - return node; -} - -// Do an opaque assignment that needs to turn into an alias. -// Return nullptr if it can't be done, otherwise return a placeholder -// node for higher-level code that think assignments must generate code. 
-TIntermTyped* HlslParseContext::setOpaqueLvalue(TIntermTyped* leftTyped, TIntermTyped* rightTyped)
-{
-    // Best is if we are aliasing a flattened struct member "S.s1 = s2",
-    // in which case we want to update the flattening information with the alias,
-    // making everything else work seamlessly.
-    TIntermSymbol* left = leftTyped->getAsSymbolNode();
-    TIntermSymbol* right = rightTyped->getAsSymbolNode();
-    for (auto fit = flattenMap.begin(); fit != flattenMap.end(); ++fit) {
-        for (auto mit = fit->second.members.begin(); mit != fit->second.members.end(); ++mit) {
-            if ((*mit)->getUniqueId() == left->getId()) {
-                // found it: update with alias to the existing variable, and don't emit any code
-                (*mit) = new TVariable(&right->getName(), right->getType());
-                (*mit)->setUniqueId(right->getId());
-                // replace node (rest of compiler expects either an error or code to generate)
-                // with pointless access
-                return right;
-            }
-        }
-    }
-
-    return nullptr;
-}
-
-void HlslParseContext::handlePragma(const TSourceLoc& loc, const TVector<TString>& tokens)
-{
-    if (pragmaCallback)
-        pragmaCallback(loc.line, tokens);
-
-    if (tokens.size() == 0)
-        return;
-
-    // These pragmas are case insensitive in HLSL, so we'll compare in lower case.
-    TVector<TString> lowerTokens = tokens;
-
-    for (auto it = lowerTokens.begin(); it != lowerTokens.end(); ++it)
-        std::transform(it->begin(), it->end(), it->begin(), ::tolower);
-
-    // Handle pack_matrix
-    if (tokens.size() == 4 && lowerTokens[0] == "pack_matrix" && tokens[1] == "(" && tokens[3] == ")") {
-        // Note that HLSL semantic order is Mrc, not Mcr like SPIR-V, so we reverse the sense.
-        // Row major becomes column major and vice versa.
-
-        if (lowerTokens[2] == "row_major") {
-            globalUniformDefaults.layoutMatrix = globalBufferDefaults.layoutMatrix = ElmColumnMajor;
-        } else if (lowerTokens[2] == "column_major") {
-            globalUniformDefaults.layoutMatrix = globalBufferDefaults.layoutMatrix = ElmRowMajor;
-        } else {
-            // unknown majorness strings are treated as (HLSL column major)==(SPIR-V row major)
-            warn(loc, "unknown pack_matrix pragma value", tokens[2].c_str(), "");
-            globalUniformDefaults.layoutMatrix = globalBufferDefaults.layoutMatrix = ElmRowMajor;
-        }
-    }
-}
-
-//
-// Look at a '.' matrix selector string and change it into components
-// for a matrix. There are two types:
-//
-//   _21    second row, first column (one based)
-//   _m21   third row, second column (zero based)
-//
-// Returns true if there is no error.
-//
-bool HlslParseContext::parseMatrixSwizzleSelector(const TSourceLoc& loc, const TString& fields, int cols, int rows,
-                                                  TSwizzleSelectors<TMatrixSelector>& components)
-{
-    int startPos[MaxSwizzleSelectors];
-    int numComps = 0;
-    TString compString = fields;
-
-    // Find where each component starts,
-    // recording the first character position after the '_'.
- for (size_t c = 0; c < compString.size(); ++c) { - if (compString[c] == '_') { - if (numComps >= MaxSwizzleSelectors) { - error(loc, "matrix component swizzle has too many components", compString.c_str(), ""); - return false; - } - if (c > compString.size() - 3 || - ((compString[c+1] == 'm' || compString[c+1] == 'M') && c > compString.size() - 4)) { - error(loc, "matrix component swizzle missing", compString.c_str(), ""); - return false; - } - startPos[numComps++] = (int)c + 1; - } - } - - // Process each component - for (int i = 0; i < numComps; ++i) { - int pos = startPos[i]; - int bias = -1; - if (compString[pos] == 'm' || compString[pos] == 'M') { - bias = 0; - ++pos; - } - TMatrixSelector comp; - comp.coord1 = compString[pos+0] - '0' + bias; - comp.coord2 = compString[pos+1] - '0' + bias; - if (comp.coord1 < 0 || comp.coord1 >= cols) { - error(loc, "matrix row component out of range", compString.c_str(), ""); - return false; - } - if (comp.coord2 < 0 || comp.coord2 >= rows) { - error(loc, "matrix column component out of range", compString.c_str(), ""); - return false; - } - components.push_back(comp); - } - - return true; -} - -// If the 'comps' express a column of a matrix, -// return the column. Column means the first coords all match. -// -// Otherwise, return -1. -// -int HlslParseContext::getMatrixComponentsColumn(int rows, const TSwizzleSelectors& selector) -{ - int col = -1; - - // right number of comps? - if (selector.size() != rows) - return -1; - - // all comps in the same column? - // rows in order? - col = selector[0].coord1; - for (int i = 0; i < rows; ++i) { - if (col != selector[i].coord1) - return -1; - if (i != selector[i].coord2) - return -1; - } - - return col; -} - -// -// Handle seeing a variable identifier in the grammar. -// -TIntermTyped* HlslParseContext::handleVariable(const TSourceLoc& loc, const TString* string) -{ - int thisDepth; - TSymbol* symbol = symbolTable.find(*string, thisDepth); - if (symbol && symbol->getAsVariable() && symbol->getAsVariable()->isUserType()) { - error(loc, "expected symbol, not user-defined type", string->c_str(), ""); - return nullptr; - } - - // Error check for requiring specific extensions present. - if (symbol && symbol->getNumExtensions()) - requireExtensions(loc, symbol->getNumExtensions(), symbol->getExtensions(), symbol->getName().c_str()); - - const TVariable* variable = nullptr; - const TAnonMember* anon = symbol ? symbol->getAsAnonMember() : nullptr; - TIntermTyped* node = nullptr; - if (anon) { - // It was a member of an anonymous container, which could be a 'this' structure. - - // Create a subtree for its dereference. - if (thisDepth > 0) { - variable = getImplicitThis(thisDepth); - if (variable == nullptr) - error(loc, "cannot access member variables (static member function?)", "this", ""); - } - if (variable == nullptr) - variable = anon->getAnonContainer().getAsVariable(); - - TIntermTyped* container = intermediate.addSymbol(*variable, loc); - TIntermTyped* constNode = intermediate.addConstantUnion(anon->getMemberNumber(), loc); - node = intermediate.addIndex(EOpIndexDirectStruct, container, constNode, loc); - - node->setType(*(*variable->getType().getStruct())[anon->getMemberNumber()].type); - if (node->getType().hiddenMember()) - error(loc, "member of nameless block was not redeclared", string->c_str(), ""); - } else { - // Not a member of an anonymous container. - - // The symbol table search was done in the lexical phase. - // See if it was a variable. - variable = symbol ? 
symbol->getAsVariable() : nullptr; - if (variable) { - if ((variable->getType().getBasicType() == EbtBlock || - variable->getType().getBasicType() == EbtStruct) && variable->getType().getStruct() == nullptr) { - error(loc, "cannot be used (maybe an instance name is needed)", string->c_str(), ""); - variable = nullptr; - } - } else { - if (symbol) - error(loc, "variable name expected", string->c_str(), ""); - } - - // Recovery, if it wasn't found or was not a variable. - if (variable == nullptr) { - error(loc, "unknown variable", string->c_str(), ""); - variable = new TVariable(string, TType(EbtVoid)); - } - - if (variable->getType().getQualifier().isFrontEndConstant()) - node = intermediate.addConstantUnion(variable->getConstArray(), variable->getType(), loc); - else - node = intermediate.addSymbol(*variable, loc); - } - - if (variable->getType().getQualifier().isIo()) - intermediate.addIoAccessed(*string); - - return node; -} - -// -// Handle operator[] on any objects it applies to. Currently: -// Textures -// Buffers -// -TIntermTyped* HlslParseContext::handleBracketOperator(const TSourceLoc& loc, TIntermTyped* base, TIntermTyped* index) -{ - // handle r-value operator[] on textures and images. l-values will be processed later. - if (base->getType().getBasicType() == EbtSampler && !base->isArray()) { - const TSampler& sampler = base->getType().getSampler(); - if (sampler.isImage() || sampler.isTexture()) { - if (! mipsOperatorMipArg.empty() && mipsOperatorMipArg.back().mipLevel == nullptr) { - // The first operator[] to a .mips[] sequence is the mip level. We'll remember it. - mipsOperatorMipArg.back().mipLevel = index; - return base; // next [] index is to the same base. - } else { - TIntermAggregate* load = new TIntermAggregate(sampler.isImage() ? EOpImageLoad : EOpTextureFetch); - - TType sampReturnType; - getTextureReturnType(sampler, sampReturnType); - - load->setType(sampReturnType); - load->setLoc(loc); - load->getSequence().push_back(base); - load->getSequence().push_back(index); - - // Textures need a MIP. If we saw one go by, use it. Otherwise, use zero. - if (sampler.isTexture()) { - if (! mipsOperatorMipArg.empty()) { - load->getSequence().push_back(mipsOperatorMipArg.back().mipLevel); - mipsOperatorMipArg.pop_back(); - } else { - load->getSequence().push_back(intermediate.addConstantUnion(0, loc, true)); - } - } - - return load; - } - } - } - - // Handle operator[] on structured buffers: this indexes into the array element of the buffer. - // indexStructBufferContent returns nullptr if it isn't a structuredbuffer (SSBO). - TIntermTyped* sbArray = indexStructBufferContent(loc, base); - if (sbArray != nullptr) { - if (sbArray == nullptr) - return nullptr; - - // Now we'll apply the [] index to that array - const TOperator idxOp = (index->getQualifier().storage == EvqConst) ? 
EOpIndexDirect : EOpIndexIndirect; - - TIntermTyped* element = intermediate.addIndex(idxOp, sbArray, index, loc); - const TType derefType(sbArray->getType(), 0); - element->setType(derefType); - return element; - } - - return nullptr; -} - -// -// Cast index value to a uint if it isn't already (for operator[], load indexes, etc) -TIntermTyped* HlslParseContext::makeIntegerIndex(TIntermTyped* index) -{ - const TBasicType indexBasicType = index->getType().getBasicType(); - const int vecSize = index->getType().getVectorSize(); - - // We can use int types directly as the index - if (indexBasicType == EbtInt || indexBasicType == EbtUint || - indexBasicType == EbtInt64 || indexBasicType == EbtUint64) - return index; - - // Cast index to unsigned integer if it isn't one. - return intermediate.addConversion(EOpConstructUint, TType(EbtUint, EvqTemporary, vecSize), index); -} - -// -// Handle seeing a base[index] dereference in the grammar. -// -TIntermTyped* HlslParseContext::handleBracketDereference(const TSourceLoc& loc, TIntermTyped* base, TIntermTyped* index) -{ - index = makeIntegerIndex(index); - - if (index == nullptr) { - error(loc, " unknown index type ", "", ""); - return nullptr; - } - - TIntermTyped* result = handleBracketOperator(loc, base, index); - - if (result != nullptr) - return result; // it was handled as an operator[] - - bool flattened = false; - int indexValue = 0; - if (index->getQualifier().storage == EvqConst) { - indexValue = index->getAsConstantUnion()->getConstArray()[0].getIConst(); - checkIndex(loc, base->getType(), indexValue); - } - - variableCheck(base); - if (! base->isArray() && ! base->isMatrix() && ! base->isVector()) { - if (base->getAsSymbolNode()) - error(loc, " left of '[' is not of type array, matrix, or vector ", - base->getAsSymbolNode()->getName().c_str(), ""); - else - error(loc, " left of '[' is not of type array, matrix, or vector ", "expression", ""); - } else if (base->getType().getQualifier().storage == EvqConst && index->getQualifier().storage == EvqConst) - return intermediate.foldDereference(base, indexValue, loc); - else { - // at least one of base and index is variable... - - if (base->getAsSymbolNode() && wasFlattened(base)) { - if (index->getQualifier().storage != EvqConst) - error(loc, "Invalid variable index to flattened array", base->getAsSymbolNode()->getName().c_str(), ""); - - result = flattenAccess(base, indexValue); - flattened = (result != base); - } else { - if (index->getQualifier().storage == EvqConst) { - if (base->getType().isImplicitlySizedArray()) - updateImplicitArraySize(loc, base, indexValue); - result = intermediate.addIndex(EOpIndexDirect, base, index, loc); - } else { - result = intermediate.addIndex(EOpIndexIndirect, base, index, loc); - } - } - } - - if (result == nullptr) { - // Insert dummy error-recovery result - result = intermediate.addConstantUnion(0.0, EbtFloat, loc); - } else { - // If the array reference was flattened, it has the correct type. E.g, if it was - // a uniform array, it was flattened INTO a set of scalar uniforms, not scalar temps. - // In that case, we preserve the qualifiers. 
- if (!flattened) { - // Insert valid dereferenced result - TType newType(base->getType(), 0); // dereferenced type - if (base->getType().getQualifier().storage == EvqConst && index->getQualifier().storage == EvqConst) - newType.getQualifier().storage = EvqConst; - else - newType.getQualifier().storage = EvqTemporary; - result->setType(newType); - } - } - - return result; -} - -void HlslParseContext::checkIndex(const TSourceLoc& /*loc*/, const TType& /*type*/, int& /*index*/) -{ - // HLSL todo: any rules for index fixups? -} - -// Handle seeing a binary node with a math operation. -TIntermTyped* HlslParseContext::handleBinaryMath(const TSourceLoc& loc, const char* str, TOperator op, - TIntermTyped* left, TIntermTyped* right) -{ - TIntermTyped* result = intermediate.addBinaryMath(op, left, right, loc); - if (result == nullptr) - binaryOpError(loc, str, left->getCompleteString(), right->getCompleteString()); - - return result; -} - -// Handle seeing a unary node with a math operation. -TIntermTyped* HlslParseContext::handleUnaryMath(const TSourceLoc& loc, const char* str, TOperator op, - TIntermTyped* childNode) -{ - TIntermTyped* result = intermediate.addUnaryMath(op, childNode, loc); - - if (result) - return result; - else - unaryOpError(loc, str, childNode->getCompleteString()); - - return childNode; -} -// -// Return true if the name is a struct buffer method -// -bool HlslParseContext::isStructBufferMethod(const TString& name) const -{ - return - name == "GetDimensions" || - name == "Load" || - name == "Load2" || - name == "Load3" || - name == "Load4" || - name == "Store" || - name == "Store2" || - name == "Store3" || - name == "Store4" || - name == "InterlockedAdd" || - name == "InterlockedAnd" || - name == "InterlockedCompareExchange" || - name == "InterlockedCompareStore" || - name == "InterlockedExchange" || - name == "InterlockedMax" || - name == "InterlockedMin" || - name == "InterlockedOr" || - name == "InterlockedXor" || - name == "IncrementCounter" || - name == "DecrementCounter" || - name == "Append" || - name == "Consume"; -} - -// -// Handle seeing a base.field dereference in the grammar, where 'field' is a -// swizzle or member variable. -// -TIntermTyped* HlslParseContext::handleDotDereference(const TSourceLoc& loc, TIntermTyped* base, const TString& field) -{ - variableCheck(base); - - if (base->isArray()) { - error(loc, "cannot apply to an array:", ".", field.c_str()); - return base; - } - - TIntermTyped* result = base; - - if (base->getType().getBasicType() == EbtSampler) { - // Handle .mips[mipid][pos] operation on textures - const TSampler& sampler = base->getType().getSampler(); - if (sampler.isTexture() && field == "mips") { - // Push a null to signify that we expect a mip level under operator[] next. - mipsOperatorMipArg.push_back(tMipsOperatorData(loc, nullptr)); - // Keep 'result' pointing to 'base', since we expect an operator[] to go by next. 
- } else { - if (field == "mips") - error(loc, "unexpected texture type for .mips[][] operator:", - base->getType().getCompleteString().c_str(), ""); - else - error(loc, "unexpected operator on texture type:", field.c_str(), - base->getType().getCompleteString().c_str()); - } - } else if (base->isVector() || base->isScalar()) { - TSwizzleSelectors selectors; - parseSwizzleSelector(loc, field, base->getVectorSize(), selectors); - - if (base->isScalar()) { - if (selectors.size() == 1) - return result; - else { - TType type(base->getBasicType(), EvqTemporary, selectors.size()); - return addConstructor(loc, base, type); - } - } - if (base->getVectorSize() == 1) { - TType scalarType(base->getBasicType(), EvqTemporary, 1); - if (selectors.size() == 1) - return addConstructor(loc, base, scalarType); - else { - TType vectorType(base->getBasicType(), EvqTemporary, selectors.size()); - return addConstructor(loc, addConstructor(loc, base, scalarType), vectorType); - } - } - - if (base->getType().getQualifier().isFrontEndConstant()) - result = intermediate.foldSwizzle(base, selectors, loc); - else { - if (selectors.size() == 1) { - TIntermTyped* index = intermediate.addConstantUnion(selectors[0], loc); - result = intermediate.addIndex(EOpIndexDirect, base, index, loc); - result->setType(TType(base->getBasicType(), EvqTemporary)); - } else { - TIntermTyped* index = intermediate.addSwizzle(selectors, loc); - result = intermediate.addIndex(EOpVectorSwizzle, base, index, loc); - result->setType(TType(base->getBasicType(), EvqTemporary, base->getType().getQualifier().precision, - selectors.size())); - } - } - } else if (base->isMatrix()) { - TSwizzleSelectors selectors; - if (! parseMatrixSwizzleSelector(loc, field, base->getMatrixCols(), base->getMatrixRows(), selectors)) - return result; - - if (selectors.size() == 1) { - // Representable by m[c][r] - if (base->getType().getQualifier().isFrontEndConstant()) { - result = intermediate.foldDereference(base, selectors[0].coord1, loc); - result = intermediate.foldDereference(result, selectors[0].coord2, loc); - } else { - result = intermediate.addIndex(EOpIndexDirect, base, - intermediate.addConstantUnion(selectors[0].coord1, loc), - loc); - TType dereferencedCol(base->getType(), 0); - result->setType(dereferencedCol); - result = intermediate.addIndex(EOpIndexDirect, result, - intermediate.addConstantUnion(selectors[0].coord2, loc), - loc); - TType dereferenced(dereferencedCol, 0); - result->setType(dereferenced); - } - } else { - int column = getMatrixComponentsColumn(base->getMatrixRows(), selectors); - if (column >= 0) { - // Representable by m[c] - if (base->getType().getQualifier().isFrontEndConstant()) - result = intermediate.foldDereference(base, column, loc); - else { - result = intermediate.addIndex(EOpIndexDirect, base, intermediate.addConstantUnion(column, loc), - loc); - TType dereferenced(base->getType(), 0); - result->setType(dereferenced); - } - } else { - // general case, not a column, not a single component - TIntermTyped* index = intermediate.addSwizzle(selectors, loc); - result = intermediate.addIndex(EOpMatrixSwizzle, base, index, loc); - result->setType(TType(base->getBasicType(), EvqTemporary, base->getType().getQualifier().precision, - selectors.size())); - } - } - } else if (base->getBasicType() == EbtStruct || base->getBasicType() == EbtBlock) { - const TTypeList* fields = base->getType().getStruct(); - bool fieldFound = false; - int member; - for (member = 0; member < (int)fields->size(); ++member) { - if 
((*fields)[member].type->getFieldName() == field) { - fieldFound = true; - break; - } - } - if (fieldFound) { - if (base->getAsSymbolNode() && wasFlattened(base)) { - result = flattenAccess(base, member); - } else { - if (base->getType().getQualifier().storage == EvqConst) - result = intermediate.foldDereference(base, member, loc); - else { - TIntermTyped* index = intermediate.addConstantUnion(member, loc); - result = intermediate.addIndex(EOpIndexDirectStruct, base, index, loc); - result->setType(*(*fields)[member].type); - } - } - } else - error(loc, "no such field in structure", field.c_str(), ""); - } else - error(loc, "does not apply to this type:", field.c_str(), base->getType().getCompleteString().c_str()); - - return result; -} - -// -// Return true if the field should be treated as a built-in method. -// Return false otherwise. -// -bool HlslParseContext::isBuiltInMethod(const TSourceLoc&, TIntermTyped* base, const TString& field) -{ - if (base == nullptr) - return false; - - variableCheck(base); - - if (base->getType().getBasicType() == EbtSampler) { - return true; - } else if (isStructBufferType(base->getType()) && isStructBufferMethod(field)) { - return true; - } else if (field == "Append" || - field == "RestartStrip") { - // We cannot check the type here: it may be sanitized if we're not compiling a geometry shader, but - // the code is around in the shader source. - return true; - } else - return false; -} - -// Independently establish a built-in that is a member of a structure. -// 'arraySizes' are what's desired for the independent built-in, whatever -// the higher-level source/expression of them was. -void HlslParseContext::splitBuiltIn(const TString& baseName, const TType& memberType, const TArraySizes* arraySizes, - const TQualifier& outerQualifier) -{ - // Because of arrays of structs, we might be asked more than once, - // but the arraySizes passed in should have captured the whole thing - // the first time. - // However, clip/cull rely on multiple updates. - if (!isClipOrCullDistance(memberType)) - if (splitBuiltIns.find(tInterstageIoData(memberType.getQualifier().builtIn, outerQualifier.storage)) != - splitBuiltIns.end()) - return; - - TVariable* ioVar = makeInternalVariable(baseName + "." + memberType.getFieldName(), memberType); - - if (arraySizes != nullptr && !memberType.isArray()) - ioVar->getWritableType().newArraySizes(*arraySizes); - - fixBuiltInIoType(ioVar->getWritableType()); - - splitBuiltIns[tInterstageIoData(memberType.getQualifier().builtIn, outerQualifier.storage)] = ioVar; - if (!isClipOrCullDistance(ioVar->getType())) - trackLinkage(*ioVar); - - // Merge qualifier from the user structure - mergeQualifiers(ioVar->getWritableType().getQualifier(), outerQualifier); - // But, not location, we're losing that - ioVar->getWritableType().getQualifier().layoutLocation = TQualifier::layoutLocationEnd; -} - -// Split a type into -// 1. a struct of non-I/O members -// 2. a collection of independent I/O variables -void HlslParseContext::split(const TVariable& variable) -{ - // Create a new variable: - const TType& clonedType = *variable.getType().clone(); - const TType& splitType = split(clonedType, variable.getName(), clonedType.getQualifier()); - splitNonIoVars[variable.getUniqueId()] = makeInternalVariable(variable.getName(), splitType); -} - -// Recursive implementation of split(). -// Returns reference to the modified type. 
-const TType& HlslParseContext::split(const TType& type, const TString& name, const TQualifier& outerQualifier) -{ - if (type.isStruct()) { - TTypeList* userStructure = type.getWritableStruct(); - for (auto ioType = userStructure->begin(); ioType != userStructure->end(); ) { - if (ioType->type->isBuiltIn()) { - // move out the built-in - splitBuiltIn(name, *ioType->type, type.getArraySizes(), outerQualifier); - ioType = userStructure->erase(ioType); - } else { - split(*ioType->type, name + "." + ioType->type->getFieldName(), outerQualifier); - ++ioType; - } - } - } - - return type; -} - -// Is this a uniform array or structure which should be flattened? -bool HlslParseContext::shouldFlatten(const TType& type) const -{ - const TStorageQualifier qualifier = type.getQualifier().storage; - - return (qualifier == EvqUniform && type.isArray() && intermediate.getFlattenUniformArrays()) || - (type.isStruct() && type.containsOpaque()); -} - -// Top level variable flattening: construct data -void HlslParseContext::flatten(const TVariable& variable, bool linkage) -{ - const TType& type = variable.getType(); - - // If it's a standalone built-in, there is nothing to flatten - if (type.isBuiltIn() && !type.isStruct()) - return; - - auto entry = flattenMap.insert(std::make_pair(variable.getUniqueId(), - TFlattenData(type.getQualifier().layoutBinding, - type.getQualifier().layoutLocation))); - - // the item is a map pair, so first->second is the TFlattenData itself. - flatten(variable, type, entry.first->second, variable.getName(), linkage, type.getQualifier(), nullptr); -} - -// Recursively flatten the given variable at the provided type, building the flattenData as we go. -// -// This is mutually recursive with flattenStruct and flattenArray. -// We are going to flatten an arbitrarily nested composite structure into a linear sequence of -// members, and later on, we want to turn a path through the tree structure into a final -// location in this linear sequence. -// -// If the tree was N-ary, that can be directly calculated. However, we are dealing with -// arbitrary numbers - perhaps a struct of 7 members containing an array of 3. Thus, we must -// build a data structure to allow the sequence of bracket and dot operators on arrays and -// structs to arrive at the proper member. -// -// To avoid storing a tree with pointers, we are going to flatten the tree into a vector of integers. -// The leaves are the indexes into the flattened member array. -// Each level will have the next location for the Nth item stored sequentially, so for instance: -// -// struct { float2 a[2]; int b; float4 c[3] }; -// -// This will produce the following flattened tree: -// Pos: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 -// (3, 7, 8, 5, 6, 0, 1, 2, 11, 12, 13, 3, 4, 5} -// -// Given a reference to mystruct.c[1], the access chain is (2,1), so we traverse: -// (0+2) = 8 --> (8+1) = 12 --> 12 = 4 -// -// so the 4th flattened member in traversal order is ours. -// -int HlslParseContext::flatten(const TVariable& variable, const TType& type, - TFlattenData& flattenData, TString name, bool linkage, - const TQualifier& outerQualifier, - const TArraySizes* builtInArraySizes) -{ - // If something is an arrayed struct, the array flattener will recursively call flatten() - // to then flatten the struct, so this is an "if else": we don't do both. 
-    if (type.isArray())
-        return flattenArray(variable, type, flattenData, name, linkage, outerQualifier);
-    else if (type.isStruct())
-        return flattenStruct(variable, type, flattenData, name, linkage, outerQualifier, builtInArraySizes);
-    else {
-        assert(0); // should never happen
-        return -1;
-    }
-}
-
-// Add a single flattened member to the flattened data being tracked for the composite
-// Returns true for the final flattening level.
-int HlslParseContext::addFlattenedMember(const TVariable& variable, const TType& type, TFlattenData& flattenData,
-                                         const TString& memberName, bool linkage,
-                                         const TQualifier& outerQualifier,
-                                         const TArraySizes* builtInArraySizes)
-{
-    if (isFinalFlattening(type)) {
-        // This is as far as we flatten. Insert the variable.
-        TVariable* memberVariable = makeInternalVariable(memberName, type);
-        mergeQualifiers(memberVariable->getWritableType().getQualifier(), variable.getType().getQualifier());
-
-        if (flattenData.nextBinding != TQualifier::layoutBindingEnd)
-            memberVariable->getWritableType().getQualifier().layoutBinding = flattenData.nextBinding++;
-
-        if (memberVariable->getType().isBuiltIn()) {
-            // inherited locations are nonsensical for built-ins (TODO: what if semantic had a number)
-            memberVariable->getWritableType().getQualifier().layoutLocation = TQualifier::layoutLocationEnd;
-        } else {
-            // inherited locations must be auto bumped, not replicated
-            if (flattenData.nextLocation != TQualifier::layoutLocationEnd) {
-                memberVariable->getWritableType().getQualifier().layoutLocation = flattenData.nextLocation;
-                flattenData.nextLocation += intermediate.computeTypeLocationSize(memberVariable->getType());
-                nextOutLocation = std::max(nextOutLocation, flattenData.nextLocation);
-            }
-        }
-
-        flattenData.offsets.push_back(static_cast<int>(flattenData.members.size()));
-        flattenData.members.push_back(memberVariable);
-
-        if (linkage)
-            trackLinkage(*memberVariable);
-
-        return static_cast<int>(flattenData.offsets.size()) - 1; // location of the member reference
-    } else {
-        // Further recursion required
-        return flatten(variable, type, flattenData, memberName, linkage, outerQualifier, builtInArraySizes);
-    }
-}
-
-// Figure out the mapping between an aggregate's top members and an
-// equivalent set of individual variables.
-//
-// Assumes shouldFlatten() or equivalent was called first.
-int HlslParseContext::flattenStruct(const TVariable& variable, const TType& type,
-                                    TFlattenData& flattenData, TString name, bool linkage,
-                                    const TQualifier& outerQualifier,
-                                    const TArraySizes* builtInArraySizes)
-{
-    assert(type.isStruct());
-
-    auto members = *type.getStruct();
-
-    // Reserve space for this tree level.
-    int start = static_cast<int>(flattenData.offsets.size());
-    int pos = start;
-    flattenData.offsets.resize(int(pos + members.size()), -1);
-
-    for (int member = 0; member < (int)members.size(); ++member) {
-        TType& dereferencedType = *members[member].type;
-        if (dereferencedType.isBuiltIn())
-            splitBuiltIn(variable.getName(), dereferencedType, builtInArraySizes, outerQualifier);
-        else {
-            const int mpos = addFlattenedMember(variable, dereferencedType, flattenData,
-                                                name + "." + dereferencedType.getFieldName(),
-                                                linkage, outerQualifier,
-                                                builtInArraySizes == nullptr && dereferencedType.isArray()
-                                                    ? &dereferencedType.getArraySizes()
-                                                    : builtInArraySizes);
-            flattenData.offsets[pos++] = mpos;
-        }
-    }
-
-    return start;
-}
-
-// Figure out mapping between an array's members and an
-// equivalent set of individual variables.
-// -// Assumes shouldFlatten() or equivalent was called first. -int HlslParseContext::flattenArray(const TVariable& variable, const TType& type, - TFlattenData& flattenData, TString name, bool linkage, - const TQualifier& outerQualifier) -{ - assert(type.isArray() && !type.isImplicitlySizedArray()); - - const int size = type.getOuterArraySize(); - const TType dereferencedType(type, 0); - - if (name.empty()) - name = variable.getName(); - - // Reserve space for this tree level. - int start = static_cast(flattenData.offsets.size()); - int pos = start; - flattenData.offsets.resize(int(pos + size), -1); - - for (int element=0; element < size; ++element) { - char elementNumBuf[20]; // sufficient for MAXINT - snprintf(elementNumBuf, sizeof(elementNumBuf)-1, "[%d]", element); - const int mpos = addFlattenedMember(variable, dereferencedType, flattenData, - name + elementNumBuf, linkage, outerQualifier, - type.getArraySizes()); - - flattenData.offsets[pos++] = mpos; - } - - return start; -} - -// Return true if we have flattened this node. -bool HlslParseContext::wasFlattened(const TIntermTyped* node) const -{ - return node != nullptr && node->getAsSymbolNode() != nullptr && - wasFlattened(node->getAsSymbolNode()->getId()); -} - -// Return true if we have split this structure -bool HlslParseContext::wasSplit(const TIntermTyped* node) const -{ - return node != nullptr && node->getAsSymbolNode() != nullptr && - wasSplit(node->getAsSymbolNode()->getId()); -} - -// Turn an access into an aggregate that was flattened to instead be -// an access to the individual variable the member was flattened to. -// Assumes wasFlattened() or equivalent was called first. -TIntermTyped* HlslParseContext::flattenAccess(TIntermTyped* base, int member) -{ - const TType dereferencedType(base->getType(), member); // dereferenced type - const TIntermSymbol& symbolNode = *base->getAsSymbolNode(); - TIntermTyped* flattened = flattenAccess(symbolNode.getId(), member, dereferencedType, symbolNode.getFlattenSubset()); - - return flattened ? flattened : base; -} -TIntermTyped* HlslParseContext::flattenAccess(int uniqueId, int member, const TType& dereferencedType, int subset) -{ - const auto flattenData = flattenMap.find(uniqueId); - - if (flattenData == flattenMap.end()) - return nullptr; - - // Calculate new cumulative offset from the packed tree - int newSubset = flattenData->second.offsets[subset >= 0 ? subset + member : member]; - - TIntermSymbol* subsetSymbol; - if (isFinalFlattening(dereferencedType)) { - // Finished flattening: create symbol for variable - member = flattenData->second.offsets[newSubset]; - const TVariable* memberVariable = flattenData->second.members[member]; - subsetSymbol = intermediate.addSymbol(*memberVariable); - subsetSymbol->setFlattenSubset(-1); - } else { - - // If this is not the final flattening, accumulate the position and return - // an object of the partially dereferenced type. - subsetSymbol = new TIntermSymbol(uniqueId, "flattenShadow", dereferencedType); - subsetSymbol->setFlattenSubset(newSubset); - } - - return subsetSymbol; -} - -// Find and return the split IO TVariable for id, or nullptr if none. -TVariable* HlslParseContext::getSplitNonIoVar(int id) const -{ - const auto splitNonIoVar = splitNonIoVars.find(id); - if (splitNonIoVar == splitNonIoVars.end()) - return nullptr; - - return splitNonIoVar->second; -} - -// Pass through to base class after remembering built-in mappings. 
-void HlslParseContext::trackLinkage(TSymbol& symbol) -{ - TBuiltInVariable biType = symbol.getType().getQualifier().builtIn; - - if (biType != EbvNone) - builtInTessLinkageSymbols[biType] = symbol.clone(); - - TParseContextBase::trackLinkage(symbol); -} - - -// Returns true if the built-in is a clip or cull distance variable. -bool HlslParseContext::isClipOrCullDistance(TBuiltInVariable builtIn) -{ - return builtIn == EbvClipDistance || builtIn == EbvCullDistance; -} - -// Some types require fixed array sizes in SPIR-V, but can be scalars or -// arrays of sizes SPIR-V doesn't allow. For example, tessellation factors. -// This creates the right size. A conversion is performed when the internal -// type is copied to or from the external type. This corrects the externally -// facing input or output type to abide downstream semantics. -void HlslParseContext::fixBuiltInIoType(TType& type) -{ - int requiredArraySize = 0; - - switch (type.getQualifier().builtIn) { - case EbvTessLevelOuter: requiredArraySize = 4; break; - case EbvTessLevelInner: requiredArraySize = 2; break; - - case EbvTessCoord: - { - // tesscoord is always a vec3 for the IO variable, no matter the shader's - // declared vector size. - TType tessCoordType(type.getBasicType(), type.getQualifier().storage, 3); - - tessCoordType.getQualifier() = type.getQualifier(); - type.shallowCopy(tessCoordType); - - break; - } - default: - if (isClipOrCullDistance(type)) { - if (type.getQualifier().builtIn == EbvClipDistance) { - clipSemanticNSize[type.getQualifier().layoutLocation] = type.getVectorSize(); - } else { - cullSemanticNSize[type.getQualifier().layoutLocation] = type.getVectorSize(); - } - } - - return; - } - - // Alter or set array size as needed. - if (requiredArraySize > 0) { - if (!type.isArray() || type.getOuterArraySize() != requiredArraySize) { - TArraySizes arraySizes; - arraySizes.addInnerSize(requiredArraySize); - type.newArraySizes(arraySizes); - } - } -} - -// Variables that correspond to the user-interface in and out of a stage -// (not the built-in interface) are -// - assigned locations -// - registered as a linkage node (part of the stage's external interface). -// Assumes it is called in the order in which locations should be assigned. -void HlslParseContext::assignToInterface(TVariable& variable) -{ - const auto assignLocation = [&](TVariable& variable) { - TType& type = variable.getWritableType(); - if (!type.isStruct() || type.getStruct()->size() > 0) { - TQualifier& qualifier = type.getQualifier(); - if (qualifier.storage == EvqVaryingIn || qualifier.storage == EvqVaryingOut) { - if (qualifier.builtIn == EbvNone && !qualifier.hasLocation()) { - // Strip off the outer array dimension for those having an extra one. 
- int size; - if (type.isArray() && qualifier.isArrayedIo(language)) { - TType elementType(type, 0); - size = intermediate.computeTypeLocationSize(elementType); - } else - size = intermediate.computeTypeLocationSize(type); - - if (qualifier.storage == EvqVaryingIn) { - variable.getWritableType().getQualifier().layoutLocation = nextInLocation; - nextInLocation += size; - } else { - variable.getWritableType().getQualifier().layoutLocation = nextOutLocation; - nextOutLocation += size; - } - } - trackLinkage(variable); - } - } - }; - - if (wasFlattened(variable.getUniqueId())) { - auto& memberList = flattenMap[variable.getUniqueId()].members; - for (auto member = memberList.begin(); member != memberList.end(); ++member) - assignLocation(**member); - } else if (wasSplit(variable.getUniqueId())) { - TVariable* splitIoVar = getSplitNonIoVar(variable.getUniqueId()); - assignLocation(*splitIoVar); - } else { - assignLocation(variable); - } -} - -// -// Handle seeing a function declarator in the grammar. This is the precursor -// to recognizing a function prototype or function definition. -// -void HlslParseContext::handleFunctionDeclarator(const TSourceLoc& loc, TFunction& function, bool prototype) -{ - // - // Multiple declarations of the same function name are allowed. - // - // If this is a definition, the definition production code will check for redefinitions - // (we don't know at this point if it's a definition or not). - // - bool builtIn; - TSymbol* symbol = symbolTable.find(function.getMangledName(), &builtIn); - const TFunction* prevDec = symbol ? symbol->getAsFunction() : 0; - - if (prototype) { - // All built-in functions are defined, even though they don't have a body. - // Count their prototype as a definition instead. - if (symbolTable.atBuiltInLevel()) - function.setDefined(); - else { - if (prevDec && ! builtIn) - symbol->getAsFunction()->setPrototyped(); // need a writable one, but like having prevDec as a const - function.setPrototyped(); - } - } - - // This insert won't actually insert it if it's a duplicate signature, but it will still check for - // other forms of name collisions. - if (! symbolTable.insert(function)) - error(loc, "function name is redeclaration of existing name", function.getName().c_str(), ""); -} - -// For struct buffers with counters, we must pass the counter buffer as hidden parameter. -// This adds the hidden parameter to the parameter list in 'paramNodes' if needed. -// Otherwise, it's a no-op -void HlslParseContext::addStructBufferHiddenCounterParam(const TSourceLoc& loc, TParameter& param, - TIntermAggregate*& paramNodes) -{ - if (! hasStructBuffCounter(*param.type)) - return; - - const TString counterBlockName(getStructBuffCounterName(*param.name)); - - TType counterType; - counterBufferType(loc, counterType); - TVariable *variable = makeInternalVariable(counterBlockName, counterType); - - if (! symbolTable.insert(*variable)) - error(loc, "redefinition", variable->getName().c_str(), ""); - - paramNodes = intermediate.growAggregate(paramNodes, - intermediate.addSymbol(*variable, loc), - loc); -} - -// -// Handle seeing the function prototype in front of a function definition in the grammar. -// The body is handled after this function returns. -// -// Returns an aggregate of parameter-symbol nodes. 
-// -TIntermAggregate* HlslParseContext::handleFunctionDefinition(const TSourceLoc& loc, TFunction& function, - const TAttributeMap& attributes, - TIntermNode*& entryPointTree) -{ - currentCaller = function.getMangledName(); - TSymbol* symbol = symbolTable.find(function.getMangledName()); - TFunction* prevDec = symbol ? symbol->getAsFunction() : nullptr; - - if (prevDec == nullptr) - error(loc, "can't find function", function.getName().c_str(), ""); - // Note: 'prevDec' could be 'function' if this is the first time we've seen function - // as it would have just been put in the symbol table. Otherwise, we're looking up - // an earlier occurrence. - - if (prevDec && prevDec->isDefined()) { - // Then this function already has a body. - error(loc, "function already has a body", function.getName().c_str(), ""); - } - if (prevDec && ! prevDec->isDefined()) { - prevDec->setDefined(); - - // Remember the return type for later checking for RETURN statements. - currentFunctionType = &(prevDec->getType()); - } else - currentFunctionType = new TType(EbtVoid); - functionReturnsValue = false; - - // Entry points need different I/O and other handling, transform it so the - // rest of this function doesn't care. - entryPointTree = transformEntryPoint(loc, function, attributes); - - // - // New symbol table scope for body of function plus its arguments - // - pushScope(); - - // - // Insert parameters into the symbol table. - // If the parameter has no name, it's not an error, just don't insert it - // (could be used for unused args). - // - // Also, accumulate the list of parameters into the AST, so lower level code - // knows where to find parameters. - // - TIntermAggregate* paramNodes = new TIntermAggregate; - for (int i = 0; i < function.getParamCount(); i++) { - TParameter& param = function[i]; - if (param.name != nullptr) { - TVariable *variable = new TVariable(param.name, *param.type); - - if (i == 0 && function.hasImplicitThis()) { - // Anonymous 'this' members are already in a symbol-table level, - // and we need to know what function parameter to map them to. - symbolTable.makeInternalVariable(*variable); - pushImplicitThis(variable); - } - - // Insert the parameters with name in the symbol table. - if (! symbolTable.insert(*variable)) - error(loc, "redefinition", variable->getName().c_str(), ""); - - // Add parameters to the AST list. - if (shouldFlatten(variable->getType())) { - // Expand the AST parameter nodes (but not the name mangling or symbol table view) - // for structures that need to be flattened. - flatten(*variable, false); - const TTypeList* structure = variable->getType().getStruct(); - for (int mem = 0; mem < (int)structure->size(); ++mem) { - paramNodes = intermediate.growAggregate(paramNodes, - flattenAccess(variable->getUniqueId(), mem, - *(*structure)[mem].type), - loc); - } - } else { - // Add the parameter to the AST - paramNodes = intermediate.growAggregate(paramNodes, - intermediate.addSymbol(*variable, loc), - loc); - } - - // Add hidden AST parameter for struct buffer counters, if needed. 
- addStructBufferHiddenCounterParam(loc, param, paramNodes); - } else - paramNodes = intermediate.growAggregate(paramNodes, intermediate.addSymbol(*param.type, loc), loc); - } - if (function.hasIllegalImplicitThis()) - pushImplicitThis(nullptr); - - intermediate.setAggregateOperator(paramNodes, EOpParameters, TType(EbtVoid), loc); - loopNestingLevel = 0; - controlFlowNestingLevel = 0; - postEntryPointReturn = false; - - return paramNodes; -} - - -// Handle all [attrib] attribute for the shader entry point -void HlslParseContext::handleEntryPointAttributes(const TSourceLoc& loc, const TAttributeMap& attributes) -{ - // Handle entry-point function attributes - const TIntermAggregate* numThreads = attributes[EatNumThreads]; - if (numThreads != nullptr) { - const TIntermSequence& sequence = numThreads->getSequence(); - - for (int lid = 0; lid < int(sequence.size()); ++lid) - intermediate.setLocalSize(lid, sequence[lid]->getAsConstantUnion()->getConstArray()[0].getIConst()); - } - - // MaxVertexCount - const TIntermAggregate* maxVertexCount = attributes[EatMaxVertexCount]; - if (maxVertexCount != nullptr) { - if (! intermediate.setVertices(maxVertexCount->getSequence()[0]->getAsConstantUnion()-> - getConstArray()[0].getIConst())) { - error(loc, "cannot change previously set maxvertexcount attribute", "", ""); - } - } - - // Handle [patchconstantfunction("...")] - const TIntermAggregate* pcfAttr = attributes[EatPatchConstantFunc]; - if (pcfAttr != nullptr) { - const TConstUnion& pcfName = pcfAttr->getSequence()[0]->getAsConstantUnion()->getConstArray()[0]; - - if (pcfName.getType() != EbtString) { - error(loc, "invalid patch constant function", "", ""); - } else { - patchConstantFunctionName = *pcfName.getSConst(); - } - } - - // Handle [domain("...")] - const TIntermAggregate* domainAttr = attributes[EatDomain]; - if (domainAttr != nullptr) { - const TConstUnion& domainType = domainAttr->getSequence()[0]->getAsConstantUnion()->getConstArray()[0]; - if (domainType.getType() != EbtString) { - error(loc, "invalid domain", "", ""); - } else { - TString domainStr = *domainType.getSConst(); - std::transform(domainStr.begin(), domainStr.end(), domainStr.begin(), ::tolower); - - TLayoutGeometry domain = ElgNone; - - if (domainStr == "tri") { - domain = ElgTriangles; - } else if (domainStr == "quad") { - domain = ElgQuads; - } else if (domainStr == "isoline") { - domain = ElgIsolines; - } else { - error(loc, "unsupported domain type", domainStr.c_str(), ""); - } - - if (language == EShLangTessEvaluation) { - if (! intermediate.setInputPrimitive(domain)) - error(loc, "cannot change previously set domain", TQualifier::getGeometryString(domain), ""); - } else { - if (! 
intermediate.setOutputPrimitive(domain)) - error(loc, "cannot change previously set domain", TQualifier::getGeometryString(domain), ""); - } - } - } - - // Handle [outputtopology("...")] - const TIntermAggregate* topologyAttr = attributes[EatOutputTopology]; - if (topologyAttr != nullptr) { - const TConstUnion& topoType = topologyAttr->getSequence()[0]->getAsConstantUnion()->getConstArray()[0]; - if (topoType.getType() != EbtString) { - error(loc, "invalid outputtopology", "", ""); - } else { - TString topologyStr = *topoType.getSConst(); - std::transform(topologyStr.begin(), topologyStr.end(), topologyStr.begin(), ::tolower); - - TVertexOrder vertexOrder = EvoNone; - TLayoutGeometry primitive = ElgNone; - - if (topologyStr == "point") { - intermediate.setPointMode(); - } else if (topologyStr == "line") { - primitive = ElgIsolines; - } else if (topologyStr == "triangle_cw") { - vertexOrder = EvoCw; - primitive = ElgTriangles; - } else if (topologyStr == "triangle_ccw") { - vertexOrder = EvoCcw; - primitive = ElgTriangles; - } else { - error(loc, "unsupported outputtopology type", topologyStr.c_str(), ""); - } - - if (vertexOrder != EvoNone) { - if (! intermediate.setVertexOrder(vertexOrder)) { - error(loc, "cannot change previously set outputtopology", - TQualifier::getVertexOrderString(vertexOrder), ""); - } - } - if (primitive != ElgNone) - intermediate.setOutputPrimitive(primitive); - } - } - - // Handle [partitioning("...")] - const TIntermAggregate* partitionAttr = attributes[EatPartitioning]; - if (partitionAttr != nullptr) { - const TConstUnion& partType = partitionAttr->getSequence()[0]->getAsConstantUnion()->getConstArray()[0]; - if (partType.getType() != EbtString) { - error(loc, "invalid partitioning", "", ""); - } else { - TString partitionStr = *partType.getSConst(); - std::transform(partitionStr.begin(), partitionStr.end(), partitionStr.begin(), ::tolower); - - TVertexSpacing partitioning = EvsNone; - - if (partitionStr == "integer") { - partitioning = EvsEqual; - } else if (partitionStr == "fractional_even") { - partitioning = EvsFractionalEven; - } else if (partitionStr == "fractional_odd") { - partitioning = EvsFractionalOdd; - //} else if (partition == "pow2") { // TODO: currently nothing to map this to. - } else { - error(loc, "unsupported partitioning type", partitionStr.c_str(), ""); - } - - if (! intermediate.setVertexSpacing(partitioning)) - error(loc, "cannot change previously set partitioning", - TQualifier::getVertexSpacingString(partitioning), ""); - } - } - - // Handle [outputcontrolpoints("...")] - const TIntermAggregate* outputControlPoints = attributes[EatOutputControlPoints]; - if (outputControlPoints != nullptr) { - const TConstUnion& ctrlPointConst = - outputControlPoints->getSequence()[0]->getAsConstantUnion()->getConstArray()[0]; - if (ctrlPointConst.getType() != EbtInt) { - error(loc, "invalid outputcontrolpoints", "", ""); - } else { - const int ctrlPoints = ctrlPointConst.getIConst(); - if (! intermediate.setVertices(ctrlPoints)) { - error(loc, "cannot change previously set outputcontrolpoints attribute", "", ""); - } - } - } -} - -// -// Do all special handling for the entry point, including wrapping -// the shader's entry point with the official entry point that will call it. -// -// The following: -// -// retType shaderEntryPoint(args...) 
// shader declared entry point -// { body } -// -// Becomes -// -// out retType ret; -// in iargs...; -// out oargs ...; -// -// void shaderEntryPoint() // synthesized, but official, entry point -// { -// args = iargs...; -// ret = @shaderEntryPoint(args...); -// oargs = args...; -// } -// retType @shaderEntryPoint(args...) -// { body } -// -// The symbol table will still map the original entry point name to the -// the modified function and its new name: -// -// symbol table: shaderEntryPoint -> @shaderEntryPoint -// -// Returns nullptr if no entry-point tree was built, otherwise, returns -// a subtree that creates the entry point. -// -TIntermNode* HlslParseContext::transformEntryPoint(const TSourceLoc& loc, TFunction& userFunction, - const TAttributeMap& attributes) -{ - // Return true if this is a tessellation patch constant function input to a domain shader. - const auto isDsPcfInput = [this](const TType& type) { - return language == EShLangTessEvaluation && - type.contains([](const TType* t) { - return t->getQualifier().builtIn == EbvTessLevelOuter || - t->getQualifier().builtIn == EbvTessLevelInner; - }); - }; - - // if we aren't in the entry point, fix the IO as such and exit - if (userFunction.getName().compare(intermediate.getEntryPointName().c_str()) != 0) { - remapNonEntryPointIO(userFunction); - return nullptr; - } - - entryPointFunction = &userFunction; // needed in finish() - - // Handle entry point attributes - handleEntryPointAttributes(loc, attributes); - - // entry point logic... - - // Move parameters and return value to shader in/out - TVariable* entryPointOutput; // gets created in remapEntryPointIO - TVector inputs; - TVector outputs; - remapEntryPointIO(userFunction, entryPointOutput, inputs, outputs); - - // Further this return/in/out transform by flattening, splitting, and assigning locations - const auto makeVariableInOut = [&](TVariable& variable) { - if (variable.getType().isStruct()) { - if (variable.getType().getQualifier().isArrayedIo(language)) { - if (variable.getType().containsBuiltIn()) - split(variable); - } else - flatten(variable, false /* don't track linkage here, it will be tracked in assignToInterface() */); - } - // TODO: flatten arrays too - // TODO: flatten everything in I/O - // TODO: replace all split with flatten, make all paths can create flattened I/O, then split code can be removed - - // For clip and cull distance, multiple output variables potentially get merged - // into one in assignClipCullDistance. That code in assignClipCullDistance - // handles the interface logic, so we avoid it here in that case. - if (!isClipOrCullDistance(variable.getType())) - assignToInterface(variable); - }; - if (entryPointOutput != nullptr) - makeVariableInOut(*entryPointOutput); - for (auto it = inputs.begin(); it != inputs.end(); ++it) - if (!isDsPcfInput((*it)->getType())) // wait until the end for PCF input (see comment below) - makeVariableInOut(*(*it)); - for (auto it = outputs.begin(); it != outputs.end(); ++it) - makeVariableInOut(*(*it)); - - // In the domain shader, PCF input must be at the end of the linkage. That's because in the - // hull shader there is no ordering: the output comes from the separate PCF, which does not - // participate in the argument list. That is always put at the end of the HS linkage, so the - // input side of the DS must match. The argument may be in any position in the DS argument list - // however, so this ensures the linkage is built in the correct order regardless of argument order. 
- if (language == EShLangTessEvaluation) { - for (auto it = inputs.begin(); it != inputs.end(); ++it) - if (isDsPcfInput((*it)->getType())) - makeVariableInOut(*(*it)); - } - - // Synthesize the call - - pushScope(); // matches the one in handleFunctionBody() - - // new signature - TType voidType(EbtVoid); - TFunction synthEntryPoint(&userFunction.getName(), voidType); - TIntermAggregate* synthParams = new TIntermAggregate(); - intermediate.setAggregateOperator(synthParams, EOpParameters, voidType, loc); - intermediate.setEntryPointMangledName(synthEntryPoint.getMangledName().c_str()); - intermediate.incrementEntryPointCount(); - TFunction callee(&userFunction.getName(), voidType); // call based on old name, which is still in the symbol table - - // change original name - userFunction.addPrefix("@"); // change the name in the function, but not in the symbol table - - // Copy inputs (shader-in -> calling arg), while building up the call node - TVector argVars; - TIntermAggregate* synthBody = new TIntermAggregate(); - auto inputIt = inputs.begin(); - TIntermTyped* callingArgs = nullptr; - - for (int i = 0; i < userFunction.getParamCount(); i++) { - TParameter& param = userFunction[i]; - argVars.push_back(makeInternalVariable(*param.name, *param.type)); - argVars.back()->getWritableType().getQualifier().makeTemporary(); - TIntermSymbol* arg = intermediate.addSymbol(*argVars.back()); - handleFunctionArgument(&callee, callingArgs, arg); - if (param.type->getQualifier().isParamInput()) { - intermediate.growAggregate(synthBody, handleAssign(loc, EOpAssign, arg, - intermediate.addSymbol(**inputIt))); - inputIt++; - } - } - - // Call - currentCaller = synthEntryPoint.getMangledName(); - TIntermTyped* callReturn = handleFunctionCall(loc, &callee, callingArgs); - currentCaller = userFunction.getMangledName(); - - // Return value - if (entryPointOutput) { - TIntermTyped* returnAssign; - - // For hull shaders, the wrapped entry point return value is written to - // an array element as indexed by invocation ID, which we might have to make up. - // This is required to match SPIR-V semantics. - if (language == EShLangTessControl) { - TIntermSymbol* invocationIdSym = findTessLinkageSymbol(EbvInvocationId); - - // If there is no user declared invocation ID, we must make one. - if (invocationIdSym == nullptr) { - TType invocationIdType(EbtUint, EvqIn, 1); - TString* invocationIdName = NewPoolTString("InvocationId"); - invocationIdType.getQualifier().builtIn = EbvInvocationId; - - TVariable* variable = makeInternalVariable(*invocationIdName, invocationIdType); - - globalQualifierFix(loc, variable->getWritableType().getQualifier()); - trackLinkage(*variable); - - invocationIdSym = intermediate.addSymbol(*variable); - } - - TIntermTyped* element = intermediate.addIndex(EOpIndexIndirect, intermediate.addSymbol(*entryPointOutput), - invocationIdSym, loc); - element->setType(callReturn->getType()); - - returnAssign = handleAssign(loc, EOpAssign, element, callReturn); - } else { - returnAssign = handleAssign(loc, EOpAssign, intermediate.addSymbol(*entryPointOutput), callReturn); - } - intermediate.growAggregate(synthBody, returnAssign); - } else - intermediate.growAggregate(synthBody, callReturn); - - // Output copies - auto outputIt = outputs.begin(); - for (int i = 0; i < userFunction.getParamCount(); i++) { - TParameter& param = userFunction[i]; - - // GS outputs are via emit, so we do not copy them here. 
-        if (param.type->getQualifier().isParamOutput()) {
-            if (param.getDeclaredBuiltIn() == EbvGsOutputStream) {
-                // GS output stream does not assign outputs here: it's the Append() method
-                // which writes to the output, probably multiple times separated by Emit.
-                // We merely remember the output to use, here.
-                gsStreamOutput = *outputIt;
-            } else {
-                intermediate.growAggregate(synthBody, handleAssign(loc, EOpAssign,
-                                                                   intermediate.addSymbol(**outputIt),
-                                                                   intermediate.addSymbol(*argVars[i])));
-            }
-
-            outputIt++;
-        }
-    }
-
-    // Put the pieces together to form a full function subtree
-    // for the synthesized entry point.
-    synthBody->setOperator(EOpSequence);
-    TIntermNode* synthFunctionDef = synthParams;
-    handleFunctionBody(loc, synthEntryPoint, synthBody, synthFunctionDef);
-
-    entryPointFunctionBody = synthBody;
-
-    return synthFunctionDef;
-}
-
-void HlslParseContext::handleFunctionBody(const TSourceLoc& loc, TFunction& function, TIntermNode* functionBody,
-                                          TIntermNode*& node)
-{
-    node = intermediate.growAggregate(node, functionBody);
-    intermediate.setAggregateOperator(node, EOpFunction, function.getType(), loc);
-    node->getAsAggregate()->setName(function.getMangledName().c_str());
-
-    popScope();
-    if (function.hasImplicitThis())
-        popImplicitThis();
-
-    if (function.getType().getBasicType() != EbtVoid && ! functionReturnsValue)
-        error(loc, "function does not return a value:", "", function.getName().c_str());
-}
-
-// AST I/O is done through shader globals declared in the 'in' or 'out'
-// storage class. An HLSL entry point has a return value, input parameters
-// and output parameters. These need to get remapped to the AST I/O.
-void HlslParseContext::remapEntryPointIO(TFunction& function, TVariable*& returnValue,
-                                         TVector<TVariable*>& inputs, TVector<TVariable*>& outputs)
-{
-    // We might have in input structure type with no decorations that caused it
-    // to look like an input type, yet it has (e.g.) interpolation types that
-    // must be modified that turn it into an input type.
-    // Hence, a missing ioTypeMap for 'input' might need to be synthesized.
-    const auto synthesizeEditedInput = [this](TType& type) {
-        // True if a type needs to be 'flat'
-        const auto needsFlat = [](const TType& type) {
-            return type.containsBasicType(EbtInt) ||
-                   type.containsBasicType(EbtUint) ||
-                   type.containsBasicType(EbtInt64) ||
-                   type.containsBasicType(EbtUint64) ||
-                   type.containsBasicType(EbtBool) ||
-                   type.containsBasicType(EbtDouble);
-        };
-
-        if (language == EShLangFragment && needsFlat(type)) {
-            if (type.isStruct()) {
-                TTypeList* finalList = nullptr;
-                auto it = ioTypeMap.find(type.getStruct());
-                if (it == ioTypeMap.end() || it->second.input == nullptr) {
-                    // Getting here means we have no input struct, but we need one.
- auto list = new TTypeList; - for (auto member = type.getStruct()->begin(); member != type.getStruct()->end(); ++member) { - TType* newType = new TType; - newType->shallowCopy(*member->type); - TTypeLoc typeLoc = { newType, member->loc }; - list->push_back(typeLoc); - } - // install the new input type - if (it == ioTypeMap.end()) { - tIoKinds newLists = { list, nullptr, nullptr }; - ioTypeMap[type.getStruct()] = newLists; - } else - it->second.input = list; - finalList = list; - } else - finalList = it->second.input; - // edit for 'flat' - for (auto member = finalList->begin(); member != finalList->end(); ++member) { - if (needsFlat(*member->type)) { - member->type->getQualifier().clearInterpolation(); - member->type->getQualifier().flat = true; - } - } - } else { - type.getQualifier().clearInterpolation(); - type.getQualifier().flat = true; - } - } - }; - - // Do the actual work to make a type be a shader input or output variable, - // and clear the original to be non-IO (for use as a normal function parameter/return). - const auto makeIoVariable = [this](const char* name, TType& type, TStorageQualifier storage) -> TVariable* { - TVariable* ioVariable = makeInternalVariable(name, type); - clearUniformInputOutput(type.getQualifier()); - if (type.isStruct()) { - auto newLists = ioTypeMap.find(ioVariable->getType().getStruct()); - if (newLists != ioTypeMap.end()) { - if (storage == EvqVaryingIn && newLists->second.input) - ioVariable->getWritableType().setStruct(newLists->second.input); - else if (storage == EvqVaryingOut && newLists->second.output) - ioVariable->getWritableType().setStruct(newLists->second.output); - } - } - if (storage == EvqVaryingIn) { - correctInput(ioVariable->getWritableType().getQualifier()); - if (language == EShLangTessEvaluation) - if (!ioVariable->getType().isArray()) - ioVariable->getWritableType().getQualifier().patch = true; - } else { - correctOutput(ioVariable->getWritableType().getQualifier()); - } - ioVariable->getWritableType().getQualifier().storage = storage; - - fixBuiltInIoType(ioVariable->getWritableType()); - - return ioVariable; - }; - - // return value is actually a shader-scoped output (out) - if (function.getType().getBasicType() == EbtVoid) { - returnValue = nullptr; - } else { - if (language == EShLangTessControl) { - // tessellation evaluation in HLSL writes a per-ctrl-pt value, but it needs to be an - // array in SPIR-V semantics. We'll write to it indexed by invocation ID. - - returnValue = makeIoVariable("@entryPointOutput", function.getWritableType(), EvqVaryingOut); - - TType outputType; - outputType.shallowCopy(function.getType()); - - // vertices has necessarily already been set when handling entry point attributes. 
- TArraySizes arraySizes; - arraySizes.addInnerSize(intermediate.getVertices()); - outputType.newArraySizes(arraySizes); - - clearUniformInputOutput(function.getWritableType().getQualifier()); - returnValue = makeIoVariable("@entryPointOutput", outputType, EvqVaryingOut); - } else { - returnValue = makeIoVariable("@entryPointOutput", function.getWritableType(), EvqVaryingOut); - } - } - - // parameters are actually shader-scoped inputs and outputs (in or out) - for (int i = 0; i < function.getParamCount(); i++) { - TType& paramType = *function[i].type; - if (paramType.getQualifier().isParamInput()) { - synthesizeEditedInput(paramType); - TVariable* argAsGlobal = makeIoVariable(function[i].name->c_str(), paramType, EvqVaryingIn); - inputs.push_back(argAsGlobal); - - if (function[i].getDeclaredBuiltIn() == EbvInputPatch) - inputPatch = argAsGlobal; - } - if (paramType.getQualifier().isParamOutput()) { - TVariable* argAsGlobal = makeIoVariable(function[i].name->c_str(), paramType, EvqVaryingOut); - outputs.push_back(argAsGlobal); - } - } -} - -// An HLSL function that looks like an entry point, but is not, -// declares entry point IO built-ins, but these have to be undone. -void HlslParseContext::remapNonEntryPointIO(TFunction& function) -{ - // return value - if (function.getType().getBasicType() != EbtVoid) - clearUniformInputOutput(function.getWritableType().getQualifier()); - - // parameters. - // References to structuredbuffer types are left unmodified - for (int i = 0; i < function.getParamCount(); i++) - if (!isReference(*function[i].type)) - clearUniformInputOutput(function[i].type->getQualifier()); -} - -// Handle function returns, including type conversions to the function return type -// if necessary. -TIntermNode* HlslParseContext::handleReturnValue(const TSourceLoc& loc, TIntermTyped* value) -{ - functionReturnsValue = true; - - if (currentFunctionType->getBasicType() == EbtVoid) { - error(loc, "void function cannot return a value", "return", ""); - return intermediate.addBranch(EOpReturn, loc); - } else if (*currentFunctionType != value->getType()) { - value = intermediate.addConversion(EOpReturn, *currentFunctionType, value); - if (value && *currentFunctionType != value->getType()) - value = intermediate.addUniShapeConversion(EOpReturn, *currentFunctionType, value); - if (value == nullptr || *currentFunctionType != value->getType()) { - error(loc, "type does not match, or is not convertible to, the function's return type", "return", ""); - return value; - } - } - - return intermediate.addBranch(EOpReturn, value, loc); -} - -void HlslParseContext::handleFunctionArgument(TFunction* function, - TIntermTyped*& arguments, TIntermTyped* newArg) -{ - TParameter param = { 0, new TType, nullptr }; - param.type->shallowCopy(newArg->getType()); - - function->addParameter(param); - if (arguments) - arguments = intermediate.growAggregate(arguments, newArg); - else - arguments = newArg; -} - -// Clip and cull distance require special handling due to a semantic mismatch. In HLSL, -// these can be float scalar, float vector, or arrays of float scalar or float vector. -// In SPIR-V, they are arrays of scalar floats in all cases. We must copy individual components -// (e.g, both x and y components of a float2) out into the destination float array. -// -// The values are assigned to sequential members of the output array. The inner dimension -// is vector components. The outer dimension is array elements. 
-TIntermAggregate* HlslParseContext::assignClipCullDistance(const TSourceLoc& loc, TOperator op, int semanticId, - TIntermTyped* left, TIntermTyped* right) -{ - switch (language) { - case EShLangFragment: - case EShLangVertex: - break; - default: - error(loc, "unimplemented: clip/cull not currently implemented for this stage", "", ""); - return nullptr; - } - - TVariable** clipCullVar = nullptr; - - // Figure out if we are assigning to, or from, clip or cull distance. - const bool isOutput = isClipOrCullDistance(left->getType()); - - // This is the rvalue or lvalue holding the clip or cull distance. - TIntermTyped* clipCullNode = isOutput ? left : right; - // This is the value going into or out of the clip or cull distance. - TIntermTyped* internalNode = isOutput ? right : left; - - const TBuiltInVariable builtInType = clipCullNode->getQualifier().builtIn; - - decltype(clipSemanticNSize)* semanticNSize = nullptr; - - // Refer to either the clip or the cull distance, depending on semantic. - switch (builtInType) { - case EbvClipDistance: - clipCullVar = &clipDistanceVariable; - semanticNSize = &clipSemanticNSize; - break; - case EbvCullDistance: - clipCullVar = &cullDistanceVariable; - semanticNSize = &cullSemanticNSize; - break; - - // called invalidly: we expected a clip or a cull distance. - // static compile time problem: should not happen. - default: assert(0); return nullptr; - } - - // This is the offset in the destination array of a given semantic's data - std::array semanticOffset; - - // Calculate offset of variable of semantic N in destination array - int arrayLoc = 0; - int vecItems = 0; - - for (int x = 0; x < maxClipCullRegs; ++x) { - // See if we overflowed the vec4 packing - if ((vecItems + (*semanticNSize)[x]) > 4) { - arrayLoc = (arrayLoc + 3) & (~0x3); // round up to next multiple of 4 - vecItems = 0; - } - - semanticOffset[x] = arrayLoc; - vecItems += (*semanticNSize)[x]; - arrayLoc += (*semanticNSize)[x]; - } - - // array sizes, or 1 if it's not an array: - const int internalNodeArraySize = (internalNode->getType().isArray() ? internalNode->getType().getOuterArraySize() : 1); - // vector sizes: - const int internalNodeVectorSize = internalNode->getType().getVectorSize(); - - // If we haven't created the output already, create it now. - if (*clipCullVar == nullptr) { - // ClipDistance and CullDistance are handled specially in the entry point input/output copy - // algorithm, because they may need to be unpacked from components of vectors (or a scalar) - // into a float array, or vice versa. Here, we make the array the right size and type, - // which depends on the incoming data, which has several potential dimensions: Semantic ID - // vector size array size Of those, semantic ID and array size cannot appear - // simultaneously. - const int requiredArraySize = arrayLoc * internalNodeArraySize; - - TType clipCullType(EbtFloat, clipCullNode->getType().getQualifier().storage, 1); - clipCullType.getQualifier() = clipCullNode->getType().getQualifier(); - - // Create required array dimension - TArraySizes arraySizes; - arraySizes.addInnerSize(requiredArraySize); - clipCullType.newArraySizes(arraySizes); - - // Obtain symbol name: we'll use that for the symbol we introduce. - TIntermSymbol* sym = clipCullNode->getAsSymbolNode(); - assert(sym != nullptr); - - // We are moving the semantic ID from the layout location, so it is no longer needed or - // desired there. 
- clipCullType.getQualifier().layoutLocation = TQualifier::layoutLocationEnd; - - // Create variable and track its linkage - *clipCullVar = makeInternalVariable(sym->getName().c_str(), clipCullType); - - trackLinkage(**clipCullVar); - } - - // Create symbol for the clip or cull variable. - TIntermSymbol* clipCullSym = intermediate.addSymbol(**clipCullVar); - - // array sizes, or 1 if it's not an array: - const int clipCullSymArraySize = (clipCullSym->getType().isArray() ? clipCullSym->getType().getOuterArraySize() : 1); - // vector sizes: - const int clipCullSymVectorSize = clipCullSym->getType().getVectorSize(); - - // clipCullSym has got to be an array of scalar floats, per SPIR-V semantics. - // fixBuiltInIoType() should have handled that upstream. - assert(clipCullSym->getType().isArray()); - assert(clipCullSym->getType().getVectorSize() == 1); - assert(clipCullSym->getType().getBasicType() == EbtFloat); - - // We may be creating multiple sub-assignments. This is an aggregate to hold them. - // TODO: it would be possible to be clever sometimes and avoid the sequence node if not needed. - TIntermAggregate* assignList = nullptr; - - // Holds individual component assignments as we make them. - TIntermTyped* clipCullAssign = nullptr; - - // If the types are homomorphic, use a simple assign. No need to mess about with - // individual components. - if (clipCullSym->getType().isArray() == internalNode->getType().isArray() && - clipCullSymArraySize == internalNodeArraySize && - clipCullSymVectorSize == internalNodeVectorSize) { - - if (isOutput) - clipCullAssign = intermediate.addAssign(op, clipCullSym, internalNode, loc); - else - clipCullAssign = intermediate.addAssign(op, internalNode, clipCullSym, loc); - - assignList = intermediate.growAggregate(assignList, clipCullAssign); - assignList->setOperator(EOpSequence); - - return assignList; - } - - // We are going to copy each component of the internal (per array element if indicated) to sequential - // array elements of the clipCullSym. This tracks the lhs element we're writing to as we go along. - // We may be starting in the middle - e.g, for a non-zero semantic ID calculated above. - int clipCullArrayPos = semanticOffset[semanticId]; - - // Loop through every component of every element of the internal, and copy to or from the matching external. - for (int internalArrayPos = 0; internalArrayPos < internalNodeArraySize; ++internalArrayPos) { - for (int internalComponent = 0; internalComponent < internalNodeVectorSize; ++internalComponent) { - TIntermTyped* clipCullMember = clipCullSym; - - // array member to read from / write to: - { - const TType derefType(clipCullMember->getType(), 0); - clipCullMember = intermediate.addIndex(EOpIndexDirect, clipCullMember, - intermediate.addConstantUnion(clipCullArrayPos++, loc), loc); - clipCullMember->setType(derefType); - } - - TIntermTyped* internalMember = internalNode; - - // If internal node is an array, extract the element of interest - if (internalNode->getType().isArray()) { - const TType derefType(internalMember->getType(), 0); - internalMember = intermediate.addIndex(EOpIndexDirect, internalMember, - intermediate.addConstantUnion(internalArrayPos, loc), loc); - internalMember->setType(derefType); - } - - // If internal node is a vector, extract the component of interest. 
- if (internalNode->getType().isVector()) { - const TType derefType(internalMember->getType(), 0); - internalMember = intermediate.addIndex(EOpIndexDirect, internalMember, - intermediate.addConstantUnion(internalComponent, loc), loc); - internalMember->setType(derefType); - } - - // Create an assignment: output from internal to clip cull, or input from clip cull to internal. - if (isOutput) - clipCullAssign = intermediate.addAssign(op, clipCullMember, internalMember, loc); - else - clipCullAssign = intermediate.addAssign(op, internalMember, clipCullMember, loc); - - // Track assignment in the sequence. - assignList = intermediate.growAggregate(assignList, clipCullAssign); - } - } - - assert(assignList != nullptr); - assignList->setOperator(EOpSequence); - - return assignList; -} - -// For a declaration with an initializer, where the initialized symbol is flattened, -// and possibly contains opaque values, such that the initializer should never exist -// as emitted code, because even creating the initializer would write opaques. -// -// If possible, decompose this into individual member-wise assignments, which themselves -// are expected to then not exist for opaque types, because they will turn into aliases. -// -// Return a node that contains the non-aliased assignments that must continue to exist. -TIntermTyped* HlslParseContext::executeFlattenedInitializer(const TSourceLoc& loc, TIntermSymbol* symbol, - TIntermAggregate& initializer) -{ - // We need individual RHS initializers per member to do this - const TTypeList* typeList = symbol->getType().getStruct(); - if (typeList == nullptr || initializer.getSequence().size() != typeList->size()) { - warn(loc, "cannot do member-wise aliasing for opaque members with this initializer", "=", ""); - return handleAssign(loc, EOpAssign, symbol, &initializer); - } - - TIntermAggregate* initList = nullptr; - // synthesize an access to each member, and then an assignment to it - for (int member = 0; member < (int)typeList->size(); ++member) { - TIntermTyped* memberInitializer = initializer.getSequence()[member]->getAsTyped(); - TIntermTyped* flattenedMember = flattenAccess(symbol, member); - if (flattenedMember->getType().containsOpaque()) - setOpaqueLvalue(flattenedMember, memberInitializer); - else - initList = intermediate.growAggregate(initList, handleAssign(loc, EOpAssign, flattenedMember, - memberInitializer)); - } - - if (initList) - initList->setOperator(EOpSequence); - return initList; -} - -// Some simple source assignments need to be flattened to a sequence -// of AST assignments. Catch these and flatten, otherwise, pass through -// to intermediate.addAssign(). -// -// Also, assignment to matrix swizzles requires multiple component assignments, -// intercept those as well. -TIntermTyped* HlslParseContext::handleAssign(const TSourceLoc& loc, TOperator op, TIntermTyped* left, - TIntermTyped* right) -{ - if (left == nullptr || right == nullptr) - return nullptr; - - if (left->getAsOperator() && left->getAsOperator()->getOp() == EOpMatrixSwizzle) - return handleAssignToMatrixSwizzle(loc, op, left, right); - - const bool isSplitLeft = wasSplit(left); - const bool isSplitRight = wasSplit(right); - - const bool isFlattenLeft = wasFlattened(left); - const bool isFlattenRight = wasFlattened(right); - - // OK to do a single assign if both are split, or both are unsplit. But if one is and the other - // isn't, we fall back to a member-wise copy. 
-    if (!isFlattenLeft && !isFlattenRight && !isSplitLeft && !isSplitRight) {
-        // Clip and cull distance requires more processing. See comment above assignClipCullDistance.
-        if (isClipOrCullDistance(left->getType()) || isClipOrCullDistance(right->getType())) {
-            const bool isOutput = isClipOrCullDistance(left->getType());
-
-            const int semanticId = (isOutput ? left : right)->getType().getQualifier().layoutLocation;
-            return assignClipCullDistance(loc, op, semanticId, left, right);
-        }
-
-        return intermediate.addAssign(op, left, right, loc);
-    }
-
-    TIntermAggregate* assignList = nullptr;
-    const TVector<TVariable*>* leftVariables = nullptr;
-    const TVector<TVariable*>* rightVariables = nullptr;
-
-    // A temporary to store the right node's value, so we don't keep indirecting into it
-    // if it's not a simple symbol.
-    TVariable* rhsTempVar = nullptr;
-
-    // If the RHS is a simple symbol node, we'll copy it for each member.
-    TIntermSymbol* cloneSymNode = nullptr;
-
-    int memberCount = 0;
-
-    // Track how many items there are to copy.
-    if (left->getType().isStruct())
-        memberCount = (int)left->getType().getStruct()->size();
-    if (left->getType().isArray())
-        memberCount = left->getType().getCumulativeArraySize();
-
-    if (isFlattenLeft)
-        leftVariables = &flattenMap.find(left->getAsSymbolNode()->getId())->second.members;
-
-    if (isFlattenRight) {
-        rightVariables = &flattenMap.find(right->getAsSymbolNode()->getId())->second.members;
-    } else {
-        // The RHS is not flattened. There are several cases:
-        // 1. 1 item to copy: Use the RHS directly.
-        // 2. >1 item, simple symbol RHS: we'll create a new TIntermSymbol node for each, but no assign to temp.
-        // 3. >1 item, complex RHS: assign it to a new temp variable, and create a TIntermSymbol for each member.
-
-        if (memberCount <= 1) {
-            // case 1: we'll use the symbol directly below. Nothing to do.
-        } else {
-            if (right->getAsSymbolNode() != nullptr) {
-                // case 2: we'll copy the symbol per iteration below.
-                cloneSymNode = right->getAsSymbolNode();
-            } else {
-                // case 3: assign to a temp, and indirect into that.
-                rhsTempVar = makeInternalVariable("flattenTemp", right->getType());
-                rhsTempVar->getWritableType().getQualifier().makeTemporary();
-                TIntermTyped* noFlattenRHS = intermediate.addSymbol(*rhsTempVar, loc);
-
-                // Add this to the aggregate being built.
-                assignList = intermediate.growAggregate(assignList,
-                                                        intermediate.addAssign(op, noFlattenRHS, right, loc), loc);
-            }
-        }
-    }
-
-    int memberIdx = 0;
-
-    // When dealing with split arrayed structures of built-ins, the arrayness is moved to the extracted built-in
-    // variables, which is awkward when copying between split and unsplit structures. This variable tracks
-    // array indirections so they can be percolated from outer structs to inner variables.
-    std::vector<int> arrayElement;
-
-    // We track the outer-most aggregate, so that we can use its storage class later.
-    const TIntermTyped* outerLeft = left;
-    const TIntermTyped* outerRight = right;
-
-    const auto getMember = [&](bool isLeft, TIntermTyped* node, int member, TIntermTyped* splitNode, int splitMember)
-                           -> TIntermTyped * {
-        const bool flattened = isLeft ? isFlattenLeft : isFlattenRight;
-        const bool split = isLeft ? isSplitLeft : isSplitRight;
-
-        TIntermTyped* subTree;
-        const TType derefType(node->getType(), member);
-        const TVariable* builtInVar = nullptr;
-        if ((flattened || split) && derefType.isBuiltIn()) {
-            const TIntermTyped* outer = isLeft ? outerLeft : outerRight;
-            auto splitPair = splitBuiltIns.find(HlslParseContext::tInterstageIoData(
-                                                   derefType.getQualifier().builtIn,
-                                                   outer->getType().getQualifier().storage));
-            if (splitPair != splitBuiltIns.end())
-                builtInVar = splitPair->second;
-        }
-        if (builtInVar != nullptr) {
-            // copy from interstage IO built-in if needed
-            subTree = intermediate.addSymbol(*builtInVar);
-
-            // Arrayness of builtIn symbols isn't handled by the normal recursion:
-            // it's been extracted and moved to the built-in.
-            if (subTree->getType().isArray() && !arrayElement.empty()) {
-                const TType splitDerefType(subTree->getType(), arrayElement.back());
-                subTree = intermediate.addIndex(EOpIndexDirect, subTree,
-                                                intermediate.addConstantUnion(arrayElement.back(), loc), loc);
-                subTree->setType(splitDerefType);
-            }
-        } else if (flattened && isFinalFlattening(derefType)) {
-            const TVector<TVariable*>& flatVariables = isLeft ? *leftVariables : *rightVariables;
-            subTree = intermediate.addSymbol(*flatVariables[memberIdx++]);
-        } else {
-            // Index operator if it's an aggregate, else EOpNull
-            const TOperator accessOp = node->getType().isArray() ? EOpIndexDirect
-                                     : node->getType().isStruct() ? EOpIndexDirectStruct
-                                     : EOpNull;
-            if (accessOp == EOpNull) {
-                subTree = splitNode;
-            } else {
-                subTree = intermediate.addIndex(accessOp, splitNode, intermediate.addConstantUnion(splitMember, loc),
-                                                loc);
-                const TType splitDerefType(splitNode->getType(), splitMember);
-                subTree->setType(splitDerefType);
-            }
-        }
-
-        return subTree;
-    };
-
-    // Use the proper RHS node: a new symbol from a TVariable, copy
-    // of an TIntermSymbol node, or sometimes the right node directly.
-    right = rhsTempVar != nullptr ? intermediate.addSymbol(*rhsTempVar, loc) :
-            cloneSymNode != nullptr ? intermediate.addSymbol(*cloneSymNode) :
-            right;
-
-    // Cannot use auto here, because this is recursive, and auto can't work out the type without seeing the
-    // whole thing. So, we'll resort to an explicit type via std::function.
-    const std::function<void(TIntermTyped* left, TIntermTyped* right, TIntermTyped* splitLeft, TIntermTyped* splitRight)>
-        traverse = [&](TIntermTyped* left, TIntermTyped* right, TIntermTyped* splitLeft, TIntermTyped* splitRight) -> void {
-        // If we get here, we are assigning to or from a whole array or struct that must be
-        // flattened, so have to do member-by-member assignment:
-
-        if (left->getType().isArray() || right->getType().isArray()) {
-            const int elementsL = left->getType().isArray() ? left->getType().getOuterArraySize() : 1;
-            const int elementsR = right->getType().isArray() ? right->getType().getOuterArraySize() : 1;
-
-            // The arrays may not be the same size, e.g, if the size has been forced for EbvTessLevelInner or Outer.
-            const int elementsToCopy = std::min(elementsL, elementsR);
-
-            // array case
-            for (int element = 0; element < elementsToCopy; ++element) {
-                arrayElement.push_back(element);
-
-                // Add a new AST symbol node if we have a temp variable holding a complex RHS.
-                TIntermTyped* subLeft = getMember(true, left, element, left, element);
-                TIntermTyped* subRight = getMember(false, right, element, right, element);
-
-                TIntermTyped* subSplitLeft = isSplitLeft ? getMember(true, left, element, splitLeft, element)
-                                                         : subLeft;
-                TIntermTyped* subSplitRight = isSplitRight ?
getMember(false, right, element, splitRight, element) - : subRight; - - traverse(subLeft, subRight, subSplitLeft, subSplitRight); - - arrayElement.pop_back(); - } - } else if (left->getType().isStruct()) { - // struct case - const auto& membersL = *left->getType().getStruct(); - const auto& membersR = *right->getType().getStruct(); - - // These track the members in the split structures corresponding to the same in the unsplit structures, - // which we traverse in parallel. - int memberL = 0; - int memberR = 0; - - // Handle empty structure assignment - if (int(membersL.size()) == 0 && int(membersR.size()) == 0) - assignList = intermediate.growAggregate(assignList, intermediate.addAssign(op, left, right, loc), loc); - - for (int member = 0; member < int(membersL.size()); ++member) { - const TType& typeL = *membersL[member].type; - const TType& typeR = *membersR[member].type; - - TIntermTyped* subLeft = getMember(true, left, member, left, member); - TIntermTyped* subRight = getMember(false, right, member, right, member); - - // If there is no splitting, use the same values to avoid inefficiency. - TIntermTyped* subSplitLeft = isSplitLeft ? getMember(true, left, member, splitLeft, memberL) - : subLeft; - TIntermTyped* subSplitRight = isSplitRight ? getMember(false, right, member, splitRight, memberR) - : subRight; - - if (isClipOrCullDistance(subSplitLeft->getType()) || isClipOrCullDistance(subSplitRight->getType())) { - // Clip and cull distance built-in assignment is complex in its own right, and is handled in - // a separate function dedicated to that task. See comment above assignClipCullDistance; - - const bool isOutput = isClipOrCullDistance(subSplitLeft->getType()); - - // Since all clip/cull semantics boil down to the same built-in type, we need to get the - // semantic ID from the dereferenced type's layout location, to avoid an N-1 mapping. - const TType derefType((isOutput ? left : right)->getType(), member); - const int semanticId = derefType.getQualifier().layoutLocation; - - TIntermAggregate* clipCullAssign = assignClipCullDistance(loc, op, semanticId, - subSplitLeft, subSplitRight); - - assignList = intermediate.growAggregate(assignList, clipCullAssign, loc); - - } else if (!isFlattenLeft && !isFlattenRight && - !typeL.containsBuiltIn() && - !typeR.containsBuiltIn()) { - // If this is the final flattening (no nested types below to flatten) - // we'll copy the member, else recurse into the type hierarchy. - // However, if splitting the struct, that means we can copy a whole - // subtree here IFF it does not itself contain any interstage built-in - // IO variables, so we only have to recurse into it if there's something - // for splitting to do. That can save a lot of AST verbosity for - // a bunch of memberwise copies. - - assignList = intermediate.growAggregate(assignList, - intermediate.addAssign(op, subSplitLeft, subSplitRight, loc), - loc); - } else { - traverse(subLeft, subRight, subSplitLeft, subSplitRight); - } - - memberL += (typeL.isBuiltIn() ? 0 : 1); - memberR += (typeR.isBuiltIn() ? 0 : 1); - } - } else { - // Member copy - assignList = intermediate.growAggregate(assignList, intermediate.addAssign(op, left, right, loc), loc); - } - - }; - - TIntermTyped* splitLeft = left; - TIntermTyped* splitRight = right; - - // If either left or right was a split structure, we must read or write it, but still have to - // parallel-recurse through the unsplit structure to identify the built-in IO vars. 
- if (isSplitLeft) - splitLeft = intermediate.addSymbol(*getSplitNonIoVar(left->getAsSymbolNode()->getId()), loc); - - if (isSplitRight) - splitRight = intermediate.addSymbol(*getSplitNonIoVar(right->getAsSymbolNode()->getId()), loc); - - // This makes the whole assignment, recursing through subtypes as needed. - traverse(left, right, splitLeft, splitRight); - - assert(assignList != nullptr); - assignList->setOperator(EOpSequence); - - return assignList; -} - -// An assignment to matrix swizzle must be decomposed into individual assignments. -// These must be selected component-wise from the RHS and stored component-wise -// into the LHS. -TIntermTyped* HlslParseContext::handleAssignToMatrixSwizzle(const TSourceLoc& loc, TOperator op, TIntermTyped* left, - TIntermTyped* right) -{ - assert(left->getAsOperator() && left->getAsOperator()->getOp() == EOpMatrixSwizzle); - - if (op != EOpAssign) - error(loc, "only simple assignment to non-simple matrix swizzle is supported", "assign", ""); - - // isolate the matrix and swizzle nodes - TIntermTyped* matrix = left->getAsBinaryNode()->getLeft()->getAsTyped(); - const TIntermSequence& swizzle = left->getAsBinaryNode()->getRight()->getAsAggregate()->getSequence(); - - // if the RHS isn't already a simple vector, let's store into one - TIntermSymbol* vector = right->getAsSymbolNode(); - TIntermTyped* vectorAssign = nullptr; - if (vector == nullptr) { - // create a new intermediate vector variable to assign to - TType vectorType(matrix->getBasicType(), EvqTemporary, matrix->getQualifier().precision, (int)swizzle.size()/2); - vector = intermediate.addSymbol(*makeInternalVariable("intermVec", vectorType), loc); - - // assign the right to the new vector - vectorAssign = handleAssign(loc, op, vector, right); - } - - // Assign the vector components to the matrix components. - // Store this as a sequence, so a single aggregate node represents this - // entire operation. - TIntermAggregate* result = intermediate.makeAggregate(vectorAssign); - TType columnType(matrix->getType(), 0); - TType componentType(columnType, 0); - TType indexType(EbtInt); - for (int i = 0; i < (int)swizzle.size(); i += 2) { - // the right component, single index into the RHS vector - TIntermTyped* rightComp = intermediate.addIndex(EOpIndexDirect, vector, - intermediate.addConstantUnion(i/2, loc), loc); - - // the left component, double index into the LHS matrix - TIntermTyped* leftComp = intermediate.addIndex(EOpIndexDirect, matrix, - intermediate.addConstantUnion(swizzle[i]->getAsConstantUnion()->getConstArray(), - indexType, loc), - loc); - leftComp->setType(columnType); - leftComp = intermediate.addIndex(EOpIndexDirect, leftComp, - intermediate.addConstantUnion(swizzle[i+1]->getAsConstantUnion()->getConstArray(), - indexType, loc), - loc); - leftComp->setType(componentType); - - // Add the assignment to the aggregate - result = intermediate.growAggregate(result, intermediate.addAssign(op, leftComp, rightComp, loc)); - } - - result->setOp(EOpSequence); - - return result; -} - -// -// HLSL atomic operations have slightly different arguments than -// GLSL/AST/SPIRV. The semantics are converted below in decomposeIntrinsic. -// This provides the post-decomposition equivalent opcode. -// -TOperator HlslParseContext::mapAtomicOp(const TSourceLoc& loc, TOperator op, bool isImage) -{ - switch (op) { - case EOpInterlockedAdd: return isImage ? EOpImageAtomicAdd : EOpAtomicAdd; - case EOpInterlockedAnd: return isImage ? 
EOpImageAtomicAnd : EOpAtomicAnd; - case EOpInterlockedCompareExchange: return isImage ? EOpImageAtomicCompSwap : EOpAtomicCompSwap; - case EOpInterlockedMax: return isImage ? EOpImageAtomicMax : EOpAtomicMax; - case EOpInterlockedMin: return isImage ? EOpImageAtomicMin : EOpAtomicMin; - case EOpInterlockedOr: return isImage ? EOpImageAtomicOr : EOpAtomicOr; - case EOpInterlockedXor: return isImage ? EOpImageAtomicXor : EOpAtomicXor; - case EOpInterlockedExchange: return isImage ? EOpImageAtomicExchange : EOpAtomicExchange; - case EOpInterlockedCompareStore: // TODO: ... - default: - error(loc, "unknown atomic operation", "unknown op", ""); - return EOpNull; - } -} - -// -// Create a combined sampler/texture from separate sampler and texture. -// -TIntermAggregate* HlslParseContext::handleSamplerTextureCombine(const TSourceLoc& loc, TIntermTyped* argTex, - TIntermTyped* argSampler) -{ - TIntermAggregate* txcombine = new TIntermAggregate(EOpConstructTextureSampler); - - txcombine->getSequence().push_back(argTex); - txcombine->getSequence().push_back(argSampler); - - TSampler samplerType = argTex->getType().getSampler(); - samplerType.combined = true; - samplerType.shadow = argSampler->getType().getSampler().shadow; - - txcombine->setType(TType(samplerType, EvqTemporary)); - txcombine->setLoc(loc); - - return txcombine; -} - -// Return true if this a buffer type that has an associated counter buffer. -bool HlslParseContext::hasStructBuffCounter(const TType& type) const -{ - switch (type.getQualifier().declaredBuiltIn) { - case EbvAppendConsume: // fall through... - case EbvRWStructuredBuffer: // ... - return true; - default: - return false; // the other structuredbuffer types do not have a counter. - } -} - -void HlslParseContext::counterBufferType(const TSourceLoc& loc, TType& type) -{ - // Counter type - TType* counterType = new TType(EbtInt, EvqBuffer); - counterType->setFieldName("@count"); - - TTypeList* blockStruct = new TTypeList; - TTypeLoc member = { counterType, loc }; - blockStruct->push_back(member); - - TType blockType(blockStruct, "", counterType->getQualifier()); - blockType.getQualifier().storage = EvqBuffer; - - type.shallowCopy(blockType); - shareStructBufferType(type); -} - -// knowledge of how to construct block name, in one place instead of N places. -TString HlslParseContext::getStructBuffCounterName(const TString& blockName) const -{ - return blockName + "@count"; -} - -// declare counter for a structured buffer type -void HlslParseContext::declareStructBufferCounter(const TSourceLoc& loc, const TType& bufferType, const TString& name) -{ - // Bail out if not a struct buffer - if (! isStructBufferType(bufferType)) - return; - - if (! hasStructBuffCounter(bufferType)) - return; - - TType blockType; - counterBufferType(loc, blockType); - - TString* blockName = new TString(getStructBuffCounterName(name)); - - // Counter buffer does not have its own counter buffer. TODO: there should be a better way to track this. - structBufferCounter[*blockName] = false; - - shareStructBufferType(blockType); - declareBlock(loc, blockType, blockName); -} - -// return the counter that goes with a given structuredbuffer -TIntermTyped* HlslParseContext::getStructBufferCounter(const TSourceLoc& loc, TIntermTyped* buffer) -{ - // Bail out if not a struct buffer - if (buffer == nullptr || ! 
isStructBufferType(buffer->getType())) - return nullptr; - - const TString counterBlockName(getStructBuffCounterName(buffer->getAsSymbolNode()->getName())); - - // Mark the counter as being used - structBufferCounter[counterBlockName] = true; - - TIntermTyped* counterVar = handleVariable(loc, &counterBlockName); // find the block structure - TIntermTyped* index = intermediate.addConstantUnion(0, loc); // index to counter inside block struct - - TIntermTyped* counterMember = intermediate.addIndex(EOpIndexDirectStruct, counterVar, index, loc); - counterMember->setType(TType(EbtInt)); - return counterMember; -} - - -// -// Decompose structure buffer methods into AST -// -void HlslParseContext::decomposeStructBufferMethods(const TSourceLoc& loc, TIntermTyped*& node, TIntermNode* arguments) -{ - if (node == nullptr || node->getAsOperator() == nullptr || arguments == nullptr) - return; - - const TOperator op = node->getAsOperator()->getOp(); - TIntermAggregate* argAggregate = arguments->getAsAggregate(); - - // Buffer is the object upon which method is called, so always arg 0 - TIntermTyped* bufferObj = nullptr; - - // The parameters can be an aggregate, or just a the object as a symbol if there are no fn params. - if (argAggregate) { - if (argAggregate->getSequence().empty()) - return; - bufferObj = argAggregate->getSequence()[0]->getAsTyped(); - } else { - bufferObj = arguments->getAsSymbolNode(); - } - - if (bufferObj == nullptr || bufferObj->getAsSymbolNode() == nullptr) - return; - - // Some methods require a hidden internal counter, obtained via getStructBufferCounter(). - // This lambda adds something to it and returns the old value. - const auto incDecCounter = [&](int incval) -> TIntermTyped* { - TIntermTyped* incrementValue = intermediate.addConstantUnion(incval, loc, true); - TIntermTyped* counter = getStructBufferCounter(loc, bufferObj); // obtain the counter member - - if (counter == nullptr) - return nullptr; - - TIntermAggregate* counterIncrement = new TIntermAggregate(EOpAtomicAdd); - counterIncrement->setType(TType(EbtUint, EvqTemporary)); - counterIncrement->setLoc(loc); - counterIncrement->getSequence().push_back(counter); - counterIncrement->getSequence().push_back(incrementValue); - - return counterIncrement; - }; - - // Index to obtain the runtime sized array out of the buffer. - TIntermTyped* argArray = indexStructBufferContent(loc, bufferObj); - if (argArray == nullptr) - return; // It might not be a struct buffer method. - - switch (op) { - case EOpMethodLoad: - { - TIntermTyped* argIndex = makeIntegerIndex(argAggregate->getSequence()[1]->getAsTyped()); // index - - const TType& bufferType = bufferObj->getType(); - - const TBuiltInVariable builtInType = bufferType.getQualifier().declaredBuiltIn; - - // Byte address buffers index in bytes (only multiples of 4 permitted... not so much a byte address - // buffer then, but that's what it calls itself. - const bool isByteAddressBuffer = (builtInType == EbvByteAddressBuffer || - builtInType == EbvRWByteAddressBuffer); - - - if (isByteAddressBuffer) - argIndex = intermediate.addBinaryNode(EOpRightShift, argIndex, - intermediate.addConstantUnion(2, loc, true), - loc, TType(EbtInt)); - - // Index into the array to find the item being loaded. - const TOperator idxOp = (argIndex->getQualifier().storage == EvqConst) ? 
EOpIndexDirect : EOpIndexIndirect; - - node = intermediate.addIndex(idxOp, argArray, argIndex, loc); - - const TType derefType(argArray->getType(), 0); - node->setType(derefType); - } - - break; - - case EOpMethodLoad2: - case EOpMethodLoad3: - case EOpMethodLoad4: - { - TIntermTyped* argIndex = makeIntegerIndex(argAggregate->getSequence()[1]->getAsTyped()); // index - - TOperator constructOp = EOpNull; - int size = 0; - - switch (op) { - case EOpMethodLoad2: size = 2; constructOp = EOpConstructVec2; break; - case EOpMethodLoad3: size = 3; constructOp = EOpConstructVec3; break; - case EOpMethodLoad4: size = 4; constructOp = EOpConstructVec4; break; - default: assert(0); - } - - TIntermTyped* body = nullptr; - - // First, we'll store the address in a variable to avoid multiple shifts - // (we must convert the byte address to an item address) - TIntermTyped* byteAddrIdx = intermediate.addBinaryNode(EOpRightShift, argIndex, - intermediate.addConstantUnion(2, loc, true), - loc, TType(EbtInt)); - - TVariable* byteAddrSym = makeInternalVariable("byteAddrTemp", TType(EbtInt, EvqTemporary)); - TIntermTyped* byteAddrIdxVar = intermediate.addSymbol(*byteAddrSym, loc); - - body = intermediate.growAggregate(body, intermediate.addAssign(EOpAssign, byteAddrIdxVar, byteAddrIdx, loc)); - - TIntermTyped* vec = nullptr; - - // These are only valid on (rw)byteaddressbuffers, so we can always perform the >>2 - // address conversion. - for (int idx=0; idxgetQualifier().storage == EvqConst) ? EOpIndexDirect - : EOpIndexIndirect; - - vec = intermediate.growAggregate(vec, intermediate.addIndex(idxOp, argArray, offsetIdx, loc)); - } - - vec->setType(TType(argArray->getBasicType(), EvqTemporary, size)); - vec->getAsAggregate()->setOperator(constructOp); - - body = intermediate.growAggregate(body, vec); - body->setType(vec->getType()); - body->getAsAggregate()->setOperator(EOpSequence); - - node = body; - } - - break; - - case EOpMethodStore: - case EOpMethodStore2: - case EOpMethodStore3: - case EOpMethodStore4: - { - TIntermTyped* argIndex = makeIntegerIndex(argAggregate->getSequence()[1]->getAsTyped()); // index - TIntermTyped* argValue = argAggregate->getSequence()[2]->getAsTyped(); // value - - // Index into the array to find the item being loaded. - // Byte address buffers index in bytes (only multiples of 4 permitted... not so much a byte address - // buffer then, but that's what it calls itself. - - int size = 0; - - switch (op) { - case EOpMethodStore: size = 1; break; - case EOpMethodStore2: size = 2; break; - case EOpMethodStore3: size = 3; break; - case EOpMethodStore4: size = 4; break; - default: assert(0); - } - - TIntermAggregate* body = nullptr; - - // First, we'll store the address in a variable to avoid multiple shifts - // (we must convert the byte address to an item address) - TIntermTyped* byteAddrIdx = intermediate.addBinaryNode(EOpRightShift, argIndex, - intermediate.addConstantUnion(2, loc, true), loc, TType(EbtInt)); - - TVariable* byteAddrSym = makeInternalVariable("byteAddrTemp", TType(EbtInt, EvqTemporary)); - TIntermTyped* byteAddrIdxVar = intermediate.addSymbol(*byteAddrSym, loc); - - body = intermediate.growAggregate(body, intermediate.addAssign(EOpAssign, byteAddrIdxVar, byteAddrIdx, loc)); - - for (int idx=0; idxgetQualifier().storage == EvqConst) ? EOpIndexDirect - : EOpIndexIndirect; - - TIntermTyped* lValue = intermediate.addIndex(idxOp, argArray, offsetIdx, loc); - TIntermTyped* rValue = (size == 1) ? 
argValue : - intermediate.addIndex(EOpIndexDirect, argValue, idxConst, loc); - - TIntermTyped* assign = intermediate.addAssign(EOpAssign, lValue, rValue, loc); - - body = intermediate.growAggregate(body, assign); - } - - body->setOperator(EOpSequence); - node = body; - } - - break; - - case EOpMethodGetDimensions: - { - const int numArgs = (int)argAggregate->getSequence().size(); - TIntermTyped* argNumItems = argAggregate->getSequence()[1]->getAsTyped(); // out num items - TIntermTyped* argStride = numArgs > 2 ? argAggregate->getSequence()[2]->getAsTyped() : nullptr; // out stride - - TIntermAggregate* body = nullptr; - - // Length output: - if (argArray->getType().isRuntimeSizedArray()) { - TIntermTyped* lengthCall = intermediate.addBuiltInFunctionCall(loc, EOpArrayLength, true, argArray, - argNumItems->getType()); - TIntermTyped* assign = intermediate.addAssign(EOpAssign, argNumItems, lengthCall, loc); - body = intermediate.growAggregate(body, assign, loc); - } else { - const int length = argArray->getType().getOuterArraySize(); - TIntermTyped* assign = intermediate.addAssign(EOpAssign, argNumItems, - intermediate.addConstantUnion(length, loc, true), loc); - body = intermediate.growAggregate(body, assign, loc); - } - - // Stride output: - if (argStride != nullptr) { - int size; - int stride; - intermediate.getBaseAlignment(argArray->getType(), size, stride, false, - argArray->getType().getQualifier().layoutMatrix == ElmRowMajor); - - TIntermTyped* assign = intermediate.addAssign(EOpAssign, argStride, - intermediate.addConstantUnion(stride, loc, true), loc); - - body = intermediate.growAggregate(body, assign); - } - - body->setOperator(EOpSequence); - node = body; - } - - break; - - case EOpInterlockedAdd: - case EOpInterlockedAnd: - case EOpInterlockedExchange: - case EOpInterlockedMax: - case EOpInterlockedMin: - case EOpInterlockedOr: - case EOpInterlockedXor: - case EOpInterlockedCompareExchange: - case EOpInterlockedCompareStore: - { - // We'll replace the first argument with the block dereference, and let - // downstream decomposition handle the rest. - - TIntermSequence& sequence = argAggregate->getSequence(); - - TIntermTyped* argIndex = makeIntegerIndex(sequence[1]->getAsTyped()); // index - argIndex = intermediate.addBinaryNode(EOpRightShift, argIndex, intermediate.addConstantUnion(2, loc, true), - loc, TType(EbtInt)); - - const TOperator idxOp = (argIndex->getQualifier().storage == EvqConst) ? EOpIndexDirect : EOpIndexIndirect; - TIntermTyped* element = intermediate.addIndex(idxOp, argArray, argIndex, loc); - - const TType derefType(argArray->getType(), 0); - element->setType(derefType); - - // Replace the numeric byte offset parameter with array reference. 
- sequence[1] = element; - sequence.erase(sequence.begin(), sequence.begin()+1); - } - break; - - case EOpMethodIncrementCounter: - { - node = incDecCounter(1); - break; - } - - case EOpMethodDecrementCounter: - { - TIntermTyped* preIncValue = incDecCounter(-1); // result is original value - node = intermediate.addBinaryNode(EOpAdd, preIncValue, intermediate.addConstantUnion(-1, loc, true), loc, - preIncValue->getType()); - break; - } - - case EOpMethodAppend: - { - TIntermTyped* oldCounter = incDecCounter(1); - - TIntermTyped* lValue = intermediate.addIndex(EOpIndexIndirect, argArray, oldCounter, loc); - TIntermTyped* rValue = argAggregate->getSequence()[1]->getAsTyped(); - - const TType derefType(argArray->getType(), 0); - lValue->setType(derefType); - - node = intermediate.addAssign(EOpAssign, lValue, rValue, loc); - - break; - } - - case EOpMethodConsume: - { - TIntermTyped* oldCounter = incDecCounter(-1); - - TIntermTyped* newCounter = intermediate.addBinaryNode(EOpAdd, oldCounter, - intermediate.addConstantUnion(-1, loc, true), loc, - oldCounter->getType()); - - node = intermediate.addIndex(EOpIndexIndirect, argArray, newCounter, loc); - - const TType derefType(argArray->getType(), 0); - node->setType(derefType); - - break; - } - - default: - break; // most pass through unchanged - } -} - -// Create array of standard sample positions for given sample count. -// TODO: remove when a real method to query sample pos exists in SPIR-V. -TIntermConstantUnion* HlslParseContext::getSamplePosArray(int count) -{ - struct tSamplePos { float x, y; }; - - static const tSamplePos pos1[] = { - { 0.0/16.0, 0.0/16.0 }, - }; - - // standard sample positions for 2, 4, 8, and 16 samples. - static const tSamplePos pos2[] = { - { 4.0/16.0, 4.0/16.0 }, {-4.0/16.0, -4.0/16.0 }, - }; - - static const tSamplePos pos4[] = { - {-2.0/16.0, -6.0/16.0 }, { 6.0/16.0, -2.0/16.0 }, {-6.0/16.0, 2.0/16.0 }, { 2.0/16.0, 6.0/16.0 }, - }; - - static const tSamplePos pos8[] = { - { 1.0/16.0, -3.0/16.0 }, {-1.0/16.0, 3.0/16.0 }, { 5.0/16.0, 1.0/16.0 }, {-3.0/16.0, -5.0/16.0 }, - {-5.0/16.0, 5.0/16.0 }, {-7.0/16.0, -1.0/16.0 }, { 3.0/16.0, 7.0/16.0 }, { 7.0/16.0, -7.0/16.0 }, - }; - - static const tSamplePos pos16[] = { - { 1.0/16.0, 1.0/16.0 }, {-1.0/16.0, -3.0/16.0 }, {-3.0/16.0, 2.0/16.0 }, { 4.0/16.0, -1.0/16.0 }, - {-5.0/16.0, -2.0/16.0 }, { 2.0/16.0, 5.0/16.0 }, { 5.0/16.0, 3.0/16.0 }, { 3.0/16.0, -5.0/16.0 }, - {-2.0/16.0, 6.0/16.0 }, { 0.0/16.0, -7.0/16.0 }, {-4.0/16.0, -6.0/16.0 }, {-6.0/16.0, 4.0/16.0 }, - {-8.0/16.0, 0.0/16.0 }, { 7.0/16.0, -4.0/16.0 }, { 6.0/16.0, 7.0/16.0 }, {-7.0/16.0, -8.0/16.0 }, - }; - - const tSamplePos* sampleLoc = nullptr; - int numSamples = count; - - switch (count) { - case 2: sampleLoc = pos2; break; - case 4: sampleLoc = pos4; break; - case 8: sampleLoc = pos8; break; - case 16: sampleLoc = pos16; break; - default: - sampleLoc = pos1; - numSamples = 1; - } - - TConstUnionArray* values = new TConstUnionArray(numSamples*2); - - for (int pos=0; posgetAsOperator()) - return; - - // Sampler return must always be a vec4, but we can construct a shorter vector or a structure from it. 
- const auto convertReturn = [&loc, &node, this](TIntermTyped* result, const TSampler& sampler) -> TIntermTyped* { - result->setType(TType(node->getType().getBasicType(), EvqTemporary, node->getVectorSize())); - - TIntermTyped* convertedResult = nullptr; - - TType retType; - getTextureReturnType(sampler, retType); - - if (retType.isStruct()) { - // For type convenience, conversionAggregate points to the convertedResult (we know it's an aggregate here) - TIntermAggregate* conversionAggregate = new TIntermAggregate; - convertedResult = conversionAggregate; - - // Convert vector output to return structure. We will need a temp symbol to copy the results to. - TVariable* structVar = makeInternalVariable("@sampleStructTemp", retType); - - // We also need a temp symbol to hold the result of the texture. We don't want to re-fetch the - // sample each time we'll index into the result, so we'll copy to this, and index into the copy. - TVariable* sampleShadow = makeInternalVariable("@sampleResultShadow", result->getType()); - - // Initial copy from texture to our sample result shadow. - TIntermTyped* shadowCopy = intermediate.addAssign(EOpAssign, intermediate.addSymbol(*sampleShadow, loc), - result, loc); - - conversionAggregate->getSequence().push_back(shadowCopy); - - unsigned vec4Pos = 0; - - for (unsigned m = 0; m < unsigned(retType.getStruct()->size()); ++m) { - const TType memberType(retType, m); // dereferenced type of the member we're about to assign. - - // Check for bad struct members. This should have been caught upstream. Complain, because - // wwe don't know what to do with it. This algorithm could be generalized to handle - // other things, e.g, sub-structures, but HLSL doesn't allow them. - if (!memberType.isVector() && !memberType.isScalar()) { - error(loc, "expected: scalar or vector type in texture structure", "", ""); - return nullptr; - } - - // Index into the struct variable to find the member to assign. - TIntermTyped* structMember = intermediate.addIndex(EOpIndexDirectStruct, - intermediate.addSymbol(*structVar, loc), - intermediate.addConstantUnion(m, loc), loc); - - structMember->setType(memberType); - - // Assign each component of (possible) vector in struct member. - for (int component = 0; component < memberType.getVectorSize(); ++component) { - TIntermTyped* vec4Member = intermediate.addIndex(EOpIndexDirect, - intermediate.addSymbol(*sampleShadow, loc), - intermediate.addConstantUnion(vec4Pos++, loc), loc); - vec4Member->setType(TType(memberType.getBasicType(), EvqTemporary, 1)); - - TIntermTyped* memberAssign = nullptr; - - if (memberType.isVector()) { - // Vector member: we need to create an access chain to the vector component. - - TIntermTyped* structVecComponent = intermediate.addIndex(EOpIndexDirect, structMember, - intermediate.addConstantUnion(component, loc), loc); - - memberAssign = intermediate.addAssign(EOpAssign, structVecComponent, vec4Member, loc); - } else { - // Scalar member: we can assign to it directly. - memberAssign = intermediate.addAssign(EOpAssign, structMember, vec4Member, loc); - } - - - conversionAggregate->getSequence().push_back(memberAssign); - } - } - - // Add completed variable so the expression results in the whole struct value we just built. - conversionAggregate->getSequence().push_back(intermediate.addSymbol(*structVar, loc)); - - // Make it a sequence. - intermediate.setAggregateOperator(conversionAggregate, EOpSequence, retType, loc); - } else { - // vector clamp the output if template vector type is smaller than sample result. 
- if (retType.getVectorSize() < node->getVectorSize()) { - // Too many components. Construct shorter vector from it. - const TOperator op = intermediate.mapTypeToConstructorOp(retType); - - convertedResult = constructBuiltIn(retType, op, result, loc, false); - } else { - // Enough components. Use directly. - convertedResult = result; - } - } - - convertedResult->setLoc(loc); - return convertedResult; - }; - - const TOperator op = node->getAsOperator()->getOp(); - const TIntermAggregate* argAggregate = arguments ? arguments->getAsAggregate() : nullptr; - - // Bail out if not a sampler method. - // Note though this is odd to do before checking the op, because the op - // could be something that takes the arguments, and the function in question - // takes the result of the op. So, this is not the final word. - if (arguments != nullptr) { - if (argAggregate == nullptr) { - if (arguments->getAsTyped()->getBasicType() != EbtSampler) - return; - } else { - if (argAggregate->getSequence().size() == 0 || - argAggregate->getSequence()[0]->getAsTyped()->getBasicType() != EbtSampler) - return; - } - } - - switch (op) { - // **** DX9 intrinsics: **** - case EOpTexture: - { - // Texture with ddx & ddy is really gradient form in HLSL - if (argAggregate->getSequence().size() == 4) - node->getAsAggregate()->setOperator(EOpTextureGrad); - - break; - } - - case EOpTextureBias: - { - TIntermTyped* arg0 = argAggregate->getSequence()[0]->getAsTyped(); // sampler - TIntermTyped* arg1 = argAggregate->getSequence()[1]->getAsTyped(); // coord - - // HLSL puts bias in W component of coordinate. We extract it and add it to - // the argument list, instead - TIntermTyped* w = intermediate.addConstantUnion(3, loc, true); - TIntermTyped* bias = intermediate.addIndex(EOpIndexDirect, arg1, w, loc); - - TOperator constructOp = EOpNull; - const TSampler& sampler = arg0->getType().getSampler(); - - switch (sampler.dim) { - case Esd1D: constructOp = EOpConstructFloat; break; // 1D - case Esd2D: constructOp = EOpConstructVec2; break; // 2D - case Esd3D: constructOp = EOpConstructVec3; break; // 3D - case EsdCube: constructOp = EOpConstructVec3; break; // also 3D - default: break; - } - - TIntermAggregate* constructCoord = new TIntermAggregate(constructOp); - constructCoord->getSequence().push_back(arg1); - constructCoord->setLoc(loc); - - // The input vector should never be less than 2, since there's always a bias. - // The max is for safety, and should be a no-op. - constructCoord->setType(TType(arg1->getBasicType(), EvqTemporary, std::max(arg1->getVectorSize() - 1, 0))); - - TIntermAggregate* tex = new TIntermAggregate(EOpTexture); - tex->getSequence().push_back(arg0); // sampler - tex->getSequence().push_back(constructCoord); // coordinate - tex->getSequence().push_back(bias); // bias - - node = convertReturn(tex, sampler); - - break; - } - - // **** DX10 methods: **** - case EOpMethodSample: // fall through - case EOpMethodSampleBias: // ... 
- { - TIntermTyped* argTex = argAggregate->getSequence()[0]->getAsTyped(); - TIntermTyped* argSamp = argAggregate->getSequence()[1]->getAsTyped(); - TIntermTyped* argCoord = argAggregate->getSequence()[2]->getAsTyped(); - TIntermTyped* argBias = nullptr; - TIntermTyped* argOffset = nullptr; - const TSampler& sampler = argTex->getType().getSampler(); - - int nextArg = 3; - - if (op == EOpMethodSampleBias) // SampleBias has a bias arg - argBias = argAggregate->getSequence()[nextArg++]->getAsTyped(); - - TOperator textureOp = EOpTexture; - - if ((int)argAggregate->getSequence().size() == (nextArg+1)) { // last parameter is offset form - textureOp = EOpTextureOffset; - argOffset = argAggregate->getSequence()[nextArg++]->getAsTyped(); - } - - TIntermAggregate* txcombine = handleSamplerTextureCombine(loc, argTex, argSamp); - - TIntermAggregate* txsample = new TIntermAggregate(textureOp); - txsample->getSequence().push_back(txcombine); - txsample->getSequence().push_back(argCoord); - - if (argBias != nullptr) - txsample->getSequence().push_back(argBias); - - if (argOffset != nullptr) - txsample->getSequence().push_back(argOffset); - - node = convertReturn(txsample, sampler); - - break; - } - - case EOpMethodSampleGrad: // ... - { - TIntermTyped* argTex = argAggregate->getSequence()[0]->getAsTyped(); - TIntermTyped* argSamp = argAggregate->getSequence()[1]->getAsTyped(); - TIntermTyped* argCoord = argAggregate->getSequence()[2]->getAsTyped(); - TIntermTyped* argDDX = argAggregate->getSequence()[3]->getAsTyped(); - TIntermTyped* argDDY = argAggregate->getSequence()[4]->getAsTyped(); - TIntermTyped* argOffset = nullptr; - const TSampler& sampler = argTex->getType().getSampler(); - - TOperator textureOp = EOpTextureGrad; - - if (argAggregate->getSequence().size() == 6) { // last parameter is offset form - textureOp = EOpTextureGradOffset; - argOffset = argAggregate->getSequence()[5]->getAsTyped(); - } - - TIntermAggregate* txcombine = handleSamplerTextureCombine(loc, argTex, argSamp); - - TIntermAggregate* txsample = new TIntermAggregate(textureOp); - txsample->getSequence().push_back(txcombine); - txsample->getSequence().push_back(argCoord); - txsample->getSequence().push_back(argDDX); - txsample->getSequence().push_back(argDDY); - - if (argOffset != nullptr) - txsample->getSequence().push_back(argOffset); - - node = convertReturn(txsample, sampler); - - break; - } - - case EOpMethodGetDimensions: - { - // AST returns a vector of results, which we break apart component-wise into - // separate values to assign to the HLSL method's outputs, ala: - // tx . 
GetDimensions(width, height); - // float2 sizeQueryTemp = EOpTextureQuerySize - // width = sizeQueryTemp.X; - // height = sizeQueryTemp.Y; - - TIntermTyped* argTex = argAggregate->getSequence()[0]->getAsTyped(); - const TType& texType = argTex->getType(); - - assert(texType.getBasicType() == EbtSampler); - - const TSampler& sampler = texType.getSampler(); - const TSamplerDim dim = sampler.dim; - const bool isImage = sampler.isImage(); - const bool isMs = sampler.isMultiSample(); - const int numArgs = (int)argAggregate->getSequence().size(); - - int numDims = 0; - - switch (dim) { - case Esd1D: numDims = 1; break; // W - case Esd2D: numDims = 2; break; // W, H - case Esd3D: numDims = 3; break; // W, H, D - case EsdCube: numDims = 2; break; // W, H (cube) - case EsdBuffer: numDims = 1; break; // W (buffers) - case EsdRect: numDims = 2; break; // W, H (rect) - default: - assert(0 && "unhandled texture dimension"); - } - - // Arrayed adds another dimension for the number of array elements - if (sampler.isArrayed()) - ++numDims; - - // Establish whether the method itself is querying mip levels. This can be false even - // if the underlying query requires a MIP level, due to the available HLSL method overloads. - const bool mipQuery = (numArgs > (numDims + 1 + (isMs ? 1 : 0))); - - // Establish whether we must use the LOD form of query (even if the method did not supply a mip level to query). - // True if: - // 1. 1D/2D/3D/Cube AND multisample==0 AND NOT image (those can be sent to the non-LOD query) - // or, - // 2. There is a LOD (because the non-LOD query cannot be used in that case, per spec) - const bool mipRequired = - ((dim == Esd1D || dim == Esd2D || dim == Esd3D || dim == EsdCube) && !isMs && !isImage) || // 1... - mipQuery; // 2... - - // AST assumes integer return. Will be converted to float if required. - TIntermAggregate* sizeQuery = new TIntermAggregate(isImage ? EOpImageQuerySize : EOpTextureQuerySize); - sizeQuery->getSequence().push_back(argTex); - - // If we're building an LOD query, add the LOD. - if (mipRequired) { - // If the base HLSL query had no MIP level given, use level 0. - TIntermTyped* queryLod = mipQuery ? argAggregate->getSequence()[1]->getAsTyped() : - intermediate.addConstantUnion(0, loc, true); - sizeQuery->getSequence().push_back(queryLod); - } - - sizeQuery->setType(TType(EbtUint, EvqTemporary, numDims)); - sizeQuery->setLoc(loc); - - // Return value from size query - TVariable* tempArg = makeInternalVariable("sizeQueryTemp", sizeQuery->getType()); - tempArg->getWritableType().getQualifier().makeTemporary(); - TIntermTyped* sizeQueryAssign = intermediate.addAssign(EOpAssign, - intermediate.addSymbol(*tempArg, loc), - sizeQuery, loc); - - // Compound statement for assigning outputs - TIntermAggregate* compoundStatement = intermediate.makeAggregate(sizeQueryAssign, loc); - // Index of first output parameter - const int outParamBase = mipQuery ? 
2 : 1; - - for (int compNum = 0; compNum < numDims; ++compNum) { - TIntermTyped* indexedOut = nullptr; - TIntermSymbol* sizeQueryReturn = intermediate.addSymbol(*tempArg, loc); - - if (numDims > 1) { - TIntermTyped* component = intermediate.addConstantUnion(compNum, loc, true); - indexedOut = intermediate.addIndex(EOpIndexDirect, sizeQueryReturn, component, loc); - indexedOut->setType(TType(EbtUint, EvqTemporary, 1)); - indexedOut->setLoc(loc); - } else { - indexedOut = sizeQueryReturn; - } - - TIntermTyped* outParam = argAggregate->getSequence()[outParamBase + compNum]->getAsTyped(); - TIntermTyped* compAssign = intermediate.addAssign(EOpAssign, outParam, indexedOut, loc); - - compoundStatement = intermediate.growAggregate(compoundStatement, compAssign); - } - - // handle mip level parameter - if (mipQuery) { - TIntermTyped* outParam = argAggregate->getSequence()[outParamBase + numDims]->getAsTyped(); - - TIntermAggregate* levelsQuery = new TIntermAggregate(EOpTextureQueryLevels); - levelsQuery->getSequence().push_back(argTex); - levelsQuery->setType(TType(EbtUint, EvqTemporary, 1)); - levelsQuery->setLoc(loc); - - TIntermTyped* compAssign = intermediate.addAssign(EOpAssign, outParam, levelsQuery, loc); - compoundStatement = intermediate.growAggregate(compoundStatement, compAssign); - } - - // 2DMS formats query # samples, which needs a different query op - if (sampler.isMultiSample()) { - TIntermTyped* outParam = argAggregate->getSequence()[outParamBase + numDims]->getAsTyped(); - - TIntermAggregate* samplesQuery = new TIntermAggregate(EOpImageQuerySamples); - samplesQuery->getSequence().push_back(argTex); - samplesQuery->setType(TType(EbtUint, EvqTemporary, 1)); - samplesQuery->setLoc(loc); - - TIntermTyped* compAssign = intermediate.addAssign(EOpAssign, outParam, samplesQuery, loc); - compoundStatement = intermediate.growAggregate(compoundStatement, compAssign); - } - - compoundStatement->setOperator(EOpSequence); - compoundStatement->setLoc(loc); - compoundStatement->setType(TType(EbtVoid)); - - node = compoundStatement; - - break; - } - - case EOpMethodSampleCmp: // fall through... - case EOpMethodSampleCmpLevelZero: - { - TIntermTyped* argTex = argAggregate->getSequence()[0]->getAsTyped(); - TIntermTyped* argSamp = argAggregate->getSequence()[1]->getAsTyped(); - TIntermTyped* argCoord = argAggregate->getSequence()[2]->getAsTyped(); - TIntermTyped* argCmpVal = argAggregate->getSequence()[3]->getAsTyped(); - TIntermTyped* argOffset = nullptr; - - // Sampler argument should be a sampler. - if (argSamp->getType().getBasicType() != EbtSampler) { - error(loc, "expected: sampler type", "", ""); - return; - } - - // Sampler should be a SamplerComparisonState - if (! argSamp->getType().getSampler().isShadow()) { - error(loc, "expected: SamplerComparisonState", "", ""); - return; - } - - // optional offset value - if (argAggregate->getSequence().size() > 4) - argOffset = argAggregate->getSequence()[4]->getAsTyped(); - - const int coordDimWithCmpVal = argCoord->getType().getVectorSize() + 1; // +1 for cmp - - // AST wants comparison value as one of the texture coordinates - TOperator constructOp = EOpNull; - switch (coordDimWithCmpVal) { - // 1D can't happen: there's always at least 1 coordinate dimension + 1 cmp val - case 2: constructOp = EOpConstructVec2; break; - case 3: constructOp = EOpConstructVec3; break; - case 4: constructOp = EOpConstructVec4; break; - case 5: constructOp = EOpConstructVec4; break; // cubeArrayShadow, cmp value is separate arg. 
-        default: assert(0); break;
-        }
-
-        TIntermAggregate* coordWithCmp = new TIntermAggregate(constructOp);
-        coordWithCmp->getSequence().push_back(argCoord);
-        if (coordDimWithCmpVal != 5) // cube array shadow is special.
-            coordWithCmp->getSequence().push_back(argCmpVal);
-        coordWithCmp->setLoc(loc);
-        coordWithCmp->setType(TType(argCoord->getBasicType(), EvqTemporary, std::min(coordDimWithCmpVal, 4)));
-
-        TOperator textureOp = (op == EOpMethodSampleCmpLevelZero ? EOpTextureLod : EOpTexture);
-        if (argOffset != nullptr)
-            textureOp = (op == EOpMethodSampleCmpLevelZero ? EOpTextureLodOffset : EOpTextureOffset);
-
-        // Create combined sampler & texture op
-        TIntermAggregate* txcombine = handleSamplerTextureCombine(loc, argTex, argSamp);
-        TIntermAggregate* txsample = new TIntermAggregate(textureOp);
-        txsample->getSequence().push_back(txcombine);
-        txsample->getSequence().push_back(coordWithCmp);
-
-        if (coordDimWithCmpVal == 5) // cube array shadow is special: cmp val follows coord.
-            txsample->getSequence().push_back(argCmpVal);
-
-        // the LevelZero form uses 0 as an explicit LOD
-        if (op == EOpMethodSampleCmpLevelZero)
-            txsample->getSequence().push_back(intermediate.addConstantUnion(0.0, EbtFloat, loc, true));
-
-        // Add offset if present
-        if (argOffset != nullptr)
-            txsample->getSequence().push_back(argOffset);
-
-        txsample->setType(node->getType());
-        txsample->setLoc(loc);
-        node = txsample;
-
-        break;
-    }
-
-    case EOpMethodLoad:
-    {
-        TIntermTyped* argTex = argAggregate->getSequence()[0]->getAsTyped();
-        TIntermTyped* argCoord = argAggregate->getSequence()[1]->getAsTyped();
-        TIntermTyped* argOffset = nullptr;
-        TIntermTyped* lodComponent = nullptr;
-        TIntermTyped* coordSwizzle = nullptr;
-
-        const TSampler& sampler = argTex->getType().getSampler();
-        const bool isMS = sampler.isMultiSample();
-        const bool isBuffer = sampler.dim == EsdBuffer;
-        const bool isImage = sampler.isImage();
-        const TBasicType coordBaseType = argCoord->getType().getBasicType();
-
-        // Last component of coordinate is the mip level, for non-MS. We separate them here:
-        if (isMS || isBuffer || isImage) {
-            // MS, Buffer, and Image have no LOD
-            coordSwizzle = argCoord;
-        } else {
-            // Extract coordinate
-            int swizzleSize = argCoord->getType().getVectorSize() - (isMS ? 0 : 1);
-            TSwizzleSelectors<TVectorSelector> coordFields;
-            for (int i = 0; i < swizzleSize; ++i)
-                coordFields.push_back(i);
-            TIntermTyped* coordIdx = intermediate.addSwizzle(coordFields, loc);
-            coordSwizzle = intermediate.addIndex(EOpVectorSwizzle, argCoord, coordIdx, loc);
-            coordSwizzle->setType(TType(coordBaseType, EvqTemporary, coordFields.size()));
-
-            // Extract LOD
-            TIntermTyped* lodIdx = intermediate.addConstantUnion(coordFields.size(), loc, true);
-            lodComponent = intermediate.addIndex(EOpIndexDirect, argCoord, lodIdx, loc);
-            lodComponent->setType(TType(coordBaseType, EvqTemporary, 1));
-        }
-
-        const int numArgs = (int)argAggregate->getSequence().size();
-        const bool hasOffset = ((!isMS && numArgs == 3) || (isMS && numArgs == 4));
-
-        // Create texel fetch
-        const TOperator fetchOp = (isImage ? EOpImageLoad :
-                                   hasOffset ?
EOpTextureFetchOffset : - EOpTextureFetch); - TIntermAggregate* txfetch = new TIntermAggregate(fetchOp); - - // Build up the fetch - txfetch->getSequence().push_back(argTex); - txfetch->getSequence().push_back(coordSwizzle); - - if (isMS) { - // add 2DMS sample index - TIntermTyped* argSampleIdx = argAggregate->getSequence()[2]->getAsTyped(); - txfetch->getSequence().push_back(argSampleIdx); - } else if (isBuffer) { - // Nothing else to do for buffers. - } else if (isImage) { - // Nothing else to do for images. - } else { - // 2DMS and buffer have no LOD, but everything else does. - txfetch->getSequence().push_back(lodComponent); - } - - // Obtain offset arg, if there is one. - if (hasOffset) { - const int offsetPos = (isMS ? 3 : 2); - argOffset = argAggregate->getSequence()[offsetPos]->getAsTyped(); - txfetch->getSequence().push_back(argOffset); - } - - node = convertReturn(txfetch, sampler); - - break; - } - - case EOpMethodSampleLevel: - { - TIntermTyped* argTex = argAggregate->getSequence()[0]->getAsTyped(); - TIntermTyped* argSamp = argAggregate->getSequence()[1]->getAsTyped(); - TIntermTyped* argCoord = argAggregate->getSequence()[2]->getAsTyped(); - TIntermTyped* argLod = argAggregate->getSequence()[3]->getAsTyped(); - TIntermTyped* argOffset = nullptr; - const TSampler& sampler = argTex->getType().getSampler(); - - const int numArgs = (int)argAggregate->getSequence().size(); - - if (numArgs == 5) // offset, if present - argOffset = argAggregate->getSequence()[4]->getAsTyped(); - - const TOperator textureOp = (argOffset == nullptr ? EOpTextureLod : EOpTextureLodOffset); - TIntermAggregate* txsample = new TIntermAggregate(textureOp); - - TIntermAggregate* txcombine = handleSamplerTextureCombine(loc, argTex, argSamp); - - txsample->getSequence().push_back(txcombine); - txsample->getSequence().push_back(argCoord); - txsample->getSequence().push_back(argLod); - - if (argOffset != nullptr) - txsample->getSequence().push_back(argOffset); - - node = convertReturn(txsample, sampler); - - break; - } - - case EOpMethodGather: - { - TIntermTyped* argTex = argAggregate->getSequence()[0]->getAsTyped(); - TIntermTyped* argSamp = argAggregate->getSequence()[1]->getAsTyped(); - TIntermTyped* argCoord = argAggregate->getSequence()[2]->getAsTyped(); - TIntermTyped* argOffset = nullptr; - - // Offset is optional - if (argAggregate->getSequence().size() > 3) - argOffset = argAggregate->getSequence()[3]->getAsTyped(); - - const TOperator textureOp = (argOffset == nullptr ? EOpTextureGather : EOpTextureGatherOffset); - TIntermAggregate* txgather = new TIntermAggregate(textureOp); - - TIntermAggregate* txcombine = handleSamplerTextureCombine(loc, argTex, argSamp); - - txgather->getSequence().push_back(txcombine); - txgather->getSequence().push_back(argCoord); - // Offset if not given is implicitly channel 0 (red) - - if (argOffset != nullptr) - txgather->getSequence().push_back(argOffset); - - txgather->setType(node->getType()); - txgather->setLoc(loc); - node = txgather; - - break; - } - - case EOpMethodGatherRed: // fall through... - case EOpMethodGatherGreen: // ... - case EOpMethodGatherBlue: // ... - case EOpMethodGatherAlpha: // ... - case EOpMethodGatherCmpRed: // ... - case EOpMethodGatherCmpGreen: // ... - case EOpMethodGatherCmpBlue: // ... - case EOpMethodGatherCmpAlpha: // ... 
- { - int channel = 0; // the channel we are gathering - int cmpValues = 0; // 1 if there is a compare value (handier than a bool below) - - switch (op) { - case EOpMethodGatherCmpRed: cmpValues = 1; // fall through - case EOpMethodGatherRed: channel = 0; break; - case EOpMethodGatherCmpGreen: cmpValues = 1; // fall through - case EOpMethodGatherGreen: channel = 1; break; - case EOpMethodGatherCmpBlue: cmpValues = 1; // fall through - case EOpMethodGatherBlue: channel = 2; break; - case EOpMethodGatherCmpAlpha: cmpValues = 1; // fall through - case EOpMethodGatherAlpha: channel = 3; break; - default: assert(0); break; - } - - // For now, we have nothing to map the component-wise comparison forms - // to, because neither GLSL nor SPIR-V has such an opcode. Issue an - // unimplemented error instead. Most of the machinery is here if that - // should ever become available. However, red can be passed through - // to OpImageDrefGather. G/B/A cannot, because that opcode does not - // accept a component. - if (cmpValues != 0 && op != EOpMethodGatherCmpRed) { - error(loc, "unimplemented: component-level gather compare", "", ""); - return; - } - - int arg = 0; - - TIntermTyped* argTex = argAggregate->getSequence()[arg++]->getAsTyped(); - TIntermTyped* argSamp = argAggregate->getSequence()[arg++]->getAsTyped(); - TIntermTyped* argCoord = argAggregate->getSequence()[arg++]->getAsTyped(); - TIntermTyped* argOffset = nullptr; - TIntermTyped* argOffsets[4] = { nullptr, nullptr, nullptr, nullptr }; - // TIntermTyped* argStatus = nullptr; // TODO: residency - TIntermTyped* argCmp = nullptr; - - const TSamplerDim dim = argTex->getType().getSampler().dim; - - const int argSize = (int)argAggregate->getSequence().size(); - bool hasStatus = (argSize == (5+cmpValues) || argSize == (8+cmpValues)); - bool hasOffset1 = false; - bool hasOffset4 = false; - - // Sampler argument should be a sampler. - if (argSamp->getType().getBasicType() != EbtSampler) { - error(loc, "expected: sampler type", "", ""); - return; - } - - // Cmp forms require SamplerComparisonState - if (cmpValues > 0 && ! argSamp->getType().getSampler().isShadow()) { - error(loc, "expected: SamplerComparisonState", "", ""); - return; - } - - // Only 2D forms can have offsets. Discover if we have 0, 1 or 4 offsets. 
- if (dim == Esd2D) { - hasOffset1 = (argSize == (4+cmpValues) || argSize == (5+cmpValues)); - hasOffset4 = (argSize == (7+cmpValues) || argSize == (8+cmpValues)); - } - - assert(!(hasOffset1 && hasOffset4)); - - TOperator textureOp = EOpTextureGather; - - // Compare forms have compare value - if (cmpValues != 0) - argCmp = argOffset = argAggregate->getSequence()[arg++]->getAsTyped(); - - // Some forms have single offset - if (hasOffset1) { - textureOp = EOpTextureGatherOffset; // single offset form - argOffset = argAggregate->getSequence()[arg++]->getAsTyped(); - } - - // Some forms have 4 gather offsets - if (hasOffset4) { - textureOp = EOpTextureGatherOffsets; // note plural, for 4 offset form - for (int offsetNum = 0; offsetNum < 4; ++offsetNum) - argOffsets[offsetNum] = argAggregate->getSequence()[arg++]->getAsTyped(); - } - - // Residency status - if (hasStatus) { - // argStatus = argAggregate->getSequence()[arg++]->getAsTyped(); - error(loc, "unimplemented: residency status", "", ""); - return; - } - - TIntermAggregate* txgather = new TIntermAggregate(textureOp); - TIntermAggregate* txcombine = handleSamplerTextureCombine(loc, argTex, argSamp); - - TIntermTyped* argChannel = intermediate.addConstantUnion(channel, loc, true); - - txgather->getSequence().push_back(txcombine); - txgather->getSequence().push_back(argCoord); - - // AST wants an array of 4 offsets, where HLSL has separate args. Here - // we construct an array from the separate args. - if (hasOffset4) { - TType arrayType(EbtInt, EvqTemporary, 2); - TArraySizes arraySizes; - arraySizes.addInnerSize(4); - arrayType.newArraySizes(arraySizes); - - TIntermAggregate* initList = new TIntermAggregate(EOpNull); - - for (int offsetNum = 0; offsetNum < 4; ++offsetNum) - initList->getSequence().push_back(argOffsets[offsetNum]); - - argOffset = addConstructor(loc, initList, arrayType); - } - - // Add comparison value if we have one - if (argCmp != nullptr) - txgather->getSequence().push_back(argCmp); - - // Add offset (either 1, or an array of 4) if we have one - if (argOffset != nullptr) - txgather->getSequence().push_back(argOffset); - - // Add channel value if the sampler is not shadow - if (! 
argSamp->getType().getSampler().isShadow()) - txgather->getSequence().push_back(argChannel); - - txgather->setType(node->getType()); - txgather->setLoc(loc); - node = txgather; - - break; - } - - case EOpMethodCalculateLevelOfDetail: - case EOpMethodCalculateLevelOfDetailUnclamped: - { - TIntermTyped* argTex = argAggregate->getSequence()[0]->getAsTyped(); - TIntermTyped* argSamp = argAggregate->getSequence()[1]->getAsTyped(); - TIntermTyped* argCoord = argAggregate->getSequence()[2]->getAsTyped(); - - TIntermAggregate* txquerylod = new TIntermAggregate(EOpTextureQueryLod); - - TIntermAggregate* txcombine = handleSamplerTextureCombine(loc, argTex, argSamp); - txquerylod->getSequence().push_back(txcombine); - txquerylod->getSequence().push_back(argCoord); - - TIntermTyped* lodComponent = intermediate.addConstantUnion(0, loc, true); - TIntermTyped* lodComponentIdx = intermediate.addIndex(EOpIndexDirect, txquerylod, lodComponent, loc); - lodComponentIdx->setType(TType(EbtFloat, EvqTemporary, 1)); - - node = lodComponentIdx; - - // We cannot currently obtain the unclamped LOD - if (op == EOpMethodCalculateLevelOfDetailUnclamped) - error(loc, "unimplemented: CalculateLevelOfDetailUnclamped", "", ""); - - break; - } - - case EOpMethodGetSamplePosition: - { - // TODO: this entire decomposition exists because there is not yet a way to query - // the sample position directly through SPIR-V. Instead, we return fixed sample - // positions for common cases. *** If the sample positions are set differently, - // this will be wrong. *** - - TIntermTyped* argTex = argAggregate->getSequence()[0]->getAsTyped(); - TIntermTyped* argSampIdx = argAggregate->getSequence()[1]->getAsTyped(); - - TIntermAggregate* samplesQuery = new TIntermAggregate(EOpImageQuerySamples); - samplesQuery->getSequence().push_back(argTex); - samplesQuery->setType(TType(EbtUint, EvqTemporary, 1)); - samplesQuery->setLoc(loc); - - TIntermAggregate* compoundStatement = nullptr; - - TVariable* outSampleCount = makeInternalVariable("@sampleCount", TType(EbtUint)); - outSampleCount->getWritableType().getQualifier().makeTemporary(); - TIntermTyped* compAssign = intermediate.addAssign(EOpAssign, intermediate.addSymbol(*outSampleCount, loc), - samplesQuery, loc); - compoundStatement = intermediate.growAggregate(compoundStatement, compAssign); - - TIntermTyped* idxtest[4]; - - // Create tests against 2, 4, 8, and 16 sample values - int count = 0; - for (int val = 2; val <= 16; val *= 2) - idxtest[count++] = - intermediate.addBinaryNode(EOpEqual, - intermediate.addSymbol(*outSampleCount, loc), - intermediate.addConstantUnion(val, loc), - loc, TType(EbtBool)); - - const TOperator idxOp = (argSampIdx->getQualifier().storage == EvqConst) ? EOpIndexDirect : EOpIndexIndirect; - - // Create index ops into position arrays given sample index. - // TODO: should it be clamped? - TIntermTyped* index[4]; - count = 0; - for (int val = 2; val <= 16; val *= 2) { - index[count] = intermediate.addIndex(idxOp, getSamplePosArray(val), argSampIdx, loc); - index[count++]->setType(TType(EbtFloat, EvqTemporary, 2)); - } - - // Create expression as: - // (sampleCount == 2) ? pos2[idx] : - // (sampleCount == 4) ? pos4[idx] : - // (sampleCount == 8) ? pos8[idx] : - // (sampleCount == 16) ? 
pos16[idx] : float2(0,0); - TIntermTyped* test = - intermediate.addSelection(idxtest[0], index[0], - intermediate.addSelection(idxtest[1], index[1], - intermediate.addSelection(idxtest[2], index[2], - intermediate.addSelection(idxtest[3], index[3], - getSamplePosArray(1), loc), loc), loc), loc); - - compoundStatement = intermediate.growAggregate(compoundStatement, test); - compoundStatement->setOperator(EOpSequence); - compoundStatement->setLoc(loc); - compoundStatement->setType(TType(EbtFloat, EvqTemporary, 2)); - - node = compoundStatement; - - break; - } - - default: - break; // most pass through unchanged - } -} - -// -// Decompose geometry shader methods -// -void HlslParseContext::decomposeGeometryMethods(const TSourceLoc& loc, TIntermTyped*& node, TIntermNode* arguments) -{ - if (node == nullptr || !node->getAsOperator()) - return; - - const TOperator op = node->getAsOperator()->getOp(); - const TIntermAggregate* argAggregate = arguments ? arguments->getAsAggregate() : nullptr; - - switch (op) { - case EOpMethodAppend: - if (argAggregate) { - // Don't emit these for non-GS stage, since we won't have the gsStreamOutput symbol. - if (language != EShLangGeometry) { - node = nullptr; - return; - } - - TIntermAggregate* sequence = nullptr; - TIntermAggregate* emit = new TIntermAggregate(EOpEmitVertex); - - emit->setLoc(loc); - emit->setType(TType(EbtVoid)); - - // find the matching output - if (gsStreamOutput == nullptr) { - error(loc, "unable to find output symbol for Append()", "", ""); - return; - } - - sequence = intermediate.growAggregate(sequence, - handleAssign(loc, EOpAssign, - intermediate.addSymbol(*gsStreamOutput, loc), - argAggregate->getSequence()[1]->getAsTyped()), - loc); - - sequence = intermediate.growAggregate(sequence, emit); - - sequence->setOperator(EOpSequence); - sequence->setLoc(loc); - sequence->setType(TType(EbtVoid)); - node = sequence; - } - break; - - case EOpMethodRestartStrip: - { - // Don't emit these for non-GS stage, since we won't have the gsStreamOutput symbol. - if (language != EShLangGeometry) { - node = nullptr; - return; - } - - TIntermAggregate* cut = new TIntermAggregate(EOpEndPrimitive); - cut->setLoc(loc); - cut->setType(TType(EbtVoid)); - node = cut; - } - break; - - default: - break; // most pass through unchanged - } -} - -// -// Optionally decompose intrinsics to AST opcodes. -// -void HlslParseContext::decomposeIntrinsic(const TSourceLoc& loc, TIntermTyped*& node, TIntermNode* arguments) -{ - // Helper to find image data for image atomics: - // OpImageLoad(image[idx]) - // We take the image load apart and add its params to the atomic op aggregate node - const auto imageAtomicParams = [this, &loc, &node](TIntermAggregate* atomic, TIntermTyped* load) { - TIntermAggregate* loadOp = load->getAsAggregate(); - if (loadOp == nullptr) { - error(loc, "unknown image type in atomic operation", "", ""); - node = nullptr; - return; - } - - atomic->getSequence().push_back(loadOp->getSequence()[0]); - atomic->getSequence().push_back(loadOp->getSequence()[1]); - }; - - // Return true if this is an imageLoad, which we will change to an image atomic. - const auto isImageParam = [](TIntermTyped* image) -> bool { - TIntermAggregate* imageAggregate = image->getAsAggregate(); - return imageAggregate != nullptr && imageAggregate->getOp() == EOpImageLoad; - }; - - // HLSL intrinsics can be pass through to native AST opcodes, or decomposed here to existing AST - // opcodes for compatibility with existing software stacks. 
- static const bool decomposeHlslIntrinsics = true; - - if (!decomposeHlslIntrinsics || !node || !node->getAsOperator()) - return; - - const TIntermAggregate* argAggregate = arguments ? arguments->getAsAggregate() : nullptr; - TIntermUnary* fnUnary = node->getAsUnaryNode(); - const TOperator op = node->getAsOperator()->getOp(); - - switch (op) { - case EOpGenMul: - { - // mul(a,b) -> MatrixTimesMatrix, MatrixTimesVector, MatrixTimesScalar, VectorTimesScalar, Dot, Mul - // Since we are treating HLSL rows like GLSL columns (the first matrix indirection), - // we must reverse the operand order here. Hence, arg0 gets sequence[1], etc. - TIntermTyped* arg0 = argAggregate->getSequence()[1]->getAsTyped(); - TIntermTyped* arg1 = argAggregate->getSequence()[0]->getAsTyped(); - - if (arg0->isVector() && arg1->isVector()) { // vec * vec - node->getAsAggregate()->setOperator(EOpDot); - } else { - node = handleBinaryMath(loc, "mul", EOpMul, arg0, arg1); - } - - break; - } - - case EOpRcp: - { - // rcp(a) -> 1 / a - TIntermTyped* arg0 = fnUnary->getOperand(); - TBasicType type0 = arg0->getBasicType(); - TIntermTyped* one = intermediate.addConstantUnion(1, type0, loc, true); - node = handleBinaryMath(loc, "rcp", EOpDiv, one, arg0); - - break; - } - - case EOpAny: // fall through - case EOpAll: - { - TIntermTyped* typedArg = arguments->getAsTyped(); - - // HLSL allows float/etc types here, and the SPIR-V opcode requires a bool. - // We'll convert here. Note that for efficiency, we could add a smarter - // decomposition for some type cases, e.g, maybe by decomposing a dot product. - if (typedArg->getType().getBasicType() != EbtBool) { - const TType boolType(EbtBool, EvqTemporary, - typedArg->getVectorSize(), - typedArg->getMatrixCols(), - typedArg->getMatrixRows(), - typedArg->isVector()); - - typedArg = intermediate.addConversion(EOpConstructBool, boolType, typedArg); - node->getAsUnaryNode()->setOperand(typedArg); - } - - break; - } - - case EOpSaturate: - { - // saturate(a) -> clamp(a,0,1) - TIntermTyped* arg0 = fnUnary->getOperand(); - TBasicType type0 = arg0->getBasicType(); - TIntermAggregate* clamp = new TIntermAggregate(EOpClamp); - - clamp->getSequence().push_back(arg0); - clamp->getSequence().push_back(intermediate.addConstantUnion(0, type0, loc, true)); - clamp->getSequence().push_back(intermediate.addConstantUnion(1, type0, loc, true)); - clamp->setLoc(loc); - clamp->setType(node->getType()); - clamp->getWritableType().getQualifier().makeTemporary(); - node = clamp; - - break; - } - - case EOpSinCos: - { - // sincos(a,b,c) -> b = sin(a), c = cos(a) - TIntermTyped* arg0 = argAggregate->getSequence()[0]->getAsTyped(); - TIntermTyped* arg1 = argAggregate->getSequence()[1]->getAsTyped(); - TIntermTyped* arg2 = argAggregate->getSequence()[2]->getAsTyped(); - - TIntermTyped* sinStatement = handleUnaryMath(loc, "sin", EOpSin, arg0); - TIntermTyped* cosStatement = handleUnaryMath(loc, "cos", EOpCos, arg0); - TIntermTyped* sinAssign = intermediate.addAssign(EOpAssign, arg1, sinStatement, loc); - TIntermTyped* cosAssign = intermediate.addAssign(EOpAssign, arg2, cosStatement, loc); - - TIntermAggregate* compoundStatement = intermediate.makeAggregate(sinAssign, loc); - compoundStatement = intermediate.growAggregate(compoundStatement, cosAssign); - compoundStatement->setOperator(EOpSequence); - compoundStatement->setLoc(loc); - compoundStatement->setType(TType(EbtVoid)); - - node = compoundStatement; - - break; - } - - case EOpClip: - { - // clip(a) -> if (any(a<0)) discard; - TIntermTyped* arg0 = 
fnUnary->getOperand(); - TBasicType type0 = arg0->getBasicType(); - TIntermTyped* compareNode = nullptr; - - // For non-scalars: per experiment with FXC compiler, discard if any component < 0. - if (!arg0->isScalar()) { - // component-wise compare: a < 0 - TIntermAggregate* less = new TIntermAggregate(EOpLessThan); - less->getSequence().push_back(arg0); - less->setLoc(loc); - - // make vec or mat of bool matching dimensions of input - less->setType(TType(EbtBool, EvqTemporary, - arg0->getType().getVectorSize(), - arg0->getType().getMatrixCols(), - arg0->getType().getMatrixRows(), - arg0->getType().isVector())); - - // calculate # of components for comparison const - const int constComponentCount = - std::max(arg0->getType().getVectorSize(), 1) * - std::max(arg0->getType().getMatrixCols(), 1) * - std::max(arg0->getType().getMatrixRows(), 1); - - TConstUnion zero; - zero.setDConst(0.0); - TConstUnionArray zeros(constComponentCount, zero); - - less->getSequence().push_back(intermediate.addConstantUnion(zeros, arg0->getType(), loc, true)); - - compareNode = intermediate.addBuiltInFunctionCall(loc, EOpAny, true, less, TType(EbtBool)); - } else { - TIntermTyped* zero = intermediate.addConstantUnion(0, type0, loc, true); - compareNode = handleBinaryMath(loc, "clip", EOpLessThan, arg0, zero); - } - - TIntermBranch* killNode = intermediate.addBranch(EOpKill, loc); - - node = new TIntermSelection(compareNode, killNode, nullptr); - node->setLoc(loc); - - break; - } - - case EOpLog10: - { - // log10(a) -> log2(a) * 0.301029995663981 (== 1/log2(10)) - TIntermTyped* arg0 = fnUnary->getOperand(); - TIntermTyped* log2 = handleUnaryMath(loc, "log2", EOpLog2, arg0); - TIntermTyped* base = intermediate.addConstantUnion(0.301029995663981f, EbtFloat, loc, true); - - node = handleBinaryMath(loc, "mul", EOpMul, log2, base); - - break; - } - - case EOpDst: - { - // dest.x = 1; - // dest.y = src0.y * src1.y; - // dest.z = src0.z; - // dest.w = src1.w; - - TIntermTyped* arg0 = argAggregate->getSequence()[0]->getAsTyped(); - TIntermTyped* arg1 = argAggregate->getSequence()[1]->getAsTyped(); - - TIntermTyped* y = intermediate.addConstantUnion(1, loc, true); - TIntermTyped* z = intermediate.addConstantUnion(2, loc, true); - TIntermTyped* w = intermediate.addConstantUnion(3, loc, true); - - TIntermTyped* src0y = intermediate.addIndex(EOpIndexDirect, arg0, y, loc); - TIntermTyped* src1y = intermediate.addIndex(EOpIndexDirect, arg1, y, loc); - TIntermTyped* src0z = intermediate.addIndex(EOpIndexDirect, arg0, z, loc); - TIntermTyped* src1w = intermediate.addIndex(EOpIndexDirect, arg1, w, loc); - - TIntermAggregate* dst = new TIntermAggregate(EOpConstructVec4); - - dst->getSequence().push_back(intermediate.addConstantUnion(1.0, EbtFloat, loc, true)); - dst->getSequence().push_back(handleBinaryMath(loc, "mul", EOpMul, src0y, src1y)); - dst->getSequence().push_back(src0z); - dst->getSequence().push_back(src1w); - dst->setType(TType(EbtFloat, EvqTemporary, 4)); - dst->setLoc(loc); - node = dst; - - break; - } - - case EOpInterlockedAdd: // optional last argument (if present) is assigned from return value - case EOpInterlockedMin: // ... - case EOpInterlockedMax: // ... - case EOpInterlockedAnd: // ... - case EOpInterlockedOr: // ... - case EOpInterlockedXor: // ... 
- case EOpInterlockedExchange: // always has output arg - { - TIntermTyped* arg0 = argAggregate->getSequence()[0]->getAsTyped(); // dest - TIntermTyped* arg1 = argAggregate->getSequence()[1]->getAsTyped(); // value - TIntermTyped* arg2 = nullptr; - - if (argAggregate->getSequence().size() > 2) - arg2 = argAggregate->getSequence()[2]->getAsTyped(); - - const bool isImage = isImageParam(arg0); - const TOperator atomicOp = mapAtomicOp(loc, op, isImage); - TIntermAggregate* atomic = new TIntermAggregate(atomicOp); - atomic->setType(arg0->getType()); - atomic->getWritableType().getQualifier().makeTemporary(); - atomic->setLoc(loc); - - if (isImage) { - // orig_value = imageAtomicOp(image, loc, data) - imageAtomicParams(atomic, arg0); - atomic->getSequence().push_back(arg1); - - if (argAggregate->getSequence().size() > 2) { - node = intermediate.addAssign(EOpAssign, arg2, atomic, loc); - } else { - node = atomic; // no assignment needed, as there was no out var. - } - } else { - // Normal memory variable: - // arg0 = mem, arg1 = data, arg2(optional,out) = orig_value - if (argAggregate->getSequence().size() > 2) { - // optional output param is present. return value goes to arg2. - atomic->getSequence().push_back(arg0); - atomic->getSequence().push_back(arg1); - - node = intermediate.addAssign(EOpAssign, arg2, atomic, loc); - } else { - // Set the matching operator. Since output is absent, this is all we need to do. - node->getAsAggregate()->setOperator(atomicOp); - } - } - - break; - } - - case EOpInterlockedCompareExchange: - { - TIntermTyped* arg0 = argAggregate->getSequence()[0]->getAsTyped(); // dest - TIntermTyped* arg1 = argAggregate->getSequence()[1]->getAsTyped(); // cmp - TIntermTyped* arg2 = argAggregate->getSequence()[2]->getAsTyped(); // value - TIntermTyped* arg3 = argAggregate->getSequence()[3]->getAsTyped(); // orig - - const bool isImage = isImageParam(arg0); - TIntermAggregate* atomic = new TIntermAggregate(mapAtomicOp(loc, op, isImage)); - atomic->setLoc(loc); - atomic->setType(arg2->getType()); - atomic->getWritableType().getQualifier().makeTemporary(); - - if (isImage) { - imageAtomicParams(atomic, arg0); - } else { - atomic->getSequence().push_back(arg0); - } - - atomic->getSequence().push_back(arg1); - atomic->getSequence().push_back(arg2); - node = intermediate.addAssign(EOpAssign, arg3, atomic, loc); - - break; - } - - case EOpEvaluateAttributeSnapped: - { - // SPIR-V InterpolateAtOffset uses float vec2 offset in pixels - // HLSL uses int2 offset on a 16x16 grid in [-8..7] on x & y: - // iU = (iU<<28)>>28 - // fU = ((float)iU)/16 - // Targets might handle this natively, in which case they can disable - // decompositions. 
- - TIntermTyped* arg0 = argAggregate->getSequence()[0]->getAsTyped(); // value - TIntermTyped* arg1 = argAggregate->getSequence()[1]->getAsTyped(); // offset - - TIntermTyped* i28 = intermediate.addConstantUnion(28, loc, true); - TIntermTyped* iU = handleBinaryMath(loc, ">>", EOpRightShift, - handleBinaryMath(loc, "<<", EOpLeftShift, arg1, i28), - i28); - - TIntermTyped* recip16 = intermediate.addConstantUnion((1.0/16.0), EbtFloat, loc, true); - TIntermTyped* floatOffset = handleBinaryMath(loc, "mul", EOpMul, - intermediate.addConversion(EOpConstructFloat, - TType(EbtFloat, EvqTemporary, 2), iU), - recip16); - - TIntermAggregate* interp = new TIntermAggregate(EOpInterpolateAtOffset); - interp->getSequence().push_back(arg0); - interp->getSequence().push_back(floatOffset); - interp->setLoc(loc); - interp->setType(arg0->getType()); - interp->getWritableType().getQualifier().makeTemporary(); - - node = interp; - - break; - } - - case EOpLit: - { - TIntermTyped* n_dot_l = argAggregate->getSequence()[0]->getAsTyped(); - TIntermTyped* n_dot_h = argAggregate->getSequence()[1]->getAsTyped(); - TIntermTyped* m = argAggregate->getSequence()[2]->getAsTyped(); - - TIntermAggregate* dst = new TIntermAggregate(EOpConstructVec4); - - // Ambient - dst->getSequence().push_back(intermediate.addConstantUnion(1.0, EbtFloat, loc, true)); - - // Diffuse: - TIntermTyped* zero = intermediate.addConstantUnion(0.0, EbtFloat, loc, true); - TIntermAggregate* diffuse = new TIntermAggregate(EOpMax); - diffuse->getSequence().push_back(n_dot_l); - diffuse->getSequence().push_back(zero); - diffuse->setLoc(loc); - diffuse->setType(TType(EbtFloat)); - dst->getSequence().push_back(diffuse); - - // Specular: - TIntermAggregate* min_ndot = new TIntermAggregate(EOpMin); - min_ndot->getSequence().push_back(n_dot_l); - min_ndot->getSequence().push_back(n_dot_h); - min_ndot->setLoc(loc); - min_ndot->setType(TType(EbtFloat)); - - TIntermTyped* compare = handleBinaryMath(loc, "<", EOpLessThan, min_ndot, zero); - TIntermTyped* n_dot_h_m = handleBinaryMath(loc, "mul", EOpMul, n_dot_h, m); // n_dot_h * m - - dst->getSequence().push_back(intermediate.addSelection(compare, zero, n_dot_h_m, loc)); - - // One: - dst->getSequence().push_back(intermediate.addConstantUnion(1.0, EbtFloat, loc, true)); - - dst->setLoc(loc); - dst->setType(TType(EbtFloat, EvqTemporary, 4)); - node = dst; - break; - } - - case EOpAsDouble: - { - // asdouble accepts two 32 bit ints. we can use EOpUint64BitsToDouble, but must - // first construct a uint64. - TIntermTyped* arg0 = argAggregate->getSequence()[0]->getAsTyped(); - TIntermTyped* arg1 = argAggregate->getSequence()[1]->getAsTyped(); - - if (arg0->getType().isVector()) { // TODO: ... - error(loc, "double2 conversion not implemented", "asdouble", ""); - break; - } - - TIntermAggregate* uint64 = new TIntermAggregate(EOpConstructUVec2); - - uint64->getSequence().push_back(arg0); - uint64->getSequence().push_back(arg1); - uint64->setType(TType(EbtUint, EvqTemporary, 2)); // convert 2 uints to a uint2 - uint64->setLoc(loc); - - // bitcast uint2 to a double - TIntermTyped* convert = new TIntermUnary(EOpUint64BitsToDouble); - convert->getAsUnaryNode()->setOperand(uint64); - convert->setLoc(loc); - convert->setType(TType(EbtDouble, EvqTemporary)); - node = convert; - - break; - } - - case EOpF16tof32: - { - // input uvecN with low 16 bits of each component holding a float16. convert to float32. 
- TIntermTyped* argValue = node->getAsUnaryNode()->getOperand(); - TIntermTyped* zero = intermediate.addConstantUnion(0, loc, true); - const int vecSize = argValue->getType().getVectorSize(); - - TOperator constructOp = EOpNull; - switch (vecSize) { - case 1: constructOp = EOpNull; break; // direct use, no construct needed - case 2: constructOp = EOpConstructVec2; break; - case 3: constructOp = EOpConstructVec3; break; - case 4: constructOp = EOpConstructVec4; break; - default: assert(0); break; - } - - // For scalar case, we don't need to construct another type. - TIntermAggregate* result = (vecSize > 1) ? new TIntermAggregate(constructOp) : nullptr; - - if (result) { - result->setType(TType(EbtFloat, EvqTemporary, vecSize)); - result->setLoc(loc); - } - - for (int idx = 0; idx < vecSize; ++idx) { - TIntermTyped* idxConst = intermediate.addConstantUnion(idx, loc, true); - TIntermTyped* component = argValue->getType().isVector() ? - intermediate.addIndex(EOpIndexDirect, argValue, idxConst, loc) : argValue; - - if (component != argValue) - component->setType(TType(argValue->getBasicType(), EvqTemporary)); - - TIntermTyped* unpackOp = new TIntermUnary(EOpUnpackHalf2x16); - unpackOp->setType(TType(EbtFloat, EvqTemporary, 2)); - unpackOp->getAsUnaryNode()->setOperand(component); - unpackOp->setLoc(loc); - - TIntermTyped* lowOrder = intermediate.addIndex(EOpIndexDirect, unpackOp, zero, loc); - - if (result != nullptr) { - result->getSequence().push_back(lowOrder); - node = result; - } else { - node = lowOrder; - } - } - - break; - } - - case EOpF32tof16: - { - // input floatN converted to 16 bit float in low order bits of each component of uintN - TIntermTyped* argValue = node->getAsUnaryNode()->getOperand(); - - TIntermTyped* zero = intermediate.addConstantUnion(0.0, EbtFloat, loc, true); - const int vecSize = argValue->getType().getVectorSize(); - - TOperator constructOp = EOpNull; - switch (vecSize) { - case 1: constructOp = EOpNull; break; // direct use, no construct needed - case 2: constructOp = EOpConstructUVec2; break; - case 3: constructOp = EOpConstructUVec3; break; - case 4: constructOp = EOpConstructUVec4; break; - default: assert(0); break; - } - - // For scalar case, we don't need to construct another type. - TIntermAggregate* result = (vecSize > 1) ? new TIntermAggregate(constructOp) : nullptr; - - if (result) { - result->setType(TType(EbtUint, EvqTemporary, vecSize)); - result->setLoc(loc); - } - - for (int idx = 0; idx < vecSize; ++idx) { - TIntermTyped* idxConst = intermediate.addConstantUnion(idx, loc, true); - TIntermTyped* component = argValue->getType().isVector() ? 
-            intermediate.addIndex(EOpIndexDirect, argValue, idxConst, loc) : argValue;
-
-            if (component != argValue)
-                component->setType(TType(argValue->getBasicType(), EvqTemporary));
-
-            TIntermAggregate* vec2ComponentAndZero = new TIntermAggregate(EOpConstructVec2);
-            vec2ComponentAndZero->getSequence().push_back(component);
-            vec2ComponentAndZero->getSequence().push_back(zero);
-            vec2ComponentAndZero->setType(TType(EbtFloat, EvqTemporary, 2));
-            vec2ComponentAndZero->setLoc(loc);
-
-            TIntermTyped* packOp = new TIntermUnary(EOpPackHalf2x16);
-            packOp->getAsUnaryNode()->setOperand(vec2ComponentAndZero);
-            packOp->setLoc(loc);
-            packOp->setType(TType(EbtUint, EvqTemporary));
-
-            if (result != nullptr) {
-                result->getSequence().push_back(packOp);
-                node = result;
-            } else {
-                node = packOp;
-            }
-        }
-
-        break;
-    }
-
-    case EOpD3DCOLORtoUBYTE4:
-    {
-        // ivec4 ( x.zyxw * 255.001953 );
-        TIntermTyped* arg0 = node->getAsUnaryNode()->getOperand();
-        TSwizzleSelectors<TVectorSelector> selectors;
-        selectors.push_back(2);
-        selectors.push_back(1);
-        selectors.push_back(0);
-        selectors.push_back(3);
-        TIntermTyped* swizzleIdx = intermediate.addSwizzle(selectors, loc);
-        TIntermTyped* swizzled = intermediate.addIndex(EOpVectorSwizzle, arg0, swizzleIdx, loc);
-        swizzled->setType(arg0->getType());
-        swizzled->getWritableType().getQualifier().makeTemporary();
-
-        TIntermTyped* conversion = intermediate.addConstantUnion(255.001953f, EbtFloat, loc, true);
-        TIntermTyped* rangeConverted = handleBinaryMath(loc, "mul", EOpMul, conversion, swizzled);
-        rangeConverted->setType(arg0->getType());
-        rangeConverted->getWritableType().getQualifier().makeTemporary();
-
-        node = intermediate.addConversion(EOpConstructInt, TType(EbtInt, EvqTemporary, 4), rangeConverted);
-        node->setLoc(loc);
-        node->setType(TType(EbtInt, EvqTemporary, 4));
-        break;
-    }
-
-    case EOpIsFinite:
-    {
-        // Since OpIsFinite in SPIR-V is only supported with the Kernel capability, we translate
-        // it to !isnan && !isinf
-
-        TIntermTyped* arg0 = node->getAsUnaryNode()->getOperand();
-
-        // We'll make a temporary in case the RHS is complex
-        TVariable* tempArg = makeInternalVariable("@finitetmp", arg0->getType());
-        tempArg->getWritableType().getQualifier().makeTemporary();
-
-        TIntermTyped* tmpArgAssign = intermediate.addAssign(EOpAssign,
-                                                            intermediate.addSymbol(*tempArg, loc),
-                                                            arg0, loc);
-
-        TIntermAggregate* compoundStatement = intermediate.makeAggregate(tmpArgAssign, loc);
-
-        const TType boolType(EbtBool, EvqTemporary, arg0->getVectorSize(), arg0->getMatrixCols(),
-                             arg0->getMatrixRows());
-
-        TIntermTyped* isnan = handleUnaryMath(loc, "isnan", EOpIsNan, intermediate.addSymbol(*tempArg, loc));
-        isnan->setType(boolType);
-
-        TIntermTyped* notnan = handleUnaryMath(loc, "!", EOpLogicalNot, isnan);
-        notnan->setType(boolType);
-
-        TIntermTyped* isinf = handleUnaryMath(loc, "isinf", EOpIsInf, intermediate.addSymbol(*tempArg, loc));
-        isinf->setType(boolType);
-
-        TIntermTyped* notinf = handleUnaryMath(loc, "!", EOpLogicalNot, isinf);
-        notinf->setType(boolType);
-
-        TIntermTyped* andNode = handleBinaryMath(loc, "and", EOpLogicalAnd, notnan, notinf);
-        andNode->setType(boolType);
-
-        compoundStatement = intermediate.growAggregate(compoundStatement, andNode);
-        compoundStatement->setOperator(EOpSequence);
-        compoundStatement->setLoc(loc);
-        compoundStatement->setType(boolType);
-
-        node = compoundStatement;
-
-        break;
-    }
-
-    default:
-        break; // most pass through unchanged
-    }
-}
-
-//
-// Handle seeing function call syntax in the grammar, which could be any of
-//
- .length() method -// - constructor -// - a call to a built-in function mapped to an operator -// - a call to a built-in function that will remain a function call (e.g., texturing) -// - user function -// - subroutine call (not implemented yet) -// -TIntermTyped* HlslParseContext::handleFunctionCall(const TSourceLoc& loc, TFunction* function, TIntermTyped* arguments) -{ - TIntermTyped* result = nullptr; - - TOperator op = function->getBuiltInOp(); - if (op != EOpNull) { - // - // Then this should be a constructor. - // Don't go through the symbol table for constructors. - // Their parameters will be verified algorithmically. - // - TType type(EbtVoid); // use this to get the type back - if (! constructorError(loc, arguments, *function, op, type)) { - // - // It's a constructor, of type 'type'. - // - result = handleConstructor(loc, arguments, type); - if (result == nullptr) - error(loc, "cannot construct with these arguments", type.getCompleteString().c_str(), ""); - } - } else { - // - // Find it in the symbol table. - // - const TFunction* fnCandidate = nullptr; - bool builtIn = false; - int thisDepth = 0; - - TIntermAggregate* aggregate = arguments ? arguments->getAsAggregate() : nullptr; - - // TODO: this needs improvement: there's no way at present to look up a signature in - // the symbol table for an arbitrary type. This is a temporary hack until that ability exists. - // It will have false positives, since it doesn't check arg counts or types. - if (arguments) { - // Check if first argument is struct buffer type. It may be an aggregate or a symbol, so we - // look for either case. - - TIntermTyped* arg0 = nullptr; - - if (aggregate && aggregate->getSequence().size() > 0) - arg0 = aggregate->getSequence()[0]->getAsTyped(); - else if (arguments->getAsSymbolNode()) - arg0 = arguments->getAsSymbolNode(); - - if (arg0 != nullptr && isStructBufferType(arg0->getType())) { - static const int methodPrefixSize = sizeof(BUILTIN_PREFIX)-1; - - if (function->getName().length() > methodPrefixSize && - isStructBufferMethod(function->getName().substr(methodPrefixSize))) { - const TString mangle = function->getName() + "("; - TSymbol* symbol = symbolTable.find(mangle, &builtIn); - - if (symbol) - fnCandidate = symbol->getAsFunction(); - } - } - } - - if (fnCandidate == nullptr) - fnCandidate = findFunction(loc, *function, builtIn, thisDepth, arguments); - - if (fnCandidate) { - // This is a declared function that might map to - // - a built-in operator, - // - a built-in function not mapped to an operator, or - // - a user function. - - // Error check for a function requiring specific extensions present. - if (builtIn && fnCandidate->getNumExtensions()) - requireExtensions(loc, fnCandidate->getNumExtensions(), fnCandidate->getExtensions(), - fnCandidate->getName().c_str()); - - // turn an implicit member-function resolution into an explicit call - TString callerName; - if (thisDepth == 0) - callerName = fnCandidate->getMangledName(); - else { - // get the explicit (full) name of the function - callerName = currentTypePrefix[currentTypePrefix.size() - thisDepth]; - callerName += fnCandidate->getMangledName(); - // insert the implicit calling argument - pushFrontArguments(intermediate.addSymbol(*getImplicitThis(thisDepth)), arguments); - } - - // Convert 'in' arguments, so that types match. - // However, skip those that need expansion, that is covered next. - if (arguments) - addInputArgumentConversions(*fnCandidate, arguments); - - // Expand arguments. 
Some arguments must physically expand to a different set
-            // than what the shader declared and passes.
-            if (arguments && !builtIn)
-                expandArguments(loc, *fnCandidate, arguments);
-
-            // Expansion may have changed the form of arguments
-            aggregate = arguments ? arguments->getAsAggregate() : nullptr;
-
-            op = fnCandidate->getBuiltInOp();
-            if (builtIn && op != EOpNull) {
-                // A function call mapped to a built-in operation.
-                result = intermediate.addBuiltInFunctionCall(loc, op, fnCandidate->getParamCount() == 1, arguments,
-                                                             fnCandidate->getType());
-                if (result == nullptr) {
-                    error(arguments->getLoc(), " wrong operand type", "Internal Error",
-                          "built in unary operator function. Type: %s",
-                          static_cast<TIntermTyped*>(arguments)->getCompleteString().c_str());
-                } else if (result->getAsOperator()) {
-                    builtInOpCheck(loc, *fnCandidate, *result->getAsOperator());
-                }
-            } else {
-                // This is a function call not mapped to built-in operator.
-                // It could still be a built-in function, but only if PureOperatorBuiltins == false.
-                result = intermediate.setAggregateOperator(arguments, EOpFunctionCall, fnCandidate->getType(), loc);
-                TIntermAggregate* call = result->getAsAggregate();
-                call->setName(callerName);
-
-                // this is how we know whether the given function is a built-in function or a user-defined function
-                // if builtIn == false, it's a userDefined -> could be an overloaded built-in function also
-                // if builtIn == true, it's definitely a built-in function with EOpNull
-                if (! builtIn) {
-                    call->setUserDefined();
-                    intermediate.addToCallGraph(infoSink, currentCaller, callerName);
-                }
-            }
-
-            // for decompositions, since we want to operate on the function node, not the aggregate holding
-            // output conversions.
-            const TIntermTyped* fnNode = result;
-
-            decomposeStructBufferMethods(loc, result, arguments); // HLSL->AST struct buffer method decompositions
-            decomposeIntrinsic(loc, result, arguments); // HLSL->AST intrinsic decompositions
-            decomposeSampleMethods(loc, result, arguments); // HLSL->AST sample method decompositions
-            decomposeGeometryMethods(loc, result, arguments); // HLSL->AST geometry method decompositions
-
-            // Create the qualifier list, carried in the AST for the call.
-            // Because some arguments expand to multiple arguments, the qualifier list will
-            // be longer than the formal parameter list.
-            if (result == fnNode && result->getAsAggregate()) {
-                TQualifierList& qualifierList = result->getAsAggregate()->getQualifierList();
-                for (int i = 0; i < fnCandidate->getParamCount(); ++i) {
-                    TStorageQualifier qual = (*fnCandidate)[i].type->getQualifier().storage;
-                    if (hasStructBuffCounter(*(*fnCandidate)[i].type)) {
-                        // add buffer and counter buffer argument qualifier
-                        qualifierList.push_back(qual);
-                        qualifierList.push_back(qual);
-                    } else if (shouldFlatten(*(*fnCandidate)[i].type)) {
-                        // add structure member expansion
-                        for (int memb = 0; memb < (int)(*fnCandidate)[i].type->getStruct()->size(); ++memb)
-                            qualifierList.push_back(qual);
-                    } else {
-                        // Normal 1:1 case
-                        qualifierList.push_back(qual);
-                    }
-                }
-            }
-
-            // Convert 'out' arguments. If it was a constant folded built-in, it won't be an aggregate anymore.
-            // Built-ins with a single argument aren't called with an aggregate, but they also don't have an output.
-            // Also, build the qualifier list for user function calls, which are always called with an aggregate.
-            // We don't do this if there has been a decomposition, which will have added its own conversions
-            // for output parameters.
- if (result == fnNode && result->getAsAggregate()) - result = addOutputArgumentConversions(*fnCandidate, *result->getAsOperator()); - } - } - - // generic error recovery - // TODO: simplification: localize all the error recoveries that look like this, and taking type into account to - // reduce cascades - if (result == nullptr) - result = intermediate.addConstantUnion(0.0, EbtFloat, loc); - - return result; -} - -// An initial argument list is difficult: it can be null, or a single node, -// or an aggregate if more than one argument. Add one to the front, maintaining -// this lack of uniformity. -void HlslParseContext::pushFrontArguments(TIntermTyped* front, TIntermTyped*& arguments) -{ - if (arguments == nullptr) - arguments = front; - else if (arguments->getAsAggregate() != nullptr) - arguments->getAsAggregate()->getSequence().insert(arguments->getAsAggregate()->getSequence().begin(), front); - else - arguments = intermediate.growAggregate(front, arguments); -} - -// -// Add any needed implicit conversions for function-call arguments to input parameters. -// -void HlslParseContext::addInputArgumentConversions(const TFunction& function, TIntermTyped*& arguments) -{ - TIntermAggregate* aggregate = arguments->getAsAggregate(); - - // Replace a single argument with a single argument. - const auto setArg = [&](int paramNum, TIntermTyped* arg) { - if (function.getParamCount() == 1) - arguments = arg; - else { - if (aggregate == nullptr) - arguments = arg; - else - aggregate->getSequence()[paramNum] = arg; - } - }; - - // Process each argument's conversion - for (int param = 0; param < function.getParamCount(); ++param) { - if (! function[param].type->getQualifier().isParamInput()) - continue; - - // At this early point there is a slight ambiguity between whether an aggregate 'arguments' - // is the single argument itself or its children are the arguments. Only one argument - // means take 'arguments' itself as the one argument. - TIntermTyped* arg = function.getParamCount() == 1 - ? arguments->getAsTyped() - : (aggregate ? - aggregate->getSequence()[param]->getAsTyped() : - arguments->getAsTyped()); - if (*function[param].type != arg->getType()) { - // In-qualified arguments just need an extra node added above the argument to - // convert to the correct type. - TIntermTyped* convArg = intermediate.addConversion(EOpFunctionCall, *function[param].type, arg); - if (convArg != nullptr) - convArg = intermediate.addUniShapeConversion(EOpFunctionCall, *function[param].type, convArg); - if (convArg != nullptr) - setArg(param, convArg); - else - error(arg->getLoc(), "cannot convert input argument, argument", "", "%d", param); - } else { - if (wasFlattened(arg)) { - // If both formal and calling arg are to be flattened, leave that to argument - // expansion, not conversion. - if (!shouldFlatten(*function[param].type)) { - // Will make a two-level subtree. - // The deepest will copy member-by-member to build the structure to pass. - // The level above that will be a two-operand EOpComma sequence that follows the copy by the - // object itself. 
-                    TVariable* internalAggregate = makeInternalVariable("aggShadow", *function[param].type);
-                    internalAggregate->getWritableType().getQualifier().makeTemporary();
-                    TIntermSymbol* internalSymbolNode = new TIntermSymbol(internalAggregate->getUniqueId(),
-                                                                          internalAggregate->getName(),
-                                                                          internalAggregate->getType());
-                    internalSymbolNode->setLoc(arg->getLoc());
-                    // This makes the deepest level, the member-wise copy
-                    TIntermAggregate* assignAgg = handleAssign(arg->getLoc(), EOpAssign,
-                                                               internalSymbolNode, arg)->getAsAggregate();
-
-                    // Now, pair that with the resulting aggregate.
-                    assignAgg = intermediate.growAggregate(assignAgg, internalSymbolNode, arg->getLoc());
-                    assignAgg->setOperator(EOpComma);
-                    assignAgg->setType(internalAggregate->getType());
-                    setArg(param, assignAgg);
-                }
-            }
-        }
-    }
-}
-
-//
-// Add any needed implicit expansion of calling arguments from what the shader listed to what's
-// internally needed for the AST (given the constraints downstream).
-//
-void HlslParseContext::expandArguments(const TSourceLoc& loc, const TFunction& function, TIntermTyped*& arguments)
-{
-    TIntermAggregate* aggregate = arguments->getAsAggregate();
-    int functionParamNumberOffset = 0;
-
-    // Replace a single argument with a single argument.
-    const auto setArg = [&](int paramNum, TIntermTyped* arg) {
-        if (function.getParamCount() + functionParamNumberOffset == 1)
-            arguments = arg;
-        else {
-            if (aggregate == nullptr)
-                arguments = arg;
-            else
-                aggregate->getSequence()[paramNum] = arg;
-        }
-    };
-
-    // Replace a single argument with a list of arguments
-    const auto setArgList = [&](int paramNum, const TVector<TIntermTyped*>& args) {
-        if (args.size() == 1)
-            setArg(paramNum, args.front());
-        else if (args.size() > 1) {
-            if (function.getParamCount() + functionParamNumberOffset == 1) {
-                arguments = intermediate.makeAggregate(args.front());
-                std::for_each(args.begin() + 1, args.end(),
-                              [&](TIntermTyped* arg) {
-                                  arguments = intermediate.growAggregate(arguments, arg);
-                              });
-            } else {
-                auto it = aggregate->getSequence().erase(aggregate->getSequence().begin() + paramNum);
-                aggregate->getSequence().insert(it, args.begin(), args.end());
-            }
-            functionParamNumberOffset += (int)(args.size() - 1);
-        }
-    };
-
-    // Process each argument's conversion
-    for (int param = 0; param < function.getParamCount(); ++param) {
-        // At this early point there is a slight ambiguity between whether an aggregate 'arguments'
-        // is the single argument itself or its children are the arguments. Only one argument
-        // means take 'arguments' itself as the one argument.
-        TIntermTyped* arg = function.getParamCount() == 1
-                            ? arguments->getAsTyped()
-                            : (aggregate ?
-                               aggregate->getSequence()[param + functionParamNumberOffset]->getAsTyped() :
-                               arguments->getAsTyped());
-
-        if (wasFlattened(arg) && shouldFlatten(*function[param].type)) {
-            // Need to pass the structure members instead of the structure.
-            TVector<TIntermTyped*> memberArgs;
-            for (int memb = 0; memb < (int)arg->getType().getStruct()->size(); ++memb)
-                memberArgs.push_back(flattenAccess(arg, memb));
-            setArgList(param + functionParamNumberOffset, memberArgs);
-        }
-    }
-
-    // TODO: if we need both hidden counter args (below) and struct expansion (above)
-    // the two algorithms need to be merged: Each assumes the list starts out 1:1 between
-    // parameters and arguments.
-
-    // If any argument is a pass-by-reference struct buffer with an associated counter
-    // buffer, we have to add another hidden parameter for that counter.
- if (aggregate) - addStructBuffArguments(loc, aggregate); -} - -// -// Add any needed implicit output conversions for function-call arguments. This -// can require a new tree topology, complicated further by whether the function -// has a return value. -// -// Returns a node of a subtree that evaluates to the return value of the function. -// -TIntermTyped* HlslParseContext::addOutputArgumentConversions(const TFunction& function, TIntermOperator& intermNode) -{ - assert (intermNode.getAsAggregate() != nullptr || intermNode.getAsUnaryNode() != nullptr); - - const TSourceLoc& loc = intermNode.getLoc(); - - TIntermSequence argSequence; // temp sequence for unary node args - - if (intermNode.getAsUnaryNode()) - argSequence.push_back(intermNode.getAsUnaryNode()->getOperand()); - - TIntermSequence& arguments = argSequence.empty() ? intermNode.getAsAggregate()->getSequence() : argSequence; - - const auto needsConversion = [&](int argNum) { - return function[argNum].type->getQualifier().isParamOutput() && - (*function[argNum].type != arguments[argNum]->getAsTyped()->getType() || - shouldConvertLValue(arguments[argNum]) || - wasFlattened(arguments[argNum]->getAsTyped())); - }; - - // Will there be any output conversions? - bool outputConversions = false; - for (int i = 0; i < function.getParamCount(); ++i) { - if (needsConversion(i)) { - outputConversions = true; - break; - } - } - - if (! outputConversions) - return &intermNode; - - // Setup for the new tree, if needed: - // - // Output conversions need a different tree topology. - // Out-qualified arguments need a temporary of the correct type, with the call - // followed by an assignment of the temporary to the original argument: - // void: function(arg, ...) -> ( function(tempArg, ...), arg = tempArg, ...) - // ret = function(arg, ...) -> ret = (tempRet = function(tempArg, ...), arg = tempArg, ..., tempRet) - // Where the "tempArg" type needs no conversion as an argument, but will convert on assignment. - TIntermTyped* conversionTree = nullptr; - TVariable* tempRet = nullptr; - if (intermNode.getBasicType() != EbtVoid) { - // do the "tempRet = function(...), " bit from above - tempRet = makeInternalVariable("tempReturn", intermNode.getType()); - TIntermSymbol* tempRetNode = intermediate.addSymbol(*tempRet, loc); - conversionTree = intermediate.addAssign(EOpAssign, tempRetNode, &intermNode, loc); - } else - conversionTree = &intermNode; - - conversionTree = intermediate.makeAggregate(conversionTree); - - // Process each argument's conversion - for (int i = 0; i < function.getParamCount(); ++i) { - if (needsConversion(i)) { - // Out-qualified arguments needing conversion need to use the topology setup above. - // Do the " ...(tempArg, ...), arg = tempArg" bit from above. - - // Make a temporary for what the function expects the argument to look like. 
- TVariable* tempArg = makeInternalVariable("tempArg", *function[i].type); - tempArg->getWritableType().getQualifier().makeTemporary(); - TIntermSymbol* tempArgNode = intermediate.addSymbol(*tempArg, loc); - - // This makes the deepest level, the member-wise copy - TIntermTyped* tempAssign = handleAssign(arguments[i]->getLoc(), EOpAssign, arguments[i]->getAsTyped(), - tempArgNode); - tempAssign = handleLvalue(arguments[i]->getLoc(), "assign", tempAssign); - conversionTree = intermediate.growAggregate(conversionTree, tempAssign, arguments[i]->getLoc()); - - // replace the argument with another node for the same tempArg variable - arguments[i] = intermediate.addSymbol(*tempArg, loc); - } - } - - // Finalize the tree topology (see bigger comment above). - if (tempRet) { - // do the "..., tempRet" bit from above - TIntermSymbol* tempRetNode = intermediate.addSymbol(*tempRet, loc); - conversionTree = intermediate.growAggregate(conversionTree, tempRetNode, loc); - } - - conversionTree = intermediate.setAggregateOperator(conversionTree, EOpComma, intermNode.getType(), loc); - - return conversionTree; -} - -// -// Add any needed "hidden" counter buffer arguments for function calls. -// -// Modifies the 'aggregate' argument if needed. Otherwise, is no-op. -// -void HlslParseContext::addStructBuffArguments(const TSourceLoc& loc, TIntermAggregate*& aggregate) -{ - // See if there are any SB types with counters. - const bool hasStructBuffArg = - std::any_of(aggregate->getSequence().begin(), - aggregate->getSequence().end(), - [this](const TIntermNode* node) { - return (node->getAsTyped() != nullptr) && hasStructBuffCounter(node->getAsTyped()->getType()); - }); - - // Nothing to do, if we didn't find one. - if (! hasStructBuffArg) - return; - - TIntermSequence argsWithCounterBuffers; - - for (int param = 0; param < int(aggregate->getSequence().size()); ++param) { - argsWithCounterBuffers.push_back(aggregate->getSequence()[param]); - - if (hasStructBuffCounter(aggregate->getSequence()[param]->getAsTyped()->getType())) { - const TIntermSymbol* blockSym = aggregate->getSequence()[param]->getAsSymbolNode(); - if (blockSym != nullptr) { - TType counterType; - counterBufferType(loc, counterType); - - const TString counterBlockName(getStructBuffCounterName(blockSym->getName())); - - TVariable* variable = makeInternalVariable(counterBlockName, counterType); - - // Mark this buffer as requiring a counter block. TODO: there should be a better - // way to track it. - structBufferCounter[counterBlockName] = true; - - TIntermSymbol* sym = intermediate.addSymbol(*variable, loc); - argsWithCounterBuffers.push_back(sym); - } - } - } - - // Swap with the temp list we've built up. - aggregate->getSequence().swap(argsWithCounterBuffers); -} - - -// -// Do additional checking of built-in function calls that is not caught -// by normal semantic checks on argument type, extension tagging, etc. -// -// Assumes there has been a semantically correct match to a built-in function prototype. -// -void HlslParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCandidate, TIntermOperator& callNode) -{ - // Set up convenience accessors to the argument(s). There is almost always - // multiple arguments for the cases below, but when there might be one, - // check the unaryArg first. 
- const TIntermSequence* argp = nullptr; // confusing to use [] syntax on a pointer, so this is to help get a reference - const TIntermTyped* unaryArg = nullptr; - const TIntermTyped* arg0 = nullptr; - if (callNode.getAsAggregate()) { - argp = &callNode.getAsAggregate()->getSequence(); - if (argp->size() > 0) - arg0 = (*argp)[0]->getAsTyped(); - } else { - assert(callNode.getAsUnaryNode()); - unaryArg = callNode.getAsUnaryNode()->getOperand(); - arg0 = unaryArg; - } - const TIntermSequence& aggArgs = *argp; // only valid when unaryArg is nullptr - - switch (callNode.getOp()) { - case EOpTextureGather: - case EOpTextureGatherOffset: - case EOpTextureGatherOffsets: - { - // Figure out which variants are allowed by what extensions, - // and what arguments must be constant for which situations. - - TString featureString = fnCandidate.getName() + "(...)"; - const char* feature = featureString.c_str(); - int compArg = -1; // track which argument, if any, is the constant component argument - switch (callNode.getOp()) { - case EOpTextureGather: - // More than two arguments needs gpu_shader5, and rectangular or shadow needs gpu_shader5, - // otherwise, need GL_ARB_texture_gather. - if (fnCandidate.getParamCount() > 2 || fnCandidate[0].type->getSampler().dim == EsdRect || - fnCandidate[0].type->getSampler().shadow) { - if (! fnCandidate[0].type->getSampler().shadow) - compArg = 2; - } - break; - case EOpTextureGatherOffset: - // GL_ARB_texture_gather is good enough for 2D non-shadow textures with no component argument - if (! fnCandidate[0].type->getSampler().shadow) - compArg = 3; - break; - case EOpTextureGatherOffsets: - if (! fnCandidate[0].type->getSampler().shadow) - compArg = 3; - break; - default: - break; - } - - if (compArg > 0 && compArg < fnCandidate.getParamCount()) { - if (aggArgs[compArg]->getAsConstantUnion()) { - int value = aggArgs[compArg]->getAsConstantUnion()->getConstArray()[0].getIConst(); - if (value < 0 || value > 3) - error(loc, "must be 0, 1, 2, or 3:", feature, "component argument"); - } else - error(loc, "must be a compile-time constant:", feature, "component argument"); - } - - break; - } - - case EOpTextureOffset: - case EOpTextureFetchOffset: - case EOpTextureProjOffset: - case EOpTextureLodOffset: - case EOpTextureProjLodOffset: - case EOpTextureGradOffset: - case EOpTextureProjGradOffset: - { - // Handle texture-offset limits checking - // Pick which argument has to hold constant offsets - int arg = -1; - switch (callNode.getOp()) { - case EOpTextureOffset: arg = 2; break; - case EOpTextureFetchOffset: arg = (arg0->getType().getSampler().dim != EsdRect) ? 
3 : 2; break; - case EOpTextureProjOffset: arg = 2; break; - case EOpTextureLodOffset: arg = 3; break; - case EOpTextureProjLodOffset: arg = 3; break; - case EOpTextureGradOffset: arg = 4; break; - case EOpTextureProjGradOffset: arg = 4; break; - default: - assert(0); - break; - } - - if (arg > 0) { - if (aggArgs[arg]->getAsConstantUnion() == nullptr) - error(loc, "argument must be compile-time constant", "texel offset", ""); - else { - const TType& type = aggArgs[arg]->getAsTyped()->getType(); - for (int c = 0; c < type.getVectorSize(); ++c) { - int offset = aggArgs[arg]->getAsConstantUnion()->getConstArray()[c].getIConst(); - if (offset > resources.maxProgramTexelOffset || offset < resources.minProgramTexelOffset) - error(loc, "value is out of range:", "texel offset", - "[gl_MinProgramTexelOffset, gl_MaxProgramTexelOffset]"); - } - } - } - - break; - } - - case EOpTextureQuerySamples: - case EOpImageQuerySamples: - break; - - case EOpImageAtomicAdd: - case EOpImageAtomicMin: - case EOpImageAtomicMax: - case EOpImageAtomicAnd: - case EOpImageAtomicOr: - case EOpImageAtomicXor: - case EOpImageAtomicExchange: - case EOpImageAtomicCompSwap: - break; - - case EOpInterpolateAtCentroid: - case EOpInterpolateAtSample: - case EOpInterpolateAtOffset: - // Make sure the first argument is an interpolant, or an array element of an interpolant - if (arg0->getType().getQualifier().storage != EvqVaryingIn) { - // It might still be an array element. - // - // We could check more, but the semantics of the first argument are already met; the - // only way to turn an array into a float/vec* is array dereference and swizzle. - // - // ES and desktop 4.3 and earlier: swizzles may not be used - // desktop 4.4 and later: swizzles may be used - const TIntermTyped* base = TIntermediate::findLValueBase(arg0, true); - if (base == nullptr || base->getType().getQualifier().storage != EvqVaryingIn) - error(loc, "first argument must be an interpolant, or interpolant-array element", - fnCandidate.getName().c_str(), ""); - } - break; - - default: - break; - } -} - -// -// Handle seeing something in a grammar production that can be done by calling -// a constructor. -// -// The constructor still must be "handled" by handleFunctionCall(), which will -// then call handleConstructor(). -// -TFunction* HlslParseContext::makeConstructorCall(const TSourceLoc& loc, const TType& type) -{ - TOperator op = intermediate.mapTypeToConstructorOp(type); - - if (op == EOpNull) { - error(loc, "cannot construct this type", type.getBasicString(), ""); - return nullptr; - } - - TString empty(""); - - return new TFunction(&empty, type, op); -} - -// -// Handle seeing a "COLON semantic" at the end of a type declaration, -// by updating the type according to the semantic. -// -void HlslParseContext::handleSemantic(TSourceLoc loc, TQualifier& qualifier, TBuiltInVariable builtIn, - const TString& upperCase) -{ - // Parse and return semantic number. If limit is 0, it will be ignored. Otherwise, if the parsed - // semantic number is >= limit, errorMsg is issued and 0 is returned. - // TODO: it would be nicer if limit and errorMsg had default parameters, but some compilers don't yet - // accept those in lambda functions. 
- const auto getSemanticNumber = [this, loc](const TString& semantic, unsigned int limit, const char* errorMsg) -> unsigned int { - size_t pos = semantic.find_last_not_of("0123456789"); - if (pos == std::string::npos) - return 0u; - - unsigned int semanticNum = (unsigned int)atoi(semantic.c_str() + pos + 1); - - if (limit != 0 && semanticNum >= limit) { - error(loc, errorMsg, semantic.c_str(), ""); - return 0u; - } - - return semanticNum; - }; - - switch(builtIn) { - case EbvNone: - // Get location numbers from fragment outputs, instead of - // auto-assigning them. - if (language == EShLangFragment && upperCase.compare(0, 9, "SV_TARGET") == 0) { - qualifier.layoutLocation = getSemanticNumber(upperCase, 0, nullptr); - nextOutLocation = std::max(nextOutLocation, qualifier.layoutLocation + 1u); - } else if (upperCase.compare(0, 15, "SV_CLIPDISTANCE") == 0) { - builtIn = EbvClipDistance; - qualifier.layoutLocation = getSemanticNumber(upperCase, maxClipCullRegs, "invalid clip semantic"); - } else if (upperCase.compare(0, 15, "SV_CULLDISTANCE") == 0) { - builtIn = EbvCullDistance; - qualifier.layoutLocation = getSemanticNumber(upperCase, maxClipCullRegs, "invalid cull semantic"); - } - break; - case EbvPosition: - // adjust for stage in/out - if (language == EShLangFragment) - builtIn = EbvFragCoord; - break; - case EbvFragStencilRef: - error(loc, "unimplemented; need ARB_shader_stencil_export", "SV_STENCILREF", ""); - break; - case EbvTessLevelInner: - case EbvTessLevelOuter: - qualifier.patch = true; - break; - default: - break; - } - - qualifier.builtIn = builtIn; - qualifier.semanticName = intermediate.addSemanticName(upperCase); -} - -// -// Handle seeing something like "PACKOFFSET LEFT_PAREN c[Subcomponent][.component] RIGHT_PAREN" -// -// 'location' has the "c[Subcomponent]" part. -// 'component' points to the "component" part, or nullptr if not present. -// -void HlslParseContext::handlePackOffset(const TSourceLoc& loc, TQualifier& qualifier, const glslang::TString& location, - const glslang::TString* component) -{ - if (location.size() == 0 || location[0] != 'c') { - error(loc, "expected 'c'", "packoffset", ""); - return; - } - if (location.size() == 1) - return; - if (! isdigit(location[1])) { - error(loc, "expected number after 'c'", "packoffset", ""); - return; - } - - qualifier.layoutOffset = 16 * atoi(location.substr(1, location.size()).c_str()); - if (component != nullptr) { - int componentOffset = 0; - switch ((*component)[0]) { - case 'x': componentOffset = 0; break; - case 'y': componentOffset = 4; break; - case 'z': componentOffset = 8; break; - case 'w': componentOffset = 12; break; - default: - componentOffset = -1; - break; - } - if (componentOffset < 0 || component->size() > 1) { - error(loc, "expected {x, y, z, w} for component", "packoffset", ""); - return; - } - qualifier.layoutOffset += componentOffset; - } -} - -// -// Handle seeing something like "REGISTER LEFT_PAREN [shader_profile,] Type# RIGHT_PAREN" -// -// 'profile' points to the shader_profile part, or nullptr if not present. -// 'desc' is the type# part. 
-// -void HlslParseContext::handleRegister(const TSourceLoc& loc, TQualifier& qualifier, const glslang::TString* profile, - const glslang::TString& desc, int subComponent, const glslang::TString* spaceDesc) -{ - if (profile != nullptr) - warn(loc, "ignoring shader_profile", "register", ""); - - if (desc.size() < 1) { - error(loc, "expected register type", "register", ""); - return; - } - - int regNumber = 0; - if (desc.size() > 1) { - if (isdigit(desc[1])) - regNumber = atoi(desc.substr(1, desc.size()).c_str()); - else { - error(loc, "expected register number after register type", "register", ""); - return; - } - } - - // TODO: learn what all these really mean and how they interact with regNumber and subComponent - const std::vector<std::string>& resourceInfo = intermediate.getResourceSetBinding(); - switch (std::tolower(desc[0])) { - case 'b': - case 't': - case 'c': - case 's': - case 'u': - qualifier.layoutBinding = regNumber + subComponent; - - // This handles per-register layout sets numbers. For the global mode which sets - // every symbol to the same value, see setLinkageLayoutSets(). - if ((resourceInfo.size() % 3) == 0) { - // Apply per-symbol resource set and binding. - for (auto it = resourceInfo.cbegin(); it != resourceInfo.cend(); it = it + 3) { - if (strcmp(desc.c_str(), it[0].c_str()) == 0) { - qualifier.layoutSet = atoi(it[1].c_str()); - qualifier.layoutBinding = atoi(it[2].c_str()) + subComponent; - break; - } - } - } - break; - default: - warn(loc, "ignoring unrecognized register type", "register", "%c", desc[0]); - break; - } - - // space - unsigned int setNumber; - const auto crackSpace = [&]() -> bool { - const int spaceLen = 5; - if (spaceDesc->size() < spaceLen + 1) - return false; - if (spaceDesc->compare(0, spaceLen, "space") != 0) - return false; - if (! isdigit((*spaceDesc)[spaceLen])) - return false; - setNumber = atoi(spaceDesc->substr(spaceLen, spaceDesc->size()).c_str()); - return true; - }; - - if (spaceDesc) { - if (! crackSpace()) { - error(loc, "expected spaceN", "register", ""); - return; - } - qualifier.layoutSet = setNumber; - } -} - -// Convert to a scalar boolean, or if not allowed by HLSL semantics, -// report an error and return nullptr. -TIntermTyped* HlslParseContext::convertConditionalExpression(const TSourceLoc& loc, TIntermTyped* condition, - bool mustBeScalar) -{ - if (mustBeScalar && !condition->getType().isScalarOrVec1()) { - error(loc, "requires a scalar", "conditional expression", ""); - return nullptr; - } - - return intermediate.addConversion(EOpConstructBool, TType(EbtBool, EvqTemporary, condition->getVectorSize()), - condition); -} - -// -// Same error message for all places assignments don't work. -// -void HlslParseContext::assignError(const TSourceLoc& loc, const char* op, TString left, TString right) -{ - error(loc, "", op, "cannot convert from '%s' to '%s'", - right.c_str(), left.c_str()); -} - -// -// Same error message for all places unary operations don't work. -// -void HlslParseContext::unaryOpError(const TSourceLoc& loc, const char* op, TString operand) -{ - error(loc, " wrong operand type", op, - "no operation '%s' exists that takes an operand of type %s (or there is no acceptable conversion)", - op, operand.c_str()); -} - -// -// Same error message for all binary operations don't work.
-// -void HlslParseContext::binaryOpError(const TSourceLoc& loc, const char* op, TString left, TString right) -{ - error(loc, " wrong operand types:", op, - "no operation '%s' exists that takes a left-hand operand of type '%s' and " - "a right operand of type '%s' (or there is no acceptable conversion)", - op, left.c_str(), right.c_str()); -} - -// -// A basic type of EbtVoid is a key that the name string was seen in the source, but -// it was not found as a variable in the symbol table. If so, give the error -// message and insert a dummy variable in the symbol table to prevent future errors. -// -void HlslParseContext::variableCheck(TIntermTyped*& nodePtr) -{ - TIntermSymbol* symbol = nodePtr->getAsSymbolNode(); - if (! symbol) - return; - - if (symbol->getType().getBasicType() == EbtVoid) { - error(symbol->getLoc(), "undeclared identifier", symbol->getName().c_str(), ""); - - // Add to symbol table to prevent future error messages on the same name - if (symbol->getName().size() > 0) { - TVariable* fakeVariable = new TVariable(&symbol->getName(), TType(EbtFloat)); - symbolTable.insert(*fakeVariable); - - // substitute a symbol node for this new variable - nodePtr = intermediate.addSymbol(*fakeVariable, symbol->getLoc()); - } - } -} - -// -// Both test, and if necessary spit out an error, to see if the node is really -// a constant. -// -void HlslParseContext::constantValueCheck(TIntermTyped* node, const char* token) -{ - if (node->getQualifier().storage != EvqConst) - error(node->getLoc(), "constant expression required", token, ""); -} - -// -// Both test, and if necessary spit out an error, to see if the node is really -// an integer. -// -void HlslParseContext::integerCheck(const TIntermTyped* node, const char* token) -{ - if ((node->getBasicType() == EbtInt || node->getBasicType() == EbtUint) && node->isScalar()) - return; - - error(node->getLoc(), "scalar integer expression required", token, ""); -} - -// -// Both test, and if necessary spit out an error, to see if we are currently -// globally scoped. -// -void HlslParseContext::globalCheck(const TSourceLoc& loc, const char* token) -{ - if (! symbolTable.atGlobalLevel()) - error(loc, "not allowed in nested scope", token, ""); -} - -bool HlslParseContext::builtInName(const TString& /*identifier*/) -{ - return false; -} - -// -// Make sure there is enough data and not too many arguments provided to the -// constructor to build something of the type of the constructor. Also returns -// the type of the constructor. -// -// Returns true if there was an error in construction. 
-// -bool HlslParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, TFunction& function, - TOperator op, TType& type) -{ - type.shallowCopy(function.getType()); - - bool constructingMatrix = false; - switch (op) { - case EOpConstructTextureSampler: - return constructorTextureSamplerError(loc, function); - case EOpConstructMat2x2: - case EOpConstructMat2x3: - case EOpConstructMat2x4: - case EOpConstructMat3x2: - case EOpConstructMat3x3: - case EOpConstructMat3x4: - case EOpConstructMat4x2: - case EOpConstructMat4x3: - case EOpConstructMat4x4: - case EOpConstructDMat2x2: - case EOpConstructDMat2x3: - case EOpConstructDMat2x4: - case EOpConstructDMat3x2: - case EOpConstructDMat3x3: - case EOpConstructDMat3x4: - case EOpConstructDMat4x2: - case EOpConstructDMat4x3: - case EOpConstructDMat4x4: - case EOpConstructIMat2x2: - case EOpConstructIMat2x3: - case EOpConstructIMat2x4: - case EOpConstructIMat3x2: - case EOpConstructIMat3x3: - case EOpConstructIMat3x4: - case EOpConstructIMat4x2: - case EOpConstructIMat4x3: - case EOpConstructIMat4x4: - case EOpConstructUMat2x2: - case EOpConstructUMat2x3: - case EOpConstructUMat2x4: - case EOpConstructUMat3x2: - case EOpConstructUMat3x3: - case EOpConstructUMat3x4: - case EOpConstructUMat4x2: - case EOpConstructUMat4x3: - case EOpConstructUMat4x4: - case EOpConstructBMat2x2: - case EOpConstructBMat2x3: - case EOpConstructBMat2x4: - case EOpConstructBMat3x2: - case EOpConstructBMat3x3: - case EOpConstructBMat3x4: - case EOpConstructBMat4x2: - case EOpConstructBMat4x3: - case EOpConstructBMat4x4: - constructingMatrix = true; - break; - default: - break; - } - - // - // Walk the arguments for first-pass checks and collection of information. - // - - int size = 0; - bool constType = true; - bool full = false; - bool overFull = false; - bool matrixInMatrix = false; - bool arrayArg = false; - for (int arg = 0; arg < function.getParamCount(); ++arg) { - if (function[arg].type->isArray()) { - if (! function[arg].type->isExplicitlySizedArray()) { - // Can't construct from an unsized array. - error(loc, "array argument must be sized", "constructor", ""); - return true; - } - arrayArg = true; - } - if (constructingMatrix && function[arg].type->isMatrix()) - matrixInMatrix = true; - - // 'full' will go to true when enough args have been seen. If we loop - // again, there is an extra argument. - if (full) { - // For vectors and matrices, it's okay to have too many components - // available, but not okay to have unused arguments. - overFull = true; - } - - size += function[arg].type->computeNumComponents(); - if (op != EOpConstructStruct && ! type.isArray() && size >= type.computeNumComponents()) - full = true; - - if (function[arg].type->getQualifier().storage != EvqConst) - constType = false; - } - - if (constType) - type.getQualifier().storage = EvqConst; - - if (type.isArray()) { - if (function.getParamCount() == 0) { - error(loc, "array constructor must have at least one argument", "constructor", ""); - return true; - } - - if (type.isImplicitlySizedArray()) { - // auto adapt the constructor type to the number of arguments - type.changeOuterArraySize(function.getParamCount()); - } else if (type.getOuterArraySize() != function.getParamCount() && - type.computeNumComponents() > size) { - error(loc, "array constructor needs one argument per array element", "constructor", ""); - return true; - } - - if (type.isArrayOfArrays()) { - // Types have to match, but we're still making the type. 
- // Finish making the type, and the comparison is done later - // when checking for conversion. - TArraySizes& arraySizes = type.getArraySizes(); - - // At least the dimensionalities have to match. - if (! function[0].type->isArray() || - arraySizes.getNumDims() != function[0].type->getArraySizes().getNumDims() + 1) { - error(loc, "array constructor argument not correct type to construct array element", "constructor", ""); - return true; - } - - if (arraySizes.isInnerImplicit()) { - // "Arrays of arrays ..., and the size for any dimension is optional" - // That means we need to adopt (from the first argument) the other array sizes into the type. - for (int d = 1; d < arraySizes.getNumDims(); ++d) { - if (arraySizes.getDimSize(d) == UnsizedArraySize) { - arraySizes.setDimSize(d, function[0].type->getArraySizes().getDimSize(d - 1)); - } - } - } - } - } - - // Some array -> array type casts are okay - if (arrayArg && function.getParamCount() == 1 && op != EOpConstructStruct && type.isArray() && - !type.isArrayOfArrays() && !function[0].type->isArrayOfArrays() && - type.getVectorSize() >= 1 && function[0].type->getVectorSize() >= 1) - return false; - - if (arrayArg && op != EOpConstructStruct && ! type.isArrayOfArrays()) { - error(loc, "constructing non-array constituent from array argument", "constructor", ""); - return true; - } - - if (matrixInMatrix && ! type.isArray()) { - return false; - } - - if (overFull) { - error(loc, "too many arguments", "constructor", ""); - return true; - } - - if (op == EOpConstructStruct && ! type.isArray() && isScalarConstructor(node)) - return false; - - if (op == EOpConstructStruct && ! type.isArray() && (int)type.getStruct()->size() != function.getParamCount()) { - error(loc, "Number of constructor parameters does not match the number of structure fields", "constructor", ""); - return true; - } - - if ((op != EOpConstructStruct && size != 1 && size < type.computeNumComponents()) || - (op == EOpConstructStruct && size < type.computeNumComponents())) { - error(loc, "not enough data provided for construction", "constructor", ""); - return true; - } - - return false; -} - -// See if 'node', in the context of constructing aggregates, is a scalar argument -// to a constructor. -// -bool HlslParseContext::isScalarConstructor(const TIntermNode* node) -{ - // Obviously, it must be a scalar, but an aggregate node might not be fully - // completed yet: holding a sequence of initializers under an aggregate - // would not yet be typed, so don't check it's type. This corresponds to - // the aggregate operator also not being set yet. (An aggregate operation - // that legitimately yields a scalar will have a getOp() of that operator, - // not EOpNull.) - - return node->getAsTyped() != nullptr && - node->getAsTyped()->isScalar() && - (node->getAsAggregate() == nullptr || node->getAsAggregate()->getOp() != EOpNull); -} - -// Verify all the correct semantics for constructing a combined texture/sampler. -// Return true if the semantics are incorrect. 
-bool HlslParseContext::constructorTextureSamplerError(const TSourceLoc& loc, const TFunction& function) -{ - TString constructorName = function.getType().getBasicTypeString(); // TODO: performance: should not be making copy; interface needs to change - const char* token = constructorName.c_str(); - - // exactly two arguments needed - if (function.getParamCount() != 2) { - error(loc, "sampler-constructor requires two arguments", token, ""); - return true; - } - - // For now, not allowing arrayed constructors, the rest of this function - // is set up to allow them, if this test is removed: - if (function.getType().isArray()) { - error(loc, "sampler-constructor cannot make an array of samplers", token, ""); - return true; - } - - // first argument - // * the constructor's first argument must be a texture type - // * the dimensionality (1D, 2D, 3D, Cube, Rect, Buffer, MS, and Array) - // of the texture type must match that of the constructed sampler type - // (that is, the suffixes of the type of the first argument and the - // type of the constructor will be spelled the same way) - if (function[0].type->getBasicType() != EbtSampler || - ! function[0].type->getSampler().isTexture() || - function[0].type->isArray()) { - error(loc, "sampler-constructor first argument must be a scalar textureXXX type", token, ""); - return true; - } - // simulate the first argument's impact on the result type, so it can be compared with the encapsulated operator!=() - TSampler texture = function.getType().getSampler(); - texture.combined = false; - texture.shadow = false; - if (texture != function[0].type->getSampler()) { - error(loc, "sampler-constructor first argument must match type and dimensionality of constructor type", token, ""); - return true; - } - - // second argument - // * the constructor's second argument must be a scalar of type - // *sampler* or *samplerShadow* - // * the presence or absence of depth comparison (Shadow) must match - // between the constructed sampler type and the type of the second argument - if (function[1].type->getBasicType() != EbtSampler || - ! function[1].type->getSampler().isPureSampler() || - function[1].type->isArray()) { - error(loc, "sampler-constructor second argument must be a scalar type 'sampler'", token, ""); - return true; - } - if (function.getType().getSampler().shadow != function[1].type->getSampler().shadow) { - error(loc, "sampler-constructor second argument presence of shadow must match constructor presence of shadow", - token, ""); - return true; - } - - return false; -} - -// Checks to see if a void variable has been declared and raise an error message for such a case -// -// returns true in case of an error -// -bool HlslParseContext::voidErrorCheck(const TSourceLoc& loc, const TString& identifier, const TBasicType basicType) -{ - if (basicType == EbtVoid) { - error(loc, "illegal use of type 'void'", identifier.c_str(), ""); - return true; - } - - return false; -} - -// -// Fix just a full qualifier (no variables or types yet, but qualifier is complete) at global level. -// -void HlslParseContext::globalQualifierFix(const TSourceLoc&, TQualifier& qualifier) -{ - // move from parameter/unknown qualifiers to pipeline in/out qualifiers - switch (qualifier.storage) { - case EvqIn: - qualifier.storage = EvqVaryingIn; - break; - case EvqOut: - qualifier.storage = EvqVaryingOut; - break; - default: - break; - } -} - -// -// Merge characteristics of the 'src' qualifier into the 'dst'. 
-// If there is duplication, issue error messages, unless 'force' -// is specified, which means to just override default settings. -// -// Also, when force is false, it will be assumed that 'src' follows -// 'dst', for the purpose of error checking order for versions -// that require specific orderings of qualifiers. -// -void HlslParseContext::mergeQualifiers(TQualifier& dst, const TQualifier& src) -{ - // Storage qualification - if (dst.storage == EvqTemporary || dst.storage == EvqGlobal) - dst.storage = src.storage; - else if ((dst.storage == EvqIn && src.storage == EvqOut) || - (dst.storage == EvqOut && src.storage == EvqIn)) - dst.storage = EvqInOut; - else if ((dst.storage == EvqIn && src.storage == EvqConst) || - (dst.storage == EvqConst && src.storage == EvqIn)) - dst.storage = EvqConstReadOnly; - - // Layout qualifiers - mergeObjectLayoutQualifiers(dst, src, false); - - // individual qualifiers - bool repeated = false; -#define MERGE_SINGLETON(field) repeated |= dst.field && src.field; dst.field |= src.field; - MERGE_SINGLETON(invariant); - MERGE_SINGLETON(noContraction); - MERGE_SINGLETON(centroid); - MERGE_SINGLETON(smooth); - MERGE_SINGLETON(flat); - MERGE_SINGLETON(nopersp); - MERGE_SINGLETON(patch); - MERGE_SINGLETON(sample); - MERGE_SINGLETON(coherent); - MERGE_SINGLETON(volatil); - MERGE_SINGLETON(restrict); - MERGE_SINGLETON(readonly); - MERGE_SINGLETON(writeonly); - MERGE_SINGLETON(specConstant); -} - -// used to flatten the sampler type space into a single dimension -// correlates with the declaration of defaultSamplerPrecision[] -int HlslParseContext::computeSamplerTypeIndex(TSampler& sampler) -{ - int arrayIndex = sampler.arrayed ? 1 : 0; - int shadowIndex = sampler.shadow ? 1 : 0; - int externalIndex = sampler.external ? 1 : 0; - - return EsdNumDims * - (EbtNumTypes * (2 * (2 * arrayIndex + shadowIndex) + externalIndex) + sampler.type) + sampler.dim; -} - -// -// Do size checking for an array type's size. -// -void HlslParseContext::arraySizeCheck(const TSourceLoc& loc, TIntermTyped* expr, TArraySize& sizePair) -{ - bool isConst = false; - sizePair.size = 1; - sizePair.node = nullptr; - - TIntermConstantUnion* constant = expr->getAsConstantUnion(); - if (constant) { - // handle true (non-specialization) constant - sizePair.size = constant->getConstArray()[0].getIConst(); - isConst = true; - } else { - // see if it's a specialization constant instead - if (expr->getQualifier().isSpecConstant()) { - isConst = true; - sizePair.node = expr; - TIntermSymbol* symbol = expr->getAsSymbolNode(); - if (symbol && symbol->getConstArray().size() > 0) - sizePair.size = symbol->getConstArray()[0].getIConst(); - } - } - - if (! 
isConst || (expr->getBasicType() != EbtInt && expr->getBasicType() != EbtUint)) { - error(loc, "array size must be a constant integer expression", "", ""); - return; - } - - if (sizePair.size <= 0) { - error(loc, "array size must be a positive integer", "", ""); - return; - } -} - -// -// Require array to be completely sized -// -void HlslParseContext::arraySizeRequiredCheck(const TSourceLoc& loc, const TArraySizes& arraySizes) -{ - if (arraySizes.isImplicit()) - error(loc, "array size required", "", ""); -} - -void HlslParseContext::structArrayCheck(const TSourceLoc& /*loc*/, const TType& type) -{ - const TTypeList& structure = *type.getStruct(); - for (int m = 0; m < (int)structure.size(); ++m) { - const TType& member = *structure[m].type; - if (member.isArray()) - arraySizeRequiredCheck(structure[m].loc, *member.getArraySizes()); - } -} - -// Merge array dimensions listed in 'sizes' onto the type's array dimensions. -// -// From the spec: "vec4[2] a[3]; // size-3 array of size-2 array of vec4" -// -// That means, the 'sizes' go in front of the 'type' as outermost sizes. -// 'type' is the type part of the declaration (to the left) -// 'sizes' is the arrayness tagged on the identifier (to the right) -// -void HlslParseContext::arrayDimMerge(TType& type, const TArraySizes* sizes) -{ - if (sizes) - type.addArrayOuterSizes(*sizes); -} - -// -// Do all the semantic checking for declaring or redeclaring an array, with and -// without a size, and make the right changes to the symbol table. -// -void HlslParseContext::declareArray(const TSourceLoc& loc, const TString& identifier, const TType& type, - TSymbol*& symbol, bool track) -{ - if (symbol == nullptr) { - bool currentScope; - symbol = symbolTable.find(identifier, nullptr, &currentScope); - - if (symbol && builtInName(identifier) && ! symbolTable.atBuiltInLevel()) { - // bad shader (errors already reported) trying to redeclare a built-in name as an array - return; - } - if (symbol == nullptr || ! currentScope) { - // - // Successfully process a new definition. - // (Redeclarations have to take place at the same scope; otherwise they are hiding declarations) - // - symbol = new TVariable(&identifier, type); - symbolTable.insert(*symbol); - if (track && symbolTable.atGlobalLevel()) - trackLinkage(*symbol); - - return; - } - if (symbol->getAsAnonMember()) { - error(loc, "cannot redeclare a user-block member array", identifier.c_str(), ""); - symbol = nullptr; - return; - } - } - - // - // Process a redeclaration. - // - - if (symbol == nullptr) { - error(loc, "array variable name expected", identifier.c_str(), ""); - return; - } - - // redeclareBuiltinVariable() should have already done the copyUp() - TType& existingType = symbol->getWritableType(); - - if (existingType.isExplicitlySizedArray()) { - // be more lenient for input arrays to geometry shaders and tessellation control outputs, - // where the redeclaration is the same size - return; - } - - existingType.updateArraySizes(type); -} - -void HlslParseContext::updateImplicitArraySize(const TSourceLoc& loc, TIntermNode *node, int index) -{ - // maybe there is nothing to do... - TIntermTyped* typedNode = node->getAsTyped(); - if (typedNode->getType().getImplicitArraySize() > index) - return; - - // something to do... - - // Figure out what symbol to lookup, as we will use its type to edit for the size change, - // as that type will be shared through shallow copies for future references.
- TSymbol* symbol = nullptr; - int blockIndex = -1; - const TString* lookupName = nullptr; - if (node->getAsSymbolNode()) - lookupName = &node->getAsSymbolNode()->getName(); - else if (node->getAsBinaryNode()) { - const TIntermBinary* deref = node->getAsBinaryNode(); - // This has to be the result of a block dereference, unless it's bad shader code - // If it's a uniform block, then an error will be issued elsewhere, but - // return early now to avoid crashing later in this function. - if (! deref->getLeft()->getAsSymbolNode() || deref->getLeft()->getBasicType() != EbtBlock || - deref->getLeft()->getType().getQualifier().storage == EvqUniform || - deref->getRight()->getAsConstantUnion() == nullptr) - return; - - blockIndex = deref->getRight()->getAsConstantUnion()->getConstArray()[0].getIConst(); - - lookupName = &deref->getLeft()->getAsSymbolNode()->getName(); - if (IsAnonymous(*lookupName)) - lookupName = &(*deref->getLeft()->getType().getStruct())[blockIndex].type->getFieldName(); - } - - // Lookup the symbol, should only fail if shader code is incorrect - symbol = symbolTable.find(*lookupName); - if (symbol == nullptr) - return; - - if (symbol->getAsFunction()) { - error(loc, "array variable name expected", symbol->getName().c_str(), ""); - return; - } - - symbol->getWritableType().setImplicitArraySize(index + 1); -} - -// -// Enforce non-initializer type/qualifier rules. -// -void HlslParseContext::fixConstInit(const TSourceLoc& loc, const TString& identifier, TType& type, - TIntermTyped*& initializer) -{ - // - // Make the qualifier make sense, given that there is an initializer. - // - if (initializer == nullptr) { - if (type.getQualifier().storage == EvqConst || - type.getQualifier().storage == EvqConstReadOnly) { - initializer = intermediate.makeAggregate(loc); - warn(loc, "variable with qualifier 'const' not initialized; zero initializing", identifier.c_str(), ""); - } - } -} - -// -// See if the identifier is a built-in symbol that can be redeclared, and if so, -// copy the symbol table's read-only built-in variable to the current -// global level, where it can be modified based on the passed in type. -// -// Returns nullptr if no redeclaration took place; meaning a normal declaration still -// needs to occur for it, not necessarily an error. -// -// Returns a redeclared and type-modified variable if a redeclared occurred. -// -TSymbol* HlslParseContext::redeclareBuiltinVariable(const TSourceLoc& /*loc*/, const TString& identifier, - const TQualifier& /*qualifier*/, - const TShaderQualifiers& /*publicType*/) -{ - if (! builtInName(identifier) || symbolTable.atBuiltInLevel() || ! symbolTable.atGlobalLevel()) - return nullptr; - - return nullptr; -} - -// -// Generate index to the array element in a structure buffer (SSBO) -// -TIntermTyped* HlslParseContext::indexStructBufferContent(const TSourceLoc& loc, TIntermTyped* buffer) const -{ - // Bail out if not a struct buffer - if (buffer == nullptr || ! isStructBufferType(buffer->getType())) - return nullptr; - - // Runtime sized array is always the last element. - const TTypeList* bufferStruct = buffer->getType().getStruct(); - TIntermTyped* arrayPosition = intermediate.addConstantUnion(unsigned(bufferStruct->size()-1), loc); - - TIntermTyped* argArray = intermediate.addIndex(EOpIndexDirectStruct, buffer, arrayPosition, loc); - argArray->setType(*(*bufferStruct)[bufferStruct->size()-1].type); - - return argArray; -} - -// -// IFF type is a structuredbuffer/byteaddressbuffer type, return the content -// (template) type. 
E.g, StructuredBuffer<MyType> -> MyType. Else return nullptr. -// -TType* HlslParseContext::getStructBufferContentType(const TType& type) const -{ - if (type.getBasicType() != EbtBlock) - return nullptr; - - const int memberCount = (int)type.getStruct()->size(); - assert(memberCount > 0); - - TType* contentType = (*type.getStruct())[memberCount-1].type; - - return contentType->isRuntimeSizedArray() ? contentType : nullptr; -} - -// -// If an existing struct buffer has a sharable type, then share it. -// -void HlslParseContext::shareStructBufferType(TType& type) -{ - // PackOffset must be equivalent to share types on a per-member basis. - // Note: cannot use auto type due to recursion. Thus, this is a std::function. - const std::function<bool(TType&, TType&)> - compareQualifiers = [&](TType& lhs, TType& rhs) -> bool { - if (lhs.getQualifier().layoutOffset != rhs.getQualifier().layoutOffset) - return false; - - if (lhs.isStruct() != rhs.isStruct()) - return false; - - if (lhs.isStruct() && rhs.isStruct()) { - if (lhs.getStruct()->size() != rhs.getStruct()->size()) - return false; - - for (int i = 0; i < int(lhs.getStruct()->size()); ++i) - if (!compareQualifiers(*(*lhs.getStruct())[i].type, *(*rhs.getStruct())[i].type)) - return false; - } - - return true; - }; - - // We need to compare certain qualifiers in addition to the type. - const auto typeEqual = [compareQualifiers](TType& lhs, TType& rhs) -> bool { - if (lhs.getQualifier().readonly != rhs.getQualifier().readonly) - return false; - - // If both are structures, recursively look for packOffset equality - // as well as type equality. - return compareQualifiers(lhs, rhs) && lhs == rhs; - }; - - // This is an exhaustive O(N) search, but real world shaders have - // only a small number of these. - for (int idx = 0; idx < int(structBufferTypes.size()); ++idx) { - // If the deep structure matches, modulo qualifiers, use it - if (typeEqual(*structBufferTypes[idx], type)) { - type.shallowCopy(*structBufferTypes[idx]); - return; - } - } - - // Otherwise, remember it: - TType* typeCopy = new TType; - typeCopy->shallowCopy(type); - structBufferTypes.push_back(typeCopy); -} - -void HlslParseContext::paramFix(TType& type) -{ - switch (type.getQualifier().storage) { - case EvqConst: - type.getQualifier().storage = EvqConstReadOnly; - break; - case EvqGlobal: - case EvqUniform: - case EvqTemporary: - type.getQualifier().storage = EvqIn; - break; - case EvqBuffer: - { - // SSBO parameter. These do not go through the declareBlock path since they are fn parameters. - correctUniform(type.getQualifier()); - TQualifier bufferQualifier = globalBufferDefaults; - mergeObjectLayoutQualifiers(bufferQualifier, type.getQualifier(), true); - bufferQualifier.storage = type.getQualifier().storage; - bufferQualifier.readonly = type.getQualifier().readonly; - bufferQualifier.coherent = type.getQualifier().coherent; - bufferQualifier.declaredBuiltIn = type.getQualifier().declaredBuiltIn; - type.getQualifier() = bufferQualifier; - break; - } - default: - break; - } -} - -void HlslParseContext::specializationCheck(const TSourceLoc& loc, const TType& type, const char* op) -{ - if (type.containsSpecializationSize()) - error(loc, "can't use with types containing arrays sized with a specialization constant", op, ""); -} - -// -// Layout qualifier stuff. -// - -// Put the id's layout qualification into the public type, for qualifiers not having a number set. -// This is before we know any type information for error checking.
-void HlslParseContext::setLayoutQualifier(const TSourceLoc& loc, TQualifier& qualifier, TString& id) -{ - std::transform(id.begin(), id.end(), id.begin(), ::tolower); - - if (id == TQualifier::getLayoutMatrixString(ElmColumnMajor)) { - qualifier.layoutMatrix = ElmRowMajor; - return; - } - if (id == TQualifier::getLayoutMatrixString(ElmRowMajor)) { - qualifier.layoutMatrix = ElmColumnMajor; - return; - } - if (id == "push_constant") { - requireVulkan(loc, "push_constant"); - qualifier.layoutPushConstant = true; - return; - } - if (language == EShLangGeometry || language == EShLangTessEvaluation) { - if (id == TQualifier::getGeometryString(ElgTriangles)) { - // publicType.shaderQualifiers.geometry = ElgTriangles; - warn(loc, "ignored", id.c_str(), ""); - return; - } - if (language == EShLangGeometry) { - if (id == TQualifier::getGeometryString(ElgPoints)) { - // publicType.shaderQualifiers.geometry = ElgPoints; - warn(loc, "ignored", id.c_str(), ""); - return; - } - if (id == TQualifier::getGeometryString(ElgLineStrip)) { - // publicType.shaderQualifiers.geometry = ElgLineStrip; - warn(loc, "ignored", id.c_str(), ""); - return; - } - if (id == TQualifier::getGeometryString(ElgLines)) { - // publicType.shaderQualifiers.geometry = ElgLines; - warn(loc, "ignored", id.c_str(), ""); - return; - } - if (id == TQualifier::getGeometryString(ElgLinesAdjacency)) { - // publicType.shaderQualifiers.geometry = ElgLinesAdjacency; - warn(loc, "ignored", id.c_str(), ""); - return; - } - if (id == TQualifier::getGeometryString(ElgTrianglesAdjacency)) { - // publicType.shaderQualifiers.geometry = ElgTrianglesAdjacency; - warn(loc, "ignored", id.c_str(), ""); - return; - } - if (id == TQualifier::getGeometryString(ElgTriangleStrip)) { - // publicType.shaderQualifiers.geometry = ElgTriangleStrip; - warn(loc, "ignored", id.c_str(), ""); - return; - } - } else { - assert(language == EShLangTessEvaluation); - - // input primitive - if (id == TQualifier::getGeometryString(ElgTriangles)) { - // publicType.shaderQualifiers.geometry = ElgTriangles; - warn(loc, "ignored", id.c_str(), ""); - return; - } - if (id == TQualifier::getGeometryString(ElgQuads)) { - // publicType.shaderQualifiers.geometry = ElgQuads; - warn(loc, "ignored", id.c_str(), ""); - return; - } - if (id == TQualifier::getGeometryString(ElgIsolines)) { - // publicType.shaderQualifiers.geometry = ElgIsolines; - warn(loc, "ignored", id.c_str(), ""); - return; - } - - // vertex spacing - if (id == TQualifier::getVertexSpacingString(EvsEqual)) { - // publicType.shaderQualifiers.spacing = EvsEqual; - warn(loc, "ignored", id.c_str(), ""); - return; - } - if (id == TQualifier::getVertexSpacingString(EvsFractionalEven)) { - // publicType.shaderQualifiers.spacing = EvsFractionalEven; - warn(loc, "ignored", id.c_str(), ""); - return; - } - if (id == TQualifier::getVertexSpacingString(EvsFractionalOdd)) { - // publicType.shaderQualifiers.spacing = EvsFractionalOdd; - warn(loc, "ignored", id.c_str(), ""); - return; - } - - // triangle order - if (id == TQualifier::getVertexOrderString(EvoCw)) { - // publicType.shaderQualifiers.order = EvoCw; - warn(loc, "ignored", id.c_str(), ""); - return; - } - if (id == TQualifier::getVertexOrderString(EvoCcw)) { - // publicType.shaderQualifiers.order = EvoCcw; - warn(loc, "ignored", id.c_str(), ""); - return; - } - - // point mode - if (id == "point_mode") { - // publicType.shaderQualifiers.pointMode = true; - warn(loc, "ignored", id.c_str(), ""); - return; - } - } - } - if (language == EShLangFragment) { - if (id == 
"origin_upper_left") { - // publicType.shaderQualifiers.originUpperLeft = true; - warn(loc, "ignored", id.c_str(), ""); - return; - } - if (id == "pixel_center_integer") { - // publicType.shaderQualifiers.pixelCenterInteger = true; - warn(loc, "ignored", id.c_str(), ""); - return; - } - if (id == "early_fragment_tests") { - // publicType.shaderQualifiers.earlyFragmentTests = true; - warn(loc, "ignored", id.c_str(), ""); - return; - } - for (TLayoutDepth depth = (TLayoutDepth)(EldNone + 1); depth < EldCount; depth = (TLayoutDepth)(depth + 1)) { - if (id == TQualifier::getLayoutDepthString(depth)) { - // publicType.shaderQualifiers.layoutDepth = depth; - warn(loc, "ignored", id.c_str(), ""); - return; - } - } - if (id.compare(0, 13, "blend_support") == 0) { - bool found = false; - for (TBlendEquationShift be = (TBlendEquationShift)0; be < EBlendCount; be = (TBlendEquationShift)(be + 1)) { - if (id == TQualifier::getBlendEquationString(be)) { - requireExtensions(loc, 1, &E_GL_KHR_blend_equation_advanced, "blend equation"); - intermediate.addBlendEquation(be); - // publicType.shaderQualifiers.blendEquation = true; - warn(loc, "ignored", id.c_str(), ""); - found = true; - break; - } - } - if (! found) - error(loc, "unknown blend equation", "blend_support", ""); - return; - } - } - error(loc, "unrecognized layout identifier, or qualifier requires assignment (e.g., binding = 4)", id.c_str(), ""); -} - -// Put the id's layout qualifier value into the public type, for qualifiers having a number set. -// This is before we know any type information for error checking. -void HlslParseContext::setLayoutQualifier(const TSourceLoc& loc, TQualifier& qualifier, TString& id, - const TIntermTyped* node) -{ - const char* feature = "layout-id value"; - // const char* nonLiteralFeature = "non-literal layout-id value"; - - integerCheck(node, feature); - const TIntermConstantUnion* constUnion = node->getAsConstantUnion(); - int value = 0; - if (constUnion) { - value = constUnion->getConstArray()[0].getIConst(); - } - - std::transform(id.begin(), id.end(), id.begin(), ::tolower); - - if (id == "offset") { - qualifier.layoutOffset = value; - return; - } else if (id == "align") { - // "The specified alignment must be a power of 2, or a compile-time error results." - if (! IsPow2(value)) - error(loc, "must be a power of 2", "align", ""); - else - qualifier.layoutAlign = value; - return; - } else if (id == "location") { - if ((unsigned int)value >= TQualifier::layoutLocationEnd) - error(loc, "location is too large", id.c_str(), ""); - else - qualifier.layoutLocation = value; - return; - } else if (id == "set") { - if ((unsigned int)value >= TQualifier::layoutSetEnd) - error(loc, "set is too large", id.c_str(), ""); - else - qualifier.layoutSet = value; - return; - } else if (id == "binding") { - if ((unsigned int)value >= TQualifier::layoutBindingEnd) - error(loc, "binding is too large", id.c_str(), ""); - else - qualifier.layoutBinding = value; - return; - } else if (id == "component") { - if ((unsigned)value >= TQualifier::layoutComponentEnd) - error(loc, "component is too large", id.c_str(), ""); - else - qualifier.layoutComponent = value; - return; - } else if (id.compare(0, 4, "xfb_") == 0) { - // "Any shader making any static use (after preprocessing) of any of these - // *xfb_* qualifiers will cause the shader to be in a transform feedback - // capturing mode and hence responsible for describing the transform feedback - // setup." 
- intermediate.setXfbMode(); - if (id == "xfb_buffer") { - // "It is a compile-time error to specify an *xfb_buffer* that is greater than - // the implementation-dependent constant gl_MaxTransformFeedbackBuffers." - if (value >= resources.maxTransformFeedbackBuffers) - error(loc, "buffer is too large:", id.c_str(), "gl_MaxTransformFeedbackBuffers is %d", - resources.maxTransformFeedbackBuffers); - if (value >= (int)TQualifier::layoutXfbBufferEnd) - error(loc, "buffer is too large:", id.c_str(), "internal max is %d", TQualifier::layoutXfbBufferEnd - 1); - else - qualifier.layoutXfbBuffer = value; - return; - } else if (id == "xfb_offset") { - if (value >= (int)TQualifier::layoutXfbOffsetEnd) - error(loc, "offset is too large:", id.c_str(), "internal max is %d", TQualifier::layoutXfbOffsetEnd - 1); - else - qualifier.layoutXfbOffset = value; - return; - } else if (id == "xfb_stride") { - // "The resulting stride (implicit or explicit), when divided by 4, must be less than or equal to the - // implementation-dependent constant gl_MaxTransformFeedbackInterleavedComponents." - if (value > 4 * resources.maxTransformFeedbackInterleavedComponents) - error(loc, "1/4 stride is too large:", id.c_str(), "gl_MaxTransformFeedbackInterleavedComponents is %d", - resources.maxTransformFeedbackInterleavedComponents); - else if (value >= (int)TQualifier::layoutXfbStrideEnd) - error(loc, "stride is too large:", id.c_str(), "internal max is %d", TQualifier::layoutXfbStrideEnd - 1); - if (value < (int)TQualifier::layoutXfbStrideEnd) - qualifier.layoutXfbStride = value; - return; - } - } - - if (id == "input_attachment_index") { - requireVulkan(loc, "input_attachment_index"); - if (value >= (int)TQualifier::layoutAttachmentEnd) - error(loc, "attachment index is too large", id.c_str(), ""); - else - qualifier.layoutAttachment = value; - return; - } - if (id == "constant_id") { - requireSpv(loc, "constant_id"); - if (value >= (int)TQualifier::layoutSpecConstantIdEnd) { - error(loc, "specialization-constant id is too large", id.c_str(), ""); - } else { - qualifier.layoutSpecConstantId = value; - qualifier.specConstant = true; - if (! 
intermediate.addUsedConstantId(value)) - error(loc, "specialization-constant id already used", id.c_str(), ""); - } - return; - } - - switch (language) { - case EShLangVertex: - break; - - case EShLangTessControl: - if (id == "vertices") { - if (value == 0) - error(loc, "must be greater than 0", "vertices", ""); - else - // publicType.shaderQualifiers.vertices = value; - warn(loc, "ignored", id.c_str(), ""); - return; - } - break; - - case EShLangTessEvaluation: - break; - - case EShLangGeometry: - if (id == "invocations") { - if (value == 0) - error(loc, "must be at least 1", "invocations", ""); - else - // publicType.shaderQualifiers.invocations = value; - warn(loc, "ignored", id.c_str(), ""); - return; - } - if (id == "max_vertices") { - // publicType.shaderQualifiers.vertices = value; - warn(loc, "ignored", id.c_str(), ""); - if (value > resources.maxGeometryOutputVertices) - error(loc, "too large, must be less than gl_MaxGeometryOutputVertices", "max_vertices", ""); - return; - } - if (id == "stream") { - qualifier.layoutStream = value; - return; - } - break; - - case EShLangFragment: - if (id == "index") { - qualifier.layoutIndex = value; - return; - } - break; - - case EShLangCompute: - if (id.compare(0, 11, "local_size_") == 0) { - if (id == "local_size_x") { - // publicType.shaderQualifiers.localSize[0] = value; - warn(loc, "ignored", id.c_str(), ""); - return; - } - if (id == "local_size_y") { - // publicType.shaderQualifiers.localSize[1] = value; - warn(loc, "ignored", id.c_str(), ""); - return; - } - if (id == "local_size_z") { - // publicType.shaderQualifiers.localSize[2] = value; - warn(loc, "ignored", id.c_str(), ""); - return; - } - if (spvVersion.spv != 0) { - if (id == "local_size_x_id") { - // publicType.shaderQualifiers.localSizeSpecId[0] = value; - warn(loc, "ignored", id.c_str(), ""); - return; - } - if (id == "local_size_y_id") { - // publicType.shaderQualifiers.localSizeSpecId[1] = value; - warn(loc, "ignored", id.c_str(), ""); - return; - } - if (id == "local_size_z_id") { - // publicType.shaderQualifiers.localSizeSpecId[2] = value; - warn(loc, "ignored", id.c_str(), ""); - return; - } - } - } - break; - - default: - break; - } - - error(loc, "there is no such layout identifier for this stage taking an assigned value", id.c_str(), ""); -} - -// Merge any layout qualifier information from src into dst, leaving everything else in dst alone -// -// "More than one layout qualifier may appear in a single declaration. -// Additionally, the same layout-qualifier-name can occur multiple times -// within a layout qualifier or across multiple layout qualifiers in the -// same declaration. When the same layout-qualifier-name occurs -// multiple times, in a single declaration, the last occurrence overrides -// the former occurrence(s). Further, if such a layout-qualifier-name -// will effect subsequent declarations or other observable behavior, it -// is only the last occurrence that will have any effect, behaving as if -// the earlier occurrence(s) within the declaration are not present. -// This is also true for overriding layout-qualifier-names, where one -// overrides the other (e.g., row_major vs. column_major); only the last -// occurrence has any effect." 
-// -void HlslParseContext::mergeObjectLayoutQualifiers(TQualifier& dst, const TQualifier& src, bool inheritOnly) -{ - if (src.hasMatrix()) - dst.layoutMatrix = src.layoutMatrix; - if (src.hasPacking()) - dst.layoutPacking = src.layoutPacking; - - if (src.hasStream()) - dst.layoutStream = src.layoutStream; - - if (src.hasFormat()) - dst.layoutFormat = src.layoutFormat; - - if (src.hasXfbBuffer()) - dst.layoutXfbBuffer = src.layoutXfbBuffer; - - if (src.hasAlign()) - dst.layoutAlign = src.layoutAlign; - - if (! inheritOnly) { - if (src.hasLocation()) - dst.layoutLocation = src.layoutLocation; - if (src.hasComponent()) - dst.layoutComponent = src.layoutComponent; - if (src.hasIndex()) - dst.layoutIndex = src.layoutIndex; - - if (src.hasOffset()) - dst.layoutOffset = src.layoutOffset; - - if (src.hasSet()) - dst.layoutSet = src.layoutSet; - if (src.layoutBinding != TQualifier::layoutBindingEnd) - dst.layoutBinding = src.layoutBinding; - - if (src.hasXfbStride()) - dst.layoutXfbStride = src.layoutXfbStride; - if (src.hasXfbOffset()) - dst.layoutXfbOffset = src.layoutXfbOffset; - if (src.hasAttachment()) - dst.layoutAttachment = src.layoutAttachment; - if (src.hasSpecConstantId()) - dst.layoutSpecConstantId = src.layoutSpecConstantId; - - if (src.layoutPushConstant) - dst.layoutPushConstant = true; - } -} - -// -// Look up a function name in the symbol table, and make sure it is a function. -// -// First, look for an exact match. If there is none, use the generic selector -// TParseContextBase::selectFunction() to find one, parameterized by the -// convertible() and better() predicates defined below. -// -// Return the function symbol if found, otherwise nullptr. -// -const TFunction* HlslParseContext::findFunction(const TSourceLoc& loc, TFunction& call, bool& builtIn, int& thisDepth, - TIntermTyped*& args) -{ - if (symbolTable.isFunctionNameVariable(call.getName())) { - error(loc, "can't use function syntax on variable", call.getName().c_str(), ""); - return nullptr; - } - - // first, look for an exact match - bool dummyScope; - TSymbol* symbol = symbolTable.find(call.getMangledName(), &builtIn, &dummyScope, &thisDepth); - if (symbol) - return symbol->getAsFunction(); - - // no exact match, use the generic selector, parameterized by the GLSL rules - - // create list of candidates to send - TVector<const TFunction*> candidateList; - symbolTable.findFunctionNameList(call.getMangledName(), candidateList, builtIn); - - // These built-in ops can accept any type, so we bypass the argument selection - if (candidateList.size() == 1 && builtIn && - (candidateList[0]->getBuiltInOp() == EOpMethodAppend || - candidateList[0]->getBuiltInOp() == EOpMethodRestartStrip || - candidateList[0]->getBuiltInOp() == EOpMethodIncrementCounter || - candidateList[0]->getBuiltInOp() == EOpMethodDecrementCounter || - candidateList[0]->getBuiltInOp() == EOpMethodAppend || - candidateList[0]->getBuiltInOp() == EOpMethodConsume)) { - return candidateList[0]; - } - - bool allowOnlyUpConversions = true; - - // can 'from' convert to 'to'?
- const auto convertible = [&](const TType& from, const TType& to, TOperator op, int arg) -> bool { - if (from == to) - return true; - - // no aggregate conversions - if (from.isArray() || to.isArray() || - from.isStruct() || to.isStruct()) - return false; - - switch (op) { - case EOpInterlockedAdd: - case EOpInterlockedAnd: - case EOpInterlockedCompareExchange: - case EOpInterlockedCompareStore: - case EOpInterlockedExchange: - case EOpInterlockedMax: - case EOpInterlockedMin: - case EOpInterlockedOr: - case EOpInterlockedXor: - // We do not promote the texture or image type for these opcodes. Normally that would not - // be an issue because it's a buffer, but we haven't decomposed the opcode yet, and at this - // stage it's merely e.g, a basic integer type. - // - // Instead, we want to promote other arguments, but stay within the same family. In other - // words, InterlockedAdd(RWBuffer<int>, ...) will always use the int flavor, never the uint flavor, - // but it is allowed to promote its other arguments. - if (arg == 0) - return false; - break; - case EOpMethodSample: - case EOpMethodSampleBias: - case EOpMethodSampleCmp: - case EOpMethodSampleCmpLevelZero: - case EOpMethodSampleGrad: - case EOpMethodSampleLevel: - case EOpMethodLoad: - case EOpMethodGetDimensions: - case EOpMethodGetSamplePosition: - case EOpMethodGather: - case EOpMethodCalculateLevelOfDetail: - case EOpMethodCalculateLevelOfDetailUnclamped: - case EOpMethodGatherRed: - case EOpMethodGatherGreen: - case EOpMethodGatherBlue: - case EOpMethodGatherAlpha: - case EOpMethodGatherCmp: - case EOpMethodGatherCmpRed: - case EOpMethodGatherCmpGreen: - case EOpMethodGatherCmpBlue: - case EOpMethodGatherCmpAlpha: - case EOpMethodAppend: - case EOpMethodRestartStrip: - // those are method calls, the object type can not be changed - // they are equal if the dim and type match (is dim sufficient?) - if (arg == 0) - return from.getSampler().type == to.getSampler().type && - from.getSampler().arrayed == to.getSampler().arrayed && - from.getSampler().shadow == to.getSampler().shadow && - from.getSampler().ms == to.getSampler().ms && - from.getSampler().dim == to.getSampler().dim; - break; - default: - break; - } - - // basic types have to be convertible - if (allowOnlyUpConversions) - if (! intermediate.canImplicitlyPromote(from.getBasicType(), to.getBasicType(), EOpFunctionCall)) - return false; - - // shapes have to be convertible - if ((from.isScalarOrVec1() && to.isScalarOrVec1()) || - (from.isScalarOrVec1() && to.isVector()) || - (from.isScalarOrVec1() && to.isMatrix()) || - (from.isVector() && to.isVector() && from.getVectorSize() >= to.getVectorSize())) - return true; - - // TODO: what are the matrix rules? they go here - - return false; - }; - - // Is 'to2' a better conversion than 'to1'? - // Ties should not be considered as better. - // Assumes 'convertible' already said true. - const auto better = [](const TType& from, const TType& to1, const TType& to2) -> bool { - // exact match is always better than mismatch - if (from == to2) - return from != to1; - if (from == to1) - return false; - - // shape changes are always worse - if (from.isScalar() || from.isVector()) { - if (from.getVectorSize() == to2.getVectorSize() && - from.getVectorSize() != to1.getVectorSize()) - return true; - if (from.getVectorSize() == to1.getVectorSize() && - from.getVectorSize() != to2.getVectorSize()) - return false; - } - - // Handle sampler betterness: An exact sampler match beats a non-exact match.
- // (If we just looked at basic type, all EbtSamplers would look the same). - // If any type is not a sampler, just use the linearize function below. - if (from.getBasicType() == EbtSampler && to1.getBasicType() == EbtSampler && to2.getBasicType() == EbtSampler) { - // We can ignore the vector size in the comparison. - TSampler to1Sampler = to1.getSampler(); - TSampler to2Sampler = to2.getSampler(); - - to1Sampler.vectorSize = to2Sampler.vectorSize = from.getSampler().vectorSize; - - if (from.getSampler() == to2Sampler) - return from.getSampler() != to1Sampler; - if (from.getSampler() == to1Sampler) - return false; - } - - // Might or might not be changing shape, which means basic type might - // or might not match, so within that, the question is how big a - // basic-type conversion is being done. - // - // Use a hierarchy of domains, translated to order of magnitude - // in a linearized view: - // - floating-point vs. integer - // - 32 vs. 64 bit (or width in general) - // - bool vs. non bool - // - signed vs. not signed - const auto linearize = [](const TBasicType& basicType) -> int { - switch (basicType) { - case EbtBool: return 1; - case EbtInt: return 10; - case EbtUint: return 11; - case EbtInt64: return 20; - case EbtUint64: return 21; - case EbtFloat: return 100; - case EbtDouble: return 110; - default: return 0; - } - }; - - return std::abs(linearize(to2.getBasicType()) - linearize(from.getBasicType())) < - std::abs(linearize(to1.getBasicType()) - linearize(from.getBasicType())); - }; - - // for ambiguity reporting - bool tie = false; - - // send to the generic selector - const TFunction* bestMatch = selectFunction(candidateList, call, convertible, better, tie); - - if (bestMatch == nullptr) { - // If there is nothing selected by allowing only up-conversions (to a larger linearize() value), - // we instead try down-conversions, which are valid in HLSL, but not preferred if there are any - // upconversions possible. - allowOnlyUpConversions = false; - bestMatch = selectFunction(candidateList, call, convertible, better, tie); - } - - if (bestMatch == nullptr) { - error(loc, "no matching overloaded function found", call.getName().c_str(), ""); - return nullptr; - } - - // For built-ins, we can convert across the arguments. This will happen in several steps: - // Step 1: If there's an exact match, use it. - // Step 2a: Otherwise, get the operator from the best match and promote arguments: - // Step 2b: reconstruct the TFunction based on the new arg types - // Step 3: Re-select after type promotion is applied, to find proper candidate. - if (builtIn) { - // Step 1: If there's an exact match, use it. - if (call.getMangledName() == bestMatch->getMangledName()) - return bestMatch; - - // Step 2a: Otherwise, get the operator from the best match and promote arguments as if we - // are that kind of operator. - if (args != nullptr) { - // The arg list can be a unary node, or an aggregate. We have to handle both. - // We will use the normal promote() facilities, which require an interm node. - TIntermOperator* promote = nullptr; - - if (call.getParamCount() == 1) { - promote = new TIntermUnary(bestMatch->getBuiltInOp()); - promote->getAsUnaryNode()->setOperand(args->getAsTyped()); - } else { - promote = new TIntermAggregate(bestMatch->getBuiltInOp()); - promote->getAsAggregate()->getSequence().swap(args->getAsAggregate()->getSequence()); - } - - if (! intermediate.promote(promote)) - return nullptr; - - // Obtain the promoted arg list. 
- if (call.getParamCount() == 1) { - args = promote->getAsUnaryNode()->getOperand(); - } else { - promote->getAsAggregate()->getSequence().swap(args->getAsAggregate()->getSequence()); - } - } - - // Step 2b: reconstruct the TFunction based on the new arg types - TFunction convertedCall(&call.getName(), call.getType(), call.getBuiltInOp()); - - if (args->getAsAggregate()) { - // Handle aggregates: put all args into the new function call - for (int arg=0; arggetAsAggregate()->getSequence().size()); ++arg) { - // TODO: But for constness, we could avoid the new & shallowCopy, and use the pointer directly. - TParameter param = { 0, new TType, nullptr }; - param.type->shallowCopy(args->getAsAggregate()->getSequence()[arg]->getAsTyped()->getType()); - convertedCall.addParameter(param); - } - } else if (args->getAsUnaryNode()) { - // Handle unaries: put all args into the new function call - TParameter param = { 0, new TType, nullptr }; - param.type->shallowCopy(args->getAsUnaryNode()->getOperand()->getAsTyped()->getType()); - convertedCall.addParameter(param); - } else if (args->getAsTyped()) { - // Handle bare e.g, floats, not in an aggregate. - TParameter param = { 0, new TType, nullptr }; - param.type->shallowCopy(args->getAsTyped()->getType()); - convertedCall.addParameter(param); - } else { - assert(0); // unknown argument list. - return nullptr; - } - - // Step 3: Re-select after type promotion, to find proper candidate - // send to the generic selector - bestMatch = selectFunction(candidateList, convertedCall, convertible, better, tie); - - // At this point, there should be no tie. - } - - if (tie) - error(loc, "ambiguous best function under implicit type conversion", call.getName().c_str(), ""); - - // Append default parameter values if needed - if (!tie && bestMatch != nullptr) { - for (int defParam = call.getParamCount(); defParam < bestMatch->getParamCount(); ++defParam) { - handleFunctionArgument(&call, args, (*bestMatch)[defParam].defaultValue); - } - } - - return bestMatch; -} - -// -// Do everything necessary to handle a typedef declaration, for a single symbol. -// -// 'parseType' is the type part of the declaration (to the left) -// 'arraySizes' is the arrayness tagged on the identifier (to the right) -// -void HlslParseContext::declareTypedef(const TSourceLoc& loc, const TString& identifier, const TType& parseType) -{ - TVariable* typeSymbol = new TVariable(&identifier, parseType, true); - if (! symbolTable.insert(*typeSymbol)) - error(loc, "name already defined", "typedef", identifier.c_str()); -} - -// Do everything necessary to handle a struct declaration, including -// making IO aliases because HLSL allows mixed IO in a struct that specializes -// based on the usage (input, output, uniform, none). -void HlslParseContext::declareStruct(const TSourceLoc& loc, TString& structName, TType& type) -{ - // If it was named, which means the type can be reused later, add - // it to the symbol table. (Unless it's a block, in which - // case the name is not a type.) - if (type.getBasicType() == EbtBlock || structName.size() == 0) - return; - - TVariable* userTypeDef = new TVariable(&structName, type, true); - if (! 
symbolTable.insert(*userTypeDef)) { - error(loc, "redefinition", structName.c_str(), "struct"); - return; - } - - // See if we need IO aliases for the structure typeList - - const auto condAlloc = [](bool pred, TTypeList*& list) { - if (pred && list == nullptr) - list = new TTypeList; - }; - - tIoKinds newLists = { nullptr, nullptr, nullptr }; // allocate for each kind found - for (auto member = type.getStruct()->begin(); member != type.getStruct()->end(); ++member) { - condAlloc(hasUniform(member->type->getQualifier()), newLists.uniform); - condAlloc( hasInput(member->type->getQualifier()), newLists.input); - condAlloc( hasOutput(member->type->getQualifier()), newLists.output); - - if (member->type->isStruct()) { - auto it = ioTypeMap.find(member->type->getStruct()); - if (it != ioTypeMap.end()) { - condAlloc(it->second.uniform != nullptr, newLists.uniform); - condAlloc(it->second.input != nullptr, newLists.input); - condAlloc(it->second.output != nullptr, newLists.output); - } - } - } - if (newLists.uniform == nullptr && - newLists.input == nullptr && - newLists.output == nullptr) { - // Won't do any IO caching, clear up the type and get out now. - for (auto member = type.getStruct()->begin(); member != type.getStruct()->end(); ++member) - clearUniformInputOutput(member->type->getQualifier()); - return; - } - - // We have IO involved. - - // Make a pure typeList for the symbol table, and cache side copies of IO versions. - for (auto member = type.getStruct()->begin(); member != type.getStruct()->end(); ++member) { - const auto inheritStruct = [&](TTypeList* s, TTypeLoc& ioMember) { - if (s != nullptr) { - ioMember.type = new TType; - ioMember.type->shallowCopy(*member->type); - ioMember.type->setStruct(s); - } - }; - const auto newMember = [&](TTypeLoc& m) { - if (m.type == nullptr) { - m.type = new TType; - m.type->shallowCopy(*member->type); - } - }; - - TTypeLoc newUniformMember = { nullptr, member->loc }; - TTypeLoc newInputMember = { nullptr, member->loc }; - TTypeLoc newOutputMember = { nullptr, member->loc }; - if (member->type->isStruct()) { - // swap in an IO child if there is one - auto it = ioTypeMap.find(member->type->getStruct()); - if (it != ioTypeMap.end()) { - inheritStruct(it->second.uniform, newUniformMember); - inheritStruct(it->second.input, newInputMember); - inheritStruct(it->second.output, newOutputMember); - } - } - if (newLists.uniform) { - newMember(newUniformMember); - - // inherit default matrix layout (changeable via #pragma pack_matrix), if none given. - if (member->type->isMatrix() && member->type->getQualifier().layoutMatrix == ElmNone) - newUniformMember.type->getQualifier().layoutMatrix = globalUniformDefaults.layoutMatrix; - - correctUniform(newUniformMember.type->getQualifier()); - newLists.uniform->push_back(newUniformMember); - } - if (newLists.input) { - newMember(newInputMember); - correctInput(newInputMember.type->getQualifier()); - newLists.input->push_back(newInputMember); - } - if (newLists.output) { - newMember(newOutputMember); - correctOutput(newOutputMember.type->getQualifier()); - newLists.output->push_back(newOutputMember); - } - - // make original pure - clearUniformInputOutput(member->type->getQualifier()); - } - ioTypeMap[type.getStruct()] = newLists; -} - -// Lookup a user-type by name. -// If found, fill in the type and return the defining symbol. -// If not found, return nullptr. 
-TSymbol* HlslParseContext::lookupUserType(const TString& typeName, TType& type) -{ - TSymbol* symbol = symbolTable.find(typeName); - if (symbol && symbol->getAsVariable() && symbol->getAsVariable()->isUserType()) { - type.shallowCopy(symbol->getType()); - return symbol; - } else - return nullptr; -} - -// -// Do everything necessary to handle a variable (non-block) declaration. -// Either redeclaring a variable, or making a new one, updating the symbol -// table, and all error checking. -// -// Returns a subtree node that computes an initializer, if needed. -// Returns nullptr if there is no code to execute for initialization. -// -// 'parseType' is the type part of the declaration (to the left) -// 'arraySizes' is the arrayness tagged on the identifier (to the right) -// -TIntermNode* HlslParseContext::declareVariable(const TSourceLoc& loc, const TString& identifier, TType& type, - TIntermTyped* initializer) -{ - if (voidErrorCheck(loc, identifier, type.getBasicType())) - return nullptr; - - // Global consts with initializers that are non-const act like EvqGlobal in HLSL. - // This test is implicitly recursive, because initializers propagate constness - // up the aggregate node tree during creation. E.g, for: - // { { 1, 2 }, { 3, 4 } } - // the initializer list is marked EvqConst at the top node, and remains so here. However: - // { 1, { myvar, 2 }, 3 } - // is not a const intializer, and still becomes EvqGlobal here. - - const bool nonConstInitializer = (initializer != nullptr && initializer->getQualifier().storage != EvqConst); - - if (type.getQualifier().storage == EvqConst && symbolTable.atGlobalLevel() && nonConstInitializer) { - // Force to global - type.getQualifier().storage = EvqGlobal; - } - - // make const and initialization consistent - fixConstInit(loc, identifier, type, initializer); - - // Check for redeclaration of built-ins and/or attempting to declare a reserved name - TSymbol* symbol = nullptr; - - inheritGlobalDefaults(type.getQualifier()); - - const bool flattenVar = shouldFlatten(type); - - // correct IO in the type - switch (type.getQualifier().storage) { - case EvqGlobal: - case EvqTemporary: - clearUniformInputOutput(type.getQualifier()); - break; - case EvqUniform: - case EvqBuffer: - correctUniform(type.getQualifier()); - if (type.isStruct()) { - auto it = ioTypeMap.find(type.getStruct()); - if (it != ioTypeMap.end()) - type.setStruct(it->second.uniform); - } - - break; - default: - break; - } - - // Declare the variable - if (type.isArray()) { - // array case - declareArray(loc, identifier, type, symbol, !flattenVar); - } else { - // non-array case - if (symbol == nullptr) - symbol = declareNonArray(loc, identifier, type, !flattenVar); - else if (type != symbol->getType()) - error(loc, "cannot change the type of", "redeclaration", symbol->getName().c_str()); - } - - if (symbol == nullptr) - return nullptr; - - if (flattenVar) - flatten(*symbol->getAsVariable(), symbolTable.atGlobalLevel()); - - if (initializer == nullptr) - return nullptr; - - // Deal with initializer - TVariable* variable = symbol->getAsVariable(); - if (variable == nullptr) { - error(loc, "initializer requires a variable, not a member", identifier.c_str(), ""); - return nullptr; - } - return executeInitializer(loc, initializer, variable, flattenVar); -} - -// Pick up global defaults from the provide global defaults into dst. -void HlslParseContext::inheritGlobalDefaults(TQualifier& dst) const -{ - if (dst.storage == EvqVaryingOut) { - if (! 
dst.hasStream() && language == EShLangGeometry) - dst.layoutStream = globalOutputDefaults.layoutStream; - if (! dst.hasXfbBuffer()) - dst.layoutXfbBuffer = globalOutputDefaults.layoutXfbBuffer; - } -} - -// -// Make an internal-only variable whose name is for debug purposes only -// and won't be searched for. Callers will only use the return value to use -// the variable, not the name to look it up. It is okay if the name -// is the same as other names; there won't be any conflict. -// -TVariable* HlslParseContext::makeInternalVariable(const char* name, const TType& type) const -{ - TString* nameString = NewPoolTString(name); - TVariable* variable = new TVariable(nameString, type); - symbolTable.makeInternalVariable(*variable); - - return variable; -} - -// Make a symbol node holding a new internal temporary variable. -TIntermSymbol* HlslParseContext::makeInternalVariableNode(const TSourceLoc& loc, const char* name, - const TType& type) const -{ - TVariable* tmpVar = makeInternalVariable(name, type); - tmpVar->getWritableType().getQualifier().makeTemporary(); - - return intermediate.addSymbol(*tmpVar, loc); -} - -// -// Declare a non-array variable, the main point being there is no redeclaration -// for resizing allowed. -// -// Return the successfully declared variable. -// -TVariable* HlslParseContext::declareNonArray(const TSourceLoc& loc, const TString& identifier, const TType& type, - bool track) -{ - // make a new variable - TVariable* variable = new TVariable(&identifier, type); - - // add variable to symbol table - if (symbolTable.insert(*variable)) { - if (track && symbolTable.atGlobalLevel()) - trackLinkage(*variable); - return variable; - } - - error(loc, "redefinition", variable->getName().c_str(), ""); - return nullptr; -} - -// -// Handle all types of initializers from the grammar. -// -// Returning nullptr just means there is no code to execute to handle the -// initializer, which will, for example, be the case for constant initializers. -// -TIntermNode* HlslParseContext::executeInitializer(const TSourceLoc& loc, TIntermTyped* initializer, TVariable* variable, - bool flattened) -{ - // - // Identifier must be of type constant, a global, or a temporary, and - // starting at version 120, desktop allows uniforms to have initializers. - // - TStorageQualifier qualifier = variable->getType().getQualifier().storage; - - // - // If the initializer was from braces { ... }, we convert the whole subtree to a - // constructor-style subtree, allowing the rest of the code to operate - // identically for both kinds of initializers. - // - // - // Type can't be deduced from the initializer list, so a skeletal type to - // follow has to be passed in. Constness and specialization-constness - // should be deduced bottom up, not dictated by the skeletal type. 
- // - TType skeletalType; - skeletalType.shallowCopy(variable->getType()); - skeletalType.getQualifier().makeTemporary(); - if (initializer->getAsAggregate() && initializer->getAsAggregate()->getOp() == EOpNull) - initializer = convertInitializerList(loc, skeletalType, initializer, nullptr); - if (initializer == nullptr) { - // error recovery; don't leave const without constant values - if (qualifier == EvqConst) - variable->getWritableType().getQualifier().storage = EvqTemporary; - return nullptr; - } - - // Fix outer arrayness if variable is unsized, getting size from the initializer - if (initializer->getType().isExplicitlySizedArray() && - variable->getType().isImplicitlySizedArray()) - variable->getWritableType().changeOuterArraySize(initializer->getType().getOuterArraySize()); - - // Inner arrayness can also get set by an initializer - if (initializer->getType().isArrayOfArrays() && variable->getType().isArrayOfArrays() && - initializer->getType().getArraySizes()->getNumDims() == - variable->getType().getArraySizes()->getNumDims()) { - // adopt unsized sizes from the initializer's sizes - for (int d = 1; d < variable->getType().getArraySizes()->getNumDims(); ++d) { - if (variable->getType().getArraySizes()->getDimSize(d) == UnsizedArraySize) - variable->getWritableType().getArraySizes().setDimSize(d, initializer->getType().getArraySizes()->getDimSize(d)); - } - } - - // Uniform and global consts require a constant initializer - if (qualifier == EvqUniform && initializer->getType().getQualifier().storage != EvqConst) { - error(loc, "uniform initializers must be constant", "=", "'%s'", variable->getType().getCompleteString().c_str()); - variable->getWritableType().getQualifier().storage = EvqTemporary; - return nullptr; - } - - // Const variables require a constant initializer - if (qualifier == EvqConst) { - if (initializer->getType().getQualifier().storage != EvqConst) { - variable->getWritableType().getQualifier().storage = EvqConstReadOnly; - qualifier = EvqConstReadOnly; - } - } - - if (qualifier == EvqConst || qualifier == EvqUniform) { - // Compile-time tagging of the variable with its constant value... - - initializer = intermediate.addConversion(EOpAssign, variable->getType(), initializer); - if (initializer != nullptr && variable->getType() != initializer->getType()) - initializer = intermediate.addUniShapeConversion(EOpAssign, variable->getType(), initializer); - if (initializer == nullptr || !initializer->getAsConstantUnion() || - variable->getType() != initializer->getType()) { - error(loc, "non-matching or non-convertible constant type for const initializer", - variable->getType().getStorageQualifierString(), ""); - variable->getWritableType().getQualifier().storage = EvqTemporary; - return nullptr; - } - - variable->setConstArray(initializer->getAsConstantUnion()->getConstArray()); - } else { - // normal assigning of a value to a variable... - specializationCheck(loc, initializer->getType(), "initializer"); - TIntermSymbol* intermSymbol = intermediate.addSymbol(*variable, loc); - - // If we are flattening, it could be due to setting opaques, which must be handled - // as aliases, and the 'initializer' node cannot actually be emitted, because it - // itself stores the result of the constructor, and we can't store to opaques. - // handleAssign() will emit the initializer. 
- TIntermNode* initNode = nullptr; - if (flattened && intermSymbol->getType().containsOpaque()) - return executeFlattenedInitializer(loc, intermSymbol, *initializer->getAsAggregate()); - else { - initNode = handleAssign(loc, EOpAssign, intermSymbol, initializer); - if (initNode == nullptr) - assignError(loc, "=", intermSymbol->getCompleteString(), initializer->getCompleteString()); - return initNode; - } - } - - return nullptr; -} - -// -// Reprocess any initializer-list { ... } parts of the initializer. -// Need to hierarchically assign correct types and implicit -// conversions. Will do this mimicking the same process used for -// creating a constructor-style initializer, ensuring we get the -// same form. -// -// Returns a node representing an expression for the initializer list expressed -// as the correct type. -// -// Returns nullptr if there is an error. -// -TIntermTyped* HlslParseContext::convertInitializerList(const TSourceLoc& loc, const TType& type, - TIntermTyped* initializer, TIntermTyped* scalarInit) -{ - // Will operate recursively. Once a subtree is found that is constructor style, - // everything below it is already good: Only the "top part" of the initializer - // can be an initializer list, where "top part" can extend for several (or all) levels. - - // see if we have bottomed out in the tree within the initializer-list part - TIntermAggregate* initList = initializer->getAsAggregate(); - if (initList == nullptr || initList->getOp() != EOpNull) { - // We don't have a list, but if it's a scalar and the 'type' is a - // composite, we need to lengthen below to make it useful. - // Otherwise, this is an already formed object to initialize with. - if (type.isScalar() || !initializer->getType().isScalar()) - return initializer; - else - initList = intermediate.makeAggregate(initializer); - } - - // Of the initializer-list set of nodes, need to process bottom up, - // so recurse deep, then process on the way up. - - // Go down the tree here... - if (type.isArray()) { - // The type's array might be unsized, which could be okay, so base sizes on the size of the aggregate. - // Later on, initializer execution code will deal with array size logic. 
- TType arrayType; - arrayType.shallowCopy(type); // sharing struct stuff is fine - arrayType.newArraySizes(*type.getArraySizes()); // but get a fresh copy of the array information, to edit below - - // edit array sizes to fill in unsized dimensions - if (type.isImplicitlySizedArray()) - arrayType.changeOuterArraySize((int)initList->getSequence().size()); - - // set unsized array dimensions that can be derived from the initializer's first element - if (arrayType.isArrayOfArrays() && initList->getSequence().size() > 0) { - TIntermTyped* firstInit = initList->getSequence()[0]->getAsTyped(); - if (firstInit->getType().isArray() && - arrayType.getArraySizes().getNumDims() == firstInit->getType().getArraySizes()->getNumDims() + 1) { - for (int d = 1; d < arrayType.getArraySizes().getNumDims(); ++d) { - if (arrayType.getArraySizes().getDimSize(d) == UnsizedArraySize) - arrayType.getArraySizes().setDimSize(d, firstInit->getType().getArraySizes()->getDimSize(d - 1)); - } - } - } - - // lengthen list to be long enough - lengthenList(loc, initList->getSequence(), arrayType.getOuterArraySize(), scalarInit); - - // recursively process each element - TType elementType(arrayType, 0); // dereferenced type - for (int i = 0; i < arrayType.getOuterArraySize(); ++i) { - initList->getSequence()[i] = convertInitializerList(loc, elementType, - initList->getSequence()[i]->getAsTyped(), scalarInit); - if (initList->getSequence()[i] == nullptr) - return nullptr; - } - - return addConstructor(loc, initList, arrayType); - } else if (type.isStruct()) { - // lengthen list to be long enough - lengthenList(loc, initList->getSequence(), static_cast(type.getStruct()->size()), scalarInit); - - if (type.getStruct()->size() != initList->getSequence().size()) { - error(loc, "wrong number of structure members", "initializer list", ""); - return nullptr; - } - for (size_t i = 0; i < type.getStruct()->size(); ++i) { - initList->getSequence()[i] = convertInitializerList(loc, *(*type.getStruct())[i].type, - initList->getSequence()[i]->getAsTyped(), scalarInit); - if (initList->getSequence()[i] == nullptr) - return nullptr; - } - } else if (type.isMatrix()) { - if (type.computeNumComponents() == (int)initList->getSequence().size()) { - // This means the matrix is initialized component-wise, rather than as - // a series of rows and columns. We can just use the list directly as - // a constructor; no further processing needed. 
- } else { - // lengthen list to be long enough - lengthenList(loc, initList->getSequence(), type.getMatrixCols(), scalarInit); - - if (type.getMatrixCols() != (int)initList->getSequence().size()) { - error(loc, "wrong number of matrix columns:", "initializer list", type.getCompleteString().c_str()); - return nullptr; - } - TType vectorType(type, 0); // dereferenced type - for (int i = 0; i < type.getMatrixCols(); ++i) { - initList->getSequence()[i] = convertInitializerList(loc, vectorType, - initList->getSequence()[i]->getAsTyped(), scalarInit); - if (initList->getSequence()[i] == nullptr) - return nullptr; - } - } - } else if (type.isVector()) { - // lengthen list to be long enough - lengthenList(loc, initList->getSequence(), type.getVectorSize(), scalarInit); - - // error check; we're at bottom, so work is finished below - if (type.getVectorSize() != (int)initList->getSequence().size()) { - error(loc, "wrong vector size (or rows in a matrix column):", "initializer list", - type.getCompleteString().c_str()); - return nullptr; - } - } else if (type.isScalar()) { - // lengthen list to be long enough - lengthenList(loc, initList->getSequence(), 1, scalarInit); - - if ((int)initList->getSequence().size() != 1) { - error(loc, "scalar expected one element:", "initializer list", type.getCompleteString().c_str()); - return nullptr; - } - } else { - error(loc, "unexpected initializer-list type:", "initializer list", type.getCompleteString().c_str()); - return nullptr; - } - - // Now that the subtree is processed, process this node as if the - // initializer list is a set of arguments to a constructor. - TIntermTyped* emulatedConstructorArguments; - if (initList->getSequence().size() == 1) - emulatedConstructorArguments = initList->getSequence()[0]->getAsTyped(); - else - emulatedConstructorArguments = initList; - - return addConstructor(loc, emulatedConstructorArguments, type); -} - -// Lengthen list to be long enough to cover any gap from the current list size -// to 'size'. If the list is longer, do nothing. -// The value to lengthen with is the default for short lists. -// -// By default, lists that are too short due to lack of initializers initialize to zero. -// Alternatively, it could be a scalar initializer for a structure. Both cases are handled, -// based on whether something is passed in as 'scalarInit'. -// -// 'scalarInit' must be safe to use each time this is called (no side effects replication). -// -void HlslParseContext::lengthenList(const TSourceLoc& loc, TIntermSequence& list, int size, TIntermTyped* scalarInit) -{ - for (int c = (int)list.size(); c < size; ++c) { - if (scalarInit == nullptr) - list.push_back(intermediate.addConstantUnion(0, loc)); - else - list.push_back(scalarInit); - } -} - -// -// Test for the correctness of the parameters passed to various constructor functions -// and also convert them to the right data type, if allowed and required. -// -// Returns nullptr for an error or the constructed node (aggregate or typed) for no error. 
-// -TIntermTyped* HlslParseContext::handleConstructor(const TSourceLoc& loc, TIntermTyped* node, const TType& type) -{ - if (node == nullptr) - return nullptr; - - // Handle the idiom "(struct type)" - if (type.isStruct() && isScalarConstructor(node)) { - // 'node' will almost always get used multiple times, so should not be used directly, - // it would create a DAG instead of a tree, which might be okay (would - // like to formalize that for constants and symbols), but if it has - // side effects, they would get executed multiple times, which is not okay. - if (node->getAsConstantUnion() == nullptr && node->getAsSymbolNode() == nullptr) { - TIntermAggregate* seq = intermediate.makeAggregate(loc); - TIntermSymbol* copy = makeInternalVariableNode(loc, "scalarCopy", node->getType()); - seq = intermediate.growAggregate(seq, intermediate.addBinaryNode(EOpAssign, copy, node, loc)); - seq = intermediate.growAggregate(seq, convertInitializerList(loc, type, intermediate.makeAggregate(loc), copy)); - seq->setOp(EOpComma); - seq->setType(type); - return seq; - } else - return convertInitializerList(loc, type, intermediate.makeAggregate(loc), node); - } - - return addConstructor(loc, node, type); -} - -// Add a constructor, either from the grammar, or other programmatic reasons. -// -// 'node' is what to construct from. -// 'type' is what type to construct. -// -// Returns the constructed object. -// Return nullptr if it can't be done. -// -TIntermTyped* HlslParseContext::addConstructor(const TSourceLoc& loc, TIntermTyped* node, const TType& type) -{ - TOperator op = intermediate.mapTypeToConstructorOp(type); - - // Combined texture-sampler constructors are completely semantic checked - // in constructorTextureSamplerError() - if (op == EOpConstructTextureSampler) - return intermediate.setAggregateOperator(node->getAsAggregate(), op, type, loc); - - TTypeList::const_iterator memberTypes; - if (op == EOpConstructStruct) - memberTypes = type.getStruct()->begin(); - - TType elementType; - if (type.isArray()) { - TType dereferenced(type, 0); - elementType.shallowCopy(dereferenced); - } else - elementType.shallowCopy(type); - - bool singleArg; - TIntermAggregate* aggrNode = node->getAsAggregate(); - if (aggrNode != nullptr) { - if (aggrNode->getOp() != EOpNull || aggrNode->getSequence().size() == 1) - singleArg = true; - else - singleArg = false; - } else - singleArg = true; - - TIntermTyped *newNode; - if (singleArg) { - // Handle array -> array conversion - // Constructing an array of one type from an array of another type is allowed, - // assuming there are enough components available (semantic-checked earlier). - if (type.isArray() && node->isArray()) - newNode = convertArray(node, type); - - // If structure constructor or array constructor is being called - // for only one parameter inside the structure, we need to call constructAggregate function once. - else if (type.isArray()) - newNode = constructAggregate(node, elementType, 1, node->getLoc()); - else if (op == EOpConstructStruct) - newNode = constructAggregate(node, *(*memberTypes).type, 1, node->getLoc()); - else { - // shape conversion for matrix constructor from scalar. HLSL semantics are: scalar - // is replicated into every element of the matrix (not just the diagnonal), so - // that is handled specially here. 
- if (type.isMatrix() && node->getType().isScalarOrVec1()) - node = intermediate.addShapeConversion(type, node); - - newNode = constructBuiltIn(type, op, node, node->getLoc(), false); - } - - if (newNode && (type.isArray() || op == EOpConstructStruct)) - newNode = intermediate.setAggregateOperator(newNode, EOpConstructStruct, type, loc); - - return newNode; - } - - // - // Handle list of arguments. - // - TIntermSequence &sequenceVector = aggrNode->getSequence(); // Stores the information about the parameter to the constructor - // if the structure constructor contains more than one parameter, then construct - // each parameter - - int paramCount = 0; // keeps a track of the constructor parameter number being checked - - // for each parameter to the constructor call, check to see if the right type is passed or convert them - // to the right type if possible (and allowed). - // for structure constructors, just check if the right type is passed, no conversion is allowed. - - for (TIntermSequence::iterator p = sequenceVector.begin(); - p != sequenceVector.end(); p++, paramCount++) { - if (type.isArray()) - newNode = constructAggregate(*p, elementType, paramCount + 1, node->getLoc()); - else if (op == EOpConstructStruct) - newNode = constructAggregate(*p, *(memberTypes[paramCount]).type, paramCount + 1, node->getLoc()); - else - newNode = constructBuiltIn(type, op, (*p)->getAsTyped(), node->getLoc(), true); - - if (newNode) - *p = newNode; - else - return nullptr; - } - - TIntermTyped* constructor = intermediate.setAggregateOperator(aggrNode, op, type, loc); - - return constructor; -} - -// Function for constructor implementation. Calls addUnaryMath with appropriate EOp value -// for the parameter to the constructor (passed to this function). Essentially, it converts -// the parameter types correctly. If a constructor expects an int (like ivec2) and is passed a -// float, then float is converted to int. -// -// Returns nullptr for an error or the constructed node. -// -TIntermTyped* HlslParseContext::constructBuiltIn(const TType& type, TOperator op, TIntermTyped* node, - const TSourceLoc& loc, bool subset) -{ - TIntermTyped* newNode; - TOperator basicOp; - - // - // First, convert types as needed. 
- // - switch (op) { - case EOpConstructVec2: - case EOpConstructVec3: - case EOpConstructVec4: - case EOpConstructMat2x2: - case EOpConstructMat2x3: - case EOpConstructMat2x4: - case EOpConstructMat3x2: - case EOpConstructMat3x3: - case EOpConstructMat3x4: - case EOpConstructMat4x2: - case EOpConstructMat4x3: - case EOpConstructMat4x4: - case EOpConstructFloat: - basicOp = EOpConstructFloat; - break; - - case EOpConstructDVec2: - case EOpConstructDVec3: - case EOpConstructDVec4: - case EOpConstructDMat2x2: - case EOpConstructDMat2x3: - case EOpConstructDMat2x4: - case EOpConstructDMat3x2: - case EOpConstructDMat3x3: - case EOpConstructDMat3x4: - case EOpConstructDMat4x2: - case EOpConstructDMat4x3: - case EOpConstructDMat4x4: - case EOpConstructDouble: - basicOp = EOpConstructDouble; - break; - - case EOpConstructIVec2: - case EOpConstructIVec3: - case EOpConstructIVec4: - case EOpConstructIMat2x2: - case EOpConstructIMat2x3: - case EOpConstructIMat2x4: - case EOpConstructIMat3x2: - case EOpConstructIMat3x3: - case EOpConstructIMat3x4: - case EOpConstructIMat4x2: - case EOpConstructIMat4x3: - case EOpConstructIMat4x4: - case EOpConstructInt: - basicOp = EOpConstructInt; - break; - - case EOpConstructUVec2: - case EOpConstructUVec3: - case EOpConstructUVec4: - case EOpConstructUMat2x2: - case EOpConstructUMat2x3: - case EOpConstructUMat2x4: - case EOpConstructUMat3x2: - case EOpConstructUMat3x3: - case EOpConstructUMat3x4: - case EOpConstructUMat4x2: - case EOpConstructUMat4x3: - case EOpConstructUMat4x4: - case EOpConstructUint: - basicOp = EOpConstructUint; - break; - - case EOpConstructBVec2: - case EOpConstructBVec3: - case EOpConstructBVec4: - case EOpConstructBMat2x2: - case EOpConstructBMat2x3: - case EOpConstructBMat2x4: - case EOpConstructBMat3x2: - case EOpConstructBMat3x3: - case EOpConstructBMat3x4: - case EOpConstructBMat4x2: - case EOpConstructBMat4x3: - case EOpConstructBMat4x4: - case EOpConstructBool: - basicOp = EOpConstructBool; - break; - - default: - error(loc, "unsupported construction", "", ""); - - return nullptr; - } - newNode = intermediate.addUnaryMath(basicOp, node, node->getLoc()); - if (newNode == nullptr) { - error(loc, "can't convert", "constructor", ""); - return nullptr; - } - - // - // Now, if there still isn't an operation to do the construction, and we need one, add one. - // - - // Otherwise, skip out early. - if (subset || (newNode != node && newNode->getType() == type)) - return newNode; - - // setAggregateOperator will insert a new node for the constructor, as needed. - return intermediate.setAggregateOperator(newNode, op, type, loc); -} - -// Convert the array in node to the requested type, which is also an array. -// Returns nullptr on failure, otherwise returns aggregate holding the list of -// elements needed to construct the array. -TIntermTyped* HlslParseContext::convertArray(TIntermTyped* node, const TType& type) -{ - assert(node->isArray() && type.isArray()); - if (node->getType().computeNumComponents() < type.computeNumComponents()) - return nullptr; - - // TODO: write an argument replicator, for the case the argument should not be - // executed multiple times, yet multiple copies are needed. 
- - TIntermTyped* constructee = node->getAsTyped(); - // track where we are in consuming the argument - int constructeeElement = 0; - int constructeeComponent = 0; - - // bump up to the next component to consume - const auto getNextComponent = [&]() { - TIntermTyped* component; - component = handleBracketDereference(node->getLoc(), constructee, - intermediate.addConstantUnion(constructeeElement, node->getLoc())); - if (component->isVector()) - component = handleBracketDereference(node->getLoc(), component, - intermediate.addConstantUnion(constructeeComponent, node->getLoc())); - // bump component pointer up - ++constructeeComponent; - if (constructeeComponent == constructee->getVectorSize()) { - constructeeComponent = 0; - ++constructeeElement; - } - return component; - }; - - // make one subnode per constructed array element - TIntermAggregate* constructor = nullptr; - TType derefType(type, 0); - TType speculativeComponentType(derefType, 0); - TType* componentType = derefType.isVector() ? &speculativeComponentType : &derefType; - TOperator componentOp = intermediate.mapTypeToConstructorOp(*componentType); - TType crossType(node->getBasicType(), EvqTemporary, type.getVectorSize()); - for (int e = 0; e < type.getOuterArraySize(); ++e) { - // construct an element - TIntermTyped* elementArg; - if (type.getVectorSize() == constructee->getVectorSize()) { - // same element shape - elementArg = handleBracketDereference(node->getLoc(), constructee, - intermediate.addConstantUnion(e, node->getLoc())); - } else { - // mismatched element shapes - if (type.getVectorSize() == 1) - elementArg = getNextComponent(); - else { - // make a vector - TIntermAggregate* elementConstructee = nullptr; - for (int c = 0; c < type.getVectorSize(); ++c) - elementConstructee = intermediate.growAggregate(elementConstructee, getNextComponent()); - elementArg = addConstructor(node->getLoc(), elementConstructee, crossType); - } - } - // convert basic types - elementArg = intermediate.addConversion(componentOp, derefType, elementArg); - if (elementArg == nullptr) - return nullptr; - // combine with top-level constructor - constructor = intermediate.growAggregate(constructor, elementArg); - } - - return constructor; -} - -// This function tests for the type of the parameters to the structure or array constructor. Raises -// an error message if the expected type does not match the parameter passed to the constructor. -// -// Returns nullptr for an error or the input node itself if the expected and the given parameter types match. -// -TIntermTyped* HlslParseContext::constructAggregate(TIntermNode* node, const TType& type, int paramCount, - const TSourceLoc& loc) -{ - // Handle cases that map more 1:1 between constructor arguments and constructed. - TIntermTyped* converted = intermediate.addConversion(EOpConstructStruct, type, node->getAsTyped()); - if (converted == nullptr || converted->getType() != type) { - error(loc, "", "constructor", "cannot convert parameter %d from '%s' to '%s'", paramCount, - node->getAsTyped()->getType().getCompleteString().c_str(), type.getCompleteString().c_str()); - - return nullptr; - } - - return converted; -} - -// -// Do everything needed to add an interface block. -// -void HlslParseContext::declareBlock(const TSourceLoc& loc, TType& type, const TString* instanceName, TArraySizes* arraySizes) -{ - assert(type.getWritableStruct() != nullptr); - - // Clean up top-level decorations that don't belong. 
- switch (type.getQualifier().storage) { - case EvqUniform: - case EvqBuffer: - correctUniform(type.getQualifier()); - break; - case EvqVaryingIn: - correctInput(type.getQualifier()); - break; - case EvqVaryingOut: - correctOutput(type.getQualifier()); - break; - default: - break; - } - - TTypeList& typeList = *type.getWritableStruct(); - // fix and check for member storage qualifiers and types that don't belong within a block - for (unsigned int member = 0; member < typeList.size(); ++member) { - TType& memberType = *typeList[member].type; - TQualifier& memberQualifier = memberType.getQualifier(); - const TSourceLoc& memberLoc = typeList[member].loc; - globalQualifierFix(memberLoc, memberQualifier); - memberQualifier.storage = type.getQualifier().storage; - - if (memberType.isStruct()) { - // clean up and pick up the right set of decorations - auto it = ioTypeMap.find(memberType.getStruct()); - switch (type.getQualifier().storage) { - case EvqUniform: - case EvqBuffer: - correctUniform(type.getQualifier()); - if (it != ioTypeMap.end() && it->second.uniform) - memberType.setStruct(it->second.uniform); - break; - case EvqVaryingIn: - correctInput(type.getQualifier()); - if (it != ioTypeMap.end() && it->second.input) - memberType.setStruct(it->second.input); - break; - case EvqVaryingOut: - correctOutput(type.getQualifier()); - if (it != ioTypeMap.end() && it->second.output) - memberType.setStruct(it->second.output); - break; - default: - break; - } - } - } - - // Make default block qualification, and adjust the member qualifications - - TQualifier defaultQualification; - switch (type.getQualifier().storage) { - case EvqUniform: defaultQualification = globalUniformDefaults; break; - case EvqBuffer: defaultQualification = globalBufferDefaults; break; - case EvqVaryingIn: defaultQualification = globalInputDefaults; break; - case EvqVaryingOut: defaultQualification = globalOutputDefaults; break; - default: defaultQualification.clear(); break; - } - - // Special case for "push_constant uniform", which has a default of std430, - // contrary to normal uniform defaults, and can't have a default tracked for it. - if (type.getQualifier().layoutPushConstant && ! type.getQualifier().hasPacking()) - type.getQualifier().layoutPacking = ElpStd430; - - // fix and check for member layout qualifiers - - mergeObjectLayoutQualifiers(defaultQualification, type.getQualifier(), true); - - bool memberWithLocation = false; - bool memberWithoutLocation = false; - for (unsigned int member = 0; member < typeList.size(); ++member) { - TQualifier& memberQualifier = typeList[member].type->getQualifier(); - const TSourceLoc& memberLoc = typeList[member].loc; - if (memberQualifier.hasStream()) { - if (defaultQualification.layoutStream != memberQualifier.layoutStream) - error(memberLoc, "member cannot contradict block", "stream", ""); - } - - // "This includes a block's inheritance of the - // current global default buffer, a block member's inheritance of the block's - // buffer, and the requirement that any *xfb_buffer* declared on a block - // member must match the buffer inherited from the block." 
- if (memberQualifier.hasXfbBuffer()) { - if (defaultQualification.layoutXfbBuffer != memberQualifier.layoutXfbBuffer) - error(memberLoc, "member cannot contradict block (or what block inherited from global)", "xfb_buffer", ""); - } - - if (memberQualifier.hasLocation()) { - switch (type.getQualifier().storage) { - case EvqVaryingIn: - case EvqVaryingOut: - memberWithLocation = true; - break; - default: - break; - } - } else - memberWithoutLocation = true; - - TQualifier newMemberQualification = defaultQualification; - mergeQualifiers(newMemberQualification, memberQualifier); - memberQualifier = newMemberQualification; - } - - // Process the members - fixBlockLocations(loc, type.getQualifier(), typeList, memberWithLocation, memberWithoutLocation); - fixBlockXfbOffsets(type.getQualifier(), typeList); - fixBlockUniformOffsets(type.getQualifier(), typeList); - - // reverse merge, so that currentBlockQualifier now has all layout information - // (can't use defaultQualification directly, it's missing other non-layout-default-class qualifiers) - mergeObjectLayoutQualifiers(type.getQualifier(), defaultQualification, true); - - // - // Build and add the interface block as a new type named 'blockName' - // - - // Use the instance name as the interface name if one exists, else the block name. - const TString& interfaceName = (instanceName && !instanceName->empty()) ? *instanceName : type.getTypeName(); - - TType blockType(&typeList, interfaceName, type.getQualifier()); - if (arraySizes) - blockType.newArraySizes(*arraySizes); - - // Add the variable, as anonymous or named instanceName. - // Make an anonymous variable if no name was provided. - if (instanceName == nullptr) - instanceName = NewPoolTString(""); - - TVariable& variable = *new TVariable(instanceName, blockType); - if (! symbolTable.insert(variable)) { - if (*instanceName == "") - error(loc, "nameless block contains a member that already has a name at global scope", - "" /* blockName->c_str() */, ""); - else - error(loc, "block instance name redefinition", variable.getName().c_str(), ""); - - return; - } - - // Save it in the AST for linker use. - trackLinkage(variable); -} - -// -// "For a block, this process applies to the entire block, or until the first member -// is reached that has a location layout qualifier. When a block member is declared with a location -// qualifier, its location comes from that qualifier: The member's location qualifier overrides the block-level -// declaration. Subsequent members are again assigned consecutive locations, based on the newest location, -// until the next member declared with a location qualifier. The values used for locations do not have to be -// declared in increasing order." -void HlslParseContext::fixBlockLocations(const TSourceLoc& loc, TQualifier& qualifier, TTypeList& typeList, bool memberWithLocation, bool memberWithoutLocation) -{ - // "If a block has no block-level location layout qualifier, it is required that either all or none of its members - // have a location layout qualifier, or a compile-time error results." - if (! 
qualifier.hasLocation() && memberWithLocation && memberWithoutLocation) - error(loc, "either the block needs a location, or all members need a location, or no members have a location", "location", ""); - else { - if (memberWithLocation) { - // remove any block-level location and make it per *every* member - int nextLocation = 0; // by the rule above, initial value is not relevant - if (qualifier.hasAnyLocation()) { - nextLocation = qualifier.layoutLocation; - qualifier.layoutLocation = TQualifier::layoutLocationEnd; - if (qualifier.hasComponent()) { - // "It is a compile-time error to apply the *component* qualifier to a ... block" - error(loc, "cannot apply to a block", "component", ""); - } - if (qualifier.hasIndex()) { - error(loc, "cannot apply to a block", "index", ""); - } - } - for (unsigned int member = 0; member < typeList.size(); ++member) { - TQualifier& memberQualifier = typeList[member].type->getQualifier(); - const TSourceLoc& memberLoc = typeList[member].loc; - if (! memberQualifier.hasLocation()) { - if (nextLocation >= (int)TQualifier::layoutLocationEnd) - error(memberLoc, "location is too large", "location", ""); - memberQualifier.layoutLocation = nextLocation; - memberQualifier.layoutComponent = 0; - } - nextLocation = memberQualifier.layoutLocation + - intermediate.computeTypeLocationSize(*typeList[member].type); - } - } - } -} - -void HlslParseContext::fixBlockXfbOffsets(TQualifier& qualifier, TTypeList& typeList) -{ - // "If a block is qualified with xfb_offset, all its - // members are assigned transform feedback buffer offsets. If a block is not qualified with xfb_offset, any - // members of that block not qualified with an xfb_offset will not be assigned transform feedback buffer - // offsets." - - if (! qualifier.hasXfbBuffer() || ! qualifier.hasXfbOffset()) - return; - - int nextOffset = qualifier.layoutXfbOffset; - for (unsigned int member = 0; member < typeList.size(); ++member) { - TQualifier& memberQualifier = typeList[member].type->getQualifier(); - bool containsDouble = false; - int memberSize = intermediate.computeTypeXfbSize(*typeList[member].type, containsDouble); - // see if we need to auto-assign an offset to this member - if (! memberQualifier.hasXfbOffset()) { - // "if applied to an aggregate containing a double, the offset must also be a multiple of 8" - if (containsDouble) - RoundToPow2(nextOffset, 8); - memberQualifier.layoutXfbOffset = nextOffset; - } else - nextOffset = memberQualifier.layoutXfbOffset; - nextOffset += memberSize; - } - - // The above gave all block members an offset, so we can take it off the block now, - // which will avoid double counting the offset usage. - qualifier.layoutXfbOffset = TQualifier::layoutXfbOffsetEnd; -} - -// Calculate and save the offset of each block member, using the recursively -// defined block offset rules and the user-provided offset and align. -// -// Also, compute and save the total size of the block. For the block's size, arrayness -// is not taken into account, as each element is backed by a separate buffer. -// -void HlslParseContext::fixBlockUniformOffsets(const TQualifier& qualifier, TTypeList& typeList) -{ - if (! 
qualifier.isUniformOrBuffer()) - return; - if (qualifier.layoutPacking != ElpStd140 && qualifier.layoutPacking != ElpStd430) - return; - - int offset = 0; - int memberSize; - for (unsigned int member = 0; member < typeList.size(); ++member) { - TQualifier& memberQualifier = typeList[member].type->getQualifier(); - const TSourceLoc& memberLoc = typeList[member].loc; - - // "When align is applied to an array, it effects only the start of the array, not the array's internal stride." - - // modify just the children's view of matrix layout, if there is one for this member - TLayoutMatrix subMatrixLayout = typeList[member].type->getQualifier().layoutMatrix; - int dummyStride; - int memberAlignment = intermediate.getBaseAlignment(*typeList[member].type, memberSize, dummyStride, - qualifier.layoutPacking == ElpStd140, - subMatrixLayout != ElmNone - ? subMatrixLayout == ElmRowMajor - : qualifier.layoutMatrix == ElmRowMajor); - if (memberQualifier.hasOffset()) { - // "The specified offset must be a multiple - // of the base alignment of the type of the block member it qualifies, or a compile-time error results." - if (! IsMultipleOfPow2(memberQualifier.layoutOffset, memberAlignment)) - error(memberLoc, "must be a multiple of the member's alignment", "offset", ""); - - // "The offset qualifier forces the qualified member to start at or after the specified - // integral-constant expression, which will be its byte offset from the beginning of the buffer. - // "The actual offset of a member is computed as - // follows: If offset was declared, start with that offset, otherwise start with the next available offset." - offset = std::max(offset, memberQualifier.layoutOffset); - } - - // "The actual alignment of a member will be the greater of the specified align alignment and the standard - // (e.g., std140) base alignment for the member's type." - if (memberQualifier.hasAlign()) - memberAlignment = std::max(memberAlignment, memberQualifier.layoutAlign); - - // "If the resulting offset is not a multiple of the actual alignment, - // increase it to the first offset that is a multiple of - // the actual alignment." - RoundToPow2(offset, memberAlignment); - typeList[member].type->getQualifier().layoutOffset = offset; - offset += memberSize; - } -} - -// For an identifier that is already declared, add more qualification to it. -void HlslParseContext::addQualifierToExisting(const TSourceLoc& loc, TQualifier qualifier, const TString& identifier) -{ - TSymbol* symbol = symbolTable.find(identifier); - if (symbol == nullptr) { - error(loc, "identifier not previously declared", identifier.c_str(), ""); - return; - } - if (symbol->getAsFunction()) { - error(loc, "cannot re-qualify a function name", identifier.c_str(), ""); - return; - } - - if (qualifier.isAuxiliary() || - qualifier.isMemory() || - qualifier.isInterpolation() || - qualifier.hasLayout() || - qualifier.storage != EvqTemporary || - qualifier.precision != EpqNone) { - error(loc, "cannot add storage, auxiliary, memory, interpolation, layout, or precision qualifier to an existing variable", identifier.c_str(), ""); - return; - } - - // For read-only built-ins, add a new symbol for holding the modified qualifier. 
- // This will bring up an entire block, if a block type has to be modified (e.g., gl_Position inside a block) - if (symbol->isReadOnly()) - symbol = symbolTable.copyUp(symbol); - - if (qualifier.invariant) { - if (intermediate.inIoAccessed(identifier)) - error(loc, "cannot change qualification after use", "invariant", ""); - symbol->getWritableType().getQualifier().invariant = true; - } else if (qualifier.noContraction) { - if (intermediate.inIoAccessed(identifier)) - error(loc, "cannot change qualification after use", "precise", ""); - symbol->getWritableType().getQualifier().noContraction = true; - } else if (qualifier.specConstant) { - symbol->getWritableType().getQualifier().makeSpecConstant(); - if (qualifier.hasSpecConstantId()) - symbol->getWritableType().getQualifier().layoutSpecConstantId = qualifier.layoutSpecConstantId; - } else - warn(loc, "unknown requalification", "", ""); -} - -void HlslParseContext::addQualifierToExisting(const TSourceLoc& loc, TQualifier qualifier, TIdentifierList& identifiers) -{ - for (unsigned int i = 0; i < identifiers.size(); ++i) - addQualifierToExisting(loc, qualifier, *identifiers[i]); -} - -// -// Update the intermediate for the given input geometry -// -bool HlslParseContext::handleInputGeometry(const TSourceLoc& loc, const TLayoutGeometry& geometry) -{ - switch (geometry) { - case ElgPoints: // fall through - case ElgLines: // ... - case ElgTriangles: // ... - case ElgLinesAdjacency: // ... - case ElgTrianglesAdjacency: // ... - if (! intermediate.setInputPrimitive(geometry)) { - error(loc, "input primitive geometry redefinition", TQualifier::getGeometryString(geometry), ""); - return false; - } - break; - - default: - error(loc, "cannot apply to 'in'", TQualifier::getGeometryString(geometry), ""); - return false; - } - - return true; -} - -// -// Update the intermediate for the given output geometry -// -bool HlslParseContext::handleOutputGeometry(const TSourceLoc& loc, const TLayoutGeometry& geometry) -{ - switch (geometry) { - case ElgPoints: - case ElgLineStrip: - case ElgTriangleStrip: - if (! intermediate.setOutputPrimitive(geometry)) { - error(loc, "output primitive geometry redefinition", TQualifier::getGeometryString(geometry), ""); - return false; - } - break; - default: - error(loc, "cannot apply to 'out'", TQualifier::getGeometryString(geometry), ""); - return false; - } - - return true; -} - -// -// Selection hints -// -TSelectionControl HlslParseContext::handleSelectionControl(const TAttributeMap& attributes) const -{ - if (attributes.contains(EatFlatten)) - return ESelectionControlFlatten; - else if (attributes.contains(EatBranch)) - return ESelectionControlDontFlatten; - else - return ESelectionControlNone; -} - -// -// Loop hints -// -TLoopControl HlslParseContext::handleLoopControl(const TAttributeMap& attributes) const -{ - if (attributes.contains(EatUnroll)) - return ELoopControlUnroll; - else if (attributes.contains(EatLoop)) - return ELoopControlDontUnroll; - else - return ELoopControlNone; -} - -// -// Updating default qualifier for the case of a declaration with just a qualifier, -// no type, block, or identifier. -// -void HlslParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, const TPublicType& publicType) -{ - if (publicType.shaderQualifiers.vertices != TQualifier::layoutNotSet) { - assert(language == EShLangTessControl || language == EShLangGeometry); - // const char* id = (language == EShLangTessControl) ? 
"vertices" : "max_vertices"; - } - if (publicType.shaderQualifiers.invocations != TQualifier::layoutNotSet) { - if (! intermediate.setInvocations(publicType.shaderQualifiers.invocations)) - error(loc, "cannot change previously set layout value", "invocations", ""); - } - if (publicType.shaderQualifiers.geometry != ElgNone) { - if (publicType.qualifier.storage == EvqVaryingIn) { - switch (publicType.shaderQualifiers.geometry) { - case ElgPoints: - case ElgLines: - case ElgLinesAdjacency: - case ElgTriangles: - case ElgTrianglesAdjacency: - case ElgQuads: - case ElgIsolines: - break; - default: - error(loc, "cannot apply to input", TQualifier::getGeometryString(publicType.shaderQualifiers.geometry), - ""); - } - } else if (publicType.qualifier.storage == EvqVaryingOut) { - handleOutputGeometry(loc, publicType.shaderQualifiers.geometry); - } else - error(loc, "cannot apply to:", TQualifier::getGeometryString(publicType.shaderQualifiers.geometry), - GetStorageQualifierString(publicType.qualifier.storage)); - } - if (publicType.shaderQualifiers.spacing != EvsNone) - intermediate.setVertexSpacing(publicType.shaderQualifiers.spacing); - if (publicType.shaderQualifiers.order != EvoNone) - intermediate.setVertexOrder(publicType.shaderQualifiers.order); - if (publicType.shaderQualifiers.pointMode) - intermediate.setPointMode(); - for (int i = 0; i < 3; ++i) { - if (publicType.shaderQualifiers.localSize[i] > 1) { - int max = 0; - switch (i) { - case 0: max = resources.maxComputeWorkGroupSizeX; break; - case 1: max = resources.maxComputeWorkGroupSizeY; break; - case 2: max = resources.maxComputeWorkGroupSizeZ; break; - default: break; - } - if (intermediate.getLocalSize(i) > (unsigned int)max) - error(loc, "too large; see gl_MaxComputeWorkGroupSize", "local_size", ""); - - // Fix the existing constant gl_WorkGroupSize with this new information. - TVariable* workGroupSize = getEditableVariable("gl_WorkGroupSize"); - workGroupSize->getWritableConstArray()[i].setUConst(intermediate.getLocalSize(i)); - } - if (publicType.shaderQualifiers.localSizeSpecId[i] != TQualifier::layoutNotSet) { - intermediate.setLocalSizeSpecId(i, publicType.shaderQualifiers.localSizeSpecId[i]); - // Set the workgroup built-in variable as a specialization constant - TVariable* workGroupSize = getEditableVariable("gl_WorkGroupSize"); - workGroupSize->getWritableType().getQualifier().specConstant = true; - } - } - if (publicType.shaderQualifiers.earlyFragmentTests) - intermediate.setEarlyFragmentTests(); - - const TQualifier& qualifier = publicType.qualifier; - - switch (qualifier.storage) { - case EvqUniform: - if (qualifier.hasMatrix()) - globalUniformDefaults.layoutMatrix = qualifier.layoutMatrix; - if (qualifier.hasPacking()) - globalUniformDefaults.layoutPacking = qualifier.layoutPacking; - break; - case EvqBuffer: - if (qualifier.hasMatrix()) - globalBufferDefaults.layoutMatrix = qualifier.layoutMatrix; - if (qualifier.hasPacking()) - globalBufferDefaults.layoutPacking = qualifier.layoutPacking; - break; - case EvqVaryingIn: - break; - case EvqVaryingOut: - if (qualifier.hasStream()) - globalOutputDefaults.layoutStream = qualifier.layoutStream; - if (qualifier.hasXfbBuffer()) - globalOutputDefaults.layoutXfbBuffer = qualifier.layoutXfbBuffer; - if (globalOutputDefaults.hasXfbBuffer() && qualifier.hasXfbStride()) { - if (! 
intermediate.setXfbBufferStride(globalOutputDefaults.layoutXfbBuffer, qualifier.layoutXfbStride)) - error(loc, "all stride settings must match for xfb buffer", "xfb_stride", "%d", - qualifier.layoutXfbBuffer); - } - break; - default: - error(loc, "default qualifier requires 'uniform', 'buffer', 'in', or 'out' storage qualification", "", ""); - return; - } -} - -// -// Take the sequence of statements that has been built up since the last case/default, -// put it on the list of top-level nodes for the current (inner-most) switch statement, -// and follow that by the case/default we are on now. (See switch topology comment on -// TIntermSwitch.) -// -void HlslParseContext::wrapupSwitchSubsequence(TIntermAggregate* statements, TIntermNode* branchNode) -{ - TIntermSequence* switchSequence = switchSequenceStack.back(); - - if (statements) { - statements->setOperator(EOpSequence); - switchSequence->push_back(statements); - } - if (branchNode) { - // check all previous cases for the same label (or both are 'default') - for (unsigned int s = 0; s < switchSequence->size(); ++s) { - TIntermBranch* prevBranch = (*switchSequence)[s]->getAsBranchNode(); - if (prevBranch) { - TIntermTyped* prevExpression = prevBranch->getExpression(); - TIntermTyped* newExpression = branchNode->getAsBranchNode()->getExpression(); - if (prevExpression == nullptr && newExpression == nullptr) - error(branchNode->getLoc(), "duplicate label", "default", ""); - else if (prevExpression != nullptr && - newExpression != nullptr && - prevExpression->getAsConstantUnion() && - newExpression->getAsConstantUnion() && - prevExpression->getAsConstantUnion()->getConstArray()[0].getIConst() == - newExpression->getAsConstantUnion()->getConstArray()[0].getIConst()) - error(branchNode->getLoc(), "duplicated value", "case", ""); - } - } - switchSequence->push_back(branchNode); - } -} - -// -// Turn the top-level node sequence built up of wrapupSwitchSubsequence -// into a switch node. -// -TIntermNode* HlslParseContext::addSwitch(const TSourceLoc& loc, TIntermTyped* expression, - TIntermAggregate* lastStatements, TSelectionControl control) -{ - wrapupSwitchSubsequence(lastStatements, nullptr); - - if (expression == nullptr || - (expression->getBasicType() != EbtInt && expression->getBasicType() != EbtUint) || - expression->getType().isArray() || expression->getType().isMatrix() || expression->getType().isVector()) - error(loc, "condition must be a scalar integer expression", "switch", ""); - - // If there is nothing to do, drop the switch but still execute the expression - TIntermSequence* switchSequence = switchSequenceStack.back(); - if (switchSequence->size() == 0) - return expression; - - if (lastStatements == nullptr) { - // emulate a break for error recovery - lastStatements = intermediate.makeAggregate(intermediate.addBranch(EOpBreak, loc)); - lastStatements->setOperator(EOpSequence); - switchSequence->push_back(lastStatements); - } - - TIntermAggregate* body = new TIntermAggregate(EOpSequence); - body->getSequence() = *switchSequenceStack.back(); - body->setLoc(loc); - - TIntermSwitch* switchNode = new TIntermSwitch(expression, body); - switchNode->setLoc(loc); - switchNode->setSelectionControl(control); - - return switchNode; -} - -// Make a new symbol-table level that is made out of the members of a structure. -// This should be done as an anonymous struct (name is "") so that the symbol table -// finds the members with no explicit reference to a 'this' variable. 
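The duplicate-label check in the removed wrapupSwitchSubsequence() above compares each new case/default against every label already collected for the current switch. Below is a minimal standalone sketch of that idea in plain C++17, not glslang types; the label values are invented purely for illustration.

    // Sketch: detect duplicate 'case' values and duplicate 'default' labels by
    // comparing each incoming label against all labels seen so far.
    #include <cstdio>
    #include <optional>
    #include <vector>

    int main()
    {
        // std::nullopt stands in for a 'default' label; integers are 'case' values.
        std::vector<std::optional<int>> seen;
        const std::optional<int> incoming[] = { 1, 2, std::nullopt, 2, std::nullopt };

        for (const auto& label : incoming) {
            bool duplicate = false;
            for (const auto& prev : seen) {
                if (!prev && !label)                        // two 'default' labels
                    duplicate = true;
                else if (prev && label && *prev == *label)  // two identical 'case' values
                    duplicate = true;
            }
            if (duplicate)
                std::printf("duplicate label%s\n", label ? " (case)" : " (default)");
            else
                seen.push_back(label);
        }
        return 0;
    }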
-void HlslParseContext::pushThisScope(const TType& thisStruct, const TVector& functionDeclarators) -{ - // member variables - TVariable& thisVariable = *new TVariable(NewPoolTString(""), thisStruct); - symbolTable.pushThis(thisVariable); - - // member functions - for (auto it = functionDeclarators.begin(); it != functionDeclarators.end(); ++it) { - // member should have a prefix matching currentTypePrefix.back() - // but, symbol lookup within the class scope will just use the - // unprefixed name. Hence, there are two: one fully prefixed and - // one with no prefix. - TFunction& member = *it->function->clone(); - member.removePrefix(currentTypePrefix.back()); - symbolTable.insert(member); - } -} - -// Track levels of class/struct/namespace nesting with a prefix string using -// the type names separated by the scoping operator. E.g., two levels -// would look like: -// -// outer::inner -// -// The string is empty when at normal global level. -// -void HlslParseContext::pushNamespace(const TString& typeName) -{ - // make new type prefix - TString newPrefix; - if (currentTypePrefix.size() > 0) - newPrefix = currentTypePrefix.back(); - newPrefix.append(typeName); - newPrefix.append(scopeMangler); - currentTypePrefix.push_back(newPrefix); -} - -// Opposite of pushNamespace(), see above -void HlslParseContext::popNamespace() -{ - currentTypePrefix.pop_back(); -} - -// Use the class/struct nesting string to create a global name for -// a member of a class/struct. -void HlslParseContext::getFullNamespaceName(const TString*& name) const -{ - if (currentTypePrefix.size() == 0) - return; - - TString* fullName = NewPoolTString(currentTypePrefix.back().c_str()); - fullName->append(*name); - name = fullName; -} - -// Helper function to add the namespace scope mangling syntax to a string. -void HlslParseContext::addScopeMangler(TString& name) -{ - name.append(scopeMangler); -} - -// Potentially rename shader entry point function -void HlslParseContext::renameShaderFunction(const TString*& name) const -{ - // Replace the entry point name given in the shader with the real entry point name, - // if there is a substitution. - if (name != nullptr && *name == sourceEntryPointName) - name = NewPoolTString(intermediate.getEntryPointName().c_str()); -} - -// Return true if this has uniform-interface like decorations. -bool HlslParseContext::hasUniform(const TQualifier& qualifier) const -{ - return qualifier.hasUniformLayout() || - qualifier.layoutPushConstant; -} - -// Potentially not the opposite of hasUniform(), as if some characteristic is -// ever used for more than one thing (e.g., uniform or input), hasUniform() should -// say it exists, but clearUniform() should leave it in place. -void HlslParseContext::clearUniform(TQualifier& qualifier) -{ - qualifier.clearUniformLayout(); - qualifier.layoutPushConstant = false; -} - -// Return false if builtIn by itself doesn't force this qualifier to be an input qualifier. 
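The removed pushNamespace()/popNamespace()/getFullNamespaceName() above maintain a stack of scope prefixes of the form "outer::inner::". A small self-contained sketch of the same prefix-stack idea follows, assuming "::" as the scope mangler (consistent with the "outer::inner" example in the comment above); the function and variable names are illustrative, not glslang's.

    #include <cstdio>
    #include <string>
    #include <vector>

    static std::vector<std::string> prefixStack;        // empty at global scope
    static const char* const scopeMangler = "::";

    void pushNamespace(const std::string& typeName)
    {
        // New prefix = previous prefix + type name + mangler.
        std::string newPrefix = prefixStack.empty() ? "" : prefixStack.back();
        newPrefix += typeName;
        newPrefix += scopeMangler;
        prefixStack.push_back(newPrefix);
    }

    void popNamespace() { prefixStack.pop_back(); }

    std::string fullName(const std::string& member)
    {
        // Prepend the current prefix, if any, to get the global name of a member.
        return prefixStack.empty() ? member : prefixStack.back() + member;
    }

    int main()
    {
        pushNamespace("outer");
        pushNamespace("inner");
        std::printf("%s\n", fullName("member").c_str());  // prints "outer::inner::member"
        popNamespace();
        std::printf("%s\n", fullName("member").c_str());  // prints "outer::member"
        return 0;
    }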
-bool HlslParseContext::isInputBuiltIn(const TQualifier& qualifier) const -{ - switch (qualifier.builtIn) { - case EbvPosition: - case EbvPointSize: - return language != EShLangVertex && language != EShLangCompute && language != EShLangFragment; - case EbvClipDistance: - case EbvCullDistance: - return language != EShLangVertex && language != EShLangCompute; - case EbvFragCoord: - case EbvFace: - case EbvHelperInvocation: - case EbvLayer: - case EbvPointCoord: - case EbvSampleId: - case EbvSampleMask: - case EbvSamplePosition: - case EbvViewportIndex: - return language == EShLangFragment; - case EbvGlobalInvocationId: - case EbvLocalInvocationIndex: - case EbvLocalInvocationId: - case EbvNumWorkGroups: - case EbvWorkGroupId: - case EbvWorkGroupSize: - return language == EShLangCompute; - case EbvInvocationId: - return language == EShLangTessControl || language == EShLangTessEvaluation || language == EShLangGeometry; - case EbvPatchVertices: - return language == EShLangTessControl || language == EShLangTessEvaluation; - case EbvInstanceId: - case EbvInstanceIndex: - case EbvVertexId: - case EbvVertexIndex: - return language == EShLangVertex; - case EbvPrimitiveId: - return language == EShLangGeometry || language == EShLangFragment; - case EbvTessLevelInner: - case EbvTessLevelOuter: - return language == EShLangTessEvaluation; - case EbvTessCoord: - return language == EShLangTessEvaluation; - default: - return false; - } -} - -// Return true if there are decorations to preserve for input-like storage. -bool HlslParseContext::hasInput(const TQualifier& qualifier) const -{ - if (qualifier.hasAnyLocation()) - return true; - - if (language == EShLangFragment && (qualifier.isInterpolation() || qualifier.centroid || qualifier.sample)) - return true; - - if (language == EShLangTessEvaluation && qualifier.patch) - return true; - - if (isInputBuiltIn(qualifier)) - return true; - - return false; -} - -// Return false if builtIn by itself doesn't force this qualifier to be an output qualifier. -bool HlslParseContext::isOutputBuiltIn(const TQualifier& qualifier) const -{ - switch (qualifier.builtIn) { - case EbvPosition: - case EbvPointSize: - case EbvClipVertex: - case EbvClipDistance: - case EbvCullDistance: - return language != EShLangFragment && language != EShLangCompute; - case EbvFragDepth: - case EbvFragDepthGreater: - case EbvFragDepthLesser: - case EbvSampleMask: - return language == EShLangFragment; - case EbvLayer: - case EbvViewportIndex: - return language == EShLangGeometry; - case EbvPrimitiveId: - return language == EShLangGeometry || language == EShLangTessControl || language == EShLangTessEvaluation; - case EbvTessLevelInner: - case EbvTessLevelOuter: - return language == EShLangTessControl; - default: - return false; - } -} - -// Return true if there are decorations to preserve for output-like storage. -bool HlslParseContext::hasOutput(const TQualifier& qualifier) const -{ - if (qualifier.hasAnyLocation()) - return true; - - if (language != EShLangFragment && language != EShLangCompute && qualifier.hasXfb()) - return true; - - if (language == EShLangTessControl && qualifier.patch) - return true; - - if (language == EShLangGeometry && qualifier.hasStream()) - return true; - - if (isOutputBuiltIn(qualifier)) - return true; - - return false; -} - -// Make the IO decorations etc. be appropriate only for an input interface. 
-void HlslParseContext::correctInput(TQualifier& qualifier) -{ - clearUniform(qualifier); - if (language == EShLangVertex) - qualifier.clearInterstage(); - if (language != EShLangTessEvaluation) - qualifier.patch = false; - if (language != EShLangFragment) { - qualifier.clearInterpolation(); - qualifier.sample = false; - } - - qualifier.clearStreamLayout(); - qualifier.clearXfbLayout(); - - if (! isInputBuiltIn(qualifier)) - qualifier.builtIn = EbvNone; -} - -// Make the IO decorations etc. be appropriate only for an output interface. -void HlslParseContext::correctOutput(TQualifier& qualifier) -{ - clearUniform(qualifier); - if (language == EShLangFragment) - qualifier.clearInterstage(); - if (language != EShLangGeometry) - qualifier.clearStreamLayout(); - if (language == EShLangFragment) - qualifier.clearXfbLayout(); - if (language != EShLangTessControl) - qualifier.patch = false; - - switch (qualifier.builtIn) { - case EbvFragDepthGreater: - intermediate.setDepth(EldGreater); - qualifier.builtIn = EbvFragDepth; - break; - case EbvFragDepthLesser: - intermediate.setDepth(EldLess); - qualifier.builtIn = EbvFragDepth; - break; - default: - break; - } - - if (! isOutputBuiltIn(qualifier)) - qualifier.builtIn = EbvNone; -} - -// Make the IO decorations etc. be appropriate only for uniform type interfaces. -void HlslParseContext::correctUniform(TQualifier& qualifier) -{ - if (qualifier.declaredBuiltIn == EbvNone) - qualifier.declaredBuiltIn = qualifier.builtIn; - - qualifier.builtIn = EbvNone; - qualifier.clearInterstage(); - qualifier.clearInterstageLayout(); -} - -// Clear out all IO/Uniform stuff, so this has nothing to do with being an IO interface. -void HlslParseContext::clearUniformInputOutput(TQualifier& qualifier) -{ - clearUniform(qualifier); - correctUniform(qualifier); -} - - -// Set texture return type. Returns success (not all types are valid). -bool HlslParseContext::setTextureReturnType(TSampler& sampler, const TType& retType, const TSourceLoc& loc) -{ - // Seed the output with an invalid index. We will set it to a valid one if we can. - sampler.structReturnIndex = TSampler::noReturnStruct; - - // Arrays aren't supported. - if (retType.isArray()) { - error(loc, "Arrays not supported in texture template types", "", ""); - return false; - } - - // If return type is a vector, remember the vector size in the sampler, and return. - if (retType.isVector() || retType.isScalar()) { - sampler.vectorSize = retType.getVectorSize(); - return true; - } - - // If it wasn't a vector, it must be a struct meeting certain requirements. The requirements - // are checked below: just check for struct-ness here. - if (!retType.isStruct()) { - error(loc, "Invalid texture template type", "", ""); - return false; - } - - TTypeList* members = retType.getWritableStruct(); - - // Check for too many or not enough structure members. - if (members->size() > 4 || members->size() == 0) { - error(loc, "Invalid member count in texture template structure", "", ""); - return false; - } - - // Error checking: We must have <= 4 total components, all of the same basic type. 
- unsigned totalComponents = 0; - for (unsigned m = 0; m < members->size(); ++m) { - // Check for bad member types - if (!(*members)[m].type->isScalar() && !(*members)[m].type->isVector()) { - error(loc, "Invalid texture template struct member type", "", ""); - return false; - } - - const unsigned memberVectorSize = (*members)[m].type->getVectorSize(); - totalComponents += memberVectorSize; - - // too many total member components - if (totalComponents > 4) { - error(loc, "Too many components in texture template structure type", "", ""); - return false; - } - - // All members must be of a common basic type - if ((*members)[m].type->getBasicType() != (*members)[0].type->getBasicType()) { - error(loc, "Texture template structure members must same basic type", "", ""); - return false; - } - } - - // If the structure in the return type already exists in the table, we'll use it. Otherwise, we'll make - // a new entry. This is a linear search, but it hardly ever happens, and the list cannot be very large. - for (unsigned int idx = 0; idx < textureReturnStruct.size(); ++idx) { - if (textureReturnStruct[idx] == members) { - sampler.structReturnIndex = idx; - return true; - } - } - - // It wasn't found as an existing entry. See if we have room for a new one. - if (textureReturnStruct.size() >= TSampler::structReturnSlots) { - error(loc, "Texture template struct return slots exceeded", "", ""); - return false; - } - - // Insert it in the vector that tracks struct return types. - sampler.structReturnIndex = unsigned(textureReturnStruct.size()); - textureReturnStruct.push_back(members); - - // Success! - return true; -} - -// Return the sampler return type in retType. -void HlslParseContext::getTextureReturnType(const TSampler& sampler, TType& retType) const -{ - if (sampler.hasReturnStruct()) { - assert(textureReturnStruct.size() >= sampler.structReturnIndex); - - // We land here if the texture return is a structure. - TTypeList* blockStruct = textureReturnStruct[sampler.structReturnIndex]; - - const TType resultType(blockStruct, ""); - retType.shallowCopy(resultType); - } else { - // We land here if the texture return is a vector or scalar. - const TType resultType(sampler.type, EvqTemporary, sampler.getVectorSize()); - retType.shallowCopy(resultType); - } -} - - -// Return a symbol for the tessellation linkage variable of the given TBuiltInVariable type -TIntermSymbol* HlslParseContext::findTessLinkageSymbol(TBuiltInVariable biType) const -{ - const auto it = builtInTessLinkageSymbols.find(biType); - if (it == builtInTessLinkageSymbols.end()) // if it wasn't declared by the user, return nullptr - return nullptr; - - return intermediate.addSymbol(*it->second->getAsVariable()); -} - -// Finalization step: Add patch constant function invocation -void HlslParseContext::addPatchConstantInvocation() -{ - TSourceLoc loc; - loc.init(); - - // If there's no patch constant function, or we're not a HS, do nothing. 
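The removed setTextureReturnType() above accepts a struct as a texture template type only if it has one to four members, each a scalar or vector, with at most four components in total and a single basic type throughout. Below is a standalone sketch of just those structure checks, using simplified stand-in types rather than glslang's TType; the example member lists are invented.

    #include <cstdio>
    #include <vector>

    struct Member { int vectorSize; char basicType; };   // simplified stand-ins

    bool validTextureTemplateStruct(const std::vector<Member>& members)
    {
        if (members.empty() || members.size() > 4)
            return false;                                 // bad member count

        int totalComponents = 0;
        for (const Member& m : members) {
            if (m.vectorSize < 1 || m.vectorSize > 4)
                return false;                             // not a scalar or vector
            totalComponents += m.vectorSize;
            if (totalComponents > 4)
                return false;                             // too many components overall
            if (m.basicType != members[0].basicType)
                return false;                             // mixed basic types
        }
        return true;
    }

    int main()
    {
        std::printf("%d\n", validTextureTemplateStruct({ {2, 'F'}, {2, 'F'} })); // 1: float2 + float2
        std::printf("%d\n", validTextureTemplateStruct({ {3, 'F'}, {2, 'F'} })); // 0: 5 components
        std::printf("%d\n", validTextureTemplateStruct({ {1, 'F'}, {1, 'I'} })); // 0: mixed types
        return 0;
    }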
- if (patchConstantFunctionName.empty() || language != EShLangTessControl) - return; - - if (symbolTable.isFunctionNameVariable(patchConstantFunctionName)) { - error(loc, "can't use variable in patch constant function", patchConstantFunctionName.c_str(), ""); - return; - } - - const TString mangledName = patchConstantFunctionName + "("; - - // create list of PCF candidates - TVector candidateList; - bool builtIn; - symbolTable.findFunctionNameList(mangledName, candidateList, builtIn); - - // We have to have one and only one, or we don't know which to pick: the patchconstantfunc does not - // allow any disambiguation of overloads. - if (candidateList.empty()) { - error(loc, "patch constant function not found", patchConstantFunctionName.c_str(), ""); - return; - } - - // Based on directed experiments, it appears that if there are overloaded patchconstantfunctions, - // HLSL picks the last one in shader source order. Since that isn't yet implemented here, error - // out if there is more than one candidate. - if (candidateList.size() > 1) { - error(loc, "ambiguous patch constant function", patchConstantFunctionName.c_str(), ""); - return; - } - - // Look for built-in variables in a function's parameter list. - const auto findBuiltIns = [&](const TFunction& function, std::set& builtIns) { - for (int p=0; pgetQualifier().storage; - - if (storage == EvqConstReadOnly) // treated identically to input - storage = EvqIn; - - if (function[p].getDeclaredBuiltIn() != EbvNone) - builtIns.insert(HlslParseContext::tInterstageIoData(function[p].getDeclaredBuiltIn(), storage)); - else - builtIns.insert(HlslParseContext::tInterstageIoData(function[p].type->getQualifier().builtIn, storage)); - } - }; - - // If we synthesize a built-in interface variable, we must add it to the linkage. - const auto addToLinkage = [&](const TType& type, const TString* name, TIntermSymbol** symbolNode) { - if (name == nullptr) { - error(loc, "unable to locate patch function parameter name", "", ""); - return; - } else { - TVariable& variable = *new TVariable(name, type); - if (! symbolTable.insert(variable)) { - error(loc, "unable to declare patch constant function interface variable", name->c_str(), ""); - return; - } - - globalQualifierFix(loc, variable.getWritableType().getQualifier()); - - if (symbolNode != nullptr) - *symbolNode = intermediate.addSymbol(variable); - - trackLinkage(variable); - } - }; - - const auto isOutputPatch = [this](TFunction& patchConstantFunction, int param) { - const TType& type = *patchConstantFunction[param].type; - const TBuiltInVariable biType = patchConstantFunction[param].getDeclaredBuiltIn(); - - return type.isArray() && !type.isRuntimeSizedArray() && biType == EbvOutputPatch; - }; - - // We will perform these steps. Each is in a scoped block for separation: they could - // become separate functions to make addPatchConstantInvocation shorter. - // - // 1. Union the interfaces, and create built-ins for anything present in the PCF and - // declared as a built-in variable that isn't present in the entry point's signature. - // - // 2. Synthesizes a call to the patchconstfunction using built-in variables from either main, - // or the ones we created. Matching is based on built-in type. We may use synthesized - // variables from (1) above. - // - // 2B: Synthesize per control point invocations of wrapped entry point if the PCF requires them. - // - // 3. Create a return sequence: copy the return value (if any) from the PCF to a - // (non-sanitized) output variable. 
In case this may involve multiple copies, such as for - // an arrayed variable, a temporary copy of the PCF output is created to avoid multiple - // indirections into a complex R-value coming from the call to the PCF. - // - // 4. Create a barrier. - // - // 5/5B. Call the PCF inside an if test for (invocation id == 0). - - TFunction& patchConstantFunction = const_cast(*candidateList[0]); - const int pcfParamCount = patchConstantFunction.getParamCount(); - TIntermSymbol* invocationIdSym = findTessLinkageSymbol(EbvInvocationId); - TIntermSequence& epBodySeq = entryPointFunctionBody->getAsAggregate()->getSequence(); - - int outPatchParam = -1; // -1 means there isn't one. - - // ================ Step 1A: Union Interfaces ================ - // Our patch constant function. - { - std::set pcfBuiltIns; // patch constant function built-ins - std::set epfBuiltIns; // entry point function built-ins - - assert(entryPointFunction); - assert(entryPointFunctionBody); - - findBuiltIns(patchConstantFunction, pcfBuiltIns); - findBuiltIns(*entryPointFunction, epfBuiltIns); - - // Find the set of built-ins in the PCF that are not present in the entry point. - std::set notInEntryPoint; - - notInEntryPoint = pcfBuiltIns; - - // std::set_difference not usable on unordered containers - for (auto bi = epfBuiltIns.begin(); bi != epfBuiltIns.end(); ++bi) - notInEntryPoint.erase(*bi); - - // Now we'll add those to the entry and to the linkage. - for (int p=0; pgetQualifier().storage; - - // Track whether there is an output patch param - if (isOutputPatch(patchConstantFunction, p)) { - if (outPatchParam >= 0) { - // Presently we only support one per ctrl pt input. - error(loc, "unimplemented: multiple output patches in patch constant function", "", ""); - return; - } - outPatchParam = p; - } - - if (biType != EbvNone) { - TType* paramType = patchConstantFunction[p].type->clone(); - - if (storage == EvqConstReadOnly) // treated identically to input - storage = EvqIn; - - // Presently, the only non-built-in we support is InputPatch, which is treated as - // a pseudo-built-in. - if (biType == EbvInputPatch) { - builtInTessLinkageSymbols[biType] = inputPatch; - } else if (biType == EbvOutputPatch) { - // Nothing... - } else { - // Use the original declaration type for the linkage - paramType->getQualifier().builtIn = biType; - - if (notInEntryPoint.count(tInterstageIoData(biType, storage)) == 1) - addToLinkage(*paramType, patchConstantFunction[p].name, nullptr); - } - } - } - - // If we didn't find it because the shader made one, add our own. 
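Step 1A above builds the set of built-ins the patch constant function uses but the entry point does not, by copying one set and erasing the other's elements rather than calling std::set_difference. A self-contained sketch of that difference computation follows; the built-in names are invented placeholders standing in for tInterstageIoData entries.

    #include <cstdio>
    #include <set>
    #include <string>

    int main()
    {
        const std::set<std::string> pcfBuiltIns = { "InvocationId", "TessLevelOuter", "TessLevelInner" };
        const std::set<std::string> epfBuiltIns = { "InvocationId", "Position" };

        // Start from the PCF's built-ins and erase anything the entry point already declares.
        std::set<std::string> notInEntryPoint = pcfBuiltIns;
        for (const auto& bi : epfBuiltIns)
            notInEntryPoint.erase(bi);

        // What remains must be synthesized and added to the linkage.
        for (const auto& bi : notInEntryPoint)
            std::printf("synthesize: %s\n", bi.c_str()); // TessLevelInner, TessLevelOuter
        return 0;
    }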
- if (invocationIdSym == nullptr) { - TType invocationIdType(EbtUint, EvqIn, 1); - TString* invocationIdName = NewPoolTString("InvocationId"); - invocationIdType.getQualifier().builtIn = EbvInvocationId; - addToLinkage(invocationIdType, invocationIdName, &invocationIdSym); - } - - assert(invocationIdSym); - } - - TIntermTyped* pcfArguments = nullptr; - TVariable* perCtrlPtVar = nullptr; - - // ================ Step 1B: Argument synthesis ================ - // Create pcfArguments for synthesis of patchconstantfunction invocation - // TODO: handle struct or array inputs - { - for (int p=0; pgetWritableType().getQualifier().makeTemporary(); - } - inputArg = intermediate.addSymbol(*perCtrlPtVar, loc); - } else { - // find which built-in it is - const TBuiltInVariable biType = patchConstantFunction[p].getDeclaredBuiltIn(); - - inputArg = findTessLinkageSymbol(biType); - - if (inputArg == nullptr) { - error(loc, "unable to find patch constant function built-in variable", "", ""); - return; - } - } - - if (pcfParamCount == 1) - pcfArguments = inputArg; - else - pcfArguments = intermediate.growAggregate(pcfArguments, inputArg); - } - } - - // ================ Step 2: Synthesize call to PCF ================ - TIntermAggregate* pcfCallSequence = nullptr; - TIntermTyped* pcfCall = nullptr; - - { - // Create a function call to the patchconstantfunction - if (pcfArguments) - addInputArgumentConversions(patchConstantFunction, pcfArguments); - - // Synthetic call. - pcfCall = intermediate.setAggregateOperator(pcfArguments, EOpFunctionCall, patchConstantFunction.getType(), loc); - pcfCall->getAsAggregate()->setUserDefined(); - pcfCall->getAsAggregate()->setName(patchConstantFunction.getMangledName()); - intermediate.addToCallGraph(infoSink, intermediate.getEntryPointMangledName().c_str(), - patchConstantFunction.getMangledName()); - - if (pcfCall->getAsAggregate()) { - TQualifierList& qualifierList = pcfCall->getAsAggregate()->getQualifierList(); - for (int i = 0; i < patchConstantFunction.getParamCount(); ++i) { - TStorageQualifier qual = patchConstantFunction[i].type->getQualifier().storage; - qualifierList.push_back(qual); - } - pcfCall = addOutputArgumentConversions(patchConstantFunction, *pcfCall->getAsOperator()); - } - } - - // ================ Step 2B: Per Control Point synthesis ================ - // If there is per control point data, we must either emulate that with multiple - // invocations of the entry point to build up an array, or (TODO:) use a yet - // unavailable extension to look across the SIMD lanes. This is the former - // as a placeholder for the latter. - if (outPatchParam >= 0) { - // We must introduce a local temp variable of the type wanted by the PCF input. - const int arraySize = patchConstantFunction[outPatchParam].type->getOuterArraySize(); - - if (entryPointFunction->getType().getBasicType() == EbtVoid) { - error(loc, "entry point must return a value for use with patch constant function", "", ""); - return; - } - - // Create calls to wrapped main to fill in the array. We will substitute fixed values - // of invocation ID when calling the wrapped main. - - // This is the type of the each member of the per ctrl point array. - const TType derefType(perCtrlPtVar->getType(), 0); - - for (int cpt = 0; cpt < arraySize; ++cpt) { - // TODO: improve. substr(1) here is to avoid the '@' that was grafted on but isn't in the symtab - // for this function. 
- const TString origName = entryPointFunction->getName().substr(1); - TFunction callee(&origName, TType(EbtVoid)); - TIntermTyped* callingArgs = nullptr; - - for (int i = 0; i < entryPointFunction->getParamCount(); i++) { - TParameter& param = (*entryPointFunction)[i]; - TType& paramType = *param.type; - - if (paramType.getQualifier().isParamOutput()) { - error(loc, "unimplemented: entry point outputs in patch constant function invocation", "", ""); - return; - } - - if (paramType.getQualifier().isParamInput()) { - TIntermTyped* arg = nullptr; - if ((*entryPointFunction)[i].getDeclaredBuiltIn() == EbvInvocationId) { - // substitute invocation ID with the array element ID - arg = intermediate.addConstantUnion(cpt, loc); - } else { - TVariable* argVar = makeInternalVariable(*param.name, *param.type); - argVar->getWritableType().getQualifier().makeTemporary(); - arg = intermediate.addSymbol(*argVar); - } - - handleFunctionArgument(&callee, callingArgs, arg); - } - } - - // Call and assign to per ctrl point variable - currentCaller = intermediate.getEntryPointMangledName().c_str(); - TIntermTyped* callReturn = handleFunctionCall(loc, &callee, callingArgs); - TIntermTyped* index = intermediate.addConstantUnion(cpt, loc); - TIntermSymbol* perCtrlPtSym = intermediate.addSymbol(*perCtrlPtVar, loc); - TIntermTyped* element = intermediate.addIndex(EOpIndexDirect, perCtrlPtSym, index, loc); - element->setType(derefType); - element->setLoc(loc); - - pcfCallSequence = intermediate.growAggregate(pcfCallSequence, - handleAssign(loc, EOpAssign, element, callReturn)); - } - } - - // ================ Step 3: Create return Sequence ================ - // Return sequence: copy PCF result to a temporary, then to shader output variable. - if (pcfCall->getBasicType() != EbtVoid) { - const TType* retType = &patchConstantFunction.getType(); // return type from the PCF - TType outType; // output type that goes with the return type. 
- outType.shallowCopy(*retType); - - // substitute the output type - const auto newLists = ioTypeMap.find(retType->getStruct()); - if (newLists != ioTypeMap.end()) - outType.setStruct(newLists->second.output); - - // Substitute the top level type's built-in type - if (patchConstantFunction.getDeclaredBuiltInType() != EbvNone) - outType.getQualifier().builtIn = patchConstantFunction.getDeclaredBuiltInType(); - - outType.getQualifier().patch = true; // make it a per-patch variable - - TVariable* pcfOutput = makeInternalVariable("@patchConstantOutput", outType); - pcfOutput->getWritableType().getQualifier().storage = EvqVaryingOut; - - if (pcfOutput->getType().containsBuiltIn()) - split(*pcfOutput); - - assignToInterface(*pcfOutput); - - TIntermSymbol* pcfOutputSym = intermediate.addSymbol(*pcfOutput, loc); - - // The call to the PCF is a complex R-value: we want to store it in a temp to avoid - // repeated calls to the PCF: - TVariable* pcfCallResult = makeInternalVariable("@patchConstantResult", *retType); - pcfCallResult->getWritableType().getQualifier().makeTemporary(); - - TIntermSymbol* pcfResultVar = intermediate.addSymbol(*pcfCallResult, loc); - TIntermNode* pcfResultAssign = handleAssign(loc, EOpAssign, pcfResultVar, pcfCall); - TIntermNode* pcfResultToOut = handleAssign(loc, EOpAssign, pcfOutputSym, - intermediate.addSymbol(*pcfCallResult, loc)); - - pcfCallSequence = intermediate.growAggregate(pcfCallSequence, pcfResultAssign); - pcfCallSequence = intermediate.growAggregate(pcfCallSequence, pcfResultToOut); - } else { - pcfCallSequence = intermediate.growAggregate(pcfCallSequence, pcfCall); - } - - // ================ Step 4: Barrier ================ - TIntermTyped* barrier = new TIntermAggregate(EOpBarrier); - barrier->setLoc(loc); - barrier->setType(TType(EbtVoid)); - epBodySeq.insert(epBodySeq.end(), barrier); - - // ================ Step 5: Test on invocation ID ================ - TIntermTyped* zero = intermediate.addConstantUnion(0, loc, true); - TIntermTyped* cmp = intermediate.addBinaryNode(EOpEqual, invocationIdSym, zero, loc, TType(EbtBool)); - - - // ================ Step 5B: Create if statement on Invocation ID == 0 ================ - intermediate.setAggregateOperator(pcfCallSequence, EOpSequence, TType(EbtVoid), loc); - TIntermTyped* invocationIdTest = new TIntermSelection(cmp, pcfCallSequence, nullptr); - invocationIdTest->setLoc(loc); - - // add our test sequence before the return. - epBodySeq.insert(epBodySeq.end(), invocationIdTest); -} - -// Finalization step: remove unused buffer blocks from linkage (we don't know until the -// shader is entirely compiled) -void HlslParseContext::removeUnusedStructBufferCounters() -{ - const auto endIt = std::remove_if(linkageSymbols.begin(), linkageSymbols.end(), - [this](const TSymbol* sym) { - const auto sbcIt = structBufferCounter.find(sym->getName()); - return sbcIt != structBufferCounter.end() && !sbcIt->second; - }); - - linkageSymbols.erase(endIt, linkageSymbols.end()); -} - -// post-processing -void HlslParseContext::finish() -{ - // Error check: There was a dangling .mips operator. These are not nested constructs in the grammar, so - // cannot be detected there. This is not strictly needed in a non-validating parser; it's just helpful. - if (! 
mipsOperatorMipArg.empty()) { - error(mipsOperatorMipArg.back().loc, "unterminated mips operator:", "", ""); - } - - removeUnusedStructBufferCounters(); - addPatchConstantInvocation(); - - TParseContextBase::finish(); -} - -} // end namespace glslang diff --git a/deps/glslang/hlsl/hlslParseHelper.h b/deps/glslang/hlsl/hlslParseHelper.h deleted file mode 100644 index e545109c..00000000 --- a/deps/glslang/hlsl/hlslParseHelper.h +++ /dev/null @@ -1,465 +0,0 @@ -// -// Copyright (C) 2016 Google, Inc. -// Copyright (C) 2016 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. 
-// -#ifndef HLSL_PARSE_INCLUDED_ -#define HLSL_PARSE_INCLUDED_ - -#include "../glslang/MachineIndependent/parseVersions.h" -#include "../glslang/MachineIndependent/ParseHelper.h" - -#include - -namespace glslang { - -class TAttributeMap; // forward declare -class TFunctionDeclarator; - -class HlslParseContext : public TParseContextBase { -public: - HlslParseContext(TSymbolTable&, TIntermediate&, bool parsingBuiltins, - int version, EProfile, const SpvVersion& spvVersion, EShLanguage, TInfoSink&, - const TString sourceEntryPointName, - bool forwardCompatible = false, EShMessages messages = EShMsgDefault); - virtual ~HlslParseContext(); - void initializeExtensionBehavior() override; - - void setLimits(const TBuiltInResource&) override; - bool parseShaderStrings(TPpContext&, TInputScanner& input, bool versionWillBeError = false) override; - virtual const char* getGlobalUniformBlockName() const override { return "$Global"; } - virtual void setUniformBlockDefaults(TType& block) const override - { - block.getQualifier().layoutPacking = ElpStd140; - block.getQualifier().layoutMatrix = ElmRowMajor; - } - - void reservedPpErrorCheck(const TSourceLoc&, const char* /*name*/, const char* /*op*/) override { } - bool lineContinuationCheck(const TSourceLoc&, bool /*endOfComment*/) override { return true; } - bool lineDirectiveShouldSetNextLine() const override { return true; } - bool builtInName(const TString&); - - void handlePragma(const TSourceLoc&, const TVector&) override; - TIntermTyped* handleVariable(const TSourceLoc&, const TString* string); - TIntermTyped* handleBracketDereference(const TSourceLoc&, TIntermTyped* base, TIntermTyped* index); - TIntermTyped* handleBracketOperator(const TSourceLoc&, TIntermTyped* base, TIntermTyped* index); - void checkIndex(const TSourceLoc&, const TType&, int& index); - - TIntermTyped* handleBinaryMath(const TSourceLoc&, const char* str, TOperator op, TIntermTyped* left, TIntermTyped* right); - TIntermTyped* handleUnaryMath(const TSourceLoc&, const char* str, TOperator op, TIntermTyped* childNode); - TIntermTyped* handleDotDereference(const TSourceLoc&, TIntermTyped* base, const TString& field); - bool isBuiltInMethod(const TSourceLoc&, TIntermTyped* base, const TString& field); - void assignToInterface(TVariable& variable); - void handleFunctionDeclarator(const TSourceLoc&, TFunction& function, bool prototype); - TIntermAggregate* handleFunctionDefinition(const TSourceLoc&, TFunction&, const TAttributeMap&, TIntermNode*& entryPointTree); - TIntermNode* transformEntryPoint(const TSourceLoc&, TFunction&, const TAttributeMap&); - void handleEntryPointAttributes(const TSourceLoc&, const TAttributeMap&); - void handleFunctionBody(const TSourceLoc&, TFunction&, TIntermNode* functionBody, TIntermNode*& node); - void remapEntryPointIO(TFunction& function, TVariable*& returnValue, TVector& inputs, TVector& outputs); - void remapNonEntryPointIO(TFunction& function); - TIntermNode* handleReturnValue(const TSourceLoc&, TIntermTyped*); - void handleFunctionArgument(TFunction*, TIntermTyped*& arguments, TIntermTyped* newArg); - TIntermTyped* executeFlattenedInitializer(const TSourceLoc&, TIntermSymbol*, TIntermAggregate&); - TIntermTyped* handleAssign(const TSourceLoc&, TOperator, TIntermTyped* left, TIntermTyped* right); - TIntermTyped* handleAssignToMatrixSwizzle(const TSourceLoc&, TOperator, TIntermTyped* left, TIntermTyped* right); - TIntermTyped* handleFunctionCall(const TSourceLoc&, TFunction*, TIntermTyped*); - TIntermAggregate* assignClipCullDistance(const 
TSourceLoc&, TOperator, int semanticId, TIntermTyped* left, TIntermTyped* right); - void decomposeIntrinsic(const TSourceLoc&, TIntermTyped*& node, TIntermNode* arguments); - void decomposeSampleMethods(const TSourceLoc&, TIntermTyped*& node, TIntermNode* arguments); - void decomposeStructBufferMethods(const TSourceLoc&, TIntermTyped*& node, TIntermNode* arguments); - void decomposeGeometryMethods(const TSourceLoc&, TIntermTyped*& node, TIntermNode* arguments); - void pushFrontArguments(TIntermTyped* front, TIntermTyped*& arguments); - void addInputArgumentConversions(const TFunction&, TIntermTyped*&); - void expandArguments(const TSourceLoc&, const TFunction&, TIntermTyped*&); - TIntermTyped* addOutputArgumentConversions(const TFunction&, TIntermOperator&); - void builtInOpCheck(const TSourceLoc&, const TFunction&, TIntermOperator&); - TFunction* makeConstructorCall(const TSourceLoc&, const TType&); - void handleSemantic(TSourceLoc, TQualifier&, TBuiltInVariable, const TString& upperCase); - void handlePackOffset(const TSourceLoc&, TQualifier&, const glslang::TString& location, - const glslang::TString* component); - void handleRegister(const TSourceLoc&, TQualifier&, const glslang::TString* profile, const glslang::TString& desc, - int subComponent, const glslang::TString*); - TIntermTyped* convertConditionalExpression(const TSourceLoc&, TIntermTyped*, bool mustBeScalar = true); - TIntermAggregate* handleSamplerTextureCombine(const TSourceLoc& loc, TIntermTyped* argTex, TIntermTyped* argSampler); - - bool parseMatrixSwizzleSelector(const TSourceLoc&, const TString&, int cols, int rows, TSwizzleSelectors&); - int getMatrixComponentsColumn(int rows, const TSwizzleSelectors&); - void assignError(const TSourceLoc&, const char* op, TString left, TString right); - void unaryOpError(const TSourceLoc&, const char* op, TString operand); - void binaryOpError(const TSourceLoc&, const char* op, TString left, TString right); - void variableCheck(TIntermTyped*& nodePtr); - void constantValueCheck(TIntermTyped* node, const char* token); - void integerCheck(const TIntermTyped* node, const char* token); - void globalCheck(const TSourceLoc&, const char* token); - bool constructorError(const TSourceLoc&, TIntermNode*, TFunction&, TOperator, TType&); - bool constructorTextureSamplerError(const TSourceLoc&, const TFunction&); - void arraySizeCheck(const TSourceLoc&, TIntermTyped* expr, TArraySize&); - void arraySizeRequiredCheck(const TSourceLoc&, const TArraySizes&); - void structArrayCheck(const TSourceLoc&, const TType& structure); - void arrayDimMerge(TType& type, const TArraySizes* sizes); - bool voidErrorCheck(const TSourceLoc&, const TString&, TBasicType); - void globalQualifierFix(const TSourceLoc&, TQualifier&); - bool structQualifierErrorCheck(const TSourceLoc&, const TPublicType& pType); - void mergeQualifiers(TQualifier& dst, const TQualifier& src); - int computeSamplerTypeIndex(TSampler&); - TSymbol* redeclareBuiltinVariable(const TSourceLoc&, const TString&, const TQualifier&, const TShaderQualifiers&); - void paramFix(TType& type); - void specializationCheck(const TSourceLoc&, const TType&, const char* op); - - void setLayoutQualifier(const TSourceLoc&, TQualifier&, TString&); - void setLayoutQualifier(const TSourceLoc&, TQualifier&, TString&, const TIntermTyped*); - void mergeObjectLayoutQualifiers(TQualifier& dest, const TQualifier& src, bool inheritOnly); - void checkNoShaderLayouts(const TSourceLoc&, const TShaderQualifiers&); - - const TFunction* findFunction(const TSourceLoc& loc, 
TFunction& call, bool& builtIn, int& thisDepth, TIntermTyped*& args); - void declareTypedef(const TSourceLoc&, const TString& identifier, const TType&); - void declareStruct(const TSourceLoc&, TString& structName, TType&); - TSymbol* lookupUserType(const TString&, TType&); - TIntermNode* declareVariable(const TSourceLoc&, const TString& identifier, TType&, TIntermTyped* initializer = 0); - void lengthenList(const TSourceLoc&, TIntermSequence& list, int size, TIntermTyped* scalarInit); - TIntermTyped* handleConstructor(const TSourceLoc&, TIntermTyped*, const TType&); - TIntermTyped* addConstructor(const TSourceLoc&, TIntermTyped*, const TType&); - TIntermTyped* convertArray(TIntermTyped*, const TType&); - TIntermTyped* constructAggregate(TIntermNode*, const TType&, int, const TSourceLoc&); - TIntermTyped* constructBuiltIn(const TType&, TOperator, TIntermTyped*, const TSourceLoc&, bool subset); - void declareBlock(const TSourceLoc&, TType&, const TString* instanceName = 0, TArraySizes* arraySizes = 0); - void declareStructBufferCounter(const TSourceLoc& loc, const TType& bufferType, const TString& name); - void fixBlockLocations(const TSourceLoc&, TQualifier&, TTypeList&, bool memberWithLocation, bool memberWithoutLocation); - void fixBlockXfbOffsets(TQualifier&, TTypeList&); - void fixBlockUniformOffsets(const TQualifier&, TTypeList&); - void addQualifierToExisting(const TSourceLoc&, TQualifier, const TString& identifier); - void addQualifierToExisting(const TSourceLoc&, TQualifier, TIdentifierList&); - void updateStandaloneQualifierDefaults(const TSourceLoc&, const TPublicType&); - void wrapupSwitchSubsequence(TIntermAggregate* statements, TIntermNode* branchNode); - TIntermNode* addSwitch(const TSourceLoc&, TIntermTyped* expression, TIntermAggregate* body, TSelectionControl control); - - void updateImplicitArraySize(const TSourceLoc&, TIntermNode*, int index); - - void nestLooping() { ++loopNestingLevel; } - void unnestLooping() { --loopNestingLevel; } - void nestAnnotations() { ++annotationNestingLevel; } - void unnestAnnotations() { --annotationNestingLevel; } - int getAnnotationNestingLevel() { return annotationNestingLevel; } - void pushScope() { symbolTable.push(); } - void popScope() { symbolTable.pop(0); } - - void pushThisScope(const TType&, const TVector&); - void popThisScope() { symbolTable.pop(0); } - - void pushImplicitThis(TVariable* thisParameter) { implicitThisStack.push_back(thisParameter); } - void popImplicitThis() { implicitThisStack.pop_back(); } - TVariable* getImplicitThis(int thisDepth) const { return implicitThisStack[implicitThisStack.size() - thisDepth]; } - - void pushNamespace(const TString& name); - void popNamespace(); - void getFullNamespaceName(const TString*&) const; - void addScopeMangler(TString&); - - void pushSwitchSequence(TIntermSequence* sequence) { switchSequenceStack.push_back(sequence); } - void popSwitchSequence() { switchSequenceStack.pop_back(); } - - virtual void growGlobalUniformBlock(const TSourceLoc&, TType&, const TString& memberName, TTypeList* typeList = nullptr) override; - - // Apply L-value conversions. E.g, turning a write to a RWTexture into an ImageStore. 
- TIntermTyped* handleLvalue(const TSourceLoc&, const char* op, TIntermTyped*& node); - TIntermTyped* handleSamplerLvalue(const TSourceLoc&, const char* op, TIntermTyped*& node); - TIntermTyped* setOpaqueLvalue(TIntermTyped* left, TIntermTyped* right); - bool lValueErrorCheck(const TSourceLoc&, const char* op, TIntermTyped*) override; - - TLayoutFormat getLayoutFromTxType(const TSourceLoc&, const TType&); - - bool handleOutputGeometry(const TSourceLoc&, const TLayoutGeometry& geometry); - bool handleInputGeometry(const TSourceLoc&, const TLayoutGeometry& geometry); - - // Determine selection control from attributes - TSelectionControl handleSelectionControl(const TAttributeMap& attributes) const; - - // Determine loop control from attributes - TLoopControl handleLoopControl(const TAttributeMap& attributes) const; - - // Potentially rename shader entry point function - void renameShaderFunction(const TString*& name) const; - - // Share struct buffer deep types - void shareStructBufferType(TType&); - - // Set texture return type of the given sampler. Returns success (not all types are valid). - bool setTextureReturnType(TSampler& sampler, const TType& retType, const TSourceLoc& loc); - - // Obtain the sampler return type of the given sampler in retType. - void getTextureReturnType(const TSampler& sampler, TType& retType) const; - -protected: - struct TFlattenData { - TFlattenData() : nextBinding(TQualifier::layoutBindingEnd), - nextLocation(TQualifier::layoutLocationEnd) { } - TFlattenData(int nb, int nl) : nextBinding(nb), nextLocation(nl) { } - - TVector members; // individual flattened variables - TVector offsets; // offset to next tree level - unsigned int nextBinding; // next binding to use. - unsigned int nextLocation; // next location to use - }; - - void fixConstInit(const TSourceLoc&, const TString& identifier, TType& type, TIntermTyped*& initializer); - void inheritGlobalDefaults(TQualifier& dst) const; - TVariable* makeInternalVariable(const char* name, const TType&) const; - TVariable* makeInternalVariable(const TString& name, const TType& type) const { - return makeInternalVariable(name.c_str(), type); - } - TIntermSymbol* makeInternalVariableNode(const TSourceLoc&, const char* name, const TType&) const; - TVariable* declareNonArray(const TSourceLoc&, const TString& identifier, const TType&, bool track); - void declareArray(const TSourceLoc&, const TString& identifier, const TType&, TSymbol*&, bool track); - TIntermNode* executeInitializer(const TSourceLoc&, TIntermTyped* initializer, TVariable* variable, bool flattened); - TIntermTyped* convertInitializerList(const TSourceLoc&, const TType&, TIntermTyped* initializer, TIntermTyped* scalarInit); - bool isScalarConstructor(const TIntermNode*); - TOperator mapAtomicOp(const TSourceLoc& loc, TOperator op, bool isImage); - - // Return true if this node requires L-value conversion (e.g, to an imageStore). 
- bool shouldConvertLValue(const TIntermNode*) const; - - // Array and struct flattening - TIntermTyped* flattenAccess(TIntermTyped* base, int member); - TIntermTyped* flattenAccess(int uniqueId, int member, const TType&, int subset = -1); - bool shouldFlatten(const TType&) const; - bool wasFlattened(const TIntermTyped* node) const; - bool wasFlattened(int id) const { return flattenMap.find(id) != flattenMap.end(); } - int addFlattenedMember(const TVariable&, const TType&, TFlattenData&, const TString& name, bool linkage, - const TQualifier& outerQualifier, const TArraySizes* builtInArraySizes); - bool isFinalFlattening(const TType& type) const { return !(type.isStruct() || type.isArray()); } - - // Structure splitting (splits interstage built-in types into its own struct) - void split(const TVariable&); - void splitBuiltIn(const TString& baseName, const TType& memberType, const TArraySizes*, const TQualifier&); - const TType& split(const TType& type, const TString& name, const TQualifier&); - bool wasSplit(const TIntermTyped* node) const; - bool wasSplit(int id) const { return splitNonIoVars.find(id) != splitNonIoVars.end(); } - TVariable* getSplitNonIoVar(int id) const; - void addPatchConstantInvocation(); - TIntermTyped* makeIntegerIndex(TIntermTyped*); - - void fixBuiltInIoType(TType&); - - void flatten(const TVariable& variable, bool linkage); - int flatten(const TVariable& variable, const TType&, TFlattenData&, TString name, bool linkage, - const TQualifier& outerQualifier, const TArraySizes* builtInArraySizes); - int flattenStruct(const TVariable& variable, const TType&, TFlattenData&, TString name, bool linkage, - const TQualifier& outerQualifier, const TArraySizes* builtInArraySizes); - int flattenArray(const TVariable& variable, const TType&, TFlattenData&, TString name, bool linkage, - const TQualifier& outerQualifier); - - bool hasUniform(const TQualifier& qualifier) const; - void clearUniform(TQualifier& qualifier); - bool isInputBuiltIn(const TQualifier& qualifier) const; - bool hasInput(const TQualifier& qualifier) const; - void correctOutput(TQualifier& qualifier); - bool isOutputBuiltIn(const TQualifier& qualifier) const; - bool hasOutput(const TQualifier& qualifier) const; - void correctInput(TQualifier& qualifier); - void correctUniform(TQualifier& qualifier); - void clearUniformInputOutput(TQualifier& qualifier); - - // Test method names - bool isStructBufferMethod(const TString& name) const; - void counterBufferType(const TSourceLoc& loc, TType& type); - - // Return standard sample position array - TIntermConstantUnion* getSamplePosArray(int count); - - TType* getStructBufferContentType(const TType& type) const; - bool isStructBufferType(const TType& type) const { return getStructBufferContentType(type) != nullptr; } - TIntermTyped* indexStructBufferContent(const TSourceLoc& loc, TIntermTyped* buffer) const; - TIntermTyped* getStructBufferCounter(const TSourceLoc& loc, TIntermTyped* buffer); - TString getStructBuffCounterName(const TString&) const; - void addStructBuffArguments(const TSourceLoc& loc, TIntermAggregate*&); - void addStructBufferHiddenCounterParam(const TSourceLoc& loc, TParameter&, TIntermAggregate*&); - - // Return true if this type is a reference. This is not currently a type method in case that's - // a language specific answer. - bool isReference(const TType& type) const { return isStructBufferType(type); } - - // Return true if this a buffer type that has an associated counter buffer. 
- bool hasStructBuffCounter(const TType&) const; - - // Finalization step: remove unused buffer blocks from linkage (we don't know until the - // shader is entirely compiled) - void removeUnusedStructBufferCounters(); - - static bool isClipOrCullDistance(TBuiltInVariable); - static bool isClipOrCullDistance(const TQualifier& qual) { return isClipOrCullDistance(qual.builtIn); } - static bool isClipOrCullDistance(const TType& type) { return isClipOrCullDistance(type.getQualifier()); } - - // Pass through to base class after remembering built-in mappings. - using TParseContextBase::trackLinkage; - void trackLinkage(TSymbol& variable) override; - - void finish() override; // post-processing - - // Linkage symbol helpers - TIntermSymbol* findTessLinkageSymbol(TBuiltInVariable biType) const; - - // Current state of parsing - int annotationNestingLevel; // 0 if outside all annotations - - HlslParseContext(HlslParseContext&); - HlslParseContext& operator=(HlslParseContext&); - - static const int maxSamplerIndex = EsdNumDims * (EbtNumTypes * (2 * 2 * 2)); // see computeSamplerTypeIndex() - TQualifier globalBufferDefaults; - TQualifier globalUniformDefaults; - TQualifier globalInputDefaults; - TQualifier globalOutputDefaults; - TString currentCaller; // name of last function body entered (not valid when at global scope) - TIdSetType inductiveLoopIds; - TVector needsIndexLimitationChecking; - - // - // Geometry shader input arrays: - // - array sizing is based on input primitive and/or explicit size - // - // Tessellation control output arrays: - // - array sizing is based on output layout(vertices=...) and/or explicit size - // - // Both: - // - array sizing is retroactive - // - built-in block redeclarations interact with this - // - // Design: - // - use a per-context "resize-list", a list of symbols whose array sizes - // can be fixed - // - // - the resize-list starts empty at beginning of user-shader compilation, it does - // not have built-ins in it - // - // - on built-in array use: copyUp() symbol and add it to the resize-list - // - // - on user array declaration: add it to the resize-list - // - // - on block redeclaration: copyUp() symbol and add it to the resize-list - // * note, that appropriately gives an error if redeclaring a block that - // was already used and hence already copied-up - // - // - on seeing a layout declaration that sizes the array, fix everything in the - // resize-list, giving errors for mismatch - // - // - on seeing an array size declaration, give errors on mismatch between it and previous - // array-sizing declarations - // - TVector ioArraySymbolResizeList; - - TMap flattenMap; - - // IO-type map. Maps a pure symbol-table form of a structure-member list into - // each of the (up to) three kinds of IO, as each as different allowed decorations, - // but HLSL allows mixing all in the same structure. - struct tIoKinds { - TTypeList* input; - TTypeList* output; - TTypeList* uniform; - }; - TMap ioTypeMap; - - // Structure splitting data: - TMap splitNonIoVars; // variables with the built-in interstage IO removed, indexed by unique ID. - - // Structuredbuffer shared types. Typically there are only a few. - TVector structBufferTypes; - - // This tracks texture sample user structure return types. Only a limited number are supported, as - // may fit in TSampler::structReturnIndex. 
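The "resize-list" described in the design comment above defers sizing of geometry/tessellation IO arrays until a layout declaration implies the size, then fixes every collected symbol retroactively and reports mismatches. A minimal standalone sketch of that pattern follows, with invented symbol names and a hard-coded implied size.

    #include <cstdio>
    #include <string>
    #include <vector>

    struct ArraySymbol { std::string name; int size; }; // size 0 == still unsized

    int main()
    {
        std::vector<ArraySymbol> resizeList;

        // Declarations seen before the sizing layout arrives.
        resizeList.push_back({ "gl_in",  0 });
        resizeList.push_back({ "userIn", 3 });          // explicitly sized by the user

        // e.g. a 'layout(triangles) in;' declaration implies 3 vertices.
        const int impliedSize = 3;
        for (ArraySymbol& sym : resizeList) {
            if (sym.size == 0)
                sym.size = impliedSize;                  // retroactive sizing
            else if (sym.size != impliedSize)
                std::printf("error: %s sized %d, layout implies %d\n",
                            sym.name.c_str(), sym.size, impliedSize);
        }

        for (const ArraySymbol& sym : resizeList)
            std::printf("%s[%d]\n", sym.name.c_str(), sym.size);
        return 0;
    }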
- TVector textureReturnStruct; - - TMap structBufferCounter; - - // The built-in interstage IO map considers e.g, EvqPosition on input and output separately, so that we - // can build the linkage correctly if position appears on both sides. Otherwise, multiple positions - // are considered identical. - struct tInterstageIoData { - tInterstageIoData(TBuiltInVariable bi, TStorageQualifier q) : - builtIn(bi), storage(q) { } - - TBuiltInVariable builtIn; - TStorageQualifier storage; - - // ordering for maps - bool operator<(const tInterstageIoData d) const { - return (builtIn != d.builtIn) ? (builtIn < d.builtIn) : (storage < d.storage); - } - }; - - TMap splitBuiltIns; // split built-ins, indexed by built-in type. - TVariable* inputPatch; - - unsigned int nextInLocation; - unsigned int nextOutLocation; - - TString sourceEntryPointName; - TFunction* entryPointFunction; - TIntermNode* entryPointFunctionBody; - - TString patchConstantFunctionName; // hull shader patch constant function name, from function level attribute. - TMap builtInTessLinkageSymbols; // used for tessellation, finding declared built-ins - - TVector currentTypePrefix; // current scoping prefix for nested structures - TVector implicitThisStack; // currently active 'this' variables for nested structures - - TVariable* gsStreamOutput; // geometry shader stream outputs, for emit (Append method) - - TVariable* clipDistanceVariable; // synthesized clip distance variable (shader might have >1) - TVariable* cullDistanceVariable; // synthesized cull distance variable (shader might have >1) - - static const int maxClipCullRegs = 2; - std::array clipSemanticNSize; // vector, indexed by clip semantic ID - std::array cullSemanticNSize; // vector, indexed by cull semantic ID - - // This tracks the first (mip level) argument to the .mips[][] operator. Since this can be nested as - // in tx.mips[tx.mips[0][1].x][2], we need a stack. We also track the TSourceLoc for error reporting - // purposes. - struct tMipsOperatorData { - tMipsOperatorData(TSourceLoc l, TIntermTyped* m) : loc(l), mipLevel(m) { } - TSourceLoc loc; - TIntermTyped* mipLevel; - }; - - TVector mipsOperatorMipArg; -}; - -// This is the prefix we use for built-in methods to avoid namespace collisions with -// global scope user functions. -// TODO: this would be better as a nonparseable character, but that would -// require changing the scanner. -#define BUILTIN_PREFIX "__BI_" - -} // end namespace glslang - -#endif // HLSL_PARSE_INCLUDED_ diff --git a/deps/glslang/hlsl/hlslParseables.cpp b/deps/glslang/hlsl/hlslParseables.cpp deleted file mode 100644 index db67c39d..00000000 --- a/deps/glslang/hlsl/hlslParseables.cpp +++ /dev/null @@ -1,1238 +0,0 @@ -// -// Copyright (C) 2016 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -// -// Create strings that declare built-in definitions, add built-ins programmatically -// that cannot be expressed in the strings, and establish mappings between -// built-in functions and operators. -// -// Where to put a built-in: -// TBuiltInParseablesHlsl::initialize(version,profile) context-independent textual built-ins; add them to the right string -// TBuiltInParseablesHlsl::initialize(resources,...) context-dependent textual built-ins; add them to the right string -// TBuiltInParseablesHlsl::identifyBuiltIns(...,symbolTable) context-independent programmatic additions/mappings to the symbol table, -// including identifying what extensions are needed if a version does not allow a symbol -// TBuiltInParseablesHlsl::identifyBuiltIns(...,symbolTable, resources) context-dependent programmatic additions/mappings to the -// symbol table, including identifying what extensions are needed if a version does -// not allow a symbol -// - -#include "hlslParseables.h" -#include "hlslParseHelper.h" -#include -#include -#include - -namespace { // anonymous namespace functions - -const bool UseHlslTypes = true; - -const char* BaseTypeName(const char argOrder, const char* scalarName, const char* vecName, const char* matName) -{ - switch (argOrder) { - case 'S': return scalarName; - case 'V': return vecName; - case 'M': return matName; - default: return "UNKNOWN_TYPE"; - } -} - -bool IsSamplerType(const char argType) { return argType == 'S' || argType == 's'; } -bool IsArrayed(const char argOrder) { return argOrder == '@' || argOrder == '&' || argOrder == '#'; } -bool IsTextureMS(const char argOrder) { return argOrder == '$' || argOrder == '&'; } -bool IsBuffer(const char argOrder) { return argOrder == '*' || argOrder == '~'; } -bool IsImage(const char argOrder) { return argOrder == '!' || argOrder == '#' || argOrder == '~'; } -bool IsTextureType(const char argOrder) -{ - return argOrder == '%' || argOrder == '@' || IsTextureMS(argOrder) || IsBuffer(argOrder) | IsImage(argOrder); -} - -// Reject certain combinations that are illegal sample methods. For example, -// 3D arrays. 
-bool IsIllegalSample(const glslang::TString& name, const char* argOrder, int dim0) -{ - const bool isArrayed = IsArrayed(*argOrder); - const bool isMS = IsTextureMS(*argOrder); - const bool isBuffer = IsBuffer(*argOrder); - - // there are no 3D arrayed textures, or 3D SampleCmp(LevelZero) - if (dim0 == 3 && (isArrayed || name == "SampleCmp" || name == "SampleCmpLevelZero")) - return true; - - const int numArgs = int(std::count(argOrder, argOrder + strlen(argOrder), ',')) + 1; - - // Reject invalid offset forms with cubemaps - if (dim0 == 4) { - if ((name == "Sample" && numArgs >= 4) || - (name == "SampleBias" && numArgs >= 5) || - (name == "SampleCmp" && numArgs >= 5) || - (name == "SampleCmpLevelZero" && numArgs >= 5) || - (name == "SampleGrad" && numArgs >= 6) || - (name == "SampleLevel" && numArgs >= 5)) - return true; - } - - const bool isGather = - (name == "Gather" || - name == "GatherRed" || - name == "GatherGreen" || - name == "GatherBlue" || - name == "GatherAlpha"); - - const bool isGatherCmp = - (name == "GatherCmp" || - name == "GatherCmpRed" || - name == "GatherCmpGreen" || - name == "GatherCmpBlue" || - name == "GatherCmpAlpha"); - - // Reject invalid Gathers - if (isGather || isGatherCmp) { - if (dim0 == 1 || dim0 == 3) // there are no 1D or 3D gathers - return true; - - // no offset on cube or cube array gathers - if (dim0 == 4) { - if ((isGather && numArgs > 3) || (isGatherCmp && numArgs > 4)) - return true; - } - } - - // Reject invalid Loads - if (name == "Load" && dim0 == 4) - return true; // Load does not support any cubemaps, arrayed or not. - - // Multisample formats are only 2D and 2Darray - if (isMS && dim0 != 2) - return true; - - // Buffer are only 1D - if (isBuffer && dim0 != 1) - return true; - - return false; -} - -// Return the number of the coordinate arg, if any -int CoordinateArgPos(const glslang::TString& name, bool isTexture) -{ - if (!isTexture || (name == "GetDimensions")) - return -1; // has none - else if (name == "Load") - return 1; - else - return 2; // other texture methods are 2 -} - -// Some texture methods use an addition coordinate dimension for the mip -bool HasMipInCoord(const glslang::TString& name, bool isMS, bool isBuffer, bool isImage) -{ - return name == "Load" && !isMS && !isBuffer && !isImage; -} - -// LOD calculations don't pass the array level in the coordinate. -bool NoArrayCoord(const glslang::TString& name) -{ - return name == "CalculateLevelOfDetail" || name == "CalculateLevelOfDetailUnclamped"; -} - -// Handle IO params marked with > or < -const char* IoParam(glslang::TString& s, const char* nthArgOrder) -{ - if (*nthArgOrder == '>') { // output params - ++nthArgOrder; - s.append("out "); - } else if (*nthArgOrder == '<') { // input params - ++nthArgOrder; - s.append("in "); - } - - return nthArgOrder; -} - -// Handle repeated args -void HandleRepeatArg(const char*& arg, const char*& prev, const char* current) -{ - if (*arg == ',' || *arg == '\0') - arg = prev; - else - prev = current; -} - -// Return true for the end of a single argument key, which can be the end of the string, or -// the comma separator. -inline bool IsEndOfArg(const char* arg) -{ - return arg == nullptr || *arg == '\0' || *arg == ','; -} - -// If this is a fixed vector size, such as V3, return the size. Else return 0. -int FixedVecSize(const char* arg) -{ - while (!IsEndOfArg(arg)) { - if (isdigit(*arg)) - return *arg - '0'; - ++arg; - } - - return 0; // none found. -} - -// Create and return a type name. 
This is done in GLSL, not HLSL conventions, until such -// time as builtins are parsed using the HLSL parser. -// -// order: S = scalar, V = vector, M = matrix -// argType: F = float, D = double, I = int, U = uint, B = bool, S = sampler -// dim0 = vector dimension, or matrix 1st dimension -// dim1 = matrix 2nd dimension -glslang::TString& AppendTypeName(glslang::TString& s, const char* argOrder, const char* argType, int dim0, int dim1) -{ - const bool isTranspose = (argOrder[0] == '^'); - const bool isTexture = IsTextureType(argOrder[0]); - const bool isArrayed = IsArrayed(argOrder[0]); - const bool isSampler = IsSamplerType(argType[0]); - const bool isMS = IsTextureMS(argOrder[0]); - const bool isBuffer = IsBuffer(argOrder[0]); - const bool isImage = IsImage(argOrder[0]); - - char type = *argType; - - if (isTranspose) { // Take transpose of matrix dimensions - std::swap(dim0, dim1); - } else if (isTexture) { - if (type == 'F') // map base type to texture of that type. - type = 'T'; // e.g, int -> itexture, uint -> utexture, etc. - else if (type == 'I') - type = 'i'; - else if (type == 'U') - type = 'u'; - } - - if (isTranspose) - ++argOrder; - - char order = *argOrder; - - if (UseHlslTypes) { - switch (type) { - case '-': s += "void"; break; - case 'F': s += "float"; break; - case 'D': s += "double"; break; - case 'I': s += "int"; break; - case 'U': s += "uint"; break; - case 'B': s += "bool"; break; - case 'S': s += "sampler"; break; - case 's': s += "SamplerComparisonState"; break; - case 'T': s += ((isBuffer && isImage) ? "RWBuffer" : - isBuffer ? "Buffer" : - isImage ? "RWTexture" : "Texture"); break; - case 'i': s += ((isBuffer && isImage) ? "RWBuffer" : - isBuffer ? "Buffer" : - isImage ? "RWTexture" : "Texture"); break; - case 'u': s += ((isBuffer && isImage) ? "RWBuffer" : - isBuffer ? "Buffer" : - isImage ? "RWTexture" : "Texture"); break; - default: s += "UNKNOWN_TYPE"; break; - } - } else { - switch (type) { - case '-': s += "void"; break; - case 'F': s += BaseTypeName(order, "float", "vec", "mat"); break; - case 'D': s += BaseTypeName(order, "double", "dvec", "dmat"); break; - case 'I': s += BaseTypeName(order, "int", "ivec", "imat"); break; - case 'U': s += BaseTypeName(order, "uint", "uvec", "umat"); break; - case 'B': s += BaseTypeName(order, "bool", "bvec", "bmat"); break; - case 'S': s += "sampler"; break; - case 's': s += "samplerShadow"; break; - case 'T': // fall through - case 'i': // ... - case 'u': // ... - if (type != 'T') // create itexture, utexture, etc - s += type; - - s += ((isImage && isBuffer) ? "imageBuffer" : - isImage ? "image" : - isBuffer ? "samplerBuffer" : - "texture"); - break; - - default: s += "UNKNOWN_TYPE"; break; - } - } - - // handle fixed vector sizes, such as float3, and only ever 3. - const int fixedVecSize = FixedVecSize(argOrder); - if (fixedVecSize != 0) - dim0 = dim1 = fixedVecSize; - - // Add sampler dimensions - if (isSampler || isTexture) { - if ((order == 'V' || isTexture) && !isBuffer) { - switch (dim0) { - case 1: s += "1D"; break; - case 2: s += (isMS ? 
"2DMS" : "2D"); break; - case 3: s += "3D"; break; - case 4: s += "Cube"; break; - default: s += "UNKNOWN_SAMPLER"; break; - } - } - } else { - // Non-sampler type: - // verify dimensions - if (((order == 'V' || order == 'M') && (dim0 < 1 || dim0 > 4)) || - (order == 'M' && (dim1 < 1 || dim1 > 4))) { - s += "UNKNOWN_DIMENSION"; - return s; - } - - switch (order) { - case '-': break; // no dimensions for voids - case 'S': break; // no dimensions on scalars - case 'V': - s += ('0' + char(dim0)); - break; - case 'M': - s += ('0' + char(dim0)); - s += 'x'; - s += ('0' + char(dim1)); - break; - default: - break; - } - } - - // handle arrayed textures - if (isArrayed) - s += "Array"; - - // For HLSL, append return type for texture types - if (UseHlslTypes) { - switch (type) { - case 'i': s += ""; break; - case 'u': s += ""; break; - case 'T': s += ""; break; - default: break; - } - } - - return s; -} - -// The GLSL parser can be used to parse a subset of HLSL prototypes. However, many valid HLSL prototypes -// are not valid GLSL prototypes. This rejects the invalid ones. Thus, there is a single switch below -// to enable creation of the entire HLSL space. -inline bool IsValid(const char* cname, char retOrder, char retType, char argOrder, char argType, int dim0, int dim1) -{ - const bool isVec = (argOrder == 'V'); - const bool isMat = (argOrder == 'M'); - - const std::string name(cname); - - // these do not have vec1 versions - if (dim0 == 1 && (name == "normalize" || name == "reflect" || name == "refract")) - return false; - - if (!IsTextureType(argOrder) && (isVec && dim0 == 1)) // avoid vec1 - return false; - - if (UseHlslTypes) { - // NO further restrictions for HLSL - } else { - // GLSL parser restrictions - if ((isMat && (argType == 'I' || argType == 'U' || argType == 'B')) || - (retOrder == 'M' && (retType == 'I' || retType == 'U' || retType == 'B'))) - return false; - - if (isMat && dim0 == 1 && dim1 == 1) // avoid mat1x1 - return false; - - if (isMat && dim1 == 1) // TODO: avoid mat Nx1 until we find the right GLSL profile - return false; - - if (name == "GetRenderTargetSamplePosition" || - name == "tex1D" || - name == "tex1Dgrad") - return false; - } - - return true; -} - -// return position of end of argument specifier -inline const char* FindEndOfArg(const char* arg) -{ - while (!IsEndOfArg(arg)) - ++arg; - - return *arg == '\0' ? nullptr : arg; -} - -// Return pointer to beginning of Nth argument specifier in the string. -inline const char* NthArg(const char* arg, int n) -{ - for (int x=0; x 0) // handle fixed sized vectors - dim0Min = dim0Max = fixedVecSize; -} - -} // end anonymous namespace - -namespace glslang { - -TBuiltInParseablesHlsl::TBuiltInParseablesHlsl() -{ -} - -// -// Handle creation of mat*mat specially, since it doesn't fall conveniently out of -// the generic prototype creation code below. -// -void TBuiltInParseablesHlsl::createMatTimesMat() -{ - TString& s = commonBuiltins; - - const int first = (UseHlslTypes ? 
1 : 2); - - for (int xRows = first; xRows <=4; xRows++) { - for (int xCols = first; xCols <=4; xCols++) { - const int yRows = xCols; - for (int yCols = first; yCols <=4; yCols++) { - const int retRows = xRows; - const int retCols = yCols; - - // Create a mat * mat of the appropriate dimensions - AppendTypeName(s, "M", "F", retRows, retCols); // add return type - s.append(" "); // space between type and name - s.append("mul"); // intrinsic name - s.append("("); // open paren - - AppendTypeName(s, "M", "F", xRows, xCols); // add X input - s.append(", "); - AppendTypeName(s, "M", "F", yRows, yCols); // add Y input - - s.append(");\n"); // close paren - } - - // Create M*V - AppendTypeName(s, "V", "F", xRows, 1); // add return type - s.append(" "); // space between type and name - s.append("mul"); // intrinsic name - s.append("("); // open paren - - AppendTypeName(s, "M", "F", xRows, xCols); // add X input - s.append(", "); - AppendTypeName(s, "V", "F", xCols, 1); // add Y input - - s.append(");\n"); // close paren - - // Create V*M - AppendTypeName(s, "V", "F", xCols, 1); // add return type - s.append(" "); // space between type and name - s.append("mul"); // intrinsic name - s.append("("); // open paren - - AppendTypeName(s, "V", "F", xRows, 1); // add Y input - s.append(", "); - AppendTypeName(s, "M", "F", xRows, xCols); // add X input - - s.append(");\n"); // close paren - } - } -} - -// -// Add all context-independent built-in functions and variables that are present -// for the given version and profile. Share common ones across stages, otherwise -// make stage-specific entries. -// -// Most built-ins variables can be added as simple text strings. Some need to -// be added programmatically, which is done later in IdentifyBuiltIns() below. -// -void TBuiltInParseablesHlsl::initialize(int /*version*/, EProfile /*profile*/, const SpvVersion& /*spvVersion*/) -{ - static const EShLanguageMask EShLangAll = EShLanguageMask(EShLangCount - 1); - - // These are the actual stage masks defined in the documentation, in case they are - // needed for future validation. For now, they are commented out, and set below - // to EShLangAll, to allow any intrinsic to be used in any shader, which is legal - // if it is not called. - // - // static const EShLanguageMask EShLangPSCS = EShLanguageMask(EShLangFragmentMask | EShLangComputeMask); - // static const EShLanguageMask EShLangVSPSGS = EShLanguageMask(EShLangVertexMask | EShLangFragmentMask | EShLangGeometryMask); - // static const EShLanguageMask EShLangCS = EShLangComputeMask; - // static const EShLanguageMask EShLangPS = EShLangFragmentMask; - // static const EShLanguageMask EShLangHS = EShLangTessControlMask; - - // This set uses EShLangAll for everything. - static const EShLanguageMask EShLangPSCS = EShLangAll; - static const EShLanguageMask EShLangVSPSGS = EShLangAll; - static const EShLanguageMask EShLangCS = EShLangAll; - static const EShLanguageMask EShLangPS = EShLangAll; - static const EShLanguageMask EShLangHS = EShLangAll; - static const EShLanguageMask EShLangGS = EShLangAll; - - // This structure encodes the prototype information for each HLSL intrinsic. - // Because explicit enumeration would be cumbersome, it's procedurally generated. - // orderKey can be: - // S = scalar, V = vector, M = matrix, - = void - // typekey can be: - // D = double, F = float, U = uint, I = int, B = bool, S = sampler, s = shadowSampler - // An empty order or type key repeats the first one. E.g: SVM,, means 3 args each of SVM. 
- // '>' as first letter of order creates an output parameter - // '<' as first letter of order creates an input parameter - // '^' as first letter of order takes transpose dimensions - // '%' as first letter of order creates texture of given F/I/U type (texture, itexture, etc) - // '@' as first letter of order creates arrayed texture of given type - // '$' / '&' as first letter of order creates 2DMS / 2DMSArray textures - // '*' as first letter of order creates buffer object - // '!' as first letter of order creates image object - // '#' as first letter of order creates arrayed image object - // '~' as first letter of order creates an image buffer object - - static const struct { - const char* name; // intrinsic name - const char* retOrder; // return type key: empty matches order of 1st argument - const char* retType; // return type key: empty matches type of 1st argument - const char* argOrder; // argument order key - const char* argType; // argument type key - unsigned int stage; // stage mask - bool method; // true if it's a method. - } hlslIntrinsics[] = { - // name retOrd retType argOrder argType stage mask - // ----------------------------------------------------------------------------------------------- - { "abort", nullptr, nullptr, "-", "-", EShLangAll, false }, - { "abs", nullptr, nullptr, "SVM", "DFUI", EShLangAll, false }, - { "acos", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "all", "S", "B", "SVM", "BFIU", EShLangAll, false }, - { "AllMemoryBarrier", nullptr, nullptr, "-", "-", EShLangCS, false }, - { "AllMemoryBarrierWithGroupSync", nullptr, nullptr, "-", "-", EShLangCS, false }, - { "any", "S", "B", "SVM", "BFIU", EShLangAll, false }, - { "asdouble", "S", "D", "S,", "UI,", EShLangAll, false }, - { "asdouble", "V2", "D", "V2,", "UI,", EShLangAll, false }, - { "asfloat", nullptr, "F", "SVM", "BFIU", EShLangAll, false }, - { "asin", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "asint", nullptr, "I", "SVM", "FU", EShLangAll, false }, - { "asuint", nullptr, "U", "SVM", "FU", EShLangAll, false }, - { "atan", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "atan2", nullptr, nullptr, "SVM,", "F,", EShLangAll, false }, - { "ceil", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "CheckAccessFullyMapped", "S", "B" , "S", "U", EShLangPSCS, false }, - { "clamp", nullptr, nullptr, "SVM,,", "FUI,,", EShLangAll, false }, - { "clip", "-", "-", "SVM", "F", EShLangPS, false }, - { "cos", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "cosh", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "countbits", nullptr, nullptr, "SV", "UI", EShLangAll, false }, - { "cross", nullptr, nullptr, "V3,", "F,", EShLangAll, false }, - { "D3DCOLORtoUBYTE4", "V4", "I", "V4", "F", EShLangAll, false }, - { "ddx", nullptr, nullptr, "SVM", "F", EShLangPS, false }, - { "ddx_coarse", nullptr, nullptr, "SVM", "F", EShLangPS, false }, - { "ddx_fine", nullptr, nullptr, "SVM", "F", EShLangPS, false }, - { "ddy", nullptr, nullptr, "SVM", "F", EShLangPS, false }, - { "ddy_coarse", nullptr, nullptr, "SVM", "F", EShLangPS, false }, - { "ddy_fine", nullptr, nullptr, "SVM", "F", EShLangPS, false }, - { "degrees", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "determinant", "S", "F", "M", "F", EShLangAll, false }, - { "DeviceMemoryBarrier", nullptr, nullptr, "-", "-", EShLangPSCS, false }, - { "DeviceMemoryBarrierWithGroupSync", nullptr, nullptr, "-", "-", EShLangCS, false }, - { "distance", "S", "F", "V,", "F,", EShLangAll, false }, - { "dot", "S", nullptr, "SV,", 
"FI,", EShLangAll, false }, - { "dst", nullptr, nullptr, "V4,", "F,", EShLangAll, false }, - // { "errorf", "-", "-", "", "", EShLangAll, false }, TODO: varargs - { "EvaluateAttributeAtCentroid", nullptr, nullptr, "SVM", "F", EShLangPS, false }, - { "EvaluateAttributeAtSample", nullptr, nullptr, "SVM,S", "F,U", EShLangPS, false }, - { "EvaluateAttributeSnapped", nullptr, nullptr, "SVM,V2", "F,I", EShLangPS, false }, - { "exp", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "exp2", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "f16tof32", nullptr, "F", "SV", "U", EShLangAll, false }, - { "f32tof16", nullptr, "U", "SV", "F", EShLangAll, false }, - { "faceforward", nullptr, nullptr, "V,,", "F,,", EShLangAll, false }, - { "firstbithigh", nullptr, nullptr, "SV", "UI", EShLangAll, false }, - { "firstbitlow", nullptr, nullptr, "SV", "UI", EShLangAll, false }, - { "floor", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "fma", nullptr, nullptr, "SVM,,", "D,,", EShLangAll, false }, - { "fmod", nullptr, nullptr, "SVM,", "F,", EShLangAll, false }, - { "frac", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "frexp", nullptr, nullptr, "SVM,", "F,", EShLangAll, false }, - { "fwidth", nullptr, nullptr, "SVM", "F", EShLangPS, false }, - { "GetRenderTargetSampleCount", "S", "U", "-", "-", EShLangAll, false }, - { "GetRenderTargetSamplePosition", "V2", "F", "V1", "I", EShLangAll, false }, - { "GroupMemoryBarrier", nullptr, nullptr, "-", "-", EShLangCS, false }, - { "GroupMemoryBarrierWithGroupSync", nullptr, nullptr, "-", "-", EShLangCS, false }, - { "InterlockedAdd", "-", "-", "SVM,,>", "UI,,", EShLangPSCS, false }, - { "InterlockedAdd", "-", "-", "SVM,", "UI,", EShLangPSCS, false }, - { "InterlockedAnd", "-", "-", "SVM,,>", "UI,,", EShLangPSCS, false }, - { "InterlockedAnd", "-", "-", "SVM,", "UI,", EShLangPSCS, false }, - { "InterlockedCompareExchange", "-", "-", "SVM,,,>", "UI,,,", EShLangPSCS, false }, - { "InterlockedCompareStore", "-", "-", "SVM,,", "UI,,", EShLangPSCS, false }, - { "InterlockedExchange", "-", "-", "SVM,,>", "UI,,", EShLangPSCS, false }, - { "InterlockedMax", "-", "-", "SVM,,>", "UI,,", EShLangPSCS, false }, - { "InterlockedMax", "-", "-", "SVM,", "UI,", EShLangPSCS, false }, - { "InterlockedMin", "-", "-", "SVM,,>", "UI,,", EShLangPSCS, false }, - { "InterlockedMin", "-", "-", "SVM,", "UI,", EShLangPSCS, false }, - { "InterlockedOr", "-", "-", "SVM,,>", "UI,,", EShLangPSCS, false }, - { "InterlockedOr", "-", "-", "SVM,", "UI,", EShLangPSCS, false }, - { "InterlockedXor", "-", "-", "SVM,,>", "UI,,", EShLangPSCS, false }, - { "InterlockedXor", "-", "-", "SVM,", "UI,", EShLangPSCS, false }, - { "isfinite", nullptr, "B" , "SVM", "F", EShLangAll, false }, - { "isinf", nullptr, "B" , "SVM", "F", EShLangAll, false }, - { "isnan", nullptr, "B" , "SVM", "F", EShLangAll, false }, - { "ldexp", nullptr, nullptr, "SVM,", "F,", EShLangAll, false }, - { "length", "S", "F", "SV", "F", EShLangAll, false }, - { "lerp", nullptr, nullptr, "VM,,", "F,,", EShLangAll, false }, - { "lerp", nullptr, nullptr, "SVM,,S", "F,,", EShLangAll, false }, - { "lit", "V4", "F", "S,,", "F,,", EShLangAll, false }, - { "log", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "log10", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "log2", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "mad", nullptr, nullptr, "SVM,,", "DFUI,,", EShLangAll, false }, - { "max", nullptr, nullptr, "SVM,", "FIU,", EShLangAll, false }, - { "min", nullptr, nullptr, "SVM,", "FIU,", EShLangAll, 
false }, - { "modf", nullptr, nullptr, "SVM,>", "FIU,", EShLangAll, false }, - { "msad4", "V4", "U", "S,V2,V4", "U,,", EShLangAll, false }, - { "mul", "S", nullptr, "S,S", "FI,", EShLangAll, false }, - { "mul", "V", nullptr, "S,V", "FI,", EShLangAll, false }, - { "mul", "M", nullptr, "S,M", "FI,", EShLangAll, false }, - { "mul", "V", nullptr, "V,S", "FI,", EShLangAll, false }, - { "mul", "S", nullptr, "V,V", "FI,", EShLangAll, false }, - { "mul", "M", nullptr, "M,S", "FI,", EShLangAll, false }, - // mat*mat form of mul is handled in createMatTimesMat() - { "noise", "S", "F", "V", "F", EShLangPS, false }, - { "normalize", nullptr, nullptr, "V", "F", EShLangAll, false }, - { "pow", nullptr, nullptr, "SVM,", "F,", EShLangAll, false }, - // { "printf", "-", "-", "", "", EShLangAll, false }, TODO: varargs - { "Process2DQuadTessFactorsAvg", "-", "-", "V4,V2,>V4,>V2,", "F,,,,", EShLangHS, false }, - { "Process2DQuadTessFactorsMax", "-", "-", "V4,V2,>V4,>V2,", "F,,,,", EShLangHS, false }, - { "Process2DQuadTessFactorsMin", "-", "-", "V4,V2,>V4,>V2,", "F,,,,", EShLangHS, false }, - { "ProcessIsolineTessFactors", "-", "-", "S,,>,>", "F,,,", EShLangHS, false }, - { "ProcessQuadTessFactorsAvg", "-", "-", "V4,S,>V4,>V2,", "F,,,,", EShLangHS, false }, - { "ProcessQuadTessFactorsMax", "-", "-", "V4,S,>V4,>V2,", "F,,,,", EShLangHS, false }, - { "ProcessQuadTessFactorsMin", "-", "-", "V4,S,>V4,>V2,", "F,,,,", EShLangHS, false }, - { "ProcessTriTessFactorsAvg", "-", "-", "V3,S,>V3,>S,", "F,,,,", EShLangHS, false }, - { "ProcessTriTessFactorsMax", "-", "-", "V3,S,>V3,>S,", "F,,,,", EShLangHS, false }, - { "ProcessTriTessFactorsMin", "-", "-", "V3,S,>V3,>S,", "F,,,,", EShLangHS, false }, - { "radians", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "rcp", nullptr, nullptr, "SVM", "FD", EShLangAll, false }, - { "reflect", nullptr, nullptr, "V,", "F,", EShLangAll, false }, - { "refract", nullptr, nullptr, "V,V,S", "F,,", EShLangAll, false }, - { "reversebits", nullptr, nullptr, "SV", "UI", EShLangAll, false }, - { "round", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "rsqrt", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "saturate", nullptr, nullptr , "SVM", "F", EShLangAll, false }, - { "sign", nullptr, nullptr, "SVM", "FI", EShLangAll, false }, - { "sin", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "sincos", "-", "-", "SVM,>,>", "F,,", EShLangAll, false }, - { "sinh", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "smoothstep", nullptr, nullptr, "SVM,,", "F,,", EShLangAll, false }, - { "sqrt", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "step", nullptr, nullptr, "SVM,", "F,", EShLangAll, false }, - { "tan", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "tanh", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - { "tex1D", "V4", "F", "V1,S", "S,F", EShLangPS, false }, - { "tex1D", "V4", "F", "V1,S,V1,", "S,F,,", EShLangPS, false }, - { "tex1Dbias", "V4", "F", "V1,V4", "S,F", EShLangPS, false }, - { "tex1Dgrad", "V4", "F", "V1,,,", "S,F,,", EShLangPS, false }, - { "tex1Dlod", "V4", "F", "V1,V4", "S,F", EShLangPS, false }, - { "tex1Dproj", "V4", "F", "V1,V4", "S,F", EShLangPS, false }, - { "tex2D", "V4", "F", "V2,", "S,F", EShLangPS, false }, - { "tex2D", "V4", "F", "V2,,,", "S,F,,", EShLangPS, false }, - { "tex2Dbias", "V4", "F", "V2,V4", "S,F", EShLangPS, false }, - { "tex2Dgrad", "V4", "F", "V2,,,", "S,F,,", EShLangPS, false }, - { "tex2Dlod", "V4", "F", "V2,V4", "S,F", EShLangPS, false }, - { "tex2Dproj", "V4", "F", "V2,V4", "S,F", EShLangPS, 
false }, - { "tex3D", "V4", "F", "V3,", "S,F", EShLangPS, false }, - { "tex3D", "V4", "F", "V3,,,", "S,F,,", EShLangPS, false }, - { "tex3Dbias", "V4", "F", "V3,V4", "S,F", EShLangPS, false }, - { "tex3Dgrad", "V4", "F", "V3,,,", "S,F,,", EShLangPS, false }, - { "tex3Dlod", "V4", "F", "V3,V4", "S,F", EShLangPS, false }, - { "tex3Dproj", "V4", "F", "V3,V4", "S,F", EShLangPS, false }, - { "texCUBE", "V4", "F", "V4,V3", "S,F", EShLangPS, false }, - { "texCUBE", "V4", "F", "V4,V3,,", "S,F,,", EShLangPS, false }, - { "texCUBEbias", "V4", "F", "V4,", "S,F", EShLangPS, false }, - { "texCUBEgrad", "V4", "F", "V4,V3,,", "S,F,,", EShLangPS, false }, - { "texCUBElod", "V4", "F", "V4,", "S,F", EShLangPS, false }, - { "texCUBEproj", "V4", "F", "V4,", "S,F", EShLangPS, false }, - { "transpose", "^M", nullptr, "M", "FUIB", EShLangAll, false }, - { "trunc", nullptr, nullptr, "SVM", "F", EShLangAll, false }, - - // Texture object methods. Return type can be overridden by shader declaration. - // !O = no offset, O = offset - { "Sample", /*!O*/ "V4", nullptr, "%@,S,V", "FIU,S,F", EShLangPS, true }, - { "Sample", /* O*/ "V4", nullptr, "%@,S,V,", "FIU,S,F,I", EShLangPS, true }, - - { "SampleBias", /*!O*/ "V4", nullptr, "%@,S,V,S", "FIU,S,F,", EShLangPS, true }, - { "SampleBias", /* O*/ "V4", nullptr, "%@,S,V,S,V", "FIU,S,F,,I", EShLangPS, true }, - - // TODO: FXC accepts int/uint samplers here. unclear what that means. - { "SampleCmp", /*!O*/ "S", "F", "%@,S,V,S", "FIU,s,F,", EShLangPS, true }, - { "SampleCmp", /* O*/ "S", "F", "%@,S,V,S,V", "FIU,s,F,,I", EShLangPS, true }, - - // TODO: FXC accepts int/uint samplers here. unclear what that means. - { "SampleCmpLevelZero", /*!O*/ "S", "F", "%@,S,V,S", "FIU,s,F,F", EShLangPS, true }, - { "SampleCmpLevelZero", /* O*/ "S", "F", "%@,S,V,S,V", "FIU,s,F,F,I", EShLangPS, true }, - - { "SampleGrad", /*!O*/ "V4", nullptr, "%@,S,V,,", "FIU,S,F,,", EShLangAll, true }, - { "SampleGrad", /* O*/ "V4", nullptr, "%@,S,V,,,", "FIU,S,F,,,I", EShLangAll, true }, - - { "SampleLevel", /*!O*/ "V4", nullptr, "%@,S,V,S", "FIU,S,F,", EShLangAll, true }, - { "SampleLevel", /* O*/ "V4", nullptr, "%@,S,V,S,V", "FIU,S,F,,I", EShLangAll, true }, - - { "Load", /*!O*/ "V4", nullptr, "%@,V", "FIU,I", EShLangAll, true }, - { "Load", /* O*/ "V4", nullptr, "%@,V,V", "FIU,I,I", EShLangAll, true }, - { "Load", /* +sampleidex*/ "V4", nullptr, "$&,V,S", "FIU,I,I", EShLangAll, true }, - { "Load", /* +samplindex, offset*/ "V4", nullptr, "$&,V,S,V", "FIU,I,I,I", EShLangAll, true }, - - // RWTexture loads - { "Load", "V4", nullptr, "!#,V", "FIU,I", EShLangAll, true }, - // (RW)Buffer loads - { "Load", "V4", nullptr, "~*1,V", "FIU,I", EShLangAll, true }, - - { "Gather", /*!O*/ "V4", nullptr, "%@,S,V", "FIU,S,F", EShLangAll, true }, - { "Gather", /* O*/ "V4", nullptr, "%@,S,V,V", "FIU,S,F,I", EShLangAll, true }, - - { "CalculateLevelOfDetail", "S", "F", "%@,S,V", "FUI,S,F", EShLangPS, true }, - { "CalculateLevelOfDetailUnclamped", "S", "F", "%@,S,V", "FUI,S,F", EShLangPS, true }, - - { "GetSamplePosition", "V2", "F", "$&2,S", "FUI,I", EShLangVSPSGS,true }, - - // - // UINT Width - // UINT MipLevel, UINT Width, UINT NumberOfLevels - { "GetDimensions", /* 1D */ "-", "-", "%!~1,>S", "FUI,U", EShLangAll, true }, - { "GetDimensions", /* 1D */ "-", "-", "%!~1,>S", "FUI,F", EShLangAll, true }, - { "GetDimensions", /* 1D */ "-", "-", "%1,S,>S,", "FUI,U,,", EShLangAll, true }, - { "GetDimensions", /* 1D */ "-", "-", "%1,S,>S,", "FUI,U,F,", EShLangAll, true }, - - // UINT Width, UINT Elements - // UINT MipLevel, 
UINT Width, UINT Elements, UINT NumberOfLevels - { "GetDimensions", /* 1DArray */ "-", "-", "@#1,>S,", "FUI,U,", EShLangAll, true }, - { "GetDimensions", /* 1DArray */ "-", "-", "@#1,>S,", "FUI,F,", EShLangAll, true }, - { "GetDimensions", /* 1DArray */ "-", "-", "@1,S,>S,,", "FUI,U,,,", EShLangAll, true }, - { "GetDimensions", /* 1DArray */ "-", "-", "@1,S,>S,,", "FUI,U,F,,", EShLangAll, true }, - - // UINT Width, UINT Height - // UINT MipLevel, UINT Width, UINT Height, UINT NumberOfLevels - { "GetDimensions", /* 2D */ "-", "-", "%!2,>S,", "FUI,U,", EShLangAll, true }, - { "GetDimensions", /* 2D */ "-", "-", "%!2,>S,", "FUI,F,", EShLangAll, true }, - { "GetDimensions", /* 2D */ "-", "-", "%2,S,>S,,", "FUI,U,,,", EShLangAll, true }, - { "GetDimensions", /* 2D */ "-", "-", "%2,S,>S,,", "FUI,U,F,,", EShLangAll, true }, - - // UINT Width, UINT Height, UINT Elements - // UINT MipLevel, UINT Width, UINT Height, UINT Elements, UINT NumberOfLevels - { "GetDimensions", /* 2DArray */ "-", "-", "@#2,>S,,", "FUI,U,,", EShLangAll, true }, - { "GetDimensions", /* 2DArray */ "-", "-", "@#2,>S,,", "FUI,F,F,F", EShLangAll, true }, - { "GetDimensions", /* 2DArray */ "-", "-", "@2,S,>S,,,", "FUI,U,,,,", EShLangAll, true }, - { "GetDimensions", /* 2DArray */ "-", "-", "@2,S,>S,,,", "FUI,U,F,,,", EShLangAll, true }, - - // UINT Width, UINT Height, UINT Depth - // UINT MipLevel, UINT Width, UINT Height, UINT Depth, UINT NumberOfLevels - { "GetDimensions", /* 3D */ "-", "-", "%!3,>S,,", "FUI,U,,", EShLangAll, true }, - { "GetDimensions", /* 3D */ "-", "-", "%!3,>S,,", "FUI,F,,", EShLangAll, true }, - { "GetDimensions", /* 3D */ "-", "-", "%3,S,>S,,,", "FUI,U,,,,", EShLangAll, true }, - { "GetDimensions", /* 3D */ "-", "-", "%3,S,>S,,,", "FUI,U,F,,,", EShLangAll, true }, - - // UINT Width, UINT Height - // UINT MipLevel, UINT Width, UINT Height, UINT NumberOfLevels - { "GetDimensions", /* Cube */ "-", "-", "%4,>S,", "FUI,U,", EShLangAll, true }, - { "GetDimensions", /* Cube */ "-", "-", "%4,>S,", "FUI,F,", EShLangAll, true }, - { "GetDimensions", /* Cube */ "-", "-", "%4,S,>S,,", "FUI,U,,,", EShLangAll, true }, - { "GetDimensions", /* Cube */ "-", "-", "%4,S,>S,,", "FUI,U,F,,", EShLangAll, true }, - - // UINT Width, UINT Height, UINT Elements - // UINT MipLevel, UINT Width, UINT Height, UINT Elements, UINT NumberOfLevels - { "GetDimensions", /* CubeArray */ "-", "-", "@4,>S,,", "FUI,U,,", EShLangAll, true }, - { "GetDimensions", /* CubeArray */ "-", "-", "@4,>S,,", "FUI,F,,", EShLangAll, true }, - { "GetDimensions", /* CubeArray */ "-", "-", "@4,S,>S,,,", "FUI,U,,,,", EShLangAll, true }, - { "GetDimensions", /* CubeArray */ "-", "-", "@4,S,>S,,,", "FUI,U,F,,,", EShLangAll, true }, - - // UINT Width, UINT Height, UINT Samples - // UINT Width, UINT Height, UINT Elements, UINT Samples - { "GetDimensions", /* 2DMS */ "-", "-", "$2,>S,,", "FUI,U,,", EShLangAll, true }, - { "GetDimensions", /* 2DMS */ "-", "-", "$2,>S,,", "FUI,U,,", EShLangAll, true }, - { "GetDimensions", /* 2DMSArray */ "-", "-", "&2,>S,,,", "FUI,U,,,", EShLangAll, true }, - { "GetDimensions", /* 2DMSArray */ "-", "-", "&2,>S,,,", "FUI,U,,,", EShLangAll, true }, - - // SM5 texture methods - { "GatherRed", /*!O*/ "V4", nullptr, "%@,S,V", "FIU,S,F", EShLangAll, true }, - { "GatherRed", /* O*/ "V4", nullptr, "%@,S,V,", "FIU,S,F,I", EShLangAll, true }, - { "GatherRed", /* O, status*/ "V4", nullptr, "%@,S,V,,>S", "FIU,S,F,I,U", EShLangAll, true }, - { "GatherRed", /* O-4 */ "V4", nullptr, "%@,S,V,,,,", "FIU,S,F,I,,,", EShLangAll, true }, - { "GatherRed", 
/* O-4, status */"V4", nullptr, "%@,S,V,,,,,S", "FIU,S,F,I,,,,U", EShLangAll, true }, - - { "GatherGreen", /*!O*/ "V4", nullptr, "%@,S,V", "FIU,S,F", EShLangAll, true }, - { "GatherGreen", /* O*/ "V4", nullptr, "%@,S,V,", "FIU,S,F,I", EShLangAll, true }, - { "GatherGreen", /* O, status*/ "V4", nullptr, "%@,S,V,,>S", "FIU,S,F,I,U", EShLangAll, true }, - { "GatherGreen", /* O-4 */ "V4", nullptr, "%@,S,V,,,,", "FIU,S,F,I,,,", EShLangAll, true }, - { "GatherGreen", /* O-4, status */"V4", nullptr, "%@,S,V,,,,,S", "FIU,S,F,I,,,,U", EShLangAll, true }, - - { "GatherBlue", /*!O*/ "V4", nullptr, "%@,S,V", "FIU,S,F", EShLangAll, true }, - { "GatherBlue", /* O*/ "V4", nullptr, "%@,S,V,", "FIU,S,F,I", EShLangAll, true }, - { "GatherBlue", /* O, status*/ "V4", nullptr, "%@,S,V,,>S", "FIU,S,F,I,U", EShLangAll, true }, - { "GatherBlue", /* O-4 */ "V4", nullptr, "%@,S,V,,,,", "FIU,S,F,I,,,", EShLangAll, true }, - { "GatherBlue", /* O-4, status */"V4", nullptr, "%@,S,V,,,,,S", "FIU,S,F,I,,,,U", EShLangAll, true }, - - { "GatherAlpha", /*!O*/ "V4", nullptr, "%@,S,V", "FIU,S,F", EShLangAll, true }, - { "GatherAlpha", /* O*/ "V4", nullptr, "%@,S,V,", "FIU,S,F,I", EShLangAll, true }, - { "GatherAlpha", /* O, status*/ "V4", nullptr, "%@,S,V,,>S", "FIU,S,F,I,U", EShLangAll, true }, - { "GatherAlpha", /* O-4 */ "V4", nullptr, "%@,S,V,,,,", "FIU,S,F,I,,,", EShLangAll, true }, - { "GatherAlpha", /* O-4, status */"V4", nullptr, "%@,S,V,,,,,S", "FIU,S,F,I,,,,U", EShLangAll, true }, - - { "GatherCmp", /*!O*/ "V4", nullptr, "%@,S,V,S", "FIU,s,F,", EShLangAll, true }, - { "GatherCmp", /* O*/ "V4", nullptr, "%@,S,V,S,V", "FIU,s,F,,I", EShLangAll, true }, - { "GatherCmp", /* O, status*/ "V4", nullptr, "%@,S,V,S,V,>S", "FIU,s,F,,I,U", EShLangAll, true }, - { "GatherCmp", /* O-4 */ "V4", nullptr, "%@,S,V,S,V,,,", "FIU,s,F,,I,,,", EShLangAll, true }, - { "GatherCmp", /* O-4, status */"V4", nullptr, "%@,S,V,S,V,,V,S","FIU,s,F,,I,,,,U",EShLangAll, true }, - - { "GatherCmpRed", /*!O*/ "V4", nullptr, "%@,S,V,S", "FIU,s,F,", EShLangAll, true }, - { "GatherCmpRed", /* O*/ "V4", nullptr, "%@,S,V,S,V", "FIU,s,F,,I", EShLangAll, true }, - { "GatherCmpRed", /* O, status*/ "V4", nullptr, "%@,S,V,S,V,>S", "FIU,s,F,,I,U", EShLangAll, true }, - { "GatherCmpRed", /* O-4 */ "V4", nullptr, "%@,S,V,S,V,,,", "FIU,s,F,,I,,,", EShLangAll, true }, - { "GatherCmpRed", /* O-4, status */"V4", nullptr, "%@,S,V,S,V,,V,S","FIU,s,F,,I,,,,U",EShLangAll, true }, - - { "GatherCmpGreen", /*!O*/ "V4", nullptr, "%@,S,V,S", "FIU,s,F,", EShLangAll, true }, - { "GatherCmpGreen", /* O*/ "V4", nullptr, "%@,S,V,S,V", "FIU,s,F,,I", EShLangAll, true }, - { "GatherCmpGreen", /* O, status*/ "V4", nullptr, "%@,S,V,S,V,>S", "FIU,s,F,,I,U", EShLangAll, true }, - { "GatherCmpGreen", /* O-4 */ "V4", nullptr, "%@,S,V,S,V,,,", "FIU,s,F,,I,,,", EShLangAll, true }, - { "GatherCmpGreen", /* O-4, status */"V4", nullptr, "%@,S,V,S,V,,,,S","FIU,s,F,,I,,,,U",EShLangAll, true }, - - { "GatherCmpBlue", /*!O*/ "V4", nullptr, "%@,S,V,S", "FIU,s,F,", EShLangAll, true }, - { "GatherCmpBlue", /* O*/ "V4", nullptr, "%@,S,V,S,V", "FIU,s,F,,I", EShLangAll, true }, - { "GatherCmpBlue", /* O, status*/ "V4", nullptr, "%@,S,V,S,V,>S", "FIU,s,F,,I,U", EShLangAll, true }, - { "GatherCmpBlue", /* O-4 */ "V4", nullptr, "%@,S,V,S,V,,,", "FIU,s,F,,I,,,", EShLangAll, true }, - { "GatherCmpBlue", /* O-4, status */"V4", nullptr, "%@,S,V,S,V,,,,S","FIU,s,F,,I,,,,U",EShLangAll, true }, - - { "GatherCmpAlpha", /*!O*/ "V4", nullptr, "%@,S,V,S", "FIU,s,F,", EShLangAll, true }, - { "GatherCmpAlpha", /* O*/ 
"V4", nullptr, "%@,S,V,S,V", "FIU,s,F,,I", EShLangAll, true }, - { "GatherCmpAlpha", /* O, status*/ "V4", nullptr, "%@,S,V,S,V,>S", "FIU,s,F,,I,U", EShLangAll, true }, - { "GatherCmpAlpha", /* O-4 */ "V4", nullptr, "%@,S,V,S,V,,,", "FIU,s,F,,I,,,", EShLangAll, true }, - { "GatherCmpAlpha", /* O-4, status */"V4", nullptr, "%@,S,V,S,V,,,,S","FIU,s,F,,I,,,,U",EShLangAll, true }, - - // geometry methods - { "Append", "-", "-", "-", "-", EShLangGS , true }, - { "RestartStrip", "-", "-", "-", "-", EShLangGS , true }, - - // Methods for structurebuffers. TODO: wildcard type matching. - { "Load", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "Load2", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "Load3", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "Load4", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "Store", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "Store2", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "Store3", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "Store4", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "GetDimensions", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "InterlockedAdd", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "InterlockedAnd", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "InterlockedCompareExchange", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "InterlockedCompareStore", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "InterlockedExchange", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "InterlockedMax", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "InterlockedMin", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "InterlockedOr", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "InterlockedXor", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "IncrementCounter", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "DecrementCounter", nullptr, nullptr, "-", "-", EShLangAll, true }, - { "Consume", nullptr, nullptr, "-", "-", EShLangAll, true }, - - // Mark end of list, since we want to avoid a range-based for, as some compilers don't handle it yet. - { nullptr, nullptr, nullptr, nullptr, nullptr, 0, false }, - }; - - // Create prototypes for the intrinsics. TODO: Avoid ranged based for until all compilers can handle it. - for (int icount = 0; hlslIntrinsics[icount].name; ++icount) { - const auto& intrinsic = hlslIntrinsics[icount]; - - for (int stage = 0; stage < EShLangCount; ++stage) { // for each stage... - if ((intrinsic.stage & (1< 0 ? std::min(dim0, 3) : dim0; - - s.append(arg > 0 ? ", ": ""); // comma separator if needed - - const char* orderBegin = nthArgOrder; - nthArgOrder = IoParam(s, nthArgOrder); - - // Comma means use the previous argument order and type. - HandleRepeatArg(nthArgOrder, prevArgOrder, orderBegin); - HandleRepeatArg(nthArgType, prevArgType, nthArgType); - - // In case the repeated arg has its own I/O marker - nthArgOrder = IoParam(s, nthArgOrder); - - // arrayed textures have one extra coordinate dimension, except for - // the CalculateLevelOfDetail family. - if (isArrayed && arg == coordArg && !NoArrayCoord(intrinsic.name)) - argDim0++; - - // Some texture methods use an addition arg dimension to hold mip - if (arg == coordArg && mipInCoord) - argDim0++; - - // For textures, the 1D case isn't a 1-vector, but a scalar. 
- if (isTexture && argDim0 == 1 && arg > 0 && *nthArgOrder == 'V') - nthArgOrder = "S"; - - AppendTypeName(s, nthArgOrder, nthArgType, argDim0, dim1); // Add arguments - } - - s.append(");\n"); // close paren and trailing semicolon - } // dim 1 loop - } // dim 0 loop - } // arg type loop - - // skip over special characters - if (isTexture && isalpha(argOrder[1])) - ++argOrder; - if (isdigit(argOrder[1])) - ++argOrder; - } // arg order loop - - if (intrinsic.stage == EShLangAll) // common builtins are only added once. - break; - } - } - - createMatTimesMat(); // handle this case separately, for convenience - - // printf("Common:\n%s\n", getCommonString().c_str()); - // printf("Frag:\n%s\n", getStageString(EShLangFragment).c_str()); - // printf("Vertex:\n%s\n", getStageString(EShLangVertex).c_str()); - // printf("Geo:\n%s\n", getStageString(EShLangGeometry).c_str()); - // printf("TessCtrl:\n%s\n", getStageString(EShLangTessControl).c_str()); - // printf("TessEval:\n%s\n", getStageString(EShLangTessEvaluation).c_str()); - // printf("Compute:\n%s\n", getStageString(EShLangCompute).c_str()); -} - -// -// Add context-dependent built-in functions and variables that are present -// for the given version and profile. All the results are put into just the -// commonBuiltins, because it is called for just a specific stage. So, -// add stage-specific entries to the commonBuiltins, and only if that stage -// was requested. -// -void TBuiltInParseablesHlsl::initialize(const TBuiltInResource& /*resources*/, int /*version*/, EProfile /*profile*/, - const SpvVersion& /*spvVersion*/, EShLanguage /*language*/) -{ -} - -// -// Finish adding/processing context-independent built-in symbols. -// 1) Programmatically add symbols that could not be added by simple text strings above. -// 2) Map built-in functions to operators, for those that will turn into an operation node -// instead of remaining a function call. -// 3) Tag extension-related symbols added to their base version with their extensions, so -// that if an early version has the extension turned off, there is an error reported on use. 
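The hlslIntrinsics table above encodes each prototype with compact order/type keys (S = scalar, V = vector, M = matrix; F/I/U/B/D for the component type), and initialize() expands every entry across the vector and matrix dimensions before appending the text to commonBuiltins. The following standalone sketch uses hypothetical helper names, not glslang's actual code, to show the gist of that expansion for a single entry such as { "abs", nullptr, nullptr, "SVM", "F" }:

// Minimal sketch (hypothetical, not glslang's implementation): expand an
// order/type key such as order='V', type='F', dim=3 into a GLSL-style type
// name like "vec3", mirroring the convention described in the table above.
#include <iostream>
#include <string>

static std::string typeName(char order, char type, int dim)
{
    std::string s;
    switch (type) {
    case 'F': s = (order == 'S') ? "float" : (order == 'V') ? "vec"  : "mat";  break;
    case 'I': s = (order == 'S') ? "int"   : (order == 'V') ? "ivec" : "imat"; break;
    case 'U': s = (order == 'S') ? "uint"  : (order == 'V') ? "uvec" : "umat"; break;
    case 'B': s = (order == 'S') ? "bool"  : (order == 'V') ? "bvec" : "bmat"; break;
    default:  s = "void"; break;
    }
    if (order == 'V')
        s += std::to_string(dim);
    else if (order == 'M')
        s += std::to_string(dim) + "x" + std::to_string(dim);
    return s;
}

int main()
{
    // An entry with argOrder "SVM" and argType "F" yields one prototype per
    // order key and per dimension; three of them (dim = 3) are printed here.
    for (char order : {'S', 'V', 'M'})
        std::cout << typeName(order, 'F', 3) << " abs(" << typeName(order, 'F', 3) << ");\n";
}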
-// -void TBuiltInParseablesHlsl::identifyBuiltIns(int /*version*/, EProfile /*profile*/, const SpvVersion& /*spvVersion*/, EShLanguage /*language*/, - TSymbolTable& symbolTable) -{ - // symbolTable.relateToOperator("abort", EOpAbort); - symbolTable.relateToOperator("abs", EOpAbs); - symbolTable.relateToOperator("acos", EOpAcos); - symbolTable.relateToOperator("all", EOpAll); - symbolTable.relateToOperator("AllMemoryBarrier", EOpMemoryBarrier); - symbolTable.relateToOperator("AllMemoryBarrierWithGroupSync", EOpAllMemoryBarrierWithGroupSync); - symbolTable.relateToOperator("any", EOpAny); - symbolTable.relateToOperator("asdouble", EOpAsDouble); - symbolTable.relateToOperator("asfloat", EOpIntBitsToFloat); - symbolTable.relateToOperator("asin", EOpAsin); - symbolTable.relateToOperator("asint", EOpFloatBitsToInt); - symbolTable.relateToOperator("asuint", EOpFloatBitsToUint); - symbolTable.relateToOperator("atan", EOpAtan); - symbolTable.relateToOperator("atan2", EOpAtan); - symbolTable.relateToOperator("ceil", EOpCeil); - // symbolTable.relateToOperator("CheckAccessFullyMapped"); - symbolTable.relateToOperator("clamp", EOpClamp); - symbolTable.relateToOperator("clip", EOpClip); - symbolTable.relateToOperator("cos", EOpCos); - symbolTable.relateToOperator("cosh", EOpCosh); - symbolTable.relateToOperator("countbits", EOpBitCount); - symbolTable.relateToOperator("cross", EOpCross); - symbolTable.relateToOperator("D3DCOLORtoUBYTE4", EOpD3DCOLORtoUBYTE4); - symbolTable.relateToOperator("ddx", EOpDPdx); - symbolTable.relateToOperator("ddx_coarse", EOpDPdxCoarse); - symbolTable.relateToOperator("ddx_fine", EOpDPdxFine); - symbolTable.relateToOperator("ddy", EOpDPdy); - symbolTable.relateToOperator("ddy_coarse", EOpDPdyCoarse); - symbolTable.relateToOperator("ddy_fine", EOpDPdyFine); - symbolTable.relateToOperator("degrees", EOpDegrees); - symbolTable.relateToOperator("determinant", EOpDeterminant); - symbolTable.relateToOperator("DeviceMemoryBarrier", EOpGroupMemoryBarrier); - symbolTable.relateToOperator("DeviceMemoryBarrierWithGroupSync", EOpGroupMemoryBarrierWithGroupSync); // ... 
- symbolTable.relateToOperator("distance", EOpDistance); - symbolTable.relateToOperator("dot", EOpDot); - symbolTable.relateToOperator("dst", EOpDst); - // symbolTable.relateToOperator("errorf", EOpErrorf); - symbolTable.relateToOperator("EvaluateAttributeAtCentroid", EOpInterpolateAtCentroid); - symbolTable.relateToOperator("EvaluateAttributeAtSample", EOpInterpolateAtSample); - symbolTable.relateToOperator("EvaluateAttributeSnapped", EOpEvaluateAttributeSnapped); - symbolTable.relateToOperator("exp", EOpExp); - symbolTable.relateToOperator("exp2", EOpExp2); - symbolTable.relateToOperator("f16tof32", EOpF16tof32); - symbolTable.relateToOperator("f32tof16", EOpF32tof16); - symbolTable.relateToOperator("faceforward", EOpFaceForward); - symbolTable.relateToOperator("firstbithigh", EOpFindMSB); - symbolTable.relateToOperator("firstbitlow", EOpFindLSB); - symbolTable.relateToOperator("floor", EOpFloor); - symbolTable.relateToOperator("fma", EOpFma); - symbolTable.relateToOperator("fmod", EOpMod); - symbolTable.relateToOperator("frac", EOpFract); - symbolTable.relateToOperator("frexp", EOpFrexp); - symbolTable.relateToOperator("fwidth", EOpFwidth); - // symbolTable.relateToOperator("GetRenderTargetSampleCount"); - // symbolTable.relateToOperator("GetRenderTargetSamplePosition"); - symbolTable.relateToOperator("GroupMemoryBarrier", EOpWorkgroupMemoryBarrier); - symbolTable.relateToOperator("GroupMemoryBarrierWithGroupSync", EOpWorkgroupMemoryBarrierWithGroupSync); - symbolTable.relateToOperator("InterlockedAdd", EOpInterlockedAdd); - symbolTable.relateToOperator("InterlockedAnd", EOpInterlockedAnd); - symbolTable.relateToOperator("InterlockedCompareExchange", EOpInterlockedCompareExchange); - symbolTable.relateToOperator("InterlockedCompareStore", EOpInterlockedCompareStore); - symbolTable.relateToOperator("InterlockedExchange", EOpInterlockedExchange); - symbolTable.relateToOperator("InterlockedMax", EOpInterlockedMax); - symbolTable.relateToOperator("InterlockedMin", EOpInterlockedMin); - symbolTable.relateToOperator("InterlockedOr", EOpInterlockedOr); - symbolTable.relateToOperator("InterlockedXor", EOpInterlockedXor); - symbolTable.relateToOperator("isfinite", EOpIsFinite); - symbolTable.relateToOperator("isinf", EOpIsInf); - symbolTable.relateToOperator("isnan", EOpIsNan); - symbolTable.relateToOperator("ldexp", EOpLdexp); - symbolTable.relateToOperator("length", EOpLength); - symbolTable.relateToOperator("lerp", EOpMix); - symbolTable.relateToOperator("lit", EOpLit); - symbolTable.relateToOperator("log", EOpLog); - symbolTable.relateToOperator("log10", EOpLog10); - symbolTable.relateToOperator("log2", EOpLog2); - symbolTable.relateToOperator("mad", EOpFma); - symbolTable.relateToOperator("max", EOpMax); - symbolTable.relateToOperator("min", EOpMin); - symbolTable.relateToOperator("modf", EOpModf); - // symbolTable.relateToOperator("msad4", EOpMsad4); - symbolTable.relateToOperator("mul", EOpGenMul); - // symbolTable.relateToOperator("noise", EOpNoise); // TODO: check return type - symbolTable.relateToOperator("normalize", EOpNormalize); - symbolTable.relateToOperator("pow", EOpPow); - // symbolTable.relateToOperator("printf", EOpPrintf); - // symbolTable.relateToOperator("Process2DQuadTessFactorsAvg"); - // symbolTable.relateToOperator("Process2DQuadTessFactorsMax"); - // symbolTable.relateToOperator("Process2DQuadTessFactorsMin"); - // symbolTable.relateToOperator("ProcessIsolineTessFactors"); - // symbolTable.relateToOperator("ProcessQuadTessFactorsAvg"); - // 
symbolTable.relateToOperator("ProcessQuadTessFactorsMax"); - // symbolTable.relateToOperator("ProcessQuadTessFactorsMin"); - // symbolTable.relateToOperator("ProcessTriTessFactorsAvg"); - // symbolTable.relateToOperator("ProcessTriTessFactorsMax"); - // symbolTable.relateToOperator("ProcessTriTessFactorsMin"); - symbolTable.relateToOperator("radians", EOpRadians); - symbolTable.relateToOperator("rcp", EOpRcp); - symbolTable.relateToOperator("reflect", EOpReflect); - symbolTable.relateToOperator("refract", EOpRefract); - symbolTable.relateToOperator("reversebits", EOpBitFieldReverse); - symbolTable.relateToOperator("round", EOpRoundEven); - symbolTable.relateToOperator("rsqrt", EOpInverseSqrt); - symbolTable.relateToOperator("saturate", EOpSaturate); - symbolTable.relateToOperator("sign", EOpSign); - symbolTable.relateToOperator("sin", EOpSin); - symbolTable.relateToOperator("sincos", EOpSinCos); - symbolTable.relateToOperator("sinh", EOpSinh); - symbolTable.relateToOperator("smoothstep", EOpSmoothStep); - symbolTable.relateToOperator("sqrt", EOpSqrt); - symbolTable.relateToOperator("step", EOpStep); - symbolTable.relateToOperator("tan", EOpTan); - symbolTable.relateToOperator("tanh", EOpTanh); - symbolTable.relateToOperator("tex1D", EOpTexture); - symbolTable.relateToOperator("tex1Dbias", EOpTextureBias); - symbolTable.relateToOperator("tex1Dgrad", EOpTextureGrad); - symbolTable.relateToOperator("tex1Dlod", EOpTextureLod); - symbolTable.relateToOperator("tex1Dproj", EOpTextureProj); - symbolTable.relateToOperator("tex2D", EOpTexture); - symbolTable.relateToOperator("tex2Dbias", EOpTextureBias); - symbolTable.relateToOperator("tex2Dgrad", EOpTextureGrad); - symbolTable.relateToOperator("tex2Dlod", EOpTextureLod); - symbolTable.relateToOperator("tex2Dproj", EOpTextureProj); - symbolTable.relateToOperator("tex3D", EOpTexture); - symbolTable.relateToOperator("tex3Dbias", EOpTextureBias); - symbolTable.relateToOperator("tex3Dgrad", EOpTextureGrad); - symbolTable.relateToOperator("tex3Dlod", EOpTextureLod); - symbolTable.relateToOperator("tex3Dproj", EOpTextureProj); - symbolTable.relateToOperator("texCUBE", EOpTexture); - symbolTable.relateToOperator("texCUBEbias", EOpTextureBias); - symbolTable.relateToOperator("texCUBEgrad", EOpTextureGrad); - symbolTable.relateToOperator("texCUBElod", EOpTextureLod); - symbolTable.relateToOperator("texCUBEproj", EOpTextureProj); - symbolTable.relateToOperator("transpose", EOpTranspose); - symbolTable.relateToOperator("trunc", EOpTrunc); - - // Texture methods - symbolTable.relateToOperator(BUILTIN_PREFIX "Sample", EOpMethodSample); - symbolTable.relateToOperator(BUILTIN_PREFIX "SampleBias", EOpMethodSampleBias); - symbolTable.relateToOperator(BUILTIN_PREFIX "SampleCmp", EOpMethodSampleCmp); - symbolTable.relateToOperator(BUILTIN_PREFIX "SampleCmpLevelZero", EOpMethodSampleCmpLevelZero); - symbolTable.relateToOperator(BUILTIN_PREFIX "SampleGrad", EOpMethodSampleGrad); - symbolTable.relateToOperator(BUILTIN_PREFIX "SampleLevel", EOpMethodSampleLevel); - symbolTable.relateToOperator(BUILTIN_PREFIX "Load", EOpMethodLoad); - symbolTable.relateToOperator(BUILTIN_PREFIX "GetDimensions", EOpMethodGetDimensions); - symbolTable.relateToOperator(BUILTIN_PREFIX "GetSamplePosition", EOpMethodGetSamplePosition); - symbolTable.relateToOperator(BUILTIN_PREFIX "Gather", EOpMethodGather); - symbolTable.relateToOperator(BUILTIN_PREFIX "CalculateLevelOfDetail", EOpMethodCalculateLevelOfDetail); - symbolTable.relateToOperator(BUILTIN_PREFIX "CalculateLevelOfDetailUnclamped", 
EOpMethodCalculateLevelOfDetailUnclamped); - - // Structure buffer methods (excluding associations already made above for texture methods w/ same name) - symbolTable.relateToOperator(BUILTIN_PREFIX "Load2", EOpMethodLoad2); - symbolTable.relateToOperator(BUILTIN_PREFIX "Load3", EOpMethodLoad3); - symbolTable.relateToOperator(BUILTIN_PREFIX "Load4", EOpMethodLoad4); - symbolTable.relateToOperator(BUILTIN_PREFIX "Store", EOpMethodStore); - symbolTable.relateToOperator(BUILTIN_PREFIX "Store2", EOpMethodStore2); - symbolTable.relateToOperator(BUILTIN_PREFIX "Store3", EOpMethodStore3); - symbolTable.relateToOperator(BUILTIN_PREFIX "Store4", EOpMethodStore4); - symbolTable.relateToOperator(BUILTIN_PREFIX "IncrementCounter", EOpMethodIncrementCounter); - symbolTable.relateToOperator(BUILTIN_PREFIX "DecrementCounter", EOpMethodDecrementCounter); - // Append is also a GS method: we don't add it twice - symbolTable.relateToOperator(BUILTIN_PREFIX "Consume", EOpMethodConsume); - - symbolTable.relateToOperator(BUILTIN_PREFIX "InterlockedAdd", EOpInterlockedAdd); - symbolTable.relateToOperator(BUILTIN_PREFIX "InterlockedAnd", EOpInterlockedAnd); - symbolTable.relateToOperator(BUILTIN_PREFIX "InterlockedCompareExchange", EOpInterlockedCompareExchange); - symbolTable.relateToOperator(BUILTIN_PREFIX "InterlockedCompareStore", EOpInterlockedCompareStore); - symbolTable.relateToOperator(BUILTIN_PREFIX "InterlockedExchange", EOpInterlockedExchange); - symbolTable.relateToOperator(BUILTIN_PREFIX "InterlockedMax", EOpInterlockedMax); - symbolTable.relateToOperator(BUILTIN_PREFIX "InterlockedMin", EOpInterlockedMin); - symbolTable.relateToOperator(BUILTIN_PREFIX "InterlockedOr", EOpInterlockedOr); - symbolTable.relateToOperator(BUILTIN_PREFIX "InterlockedXor", EOpInterlockedXor); - - // SM5 Texture methods - symbolTable.relateToOperator(BUILTIN_PREFIX "GatherRed", EOpMethodGatherRed); - symbolTable.relateToOperator(BUILTIN_PREFIX "GatherGreen", EOpMethodGatherGreen); - symbolTable.relateToOperator(BUILTIN_PREFIX "GatherBlue", EOpMethodGatherBlue); - symbolTable.relateToOperator(BUILTIN_PREFIX "GatherAlpha", EOpMethodGatherAlpha); - symbolTable.relateToOperator(BUILTIN_PREFIX "GatherCmp", EOpMethodGatherCmpRed); // alias - symbolTable.relateToOperator(BUILTIN_PREFIX "GatherCmpRed", EOpMethodGatherCmpRed); - symbolTable.relateToOperator(BUILTIN_PREFIX "GatherCmpGreen", EOpMethodGatherCmpGreen); - symbolTable.relateToOperator(BUILTIN_PREFIX "GatherCmpBlue", EOpMethodGatherCmpBlue); - symbolTable.relateToOperator(BUILTIN_PREFIX "GatherCmpAlpha", EOpMethodGatherCmpAlpha); - - // GS methods - symbolTable.relateToOperator(BUILTIN_PREFIX "Append", EOpMethodAppend); - symbolTable.relateToOperator(BUILTIN_PREFIX "RestartStrip", EOpMethodRestartStrip); -} - -// -// Add context-dependent (resource-specific) built-ins not handled by the above. These -// would be ones that need to be programmatically added because they cannot -// be added by simple text strings. For these, also -// 1) Map built-in functions to operators, for those that will turn into an operation node -// instead of remaining a function call. -// 2) Tag extension-related symbols added to their base version with their extensions, so -// that if an early version has the extension turned off, there is an error reported on use. 
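The BUILTIN_PREFIX uses in the mappings above rely on ordinary C++ adjacent string-literal concatenation: BUILTIN_PREFIX "Sample" is the single literal "__BI_Sample", so texture and buffer methods are registered under names that cannot collide with user-defined global functions. A minimal standalone illustration follows; the lookup map and Op enum are hypothetical stand-ins, not glslang's symbol table:

#include <cassert>
#include <string>
#include <unordered_map>

#define BUILTIN_PREFIX "__BI_"   // same prefixing scheme as in hlslParseHelper.h above

enum class Op { MethodSample, MethodLoad };

int main()
{
    // Adjacent literals concatenate at compile time, so the key below is "__BI_Sample".
    std::unordered_map<std::string, Op> ops;
    ops[BUILTIN_PREFIX "Sample"] = Op::MethodSample;
    ops[BUILTIN_PREFIX "Load"]   = Op::MethodLoad;

    assert(ops.count("__BI_Sample") == 1);   // the prefixed name is what gets registered
    assert(ops.count("Sample") == 0);        // a user-defined global "Sample" cannot collide
}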
-// -void TBuiltInParseablesHlsl::identifyBuiltIns(int /*version*/, EProfile /*profile*/, const SpvVersion& /*spvVersion*/, EShLanguage /*language*/, - TSymbolTable& /*symbolTable*/, const TBuiltInResource& /*resources*/) -{ -} - -} // end namespace glslang diff --git a/deps/glslang/hlsl/hlslParseables.h b/deps/glslang/hlsl/hlslParseables.h deleted file mode 100644 index 28f424b3..00000000 --- a/deps/glslang/hlsl/hlslParseables.h +++ /dev/null @@ -1,64 +0,0 @@ -// -// Copyright (C) 2016 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of 3Dlabs Inc. Ltd. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#ifndef _HLSLPARSEABLES_INCLUDED_ -#define _HLSLPARSEABLES_INCLUDED_ - -#include "../glslang/MachineIndependent/Initialize.h" - -namespace glslang { - -// -// This is an HLSL specific derivation of TBuiltInParseables. See comment -// above TBuiltInParseables for details. -// -class TBuiltInParseablesHlsl : public TBuiltInParseables { -public: - POOL_ALLOCATOR_NEW_DELETE(GetThreadPoolAllocator()) - TBuiltInParseablesHlsl(); - void initialize(int version, EProfile, const SpvVersion& spvVersion); - void initialize(const TBuiltInResource& resources, int version, EProfile, const SpvVersion& spvVersion, EShLanguage); - - void identifyBuiltIns(int version, EProfile profile, const SpvVersion& spvVersion, EShLanguage language, TSymbolTable& symbolTable); - - void identifyBuiltIns(int version, EProfile profile, const SpvVersion& spvVersion, EShLanguage language, TSymbolTable& symbolTable, const TBuiltInResource &resources); - -private: - void createMatTimesMat(); -}; - -} // end namespace glslang - -#endif // _HLSLPARSEABLES_INCLUDED_ diff --git a/deps/glslang/hlsl/hlslScanContext.cpp b/deps/glslang/hlsl/hlslScanContext.cpp deleted file mode 100644 index 392efeb5..00000000 --- a/deps/glslang/hlsl/hlslScanContext.cpp +++ /dev/null @@ -1,889 +0,0 @@ -// -// Copyright (C) 2016 Google, Inc. -// Copyright (C) 2016 LunarG, Inc. -// -// All rights reserved. 
-// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of Google, Inc., nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -// -// HLSL scanning, leveraging the scanning done by the preprocessor. -// - -#include -#include -#include - -#include "../glslang/Include/Types.h" -#include "../glslang/MachineIndependent/SymbolTable.h" -#include "../glslang/MachineIndependent/ParseHelper.h" -#include "hlslScanContext.h" -#include "hlslTokens.h" - -// preprocessor includes -#include "../glslang/MachineIndependent/preprocessor/PpContext.h" -#include "../glslang/MachineIndependent/preprocessor/PpTokens.h" - -namespace { - -struct str_eq -{ - bool operator()(const char* lhs, const char* rhs) const - { - return strcmp(lhs, rhs) == 0; - } -}; - -struct str_hash -{ - size_t operator()(const char* str) const - { - // djb2 - unsigned long hash = 5381; - int c; - - while ((c = *str++) != 0) - hash = ((hash << 5) + hash) + c; - - return hash; - } -}; - -// A single global usable by all threads, by all versions, by all languages. 
-// After a single process-level initialization, this is read only and thread safe -std::unordered_map* KeywordMap = nullptr; -std::unordered_set* ReservedSet = nullptr; -std::unordered_map* SemanticMap = nullptr; - -}; - -namespace glslang { - -void HlslScanContext::fillInKeywordMap() -{ - if (KeywordMap != nullptr) { - // this is really an error, as this should called only once per process - // but, the only risk is if two threads called simultaneously - return; - } - KeywordMap = new std::unordered_map; - - (*KeywordMap)["static"] = EHTokStatic; - (*KeywordMap)["const"] = EHTokConst; - (*KeywordMap)["unorm"] = EHTokUnorm; - (*KeywordMap)["snorm"] = EHTokSNorm; - (*KeywordMap)["extern"] = EHTokExtern; - (*KeywordMap)["uniform"] = EHTokUniform; - (*KeywordMap)["volatile"] = EHTokVolatile; - (*KeywordMap)["precise"] = EHTokPrecise; - (*KeywordMap)["shared"] = EHTokShared; - (*KeywordMap)["groupshared"] = EHTokGroupShared; - (*KeywordMap)["linear"] = EHTokLinear; - (*KeywordMap)["centroid"] = EHTokCentroid; - (*KeywordMap)["nointerpolation"] = EHTokNointerpolation; - (*KeywordMap)["noperspective"] = EHTokNoperspective; - (*KeywordMap)["sample"] = EHTokSample; - (*KeywordMap)["row_major"] = EHTokRowMajor; - (*KeywordMap)["column_major"] = EHTokColumnMajor; - (*KeywordMap)["packoffset"] = EHTokPackOffset; - (*KeywordMap)["in"] = EHTokIn; - (*KeywordMap)["out"] = EHTokOut; - (*KeywordMap)["inout"] = EHTokInOut; - (*KeywordMap)["layout"] = EHTokLayout; - (*KeywordMap)["globallycoherent"] = EHTokGloballyCoherent; - (*KeywordMap)["inline"] = EHTokInline; - - (*KeywordMap)["point"] = EHTokPoint; - (*KeywordMap)["line"] = EHTokLine; - (*KeywordMap)["triangle"] = EHTokTriangle; - (*KeywordMap)["lineadj"] = EHTokLineAdj; - (*KeywordMap)["triangleadj"] = EHTokTriangleAdj; - - (*KeywordMap)["PointStream"] = EHTokPointStream; - (*KeywordMap)["LineStream"] = EHTokLineStream; - (*KeywordMap)["TriangleStream"] = EHTokTriangleStream; - - (*KeywordMap)["InputPatch"] = EHTokInputPatch; - (*KeywordMap)["OutputPatch"] = EHTokOutputPatch; - - (*KeywordMap)["Buffer"] = EHTokBuffer; - (*KeywordMap)["vector"] = EHTokVector; - (*KeywordMap)["matrix"] = EHTokMatrix; - - (*KeywordMap)["void"] = EHTokVoid; - (*KeywordMap)["string"] = EHTokString; - (*KeywordMap)["bool"] = EHTokBool; - (*KeywordMap)["int"] = EHTokInt; - (*KeywordMap)["uint"] = EHTokUint; - (*KeywordMap)["dword"] = EHTokDword; - (*KeywordMap)["half"] = EHTokHalf; - (*KeywordMap)["float"] = EHTokFloat; - (*KeywordMap)["double"] = EHTokDouble; - (*KeywordMap)["min16float"] = EHTokMin16float; - (*KeywordMap)["min10float"] = EHTokMin10float; - (*KeywordMap)["min16int"] = EHTokMin16int; - (*KeywordMap)["min12int"] = EHTokMin12int; - (*KeywordMap)["min16uint"] = EHTokMin16uint; - - (*KeywordMap)["bool1"] = EHTokBool1; - (*KeywordMap)["bool2"] = EHTokBool2; - (*KeywordMap)["bool3"] = EHTokBool3; - (*KeywordMap)["bool4"] = EHTokBool4; - (*KeywordMap)["float1"] = EHTokFloat1; - (*KeywordMap)["float2"] = EHTokFloat2; - (*KeywordMap)["float3"] = EHTokFloat3; - (*KeywordMap)["float4"] = EHTokFloat4; - (*KeywordMap)["int1"] = EHTokInt1; - (*KeywordMap)["int2"] = EHTokInt2; - (*KeywordMap)["int3"] = EHTokInt3; - (*KeywordMap)["int4"] = EHTokInt4; - (*KeywordMap)["double1"] = EHTokDouble1; - (*KeywordMap)["double2"] = EHTokDouble2; - (*KeywordMap)["double3"] = EHTokDouble3; - (*KeywordMap)["double4"] = EHTokDouble4; - (*KeywordMap)["uint1"] = EHTokUint1; - (*KeywordMap)["uint2"] = EHTokUint2; - (*KeywordMap)["uint3"] = EHTokUint3; - (*KeywordMap)["uint4"] = EHTokUint4; 
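For reference, the keyword table deleted above is a single process-wide lookup keyed by raw const char* strings, using the djb2 hash and the strcmp-based equality functor shown at the top of the file. The container's template arguments did not survive in this listing, so the sketch below is only a plausible reconstruction of how such a table is declared and queried; the KeywordTable alias and the cut-down EHTok* enumerators are assumptions, not the upstream declarations.

    #include <cstdio>
    #include <cstring>
    #include <unordered_map>

    // Stand-ins for a few of the EHTok* enumerators listed above (assumed values).
    enum EHlslTokenClass { EHTokNone = 0, EHTokStatic, EHTokConst, EHTokFloat4 };

    struct str_eq {
        bool operator()(const char* lhs, const char* rhs) const { return std::strcmp(lhs, rhs) == 0; }
    };

    struct str_hash {
        size_t operator()(const char* str) const {
            unsigned long hash = 5381;           // djb2, as in the deleted scanner
            int c;
            while ((c = *str++) != 0)
                hash = ((hash << 5) + hash) + c; // hash * 33 + c
            return hash;
        }
    };

    // Assumed shape of the process-wide table: const char* keys with custom hash/equality.
    using KeywordTable = std::unordered_map<const char*, EHlslTokenClass, str_hash, str_eq>;

    int main()
    {
        KeywordTable keywords;
        keywords["static"] = EHTokStatic;
        keywords["const"]  = EHTokConst;
        keywords["float4"] = EHTokFloat4;

        const auto it = keywords.find("float4");
        std::printf("float4 -> %d\n", it != keywords.end() ? it->second : EHTokNone);
        return 0;
    }

Raw const char* keys only work because every key is a string literal with static storage duration, which is also how the deleted scanner populates its maps.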
- - (*KeywordMap)["half1"] = EHTokHalf1; - (*KeywordMap)["half2"] = EHTokHalf2; - (*KeywordMap)["half3"] = EHTokHalf3; - (*KeywordMap)["half4"] = EHTokHalf4; - (*KeywordMap)["min16float1"] = EHTokMin16float1; - (*KeywordMap)["min16float2"] = EHTokMin16float2; - (*KeywordMap)["min16float3"] = EHTokMin16float3; - (*KeywordMap)["min16float4"] = EHTokMin16float4; - (*KeywordMap)["min10float1"] = EHTokMin10float1; - (*KeywordMap)["min10float2"] = EHTokMin10float2; - (*KeywordMap)["min10float3"] = EHTokMin10float3; - (*KeywordMap)["min10float4"] = EHTokMin10float4; - (*KeywordMap)["min16int1"] = EHTokMin16int1; - (*KeywordMap)["min16int2"] = EHTokMin16int2; - (*KeywordMap)["min16int3"] = EHTokMin16int3; - (*KeywordMap)["min16int4"] = EHTokMin16int4; - (*KeywordMap)["min12int1"] = EHTokMin12int1; - (*KeywordMap)["min12int2"] = EHTokMin12int2; - (*KeywordMap)["min12int3"] = EHTokMin12int3; - (*KeywordMap)["min12int4"] = EHTokMin12int4; - (*KeywordMap)["min16uint1"] = EHTokMin16uint1; - (*KeywordMap)["min16uint2"] = EHTokMin16uint2; - (*KeywordMap)["min16uint3"] = EHTokMin16uint3; - (*KeywordMap)["min16uint4"] = EHTokMin16uint4; - - (*KeywordMap)["bool1x1"] = EHTokBool1x1; - (*KeywordMap)["bool1x2"] = EHTokBool1x2; - (*KeywordMap)["bool1x3"] = EHTokBool1x3; - (*KeywordMap)["bool1x4"] = EHTokBool1x4; - (*KeywordMap)["bool2x1"] = EHTokBool2x1; - (*KeywordMap)["bool2x2"] = EHTokBool2x2; - (*KeywordMap)["bool2x3"] = EHTokBool2x3; - (*KeywordMap)["bool2x4"] = EHTokBool2x4; - (*KeywordMap)["bool3x1"] = EHTokBool3x1; - (*KeywordMap)["bool3x2"] = EHTokBool3x2; - (*KeywordMap)["bool3x3"] = EHTokBool3x3; - (*KeywordMap)["bool3x4"] = EHTokBool3x4; - (*KeywordMap)["bool4x1"] = EHTokBool4x1; - (*KeywordMap)["bool4x2"] = EHTokBool4x2; - (*KeywordMap)["bool4x3"] = EHTokBool4x3; - (*KeywordMap)["bool4x4"] = EHTokBool4x4; - (*KeywordMap)["int1x1"] = EHTokInt1x1; - (*KeywordMap)["int1x2"] = EHTokInt1x2; - (*KeywordMap)["int1x3"] = EHTokInt1x3; - (*KeywordMap)["int1x4"] = EHTokInt1x4; - (*KeywordMap)["int2x1"] = EHTokInt2x1; - (*KeywordMap)["int2x2"] = EHTokInt2x2; - (*KeywordMap)["int2x3"] = EHTokInt2x3; - (*KeywordMap)["int2x4"] = EHTokInt2x4; - (*KeywordMap)["int3x1"] = EHTokInt3x1; - (*KeywordMap)["int3x2"] = EHTokInt3x2; - (*KeywordMap)["int3x3"] = EHTokInt3x3; - (*KeywordMap)["int3x4"] = EHTokInt3x4; - (*KeywordMap)["int4x1"] = EHTokInt4x1; - (*KeywordMap)["int4x2"] = EHTokInt4x2; - (*KeywordMap)["int4x3"] = EHTokInt4x3; - (*KeywordMap)["int4x4"] = EHTokInt4x4; - (*KeywordMap)["uint1x1"] = EHTokUint1x1; - (*KeywordMap)["uint1x2"] = EHTokUint1x2; - (*KeywordMap)["uint1x3"] = EHTokUint1x3; - (*KeywordMap)["uint1x4"] = EHTokUint1x4; - (*KeywordMap)["uint2x1"] = EHTokUint2x1; - (*KeywordMap)["uint2x2"] = EHTokUint2x2; - (*KeywordMap)["uint2x3"] = EHTokUint2x3; - (*KeywordMap)["uint2x4"] = EHTokUint2x4; - (*KeywordMap)["uint3x1"] = EHTokUint3x1; - (*KeywordMap)["uint3x2"] = EHTokUint3x2; - (*KeywordMap)["uint3x3"] = EHTokUint3x3; - (*KeywordMap)["uint3x4"] = EHTokUint3x4; - (*KeywordMap)["uint4x1"] = EHTokUint4x1; - (*KeywordMap)["uint4x2"] = EHTokUint4x2; - (*KeywordMap)["uint4x3"] = EHTokUint4x3; - (*KeywordMap)["uint4x4"] = EHTokUint4x4; - (*KeywordMap)["bool1x1"] = EHTokBool1x1; - (*KeywordMap)["bool1x2"] = EHTokBool1x2; - (*KeywordMap)["bool1x3"] = EHTokBool1x3; - (*KeywordMap)["bool1x4"] = EHTokBool1x4; - (*KeywordMap)["bool2x1"] = EHTokBool2x1; - (*KeywordMap)["bool2x2"] = EHTokBool2x2; - (*KeywordMap)["bool2x3"] = EHTokBool2x3; - (*KeywordMap)["bool2x4"] = EHTokBool2x4; - (*KeywordMap)["bool3x1"] = 
EHTokBool3x1; - (*KeywordMap)["bool3x2"] = EHTokBool3x2; - (*KeywordMap)["bool3x3"] = EHTokBool3x3; - (*KeywordMap)["bool3x4"] = EHTokBool3x4; - (*KeywordMap)["bool4x1"] = EHTokBool4x1; - (*KeywordMap)["bool4x2"] = EHTokBool4x2; - (*KeywordMap)["bool4x3"] = EHTokBool4x3; - (*KeywordMap)["bool4x4"] = EHTokBool4x4; - (*KeywordMap)["float1x1"] = EHTokFloat1x1; - (*KeywordMap)["float1x2"] = EHTokFloat1x2; - (*KeywordMap)["float1x3"] = EHTokFloat1x3; - (*KeywordMap)["float1x4"] = EHTokFloat1x4; - (*KeywordMap)["float2x1"] = EHTokFloat2x1; - (*KeywordMap)["float2x2"] = EHTokFloat2x2; - (*KeywordMap)["float2x3"] = EHTokFloat2x3; - (*KeywordMap)["float2x4"] = EHTokFloat2x4; - (*KeywordMap)["float3x1"] = EHTokFloat3x1; - (*KeywordMap)["float3x2"] = EHTokFloat3x2; - (*KeywordMap)["float3x3"] = EHTokFloat3x3; - (*KeywordMap)["float3x4"] = EHTokFloat3x4; - (*KeywordMap)["float4x1"] = EHTokFloat4x1; - (*KeywordMap)["float4x2"] = EHTokFloat4x2; - (*KeywordMap)["float4x3"] = EHTokFloat4x3; - (*KeywordMap)["float4x4"] = EHTokFloat4x4; - (*KeywordMap)["half1x1"] = EHTokHalf1x1; - (*KeywordMap)["half1x2"] = EHTokHalf1x2; - (*KeywordMap)["half1x3"] = EHTokHalf1x3; - (*KeywordMap)["half1x4"] = EHTokHalf1x4; - (*KeywordMap)["half2x1"] = EHTokHalf2x1; - (*KeywordMap)["half2x2"] = EHTokHalf2x2; - (*KeywordMap)["half2x3"] = EHTokHalf2x3; - (*KeywordMap)["half2x4"] = EHTokHalf2x4; - (*KeywordMap)["half3x1"] = EHTokHalf3x1; - (*KeywordMap)["half3x2"] = EHTokHalf3x2; - (*KeywordMap)["half3x3"] = EHTokHalf3x3; - (*KeywordMap)["half3x4"] = EHTokHalf3x4; - (*KeywordMap)["half4x1"] = EHTokHalf4x1; - (*KeywordMap)["half4x2"] = EHTokHalf4x2; - (*KeywordMap)["half4x3"] = EHTokHalf4x3; - (*KeywordMap)["half4x4"] = EHTokHalf4x4; - (*KeywordMap)["double1x1"] = EHTokDouble1x1; - (*KeywordMap)["double1x2"] = EHTokDouble1x2; - (*KeywordMap)["double1x3"] = EHTokDouble1x3; - (*KeywordMap)["double1x4"] = EHTokDouble1x4; - (*KeywordMap)["double2x1"] = EHTokDouble2x1; - (*KeywordMap)["double2x2"] = EHTokDouble2x2; - (*KeywordMap)["double2x3"] = EHTokDouble2x3; - (*KeywordMap)["double2x4"] = EHTokDouble2x4; - (*KeywordMap)["double3x1"] = EHTokDouble3x1; - (*KeywordMap)["double3x2"] = EHTokDouble3x2; - (*KeywordMap)["double3x3"] = EHTokDouble3x3; - (*KeywordMap)["double3x4"] = EHTokDouble3x4; - (*KeywordMap)["double4x1"] = EHTokDouble4x1; - (*KeywordMap)["double4x2"] = EHTokDouble4x2; - (*KeywordMap)["double4x3"] = EHTokDouble4x3; - (*KeywordMap)["double4x4"] = EHTokDouble4x4; - - (*KeywordMap)["sampler"] = EHTokSampler; - (*KeywordMap)["sampler1D"] = EHTokSampler1d; - (*KeywordMap)["sampler2D"] = EHTokSampler2d; - (*KeywordMap)["sampler3D"] = EHTokSampler3d; - (*KeywordMap)["samplerCube"] = EHTokSamplerCube; - (*KeywordMap)["sampler_state"] = EHTokSamplerState; - (*KeywordMap)["SamplerState"] = EHTokSamplerState; - (*KeywordMap)["SamplerComparisonState"] = EHTokSamplerComparisonState; - (*KeywordMap)["texture"] = EHTokTexture; - (*KeywordMap)["Texture1D"] = EHTokTexture1d; - (*KeywordMap)["Texture1DArray"] = EHTokTexture1darray; - (*KeywordMap)["Texture2D"] = EHTokTexture2d; - (*KeywordMap)["Texture2DArray"] = EHTokTexture2darray; - (*KeywordMap)["Texture3D"] = EHTokTexture3d; - (*KeywordMap)["TextureCube"] = EHTokTextureCube; - (*KeywordMap)["TextureCubeArray"] = EHTokTextureCubearray; - (*KeywordMap)["Texture2DMS"] = EHTokTexture2DMS; - (*KeywordMap)["Texture2DMSArray"] = EHTokTexture2DMSarray; - (*KeywordMap)["RWTexture1D"] = EHTokRWTexture1d; - (*KeywordMap)["RWTexture1DArray"] = EHTokRWTexture1darray; - 
(*KeywordMap)["RWTexture2D"] = EHTokRWTexture2d; - (*KeywordMap)["RWTexture2DArray"] = EHTokRWTexture2darray; - (*KeywordMap)["RWTexture3D"] = EHTokRWTexture3d; - (*KeywordMap)["RWBuffer"] = EHTokRWBuffer; - - (*KeywordMap)["AppendStructuredBuffer"] = EHTokAppendStructuredBuffer; - (*KeywordMap)["ByteAddressBuffer"] = EHTokByteAddressBuffer; - (*KeywordMap)["ConsumeStructuredBuffer"] = EHTokConsumeStructuredBuffer; - (*KeywordMap)["RWByteAddressBuffer"] = EHTokRWByteAddressBuffer; - (*KeywordMap)["RWStructuredBuffer"] = EHTokRWStructuredBuffer; - (*KeywordMap)["StructuredBuffer"] = EHTokStructuredBuffer; - - (*KeywordMap)["class"] = EHTokClass; - (*KeywordMap)["struct"] = EHTokStruct; - (*KeywordMap)["cbuffer"] = EHTokCBuffer; - (*KeywordMap)["ConstantBuffer"] = EHTokConstantBuffer; - (*KeywordMap)["tbuffer"] = EHTokTBuffer; - (*KeywordMap)["typedef"] = EHTokTypedef; - (*KeywordMap)["this"] = EHTokThis; - (*KeywordMap)["namespace"] = EHTokNamespace; - - (*KeywordMap)["true"] = EHTokBoolConstant; - (*KeywordMap)["false"] = EHTokBoolConstant; - - (*KeywordMap)["for"] = EHTokFor; - (*KeywordMap)["do"] = EHTokDo; - (*KeywordMap)["while"] = EHTokWhile; - (*KeywordMap)["break"] = EHTokBreak; - (*KeywordMap)["continue"] = EHTokContinue; - (*KeywordMap)["if"] = EHTokIf; - (*KeywordMap)["else"] = EHTokElse; - (*KeywordMap)["discard"] = EHTokDiscard; - (*KeywordMap)["return"] = EHTokReturn; - (*KeywordMap)["switch"] = EHTokSwitch; - (*KeywordMap)["case"] = EHTokCase; - (*KeywordMap)["default"] = EHTokDefault; - - // TODO: get correct set here - ReservedSet = new std::unordered_set; - - ReservedSet->insert("auto"); - ReservedSet->insert("catch"); - ReservedSet->insert("char"); - ReservedSet->insert("const_cast"); - ReservedSet->insert("enum"); - ReservedSet->insert("explicit"); - ReservedSet->insert("friend"); - ReservedSet->insert("goto"); - ReservedSet->insert("long"); - ReservedSet->insert("mutable"); - ReservedSet->insert("new"); - ReservedSet->insert("operator"); - ReservedSet->insert("private"); - ReservedSet->insert("protected"); - ReservedSet->insert("public"); - ReservedSet->insert("reinterpret_cast"); - ReservedSet->insert("short"); - ReservedSet->insert("signed"); - ReservedSet->insert("sizeof"); - ReservedSet->insert("static_cast"); - ReservedSet->insert("template"); - ReservedSet->insert("throw"); - ReservedSet->insert("try"); - ReservedSet->insert("typename"); - ReservedSet->insert("union"); - ReservedSet->insert("unsigned"); - ReservedSet->insert("using"); - ReservedSet->insert("virtual"); - - SemanticMap = new std::unordered_map; - - // in DX9, all outputs had to have a semantic associated with them, that was either consumed - // by the system or was a specific register assignment - // in DX10+, only semantics with the SV_ prefix have any meaning beyond decoration - // Fxc will only accept DX9 style semantics in compat mode - // Also, in DX10 if a SV value is present as the input of a stage, but isn't appropriate for that - // stage, it would just be ignored as it is likely there as part of an output struct from one stage - // to the next - bool bParseDX9 = false; - if (bParseDX9) { - (*SemanticMap)["PSIZE"] = EbvPointSize; - (*SemanticMap)["FOG"] = EbvFogFragCoord; - (*SemanticMap)["DEPTH"] = EbvFragDepth; - (*SemanticMap)["VFACE"] = EbvFace; - (*SemanticMap)["VPOS"] = EbvFragCoord; - } - - (*SemanticMap)["SV_POSITION"] = EbvPosition; - (*SemanticMap)["SV_VERTEXID"] = EbvVertexIndex; - (*SemanticMap)["SV_VIEWPORTARRAYINDEX"] = EbvViewportIndex; - (*SemanticMap)["SV_TESSFACTOR"] = 
EbvTessLevelOuter; - (*SemanticMap)["SV_SAMPLEINDEX"] = EbvSampleId; - (*SemanticMap)["SV_RENDERTARGETARRAYINDEX"] = EbvLayer; - (*SemanticMap)["SV_PRIMITIVEID"] = EbvPrimitiveId; - (*SemanticMap)["SV_OUTPUTCONTROLPOINTID"] = EbvInvocationId; - (*SemanticMap)["SV_ISFRONTFACE"] = EbvFace; - (*SemanticMap)["SV_INSTANCEID"] = EbvInstanceIndex; - (*SemanticMap)["SV_INSIDETESSFACTOR"] = EbvTessLevelInner; - (*SemanticMap)["SV_GSINSTANCEID"] = EbvInvocationId; - (*SemanticMap)["SV_DISPATCHTHREADID"] = EbvGlobalInvocationId; - (*SemanticMap)["SV_GROUPTHREADID"] = EbvLocalInvocationId; - (*SemanticMap)["SV_GROUPINDEX"] = EbvLocalInvocationIndex; - (*SemanticMap)["SV_GROUPID"] = EbvWorkGroupId; - (*SemanticMap)["SV_DOMAINLOCATION"] = EbvTessCoord; - (*SemanticMap)["SV_DEPTH"] = EbvFragDepth; - (*SemanticMap)["SV_COVERAGE"] = EbvSampleMask; - (*SemanticMap)["SV_DEPTHGREATEREQUAL"] = EbvFragDepthGreater; - (*SemanticMap)["SV_DEPTHLESSEQUAL"] = EbvFragDepthLesser; - (*SemanticMap)["SV_STENCILREF"] = EbvFragStencilRef; -} - -void HlslScanContext::deleteKeywordMap() -{ - delete KeywordMap; - KeywordMap = nullptr; - delete ReservedSet; - ReservedSet = nullptr; - delete SemanticMap; - SemanticMap = nullptr; -} - -// Wrapper for tokenizeClass() to get everything inside the token. -void HlslScanContext::tokenize(HlslToken& token) -{ - EHlslTokenClass tokenClass = tokenizeClass(token); - token.tokenClass = tokenClass; -} - -glslang::TBuiltInVariable HlslScanContext::mapSemantic(const char* upperCase) -{ - auto it = SemanticMap->find(upperCase); - if (it != SemanticMap->end()) - return it->second; - else - return glslang::EbvNone; -} - -// -// Fill in token information for the next token, except for the token class. -// Returns the enum value of the token class of the next token found. -// Return 0 (EndOfTokens) on end of input. 
-// -EHlslTokenClass HlslScanContext::tokenizeClass(HlslToken& token) -{ - do { - parserToken = &token; - TPpToken ppToken; - int token = ppContext.tokenize(ppToken); - if (token == EndOfInput) - return EHTokNone; - - tokenText = ppToken.name; - loc = ppToken.loc; - parserToken->loc = loc; - switch (token) { - case ';': return EHTokSemicolon; - case ',': return EHTokComma; - case ':': return EHTokColon; - case '=': return EHTokAssign; - case '(': return EHTokLeftParen; - case ')': return EHTokRightParen; - case '.': return EHTokDot; - case '!': return EHTokBang; - case '-': return EHTokDash; - case '~': return EHTokTilde; - case '+': return EHTokPlus; - case '*': return EHTokStar; - case '/': return EHTokSlash; - case '%': return EHTokPercent; - case '<': return EHTokLeftAngle; - case '>': return EHTokRightAngle; - case '|': return EHTokVerticalBar; - case '^': return EHTokCaret; - case '&': return EHTokAmpersand; - case '?': return EHTokQuestion; - case '[': return EHTokLeftBracket; - case ']': return EHTokRightBracket; - case '{': return EHTokLeftBrace; - case '}': return EHTokRightBrace; - case '\\': - parseContext.error(loc, "illegal use of escape character", "\\", ""); - break; - - case PPAtomAddAssign: return EHTokAddAssign; - case PPAtomSubAssign: return EHTokSubAssign; - case PPAtomMulAssign: return EHTokMulAssign; - case PPAtomDivAssign: return EHTokDivAssign; - case PPAtomModAssign: return EHTokModAssign; - - case PpAtomRight: return EHTokRightOp; - case PpAtomLeft: return EHTokLeftOp; - - case PpAtomRightAssign: return EHTokRightAssign; - case PpAtomLeftAssign: return EHTokLeftAssign; - case PpAtomAndAssign: return EHTokAndAssign; - case PpAtomOrAssign: return EHTokOrAssign; - case PpAtomXorAssign: return EHTokXorAssign; - - case PpAtomAnd: return EHTokAndOp; - case PpAtomOr: return EHTokOrOp; - case PpAtomXor: return EHTokXorOp; - - case PpAtomEQ: return EHTokEqOp; - case PpAtomGE: return EHTokGeOp; - case PpAtomNE: return EHTokNeOp; - case PpAtomLE: return EHTokLeOp; - - case PpAtomDecrement: return EHTokDecOp; - case PpAtomIncrement: return EHTokIncOp; - - case PpAtomColonColon: return EHTokColonColon; - - case PpAtomConstInt: parserToken->i = ppToken.ival; return EHTokIntConstant; - case PpAtomConstUint: parserToken->i = ppToken.ival; return EHTokUintConstant; - case PpAtomConstFloat: parserToken->d = ppToken.dval; return EHTokFloatConstant; - case PpAtomConstDouble: parserToken->d = ppToken.dval; return EHTokDoubleConstant; - case PpAtomIdentifier: - { - EHlslTokenClass token = tokenizeIdentifier(); - return token; - } - - case PpAtomConstString: { - parserToken->string = NewPoolTString(tokenText); - return EHTokStringConstant; - } - - case EndOfInput: return EHTokNone; - - default: - char buf[2]; - buf[0] = (char)token; - buf[1] = 0; - parseContext.error(loc, "unexpected token", buf, ""); - break; - } - } while (true); -} - -EHlslTokenClass HlslScanContext::tokenizeIdentifier() -{ - if (ReservedSet->find(tokenText) != ReservedSet->end()) - return reservedWord(); - - auto it = KeywordMap->find(tokenText); - if (it == KeywordMap->end()) { - // Should have an identifier of some sort - return identifierOrType(); - } - keyword = it->second; - - switch (keyword) { - - // qualifiers - case EHTokStatic: - case EHTokConst: - case EHTokSNorm: - case EHTokUnorm: - case EHTokExtern: - case EHTokUniform: - case EHTokVolatile: - case EHTokShared: - case EHTokGroupShared: - case EHTokLinear: - case EHTokCentroid: - case EHTokNointerpolation: - case EHTokNoperspective: - case EHTokSample: 
- case EHTokRowMajor: - case EHTokColumnMajor: - case EHTokPackOffset: - case EHTokIn: - case EHTokOut: - case EHTokInOut: - case EHTokPrecise: - case EHTokLayout: - case EHTokGloballyCoherent: - case EHTokInline: - return keyword; - - // primitive types - case EHTokPoint: - case EHTokLine: - case EHTokTriangle: - case EHTokLineAdj: - case EHTokTriangleAdj: - return keyword; - - // stream out types - case EHTokPointStream: - case EHTokLineStream: - case EHTokTriangleStream: - return keyword; - - // Tessellation patches - case EHTokInputPatch: - case EHTokOutputPatch: - return keyword; - - case EHTokBuffer: - case EHTokVector: - case EHTokMatrix: - return keyword; - - // scalar types - case EHTokVoid: - case EHTokString: - case EHTokBool: - case EHTokInt: - case EHTokUint: - case EHTokDword: - case EHTokHalf: - case EHTokFloat: - case EHTokDouble: - case EHTokMin16float: - case EHTokMin10float: - case EHTokMin16int: - case EHTokMin12int: - case EHTokMin16uint: - - // vector types - case EHTokBool1: - case EHTokBool2: - case EHTokBool3: - case EHTokBool4: - case EHTokFloat1: - case EHTokFloat2: - case EHTokFloat3: - case EHTokFloat4: - case EHTokInt1: - case EHTokInt2: - case EHTokInt3: - case EHTokInt4: - case EHTokDouble1: - case EHTokDouble2: - case EHTokDouble3: - case EHTokDouble4: - case EHTokUint1: - case EHTokUint2: - case EHTokUint3: - case EHTokUint4: - case EHTokHalf1: - case EHTokHalf2: - case EHTokHalf3: - case EHTokHalf4: - case EHTokMin16float1: - case EHTokMin16float2: - case EHTokMin16float3: - case EHTokMin16float4: - case EHTokMin10float1: - case EHTokMin10float2: - case EHTokMin10float3: - case EHTokMin10float4: - case EHTokMin16int1: - case EHTokMin16int2: - case EHTokMin16int3: - case EHTokMin16int4: - case EHTokMin12int1: - case EHTokMin12int2: - case EHTokMin12int3: - case EHTokMin12int4: - case EHTokMin16uint1: - case EHTokMin16uint2: - case EHTokMin16uint3: - case EHTokMin16uint4: - - // matrix types - case EHTokBool1x1: - case EHTokBool1x2: - case EHTokBool1x3: - case EHTokBool1x4: - case EHTokBool2x1: - case EHTokBool2x2: - case EHTokBool2x3: - case EHTokBool2x4: - case EHTokBool3x1: - case EHTokBool3x2: - case EHTokBool3x3: - case EHTokBool3x4: - case EHTokBool4x1: - case EHTokBool4x2: - case EHTokBool4x3: - case EHTokBool4x4: - case EHTokInt1x1: - case EHTokInt1x2: - case EHTokInt1x3: - case EHTokInt1x4: - case EHTokInt2x1: - case EHTokInt2x2: - case EHTokInt2x3: - case EHTokInt2x4: - case EHTokInt3x1: - case EHTokInt3x2: - case EHTokInt3x3: - case EHTokInt3x4: - case EHTokInt4x1: - case EHTokInt4x2: - case EHTokInt4x3: - case EHTokInt4x4: - case EHTokUint1x1: - case EHTokUint1x2: - case EHTokUint1x3: - case EHTokUint1x4: - case EHTokUint2x1: - case EHTokUint2x2: - case EHTokUint2x3: - case EHTokUint2x4: - case EHTokUint3x1: - case EHTokUint3x2: - case EHTokUint3x3: - case EHTokUint3x4: - case EHTokUint4x1: - case EHTokUint4x2: - case EHTokUint4x3: - case EHTokUint4x4: - case EHTokFloat1x1: - case EHTokFloat1x2: - case EHTokFloat1x3: - case EHTokFloat1x4: - case EHTokFloat2x1: - case EHTokFloat2x2: - case EHTokFloat2x3: - case EHTokFloat2x4: - case EHTokFloat3x1: - case EHTokFloat3x2: - case EHTokFloat3x3: - case EHTokFloat3x4: - case EHTokFloat4x1: - case EHTokFloat4x2: - case EHTokFloat4x3: - case EHTokFloat4x4: - case EHTokHalf1x1: - case EHTokHalf1x2: - case EHTokHalf1x3: - case EHTokHalf1x4: - case EHTokHalf2x1: - case EHTokHalf2x2: - case EHTokHalf2x3: - case EHTokHalf2x4: - case EHTokHalf3x1: - case EHTokHalf3x2: - case EHTokHalf3x3: - case EHTokHalf3x4: 
- case EHTokHalf4x1: - case EHTokHalf4x2: - case EHTokHalf4x3: - case EHTokHalf4x4: - case EHTokDouble1x1: - case EHTokDouble1x2: - case EHTokDouble1x3: - case EHTokDouble1x4: - case EHTokDouble2x1: - case EHTokDouble2x2: - case EHTokDouble2x3: - case EHTokDouble2x4: - case EHTokDouble3x1: - case EHTokDouble3x2: - case EHTokDouble3x3: - case EHTokDouble3x4: - case EHTokDouble4x1: - case EHTokDouble4x2: - case EHTokDouble4x3: - case EHTokDouble4x4: - return keyword; - - // texturing types - case EHTokSampler: - case EHTokSampler1d: - case EHTokSampler2d: - case EHTokSampler3d: - case EHTokSamplerCube: - case EHTokSamplerState: - case EHTokSamplerComparisonState: - case EHTokTexture: - case EHTokTexture1d: - case EHTokTexture1darray: - case EHTokTexture2d: - case EHTokTexture2darray: - case EHTokTexture3d: - case EHTokTextureCube: - case EHTokTextureCubearray: - case EHTokTexture2DMS: - case EHTokTexture2DMSarray: - case EHTokRWTexture1d: - case EHTokRWTexture1darray: - case EHTokRWTexture2d: - case EHTokRWTexture2darray: - case EHTokRWTexture3d: - case EHTokRWBuffer: - case EHTokAppendStructuredBuffer: - case EHTokByteAddressBuffer: - case EHTokConsumeStructuredBuffer: - case EHTokRWByteAddressBuffer: - case EHTokRWStructuredBuffer: - case EHTokStructuredBuffer: - return keyword; - - // variable, user type, ... - case EHTokClass: - case EHTokStruct: - case EHTokTypedef: - case EHTokCBuffer: - case EHTokConstantBuffer: - case EHTokTBuffer: - case EHTokThis: - case EHTokNamespace: - return keyword; - - case EHTokBoolConstant: - if (strcmp("true", tokenText) == 0) - parserToken->b = true; - else - parserToken->b = false; - return keyword; - - // control flow - case EHTokFor: - case EHTokDo: - case EHTokWhile: - case EHTokBreak: - case EHTokContinue: - case EHTokIf: - case EHTokElse: - case EHTokDiscard: - case EHTokReturn: - case EHTokCase: - case EHTokSwitch: - case EHTokDefault: - return keyword; - - default: - parseContext.infoSink.info.message(EPrefixInternalError, "Unknown glslang keyword", loc); - return EHTokNone; - } -} - -EHlslTokenClass HlslScanContext::identifierOrType() -{ - parserToken->string = NewPoolTString(tokenText); - - return EHTokIdentifier; -} - -// Give an error for use of a reserved symbol. -// However, allow built-in declarations to use reserved words, to allow -// extension support before the extension is enabled. -EHlslTokenClass HlslScanContext::reservedWord() -{ - if (! parseContext.symbolTable.atBuiltInLevel()) - parseContext.error(loc, "Reserved word.", tokenText, "", ""); - - return EHTokNone; -} - -} // end namespace glslang diff --git a/deps/glslang/hlsl/hlslScanContext.h b/deps/glslang/hlsl/hlslScanContext.h deleted file mode 100644 index 9d30a12e..00000000 --- a/deps/glslang/hlsl/hlslScanContext.h +++ /dev/null @@ -1,109 +0,0 @@ -// -// Copyright (C) 2016 Google, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. 
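The tokenizeIdentifier() routine removed above resolves a scanned name in a fixed order: reserved words are rejected first, the keyword table is consulted next, and anything unknown falls through to a plain identifier token. The standalone sketch below mirrors only that decision order; the miniature tables, the std::string keys and the TokenClass enum are simplifications assumed for brevity (the real scanner uses pooled strings, errors out on reserved words outside the built-in symbol level, and returns a distinct EHTok* value per keyword).

    #include <iostream>
    #include <string>
    #include <unordered_map>
    #include <unordered_set>

    enum class TokenClass { None, Identifier, Keyword, Reserved };

    // Assumed miniature tables; the real scanner fills these once per process.
    static const std::unordered_set<std::string> kReserved = { "template", "goto", "union" };
    static const std::unordered_map<std::string, TokenClass> kKeywords = {
        { "float4", TokenClass::Keyword }, { "cbuffer", TokenClass::Keyword }
    };

    // Mirrors the order used by the deleted tokenizeIdentifier():
    // reserved word first, then keyword, then plain identifier.
    TokenClass classify(const std::string& text)
    {
        if (kReserved.count(text) != 0)
            return TokenClass::Reserved;   // the real scanner reports an error here
        const auto it = kKeywords.find(text);
        if (it == kKeywords.end())
            return TokenClass::Identifier;
        return it->second;
    }

    int main()
    {
        for (const char* word : { "float4", "myVar", "goto" })
            std::cout << word << " -> " << static_cast<int>(classify(word)) << "\n";
        return 0;
    }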
-// -// Neither the name of Google, Inc., nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -// -// This holds context specific to the HLSL scanner, which -// sits between the preprocessor scanner and HLSL parser. -// - -#ifndef HLSLSCANCONTEXT_H_ -#define HLSLSCANCONTEXT_H_ - -#include "../glslang/MachineIndependent/ParseHelper.h" -#include "hlslTokens.h" - -namespace glslang { - -class TPpContext; -class TPpToken; - - -// -// Everything needed to fully describe a token. -// -struct HlslToken { - HlslToken() : string(nullptr) { loc.init(); } - TSourceLoc loc; // location of token in the source - EHlslTokenClass tokenClass; // what kind of token it is - union { // what data the token holds - glslang::TString *string; // for identifiers - int i; // for literals - unsigned int u; - bool b; - double d; - }; -}; - -// -// The state of scanning and translating raw tokens to slightly richer -// semantics, like knowing if an identifier is an existing symbol, or -// user-defined type. -// -class HlslScanContext { -public: - HlslScanContext(TParseContextBase& parseContext, TPpContext& ppContext) - : parseContext(parseContext), ppContext(ppContext) { } - virtual ~HlslScanContext() { } - - static void fillInKeywordMap(); - static void deleteKeywordMap(); - - void tokenize(HlslToken&); - glslang::TBuiltInVariable mapSemantic(const char*); - -protected: - HlslScanContext(HlslScanContext&); - HlslScanContext& operator=(HlslScanContext&); - - EHlslTokenClass tokenizeClass(HlslToken&); - EHlslTokenClass tokenizeIdentifier(); - EHlslTokenClass identifierOrType(); - EHlslTokenClass reservedWord(); - EHlslTokenClass identifierOrReserved(bool reserved); - EHlslTokenClass nonreservedKeyword(int version); - - TParseContextBase& parseContext; - TPpContext& ppContext; - TSourceLoc loc; - TPpToken* ppToken; - HlslToken* parserToken; - - const char* tokenText; - EHlslTokenClass keyword; -}; - -} // end namespace glslang - -#endif // HLSLSCANCONTEXT_H_ diff --git a/deps/glslang/hlsl/hlslTokenStream.cpp b/deps/glslang/hlsl/hlslTokenStream.cpp deleted file mode 100644 index 5d9311cf..00000000 --- a/deps/glslang/hlsl/hlslTokenStream.cpp +++ /dev/null @@ -1,150 +0,0 @@ -// -// Copyright (C) 2016 Google, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
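The deleted HlslToken struct keeps its payload in an anonymous union, so only the member selected by tokenClass is meaningful at any given time. The trimmed-down mirror below (with assumed, cut-down enumerators) just illustrates that a consumer has to dispatch on the tag before touching the union, which is what the HLSL parser does with the real struct.

    #include <cstdio>

    enum EHlslTokenClass { EHTokNone = 0, EHTokIntConstant, EHTokFloatConstant, EHTokBoolConstant };

    // Trimmed-down mirror of the deleted HlslToken: one payload member is valid
    // at a time, selected by tokenClass.
    struct MiniToken {
        EHlslTokenClass tokenClass = EHTokNone;
        union {
            int    i;
            double d;
            bool   b;
        };
    };

    void print(const MiniToken& tok)
    {
        switch (tok.tokenClass) {                // always dispatch on the tag first
        case EHTokIntConstant:   std::printf("int %d\n", tok.i);    break;
        case EHTokFloatConstant: std::printf("float %f\n", tok.d);  break;
        case EHTokBoolConstant:  std::printf("bool %d\n", tok.b);   break;
        default:                 std::printf("(no payload)\n");     break;
        }
    }

    int main()
    {
        MiniToken t;
        t.tokenClass = EHTokIntConstant;
        t.i = 42;
        print(t);
        return 0;
    }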
-// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of Google, Inc., nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#include "hlslTokenStream.h" - -namespace glslang { - -void HlslTokenStream::pushPreToken(const HlslToken& tok) -{ - assert(preTokenStackSize < tokenBufferSize); - preTokenStack[preTokenStackSize++] = tok; -} - -HlslToken HlslTokenStream::popPreToken() -{ - assert(preTokenStackSize > 0); - - return preTokenStack[--preTokenStackSize]; -} - -void HlslTokenStream::pushTokenBuffer(const HlslToken& tok) -{ - tokenBuffer[tokenBufferPos] = tok; - tokenBufferPos = (tokenBufferPos+1) % tokenBufferSize; -} - -HlslToken HlslTokenStream::popTokenBuffer() -{ - // Back up - tokenBufferPos = (tokenBufferPos+tokenBufferSize-1) % tokenBufferSize; - - return tokenBuffer[tokenBufferPos]; -} - -// -// Make a new source of tokens, not from the source, but from an -// already pre-processed token stream. -// -// This interrupts current token processing which must be restored -// later. Some simplifying assumptions are made (and asserted). -// -void HlslTokenStream::pushTokenStream(const TVector* tokens) -{ - // not yet setup to interrupt a stream that has been receded - // and not yet reconsumed - assert(preTokenStackSize == 0); - - // save current state - currentTokenStack.push_back(token); - - // set up new token stream - tokenStreamStack.push_back(tokens); - - // start position at first token: - token = (*tokens)[0]; - tokenPosition.push_back(0); -} - -// Undo pushTokenStream(), see above -void HlslTokenStream::popTokenStream() -{ - tokenStreamStack.pop_back(); - tokenPosition.pop_back(); - token = currentTokenStack.back(); - currentTokenStack.pop_back(); -} - -// Load 'token' with the next token in the stream of tokens. -void HlslTokenStream::advanceToken() -{ - pushTokenBuffer(token); - if (preTokenStackSize > 0) - token = popPreToken(); - else { - if (tokenStreamStack.size() == 0) - scanner.tokenize(token); - else { - ++tokenPosition.back(); - if (tokenPosition.back() >= (int)tokenStreamStack.back()->size()) - token.tokenClass = EHTokNone; - else - token = (*tokenStreamStack.back())[tokenPosition.back()]; - } - } -} - -void HlslTokenStream::recedeToken() -{ - pushPreToken(token); - token = popTokenBuffer(); -} - -// Return the current token class. 
-EHlslTokenClass HlslTokenStream::peek() const -{ - return token.tokenClass; -} - -// Return true, without advancing to the next token, if the current token is -// the expected (passed in) token class. -bool HlslTokenStream::peekTokenClass(EHlslTokenClass tokenClass) const -{ - return peek() == tokenClass; -} - -// Return true and advance to the next token if the current token is the -// expected (passed in) token class. -bool HlslTokenStream::acceptTokenClass(EHlslTokenClass tokenClass) -{ - if (peekTokenClass(tokenClass)) { - advanceToken(); - return true; - } - - return false; -} - -} // end namespace glslang diff --git a/deps/glslang/hlsl/hlslTokenStream.h b/deps/glslang/hlsl/hlslTokenStream.h deleted file mode 100644 index cb6c9e72..00000000 --- a/deps/glslang/hlsl/hlslTokenStream.h +++ /dev/null @@ -1,96 +0,0 @@ -// -// Copyright (C) 2016 Google, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of Google, Inc., nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. 
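The advanceToken()/recedeToken() pair removed above gives the parser a bounded lookback: every consumed token is copied into a small ring buffer, and receding pushes the current token onto a pre-token stack so the next advance replays it. Below is a self-contained sketch of that buffering scheme under the assumption of a plain int token type (with -1 standing in for end of input); it is not the upstream class, but the push/pop arithmetic follows the deleted code.

    #include <cassert>
    #include <iostream>
    #include <vector>

    // Minimal mirror of the deleted HlslTokenStream buffering: a 2-deep ring of
    // already-consumed tokens plus a stack of tokens pushed back by recede().
    class MiniTokenStream {
    public:
        explicit MiniTokenStream(std::vector<int> source) : source_(std::move(source)) {}

        void advance()
        {
            pushBuffer(current_);
            if (preStackSize_ > 0)
                current_ = preStack_[--preStackSize_];                   // replay a receded token
            else
                current_ = pos_ < source_.size() ? source_[pos_++] : -1; // -1 ~ end of input
        }

        void recede()
        {
            assert(preStackSize_ < kBufferSize);
            preStack_[preStackSize_++] = current_;                       // put the current token back
            bufferPos_ = (bufferPos_ + kBufferSize - 1) % kBufferSize;   // back up in the ring
            current_ = buffer_[bufferPos_];
        }

        int peek() const { return current_; }

    private:
        static constexpr int kBufferSize = 2;     // matches the deleted tokenBufferSize

        void pushBuffer(int tok)
        {
            buffer_[bufferPos_] = tok;
            bufferPos_ = (bufferPos_ + 1) % kBufferSize;
        }

        std::vector<int> source_;
        size_t pos_ = 0;
        int current_ = -1;
        int buffer_[kBufferSize] = { -1, -1 };
        int bufferPos_ = 0;
        int preStack_[kBufferSize] = { -1, -1 };
        int preStackSize_ = 0;
    };

    int main()
    {
        MiniTokenStream s({ 10, 20, 30 });
        s.advance();                   // current = 10
        s.advance();                   // current = 20
        s.recede();                    // back to 10; 20 will be replayed
        s.advance();                   // 20 again, from the pre-token stack
        s.advance();                   // 30
        std::cout << s.peek() << "\n"; // prints 30
        return 0;
    }

The buffer depth of 2 bounds how far the parser can recede, which is why the original asserts when the pre-token stack would overflow.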
-// - -#ifndef HLSLTOKENSTREAM_H_ -#define HLSLTOKENSTREAM_H_ - -#include "hlslScanContext.h" - -namespace glslang { - - class HlslTokenStream { - public: - explicit HlslTokenStream(HlslScanContext& scanner) - : scanner(scanner), preTokenStackSize(0), tokenBufferPos(0) { } - virtual ~HlslTokenStream() { } - - public: - void advanceToken(); - void recedeToken(); - bool acceptTokenClass(EHlslTokenClass); - EHlslTokenClass peek() const; - bool peekTokenClass(EHlslTokenClass) const; - glslang::TBuiltInVariable mapSemantic(const char* upperCase) { return scanner.mapSemantic(upperCase); } - - void pushTokenStream(const TVector* tokens); - void popTokenStream(); - - protected: - HlslToken token; // the token we are currently looking at, but have not yet accepted - - private: - HlslTokenStream(); - HlslTokenStream& operator=(const HlslTokenStream&); - - HlslScanContext& scanner; // lexical scanner, to get next token from source file - TVector*> tokenStreamStack; // for getting the next token from an existing vector of tokens - TVector tokenPosition; - TVector currentTokenStack; - - // This is the number of tokens we can recedeToken() over. - static const int tokenBufferSize = 2; - - // Previously scanned tokens, returned for future advances, - // so logically in front of the token stream. - // Is logically a stack; needs last in last out semantics. - // Currently implemented as a stack of size 2. - HlslToken preTokenStack[tokenBufferSize]; - int preTokenStackSize; - void pushPreToken(const HlslToken&); - HlslToken popPreToken(); - - // Previously scanned tokens, not yet returned for future advances, - // but available for that. - // Is logically a fifo for normal advances, and a stack for recession. - // Currently implemented with an intrinsic size of 2. - HlslToken tokenBuffer[tokenBufferSize]; - int tokenBufferPos; - void pushTokenBuffer(const HlslToken&); - HlslToken popTokenBuffer(); - }; - -} // end namespace glslang - -#endif // HLSLTOKENSTREAM_H_ diff --git a/deps/glslang/hlsl/hlslTokens.h b/deps/glslang/hlsl/hlslTokens.h deleted file mode 100644 index 15de63ab..00000000 --- a/deps/glslang/hlsl/hlslTokens.h +++ /dev/null @@ -1,369 +0,0 @@ -// -// Copyright (C) 2016 Google, Inc. -// Copyright (C) 2016 LunarG, Inc. -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// -// Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// -// Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// -// Neither the name of Google, Inc., nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -// POSSIBILITY OF SUCH DAMAGE. -// - -#ifndef EHLSLTOKENS_H_ -#define EHLSLTOKENS_H_ - -namespace glslang { - -enum EHlslTokenClass { - EHTokNone = 0, - - // qualifiers - EHTokStatic, - EHTokConst, - EHTokSNorm, - EHTokUnorm, - EHTokExtern, - EHTokUniform, - EHTokVolatile, - EHTokPrecise, - EHTokShared, - EHTokGroupShared, - EHTokLinear, - EHTokCentroid, - EHTokNointerpolation, - EHTokNoperspective, - EHTokSample, - EHTokRowMajor, - EHTokColumnMajor, - EHTokPackOffset, - EHTokIn, - EHTokOut, - EHTokInOut, - EHTokLayout, - EHTokGloballyCoherent, - EHTokInline, - - // primitive types - EHTokPoint, - EHTokLine, - EHTokTriangle, - EHTokLineAdj, - EHTokTriangleAdj, - - // stream out types - EHTokPointStream, - EHTokLineStream, - EHTokTriangleStream, - - // Tessellation patches - EHTokInputPatch, - EHTokOutputPatch, - - // template types - EHTokBuffer, - EHTokVector, - EHTokMatrix, - - // scalar types - EHTokVoid, - EHTokString, - EHTokBool, - EHTokInt, - EHTokUint, - EHTokDword, - EHTokHalf, - EHTokFloat, - EHTokDouble, - EHTokMin16float, - EHTokMin10float, - EHTokMin16int, - EHTokMin12int, - EHTokMin16uint, - - // vector types - EHTokBool1, - EHTokBool2, - EHTokBool3, - EHTokBool4, - EHTokFloat1, - EHTokFloat2, - EHTokFloat3, - EHTokFloat4, - EHTokInt1, - EHTokInt2, - EHTokInt3, - EHTokInt4, - EHTokDouble1, - EHTokDouble2, - EHTokDouble3, - EHTokDouble4, - EHTokUint1, - EHTokUint2, - EHTokUint3, - EHTokUint4, - EHTokHalf1, - EHTokHalf2, - EHTokHalf3, - EHTokHalf4, - EHTokMin16float1, - EHTokMin16float2, - EHTokMin16float3, - EHTokMin16float4, - EHTokMin10float1, - EHTokMin10float2, - EHTokMin10float3, - EHTokMin10float4, - EHTokMin16int1, - EHTokMin16int2, - EHTokMin16int3, - EHTokMin16int4, - EHTokMin12int1, - EHTokMin12int2, - EHTokMin12int3, - EHTokMin12int4, - EHTokMin16uint1, - EHTokMin16uint2, - EHTokMin16uint3, - EHTokMin16uint4, - - // matrix types - EHTokInt1x1, - EHTokInt1x2, - EHTokInt1x3, - EHTokInt1x4, - EHTokInt2x1, - EHTokInt2x2, - EHTokInt2x3, - EHTokInt2x4, - EHTokInt3x1, - EHTokInt3x2, - EHTokInt3x3, - EHTokInt3x4, - EHTokInt4x1, - EHTokInt4x2, - EHTokInt4x3, - EHTokInt4x4, - EHTokUint1x1, - EHTokUint1x2, - EHTokUint1x3, - EHTokUint1x4, - EHTokUint2x1, - EHTokUint2x2, - EHTokUint2x3, - EHTokUint2x4, - EHTokUint3x1, - EHTokUint3x2, - EHTokUint3x3, - EHTokUint3x4, - EHTokUint4x1, - EHTokUint4x2, - EHTokUint4x3, - EHTokUint4x4, - EHTokBool1x1, - EHTokBool1x2, - EHTokBool1x3, - EHTokBool1x4, - EHTokBool2x1, - EHTokBool2x2, - EHTokBool2x3, - EHTokBool2x4, - EHTokBool3x1, - EHTokBool3x2, - EHTokBool3x3, - EHTokBool3x4, - EHTokBool4x1, - EHTokBool4x2, - EHTokBool4x3, - EHTokBool4x4, - EHTokFloat1x1, - EHTokFloat1x2, - EHTokFloat1x3, - EHTokFloat1x4, - EHTokFloat2x1, - EHTokFloat2x2, - EHTokFloat2x3, - EHTokFloat2x4, - EHTokFloat3x1, - EHTokFloat3x2, - EHTokFloat3x3, - EHTokFloat3x4, - EHTokFloat4x1, - EHTokFloat4x2, - EHTokFloat4x3, - EHTokFloat4x4, - EHTokHalf1x1, - EHTokHalf1x2, - EHTokHalf1x3, - EHTokHalf1x4, - EHTokHalf2x1, - EHTokHalf2x2, - EHTokHalf2x3, - EHTokHalf2x4, 
- EHTokHalf3x1, - EHTokHalf3x2, - EHTokHalf3x3, - EHTokHalf3x4, - EHTokHalf4x1, - EHTokHalf4x2, - EHTokHalf4x3, - EHTokHalf4x4, - EHTokDouble1x1, - EHTokDouble1x2, - EHTokDouble1x3, - EHTokDouble1x4, - EHTokDouble2x1, - EHTokDouble2x2, - EHTokDouble2x3, - EHTokDouble2x4, - EHTokDouble3x1, - EHTokDouble3x2, - EHTokDouble3x3, - EHTokDouble3x4, - EHTokDouble4x1, - EHTokDouble4x2, - EHTokDouble4x3, - EHTokDouble4x4, - - // texturing types - EHTokSampler, - EHTokSampler1d, - EHTokSampler2d, - EHTokSampler3d, - EHTokSamplerCube, - EHTokSamplerState, - EHTokSamplerComparisonState, - EHTokTexture, - EHTokTexture1d, - EHTokTexture1darray, - EHTokTexture2d, - EHTokTexture2darray, - EHTokTexture3d, - EHTokTextureCube, - EHTokTextureCubearray, - EHTokTexture2DMS, - EHTokTexture2DMSarray, - EHTokRWTexture1d, - EHTokRWTexture1darray, - EHTokRWTexture2d, - EHTokRWTexture2darray, - EHTokRWTexture3d, - EHTokRWBuffer, - - // Structure buffer variants - EHTokAppendStructuredBuffer, - EHTokByteAddressBuffer, - EHTokConsumeStructuredBuffer, - EHTokRWByteAddressBuffer, - EHTokRWStructuredBuffer, - EHTokStructuredBuffer, - - // variable, user type, ... - EHTokIdentifier, - EHTokClass, - EHTokStruct, - EHTokCBuffer, - EHTokTBuffer, - EHTokTypedef, - EHTokThis, - EHTokNamespace, - EHTokConstantBuffer, - - // constant - EHTokFloatConstant, - EHTokDoubleConstant, - EHTokIntConstant, - EHTokUintConstant, - EHTokBoolConstant, - EHTokStringConstant, - - // control flow - EHTokFor, - EHTokDo, - EHTokWhile, - EHTokBreak, - EHTokContinue, - EHTokIf, - EHTokElse, - EHTokDiscard, - EHTokReturn, - EHTokSwitch, - EHTokCase, - EHTokDefault, - - // expressions - EHTokLeftOp, - EHTokRightOp, - EHTokIncOp, - EHTokDecOp, - EHTokLeOp, - EHTokGeOp, - EHTokEqOp, - EHTokNeOp, - EHTokAndOp, - EHTokOrOp, - EHTokXorOp, - EHTokAssign, - EHTokMulAssign, - EHTokDivAssign, - EHTokAddAssign, - EHTokModAssign, - EHTokLeftAssign, - EHTokRightAssign, - EHTokAndAssign, - EHTokXorAssign, - EHTokOrAssign, - EHTokSubAssign, - EHTokLeftParen, - EHTokRightParen, - EHTokLeftBracket, - EHTokRightBracket, - EHTokLeftBrace, - EHTokRightBrace, - EHTokDot, - EHTokComma, - EHTokColon, - EHTokColonColon, - EHTokSemicolon, - EHTokBang, - EHTokDash, - EHTokTilde, - EHTokPlus, - EHTokStar, - EHTokSlash, - EHTokPercent, - EHTokLeftAngle, - EHTokRightAngle, - EHTokVerticalBar, - EHTokCaret, - EHTokAmpersand, - EHTokQuestion, -}; - -} // end namespace glslang - -#endif // EHLSLTOKENS_H_ diff --git a/deps/glslang/index.php b/deps/glslang/index.php deleted file mode 100644 index 9a4bbcd9..00000000 --- a/deps/glslang/index.php +++ /dev/null @@ -1,81 +0,0 @@ - - - - GLSL Reference Parser - - - - -

- [About]
- About GLSL Reference Parser
- The GLSL Reference Parser provides a front end for parsing and operating on OpenGL Shading Language code. It was originally developed by 3Dlabs and is being maintained and updated by LunarG. The README has some additional information.
- You can download the source from the Khronos public-access Subversion server. For example, using the Subversion command-line client:
- svn checkout --username anonymous --password anonymous https://cvs.khronos.org/svn/repos/ogl/trunk/ecosystem/public/sdk/tools/glslang/ glslang
- Requirements
- TBD. Builds on Windows and Linux.
- Get BuGLe at SourceForge.net. Fast, secure and Free Open Source software downloads - - - diff --git a/deps/glslang/make-revision b/deps/glslang/make-revision deleted file mode 100644 index bf6533f8..00000000 --- a/deps/glslang/make-revision +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/sh -( -echo "// This header is generated by the make-revision script." -echo "// For the version, it uses the latest git tag followed by the number of commits." -echo "// For the date, it uses the current date (when then script is run)." - -echo -echo \#define GLSLANG_REVISION \"`git describe --tags --abbrev=0`.`git log --oneline | wc -l`\" -echo \#define GLSLANG_DATE \"`date +%d-%b-%Y`\" -) > glslang/Include/revision.h diff --git a/examples/DynamicBuffers/CMakeLists.txt b/examples/DynamicBuffers/CMakeLists.txt index 5551a655..d60db3c8 100644 --- a/examples/DynamicBuffers/CMakeLists.txt +++ b/examples/DynamicBuffers/CMakeLists.txt @@ -16,44 +16,21 @@ if(${CMAKE_SYSTEM_NAME} MATCHES "Linux") endif() endif() -add_subdirectory (../.. "${CMAKE_CURRENT_BINARY_DIR}/anvil") +if (NOT ANVIL_LINK_EXAMPLES) + add_subdirectory (../.. "${CMAKE_CURRENT_BINARY_DIR}/anvil") +endif() +target_include_directories(Anvil PUBLIC "${CMAKE_CURRENT_BINARY_DIR}/anvil/include") include_directories(${Anvil_SOURCE_DIR}/include - ${DynamicBuffers_SOURCE_DIR}/include) - -# Include the Vulkan header. -if (WIN32) - include_directories($ENV{VK_SDK_PATH}/Include - $ENV{VULKAN_SDK}/Include) - - if("${CMAKE_SIZEOF_VOID_P}" EQUAL "8") - link_directories ($ENV{VK_SDK_PATH}/Bin - $ENV{VK_SDK_PATH}/Lib - $ENV{VULKAN_SDK}/Bin - $ENV{VULKAN_SDK}/Lib) - else() - link_directories ($ENV{VK_SDK_PATH}/Bin32 - $ENV{VK_SDK_PATH}/Lib32 - $ENV{VULKAN_SDK}/Bin32 - $ENV{VULKAN_SDK}/Lib32) - endif() -else() - include_directories($ENV{VK_SDK_PATH}/x86_64/include - $ENV{VULKAN_SDK}/include - $ENV{VULKAN_SDK}/x86_64/include) - link_directories ($ENV{VK_SDK_PATH}/x86_64/lib - $ENV{VULKAN_SDK}/lib - $ENV{VULKAN_SDK}/x86_64/lib) -endif() + ${DynamicBuffers_SOURCE_DIR}/include) # Create the DynamicBuffers project. 
add_executable (DynamicBuffers include/app.h - include/app.h src/app.cpp) # Add linking dependencies for the example projects -add_dependencies(DynamicBuffers Anvil) +add_dependencies (DynamicBuffers Anvil) if (WIN32) target_link_libraries(DynamicBuffers Anvil) diff --git a/examples/DynamicBuffers/include/app.h b/examples/DynamicBuffers/include/app.h index fbd46c51..424a1e75 100644 --- a/examples/DynamicBuffers/include/app.h +++ b/examples/DynamicBuffers/include/app.h @@ -54,11 +54,9 @@ class App void init_window (); void init_vulkan (); - void draw_frame (); - VkBool32 on_validation_callback(VkDebugReportFlagsEXT message_flags, - VkDebugReportObjectTypeEXT object_type, - const char* layer_prefix, - const char* message); + void draw_frame (); + void on_validation_callback(Anvil::DebugMessageSeverityFlags in_severity, + const char* in_message_ptr); void get_buffer_memory_offsets(uint32_t n_sine_pair, uint32_t* out_opt_sine1SB_offset_ptr, @@ -66,45 +64,45 @@ class App uint32_t* out_opt_offset_data_offset_ptr = nullptr); /* Private variables */ - std::weak_ptr m_device_ptr; - std::shared_ptr m_instance_ptr; - std::weak_ptr m_physical_device_ptr; - std::shared_ptr m_present_queue_ptr; - std::shared_ptr m_rendering_surface_ptr; - std::shared_ptr m_swapchain_ptr; - Anvil::Time m_time; - std::shared_ptr m_window_ptr; - - std::shared_ptr m_consumer_dsg_ptr; - std::shared_ptr m_producer_dsg_ptr; - - std::shared_ptr m_consumer_fs_ptr; - std::shared_ptr m_consumer_vs_ptr; - std::shared_ptr m_producer_cs_ptr; - - Anvil::GraphicsPipelineID m_consumer_pipeline_id; - std::shared_ptr m_consumer_render_pass_ptr; - Anvil::ComputePipelineID m_producer_pipeline_id; - - std::shared_ptr m_command_buffers [N_SWAPCHAIN_IMAGES]; - std::shared_ptr m_depth_images [N_SWAPCHAIN_IMAGES]; - std::shared_ptr m_depth_image_views[N_SWAPCHAIN_IMAGES]; - std::shared_ptr m_fbos [N_SWAPCHAIN_IMAGES]; - - std::shared_ptr m_sine_color_buffer_ptr; /* N_SINE_PAIRS * 2 * vec2; data stored as R8G8_UNORM */ - VkDeviceSize m_sine_color_buffer_size; - std::shared_ptr m_sine_data_buffer_ptr; - std::vector m_sine_data_buffer_offsets; - VkDeviceSize m_sine_data_buffer_size; - std::shared_ptr m_sine_offset_data_buffer_ptr; - std::vector m_sine_offset_data_buffer_offsets; - VkDeviceSize m_sine_offset_data_buffer_size; - std::shared_ptr m_sine_props_data_buffer_ptr; - VkDeviceSize m_sine_props_data_buffer_size_per_swapchain_image; + Anvil::BaseDeviceUniquePtr m_device_ptr; + Anvil::InstanceUniquePtr m_instance_ptr; + const Anvil::PhysicalDevice* m_physical_device_ptr; + Anvil::Queue* m_present_queue_ptr; + Anvil::RenderingSurfaceUniquePtr m_rendering_surface_ptr; + Anvil::SwapchainUniquePtr m_swapchain_ptr; + Anvil::Time m_time; + Anvil::WindowUniquePtr m_window_ptr; + + Anvil::DescriptorSetGroupUniquePtr m_consumer_dsg_ptr; + Anvil::DescriptorSetGroupUniquePtr m_producer_dsg_ptr; + + std::unique_ptr m_consumer_fs_ptr; + std::unique_ptr m_consumer_vs_ptr; + std::unique_ptr m_producer_cs_ptr; + + Anvil::PipelineID m_consumer_pipeline_id; + Anvil::RenderPassUniquePtr m_consumer_render_pass_ptr; + Anvil::PipelineID m_producer_pipeline_id; + + Anvil::PrimaryCommandBufferUniquePtr m_command_buffers [N_SWAPCHAIN_IMAGES]; + Anvil::ImageUniquePtr m_depth_images [N_SWAPCHAIN_IMAGES]; + Anvil::ImageViewUniquePtr m_depth_image_views[N_SWAPCHAIN_IMAGES]; + Anvil::FramebufferUniquePtr m_fbos [N_SWAPCHAIN_IMAGES]; + + Anvil::BufferUniquePtr m_sine_color_buffer_ptr; /* N_SINE_PAIRS * 2 * vec2; data stored as R8G8_UNORM */ + VkDeviceSize 
m_sine_color_buffer_size; + Anvil::BufferUniquePtr m_sine_data_buffer_ptr; + std::vector m_sine_data_buffer_offsets; + VkDeviceSize m_sine_data_buffer_size; + Anvil::BufferUniquePtr m_sine_offset_data_buffer_ptr; + std::vector m_sine_offset_data_buffer_offsets; + VkDeviceSize m_sine_offset_data_buffer_size; + Anvil::BufferUniquePtr m_sine_props_data_buffer_ptr; + VkDeviceSize m_sine_props_data_buffer_size_per_swapchain_image; uint32_t m_n_last_semaphore_used; const uint32_t m_n_swapchain_images; - std::vector > m_frame_signal_semaphores; - std::vector > m_frame_wait_semaphores; + std::vector m_frame_signal_semaphores; + std::vector m_frame_wait_semaphores; }; diff --git a/examples/DynamicBuffers/src/app.cpp b/examples/DynamicBuffers/src/app.cpp index a586d1b0..0a862fe4 100644 --- a/examples/DynamicBuffers/src/app.cpp +++ b/examples/DynamicBuffers/src/app.cpp @@ -26,13 +26,25 @@ /* Uncomment the #define below to enable validation */ // #define ENABLE_VALIDATION + #include #include #include "config.h" +#include "misc/buffer_create_info.h" +#include "misc/compute_pipeline_create_info.h" +#include "misc/framebuffer_create_info.h" #include "misc/glsl_to_spirv.h" +#include "misc/graphics_pipeline_create_info.h" +#include "misc/image_create_info.h" +#include "misc/image_view_create_info.h" +#include "misc/instance_create_info.h" #include "misc/io.h" #include "misc/memory_allocator.h" #include "misc/object_tracker.h" +#include "misc/rendering_surface_create_info.h" +#include "misc/render_pass_create_info.h" +#include "misc/semaphore_create_info.h" +#include "misc/swapchain_create_info.h" #include "misc/time.h" #include "misc/window_factory.h" #include "wrappers/buffer.h" @@ -67,7 +79,6 @@ #endif #endif - /* Low-level #defines follow.. */ #define APP_NAME "Dynamic buffers example" @@ -179,8 +190,10 @@ static const char* g_glsl_producer_comp = App::App() - :m_n_last_semaphore_used(0), - m_n_swapchain_images (N_SWAPCHAIN_IMAGES) + :m_consumer_pipeline_id (UINT32_MAX), + m_n_last_semaphore_used(0), + m_n_swapchain_images (N_SWAPCHAIN_IMAGES), + m_producer_pipeline_id (UINT32_MAX) { // .. 
} @@ -192,7 +205,24 @@ App::~App() void App::deinit() { - vkDeviceWaitIdle(m_device_ptr.lock()->get_device_vk() ); + auto compute_pipeline_manager_ptr = m_device_ptr->get_compute_pipeline_manager (); + auto gfx_pipeline_manager_ptr = m_device_ptr->get_graphics_pipeline_manager(); + + Anvil::Vulkan::vkDeviceWaitIdle(m_device_ptr->get_device_vk() ); + + if (m_consumer_pipeline_id != UINT32_MAX) + { + gfx_pipeline_manager_ptr->delete_pipeline(m_consumer_pipeline_id); + + m_consumer_pipeline_id = UINT32_MAX; + } + + if (m_producer_pipeline_id != UINT32_MAX) + { + compute_pipeline_manager_ptr->delete_pipeline(m_producer_pipeline_id); + + m_producer_pipeline_id = UINT32_MAX; + } m_frame_signal_semaphores.clear(); m_frame_wait_semaphores.clear(); @@ -236,39 +266,41 @@ void App::deinit() m_sine_offset_data_buffer_ptr.reset(); m_sine_props_data_buffer_ptr.reset(); - m_present_queue_ptr.reset(); m_rendering_surface_ptr.reset(); m_swapchain_ptr.reset(); m_window_ptr.reset(); - m_device_ptr.lock()->destroy(); m_device_ptr.reset(); - m_instance_ptr->destroy(); m_instance_ptr.reset(); } void App::draw_frame() { - std::shared_ptr curr_frame_signal_semaphore_ptr; - std::shared_ptr curr_frame_wait_semaphore_ptr; - std::shared_ptr device_locked_ptr = m_device_ptr.lock(); - static uint32_t n_frames_rendered = 0; - uint32_t n_swapchain_image; - std::shared_ptr present_wait_semaphore_ptr; - const VkPipelineStageFlags wait_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT; + Anvil::Semaphore* curr_frame_signal_semaphore_ptr = nullptr; + Anvil::Semaphore* curr_frame_wait_semaphore_ptr = nullptr; + static uint32_t n_frames_rendered = 0; + uint32_t n_swapchain_image; + Anvil::Semaphore* present_wait_semaphore_ptr = nullptr; + const Anvil::PipelineStageFlags wait_stage_mask = Anvil::PipelineStageFlagBits::ALL_COMMANDS_BIT; /* Determine the signal + wait semaphores to use for drawing this frame */ m_n_last_semaphore_used = (m_n_last_semaphore_used + 1) % m_n_swapchain_images; - curr_frame_signal_semaphore_ptr = m_frame_signal_semaphores[m_n_last_semaphore_used]; - curr_frame_wait_semaphore_ptr = m_frame_wait_semaphores [m_n_last_semaphore_used]; + curr_frame_signal_semaphore_ptr = m_frame_signal_semaphores[m_n_last_semaphore_used].get(); + curr_frame_wait_semaphore_ptr = m_frame_wait_semaphores [m_n_last_semaphore_used].get(); present_wait_semaphore_ptr = curr_frame_signal_semaphore_ptr; /* Determine the semaphore which the swapchain image */ - n_swapchain_image = m_swapchain_ptr->acquire_image(curr_frame_wait_semaphore_ptr, - true); /* in_should_block */ + { + const auto acquire_result = m_swapchain_ptr->acquire_image(curr_frame_wait_semaphore_ptr, + &n_swapchain_image, + true /* in_should_block */); + + ANVIL_REDUNDANT_VARIABLE_CONST(acquire_result); + anvil_assert (acquire_result == Anvil::SwapchainOperationErrorCode::SUCCESS); + } /* Update time value, used by the generator compute shader */ const uint64_t time_msec = m_time.get_time_in_msec(); @@ -279,19 +311,27 @@ void App::draw_frame() &t); /* Submit jobs to relevant queues and make sure they are correctly synchronized */ - device_locked_ptr->get_universal_queue(0)->submit_command_buffer_with_signal_wait_semaphores(m_command_buffers[n_swapchain_image], - 1, /* n_semaphores_to_signal */ - &curr_frame_signal_semaphore_ptr, - 1, /* n_semaphores_to_wait_on */ - &curr_frame_wait_semaphore_ptr, - &wait_stage_mask, - false, /* should_block */ - nullptr); - - m_present_queue_ptr->present(m_swapchain_ptr, - n_swapchain_image, - 1, /* n_wait_semaphores */ - 
&present_wait_semaphore_ptr); + m_device_ptr->get_universal_queue(0)->submit( + Anvil::SubmitInfo::create_wait_execute_signal(m_command_buffers[n_swapchain_image].get(), + 1, /* n_semaphores_to_signal */ + &curr_frame_signal_semaphore_ptr, + 1, /* n_semaphores_to_wait_on */ + &curr_frame_wait_semaphore_ptr, + &wait_stage_mask, + false) /* should_block */ + ); + + { + Anvil::SwapchainOperationErrorCode present_result = Anvil::SwapchainOperationErrorCode::DEVICE_LOST; + + m_present_queue_ptr->present(m_swapchain_ptr.get(), + n_swapchain_image, + 1, /* n_wait_semaphores */ + &present_wait_semaphore_ptr, + &present_result); + + anvil_assert(present_result == Anvil::SwapchainOperationErrorCode::SUCCESS); + } ++n_frames_rendered; @@ -340,7 +380,7 @@ void App::get_buffer_memory_offsets(uint32_t n_sine_pair, if (out_opt_offset_data_offset_ptr != nullptr) { - const uint32_t sb_offset_alignment = static_cast(m_physical_device_ptr.lock()->get_device_properties().limits.minStorageBufferOffsetAlignment); + const uint32_t sb_offset_alignment = static_cast(m_physical_device_ptr->get_device_properties().core_vk1_0_properties_ptr->limits.min_storage_buffer_offset_alignment); *out_opt_offset_data_offset_ptr = Anvil::Utils::round_up(static_cast(sizeof(float) * 2), sb_offset_alignment) * n_sine_pair; @@ -349,13 +389,12 @@ void App::get_buffer_memory_offsets(uint32_t n_sine_pair, void App::init_buffers() { - std::shared_ptr memory_allocator_ptr; - std::shared_ptr physical_device_locked_ptr(m_physical_device_ptr); - const VkDeviceSize sb_data_alignment_requirement = physical_device_locked_ptr->get_device_properties().limits.minStorageBufferOffsetAlignment; - std::unique_ptr sine_offset_data_raw_buffer_ptr; + Anvil::MemoryAllocatorUniquePtr memory_allocator_ptr; + const VkDeviceSize sb_data_alignment_requirement = m_physical_device_ptr->get_device_properties().core_vk1_0_properties_ptr->limits.min_storage_buffer_offset_alignment; + std::unique_ptr sine_offset_data_raw_buffer_ptr; /* Set up allocators */ - memory_allocator_ptr = Anvil::MemoryAllocator::create_oneshot(m_device_ptr); + memory_allocator_ptr = Anvil::MemoryAllocator::create_oneshot(m_device_ptr.get() ); /* Prepare sine offset data */ m_sine_offset_data_buffer_size = 0; @@ -395,16 +434,21 @@ void App::init_buffers() /* Prepare a buffer object to hold the sine offset data. Note that we fill it with data * after memory allocator actually assigns it a memory block. 
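A side note on the offset math above: each sine pair's slice inside the storage buffers has to start at a multiple of the device's minStorageBufferOffsetAlignment, because those values are later passed as dynamic offsets when the descriptor sets are bound. A small illustrative sketch of the rounding; the helper mirrors what Anvil::Utils::round_up is used for here, with the exact library semantics assumed.

    #include <cstdint>

    // Rounds value up to the next multiple of alignment (alignment > 0).
    static uint32_t round_up(uint32_t value, uint32_t alignment)
    {
        return ((value + alignment - 1) / alignment) * alignment;
    }

    // Offset of sine pair n inside the sine-offset storage buffer: each pair
    // stores two floats, padded to the storage-buffer offset alignment.
    // E.g. with a 256-byte alignment: pair 0 -> 0, pair 1 -> 256, pair 2 -> 512.
    static uint32_t sine_pair_offset(uint32_t n_sine_pair, uint32_t sb_offset_alignment)
    {
        return round_up(static_cast<uint32_t>(sizeof(float) * 2), sb_offset_alignment) * n_sine_pair;
    }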
*/ - m_sine_offset_data_buffer_ptr = Anvil::Buffer::create_nonsparse(m_device_ptr, - m_sine_offset_data_buffer_size, - Anvil::QUEUE_FAMILY_COMPUTE_BIT | Anvil::QUEUE_FAMILY_GRAPHICS_BIT, - VK_SHARING_MODE_CONCURRENT, - VK_BUFFER_USAGE_STORAGE_BUFFER_BIT); + { + auto create_info_ptr = Anvil::BufferCreateInfo::create_no_alloc(m_device_ptr.get(), + m_sine_offset_data_buffer_size, + Anvil::QueueFamilyFlagBits::COMPUTE_BIT | Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, + Anvil::SharingMode::CONCURRENT, + Anvil::BufferCreateFlagBits::NONE, + Anvil::BufferUsageFlagBits::STORAGE_BUFFER_BIT); + + m_sine_offset_data_buffer_ptr = Anvil::Buffer::create(std::move(create_info_ptr) ); + } m_sine_offset_data_buffer_ptr->set_name("Sine offset data buffer"); - memory_allocator_ptr->add_buffer(m_sine_offset_data_buffer_ptr, - 0); /* in_required_memory_features */ + memory_allocator_ptr->add_buffer(m_sine_offset_data_buffer_ptr.get(), + Anvil::MemoryFeatureFlagBits::NONE); /* in_required_memory_features */ /* Now prepare a memory block which is going to hold vertex data generated by * the producer CS */ @@ -433,39 +477,49 @@ void App::init_buffers() m_sine_data_buffer_size *= 2; - m_sine_data_buffer_ptr = Anvil::Buffer::create_nonsparse(m_device_ptr, - m_sine_data_buffer_size, - Anvil::QUEUE_FAMILY_COMPUTE_BIT | Anvil::QUEUE_FAMILY_GRAPHICS_BIT, - VK_SHARING_MODE_CONCURRENT, - VK_BUFFER_USAGE_STORAGE_BUFFER_BIT); + { + auto create_info_ptr = Anvil::BufferCreateInfo::create_no_alloc(m_device_ptr.get(), + m_sine_data_buffer_size, + Anvil::QueueFamilyFlagBits::COMPUTE_BIT | Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, + Anvil::SharingMode::CONCURRENT, + Anvil::BufferCreateFlagBits::NONE, + Anvil::BufferUsageFlagBits::STORAGE_BUFFER_BIT); + + m_sine_data_buffer_ptr = Anvil::Buffer::create(std::move(create_info_ptr) ); + } m_sine_data_buffer_ptr->set_name("Sine data buffer"); - memory_allocator_ptr->add_buffer(m_sine_data_buffer_ptr, - 0); /* in_required_memory_features */ + memory_allocator_ptr->add_buffer(m_sine_data_buffer_ptr.get(), + Anvil::MemoryFeatureFlagBits::NONE); /* We also need some space for a uniform block which is going to hold time info. 
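For readers new to the create-info flow shown above, its shape is: describe the buffer in a CreateInfo, create the wrapper without backing memory ("no_alloc"), then register it with a one-shot memory allocator that binds memory blocks for all registered objects in one go. The sketch below reuses only calls visible in this diff; the surrounding helper and its name are illustrative.

    #include <utility>

    #include "misc/buffer_create_info.h"
    #include "misc/memory_allocator.h"
    #include "wrappers/buffer.h"

    // Illustrative helper: create a storage buffer with no memory bound yet and
    // hand it to a one-shot allocator, which assigns a memory block later.
    Anvil::BufferUniquePtr create_storage_buffer(Anvil::BaseDevice*      in_device_ptr,
                                                 Anvil::MemoryAllocator* in_allocator_ptr,
                                                 VkDeviceSize            in_size)
    {
        auto create_info_ptr = Anvil::BufferCreateInfo::create_no_alloc(in_device_ptr,
                                                                        in_size,
                                                                        Anvil::QueueFamilyFlagBits::COMPUTE_BIT | Anvil::QueueFamilyFlagBits::GRAPHICS_BIT,
                                                                        Anvil::SharingMode::CONCURRENT,
                                                                        Anvil::BufferCreateFlagBits::NONE,
                                                                        Anvil::BufferUsageFlagBits::STORAGE_BUFFER_BIT);

        auto buffer_ptr = Anvil::Buffer::create(std::move(create_info_ptr) );

        // No memory is attached at this point; the allocator binds one when it bakes.
        in_allocator_ptr->add_buffer(buffer_ptr.get(),
                                     Anvil::MemoryFeatureFlagBits::NONE);

        return buffer_ptr;
    }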
*/ - const auto dynamic_ub_alignment_requirement = m_device_ptr.lock()->get_physical_device_properties().limits.minUniformBufferOffsetAlignment; - const auto sine_props_data_buffer_size_per_swapchain_image = Anvil::Utils::round_up(sizeof(float), + const auto dynamic_ub_alignment_requirement = m_device_ptr->get_physical_device_properties().core_vk1_0_properties_ptr->limits.min_uniform_buffer_offset_alignment; + const auto sine_props_data_buffer_size_per_swapchain_image = Anvil::Utils::round_up(static_cast(sizeof(float)), dynamic_ub_alignment_requirement); const auto sine_props_data_buffer_size_total = sine_props_data_buffer_size_per_swapchain_image * N_SWAPCHAIN_IMAGES; m_sine_props_data_buffer_size_per_swapchain_image = sine_props_data_buffer_size_per_swapchain_image; - m_sine_props_data_buffer_ptr = Anvil::Buffer::create_nonsparse(m_device_ptr, - sine_props_data_buffer_size_total, - Anvil::QUEUE_FAMILY_COMPUTE_BIT | Anvil::QUEUE_FAMILY_GRAPHICS_BIT, - VK_SHARING_MODE_CONCURRENT, - VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT); + { + auto create_info_ptr = Anvil::BufferCreateInfo::create_no_alloc(m_device_ptr.get(), + sine_props_data_buffer_size_total, + Anvil::QueueFamilyFlagBits::COMPUTE_BIT | Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, + Anvil::SharingMode::CONCURRENT, + Anvil::BufferCreateFlagBits::NONE, + Anvil::BufferUsageFlagBits::UNIFORM_BUFFER_BIT); + + m_sine_props_data_buffer_ptr = Anvil::Buffer::create(std::move(create_info_ptr) ); + } m_sine_props_data_buffer_ptr->set_name("Sine properties data buffer"); - memory_allocator_ptr->add_buffer(m_sine_props_data_buffer_ptr, - Anvil::MEMORY_FEATURE_FLAG_MAPPABLE); + memory_allocator_ptr->add_buffer(m_sine_props_data_buffer_ptr.get(), + Anvil::MemoryFeatureFlagBits::MAPPABLE_BIT); /* Each sine needs to be assigned a different color. Compute the data and upload it to another buffer object. 
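Returning to the uniform-buffer sizing just above: the time value is a single float, but each swapchain image gets its own slice, padded to minUniformBufferOffsetAlignment, so a frame still in flight never reads a value written for a newer frame. A concrete worked example (alignment and image count are illustrative, not taken from this diff):

    #include <cstdint>

    int main()
    {
        const uint64_t alignment      = 256;  // assumed min_uniform_buffer_offset_alignment
        const uint64_t slice_size     = ((sizeof(float) + alignment - 1) / alignment) * alignment; // 256
        const uint64_t n_images       = 3;    // illustrative swapchain image count
        const uint64_t total_size     = slice_size * n_images;  // 768 bytes allocated in total
        const uint64_t offset_image_2 = slice_size * 2;         // dynamic offset bound for image 2

        return (total_size == 768 && offset_image_2 == 512) ? 0 : 1;
    }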
*/ - std::unique_ptr color_buffer_data_ptr; - unsigned char* color_buffer_data_traveller_ptr = nullptr; + std::unique_ptr color_buffer_data_ptr; + unsigned char* color_buffer_data_traveller_ptr = nullptr; m_sine_color_buffer_size = N_SINE_PAIRS * 2 /* sines per pair */ * (2 * sizeof(char) ) /* R8G8 */; @@ -486,73 +540,76 @@ void App::init_buffers() color_buffer_data_traveller_ptr ++; } - m_sine_color_buffer_ptr = Anvil::Buffer::create_nonsparse(m_device_ptr, - m_sine_color_buffer_size, - Anvil::QUEUE_FAMILY_GRAPHICS_BIT, - VK_SHARING_MODE_EXCLUSIVE, - VK_BUFFER_USAGE_VERTEX_BUFFER_BIT); + { + auto create_info_ptr = Anvil::BufferCreateInfo::create_no_alloc(m_device_ptr.get(), + m_sine_color_buffer_size, + Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, + Anvil::SharingMode::EXCLUSIVE, + Anvil::BufferCreateFlagBits::NONE, + Anvil::BufferUsageFlagBits::VERTEX_BUFFER_BIT); + + m_sine_color_buffer_ptr = Anvil::Buffer::create(std::move(create_info_ptr) ); + } m_sine_color_buffer_ptr->set_name("Sine color data buffer"); - memory_allocator_ptr->add_buffer(m_sine_color_buffer_ptr, - 0); /* in_required_memory_features */ + memory_allocator_ptr->add_buffer(m_sine_color_buffer_ptr.get(), + Anvil::MemoryFeatureFlagBits::NONE); /* Assign memory blocks to buffers and fill them with data */ m_sine_offset_data_buffer_ptr->write(0, /* start_offset */ - m_sine_offset_data_buffer_ptr->get_size(), + m_sine_offset_data_buffer_ptr->get_create_info_ptr()->get_size(), sine_offset_data_raw_buffer_ptr.get() ); m_sine_color_buffer_ptr->write(0, /* start_offset */ - m_sine_color_buffer_ptr->get_size(), + m_sine_color_buffer_ptr->get_create_info_ptr()->get_size(), color_buffer_data_ptr.get() ); } void App::init_command_buffers() { - std::shared_ptr device_locked_ptr (m_device_ptr); - std::shared_ptr gfx_pipeline_manager_ptr (device_locked_ptr->get_graphics_pipeline_manager() ); - const bool is_debug_marker_ext_present (device_locked_ptr->is_ext_debug_marker_extension_enabled() ); - std::shared_ptr producer_pipeline_layout_ptr; - VkImageSubresourceRange subresource_range; - std::shared_ptr universal_queue_ptr (device_locked_ptr->get_universal_queue(0) ); + auto gfx_pipeline_manager_ptr (m_device_ptr->get_graphics_pipeline_manager() ); + const bool is_debug_marker_ext_present (m_device_ptr->get_extension_info()->ext_debug_marker() ); + Anvil::PipelineLayout* producer_pipeline_layout_ptr(nullptr); + Anvil::ImageSubresourceRange subresource_range; + Anvil::Queue* universal_queue_ptr (m_device_ptr->get_universal_queue(0) ); - producer_pipeline_layout_ptr = device_locked_ptr->get_compute_pipeline_manager()->get_compute_pipeline_layout(m_producer_pipeline_id); + producer_pipeline_layout_ptr = m_device_ptr->get_compute_pipeline_manager()->get_pipeline_layout(m_producer_pipeline_id); - subresource_range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; - subresource_range.baseArrayLayer = 0; - subresource_range.baseMipLevel = 0; - subresource_range.layerCount = 1; - subresource_range.levelCount = 1; + subresource_range.aspect_mask = Anvil::ImageAspectFlagBits::COLOR_BIT; + subresource_range.base_array_layer = 0; + subresource_range.base_mip_level = 0; + subresource_range.layer_count = 1; + subresource_range.level_count = 1; /* Set up rendering command buffers. We need one per swap-chain image. 
*/ for (unsigned int n_current_swapchain_image = 0; n_current_swapchain_image < N_SWAPCHAIN_IMAGES; ++n_current_swapchain_image) { - std::shared_ptr draw_cmd_buffer_ptr; + Anvil::PrimaryCommandBufferUniquePtr draw_cmd_buffer_ptr; + + draw_cmd_buffer_ptr = m_device_ptr->get_command_pool_for_queue_family_index(m_device_ptr->get_universal_queue(0)->get_queue_family_index() )->alloc_primary_level_command_buffer(); - draw_cmd_buffer_ptr = device_locked_ptr->get_command_pool(Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL)->alloc_primary_level_command_buffer(); - /* Start recording commands */ draw_cmd_buffer_ptr->start_recording(false, /* one_time_submit */ true /* simultaneous_use_allowed */); /* Switch the swap-chain image layout to renderable */ { - Anvil::ImageBarrier image_barrier(0, /* source_access_mask */ - VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, /* destination_access_mask */ - false, - VK_IMAGE_LAYOUT_UNDEFINED, - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, + Anvil::ImageBarrier image_barrier(Anvil::AccessFlags(), /* source_access_mask */ + Anvil::AccessFlagBits::COLOR_ATTACHMENT_WRITE_BIT, /* destination_access_mask */ + Anvil::ImageLayout::UNDEFINED, + Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, universal_queue_ptr->get_queue_family_index(), universal_queue_ptr->get_queue_family_index(), m_swapchain_ptr->get_image(n_current_swapchain_image), subresource_range); - draw_cmd_buffer_ptr->record_pipeline_barrier(VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, - VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, - VK_FALSE, /* in_by_region */ + draw_cmd_buffer_ptr->record_pipeline_barrier(Anvil::PipelineStageFlagBits::TOP_OF_PIPE_BIT, + Anvil::PipelineStageFlagBits::COLOR_ATTACHMENT_OUTPUT_BIT, + Anvil::DependencyFlagBits::NONE, 0, /* in_memory_barrier_count */ nullptr, /* in_memory_barrier_ptrs */ 0, /* in_buffer_memory_barrier_count */ @@ -566,17 +623,17 @@ void App::init_command_buffers() * We do not need to worry about offset buffer contents getting overwritten by subsequent frames * because we do not render frames ahead of time in this example. 
*/ - Anvil::BufferBarrier t_value_buffer_barrier = Anvil::BufferBarrier(VK_ACCESS_HOST_WRITE_BIT, /* in_source_access_mask */ - VK_ACCESS_UNIFORM_READ_BIT, /* in_destination_access_mask */ + Anvil::BufferBarrier t_value_buffer_barrier = Anvil::BufferBarrier(Anvil::AccessFlagBits::HOST_WRITE_BIT, /* in_source_access_mask */ + Anvil::AccessFlagBits::UNIFORM_READ_BIT, /* in_destination_access_mask */ VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, - m_sine_props_data_buffer_ptr, + m_sine_props_data_buffer_ptr.get(), n_current_swapchain_image * m_sine_props_data_buffer_size_per_swapchain_image, /* in_start_offset */ sizeof(float) ); /* in_size */ - draw_cmd_buffer_ptr->record_pipeline_barrier(VK_PIPELINE_STAGE_HOST_BIT, - VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, - VK_FALSE, + draw_cmd_buffer_ptr->record_pipeline_barrier(Anvil::PipelineStageFlagBits::HOST_BIT, + Anvil::PipelineStageFlagBits::COMPUTE_SHADER_BIT, + Anvil::DependencyFlagBits::NONE, 0, /* in_memory_barrier_count */ nullptr, /* in_memory_barriers_ptr */ 1, /* in_buffer_memory_barrier_count */ @@ -585,7 +642,7 @@ void App::init_command_buffers() nullptr); /* in_image_memory_barriers_ptr */ /* Let's generate some sine offset data using our compute shader */ - draw_cmd_buffer_ptr->record_bind_pipeline(VK_PIPELINE_BIND_POINT_COMPUTE, + draw_cmd_buffer_ptr->record_bind_pipeline(Anvil::PipelineBindPoint::COMPUTE, m_producer_pipeline_id); if (is_debug_marker_ext_present) @@ -605,9 +662,9 @@ void App::init_command_buffers() n_sine_pair < N_SINE_PAIRS; ++n_sine_pair) { - uint32_t dynamic_offsets[3]; - const uint32_t n_dynamic_offsets = sizeof(dynamic_offsets) / sizeof(dynamic_offsets[0]); - std::shared_ptr producer_dses[] = + uint32_t dynamic_offsets[3]; + const uint32_t n_dynamic_offsets = sizeof(dynamic_offsets) / sizeof(dynamic_offsets[0]); + Anvil::DescriptorSet* producer_dses[] = { m_producer_dsg_ptr->get_descriptor_set(0), m_producer_dsg_ptr->get_descriptor_set(1) @@ -622,7 +679,7 @@ void App::init_command_buffers() dynamic_offsets[2] = static_cast(m_sine_props_data_buffer_size_per_swapchain_image * n_current_swapchain_image); - draw_cmd_buffer_ptr->record_bind_descriptor_sets(VK_PIPELINE_BIND_POINT_COMPUTE, + draw_cmd_buffer_ptr->record_bind_descriptor_sets(Anvil::PipelineBindPoint::COMPUTE, producer_pipeline_layout_ptr, 0, /* firstSet */ n_producer_dses, @@ -631,7 +688,7 @@ void App::init_command_buffers() dynamic_offsets); draw_cmd_buffer_ptr->record_push_constants(producer_pipeline_layout_ptr, - VK_SHADER_STAGE_COMPUTE_BIT, + Anvil::ShaderStageFlagBits::COMPUTE_BIT, 0, /* in_offset */ 4, /* in_size */ &n_sine_pair); @@ -648,17 +705,17 @@ void App::init_command_buffers() /* Before we proceed with drawing, we need to flush the buffer data. This step is needed in order to ensure * that the data we have generated in CS is actually visible to the draw call. 
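One more note on the descriptor-set binding a few lines up: per Vulkan's rules, dynamic offsets are consumed in ascending set, then binding, order of the dynamic descriptors in the bound sets, which is how the three entries line up with the producer descriptor sets declared later in this diff. A small illustrative helper; the values for the first two slots come from get_buffer_memory_offsets(), and the exact assignment shown here is an assumption based on that declaration order.

    #include <cstdint>

    //   [0] -> set 0, binding 0 : per-pair slice of the sine offset SSBO
    //   [1] -> set 0, binding 1 : per-pair slice of the sine vertex-data SSBO
    //   [2] -> set 1, binding 0 : per-swapchain-image slice of the time UBO
    static void fill_dynamic_offsets(uint32_t in_offset_data_offset,
                                     uint32_t in_sine_data_offset,
                                     uint32_t in_props_slice_size,
                                     uint32_t in_n_swapchain_image,
                                     uint32_t out_offsets[3])
    {
        out_offsets[0] = in_offset_data_offset;
        out_offsets[1] = in_sine_data_offset;
        out_offsets[2] = in_props_slice_size * in_n_swapchain_image;
    }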
*/ - Anvil::BufferBarrier vertex_buffer_barrier(VK_ACCESS_SHADER_WRITE_BIT, /* in_source_access_mask */ - VK_ACCESS_SHADER_READ_BIT, /* in_destination_access_mask */ + Anvil::BufferBarrier vertex_buffer_barrier(Anvil::AccessFlagBits::SHADER_WRITE_BIT, /* in_source_access_mask */ + Anvil::AccessFlagBits::SHADER_READ_BIT, /* in_destination_access_mask */ VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, - m_sine_data_buffer_ptr, + m_sine_data_buffer_ptr.get(), 0, /* in_offset */ m_sine_data_buffer_size); - draw_cmd_buffer_ptr->record_pipeline_barrier(VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, - VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, - VK_FALSE, /* in_by_region */ + draw_cmd_buffer_ptr->record_pipeline_barrier(Anvil::PipelineStageFlagBits::COMPUTE_SHADER_BIT, + Anvil::PipelineStageFlagBits::VERTEX_SHADER_BIT, + Anvil::DependencyFlagBits::NONE, 0, /* in_memory_barrier_count */ nullptr, /* in_memory_barriers_ptr */ 1, /* in_buffer_memory_barrier_count */ @@ -686,29 +743,30 @@ void App::init_command_buffers() */ draw_cmd_buffer_ptr->record_begin_render_pass(2, /* n_clear_values */ clear_values, - m_fbos[n_current_swapchain_image], + m_fbos[n_current_swapchain_image].get(), render_area, - m_consumer_render_pass_ptr, - VK_SUBPASS_CONTENTS_INLINE); + m_consumer_render_pass_ptr.get(), + Anvil::SubpassContents::INLINE); { - const float max_line_width = m_device_ptr.lock()->get_physical_device_properties().limits.lineWidthRange[1]; - std::shared_ptr renderer_dses[] = + const float max_line_width = m_device_ptr->get_physical_device_properties().core_vk1_0_properties_ptr->limits.line_width_range[1]; + Anvil::DescriptorSet* renderer_dses[] = { m_consumer_dsg_ptr->get_descriptor_set(0), m_consumer_dsg_ptr->get_descriptor_set(1) }; - const uint32_t n_renderer_dses = sizeof(renderer_dses) / sizeof(renderer_dses[0]); + const uint32_t n_renderer_dses = sizeof(renderer_dses) / sizeof(renderer_dses[0]); + auto sine_color_buffer_raw_ptr = m_sine_color_buffer_ptr.get(); - std::shared_ptr renderer_pipeline_layout_ptr; - static const VkDeviceSize sine_color_buffer_start_offset = 0; + Anvil::PipelineLayout* renderer_pipeline_layout_ptr = nullptr; + static const VkDeviceSize sine_color_buffer_start_offset = 0; - renderer_pipeline_layout_ptr = gfx_pipeline_manager_ptr->get_graphics_pipeline_layout(m_consumer_pipeline_id); + renderer_pipeline_layout_ptr = gfx_pipeline_manager_ptr->get_pipeline_layout(m_consumer_pipeline_id); - draw_cmd_buffer_ptr->record_bind_pipeline (VK_PIPELINE_BIND_POINT_GRAPHICS, + draw_cmd_buffer_ptr->record_bind_pipeline (Anvil::PipelineBindPoint::GRAPHICS, m_consumer_pipeline_id); draw_cmd_buffer_ptr->record_bind_vertex_buffers (0, /* startBinding */ 1, /* bindingCount */ - &m_sine_color_buffer_ptr, + &sine_color_buffer_raw_ptr, &sine_color_buffer_start_offset); for (unsigned int n_sine_pair = 0; @@ -743,7 +801,7 @@ void App::init_command_buffers() draw_cmd_buffer_ptr->record_set_line_width(new_line_width); - draw_cmd_buffer_ptr->record_bind_descriptor_sets(VK_PIPELINE_BIND_POINT_GRAPHICS, + draw_cmd_buffer_ptr->record_bind_descriptor_sets(Anvil::PipelineBindPoint::GRAPHICS, renderer_pipeline_layout_ptr, 0, /* firstSet */ n_renderer_dses, @@ -762,31 +820,26 @@ void App::init_command_buffers() /* Close the recording process */ draw_cmd_buffer_ptr->stop_recording(); - m_command_buffers[n_current_swapchain_image] = draw_cmd_buffer_ptr; + m_command_buffers[n_current_swapchain_image] = std::move(draw_cmd_buffer_ptr); } } void App::init_compute_pipelines() { - std::shared_ptr device_locked_ptr 
(m_device_ptr); - std::shared_ptr compute_manager_ptr(device_locked_ptr->get_compute_pipeline_manager() ); - bool result; + auto compute_manager_ptr(m_device_ptr->get_compute_pipeline_manager() ); + bool result; /* Create & configure the compute pipeline */ - result = compute_manager_ptr->add_regular_pipeline(false, /* disable_optimizations */ - false, /* allow_derivatives */ - *m_producer_cs_ptr, - &m_producer_pipeline_id); - anvil_assert(result); + auto producer_pipeline_info_ptr = Anvil::ComputePipelineCreateInfo::create(Anvil::PipelineCreateFlagBits::NONE, + *m_producer_cs_ptr); - result = compute_manager_ptr->attach_push_constant_range_to_pipeline(m_producer_pipeline_id, - 0, /* offset */ - 4, /* size */ - VK_SHADER_STAGE_COMPUTE_BIT); - anvil_assert(result); + producer_pipeline_info_ptr->attach_push_constant_range (0, /* offset */ + 4, /* size */ + Anvil::ShaderStageFlagBits::COMPUTE_BIT); + producer_pipeline_info_ptr->set_descriptor_set_create_info(m_producer_dsg_ptr->get_descriptor_set_create_info() ); - result = compute_manager_ptr->set_pipeline_dsg(m_producer_pipeline_id, - m_producer_dsg_ptr); + result = compute_manager_ptr->add_pipeline(std::move(producer_pipeline_info_ptr), + &m_producer_pipeline_id); anvil_assert(result); result = compute_manager_ptr->bake(); @@ -795,74 +848,77 @@ void App::init_compute_pipelines() void App::init_dsgs() { + auto consumer_dsg_create_info_ptrs = std::vector(2); + auto producer_dsg_create_info_ptrs = std::vector(2); + + consumer_dsg_create_info_ptrs[0] = Anvil::DescriptorSetCreateInfo::create(); + consumer_dsg_create_info_ptrs[1] = Anvil::DescriptorSetCreateInfo::create(); + producer_dsg_create_info_ptrs[0] = Anvil::DescriptorSetCreateInfo::create(); + producer_dsg_create_info_ptrs[1] = Anvil::DescriptorSetCreateInfo::create(); + + consumer_dsg_create_info_ptrs[0]->add_binding(0, /* binding */ + Anvil::DescriptorType::STORAGE_BUFFER_DYNAMIC, + 1, /* n_elements */ + Anvil::ShaderStageFlagBits::VERTEX_BIT); + consumer_dsg_create_info_ptrs[1]->add_binding(0, /* binding */ + Anvil::DescriptorType::STORAGE_BUFFER_DYNAMIC, + 1, /* n_elements */ + Anvil::ShaderStageFlagBits::VERTEX_BIT); + + producer_dsg_create_info_ptrs[0]->add_binding(0, /* binding */ + Anvil::DescriptorType::STORAGE_BUFFER_DYNAMIC, + 1, /* n_elements */ + Anvil::ShaderStageFlagBits::COMPUTE_BIT); + producer_dsg_create_info_ptrs[0]->add_binding(1, /* binding */ + Anvil::DescriptorType::STORAGE_BUFFER_DYNAMIC, + 1, /* n_elements */ + Anvil::ShaderStageFlagBits::COMPUTE_BIT); + producer_dsg_create_info_ptrs[1]->add_binding(0, /* binding */ + Anvil::DescriptorType::UNIFORM_BUFFER_DYNAMIC, + 1, /* n_elements */ + Anvil::ShaderStageFlagBits::COMPUTE_BIT); + /* Create the descriptor set layouts for the generator program. 
*/ - m_producer_dsg_ptr = Anvil::DescriptorSetGroup::create(m_device_ptr, - false, /* releaseable_sets */ - 2 /* n_sets */); - - m_producer_dsg_ptr->add_binding(0, /* n_set */ - 0, /* binding */ - VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, - 1, /* n_elements */ - VK_SHADER_STAGE_COMPUTE_BIT); - m_producer_dsg_ptr->add_binding(0, /* n_set */ - 1, /* binding */ - VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, - 1, /* n_elements */ - VK_SHADER_STAGE_COMPUTE_BIT); - m_producer_dsg_ptr->add_binding(1, /* n_set */ - 0, /* binding */ - VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, - 1, /* n_elements */ - VK_SHADER_STAGE_COMPUTE_BIT); + m_producer_dsg_ptr = Anvil::DescriptorSetGroup::create(m_device_ptr.get(), + producer_dsg_create_info_ptrs); + m_producer_dsg_ptr->set_binding_item(0, /* n_set */ 0, /* binding_index */ - Anvil::DescriptorSet::DynamicStorageBufferBindingElement(m_sine_offset_data_buffer_ptr, + Anvil::DescriptorSet::DynamicStorageBufferBindingElement(m_sine_offset_data_buffer_ptr.get(), 0, /* in_start_offset */ sizeof(float) * 2) ); m_producer_dsg_ptr->set_binding_item(0, /* n_set */ 1, /* binding_index */ - Anvil::DescriptorSet::DynamicStorageBufferBindingElement(m_sine_data_buffer_ptr, + Anvil::DescriptorSet::DynamicStorageBufferBindingElement(m_sine_data_buffer_ptr.get(), 0, /* in_start_offset */ sizeof(float) * 4 * N_VERTICES_PER_SINE * 2) ); m_producer_dsg_ptr->set_binding_item(1, /* n_set */ 0, /* binding_index */ - Anvil::DescriptorSet::DynamicUniformBufferBindingElement(m_sine_props_data_buffer_ptr, + Anvil::DescriptorSet::DynamicUniformBufferBindingElement(m_sine_props_data_buffer_ptr.get(), 0, /* in_start_offset */ m_sine_props_data_buffer_size_per_swapchain_image) ); /* Set up the descriptor set layout for the renderer program. */ - m_consumer_dsg_ptr = Anvil::DescriptorSetGroup::create(m_device_ptr, - false, /* releaseable_sets */ - 2 /* n_sets */); - - - m_consumer_dsg_ptr->add_binding(0, /* n_set */ - 0, /* binding */ - VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, - 1, /* n_elements */ - VK_SHADER_STAGE_VERTEX_BIT); - m_consumer_dsg_ptr->add_binding(1, /* n_set */ - 0, /* binding */ - VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, - 1, /* n_elements */ - VK_SHADER_STAGE_VERTEX_BIT); + m_consumer_dsg_ptr = Anvil::DescriptorSetGroup::create(m_device_ptr.get(), + consumer_dsg_create_info_ptrs); + m_consumer_dsg_ptr->set_binding_item(0, /* n_set */ 0, /* binding_index */ - Anvil::DescriptorSet::DynamicStorageBufferBindingElement(m_sine_data_buffer_ptr, + Anvil::DescriptorSet::DynamicStorageBufferBindingElement(m_sine_data_buffer_ptr.get(), 0, /* in_start_offset */ sizeof(float) * 4 * N_VERTICES_PER_SINE) ); m_consumer_dsg_ptr->set_binding_item(1, /* n_set */ 0, /* binding_index */ - Anvil::DescriptorSet::DynamicStorageBufferBindingElement(m_sine_data_buffer_ptr, + Anvil::DescriptorSet::DynamicStorageBufferBindingElement(m_sine_data_buffer_ptr.get(), 0, /* in_start_offset */ sizeof(float) * 4 * N_VERTICES_PER_SINE) ); } void App::init_events() { + /* Stub */ } void App::init_framebuffers() @@ -873,117 +929,120 @@ void App::init_framebuffers() n_swapchain_image < N_SWAPCHAIN_IMAGES; ++n_swapchain_image) { - std::shared_ptr result_fb_ptr; + Anvil::FramebufferUniquePtr result_fb_ptr; - result_fb_ptr = Anvil::Framebuffer::create(m_device_ptr, - WINDOW_WIDTH, - WINDOW_HEIGHT, - 1); /* n_layers */ + { + auto create_info_ptr = Anvil::FramebufferCreateInfo::create(m_device_ptr.get(), + WINDOW_WIDTH, + WINDOW_HEIGHT, + 1); /* n_layers */ - result_fb_ptr->set_name_formatted("Framebuffer for 
swapchain image [%d]", - n_swapchain_image); + result = create_info_ptr->add_attachment(m_swapchain_ptr->get_image_view(n_swapchain_image), + nullptr); /* out_opt_attachment_id_ptr */ + anvil_assert(result); - result = result_fb_ptr->add_attachment(m_swapchain_ptr->get_image_view(n_swapchain_image), - nullptr); /* out_opt_attachment_id_ptr */ - anvil_assert(result); + result = create_info_ptr->add_attachment(m_depth_image_views[n_swapchain_image].get(), + nullptr); /* out_opt_attachment_id_ptr */ - result = result_fb_ptr->add_attachment(m_depth_image_views[n_swapchain_image], - nullptr); /* out_opt_attachment_id_ptr */ + result_fb_ptr = Anvil::Framebuffer::create(std::move(create_info_ptr) ); + } - m_fbos[n_swapchain_image] = result_fb_ptr; + result_fb_ptr->set_name_formatted("Framebuffer for swapchain image [%d]", + n_swapchain_image); + + m_fbos[n_swapchain_image] = std::move(result_fb_ptr); } } void App::init_gfx_pipelines() { - std::shared_ptr device_locked_ptr(m_device_ptr); - std::shared_ptr gfx_manager_ptr (device_locked_ptr->get_graphics_pipeline_manager() ); - bool result; + auto gfx_manager_ptr = m_device_ptr->get_graphics_pipeline_manager(); + auto renderpass_create_info_ptr = Anvil::RenderPassCreateInfoUniquePtr(new Anvil::RenderPassCreateInfo(m_device_ptr.get() ) ); /* Create a renderpass instance */ #ifdef ENABLE_OFFSCREEN_RENDERING - const VkImageLayout final_layout = VK_IMAGE_LAYOUT_GENERAL; + const auto final_layout = Anvil::ImageLayout::GENERAL; #else - const VkImageLayout final_layout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR; + const auto final_layout = Anvil::ImageLayout::PRESENT_SRC_KHR; #endif Anvil::RenderPassAttachmentID render_pass_color_attachment_id = -1; Anvil::RenderPassAttachmentID render_pass_depth_attachment_id = -1; Anvil::SubPassID render_pass_subpass_id = -1; - m_consumer_render_pass_ptr = Anvil::RenderPass::create(m_device_ptr, - m_swapchain_ptr); + renderpass_create_info_ptr->add_color_attachment(m_swapchain_ptr->get_create_info_ptr()->get_format(), + Anvil::SampleCountFlagBits::_1_BIT, + Anvil::AttachmentLoadOp::CLEAR, + Anvil::AttachmentStoreOp::STORE, + Anvil::ImageLayout::UNDEFINED, + final_layout, + false, /* may_alias */ + &render_pass_color_attachment_id); + + renderpass_create_info_ptr->add_depth_stencil_attachment(m_depth_images[0]->get_create_info_ptr()->get_format(), + m_depth_images[0]->get_create_info_ptr()->get_sample_count(), + Anvil::AttachmentLoadOp::CLEAR, /* depth_load_op */ + Anvil::AttachmentStoreOp::DONT_CARE, /* depth_store_op */ + Anvil::AttachmentLoadOp::DONT_CARE, /* stencil_load_op */ + Anvil::AttachmentStoreOp::DONT_CARE, /* stencil_store_op */ + Anvil::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL, /* initial_layout */ + Anvil::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL, /* final_layout */ + false, /* may_alias */ + &render_pass_depth_attachment_id); + + renderpass_create_info_ptr->add_subpass(&render_pass_subpass_id); + + renderpass_create_info_ptr->add_subpass_color_attachment (render_pass_subpass_id, + Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, + render_pass_color_attachment_id, + 0, /* location */ + nullptr); /* opt_attachment_resolve_id_ptr */ + renderpass_create_info_ptr->add_subpass_depth_stencil_attachment(render_pass_subpass_id, + Anvil::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + render_pass_depth_attachment_id); + + m_consumer_render_pass_ptr = Anvil::RenderPass::create(std::move(renderpass_create_info_ptr), + m_swapchain_ptr.get() ); m_consumer_render_pass_ptr->set_name("Consumer renderpass"); - result = 
m_consumer_render_pass_ptr->add_color_attachment(m_swapchain_ptr->get_image_format(), - VK_SAMPLE_COUNT_1_BIT, - VK_ATTACHMENT_LOAD_OP_CLEAR, - VK_ATTACHMENT_STORE_OP_STORE, - VK_IMAGE_LAYOUT_UNDEFINED, - final_layout, - false, /* may_alias */ - &render_pass_color_attachment_id); - anvil_assert(result); - - result = m_consumer_render_pass_ptr->add_depth_stencil_attachment(m_depth_images[0]->get_image_format(), - m_depth_images[0]->get_image_sample_count(), - VK_ATTACHMENT_LOAD_OP_CLEAR, /* depth_load_op */ - VK_ATTACHMENT_STORE_OP_DONT_CARE, /* depth_store_op */ - VK_ATTACHMENT_LOAD_OP_DONT_CARE, /* stencil_load_op */ - VK_ATTACHMENT_STORE_OP_DONT_CARE, /* stencil_store_op */ - VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, /* initial_layout */ - VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, /* final_layout */ - false, /* may_alias */ - &render_pass_depth_attachment_id); - anvil_assert(result); - - result = m_consumer_render_pass_ptr->add_subpass(*m_consumer_fs_ptr, - Anvil::ShaderModuleStageEntryPoint(), /* geometry_shader */ - Anvil::ShaderModuleStageEntryPoint(), /* tess_control_shader */ - Anvil::ShaderModuleStageEntryPoint(), /* tess_evaluation_shader */ - *m_consumer_vs_ptr, - &render_pass_subpass_id); - anvil_assert(result); - - result = m_consumer_render_pass_ptr->add_subpass_color_attachment (render_pass_subpass_id, - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, - render_pass_color_attachment_id, - 0, /* location */ - nullptr); /* opt_attachment_resolve_id_ptr */ - result &= m_consumer_render_pass_ptr->add_subpass_depth_stencil_attachment(render_pass_subpass_id, - render_pass_depth_attachment_id, - VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL); - anvil_assert(result); /* Set up the graphics pipeline for the main subpass */ - result = m_consumer_render_pass_ptr->get_subpass_graphics_pipeline_id(render_pass_subpass_id, - &m_consumer_pipeline_id); - anvil_assert(result); + auto consumer_pipeline_info_ptr = Anvil::GraphicsPipelineCreateInfo::create(Anvil::PipelineCreateFlagBits::NONE, + m_consumer_render_pass_ptr.get(), + render_pass_subpass_id, + *m_consumer_fs_ptr, + Anvil::ShaderModuleStageEntryPoint(), /* geometry_shader */ + Anvil::ShaderModuleStageEntryPoint(), /* tess_control_shader */ + Anvil::ShaderModuleStageEntryPoint(), /* tess_evaluation_shader */ + *m_consumer_vs_ptr); + + { + Anvil::VertexInputAttribute attribute(0, /* in_location */ + Anvil::Format::R8G8_UNORM, + 0); /* in_offset_in_bytes */ + + consumer_pipeline_info_ptr->add_vertex_binding(0, /* in_binding */ + Anvil::VertexInputRate::INSTANCE, + sizeof(char) * 2, /* in_stride_in_bytes */ + 1, /* in_attribute_ptrs */ + &attribute); + } - gfx_manager_ptr->add_vertex_attribute (m_consumer_pipeline_id, - 0, /* location */ - VK_FORMAT_R8G8_UNORM, - 0, /* offset_in_bytes */ - sizeof(char) * 2, /* stride_in_bytes */ - VK_VERTEX_INPUT_RATE_INSTANCE); - gfx_manager_ptr->set_pipeline_dsg (m_consumer_pipeline_id, - m_consumer_dsg_ptr); - gfx_manager_ptr->set_input_assembly_properties(m_consumer_pipeline_id, - VK_PRIMITIVE_TOPOLOGY_LINE_STRIP); - gfx_manager_ptr->set_rasterization_properties (m_consumer_pipeline_id, - VK_POLYGON_MODE_FILL, - VK_CULL_MODE_NONE, - VK_FRONT_FACE_COUNTER_CLOCKWISE, - 1.0f /* line_width */); - gfx_manager_ptr->toggle_depth_test (m_consumer_pipeline_id, - true, /* should_enable */ - VK_COMPARE_OP_LESS_OR_EQUAL); - gfx_manager_ptr->toggle_depth_writes (m_consumer_pipeline_id, - true); /* should_enable */ - gfx_manager_ptr->toggle_dynamic_states (m_consumer_pipeline_id, - true, /* should_enable 
*/ - Anvil::GraphicsPipelineManager::DYNAMIC_STATE_LINE_WIDTH_BIT); + consumer_pipeline_info_ptr->set_descriptor_set_create_info(m_consumer_dsg_ptr->get_descriptor_set_create_info() ); + consumer_pipeline_info_ptr->set_primitive_topology (Anvil::PrimitiveTopology::LINE_STRIP); + consumer_pipeline_info_ptr->set_rasterization_properties (Anvil::PolygonMode::FILL, + Anvil::CullModeFlagBits::NONE, + Anvil::FrontFace::COUNTER_CLOCKWISE, + 1.0f /* line_width */); + consumer_pipeline_info_ptr->toggle_depth_test (true, /* should_enable */ + Anvil::CompareOp::LESS_OR_EQUAL); + consumer_pipeline_info_ptr->toggle_depth_writes (true); /* should_enable */ + consumer_pipeline_info_ptr->toggle_dynamic_state (true, /* should_enable */ + Anvil::DynamicState::LINE_WIDTH); + + gfx_manager_ptr->add_pipeline(std::move(consumer_pipeline_info_ptr), + &m_consumer_pipeline_id); } void App::init_images() @@ -992,35 +1051,43 @@ void App::init_images() n_depth_image < N_SWAPCHAIN_IMAGES; ++n_depth_image) { - m_depth_images[n_depth_image] = Anvil::Image::create_nonsparse(m_device_ptr, - VK_IMAGE_TYPE_2D, - VK_FORMAT_D16_UNORM, - VK_IMAGE_TILING_OPTIMAL, - VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, - WINDOW_WIDTH, - WINDOW_HEIGHT, - 1, /* in_base_mipmap_depth */ - 1, /* in_n_layers */ - VK_SAMPLE_COUNT_1_BIT, - Anvil::QUEUE_FAMILY_GRAPHICS_BIT, - VK_SHARING_MODE_EXCLUSIVE, - false, /* in_use_full_mipmap_chain */ - 0, /* in_memory_features */ - 0, /* in_create_flags */ - VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, - nullptr); /* in_mipmaps_ptr */ - - m_depth_image_views[n_depth_image] = Anvil::ImageView::create_2D(m_device_ptr, - m_depth_images[n_depth_image], + { + auto create_info_ptr = Anvil::ImageCreateInfo::create_alloc(m_device_ptr.get(), + Anvil::ImageType::_2D, + Anvil::Format::D16_UNORM, + Anvil::ImageTiling::OPTIMAL, + Anvil::ImageUsageFlagBits::DEPTH_STENCIL_ATTACHMENT_BIT, + WINDOW_WIDTH, + WINDOW_HEIGHT, + 1, /* in_base_mipmap_depth */ + 1, /* in_n_layers */ + Anvil::SampleCountFlagBits::_1_BIT, + Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, + Anvil::SharingMode::EXCLUSIVE, + false, /* in_use_full_mipmap_chain */ + Anvil::MemoryFeatureFlagBits::NONE, + Anvil::ImageCreateFlagBits::NONE, + Anvil::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + nullptr); /* in_mipmaps_ptr */ + + m_depth_images[n_depth_image] = Anvil::Image::create(std::move(create_info_ptr) ); + } + + { + auto create_info_ptr = Anvil::ImageViewCreateInfo::create_2D(m_device_ptr.get(), + m_depth_images[n_depth_image].get(), 0, /* n_base_layer */ 0, /* n_base_mipmap_level */ 1, /* n_mipmaps */ - VK_IMAGE_ASPECT_DEPTH_BIT, - m_depth_images[n_depth_image]->get_image_format(), - VK_COMPONENT_SWIZZLE_IDENTITY, - VK_COMPONENT_SWIZZLE_IDENTITY, - VK_COMPONENT_SWIZZLE_IDENTITY, - VK_COMPONENT_SWIZZLE_IDENTITY); + Anvil::ImageAspectFlagBits::DEPTH_BIT, + m_depth_images[n_depth_image]->get_create_info_ptr()->get_format(), + Anvil::ComponentSwizzle::IDENTITY, + Anvil::ComponentSwizzle::IDENTITY, + Anvil::ComponentSwizzle::IDENTITY, + Anvil::ComponentSwizzle::IDENTITY); + + m_depth_image_views[n_depth_image] = Anvil::ImageView::create(std::move(create_info_ptr) ); + } m_depth_images [n_depth_image]->set_name_formatted("Depth image [%d]", n_depth_image); @@ -1035,40 +1102,53 @@ void App::init_semaphores() n_semaphore < m_n_swapchain_images; ++n_semaphore) { - std::shared_ptr new_signal_semaphore_ptr = Anvil::Semaphore::create(m_device_ptr); - std::shared_ptr new_wait_semaphore_ptr = Anvil::Semaphore::create(m_device_ptr); + Anvil::SemaphoreUniquePtr 
new_signal_semaphore_ptr; + Anvil::SemaphoreUniquePtr new_wait_semaphore_ptr; + + { + auto create_info_ptr = Anvil::SemaphoreCreateInfo::create(m_device_ptr.get() ); + + new_signal_semaphore_ptr = Anvil::Semaphore::create(std::move(create_info_ptr) ); + + new_signal_semaphore_ptr->set_name_formatted("Signal semaphore [%d]", + n_semaphore); + } + + { + auto create_info_ptr = Anvil::SemaphoreCreateInfo::create(m_device_ptr.get() ); - new_signal_semaphore_ptr->set_name_formatted("Signal semaphore [%d]", - n_semaphore); - new_wait_semaphore_ptr->set_name_formatted ("Wait semaphore [%d]", - n_semaphore); + new_wait_semaphore_ptr = Anvil::Semaphore::create(std::move(create_info_ptr) ); + + new_wait_semaphore_ptr->set_name_formatted("Wait semaphore [%d]", + n_semaphore); + } - m_frame_signal_semaphores.push_back(new_signal_semaphore_ptr); - m_frame_wait_semaphores.push_back (new_wait_semaphore_ptr); + m_frame_signal_semaphores.push_back(std::move(new_signal_semaphore_ptr) ); + m_frame_wait_semaphores.push_back (std::move(new_wait_semaphore_ptr) ); } } void App::init_shaders() { - std::shared_ptr cs_module_ptr; - std::shared_ptr cs_ptr; - std::shared_ptr fs_module_ptr; - std::shared_ptr fs_ptr; - std::shared_ptr vs_module_ptr; - std::shared_ptr vs_ptr; - - cs_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr, + Anvil::ShaderModuleUniquePtr cs_module_ptr; + Anvil::GLSLShaderToSPIRVGeneratorUniquePtr cs_ptr; + Anvil::ShaderModuleUniquePtr fs_module_ptr; + Anvil::GLSLShaderToSPIRVGeneratorUniquePtr fs_ptr; + Anvil::ShaderModuleUniquePtr vs_module_ptr; + Anvil::GLSLShaderToSPIRVGeneratorUniquePtr vs_ptr; + + cs_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr.get(), Anvil::GLSLShaderToSPIRVGenerator::MODE_USE_SPECIFIED_SOURCE, g_glsl_producer_comp, - Anvil::SHADER_STAGE_COMPUTE); - fs_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr, + Anvil::ShaderStage::COMPUTE); + fs_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr.get(), Anvil::GLSLShaderToSPIRVGenerator::MODE_USE_SPECIFIED_SOURCE, g_glsl_consumer_frag, - Anvil::SHADER_STAGE_FRAGMENT); - vs_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr, + Anvil::ShaderStage::FRAGMENT); + vs_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr.get(), Anvil::GLSLShaderToSPIRVGenerator::MODE_USE_SPECIFIED_SOURCE, g_glsl_consumer_vert, - Anvil::SHADER_STAGE_VERTEX); + Anvil::ShaderStage::VERTEX); /* Set up GLSLShader instances */ cs_ptr->add_definition_value_pair("N_SINE_PAIRS", @@ -1080,12 +1160,12 @@ void App::init_shaders() N_VERTICES_PER_SINE); /* Initialize the shader modules */ - cs_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr, - cs_ptr); - fs_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr, - fs_ptr); - vs_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr, - vs_ptr); + cs_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr.get(), + cs_ptr.get () ); + fs_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr.get(), + fs_ptr.get () ); + vs_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr.get(), + vs_ptr.get () ); cs_module_ptr->set_name("Compute shader module"); fs_module_ptr->set_name("Fragment shader module"); @@ -1094,52 +1174,55 @@ void App::init_shaders() /* Prepare entrypoint descriptors. 
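A quick note on the GLSL-to-SPIR-V generator calls above: add_definition_value_pair() is how the example forwards C++-side constants such as N_SINE_PAIRS into the shader source before compilation, effectively acting as a preprocessor define as far as this example relies on it. A condensed sketch using only calls that appear in this diff; the helper name is illustrative, the relevant Anvil wrapper headers are assumed to be included, and the exact parameter types of add_definition_value_pair are assumed.

    #include <cstdint>

    #include "misc/glsl_to_spirv.h"

    Anvil::ShaderModuleUniquePtr build_compute_module(Anvil::BaseDevice* in_device_ptr,
                                                      const char*        in_glsl_source,
                                                      uint32_t           in_n_sine_pairs)
    {
        auto generator_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(in_device_ptr,
                                                                       Anvil::GLSLShaderToSPIRVGenerator::MODE_USE_SPECIFIED_SOURCE,
                                                                       in_glsl_source,
                                                                       Anvil::ShaderStage::COMPUTE);

        // The shader can now use N_SINE_PAIRS as if it had been #define'd in the source.
        generator_ptr->add_definition_value_pair("N_SINE_PAIRS",
                                                 in_n_sine_pairs);

        return Anvil::ShaderModule::create_from_spirv_generator(in_device_ptr,
                                                                generator_ptr.get() );
    }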
*/ m_producer_cs_ptr.reset( new Anvil::ShaderModuleStageEntryPoint("main", - cs_module_ptr, - Anvil::SHADER_STAGE_COMPUTE) + std::move(cs_module_ptr), + Anvil::ShaderStage::COMPUTE) ); m_consumer_fs_ptr.reset( new Anvil::ShaderModuleStageEntryPoint("main", - fs_module_ptr, - Anvil::SHADER_STAGE_FRAGMENT) + std::move(fs_module_ptr), + Anvil::ShaderStage::FRAGMENT) ); m_consumer_vs_ptr.reset( new Anvil::ShaderModuleStageEntryPoint("main", - vs_module_ptr, - Anvil::SHADER_STAGE_VERTEX) + std::move(vs_module_ptr), + Anvil::ShaderStage::VERTEX) ); } void App::init_swapchain() { - std::shared_ptr device_locked_ptr(m_device_ptr); + { + auto create_info_ptr = Anvil::RenderingSurfaceCreateInfo::create(m_instance_ptr.get(), + m_device_ptr.get (), + m_window_ptr.get () ); - m_rendering_surface_ptr = Anvil::RenderingSurface::create(m_instance_ptr, - m_device_ptr, - m_window_ptr); + m_rendering_surface_ptr = Anvil::RenderingSurface::create(std::move(create_info_ptr) ); + } m_rendering_surface_ptr->set_name("Main rendering surface"); - m_swapchain_ptr = device_locked_ptr->create_swapchain(m_rendering_surface_ptr, - m_window_ptr, - VK_FORMAT_B8G8R8A8_UNORM, - VK_PRESENT_MODE_FIFO_KHR, - VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, - m_n_swapchain_images); + m_swapchain_ptr = reinterpret_cast(m_device_ptr.get())->create_swapchain(m_rendering_surface_ptr.get(), + m_window_ptr.get (), + Anvil::Format::B8G8R8A8_UNORM, + Anvil::ColorSpaceKHR::SRGB_NONLINEAR_KHR, + Anvil::PresentModeKHR::FIFO_KHR, + Anvil::ImageUsageFlagBits::COLOR_ATTACHMENT_BIT, + m_n_swapchain_images); m_swapchain_ptr->set_name("Main swapchain"); /* Cache the queue we are going to use for presentation */ const std::vector* present_queue_fams_ptr = nullptr; - if (!m_rendering_surface_ptr->get_queue_families_with_present_support(device_locked_ptr->get_physical_device(), + if (!m_rendering_surface_ptr->get_queue_families_with_present_support(reinterpret_cast(m_device_ptr.get() )->get_physical_device(), &present_queue_fams_ptr) ) { anvil_assert_fail(); } - m_present_queue_ptr = device_locked_ptr->get_queue(present_queue_fams_ptr->at(0), - 0); /* in_n_queue */ + m_present_queue_ptr = m_device_ptr->get_queue_for_queue_family_index(present_queue_fams_ptr->at(0), + 0); /* in_n_queue */ } void App::init_window() @@ -1161,48 +1244,53 @@ void App::init_window() 720, true, /* in_closable */ std::bind(&App::draw_frame, - this) ); + this) + ); } void App::init_vulkan() { /* Create a Vulkan instance */ - m_instance_ptr = Anvil::Instance::create(APP_NAME, /* app_name */ - APP_NAME, /* engine_name */ + { + auto create_info_ptr = Anvil::InstanceCreateInfo::create(APP_NAME, /* app_name */ + APP_NAME, /* engine_name */ #ifdef ENABLE_VALIDATION - std::bind(&App::on_validation_callback, - this, - std::placeholders::_1, - std::placeholders::_2, - std::placeholders::_3, - std::placeholders::_4) ); + std::bind(&App::on_validation_callback, + this, + std::placeholders::_1, + std::placeholders::_2), #else - nullptr); + Anvil::DebugCallbackFunction(), #endif + false); /* in_mt_safe */ + + m_instance_ptr = Anvil::Instance::create(std::move(create_info_ptr) ); + } m_physical_device_ptr = m_instance_ptr->get_physical_device(0); /* Create a Vulkan device */ - m_device_ptr = Anvil::SGPUDevice::create(m_physical_device_ptr, - Anvil::DeviceExtensionConfiguration(), - std::vector(), /* layers */ - false, /* transient_command_buffer_allocs_only */ - false); /* support_resettable_command_buffers */ + { + auto create_info_ptr = Anvil::DeviceCreateInfo::create_sgpu(m_physical_device_ptr, 
+ true, /* in_enable_shader_module_cache */ + Anvil::DeviceExtensionConfiguration(), + std::vector(), /* in_layers */ + Anvil::CommandPoolCreateFlagBits::NONE, + false); /* in_mt_safe */ + + m_device_ptr = Anvil::SGPUDevice::create(std::move(create_info_ptr) ); + } } -VkBool32 App::on_validation_callback(VkDebugReportFlagsEXT message_flags, - VkDebugReportObjectTypeEXT object_type, - const char* layer_prefix, - const char* message) +void App::on_validation_callback(Anvil::DebugMessageSeverityFlags in_severity, + const char* in_message_ptr) { - if ((message_flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0) + if ((in_severity & Anvil::DebugMessageSeverityFlagBits::ERROR_BIT) != 0) { fprintf(stderr, "[!] %s\n", - message); + in_message_ptr); } - - return false; } void App::run() @@ -1211,9 +1299,9 @@ void App::run() } -int main() +int main(int argc, char *argv[]) { - std::shared_ptr app_ptr(new App() ); + std::unique_ptr app_ptr(new App() ); app_ptr->init(); app_ptr->run(); diff --git a/examples/MultiViewport/CMakeLists.txt b/examples/MultiViewport/CMakeLists.txt index a9f0f620..0d0e26e1 100644 --- a/examples/MultiViewport/CMakeLists.txt +++ b/examples/MultiViewport/CMakeLists.txt @@ -16,43 +16,21 @@ if(${CMAKE_SYSTEM_NAME} MATCHES "Linux") endif() endif() -add_subdirectory (../.. "${CMAKE_CURRENT_BINARY_DIR}/anvil") +if (NOT ANVIL_LINK_EXAMPLES) + add_subdirectory (../.. "${CMAKE_CURRENT_BINARY_DIR}/anvil") +endif() +target_include_directories(Anvil PUBLIC "${CMAKE_CURRENT_BINARY_DIR}/anvil/include") include_directories(${Anvil_SOURCE_DIR}/include ${MultiViewport_SOURCE_DIR}/include) -if (WIN32) - include_directories($ENV{VK_SDK_PATH}/Include - $ENV{VULKAN_SDK}/Include) - - if("${CMAKE_SIZEOF_VOID_P}" EQUAL "8") - link_directories ($ENV{VK_SDK_PATH}/Bin - $ENV{VK_SDK_PATH}/Lib - $ENV{VULKAN_SDK}/Bin - $ENV{VULKAN_SDK}/Lib) - else() - link_directories ($ENV{VK_SDK_PATH}/Bin32 - $ENV{VK_SDK_PATH}/Lib32 - $ENV{VULKAN_SDK}/Bin32 - $ENV{VULKAN_SDK}/Lib32) - endif() -else() - include_directories($ENV{VK_SDK_PATH}/x86_64/include - $ENV{VULKAN_SDK}/include - $ENV{VULKAN_SDK}/x86_64/include) - link_directories ($ENV{VK_SDK_PATH}/x86_64/lib - $ENV{VULKAN_SDK}/lib - $ENV{VULKAN_SDK}/x86_64/lib) -endif() - # Create the MultiViewport project. 
add_executable (MultiViewport include/app.h - include/app.h - src/app.cpp) + src/app.cpp) # Add linking dependencies for the example projects -add_dependencies (MultiViewport Anvil) +add_dependencies(MultiViewport Anvil) if (WIN32) target_link_libraries(MultiViewport Anvil) diff --git a/examples/MultiViewport/include/app.h b/examples/MultiViewport/include/app.h index 510105ed..fa15ed7c 100644 --- a/examples/MultiViewport/include/app.h +++ b/examples/MultiViewport/include/app.h @@ -50,48 +50,46 @@ class App void init_window (); void init_vulkan (); - VkFormat get_mesh_color_data_format () const; - uint32_t get_mesh_color_data_n_components() const; - uint32_t get_mesh_color_data_start_offset (uint32_t n_stream, - uint32_t n_vertex = 0) const; - std::shared_ptr get_mesh_data () const; - uint32_t get_mesh_data_size () const; - uint32_t get_mesh_n_vertices () const; - VkFormat get_mesh_vertex_data_format () const; - uint32_t get_mesh_vertex_data_n_components() const; - uint32_t get_mesh_vertex_data_start_offset(uint32_t n_vertex = 0) const; + Anvil::Format get_mesh_color_data_format () const; + uint32_t get_mesh_color_data_n_components() const; + uint32_t get_mesh_color_data_start_offset (uint32_t n_stream, + uint32_t n_vertex = 0) const; + std::unique_ptr get_mesh_data () const; + uint32_t get_mesh_data_size () const; + uint32_t get_mesh_n_vertices () const; + Anvil::Format get_mesh_vertex_data_format () const; + uint32_t get_mesh_vertex_data_n_components() const; + uint32_t get_mesh_vertex_data_start_offset(uint32_t n_vertex = 0) const; void get_scissor_viewport_info(VkRect2D* out_scissors, VkViewport* out_viewports); - void draw_frame (); - VkBool32 on_validation_callback(VkDebugReportFlagsEXT message_flags, - VkDebugReportObjectTypeEXT object_type, - const char* layer_prefix, - const char* message); + void draw_frame (); + void on_validation_callback(Anvil::DebugMessageSeverityFlags in_severity, + const char* in_message_ptr); /* Private variables */ - std::weak_ptr m_device_ptr; - std::shared_ptr m_instance_ptr; - std::weak_ptr m_physical_device_ptr; - std::shared_ptr m_present_queue_ptr; - std::shared_ptr m_rendering_surface_ptr; - std::shared_ptr m_swapchain_ptr; - std::shared_ptr m_window_ptr; + Anvil::BaseDeviceUniquePtr m_device_ptr; + Anvil::InstanceUniquePtr m_instance_ptr; + const Anvil::PhysicalDevice* m_physical_device_ptr; + Anvil::Queue* m_present_queue_ptr; + Anvil::RenderingSurfaceUniquePtr m_rendering_surface_ptr; + Anvil::SwapchainUniquePtr m_swapchain_ptr; + Anvil::WindowUniquePtr m_window_ptr; - std::shared_ptr m_command_buffers[N_SWAPCHAIN_IMAGES]; - std::shared_ptr m_fbos [N_SWAPCHAIN_IMAGES]; - std::shared_ptr m_fs_ptr; - std::shared_ptr m_gs_ptr; - Anvil::GraphicsPipelineID m_pipeline_id; - std::shared_ptr m_renderpass_ptr; - std::shared_ptr m_vs_ptr; - - std::shared_ptr m_mesh_data_buffer_ptr; + Anvil::PrimaryCommandBufferUniquePtr m_command_buffers[N_SWAPCHAIN_IMAGES]; + Anvil::FramebufferUniquePtr m_fbos [N_SWAPCHAIN_IMAGES]; + std::unique_ptr m_fs_ptr; + std::unique_ptr m_gs_ptr; + Anvil::PipelineID m_pipeline_id; + Anvil::RenderPassUniquePtr m_renderpass_ptr; + std::unique_ptr m_vs_ptr; + + Anvil::BufferUniquePtr m_mesh_data_buffer_ptr; uint32_t m_n_last_semaphore_used; const uint32_t m_n_swapchain_images; - std::vector > m_frame_signal_semaphores; - std::vector > m_frame_wait_semaphores; + std::vector m_frame_signal_semaphores; + std::vector m_frame_wait_semaphores; }; diff --git a/examples/MultiViewport/src/app.cpp b/examples/MultiViewport/src/app.cpp index 
e3c038cb..6c89c745 100644 --- a/examples/MultiViewport/src/app.cpp +++ b/examples/MultiViewport/src/app.cpp @@ -29,8 +29,16 @@ #include #include #include "config.h" +#include "misc/buffer_create_info.h" +#include "misc/framebuffer_create_info.h" #include "misc/glsl_to_spirv.h" +#include "misc/graphics_pipeline_create_info.h" +#include "misc/instance_create_info.h" #include "misc/object_tracker.h" +#include "misc/render_pass_create_info.h" +#include "misc/rendering_surface_create_info.h" +#include "misc/semaphore_create_info.h" +#include "misc/swapchain_create_info.h" #include "misc/time.h" #include "misc/window_factory.h" #include "wrappers/buffer.h" @@ -171,7 +179,9 @@ static const char* g_glsl_render_vert = App::App() :m_n_last_semaphore_used(0), - m_n_swapchain_images (N_SWAPCHAIN_IMAGES) + m_n_swapchain_images (N_SWAPCHAIN_IMAGES), + m_physical_device_ptr (nullptr), + m_present_queue_ptr (nullptr) { } @@ -182,12 +192,11 @@ App::~App() void App::deinit() { - vkDeviceWaitIdle(m_device_ptr.lock()->get_device_vk() ); + Anvil::Vulkan::vkDeviceWaitIdle(m_device_ptr->get_device_vk() ); m_frame_signal_semaphores.clear(); m_frame_wait_semaphores.clear(); - m_present_queue_ptr.reset(); m_rendering_surface_ptr.reset(); m_swapchain_ptr.reset(); @@ -211,10 +220,7 @@ void App::deinit() m_renderpass_ptr.reset(); m_vs_ptr.reset(); - m_device_ptr.lock()->destroy(); m_device_ptr.reset(); - - m_instance_ptr->destroy(); m_instance_ptr.reset(); m_window_ptr.reset(); @@ -222,37 +228,51 @@ void App::deinit() void App::draw_frame() { - std::shared_ptr curr_frame_signal_semaphore_ptr; - std::shared_ptr curr_frame_wait_semaphore_ptr; - std::shared_ptr device_locked_ptr = m_device_ptr.lock(); - static uint32_t n_frames_rendered = 0; - uint32_t n_swapchain_image; - std::shared_ptr present_queue_ptr = device_locked_ptr->get_universal_queue(0); - const VkPipelineStageFlags wait_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT; + Anvil::Semaphore* curr_frame_signal_semaphore_ptr = nullptr; + Anvil::Semaphore* curr_frame_wait_semaphore_ptr = nullptr; + static uint32_t n_frames_rendered = 0; + uint32_t n_swapchain_image; + auto present_queue_ptr = m_device_ptr->get_universal_queue(0); + const Anvil::PipelineStageFlags wait_stage_mask = Anvil::PipelineStageFlagBits::ALL_COMMANDS_BIT; /* Determine the signal + wait semaphores to use for drawing this frame */ m_n_last_semaphore_used = (m_n_last_semaphore_used + 1) % m_n_swapchain_images; - curr_frame_signal_semaphore_ptr = m_frame_signal_semaphores[m_n_last_semaphore_used]; - curr_frame_wait_semaphore_ptr = m_frame_wait_semaphores [m_n_last_semaphore_used]; + curr_frame_signal_semaphore_ptr = m_frame_signal_semaphores[m_n_last_semaphore_used].get(); + curr_frame_wait_semaphore_ptr = m_frame_wait_semaphores [m_n_last_semaphore_used].get(); /* Determine the semaphore which the swapchain image */ - n_swapchain_image = m_swapchain_ptr->acquire_image(curr_frame_wait_semaphore_ptr); + { + const auto acquire_result = m_swapchain_ptr->acquire_image(curr_frame_wait_semaphore_ptr, + &n_swapchain_image); + + ANVIL_REDUNDANT_VARIABLE_CONST(acquire_result); + anvil_assert (acquire_result == Anvil::SwapchainOperationErrorCode::SUCCESS); + } /* Submit work chunk and present */ - device_locked_ptr->get_universal_queue(0)->submit_command_buffer_with_signal_wait_semaphores(m_command_buffers[n_swapchain_image], - 1, /* n_semaphores_to_signal */ - &curr_frame_signal_semaphore_ptr, - 1, /* n_semaphores_to_wait_on */ - &curr_frame_wait_semaphore_ptr, - &wait_stage_mask, - false, /* 
should_block */ - nullptr); /* opt_fence_ptr */ - - present_queue_ptr->present(m_swapchain_ptr, - n_swapchain_image, - 1, /* n_wait_semaphores */ - &curr_frame_signal_semaphore_ptr); + m_device_ptr->get_universal_queue(0)->submit( + Anvil::SubmitInfo::create_wait_execute_signal(m_command_buffers[n_swapchain_image].get(), + 1, /* n_semaphores_to_signal */ + &curr_frame_signal_semaphore_ptr, + 1, /* n_semaphores_to_wait_on */ + &curr_frame_wait_semaphore_ptr, + &wait_stage_mask, + false) /* should_block */ + ); + + { + Anvil::SwapchainOperationErrorCode present_result = Anvil::SwapchainOperationErrorCode::DEVICE_LOST; + + present_queue_ptr->present(m_swapchain_ptr.get(), + n_swapchain_image, + 1, /* n_wait_semaphores */ + &curr_frame_signal_semaphore_ptr, + &present_result); + + ANVIL_REDUNDANT_VARIABLE(present_result); + anvil_assert (present_result == Anvil::SwapchainOperationErrorCode::SUCCESS); + } #ifdef ENABLE_OFFSCREEN_RENDERING { @@ -268,9 +288,9 @@ void App::draw_frame() #endif } -VkFormat App::get_mesh_color_data_format() const +Anvil::Format App::get_mesh_color_data_format() const { - return VK_FORMAT_R32G32B32_SFLOAT; + return Anvil::Format::R32G32B32_SFLOAT; } uint32_t App::get_mesh_color_data_n_components() const @@ -292,12 +312,12 @@ uint32_t App::get_mesh_color_data_start_offset(uint32_t n_stream, return result; } -std::shared_ptr App::get_mesh_data() const +std::unique_ptr App::get_mesh_data() const { - const float pi = 3.14159265f; - std::shared_ptr result_data_ptr; - uint32_t result_mesh_data_size = 0; - uint32_t result_n_vertices = 0; + const float pi = 3.14159265f; + std::unique_ptr result_data_ptr; + uint32_t result_mesh_data_size = 0; + uint32_t result_n_vertices = 0; /* Generate the mesh data. We need a total of five data streams: * @@ -312,7 +332,9 @@ std::shared_ptr App::get_mesh_data() const result_mesh_data_size = get_mesh_data_size (); - result_data_ptr.reset(new unsigned char[result_mesh_data_size]); + result_data_ptr.reset( + new uint8_t[result_mesh_data_size] + ); for (unsigned int n_vertex = 0; n_vertex < result_n_vertices; @@ -377,9 +399,9 @@ uint32_t App::get_mesh_n_vertices() const return (1 /* central vertex */ + N_SUBDIVISION_TRIANGLES); } -VkFormat App::get_mesh_vertex_data_format() const +Anvil::Format App::get_mesh_vertex_data_format() const { - return VK_FORMAT_R32G32_SFLOAT; + return Anvil::Format::R32G32_SFLOAT; } uint32_t App::get_mesh_vertex_data_n_components() const @@ -465,38 +487,49 @@ void App::init() void App::init_buffers() { - std::shared_ptr mesh_data_ptr = get_mesh_data(); + auto mesh_data_ptr = get_mesh_data(); /* Initialize the buffer object */ - m_mesh_data_buffer_ptr = Anvil::Buffer::create_nonsparse( - m_device_ptr, - get_mesh_data_size(), - Anvil::QUEUE_FAMILY_GRAPHICS_BIT, - VK_SHARING_MODE_EXCLUSIVE, - VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, - 0, /* in_memory_features */ - mesh_data_ptr.get() ); + auto create_info_ptr = Anvil::BufferCreateInfo::create_alloc(m_device_ptr.get(), + get_mesh_data_size(), + Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, + Anvil::SharingMode::EXCLUSIVE, + Anvil::BufferCreateFlagBits::NONE, + Anvil::BufferUsageFlagBits::VERTEX_BUFFER_BIT, + Anvil::MemoryFeatureFlagBits::NONE); /* in_memory_features */ + + create_info_ptr->set_client_data(mesh_data_ptr.get() ); + + m_mesh_data_buffer_ptr = Anvil::Buffer::create(std::move(create_info_ptr) ); } void App::init_command_buffers() { - std::shared_ptr device_locked_ptr (m_device_ptr); - std::shared_ptr 
gfx_pipeline_manager_ptr(device_locked_ptr->get_graphics_pipeline_manager() ); - VkImageSubresourceRange subresource_range; - std::shared_ptr universal_queue_ptr (device_locked_ptr->get_universal_queue(0) ); + auto gfx_pipeline_manager_ptr(m_device_ptr->get_graphics_pipeline_manager() ); + Anvil::ImageSubresourceRange subresource_range; + auto universal_queue_ptr (m_device_ptr->get_universal_queue(0) ); - subresource_range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; - subresource_range.baseArrayLayer = 0; - subresource_range.baseMipLevel = 0; - subresource_range.layerCount = 1; - subresource_range.levelCount = 1; + subresource_range.aspect_mask = Anvil::ImageAspectFlagBits::COLOR_BIT; + subresource_range.base_array_layer = 0; + subresource_range.base_mip_level = 0; + subresource_range.layer_count = 1; + subresource_range.level_count = 1; /* Set up rendering command buffers. We need one per swap-chain image. */ + uint32_t n_universal_queue_family_indices = 0; + const uint32_t* universal_queue_family_indices = nullptr; + + m_device_ptr->get_queue_family_indices_for_queue_family_type(Anvil::QueueFamilyType::UNIVERSAL, + &n_universal_queue_family_indices, + &universal_queue_family_indices); + + anvil_assert(n_universal_queue_family_indices > 0); + for (unsigned int n_current_swapchain_image = 0; n_current_swapchain_image < N_SWAPCHAIN_IMAGES; ++n_current_swapchain_image) { - std::shared_ptr cmd_buffer_ptr = device_locked_ptr->get_command_pool(Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL)->alloc_primary_level_command_buffer(); + auto cmd_buffer_ptr = m_device_ptr->get_command_pool_for_queue_family_index(universal_queue_family_indices[0])->alloc_primary_level_command_buffer(); /* Start recording commands */ cmd_buffer_ptr->start_recording(false, /* one_time_submit */ @@ -504,19 +537,18 @@ void App::init_command_buffers() { /* Switch the swap-chain image layout to renderable */ { - Anvil::ImageBarrier image_barrier(0, /* source_access_mask */ - VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, - false, - VK_IMAGE_LAYOUT_UNDEFINED, - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, + Anvil::ImageBarrier image_barrier(Anvil::AccessFlagBits::NONE, /* source_access_mask */ + Anvil::AccessFlagBits::COLOR_ATTACHMENT_WRITE_BIT, + Anvil::ImageLayout::UNDEFINED, + Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, universal_queue_ptr->get_queue_family_index(), universal_queue_ptr->get_queue_family_index(), m_swapchain_ptr->get_image(n_current_swapchain_image), subresource_range); - cmd_buffer_ptr->record_pipeline_barrier(VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, - VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, - VK_FALSE, /* in_by_region */ + cmd_buffer_ptr->record_pipeline_barrier(Anvil::PipelineStageFlagBits::TOP_OF_PIPE_BIT, + Anvil::PipelineStageFlagBits::COLOR_ATTACHMENT_OUTPUT_BIT, + Anvil::DependencyFlagBits::NONE, 0, /* in_memory_barrier_count */ nullptr, /* in_memory_barrier_ptrs */ 0, /* in_buffer_memory_barrier_count */ @@ -545,18 +577,18 @@ void App::init_command_buffers() cmd_buffer_ptr->record_begin_render_pass(1, /* in_n_clear_values */ &attachment_clear_value, - m_fbos[n_current_swapchain_image], + m_fbos[n_current_swapchain_image].get(), render_area, - m_renderpass_ptr, - VK_SUBPASS_CONTENTS_INLINE); + m_renderpass_ptr.get(), + Anvil::SubpassContents::INLINE); { - std::shared_ptr mesh_data_buffer_per_binding[g_n_attribute_bindings] = + Anvil::Buffer* mesh_data_buffer_per_binding[g_n_attribute_bindings] = { - m_mesh_data_buffer_ptr, - m_mesh_data_buffer_ptr, - m_mesh_data_buffer_ptr, - m_mesh_data_buffer_ptr, - 
m_mesh_data_buffer_ptr, + m_mesh_data_buffer_ptr.get(), + m_mesh_data_buffer_ptr.get(), + m_mesh_data_buffer_ptr.get(), + m_mesh_data_buffer_ptr.get(), + m_mesh_data_buffer_ptr.get(), }; VkDeviceSize mesh_data_buffer_data_offset_per_binding[g_n_attribute_bindings] = {0}; @@ -566,7 +598,7 @@ void App::init_command_buffers() mesh_data_buffer_data_offset_per_binding[g_color4_attribute_binding] = get_mesh_color_data_start_offset (3 /* n_stream */); mesh_data_buffer_data_offset_per_binding[g_vertex_attribute_binding] = get_mesh_vertex_data_start_offset(); - cmd_buffer_ptr->record_bind_pipeline (VK_PIPELINE_BIND_POINT_GRAPHICS, + cmd_buffer_ptr->record_bind_pipeline (Anvil::PipelineBindPoint::GRAPHICS, m_pipeline_id); cmd_buffer_ptr->record_bind_vertex_buffers(0, /* startBinding */ g_n_attribute_bindings, @@ -585,23 +617,22 @@ void App::init_command_buffers() /* Change the swap-chain image's layout to presentable */ { - Anvil::ImageBarrier image_barrier(VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, /* source_access_mask */ - 0, /* destination_access_mask */ - false, - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, /* old_image_layout */ + Anvil::ImageBarrier image_barrier(Anvil::AccessFlagBits::COLOR_ATTACHMENT_WRITE_BIT, /* source_access_mask */ + Anvil::AccessFlagBits::MEMORY_READ_BIT, /* destination_access_mask */ + Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,/* old_image_layout */ #ifdef ENABLE_OFFSCREEN_RENDERING - VK_IMAGE_LAYOUT_GENERAL, /* new_image_layout */ + Anvil::ImageLayout::GENERAL, /* new_image_layout */ #else - VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, /* new_image_layout */ + Anvil::ImageLayout::PRESENT_SRC_KHR, /* new_image_layout */ #endif universal_queue_ptr->get_queue_family_index(), universal_queue_ptr->get_queue_family_index(), m_swapchain_ptr->get_image(n_current_swapchain_image), subresource_range); - cmd_buffer_ptr->record_pipeline_barrier(VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, - VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, - VK_FALSE, /* in_by_region */ + cmd_buffer_ptr->record_pipeline_barrier(Anvil::PipelineStageFlagBits::COLOR_ATTACHMENT_OUTPUT_BIT, + Anvil::PipelineStageFlagBits::ALL_COMMANDS_BIT, + Anvil::DependencyFlagBits::NONE, 0, /* in_memory_barrier_count */ nullptr, /* in_memory_barrier_ptrs */ 0, /* in_buffer_memory_barrier_count */ @@ -614,7 +645,7 @@ void App::init_command_buffers() /* Close the recording process */ cmd_buffer_ptr->stop_recording(); - m_command_buffers[n_current_swapchain_image] = cmd_buffer_ptr; + m_command_buffers[n_current_swapchain_image] = std::move(cmd_buffer_ptr); } } @@ -626,139 +657,159 @@ void App::init_framebuffers() n_swapchain_image < N_SWAPCHAIN_IMAGES; ++n_swapchain_image) { - m_fbos[n_swapchain_image] = Anvil::Framebuffer::create( - m_device_ptr, - WINDOW_WIDTH, - WINDOW_HEIGHT, - 1 /* n_layers */); + { + auto fb_create_info_ptr = Anvil::FramebufferCreateInfo::create(m_device_ptr.get(), + WINDOW_WIDTH, + WINDOW_HEIGHT, + 1 /* n_layers */); + + result = fb_create_info_ptr->add_attachment(m_swapchain_ptr->get_image_view(n_swapchain_image), + nullptr /* out_opt_attachment_id_ptrs */); + anvil_assert(result); + + m_fbos[n_swapchain_image] = Anvil::Framebuffer::create(std::move(fb_create_info_ptr) ); + } m_fbos[n_swapchain_image]->set_name_formatted("Framebuffer used to render to swapchain image [%d]", n_swapchain_image); - - result = m_fbos[n_swapchain_image]->add_attachment(m_swapchain_ptr->get_image_view(n_swapchain_image), - nullptr /* out_opt_attachment_id_ptrs */); - anvil_assert(result); } } void App::init_gfx_pipelines() { - 
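Framebuffer attachments now have to be declared on the create-info object before Anvil::Framebuffer::create() is called, rather than being added to a live framebuffer afterwards, which is why init_framebuffers() above moves add_attachment() ahead of the create call. A minimal sketch of the per-swapchain-image setup (helper name and signature are illustrative):

    static Anvil::FramebufferUniquePtr create_swapchain_framebuffer(Anvil::BaseDevice* device_ptr,
                                                                    Anvil::ImageView*  swapchain_image_view_ptr,
                                                                    uint32_t           width,
                                                                    uint32_t           height)
    {
        auto fb_create_info_ptr = Anvil::FramebufferCreateInfo::create(device_ptr,
                                                                       width,
                                                                       height,
                                                                       1 /* n_layers */);

        /* The attachment list must be complete before the framebuffer is instantiated. */
        const bool result = fb_create_info_ptr->add_attachment(swapchain_image_view_ptr,
                                                                nullptr /* out_opt_attachment_id_ptr */);
        anvil_assert(result);

        return Anvil::Framebuffer::create(std::move(fb_create_info_ptr) );
    }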
std::shared_ptr device_locked_ptr (m_device_ptr); - std::shared_ptr gfx_pipeline_manager_ptr(device_locked_ptr->get_graphics_pipeline_manager() ); - const VkFormat mesh_color_data_format (get_mesh_color_data_format ()); - const VkFormat mesh_vertex_data_format (get_mesh_vertex_data_format ()); - const uint32_t n_mesh_color_components (get_mesh_color_data_n_components ()); - const uint32_t n_mesh_vertex_components(get_mesh_vertex_data_n_components()); - bool result; + auto gfx_pipeline_manager_ptr(m_device_ptr->get_graphics_pipeline_manager() ); + const Anvil::Format mesh_color_data_format (get_mesh_color_data_format ()); + const Anvil::Format mesh_vertex_data_format (get_mesh_vertex_data_format ()); + const uint32_t n_mesh_color_components (get_mesh_color_data_n_components ()); + const uint32_t n_mesh_vertex_components(get_mesh_vertex_data_n_components ()); /* Create a render pass for the pipeline */ - Anvil::RenderPassAttachmentID render_pass_color_attachment_id; - Anvil::SubPassID render_pass_subpass_id; - VkRect2D scissors[4]; + Anvil::GraphicsPipelineCreateInfoUniquePtr gfx_pipeline_create_info_ptr; + Anvil::RenderPassAttachmentID render_pass_color_attachment_id; + Anvil::SubPassID render_pass_subpass_id; + VkRect2D scissors[4]; get_scissor_viewport_info(scissors, nullptr); /* viewports */ - m_renderpass_ptr = Anvil::RenderPass::create( - m_device_ptr, - m_swapchain_ptr); + { + Anvil::RenderPassCreateInfoUniquePtr render_pass_create_info_ptr(new Anvil::RenderPassCreateInfo(m_device_ptr.get() ) ); + + render_pass_create_info_ptr->add_color_attachment (m_swapchain_ptr->get_create_info_ptr()->get_format(), + Anvil::SampleCountFlagBits::_1_BIT, + Anvil::AttachmentLoadOp::CLEAR, + Anvil::AttachmentStoreOp::STORE, + Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, + Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, + false, /* may_alias */ + &render_pass_color_attachment_id); + render_pass_create_info_ptr->add_subpass (&render_pass_subpass_id); + render_pass_create_info_ptr->add_subpass_color_attachment(render_pass_subpass_id, + Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, + render_pass_color_attachment_id, + 0, /* in_location */ + nullptr); /* in_opt_attachment_resolve_id_ptr */ + + m_renderpass_ptr = Anvil::RenderPass::create(std::move(render_pass_create_info_ptr), + m_swapchain_ptr.get() ); + } m_renderpass_ptr->set_name("Main renderpass"); - result = m_renderpass_ptr->add_color_attachment(m_swapchain_ptr->get_image_format(), - VK_SAMPLE_COUNT_1_BIT, - VK_ATTACHMENT_LOAD_OP_CLEAR, - VK_ATTACHMENT_STORE_OP_STORE, - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, - false, /* may_alias */ - &render_pass_color_attachment_id); - anvil_assert(result); - - result = m_renderpass_ptr->add_subpass(*m_fs_ptr, - *m_gs_ptr, - Anvil::ShaderModuleStageEntryPoint(), - Anvil::ShaderModuleStageEntryPoint(), - *m_vs_ptr, - &render_pass_subpass_id); - anvil_assert(result); - - result = m_renderpass_ptr->get_subpass_graphics_pipeline_id(render_pass_subpass_id, - &m_pipeline_id); - anvil_assert(result); - - result = m_renderpass_ptr->add_subpass_color_attachment(render_pass_subpass_id, - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, - render_pass_color_attachment_id, - 0, /* location */ - nullptr); /* opt_attachment_resolve_id_ptr */ - anvil_assert(result); - /* Configure the graphics pipeline */ - gfx_pipeline_manager_ptr->set_dynamic_viewport_state_properties(m_pipeline_id, - sizeof(scissors) / sizeof(scissors[0]) ); - gfx_pipeline_manager_ptr->set_input_assembly_properties 
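Render pass construction follows the same create-info pattern, and shader stages no longer travel through add_subpass(); they are supplied later via the graphics pipeline create info. A single-subpass sketch of the calls used above (the helper wrapper is illustrative):

    static Anvil::RenderPassUniquePtr create_single_subpass_render_pass(Anvil::BaseDevice* device_ptr,
                                                                        Anvil::Swapchain*  swapchain_ptr,
                                                                        Anvil::SubPassID*  out_subpass_id_ptr)
    {
        Anvil::RenderPassAttachmentID        color_attachment_id;
        Anvil::RenderPassCreateInfoUniquePtr render_pass_create_info_ptr(new Anvil::RenderPassCreateInfo(device_ptr) );

        render_pass_create_info_ptr->add_color_attachment(swapchain_ptr->get_create_info_ptr()->get_format(),
                                                           Anvil::SampleCountFlagBits::_1_BIT,
                                                           Anvil::AttachmentLoadOp::CLEAR,
                                                           Anvil::AttachmentStoreOp::STORE,
                                                           Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,
                                                           Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,
                                                           false, /* may_alias */
                                                          &color_attachment_id);

        render_pass_create_info_ptr->add_subpass                 (out_subpass_id_ptr);
        render_pass_create_info_ptr->add_subpass_color_attachment(*out_subpass_id_ptr,
                                                                   Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,
                                                                   color_attachment_id,
                                                                   0,        /* in_location                      */
                                                                   nullptr); /* in_opt_attachment_resolve_id_ptr */

        return Anvil::RenderPass::create(std::move(render_pass_create_info_ptr),
                                         swapchain_ptr);
    }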
(m_pipeline_id, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN); - gfx_pipeline_manager_ptr->set_rasterization_properties (m_pipeline_id, - VK_POLYGON_MODE_FILL, - VK_CULL_MODE_NONE, - VK_FRONT_FACE_COUNTER_CLOCKWISE, - 1.0f /* line_width */); - gfx_pipeline_manager_ptr->toggle_dynamic_states (m_pipeline_id, - true, /* should_enable */ - Anvil::GraphicsPipelineManager::DYNAMIC_STATE_VIEWPORT_BIT); - gfx_pipeline_manager_ptr->toggle_primitive_restart (m_pipeline_id, - true /* should_enable */); - - result = gfx_pipeline_manager_ptr->add_vertex_attribute(m_pipeline_id, - g_vertex_attribute_location, - mesh_vertex_data_format, - 0, /* offset_in_bytes */ - sizeof(float) * n_mesh_vertex_components, /* stride_in_bytes */ - VK_VERTEX_INPUT_RATE_VERTEX, - g_vertex_attribute_binding); - result &= gfx_pipeline_manager_ptr->add_vertex_attribute(m_pipeline_id, - g_color1_attribute_location, - mesh_color_data_format, - 0, /* offset_in_bytes */ - sizeof(float) * n_mesh_color_components, /* stride_in_bytes */ - VK_VERTEX_INPUT_RATE_VERTEX, - g_color1_attribute_binding); - result &= gfx_pipeline_manager_ptr->add_vertex_attribute(m_pipeline_id, - g_color2_attribute_location, - mesh_color_data_format, - 0, /* offset_in_bytes */ - sizeof(float) * n_mesh_color_components, /* stride_in_bytes */ - VK_VERTEX_INPUT_RATE_VERTEX, - g_color2_attribute_binding); - result &= gfx_pipeline_manager_ptr->add_vertex_attribute(m_pipeline_id, - g_color3_attribute_location, - mesh_color_data_format, - 0, /* offset_in_bytes */ - sizeof(float) * n_mesh_color_components, /* stride_in_bytes */ - VK_VERTEX_INPUT_RATE_VERTEX, - g_color3_attribute_binding); - result &= gfx_pipeline_manager_ptr->add_vertex_attribute(m_pipeline_id, - g_color4_attribute_location, - mesh_color_data_format, - 0, /* offset_in_bytes */ - sizeof(float) * n_mesh_color_components, /* stride_in_bytes */ - VK_VERTEX_INPUT_RATE_VERTEX, - g_color4_attribute_binding); - - anvil_assert(result); + gfx_pipeline_create_info_ptr = Anvil::GraphicsPipelineCreateInfo::create(Anvil::PipelineCreateFlagBits::NONE, + m_renderpass_ptr.get(), + render_pass_subpass_id, + *m_fs_ptr, + *m_gs_ptr, + Anvil::ShaderModuleStageEntryPoint(), + Anvil::ShaderModuleStageEntryPoint(), + *m_vs_ptr); + + gfx_pipeline_create_info_ptr->set_n_dynamic_viewports (sizeof(scissors) / sizeof(scissors[0]) ); + gfx_pipeline_create_info_ptr->set_primitive_topology (Anvil::PrimitiveTopology::TRIANGLE_FAN); + gfx_pipeline_create_info_ptr->set_rasterization_properties(Anvil::PolygonMode::FILL, + Anvil::CullModeFlagBits::NONE, + Anvil::FrontFace::COUNTER_CLOCKWISE, + 1.0f /* line_width */); + gfx_pipeline_create_info_ptr->toggle_dynamic_state (true, /* should_enable */ + Anvil::DynamicState::VIEWPORT); + gfx_pipeline_create_info_ptr->toggle_primitive_restart (true /* should_enable */); + + { + Anvil::VertexInputAttribute attribute(g_vertex_attribute_location, + mesh_vertex_data_format, + 0); /* in_offset_in_bytes */ + + gfx_pipeline_create_info_ptr->add_vertex_binding(g_vertex_attribute_binding, + Anvil::VertexInputRate::VERTEX, + sizeof(float) * n_mesh_vertex_components, /* in_stride_in_bytes */ + 1, /* in_n_attributes */ + &attribute); + } + + { + Anvil::VertexInputAttribute attribute(g_color1_attribute_location, + mesh_color_data_format, + 0); /* in_offset_in_bytes */ + + gfx_pipeline_create_info_ptr->add_vertex_binding(g_color1_attribute_binding, + Anvil::VertexInputRate::VERTEX, + sizeof(float) * n_mesh_color_components, /* in_stride_in_bytes */ + 1, /* in_n_attributes */ + &attribute); + } + + { + 
Anvil::VertexInputAttribute attribute(g_color2_attribute_location, + mesh_color_data_format, + 0); /* in_offset_in_bytes */ + + gfx_pipeline_create_info_ptr->add_vertex_binding(g_color2_attribute_binding, + Anvil::VertexInputRate::VERTEX, + sizeof(float) * n_mesh_color_components, /* in_stride_in_bytes */ + 1, /* in_n_attributes */ + &attribute); + } + + { + Anvil::VertexInputAttribute attribute(g_color3_attribute_location, + mesh_color_data_format, + 0); /* in_offset_in_bytes */ + + gfx_pipeline_create_info_ptr->add_vertex_binding(g_color3_attribute_binding, + Anvil::VertexInputRate::VERTEX, + sizeof(float) * n_mesh_color_components, /* in_stride_in_bytes */ + 1, /* in_n_attributes */ + &attribute); + } + + { + Anvil::VertexInputAttribute attribute(g_color4_attribute_location, + mesh_color_data_format, + 0); /* in_offset_in_bytes */ + + gfx_pipeline_create_info_ptr->add_vertex_binding(g_color4_attribute_binding, + Anvil::VertexInputRate::VERTEX, + sizeof(float) * n_mesh_color_components, /* in_stride_in_bytes */ + 1, /* in_n_attributes */ + &attribute); + } for (uint32_t n_scissor_box = 0; n_scissor_box < sizeof(scissors) / sizeof(scissors[0]); ++n_scissor_box) { - gfx_pipeline_manager_ptr->set_scissor_box_properties(m_pipeline_id, - n_scissor_box, - scissors[n_scissor_box].offset.x, - scissors[n_scissor_box].offset.y, - scissors[n_scissor_box].extent.width, - scissors[n_scissor_box].extent.height); + gfx_pipeline_create_info_ptr->set_scissor_box_properties(n_scissor_box, + scissors[n_scissor_box].offset.x, + scissors[n_scissor_box].offset.y, + scissors[n_scissor_box].extent.width, + scissors[n_scissor_box].extent.height); } + + + gfx_pipeline_manager_ptr->add_pipeline(std::move(gfx_pipeline_create_info_ptr), + &m_pipeline_id); } void App::init_semaphores() @@ -767,103 +818,117 @@ void App::init_semaphores() n_semaphore < m_n_swapchain_images; ++n_semaphore) { - std::shared_ptr new_signal_semaphore_ptr = Anvil::Semaphore::create(m_device_ptr); - std::shared_ptr new_wait_semaphore_ptr = Anvil::Semaphore::create(m_device_ptr); + Anvil::SemaphoreUniquePtr new_signal_semaphore_ptr; + Anvil::SemaphoreUniquePtr new_wait_semaphore_ptr; + + { + auto create_info_ptr = Anvil::SemaphoreCreateInfo::create(m_device_ptr.get() ); + + new_signal_semaphore_ptr = Anvil::Semaphore::create(std::move(create_info_ptr) ); + } + { + auto create_info_ptr = Anvil::SemaphoreCreateInfo::create(m_device_ptr.get() ); + + new_wait_semaphore_ptr = Anvil::Semaphore::create(std::move(create_info_ptr) ); + } new_signal_semaphore_ptr->set_name_formatted("Signal semaphore [%d]", n_semaphore); new_wait_semaphore_ptr->set_name_formatted ("Wait semaphore [%d]", n_semaphore); - m_frame_signal_semaphores.push_back(new_signal_semaphore_ptr); - m_frame_wait_semaphores.push_back (new_wait_semaphore_ptr); + m_frame_signal_semaphores.push_back(std::move(new_signal_semaphore_ptr) ); + m_frame_wait_semaphores.push_back (std::move(new_wait_semaphore_ptr) ); } } void App::init_shaders() { - std::shared_ptr fragment_shader_ptr; - std::shared_ptr fragment_shader_module_ptr; - std::shared_ptr geometry_shader_ptr; - std::shared_ptr geometry_shader_module_ptr; - std::shared_ptr vertex_shader_ptr; - std::shared_ptr vertex_shader_module_ptr; - - fragment_shader_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr, + Anvil::GLSLShaderToSPIRVGeneratorUniquePtr fragment_shader_ptr; + Anvil::ShaderModuleUniquePtr fs_module_ptr; + Anvil::ShaderModuleUniquePtr gs_module_ptr; + Anvil::ShaderModuleUniquePtr vs_module_ptr; + 
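The five per-stream add_vertex_binding() blocks in init_gfx_pipelines() above only differ in the location/binding constants, so they can be collapsed into a loop; a condensed sketch under that assumption (the helper and the local struct are illustrative, the constants and calls are the example's own):

    static void add_color_vertex_bindings(Anvil::GraphicsPipelineCreateInfo* pipeline_create_info_ptr,
                                          Anvil::Format                      color_format,
                                          uint32_t                           n_color_components)
    {
        const struct
        {
            uint32_t location;
            uint32_t binding;
        } color_streams[] =
        {
            {g_color1_attribute_location, g_color1_attribute_binding},
            {g_color2_attribute_location, g_color2_attribute_binding},
            {g_color3_attribute_location, g_color3_attribute_binding},
            {g_color4_attribute_location, g_color4_attribute_binding},
        };

        for (const auto& stream : color_streams)
        {
            Anvil::VertexInputAttribute attribute(stream.location,
                                                  color_format,
                                                  0); /* in_offset_in_bytes */

            pipeline_create_info_ptr->add_vertex_binding(stream.binding,
                                                         Anvil::VertexInputRate::VERTEX,
                                                         sizeof(float) * n_color_components, /* in_stride_in_bytes */
                                                         1,                                  /* in_n_attributes    */
                                                         &attribute);
        }
    }

Because each Anvil::VertexInputAttribute in the example is a stack local that goes out of scope right after the call, the create info evidently copies the attribute data rather than retaining the pointer.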
Anvil::GLSLShaderToSPIRVGeneratorUniquePtr geometry_shader_ptr; + Anvil::GLSLShaderToSPIRVGeneratorUniquePtr vertex_shader_ptr; + + fragment_shader_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr.get(), Anvil::GLSLShaderToSPIRVGenerator::MODE_USE_SPECIFIED_SOURCE, g_glsl_render_frag, - Anvil::SHADER_STAGE_FRAGMENT); - vertex_shader_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr, + Anvil::ShaderStage::FRAGMENT); + vertex_shader_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr.get(), Anvil::GLSLShaderToSPIRVGenerator::MODE_USE_SPECIFIED_SOURCE, g_glsl_render_vert, - Anvil::SHADER_STAGE_VERTEX); - geometry_shader_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr, + Anvil::ShaderStage::VERTEX); + geometry_shader_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr.get(), Anvil::GLSLShaderToSPIRVGenerator::MODE_USE_SPECIFIED_SOURCE, g_glsl_render_geom, - Anvil::SHADER_STAGE_GEOMETRY); + Anvil::ShaderStage::GEOMETRY); - fragment_shader_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr, - fragment_shader_ptr); - geometry_shader_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr, - geometry_shader_ptr); - vertex_shader_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr, - vertex_shader_ptr); + fs_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr.get (), + fragment_shader_ptr.get() ); + gs_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr.get (), + geometry_shader_ptr.get() ); + vs_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr.get (), + vertex_shader_ptr.get () ); - fragment_shader_module_ptr->set_name("Fragment shader"); - geometry_shader_module_ptr->set_name("Geometry shader"); - vertex_shader_module_ptr->set_name ("Vertex shader"); + fs_module_ptr->set_name("Fragment shader"); + gs_module_ptr->set_name("Geometry shader"); + vs_module_ptr->set_name("Vertex shader"); m_fs_ptr.reset( new Anvil::ShaderModuleStageEntryPoint( "main", - fragment_shader_module_ptr, - Anvil::SHADER_STAGE_FRAGMENT) + std::move(fs_module_ptr), + Anvil::ShaderStage::FRAGMENT) ); m_gs_ptr.reset( new Anvil::ShaderModuleStageEntryPoint( "main", - geometry_shader_module_ptr, - Anvil::SHADER_STAGE_GEOMETRY) + std::move(gs_module_ptr), + Anvil::ShaderStage::GEOMETRY) ); m_vs_ptr.reset( new Anvil::ShaderModuleStageEntryPoint( "main", - vertex_shader_module_ptr, - Anvil::SHADER_STAGE_VERTEX) + std::move(vs_module_ptr), + Anvil::ShaderStage::VERTEX) ); } void App::init_swapchain() { - std::shared_ptr device_locked_ptr(m_device_ptr); + { + auto create_info_ptr = Anvil::RenderingSurfaceCreateInfo::create(m_instance_ptr.get(), + m_device_ptr.get (), + m_window_ptr.get () ); - m_rendering_surface_ptr = Anvil::RenderingSurface::create(m_instance_ptr, - m_device_ptr, - m_window_ptr); + m_rendering_surface_ptr = Anvil::RenderingSurface::create(std::move(create_info_ptr) ); + } m_rendering_surface_ptr->set_name("Main rendering surface"); - m_swapchain_ptr = device_locked_ptr->create_swapchain(m_rendering_surface_ptr, - m_window_ptr, - VK_FORMAT_B8G8R8A8_UNORM, - VK_PRESENT_MODE_FIFO_KHR, - VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, - m_n_swapchain_images); + m_swapchain_ptr = dynamic_cast(m_device_ptr.get() )->create_swapchain(m_rendering_surface_ptr.get(), + m_window_ptr.get (), + Anvil::Format::B8G8R8A8_UNORM, + Anvil::ColorSpaceKHR::SRGB_NONLINEAR_KHR, + Anvil::PresentModeKHR::FIFO_KHR, + Anvil::ImageUsageFlagBits::COLOR_ATTACHMENT_BIT, 
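init_shaders() now goes GLSL source, to GLSLShaderToSPIRVGenerator, to ShaderModule, to ShaderModuleStageEntryPoint, with unique_ptr ownership at every step. The three per-stage blocks above reduce to a single routine along these lines (the helper is illustrative; the entry point name "main" is the one the example uses):

    static std::unique_ptr<Anvil::ShaderModuleStageEntryPoint> create_stage_entry_point_from_glsl(Anvil::BaseDevice* device_ptr,
                                                                                                  const char*        glsl_source,
                                                                                                  Anvil::ShaderStage stage)
    {
        auto generator_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(device_ptr,
                                                                       Anvil::GLSLShaderToSPIRVGenerator::MODE_USE_SPECIFIED_SOURCE,
                                                                       glsl_source,
                                                                       stage);
        auto module_ptr    = Anvil::ShaderModule::create_from_spirv_generator(device_ptr,
                                                                              generator_ptr.get() );

        /* The stage entry point takes ownership of the shader module. */
        return std::unique_ptr<Anvil::ShaderModuleStageEntryPoint>(
            new Anvil::ShaderModuleStageEntryPoint("main",
                                                   std::move(module_ptr),
                                                   stage) );
    }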
+ m_n_swapchain_images); m_swapchain_ptr->set_name("Main swapchain"); /* Cache the queue we are going to use for presentation */ const std::vector* present_queue_fams_ptr = nullptr; - if (!m_rendering_surface_ptr->get_queue_families_with_present_support(device_locked_ptr->get_physical_device(), + if (!m_rendering_surface_ptr->get_queue_families_with_present_support(dynamic_cast(m_device_ptr.get() )->get_physical_device(), &present_queue_fams_ptr) ) { anvil_assert_fail(); } - m_present_queue_ptr = device_locked_ptr->get_queue(present_queue_fams_ptr->at(0), - 0); /* in_n_queue */ + m_present_queue_ptr = m_device_ptr->get_queue_for_queue_family_index(present_queue_fams_ptr->at(0), + 0); /* in_n_queue */ } void App::init_window() @@ -892,42 +957,46 @@ void App::init_window() void App::init_vulkan() { /* Create a Vulkan instance */ - m_instance_ptr = Anvil::Instance::create(APP_NAME, /* app_name */ - APP_NAME, /* engine_name */ + { + auto create_info_ptr = Anvil::InstanceCreateInfo::create(APP_NAME, /* app_name */ + APP_NAME, /* engine_name */ #ifdef ENABLE_VALIDATION - std::bind(&App::on_validation_callback, - this, - std::placeholders::_1, - std::placeholders::_2, - std::placeholders::_3, - std::placeholders::_4) ); + std::bind(&App::on_validation_callback, + this, + std::placeholders::_1, + std::placeholders::_2), #else - nullptr); + Anvil::DebugCallbackFunction(), #endif + false); /* in_mt_safe */ + + m_instance_ptr = Anvil::Instance::create(std::move(create_info_ptr) ); + } m_physical_device_ptr = m_instance_ptr->get_physical_device(0); /* Create a Vulkan device */ - m_device_ptr = Anvil::SGPUDevice::create(m_physical_device_ptr, - Anvil::DeviceExtensionConfiguration(), - std::vector(), /* layers */ - false, /* transient_command_buffer_allocs_only */ - false); /* support_resettable_command_buffers */ + { + auto create_info_ptr = Anvil::DeviceCreateInfo::create_sgpu(m_physical_device_ptr, + true, /* in_enable_shader_module_cache */ + Anvil::DeviceExtensionConfiguration(), + std::vector(), /* in_layers */ + Anvil::CommandPoolCreateFlagBits::NONE, + false); /* in_mt_safe */ + + m_device_ptr = Anvil::SGPUDevice::create(std::move(create_info_ptr) ); + } } -VkBool32 App::on_validation_callback(VkDebugReportFlagsEXT message_flags, - VkDebugReportObjectTypeEXT object_type, - const char* layer_prefix, - const char* message) +void App::on_validation_callback(Anvil::DebugMessageSeverityFlags in_severity, + const char* in_message_ptr) { - if ((message_flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0) + if ((in_severity & Anvil::DebugMessageSeverityFlagBits::ERROR_BIT) != 0) { fprintf(stderr, "[!] %s\n", - message); + in_message_ptr); } - - return false; } void App::run() @@ -936,9 +1005,9 @@ void App::run() } -int main() +int main(int argc, char *argv[]) { - std::shared_ptr app_ptr(new App() ); + std::unique_ptr app_ptr(new App() ); app_ptr->init(); app_ptr->run(); diff --git a/examples/OcclusionQuery/CMakeLists.txt b/examples/OcclusionQuery/CMakeLists.txt index de2203b4..556eb153 100644 --- a/examples/OcclusionQuery/CMakeLists.txt +++ b/examples/OcclusionQuery/CMakeLists.txt @@ -16,44 +16,21 @@ if(${CMAKE_SYSTEM_NAME} MATCHES "Linux") endif() endif() -add_subdirectory (../.. "${CMAKE_CURRENT_BINARY_DIR}/anvil") +if (NOT ANVIL_LINK_EXAMPLES) + add_subdirectory (../.. 
"${CMAKE_CURRENT_BINARY_DIR}/anvil") +endif() +target_include_directories(Anvil PUBLIC "${CMAKE_CURRENT_BINARY_DIR}/anvil/include") include_directories(${Anvil_SOURCE_DIR}/include - ${OcclusionQuery_SOURCE_DIR}/include) - -# Include the Vulkan header. -if (WIN32) - include_directories($ENV{VK_SDK_PATH}/Include - $ENV{VULKAN_SDK}/Include) - - if("${CMAKE_SIZEOF_VOID_P}" EQUAL "8") - link_directories ($ENV{VK_SDK_PATH}/Bin - $ENV{VK_SDK_PATH}/Lib - $ENV{VULKAN_SDK}/Bin - $ENV{VULKAN_SDK}/Lib) - else() - link_directories ($ENV{VK_SDK_PATH}/Bin32 - $ENV{VK_SDK_PATH}/Lib32 - $ENV{VULKAN_SDK}/Bin32 - $ENV{VULKAN_SDK}/Lib32) - endif() -else() - include_directories($ENV{VK_SDK_PATH}/x86_64/include - $ENV{VULKAN_SDK}/include - $ENV{VULKAN_SDK}/x86_64/include) - link_directories ($ENV{VK_SDK_PATH}/x86_64/lib - $ENV{VULKAN_SDK}/lib - $ENV{VULKAN_SDK}/x86_64/lib) -endif() - + ${OcclusionQuery_SOURCE_DIR}/include) + # Create the OcclusionQuery project. add_executable (OcclusionQuery include/app.h - include/app.h - src/app.cpp) + src/app.cpp) # Add linking dependencies for the example projects -add_dependencies (OcclusionQuery Anvil) +add_dependencies(OcclusionQuery Anvil) if (WIN32) target_link_libraries(OcclusionQuery Anvil) diff --git a/examples/OcclusionQuery/include/app.h b/examples/OcclusionQuery/include/app.h index 951ae2df..ed96527f 100644 --- a/examples/OcclusionQuery/include/app.h +++ b/examples/OcclusionQuery/include/app.h @@ -54,54 +54,52 @@ class App void init_window (); void init_vulkan (); - void draw_frame (); - VkBool32 on_validation_callback(VkDebugReportFlagsEXT message_flags, - VkDebugReportObjectTypeEXT object_type, - const char* layer_prefix, - const char* message); + void draw_frame (); + void on_validation_callback(Anvil::DebugMessageSeverityFlags in_severity, + const char* in_message_ptr); /* Private variables */ - std::weak_ptr m_device_ptr; - std::shared_ptr m_instance_ptr; - std::weak_ptr m_physical_device_ptr; - std::shared_ptr m_present_queue_ptr; - std::shared_ptr m_rendering_surface_ptr; - std::shared_ptr m_swapchain_ptr; - Anvil::Time m_time; - std::shared_ptr m_window_ptr; + Anvil::BaseDeviceUniquePtr m_device_ptr; + Anvil::InstanceUniquePtr m_instance_ptr; + const Anvil::PhysicalDevice* m_physical_device_ptr; + Anvil::Queue* m_present_queue_ptr; + Anvil::RenderingSurfaceUniquePtr m_rendering_surface_ptr; + Anvil::SwapchainUniquePtr m_swapchain_ptr; + Anvil::Time m_time; + Anvil::WindowUniquePtr m_window_ptr; - std::shared_ptr m_depth_image_ptr; - std::shared_ptr m_depth_image_view_ptr; + Anvil::ImageUniquePtr m_depth_image_ptr; + Anvil::ImageViewUniquePtr m_depth_image_view_ptr; - std::shared_ptr m_quad_fs_ptr; - std::shared_ptr m_quad_vs_ptr; - std::shared_ptr m_tri_fs_ptr; - std::shared_ptr m_tri_vs_ptr; + std::unique_ptr m_quad_fs_ptr; + std::unique_ptr m_quad_vs_ptr; + std::unique_ptr m_tri_fs_ptr; + std::unique_ptr m_tri_vs_ptr; - std::shared_ptr m_1stpass_dsg_ptr; - std::shared_ptr m_2ndpass_quad_dsg_ptr; - std::shared_ptr m_2ndpass_tri_dsg_ptr; + Anvil::DescriptorSetGroupUniquePtr m_1stpass_dsg_ptr; + Anvil::DescriptorSetGroupUniquePtr m_2ndpass_quad_dsg_ptr; + Anvil::DescriptorSetGroupUniquePtr m_2ndpass_tri_dsg_ptr; - std::shared_ptr m_query_pool_ptr; + Anvil::QueryPoolUniquePtr m_query_pool_ptr; - uint32_t m_n_bytes_per_query; - std::shared_ptr m_query_bo_ptr; - std::shared_ptr m_query_data_copied_event; - std::shared_ptr m_time_bo_ptr; - uint32_t m_time_n_bytes_per_swapchain_image; + uint32_t m_n_bytes_per_query; + Anvil::BufferUniquePtr m_query_bo_ptr; + 
Anvil::EventUniquePtr m_query_data_copied_event; + Anvil::BufferUniquePtr m_time_bo_ptr; + uint32_t m_time_n_bytes_per_swapchain_image; - std::shared_ptr m_render_tri1_and_generate_ot_data_cmd_buffers[N_SWAPCHAIN_IMAGES]; - std::shared_ptr m_render_tri2_and_quad_cmd_buffers [N_SWAPCHAIN_IMAGES]; + Anvil::PrimaryCommandBufferUniquePtr m_render_tri1_and_generate_ot_data_cmd_buffers[N_SWAPCHAIN_IMAGES]; + Anvil::PrimaryCommandBufferUniquePtr m_render_tri2_and_quad_cmd_buffers [N_SWAPCHAIN_IMAGES]; - Anvil::GraphicsPipelineID m_1stpass_depth_test_always_pipeline_id; - Anvil::GraphicsPipelineID m_1stpass_depth_test_equal_pipeline_id; - Anvil::GraphicsPipelineID m_2ndpass_depth_test_off_quad_pipeline_id; - Anvil::GraphicsPipelineID m_2ndpass_depth_test_off_tri_pipeline_id; + Anvil::PipelineID m_1stpass_depth_test_always_pipeline_id; + Anvil::PipelineID m_1stpass_depth_test_equal_pipeline_id; + Anvil::PipelineID m_2ndpass_depth_test_off_quad_pipeline_id; + Anvil::PipelineID m_2ndpass_depth_test_off_tri_pipeline_id; - std::shared_ptr m_fbos[N_SWAPCHAIN_IMAGES]; + Anvil::FramebufferUniquePtr m_fbos[N_SWAPCHAIN_IMAGES]; - std::shared_ptr m_renderpass_quad_ptr; - std::shared_ptr m_renderpass_tris_ptr; + Anvil::RenderPassUniquePtr m_renderpass_quad_ptr; + Anvil::RenderPassUniquePtr m_renderpass_tris_ptr; Anvil::SubPassID m_renderpass_1stpass_depth_test_always_subpass_id; Anvil::SubPassID m_renderpass_1stpass_depth_test_equal_ot_subpass_id; @@ -111,6 +109,6 @@ class App uint32_t m_n_last_semaphore_used; const uint32_t m_n_swapchain_images; - std::vector > m_frame_signal_semaphores; - std::vector > m_frame_wait_semaphores; + std::vector m_frame_signal_semaphores; + std::vector m_frame_wait_semaphores; }; diff --git a/examples/OcclusionQuery/src/app.cpp b/examples/OcclusionQuery/src/app.cpp index a2f5c29f..8cd1d4b6 100644 --- a/examples/OcclusionQuery/src/app.cpp +++ b/examples/OcclusionQuery/src/app.cpp @@ -28,9 +28,20 @@ #include "config.h" #include +#include "misc/buffer_create_info.h" +#include "misc/event_create_info.h" +#include "misc/framebuffer_create_info.h" #include "misc/glsl_to_spirv.h" +#include "misc/graphics_pipeline_create_info.h" +#include "misc/image_create_info.h" +#include "misc/image_view_create_info.h" +#include "misc/instance_create_info.h" #include "misc/io.h" #include "misc/object_tracker.h" +#include "misc/render_pass_create_info.h" +#include "misc/rendering_surface_create_info.h" +#include "misc/semaphore_create_info.h" +#include "misc/swapchain_create_info.h" #include "misc/time.h" #include "misc/window_factory.h" #include "wrappers/buffer.h" @@ -162,8 +173,12 @@ static const char* g_glsl_render_tri_vert = App::App() - :m_n_last_semaphore_used(0), - m_n_swapchain_images (N_SWAPCHAIN_IMAGES) + :m_1stpass_depth_test_always_pipeline_id (UINT32_MAX), + m_1stpass_depth_test_equal_pipeline_id (UINT32_MAX), + m_2ndpass_depth_test_off_quad_pipeline_id(UINT32_MAX), + m_2ndpass_depth_test_off_tri_pipeline_id (UINT32_MAX), + m_n_last_semaphore_used (0), + m_n_swapchain_images (N_SWAPCHAIN_IMAGES) { // .. 
} @@ -175,12 +190,41 @@ App::~App() void App::deinit() { - vkDeviceWaitIdle(m_device_ptr.lock()->get_device_vk() ); + auto gfx_pipeline_manager_ptr = m_device_ptr->get_graphics_pipeline_manager(); + + Anvil::Vulkan::vkDeviceWaitIdle(m_device_ptr->get_device_vk() ); + + if (m_1stpass_depth_test_always_pipeline_id != UINT32_MAX) + { + gfx_pipeline_manager_ptr->delete_pipeline(m_1stpass_depth_test_always_pipeline_id); + + m_1stpass_depth_test_always_pipeline_id = UINT32_MAX; + } + + if (m_1stpass_depth_test_equal_pipeline_id != UINT32_MAX) + { + gfx_pipeline_manager_ptr->delete_pipeline(m_1stpass_depth_test_equal_pipeline_id); + + m_1stpass_depth_test_equal_pipeline_id = UINT32_MAX; + } + + if (m_2ndpass_depth_test_off_quad_pipeline_id != UINT32_MAX) + { + gfx_pipeline_manager_ptr->delete_pipeline(m_2ndpass_depth_test_off_quad_pipeline_id); + + m_2ndpass_depth_test_off_quad_pipeline_id = UINT32_MAX; + } + + if (m_2ndpass_depth_test_off_tri_pipeline_id != UINT32_MAX) + { + gfx_pipeline_manager_ptr->delete_pipeline(m_2ndpass_depth_test_off_tri_pipeline_id); + + m_2ndpass_depth_test_off_tri_pipeline_id = UINT32_MAX; + } m_frame_signal_semaphores.clear(); m_frame_wait_semaphores.clear(); - m_present_queue_ptr.reset(); m_rendering_surface_ptr.reset(); m_swapchain_ptr.reset(); @@ -210,10 +254,7 @@ void App::deinit() m_fbos[n_swapchain_image].reset(); } - m_device_ptr.lock()->destroy(); m_device_ptr.reset(); - - m_instance_ptr->destroy(); m_instance_ptr.reset(); m_window_ptr.reset(); @@ -221,26 +262,31 @@ void App::deinit() void App::draw_frame() { - std::shared_ptr curr_frame_signal_semaphore_ptr; - std::shared_ptr curr_frame_wait_semaphore_ptr; - std::shared_ptr device_locked_ptr = m_device_ptr.lock(); - static uint32_t n_frames_rendered = 0; - uint32_t n_swapchain_image; - std::shared_ptr present_queue_ptr = device_locked_ptr->get_universal_queue(0); - std::shared_ptr present_wait_semaphore_ptr; - const VkPipelineStageFlags wait_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT; + Anvil::Semaphore* curr_frame_signal_semaphore_ptr = nullptr; + Anvil::Semaphore* curr_frame_wait_semaphore_ptr = nullptr; + static uint32_t n_frames_rendered = 0; + uint32_t n_swapchain_image; + Anvil::Queue* present_queue_ptr = m_device_ptr->get_universal_queue(0); + Anvil::Semaphore* present_wait_semaphore_ptr = nullptr; + const Anvil::PipelineStageFlags wait_stage_mask = Anvil::PipelineStageFlagBits::ALL_COMMANDS_BIT; /* Determine the signal + wait semaphores to use for drawing this frame */ m_n_last_semaphore_used = (m_n_last_semaphore_used + 1) % m_n_swapchain_images; - curr_frame_signal_semaphore_ptr = m_frame_signal_semaphores[m_n_last_semaphore_used]; - curr_frame_wait_semaphore_ptr = m_frame_wait_semaphores [m_n_last_semaphore_used]; + curr_frame_signal_semaphore_ptr = m_frame_signal_semaphores[m_n_last_semaphore_used].get(); + curr_frame_wait_semaphore_ptr = m_frame_wait_semaphores [m_n_last_semaphore_used].get(); present_wait_semaphore_ptr = curr_frame_signal_semaphore_ptr; /* Determine the semaphore which the swapchain image */ - n_swapchain_image = m_swapchain_ptr->acquire_image(curr_frame_wait_semaphore_ptr, - true); /* in_should_block */ + { + const auto acquire_result = m_swapchain_ptr->acquire_image(curr_frame_wait_semaphore_ptr, + &n_swapchain_image, + true /* in_should_block */); + + ANVIL_REDUNDANT_VARIABLE_CONST(acquire_result); + anvil_assert (acquire_result == Anvil::SwapchainOperationErrorCode::SUCCESS); + } /* Update time data */ const float time = float(m_time.get_time_in_msec() ) / 1000.0f; @@ 
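acquire_image() no longer returns the swapchain image index directly; it writes the index through an out parameter and reports an Anvil::SwapchainOperationErrorCode, which the examples assert on. A sketch of the acquisition step shown above (the helper wrapper is illustrative):

    static uint32_t acquire_next_swapchain_image(Anvil::Swapchain* swapchain_ptr,
                                                 Anvil::Semaphore* wait_semaphore_ptr)
    {
        uint32_t n_swapchain_image = 0;

        const auto acquire_result = swapchain_ptr->acquire_image(wait_semaphore_ptr,
                                                                &n_swapchain_image,
                                                                 true /* in_should_block */);

        ANVIL_REDUNDANT_VARIABLE_CONST(acquire_result);
        anvil_assert                  (acquire_result == Anvil::SwapchainOperationErrorCode::SUCCESS);

        return n_swapchain_image;
    }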
-250,26 +296,36 @@ void App::draw_frame() &time); /* Submit work chunks and present */ - std::shared_ptr cmd_buffers[] = + Anvil::CommandBufferBase* cmd_buffers[] = { - m_render_tri1_and_generate_ot_data_cmd_buffers[n_swapchain_image], - m_render_tri2_and_quad_cmd_buffers [n_swapchain_image] + m_render_tri1_and_generate_ot_data_cmd_buffers[n_swapchain_image].get(), + m_render_tri2_and_quad_cmd_buffers [n_swapchain_image].get() }; const uint32_t n_cmd_buffers = sizeof(cmd_buffers) / sizeof(cmd_buffers[0]); - present_queue_ptr->submit_command_buffers(n_cmd_buffers, - cmd_buffers, - 1, /* n_semaphores_to_signal */ - &curr_frame_signal_semaphore_ptr, - 1, /* n_semaphores_to_wait_on */ - &curr_frame_wait_semaphore_ptr, - &wait_stage_mask, - false); /* should_block */ + present_queue_ptr->submit( + Anvil::SubmitInfo::create_wait_execute_signal(cmd_buffers, + n_cmd_buffers, + 1, /* n_semaphores_to_signal */ + &curr_frame_signal_semaphore_ptr, + 1, /* n_semaphores_to_wait_on */ + &curr_frame_wait_semaphore_ptr, + &wait_stage_mask, + false) /* should_block */ + ); + + { + Anvil::SwapchainOperationErrorCode present_result = Anvil::SwapchainOperationErrorCode::DEVICE_LOST; + + present_queue_ptr->present(m_swapchain_ptr.get(), + n_swapchain_image, + 1, /* n_wait_semaphores */ + &present_wait_semaphore_ptr, + &present_result); - present_queue_ptr->present(m_swapchain_ptr, - n_swapchain_image, - 1, /* n_wait_semaphores */ - &present_wait_semaphore_ptr); + ANVIL_REDUNDANT_VARIABLE(present_result); + anvil_assert (present_result == Anvil::SwapchainOperationErrorCode::SUCCESS); + } ++n_frames_rendered; @@ -304,43 +360,56 @@ void App::init() void App::init_buffers() { - std::shared_ptr physical_device_locked_ptr(m_physical_device_ptr); - const VkDeviceSize uniform_alignment_req (physical_device_locked_ptr->get_device_properties().limits.minUniformBufferOffsetAlignment); + const VkDeviceSize uniform_alignment_req(m_physical_device_ptr->get_device_properties().core_vk1_0_properties_ptr->limits.min_uniform_buffer_offset_alignment); - m_n_bytes_per_query = static_cast(Anvil::Utils::round_up(sizeof(uint32_t), + m_n_bytes_per_query = static_cast(Anvil::Utils::round_up(static_cast(sizeof(uint32_t)), uniform_alignment_req) ); - m_time_n_bytes_per_swapchain_image = static_cast(Anvil::Utils::round_up(sizeof(float), + m_time_n_bytes_per_swapchain_image = static_cast(Anvil::Utils::round_up(static_cast(sizeof(float)), uniform_alignment_req) ); - m_query_bo_ptr = Anvil::Buffer::create_nonsparse( - m_device_ptr, - m_n_bytes_per_query * N_SWAPCHAIN_IMAGES, - Anvil::QUEUE_FAMILY_GRAPHICS_BIT, - VK_SHARING_MODE_EXCLUSIVE, - VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, - 0, /* in_memory_features */ - nullptr); /* opt_client_data */ - - m_time_bo_ptr = Anvil::Buffer::create_nonsparse( - m_device_ptr, - m_time_n_bytes_per_swapchain_image * N_SWAPCHAIN_IMAGES, - Anvil::QUEUE_FAMILY_GRAPHICS_BIT, - VK_SHARING_MODE_EXCLUSIVE, - VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, - Anvil::MEMORY_FEATURE_FLAG_MAPPABLE, - nullptr); /* opt_client_data */ + { + auto create_info_ptr = Anvil::BufferCreateInfo::create_alloc(m_device_ptr.get(), + m_n_bytes_per_query * N_SWAPCHAIN_IMAGES, + Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, + Anvil::SharingMode::EXCLUSIVE, + Anvil::BufferCreateFlagBits::NONE, + Anvil::BufferUsageFlagBits::UNIFORM_BUFFER_BIT | Anvil::BufferUsageFlagBits::TRANSFER_DST_BIT, + Anvil::MemoryFeatureFlagBits::NONE); /* in_memory_features */ + + m_query_bo_ptr = Anvil::Buffer::create(std::move(create_info_ptr) ); 
+ } + + { + auto create_info_ptr = Anvil::BufferCreateInfo::create_alloc(m_device_ptr.get(), + m_time_n_bytes_per_swapchain_image * N_SWAPCHAIN_IMAGES, + Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, + Anvil::SharingMode::EXCLUSIVE, + Anvil::BufferCreateFlagBits::NONE, + Anvil::BufferUsageFlagBits::UNIFORM_BUFFER_BIT, + Anvil::MemoryFeatureFlagBits::MAPPABLE_BIT); + + m_time_bo_ptr = Anvil::Buffer::create(std::move(create_info_ptr) ); + } } void App::init_command_buffers() { - VkClearValue clear_values[2]; - std::shared_ptr device_locked_ptr (m_device_ptr); - std::shared_ptr gfx_pipeline_manager_ptr (device_locked_ptr->get_graphics_pipeline_manager() ); - const bool is_ext_debug_marker_supported(device_locked_ptr->is_ext_debug_marker_extension_enabled() ); - VkRect2D render_area; - std::shared_ptr tri_1stpass_ds0_ptr (m_1stpass_dsg_ptr->get_descriptor_set (0) ); - std::shared_ptr quad_2ndpass_ds0_ptr (m_2ndpass_quad_dsg_ptr->get_descriptor_set(0) ); - std::shared_ptr tri_2ndpass_ds0_ptr (m_2ndpass_tri_dsg_ptr->get_descriptor_set (0) ); + VkClearValue clear_values[2]; + auto gfx_pipeline_manager_ptr (m_device_ptr->get_graphics_pipeline_manager() ); + const bool is_ext_debug_marker_supported(m_device_ptr->get_extension_info()->ext_debug_marker() ); + VkRect2D render_area; + auto tri_1stpass_ds0_ptr (m_1stpass_dsg_ptr->get_descriptor_set (0) ); + auto quad_2ndpass_ds0_ptr (m_2ndpass_quad_dsg_ptr->get_descriptor_set(0) ); + auto tri_2ndpass_ds0_ptr (m_2ndpass_tri_dsg_ptr->get_descriptor_set (0) ); + + uint32_t n_universal_queues_available = 0; + const uint32_t* universal_queue_family_indices = nullptr; + + m_device_ptr->get_queue_family_indices_for_queue_family_type(Anvil::QueueFamilyType::UNIVERSAL, + &n_universal_queues_available, + &universal_queue_family_indices); + + anvil_assert(n_universal_queues_available > 0); clear_values[0].color.float32[0] = 1.0f; clear_values[0].color.float32[1] = 1.0f; @@ -360,25 +429,24 @@ void App::init_command_buffers() const uint32_t query_result_offset = m_n_bytes_per_query * n_command_buffer; const uint32_t time_dynamic_offset = m_time_n_bytes_per_swapchain_image * n_command_buffer; - m_render_tri1_and_generate_ot_data_cmd_buffers[n_command_buffer] = device_locked_ptr->get_command_pool(Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL)->alloc_primary_level_command_buffer(); - m_render_tri2_and_quad_cmd_buffers [n_command_buffer] = device_locked_ptr->get_command_pool(Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL)->alloc_primary_level_command_buffer(); - + m_render_tri1_and_generate_ot_data_cmd_buffers[n_command_buffer] = m_device_ptr->get_command_pool_for_queue_family_index(universal_queue_family_indices[0])->alloc_primary_level_command_buffer(); + m_render_tri2_and_quad_cmd_buffers [n_command_buffer] = m_device_ptr->get_command_pool_for_queue_family_index(universal_queue_family_indices[0])->alloc_primary_level_command_buffer(); - std::shared_ptr render_tri1_and_generate_ot_data_cmd_buffer_ptr = m_render_tri1_and_generate_ot_data_cmd_buffers[n_command_buffer]; - std::shared_ptr render_tri2_and_quad_cmd_buffer_ptr = m_render_tri2_and_quad_cmd_buffers [n_command_buffer]; + Anvil::PrimaryCommandBuffer* render_tri1_and_generate_ot_data_cmd_buffer_ptr = m_render_tri1_and_generate_ot_data_cmd_buffers[n_command_buffer].get(); + Anvil::PrimaryCommandBuffer* render_tri2_and_quad_cmd_buffer_ptr = m_render_tri2_and_quad_cmd_buffers [n_command_buffer].get(); render_tri1_and_generate_ot_data_cmd_buffer_ptr->start_recording(false, /* one_time_submit */ true); /* simultaneous_use_allowed */ { - 
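Both uniform buffers above are sliced per swapchain image, so the per-slice size is rounded up to the device's minUniformBufferOffsetAlignment before the buffer is sized. A sketch of the query-result buffer path under that scheme (the helper name and the VkDeviceSize out parameter are illustrative; the calls mirror init_buffers() above):

    static Anvil::BufferUniquePtr create_query_result_buffer(Anvil::BaseDevice*           device_ptr,
                                                             const Anvil::PhysicalDevice* physical_device_ptr,
                                                             uint32_t                     n_swapchain_images,
                                                             VkDeviceSize*                out_bytes_per_query_ptr)
    {
        const VkDeviceSize uniform_alignment_req = physical_device_ptr->get_device_properties().core_vk1_0_properties_ptr->limits.min_uniform_buffer_offset_alignment;

        /* E.g. with a 256-byte alignment requirement, the 4-byte occlusion result still occupies
         * a 256-byte slice, so the buffer ends up n_swapchain_images * 256 bytes large. */
        *out_bytes_per_query_ptr = Anvil::Utils::round_up(static_cast<VkDeviceSize>(sizeof(uint32_t) ),
                                                          uniform_alignment_req);

        auto create_info_ptr = Anvil::BufferCreateInfo::create_alloc(device_ptr,
                                                                     *out_bytes_per_query_ptr * n_swapchain_images,
                                                                     Anvil::QueueFamilyFlagBits::GRAPHICS_BIT,
                                                                     Anvil::SharingMode::EXCLUSIVE,
                                                                     Anvil::BufferCreateFlagBits::NONE,
                                                                     Anvil::BufferUsageFlagBits::UNIFORM_BUFFER_BIT | Anvil::BufferUsageFlagBits::TRANSFER_DST_BIT,
                                                                     Anvil::MemoryFeatureFlagBits::NONE);

        return Anvil::Buffer::create(std::move(create_info_ptr) );
    }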
render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_reset_event(m_query_data_copied_event, - VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT); + render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_reset_event(m_query_data_copied_event.get(), + Anvil::PipelineStageFlagBits::TOP_OF_PIPE_BIT); render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_begin_render_pass(sizeof(clear_values) / sizeof(clear_values[0]), clear_values, - m_fbos[n_command_buffer], + m_fbos[n_command_buffer].get(), render_area, - m_renderpass_tris_ptr, - VK_SUBPASS_CONTENTS_INLINE); + m_renderpass_tris_ptr.get(), + Anvil::SubpassContents::INLINE); { if (is_ext_debug_marker_supported) { @@ -387,10 +455,10 @@ void App::init_command_buffers() } /* Draw the left triangle. */ - render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_bind_pipeline (VK_PIPELINE_BIND_POINT_GRAPHICS, + render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_bind_pipeline (Anvil::PipelineBindPoint::GRAPHICS, m_1stpass_depth_test_always_pipeline_id); - render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_bind_descriptor_sets(VK_PIPELINE_BIND_POINT_GRAPHICS, - gfx_pipeline_manager_ptr->get_graphics_pipeline_layout(m_1stpass_depth_test_always_pipeline_id), + render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_bind_descriptor_sets(Anvil::PipelineBindPoint::GRAPHICS, + gfx_pipeline_manager_ptr->get_pipeline_layout(m_1stpass_depth_test_always_pipeline_id), 0, /* in_first_set */ 1, /* in_set_count */ &tri_1stpass_ds0_ptr, @@ -410,7 +478,7 @@ void App::init_command_buffers() /* Proceed to the next subpass, where we render the right triangle with depth test configured to EQUAL. * At the same time, we count how many fragments passed the test. We're going to use that data to * determine the shade of the quad underneath two triangles. 
*/ - render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_next_subpass(VK_SUBPASS_CONTENTS_INLINE); + render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_next_subpass(Anvil::SubpassContents::INLINE); if (is_ext_debug_marker_supported) { @@ -418,19 +486,19 @@ void App::init_command_buffers() nullptr); /* in_opt_color */ } - render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_bind_pipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, + render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_bind_pipeline(Anvil::PipelineBindPoint::GRAPHICS, m_1stpass_depth_test_equal_pipeline_id); - render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_begin_query(m_query_pool_ptr, + render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_begin_query(m_query_pool_ptr.get(), n_command_buffer, - 0); /* in_flags */ + Anvil::QueryControlFlagBits::NONE); /* in_flags */ { render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_draw(3, /* in_vertex_count */ 1, /* in_instance_count */ 0, /* in_first_vertex */ 1); /* in_first_instance */ } - render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_end_query(m_query_pool_ptr, + render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_end_query(m_query_pool_ptr.get(), n_command_buffer); if (is_ext_debug_marker_supported) @@ -440,15 +508,15 @@ void App::init_command_buffers() } render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_end_render_pass(); - render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_copy_query_pool_results(m_query_pool_ptr, + render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_copy_query_pool_results(m_query_pool_ptr.get(), n_command_buffer, 1, /* in_query_count */ - m_query_bo_ptr, + m_query_bo_ptr.get(), m_n_bytes_per_query * n_command_buffer, /* in_dst_offset */ 0, /* in_dst_stride */ VK_QUERY_RESULT_WAIT_BIT); - render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_set_event (m_query_data_copied_event, - VK_PIPELINE_STAGE_TRANSFER_BIT); + render_tri1_and_generate_ot_data_cmd_buffer_ptr->record_set_event (m_query_data_copied_event.get(), + Anvil::PipelineStageFlagBits::TRANSFER_BIT); } render_tri1_and_generate_ot_data_cmd_buffer_ptr->stop_recording(); @@ -456,18 +524,19 @@ void App::init_command_buffers() true); /* simultaneous_use_allowed */ { /* Wait until query data arrives */ - Anvil::BufferBarrier query_data_barrier(VK_ACCESS_TRANSFER_WRITE_BIT, - VK_ACCESS_UNIFORM_READ_BIT, - device_locked_ptr->get_queue_family_index(Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL), - device_locked_ptr->get_queue_family_index(Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL), - m_query_bo_ptr, - query_result_offset, /* in_offset */ - m_n_bytes_per_query); /* in_size */ + Anvil::BufferBarrier query_data_barrier (Anvil::AccessFlagBits::TRANSFER_WRITE_BIT, + Anvil::AccessFlagBits::UNIFORM_READ_BIT, + universal_queue_family_indices[0], + universal_queue_family_indices[0], + m_query_bo_ptr.get(), + query_result_offset, /* in_offset */ + m_n_bytes_per_query); /* in_size */ + auto query_data_copied_event_raw_ptr(m_query_data_copied_event.get() ); render_tri2_and_quad_cmd_buffer_ptr->record_wait_events(1, /* in_event_count */ - &m_query_data_copied_event, - VK_PIPELINE_STAGE_TRANSFER_BIT, - VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, + &query_data_copied_event_raw_ptr, + Anvil::PipelineStageFlagBits::TRANSFER_BIT, + Anvil::PipelineStageFlagBits::VERTEX_SHADER_BIT, 0, /* in_memory_barrier_count */ nullptr, /* in_memory_barriers_ptr */ 1, /* in_buffer_memory_barrier_count */ @@ -484,15 +553,15 @@ void App::init_command_buffers() /* Rasterize the right triangle */ 
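The occlusion query itself is recorded in the first command buffer above: the second triangle's draw is wrapped in begin/end query, the result is copied into the per-image uniform buffer slice once the render pass has ended, and an event is set so the second command buffer can wait for the copied data. The sequence, restated with the synchronization points called out (assumes the command buffer, query pool, query buffer, event and n_command_buffer from the surrounding loop are in scope):

    auto* cmd_buffer_ptr = render_tri1_and_generate_ot_data_cmd_buffer_ptr;

    cmd_buffer_ptr->record_begin_query(m_query_pool_ptr.get(),
                                       n_command_buffer,     /* one query slot per swapchain image */
                                       Anvil::QueryControlFlagBits::NONE);
    {
        cmd_buffer_ptr->record_draw(3,  /* in_vertex_count   */
                                    1,  /* in_instance_count */
                                    0,  /* in_first_vertex   */
                                    1); /* in_first_instance */
    }
    cmd_buffer_ptr->record_end_query(m_query_pool_ptr.get(),
                                     n_command_buffer);

    cmd_buffer_ptr->record_end_render_pass();

    /* VK_QUERY_RESULT_WAIT_BIT makes the copy wait until the query result is available;
     * the event tells the second command buffer that the copied data may now be read. */
    cmd_buffer_ptr->record_copy_query_pool_results(m_query_pool_ptr.get(),
                                                   n_command_buffer,
                                                   1,                                      /* in_query_count */
                                                   m_query_bo_ptr.get(),
                                                   m_n_bytes_per_query * n_command_buffer, /* in_dst_offset  */
                                                   0,                                      /* in_dst_stride  */
                                                   VK_QUERY_RESULT_WAIT_BIT);
    cmd_buffer_ptr->record_set_event(m_query_data_copied_event.get(),
                                     Anvil::PipelineStageFlagBits::TRANSFER_BIT);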
render_tri2_and_quad_cmd_buffer_ptr->record_begin_render_pass(0, /* in_n_clear_values */ nullptr, /* in_clear_value_ptrs */ - m_fbos[n_command_buffer], + m_fbos[n_command_buffer].get(), render_area, - m_renderpass_quad_ptr, - VK_SUBPASS_CONTENTS_INLINE); + m_renderpass_quad_ptr.get(), + Anvil::SubpassContents::INLINE); { - render_tri2_and_quad_cmd_buffer_ptr->record_bind_pipeline (VK_PIPELINE_BIND_POINT_GRAPHICS, + render_tri2_and_quad_cmd_buffer_ptr->record_bind_pipeline (Anvil::PipelineBindPoint::GRAPHICS, m_2ndpass_depth_test_off_tri_pipeline_id); - render_tri2_and_quad_cmd_buffer_ptr->record_bind_descriptor_sets(VK_PIPELINE_BIND_POINT_GRAPHICS, - gfx_pipeline_manager_ptr->get_graphics_pipeline_layout(m_2ndpass_depth_test_off_tri_pipeline_id), + render_tri2_and_quad_cmd_buffer_ptr->record_bind_descriptor_sets(Anvil::PipelineBindPoint::GRAPHICS, + gfx_pipeline_manager_ptr->get_pipeline_layout(m_2ndpass_depth_test_off_tri_pipeline_id), 0, /* in_first_set */ 1, /* in_set_count */ &tri_2ndpass_ds0_ptr, @@ -505,12 +574,12 @@ void App::init_command_buffers() 1); /* in_first_instance */ /* Draw the quad */ - render_tri2_and_quad_cmd_buffer_ptr->record_next_subpass(VK_SUBPASS_CONTENTS_INLINE); + render_tri2_and_quad_cmd_buffer_ptr->record_next_subpass(Anvil::SubpassContents::INLINE); - render_tri2_and_quad_cmd_buffer_ptr->record_bind_pipeline (VK_PIPELINE_BIND_POINT_GRAPHICS, + render_tri2_and_quad_cmd_buffer_ptr->record_bind_pipeline (Anvil::PipelineBindPoint::GRAPHICS, m_2ndpass_depth_test_off_quad_pipeline_id); - render_tri2_and_quad_cmd_buffer_ptr->record_bind_descriptor_sets(VK_PIPELINE_BIND_POINT_GRAPHICS, - gfx_pipeline_manager_ptr->get_graphics_pipeline_layout(m_2ndpass_depth_test_off_quad_pipeline_id), + render_tri2_and_quad_cmd_buffer_ptr->record_bind_descriptor_sets(Anvil::PipelineBindPoint::GRAPHICS, + gfx_pipeline_manager_ptr->get_pipeline_layout(m_2ndpass_depth_test_off_quad_pipeline_id), 0, /* in_first_set */ 1, /* in_set_count */ &quad_2ndpass_ds0_ptr, @@ -535,54 +604,57 @@ void App::init_command_buffers() void App::init_dsgs() { - m_2ndpass_quad_dsg_ptr = Anvil::DescriptorSetGroup::create(m_device_ptr, - false, /* releaseable_sets */ - 1); /* n_sets */ - m_2ndpass_tri_dsg_ptr = Anvil::DescriptorSetGroup::create(m_device_ptr, - false, /* releaseable_sets */ - 1); /* n_sets */ - m_1stpass_dsg_ptr = Anvil::DescriptorSetGroup::create(m_device_ptr, - false, /* releaseable_sets */ - 1); /* n_sets */ - - m_1stpass_dsg_ptr->add_binding (0, /* n_set */ - 0, /* binding */ - VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, - 1, /* n_elements */ - VK_SHADER_STAGE_VERTEX_BIT); - m_1stpass_dsg_ptr->set_binding_item(0, /* n_set */ - 0, /* binding_index */ - Anvil::DescriptorSet::DynamicUniformBufferBindingElement(m_time_bo_ptr, - 0, /* in_start_offset */ - m_time_n_bytes_per_swapchain_image) ); - - m_2ndpass_tri_dsg_ptr->add_binding (0, /* n_set */ - 0, /* binding */ - VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, - 1, /* n_elements */ - VK_SHADER_STAGE_VERTEX_BIT); - m_2ndpass_tri_dsg_ptr->set_binding_item(0, /* n_set */ - 0, /* binding_index */ - Anvil::DescriptorSet::DynamicUniformBufferBindingElement(m_time_bo_ptr, - 0, /* in_start_offset */ - m_time_n_bytes_per_swapchain_image) ); - - m_2ndpass_quad_dsg_ptr->add_binding (0, /* n_set */ - 0, /* binding */ - VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, - 1, /* n_elements */ - VK_SHADER_STAGE_VERTEX_BIT); + std::vector dsg_1stpass_create_info_ptrs (1); + std::vector quad_dsg_2ndpass_create_info_ptrs(1); + std::vector tri_dsg_2ndpass_create_info_ptrs 
(1); + + dsg_1stpass_create_info_ptrs [0] = Anvil::DescriptorSetCreateInfo::create(); + quad_dsg_2ndpass_create_info_ptrs[0] = Anvil::DescriptorSetCreateInfo::create(); + tri_dsg_2ndpass_create_info_ptrs [0] = Anvil::DescriptorSetCreateInfo::create(); + + + dsg_1stpass_create_info_ptrs[0]->add_binding (0, /* binding */ + Anvil::DescriptorType::UNIFORM_BUFFER_DYNAMIC, + 1, /* n_elements */ + Anvil::ShaderStageFlagBits::VERTEX_BIT); + tri_dsg_2ndpass_create_info_ptrs[0]->add_binding (0, /* binding */ + Anvil::DescriptorType::UNIFORM_BUFFER_DYNAMIC, + 1, /* n_elements */ + Anvil::ShaderStageFlagBits::VERTEX_BIT); + quad_dsg_2ndpass_create_info_ptrs[0]->add_binding(0, /* binding */ + Anvil::DescriptorType::UNIFORM_BUFFER_DYNAMIC, + 1, /* n_elements */ + Anvil::ShaderStageFlagBits::VERTEX_BIT); + + m_1stpass_dsg_ptr = Anvil::DescriptorSetGroup::create(m_device_ptr.get(), + dsg_1stpass_create_info_ptrs); + m_2ndpass_tri_dsg_ptr = Anvil::DescriptorSetGroup::create(m_device_ptr.get(), + tri_dsg_2ndpass_create_info_ptrs); + m_2ndpass_quad_dsg_ptr = Anvil::DescriptorSetGroup::create(m_device_ptr.get(), + quad_dsg_2ndpass_create_info_ptrs); + + m_1stpass_dsg_ptr->set_binding_item (0, /* n_set */ + 0, /* binding_index */ + Anvil::DescriptorSet::DynamicUniformBufferBindingElement(m_time_bo_ptr.get(), + 0, /* in_start_offset */ + m_time_n_bytes_per_swapchain_image) ); + m_2ndpass_tri_dsg_ptr->set_binding_item (0, /* n_set */ + 0, /* binding_index */ + Anvil::DescriptorSet::DynamicUniformBufferBindingElement(m_time_bo_ptr.get(), + 0, /* in_start_offset */ + m_time_n_bytes_per_swapchain_image) ); m_2ndpass_quad_dsg_ptr->set_binding_item(0, /* n_set */ 0, /* binding_index */ - Anvil::DescriptorSet::DynamicUniformBufferBindingElement(m_query_bo_ptr, + Anvil::DescriptorSet::DynamicUniformBufferBindingElement(m_query_bo_ptr.get(), 0, /* in_start_offset */ m_n_bytes_per_query)); } void App::init_events() { - m_query_data_copied_event = Anvil::Event::create(m_device_ptr); + auto create_info_ptr = Anvil::EventCreateInfo::create(m_device_ptr.get() ); + m_query_data_copied_event = Anvil::Event::create(std::move(create_info_ptr) ); m_query_data_copied_event->set_name("Query data copied event"); } @@ -592,231 +664,257 @@ void App::init_framebuffers() n_swapchain_image < N_SWAPCHAIN_IMAGES; ++n_swapchain_image) { - m_fbos[n_swapchain_image] = Anvil::Framebuffer::create(m_device_ptr, - WINDOW_WIDTH, - WINDOW_HEIGHT, - 1); /* n_layers */ + { + auto create_info_ptr = Anvil::FramebufferCreateInfo::create(m_device_ptr.get(), + WINDOW_WIDTH, + WINDOW_HEIGHT, + 1); /* n_layers */ + + create_info_ptr->add_attachment(m_swapchain_ptr->get_image_view(n_swapchain_image), + nullptr); /* out_opt_attachment_id_ptr */ + create_info_ptr->add_attachment(m_depth_image_view_ptr.get(), + nullptr); /* out_opt_attachment_id_ptr */ + + m_fbos[n_swapchain_image] = Anvil::Framebuffer::create(std::move(create_info_ptr) ); + } m_fbos[n_swapchain_image]->set_name_formatted("Framebuffer for swapchain image [%d]", n_swapchain_image); - m_fbos[n_swapchain_image]->add_attachment(m_swapchain_ptr->get_image_view(n_swapchain_image), - nullptr); /* out_opt_attachment_id_ptr */ - m_fbos[n_swapchain_image]->add_attachment(m_depth_image_view_ptr, - nullptr); /* out_opt_attachment_id_ptr */ } } void App::init_images() { - m_depth_image_ptr = Anvil::Image::create_nonsparse( - m_device_ptr, - VK_IMAGE_TYPE_2D, - VK_FORMAT_D16_UNORM, - VK_IMAGE_TILING_OPTIMAL, - VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, - WINDOW_WIDTH, - WINDOW_HEIGHT, - 1, /* 
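Descriptor set layouts are now declared up front on DescriptorSetCreateInfo objects, the whole vector is handed to DescriptorSetGroup::create(), and buffer bindings are assigned afterwards with set_binding_item(). A sketch of the single-set, dynamic-uniform-buffer case used above; the vector's element type is elided by the diff and is assumed to be Anvil::DescriptorSetCreateInfoUniquePtr here, and the helper itself is illustrative:

    static Anvil::DescriptorSetGroupUniquePtr create_dynamic_ub_dsg(Anvil::BaseDevice* device_ptr,
                                                                    Anvil::Buffer*     buffer_ptr,
                                                                    VkDeviceSize       slice_size)
    {
        std::vector<Anvil::DescriptorSetCreateInfoUniquePtr> ds_create_info_ptrs(1);

        ds_create_info_ptrs[0] = Anvil::DescriptorSetCreateInfo::create();
        ds_create_info_ptrs[0]->add_binding(0, /* binding */
                                            Anvil::DescriptorType::UNIFORM_BUFFER_DYNAMIC,
                                            1, /* n_elements */
                                            Anvil::ShaderStageFlagBits::VERTEX_BIT);

        auto dsg_ptr = Anvil::DescriptorSetGroup::create(device_ptr,
                                                         ds_create_info_ptrs);

        /* The actual buffer is attached after the group exists. */
        dsg_ptr->set_binding_item(0, /* n_set         */
                                  0, /* binding_index */
                                  Anvil::DescriptorSet::DynamicUniformBufferBindingElement(buffer_ptr,
                                                                                           0, /* in_start_offset */
                                                                                           slice_size) );

        return dsg_ptr;
    }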
base_mipmap_depth */ - 1, /* n_layers */ - VK_SAMPLE_COUNT_1_BIT, - Anvil::QUEUE_FAMILY_GRAPHICS_BIT, - VK_SHARING_MODE_EXCLUSIVE, - false, /* use_full_mipmap_chain */ - 0, /* in_memory_features */ - false, /* is_mutable */ - VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, - nullptr); - - m_depth_image_view_ptr = Anvil::ImageView::create_2D( - m_device_ptr, - m_depth_image_ptr, - 0, /* n_base_layer */ - 0, /* n_base_mipmap_level */ - 1, /* n_mipmaps */ - VK_IMAGE_ASPECT_DEPTH_BIT, - m_depth_image_ptr->get_image_format(), - VK_COMPONENT_SWIZZLE_IDENTITY, - VK_COMPONENT_SWIZZLE_IDENTITY, - VK_COMPONENT_SWIZZLE_IDENTITY, - VK_COMPONENT_SWIZZLE_IDENTITY); + { + auto create_info_ptr = Anvil::ImageCreateInfo::create_alloc(m_device_ptr.get(), + Anvil::ImageType::_2D, + Anvil::Format::D16_UNORM, + Anvil::ImageTiling::OPTIMAL, + Anvil::ImageUsageFlagBits::DEPTH_STENCIL_ATTACHMENT_BIT, + WINDOW_WIDTH, + WINDOW_HEIGHT, + 1, /* base_mipmap_depth */ + 1, /* n_layers */ + Anvil::SampleCountFlagBits::_1_BIT, + Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, + Anvil::SharingMode::EXCLUSIVE, + false, /* use_full_mipmap_chain */ + Anvil::MemoryFeatureFlagBits::NONE, /* in_memory_features */ + Anvil::ImageCreateFlagBits::NONE, + Anvil::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + nullptr); + + m_depth_image_ptr = Anvil::Image::create(std::move(create_info_ptr) ); + } + + { + auto create_info_ptr = Anvil::ImageViewCreateInfo::create_2D(m_device_ptr.get(), + m_depth_image_ptr.get(), + 0, /* n_base_layer */ + 0, /* n_base_mipmap_level */ + 1, /* n_mipmaps */ + Anvil::ImageAspectFlagBits::DEPTH_BIT, + m_depth_image_ptr->get_create_info_ptr()->get_format(), + Anvil::ComponentSwizzle::IDENTITY, + Anvil::ComponentSwizzle::IDENTITY, + Anvil::ComponentSwizzle::IDENTITY, + Anvil::ComponentSwizzle::IDENTITY); + + m_depth_image_view_ptr = Anvil::ImageView::create(std::move(create_info_ptr) ); + } } void App::init_query_pool() { - m_query_pool_ptr = Anvil::QueryPool::create_non_ps_query_pool(m_device_ptr, + m_query_pool_ptr = Anvil::QueryPool::create_non_ps_query_pool(m_device_ptr.get(), VK_QUERY_TYPE_OCCLUSION, N_SWAPCHAIN_IMAGES); } void App::init_renderpasses() { - std::shared_ptr device_locked_ptr (m_device_ptr); - std::shared_ptr gfx_pipeline_manager_ptr(device_locked_ptr->get_graphics_pipeline_manager() ); + auto gfx_pipeline_manager_ptr(m_device_ptr->get_graphics_pipeline_manager() ); for (uint32_t n_renderpass = 0; n_renderpass < 2; /* quad, tri */ ++n_renderpass) { - Anvil::RenderPassAttachmentID color_attachment_id; - Anvil::RenderPassAttachmentID ds_attachment_id; - const bool is_2nd_renderpass = (n_renderpass == 1); - std::shared_ptr renderpass_ptr; - - renderpass_ptr = Anvil::RenderPass::create(m_device_ptr, - m_swapchain_ptr); + Anvil::RenderPassAttachmentID color_attachment_id; + Anvil::RenderPassAttachmentID ds_attachment_id; + const bool is_2nd_renderpass = (n_renderpass == 1); + Anvil::RenderPassUniquePtr renderpass_ptr; - renderpass_ptr->set_name( (n_renderpass == 0) ? "Quad renderpass" - : "Triangle renderpass"); - - renderpass_ptr->add_color_attachment(m_swapchain_ptr->get_image_format(), - VK_SAMPLE_COUNT_1_BIT, - (!is_2nd_renderpass) ? 
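One occlusion query slot is allocated per swapchain image, so each of the pre-recorded command buffers owns its own query index and a result belonging to a frame that is still in flight is never overwritten. The pool creation from init_query_pool() above, with that sizing decision spelled out:

    m_query_pool_ptr = Anvil::QueryPool::create_non_ps_query_pool(m_device_ptr.get(),
                                                                  VK_QUERY_TYPE_OCCLUSION,
                                                                  N_SWAPCHAIN_IMAGES); /* one query slot per swapchain image */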
VK_ATTACHMENT_LOAD_OP_CLEAR - : VK_ATTACHMENT_LOAD_OP_LOAD, - VK_ATTACHMENT_STORE_OP_STORE, - VK_IMAGE_LAYOUT_UNDEFINED, + { + Anvil::RenderPassCreateInfoUniquePtr renderpass_info_ptr( + new Anvil::RenderPassCreateInfo(m_device_ptr.get() ) + ); + + renderpass_info_ptr->add_color_attachment(m_swapchain_ptr->get_create_info_ptr()->get_format(), + Anvil::SampleCountFlagBits::_1_BIT, + (!is_2nd_renderpass) ? Anvil::AttachmentLoadOp::CLEAR + : Anvil::AttachmentLoadOp::LOAD, + Anvil::AttachmentStoreOp::STORE, + Anvil::ImageLayout::UNDEFINED, #ifdef ENABLE_OFFSCREEN_RENDERING - VK_IMAGE_LAYOUT_GENERAL, + Anvil::ImageLayout::GENERAL, #else - VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, -#endif - false, /* may_alias */ - &color_attachment_id); - - renderpass_ptr->add_depth_stencil_attachment(m_depth_image_ptr->get_image_format(), - VK_SAMPLE_COUNT_1_BIT, - (!is_2nd_renderpass) ? VK_ATTACHMENT_LOAD_OP_CLEAR - : VK_ATTACHMENT_LOAD_OP_LOAD, - VK_ATTACHMENT_STORE_OP_STORE, - VK_ATTACHMENT_LOAD_OP_DONT_CARE, /* stencil_load_op */ - VK_ATTACHMENT_STORE_OP_DONT_CARE, /* stencil_store_op */ - VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, - VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, - false, /* may_alias */ - &ds_attachment_id); + Anvil::ImageLayout::PRESENT_SRC_KHR, + #endif + false, /* may_alias */ + &color_attachment_id); + + renderpass_info_ptr->add_depth_stencil_attachment(m_depth_image_ptr->get_create_info_ptr()->get_format(), + Anvil::SampleCountFlagBits::_1_BIT, + (!is_2nd_renderpass) ? Anvil::AttachmentLoadOp::CLEAR + : Anvil::AttachmentLoadOp::LOAD, + Anvil::AttachmentStoreOp::STORE, + Anvil::AttachmentLoadOp::DONT_CARE, /* stencil_load_op */ + Anvil::AttachmentStoreOp::DONT_CARE, /* stencil_store_op */ + Anvil::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + Anvil::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + false, /* may_alias */ + &ds_attachment_id); + + if (is_2nd_renderpass) + { + renderpass_info_ptr->add_subpass(&m_renderpass_2ndpass_depth_test_off_tri_subpass_id); + renderpass_info_ptr->add_subpass(&m_renderpass_2ndpass_depth_test_off_quad_subpass_id); + + renderpass_info_ptr->add_subpass_color_attachment(m_renderpass_2ndpass_depth_test_off_tri_subpass_id, + Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, + color_attachment_id, + 0, /* location */ + nullptr); /* opt_attachment_resolve_ptr */ + renderpass_info_ptr->add_subpass_color_attachment(m_renderpass_2ndpass_depth_test_off_quad_subpass_id, + Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, + color_attachment_id, + 0, /* location */ + nullptr); /* opt_attachment_resolve_ptr */ + + /* Vulkan requires applications to synchronize subpass execution via subpass dependencies. + * Even though technically we would be fine with both subpasses being executed in parallel, + * as the rasterized geometry never overlaps, we have to define a dependency .. 
*/ + renderpass_info_ptr->add_subpass_to_subpass_dependency(m_renderpass_2ndpass_depth_test_off_tri_subpass_id, + m_renderpass_2ndpass_depth_test_off_quad_subpass_id, + Anvil::PipelineStageFlagBits::VERTEX_SHADER_BIT, /* in_source_stage_mask */ + Anvil::PipelineStageFlagBits::TOP_OF_PIPE_BIT, /* in_destination_stage_mask */ + Anvil::AccessFlagBits::NONE, /* in_source_access_mask */ + Anvil::AccessFlagBits::NONE, /* in_destination_access_mask */ + Anvil::DependencyFlagBits::BY_REGION_BIT); /* in_by_region */ + } + else + { + renderpass_info_ptr->add_subpass(&m_renderpass_1stpass_depth_test_always_subpass_id); + renderpass_info_ptr->add_subpass(&m_renderpass_1stpass_depth_test_equal_ot_subpass_id); + + renderpass_info_ptr->add_subpass_color_attachment(m_renderpass_1stpass_depth_test_always_subpass_id, + Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, + color_attachment_id, + 0, /* location */ + nullptr); /* opt_attachment_resolve_ptr */ + renderpass_info_ptr->add_subpass_color_attachment(m_renderpass_1stpass_depth_test_equal_ot_subpass_id, + Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, + color_attachment_id, + 0, /* location */ + nullptr); /* opt_attachment_resolve_ptr */ + + renderpass_info_ptr->add_subpass_depth_stencil_attachment(m_renderpass_1stpass_depth_test_always_subpass_id, + Anvil::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + ds_attachment_id); + renderpass_info_ptr->add_subpass_depth_stencil_attachment(m_renderpass_1stpass_depth_test_equal_ot_subpass_id, + Anvil::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + ds_attachment_id); + + + /* depth_test_equal_ot subpass must not start before depth_test_always subpass finishes rasterizing */ + renderpass_info_ptr->add_subpass_to_subpass_dependency(m_renderpass_1stpass_depth_test_always_subpass_id, + m_renderpass_1stpass_depth_test_equal_ot_subpass_id, + Anvil::PipelineStageFlagBits::LATE_FRAGMENT_TESTS_BIT, /* in_source_stage_mask */ + Anvil::PipelineStageFlagBits::EARLY_FRAGMENT_TESTS_BIT, /* in_destination_stage_mask */ + Anvil::AccessFlagBits::DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, + Anvil::AccessFlagBits::DEPTH_STENCIL_ATTACHMENT_READ_BIT, + Anvil::DependencyFlagBits::BY_REGION_BIT); + } + + renderpass_ptr = Anvil::RenderPass::create(std::move(renderpass_info_ptr), + m_swapchain_ptr.get() ); + } + + renderpass_ptr->set_name( (n_renderpass == 0) ? 
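The first render pass's subpass dependency is the one doing real work: the EQUAL-depth-test subpass must not start testing fragments before the ALWAYS subpass has finished writing depth. The call from the diff, restated with the reasoning behind each stage and access mask (renderpass_info_ptr and the two subpass IDs are the ones declared above):

    renderpass_info_ptr->add_subpass_to_subpass_dependency(
        m_renderpass_1stpass_depth_test_always_subpass_id,          /* producer: writes the depth attachment     */
        m_renderpass_1stpass_depth_test_equal_ot_subpass_id,        /* consumer: depth test set to EQUAL         */
        Anvil::PipelineStageFlagBits::LATE_FRAGMENT_TESTS_BIT,      /* depth writes have finished by this stage  */
        Anvil::PipelineStageFlagBits::EARLY_FRAGMENT_TESTS_BIT,     /* depth reads start at this stage           */
        Anvil::AccessFlagBits::DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
        Anvil::AccessFlagBits::DEPTH_STENCIL_ATTACHMENT_READ_BIT,
        Anvil::DependencyFlagBits::BY_REGION_BIT);                  /* per-region ordering is enough here        */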
"Quad renderpass" + : "Triangle renderpass"); if (is_2nd_renderpass) { - renderpass_ptr->add_subpass(*m_tri_fs_ptr, - Anvil::ShaderModuleStageEntryPoint(), /* geometry_shader_entrypoint */ - Anvil::ShaderModuleStageEntryPoint(), /* tess_control_shader_entrypoint */ - Anvil::ShaderModuleStageEntryPoint(), /* tess_evaluation_shader_entrypoint */ - *m_tri_vs_ptr, - &m_renderpass_2ndpass_depth_test_off_tri_subpass_id); - renderpass_ptr->add_subpass(*m_quad_fs_ptr, - Anvil::ShaderModuleStageEntryPoint(), /* geometry_shader_entrypoint */ - Anvil::ShaderModuleStageEntryPoint(), /* tess_control_shader_entrypoint */ - Anvil::ShaderModuleStageEntryPoint(), /* tess_evaluation_shader_entrypoint */ - *m_quad_vs_ptr, - &m_renderpass_2ndpass_depth_test_off_quad_subpass_id); - - - renderpass_ptr->add_subpass_color_attachment(m_renderpass_2ndpass_depth_test_off_tri_subpass_id, - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, - color_attachment_id, - 0, /* location */ - nullptr); /* opt_attachment_resolve_ptr */ - renderpass_ptr->add_subpass_color_attachment(m_renderpass_2ndpass_depth_test_off_quad_subpass_id, - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, - color_attachment_id, - 0, /* location */ - nullptr); /* opt_attachment_resolve_ptr */ - - - renderpass_ptr->get_subpass_graphics_pipeline_id(m_renderpass_2ndpass_depth_test_off_tri_subpass_id, - &m_2ndpass_depth_test_off_tri_pipeline_id); - renderpass_ptr->get_subpass_graphics_pipeline_id(m_renderpass_2ndpass_depth_test_off_quad_subpass_id, - &m_2ndpass_depth_test_off_quad_pipeline_id); - - - gfx_pipeline_manager_ptr->set_pipeline_dsg(m_2ndpass_depth_test_off_tri_pipeline_id, - m_2ndpass_tri_dsg_ptr); - - gfx_pipeline_manager_ptr->set_pipeline_dsg (m_2ndpass_depth_test_off_quad_pipeline_id, - m_2ndpass_quad_dsg_ptr); - gfx_pipeline_manager_ptr->set_input_assembly_properties(m_2ndpass_depth_test_off_quad_pipeline_id, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP); - - /* Vulkan requires applications to synchronize subpass execution via subpass dependencies. - * Even though technically we would be fine with both subpasses being executed in parallel, - * as the rasterized geometry never overlaps, we have to define a dependency .. 
*/ - renderpass_ptr->add_subpass_to_subpass_dependency(m_renderpass_2ndpass_depth_test_off_tri_subpass_id, - m_renderpass_2ndpass_depth_test_off_quad_subpass_id, - VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, /* source_stage_mask */ - VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, /* destination_stage_mask */ - 0, /* source_access_mask */ - 0, /* destination_access_mask */ - VK_TRUE); /* by_region */ - - m_renderpass_quad_ptr = renderpass_ptr; + auto depth_test_off_tri_subpass_gfx_pipeline_create_info_ptr = Anvil::GraphicsPipelineCreateInfo::create(Anvil::PipelineCreateFlagBits::NONE, + renderpass_ptr.get(), + m_renderpass_2ndpass_depth_test_off_tri_subpass_id, + *m_tri_fs_ptr, + Anvil::ShaderModuleStageEntryPoint(), /* in_geometry_shader_entrypoint */ + Anvil::ShaderModuleStageEntryPoint(), /* in_tess_control_shader_entrypoint */ + Anvil::ShaderModuleStageEntryPoint(), /* in_tess_evaluation_shader_entrypoint */ + *m_tri_vs_ptr); + auto depth_test_off_quad_subpass_gfx_pipeline_create_info_ptr = Anvil::GraphicsPipelineCreateInfo::create(Anvil::PipelineCreateFlagBits::NONE, + renderpass_ptr.get(), + m_renderpass_2ndpass_depth_test_off_quad_subpass_id, + *m_quad_fs_ptr, + Anvil::ShaderModuleStageEntryPoint(), /* geometry_shader_entrypoint */ + Anvil::ShaderModuleStageEntryPoint(), /* tess_control_shader_entrypoint */ + Anvil::ShaderModuleStageEntryPoint(), /* tess_evaluation_shader_entrypoint */ + *m_quad_vs_ptr); + + + + depth_test_off_tri_subpass_gfx_pipeline_create_info_ptr->set_descriptor_set_create_info (m_2ndpass_tri_dsg_ptr->get_descriptor_set_create_info () ); + depth_test_off_quad_subpass_gfx_pipeline_create_info_ptr->set_descriptor_set_create_info(m_2ndpass_quad_dsg_ptr->get_descriptor_set_create_info() ); + + depth_test_off_quad_subpass_gfx_pipeline_create_info_ptr->set_primitive_topology(Anvil::PrimitiveTopology::TRIANGLE_STRIP); + + + gfx_pipeline_manager_ptr->add_pipeline(std::move(depth_test_off_quad_subpass_gfx_pipeline_create_info_ptr), + &m_2ndpass_depth_test_off_quad_pipeline_id); + gfx_pipeline_manager_ptr->add_pipeline(std::move(depth_test_off_tri_subpass_gfx_pipeline_create_info_ptr), + &m_2ndpass_depth_test_off_tri_pipeline_id); + + m_renderpass_quad_ptr = std::move(renderpass_ptr); } else { - renderpass_ptr->add_subpass(*m_tri_fs_ptr, - Anvil::ShaderModuleStageEntryPoint(), /* geometry_shader_entrypoint */ - Anvil::ShaderModuleStageEntryPoint(), /* tess_control_shader_entrypoint */ - Anvil::ShaderModuleStageEntryPoint(), /* tess_evaluation_shader_entrypoint */ - *m_tri_vs_ptr, - &m_renderpass_1stpass_depth_test_always_subpass_id); - renderpass_ptr->add_subpass(*m_tri_fs_ptr, - Anvil::ShaderModuleStageEntryPoint(), /* geometry_shader_entrypoint */ - Anvil::ShaderModuleStageEntryPoint(), /* tess_control_shader_entrypoint */ - Anvil::ShaderModuleStageEntryPoint(), /* tess_evaluation_shader_entrypoint */ - *m_tri_vs_ptr, - &m_renderpass_1stpass_depth_test_equal_ot_subpass_id); - - - renderpass_ptr->add_subpass_color_attachment(m_renderpass_1stpass_depth_test_always_subpass_id, - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, - color_attachment_id, - 0, /* location */ - nullptr); /* opt_attachment_resolve_ptr */ - renderpass_ptr->add_subpass_color_attachment(m_renderpass_1stpass_depth_test_equal_ot_subpass_id, - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, - color_attachment_id, - 0, /* location */ - nullptr); /* opt_attachment_resolve_ptr */ - - renderpass_ptr->add_subpass_depth_stencil_attachment(m_renderpass_1stpass_depth_test_always_subpass_id, - ds_attachment_id, - 
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL); - renderpass_ptr->add_subpass_depth_stencil_attachment(m_renderpass_1stpass_depth_test_equal_ot_subpass_id, - ds_attachment_id, - VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL); - - - renderpass_ptr->get_subpass_graphics_pipeline_id(m_renderpass_1stpass_depth_test_always_subpass_id, - &m_1stpass_depth_test_always_pipeline_id); - renderpass_ptr->get_subpass_graphics_pipeline_id(m_renderpass_1stpass_depth_test_equal_ot_subpass_id, - &m_1stpass_depth_test_equal_pipeline_id); - - gfx_pipeline_manager_ptr->set_pipeline_dsg (m_1stpass_depth_test_always_pipeline_id, - m_1stpass_dsg_ptr); - gfx_pipeline_manager_ptr->toggle_depth_test (m_1stpass_depth_test_always_pipeline_id, - true, /* should_enable */ - VK_COMPARE_OP_ALWAYS); - gfx_pipeline_manager_ptr->toggle_depth_writes(m_1stpass_depth_test_always_pipeline_id, - true); /* should_enable */ - - gfx_pipeline_manager_ptr->set_pipeline_dsg (m_1stpass_depth_test_equal_pipeline_id, - m_1stpass_dsg_ptr); - gfx_pipeline_manager_ptr->toggle_depth_test (m_1stpass_depth_test_equal_pipeline_id, - true, /* should_enable */ - VK_COMPARE_OP_EQUAL); - gfx_pipeline_manager_ptr->toggle_depth_writes(m_1stpass_depth_test_equal_pipeline_id, - true); /* should_enable */ - - /* depth_test_equal_ot subpass must not start before depth_test_always subpass finishes rasterizing */ - renderpass_ptr->add_subpass_to_subpass_dependency(m_renderpass_1stpass_depth_test_always_subpass_id, - m_renderpass_1stpass_depth_test_equal_ot_subpass_id, - VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, /* source_stage_mask */ - VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT, /* destination_stage_mask */ - VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, - VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT, - VK_TRUE); /* by_region */ - - m_renderpass_tris_ptr = renderpass_ptr; + auto depth_test_always_subpass_gfx_pipeline_create_info_ptr = Anvil::GraphicsPipelineCreateInfo::create(Anvil::PipelineCreateFlagBits::NONE, + renderpass_ptr.get(), + m_renderpass_1stpass_depth_test_always_subpass_id, + *m_tri_fs_ptr, + Anvil::ShaderModuleStageEntryPoint(), /* geometry_shader_entrypoint */ + Anvil::ShaderModuleStageEntryPoint(), /* tess_control_shader_entrypoint */ + Anvil::ShaderModuleStageEntryPoint(), /* tess_evaluation_shader_entrypoint */ + *m_tri_vs_ptr); + auto depth_test_equal_ot_subpass_gfx_pipeline_create_info_ptr = Anvil::GraphicsPipelineCreateInfo::create(Anvil::PipelineCreateFlagBits::NONE, + renderpass_ptr.get(), + m_renderpass_1stpass_depth_test_equal_ot_subpass_id, + *m_tri_fs_ptr, + Anvil::ShaderModuleStageEntryPoint(), /* geometry_shader_entrypoint */ + Anvil::ShaderModuleStageEntryPoint(), /* tess_control_shader_entrypoint */ + Anvil::ShaderModuleStageEntryPoint(), /* tess_evaluation_shader_entrypoint */ + *m_tri_vs_ptr); + + depth_test_always_subpass_gfx_pipeline_create_info_ptr->set_descriptor_set_create_info(m_1stpass_dsg_ptr->get_descriptor_set_create_info() ); + depth_test_always_subpass_gfx_pipeline_create_info_ptr->toggle_depth_test (true, /* in_should_enable */ + Anvil::CompareOp::ALWAYS); + depth_test_always_subpass_gfx_pipeline_create_info_ptr->toggle_depth_writes (true); /* in_should_enable */ + + depth_test_equal_ot_subpass_gfx_pipeline_create_info_ptr->set_descriptor_set_create_info(m_1stpass_dsg_ptr->get_descriptor_set_create_info() ); + depth_test_equal_ot_subpass_gfx_pipeline_create_info_ptr->toggle_depth_test (true, /* in_should_enable */ + Anvil::CompareOp::EQUAL); + 
depth_test_equal_ot_subpass_gfx_pipeline_create_info_ptr->toggle_depth_writes (true); /* in_should_enable */ + + + gfx_pipeline_manager_ptr->add_pipeline(std::move(depth_test_always_subpass_gfx_pipeline_create_info_ptr), + &m_1stpass_depth_test_always_pipeline_id); + gfx_pipeline_manager_ptr->add_pipeline(std::move(depth_test_equal_ot_subpass_gfx_pipeline_create_info_ptr), + &m_1stpass_depth_test_equal_pipeline_id); + + m_renderpass_tris_ptr = std::move(renderpass_ptr); } } } @@ -827,59 +925,71 @@ void App::init_semaphores() n_semaphore < m_n_swapchain_images; ++n_semaphore) { - std::shared_ptr new_signal_semaphore_ptr = Anvil::Semaphore::create(m_device_ptr); - std::shared_ptr new_wait_semaphore_ptr = Anvil::Semaphore::create(m_device_ptr); + Anvil::SemaphoreUniquePtr new_signal_semaphore_ptr; + Anvil::SemaphoreUniquePtr new_wait_semaphore_ptr; + + { + auto create_info_ptr = Anvil::SemaphoreCreateInfo::create(m_device_ptr.get() ); + + new_signal_semaphore_ptr = Anvil::Semaphore::create(std::move(create_info_ptr) ); + } + + { + auto create_info_ptr = Anvil::SemaphoreCreateInfo::create(m_device_ptr.get() ); + + new_wait_semaphore_ptr = Anvil::Semaphore::create(std::move(create_info_ptr) ); + } new_signal_semaphore_ptr->set_name_formatted("Frame signal semaphore [%d]", n_semaphore); - new_wait_semaphore_ptr->set_name_formatted ("Frame wait semaphore [%d]", - n_semaphore); + new_wait_semaphore_ptr->set_name_formatted("Frame wait semaphore [%d]", + n_semaphore); - m_frame_signal_semaphores.push_back(new_signal_semaphore_ptr); - m_frame_wait_semaphores.push_back (new_wait_semaphore_ptr); + m_frame_signal_semaphores.push_back(std::move(new_signal_semaphore_ptr) ); + m_frame_wait_semaphores.push_back (std::move(new_wait_semaphore_ptr) ); } } void App::init_shaders() { - std::shared_ptr fs_quad_glsl_ptr; - std::shared_ptr fs_quad_module_ptr; - std::shared_ptr fs_tri_glsl_ptr; - std::shared_ptr fs_tri_module_ptr; - std::shared_ptr vs_quad_glsl_ptr; - std::shared_ptr vs_quad_module_ptr; - std::shared_ptr vs_tri_glsl_ptr; - std::shared_ptr vs_tri_module_ptr; - - fs_quad_glsl_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr, + Anvil::GLSLShaderToSPIRVGeneratorUniquePtr fs_quad_glsl_ptr; + Anvil::ShaderModuleUniquePtr fs_quad_module_ptr; + Anvil::GLSLShaderToSPIRVGeneratorUniquePtr fs_tri_glsl_ptr; + Anvil::ShaderModuleUniquePtr fs_tri_module_ptr; + Anvil::GLSLShaderToSPIRVGeneratorUniquePtr vs_quad_glsl_ptr; + Anvil::ShaderModuleUniquePtr vs_quad_module_ptr; + Anvil::GLSLShaderToSPIRVGeneratorUniquePtr vs_tri_glsl_ptr; + Anvil::ShaderModuleUniquePtr vs_tri_module_ptr; + + fs_quad_glsl_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr.get(), Anvil::GLSLShaderToSPIRVGenerator::MODE_USE_SPECIFIED_SOURCE, g_glsl_render_quad_frag, - Anvil::SHADER_STAGE_FRAGMENT); - fs_tri_glsl_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr, + Anvil::ShaderStage::FRAGMENT); + fs_tri_glsl_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr.get(), Anvil::GLSLShaderToSPIRVGenerator::MODE_USE_SPECIFIED_SOURCE, g_glsl_render_tri_frag, - Anvil::SHADER_STAGE_FRAGMENT); - vs_quad_glsl_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr, + Anvil::ShaderStage::FRAGMENT); + vs_quad_glsl_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr.get(), Anvil::GLSLShaderToSPIRVGenerator::MODE_USE_SPECIFIED_SOURCE, g_glsl_render_quad_vert, - Anvil::SHADER_STAGE_VERTEX); - vs_tri_glsl_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr, + Anvil::ShaderStage::VERTEX); + 
vs_tri_glsl_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr.get(), Anvil::GLSLShaderToSPIRVGenerator::MODE_USE_SPECIFIED_SOURCE, g_glsl_render_tri_vert, - Anvil::SHADER_STAGE_VERTEX); + Anvil::ShaderStage::VERTEX); vs_quad_glsl_ptr->add_definition_value_pair("N_SWAPCHAIN_IMAGES", N_SWAPCHAIN_IMAGES); - fs_quad_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr, - fs_quad_glsl_ptr); - fs_tri_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr, - fs_tri_glsl_ptr); - vs_quad_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr, - vs_quad_glsl_ptr); - vs_tri_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr, - vs_tri_glsl_ptr); + fs_quad_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr.get (), + fs_quad_glsl_ptr.get() ); + fs_tri_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr.get (), + fs_tri_glsl_ptr.get () ); + vs_quad_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr.get (), + vs_quad_glsl_ptr.get() ); + vs_tri_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr.get (), + vs_tri_glsl_ptr.get () ); fs_quad_module_ptr->set_name("Quad fragment shader module"); fs_tri_module_ptr->set_name ("Triangle fragment shader module"); @@ -888,62 +998,61 @@ void App::init_shaders() m_quad_fs_ptr.reset( - new Anvil::ShaderModuleStageEntryPoint( - "main", - fs_quad_module_ptr, - Anvil::SHADER_STAGE_FRAGMENT) + new Anvil::ShaderModuleStageEntryPoint("main", + std::move(fs_quad_module_ptr), + Anvil::ShaderStage::FRAGMENT) ); m_quad_vs_ptr.reset( - new Anvil::ShaderModuleStageEntryPoint( - "main", - vs_quad_module_ptr, - Anvil::SHADER_STAGE_VERTEX) + new Anvil::ShaderModuleStageEntryPoint("main", + std::move(vs_quad_module_ptr), + Anvil::ShaderStage::VERTEX) ); m_tri_fs_ptr.reset( - new Anvil::ShaderModuleStageEntryPoint( - "main", - fs_tri_module_ptr, - Anvil::SHADER_STAGE_FRAGMENT) + new Anvil::ShaderModuleStageEntryPoint("main", + std::move(fs_tri_module_ptr), + Anvil::ShaderStage::FRAGMENT) ); m_tri_vs_ptr.reset( - new Anvil::ShaderModuleStageEntryPoint( - "main", - vs_tri_module_ptr, - Anvil::SHADER_STAGE_VERTEX) + new Anvil::ShaderModuleStageEntryPoint("main", + std::move(vs_tri_module_ptr), + Anvil::ShaderStage::VERTEX) ); } void App::init_swapchain() { - std::shared_ptr device_locked_ptr(m_device_ptr); + { + auto create_info_ptr = Anvil::RenderingSurfaceCreateInfo::create(m_instance_ptr.get(), + m_device_ptr.get (), + m_window_ptr.get () ); - m_rendering_surface_ptr = Anvil::RenderingSurface::create(m_instance_ptr, - m_device_ptr, - m_window_ptr); + m_rendering_surface_ptr = Anvil::RenderingSurface::create(std::move(create_info_ptr) ); + } m_rendering_surface_ptr->set_name("Main rendering surface"); - m_swapchain_ptr = device_locked_ptr->create_swapchain(m_rendering_surface_ptr, - m_window_ptr, - VK_FORMAT_B8G8R8A8_UNORM, - VK_PRESENT_MODE_FIFO_KHR, - VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, - m_n_swapchain_images); + m_swapchain_ptr = reinterpret_cast(m_device_ptr.get() )->create_swapchain(m_rendering_surface_ptr.get(), + m_window_ptr.get (), + Anvil::Format::B8G8R8A8_UNORM, + Anvil::ColorSpaceKHR::SRGB_NONLINEAR_KHR, + Anvil::PresentModeKHR::FIFO_KHR, + Anvil::ImageUsageFlagBits::COLOR_ATTACHMENT_BIT, + m_n_swapchain_images); m_swapchain_ptr->set_name("Main swapchain"); /* Cache the queue we are going to use for presentation */ const std::vector* present_queue_fams_ptr = nullptr; - if 
(!m_rendering_surface_ptr->get_queue_families_with_present_support(device_locked_ptr->get_physical_device(), + if (!m_rendering_surface_ptr->get_queue_families_with_present_support(reinterpret_cast(m_device_ptr.get() )->get_physical_device(), &present_queue_fams_ptr) ) { anvil_assert_fail(); } - m_present_queue_ptr = device_locked_ptr->get_queue(present_queue_fams_ptr->at(0), - 0); /* in_n_queue */ + m_present_queue_ptr = m_device_ptr->get_queue_for_queue_family_index(present_queue_fams_ptr->at(0), + 0); /* in_n_queue */ } void App::init_window() @@ -965,48 +1074,53 @@ void App::init_window() 720, true, /* in_closable */ std::bind(&App::draw_frame, - this) ); + this) + ); } void App::init_vulkan() { /* Create a Vulkan instance */ - m_instance_ptr = Anvil::Instance::create(APP_NAME, /* app_name */ - APP_NAME, /* engine_name */ + { + auto create_info_ptr = Anvil::InstanceCreateInfo::create(APP_NAME, /* in_app_name */ + APP_NAME, /* in_engine_name */ #ifdef ENABLE_VALIDATION - std::bind(&App::on_validation_callback, - this, - std::placeholders::_1, - std::placeholders::_2, - std::placeholders::_3, - std::placeholders::_4) ); + std::bind(&App::on_validation_callback, + this, + std::placeholders::_1, + std::placeholders::_2), #else - nullptr); + Anvil::DebugCallbackFunction(), #endif + false); /* in_mt_safe */ + + m_instance_ptr = Anvil::Instance::create(std::move(create_info_ptr) ); + } m_physical_device_ptr = m_instance_ptr->get_physical_device(0); /* Create a Vulkan device */ - m_device_ptr = Anvil::SGPUDevice::create(m_physical_device_ptr, - Anvil::DeviceExtensionConfiguration(), - std::vector(), /* layers */ - false, /* transient_command_buffer_allocs_only */ - false); /* support_resettable_command_buffers */ + { + auto create_info_ptr = Anvil::DeviceCreateInfo::create_sgpu(m_physical_device_ptr, + true, /* in_enable_shader_module_cache */ + Anvil::DeviceExtensionConfiguration(), + std::vector(), /* in_layers */ + Anvil::CommandPoolCreateFlagBits::NONE, + false); /* in_mt_safe */ + + m_device_ptr = Anvil::SGPUDevice::create(std::move(create_info_ptr) ); + } } -VkBool32 App::on_validation_callback(VkDebugReportFlagsEXT message_flags, - VkDebugReportObjectTypeEXT object_type, - const char* layer_prefix, - const char* message) +void App::on_validation_callback(Anvil::DebugMessageSeverityFlags in_severity, + const char* in_message_ptr) { - if ((message_flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0) + if ((in_severity & Anvil::DebugMessageSeverityFlagBits::ERROR_BIT) != 0) { fprintf(stderr, "[!] %s\n", - message); + in_message_ptr); } - - return false; } void App::run() @@ -1015,9 +1129,9 @@ void App::run() } -int main() +int main(int argc, char *argv[]) { - std::shared_ptr app_ptr(new App() ); + std::unique_ptr app_ptr(new App() ); app_ptr->init(); app_ptr->run(); diff --git a/examples/OutOfOrderRasterization/CMakeLists.txt b/examples/OutOfOrderRasterization/CMakeLists.txt index ba91e7c7..b1bdf6b3 100644 --- a/examples/OutOfOrderRasterization/CMakeLists.txt +++ b/examples/OutOfOrderRasterization/CMakeLists.txt @@ -16,45 +16,23 @@ if(${CMAKE_SYSTEM_NAME} MATCHES "Linux") endif() endif() -add_subdirectory (../.. "${CMAKE_CURRENT_BINARY_DIR}/anvil") +if (NOT ANVIL_LINK_EXAMPLES) + add_subdirectory (../.. "${CMAKE_CURRENT_BINARY_DIR}/anvil") +endif() +target_include_directories(Anvil PUBLIC "${CMAKE_CURRENT_BINARY_DIR}/anvil/include") include_directories(${Anvil_SOURCE_DIR}/include ${OutOfOrderRasterization_SOURCE_DIR}/include) -# Include the Vulkan header. 
-if (WIN32) - include_directories($ENV{VK_SDK_PATH}/Include - $ENV{VULKAN_SDK}/Include) - - if("${CMAKE_SIZEOF_VOID_P}" EQUAL "8") - link_directories ($ENV{VK_SDK_PATH}/Bin - $ENV{VK_SDK_PATH}/Lib - $ENV{VULKAN_SDK}/Bin - $ENV{VULKAN_SDK}/Lib) - else() - link_directories ($ENV{VK_SDK_PATH}/Bin32 - $ENV{VK_SDK_PATH}/Lib32 - $ENV{VULKAN_SDK}/Bin32 - $ENV{VULKAN_SDK}/Lib32) - endif() -else() - include_directories($ENV{VK_SDK_PATH}/x86_64/include - $ENV{VULKAN_SDK}/include - $ENV{VULKAN_SDK}/x86_64/include) - link_directories ($ENV{VK_SDK_PATH}/x86_64/lib - $ENV{VULKAN_SDK}/lib - $ENV{VULKAN_SDK}/x86_64/lib) -endif() - # Create the OutOfOrderRasterization project. add_executable (OutOfOrderRasterization include/app.h include/teapot_data.h - src/app.cpp - src/teapot_data.cpp) + src/app.cpp + src/teapot_data.cpp) # Add linking dependencies for the example projects -add_dependencies (OutOfOrderRasterization Anvil) +add_dependencies(OutOfOrderRasterization Anvil) if (WIN32) target_link_libraries(OutOfOrderRasterization Anvil) diff --git a/examples/OutOfOrderRasterization/include/app.h b/examples/OutOfOrderRasterization/include/app.h index 2755cd35..666fc1a0 100644 --- a/examples/OutOfOrderRasterization/include/app.h +++ b/examples/OutOfOrderRasterization/include/app.h @@ -21,7 +21,7 @@ // #include -#define N_SWAPCHAIN_IMAGES (3) +#define N_SWAPCHAIN_IMAGES (2) class App @@ -59,60 +59,100 @@ class App void update_fps (); void update_teapot_props(uint32_t n_current_swapchain_image); - void draw_frame (); - void on_keypress_event (Anvil::CallbackArgument* callback_data_raw_ptr); - VkBool32 on_validation_callback(VkDebugReportFlagsEXT message_flags, - VkDebugReportObjectTypeEXT object_type, - const char* layer_prefix, - const char* message); + void draw_frame (); + void on_keypress_event (Anvil::CallbackArgument* in_callback_data_raw_ptr); + void on_validation_callback(Anvil::DebugMessageSeverityFlags in_severity, + const char* in_message_ptr); + + #if defined(ENABLE_MGPU_SUPPORT) + std::vector get_render_areas(uint32_t in_afr_render_index = 0) const; + #endif /* Private variables */ - std::weak_ptr m_device_ptr; - std::shared_ptr m_instance_ptr; - std::shared_ptr m_present_queue_ptr; - std::shared_ptr m_query_pool_ptr; - std::shared_ptr m_rendering_surface_ptr; - std::shared_ptr m_swapchain_ptr; - std::shared_ptr m_window_ptr; - - std::shared_ptr m_depth_image_ptr; - std::shared_ptr m_depth_image_view_ptr; - std::vector > m_framebuffers; - std::shared_ptr m_fs_entrypoint_ptr; - std::vector > m_render_cmdbuffers_ooo_on; - std::vector > m_render_cmdbuffers_ooo_off; - std::vector > m_renderpasses; - std::shared_ptr m_vs_entrypoint_ptr; + Anvil::BaseDeviceUniquePtr m_device_ptr; + + Anvil::InstanceUniquePtr m_instance_ptr; + Anvil::Queue* m_present_queue_ptr; + Anvil::QueryPoolUniquePtr m_query_pool_ptr; + Anvil::RenderingSurfaceUniquePtr m_rendering_surface_ptr; + Anvil::SwapchainUniquePtr m_swapchain_ptr; + Anvil::WindowUniquePtr m_window_ptr; + + #if defined(ENABLE_EXPLICIT_SWAPCHAIN_IMAGE_MEMORY_BINDING) + std::vector m_swapchain_images; + std::vector m_swapchain_image_views; + #endif + + Anvil::ImageUniquePtr m_depth_image_ptr; + Anvil::ImageViewUniquePtr m_depth_image_view_ptr; + std::vector m_framebuffers; + std::unique_ptr m_fs_entrypoint_ptr; + + #if !defined(ENABLE_MGPU_SUPPORT) + std::vector m_render_cmdbuffers_ooo_off; + std::vector m_render_cmdbuffers_ooo_on; + #else + Anvil::PrimaryCommandBufferUniquePtr m_dummy_cmdbuffer_ptr; + std::map > m_render_cmdbuffers_ooo_on; + std::map > 
m_render_cmdbuffers_ooo_off; + #endif + + std::vector m_renderpasses; + std::unique_ptr m_vs_entrypoint_ptr; uint32_t m_n_indices; uint32_t m_n_last_semaphore_used; - const uint32_t m_n_swapchain_images; + uint32_t m_n_swapchain_images; bool m_ooo_enabled; bool m_should_rotate; - std::shared_ptr m_teapot_props_data_ptr; + std::unique_ptr m_teapot_props_data_ptr; Anvil::Time m_time; - typedef struct + #if defined(USE_LOCAL_SFR_PRESENTATION_MODE) + struct SwapchainPeerImages + { + /* Holds N_SWAPCHAIN_IMAGES peer images for consecutive swapchain image indices */ + std::vector peer_images; + }; + + uint32_t m_n_presenting_physical_device; + std::vector m_swapchain_peer_images_per_physical_device; + #endif + #if defined(USE_LOCAL_AFR_PRESENTATION_MODE) || defined(USE_REMOTE_AFR_PRESENTATION_MODE) + uint32_t m_n_rendering_physical_device; + #endif + + typedef struct SemaphoreBundle { + SemaphoreBundle() + { + /* Stub */ + } + /* Holds as many semaphores as there are physical devices bound to a logical device */ - std::vector > semaphores; + std::vector semaphores; + + private: + SemaphoreBundle (const SemaphoreBundle&) = delete; + SemaphoreBundle& operator=(const SemaphoreBundle&) = delete; } SemaphoreBundle; - std::vector m_frame_signal_semaphore_bundles; - std::vector m_frame_wait_semaphore_bundles; + std::vector m_frame_acquisition_wait_semaphores; + std::vector > m_frame_signal_semaphore_bundles; + std::vector > m_frame_wait_semaphore_bundles; - bool m_frame_drawn_status[N_SWAPCHAIN_IMAGES]; - Anvil::GraphicsPipelineID m_general_pipeline_id; - Anvil::GraphicsPipelineID m_ooo_disabled_pipeline_id; - Anvil::GraphicsPipelineID m_ooo_enabled_pipeline_id; + bool m_frame_drawn_status[N_SWAPCHAIN_IMAGES]; + Anvil::PipelineID m_general_pipeline_id; + Anvil::PipelineID m_ooo_disabled_pipeline_id; + Anvil::PipelineID m_ooo_enabled_pipeline_id; - std::shared_ptr m_index_buffer_ptr; - std::shared_ptr m_query_results_buffer_ptr; - std::shared_ptr m_vertex_buffer_ptr; + Anvil::BufferUniquePtr m_index_buffer_ptr; + Anvil::BufferUniquePtr m_query_results_buffer_ptr; + Anvil::BufferUniquePtr m_vertex_buffer_ptr; - std::vector > m_dsg_ptrs; - std::vector > m_properties_buffer_ptrs; - bool m_properties_data_set; + std::vector m_dsg_ptrs; + std::vector m_properties_buffer_ptrs; + bool m_properties_data_set; uint32_t m_n_frames_drawn; std::vector m_timestamp_deltas; diff --git a/examples/OutOfOrderRasterization/src/app.cpp b/examples/OutOfOrderRasterization/src/app.cpp index 2486417b..53ce8724 100644 --- a/examples/OutOfOrderRasterization/src/app.cpp +++ b/examples/OutOfOrderRasterization/src/app.cpp @@ -20,6 +20,61 @@ // THE SOFTWARE. // +/* Uncomment the #define below to enable mGPU support. + * + * When enabled, one (and only one!) of the USE_xxx_PRESENTATION_MODE also needs to be enabled. + * + * When enabled, N_SWAPCHAIN_IMAGES must equal the number of logical devices assigned to the physical device. + * This is due to simplification in the rendering code of the app and could be improved when needed. + */ +//#define ENABLE_MGPU_SUPPORT + +/* If mGPU support is enabled, uncomment one (and only one!) of the following #defines to make the app use the specified + * present mode. + * + * NOTE: Path for LOCAL_MULTI_DEVICE presentation mode is not supported at the moment. If needed, just let me know. + * + * NOTE: If the implementation does not support a given presentation mode, you'll get an assertion failure related to missing caps. 
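+ *
+ * NOTE: Illustrative configuration only (assumes a device group made of two physical devices): uncomment
+ *       ENABLE_MGPU_SUPPORT above, uncomment exactly one of the USE_*_PRESENTATION_MODE #defines below
+ *       (e.g. USE_LOCAL_AFR_PRESENTATION_MODE) and keep N_SWAPCHAIN_IMAGES in app.h at 2, so that it matches
+ *       the number of physical devices backing the logical device (cf. the n_physical_devices == N_SWAPCHAIN_IMAGES
+ *       assertion in App::draw_frame() ).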
+ * + */ +//#define USE_LOCAL_AFR_PRESENTATION_MODE +//#define USE_LOCAL_SFR_PRESENTATION_MODE +//#define USE_REMOTE_AFR_PRESENTATION_MODE +//#define USE_SUM_SFR_PRESENTATION_MODE + + +#if defined (USE_LOCAL_AFR_PRESENTATION_MODE) + /* Uncomment the #define below to explicitly bind image memory to the created swapchain. + * + * Optional. + */ + // #define ENABLE_EXPLICIT_SWAPCHAIN_IMAGE_MEMORY_BINDING + +#elif defined (USE_LOCAL_SFR_PRESENTATION_MODE) + /* No extra knobs available */ + +#elif defined (USE_REMOTE_AFR_PRESENTATION_MODE) + /* Uncomment the #define below to explicitly bind image memory to the created swapchain. + * + * Optional. + */ + //#define ENABLE_EXPLICIT_SWAPCHAIN_IMAGE_MEMORY_BINDING + +#elif defined (USE_SUM_SFR_PRESENTATION_MODE) + /* Uncomment the #define below to explicitly bind image memory to the created swapchain. + * + * Optional. + */ + // #define ENABLE_EXPLICIT_SWAPCHAIN_IMAGE_MEMORY_BINDING + + /* Uncomment the #define below to enable explicit SFR rectangle definitions. + * Optional. Requires ENABLE_EXPLICIT_SWAPCHAIN_IMAGE_MEMORY_BINDING to be defined. + */ + // #define ENABLE_EXPLICIT_SFR_RECT_DEFINITIONS +#endif + + + /* Uncomment the #define below to enable off-screen rendering */ // #define ENABLE_OFFSCREEN_RENDERING @@ -29,10 +84,20 @@ #include #include "config.h" +#include "misc/buffer_create_info.h" +#include "misc/framebuffer_create_info.h" #include "misc/glsl_to_spirv.h" +#include "misc/graphics_pipeline_create_info.h" +#include "misc/image_create_info.h" +#include "misc/image_view_create_info.h" +#include "misc/instance_create_info.h" #include "misc/io.h" #include "misc/memory_allocator.h" #include "misc/object_tracker.h" +#include "misc/rendering_surface_create_info.h" +#include "misc/render_pass_create_info.h" +#include "misc/semaphore_create_info.h" +#include "misc/swapchain_create_info.h" #include "misc/time.h" #include "misc/window_factory.h" #include "wrappers/buffer.h" @@ -58,6 +123,60 @@ #include "app.h" /* Sanity checks */ +#if defined(ENABLE_EXPLICIT_SWAPCHAIN_IMAGE_MEMORY_BINDING) && !defined(ENABLE_MGPU_SUPPORT) + #error If ENABLE_EXPLICIT_SWAPCHAIN_IMAGE_MEMORY_BINDING is enabled, ENABLE_MGPU_SUPPORT must also be defined. +#endif + +#if defined(ENABLE_EXPLICIT_SFR_RECT_DEFINITIONS) && !defined(ENABLE_EXPLICIT_SWAPCHAIN_IMAGE_MEMORY_BINDING) + #error If ENABLE_EXPLICIT_SFR_RECT_DEFINITIONS is enabled, ENABLE_EXPLICIT_SWAPCHAIN_IMAGE_MEMORY_BINDING must also be defined. +#endif + +#if defined(ENABLE_MGPU_SUPPORT) + #if !defined(USE_LOCAL_AFR_PRESENTATION_MODE) && !defined(USE_LOCAL_SFR_PRESENTATION_MODE) && !defined(USE_REMOTE_AFR_PRESENTATION_MODE) && !defined(USE_SUM_SFR_PRESENTATION_MODE) + #error One of the USE_*_PRESENTATION_MODE #defines needs to be uncommented. + #endif +#endif + +#if defined(USE_LOCAL_AFR_PRESENTATION_MODE) + #if defined(USE_LOCAL_SFR_PRESENTATION_MODE) || defined(USE_REMOTE_AFR_PRESENTATION_MODE) || defined(USE_SUM_SFR_PRESENTATION_MODE) + #error More than one presentation mode #defines enabled.
+ #endif + + #if !defined(ENABLE_MGPU_SUPPORT) + #error If USE_LOCAL_SFR_PRESENTATION_MODE is enabled, ENABLE_MGPU_SUPPORT must also be defined. + #endif +#endif + +#if defined(USE_REMOTE_AFR_PRESENTATION_MODE) + #if defined(USE_LOCAL_AFR_PRESENTATION_MODE) || defined(USE_LOCAL_SFR_PRESENTATION_MODE) || defined(USE_SUM_SFR_PRESENTATION_MODE) + #error More than one presentation mode #defines enabled. + #endif + + #if !defined(ENABLE_MGPU_SUPPORT) + #error If USE_REMOTE_AFR_PRESENTATION_MODE is enabled, ENABLE_MGPU_SUPPORT must also be defined. + #endif +#endif + +#if defined(USE_SUM_SFR_PRESENTATION_MODE) + #if defined(USE_LOCAL_AFR_PRESENTATION_MODE) || defined(USE_LOCAL_SFR_PRESENTATION_MODE) || defined(USE_REMOTE_AFR_PRESENTATION_MODE) + #error More than one presentation mode #defines enabled. + #endif + + #if !defined(ENABLE_MGPU_SUPPORT) + #error If USE_SUM_SFR_PRESENTATION_MODE is enabled, ENABLE_MGPU_SUPPORT must also be defined. + #endif +#endif + #if defined(_WIN32) #if !defined(ANVIL_INCLUDE_WIN3264_WINDOW_SYSTEM_SUPPORT) && !defined(ENABLE_OFFSCREEN_RENDERING) #error Anvil has not been built with Win32/64 window system support. The application can only be built in offscreen rendering mode. @@ -163,15 +282,21 @@ static const char* vs_body = App::App() - :m_general_pipeline_id (-1), - m_n_frames_drawn ( 0), - m_n_indices ( 0), - m_n_last_semaphore_used (-1), - m_n_swapchain_images (N_SWAPCHAIN_IMAGES), - m_ooo_disabled_pipeline_id(-1), - m_ooo_enabled (false), - m_ooo_enabled_pipeline_id (-1), - m_should_rotate (true) + :m_general_pipeline_id (-1), + m_n_frames_drawn ( 0), + m_n_indices ( 0), + m_n_last_semaphore_used (-1), +#if defined(USE_LOCAL_AFR_PRESENTATION_MODE) || defined(USE_REMOTE_AFR_PRESENTATION_MODE) + m_n_rendering_physical_device(0), +#endif +#if defined(USE_LOCAL_SFR_PRESENTATION_MODE) + m_n_presenting_physical_device(0), +#endif + m_n_swapchain_images (N_SWAPCHAIN_IMAGES), + m_ooo_disabled_pipeline_id (-1), + m_ooo_enabled (false), + m_ooo_enabled_pipeline_id (-1), + m_should_rotate (true) { memset(m_frame_drawn_status, 0, @@ -204,10 +329,9 @@ void App::clear_console_line() void App::deinit() { + Anvil::Vulkan::vkDeviceWaitIdle(m_device_ptr->get_device_vk() ); - vkDeviceWaitIdle(m_device_ptr.lock()->get_device_vk() ); - - const Anvil::GraphicsPipelineID gfx_pipeline_ids[] = + const Anvil::PipelineID gfx_pipeline_ids[] = { m_general_pipeline_id, m_ooo_disabled_pipeline_id, @@ -216,19 +340,27 @@ void App::deinit() const uint32_t n_gfx_pipeline_ids = sizeof(gfx_pipeline_ids) / sizeof(gfx_pipeline_ids[0]); { - std::shared_ptr device_locked_ptr (m_device_ptr); - std::shared_ptr gfx_pipeline_manager_ptr(device_locked_ptr->get_graphics_pipeline_manager() ); + auto gfx_pipeline_manager_ptr(m_device_ptr->get_graphics_pipeline_manager() ); for (uint32_t n_gfx_pipeline_id = 0; n_gfx_pipeline_id < n_gfx_pipeline_ids; ++n_gfx_pipeline_id) { - const Anvil::GraphicsPipelineID& pipeline_id = gfx_pipeline_ids[n_gfx_pipeline_id]; + const Anvil::PipelineID& pipeline_id = gfx_pipeline_ids[n_gfx_pipeline_id]; gfx_pipeline_manager_ptr->delete_pipeline(pipeline_id); } } + #if defined(ENABLE_MGPU_SUPPORT) + m_frame_acquisition_wait_semaphores.clear(); + #endif + + #if defined(ENABLE_EXPLICIT_SWAPCHAIN_IMAGE_MEMORY_BINDING) + m_swapchain_image_views.clear(); + m_swapchain_images.clear(); + #endif + m_dsg_ptrs.clear(); m_frame_signal_semaphore_bundles.clear(); m_frame_wait_semaphore_bundles.clear(); @@ -238,6 +370,14 @@ void App::deinit() m_render_cmdbuffers_ooo_off.clear(); 
m_renderpasses.clear(); + #if defined(ENABLE_MGPU_SUPPORT) + m_dummy_cmdbuffer_ptr.reset(); + #endif + + #if defined(USE_LOCAL_SFR_PRESENTATION_MODE) + m_swapchain_peer_images_per_physical_device.clear(); + #endif + m_depth_image_ptr.reset(); m_depth_image_view_ptr.reset(); m_fs_entrypoint_ptr.reset(); @@ -247,14 +387,10 @@ void App::deinit() m_vertex_buffer_ptr.reset(); m_vs_entrypoint_ptr.reset(); - m_present_queue_ptr.reset(); m_rendering_surface_ptr.reset(); m_swapchain_ptr.reset(); - m_device_ptr.lock()->destroy(); m_device_ptr.reset(); - - m_instance_ptr->destroy(); m_instance_ptr.reset(); m_window_ptr.reset(); @@ -262,27 +398,158 @@ void App::deinit() void App::draw_frame() { - std::shared_ptr device_locked_ptr = m_device_ptr.lock(); - std::shared_ptr sgpu_device_locked_ptr = std::dynamic_pointer_cast(device_locked_ptr); - static const VkPipelineStageFlags dst_stage_mask = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; - const uint32_t n_physical_devices = 1; - uint32_t n_swapchain_image; - std::weak_ptr physical_device_ptr = sgpu_device_locked_ptr->get_physical_device(); - std::weak_ptr* physical_devices_ptr = &physical_device_ptr; - std::shared_ptr render_cmdbuffer_ptr; - const VkPipelineStageFlags wait_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT; + const Anvil::DeviceType device_type = m_device_ptr->get_type(); + static const Anvil::PipelineStageFlags dst_stage_mask = Anvil::PipelineStageFlagBits::TOP_OF_PIPE_BIT; + Anvil::Semaphore* frame_ready_for_present_semaphores [4]; + Anvil::SemaphoreMGPUSubmission frame_ready_for_present_submissions [4]; + Anvil::SemaphoreMGPUSubmission frame_ready_to_render_submissions [4]; + uint32_t n_physical_devices; + uint32_t n_swapchain_image; + const Anvil::PhysicalDevice* physical_device_ptr = nullptr; + const Anvil::PhysicalDevice* const* physical_devices_ptr = nullptr; + Anvil::PrimaryCommandBuffer* render_cmdbuffer_ptr = nullptr; + const Anvil::PipelineStageFlags wait_stage_mask = Anvil::PipelineStageFlagBits::ALL_COMMANDS_BIT; + + switch (device_type) + { + case Anvil::DeviceType::MULTI_GPU: + { + const Anvil::MGPUDevice* mgpu_device_ptr(dynamic_cast(m_device_ptr.get() ) ); + + n_physical_devices = mgpu_device_ptr->get_n_physical_devices(); + physical_devices_ptr = mgpu_device_ptr->get_physical_devices (); + + #if defined(USE_LOCAL_AFR_PRESENTATION_MODE) || defined(USE_REMOTE_AFR_PRESENTATION_MODE) + m_n_rendering_physical_device = (m_n_rendering_physical_device + 1) % n_physical_devices; + #endif + + break; + } + + case Anvil::DeviceType::SINGLE_GPU: + { + const Anvil::SGPUDevice* sgpu_device_ptr(dynamic_cast(m_device_ptr.get() ) ); + + n_physical_devices = 1; + physical_device_ptr = sgpu_device_ptr->get_physical_device(); + physical_devices_ptr = &physical_device_ptr; + + break; + } + + default: + { + anvil_assert(false); + } + } /* Determine the signal + wait semaphores to use for drawing this frame */ m_n_last_semaphore_used = (m_n_last_semaphore_used + 1) % m_n_swapchain_images; - auto& curr_frame_signal_semaphores = m_frame_signal_semaphore_bundles.at (m_n_last_semaphore_used); - const auto& curr_frame_wait_semaphores = m_frame_wait_semaphore_bundles.at (m_n_last_semaphore_used); - const auto& curr_frame_acqusition_wait_semaphore_ptr = curr_frame_wait_semaphores.semaphores.at(0); - auto& curr_frame_present_wait_semaphore_ptr = curr_frame_signal_semaphores.semaphores.at(0); + auto& curr_frame_signal_semaphores_ptr = m_frame_signal_semaphore_bundles.at (m_n_last_semaphore_used); + auto& curr_frame_wait_semaphores_ptr = 
m_frame_wait_semaphore_bundles.at (m_n_last_semaphore_used); + + #if defined(ENABLE_MGPU_SUPPORT) + const auto& curr_frame_acqusition_wait_semaphore_ptr = m_frame_acquisition_wait_semaphores[m_n_last_semaphore_used]; + #else + auto& curr_frame_acqusition_wait_semaphore_ptr = curr_frame_wait_semaphores_ptr->semaphores.at(0); + #endif /* Determine the semaphore which the swapchain image */ - n_swapchain_image = m_swapchain_ptr->acquire_image(curr_frame_acqusition_wait_semaphore_ptr, - true); /* in_should_block */ + #if !defined(ENABLE_MGPU_SUPPORT) || defined(USE_LOCAL_SFR_PRESENTATION_MODE) || defined(USE_SUM_SFR_PRESENTATION_MODE) + { + const auto acquire_result = m_swapchain_ptr->acquire_image(curr_frame_acqusition_wait_semaphore_ptr.get(), + &n_swapchain_image, + true); /* in_should_block */ + + ANVIL_REDUNDANT_VARIABLE_CONST(acquire_result); + anvil_assert (acquire_result == Anvil::SwapchainOperationErrorCode::SUCCESS); + } + #elif defined(USE_LOCAL_AFR_PRESENTATION_MODE) || defined(USE_REMOTE_AFR_PRESENTATION_MODE) + { + m_swapchain_ptr->acquire_image(curr_frame_acqusition_wait_semaphore_ptr.get(), + 1, /* n_mgpu_physical_device */ + &physical_devices_ptr[m_n_rendering_physical_device], + &n_swapchain_image, + true); /* in_should_block */ + } + #else + #error Not supported? + #endif + + /* Set up semaphores we're going to use to render this frame. */ + anvil_assert(n_physical_devices < sizeof(frame_ready_to_render_submissions) / sizeof(frame_ready_to_render_submissions[0]) ); + + for (uint32_t n_signal_sem = 0; + n_signal_sem < n_physical_devices; + ++n_signal_sem) + { + frame_ready_for_present_submissions[n_signal_sem].device_index = n_signal_sem; + frame_ready_for_present_submissions[n_signal_sem].semaphore_ptr = curr_frame_signal_semaphores_ptr->semaphores[n_signal_sem].get(); + + frame_ready_for_present_semaphores[n_signal_sem] = frame_ready_for_present_submissions[n_signal_sem].semaphore_ptr; + + frame_ready_to_render_submissions[n_signal_sem].device_index = n_signal_sem; + frame_ready_to_render_submissions[n_signal_sem].semaphore_ptr = curr_frame_wait_semaphores_ptr->semaphores[n_signal_sem].get(); + } + + #if defined(ENABLE_MGPU_SUPPORT) + { + Anvil::CommandBufferMGPUSubmission dummy_submission; + Anvil::SemaphoreMGPUSubmission wait_semaphore_submission; + + #if defined(USE_LOCAL_AFR_PRESENTATION_MODE) || defined(USE_REMOTE_AFR_PRESENTATION_MODE) + { + wait_semaphore_submission.device_index = m_n_rendering_physical_device; + } + #elif defined(USE_LOCAL_SFR_PRESENTATION_MODE) || defined(USE_SUM_SFR_PRESENTATION_MODE) + { + /* It shouldn't matter which physical device we wait on for the frame acquisition semaphore. */ + wait_semaphore_submission.device_index = 0; + } + #else + #error Not supported? 
+ #endif + + wait_semaphore_submission.semaphore_ptr = curr_frame_acqusition_wait_semaphore_ptr.get(); + + #if defined(USE_LOCAL_AFR_PRESENTATION_MODE) || defined(USE_REMOTE_AFR_PRESENTATION_MODE) + { + dummy_submission.cmd_buffer_ptr = m_dummy_cmdbuffer_ptr.get(); + dummy_submission.device_mask = (1 << m_n_rendering_physical_device); + + m_present_queue_ptr->submit( + Anvil::SubmitInfo::create_wait_execute_signal(&dummy_submission, + 1, /* n_command_buffer_submissions */ + 1, /* n_signal_semaphore_submissions */ + frame_ready_to_render_submissions + m_n_rendering_physical_device, + 1, /* n_wait_semaphore_submissions */ + &wait_semaphore_submission, + &dst_stage_mask, + false) /* should_block */ + ); + } + #elif defined(USE_LOCAL_SFR_PRESENTATION_MODE) || defined(USE_SUM_SFR_PRESENTATION_MODE) + { + dummy_submission.cmd_buffer_ptr = m_dummy_cmdbuffer_ptr.get(); + dummy_submission.device_mask = (1 << n_physical_devices) - 1; + + m_present_queue_ptr->submit( + Anvil::SubmitInfo::create_wait_execute_signal(&dummy_submission, + 1, /* n_command_buffer_submissions */ + n_physical_devices, /* n_signal_semaphore_submissions */ + frame_ready_to_render_submissions, + 1, /* n_wait_semaphore_submissions */ + &wait_semaphore_submission, + &dst_stage_mask, + false) /* should_block */ + ); + } + #else + #error Not supported? + #endif + } + #endif /* if the frame has already been rendered to in the past, then given the fact we use FIFO presentation mode, * we should be safe to extract the timestamps which must have been written by now. @@ -291,22 +558,40 @@ void App::draw_frame() { uint64_t timestamps[2]; /* top of pipe, bottom of pipe */ + #if defined(ENABLE_MGPU_SUPPORT) + { + /* See ENABLE_MGPU_SUPPORT documentation for more details reg. the assertion check below. */ + anvil_assert(n_physical_devices == N_SWAPCHAIN_IMAGES); + } + #endif + /* TODO: Do better than this. 
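 * A lighter-weight scheme (just a suggestion, not implemented here) would be to associate a fence with each
 * swapchain image's rendering submission and wait on that fence alone, instead of idling the entire device
 * before reading the timestamps back.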
*/ - vkDeviceWaitIdle(device_locked_ptr->get_device_vk() ); + Anvil::Vulkan::vkDeviceWaitIdle(m_device_ptr->get_device_vk() ); - const uint32_t n_iterations = 1; + #if !defined(ENABLE_MGPU_SUPPORT) || (!defined(USE_SUM_SFR_PRESENTATION_MODE) && !defined(USE_LOCAL_SFR_PRESENTATION_MODE) ) + const uint32_t n_iterations = 1; + #elif defined(USE_LOCAL_SFR_PRESENTATION_MODE) || defined(USE_SUM_SFR_PRESENTATION_MODE) + const uint32_t n_iterations = n_physical_devices; + #endif for (uint32_t n_iteration = 0; n_iteration < n_iterations; ++n_iteration) { - auto current_physical_device_ptr = physical_devices_ptr[n_iteration]; + #if !defined(ENABLE_MGPU_SUPPORT) || defined(USE_LOCAL_SFR_PRESENTATION_MODE) || defined(USE_SUM_SFR_PRESENTATION_MODE) + auto device_mask = (1 << n_iteration); + #else + auto device_mask = (1 << m_n_rendering_physical_device); + #endif m_query_results_buffer_ptr->read(n_swapchain_image * sizeof(uint64_t) * 2, /* top of pipe, bottom of pipe */ sizeof(timestamps), +#if defined(ENABLE_MGPU_SUPPORT) + device_mask, +#endif timestamps); - anvil_assert(timestamps[1] != timestamps[0]); + // anvil_assert(timestamps[1] != timestamps[0]); m_timestamp_deltas.push_back(timestamps[1] - timestamps[0]); } @@ -323,25 +608,178 @@ void App::draw_frame() /* Submit work chunks and present */ if (m_ooo_enabled) { - render_cmdbuffer_ptr = m_render_cmdbuffers_ooo_on.at(n_swapchain_image); + #if defined(ENABLE_MGPU_SUPPORT) + #if defined(USE_LOCAL_SFR_PRESENTATION_MODE) || defined(USE_SUM_SFR_PRESENTATION_MODE) + anvil_assert(m_render_cmdbuffers_ooo_on.size() == 1); + + render_cmdbuffer_ptr = m_render_cmdbuffers_ooo_on.at(0).at(n_swapchain_image).get(); + #elif defined(USE_LOCAL_AFR_PRESENTATION_MODE) || defined(USE_REMOTE_AFR_PRESENTATION_MODE) + render_cmdbuffer_ptr = m_render_cmdbuffers_ooo_on.at(m_n_rendering_physical_device).at(n_swapchain_image).get(); + #else + #error Not supported? + #endif + #else + render_cmdbuffer_ptr = m_render_cmdbuffers_ooo_on.at(n_swapchain_image).get(); + #endif } else { - render_cmdbuffer_ptr = m_render_cmdbuffers_ooo_off.at(n_swapchain_image); + #if defined(ENABLE_MGPU_SUPPORT) + #if defined(USE_LOCAL_SFR_PRESENTATION_MODE) || defined(USE_SUM_SFR_PRESENTATION_MODE) + anvil_assert(m_render_cmdbuffers_ooo_on.size() == 1); + + render_cmdbuffer_ptr = m_render_cmdbuffers_ooo_off.at(0).at(n_swapchain_image).get(); + #elif defined(USE_LOCAL_AFR_PRESENTATION_MODE) || defined(USE_REMOTE_AFR_PRESENTATION_MODE) + render_cmdbuffer_ptr = m_render_cmdbuffers_ooo_off.at(m_n_rendering_physical_device).at(n_swapchain_image).get(); + #else + #error Not supported? 
+ #endif + #else + render_cmdbuffer_ptr = m_render_cmdbuffers_ooo_off.at(n_swapchain_image).get(); + #endif + } + + #if !defined(ENABLE_MGPU_SUPPORT) + { + m_present_queue_ptr->submit( + Anvil::SubmitInfo::create_wait_execute_signal(render_cmdbuffer_ptr, + 1, /* n_semaphores_to_signal */ + &frame_ready_for_present_submissions[0].semaphore_ptr, + 1, /* n_semaphores_to_wait_on */ + &frame_ready_to_render_submissions[0].semaphore_ptr, + &wait_stage_mask, + false /* should_block */) + ); } + #else + { + Anvil::CommandBufferMGPUSubmission cmd_buffer_submission; - m_present_queue_ptr->submit_command_buffer_with_signal_wait_semaphores(render_cmdbuffer_ptr, - 1, /* n_semaphores_to_signal */ - &curr_frame_present_wait_semaphore_ptr, - 1, /* n_semaphores_to_wait_on */ - &curr_frame_acqusition_wait_semaphore_ptr, - &wait_stage_mask, - false /* should_block */); + cmd_buffer_submission.cmd_buffer_ptr = render_cmdbuffer_ptr; + + #if defined(USE_LOCAL_AFR_PRESENTATION_MODE) || defined(USE_REMOTE_AFR_PRESENTATION_MODE) + { + cmd_buffer_submission.device_mask = (1 << m_n_rendering_physical_device); + + m_present_queue_ptr->submit( + Anvil::SubmitInfo::create_wait_execute_signal(&cmd_buffer_submission, + 1, /* n_command_buffer_submissions */ + 1, /* n_signal_semaphore_submissions */ + frame_ready_for_present_submissions + m_n_rendering_physical_device, + 1, /* n_wait_semaphore_submissions */ + frame_ready_to_render_submissions + m_n_rendering_physical_device, + &wait_stage_mask, + false /* should_block */) + ); + } + #elif defined(USE_LOCAL_SFR_PRESENTATION_MODE) || defined(USE_SUM_SFR_PRESENTATION_MODE) + { + cmd_buffer_submission.device_mask = (1 << n_physical_devices) - 1; + + m_present_queue_ptr->submit( + Anvil::SubmitInfo::create_wait_execute_signal(&cmd_buffer_submission, + 1, /* n_command_buffer_submissions */ + n_physical_devices, + frame_ready_for_present_submissions, + n_physical_devices, /* n_wait_semaphore_submissions */ + frame_ready_to_render_submissions, + &wait_stage_mask, + false /* should_block */) + ); + } + #else + #error Not supported? 
+ #endif + } + #endif + + #if !defined(ENABLE_MGPU_SUPPORT) + { + Anvil::SwapchainOperationErrorCode present_result = Anvil::SwapchainOperationErrorCode::DEVICE_LOST; - m_present_queue_ptr->present(m_swapchain_ptr, - n_swapchain_image, - 1, /* n_wait_semaphores */ - &curr_frame_present_wait_semaphore_ptr); + m_present_queue_ptr->present(m_swapchain_ptr.get(), + n_swapchain_image, + n_physical_devices, /* n_wait_semaphores */ + frame_ready_for_present_semaphores, + &present_result); + + ANVIL_REDUNDANT_VARIABLE(present_result); + anvil_assert (present_result == Anvil::SwapchainOperationErrorCode::SUCCESS); + } + #else + { + #if defined(USE_LOCAL_AFR_PRESENTATION_MODE) || defined(USE_LOCAL_SFR_PRESENTATION_MODE) + { + const Anvil::MGPUDevice* mgpu_device_ptr (dynamic_cast(m_device_ptr.get() )); + Anvil::SwapchainOperationErrorCode present_result (Anvil::SwapchainOperationErrorCode::DEVICE_LOST); + Anvil::LocalModePresentationItem presentation_item; + const Anvil::PhysicalDevice* presenting_physical_device_ptr (nullptr); + Anvil::Semaphore* wait_semaphore_ptr (nullptr); + + #if defined(USE_LOCAL_SFR_PRESENTATION_MODE) + presenting_physical_device_ptr = mgpu_device_ptr->get_physical_device(m_n_presenting_physical_device); + wait_semaphore_ptr = frame_ready_for_present_semaphores[m_n_presenting_physical_device]; + #else + presenting_physical_device_ptr = mgpu_device_ptr->get_physical_device(m_n_rendering_physical_device); + wait_semaphore_ptr = frame_ready_for_present_semaphores[m_n_rendering_physical_device]; + #endif + + presentation_item.physical_device_ptr = presenting_physical_device_ptr; + presentation_item.swapchain_image_index = n_swapchain_image; + presentation_item.swapchain_ptr = m_swapchain_ptr.get(); + + m_present_queue_ptr->present_in_local_presentation_mode(1, + &presentation_item, + 1, /* n_wait_semaphores */ + &wait_semaphore_ptr, + &present_result); + + ANVIL_REDUNDANT_VARIABLE(present_result); + anvil_assert (present_result == Anvil::SwapchainOperationErrorCode::SUCCESS); + } + #elif defined(USE_REMOTE_AFR_PRESENTATION_MODE) + { + const Anvil::MGPUDevice* mgpu_device_ptr (dynamic_cast(m_device_ptr.get() )); + Anvil::SwapchainOperationErrorCode present_result (Anvil::SwapchainOperationErrorCode::DEVICE_LOST); + Anvil::RemoteModePresentationItem presentation_item; + + presentation_item.physical_device_ptr = mgpu_device_ptr->get_physical_device(m_n_rendering_physical_device); + presentation_item.swapchain_image_index = n_swapchain_image; + presentation_item.swapchain_ptr = m_swapchain_ptr.get(); + + m_present_queue_ptr->present_in_remote_presentation_mode(1, /* n_remote_mode_presentation_items */ + &presentation_item, + 1, /* n_wait_semaphores */ + frame_ready_for_present_semaphores + m_n_rendering_physical_device, + &present_result); + + ANVIL_REDUNDANT_VARIABLE(present_result); + anvil_assert (present_result == Anvil::SwapchainOperationErrorCode::SUCCESS); + } + #elif defined(USE_SUM_SFR_PRESENTATION_MODE) + { + Anvil::SwapchainOperationErrorCode present_result = Anvil::SwapchainOperationErrorCode::DEVICE_LOST; + Anvil::SumModePresentationItem presentation_item; + + presentation_item.n_physical_devices = n_physical_devices; + presentation_item.physical_devices_ptr = physical_devices_ptr; + presentation_item.swapchain_image_index = n_swapchain_image; + presentation_item.swapchain_ptr = m_swapchain_ptr.get(); + + m_present_queue_ptr->present_in_sum_presentation_mode(1, /* n_sum_mode_presentation_items */ + &presentation_item, + n_physical_devices, /* n_wait_semaphores */ + 
frame_ready_for_present_semaphores, + &present_result); + + ANVIL_REDUNDANT_VARIABLE(present_result); + anvil_assert (present_result == Anvil::SwapchainOperationErrorCode::SUCCESS); + } + #else + #error You must enable one of the USE_*_PRESENTATION_MODE #defines. + #endif + } + #endif ++m_n_frames_drawn; @@ -357,6 +795,103 @@ void App::draw_frame() #endif } +#if defined(ENABLE_MGPU_SUPPORT) + +std::vector App::get_render_areas(uint32_t in_afr_render_index) const +{ + const Anvil::MGPUDevice* mgpu_device_locked_ptr(dynamic_cast(m_device_ptr.get() )); + const uint32_t n_physical_devices (mgpu_device_locked_ptr->get_n_physical_devices() ); + std::vector render_areas; + bool result; + VkExtent2D sfr_tile_size; + VkExtent2D split_chunk_size; + const uint32_t window_height (m_window_ptr->get_height_at_creation_time()); + const uint32_t window_width (m_window_ptr->get_width_at_creation_time () ); + + if ((m_swapchain_ptr->get_create_info_ptr()->get_flags() & Anvil::SwapchainCreateFlagBits::SPLIT_INSTANCE_BIND_REGIONS_BIT) != 0) + { + result = m_swapchain_ptr->get_image(0)->get_SFR_tile_size(&sfr_tile_size); + anvil_assert(result); + } + else + { + /* SFR is disabled - we don't need to follow any specific alignment requirements */ + anvil_assert((window_width % n_physical_devices) == 0); + + sfr_tile_size.width = window_width / n_physical_devices; + sfr_tile_size.height = window_height; + } + + split_chunk_size.width = Anvil::Utils::round_up(window_width / n_physical_devices, sfr_tile_size.width); + split_chunk_size.height = Anvil::Utils::round_up(window_height, sfr_tile_size.height); + + #if defined(USE_LOCAL_SFR_PRESENTATION_MODE) || defined(USE_SUM_SFR_PRESENTATION_MODE) + { + for (uint32_t n_render_area = 0; + n_render_area < n_physical_devices; + ++n_render_area) + { + /* Split the frame vertically. Make sure the render area never exceeds the framebuffer's extent. */ + VkRect2D render_area_chunk; + + render_area_chunk.offset.x = n_render_area * split_chunk_size.width; + render_area_chunk.offset.y = 0; + render_area_chunk.extent.height = split_chunk_size.height; + render_area_chunk.extent.width = split_chunk_size.width; + + if (render_area_chunk.offset.x + render_area_chunk.extent.width > window_width) + { + render_area_chunk.extent.width = window_width - render_area_chunk.offset.x; + } + + if (render_area_chunk.offset.y + render_area_chunk.extent.height > window_height) + { + render_area_chunk.extent.height = window_height - render_area_chunk.offset.y; + } + + render_areas.push_back(render_area_chunk); + } + } + #elif defined(USE_LOCAL_AFR_PRESENTATION_MODE) || defined(USE_REMOTE_AFR_PRESENTATION_MODE) + { + VkRect2D dummy_render_area; + VkRect2D render_area; + + dummy_render_area.extent.height = 0; + dummy_render_area.extent.width = 0; + dummy_render_area.offset.x = 0; + dummy_render_area.offset.y = 0; + + render_area.extent.width = window_width; + render_area.extent.height = window_height; + render_area.offset.x = 0; + render_area.offset.y = 0; + + for (uint32_t n_pre_physical_device = 0; + n_pre_physical_device < in_afr_render_index; + ++n_pre_physical_device) + { + render_areas.push_back(dummy_render_area); + } + + render_areas.push_back(render_area); + + for (uint32_t n_post_physical_device = in_afr_render_index + 1; + n_post_physical_device < n_physical_devices; + ++n_post_physical_device) + { + render_areas.push_back(dummy_render_area); + } + } + #else + #error Not supported? 
+ #endif + + return render_areas; +} + +#endif + void App::init() { init_vulkan (); @@ -377,65 +912,89 @@ void App::init() void App::init_buffers() { - std::shared_ptr allocator_ptr; - TeapotData data (U_GRANULARITY, V_GRANULARITY); - const Anvil::DeviceType device_type (m_device_ptr.lock()->get_type() ); - - const VkDeviceSize index_data_size = data.get_index_data_size(); - const VkDeviceSize properties_data_size = N_TEAPOTS * sizeof(float) * 8; /* rot_xyzX + pos_xyzX */ - const VkDeviceSize vertex_data_size = data.get_vertex_data_size(); + Anvil::MemoryAllocatorUniquePtr allocator_ptr; + TeapotData data (U_GRANULARITY, V_GRANULARITY); + const Anvil::DeviceType device_type (m_device_ptr->get_type() ); + const VkDeviceSize index_data_size = data.get_index_data_size(); + const VkDeviceSize properties_data_size = N_TEAPOTS * sizeof(float) * 8; /* rot_xyzX + pos_xyzX */ + const Anvil::MemoryFeatureFlags required_feature_flags = (device_type == Anvil::DeviceType::SINGLE_GPU) ? Anvil::MemoryFeatureFlagBits::NONE + : Anvil::MemoryFeatureFlagBits::MULTI_INSTANCE_BIT; + Anvil::MGPUPeerMemoryRequirements required_peer_memory_feature_flags; + const VkDeviceSize vertex_data_size = data.get_vertex_data_size(); - allocator_ptr = Anvil::MemoryAllocator::create_oneshot(m_device_ptr); + allocator_ptr = Anvil::MemoryAllocator::create_oneshot(m_device_ptr.get() ); - m_index_buffer_ptr = Anvil::Buffer::create_nonsparse(m_device_ptr, - index_data_size, - Anvil::QUEUE_FAMILY_GRAPHICS_BIT, - VK_SHARING_MODE_EXCLUSIVE, - VK_BUFFER_USAGE_INDEX_BUFFER_BIT); + { + auto create_info_ptr = Anvil::BufferCreateInfo::create_no_alloc(m_device_ptr.get(), + index_data_size, + Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, + Anvil::SharingMode::EXCLUSIVE, + Anvil::BufferCreateFlagBits::NONE, + Anvil::BufferUsageFlagBits::INDEX_BUFFER_BIT); + + m_index_buffer_ptr = Anvil::Buffer::create(std::move(create_info_ptr) ); + } - m_query_results_buffer_ptr = Anvil::Buffer::create_nonsparse(m_device_ptr, - sizeof(uint64_t) * m_n_swapchain_images * 2, /* top of pipe, bottom of pipe */ - Anvil::QUEUE_FAMILY_GRAPHICS_BIT, - VK_SHARING_MODE_EXCLUSIVE, - VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT); + { + auto create_info_ptr = Anvil::BufferCreateInfo::create_no_alloc(m_device_ptr.get(), + sizeof(uint64_t) * m_n_swapchain_images * 2, /* top of pipe, bottom of pipe */ + Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, + Anvil::SharingMode::EXCLUSIVE, + Anvil::BufferCreateFlagBits::NONE, + Anvil::BufferUsageFlagBits::TRANSFER_SRC_BIT | Anvil::BufferUsageFlagBits::TRANSFER_DST_BIT); + + m_query_results_buffer_ptr = Anvil::Buffer::create(std::move(create_info_ptr) ); + } - m_vertex_buffer_ptr = Anvil::Buffer::create_nonsparse(m_device_ptr, - vertex_data_size, - Anvil::QUEUE_FAMILY_GRAPHICS_BIT, - VK_SHARING_MODE_EXCLUSIVE, - VK_BUFFER_USAGE_VERTEX_BUFFER_BIT); + { + auto create_info_ptr = Anvil::BufferCreateInfo::create_no_alloc(m_device_ptr.get(), + vertex_data_size, + Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, + Anvil::SharingMode::EXCLUSIVE, + Anvil::BufferCreateFlagBits::NONE, + Anvil::BufferUsageFlagBits::VERTEX_BUFFER_BIT); + + m_vertex_buffer_ptr = Anvil::Buffer::create(std::move(create_info_ptr) ); + } m_index_buffer_ptr->set_name ("Teapot index buffer"); m_query_results_buffer_ptr->set_name("Query results buffer"); m_vertex_buffer_ptr->set_name ("Teapot vertex buffer"); - allocator_ptr->add_buffer(m_query_results_buffer_ptr, - Anvil::MEMORY_FEATURE_FLAG_MAPPABLE); + 
allocator_ptr->add_buffer(m_query_results_buffer_ptr.get(), + required_feature_flags); - allocator_ptr->add_buffer(m_index_buffer_ptr, - 0); /* in_required_memory_features */ - allocator_ptr->add_buffer(m_vertex_buffer_ptr, - 0); /* in_required_memory_features */ + allocator_ptr->add_buffer(m_index_buffer_ptr.get(), + required_feature_flags); + allocator_ptr->add_buffer(m_vertex_buffer_ptr.get(), + required_feature_flags); for (uint32_t n_swapchain_image = 0; n_swapchain_image < m_n_swapchain_images; ++n_swapchain_image) { - std::shared_ptr new_buffer_ptr; + Anvil::BufferUniquePtr new_buffer_ptr; - new_buffer_ptr = Anvil::Buffer::create_nonsparse(m_device_ptr, - properties_data_size, - Anvil::QUEUE_FAMILY_GRAPHICS_BIT, - VK_SHARING_MODE_EXCLUSIVE, - VK_BUFFER_USAGE_STORAGE_BUFFER_BIT); + { + auto create_info_ptr = Anvil::BufferCreateInfo::create_no_alloc(m_device_ptr.get(), + properties_data_size, + Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, + Anvil::SharingMode::EXCLUSIVE, + Anvil::BufferCreateFlagBits::NONE, + Anvil::BufferUsageFlagBits::STORAGE_BUFFER_BIT); + + new_buffer_ptr = Anvil::Buffer::create(std::move(create_info_ptr) ); + } new_buffer_ptr->set_name("Properties buffer"); - allocator_ptr->add_buffer(new_buffer_ptr, - 0); /* in_required_memory_features */ + allocator_ptr->add_buffer(new_buffer_ptr.get(), + required_feature_flags); /* in_required_memory_features */ - m_properties_buffer_ptrs.push_back(new_buffer_ptr); + m_properties_buffer_ptrs.push_back( + std::move(new_buffer_ptr) + ); } m_index_buffer_ptr->write (0, /* start_offset */ @@ -450,14 +1009,14 @@ void App::init_buffers() void App::init_command_buffers() { - VkClearValue clear_values[2]; - std::shared_ptr device_locked_ptr = m_device_ptr.lock(); - const Anvil::DeviceType device_type = device_locked_ptr->get_type(); - std::shared_ptr gfx_manager_ptr = device_locked_ptr->get_graphics_pipeline_manager(); - const uint32_t n_swapchain_images = m_swapchain_ptr->get_n_images(); - std::shared_ptr vertex_buffers[] = + VkClearValue clear_values[2]; + const Anvil::DeviceType device_type = m_device_ptr->get_type(); + auto gfx_manager_ptr = m_device_ptr->get_graphics_pipeline_manager(); + const uint32_t n_swapchain_images = m_swapchain_ptr->get_n_images(); + const uint32_t universal_queue_family_index = m_device_ptr->get_universal_queue(0)->get_queue_family_index(); + Anvil::Buffer* vertex_buffers[] = { - m_vertex_buffer_ptr + m_vertex_buffer_ptr.get() }; VkDeviceSize vertex_buffer_offsets[] = { @@ -467,20 +1026,34 @@ void App::init_command_buffers() const uint32_t n_vertex_buffers = sizeof(vertex_buffers) / sizeof(vertex_buffers [0]); const uint32_t n_vertex_buffer_offsets = sizeof(vertex_buffer_offsets) / sizeof(vertex_buffer_offsets[0]); - - static_assert(n_vertex_buffers == n_vertex_buffer_offsets, ""); + static_assert(n_vertex_buffers == n_vertex_buffer_offsets, ""); anvil_assert (m_framebuffers.size() == n_swapchain_images); anvil_assert (m_render_cmdbuffers_ooo_off.size() == 0); anvil_assert (m_render_cmdbuffers_ooo_on.size() == 0); anvil_assert (m_renderpasses.size() == n_swapchain_images); - const uint32_t n_physical_device_iterations(1); - VkRect2D render_area; + #if defined(ENABLE_MGPU_SUPPORT) + const Anvil::MGPUDevice* mgpu_device_ptr(dynamic_cast(m_device_ptr.get() ) ); + #if !defined(USE_LOCAL_AFR_PRESENTATION_MODE) + std::vector render_areas (get_render_areas() ); + #endif - render_area.extent.height = m_window_ptr->get_height_at_creation_time(); - render_area.extent.width = m_window_ptr->get_width_at_creation_time 
(); - render_area.offset.x = 0; - render_area.offset.y = 0; + #if defined(USE_LOCAL_AFR_PRESENTATION_MODE) || defined(USE_REMOTE_AFR_PRESENTATION_MODE) + const uint32_t n_physical_device_iterations(mgpu_device_ptr->get_n_physical_devices() ); + #elif defined(USE_LOCAL_SFR_PRESENTATION_MODE) || defined(USE_SUM_SFR_PRESENTATION_MODE) + const uint32_t n_physical_device_iterations(1); + #else + #error Not supported? + #endif + #else + const uint32_t n_physical_device_iterations(1); + VkRect2D render_area; + + render_area.extent.height = m_window_ptr->get_height_at_creation_time(); + render_area.extent.width = m_window_ptr->get_width_at_creation_time (); + render_area.offset.x = 0; + render_area.offset.y = 0; + #endif clear_values[0].color.float32[0] = 0.0f; clear_values[0].color.float32[1] = 1.0f; @@ -492,44 +1065,53 @@ void App::init_command_buffers() n_physical_device_iteration < n_physical_device_iterations; ++n_physical_device_iteration) { + #if defined(ENABLE_MGPU_SUPPORT) && defined (USE_LOCAL_AFR_PRESENTATION_MODE) + std::vector render_areas(get_render_areas(n_physical_device_iteration)); + #endif + for (uint32_t n_ooo_iteration = 0; n_ooo_iteration < 2; /* off, on */ ++n_ooo_iteration) { - const bool is_ooo_enabled = (n_ooo_iteration == 1); - const Anvil::PipelineID pipeline_id = (is_ooo_enabled) ? m_ooo_enabled_pipeline_id - : m_ooo_disabled_pipeline_id; - std::shared_ptr pipeline_layout_ptr = gfx_manager_ptr->get_graphics_pipeline_layout(pipeline_id); - - std::vector >& render_cmdbuffers = (is_ooo_enabled) ? m_render_cmdbuffers_ooo_on - : m_render_cmdbuffers_ooo_off; + const bool is_ooo_enabled = (n_ooo_iteration == 1); + const Anvil::PipelineID pipeline_id = (is_ooo_enabled) ? m_ooo_enabled_pipeline_id + : m_ooo_disabled_pipeline_id; + Anvil::PipelineLayout* pipeline_layout_ptr = gfx_manager_ptr->get_pipeline_layout(pipeline_id); + + #if defined(ENABLE_MGPU_SUPPORT) + std::vector& render_cmdbuffers = (is_ooo_enabled) ? m_render_cmdbuffers_ooo_on [n_physical_device_iteration] + : m_render_cmdbuffers_ooo_off[n_physical_device_iteration]; + #else + std::vector& render_cmdbuffers = (is_ooo_enabled) ? 
m_render_cmdbuffers_ooo_on + : m_render_cmdbuffers_ooo_off; + #endif for (uint32_t n_render_cmdbuffer = 0; n_render_cmdbuffer < n_swapchain_images; ++n_render_cmdbuffer) { - std::shared_ptr cmdbuffer_ptr; - std::shared_ptr ds_ptr (m_dsg_ptrs [n_render_cmdbuffer]->get_descriptor_set(0)); - std::shared_ptr framebuffer_ptr(m_framebuffers[n_render_cmdbuffer]); - std::shared_ptr renderpass_ptr (m_renderpasses[n_render_cmdbuffer]); - - const Anvil::BufferBarrier query_result_barrier(VK_ACCESS_TRANSFER_WRITE_BIT, - VK_ACCESS_HOST_READ_BIT | VK_ACCESS_TRANSFER_READ_BIT, - device_locked_ptr->get_queue_family_index(Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL), - device_locked_ptr->get_queue_family_index(Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL), - m_query_results_buffer_ptr, + Anvil::PrimaryCommandBufferUniquePtr cmdbuffer_ptr; + Anvil::DescriptorSet* ds_ptr (m_dsg_ptrs [n_render_cmdbuffer]->get_descriptor_set(0)); + Anvil::Framebuffer* framebuffer_ptr(m_framebuffers[n_render_cmdbuffer].get() ); + Anvil::RenderPass* renderpass_ptr (m_renderpasses[n_render_cmdbuffer].get() ); + + const Anvil::BufferBarrier query_result_barrier(Anvil::AccessFlagBits::TRANSFER_WRITE_BIT, + Anvil::AccessFlagBits::HOST_READ_BIT | Anvil::AccessFlagBits::TRANSFER_READ_BIT, + universal_queue_family_index, + universal_queue_family_index, + m_query_results_buffer_ptr.get(), sizeof(uint64_t) * n_render_cmdbuffer * 2, sizeof(uint64_t) * 2); - const Anvil::BufferBarrier props_buffer_barrier(VK_ACCESS_HOST_WRITE_BIT, - VK_ACCESS_SHADER_READ_BIT, - device_locked_ptr->get_queue_family_index(Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL), - device_locked_ptr->get_queue_family_index(Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL), - m_properties_buffer_ptrs[n_render_cmdbuffer], + const Anvil::BufferBarrier props_buffer_barrier(Anvil::AccessFlagBits::HOST_WRITE_BIT, + Anvil::AccessFlagBits::SHADER_READ_BIT, + universal_queue_family_index, + universal_queue_family_index, + m_properties_buffer_ptrs[n_render_cmdbuffer].get(), 0, N_TEAPOTS * sizeof(float) * 2 * 4 /* pos + rot */); - cmdbuffer_ptr = device_locked_ptr->get_command_pool(Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL)->alloc_primary_level_command_buffer(); + cmdbuffer_ptr = m_device_ptr->get_command_pool_for_queue_family_index(universal_queue_family_index)->alloc_primary_level_command_buffer(); cmdbuffer_ptr->set_name_formatted("Rendering command buffer (OoO:%s)", ((n_ooo_iteration == 0) ? "off" : "on") ); @@ -539,9 +1121,16 @@ void App::init_command_buffers() { clear_values[0].color.float32[0] = (is_ooo_enabled) ? 1.0f : 0.0f; - cmdbuffer_ptr->record_pipeline_barrier(VK_PIPELINE_STAGE_HOST_BIT, - VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, - VK_FALSE, /* in_by_region */ + #if 1 + /* Useful if you need to visually determine which GPU rendered which frame. 
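With AFR enabled, the line below scales the blue clear channel by the rendering device's index, so frames produced by different GPUs come out with visibly different tints.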
*/ + #if defined(USE_LOCAL_AFR_PRESENTATION_MODE) || defined(USE_REMOTE_AFR_PRESENTATION_MODE) + clear_values[0].color.float32[2] = float(n_physical_device_iteration) / (n_physical_device_iterations - 1); + #endif + #endif + + cmdbuffer_ptr->record_pipeline_barrier(Anvil::PipelineStageFlagBits::HOST_BIT, + Anvil::PipelineStageFlagBits::VERTEX_SHADER_BIT, + Anvil::DependencyFlagBits::NONE, 0, /* in_memory_barrier_count */ nullptr, /* in_memory_barriers_ptr */ 1, /* in_buffer_memory_barrier_count */ @@ -549,23 +1138,57 @@ void App::init_command_buffers() 0, /* in_image_memory_barrier_count */ nullptr); /* in_image_memory_barrier_ptrs */ - cmdbuffer_ptr->record_write_timestamp(VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, - m_query_pool_ptr, + cmdbuffer_ptr->record_write_timestamp(Anvil::PipelineStageFlagBits::TOP_OF_PIPE_BIT, + m_query_pool_ptr.get(), n_render_cmdbuffer * 2 /* top of pipe, bottom of pipe*/ + 0); - cmdbuffer_ptr->record_begin_render_pass(sizeof(clear_values) / sizeof(clear_values[0]), - clear_values, - framebuffer_ptr, - render_area, - renderpass_ptr, - VK_SUBPASS_CONTENTS_INLINE); + #if !defined(ENABLE_MGPU_SUPPORT) { - const uint32_t n_physical_devices(1); - - cmdbuffer_ptr->record_bind_pipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, + cmdbuffer_ptr->record_begin_render_pass(sizeof(clear_values) / sizeof(clear_values[0]), + clear_values, + framebuffer_ptr, + render_area, + renderpass_ptr, + Anvil::SubpassContents::INLINE); + } + #else + { + const Anvil::MGPUDevice* mgpu_device_ptr (dynamic_cast(m_device_ptr.get() )); + const auto& render_areas_ptr(&render_areas.at(0) ); + + #if defined(USE_LOCAL_SFR_PRESENTATION_MODE) || defined(USE_SUM_SFR_PRESENTATION_MODE) || defined(USE_LOCAL_AFR_PRESENTATION_MODE) || defined(USE_REMOTE_AFR_PRESENTATION_MODE) + const uint32_t device_mask = (1 << mgpu_device_ptr->get_n_physical_devices()) - 1; + #else + #error Not supported? 
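/* Background note on device masks (VK_KHR_device_group): bit i of a mask selects physical device i of the
 * group, so the (1 << n) - 1 value computed above addresses all n devices at once, while the 1 << n_physical_device
 * masks set further below restrict recording to a single device. In raw Vulkan this is expressed with calls such as
 *     vkCmdSetDeviceMaskKHR(cmd_buffer, device_mask);
 * which the Anvil record_set_device_mask_KHR() wrapper is assumed to map to. */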
+ #endif + + anvil_assert(render_areas.size() == mgpu_device_ptr->get_n_physical_devices()); + + cmdbuffer_ptr->record_begin_render_pass(sizeof(clear_values) / sizeof(clear_values[0]), + clear_values, + framebuffer_ptr, + device_mask, + static_cast(render_areas.size()), + render_areas_ptr, + renderpass_ptr, + Anvil::SubpassContents::INLINE); + } + #endif + { + #if defined(USE_LOCAL_SFR_PRESENTATION_MODE) || defined(USE_SUM_SFR_PRESENTATION_MODE) + const uint32_t n_physical_devices(mgpu_device_ptr->get_n_physical_devices()); + const Anvil::PhysicalDevice* const* physical_devices (mgpu_device_ptr->get_physical_devices ()); + #elif defined(USE_LOCAL_AFR_PRESENTATION_MODE) || defined(USE_REMOTE_AFR_PRESENTATION_MODE) + const uint32_t n_physical_devices(1); + const Anvil::PhysicalDevice* const* physical_devices (mgpu_device_ptr->get_physical_devices () + n_physical_device_iteration); + #else + const uint32_t n_physical_devices(1); + #endif + + cmdbuffer_ptr->record_bind_pipeline(Anvil::PipelineBindPoint::GRAPHICS, pipeline_id); - cmdbuffer_ptr->record_bind_descriptor_sets(VK_PIPELINE_BIND_POINT_GRAPHICS, + cmdbuffer_ptr->record_bind_descriptor_sets(Anvil::PipelineBindPoint::GRAPHICS, pipeline_layout_ptr, 0, /* in_first_set */ 1, /* in_set_count */ @@ -573,9 +1196,9 @@ void App::init_command_buffers() 0, /* in_dynamic_offset_count */ nullptr); /* in_dynamic_offset_ptrs */ - cmdbuffer_ptr->record_bind_index_buffer (m_index_buffer_ptr, + cmdbuffer_ptr->record_bind_index_buffer (m_index_buffer_ptr.get(), 0, /* in_offset */ - VK_INDEX_TYPE_UINT32); + Anvil::IndexType::UINT32); cmdbuffer_ptr->record_bind_vertex_buffers(0, /* in_start_binding */ n_vertex_buffers, vertex_buffers, @@ -585,6 +1208,37 @@ void App::init_command_buffers() n_physical_device < n_physical_devices; ++n_physical_device) { + #if defined(ENABLE_MGPU_SUPPORT) + { + #if defined(USE_LOCAL_AFR_PRESENTATION_MODE) + VkRect2D scissor = render_areas.at(n_physical_device_iteration); + #else + VkRect2D scissor = render_areas.at(n_physical_device); + #endif + VkViewport viewport; + + viewport.height = static_cast(scissor.extent.height); + viewport.maxDepth = 1.0f; + viewport.minDepth = 0.0f; + viewport.width = static_cast(WINDOW_WIDTH); + viewport.x = 0.0f; + viewport.y = 0.0f; + + #if defined(USE_LOCAL_SFR_PRESENTATION_MODE) || defined(USE_SUM_SFR_PRESENTATION_MODE) + { + cmdbuffer_ptr->record_set_device_mask_KHR(1 << n_physical_device); + } + #endif + + cmdbuffer_ptr->record_set_scissor (0, /* in_first_scissor */ + 1, /* in_scissor_count */ + &scissor); + cmdbuffer_ptr->record_set_viewport(0, /* in_first_viewport */ + 1, /* in_viewport_count */ + &viewport); + } + #endif + /* Draw the teapots! */ cmdbuffer_ptr->record_draw_indexed(m_n_indices, N_TEAPOTS, /* in_instance_count */ @@ -595,20 +1249,149 @@ void App::init_command_buffers() } cmdbuffer_ptr->record_end_render_pass(); - cmdbuffer_ptr->record_write_timestamp (VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, - m_query_pool_ptr, + #if defined(USE_LOCAL_SFR_PRESENTATION_MODE) + { + /* Once all GPUs have finished rendering, we need to copy all content rendered by non-presenting + * devices to the presenting device's swapchain image instance. */ + std::vector image_barriers_pre; + const uint32_t n_total_physical_devices(mgpu_device_ptr->get_n_physical_devices() ); + + /* First, submit all the barriers we're going to need to submit before we can use the copy op. 
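Concretely, the presenting device's swapchain image is transitioned to TRANSFER_DST_OPTIMAL, while every other device's peer image is transitioned to TRANSFER_SRC_OPTIMAL, each barrier being recorded under that device's own device mask.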
*/ + for (uint32_t n_current_physical_device = 0; + n_current_physical_device < n_total_physical_devices; + ++n_current_physical_device) + { + cmdbuffer_ptr->record_set_device_mask_KHR(1 << n_current_physical_device); + + if (n_current_physical_device == m_n_presenting_physical_device) + { + const Anvil::ImageBarrier renderable_to_dst_transferrable_layout_barrier(Anvil::AccessFlagBits::COLOR_ATTACHMENT_WRITE_BIT, + Anvil::AccessFlagBits::TRANSFER_WRITE_BIT, + Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, + Anvil::ImageLayout::TRANSFER_DST_OPTIMAL, + VK_QUEUE_FAMILY_IGNORED, + VK_QUEUE_FAMILY_IGNORED, + m_swapchain_ptr->get_image(n_render_cmdbuffer), + m_swapchain_ptr->get_image(n_render_cmdbuffer)->get_subresource_range() ); + + cmdbuffer_ptr->record_pipeline_barrier(Anvil::PipelineStageFlagBits::COLOR_ATTACHMENT_OUTPUT_BIT, + Anvil::PipelineStageFlagBits::TRANSFER_BIT, + Anvil::DependencyFlagBits::BY_REGION_BIT, + 0, /* in_memory_barrier_count */ + nullptr, /* in_memory_barriers_ptr */ + 0, /* in_buffer_memory_barrier_count */ + nullptr, /* in_buffer_memory_barriers_ptr */ + 1, /* in_image_memory_barrier_count */ + &renderable_to_dst_transferrable_layout_barrier); + } + else + { + const auto src_image_ptr = m_swapchain_peer_images_per_physical_device.at(n_current_physical_device).peer_images.at(n_render_cmdbuffer).get(); + + const Anvil::ImageBarrier renderable_to_src_transferrable_layout_barrier(Anvil::AccessFlagBits::COLOR_ATTACHMENT_WRITE_BIT, + Anvil::AccessFlagBits::TRANSFER_READ_BIT, + Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, + Anvil::ImageLayout::TRANSFER_SRC_OPTIMAL, + VK_QUEUE_FAMILY_IGNORED, + VK_QUEUE_FAMILY_IGNORED, + src_image_ptr, + src_image_ptr->get_subresource_range() ); + + cmdbuffer_ptr->record_pipeline_barrier(Anvil::PipelineStageFlagBits::COLOR_ATTACHMENT_OUTPUT_BIT, + Anvil::PipelineStageFlagBits::TRANSFER_BIT, + Anvil::DependencyFlagBits::BY_REGION_BIT, + 0, /* in_memory_barrier_count */ + nullptr, /* in_memory_barriers_ptr */ + 0, /* in_buffer_memory_barrier_count */ + nullptr, /* in_buffer_memory_barriers_ptr */ + 1, /* in_image_memory_barrier_count */ + &renderable_to_src_transferrable_layout_barrier); + } + } + + /* Next, copy rendered image chunks to the swapchain image instance which is going to be presented. 
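Each non-presenting device's region is copied with identical source and destination offsets and extents, so the presenting device ends up with the complete frame assembled in its own memory instance.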
*/ + const auto presentable_peer_image_ptr(m_swapchain_peer_images_per_physical_device.at(m_n_presenting_physical_device).peer_images.at(n_render_cmdbuffer).get() ); + + for (uint32_t n_current_physical_device = 0; + n_current_physical_device < n_total_physical_devices; + ++n_current_physical_device) + { + Anvil::ImageCopy copy_region; + const auto src_image_ptr = m_swapchain_peer_images_per_physical_device.at(n_current_physical_device).peer_images.at(n_render_cmdbuffer).get(); + const auto& src_render_area = render_areas[n_current_physical_device]; + + if (n_current_physical_device == m_n_presenting_physical_device) + { + continue; + } + + copy_region.dst_offset.x = src_render_area.offset.x; + copy_region.dst_offset.y = src_render_area.offset.y; + copy_region.dst_offset.z = 0; + copy_region.dst_subresource.aspect_mask = Anvil::ImageAspectFlagBits::COLOR_BIT; + copy_region.dst_subresource.base_array_layer = 0; + copy_region.dst_subresource.layer_count = 1; + copy_region.dst_subresource.mip_level = 0; + copy_region.extent.depth = 1; + copy_region.extent.height = src_render_area.extent.height; + copy_region.extent.width = src_render_area.extent.width; + copy_region.src_offset.x = src_render_area.offset.x; + copy_region.src_offset.y = src_render_area.offset.y; + copy_region.src_offset.z = 0; + copy_region.src_subresource = copy_region.dst_subresource; + + cmdbuffer_ptr->record_set_device_mask_KHR(1 << n_current_physical_device); + + cmdbuffer_ptr->record_copy_image(src_image_ptr, + Anvil::ImageLayout::TRANSFER_SRC_OPTIMAL, /* in_src_image_layout */ + presentable_peer_image_ptr, + Anvil::ImageLayout::TRANSFER_DST_OPTIMAL, /* in_dst_image_layout */ + 1, /* in_region_count */ + ©_region); + } + + /* Finally, transfer the swapchain image instance we are about to present to presentable layout. 
*/ + const Anvil::ImageBarrier dst_transferrable_to_presentable_layout_barrier(Anvil::AccessFlagBits::TRANSFER_WRITE_BIT, + Anvil::AccessFlagBits::MEMORY_READ_BIT, + Anvil::ImageLayout::TRANSFER_DST_OPTIMAL, + Anvil::ImageLayout::PRESENT_SRC_KHR, + VK_QUEUE_FAMILY_IGNORED, + VK_QUEUE_FAMILY_IGNORED, + presentable_peer_image_ptr, + presentable_peer_image_ptr->get_subresource_range() ); + + cmdbuffer_ptr->record_pipeline_barrier(Anvil::PipelineStageFlagBits::TRANSFER_BIT, + Anvil::PipelineStageFlagBits::BOTTOM_OF_PIPE_BIT, + Anvil::DependencyFlagBits::BY_REGION_BIT, + 0, /* in_memory_barrier_count */ + nullptr, /* in_memory_barriers_ptr */ + 0, /* in_buffer_memory_barrier_count */ + nullptr, /* in_buffer_memory_barriers_ptr */ + 1, /* in_image_memory_barrier_count */ + &dst_transferrable_to_presentable_layout_barrier); + } + #endif + + #if defined(ENABLE_MGPU_SUPPORT) + { + cmdbuffer_ptr->record_set_device_mask_KHR((1 << mgpu_device_ptr->get_n_physical_devices()) - 1); + } + #endif + + cmdbuffer_ptr->record_write_timestamp (Anvil::PipelineStageFlagBits::ALL_GRAPHICS_BIT, + m_query_pool_ptr.get(), n_render_cmdbuffer * 2 /* top of pipe, bottom of pipe */ + 1); - cmdbuffer_ptr->record_copy_query_pool_results(m_query_pool_ptr, + cmdbuffer_ptr->record_copy_query_pool_results(m_query_pool_ptr.get(), n_render_cmdbuffer * 2, /* top of pipe, bottom of pipe */ 2, /* in_query_count */ - m_query_results_buffer_ptr, + m_query_results_buffer_ptr.get(), sizeof(uint64_t) * n_render_cmdbuffer * 2, sizeof(uint64_t), VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT); - cmdbuffer_ptr->record_pipeline_barrier(VK_PIPELINE_STAGE_TRANSFER_BIT, - VK_PIPELINE_STAGE_HOST_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT, - VK_FALSE, /* in_by_region */ + cmdbuffer_ptr->record_pipeline_barrier(Anvil::PipelineStageFlagBits::TRANSFER_BIT, + Anvil::PipelineStageFlagBits::HOST_BIT | Anvil::PipelineStageFlagBits::TRANSFER_BIT, + Anvil::DependencyFlagBits::NONE, 0, /* in_memory_barrier_count */ nullptr, /* in_memory_barriers_ptr */ 1, /* in_buffer_memory_barrier_count */ @@ -618,10 +1401,23 @@ void App::init_command_buffers() } cmdbuffer_ptr->stop_recording(); - render_cmdbuffers.push_back(cmdbuffer_ptr); + render_cmdbuffers.push_back(std::move(cmdbuffer_ptr) ); } } } + + #if defined(ENABLE_MGPU_SUPPORT) + { + m_dummy_cmdbuffer_ptr = mgpu_device_ptr->get_command_pool_for_queue_family_index(universal_queue_family_index)->alloc_primary_level_command_buffer(); + + m_dummy_cmdbuffer_ptr->start_recording(false, /* one_time_submit */ + true); /* simultaneous_use_allowed */ + { + /* Stub */ + } + m_dummy_cmdbuffer_ptr->stop_recording(); + } + #endif } void App::init_dsgs() @@ -632,199 +1428,273 @@ void App::init_dsgs() n_swapchain_image < m_n_swapchain_images; ++n_swapchain_image) { - std::shared_ptr new_dsg_ptr; + Anvil::DescriptorSetGroupUniquePtr new_dsg_ptr; + + { + std::vector new_dsg_create_info_ptr(1); + + new_dsg_create_info_ptr[0] = Anvil::DescriptorSetCreateInfo::create(); - new_dsg_ptr = Anvil::DescriptorSetGroup::create(m_device_ptr, - false, /* releaseable_sets */ - 1); /* n_sets */ + new_dsg_create_info_ptr[0]->add_binding(0, /* in_binding */ + Anvil::DescriptorType::STORAGE_BUFFER, + 1, /* in_n_elements */ + Anvil::ShaderStageFlagBits::VERTEX_BIT); + + new_dsg_ptr = Anvil::DescriptorSetGroup::create(m_device_ptr.get(), + new_dsg_create_info_ptr); + } - new_dsg_ptr->add_binding (0, /* n_set */ - 0, /* binding */ - VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, - 1, /* n_elements */ - VK_SHADER_STAGE_VERTEX_BIT); 
new_dsg_ptr->set_binding_item(0, /* n_set */ 0, /* binding_index */ - Anvil::DescriptorSet::StorageBufferBindingElement(m_properties_buffer_ptrs[n_swapchain_image])); + Anvil::DescriptorSet::StorageBufferBindingElement(m_properties_buffer_ptrs[n_swapchain_image].get() )); - m_dsg_ptrs.push_back(new_dsg_ptr); + m_dsg_ptrs.push_back( + std::move(new_dsg_ptr) + ); } } void App::init_events() { + /* Stub */ } void App::init_gfx_pipelines() { - std::shared_ptr device_locked_ptr = m_device_ptr.lock(); - std::shared_ptr gfx_manager_ptr = device_locked_ptr->get_graphics_pipeline_manager(); + auto gfx_manager_ptr = m_device_ptr->get_graphics_pipeline_manager(); for (uint32_t n_pipeline = 0; n_pipeline < 2 /* ooo on, ooo off */; ++n_pipeline) { - bool is_ooo_disabled = (n_pipeline == 0); - Anvil::GraphicsPipelineID* pipeline_id_ptr = (is_ooo_disabled) ? &m_ooo_disabled_pipeline_id - : &m_ooo_enabled_pipeline_id; - - gfx_manager_ptr->add_regular_pipeline(false, /* disable_optimizations */ - false, /* allow_derivatives */ - m_renderpasses[0], - 0, /* subpass_id */ - *m_fs_entrypoint_ptr, - Anvil::ShaderModuleStageEntryPoint(), /* gs_entrypoint */ - Anvil::ShaderModuleStageEntryPoint(), /* tc_entrypoint */ - Anvil::ShaderModuleStageEntryPoint(), /* te_entrypoint */ - *m_vs_entrypoint_ptr, - pipeline_id_ptr); - - gfx_manager_ptr->add_vertex_attribute(*pipeline_id_ptr, - 0, /* location */ - VK_FORMAT_R32G32B32_SFLOAT, - 0, /* offset_in_bytes */ - sizeof(float) * 3, /* stride_in_bytes */ - VK_VERTEX_INPUT_RATE_VERTEX); - - gfx_manager_ptr->set_pipeline_dsg(*pipeline_id_ptr, - m_dsg_ptrs[0]); - - gfx_manager_ptr->set_input_assembly_properties(*pipeline_id_ptr, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST); - gfx_manager_ptr->set_rasterization_properties (*pipeline_id_ptr, - VK_POLYGON_MODE_FILL, - VK_CULL_MODE_BACK_BIT, - VK_FRONT_FACE_CLOCKWISE, - 4.0f); /* line_width */ - gfx_manager_ptr->toggle_depth_test (*pipeline_id_ptr, - true, /* should_enable */ - VK_COMPARE_OP_LESS); - gfx_manager_ptr->toggle_depth_writes (*pipeline_id_ptr, - true); - - if (!is_ooo_disabled) + bool is_ooo_disabled = (n_pipeline == 0); + Anvil::PipelineID* pipeline_id_ptr = (is_ooo_disabled) ? 
&m_ooo_disabled_pipeline_id + : &m_ooo_enabled_pipeline_id; + { - if (device_locked_ptr->is_extension_enabled("VK_AMD_rasterization_order") ) + auto pipeline_create_info_ptr = Anvil::GraphicsPipelineCreateInfo::create(Anvil::PipelineCreateFlagBits::NONE, + m_renderpasses[0].get(), + 0, /* in_subpass_id */ + *m_fs_entrypoint_ptr, + Anvil::ShaderModuleStageEntryPoint(), /* in_gs_entrypoint */ + Anvil::ShaderModuleStageEntryPoint(), /* in_tc_entrypoint */ + Anvil::ShaderModuleStageEntryPoint(), /* in_te_entrypoint */ + *m_vs_entrypoint_ptr); + { - gfx_manager_ptr->set_rasterization_order(*pipeline_id_ptr, - VK_RASTERIZATION_ORDER_RELAXED_AMD); + auto attribute = Anvil::VertexInputAttribute(0, /* location */ + Anvil::Format::R32G32B32_SFLOAT, + 0); /* offset_in_bytes */ + + pipeline_create_info_ptr->add_vertex_binding(0, /* in_binding */ + Anvil::VertexInputRate::VERTEX, + sizeof(float) * 3, /* in_stride_in_bytes */ + 1, /* in_n_attributes */ + &attribute); } - } - else - { - gfx_manager_ptr->set_rasterization_order(*pipeline_id_ptr, - VK_RASTERIZATION_ORDER_STRICT_AMD); + + pipeline_create_info_ptr->set_descriptor_set_create_info(m_dsg_ptrs[0]->get_descriptor_set_create_info() ); + + #ifdef ENABLE_MGPU_SUPPORT + { + pipeline_create_info_ptr->set_n_dynamic_scissor_boxes(1); + pipeline_create_info_ptr->set_n_dynamic_viewports (1); + + pipeline_create_info_ptr->toggle_dynamic_states(true, /* should_enable */ + {Anvil::DynamicState::SCISSOR, Anvil::DynamicState::VIEWPORT}); + } + #endif + + pipeline_create_info_ptr->set_primitive_topology (Anvil::PrimitiveTopology::TRIANGLE_LIST); + pipeline_create_info_ptr->set_rasterization_properties(Anvil::PolygonMode::FILL, + Anvil::CullModeFlagBits::BACK_BIT, + Anvil::FrontFace::CLOCKWISE, + 4.0f); /* line_width */ + pipeline_create_info_ptr->toggle_depth_test (true, /* should_enable */ + Anvil::CompareOp::LESS); + pipeline_create_info_ptr->toggle_depth_writes (true); + + if (!is_ooo_disabled) + { + if (m_device_ptr->is_extension_enabled("VK_AMD_rasterization_order") ) + { + pipeline_create_info_ptr->set_rasterization_order(Anvil::RasterizationOrderAMD::RELAXED); + } + } + else + { + pipeline_create_info_ptr->set_rasterization_order(Anvil::RasterizationOrderAMD::STRICT); + } + + gfx_manager_ptr->add_pipeline(std::move(pipeline_create_info_ptr), + pipeline_id_ptr); } } } void App::init_images() { - m_depth_image_ptr = Anvil::Image::create_nonsparse(m_device_ptr, - VK_IMAGE_TYPE_2D, - VK_FORMAT_D32_SFLOAT, - VK_IMAGE_TILING_OPTIMAL, - VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, - m_window_ptr->get_width_at_creation_time (), - m_window_ptr->get_height_at_creation_time(), - 1, /* base_mipmap_depth */ - 1, /* n_layers */ - VK_SAMPLE_COUNT_1_BIT, - Anvil::QUEUE_FAMILY_GRAPHICS_BIT, - VK_SHARING_MODE_EXCLUSIVE, - false, /* in_use_full_mipmap_chain */ - 0, /* in_memory_features */ - false, /* in_is_mutable */ - VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, /* in_final_image_layout */ - nullptr); /* in_mipmaps_ptr */ - - m_depth_image_view_ptr = Anvil::ImageView::create_2D(m_device_ptr, - m_depth_image_ptr, - 0, /* n_base_layer */ - 0, /* n_base_mipmap_level */ - 1, /* n_mipmaps */ - VK_IMAGE_ASPECT_DEPTH_BIT, - m_depth_image_ptr->get_image_format(), - VK_COMPONENT_SWIZZLE_IDENTITY, - VK_COMPONENT_SWIZZLE_IDENTITY, - VK_COMPONENT_SWIZZLE_IDENTITY, - VK_COMPONENT_SWIZZLE_IDENTITY); + { + auto create_info_ptr = Anvil::ImageCreateInfo::create_alloc(m_device_ptr.get(), + Anvil::ImageType::_2D, + Anvil::Format::D32_SFLOAT, + Anvil::ImageTiling::OPTIMAL, + 
Anvil::ImageUsageFlagBits::DEPTH_STENCIL_ATTACHMENT_BIT, + m_window_ptr->get_width_at_creation_time (), + m_window_ptr->get_height_at_creation_time(), + 1, /* base_mipmap_depth */ + 1, /* n_layers */ + Anvil::SampleCountFlagBits::_1_BIT, + Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, + Anvil::SharingMode::EXCLUSIVE, + false, /* in_use_full_mipmap_chain */ + (m_device_ptr->get_type() == Anvil::DeviceType::MULTI_GPU) ? Anvil::MemoryFeatureFlagBits::MULTI_INSTANCE_BIT : Anvil::MemoryFeatureFlagBits::NONE, + Anvil::ImageCreateFlagBits::NONE, + Anvil::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL, /* in_final_image_layout */ + nullptr); /* in_mipmaps_ptr */ + + m_depth_image_ptr = Anvil::Image::create(std::move(create_info_ptr) ); + } + + { + auto create_info_ptr = Anvil::ImageViewCreateInfo::create_2D(m_device_ptr.get(), + m_depth_image_ptr.get(), + 0, /* n_base_layer */ + 0, /* n_base_mipmap_level */ + 1, /* n_mipmaps */ + Anvil::ImageAspectFlagBits::DEPTH_BIT, + m_depth_image_ptr->get_create_info_ptr()->get_format(), + Anvil::ComponentSwizzle::IDENTITY, + Anvil::ComponentSwizzle::IDENTITY, + Anvil::ComponentSwizzle::IDENTITY, + Anvil::ComponentSwizzle::IDENTITY); + + m_depth_image_view_ptr = Anvil::ImageView::create(std::move(create_info_ptr) ); + } + + #if defined(USE_LOCAL_SFR_PRESENTATION_MODE) + { + const Anvil::MGPUDevice* mgpu_device_ptr (dynamic_cast(m_device_ptr.get() )); + const uint32_t n_physical_devices (mgpu_device_ptr->get_n_physical_devices() ); + std::vector device_group_indices (n_physical_devices); + + /* Create peer images. */ + for (uint32_t n_physical_device = 0; + n_physical_device < n_physical_devices; + ++n_physical_device) + { + SwapchainPeerImages* current_peer_image_set_ptr (nullptr); + + m_swapchain_peer_images_per_physical_device.push_back(SwapchainPeerImages() ); + + current_peer_image_set_ptr = &m_swapchain_peer_images_per_physical_device.back(); + + for (uint32_t n_device_index = 0; + n_device_index < n_physical_devices; + ++n_device_index) + { + /* We want all logical devices to be able to access n_physical_device's image instance + * through this peer image */ + device_group_indices.at(n_device_index) = n_physical_device; + } + + for (uint32_t n_swapchain_image = 0; + n_swapchain_image < m_n_swapchain_images; + ++n_swapchain_image) + { + Anvil::ImageUniquePtr peer_image_ptr; + + { + auto create_info_ptr = Anvil::ImageCreateInfo::create_peer_no_alloc(m_device_ptr.get (), + m_swapchain_ptr.get(), + n_swapchain_image); + + create_info_ptr->set_device_indices(n_physical_devices, + &device_group_indices.at(0) ); + + peer_image_ptr = Anvil::Image::create(std::move(create_info_ptr) ); + } + + current_peer_image_set_ptr->peer_images.push_back(std::move(peer_image_ptr) ); + } + } + + } + #endif } void App::init_query_pool() { - m_query_pool_ptr = Anvil::QueryPool::create_non_ps_query_pool(m_device_ptr, + m_query_pool_ptr = Anvil::QueryPool::create_non_ps_query_pool(m_device_ptr.get(), VK_QUERY_TYPE_TIMESTAMP, m_n_swapchain_images * 2 /* top of pipe, bottom of pipe */); } void App::init_renderpasses() { - std::shared_ptr device_locked_ptr(m_device_ptr); - /* We are rendering directly to the swapchain image, so need one renderpass per image */ for (uint32_t n_swapchain_image = 0; n_swapchain_image < m_n_swapchain_images; ++n_swapchain_image) { - Anvil::RenderPassAttachmentID color_attachment_id; - Anvil::RenderPassAttachmentID depth_attachment_id; - std::shared_ptr renderpass_ptr; - Anvil::SubPassID subpass_id; + Anvil::RenderPassAttachmentID color_attachment_id; + 
Anvil::RenderPassAttachmentID depth_attachment_id; + Anvil::RenderPassUniquePtr renderpass_ptr; + Anvil::SubPassID subpass_id; - /* Set up a renderpass instance first */ - renderpass_ptr = Anvil::RenderPass::create(m_device_ptr, - m_swapchain_ptr); - - renderpass_ptr->set_name_formatted("Renderpass for swapchain image [%d]", - n_swapchain_image); + { + Anvil::RenderPassCreateInfoUniquePtr renderpass_create_info_ptr(new Anvil::RenderPassCreateInfo(m_device_ptr.get() ) ); - renderpass_ptr->add_color_attachment(m_swapchain_ptr->get_image_format(), - VK_SAMPLE_COUNT_1_BIT, - VK_ATTACHMENT_LOAD_OP_CLEAR, - VK_ATTACHMENT_STORE_OP_STORE, + renderpass_create_info_ptr->add_color_attachment(m_swapchain_ptr->get_create_info_ptr()->get_format(), + Anvil::SampleCountFlagBits::_1_BIT, + Anvil::AttachmentLoadOp::CLEAR, + Anvil::AttachmentStoreOp::STORE, #ifndef ENABLE_OFFSCREEN_RENDERING - VK_IMAGE_LAYOUT_UNDEFINED, - VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, + Anvil::ImageLayout::UNDEFINED, + #if !defined(USE_LOCAL_SFR_PRESENTATION_MODE) + Anvil::ImageLayout::PRESENT_SRC_KHR, + #else + /* In local SFR presentation mode, we want to avoid ->finalLayout transition, since some swapchain image instances + * will need to be switched to TRANSFER_SRC layout, and one of them to TRANSFER_DST. + */ + Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, + #endif #else - VK_IMAGE_LAYOUT_GENERAL, - VK_IMAGE_LAYOUT_GENERAL, + Anvil::ImageLayout::GENERAL, + Anvil::ImageLayout::GENERAL, #endif - false, /* may_alias */ - &color_attachment_id); - - renderpass_ptr->add_depth_stencil_attachment(m_depth_image_ptr->get_image_format(), - VK_SAMPLE_COUNT_1_BIT, - VK_ATTACHMENT_LOAD_OP_CLEAR, - VK_ATTACHMENT_STORE_OP_STORE, - VK_ATTACHMENT_LOAD_OP_DONT_CARE, - VK_ATTACHMENT_STORE_OP_DONT_CARE, - VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, - VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, - false, /* may_alias */ - &depth_attachment_id); - - /* Define the only subpass we're going to use there */ - renderpass_ptr->add_subpass(*m_fs_entrypoint_ptr, - Anvil::ShaderModuleStageEntryPoint(), /* gs_entrypoint */ - Anvil::ShaderModuleStageEntryPoint(), /* tc_entrypoint */ - Anvil::ShaderModuleStageEntryPoint(), /* te_entrypoint */ - *m_vs_entrypoint_ptr, - &subpass_id, - m_general_pipeline_id); - - renderpass_ptr->add_subpass_color_attachment(subpass_id, - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, - color_attachment_id, - 0, /* location */ - nullptr); /* opt_attachment_resolve_id_ptr */ - - renderpass_ptr->add_subpass_depth_stencil_attachment(subpass_id, - depth_attachment_id, - VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL); - - m_renderpasses.push_back(renderpass_ptr); + false, /* may_alias */ + &color_attachment_id); + + renderpass_create_info_ptr->add_depth_stencil_attachment(m_depth_image_ptr->get_create_info_ptr()->get_format(), + Anvil::SampleCountFlagBits::_1_BIT, + Anvil::AttachmentLoadOp::CLEAR, + Anvil::AttachmentStoreOp::STORE, + Anvil::AttachmentLoadOp::DONT_CARE, + Anvil::AttachmentStoreOp::DONT_CARE, + Anvil::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + Anvil::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + false, /* may_alias */ + &depth_attachment_id); + + /* Define the only subpass we're going to use there */ + renderpass_create_info_ptr->add_subpass (&subpass_id); + renderpass_create_info_ptr->add_subpass_color_attachment (subpass_id, + Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, + color_attachment_id, + 0, /* in_location */ + nullptr); /* in_opt_attachment_resolve_id_ptr */ + 
renderpass_create_info_ptr->add_subpass_depth_stencil_attachment(subpass_id, + Anvil::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + depth_attachment_id); + + renderpass_ptr = Anvil::RenderPass::create(std::move(renderpass_create_info_ptr), + m_swapchain_ptr.get() ); + } + + renderpass_ptr->set_name_formatted("Renderpass for swapchain image [%d]", + n_swapchain_image); /* If no general pipeline has been set up yet, do it now. This pipeline is only used to form a pipeline layout * so we only need to configure DSGs & attributes here. @@ -833,93 +1703,172 @@ void App::init_renderpasses() **/ if (m_general_pipeline_id == -1) { - std::shared_ptr gfx_manager_ptr = device_locked_ptr->get_graphics_pipeline_manager(); + auto gfx_manager_ptr = m_device_ptr->get_graphics_pipeline_manager(); + auto gfx_pipeline_create_info_ptr = Anvil::GraphicsPipelineCreateInfo::create (Anvil::PipelineCreateFlagBits::NONE, + renderpass_ptr.get(), + subpass_id, + *m_fs_entrypoint_ptr, + Anvil::ShaderModuleStageEntryPoint(), /* in_gs_entrypoint */ + Anvil::ShaderModuleStageEntryPoint(), /* in_tc_entrypoint */ + Anvil::ShaderModuleStageEntryPoint(), /* in_te_entrypoint */ + *m_vs_entrypoint_ptr); - renderpass_ptr->get_subpass_graphics_pipeline_id(subpass_id, - &m_general_pipeline_id); + { + auto attribute = Anvil::VertexInputAttribute(0, /* location */ + Anvil::Format::R32G32B32_SFLOAT, + 0); /* offset_in_bytes */ + + gfx_pipeline_create_info_ptr->add_vertex_binding(0, /* in_binding */ + Anvil::VertexInputRate::VERTEX, + sizeof(float) * 3, /* stride_in_bytes */ + 1, /* in_n_attributes */ + &attribute); + } - gfx_manager_ptr->add_vertex_attribute(m_general_pipeline_id, - 0, /* location */ - VK_FORMAT_R32G32B32_SFLOAT, - 0, /* offset_in_bytes */ - sizeof(float) * 3, /* stride_in_bytes */ - VK_VERTEX_INPUT_RATE_VERTEX); + gfx_pipeline_create_info_ptr->set_descriptor_set_create_info(m_dsg_ptrs[0]->get_descriptor_set_create_info() ); - gfx_manager_ptr->set_pipeline_dsg(m_general_pipeline_id, - m_dsg_ptrs[0]); + gfx_manager_ptr->add_pipeline(std::move(gfx_pipeline_create_info_ptr), + &m_general_pipeline_id); } + m_renderpasses.push_back( + std::move(renderpass_ptr) + ); + /* Set up a framebuffer we will use for the renderpass */ - std::shared_ptr framebuffer_ptr; + Anvil::FramebufferUniquePtr framebuffer_ptr; - framebuffer_ptr = Anvil::Framebuffer::create(m_device_ptr, - m_window_ptr->get_width_at_creation_time (), - m_window_ptr->get_height_at_creation_time(), - 1); /* n_layers */ + { + auto create_info_ptr = Anvil::FramebufferCreateInfo::create(m_device_ptr.get(), + m_window_ptr->get_width_at_creation_time (), + m_window_ptr->get_height_at_creation_time(), + 1); /* n_layers */ + + #if defined(ENABLE_EXPLICIT_SWAPCHAIN_IMAGE_MEMORY_BINDING) + { + create_info_ptr->add_attachment(m_swapchain_image_views.at(n_swapchain_image).get(), + nullptr); + } + #else + { + create_info_ptr->add_attachment(m_swapchain_ptr->get_image_view(n_swapchain_image), + nullptr); + } + #endif + + create_info_ptr->add_attachment(m_depth_image_view_ptr.get(), + nullptr); + + framebuffer_ptr = Anvil::Framebuffer::create(std::move(create_info_ptr) ); + } framebuffer_ptr->set_name("Main framebuffer"); - framebuffer_ptr->add_attachment(m_swapchain_ptr->get_image_view(n_swapchain_image), - nullptr); - framebuffer_ptr->add_attachment(m_depth_image_view_ptr, - nullptr); - m_framebuffers.push_back(framebuffer_ptr); + m_framebuffers.push_back( + std::move(framebuffer_ptr) + ); } } void App::init_semaphores() { - std::shared_ptr 
base_device_locked_ptr(m_device_ptr); - uint32_t n_physical_devices (1); + uint32_t n_physical_devices(0); + + switch (m_device_ptr->get_type() ) + { + case Anvil::DeviceType::MULTI_GPU: + { + const Anvil::MGPUDevice* mgpu_device_ptr(dynamic_cast(m_device_ptr.get() ) ); + + n_physical_devices = mgpu_device_ptr->get_n_physical_devices(); + + break; + } + + case Anvil::DeviceType::SINGLE_GPU: + { + n_physical_devices = 1; + + break; + } + + default: + { + anvil_assert(false); + } + } for (uint32_t n_swapchain_image = 0; n_swapchain_image < m_n_swapchain_images; ++n_swapchain_image) { - std::shared_ptr new_frame_acquisition_wait_semaphore_ptr = Anvil::Semaphore::create(m_device_ptr); - SemaphoreBundle signal_semaphore_bundle; - SemaphoreBundle wait_semaphore_bundle; + Anvil::SemaphoreUniquePtr new_frame_acquisition_wait_semaphore_ptr; + auto new_signal_sem_bundle_ptr = std::unique_ptr(new SemaphoreBundle() ); + auto new_wait_sem_bundle_ptr = std::unique_ptr(new SemaphoreBundle() ); + + { + auto create_info_ptr = Anvil::SemaphoreCreateInfo::create(m_device_ptr.get() ); + + new_frame_acquisition_wait_semaphore_ptr = Anvil::Semaphore::create(std::move(create_info_ptr) ); + } new_frame_acquisition_wait_semaphore_ptr->set_name_formatted("New frame acquisition wait semaphore [%d]", n_swapchain_image); + #if defined(ENABLE_MGPU_SUPPORT) + { + m_frame_acquisition_wait_semaphores.push_back(std::move(new_frame_acquisition_wait_semaphore_ptr) ); + } + #endif + for (uint32_t n_physical_device = 0; n_physical_device < n_physical_devices; ++n_physical_device) { - std::shared_ptr new_signal_semaphore_ptr = Anvil::Semaphore::create(m_device_ptr); - std::shared_ptr new_wait_semaphore_ptr = Anvil::Semaphore::create(m_device_ptr); + Anvil::SemaphoreUniquePtr new_signal_semaphore_ptr; + Anvil::SemaphoreUniquePtr new_wait_semaphore_ptr; + + { + auto create_info_ptr = Anvil::SemaphoreCreateInfo::create(m_device_ptr.get() ); + + new_signal_semaphore_ptr = Anvil::Semaphore::create(std::move(create_info_ptr) ); + } + { + auto create_info_ptr = Anvil::SemaphoreCreateInfo::create(m_device_ptr.get() ); + + new_wait_semaphore_ptr = Anvil::Semaphore::create(std::move(create_info_ptr) ); + } new_signal_semaphore_ptr->set_name_formatted("Signal semaphore [%d]", n_swapchain_image); new_wait_semaphore_ptr->set_name_formatted ("Wait semaphore [%d]", n_swapchain_image); - signal_semaphore_bundle.semaphores.push_back(new_signal_semaphore_ptr); - wait_semaphore_bundle.semaphores.push_back (new_wait_semaphore_ptr); + new_signal_sem_bundle_ptr->semaphores.push_back(std::move(new_signal_semaphore_ptr )); + new_wait_sem_bundle_ptr->semaphores.push_back (std::move(new_wait_semaphore_ptr) ); } - m_frame_signal_semaphore_bundles.push_back(signal_semaphore_bundle); - m_frame_wait_semaphore_bundles.push_back (wait_semaphore_bundle); + m_frame_signal_semaphore_bundles.push_back(std::move(new_signal_sem_bundle_ptr) ); + m_frame_wait_semaphore_bundles.push_back (std::move(new_wait_sem_bundle_ptr) ); } } void App::init_shaders() { - std::shared_ptr fs_ptr; - std::shared_ptr fs_sm_ptr; - std::shared_ptr vs_ptr; - std::shared_ptr vs_sm_ptr; + Anvil::GLSLShaderToSPIRVGeneratorUniquePtr fs_ptr; + Anvil::ShaderModuleUniquePtr fs_sm_ptr; + Anvil::GLSLShaderToSPIRVGeneratorUniquePtr vs_ptr; + Anvil::ShaderModuleUniquePtr vs_sm_ptr; - fs_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr, + fs_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr.get(), Anvil::GLSLShaderToSPIRVGenerator::MODE_USE_SPECIFIED_SOURCE, fs_body, - 
Anvil::SHADER_STAGE_FRAGMENT); - vs_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr, + Anvil::ShaderStage::FRAGMENT); + vs_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr.get(), Anvil::GLSLShaderToSPIRVGenerator::MODE_USE_SPECIFIED_SOURCE, vs_body, - Anvil::SHADER_STAGE_VERTEX); + Anvil::ShaderStage::VERTEX); vs_ptr->add_definition_value_pair("MAX_DEPTH", MAX_DEPTH); @@ -930,57 +1879,245 @@ void App::init_shaders() vs_ptr->add_definition_value_pair("N_TEAPOTS", N_TEAPOTS); - fs_sm_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr, - fs_ptr); - vs_sm_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr, - vs_ptr); + fs_sm_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr.get(), + fs_ptr.get () ); + vs_sm_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr.get(), + vs_ptr.get () ); fs_sm_ptr->set_name("Fragment shader"); vs_sm_ptr->set_name("Vertex shader"); m_fs_entrypoint_ptr.reset(new Anvil::ShaderModuleStageEntryPoint("main", - fs_sm_ptr, - Anvil::SHADER_STAGE_FRAGMENT) ); + std::move(fs_sm_ptr), + Anvil::ShaderStage::FRAGMENT) ); m_vs_entrypoint_ptr.reset(new Anvil::ShaderModuleStageEntryPoint("main", - vs_sm_ptr, - Anvil::SHADER_STAGE_VERTEX) ); + std::move(vs_sm_ptr), + Anvil::ShaderStage::VERTEX) ); } void App::init_swapchain() { - std::shared_ptr device_locked_ptr (m_device_ptr); - static const VkFormat swapchain_format (VK_FORMAT_B8G8R8A8_UNORM); - static const VkPresentModeKHR swapchain_present_mode(VK_PRESENT_MODE_FIFO_KHR); - static const VkImageUsageFlags swapchain_usage (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT); - std::shared_ptr sgpu_device_locked_ptr(std::dynamic_pointer_cast(m_device_ptr.lock() ) ); + static const Anvil::Format swapchain_format (Anvil::Format::B8G8R8A8_UNORM); + static const Anvil::PresentModeKHR swapchain_present_mode(Anvil::PresentModeKHR::FIFO_KHR); + static const Anvil::ImageUsageFlags swapchain_usage (Anvil::ImageUsageFlagBits::COLOR_ATTACHMENT_BIT | Anvil::ImageUsageFlagBits::TRANSFER_SRC_BIT | Anvil::ImageUsageFlagBits::TRANSFER_DST_BIT); + + { + auto create_info_ptr = Anvil::RenderingSurfaceCreateInfo::create(m_instance_ptr.get(), + m_device_ptr.get (), + m_window_ptr.get () ); - m_rendering_surface_ptr = Anvil::RenderingSurface::create(m_instance_ptr, - m_device_ptr, - m_window_ptr); + m_rendering_surface_ptr = Anvil::RenderingSurface::create(std::move(create_info_ptr) ); + } m_rendering_surface_ptr->set_name("Main rendering surface"); - m_swapchain_ptr = sgpu_device_locked_ptr->create_swapchain(m_rendering_surface_ptr, - m_window_ptr, - swapchain_format, - swapchain_present_mode, - swapchain_usage, - m_n_swapchain_images); + switch (m_device_ptr->get_type() ) + { +#if defined(ENABLE_MGPU_SUPPORT) + case Anvil::DeviceType::MULTI_GPU: + { + Anvil::MGPUDevice* mgpu_device_ptr(dynamic_cast(m_device_ptr.get() ) ); + + #if defined(USE_SUM_SFR_PRESENTATION_MODE) + static const Anvil::DeviceGroupPresentModeFlagBits swapchain_device_group_present_mode(Anvil::DeviceGroupPresentModeFlagBits::SUM_BIT_KHR); + #elif defined(USE_LOCAL_AFR_PRESENTATION_MODE) || defined(USE_LOCAL_SFR_PRESENTATION_MODE) + static const Anvil::DeviceGroupPresentModeFlagBits swapchain_device_group_present_mode(Anvil::DeviceGroupPresentModeFlagBits::LOCAL_BIT_KHR); + #elif defined(USE_REMOTE_AFR_PRESENTATION_MODE) + static const Anvil::DeviceGroupPresentModeFlagBits 
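/* Quick reference on the device-group present modes used in this switch (roughly, per VK_KHR_device_group):
 * LOCAL  - each physical device presents swapchain images that live in its own memory;
 * REMOTE - a device presents images that were rendered into another device's memory;
 * SUM    - the presentation engine presents the sum of images supplied by multiple devices. */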
swapchain_device_group_present_mode(Anvil::DeviceGroupPresentModeFlagBits::REMOTE_BIT_KHR); + #else + #error Not supported? + #endif + + anvil_assert((mgpu_device_ptr->get_supported_present_modes() & swapchain_device_group_present_mode) != 0); + + m_swapchain_ptr = mgpu_device_ptr->create_swapchain(m_rendering_surface_ptr.get(), + m_window_ptr.get (), + swapchain_format, + Anvil::ColorSpaceKHR::SRGB_NONLINEAR_KHR, + swapchain_present_mode, + swapchain_usage, + m_n_swapchain_images, +#if defined(ENABLE_EXPLICIT_SFR_RECT_DEFINITIONS) + true, /* support_SFR */ +#else + false, /* support_SFR */ +#endif + swapchain_device_group_present_mode); - m_swapchain_ptr->set_name("Main swapchain"); + m_n_swapchain_images = m_swapchain_ptr->get_n_images(); - /* Cache the queue we are going to use for presentation */ - const std::vector* present_queue_fams_ptr = nullptr; + #if defined(USE_LOCAL_AFR_PRESENTATION_MODE) || defined(USE_LOCAL_SFR_PRESENTATION_MODE) + { + /* This example app assumes all physical devices can be used to render & present their output + * using local presentation mode. Verify this assertion + */ + anvil_assert((mgpu_device_ptr->get_supported_present_modes_for_surface(m_rendering_surface_ptr.get() ) & Anvil::DeviceGroupPresentModeFlagBits::LOCAL_BIT_KHR) != 0); + } + #elif defined(USE_REMOTE_AFR_PRESENTATION_MODE) + /* This example app assumes all physical devices can be used to render & present their output + * using remote presentation mode. Verify this assertion + */ + anvil_assert((mgpu_device_ptr->get_supported_present_modes_for_surface(m_rendering_surface_ptr.get() ) & Anvil::DeviceGroupPresentModeFlagBits::REMOTE_BIT_KHR) != 0); + #elif defined(USE_SUM_SFR_PRESENTATION_MODE) + /* This example app assumes all physical devices can be used to render & present their output + * using sum presentation mode. Verify this assertion + */ + anvil_assert((mgpu_device_ptr->get_supported_present_modes_for_surface(m_rendering_surface_ptr.get() ) & Anvil::DeviceGroupPresentModeFlagBits::SUM_BIT_KHR) != 0); + #endif + + #if defined(ENABLE_EXPLICIT_SWAPCHAIN_IMAGE_MEMORY_BINDING) + { + for (uint32_t n_swapchain_image = 0; + n_swapchain_image < m_n_swapchain_images; + ++n_swapchain_image) + { + Anvil::ImageUniquePtr new_image_ptr; + Anvil::ImageViewUniquePtr new_image_view_ptr; - if (!m_rendering_surface_ptr->get_queue_families_with_present_support(sgpu_device_locked_ptr->get_physical_device(), - &present_queue_fams_ptr) ) - { - anvil_assert_fail(); - } + #if defined(ENABLE_EXPLICIT_SFR_RECT_DEFINITIONS) + { + /* For split-frame rendering, the rendering surface is split into N vertical areas, where + * N corresponds to the number of logical devices. Since GPU at index X will always render + * only to a dedicated slab stored in its own memory instance, we are going to use dummy + * rects for all other cases. + * + * As a refresh, SFR rect at index i * N + j is the rectangle used by physical device i for memory instance j. 
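* Worked example (hypothetical two-device group, N = 2): rect 0 covers device 0 / memory instance 0,
* rect 1 device 0 / instance 1, rect 2 device 1 / instance 0, and rect 3 device 1 / instance 1; the loop
* below leaves only rects 0 and 3 non-empty, because each device only ever renders into its own memory instance.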
+ * + */ + const uint32_t n_logical_devices(mgpu_device_ptr->get_n_physical_devices() ); + const auto render_areas (get_render_areas() ); + std::vector sfr_rects (n_logical_devices * n_logical_devices); + + for (uint32_t n_logical_device = 0; + n_logical_device < n_logical_devices; + ++n_logical_device) + { + for (uint32_t n_memory_instance = 0; + n_memory_instance < n_logical_devices; + ++n_memory_instance) + { + const uint32_t n_sfr_rect = n_logical_device * n_logical_devices + n_memory_instance; + auto& current_sfr_rect = sfr_rects.at(n_sfr_rect); + + if (n_logical_device != n_memory_instance) + { + current_sfr_rect.extent.height = 0; + current_sfr_rect.extent.width = 0; + current_sfr_rect.offset.x = 0; + current_sfr_rect.offset.y = 0; + } + else + { + current_sfr_rect = render_areas.at(n_logical_device); + } + } + } + + { + auto create_info_ptr = Anvil::ImageCreateInfo::create_peer_no_alloc(m_device_ptr.get (), + m_swapchain_ptr.get(), + n_swapchain_image); + + create_info_ptr->set_SFR_rectangles(static_cast(sfr_rects.size() ), + &sfr_rects.at(0) ); + + new_image_ptr = Anvil::Image::create(std::move(create_info_ptr) ); + } + } + #else + { + /* No ISV would ever do this, right? */ + auto create_info_ptr = Anvil::ImageCreateInfo::create_peer_no_alloc(m_device_ptr.get (), + m_swapchain_ptr.get(), + n_swapchain_image); + + new_image_ptr = Anvil::Image::create(std::move(create_info_ptr) ); + } + #endif + + { + auto create_info_ptr = Anvil::ImageViewCreateInfo::create_2D(m_device_ptr.get (), + new_image_ptr.get(), + 0, /* n_base_layer */ + 0, /* n_base_mipmap_level */ + 1, /* n_mipmaps */ + Anvil::ImageAspectFlagBits::COLOR_BIT, + new_image_ptr->get_create_info_ptr()->get_format(), + Anvil::ComponentSwizzle::IDENTITY, + Anvil::ComponentSwizzle::IDENTITY, + Anvil::ComponentSwizzle::IDENTITY, + Anvil::ComponentSwizzle::IDENTITY); + + new_image_view_ptr = Anvil::ImageView::create(std::move(create_info_ptr) ); + } + + m_swapchain_image_views.push_back(std::move(new_image_view_ptr) ); + m_swapchain_images.push_back (std::move(new_image_ptr) ); + } + } + #endif + + /* Cache the queue we are going to use for presentation */ + for (uint32_t n_physical_device = 0; + n_physical_device < mgpu_device_ptr->get_n_physical_devices(); + ++n_physical_device) + { + const std::vector* present_queue_fams_ptr = nullptr; + + if (!m_rendering_surface_ptr->get_queue_families_with_present_support(mgpu_device_ptr->get_physical_device(n_physical_device), + &present_queue_fams_ptr) ) + { + continue; + } + + if (present_queue_fams_ptr->size() > 0) + { + m_present_queue_ptr = m_device_ptr->get_queue_for_queue_family_index(present_queue_fams_ptr->at(0), + 0); /* in_n_queue */ + + break; + } + } + + anvil_assert(m_present_queue_ptr != nullptr); + + break; + } +#endif /* ENABLE_MGPU_SUPPORT */ + + case Anvil::DeviceType::SINGLE_GPU: + { + Anvil::SGPUDevice* sgpu_device_ptr(dynamic_cast(m_device_ptr.get() ) ); + + m_swapchain_ptr = sgpu_device_ptr->create_swapchain(m_rendering_surface_ptr.get(), + m_window_ptr.get (), + swapchain_format, + Anvil::ColorSpaceKHR::SRGB_NONLINEAR_KHR, + swapchain_present_mode, + swapchain_usage, + m_n_swapchain_images); + + /* Cache the queue we are going to use for presentation */ + const std::vector* present_queue_fams_ptr = nullptr; + + if (!m_rendering_surface_ptr->get_queue_families_with_present_support(sgpu_device_ptr->get_physical_device(), + &present_queue_fams_ptr) ) + { + anvil_assert_fail(); + } - m_present_queue_ptr = device_locked_ptr->get_queue(present_queue_fams_ptr->at(0), - 0); 
/* in_n_queue */ + m_present_queue_ptr = sgpu_device_ptr->get_queue_for_queue_family_index(present_queue_fams_ptr->at(0), + 0); /* in_n_queue */ + + break; + } + + default: + { + anvil_assert(false); + } + } } void App::init_window() @@ -1009,41 +2146,64 @@ void App::init_window() std::bind(&App::on_keypress_event, this, std::placeholders::_1), - this - ); + this); } void App::init_vulkan() { /* Create a Vulkan instance */ - m_instance_ptr = Anvil::Instance::create("OutOfOrderRasterization", /* app_name */ - "OutOfOrderRasterization", /* engine_name */ + { + auto create_info_ptr = Anvil::InstanceCreateInfo::create("OutOfOrderRasterization", /* app_name */ + "OutOfOrderRasterization", /* engine_name */ #ifdef ENABLE_VALIDATION - std::bind(&App::on_validation_callback, - this, - std::placeholders::_1, - std::placeholders::_2, - std::placeholders::_3, - std::placeholders::_4) ); + std::bind(&App::on_validation_callback, + this, + std::placeholders::_1, + std::placeholders::_2), #else - nullptr); + Anvil::DebugCallbackFunction(), #endif + false); /* in_mt_safe */ + + m_instance_ptr = Anvil::Instance::create(std::move(create_info_ptr) ); + } /* Determine which extensions we need to request for */ - std::shared_ptr physical_device_locked_ptr(m_instance_ptr->get_physical_device(0) ); - - /* Create a Vulkan device */ - m_device_ptr = Anvil::SGPUDevice::create(physical_device_locked_ptr, - Anvil::DeviceExtensionConfiguration(), - std::vector(), /* layers */ - false, /* transient_command_buffer_allocs_only */ - false); /* support_resettable_command_buffers */ + #if !defined(ENABLE_MGPU_SUPPORT) + { + /* Create a Vulkan device */ + auto create_info_ptr = Anvil::DeviceCreateInfo::create_sgpu(m_instance_ptr->get_physical_device(0), + true, /* in_enable_shader_module_cache */ + Anvil::DeviceExtensionConfiguration(), + std::vector(), /* in_layers */ + Anvil::CommandPoolCreateFlagBits::NONE, + false); /* in_mt_safe */ + + m_device_ptr = Anvil::SGPUDevice::create(std::move(create_info_ptr) ); + } + #else + { + Anvil::DeviceExtensionConfiguration ext_config; + auto physical_devices = m_instance_ptr->get_physical_device_group(0).physical_device_ptrs; + + ext_config.extension_status[VK_KHR_DEVICE_GROUP_EXTENSION_NAME] = Anvil::ExtensionAvailability::REQUIRE; + ext_config.extension_status[VK_KHR_BIND_MEMORY_2_EXTENSION_NAME] = Anvil::ExtensionAvailability::REQUIRE; + + auto create_info_ptr = Anvil::DeviceCreateInfo::create_mgpu(physical_devices, + true, /* in_enable_shader_module_cache */ + ext_config, + std::vector(), /* layers */ + Anvil::CommandPoolCreateFlagBits::NONE, + false); /* in_mt_safe */ + + m_device_ptr = Anvil::MGPUDevice::create(std::move(create_info_ptr) ); + } + #endif } void App::on_keypress_event(Anvil::CallbackArgument* callback_data_raw_ptr) { - Anvil::OnKeypressReleasedCallbackArgument* callback_data_ptr = static_cast(callback_data_raw_ptr); - std::shared_ptr device_locked_ptr = m_device_ptr.lock(); + auto callback_data_ptr = static_cast(callback_data_raw_ptr); #ifndef ENABLE_OFFSCREEN_RENDERING { @@ -1051,7 +2211,7 @@ void App::on_keypress_event(Anvil::CallbackArgument* callback_data_raw_ptr) { printf("\n\n"); - if (device_locked_ptr->is_extension_enabled("VK_AMD_rasterization_order") ) + if (m_device_ptr->is_extension_enabled("VK_AMD_rasterization_order") ) { m_ooo_enabled = !m_ooo_enabled; @@ -1077,19 +2237,15 @@ void App::on_keypress_event(Anvil::CallbackArgument* callback_data_raw_ptr) #endif } -VkBool32 App::on_validation_callback(VkDebugReportFlagsEXT message_flags, - 
VkDebugReportObjectTypeEXT object_type, - const char* layer_prefix, - const char* message) +void App::on_validation_callback(Anvil::DebugMessageSeverityFlags in_severity, + const char* in_message_ptr) { - if ((message_flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0) + if ((in_severity & Anvil::DebugMessageSeverityFlagBits::ERROR_BIT) != 0) { fprintf(stderr, "[!] %s\n", - message); + in_message_ptr); } - - return false; } void App::run() @@ -1109,8 +2265,7 @@ void App::run() void App::update_fps() { - uint64_t average_delta = 0; - std::shared_ptr device_locked_ptr(m_device_ptr); + uint64_t average_delta = 0; /* Compute average delta from all the samples we have cached so far */ for (uint64_t delta : m_timestamp_deltas) @@ -1121,14 +2276,18 @@ void App::update_fps() average_delta /= m_timestamp_deltas.size(); /* Convert the delta to human-readable information */ - const double time_ns = double(average_delta) * double(device_locked_ptr->get_physical_device_properties().limits.timestampPeriod); + const double time_ns = double(average_delta) * double(m_device_ptr->get_physical_device_properties().core_vk1_0_properties_ptr->limits.timestamp_period); const double time_s = time_ns / NSEC_PER_SEC; const float average_fps = float(1.0 / time_s); /* Print the new info */ clear_console_line(); - printf("Average FPS: %.3f", average_fps); + #if !defined(ENABLE_MGPU_SUPPORT) + printf("Average FPS: %.3f", average_fps); + #else + printf("Average FPS for all GPUs: %.3f", average_fps); + #endif /* Purge the timestamps */ m_timestamp_deltas.clear(); @@ -1198,11 +2357,13 @@ void App::update_teapot_props(uint32_t n_current_swapchain_image) } -int main() +int main(int argc, char *argv[]) { - std::shared_ptr app_ptr(new App() ); + std::unique_ptr app_ptr(new App() ); app_ptr->init(); + + app_ptr->run(); #ifdef _DEBUG diff --git a/examples/PushConstants/CMakeLists.txt b/examples/PushConstants/CMakeLists.txt index 61c9e6dd..6b679fd2 100644 --- a/examples/PushConstants/CMakeLists.txt +++ b/examples/PushConstants/CMakeLists.txt @@ -16,8 +16,11 @@ if(${CMAKE_SYSTEM_NAME} MATCHES "Linux") endif() endif() -add_subdirectory (../.. "${CMAKE_CURRENT_BINARY_DIR}/anvil") +if (NOT ANVIL_LINK_EXAMPLES) + add_subdirectory (../.. "${CMAKE_CURRENT_BINARY_DIR}/anvil") +endif() +target_include_directories(Anvil PUBLIC "${CMAKE_CURRENT_BINARY_DIR}/anvil/include") include_directories(${Anvil_SOURCE_DIR}/include ${PushConstants_SOURCE_DIR}/include) @@ -49,11 +52,10 @@ endif() # Create the PushConstants project. 
add_executable (PushConstants include/app.h - include/app.h - src/app.cpp) + src/app.cpp) # Add linking dependencies for the example projects -add_dependencies (PushConstants Anvil) +add_dependencies(PushConstants Anvil) if (WIN32) target_link_libraries(PushConstants Anvil) diff --git a/examples/PushConstants/include/app.h b/examples/PushConstants/include/app.h index 6235ec22..a1714c76 100644 --- a/examples/PushConstants/include/app.h +++ b/examples/PushConstants/include/app.h @@ -53,21 +53,19 @@ class App void init_window (); void init_vulkan (); - void draw_frame (); - VkBool32 on_validation_callback(VkDebugReportFlagsEXT message_flags, - VkDebugReportObjectTypeEXT object_type, - const char* layer_prefix, - const char* message); + void draw_frame (); + void on_validation_callback(Anvil::DebugMessageSeverityFlags in_severity, + const char* in_message_ptr); - void get_luminance_data(std::shared_ptr* out_result_ptr, - uint32_t* out_result_size_ptr) const; + void get_luminance_data(std::unique_ptr* out_result_ptr, + uint32_t* out_result_size_ptr) const; const unsigned char* get_mesh_data () const; uint32_t get_mesh_data_color_start_offset () const; uint32_t get_mesh_data_color_stride () const; - VkFormat get_mesh_data_color_format () const; + Anvil::Format get_mesh_data_color_format () const; uint32_t get_mesh_data_size () const; - VkFormat get_mesh_data_position_format () const; + Anvil::Format get_mesh_data_position_format () const; uint32_t get_mesh_data_position_start_offset() const; uint32_t get_mesh_data_position_stride () const; uint32_t get_mesh_n_vertices () const; @@ -76,30 +74,30 @@ class App /* Private variables */ - std::weak_ptr m_device_ptr; - std::shared_ptr m_instance_ptr; - std::weak_ptr m_physical_device_ptr; - std::shared_ptr m_present_queue_ptr; - std::shared_ptr m_rendering_surface_ptr; - std::shared_ptr m_swapchain_ptr; - Anvil::Time m_time; - VkDeviceSize m_ub_data_size_per_swapchain_image; - std::shared_ptr m_window_ptr; - - std::shared_ptr m_command_buffers[N_SWAPCHAIN_IMAGES]; - std::shared_ptr m_data_buffer_ptr; - std::shared_ptr m_dsg_ptr; - std::shared_ptr m_fbos[N_SWAPCHAIN_IMAGES]; - std::shared_ptr m_fs_ptr; - std::shared_ptr m_mesh_data_buffer_ptr; - Anvil::GraphicsPipelineID m_pipeline_id; - std::shared_ptr m_renderpass_ptr; - std::shared_ptr m_vs_ptr; + Anvil::BaseDeviceUniquePtr m_device_ptr; + Anvil::InstanceUniquePtr m_instance_ptr; + const Anvil::PhysicalDevice* m_physical_device_ptr; + Anvil::Queue* m_present_queue_ptr; + Anvil::RenderingSurfaceUniquePtr m_rendering_surface_ptr; + Anvil::SwapchainUniquePtr m_swapchain_ptr; + Anvil::Time m_time; + VkDeviceSize m_ub_data_size_per_swapchain_image; + Anvil::WindowUniquePtr m_window_ptr; + + Anvil::PrimaryCommandBufferUniquePtr m_command_buffers[N_SWAPCHAIN_IMAGES]; + Anvil::BufferUniquePtr m_data_buffer_ptr; + Anvil::DescriptorSetGroupUniquePtr m_dsg_ptr; + Anvil::FramebufferUniquePtr m_fbos[N_SWAPCHAIN_IMAGES]; + std::unique_ptr m_fs_ptr; + Anvil::BufferUniquePtr m_mesh_data_buffer_ptr; + Anvil::PipelineID m_pipeline_id; + Anvil::RenderPassUniquePtr m_renderpass_ptr; + std::unique_ptr m_vs_ptr; uint32_t m_n_last_semaphore_used; const uint32_t m_n_swapchain_images; - std::vector > m_frame_signal_semaphores; - std::vector > m_frame_wait_semaphores; + std::vector m_frame_signal_semaphores; + std::vector m_frame_wait_semaphores; }; diff --git a/examples/PushConstants/src/app.cpp b/examples/PushConstants/src/app.cpp index d69bc033..4ee6eaea 100644 --- a/examples/PushConstants/src/app.cpp +++ 
b/examples/PushConstants/src/app.cpp @@ -26,13 +26,24 @@ /* Uncomment the #define below to enable validation */ // #define ENABLE_VALIDATION + #include #include #include "config.h" +#include "misc/buffer_create_info.h" +#include "misc/framebuffer_create_info.h" #include "misc/glsl_to_spirv.h" +#include "misc/graphics_pipeline_create_info.h" +#include "misc/image_create_info.h" +#include "misc/image_view_create_info.h" +#include "misc/instance_create_info.h" #include "misc/io.h" #include "misc/memory_allocator.h" #include "misc/object_tracker.h" +#include "misc/render_pass_create_info.h" +#include "misc/rendering_surface_create_info.h" +#include "misc/semaphore_create_info.h" +#include "misc/swapchain_create_info.h" #include "misc/time.h" #include "misc/window_factory.h" #include "wrappers/buffer.h" @@ -210,12 +221,20 @@ App::~App() void App::deinit() { - vkDeviceWaitIdle(m_device_ptr.lock()->get_device_vk() ); + auto gfx_pipeline_manager_ptr = m_device_ptr->get_graphics_pipeline_manager(); + + Anvil::Vulkan::vkDeviceWaitIdle(m_device_ptr->get_device_vk() ); + + if (m_pipeline_id != UINT32_MAX) + { + gfx_pipeline_manager_ptr->delete_pipeline(m_pipeline_id); + + m_pipeline_id = UINT32_MAX; + } m_frame_signal_semaphores.clear(); m_frame_wait_semaphores.clear(); - m_present_queue_ptr.reset(); m_rendering_surface_ptr.reset(); m_swapchain_ptr.reset(); @@ -234,10 +253,7 @@ void App::deinit() m_renderpass_ptr.reset(); m_vs_ptr.reset(); - m_device_ptr.lock()->destroy(); m_device_ptr.reset(); - - m_instance_ptr->destroy(); m_instance_ptr.reset(); m_window_ptr.reset(); @@ -245,42 +261,57 @@ void App::deinit() void App::draw_frame() { - std::shared_ptr curr_frame_signal_semaphore_ptr; - std::shared_ptr curr_frame_wait_semaphore_ptr; - std::shared_ptr device_locked_ptr = m_device_ptr.lock(); - static uint32_t n_frames_rendered = 0; - uint32_t n_swapchain_image; - std::shared_ptr present_queue_ptr = device_locked_ptr->get_universal_queue(0); - std::shared_ptr present_wait_semaphore_ptr; - const VkPipelineStageFlags wait_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT; + Anvil::Semaphore* curr_frame_signal_semaphore_ptr = nullptr; + Anvil::Semaphore* curr_frame_wait_semaphore_ptr = nullptr; + static uint32_t n_frames_rendered = 0; + uint32_t n_swapchain_image; + Anvil::Queue* present_queue_ptr = m_device_ptr->get_universal_queue(0); + Anvil::Semaphore* present_wait_semaphore_ptr = nullptr; + const Anvil::PipelineStageFlags wait_stage_mask = Anvil::PipelineStageFlagBits::ALL_COMMANDS_BIT; /* Determine the signal + wait semaphores to use for drawing this frame */ m_n_last_semaphore_used = (m_n_last_semaphore_used + 1) % m_n_swapchain_images; - curr_frame_signal_semaphore_ptr = m_frame_signal_semaphores[m_n_last_semaphore_used]; - curr_frame_wait_semaphore_ptr = m_frame_wait_semaphores [m_n_last_semaphore_used]; + curr_frame_signal_semaphore_ptr = m_frame_signal_semaphores[m_n_last_semaphore_used].get(); + curr_frame_wait_semaphore_ptr = m_frame_wait_semaphores [m_n_last_semaphore_used].get(); present_wait_semaphore_ptr = curr_frame_signal_semaphore_ptr; /* Determine the semaphore which the swapchain image */ - n_swapchain_image = m_swapchain_ptr->acquire_image(curr_frame_wait_semaphore_ptr, - true); /* in_should_block */ + { + const auto acquire_result = m_swapchain_ptr->acquire_image(curr_frame_wait_semaphore_ptr, + &n_swapchain_image, + true); /* in_should_block */ + + ANVIL_REDUNDANT_VARIABLE_CONST(acquire_result); + anvil_assert (acquire_result == Anvil::SwapchainOperationErrorCode::SUCCESS); + } /* 
Submit work chunk and present */ update_data_ub_contents(n_swapchain_image); - present_queue_ptr->submit_command_buffer_with_signal_wait_semaphores(m_command_buffers[n_swapchain_image], - 1, /* n_semaphores_to_signal */ - &curr_frame_signal_semaphore_ptr, - 1, /* n_semaphores_to_wait_on */ - &curr_frame_wait_semaphore_ptr, - &wait_stage_mask, - false /* should_block */); + present_queue_ptr->submit( + Anvil::SubmitInfo::create(m_command_buffers[n_swapchain_image].get(), + 1, /* n_semaphores_to_signal */ + &curr_frame_signal_semaphore_ptr, + 1, /* n_semaphores_to_wait_on */ + &curr_frame_wait_semaphore_ptr, + &wait_stage_mask, + false /* should_block */) + ); + + { + Anvil::SwapchainOperationErrorCode present_result = Anvil::SwapchainOperationErrorCode::DEVICE_LOST; + + present_queue_ptr->present(m_swapchain_ptr.get(), + n_swapchain_image, + 1, /* n_wait_semaphores */ + &present_wait_semaphore_ptr, + &present_result); - present_queue_ptr->present(m_swapchain_ptr, - n_swapchain_image, - 1, /* n_wait_semaphores */ - &present_wait_semaphore_ptr); + ANVIL_REDUNDANT_VARIABLE(present_result); + anvil_assert (present_result == Anvil::SwapchainOperationErrorCode::SUCCESS); + } ++n_frames_rendered; @@ -294,19 +325,19 @@ void App::draw_frame() #endif } -void App::get_luminance_data(std::shared_ptr* out_result_ptr, - uint32_t* out_result_size_ptr) const +void App::get_luminance_data(std::unique_ptr* out_result_ptr, + uint32_t* out_result_size_ptr) const { - std::shared_ptr luminance_data_ptr; - float* luminance_data_raw_ptr; - uint32_t luminance_data_size; + std::unique_ptr luminance_data_ptr; + float* luminance_data_raw_ptr; + uint32_t luminance_data_size; static_assert(N_TRIANGLES == 16, "Shader and the app logic assumes N_TRIANGLES will always be 16"); luminance_data_size = sizeof(float) * N_TRIANGLES; - luminance_data_ptr.reset(new float[luminance_data_size / sizeof(float)], std::default_delete()); + luminance_data_ptr.reset(new float[luminance_data_size / sizeof(float)] ); luminance_data_raw_ptr = luminance_data_ptr.get(); @@ -317,7 +348,7 @@ void App::get_luminance_data(std::shared_ptr* out_result_ptr, luminance_data_raw_ptr[n_tri] = float(n_tri) / float(N_TRIANGLES - 1); } - *out_result_ptr = luminance_data_ptr; + *out_result_ptr = std::move(luminance_data_ptr); *out_result_size_ptr = luminance_data_size; } @@ -326,9 +357,9 @@ const unsigned char* App::get_mesh_data() const return reinterpret_cast(g_mesh_data); } -VkFormat App::get_mesh_data_color_format() const +Anvil::Format App::get_mesh_data_color_format() const { - return VK_FORMAT_R32G32B32_SFLOAT; + return Anvil::Format::R32G32B32_SFLOAT; } uint32_t App::get_mesh_data_color_start_offset() const @@ -341,9 +372,9 @@ uint32_t App::get_mesh_data_color_stride() const return g_mesh_data_color_stride; } -VkFormat App::get_mesh_data_position_format() const +Anvil::Format App::get_mesh_data_position_format() const { - return VK_FORMAT_R32G32B32A32_SFLOAT; + return Anvil::Format::R32G32B32A32_SFLOAT; } uint32_t App::get_mesh_data_position_start_offset() const @@ -390,39 +421,48 @@ void App::init_buffers() sizeof(float) * N_TRIANGLES * 4 + /* position (vec2) + rotation (vec2) */ sizeof(float) * N_TRIANGLES + /* luminance */ sizeof(float) * N_TRIANGLES; /* size */ - const auto ub_data_alignment_requirement = m_device_ptr.lock()->get_physical_device_properties().limits.minUniformBufferOffsetAlignment; + const auto ub_data_alignment_requirement = 
m_device_ptr->get_physical_device_properties().core_vk1_0_properties_ptr->limits.min_uniform_buffer_offset_alignment; const auto ub_data_size_total = N_SWAPCHAIN_IMAGES * (Anvil::Utils::round_up(ub_data_size_per_swapchain_image, ub_data_alignment_requirement) ); m_ub_data_size_per_swapchain_image = ub_data_size_total / N_SWAPCHAIN_IMAGES; /* Use a memory allocator to re-use memory blocks wherever possible */ - std::shared_ptr allocator_ptr = Anvil::MemoryAllocator::create_oneshot(m_device_ptr); + auto allocator_ptr = Anvil::MemoryAllocator::create_oneshot(m_device_ptr.get() ); /* Set up a buffer to hold uniform data */ - m_data_buffer_ptr = Anvil::Buffer::create_nonsparse(m_device_ptr, - ub_data_size_total, - Anvil::QUEUE_FAMILY_COMPUTE_BIT | Anvil::QUEUE_FAMILY_GRAPHICS_BIT, - VK_SHARING_MODE_EXCLUSIVE, - VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT); + { + auto create_info_ptr = Anvil::BufferCreateInfo::create_no_alloc(m_device_ptr.get(), + ub_data_size_total, + Anvil::QueueFamilyFlagBits::COMPUTE_BIT | Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, + Anvil::SharingMode::EXCLUSIVE, + Anvil::BufferCreateFlagBits::NONE, + Anvil::BufferUsageFlagBits::UNIFORM_BUFFER_BIT); + + m_data_buffer_ptr = Anvil::Buffer::create(std::move(create_info_ptr) ); + } m_data_buffer_ptr->set_name("Data buffer"); - allocator_ptr->add_buffer(m_data_buffer_ptr, - 0); /* in_required_memory_features */ + allocator_ptr->add_buffer(m_data_buffer_ptr.get(), + Anvil::MemoryFeatureFlagBits::NONE); /* in_required_memory_features */ /* Set up a buffer to hold mesh data */ - m_mesh_data_buffer_ptr = Anvil::Buffer::create_nonsparse( - m_device_ptr, - get_mesh_data_size(), - Anvil::QUEUE_FAMILY_GRAPHICS_BIT, - VK_SHARING_MODE_EXCLUSIVE, - VK_BUFFER_USAGE_VERTEX_BUFFER_BIT); + { + auto create_info_ptr = Anvil::BufferCreateInfo::create_no_alloc(m_device_ptr.get(), + get_mesh_data_size(), + Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, + Anvil::SharingMode::EXCLUSIVE, + Anvil::BufferCreateFlagBits::NONE, + Anvil::BufferUsageFlagBits::VERTEX_BUFFER_BIT); + + m_mesh_data_buffer_ptr = Anvil::Buffer::create(std::move(create_info_ptr) ); + } - m_mesh_data_buffer_ptr->set_name("Mesh vertexdata buffer"); + m_mesh_data_buffer_ptr->set_name("Mesh vertex data buffer"); - allocator_ptr->add_buffer(m_mesh_data_buffer_ptr, - 0); /* in_required_memory_features */ + allocator_ptr->add_buffer(m_mesh_data_buffer_ptr.get(), + Anvil::MemoryFeatureFlagBits::NONE); /* in_required_memory_features */ /* Allocate memory blocks and copy data where applicable */ m_mesh_data_buffer_ptr->write(0, /* start_offset */ @@ -432,18 +472,17 @@ void App::init_buffers() void App::init_command_buffers() { - std::shared_ptr device_locked_ptr (m_device_ptr); - std::shared_ptr gfx_pipeline_manager_ptr(device_locked_ptr->get_graphics_pipeline_manager() ); - VkImageSubresourceRange image_subresource_range; - std::shared_ptr luminance_data_ptr; - uint32_t luminance_data_size; - std::shared_ptr universal_queue_ptr (device_locked_ptr->get_universal_queue(0) ); + auto gfx_pipeline_manager_ptr(m_device_ptr->get_graphics_pipeline_manager() ); + Anvil::ImageSubresourceRange image_subresource_range; + std::unique_ptr luminance_data_ptr; + uint32_t luminance_data_size; + Anvil::Queue* universal_queue_ptr (m_device_ptr->get_universal_queue(0) ); - image_subresource_range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; - image_subresource_range.baseArrayLayer = 0; - image_subresource_range.baseMipLevel = 0; - image_subresource_range.layerCount = 1; - image_subresource_range.levelCount = 1; + 
image_subresource_range.aspect_mask = Anvil::ImageAspectFlagBits::COLOR_BIT; + image_subresource_range.base_array_layer = 0; + image_subresource_range.base_mip_level = 0; + image_subresource_range.layer_count = 1; + image_subresource_range.level_count = 1; get_luminance_data(&luminance_data_ptr, &luminance_data_size); @@ -452,9 +491,9 @@ void App::init_command_buffers() n_command_buffer < N_SWAPCHAIN_IMAGES; ++n_command_buffer) { - std::shared_ptr cmd_buffer_ptr; + Anvil::PrimaryCommandBufferUniquePtr cmd_buffer_ptr; - cmd_buffer_ptr = device_locked_ptr->get_command_pool(Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL)->alloc_primary_level_command_buffer(); + cmd_buffer_ptr = m_device_ptr->get_command_pool_for_queue_family_index(m_device_ptr->get_universal_queue(0)->get_queue_family_index() )->alloc_primary_level_command_buffer(); /* Start recording commands */ cmd_buffer_ptr->start_recording(false, /* one_time_submit */ @@ -462,39 +501,38 @@ void App::init_command_buffers() /* Switch the swap-chain image to the color_attachment_optimal image layout */ { - Anvil::ImageBarrier image_barrier(0, /* source_access_mask */ - VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, /* destination_access_mask */ - false, - VK_IMAGE_LAYOUT_UNDEFINED, /* old_image_layout */ - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, /* new_image_layout */ + Anvil::ImageBarrier image_barrier(Anvil::AccessFlagBits::NONE, /* source_access_mask */ + Anvil::AccessFlagBits::COLOR_ATTACHMENT_WRITE_BIT, /* destination_access_mask */ + Anvil::ImageLayout::UNDEFINED, /* old_image_layout */ + Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, /* new_image_layout */ universal_queue_ptr->get_queue_family_index(), universal_queue_ptr->get_queue_family_index(), m_swapchain_ptr->get_image(n_command_buffer), image_subresource_range); - cmd_buffer_ptr->record_pipeline_barrier(VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, /* src_stage_mask */ - VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,/* dst_stage_mask */ - VK_FALSE, /* in_by_region */ - 0, /* in_memory_barrier_count */ - nullptr, /* in_memory_barrier_ptrs */ - 0, /* in_buffer_memory_barrier_count */ - nullptr, /* in_buffer_memory_barrier_ptrs */ - 1, /* in_image_memory_barrier_count */ + cmd_buffer_ptr->record_pipeline_barrier(Anvil::PipelineStageFlagBits::TOP_OF_PIPE_BIT, /* src_stage_mask */ + Anvil::PipelineStageFlagBits::COLOR_ATTACHMENT_OUTPUT_BIT,/* dst_stage_mask */ + Anvil::DependencyFlagBits::NONE, + 0, /* in_memory_barrier_count */ + nullptr, /* in_memory_barrier_ptrs */ + 0, /* in_buffer_memory_barrier_count */ + nullptr, /* in_buffer_memory_barrier_ptrs */ + 1, /* in_image_memory_barrier_count */ &image_barrier); } /* Make sure CPU-written data is flushed before we start rendering */ - Anvil::BufferBarrier buffer_barrier(VK_ACCESS_HOST_WRITE_BIT, /* in_source_access_mask */ - VK_ACCESS_UNIFORM_READ_BIT, /* in_destination_access_mask */ - universal_queue_ptr->get_queue_family_index(), /* in_src_queue_family_index */ - universal_queue_ptr->get_queue_family_index(), /* in_dst_queue_family_index */ - m_data_buffer_ptr, - m_ub_data_size_per_swapchain_image * n_command_buffer, /* in_offset */ - m_ub_data_size_per_swapchain_image); - - cmd_buffer_ptr->record_pipeline_barrier(VK_PIPELINE_STAGE_HOST_BIT, - VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, - VK_FALSE, /* in_by_region */ + Anvil::BufferBarrier buffer_barrier(Anvil::AccessFlagBits::HOST_WRITE_BIT, /* in_source_access_mask */ + Anvil::AccessFlagBits::UNIFORM_READ_BIT, /* in_destination_access_mask */ + universal_queue_ptr->get_queue_family_index(), /* 
in_src_queue_family_index */ + universal_queue_ptr->get_queue_family_index(), /* in_dst_queue_family_index */ + m_data_buffer_ptr.get(), + m_ub_data_size_per_swapchain_image * n_command_buffer, /* in_offset */ + m_ub_data_size_per_swapchain_image); + + cmd_buffer_ptr->record_pipeline_barrier(Anvil::PipelineStageFlagBits::HOST_BIT, + Anvil::PipelineStageFlagBits::VERTEX_SHADER_BIT, + Anvil::DependencyFlagBits::NONE, 0, /* in_memory_barrier_count */ nullptr, /* in_memory_barriers_ptr */ 1, /* in_buffer_memory_barrier_count */ @@ -519,26 +557,27 @@ void App::init_command_buffers() cmd_buffer_ptr->record_begin_render_pass(1, /* in_n_clear_values */ &attachment_clear_value, - m_fbos[n_command_buffer], + m_fbos[n_command_buffer].get(), render_area, - m_renderpass_ptr, - VK_SUBPASS_CONTENTS_INLINE); + m_renderpass_ptr.get(), + Anvil::SubpassContents::INLINE); { - const uint32_t data_ub_offset = static_cast(m_ub_data_size_per_swapchain_image * n_command_buffer); - std::shared_ptr ds_ptr = m_dsg_ptr->get_descriptor_set (0 /* n_set */); - const VkDeviceSize mesh_data_buffer_offset = 0; - std::shared_ptr pipeline_layout_ptr = gfx_pipeline_manager_ptr->get_graphics_pipeline_layout(m_pipeline_id); + const uint32_t data_ub_offset = static_cast(m_ub_data_size_per_swapchain_image * n_command_buffer); + auto ds_ptr = m_dsg_ptr->get_descriptor_set (0 /* n_set */); + const VkDeviceSize mesh_data_buffer_offset = 0; + auto mesh_data_buffer_raw_ptr = m_mesh_data_buffer_ptr.get(); + auto pipeline_layout_ptr = gfx_pipeline_manager_ptr->get_pipeline_layout(m_pipeline_id); - cmd_buffer_ptr->record_bind_pipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, + cmd_buffer_ptr->record_bind_pipeline(Anvil::PipelineBindPoint::GRAPHICS, m_pipeline_id); cmd_buffer_ptr->record_push_constants(pipeline_layout_ptr, - VK_SHADER_STAGE_FRAGMENT_BIT | VK_SHADER_STAGE_VERTEX_BIT, + Anvil::ShaderStageFlagBits::FRAGMENT_BIT | Anvil::ShaderStageFlagBits::VERTEX_BIT, 0, /* in_offset */ luminance_data_size, luminance_data_ptr.get() ); - cmd_buffer_ptr->record_bind_descriptor_sets(VK_PIPELINE_BIND_POINT_GRAPHICS, + cmd_buffer_ptr->record_bind_descriptor_sets(Anvil::PipelineBindPoint::GRAPHICS, pipeline_layout_ptr, 0, /* firstSet */ 1, /* setCount */ @@ -546,10 +585,23 @@ void App::init_command_buffers() 1, /* dynamicOffsetCount */ &data_ub_offset); /* pDynamicOffsets */ - cmd_buffer_ptr->record_bind_vertex_buffers(0, /* startBinding */ - 1, /* bindingCount */ - &m_mesh_data_buffer_ptr, - &mesh_data_buffer_offset); + { + Anvil::Buffer* buffer_raw_ptrs[] = + { + mesh_data_buffer_raw_ptr, + mesh_data_buffer_raw_ptr + }; + const VkDeviceSize buffer_offsets[] = + { + mesh_data_buffer_offset, + mesh_data_buffer_offset + }; + + cmd_buffer_ptr->record_bind_vertex_buffers(0, /* start_binding */ + 2, /* binding_count */ + buffer_raw_ptrs, + buffer_offsets); + } cmd_buffer_ptr->record_draw(3, /* in_vertex_count */ N_TRIANGLES, /* in_instance_count */ @@ -561,30 +613,36 @@ void App::init_command_buffers() /* Close the recording process */ cmd_buffer_ptr->stop_recording(); - m_command_buffers[n_command_buffer] = cmd_buffer_ptr; + m_command_buffers[n_command_buffer] = std::move(cmd_buffer_ptr); } } void App::init_dsgs() { - m_dsg_ptr = Anvil::DescriptorSetGroup::create(m_device_ptr, - false, /* releaseable_sets */ - 1 /* n_sets */); + { + auto dsg_create_info_ptrs = std::vector(1); + + dsg_create_info_ptrs[0] = Anvil::DescriptorSetCreateInfo::create(); + + dsg_create_info_ptrs[0]->add_binding(0, /* n_binding */ + Anvil::DescriptorType::UNIFORM_BUFFER_DYNAMIC, + 1, 
/* n_elements */ + Anvil::ShaderStageFlagBits::VERTEX_BIT); + + m_dsg_ptr = Anvil::DescriptorSetGroup::create(m_device_ptr.get(), + dsg_create_info_ptrs); + } - m_dsg_ptr->add_binding (0, /* n_set */ - 0, /* n_binding */ - VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, - 1, /* n_elements */ - VK_SHADER_STAGE_VERTEX_BIT); m_dsg_ptr->set_binding_item(0, /* n_set */ 0, /* n_binding */ - Anvil::DescriptorSet::DynamicUniformBufferBindingElement(m_data_buffer_ptr, + Anvil::DescriptorSet::DynamicUniformBufferBindingElement(m_data_buffer_ptr.get(), 0, /* in_start_offset */ m_ub_data_size_per_swapchain_image) ); } void App::init_events() { + /* Stub */ } void App::init_framebuffers() @@ -596,124 +654,126 @@ void App::init_framebuffers() n_fbo < N_SWAPCHAIN_IMAGES; ++n_fbo) { - std::shared_ptr attachment_image_view_ptr; + Anvil::ImageView* attachment_image_view_ptr = nullptr; attachment_image_view_ptr = m_swapchain_ptr->get_image_view(n_fbo); /* Create the internal framebuffer object */ - m_fbos[n_fbo] = Anvil::Framebuffer::create( - m_device_ptr, - WINDOW_WIDTH, - WINDOW_HEIGHT, - 1 /* n_layers */); + { + auto create_info_ptr = Anvil::FramebufferCreateInfo::create(m_device_ptr.get(), + WINDOW_WIDTH, + WINDOW_HEIGHT, + 1 /* n_layers */); - m_fbos[n_fbo]->set_name_formatted("Framebuffer for swapchain image [%d]", - n_fbo); + result = create_info_ptr->add_attachment(attachment_image_view_ptr, + nullptr /* out_opt_attachment_id_ptr */); + anvil_assert(result); - result = m_fbos[n_fbo]->add_attachment(attachment_image_view_ptr, - nullptr /* out_opt_attachment_id_ptr */); + m_fbos[n_fbo] = Anvil::Framebuffer::create(std::move(create_info_ptr) ); + } - anvil_assert(result); + m_fbos[n_fbo]->set_name_formatted("Framebuffer for swapchain image [%d]", + n_fbo); } } void App::init_gfx_pipelines() { - std::shared_ptr device_locked_ptr (m_device_ptr); - std::shared_ptr gfx_pipeline_manager_ptr(device_locked_ptr->get_graphics_pipeline_manager() ); - bool result; + Anvil::GraphicsPipelineCreateInfoUniquePtr gfx_pipeline_create_info_ptr; + auto gfx_pipeline_manager_ptr(m_device_ptr->get_graphics_pipeline_manager() ); /* Create a renderpass for the pipeline */ Anvil::RenderPassAttachmentID render_pass_color_attachment_id; Anvil::SubPassID render_pass_subpass_id; - m_renderpass_ptr = Anvil::RenderPass::create(m_device_ptr, - m_swapchain_ptr); - - m_renderpass_ptr->set_name("Main renderpass"); + { + Anvil::RenderPassCreateInfoUniquePtr render_pass_create_info_ptr(new Anvil::RenderPassCreateInfo(m_device_ptr.get() ) ); - result = m_renderpass_ptr->add_color_attachment(m_swapchain_ptr->get_image_format(), - VK_SAMPLE_COUNT_1_BIT, - VK_ATTACHMENT_LOAD_OP_CLEAR, - VK_ATTACHMENT_STORE_OP_STORE, - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, + render_pass_create_info_ptr->add_color_attachment(m_swapchain_ptr->get_create_info_ptr()->get_format(), + Anvil::SampleCountFlagBits::_1_BIT, + Anvil::AttachmentLoadOp::CLEAR, + Anvil::AttachmentStoreOp::STORE, + Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, #ifdef ENABLE_OFFSCREEN_RENDERING - VK_IMAGE_LAYOUT_GENERAL, + Anvil::ImageLayout::GENERAL, #else - VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, + Anvil::ImageLayout::PRESENT_SRC_KHR, #endif - false, /* may_alias */ - &render_pass_color_attachment_id); - anvil_assert(result); - - - result = m_renderpass_ptr->add_subpass(*m_fs_ptr, - Anvil::ShaderModuleStageEntryPoint(), /* geometry_shader */ - Anvil::ShaderModuleStageEntryPoint(), /* tess_control_shader */ - Anvil::ShaderModuleStageEntryPoint(), /* tess_evaluation_shader */ - *m_vs_ptr, - 
&render_pass_subpass_id); - anvil_assert(result); - - - result = m_renderpass_ptr->get_subpass_graphics_pipeline_id(render_pass_subpass_id, - &m_pipeline_id); - anvil_assert(result); - - - result = m_renderpass_ptr->add_subpass_color_attachment(render_pass_subpass_id, - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, - render_pass_color_attachment_id, - 0, /* location */ - nullptr); /* opt_attachment_resolve_id_ptr */ - anvil_assert(result); - - - result = gfx_pipeline_manager_ptr->set_pipeline_dsg (m_pipeline_id, - m_dsg_ptr); - result &= gfx_pipeline_manager_ptr->attach_push_constant_range_to_pipeline(m_pipeline_id, - 0, /* offset */ - sizeof(float) * 4 /* vec4 */ * 4 /* vec4 values */, - VK_SHADER_STAGE_FRAGMENT_BIT | VK_SHADER_STAGE_VERTEX_BIT); - - anvil_assert(result); - - - gfx_pipeline_manager_ptr->set_rasterization_properties (m_pipeline_id, - VK_POLYGON_MODE_FILL, - VK_CULL_MODE_NONE, - VK_FRONT_FACE_COUNTER_CLOCKWISE, - 1.0f); /* line_width */ - gfx_pipeline_manager_ptr->set_color_blend_attachment_properties(m_pipeline_id, - 0, /* attachment_id */ - true, /* blending_enabled */ - VK_BLEND_OP_ADD, - VK_BLEND_OP_ADD, - VK_BLEND_FACTOR_SRC_ALPHA, - VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA, - VK_BLEND_FACTOR_SRC_ALPHA, - VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA, - VK_COLOR_COMPONENT_A_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_R_BIT); - - result = gfx_pipeline_manager_ptr->add_vertex_attribute(m_pipeline_id, - 0, /* location */ - get_mesh_data_position_format(), - get_mesh_data_position_start_offset(), - get_mesh_data_position_stride(), - VK_VERTEX_INPUT_RATE_VERTEX); - anvil_assert(result); - - result = gfx_pipeline_manager_ptr->add_vertex_attribute(m_pipeline_id, - 1, /* location */ - get_mesh_data_color_format(), - get_mesh_data_color_start_offset(), - get_mesh_data_color_stride(), - VK_VERTEX_INPUT_RATE_VERTEX); - anvil_assert(result); + false, /* may_alias */ + &render_pass_color_attachment_id); + + + render_pass_create_info_ptr->add_subpass (&render_pass_subpass_id); + render_pass_create_info_ptr->add_subpass_color_attachment(render_pass_subpass_id, + Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, + render_pass_color_attachment_id, + 0, /* location */ + nullptr); /* opt_attachment_resolve_id_ptr */ + + + m_renderpass_ptr = Anvil::RenderPass::create(std::move(render_pass_create_info_ptr), + m_swapchain_ptr.get() ); + } + + m_renderpass_ptr->set_name("Main renderpass"); + + gfx_pipeline_create_info_ptr = Anvil::GraphicsPipelineCreateInfo::create(Anvil::PipelineCreateFlagBits::NONE, + m_renderpass_ptr.get(), + render_pass_subpass_id, + *m_fs_ptr, + Anvil::ShaderModuleStageEntryPoint(), /* in_geometry_shader */ + Anvil::ShaderModuleStageEntryPoint(), /* in_tess_control_shader */ + Anvil::ShaderModuleStageEntryPoint(), /* in_tess_evaluation_shader */ + *m_vs_ptr); + + + + gfx_pipeline_create_info_ptr->set_descriptor_set_create_info (m_dsg_ptr->get_descriptor_set_create_info() ); + gfx_pipeline_create_info_ptr->attach_push_constant_range (0, /* in_offset */ + sizeof(float) * 4 /* vec4 */ * 4 /* vec4 values */, + Anvil::ShaderStageFlagBits::FRAGMENT_BIT | Anvil::ShaderStageFlagBits::VERTEX_BIT); + gfx_pipeline_create_info_ptr->set_rasterization_properties (Anvil::PolygonMode::FILL, + Anvil::CullModeFlagBits::NONE, + Anvil::FrontFace::COUNTER_CLOCKWISE, + 1.0f); /* in_line_width */ + gfx_pipeline_create_info_ptr->set_color_blend_attachment_properties(0, /* in_attachment_id */ + true, /* in_blending_enabled */ + Anvil::BlendOp::ADD, + Anvil::BlendOp::ADD, + 
Anvil::BlendFactor::SRC_ALPHA, + Anvil::BlendFactor::ONE_MINUS_SRC_ALPHA, + Anvil::BlendFactor::SRC_ALPHA, + Anvil::BlendFactor::ONE_MINUS_SRC_ALPHA, + Anvil::ColorComponentFlagBits::A_BIT | Anvil::ColorComponentFlagBits::B_BIT | Anvil::ColorComponentFlagBits::G_BIT | Anvil::ColorComponentFlagBits::R_BIT); + + { + const auto color_attribute = Anvil::VertexInputAttribute(1, /* location */ + get_mesh_data_color_format (), + get_mesh_data_color_start_offset() ); + const auto position_attribute = Anvil::VertexInputAttribute(0, /* in_location */ + get_mesh_data_position_format (), + get_mesh_data_position_start_offset() ); + + gfx_pipeline_create_info_ptr->add_vertex_binding(0, /* in_binding */ + Anvil::VertexInputRate::VERTEX, + get_mesh_data_position_stride(), + 1, /* in_n_attributes */ + &position_attribute); + + gfx_pipeline_create_info_ptr->add_vertex_binding(1, /* in_binding */ + Anvil::VertexInputRate::VERTEX, + get_mesh_data_color_stride(), + 1, /* in_n_attributes */ + &color_attribute); + } + + gfx_pipeline_manager_ptr->add_pipeline(std::move(gfx_pipeline_create_info_ptr), + &m_pipeline_id); } void App::init_images() { + /* Stub */ } void App::init_semaphores() @@ -722,34 +782,46 @@ void App::init_semaphores() n_semaphore < m_n_swapchain_images; ++n_semaphore) { - std::shared_ptr new_signal_semaphore_ptr = Anvil::Semaphore::create(m_device_ptr); - std::shared_ptr new_wait_semaphore_ptr = Anvil::Semaphore::create(m_device_ptr); + Anvil::SemaphoreUniquePtr new_signal_semaphore_ptr; + Anvil::SemaphoreUniquePtr new_wait_semaphore_ptr; + + { + auto create_info_ptr = Anvil::SemaphoreCreateInfo::create(m_device_ptr.get() ); + + new_signal_semaphore_ptr = Anvil::Semaphore::create(std::move(create_info_ptr) ); + } + + { + auto create_info_ptr = Anvil::SemaphoreCreateInfo::create(m_device_ptr.get() ); + + new_wait_semaphore_ptr = Anvil::Semaphore::create(std::move(create_info_ptr) ); + } new_signal_semaphore_ptr->set_name_formatted("Signal semaphore [%d]", n_semaphore); new_wait_semaphore_ptr->set_name_formatted ("Wait semaphore [%d]", n_semaphore); - m_frame_signal_semaphores.push_back(new_signal_semaphore_ptr); - m_frame_wait_semaphores.push_back (new_wait_semaphore_ptr); + m_frame_signal_semaphores.push_back(std::move(new_signal_semaphore_ptr) ); + m_frame_wait_semaphores.push_back (std::move(new_wait_semaphore_ptr) ); } } void App::init_shaders() { - std::shared_ptr fragment_shader_ptr; - std::shared_ptr fragment_shader_module_ptr; - std::shared_ptr vertex_shader_ptr; - std::shared_ptr vertex_shader_module_ptr; + Anvil::GLSLShaderToSPIRVGeneratorUniquePtr fragment_shader_ptr; + Anvil::ShaderModuleUniquePtr fragment_shader_module_ptr; + Anvil::GLSLShaderToSPIRVGeneratorUniquePtr vertex_shader_ptr; + Anvil::ShaderModuleUniquePtr vertex_shader_module_ptr; - fragment_shader_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr, + fragment_shader_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr.get(), Anvil::GLSLShaderToSPIRVGenerator::MODE_USE_SPECIFIED_SOURCE, g_glsl_frag, - Anvil::SHADER_STAGE_FRAGMENT); - vertex_shader_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr, + Anvil::ShaderStage::FRAGMENT); + vertex_shader_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(m_device_ptr.get(), Anvil::GLSLShaderToSPIRVGenerator::MODE_USE_SPECIFIED_SOURCE, g_glsl_vert, - Anvil::SHADER_STAGE_VERTEX); + Anvil::ShaderStage::VERTEX); fragment_shader_ptr->add_definition_value_pair("N_TRIANGLES", N_TRIANGLES); @@ -757,59 +829,62 @@ void App::init_shaders() N_TRIANGLES); - 
fragment_shader_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr, - fragment_shader_ptr); - vertex_shader_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr, - vertex_shader_ptr); + fragment_shader_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr.get (), + fragment_shader_ptr.get() ); + vertex_shader_module_ptr = Anvil::ShaderModule::create_from_spirv_generator(m_device_ptr.get (), + vertex_shader_ptr.get () ); fragment_shader_module_ptr->set_name("Fragment shader module"); vertex_shader_module_ptr->set_name ("Vertex shader module"); m_fs_ptr.reset( - new Anvil::ShaderModuleStageEntryPoint( - "main", - fragment_shader_module_ptr, - Anvil::SHADER_STAGE_FRAGMENT) + new Anvil::ShaderModuleStageEntryPoint("main", + std::move(fragment_shader_module_ptr), + Anvil::ShaderStage::FRAGMENT) ); m_vs_ptr.reset( - new Anvil::ShaderModuleStageEntryPoint( - "main", - vertex_shader_module_ptr, - Anvil::SHADER_STAGE_VERTEX) + new Anvil::ShaderModuleStageEntryPoint("main", + std::move(vertex_shader_module_ptr), + Anvil::ShaderStage::VERTEX) ); } void App::init_swapchain() { - std::shared_ptr device_locked_ptr(m_device_ptr); + Anvil::SGPUDevice* device_ptr(reinterpret_cast(m_device_ptr.get() )); + + { + auto create_info_ptr = Anvil::RenderingSurfaceCreateInfo::create(m_instance_ptr.get(), + m_device_ptr.get (), + m_window_ptr.get () ); - m_rendering_surface_ptr = Anvil::RenderingSurface::create(m_instance_ptr, - m_device_ptr, - m_window_ptr); + m_rendering_surface_ptr = Anvil::RenderingSurface::create(std::move(create_info_ptr) ); + } m_rendering_surface_ptr->set_name("Main rendering surface"); - m_swapchain_ptr = device_locked_ptr->create_swapchain(m_rendering_surface_ptr, - m_window_ptr, - VK_FORMAT_B8G8R8A8_UNORM, - VK_PRESENT_MODE_FIFO_KHR, - VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, - m_n_swapchain_images); + m_swapchain_ptr = device_ptr->create_swapchain(m_rendering_surface_ptr.get(), + m_window_ptr.get (), + Anvil::Format::B8G8R8A8_UNORM, + Anvil::ColorSpaceKHR::SRGB_NONLINEAR_KHR, + Anvil::PresentModeKHR::FIFO_KHR, + Anvil::ImageUsageFlagBits::COLOR_ATTACHMENT_BIT, + m_n_swapchain_images); m_swapchain_ptr->set_name("Main swapchain"); /* Cache the queue we are going to use for presentation */ const std::vector* present_queue_fams_ptr = nullptr; - if (!m_rendering_surface_ptr->get_queue_families_with_present_support(device_locked_ptr->get_physical_device(), + if (!m_rendering_surface_ptr->get_queue_families_with_present_support(device_ptr->get_physical_device(), &present_queue_fams_ptr) ) { anvil_assert_fail(); } - m_present_queue_ptr = device_locked_ptr->get_queue(present_queue_fams_ptr->at(0), - 0); /* in_n_queue */ + m_present_queue_ptr = device_ptr->get_queue_for_queue_family_index(present_queue_fams_ptr->at(0), + 0); /* in_n_queue */ } void App::init_window() @@ -838,43 +913,46 @@ void App::init_window() void App::init_vulkan() { /* Create a Vulkan instance */ - m_instance_ptr = Anvil::Instance::create(APP_NAME, /* app_name */ - APP_NAME, /* engine_name */ + { + auto create_info_ptr = Anvil::InstanceCreateInfo::create(APP_NAME, /* in_app_name */ + APP_NAME, /* in_engine_name */ #ifdef ENABLE_VALIDATION - std::bind(&App::on_validation_callback, - this, - std::placeholders::_1, - std::placeholders::_2, - std::placeholders::_3, - std::placeholders::_4) ); - + std::bind(&App::on_validation_callback, + this, + std::placeholders::_1, + std::placeholders::_2), #else - nullptr); + Anvil::DebugCallbackFunction(), #endif + false); /* 
in_mt_safe */ + + m_instance_ptr = Anvil::Instance::create(std::move(create_info_ptr) ); + } m_physical_device_ptr = m_instance_ptr->get_physical_device(0); /* Create a Vulkan device */ - m_device_ptr = Anvil::SGPUDevice::create(m_physical_device_ptr, - Anvil::DeviceExtensionConfiguration(), - std::vector(), /* layers */ - false, /* transient_command_buffer_allocs_only */ - false); /* support_resettable_command_buffers */ + { + auto create_info_ptr = Anvil::DeviceCreateInfo::create_sgpu(m_physical_device_ptr, + true, /* in_enable_shader_module_cache */ + Anvil::DeviceExtensionConfiguration(), + std::vector(), /* in_layers */ + Anvil::CommandPoolCreateFlagBits::NONE, + false); /* in_mt_safe */ + + m_device_ptr = Anvil::SGPUDevice::create(std::move(create_info_ptr) ); + } } -VkBool32 App::on_validation_callback(VkDebugReportFlagsEXT message_flags, - VkDebugReportObjectTypeEXT object_type, - const char* layer_prefix, - const char* message) +void App::on_validation_callback(Anvil::DebugMessageSeverityFlags in_severity, + const char* in_message_ptr) { - if ((message_flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0) + if ((in_severity & Anvil::DebugMessageSeverityFlagBits::ERROR_BIT) != 0) { fprintf(stderr, "[!] %s\n", - message); + in_message_ptr); } - - return false; } void App::run() @@ -915,15 +993,15 @@ void App::update_data_ub_contents(uint32_t in_n_swapchain_image) m_data_buffer_ptr->write(in_n_swapchain_image * m_ub_data_size_per_swapchain_image, /* start_offset */ sizeof(data), &data, - m_device_ptr.lock()->get_universal_queue(0) ); + m_device_ptr->get_universal_queue(0) ); ++n_frames_rendered; } -int main() +int main(int argc, char *argv[]) { - std::shared_ptr app_ptr(new App() ); + std::unique_ptr app_ptr(new App() ); app_ptr->init(); app_ptr->run(); diff --git a/include/config.h b/include/config.h new file mode 100644 index 00000000..ace21cc7 --- /dev/null +++ b/include/config.h @@ -0,0 +1,23 @@ +/* Under no circumstances include Vulkan func prototypes. For a variety of reasons, + * these are NOT exposed by Anvil. + */ +#if !defined(ANVIL_VULKAN_CPP) + #define VK_NO_PROTOTYPES +#endif + +/* #undef ANVIL_VULKAN_DYNAMIC_DLL */ + +/* Defined if Anvil links statically with a Vulkan library */ +#define ANVIL_LINK_STATICALLY_WITH_VULKAN_LIB + +/* Defined if glslangvalidator is to be statically linked with Anvil */ +#define ANVIL_LINK_WITH_GLSLANG + +/* Defined if Windows window system support is to be included in Anvil */ +#define ANVIL_INCLUDE_WIN3264_WINDOW_SYSTEM_SUPPORT + +/* Defined if XCB window system support is to be included in Anvil */ +#define ANVIL_INCLUDE_XCB_WINDOW_SYSTEM_SUPPORT + +/* Defined if Wayland window system support is to be included in Anvil */ +#define ANVIL_INCLUDE_WAYLAND_WINDOW_SYSTEM_SUPPORT diff --git a/include/config.h.in b/include/config.h.in index 5c5aa1bd..60b27aaa 100644 --- a/include/config.h.in +++ b/include/config.h.in @@ -1,3 +1,15 @@ +/* Under no circumstances include Vulkan func prototypes. For a variety of reasons, + * these are NOT exposed by Anvil. 
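The VK_NO_PROTOTYPES guard above means application code cannot rely on the loader's global prototypes, which is consistent with the deinit() change earlier in this diff switching to Anvil's own entry-point table. A minimal sketch of the pattern, assuming m_device_ptr is an Anvil device wrapper as in the examples:

    /* With VK_NO_PROTOTYPES in effect the global prototype is unavailable ... */
    /*     vkDeviceWaitIdle(device_vk);                                        */
    /* ... so calls go through Anvil's own Anvil::Vulkan entry points instead: */
    Anvil::Vulkan::vkDeviceWaitIdle(m_device_ptr->get_device_vk() );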
+ */ +#if !defined(ANVIL_VULKAN_CPP) + #define VK_NO_PROTOTYPES +#endif + +#cmakedefine ANVIL_VULKAN_DYNAMIC_DLL "@ANVIL_VULKAN_DYNAMIC_DLL@" + +/* Defined if Anvil links statically with a Vulkan library */ +#cmakedefine ANVIL_LINK_STATICALLY_WITH_VULKAN_LIB + /* Defined if glslangvalidator is to be statically linked with Anvil */ #cmakedefine ANVIL_LINK_WITH_GLSLANG @@ -6,3 +18,9 @@ /* Defined if XCB window system support is to be included in Anvil */ #cmakedefine ANVIL_INCLUDE_XCB_WINDOW_SYSTEM_SUPPORT + +/* Defined if Wayland window system support is to be included in Anvil */ +#cmakedefine ANVIL_INCLUDE_WAYLAND_WINDOW_SYSTEM_SUPPORT + +/* Defined if headless window system support is to be included in Anvil */ +#cmakedefine ANVIL_INCLUDE_HEADLESS_WINDOW_SYSTEM_SUPPORT \ No newline at end of file diff --git a/include/misc/base_pipeline_create_info.h b/include/misc/base_pipeline_create_info.h new file mode 100644 index 00000000..c7360a98 --- /dev/null +++ b/include/misc/base_pipeline_create_info.h @@ -0,0 +1,158 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef ANVIL_BASE_PIPELINE_CREATE_INFO_H +#define ANVIL_BASE_PIPELINE_CREATE_INFO_H + +#include "misc/mt_safety.h" +#include "misc/types.h" + +namespace Anvil +{ + class BasePipelineCreateInfo + { + public: + /* Public functions */ + + virtual ~BasePipelineCreateInfo(); + + bool allows_derivatives() const + { + return (m_create_flags & Anvil::PipelineCreateFlagBits::ALLOW_DERIVATIVES_BIT) != 0; + } + + bool attach_push_constant_range(uint32_t in_offset, + uint32_t in_size, + Anvil::ShaderStageFlags in_stages); + + /* Returns != UINT32_MAX if this pipeline is a derivative pipeline, or UINT32_MAX otherwise. 
*/ + Anvil::PipelineID get_base_pipeline_id() const + { + return m_base_pipeline_id; + } + + const Anvil::PipelineCreateFlags& get_create_flags() const + { + return m_create_flags; + } + + const std::vector* get_ds_create_info_items() const + { + return &m_ds_create_info_items; + } + + const char* get_name() const + { + return m_name.c_str(); + } + + const PushConstantRanges& get_push_constant_ranges() const + { + return m_push_constant_ranges; + } + + /** Returns shader stage information for each stage, as specified at creation time */ + bool get_shader_stage_properties(Anvil::ShaderStage in_shader_stage, + const ShaderModuleStageEntryPoint** out_opt_result_ptr_ptr) const; + + bool get_specialization_constants(Anvil::ShaderStage in_shader_stage, + const SpecializationConstants** out_opt_spec_constants_ptr, + const unsigned char** out_opt_spec_constants_data_buffer_ptr, + uint32_t *out_opt_spec_constants_data_buffer_size=nullptr) const; + + bool has_optimizations_disabled() const + { + return (m_create_flags & Anvil::PipelineCreateFlagBits::DISABLE_OPTIMIZATION_BIT) != 0; + } + + bool is_proxy() const + { + return m_is_proxy; + } + + void set_descriptor_set_create_info(const std::vector* in_ds_create_info_vec_ptr); + + void set_name(const std::string& in_name) + { + m_name = in_name; + } + + protected: + /* Protected functions */ + + BasePipelineCreateInfo(); + + /** Adds a new specialization constant. + * + * @param in_shader_stage Shader stage, with which the new specialization constant should be + * associated. + * @param in_constant_id ID of the specialization constant to assign data for. + * @param in_n_data_bytes Number of bytes under @param in_data_ptr to assign to the specialization constant. + * @param in_data_ptr A buffer holding the data to be assigned to the constant. Must hold at least + * @param in_n_data_bytes bytes that will be read by the function. + * + * @return true if successful, false otherwise. 
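A minimal sketch of how the parameters documented above map onto a call; the constant ID and value are made up, and the member is declared protected and virtual just below, so application code would only reach it through a concrete create-info type that exposes it:

    const uint32_t n_triangles = 16; /* hypothetical specialization constant value */

    create_info_ptr->add_specialization_constant(Anvil::ShaderStage::FRAGMENT, /* in_shader_stage */
                                                 0,                            /* in_constant_id  */
                                                 sizeof(n_triangles),          /* in_n_data_bytes */
                                                 &n_triangles);                /* in_data_ptr     */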
+ **/ + virtual bool add_specialization_constant(Anvil::ShaderStage in_shader_stage, + uint32_t in_constant_id, + uint32_t in_n_data_bytes, + const void* in_data_ptr); + + void init (const Anvil::PipelineCreateFlags& in_create_flags, + uint32_t in_n_shader_module_stage_entrypoints, + const ShaderModuleStageEntryPoint* in_shader_module_stage_entrypoint_ptrs, + const Anvil::PipelineID* in_opt_base_pipeline_id_ptr = nullptr, + const std::vector* in_opt_ds_create_info_vec_ptr = nullptr); + void init_proxy(); + + void copy_state_from(const BasePipelineCreateInfo* in_src_pipeline_create_info_ptr); + + + /* Protected type definitions */ + typedef std::map ShaderStageToSpecializationConstantsMap; + + /* Protected variables */ + Anvil::PipelineID m_base_pipeline_id; + + std::vector m_ds_create_info_items; + std::string m_name; + PushConstantRanges m_push_constant_ranges; + std::vector m_specialization_constants_data_buffer; + ShaderStageToSpecializationConstantsMap m_specialization_constants_map; + + Anvil::PipelineCreateFlags m_create_flags; + bool m_is_proxy; + std::map m_shader_stages; + + private: + /* Private type definitions */ + + /* Private functions */ + void init_shader_modules(uint32_t in_n_shader_module_stage_entrypoints, + const Anvil::ShaderModuleStageEntryPoint* in_shader_module_stage_entrypoint_ptrs); + + /* Private variables */ + }; + +}; + +#endif /* ANVIL_BASE_PIPELINE_CREATE_INFO_H */ + diff --git a/include/misc/base_pipeline_manager.h b/include/misc/base_pipeline_manager.h index 96b87f35..b6fdb145 100644 --- a/include/misc/base_pipeline_manager.h +++ b/include/misc/base_pipeline_manager.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -37,445 +37,168 @@ #ifndef MISC_BASE_PIPELINE_MANAGER_H #define MISC_BASE_PIPELINE_MANAGER_H +#include "misc/base_pipeline_create_info.h" +#include "misc/callbacks.h" #include "misc/debug.h" +#include "misc/mt_safety.h" #include "misc/types.h" #include #include namespace Anvil { - - class BasePipelineManager + enum BasePipelineManagerCallbackID { - public: - /* Public type definitions */ + /* Call-back issued whenever a new pipeline is created. + * + * NOTE: Only base pipeline-level properties are available for querying at the time of the call-back! + * + * callback_arg: OnNewPipelineCreatedCallbackData instance. + */ + BASE_PIPELINE_MANAGER_CALLBACK_ID_ON_NEW_PIPELINE_CREATED, - typedef uint32_t ShaderIndex; + /* Always last */ + BASE_PIPELINE_MANAGER_CALLBACK_ID_COUNT + }; + class BasePipelineManager : public CallbacksSupportProvider, + public MTSafetySupportProvider + { + public: /* Public functions */ - /** Constructor. Initializes base layer of a pipeline manager. - * - * @param in_device_ptr Device to use. - * @param in_use_pipeline_cache true if a pipeline cache should be used to spawn new pipeline objects. - * What pipeline cache ends up being used depends on @param in_pipeline_cache_to_reuse_ptr - - * if a nullptr object is passed via this argument, a new pipeline cache instance will - * be created, and later released by the destructor. If a non-nullptr object is passed, - * it will be used instead. - * @param in_pipeline_cache_to_reuse_ptr Please see above. 
- **/ - explicit BasePipelineManager(std::weak_ptr in_device_ptr, - bool in_use_pipeline_cache = false, - std::shared_ptr in_pipeline_cache_to_reuse_ptr = std::shared_ptr() ); - /** Destructor. Releases internally managed objects. */ virtual ~BasePipelineManager(); - /** Appends a new push constant range description to a pipeline object. The actual data needs to be specified - * when creating a command buffer. - * - * The function marks the specified pipeline as dirty, meaning it will be re-baked at the next get_() call time. - * - * @param in_pipeline_id ID of the pipeline to append the push constant range to. Must not describe a proxy - * pipeline. - * @param in_offset Start offset, at which the push constant data will start. - * @param in_size Size of the push constant data. - * @param in_stages Pipeline stages the push constant data should be accessible to. - * - * @return true if successful, false otherwise. - **/ - bool attach_push_constant_range_to_pipeline(PipelineID in_pipeline_id, - uint32_t in_offset, - uint32_t in_size, - VkShaderStageFlags in_stages); + /** TODO */ + bool add_pipeline(Anvil::BasePipelineCreateInfoUniquePtr in_pipeline_create_info_ptr, + PipelineID* out_pipeline_id_ptr); + /** TODO */ virtual bool bake() = 0; - /** Assigns DSes encapsulated in the user-specified DSG to the pipeline. This is later used to obtain - * a pipeline layout, which is then specified at pipeline creation time. - * - * This function marks the pipeline as dirty, meaning it will be re-baked at the next get_*() call. + /** Deletes an existing pipeline. * - * @param in_pipeline_id ID of the compute pipeline to attach the descriptor sets defined by the group to. - * @param in_dsg_ptr Descriptor set group instance, whose descriptor sets should be appended. + * @param in_pipeline_id ID of a pipeline to delete. * * @return true if successful, false otherwise. **/ - bool set_pipeline_dsg(PipelineID in_pipeline_id, - std::shared_ptr in_dsg_ptr); + bool delete_pipeline(PipelineID in_pipeline_id); - /** Retrieves general properties of a pipeline. + /** Retrieves a VkPipeline instance associated with the specified pipeline ID. * - * @param out_opt_has_optimizations_disabled_ptr If not NULL, deref will be set to true if the pipeline - * has been created with enabled optimizations, or false - * if optimizations have been explicitly disabled. - * @param out_opt_allows_derivatives_ptr If not NULL, deref will be set to true if the pipeline - * has been created with support for derivative pipelines, - * or false otherwise. - * @param out_opt_is_a_derivative_ptr If not NULL, deref will be set to true if the specified - * pipeline is a derivative pipeline, or to false otherwise. + * The function will bake a pipeline object (and, possibly, a pipeline layout object, too) if + * the specified pipeline is marked as dirty. * - * @return true if successful, false otherwise. - */ - bool get_general_pipeline_properties(PipelineID in_pipeline_id, - bool* out_opt_has_optimizations_disabled_ptr, - bool* out_opt_allows_derivatives_ptr, - bool* out_opt_is_a_derivative_ptr) const; + * @param in_pipeline_id ID of the pipeline to return the raw Vulkan pipeline handle for. Must not + * describe a proxy pipeline. + * + * @return VkPipeline handle or nullptr if the function failed. 
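Taken together with add_pipeline() and delete_pipeline() above, the lifecycle used by the examples in this diff looks roughly as follows (a sketch; gfx_pipeline_manager_ptr and the create-info variable mirror the PushConstants code earlier in the diff):

    Anvil::PipelineID pipeline_id = UINT32_MAX;

    /* Register the pipeline with the manager. */
    gfx_pipeline_manager_ptr->add_pipeline(std::move(gfx_pipeline_create_info_ptr),
                                           &pipeline_id);

    /* Per the get_pipeline() note above, baking is performed lazily: a dirty
     * pipeline is baked when its layout or raw handle is first requested.    */
    Anvil::PipelineLayout* layout_ptr = gfx_pipeline_manager_ptr->get_pipeline_layout(pipeline_id);
    VkPipeline             pipeline   = gfx_pipeline_manager_ptr->get_pipeline       (pipeline_id);

    /* ... record & submit command buffers ... */

    /* Tear-down, as done in App::deinit() earlier in this diff. */
    gfx_pipeline_manager_ptr->delete_pipeline(pipeline_id);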
+ **/ + VkPipeline get_pipeline(PipelineID in_pipeline_id); - /** Returns the number of created pipelines */ - uint32_t get_n_pipelines() const - { - return static_cast(m_pipelines.size() ); - } + const Anvil::BasePipelineCreateInfo* get_pipeline_create_info(PipelineID in_pipeline_id) const; - /** Returns ID of a pipeline at user-specified index. + /** Retrieves a PipelineLayout instance associated with the specified pipeline ID. * - * @param in_n_pipeline Index of the pipeline to return ID of. - * @param out_pipeline_id_ptr TODO + * The function will bake a pipeline object (and, possibly, a pipeline layout object, too) if + * the specified pipeline is marked as dirty. * - * @return true if successful, false otherwise. + * @param in_pipeline_id ID of the pipeline to return the wrapper instance for. + * Must not describe a proxy pipeline. + * + * @return Ptr to a PipelineLayout instance or nullptr if the function failed. **/ - bool get_pipeline_id_at_index(uint32_t in_n_pipeline, - PipelineID* out_pipeline_id_ptr) const; - - /** Returns shader stage information for each stage, as specified at creation time */ - bool get_shader_stage_properties(PipelineID in_pipeline_id, - Anvil::ShaderStage in_shader_stage, - ShaderModuleStageEntryPoint* out_opt_result_ptr) const; + Anvil::PipelineLayout* get_pipeline_layout(PipelineID in_pipeline_id); + + /** Returns various post-compile information about compute and graphics pipeline shaders like + * compiled binary or optionally shader disassembly. + * Requires support for VK_AMD_shader_info extension. + * + * @param in_pipeline_id ID of the pipeline. + * @param in_shader_stage The shader stage to collect post-compile information for. + * @param in_info_type the type of information to collect - compiled binary code or shader disassembly. + * @param out_data_ptr pointer to a vector of bytes where the post-compile information + * is going to be stored. Pointer must be null. If the vector is empty + * it is going to be resized to match the size required to store + * the post-compile information. + * + * @return true if successful and *out_data_ptr has been updated to store the data returned by the + * device, false otherwise. + **/ + bool get_shader_info(PipelineID in_pipeline_id, + Anvil::ShaderStage in_shader_stage, + Anvil::ShaderInfoType in_info_type, + std::vector* out_data_ptr); - /** By default, a pipeline is bakeable which means it may be baked even when the pipeline manager is - * requested to provide a Vulkan pipeline handle for another pipeline. In such cases, the manager - * will try to bake Vulkan pipelines for all pipelines marked as dirty, which - in cases like when - * we are dealing with a graphics pipeline, where the pipeline stage resources, defined in shaders, - * need to be paired with actual descriptors at baking time - can result in dire crashes. + /** Returns post-compile GPU statistics about compute and graphics pipeline shaders like GPU register usage. + * Requires support for VK_AMD_shader_info extension. * - * Therefore, some manager users may mark certain pipelines as unbakeable and defer baking as long - * as needed. This needs to be done *explicitly*. + * @param in_pipeline_id ID of the pipeline. + * @param in_shader_stage The shader stage to collect post-compile information for. + * @param out_shader_statistics_ptr Pointer to VkShaderStatisticsInfoAMD struct to be filled in + * with statistics about requested shader stage. + * Pointer cannot be null. + * + * @return true if successful, false otherwise. 
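A short sketch of querying the VK_AMD_shader_info data described above; the extension check mirrors the is_extension_enabled() pattern used earlier in this diff, gfx_pipeline_manager_ptr/pipeline_id are stand-ins for the example's manager and pipeline, and the VGPR field comes from the stock VkShaderStatisticsInfoAMD struct:

    VkShaderStatisticsInfoAMD stats = {};

    if (m_device_ptr->is_extension_enabled("VK_AMD_shader_info")        &&
        gfx_pipeline_manager_ptr->get_shader_statistics(pipeline_id,
                                                        Anvil::ShaderStage::FRAGMENT,
                                                        &stats) )
    {
        printf("FS VGPRs used: %u\n",
               stats.resourceUsage.numUsedVgprs);
    }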
**/ - bool set_pipeline_bakeability(PipelineID in_pipeline_id, - bool in_bakeable); + bool get_shader_statistics(PipelineID in_pipeline_id, + Anvil::ShaderStage in_shader_stage, + VkShaderStatisticsInfoAMD* out_shader_statistics_ptr); protected: /* Protected type declarations */ - struct SpecializationConstant - { - uint32_t constant_id; - uint32_t n_bytes; - uint32_t start_offset; - - /** Dummy constructor. Should only be used by STL containers. */ - SpecializationConstant() - { - constant_id = UINT32_MAX; - n_bytes = UINT32_MAX; - start_offset = UINT32_MAX; - } - - /** Constructor. - * - * @param in_constant_id Specialization constant ID. - * @param in_n_bytes Number of bytes consumed by the constant. - * @param in_start_offset Start offset, at which the constant data starts. - */ - SpecializationConstant(uint32_t in_constant_id, - uint32_t in_n_bytes, - uint32_t in_start_offset) - { - constant_id = in_constant_id; - n_bytes = in_n_bytes; - start_offset = in_start_offset; - } - }; - - typedef std::vector SpecializationConstants; - typedef std::map ShaderIndexToSpecializationConstantsMap; /** Internal pipeline object descriptor */ - typedef struct Pipeline + typedef struct Pipeline : public MTSafetySupportProvider { - VkPipeline base_pipeline; - std::shared_ptr base_pipeline_ptr; - std::weak_ptr device_ptr; - - std::shared_ptr dsg_ptr; - PushConstantRanges push_constant_ranges; - std::vector specialization_constant_data_buffer; - ShaderIndexToSpecializationConstantsMap specialization_constants_map; - - std::vector shader_stages; - - bool layout_dirty; - std::shared_ptr layout_ptr; - - bool allow_derivatives; - VkPipeline baked_pipeline; - bool dirty; - bool disable_optimizations; - bool is_bakeable; - bool is_derivative; - bool is_proxy; - - /** Stores the specified shader modules and associates an empty specialization constant map for each - * shader. - * - * @param in_n_shader_module_stage_entrypoints Number of shader module stage entrypoint descriptors available under - * @param in_shader_module_stage_entrypoint_ptrs. Must be >= 1. - * @param in_shader_module_stage_entrypoint_ptrs Array of shader module stage entrypoint descriptors. Must hold - * @param in_n_shader_module_stage_entrypoints elements. Must not be nullptr. - **/ - void init_shader_modules(uint32_t in_n_shader_module_stage_entrypoints, - const ShaderModuleStageEntryPoint* in_shader_module_stage_entrypoint_ptrs) + VkPipeline baked_pipeline; + const BaseDevice* device_ptr; + Anvil::PipelineLayoutUniquePtr layout_ptr; + Anvil::BasePipelineCreateInfoUniquePtr pipeline_create_info_ptr; + + Pipeline(const Anvil::BaseDevice* in_device_ptr, + Anvil::BasePipelineCreateInfoUniquePtr in_pipeline_create_info_ptr, + bool in_mt_safe) + :MTSafetySupportProvider(in_mt_safe) { - shader_stages.reserve(in_n_shader_module_stage_entrypoints); - - for (uint32_t n_shader_module_stage_entrypoint = 0; - n_shader_module_stage_entrypoint < in_n_shader_module_stage_entrypoints; - ++n_shader_module_stage_entrypoint) - { - shader_stages.push_back(in_shader_module_stage_entrypoint_ptrs[n_shader_module_stage_entrypoint]); - - specialization_constants_map[n_shader_module_stage_entrypoint] = SpecializationConstants(); - } - } - - /** Releases pipeline & pipeline layout instances **/ - void release_vulkan_objects(); - - /** Constructor. Should be used to initialize a derivative pipeline object from an existing - * pipeline object managed by the pipeline manager. - * - * @param in_device_ptr Device to use. 
- * @param in_disable_optimizations If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT flag. - * @param in_allow_derivatives If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag. - * @param in_base_pipeline_ptr Pointer to the internal Pipeline descriptor. Must not be nullptr. - * Must not refer to a proxy pipeline. - * @param in_n_shader_module_stage_entrypoints Number of shader module stage entrypoint descriptors available under - * @param in_shader_module_stage_entrypoint_ptrs. Must be >= 1. - * @param in_shader_module_stage_entrypoint_ptrs Array of shader module stage entrypoint descriptors. Must hold - * @param in_n_shader_module_stage_entrypoints elements. Must not be nullptr. - **/ - Pipeline(std::weak_ptr in_device_ptr, - bool in_disable_optimizations, - bool in_allow_derivatives, - std::shared_ptr in_base_pipeline_ptr, - uint32_t in_n_shader_module_stage_entrypoints, - const ShaderModuleStageEntryPoint* in_shader_module_stage_entrypoint_ptrs) - { - allow_derivatives = in_allow_derivatives; - baked_pipeline = VK_NULL_HANDLE; - base_pipeline = VK_NULL_HANDLE; - base_pipeline_ptr = in_base_pipeline_ptr; - device_ptr = in_device_ptr; - dirty = true; - disable_optimizations = in_disable_optimizations; - is_bakeable = true; - is_derivative = true; - is_proxy = false; - layout_dirty = true; - - init_shader_modules(in_n_shader_module_stage_entrypoints, - in_shader_module_stage_entrypoint_ptrs); - - anvil_assert(!base_pipeline_ptr->is_proxy); - } - - /** Constructor. Should be used to initialize a derivative pipeline object from an existing - * raw Vulkan pipeline handle. - * - * @param in_device_ptr Device to use. - * @param in_disable_optimizations If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT flag. - * @param in_allow_derivatives If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag. - * @param in_base_pipeline Vulkan Pipeline handle. - * @param in_n_shader_module_stage_entrypoints Number of shader module stage entrypoint descriptors available under - * @param in_shader_module_stage_entrypoint_ptrs. Must be >= 1. - * @param in_shader_module_stage_entrypoint_ptrs Array of shader module stage entrypoint descriptors. Must hold - * @param in_n_shader_module_stage_entrypoints elements. Must not be nullptr. - **/ - Pipeline(std::weak_ptr in_device_ptr, - bool in_disable_optimizations, - bool in_allow_derivatives, - VkPipeline in_base_pipeline, - uint32_t in_n_shader_module_stage_entrypoints, - const ShaderModuleStageEntryPoint* in_shader_module_stage_entrypoint_ptrs) - { - allow_derivatives = in_allow_derivatives; - baked_pipeline = VK_NULL_HANDLE; - base_pipeline = in_base_pipeline; - device_ptr = in_device_ptr; - dirty = true; - disable_optimizations = in_disable_optimizations; - is_bakeable = true; - is_derivative = true; - is_proxy = false; - layout_dirty = true; - - init_shader_modules(in_n_shader_module_stage_entrypoints, - in_shader_module_stage_entrypoint_ptrs); - } - - /** Constructor. Should be used to initialize a regular pipeline object. - * - * @param in_device_ptr Device to use. - * @param in_disable_optimizations If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT flag. - * @param in_allow_derivatives If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag. 
- * @param in_n_shader_module_stage_entrypoints Number of shader module stage entrypoint descriptors available under - * @param in_shader_module_stage_entrypoint_ptrs. Must be >= 1. - * @param in_shader_module_stage_entrypoint_ptrs Array of shader module stage entrypoint descriptors. Must hold - * @param in_n_shader_module_stage_entrypoints elements. Must not be nullptr. - * @param in_is_proxy true if the created pipeline is a proxy pipeline; false otherwise. - * - **/ - Pipeline(std::weak_ptr in_device_ptr, - bool in_disable_optimizations, - bool in_allow_derivatives, - uint32_t in_n_shader_module_stage_entrypoints, - const ShaderModuleStageEntryPoint* in_shader_module_stage_entrypoint_ptrs, - bool in_is_proxy) - { - allow_derivatives = in_allow_derivatives; - baked_pipeline = VK_NULL_HANDLE; - base_pipeline = VK_NULL_HANDLE; - device_ptr = in_device_ptr; - dirty = true; - disable_optimizations = in_disable_optimizations; - is_bakeable = true; - is_derivative = false; - is_proxy = in_is_proxy; - layout_dirty = true; - - init_shader_modules(in_n_shader_module_stage_entrypoints, - in_shader_module_stage_entrypoint_ptrs); - - anvil_assert(!is_proxy || - is_proxy && in_n_shader_module_stage_entrypoints == 0); + baked_pipeline = VK_NULL_HANDLE; + device_ptr = in_device_ptr; + pipeline_create_info_ptr = std::move(in_pipeline_create_info_ptr); } /** Destrutor. Releases the internally managed Vulkan objects. */ ~Pipeline() { - release_vulkan_objects(); + release_pipeline(); } private: Pipeline(); + /** Releases pipeline & pipeline layout instances **/ + void release_pipeline(); } Pipeline; - typedef std::map > Pipelines; + typedef std::map > Pipelines; /* Protected functions */ - /** Registers a new derivative pipeline which is going to inherit state from another pipeline object, - * which has already been added to the pipeline manager. - * - * The function does not automatically bake the new pipeline. This action can either be explicitly - * requested by calling bake(), or by calling one of the get_*() functions. - * - * @param in_disable_optimizations If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT flag. - * @param in_allow_derivatives If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag. - * @param in_n_shader_module_stage_entrypoints Number of shader module stage entrypoint descriptors available under - * @param in_shader_module_stage_entrypoint_ptrs. Must be >= 1. - * @param in_shader_module_stage_entrypoint_ptrs Array of shader module stage entrypoint descriptors. Must hold - * @param in_n_shader_module_stage_entrypoints elements. Must not be nullptr. - * @param in_base_pipeline_id ID of the pipeline, which should be used as base for the new pipeline - * object. Must be an ID earlier returned by one of the other add_() functions. - * @param out_pipeline_id_ptr Deref will be set to the ID of the result pipeline object. Must not be nullptr. - * - * @return true if the function executed successfully, false otherwise. - **/ - bool add_derivative_pipeline_from_sibling_pipeline(bool in_disable_optimizations, - bool in_allow_derivatives, - uint32_t in_n_shader_module_stage_entrypoints, - const ShaderModuleStageEntryPoint* in_shader_module_stage_entrypoint_ptrs, - PipelineID in_base_pipeline_id, - PipelineID* out_pipeline_id_ptr); - - /** Registers a new derivative pipeline which is going to inherit its state from another Vulkan pipeline object. 
- * - * The function does not automatically bake the new pipeline. This action can either be explicitly - * requested by calling bake(), or by calling one of the get_*() functions. - * - * @param in_disable_optimizations If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT flag. - * @param in_allow_derivatives If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag. - * @param in_n_shader_module_stage_entrypoints Number of shader module stage entrypoint descriptors available under - * @param in_shader_module_stage_entrypoint_ptrs. Must be >= 1. - * @param in_shader_module_stage_entrypoint_ptrs Array of shader module stage entrypoint descriptors. Must hold - * @param in_n_shader_module_stage_entrypoints elements. Must not be nullptr. - * @param in_base_pipeline Handle of the pipeline, which should be used as base for the new pipeline - * object. Must not be nullptr. - * @param out_pipeline_id_ptr Deref will be set to the ID of the result pipeline object. Must not be nullptr. - * - * @return true if the function executed successfully, false otherwise. - **/ - bool add_derivative_pipeline_from_pipeline(bool in_disable_optimizations, - bool in_allow_derivatives, - uint32_t in_n_shader_module_stage_entrypoints, - const ShaderModuleStageEntryPoint* in_shader_module_stage_entrypoint_ptrs, - VkPipeline in_base_pipeline, - PipelineID* out_pipeline_id_ptr); - - /** Registers a new proxy pipeline. A proxy pipeline cannot be baked, but it can hold state data and act - * as a parent for other pipelines, which inherit their state at creation time. - * - * @param out_pipeline_id_ptr Deref will be set to the ID of the result pipeline object. Must not be nullptr. - * - * @return true if successful, false otherwise. - **/ - bool add_proxy_pipeline(PipelineID* out_pipeline_id_ptr); - - /** Registers a new pipeline object. - * - * The function does not automatically bake the new pipeline. This action can either be explicitly - * requested by calling bake(), or by calling one of the get_*() functions. - * - * @param in_disable_optimizations If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT flag. - * @param in_allow_derivatives If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag. - * @param in_n_shader_module_stage_entrypoints Number of shader module stage entrypoint descriptors available under - * @param in_shader_module_stage_entrypoint_ptrs. Must be >= 1. - * @param in_shader_module_stage_entrypoint_ptrs Array of shader module stage entrypoint descriptors. Must hold - * @param in_n_shader_module_stage_entrypoints elements. Must not be nullptr. - * @param out_pipeline_id_ptr Deref will be set to the ID of the result pipeline object. Must not be nullptr. - * - * @return true if the function executed successfully, false otherwise. - **/ - bool add_regular_pipeline(bool in_disable_optimizations, - bool in_allow_derivatives, - uint32_t in_n_shader_module_stage_entrypoints, - const ShaderModuleStageEntryPoint* in_shader_module_stage_entrypoint_ptrs, - PipelineID* out_pipeline_id_ptr); - - /** Adds a new specialization constant to the specified pipeline object. - * - * This function marks the pipeline as dirty, meaning it will be rebaked at the next get_() call. - * - * @param in_pipeline_id ID of the pipeline to add the constant to. Must be an ID returned - * by one of the add_() functions. Must not describe a proxy pipeline. 
- * @param in_shader_index Index of the shader, with which the new specialization constant should be - * associated. - * @param in_constant_id ID of the specialization constant to assign data for. - * @param in_n_data_bytes Number of bytes under @param in_data_ptr to assign to the specialization constant. - * @param in_data_ptr A buffer holding the data to be assigned to the constant. Must hold at least - * @param in_n_data_bytes bytes that will be read by the function. + /** Constructor. Initializes base layer of a pipeline manager. * - * @return true if successful, false otherwise. + * @param in_device_ptr Device to use. + * @param in_mt_safe True if more than one thread at a time is going to be issuing calls against the pipeline manager. + * @param in_use_pipeline_cache true if a pipeline cache should be used to spawn new pipeline objects. + * What pipeline cache ends up being used depends on @param in_pipeline_cache_to_reuse_ptr - + * if a nullptr object is passed via this argument, a new pipeline cache instance will + * be created, and later released by the destructor. If a non-nullptr object is passed, + * it will be used instead. + * @param in_pipeline_cache_to_reuse_ptr Please see above. **/ - bool add_specialization_constant_to_pipeline(PipelineID in_pipeline_id, - ShaderIndex in_shader_index, - uint32_t in_constantID, - uint32_t in_n_data_bytes, - const void* in_data_ptr); + explicit BasePipelineManager(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe, + bool in_use_pipeline_cache, + Anvil::PipelineCache* in_pipeline_cache_to_reuse_ptr); /** Fills & returns a VkSpecializationInfo descriptor. Any sub-descriptors, to which the baked descriptor * is going to point at, are stored in a vector provided by the caller. It is caller's responsibility to @@ -489,49 +212,21 @@ namespace Anvil **/ void bake_specialization_info_vk(const SpecializationConstants& in_specialization_constants, const unsigned char* in_specialization_constant_data_ptr, + const uint32_t in_specialization_constant_data_size, std::vector* out_specialization_map_entry_vk_vector, - VkSpecializationInfo* out_specialization_info_ptr); - - /** Deletes an existing pipeline. - * - * @param in_pipeline_id ID of a pipeline to delete. - * - * @return true if successful, false otherwise. - **/ - bool delete_pipeline(PipelineID in_pipeline_id); - - /** Retrieves a VkPipeline instance associated with the specified pipeline ID. - * - * The function will bake a pipeline object (and, possibly, a pipeline layout object, too) if - * the specified pipeline is marked as dirty. - * - * @param in_pipeline_id ID of the pipeline to return the raw Vulkan pipeline handle for. Must not - * describe a proxy pipeline. - * - * @return VkPipeline handle or nullptr if the function failed. - **/ - VkPipeline get_pipeline(PipelineID in_pipeline_id); - - /** Retrieves a PipelineLayout instance associated with the specified pipeline ID. - * - * The function will bake a pipeline object (and, possibly, a pipeline layout object, too) if - * the specified pipeline is marked as dirty. - * - * @param in_pipeline_id ID of the pipeline to return the wrapper instance for. - * Must not describe a proxy pipeline. - * - * @return Ptr to a PipelineLayout instance or nullptr if the function failed. 
- **/ - std::shared_ptr get_pipeline_layout(PipelineID in_pipeline_id); + VkSpecializationInfo* out_specialization_info_ptr) const; /* Protected members */ - std::weak_ptr m_device_ptr; - uint32_t m_pipeline_counter; - Pipelines m_pipelines; + const Anvil::BaseDevice* m_device_ptr; + std::atomic m_pipeline_counter; + + Pipelines m_baked_pipelines; + Pipelines m_outstanding_pipelines; - std::shared_ptr m_pipeline_cache_ptr; - std::shared_ptr m_pipeline_layout_manager_ptr; - bool m_use_pipeline_cache; + Anvil::PipelineCache* m_pipeline_cache_ptr; + PipelineCacheUniquePtr m_pipeline_cache_owned_ptr; + PipelineLayoutManager* m_pipeline_layout_manager_ptr; + bool m_use_pipeline_cache; private: /* Private functions */ diff --git a/include/misc/buffer_create_info.h b/include/misc/buffer_create_info.h new file mode 100644 index 00000000..c2ca4bc1 --- /dev/null +++ b/include/misc/buffer_create_info.h @@ -0,0 +1,290 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef MISC_BUFFER_CREATE_INFO_H +#define MISC_BUFFER_CREATE_INFO_H + +#include "misc/types.h" + + +namespace Anvil +{ + class BufferCreateInfo + { + public: + /* Public functions */ + + /** Creates a create info for a buffer object. + * + * A buffer instance created using the returned info instance WILL ALLOCATE and have a unique memory block BOUND to the object. + * Do NOT call Buffer::set_memory() to configure the binding. + * + * The constructor can optionally upload data to the initialized memory. + * + * The following default values are assumed, unless specified with separate set_..() invocations issued against the result instance: + * + * - Client data to fill the result buffer with after mem alloc is bound to the buffer: none + * - External memory handle types: none + * - MT safety: MTSafety::INHERIT_FROM_PARENT_DEVICE + * + * @param in_device_ptr Device to use. + * @param in_size Size of the buffer object to be initialized. + * @param in_queue_families Queue families which the buffer object is going to be used with. + * One or more user queue family bits can be enabled. + * @param in_sharing_mode Sharing mode to pass to the vkCreateBuffer() call. + * @param in_create_flags Create flags to use. Must not include SPARSE_ALIASED, SPARSE_BINDING and SPARSE_RESIDENCY bits. + * @param in_usage_flags Usage flags to set in the VkBufferCreateInfo descriptor, passed + * to the vkCreateBuffer() call.
+ * @param in_memory_features Required memory features. + **/ + static Anvil::BufferCreateInfoUniquePtr create_alloc(const Anvil::BaseDevice* in_device_ptr, + VkDeviceSize in_size, + Anvil::QueueFamilyFlags in_queue_families, + Anvil::SharingMode in_sharing_mode, + Anvil::BufferCreateFlags in_create_flags, + Anvil::BufferUsageFlags in_usage_flags, + Anvil::MemoryFeatureFlags in_memory_features); + + /** Creates a create info for a buffer object. + * + * A buffer instance created using the returned info instance will NOT allocate or have any memory blocks bound + * to itself. It is the user's responsibility to call Buffer::set_memory() to configure the binding. + * + * The following default values are assumed, unless specified with separate set_..() invocations issued against the result instance: + * + * - External memory handle types: none + * - MT safety: MTSafety::INHERIT_FROM_PARENT_DEVICE + * + * @param in_device_ptr Device to use. + * @param in_size Size of the buffer object to be initialized. + * @param in_queue_families Queue families which the buffer object is going to be used with. + * One or more user queue family bits can be enabled. + * @param in_sharing_mode Sharing mode to pass to the vkCreateBuffer() call. + * @param in_usage_flags Usage flags to set in the VkBufferCreateInfo descriptor, passed + * to the vkCreateBuffer() call. + **/ + static Anvil::BufferCreateInfoUniquePtr create_no_alloc(const Anvil::BaseDevice* in_device_ptr, + VkDeviceSize in_size, + Anvil::QueueFamilyFlags in_queue_families, + Anvil::SharingMode in_sharing_mode, + Anvil::BufferCreateFlags in_create_flags, + Anvil::BufferUsageFlags in_usage_flags); + + /** Creates a create info for a buffer object. + * + * The new buffer will reuse a region of the specified buffer's storage, instead of creating its own. + * + * It is the user's responsibility to ensure memory aliasing or synchronization is used, according + * to the spec rules. + * + * @param in_parent_nonsparse_buffer_ptr Specifies the buffer, whose memory block should be used. Must not be + * nullptr. MUST BE NON-SPARSE. + * @param in_start_offset Memory region's start offset. + * @param in_size Size of the memory region to "claim". + **/ + static Anvil::BufferCreateInfoUniquePtr create_no_alloc_child(Anvil::Buffer* in_parent_nonsparse_buffer_ptr, + VkDeviceSize in_start_offset, + VkDeviceSize in_size); + + const void* get_client_data() const + { + return m_client_data_ptr; + } + + const Anvil::BufferCreateFlags& get_create_flags() const + { + return m_create_flags; + } + + const Anvil::BaseDevice* get_device() const + { + return m_device_ptr; + } + + const Anvil::ExternalMemoryHandleTypeFlags& get_exportable_external_memory_handle_types() const + { + return m_exportable_external_memory_handle_types; + } + + const Anvil::MemoryFeatureFlags& get_memory_features() const + { + return m_memory_features; + } + + const Anvil::MTSafety& get_mt_safety() const + { + return m_mt_safety; + } + + /** Returns a pointer to the parent buffer, if one was specified at creation time */ + Anvil::Buffer* get_parent_buffer_ptr() const + { + return m_parent_buffer_ptr; + } + + /** Returns info about queue families this buffer has been created for */ + Anvil::QueueFamilyFlags get_queue_families() const + { + return m_queue_families; + } + + /** Returns sharing mode of the buffer */ + Anvil::SharingMode get_sharing_mode() const + { + return m_sharing_mode; + } + + /** Returns size of the encapsulated Vulkan buffer memory region.
+ * + * @return >= 0 if successful, UINT64_MAX otherwise */ + VkDeviceSize get_size() const + { + anvil_assert(m_size != 0); + + return m_size; + } + + /** Returns start offset of the encapsulated Vulkan buffer memory region. + * + * @return >= 0 if successful, UINT64_MAX otherwise */ + VkDeviceSize get_start_offset() const + { + return m_start_offset; + } + + const Anvil::BufferType& get_type() const + { + return m_type; + } + + const Anvil::BufferUsageFlags get_usage_flags() const + { + return m_usage_flags; + } + + /** Use to specify contents which should be uploaded to a buffer at memory block assignment time. + * + * Note that this setting will be ignored for partially-resident buffers. + * + * The specified pointer must remain valid until Buffer::set_nonsparse_memory() call time. + * + * @param in_client_data_ptr Pointer to data storage holding contents the created buffer should be filled with. + * Must remain valid until memory block assignment time. + */ + void set_client_data(const void* const in_client_data_ptr) + { + m_client_data_ptr = in_client_data_ptr; + } + + void set_device(const Anvil::BaseDevice* in_device_ptr) + { + m_device_ptr = in_device_ptr; + } + + void set_exportable_external_memory_handle_types(const Anvil::ExternalMemoryHandleTypeFlags& in_external_memory_handle_types) + { + m_exportable_external_memory_handle_types = in_external_memory_handle_types; + } + + void set_memory_features(const Anvil::MemoryFeatureFlags& in_memory_features) + { + m_memory_features = in_memory_features; + } + + void set_mt_safety(const Anvil::MTSafety& in_mt_safety) + { + m_mt_safety = in_mt_safety; + } + + void set_queue_families(const Anvil::QueueFamilyFlags& in_queue_families) + { + m_queue_families = in_queue_families; + } + + void set_sharing_mode(const Anvil::SharingMode& in_sharing_mode) + { + m_sharing_mode = in_sharing_mode; + } + + void set_size(const VkDeviceSize& in_size) + { + m_size = in_size; + } + + void set_start_offset(const VkDeviceSize& in_start_offset) + { + m_start_offset = in_start_offset; + } + + void set_usage_flags(const Anvil::BufferUsageFlags& in_usage_flags) + { + m_usage_flags = in_usage_flags; + } + + private: + /* Private type definitions */ + + /* Private functions */ + + BufferCreateInfo(const Anvil::BaseDevice* in_device_ptr, + VkDeviceSize in_size, + Anvil::QueueFamilyFlags in_queue_families, + Anvil::SharingMode in_sharing_mode, + Anvil::BufferCreateFlags in_create_flags, + Anvil::BufferUsageFlags in_usage_flags, + MTSafety in_mt_safety, + Anvil::ExternalMemoryHandleTypeFlags in_exportable_external_memory_handle_types); + BufferCreateInfo(const Anvil::BufferType& in_buffer_type, + const Anvil::BaseDevice* in_device_ptr, + VkDeviceSize in_size, + Anvil::QueueFamilyFlags in_queue_families, + Anvil::SharingMode in_sharing_mode, + Anvil::BufferCreateFlags in_create_flags, + Anvil::BufferUsageFlags in_usage_flags, + MemoryFeatureFlags in_memory_features, + MTSafety in_mt_safety, + Anvil::ExternalMemoryHandleTypeFlags in_exportable_external_memory_handle_types, + const void* in_opt_client_data_ptr); + BufferCreateInfo(Anvil::Buffer* in_parent_buffer_ptr, + VkDeviceSize in_start_offset, + VkDeviceSize in_size); + + + /* Private variables */ + const void* m_client_data_ptr; + Anvil::BufferCreateFlags m_create_flags; + const Anvil::BaseDevice* m_device_ptr; + Anvil::ExternalMemoryHandleTypeFlags m_exportable_external_memory_handle_types; + Anvil::MemoryFeatureFlags m_memory_features; + MTSafety m_mt_safety; + Anvil::Buffer* const m_parent_buffer_ptr; + 
Anvil::QueueFamilyFlags m_queue_families; + Anvil::SharingMode m_sharing_mode; + VkDeviceSize m_size; + VkDeviceSize m_start_offset; + const BufferType m_type; + Anvil::BufferUsageFlags m_usage_flags; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(BufferCreateInfo); + ANVIL_DISABLE_COPY_CONSTRUCTOR(BufferCreateInfo); + }; +}; /* namespace Anvil */ +#endif /* MISC_BUFFER_CREATE_INFO_H */ diff --git a/include/misc/buffer_view_create_info.h b/include/misc/buffer_view_create_info.h new file mode 100644 index 00000000..982373bf --- /dev/null +++ b/include/misc/buffer_view_create_info.h @@ -0,0 +1,135 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef MISC_BUFFER_VIEW_CREATE_INFO_H +#define MISC_BUFFER_VIEW_CREATE_INFO_H + +#include "misc/types.h" + + +namespace Anvil +{ + class BufferViewCreateInfo + { + public: + /* Public functions */ + + /* TODO. 
+ * + * NOTE: Unless specified later with a corresponding set_..() invocation, the following parameters are assumed by default: + * + * - MT safety: MTSafety::INHERIT_FROM_PARENT_DEVICE + */ + static Anvil::BufferViewCreateInfoUniquePtr create(const Anvil::BaseDevice* in_device_ptr, + Anvil::Buffer* in_buffer_ptr, + Anvil::Format in_format, + VkDeviceSize in_start_offset, + VkDeviceSize in_size); + + const Anvil::BaseDevice* get_device() const + { + return m_device_ptr; + } + + /** Returns format used by the buffer view */ + Anvil::Format get_format() const + { + return m_format; + } + + const Anvil::MTSafety& get_mt_safety() const + { + return m_mt_safety; + } + + /** Returns pointer to the parent buffer instance */ + Anvil::Buffer* get_parent_buffer() const + { + return m_buffer_ptr; + } + + /** Returns size of the encapsulated buffer memory region */ + VkDeviceSize get_size() const + { + return m_size; + } + + /** Returns start offset of the encapsulated buffer memory region */ + VkDeviceSize get_start_offset() const + { + return m_start_offset; + } + + void set_device(const Anvil::BaseDevice* in_device_ptr) + { + m_device_ptr = in_device_ptr; + } + + void set_format(const Anvil::Format& in_format) + { + m_format = in_format; + } + + void set_mt_safety(const Anvil::MTSafety& in_mt_safety) + { + m_mt_safety = in_mt_safety; + } + + /** Returns pointer to the parent buffer instance */ + void set_parent_buffer(Anvil::Buffer* in_buffer_ptr) + { + m_buffer_ptr = in_buffer_ptr; + } + + void set_size(const VkDeviceSize& in_size) + { + m_size = in_size; + } + + void set_start_offset(const VkDeviceSize& in_start_offset) + { + m_start_offset = in_start_offset; + } + + private: + /* Private functions */ + + BufferViewCreateInfo(const Anvil::BaseDevice* in_device_ptr, + Anvil::Buffer* in_buffer_ptr, + Anvil::Format in_format, + VkDeviceSize in_start_offset, + VkDeviceSize in_size, + MTSafety in_mt_safety); + + + /* Private variables */ + Anvil::Buffer* m_buffer_ptr; + const Anvil::BaseDevice* m_device_ptr; + Anvil::Format m_format; + Anvil::MTSafety m_mt_safety; + VkDeviceSize m_size; + VkDeviceSize m_start_offset; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(BufferViewCreateInfo); + ANVIL_DISABLE_COPY_CONSTRUCTOR(BufferViewCreateInfo); + }; +}; /* namespace Anvil */ +#endif /* MISC_BUFFER_VIEW_CREATE_INFO_H */ \ No newline at end of file diff --git a/include/misc/callbacks.h b/include/misc/callbacks.h index a2f0079a..51fb1c1c 100644 --- a/include/misc/callbacks.h +++ b/include/misc/callbacks.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
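To make the new create-info flow above concrete, here is a hedged sketch that fills in a BufferCreateInfo and a BufferViewCreateInfo using only the entrypoints shown in these two headers. Every enumerator marked "assumed" below (GRAPHICS_BIT, EXCLUSIVE, NONE, UNIFORM_TEXEL_BUFFER_BIT, MAPPABLE_BIT, ENABLED, R32_SFLOAT) is an assumption about members not visible in this diff, and the final hand-off to the Buffer/BufferView wrapper factories is only referenced in comments.

#include "misc/buffer_create_info.h"
#include "misc/buffer_view_create_info.h"

/* Hedged sketch: builds create-info objects for a small, host-mappable uniform texel buffer
 * and a view covering all of it. Enumerators marked "assumed" may differ in the real headers. */
Anvil::BufferCreateInfoUniquePtr make_buffer_ci(const Anvil::BaseDevice* in_device_ptr)
{
    auto ci_ptr = Anvil::BufferCreateInfo::create_alloc(in_device_ptr,
                                                        4096, /* in_size */
                                                        Anvil::QueueFamilyFlagBits::GRAPHICS_BIT,             /* assumed */
                                                        Anvil::SharingMode::EXCLUSIVE,
                                                        Anvil::BufferCreateFlagBits::NONE,                    /* assumed */
                                                        Anvil::BufferUsageFlagBits::UNIFORM_TEXEL_BUFFER_BIT, /* assumed */
                                                        Anvil::MemoryFeatureFlagBits::MAPPABLE_BIT);          /* assumed */

    /* Optional properties are configured through set_..() calls instead of extra constructor arguments: */
    ci_ptr->set_mt_safety(Anvil::MTSafety::ENABLED); /* enumerator assumed; the default is INHERIT_FROM_PARENT_DEVICE */

    /* The info object would then be consumed by the buffer wrapper's factory, e.g. Anvil::Buffer::create(std::move(ci_ptr)). */
    return ci_ptr;
}

Anvil::BufferViewCreateInfoUniquePtr make_buffer_view_ci(const Anvil::BaseDevice* in_device_ptr,
                                                         Anvil::Buffer*           in_buffer_ptr)
{
    return Anvil::BufferViewCreateInfo::create(in_device_ptr,
                                               in_buffer_ptr,
                                               Anvil::Format::R32_SFLOAT, /* assumed */
                                               0,     /* in_start_offset */
                                               4096); /* in_size */
}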
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -72,10 +72,10 @@ namespace Anvil typedef struct IsBufferMemoryAllocPendingQueryCallbackArgument : public Anvil::CallbackArgument { - std::shared_ptr buffer_ptr; - bool result; + const Anvil::Buffer* buffer_ptr; + bool result; - explicit IsBufferMemoryAllocPendingQueryCallbackArgument(std::shared_ptr in_buffer_ptr) + explicit IsBufferMemoryAllocPendingQueryCallbackArgument(const Anvil::Buffer* in_buffer_ptr) :buffer_ptr(in_buffer_ptr), result (false) { @@ -87,7 +87,7 @@ namespace Anvil typedef struct IsImageMemoryAllocPendingQueryCallbackArgument : Anvil::CallbackArgument { - explicit IsImageMemoryAllocPendingQueryCallbackArgument(std::shared_ptr in_image_ptr) + explicit IsImageMemoryAllocPendingQueryCallbackArgument(const Anvil::Image* in_image_ptr) :image_ptr(in_image_ptr), result (false) { @@ -96,13 +96,13 @@ namespace Anvil IsImageMemoryAllocPendingQueryCallbackArgument& operator=(const IsImageMemoryAllocPendingQueryCallbackArgument&) = delete; - const std::shared_ptr image_ptr; - bool result; + const Anvil::Image* image_ptr; + bool result; } IsImageMemoryAllocPendingQueryCallbackArgument; typedef struct OnDescriptorPoolResetCallbackArgument : public Anvil::CallbackArgument { - DescriptorPool* descriptor_pool_ptr; + const DescriptorPool* descriptor_pool_ptr; explicit OnDescriptorPoolResetCallbackArgument(DescriptorPool* in_descriptor_pool_ptr) { @@ -112,14 +112,16 @@ namespace Anvil typedef struct OnGLSLToSPIRVConversionAboutToBeStartedCallbackArgument : public Anvil::CallbackArgument { - GLSLShaderToSPIRVGenerator* generator_ptr; + const GLSLShaderToSPIRVGenerator* generator_ptr; - explicit OnGLSLToSPIRVConversionAboutToBeStartedCallbackArgument(GLSLShaderToSPIRVGenerator* in_generator_ptr) + explicit OnGLSLToSPIRVConversionAboutToBeStartedCallbackArgument(const GLSLShaderToSPIRVGenerator* in_generator_ptr) { generator_ptr = in_generator_ptr; } } OnGLSLToSPIRVConversionAboutToBeStartedCallbackArgument; + typedef OnGLSLToSPIRVConversionAboutToBeStartedCallbackArgument OnGLSLToSPIRVConversionFinishedCallbackArgument; + typedef struct OnKeypressReleasedCallbackArgument : public Anvil::CallbackArgument { KeyID released_key_id; @@ -141,7 +143,7 @@ namespace Anvil typedef struct OnMemoryBlockNeededForBufferCallbackArgument : public Anvil::CallbackArgument { - explicit OnMemoryBlockNeededForBufferCallbackArgument(std::shared_ptr in_buffer_ptr) + explicit OnMemoryBlockNeededForBufferCallbackArgument(const Anvil::Buffer* in_buffer_ptr) :buffer_ptr(in_buffer_ptr) { /* Stub */ @@ -149,12 +151,12 @@ namespace Anvil OnMemoryBlockNeededForBufferCallbackArgument& operator=(const OnMemoryBlockNeededForBufferCallbackArgument&) = delete; - const std::shared_ptr buffer_ptr; + const Anvil::Buffer* buffer_ptr; } OnMemoryBlockNeededForBufferCallbackArgument; typedef struct OnMemoryBlockNeededForImageCallbackArgument : public Anvil::CallbackArgument { - explicit OnMemoryBlockNeededForImageCallbackArgument(std::shared_ptr in_image_ptr) + explicit OnMemoryBlockNeededForImageCallbackArgument(const Anvil::Image* in_image_ptr) :image_ptr(in_image_ptr) { /* Stub */ @@ -162,19 +164,29 @@ namespace Anvil OnMemoryBlockNeededForImageCallbackArgument& operator=(const OnMemoryBlockNeededForImageCallbackArgument&) = delete; - const std::shared_ptr image_ptr; + const Anvil::Image* image_ptr; } OnMemoryBlockNeededForImageCallbackArgument; typedef struct 
OnNewBindingAddedToDescriptorSetLayoutCallbackArgument : public Anvil::CallbackArgument { - DescriptorSetLayout* descriptor_set_layout_ptr; + const DescriptorSetLayout* descriptor_set_layout_ptr; - explicit OnNewBindingAddedToDescriptorSetLayoutCallbackArgument(DescriptorSetLayout* in_descriptor_set_layout_ptr) + explicit OnNewBindingAddedToDescriptorSetLayoutCallbackArgument(const DescriptorSetLayout* in_descriptor_set_layout_ptr) { descriptor_set_layout_ptr = in_descriptor_set_layout_ptr; } } OnNewBindingAddedToDescriptorSetLayoutCallbackArgument; + typedef struct OnNewPipelineCreatedCallbackData : public Anvil::CallbackArgument + { + PipelineID new_pipeline_id; + + explicit OnNewPipelineCreatedCallbackData(PipelineID in_new_pipeline_id) + { + new_pipeline_id = in_new_pipeline_id; + } + } OnNewPipelineCreatedCallbackData; + typedef struct OnObjectRegisteredCallbackArgument : Anvil::CallbackArgument { void* object_raw_ptr; @@ -213,9 +225,9 @@ namespace Anvil typedef struct OnPresentRequestIssuedCallbackArgument : public Anvil::CallbackArgument { - Swapchain* swapchain_ptr; + const Swapchain* swapchain_ptr; - explicit OnPresentRequestIssuedCallbackArgument(Swapchain* in_swapchain_ptr) + explicit OnPresentRequestIssuedCallbackArgument(const Swapchain* in_swapchain_ptr) { swapchain_ptr = in_swapchain_ptr; } @@ -223,9 +235,9 @@ namespace Anvil typedef struct OnRenderPassBakeNeededCallbackArgument : public Anvil::CallbackArgument { - RenderPass* renderpass_ptr; + const RenderPass* renderpass_ptr; - explicit OnRenderPassBakeNeededCallbackArgument(RenderPass* in_renderpass_ptr) + explicit OnRenderPassBakeNeededCallbackArgument(const RenderPass* in_renderpass_ptr) { renderpass_ptr = in_renderpass_ptr; } @@ -233,9 +245,9 @@ namespace Anvil typedef struct OnWindowAboutToCloseCallbackArgument : public Anvil::CallbackArgument { - Window* window_ptr; + const Window* window_ptr; - explicit OnWindowAboutToCloseCallbackArgument(Window* in_window_ptr) + explicit OnWindowAboutToCloseCallbackArgument(const Window* in_window_ptr) { window_ptr = in_window_ptr; } @@ -289,12 +301,9 @@ namespace Anvil **/ virtual ~CallbacksSupportProvider() { - if (m_callbacks != nullptr) - { - delete [] m_callbacks; + delete [] m_callbacks; - m_callbacks = nullptr; - } + m_callbacks = nullptr; } /* ICallbacksSupportClient interface implementation */ @@ -313,6 +322,8 @@ namespace Anvil CallbackFunction in_callback_function, void* in_callback_function_owner_ptr) const { + std::unique_lock mutex_lock(m_mutex); + anvil_assert(in_callback_id < m_callback_id_count); return std::find(m_callbacks[in_callback_id].begin(), @@ -339,6 +350,8 @@ namespace Anvil CallbackFunction in_callback_function, void* in_callback_owner_ptr) { + std::unique_lock mutex_lock(m_mutex); + anvil_assert(in_callback_id < m_callback_id_count); anvil_assert(in_callback_function != nullptr); anvil_assert(in_callback_owner_ptr != nullptr); @@ -375,6 +388,8 @@ namespace Anvil CallbackFunction in_callback_function, void* in_callback_function_owner_ptr) { + std::unique_lock mutex_lock(m_mutex); + anvil_assert(in_callback_id < m_callback_id_count); anvil_assert(in_callback_function != nullptr); anvil_assert(!m_callbacks_locked); @@ -406,8 +421,10 @@ namespace Anvil * @param in_callback_arg_ptr Call-back argument to use. 
**/ void callback(CallbackID in_callback_id, - CallbackArgument* in_callback_arg_ptr) + CallbackArgument* in_callback_arg_ptr) const { + std::unique_lock mutex_lock(m_mutex); + anvil_assert(in_callback_id < m_callback_id_count); anvil_assert(!m_callbacks_locked); @@ -446,6 +463,8 @@ namespace Anvil void callback_safe(CallbackID in_callback_id, CallbackArgument* in_callback_arg_ptr) { + std::unique_lock mutex_lock(m_mutex); + anvil_assert(in_callback_id < m_callback_id_count); anvil_assert(!m_callbacks_locked); @@ -494,6 +513,8 @@ namespace Anvil if (in_callback_id < m_callback_id_count) { + std::unique_lock mutex_lock(m_mutex); + result = static_cast(m_callbacks[in_callback_id].size() ); } @@ -531,9 +552,10 @@ namespace Anvil typedef std::vector Callbacks; /* Private variables */ - CallbackID m_callback_id_count; - Callbacks* m_callbacks; - volatile bool m_callbacks_locked; + CallbackID m_callback_id_count; + Callbacks* m_callbacks; + mutable volatile bool m_callbacks_locked; + mutable std::recursive_mutex m_mutex; }; } /* namespace Anvil */ diff --git a/include/misc/compute_pipeline_create_info.h b/include/misc/compute_pipeline_create_info.h new file mode 100644 index 00000000..e9f7c7ab --- /dev/null +++ b/include/misc/compute_pipeline_create_info.h @@ -0,0 +1,66 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// + +#ifndef ANVIL_COMPUTE_PIPELINE_CREATE_INFO_H +#define ANVIL_COMPUTE_PIPELINE_CREATE_INFO_H + +#include "misc/base_pipeline_create_info.h" + + +namespace Anvil +{ + class ComputePipelineCreateInfo : public BasePipelineCreateInfo + { + public: + /* Public functions */ + static ComputePipelineCreateInfoUniquePtr create (const Anvil::PipelineCreateFlags& in_create_flags, + const ShaderModuleStageEntryPoint& in_compute_shader_stage_entrypoint_info, + const Anvil::PipelineID* in_opt_base_pipeline_id_ptr = nullptr); + static ComputePipelineCreateInfoUniquePtr create_proxy(); + + /** Adds a new specialization constant. + * + * @param in_constant_id ID of the specialization constant to assign data for. + * @param in_n_data_bytes Number of bytes under @param in_data_ptr to assign to the specialization constant. + * @param in_data_ptr A buffer holding the data to be assigned to the constant. Must hold at least + * @param in_n_data_bytes bytes that will be read by the function. + * + * @return true if successful, false otherwise. 
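The new mutable std::recursive_mutex in CallbacksSupportProvider is what lets a handler call back into the same provider (for instance, to register or unregister another handler) from inside callback() without self-deadlocking. The following standalone sketch, deliberately independent of Anvil's own types, illustrates that locking pattern:

#include <cstddef>
#include <functional>
#include <mutex>
#include <vector>

/* Minimal re-entrant dispatcher: because the mutex is recursive, a handler running inside
 * fire() may call add() on the same thread without deadlocking. */
class Dispatcher
{
public:
    void add(std::function<void ()> in_callback)
    {
        std::unique_lock<std::recursive_mutex> lock(m_mutex);

        m_callbacks.push_back(std::move(in_callback) );
    }

    void fire()
    {
        std::unique_lock<std::recursive_mutex> lock(m_mutex);

        /* Index-based loop so that handlers appended mid-dispatch are also picked up. */
        for (std::size_t n_callback = 0; n_callback < m_callbacks.size(); ++n_callback)
        {
            m_callbacks[n_callback]();
        }
    }

private:
    std::recursive_mutex                 m_mutex;
    std::vector<std::function<void ()> > m_callbacks;
};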
+ **/ + bool add_specialization_constant(uint32_t in_constant_id, + uint32_t in_n_data_bytes, + const void* in_data_ptr) + { + return BasePipelineCreateInfo::add_specialization_constant(Anvil::ShaderStage::COMPUTE, + in_constant_id, + in_n_data_bytes, + in_data_ptr); + } + + ~ComputePipelineCreateInfo(); + + private: + ComputePipelineCreateInfo(); + }; +}; + +#endif /* ANVIL_COMPUTE_PIPELINE_CREATE_INFO_H */ diff --git a/include/misc/debug.h b/include/misc/debug.h index b8d20002..7e6ec299 100644 --- a/include/misc/debug.h +++ b/include/misc/debug.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -24,10 +24,13 @@ #ifndef MISC_DEBUG_H #define MISC_DEBUG_H -#include "misc/types.h" +#include +#include +#include +#define ENABLE_DEBUG_ASSERTIONS -#ifdef _DEBUG +#if defined(ENABLE_DEBUG_ASSERTIONS) #define anvil_assert(assertion) \ if (!(assertion)) \ { \ @@ -36,6 +39,14 @@ #assertion); \ } + #define anvil_assert_with_msg(assertion,msg) \ + if (!(assertion)) \ + { \ + Anvil::on_assertion_failed(__FILE__, \ + __LINE__, \ + msg); \ + } + #define anvil_assert_fail() \ Anvil::on_assertion_failed(__FILE__, \ __LINE__, \ @@ -46,15 +57,15 @@ #endif +//#define is_vk_call_successful(result) \ +// (result == VK_SUCCESS || result == VK_ERROR_VALIDATION_FAILED_EXT || result == VK_INCOMPLETE) #define is_vk_call_successful(result) \ - (result == VK_SUCCESS || result == VK_ERROR_VALIDATION_FAILED_EXT || result == VK_INCOMPLETE) - -#define anvil_assert_vk_call_succeeded(result) \ - anvil_assert( is_vk_call_successful(result) ) - + (result == VK_SUCCESS || result == VK_INCOMPLETE) namespace Anvil { + std::string result_to_string(int64_t result); + /** Function prototype of an assertion failure handler. * * @param in_filename File, from which the assertion failure originated. @@ -87,4 +98,7 @@ namespace Anvil void set_assertion_failure_handler(AssertionFailedCallbackFunction in_new_callback_func); }; -#endif /* MISC_DEBUG_H */ \ No newline at end of file +#define anvil_assert_vk_call_succeeded(result) \ + anvil_assert_with_msg( is_vk_call_successful(result),Anvil::result_to_string(result).c_str() ) + +#endif /* MISC_DEBUG_H */ diff --git a/include/misc/debug_marker.h b/include/misc/debug_marker.h index 18124643..ae727e64 100644 --- a/include/misc/debug_marker.h +++ b/include/misc/debug_marker.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -20,7 +20,7 @@ // THE SOFTWARE. // -/* Provides support for functionality introduced by VK_EXT_debug_marker extension. */ +/* Provides support for functionality introduced by VK_EXT_debug_marker extension, later subsumed by VK_EXT_debug_utils. */ #ifndef MISC_DEBUG_MARKER_H #define MISC_DEBUG_MARKER_H @@ -38,8 +38,8 @@ namespace Anvil /* Public functions */ /** Constructor. 
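A short, hypothetical sketch of the compute pipeline create-info path declared above: the caller is assumed to already hold a ShaderModuleStageEntryPoint for the compute shader, and Anvil::PipelineCreateFlagBits::NONE is an assumed enumerator (the flag members are not part of this diff).

#include "misc/compute_pipeline_create_info.h"

/* Hedged sketch: builds a compute pipeline create info and specializes constant ID 0. */
Anvil::ComputePipelineCreateInfoUniquePtr make_cs_pipeline_ci(const Anvil::ShaderModuleStageEntryPoint& in_cs_entrypoint)
{
    auto ci_ptr = Anvil::ComputePipelineCreateInfo::create(Anvil::PipelineCreateFlagBits::NONE, /* assumed */
                                                           in_cs_entrypoint);

    /* add_specialization_constant() reads the 4 bytes at call time, so a local is fine here. */
    const uint32_t workgroup_size = 64;

    ci_ptr->add_specialization_constant(0, /* in_constant_id */
                                        sizeof(workgroup_size),
                                        &workgroup_size);

    /* The info object would then be registered with the device's compute pipeline manager (not shown). */
    return ci_ptr;
}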
*/ - DebugMarkerSupportProviderWorker(std::weak_ptr in_device_ptr, - VkDebugReportObjectTypeEXT in_vk_object_type); + DebugMarkerSupportProviderWorker(const Anvil::BaseDevice* in_device_ptr, + const Anvil::ObjectType& in_vk_object_type); /** Destructor. */ ~DebugMarkerSupportProviderWorker() @@ -108,14 +108,24 @@ namespace Anvil void set_vk_handle_internal(uint64_t in_vk_object_handle); private: + /* Private type definitions */ + enum class DebugAPI + { + EXT_DEBUG_MARKER, + EXT_DEBUG_UTILS, + + NONE + }; + /* Private variables */ - std::weak_ptr m_device_ptr; - bool m_is_ext_debug_marker_available; - std::string m_object_name; - std::vector m_object_tag_data; - uint64_t m_object_tag_name; - uint64_t m_vk_object_handle; - VkDebugReportObjectTypeEXT m_vk_object_type; + const Anvil::BaseDevice* m_device_ptr; + std::mutex m_mutex; + std::string m_object_name; + std::vector m_object_tag_data; + uint64_t m_object_tag_name; + DebugAPI m_used_api; + uint64_t m_vk_object_handle; + Anvil::ObjectType m_vk_object_type; }; /** This class needs to be inherited from by all wrapper classes that wrap Vulkan objects. @@ -150,23 +160,24 @@ namespace Anvil * True to permit more than one handle to be used. * **/ - DebugMarkerSupportProvider(std::weak_ptr in_device_ptr, - VkDebugReportObjectTypeEXT in_vk_object_type, - bool in_use_delegate_workers = false) + DebugMarkerSupportProvider(const Anvil::BaseDevice* in_device_ptr, + const Anvil::ObjectType& in_object_type, + bool in_use_delegate_workers = false) { - anvil_assert(!in_device_ptr.expired() ); + anvil_assert(in_device_ptr != nullptr); + + m_device_ptr = in_device_ptr; if (!in_use_delegate_workers) { m_worker_ptr.reset( new DebugMarkerSupportProviderWorker(in_device_ptr, - in_vk_object_type) + in_object_type) ); } else { - m_device_ptr = in_device_ptr; - m_vk_object_type = in_vk_object_type; + m_vk_object_type = in_object_type; } } @@ -187,27 +198,35 @@ namespace Anvil */ void add_delegate(uint64_t in_vk_object_handle) { - std::shared_ptr new_delegate_ptr; - - anvil_assert(m_worker_ptr == nullptr); + Anvil::DebugMarkerSupportProviderWorker* new_delegate_raw_ptr = nullptr; - #ifdef _DEBUG { - for (auto delegate_ptr : m_delegate_workers) + std::unique_ptr new_delegate_ptr; + + anvil_assert(m_worker_ptr == nullptr); + + #ifdef _DEBUG { - anvil_assert(delegate_ptr->get_vk_handle_internal() != in_vk_object_handle); + for (auto& delegate_ptr : m_delegate_workers) + { + anvil_assert(delegate_ptr->get_vk_handle_internal() != in_vk_object_handle); + } } - } - #endif + #endif + + new_delegate_ptr.reset( + new Anvil::DebugMarkerSupportProviderWorker(m_device_ptr, + m_vk_object_type) + ); - new_delegate_ptr.reset( - new Anvil::DebugMarkerSupportProviderWorker(m_device_ptr, - m_vk_object_type) - ); + new_delegate_ptr->set_vk_handle_internal(in_vk_object_handle); - new_delegate_ptr->set_vk_handle_internal(in_vk_object_handle); + new_delegate_raw_ptr = new_delegate_ptr.get(); - m_delegate_workers.push_back(new_delegate_ptr); + m_delegate_workers.push_back( + std::move(new_delegate_ptr) + ); + } if (m_delegate_workers.size() > 1) { @@ -220,13 +239,13 @@ namespace Anvil existing_delegate_worker_ptr->get_tag(&existing_delegate_worker_tag_data_ptr, &existing_delegate_worker_tag_name); - new_delegate_ptr->set_name_internal(existing_delegate_worker_name.c_str() ); + new_delegate_raw_ptr->set_name_internal(existing_delegate_worker_name.c_str() ); if (existing_delegate_worker_tag_data_ptr->size() > 0) { - new_delegate_ptr->set_tag_internal (existing_delegate_worker_tag_name, - 
existing_delegate_worker_tag_data_ptr->size(), - &existing_delegate_worker_tag_data_ptr->at(0) ); + new_delegate_raw_ptr->set_tag_internal (existing_delegate_worker_tag_name, + existing_delegate_worker_tag_data_ptr->size(), + &existing_delegate_worker_tag_data_ptr->at(0) ); } } } @@ -248,7 +267,7 @@ namespace Anvil */ void remove_delegate(uint64_t in_vk_object_handle) { - std::vector >::iterator worker_iterator; + std::vector >::iterator worker_iterator; anvil_assert(m_worker_ptr == nullptr); @@ -256,7 +275,7 @@ namespace Anvil worker_iterator != m_delegate_workers.end(); ++worker_iterator) { - auto current_worker_ptr = *worker_iterator; + auto& current_worker_ptr = *worker_iterator; if (current_worker_ptr->get_vk_handle_internal() == in_vk_object_handle) { @@ -293,7 +312,7 @@ namespace Anvil } else { - for (auto worker_ptr : m_delegate_workers) + for (auto& worker_ptr : m_delegate_workers) { worker_ptr->set_name_internal(in_object_name); } @@ -329,13 +348,24 @@ namespace Anvil } else { - for (auto worker_ptr : m_delegate_workers) + for (auto& worker_ptr : m_delegate_workers) { worker_ptr->set_name_internal(buffer); } } } + std::string get_name() const + { + if (m_worker_ptr != nullptr) + { + return m_worker_ptr->get_name(); + } + if (m_delegate_workers.empty()) + return {}; + return m_delegate_workers.front()->get_name(); + } + /** Associates a user-specified tag data to with all maintained Vulkan object handles. * * May be called more than once. @@ -357,7 +387,7 @@ namespace Anvil } else { - for (auto worker_ptr : m_delegate_workers) + for (auto& worker_ptr : m_delegate_workers) { worker_ptr->set_tag_internal(in_tag_name, in_tag_size, @@ -393,13 +423,14 @@ namespace Anvil /* Private variables */ /* Only used if delegate workers have been requested at creation time: ==> */ - std::vector > m_delegate_workers; - std::weak_ptr m_device_ptr; - VkDebugReportObjectTypeEXT m_vk_object_type; + std::vector > m_delegate_workers; + const Anvil::BaseDevice* m_device_ptr; + Anvil::ObjectType m_vk_object_type; + /* <== */ /* Otherwise: ==> */ - std::shared_ptr m_worker_ptr; + std::unique_ptr m_worker_ptr; /* <== */ friend Wrapper; diff --git a/include/misc/debug_messenger_create_info.h b/include/misc/debug_messenger_create_info.h new file mode 100644 index 00000000..84de5a1c --- /dev/null +++ b/include/misc/debug_messenger_create_info.h @@ -0,0 +1,94 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
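Since the naming path is easy to miss in the noise of the delegate-worker refactor above, here is a tiny, hypothetical sketch; it assumes the wrapper (an Anvil::Buffer in this case) publicly exposes set_name()/get_name() through DebugMarkerSupportProvider, and the include path is a guess.

#include <cassert>

#include "wrappers/buffer.h" /* include path assumed */

/* Hedged sketch: labels a wrapper object so the name shows up in tools such as RenderDoc. */
void label_buffer(Anvil::Buffer* in_buffer_ptr)
{
    in_buffer_ptr->set_name("Scene vertex buffer");

    assert(in_buffer_ptr->get_name() == "Scene vertex buffer");
}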
+// + +#ifndef MISC_DEBUG_MESSENGER_CREATE_INFO_H +#define MISC_DEBUG_MESSENGER_CREATE_INFO_H + +#include "misc/types.h" + +namespace Anvil +{ + class DebugMessengerCreateInfo + { + public: + /* Public functions */ + + /** Destructor. */ + ~DebugMessengerCreateInfo() + { + /* Stub */ + } + + /* Instantiates a create info structure for debug messengers. + * + * @param in_instance_ptr Vulkan instance to use when creating the Debug Messenger object. Must not be nullptr. + * @param in_debug_message_severity_flags Severity of debug messages to use for callbacks. + * @param in_debug_message_type_flags Types of debug messages to use for callbacks. + * @param in_callback_function Callback function to use. Must not be nullptr. + * + * @result Create info structure instance. + */ + static DebugMessengerCreateInfoUniquePtr create(Anvil::Instance* in_instance_ptr, + const Anvil::DebugMessageSeverityFlags& in_debug_message_severity_flags, + const Anvil::DebugMessageTypeFlags& in_debug_message_type_flags, + Anvil::DebugMessengerCallbackFunction in_callback_function); + + const Anvil::DebugMessengerCallbackFunction& get_callback_function() const + { + return m_callback_function; + } + + const Anvil::DebugMessageSeverityFlags& get_debug_message_severity() const + { + return m_debug_message_severity_flags; + } + + const Anvil::DebugMessageTypeFlags& get_debug_message_types() const + { + return m_debug_message_type_flags; + } + + Anvil::Instance* get_instance_ptr() const + { + return m_instance_ptr; + } + + private: + /* Please see create() documentation for more details */ + + DebugMessengerCreateInfo(Anvil::Instance* in_instance_ptr, + const Anvil::DebugMessageSeverityFlags& in_debug_message_severity_flags, + const Anvil::DebugMessageTypeFlags& in_debug_message_type_flags, + Anvil::DebugMessengerCallbackFunction in_callback_function); + + /* Private variables */ + + Anvil::DebugMessengerCallbackFunction m_callback_function; + Anvil::DebugMessageSeverityFlags m_debug_message_severity_flags; + Anvil::DebugMessageTypeFlags m_debug_message_type_flags; + Anvil::Instance* m_instance_ptr; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(DebugMessengerCreateInfo); + }; +}; /* namespace Anvil */ + +#endif /* MISC_DEBUG_MESSENGER_CREATE_INFO_H */ \ No newline at end of file diff --git a/include/misc/descriptor_pool_create_info.h b/include/misc/descriptor_pool_create_info.h new file mode 100644 index 00000000..b76a9b4a --- /dev/null +++ b/include/misc/descriptor_pool_create_info.h @@ -0,0 +1,146 @@ +// +// Copyright (c) 2019 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
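A hedged sketch of wiring up the new DebugMessengerCreateInfo: the callback's exact signature is not shown in this hunk, so it is passed in as an already-built Anvil::DebugMessengerCallbackFunction, and the severity/type enumerators (WARNING_BIT, ERROR_BIT, VALIDATION_BIT) are assumptions.

#include "misc/debug_messenger_create_info.h"

/* Hedged sketch: requests callbacks for validation-related warnings and errors only. */
Anvil::DebugMessengerCreateInfoUniquePtr make_messenger_ci(Anvil::Instance*                      in_instance_ptr,
                                                           Anvil::DebugMessengerCallbackFunction in_callback)
{
    return Anvil::DebugMessengerCreateInfo::create(in_instance_ptr,
                                                   Anvil::DebugMessageSeverityFlagBits::WARNING_BIT | Anvil::DebugMessageSeverityFlagBits::ERROR_BIT, /* assumed */
                                                   Anvil::DebugMessageTypeFlagBits::VALIDATION_BIT,                                                   /* assumed */
                                                   in_callback);
}

The returned create info would then be handed to the debug messenger wrapper's own factory, which is not part of this hunk.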
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// + +#ifndef MISC_DESCRIPTOR_POOL_CREATE_INFO_H +#define MISC_DESCRIPTOR_POOL_CREATE_INFO_H + +#include "misc/mt_safety.h" +#include "misc/types.h" +#include + +namespace Anvil +{ + class DescriptorPoolCreateInfo + { + public: + /* Public functions */ + + /* Creates a new create info structure which should be fed to Anvil::DescriptorPool::create() function. + * + * By default, zero descriptors are associated with each descriptor type. You need to specify the number of descriptors + * the pool should allocate space for by calling set_n_descriptors_for_descriptor_type(). + * + * @param in_device_ptr Device to use. + * @param in_n_max_sets Maximum number of sets to be allocable from the pool. Must be at + * least 1. + * @param in_flags See DescriptorPoolFlagBits documentation for more details. + * @param in_mt_safety MT safety setting to use for the pool to be spawned. + */ + static DescriptorPoolCreateInfoUniquePtr create(const Anvil::BaseDevice* in_device_ptr, + const uint32_t& in_n_max_sets, + const Anvil::DescriptorPoolCreateFlags& in_create_flags, + const MTSafety& in_mt_safety = Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE); + + const Anvil::DescriptorPoolCreateFlags& get_create_flags() const + { + return m_create_flags; + } + + const Anvil::BaseDevice* get_device_ptr() const + { + return m_device_ptr; + } + + const MTSafety& get_mt_safety() const + { + return m_mt_safety; + } + + uint32_t get_n_descriptors_for_descriptor_type(const Anvil::DescriptorType& in_descriptor_type) const + { + auto iterator = m_descriptor_count.find(in_descriptor_type); + uint32_t result = 0; + + if (iterator != m_descriptor_count.end() ) + { + result = m_descriptor_count.at(in_descriptor_type); + } + + return result; + } + + const uint32_t& get_n_maximum_inline_uniform_block_bindings() const + { + return m_n_max_inline_uniform_block_bindings; + } + + const uint32_t& get_n_maximum_sets() const + { + return m_n_max_sets; + } + + void set_create_flags(const Anvil::DescriptorPoolCreateFlags& in_create_flags) + { + m_create_flags = in_create_flags; + } + + void set_mt_safety(const MTSafety& in_mt_safety) + { + m_mt_safety = in_mt_safety; + } + + void set_n_descriptors_for_descriptor_type(const Anvil::DescriptorType& in_descriptor_type, + const uint32_t& in_n_descriptors) + { + m_descriptor_count[in_descriptor_type] = in_n_descriptors; + } + + /* Configures the maximum number of inline uniform block bindings that descriptor sets spawned using this descriptor pool + * will ever use at once. + * + * NOTE: Requires VK_EXT_inline_uniform_block support. 
+ */ + void set_n_maximum_inline_uniform_block_bindings(const uint32_t& in_n_max_inline_uniform_block_bindings) + { + m_n_max_inline_uniform_block_bindings = in_n_max_inline_uniform_block_bindings; + } + + void set_n_maximum_sets(const uint32_t& in_n_maximum_sets) + { + m_n_max_sets = in_n_maximum_sets; + } + + private: + + /* Private functions */ + + /** Constructor */ + DescriptorPoolCreateInfo(const Anvil::BaseDevice* in_device_ptr, + const uint32_t& in_n_max_sets, + const Anvil::DescriptorPoolCreateFlags& in_create_flags, + const MTSafety& in_mt_safety); + + DescriptorPoolCreateInfo (const DescriptorPoolCreateInfo&); + DescriptorPoolCreateInfo& operator=(const DescriptorPoolCreateInfo&); + + /* Private variables */ + Anvil::DescriptorPoolCreateFlags m_create_flags; + const Anvil::BaseDevice* m_device_ptr; + std::unordered_map > m_descriptor_count; + Anvil::MTSafety m_mt_safety; + uint32_t m_n_max_inline_uniform_block_bindings; + uint32_t m_n_max_sets; + }; + +}; /* namespace Anvil */ + +#endif /* MISC_DESCRIPTOR_POOL_CREATE_INFO_H */ \ No newline at end of file diff --git a/include/misc/descriptor_set_create_info.h b/include/misc/descriptor_set_create_info.h new file mode 100644 index 00000000..43050fe7 --- /dev/null +++ b/include/misc/descriptor_set_create_info.h @@ -0,0 +1,255 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// + +#ifndef MISC_DESCRIPTOR_SET_CREATE_INFO_H +#define MISC_DESCRIPTOR_SET_CREATE_INFO_H + +#include "misc/types.h" +#include "misc/struct_chainer.h" + +namespace Anvil +{ + struct DescriptorSetLayoutCreateInfoContainer + { + std::vector binding_flags_vec; + std::vector binding_info_items; + std::vector sampler_items; + Anvil::StructChainUniquePtr struct_chain_ptr; + }; + + class DescriptorSetCreateInfo + { + public: + /* Public functions */ + + /** Destructor. */ + ~DescriptorSetCreateInfo(); + + /** Adds a new binding. + * + * If @param in_flags includes DESCRIPTOR_BINDING_FLAG_VARIABLE_DESCRIPTOR_COUNT_BIT, @param in_descriptor_array_size + * tells the maximum number of descriptors the binding can take. The actual number of descriptors which is going to be + * specified for the binding needs to be specified by separately calling set_binding_variable_descriptor_count(). + * + * It is an error to attempt to add a binding at an index, for which another binding has + * already been specified. 
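Because the new DescriptorPoolCreateInfo defaults to zero descriptors per type, every type the pool should be able to serve has to be sized explicitly. The hypothetical sketch below shows that; the DescriptorPoolCreateFlagBits and DescriptorType enumerators are assumptions not confirmed by this diff.

#include "misc/descriptor_pool_create_info.h"

/* Hedged sketch: a pool able to serve up to 16 sets with uniform buffers and combined image samplers. */
Anvil::DescriptorPoolCreateInfoUniquePtr make_pool_ci(const Anvil::BaseDevice* in_device_ptr)
{
    auto ci_ptr = Anvil::DescriptorPoolCreateInfo::create(in_device_ptr,
                                                          16, /* in_n_max_sets */
                                                          Anvil::DescriptorPoolCreateFlagBits::NONE); /* assumed */

    ci_ptr->set_n_descriptors_for_descriptor_type(Anvil::DescriptorType::UNIFORM_BUFFER,         16); /* enumerator assumed */
    ci_ptr->set_n_descriptors_for_descriptor_type(Anvil::DescriptorType::COMBINED_IMAGE_SAMPLER, 32); /* enumerator assumed */

    return ci_ptr;
}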
+ * + * It is an error to attempt to define immutable samplers for descriptors of type other than + * sampler or combined image+sampler. + * + * NOTE: For inline uniform block bindings, subsequent set_binding_item() call is NOT required. + * + * @param in_binding_index Index of the binding to configure. + * @param in_descriptor_type Type of the descriptor to use for the binding. + * @param in_descriptor_array_size Size of the descriptor array to use for the binding. + * + * For inline uniform blocks, this parameter corresponds to (number of bytes associated with the + * block). This value MUST be divisible by 4. + * + * @param in_flags Please see documentation of Anvil::DescriptorBindingFlags for more details. + * @param in_opt_immutable_sampler_ptr_ptr If not nullptr, an array of @param in_descriptor_array_size samplers should + * be passed. The binding will then be considered immutable, as per spec language. + * May be nullptr. + * + * @return true if successful, false otherwise. + **/ + bool add_binding(uint32_t in_binding_index, + Anvil::DescriptorType in_descriptor_type, + uint32_t in_descriptor_array_size, + Anvil::ShaderStageFlags in_stage_flags, + const Anvil::DescriptorBindingFlags& in_flags = Anvil::DescriptorBindingFlagBits::NONE, + const Anvil::Sampler* const* in_opt_immutable_sampler_ptr_ptr = nullptr); + + /** Tells if the DS info structure contains a variable descriptor count binding. + * + * @param out_opt_binding_index_ptr If not null, deref will be set to the index of the variable descriptor + * count binding. + * @param out_opt_binding_size_ptr If not null, deref will be set to the variable descriptor count binding's size. + * + * @return true if such a binding has been defined, false otherwise. + */ + bool contains_variable_descriptor_count_binding(uint32_t* out_opt_binding_index_ptr = nullptr, + uint32_t* out_opt_binding_size_ptr = nullptr) const + { + if (out_opt_binding_index_ptr != nullptr) + { + *out_opt_binding_index_ptr = m_n_variable_descriptor_count_binding; + } + + if (out_opt_binding_size_ptr != nullptr) + { + *out_opt_binding_size_ptr = m_variable_descriptor_count_binding_size; + } + + return (m_n_variable_descriptor_count_binding != UINT32_MAX); + } + + static DescriptorSetCreateInfoUniquePtr create(); + + /* Fills & returns a VkDescriptorSetLayoutCreateInfo structure holding all information necessary to spawn + * a new descriptor set layout instance. + * + **/ + std::unique_ptr create_descriptor_set_layout_create_info(const Anvil::BaseDevice* in_device_ptr) const; + + /** Retrieves properties of a binding at a given index number. + * + * @param in_n_binding Index number of the binding to retrieve properties of. + * @param out_opt_binding_index_ptr May be nullptr. If not, deref will be set to the index of the + * binding. This does NOT need to be equal to @param in_n_binding. + * @param out_opt_descriptor_type_ptr May be nullptr. If not, deref will be set to the descriptor type + * for the specified binding. + * @param out_opt_descriptor_array_size_ptr May be nullptr. If not, deref will be set to size of the descriptor + * array, associated with the specified binding. + * @param out_opt_stage_flags_ptr May be nullptr. If not, deref will be set to stage flags, + * as configured for the specified binding. + * @param out_opt_immutable_samplers_enabled_ptr May be nullptr. If not, deref will be set to true if immutable samplers + * have been defined for the specified binding; otherwise, it will be + * set to false. + * @param out_opt_flags_ptr May be nullptr. 
If not, deref will be set to the flags specified at binding + * addition time. + * + * @return true if successful, false otherwise. + **/ + bool get_binding_properties_by_binding_index(uint32_t in_binding_index, + Anvil::DescriptorType* out_opt_descriptor_type_ptr = nullptr, + uint32_t* out_opt_descriptor_array_size_ptr = nullptr, + Anvil::ShaderStageFlags* out_opt_stage_flags_ptr = nullptr, + bool* out_opt_immutable_samplers_enabled_ptr = nullptr, + Anvil::DescriptorBindingFlags* out_opt_flags_ptr = nullptr) const; + + /** Retrieves properties of a binding at a given index number. + * + * @param in_n_binding Index number of the binding to retrieve properties of. + * @param out_opt_binding_index_ptr May be nullptr. If not, deref will be set to the index of the + * binding. This does NOT need to be equal to @param in_n_binding. + * @param out_opt_descriptor_type_ptr May be nullptr. If not, deref will be set to the descriptor type + * for the specified binding. + * @param out_opt_descriptor_array_size_ptr May be nullptr. If not, deref will be set to size of the descriptor + * array, associated with the specified binding. + * @param out_opt_stage_flags_ptr May be nullptr. If not, deref will be set to stage flags, + * as configured for the specified binding. + * @param out_opt_immutable_samplers_enabled_ptr May be nullptr. If not, deref will be set to true if immutable samplers + * have been defined for the specified binding; otherwise, it will be + * set to false. + * @param out_opt_flags_ptr May be nullptr. If not, deref will be set to the flags specified at binding + * addition time. + * + * @return true if successful, false otherwise. + **/ + bool get_binding_properties_by_index_number(uint32_t in_n_binding, + uint32_t* out_opt_binding_index_ptr = nullptr, + Anvil::DescriptorType* out_opt_descriptor_type_ptr = nullptr, + uint32_t* out_opt_descriptor_array_size_ptr = nullptr, + Anvil::ShaderStageFlags* out_opt_stage_flags_ptr = nullptr, + bool* out_opt_immutable_samplers_enabled_ptr = nullptr, + Anvil::DescriptorBindingFlags* out_opt_flags_ptr = nullptr) const; + + /** Returns the number of bindings defined for the layout. */ + uint32_t get_n_bindings() const + { + return static_cast(m_bindings.size() ); + } + + /* Sets the number of descriptors to be used for a variable descriptor count binding. + * + * A variable descriptor count binding must have been added to this DS info instance before this function + * can be called. + */ + bool set_binding_variable_descriptor_count(const uint32_t& in_count); + + bool operator==(const Anvil::DescriptorSetCreateInfo& in_ds) const; + + private: + /* Private type definitions */ + + /** Describes a single descriptor set layout binding */ + typedef struct Binding + { + uint32_t descriptor_array_size; + Anvil::DescriptorType descriptor_type; + Anvil::DescriptorBindingFlags flags; + std::vector immutable_samplers; + + Anvil::ShaderStageFlags stage_flags; + + /** Dummy constructor. Do not use. */ + Binding() + { + descriptor_array_size = 0; + descriptor_type = Anvil::DescriptorType::UNKNOWN; + } + + /** Constructor. + * + * For argument discussion, please see Anvil::DescriptorSetLayout::add_binding() documentation. 
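+             *
+             *  A short, illustrative add_binding() sketch which would produce a Binding such as this one
+             *  (the pointer name and the DescriptorType / ShaderStageFlagBits enumerators below are
+             *  placeholders / assumptions, not verified Anvil identifiers):
+             *
+             *      ds_create_info_ptr->add_binding(0,                                             // in_binding_index
+             *                                      Anvil::DescriptorType::COMBINED_IMAGE_SAMPLER,
+             *                                      1,                                             // in_descriptor_array_size
+             *                                      Anvil::ShaderStageFlagBits::FRAGMENT_BIT);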
+ **/ + Binding(uint32_t in_descriptor_array_size, + Anvil::DescriptorType in_descriptor_type, + Anvil::ShaderStageFlags in_stage_flags, + const Anvil::Sampler* const* in_immutable_sampler_ptrs, + Anvil::DescriptorBindingFlags in_flags) + { + descriptor_array_size = in_descriptor_array_size; + descriptor_type = in_descriptor_type; + flags = in_flags; + stage_flags = in_stage_flags; + + if (in_immutable_sampler_ptrs != nullptr) + { + for (uint32_t n_sampler = 0; + n_sampler < descriptor_array_size; + ++n_sampler) + { + immutable_samplers.push_back(in_immutable_sampler_ptrs[n_sampler]); + } + } + } + + bool operator==(const Binding& in_binding) const + { + return (descriptor_array_size == in_binding.descriptor_array_size) && + (descriptor_type == in_binding.descriptor_type) && + (flags == in_binding.flags) && + (immutable_samplers == in_binding.immutable_samplers) && + (stage_flags == in_binding.stage_flags); + } + } Binding; + + typedef std::map BindingIndexToBindingMap; + + /* Private functions */ + + /* Please see create() documentation for more details */ + DescriptorSetCreateInfo(); + + /* Private variables */ + BindingIndexToBindingMap m_bindings; + + uint32_t m_n_variable_descriptor_count_binding; + uint32_t m_variable_descriptor_count_binding_size; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(DescriptorSetCreateInfo); + }; +}; /* namespace Anvil */ + +#endif /* MISC_DESCRIPTOR_SET_CREATE_INFO */ \ No newline at end of file diff --git a/include/misc/device_create_info.h b/include/misc/device_create_info.h new file mode 100644 index 00000000..57c14502 --- /dev/null +++ b/include/misc/device_create_info.h @@ -0,0 +1,291 @@ +// +// Copyright (c) 2019 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef MISC_DEVICE_CREATE_INFO_H +#define MISC_DEVICE_CREATE_INFO_H + +#include "misc/types.h" +#include "misc/extensions.h" +#include + +namespace Anvil +{ + class DeviceCreateInfo + { + public: + /* Public functions */ + + /* TODO. + * + * Anvil creates one command pool per each queue family which apps can use at any time which is why the CommandPoolCreateFlags argument + * is present. + * + * NOTE: By default, an empty pipeline cache will be created for pipeline manager usage. You can adjust this behavior, allowing for + * pipeline cache reuse across executions, by calling set_pipeline_cache_ptr() and retrieving pipeline cache data and caching it + * at later time. 
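+         *
+         * A minimal single-GPU sketch (variable names are placeholders; the NONE command pool flag
+         * value is an assumption, not a recommendation):
+         *
+         *     auto device_create_info_ptr = Anvil::DeviceCreateInfo::create_sgpu(physical_device_ptr,
+         *                                                                        true,                        // in_enable_shader_module_cache
+         *                                                                        extension_configuration,
+         *                                                                        std::vector<std::string>(),  // in_layers_to_enable
+         *                                                                        Anvil::CommandPoolCreateFlagBits::NONE,
+         *                                                                        false);                      // in_mt_safe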
+ * + * NOTE: If VK_EXT_global_queue_priority is supported, all queues are associated MEDIUM_EXT global priority by default. + * This can be changed on a per-queue basis by calling set_queue_global_priority() prior to passing the structure + * to Anvil::{M, S}GPUDevice for device instantiation. + * + * NOTE: By default, all queues are associated with a priority of 0.0 and no create flags. This can be adjusted by calling + * corresponding set_queue_..() functions. + * + * @param in_physical_device_ptr Physical device to create this device from. Must not be nullptr. + * @param in_extension_configuration Tells which extensions must/should be specified at creation time. + * @param in_layers_to_enable A vector of layer names to be used when creating the device. + * Can be empty. + * @param in_transient_command_buffer_allocs_only True if the command pools, which are going to be initialized after + * the device is created, should be set up for transient command buffer + * support. + * @param in_support_resettable_command_buffer_allocs True if the command pools should be configured for resettable command + * buffer support. + * @param in_enable_shader_module_cache True if all spawned shader modules should be cached throughout instance lifetime. + * False if they should be released as soon as all shared pointers go out of scope. + * @param in_mt_safe True if command buffer creation and queue submissions should be automatically serialized. + * Set to false if your app is never going to use more than one thread at a time for + * command buffer creation or submission. + * + */ + static Anvil::DeviceCreateInfoUniquePtr create_mgpu(const std::vector& in_physical_device_ptrs, + const bool& in_enable_shader_module_cache, + const DeviceExtensionConfiguration& in_extension_configuration, + const std::vector& in_layers_to_enable, + const Anvil::CommandPoolCreateFlags& in_helper_command_pool_create_flags, + const bool& in_mt_safe); + static Anvil::DeviceCreateInfoUniquePtr create_sgpu(const Anvil::PhysicalDevice* in_physical_device_ptr, + const bool& in_enable_shader_module_cache, + const DeviceExtensionConfiguration& in_extension_configuration, + const std::vector& in_layers_to_enable, + const Anvil::CommandPoolCreateFlags& in_helper_command_pool_create_flags, + const bool& in_mt_safe); + + const DeviceExtensionConfiguration& get_extension_configuration() const + { + return m_extension_configuration; + } + + const Anvil::CommandPoolCreateFlags& get_helper_command_pool_create_flags() const + { + return m_helper_command_pool_create_flags; + } + + const std::vector& get_layers_to_enable() const + { + return m_layers_to_enable; + } + + const Anvil::MemoryOverallocationBehavior& get_memory_overallocation_behavior() const + { + return m_memory_overallocation_behavior; + } + + const std::vector& get_physical_device_ptrs() const + { + return m_physical_device_ptrs; + } + + Anvil::PipelineCache* get_pipeline_cache_ptr() const + { + return m_pipeline_cache_ptr.get(); + } + + Anvil::QueueGlobalPriority get_queue_global_priority(const uint32_t& in_queue_family_index, + const uint32_t& in_queue_index) const + { + auto queue_fam_iterator = m_queue_properties.find(in_queue_family_index); + auto result = Anvil::QueueGlobalPriority::MEDIUM_EXT; + + if (queue_fam_iterator != m_queue_properties.end() ) + { + auto queue_iterator = queue_fam_iterator->second.find(in_queue_index); + + if (queue_iterator != queue_fam_iterator->second.end() ) + { + result = queue_iterator->second.global_priority; + } + } + + return result; + } + + 
bool get_queue_must_support_protected_memory_operations(const uint32_t& in_queue_family_index, + const uint32_t& in_queue_index) const + { + auto queue_fam_iterator = m_queue_properties.find(in_queue_family_index); + auto result = false; + + if (queue_fam_iterator != m_queue_properties.end() ) + { + auto queue_iterator = queue_fam_iterator->second.find(in_queue_index); + + if (queue_iterator != queue_fam_iterator->second.end() ) + { + result = queue_iterator->second.is_protected_capable; + } + } + + return result; + } + + float get_queue_priority(const uint32_t& in_queue_family_index, + const uint32_t& in_queue_index) const + { + auto queue_fam_iterator = m_queue_properties.find(in_queue_family_index); + auto result = 0.0f; + + if (queue_fam_iterator != m_queue_properties.end() ) + { + auto queue_iterator = queue_fam_iterator->second.find(in_queue_index); + + if (queue_iterator != queue_fam_iterator->second.end() ) + { + result = queue_iterator->second.priority; + } + } + + return result; + } + + /* Sets memory overallocation behavior to request at device creation time. + * + * NOTE: Requires VK_AMD_memory_overallocation_behavior. + * + * @param in_behavior Required behavior to specify at device creation time. + **/ + void set_memory_overallocation_behavior(const Anvil::MemoryOverallocationBehavior& in_behavior) + { + m_memory_overallocation_behavior = in_behavior; + } + + /* Associates global priority information with a given pair. + * + * NOTE: Requires VK_EXT_global_queue_priority. + * + * @param in_queue_family_index Index of the queue family to use for the association. + * @param in_queue_index Index of the queue belonging to queue family with index @param in_queue_family_index + * to use for the association. + * @param in_queue_global_priority Global priority to use for the queue. + */ + void set_queue_global_priority(const uint32_t& in_queue_family_index, + const uint32_t& in_queue_index, + const Anvil::QueueGlobalPriority& in_queue_global_priority) + { + m_queue_properties[in_queue_family_index][in_queue_index].global_priority = in_queue_global_priority; + } + + /* Caches user-specified pipeline cache for usage with pipeline managers. */ + void set_pipeline_cache_ptr(Anvil::PipelineCacheUniquePtr in_pipeline_cache_ptr) + { + m_pipeline_cache_ptr = std::move(in_pipeline_cache_ptr); + } + + /* Associates priority with a given pair. + * + * By default, all queues will be associated a priority of 0.0. + * + * NOTE: Apps are required to respect discreteQueuePriorities property of the physical device the device + * will be created from! + * + * @param in_queue_family_index Index of the queue family to use for the association. + * @param in_queue_index Index of the queue belonging to queue family with index @param in_queue_family_index + * to use for the association. + * @param in_queue_priority Priority to use for the queue. + */ + void set_queue_priority(const uint32_t& in_queue_family_index, + const uint32_t& in_queue_index, + const float& in_queue_priority) + { + m_queue_properties[in_queue_family_index][in_queue_index].priority = in_queue_priority; + } + + /* Call to specify whether or not VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT should be specified at queue creation time. + * + * NOTE: Apps can only request the bit for queues which are reported to support protected memory operations. + * NOTE: This function is only supported for VK 1.1 devices or newer. + * + * @param in_queue_family_index Index of the queue family to use for the association. 
+ * @param in_queue_index Index of the queue belonging to queue family with index @param in_queue_family_index + * to use for the association. + * @param in_should_enable True to enable the functionality; false to disable. + */ + void set_queue_must_support_protected_memory_operations(const uint32_t& in_queue_family_index, + const uint32_t& in_queue_index, + const bool& in_should_enable) + { + m_queue_properties[in_queue_family_index][in_queue_index].is_protected_capable = in_should_enable; + } + + const bool& should_be_mt_safe() const + { + return m_mt_safe; + } + + const bool& should_enable_shader_module_cache() const + { + return m_should_enable_shader_module_cache; + } + + const void *pNext = nullptr; + + private: + /* Private type definitions */ + typedef struct QueueProperties + { + Anvil::QueueGlobalPriority global_priority; + bool is_protected_capable; + float priority; + + QueueProperties() + :global_priority (Anvil::QueueGlobalPriority::MEDIUM_EXT), + is_protected_capable(false), + priority (0.0f) + { + /* Stub */ + } + } QueueProperties; + + /* Private functions */ + DeviceCreateInfo(const std::vector& in_physical_device_ptrs, + const bool& in_enable_shader_module_cache, + const DeviceExtensionConfiguration& in_extension_configuration, + const std::vector& in_layers_to_enable, + const Anvil::CommandPoolCreateFlags& in_helper_command_pool_create_flags, + const bool& in_mt_safe); + + /* Private variables */ + DeviceExtensionConfiguration m_extension_configuration; + Anvil::CommandPoolCreateFlags m_helper_command_pool_create_flags; + std::vector m_layers_to_enable; + Anvil::MemoryOverallocationBehavior m_memory_overallocation_behavior; + bool m_mt_safe; + std::vector m_physical_device_ptrs; + Anvil::PipelineCacheUniquePtr m_pipeline_cache_ptr; + std::unordered_map > m_queue_properties; + bool m_should_enable_shader_module_cache; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(DeviceCreateInfo); + ANVIL_DISABLE_COPY_CONSTRUCTOR(DeviceCreateInfo); + }; + +}; /* namespace Anvil */ + +#endif /* MISC_DEVICE_CREATE_INFO_H */ \ No newline at end of file diff --git a/include/misc/dummy_window.h b/include/misc/dummy_window.h index 763ef396..fb31be3a 100644 --- a/include/misc/dummy_window.h +++ b/include/misc/dummy_window.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -35,10 +35,10 @@ namespace Anvil { public: /* Public functions */ - static std::shared_ptr create(const std::string& in_title, - unsigned int in_width, - unsigned int in_height, - Anvil::PresentCallbackFunction in_present_callback_func); + static Anvil::WindowUniquePtr create(const std::string& in_title, + unsigned int in_width, + unsigned int in_height, + Anvil::PresentCallbackFunction in_present_callback_func); virtual ~DummyWindow() { @@ -80,10 +80,10 @@ namespace Anvil { public: /* Public methods */ - static std::shared_ptr create(const std::string& in_title, - unsigned int in_width, - unsigned int in_height, - PresentCallbackFunction in_present_callback_func); + static Anvil::WindowUniquePtr create(const std::string& in_title, + unsigned int in_width, + unsigned int in_height, + PresentCallbackFunction in_present_callback_func); /** Destructor */ virtual ~DummyWindowWithPNGSnapshots() @@ -105,7 +105,7 @@ namespace Anvil * * @param in_swapchain_ptr Swapchain to assign. Must not be NULL. */ - void set_swapchain(std::weak_ptr in_swapchain_ptr); + void set_swapchain(Anvil::Swapchain* in_swapchain_ptr); private: /* Private functions */ @@ -124,7 +124,7 @@ namespace Anvil * * @return As per summary. */ - std::shared_ptr get_swapchain_image_raw_r8g8b8a8_unorm_data(std::shared_ptr in_swapchain_image_ptr); + std::unique_ptr get_swapchain_image_raw_r8g8b8a8_unorm_data(Anvil::Image* in_swapchain_image_ptr); /** Grabs fake swapchain image contents and stores it in a PNG file. */ void store_swapchain_frame(); @@ -135,7 +135,7 @@ namespace Anvil std::string m_title; uint32_t m_width; - std::weak_ptr m_swapchain_ptr; + Anvil::Swapchain* m_swapchain_ptr; }; }; /* namespace Anvil */ diff --git a/include/misc/event_create_info.h b/include/misc/event_create_info.h new file mode 100644 index 00000000..9a132271 --- /dev/null +++ b/include/misc/event_create_info.h @@ -0,0 +1,78 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef MISC_EVENT_CREATE_INFO_H +#define MISC_EVENT_CREATE_INFO_H + +#include "misc/types.h" + +namespace Anvil +{ + class EventCreateInfo + { + public: + /* Public functions */ + + /* TODO. 
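+         *
+         * A minimal, illustrative sketch (variable names are placeholders and the MTSafety
+         * enumerator used below is an assumption):
+         *
+         *     auto event_create_info_ptr = Anvil::EventCreateInfo::create(device_ptr);
+         *
+         *     event_create_info_ptr->set_mt_safety(Anvil::MTSafety::ENABLED);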
+ * + * NOTE: Unless specified later with a corresponding set_..() invocation, the following parameters are assumed by default: + * + * - MT safety: Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE + */ + static Anvil::EventCreateInfoUniquePtr create(const Anvil::BaseDevice* in_device_ptr); + + const Anvil::BaseDevice* get_device() const + { + return m_device_ptr; + } + + const MTSafety& get_mt_safety() const + { + return m_mt_safety; + } + + + void set_device(const Anvil::BaseDevice* in_device_ptr) + { + m_device_ptr = in_device_ptr; + } + + void set_mt_safety(const MTSafety& in_mt_safety) + { + m_mt_safety = in_mt_safety; + } + + private: + /* Private functions */ + EventCreateInfo(const Anvil::BaseDevice* in_device_ptr, + MTSafety in_mt_safety); + + /* Private variables */ + const Anvil::BaseDevice* m_device_ptr; + Anvil::MTSafety m_mt_safety; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(EventCreateInfo); + ANVIL_DISABLE_COPY_CONSTRUCTOR(EventCreateInfo); + }; + +}; /* namespace Anvil */ + +#endif /* MISC_EVENT_CREATE_INFO_H */ \ No newline at end of file diff --git a/include/misc/extensions.h b/include/misc/extensions.h new file mode 100644 index 00000000..b28b443b --- /dev/null +++ b/include/misc/extensions.h @@ -0,0 +1,1322 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +#ifndef MISC_EXTENSIONS_H +#define MISC_EXTENSIONS_H + +#include "misc/debug.h" +#include "misc/types.h" + +namespace Anvil +{ + namespace Internal + { + template + struct DeviceExtensions + { + ValueType amd_buffer_marker; + ValueType amd_draw_indirect_count; + ValueType amd_gcn_shader; + ValueType amd_gpu_shader_half_float; + ValueType amd_gpu_shader_int16; + ValueType amd_memory_overallocation_behavior; + ValueType amd_mixed_attachment_samples; + ValueType amd_negative_viewport_height; + ValueType amd_rasterization_order; + ValueType amd_shader_ballot; + ValueType amd_shader_core_properties; + ValueType amd_shader_explicit_vertex_parameter; + ValueType amd_shader_fragment_mask; + ValueType amd_shader_image_load_store_lod; + ValueType amd_shader_info; + ValueType amd_shader_trinary_minmax; + ValueType amd_texture_gather_bias_lod; + + ValueType ext_conservative_rasterization; + ValueType ext_debug_marker; + ValueType ext_depth_clip_enable; + ValueType ext_depth_range_unrestricted; + ValueType ext_descriptor_indexing; + ValueType ext_pci_bus_info; + ValueType ext_external_memory_host; + ValueType ext_global_priority; + ValueType ext_hdr_metadata; + ValueType ext_inline_uniform_block; + ValueType ext_memory_budget; + ValueType ext_memory_priority; + ValueType ext_queue_family_foreign; + ValueType ext_sample_locations; + ValueType ext_sampler_filter_minmax; + ValueType ext_scalar_block_layout; + ValueType ext_separate_stencil_usage; + ValueType ext_shader_stencil_export; + ValueType ext_shader_subgroup_ballot; + ValueType ext_shader_subgroup_vote; + ValueType ext_shader_viewport_index_layer; + ValueType ext_swapchain_colorspace; + ValueType ext_transform_feedback; + ValueType ext_vertex_attribute_divisor; + ValueType google_decorate_string; + ValueType google_hlsl_functionality1; + ValueType khr_16bit_storage; + ValueType khr_8bit_storage; + ValueType khr_bind_memory2; + ValueType khr_create_renderpass2; + ValueType khr_dedicated_allocation; + ValueType khr_depth_stencil_resolve; + ValueType khr_descriptor_update_template; + ValueType khr_device_group; + ValueType khr_draw_indirect_count; + ValueType khr_driver_properties; + ValueType khr_external_fence; + ValueType khr_external_memory; + ValueType khr_external_semaphore; + + #if defined(_WIN32) + ValueType khr_external_fence_win32; + ValueType khr_external_memory_win32; + ValueType khr_external_semaphore_win32; + #else + ValueType khr_external_fence_fd; + ValueType khr_external_memory_fd; + ValueType khr_external_semaphore_fd; + #endif + + ValueType khr_get_memory_requirements2; + ValueType khr_image_format_list; + ValueType khr_maintenance1; + ValueType khr_maintenance2; + ValueType khr_maintenance3; + ValueType khr_multiview; + ValueType khr_relaxed_block_layout; + ValueType khr_sampler_mirror_clamp_to_edge; + ValueType khr_sampler_ycbcr_conversion; + ValueType khr_shader_atomic_int64; + ValueType khr_shader_draw_parameters; + ValueType khr_shader_float16_int8; + ValueType khr_shader_float_controls; + ValueType khr_storage_buffer_storage_class; + ValueType khr_swapchain; + ValueType khr_swapchain_mutable_format; + ValueType khr_variable_pointers; + ValueType khr_vulkan_memory_model; + + ValueType nv_external_memory_capabilities; + ValueType khr_physical_device_properties_2; + + #if defined(_WIN32) + ValueType khr_win32_keyed_mutex; + #endif + + std::map values_by_extension_names; + + + DeviceExtensions(const std::map& in_values_by_extension_names, + const ValueType& in_unspecified_extension_name_value) + { + typedef struct 
ExtensionData + { + const char* name; + ValueType* out_result_ptr; + + ExtensionData(const char* in_name, + ValueType* in_out_result_ptr) + :name (in_name), + out_result_ptr(in_out_result_ptr) + { + /* Stub */ + } + } ExtensionData; + + std::vector recognized_extensions = + { + {ExtensionData(VK_AMD_BUFFER_MARKER_EXTENSION_NAME, &amd_buffer_marker)}, + {ExtensionData(VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME, &amd_draw_indirect_count)}, + {ExtensionData(VK_AMD_GCN_SHADER_EXTENSION_NAME, &amd_gcn_shader)}, + {ExtensionData(VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME, &amd_gpu_shader_half_float)}, + {ExtensionData(VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME, &amd_gpu_shader_int16)}, + {ExtensionData(VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME, &amd_memory_overallocation_behavior)}, + {ExtensionData(VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME, &amd_mixed_attachment_samples)}, + {ExtensionData(VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME, &amd_negative_viewport_height)}, + {ExtensionData(VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME, &amd_rasterization_order)}, + {ExtensionData(VK_AMD_SHADER_BALLOT_EXTENSION_NAME, &amd_shader_ballot)}, + {ExtensionData(VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME, &amd_shader_core_properties)}, + {ExtensionData(VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME, &amd_shader_explicit_vertex_parameter)}, + {ExtensionData(VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME, &amd_shader_fragment_mask)}, + {ExtensionData(VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME, &amd_shader_image_load_store_lod)}, + {ExtensionData(VK_AMD_SHADER_INFO_EXTENSION_NAME, &amd_shader_info)}, + {ExtensionData(VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME, &amd_shader_trinary_minmax)}, + {ExtensionData(VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME, &amd_texture_gather_bias_lod)}, + {ExtensionData(VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME, &ext_conservative_rasterization)}, + {ExtensionData(VK_EXT_DEBUG_MARKER_EXTENSION_NAME, &ext_debug_marker)}, + {ExtensionData(VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME, &ext_depth_clip_enable)}, + {ExtensionData(VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME, &ext_depth_range_unrestricted)}, + {ExtensionData(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME, &ext_descriptor_indexing)}, + {ExtensionData(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME, &ext_external_memory_host)}, + {ExtensionData(VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME, &ext_global_priority)}, + {ExtensionData(VK_EXT_HDR_METADATA_EXTENSION_NAME, &ext_hdr_metadata)}, + {ExtensionData(VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME, &ext_inline_uniform_block)}, + {ExtensionData(VK_EXT_MEMORY_BUDGET_EXTENSION_NAME, &ext_memory_budget)}, + {ExtensionData(VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME, &ext_memory_priority)}, + {ExtensionData(VK_EXT_PCI_BUS_INFO_EXTENSION_NAME, &ext_pci_bus_info)}, + {ExtensionData(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, &ext_queue_family_foreign)}, + {ExtensionData(VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME, &ext_sample_locations)}, + {ExtensionData(VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME, &ext_sampler_filter_minmax)}, + {ExtensionData(VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME, &ext_scalar_block_layout)}, + {ExtensionData(VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME, &ext_separate_stencil_usage)}, + {ExtensionData(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, &ext_shader_stencil_export)}, + {ExtensionData(VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME, &ext_shader_subgroup_ballot)}, + {ExtensionData(VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME, &ext_shader_subgroup_vote)}, + 
{ExtensionData(VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME, &ext_shader_viewport_index_layer)}, + {ExtensionData(VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME, &ext_swapchain_colorspace)}, + {ExtensionData(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME, &ext_transform_feedback)}, + {ExtensionData(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME, &ext_vertex_attribute_divisor)}, + {ExtensionData(VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME, &google_decorate_string)}, + {ExtensionData(VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME, &google_hlsl_functionality1)}, + {ExtensionData(VK_KHR_16BIT_STORAGE_EXTENSION_NAME, &khr_16bit_storage)}, + {ExtensionData(VK_KHR_8BIT_STORAGE_EXTENSION_NAME, &khr_8bit_storage)}, + {ExtensionData(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, &khr_bind_memory2)}, + {ExtensionData(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME, &khr_create_renderpass2)}, + {ExtensionData(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, &khr_dedicated_allocation)}, + {ExtensionData(VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME, &khr_depth_stencil_resolve)}, + {ExtensionData(VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME, &khr_descriptor_update_template)}, + {ExtensionData(VK_KHR_DEVICE_GROUP_EXTENSION_NAME, &khr_device_group)}, + {ExtensionData(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME, &khr_draw_indirect_count)}, + {ExtensionData(VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME, &khr_driver_properties)}, + {ExtensionData(VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME, &khr_external_fence)}, + {ExtensionData(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME, &khr_external_memory)}, + {ExtensionData(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME, &khr_external_semaphore)}, + #if defined(_WIN32) + {ExtensionData(VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME, &khr_external_fence_win32)}, + {ExtensionData(VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME, &khr_external_memory_win32)}, + {ExtensionData(VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME, &khr_external_semaphore_win32)}, + #else + {ExtensionData(VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME, &khr_external_fence_fd)}, + {ExtensionData(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME, &khr_external_memory_fd)}, + {ExtensionData(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, &khr_external_semaphore_fd)}, + #endif + {ExtensionData(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, &khr_get_memory_requirements2)}, + {ExtensionData(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME, &khr_image_format_list)}, + {ExtensionData(VK_KHR_MAINTENANCE1_EXTENSION_NAME, &khr_maintenance1)}, + {ExtensionData(VK_KHR_MAINTENANCE2_EXTENSION_NAME, &khr_maintenance2)}, + {ExtensionData(VK_KHR_MAINTENANCE3_EXTENSION_NAME, &khr_maintenance3)}, + {ExtensionData(VK_KHR_MULTIVIEW_EXTENSION_NAME, &khr_multiview)}, + {ExtensionData(VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME, &khr_relaxed_block_layout)}, + {ExtensionData(VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME, &khr_sampler_mirror_clamp_to_edge)}, + {ExtensionData(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, &khr_sampler_ycbcr_conversion)}, + {ExtensionData(VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME, &khr_shader_atomic_int64)}, + {ExtensionData(VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME, &khr_shader_draw_parameters)}, + {ExtensionData(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME, &khr_shader_float16_int8)}, + {ExtensionData(VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME, &khr_shader_float_controls)}, + {ExtensionData(VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME, &khr_storage_buffer_storage_class)}, + {ExtensionData(VK_KHR_SWAPCHAIN_EXTENSION_NAME, &khr_swapchain)}, + 
{ExtensionData(VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME, &khr_swapchain_mutable_format)}, + {ExtensionData(VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME, &khr_variable_pointers)}, + {ExtensionData(VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME, &khr_vulkan_memory_model)}, + + {ExtensionData(VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, &nv_external_memory_capabilities)}, + {ExtensionData(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, &khr_physical_device_properties_2)}, + + #if defined(_WIN32) + {ExtensionData(VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME, &khr_win32_keyed_mutex)}, + #endif + }; + + values_by_extension_names = in_values_by_extension_names; + + for (const auto& current_extension_data : recognized_extensions) + { + auto extension_iterator = in_values_by_extension_names.find(current_extension_data.name); + + if (extension_iterator != in_values_by_extension_names.end() ) + { + *current_extension_data.out_result_ptr = extension_iterator->second; + } + else + { + *current_extension_data.out_result_ptr = in_unspecified_extension_name_value; + values_by_extension_names[current_extension_data.name] = in_unspecified_extension_name_value; + } + } + } + + }; + + template + struct InstanceExtensions + { + ValueType ext_debug_report; + ValueType ext_debug_utils; + ValueType khr_device_group_creation; + ValueType khr_external_fence_capabilities; + ValueType khr_external_memory_capabilities; + ValueType khr_external_semaphore_capabilities; + ValueType khr_get_physical_device_properties2; + ValueType khr_surface; + + #ifdef _WIN32 + #if defined(ANVIL_INCLUDE_WIN3264_WINDOW_SYSTEM_SUPPORT) + ValueType khr_win32_surface; + #endif + #else + #if defined(ANVIL_INCLUDE_XCB_WINDOW_SYSTEM_SUPPORT) + ValueType khr_xcb_surface; + #endif + #if defined(ANVIL_INCLUDE_WAYLAND_WINDOW_SYSTEM_SUPPORT) + ValueType khr_wayland_surface; + #endif + #endif + #if defined(ANVIL_INCLUDE_HEADLESS_WINDOW_SYSTEM_SUPPORT) + ValueType ext_headless_surface; + #endif + + std::map values_by_extension_names; + + + InstanceExtensions(const std::map& in_values_by_extension_names, + const ValueType& in_unspecified_extension_name_value) + { + typedef struct ExtensionData + { + const char* name; + ValueType* out_result_ptr; + + ExtensionData(const char* in_name, + ValueType* in_out_result_ptr) + :name (in_name), + out_result_ptr(in_out_result_ptr) + { + /* Stub */ + } + } ExtensionData; + + std::vector recognized_extensions = + { + {ExtensionData(VK_EXT_DEBUG_REPORT_EXTENSION_NAME, &ext_debug_report)}, + {ExtensionData(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, &ext_debug_utils)}, + {ExtensionData(VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME, &khr_device_group_creation)}, + {ExtensionData(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME, &khr_external_fence_capabilities)}, + {ExtensionData(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, &khr_external_memory_capabilities)}, + {ExtensionData(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME, &khr_external_semaphore_capabilities)}, + {ExtensionData(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, &khr_get_physical_device_properties2)}, + {ExtensionData(VK_KHR_SURFACE_EXTENSION_NAME, &khr_surface)}, + + #ifdef _WIN32 + #if defined(ANVIL_INCLUDE_WIN3264_WINDOW_SYSTEM_SUPPORT) + {ExtensionData(VK_KHR_WIN32_SURFACE_EXTENSION_NAME, &khr_win32_surface)}, + #endif + #else + #if defined(ANVIL_INCLUDE_XCB_WINDOW_SYSTEM_SUPPORT) + {ExtensionData(VK_KHR_XCB_SURFACE_EXTENSION_NAME, &khr_xcb_surface)}, + #endif + #if defined(ANVIL_INCLUDE_WAYLAND_WINDOW_SYSTEM_SUPPORT) + 
{ExtensionData(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME, &khr_wayland_surface)}, + #endif + #endif + #if defined(ANVIL_INCLUDE_HEADLESS_WINDOW_SYSTEM_SUPPORT) + {ExtensionData(VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME, &ext_headless_surface)}, + #endif + }; + + values_by_extension_names = in_values_by_extension_names; + + for (const auto& current_extension_data : recognized_extensions) + { + auto extension_iterator = in_values_by_extension_names.find(current_extension_data.name); + + if (extension_iterator != in_values_by_extension_names.end() ) + { + *current_extension_data.out_result_ptr = extension_iterator->second; + } + else + { + *current_extension_data.out_result_ptr = in_unspecified_extension_name_value; + values_by_extension_names[current_extension_data.name] = in_unspecified_extension_name_value; + } + } + } + }; + }; + + template + class IExtensionInfoDevice + { + public: + virtual ~IExtensionInfoDevice() + { + /* Stub */ + } + + virtual ValueType amd_buffer_marker () const = 0; + virtual ValueType amd_draw_indirect_count () const = 0; + virtual ValueType amd_gcn_shader () const = 0; + virtual ValueType amd_gpu_shader_half_float () const = 0; + virtual ValueType amd_gpu_shader_int16 () const = 0; + virtual ValueType amd_memory_overallocation_behavior () const = 0; + virtual ValueType amd_mixed_attachment_samples () const = 0; + virtual ValueType amd_negative_viewport_height () const = 0; + virtual ValueType amd_rasterization_order () const = 0; + virtual ValueType amd_shader_ballot () const = 0; + virtual ValueType amd_shader_core_properties () const = 0; + virtual ValueType amd_shader_explicit_vertex_parameter() const = 0; + virtual ValueType amd_shader_fragment_mask () const = 0; + virtual ValueType amd_shader_image_load_store_lod () const = 0; + virtual ValueType amd_shader_info () const = 0; + virtual ValueType amd_shader_trinary_minmax () const = 0; + virtual ValueType amd_texture_gather_bias_lod () const = 0; + virtual ValueType ext_conservative_rasterization () const = 0; + virtual ValueType ext_debug_marker () const = 0; + virtual ValueType ext_depth_clip_enable () const = 0; + virtual ValueType ext_depth_range_unrestricted () const = 0; + virtual ValueType ext_descriptor_indexing () const = 0; + virtual ValueType ext_external_memory_host () const = 0; + virtual ValueType ext_global_priority () const = 0; + virtual ValueType ext_hdr_metadata () const = 0; + virtual ValueType ext_inline_uniform_block () const = 0; + virtual ValueType ext_memory_budget () const = 0; + virtual ValueType ext_memory_priority () const = 0; + virtual ValueType ext_pci_bus_info () const = 0; + virtual ValueType ext_queue_family_foreign () const = 0; + virtual ValueType ext_sample_locations () const = 0; + virtual ValueType ext_sampler_filter_minmax () const = 0; + virtual ValueType ext_scalar_block_layout () const = 0; + virtual ValueType ext_separate_stencil_usage () const = 0; + virtual ValueType ext_shader_stencil_export () const = 0; + virtual ValueType ext_shader_subgroup_ballot () const = 0; + virtual ValueType ext_shader_subgroup_vote () const = 0; + virtual ValueType ext_shader_viewport_index_layer () const = 0; + virtual ValueType ext_swapchain_colorspace () const = 0; + virtual ValueType ext_transform_feedback () const = 0; + virtual ValueType ext_vertex_attribute_divisor () const = 0; + virtual ValueType google_decorate_string () const = 0; + virtual ValueType google_hlsl_functionality1 () const = 0; + virtual ValueType khr_16bit_storage () const = 0; + virtual ValueType khr_8bit_storage () 
const = 0; + virtual ValueType khr_bind_memory2 () const = 0; + virtual ValueType khr_create_renderpass2 () const = 0; + virtual ValueType khr_dedicated_allocation () const = 0; + virtual ValueType khr_depth_stencil_resolve () const = 0; + virtual ValueType khr_descriptor_update_template () const = 0; + virtual ValueType khr_device_group () const = 0; + virtual ValueType khr_draw_indirect_count () const = 0; + virtual ValueType khr_driver_properties () const = 0; + virtual ValueType khr_external_fence () const = 0; + virtual ValueType khr_external_memory () const = 0; + virtual ValueType khr_external_semaphore () const = 0; + #if defined(_WIN32) + virtual ValueType khr_external_fence_win32 () const = 0; + virtual ValueType khr_external_memory_win32 () const = 0; + virtual ValueType khr_external_semaphore_win32() const = 0; + #else + virtual ValueType khr_external_fence_fd () const = 0; + virtual ValueType khr_external_memory_fd () const = 0; + virtual ValueType khr_external_semaphore_fd() const = 0; + #endif + virtual ValueType khr_get_memory_requirements2 () const = 0; + virtual ValueType khr_image_format_list () const = 0; + virtual ValueType khr_maintenance1 () const = 0; + virtual ValueType khr_maintenance2 () const = 0; + virtual ValueType khr_maintenance3 () const = 0; + virtual ValueType khr_multiview () const = 0; + virtual ValueType khr_relaxed_block_layout () const = 0; + virtual ValueType khr_sampler_mirror_clamp_to_edge () const = 0; + virtual ValueType khr_sampler_ycbcr_conversion () const = 0; + virtual ValueType khr_shader_atomic_int64 () const = 0; + virtual ValueType khr_shader_draw_parameters () const = 0; + virtual ValueType khr_shader_float16_int8 () const = 0; + virtual ValueType khr_shader_float_controls () const = 0; + virtual ValueType khr_storage_buffer_storage_class () const = 0; + virtual ValueType khr_swapchain () const = 0; + virtual ValueType khr_swapchain_mutable_format () const = 0; + virtual ValueType khr_variable_pointers () const = 0; + virtual ValueType khr_vulkan_memory_model () const = 0; + virtual ValueType nv_external_memory_capabilities () const = 0; + virtual ValueType khr_external_memory_capabilities () const = 0; + virtual ValueType khr_physical_device_properties_2 () const = 0; + + #if defined(_WIN32) + virtual ValueType khr_win32_keyed_mutex() const = 0; + #endif + + virtual ValueType by_name(const std::string& in_name) const = 0; + }; + + template + class IExtensionInfoInstance + { + public: + virtual ~IExtensionInfoInstance() + { + /* Stub */ + } + + virtual bool ext_debug_report () const = 0; + virtual bool ext_debug_utils () const = 0; + virtual bool khr_device_group_creation () const = 0; + virtual bool khr_external_fence_capabilities () const = 0; + virtual bool khr_external_memory_capabilities () const = 0; + virtual bool khr_external_semaphore_capabilities() const = 0; + virtual bool khr_get_physical_device_properties2() const = 0; + virtual bool khr_surface () const = 0; + + #ifdef _WIN32 + #if defined(ANVIL_INCLUDE_WIN3264_WINDOW_SYSTEM_SUPPORT) + virtual bool khr_win32_surface() const = 0; + #endif + #else + #if defined(ANVIL_INCLUDE_XCB_WINDOW_SYSTEM_SUPPORT) + virtual bool khr_xcb_surface() const = 0; + #endif + #if defined(ANVIL_INCLUDE_WAYLAND_WINDOW_SYSTEM_SUPPORT) + virtual bool khr_wayland_surface() const = 0; + #endif + #endif + #if defined(ANVIL_INCLUDE_HEADLESS_WINDOW_SYSTEM_SUPPORT) + virtual bool ext_headless_surface() const = 0; + #endif + + virtual bool by_name(const std::string& in_name) const = 0; + }; + + template + 
class ExtensionInfo : private IExtensionInfoDevice, + private IExtensionInfoInstance + { + public: + /* Public functions */ + static std::unique_ptr > create_device_extension_info(const std::map& in_values_by_extension_names, + const ValueType& in_unspecified_extension_name_value) + { + std::unique_ptr > result_ptr; + + result_ptr.reset( + new ExtensionInfo(true, /* in_device_extensions */ + in_values_by_extension_names, + in_unspecified_extension_name_value) + ); + + return result_ptr; + } + + static std::unique_ptr create_instance_extension_info(const std::map& in_values_by_extension_names, + const ValueType& in_unspecified_extension_name_value) + { + std::unique_ptr > result_ptr; + + result_ptr.reset( + new ExtensionInfo(false, /* in_device_extensions */ + in_values_by_extension_names, + in_unspecified_extension_name_value) + ); + + return result_ptr; + } + + const IExtensionInfoDevice* get_device_extension_info() const + { + anvil_assert(m_expose_device_extensions); + + return this; + } + + const IExtensionInfoInstance* get_instance_extension_info() const + { + anvil_assert(!m_expose_device_extensions); + + return this; + } + + private: + /* Private functions */ + + ExtensionInfo(const bool& in_device_extensions, + const std::map& in_values_by_extension_names, + const ValueType& in_unspecified_extension_name_value) + :m_expose_device_extensions(in_device_extensions) + { + if (in_device_extensions) + { + m_device_extensions_ptr.reset( + new Internal::DeviceExtensions(in_values_by_extension_names, + in_unspecified_extension_name_value) + ); + } + else + { + m_instance_extensions_ptr.reset( + new Internal::InstanceExtensions(in_values_by_extension_names, + in_unspecified_extension_name_value) + ); + } + /* Stub */ + } + + ValueType by_name(const std::string& in_name) const final + { + if (m_expose_device_extensions) + { + return m_device_extensions_ptr->values_by_extension_names.at(in_name); + } + else + { + return m_instance_extensions_ptr->values_by_extension_names.at(in_name); + } + } + + /* IExtensionInfoDevice */ + ValueType amd_buffer_marker() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->amd_buffer_marker; + } + + ValueType amd_draw_indirect_count() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->amd_draw_indirect_count; + } + + ValueType amd_gcn_shader() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->amd_gcn_shader; + } + + ValueType amd_gpu_shader_half_float() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->amd_gpu_shader_half_float; + } + + ValueType amd_gpu_shader_int16() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->amd_gpu_shader_int16; + } + + ValueType amd_memory_overallocation_behavior() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->amd_memory_overallocation_behavior; + } + + ValueType amd_mixed_attachment_samples() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->amd_mixed_attachment_samples; + } + + ValueType amd_negative_viewport_height() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->amd_negative_viewport_height; + } + + ValueType amd_rasterization_order() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->amd_rasterization_order; + } + + 
ValueType amd_shader_ballot() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->amd_shader_ballot; + } + + ValueType amd_shader_explicit_vertex_parameter() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->amd_shader_explicit_vertex_parameter; + } + + ValueType amd_shader_fragment_mask() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->amd_shader_fragment_mask; + } + + ValueType amd_shader_image_load_store_lod() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->amd_shader_image_load_store_lod; + } + + ValueType amd_shader_core_properties() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->amd_shader_core_properties; + } + + ValueType amd_shader_info() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->amd_shader_info; + } + + ValueType amd_shader_trinary_minmax() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->amd_shader_trinary_minmax; + } + + ValueType amd_texture_gather_bias_lod() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->amd_texture_gather_bias_lod; + } + + ValueType ext_conservative_rasterization() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_conservative_rasterization; + } + + ValueType ext_debug_marker() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_debug_marker; + } + + ValueType ext_depth_clip_enable() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_depth_clip_enable; + } + + ValueType ext_depth_range_unrestricted() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_depth_range_unrestricted; + } + + ValueType ext_descriptor_indexing() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_descriptor_indexing; + } + + ValueType ext_external_memory_host() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_external_memory_host; + } + + ValueType ext_global_priority() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_global_priority; + } + + ValueType ext_hdr_metadata() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_hdr_metadata; + } + + ValueType ext_inline_uniform_block() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_inline_uniform_block; + } + + ValueType ext_pci_bus_info() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_pci_bus_info; + } + + ValueType ext_queue_family_foreign() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_queue_family_foreign; + } + + ValueType ext_sampler_filter_minmax() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_sampler_filter_minmax; + } + + ValueType ext_sample_locations() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_sample_locations; + } + + ValueType ext_scalar_block_layout() const final + { + 
anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_scalar_block_layout; + } + + ValueType ext_separate_stencil_usage() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_separate_stencil_usage; + + } + + ValueType ext_shader_stencil_export() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_shader_stencil_export; + } + + ValueType ext_shader_subgroup_ballot() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_shader_subgroup_ballot; + } + + ValueType ext_shader_subgroup_vote() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_shader_subgroup_vote; + } + + ValueType ext_shader_viewport_index_layer() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_shader_viewport_index_layer; + } + + ValueType ext_vertex_attribute_divisor() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_vertex_attribute_divisor; + } + + ValueType ext_swapchain_colorspace() const + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_swapchain_colorspace; + } + + ValueType ext_transform_feedback() const + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_transform_feedback; + } + + ValueType ext_memory_budget() const + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_memory_budget; + } + + ValueType ext_memory_priority() const + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->ext_memory_priority; + } + + ValueType google_decorate_string() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->google_decorate_string; + } + + ValueType google_hlsl_functionality1() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->google_hlsl_functionality1; + } + + ValueType khr_16bit_storage() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_16bit_storage; + } + + ValueType khr_8bit_storage() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_8bit_storage; + } + + ValueType khr_bind_memory2() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_bind_memory2; + } + + ValueType khr_create_renderpass2() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_create_renderpass2; + } + + ValueType khr_dedicated_allocation() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_dedicated_allocation; + } + + ValueType khr_depth_stencil_resolve() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_depth_stencil_resolve; + } + + ValueType khr_descriptor_update_template() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_descriptor_update_template; + } + + ValueType khr_device_group() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_device_group; + } + + ValueType khr_draw_indirect_count() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_draw_indirect_count; + } + + 
ValueType khr_driver_properties() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_driver_properties; + } + + ValueType khr_external_fence() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_external_fence; + } + + #if defined(_WIN32) + ValueType khr_external_fence_win32() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_external_fence_win32; + } + #else + ValueType khr_external_fence_fd() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_external_fence_fd; + } + #endif + + ValueType khr_external_memory() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_external_memory; + } + + #if defined(_WIN32) + ValueType khr_external_memory_win32() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_external_memory_win32; + } + #else + ValueType khr_external_memory_fd() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_external_memory_fd; + } + #endif + + ValueType khr_external_semaphore() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_external_semaphore; + } + + #if defined(_WIN32) + ValueType khr_external_semaphore_win32() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_external_semaphore_win32; + } + #else + ValueType khr_external_semaphore_fd() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_external_semaphore_fd; + } + #endif + + ValueType khr_get_memory_requirements2() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_get_memory_requirements2; + } + + ValueType khr_image_format_list() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_image_format_list; + } + + ValueType khr_maintenance1() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_maintenance1; + } + + ValueType khr_maintenance2() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_maintenance2; + } + + ValueType khr_maintenance3() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_maintenance3; + } + + ValueType khr_multiview() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_multiview; + } + + ValueType khr_relaxed_block_layout() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_relaxed_block_layout; + } + + ValueType khr_sampler_mirror_clamp_to_edge() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_sampler_mirror_clamp_to_edge; + } + + ValueType khr_sampler_ycbcr_conversion() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_sampler_ycbcr_conversion; + } + + ValueType khr_storage_buffer_storage_class() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_storage_buffer_storage_class; + } + + ValueType khr_shader_atomic_int64() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_shader_atomic_int64; + } 
+ + ValueType khr_shader_draw_parameters() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_shader_draw_parameters; + } + + ValueType khr_shader_float16_int8() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_shader_float16_int8; + } + + ValueType khr_shader_float_controls() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_shader_float_controls; + } + + ValueType khr_swapchain() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_swapchain; + } + + ValueType khr_swapchain_mutable_format() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_swapchain_mutable_format; + } + + ValueType khr_variable_pointers() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_variable_pointers; + } + + ValueType khr_vulkan_memory_model() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_vulkan_memory_model; + } + + ValueType nv_external_memory_capabilities() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->nv_external_memory_capabilities; + } + + ValueType khr_physical_device_properties_2() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_physical_device_properties_2; + } + + #if defined(_WIN32) + ValueType khr_win32_keyed_mutex() const final + { + anvil_assert(m_expose_device_extensions); + + return m_device_extensions_ptr->khr_win32_keyed_mutex; + } + #endif + + /* IExtensionInfoInstance */ + + ValueType ext_debug_report() const final + { + anvil_assert(!m_expose_device_extensions); + + return m_instance_extensions_ptr->ext_debug_report; + } + + ValueType ext_debug_utils() const final + { + anvil_assert(!m_expose_device_extensions); + + return m_instance_extensions_ptr->ext_debug_utils; + } + + ValueType khr_device_group_creation() const final + { + anvil_assert(!m_expose_device_extensions); + + return m_instance_extensions_ptr->khr_device_group_creation; + } + + ValueType khr_external_fence_capabilities() const final + { + anvil_assert(!m_expose_device_extensions); + + return m_instance_extensions_ptr->khr_external_fence_capabilities; + } + + ValueType khr_external_memory_capabilities() const final + { + anvil_assert(!m_expose_device_extensions); + + return m_instance_extensions_ptr->khr_external_memory_capabilities; + } + + ValueType khr_external_semaphore_capabilities() const final + { + anvil_assert(!m_expose_device_extensions); + + return m_instance_extensions_ptr->khr_external_semaphore_capabilities; + } + + ValueType khr_get_physical_device_properties2() const final + { + anvil_assert(!m_expose_device_extensions); + + return m_instance_extensions_ptr->khr_get_physical_device_properties2; + } + + ValueType khr_surface() const final + { + anvil_assert(!m_expose_device_extensions); + + return m_instance_extensions_ptr->khr_surface; + } + + #ifdef _WIN32 + #if defined(ANVIL_INCLUDE_WIN3264_WINDOW_SYSTEM_SUPPORT) + ValueType khr_win32_surface() const final + { + anvil_assert(!m_expose_device_extensions); + + return m_instance_extensions_ptr->khr_win32_surface; + } + #endif + #else + #if defined(ANVIL_INCLUDE_XCB_WINDOW_SYSTEM_SUPPORT) + ValueType khr_xcb_surface() const final + { + anvil_assert(!m_expose_device_extensions); + + return 
m_instance_extensions_ptr->khr_xcb_surface; + } + #endif + #if defined(ANVIL_INCLUDE_WAYLAND_WINDOW_SYSTEM_SUPPORT) + ValueType khr_wayland_surface() const final + { + anvil_assert(!m_expose_device_extensions); + + return m_instance_extensions_ptr->khr_wayland_surface; + } + #endif + #endif + #if defined(ANVIL_INCLUDE_HEADLESS_WINDOW_SYSTEM_SUPPORT) + ValueType ext_headless_surface() const final + { + anvil_assert(!m_expose_device_extensions); + + return m_instance_extensions_ptr->ext_headless_surface; + } + #endif + + /* Private variables */ + + std::unique_ptr > m_device_extensions_ptr; + bool m_expose_device_extensions; + std::unique_ptr > m_instance_extensions_ptr; + }; + + /** A struct which tells which extensions must (or should, if supported by the physical device) be enabled + * at device creation time. + */ + typedef struct DeviceExtensionConfiguration + { + DeviceExtensionConfiguration() + { + /* NOTE: By default, Anvil enables nearly all extensions it has support implemented for, which are reported + * as available. + */ + { + Internal::DeviceExtensions reference(std::map(), + false); /* in_unspecified_extension_name_value */ + + for (const auto& current_extension_data : reference.values_by_extension_names) + { + extension_status[current_extension_data.first] = Anvil::ExtensionAvailability::ENABLE_IF_AVAILABLE; + } + } + + /* + * A few exceptions exist. + * + * 1. VK_AMD_negative_viewport_height interacts with KHR_maintenance1, apps will have to enable it manually. */ + extension_status[VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME] = Anvil::ExtensionAvailability::IGNORE; + + /* 2. VK_EXT_debug_marker is only useful for debugging. */ + #if !defined(_DEBUG) + { + extension_status[VK_EXT_DEBUG_MARKER_EXTENSION_NAME] = Anvil::ExtensionAvailability::IGNORE; + } + #endif + + /* 3. Disable VK_AMD_shader_info by default. */ + extension_status[VK_AMD_SHADER_INFO_EXTENSION_NAME] = Anvil::ExtensionAvailability::IGNORE; + } + + std::map extension_status; + + bool operator==(const DeviceExtensionConfiguration& in_config) const + { + return (extension_status == in_config.extension_status); + } + } DeviceExtensionConfiguration; +}; + +#endif /* MISC_EXTENSIONS_H */ \ No newline at end of file diff --git a/include/misc/external_handle.h b/include/misc/external_handle.h new file mode 100644 index 00000000..2651a9fa --- /dev/null +++ b/include/misc/external_handle.h @@ -0,0 +1,70 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
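The DeviceExtensionConfiguration struct above defaults every extension Anvil implements to ENABLE_IF_AVAILABLE and then opts a few problematic ones out. An application that wants different behaviour only has to overwrite individual entries of extension_status before the configuration is consumed at device creation time. A minimal sketch (the device-creation call that consumes the struct is assumed and not shown here):

#include "misc/extensions.h"

Anvil::DeviceExtensionConfiguration make_extension_config()
{
    Anvil::DeviceExtensionConfiguration config;

    /* Re-enable VK_AMD_shader_info, which the defaults above leave ignored. */
    config.extension_status[VK_AMD_SHADER_INFO_EXTENSION_NAME] = Anvil::ExtensionAvailability::ENABLE_IF_AVAILABLE;

    /* Skip an extension this application never uses. */
    config.extension_status[VK_EXT_DEBUG_MARKER_EXTENSION_NAME] = Anvil::ExtensionAvailability::IGNORE;

    return config;
}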
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef MISC_EXTERNAL_HANDLE_H +#define MISC_EXTERNAL_HANDLE_H + +#include "misc/types.h" + +namespace Anvil +{ + class ExternalHandle + { + public: + /* Public functions */ + static ExternalHandleUniquePtr create(const ExternalHandleType& in_handle, + const bool& in_close_at_destruction_time); + + ~ExternalHandle(); + + ExternalHandleType get_handle() const + { + return m_handle; + } + + /* If a payload of an object exported to a NT handle is imported to another object, the ownership is passed + * to the new object. + * + * For NT handles, it is assumed the handle should be destroyed when the wrapper goes out of scope. If the above + * import is performed, you MUST tell ExternalHandle to release the ownership of the handle, or else it will leak. + * + * Under Linux, ownership of the underlying FD is transferred to the app at export time, and back to the driver at import time. + * If the wrapper has been created with in_close_at_destruction_time set to true and an exported external handle IS imported, + * you need to call this function in order to avoid double release of the FD. + */ + void release_ownership() + { + m_close_at_destruction_time = false; + } + + private: + ExternalHandle(const ExternalHandleType& in_handle, + const bool& in_close_at_destruction_time); + + bool m_close_at_destruction_time; + const ExternalHandleType m_handle; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(ExternalHandle); + ANVIL_DISABLE_COPY_CONSTRUCTOR(ExternalHandle); + }; +} /* namespace Anvil */ + +#endif /* MISC_EXTERNAL_HANDLE_H */ diff --git a/include/misc/fence_create_info.h b/include/misc/fence_create_info.h new file mode 100644 index 00000000..59973d49 --- /dev/null +++ b/include/misc/fence_create_info.h @@ -0,0 +1,167 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef MISC_FENCE_CREATE_INFO_H +#define MISC_FENCE_CREATE_INFO_H + +#include "misc/types.h" + +namespace Anvil +{ + class FenceCreateInfo + { + public: + /* Public functions */ + + /* TODO. 
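The ownership rules spelled out above are the part of ExternalHandle that is easiest to get wrong, so a short sketch may help. The raw handle is assumed to come from an export operation elsewhere, and import_into_other_object() stands in for whatever import path the application uses; both are hypothetical here.

#include "misc/external_handle.h"

bool import_into_other_object(Anvil::ExternalHandleType in_handle); /* hypothetical import path, defined elsewhere */

void wrap_exported_handle(Anvil::ExternalHandleType exported_handle)
{
    /* Let the wrapper close the handle/FD when it goes out of scope .. */
    auto handle_ptr = Anvil::ExternalHandle::create(exported_handle,
                                                    true); /* in_close_at_destruction_time */

    /* .. unless the payload is imported into another object. Importing transfers
     * ownership of the underlying handle, so the wrapper has to be told to stop
     * managing it, or the handle would be released twice.
     */
    const bool was_imported = import_into_other_object(handle_ptr->get_handle() );

    if (was_imported)
    {
        handle_ptr->release_ownership();
    }
}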
+ * + * NOTE: Unless specified later with a corresponding set_..() invocation, the following parameters are assumed by default: + * + * - Exportable external fence handle type: none + * - MT safety: Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE + */ + static Anvil::FenceCreateInfoUniquePtr create(const Anvil::BaseDevice* in_device_ptr, + bool in_create_signalled); + + const Anvil::BaseDevice* get_device() const + { + return m_device_ptr; + } + + const Anvil::ExternalFenceHandleTypeFlags& get_exportable_external_fence_handle_types() const + { + return m_exportable_external_fence_handle_types; + } + + #if defined(_WIN32) + /* Returns true if set_exportable_nt_handle_info() has been called prior to this call. + * Otherwise returns false. + * + * If the func returns true, *out_result_ptr is set to the queried data. + */ + bool get_exportable_nt_handle_info(const ExternalNTHandleInfo** out_result_ptr_ptr) const + { + bool result = false; + + if (m_exportable_nt_handle_info_specified) + { + *out_result_ptr_ptr = &m_exportable_nt_handle_info; + result = true; + } + + return result; + } + #endif + + const MTSafety& get_mt_safety() const + { + return m_mt_safety; + } + + const bool& should_create_signalled() const + { + return m_create_signalled; + } + + #if defined(_WIN32) + /* Lets the app specify additional details for exportable NT handles. + * + * If @param in_name is zero-sized, member of the VkExportFenceWin32HandleInfoKHR struct, as chained to the VkFenceCreateInfo struct chain, + * will be set to nullptr. + * + * Requires VK_KHR_external_fence_win32 + */ + void set_exportable_nt_handle_info(const SECURITY_ATTRIBUTES* in_opt_attributes_ptr, + const DWORD& in_access, + const std::wstring& in_name) + { + anvil_assert(!m_exportable_nt_handle_info_specified); + + m_exportable_nt_handle_info.access = in_access; + m_exportable_nt_handle_info.name = in_name; + m_exportable_nt_handle_info_specified = true; + + if (in_opt_attributes_ptr != nullptr) + { + m_exportable_nt_handle_info_security_attributes = *in_opt_attributes_ptr; + m_exportable_nt_handle_info_security_attributes_specified = true; + + m_exportable_nt_handle_info.attributes_ptr = &m_exportable_nt_handle_info_security_attributes; + } + else + { + m_exportable_nt_handle_info.attributes_ptr = nullptr; + m_exportable_nt_handle_info_security_attributes_specified = false; + } + + } + #endif + + /* TODO + * + * Requires VK_KHR_external_fence. 
+ */ + void set_exportable_external_fence_handle_types(const Anvil::ExternalFenceHandleTypeFlags& in_external_fence_handle_types) + { + m_exportable_external_fence_handle_types = in_external_fence_handle_types; + } + + void set_device(const Anvil::BaseDevice* in_device_ptr) + { + m_device_ptr = in_device_ptr; + } + + void set_mt_safety(const MTSafety& in_mt_safety) + { + m_mt_safety = in_mt_safety; + } + + void set_should_create_signalled(const bool& in_create_signalled) + { + m_create_signalled = in_create_signalled; + } + + private: + /* Private functions */ + FenceCreateInfo(const Anvil::BaseDevice* in_device_ptr, + bool in_create_signalled, + MTSafety in_mt_safety); + + /* Private variables */ + bool m_create_signalled; + const Anvil::BaseDevice * m_device_ptr; + Anvil::ExternalFenceHandleTypeFlags m_exportable_external_fence_handle_types; + Anvil::MTSafety m_mt_safety; + + #ifdef _WIN32 + ExternalNTHandleInfo m_exportable_nt_handle_info; + SECURITY_ATTRIBUTES m_exportable_nt_handle_info_security_attributes; + bool m_exportable_nt_handle_info_security_attributes_specified; + bool m_exportable_nt_handle_info_specified; + #endif + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(FenceCreateInfo); + ANVIL_DISABLE_COPY_CONSTRUCTOR(FenceCreateInfo); + }; + +}; /* namespace Anvil */ + +#endif /* MISC_FENCE_CREATE_INFO_H */ \ No newline at end of file diff --git a/include/misc/formats.h b/include/misc/formats.h index 738abad0..aa62f579 100644 --- a/include/misc/formats.h +++ b/include/misc/formats.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -32,10 +32,33 @@ namespace Anvil class Formats { public: - /** Returns a VkFormat which meets the user-specified criteria. + /* Returns a list of formats compatible with @param in_format. + * + * The returned array includes @param in_format. + * + * @param in_format Format to use to determine compatibility class to use for the query. + * @param out_n_compatible_formats_ptr Deref will be set to the number of formats accessible under *out_compatible_formats_ptr_ptr. + * Must not be nullptr. + * @param out_compatible_formats_ptr_ptr Deref will be set to a ptr to a linear list of formats compatible with @param in_format. + * The list MAY include YUV formats. + * + * @return true if successful, false otherwise. + */ + static bool get_compatible_formats(Anvil::Format in_format, + uint32_t* out_n_compatible_formats_ptr, + const Anvil::Format** out_compatible_formats_ptr_ptr); + + static bool get_compressed_format_block_size(Anvil::Format in_format, + uint32_t* out_block_size_uvec2_ptr, + uint32_t* out_n_bytes_per_block_ptr); + + /** Returns an Anvil::Format which meets the user-specified criteria. * * This function does not support block formats. * + * This function will only return one of the non-YUV formats whose component sizes + * match the specified number of bits. + * * For formats which use less than 4 components, set irrelevant n_component*_bits arguments * to 0. * @@ -49,35 +72,125 @@ namespace Anvil * @param in_n_component3_bits Number of bits used by the third component. Pass 0 if the component * is irrelevant. 
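Taken together, the setters above make an exportable fence a two-step recipe: create the info object, then opt into the handle types (and, on Windows, the NT-handle details) before the fence itself is created from it. A minimal sketch, assuming a valid device pointer and a caller-chosen ExternalFenceHandleTypeFlags value; the access mask and handle name used on the Windows path are illustrative only:

#include "misc/fence_create_info.h"

Anvil::FenceCreateInfoUniquePtr make_exportable_fence_info(const Anvil::BaseDevice*                   device_ptr,
                                                           const Anvil::ExternalFenceHandleTypeFlags& handle_types)
{
    auto fence_create_info_ptr = Anvil::FenceCreateInfo::create(device_ptr,
                                                                false); /* in_create_signalled */

    /* Requires VK_KHR_external_fence, as noted above. */
    fence_create_info_ptr->set_exportable_external_fence_handle_types(handle_types);

#if defined(_WIN32)
    /* Optional extras for NT handles (requires VK_KHR_external_fence_win32).
     * Default security attributes are used when nullptr is passed; the access
     * mask and name below are placeholders chosen for illustration.
     */
    fence_create_info_ptr->set_exportable_nt_handle_info(nullptr,         /* in_opt_attributes_ptr */
                                                         GENERIC_ALL,     /* in_access             */
                                                         L"anvil-fence"); /* in_name               */
#endif

    return fence_create_info_ptr;
}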
* - * @return A corresponding VkFormat value OR VK_FORMAT_UNKNOWN if no recognized Vulkan format + * @return A corresponding Anvil::Format value OR Anvil::Format::UNKNOWN if no recognized Vulkan format * meets the specified requirements. */ - static VkFormat get_format(ComponentLayout in_component_layout, - FormatType in_format_type, - uint32_t in_n_component0_bits, - uint32_t in_n_component1_bits, - uint32_t in_n_component2_bits, - uint32_t in_n_component3_bits); + static Anvil::Format get_format(ComponentLayout in_component_layout, + FormatType in_format_type, + uint32_t in_n_component0_bits, + uint32_t in_n_component1_bits, + uint32_t in_n_component2_bits, + uint32_t in_n_component3_bits); /** Returns image aspects exposed by a given image format. + * + * Supports both non-YUV and YUV formats. * * @param in_format Format to use for the query. * @param out_aspects_ptr Deref will be used to store the result data. Must not be NULL. * * @return true if successful, false otherwise. **/ - static bool get_format_aspects(VkFormat in_format, - std::vector* out_aspects_ptr); + static bool get_format_aspects(Anvil::Format in_format, + std::vector* out_aspects_ptr); + + /** Returns bit layout for the specified format. + * + * NOTE: Only non-compressed non-YUV formats are supported. + * NOTE: Components not used by the specified format have start and end bit indices set to UINT32_MAX. + * + * @param in_format Non-compressed format to use for the query. + * @param out_opt_red_component_start_bit_index_ptr If not null, deref will be set to the bit index, from which red component data starts. + * @param out_opt_red_component_end_bit_index_ptr If not null, deref will be set to the bit index, at which red component data ends. (data under the bit includes the data!) + * @param out_opt_green_component_start_bit_index_ptr If not null, deref will be set to the bit index, from which green component data starts. + * @param out_opt_green_component_end_bit_index_ptr If not null, deref will be set to the bit index, at which green component data ends. (data under the bit includes the data!) + * @param out_opt_blue_component_start_bit_index_ptr If not null, deref will be set to the bit index, from which blue component data starts. + * @param out_opt_blue_component_end_bit_index_ptr If not null, deref will be set to the bit index, at which blue component data ends. (data under the bit includes the data!) + * @param out_opt_alpha_component_start_bit_index_ptr If not null, deref will be set to the bit index, from which alpha component data starts. + * @param out_opt_alpha_component_end_bit_index_ptr If not null, deref will be set to the bit index, at which alpha component data ends. (data under the bit includes the data!) + * @param out_opt_shared_component_start_bit_index_ptr If not null, deref will be set to the bit index, from which shared component data starts. + * @param out_opt_shared_component_end_bit_index_ptr If not null, deref will be set to the bit index, at which shared component data ends. (data under the bit includes the data!) + * @param out_opt_depth_component_start_bit_index_ptr If not null, deref will be set to the bit index, from which depth component data starts. + * @param out_opt_depth_component_end_bit_index_ptr If not null, deref will be set to the bit index, at which depth component data ends. (data under the bit includes the data!) + * @param out_opt_stencil_component_start_bit_index_ptr If not null, deref will be set to the bit index, from which stencil component data starts. 
+ * @param out_opt_stencil_component_end_bit_index_ptr If not null, deref will be set to the bit index, at which stencil component data ends. (data under the bit includes the data!) + */ + static void get_format_bit_layout_nonyuv(Anvil::Format in_format, + uint32_t* out_opt_red_component_start_bit_index_ptr = nullptr, + uint32_t* out_opt_red_component_end_bit_index_ptr = nullptr, + uint32_t* out_opt_green_component_start_bit_index_ptr = nullptr, + uint32_t* out_opt_green_component_end_bit_index_ptr = nullptr, + uint32_t* out_opt_blue_component_start_bit_index_ptr = nullptr, + uint32_t* out_opt_blue_component_end_bit_index_ptr = nullptr, + uint32_t* out_opt_alpha_component_start_bit_index_ptr = nullptr, + uint32_t* out_opt_alpha_component_end_bit_index_ptr = nullptr, + uint32_t* out_opt_shared_component_start_bit_index_ptr = nullptr, + uint32_t* out_opt_shared_component_end_bit_index_ptr = nullptr, + uint32_t* out_opt_depth_component_start_bit_index_ptr = nullptr, + uint32_t* out_opt_depth_component_end_bit_index_ptr = nullptr, + uint32_t* out_opt_stencil_component_start_bit_index_ptr = nullptr, + uint32_t* out_opt_stencil_component_end_bit_index_ptr = nullptr); + + /* Works analogously to get_format_bit_layout_nonyuv() but only supports YUV formats. */ + static void get_format_bit_layout_yuv(Anvil::Format in_format, + uint32_t* out_opt_plane0_r0_start_bit_index_ptr = nullptr, + uint32_t* out_opt_plane0_r0_end_bit_index_ptr = nullptr, + uint32_t* out_opt_plane0_g0_start_bit_index_ptr = nullptr, + uint32_t* out_opt_plane0_g0_end_bit_index_ptr = nullptr, + uint32_t* out_opt_plane0_b0_start_bit_index_ptr = nullptr, + uint32_t* out_opt_plane0_b0_end_bit_index_ptr = nullptr, + uint32_t* out_opt_plane0_a0_start_bit_index_ptr = nullptr, + uint32_t* out_opt_plane0_a0_end_bit_index_ptr = nullptr, + uint32_t* out_opt_plane0_g1_start_bit_index_ptr = nullptr, + uint32_t* out_opt_plane0_g1_end_bit_index_ptr = nullptr, + uint32_t* out_opt_plane1_r0_start_bit_index_ptr = nullptr, + uint32_t* out_opt_plane1_r0_end_bit_index_ptr = nullptr, + uint32_t* out_opt_plane1_g0_start_bit_index_ptr = nullptr, + uint32_t* out_opt_plane1_g0_end_bit_index_ptr = nullptr, + uint32_t* out_opt_plane1_b0_start_bit_index_ptr = nullptr, + uint32_t* out_opt_plane1_b0_end_bit_index_ptr = nullptr, + uint32_t* out_opt_plane2_r0_start_bit_index_ptr = nullptr, + uint32_t* out_opt_plane2_r0_end_bit_index_ptr = nullptr, + uint32_t* out_opt_plane2_g0_start_bit_index_ptr = nullptr, + uint32_t* out_opt_plane2_g0_end_bit_index_ptr = nullptr, + uint32_t* out_opt_plane2_b0_start_bit_index_ptr = nullptr, + uint32_t* out_opt_plane2_b0_end_bit_index_ptr = nullptr); + + /** Tells what component layout is used by @param in_format. + * + * NOTE: This function does NOT support YUV KHR format. Please check the overloaded function if + * you would like to use for YUV KHR format. + */ + static ComponentLayout get_format_component_layout_nonyuv(Anvil::Format in_format); + + /** Tells what component layout is used by @param in_format at specified subresource @param in_aspect. + * + * NOTE: Only YUV KHR formats are supported. + */ + static ComponentLayout get_format_component_layout_yuv(Anvil::Format in_format, + Anvil::ImageAspectFlagBits in_aspect); - /** Tells what component layout is used by @param in_format. */ - static ComponentLayout get_format_component_layout(VkFormat in_format); + /** Tells the number of components used by @param in_format + * + * NOTE: This function does NOT support YUV KHR formats. 
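Since get_format_bit_layout_nonyuv() above reports exclusively through optional out-parameters, a caller passes pointers only for the channels it cares about and leaves the rest null. A minimal sketch of querying just the depth and stencil bit ranges of a non-compressed, non-YUV format:

#include "misc/formats.h"

#include <cstdint>

void query_depth_stencil_bits(Anvil::Format in_format)
{
    uint32_t depth_start_bit   = UINT32_MAX;
    uint32_t depth_end_bit     = UINT32_MAX;
    uint32_t stencil_start_bit = UINT32_MAX;
    uint32_t stencil_end_bit   = UINT32_MAX;

    /* Channels that are not of interest are skipped by passing nullptr. */
    Anvil::Formats::get_format_bit_layout_nonyuv(in_format,
                                                 nullptr, nullptr,  /* red    */
                                                 nullptr, nullptr,  /* green  */
                                                 nullptr, nullptr,  /* blue   */
                                                 nullptr, nullptr,  /* alpha  */
                                                 nullptr, nullptr,  /* shared */
                                                 &depth_start_bit,   &depth_end_bit,
                                                 &stencil_start_bit, &stencil_end_bit);

    /* Components the format does not define keep the UINT32_MAX sentinel,
     * matching the note in the declaration above.
     */
}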
+ */ + static uint32_t get_format_n_components_nonyuv(Anvil::Format in_format); - /** Tells the number of components used by @param in_format */ - static uint32_t get_format_n_components(VkFormat in_format); + /** Tells the number of components used by @param in_format under specified subresource @param in_aspect. + * + * NOTE: Only YUV KHR formats are supported. + */ + static uint32_t get_format_n_components_yuv(Anvil::Format in_format, + Anvil::ImageAspectFlagBits in_aspect); /* Tells the number of bits used for each component in case of Vulkan format specified * under @param in_format. * + * NOTE: This function does NOT support YUV KHR format. Please check the overloaded function if + * you would like to use for YUV KHR format. + * NOTE: Number of bits reported for each component uses ordering as reported for the format + * via get_format_component_layout(). This is especially important in the context of packed formats. + * * @param in_format Vulkan format to use for the query. * @param out_channel0_bits_ptr Deref will be set to the number of bits used for channel 0. Must * not be nullptr. @@ -88,20 +201,137 @@ namespace Anvil * @param out_channel3_bits_ptr Deref will be set to the number of bits used for channel 3. Must * not be nullptr. */ - static void get_format_n_component_bits(VkFormat in_format, - uint32_t* out_channel0_bits_ptr, - uint32_t* out_channel1_bits_ptr, - uint32_t* out_channel2_bits_ptr, - uint32_t* out_channel3_bits_ptr); + static void get_format_n_component_bits_nonyuv(Anvil::Format in_format, + uint32_t* out_channel0_bits_ptr, + uint32_t* out_channel1_bits_ptr, + uint32_t* out_channel2_bits_ptr, + uint32_t* out_channel3_bits_ptr); - /* Returns a raw C string for specified format, or NULL if the format is unknown. */ - static const char* get_format_name(VkFormat in_format); + /* Tells the number of bits used for each component in case of Vulkan format specified + * under @param in_format at subresource @param in_aspect. + * + * NOTE: Only YUV KHR fromats are supported. + * NOTE: Number of bits reported for each component uses ordering as reported for the format + * via get_format_component_layout(). This is especially important in the context of packed formats. + * + * @param in_format Vulkan format to use for the query. + * @param in_aspect Image aspect to use for the query. + * @param out_channel0_bits_ptr Deref will be set to the number of bits used for channel 0. Must + * not be nullptr. + * @param out_channel1_bits_ptr Deref will be set to the number of bits used for channel 1. Must + * not be nullptr. + * @param out_channel2_bits_ptr Deref will be set to the number of bits used for channel 2. Must + * not be nullptr. + * @param out_channel3_bits_ptr Deref will be set to the number of bits used for channel 3. Must + * not be nullptr. + */ + static void get_format_n_component_bits_yuv(Anvil::Format in_format, + Anvil::ImageAspectFlagBits in_aspect, + uint32_t* out_channel0_bits_ptr, + uint32_t* out_channel1_bits_ptr, + uint32_t* out_channel2_bits_ptr, + uint32_t* out_channel3_bits_ptr); + + /** Tells the number of planes exposed by the specified format. + * + * For non-YUV formats, this function always returns 1. + * + * @param in_format Format to use for the query. + */ + static uint32_t get_format_n_planes(Anvil::Format in_format); + + /** Tells the number of bits unused for each component in case of Vulkan format specified + * under @param in_format at subresource @param in_aspect. + * + * NOTE: Only YUV KHR formats are supported. 
+ * NOTE: Number of bits reported for each component uses ordering as reported for the format + * via get_format_component_layout(). This is especially important in the context of packed formats. + * + * @param in_format Vulkan format to use for the query. + * @param in_aspect Image aspect to use for the query. + * @param out_channel0_unused_bits_ptr Deref will be set to the number of bits unused for channel 0. Must + * not be nullptr. + * @param out_channel1_unused_bits_ptr Deref will be set to the number of bits unused for channel 1. Must + * not be nullptr. + * @param out_channel2_unused_bits_ptr Deref will be set to the number of bits unused for channel 2. Must + * not be nullptr. + * @param out_channel3_unused_bits_ptr Deref will be set to the number of bits unused for channel 3. Must + * not be nullptr. + */ + static void get_format_n_unused_component_bits_yuv(Anvil::Format in_format, + Anvil::ImageAspectFlagBits in_aspect, + uint32_t* out_channel0_unused_bits_ptr, + uint32_t* out_channel1_unused_bits_ptr, + uint32_t* out_channel2_unused_bits_ptr, + uint32_t* out_channel3_unused_bits_ptr); - /** Tells the format type used by @param in_format. */ - static FormatType get_format_type(VkFormat in_format); + /* Returns a raw C string for specified format, or NULL if the format is unknown. + * + * Both non-YUV and YUV formats are supported. + */ + static const char* get_format_name(Anvil::Format in_format); + + /** Tells the format type used by @param in_format. + * + * Both non-YUV and YUV formats are supported. + */ + static FormatType get_format_type(Anvil::Format in_format); + + /** Returns the extent of subresource @param in_aspect with specified format @param in_format. + * + * NOTE: Only YUV KHR formats are supported. + * + * @param in_format VUlkan format. + * @param in_aspect Image aspect for specific subresource of the format. + * @param in_image_extent Image extent specified when creating image. + * @param out_plane_extent_ptr Deref will be set to the extent of the specified subresource. + * Must not be nullptr. + */ + static void get_yuv_format_plane_extent(Anvil::Format in_format, + Anvil::ImageAspectFlagBits in_aspect, + VkExtent3D in_image_extent, + VkExtent3D* out_plane_extent_ptr); + + /** Tells whether @param in_format includes a depth aspect. + * + * NOTE: YUV KHR formats are NOT supported. + */ + static bool has_depth_aspect(Anvil::Format in_format); + + /** Tells whether @param in_format includes a stencil aspect. + * + * NOTE: YUV KHR formats are NOT supported. + */ + static bool has_stencil_aspect(Anvil::Format in_format); /** Tells whether @param in_format format is a block format. */ - static bool is_format_compressed(VkFormat in_format); + static bool is_format_compressed(Anvil::Format in_format); + + /** Tells whether @param in_format format is a multiplanar format. */ + static bool is_format_multiplanar(Anvil::Format in_format); + + /** Tells whether @param in_format is a KHR YUV format */ + static bool is_format_yuv_khr(Anvil::Format in_format); + + /** Tells whether @param in_format is a packed format. + * + * Both YUV and non-YUV formats are supported. + **/ + static bool is_format_packed(Anvil::Format in_format); + + private: + /** Returns the index of subresource info by given @param in_format and @param in_aspect. + * + * NOTE: Only YUV KHR formats are supported. + */ + static uint32_t get_yuv_format_plane_index(Anvil::Format in_format, + Anvil::ImageAspectFlagBits in_aspect); + + /** Returns the number of subresources by given @param in_format. 
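The plane-oriented queries above combine naturally: check that the format really is a YUV KHR format, then size each plane. The per-plane aspect is taken as a parameter below because the concrete aspect enumerators live elsewhere in Anvil; treat this as an illustrative sketch rather than a prescribed pattern:

#include "misc/formats.h"

/* Returns the extent of one plane of a YUV image, or the unmodified image extent
 * for formats that expose a single plane. `in_plane_aspect` is assumed to be one
 * of Anvil's per-plane image aspect bits valid for `in_format`.
 */
VkExtent3D compute_plane_extent(Anvil::Format              in_format,
                                Anvil::ImageAspectFlagBits in_plane_aspect,
                                VkExtent3D                 in_image_extent)
{
    VkExtent3D result = in_image_extent;

    if (Anvil::Formats::is_format_yuv_khr  (in_format)      &&
        Anvil::Formats::get_format_n_planes(in_format) > 1)
    {
        Anvil::Formats::get_yuv_format_plane_extent(in_format,
                                                    in_plane_aspect,
                                                    in_image_extent,
                                                   &result);
    }

    return result;
}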
+ * + * NOTE: Only YUV KHR formats are supported. + */ + static uint32_t get_yuv_format_n_planes(Anvil::Format in_format); }; }; diff --git a/include/misc/fp16.h b/include/misc/fp16.h index c30d4799..eb34d7d5 100644 --- a/include/misc/fp16.h +++ b/include/misc/fp16.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal diff --git a/include/misc/framebuffer_create_info.h b/include/misc/framebuffer_create_info.h new file mode 100644 index 00000000..494e45cf --- /dev/null +++ b/include/misc/framebuffer_create_info.h @@ -0,0 +1,184 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef MISC_FRAMEBUFFER_CREATE_INFO_H +#define MISC_FRAMEBUFFER_CREATE_INFO_H + +#include "misc/types.h" + + +namespace Anvil +{ + class FramebufferCreateInfo + { + public: + /* Public functions */ + + /* TODO. + * + * NOTE: Unless specified later with a corresponding set_..() invocation, the following parameters are assumed by default: + * + * - MT safety: Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE + */ + static Anvil::FramebufferCreateInfoUniquePtr create(const Anvil::BaseDevice* in_device_ptr, + const uint32_t& in_width, + const uint32_t& in_height, + const uint32_t& in_n_layers); + + bool add_attachment(ImageView* in_image_view_ptr, + FramebufferAttachmentID* out_opt_attachment_id_ptr); + + /* Returns an attachment at user-specified index */ + bool get_attachment_at_index(uint32_t in_attachment_index, + ImageView** out_image_view_ptr_ptr) const; + + /** Checks if an attachment has already been created for the specified image view and, if so, + * returns the attachment's ID. + * + * @param in_image_view_ptr Image view to use for the query. Must not be nullptr. + * @param out_attachment_id_ptr If the function executes successfully, deref will be set to + * the found attachment's ID, in which case the pointer must not + * be nullptr. Otherwise, ignored. + * + * @return true if successful (eg. the attachment corresponding to the specified image view was + * found), false otherwise. 
+ **/ + bool get_attachment_id_for_image_view(ImageView* in_image_view_ptr, + FramebufferAttachmentID* out_attachment_id_ptr) const; + + const Anvil::BaseDevice* get_device() const + { + return m_device_ptr; + } + + uint32_t get_height() const + { + return m_height; + } + + Anvil::MTSafety get_mt_safety() const + { + return m_mt_safety; + } + + /** Returns the number of attachments defined for the framebuffer. */ + uint32_t get_n_attachments() const + { + return static_cast(m_attachments.size() ); + } + + uint32_t get_n_layers() const + { + return m_n_layers; + } + + /** Returns framebuffer size, specified at creation time */ + void get_size(uint32_t* out_framebuffer_width_ptr, + uint32_t* out_framebuffer_height_ptr, + uint32_t* out_framebuffer_depth_ptr) const; + + uint32_t get_width() const + { + return m_width; + } + + void set_device(const Anvil::BaseDevice* in_device_ptr) + { + m_device_ptr = in_device_ptr; + } + + void set_height(const uint32_t& in_height) + { + m_height = in_height; + } + + void set_mt_safety(const Anvil::MTSafety& in_mt_safety) + { + m_mt_safety = in_mt_safety; + } + + void set_n_layers(const uint32_t& in_n_layers) + { + m_n_layers = in_n_layers; + } + + void set_width(const uint32_t& in_width) + { + m_width = in_width; + } + + private: + /* Private type definitions */ + + typedef struct FramebufferAttachment + { + ImageView* image_view_ptr; + + /** Constructor. Retains the input image view instance. + * + * @param in_image_view_ptr Image view instance to use for the FB attachment. Must not + * be nullptr. + **/ + FramebufferAttachment(ImageView* in_image_view_ptr); + + /** Destructor. Releases the encapsulated image view instance. */ + ~FramebufferAttachment(); + + /** Copy constructor */ + FramebufferAttachment(const FramebufferAttachment& in); + + /** Assignment operator */ + FramebufferAttachment& operator=(const FramebufferAttachment& in); + + /** Returns true if the encapsulated image view instance is the same as the one + * specified uner @param in_image_view_ptr argument. + **/ + bool operator==(const ImageView* in_image_view_ptr) const + { + return (image_view_ptr == in_image_view_ptr); + } + } FramebufferAttachment; + + typedef std::vector FramebufferAttachments; + + /* Private functions */ + + FramebufferCreateInfo(const Anvil::BaseDevice* in_device_ptr, + const uint32_t& in_width, + const uint32_t& in_height, + const uint32_t& in_n_layers, + MTSafety in_mt_safety); + + /* Private variables */ + FramebufferAttachments m_attachments; + + const Anvil::BaseDevice* m_device_ptr; + uint32_t m_height; + MTSafety m_mt_safety; + uint32_t m_n_layers; + uint32_t m_width; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(FramebufferCreateInfo); + ANVIL_DISABLE_COPY_CONSTRUCTOR(FramebufferCreateInfo); + }; +}; /* namespace Anvil */ + +#endif /* MISC_FRAMEBUFFER_CREATE_INFO_H */ \ No newline at end of file diff --git a/include/misc/glsl_to_spirv.h b/include/misc/glsl_to_spirv.h index 8c10639d..13a9a53a 100644 --- a/include/misc/glsl_to_spirv.h +++ b/include/misc/glsl_to_spirv.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
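The create-info object above is only a recipe: attachments are appended one by one and the dimensions can still be adjusted through the setters before a framebuffer is actually built from it. A minimal sketch, assuming a valid device pointer and an already-created Anvil::ImageView; the framebuffer object that eventually consumes the info is outside this header:

#include "misc/framebuffer_create_info.h"

Anvil::FramebufferCreateInfoUniquePtr make_fb_create_info(const Anvil::BaseDevice* device_ptr,
                                                          Anvil::ImageView*        color_view_ptr,
                                                          uint32_t                 width,
                                                          uint32_t                 height)
{
    auto fb_create_info_ptr = Anvil::FramebufferCreateInfo::create(device_ptr,
                                                                   width,
                                                                   height,
                                                                   1); /* in_n_layers */

    Anvil::FramebufferAttachmentID color_attachment_id;
    const bool                     attachment_was_added = fb_create_info_ptr->add_attachment(color_view_ptr,
                                                                                            &color_attachment_id);

    /* A false result here would indicate the attachment could not be added. */
    (void) attachment_was_added;

    return fb_create_info_ptr;
}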
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -32,6 +32,8 @@ #include "misc/callbacks.h" #include "misc/debug.h" #include "misc/types.h" +#include +#include #include #include #include @@ -44,7 +46,7 @@ #pragma warning(disable: 4464) #endif - #include "../../deps/glslang/glslang/Public/ShaderLang.h" + #include "glslang/Public/ShaderLang.h" #ifdef _MSC_VER #pragma warning(pop) @@ -59,7 +61,7 @@ namespace Anvil { public: /* Constructor. */ - explicit GLSLangLimits(std::weak_ptr in_device_ptr); + explicit GLSLangLimits(const Anvil::BaseDevice* in_device_ptr); /* Destructor */ ~GLSLangLimits() @@ -91,6 +93,12 @@ namespace Anvil */ GLSL_SHADER_TO_SPIRV_GENERATOR_CALLBACK_ID_CONVERSION_ABOUT_TO_START, + /* Call-back issued right after the conversion ends. + * + * @param callback_arg OnGLSLToSPIRVConversionFinishedCallbackArgument instance. + */ + GLSL_SHADER_TO_SPIRV_GENERATOR_CALLBACK_ID_CONVERSION_FINISHED, + GLSL_SHADER_TO_SPIRV_GENERATOR_CALLBACK_ID_COUNT } GLSLShaderToSPIRVGeneratorCallbackID; @@ -128,15 +136,20 @@ namespace Anvil * which should be used. This mode is NOT supported if ANVIL_LINK_WITH_GLSLANG * macro is undefined. * @param in_shader_stage Shader stage described by the file. + * @param in_spirv_version Target SPIR-V version. **/ - static std::shared_ptr create(std::weak_ptr in_opt_device_ptr, - const Mode& in_mode, - std::string in_data, - ShaderStage in_shader_stage); + static Anvil::GLSLShaderToSPIRVGeneratorUniquePtr create(const Anvil::BaseDevice* in_opt_device_ptr, + const Mode& in_mode, + std::string in_data, + ShaderStage in_shader_stage, + SpvVersion in_spirv_version = SpvVersion::_1_0); /** Destructor. Releases all created Vulkan objects, as well as the SPIR-V blob data. */ ~GLSLShaderToSPIRVGenerator(); + void set_glsl_file_path(const std::optional &in_glsl_file_path) { m_glsl_file_path = in_glsl_file_path; } + void set_with_debug_info(bool in_with_debug_info) { m_with_debug_info = in_with_debug_info; } + /** Adds a "#define [definition_name] [value]" line after the first newline found in the * source code. * @@ -199,6 +212,35 @@ namespace Anvil bool add_extension_behavior(std::string in_extension_name, ExtensionBehavior in_behavior); + /** Replaces all instances of [placeholder_name] with [value] in the shader source. + * + * @param in_placeholder_name As specified above. + * @param in_value As specified above. + * + * @return true if the function succeeded, false otherwise. + **/ + bool add_placeholder_value_pair(const std::string& in_placeholder_name, + const std::string& in_value); + + /** Replaces all instances of [placeholder_name] with [value] in the shader source. + * + * @param in_placeholder_name As specified above. + * @param in_value As specified above. + * + * @return true if the function succeeded, false otherwise. + **/ + template + bool add_placeholder_value_pair(const std::string& in_placeholder_name, + const T& in_value) + { + std::stringstream value_sstream; + + value_sstream << in_value; + + return add_placeholder_value_pair(in_placeholder_name, + value_sstream.str() ); + } + /** Adds a new pragma which is going to be injected into the GLSL code. * * @param in_pragma_name Value to follow the #pragma keyword. @@ -215,12 +257,22 @@ namespace Anvil * * @return true if successful, false otherwise. 
**/ - bool bake_spirv_blob(); + bool bake_spirv_blob() const; /* Converts a ExtensionBehavior enum value to a corresponding GLSL definition */ std::string get_extension_behavior_glsl_code(const ExtensionBehavior& in_value) const; #ifdef ANVIL_LINK_WITH_GLSLANG + const std::string& get_debug_info_log() const + { + return m_debug_info_log; + } + + const std::string& get_program_debug_info_log() const + { + return m_program_debug_info_log; + } + /** Returns info log which contains detailed information regarding the program linking process. * * Call if get_spirv_blob() returns nullptr to find out more about shader issues which @@ -252,7 +304,7 @@ namespace Anvil * Otherwise, an assertion failure will occur, as the returned string will have a size of 0. * */ - const std::string& get_glsl_source_code() + const std::string& get_glsl_source_code() const { if (m_glsl_source_code_dirty) { @@ -278,84 +330,80 @@ namespace Anvil * * @return SPIR-V blob or nullptr if the function failed. **/ - const char* get_spirv_blob() + const char* get_spirv_blob() const { if (m_spirv_blob.size() == 0) { - bool result = bake_spirv_blob(); - - ANVIL_REDUNDANT_VARIABLE(result); - - anvil_assert(result); - anvil_assert(m_spirv_blob.size() != 0); + bake_spirv_blob(); } - return &m_spirv_blob.at(0); + return (m_spirv_blob.size() != 0) ? &m_spirv_blob.at(0) : nullptr; } /** Returns the number of bytes the SPIR-V blob, accessible via get_spirv_blob(), takes. */ - const uint32_t get_spirv_blob_size() + uint32_t get_spirv_blob_size() const { if (m_spirv_blob.size() == 0) { - bool result = bake_spirv_blob(); - - ANVIL_REDUNDANT_VARIABLE(result); - - anvil_assert(result); - anvil_assert(m_spirv_blob.size() != 0); + bake_spirv_blob(); } - anvil_assert(m_spirv_blob.size() > 0); - return static_cast(m_spirv_blob.size() ); } private: /* Private type declarations */ - typedef std::map ExtensionNameToExtensionBehaviorMap; - typedef std::map DefinitionNameToValueMap; + typedef std::map ExtensionNameToExtensionBehaviorMap; + typedef std::map DefinitionNameToValueMap; + typedef std::vector> PlaceholderNameAndValueVector; /* Private functions */ ANVIL_DISABLE_ASSIGNMENT_OPERATOR(GLSLShaderToSPIRVGenerator); ANVIL_DISABLE_COPY_CONSTRUCTOR (GLSLShaderToSPIRVGenerator); /* Constructor. 
Please see create() documentation for specification */ - explicit GLSLShaderToSPIRVGenerator(std::weak_ptr in_device_ptr, - const Mode& in_mode, - std::string in_data, - ShaderStage in_shader_stage); + explicit GLSLShaderToSPIRVGenerator(const Anvil::BaseDevice* in_device_ptr, + const Mode& in_mode, + std::string in_data, + ShaderStage in_shader_stage, + SpvVersion in_spirv_version); - bool bake_glsl_source_code(); + bool bake_glsl_source_code() const; #ifdef ANVIL_LINK_WITH_GLSLANG - bool bake_spirv_blob_by_calling_glslang(const char* in_body); + bool bake_spirv_blob_by_calling_glslang(const char* in_body, const std::optional &glslFilePath = {}, bool withDebugInfo = false) const; EShLanguage get_glslang_shader_stage () const; #else bool bake_spirv_blob_by_spawning_glslang_process(const std::string& in_glsl_filename_with_path, - const std::string& in_spirv_filename_with_path); + const std::string& in_spirv_filename_with_path) const; #endif /* Private members */ #ifdef ANVIL_LINK_WITH_GLSLANG + mutable std::string m_debug_info_log; std::unique_ptr m_limits_ptr; - std::string m_program_info_log; - std::string m_shader_info_log; + mutable std::string m_program_debug_info_log; + mutable std::string m_program_info_log; + mutable std::string m_shader_info_log; #endif std::string m_data; + std::optional m_glsl_file_path; + bool m_with_debug_info; Mode m_mode; - std::string m_glsl_source_code; - bool m_glsl_source_code_dirty; + mutable std::string m_glsl_source_code; + mutable bool m_glsl_source_code_dirty; - ShaderStage m_shader_stage; - std::vector m_spirv_blob; + ShaderStage m_shader_stage; + SpvVersion m_spirv_version; + mutable std::vector m_spirv_blob; DefinitionNameToValueMap m_definition_values; ExtensionNameToExtensionBehaviorMap m_extension_behaviors; + PlaceholderNameAndValueVector m_placeholder_values; DefinitionNameToValueMap m_pragmas; }; }; /* namespace Anvil */ -#endif /* MISC_GLSL_TO_SPIRV_H */ \ No newline at end of file +#endif /* MISC_GLSL_TO_SPIRV_H */ diff --git a/include/misc/graphics_pipeline_create_info.h b/include/misc/graphics_pipeline_create_info.h new file mode 100644 index 00000000..fa693789 --- /dev/null +++ b/include/misc/graphics_pipeline_create_info.h @@ -0,0 +1,1097 @@ +// +// Copyright (c) 2017-2019 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
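With the placeholder mechanism added above, shader templates can be specialized at conversion time instead of being string-edited by hand. A minimal sketch, assuming a valid device pointer, GLSL data (source text or a file name, depending on the mode) supplied by the caller, and an N_INSTANCES placeholder in that source; the conversion mode is passed through because its enumerators are defined earlier in this header:

#include "misc/glsl_to_spirv.h"

Anvil::GLSLShaderToSPIRVGeneratorUniquePtr make_vs_generator(const Anvil::BaseDevice*                       device_ptr,
                                                             const Anvil::GLSLShaderToSPIRVGenerator::Mode& mode,
                                                             const std::string&                             glsl_data,
                                                             uint32_t                                       n_instances)
{
    /* Target SPIR-V version defaults to 1.0 unless specified explicitly. */
    auto generator_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(device_ptr,
                                                                   mode,
                                                                   glsl_data,
                                                                   Anvil::ShaderStage::VERTEX);

    /* Replace every occurrence of the N_INSTANCES placeholder with a concrete
     * value; the templated overload stringifies the argument.
     */
    generator_ptr->add_placeholder_value_pair("N_INSTANCES",
                                              n_instances);

    /* Baking happens lazily: the first get_spirv_blob() / get_spirv_blob_size()
     * call triggers the conversion, and get_spirv_blob() returns nullptr on failure.
     */
    return generator_ptr;
}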
+// +#ifndef ANVIL_GRAPHICS_PIPELINE_CREATE_INFO_H +#define ANVIL_GRAPHICS_PIPELINE_CREATE_INFO_H + +#include "misc/base_pipeline_create_info.h" + + +namespace Anvil +{ + /* By default, graphics pipeline create info uses the following settings: + * + * All rendering modes & tests: disabled + * Blend constant: vec4(0.0) + * Cull mode: VK_CULL_MODE_BACK + * Depth bias: 0.0 + * Depth bias clamp: 0.0 + * Depth bias slope factor: 1.0 + * Depth test compare op: Anvil::CompareOp::ALWAYS + * Depth writes: disabled + * Dynamic states: all disabled + * Fill mode: VK_FILL_MODE_SOLID + * Front face: VK_FRONT_FACE_CCW + * Line width: 1.0 + * Logic op: VK_LOGIC_OP_NOOP; + * Max depth boundary: 1.0 + * Min depth boundary: 0.0 + * Min sample shading: 1.0 + * Number of raster samples: 1 + * Number of tessellation patches: 1 + * Primitive topology: VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST + * Sample mask: 0xFFFFFFFF + * Slope scaled depth bias: 0.0 + * Stencil comparison mask (back/front): 0xFFFFFFFF + * Stencil comparison op (back/front): Anvil::CompareOp::ALWAYS + * Stencil depth fail op (back/front): VK_STENCIL_OP_KEEP + * Stencil fail op (back/front): VK_STENCIL_OP_KEEP + * Stencil pass op (back/front): VK_STENCIL_OP_KEEP + * Stencil reference value (back/front): 0 + * Stencil write mask (back/front): 0xFFFFFFFF + * + * If no scissor or viewport is defined explicitly, one scissor box and one viewport, + * covering the whole screen, will be created at baking time. + * + * If VK_AMD_rasterization_order extension is supported: + * + * + Rasterization order: strict + * + * If VK_EXT_depth_clip_enable extension is supported: + * + * + Depth clip enabled: true + * + * If VK_EXT_transform_feedback extension is supported: + * + * + Rasterization stream index: 0 + * + * If VK_KHR_maintenance2 extension is supported: + * + * + Tessellation domain origin: upper-left + */ + class GraphicsPipelineCreateInfo : public BasePipelineCreateInfo + { + public: + /* Public functions */ + static Anvil::GraphicsPipelineCreateInfoUniquePtr create (const Anvil::PipelineCreateFlags& in_create_flags, + const RenderPass* in_renderpass_ptr, + SubPassID in_subpass_id, + const ShaderModuleStageEntryPoint& in_fragment_shader_stage_entrypoint_info, + const ShaderModuleStageEntryPoint& in_geometry_shader_stage_entrypoint_info, + const ShaderModuleStageEntryPoint& in_tess_control_shader_stage_entrypoint_info, + const ShaderModuleStageEntryPoint& in_tess_evaluation_shader_stage_entrypoint_info, + const ShaderModuleStageEntryPoint& in_vertex_shader_shader_stage_entrypoint_info, + const Anvil::GraphicsPipelineCreateInfo* in_opt_reference_pipeline_info_ptr = nullptr, + const Anvil::PipelineID* in_opt_base_pipeline_id_ptr = nullptr); + static Anvil::GraphicsPipelineCreateInfoUniquePtr create_proxy(); + + ~GraphicsPipelineCreateInfo(); + + /** Adds a new specialization constant. + * + * @param in_shader_stage Shader stage, with which the new specialization constant should be + * associated. Must be one of the graphics shader + * @param in_constant_id ID of the specialization constant to assign data for. + * @param in_n_data_bytes Number of bytes under @param in_data_ptr to assign to the specialization constant. + * @param in_data_ptr A buffer holding the data to be assigned to the constant. Must hold at least + * @param in_n_data_bytes bytes that will be read by the function. + * + * @return true if successful, false otherwise. 
+ **/ + bool add_specialization_constant(Anvil::ShaderStage in_shader_stage, + uint32_t in_constant_id, + uint32_t in_n_data_bytes, + const void* in_data_ptr) final + { + if (in_shader_stage != Anvil::ShaderStage::FRAGMENT && + in_shader_stage != Anvil::ShaderStage::GEOMETRY && + in_shader_stage != Anvil::ShaderStage::TESSELLATION_CONTROL && + in_shader_stage != Anvil::ShaderStage::TESSELLATION_EVALUATION && + in_shader_stage != Anvil::ShaderStage::VERTEX) + { + anvil_assert(in_shader_stage == Anvil::ShaderStage::FRAGMENT || + in_shader_stage == Anvil::ShaderStage::GEOMETRY || + in_shader_stage == Anvil::ShaderStage::TESSELLATION_CONTROL || + in_shader_stage == Anvil::ShaderStage::TESSELLATION_EVALUATION || + in_shader_stage == Anvil::ShaderStage::VERTEX); + + return false; + } + + return BasePipelineCreateInfo::add_specialization_constant(in_shader_stage, + in_constant_id, + in_n_data_bytes, + in_data_ptr); + } + + /** Adds a new vertex binding descriptor to the specified graphics pipeline, along with one or more attributes + * that consume the binding. This data will be used at baking time to configure input vertex attribute & bindings + * for the Vulkan pipeline object. + * + * @param in_binding Index to use for the new binding. + * @param in_stride_in_bytes Stride of the vertex attribute data. + * @param in_step_rate Step rate to use for the vertex attribute data. + * @param in_n_attributes Number of items available for reading under @param in_attribute_ptrs. Must be + * at least 1. + * @param in_attribute_ptrs Vertex attributes to associate with the new bindings. + * @param in_divisor Vertex attribute divisor to use for the binding. Any value different than 1 + * is only supported for devices with VK_EXT_vertex_attribute_divisor extension + * enabled. + * + * @return true if successful, false otherwise. + **/ + bool add_vertex_binding(uint32_t in_binding, + Anvil::VertexInputRate in_step_rate, + uint32_t in_stride_in_bytes, + const uint32_t& in_n_attributes, + const Anvil::VertexInputAttribute* in_attribute_ptrs, + uint32_t in_divisor = 1); + + /** Tells whether depth writes have been enabled. **/ + bool are_depth_writes_enabled() const; + + /** Retrieves blending properties defined. + * + * @param out_opt_blend_constant_vec4_ptr If not NULL, deref will be assigned to a ptr holding four float values + * representing the blend constant. + * @param out_opt_n_blend_attachments_ptr If not NULL, deref will be set to the number of blend + * attachments the graphics pipeline supports. + **/ + void get_blending_properties(const float** out_opt_blend_constant_vec4_ptr_ptr, + uint32_t* out_opt_n_blend_attachments_ptr) const; + + /** Retrieves color blend attachment properties specified for a given subpass attachment. + * + * @param in_attachment_id ID of the attachment the query is being made for. + * @param out_opt_blending_enabled_ptr If not null, deref will be set to true if blending has been + * enabled for this pipeline, or to false otherwise. + * @param out_opt_blend_op_color_ptr If not null, deref will be set to the specified attachment's + * color blend op. + * @param out_opt_blend_op_alpha_ptr If not null, deref will be set to the specified attachment's + * alpha blend op. + * @param out_opt_src_color_blend_factor_ptr If not null, deref will be set to the specified attachment's + * source color blend factor. + * @param out_opt_dst_color_blend_factor_ptr If not null, deref will be set to the specified attachment's + * destination color blend factor. 
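The two mutators above, add_specialization_constant() and add_vertex_binding(), are the main per-pipeline hooks for shader specialization and vertex input. A minimal sketch, assuming an already-created GraphicsPipelineCreateInfo and a caller-provided attribute array (its construction, and the choice of input rate, are left to the caller):

#include "misc/graphics_pipeline_create_info.h"

bool configure_pipeline_inputs(Anvil::GraphicsPipelineCreateInfo* pipeline_create_info_ptr,
                               const Anvil::VertexInputAttribute* vertex_attributes_ptr,
                               uint32_t                           n_vertex_attributes,
                               uint32_t                           stride_in_bytes,
                               Anvil::VertexInputRate             step_rate)
{
    /* Specialize fragment-shader constant 0 with a 32-bit value. Only the five
     * graphics stages checked in the override above are accepted.
     */
    const int32_t use_fast_path = 1;

    if (!pipeline_create_info_ptr->add_specialization_constant(Anvil::ShaderStage::FRAGMENT,
                                                               0, /* in_constant_id */
                                                               static_cast<uint32_t>(sizeof(use_fast_path) ),
                                                              &use_fast_path) )
    {
        return false;
    }

    /* Bind the caller-provided attributes to vertex binding 0. A divisor other
     * than 1 would additionally require VK_EXT_vertex_attribute_divisor, as
     * noted in the declaration above.
     */
    return pipeline_create_info_ptr->add_vertex_binding(0, /* in_binding */
                                                        step_rate,
                                                        stride_in_bytes,
                                                        n_vertex_attributes,
                                                        vertex_attributes_ptr);
}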
+ * @param out_opt_src_alpha_blend_factor_ptr If not null, deref will be set to the specified attachment's + * source alpha blend factor. + * @param out_opt_dst_alpha_blend_factor_ptr If not null, deref will be set to the specified attachment's + * destination alpha blend factor. + * @param out_opt_channel_write_mask_ptr If not null, deref will be set to the specified attachment's + * write mask. + * + * @return true if successful, false otherwise. + */ + bool get_color_blend_attachment_properties(SubPassAttachmentID in_attachment_id, + bool* out_opt_blending_enabled_ptr, + Anvil::BlendOp* out_opt_blend_op_color_ptr, + Anvil::BlendOp* out_opt_blend_op_alpha_ptr, + Anvil::BlendFactor* out_opt_src_color_blend_factor_ptr, + Anvil::BlendFactor* out_opt_dst_color_blend_factor_ptr, + Anvil::BlendFactor* out_opt_src_alpha_blend_factor_ptr, + Anvil::BlendFactor* out_opt_dst_alpha_blend_factor_ptr, + Anvil::ColorComponentFlags* out_opt_channel_write_mask_ptr) const; + + /** Tells what conservative rasterization mode has been specified for this instance. **/ + Anvil::ConservativeRasterizationModeEXT get_conservative_rasterization_mode() const; + + /** Retrieves depth bias-related state configuration. + * + * @param out_opt_is_enabled_ptr If not null, deref will be set to true if depth bias has + * been enabled for the pipeline, or to false otherwise. + * @param out_opt_depth_bias_constant_factor_ptr If not null, deref will be set to the depth bias constant factor + * assigned to the pipeline. + * @param out_opt_depth_bias_clamp_ptr If not null, deref will be set to the depth bias clamp value, as + * assigned to the pipeline. + * @param out_opt_depth_bias_slope_factor_ptr If not null, deref will be set to the depth bias slope factor, as + * configured for the pipeline. + **/ + void get_depth_bias_state(bool* out_opt_is_enabled_ptr, + float* out_opt_depth_bias_constant_factor_ptr, + float* out_opt_depth_bias_clamp_ptr, + float* out_opt_depth_bias_slope_factor_ptr) const; + + /** Retrieves depth bounds-related state configuration. + * + * @param out_opt_is_enabled_ptr If not null, deref will be set to true if depth bounds mode + * has been enabled for the pipeline, or to false otherwise. + * @param out_opt_min_depth_bounds_ptr If not null, deref will be set to the minimum depth bound value + * assigned to the pipeline. + * @param out_opt_max_depth_bounds_ptr If not null, deref will be set to the maximum depth bound value + * assigned to the pipeline. + **/ + void get_depth_bounds_state(bool* out_opt_is_enabled_ptr, + float* out_opt_min_depth_bounds_ptr, + float* out_opt_max_depth_bounds_ptr) const; + + /** Retrieves depth test-related state configuration. + * + * @param out_opt_is_enabled_ptr If not null, deref will be set to true if depth test has been + * enabled for the pipeline, or to false otherwise. + * @param out_opt_compare_op_ptr If not null, deref will be set to the depth compare op assigned + * to the pipeline. + **/ + void get_depth_test_state(bool* out_opt_is_enabled_ptr, + Anvil::CompareOp* out_opt_compare_op_ptr) const; + + /** Tells what dynamic states have been enabled. **/ + void get_enabled_dynamic_states(const Anvil::DynamicState** out_dynamic_states_ptr_ptr, + uint32_t* out_n_dynamic_states_ptr) const; + + /** Retrieves general properties. + * + * @param out_opt_n_scissors_ptr If not null, deref will be set to the number of scissors used + * by the pipeline. + * @param out_opt_n_viewports_ptr If not null, deref will be set to the number of viewports used + * by the pipeline. 
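Like the other state queries in this header, get_depth_bias_state() reports through optional out-parameters, so a caller asks only for the fields it needs and passes nullptr for the rest:

#include "misc/graphics_pipeline_create_info.h"

bool uses_depth_bias(const Anvil::GraphicsPipelineCreateInfo& in_create_info)
{
    bool  is_enabled      = false;
    float constant_factor = 0.0f;
    float slope_factor    = 0.0f;

    /* The clamp value is not needed here, so its out-parameter stays null. */
    in_create_info.get_depth_bias_state(&is_enabled,
                                        &constant_factor,
                                        nullptr, /* out_opt_depth_bias_clamp_ptr */
                                        &slope_factor);

    return is_enabled && (constant_factor != 0.0f || slope_factor != 0.0f);
}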
+ * @param out_opt_n_vertex_bindings_ptr If not null, deref will be set to the number of vertex + * bindings specified by the owner. + * @param out_opt_renderpass_ptr_ptr If not null, deref will be set to the renderpass assigned to + * the pipeline. + * @param out_opt_subpass_id_ptr If not null, deref will be set to the ID of the subpass the pipeline + * has been created for. + * + * @return true if successful, false otherwise. + **/ + void get_graphics_pipeline_properties(uint32_t* out_opt_n_scissors_ptr, + uint32_t* out_opt_n_viewports_ptr, + uint32_t* out_opt_n_vertex_bindings_ptr, + const RenderPass** out_opt_renderpass_ptr_ptr, + SubPassID* out_opt_subpass_id_ptr) const; + + /** Retrieves logic op-related state configuration. + * + * @param out_opt_is_enabled_ptr If not null, deref will be set to true if the logic op has + * been enabled for the pipeline, or to false otherwise. + * @param out_opt_logic_op_ptr If not null, deref will be set to the logic op enum, specified + * at creation time. + **/ + void get_logic_op_state(bool* out_opt_is_enabled_ptr, + Anvil::LogicOp* out_opt_logic_op_ptr) const; + + /** Retrieves multisampling-related state configuration. + * + * @param out_opt_sample_count_ptr If not null, deref will be set to the enum value telling + * the sample count assigned to the pipeline. + * @param out_opt_sample_mask_ptr If not null, deref will be set to a ptr to the sample mask assigned + * to the pipeline. + **/ + void get_multisampling_properties(Anvil::SampleCountFlagBits* out_opt_sample_count_ptr, + const VkSampleMask** out_opt_sample_mask_ptr_ptr) const; + + /** Tells the number of dynamic scissor boxes. **/ + uint32_t get_n_dynamic_scissor_boxes() const; + + /** Tells the number of dynamic viewports. **/ + uint32_t get_n_dynamic_viewports() const; + + uint32_t get_n_scissor_boxes() const; + + uint32_t get_n_viewports() const; + + /** Tells what primitive topology has been specified for this instance. **/ + Anvil::PrimitiveTopology get_primitive_topology() const; + + /** Tells what rasterization order has been specified for this instance. **/ + Anvil::RasterizationOrderAMD get_rasterization_order() const; + + /** Tells what primitive overestimation size has been specified for this instance. **/ + float get_extra_primitive_overestimation_size() const; + + /** Retrieves various rasterization properties of the graphics pipeline. + * + * @param out_opt_polygon_mode_ptr If not null, deref will be set to polygon mode used by the + * pipeline. + * @param out_opt_cull_mode_ptr If not null, deref will be set to cull mode used by the + * pipeline. + * @param out_opt_front_face_ptr If not null, deref will be set to front face setting, used + * by the pipeline. + * @param out_opt_line_width_ptr If not null, deref will be set to line width setting, as used + * by the pipeline. + * + * @return true if successful, false otherwise. + **/ + void get_rasterization_properties(Anvil::PolygonMode* out_opt_polygon_mode_ptr, + Anvil::CullModeFlags* out_opt_cull_mode_ptr, + Anvil::FrontFace* out_opt_front_face_ptr, + float* out_opt_line_width_ptr) const; + + /* Returns rasterization stream index associated with the create info structure. */ + const uint32_t& get_rasterization_stream_index() const + { + return m_rasterization_stream_index; + } + + const RenderPass* get_renderpass() const + { + return m_renderpass_ptr; + } + + /** Retrieves state configuration related to custom sample locations. 
+ * + * @param out_opt_is_enabled_ptr If not null, deref will be set to true if custom sample locations have been + * enabled for the pipeline. + * @param out_opt_sample_locations_per_pixel_ptr If not null, deref will be set to the number of sample locations to be used + * per pixel. + * @param out_opt_sample_location_grid_size_ptr If not null, deref will be set to sample location grid size. + * @param out_opt_n_sample_locations_ptr If not null, deref will be set to the number of sample locations available + * for reading under *out_opt_sample_locations_ptr + * @param out_opt_sample_locations_ptr_ptr If not null, deref will be set to a ptr to an array holding custom sample + * locations. + * + **/ + void get_sample_location_state(bool* out_opt_is_enabled_ptr, + Anvil::SampleCountFlagBits* out_opt_sample_locations_per_pixel_ptr, + VkExtent2D* out_opt_sample_location_grid_size_ptr, + uint32_t* out_opt_n_sample_locations_ptr, + const Anvil::SampleLocation** out_opt_sample_locations_ptr_ptr) const; + + /** Retrieves state configuration related to per-sample shading. + * + * @param out_opt_is_enabled_ptr If not null, deref will be set to true if per-sample shading + * is enabled for the pipeline. + * @param out_opt_min_sample_shading_ptr If not null, deref will be set to the minimum sample shading value, + * as specified at creation time. + * + **/ + void get_sample_shading_state(bool* out_opt_is_enabled_ptr, + float* out_opt_min_sample_shading_ptr) const; + + /** Retrieves properties of a scissor box at a given index. + * + * @param in_n_scissor_box Index of the scissor box to retrieve data of. + * @param out_opt_x_ptr If not null, deref will be set to X offset of the scissor box. + * @param out_opt_y_ptr If not null, deref will be set to Y offset of the scissor box. + * @param out_opt_width_ptr If not null, deref will be set to width of the scissor box. + * @param out_opt_height_ptr If not null, deref will be set to height of the scissor box. + * + * @return true if successful, false otherwise. + **/ + bool get_scissor_box_properties(uint32_t in_n_scissor_box, + int32_t* out_opt_x_ptr, + int32_t* out_opt_y_ptr, + uint32_t* out_opt_width_ptr, + uint32_t* out_opt_height_ptr) const; + + /** Retrieves stencil test-related state configuration. + * + * @param out_opt_is_enabled_ptr If not null, deref will be set to true if stencil test + * has been enabled for the pipeline. + * @param out_opt_front_stencil_fail_op_ptr If not null, deref will be set to "front stencil fail operation" + * setting, as specified at creation time. + * @param out_opt_front_stencil_pass_op_ptr If not null, deref will be set to "front stencil pass operation" + * setting, as specified at creation time. + * @param out_opt_front_stencil_depth_fail_op_ptr If not null, deref will be set to "front stencil depth fail operation" + * setting, as specified at creation time. + * @param out_opt_front_stencil_compare_op_ptr If not null, deref will be set to "front stencil compare operation" + * setting, as specified at creation time. + * @param out_opt_front_stencil_compare_mask_ptr If not null, deref will be set to front stencil compare mask, as + * specified at creation time. + * @param out_opt_front_stencil_write_mask_ptr If not null, deref will be set to front stencil write mask, as + * specified at creation time. + * @param out_opt_front_stencil_reference_ptr If not null, deref will be set to front stencil reference value, as + * specified at creation time. 
+ * @param out_opt_back_stencil_fail_op_ptr If not null, deref will be set to "back stencil fail operation" + * setting, as specified at creation time. + * @param out_opt_back_stencil_pass_op_ptr If not null, deref will be set to "back stencil pass operation" + * setting, as specified at creation time. + * @param out_opt_back_stencil_depth_fail_op_ptr If not null, deref will be set to "back stencil depth fail operation" + * setting, as specified at creation time. + * @param out_opt_back_stencil_compare_op_ptr If not null, deref will be set to "back stencil compare operation" + * setting, as specified at creation time. + * @param out_opt_back_stencil_compare_mask_ptr If not null, deref will be set to back stencil compare mask, as + * specified at creation time. + * @param out_opt_back_stencil_write_mask_ptr If not null, deref will be set to back stencil write mask, as + * specified at creation time. + * @param out_opt_back_stencil_reference_ptr If not null, deref will be set to back stencil reference value, as + * specified at creation time. + * + * @return true if successful, false otherwise. + **/ + void get_stencil_test_properties(bool* out_opt_is_enabled_ptr, + Anvil::StencilOp* out_opt_front_stencil_fail_op_ptr, + Anvil::StencilOp* out_opt_front_stencil_pass_op_ptr, + Anvil::StencilOp* out_opt_front_stencil_depth_fail_op_ptr, + Anvil::CompareOp* out_opt_front_stencil_compare_op_ptr, + uint32_t* out_opt_front_stencil_compare_mask_ptr, + uint32_t* out_opt_front_stencil_write_mask_ptr, + uint32_t* out_opt_front_stencil_reference_ptr, + Anvil::StencilOp* out_opt_back_stencil_fail_op_ptr, + Anvil::StencilOp* out_opt_back_stencil_pass_op_ptr, + Anvil::StencilOp* out_opt_back_stencil_depth_fail_op_ptr, + Anvil::CompareOp* out_opt_back_stencil_compare_op_ptr, + uint32_t* out_opt_back_stencil_compare_mask_ptr, + uint32_t* out_opt_back_stencil_write_mask_ptr, + uint32_t* out_opt_back_stencil_reference_ptr) const; + + const SubPassID& get_subpass_id() const + { + return m_subpass_id; + } + + /* TODO + * + * Requires VK_KHR_maintenance2. + */ + Anvil::TessellationDomainOrigin get_tessellation_domain_origin() const + { + return m_tessellation_domain_origin; + } + + /** Tells the number of patch control points associated with this instance. **/ + uint32_t get_n_patch_control_points() const; + + /** Returns properties of a vertex attribute at a given index. These properties are as specified by the owner. + * + * This means the binding ultimately assigned by graphics pipeline manager does not necessarily have to match + * @param out_opt_binding_ptr. + * + * @param in_n_vertex_input_binding Ordinal number of vertex input binding to use for the query. + * @param out_opt_binding_ptr If not null, deref will be set to the specified binding's index. + * @param out_opt_stride_ptr If not null, deref will be set to the specified binding's stride. + * @param out_opt_rate_ptr If not null, deref will be set to the specified binding's step rate. + * @param out_opt_n_attributes_ptr If not null, deref will be set to the number of items available for reading at * @param out_opt_attributes_ptr_ptr + * @param out_opt_attributes_ptr_ptr If not null, deref will be set to a pointer an array of attribute descriptors associated + * with the binding. + * @param out_opt_divisor_ptr If not null, deref will be set to the specified binding's vertex attribute divisor. + * + * @return true if successful, false otherwise. 
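+         *
+         * Editor's usage sketch (not part of the original header; "pipeline_create_info_ptr" is an
+         * assumed, valid Anvil::GraphicsPipelineCreateInfo instance):
+         *
+         *     uint32_t                           binding        = 0;
+         *     uint32_t                           stride         = 0;
+         *     Anvil::VertexInputRate             rate           = Anvil::VertexInputRate::UNKNOWN;
+         *     uint32_t                           n_attributes   = 0;
+         *     const Anvil::VertexInputAttribute* attributes_ptr = nullptr;
+         *
+         *     if (pipeline_create_info_ptr->get_vertex_binding_properties(0, // in_n_vertex_binding
+         *                                                                 &binding,
+         *                                                                 &stride,
+         *                                                                 &rate,
+         *                                                                 &n_attributes,
+         *                                                                 &attributes_ptr) )
+         *     {
+         *         // attributes_ptr[0 .. n_attributes - 1] describe the attributes consuming the binding.
+         *     }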
+ **/ + bool get_vertex_binding_properties(uint32_t in_n_vertex_binding, + uint32_t* out_opt_binding_ptr = nullptr, + uint32_t* out_opt_stride_ptr = nullptr, + Anvil::VertexInputRate* out_opt_rate_ptr = nullptr, + uint32_t* out_opt_n_attributes_ptr = nullptr, + const VertexInputAttribute** out_opt_attributes_ptr_ptr = nullptr, + uint32_t* out_opt_divisor_ptr = nullptr) const; + + /** Retrieves properties of a viewport at a given index. + * + * @param in_n_viewport Index of the viewport to retrieve properties of. + * @param out_opt_origin_x_ptr If not null, deref will be set to the viewport's X origin. + * @param out_opt_origin_y_ptr If not null, deref will be set to the viewport's Y origin. + * @param out_opt_width_ptr If not null, deref will be set to the viewport's width. + * @param out_opt_height_ptr If not null, deref will be set to the viewport's height. + * @param out_opt_min_depth_ptr If not null, deref will be set to the viewport's minimum depth value. + * @param out_opt_max_depth_ptr If not null, deref will be set to the viewport's maximum depth value. + * + * @return true if successful, false otherwise. + **/ + bool get_viewport_properties(uint32_t in_n_viewport, + float* out_opt_origin_x_ptr, + float* out_opt_origin_y_ptr, + float* out_opt_width_ptr, + float* out_opt_height_ptr, + float* out_opt_min_depth_ptr, + float* out_opt_max_depth_ptr) const; + + /** Tells if alpha-to-coverage mode has been enabled. **/ + bool is_alpha_to_coverage_enabled() const; + + /** Tells if alpha-to-one mode has been enabled. **/ + bool is_alpha_to_one_enabled() const; + + /** Tells whether depth clamping has been enabled. **/ + bool is_depth_clamp_enabled() const; + + /** Tells whether depth clipping has been enabled. **/ + bool is_depth_clip_enabled() const; + + /** Tells whether primitive restart mode has been enabled. **/ + bool is_primitive_restart_enabled() const; + + /** Tells whether rasterizer discard has been enabled. */ + bool is_rasterizer_discard_enabled() const; + + /** Tells whether sample mask has been enabled. */ + bool is_sample_mask_enabled() const; + + /** Sets a new blend constant. + * + * @param in_blend_constant_vec4 4 floats, specifying the constant. Must not be nullptr. + * + **/ + void set_blending_properties(const float* in_blend_constant_vec4); + + /** Updates color blend properties for the specified sub-pass attachment. + * + * @param in_attachment_id ID of the subpass attachment, for which the color blend properties should be applied. + * @param in_blending_enabled true if blending should be enabled for the specified attachment, false otherwise. + * @param in_blend_op_color Blend operation color to use for the attachment. + * @param in_blend_op_alpha Blend operation alpha to use for the attachment. + * @param in_src_color_blend_factor Source blend factor for color components. + * @param in_dst_color_blend_factor Destination blend factor for color components. + * @param in_src_alpha_blend_factor Source blend factor for the alpha component. + * @param in_dst_alpha_blend_factor Destination blend factor for the alpha component. + * @param in_channel_write_mask Component write mask to use for the attachment. 
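+         *
+         * Editor's sketch (not part of the original header; the attachment ID value and the
+         * "pipeline_create_info_ptr" instance are assumptions). Enables additive blending, using
+         * only enum values referenced elsewhere in this header:
+         *
+         *     pipeline_create_info_ptr->set_color_blend_attachment_properties(
+         *         0,     // in_attachment_id
+         *         true,  // in_blending_enabled
+         *         Anvil::BlendOp::ADD,
+         *         Anvil::BlendOp::ADD,
+         *         Anvil::BlendFactor::ONE,
+         *         Anvil::BlendFactor::ONE,
+         *         Anvil::BlendFactor::ONE,
+         *         Anvil::BlendFactor::ONE,
+         *         Anvil::ColorComponentFlagBits::R_BIT | Anvil::ColorComponentFlagBits::G_BIT |
+         *         Anvil::ColorComponentFlagBits::B_BIT | Anvil::ColorComponentFlagBits::A_BIT);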
+ * + **/ + void set_color_blend_attachment_properties(SubPassAttachmentID in_attachment_id, + bool in_blending_enabled, + Anvil::BlendOp in_blend_op_color, + Anvil::BlendOp in_blend_op_alpha, + Anvil::BlendFactor in_src_color_blend_factor, + Anvil::BlendFactor in_dst_color_blend_factor, + Anvil::BlendFactor in_src_alpha_blend_factor, + Anvil::BlendFactor in_dst_alpha_blend_factor, + Anvil::ColorComponentFlags in_channel_write_mask); + + /** Updates multisampling properties. + * + * @param in_sample_count Number of rasterization samples to be used (expressed as one of the enum values). + * @param in_min_sample_shading Minimum number of unique samples to shade for each fragment. + * @param in_sample_mask Sample mask to use. + */ + void set_multisampling_properties(Anvil::SampleCountFlagBits in_sample_count, + float in_min_sample_shading, + const VkSampleMask in_sample_mask); + + /** Updates the number of scissor boxes to be used, when dynamic scissor state is enabled. + * + * @param in_n_dynamic_scissor_boxes As per description. + */ + void set_n_dynamic_scissor_boxes(uint32_t in_n_dynamic_scissor_boxes); + + /** Updates the number of viewports to be used, when dynamic viewport state is enabled. + * + * @param in_n_dynamic_viewports As per description. + */ + void set_n_dynamic_viewports(uint32_t in_n_dynamic_viewports); + + /** Updates the number of tessellation patch points. + * + * @param in_n_patch_control_points As per description. + **/ + void set_n_patch_control_points(uint32_t in_n_patch_control_points); + + /** Sets primitive topology. */ + void set_primitive_topology(Anvil::PrimitiveTopology in_primitive_topology); + + /** Configures the rasterization order for the pipeline if the VK_AMD_rasterization_order extension + * is supported by the device, for which the pipeline has been created. + * + * On drivers which do not support the extension, the setting will be ignored. + * + * @param in_rasterization_order Rasterization order to use. + **/ + void set_rasterization_order(Anvil::RasterizationOrderAMD in_rasterization_order); + + /** Configures rasterization stream index for the pipeline if VK_EXT_transform_feedback extension + * is supported by the device, for which the pipeline is going to be created. + * + * On drivers not supporting the extension, the setting will be ignored. + * + * @param in_rasterization_stream_index Index to use. + **/ + void set_rasterization_stream_index(const uint32_t& in_rasterization_stream_index); + + /** Configures the conservative rasterization mode for the pipeline if the VK_EXT_conservative_rasterization + * extension is supported by the device, for which the pipeline has been created. + * + * On drivers which do not support the extension, the setting will be ignored. + * + * @param in_conservative_rasterization_mode Conservative rasterization mode to use. + **/ + void set_conservative_rasterization_mode(Anvil::ConservativeRasterizationModeEXT in_conservative_rasterization_mode); + + /** if the VK_EXT_conservative_rasterization extension is supported by the device and + * Anvil::ConservativeRasterizationModeEXT::OVERESTIMATE conservative rasterization mode is set for the pipeline, + * this setting controls extra size in pixels by which the primitive is incresed during conservative rasterization. + * + * On drivers which do not support the extension, the setting will be ignored. 
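+         *
+         * Editor's sketch (not part of the original header; "pipeline_create_info_ptr" is an assumed,
+         * valid Anvil::GraphicsPipelineCreateInfo instance and the size value is illustrative):
+         *
+         *     pipeline_create_info_ptr->set_conservative_rasterization_mode    (Anvil::ConservativeRasterizationModeEXT::OVERESTIMATE);
+         *     pipeline_create_info_ptr->set_extra_primitive_overestimation_size(0.5f);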
+ * + * @param extra_primitive_overestimation_size extra size to increase the primitive + **/ + void set_extra_primitive_overestimation_size(float extra_primitive_overestimation_size); + + /** Sets a number of rasterization properties to be used for the pipeline. + * + * @param in_polygon_mode Polygon mode to use. + * @param in_cull_mode Cull mode to use. + * @param in_front_face Front face to use. + * @param in_line_width Line width to use. + **/ + void set_rasterization_properties(Anvil::PolygonMode in_polygon_mode, + Anvil::CullModeFlags in_cull_mode, + Anvil::FrontFace in_front_face, + float in_line_width); + + /** Sets all state related to custom sample locations support. + * + * This information is only used if custom sample locations is enabled (which can be done by calling + * toggle_sample_locations(). + * + * @param in_sample_locations_per_pixel Number of sample locations to use per pixel. + * @param in_sample_location_grid_size Size of the sample location grid to select custom sample locations for. + * @param in_n_sample_locations Number of sample locations defined under @param in_sample_locations_ptr. + * Must not be 0. + * @param in_sample_locations_ptr Array of sample locations to use. Must not be nullptr. Must hold at least + * @param in_n_sample_locations items. + */ + void set_sample_location_properties(const Anvil::SampleCountFlagBits& in_sample_locations_per_pixel, + const VkExtent2D& in_sample_location_grid_size, + const uint32_t& in_n_sample_locations, + const Anvil::SampleLocation* in_sample_locations_ptr); + + /** Sets properties of a scissor box at the specified index. + * + * If @param n_scissor_box is larger than 1, all previous scissor boxes must also be defined + * prior to creating a pipeline. Number of scissor boxes must match the number of viewports + * defined for the pipeline. + * + * @param in_n_scissor_box Index of the scissor box to be updated. + * @param in_x X offset of the scissor box. + * @param in_y Y offset of the scissor box. + * @param in_width Width of the scissor box. + * @param in_height Height of the scissor box. + **/ + void set_scissor_box_properties(uint32_t in_n_scissor_box, + int32_t in_x, + int32_t in_y, + uint32_t in_width, + uint32_t in_height); + + /** Sets a number of stencil test properties. + * + * @param in_update_front_face_state true if the front face stencil states should be updated; false to update the + * back stencil states instead. + * @param in_stencil_fail_op Stencil fail operation to use. + * @param in_stencil_pass_op Stencil pass operation to use. + * @param in_stencil_depth_fail_op Stencil depth fail operation to use. + * @param in_stencil_compare_op Stencil compare operation to use. + * @param in_stencil_compare_mask Stencil compare mask to use. + * @param in_stencil_write_mask Stencil write mask to use. + * @param in_stencil_reference Stencil reference value to use. + **/ + void set_stencil_test_properties(bool in_update_front_face_state, + Anvil::StencilOp in_stencil_fail_op, + Anvil::StencilOp in_stencil_pass_op, + Anvil::StencilOp in_stencil_depth_fail_op, + Anvil::CompareOp in_stencil_compare_op, + uint32_t in_stencil_compare_mask, + uint32_t in_stencil_write_mask, + uint32_t in_stencil_reference); + + /* TODO + * + * Requires VK_KHR_maintenance2. + */ + void set_tessellation_domain_origin(const Anvil::TessellationDomainOrigin& in_new_origin); + + /** Sets properties of a viewport at the specified index. 
+ * + * If @param in_n_viewport is larger than 1, all previous viewports must also be defined + * prior to creating a pipeline. Number of scissor boxes must match the number of viewports + * defined for the pipeline. + * + * @param in_n_viewport Index of the viewport, whose properties should be changed. + * @param in_origin_x X offset to use for the viewport's origin. + * @param in_origin_y Y offset to use for the viewport's origin. + * @param in_width Width of the viewport. + * @param in_height Height of the viewport. + * @param in_min_depth Minimum depth value to use for the viewport. + * @param in_max_depth Maximum depth value to use for the viewport. + **/ + void set_viewport_properties(uint32_t in_n_viewport, + float in_origin_x, + float in_origin_y, + float in_width, + float in_height, + float in_min_depth, + float in_max_depth); + + /** Enables or disables the "alpha to coverage" test. + * + * @param in_should_enable true to enable the test; false to disable it. + */ + void toggle_alpha_to_coverage(bool in_should_enable); + + /** Enables or disables the "alpha to one" test. + * + * @param in_should_enable true to enable the test; false to disable it. + */ + void toggle_alpha_to_one(bool in_should_enable); + + /** Enables or disables the "depth bias" mode and updates related state values. + * + * @param in_should_enable true to enable the mode; false to disable it. + * @param in_depth_bias_constant_factor Depth bias constant factor to use for the mode. + * @param in_depth_bias_clamp Depth bias clamp to use for the mode. + * @param in_depth_bias_slope_factor Slope scale for the depth bias to use for the mode. + */ + void toggle_depth_bias(bool in_should_enable, + float in_depth_bias_constant_factor, + float in_depth_bias_clamp, + float in_depth_bias_slope_factor); + + /** Enables or disables the "depth bounds" test and updates related state values. + * + * @param in_should_enable true to enable the test; false to disable it. + * @param in_min_depth_bounds Minimum boundary value to use for the test. + * @param in_max_depth_bounds Maximum boundary value to use for the test. + */ + void toggle_depth_bounds_test(bool in_should_enable, + float in_min_depth_bounds, + float in_max_depth_bounds); + + /** Enables or disables the "depth clamp" test. + * + * @param in_should_enable true to enable the test; false to disable it. + */ + void toggle_depth_clamp(bool in_should_enable); + + /** Enables or disables the "depth clip" test. + * + * Requires VK_EXT_depth_clip_enable extension support. + * + * @param in_should_enable true to enable the test; false to disable it. + */ + void toggle_depth_clip(bool in_should_enable); + + /** Enables or disables the depth test and updates related state values. + * + * @param in_should_enable true to enable the mode; false to disable it. + * @param in_compare_op Compare operation to use for the test. + */ + void toggle_depth_test(bool in_should_enable, + Anvil::CompareOp in_compare_op); + + /** Enables or disables depth writes. + * + * @param in_should_enable true to enable the writes; false to disable them. + */ + void toggle_depth_writes(bool in_should_enable); + + /** Enables or disables the specified dynamic states. + * + * @param in_should_enable true to enable the dynamic state(s) specified by @param in_dynamic_state_bits, false + * disable them if already enabled. + * @param in_dynamic_states_ptr Pointer to an array of dynamic states to disable or enable. Must not be nullptr. 
+ * @param in_n_dynamic_states Number of array items available for reading under @param in_dynamic_states_ptr + **/ + void toggle_dynamic_state (bool in_should_enable, + const Anvil::DynamicState& in_dynamic_state); + void toggle_dynamic_states(bool in_should_enable, + const Anvil::DynamicState* in_dynamic_states_ptr, + const uint32_t& in_n_dynamic_states); + void toggle_dynamic_states(bool in_should_enable, + const std::vector& in_dynamic_states); + + /** Enables or disables logic ops and specifies which logic op should be used. + * + * @param in_should_enable true to enable the mode; false to disable it. + * @param in_logic_op Logic operation type to use. + */ + void toggle_logic_op(bool in_should_enable, + Anvil::LogicOp in_logic_op); + + /** Enables or disables the "primitive restart" mode. + * + * @param in_should_enable true to enable the mode; false to disable it. + */ + void toggle_primitive_restart(bool in_should_enable); + + /** Enables or disables the "rasterizer discard" mode. + * + * @param in_should_enable true to enable the test; false to disable it. + */ + void toggle_rasterizer_discard(bool in_should_enable); + + /** Enables or disables custom sample locations. + * + * NOTE: If you enable the functionality, also make sure to call set_sample_locations_properties() + * to configure additional state required at pipeline creation time. + * + * Requires VK_EXT_sample_locations. + * + * @param in_should_enable True to enable custom sample locations, false to disable the functionality. + */ + void toggle_sample_locations(bool in_should_enable); + + /** Enables or disables the sample mask. + * + * Note: make sure to configure the sample mask using set_multisampling_properties() if you intend to use it. + * + * Note: disabling sample mask will make the manager set VkPipelineMultisampleStateCreateInfo::pSampleMask to a non-null + * value at pipeline creation time. + * + * @param in_should_enable true to enable sample mask; false to disable it. + */ + void toggle_sample_mask(bool in_should_enable); + + /** Enables or disables the "per-sample shading" mode. + * + * @param in_should_enable true to enable the test; false to disable it. + */ + void toggle_sample_shading(bool in_should_enable); + + /** Enables or disables the stencil test. + * + * @param in_should_enable true to enable the test; false to disable it. + */ + void toggle_stencil_test(bool in_should_enable); + + private: + /* Private type definitions */ + + /* Defines blending properties for a single subpass attachment. */ + typedef struct BlendingProperties + { + Anvil::ColorComponentFlags channel_write_mask; + + bool blend_enabled; + Anvil::BlendOp blend_op_alpha; + Anvil::BlendOp blend_op_color; + Anvil::BlendFactor dst_alpha_blend_factor; + Anvil::BlendFactor dst_color_blend_factor; + Anvil::BlendFactor src_alpha_blend_factor; + Anvil::BlendFactor src_color_blend_factor; + + /** Constructor. */ + BlendingProperties() + { + blend_enabled = false; + blend_op_alpha = Anvil::BlendOp::ADD; + blend_op_color = Anvil::BlendOp::ADD; + channel_write_mask = Anvil::ColorComponentFlagBits::A_BIT | + Anvil::ColorComponentFlagBits::B_BIT | + Anvil::ColorComponentFlagBits::G_BIT | + Anvil::ColorComponentFlagBits::R_BIT; + dst_alpha_blend_factor = Anvil::BlendFactor::ONE; + dst_color_blend_factor = Anvil::BlendFactor::ONE; + src_alpha_blend_factor = Anvil::BlendFactor::ONE; + src_color_blend_factor = Anvil::BlendFactor::ONE; + } + + /** Comparison operator + * + * @param in Object to compare against. 
+ * + * @return true if all states match, false otherwise. + **/ + bool operator==(const BlendingProperties& in) const + { + return (in.blend_enabled == blend_enabled && + in.blend_op_alpha == blend_op_alpha && + in.blend_op_color == blend_op_color && + in.channel_write_mask == channel_write_mask && + in.dst_alpha_blend_factor == dst_alpha_blend_factor && + in.dst_color_blend_factor == dst_color_blend_factor && + in.src_alpha_blend_factor == src_alpha_blend_factor && + in.src_color_blend_factor == src_color_blend_factor); + } + } BlendingProperties; + + typedef std::map SubPassAttachmentToBlendingPropertiesMap; + + /** Defines a single scissor box + * + * This descriptor is not exposed to the Vulkan implementation. It is used to form Vulkan-specific + * descriptors at baking time instead. + */ + struct InternalScissorBox + { + int32_t x; + int32_t y; + uint32_t width; + uint32_t height; + + /* Constructor. Should only be used by STL. */ + InternalScissorBox() + { + x = 0; + y = 0; + width = 32u; + height = 32u; + } + + /* Constructor + * + * @param in_x X offset of the scissor box + * @param in_y Y offset of the scissor box + * @param in_width Width of the scissor box + * @param in_height Height of the scissor box + **/ + InternalScissorBox(int32_t in_x, + int32_t in_y, + uint32_t in_width, + uint32_t in_height) + { + height = in_height; + width = in_width; + x = in_x; + y = in_y; + } + }; + + /** Defines a single viewport + * + * This descriptor is not exposed to the Vulkan implementation. It is used to form Vulkan-specific + * descriptors at baking time instead. + */ + typedef struct InternalViewport + { + float height; + float max_depth; + float min_depth; + float origin_x; + float origin_y; + float width; + + /* Constructor. Should only be used by STL */ + InternalViewport() + { + height = 32.0f; + max_depth = 1.0f; + min_depth = 0.0f; + origin_x = 0.0f; + origin_y = 0.0f; + width = 32.0f; + } + + /* Constructor. + * + * @param in_origin_x Origin X of the viewport. + * @param in_origin_y Origin Y of the viewport. + * @param in_width Width of the viewport. + * @param in_height Height of the viewport. + * @param in_min_depth Minimum depth value the viewport should use. + * @param in_max_depth Maximum depth value the viewport should use. + **/ + InternalViewport(float in_origin_x, + float in_origin_y, + float in_width, + float in_height, + float in_min_depth, + float in_max_depth) + { + height = in_height; + max_depth = in_max_depth; + min_depth = in_min_depth; + origin_x = in_origin_x; + origin_y = in_origin_y; + width = in_width; + } + } InternalViewport; + + /** A vertex attribute descriptor. This descriptor is not exposed to the Vulkan implementation. Instead, + * its members are used to create Vulkan input attribute & binding descriptors at baking time. + */ + typedef struct InternalVertexBinding + { + uint32_t divisor; + Anvil::VertexInputRate rate; + uint32_t stride_in_bytes; + std::vector attributes; + + /** Dummy constructor. Should only be used by STL. */ + InternalVertexBinding() + { + divisor = UINT32_MAX; + rate = Anvil::VertexInputRate::UNKNOWN; + stride_in_bytes = UINT32_MAX; + } + + /** Constructor. + * + * @param in_divisor Vertexattribute divisor. + * @param in_rate Step rate. + * @param in_stride_in_bytes Stride in bytes. 
+ **/ + InternalVertexBinding(uint32_t in_divisor, + Anvil::VertexInputRate in_rate, + uint32_t in_stride_in_bytes) + { + divisor = in_divisor; + rate = in_rate; + stride_in_bytes = in_stride_in_bytes; + } + } InternalVertexBinding; + + typedef std::map InternalScissorBoxes; + typedef std::map InternalVertexBindings; + typedef std::map InternalViewports; + + /* Private functions */ + explicit GraphicsPipelineCreateInfo(const RenderPass* in_renderpass_ptr, + SubPassID in_subpass_id); + + bool copy_gfx_state_from(const Anvil::GraphicsPipelineCreateInfo* in_src_pipeline_create_info_ptr); + + /* Private variables */ + bool m_depth_clip_enabled; + + bool m_depth_bounds_test_enabled; + float m_max_depth_bounds; + float m_min_depth_bounds; + + bool m_depth_bias_enabled; + float m_depth_bias_clamp; + float m_depth_bias_constant_factor; + float m_depth_bias_slope_factor; + + bool m_depth_test_enabled; + Anvil::CompareOp m_depth_test_compare_op; + + std::vector m_enabled_dynamic_states; + + bool m_alpha_to_coverage_enabled; + bool m_alpha_to_one_enabled; + bool m_depth_clamp_enabled; + bool m_depth_writes_enabled; + bool m_logic_op_enabled; + bool m_primitive_restart_enabled; + bool m_rasterizer_discard_enabled; + bool m_sample_locations_enabled; + bool m_sample_mask_enabled; + bool m_sample_shading_enabled; + + bool m_stencil_test_enabled; + VkStencilOpState m_stencil_state_back_face; + VkStencilOpState m_stencil_state_front_face; + + VkExtent2D m_sample_location_grid_size; + std::vector m_sample_locations; + Anvil::SampleCountFlagBits m_sample_locations_per_pixel; + + Anvil::RasterizationOrderAMD m_rasterization_order; + + Anvil::ConservativeRasterizationModeEXT m_conservative_rasterization_mode; + float m_extra_primitive_overestimation_size; + + TessellationDomainOrigin m_tessellation_domain_origin; + + InternalVertexBindings m_bindings; + float m_blend_constant[4]; + Anvil::CullModeFlags m_cull_mode; + Anvil::PolygonMode m_polygon_mode; + Anvil::FrontFace m_front_face; + float m_line_width; + Anvil::LogicOp m_logic_op; + float m_min_sample_shading; + uint32_t m_n_dynamic_scissor_boxes; + uint32_t m_n_dynamic_viewports; + uint32_t m_n_patch_control_points; + Anvil::PrimitiveTopology m_primitive_topology; + uint32_t m_rasterization_stream_index; + Anvil::SampleCountFlagBits m_sample_count; + VkSampleMask m_sample_mask; + InternalScissorBoxes m_scissor_boxes; + SubPassAttachmentToBlendingPropertiesMap m_subpass_attachment_blending_properties; + InternalViewports m_viewports; + + const RenderPass* m_renderpass_ptr; + SubPassID m_subpass_id; + }; + +}; + +#endif /* ANVIL_GRAPHICS_PIPELINE_CREATE_INFO_H */ diff --git a/include/misc/image_create_info.h b/include/misc/image_create_info.h new file mode 100644 index 00000000..91b016b7 --- /dev/null +++ b/include/misc/image_create_info.h @@ -0,0 +1,585 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// + +#ifndef MISC_IMAGE_CREATE_INFO_H +#define MISC_IMAGE_CREATE_INFO_H + +#include "misc/types.h" + +namespace Anvil +{ + class ImageCreateInfo + { + public: + /* Public functions */ + + void clear_mipmaps_to_upload() + { + m_mipmaps_to_upload.clear(); + } + + /** Returns an instance of the "create info" item which can be used to instantiate a new Image + * instance *WITH* a memory backing. + * + * This constructor assumes the image should be initialized in UNDEFINED layout, if no mipmap data + * is specified, or PREINITIALIZED otherwise. In the latter case, it will then proceed with filling + * the storage with mipmap data (if @param in_mipmaps_ptr is not nullptr), and finally transition + * the image to the @param in_post_create_image_layout layout. + * + * NOTE: Unless specified later with a corresponding set_..() invocation, the following parameters are assumed by default: + * + * - External memory handle types: none + * - Image format list: empty (ie. image views created from the image can use any compatible format) + * - MT safety: Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE + * + * @param in_device_ptr Device to use. + * @param in_type Vulkan image type to use. + * @param in_format Vulkan format to use. + * @param in_tiling Vulkan image tiling to use. + * @param in_usage Vulkan image usage pattern to use. + * @param in_base_mipmap_width Width of the base mip-map. + * @param in_base_mipmap_height Height of the base mip-map. Must be at least 1 for all image types. + * @param in_base_mipmap_depth Depth of the base mip-map. Must be at least 1 for all image types. + * @param in_n_layers Number of layers to use. Must be at least 1 for all image types. + * @param in_sample_count Sample count to use. + * @param in_queue_families A combination of Anvil::QueueFamilyFlagBits::* bits, indicating which device queues + * the image is going to be accessed by. + * @param in_sharing_mode Vulkan sharing mode to use. + * @param in_use_full_mipmap_chain true if all mipmaps should be created for the image. False to only allocate + * storage for the base mip-map. + * @param in_memory_features Memory features for the memory backing. + * @param in_create_flags Optional image features that the created image should support. Must not include SPARSE_ALIASED, + * SPARSE_BINDING and SPARSE_RESIDENCY bits. + * @param in_post_alloc_image_layout Image layout to transfer the image to after it is created, assigned memory, + * and optionally uploaded mip data (if @param in_opt_mipmaps_ptr is not null). + * @param in_opt_mipmaps_ptr If not nullptr, specified MipmapRawData items will be used to drive the mipmap contents + * initialization process. Ignored if nullptr. + * Specifying a non-NULL in_opt_mipmaps_ptr argument will make the function OR + * @param in_usage with Anvil::IMAGE_USAGE_FLAG_TRANSFER_DST_BIT. + * + * @return New image instance, if successful, or nullptr otherwise. 
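+         *
+         * Editor's usage sketch (not part of the original header). "device_ptr" is an assumed, valid
+         * Anvil::BaseDevice instance, and the enumerator spellings below are assumptions modelled on
+         * the corresponding Vulkan enums; treat the snippet as illustrative only:
+         *
+         *     auto image_create_info_ptr = Anvil::ImageCreateInfo::create_alloc(
+         *         device_ptr,
+         *         Anvil::ImageType::_2D,
+         *         Anvil::Format::R8G8B8A8_UNORM,
+         *         Anvil::ImageTiling::OPTIMAL,
+         *         Anvil::ImageUsageFlagBits::SAMPLED_BIT | Anvil::ImageUsageFlagBits::TRANSFER_DST_BIT,
+         *         1920,  // in_base_mipmap_width
+         *         1080,  // in_base_mipmap_height
+         *         1,     // in_base_mipmap_depth
+         *         1,     // in_n_layers
+         *         Anvil::SampleCountFlagBits::_1_BIT,
+         *         Anvil::QueueFamilyFlagBits::GRAPHICS_BIT,
+         *         Anvil::SharingMode::EXCLUSIVE,
+         *         true,  // in_use_full_mipmap_chain
+         *         Anvil::MemoryFeatureFlagBits::DEVICE_LOCAL_BIT,
+         *         Anvil::ImageCreateFlagBits::NONE,
+         *         Anvil::ImageLayout::SHADER_READ_ONLY_OPTIMAL);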
+ **/ + static ImageCreateInfoUniquePtr create_alloc(const Anvil::BaseDevice* in_device_ptr, + Anvil::ImageType in_type, + Anvil::Format in_format, + Anvil::ImageTiling in_tiling, + Anvil::ImageUsageFlags in_usage, + uint32_t in_base_mipmap_width, + uint32_t in_base_mipmap_height, + uint32_t in_base_mipmap_depth, + uint32_t in_n_layers, + Anvil::SampleCountFlagBits in_sample_count, + Anvil::QueueFamilyFlags in_queue_families, + Anvil::SharingMode in_sharing_mode, + bool in_use_full_mipmap_chain, + Anvil::MemoryFeatureFlags in_memory_features, + Anvil::ImageCreateFlags in_create_flags, + Anvil::ImageLayout in_post_alloc_image_layout, + const std::vector* in_opt_mipmaps_ptr = nullptr); + + /** Returns an instance of the "create info" item which can be used to instantiate a new Image + * instance *without* a memory backing. A memory region should be bound to the object by calling + * Image::set_memory() before using the object for any operations. + * + * The function can also optionally fill the image with data, as soon as memory backing is + * attached. To make it do so, pass a non-null ptr to a MipmapRawData vector via the @param + * mipmaps_ptr argument. + * + * If this constructor is used, the image can be transformed automatically to the right layout + * at set_memory() call time by setting @param in_final_image_layout argument to a value other + * than Anvil::ImageLayout::UNDEFINED and Anvil::ImageLayout::PREINITIALIZED. + * + * NOTE: Unless specified later with a corresponding set_..() invocation, the following parameters are assumed by default: + * + * - External memory handle types: none + * - Image format list: empty (ie. image views created from the image can use any compatible format) + * - MT safety: Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE + * + * @param in_device_ptr Device to use. + * @param in_type Vulkan image type to use. + * @param in_format Vulkan format to use. + * @param in_tiling Vulkan image tiling to use. + * @param in_usage Vulkan image usage pattern to use. + * @param in_base_mipmap_width Width of the base mip-map. + * @param in_base_mipmap_height Height of the base mip-map. Must be at least 1 for all image types. + * @param in_base_mipmap_depth Depth of the base mip-map. Must be at least 1 for all image types. + * @param in_n_layers Number of layers to use. Must be at least 1 for all image types. + * @param in_sample_count Sample count to use. + * @param in_queue_families A combination of Anvil::QueueFamilyFlagBits::* bits, indicating which device queues + * the image is going to be accessed by. + * @param in_sharing_mode Vulkan sharing mode to use. + * @param in_use_full_mipmap_chain true, if all mipmaps should be created for the image. False to only allocate + * storage for the base mip-map. + * @param in_create_flags Optional image features that the created image should support. + * @param in_opt_post_alloc_image_layout Image layout to transfer the image to after it is created, assigned memory, + * and optionally uploaded mip data (if @param in_opt_mipmaps_ptr is not null). Ignored for + * images with SPARSE_ALIASED, SPARSE_BINDING or SPARSE_RESIDENCY bits set. + * @param in_opt_mipmaps_ptr If not NULL, specified data will be used to initialize created image's mipmaps + * with content, as soon as the image is assigned a memory backing. + * Specifying a non-NULL @param in_opt_mipmaps_ptr argument will make the function OR + * @param in_usage with Anvil::IMAGE_USAGE_FLAG_TRANSFER_DST_BIT. + * + * @return New image instance, if successful, or nullptr otherwise. 
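+         *
+         * Editor's note - the returned object can be further adjusted with the set_..() member
+         * functions declared later in this header before it is consumed. The function names below are
+         * taken from this header; "image_create_info_ptr" and the concrete values are illustrative:
+         *
+         *     image_create_info_ptr->set_width                 (512);
+         *     image_create_info_ptr->set_height                (512);
+         *     image_create_info_ptr->set_n_layers              (6);     // e.g. six layers for a cube-compatible image
+         *     image_create_info_ptr->set_uses_full_mipmap_chain(false); // only allocate storage for the base mip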
+ **/ + static ImageCreateInfoUniquePtr create_no_alloc(const Anvil::BaseDevice* in_device_ptr, + Anvil::ImageType in_type, + Anvil::Format in_format, + Anvil::ImageTiling in_tiling, + ImageUsageFlags in_usage, + uint32_t in_base_mipmap_width, + uint32_t in_base_mipmap_height, + uint32_t in_base_mipmap_depth, + uint32_t in_n_layers, + Anvil::SampleCountFlagBits in_sample_count, + Anvil::QueueFamilyFlags in_queue_families, + Anvil::SharingMode in_sharing_mode, + bool in_use_full_mipmap_chain, + Anvil::ImageCreateFlags in_create_flags, + Anvil::ImageLayout in_post_alloc_image_layout = Anvil::ImageLayout::UNDEFINED, + const std::vector* in_opt_mipmaps_ptr = nullptr); + + /** Returns an instance of the "create info" item which can be used to instantiate a new non-sparse Image + * instance, later to be bound to the user-specified swapchain memory. + * + * This function prototype may only be called for sGPU or mGPU devices which support the VK_KHR_device_group + * extension. + * + * Requires VK_KHR_device_group support. + * + * NOTE: Unless specified later with a corresponding set_..() invocation, the following parameters are assumed by default: + * + * - External memory handle types: none + * - Image format list: empty (ie. image views created from the image can use any compatible format) + * - MT safety: Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE + * - Physical devices: none + * - SFR rectangles: none + * + * For argument discussion, see specification of the other create() functions. + **/ + static ImageCreateInfoUniquePtr create_peer_no_alloc(const Anvil::BaseDevice* in_device_ptr, + const Anvil::Swapchain* in_swapchain_ptr, + uint32_t in_n_swapchain_image); + + /** Returns an instance of the "create info" item which can be used to instantiate a special type of an Image, + * useful for embedding a swapchain image instance. Object instantiated with this create item will NOT + * release the specified VkImage instance at its tear-down time. + * + * The image will NOT be transitioned to any specific image layout. + * + * For argument discussion, see specification of the other create() functions. + * + * + * NOTE: Unless specified later with a corresponding set_..() invocation, the following parameters are assumed by default: + * + * - MT safety: Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE + **/ + static ImageCreateInfoUniquePtr create_swapchain_wrapper(const Anvil::BaseDevice* in_device_ptr, + const Anvil::Swapchain* in_swapchain_ptr, + const VkImage& in_image, + const uint32_t& in_n_swapchain_image); + + const uint32_t& get_base_mip_depth() const + { + return m_depth; + } + + const uint32_t& get_base_mip_height() const + { + return m_height; + } + + const uint32_t& get_base_mip_width() const + { + return m_width; + } + + Anvil::ImageCreateFlags get_create_flags() const + { + return m_create_flags; + } + + const Anvil::BaseDevice* get_device() const + { + return m_device_ptr; + } + + const Anvil::ExternalMemoryHandleTypeFlags& get_external_memory_handle_types() const + { + return m_exportable_external_memory_handle_types; + } + + const Anvil::Format& get_format() const + { + return m_format; + } + + /* Tells which compatible image view formats have been specified for the image. + * + * Requires VK_KHR_image_format_list. + * + * @param out_n_image_view_formats_ptr Deref will be set to the number of formats the array under *out_image_view_formats_ptr_ptr + * holds. Must not be nullptr. + * @param out_image_view_formats_ptr_ptr Deref will be set to a ptr to an array of compatible formats. 
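+         *
+         * Editor's sketch (not part of the original header; variable names are assumptions):
+         *
+         *     uint32_t             n_view_formats   = 0;
+         *     const Anvil::Format* view_formats_ptr = nullptr;
+         *
+         *     image_create_info_ptr->get_image_view_formats(&n_view_formats,
+         *                                                   &view_formats_ptr);
+         *
+         *     // view_formats_ptr[0 .. n_view_formats - 1] now holds the formats previously specified
+         *     // with set_image_view_formats(), if any.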
+ * + **/ + void get_image_view_formats(uint32_t* out_n_image_view_formats_ptr, + const Anvil::Format** out_image_view_formats_ptr_ptr) const; + + const Anvil::ImageInternalType& get_internal_type() const + { + return m_internal_type; + } + + const Anvil::MemoryFeatureFlags& get_memory_features() const + { + return m_memory_features; + } + + /* NOTE: This func is a const since it should only be accessed by Anvil::Image. */ + const std::vector& get_mipmaps_to_upload() + { + return m_mipmaps_to_upload; + } + + const Anvil::MTSafety& get_mt_safety() const + { + return m_mt_safety; + } + + const uint32_t& get_n_layers() const + { + return m_n_layers; + } + + const std::vector get_device_indices() const + { + anvil_assert(m_internal_type == Anvil::ImageInternalType::PEER_NO_ALLOC || + m_internal_type == Anvil::ImageInternalType::SWAPCHAIN_WRAPPER); + + return m_device_indices; + } + + const Anvil::ImageLayout& get_post_alloc_image_layout() const + { + return m_post_alloc_layout; + } + + const Anvil::ImageLayout& get_post_create_image_layout() const + { + return m_post_create_layout; + } + + /** Returns queue families compatible with the image */ + const Anvil::QueueFamilyFlags& get_queue_families() const + { + return m_queue_families; + } + + Anvil::SampleCountFlagBits get_sample_count() const + { + return m_sample_count; + } + + const std::vector& get_sfr_rects() const + { + anvil_assert(m_internal_type == Anvil::ImageInternalType::PEER_NO_ALLOC || + m_internal_type == Anvil::ImageInternalType::SWAPCHAIN_WRAPPER); + + return m_sfr_rects; + } + + const Anvil::SharingMode& get_sharing_mode() const + { + return m_sharing_mode; + } + + const Anvil::ImageUsageFlags& get_stencil_image_aspect_usage() const + { + return m_usage_flags_stencil; + } + + const Anvil::Swapchain* get_swapchain() const + { + anvil_assert(m_internal_type == Anvil::ImageInternalType::PEER_NO_ALLOC || + m_internal_type == Anvil::ImageInternalType::SWAPCHAIN_WRAPPER); + + return m_swapchain_ptr; + } + + const VkImage& get_swapchain_image() const + { + anvil_assert(m_internal_type == Anvil::ImageInternalType::PEER_NO_ALLOC || + m_internal_type == Anvil::ImageInternalType::SWAPCHAIN_WRAPPER); + + return m_swapchain_image; + } + + const uint32_t& get_swapchain_image_index() const + { + anvil_assert(m_internal_type == Anvil::ImageInternalType::PEER_NO_ALLOC || + m_internal_type == Anvil::ImageInternalType::SWAPCHAIN_WRAPPER); + + return m_n_swapchain_image; + } + + /** Returns image tiling */ + const Anvil::ImageTiling& get_tiling() const + { + return m_tiling; + } + + const Anvil::ImageType& get_type() const + { + return m_type_vk; + } + + const ImageUsageFlags& get_usage_flags() const + { + return m_usage_flags; + } + + /** Tells whether this Image wrapper instance holds a sparse image */ + bool is_sparse() const + { + return (m_create_flags & Anvil::ImageCreateFlagBits::SPARSE_BINDING_BIT) != 0; + } + + //- + void set_create_flags(const Anvil::ImageCreateFlags& in_create_flags) + { + m_create_flags = in_create_flags; + } + + void set_depth(const uint32_t& in_depth) + { + m_depth = in_depth; + } + + void set_exportable_external_memory_handle_types(const ExternalMemoryHandleTypeFlags& in_external_memory_handle_types) + { + m_exportable_external_memory_handle_types = in_external_memory_handle_types; + } + + void set_format(const Anvil::Format& in_format) + { + m_format = in_format; + } + + void set_height(const uint32_t& in_height) + { + m_height = in_height; + } + + /* Lets the application specify what formats image views created 
off this image will use. + * + * This information will be chained to the image create info struct. + * + * Requires VK_KHR_image_format_list + * + * @param in_n_image_view_formats Number of formats available for reading under @param in_image_view_formats_ptr. + * Must not be 0. + * @param in_image_view_formats_ptr Image view formats to specify for the image. Must not be nullptr. + * + */ + void set_image_view_formats(const uint32_t& in_n_image_view_formats, + const Anvil::Format* in_image_view_formats_ptr); + + void set_memory_features(const Anvil::MemoryFeatureFlags& in_memory_features) + { + m_memory_features = in_memory_features; + } + + void set_mipmaps_to_upload(const std::vector& in_mipmaps_to_upload) + { + m_mipmaps_to_upload = in_mipmaps_to_upload; + } + + void set_mt_safety(const Anvil::MTSafety& in_mt_safety) + { + m_mt_safety = in_mt_safety; + } + + void set_n_layers(const uint32_t& in_n_layers) + { + m_n_layers = in_n_layers; + } + + void set_post_alloc_layout(const Anvil::ImageLayout& in_post_alloc_layout) + { + m_post_alloc_layout = in_post_alloc_layout; + } + + void set_post_create_layout(const Anvil::ImageLayout& in_post_create_layout) + { + m_post_create_layout = in_post_create_layout; + } + + void set_queue_families(const Anvil::QueueFamilyFlags& in_queue_families) + { + m_queue_families = in_queue_families; + } + + void set_sample_count(const Anvil::SampleCountFlagBits& in_sample_count) + { + m_sample_count = in_sample_count; + } + + void set_device_indices(const uint32_t& in_device_index_count, + const uint32_t* in_devices_indices_ptr) + { + m_device_indices.clear(); + m_device_indices.insert(m_device_indices.begin(), + in_devices_indices_ptr, + in_devices_indices_ptr + in_device_index_count); + } + + void set_SFR_rectangles(const uint32_t& in_n_SFR_rects, + const VkRect2D* in_SFRs_ptr) + { + m_sfr_rects.clear(); + + for (uint32_t n_sfr_rect = 0; + n_sfr_rect < in_n_SFR_rects; + ++n_sfr_rect) + { + m_sfr_rects.push_back(in_SFRs_ptr[n_sfr_rect]); + } + } + + void set_sharing_mode(const Anvil::SharingMode& in_sharing_mode) + { + m_sharing_mode = in_sharing_mode; + } + + /* Use this function to specify usage patterns for the stencil part of the image which is about to be created. + * As per VK_EXT_separate_stencil_usage, various restrictions apply, amongst which the most important is that + * @param in_usage has to be a subset of the usage flags specified globally for the image. + * + * Requires VK_EXT_separate_stencil_usage. 
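+         *
+         * Editor's sketch (not part of the original header; the enumerator spellings are assumptions),
+         * showing the subset requirement - the stencil usage below is a subset of the overall usage:
+         *
+         *     image_create_info_ptr->set_usage_flags               (Anvil::ImageUsageFlagBits::DEPTH_STENCIL_ATTACHMENT_BIT |
+         *                                                           Anvil::ImageUsageFlagBits::SAMPLED_BIT);
+         *     image_create_info_ptr->set_stencil_image_aspect_usage(Anvil::ImageUsageFlagBits::DEPTH_STENCIL_ATTACHMENT_BIT);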
+ */ + void set_stencil_image_aspect_usage(const Anvil::ImageUsageFlags& in_usage) + { + m_usage_flags_stencil = in_usage; + } + + void set_swapchain(const Anvil::Swapchain* in_swapchain_ptr) + { + m_swapchain_ptr = in_swapchain_ptr; + } + + void set_swapchain_image_index(const uint32_t& in_n_swapchain_image_index) + { + m_n_swapchain_image = in_n_swapchain_image_index; + } + + void set_tiling(const Anvil::ImageTiling& in_tiling) + { + m_tiling = in_tiling; + } + + void set_usage_flags(const ImageUsageFlags& in_usage_flags) + { + m_usage_flags = in_usage_flags; + } + + void set_uses_full_mipmap_chain(const bool& in_use_full_mipmap_chain) + { + m_use_full_mipmap_chain = in_use_full_mipmap_chain; + } + + void set_width(const uint32_t& in_width) + { + m_width = in_width; + } + + const bool& uses_full_mipmap_chain() const + { + return m_use_full_mipmap_chain; + } + + private: + /* Private functions */ + + ImageCreateInfo(Anvil::ImageInternalType in_type, + const Anvil::BaseDevice* in_device_ptr, + Anvil::ImageType in_type_vk, + Anvil::Format in_format, + Anvil::ImageTiling in_tiling, + Anvil::SharingMode in_sharing_mode, + ImageUsageFlags in_usage, + uint32_t in_base_mipmap_width, + uint32_t in_base_mipmap_height, + uint32_t in_base_mipmap_depth, + uint32_t in_n_layers, + Anvil::SampleCountFlagBits in_sample_count, + bool in_use_full_mipmap_chain, + ImageCreateFlags in_create_flags, + Anvil::QueueFamilyFlags in_queue_families, + Anvil::ImageLayout in_post_create_image_layout, + const Anvil::ImageLayout& in_post_alloc_image_layout, + const std::vector* in_opt_mipmaps_ptr, + const Anvil::MTSafety& in_mt_safety, + Anvil::ExternalMemoryHandleTypeFlags in_exportable_external_memory_handle_types, + const Anvil::MemoryFeatureFlags& in_memory_features); + + /* Private variables */ + + Anvil::ImageCreateFlags m_create_flags; + uint32_t m_depth; + const Anvil::BaseDevice* m_device_ptr; + Anvil::ExternalMemoryHandleTypeFlags m_exportable_external_memory_handle_types; + Anvil::Format m_format; + uint32_t m_height; + std::vector m_image_view_formats; + const Anvil::ImageInternalType m_internal_type; + Anvil::MemoryFeatureFlags m_memory_features; + std::vector m_mipmaps_to_upload; + Anvil::MTSafety m_mt_safety; + uint32_t m_n_layers; + Anvil::ImageLayout m_post_alloc_layout; + Anvil::ImageLayout m_post_create_layout; + Anvil::QueueFamilyFlags m_queue_families; + Anvil::SampleCountFlagBits m_sample_count; + Anvil::SharingMode m_sharing_mode; + Anvil::ImageTiling m_tiling; + const Anvil::ImageType m_type_vk; + ImageUsageFlags m_usage_flags; + ImageUsageFlags m_usage_flags_stencil; + bool m_use_full_mipmap_chain; + uint32_t m_width; + + /* Only used for peer images */ + std::vector m_device_indices; + std::vector m_sfr_rects; + + /* Only used for swapchain wrapper images */ + uint32_t m_n_swapchain_image; + VkImage m_swapchain_image; + const Anvil::Swapchain* m_swapchain_ptr; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(ImageCreateInfo); + ANVIL_DISABLE_COPY_CONSTRUCTOR(ImageCreateInfo); + }; + +}; /* namespace Anvil */ + +#endif /* MISC_IMAGE_CREATE_INFO_H */ diff --git a/include/misc/image_view_create_info.h b/include/misc/image_view_create_info.h new file mode 100644 index 00000000..e73d792f --- /dev/null +++ b/include/misc/image_view_create_info.h @@ -0,0 +1,494 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef MISC_IMAGE_VIEW_CREATE_INFO_H +#define MISC_IMAGE_VIEW_CREATE_INFO_H + +#include "misc/types.h" + + +namespace Anvil +{ + class ImageViewCreateInfo + { + public: + /* Public functions */ + + /** Use this function if you need to create a single-sample 1D image view wrapper instance. + * + * @param in_device_ptr Device to use. + * @param in_image_ptr Image instance to create a view for. Must not be nullptr. The specified + * object will be retained and release at ImageView release time. + * @param in_n_base_layer Base layer index. + * @param in_n_base_mipmap_level Base mipmap level. + * @param in_n_mipmaps Number of mipmaps to include in the view. + * @param in_aspect_mask Image aspect mask to use when creating the Vulkan image view instance. + * @param in_format Image view format. + * @param in_swizzle_red Channel to use for the red component when sampling the view. + * @param in_swizzle_green Channel to use for the green component when sampling the view. + * @param in_swizzle_blue Channel to use for the blue component when sampling the view. + * @param in_swizzle_alpha Channel to use for the alpha component when sampling the view. + * + * NOTE: Sampler YCbCr Conversion support is disabled by default. In order to enable it for the about-to-be-created + * image view, please call set_sampler_ycbcr_conversion_ptr(). + * + * NOTE: Unless specified later with a corresponding set_..() invocation, the following parameters are assumed by default: + * + * - MT safety: Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE + * - Sampler YCbCr Conversion support: disabled + * + * @return New ImageView instance, if function executes successfully; nullptr otherwise. + **/ + static Anvil::ImageViewCreateInfoUniquePtr create_1D(const Anvil::BaseDevice* in_device_ptr, + Image* in_image_ptr, + uint32_t in_n_base_layer, + uint32_t in_n_base_mipmap_level, + uint32_t in_n_mipmaps, + Anvil::ImageAspectFlags in_aspect_mask, + Anvil::Format in_format, + Anvil::ComponentSwizzle in_swizzle_red, + Anvil::ComponentSwizzle in_swizzle_green, + Anvil::ComponentSwizzle in_swizzle_blue, + Anvil::ComponentSwizzle in_swizzle_alpha); + + /** Use this function if you need to create a single-sample 1D array image view wrapper instance. + * + * @param in_device_ptr Device to use. + * @param in_image_ptr Image instance to create a view for. Must not be nullptr. The specified + * object will be retained and release at ImageView release time. 
+ * @param in_n_base_layer Base layer index. + * @param in_n_layers Number of layers to include in the view. + * @param in_n_base_mipmap_level Base mipmap level. + * @param in_n_mipmaps Number of mipmaps to include in the view. + * @param in_aspect_mask Image aspect mask to use when creating the Vulkan image view instance. + * @param in_format Image view format. + * @param in_swizzle_red Channel to use for the red component when sampling the view. + * @param in_swizzle_green Channel to use for the green component when sampling the view. + * @param in_swizzle_blue Channel to use for the blue component when sampling the view. + * @param in_swizzle_alpha Channel to use for the alpha component when sampling the view. + * + * NOTE: Sampler YCbCr Conversion support is disabled by default. In order to enable it for the about-to-be-created + * image view, please call set_sampler_ycbcr_conversion_ptr(). + * + * NOTE: Unless specified later with a corresponding set_..() invocation, the following parameters are assumed by default: + * + * - MT safety: Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE + * - Sampler YCbCr Conversion support: disabled + * + * @return New ImageView instance, if function executes successfully; nullptr otherwise. + **/ + static Anvil::ImageViewCreateInfoUniquePtr create_1D_array(const Anvil::BaseDevice* in_device_ptr, + Image* in_image_ptr, + uint32_t in_n_base_layer, + uint32_t in_n_layers, + uint32_t in_n_base_mipmap_level, + uint32_t in_n_mipmaps, + Anvil::ImageAspectFlags in_aspect_mask, + Anvil::Format in_format, + Anvil::ComponentSwizzle in_swizzle_red, + Anvil::ComponentSwizzle in_swizzle_green, + Anvil::ComponentSwizzle in_swizzle_blue, + Anvil::ComponentSwizzle in_swizzle_alpha); + + + /** Use this function if you need to create a single-sample or a multi-sample 2D image view wrapper instance. The view will be + * single-sample if @param in_image_ptr uses 1 sample per texel, and multi-sample otherwise. + * + * @param in_device_ptr Device to use + * @param in_image_ptr Image instance to create a view for. Must not be nullptr. The specified + * object will be retained and release at ImageView release time. + * @param in_n_base_layer Base layer index. + * @param in_n_base_mipmap_level Base mipmap level. + * @param in_n_mipmaps Number of mipmaps to include in the view. + * @param in_aspect_mask Image aspect mask to use when creating the Vulkan image view instance. + * @param in_format Image view format. + * @param in_swizzle_red Channel to use for the red component when sampling the view. + * @param in_swizzle_green Channel to use for the green component when sampling the view. + * @param in_swizzle_blue Channel to use for the blue component when sampling the view. + * @param in_swizzle_alpha Channel to use for the alpha component when sampling the view. + * + * NOTE: Sampler YCbCr Conversion support is disabled by default. In order to enable it for the about-to-be-created + * image view, please call set_sampler_ycbcr_conversion_ptr(). + * + * NOTE: Unless specified later with a corresponding set_..() invocation, the following parameters are assumed by default: + * + * - MT safety: Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE + * - Sampler YCbCr Conversion support: disabled + * + * @return New ImageView instance, if function executes successfully; nullptr otherwise. 
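+         *
+         * A minimal usage sketch (hypothetical; device_ptr and image_ptr are assumed to already exist, and the
+         * aspect, format and swizzle values below are placeholders rather than requirements):
+         *
+         *     auto view_create_info_ptr = Anvil::ImageViewCreateInfo::create_2D(device_ptr,
+         *                                                                       image_ptr,
+         *                                                                       0,  // base layer
+         *                                                                       0,  // base mipmap level
+         *                                                                       1,  // n mipmaps
+         *                                                                       Anvil::ImageAspectFlagBits::COLOR_BIT,
+         *                                                                       Anvil::Format::B8G8R8A8_UNORM,
+         *                                                                       Anvil::ComponentSwizzle::R,
+         *                                                                       Anvil::ComponentSwizzle::G,
+         *                                                                       Anvil::ComponentSwizzle::B,
+         *                                                                       Anvil::ComponentSwizzle::A);
+         *
+         * The returned create info is then consumed by the image view factory (assumed to be Anvil::ImageView::create(),
+         * following the library's create-info pattern).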
+ **/ + static Anvil::ImageViewCreateInfoUniquePtr create_2D(const Anvil::BaseDevice* in_device_ptr, + Image* in_image_ptr, + uint32_t in_n_base_layer, + uint32_t in_n_base_mipmap_level, + uint32_t in_n_mipmaps, + Anvil::ImageAspectFlags in_aspect_mask, + Anvil::Format in_format, + Anvil::ComponentSwizzle in_swizzle_red, + Anvil::ComponentSwizzle in_swizzle_green, + Anvil::ComponentSwizzle in_swizzle_blue, + Anvil::ComponentSwizzle in_swizzle_alpha); + + /** Use this function if you need to create a single-sample or a multi-sample 2D array image view wrapper instance. The view will be + * single-sample if @param in_image_ptr uses 1 sample per texel, and multi-sample otherwise. + * + * @param in_device_ptr Device to use. + * @param in_image_ptr Image instance to create a view for. Must not be nullptr. The specified + * object will be retained and release at ImageView release time. + * @param in_n_base_layer Base layer index. + * @param in_n_layers Number of layers to include in the view. + * @param in_n_base_mipmap_level Base mipmap level. + * @param in_n_mipmaps Number of mipmaps to include in the view. + * @param in_aspect_mask Image aspect mask to use when creating the Vulkan image view instance. + * @param in_format Image view format. + * @param in_swizzle_red Channel to use for the red component when sampling the view. + * @param in_swizzle_green Channel to use for the green component when sampling the view. + * @param in_swizzle_blue Channel to use for the blue component when sampling the view. + * @param in_swizzle_alpha Channel to use for the alpha component when sampling the view. + * + * NOTE: Sampler YCbCr Conversion support is disabled by default. In order to enable it for the about-to-be-created + * image view, please call set_sampler_ycbcr_conversion_ptr(). + * + * NOTE: Unless specified later with a corresponding set_..() invocation, the following parameters are assumed by default: + * + * - MT safety: Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE + * - Sampler YCbCr Conversion support: disabled + * + * @return New ImageView instance, if function executes successfully; nullptr otherwise. + **/ + static Anvil::ImageViewCreateInfoUniquePtr create_2D_array(const Anvil::BaseDevice* in_device_ptr, + Anvil::Image* in_image_ptr, + uint32_t in_n_base_layer, + uint32_t in_n_layers, + uint32_t in_n_base_mipmap_level, + uint32_t in_n_mipmaps, + Anvil::ImageAspectFlags in_aspect_mask, + Anvil::Format in_format, + Anvil::ComponentSwizzle in_swizzle_red, + Anvil::ComponentSwizzle in_swizzle_green, + Anvil::ComponentSwizzle in_swizzle_blue, + Anvil::ComponentSwizzle in_swizzle_alpha); + + /** Use this function if you need to create a single-sample 3D image view wrapper instance. + * + * @param in_device_ptr Device to use. + * @param in_image_ptr Image instance to create a view for. Must not be nullptr. The specified + * object will be retained and release at ImageView release time. + * @param in_n_base_mipmap_level Base mipmap level. + * @param in_n_mipmaps Number of mipmaps to include in the view. + * @param in_aspect_mask Image aspect mask to use when creating the Vulkan image view instance. + * @param in_format Image view format. + * @param in_swizzle_red Channel to use for the red component when sampling the view. + * @param in_swizzle_green Channel to use for the green component when sampling the view. + * @param in_swizzle_blue Channel to use for the blue component when sampling the view. + * @param in_swizzle_alpha Channel to use for the alpha component when sampling the view. 
+ * + * NOTE: Sampler YCbCr Conversion support is disabled by default. In order to enable it for the about-to-be-created + * image view, please call set_sampler_ycbcr_conversion_ptr(). + * + * NOTE: Unless specified later with a corresponding set_..() invocation, the following parameters are assumed by default: + * + * - MT safety: Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE + * - Sampler YCbCr Conversion support: disabled + * + * @return New ImageView instance, if function executes successfully; nullptr otherwise. + **/ + static Anvil::ImageViewCreateInfoUniquePtr create_3D(const Anvil::BaseDevice* in_device_ptr, + Image* in_image_ptr, + uint32_t in_n_base_mipmap_level, + uint32_t in_n_mipmaps, + Anvil::ImageAspectFlags in_aspect_mask, + Anvil::Format in_format, + Anvil::ComponentSwizzle in_swizzle_red, + Anvil::ComponentSwizzle in_swizzle_green, + Anvil::ComponentSwizzle in_swizzle_blue, + Anvil::ComponentSwizzle in_swizzle_alpha); + + /** Use this function if you need to create a cube-map image view wrapper instance. + * + * @param in_device_ptr Device to use. + * @param in_image_ptr Image instance to create a view for. Must not be nullptr. The specified + * object will be retained and release at ImageView release time. + * @param in_n_base_layer Base layer index. + * @param in_n_base_mipmap_level Base mipmap level. + * @param in_n_mipmaps Number of mipmaps to include in the view. + * @param in_aspect_mask Image aspect mask to use when creating the Vulkan image view instance. + * @param in_format Image view format. + * @param in_swizzle_red Channel to use for the red component when sampling the view. + * @param in_swizzle_green Channel to use for the green component when sampling the view. + * @param in_swizzle_blue Channel to use for the blue component when sampling the view. + * @param in_swizzle_alpha Channel to use for the alpha component when sampling the view. + * + * NOTE: Sampler YCbCr Conversion support is disabled by default. In order to enable it for the about-to-be-created + * image view, please call set_sampler_ycbcr_conversion_ptr(). + * + * NOTE: Unless specified later with a corresponding set_..() invocation, the following parameters are assumed by default: + * + * - MT safety: Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE + * - Sampler YCbCr Conversion support: disabled + * + * @return New ImageView instance, if function executes successfully; nullptr otherwise. + **/ + static Anvil::ImageViewCreateInfoUniquePtr create_cube_map(const Anvil::BaseDevice* in_device_ptr, + Anvil::Image* in_image_ptr, + uint32_t in_n_base_layer, + uint32_t in_n_base_mipmap_level, + uint32_t in_n_mipmaps, + Anvil::ImageAspectFlags in_aspect_mask, + Anvil::Format in_format, + Anvil::ComponentSwizzle in_swizzle_red, + Anvil::ComponentSwizzle in_swizzle_green, + Anvil::ComponentSwizzle in_swizzle_blue, + Anvil::ComponentSwizzle in_swizzle_alpha); + + /** Use this function if you need to create a cube-map array image view wrapper instance. + * + * @param in_device_ptr Device to use. + * @param in_image_ptr Image instance to create a view for. Must not be nullptr. The specified + * object will be retained and release at ImageView release time. + * @param in_n_base_layer Base layer index. + * @param in_n_cube_maps Number of cube-maps to include in the view. The number of layers created + * for the view will be equal to @param in_n_cube_maps * 6. + * @param in_n_base_mipmap_level Base mipmap level. + * @param in_n_mipmaps Number of mipmaps to include in the view. 
+ * @param in_aspect_mask Image aspect mask to use when creating the Vulkan image view instance. + * @param in_format Image view format. + * @param in_swizzle_red Channel to use for the red component when sampling the view. + * @param in_swizzle_green Channel to use for the green component when sampling the view. + * @param in_swizzle_blue Channel to use for the blue component when sampling the view. + * @param in_swizzle_alpha Channel to use for the alpha component when sampling the view. + * + * NOTE: Sampler YCbCr Conversion support is disabled by default. In order to enable it for the about-to-be-created + * image view, please call set_sampler_ycbcr_conversion_ptr(). + * + * NOTE: Unless specified later with a corresponding set_..() invocation, the following parameters are assumed by default: + * + * - MT safety: Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE + * - Sampler YCbCr Conversion support: disabled + * + * @return New ImageView instance, if function executes successfully; nullptr otherwise. + **/ + static Anvil::ImageViewCreateInfoUniquePtr create_cube_map_array(const Anvil::BaseDevice* in_device_ptr, + Anvil::Image* in_image_ptr, + uint32_t in_n_base_layer, + uint32_t in_n_cube_maps, + uint32_t in_n_base_mipmap_level, + uint32_t in_n_mipmaps, + Anvil::ImageAspectFlags in_aspect_mask, + Anvil::Format in_format, + Anvil::ComponentSwizzle in_swizzle_red, + Anvil::ComponentSwizzle in_swizzle_green, + Anvil::ComponentSwizzle in_swizzle_blue, + Anvil::ComponentSwizzle in_swizzle_alpha); + + /* Returns the aspect assigned to the image view */ + Anvil::ImageAspectFlags get_aspect() const + { + return m_aspect_mask; + } + + /* Returns base layer index used by the image view */ + uint32_t get_base_layer() const + { + return m_n_base_layer; + } + + /* Returns base mip level used by the image view */ + uint32_t get_base_mipmap_level() const + { + return m_n_base_mipmap_level; + } + + const Anvil::BaseDevice* get_device() const + { + return m_device_ptr; + } + + /** Returns image view's format */ + Anvil::Format get_format() const + { + return m_format; + } + + const Anvil::MTSafety& get_mt_safety() const + { + return m_mt_safety; + } + + /** Returns number of layers encapsulated by the image view */ + uint32_t get_n_layers() const + { + return m_n_layers; + } + + /** Returns number of mipmaps encapsulated by the image view */ + uint32_t get_n_mipmaps() const + { + return m_n_mipmaps; + } + + /** Returns a pointer to the parent image, from which the image view has been created. */ + Anvil::Image* get_parent_image() const + { + return m_parent_image_ptr; + } + + const Anvil::SamplerYCbCrConversion* get_sampler_ycbcr_conversion_ptr() const + { + return m_sampler_ycbcr_conversion_ptr; + } + + /** Returns swizzle array assigned to the image view */ + const std::array& get_swizzle_array() const + { + return m_swizzle_array; + } + + /** Returns image view type of the image view instance */ + Anvil::ImageViewType get_type() const + { + return m_type; + } + + /** Returns usage flags associated with the image view. + * + * NOTE: If the function returns Anvil::ImageUsageFlagBits::NONE, the image view inherits usage bits + * from the parent image. 
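+         *
+         * Hypothetical override sketch (SAMPLED_BIT is a placeholder; the value must be a subset of the parent
+         * image's usage flags and requires VK_KHR_maintenance2, as described for set_usage() below):
+         *
+         *     view_create_info_ptr->set_usage(Anvil::ImageUsageFlagBits::SAMPLED_BIT);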
+ * + */ + const Anvil::ImageUsageFlags& get_usage() const + { + return m_usage; + } + + void set_aspect(const Anvil::ImageAspectFlags& in_aspect) + { + m_aspect_mask = in_aspect; + } + + void set_base_layer(const uint32_t& in_n_base_layer) + { + m_n_base_layer = in_n_base_layer; + } + + void set_base_mipmap_level(const uint32_t& in_n_base_mipmap_level) + { + m_n_base_mipmap_level = in_n_base_mipmap_level; + } + + void set_device(const Anvil::BaseDevice* in_device_ptr) + { + m_device_ptr = in_device_ptr; + } + + void set_format(const Anvil::Format& in_format) + { + m_format = in_format; + } + + void set_mt_safety(const Anvil::MTSafety& in_mt_safety) + { + m_mt_safety = in_mt_safety; + } + + void set_n_layers(const uint32_t& in_n_layers) + { + m_n_layers = in_n_layers; + } + + void set_n_mipmaps(const uint32_t& in_n_mipmaps) + { + m_n_mipmaps = in_n_mipmaps; + } + + void set_parent_image(Anvil::Image* in_parent_image_ptr) + { + m_parent_image_ptr = in_parent_image_ptr; + } + + /* Attaches or detaches already attached SamplerYCBCRConversion object from the create info struct. + * This information will be used at image view creation time. + * + * NOTE: Requires VK_KHR_sampler_ycbcr_conversion + * + * @param in_sampler_ycbcr_conversion_ptr If not nullptr, the specified object will be passed to the implementation + * at sampler creation time by chaining VkSamplerYcbcrConversionInfo struct. + * If nullptr, the struct will not be chained. + **/ + void set_sampler_ycbcr_conversion_ptr(const Anvil::SamplerYCbCrConversion* in_sampler_ycbcr_conversion_ptr) + { + m_sampler_ycbcr_conversion_ptr = in_sampler_ycbcr_conversion_ptr; + } + + void set_swizzle_array(const std::array& in_swizzle_array) + { + m_swizzle_array = in_swizzle_array; + } + + /* By default, image views inherit usage flags from the parent image. You can use this setter function to override + * the default behavior with a subset of parent image's usage flags. + * + * Requires VK_KHR_maintenance2. + */ + void set_usage(const Anvil::ImageUsageFlags& in_usage) + { + m_usage = in_usage; + } + + private: + + /* Private functions */ + ImageViewCreateInfo(const Anvil::ImageAspectFlags& in_aspect_mask, + const Anvil::BaseDevice* in_device_ptr, + const Anvil::Format in_format, + const uint32_t in_n_base_layer, + const uint32_t in_n_base_mipmap_level, + const uint32_t in_n_layers, + const uint32_t in_n_mipmaps, + Anvil::Image* in_parent_image_ptr, + const Anvil::ComponentSwizzle* in_swizzle_array_ptr, + const Anvil::ImageViewType in_type, + const Anvil::MTSafety& in_mt_safety); + + /* Private variables */ + + Anvil::ImageAspectFlags m_aspect_mask; + + const Anvil::BaseDevice* m_device_ptr; + Anvil::Format m_format; + Anvil::MTSafety m_mt_safety; + uint32_t m_n_base_layer; + uint32_t m_n_base_mipmap_level; + uint32_t m_n_layers; + uint32_t m_n_mipmaps; + Anvil::Image* m_parent_image_ptr; + const Anvil::SamplerYCbCrConversion* m_sampler_ycbcr_conversion_ptr; + std::array m_swizzle_array; + Anvil::ImageViewType m_type; + Anvil::ImageUsageFlags m_usage; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(ImageViewCreateInfo); + ANVIL_DISABLE_COPY_CONSTRUCTOR(ImageViewCreateInfo); + }; + +}; /* namespace Anvil */ + +#endif/* MISC_IMAGE_VIEW_CREATE_INFO_H */ diff --git a/include/misc/instance_create_info.h b/include/misc/instance_create_info.h new file mode 100644 index 00000000..6f0c3192 --- /dev/null +++ b/include/misc/instance_create_info.h @@ -0,0 +1,214 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef MISC_INSTANCE_CREATE_INFO_H +#define MISC_INSTANCE_CREATE_INFO_H + +#include "misc/types.h" + + +namespace Anvil +{ + typedef std::function DebugCallbackFunction; + + enum class LayerSettingType : uint32_t { Bool32 = 0, Int32, Int64, Uint32, Uint64, Float32, Float64, String }; + + struct LayerSetting { + const char *pLayerName; + const char *pSettingName; + LayerSettingType type; + uint32_t valueCount; + const void *pValues; + }; + + class InstanceCreateInfo + { + public: + /* Public functions */ + + /** Instantiates a new create info which must be specified when creating Anvil Instance object. + * + * By default, Anvil will query the highest version of Vulkan supported by the implementation and use it when creating the Instance. + * You can override this behavior by calling set_api_version(). + * + * The following are also assumed by default: + * + * + App version: 0 + * + Engine version: 0 + + * @param in_app_name Name of the application, to be passed in VkCreateInstanceInfo + * structure. + * @param in_engine_name Name of the engine, to be passed in VkCreateInstanceInfo + * structure. + * @param in_opt_validation_callback_function If not nullptr, the specified function will be called whenever + * a call-back from any of the validation layers is received. + * Ignored otherwise. + * @param in_mt_safe True if all instance-based operations where external host synchronization + * is required should be automatically synchronized by Anvil. + * @param in_opt_disallowed_instance_level_extensions Optional vector holding instance-level extension names that must NOT be + * requested at creation time. 
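+         *
+         * A minimal usage sketch (hypothetical; the layer and layer-setting vectors are left empty, no validation
+         * callback is installed, and all values below are placeholders rather than recommendations):
+         *
+         *     auto instance_create_info_ptr = Anvil::InstanceCreateInfo::create("MyApp",
+         *                                                                       "MyEngine",
+         *                                                                       {},       // no explicit layers
+         *                                                                       {},       // no layer settings
+         *                                                                       nullptr,  // no validation callback
+         *                                                                       false);   // not MT-safe
+         *
+         * The create info is then consumed by the instance factory (assumed to be Anvil::Instance::create(),
+         * following the library's create-info pattern):
+         *
+         *     auto instance_ptr = Anvil::Instance::create(std::move(instance_create_info_ptr) );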
+ **/ + static Anvil::InstanceCreateInfoUniquePtr create(const std::string& in_app_name, + const std::string& in_engine_name, + std::vector &&in_layers, + std::vector &&in_layer_settings, + Anvil::DebugCallbackFunction in_opt_validation_callback_proc, + bool in_mt_safe, + const std::vector& in_opt_disallowed_instance_level_extensions = std::vector() ); + + const Anvil::APIVersion& get_api_version() const + { + return m_api_version; + } + + const std::string& get_app_name() const + { + return m_app_name; + } + + const uint32_t& get_app_version() const + { + return m_app_version; + } + + const std::vector &get_layer_settings() const { return m_layer_settings; } + + const std::vector &get_layers() const { return m_layers; } + + const std::vector& get_disallowed_instance_level_extensions() const + { + return m_disallowed_instance_level_extensions; + } + + const std::string& get_engine_name() const + { + return m_engine_name; + } + + const uint32_t& get_engine_version() const + { + return m_engine_version; + } + + /* Returns memory type index which should be used by Anvil when allocating memory. This value can be specified with set_n_memory_type_to_use_for_all_allocs(). + * + * If UINT32_MAX is returned, Anvil will be free to use any memory type determined to be valid for particular memory allocations. This is the default behavior. + **/ + uint32_t get_n_memory_type_to_use_for_all_allocs() const + { + return m_n_memory_type_to_use_for_all_alocs; + } + + const Anvil::DebugCallbackFunction& get_validation_callback() const + { + return m_validation_callback; + } + + const bool& is_mt_safe() const + { + return m_is_mt_safe; + } + + void set_api_version(const Anvil::APIVersion& in_api_version) + { + m_api_version = in_api_version; + } + + void set_app_name(const std::string& in_app_name) + { + m_app_name = in_app_name; + } + + void set_app_version(const uint32_t& in_version) + { + m_app_version = in_version; + } + + void set_engine_version(const uint32_t& in_version) + { + m_engine_version = in_version; + } + + void set_disallowed_instance_level_extensions(const std::vector& in_extensions) + { + m_disallowed_instance_level_extensions = in_extensions; + } + + void set_engine_name(const std::string& in_engine_name) + { + m_engine_name = in_engine_name; + } + + /* When called, any memory allocations performed by objects owned by Anvil Instance object created using this create info structure + * will always use the specified memory type index. + * + * This function should not be called unless you have a good understanding of the implications. 
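+         *
+         * Hypothetical sketch (the index 2 below is a placeholder; a real value must be derived from the device's
+         * reported memory properties):
+         *
+         *     instance_create_info_ptr->set_n_memory_type_to_use_for_all_allocs(2);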
+ */ + void set_n_memory_type_to_use_for_all_allocs(const uint32_t& in_n_memory_type_to_use_for_all_allocs) + { + m_n_memory_type_to_use_for_all_alocs = in_n_memory_type_to_use_for_all_allocs; + } + + void set_validation_callback(const Anvil::DebugCallbackFunction& in_validation_callback) + { + m_validation_callback = in_validation_callback; + } + + void set_is_mt_safe(const bool& in_is_mt_safe) + { + m_is_mt_safe = in_is_mt_safe; + } + + const void *pNext = nullptr; + + private: + + /* Private functions */ + InstanceCreateInfo(const std::string& in_app_name, + const std::string& in_engine_name, + std::vector &&in_layers, + std::vector &&in_layer_settings, + Anvil::DebugCallbackFunction in_opt_validation_callback_proc, + bool in_mt_safe, + const std::vector& in_opt_disallowed_instance_level_extensions); + + /* Private variables */ + + Anvil::APIVersion m_api_version; + std::string m_app_name; + uint32_t m_app_version; + std::vector m_disallowed_instance_level_extensions; + std::string m_engine_name; + uint32_t m_engine_version; + std::vector m_layers; + std::vector m_layer_settings; + bool m_is_mt_safe; + uint32_t m_n_memory_type_to_use_for_all_alocs; + Anvil::DebugCallbackFunction m_validation_callback; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(InstanceCreateInfo); + ANVIL_DISABLE_COPY_CONSTRUCTOR(InstanceCreateInfo); + }; + +}; /* namespace Anvil */ + +#endif /* MISC_INSTANCE_CREATE_INFO_H */ diff --git a/include/misc/io.h b/include/misc/io.h index 6a330c6c..ce40c5c6 100644 --- a/include/misc/io.h +++ b/include/misc/io.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -66,6 +66,7 @@ namespace Anvil KEY_ID_RIGHT = ANVIL_KEY_HELPER(Right), KEY_ID_SPACE = ANVIL_KEY_HELPER(space), #endif + #endif KEY_ID_COUNT, diff --git a/include/misc/library.h b/include/misc/library.h new file mode 100644 index 00000000..42299197 --- /dev/null +++ b/include/misc/library.h @@ -0,0 +1,68 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// + +/** Implements a wrapper for a single Vulkan instance. Implemented in order to: + * + * - manage life-time of Vulkan instances. 
+ * - encapsulate all logic required to manipulate instances and children objects. + * - let ObjectTracker detect leaking Vulkan instance wrapper instances. + * + * The wrapper is NOT thread-safe. + **/ +#ifndef MISC_LIBRARY_H +#define MISC_LIBRARY_H + +#include "misc/types.h" + +namespace Anvil +{ + /* Forward declarations */ + class Library; + + typedef std::unique_ptr LibraryUniquePtr; + + class Library + { + public: + /* Public functions */ + static LibraryUniquePtr create(const char* in_dll_name); + + ~Library(); + + void* get_proc_address(const char* in_func_name) const; + + private: + /* Private functions */ + Library(const char* in_dll_name); + + bool init(); + + /* Private variables */ + const std::string m_dll_name; + void* m_handle; + + Library (const Library&) = delete; + Library& operator=(const Library&) = delete; + }; +} + +#endif /* MISC_LIBRARY_H */ \ No newline at end of file diff --git a/include/misc/memalloc_backends/backend_oneshot.h b/include/misc/memalloc_backends/backend_oneshot.h index 1a27e66e..c5cd5053 100644 --- a/include/misc/memalloc_backends/backend_oneshot.h +++ b/include/misc/memalloc_backends/backend_oneshot.h @@ -1,4 +1,4 @@ -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -41,7 +41,8 @@ namespace Anvil * * Should only be used by Anvil::MemoryAllocator */ - class OneShot : public Anvil::MemoryAllocator::IMemoryAllocatorBackend + class OneShot : public Anvil::MemoryAllocator::IMemoryAllocatorBackend, + public std::enable_shared_from_this { public: /* Public functions */ @@ -52,7 +53,7 @@ namespace Anvil * * @param in_device_ptr Vulkan device the memory allocations are going to be made for. **/ - OneShot(std::weak_ptr in_device_ptr); + OneShot(const Anvil::BaseDevice* in_device_ptr); /** Destructor. */ virtual ~OneShot(); @@ -60,16 +61,25 @@ namespace Anvil private: /* IMemoryAllocatorBackend functions */ - bool bake (Anvil::MemoryAllocator::Items& in_items); - bool supports_baking() const; + bool bake (Anvil::MemoryAllocator::Items& in_items) final; + VkResult map (void* in_memory_object, + VkDeviceSize in_start_offset, + VkDeviceSize in_memory_block_start_offset, + VkDeviceSize in_size, + void** out_result_ptr) final; + bool supports_baking () const final; + bool supports_external_memory_handles(const Anvil::ExternalMemoryHandleTypeFlags& in_external_memory_handle_types) const final; + bool supports_device_masks () const final; + bool supports_protected_memory () const final; + void unmap (void* in_memory_object) final; + std::optional get_underlying_allocation_size(void* in_memory_object) final {return {};} /* Private functions */ /* Private variables */ - std::weak_ptr m_device_ptr; - bool m_is_baked; - std::vector > m_memory_blocks; - + const Anvil::BaseDevice* m_device_ptr; + bool m_is_baked; + std::vector m_memory_blocks; }; }; }; diff --git a/include/misc/memalloc_backends/backend_vma.h b/include/misc/memalloc_backends/backend_vma.h index 3a7f0435..a25188bb 100644 --- a/include/misc/memalloc_backends/backend_vma.h +++ b/include/misc/memalloc_backends/backend_vma.h @@ -1,4 +1,4 @@ -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -41,7 +41,8 @@ namespace Anvil * * Should only be used by Anvil::MemoryAllocator */ - class VMA : public Anvil::MemoryAllocator::IMemoryAllocatorBackend + class VMA : public Anvil::MemoryAllocator::IMemoryAllocatorBackend, + public std::enable_shared_from_this { public: /* Public functions */ @@ -52,7 +53,9 @@ namespace Anvil * * @param in_device_ptr Vulkan device the memory allocations are going to be made for. **/ - static std::shared_ptr create(std::weak_ptr in_device_ptr); + static std::unique_ptr create(const Anvil::BaseDevice* in_device_ptr); + + VmaAllocator GetVmaHandle() {return m_vma_allocator_ptr->get_handle();} /** Destructor. */ virtual ~VMA(); @@ -73,7 +76,7 @@ namespace Anvil * * @param in_device_ptr Device the allocator should be initialized for. **/ - static std::shared_ptr create(std::weak_ptr in_device_ptr); + static std::shared_ptr create(const Anvil::BaseDevice* in_device_ptr); /** Destructor */ virtual ~VMAAllocator(); @@ -95,37 +98,50 @@ namespace Anvil * * @param in_memory_block_ptr Raw pointer to the memory block which is about to be destroyed. * Must NOT be null. Must be valid at call time. + * @param in_vma_allocation TODO. */ - void on_vma_alloced_mem_block_gone_out_of_scope(Anvil::MemoryBlock* in_memory_block_ptr); + void on_vma_alloced_mem_block_gone_out_of_scope(Anvil::MemoryBlock* in_memory_block_ptr, + VmaAllocation in_vma_allocation); private: /* Private functions */ - VMAAllocator(std::weak_ptr in_device_ptr); + VMAAllocator(const Anvil::BaseDevice* in_device_ptr); bool init(); /* Private variables */ - VmaAllocator m_allocator; - std::weak_ptr m_device_ptr; + VmaAllocator m_allocator; + const Anvil::BaseDevice* m_device_ptr; + std::unique_ptr m_vma_func_ptrs; std::vector > m_refcount_helper; }; /* Private functions */ - VMA(std::weak_ptr in_device_ptr); + VMA(const Anvil::BaseDevice* in_device_ptr); bool init(); /* IMemoryAllocatorBackend functions */ - bool bake (Anvil::MemoryAllocator::Items& in_items); - bool supports_baking() const; + bool bake (Anvil::MemoryAllocator::Items& in_items) final; + VkResult map (void* in_memory_object, + VkDeviceSize in_start_offset, + VkDeviceSize in_memory_block_start_offset, + VkDeviceSize in_size, + void** out_result_ptr); + bool supports_baking () const final; + bool supports_device_masks () const final; + bool supports_external_memory_handles(const Anvil::ExternalMemoryHandleTypeFlags& in_external_memory_handle_types) const final; + bool supports_protected_memory () const final; + void unmap (void* in_memory_object); + std::optional get_underlying_allocation_size(void* in_memory_object) final; /* Private variables */ - std::weak_ptr m_device_ptr; - std::shared_ptr m_vma_allocator_ptr; + const Anvil::BaseDevice* m_device_ptr; + std::shared_ptr m_vma_allocator_ptr; }; }; }; diff --git a/include/misc/memory_allocator.h b/include/misc/memory_allocator.h index 461985b4..6cbc9d27 100644 --- a/include/misc/memory_allocator.h +++ b/include/misc/memory_allocator.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -24,23 +24,29 @@ * objects. 
At baking time, non-overlapping regions of memory storage are distributed to the objects, * respect to object-specific alignment requirements. * - * The allocator uses a single memory heap for all allocations, so it may not work in all cases. - * This will be improved at some point in the future. + * MT-safe at an opt-in basis. **/ #ifndef MISC_MEMORY_ALLOCATOR_H #define MISC_MEMORY_ALLOCATOR_H #include "misc/debug.h" +#include "misc/mt_safety.h" #include "misc/types.h" +#include #include - +#include namespace Anvil { - typedef std::function MemoryAllocatorBakeCallbackFunction; - - /** Implements a simple memory allocator. For more details, please see the header. */ - class MemoryAllocator : public std::enable_shared_from_this + typedef std::pair LocalRemoteDeviceIndexPair; + typedef std::pair ResourceMemoryDeviceIndexPair; + typedef std::function MemoryAllocatorBakeCallbackFunction; + typedef std::function MemoryAllocatorPostBakePerNonSparseBufferItemMemAssignmentCallback; + typedef std::function MemoryAllocatorPostBakePerNonSparseImageItemMemAssignmentCallback; + typedef std::map MGPUPeerMemoryRequirements; + typedef std::vector MGPUBindSparseDeviceIndices; + + class MemoryAllocator : public MTSafetySupportProvider { public: /* Public type definitions */ @@ -49,79 +55,149 @@ namespace Anvil ITEM_TYPE_BUFFER, ITEM_TYPE_IMAGE_WHOLE, + ITEM_TYPE_SPARSE_BUFFER_REGION, ITEM_TYPE_SPARSE_IMAGE_MIPTAIL, ITEM_TYPE_SPARSE_IMAGE_SUBRESOURCE, } ItemType; typedef struct Item { - std::shared_ptr buffer_ptr; - std::shared_ptr buffer_ref_float_data_ptr; - std::shared_ptr > buffer_ref_float_vector_data_ptr; - std::shared_ptr buffer_ref_uchar8_data_ptr; - std::shared_ptr > buffer_ref_uchar8_vector_data_ptr; - std::shared_ptr buffer_ref_uint32_data_ptr; - std::shared_ptr > buffer_ref_uint32_vector_data_ptr; - std::shared_ptr image_ptr; - - std::shared_ptr memory_allocator_ptr; + Anvil::Buffer* buffer_ptr; + std::unique_ptr buffer_ref_float_data_ptr; + std::unique_ptr, std::function*)> > buffer_ref_float_vector_data_ptr; + std::unique_ptr buffer_ref_uchar8_data_ptr; + std::unique_ptr > buffer_ref_uchar8_vector_data_ptr; + std::unique_ptr buffer_ref_uint32_data_ptr; + std::unique_ptr, std::function*)> > buffer_ref_uint32_vector_data_ptr; + Anvil::Image* image_ptr; + std::string debug_name; + + Anvil::MemoryAllocator* memory_allocator_ptr; ItemType type; - std::shared_ptr alloc_memory_block_ptr; - uint32_t alloc_memory_final_type; - VkDeviceSize alloc_memory_required_alignment; - MemoryFeatureFlags alloc_memory_required_features; - uint32_t alloc_memory_supported_memory_types; - uint32_t alloc_memory_types; - VkDeviceSize alloc_size; - - VkExtent3D extent; - bool is_baked; - VkDeviceSize miptail_offset; - uint32_t n_layer; - VkOffset3D offset; - VkImageSubresource subresource; - - Item(std::shared_ptr in_memory_allocator_ptr, - std::shared_ptr in_buffer_ptr, - VkDeviceSize in_alloc_size, - uint32_t in_alloc_memory_types, - VkDeviceSize in_alloc_alignment, - MemoryFeatureFlags in_alloc_required_memory_features, - uint32_t in_alloc_supported_memory_types); - - Item(std::shared_ptr in_memory_allocator_ptr, - std::shared_ptr in_image_ptr, - uint32_t in_n_layer, - VkDeviceSize in_alloc_size, - uint32_t in_alloc_memory_types, - VkDeviceSize in_miptail_offset, - VkDeviceSize in_alloc_alignment, - MemoryFeatureFlags in_alloc_required_memory_features, - uint32_t in_alloc_supported_memory_types); - - Item(std::shared_ptr in_memory_allocator_ptr, - std::shared_ptr in_image_ptr, - const VkImageSubresource& 
in_subresource, - const VkOffset3D& in_offset, - const VkExtent3D& in_extent, - VkDeviceSize in_alloc_size, - uint32_t in_alloc_memory_types, - VkDeviceSize in_alloc_alignment, - MemoryFeatureFlags in_alloc_required_memory_features, - uint32_t in_alloc_supported_memory_types); - - Item(std::shared_ptr in_memory_allocator_ptr, - std::shared_ptr in_image_ptr, - VkDeviceSize in_alloc_size, - uint32_t in_alloc_memory_types, - VkDeviceSize in_alloc_alignment, - MemoryFeatureFlags in_alloc_required_memory_features, - uint32_t in_alloc_supported_memory_types); - - Item& operator=(const Item&); - Item (const Item&); + #if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* alloc_external_nt_handle_info_ptr; + #endif + + uint32_t alloc_device_mask; + Anvil::ExternalMemoryHandleTypeFlags alloc_exportable_external_handle_types; + Anvil::ImageAspectFlagBits alloc_image_aspect; + bool alloc_is_dedicated_memory; + bool alloc_is_scratch_buffer_alloc; + MemoryBlockUniquePtr alloc_memory_block_ptr; + uint32_t alloc_memory_final_type; + VkDeviceSize alloc_memory_required_alignment; + MemoryFeatureFlags alloc_memory_required_features; + uint32_t alloc_memory_supported_memory_types; + uint32_t alloc_memory_types; + MGPUBindSparseDeviceIndices alloc_mgpu_bind_sparse_device_indices; + MGPUPeerMemoryRequirements alloc_mgpu_peer_memory_reqs; + VkDeviceSize alloc_offset; + VkDeviceSize alloc_size; + float memory_priority; + + VkExtent3D extent; + bool is_baked; + VkDeviceSize miptail_offset; + uint32_t n_layer; + uint32_t n_plane; + VkOffset3D offset; + Anvil::ImageSubresource subresource; + + Item(Anvil::MemoryAllocator* in_memory_allocator_ptr, + Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_alloc_size, + uint32_t in_alloc_memory_types, + VkDeviceSize in_alloc_alignment, + MemoryFeatureFlags in_alloc_required_memory_features, + uint32_t in_alloc_supported_memory_types, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, + +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_alloc_external_nt_handle_info_ptr, +#endif + const bool& in_alloc_is_dedicated, + const uint32_t& in_device_mask, + const MGPUPeerMemoryRequirements& in_mgpu_peer_memory_reqs, + const MGPUBindSparseDeviceIndices& in_mgpu_bind_sparse_device_indices, + const float& in_memory_priority); + + Item(Anvil::MemoryAllocator* in_memory_allocator_ptr, + Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_alloc_offset, + VkDeviceSize in_alloc_size, + uint32_t in_alloc_memory_types, + VkDeviceSize in_alloc_alignment, + MemoryFeatureFlags in_alloc_required_memory_features, + uint32_t in_alloc_supported_memory_types, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_alloc_external_nt_handle_info_ptr, +#endif + const bool& in_alloc_is_dedicated, + const uint32_t& in_device_mask, + const MGPUPeerMemoryRequirements& in_mgpu_peer_memory_reqs, + const MGPUBindSparseDeviceIndices& in_mgpu_bind_sparse_device_indices, + const float& in_memory_priority); + + Item(Anvil::MemoryAllocator* in_memory_allocator_ptr, + Anvil::Image* in_image_ptr, + uint32_t in_n_layer, + VkDeviceSize in_alloc_size, + uint32_t in_alloc_memory_types, + VkDeviceSize in_miptail_offset, + const Anvil::ImageAspectFlagBits& in_alloc_aspect, + VkDeviceSize in_alloc_alignment, + MemoryFeatureFlags in_alloc_required_memory_features, + uint32_t in_alloc_supported_memory_types, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, 
+#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_alloc_external_nt_handle_info_ptr, +#endif + const bool& in_alloc_is_dedicated, + const uint32_t& in_device_mask, + const MGPUPeerMemoryRequirements& in_mgpu_peer_memory_reqs, + const MGPUBindSparseDeviceIndices& in_mgpu_bind_sparse_device_indices, + const float& in_memory_priority); + + Item(Anvil::MemoryAllocator* in_memory_allocator_ptr, + Anvil::Image* in_image_ptr, + const Anvil::ImageSubresource& in_subresource, + const VkOffset3D& in_offset, + const VkExtent3D& in_extent, + VkDeviceSize in_alloc_size, + uint32_t in_alloc_memory_types, + VkDeviceSize in_alloc_alignment, + MemoryFeatureFlags in_alloc_required_memory_features, + uint32_t in_alloc_supported_memory_types, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_alloc_external_nt_handle_info_ptr, +#endif + const bool& in_alloc_is_dedicated, + const uint32_t& in_device_mask, + const MGPUPeerMemoryRequirements& in_mgpu_peer_memory_reqs, + const MGPUBindSparseDeviceIndices& in_mgpu_bind_sparse_device_indices, + const float& in_memory_priority); + + Item(Anvil::MemoryAllocator* in_memory_allocator_ptr, + Anvil::Image* in_image_ptr, + VkDeviceSize in_alloc_size, + uint32_t in_alloc_memory_types, + VkDeviceSize in_alloc_alignment, + MemoryFeatureFlags in_alloc_required_memory_features, + uint32_t in_alloc_supported_memory_types, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_alloc_external_nt_handle_info_ptr, +#endif + const bool& in_alloc_is_dedicated, + const uint32_t& in_device_mask, + const MGPUPeerMemoryRequirements& in_mgpu_peer_memory_reqs, + const MGPUBindSparseDeviceIndices& in_mgpu_bind_sparse_device_indices, + const float& in_memory_priority, + const uint32_t& in_n_plane); /** TODO */ ~Item(); @@ -136,9 +212,9 @@ namespace Anvil bool image_has_memory_block_needed_callback_registered; } Item; - typedef std::vector > Items; + typedef std::vector > Items; - class IMemoryAllocatorBackend + class IMemoryAllocatorBackend : public IMemoryAllocatorBackendBase { public: virtual ~IMemoryAllocatorBackend() @@ -146,48 +222,173 @@ namespace Anvil /* Stub */ } - virtual bool bake (Items& in_items) = 0; - virtual bool supports_baking() const = 0; + virtual bool bake (Items& in_items) = 0; + virtual bool supports_device_masks () const = 0; + virtual bool supports_external_memory_handles(const Anvil::ExternalMemoryHandleTypeFlags& in_external_memory_handle_types) const = 0; + virtual bool supports_protected_memory () const = 0; }; /* Public functions */ + IMemoryAllocatorBackend *GetAllocatorBackend() {return m_backend_ptr.get();} + /** Adds a new Buffer object which should use storage coming from the buffer memory * maintained by the Memory Allocator. * - * @param in_buffer_ptr Buffer to configure storage for at bake() call time. Must not - * be nullptr. - * @param in_data_ptr The buffer will be filled with data extracted from the specified - * location. The number of bytes which will be stored is defined by - * buffer size. - * @param in_data_vector_ptr The buffer will be filled with data extracted from the specified - * vector. Total number of bytes defined in the vector must match - * buffer size. - * @param in_required_memory_features Memory features the assigned memory must support. - * See MemoryFeatureFlagBits for more details. 
+ * @param in_buffer_ptr Buffer to configure storage for at bake() call time. Must not + * be nullptr. + * @param in_data_ptr The buffer will be filled with data extracted from the specified + * location. The number of bytes which will be stored is defined by + * buffer size. + * @param in_data_vector_ptr The buffer will be filled with data extracted from the specified + * vector. Total number of bytes defined in the vector must match + * buffer size. + * @param in_required_memory_features Memory features the assigned memory must support. + * See MemoryFeatureFlagBits for more details. + * @param in_opt_external_nt_handle_info_ptr TODO. Pointer must remain valid till baking time. + * @param in_opt_device_mask_ptr If not null, deref should contain a valid device mask to be used for + * the allocation. Specifying a device mask annotates the allocation request + * with VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT flag. Value from deref is copied + * at call time, the pointer may be released when the func leaves. + * @param in_opt_mgpu_bind_sparse_device_indices_ptr If not null, deref may contain up to N pairs of resource and memory device indices, where N is the number of devices + * in the device group. The indices will be used during sparse bind operation, binding the buffer on the device with + * resource index to a memory allocation instance on the device with memory index. If null or empty, the binding will work + * as if both indices were zero. + * @param in_opt_memory_priority Memory priority to use for the allocation. Valid values must be within range [0.0f, 1.0f]. + * Ignored if VK_EXT_memory_priority is unavailable or if VMA backend is used or if FLT_MAX is passed. * * @return true if the buffer has been successfully scheduled for baking, false otherwise. 
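+         *
+         * A minimal one-shot usage sketch (hypothetical; device_ptr and buffer_ptr are assumed to already exist and
+         * the memory feature flag below is a placeholder):
+         *
+         *     auto allocator_ptr = Anvil::MemoryAllocator::create_oneshot(device_ptr);
+         *
+         *     allocator_ptr->add_buffer(buffer_ptr,
+         *                               Anvil::MemoryFeatureFlagBits::DEVICE_LOCAL_BIT);
+         *
+         *     allocator_ptr->bake(); // explicit bake; the allocator may also bake implicitly when the memory is first needed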
**/ - bool add_buffer (std::shared_ptr in_buffer_ptr, - MemoryFeatureFlags in_required_memory_features); - bool add_buffer_with_float_data_ptr_based_post_fill (std::shared_ptr in_buffer_ptr, - std::shared_ptr in_data_ptr, - MemoryFeatureFlags in_required_memory_features); - bool add_buffer_with_float_data_vector_ptr_based_post_fill (std::shared_ptr in_buffer_ptr, - std::shared_ptr > in_data_vector_ptr, - MemoryFeatureFlags in_required_memory_features); - bool add_buffer_with_uchar8_data_ptr_based_post_fill (std::shared_ptr in_buffer_ptr, - std::shared_ptr in_data_ptr, - MemoryFeatureFlags in_required_memory_features); - bool add_buffer_with_uchar8_data_vector_ptr_based_post_fill(std::shared_ptr in_buffer_ptr, - std::shared_ptr > in_data_vector_ptr, - MemoryFeatureFlags in_required_memory_features); - bool add_buffer_with_uint32_data_ptr_based_post_fill (std::shared_ptr in_buffer_ptr, - std::shared_ptr in_data_ptr, - MemoryFeatureFlags in_required_memory_features); - bool add_buffer_with_uint32_data_vector_ptr_based_post_fill(std::shared_ptr in_buffer_ptr, - std::shared_ptr > in_data_vector_ptr, - MemoryFeatureFlags in_required_memory_features); + bool add_buffer (Anvil::Buffer* in_buffer_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types = Anvil::ExternalMemoryHandleTypeFlagBits::NONE, + #if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr = nullptr, + #endif + const uint32_t* in_opt_device_mask_ptr = nullptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr = nullptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr = nullptr, + const float& in_opt_memory_priority = FLT_MAX); + bool add_buffer_with_float_data_ptr_based_post_fill (Anvil::Buffer* in_buffer_ptr, + std::unique_ptr in_data_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types = Anvil::ExternalMemoryHandleTypeFlagBits::NONE, + #if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr = nullptr, + #endif + const uint32_t* in_opt_device_mask_ptr = nullptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr = nullptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr = nullptr, + const float& in_opt_memory_priority = FLT_MAX); + bool add_buffer_with_float_data_vector_ptr_based_post_fill (Anvil::Buffer* in_buffer_ptr, + std::unique_ptr > in_data_vector_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types = Anvil::ExternalMemoryHandleTypeFlagBits::NONE, + #if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr = nullptr, + #endif + const uint32_t* in_opt_device_mask_ptr = nullptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr = nullptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr = nullptr, + const float& in_opt_memory_priority = FLT_MAX); + bool add_buffer_with_float_data_vector_ptr_based_post_fill (Anvil::Buffer* in_buffer_ptr, + const std::vector* in_data_vector_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types = Anvil::ExternalMemoryHandleTypeFlagBits::NONE, + #if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* 
in_opt_external_nt_handle_info_ptr = nullptr, + #endif + const uint32_t* in_opt_device_mask_ptr = nullptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr = nullptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr = nullptr, + const float& in_opt_memory_priority = FLT_MAX); + bool add_buffer_with_uchar8_data_ptr_based_post_fill (Anvil::Buffer* in_buffer_ptr, + std::unique_ptr in_data_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types = Anvil::ExternalMemoryHandleTypeFlagBits::NONE, + #if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr = nullptr, + #endif + const uint32_t* in_opt_device_mask_ptr = nullptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr = nullptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr = nullptr, + const float& in_opt_memory_priority = FLT_MAX); + bool add_buffer_with_uchar8_data_vector_ptr_based_post_fill(Anvil::Buffer* in_buffer_ptr, + std::unique_ptr > in_data_vector_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types = Anvil::ExternalMemoryHandleTypeFlagBits::NONE, + #if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr = nullptr, + #endif + const uint32_t* in_opt_device_mask_ptr = nullptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr = nullptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr = nullptr, + const float& in_opt_memory_priority = FLT_MAX); + bool add_buffer_with_uint32_data_ptr_based_post_fill (Anvil::Buffer* in_buffer_ptr, + std::unique_ptr in_data_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types = Anvil::ExternalMemoryHandleTypeFlagBits::NONE, + #if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr = nullptr, + #endif + const uint32_t* in_opt_device_mask_ptr = nullptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr = nullptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr = nullptr, + const float& in_opt_memory_priority = FLT_MAX); + bool add_buffer_with_uint32_data_vector_ptr_based_post_fill(Anvil::Buffer* in_buffer_ptr, + std::unique_ptr > in_data_vector_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types = Anvil::ExternalMemoryHandleTypeFlagBits::NONE, + #if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr = nullptr, + #endif + const uint32_t* in_opt_device_mask_ptr = nullptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr = nullptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr = nullptr, + const float& in_opt_memory_priority = FLT_MAX); + bool add_buffer_with_uint32_data_vector_ptr_based_post_fill(Anvil::Buffer* in_buffer_ptr, + const std::vector* in_data_vector_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types = Anvil::ExternalMemoryHandleTypeFlagBits::NONE, + #if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr = nullptr, + #endif + const 
uint32_t* in_opt_device_mask_ptr = nullptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr = nullptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr = nullptr, + const float& in_opt_memory_priority = FLT_MAX); + + /** TODO + * + * @param in_buffer_ptr TODO + * @param in_offset TODO. Must be divisible by VkMemoryRequirements::alignment + * @param in_size TODO. + * @param in_required_memory_features TODO + * @param in_opt_device_mask_ptr If not null, deref should contain a valid device mask to be used for + * the allocation. Specifying a device mask annotates the allocation request + * with VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT flag. Value from deref is copied + * at call time, the pointer may be released when the func leaves. + * @param in_opt_memory_reqs_ptr If not null, deref contains information of peer memory requirements which + * should be taken into account when determining which memory heap allocations + * should be made off. Specified device indices must be included in @param in_opt_device_mask_ptr. + * @param in_opt_mgpu_bind_sparse_device_indices_ptr If not null, deref may contain up to N pairs of resource and memory device indices, where N is the number of devices + * in the device group. The indices will be used during sparse bind operation, binding the buffer on the device with + * resource index to a memory allocation instance on the device with memory index. If null or empty, the binding will work + * as if both indices were zero. + * @param in_opt_memory_priority Memory priority to use for the allocation. Valid values must be within range [0.0f, 1.0f]. + * Ignored if VK_EXT_memory_priority is unavailable or if VMA backend is used or if FLT_MAX is passed. + * + * @return TODO + */ + bool add_sparse_buffer_region(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + VkDeviceSize in_size, + MemoryFeatureFlags in_required_memory_features, + const uint32_t* in_opt_device_mask_ptr = nullptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr = nullptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr = nullptr, + const float& in_opt_memory_priority = FLT_MAX); + /** Adds an Image object which should be assigned storage coming from memory objects * maintained by the Memory Allocator. At baking time, all subresources of the image, @@ -195,15 +396,35 @@ namespace Anvil * * This function can be used against both non-sparse and sparse images. * - * @param image_ptr Image to configure storage for at bake() call time. Must not - * be nullptr. - * @param in_required_memory_features Memory features the assigned memory must support. - * See MemoryFeatureFlagBits for more details. + * @param image_ptr Image to configure storage for at bake() call time. Must not + * be nullptr. + * @param in_required_memory_features Memory features the assigned memory must support. + * See MemoryFeatureFlagBits for more details. + * @param in_opt_external_nt_handle_info_ptr TODO. Pointer must remain valid till baking time. + * @param in_opt_device_mask_ptr If not null, deref should contain a valid device mask to be used for + * the allocation. Specifying a device mask annotates the allocation request + * with VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT flag. Value from deref is copied + * at call time, the pointer may be released when the func leaves. 
+ * @param in_opt_mgpu_bind_sparse_device_indices_ptr If not null, deref may contain up to N pairs of resource and memory device indices, where N is the number of devices + * in the device group. The indices will be used during sparse bind operation, binding the image on the device with + * resource index to a memory allocation instance on the device with memory index. If null or empty, the binding will work + * as if both indices were zero. + * @param in_opt_memory_priority Memory priority to use for the allocation. Valid values must be within range [0.0f, 1.0f]. + * Ignored if VK_EXT_memory_priority is unavailable or if VMA backend is used or if FLT_MAX is passed. * * @return true if the image has been successfully scheduled for baking, false otherwise. **/ - bool add_image_whole(std::shared_ptr in_image_ptr, - MemoryFeatureFlags in_required_memory_features); + bool add_image_whole(Anvil::Image* in_image_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types = Anvil::ExternalMemoryHandleTypeFlagBits::NONE, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr = nullptr, + #endif + const uint32_t* in_opt_device_mask_ptr = nullptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr = nullptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr = nullptr, + const float& in_opt_memory_priority = FLT_MAX); + /** Adds a new Image object whose layer @param in_n_layer 's miptail for @param in_aspect * aspect should be assigned a physical memory backing. The miptail will be bound a memory @@ -214,42 +435,71 @@ namespace Anvil * * This function can only be used for sparse resident images. * - * @param in_image_ptr Image to use for the request. Must not be null. - * @param in_aspect Aspect to be used for the request. - * @param in_n_layer Index of the layer to attach the miptail to. - * @param in_required_memory_features Memory features the assigned memory must support. - * See MemoryFeatureFlagBits for more details. + * @param in_image_ptr Image to use for the request. Must not be null. + * @param in_aspect Aspect to be used for the request. + * @param in_n_layer Index of the layer to attach the miptail to. + * @param in_required_memory_features Memory features the assigned memory must support. + * See MemoryFeatureFlagBits for more details. + * @param in_opt_device_mask_ptr If not null, deref should contain a valid device mask to be used for + * the allocation. Specifying a device mask annotates the allocation request + * with VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT flag. Value from deref is copied + * at call time, the pointer may be released when the func leaves. + * @param in_opt_mgpu_bind_sparse_device_indices_ptr If not null, deref may contain up to N pairs of resource and memory device indices, where N is the number of devices + * in the device group. The indices will be used during sparse bind operation, binding the image on the device with + * resource index to a memory allocation instance on the device with memory index. If null or empty, the binding will work + * as if both indices were zero. + * @param in_opt_memory_priority Memory priority to use for the allocation. Valid values must be within range [0.0f, 1.0f]. + * Ignored if VK_EXT_memory_priority is unavailable or if VMA backend is used or if FLT_MAX is passed. * * @return true if the miptail has been successfully scheduled for baking, false otherwise. 
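+         *
+         * Hypothetical sketch (allocator_ptr and the sparse-resident image_ptr are assumed to already exist; the
+         * aspect and layer values are placeholders):
+         *
+         *     allocator_ptr->add_sparse_image_miptail(image_ptr,
+         *                                             Anvil::ImageAspectFlagBits::COLOR_BIT,
+         *                                             0, // in_n_layer
+         *                                             Anvil::MemoryFeatureFlagBits::DEVICE_LOCAL_BIT);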
*/ - bool add_sparse_image_miptail(std::shared_ptr in_image_ptr, - VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - MemoryFeatureFlags in_required_memory_features); + bool add_sparse_image_miptail(Anvil::Image* in_image_ptr, + Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + MemoryFeatureFlags in_required_memory_features, + const uint32_t* in_opt_device_mask_ptr = nullptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr = nullptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr = nullptr, + const float& in_opt_memory_priority = FLT_MAX); + /** Adds a single subresource which should be assigned memory backing. * * This function does NOT alloc memory for the miptail. It is user's responsibilty to call * add_sparse_image_miptail() for any layers which require a miptail. * - * @param in_image_ptr Image to use for the request. Must not be null. - * @param in_subresource Specifies details of the subresource to attach memory backing to. - * @param in_offset XYZ offset, from which the memory backing should be attached. Must - * be rounded up to tile size of the aspect @param in_subresource - * refers to. - * @param in_extent Size of the region to assign memory backing to. Must be rounded up - * to tile size of the aspect @param in_subresource refers to, UNLESS - * @param in_offset + @param in_extent touches the subresource border. - * @param in_required_memory_features Memory features the assigned memory must support. - * See MemoryFeatureFlagBits for more details. + * @param in_image_ptr Image to use for the request. Must not be null. + * @param in_subresource Specifies details of the subresource to attach memory backing to. + * @param in_offset XYZ offset, from which the memory backing should be attached. Must + * be rounded up to tile size of the aspect @param in_subresource + * refers to. + * @param in_extent Size of the region to assign memory backing to. Must be rounded up + * to tile size of the aspect @param in_subresource refers to, UNLESS + * @param in_offset + @param in_extent touches the subresource border. + * @param in_required_memory_features Memory features the assigned memory must support. + * See MemoryFeatureFlagBits for more details. + * @param in_opt_device_mask_ptr If not null, deref should contain a valid device mask to be used for + * the allocation. Specifying a device mask annotates the allocation request + * with VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT flag. Value from deref is copied + * at call time, the pointer may be released when the func leaves. + * @param in_opt_mgpu_bind_sparse_device_indices_ptr If not null, deref may contain up to N pairs of resource and memory device indices, where N is the number of devices + * in the device group. The indices will be used during sparse bind operation, binding the image on the device with + * resource index to a memory allocation instance on the device with memory index. If null or empty, the binding will work + * as if both indices were zero. + * @param in_opt_memory_priority Memory priority to use for the allocation. Valid values must be within range [0.0f, 1.0f]. + * Ignored if VK_EXT_memory_priority is unavailable or if VMA backend is used or if FLT_MAX is passed. * * @return true if the subresource has been successfully scheduled for baking, false otherwise. 
**/ - bool add_sparse_image_subresource(std::shared_ptr in_image_ptr, - const VkImageSubresource& in_subresource, - const VkOffset3D& in_offset, - VkExtent3D in_extent, - MemoryFeatureFlags in_required_memory_features); + bool add_sparse_image_subresource(Anvil::Image* in_image_ptr, + const Anvil::ImageSubresource& in_subresource, + const VkOffset3D& in_offset, + VkExtent3D in_extent, + MemoryFeatureFlags in_required_memory_features, + const uint32_t* in_opt_device_mask_ptr = nullptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr = nullptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr = nullptr, + const float& in_opt_memory_priority = FLT_MAX); /** TODO */ bool bake(); @@ -260,15 +510,52 @@ namespace Anvil * * @param in_device_ptr Device to use. **/ - static std::shared_ptr create_oneshot(std::weak_ptr in_device_ptr); + static Anvil::MemoryAllocatorUniquePtr create_oneshot(const Anvil::BaseDevice* in_device_ptr, + MTSafety in_mt_safety = Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE); /** Creates a new VMA memory allocator instance. * * This type of allocator supports an arbitrary number of implicit or explicit bake invocations. + * This type of allocator does NOT support external handles of any type. + * This type of allocator does NOT support device masks. * * @param in_device_ptr Device to use. **/ - static std::shared_ptr create_vma(std::weak_ptr in_device_ptr); + static Anvil::MemoryAllocatorUniquePtr create_vma(const Anvil::BaseDevice* in_device_ptr, + MTSafety in_mt_safety = Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE); + + static bool get_mem_types_supporting_mem_features(const Anvil::BaseDevice* in_device_ptr, + uint32_t in_memory_types, + const Anvil::MemoryFeatureFlags& in_memory_features, + uint32_t* out_opt_filtered_memory_types_ptr); + + /** By default, once memory regions are baked, memory allocator will bind them to objects specified + * at add_*() call time. Use cases exist where apps may prefer to handle this action on their own. + * + * When a post-bake per-item mem assignment callback is specified, the allocator will NO LONGER: + * + * - (NON-SPARSE OBJECTS): call set_(nonsparse_)memory() functions and fill the regions with user-specified data, + * if such was provided at add_*() call time. + * - (SPARSE OBJECTS): bind allocated memory to user-specified sparse memory regions. + * + * The allocator WILL still allocate physical memory as determined to be required for the specified list of objects + * and use the callback specified in this call to let the application do whatever it needs to with the allocated memory. + * + * All callback functions must be specified for all types of objects supported by the allocator. + * + * Do not use this function unless you have a strong understanding of the implications. + * + * NOTE: Support for sparse buffers & images remains a TODO. 
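To make the end-to-end flow concrete, here is a hedged sketch which ties the sparse-image entry points above to a one-shot allocator and a bake() call. The tile-aligned offset/extent, the subresource description and the COLOR_BIT enumerator spelling are assumptions; only the function signatures themselves come from this patch.

    // Minimal sketch: one-shot allocator servicing a sparse-resident image.
    // Tile-aligned offset/extent and the subresource description are assumed to
    // have been derived from the image's sparse memory requirements elsewhere.
    #include "misc/memory_allocator.h"

    bool back_sparse_image(const Anvil::BaseDevice*       in_device_ptr,
                           Anvil::Image*                  in_sparse_image_ptr,
                           const Anvil::ImageSubresource& in_subresource,
                           const VkOffset3D&              in_tile_aligned_offset,
                           const VkExtent3D&              in_tile_aligned_extent,
                           Anvil::MemoryFeatureFlags      in_features)
    {
        auto allocator_ptr = Anvil::MemoryAllocator::create_oneshot(in_device_ptr);
        bool result        = true;

        /* Miptail for layer 0 of the color aspect, plus one explicitly backed subresource region. */
        result &= allocator_ptr->add_sparse_image_miptail    (in_sparse_image_ptr,
                                                              Anvil::ImageAspectFlagBits::COLOR_BIT, /* assumed enumerator */
                                                              0, /* in_n_layer */
                                                              in_features);
        result &= allocator_ptr->add_sparse_image_subresource(in_sparse_image_ptr,
                                                              in_subresource,
                                                              in_tile_aligned_offset,
                                                              in_tile_aligned_extent,
                                                              in_features);

        /* Allocate + bind everything scheduled above. */
        result &= allocator_ptr->bake();

        return result;
    }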
+ */ + void set_post_bake_per_item_mem_assignment_callbacks(MemoryAllocatorPostBakePerNonSparseBufferItemMemAssignmentCallback in_callback_function_for_buffers, + MemoryAllocatorPostBakePerNonSparseImageItemMemAssignmentCallback in_callback_function_for_images) + { + anvil_assert(in_callback_function_for_buffers != nullptr); + anvil_assert(in_callback_function_for_images != nullptr); + + m_post_bake_per_buffer_item_mem_assignment_callback_function = in_callback_function_for_buffers; + m_post_bake_per_image_item_mem_assignment_callback_function = in_callback_function_for_images; + } /** Assigns a func pointer which will be called by the allocator after all added objects * have been assigned memory blocks. @@ -288,13 +575,20 @@ namespace Anvil ~MemoryAllocator(); private: - /* Private functions */ - bool add_buffer_internal(std::shared_ptr in_buffer_ptr, - MemoryFeatureFlags in_required_memory_features); - bool is_alloc_supported (uint32_t in_memory_types, - Anvil::MemoryFeatureFlags in_memory_features, - uint32_t* out_opt_filtered_memory_types_ptr) const; + bool add_buffer_internal(Anvil::Buffer* in_buffer_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr, +#endif + const uint32_t* in_opt_device_mask_ptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr, + const float& in_opt_memory_priority); + + bool do_bind_sparse_device_indices_sanity_check (const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr) const; + bool do_external_memory_handle_type_sanity_checks(const Anvil::ExternalMemoryHandleTypeFlags& in_external_memory_handle_types) const; void on_is_alloc_pending_for_buffer_query(CallbackArgument* in_callback_arg_ptr); void on_is_alloc_pending_for_image_query (CallbackArgument* in_callback_arg_ptr); @@ -303,19 +597,22 @@ namespace Anvil /** Constructor. * * Please see create() documentation for specification. */ - MemoryAllocator(std::weak_ptr in_device_ptr, - std::shared_ptr in_backend_ptr); + MemoryAllocator(const Anvil::BaseDevice* in_device_ptr, + std::shared_ptr in_backend_ptr, + bool in_mt_safe); MemoryAllocator (const MemoryAllocator&); MemoryAllocator& operator=(const MemoryAllocator&); /* Private members */ std::shared_ptr m_backend_ptr; - std::weak_ptr m_device_ptr; + const Anvil::BaseDevice* m_device_ptr; Items m_items; std::map m_per_object_pending_alloc_status; - MemoryAllocatorBakeCallbackFunction m_post_bake_callback_function; + MemoryAllocatorBakeCallbackFunction m_post_bake_callback_function; + MemoryAllocatorPostBakePerNonSparseBufferItemMemAssignmentCallback m_post_bake_per_buffer_item_mem_assignment_callback_function; + MemoryAllocatorPostBakePerNonSparseImageItemMemAssignmentCallback m_post_bake_per_image_item_mem_assignment_callback_function; }; }; diff --git a/include/misc/memory_block_create_info.h b/include/misc/memory_block_create_info.h new file mode 100644 index 00000000..c80c43d5 --- /dev/null +++ b/include/misc/memory_block_create_info.h @@ -0,0 +1,457 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef MISC_MEMORY_BLOCK_CREATE_INFO_H +#define MISC_MEMORY_BLOCK_CREATE_INFO_H + +#include "misc/types.h" + + +namespace Anvil +{ + class MemoryBlockCreateInfo + { + public: + /* Public functions */ + + /** Spawns a create info instance which can be used to instantiate a memory block whose storage space is maintained + * by another MemoryBlock instance. + * + * @param in_parent_memory_block_ptr MemoryBlock instance to use as a parent. Must not be nullptr. + * Parent memory block must not be mapped. + * @param in_start_offset Start offset of the storage maintained by the specified parent memory block, + * from which the new MemoryBlock instance's storage should start. + * Must not be equal to or larger than parent object's storage size. + * When added to @param in_size, the result value must not be be larger than the remaining + * storage size. + * @param in_size Region size to use for the derived memory block. + **/ + static MemoryBlockCreateInfoUniquePtr create_derived(MemoryBlock* in_parent_memory_block_ptr, + VkDeviceSize in_start_offset, + VkDeviceSize in_size); + + /** Spawns a create info instance which can be used to instantiate a memory block whose lifetime is maintained by a separate + * entity. While the memory block remains reference-counted as usual, the destruction process is carried out by an external + * party via the specified call-back. + * + * This implements a special case required for support of Vulkan Memory Allocator memory allocator backend. Applications + * are very unlikely to ever need to use this create() function. + * + * NOTE: The following parameters take the following default values: + * + * - Exportable external memory handle types: Anvil::ExternalMemoryHandleTypeFlagBits::NONE + * - Importable external memory handle type: Anvil::ExternalMemoryHandleTypeFlagBits::NONE + * - Use a dedicated allocation?: No. + * + * These can be further adjusted by callingt corresponding set_..() functions prior to passing the CreateInfo instance + * to MemoryBlock::create(). + * + * TODO + * + * @param in_allowed_memory_bits A bitfield whose indices correspond to memory type indices which can be used for the allocation. 
+         *
+         */
+        static MemoryBlockCreateInfoUniquePtr create_derived_with_custom_delete_proc(const Anvil::BaseDevice*             in_device_ptr,
+                                                                                      VkDeviceMemory                       in_memory,
+                                                                                      uint32_t                             in_allowed_memory_bits,
+                                                                                      Anvil::MemoryFeatureFlags            in_memory_features,
+                                                                                      uint32_t                             in_memory_type_index,
+                                                                                      VkDeviceSize                         in_size,
+                                                                                      VkDeviceSize                         in_start_offset,
+                                                                                      OnMemoryBlockReleaseCallbackFunction in_on_release_callback_function);
+
+        /** Spawns a create info instance which can be used to instantiate a new memory block.
+         *
+         *  This function can be used for both single- and multi-GPU device instances. For the latter case,
+         *  the default behavior is to allocate a single instance of memory (deviceMask = 1) for memory heaps
+         *  that do NOT have the VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR bit on, or allocate as many instances
+         *  of memory as there are physical devices assigned to the logical device. This can be adjusted
+         *  by calling corresponding set_..() functions.
+         *
+         *  NOTE: The following parameters take the following default values:
+         *
+         *  - Exportable external memory handle types: Anvil::ExternalMemoryHandleTypeFlagBits::NONE
+         *  - Importable external memory handle type:  Anvil::ExternalMemoryHandleTypeFlagBits::NONE
+         *  - MT safety:                               Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE.
+         *  - Use a dedicated allocation?:             No.
+         *
+         *  These can be further adjusted by calling corresponding set_..() functions prior to passing the CreateInfo instance
+         *  to MemoryBlock::create().
+         *
+         *  @param in_device_ptr          Device to use.
+         *  @param in_allowed_memory_bits Memory type bits which meet the allocation requirements.
+         *  @param in_size                Required allocation size.
+         *  @param in_memory_features     Required memory features.
+         **/
+        static MemoryBlockCreateInfoUniquePtr create_regular(const Anvil::BaseDevice*  in_device_ptr,
+                                                             uint32_t                  in_allowed_memory_bits,
+                                                             VkDeviceSize              in_size,
+                                                             Anvil::MemoryFeatureFlags in_memory_features);
+
+        /** Spawns a create info instance which can be used to instantiate a new memory block.
+         *
+         *  WARNING: in_memory_type_index has to meet all memory requirements; it is the user's responsibility to adhere to them!
+         *           If possible, please use the create_regular() function instead of this one.
+         *
+         *  This function can be used for both single- and multi-GPU device instances. For the latter case,
+         *  the default behavior is to allocate a single instance of memory (deviceMask = 1) for memory heaps
+         *  that do NOT have the VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR bit on, or allocate as many instances
+         *  of memory as there are physical devices assigned to the logical device. This can be adjusted
+         *  by calling corresponding set_..() functions.
+         *
+         *  NOTE: The following parameters take the following default values:
+         *
+         *  - Exportable external memory handle types: Anvil::ExternalMemoryHandleTypeFlagBits::NONE
+         *  - Importable external memory handle type:  Anvil::ExternalMemoryHandleTypeFlagBits::NONE
+         *  - MT safety:                               Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE.
+         *  - Use a dedicated allocation?:             No.
+         *
+         *  These can be further adjusted by calling corresponding set_..() functions prior to passing the CreateInfo instance
+         *  to MemoryBlock::create().
+         *
+         *  @param in_device_ptr          Device to use.
+         *  @param in_allowed_memory_bits Memory type bits which meet the allocation requirements.
+         *  @param in_size                Required allocation size.
+         *  @param in_memory_features     Required memory features.
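A hedged sketch of the intended create_regular() call sequence. The MTSafety::ENABLED enumerator and the example device mask are assumptions (only INHERIT_FROM_PARENT_DEVICE is shown in this header); the final hand-off to MemoryBlock::create() is referenced by the comments above but its signature lies outside this hunk, so it is not reproduced here.

    // Minimal sketch: build a create info for a fresh allocation and override two
    // of the defaults listed above before passing it to MemoryBlock::create().
    #include "misc/memory_block_create_info.h"

    Anvil::MemoryBlockCreateInfoUniquePtr make_regular_block_info(const Anvil::BaseDevice*  in_device_ptr,
                                                                  uint32_t                  in_allowed_memory_bits, /* e.g. VkMemoryRequirements::memoryTypeBits */
                                                                  VkDeviceSize              in_size,
                                                                  Anvil::MemoryFeatureFlags in_features)
    {
        auto create_info_ptr = Anvil::MemoryBlockCreateInfo::create_regular(in_device_ptr,
                                                                            in_allowed_memory_bits,
                                                                            in_size,
                                                                            in_features);

        /* Both setters are declared further down in this header. */
        create_info_ptr->set_mt_safety  (Anvil::MTSafety::ENABLED); /* assumed enumerator            */
        create_info_ptr->set_device_mask(1 << 0);                   /* requires VK_KHR_device_group  */

        return create_info_ptr;
    }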
+ * @param in_memory_type_index Required memory type index + **/ + static MemoryBlockCreateInfoUniquePtr create_with_memory_type(const Anvil::BaseDevice* in_device_ptr, + uint32_t in_allowed_memory_bits, + VkDeviceSize in_size, + Anvil::MemoryFeatureFlags in_memory_features, + uint32_t in_memory_type_index); + + const uint32_t& get_allowed_memory_bits() const + { + return m_allowed_memory_bits; + } + + void get_dedicated_allocation_properties(bool* out_opt_enabled_ptr, + Anvil::Buffer** out_opt_buffer_ptr_ptr, + Anvil::Image** out_opt_image_ptr_ptr) const + { + if (out_opt_enabled_ptr != nullptr) + { + *out_opt_enabled_ptr = m_use_dedicated_allocation; + } + + if (out_opt_buffer_ptr_ptr != nullptr) + { + *out_opt_buffer_ptr_ptr = m_dedicated_allocation_buffer_ptr; + } + + if (out_opt_image_ptr_ptr != nullptr) + { + *out_opt_image_ptr_ptr = m_dedicated_allocation_image_ptr; + } + } + + float get_memory_priority() const + { + return m_memory_priority; + } + + const Anvil::BaseDevice* get_device() const + { + return m_device_ptr; + } + + const uint32_t& get_device_mask() const + { + return m_device_mask; + } + + const Anvil::ExternalMemoryHandleTypeFlags& get_exportable_external_memory_handle_types() const + { + return m_exportable_external_memory_handle_types; + } + + /* Returns true if set_external_handle_import_info() has been called prior to this call. + * Otherwise returns false. + * + * If the func returns true, *out_result_ptr is set to the queried data. + */ + bool get_external_handle_import_info(const ExternalMemoryHandleImportInfo** out_result_ptr_ptr) const + { + bool result = false; + + if (m_external_handle_import_info_specified) + { + *out_result_ptr_ptr = &m_external_handle_import_info; + result = true; + } + + return result; + } + + #if defined(_WIN32) + /* Returns true if set_exportable_nt_handle_info() has been called prior to this call. + * Otherwise returns false. + * + * If the func returns true, *out_result_ptr is set to the queried data. + */ + bool get_exportable_nt_handle_info(const ExternalNTHandleInfo** out_result_ptr_ptr) const + { + bool result = false; + + if (m_exportable_nt_handle_info_specified) + { + *out_result_ptr_ptr = &m_exportable_nt_handle_info; + result = true; + } + + return result; + } + #endif + + const Anvil::ExternalMemoryHandleTypeFlagBits& get_imported_external_memory_handle_type() const + { + return m_imported_external_memory_handle_type; + } + + VkDeviceMemory get_memory() const + { + anvil_assert(m_type == Anvil::MemoryBlockType::DERIVED_WITH_CUSTOM_DELETE_PROC); + + return m_memory; + } + + /** Returns memory features of the underlying memory region */ + Anvil::MemoryFeatureFlags get_memory_features() const; + + /** Returns the memory type index the memory block was allocated from */ + uint32_t get_memory_type_index() const; + + const Anvil::MTSafety& get_mt_safety() const + { + return m_mt_safety; + } + + const Anvil::OnMemoryBlockReleaseCallbackFunction& get_on_release_callback_function() const + { + return m_on_release_callback_function; + } + + /* Returns parent memory block, if one has been defined for this instance */ + Anvil::MemoryBlock* get_parent_memory_block() const + { + return m_parent_memory_block_ptr; + } + + /* Returns the size of the memory block. */ + VkDeviceSize get_size() const + { + return m_size; + } + + /* Returns the start offset of the memory block. 
+ * + * If the memory block has a parent, the returned start offset is NOT relative to the parent memory block's + * start offset (in other words: the returned value is an absolute offset which can be directly used against + * the memory block instance) + */ + VkDeviceSize get_start_offset() const + { + return m_start_offset; + } + + const Anvil::MemoryBlockType& get_type() const + { + return m_type; + } + + /* Requires VK_KHR_device_group */ + void set_device_mask(const uint32_t& in_device_mask) + { + m_device_mask = in_device_mask; + } + + /* Requires VK_KHR_external_memory */ + void set_exportable_external_memory_handle_types(const Anvil::ExternalMemoryHandleTypeFlags& in_external_memory_handle_types) + { + m_exportable_external_memory_handle_types = in_external_memory_handle_types; + } + + /* Lets the app specify imported handle details. + * + * Under Windows, if @param in_name is zero-sized, member of the VkImportMemoryWin32HandleInfoKHR struct, as chained to the VkMemoryAllocateInfo struct chain, + * will be set to nullptr. + * + * NOTE: This function MUST NOT be used for importing host pointers. Please use the other set_external_handle_import_info() function instead. + * NOTE: For NT handles, you also need to call set_external_nt_handle_import_info(). + * + * Requires VK_KHR_external_memory_fd under Windows. + * Requires VK_KHR_external_memory_win32 under Windows. + */ + #if defined(_WIN32) + void set_external_handle_import_info(ExternalHandleType in_handle, + const std::wstring& in_name) + #else + void set_external_handle_import_info(ExternalHandleType in_handle) + #endif + { + anvil_assert(!m_external_handle_import_info_specified); + + m_external_handle_import_info.handle = in_handle; + + #if defined(_WIN32) + { + m_external_handle_import_info.name = in_name; + } + #endif + + m_external_handle_import_info_specified = true; + } + + /* Lets the app specify imported handle details. + * + * NOTE: This function MUST NOT be used for importing handles other than host pointers. Please use the other set_external_handle_import_info() function instead. + * + * Requires VK_EXT_external_memory_host. + */ + void set_external_handle_import_info(void* in_host_pointer) + { + anvil_assert(!m_external_handle_import_info_specified); + anvil_assert( in_host_pointer != nullptr); + + m_external_handle_import_info.host_ptr = in_host_pointer; + m_external_handle_import_info_specified = true; + } + + #if defined(_WIN32) + /* Lets the app specify additional details for exportable NT handles. + * + * If @param in_name is zero-sized, member of the VkExportMemoryWin32HandleInfoKHR struct, as chained to the VkMemoryAllocateInfo struct chain, + * will be set to nullptr. + * + * Requires VK_KHR_external_memory_win32 + */ + void set_exportable_nt_handle_info(const SECURITY_ATTRIBUTES* in_opt_attributes_ptr, + const DWORD& in_access, + const std::wstring& in_name) + { + anvil_assert(!m_exportable_nt_handle_info_specified); + + m_exportable_nt_handle_info.access = in_access; + m_exportable_nt_handle_info.attributes_ptr = in_opt_attributes_ptr; + m_exportable_nt_handle_info.name = in_name; + + m_exportable_nt_handle_info_specified = true; + } + #endif + + void set_imported_external_memory_handle_type(const Anvil::ExternalMemoryHandleTypeFlagBits& in_memory_handle_type) + { + m_imported_external_memory_handle_type = in_memory_handle_type; + } + + void set_mt_safety(const Anvil::MTSafety& in_mt_safety) + { + m_mt_safety = in_mt_safety; + } + + /* Call to request a dedicated allocation for the memory block. 
Requirements are: + * + * 1) Device must support VK_KHR_dedicated_allocation. + * 2) Either in_opt_buffer_ptr or in_opt_image_ptr must not be null. Cases where both + * are nullptr or != nullptr are not allowed. + * + * May only be called once. + * The specified object must remain alive until the actual memory allocation takes place. + * + */ + void use_dedicated_allocation(Anvil::Buffer* in_opt_buffer_ptr, + Anvil::Image* in_opt_image_ptr) + { + anvil_assert(m_dedicated_allocation_buffer_ptr == nullptr); + anvil_assert(m_dedicated_allocation_image_ptr == nullptr); + anvil_assert(!m_use_dedicated_allocation); + + anvil_assert((in_opt_buffer_ptr == nullptr && in_opt_image_ptr != nullptr) || + (in_opt_buffer_ptr != nullptr && in_opt_image_ptr == nullptr) ); + + m_dedicated_allocation_buffer_ptr = in_opt_buffer_ptr; + m_dedicated_allocation_image_ptr = in_opt_image_ptr; + m_use_dedicated_allocation = true; + } + + void set_memory_priority(const float& in_priority) + { + m_memory_priority = in_priority; + } + + private: + MemoryBlockCreateInfo(const Anvil::MemoryBlockType& in_type, + const uint32_t& in_allowed_memory_bits, + const Anvil::BaseDevice* in_device_ptr, + VkDeviceMemory in_memory, + const Anvil::MemoryFeatureFlags& in_memory_features, + const uint32_t& in_memory_type_index, + const uint32_t& in_n_physical_devices, + const Anvil::OnMemoryBlockReleaseCallbackFunction& in_on_release_callback_function, + const Anvil::PhysicalDevice* const* in_opt_physical_device_ptr_ptr, + Anvil::MemoryBlock* in_parent_memory_block_ptr, + const VkDeviceSize& in_size, + const VkDeviceSize& in_start_offset); + + /* NOTE: Only to be used by Anvil::MemoryBlock! */ + void set_memory_type_index(const uint32_t& in_new_index) + { + m_memory_type_index = in_new_index; + } + + uint32_t m_allowed_memory_bits; + uint32_t m_device_mask; + const Anvil::BaseDevice* m_device_ptr; + Anvil::ExternalMemoryHandleTypeFlags m_exportable_external_memory_handle_types; + Anvil::ExternalMemoryHandleTypeFlagBits m_imported_external_memory_handle_type; + VkDeviceMemory m_memory; + Anvil::MemoryFeatureFlags m_memory_features; + float m_memory_priority; + uint32_t m_memory_type_index; + Anvil::MTSafety m_mt_safety; + Anvil::OnMemoryBlockReleaseCallbackFunction m_on_release_callback_function; + Anvil::MemoryBlock* m_parent_memory_block_ptr; + std::vector m_physical_devices; + VkDeviceSize m_size; + VkDeviceSize m_start_offset; + const Anvil::MemoryBlockType m_type; + + Anvil::Buffer* m_dedicated_allocation_buffer_ptr; + Anvil::Image* m_dedicated_allocation_image_ptr; + bool m_use_dedicated_allocation; + + #ifdef _WIN32 + ExternalNTHandleInfo m_exportable_nt_handle_info; + bool m_exportable_nt_handle_info_specified; + #endif + + ExternalMemoryHandleImportInfo m_external_handle_import_info; + bool m_external_handle_import_info_specified; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(MemoryBlockCreateInfo); + ANVIL_DISABLE_COPY_CONSTRUCTOR(MemoryBlockCreateInfo); + + friend class MemoryBlock; + }; +} /* namespace Anvil */ + +#endif /* MISC_MEMORY_BLOCK_CREATE_INFO_H */ \ No newline at end of file diff --git a/include/misc/mt_safety.h b/include/misc/mt_safety.h new file mode 100644 index 00000000..82174056 --- /dev/null +++ b/include/misc/mt_safety.h @@ -0,0 +1,83 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +#ifndef MISC_MT_SAFETY_H +#define MISC_MT_SAFETY_H + +#include +#include + +namespace Anvil +{ + class MTSafetySupportProvider + { + public: + explicit MTSafetySupportProvider(const bool& in_enable) + { + if (in_enable) + { + m_mutex_ptr.reset( + new std::recursive_mutex() + ); + } + } + + virtual ~MTSafetySupportProvider() + { + /* Stub */ + } + + inline bool is_mt_safe() const + { + return (m_mutex_ptr != nullptr); + } + + inline void lock() const + { + if (m_mutex_ptr != nullptr) + { + m_mutex_ptr->lock(); + } + } + + inline void unlock() const + { + if (m_mutex_ptr != nullptr) + { + m_mutex_ptr->unlock(); + } + } + + protected: + std::recursive_mutex* get_mutex() const + { + return m_mutex_ptr.get(); + } + + private: + std::unique_ptr m_mutex_ptr; + + MTSafetySupportProvider (const MTSafetySupportProvider&); + MTSafetySupportProvider& operator=(const MTSafetySupportProvider&) const; + }; +}; /* namespace Anvil */ + +#endif /* MISC_MT_SAFETY_H */ diff --git a/include/misc/object_tracker.h b/include/misc/object_tracker.h index 0202c223..1a612e70 100644 --- a/include/misc/object_tracker.h +++ b/include/misc/object_tracker.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -37,6 +37,7 @@ #ifndef MISC_OBJECT_TRACKER_H #define MISC_OBJECT_TRACKER_H +#include #include #include "misc/callbacks.h" #include "misc/types.h" @@ -45,14 +46,19 @@ namespace Anvil { typedef enum { - /* Callback issued when a new Anvil object is instantiated + /* Callback issued when a new GLSLShaderToSPIRVGenerator object is instantiated * * @param callback_arg OnObjectRegisteredCallbackArgument structure instance - **/ - OBJECT_TRACKER_CALLBACK_ID_ON_OBJECT_REGISTERED, + */ + OBJECT_TRACKER_CALLBACK_ID_ON_GLSL_SHADER_TO_SPIRV_GENERATOR_OBJECT_REGISTERED, + /* Callback issued when a new ShaderModule object is instantiated + * + * @param callback_arg OnObjectRegisteredCallbackArgument structure instance + */ + OBJECT_TRACKER_CALLBACK_ID_ON_SHADER_MODULE_OBJECT_REGISTERED, - /* Callback issued when an existing Anvil object instance is about to go out of scope. + /* Callback issued when an existing Device object instance is about to go out of scope. 
* * This callback IS issued BEFORE a corresponding Vulkan handle is destroyed. * @@ -61,20 +67,41 @@ namespace Anvil * * @param callback_arg OnObjectAboutToBeUnregisteredCallbackArgument structure instance **/ - OBJECT_TRACKER_CALLBACK_ID_ON_OBJECT_ABOUT_TO_BE_UNREGISTERED, + OBJECT_TRACKER_CALLBACK_ID_ON_DEVICE_OBJECT_ABOUT_TO_BE_UNREGISTERED, - /* Specialized case of OBJECT_TRACKER_CALLBACK_ID_ON_OBJECT_ABOUT_TO_BE_UNREGISTERED. + /* Callback issued when an existing GLSLShaderToSPIRVGenerator object instance is about to go out of scope. * - * Uses the same callback argument. - */ - OBJECT_TRACKER_CALLBACK_ID_ON_DEVICE_OBJECT_ABOUT_TO_BE_UNREGISTERED, + * This callback IS issued BEFORE a corresponding Vulkan handle is destroyed. + * + * This callback MAY be issued FROM WITHIN the object's destructor, implying all WEAK POINTERS pointing + * to the wrapper instance will have been expired at the time of the callback. + * + * @param callback_arg OnObjectAboutToBeUnregisteredCallbackArgument structure instance + **/ + OBJECT_TRACKER_CALLBACK_ID_ON_GLSL_SHADER_TO_SPIRV_GENERATOR_OBJECT_ABOUT_TO_BE_UNREGISTERED, - /* Specialized case of OBJECT_TRACKER_CALLBACK_ID_ON_OBJECT_ABOUT_TO_BE_UNREGISTERED. + /* Callback issued when an existing PipelineLayout object instance is about to go out of scope. * - * Uses the same callback argument. - */ + * This callback IS issued BEFORE a corresponding Vulkan handle is destroyed. + * + * This callback MAY be issued FROM WITHIN the object's destructor, implying all WEAK POINTERS pointing + * to the wrapper instance will have been expired at the time of the callback. + * + * @param callback_arg OnObjectAboutToBeUnregisteredCallbackArgument structure instance + **/ OBJECT_TRACKER_CALLBACK_ID_ON_PIPELINE_LAYOUT_OBJECT_ABOUT_TO_BE_UNREGISTERED, + /* Callback issued when an existing ShaderModule object instance is about to go out of scope. + * + * This callback IS issued BEFORE a corresponding Vulkan handle is destroyed. + * + * This callback MAY be issued FROM WITHIN the object's destructor, implying all WEAK POINTERS pointing + * to the wrapper instance will have been expired at the time of the callback. + * + * @param callback_arg OnObjectAboutToBeUnregisteredCallbackArgument structure instance + **/ + OBJECT_TRACKER_CALLBACK_ID_ON_SHADER_MODULE_OBJECT_ABOUT_TO_BE_UNREGISTERED, + OBJECT_TRACKER_CALLBACK_ID_COUNT, } ObjectTrackerCallbackID; @@ -100,16 +127,16 @@ namespace Anvil void check_for_leaks() const; /** Retrieves an alive object of user-specified type at given index. */ - void* get_object_at_index(ObjectType in_object_type, - uint32_t in_alloc_index) const; + void* get_object_at_index(const ObjectType& in_object_type, + uint32_t in_alloc_index) const; /** Registers a new object of the specified type. * * @param in_object_type Wrapper object type. * @param in_object_ptr Object instance. The object is NOT retained. **/ - void register_object(ObjectType in_object_type, - void* in_object_ptr); + void register_object(const ObjectType& in_object_type, + void* in_object_ptr); /** Stops tracking the specified object. * @@ -120,8 +147,8 @@ namespace Anvil * been registered earlier with a register_object() call, or else an * assertion failure will occur. 
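A short sketch of manual object tracking with the API above. Note that the accessor used to reach the tracker instance ("ObjectTracker::get()") is an assumption, as are any concrete ObjectType enumerators; neither is visible in this hunk.

    // Minimal sketch: manual registration/unregistration plus the leak check at teardown.
    #include "misc/object_tracker.h"

    void track_lifetime_example(const Anvil::ObjectType& in_object_type,
                                void*                    in_object_ptr)
    {
        auto tracker_ptr = Anvil::ObjectTracker::get(); /* assumed accessor, not shown in this patch */

        tracker_ptr->register_object  (in_object_type, in_object_ptr);
        /* ... the wrapped object lives here ... */
        tracker_ptr->unregister_object(in_object_type, in_object_ptr);

        /* At application teardown: reports anything that is still alive. */
        tracker_ptr->check_for_leaks();
    }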
**/ - void unregister_object(ObjectType in_object_type, - void* in_object_ptr); + void unregister_object(const ObjectType& in_object_type, + void* in_object_ptr); private: /* Private type declarations */ @@ -165,12 +192,13 @@ namespace Anvil ObjectTracker (const ObjectTracker&); ObjectTracker& operator=(const ObjectTracker&); - const char* get_object_type_name(ObjectType in_object_type) const; + const char* get_object_type_name(const ObjectType& in_object_type) const; /* Private members */ mutable std::mutex m_cs; - ObjectAllocations m_object_allocations [OBJECT_TYPE_COUNT]; - uint32_t m_n_objects_allocated_array[OBJECT_TYPE_COUNT]; + + mutable std::map m_object_allocations; + std::map m_n_objects_allocated_array; }; }; /* namespace Anvil */ diff --git a/include/misc/page_tracker.h b/include/misc/page_tracker.h index fdda511c..13b3c2a4 100644 --- a/include/misc/page_tracker.h +++ b/include/misc/page_tracker.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -42,19 +42,21 @@ namespace Anvil explicit PageTracker(VkDeviceSize in_region_size, VkDeviceSize in_page_size); - /** Retrieves a memory block assigned to the region . + /** Retrieves a memory block assigned to the region . * + * NOTE: in_size must not be larger than page size of the memory block. * NOTE: The caller should NOT assume subsequent pages are assigned the same memory block, unless the application never binds * more than one memory block to a sparsely-bound buffer / image. * - * @param in_start_offset_page_aligned Start offset of the page. Must be page size-aligned. - * @param out_memory_region_start_offset_ptr Deref will be set to the start offset, which corresponds to @param in_start_offset_page_aligned + * @param in_start_offset Start offset of the page.. + * @param out_memory_region_start_offset_ptr Deref will be set to the start offset, which corresponds to @param in_start_offset * from the returned memory object's PoV. Must not be NULL. * * @return Memory block instance bound to the specified page OR null, if no memory block has been assigned to the memory region. */ - std::shared_ptr get_memory_block(VkDeviceSize in_start_offset_page_aligned, - VkDeviceSize* out_memory_region_start_offset_ptr); + Anvil::MemoryBlock* get_memory_block(VkDeviceSize in_start_offset_page_aligned, + VkDeviceSize in_size, + VkDeviceSize* out_memory_region_start_offset_ptr) const; /** The same memory block is often bound to more than just one page. PageTracker * coalesces such occurences into a single descriptor. @@ -67,7 +69,7 @@ namespace Anvil * * @return The requested memory block. */ - std::shared_ptr get_memory_block(uint32_t in_n_memory_block) const + Anvil::MemoryBlock* get_memory_block(uint32_t in_n_memory_block) const { anvil_assert(in_n_memory_block < m_memory_blocks.size() ); @@ -112,24 +114,24 @@ namespace Anvil * * @return true if successful, false otherwise. 
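The page tracker changes above are easiest to read next to a small usage sketch. The page and region sizes below are illustrative values only; the memory block instance is assumed to come from elsewhere.

    // Minimal sketch: bind a memory block to the first two pages of a tracked
    // region, then ask which block backs the second page.
    #include "misc/page_tracker.h"

    Anvil::MemoryBlock* page_tracker_example(Anvil::MemoryBlock* in_memory_block_ptr)
    {
        const VkDeviceSize page_size   = 65536;          /* e.g. the sparse block size (illustrative) */
        const VkDeviceSize region_size = 16 * page_size; /* total size of the tracked resource        */

        Anvil::PageTracker tracker(region_size,
                                   page_size);

        /* Back pages 0..1 with the supplied memory block, starting at the block's offset 0. */
        tracker.set_binding(in_memory_block_ptr,
                            0,              /* in_memory_block_start_offset */
                            0,              /* in_start_offset              */
                            2 * page_size); /* in_size                      */

        /* Query the block backing page 1; per the note above, in_size must not exceed the page size. */
        VkDeviceSize memory_region_start_offset = 0;

        return tracker.get_memory_block(page_size, /* in_start_offset */
                                        page_size, /* in_size         */
                                       &memory_region_start_offset);
    }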
**/ - bool set_binding(std::shared_ptr in_memory_block_ptr, - VkDeviceSize in_memory_block_start_offset, - VkDeviceSize in_start_offset, - VkDeviceSize in_size); + bool set_binding(MemoryBlock* in_memory_block_ptr, + VkDeviceSize in_memory_block_start_offset, + VkDeviceSize in_start_offset, + VkDeviceSize in_size); private: /* Private type definitions */ typedef struct MemoryBlockBinding { - std::shared_ptr memory_block_ptr; - VkDeviceSize memory_block_start_offset; - VkDeviceSize size; - VkDeviceSize start_offset; - - MemoryBlockBinding(std::shared_ptr in_memory_block_ptr, - VkDeviceSize in_memory_block_start_offset, - VkDeviceSize in_size, - VkDeviceSize in_start_offset) + MemoryBlock* memory_block_ptr; + VkDeviceSize memory_block_start_offset; + VkDeviceSize size; + VkDeviceSize start_offset; + + MemoryBlockBinding(MemoryBlock* in_memory_block_ptr, + VkDeviceSize in_memory_block_start_offset, + VkDeviceSize in_size, + VkDeviceSize in_start_offset) { memory_block_ptr = in_memory_block_ptr; memory_block_start_offset = in_memory_block_start_offset; diff --git a/include/misc/pools.h b/include/misc/pools.h index 939c256e..b986e0c0 100644 --- a/include/misc/pools.h +++ b/include/misc/pools.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -62,9 +62,9 @@ namespace Anvil /* Stub */ } - virtual PoolItem create_item () = 0; - virtual void release_item(PoolItem in_item_ptr) = 0; - virtual void reset_item (PoolItem in_item_ptr) = 0; + virtual PoolItem create_item () = 0; + virtual void release_item(PoolItem in_item_ptr) = 0; + virtual void reset_item (PoolItem& in_item_ptr) = 0; }; /** Generic pool implementation */ @@ -80,13 +80,16 @@ namespace Anvil PoolItemContainer(PoolItemPtrType in_item) { - item = in_item; + item = std::move(in_item); } bool operator==(const PoolItemType* in_item_ptr) const { return item.get() == in_item_ptr; } + + PoolItemContainer(const PoolItemContainer&) = delete; + PoolItemContainer& operator=(const PoolItemContainer&) = delete; }; /** A functor which returns an object back to the pool. 
@@ -145,9 +148,13 @@ namespace Anvil n_item < in_n_items_to_preallocate; ++n_item) { - PoolItemContainer new_item_container(m_worker_ptr->create_item() ); + std::unique_ptr > new_item_container_ptr( + new PoolItemContainer(m_worker_ptr->create_item() ) + ); - m_available_pool_item_containers.push_back(new_item_container); + m_available_pool_item_containers.push_back( + std::move(new_item_container_ptr) + ); } } @@ -160,28 +167,28 @@ namespace Anvil { while (!m_active_pool_item_containers.empty()) { - Anvil::PoolItemContainer current_item_container = m_active_pool_item_containers.front(); + std::unique_ptr >& current_item_container = m_active_pool_item_containers.back(); - m_worker_ptr->release_item(current_item_container.item); + m_worker_ptr->release_item( + std::move(current_item_container->item) + ); m_active_pool_item_containers.pop_back(); } while (!m_available_pool_item_containers.empty()) { - Anvil::PoolItemContainer current_item_container = m_available_pool_item_containers.front(); + std::unique_ptr >& current_item_container = m_available_pool_item_containers.back(); - m_worker_ptr->release_item(current_item_container.item); + m_worker_ptr->release_item( + std::move(current_item_container->item) + ); m_available_pool_item_containers.pop_back(); } - if (m_worker_ptr != nullptr) - { - delete m_worker_ptr; - - m_worker_ptr = nullptr; - } + delete m_worker_ptr; + m_worker_ptr = nullptr; } /** Returns pool capacity */ @@ -205,19 +212,26 @@ namespace Anvil if (!m_available_pool_item_containers.empty()) { - result = PoolItemPtrType(m_available_pool_item_containers.back().item.get(), + result = PoolItemPtrType(m_available_pool_item_containers.back()->item.get(), release_functor); - m_active_pool_item_containers.push_back (m_available_pool_item_containers.back() ); + m_active_pool_item_containers.push_back( + std::move(m_available_pool_item_containers.back() ) + ); + m_available_pool_item_containers.pop_back(); } else { - PoolItemContainer new_item_container(m_worker_ptr->create_item() ); + std::unique_ptr > new_item_container( + new PoolItemContainer(m_worker_ptr->create_item() ) + ); - m_active_pool_item_containers.push_back(new_item_container); + m_active_pool_item_containers.push_back( + std::move(new_item_container) + ); - result = PoolItemPtrType(m_active_pool_item_containers.back().item.get(), + result = PoolItemPtrType(m_active_pool_item_containers.back()->item.get(), release_functor); } @@ -229,8 +243,8 @@ namespace Anvil /** Stores the provided instance back in the pool. */ void return_item(PoolItemType* in_item_ptr) { - PoolItemContainer container; - bool has_found = false; + std::unique_ptr > container; + bool has_found = false; ANVIL_REDUNDANT_VARIABLE(has_found); @@ -238,9 +252,9 @@ namespace Anvil iterator != m_active_pool_item_containers.end(); ++iterator) { - if (iterator->item.get() == in_item_ptr) + if ((*iterator)->item.get() == in_item_ptr) { - container = *iterator; + container = std::move(*iterator); has_found = true; m_active_pool_item_containers.erase(iterator); @@ -251,12 +265,14 @@ namespace Anvil anvil_assert(has_found); - m_available_pool_item_containers.push_back(container); + m_available_pool_item_containers.push_back( + std::move(container) + ); } protected: /* Protected type declarations */ - typedef std::vector > PoolItemContainers; + typedef std::vector > > PoolItemContainers; /* Protected functions */ @@ -278,8 +294,8 @@ namespace Anvil /** Implements IPoolWorker interface for primary command buffers. 
*/ - template - class CommandBufferPoolWorker : public IPoolWorker > + template + class CommandBufferPoolWorker : public IPoolWorker { public: /* Public functions */ @@ -291,7 +307,7 @@ namespace Anvil * @param in_parent_command_pool_ptr Command pool instance, from which command buffers * should be spawned. Must not be nullptr. **/ - CommandBufferPoolWorker(std::shared_ptr in_parent_command_pool_ptr) + CommandBufferPoolWorker(Anvil::CommandPool* in_parent_command_pool_ptr) :m_parent_command_pool_ptr(in_parent_command_pool_ptr) { anvil_assert(m_parent_command_pool_ptr != nullptr) @@ -306,25 +322,9 @@ namespace Anvil /* Stub */ } - /** Creates a new primary / secondary command buffer instance. The command buffer - * is taken from the pool specified at worker instantiation time. **/ - virtual std::shared_ptr create_item() = 0; - - /** Resets contents of the specified command buffer. - * - * @param in_item_ptr Command buffer to reset. Must not be nullptr. - **/ - virtual void reset_item(std::shared_ptr in_item_ptr) = 0; - - /** Releases the specified command buffer. **/ - void release_item(std::shared_ptr in_item_ptr) - { - /* Nop */ - } - protected: /* Protected variables */ - std::shared_ptr m_parent_command_pool_ptr; + Anvil::CommandPool* m_parent_command_pool_ptr; private: /* Private functions */ @@ -332,10 +332,10 @@ namespace Anvil bool operator= (const CommandBufferPoolWorker&); }; - class PrimaryCommandBufferPoolWorker : public CommandBufferPoolWorker + class PrimaryCommandBufferPoolWorker : public CommandBufferPoolWorker { public: - PrimaryCommandBufferPoolWorker(std::shared_ptr in_parent_command_pool_ptr) + PrimaryCommandBufferPoolWorker(Anvil::CommandPool* in_parent_command_pool_ptr) :CommandBufferPoolWorker(in_parent_command_pool_ptr) { /* Stub */ @@ -346,17 +346,22 @@ namespace Anvil /* Stub */ } - std::shared_ptr create_item(); - void reset_item (std::shared_ptr in_item_ptr); + Anvil::PrimaryCommandBufferUniquePtr create_item (); + void reset_item (Anvil::PrimaryCommandBufferUniquePtr& in_item_ptr); + + void release_item(Anvil::PrimaryCommandBufferUniquePtr in_item_ptr) + { + /* Stub */ + } ANVIL_DISABLE_ASSIGNMENT_OPERATOR(PrimaryCommandBufferPoolWorker); ANVIL_DISABLE_COPY_CONSTRUCTOR (PrimaryCommandBufferPoolWorker); }; - class SecondaryCommandBufferPoolWorker : public CommandBufferPoolWorker + class SecondaryCommandBufferPoolWorker : public CommandBufferPoolWorker { public: - SecondaryCommandBufferPoolWorker(std::shared_ptr in_parent_command_pool_ptr) + SecondaryCommandBufferPoolWorker(Anvil::CommandPool* in_parent_command_pool_ptr) :CommandBufferPoolWorker(in_parent_command_pool_ptr) { /* Stub */ @@ -367,8 +372,13 @@ namespace Anvil /* Stub */ } - std::shared_ptr create_item(); - void reset_item (std::shared_ptr in_item_ptr); + Anvil::SecondaryCommandBufferUniquePtr create_item (); + void reset_item (Anvil::SecondaryCommandBufferUniquePtr& in_item_ptr); + + void release_item(Anvil::SecondaryCommandBufferUniquePtr in_item_ptr) + { + /* Stub */ + } ANVIL_DISABLE_ASSIGNMENT_OPERATOR(SecondaryCommandBufferPoolWorker); ANVIL_DISABLE_COPY_CONSTRUCTOR (SecondaryCommandBufferPoolWorker); @@ -386,14 +396,13 @@ namespace Anvil * @param in_n_preallocated_items Number of command buffers to preallocate at creation time. 
* **/ - static std::shared_ptr > create(std::shared_ptr in_parent_command_pool_ptr, - uint32_t in_n_preallocated_items) + static std::unique_ptr > create(Anvil::CommandPool* in_parent_command_pool_ptr, + uint32_t in_n_preallocated_items) { - std::shared_ptr > result_ptr; + std::unique_ptr > result_ptr; result_ptr.reset( - new CommandBufferPool(in_parent_command_pool_ptr, - in_n_preallocated_items, + new CommandBufferPool(in_n_preallocated_items, new PoolWorker(in_parent_command_pool_ptr)) ); @@ -409,9 +418,8 @@ namespace Anvil private: /* Constructor. Please see create() for documentation */ - CommandBufferPool(std::shared_ptr in_parent_command_pool_ptr, - uint32_t in_n_preallocated_items, - PoolWorker* in_pool_worker_ptr) + CommandBufferPool(uint32_t in_n_preallocated_items, + PoolWorker* in_pool_worker_ptr) :GenericPool(in_n_preallocated_items, in_pool_worker_ptr) { @@ -421,8 +429,8 @@ namespace Anvil }; /* Command pool specializations */ - typedef CommandBufferPool > PrimaryCommandBufferPool; - typedef CommandBufferPool > SecondaryCommandBufferPool; + typedef CommandBufferPool PrimaryCommandBufferPool; + typedef CommandBufferPool SecondaryCommandBufferPool; }; /* namespace Anvil */ diff --git a/include/misc/ref_counter.h b/include/misc/ref_counter.h index d9176630..a6c749b1 100644 --- a/include/misc/ref_counter.h +++ b/include/misc/ref_counter.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal diff --git a/include/misc/render_pass_create_info.h b/include/misc/render_pass_create_info.h new file mode 100644 index 00000000..3677395c --- /dev/null +++ b/include/misc/render_pass_create_info.h @@ -0,0 +1,936 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef MISC_RENDERPASS_CREATE_INFO_H +#define MISC_RENDERPASS_CREATE_INFO_H + +#include "misc/types.h" + +namespace Anvil +{ + class RenderPassCreateInfo + { + public: + RenderPassCreateInfo(const Anvil::BaseDevice* in_device_ptr); + ~RenderPassCreateInfo(); + + /** Adds a new render-pass color attachment to the internal data model. + * + * @param in_format Image format to use for the color attachment. 
+ * @param in_sample_count Number of samples to use for the color attachment (expressed as enum value). + * @param in_load_op Load operation to use for the color attachment. + * @param in_store_op Store operation to use for the color attachment. + * @param in_initial_layout Initial layout to use for the color attachment. + * @param in_final_layout Final layout to use for the color attachment. + * @param in_may_alias true if the memory backing of the image that will be attached may + * overlap with the backing of another attachment within the same + * subpass. + * @param out_attachment_id_ptr Deref will be set to a unique ID assigned to the new color attachment. + * Must not be nullptr. + * + * @return true if the function executed successfully, false otherwise. + **/ + bool add_color_attachment(Anvil::Format in_format, + Anvil::SampleCountFlagBits in_sample_count, + Anvil::AttachmentLoadOp in_load_op, + Anvil::AttachmentStoreOp in_store_op, + Anvil::ImageLayout in_initial_layout, + Anvil::ImageLayout in_final_layout, + bool in_may_alias, + RenderPassAttachmentID* out_attachment_id_ptr); + + /** Adds a new render-pass depth/stencil attachment to the internal data model. + * + * @param in_format Image format to use for the color attachment. + * @param in_sample_count Number of samples to use for the depth-stencil attachment (expressed as enum value). + * @param in_depth_load_op Load operation to use for the depth data. + * @param in_depth_store_op Store operation to use for the depth data. + * @param in_stencil_load_op Load operation to use for the stencil data. + * @param in_stencil_store_op Store operation to use for the stencil data. + * @param in_initial_layout Initial layout to use for the color attachment. + * @param in_final_layout Final layout to use for the color attachment. + * @param in_may_alias true if the memory backing of the image that will be attached may + * overlap with the backing of another attachment within the same + * subpass. + * @param out_attachment_id_ptr Deref will be set to a unique ID assigned to the new color attachment. + * Must not be nullptr. + * + * @return true if the function executed successfully, false otherwise. + **/ + bool add_depth_stencil_attachment(Anvil::Format in_format, + Anvil::SampleCountFlagBits in_sample_count, + Anvil::AttachmentLoadOp in_depth_load_op, + Anvil::AttachmentStoreOp in_depth_store_op, + Anvil::AttachmentLoadOp in_stencil_load_op, + Anvil::AttachmentStoreOp in_stencil_store_op, + Anvil::ImageLayout in_initial_layout, + Anvil::ImageLayout in_final_layout, + bool in_may_alias, + RenderPassAttachmentID* out_attachment_id_ptr); + + /** Adds a new subpass to the internal data model. + * + * @param out_subpass_id_ptr Deref will be set to a unique ID of the created subpass. + * Must not be nullptr. + * + * @return true if the function executed successfully, false otherwise. + **/ + bool add_subpass(SubPassID* out_subpass_id_ptr); + + /** Adds a new color attachment to the RenderPass instance's specified subpass. + * + * @param in_subpass_id ID of the render-pass subpass to update. + * @param in_layout Layout to use for the attachment when executing the subpass. + * Driver takes care of transforming the attachment to the requested layout + * before subpass commands starts executing. + * @param in_attachment_id ID of the render-pass attachment the new sub-pass color attachment + * should refer to. + * @param in_location Location, under which the specified attachment should be accessible to + * the fragment shader. 
+ * @param in_attachment_resolve_id_ptr Must be nullptr if the new color attachment should be single-sample. + * For multi-sample, this argument can optionally point to a render-pass + * attachment descriptor, to which the MS attachment should be resolved to. + * + * @return true if the function executed successfully, false otherwise. + **/ + bool add_subpass_color_attachment(SubPassID in_subpass_id, + Anvil::ImageLayout in_layout, + RenderPassAttachmentID in_attachment_id, + uint32_t in_location, + const RenderPassAttachmentID* in_opt_attachment_resolve_id_ptr = nullptr); + + /** Configures the depth+stencil attachment the subpass should use. + * + * Note that only up to one depth/stencil attachment may be added for each subpass. + * Any attempt to add more such attachments will result in an assertion failure. + * + * If VK_KHR_depth_stencil_resolve is supported, DS resolve attachment can be configured by calling the func prototype + * which includes @param in_attachment_resolve_id_ptr, @param in_depth_resolve_mode_ptr and @param in_stencil_resolve_mode_ptr + * parameters. Otherwise, please use the prototype w/o the arguments. + * + * @param in_subpass_id ID of the subpass to update the depth+stencil attachment for. + * The subpass must have been earlier created with an add_subpass() call. + * @param in_layout Layout to use for the attachment when executing the subpass. + * Driver takes care of transforming the attachment to the requested layout + * before subpass commands starts executing. + * @param in_attachment_id ID of the render-pass attachment the depth-stencil attachment should refer to. + * @param in_attachment_resolve_id_ptr Pointer to an ID of the renderpass attachment, to which the MS attachment should be resolved to. + * Requires VK_KHR_depth_stencil_resolve support. If not nullptr, @param in_depth_resolve_mode_ptr + * and @param in_stencil_resolve_mode_ptr must also not be nullptr. + * @param in_depth_resolve_mode_ptr Pointer to resolve mode that should be used for the depth aspect. + * Requires VK_KHR_depth_stencil_resolve support. If not nullptr, @param in_opt_attachment_resolve_id_ptr + * and @param in_opt_stencil_resolve_mode_ptr must also not be nullptr. + * @param in_stencil_resolve_mode_ptr Pointer to resolve mode that should be used for the stencil aspect. + * Requires VK_KHR_depth_stencil_resolve support. If not nullptr, @param in_opt_attachment_resolve_id_ptr + * and @param in_opt_depth_resolve_mode_ptr must also not be nullptr. + * + * @return true if the function executed successfully, false otherwise. + * + */ + bool add_subpass_depth_stencil_attachment(SubPassID in_subpass_id, + Anvil::ImageLayout in_layout, + RenderPassAttachmentID in_attachment_id, + const RenderPassAttachmentID* in_attachment_resolve_id_ptr, + const Anvil::ResolveModeFlagBits* in_depth_resolve_mode_ptr, + const Anvil::ResolveModeFlagBits* in_stencil_resolve_mode_ptr); + + bool add_subpass_depth_stencil_attachment(SubPassID in_subpass_id, + Anvil::ImageLayout in_layout, + RenderPassAttachmentID in_attachment_id) + { + return add_subpass_depth_stencil_attachment(in_subpass_id, + in_layout, + in_attachment_id, + nullptr, /* in_attachment_resolve_id_ptr */ + nullptr, /* in_depth_resolve_mode_ptr */ + nullptr); /* in_stencil_resolve_mode_ptr */ + } + + /** Adds a new input attachment to the RenderPass instance's specified subpass. + * + * @param in_subpass_id ID of the render-pass subpass to update. + * @param in_layout Layout to use for the attachment when executing the subpass. 
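The attachment/subpass entry points introduced so far compose as in the hedged sketch below: describe attachments, add a subpass, then wire the attachments into it. Only the add_*() signatures are guaranteed by this header; the enumerator spellings (sample count, load/store ops, layouts) are assumed to mirror their Vulkan counterparts.

    // Minimal sketch: one subpass with one color and one depth/stencil attachment.
    #include "misc/render_pass_create_info.h"

    bool describe_simple_render_pass(const Anvil::BaseDevice* in_device_ptr,
                                     Anvil::Format            in_color_format,
                                     Anvil::Format            in_ds_format,
                                     Anvil::ImageLayout       in_color_final_layout)
    {
        Anvil::RenderPassCreateInfo   rp_info(in_device_ptr);
        Anvil::RenderPassAttachmentID color_attachment_id;
        Anvil::RenderPassAttachmentID ds_attachment_id;
        Anvil::SubPassID              subpass_id;
        bool                          result = true;

        result &= rp_info.add_color_attachment        (in_color_format,
                                                       Anvil::SampleCountFlagBits::_1_BIT, /* assumed enumerator */
                                                       Anvil::AttachmentLoadOp::CLEAR,     /* assumed enumerator */
                                                       Anvil::AttachmentStoreOp::STORE,    /* assumed enumerator */
                                                       Anvil::ImageLayout::UNDEFINED,      /* assumed enumerator */
                                                       in_color_final_layout,
                                                       false, /* in_may_alias */
                                                      &color_attachment_id);
        result &= rp_info.add_depth_stencil_attachment(in_ds_format,
                                                       Anvil::SampleCountFlagBits::_1_BIT,
                                                       Anvil::AttachmentLoadOp::CLEAR,
                                                       Anvil::AttachmentStoreOp::DONT_CARE, /* assumed enumerator */
                                                       Anvil::AttachmentLoadOp::DONT_CARE,
                                                       Anvil::AttachmentStoreOp::DONT_CARE,
                                                       Anvil::ImageLayout::UNDEFINED,
                                                       Anvil::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL, /* assumed enumerator */
                                                       false, /* in_may_alias */
                                                      &ds_attachment_id);

        result &= rp_info.add_subpass(&subpass_id);

        result &= rp_info.add_subpass_color_attachment        (subpass_id,
                                                               Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, /* assumed enumerator */
                                                               color_attachment_id,
                                                               0 /* in_location */);
        result &= rp_info.add_subpass_depth_stencil_attachment(subpass_id,
                                                               Anvil::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                                                               ds_attachment_id);

        return result;
    }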
+ * Driver takes care of transforming the attachment to the requested layout + * before subpass commands starts executing. + * @param in_attachment_id ID of the render-pass attachment the new sub-pass input attachment + * should refer to. + * @param in_attachment_index The "input attachment index", under which the specified attachment should + * be accessible to the fragment shader. Do not forget the image view also + * needs to be bound to the descriptor set for the input attachment to work! + * @param in_opt_aspects_accessed If set to a value != ImageAspectFlagBits::UNKNOWN and KHR_maintenance2 is supported, + * the additional information will be passed down to the driver which may result + * in improved performance. + + * @return true if the function executed successfully, false otherwise. + **/ + bool add_subpass_input_attachment(SubPassID in_subpass_id, + Anvil::ImageLayout in_layout, + RenderPassAttachmentID in_attachment_id, + uint32_t in_attachment_index, + const Anvil::ImageAspectFlags& in_opt_aspects_accessed = Anvil::ImageAspectFlagBits::NONE); + + /** Adds a new external->subpass dependency to the internal data model. + * + * NOTE: @param in_dependency_flags must NOT include Anvil::DependencyFlagBits::VIEW_LOCAL_BIT. + * + * @param in_destination_subpass_id ID of the destination subpass. The subpass must have been + * created earlier with an add_subpass() call. + * @param in_source_stage_mask Source pipeline stage mask. + * @param in_destination_stage_mask Destination pipeline stage mask. + * @param in_source_access_mask Source access mask. + * @param in_destination_access_mask Destination access mask. + * @param in_dependency_flags Flags to use for the dependency. + * + * @return true if the dependency was added successfully; false otherwise. + * + **/ + bool add_external_to_subpass_dependency(SubPassID in_destination_subpass_id, + Anvil::PipelineStageFlags in_source_stage_mask, + Anvil::PipelineStageFlags in_destination_stage_mask, + Anvil::AccessFlags in_source_access_mask, + Anvil::AccessFlags in_destination_access_mask, + Anvil::DependencyFlags in_dependency_flags); + + /** Adds a new self-subpass dependency to the internal data model. + * + * NOTE: If @param in_dependency_flags includes Anvil::DependencyFlagBits::VIEW_LOCAL_BIT, you must + * also call set_dependency_view_local_properties() to define the src<->dst view index relationship + * for the subpass. + * + * @param in_destination_subpass_id ID of the subpass to create the dep for. The subpass must have been + * created earlier with an add_subpass() call. + * @param in_source_stage_mask Source pipeline stage mask. + * @param in_destination_stage_mask Destination pipeline stage mask. + * @param in_source_access_mask Source access mask. + * @param in_destination_access_mask Destination access mask. + * @param in_dependency_flags Flags to use for the dependency. + * + * @return true if the dependency was added successfully; false otherwise. + * + **/ + bool add_self_subpass_dependency(SubPassID in_destination_subpass_id, + Anvil::PipelineStageFlags in_source_stage_mask, + Anvil::PipelineStageFlags in_destination_stage_mask, + Anvil::AccessFlags in_source_access_mask, + Anvil::AccessFlags in_destination_access_mask, + Anvil::DependencyFlags in_dependency_flags); + + /** Adds a new subpass->external dependency to the internal data model. + * + * NOTE: @param in_dependency_flags must NOT include Anvil::DependencyFlagBits::VIEW_LOCAL_BIT. + * + * @param in_source_subpass_id ID of the source subpass. 
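As a companion to the dependency entry points above, here is a hedged sketch of a typical external->subpass dependency; the stage/access/dependency enumerator spellings are assumed to mirror the Vulkan flags, while the function signature itself comes from this header.

    // Minimal sketch: wait on prior color-attachment output before the first subpass writes.
    #include "misc/render_pass_create_info.h"

    bool add_color_output_dependency(Anvil::RenderPassCreateInfo* in_rp_info_ptr,
                                     Anvil::SubPassID             in_first_subpass_id)
    {
        return in_rp_info_ptr->add_external_to_subpass_dependency(in_first_subpass_id,
                                                                  Anvil::PipelineStageFlagBits::COLOR_ATTACHMENT_OUTPUT_BIT, /* src stages (assumed enumerator) */
                                                                  Anvil::PipelineStageFlagBits::COLOR_ATTACHMENT_OUTPUT_BIT, /* dst stages                      */
                                                                  Anvil::AccessFlagBits::NONE,                               /* src access (assumed enumerator) */
                                                                  Anvil::AccessFlagBits::COLOR_ATTACHMENT_WRITE_BIT,         /* dst access (assumed enumerator) */
                                                                  Anvil::DependencyFlagBits::NONE);                          /* must not be VIEW_LOCAL_BIT here */
    }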
The subpass must have been
+     *                                    created earlier with an add_subpass() call.
+     * @param in_source_stage_mask       Source pipeline stage mask.
+     * @param in_destination_stage_mask  Destination pipeline stage mask.
+     * @param in_source_access_mask      Source access mask.
+     * @param in_destination_access_mask Destination access mask.
+     * @param in_dependency_flags        Flags to use for the dependency.
+     *
+     * @return true if the dependency was added successfully; false otherwise.
+     **/
+    bool add_subpass_to_external_dependency(SubPassID                 in_source_subpass_id,
+                                            Anvil::PipelineStageFlags in_source_stage_mask,
+                                            Anvil::PipelineStageFlags in_destination_stage_mask,
+                                            Anvil::AccessFlags        in_source_access_mask,
+                                            Anvil::AccessFlags        in_destination_access_mask,
+                                            Anvil::DependencyFlags    in_dependency_flags);
+
+    /** Adds a new subpass->subpass dependency to the internal data model.
+     *
+     * NOTE: If @param in_dependency_flags includes Anvil::DependencyFlagBits::VIEW_LOCAL_BIT, you must
+     *       also call set_dependency_view_local_properties() to define the src<->dst view index relationship
+     *       for the subpass.
+     *
+     * @param in_source_subpass_id       ID of the source subpass. The subpass must have been
+     *                                   created earlier with an add_subpass() call.
+     * @param in_destination_subpass_id  ID of the destination subpass. The subpass must have been
+     *                                   created earlier with an add_subpass() call.
+     * @param in_source_stage_mask       Source pipeline stage mask.
+     * @param in_destination_stage_mask  Destination pipeline stage mask.
+     * @param in_source_access_mask      Source access mask.
+     * @param in_destination_access_mask Destination access mask.
+     * @param in_dependency_flags        Flags to use for the dependency.
+     *
+     * @return true if the dependency was added successfully; false otherwise.
+     **/
+    bool add_subpass_to_subpass_dependency(SubPassID                 in_source_subpass_id,
+                                           SubPassID                 in_destination_subpass_id,
+                                           Anvil::PipelineStageFlags in_source_stage_mask,
+                                           Anvil::PipelineStageFlags in_destination_stage_mask,
+                                           Anvil::AccessFlags        in_source_access_mask,
+                                           Anvil::AccessFlags        in_destination_access_mask,
+                                           Anvil::DependencyFlags    in_dependency_flags);
+
+    /** Tells what type an attachment with user-specified ID has.
+     *
+     * @return true if successful, false otherwise
+     */
+    bool get_attachment_type(RenderPassAttachmentID in_attachment_id,
+                             AttachmentType*        out_attachment_type_ptr) const;
+
+    /** Retrieves properties of the render pass color attachment with the user-specified ID
+     *
+     * @param in_attachment_id           ID of the attachment to retrieve properties of.
+     * @param out_opt_format_ptr         If not nullptr, deref will be set to the format, specified
+     *                                   at attachment creation time. May be nullptr.
+     * @param out_opt_sample_count_ptr   If not nullptr, deref will be set to the sample count, specified
+     *                                   at attachment creation time. May be nullptr.
+     * @param out_opt_load_op_ptr        If not nullptr, deref will be set to the load op, specified at
+     *                                   attachment creation time. May be nullptr.
+     * @param out_opt_store_op_ptr       If not nullptr, deref will be set to the store op, specified at
+     *                                   attachment creation time. May be nullptr.
+     * @param out_opt_initial_layout_ptr If not nullptr, deref will be set to the initial layout, specified
+     *                                   at attachment creation time. May be nullptr.
+     * @param out_opt_final_layout_ptr   If not nullptr, deref will be set to the final layout, specified at
+     *                                   attachment creation time. May be nullptr.
+     * @param out_opt_may_alias_ptr      If not nullptr, deref will be set to true if the attachment
+     *                                   can alias other attachments.
Otherwise, it will be set to false.
+     *                                   May be nullptr.
+     *
+     * @return true if successful, false otherwise.
+     **/
+    bool get_color_attachment_properties(RenderPassAttachmentID      in_attachment_id,
+                                         Anvil::Format*              out_opt_format_ptr         = nullptr,
+                                         Anvil::SampleCountFlagBits* out_opt_sample_count_ptr   = nullptr,
+                                         Anvil::AttachmentLoadOp*    out_opt_load_op_ptr        = nullptr,
+                                         Anvil::AttachmentStoreOp*   out_opt_store_op_ptr       = nullptr,
+                                         Anvil::ImageLayout*         out_opt_initial_layout_ptr = nullptr,
+                                         Anvil::ImageLayout*         out_opt_final_layout_ptr   = nullptr,
+                                         bool*                       out_opt_may_alias_ptr      = nullptr) const;
+
+    /** Retrieves properties of a dependency at user-specified index.
+     *
+     * @param in_n_dependency                 Index of the dependency to retrieve properties of.
+     * @param out_destination_subpass_id_ptr  Deref will be set to the ID of the dependency's destination,
+     *                                        or to UINT32_MAX, if the destination of the dependency is external.
+     *                                        Must not be null.
+     * @param out_source_subpass_id_ptr       Deref will be set to the ID of the dependency's source,
+     *                                        or to UINT32_MAX, if the source of the dependency is external.
+     *                                        Must not be null.
+     * @param out_destination_stage_mask_ptr  Deref will be set to the destination stage mask set for the dependency.
+     *                                        Must not be null.
+     * @param out_source_stage_mask_ptr       Deref will be set to the source stage mask set for the dependency.
+     *                                        Must not be null.
+     * @param out_destination_access_mask_ptr Deref will be set to the destination access mask set for the dependency.
+     *                                        Must not be null.
+     * @param out_source_access_mask_ptr      Deref will be set to the source access mask set for the dependency.
+     *                                        Must not be null.
+     * @param out_flags_ptr                   Deref will be set to flags specified for the dependency. Must not be null.
+     *
+     * @return true if successful, false otherwise
+     */
+    bool get_dependency_properties(uint32_t                   in_n_dependency,
+                                   SubPassID*                 out_destination_subpass_id_ptr,
+                                   SubPassID*                 out_source_subpass_id_ptr,
+                                   Anvil::PipelineStageFlags* out_destination_stage_mask_ptr,
+                                   Anvil::PipelineStageFlags* out_source_stage_mask_ptr,
+                                   Anvil::AccessFlags*        out_destination_access_mask_ptr,
+                                   Anvil::AccessFlags*        out_source_access_mask_ptr,
+                                   DependencyFlags*           out_flags_ptr) const;
+
+    /* Returns the view offset associated with a given dependency. For view-global dependencies, zero will be returned.
+     *
+     * This function should only be called if is_multiview_enabled() returns true.
+     *
+     * @param in_n_dependency     Index of the dependency to use for the query.
+     * @param out_view_offset_ptr If the function returns true, deref will be set to the view offset associated with the dependency.
+     *                            Must not be nullptr.
+     *
+     * @return true if successful, false otherwise.
+     **/
+    bool get_dependency_multiview_properties(uint32_t in_n_dependency,
+                                             int32_t* out_view_offset_ptr) const;
+
+    /** Retrieves properties of the render pass depth/stencil attachment with the user-specified ID
+     *
+     * @param in_attachment_id             ID of the attachment to retrieve properties of.
+     * @param out_opt_format_ptr           If not nullptr, deref will be set to the format, specified
+     *                                     at attachment creation time. May be nullptr.
+     * @param out_opt_sample_count_ptr     If not nullptr, deref will be set to the sample count, specified
+     *                                     at attachment creation time. May be nullptr.
+     * @param out_opt_depth_load_op_ptr    If not nullptr, deref will be set to the depth-specific load op, specified at
+     *                                     attachment creation time. May be nullptr.
+     * @param out_opt_depth_store_op_ptr   If not nullptr, deref will be set to the depth-specific store op, specified at
+     *                                     attachment creation time. May be nullptr.
+     * @param out_opt_stencil_load_op_ptr  If not nullptr, deref will be set to the stencil-specific load op, specified at
+     *                                     attachment creation time. May be nullptr.
+     * @param out_opt_stencil_store_op_ptr If not nullptr, deref will be set to the stencil-specific store op, specified at
+     *                                     attachment creation time. May be nullptr.
+     * @param out_opt_initial_layout_ptr   If not nullptr, deref will be set to the initial layout, specified
+     *                                     at attachment creation time. May be nullptr.
+     * @param out_opt_final_layout_ptr     If not nullptr, deref will be set to the final layout, specified at
+     *                                     attachment creation time. May be nullptr.
+     * @param out_opt_may_alias_ptr        If not nullptr, deref will be set to true if the attachment
+     *                                     can alias other attachments. Otherwise, it will be set to false.
+     *                                     May be nullptr.
+     *
+     * @return true if successful, false otherwise.
+     **/
+    bool get_depth_stencil_attachment_properties(RenderPassAttachmentID      in_attachment_id,
+                                                 Anvil::Format*              out_opt_format_ptr           = nullptr,
+                                                 Anvil::SampleCountFlagBits* out_opt_sample_count_ptr     = nullptr,
+                                                 Anvil::AttachmentLoadOp*    out_opt_depth_load_op_ptr    = nullptr,
+                                                 Anvil::AttachmentStoreOp*   out_opt_depth_store_op_ptr   = nullptr,
+                                                 Anvil::AttachmentLoadOp*    out_opt_stencil_load_op_ptr  = nullptr,
+                                                 Anvil::AttachmentStoreOp*   out_opt_stencil_store_op_ptr = nullptr,
+                                                 Anvil::ImageLayout*         out_opt_initial_layout_ptr   = nullptr,
+                                                 Anvil::ImageLayout*         out_opt_final_layout_ptr     = nullptr,
+                                                 bool*                       out_opt_may_alias_ptr        = nullptr) const;
+
+    const Anvil::BaseDevice* get_device() const
+    {
+        return m_device_ptr;
+    }
+
+    /* Returns the largest location assigned to color attachments for the specified subpass.
+     *
+     * If no color attachments have been defined for the queried subpass, UINT32_MAX is returned.
+     *
+     * @param in_subpass_id ID of the subpass to use for the query.
+     *
+     * @return As per description.
+     **/
+    uint32_t get_max_color_location_used_by_subpass(const SubPassID& in_subpass_id) const;
+
+    /* Returns the correlation masks specified for the renderpass.
+     *
+     * This function should only be called if is_multiview_enabled() returns true.
+     *
+     * @param out_n_correlation_masks_ptr   Deref will be set to the number of uint32s available for reading under *out_correlation_masks_ptr_ptr
+     *                                      after the function leaves. Must not be nullptr.
+     * @param out_correlation_masks_ptr_ptr Deref will be set to a pointer to an array of uint32s holding correlation masks. Must not be nullptr.
+     *
+     **/
+    void get_multiview_correlation_masks(uint32_t*        out_n_correlation_masks_ptr,
+                                         const uint32_t** out_correlation_masks_ptr_ptr) const;
+
+    /** Returns the number of added attachments */
+    uint32_t get_n_attachments() const
+    {
+        return static_cast<uint32_t>(m_attachments.size() );
+    }
+
+    /** Returns the number of added dependencies */
+    uint32_t get_n_dependencies() const
+    {
+        return static_cast<uint32_t>(m_subpass_dependencies.size() );
+    }
+
+    /** Returns the number of added subpasses */
+    uint32_t get_n_subpasses() const
+    {
+        return static_cast<uint32_t>(m_subpasses.size() );
+    }
+
+    /** Retrieves subpass attachment properties, as specified at creation time.
+     *
+     * Supports all attachment types except depth/stencil resolve attachment. You can retrieve details of the attachment
+     * by calling get_subpass_ds_resolve_attachment_properties() instead.
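Taken together, the attachment- and dependency-adding functions above are what an application calls while populating the create-info instance, before the structure is handed to Anvil::RenderPass. The fragment below is a sketch only: the Anvil::RenderPassCreateInfo class name is inferred from this header's include guard, the subpass and attachment IDs are assumed to come from add_subpass() and the attachment-registration calls (not shown in this diff), and the Vulkan-style enum member names are assumptions.

```cpp
// Illustrative fragment; enum member and helper names not declared in this header are assumptions.
void configure_simple_subpass(Anvil::RenderPassCreateInfo*  rp_info_ptr,
                              Anvil::SubPassID              subpass_id,          /* from add_subpass()                      */
                              Anvil::RenderPassAttachmentID color_attachment_id, /* from an attachment-registration call   */
                              Anvil::RenderPassAttachmentID ds_attachment_id)    /* from an attachment-registration call   */
{
    /* Bind the attachments to the subpass .. */
    rp_info_ptr->add_subpass_color_attachment        (subpass_id,
                                                      Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,
                                                      color_attachment_id,
                                                      0,        /* in_location                          */
                                                      nullptr); /* single-sample, no resolve attachment */
    rp_info_ptr->add_subpass_depth_stencil_attachment(subpass_id,
                                                      Anvil::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                                                      ds_attachment_id);

    /* .. and make color writes from previously submitted work visible to it. */
    rp_info_ptr->add_external_to_subpass_dependency(subpass_id,
                                                    Anvil::PipelineStageFlagBits::COLOR_ATTACHMENT_OUTPUT_BIT,
                                                    Anvil::PipelineStageFlagBits::COLOR_ATTACHMENT_OUTPUT_BIT,
                                                    Anvil::AccessFlagBits::COLOR_ATTACHMENT_WRITE_BIT,
                                                    Anvil::AccessFlagBits::COLOR_ATTACHMENT_WRITE_BIT,
                                                    Anvil::DependencyFlagBits::NONE);
}
```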
+ * + * @param in_subpass_id ID of the subpass to use for the query. + * @param in_attachment_type Type of the attachment to use for the query. Must not be ATTACHMENT_TYPE_PRESERVE. + * @param out_renderpass_attachment_id_ptr Deref will be set to the ID of the renderpass attachment this subpass + * attachment uses. + * @param out_layout_ptr Deref will be set to the image layout assigned to the attachment for the specific + * subpass. Must be NULL if @param in_attachment_type is ATTACHMENT_TYPE_PRESERVE. + * @param out_opt_aspects_accessed_ptr Only relevant for input attachments and KHR_maintenance2 is supported. + * + * @return true if successful, false otherwise. + */ + bool get_subpass_attachment_properties(SubPassID in_subpass_id, + AttachmentType in_attachment_type, + uint32_t in_n_subpass_attachment, + RenderPassAttachmentID* out_renderpass_attachment_id_ptr, + Anvil::ImageLayout* out_layout_ptr, + Anvil::ImageAspectFlags* out_opt_aspects_accessed_ptr = nullptr, + RenderPassAttachmentID* out_opt_attachment_resolve_id_ptr = nullptr, + uint32_t* out_opt_location_ptr = nullptr) const; + + /* TODO. + * + * Returns false if no resolve attachment has been specified for d/ds/s attachment. + */ + bool get_subpass_ds_resolve_attachment_properties(SubPassID in_subpass_id, + RenderPassAttachmentID* out_opt_renderpass_attachment_id_ptr = nullptr, + Anvil::ImageLayout* out_opt_layout_ptr = nullptr, + Anvil::ResolveModeFlagBits* out_opt_depth_resolve_mode_ptr = nullptr, + Anvil::ResolveModeFlagBits* out_opt_stencil_resolve_mode_ptr = nullptr) const; + + /* Returns highest location used by subpass attachments. + * + * @param in_subpass_id ID of the subpass to issue the query against. + * @param out_result_ptr If function returns true, deref will be set to the highest location specified when adding attachments + * to this subpass. + * + * @return true if successful, false otherwise. The latter will be returned if no attachments have been added to the subpass, prior + * to making this call. + */ + bool get_subpass_highest_location(SubPassID in_subpass_id, + uint32_t* out_result_ptr) const; + + /** Returns the number of attachments of user-specified type, defined for the specified subpass. + * + * @param in_subpass_id As per description. + * @param in_attachment_type Type of the attachment to use for the query. + * @param out_n_attachments_ptr Deref will be set to the value above. Must not be nullptr. + * Will only be touched if the function returns true. + * + * @return true if the function was successful, false otherwise. + **/ + bool get_subpass_n_attachments(SubPassID in_subpass_id, + AttachmentType in_attachment_type, + uint32_t* out_n_attachments_ptr) const; + + /* Returns the view mask associated with a given subpass. + * + * This function should only be called if is_multiview_enabled() returns true. + * + * @param in_subpass_id ID of the subpass to use for the query. + * @param out_view_mask_ptr If the function returns true, deref will be set to the view mask associated with the subpass. + * Must not be nullptr. + * + * @return true if successful, false otherwise. + **/ + bool get_subpass_view_mask(SubPassID in_subpass_id, + uint32_t* out_view_mask_ptr) const; + + /* Tells whether the renderpass uses multiview functionality. */ + bool is_multiview_enabled() const + { + return m_multiview_enabled; + } + + /* Specifies correlation mask(s) for the renderpass. + * + * If multiview is not already enabled for the renderpass, calling this function enables the functionality. 
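A short multiview sketch, combining set_subpass_view_mask() and set_correlation_masks() (both declared further below) with the queries above. It assumes subpass IDs are the sequential uint32 values handed out by add_subpass(); that is an assumption, not something this header guarantees.

```cpp
// Hypothetical helper; rp_info_ptr is an already-populated create-info instance.
void enable_two_view_multiview(Anvil::RenderPassCreateInfo* rp_info_ptr)
{
    const uint32_t view_mask        = 0x3; /* render to views 0 and 1                */
    const uint32_t correlation_mask = 0x3; /* views 0 and 1 may share rendering work */

    for (uint32_t n_subpass = 0; n_subpass < rp_info_ptr->get_n_subpasses(); ++n_subpass)
    {
        rp_info_ptr->set_subpass_view_mask(n_subpass, view_mask);
    }

    rp_info_ptr->set_correlation_masks(1, /* in_n_correlation_masks */
                                       &correlation_mask);

    anvil_assert(rp_info_ptr->is_multiview_enabled() );
}
```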
+ * + * Requires VK_KHR_multiview. + * + * @param in_n_correlation_masks Number of masks available for reading under @param in_correlation_masks_ptr. + * Must not be 0. + * @param in_correlation_masks_ptr Correlation masks to use for the renderpass. Please read VK_KHR_multiview spec + * for more details. + */ + void set_correlation_masks(const uint32_t& in_n_correlation_masks, + const uint32_t* in_correlation_masks_ptr); + + /** Specifies a view offset to use for a view-local dependency. + * + * If multiview is not already enabled for the renderpass, calling this function enables the functionality. + * + * Requires VK_KHR_multiview. + * + * @param in_subpass_id ID of the subpass to use. + * @param in_view_offset View offset to use for a view-local dependency defined at index @param in_n_dependency. Must not be zero + * for self-dependencies. + * + * @return true if successful, false otherwise. + */ + bool set_dependency_view_local_properties(const uint32_t& in_n_dependency, + const int32_t& in_view_offset); + + void set_device_ptr(const Anvil::BaseDevice* in_device_ptr) + { + m_device_ptr = in_device_ptr; + } + + /** Specifies the view mask for a subpass associated with ID @param in_subpass_id. + * + * If multiview is not already enabled for the renderpass, calling this function enables the functionality. + * The implication is the application needs to set the view mask for all subpasses defined for the renderpass, + * before the create info structure can be passed over to Anvil::RenderPass. + * + * Requires VK_KHR_multiview. + * + * @param in_subpass_id ID of the subpass to use. + * @param in_view_mask View mask to use for the subpass. Please consult VK_KHR_multiview spec for more details. + * + * @return true if successful, false otherwise. + */ + bool set_subpass_view_mask(SubPassID in_subpass_id, + const uint32_t& in_view_mask); + + private: + /* Private type definitions */ + + /** Holds properties of a single render-pass attachment. **/ + typedef struct RenderPassAttachment + { + Anvil::AttachmentLoadOp color_depth_load_op; + Anvil::AttachmentStoreOp color_depth_store_op; + Anvil::ImageLayout final_layout; + Anvil::Format format; + uint32_t index; + Anvil::ImageLayout initial_layout; + bool may_alias; + Anvil::SampleCountFlagBits sample_count; + Anvil::AttachmentLoadOp stencil_load_op; + Anvil::AttachmentStoreOp stencil_store_op; + AttachmentType type; + + /** Constructor. Should only be used for color attachments. + * + * @param in_format Format that will be used by the render-pass attachment. + * @param in_sample_count Number of samples of the render-pass attachment (expressed as enum value) + * @param in_load_op Load operation to use for the render-pass attachment. + * @param in_store_op Store operation to use for the render-pass attachment. + * @param in_initial_layout Initial layout fo the render-pass attachment. + * @param in_final_layout Layout to transfer the render-pass attachment to, after + * the render-pass finishes. + * @param in_may_alias true if the attachment's memory backing may alias with + * memory region of another attachment; false otherwise. + * @param in_index Index of the created render-pass attachment. 
+ ***/ + RenderPassAttachment(Anvil::Format in_format, + Anvil::SampleCountFlagBits in_sample_count, + Anvil::AttachmentLoadOp in_load_op, + Anvil::AttachmentStoreOp in_store_op, + Anvil::ImageLayout in_initial_layout, + Anvil::ImageLayout in_final_layout, + bool in_may_alias, + uint32_t in_index) + { + color_depth_load_op = in_load_op; + color_depth_store_op = in_store_op; + final_layout = in_final_layout; + format = in_format; + index = in_index; + initial_layout = in_initial_layout; + may_alias = in_may_alias; + sample_count = in_sample_count; + stencil_load_op = Anvil::AttachmentLoadOp::DONT_CARE; + stencil_store_op = Anvil::AttachmentStoreOp::DONT_CARE; + type = Anvil::AttachmentType::COLOR; + } + + /** Constructor. Should only be used for depth/stencil attachments + * + * @param in_format Format that will be used by the render-pass attachment. + * @param in_sample_count Number of samples of the render-pass attachment (expressed as enum value) + * @param in_depth_load_op Load operation to use for the render-pass attachment's depth data. + * @param in_depth_store_op Store operation to use for the render-pass attachment's depth data. + * @param in_stencil_load_op Load operation to use for the render-pass attachment's stencil data. + * @param in_stencil_store_op Store operation to use for the render-pass attachment's stencil data. + * @param in_initial_layout Initial layout fo the render-pass attachment. + * @param in_final_layout Layout to transfer the render-pass attachment to, after + * the render-pass finishes. + * @param in_may_alias true if the attachment's memory backing may alias with + * memory region of another attachment; false otherwise. + * @param in_index Index of the created render-pass attachment. + **/ + RenderPassAttachment(Anvil::Format in_format, + Anvil::SampleCountFlagBits in_sample_count, + Anvil::AttachmentLoadOp in_depth_load_op, + Anvil::AttachmentStoreOp in_depth_store_op, + Anvil::AttachmentLoadOp in_stencil_load_op, + Anvil::AttachmentStoreOp in_stencil_store_op, + Anvil::ImageLayout in_initial_layout, + Anvil::ImageLayout in_final_layout, + bool in_may_alias, + uint32_t in_index) + { + color_depth_load_op = in_depth_load_op; + color_depth_store_op = in_depth_store_op; + final_layout = in_final_layout; + format = in_format; + index = in_index; + initial_layout = in_initial_layout; + may_alias = in_may_alias; + sample_count = in_sample_count; + stencil_load_op = in_stencil_load_op; + stencil_store_op = in_stencil_store_op; + type = Anvil::AttachmentType::DEPTH_STENCIL; + } + + /** Dummy constructor. This should only be used by STL containers. */ + RenderPassAttachment() + { + color_depth_load_op = Anvil::AttachmentLoadOp::UNKNOWN; + color_depth_store_op = Anvil::AttachmentStoreOp::UNKNOWN; + final_layout = Anvil::ImageLayout::UNKNOWN; + format = Anvil::Format::UNKNOWN; + index = UINT32_MAX; + initial_layout = Anvil::ImageLayout::UNKNOWN; + may_alias = false; + sample_count = static_cast(0); + stencil_load_op = Anvil::AttachmentLoadOp::UNKNOWN; + stencil_store_op = Anvil::AttachmentStoreOp::UNKNOWN; + type = Anvil::AttachmentType::UNKNOWN; + } + } RenderPassAttachment; + + typedef std::vector RenderPassAttachments; + + /* Holds properties of a sub-pass attachment */ + typedef struct SubPassAttachment + { + Anvil::ImageAspectFlags aspects_accessed; /* Only used for input attachments! */ + + Anvil::ResolveModeFlagBits depth_resolve_mode; /* Only used for MS D/DS attachments! 
*/ + Anvil::ResolveModeFlagBits stencil_resolve_mode; /* Only used for MS DS/S attachments! */ + + uint32_t attachment_index; + uint32_t highest_subpass_index; + Anvil::ImageLayout layout; + uint32_t lowest_subpass_index; + uint32_t resolve_attachment_index; + + /* Dummy constructor. Should only be used by STL containers */ + SubPassAttachment() + { + aspects_accessed = Anvil::ImageAspectFlagBits::NONE; + attachment_index = UINT32_MAX; + depth_resolve_mode = Anvil::ResolveModeFlagBits::NONE; + highest_subpass_index = UINT32_MAX; + layout = Anvil::ImageLayout::UNKNOWN; + lowest_subpass_index = UINT32_MAX; + resolve_attachment_index = UINT32_MAX; + stencil_resolve_mode = Anvil::ResolveModeFlagBits::NONE; + } + + /** Constructor. + * + * @param in_attachment_index Index of render-pass attachment that this sub-pass attachment should reference. + * Must not be UINT32_MAX. + * @param in_layout Layout to use for the attachment when executing the subpass. + * Driver takes care of transforming the attachment to the requested layout + * before subpass commands starts executing. + * @param in_opt_resolve_attachment_index If not UINT32_MAX, this should point to the render-pass attachment, to which + * MS data of @param in_attachment_ptr should be resolved. If UINT32_MAX, it is + * assumed the sub-pass should not resolve the MS data. + **/ + SubPassAttachment(const uint32_t& in_attachment_index, + Anvil::ImageLayout in_layout, + const uint32_t& in_opt_resolve_attachment_index, + const Anvil::ImageAspectFlags& in_opt_aspects_accessed, + const Anvil::ResolveModeFlagBits& in_depth_resolve_mode, + const Anvil::ResolveModeFlagBits& in_stencil_resolve_mode) + { + aspects_accessed = in_opt_aspects_accessed; + attachment_index = in_attachment_index; + depth_resolve_mode = in_depth_resolve_mode; + highest_subpass_index = UINT32_MAX; + layout = in_layout; + lowest_subpass_index = UINT32_MAX; + resolve_attachment_index = in_opt_resolve_attachment_index; + stencil_resolve_mode = in_stencil_resolve_mode; + } + } SubPassAttachment; + + typedef std::map LocationToSubPassAttachmentMap; + typedef LocationToSubPassAttachmentMap::const_iterator LocationToSubPassAttachmentMapConstIterator; + typedef std::vector SubPassAttachmentVector; + + /** Holds properties of a single sub-pass */ + typedef struct SubPass + { + LocationToSubPassAttachmentMap color_attachments_map; + SubPassAttachment depth_stencil_attachment; + SubPassAttachment ds_resolve_attachment; + uint32_t index; + uint32_t multiview_view_mask; + uint32_t n_highest_location_used; + LocationToSubPassAttachmentMap input_attachments_map; + SubPassAttachmentVector preserved_attachments; + LocationToSubPassAttachmentMap resolved_attachments_map; + + SubPassAttachment* get_color_attachment_at_index(uint32_t in_index) + { + return get_attachment_at_index(color_attachments_map, + in_index); + } + + SubPassAttachment* get_input_attachment_at_index(uint32_t in_index) + { + return get_attachment_at_index(input_attachments_map, + in_index); + } + + SubPassAttachment* get_resolved_attachment_at_index(uint32_t in_index) + { + return get_attachment_at_index(resolved_attachments_map, + in_index); + } + + /** Dummy constructor. This should only be used by STL containers */ + SubPass() + { + index = UINT32_MAX; + multiview_view_mask = 0; + n_highest_location_used = 0; + } + + /** Constructor. 
+ * + * @param in_index Index of the sub-pass + * + **/ + SubPass(uint32_t in_index) + { + index = in_index; + multiview_view_mask = 0; + n_highest_location_used = 0; + } + + /* Destructor */ + ~SubPass(); + + private: + /** Returns a pointer to the SubPassAttachment instance, assigned to the index specified + * under @param index in @param in_map. + **/ + SubPassAttachment* get_attachment_at_index(LocationToSubPassAttachmentMap& in_map, + uint32_t in_index) + { + uint32_t current_index = 0; + SubPassAttachment* result_ptr = nullptr; + + for (auto attachment_iterator = in_map.begin(); + attachment_iterator != in_map.end(); + ++attachment_iterator, ++current_index) + { + if (current_index == in_index) + { + result_ptr = &attachment_iterator->second; + + break; + } + } + + return result_ptr; + } + + SubPass (const SubPass&); + SubPass& operator=(const SubPass&); + } SubPass; + + typedef std::vector > SubPasses; + typedef SubPasses::const_iterator SubPassesConstIterator; + + /** Holds properties of a single subpass<->subpass dependency. */ + typedef struct SubPassDependency + { + Anvil::AccessFlags destination_access_mask; + Anvil::PipelineStageFlags destination_stage_mask; + Anvil::AccessFlags source_access_mask; + Anvil::PipelineStageFlags source_stage_mask; + + DependencyFlags flags; + const SubPass* destination_subpass_ptr; + const SubPass* source_subpass_ptr; + + int32_t multiview_view_offset; + + /** Constructor. + * + * @param in_destination_stage_mask Destination pipeline stage mask. + * @param in_destination_subpass_ptr Pointer to the descriptor of the destination subpass. + * If nullptr, it is assumed an external destination is requested. + * @param in_source_stage_mask Source pipeline stage mask. + * @param in_source_subpass_ptr Pointer to the descriptor of the source subpass. + * If nullptr, it is assumed an external source is requested. + * @param in_source_access_mask Source access mask. + * @param in_destination_access_mask Destination access mask. + * @param in_dependency_flags Flags to use for the dependency. + **/ + SubPassDependency(Anvil::PipelineStageFlags in_destination_stage_mask, + const SubPass* in_destination_subpass_ptr, + Anvil::PipelineStageFlags in_source_stage_mask, + const SubPass* in_source_subpass_ptr, + Anvil::AccessFlags in_source_access_mask, + Anvil::AccessFlags in_destination_access_mask, + const DependencyFlags& in_flags) + { + destination_stage_mask = in_destination_stage_mask; + destination_subpass_ptr = in_destination_subpass_ptr; + destination_access_mask = in_destination_access_mask; + flags = in_flags; + multiview_view_offset = INT32_MAX; + source_access_mask = in_source_access_mask; + source_stage_mask = in_source_stage_mask; + source_subpass_ptr = in_source_subpass_ptr; + } + + /** Dummy constructor. 
Should only be used by STL containers */ + SubPassDependency() + { + destination_subpass_ptr = nullptr; + multiview_view_offset = INT32_MAX; + source_subpass_ptr = nullptr; + } + + /** Comparator operator */ + bool operator==(const SubPassDependency& in) + { + return in.destination_access_mask == destination_access_mask && + in.destination_stage_mask == destination_stage_mask && + in.destination_subpass_ptr == destination_subpass_ptr && + in.flags == flags && + in.multiview_view_offset == multiview_view_offset && + in.source_access_mask == source_access_mask && + in.source_stage_mask == source_stage_mask && + in.source_subpass_ptr == source_subpass_ptr; + } + } SubPassDependency; + + typedef std::vector SubPassDependencies; + + + /* Private functions */ + + bool add_dependency (SubPass* in_destination_subpass_ptr, + SubPass* in_source_subpass_ptr, + Anvil::PipelineStageFlags in_source_stage_mask, + Anvil::PipelineStageFlags in_destination_stage_mask, + Anvil::AccessFlags in_source_access_mask, + Anvil::AccessFlags in_destination_access_mask, + Anvil::DependencyFlags in_dependency_flags); + bool add_subpass_color_input_attachment(SubPassID in_subpass_id, + bool in_is_color_attachment, + Anvil::ImageLayout in_input_layout, + RenderPassAttachmentID in_attachment_id, + uint32_t in_attachment_location, + bool in_should_resolve, + RenderPassAttachmentID in_resolve_attachment_id, + const Anvil::ImageAspectFlags& in_aspects_accessed); + + VkAttachmentReference get_attachment_reference_from_renderpass_attachment(const RenderPassAttachment& in_renderpass_attachment) const; + VkAttachmentReference get_attachment_reference_from_subpass_attachment (const SubPassAttachment& in_subpass_attachment) const; + VkAttachmentReference get_attachment_reference_for_resolve_attachment (const SubPassesConstIterator& in_subpass_iterator, + const LocationToSubPassAttachmentMapConstIterator& in_location_to_subpass_att_map_iterator) const; + void update_preserved_attachments () const; + + + /* Private variables */ + RenderPassAttachments m_attachments; + std::vector m_correlation_masks; + const Anvil::BaseDevice* m_device_ptr; + bool m_multiview_enabled; + SubPasses m_subpasses; + SubPassDependencies m_subpass_dependencies; + mutable bool m_update_preserved_attachments; + + friend class Anvil::RenderPass; + }; +}; + +#endif /* MISC_RENDERPASS_CREATE_INFO_H */ diff --git a/include/misc/rendering_surface_create_info.h b/include/misc/rendering_surface_create_info.h new file mode 100644 index 00000000..9b1812dc --- /dev/null +++ b/include/misc/rendering_surface_create_info.h @@ -0,0 +1,99 @@ +// +// Copyright (c) 2019 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef MISC_RENDERING_SURFACE_CREATE_INFO_H +#define MISC_RENDERING_SURFACE_CREATE_INFO_H + +#include "misc/types.h" + +namespace Anvil +{ + class RenderingSurfaceCreateInfo + { + public: + /* Creates a new rendering surface create info instance. */ + static RenderingSurfaceCreateInfoUniquePtr create(Anvil::Instance* in_instance_ptr, + const Anvil::BaseDevice* in_device_ptr, + const Anvil::Window* in_window_ptr, + MTSafety in_mt_safety = Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE); + + ~RenderingSurfaceCreateInfo(); + + const Anvil::BaseDevice* get_device_ptr() const + { + return m_device_ptr; + } + + const MTSafety& get_mt_safety() const + { + return m_mt_safety; + } + + Anvil::Instance* get_instance_ptr() const + { + return m_instance_ptr; + } + + const Anvil::Window* get_window_ptr() const + { + return m_window_ptr; + } + + void set_device_ptr(const Anvil::BaseDevice* in_device_ptr) + { + m_device_ptr = in_device_ptr; + } + + void set_mt_safety(const MTSafety& in_mt_safety) + { + m_mt_safety = in_mt_safety; + } + + void set_instance_ptr(Anvil::Instance* in_instance_ptr) + { + m_instance_ptr = in_instance_ptr; + } + + void set_window_ptr(const Anvil::Window* in_window_ptr) + { + m_window_ptr = in_window_ptr; + } + + private: + /* Private type definitions */ + + /* Private functions */ + + RenderingSurfaceCreateInfo(Anvil::Instance* in_instance_ptr, + const Anvil::BaseDevice* in_device_ptr, + const Anvil::Window* in_window_ptr, + MTSafety in_mt_safety); + + /* Private variables */ + const Anvil::BaseDevice* m_device_ptr; + Anvil::Instance* m_instance_ptr; + const Anvil::Window* m_window_ptr; + + MTSafety m_mt_safety; + }; +}; + +#endif /* MISC_RENDERING_SURFACE_CREATE_INFO_H */ diff --git a/include/misc/sampler_create_info.h b/include/misc/sampler_create_info.h new file mode 100644 index 00000000..475a9546 --- /dev/null +++ b/include/misc/sampler_create_info.h @@ -0,0 +1,295 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
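Typical use of the RenderingSurfaceCreateInfo class above is a single create() call whose result is handed to the rendering-surface wrapper. A minimal sketch, assuming instance_ptr, device_ptr and window_ptr are valid Anvil objects and that an Anvil::RenderingSurface::create() consumer exists (the wrapper itself is not part of this diff):

```cpp
auto surface_create_info_ptr = Anvil::RenderingSurfaceCreateInfo::create(instance_ptr,
                                                                         device_ptr,
                                                                         window_ptr); /* MT safety defaults to INHERIT_FROM_PARENT_DEVICE */

/* Assumed consumer; the RenderingSurface wrapper is declared elsewhere in the library. */
auto surface_ptr = Anvil::RenderingSurface::create(std::move(surface_create_info_ptr) );
```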
+// +#ifndef MISC_SAMPLER_CREATE_INFO_H +#define MISC_SAMPLER_CREATE_INFO_H + +#include "misc/types.h" + +namespace Anvil +{ + class SamplerCreateInfo + { + public: + /* Public functions */ + + /** TODO + * + * NOTE: By default, no YCbCr conversion will be attached to the created sampler. In order to adjust the setting, + * call set_sampler_ycbcr_conversion_ptr() before passing the create info struct to Sampler::create(). + * + * For argument discussion, please consult Vulkan API specification. + */ + static SamplerCreateInfoUniquePtr create(const Anvil::BaseDevice* in_device_ptr, + Anvil::Filter in_mag_filter, + Anvil::Filter in_min_filter, + Anvil::SamplerMipmapMode in_mipmap_mode, + Anvil::SamplerAddressMode in_address_mode_u, + Anvil::SamplerAddressMode in_address_mode_v, + Anvil::SamplerAddressMode in_address_mode_w, + float in_lod_bias, + float in_max_anisotropy, + bool in_compare_enable, + Anvil::CompareOp in_compare_op, + float in_min_lod, + float in_max_lod, + Anvil::BorderColor in_border_color, + bool in_use_unnormalized_coordinates, + MTSafety in_mt_safety = Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE); + + const Anvil::SamplerAddressMode& get_address_mode_u() const + { + return m_address_mode_u; + } + + const Anvil::SamplerAddressMode& get_address_mode_v() const + { + return m_address_mode_v; + } + + const Anvil::SamplerAddressMode& get_address_mode_w() const + { + return m_address_mode_w; + } + + const Anvil::BorderColor& get_border_color() const + { + return m_border_color; + } + + const Anvil::CompareOp& get_compare_op() const + { + return m_compare_op; + } + + const Anvil::BaseDevice* get_device() const + { + return m_device_ptr; + } + + const float& get_lod_bias() const + { + return m_lod_bias; + } + + const Anvil::Filter& get_mag_filter() const + { + return m_mag_filter; + } + + const float& get_max_anisotropy() const + { + return m_max_anisotropy; + } + + const float& get_max_lod() const + { + return m_max_lod; + } + + const Anvil::Filter& get_min_filter() const + { + return m_min_filter; + } + + const float& get_min_lod() const + { + return m_min_lod; + } + + const Anvil::SamplerMipmapMode& get_mipmap_mode() const + { + return m_mipmap_mode; + } + + const Anvil::MTSafety& get_mt_safety() const + { + return m_mt_safety; + } + + const Anvil::SamplerReductionMode& get_sampler_reduction_mode() const + { + return m_sampler_reduction_mode; + } + + const Anvil::SamplerYCbCrConversion* get_sampler_ycbcr_conversion_ptr() const + { + return m_sampler_ycbcr_conversion_ptr; + } + + const bool& is_compare_enabled() const + { + return m_compare_enable; + } + + void set_address_mode_u(const Anvil::SamplerAddressMode& in_address_mode_u) + { + m_address_mode_u = in_address_mode_u; + } + + void set_address_mode_v(const Anvil::SamplerAddressMode& in_address_mode_v) + { + m_address_mode_v = in_address_mode_v; + } + + void set_address_mode_w(const Anvil::SamplerAddressMode& in_address_mode_w) + { + m_address_mode_w = in_address_mode_w; + } + + void set_border_color(const Anvil::BorderColor& in_border_color) + { + m_border_color = in_border_color; + } + + void set_compare_op(const Anvil::CompareOp& in_compare_op) + { + m_compare_op = in_compare_op; + } + + void set_device(const Anvil::BaseDevice* in_device_ptr) + { + m_device_ptr = in_device_ptr; + } + + void set_lod_bias(const float& in_lod_bias) + { + m_lod_bias = in_lod_bias; + } + + void set_mag_filter(const Anvil::Filter& in_mag_filter) + { + m_mag_filter = in_mag_filter; + } + + void set_max_anisotropy(const float& in_max_anisotropy) + 
{ + m_max_anisotropy = in_max_anisotropy; + } + + void set_max_lod(const float& in_max_lod) + { + m_max_lod = in_max_lod; + } + + void set_min_filter(const Anvil::Filter& in_min_filter) + { + m_min_filter = in_min_filter; + } + + void set_min_lod(const float& in_min_lod) + { + m_min_lod = in_min_lod; + } + + void set_mipmap_mode(const Anvil::SamplerMipmapMode& in_mipmap_mode) + { + m_mipmap_mode = in_mipmap_mode; + } + + void set_mt_safety(const Anvil::MTSafety& in_mt_safety) + { + m_mt_safety = in_mt_safety; + } + + void set_is_compare_enabled(const bool& in_compare_enable) + { + m_compare_enable = in_compare_enable; + } + + /* NOTE: Requires VK_EXT_sampler_filter_minmax */ + void set_sampler_reduction_mode(const Anvil::SamplerReductionMode& in_reduction_mode) + { + m_sampler_reduction_mode = in_reduction_mode; + } + + /* Attaches or detaches already attached SamplerYCBCRConversion object from the create info struct. + * This information will be used at sampler creation time. + * + * NOTE: Requires VK_KHR_sampler_ycbcr_conversion + * + * @param in_sampler_ycbcr_conversion_ptr If not nullptr, the specified object will be passed to the implementation + * at sampler creation time by chaining VkSamplerYcbcrConversionInfo struct. + * If nullptr, the struct will not be chained. + **/ + void set_sampler_ycbcr_conversion_ptr(const Anvil::SamplerYCbCrConversion* in_sampler_ycbcr_conversion_ptr) + { + m_sampler_ycbcr_conversion_ptr = in_sampler_ycbcr_conversion_ptr; + } + + void set_uses_unnormalized_coordinates(const bool& in_use_unnormalized_coordinates) + { + m_use_unnormalized_coordinates = in_use_unnormalized_coordinates; + } + + const bool& uses_unnormalized_coordinates() const + { + return m_use_unnormalized_coordinates; + } + + private: + /* Private functions */ + + SamplerCreateInfo(const Anvil::BaseDevice* in_device_ptr, + Anvil::Filter in_mag_filter, + Anvil::Filter in_min_filter, + Anvil::SamplerMipmapMode in_mipmap_mode, + Anvil::SamplerAddressMode in_address_mode_u, + Anvil::SamplerAddressMode in_address_mode_v, + Anvil::SamplerAddressMode in_address_mode_w, + float in_lod_bias, + float in_max_anisotropy, + bool in_compare_enable, + Anvil::CompareOp in_compare_op, + float in_min_lod, + float in_max_lod, + Anvil::BorderColor in_border_color, + bool in_use_unnormalized_coordinates, + MTSafety in_mt_safety); + + /* Private variables */ + + Anvil::SamplerAddressMode m_address_mode_u; + Anvil::SamplerAddressMode m_address_mode_v; + Anvil::SamplerAddressMode m_address_mode_w; + Anvil::BorderColor m_border_color; + bool m_compare_enable; + Anvil::CompareOp m_compare_op; + float m_lod_bias; + Anvil::Filter m_mag_filter; + float m_max_anisotropy; + float m_max_lod; + Anvil::Filter m_min_filter; + float m_min_lod; + Anvil::SamplerMipmapMode m_mipmap_mode; + Anvil::MTSafety m_mt_safety; + Anvil::SamplerReductionMode m_sampler_reduction_mode; + const Anvil::SamplerYCbCrConversion* m_sampler_ycbcr_conversion_ptr; + bool m_use_unnormalized_coordinates; + + const Anvil::BaseDevice* m_device_ptr; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(SamplerCreateInfo); + ANVIL_DISABLE_COPY_CONSTRUCTOR(SamplerCreateInfo); + }; +}; /* namespace Anvil */ + +#endif /* MISC_SAMPLER_CREATE_INFO_H */ \ No newline at end of file diff --git a/include/misc/sampler_ycbcr_conversion_create_info.h b/include/misc/sampler_ycbcr_conversion_create_info.h new file mode 100644 index 00000000..c251bf9d --- /dev/null +++ b/include/misc/sampler_ycbcr_conversion_create_info.h @@ -0,0 +1,183 @@ +// +// Copyright (c) 2019 Advanced Micro 
Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef MISC_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_H +#define MISC_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_H + +#include "misc/types.h" + +namespace Anvil +{ + class SamplerYCbCrConversionCreateInfo + { + public: + /* Public functions */ + + /** TODO + * + * NOTE: This object can only be used for Vulkan devices with VK_KHR_sampler_ycbcr_conversion extension support. + * + * For argument discussion, please consult Vulkan API specification. + */ + static SamplerYCbCrConversionCreateInfoUniquePtr create(const Anvil::BaseDevice* in_device_ptr, + const Anvil::Format& in_format, + const Anvil::SamplerYCbCrModelConversion& in_ycbcr_model_conversion, + const Anvil::SamplerYCbCrRange& in_ycbcr_range, + const Anvil::ComponentMapping& in_components, + const Anvil::ChromaLocation& in_x_chroma_offset, + const Anvil::ChromaLocation& in_y_chroma_offset, + const Anvil::Filter& in_chroma_filter, + const bool& in_should_force_explicit_reconstruction, + MTSafety in_mt_safety = Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE); + + const Anvil::Filter& get_chroma_filter() const + { + return m_chroma_filter; + } + + const Anvil::ComponentMapping& get_components() const + { + return m_components; + } + + const Anvil::BaseDevice* get_device() const + { + return m_device_ptr; + } + + const Anvil::Format& get_format() const + { + return m_format; + } + + const Anvil::MTSafety& get_mt_safety() const + { + return m_mt_safety; + } + + const Anvil::SamplerYCbCrModelConversion& get_ycbcr_model_conversion() const + { + return m_ycbcr_model_conversion; + } + + const Anvil::ChromaLocation& get_x_chroma_offset() const + { + return m_x_chroma_offset; + } + + const Anvil::SamplerYCbCrRange& get_ycbcr_range() const + { + return m_ycbcr_range; + } + + const Anvil::ChromaLocation& get_y_chroma_offset() const + { + return m_y_chroma_offset; + } + + void set_chroma_filter(const Anvil::Filter& in_chroma_filter) + { + m_chroma_filter = in_chroma_filter; + } + + void set_components(const Anvil::ComponentMapping& in_components) + { + m_components = in_components; + } + + void set_device(const Anvil::BaseDevice* in_device_ptr) + { + m_device_ptr = in_device_ptr; + } + + void set_format(const Anvil::Format& in_format) + { + m_format = in_format; + } + + void set_mt_safety(const Anvil::MTSafety& in_mt_safety) + { + m_mt_safety = in_mt_safety; + } + + void set_ycbcr_model_conversion(const Anvil::SamplerYCbCrModelConversion& 
in_conversion) + { + m_ycbcr_model_conversion = in_conversion; + } + + void set_x_chroma_offset(const Anvil::ChromaLocation& in_x_chroma_offset) + { + m_x_chroma_offset = in_x_chroma_offset; + } + + void set_ycbcr_range(const Anvil::SamplerYCbCrRange& in_range) + { + m_ycbcr_range = in_range; + } + + void set_y_chroma_offset(const Anvil::ChromaLocation& in_y_chroma_offset) + { + m_y_chroma_offset = in_y_chroma_offset; + } + + void set_should_force_explicit_reconstruction(const bool& in_should_force_explicit_reconstruction) + { + m_should_force_explicit_reconstruction = in_should_force_explicit_reconstruction; + } + + const bool& should_force_explicit_reconstruction() const + { + return m_should_force_explicit_reconstruction; + } + + private: + /* Private functions */ + + SamplerYCbCrConversionCreateInfo(const Anvil::BaseDevice* in_device_ptr, + const Anvil::Format& in_format, + const Anvil::SamplerYCbCrModelConversion& in_ycbcr_model_conversion, + const Anvil::SamplerYCbCrRange& in_ycbcr_range, + const Anvil::ComponentMapping& in_components, + const Anvil::ChromaLocation& in_x_chroma_offset, + const Anvil::ChromaLocation& in_y_chroma_offset, + const Anvil::Filter& in_chroma_filter, + const bool& in_should_force_explicit_reconstruction, + MTSafety in_mt_safety); + + /* Private variables */ + + Anvil::Filter m_chroma_filter; + Anvil::ComponentMapping m_components; + const Anvil::BaseDevice* m_device_ptr; + Anvil::Format m_format; + MTSafety m_mt_safety; + bool m_should_force_explicit_reconstruction; + Anvil::ChromaLocation m_x_chroma_offset; + Anvil::ChromaLocation m_y_chroma_offset; + Anvil::SamplerYCbCrModelConversion m_ycbcr_model_conversion; + Anvil::SamplerYCbCrRange m_ycbcr_range; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(SamplerYCbCrConversionCreateInfo); + ANVIL_DISABLE_COPY_CONSTRUCTOR(SamplerYCbCrConversionCreateInfo); + }; +}; /* namespace Anvil */ + +#endif /* MISC_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_H */ \ No newline at end of file diff --git a/include/misc/semaphore_create_info.h b/include/misc/semaphore_create_info.h new file mode 100644 index 00000000..2a42845d --- /dev/null +++ b/include/misc/semaphore_create_info.h @@ -0,0 +1,154 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
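The two sampler-related create-info classes above closely follow VkSamplerCreateInfo and VkSamplerYcbcrConversionCreateInfo. Below is a sketch of a typical trilinear sampler; the enum member names mirror Vulkan and are assumptions (only the enum type names appear in this diff), as are ycbcr_conversion_ptr and the Sampler::create() call referenced by the comments above.

```cpp
auto sampler_create_info_ptr = Anvil::SamplerCreateInfo::create(device_ptr,
                                                                Anvil::Filter::LINEAR,             /* in_mag_filter */
                                                                Anvil::Filter::LINEAR,             /* in_min_filter */
                                                                Anvil::SamplerMipmapMode::LINEAR,
                                                                Anvil::SamplerAddressMode::REPEAT, /* U */
                                                                Anvil::SamplerAddressMode::REPEAT, /* V */
                                                                Anvil::SamplerAddressMode::REPEAT, /* W */
                                                                0.0f,                              /* in_lod_bias       */
                                                                1.0f,                              /* in_max_anisotropy */
                                                                false,                             /* in_compare_enable */
                                                                Anvil::CompareOp::NEVER,
                                                                0.0f,                              /* in_min_lod        */
                                                                VK_LOD_CLAMP_NONE,                 /* in_max_lod        */
                                                                Anvil::BorderColor::FLOAT_OPAQUE_BLACK,
                                                                false);                            /* in_use_unnormalized_coordinates */

/* Optional, requires VK_KHR_sampler_ycbcr_conversion: attach a previously created
 * conversion object before the struct is passed to Sampler::create(). */
sampler_create_info_ptr->set_sampler_ycbcr_conversion_ptr(ycbcr_conversion_ptr);
```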
+// +#ifndef MISC_SEMAPHORE_CREATE_INFO_H +#define MISC_SEMAPHORE_CREATE_INFO_H + +#include "misc/types.h" + +namespace Anvil +{ + class SemaphoreCreateInfo + { + public: + /* Public functions */ + + /* TODO. + * + * NOTE: Unless specified later with a corresponding set_..() invocation, the following parameters are assumed by default: + * + * - Exportable external semaphore handle type: none + * - MT safety: Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE + */ + static Anvil::SemaphoreCreateInfoUniquePtr create(const Anvil::BaseDevice* in_device_ptr); + + const Anvil::BaseDevice* get_device() const + { + return m_device_ptr; + } + + const Anvil::ExternalSemaphoreHandleTypeFlags& get_exportable_external_semaphore_handle_types() const + { + return m_exportable_external_semaphore_handle_types; + } + + #if defined(_WIN32) + /* Returns true if set_exportable_nt_handle_info() has been called prior to this call. + * Otherwise returns false. + * + * If the func returns true, *out_result_ptr is set to the queried data. + */ + bool get_exportable_nt_handle_info(const ExternalNTHandleInfo** out_result_ptr_ptr) const + { + bool result = false; + + if (m_exportable_nt_handle_info_specified) + { + *out_result_ptr_ptr = &m_exportable_nt_handle_info; + result = true; + } + + return result; + } + #endif + + const MTSafety& get_mt_safety() const + { + return m_mt_safety; + } + + void set_device(const Anvil::BaseDevice* in_device_ptr) + { + m_device_ptr = in_device_ptr; + } + + /* TODO + * + * Requires VK_KHR_external_semaphore. + */ + void set_exportable_external_semaphore_handle_types(const Anvil::ExternalSemaphoreHandleTypeFlags& in_external_handle_types) + { + m_exportable_external_semaphore_handle_types = in_external_handle_types; + } + + #if defined(_WIN32) + /* Lets the app specify additional details for exportable NT handles. + * + * If @param in_name is zero-sized, member of the VkExportSemaphoreWin32HandleInfoKHR struct, as chained to the VkSemaphoreCreateInfo struct chain, + * will be set to nullptr. 
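For a semaphore that should be exportable to another process or API, the usual flow is to create the info struct, mark the exportable handle types, then pass the struct on to the semaphore wrapper. A sketch follows; the OPAQUE_FD_BIT member name and the Anvil::Semaphore::create() call are assumptions, neither appears in this diff.

```cpp
auto semaphore_create_info_ptr = Anvil::SemaphoreCreateInfo::create(device_ptr);

#if !defined(_WIN32)
    /* Requires VK_KHR_external_semaphore; the handle-type flag name is assumed. */
    semaphore_create_info_ptr->set_exportable_external_semaphore_handle_types(Anvil::ExternalSemaphoreHandleTypeFlagBits::OPAQUE_FD_BIT);
#endif

/* Assumed consumer; the Semaphore wrapper is declared elsewhere in the library. */
auto semaphore_ptr = Anvil::Semaphore::create(std::move(semaphore_create_info_ptr) );
```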
+ * + * Requires VK_KHR_external_semaphore_win32 + */ + void set_exportable_nt_handle_info(const SECURITY_ATTRIBUTES* in_opt_attributes_ptr, + const DWORD& in_access, + const std::wstring& in_name) + { + anvil_assert(!m_exportable_nt_handle_info_specified); + + m_exportable_nt_handle_info.access = in_access; + m_exportable_nt_handle_info.name = in_name; + m_exportable_nt_handle_info_specified = true; + + if (in_opt_attributes_ptr != nullptr) + { + m_exportable_nt_handle_info_security_attributes = *in_opt_attributes_ptr; + m_exportable_nt_handle_info_security_attributes_specified = true; + + m_exportable_nt_handle_info.attributes_ptr = &m_exportable_nt_handle_info_security_attributes; + } + else + { + m_exportable_nt_handle_info.attributes_ptr = nullptr; + m_exportable_nt_handle_info_security_attributes_specified = false; + } + + } + #endif + + void set_mt_safety(const MTSafety& in_mt_safety) + { + m_mt_safety = in_mt_safety; + } + + private: + /* Private functions */ + SemaphoreCreateInfo(const Anvil::BaseDevice* in_device_ptr, + MTSafety in_mt_safety); + + /* Private variables */ + const Anvil::BaseDevice* m_device_ptr; + Anvil::ExternalSemaphoreHandleTypeFlags m_exportable_external_semaphore_handle_types; + Anvil::MTSafety m_mt_safety; + + #ifdef _WIN32 + ExternalNTHandleInfo m_exportable_nt_handle_info; + SECURITY_ATTRIBUTES m_exportable_nt_handle_info_security_attributes; + bool m_exportable_nt_handle_info_security_attributes_specified; + bool m_exportable_nt_handle_info_specified; + #endif + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(SemaphoreCreateInfo); + ANVIL_DISABLE_COPY_CONSTRUCTOR(SemaphoreCreateInfo); + }; + +}; /* namespace Anvil */ + +#endif /* MISC_SEMAPHORE_CREATE_INFO_H */ \ No newline at end of file diff --git a/include/misc/shader_module_cache.h b/include/misc/shader_module_cache.h index 6ec82d08..d4dafd2e 100644 --- a/include/misc/shader_module_cache.h +++ b/include/misc/shader_module_cache.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -23,7 +23,10 @@ #ifndef WRAPPERS_SHADER_MODULE_CACHE_H #define WRAPPERS_SHADER_MODULE_CACHE_H +#include "misc/mt_safety.h" #include "misc/types.h" +#include "wrappers/shader_module.h" + namespace Anvil { @@ -33,7 +36,7 @@ namespace Anvil * * Shader module cache is thread-safe. 
*/ - class ShaderModuleCache + class ShaderModuleCache : public MTSafetySupportProvider { public: /* Public functions */ @@ -42,26 +45,26 @@ namespace Anvil ~ShaderModuleCache(); /** TODO */ - static std::shared_ptr create(); + static Anvil::ShaderModuleCacheUniquePtr create(); /** TODO */ - std::shared_ptr get_cached_shader_module(std::weak_ptr in_device_ptr, - const char* in_spirv_blob, - uint32_t in_n_spirv_blob_bytes, - const std::string& in_cs_entrypoint_name, - const std::string& in_fs_entrypoint_name, - const std::string& in_gs_entrypoint_name, - const std::string& in_tc_entrypoint_name, - const std::string& in_te_entrypoint_name, - const std::string& in_vs_entrypoint_name); + Anvil::ShaderModuleUniquePtr get_cached_shader_module(const Anvil::BaseDevice* in_device_ptr, + const char* in_spirv_blob, + uint32_t in_n_spirv_blob_bytes, + const std::string& in_cs_entrypoint_name, + const std::string& in_fs_entrypoint_name, + const std::string& in_gs_entrypoint_name, + const std::string& in_tc_entrypoint_name, + const std::string& in_te_entrypoint_name, + const std::string& in_vs_entrypoint_name); private: /* Private type definitions */ typedef struct HashMapItem { - std::weak_ptr device_ptr; - std::vector spirv_blob; - std::shared_ptr shader_module_ptr; + const Anvil::BaseDevice* device_ptr; + std::vector spirv_blob; + Anvil::ShaderModuleUniquePtr shader_module_owned_ptr; std::string cs_entrypoint_name; std::string fs_entrypoint_name; @@ -70,19 +73,20 @@ namespace Anvil std::string te_entrypoint_name; std::string vs_entrypoint_name; - explicit HashMapItem(std::weak_ptr in_device_ptr, - const std::vector& in_spirv_blob, - const std::string& in_cs_entrypoint_name, - const std::string& in_fs_entrypoint_name, - const std::string& in_gs_entrypoint_name, - const std::string& in_tc_entrypoint_name, - const std::string& in_te_entrypoint_name, - const std::string& in_vs_entrypoint_name, - std::shared_ptr in_shader_module_ptr) + explicit HashMapItem(const Anvil::BaseDevice* in_device_ptr, + const std::vector& in_spirv_blob, + const std::string& in_cs_entrypoint_name, + const std::string& in_fs_entrypoint_name, + const std::string& in_gs_entrypoint_name, + const std::string& in_tc_entrypoint_name, + const std::string& in_te_entrypoint_name, + const std::string& in_vs_entrypoint_name, + Anvil::ShaderModule* in_shader_module_ptr) { - device_ptr = in_device_ptr; - spirv_blob = in_spirv_blob; - shader_module_ptr = in_shader_module_ptr; + device_ptr = in_device_ptr; + spirv_blob = in_spirv_blob; + shader_module_owned_ptr = Anvil::ShaderModuleUniquePtr(in_shader_module_ptr, + std::default_delete() ); cs_entrypoint_name = in_cs_entrypoint_name; fs_entrypoint_name = in_fs_entrypoint_name; @@ -92,15 +96,15 @@ namespace Anvil vs_entrypoint_name = in_vs_entrypoint_name; } - bool matches(std::weak_ptr in_device_ptr, - const char* in_spirv_blob, - uint32_t in_n_spirv_blob_bytes, - const std::string& in_cs_entrypoint_name, - const std::string& in_fs_entrypoint_name, - const std::string& in_gs_entrypoint_name, - const std::string& in_tc_entrypoint_name, - const std::string& in_te_entrypoint_name, - const std::string& in_vs_entrypoint_name) const + bool matches(const Anvil::BaseDevice* in_device_ptr, + const char* in_spirv_blob, + uint32_t in_n_spirv_blob_bytes, + const std::string& in_cs_entrypoint_name, + const std::string& in_fs_entrypoint_name, + const std::string& in_gs_entrypoint_name, + const std::string& in_tc_entrypoint_name, + const std::string& in_te_entrypoint_name, + const std::string& 
in_vs_entrypoint_name) const { bool result = (spirv_blob.size() * sizeof(spirv_blob.at(0)) == in_n_spirv_blob_bytes); @@ -116,7 +120,7 @@ namespace Anvil if (result) { - result = (device_ptr.lock() == in_device_ptr.lock() ); + result = (device_ptr == in_device_ptr); } if (result) @@ -130,14 +134,14 @@ namespace Anvil } } HashMapItem; - typedef std::forward_list HashMapItems; + typedef std::forward_list > HashMapItems; /* Private functions */ ShaderModuleCache(); - void cache (std::shared_ptr in_shader_module_ptr); - void update_subscriptions(bool in_should_init); + void cache (Anvil::ShaderModule* in_shader_module_ptr); + void update_subscriptions(bool in_should_init); size_t get_hash(const char* in_spirv_blob, uint32_t in_n_spirv_blob_bytes, @@ -148,12 +152,11 @@ namespace Anvil const std::string& in_te_entrypoint_name, const std::string& in_vs_entrypoint_name) const; - void on_object_about_to_be_released(CallbackArgument* in_callback_arg_ptr); - void on_object_registered (CallbackArgument* in_callback_arg_ptr); + void on_shader_module_object_about_to_be_released(CallbackArgument* in_callback_arg_ptr); + void on_shader_module_object_registered (CallbackArgument* in_callback_arg_ptr); /* Private variables */ - std::mutex m_cs; - std::map m_items; + std::map m_item_ptrs; ANVIL_DISABLE_ASSIGNMENT_OPERATOR(ShaderModuleCache); ANVIL_DISABLE_COPY_CONSTRUCTOR(ShaderModuleCache); diff --git a/include/misc/struct_chainer.h b/include/misc/struct_chainer.h new file mode 100644 index 00000000..c1092220 --- /dev/null +++ b/include/misc/struct_chainer.h @@ -0,0 +1,397 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
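With the shader_module_cache.h changes above, the cache now returns unique_ptr-wrapped modules and takes a raw device pointer instead of a weak_ptr. A usage sketch follows; device_ptr and the std::vector<char> SPIR-V blob are assumed to exist, and passing empty strings for unused stage entrypoints is an assumption based on the signature, not documented behaviour.

```cpp
auto cache_ptr = Anvil::ShaderModuleCache::create();

Anvil::ShaderModuleUniquePtr vs_module_ptr = cache_ptr->get_cached_shader_module(device_ptr,
                                                                                 spirv_blob.data(),
                                                                                 static_cast<uint32_t>(spirv_blob.size() ),
                                                                                 "",      /* in_cs_entrypoint_name */
                                                                                 "",      /* in_fs_entrypoint_name */
                                                                                 "",      /* in_gs_entrypoint_name */
                                                                                 "",      /* in_tc_entrypoint_name */
                                                                                 "",      /* in_te_entrypoint_name */
                                                                                 "main"); /* in_vs_entrypoint_name */
```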
+// + +#ifndef MISC_STRUCT_CHAINER_H +#define MISC_STRUCT_CHAINER_H + +#include "types.h" + +namespace Anvil +{ + typedef struct StructID + { + uint32_t data_offset; + bool is_helper_struct; + + StructID() + :data_offset (UINT32_MAX), + is_helper_struct(true), + valid (false) + { + /* Stub */ + } + + StructID(const uint32_t& in_data_offset, + const bool& in_is_helper_struct) + :data_offset (in_data_offset), + is_helper_struct(in_is_helper_struct), + valid (true) + { + /* Stub */ + } + + bool is_valid() const + { + return valid; + } + + StructID (const StructID&) = default; + StructID& operator=(const StructID&) = default; + + private: + bool valid; + + } StructID; + + template + struct StructChain + { + std::vector raw_data; + StructType* root_struct_ptr; + + StructChain(const uint32_t& in_raw_data_size) + :raw_data(in_raw_data_size) + { + root_struct_ptr = nullptr; + } + + const StructType* get_struct_with_id(const StructID& in_id) const + { + anvil_assert(!in_id.is_helper_struct); + anvil_assert( in_id.data_offset < raw_data.size() ); + + return reinterpret_cast(&raw_data.at(0) + in_id.data_offset); + } + + template + StructType2* get_struct_with_id(const StructID& in_id) + { + anvil_assert(!in_id.is_helper_struct); + anvil_assert( in_id.data_offset < raw_data.size() ); + + return reinterpret_cast(&raw_data.at(0) + in_id.data_offset); + } + + StructType* get_root_struct() + { + return reinterpret_cast(&raw_data.at(0) ); + } + + const StructType* get_root_struct() const + { + return reinterpret_cast(&raw_data.at(0) ); + } + }; + + template + using StructChainUniquePtr = std::unique_ptr >; + + template + struct StructChainVector + { + void append_struct_chain(StructChainUniquePtr inout_struct_chain_ptr) + { + root_structs.push_back (*inout_struct_chain_ptr->get_root_struct() ); + struct_chain_ptrs.push_back(std::move(inout_struct_chain_ptr) ); + } + + uint32_t get_n_structs() const + { + return static_cast(root_structs.size() ); + } + const StructType* get_root_structs() const + { + return &root_structs.at(0); + } + + + private: + std::vector root_structs; + std::vector > struct_chain_ptrs; + }; + + template + class StructChainer + { + public: + /* Public functions */ + StructChainer() + :m_helper_structs_size(0), + m_structs_size (0) + { + /* Stub */ + } + + ~StructChainer() + { + /* Stub */ + } + + template + StructID append_struct(const ChainedStructType& in_struct) + { + const uint32_t pre_call_structs_size(m_structs_size); + std::vector struct_raw_data (sizeof(in_struct) ); + + anvil_assert(in_struct.pNext == nullptr); + + /* Zeroth item appended to the chain must be of StructType type! 
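+             *
+             * In other words, a chainer instantiated for a given StructType expects that root struct
+             * to be appended before any of the structs that are to hang off its pNext chain. The size
+             * comparison below is only a heuristic guard against violating that requirement.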
*/ + if (m_structs_size == 0 && + sizeof(in_struct) != sizeof(StructType) ) + { + anvil_assert_fail(); + } + + memcpy(struct_raw_data.data(), + &in_struct, + sizeof(in_struct) ); + + m_structs.push_back( + std::move(struct_raw_data) + ); + + m_structs_size += sizeof(in_struct); + + return StructID(pre_call_structs_size, + false /* in_is_helper_struct */); + } + + template + StructID store_helper_structure(const HelperStructType& in_helper_struct, + const StructID& in_referring_struct, + const uint32_t& in_referring_struct_pnext_ptr_offset) + { + const auto pre_call_helper_structs_size = m_helper_structs_size; + + m_helper_structs.push_back( + HelperStruct(&in_helper_struct, + sizeof(in_helper_struct), + in_referring_struct, + in_referring_struct_pnext_ptr_offset) + ); + + m_helper_structs_size += static_cast(sizeof(in_helper_struct) ); + + return StructID(pre_call_helper_structs_size, + true /* in_is_helper_struct */); + } + + template + StructID store_helper_structure_vector(const std::vector& in_helper_struct_vec, + const StructID& in_referring_struct, + const uint32_t& in_referring_struct_pnext_ptr_offset) + { + /* Convert the input vector to a single entry */ + anvil_assert(in_helper_struct_vec.size() > 0); + + const auto pre_call_helper_structs_size = m_helper_structs_size; + + m_helper_structs.push_back( + HelperStruct(&in_helper_struct_vec.at(0), + static_cast(in_helper_struct_vec.size() ) * sizeof(HelperStructType), + in_referring_struct, + in_referring_struct_pnext_ptr_offset) + ); + + m_helper_structs_size += static_cast(in_helper_struct_vec.size() ) * sizeof(HelperStructType); + + return StructID(pre_call_helper_structs_size, + true /* in_is_helper_struct */); + } + + std::unique_ptr > create_chain(const void *pNextLast = nullptr) const + { + size_t helper_data_start_offset = 0; + size_t n_bytes_used = 0; + const uint32_t n_helper_structs = static_cast(m_helper_structs.size() ); + const uint32_t n_structs = static_cast(m_structs.size () ); + std::unique_ptr > result_ptr; + + /* Sanity checks */ + if (m_structs.size() == 0) + { + anvil_assert(m_structs.size() > 0); + + goto end; + } + + /* Allocate the result instance */ + result_ptr.reset( + new StructChain(m_structs_size + m_helper_structs_size) + ); + + if (result_ptr == nullptr) + { + anvil_assert(result_ptr != nullptr); + + goto end; + } + + /* Form the struct chain.. */ + for (uint32_t n_struct = 0; + n_struct < n_structs; + ++n_struct) + { + /* Copy struct contents to the final vector */ + const auto& current_struct_data = m_structs.at(n_struct); + const auto current_struct_data_size = current_struct_data.size(); + + memcpy(&result_ptr->raw_data.at(n_bytes_used), + ¤t_struct_data.at(0), + current_struct_data_size); + + /* Adjust pNext pointer to point at the next struct, if defined. 
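+                 *
+                 * Structs are copied back-to-back into raw_data, so struct N's pNext simply points at
+                 * struct N+1 within the same buffer; the last struct's pNext receives pNextLast, which
+                 * defaults to nullptr unless the caller supplied an externally-owned continuation.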
*/ + VkStructHeader *header_ptr = reinterpret_cast(&result_ptr->raw_data.at(n_bytes_used)); + if (n_struct != (n_structs - 1) ) + header_ptr->next_ptr = &result_ptr->raw_data.at(0) + n_bytes_used + current_struct_data_size; + else + header_ptr->next_ptr = pNextLast; + + n_bytes_used += current_struct_data_size; + } + + helper_data_start_offset = n_bytes_used; + + for (uint32_t n_helper_struct = 0; + n_helper_struct < n_helper_structs; + ++n_helper_struct) + { + const auto& current_helper_struct_props = m_helper_structs.at(n_helper_struct); + const auto& current_helper_struct_data = current_helper_struct_props.data; + const auto current_helper_struct_data_size = current_helper_struct_data.size(); + + /* Cache helper structure data */ + memcpy(&result_ptr->raw_data.at(n_bytes_used), + ¤t_helper_struct_data.at(0), + current_helper_struct_data_size); + + n_bytes_used += current_helper_struct_data_size; + } + + n_bytes_used = helper_data_start_offset; + + for (uint32_t n_helper_struct = 0; + n_helper_struct < n_helper_structs; + ++n_helper_struct) + { + /* Patch the field of the referring struct so that it points to the helper structure we cache locally */ + const auto& current_helper_struct_props = m_helper_structs.at(n_helper_struct); + const auto& current_helper_struct_data = current_helper_struct_props.data; + const auto current_helper_struct_data_size = current_helper_struct_data.size(); + + size_t patch_offset = current_helper_struct_props.referring_struct_id.data_offset + current_helper_struct_props.referring_struct_ptr_offset; + + if (current_helper_struct_props.referring_struct_id.is_helper_struct) + { + patch_offset += helper_data_start_offset; + } + + *reinterpret_cast(&result_ptr->raw_data.at(patch_offset) ) = &result_ptr->raw_data.at(n_bytes_used); + n_bytes_used += current_helper_struct_data_size; + } + + result_ptr->root_struct_ptr = reinterpret_cast(&result_ptr->raw_data.at(0) ); + + /* Done. 
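+             *
+             * At this point raw_data holds the chained structs first, followed by the cached helper
+             * structures; each referring struct's pointer field has been patched to point into that
+             * helper region, and root_struct_ptr aliases the first struct in the buffer.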
*/ + end: + return result_ptr; + } + + StructType* get_last_struct() + { + anvil_assert(m_structs.size() > 0); + + return reinterpret_cast(&m_structs.at(m_structs.size() - 1).at(0) ); + } + + const StructType* get_last_struct() const + { + anvil_assert(m_structs.size() > 0); + + return reinterpret_cast(&m_structs.at(m_structs.size() - 1).at(0) ); + } + + uint32_t get_n_structs() const + { + return static_cast(m_structs.size() ); + } + + StructType* get_root_struct() + { + anvil_assert(m_structs.size() > 0); + + return reinterpret_cast(&m_structs.at(0).at(0) ); + } + + const StructType* get_root_struct() const + { + anvil_assert(m_structs.size() > 0); + + return reinterpret_cast(&m_structs.at(0).at(0) ); + } + + VkStructHeader* get_struct_at_index(const uint32_t& in_index) + { + VkStructHeader* result_ptr = nullptr; + + if (m_structs.size() > in_index) + { + result_ptr = reinterpret_cast(&m_structs.at(in_index).at(0) ); + } + + return result_ptr; + } + private: + /* Private type definitions */ + typedef struct HelperStruct + { + std::vector data; + StructID referring_struct_id; + uint32_t referring_struct_ptr_offset; + + HelperStruct(const void* in_data_ptr, + const uint32_t& in_data_size, + const StructID& in_referring_struct_id, + const uint32_t& in_referring_struct_ptr_offset) + :referring_struct_id (in_referring_struct_id), + referring_struct_ptr_offset(in_referring_struct_ptr_offset) + { + anvil_assert(in_data_ptr != nullptr); + anvil_assert(in_data_size != 0); + + data.resize(in_data_size); + + memcpy(&data.at(0), + in_data_ptr, + in_data_size); + } + } HelperStruct; + + /* Private functions */ + std::vector m_helper_structs; + uint32_t m_helper_structs_size; + std::vector > m_structs; + uint32_t m_structs_size; + }; +}; + +#endif /* MISC_STRUCT_CHAINER_H */ diff --git a/include/misc/swapchain_create_info.h b/include/misc/swapchain_create_info.h new file mode 100644 index 00000000..cec76095 --- /dev/null +++ b/include/misc/swapchain_create_info.h @@ -0,0 +1,259 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef MISC_SWAPCHAIN_CREATE_INFO_H +#define MISC_SWAPCHAIN_CREATE_INFO_H + +#include "misc/types.h" + +namespace Anvil +{ + class SwapchainCreateInfo + { + public: + /* Public functions */ + + /* TODO. + * + * NOTE: By default, the following parameters take default values as below. 
+ * + * - MGPU present mode flags: Anvil::DeviceGroupPresentModeFlagBits::LOCAL_BIT_KHR + * - MT safety: Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE + * - Swapchain create flags: None + * + * To modify these, use corresponding set_..() functions. + */ + static SwapchainCreateInfoUniquePtr create(Anvil::BaseDevice* in_device_ptr, + Anvil::RenderingSurface* in_parent_surface_ptr, + Anvil::Window* in_window_ptr, + Anvil::Format in_format, + Anvil::ColorSpaceKHR in_color_space, + Anvil::PresentModeKHR in_present_mode, + Anvil::ImageUsageFlags in_usage_flags, + uint32_t in_n_images, + const bool& in_clipped = true, + const Anvil::Swapchain* in_opt_old_swapchain_ptr = nullptr); + + const bool& get_clipped() const + { + return m_clipped; + } + + Anvil::ColorSpaceKHR get_color_space() const + { + return m_color_space; + } + + void get_view_format_list(const Anvil::Format** out_compatible_formats_ptr, + uint32_t* out_n_compatible_formats_ptr) const + { + anvil_assert(m_compatible_formats.size() > 0); + + *out_compatible_formats_ptr = &m_compatible_formats.at(0); + *out_n_compatible_formats_ptr = static_cast (m_compatible_formats.size() ); + } + + /** Returns device instance which has been used to create the swapchain */ + const Anvil::BaseDevice* get_device() const + { + return m_device_ptr; + } + + /** Returns flags used to create the swapchain. */ + const Anvil::SwapchainCreateFlags& get_flags() const + { + return m_flags; + } + + /** Returns format used by swapchain image and image views */ + Anvil::Format get_format() const + { + return m_format; + } + + Anvil::DeviceGroupPresentModeFlags get_mgpu_present_mode_flags() const + { + return m_mgpu_present_mode_flags; + } + + const MTSafety& get_mt_safety() const + { + return m_mt_safety; + } + + /** Tells how many images the swap-chain encapsulates. */ + uint32_t get_n_images() const + { + return m_n_images; + } + + const Anvil::Swapchain* get_old_swapchain() const + { + return m_old_swapchain_ptr; + } + + Anvil::PresentModeKHR get_present_mode() const + { + return m_present_mode; + } + + /** Retrieves parent rendering surface. */ + const Anvil::RenderingSurface* get_rendering_surface() const + { + return m_parent_surface_ptr; + } + + const Anvil::ImageUsageFlags& get_usage_flags() const + { + return m_usage_flags; + } + + /** Retrieves a window, to which the swapchain is bound. Note that under certain + * circumstances no window may be assigned. */ + Anvil::Window* get_window() const + { + return m_window_ptr; + } + + void set_clipped(const bool& in_clipped) + { + m_clipped = in_clipped; + } + + void set_color_space(const Anvil::ColorSpaceKHR& in_color_space) + { + m_color_space = in_color_space; + } + + void set_device(const Anvil::BaseDevice* in_device_ptr) + { + m_device_ptr = in_device_ptr; + } + + /* NOTE: If @param in_flags includes SwapchainCreateFlagBits::CREATE_MUTABLE_FORMAT_BIT, you must + * also call set_view_format_list() in order to specify the list of compatible formats BEFORE + * passing the create info struct to swapchain create function. 
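+         *
+         * Illustrative sketch (formats and variable names below are placeholders only):
+         *
+         *     const Anvil::Format compatible_formats[] =
+         *     {
+         *         base_format,      /* format passed at create() time         */
+         *         extra_view_format /* additional format image views may use  */
+         *     };
+         *
+         *     create_info_ptr->set_flags           (Anvil::SwapchainCreateFlagBits::CREATE_MUTABLE_FORMAT_BIT);
+         *     create_info_ptr->set_view_format_list(compatible_formats,
+         *                                           2 /* in_n_compatible_formats */);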
+ */ + void set_flags(const Anvil::SwapchainCreateFlags& in_flags) + { + m_flags = in_flags; + } + + void set_format(const Anvil::Format& in_format) + { + m_format = in_format; + } + + void set_mgpu_present_mode_flags(const Anvil::DeviceGroupPresentModeFlags& in_mgpu_present_mode_flags) + { + m_mgpu_present_mode_flags = in_mgpu_present_mode_flags; + } + + void set_mt_safety(const MTSafety& in_mt_safety) + { + m_mt_safety = in_mt_safety; + } + + void set_n_images(const uint32_t& in_n_images) + { + m_n_images = in_n_images; + } + + void set_old_swapchain(const Anvil::Swapchain* in_old_swapchain_ptr) + { + m_old_swapchain_ptr = in_old_swapchain_ptr; + } + + void set_present_mode(const Anvil::PresentModeKHR& in_present_mode) + { + m_present_mode = in_present_mode; + } + + void set_rendering_surface(Anvil::RenderingSurface* in_rendering_surface_ptr) + { + m_parent_surface_ptr = in_rendering_surface_ptr; + } + + void set_usage_flags(Anvil::ImageUsageFlags in_new_usage_flags) + { + m_usage_flags = in_new_usage_flags; + } + + /* Caches a list of image formats the created swapchain needs to be able to support. + * + * If SwapchainCreateFlagBits::CREATE_MUTABLE_FORMAT_BIT create flag has not been specified via set_flags(), it will + * be force-set by the function. + * + * @param in_compatible_formats_ptr Array of formats swapchain images need to support for image view usage. Must not be nullptr. + * @param in_n_compatible_formats Size of @param in_compatible_Formats_ptr array. Must be larger than 0. + * + * Requires VK_KHR_swapchain_mutable_format extension support. + */ + void set_view_format_list(const Anvil::Format* in_compatible_formats_ptr, + const uint32_t& in_n_compatible_formats); + + void set_window(Anvil::Window* in_window_ptr) + { + m_window_ptr = in_window_ptr; + } + + + private: + /* Private functions */ + + SwapchainCreateInfo(Anvil::BaseDevice* in_device_ptr, + Anvil::RenderingSurface* in_parent_surface_ptr, + Anvil::Window* in_window_ptr, + Anvil::Format in_format, + Anvil::ColorSpaceKHR in_color_space, + Anvil::PresentModeKHR in_present_mode, + Anvil::ImageUsageFlags in_usage_flags, + uint32_t in_n_images, + MTSafety in_mt_safety, + Anvil::SwapchainCreateFlags in_flags, + Anvil::DeviceGroupPresentModeFlags in_mgpu_present_mode_flags, + const bool& in_clipped, + const Anvil::Swapchain* in_opt_old_swapchain_ptr); + + /* Private variables */ + + bool m_clipped; + Anvil::ColorSpaceKHR m_color_space; + std::vector m_compatible_formats; + const Anvil::BaseDevice* m_device_ptr; + Anvil::SwapchainCreateFlags m_flags; + Anvil::Format m_format; + Anvil::DeviceGroupPresentModeFlags m_mgpu_present_mode_flags; + Anvil::MTSafety m_mt_safety; + uint32_t m_n_images; + const Anvil::Swapchain* m_old_swapchain_ptr; + Anvil::RenderingSurface* m_parent_surface_ptr; + Anvil::PresentModeKHR m_present_mode; + Anvil::Window* m_window_ptr; + + Anvil::ImageUsageFlags m_usage_flags; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(SwapchainCreateInfo); + ANVIL_DISABLE_COPY_CONSTRUCTOR(SwapchainCreateInfo); + }; +}; /* namespace Anvil */ + +#endif /* MISC_SWAPCHAIN_CREATE_INFO_H */ \ No newline at end of file diff --git a/include/misc/time.h b/include/misc/time.h index 730ca9c1..3c728dc3 100644 --- a/include/misc/time.h +++ b/include/misc/time.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal diff --git a/include/misc/types.h b/include/misc/types.h index d4ec02c8..5233a253 100644 --- a/include/misc/types.h +++ b/include/misc/types.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -24,6 +24,7 @@ #include #include +#include #include #include #include @@ -50,19 +51,19 @@ /* The following #define is required to include Vulkan entry-point prototype declarations. */ #ifdef _WIN32 - #if defined(ANVIL_INCLUDE_WIN3264_WINDOW_SYSTEM_SUPPORT) - #define VK_USE_PLATFORM_WIN32_KHR - #endif + #define VK_USE_PLATFORM_WIN32_KHR #else #if defined(ANVIL_INCLUDE_XCB_WINDOW_SYSTEM_SUPPORT) #define VK_USE_PLATFORM_XCB_KHR #endif + #if defined(ANVIL_INCLUDE_WAYLAND_WINDOW_SYSTEM_SUPPORT) + #define VK_USE_PLATFORM_WAYLAND_KHR + #endif #endif - #ifdef _WIN32 /* NOTE: Version clamp required for IsDebuggerPresent() */ - #define ANVIL_MIN_WIN32_WINNT_REQUIRED 0x0400 + #define ANVIL_MIN_WIN32_WINNT_REQUIRED 0x0501 #if !defined(_WIN32_WINNT) #define _WIN32_WINNT ANVIL_MIN_WIN32_WINNT_REQUIRED @@ -95,457 +96,63 @@ #endif #include - - #ifndef nullptr - #define nullptr NULL - #endif #endif -#include "vulkan/vulkan.h" +#include "misc/vulkan.h" #include "vulkan/vk_platform.h" #include #include #include -/* Sanity checks */ -#if !defined(VK_AMD_rasterization_order) - #error Vulkan SDK header used in the compilation process is too old. Please ensure deps\anvil\include\vulkan.h is used. 
-#endif - -/* Wrappers for some of the Vulkan enums we use across Anvil */ -#ifdef ANVIL_LITTLE_ENDIAN - #define VkAccessFlagsVariable(name) \ - union \ - { \ - VkAccessFlags name; \ - \ - struct \ - { \ - uint8_t VK_ACCESS_INDIRECT_COMMAND_READ_BIT : 1; \ - uint8_t VK_ACCESS_INDEX_READ_BIT : 1; \ - uint8_t VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT : 1; \ - uint8_t VK_ACCESS_UNIFORM_READ_BIT : 1; \ - uint8_t VK_ACCESS_INPUT_ATTACHMENT_READ_BIT : 1; \ - uint8_t VK_ACCESS_SHADER_READ_BIT : 1; \ - uint8_t VK_ACCESS_SHADER_WRITE_BIT : 1; \ - uint8_t VK_ACCESS_COLOR_ATTACHMENT_READ_BIT : 1; \ - uint8_t VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT : 1; \ - uint8_t VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT : 1; \ - uint8_t VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT : 1; \ - uint8_t VK_ACCESS_TRANSFER_READ_BIT : 1; \ - uint8_t VK_ACCESS_TRANSFER_WRITE_BIT : 1; \ - uint8_t VK_ACCESS_HOST_READ_BIT : 1; \ - uint8_t VK_ACCESS_HOST_WRITE_BIT : 1; \ - uint8_t VK_ACCESS_MEMORY_READ_BIT : 1; \ - uint8_t VK_ACCESS_MEMORY_WRITE_BIT : 1; \ - uint32_t OTHER: 15; \ - } name##_flags; \ - }; - - #define VkBufferCreateFlagsVariable(name) \ - union \ - { \ - VkBufferCreateFlags name; \ - \ - struct \ - { \ - uint8_t VK_BUFFER_CREATE_SPARSE_BINDING_BIT : 1; \ - uint8_t VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT : 1; \ - uint8_t VK_BUFFER_CREATE_SPARSE_ALIASED_BIT : 1; \ - uint32_t OTHER: 29; \ - } name##_flags; \ - }; - - #define VkBufferUsageFlagsVariable(name) \ - union \ - { \ - VkBufferUsageFlags name; \ - \ - struct \ - { \ - uint8_t VK_BUFFER_USAGE_TRANSFER_SRC_BIT : 1; \ - uint8_t VK_BUFFER_USAGE_TRANSFER_DST_BIT : 1; \ - uint8_t VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT : 1; \ - uint8_t VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT : 1; \ - uint8_t VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT : 1; \ - uint8_t VK_BUFFER_USAGE_STORAGE_BUFFER_BIT : 1; \ - uint8_t VK_BUFFER_USAGE_INDEX_BUFFER_BIT : 1; \ - uint8_t VK_BUFFER_USAGE_VERTEX_BUFFER_BIT : 1; \ - uint8_t VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT : 1; \ - uint32_t OTHER: 23; \ - } name##_flags; \ - }; - - #define VkColorComponentFlagsVariable(name) \ - union \ - { \ - VkColorComponentFlags name; \ - \ - struct \ - { \ - uint8_t VK_COLOR_COMPONENT_R_BIT : 1; \ - uint8_t VK_COLOR_COMPONENT_G_BIT : 1; \ - uint8_t VK_COLOR_COMPONENT_B_BIT : 1; \ - uint8_t VK_COLOR_COMPONENT_A_BIT : 1; \ - uint32_t OTHER: 28; \ - } name##_flags; \ - }; - - #define VkCompositeAlphaFlagsKHRVariable(name) \ - union \ - { \ - VkCompositeAlphaFlagsKHR name; \ - \ - struct \ - { \ - uint8_t VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR : 1; \ - uint8_t VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR : 1; \ - uint8_t VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR : 1; \ - uint8_t VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR : 1; \ - uint32_t OTHER: 28; \ - } name##_flags; \ - }; - - #define VkCullModeFlagsVariable(name) \ - union \ - { \ - VkCullModeFlags name; \ - \ - struct \ - { \ - uint8_t VK_CULL_MODE_FRONT_BIT : 1; \ - uint8_t VK_CULL_MODE_BACK_BIT : 1; \ - uint32_t OTHER: 30; \ - } name##_flags; \ - }; - - #define VkDependencyFlagsVariable(name) \ - union \ - { \ - VkDependencyFlags name; \ - \ - struct \ - { \ - uint8_t VK_DEPENDENCY_BY_REGION_BIT : 1; \ - uint32_t OTHER: 31; \ - } name##_flags; \ - }; - - #define VkFormatFeatureFlagsVariable(name) \ - union \ - { \ - VkFormatFeatureFlags name; \ - \ - struct \ - { \ - uint8_t VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT : 1; \ - uint8_t VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT : 1; \ - uint8_t VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT : 1; \ - uint8_t VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT : 1; \ - 
uint8_t VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT : 1; \ - uint8_t VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT : 1; \ - uint8_t VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT : 1; \ - uint8_t VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT : 1; \ - uint8_t VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT : 1; \ - uint8_t VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT : 1; \ - uint8_t VK_FORMAT_FEATURE_BLIT_SRC_BIT : 1; \ - uint8_t VK_FORMAT_FEATURE_BLIT_DST_BIT : 1; \ - uint8_t VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT : 1; \ - uint8_t VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG : 1; \ - uint8_t VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR : 1; \ - uint8_t VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR : 1; \ - uint32_t OTHER: 16; \ - } name##_flags; \ - }; - - #define VkImageAspectFlagsVariable(name) \ - union \ - { \ - VkImageAspectFlags name; \ - \ - struct \ - { \ - uint8_t VK_IMAGE_ASPECT_COLOR_BIT : 1; \ - uint8_t VK_IMAGE_ASPECT_DEPTH_BIT : 1; \ - uint8_t VK_IMAGE_ASPECT_STENCIL_BIT : 1; \ - uint8_t VK_IMAGE_ASPECT_METADATA_BIT : 1; \ - uint32_t OTHER: 28; \ - } name##_flags; \ - }; - - #define VkImageUsageFlagsVariable(name) \ - union \ - { \ - VkImageUsageFlags name; \ - \ - struct \ - { \ - uint8_t VK_IMAGE_USAGE_TRANSFER_SRC_BIT : 1; \ - uint8_t VK_IMAGE_USAGE_TRANSFER_DST_BIT : 1; \ - uint8_t VK_IMAGE_USAGE_SAMPLED_BIT : 1; \ - uint8_t VK_IMAGE_USAGE_STORAGE_BIT : 1; \ - uint8_t VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT : 1; \ - uint8_t VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT : 1; \ - uint8_t VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT : 1; \ - uint8_t VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT : 1; \ - uint32_t OTHER: 24; \ - } name##_flags; \ - }; - - #define VkMemoryHeapFlagsVariable(name) \ - union \ - { \ - VkMemoryHeapFlags name; \ - \ - struct \ - { \ - uint8_t VK_MEMORY_HEAP_DEVICE_LOCAL_BIT : 1; \ - uint32_t OTHER: 31; \ - } name##_flags; \ - }; - - #define VkMemoryPropertyFlagsVariable(name) \ - union \ - { \ - VkMemoryPropertyFlags name; \ - \ - struct \ - { \ - uint8_t VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT : 1; \ - uint8_t VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT : 1; \ - uint8_t VK_MEMORY_PROPERTY_HOST_COHERENT_BIT : 1; \ - uint8_t VK_MEMORY_PROPERTY_HOST_CACHED_BIT : 1; \ - uint8_t VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT : 1; \ - uint32_t OTHER: 27; \ - } name##_flags; \ - }; - - #define VkPipelineStageFlagsVariable(name) \ - union \ - { \ - VkPipelineStageFlags name; \ - \ - struct \ - { \ - uint8_t VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT : 1; \ - uint8_t VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT : 1; \ - uint8_t VK_PIPELINE_STAGE_VERTEX_INPUT_BIT : 1; \ - uint8_t VK_PIPELINE_STAGE_VERTEX_SHADER_BIT : 1; \ - uint8_t VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT : 1; \ - uint8_t VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT : 1; \ - uint8_t VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT : 1; \ - uint8_t VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT : 1; \ - uint8_t VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT : 1; \ - uint8_t VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT : 1; \ - uint8_t VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT : 1; \ - uint8_t VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT : 1; \ - uint8_t VK_PIPELINE_STAGE_TRANSFER_BIT : 1; \ - uint8_t VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT : 1; \ - uint8_t VK_PIPELINE_STAGE_HOST_BIT : 1; \ - uint8_t VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT : 1; \ - uint8_t VK_PIPELINE_STAGE_ALL_COMMANDS_BIT : 1; \ - uint32_t OTHER: 15; \ - } name##_flags; \ - }; - - #define VkQueryControlFlagsVariable(name) \ - union \ - { \ - VkQueryControlFlags name; \ - \ - struct \ - { \ - uint8_t 
VK_QUERY_CONTROL_PRECISE_BIT : 1; \ - uint32_t OTHER: 31; \ - } name##_flags; \ - }; - - #define VkQueryPipelineStatisticFlagsVariable(name) \ - union \ - { \ - VkQueryPipelineStatisticFlags name; \ - \ - struct \ - { \ - uint8_t VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT : 1; \ - uint8_t VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT : 1; \ - uint8_t VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT : 1; \ - uint8_t VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT : 1; \ - uint8_t VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT : 1; \ - uint8_t VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT : 1; \ - uint8_t VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT : 1; \ - uint8_t VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT : 1; \ - uint8_t VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT : 1; \ - uint8_t VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT : 1; \ - uint8_t VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT : 1; \ - uint32_t OTHER: 21; \ - } name##_flags; \ - }; - - #define VkQueryResultFlagsVariable(name) \ - union \ - { \ - VkQueryResultFlags name; \ - \ - struct \ - { \ - uint8_t VK_QUERY_RESULT_64_BIT : 1; \ - uint8_t VK_QUERY_RESULT_WAIT_BIT : 1; \ - uint8_t VK_QUERY_RESULT_WITH_AVAILABILITY_BIT : 1; \ - uint8_t VK_QUERY_RESULT_PARTIAL_BIT : 1; \ - uint32_t OTHER: 28; \ - } name##_flags; \ - }; - - #define VkQueueFlagsVariable(name) \ - union \ - { \ - VkQueueFlags name; \ - \ - struct \ - { \ - uint8_t VK_QUEUE_GRAPHICS_BIT : 1; \ - uint8_t VK_QUEUE_COMPUTE_BIT : 1; \ - uint8_t VK_QUEUE_TRANSFER_BIT : 1; \ - uint8_t VK_QUEUE_SPARSE_BINDING_BIT : 1; \ - uint32_t OTHER: 28; \ - } name##_flags; \ - }; - - #define VkSampleCountFlagsVariable(name) \ - union \ - { \ - VkSampleCountFlags name; \ - \ - struct \ - { \ - uint8_t VK_SAMPLE_COUNT_1_BIT : 1; \ - uint8_t VK_SAMPLE_COUNT_2_BIT : 1; \ - uint8_t VK_SAMPLE_COUNT_4_BIT : 1; \ - uint8_t VK_SAMPLE_COUNT_8_BIT : 1; \ - uint8_t VK_SAMPLE_COUNT_16_BIT : 1; \ - uint8_t VK_SAMPLE_COUNT_32_BIT : 1; \ - uint8_t VK_SAMPLE_COUNT_64_BIT : 1; \ - uint32_t OTHER: 25; \ - } name##_flags; \ - }; - - #define VkShaderStageFlagsVariable(name) \ - union \ - { \ - VkShaderStageFlags name; \ - \ - struct \ - { \ - uint8_t VK_SHADER_STAGE_VERTEX_BIT : 1; \ - uint8_t VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT : 1; \ - uint8_t VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT : 1; \ - uint8_t VK_SHADER_STAGE_GEOMETRY_BIT : 1; \ - uint8_t VK_SHADER_STAGE_FRAGMENT_BIT : 1; \ - uint8_t VK_SHADER_STAGE_COMPUTE_BIT : 1; \ - uint32_t OTHER: 26; \ - } name##_flags; \ - }; - - #define VkSparseImageFormatFlagsVariable(name) \ - union \ - { \ - VkSparseImageFormatFlags name; \ - \ - struct \ - { \ - uint8_t VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT : 1; \ - uint8_t VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT: 1; \ - uint8_t VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT: 1; \ - uint32_t OTHER: 29; \ - } name##_flags; \ - }; - - #define VkSparseMemoryBindFlagsVariable(name) \ - union \ - { \ - VkSparseMemoryBindFlags name; \ - \ - struct \ - { \ - uint8_t VK_SPARSE_MEMORY_BIND_METADATA_BIT : 1; \ - uint32_t OTHER: 31; \ - } name##_flags; \ - }; - - #define VkStencilFaceFlagsVariable(name) \ - union \ - { \ - VkStencilFaceFlags name; \ - \ - struct \ - { \ - uint8_t VK_STENCIL_FACE_FRONT_BIT : 1; \ - uint8_t VK_STENCIL_FACE_BACK_BIT : 1; \ - uint32_t OTHER: 30; \ - } name##_flags; \ - }; - - #define VkSurfaceTransformFlagsKHRVariable(name) \ 
- union \ - { \ - VkSurfaceTransformFlagsKHR name; \ - \ - struct \ - { \ - uint8_t VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR : 1; \ - uint8_t VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR : 1; \ - uint8_t VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR : 1; \ - uint8_t VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR : 1; \ - uint8_t VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR : 1; \ - uint8_t VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR : 1; \ - uint8_t VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR : 1; \ - uint8_t VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR : 1; \ - uint8_t VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR : 1; \ - uint32_t OTHER: 23; \ - } name##_flags; \ - }; -#else - #error "Big-endian arch's are not supported" -#endif - -/* Helper macros */ -#define ANVIL_DISABLE_ASSIGNMENT_OPERATOR(x) private: x& operator=(const x&); -#define ANVIL_DISABLE_COPY_CONSTRUCTOR(x) private: x(const x&); -#define ANVIL_REDUNDANT_ARGUMENT(x) x = x; -#define ANVIL_REDUNDANT_ARGUMENT_CONST(x) x; -#define ANVIL_REDUNDANT_VARIABLE(x) x = x; -#define ANVIL_REDUNDANT_VARIABLE_CONST(x) x; - -/* Defines various enums used by Vulkan API wrapper classes. */ +/* Forward declarations */ namespace Anvil { - /* Forward declarations */ class BaseDevice; + class BasePipelineCreateInfo; class Buffer; + class BufferCreateInfo; class BufferView; + class BufferViewCreateInfo; struct CallbackArgument; class CommandBufferBase; class CommandPool; + class ComputePipelineCreateInfo; class ComputePipelineManager; + class DebugMessenger; + class DebugMessengerCreateInfo; class DescriptorPool; + class DescriptorPoolCreateInfo; class DescriptorSet; + class DescriptorSetCreateInfo; class DescriptorSetGroup; class DescriptorSetLayout; + class DescriptorSetLayoutManager; + class DescriptorUpdateTemplate; + class DeviceCreateInfo; + class ExternalHandle; class Event; + class EventCreateInfo; class Fence; + class FenceCreateInfo; class Framebuffer; + class FramebufferCreateInfo; class GLSLShaderToSPIRVGenerator; + class GraphicsPipelineCreateInfo; class GraphicsPipelineManager; class Image; + class ImageCreateInfo; class ImageView; + class ImageViewCreateInfo; class Instance; + class InstanceCreateInfo; class MemoryAllocator; class MemoryBlock; + class MemoryBlockCreateInfo; struct MemoryHeap; struct MemoryProperties; struct MemoryType; + class MGPUDevice; class PhysicalDevice; class PipelineCache; class PipelineLayout; @@ -554,1912 +161,133 @@ namespace Anvil class QueryPool; class Queue; class RenderingSurface; + class RenderingSurfaceCreateInfo; class RenderPass; + class RenderPassCreateInfo; class Sampler; + class SamplerCreateInfo; + class SamplerYCbCrConversion; + class SamplerYCbCrConversionCreateInfo; class SecondaryCommandBuffer; class Semaphore; + class SemaphoreCreateInfo; class SGPUDevice; class ShaderModule; class ShaderModuleCache; class Swapchain; + class SwapchainCreateInfo; class Window; - /* Describes recognized subpass attachment types */ - enum AttachmentType - { - ATTACHMENT_TYPE_FIRST, - - ATTACHMENT_TYPE_COLOR = ATTACHMENT_TYPE_FIRST, - ATTACHMENT_TYPE_DEPTH_STENCIL, - ATTACHMENT_TYPE_INPUT, - ATTACHMENT_TYPE_PRESERVE, - ATTACHMENT_TYPE_RESOLVE, - - ATTACHMENT_TYPE_COUNT, - ATTACHMENT_TYPE_UNKNOWN = ATTACHMENT_TYPE_COUNT - }; - - /** Describes a buffer memory barrier. 
*/ - typedef struct BufferBarrier - { - VkAccessFlagsVariable(dst_access_mask); - VkAccessFlagsVariable(src_access_mask); - - VkBuffer buffer; - VkBufferMemoryBarrier buffer_barrier_vk; - std::shared_ptr buffer_ptr; - uint32_t dst_queue_family_index; - VkDeviceSize offset; - VkDeviceSize size; - uint32_t src_queue_family_index; - - /** Constructor. - * - * Note that @param in_buffer_ptr is retained by this function. - * - * @param in_source_access_mask Source access mask to use for the barrier. - * @param in_destination_access_mask Destination access mask to use for the barrier. - * @param in_src_queue_family_index Source queue family index to use for the barrier. - * @param in_dst_queue_family_index Destination queue family index to use for the barrier. - * @param in_buffer_ptr Pointer to a Buffer instance the instantiated barrier - * refers to. Must not be nullptr. - * @param in_offset Start offset of the region described by the barrier. - * @param in_size Size of the region described by the barrier. - **/ - explicit BufferBarrier(VkAccessFlags in_source_access_mask, - VkAccessFlags in_destination_access_mask, - uint32_t in_src_queue_family_index, - uint32_t in_dst_queue_family_index, - std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - VkDeviceSize in_size); - - /** Destructor. - * - * Releases the encapsulated Buffer instance. - **/ - virtual ~BufferBarrier(); - - /** Copy constructor. - * - * Retains the Buffer instance stored in the input barrier. - * - * @param in Barrier instance to copy data from. - **/ - BufferBarrier(const BufferBarrier& in); - - /** Returns a Vulkan buffer memory barrier descriptor, whose configuration corresponds to - * to the configuration of this descriptor. - **/ - virtual VkBufferMemoryBarrier get_barrier_vk() const - { - return buffer_barrier_vk; - } - - /** Returns a pointer to the Vulkan descriptor, whose configuration corresponds to - * the configuration of this descriptor. - * - * The returned pointer remains valid for the duration of the Barrier descriptor's - * life-time. - **/ - const VkBufferMemoryBarrier* get_barrier_vk_ptr() const - { - return &buffer_barrier_vk; - } - - private: - BufferBarrier& operator=(const BufferBarrier&); - } BufferBarrier; - - /** Describes component layout of a format */ - typedef enum - { - /* NOTE: If the ordering used below needs to be changed, make sure to also update formats.cpp::layout_to_n_components */ - COMPONENT_LAYOUT_ABGR, - COMPONENT_LAYOUT_ARGB, - COMPONENT_LAYOUT_BGR, - COMPONENT_LAYOUT_BGRA, - COMPONENT_LAYOUT_D, - COMPONENT_LAYOUT_DS, - COMPONENT_LAYOUT_EBGR, - COMPONENT_LAYOUT_R, - COMPONENT_LAYOUT_RG, - COMPONENT_LAYOUT_RGB, - COMPONENT_LAYOUT_RGBA, - COMPONENT_LAYOUT_S, - COMPONENT_LAYOUT_XD, - - COMPONENT_LAYOUT_UNKNOWN - } ComponentLayout; - - typedef enum - { - EXTENSION_AVAILABILITY_ENABLE_IF_AVAILABLE, - EXTENSION_AVAILABILITY_IGNORE, - EXTENSION_AVAILABILITY_REQUIRE, - } ExtensionAvailability; - - typedef std::pair DeviceExtensionConfigurationItem; - - /** A struct which tells which extensions must (or should, if supported by the physical device) be enabled - * at device creation time. 
- */ - typedef struct DeviceExtensionConfiguration - { - ExtensionAvailability amd_draw_indirect_count; - ExtensionAvailability amd_gcn_shader; - ExtensionAvailability amd_gpu_shader_half_float; - ExtensionAvailability amd_gpu_shader_int16; - ExtensionAvailability amd_negative_viewport_height; - ExtensionAvailability amd_rasterization_order; - ExtensionAvailability amd_shader_ballot; - ExtensionAvailability amd_shader_explicit_vertex_parameter; - ExtensionAvailability amd_shader_trinary_minmax; - ExtensionAvailability amd_texture_gather_bias_lod; - ExtensionAvailability ext_debug_marker; - ExtensionAvailability ext_shader_subgroup_ballot; - ExtensionAvailability ext_shader_subgroup_vote; - ExtensionAvailability khr_16bit_storage; - ExtensionAvailability khr_maintenance1; - ExtensionAvailability khr_storage_buffer_storage_class; - ExtensionAvailability khr_surface; - ExtensionAvailability khr_swapchain; - - std::vector other_extensions; - - - DeviceExtensionConfiguration(); - - bool is_supported_by_physical_device(std::weak_ptr in_physical_device_ptr, - std::vector* out_opt_unsupported_extensions_ptr = nullptr) const; - bool operator== (const DeviceExtensionConfiguration& in) const; - } DeviceExtensionConfiguration; - - /** Tells the type of an Anvil::BaseDevice instance */ - typedef enum - { - /* BaseDevice is implemented by SGPUDevice class */ - DEVICE_TYPE_SINGLE_GPU, - - } DeviceType; - - /** Holds properties of a single Vulkan Extension */ - typedef struct Extension - { - std::string name; - uint32_t version; - - /** Constructor. Initializes the instance using data provided by the driver. - * - * @param in_extension_props Vulkan structure to use for initialization. - **/ - explicit Extension(const VkExtensionProperties& in_extension_props) - { - name = in_extension_props.extensionName; - version = in_extension_props.specVersion; - } - - /** Returns true if @param in_extension_name matches the extension described by the instance. 
*/ - bool operator==(const std::string& in_extension_name) const - { - return name == in_extension_name; - } - } Extension; - - typedef std::vector Extensions; - - typedef struct ExtensionAMDDrawIndirectCountEntrypoints - { - PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD; - PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD; - - ExtensionAMDDrawIndirectCountEntrypoints() - { - vkCmdDrawIndexedIndirectCountAMD = nullptr; - vkCmdDrawIndirectCountAMD = nullptr; - } - } ExtensionAMDDrawIndirectCountEntrypoints; - - typedef struct ExtensionEXTDebugMarkerEntrypoints - { - PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT; - PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT; - PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT; - PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT; - PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT; - - ExtensionEXTDebugMarkerEntrypoints() - { - vkCmdDebugMarkerBeginEXT = nullptr; - vkCmdDebugMarkerEndEXT = nullptr; - vkCmdDebugMarkerInsertEXT = nullptr; - vkDebugMarkerSetObjectNameEXT = nullptr; - vkDebugMarkerSetObjectTagEXT = nullptr; - } - } ExtensionEXTDebugMarkerEntrypoints; - - typedef struct ExtensionEXTDebugReportEntrypoints - { - PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT; - PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT; - - ExtensionEXTDebugReportEntrypoints() - { - vkCreateDebugReportCallbackEXT = nullptr; - vkDestroyDebugReportCallbackEXT = nullptr; - } - } ExtensionEXTDebugReportEntrypoints; - - typedef struct ExtensionKHRGetPhysicalDeviceProperties2 - { - PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR; - PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR; - PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR; - PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR; - PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR; - - ExtensionKHRGetPhysicalDeviceProperties2() - { - vkGetPhysicalDeviceFeatures2KHR = nullptr; - vkGetPhysicalDeviceFormatProperties2KHR = nullptr; - vkGetPhysicalDeviceImageFormatProperties2KHR = nullptr; - vkGetPhysicalDeviceMemoryProperties2KHR = nullptr; - vkGetPhysicalDeviceProperties2KHR = nullptr; - vkGetPhysicalDeviceQueueFamilyProperties2KHR = nullptr; - vkGetPhysicalDeviceSparseImageFormatProperties2KHR = nullptr; - } - } ExtensionKHRGetPhysicalDeviceProperties2; - - typedef struct ExtensionKHRMaintenance1Entrypoints - { - PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR; - - ExtensionKHRMaintenance1Entrypoints() - { - vkTrimCommandPoolKHR = nullptr; - } - } ExtensionKHRMaintenance1Entrypoints; - - typedef struct ExtensionKHRSurfaceEntrypoints - { - PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR; - PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR; - PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR; - PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR; - PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR; - - ExtensionKHRSurfaceEntrypoints() - { - vkDestroySurfaceKHR = nullptr; - vkGetPhysicalDeviceSurfaceCapabilitiesKHR = nullptr; - 
vkGetPhysicalDeviceSurfaceFormatsKHR = nullptr; - vkGetPhysicalDeviceSurfacePresentModesKHR = nullptr; - vkGetPhysicalDeviceSurfaceSupportKHR = nullptr; - } - } ExtensionKHRSurfaceEntrypoints; - - typedef struct ExtensionKHRSwapchainEntrypoints - { - PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR; - PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR; - PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR; - PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR; - PFN_vkQueuePresentKHR vkQueuePresentKHR; - - ExtensionKHRSwapchainEntrypoints() - { - vkAcquireNextImageKHR = nullptr; - vkCreateSwapchainKHR = nullptr; - vkDestroySwapchainKHR = nullptr; - vkGetSwapchainImagesKHR = nullptr; - vkQueuePresentKHR = nullptr; - } - } ExtensionKHRSwapchainEntrypoints; - - #ifdef _WIN32 - #if defined(ANVIL_INCLUDE_WIN3264_WINDOW_SYSTEM_SUPPORT) - typedef struct ExtensionKHRWin32SurfaceEntrypoints - { - PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR; - PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR; - - ExtensionKHRWin32SurfaceEntrypoints() - { - vkCreateWin32SurfaceKHR = nullptr; - vkGetPhysicalDeviceWin32PresentationSupportKHR = nullptr; - } - } ExtensionKHRWin32SurfaceEntrypoints; - #endif + typedef std::unique_ptr > BaseDeviceUniquePtr; + typedef std::unique_ptr BasePipelineCreateInfoUniquePtr; + typedef std::unique_ptr BufferCreateInfoUniquePtr; + typedef std::unique_ptr > BufferUniquePtr; + typedef std::unique_ptr BufferViewCreateInfoUniquePtr; + typedef std::unique_ptr > BufferViewUniquePtr; + typedef std::unique_ptr > CommandBufferBaseUniquePtr; + typedef std::unique_ptr > CommandPoolUniquePtr; + typedef std::unique_ptr ComputePipelineCreateInfoUniquePtr; + typedef std::unique_ptr DebugMessengerCreateInfoUniquePtr; + typedef std::unique_ptr > DebugMessengerUniquePtr; + typedef std::unique_ptr DescriptorPoolCreateInfoUniquePtr; + typedef std::unique_ptr > DescriptorPoolUniquePtr; + typedef std::unique_ptr DescriptorSetCreateInfoUniquePtr; + typedef std::unique_ptr > DescriptorSetGroupUniquePtr; + typedef std::unique_ptr > DescriptorSetLayoutUniquePtr; + typedef std::unique_ptr > DescriptorSetLayoutManagerUniquePtr; + typedef std::unique_ptr > DescriptorSetUniquePtr; + typedef std::unique_ptr > DescriptorUpdateTemplateUniquePtr; + typedef std::unique_ptr DeviceCreateInfoUniquePtr; + typedef std::unique_ptr > ExternalHandleUniquePtr; + typedef std::unique_ptr EventCreateInfoUniquePtr; + typedef std::unique_ptr > EventUniquePtr; + typedef std::unique_ptr FenceCreateInfoUniquePtr; + typedef std::unique_ptr > FenceUniquePtr; + typedef std::unique_ptr FramebufferCreateInfoUniquePtr; + typedef std::unique_ptr > FramebufferUniquePtr; + typedef std::unique_ptr > GLSLShaderToSPIRVGeneratorUniquePtr; + typedef std::unique_ptr GraphicsPipelineCreateInfoUniquePtr; + typedef std::unique_ptr GraphicsPipelineManagerUniquePtr; + typedef std::unique_ptr ImageCreateInfoUniquePtr; + typedef std::unique_ptr > ImageUniquePtr; + typedef std::unique_ptr ImageViewCreateInfoUniquePtr; + typedef std::unique_ptr > ImageViewUniquePtr; + typedef std::unique_ptr InstanceCreateInfoUniquePtr; + typedef std::unique_ptr > InstanceUniquePtr; + typedef std::unique_ptr > MemoryAllocatorUniquePtr; + typedef std::unique_ptr MemoryBlockCreateInfoUniquePtr; + typedef std::unique_ptr > MemoryBlockUniquePtr; + typedef std::unique_ptr > MGPUDeviceUniquePtr; + typedef std::unique_ptr > PipelineCacheUniquePtr; + typedef std::unique_ptr > PipelineLayoutManagerUniquePtr; + typedef std::unique_ptr > 
PipelineLayoutUniquePtr; + typedef std::unique_ptr > PrimaryCommandBufferUniquePtr; + typedef std::unique_ptr > QueryPoolUniquePtr; + typedef std::unique_ptr > RenderingSurfaceUniquePtr; + typedef std::unique_ptr RenderingSurfaceCreateInfoUniquePtr; + typedef std::unique_ptr RenderPassCreateInfoUniquePtr; + typedef std::unique_ptr > RenderPassUniquePtr; + typedef std::unique_ptr SamplerCreateInfoUniquePtr; + typedef std::unique_ptr > SamplerUniquePtr; + typedef std::unique_ptr SamplerYCbCrConversionCreateInfoUniquePtr; + typedef std::unique_ptr > SamplerYCbCrConversionUniquePtr; + typedef std::unique_ptr > SecondaryCommandBufferUniquePtr; + typedef std::unique_ptr SemaphoreCreateInfoUniquePtr; + typedef std::unique_ptr > SemaphoreUniquePtr; + typedef std::unique_ptr > SGPUDeviceUniquePtr; + typedef std::unique_ptr > ShaderModuleCacheUniquePtr; + typedef std::unique_ptr > ShaderModuleUniquePtr; + typedef std::unique_ptr SwapchainCreateInfoUniquePtr; + typedef std::unique_ptr > SwapchainUniquePtr; + typedef std::unique_ptr > WindowUniquePtr; +}; + +/* Defines various types used by Vulkan API wrapper classes. */ +namespace Anvil +{ + #if defined(_WIN32) + typedef HANDLE ExternalHandleType; #else - #if defined(ANVIL_INCLUDE_XCB_WINDOW_SYSTEM_SUPPORT) - typedef struct ExtensionKHRXcbSurfaceEntrypoints - { - PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR; - - ExtensionKHRXcbSurfaceEntrypoints() - { - vkCreateXcbSurfaceKHR = nullptr; - } - } ExtensionKHRXcbSurfaceEntrypoints; - #endif + typedef int ExternalHandleType; #endif - /** Holds driver-specific format capabilities */ - typedef struct FormatProperties - { - VkFormatFeatureFlagsVariable(buffer_capabilities); - VkFormatFeatureFlagsVariable(linear_tiling_capabilities); - VkFormatFeatureFlagsVariable(optimal_tiling_capabilities); - - /* Tells whether the format can be used with functions introduced in VK_AMD_texture_gather_bias_lod */ - bool supports_amd_texture_gather_bias_lod; - - /** Dummy constructor */ - FormatProperties() - { - memset(this, - 0, - sizeof(*this) ); - } - - /** Constructor. Initializes the instance using data provided by the driver. - * - * @param in_format_props Vulkan structure to use for initialization. - **/ - FormatProperties(const VkFormatProperties& in_format_props) - { - buffer_capabilities = in_format_props.bufferFeatures; - linear_tiling_capabilities = in_format_props.linearTilingFeatures; - optimal_tiling_capabilities = in_format_props.optimalTilingFeatures; - supports_amd_texture_gather_bias_lod = false; - } - } FormatProperties; - - extern bool operator==(const FormatProperties& in1, - const FormatProperties& in2); - - typedef enum - { - FORMAT_TYPE_SFLOAT, - FORMAT_TYPE_SFLOAT_UINT, - FORMAT_TYPE_SINT, - FORMAT_TYPE_SNORM, - FORMAT_TYPE_SRGB, - FORMAT_TYPE_SSCALED, - FORMAT_TYPE_UFLOAT, - FORMAT_TYPE_UINT, - FORMAT_TYPE_UNORM, - FORMAT_TYPE_UNORM_UINT, - FORMAT_TYPE_USCALED, - - FORMAT_TYPE_UNKNOWN, - } FormatType; - /** ID of an Anvil framebuffer's attachment */ typedef uint32_t FramebufferAttachmentID; - /** Describes an image memory barrier. */ - typedef struct ImageBarrier - { - VkAccessFlagsVariable(dst_access_mask); - VkAccessFlagsVariable(src_access_mask); - - bool by_region; - uint32_t dst_queue_family_index; - VkImage image; - VkImageMemoryBarrier image_barrier_vk; - std::shared_ptr image_ptr; - VkImageLayout new_layout; - VkImageLayout old_layout; - uint32_t src_queue_family_index; - VkImageSubresourceRange subresource_range; - - /** Constructor. 
- * - * Note that @param in_image_ptr is retained by this function. - * - * @param in_source_access_mask Source access mask to use for the barrier. - * @param in_destination_access_mask Destination access mask to use for the barrier. - * @param in_by_region_barrier true if this is a by-region barrier. - * @param in_old_layout Old layout of @param in_image_ptr to use for the barrier. - * @param in_new_layout New layout of @param in_image_ptr to use for the barrier. - * @param in_src_queue_family_index Source queue family index to use for the barrier. - * @param in_dst_queue_family_index Destination queue family index to use for the barrier. - * @param in_image_ptr Image instance the barrier refers to. May be nullptr, in which case - * "image" and "image_ptr" fields will be set to nullptr. - * The instance will be retained by this function. - * @param in_image_subresource_range Subresource range to use for the barrier. - * - **/ - ImageBarrier(VkAccessFlags in_source_access_mask, - VkAccessFlags in_destination_access_mask, - bool in_by_region_barrier, - VkImageLayout in_old_layout, - VkImageLayout in_new_layout, - uint32_t in_src_queue_family_index, - uint32_t in_dst_queue_family_index, - std::shared_ptr in_image_ptr, - VkImageSubresourceRange in_image_subresource_range); - - /** Destructor. - * - * Releases the encapsulated Image instance. - **/ - virtual ~ImageBarrier(); - - /** Copy constructor. - * - * Retains the Image instance stored in the input barrier. - * - * @param in Barrier instance to copy data from. - **/ - ImageBarrier(const ImageBarrier& in); - - /** Returns a Vulkan memory barrier descriptor, whose configuration corresponds to - * to the configuration of this descriptor. - **/ - virtual VkImageMemoryBarrier get_barrier_vk() const - { - return image_barrier_vk; - } - - /** Returns a pointer to the Vulkan descriptor, whose configuration corresponds to - * the configuration of this descriptor. - * - * The returned pointer remains valid for the duration of the Barrier descriptor's - * life-time. - **/ - const VkImageMemoryBarrier* get_barrier_vk_ptr() const - { - return &image_barrier_vk; - } - - private: - ImageBarrier& operator=(const ImageBarrier&); - } ImageBarrier; - - enum ImageCreateFlagBits - { - IMAGE_CREATE_FLAG_MUTABLE_FORMAT_BIT = 1 << 0, - IMAGE_CREATE_FLAG_CUBE_COMPATIBLE_BIT = 1 << 1, - - /* NOTE: Requires VK_KHR_maintenance1 */ - IMAGE_CREATE_FLAG_2D_ARRAY_COMPATIBLE_BIT = 1 << 2, - }; - typedef uint32_t ImageCreateFlags; - - /** Holds properties of a single Vulkan Layer. */ - typedef struct Layer - { - std::string description; - std::vector extensions; - uint32_t implementation_version; - std::string name; - uint32_t spec_version; - - /** Dummy constructor. - * - * @param in_layer_name Name to use for the layer. - **/ - Layer(const std::string& in_layer_name) - { - implementation_version = 0; - name = in_layer_name; - spec_version = 0; - } - - /** Constructor. Initializes the instance using data provided by the driver. - * - * @param in_layer_props Vulkan structure to use for initialization. - **/ - Layer(const VkLayerProperties& in_layer_props) - { - description = in_layer_props.description; - implementation_version = in_layer_props.implementationVersion; - name = in_layer_props.layerName; - spec_version = in_layer_props.specVersion; - } - - /** Returns true if @param in_layer_name matches the layer name described by the instance. 
*/ - bool operator==(const std::string& in_layer_name) const - { - return name == in_layer_name; - } - } Layer; - - typedef std::vector Layers; - - /** Describes a Vulkan memory barrier. */ - typedef struct MemoryBarrier - { - VkAccessFlagsVariable(destination_access_mask); - VkAccessFlagsVariable(source_access_mask); - - VkMemoryBarrier memory_barrier_vk; - - /** Constructor. - * - * @param in_source_access_mask Source access mask of the Vulkan memory barrier. - * @param in_destination_access_mask Destination access mask of the Vulkan memory barrier. - * - **/ - explicit MemoryBarrier(VkAccessFlags in_destination_access_mask, - VkAccessFlags in_source_access_mask) - { - destination_access_mask = static_cast(in_destination_access_mask); - source_access_mask = static_cast(in_source_access_mask); - - memory_barrier_vk.dstAccessMask = destination_access_mask; - memory_barrier_vk.pNext = nullptr; - memory_barrier_vk.srcAccessMask = source_access_mask; - memory_barrier_vk.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER; - } - - /** Destructor. */ - virtual ~MemoryBarrier() - { - /* Stub */ - } - - /** Returns a Vulkan memory barrier descriptor, whose configuration corresponds to - * to the configuration of this descriptor. - **/ - virtual VkMemoryBarrier get_barrier_vk() const - { - return memory_barrier_vk; - } - - /** Returns a pointer to the Vulkan descriptor, whose configuration corresponds to - * the configuration of this descriptor. - * - * The returned pointer remains valid for the duration of the Barrier descriptor's - * life-time. - **/ - virtual const VkMemoryBarrier* get_barrier_vk_ptr() const - { - return &memory_barrier_vk; - } - } MemoryBarrier; - - /** Holds properties of a single Vulkan Memory Heap. */ - typedef struct MemoryHeap - { - VkMemoryHeapFlagsVariable(flags); - - VkDeviceSize size; - - /** Stub constructor */ - MemoryHeap() - { - flags = 0; - size = 0; - } - } MemoryHeap; - - extern bool operator==(const MemoryHeap& in1, - const MemoryHeap& in2); - - typedef std::vector MemoryHeaps; - - enum MemoryFeatureFlagBits - { - /* NOTE: If more memory feature flags are added here, make sure to also update Anvil::Utils::get_vk_property_flags_from_memory_feature_flags() - * and Anvil::Utils::get_memory_feature_flags_from_vk_property_flags() - */ - - MEMORY_FEATURE_FLAG_DEVICE_LOCAL = 1 << 0, - MEMORY_FEATURE_FLAG_HOST_CACHED = 1 << 1, - MEMORY_FEATURE_FLAG_HOST_COHERENT = 1 << 2, - MEMORY_FEATURE_FLAG_LAZILY_ALLOCATED = 1 << 3, - MEMORY_FEATURE_FLAG_MAPPABLE = 1 << 4, - }; - typedef uint32_t MemoryFeatureFlags; - - /** Holds properties of a single Vulkan Memory Type. */ - typedef struct MemoryType - { - MemoryFeatureFlags features; - MemoryHeap* heap_ptr; - - VkMemoryPropertyFlagsVariable(flags); - - /** Constructor. Initializes the instance using data provided by the driver. - * - * @param in_type Vulkan structure to use for initialization. - * @param in_memory_props_ptr Used to initialize the MemoryHeap pointer member. Must not be nullptr. - **/ - explicit MemoryType(const VkMemoryType& in_type, - struct MemoryProperties* in_memory_props_ptr); - } MemoryType; - - extern bool operator==(const MemoryType& in1, - const MemoryType& in2); - - typedef std::vector MemoryTypes; - - /** Holds information about available memory heaps & types for a specific physical device. 
*/ - typedef struct MemoryProperties - { - MemoryHeap* heaps; - uint32_t n_heaps; - MemoryTypes types; - - MemoryProperties() - { - heaps = nullptr; - n_heaps = 0; - } - - /** Destructor */ - ~MemoryProperties() - { - if (heaps != nullptr) - { - delete [] heaps; - - heaps = nullptr; - } - } - - /** Constructor. Initializes the instance using data provided by the driver. - * - * @param in_mem_properties Vulkan structure to use for initialization. - **/ - void init(const VkPhysicalDeviceMemoryProperties& in_mem_properties); - - private: - MemoryProperties (const MemoryProperties&); - MemoryProperties& operator=(const MemoryProperties&); - } MemoryProperties; - - extern bool operator==(const MemoryProperties& in1, - const MemoryProperties& in2); - - /** Defines data for a single image mip-map. - * - * Use one of the static create_() functions to set up structure fields according to the target - * image type. - **/ - typedef struct MipmapRawData - { - /* Image aspect the mip-map data is specified for. */ - VkImageAspectFlagBits aspect; - - /* Start layer index */ - uint32_t n_layer; - - /* Number of layers to update */ - uint32_t n_layers; - - /* Number of 3D texture slices to update. For non-3D texture types, this field - * should be set to 1. */ - uint32_t n_slices; - - - /* Index of the mip-map to update. */ - uint32_t n_mipmap; - - - /* Pointer to a buffer holding raw data representation. The data structure is characterized by - * data_size, row_size and slice_size fields. - * - * It is assumed the data under the pointer is tightly packed, and stored in column->row->slice->layer - * order. - */ - std::shared_ptr linear_tightly_packed_data_uchar_ptr; - const unsigned char* linear_tightly_packed_data_uchar_raw_ptr; - std::shared_ptr > linear_tightly_packed_data_uchar_vec_ptr; - - - /* Total number of bytes available for reading under linear_tightly_packed_data_ptr */ - uint32_t data_size; - - /* Number of bytes each row takes */ - uint32_t row_size; - - - /** Creates a MipmapRawData instance which can be used to upload data to 1D Image instances: - * - * @param in_aspect Image aspect to modify. - * @param in_n_mipmap Index of the mipmap to be updated. - * @param in_linear_tightly_packed_data_ptr Pointer to raw mip-map data. - * @param in_linear_tightly_packed_data_vector_ptr Vector holding raw mip-map data. - * @param in_row_size Number of bytes each texture row takes. - * - * NOTE: Mipmap contents is NOT cached at call time. This implies raw pointers are ASSUMED to - * be valid at baking time. - * - * @return As per description. - **/ - static MipmapRawData create_1D_from_uchar_ptr (VkImageAspectFlagBits in_aspect, - uint32_t in_n_mipmap, - std::shared_ptr in_linear_tightly_packed_data_ptr, - uint32_t in_row_size); - static MipmapRawData create_1D_from_uchar_ptr (VkImageAspectFlagBits in_aspect, - uint32_t in_n_mipmap, - const unsigned char* in_linear_tightly_packed_data_vector_ptr, - uint32_t in_row_size); - static MipmapRawData create_1D_from_uchar_vector_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_mipmap, - std::shared_ptr > in_linear_tightly_packed_data_ptr, - uint32_t in_row_size); - - /** Creates a MipmapRawData instance which can be used to upload data to 1D Array Image instances: - * - * @param in_aspect Image aspect to modify. - * @param in_n_layer Index of a texture layer the mip-map data should be uploaded to. - * @param in_n_layers Number of texture layers to be updated. - * @param in_n_mipmap Index of the mipmap to be updated. 
- * @param in_linear_tightly_packed_data_ptr Pointer to raw mip-map data. - * @param in_linear_tightly_packed_data_vector_ptr Vector holding raw mip-map data. - * @param in_row_size Number of bytes each texture row takes. - * @param in_data_size Number of bytes available for reading under @param in_linear_tightly_packed_data_ptr. - * - * @return As per description. - **/ - static MipmapRawData create_1D_array_from_uchar_ptr (VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - std::shared_ptr in_linear_tightly_packed_data_ptr, - uint32_t in_row_size, - uint32_t in_data_size); - static MipmapRawData create_1D_array_from_uchar_ptr (VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - const unsigned char* in_linear_tightly_packed_data_ptr, - uint32_t in_row_size, - uint32_t in_data_size); - static MipmapRawData create_1D_array_from_uchar_vector_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - std::shared_ptr > in_linear_tightly_packed_data_ptr, - uint32_t in_row_size, - uint32_t in_data_size); - - /** Creates a MipmapRawData instance which can be used to upload data to 2D Image instances: - * - * @param in_aspect Image aspect to modify. - * @param in_n_mipmap Index of the mipmap to be updated. - * @param in_linear_tightly_packed_data_ptr Pointer to raw mip-map data. - * @param in_linear_tightly_packed_data_vector_ptr Vector holding raw mip-map data. - * @param in_data_size Number of bytes available for reading under @param in_linear_tightly_packed_data_ptr. - * @param in_row_size Number of bytes each texture row takes. - * - * @return As per description. - **/ - static MipmapRawData create_2D_from_uchar_ptr (VkImageAspectFlagBits in_aspect, - uint32_t in_n_mipmap, - std::shared_ptr in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size); - static MipmapRawData create_2D_from_uchar_ptr (VkImageAspectFlagBits in_aspect, - uint32_t in_n_mipmap, - const unsigned char* in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size); - static MipmapRawData create_2D_from_uchar_vector_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_mipmap, - std::shared_ptr > in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size); - - /** Creates a MipmapRawData instance which can be used to upload data to 2D Array Image instances: - * - * @param in_aspect Image aspect to modify. - * @param in_n_layer Index of a texture layer the mip-map data should be uploaded to. - * @param in_n_layers Number of texture layers to be updated. - * @param in_n_mipmap Index of the mipmap to be updated. - * @param in_linear_tightly_packed_data_ptr Pointer to raw mip-map data. - * @param in_linear_tightly_packed_data_vector_ptr Vector holding raw mip-map data. - * @param in_data_size Number of bytes available for reading under @param in_linear_tightly_packed_data_ptr. - * @param in_row_size Number of bytes each texture row takes. - * - * @return As per description. 
- **/ - static MipmapRawData create_2D_array_from_uchar_ptr (VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - std::shared_ptr in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size); - static MipmapRawData create_2D_array_from_uchar_ptr (VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - const unsigned char* in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size); - static MipmapRawData create_2D_array_from_uchar_vector_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - std::shared_ptr > in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size); - - /** Creates a MipmapRawData instnce which can be used to upload data to 3D Image instances: - * - * @param in_aspect Image aspect to modify. - * @param in_n_layer Index of a texture layer the mip-map data should be uploaded to. - * @param in_n_slices Number of texture slices to be updated. - * @param in_n_mipmap Index of the mipmap to be updated. - * @param in_linear_tightly_packed_data_ptr Pointer to raw mip-map data. - * @param in_linear_tightly_packed_data_vector_ptr Vector holding raw mip-map data. - * @param in_slice_data_size Number of bytes available for reading under @param in_linear_tightly_packed_data_ptr for a single slice. - * @param in_row_size Number of bytes each texture row takes. - * - * @return As per description. - **/ - static MipmapRawData create_3D_from_uchar_ptr (VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layer_slices, - uint32_t in_n_mipmap, - std::shared_ptr in_linear_tightly_packed_data_ptr, - uint32_t in_slice_data_size, - uint32_t in_row_size); - static MipmapRawData create_3D_from_uchar_ptr (VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layer_slices, - uint32_t in_n_mipmap, - const unsigned char* in_linear_tightly_packed_data_ptr, - uint32_t in_slice_data_size, - uint32_t in_row_size); - static MipmapRawData create_3D_from_uchar_vector_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layer_slices, - uint32_t in_n_mipmap, - std::shared_ptr > in_linear_tightly_packed_data_ptr, - uint32_t in_slice_data_size, - uint32_t in_row_size); - - /** Creates a MipmapRawData instance which can be used to upload data to Cube Map Image instances: - * - * @param in_aspect Image aspect to modify. - * @param in_n_layer Index of a texture layer the mip-map data should be uploaded to. - * Valid values and corresponding cube map faces: 0: -X, 1: -Y, 2: -Z, 3: +X, 4: +Y, 5: +Z - * @param in_n_mipmap Index of the mipmap to be updated. - * @param in_linear_tightly_packed_data_ptr Pointer to raw mip-map data. - * @param in_linear_tightly_packed_data_vector_ptr Vector holding raw mip-map data. - * @param in_data_size Number of bytes available for reading under @param in_linear_tightly_packed_data_ptr. - * @param in_row_size Number of bytes each texture row takes. - * - * @return As per description. 
- **/ - static MipmapRawData create_cube_map_from_uchar_ptr (VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_mipmap, - std::shared_ptr in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size); - static MipmapRawData create_cube_map_from_uchar_ptr (VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_mipmap, - const unsigned char* in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size); - static MipmapRawData create_cube_map_from_uchar_vector_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_mipmap, - std::shared_ptr > in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size); - - /** Creates a MipmapRawData instance which can be used to upload data to Cube Map Array Image instances: - * - * @param in_aspect Image aspect to modify. - * @param in_n_layer Index of a texture layer the mip-map data should be uploaded to. - * Cube map faces, as selected for layer at index (n_layer % 6), are: - * 0: -X, 1: -Y, 2: -Z, 3: +X, 4: +Y, 5: +Z - * @param in_n_layers Number of texture layers to update. - * @param in_n_mipmap Index of the mipmap to be updated. - * @param in_linear_tightly_packed_data_ptr Pointer to raw mip-map data. - * @param in_linear_tightly_packed_data_vector_ptr Vector holding raw mip-map data. - * @param in_data_size Number of bytes available for reading under @param in_linear_tightly_packed_data_ptr. - * @param in_row_size Number of bytes each texture row takes. - * - * @return As per description. - **/ - static MipmapRawData create_cube_map_array_from_uchar_ptr (VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - std::shared_ptr in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size); - static MipmapRawData create_cube_map_array_from_uchar_ptr (VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - const unsigned char* in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size); - static MipmapRawData create_cube_map_array_from_uchar_vector_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - std::shared_ptr > in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size); - - private: - static MipmapRawData create_1D (VkImageAspectFlagBits in_aspect, - uint32_t in_n_mipmap, - uint32_t in_row_size); - static MipmapRawData create_1D_array(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - uint32_t in_row_size, - uint32_t in_data_size); - static MipmapRawData create_2D (VkImageAspectFlagBits in_aspect, - uint32_t in_n_mipmap, - uint32_t in_data_size, - uint32_t in_row_size); - static MipmapRawData create_2D_array(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - uint32_t in_data_size, - uint32_t in_row_size); - static MipmapRawData create_3D (VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_slices, - uint32_t in_n_mipmap, - uint32_t in_data_size, - uint32_t in_row_size); - } MipmapRawData; - - /* Dummy delete functor */ - template - struct NullDeleter - { - void operator()(Type* in_unused_ptr) - { - in_unused_ptr; - } - }; - - typedef enum - { - /* NOTE: If new entries are added or existing entry order is modified, make sure to - * update 
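The MipmapRawData::create_*() helpers above expect tightly packed data described by row_size and data_size. A small sketch of the sizing arithmetic for an R8G8B8A8 2D mip level (the struct and helper names are illustrative):

#include <cstdint>

/* Illustrative sizing helper for a tightly packed R8G8B8A8 mip level. */
struct MipSizing
{
    uint32_t row_size;  /* bytes per texel row       */
    uint32_t data_size; /* bytes for the whole level */
};

static MipSizing size_rgba8_mip(uint32_t in_mip_width,
                                uint32_t in_mip_height)
{
    const uint32_t bytes_per_texel = 4;

    MipSizing result;

    result.row_size  = in_mip_width * bytes_per_texel;
    result.data_size = result.row_size * in_mip_height;

    return result;
}

The resulting values, together with a buffer of data_size bytes, would then be passed to e.g. create_2D_from_uchar_vector_ptr() for the matching aspect and mip index.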
Anvil::ObjectTracker::get_object_type_name(). - */ - OBJECT_TYPE_FIRST, - - OBJECT_TYPE_BUFFER = OBJECT_TYPE_FIRST, - OBJECT_TYPE_BUFFER_VIEW, - OBJECT_TYPE_COMMAND_BUFFER, - OBJECT_TYPE_COMMAND_POOL, - OBJECT_TYPE_COMPUTE_PIPELINE_MANAGER, - OBJECT_TYPE_DESCRIPTOR_POOL, - OBJECT_TYPE_DESCRIPTOR_SET, - OBJECT_TYPE_DESCRIPTOR_SET_GROUP, - OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, - OBJECT_TYPE_DEVICE, - OBJECT_TYPE_EVENT, - OBJECT_TYPE_FENCE, - OBJECT_TYPE_FRAMEBUFFER, - OBJECT_TYPE_GRAPHICS_PIPELINE_MANAGER, - OBJECT_TYPE_IMAGE, - OBJECT_TYPE_IMAGE_VIEW, - OBJECT_TYPE_INSTANCE, - OBJECT_TYPE_MEMORY_BLOCK, - OBJECT_TYPE_PHYSICAL_DEVICE, - OBJECT_TYPE_PIPELINE_CACHE, - OBJECT_TYPE_PIPELINE_LAYOUT, - OBJECT_TYPE_PIPELINE_LAYOUT_MANAGER, - OBJECT_TYPE_QUERY_POOL, - OBJECT_TYPE_QUEUE, - OBJECT_TYPE_RENDER_PASS, - OBJECT_TYPE_RENDERING_SURFACE, - OBJECT_TYPE_SAMPLER, - OBJECT_TYPE_SEMAPHORE, - OBJECT_TYPE_SHADER_MODULE, - OBJECT_TYPE_SWAPCHAIN, - - OBJECT_TYPE_GLSL_SHADER_TO_SPIRV_GENERATOR, - OBJECT_TYPE_GRAPHICS_PIPELINE, - - /* Always last */ - OBJECT_TYPE_COUNT, - OBJECT_TYPE_UNKNOWN = OBJECT_TYPE_COUNT - } ObjectType; - - /** Defines, to what extent occlusion queries are going to be used. - * - * Only used for second-level command buffer recording policy declaration. - **/ - typedef enum - { - /** Occlusion queries are not going to be used */ - OCCLUSION_QUERY_SUPPORT_SCOPE_NOT_REQUIRED, - - /** Non-precise occlusion queries may be used */ - OCCLUSION_QUERY_SUPPORT_SCOPE_REQUIRED_NONPRECISE, - - /** Pprecise occlusion queries may be used */ - OCCLUSION_QUERY_SUPPORT_SCOPE_REQUIRED_PRECISE, - } OcclusionQuerySupportScope; - - /* A single push constant range descriptor */ - typedef struct PushConstantRange - { - uint32_t offset; - uint32_t size; - VkShaderStageFlagBits stages; - - /** Constructor - * - * @param in_offset Start offset for the range. - * @param in_size Size of the range. - * @param in_stages Valid pipeline stages for the range. - */ - PushConstantRange(uint32_t in_offset, - uint32_t in_size, - VkShaderStageFlags in_stages) - { - offset = in_offset; - size = in_size; - stages = static_cast(in_stages); - } - - /** Comparison operator. Used internally. */ - bool operator==(const PushConstantRange& in) const - { - return (offset == in.offset && - size == in.size && - stages == in.stages); - } - } PushConstantRange; - typedef uint32_t BindingElementIndex; typedef uint32_t BindingIndex; typedef uint32_t NumberOfBindingElements; typedef BindingElementIndex StartBindingElementIndex; typedef std::pair BindingElementArrayRange; - typedef std::vector PushConstantRanges; - - /** A bitmask defining one or more queue family usage.*/ - typedef enum - { - QUEUE_FAMILY_COMPUTE_BIT = 1 << 0, - QUEUE_FAMILY_DMA_BIT = 1 << 1, - QUEUE_FAMILY_GRAPHICS_BIT = 1 << 2, - } QueueFamily; - typedef int QueueFamilyBits; - - /** Holds information about a single Vulkan Queue Family. */ - typedef struct QueueFamilyInfo - { - VkQueueFlagsVariable(flags); - - VkExtent3D min_image_transfer_granularity; - uint32_t n_queues; - uint32_t n_timestamp_bits; - - /** Constructor. Initializes the instance using data provided by the driver. - * - * @param in_props Vulkan structure to use for initialization. 
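The PushConstantRange wrapper above maps one-to-one onto VkPushConstantRange. A minimal sketch of that correspondence for a 16-byte range visible to the vertex and fragment stages (the helper names are illustrative; only the wrapper itself is declared above):

#include <vulkan/vulkan.h>

/* Illustrative: a 16-byte range visible to the vertex and fragment stages. */
static Anvil::PushConstantRange make_example_range()
{
    return Anvil::PushConstantRange(0,  /* in_offset */
                                    16, /* in_size   */
                                    VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT);
}

/* Illustrative: the stock Vulkan structure the wrapper corresponds to. */
static VkPushConstantRange to_vk_push_constant_range(const Anvil::PushConstantRange& in_range)
{
    VkPushConstantRange result;

    result.offset     = in_range.offset;
    result.size       = in_range.size;
    result.stageFlags = in_range.stages;

    return result;
}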
- **/ - explicit QueueFamilyInfo(const VkQueueFamilyProperties& in_props) - { - flags = in_props.queueFlags; - min_image_transfer_granularity = in_props.minImageTransferGranularity; - n_queues = in_props.queueCount; - n_timestamp_bits = in_props.timestampValidBits; - } - } QueueFamilyInfo; - - extern bool operator==(const QueueFamilyInfo& in1, - const QueueFamilyInfo& in2); - typedef std::vector QueueFamilyInfoItems; - - /** Enumerates all available queue family types */ - typedef enum - { - QUEUE_FAMILY_TYPE_COMPUTE, - QUEUE_FAMILY_TYPE_TRANSFER, - QUEUE_FAMILY_TYPE_UNIVERSAL, /* compute + graphics */ - - /* Always last */ - QUEUE_FAMILY_TYPE_COUNT, - QUEUE_FAMILY_TYPE_FIRST = QUEUE_FAMILY_TYPE_COMPUTE, - QUEUE_FAMILY_TYPE_UNDEFINED = QUEUE_FAMILY_TYPE_COUNT - } QueueFamilyType; + /** "About to be deleted" call-back function prototype. */ + typedef std::function OnMemoryBlockReleaseCallbackFunction; /** Base pipeline ID. Internal type, used to represent compute / graphics pipeline IDs */ typedef uint32_t PipelineID; - /** Pipeline layout ID */ - typedef uint32_t PipelineLayoutID; - - /** Compute Pipeline ID */ - typedef PipelineID ComputePipelineID; - - /** Graphics Pipeline ID */ - typedef PipelineID GraphicsPipelineID; - - /* Used internally by Buffer and Image to track page occupancy status */ - typedef union - { - uint32_t raw; - - struct - { - uint8_t page_bit_0 : 1; - uint8_t page_bit_1 : 1; - uint8_t page_bit_2 : 1; - uint8_t page_bit_3 : 1; - uint8_t page_bit_4 : 1; - uint8_t page_bit_5 : 1; - uint8_t page_bit_6 : 1; - uint8_t page_bit_7 : 1; - uint8_t page_bit_8 : 1; - uint8_t page_bit_9 : 1; - uint8_t page_bit_10 : 1; - uint8_t page_bit_11 : 1; - uint8_t page_bit_12 : 1; - uint8_t page_bit_13 : 1; - uint8_t page_bit_14 : 1; - uint8_t page_bit_15 : 1; - uint8_t page_bit_16 : 1; - uint8_t page_bit_17 : 1; - uint8_t page_bit_18 : 1; - uint8_t page_bit_19 : 1; - uint8_t page_bit_20 : 1; - uint8_t page_bit_21 : 1; - uint8_t page_bit_22 : 1; - uint8_t page_bit_23 : 1; - uint8_t page_bit_24 : 1; - uint8_t page_bit_25 : 1; - uint8_t page_bit_26 : 1; - uint8_t page_bit_27 : 1; - uint8_t page_bit_28 : 1; - uint8_t page_bit_29 : 1; - uint8_t page_bit_30 : 1; - uint8_t page_bit_31 : 1; - } page_bits; - } PageOccupancyStatus; - /* Index of a query within parent query pool instance */ typedef uint32_t QueryIndex; /* Unique ID of a render-pass attachment within scope of a RenderPass instance. */ typedef uint32_t RenderPassAttachmentID; - typedef enum - { - RENDERING_SURFACE_TYPE_GENERAL, - } RenderingSurfaceType; - - /* A pair of 32-bit FP values which describes a sample location */ - typedef std::pair SampleLocation; - - /* Specifies one of the compute / rendering pipeline stages. */ - typedef enum - { - SHADER_STAGE_FIRST, - - SHADER_STAGE_COMPUTE = SHADER_STAGE_FIRST, - SHADER_STAGE_FRAGMENT, - SHADER_STAGE_GEOMETRY, - SHADER_STAGE_TESSELLATION_CONTROL, - SHADER_STAGE_TESSELLATION_EVALUATION, - SHADER_STAGE_VERTEX, - - SHADER_STAGE_COUNT, - SHADER_STAGE_UNKNOWN = SHADER_STAGE_COUNT - } ShaderStage; - - /** Holds all information related to a specific shader module stage entry-point. */ - typedef struct ShaderModuleStageEntryPoint - { - std::string name; - std::shared_ptr shader_module_ptr; - Anvil::ShaderStage stage; - - /** Dummy constructor */ - ShaderModuleStageEntryPoint(); - - /** Copy constructor. */ - ShaderModuleStageEntryPoint(const ShaderModuleStageEntryPoint& in); - - /** Constructor. - * - * @param in_name Entry-point name. Must not be nullptr. 
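The PageOccupancyStatus union above exposes 32 per-page bits next to a raw 32-bit view. A minimal sketch of the equivalent bookkeeping performed against the raw mask (helper names are illustrative):

#include <cstdint>

/* Illustrative helpers: one-bit-per-page occupancy tracking, equivalent to
 * toggling the page_bit_* fields above via the raw 32-bit view. */
static inline void mark_page_bound(uint32_t& io_occupancy_raw,
                                   uint32_t  in_page_index)
{
    io_occupancy_raw |= 1u << (in_page_index & 31u);
}

static inline bool is_page_bound(uint32_t in_occupancy_raw,
                                 uint32_t in_page_index)
{
    return (in_occupancy_raw & (1u << (in_page_index & 31u))) != 0;
}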
- * @param in_shader_module_ptr ShaderModule instance to use. - * @param in_stage Shader stage the entry-point implements. - */ - ShaderModuleStageEntryPoint(const std::string& in_name, - std::shared_ptr in_shader_module_ptr, - ShaderStage in_stage); - - /** Destructor. */ - ~ShaderModuleStageEntryPoint(); - - ShaderModuleStageEntryPoint& operator=(const ShaderModuleStageEntryPoint&); - } ShaderModuleStageEntryPoint; - - /* Describes sparse properties for an image format */ - typedef struct SparseImageAspectProperties - { - VkImageAspectFlagsVariable (aspect_mask); - VkSparseImageFormatFlagsVariable(flags); - - VkExtent3D granularity; - uint32_t mip_tail_first_lod; - VkDeviceSize mip_tail_offset; - VkDeviceSize mip_tail_size; - VkDeviceSize mip_tail_stride; - - SparseImageAspectProperties() - { - memset(this, - 0, - sizeof(*this) ); - } - - SparseImageAspectProperties(const VkSparseImageMemoryRequirements& in_req) - { - aspect_mask = in_req.formatProperties.aspectMask; - flags = in_req.formatProperties.flags; - granularity = in_req.formatProperties.imageGranularity; - mip_tail_first_lod = in_req.imageMipTailFirstLod; - mip_tail_offset = in_req.imageMipTailOffset; - mip_tail_size = in_req.imageMipTailSize; - mip_tail_stride = in_req.imageMipTailStride; - } - } SparseImageAspectProperties; - /* Unique ID of a sparse memory bind update */ typedef uint32_t SparseMemoryBindInfoID; - typedef enum - { - /* Support sparse binding only */ - SPARSE_RESIDENCY_SCOPE_NONE, - - /* Support sparse residency, do not support sparse aliased residency */ - SPARSE_RESIDENCY_SCOPE_NONALIASED, - - /* Support sparse aliased residency */ - SPARSE_RESIDENCY_SCOPE_ALIASED, - - SPARSE_RESIDENCY_SCOPE_UNDEFINED - } SparseResidencyScope; - - /* Holds 16-bit storage features */ - typedef struct StorageFeatures16Bit - { - bool is_input_output_storage_supported; - bool is_push_constant_16_bit_storage_supported; - bool is_storage_buffer_16_bit_access_supported; - bool is_uniform_and_storage_buffer_16_bit_access_supported; - - StorageFeatures16Bit() - { - is_input_output_storage_supported = false; - is_push_constant_16_bit_storage_supported = false; - is_storage_buffer_16_bit_access_supported = false; - is_uniform_and_storage_buffer_16_bit_access_supported = false; - } - - StorageFeatures16Bit(bool in_is_input_output_storage_supported, - bool in_is_push_constant_16_bit_storage_supported, - bool in_is_storage_buffer_16_bit_access_supported, - bool in_is_uniform_and_storage_buffer_16_bit_access_supported) - { - is_input_output_storage_supported = in_is_input_output_storage_supported; - is_push_constant_16_bit_storage_supported = in_is_push_constant_16_bit_storage_supported; - is_storage_buffer_16_bit_access_supported = in_is_storage_buffer_16_bit_access_supported; - is_uniform_and_storage_buffer_16_bit_access_supported = in_is_uniform_and_storage_buffer_16_bit_access_supported; - } - } StorageFeatures16Bit; - /* Unique ID of a render-pass' sub-pass attachment within scope of a RenderPass instance. */ typedef uint32_t SubPassAttachmentID; /* Unique ID of a sub-pass within scope of a RenderPass instance. */ typedef uint32_t SubPassID; +}; - /** Defines supported timestamp capture modes. */ - typedef enum - { - /* No timestamps should be captured */ - TIMESTAMP_CAPTURE_MODE_DISABLED, - - /* Two timestamps should be captured: - * - * 1. top-of-pipe timestamp, preceding actual commands. - * 2. tof-of-pipe timestamp, after all commands are recorded. 
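SparseImageAspectProperties above is populated from VkSparseImageMemoryRequirements. A minimal sketch of querying those requirements with the stock entrypoint (the helper name is illustrative):

#include <vector>
#include <vulkan/vulkan.h>

/* Sketch: enumerating the per-aspect requirements the converting constructor
 * above is fed with. */
static std::vector<VkSparseImageMemoryRequirements> query_sparse_image_reqs(VkDevice in_device,
                                                                            VkImage  in_image)
{
    uint32_t n_reqs = 0;

    vkGetImageSparseMemoryRequirements(in_device,
                                       in_image,
                                       &n_reqs,
                                       nullptr);

    std::vector<VkSparseImageMemoryRequirements> reqs(n_reqs);

    vkGetImageSparseMemoryRequirements(in_device,
                                       in_image,
                                       &n_reqs,
                                       reqs.data() );

    return reqs;
}

Each returned element could then be wrapped via the converting constructor shown above.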
- */ - TIMESTAMP_CAPTURE_MODE_ENABLED_COMMAND_SUBMISSION_TIME, - - /* Two timestamps should be captured: - * - * 1. top-of-pipe timestamp, preceding actual commands. - * 2. bottom-of-pipe timestamp, after all commands are recorded. - */ - TIMESTAMP_CAPTURE_MODE_ENABLED_COMMAND_EXECUTION_TIME - } TimestampCaptureMode; - - namespace Utils - { - /** Converts a Anvil::QueueFamilyBits bitfield value to an array of queue family indices. - * - * @param in_queue_families Input value to convert from. - * @param out_opt_queue_family_indices_ptr If not NULL, deref will be updated with *@param out_opt_n_queue_family_indices_ptr - * values, corresponding to queue family indices, as specified under @param in_queue_families. - * @param out_opt_n_queue_family_indices_ptr If not NULL, deref will be set to the number of items that would be or were written - * under @param out_opt_queue_family_indices_ptr. - * - **/ - void convert_queue_family_bits_to_family_indices(std::weak_ptr in_device_ptr, - Anvil::QueueFamilyBits in_queue_families, - uint32_t* out_opt_queue_family_indices_ptr, - uint32_t* out_opt_n_queue_family_indices_ptr); - - /** Returns an access mask which has all the access bits, relevant to the user-specified image layout, - * enabled. - * - * The access mask can be further restricted to the specified queue family type. - */ - VkAccessFlags get_access_mask_from_image_layout(VkImageLayout in_layout, - Anvil::QueueFamilyType in_queue_family_type = Anvil::QUEUE_FAMILY_TYPE_UNDEFINED); - - /** Converts a pair of VkMemoryPropertyFlags and VkMemoryHeapFlags bitfields to a corresponding Anvil::MemoryFeatureFlags - * enum. - * - * @param in_opt_mem_type_flags Vulkan memory property flags. May be 0. - * @param in_opt_mem_heap_flags Vulkan memory heap flags. May be 0. - * - * @return Result bitfield. - */ - Anvil::MemoryFeatureFlags get_memory_feature_flags_from_vk_property_flags(VkMemoryPropertyFlags in_opt_mem_type_flags, - VkMemoryHeapFlags in_opt_mem_heap_flags); - - /** Converts the specified VkAttachmentLoadOp value to a raw string - * - * @param in_load_op Input value. - * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(VkAttachmentLoadOp in_load_op); - - /** Converts the specified VkAttachmentStoreOp value to a raw string - * - * @param in_store_op Input value. - * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(VkAttachmentStoreOp in_store_op); - - /** Converts the specified VkBlendFactor value to a raw string - * - * @param in_blend_factor Input value. - * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(VkBlendFactor in_blend_factor); - - /** Converts the specified VkBlendOp value to a raw string - * - * @param in_blend_op Input value. - * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(VkBlendOp in_blend_op); - - /** Converts the specified VkCompareOp value to a raw string - * - * @param in_compare_op Input value. - * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(VkCompareOp in_compare_op); - - /** Converts the specified VkCullModeFlagBits value to a raw string - * - * @param in_cull_mode Input value. - * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(VkCullModeFlagBits in_cull_mode); - - /** Converts the specified VkDescriptorType value to a raw string - * - * @param in_descriptor_type Input value. 
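get_vk_property_flags_from_memory_feature_flags() above maps Anvil memory feature flags onto Vulkan flags. A rough, illustrative sketch of the property-flag side of such a mapping, using the MEMORY_FEATURE_FLAG_* values defined earlier in this header; this is not the library's implementation, assumes this header is in scope, and omits the heap-flag output of the real helper:

#include <vulkan/vulkan.h>

/* Illustrative only - NOT the library's implementation. */
static VkMemoryPropertyFlags memory_features_to_vk_property_flags(Anvil::MemoryFeatureFlags in_features)
{
    VkMemoryPropertyFlags result = 0;

    if ((in_features & Anvil::MEMORY_FEATURE_FLAG_DEVICE_LOCAL)     != 0) result |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    if ((in_features & Anvil::MEMORY_FEATURE_FLAG_HOST_CACHED)      != 0) result |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
    if ((in_features & Anvil::MEMORY_FEATURE_FLAG_HOST_COHERENT)    != 0) result |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    if ((in_features & Anvil::MEMORY_FEATURE_FLAG_LAZILY_ALLOCATED) != 0) result |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
    if ((in_features & Anvil::MEMORY_FEATURE_FLAG_MAPPABLE)         != 0) result |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;

    return result;
}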
- * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(VkDescriptorType in_descriptor_type); - - /** Converts the specified VkFrontFace value to a raw string - * - * @param in_front_face Input value. - * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(VkFrontFace in_front_face); - - /** Converts the specified VkImageAspectFlagBits value to a raw string - * - * @param in_image_aspect_flag Input value. - * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(VkImageAspectFlagBits in_image_aspect_flag); - - /** Converts the specified VkImageLayout value to a raw string - * - * @param in_image_layout Input value. - * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(VkImageLayout in_image_layout); - - /** Converts the specified VkImageTiling value to a raw string - * - * @param in_image_tiling Input value. - * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(VkImageTiling in_image_tiling); - - /** Converts the specified VkImageType value to a raw string - * - * @param in_image_type Input value. - * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(VkImageType in_image_type); - - /** Converts the specified VkImageViewType value to a raw string - * - * @param in_image_view_type Input value. - * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(VkImageViewType in_image_view_type); - - /** Converts the specified VkLogicOp value to a raw string - * - * @param in_logic_op Input value. - * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(VkLogicOp in_logic_op); - - /** Converts the specified VkPolygonMode value to a raw string - * - * @param in_polygon_mode Input value. - * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(VkPolygonMode in_polygon_mode); - - /** Converts the specified VkPrimitiveTopology value to a raw string - * - * @param in_topology Input value. - * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(VkPrimitiveTopology in_topology); - - /** Converts the specified VkSampleCountFlagBits value to a raw string - * - * @param in_sample_count Input value. - * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(VkSampleCountFlagBits in_sample_count); - - /** Converts the specified Anvil::ShaderStage value to a raw string - * - * @param in_shader_stage Input value. - * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(Anvil::ShaderStage in_shader_stage); - - /** Converts the specified VkShaderStageFlagBits value to a raw string - * - * @param in_shader_stage Input value. - * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(VkShaderStageFlagBits in_shader_stage); - - /** Converts the specified VkSharingMode value to a raw string - * - * @param in_sharing_mode Input value. - * - * @return Non-NULL value if successful, NULL otherwise. - */ - const char* get_raw_string(VkSharingMode in_sharing_mode); - - /** Converts the specified VkStencilOp value to a raw string - * - * @param in_stencil_op Input value. - * - * @return Non-NULL value if successful, NULL otherwise. 
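The get_raw_string() overloads above are convenience converters intended for logging. A minimal usage sketch, assuming this header is included (the helper name is illustrative):

#include <cstdio>
#include <vulkan/vulkan.h>

/* Sketch: typical debug-logging use of the overloads above. */
static void log_image_layout(VkImageLayout in_layout)
{
    printf("Image layout: %s\n",
           Anvil::Utils::get_raw_string(in_layout) );
}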
- */ - const char* get_raw_string(VkStencilOp in_stencil_op); - - /* Returns Vulkan equivalent of @param in_shader_stage */ - VkShaderStageFlagBits get_shader_stage_flag_bits_from_shader_stage(Anvil::ShaderStage in_shader_stage); - - /** Converts an Anvil::MemoryFeatureFlags value to a pair of corresponding Vulkan enum values. - * - * @param in_mem_feature_flags Anvil memory feature flags to use for conversion. - * @param out_mem_type_flags_ptr Deref will be set to a corresponding VkMemoryPropertyFlags value. Must not - * be nullptr. - * @param out_mem_heap_flags_ptr Deref will be set to a corresponding VkMemoryHeapFlags value. Must not - * be nullptr. - **/ - void get_vk_property_flags_from_memory_feature_flags(Anvil::MemoryFeatureFlags in_mem_feature_flags, - VkMemoryPropertyFlags* out_mem_type_flags_ptr, - VkMemoryHeapFlags* out_mem_heap_flags_ptr); - - /** Tells whether @param in_value is a power-of-two. */ - template - type is_pow2(const type in_value) - { - return ((in_value & (in_value - 1)) == 0); - } - - /** Rounds down @param in_value to a multiple of @param in_base */ - template - type round_down(const type in_value, const type in_base) - { - if ((in_value % in_base) == 0) - { - return in_value; - } - else - { - return in_value - (in_value % in_base); - } - } - - /** Rounds up @param in_value to a multiple of @param in_base */ - template - type round_up(const type in_value, const type in_base) - { - if ((in_value % in_base) == 0) - { - return in_value; - } - else - { - return in_value + (in_base - in_value % in_base); - } - } - - /** Container for sparse memory binding updates */ - class SparseMemoryBindingUpdateInfo - { - public: - /* Public functions */ - - /** Constructor. - * - * Marks the container as dirty by default. - */ - SparseMemoryBindingUpdateInfo(); - - /** Adds a new bind info to the container. The application can then append buffer memory updates - * to the bind info by calling append_buffer_memory_update(). - * - * @param in_n_signal_semaphores Number of semaphores to signal after the bind info is processed. Can be 0. - * @param in_opt_signal_semaphores_ptr An array of semaphores (sized @param in_n_signal_semaphores) to signal. - * Should be null if @param in_n_signal_semaphores is 0. - * @param in_n_wait_semaphores Number of semaphores to wait on before the bind info should start being - * processed. Can be 0. - * @param in_opt_wait_semaphores_ptr An array of semaphores (sized @param in_n_wait_semaphores) to wait on, - * before processing the bind info. Should be null if @param in_n_wait_semaphores - * is 0. - * - * @return ID of the new bind info. - **/ - SparseMemoryBindInfoID add_bind_info(uint32_t in_n_signal_semaphores, - std::shared_ptr* in_opt_signal_semaphores_ptr, - uint32_t in_n_wait_semaphores, - std::shared_ptr* in_opt_wait_semaphores_ptr); - - /** Appends a new buffer memory block update to the bind info. - * - * @param in_bind_info_id ID of the bind info to append the update to. - * @param in_buffer_ptr Buffer instance to update. Must not be NULL. - * @param in_buffer_memory_start_offset Start offset of the target memory region. - * @param in_opt_memory_block_ptr Memory block to use for the binding. May be NULL. - * @param in_opt_memory_block_start_offset Start offset of the source memory region. Ignored - * if @param in_memory_block_ptr is NULL. - * @param in_size Size of the memory block to update. 
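round_up() above is the usual helper for alignment math. A minimal sketch aligning a dynamic uniform-buffer offset; the 256-byte constant merely stands in for a device's minUniformBufferOffsetAlignment limit, which a real caller would query from the physical device limits:

#include <cstdint>

/* Sketch: aligning a uniform-buffer offset with the round_up() helper above. */
static uint64_t align_ubo_offset(uint64_t in_offset)
{
    const uint64_t alignment = 256;

    return Anvil::Utils::round_up(in_offset,
                                  alignment);
}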
- **/ - void append_buffer_memory_update(SparseMemoryBindInfoID in_bind_info_id, - std::shared_ptr in_buffer_ptr, - VkDeviceSize in_buffer_memory_start_offset, - std::shared_ptr in_opt_memory_block_ptr, - VkDeviceSize in_opt_memory_block_start_offset, - VkDeviceSize in_size); - - /** Appends a new non-opaque image memory update to the bind info. - * - * @param in_bind_info_id ID of the bind info to append the update to. - * @param in_image_ptr Image instance to update. Must not be NULL. - * @param in_subresource Subresource which should be used for the update operation. - * @param in_offset Image region offset for the update operation. - * @param in_extent Extent of the update operation. - * @param in_flags VkSparseMemoryBindFlags value to use for the update. - * @param in_opt_memory_block_ptr Memory block to use for the update operation. May be NULL. - * @param in_opt_memory_block_start_offset Start offset of the source memory region. ignored if - * @param in_opt_memory_block_ptr is NULL. - **/ - void append_image_memory_update(SparseMemoryBindInfoID in_bind_info_id, - std::shared_ptr in_image_ptr, - const VkImageSubresource& in_subresource, - const VkOffset3D& in_offset, - const VkExtent3D& in_extent, - VkSparseMemoryBindFlags in_flags, - std::shared_ptr in_opt_memory_block_ptr, - VkDeviceSize in_opt_memory_block_start_offset); - - /** Appends a new opaque image memory update to the bind info. - * - * @param in_bind_info_id ID of the bind info to append the update to. - * @param in_image_ptr Image instance to update. Must not be NULL. - * @param in_resource_offset Raw memory image start offset to use for the update. - * @param in_size Number of bytes to update. - * @param in_flags VkSparseMemoryBindFlags value to use for the update. - * @param in_opt_memory_block_ptr Memory block to use for the update operation. May be NULL. - * @param in_opt_memory_block_start_offset Start offset of the source memory region. Ignored if - * @param in_opt_memory_block_ptr is NULL. - **/ - void append_opaque_image_memory_update(SparseMemoryBindInfoID in_bind_info_id, - std::shared_ptr in_image_ptr, - VkDeviceSize in_resource_offset, - VkDeviceSize in_size, - VkSparseMemoryBindFlags in_flags, - std::shared_ptr in_opt_memory_block_ptr, - VkDeviceSize in_opt_memory_block_start_offset); - - /** Retrieves bind info properties. - * - * @param in_bind_info_id ID of the bind info to retrieve properties of. - * @param out_opt_n_buffer_memory_updates_ptr Deref will be set to the number of buffer memory updates, assigned - * to the specified bind info item. May be NULL. - * @param out_opt_n_image_memory_updates_ptr Deref will be set to the number of non-opaque image memory updates, assigned - * to the specified bind info item. May be NULL. - * @param out_opt_n_image_opaque_memory_updates_ptr Deref will be set to the number of image opaque memory updates, assigned - * to the specified bind info item. May be NULL. - * @param out_opt_fence_to_set_ptr Deref will be set to the fence, which is going to be set once all - * updates assigned to the bind info item are executed. May be NULL. - * @param out_opt_n_signal_semaphores_ptr Deref will be set to the number of semaphores, which should be - * signalled after bindings are applied. May be NULL. - * @param out_opt_signal_semaphores_ptr_ptr Deref will be set to an array of signal semaphores. May be NULL. - * @param out_opt_n_wait_semaphores_ptr Deref will be set to the number of semaphores, which should be - * waited on before bindings are applied. May be NULL. 
- * @param out_opt_wait_semaphores_ptr_ptr Deref will be set to an array of wait semaphores. May be NULL. - * - * @return true if successful, false otherwise. - **/ - bool get_bind_info_properties(SparseMemoryBindInfoID in_bind_info_id, - uint32_t* const out_opt_n_buffer_memory_updates_ptr, - uint32_t* const out_opt_n_image_memory_updates_ptr, - uint32_t* const out_opt_n_image_opaque_memory_updates_ptr, - uint32_t* const out_opt_n_signal_semaphores_ptr, - const std::shared_ptr** out_opt_signal_semaphores_ptr_ptr, - uint32_t* const out_opt_n_wait_semaphores_ptr, - const std::shared_ptr** out_opt_wait_semaphores_ptr_ptr) const; - - /** Retrieves Vulkan descriptors which should be used for the vkQueueBindSparse() call. - * - * This call will trigger baking, if the container is marked as dirty. - * - * @param out_bind_info_count_ptr Deref will be set to the value which should be passed in the - * argument of the call. Must not be NULL. - * @param out_bind_info_ptr Deref will be set to a pointer to an array, which should be - * passed in the argument of the call. Must not be NULL. - * @param out_fence_to_set_ptr Deref will be set to the fence, which should be set by the implementation - * after all bindings are in place. Note that the fence itself is optional - * and may be null. - **/ - void get_bind_sparse_call_args(uint32_t* out_bind_info_count_ptr, - const VkBindSparseInfo** out_bind_info_ptr, - std::shared_ptr* out_fence_to_set_ptr); - - /** Retrieves details of buffer memory binding updates, cached for user-specified bind info. - * - * @param in_bind_info_id ID of the bind info, which owns the update, whose properties are - * being queried. - * @param in_n_update Index of the buffer memory update to retrieve properties of. - * @param out_opt_buffer_ptr If not NULL, deref will be set to the buffer, whose sparse memory - * binding should be updated. Otherwise ignored. - * @param out_opt_buffer_memory_start_offset_ptr If not NULL, deref will be set to the start offset of the buffer, - * at which the memory block should be bound. Otherwise ignored. - * @param out_opt_memory_block_ptr If not NULL, deref will be set to the memory block, which should - * be used for the binding. Otherwise ignored. - * @param out_opt_memory_block_start_offset_ptr If not NULL, deref will be set to the start offset of the memory block, - * from which the memory region, which should be used for the binding, - * starts. Otherwise ignored. - * @param out_opt_size_ptr If not NULL, deref will be set to the size of the memory region, - * which should be used for the binding. Otherwise ignored. - * - * @return true if successful, false otherwise. - **/ - bool get_buffer_memory_update_properties(SparseMemoryBindInfoID in_bind_info_id, - uint32_t in_n_update, - std::shared_ptr* out_opt_buffer_ptr, - VkDeviceSize* out_opt_buffer_memory_start_offset_ptr, - std::shared_ptr* out_opt_memory_block_ptr, - VkDeviceSize* out_opt_memory_block_start_offset_ptr, - VkDeviceSize* out_opt_size_ptr) const; - - /** Retrieves the fence, if one was earlier assigned to the instance */ - std::shared_ptr get_fence() const - { - return m_fence_ptr; - } - - /** Retrieves properties of a non-opaque image memory update with a given ID. - * - * @param in_bind_info_id ID of the bind info, which owns the update, and whose properties are - * being queried. - * @param in_n_update Index of the image memory update to retrieve properties of. - * @param out_opt_image_ptr_ptr If not NULL, deref will be set to the image which should be updated. 
- * Otherwise ignored. - * @param out_opt_subresouce_ptr If not NULL, deref will be set to the subresource to be used for the - * update. Otherwise ignored. - * @param out_opt_offset_ptr If not NULL, deref will be set to image start offset, at which - * the memory block should be bound. Otherwise ignored. - * @param out_opt_extent_ptr If not NULL, deref will be set to the extent of the update. Otherwise - * ignored. - * @param out_opt_flags_ptr If not NULL, deref will be set to VkSparseMemoryBindFlags value which - * is going to be used for the update. Otherwise ignored. - * @param out_opt_memory_block_ptr_ptr If not NULL, deref will be set to pointer to the memory block, which - * is going to be used for the bind operation. Otherwise ignored. - * @param out_opt_memory_block_start_offset_ptr If not NULL, deref will be set to the start offset of the memory block, - * which should be used for the binding operation. Otherwise ignored. - * - * @return true if successful, false otherwise. - **/ - bool get_image_memory_update_properties(SparseMemoryBindInfoID in_bind_info_id, - uint32_t in_n_update, - std::shared_ptr* out_opt_image_ptr_ptr, - VkImageSubresource* out_opt_subresource_ptr, - VkOffset3D* out_opt_offset_ptr, - VkExtent3D* out_opt_extent_ptr, - VkSparseMemoryBindFlags* out_opt_flags_ptr, - std::shared_ptr* out_opt_memory_block_ptr_ptr, - VkDeviceSize* out_opt_memory_block_start_offset_ptr) const; - - /** Retrieves properties of an opaque image memory updated with a given ID. - * - * @param in_bind_info_id ID of the bind info, which owns the update, and whose properties are being - * queried. - * @param in_n_update Index of the opaque image memory update to retrieve properties of. - * @param out_opt_image_ptr_ptr If not NULL, deref will be set to the image which should be updated. Otherwise - * ignored. - * @param out_opt_resource_offset_ptr If not NULL, deref will be set to the raw image memory offset, which should - * be used for the update. Otherwise ignored. - * @param out_opt_size_ptr If not NULL, deref will be set to the size of the image memory which should - * be used for the update. Otherwise ignored. - * @param out_opt_flags_ptr If not NULL, deref will be set to the VkSParseMemoryBindFlags value which is - * going to be used for the update. Otherwise igfnored. - * @param out_opt_memory_block_ptr_ptr If not NULL, deref will be set to pointer to the memory block, which is going - * to be used for the bind operation. Otherwise ignored. - * @param out_opt_memory_block_start_offset_ptr If not NULL, deref will be set to the start offset of the memory block, which - * should be used for the binding operation. Otherwise ignored. - * - * @return true if successful, false otherwise. - */ - bool get_image_opaque_memory_update_properties(SparseMemoryBindInfoID in_bind_info_id, - uint32_t in_n_update, - std::shared_ptr* out_opt_image_ptr_ptr, - VkDeviceSize* out_opt_resource_offset_ptr, - VkDeviceSize* out_opt_size_ptr, - VkSparseMemoryBindFlags* out_opt_flags_ptr, - std::shared_ptr* out_opt_memory_block_ptr_ptr, - VkDeviceSize* out_opt_memory_block_start_offset_ptr) const; - - /** Tells how many bind info items have been assigned to the descriptor */ - uint32_t get_n_bind_info_items() const - { - return static_cast(m_bindings.size() ); - } - - /* Changes the fence (null by default), which should be set by the Vulkan implementation after it finishes - * updating the bindings. 
- **/ - void set_fence(std::shared_ptr in_fence_ptr) - { - m_fence_ptr = in_fence_ptr; - } - - private: - /* Private type definitions */ - typedef struct - { - VkDeviceSize start_offset; - std::shared_ptr memory_block_ptr; - VkDeviceSize memory_block_start_offset; - VkDeviceSize size; - - VkSparseMemoryBindFlagsVariable(flags); - } GeneralBindInfo; - - typedef struct - { - VkExtent3D extent; - VkOffset3D offset; - std::shared_ptr memory_block_ptr; - VkDeviceSize memory_block_start_offset; - VkImageSubresource subresource; - - VkSparseMemoryBindFlagsVariable(flags); - } ImageBindInfo; - - typedef std::map, std::pair, std::vector >> BufferBindUpdateMap; - typedef std::map, std::pair, std::vector >> ImageBindUpdateMap; - typedef std::map, std::pair, std::vector >> ImageOpaqueBindUpdateMap; - - typedef struct BindingInfo - { - BufferBindUpdateMap buffer_updates; - ImageOpaqueBindUpdateMap image_opaque_updates; - ImageBindUpdateMap image_updates; - - std::vector > signal_semaphores; - std::vector signal_semaphores_vk; - std::vector > wait_semaphores; - std::vector wait_semaphores_vk; - - BindingInfo() - { - /* Stub */ - } - } BindingInfo; - - std::vector m_bindings; - bool m_dirty; - std::shared_ptr m_fence_ptr; - - std::vector m_bindings_vk; - std::vector m_buffer_bindings_vk; - std::vector m_image_bindings_vk; - std::vector m_image_opaque_bindings_vk; - - /* Private functions */ - SparseMemoryBindingUpdateInfo (const SparseMemoryBindingUpdateInfo&); - SparseMemoryBindingUpdateInfo operator=(const SparseMemoryBindingUpdateInfo&); - - void bake(); - }; - }; - - /* Represents a Vulkan structure header */ - typedef struct - { - VkStructureType type; - const void* next_ptr; - } VkStructHeader; +#include "misc/types_enums.h" +#include "misc/types_macro.h" - /* Describes recognized subpass attachment types */ - enum Result - { - RESULT_SUCCESS, - RESULT_ERROR, - RESULT_NOT_SUPPORTED - }; -}; /* Anvil namespace */ +#include "misc/types_classes.h" +#include "misc/types_struct.h" +#include "misc/types_utils.h" #endif /* MISC_TYPES_H */ diff --git a/include/misc/types_classes.h b/include/misc/types_classes.h new file mode 100644 index 00000000..f1894b12 --- /dev/null +++ b/include/misc/types_classes.h @@ -0,0 +1,429 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +#ifndef TYPES_CLASSES_H +#define TYPES_CLASSES_H + +#include "misc/types_struct.h" +#include + +namespace Anvil +{ + class IMemoryAllocatorBackendBase + { + public: + virtual ~IMemoryAllocatorBackendBase() + { + /* Stub */ + } + + virtual VkResult map (void* in_memory_object, + VkDeviceSize in_start_offset, + VkDeviceSize in_memory_block_start_offset, + VkDeviceSize in_size, + void** out_result_ptr) = 0; + virtual bool supports_baking() const = 0; + virtual void unmap (void* in_memory_object) = 0; + virtual std::optional get_underlying_allocation_size(void* in_memory_object) = 0; + }; + + /** Container for sparse memory binding updates */ + class SparseMemoryBindingUpdateInfo + { + public: + /* Public functions */ + + /** Constructor. + * + * Marks the container as dirty by default. + */ + SparseMemoryBindingUpdateInfo(); + + /** Adds a new bind info to the container. The application can then append buffer memory updates + * to the bind info by calling append_buffer_memory_update(). + * + * For mGPU devices, zeroth device index is used by default for both memory and resource device indices. + * You can adjust these default values by calling set_memory_device_index() and set_resource_device_index(). + * + * @param in_n_signal_semaphores Number of semaphores to signal after the bind info is processed. Can be 0. + * @param in_opt_signal_semaphores_ptrs_ptr Ptr to an array of semaphores (sized @param in_n_signal_semaphores) to signal. + * Should be null if @param in_n_signal_semaphores is 0. + * @param in_n_wait_semaphores Number of semaphores to wait on before the bind info should start being + * processed. Can be 0. + * @param in_opt_wait_semaphores_ptrs_ptr Ptr to an array of semaphores (sized @param in_n_wait_semaphores) to wait on, + * before processing the bind info. Should be null if @param in_n_wait_semaphores + * is 0. + * + * @return ID of the new bind info. + **/ + SparseMemoryBindInfoID add_bind_info(uint32_t in_n_signal_semaphores, + Anvil::Semaphore* const* in_opt_signal_semaphores_ptrs_ptr, + uint32_t in_n_wait_semaphores, + Anvil::Semaphore* const* in_opt_wait_semaphores_ptrs_ptr); + + /** Appends a new buffer memory block update to the bind info. + * + * @param in_bind_info_id ID of the bind info to append the update to. + * @param in_buffer_ptr Buffer instance to update. Must not be NULL. + * @param in_buffer_memory_start_offset Start offset of the target memory region. + * @param in_opt_memory_block_ptr Memory block to use for the binding. May be NULL. + * @param in_opt_memory_block_start_offset Start offset of the source memory region. Ignored + * if @param in_memory_block_ptr is NULL. + * @param in_opt_memory_block_owned_by_buffer TODO. + * @param in_size Size of the memory block to update. + **/ + void append_buffer_memory_update(SparseMemoryBindInfoID in_bind_info_id, + Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_buffer_memory_start_offset, + Anvil::MemoryBlock* in_opt_memory_block_ptr, + VkDeviceSize in_opt_memory_block_start_offset, + bool in_opt_memory_block_owned_by_buffer, + VkDeviceSize in_size); + + /** Appends a new non-opaque image memory update to the bind info. + * + * @param in_bind_info_id ID of the bind info to append the update to. + * @param in_image_ptr Image instance to update. Must not be NULL. + * @param in_subresource Subresource which should be used for the update operation. + * @param in_offset Image region offset for the update operation. + * @param in_extent Extent of the update operation. + * @param in_flags Flags to use for the update. 
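A minimal sketch of the add_bind_info() / append_buffer_memory_update() flow described above, binding a single memory block to the start of a sparse buffer. The helper name and the zero offsets are illustrative, semaphores are omitted, and the Anvil instances are assumed to have been created elsewhere:

#include "misc/types_classes.h"

/* Sketch: one bind info holding a single whole-buffer memory update. */
static Anvil::SparseMemoryBindInfoID bind_whole_buffer(Anvil::SparseMemoryBindingUpdateInfo& io_update_info,
                                                       Anvil::Buffer*                        in_buffer_ptr,
                                                       Anvil::MemoryBlock*                   in_memory_block_ptr,
                                                       VkDeviceSize                          in_size)
{
    const auto bind_info_id = io_update_info.add_bind_info(0,        /* in_n_signal_semaphores            */
                                                           nullptr,  /* in_opt_signal_semaphores_ptrs_ptr */
                                                           0,        /* in_n_wait_semaphores              */
                                                           nullptr); /* in_opt_wait_semaphores_ptrs_ptr   */

    io_update_info.append_buffer_memory_update(bind_info_id,
                                               in_buffer_ptr,
                                               0,                   /* in_buffer_memory_start_offset       */
                                               in_memory_block_ptr,
                                               0,                   /* in_opt_memory_block_start_offset    */
                                               false,               /* in_opt_memory_block_owned_by_buffer */
                                               in_size);

    return bind_info_id;
}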
+ * @param in_opt_memory_block_ptr Memory block to use for the update operation. May be NULL. + * @param in_opt_memory_block_start_offset Start offset of the source memory region. ignored if + * @param in_opt_memory_block_ptr is NULL. + * @param in_opt_memory_block_owned_by_image TODO + **/ + void append_image_memory_update(SparseMemoryBindInfoID in_bind_info_id, + Anvil::Image* in_image_ptr, + const Anvil::ImageSubresource& in_subresource, + const VkOffset3D& in_offset, + const VkExtent3D& in_extent, + Anvil::SparseMemoryBindFlags in_flags, + Anvil::MemoryBlock* in_opt_memory_block_ptr, + VkDeviceSize in_opt_memory_block_start_offset, + bool in_opt_memory_block_owned_by_image); + + /** Appends a new opaque image memory update to the bind info. + * + * @param in_bind_info_id ID of the bind info to append the update to. + * @param in_image_ptr Image instance to update. Must not be NULL. + * @param in_resource_offset Raw memory image start offset to use for the update. + * @param in_size Number of bytes to update. + * @param in_flags Flags to use for the update. + * @param in_opt_memory_block_ptr Memory block to use for the update operation. May be NULL. + * @param in_opt_memory_block_start_offset Start offset of the source memory region. Ignored if + * @param in_opt_memory_block_ptr is NULL. + * @param in_opt_memory_block_owned_by_image TODO + * @param in_n_plane Index of the image plane to use for the binding. Must be 0 for joint YUV + * and non-YUV images. For disjoint YUV images, must be a value between 0 and 2 + * inclusive. + **/ + void append_opaque_image_memory_update(SparseMemoryBindInfoID in_bind_info_id, + Anvil::Image* in_image_ptr, + VkDeviceSize in_resource_offset, + VkDeviceSize in_size, + Anvil::SparseMemoryBindFlags in_flags, + Anvil::MemoryBlock* in_opt_memory_block_ptr, + VkDeviceSize in_opt_memory_block_start_offset, + bool in_opt_memory_block_owned_by_image, + uint32_t in_n_plane); + + /** Retrieves bind info properties. + * + * @param in_bind_info_id ID of the bind info to retrieve properties of. + * @param out_opt_n_buffer_memory_updates_ptr Deref will be set to the number of buffer memory updates, assigned + * to the specified bind info item. May be NULL. + * @param out_opt_n_image_memory_updates_ptr Deref will be set to the number of non-opaque image memory updates, assigned + * to the specified bind info item. May be NULL. + * @param out_opt_n_image_opaque_memory_updates_ptr Deref will be set to the number of image opaque memory updates, assigned + * to the specified bind info item. May be NULL. + * @param out_opt_fence_to_set_ptr Deref will be set to the fence, which is going to be set once all + * updates assigned to the bind info item are executed. May be NULL. + * @param out_opt_n_signal_semaphores_ptr Deref will be set to the number of semaphores, which should be + * signalled after bindings are applied. May be NULL. + * @param out_opt_signal_semaphores_ptr_ptr_ptr Deref will be set to a ptr to an array of signal semaphores. May be NULL. + * @param out_opt_n_wait_semaphores_ptr Deref will be set to the number of semaphores, which should be + * waited on before bindings are applied. May be NULL. + * @param out_opt_wait_semaphores_ptr_ptr_ptr Deref will be set to a ptr to an array of wait semaphores. May be NULL. + * + * @return true if successful, false otherwise. 
+ **/ + bool get_bind_info_properties(SparseMemoryBindInfoID in_bind_info_id, + uint32_t* const out_opt_n_buffer_memory_updates_ptr, + uint32_t* const out_opt_n_image_memory_updates_ptr, + uint32_t* const out_opt_n_image_opaque_memory_updates_ptr, + uint32_t* const out_opt_n_signal_semaphores_ptr, + Anvil::Semaphore*** out_opt_signal_semaphores_ptr_ptr_ptr, + uint32_t* const out_opt_n_wait_semaphores_ptr, + Anvil::Semaphore*** out_opt_wait_semaphores_ptr_ptr_ptr); + + /** Retrieves Vulkan descriptors which should be used for the vkQueueBindSparse() call. + * + * This call will trigger baking, if the container is marked as dirty. + * + * @param out_bind_info_count_ptr Deref will be set to the value which should be passed in the + * argument of the call. Must not be NULL. + * @param out_bind_info_ptrs_ptr Deref will be set to a pointer to an array, which should be + * passed in the argument of the call. Must not be NULL. + * @param out_fence_to_set_ptrs_ptr Deref will be set to the fence, which should be set by the implementation + * after all bindings are in place. Note that the fence itself is optional + * and may be null. + **/ + void get_bind_sparse_call_args(uint32_t* out_bind_info_count_ptr, + const VkBindSparseInfo** out_bind_info_ptrs_ptr, + Anvil::Fence** out_fence_to_set_ptr_ptr); + + /** Retrieves details of buffer memory binding updates, cached for user-specified bind info. + * + * @param in_bind_info_id ID of the bind info, which owns the update, whose properties are + * being queried. + * @param in_n_update Index of the buffer memory update to retrieve properties of. + * @param out_opt_buffer_ptr If not NULL, deref will be set to the buffer, whose sparse memory + * binding should be updated. Otherwise ignored. + * @param out_opt_buffer_memory_start_offset_ptr If not NULL, deref will be set to the start offset of the buffer, + * at which the memory block should be bound. Otherwise ignored. + * @param out_opt_memory_block_ptr If not NULL, deref will be set to the memory block, which should + * be used for the binding. Otherwise ignored. + * @param out_opt_memory_block_start_offset_ptr If not NULL, deref will be set to the start offset of the memory block, + * from which the memory region, which should be used for the binding, + * starts. Otherwise ignored. + * @param out_opt_memory_block_owned_by_buffer_ptr TODO. + * @param out_opt_size_ptr If not NULL, deref will be set to the size of the memory region, + * which should be used for the binding. Otherwise ignored. + * + * @return true if successful, false otherwise. + **/ + bool get_buffer_memory_update_properties(SparseMemoryBindInfoID in_bind_info_id, + uint32_t in_n_update, + Anvil::Buffer** out_opt_buffer_ptr_ptr, + VkDeviceSize* out_opt_buffer_memory_start_offset_ptr, + Anvil::MemoryBlock** out_opt_memory_block_ptr_ptr, + VkDeviceSize* out_opt_memory_block_start_offset_ptr, + bool* out_opt_memory_block_owned_by_buffer_ptr, + VkDeviceSize* out_opt_size_ptr) const; + + /** Returns memory & resource device indices assigned to this batch */ + bool get_device_indices(SparseMemoryBindInfoID in_bind_info_id, + uint32_t* out_opt_resource_device_index_ptr, + uint32_t* out_opt_memory_device_index_ptr) const; + + /** Retrieves the fence, if one was earlier assigned to the instance */ + const Anvil::Fence* get_fence() const + { + return m_fence_ptr; + } + + /** Retrieves properties of a non-opaque image memory update with a given ID. 
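A minimal sketch of handing the baked arguments to vkQueueBindSparse(), as described above for get_bind_sparse_call_args(). The helper name is illustrative, error handling is omitted, and the fence reported through the last out-argument is ignored here in favour of VK_NULL_HANDLE:

#include <vulkan/vulkan.h>

/* Sketch: submitting the baked bind infos on a sparse-capable queue. */
static VkResult submit_sparse_bindings(Anvil::SparseMemoryBindingUpdateInfo& io_update_info,
                                       VkQueue                               in_sparse_capable_queue)
{
    uint32_t                n_bind_infos   = 0;
    const VkBindSparseInfo* bind_infos_ptr = nullptr;
    Anvil::Fence*           fence_ptr      = nullptr;

    /* Triggers baking if the container is marked as dirty. */
    io_update_info.get_bind_sparse_call_args(&n_bind_infos,
                                             &bind_infos_ptr,
                                             &fence_ptr);

    return vkQueueBindSparse(in_sparse_capable_queue,
                             n_bind_infos,
                             bind_infos_ptr,
                             VK_NULL_HANDLE);
}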
+ * + * @param in_bind_info_id ID of the bind info, which owns the update, and whose properties are + * being queried. + * @param in_n_update Index of the image memory update to retrieve properties of. + * @param out_opt_image_ptr_ptr If not NULL, deref will be set to the image which should be updated. + * Otherwise ignored. + * @param out_opt_subresouce_ptr If not NULL, deref will be set to the subresource to be used for the + * update. Otherwise ignored. + * @param out_opt_offset_ptr If not NULL, deref will be set to image start offset, at which + * the memory block should be bound. Otherwise ignored. + * @param out_opt_extent_ptr If not NULL, deref will be set to the extent of the update. Otherwise + * ignored. + * @param out_opt_flags_ptr If not NULL, deref will be set to Anvil::SparseMemoryBindFlags value which + * is going to be used for the update. Otherwise ignored. + * @param out_opt_memory_block_ptr_ptr If not NULL, deref will be set to pointer to the memory block, which + * is going to be used for the bind operation. Otherwise ignored. + * @param out_opt_memory_block_start_offset_ptr If not NULL, deref will be set to the start offset of the memory block, + * which should be used for the binding operation. Otherwise ignored. + * @param out_opt_memory_block_owned_by_image_ptr TODO + * + * @return true if successful, false otherwise. + **/ + bool get_image_memory_update_properties(SparseMemoryBindInfoID in_bind_info_id, + uint32_t in_n_update, + Anvil::Image** out_opt_image_ptr_ptr, + Anvil::ImageSubresource* out_opt_subresource_ptr, + VkOffset3D* out_opt_offset_ptr, + VkExtent3D* out_opt_extent_ptr, + Anvil::SparseMemoryBindFlags* out_opt_flags_ptr, + Anvil::MemoryBlock** out_opt_memory_block_ptr_ptr, + VkDeviceSize* out_opt_memory_block_start_offset_ptr, + bool* out_opt_memory_block_owned_by_image_ptr) const; + + /** Retrieves properties of an opaque image memory updated with a given ID. + * + * @param in_bind_info_id ID of the bind info, which owns the update, and whose properties are being + * queried. + * @param in_n_update Index of the opaque image memory update to retrieve properties of. + * @param out_opt_image_ptr_ptr If not NULL, deref will be set to the image which should be updated. Otherwise + * ignored. + * @param out_opt_resource_offset_ptr If not NULL, deref will be set to the raw image memory offset, which should + * be used for the update. Otherwise ignored. + * @param out_opt_size_ptr If not NULL, deref will be set to the size of the image memory which should + * be used for the update. Otherwise ignored. + * @param out_opt_flags_ptr If not NULL, deref will be set to the Anvil::SparseMemoryBindFlags value which is + * going to be used for the update. Otherwise igfnored. + * @param out_opt_memory_block_ptr_ptr If not NULL, deref will be set to pointer to the memory block, which is going + * to be used for the bind operation. Otherwise ignored. + * @param out_opt_memory_block_start_offset_ptr If not NULL, deref will be set to the start offset of the memory block, which + * should be used for the binding operation. Otherwise ignored. + * @param out_opt_memory_block_owned_by_image_ptr TODO + * @param out_opt_n_plane_ptr If not NULL, deref will be set to index of the image plane to use for the binding + * operation. Otherwise ignored. + * + * @return true if successful, false otherwise. 
+ */ + bool get_image_opaque_memory_update_properties(SparseMemoryBindInfoID in_bind_info_id, + uint32_t in_n_update, + Anvil::Image** out_opt_image_ptr_ptr, + VkDeviceSize* out_opt_resource_offset_ptr, + VkDeviceSize* out_opt_size_ptr, + Anvil::SparseMemoryBindFlags* out_opt_flags_ptr, + Anvil::MemoryBlock** out_opt_memory_block_ptr_ptr, + VkDeviceSize* out_opt_memory_block_start_offset_ptr, + bool* out_opt_memory_block_owned_by_image_ptr, + uint32_t* out_opt_n_plane_ptr) const; + + /** Tells how many bind info items have been assigned to the descriptor */ + uint32_t get_n_bind_info_items() const + { + return static_cast(m_bindings.size() ); + } + + /* Tells whether this instance requires VK_KHR_device_group extension in order for the vkQueueBindSparse() + * invocation. */ + bool is_device_group_support_required() const; + + /* Changes the fence (null by default), which should be set by the Vulkan implementation after it finishes + * updating the bindings. + **/ + void set_fence(Anvil::Fence* in_fence_ptr) + { + m_fence_ptr = in_fence_ptr; + } + + /* Updates memory device index, associated with this batch. + * + * Do not modify unless VK_KHR_device_group is supported + * + * @param in_memory_device_index New memory device index to use. + * + **/ + void set_memory_device_index(SparseMemoryBindInfoID in_bind_info_id, + const uint32_t& in_memory_device_index); + + /* Updates resource device index, associated with this batch. + * + * Do not modify unless VK_KHR_device_group is supported + * + * @param in_resource_device_index New resource device index to use. + * + **/ + void set_resource_device_index(SparseMemoryBindInfoID in_bind_info_id, + const uint32_t& in_resource_device_index); + + private: + /* Private type definitions */ + typedef struct GeneralBindInfo + { + bool memory_block_owned_by_target; + Anvil::MemoryBlock* memory_block_ptr; + VkDeviceSize memory_block_start_offset; + uint32_t n_plane; + VkDeviceSize size; + VkDeviceSize start_offset; + + SparseMemoryBindFlags flags; + + GeneralBindInfo() + { + memory_block_owned_by_target = false; + memory_block_ptr = nullptr; + memory_block_start_offset = 0; + n_plane = 0; + size = 0; + start_offset = 0; + } + } GeneralBindInfo; + + typedef struct ImageBindInfo + { + VkExtent3D extent; + bool memory_block_owned_by_image; + Anvil::MemoryBlock* memory_block_ptr; + VkDeviceSize memory_block_start_offset; + VkOffset3D offset; + Anvil::ImageSubresource subresource; + + SparseMemoryBindFlags flags; + + ImageBindInfo() + { + memory_block_owned_by_image = false; + memory_block_ptr = nullptr; + memory_block_start_offset = UINT32_MAX; + offset.x = 0; + offset.y = 0; + offset.z = 0; + } + } ImageBindInfo; + + typedef std::map, std::vector >> BufferBindUpdateMap; + typedef std::map, std::vector >> ImageBindUpdateMap; + typedef std::map, std::vector >> ImageOpaqueBindUpdateMap; + + typedef struct BindingInfo + { + BufferBindUpdateMap buffer_updates; + ImageOpaqueBindUpdateMap image_opaque_updates; + ImageBindUpdateMap image_updates; + + uint32_t memory_device_index; + uint32_t resource_device_index; + + std::vector signal_semaphores; + std::vector signal_semaphores_vk; + std::vector wait_semaphores; + std::vector wait_semaphores_vk; + + BindingInfo() + { + memory_device_index = 0; + resource_device_index = 0; + } + } BindingInfo; + + std::vector m_bindings; + bool m_dirty; + Anvil::Fence* m_fence_ptr; + + std::vector m_bindings_vk; + std::vector m_buffer_bindings_vk; + std::vector m_device_group_bindings_vk; + std::vector m_image_bindings_vk; + 
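+        /* NOTE: The m_*_vk vectors cache the Vulkan-side mirrors of m_bindings. They are presumably
+         *       repopulated by bake() whenever the container has been marked as dirty, and
+         *       get_bind_sparse_call_args() hands out pointers into them for the vkQueueBindSparse() call.
+         */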
std::vector m_image_opaque_bindings_vk; + + /* Private functions */ + SparseMemoryBindingUpdateInfo (const SparseMemoryBindingUpdateInfo&); + SparseMemoryBindingUpdateInfo operator=(const SparseMemoryBindingUpdateInfo&); + + void bake(); + }; +}; /* namespace Anvil */ + +#endif /* TYPES_CLASSES_H */ \ No newline at end of file diff --git a/include/misc/types_enums.h b/include/misc/types_enums.h new file mode 100644 index 00000000..9d000ea6 --- /dev/null +++ b/include/misc/types_enums.h @@ -0,0 +1,2020 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef TYPES_ENUMS_H +#define TYPES_ENUMS_H + +#ifdef IGNORE + #undef IGNORE +#endif + +#ifdef STRICT + #undef STRICT +#endif + +namespace Anvil +{ + template + class Bitfield + { + public: + Bitfield() + :m_value(static_cast(0) ) + { + /* Stub */ + } + + Bitfield(const IndividualBitEnumType& in_bit) + :m_value(static_cast(in_bit) ) + { + /* Stub */ + } + + Bitfield(const Bitfield& in_bits) + :m_value(in_bits.m_value) + { + /* Stub */ + } + + inline Bitfield operator&(const IndividualBitEnumType& in_bit) const + { + auto result = Bitfield(static_cast(m_value & static_cast(in_bit) )); + + return result; + } + + inline Bitfield operator&(const IndividualBitEnumType& in_bit) + { + auto result = Bitfield(static_cast(m_value & static_cast(in_bit))); + + return result; + } + +#if 0 + inline Bitfield operator&(const Bitfield& in_bits) const + { + auto result = Bitfield(static_cast(static_cast(m_value) & static_cast(in_bits.m_value) )); + + return result; + } + + + inline Bitfield operator&(const Bitfield& in_bits) + { + auto result = Bitfield(static_cast(static_cast(m_value) & static_cast(in_bits.m_value) )); + + return result; + } +#endif + + inline Bitfield& operator|=(const IndividualBitEnumType& in_bit) + { + m_value |= static_cast(in_bit); + + return *this; + } + + inline Bitfield& operator|=(const Bitfield& in_bits) + { + m_value |= static_cast(in_bits.m_value); + + return *this; + } + + inline Bitfield& operator&=(const IndividualBitEnumType& in_bit) + { + m_value &= static_cast(in_bit); + + return *this; + } + + inline Bitfield& operator&=(const Bitfield& in_bits) + { + m_value &= static_cast(in_bits.m_value); + + return *this; + } + + inline Bitfield& operator=(const IndividualBitEnumType& in_bit) + { + m_value = static_cast(in_bit); + + return *this; + } + + inline Bitfield& operator=(const Bitfield& in_bits) + { + m_value = 
static_cast(in_bits.m_value); + + return *this; + } + + inline bool operator!=(const int& in_value) const + { + return m_value != static_cast(in_value); + } + + inline bool operator!=(const Bitfield& in_bitfield) const + { + return (m_value != in_bitfield.m_value); + } + + inline bool operator!=(const IndividualBitEnumType& in_bit) const + { + return (m_value != static_cast(in_bit) ); + } + + inline bool operator==(const int& in_value) const + { + return m_value == static_cast(in_value); + } + + inline bool operator==(const Bitfield& in_bitfield) const + { + return (m_value == in_bitfield.m_value); + } + + inline bool operator==(const IndividualBitEnumType& in_bit) const + { + return (m_value == static_cast(in_bit) ); + } + + inline bool operator<(const Bitfield& in_bitfield) const + { + return (m_value < in_bitfield.m_value); + } + + inline bool operator<(const IndividualBitEnumType& in_bit) const + { + return (m_value < static_cast(in_bit) ); + } + + inline bool operator<=(const Bitfield& in_bitfield) const + { + return (m_value <= in_bitfield.m_value); + } + + inline bool operator<=(const IndividualBitEnumType& in_bit) const + { + return (m_value <= static_cast(in_bit) ); + } + + inline bool operator>=(const Bitfield& in_bitfield) const + { + return (m_value >= in_bitfield.m_value); + } + + inline bool operator>=(const IndividualBitEnumType& in_bit) const + { + return (m_value >= static_cast(in_bit) ); + } + + inline const VKFlagsType& get_vk() const + { + return m_value; + } + + inline const VKFlagsType* get_vk_ptr() const + { + return &m_value; + } + + private: + VKFlagsType m_value; + }; + + #define INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(BitfieldType, VKFlagsType, IndividualBitEnumType) \ + IndividualBitEnumType operator&(const IndividualBitEnumType& in_val1, const IndividualBitEnumType& in_val2); \ + BitfieldType operator&(const BitfieldType& in_val1, const BitfieldType& in_val2); \ + IndividualBitEnumType operator|(const IndividualBitEnumType& in_val1, const IndividualBitEnumType& in_val2); \ + BitfieldType operator|(const BitfieldType& in_val1, const BitfieldType& in_val2); \ + IndividualBitEnumType operator~(const IndividualBitEnumType& in_val1); \ + BitfieldType operator~(const BitfieldType& in_val1); + + #define INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(BitfieldType, VKFlagsType, IndividualBitEnumType) \ + IndividualBitEnumType Anvil::operator&(const IndividualBitEnumType& in_val1, const IndividualBitEnumType& in_val2) \ + { \ + IndividualBitEnumType result = static_cast(static_cast(in_val1) & static_cast(in_val2) ); \ + \ + return result; \ + } \ + BitfieldType Anvil::operator&(const BitfieldType& in_val1, const BitfieldType& in_val2) \ + { \ + BitfieldType result = BitfieldType(static_cast(static_cast(in_val1.get_vk() ) & static_cast(in_val2.get_vk() )) ); \ + \ + return result; \ + } \ + IndividualBitEnumType Anvil::operator|(const IndividualBitEnumType& in_val1, const IndividualBitEnumType& in_val2) \ + { \ + IndividualBitEnumType result = static_cast(static_cast(in_val1) | static_cast(in_val2) ); \ + \ + return result; \ + } \ + BitfieldType Anvil::operator|(const BitfieldType& in_val1, const BitfieldType& in_val2) \ + { \ + BitfieldType result = BitfieldType(static_cast(static_cast(in_val1.get_vk() ) | static_cast(in_val2.get_vk() )) ); \ + \ + return result; \ + } \ + IndividualBitEnumType Anvil::operator~(const IndividualBitEnumType& in_val1) \ + { \ + IndividualBitEnumType result = static_cast(~static_cast(in_val1) ); \ + \ + return result; \ + } + + /* NOTE: These 
map 1:1 to VK equivalents */ + enum class AccessFlagBits + { + COLOR_ATTACHMENT_READ_BIT = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, + COLOR_ATTACHMENT_WRITE_BIT = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, + DEPTH_STENCIL_ATTACHMENT_READ_BIT = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT, + DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, + HOST_READ_BIT = VK_ACCESS_HOST_READ_BIT, + HOST_WRITE_BIT = VK_ACCESS_HOST_WRITE_BIT, + INDEX_READ_BIT = VK_ACCESS_INDEX_READ_BIT, + INDIRECT_COMMAND_READ_BIT = VK_ACCESS_INDIRECT_COMMAND_READ_BIT, + INPUT_ATTACHMENT_READ_BIT = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, + MEMORY_READ_BIT = VK_ACCESS_MEMORY_READ_BIT, + MEMORY_WRITE_BIT = VK_ACCESS_MEMORY_WRITE_BIT, + SHADER_READ_BIT = VK_ACCESS_SHADER_READ_BIT, + SHADER_WRITE_BIT = VK_ACCESS_SHADER_WRITE_BIT, + TRANSFER_READ_BIT = VK_ACCESS_TRANSFER_READ_BIT, + TRANSFER_WRITE_BIT = VK_ACCESS_TRANSFER_WRITE_BIT, + UNIFORM_READ_BIT = VK_ACCESS_UNIFORM_READ_BIT, + VERTEX_ATTRIBUTE_READ_BIT = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, + + /* VK_EXT_transform_feedback */ + TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, + TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, + TRANSFORM_FEEDBACK_WRITE_BIT_EXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, + + NONE = 0 + }; + typedef Anvil::Bitfield AccessFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(AccessFlags, VkAccessFlags, AccessFlagBits) + + enum class APIVersion + { + /* Vulkan 1.0 */ + _1_0, + + /* Vulkan 1.1 */ + _1_1, + + UNKNOWN + }; + + /* NOTE: These map 1:1 to VK equivalents */ + enum class AttachmentLoadOp + { + CLEAR = VK_ATTACHMENT_LOAD_OP_CLEAR, + DONT_CARE = VK_ATTACHMENT_LOAD_OP_DONT_CARE, + LOAD = VK_ATTACHMENT_LOAD_OP_LOAD, + + UNKNOWN = VK_ATTACHMENT_LOAD_OP_MAX_ENUM + }; + + /* NOTE: These map 1:1 to VK equivalents */ + enum class AttachmentStoreOp + { + DONT_CARE = VK_ATTACHMENT_STORE_OP_DONT_CARE, + STORE = VK_ATTACHMENT_STORE_OP_STORE, + + UNKNOWN = VK_ATTACHMENT_STORE_OP_MAX_ENUM + }; + + /* Describes recognized subpass attachment types */ + enum class AttachmentType + { + ATTACHMENT_TYPE_FIRST, + + COLOR = ATTACHMENT_TYPE_FIRST, + DEPTH_STENCIL, + INPUT, + PRESERVE, + RESOLVE, + + COUNT, + UNKNOWN = COUNT + }; + + /* NOTE: These map 1:1 to VK equivalents */ + enum class BlendFactor + { + CONSTANT_ALPHA = VK_BLEND_FACTOR_CONSTANT_ALPHA, + CONSTANT_COLOR = VK_BLEND_FACTOR_CONSTANT_COLOR, + DST_ALPHA = VK_BLEND_FACTOR_DST_ALPHA, + DST_COLOR = VK_BLEND_FACTOR_DST_COLOR, + ONE = VK_BLEND_FACTOR_ONE, + ONE_MINUS_CONSTANT_ALPHA = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA, + ONE_MINUS_CONSTANT_COLOR = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR, + ONE_MINUS_DST_ALPHA = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA, + ONE_MINUS_DST_COLOR = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR, + ONE_MINUS_SRC_ALPHA = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA, + ONE_MINUS_SRC_COLOR = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR, + ONE_MINUS_SRC1_COLOR = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, + ONE_MINUS_SRC1_ALPHA = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA, + SRC_ALPHA = VK_BLEND_FACTOR_SRC_ALPHA, + SRC_ALPHA_SATURATE = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE, + SRC1_ALPHA = VK_BLEND_FACTOR_SRC1_ALPHA, + SRC1_COLOR = VK_BLEND_FACTOR_SRC1_COLOR, + SRC_COLOR = VK_BLEND_FACTOR_SRC_COLOR, + ZERO = VK_BLEND_FACTOR_ZERO, + + UNKNOWN = VK_BLEND_FACTOR_MAX_ENUM + }; + + /* NOTE: These map 1:1 to VK equivalents */ + enum class BlendOp + { + ADD = VK_BLEND_OP_ADD, + MAX = VK_BLEND_OP_MAX, + MIN = 
VK_BLEND_OP_MIN,
+        REVERSE_SUBTRACT = VK_BLEND_OP_REVERSE_SUBTRACT,
+        SUBTRACT         = VK_BLEND_OP_SUBTRACT,
+
+        UNKNOWN = VK_BLEND_OP_MAX_ENUM
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class BorderColor
+    {
+        FLOAT_OPAQUE_BLACK      = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
+        FLOAT_OPAQUE_WHITE      = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
+        FLOAT_TRANSPARENT_BLACK = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
+        INT_OPAQUE_BLACK        = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
+        INT_OPAQUE_WHITE        = VK_BORDER_COLOR_INT_OPAQUE_WHITE,
+        INT_TRANSPARENT_BLACK   = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
+
+        UNKNOWN = VK_BORDER_COLOR_MAX_ENUM
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class BufferCreateFlagBits
+    {
+        SPARSE_ALIASED_BIT   = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT,
+        SPARSE_BINDING_BIT   = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
+        SPARSE_RESIDENCY_BIT = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
+
+        /* Core VK 1.1 */
+        CREATE_PROTECTED_BIT = VK_BUFFER_CREATE_PROTECTED_BIT,
+
+        NONE = 0
+    };
+    typedef Anvil::Bitfield BufferCreateFlags;
+
+    INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(BufferCreateFlags, VkBufferCreateFlags, BufferCreateFlagBits)
+
+    enum class BufferType
+    {
+        ALLOC,
+        NO_ALLOC,
+        NO_ALLOC_CHILD,
+    };
+
+    /* NOTE: These map 1:1 to Vulkan equivalents */
+    enum class BufferUsageFlagBits
+    {
+        /* Core VK 1.0 */
+        INDEX_BUFFER_BIT         = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
+        INDIRECT_BUFFER_BIT      = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT,
+        STORAGE_BUFFER_BIT       = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
+        STORAGE_TEXEL_BUFFER_BIT = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
+        TRANSFER_DST_BIT         = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
+        TRANSFER_SRC_BIT         = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
+        UNIFORM_BUFFER_BIT       = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
+        UNIFORM_TEXEL_BUFFER_BIT = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
+        VERTEX_BUFFER_BIT        = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
+
+        /* VK_EXT_transform_feedback */
+        TRANSFORM_FEEDBACK_BUFFER_BIT_EXT         = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT,
+        TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT,
+
+        NONE = 0
+    };
+    typedef Anvil::Bitfield BufferUsageFlags;
+
+    INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(BufferUsageFlags, VkBufferUsageFlags, BufferUsageFlagBits)
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class ChromaLocation
+    {
+        COSITED_EVEN_KHR = VK_CHROMA_LOCATION_COSITED_EVEN_KHR,
+        MIDPOINT_KHR     = VK_CHROMA_LOCATION_MIDPOINT_KHR,
+
+        UNKNOWN = VK_CHROMA_LOCATION_MAX_ENUM
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class ColorComponentFlagBits
+    {
+        A_BIT = VK_COLOR_COMPONENT_A_BIT,
+        B_BIT = VK_COLOR_COMPONENT_B_BIT,
+        G_BIT = VK_COLOR_COMPONENT_G_BIT,
+        R_BIT = VK_COLOR_COMPONENT_R_BIT,
+
+        NONE = 0
+    };
+    typedef Anvil::Bitfield ColorComponentFlags;
+
+    INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(ColorComponentFlags, VkColorComponentFlags, ColorComponentFlagBits)
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class ColorSpaceKHR
+    {
+        /* VK_KHR_surface */
+        SRGB_NONLINEAR_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
+
+        /* VK_EXT_swapchain_colorspace */
+        DISPLAY_P3_NONLINEAR_EXT    = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT,
+        EXTENDED_SRGB_LINEAR_EXT    = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT,
+        DCI_P3_LINEAR_EXT           = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT,
+        DCI_P3_NONLINEAR_EXT        = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT,
+        BT709_LINEAR_EXT            = VK_COLOR_SPACE_BT709_LINEAR_EXT,
+        BT709_NONLINEAR_EXT         = VK_COLOR_SPACE_BT709_NONLINEAR_EXT,
+        BT2020_LINEAR_EXT           = VK_COLOR_SPACE_BT2020_LINEAR_EXT,
+        HDR10_ST2084_EXT            =
VK_COLOR_SPACE_HDR10_ST2084_EXT, + DOLBYVISION_EXT = VK_COLOR_SPACE_DOLBYVISION_EXT, + HDR10_HLG_EXT = VK_COLOR_SPACE_HDR10_HLG_EXT, + ADOBERGB_LINEAR_EXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT, + ADOBERGB_NONLINEAR_EXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT, + PASS_THROUGH_EXT = VK_COLOR_SPACE_PASS_THROUGH_EXT, + EXTENDED_SRGB_NONLINEAR_EXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT, + + UNKNOWN = VK_COLOR_SPACE_MAX_ENUM_KHR + }; + + /* NOTE: These map 1:1 to VK equivalents */ + enum class CommandPoolCreateFlagBits + { + /* Core VK 1.0 */ + CREATE_RESET_COMMAND_BUFFER_BIT = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, + CREATE_TRANSIENT_BIT = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, + + /* Core VK 1.1 */ + CREATE_PROTECTED_BIT = VK_COMMAND_POOL_CREATE_PROTECTED_BIT, + + NONE = 0 + }; + typedef Anvil::Bitfield CommandPoolCreateFlags; + + /* Note: These map 1:1 to VK equivalents. */ + enum class ComponentSwizzle + { + A = VK_COMPONENT_SWIZZLE_A, + B = VK_COMPONENT_SWIZZLE_B, + G = VK_COMPONENT_SWIZZLE_G, + IDENTITY = VK_COMPONENT_SWIZZLE_IDENTITY, + ONE = VK_COMPONENT_SWIZZLE_ONE, + R = VK_COMPONENT_SWIZZLE_R, + ZERO = VK_COMPONENT_SWIZZLE_ZERO, + }; + + /* Note: These map 1:1 to VK equivalents. */ + enum class CompositeAlphaFlagBits + { + OPAQUE_BIT_KHR = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR, + PRE_MULTIPLIED_BIT_KHR = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR, + POST_MULTIPLIED_BIT_KHR = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR, + INHERIT_BIT_KHR = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR, + + NONE = 0, + }; + typedef Anvil::Bitfield CompositeAlphaFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(CompositeAlphaFlags, VkCompositeAlphaFlagsKHR, CompositeAlphaFlagBits) + +/* Note: These map 1:1 to VK equivalents. */ + enum class DebugMessageSeverityFlagBits + { + ERROR_BIT = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT, + INFO_BIT = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT, + VERBOSE_BIT = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT, + WARNING_BIT = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT, + + NONE = 0 + }; + typedef Anvil::Bitfield DebugMessageSeverityFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(DebugMessageSeverityFlags, VkDebugUtilsMessageSeverityFlagsEXT, DebugMessageSeverityFlagBits) + + /* Note: These map 1:1 to VK equivalents. */ + enum class DebugMessageTypeFlagBits + { + GENERAL_BIT = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT, + PERFORMANCE_BIT = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT, + VALIDATION_BIT = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT, + + NONE = 0 + }; + typedef Anvil::Bitfield DebugMessageTypeFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(DebugMessageTypeFlags, VkDebugUtilsMessageTypeFlagsEXT, DebugMessageTypeFlagBits) + + /* Note: These map 1:1 to VK equivalents. 
*/ + enum class DependencyFlagBits + { + /* Core VK 1.0 */ + BY_REGION_BIT = VK_DEPENDENCY_BY_REGION_BIT, + + /* KHR_device_group */ + DEVICE_GROUP_BIT = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR, + + /* KHR_multiview */ + VIEW_LOCAL_BIT = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR, + + NONE = 0 + }; + typedef Anvil::Bitfield DependencyFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(DependencyFlags, VkDependencyFlags, DependencyFlagBits) + + enum class DeviceGroupPresentModeFlagBits + { + LOCAL_BIT_KHR = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR, + LOCAL_MULTI_DEVICE_BIT_KHR = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR, + REMOTE_BIT_KHR = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR, + SUM_BIT_KHR = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR, + + NONE = 0 + }; + typedef Anvil::Bitfield DeviceGroupPresentModeFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(DeviceGroupPresentModeFlags, VkDeviceGroupPresentModeFlagsKHR, DeviceGroupPresentModeFlagBits) + + /* NOTE: These map 1:1 to VK equivalents */ + enum class DriverIdKHR + { + AMD_PROPRIETARY_KHR = VK_DRIVER_ID_AMD_PROPRIETARY_KHR, + AMD_OPEN_SOURCE_KHR = VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR, + ARM_PROPRIETARY_KHR = VK_DRIVER_ID_ARM_PROPRIETARY_KHR, + IMAGINATION_PROPRIETARY_KHR = VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR, + INTEL_OPEN_SOURCE_MESA_KHR = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR, + INTEL_PROPRIETARY_WINDOWS_KHR = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR, + MESA_RADV_KHR = VK_DRIVER_ID_MESA_RADV_KHR, + NVIDIA_PROPRIETARY_KHR = VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR, + QUALCOMM_PROPRIETARY_KHR = VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR, + + UNKNOWN + }; + + enum class DynamicState + { + /* Core VK 1.0 */ + BLEND_CONSTANTS = VK_DYNAMIC_STATE_BLEND_CONSTANTS, + DEPTH_BIAS = VK_DYNAMIC_STATE_DEPTH_BIAS, + DEPTH_BOUNDS = VK_DYNAMIC_STATE_DEPTH_BOUNDS, + LINE_WIDTH = VK_DYNAMIC_STATE_LINE_WIDTH, + SCISSOR = VK_DYNAMIC_STATE_SCISSOR, + STENCIL_COMPARE_MASK = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, + STENCIL_REFERENCE = VK_DYNAMIC_STATE_STENCIL_REFERENCE, + STENCIL_WRITE_MASK = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, + VIEWPORT = VK_DYNAMIC_STATE_VIEWPORT, + + /* VK_EXT_sample_locations */ + SAMPLE_LOCATIONS_EXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, + }; + + enum class ExternalFenceHandleTypeFlagBits + { + #if defined(_WIN32) + OPAQUE_WIN32_BIT = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR, + OPAQUE_WIN32_KMT_BIT = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR, + #else + OPAQUE_FD_BIT = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR, + SYNC_FD_BIT = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR, + #endif + + NONE = 0, + }; + typedef Anvil::Bitfield ExternalFenceHandleTypeFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(ExternalFenceHandleTypeFlags, VkExternalFenceHandleTypeFlagsKHR, ExternalFenceHandleTypeFlagBits) + + enum class ExternalMemoryHandleTypeFlagBits + { + #if defined(_WIN32) + /* VK_KHR_external_memory_win32 */ + OPAQUE_WIN32_BIT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR, + OPAQUE_WIN32_KMT_BIT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR, + D3D11_TEXTURE_BIT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR, + D3D11_TEXTURE_KMT_BIT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR, + D3D12_HEAP_BIT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR, + D3D12_RESOURCE_BIT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR, + #else + /* VK_KHR_external_memory_fd */ + OPAQUE_FD_BIT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR, + #endif + + /* VK_EXT_external_memory_host 
*/ + HOST_ALLOCATION_BIT_EXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, + HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT, + + NONE = 0, + }; + typedef Anvil::Bitfield ExternalMemoryHandleTypeFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(ExternalMemoryHandleTypeFlags, VkExternalMemoryHandleTypeFlagsKHR, ExternalMemoryHandleTypeFlagBits) + + enum class ExternalSemaphoreHandleTypeFlagBits + { + #if defined(_WIN32) + OPAQUE_WIN32_BIT = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR, + OPAQUE_WIN32_KMT_BIT = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR, + D3D12_FENCE_BIT = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR, + #else + OPAQUE_FD_BIT = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR, + SYNC_FD_BIT = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR, + #endif + + NONE = 0, + }; + typedef Anvil::Bitfield ExternalSemaphoreHandleTypeFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(ExternalSemaphoreHandleTypeFlags, VkExternalSemaphoreHandleTypeFlagsKHR, ExternalSemaphoreHandleTypeFlagBits) + + enum class FormatFeatureFlagBits + { + /* Core VK 1.0 */ + BLIT_DST_BIT = VK_FORMAT_FEATURE_BLIT_DST_BIT, + BLIT_SRC_BIT = VK_FORMAT_FEATURE_BLIT_SRC_BIT, + COLOR_ATTACHMENT_BIT = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT, + COLOR_ATTACHMENT_BLEND_BIT = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, + DEPTH_STENCIL_ATTACHMENT_BIT = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT, + SAMPLED_IMAGE_BIT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT, + SAMPLED_IMAGE_FILTER_LINEAR_BIT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT, + STORAGE_IMAGE_ATOMIC_BIT = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT, + STORAGE_IMAGE_BIT = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT, + STORAGE_TEXEL_BUFFER_ATOMIC_BIT = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT, + STORAGE_TEXEL_BUFFER_BIT = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT, + UNIFORM_TEXEL_BUFFER_BIT = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT, + VERTEX_BUFFER_BIT = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT, + + /* EXT_sampler_filter_minmax */ + SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT, + + /* KHR_maintenance1 */ + TRANSFER_DST_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR, + TRANSFER_SRC_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR, + + /* KHR_sampler_ycbcr_conversion */ + MIDPOINT_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR, + COSITED_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR, + SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR, + SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR, + SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR, + SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR, + DISJOINT_BIT_KHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR, + + NONE = 0 + }; + typedef Anvil::Bitfield FormatFeatureFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(FormatFeatureFlags, VkFormatFeatureFlags, FormatFeatureFlagBits) + + enum class PeerMemoryFeatureFlagBits + { + COPY_DST_BIT = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR, + COPY_SRC_BIT = 
VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHR,
+        GENERIC_DST_BIT = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHR,
+        GENERIC_SRC_BIT = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHR,
+
+        NONE = 0
+    };
+    typedef Anvil::Bitfield PeerMemoryFeatureFlags;
+
+    INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(PeerMemoryFeatureFlags, VkPeerMemoryFeatureFlags, PeerMemoryFeatureFlagBits)
+
+    /* NOTE: Maps 1:1 to VK equivalents */
+    enum class CompareOp
+    {
+        NEVER            = VK_COMPARE_OP_NEVER,
+        LESS             = VK_COMPARE_OP_LESS,
+        EQUAL            = VK_COMPARE_OP_EQUAL,
+        LESS_OR_EQUAL    = VK_COMPARE_OP_LESS_OR_EQUAL,
+        GREATER          = VK_COMPARE_OP_GREATER,
+        NOT_EQUAL        = VK_COMPARE_OP_NOT_EQUAL,
+        GREATER_OR_EQUAL = VK_COMPARE_OP_GREATER_OR_EQUAL,
+        ALWAYS           = VK_COMPARE_OP_ALWAYS,
+
+        UNKNOWN = VK_COMPARE_OP_MAX_ENUM
+    };
+
+    /** Describes the component layout of a format */
+    enum class ComponentLayout
+    {
+        /* NOTE: If the ordering used below needs to be changed, make sure to also update formats.cpp::layout_to_n_components */
+        ABGR,
+        ARGB,
+        B,
+        BGR,
+        BGRA,
+        BGRG,
+        BR,
+        BX,
+        BXGXRXGX,
+        BXRX,
+        D,
+        DS,
+        EBGR,
+        G,
+        GBGR,
+        GX,
+        GXBXGXRX,
+        R,
+        RG,
+        RGB,
+        RGBA,
+        RX,
+        RXGX,
+        RXGXBXAX,
+        S,
+        XD,
+
+        UNKNOWN
+    };
+
+    enum class ConservativeRasterizationModeEXT
+    {
+        DISABLED      = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT,
+        OVERESTIMATE  = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT,
+        UNDERESTIMATE = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT,
+
+        UNKNOWN = VK_CONSERVATIVE_RASTERIZATION_MODE_MAX_ENUM_EXT
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class CullModeFlagBits
+    {
+        BACK_BIT  = VK_CULL_MODE_BACK_BIT,
+        FRONT_BIT = VK_CULL_MODE_FRONT_BIT,
+        NONE      = VK_CULL_MODE_NONE,
+
+        FRONT_AND_BACK = VK_CULL_MODE_FRONT_AND_BACK,
+    };
+    typedef Anvil::Bitfield CullModeFlags;
+
+    INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(CullModeFlags, VkCullModeFlags, CullModeFlagBits)
+
+    /* Note: These map 1:1 to VK equivalents */
+    enum class DescriptorBindingFlagBits
+    {
+        /* When specified for a binding, the binding can be modified after having been bound to a pipeline
+         * in a command buffer, without invalidating that command buffer.
+         * The updated binding becomes visible to following submissions as soon as the update function returns.
+         *
+         * Requires VK_EXT_descriptor_indexing.
+         */
+        UPDATE_AFTER_BIND_BIT = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT,
+
+        /* When specified for a binding, the binding can be modified after having been bound to a pipeline
+         * in a command buffer, as long as it is NOT used by the command buffer. Doing so no longer invalidates
+         * the command buffer.
+         *
+         * Requires VK_EXT_descriptor_indexing.
+         */
+        UPDATE_UNUSED_WHILE_PENDING_BIT = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT,
+
+        /* When specified for a binding, the binding need not be assigned valid descriptor(s), as long as none of
+         * the shader invocations execute an instruction that performs any memory access using the descriptor.
+         *
+         * Requires VK_EXT_descriptor_indexing.
+         */
+        PARTIALLY_BOUND_BIT = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT,
+
+        /* When specified for a binding, the binding gets a variable size which is specified each time a descriptor
+         * set is allocated using this layout. The in_descriptor_array_size field specified at DescriptorSetCreateInfo::add_binding()
+         * call time acts as an upper bound for the number of elements the binding can handle.
+         *
+         * Can only be specified for the last binding in the DS layout.
+         *
+         * Requires VK_EXT_descriptor_indexing.
+         */
+        VARIABLE_DESCRIPTOR_COUNT_BIT = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT,
+
+        NONE = 0
+    };
+    typedef Anvil::Bitfield DescriptorBindingFlags;
+
+    INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(DescriptorBindingFlags, VkDescriptorBindingFlagsEXT, DescriptorBindingFlagBits)
+
+    /* NOTE: Maps 1:1 to VK equivalents */
+    enum class DescriptorPoolCreateFlagBits
+    {
+        /* When set, descriptor set allocations are returned to the pool at release time. */
+        FREE_DESCRIPTOR_SET_BIT = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
+
+        /* When set, descriptor sets allocated from this pool can be created with the DESCRIPTOR_BINDING_FLAG_UPDATE_AFTER_BIND_BIT flag.
+         *
+         * Requires VK_EXT_descriptor_indexing.
+         **/
+        UPDATE_AFTER_BIND_BIT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT,
+
+        NONE = 0
+    };
+    typedef Anvil::Bitfield DescriptorPoolCreateFlags;
+
+    INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(DescriptorPoolCreateFlags, VkDescriptorPoolCreateFlags, DescriptorPoolCreateFlagBits)
+
+    enum class DescriptorSetUpdateMethod
+    {
+        /* Updates dirty DS bindings using vkUpdateDescriptorSet(), which is available on all Vulkan implementations. */
+        CORE,
+
+        /* Updates dirty DS bindings using vkUpdateDescriptorSetWithTemplateKHR(). Templates are cached across update operations,
+         * and are released at DescriptorSet release time.
+         *
+         * This setting is recommended if you are going to be updating the same set of descriptor set bindings more than once.
+         *
+         * Only available on devices supporting the VK_KHR_descriptor_update_template extension.
+         */
+        TEMPLATE,
+    };
+
+    enum class ExtensionAvailability
+    {
+        ENABLE_IF_AVAILABLE,
+        IGNORE,
+        REQUIRE,
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class DescriptorType
+    {
+        /* Core VK 1.0 functionality */
+
+        COMBINED_IMAGE_SAMPLER = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
+        INPUT_ATTACHMENT       = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
+        SAMPLED_IMAGE          = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
+        SAMPLER                = VK_DESCRIPTOR_TYPE_SAMPLER,
+        STORAGE_BUFFER         = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
+        STORAGE_BUFFER_DYNAMIC = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
+        STORAGE_IMAGE          = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
+        STORAGE_TEXEL_BUFFER   = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
+        UNIFORM_BUFFER         = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
+        UNIFORM_BUFFER_DYNAMIC = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
+        UNIFORM_TEXEL_BUFFER   = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
+
+        /* Requires VK_EXT_inline_uniform_block */
+        INLINE_UNIFORM_BLOCK = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT,
+
+        UNKNOWN = VK_DESCRIPTOR_TYPE_MAX_ENUM
+    };
+
+    /** Tells the type of an Anvil::BaseDevice instance */
+    enum class DeviceType
+    {
+        /* BaseDevice is implemented by the SGPUDevice class */
+        SINGLE_GPU,
+
+        /* BaseDevice is implemented by the MGPUDevice class */
+        MULTI_GPU,
+
+        UNKNOWN
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class Filter
+    {
+        /* Core VK 1.0 functionality */
+        LINEAR  = VK_FILTER_LINEAR,
+        NEAREST = VK_FILTER_NEAREST,
+
+        UNKNOWN = VK_FILTER_MAX_ENUM
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class Format
+    {
+        R4G4_UNORM_PACK8      = VK_FORMAT_R4G4_UNORM_PACK8,
+        R4G4B4A4_UNORM_PACK16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16,
+        B4G4R4A4_UNORM_PACK16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16,
+        R5G6B5_UNORM_PACK16   = VK_FORMAT_R5G6B5_UNORM_PACK16,
+        B5G6R5_UNORM_PACK16   = VK_FORMAT_B5G6R5_UNORM_PACK16,
+        R5G5B5A1_UNORM_PACK16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16,
+        B5G5R5A1_UNORM_PACK16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16,
+        A1R5G5B5_UNORM_PACK16 =
VK_FORMAT_A1R5G5B5_UNORM_PACK16, + R8_UNORM = VK_FORMAT_R8_UNORM, + R8_SNORM = VK_FORMAT_R8_SNORM, + R8_USCALED = VK_FORMAT_R8_USCALED, + R8_SSCALED = VK_FORMAT_R8_SSCALED, + R8_UINT = VK_FORMAT_R8_UINT, + R8_SINT = VK_FORMAT_R8_SINT, + R8_SRGB = VK_FORMAT_R8_SRGB, + R8G8_UNORM = VK_FORMAT_R8G8_UNORM, + R8G8_SNORM = VK_FORMAT_R8G8_SNORM, + R8G8_USCALED = VK_FORMAT_R8G8_USCALED, + R8G8_SSCALED = VK_FORMAT_R8G8_SSCALED, + R8G8_UINT = VK_FORMAT_R8G8_UINT, + R8G8_SINT = VK_FORMAT_R8G8_SINT, + R8G8_SRGB = VK_FORMAT_R8G8_SRGB, + R8G8B8_UNORM = VK_FORMAT_R8G8B8_UNORM, + R8G8B8_SNORM = VK_FORMAT_R8G8B8_SNORM, + R8G8B8_USCALED = VK_FORMAT_R8G8B8_USCALED, + R8G8B8_SSCALED = VK_FORMAT_R8G8B8_SSCALED, + R8G8B8_UINT = VK_FORMAT_R8G8B8_UINT, + R8G8B8_SINT = VK_FORMAT_R8G8B8_SINT, + R8G8B8_SRGB = VK_FORMAT_R8G8B8_SRGB, + B8G8R8_UNORM = VK_FORMAT_B8G8R8_UNORM, + B8G8R8_SNORM = VK_FORMAT_B8G8R8_SNORM, + B8G8R8_USCALED = VK_FORMAT_B8G8R8_USCALED, + B8G8R8_SSCALED = VK_FORMAT_B8G8R8_SSCALED, + B8G8R8_UINT = VK_FORMAT_B8G8R8_UINT, + B8G8R8_SINT = VK_FORMAT_B8G8R8_SINT, + B8G8R8_SRGB = VK_FORMAT_B8G8R8_SRGB, + R8G8B8A8_UNORM = VK_FORMAT_R8G8B8A8_UNORM, + R8G8B8A8_SNORM = VK_FORMAT_R8G8B8A8_SNORM, + R8G8B8A8_USCALED = VK_FORMAT_R8G8B8A8_USCALED, + R8G8B8A8_SSCALED = VK_FORMAT_R8G8B8A8_SSCALED, + R8G8B8A8_UINT = VK_FORMAT_R8G8B8A8_UINT, + R8G8B8A8_SINT = VK_FORMAT_R8G8B8A8_SINT, + R8G8B8A8_SRGB = VK_FORMAT_R8G8B8A8_SRGB, + B8G8R8A8_UNORM = VK_FORMAT_B8G8R8A8_UNORM, + B8G8R8A8_SNORM = VK_FORMAT_B8G8R8A8_SNORM, + B8G8R8A8_USCALED = VK_FORMAT_B8G8R8A8_USCALED, + B8G8R8A8_SSCALED = VK_FORMAT_B8G8R8A8_SSCALED, + B8G8R8A8_UINT = VK_FORMAT_B8G8R8A8_UINT, + B8G8R8A8_SINT = VK_FORMAT_B8G8R8A8_SINT, + B8G8R8A8_SRGB = VK_FORMAT_B8G8R8A8_SRGB, + A8B8G8R8_UNORM_PACK32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32, + A8B8G8R8_SNORM_PACK32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32, + A8B8G8R8_USCALED_PACK32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32, + A8B8G8R8_SSCALED_PACK32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32, + A8B8G8R8_UINT_PACK32 = VK_FORMAT_A8B8G8R8_UINT_PACK32, + A8B8G8R8_SINT_PACK32 = VK_FORMAT_A8B8G8R8_SINT_PACK32, + A8B8G8R8_SRGB_PACK32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32, + A2R10G10B10_UNORM_PACK32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32, + A2R10G10B10_SNORM_PACK32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32, + A2R10G10B10_USCALED_PACK32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32, + A2R10G10B10_SSCALED_PACK32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32, + A2R10G10B10_UINT_PACK32 = VK_FORMAT_A2R10G10B10_UINT_PACK32, + A2R10G10B10_SINT_PACK32 = VK_FORMAT_A2R10G10B10_SINT_PACK32, + A2B10G10R10_UNORM_PACK32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32, + A2B10G10R10_SNORM_PACK32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32, + A2B10G10R10_USCALED_PACK32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32, + A2B10G10R10_SSCALED_PACK32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32, + A2B10G10R10_UINT_PACK32 = VK_FORMAT_A2B10G10R10_UINT_PACK32, + A2B10G10R10_SINT_PACK32 = VK_FORMAT_A2B10G10R10_SINT_PACK32, + R16_UNORM = VK_FORMAT_R16_UNORM, + R16_SNORM = VK_FORMAT_R16_SNORM, + R16_USCALED = VK_FORMAT_R16_USCALED, + R16_SSCALED = VK_FORMAT_R16_SSCALED, + R16_UINT = VK_FORMAT_R16_UINT, + R16_SINT = VK_FORMAT_R16_SINT, + R16_SFLOAT = VK_FORMAT_R16_SFLOAT, + R16G16_UNORM = VK_FORMAT_R16G16_UNORM, + R16G16_SNORM = VK_FORMAT_R16G16_SNORM, + R16G16_USCALED = VK_FORMAT_R16G16_USCALED, + R16G16_SSCALED = VK_FORMAT_R16G16_SSCALED, + R16G16_UINT = VK_FORMAT_R16G16_UINT, + R16G16_SINT = VK_FORMAT_R16G16_SINT, + R16G16_SFLOAT = VK_FORMAT_R16G16_SFLOAT, + R16G16B16_UNORM = VK_FORMAT_R16G16B16_UNORM, + R16G16B16_SNORM = 
VK_FORMAT_R16G16B16_SNORM, + R16G16B16_USCALED = VK_FORMAT_R16G16B16_USCALED, + R16G16B16_SSCALED = VK_FORMAT_R16G16B16_SSCALED, + R16G16B16_UINT = VK_FORMAT_R16G16B16_UINT, + R16G16B16_SINT = VK_FORMAT_R16G16B16_SINT, + R16G16B16_SFLOAT = VK_FORMAT_R16G16B16_SFLOAT, + R16G16B16A16_UNORM = VK_FORMAT_R16G16B16A16_UNORM, + R16G16B16A16_SNORM = VK_FORMAT_R16G16B16A16_SNORM, + R16G16B16A16_USCALED = VK_FORMAT_R16G16B16A16_USCALED, + R16G16B16A16_SSCALED = VK_FORMAT_R16G16B16A16_SSCALED, + R16G16B16A16_UINT = VK_FORMAT_R16G16B16A16_UINT, + R16G16B16A16_SINT = VK_FORMAT_R16G16B16A16_SINT, + R16G16B16A16_SFLOAT = VK_FORMAT_R16G16B16A16_SFLOAT, + R32_UINT = VK_FORMAT_R32_UINT, + R32_SINT = VK_FORMAT_R32_SINT, + R32_SFLOAT = VK_FORMAT_R32_SFLOAT, + R32G32_UINT = VK_FORMAT_R32G32_UINT, + R32G32_SINT = VK_FORMAT_R32G32_SINT, + R32G32_SFLOAT = VK_FORMAT_R32G32_SFLOAT, + R32G32B32_UINT = VK_FORMAT_R32G32B32_UINT, + R32G32B32_SINT = VK_FORMAT_R32G32B32_SINT, + R32G32B32_SFLOAT = VK_FORMAT_R32G32B32_SFLOAT, + R32G32B32A32_UINT = VK_FORMAT_R32G32B32A32_UINT, + R32G32B32A32_SINT = VK_FORMAT_R32G32B32A32_SINT, + R32G32B32A32_SFLOAT = VK_FORMAT_R32G32B32A32_SFLOAT, + R64_UINT = VK_FORMAT_R64_UINT, + R64_SINT = VK_FORMAT_R64_SINT, + R64_SFLOAT = VK_FORMAT_R64_SFLOAT, + R64G64_UINT = VK_FORMAT_R64G64_UINT, + R64G64_SINT = VK_FORMAT_R64G64_SINT, + R64G64_SFLOAT = VK_FORMAT_R64G64_SFLOAT, + R64G64B64_UINT = VK_FORMAT_R64G64B64_UINT, + R64G64B64_SINT = VK_FORMAT_R64G64B64_SINT, + R64G64B64_SFLOAT = VK_FORMAT_R64G64B64_SFLOAT, + R64G64B64A64_UINT = VK_FORMAT_R64G64B64A64_UINT, + R64G64B64A64_SINT = VK_FORMAT_R64G64B64A64_SINT, + R64G64B64A64_SFLOAT = VK_FORMAT_R64G64B64A64_SFLOAT, + B10G11R11_UFLOAT_PACK32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32, + E5B9G9R9_UFLOAT_PACK32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, + D16_UNORM = VK_FORMAT_D16_UNORM, + X8_D24_UNORM_PACK32 = VK_FORMAT_X8_D24_UNORM_PACK32, + D32_SFLOAT = VK_FORMAT_D32_SFLOAT, + S8_UINT = VK_FORMAT_S8_UINT, + D16_UNORM_S8_UINT = VK_FORMAT_D16_UNORM_S8_UINT, + D24_UNORM_S8_UINT = VK_FORMAT_D24_UNORM_S8_UINT, + D32_SFLOAT_S8_UINT = VK_FORMAT_D32_SFLOAT_S8_UINT, + BC1_RGB_UNORM_BLOCK = VK_FORMAT_BC1_RGB_UNORM_BLOCK, + BC1_RGB_SRGB_BLOCK = VK_FORMAT_BC1_RGB_SRGB_BLOCK, + BC1_RGBA_UNORM_BLOCK = VK_FORMAT_BC1_RGBA_UNORM_BLOCK, + BC1_RGBA_SRGB_BLOCK = VK_FORMAT_BC1_RGBA_SRGB_BLOCK, + BC2_UNORM_BLOCK = VK_FORMAT_BC2_UNORM_BLOCK, + BC2_SRGB_BLOCK = VK_FORMAT_BC2_SRGB_BLOCK, + BC3_UNORM_BLOCK = VK_FORMAT_BC3_UNORM_BLOCK, + BC3_SRGB_BLOCK = VK_FORMAT_BC3_SRGB_BLOCK, + BC4_UNORM_BLOCK = VK_FORMAT_BC4_UNORM_BLOCK, + BC4_SNORM_BLOCK = VK_FORMAT_BC4_SNORM_BLOCK, + BC5_UNORM_BLOCK = VK_FORMAT_BC5_UNORM_BLOCK, + BC5_SNORM_BLOCK = VK_FORMAT_BC5_SNORM_BLOCK, + BC6H_UFLOAT_BLOCK = VK_FORMAT_BC6H_UFLOAT_BLOCK, + BC6H_SFLOAT_BLOCK = VK_FORMAT_BC6H_SFLOAT_BLOCK, + BC7_UNORM_BLOCK = VK_FORMAT_BC7_UNORM_BLOCK, + BC7_SRGB_BLOCK = VK_FORMAT_BC7_SRGB_BLOCK, + ETC2_R8G8B8_UNORM_BLOCK = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, + ETC2_R8G8B8_SRGB_BLOCK = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK, + ETC2_R8G8B8A1_UNORM_BLOCK = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, + ETC2_R8G8B8A1_SRGB_BLOCK = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, + ETC2_R8G8B8A8_UNORM_BLOCK = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, + ETC2_R8G8B8A8_SRGB_BLOCK = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, + EAC_R11_UNORM_BLOCK = VK_FORMAT_EAC_R11_UNORM_BLOCK, + EAC_R11_SNORM_BLOCK = VK_FORMAT_EAC_R11_SNORM_BLOCK, + EAC_R11G11_UNORM_BLOCK = VK_FORMAT_EAC_R11G11_UNORM_BLOCK, + EAC_R11G11_SNORM_BLOCK = VK_FORMAT_EAC_R11G11_SNORM_BLOCK, + ASTC_4x4_UNORM_BLOCK = 
VK_FORMAT_ASTC_4x4_UNORM_BLOCK, + ASTC_4x4_SRGB_BLOCK = VK_FORMAT_ASTC_4x4_SRGB_BLOCK, + ASTC_5x4_UNORM_BLOCK = VK_FORMAT_ASTC_5x4_UNORM_BLOCK, + ASTC_5x4_SRGB_BLOCK = VK_FORMAT_ASTC_5x4_SRGB_BLOCK, + ASTC_5x5_UNORM_BLOCK = VK_FORMAT_ASTC_5x5_UNORM_BLOCK, + ASTC_5x5_SRGB_BLOCK = VK_FORMAT_ASTC_5x5_SRGB_BLOCK, + ASTC_6x5_UNORM_BLOCK = VK_FORMAT_ASTC_6x5_UNORM_BLOCK, + ASTC_6x5_SRGB_BLOCK = VK_FORMAT_ASTC_6x5_SRGB_BLOCK, + ASTC_6x6_UNORM_BLOCK = VK_FORMAT_ASTC_6x6_UNORM_BLOCK, + ASTC_6x6_SRGB_BLOCK = VK_FORMAT_ASTC_6x6_SRGB_BLOCK, + ASTC_8x5_UNORM_BLOCK = VK_FORMAT_ASTC_8x5_UNORM_BLOCK, + ASTC_8x5_SRGB_BLOCK = VK_FORMAT_ASTC_8x5_SRGB_BLOCK, + ASTC_8x6_UNORM_BLOCK = VK_FORMAT_ASTC_8x6_UNORM_BLOCK, + ASTC_8x6_SRGB_BLOCK = VK_FORMAT_ASTC_8x6_SRGB_BLOCK, + ASTC_8x8_UNORM_BLOCK = VK_FORMAT_ASTC_8x8_UNORM_BLOCK, + ASTC_8x8_SRGB_BLOCK = VK_FORMAT_ASTC_8x8_SRGB_BLOCK, + ASTC_10x5_UNORM_BLOCK = VK_FORMAT_ASTC_10x5_UNORM_BLOCK, + ASTC_10x5_SRGB_BLOCK = VK_FORMAT_ASTC_10x5_SRGB_BLOCK, + ASTC_10x6_UNORM_BLOCK = VK_FORMAT_ASTC_10x6_UNORM_BLOCK, + ASTC_10x6_SRGB_BLOCK = VK_FORMAT_ASTC_10x6_SRGB_BLOCK, + ASTC_10x8_UNORM_BLOCK = VK_FORMAT_ASTC_10x8_UNORM_BLOCK, + ASTC_10x8_SRGB_BLOCK = VK_FORMAT_ASTC_10x8_SRGB_BLOCK, + ASTC_10x10_UNORM_BLOCK = VK_FORMAT_ASTC_10x10_UNORM_BLOCK, + ASTC_10x10_SRGB_BLOCK = VK_FORMAT_ASTC_10x10_SRGB_BLOCK, + ASTC_12x10_UNORM_BLOCK = VK_FORMAT_ASTC_12x10_UNORM_BLOCK, + ASTC_12x10_SRGB_BLOCK = VK_FORMAT_ASTC_12x10_SRGB_BLOCK, + ASTC_12x12_UNORM_BLOCK = VK_FORMAT_ASTC_12x12_UNORM_BLOCK, + ASTC_12x12_SRGB_BLOCK = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, + + /* Requires VK_KHR_sampler_ycbcr_conversion */ + G8B8G8R8_422_UNORM = VK_FORMAT_G8B8G8R8_422_UNORM_KHR, + B8G8R8G8_422_UNORM = VK_FORMAT_B8G8R8G8_422_UNORM_KHR, + G8_B8_R8_3PLANE_420_UNORM = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR, + G8_B8R8_2PLANE_420_UNORM = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR, + G8_B8_R8_3PLANE_422_UNORM = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR, + G8_B8R8_2PLANE_422_UNORM = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR, + G8_B8_R8_3PLANE_444_UNORM = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR, + R10X6_UNORM_PACK16 = VK_FORMAT_R10X6_UNORM_PACK16_KHR, + R10X6G10X6_UNORM_2PACK16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR, + R10X6G10X6B10X6A10X6_UNORM_4PACK16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR, + G10X6B10X6G10X6R10X6_422_UNORM_4PACK16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR, + B10X6G10X6R10X6G10X6_422_UNORM_4PACK16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR, + G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR, + G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR, + G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR, + G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR, + G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR, + R12X4_UNORM_PACK16 = VK_FORMAT_R12X4_UNORM_PACK16_KHR, + R12X4G12X4_UNORM_2PACK16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR, + R12X4G12X4B12X4A12X4_UNORM_4PACK16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR, + G12X4B12X4G12X4R12X4_422_UNORM_4PACK16 = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR, + B12X4G12X4R12X4G12X4_422_UNORM_4PACK16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR, + G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR, + 
G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR, + G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR, + G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR, + G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR, + G16B16G16R16_422_UNORM = VK_FORMAT_G16B16G16R16_422_UNORM_KHR, + B16G16R16G16_422_UNORM = VK_FORMAT_B16G16R16G16_422_UNORM_KHR, + G16_B16_R16_3PLANE_420_UNORM = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR, + G16_B16R16_2PLANE_420_UNORM = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR, + G16_B16_R16_3PLANE_422_UNORM = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR, + G16_B16R16_2PLANE_422_UNORM = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR, + G16_B16_R16_3PLANE_444_UNORM = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR, + + /* Other .. */ + UNKNOWN = VK_FORMAT_UNDEFINED, + }; + + + enum class FormatType + { + SFLOAT, + SFLOAT_UINT, + SINT, + SNORM, + SRGB, + SSCALED, + UFLOAT, + UINT, + UNORM, + UNORM_UINT, + USCALED, + + UNKNOWN, + COUNT = UNKNOWN + } ; + + enum class FrontFace + { + CLOCKWISE = VK_FRONT_FACE_CLOCKWISE, + COUNTER_CLOCKWISE = VK_FRONT_FACE_COUNTER_CLOCKWISE, + + UNKNOWN = VK_FRONT_FACE_MAX_ENUM + }; + + /* NOTE: These map 1:1 to VK equivalents */ + enum class ImageAspectFlagBits + { + /* Core VK 1.0 aspects */ + COLOR_BIT = VK_IMAGE_ASPECT_COLOR_BIT, + DEPTH_BIT = VK_IMAGE_ASPECT_DEPTH_BIT, + METADATA_BIT = VK_IMAGE_ASPECT_METADATA_BIT, + STENCIL_BIT = VK_IMAGE_ASPECT_STENCIL_BIT, + + /* VK_KHR_sampler_ycbcr_conversion aspects */ + PLANE_0_BIT = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR, + PLANE_1_BIT = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR, + PLANE_2_BIT = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR, + + NONE = 0 + }; + typedef Anvil::Bitfield ImageAspectFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(ImageAspectFlags, VkImageAspectFlags, ImageAspectFlagBits) + + /* NOTE: These map 1:1 to VK equivalents */ + enum class ImageCreateFlagBits + { + /* Core VK 1.0 stuff */ + CUBE_COMPATIBLE_BIT = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, + MUTABLE_FORMAT_BIT = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, + SPARSE_ALIASED_BIT = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT, + SPARSE_BINDING_BIT = VK_IMAGE_CREATE_SPARSE_BINDING_BIT, + SPARSE_RESIDENCY_BIT = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, + + /* NOTE: Requires VK_EXT_sample_locations */ + SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT, + + /* NOTE: Requires VK_KHR_bind_memory2 */ + ALIAS_BIT = VK_IMAGE_CREATE_ALIAS_BIT_KHR, + SPLIT_INSTANCE_BIND_REGIONS_BIT = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, + + /* NOTE: Requires VK_KHR_maintenance1 */ + _2D_ARRAY_COMPATIBLE_BIT = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR, + + /* NOTE: Requires VK_KHR_maintenance2 */ + BLOCK_TEXEL_VIEW_COMPATIBLE_BIT = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR, + EXTENDED_USAGE_BIT = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR, + + /* NOTE: Requires VK_KHR_sampler_ycbcr_conversion */ + CREATE_DISJOINT_BIT = VK_IMAGE_CREATE_DISJOINT_BIT_KHR, + + /* Note: Requires core VK 1.1 or newer */ + CREATE_PROTECTED_BIT = VK_IMAGE_CREATE_PROTECTED_BIT, + + NONE = 0 + }; + typedef Anvil::Bitfield ImageCreateFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(ImageCreateFlags, VkImageCreateFlags, ImageCreateFlagBits) + + /* NOTE: These map 1:1 to VK equivalents */ + enum class ImageLayout + { + /* Core VK 1.0 */ + COLOR_ATTACHMENT_OPTIMAL = 
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, + DEPTH_STENCIL_ATTACHMENT_OPTIMAL = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + DEPTH_STENCIL_READ_ONLY_OPTIMAL = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, + GENERAL = VK_IMAGE_LAYOUT_GENERAL, + PREINITIALIZED = VK_IMAGE_LAYOUT_PREINITIALIZED, + SHADER_READ_ONLY_OPTIMAL = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, + TRANSFER_DST_OPTIMAL = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, + TRANSFER_SRC_OPTIMAL = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, + UNDEFINED = VK_IMAGE_LAYOUT_UNDEFINED, + + /* Requires VK_KHR_maintenance2 */ + DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR, + DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR, + + /* Requires VK_KHR_swapchain */ + PRESENT_SRC_KHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, + + UNKNOWN = VK_IMAGE_LAYOUT_MAX_ENUM, + }; + + /* NOTE: These map 1:1 to VK equivalents */ + enum class ImageUsageFlagBits + { + /* Core VK 1.0 usages */ + TRANSFER_DST_BIT = VK_IMAGE_USAGE_TRANSFER_DST_BIT, + TRANSFER_SRC_BIT = VK_IMAGE_USAGE_TRANSFER_SRC_BIT, + SAMPLED_BIT = VK_IMAGE_USAGE_SAMPLED_BIT, + STORAGE_BIT = VK_IMAGE_USAGE_STORAGE_BIT, + COLOR_ATTACHMENT_BIT = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, + DEPTH_STENCIL_ATTACHMENT_BIT = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, + TRANSIENT_ATTACHMENT_BIT = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, + INPUT_ATTACHMENT_BIT = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, + + NONE = 0 + }; + typedef Anvil::Bitfield ImageUsageFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(ImageUsageFlags, VkImageUsageFlags, ImageUsageFlagBits) + + /* NOTE: These map 1:1 to VK equivalents */ + enum class ImageViewType + { + _1D = VK_IMAGE_VIEW_TYPE_1D, + _1D_ARRAY = VK_IMAGE_VIEW_TYPE_1D_ARRAY, + _2D = VK_IMAGE_VIEW_TYPE_2D, + _2D_ARRAY = VK_IMAGE_VIEW_TYPE_2D_ARRAY, + _3D = VK_IMAGE_VIEW_TYPE_3D, + _CUBE = VK_IMAGE_VIEW_TYPE_CUBE, + _CUBE_ARRAY = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, + + UNKNOWN = VK_IMAGE_VIEW_TYPE_MAX_ENUM + }; + + enum class ImageInternalType + { + ALLOC, + NO_ALLOC, + PEER_NO_ALLOC, + SWAPCHAIN_WRAPPER + }; + + /* NOTE: These correspond 1:1 to VK equivalents */ + enum class ImageTiling + { + LINEAR = VK_IMAGE_TILING_LINEAR, + OPTIMAL = VK_IMAGE_TILING_OPTIMAL, + + UNKNOWN = VK_IMAGE_TILING_MAX_ENUM + }; + + /* NOTE: These correspond 1:1 to VK equivalents */ + enum class ImageType + { + _1D = VK_IMAGE_TYPE_1D, + _2D = VK_IMAGE_TYPE_2D, + _3D = VK_IMAGE_TYPE_3D, + + UNKNOWN = VK_IMAGE_TYPE_MAX_ENUM + }; + + /* NOTE: These map 1:1 to VK equivalents */ + enum class IndexType + { + UINT16 = VK_INDEX_TYPE_UINT16, + UINT32 = VK_INDEX_TYPE_UINT32, + + UNKNOWN = VK_INDEX_TYPE_MAX_ENUM + }; + + enum class LogicOp + { + AND = VK_LOGIC_OP_AND, + AND_INVERTED = VK_LOGIC_OP_AND_INVERTED, + AND_REVERSE = VK_LOGIC_OP_AND_REVERSE, + CLEAR = VK_LOGIC_OP_CLEAR, + COPY = VK_LOGIC_OP_COPY, + COPY_INVERTED = VK_LOGIC_OP_COPY_INVERTED, + EQUIVALENT = VK_LOGIC_OP_EQUIVALENT, + INVERT = VK_LOGIC_OP_INVERT, + NAND = VK_LOGIC_OP_NAND, + NO_OP = VK_LOGIC_OP_NO_OP, + NOR = VK_LOGIC_OP_NOR, + OR = VK_LOGIC_OP_OR, + OR_INVERTED = VK_LOGIC_OP_OR_INVERTED, + OR_REVERSE = VK_LOGIC_OP_OR_REVERSE, + SET = VK_LOGIC_OP_SET, + XOR = VK_LOGIC_OP_XOR, + + UNKNOWN = VK_LOGIC_OP_MAX_ENUM + }; + + enum class MemoryBlockType + { + DERIVED, + DERIVED_WITH_CUSTOM_DELETE_PROC, + REGULAR, + REGULAR_WITH_MEMORY_TYPE + }; + + enum class MemoryFeatureFlagBits + { + /* NOTE: If more memory feature flags are added here, make 
sure to also update Anvil::Utils::get_vk_property_flags_from_memory_feature_flags() + * and Anvil::Utils::get_memory_feature_flags_from_vk_property_flags() + */ + + DEVICE_LOCAL_BIT = 1 << 0, + HOST_CACHED_BIT = 1 << 1, + HOST_COHERENT_BIT = 1 << 2, + LAZILY_ALLOCATED_BIT = 1 << 3, + MAPPABLE_BIT = 1 << 4, + MULTI_INSTANCE_BIT = 1 << 5, + + /* Core VK 1.1 only */ + PROTECTED_BIT = 1 << 6, + + NONE = 0 + }; + typedef Anvil::Bitfield MemoryFeatureFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(MemoryFeatureFlags, uint32_t, MemoryFeatureFlagBits) + + /* NOTE: These map 1:1 to VK equivalents */ + enum class MemoryHeapFlagBits + { + /* Core VK 1.0 */ + DEVICE_LOCAL_BIT = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT, + + /* VK_KHR_device_group or core VK 1.1 */ + MULTI_INSTANCE_BIT_KHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR, + + NONE = 0 + }; + typedef Anvil::Bitfield MemoryHeapFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(MemoryHeapFlags, VkMemoryHeapFlags, MemoryHeapFlagBits) + + /* NOTE: Maps 1:1 to VK equivalents */ + enum class MemoryOverallocationBehavior + { + ALLOWED = VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD, + DEFAULT = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD, + DISALLOWED = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD, + + UNKNOWN = VK_MEMORY_OVERALLOCATION_BEHAVIOR_MAX_ENUM_AMD + }; + + /* NOTE: These map 1:1 to VK equivalents */ + enum class MemoryPropertyFlagBits + { + /* Core VK 1.0 */ + DEVICE_LOCAL_BIT = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, + HOST_CACHED_BIT = VK_MEMORY_PROPERTY_HOST_CACHED_BIT, + HOST_COHERENT_BIT = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, + HOST_VISIBLE_BIT = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, + LAZILY_ALLOCATED_BIT = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT, + + /* Core VK 1.1 */ + PROTECTED_BIT = VK_MEMORY_PROPERTY_PROTECTED_BIT, + + NONE = 0 + }; + typedef Anvil::Bitfield MemoryPropertyFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(MemoryPropertyFlags, VkMemoryPropertyFlags, MemoryPropertyFlagBits) + + enum class MTSafety + { + INHERIT_FROM_PARENT_DEVICE, + ENABLED, + DISABLED + }; + + enum class ObjectType + { + /* NOTE: If new entries are added or existing entries are removed, make sure to + * update Anvil::ObjectTracker::get_object_type_name(). 
+ */ + BUFFER = VK_OBJECT_TYPE_BUFFER, + BUFFER_VIEW = VK_OBJECT_TYPE_BUFFER_VIEW, + COMMAND_BUFFER = VK_OBJECT_TYPE_COMMAND_BUFFER, + COMMAND_POOL = VK_OBJECT_TYPE_COMMAND_POOL, + DEBUG_REPORT_CALLBACK = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT, + DEBUG_UTILS_MESSENGER = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT, + DESCRIPTOR_POOL = VK_OBJECT_TYPE_DESCRIPTOR_POOL, + DESCRIPTOR_SET = VK_OBJECT_TYPE_DESCRIPTOR_SET, + DESCRIPTOR_SET_LAYOUT = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, + DESCRIPTOR_UPDATE_TEMPLATE = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + DEVICE = VK_OBJECT_TYPE_DEVICE, + EVENT = VK_OBJECT_TYPE_EVENT, + FENCE = VK_OBJECT_TYPE_FENCE, + FRAMEBUFFER = VK_OBJECT_TYPE_FRAMEBUFFER, + IMAGE = VK_OBJECT_TYPE_IMAGE, + IMAGE_VIEW = VK_OBJECT_TYPE_IMAGE_VIEW, + INSTANCE = VK_OBJECT_TYPE_INSTANCE, + PHYSICAL_DEVICE = VK_OBJECT_TYPE_PHYSICAL_DEVICE, + PIPELINE = VK_OBJECT_TYPE_PIPELINE, + PIPELINE_CACHE = VK_OBJECT_TYPE_PIPELINE_CACHE, + PIPELINE_LAYOUT = VK_OBJECT_TYPE_PIPELINE_LAYOUT, + QUERY_POOL = VK_OBJECT_TYPE_QUERY_POOL, + QUEUE = VK_OBJECT_TYPE_QUEUE, + RENDER_PASS = VK_OBJECT_TYPE_RENDER_PASS, + RENDERING_SURFACE = VK_OBJECT_TYPE_SURFACE_KHR, + SAMPLER = VK_OBJECT_TYPE_SAMPLER, + SAMPLER_YCBCR_CONVERSION = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR, + SEMAPHORE = VK_OBJECT_TYPE_SEMAPHORE, + SHADER_MODULE = VK_OBJECT_TYPE_SHADER_MODULE, + SWAPCHAIN = VK_OBJECT_TYPE_SWAPCHAIN_KHR, + + /* Anvil-specific items */ + ANVIL_COMPUTE_PIPELINE_MANAGER = 2'000'000'000 + 1, // 2'000'000'000 is arbitrary and mustn't match any VK_OBJECT_TYPE enum values! + ANVIL_DESCRIPTOR_SET_GROUP, + ANVIL_DESCRIPTOR_SET_LAYOUT_MANAGER, + ANVIL_GLSL_SHADER_TO_SPIRV_GENERATOR, + ANVIL_GRAPHICS_PIPELINE_MANAGER, + ANVIL_MEMORY_BLOCK, + ANVIL_PIPELINE_LAYOUT_MANAGER, + + /* Always last */ + UNKNOWN + }; + + /** Defines, to what extent occlusion queries are going to be used. + * + * Only used for second-level command buffer recording policy declaration. 
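+     *
+     *  (These values are presumably translated into the VkCommandBufferInheritanceInfo fields used when a
+     *   second-level command buffer is begun, e.g. NOT_REQUIRED -> occlusionQueryEnable = VK_FALSE,
+     *   REQUIRED_NONPRECISE -> occlusionQueryEnable = VK_TRUE with queryFlags = 0, and
+     *   REQUIRED_PRECISE -> occlusionQueryEnable = VK_TRUE with queryFlags = VK_QUERY_CONTROL_PRECISE_BIT.)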
+     **/
+    enum class OcclusionQuerySupportScope
+    {
+        /** Occlusion queries are not going to be used */
+        NOT_REQUIRED,
+
+        /** Non-precise occlusion queries may be used */
+        REQUIRED_NONPRECISE,
+
+        /** Precise occlusion queries may be used */
+        REQUIRED_PRECISE,
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class PipelineBindPoint
+    {
+        /* Core VK 1.0 */
+        COMPUTE  = VK_PIPELINE_BIND_POINT_COMPUTE,
+        GRAPHICS = VK_PIPELINE_BIND_POINT_GRAPHICS,
+
+        UNKNOWN = VK_PIPELINE_BIND_POINT_MAX_ENUM
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class PipelineStageFlagBits
+    {
+        /* Core VK 1.0 */
+        ALL_COMMANDS_BIT                   = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+        ALL_GRAPHICS_BIT                   = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
+        BOTTOM_OF_PIPE_BIT                 = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+        COLOR_ATTACHMENT_OUTPUT_BIT        = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+        COMPUTE_SHADER_BIT                 = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
+        DRAW_INDIRECT_BIT                  = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
+        EARLY_FRAGMENT_TESTS_BIT           = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
+        FRAGMENT_SHADER_BIT                = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+        GEOMETRY_SHADER_BIT                = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
+        HOST_BIT                           = VK_PIPELINE_STAGE_HOST_BIT,
+        LATE_FRAGMENT_TESTS_BIT            = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
+        TESSELLATION_CONTROL_SHADER_BIT    = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
+        TESSELLATION_EVALUATION_SHADER_BIT = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
+        TOP_OF_PIPE_BIT                    = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
+        TRANSFER_BIT                       = VK_PIPELINE_STAGE_TRANSFER_BIT,
+        VERTEX_INPUT_BIT                   = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
+        VERTEX_SHADER_BIT                  = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
+
+        /* VK_EXT_transform_feedback */
+        TRANSFORM_FEEDBACK_BIT_EXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
+
+        NONE = 0
+    };
+    typedef Anvil::Bitfield PipelineStageFlags;
+
+    INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(PipelineStageFlags, VkPipelineStageFlags, PipelineStageFlagBits)
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class PointClippingBehavior
+    {
+        ALL_CLIP_PLANES       = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR,
+        USER_CLIP_PLANES_ONLY = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR,
+
+        UNKNOWN = VK_POINT_CLIPPING_BEHAVIOR_MAX_ENUM
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class PipelineCreateFlagBits
+    {
+        /* Core VK 1.0 */
+        ALLOW_DERIVATIVES_BIT    = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,
+        DISABLE_OPTIMIZATION_BIT = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
+        DERIVATIVE_BIT           = VK_PIPELINE_CREATE_DERIVATIVE_BIT,
+
+        /* VK_KHR_multiview */
+        VIEW_INDEX_FROM_DEVICE_INDEX_BIT = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR,
+
+        /* VK_KHR_device_group */
+        DISPATCH_BASE_BIT = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR,
+
+        NONE = 0
+    };
+    typedef Anvil::Bitfield PipelineCreateFlags;
+
+    INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(PipelineCreateFlags, VkPipelineCreateFlags, PipelineCreateFlagBits)
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class PolygonMode
+    {
+        FILL  = VK_POLYGON_MODE_FILL,
+        LINE  = VK_POLYGON_MODE_LINE,
+        POINT = VK_POLYGON_MODE_POINT,
+
+        UNKNOWN = VK_POLYGON_MODE_MAX_ENUM
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class PresentModeKHR
+    {
+        /* VK_KHR_surface */
+        IMMEDIATE_KHR    = VK_PRESENT_MODE_IMMEDIATE_KHR,
+        MAILBOX_KHR      = VK_PRESENT_MODE_MAILBOX_KHR,
+        FIFO_KHR         = VK_PRESENT_MODE_FIFO_KHR,
+        FIFO_RELAXED_KHR = VK_PRESENT_MODE_FIFO_RELAXED_KHR,
+
+        UNKNOWN = VK_PRESENT_MODE_MAX_ENUM_KHR
+    };
+
+    /* NOTE: These map 1:1
to VK equivalents */ + enum class PrimitiveTopology + { + LINE_LIST = VK_PRIMITIVE_TOPOLOGY_LINE_LIST, + LINE_LIST_WITH_ADJACENCY = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY, + LINE_STRIP = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP, + LINE_STRIP_WITH_ADJACENCY = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY, + PATCH_LIST = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, + POINT_LIST = VK_PRIMITIVE_TOPOLOGY_POINT_LIST, + TRIANGLE_FAN = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN, + TRIANGLE_LIST = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, + TRIANGLE_LIST_WITH_ADJACENCY = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY, + TRIANGLE_STRIP = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP, + TRIANGLE_STRIP_WITH_ADJACENCY = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY, + + UNKNOWN = VK_PRIMITIVE_TOPOLOGY_MAX_ENUM + }; + + /** A bitmask defining one or more queue family usage.*/ + enum class QueueFamilyFlagBits + { + COMPUTE_BIT = 1 << 0, + DMA_BIT = 1 << 1, + GRAPHICS_BIT = 1 << 2, + + FIRST_BIT = COMPUTE_BIT, + LAST_BIT = GRAPHICS_BIT, + + NONE = 0 + }; + typedef Anvil::Bitfield QueueFamilyFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(QueueFamilyFlags, uint32_t, QueueFamilyFlagBits) + + /* NOTE: These map 1:1 to VK equivalents */ + enum class QueueGlobalPriority + { + HIGH_EXT = VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT, + LOW_EXT = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT, + MEDIUM_EXT = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT, + REALTIME_EXT = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT, + + UNKNOWN = VK_QUEUE_GLOBAL_PRIORITY_MAX_ENUM + }; + + /** Enumerates all available queue family types */ + enum class QueueFamilyType + { + /* Holds queues that support COMPUTE operations but do NOT support GRAPHICS operations. */ + COMPUTE, + + /* Holds queues that support TRANSFER operations and which have not been classified + * as COMPUTE or UNIVERSAL queue family members. */ + TRANSFER, + + /* Holds queues that support GRAPHICS operations and which have not been classified + * as COMPUTE queue family members. 
*/ + UNIVERSAL, + + /* Always last */ + COUNT, + FIRST = COMPUTE, + UNDEFINED = COUNT + }; + + /* NOTE: These map 1:1 to VK equivalents */ + enum class QueueFlagBits + { + /* Core VK 1.0 */ + COMPUTE_BIT = VK_QUEUE_COMPUTE_BIT, + GRAPHICS_BIT = VK_QUEUE_GRAPHICS_BIT, + SPARSE_BINDING_BIT = VK_QUEUE_SPARSE_BINDING_BIT, + TRANSFER_BIT = VK_QUEUE_TRANSFER_BIT, + + /* Core VK 1.1 */ + PROTECTED_BIT = VK_QUEUE_PROTECTED_BIT, + + NONE = 0 + }; + typedef Anvil::Bitfield QueueFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(QueueFlags, VkQueueFlags, QueueFlagBits) + + /* NOTE: These map 1:1 to VK equivalents */ + enum class QueryControlFlagBits + { + PRECISE_BIT = VK_QUERY_CONTROL_PRECISE_BIT, + + NONE = 0 + }; + typedef Anvil::Bitfield QueryControlFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(QueryControlFlags, VkQueryControlFlags, QueryControlFlagBits) + + /* NOTE: These map 1:1 to VK equivalents */ + enum class QueryPipelineStatisticFlagBits + { + CLIPPING_INVOCATIONS_BIT = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT, + CLIPPING_PRIMITIVES_BIT = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT, + COMPUTE_SHADER_INVOCATIONS_BIT = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT, + FRAGMENT_SHADER_INVOCATIONS_BIT = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT, + GEOMETRY_SHADER_INVOCATIONS_BIT = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT, + GEOMETRY_SHADER_PRIMITIVES_BIT = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT, + INPUT_ASSEMBLY_VERTICES_BIT = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT, + INPUT_ASSEMBLY_PRIMITIVES_BIT = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT, + TESSELLATION_CONTROL_SHADER_PATCHES_BIT = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT, + TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT, + VERTEX_SHADER_INVOCATIONS_BIT = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT, + + NONE = 0, + }; + typedef Anvil::Bitfield QueryPipelineStatisticFlags; + + INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(QueryPipelineStatisticFlags, VkQueryPipelineStatisticFlags, QueryPipelineStatisticFlagBits) + + enum class QueryResultFlagBits + { + _64_BIT = VK_QUERY_RESULT_64_BIT, + + /* Implementation should wait for each query's status to become available before retrieving its results + * + * Core VK 1.0 functionality + */ + WAIT_BIT = VK_QUERY_RESULT_WAIT_BIT, + + /* Each query result value is going to be followed by a status value. Non-zero values indicate result is + * available. + * + * Core VK 1.0 functionality + */ + WITH_AVAILABILITY_BIT = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, + + /* Indicates it is OK for the function to return result values for a sub-range of the requested query range. 
+     *
+     * Core VK 1.0 functionality
+     */
+        PARTIAL_BIT = VK_QUERY_RESULT_PARTIAL_BIT,
+
+        NONE = 0
+    };
+    typedef Anvil::Bitfield<Anvil::QueryResultFlagBits, VkQueryResultFlags> QueryResultFlags;
+
+    INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(QueryResultFlags, VkQueryResultFlags, QueryResultFlagBits)
+
+    enum class RasterizationOrderAMD
+    {
+        RELAXED = VK_RASTERIZATION_ORDER_RELAXED_AMD,
+        STRICT  = VK_RASTERIZATION_ORDER_STRICT_AMD,
+
+        UNKNOWN = VK_RASTERIZATION_ORDER_MAX_ENUM_AMD
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class ResolveModeFlagBits
+    {
+        /* VK_KHR_depth_stencil_resolve */
+        SAMPLE_ZERO_BIT_KHR = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR,
+        AVERAGE_BIT_KHR     = VK_RESOLVE_MODE_AVERAGE_BIT_KHR,
+        MIN_BIT_KHR         = VK_RESOLVE_MODE_MIN_BIT_KHR,
+        MAX_BIT_KHR         = VK_RESOLVE_MODE_MAX_BIT_KHR,
+
+        NONE = VK_RESOLVE_MODE_NONE_KHR,
+    };
+    typedef Anvil::Bitfield<Anvil::ResolveModeFlagBits, VkResolveModeFlagsKHR> ResolveModeFlags;
+
+    INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(ResolveModeFlags, VkResolveModeFlagsKHR, ResolveModeFlagBits)
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class SampleCountFlagBits
+    {
+        _1_BIT  = VK_SAMPLE_COUNT_1_BIT,
+        _2_BIT  = VK_SAMPLE_COUNT_2_BIT,
+        _4_BIT  = VK_SAMPLE_COUNT_4_BIT,
+        _8_BIT  = VK_SAMPLE_COUNT_8_BIT,
+        _16_BIT = VK_SAMPLE_COUNT_16_BIT,
+        _32_BIT = VK_SAMPLE_COUNT_32_BIT,
+        _64_BIT = VK_SAMPLE_COUNT_64_BIT,
+
+        NONE = 0
+    };
+    typedef Anvil::Bitfield<Anvil::SampleCountFlagBits, VkSampleCountFlags> SampleCountFlags;
+
+    INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(SampleCountFlags, VkSampleCountFlags, SampleCountFlagBits)
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class SamplerAddressMode
+    {
+        /* Core VK 1.0 */
+        CLAMP_TO_BORDER = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
+        CLAMP_TO_EDGE   = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
+        MIRRORED_REPEAT = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+        REPEAT          = VK_SAMPLER_ADDRESS_MODE_REPEAT,
+
+        /* VK_KHR_sampler_mirror_clamp_to_edge */
+        MIRROR_CLAMP_TO_EDGE = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,
+
+        UNKNOWN = VK_SAMPLER_ADDRESS_MODE_MAX_ENUM
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class SamplerMipmapMode
+    {
+        LINEAR  = VK_SAMPLER_MIPMAP_MODE_LINEAR,
+        NEAREST = VK_SAMPLER_MIPMAP_MODE_NEAREST,
+
+        UNKNOWN = VK_SAMPLER_MIPMAP_MODE_MAX_ENUM
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class SamplerReductionMode
+    {
+        /* VK_EXT_sampler_filter_minmax */
+        WEIGHTED_AVERAGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT,
+        MAX_EXT              = VK_SAMPLER_REDUCTION_MODE_MAX_EXT,
+        MIN_EXT              = VK_SAMPLER_REDUCTION_MODE_MIN_EXT,
+
+        UNKNOWN = VK_SAMPLER_REDUCTION_MODE_MAX_ENUM
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class SamplerYCbCrModelConversion
+    {
+        RGB_IDENTITY_KHR   = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR,
+        YCBCR_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR,
+        YCBCR_709_KHR      = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR,
+        YCBCR_601_KHR      = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR,
+        YCBCR_2020_KHR     = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR,
+
+        UNKNOWN = VK_SAMPLER_YCBCR_MODEL_CONVERSION_MAX_ENUM
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class SamplerYCbCrRange
+    {
+        ITU_FULL_KHR   = VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR,
+        ITU_NARROW_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR,
+
+        UNKNOWN = VK_SAMPLER_YCBCR_RANGE_MAX_ENUM
+    };
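    // Illustrative sketch, not part of the patch: the sampler enums above are documented to map
    // 1:1 to their Vulkan counterparts, so a plain static_cast is enough whenever a raw
    // Vk*CreateInfo has to be filled by hand. The helper name and the chosen values below are
    // hypothetical and only demonstrate the cast.
    inline VkSamplerCreateInfo example_sampler_create_info()
    {
        VkSamplerCreateInfo result = {};

        result.sType        = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
        result.magFilter    = VK_FILTER_LINEAR;
        result.minFilter    = VK_FILTER_LINEAR;
        result.mipmapMode   = static_cast<VkSamplerMipmapMode> (Anvil::SamplerMipmapMode::LINEAR);
        result.addressModeU = static_cast<VkSamplerAddressMode>(Anvil::SamplerAddressMode::CLAMP_TO_EDGE);
        result.addressModeV = static_cast<VkSamplerAddressMode>(Anvil::SamplerAddressMode::CLAMP_TO_EDGE);
        result.addressModeW = static_cast<VkSamplerAddressMode>(Anvil::SamplerAddressMode::REPEAT);

        return result;
    }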
+    /* Specifies one of the compute / rendering pipeline stages. */
+    enum class ShaderStage
+    {
+        FIRST,
+
+        COMPUTE = FIRST,
+        FRAGMENT,
+        GEOMETRY,
+        TESSELLATION_CONTROL,
+        TESSELLATION_EVALUATION,
+        VERTEX,
+
+        COUNT,
+        UNKNOWN = COUNT
+    };
+
+    /* NOTE: Maps 1:1 to VK equivalents */
+    enum class ShaderStageFlagBits
+    {
+        COMPUTE_BIT                 = VK_SHADER_STAGE_COMPUTE_BIT,
+        FRAGMENT_BIT                = VK_SHADER_STAGE_FRAGMENT_BIT,
+        GEOMETRY_BIT                = VK_SHADER_STAGE_GEOMETRY_BIT,
+        TESSELLATION_CONTROL_BIT    = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
+        TESSELLATION_EVALUATION_BIT = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
+        VERTEX_BIT                  = VK_SHADER_STAGE_VERTEX_BIT,
+
+        ALL          = VK_SHADER_STAGE_ALL,
+        ALL_GRAPHICS = VK_SHADER_STAGE_ALL_GRAPHICS,
+
+        NONE = 0
+    };
+    typedef Anvil::Bitfield<Anvil::ShaderStageFlagBits, VkShaderStageFlags> ShaderStageFlags;
+
+    INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(ShaderStageFlags, VkShaderStageFlags, ShaderStageFlagBits)
+
+    /* Specifies the type of query for post-compile information about pipeline shaders */
+    enum class ShaderInfoType
+    {
+        FIRST,
+
+        BINARY = FIRST,
+        DISASSEMBLY,
+
+        COUNT,
+        UNKNOWN = COUNT
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class SharingMode
+    {
+        CONCURRENT = VK_SHARING_MODE_CONCURRENT,
+        EXCLUSIVE  = VK_SHARING_MODE_EXCLUSIVE,
+
+        UNKNOWN
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class SparseImageFormatFlagBits
+    {
+        /* Core VK 1.0 */
+        ALIGNED_MIP_SIZE_BIT       = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
+        NONSTANDARD_BLOCK_SIZE_BIT = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT,
+        SINGLE_MIPTAIL_BIT         = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
+
+        NONE = 0
+    };
+    typedef Anvil::Bitfield<Anvil::SparseImageFormatFlagBits, VkSparseImageFormatFlags> SparseImageFormatFlags;
+
+    INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(SparseImageFormatFlags, VkSparseImageFormatFlags, SparseImageFormatFlagBits)
+
+    enum class SparseMemoryBindFlagBits
+    {
+        /* Core VK 1.0 */
+        BIND_METADATA_BIT = VK_SPARSE_MEMORY_BIND_METADATA_BIT,
+
+        NONE = 0
+    };
+    typedef Anvil::Bitfield<Anvil::SparseMemoryBindFlagBits, VkSparseMemoryBindFlags> SparseMemoryBindFlags;
+
+    INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(SparseMemoryBindFlags, VkSparseMemoryBindFlags, SparseMemoryBindFlagBits)
+
+    /* Specifies SPIR-V language version */
+    enum class SpvVersion
+    {
+        _1_0,
+        _1_1,
+        _1_2,
+        _1_3,
+        _1_4,
+        UNKNOWN
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class StencilFaceFlagBits
+    {
+        BACK_BIT  = VK_STENCIL_FACE_BACK_BIT,
+        FRONT_BIT = VK_STENCIL_FACE_FRONT_BIT,
+
+        NONE = 0
+    };
+    typedef Anvil::Bitfield<Anvil::StencilFaceFlagBits, VkStencilFaceFlags> StencilFaceFlags;
+
+    INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(StencilFaceFlags, VkStencilFaceFlags, StencilFaceFlagBits)
+
+    enum class StencilOp
+    {
+        DECREMENT_AND_CLAMP = VK_STENCIL_OP_DECREMENT_AND_CLAMP,
+        DECREMENT_AND_WRAP  = VK_STENCIL_OP_DECREMENT_AND_WRAP,
+        INCREMENT_AND_CLAMP = VK_STENCIL_OP_INCREMENT_AND_CLAMP,
+        INCREMENT_AND_WRAP  = VK_STENCIL_OP_INCREMENT_AND_WRAP,
+        INVERT              = VK_STENCIL_OP_INVERT,
+        KEEP                = VK_STENCIL_OP_KEEP,
+        REPLACE             = VK_STENCIL_OP_REPLACE,
+        ZERO                = VK_STENCIL_OP_ZERO,
+
+        UNKNOWN = VK_STENCIL_OP_MAX_ENUM
+    };
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class SubgroupFeatureFlagBits
+    {
+        ARITHMETIC_BIT = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT,
+        BALLOT_BIT     = VK_SUBGROUP_FEATURE_BALLOT_BIT,
+        BASIC_BIT      = VK_SUBGROUP_FEATURE_BASIC_BIT,
+        CLUSTERED_BIT  = VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
+        QUAD_BIT       = VK_SUBGROUP_FEATURE_QUAD_BIT,
+        RELATIVE_BIT   = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT,
+        SHUFFLE_BIT    = VK_SUBGROUP_FEATURE_SHUFFLE_BIT,
+        VOTE_BIT       = VK_SUBGROUP_FEATURE_VOTE_BIT,
+
+        NONE = VK_SUBGROUP_FEATURE_FLAG_BITS_MAX_ENUM
+    };
+
+    typedef Anvil::Bitfield<Anvil::SubgroupFeatureFlagBits, VkSubgroupFeatureFlags> SubgroupFeatureFlags;
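    // Illustrative sketch, not part of the patch: the typed flag wrappers above are meant to
    // replace raw VkFlags values in Anvil's interfaces. Assuming the prototypes injected by
    // INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES provide the usual operator| overload for the
    // *_BIT enums (an assumption, not something shown here), a stage mask can be built from
    // the strongly-typed bits and unwrapped with get_vk() - the same accessor the struct
    // wrappers later in this patch use (e.g. ImageSubresource::get_vk() calls
    // aspect_mask.get_vk()). The function name is hypothetical.
    inline VkShaderStageFlags example_vertex_fragment_stage_mask()
    {
        const Anvil::ShaderStageFlags stages = Anvil::ShaderStageFlagBits::VERTEX_BIT |
                                               Anvil::ShaderStageFlagBits::FRAGMENT_BIT;

        return stages.get_vk();
    }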
+
+    /* NOTE: These map 1:1 to VK equivalents */
+    enum class SubpassContents
+    {
+        INLINE                    = VK_SUBPASS_CONTENTS_INLINE,
+        SECONDARY_COMMAND_BUFFERS = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS,
+
+        UNKNOWN = VK_SUBPASS_CONTENTS_MAX_ENUM
+    };
+
+    enum class SurfaceTransformFlagBits
+    {
+        HORIZONTAL_MIRROR_BIT_KHR            = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
+        HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
+        HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
+        HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR  = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
+        IDENTITY_BIT_KHR                     = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
+        INHERIT_BIT_KHR                      = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR,
+        ROTATE_180_BIT_KHR                   = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
+        ROTATE_270_BIT_KHR                   = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
+        ROTATE_90_BIT_KHR                    = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
+
+        NONE = 0
+    };
+    typedef Anvil::Bitfield<Anvil::SurfaceTransformFlagBits, VkSurfaceTransformFlagsKHR> SurfaceTransformFlags;
+
+    INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(SurfaceTransformFlags, VkSurfaceTransformFlagsKHR, SurfaceTransformFlagBits)
+
+    /* NOTE: Enums map 1:1 to their VK equivalents */
+    enum class SwapchainCreateFlagBits
+    {
+        /* Requires VK_KHR_device_group */
+        SPLIT_INSTANCE_BIND_REGIONS_BIT = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
+
+        /* Requires VK_KHR_swapchain_mutable_format */
+        CREATE_MUTABLE_FORMAT_BIT = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR,
+
+        NONE = 0
+    };
+    typedef Anvil::Bitfield<Anvil::SwapchainCreateFlagBits, VkSwapchainCreateFlagsKHR> SwapchainCreateFlags;
+
+    INJECT_BITFIELD_HELPER_FUNC_PROTOTYPES(SwapchainCreateFlags, VkSwapchainCreateFlagsKHR, SwapchainCreateFlagBits)
+
+    /* Note: These map 1:1 to corresponding VK error & VK swapchain error codes. */
+    enum class SwapchainOperationErrorCode
+    {
+        DEVICE_LOST  = VK_ERROR_DEVICE_LOST,
+        OUT_OF_DATE  = VK_ERROR_OUT_OF_DATE_KHR,
+        SUBOPTIMAL   = VK_SUBOPTIMAL_KHR,
+        SUCCESS      = VK_SUCCESS,
+        SURFACE_LOST = VK_ERROR_SURFACE_LOST_KHR
+    };
+
+    /* NOTE: Enums map 1:1 to their VK equivalents */
+    enum class TessellationDomainOrigin
+    {
+        LOWER_LEFT = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR,
+        UPPER_LEFT = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR,
+
+        UNKNOWN = VK_TESSELLATION_DOMAIN_ORIGIN_MAX_ENUM
+    };
+
+    enum class VertexInputRate
+    {
+        INSTANCE = VK_VERTEX_INPUT_RATE_INSTANCE,
+        VERTEX   = VK_VERTEX_INPUT_RATE_VERTEX,
+
+        UNKNOWN = VK_VERTEX_INPUT_RATE_MAX_ENUM
+    };
+}; /* namespace Anvil */
+
+#endif /* TYPES_ENUMS_H */
diff --git a/include/misc/types_macro.h b/include/misc/types_macro.h
new file mode 100644
index 00000000..c195d1b3
--- /dev/null
+++ b/include/misc/types_macro.h
@@ -0,0 +1,33 @@
+//
+// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef TYPES_MACRO_H +#define TYPES_MACRO_H + +/* Helper macros */ +#define ANVIL_DISABLE_ASSIGNMENT_OPERATOR(x) private: x& operator=(const x&); +#define ANVIL_DISABLE_COPY_CONSTRUCTOR(x) private: x(const x&); +#define ANVIL_REDUNDANT_ARGUMENT(x) x = x; +#define ANVIL_REDUNDANT_ARGUMENT_CONST(x) x; +#define ANVIL_REDUNDANT_VARIABLE(x) x = x; +#define ANVIL_REDUNDANT_VARIABLE_CONST(x) x; + +#endif /* TYPES_MACRO_H */ \ No newline at end of file diff --git a/include/misc/types_struct.h b/include/misc/types_struct.h new file mode 100644 index 00000000..8ce7c095 --- /dev/null +++ b/include/misc/types_struct.h @@ -0,0 +1,3455 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef TYPES_STRUCT_H +#define TYPES_STRUCT_H + +#ifdef major + #undef major +#endif + +#ifdef minor + #undef minor +#endif + +namespace Anvil +{ + template + struct EnumClassHasher + { + std::size_t operator()(const EnumClass& in_value) const + { + return static_cast(static_cast(in_value) ); + } + }; + + /* Note: Matches VkSampleLocationEXT in terms of the layout and size. 
+ */ + typedef struct SampleLocation + { + float x; + float y; + + SampleLocation() + :x(0.0f), + y(0.0f) + { + /* Stub */ + } + + SampleLocation(const float& in_x, + const float& in_y) + :x(in_x), + y(in_y) + { + /* Stub */ + } + } SampleLocation; + + static_assert(sizeof(SampleLocation) == sizeof(VkSampleLocationEXT), "Struct sizes must match"); + static_assert(offsetof(SampleLocation, x) == offsetof(VkSampleLocationEXT, x), "Member offsets much match"); + static_assert(offsetof(SampleLocation, y) == offsetof(VkSampleLocationEXT, y), "Member offsets much match"); + + typedef struct SampleLocationsInfo + { + Anvil::SampleCountFlagBits sample_locations_per_pixel; + VkExtent2D sample_location_grid_size; + std::vector sample_locations; + + SampleLocationsInfo() + { + sample_locations_per_pixel = Anvil::SampleCountFlagBits::NONE; + sample_location_grid_size.height = 0; + sample_location_grid_size.width = 0; + } + + VkSampleLocationsInfoEXT get_vk() const + { + VkSampleLocationsInfoEXT result; + + result.pNext = nullptr; + result.pSampleLocations = (sample_locations.size() != 0) ? reinterpret_cast(&sample_locations.at(0) ) + : nullptr; + result.sampleLocationGridSize = sample_location_grid_size; + result.sampleLocationsCount = static_cast (sample_locations.size() ); + result.sampleLocationsPerPixel = static_cast(sample_locations_per_pixel); + result.sType = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT; + + return result; + } + + } SampleLocationsInfo; + + typedef struct AttachmentSampleLocations + { + uint32_t n_attachment; + Anvil::SampleLocationsInfo sample_locations_info; + + AttachmentSampleLocations() + { + n_attachment = UINT32_MAX; + } + + VkAttachmentSampleLocationsEXT get_vk() const + { + VkAttachmentSampleLocationsEXT result; + + result.attachmentIndex = n_attachment; + result.sampleLocationsInfo = sample_locations_info.get_vk(); + + return result; + } + } AttachmentSampleLocations; + + typedef struct SubpassSampleLocations + { + uint32_t n_subpass; + Anvil::SampleLocationsInfo sample_locations_info; + + SubpassSampleLocations() + { + n_subpass = UINT32_MAX; + } + + VkSubpassSampleLocationsEXT get_vk() const + { + VkSubpassSampleLocationsEXT result; + + result.subpassIndex = n_subpass; + result.sampleLocationsInfo = sample_locations_info.get_vk(); + + return result; + } + } SubpassSampleLocations; + + /* NOTE: Maps 1:1 to VkImageSubresource */ + typedef struct SurfaceCapabilities + { + uint32_t min_image_count; + uint32_t max_image_count; + VkExtent2D current_extent; + VkExtent2D min_image_extent; + VkExtent2D max_image_extent; + uint32_t max_image_array_layers; + Anvil::SurfaceTransformFlags supported_transforms; + Anvil::SurfaceTransformFlagBits current_transform; + Anvil::CompositeAlphaFlags supported_composite_alpha; + Anvil::ImageUsageFlags supported_usage_flags; + + SurfaceCapabilities() + { + current_extent.height = 0; + current_extent.width = 0; + max_image_array_layers = 0; + max_image_count = 0; + max_image_extent.height = 0; + max_image_extent.width = 0; + min_image_count = 0; + min_image_extent.height = 0; + min_image_extent.width = 0; + supported_transforms = Anvil::SurfaceTransformFlagBits::NONE; + current_transform = Anvil::SurfaceTransformFlagBits::NONE; + supported_composite_alpha = Anvil::CompositeAlphaFlagBits::NONE; + supported_usage_flags = Anvil::ImageUsageFlagBits::NONE; + } + } SurfaceCapabilities; + + static_assert(sizeof(SurfaceCapabilities) == sizeof(VkSurfaceCapabilitiesKHR), "Struct sizes much match"); + static_assert(offsetof(SurfaceCapabilities, 
min_image_count) == offsetof(VkSurfaceCapabilitiesKHR, minImageCount), "Member offsets must match"); + static_assert(offsetof(SurfaceCapabilities, max_image_count) == offsetof(VkSurfaceCapabilitiesKHR, maxImageCount), "Member offsets must match"); + static_assert(offsetof(SurfaceCapabilities, current_extent) == offsetof(VkSurfaceCapabilitiesKHR, currentExtent), "Member offsets must match"); + static_assert(offsetof(SurfaceCapabilities, min_image_extent) == offsetof(VkSurfaceCapabilitiesKHR, minImageExtent), "Member offsets must match"); + static_assert(offsetof(SurfaceCapabilities, max_image_extent) == offsetof(VkSurfaceCapabilitiesKHR, maxImageExtent), "Member offsets must match"); + static_assert(offsetof(SurfaceCapabilities, max_image_array_layers) == offsetof(VkSurfaceCapabilitiesKHR, maxImageArrayLayers), "Member offsets must match"); + static_assert(offsetof(SurfaceCapabilities, supported_transforms) == offsetof(VkSurfaceCapabilitiesKHR, supportedTransforms), "Member offsets must match"); + static_assert(offsetof(SurfaceCapabilities, current_transform) == offsetof(VkSurfaceCapabilitiesKHR, currentTransform), "Member offsets must match"); + static_assert(offsetof(SurfaceCapabilities, supported_composite_alpha) == offsetof(VkSurfaceCapabilitiesKHR, supportedCompositeAlpha), "Member offsets must match"); + static_assert(offsetof(SurfaceCapabilities, supported_usage_flags) == offsetof(VkSurfaceCapabilitiesKHR, supportedUsageFlags), "Member offsets must match"); + + /* NOTE: Maps 1:1 to VkImageSubresource */ + typedef struct ImageSubresource + { + Anvil::ImageAspectFlags aspect_mask; + uint32_t mip_level; + uint32_t array_layer; + + VkImageSubresource get_vk() const + { + VkImageSubresource result; + + result.arrayLayer = array_layer; + result.aspectMask = aspect_mask.get_vk(); + result.mipLevel = mip_level; + + return result; + } + + ImageSubresource() + :array_layer(UINT32_MAX), + mip_level (UINT32_MAX) + { + /* Stub */ + } + } ImageSubresource; + + static_assert(sizeof(ImageSubresource) == sizeof(VkImageSubresource), "Struct sizes much match"); + static_assert(offsetof(ImageSubresource, aspect_mask) == offsetof(VkImageSubresource, aspectMask), "Member offsets must match"); + static_assert(offsetof(ImageSubresource, mip_level) == offsetof(VkImageSubresource, mipLevel), "Member offsets must match"); + static_assert(offsetof(ImageSubresource, array_layer) == offsetof(VkImageSubresource, arrayLayer), "Member offsets must match"); + + /* NOTE: Maps 1:1 to VkImageSubresourceRange */ + typedef struct ImageSubresourceRange + { + Anvil::ImageAspectFlags aspect_mask; + uint32_t base_mip_level; + uint32_t level_count; + uint32_t base_array_layer; + uint32_t layer_count; + + VkImageSubresourceRange get_vk() const + { + VkImageSubresourceRange result; + + result.aspectMask = aspect_mask.get_vk(); + result.baseMipLevel = base_mip_level; + result.levelCount = level_count; + result.baseArrayLayer = base_array_layer; + result.layerCount = layer_count; + + return result; + } + + bool operator==(const ImageSubresourceRange&) const; + } ImageSubresourceRange; + + static_assert(sizeof(ImageSubresourceRange) == sizeof(VkImageSubresourceRange), "Struct sizes much match"); + static_assert(offsetof(ImageSubresourceRange, aspect_mask) == offsetof(VkImageSubresourceRange, aspectMask), "Member offsets must match"); + static_assert(offsetof(ImageSubresourceRange, base_mip_level) == offsetof(VkImageSubresourceRange, baseMipLevel), "Member offsets must match"); + static_assert(offsetof(ImageSubresourceRange, 
level_count) == offsetof(VkImageSubresourceRange, levelCount), "Member offsets must match"); + static_assert(offsetof(ImageSubresourceRange, base_array_layer) == offsetof(VkImageSubresourceRange, baseArrayLayer), "Member offsets must match"); + static_assert(offsetof(ImageSubresourceRange, layer_count) == offsetof(VkImageSubresourceRange, layerCount), "Member offsets must match"); + + /* NOTE: Maps 1:1 to VkImageSubresourceLayers */ + typedef struct + { + Anvil::ImageAspectFlags aspect_mask; + uint32_t mip_level; + uint32_t base_array_layer; + uint32_t layer_count; + } ImageSubresourceLayers; + + static_assert(sizeof(ImageSubresourceLayers) == sizeof(VkImageSubresourceLayers), "Struct sizes much match"); + static_assert(offsetof(ImageSubresourceLayers, aspect_mask) == offsetof(VkImageSubresourceLayers, aspectMask), "Member offsets must match"); + static_assert(offsetof(ImageSubresourceLayers, mip_level) == offsetof(VkImageSubresourceLayers, mipLevel), "Member offsets must match"); + static_assert(offsetof(ImageSubresourceLayers, base_array_layer) == offsetof(VkImageSubresourceLayers, baseArrayLayer), "Member offsets must match"); + static_assert(offsetof(ImageSubresourceLayers, layer_count) == offsetof(VkImageSubresourceLayers, layerCount), "Member offsets must match"); + + /* NOTE: Maps 1:1 to VkSubresourceLayout */ + typedef struct + { + VkDeviceSize offset; + VkDeviceSize size; + VkDeviceSize row_pitch; + VkDeviceSize array_pitch; + VkDeviceSize depth_pitch; + } SubresourceLayout; + + static_assert(sizeof(SubresourceLayout) == sizeof(VkSubresourceLayout), "Struct sizes much match"); + static_assert(offsetof(SubresourceLayout, offset) == offsetof(VkSubresourceLayout, offset), "Member offsets must match"); + static_assert(offsetof(SubresourceLayout, size) == offsetof(VkSubresourceLayout, size), "Member offsets must match"); + static_assert(offsetof(SubresourceLayout, row_pitch) == offsetof(VkSubresourceLayout, rowPitch), "Member offsets must match"); + static_assert(offsetof(SubresourceLayout, array_pitch) == offsetof(VkSubresourceLayout, arrayPitch), "Member offsets must match"); + static_assert(offsetof(SubresourceLayout, depth_pitch) == offsetof(VkSubresourceLayout, depthPitch), "Member offsets must match"); + + typedef struct + { + Anvil::ImageSubresourceLayers src_subresource; + VkOffset3D src_offsets[2]; + Anvil::ImageSubresourceLayers dst_subresource; + VkOffset3D dst_offsets[2]; + } ImageBlit; + + static_assert(sizeof(ImageBlit) == sizeof(VkImageBlit), "Struct sizes much match"); + static_assert(offsetof(ImageBlit, src_subresource) == offsetof(VkImageBlit, srcSubresource), "Member offsets must match"); + static_assert(offsetof(ImageBlit, src_offsets) == offsetof(VkImageBlit, srcOffsets), "Member offsets must match"); + static_assert(offsetof(ImageBlit, dst_subresource) == offsetof(VkImageBlit, dstSubresource), "Member offsets must match"); + static_assert(offsetof(ImageBlit, dst_offsets) == offsetof(VkImageBlit, dstOffsets), "Member offsets must match"); + + /* NOTE: Maps 1:1 to VkBufferCopy */ + typedef struct + { + VkDeviceSize src_offset; + VkDeviceSize dst_offset; + VkDeviceSize size; + } BufferCopy; + + static_assert(sizeof(BufferCopy) == sizeof(VkBufferCopy), "Struct sizes much match"); + static_assert(offsetof(BufferCopy, src_offset) == offsetof(VkBufferCopy, srcOffset), "Member offsets must match"); + static_assert(offsetof(BufferCopy, dst_offset) == offsetof(VkBufferCopy, dstOffset), "Member offsets must match"); + static_assert(offsetof(BufferCopy, size) == 
offsetof(VkBufferCopy, size), "Member offsets must match"); + + /* NOTE: Maps 1:1 to VkBufferImageCopy */ + typedef struct + { + VkDeviceSize buffer_offset; + uint32_t buffer_row_length; + uint32_t buffer_image_height; + Anvil::ImageSubresourceLayers image_subresource; + VkOffset3D image_offset; + VkExtent3D image_extent; + } BufferImageCopy; + + static_assert(sizeof(BufferImageCopy) == sizeof(VkBufferImageCopy), "Struct sizes much match"); + static_assert(offsetof(BufferImageCopy, buffer_offset) == offsetof(VkBufferImageCopy, bufferOffset), "Member offsets must match"); + static_assert(offsetof(BufferImageCopy, buffer_row_length) == offsetof(VkBufferImageCopy, bufferRowLength), "Member offsets must match"); + static_assert(offsetof(BufferImageCopy, buffer_image_height) == offsetof(VkBufferImageCopy, bufferImageHeight), "Member offsets must match"); + static_assert(offsetof(BufferImageCopy, image_subresource) == offsetof(VkBufferImageCopy, imageSubresource), "Member offsets must match"); + static_assert(offsetof(BufferImageCopy, image_offset) == offsetof(VkBufferImageCopy, imageOffset), "Member offsets must match"); + static_assert(offsetof(BufferImageCopy, image_extent) == offsetof(VkBufferImageCopy, imageExtent), "Member offsets must match"); + + /* NOTE: Maps 1:1 to VkClearAttachment */ + typedef struct + { + Anvil::ImageAspectFlags aspect_mask; + uint32_t color_attachment; + VkClearValue clear_value; + } ClearAttachment; + + static_assert(sizeof(ClearAttachment) == sizeof(VkClearAttachment), "Struct sizes much match"); + static_assert(offsetof(ClearAttachment, aspect_mask) == offsetof(VkClearAttachment, aspectMask), "Member offsets must match"); + static_assert(offsetof(ClearAttachment, color_attachment) == offsetof(VkClearAttachment, colorAttachment), "Member offsets must match"); + static_assert(offsetof(ClearAttachment, clear_value) == offsetof(VkClearAttachment, clearValue), "Member offsets must match"); + + /* NOTE: Maps 1:1 to VkImageCopy */ + typedef struct + { + Anvil::ImageSubresourceLayers src_subresource; + VkOffset3D src_offset; + Anvil::ImageSubresourceLayers dst_subresource; + VkOffset3D dst_offset; + VkExtent3D extent; + } ImageCopy; + + typedef struct ExternalFenceProperties + { + Anvil::ExternalFenceHandleTypeFlags compatible_external_handle_types; + Anvil::ExternalFenceHandleTypeFlags export_from_imported_external_handle_types; + + bool is_exportable; + bool is_importable; + + /* Dummy constructor */ + ExternalFenceProperties(); + + ExternalFenceProperties(const VkExternalFencePropertiesKHR& in_external_memory_props); + } ExternalFenceProperties; + + typedef struct ExternalMemoryProperties + { + Anvil::ExternalMemoryHandleTypeFlags compatible_external_handle_types; + Anvil::ExternalMemoryHandleTypeFlags export_from_imported_external_handle_types; + + bool is_exportable; + bool is_importable; + + /* Dummy constructor */ + ExternalMemoryProperties(); + + ExternalMemoryProperties(const VkExternalMemoryPropertiesKHR& in_external_memory_props); + } ExternalMemoryProperties; + + typedef struct ExternalSemaphoreProperties + { + Anvil::ExternalSemaphoreHandleTypeFlags compatible_external_handle_types; + Anvil::ExternalSemaphoreHandleTypeFlags export_from_imported_external_handle_types; + + bool is_exportable; + bool is_importable; + + /* Dummy constructor */ + ExternalSemaphoreProperties(); + + ExternalSemaphoreProperties(const VkExternalSemaphorePropertiesKHR& in_external_memory_props); + } ExternalSemaphoreProperties; + + /* Holds shader core properties pertaining to a 
physical device */ + typedef struct AMDShaderCoreProperties + { + uint32_t shader_engine_count; ///< Number of shader engines. + uint32_t shader_arrays_per_engine_count; ///< Number of shader arrays. + uint32_t compute_units_per_shader_array; ///< Number of CUs per shader array. + uint32_t simd_per_compute_unit; ///< Number of SIMDs per compute unit. + uint32_t wavefronts_per_simd; ///< Number of wavefront slots in each SIMD. + uint32_t wavefront_size; ///< Wavefront size. + uint32_t sgprs_per_simd; ///< Number of physical SGPRs per SIMD. + uint32_t min_sgpr_allocation; ///< Minimum number of SGPRs that can be allocated by a wave. + uint32_t max_sgpr_allocation; ///< Number of available SGPRs. + uint32_t sgpr_allocation_granularity; ///< SGPRs are allocated in groups of this size. Meaning, if your shader + /// only uses 1 SGPR, you will still end up reserving this number of + /// SGPRs. + uint32_t vgprs_per_simd; ///< Number of physical VGPRs per SIMD. + uint32_t min_vgpr_allocation; ///< Minimum number of VGPRs that can be allocated by a wave. + uint32_t max_vgpr_allocation; ///< Number of available VGPRs. + uint32_t vgpr_allocation_granularity; ///< VGPRs are allocated in groups of this size. Meaning, if your shader + /// only uses 1 VGPR, you will still end up reserving this number of + /// VGPRs. + + AMDShaderCoreProperties(); + AMDShaderCoreProperties(const VkPhysicalDeviceShaderCorePropertiesAMD& in_props); + + bool operator==(const AMDShaderCoreProperties& in_props) const; + } AMDShaderCoreProperties; + + /** Describes a buffer memory barrier. */ + typedef struct BufferBarrier + { + Anvil::AccessFlags dst_access_mask; + Anvil::AccessFlags src_access_mask; + + VkBuffer buffer; + Anvil::Buffer* buffer_ptr; + uint32_t dst_queue_family_index; + VkDeviceSize offset; + VkDeviceSize size; + uint32_t src_queue_family_index; + + /** Constructor. + * + * Note that @param in_buffer_ptr is retained by this function. + * + * @param in_source_access_mask Source access mask to use for the barrier. + * @param in_destination_access_mask Destination access mask to use for the barrier. + * @param in_src_queue_family_index Source queue family index to use for the barrier. + * @param in_dst_queue_family_index Destination queue family index to use for the barrier. + * @param in_buffer_ptr Pointer to a Buffer instance the instantiated barrier + * refers to. Must not be nullptr. + * @param in_offset Start offset of the region described by the barrier. + * @param in_size Size of the region described by the barrier. + **/ + explicit BufferBarrier(Anvil::AccessFlags in_source_access_mask, + Anvil::AccessFlags in_destination_access_mask, + uint32_t in_src_queue_family_index, + uint32_t in_dst_queue_family_index, + Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + VkDeviceSize in_size); + + /** Destructor. + * + * Releases the encapsulated Buffer instance. + **/ + virtual ~BufferBarrier(); + + /** Copy constructor. + * + * Retains the Buffer instance stored in the input barrier. + * + * @param in Barrier instance to copy data from. + **/ + BufferBarrier(const BufferBarrier& in); + + /** Returns a Vulkan buffer memory barrier descriptor, whose configuration corresponds to + * to the configuration of this descriptor. 
+ **/ + virtual VkBufferMemoryBarrier get_barrier_vk() const; + + bool operator==(const BufferBarrier&) const; + private: + BufferBarrier& operator=(const BufferBarrier&); + } BufferBarrier; + + typedef struct BufferProperties + { + ExternalMemoryProperties external_handle_properties; + + BufferProperties(); + BufferProperties(const ExternalMemoryProperties& in_external_handle_properties); + + BufferProperties (const BufferProperties&) = default; + BufferProperties& operator=(const BufferProperties&) = default; + } BufferProperties; + + typedef struct BufferPropertiesQuery + { + const Anvil::BufferCreateFlags create_flags; + const Anvil::ExternalMemoryHandleTypeFlags external_memory_handle_type; + const Anvil::BufferUsageFlags usage_flags; + + explicit BufferPropertiesQuery(const Anvil::BufferCreateFlags in_create_flags, + const Anvil::ExternalMemoryHandleTypeFlags& in_external_memory_handle_type, + const Anvil::BufferUsageFlags& in_usage_flags) + :create_flags (in_create_flags), + external_memory_handle_type(in_external_memory_handle_type), + usage_flags (in_usage_flags) + { + /* Stub */ + } + + BufferPropertiesQuery (const BufferPropertiesQuery& in_query) = default; + BufferPropertiesQuery& operator=(const BufferPropertiesQuery& in_query) = delete; + } BufferPropertiesQuery; + + /* Used by Buffer::set_nonsparse_memory_multi(). Requires VK_KHR_device_group support. */ + typedef struct BufferMemoryBindingUpdate + { + Anvil::Buffer* buffer_ptr; + bool memory_block_owned_by_buffer; + Anvil::MemoryBlock* memory_block_ptr; + + /* May either be empty (for sGPU and mGPU devices) or: + * + * 1) hold up as many physical devices as there are assigned to the device + * group (mGPU devices) + * 2) hold the physical device, from which the logical device has been created + * (sGPU device) + */ + std::vector physical_devices; + + BufferMemoryBindingUpdate(); + } BufferMemoryBindingUpdate; + + /** TODO */ + typedef struct CommandBufferMGPUSubmission + { + /* Command buffer to execute. May be nullptr. */ + Anvil::CommandBufferBase* cmd_buffer_ptr; + + /* Bit mask determining which devices in the device group will execute the command buffer. 
*/ + uint32_t device_mask; + + /* Default dummy constructor */ + CommandBufferMGPUSubmission() + { + cmd_buffer_ptr = nullptr; + device_mask = 0; + } + + CommandBufferMGPUSubmission(Anvil::CommandBufferBase* in_cmd_buffer_ptr, + const uint32_t& in_device_mask) + :cmd_buffer_ptr(in_cmd_buffer_ptr), + device_mask (in_device_mask) + { + /* Stub */ + } + + } CommandBufferMGPUSubmission; + + /* NOTE: Matches VK equivalent */ + struct ComponentMapping + { + Anvil::ComponentSwizzle r; + Anvil::ComponentSwizzle g; + Anvil::ComponentSwizzle b; + Anvil::ComponentSwizzle a; + + explicit ComponentMapping(const Anvil::ComponentSwizzle& in_r, + const Anvil::ComponentSwizzle& in_g, + const Anvil::ComponentSwizzle& in_b, + const Anvil::ComponentSwizzle& in_a) + :r(in_r), + g(in_g), + b(in_b), + a(in_a) + { + /* Stub */ + } + + const VkComponentMapping& get_vk() const + { + return *reinterpret_cast(this); + } + }; + + static_assert(sizeof(VkComponentMapping) == sizeof(ComponentMapping), "Anvil/VK struct size mismatch"); + static_assert(offsetof(VkComponentMapping, r) == offsetof(ComponentMapping, r), "Anvil/VK member offset mismatch"); + static_assert(offsetof(VkComponentMapping, g) == offsetof(ComponentMapping, g), "Anvil/VK member offset mismatch"); + static_assert(offsetof(VkComponentMapping, b) == offsetof(ComponentMapping, b), "Anvil/VK member offset mismatch"); + static_assert(offsetof(VkComponentMapping, a) == offsetof(ComponentMapping, a), "Anvil/VK member offset mismatch"); + + struct ConformanceVersionKHR + { + uint8_t major; + uint8_t minor; + uint8_t patch; + uint8_t subminor; + + ConformanceVersionKHR() + :major (0), + minor (0), + patch (0), + subminor(0) + { + /* Stub */ + } + + ConformanceVersionKHR(const VkConformanceVersionKHR& in_version) + :major (in_version.major), + minor (in_version.minor), + patch (in_version.patch), + subminor(in_version.subminor) + { + /* Stub */ + } + + bool operator==(const ConformanceVersionKHR& in_version) const + { + return (major == in_version.major) && + (minor == in_version.minor) && + (patch == in_version.patch) && + (subminor == in_version.subminor); + } + }; + + /* Holds contents of VkDebugUtilsLabelEXT minus pNext and sType */ + struct DebugLabel + { + float color[4]; + const char* name_ptr; + + DebugLabel() + { + color[0] = 0.0f; + color[1] = 0.0f; + color[2] = 0.0f; + color[3] = 0.0f; + name_ptr = nullptr; + } + + DebugLabel(const VkDebugUtilsLabelEXT& in_label_vk) + { + color[0] = in_label_vk.color[0]; + color[1] = in_label_vk.color[1]; + color[2] = in_label_vk.color[2]; + color[3] = in_label_vk.color[3]; + name_ptr = in_label_vk.pLabelName; + } + }; + + /* Holds contents of VkDebugUtilsObjectNameInfoEXT minus pNext and sType */ + struct DebugObjectNameInfo + { + uint64_t object_handle; + const char* object_name_ptr; + Anvil::ObjectType object_type; + + DebugObjectNameInfo() + { + object_handle = 0; + object_name_ptr = nullptr; + object_type = Anvil::ObjectType::UNKNOWN; + } + + DebugObjectNameInfo(const uint64_t& in_object_handle, + const char* in_object_name_ptr, + const Anvil::ObjectType& in_object_type) + :object_handle (in_object_handle), + object_name_ptr(in_object_name_ptr), + object_type (in_object_type) + { + /* Stub */ + } + + DebugObjectNameInfo(const VkDebugUtilsObjectNameInfoEXT& in_name_info_vk); + }; + + #if defined(_WIN32) + typedef struct ExternalNTHandleInfo + { + DWORD access; + const SECURITY_ATTRIBUTES* attributes_ptr; + std::wstring name; + + ExternalNTHandleInfo() + { + access = 0; + attributes_ptr = nullptr; + } + } 
ExternalNTHandleInfo; + #endif + + typedef struct EXTConservativeRasterizationProperties + { + bool conservative_point_and_line_rasterization; + bool conservative_rasterization_post_depth_coverage; + bool degenerate_lines_rasterized; + bool degenerate_triangles_rasterized; + float extra_primitive_overestimation_size_granularity; + bool fully_covered_fragment_shader_input_variable; + float max_extra_primitive_overestimation_size; + float primitive_overestimation_size; + bool primitive_underestimation; + + EXTConservativeRasterizationProperties(); + EXTConservativeRasterizationProperties(const VkPhysicalDeviceConservativeRasterizationPropertiesEXT& in_properties); + + VkPhysicalDeviceConservativeRasterizationPropertiesEXT get_vk_physical_device_conservative_rasterization_properties() const; + + bool operator==(const EXTConservativeRasterizationProperties& in_properties) const; + } EXTConservativeRasterizationProperties; + + typedef struct EXTDepthClipEnableFeatures + { + bool depth_clip_enable; + + EXTDepthClipEnableFeatures(); + EXTDepthClipEnableFeatures(const VkPhysicalDeviceDepthClipEnableFeaturesEXT& in_features); + + VkPhysicalDeviceDepthClipEnableFeaturesEXT get_vk_physical_device_depth_clip_enable_features() const; + + bool operator==(const EXTDepthClipEnableFeatures& in_features) const; + } EXTDepthClipEnableFeatures; + + typedef struct EXTDescriptorIndexingFeatures + { + bool descriptor_binding_partially_bound; + bool descriptor_binding_sampled_image_update_after_bind; + bool descriptor_binding_storage_buffer_update_after_bind; + bool descriptor_binding_storage_image_update_after_bind; + bool descriptor_binding_storage_texel_buffer_update_after_bind; + bool descriptor_binding_uniform_buffer_update_after_bind; + bool descriptor_binding_uniform_texel_buffer_update_after_bind; + bool descriptor_binding_update_unused_while_pending; + bool descriptor_binding_variable_descriptor_count; + bool runtime_descriptor_array; + bool shader_input_attachment_array_dynamic_indexing; + bool shader_input_attachment_array_non_uniform_indexing; + bool shader_sampled_image_array_non_uniform_indexing; + bool shader_storage_buffer_array_non_uniform_indexing; + bool shader_storage_image_array_non_uniform_indexing; + bool shader_storage_texel_buffer_array_dynamic_indexing; + bool shader_storage_texel_buffer_array_non_uniform_indexing; + bool shader_uniform_buffer_array_non_uniform_indexing; + bool shader_uniform_texel_buffer_array_dynamic_indexing; + bool shader_uniform_texel_buffer_array_non_uniform_indexing; + + EXTDescriptorIndexingFeatures(); + EXTDescriptorIndexingFeatures(const VkPhysicalDeviceDescriptorIndexingFeaturesEXT& in_features); + + VkPhysicalDeviceDescriptorIndexingFeaturesEXT get_vk_physical_device_descriptor_indexing_features() const; + + bool operator==(const EXTDescriptorIndexingFeatures& in_features) const; + } EXTDescriptorIndexingFeatures; + + typedef struct EXTDescriptorIndexingProperties + { + uint32_t max_descriptor_set_update_after_bind_input_attachments; + uint32_t max_descriptor_set_update_after_bind_sampled_images; + uint32_t max_descriptor_set_update_after_bind_samplers; + uint32_t max_descriptor_set_update_after_bind_storage_buffers; + uint32_t max_descriptor_set_update_after_bind_storage_buffers_dynamic; + uint32_t max_descriptor_set_update_after_bind_storage_images; + uint32_t max_descriptor_set_update_after_bind_uniform_buffers; + uint32_t max_descriptor_set_update_after_bind_uniform_buffers_dynamic; + uint32_t max_per_stage_descriptor_update_after_bind_input_attachments; 
+ uint32_t max_per_stage_descriptor_update_after_bind_sampled_images; + uint32_t max_per_stage_descriptor_update_after_bind_samplers; + uint32_t max_per_stage_descriptor_update_after_bind_storage_buffers; + uint32_t max_per_stage_descriptor_update_after_bind_storage_images; + uint32_t max_per_stage_descriptor_update_after_bind_uniform_buffers; + uint32_t max_per_stage_update_after_bind_resources; + uint32_t max_update_after_bind_descriptors_in_all_pools; + bool shader_input_attachment_array_non_uniform_indexing_native; + bool shader_sampled_image_array_non_uniform_indexing_native; + bool shader_storage_buffer_array_non_uniform_indexing_native; + bool shader_storage_image_array_non_uniform_indexing_native; + bool shader_uniform_buffer_array_non_uniform_indexing_native; + + EXTDescriptorIndexingProperties(); + EXTDescriptorIndexingProperties(const VkPhysicalDeviceDescriptorIndexingPropertiesEXT& in_props); + + bool operator==(const EXTDescriptorIndexingProperties& in_props) const; + } EXTDescriptorIndexingProperties; + + typedef struct EXTExternalMemoryHostProperties + { + VkDeviceSize min_imported_host_pointer_alignment; + + EXTExternalMemoryHostProperties(); + EXTExternalMemoryHostProperties(const VkPhysicalDeviceExternalMemoryHostPropertiesEXT& in_props); + + bool operator==(const EXTExternalMemoryHostProperties& in_props) const; + } EXTExternalMemoryHostProperties; + + typedef struct EXTPCIBusInfoProperties + { + uint32_t pci_bus; + uint32_t pci_device; + uint32_t pci_domain; + uint32_t pci_function; + + EXTPCIBusInfoProperties(); + EXTPCIBusInfoProperties(const VkPhysicalDevicePCIBusInfoPropertiesEXT& in_props); + + bool operator==(const EXTPCIBusInfoProperties& in_props) const; + } EXTPCIBusInfoProperties; + + typedef struct EXTSampleLocationsProperties + { + VkExtent2D max_sample_location_grid_size; + float sample_location_coordinate_range[2]; + Anvil::SampleCountFlags sample_location_sample_counts; + uint32_t sample_location_sub_pixel_bits; + bool variable_sample_locations; + + EXTSampleLocationsProperties(); + EXTSampleLocationsProperties(const VkPhysicalDeviceSampleLocationsPropertiesEXT& in_props); + + bool operator==(const EXTSampleLocationsProperties& in_props) const; + } EXTSampleLocationsProperties; + + typedef struct EXTVertexAttributeDivisorProperties + { + uint32_t max_vertex_attribute_divisor; + + EXTVertexAttributeDivisorProperties(); + EXTVertexAttributeDivisorProperties(const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT& in_props); + + bool operator==(const EXTVertexAttributeDivisorProperties& in_props) const; + } EXTVertexAttributeDivisorProperties; + + typedef struct DescriptorSetAllocation + { + /* Descriptor set layout to use for the allocation request */ + const Anvil::DescriptorSetLayout* ds_layout_ptr; + + /* Number of descriptors to use for the variable descriptor binding defined in the DS layout. + * + * This value is only required if ds_layout_ptr contains a binding created with the + * DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT flag. Otherwise, it is ignored. + * + */ + uint32_t n_variable_descriptor_bindings; + + /* Dummy constructor. Do not use as input for DS allocation functions. */ + DescriptorSetAllocation() + { + ds_layout_ptr = nullptr; + n_variable_descriptor_bindings = UINT32_MAX; + } + + /* Constructor. + * + * Use if you need to allocate a descriptor set using a descriptor set layout which + * does NOT contain a variable count descriptor binding. 
+ */ + DescriptorSetAllocation(const Anvil::DescriptorSetLayout* in_ds_layout_ptr); + + /* Constructor. + * + * Use if you need to allocate a descriptor set using a descriptor set layout which + * CONTAINS a variable count descriptor binding. + */ + DescriptorSetAllocation(const Anvil::DescriptorSetLayout* in_ds_layout_ptr, + const uint32_t& in_n_variable_descriptor_bindings); + } DescriptorSetAllocation; + + typedef struct DescriptorUpdateTemplateEntry + { + Anvil::DescriptorType descriptor_type; + uint32_t n_descriptors; + uint32_t n_destination_array_element; + uint32_t n_destination_binding; + size_t offset; + size_t stride; + + DescriptorUpdateTemplateEntry(); + DescriptorUpdateTemplateEntry(const Anvil::DescriptorType& in_descriptor_type, + const uint32_t& in_n_destination_array_element, + const uint32_t& in_n_destination_binding, + const uint32_t& in_n_descriptors, + const size_t& in_offset, + const size_t& in_stride); + + VkDescriptorUpdateTemplateEntryKHR get_vk_descriptor_update_template_entry_khr() const; + + bool operator==(const DescriptorUpdateTemplateEntry& in_entry) const; + bool operator< (const DescriptorUpdateTemplateEntry& in_entry) const; + } DescriptorUpdateTemplateEntry; + + typedef struct ExtensionAMDBufferMarkerEntrypoints + { + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD; + + ExtensionAMDBufferMarkerEntrypoints(); + } ExtensionAMDBufferMarkerEntrypoints; + + typedef struct ExtensionAMDDrawIndirectCountEntrypoints + { + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD; + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD; + + ExtensionAMDDrawIndirectCountEntrypoints(); + } ExtensionAMDDrawIndirectCountEntrypoints; + + typedef struct ExtensionAMDShaderInfoEntrypoints + { + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD; + + ExtensionAMDShaderInfoEntrypoints(); + } ExtensionAMDShaderInfoEntrypoints; + + typedef struct ExtensionEXTDebugMarkerEntrypoints + { + PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT; + PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT; + PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT; + PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT; + PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT; + + ExtensionEXTDebugMarkerEntrypoints(); + } ExtensionEXTDebugMarkerEntrypoints; + + typedef struct ExtensionEXTDebugReportEntrypoints + { + PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT; + PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT; + PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT; + + ExtensionEXTDebugReportEntrypoints(); + } ExtensionEXTDebugReportEntrypoints; + + typedef struct ExtensionEXTDebugUtilsEntrypoints + { + PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT; + PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT; + PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT; + PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT; + PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT; + PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT; + PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT; + PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT; + PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT; + PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT; + + ExtensionEXTDebugUtilsEntrypoints(); + } ExtensionEXTDebugUtilsEntrypoints; + + typedef struct 
ExtensionEXTExternalMemoryHostEntrypoints + { + PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT; + + ExtensionEXTExternalMemoryHostEntrypoints(); + } ExtensionEXTExternalMemoryHostEntrypoints; + + typedef struct ExtensionEXTHdrMetadataEntrypoints + { + PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT; + + ExtensionEXTHdrMetadataEntrypoints(); + } ExtensionEXTHdrMetadataEntrypoints; + + typedef struct ExtensionEXTSampleLocationsEntrypoints + { + PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT; + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT; + + ExtensionEXTSampleLocationsEntrypoints(); + } ExtensionEXTSampleLocationsEntrypoints; + + typedef struct ExtensionEXTTransformFeedbackEntrypoints + { + PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT; + PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT; + PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT; + PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT; + PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT; + PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT; + + ExtensionEXTTransformFeedbackEntrypoints(); + } ExtensionEXTTransformFeedbackEntrypoints; + + typedef struct ExtensionKHRCreateRenderpass2Entrypoints + { + PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR; + PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR; + PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR; + PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR; + + ExtensionKHRCreateRenderpass2Entrypoints(); + } ExtensionKHRCreateRenderpass2Entrypoints; + + typedef struct ExtensionKHRDeviceGroupEntrypoints + { + PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR; + PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR; + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR; + PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR; + PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR; + PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR; + PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR; + + ExtensionKHRDeviceGroupEntrypoints(); + } ExtensionKHRDeviceGroupEntrypoints; + + typedef struct ExtensionKHRDrawIndirectCountEntrypoints + { + PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR; + PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR; + + ExtensionKHRDrawIndirectCountEntrypoints(); + } ExtensionKHRDrawIndirectCountEntrypoints; + + typedef struct ExtensionKHRBindMemory2Entrypoints + { + PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR; + PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR; + + ExtensionKHRBindMemory2Entrypoints(); + } ExtensionKHRBindMemory2Entrypoints; + + typedef struct ExtensionKHRDescriptorUpdateTemplateEntrypoints + { + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR; + + ExtensionKHRDescriptorUpdateTemplateEntrypoints(); + } ExtensionKHRDescriptorUpdateTemplateEntrypoints; + + typedef struct ExtensionKHRExternalFenceCapabilitiesEntrypoints + { + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR; + + ExtensionKHRExternalFenceCapabilitiesEntrypoints(); + } ExtensionKHRExternalFenceCapabilitiesEntrypoints; + + typedef struct 
ExtensionKHRExternalMemoryCapabilitiesEntrypoints + { + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR; + + ExtensionKHRExternalMemoryCapabilitiesEntrypoints(); + } ExtensionKHRExternalMemoryCapabilitiesEntrypoints; + + typedef struct ExtensionKHRExternalSemaphoreCapabilitiesEntrypoints + { + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + + ExtensionKHRExternalSemaphoreCapabilitiesEntrypoints(); + } ExtensionKHRExternalSemaphoreCapabilitiesEntrypoints; + + #if defined(_WIN32) + typedef struct ExtensionKHRExternalFenceWin32Entrypoints + { + PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR; + PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR; + + ExtensionKHRExternalFenceWin32Entrypoints(); + } ExtensionKHRExternalFenceWin32Entrypoints; + + typedef struct ExtensionKHRExternalMemoryWin32Entrypoints + { + PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR; + PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR; + + ExtensionKHRExternalMemoryWin32Entrypoints(); + } ExtensionKHRExternalMemoryWin32Entrypoints; + + typedef struct ExtensionKHRExternalSemaphoreWin32Entrypoints + { + PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR; + PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR; + + ExtensionKHRExternalSemaphoreWin32Entrypoints(); + } ExtensionKHRExternalSemaphoreWin32Entrypoints; +#else + typedef struct ExtensionKHRExternalFenceFdEntrypoints + { + PFN_vkGetFenceFdKHR vkGetFenceFdKHR; + PFN_vkImportFenceFdKHR vkImportFenceFdKHR; + + ExtensionKHRExternalFenceFdEntrypoints(); + } ExtensionKHRExternalFenceFdEntrypoints; + + typedef struct ExtensionKHRExternalMemoryFdEntrypoints + { + PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR; + PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR; + + ExtensionKHRExternalMemoryFdEntrypoints(); + } ExtensionKHRExternalMemoryFdEntrypoints; + + typedef struct ExtensionKHRExternalSemaphoreFdEntrypoints + { + PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR; + PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR; + + ExtensionKHRExternalSemaphoreFdEntrypoints(); + } ExtensionKHRExternalSemaphoreFdEntrypoints; +#endif + + typedef struct ExtensionKHRGetMemoryRequirements2Entrypoints + { + PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR; + PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR; + PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR; + + ExtensionKHRGetMemoryRequirements2Entrypoints(); + } ExtensionKHRGetMemoryRequirements2Entrypoints; + + typedef struct ExtensionKHRGetPhysicalDeviceProperties2 + { + PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR; + PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR; + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR; + PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR; + PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + + ExtensionKHRGetPhysicalDeviceProperties2(); + } ExtensionKHRGetPhysicalDeviceProperties2; + + typedef struct ExtensionKHRMaintenance1Entrypoints + { + PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR; + + 
ExtensionKHRMaintenance1Entrypoints(); + } ExtensionKHRMaintenance1Entrypoints; + + typedef struct ExtensionKHRMaintenance3Entrypoints + { + PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR; + + ExtensionKHRMaintenance3Entrypoints(); + } ExtensionKHRMaintenance3Entrypoints; + + typedef struct ExtensionKHRSamplerYCbCrConversionEntrypoints + { + PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR; + PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR; + + ExtensionKHRSamplerYCbCrConversionEntrypoints(); + } ExtensionKHRSamplerYCbCrConversionEntrypoints; + + typedef struct ExtensionKHRSurfaceEntrypoints + { + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR; + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR; + + ExtensionKHRSurfaceEntrypoints(); + } ExtensionKHRSurfaceEntrypoints; + + typedef struct ExtensionKHRSwapchainEntrypoints + { + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR; + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR; + PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR; + PFN_vkQueuePresentKHR vkQueuePresentKHR; + + ExtensionKHRSwapchainEntrypoints(); + } ExtensionKHRSwapchainEntrypoints; + + #ifdef _WIN32 + #if defined(ANVIL_INCLUDE_WIN3264_WINDOW_SYSTEM_SUPPORT) + typedef struct ExtensionKHRWin32SurfaceEntrypoints + { + PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR; + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR; + + ExtensionKHRWin32SurfaceEntrypoints(); + } ExtensionKHRWin32SurfaceEntrypoints; + #endif + #else + #if defined(ANVIL_INCLUDE_XCB_WINDOW_SYSTEM_SUPPORT) + typedef struct ExtensionKHRXcbSurfaceEntrypoints + { + PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR; + + ExtensionKHRXcbSurfaceEntrypoints(); + } ExtensionKHRXcbSurfaceEntrypoints; + #endif + #if defined(ANVIL_INCLUDE_WAYLAND_WINDOW_SYSTEM_SUPPORT) + typedef struct ExtensionKHRWaylandSurfaceEntrypoints + { + PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR; + + ExtensionKHRWaylandSurfaceEntrypoints(); + } ExtensionKHRWaylandSurfaceEntrypoints; + #endif + #endif + #if defined(ANVIL_INCLUDE_HEADLESS_WINDOW_SYSTEM_SUPPORT) + typedef struct ExtensionEXTHeadlessSurfaceEntrypoints + { + PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT; + + ExtensionEXTHeadlessSurfaceEntrypoints(); + } ExtensionEXTHeadlessSurfaceEntrypoints; + #endif + + typedef struct ExtensionKHRDeviceGroupCreationEntrypoints + { + PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR; + + ExtensionKHRDeviceGroupCreationEntrypoints(); + } ExtensionKHRDeviceGroupCreationEntrypoints; + + typedef struct EXTInlineUniformBlockFeatures + { + bool descriptor_binding_inline_uniform_block_update_after_bind; + bool inline_uniform_block; + + EXTInlineUniformBlockFeatures(); + EXTInlineUniformBlockFeatures(const VkPhysicalDeviceInlineUniformBlockFeaturesEXT& in_features); + + VkPhysicalDeviceInlineUniformBlockFeaturesEXT get_vk_physical_device_inline_uniform_block_features() const; + + bool operator==(const EXTInlineUniformBlockFeatures&) const; + + } EXTInlineUniformBlockFeatures; + + typedef struct EXTInlineUniformBlockProperties + { + uint32_t 
max_descriptor_set_inline_uniform_blocks; + uint32_t max_inline_uniform_block_size; + uint32_t max_per_stage_descriptor_inline_uniform_blocks; + + uint32_t max_descriptor_set_update_after_bind_inline_uniform_blocks; + uint32_t max_per_stage_descriptor_update_after_bind_inline_uniform_blocks; + + EXTInlineUniformBlockProperties(); + EXTInlineUniformBlockProperties(const VkPhysicalDeviceInlineUniformBlockPropertiesEXT& in_props); + + bool operator==(const EXTInlineUniformBlockProperties&) const; + + } EXTInlineUniformBlockProperties; + + typedef struct EXTSamplerFilterMinmaxProperties + { + bool filter_minmax_single_component_formats; + bool filter_minmax_image_component_mapping; + + EXTSamplerFilterMinmaxProperties(); + EXTSamplerFilterMinmaxProperties(const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT& in_props); + + bool operator==(const EXTSamplerFilterMinmaxProperties&) const; + + } EXTSamplerFilterMinmaxProperties; + + typedef struct KHRSamplerYCbCrConversionFeatures + { + bool sampler_ycbcr_conversion; + + KHRSamplerYCbCrConversionFeatures(); + KHRSamplerYCbCrConversionFeatures(const VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR& in_features); + + VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR get_vk_physical_device_sampler_ycbcr_conversion_features() const; + + bool operator==(const KHRSamplerYCbCrConversionFeatures&) const; + + } KHRSamplerYCbCrConversionFeatures; + + typedef struct FenceProperties + { + ExternalFenceProperties external_fence_properties; + + /* Dummy constructor */ + FenceProperties(); + + FenceProperties(const ExternalFenceProperties& in_external_fence_properties); + } FenceProperties; + + typedef struct FencePropertiesQuery + { + const Anvil::ExternalFenceHandleTypeFlagBits external_fence_handle_type; + + explicit FencePropertiesQuery(const Anvil::ExternalFenceHandleTypeFlagBits& in_external_fence_handle_type) + :external_fence_handle_type(in_external_fence_handle_type) + { + /* Stub */ + } + + FencePropertiesQuery (const FencePropertiesQuery&) = default; + FencePropertiesQuery& operator=(const FencePropertiesQuery&) = delete; + } FencePropertiesQuery; + + typedef struct FormatProperties + { + FormatFeatureFlags buffer_capabilities; + FormatFeatureFlags linear_tiling_capabilities; + FormatFeatureFlags optimal_tiling_capabilities; + + FormatProperties(); + FormatProperties(const VkFormatProperties& in_format_props); + } FormatProperties; + + typedef struct XYColorEXT + { + float x; + float y; + + XYColorEXT() + :x(0.0f), + y(0.0f) + { + /* Stub */ + } + + VkXYColorEXT get_vk() const + { + VkXYColorEXT result; + + result.x = x; + result.y = y; + + return result; + } + } XYColorEXT; + + typedef struct HdrMetadataEXT + { + Anvil::XYColorEXT display_primary_red; + Anvil::XYColorEXT display_primary_green; + Anvil::XYColorEXT display_primary_blue; + float max_content_light_level; + float max_frame_average_light_level; + float max_luminance; + float min_luminance; + Anvil::XYColorEXT white_point; + + HdrMetadataEXT() + { + max_content_light_level = 0.0f; + max_frame_average_light_level = 0.0f; + max_luminance = 0.0f; + min_luminance = 0.0f; + } + + VkHdrMetadataEXT get_vk() const + { + VkHdrMetadataEXT result; + + result.displayPrimaryBlue = display_primary_blue.get_vk (); + result.displayPrimaryGreen = display_primary_green.get_vk(); + result.displayPrimaryRed = display_primary_red.get_vk (); + result.maxContentLightLevel = max_content_light_level; + result.maxFrameAverageLightLevel = max_frame_average_light_level; + result.maxLuminance = max_luminance; + 
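+            /* The remaining Vulkan fields are filled in right below. The resulting structure can then be
+             * passed to the VK_EXT_hdr_metadata entrypoint; a minimal sketch, assuming a populated
+             * ExtensionEXTHdrMetadataEntrypoints instance plus device and swapchain handles:
+             *
+             *     Anvil::HdrMetadataEXT metadata;
+             *
+             *     metadata.max_luminance = 1000.0f;
+             *     metadata.min_luminance = 0.01f;
+             *
+             *     const VkHdrMetadataEXT metadata_vk = metadata.get_vk();
+             *
+             *     hdr_metadata_entrypoints.vkSetHdrMetadataEXT(device_vk,
+             *                                                  1,              // swapchainCount
+             *                                                  &swapchain_vk,
+             *                                                  &metadata_vk);
+             */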
+            result.minLuminance = min_luminance;
+            result.pNext        = nullptr;
+            result.sType        = VK_STRUCTURE_TYPE_HDR_METADATA_EXT;
+            result.whitePoint   = white_point.get_vk();
+
+            return result;
+        }
+    } HdrMetadataEXT;
+
+    typedef struct ImageFormatProperties
+    {
+        ExternalMemoryProperties external_handle_properties;
+
+        VkExtent3D              max_extent;
+        VkDeviceSize            max_resource_size;
+        uint32_t                n_max_array_layers;
+        uint32_t                n_max_mip_levels;
+        Anvil::SampleCountFlags sample_counts;
+
+        /* Tells how many combined image sampler descriptors the running Vulkan implementation is going to use to provide
+         * support for accessing the format.
+         *
+         * This will only be non-zero if VK_KHR_sampler_ycbcr_conversion is supported and the query targets one of the YUV formats.
+         */
+        uint32_t n_combined_image_sampler_descriptors_used;
+
+        /* Tells whether the format can be used with functions introduced in VK_AMD_texture_gather_bias_lod */
+        bool supports_amd_texture_gather_bias_lod;
+
+        /* Tells what usage flags can be requested for this image format, specifically for the stencil image aspect.
+         *
+         * Only meaningful if VK_EXT_separate_stencil_usage extension is supported.
+         */
+        Anvil::ImageUsageFlags valid_stencil_aspect_image_usage_flags;
+
+
+        /** Dummy constructor */
+        ImageFormatProperties();
+
+        /** Constructor. Initializes the instance using data provided by the driver.
+         *
+         * @param in_image_format_props Vulkan structure to use for initialization.
+         **/
+        ImageFormatProperties(const VkImageFormatProperties&  in_image_format_props,
+                              const bool&                     in_supports_amd_texture_gather_bias_lod,
+                              const ExternalMemoryProperties& in_external_handle_properties,
+                              const Anvil::ImageUsageFlags&   in_valid_stencil_aspect_image_usage_flags,
+                              const uint32_t&                 in_n_combined_image_sampler_descriptors_used);
+    } ImageFormatProperties;
+
+    typedef struct ImageFormatPropertiesQuery
+    {
+        /* TODO
+         *
+         * NOTE: In order to retrieve information regarding device's external handle support for a particular image
+         *       configuration, make sure to call ImageFormatPropertiesQuery::set_external_memory_handle_type(), prior
+         *       to passing the struct instance as an arg to get_image_format_properties().
+ */ + explicit ImageFormatPropertiesQuery(const Anvil::Format& in_format, + const Anvil::ImageType& in_image_type, + const Anvil::ImageTiling& in_tiling, + const Anvil::ImageUsageFlags& in_usage_flags, + const Anvil::ImageCreateFlags& in_create_flags) + :create_flags (in_create_flags), + external_memory_handle_type(Anvil::ExternalMemoryHandleTypeFlagBits::NONE), + format (in_format), + image_type (in_image_type), + tiling (in_tiling), + usage_flags (in_usage_flags), + stencil_usage_flags (Anvil::ImageUsageFlagBits::NONE) + { + /* Stub */ + } + + void set_external_memory_handle_type(const Anvil::ExternalMemoryHandleTypeFlagBits& in_external_memory_handle_type) + { + external_memory_handle_type = in_external_memory_handle_type; + } + + const Anvil::ImageCreateFlags create_flags; + Anvil::ExternalMemoryHandleTypeFlagBits external_memory_handle_type; + const Anvil::Format format; + const Anvil::ImageType image_type; + const Anvil::ImageTiling tiling; + const Anvil::ImageUsageFlags usage_flags; + const Anvil::ImageUsageFlags stencil_usage_flags; + + ImageFormatPropertiesQuery (const ImageFormatPropertiesQuery& in_query) = default; + ImageFormatPropertiesQuery& operator=(const ImageFormatPropertiesQuery& in_query) = delete; + } ImageFormatPropertiesQuery; + + /* NOTE: Maps 1:1 to VkImageResolve */ + typedef struct + { + Anvil::ImageSubresourceLayers src_subresource; + VkOffset3D src_offset; + Anvil::ImageSubresourceLayers dst_subresource; + VkOffset3D dst_offset; + VkExtent3D extent; + } ImageResolve; + + static_assert(sizeof(ImageResolve) == sizeof(VkImageResolve), "Struct sizes must match"); + static_assert(offsetof(ImageResolve, src_subresource) == offsetof(VkImageResolve, srcSubresource), "Member offsets must match"); + static_assert(offsetof(ImageResolve, src_offset) == offsetof(VkImageResolve, srcOffset), "Member offsets must match"); + static_assert(offsetof(ImageResolve, dst_subresource) == offsetof(VkImageResolve, dstSubresource), "Member offsets must match"); + static_assert(offsetof(ImageResolve, dst_offset) == offsetof(VkImageResolve, dstOffset), "Member offsets must match"); + static_assert(offsetof(ImageResolve, extent) == offsetof(VkImageResolve, extent), "Member offsets must match"); + + /** Describes an image memory barrier. */ + typedef struct ImageBarrier + { + Anvil::AccessFlags dst_access_mask; + Anvil::AccessFlags src_access_mask; + + uint32_t dst_queue_family_index; + VkImage image; + VkImageMemoryBarrier image_barrier_vk; + Anvil::Image* image_ptr; + Anvil::ImageLayout new_layout; + Anvil::ImageLayout old_layout; + uint32_t src_queue_family_index; + Anvil::ImageSubresourceRange subresource_range; + + /** Constructor. + * + * Note that @param in_image_ptr is retained by this function. + * + * @param in_source_access_mask Source access mask to use for the barrier. + * @param in_destination_access_mask Destination access mask to use for the barrier. + * @param in_old_layout Old layout of @param in_image_ptr to use for the barrier. + * @param in_new_layout New layout of @param in_image_ptr to use for the barrier. + * @param in_src_queue_family_index Source queue family index to use for the barrier. + * @param in_dst_queue_family_index Destination queue family index to use for the barrier. + * @param in_image_ptr Image instance the barrier refers to. May be nullptr, in which case + * "image" and "image_ptr" fields will be set to nullptr. + * The instance will be retained by this function. + * @param in_image_subresource_range Subresource range to use for the barrier. 
+ * + **/ + ImageBarrier(Anvil::AccessFlags in_source_access_mask, + Anvil::AccessFlags in_destination_access_mask, + Anvil::ImageLayout in_old_layout, + Anvil::ImageLayout in_new_layout, + uint32_t in_src_queue_family_index, + uint32_t in_dst_queue_family_index, + Anvil::Image* in_image_ptr, + Anvil::ImageSubresourceRange in_image_subresource_range); + + /** Destructor. + * + * Releases the encapsulated Image instance. + **/ + virtual ~ImageBarrier(); + + /** Copy constructor. + * + * Retains the Image instance stored in the input barrier. + * + * @param in Barrier instance to copy data from. + **/ + ImageBarrier(const ImageBarrier& in); + + /** Returns a Vulkan memory barrier descriptor, whose configuration corresponds to + * to the configuration of this descriptor. + **/ + virtual VkImageMemoryBarrier get_barrier_vk() const + { + return image_barrier_vk; + } + + bool operator==(const ImageBarrier& in_barrier) const; + + private: + ImageBarrier& operator=(const ImageBarrier&); + } ImageBarrier; + + typedef struct ExternalMemoryHandleImportInfo + { + ExternalHandleType handle; /* Used for non-host pointer import ops */ + void* host_ptr; /* Used for host pointer import ops */ + + #if defined(_WIN32) + std::wstring name; + #endif + + ExternalMemoryHandleImportInfo() + { + #if defined(_WIN32) + handle = nullptr; + #else + handle = -1; + #endif + + host_ptr = nullptr; + } + } ExternalMemoryHandleImportInfo; + + typedef struct EXTScalarBlockLayoutFeatures + { + bool scalar_block_layout; + + EXTScalarBlockLayoutFeatures(); + EXTScalarBlockLayoutFeatures(const VkPhysicalDeviceScalarBlockLayoutFeaturesEXT& in_features); + + VkPhysicalDeviceScalarBlockLayoutFeaturesEXT get_vk_physical_device_scalar_block_layout_features_ext() const; + + bool operator==(const EXTScalarBlockLayoutFeatures& in_features) const; + } EXTScalarBlockLayoutFeatures; + + typedef struct EXTTransformFeedbackFeatures + { + bool geometry_streams; + bool transform_feedback; + + EXTTransformFeedbackFeatures(); + EXTTransformFeedbackFeatures(const VkPhysicalDeviceTransformFeedbackFeaturesEXT& in_features); + + VkPhysicalDeviceTransformFeedbackFeaturesEXT get_vk_physical_device_transform_feedback_features() const; + + bool operator==(const EXTTransformFeedbackFeatures& in_features) const; + } EXTTransformFeedbackFeatures; + + typedef struct EXTTransformFeedbackProperties + { + uint32_t max_transform_feedback_buffer_data_size; + uint32_t max_transform_feedback_buffer_data_stride; + VkDeviceSize max_transform_feedback_buffer_size; + uint32_t max_transform_feedback_stream_data_size; + uint32_t n_max_transform_feedback_buffers; + uint32_t n_max_transform_feedback_streams; + bool supports_transform_feedback_draw; + bool supports_transform_feedback_queries; + bool supports_transform_feedback_rasterization_stream_select; + bool supports_transform_feedback_streams_lines_triangles; + + EXTTransformFeedbackProperties(); + EXTTransformFeedbackProperties(const VkPhysicalDeviceTransformFeedbackPropertiesEXT& in_props); + + bool operator==(const EXTTransformFeedbackProperties&) const; + + } EXTTransformFeedbackProperties; + + typedef struct MemoryBudget + { + std::array heap_budget; + std::array heap_usage; + + MemoryBudget(); + MemoryBudget(const VkPhysicalDeviceMemoryBudgetPropertiesEXT& in_properties); + + } MemoryBudget; + + typedef struct EXTMemoryPriorityFeatures + { + bool is_memory_priority_supported; + + EXTMemoryPriorityFeatures(); + EXTMemoryPriorityFeatures(const VkPhysicalDeviceMemoryPriorityFeaturesEXT& in_features); + + 
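+        /* Wrappers like this one follow a common pattern: they can be constructed from the corresponding
+         * Vulkan feature structure (as reported through a VkPhysicalDeviceFeatures2KHR pNext chain) and
+         * converted back with the get_vk_*() helper when the features need to be handed back to Vulkan.
+         * A minimal sketch, assuming a physical device handle, VK_KHR_get_physical_device_properties2
+         * entrypoints and VK_EXT_memory_priority support - all of which are assumptions here:
+         *
+         *     VkPhysicalDeviceMemoryPriorityFeaturesEXT mem_priority_features_vk = {};
+         *     VkPhysicalDeviceFeatures2KHR              features2_vk             = {};
+         *
+         *     mem_priority_features_vk.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT;
+         *     features2_vk.sType             = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR;
+         *     features2_vk.pNext             = &mem_priority_features_vk;
+         *
+         *     gpdp2_entrypoints.vkGetPhysicalDeviceFeatures2KHR(physical_device_vk,
+         *                                                       &features2_vk);
+         *
+         *     const Anvil::EXTMemoryPriorityFeatures mem_priority_features(mem_priority_features_vk);
+         */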
VkPhysicalDeviceMemoryPriorityFeaturesEXT get_vk_physical_device_memory_priority_features() const; + + bool operator==(const EXTMemoryPriorityFeatures&) const; + + } EXTMemoryPriorityFeatures; + + typedef struct KHR16BitStorageFeatures + { + bool is_input_output_storage_supported; + bool is_push_constant_16_bit_storage_supported; + bool is_storage_buffer_16_bit_access_supported; + bool is_uniform_and_storage_buffer_16_bit_access_supported; + + KHR16BitStorageFeatures(); + KHR16BitStorageFeatures(const VkPhysicalDevice16BitStorageFeaturesKHR& in_features); + + VkPhysicalDevice16BitStorageFeaturesKHR get_vk_physical_device_16_bit_storage_features() const; + + bool operator==(const KHR16BitStorageFeatures& in_features) const; + } KHR16BitStorageFeatures; + + typedef struct KHR8BitStorageFeatures + { + bool storage_buffer_8_bit_access; + bool storage_push_constant_8; + bool uniform_and_storage_buffer_8_bit_access; + + KHR8BitStorageFeatures(); + KHR8BitStorageFeatures(const VkPhysicalDevice8BitStorageFeaturesKHR& in_features); + + VkPhysicalDevice8BitStorageFeaturesKHR get_vk_physical_device_8_bit_storage_features() const; + + bool operator==(const KHR8BitStorageFeatures& in_features) const; + } KHR8BitStorageFeatures; + + typedef struct KHRDepthStencilResolveProperties + { + bool independent_resolve; + bool independent_resolve_none; + Anvil::ResolveModeFlags supported_depth_resolve_modes; + Anvil::ResolveModeFlags supported_stencil_resolve_modes; + + KHRDepthStencilResolveProperties(); + KHRDepthStencilResolveProperties(const VkPhysicalDeviceDepthStencilResolvePropertiesKHR& in_properties); + + bool operator==(const KHRDepthStencilResolveProperties& in_props) const; + } KHRDepthStencilResolveProperties; + + typedef struct KHRDriverPropertiesProperties + { + Anvil::ConformanceVersionKHR conformance_version; + Anvil::DriverIdKHR driver_id; + char driver_name[VK_MAX_DRIVER_NAME_SIZE_KHR]; + char driver_info[VK_MAX_DRIVER_INFO_SIZE_KHR]; + + KHRDriverPropertiesProperties(); + KHRDriverPropertiesProperties(const VkPhysicalDeviceDriverPropertiesKHR& in_properties); + + bool operator==(const KHRDriverPropertiesProperties& in_props) const; + + } KHRDriverPropertiesProperties; + + typedef struct KHRExternalMemoryCapabilitiesPhysicalDeviceIDProperties + { + uint8_t device_luid[VK_LUID_SIZE]; + bool device_luid_valid; + + uint8_t device_uuid[VK_UUID_SIZE]; + uint8_t driver_uuid[VK_UUID_SIZE]; + + uint32_t device_node_mask; + + KHRExternalMemoryCapabilitiesPhysicalDeviceIDProperties(); + KHRExternalMemoryCapabilitiesPhysicalDeviceIDProperties(const VkPhysicalDeviceIDPropertiesKHR& in_properties); + + bool operator==(const KHRExternalMemoryCapabilitiesPhysicalDeviceIDProperties& in_props) const; + + } KHRExternalMemoryCapabilitiesPhysicalDeviceIDProperties; + + typedef struct KHRFloat16Int8Features + { + bool shader_float16; + bool shader_int8; + + KHRFloat16Int8Features(); + KHRFloat16Int8Features(const VkPhysicalDeviceFloat16Int8FeaturesKHR& in_features); + + VkPhysicalDeviceFloat16Int8FeaturesKHR get_vk_physical_device_float16_int8_features() const; + + bool operator==(const KHRFloat16Int8Features&) const; + + } KHRFloat16Int8Features; + + typedef struct KHRMaintenance2PhysicalDevicePointClippingProperties + { + PointClippingBehavior point_clipping_behavior; + + KHRMaintenance2PhysicalDevicePointClippingProperties(); + KHRMaintenance2PhysicalDevicePointClippingProperties(const VkPhysicalDevicePointClippingPropertiesKHR& in_props); + + bool operator==(const 
KHRMaintenance2PhysicalDevicePointClippingProperties&) const; + + } KHRMaintenance2PhysicalDevicePointClippingProperties; + + typedef struct KHRMaintenance3Properties + { + VkDeviceSize max_memory_allocation_size; + uint32_t max_per_set_descriptors; + + KHRMaintenance3Properties(); + KHRMaintenance3Properties(const VkPhysicalDeviceMaintenance3PropertiesKHR& in_props); + + bool operator==(const KHRMaintenance3Properties&) const; + + } KHRMaintenance3Properties; + + typedef struct KHRMultiviewFeatures + { + bool multiview; + bool multiview_geometry_shader; + bool multiview_tessellation_shader; + + KHRMultiviewFeatures(); + KHRMultiviewFeatures(const VkPhysicalDeviceMultiviewFeatures& in_features); + + VkPhysicalDeviceMultiviewFeatures get_vk_physical_device_multiview_features() const; + + bool operator==(const KHRMultiviewFeatures& in_features) const; + } KHRMultiviewFeatures; + + typedef struct KHRMultiviewProperties + { + uint32_t max_multiview_view_count; + uint32_t max_multiview_instance_index; + + KHRMultiviewProperties(); + KHRMultiviewProperties(const VkPhysicalDeviceMultiviewPropertiesKHR& in_props); + + bool operator==(const KHRMultiviewProperties&) const; + + } KHRMultiviewProperties; + + typedef struct KHRShaderAtomicInt64Features + { + bool shader_buffer_int64_atomics; + bool shader_shared_int64_atomics; + + KHRShaderAtomicInt64Features(); + KHRShaderAtomicInt64Features(const VkPhysicalDeviceShaderAtomicInt64FeaturesKHR& in_features); + + VkPhysicalDeviceShaderAtomicInt64FeaturesKHR get_vk_physical_device_shader_atomic_int64_features() const; + + bool operator==(const KHRShaderAtomicInt64Features& in_features) const; + } KHRShaderAtomicInt64Features; + + typedef struct KHRShaderFloatControlsProperties + { + VkShaderFloatControlsIndependenceKHR denorm_behavior_independence; + VkShaderFloatControlsIndependenceKHR rounding_mode_independence; + bool shader_denorm_flush_to_zero_float16; + bool shader_denorm_flush_to_zero_float32; + bool shader_denorm_flush_to_zero_float64; + bool shader_denorm_preserve_float16; + bool shader_denorm_preserve_float32; + bool shader_denorm_preserve_float64; + bool shader_rounding_mode_RTE_float16; + bool shader_rounding_mode_RTE_float32; + bool shader_rounding_mode_RTE_float64; + bool shader_rounding_mode_RTZ_float16; + bool shader_rounding_mode_RTZ_float32; + bool shader_rounding_mode_RTZ_float64; + bool shader_signed_zero_inf_nan_preserve_float16; + bool shader_signed_zero_inf_nan_preserve_float32; + bool shader_signed_zero_inf_nan_preserve_float64; + + KHRShaderFloatControlsProperties(); + KHRShaderFloatControlsProperties(const VkPhysicalDeviceFloatControlsPropertiesKHR& in_properties); + + VkPhysicalDeviceFloatControlsPropertiesKHR get_vk_physical_device_float_controls_properties() const; + + bool operator==(const KHRShaderFloatControlsProperties& in_properties) const; + } KHRShaderFloatControlsProperties; + + typedef struct KHRVariablePointerFeatures + { + bool variable_pointers; + bool variable_pointers_storage_buffer; + + KHRVariablePointerFeatures(); + KHRVariablePointerFeatures(const VkPhysicalDeviceVariablePointerFeatures& in_features); + + VkPhysicalDeviceVariablePointerFeaturesKHR get_vk_physical_device_variable_pointer_features() const; + + bool operator==(const KHRVariablePointerFeatures& in_features) const; + } KHRVariablePointerFeatures; + + typedef struct KHRVulkanMemoryModelFeatures + { + bool vulkan_memory_model; + bool vulkan_memory_model_availability_visibility_chains; + bool vulkan_memory_model_device_scope; + + 
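+        /* Feature reports such as this one are usually consumed by checking individual fields before the
+         * corresponding extension is enabled at device creation time. A minimal sketch, assuming a
+         * PhysicalDeviceFeatures instance ("device_features") whose khr_vulkan_memory_model_features_ptr
+         * field has already been filled:
+         *
+         *     const auto* features_ptr = device_features.khr_vulkan_memory_model_features_ptr;
+         *
+         *     const bool can_use_vk_memory_model = (features_ptr != nullptr)                    &&
+         *                                          features_ptr->vulkan_memory_model            &&
+         *                                          features_ptr->vulkan_memory_model_device_scope;
+         */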
+        KHRVulkanMemoryModelFeatures();
+        KHRVulkanMemoryModelFeatures(const VkPhysicalDeviceVulkanMemoryModelFeaturesKHR& in_features);
+
+        VkPhysicalDeviceVulkanMemoryModelFeaturesKHR get_vk_physical_device_vulkan_memory_model_features() const;
+
+        bool operator==(const KHRVulkanMemoryModelFeatures& in_features) const;
+    } KHRVulkanMemoryModelFeatures;
+
+    /** Holds properties of a single Vulkan Layer. */
+    typedef struct Layer
+    {
+        std::string description;
+        std::vector extensions;
+        uint32_t    implementation_version;
+        std::string name;
+        uint32_t    spec_version;
+
+        /** Dummy constructor.
+         *
+         * @param in_layer_name Name to use for the layer.
+         **/
+        Layer(const std::string& in_layer_name);
+
+        /** Constructor. Initializes the instance using data provided by the driver.
+         *
+         * @param in_layer_props Vulkan structure to use for initialization.
+         **/
+        Layer(const VkLayerProperties& in_layer_props);
+
+        /** Returns true if @param in_layer_name matches the layer name described by the instance. */
+        bool operator==(const std::string& in_layer_name) const;
+    } Layer;
+
+    typedef std::vector<Layer> Layers;
+
+    /** Describes a Vulkan memory barrier. */
+    typedef struct MemoryBarrierAnv
+    {
+        Anvil::AccessFlags destination_access_mask;
+        Anvil::AccessFlags source_access_mask;
+
+        VkMemoryBarrier memory_barrier_vk;
+
+        /** Constructor.
+         *
+         * @param in_source_access_mask      Source access mask of the Vulkan memory barrier.
+         * @param in_destination_access_mask Destination access mask of the Vulkan memory barrier.
+         *
+         **/
+        explicit MemoryBarrierAnv(Anvil::AccessFlags in_destination_access_mask,
+                                  Anvil::AccessFlags in_source_access_mask);
+
+        /** Destructor. */
+        virtual ~MemoryBarrierAnv()
+        {
+            /* Stub */
+        }
+
+        /** Returns a Vulkan memory barrier descriptor, whose configuration corresponds
+         * to the configuration of this descriptor.
+         **/
+        virtual VkMemoryBarrier get_barrier_vk() const
+        {
+            return memory_barrier_vk;
+        }
+    } MemoryBarrierAnv;
+
+    /** Holds properties of a single Vulkan Memory Heap. */
+    typedef struct MemoryHeap
+    {
+        Anvil::MemoryHeapFlags flags;
+
+        uint32_t     index;
+        VkDeviceSize size;
+
+        /** Stub constructor */
+        MemoryHeap();
+    } MemoryHeap;
+
+    typedef std::vector<MemoryHeap> MemoryHeaps;
+
+    extern bool operator==(const MemoryHeap& in1,
+                           const MemoryHeap& in2);
+
+    /** Holds properties of a single Vulkan Memory Type. */
+    typedef struct MemoryType
+    {
+        MemoryFeatureFlags features;
+        MemoryHeap*        heap_ptr;
+
+        Anvil::MemoryPropertyFlags flags;
+
+        /** Constructor. Initializes the instance using data provided by the driver.
+         *
+         * @param in_type             Vulkan structure to use for initialization.
+         * @param in_memory_props_ptr Used to initialize the MemoryHeap pointer member. Must not be nullptr.
+         **/
+        explicit MemoryType(const VkMemoryType&      in_type,
+                            struct MemoryProperties* in_memory_props_ptr);
+    } MemoryType;
+
+    typedef std::vector<MemoryType> MemoryTypes;
+
+    extern bool operator==(const MemoryType& in1,
+                           const MemoryType& in2);
+
+    /** Holds information about available memory heaps & types for a specific physical device. */
+    typedef struct MemoryProperties
+    {
+        MemoryHeap* heaps;
+        uint32_t    n_heaps;
+        MemoryTypes types;
+
+        MemoryProperties();
+
+        /** Destructor */
+        ~MemoryProperties();
+
+        /** Initializes the instance using data provided by the driver.
+         *
+         * @param in_mem_properties Vulkan structure to use for initialization.
+ **/ + void init(const VkPhysicalDeviceMemoryProperties& in_mem_properties); + + private: + MemoryProperties (const MemoryProperties&); + MemoryProperties& operator=(const MemoryProperties&); + } MemoryProperties; + + extern bool operator==(const MemoryProperties& in1, + const MemoryProperties& in2); + + typedef struct MultisamplePropertiesEXT + { + VkExtent2D max_sample_location_grid_size; + + MultisamplePropertiesEXT() + { + max_sample_location_grid_size.height = 0; + max_sample_location_grid_size.width = 0; + } + + MultisamplePropertiesEXT(const VkMultisamplePropertiesEXT& in_multisample_props_vk) + { + max_sample_location_grid_size = in_multisample_props_vk.maxSampleLocationGridSize; + } + } MultisamplePropertiesEXT; + + /** Defines data for a single image mip-map. + * + * Use one of the static create_() functions to set up structure fields according to the target + * image type. + **/ + typedef struct MipmapRawData + { + /* Image aspect the mip-map data is specified for. */ + Anvil::ImageAspectFlagBits aspect; + + /* Start layer index */ + uint32_t n_layer; + + /* Number of layers to update */ + uint32_t n_layers; + + /* Number of 3D texture slices to update. For non-3D texture types, this field + * should be set to 1. */ + uint32_t n_slices; + + + /* Index of the mip-map to update. */ + uint32_t n_mipmap; + + + /* Pointer to a buffer holding raw data representation. The data structure is characterized by + * data_size, row_size and slice_size fields. + * + * It is assumed the data under the pointer is tightly packed, and stored in column->row->slice->layer + * order. + */ + std::shared_ptr linear_tightly_packed_data_uchar_ptr; + const unsigned char* linear_tightly_packed_data_uchar_raw_ptr; + std::shared_ptr > linear_tightly_packed_data_uchar_vec_ptr; + + + /* Total number of bytes available for reading under linear_tightly_packed_data_ptr */ + uint32_t data_size; + + /* Number of bytes each row takes */ + uint32_t row_size; + + + MipmapRawData() + { + aspect = Anvil::ImageAspectFlagBits::NONE; + data_size = 0; + linear_tightly_packed_data_uchar_raw_ptr = nullptr; + n_layer = 0; + n_layers = 0; + n_mipmap = 0; + n_slices = 0; + row_size = 0; + } + + /** Creates a MipmapRawData instance which can be used to upload data to 1D Image instances: + * + * @param in_aspect Image aspect to modify. + * @param in_n_mipmap Index of the mipmap to be updated. + * @param in_linear_tightly_packed_data_ptr Pointer to raw mip-map data. + * @param in_linear_tightly_packed_data_vector_ptr Vector holding raw mip-map data. + * @param in_row_size Number of bytes each texture row takes. + * + * NOTE: Mipmap contents is NOT cached at call time. This implies raw pointers are ASSUMED to + * be valid at baking time. + * + * @return As per description. + **/ + static MipmapRawData create_1D_from_uchar_ptr (Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_mipmap, + std::shared_ptr in_linear_tightly_packed_data_ptr, + uint32_t in_row_size); + static MipmapRawData create_1D_from_uchar_ptr (Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_mipmap, + const unsigned char* in_linear_tightly_packed_data_vector_ptr, + uint32_t in_row_size); + static MipmapRawData create_1D_from_uchar_vector_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_mipmap, + std::shared_ptr > in_linear_tightly_packed_data_ptr, + uint32_t in_row_size); + + /** Creates a MipmapRawData instance which can be used to upload data to 1D Array Image instances: + * + * @param in_aspect Image aspect to modify. 
+ * @param in_n_layer Index of a texture layer the mip-map data should be uploaded to. + * @param in_n_layers Number of texture layers to be updated. + * @param in_n_mipmap Index of the mipmap to be updated. + * @param in_linear_tightly_packed_data_ptr Pointer to raw mip-map data. + * @param in_linear_tightly_packed_data_vector_ptr Vector holding raw mip-map data. + * @param in_row_size Number of bytes each texture row takes. + * @param in_data_size Number of bytes available for reading under @param in_linear_tightly_packed_data_ptr. + * + * @return As per description. + **/ + static MipmapRawData create_1D_array_from_uchar_ptr (Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layers, + uint32_t in_n_mipmap, + std::shared_ptr in_linear_tightly_packed_data_ptr, + uint32_t in_row_size, + uint32_t in_data_size); + static MipmapRawData create_1D_array_from_uchar_ptr (Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layers, + uint32_t in_n_mipmap, + const unsigned char* in_linear_tightly_packed_data_ptr, + uint32_t in_row_size, + uint32_t in_data_size); + static MipmapRawData create_1D_array_from_uchar_vector_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layers, + uint32_t in_n_mipmap, + std::shared_ptr > in_linear_tightly_packed_data_ptr, + uint32_t in_row_size, + uint32_t in_data_size); + + /** Creates a MipmapRawData instance which can be used to upload data to 2D Image instances: + * + * @param in_aspect Image aspect to modify. + * @param in_n_mipmap Index of the mipmap to be updated. + * @param in_linear_tightly_packed_data_ptr Pointer to raw mip-map data. + * @param in_linear_tightly_packed_data_vector_ptr Vector holding raw mip-map data. + * @param in_data_size Number of bytes available for reading under @param in_linear_tightly_packed_data_ptr. + * @param in_row_size Number of bytes each texture row takes. + * + * @return As per description. + **/ + static MipmapRawData create_2D_from_uchar_ptr (Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_mipmap, + std::shared_ptr in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size); + static MipmapRawData create_2D_from_uchar_ptr (Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_mipmap, + const unsigned char* in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size); + static MipmapRawData create_2D_from_uchar_vector_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_mipmap, + std::shared_ptr > in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size); + + /** Creates a MipmapRawData instance which can be used to upload data to 2D Array Image instances: + * + * @param in_aspect Image aspect to modify. + * @param in_n_layer Index of a texture layer the mip-map data should be uploaded to. + * @param in_n_layers Number of texture layers to be updated. + * @param in_n_mipmap Index of the mipmap to be updated. + * @param in_linear_tightly_packed_data_ptr Pointer to raw mip-map data. + * @param in_linear_tightly_packed_data_vector_ptr Vector holding raw mip-map data. + * @param in_data_size Number of bytes available for reading under @param in_linear_tightly_packed_data_ptr. + * @param in_row_size Number of bytes each texture row takes. + * + * @return As per description. 
+ **/ + static MipmapRawData create_2D_array_from_uchar_ptr (Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layers, + uint32_t in_n_mipmap, + std::shared_ptr in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size); + static MipmapRawData create_2D_array_from_uchar_ptr (Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layers, + uint32_t in_n_mipmap, + const unsigned char* in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size); + static MipmapRawData create_2D_array_from_uchar_vector_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layers, + uint32_t in_n_mipmap, + std::shared_ptr > in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size); + + /** Creates a MipmapRawData instnce which can be used to upload data to 3D Image instances: + * + * @param in_aspect Image aspect to modify. + * @param in_n_layer Index of a texture layer the mip-map data should be uploaded to. + * @param in_n_slices Number of texture slices to be updated. + * @param in_n_mipmap Index of the mipmap to be updated. + * @param in_linear_tightly_packed_data_ptr Pointer to raw mip-map data. + * @param in_linear_tightly_packed_data_vector_ptr Vector holding raw mip-map data. + * @param in_slice_data_size Number of bytes available for reading under @param in_linear_tightly_packed_data_ptr for a single slice. + * @param in_row_size Number of bytes each texture row takes. + * + * @return As per description. + **/ + static MipmapRawData create_3D_from_uchar_ptr (Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layer_slices, + uint32_t in_n_mipmap, + std::shared_ptr in_linear_tightly_packed_data_ptr, + uint32_t in_slice_data_size, + uint32_t in_row_size); + static MipmapRawData create_3D_from_uchar_ptr (Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layer_slices, + uint32_t in_n_mipmap, + const unsigned char* in_linear_tightly_packed_data_ptr, + uint32_t in_slice_data_size, + uint32_t in_row_size); + static MipmapRawData create_3D_from_uchar_vector_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layer_slices, + uint32_t in_n_mipmap, + std::shared_ptr > in_linear_tightly_packed_data_ptr, + uint32_t in_slice_data_size, + uint32_t in_row_size); + + /** Creates a MipmapRawData instance which can be used to upload data to Cube Map Image instances: + * + * @param in_aspect Image aspect to modify. + * @param in_n_layer Index of a texture layer the mip-map data should be uploaded to. + * Valid values and corresponding cube map faces: 0: -X, 1: -Y, 2: -Z, 3: +X, 4: +Y, 5: +Z + * @param in_n_mipmap Index of the mipmap to be updated. + * @param in_linear_tightly_packed_data_ptr Pointer to raw mip-map data. + * @param in_linear_tightly_packed_data_vector_ptr Vector holding raw mip-map data. + * @param in_data_size Number of bytes available for reading under @param in_linear_tightly_packed_data_ptr. + * @param in_row_size Number of bytes each texture row takes. + * + * @return As per description. 
+ **/ + static MipmapRawData create_cube_map_from_uchar_ptr (Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_mipmap, + std::shared_ptr in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size); + static MipmapRawData create_cube_map_from_uchar_ptr (Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_mipmap, + const unsigned char* in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size); + static MipmapRawData create_cube_map_from_uchar_vector_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_mipmap, + std::shared_ptr > in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size); + + /** Creates a MipmapRawData instance which can be used to upload data to Cube Map Array Image instances: + * + * @param in_aspect Image aspect to modify. + * @param in_n_layer Index of a texture layer the mip-map data should be uploaded to. + * Cube map faces, as selected for layer at index (n_layer % 6), are: + * 0: -X, 1: -Y, 2: -Z, 3: +X, 4: +Y, 5: +Z + * @param in_n_layers Number of texture layers to update. + * @param in_n_mipmap Index of the mipmap to be updated. + * @param in_linear_tightly_packed_data_ptr Pointer to raw mip-map data. + * @param in_linear_tightly_packed_data_vector_ptr Vector holding raw mip-map data. + * @param in_data_size Number of bytes available for reading under @param in_linear_tightly_packed_data_ptr. + * @param in_row_size Number of bytes each texture row takes. + * + * @return As per description. + **/ + static MipmapRawData create_cube_map_array_from_uchar_ptr (Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layers, + uint32_t in_n_mipmap, + std::shared_ptr in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size); + static MipmapRawData create_cube_map_array_from_uchar_ptr (Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layers, + uint32_t in_n_mipmap, + const unsigned char* in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size); + static MipmapRawData create_cube_map_array_from_uchar_vector_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layers, + uint32_t in_n_mipmap, + std::shared_ptr > in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size); + + private: + static MipmapRawData create_1D (Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_mipmap, + uint32_t in_row_size); + static MipmapRawData create_1D_array(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layers, + uint32_t in_n_mipmap, + uint32_t in_row_size, + uint32_t in_data_size); + static MipmapRawData create_2D (Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_mipmap, + uint32_t in_data_size, + uint32_t in_row_size); + static MipmapRawData create_2D_array(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layers, + uint32_t in_n_mipmap, + uint32_t in_data_size, + uint32_t in_row_size); + static MipmapRawData create_3D (Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_slices, + uint32_t in_n_mipmap, + uint32_t in_data_size, + uint32_t in_row_size); + } MipmapRawData; + + /* Dummy delete functor */ + template + struct NullDeleter + { + void operator()(Type* in_unused_ptr) + { + in_unused_ptr; + } + }; + + /* Used internally by Buffer and Image to track page occupancy status */ + typedef union + { + 
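+        /* The union below aliases a single 32-bit word ("raw") with 32 one-bit fields, one per tracked page.
+         * A minimal sketch of the intended use; note that bit-field layout is implementation-specific, which
+         * is why code that needs a stable view of all pages at once should go through "raw":
+         *
+         *     Anvil::PageOccupancyStatus status;
+         *
+         *     status.raw  = 0;                   // no pages committed
+         *     status.raw |= (1u << n_page);      // mark page "n_page" (an assumed index in [0, 31]) as occupied
+         *
+         *     const bool is_page_0_occupied = (status.page_bits.page_bit_0 != 0);
+         */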
uint32_t raw; + + struct + { + uint8_t page_bit_0 : 1; + uint8_t page_bit_1 : 1; + uint8_t page_bit_2 : 1; + uint8_t page_bit_3 : 1; + uint8_t page_bit_4 : 1; + uint8_t page_bit_5 : 1; + uint8_t page_bit_6 : 1; + uint8_t page_bit_7 : 1; + uint8_t page_bit_8 : 1; + uint8_t page_bit_9 : 1; + uint8_t page_bit_10 : 1; + uint8_t page_bit_11 : 1; + uint8_t page_bit_12 : 1; + uint8_t page_bit_13 : 1; + uint8_t page_bit_14 : 1; + uint8_t page_bit_15 : 1; + uint8_t page_bit_16 : 1; + uint8_t page_bit_17 : 1; + uint8_t page_bit_18 : 1; + uint8_t page_bit_19 : 1; + uint8_t page_bit_20 : 1; + uint8_t page_bit_21 : 1; + uint8_t page_bit_22 : 1; + uint8_t page_bit_23 : 1; + uint8_t page_bit_24 : 1; + uint8_t page_bit_25 : 1; + uint8_t page_bit_26 : 1; + uint8_t page_bit_27 : 1; + uint8_t page_bit_28 : 1; + uint8_t page_bit_29 : 1; + uint8_t page_bit_30 : 1; + uint8_t page_bit_31 : 1; + } page_bits; + } PageOccupancyStatus; + + typedef struct PhysicalDeviceProtectedMemoryFeatures + { + bool protected_memory; + + PhysicalDeviceProtectedMemoryFeatures(); + PhysicalDeviceProtectedMemoryFeatures(const VkPhysicalDeviceProtectedMemoryFeatures& in_features); + + VkPhysicalDeviceProtectedMemoryFeatures get_vk_physical_device_protected_memory_features() const; + + bool operator==(const PhysicalDeviceProtectedMemoryFeatures& in_features) const; + } PhysicalDeviceProtectedMemoryFeatures; + + typedef struct PhysicalDeviceFeaturesCoreVK10 + { + bool alpha_to_one; + bool depth_bias_clamp; + bool depth_bounds; + bool depth_clamp; + bool draw_indirect_first_instance; + bool dual_src_blend; + bool fill_mode_non_solid; + bool fragment_stores_and_atomics; + bool full_draw_index_uint32; + bool geometry_shader; + bool image_cube_array; + bool independent_blend; + bool inherited_queries; + bool large_points; + bool logic_op; + bool multi_draw_indirect; + bool multi_viewport; + bool occlusion_query_precise; + bool pipeline_statistics_query; + bool robust_buffer_access; + bool sampler_anisotropy; + bool sample_rate_shading; + bool shader_clip_distance; + bool shader_cull_distance; + bool shader_float64; + bool shader_image_gather_extended; + bool shader_int16; + bool shader_int64; + bool shader_resource_residency; + bool shader_resource_min_lod; + bool shader_sampled_image_array_dynamic_indexing; + bool shader_storage_buffer_array_dynamic_indexing; + bool shader_storage_image_array_dynamic_indexing; + bool shader_storage_image_extended_formats; + bool shader_storage_image_multisample; + bool shader_storage_image_read_without_format; + bool shader_storage_image_write_without_format; + bool shader_tessellation_and_geometry_point_size; + bool shader_uniform_buffer_array_dynamic_indexing; + bool sparse_binding; + bool sparse_residency_2_samples; + bool sparse_residency_4_samples; + bool sparse_residency_8_samples; + bool sparse_residency_16_samples; + bool sparse_residency_aliased; + bool sparse_residency_buffer; + bool sparse_residency_image_2D; + bool sparse_residency_image_3D; + bool tessellation_shader; + bool texture_compression_ASTC_LDR; + bool texture_compression_BC; + bool texture_compression_ETC2; + bool variable_multisample_rate; + bool vertex_pipeline_stores_and_atomics; + bool wide_lines; + + VkPhysicalDeviceFeatures get_vk_physical_device_features() const; + bool operator== (const PhysicalDeviceFeaturesCoreVK10& in_data) const; + + PhysicalDeviceFeaturesCoreVK10(); + PhysicalDeviceFeaturesCoreVK10(const VkPhysicalDeviceFeatures& in_physical_device_features); + + } PhysicalDeviceFeaturesCoreVK10; + + typedef struct 
PhysicalDeviceFeaturesCoreVK11 + { + PhysicalDeviceProtectedMemoryFeatures protected_memory_features; + + bool operator==(const PhysicalDeviceFeaturesCoreVK11& in_data) const; + + PhysicalDeviceFeaturesCoreVK11(); + PhysicalDeviceFeaturesCoreVK11(const PhysicalDeviceProtectedMemoryFeatures& in_protected_memory_features); + + } PhysicalDeviceFeaturesCoreVK11; + + typedef struct PhysicalDeviceFeatures + { + const PhysicalDeviceFeaturesCoreVK10* core_vk1_0_features_ptr; + const PhysicalDeviceFeaturesCoreVK11* core_vk1_1_features_ptr; + const EXTDepthClipEnableFeatures* ext_depth_clip_enable_features_ptr; + const EXTDescriptorIndexingFeatures* ext_descriptor_indexing_features_ptr; + const EXTInlineUniformBlockFeatures* ext_inline_uniform_block_features_ptr; + const EXTScalarBlockLayoutFeatures* ext_scalar_block_layout_features_ptr; + const EXTTransformFeedbackFeatures* ext_transform_feedback_features_ptr; + const EXTMemoryPriorityFeatures* ext_memory_priority_features_ptr; + const KHR16BitStorageFeatures* khr_16bit_storage_features_ptr; + const KHR8BitStorageFeatures* khr_8bit_storage_features_ptr; + const KHRFloat16Int8Features* khr_float16_int8_features_ptr; + const KHRMultiviewFeatures* khr_multiview_features_ptr; + const KHRSamplerYCbCrConversionFeatures* khr_sampler_ycbcr_conversion_features_ptr; + const KHRShaderAtomicInt64Features* khr_shader_atomic_int64_features_ptr; + const KHRVariablePointerFeatures* khr_variable_pointer_features_ptr; + const KHRVulkanMemoryModelFeatures* khr_vulkan_memory_model_features_ptr; + + PhysicalDeviceFeatures(); + PhysicalDeviceFeatures(const PhysicalDeviceFeaturesCoreVK10* in_core_vk1_0_features_ptr, + const PhysicalDeviceFeaturesCoreVK11* in_core_vk1_1_features_ptr, + const EXTDepthClipEnableFeatures* in_ext_depth_clip_enable_features_ptr, + const EXTDescriptorIndexingFeatures* in_ext_descriptor_indexing_features_ptr, + const EXTInlineUniformBlockFeatures* in_ext_inline_uniform_block_features_ptr, + const EXTScalarBlockLayoutFeatures* in_ext_scalar_block_layout_features_ptr, + const EXTTransformFeedbackFeatures* in_ext_transform_feedback_features_ptr, + const EXTMemoryPriorityFeatures* in_ext_memory_priority_features_ptr, + const KHR16BitStorageFeatures* in_khr_16_bit_storage_features_ptr, + const KHR8BitStorageFeatures* in_khr_8_bit_storage_features_ptr, + const KHRFloat16Int8Features* in_khr_float16_int8_features_ptr, + const KHRMultiviewFeatures* in_khr_multiview_features_ptr, + const KHRSamplerYCbCrConversionFeatures* in_khr_sampler_ycbcr_conversion_features_ptr, + const KHRShaderAtomicInt64Features* in_khr_shader_atomic_int64_features_ptr, + const KHRVariablePointerFeatures* in_khr_variable_pointer_features_ptr, + const KHRVulkanMemoryModelFeatures* in_khr_vulkan_memory_model_features_ptr); + + bool operator==(const PhysicalDeviceFeatures& in_physical_device_features) const; + } PhysicalDeviceFeatures; + + typedef struct PhysicalDeviceLimits + { + VkDeviceSize buffer_image_granularity; + uint32_t discrete_queue_priorities; + Anvil::SampleCountFlags framebuffer_color_sample_counts; + Anvil::SampleCountFlags framebuffer_depth_sample_counts; + Anvil::SampleCountFlags framebuffer_no_attachments_sample_counts; + Anvil::SampleCountFlags framebuffer_stencil_sample_counts; + float line_width_granularity; + float line_width_range[2]; + uint32_t max_bound_descriptor_sets; + uint32_t max_clip_distances; + uint32_t max_color_attachments; + uint32_t max_combined_clip_and_cull_distances; + uint32_t max_compute_shared_memory_size; + uint32_t 
max_compute_work_group_count[3]; + uint32_t max_compute_work_group_invocations; + uint32_t max_compute_work_group_size[3]; + uint32_t max_cull_distances; + uint32_t max_descriptor_set_input_attachments; + uint32_t max_descriptor_set_sampled_images; + uint32_t max_descriptor_set_samplers; + uint32_t max_descriptor_set_storage_buffers; + uint32_t max_descriptor_set_storage_buffers_dynamic; + uint32_t max_descriptor_set_storage_images; + uint32_t max_descriptor_set_uniform_buffers; + uint32_t max_descriptor_set_uniform_buffers_dynamic; + uint32_t max_draw_indexed_index_value; + uint32_t max_draw_indirect_count; + uint32_t max_fragment_combined_output_resources; + uint32_t max_fragment_dual_src_attachments; + uint32_t max_fragment_input_components; + uint32_t max_fragment_output_attachments; + uint32_t max_framebuffer_height; + uint32_t max_framebuffer_layers; + uint32_t max_framebuffer_width; + uint32_t max_geometry_input_components; + uint32_t max_geometry_output_components; + uint32_t max_geometry_output_vertices; + uint32_t max_geometry_shader_invocations; + uint32_t max_geometry_total_output_components; + uint32_t max_image_array_layers; + uint32_t max_image_dimension_1D; + uint32_t max_image_dimension_2D; + uint32_t max_image_dimension_3D; + uint32_t max_image_dimension_cube; + float max_interpolation_offset; + uint32_t max_memory_allocation_count; + uint32_t max_per_stage_descriptor_input_attachments; + uint32_t max_per_stage_descriptor_sampled_images; + uint32_t max_per_stage_descriptor_samplers; + uint32_t max_per_stage_descriptor_storage_buffers; + uint32_t max_per_stage_descriptor_storage_images; + uint32_t max_per_stage_descriptor_uniform_buffers; + uint32_t max_per_stage_resources; + uint32_t max_push_constants_size; + uint32_t max_sample_mask_words; + uint32_t max_sampler_allocation_count; + float max_sampler_anisotropy; + float max_sampler_lod_bias; + uint32_t max_storage_buffer_range; + uint32_t max_viewport_dimensions[2]; + uint32_t max_viewports; + uint32_t max_tessellation_control_per_patch_output_components; + uint32_t max_tessellation_control_per_vertex_input_components; + uint32_t max_tessellation_control_per_vertex_output_components; + uint32_t max_tessellation_control_total_output_components; + uint32_t max_tessellation_evaluation_input_components; + uint32_t max_tessellation_evaluation_output_components; + uint32_t max_tessellation_generation_level; + uint32_t max_tessellation_patch_size; + uint32_t max_texel_buffer_elements; + uint32_t max_texel_gather_offset; + uint32_t max_texel_offset; + uint32_t max_uniform_buffer_range; + uint32_t max_vertex_input_attributes; + uint32_t max_vertex_input_attribute_offset; + uint32_t max_vertex_input_bindings; + uint32_t max_vertex_input_binding_stride; + uint32_t max_vertex_output_components; + float min_interpolation_offset; + size_t min_memory_map_alignment; + VkDeviceSize min_storage_buffer_offset_alignment; + VkDeviceSize min_texel_buffer_offset_alignment; + int32_t min_texel_gather_offset; + int32_t min_texel_offset; + VkDeviceSize min_uniform_buffer_offset_alignment; + uint32_t mipmap_precision_bits; + VkDeviceSize non_coherent_atom_size; + VkDeviceSize optimal_buffer_copy_offset_alignment; + VkDeviceSize optimal_buffer_copy_row_pitch_alignment; + float point_size_granularity; + float point_size_range[2]; + Anvil::SampleCountFlags sampled_image_color_sample_counts; + Anvil::SampleCountFlags sampled_image_depth_sample_counts; + Anvil::SampleCountFlags sampled_image_integer_sample_counts; + Anvil::SampleCountFlags 
sampled_image_stencil_sample_counts; + VkDeviceSize sparse_address_space_size; + bool standard_sample_locations; + Anvil::SampleCountFlags storage_image_sample_counts; + bool strict_lines; + uint32_t sub_pixel_interpolation_offset_bits; + uint32_t sub_pixel_precision_bits; + uint32_t sub_texel_precision_bits; + bool timestamp_compute_and_graphics; + float timestamp_period; + float viewport_bounds_range[2]; + uint32_t viewport_sub_pixel_bits; + + PhysicalDeviceLimits(); + PhysicalDeviceLimits(const VkPhysicalDeviceLimits& in_device_limits); + + bool operator==(const PhysicalDeviceLimits& in_device_limits) const; + } PhysicalDeviceLimits; + + typedef struct PhysicalDeviceProtectedMemoryProperties + { + bool protected_no_fault; + + PhysicalDeviceProtectedMemoryProperties(); + PhysicalDeviceProtectedMemoryProperties(const VkPhysicalDeviceProtectedMemoryProperties& in_props); + + bool operator==(const PhysicalDeviceProtectedMemoryProperties& in_props) const; + } PhysicalDeviceProtectedMemoryProperties; + + typedef struct PhysicalDeviceSparseProperties + { + bool residency_standard_2D_block_shape; + bool residency_standard_2D_multisample_block_shape; + bool residency_standard_3D_block_shape; + bool residency_aligned_mip_size; + bool residency_non_resident_strict; + + PhysicalDeviceSparseProperties(); + PhysicalDeviceSparseProperties(const VkPhysicalDeviceSparseProperties& in_sparse_props); + + bool operator==(const PhysicalDeviceSparseProperties& in_props) const; + } PhysicalDeviceSparseProperties; + + typedef struct PhysicalDeviceSubgroupProperties + { + bool quad_operations_in_all_stages; + uint32_t subgroup_size; + Anvil::SubgroupFeatureFlags supported_operations; + Anvil::ShaderStageFlags supported_stages; + + PhysicalDeviceSubgroupProperties(); + PhysicalDeviceSubgroupProperties(const VkPhysicalDeviceSubgroupProperties & in_props); + + bool operator==(const PhysicalDeviceSubgroupProperties&) const; + } PhysicalDeviceSubgroupProperties; + + typedef struct PhysicalDevicePropertiesCoreVK10 + { + uint32_t api_version; + uint32_t device_id; + char device_name [VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]; + VkPhysicalDeviceType device_type; + uint32_t driver_version; + uint8_t pipeline_cache_uuid[VK_UUID_SIZE]; + uint32_t vendor_id; + + PhysicalDeviceLimits limits; + PhysicalDeviceSparseProperties sparse_properties; + + bool operator==(const PhysicalDevicePropertiesCoreVK10& in_props) const; + + PhysicalDevicePropertiesCoreVK10(); + PhysicalDevicePropertiesCoreVK10(const VkPhysicalDeviceProperties& in_physical_device_properties); + } PhysicalDevicePropertiesCoreVK10; + + typedef struct PhysicalDevicePropertiesCoreVK11 + { + PhysicalDeviceProtectedMemoryProperties protected_memory_properties; + PhysicalDeviceSubgroupProperties subgroup_properties; + + bool operator==(const PhysicalDevicePropertiesCoreVK11& in_props) const; + + PhysicalDevicePropertiesCoreVK11(); + PhysicalDevicePropertiesCoreVK11(const VkPhysicalDeviceProtectedMemoryProperties& in_protected_memory_properties, + const VkPhysicalDeviceSubgroupProperties& in_subgroup_properties); + } PhysicalDevicePropertiesCoreVK11; + + typedef struct PhysicalDeviceProperties + { + const AMDShaderCoreProperties* amd_shader_core_properties_ptr; + const PhysicalDevicePropertiesCoreVK10* core_vk1_0_properties_ptr; + const PhysicalDevicePropertiesCoreVK11* core_vk1_1_properties_ptr; + const EXTConservativeRasterizationProperties* ext_conservative_rasterization_properties_ptr; + const EXTDescriptorIndexingProperties* ext_descriptor_indexing_properties_ptr; + const 
EXTExternalMemoryHostProperties* ext_external_memory_host_properties_ptr; + const EXTInlineUniformBlockProperties* ext_inline_uniform_block_properties_ptr; + const EXTPCIBusInfoProperties* ext_pci_bus_info_properties_ptr; + const EXTSampleLocationsProperties* ext_sample_locations_properties_ptr; + const EXTSamplerFilterMinmaxProperties* ext_sampler_filter_minmax_properties_ptr; + const EXTTransformFeedbackProperties* ext_transform_feedback_properties_ptr; + const EXTVertexAttributeDivisorProperties* ext_vertex_attribute_divisor_properties_ptr; + const KHRDepthStencilResolveProperties* khr_depth_stencil_resolve_properties_ptr; + const KHRDriverPropertiesProperties* khr_driver_properties_properties_ptr; + const KHRExternalMemoryCapabilitiesPhysicalDeviceIDProperties* khr_external_memory_capabilities_physical_device_id_properties_ptr; + const KHRMaintenance2PhysicalDevicePointClippingProperties* khr_maintenance2_point_clipping_properties_ptr; + const KHRMaintenance3Properties* khr_maintenance3_properties_ptr; + const KHRMultiviewProperties* khr_multiview_properties_ptr; + const KHRShaderFloatControlsProperties* khr_shader_float_controls_properties_ptr; + + PhysicalDeviceProperties(); + PhysicalDeviceProperties(const AMDShaderCoreProperties* in_amd_shader_core_properties_ptr, + const PhysicalDevicePropertiesCoreVK10* in_core_vk1_0_properties_ptr, + const PhysicalDevicePropertiesCoreVK11* in_core_vk1_1_properties_ptr, + const EXTConservativeRasterizationProperties* in_ext_conservative_rasterization_properties_ptr, + const EXTDescriptorIndexingProperties* in_ext_descriptor_indexing_properties_ptr, + const EXTExternalMemoryHostProperties* in_ext_external_memory_host_properties_ptr, + const EXTInlineUniformBlockProperties* in_ext_inline_uniform_block_properties_ptr, + const EXTPCIBusInfoProperties* in_ext_pci_bus_info_properties_ptr, + const EXTSampleLocationsProperties* in_ext_sample_locations_properties_ptr, + const EXTSamplerFilterMinmaxProperties* in_ext_sampler_filter_minmax_properties_ptr, + const EXTTransformFeedbackProperties* in_ext_transform_feedback_properties_ptr, + const EXTVertexAttributeDivisorProperties* in_ext_vertex_attribute_divisor_properties_ptr, + const KHRDepthStencilResolveProperties* in_khr_depth_stencil_resolve_props_ptr, + const KHRDriverPropertiesProperties* in_khr_driver_properties_props_ptr, + const KHRExternalMemoryCapabilitiesPhysicalDeviceIDProperties* in_khr_external_memory_caps_physical_device_id_props_ptr, + const KHRMaintenance3Properties* in_khr_maintenance3_properties_ptr, + const KHRMaintenance2PhysicalDevicePointClippingProperties* in_khr_maintenance2_point_clipping_properties_ptr, + const KHRMultiviewProperties* in_khr_multiview_properties_ptr, + const KHRShaderFloatControlsProperties* in_khr_shader_float_controls_properties_ptr); + + bool operator==(const PhysicalDeviceProperties& in_props) const; + } PhysicalDeviceProperties; + + /* A single push constant range descriptor */ + typedef struct PushConstantRange + { + uint32_t offset; + uint32_t size; + Anvil::ShaderStageFlags stages; + + /** Constructor + * + * @param in_offset Start offset for the range. + * @param in_size Size of the range. + * @param in_stages Valid pipeline stages for the range. + */ + PushConstantRange(uint32_t in_offset, + uint32_t in_size, + Anvil::ShaderStageFlags in_stages); + + /** Comparison operator. Used internally. 
*/ + bool operator==(const PushConstantRange& in) const; + } PushConstantRange; + + typedef std::vector<PushConstantRange> PushConstantRanges; + + /** Holds information about a single Vulkan Queue Family. */ + typedef struct QueueFamilyInfo + { + Anvil::QueueFlags flags; + + VkExtent3D min_image_transfer_granularity; + uint32_t n_queues; + uint32_t n_timestamp_bits; + + /** Constructor. Initializes the instance using data provided by the driver. + * + * @param in_props Vulkan structure to use for initialization. + **/ + explicit QueueFamilyInfo(const VkQueueFamilyProperties& in_props); + } QueueFamilyInfo; + + typedef std::vector<QueueFamilyInfo> QueueFamilyInfoItems; + + extern bool operator==(const QueueFamilyInfo& in1, + const QueueFamilyInfo& in2); + + /** Describes a physical device group */ + typedef struct PhysicalDeviceGroup + { + std::vector<const Anvil::PhysicalDevice*> physical_device_ptrs; + bool supports_subset_allocations; + + explicit PhysicalDeviceGroup(); + } PhysicalDeviceGroup; + + /** TODO */ + typedef struct SemaphoreMGPUSubmission + { + uint32_t device_index; //< device index in the device group + Anvil::Semaphore* semaphore_ptr; + + SemaphoreMGPUSubmission() + { + device_index = UINT32_MAX; + semaphore_ptr = nullptr; + } + + SemaphoreMGPUSubmission(Anvil::Semaphore* in_semaphore_ptr, + const uint32_t& in_device_index) + :device_index (in_device_index), + semaphore_ptr(in_semaphore_ptr) + { + /* Stub */ + } + } SemaphoreMGPUSubmission; + + typedef struct SemaphoreProperties + { + ExternalSemaphoreProperties external_semaphore_properties; + + /* Dummy constructor */ + SemaphoreProperties(); + + SemaphoreProperties(const ExternalSemaphoreProperties& in_external_semaphore_properties); + } SemaphoreProperties; + + typedef struct SemaphorePropertiesQuery + { + const Anvil::ExternalSemaphoreHandleTypeFlagBits external_semaphore_handle_type; + + explicit SemaphorePropertiesQuery(const Anvil::ExternalSemaphoreHandleTypeFlagBits& in_external_semaphore_handle_type) + :external_semaphore_handle_type(in_external_semaphore_handle_type) + { + /* Stub */ + } + + SemaphorePropertiesQuery (const SemaphorePropertiesQuery&) = default; + SemaphorePropertiesQuery& operator=(const SemaphorePropertiesQuery&) = delete; + } SemaphorePropertiesQuery; + + /** Holds all information related to a specific shader module stage entry-point. */ + typedef struct ShaderModuleStageEntryPoint + { + std::string name; + ShaderModuleUniquePtr shader_module_owned_ptr; + Anvil::ShaderModule* shader_module_ptr; + Anvil::ShaderStage stage; + + /** Dummy constructor */ + ShaderModuleStageEntryPoint(); + + /** Copy constructor. */ + ShaderModuleStageEntryPoint(const ShaderModuleStageEntryPoint& in); + + /** Constructor. + * + * @param in_name Entry-point name. Must not be nullptr. + * @param in_shader_module_ptr ShaderModule instance to use. + * @param in_stage Shader stage the entry-point implements. + */ + ShaderModuleStageEntryPoint(const std::string& in_name, + ShaderModule* in_shader_module_ptr, + ShaderStage in_stage); + ShaderModuleStageEntryPoint(const std::string& in_name, + ShaderModuleUniquePtr in_shader_module_ptr, + ShaderStage in_stage); + + /** Destructor. 
*/ + ~ShaderModuleStageEntryPoint(); + + ShaderModuleStageEntryPoint& operator=(const ShaderModuleStageEntryPoint&); + } ShaderModuleStageEntryPoint; + + typedef struct SparseImageFormatProperties + { + Anvil::ImageAspectFlags aspect_mask; + VkExtent3D image_granularity; + Anvil::SparseImageFormatFlags flags; + + SparseImageFormatProperties() + { + aspect_mask = Anvil::ImageAspectFlagBits::NONE; + image_granularity.depth = 0; + image_granularity.height = 0; + image_granularity.width = 0; + flags = Anvil::SparseImageFormatFlagBits::NONE; + } + + SparseImageFormatProperties(const VkSparseImageFormatProperties& in_props) + { + aspect_mask = static_cast<Anvil::ImageAspectFlags>(in_props.aspectMask); + image_granularity = in_props.imageGranularity; + flags = static_cast<Anvil::SparseImageFormatFlags>(in_props.flags); + } + } SparseImageFormatProperties; + + static_assert(sizeof(SparseImageFormatProperties) == sizeof(VkSparseImageFormatProperties), "Struct sizes must match"); + static_assert(offsetof(SparseImageFormatProperties, aspect_mask) == offsetof(VkSparseImageFormatProperties, aspectMask), "Member offsets must match"); + static_assert(offsetof(SparseImageFormatProperties, image_granularity) == offsetof(VkSparseImageFormatProperties, imageGranularity), "Member offsets must match"); + static_assert(offsetof(SparseImageFormatProperties, flags) == offsetof(VkSparseImageFormatProperties, flags), "Member offsets must match"); + + typedef struct SparseImageMemoryRequirements + { + Anvil::SparseImageFormatProperties format_properties; + uint32_t image_mip_tail_first_lod; + VkDeviceSize image_mip_tail_size; + VkDeviceSize image_mip_tail_offset; + VkDeviceSize image_mip_tail_stride; + + SparseImageMemoryRequirements() + { + image_mip_tail_first_lod = 0; + image_mip_tail_size = 0; + image_mip_tail_offset = 0; + image_mip_tail_stride = 0; + } + + SparseImageMemoryRequirements(const VkSparseImageMemoryRequirements& in_reqs) + { + format_properties = in_reqs.formatProperties; + image_mip_tail_first_lod = in_reqs.imageMipTailFirstLod; + image_mip_tail_size = in_reqs.imageMipTailSize; + image_mip_tail_offset = in_reqs.imageMipTailOffset; + image_mip_tail_stride = in_reqs.imageMipTailStride; + } + } SparseImageMemoryRequirements; + + static_assert(sizeof(SparseImageMemoryRequirements) == sizeof(VkSparseImageMemoryRequirements), "Struct sizes must match"); + static_assert(offsetof(SparseImageMemoryRequirements, format_properties) == offsetof(VkSparseImageMemoryRequirements, formatProperties), "Member offsets must match"); + static_assert(offsetof(SparseImageMemoryRequirements, image_mip_tail_first_lod) == offsetof(VkSparseImageMemoryRequirements, imageMipTailFirstLod), "Member offsets must match"); + static_assert(offsetof(SparseImageMemoryRequirements, image_mip_tail_size) == offsetof(VkSparseImageMemoryRequirements, imageMipTailSize), "Member offsets must match"); + static_assert(offsetof(SparseImageMemoryRequirements, image_mip_tail_offset) == offsetof(VkSparseImageMemoryRequirements, imageMipTailOffset), "Member offsets must match"); + static_assert(offsetof(SparseImageMemoryRequirements, image_mip_tail_stride) == offsetof(VkSparseImageMemoryRequirements, imageMipTailStride), "Member offsets must match"); + + /* Describes sparse properties for an image format */ + typedef struct SparseImageAspectProperties + { + Anvil::ImageAspectFlags aspect_mask; + + SparseImageFormatFlags flags; + + VkExtent3D granularity; + uint32_t mip_tail_first_lod; + VkDeviceSize mip_tail_offset; + VkDeviceSize mip_tail_size; + VkDeviceSize mip_tail_stride; + + 
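/* The mip tail packs all mips starting at mip_tail_first_lod into one opaque region of mip_tail_size bytes located at mip_tail_offset; for images without a single mip tail, each array layer's tail is separated by mip_tail_stride bytes (semantics as per VkSparseImageMemoryRequirements). */ +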
SparseImageAspectProperties(); + SparseImageAspectProperties(const Anvil::SparseImageMemoryRequirements& in_req); + } SparseImageAspectProperties; + + typedef struct SpecializationConstant + { + uint32_t constant_id; + uint32_t n_bytes; + uint32_t start_offset; + + /** Dummy constructor. Should only be used by STL containers. */ + SpecializationConstant(); + + /** Constructor. + * + * @param in_constant_id Specialization constant ID. + * @param in_n_bytes Number of bytes consumed by the constant. + * @param in_start_offset Start offset, at which the constant data starts. + */ + SpecializationConstant(uint32_t in_constant_id, + uint32_t in_n_bytes, + uint32_t in_start_offset); + } SpecializationConstant; + + typedef std::vector<SpecializationConstant> SpecializationConstants; + + typedef enum class SubmissionType + { + MGPU, + SGPU + } SubmissionType; + + typedef struct SubmitInfo + { + /** Creates a submission info which, when submitted, waits on the semaphores specified by the user, + * executes user-defined command buffers, and then signals the semaphores passed by arguments. + * If a non-nullptr fence is specified, the function will also wait on the call to finish GPU-side + * before returning. + * + * It is valid to specify 0 command buffers or signal/wait semaphores, in which case + * these steps will be skipped. + * + * If @param in_should_block is true and @param in_opt_fence_ptr is nullptr, the function will create + * a new fence, wait on it, and then release it prior to leaving. This may come at a performance cost. + * + * The prototype which does not take *MGPUSubmission* may be used for both single- and multi-GPU device instances. + * The other one must only be called for multi-GPU device instances. + * + * NOTE: By default, the following values are associated with a new SubmitInfo instance: + * + * - D3D12 fence submit info: none + * - Keyed mutex acquire/release info: none + * - Protected submission: no + * + * To adjust these settings, please use corresponding set_..() functions, prior to passing the structure over to Queue::submit(). + * + * + * @param in_n_command_buffers Number of command buffers under @param in_opt_cmd_buffer_ptrs + * which should be executed. May be 0. + * @param in_opt_cmd_buffer_ptrs_ptr Ptr to an array of command buffers to execute. Can be nullptr if + * @param n_command_buffers is 0. + * @param in_n_semaphores_to_signal Number of semaphores to signal after command buffers finish + * executing. May be 0. + * @param in_opt_semaphore_to_signal_ptrs_ptr Ptr to an array of semaphores to signal after execution. May be nullptr if + * @param n_semaphores_to_signal is 0. + * @param in_n_semaphores_to_wait_on Number of semaphores to wait on before executing command buffers. + * May be 0. + * @param in_opt_semaphore_to_wait_on_ptrs_ptr Ptr to an array of semaphores to wait on prior to execution. May be nullptr + * if @param in_n_semaphores_to_wait_on is 0. + * @param in_opt_dst_stage_masks_to_wait_on_ptrs Array of size @param in_n_semaphores_to_wait_on, specifying stages + * at which the wait ops should be performed. May be nullptr if + * @param n_semaphores_to_wait_on is 0. + * @param in_should_block true if the function should wait for the scheduled commands to + * finish executing, false otherwise. + * @param in_opt_fence_ptr Fence to use when submitting the command buffers to the queue. + * The fence will be waited on if @param in_should_block is true. + * If @param in_should_block is false, the fence will be passed at + * submit call time, but will not be waited on. 
+ * + * @param in_command_buffer_submissions TODO + * @param in_semaphores_to_signal_ptr TODO + * @param in_semaphores_to_wait_on_ptr TODO + **/ + static SubmitInfo create(Anvil::CommandBufferBase* in_opt_cmd_buffer_ptr, + uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_opt_semaphore_to_signal_ptrs_ptr, + uint32_t in_n_semaphores_to_wait_on, + Anvil::Semaphore* const* in_opt_semaphore_to_wait_on_ptrs_ptr, + const Anvil::PipelineStageFlags* in_opt_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr = nullptr); + static SubmitInfo create(uint32_t in_n_cmd_buffers, + Anvil::CommandBufferBase* const* in_opt_cmd_buffer_ptrs_ptr, + uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_opt_semaphore_to_signal_ptrs_ptr, + uint32_t in_n_semaphores_to_wait_on, + Anvil::Semaphore* const* in_opt_semaphore_to_wait_on_ptrs_ptr, + const Anvil::PipelineStageFlags* in_opt_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr = nullptr); + + static SubmitInfo create_execute(Anvil::CommandBufferBase* in_cmd_buffer_ptr, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr = nullptr); + static SubmitInfo create_execute(Anvil::CommandBufferBase* const* in_cmd_buffer_ptrs_ptr, + uint32_t in_n_cmd_buffers, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr = nullptr); + + static SubmitInfo create_execute(const CommandBufferMGPUSubmission* in_cmd_buffer_submissions_ptr, + uint32_t in_n_command_buffer_submissions, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr = nullptr); + + static SubmitInfo create_execute_signal(Anvil::CommandBufferBase* in_cmd_buffer_ptr, + uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_semaphore_to_signal_ptrs_ptr, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr = nullptr); + static SubmitInfo create_execute_signal(Anvil::CommandBufferBase* const* in_cmd_buffer_ptrs_ptr, + uint32_t in_n_cmd_buffers, + uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_semaphore_to_signal_ptrs_ptr, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr = nullptr); + + static SubmitInfo create_execute_signal(const CommandBufferMGPUSubmission* in_cmd_buffer_submissions_ptr, + uint32_t in_n_command_buffer_submissions, + uint32_t in_n_signal_semaphore_submissions, + const SemaphoreMGPUSubmission* in_signal_semaphore_submissions_ptr, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr = nullptr); + + /** TODO + * + * NOTE: This function always blocks. 
+ */ + static SubmitInfo create_signal(uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_semaphore_to_signal_ptrs_ptr, + Anvil::Fence* in_opt_fence_ptr = nullptr); + static SubmitInfo create_signal(uint32_t in_n_signal_semaphore_submissions, + const Anvil::SemaphoreMGPUSubmission* in_signal_semaphore_submissions_ptr, + Anvil::Fence* in_opt_fence_ptr = nullptr); + + static SubmitInfo create_signal_wait(uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_semaphore_to_signal_ptrs_ptr, + uint32_t in_n_semaphores_to_wait_on, + Anvil::Semaphore* const* in_semaphore_to_wait_on_ptrs_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr = nullptr); + static SubmitInfo create_signal_wait(uint32_t in_n_signal_semaphore_submissions, + const Anvil::SemaphoreMGPUSubmission* in_signal_semaphore_submissions_ptr, + uint32_t in_n_wait_semaphore_submissions, + const Anvil::SemaphoreMGPUSubmission* in_wait_semaphore_submissions_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr = nullptr); + + /** TODO + * + * NOTE: This function always blocks. + */ + static SubmitInfo create_wait(uint32_t in_n_semaphores_to_wait_on, + Anvil::Semaphore* const* in_semaphore_to_wait_on_ptrs_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + Anvil::Fence* in_opt_fence_ptr = nullptr); + + static SubmitInfo create_wait(uint32_t in_n_wait_semaphore_submissions, + const Anvil::SemaphoreMGPUSubmission* in_wait_semaphore_submissions_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + Anvil::Fence* in_opt_fence_ptr = nullptr); + + static SubmitInfo create_wait_execute(Anvil::CommandBufferBase* in_cmd_buffer_ptr, + uint32_t in_n_semaphores_to_wait_on, + Anvil::Semaphore* const* in_semaphore_to_wait_on_ptrs_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr = nullptr); + static SubmitInfo create_wait_execute(Anvil::CommandBufferBase* const* in_cmd_buffer_ptrs_ptr, + uint32_t in_n_cmd_buffers, + uint32_t in_n_semaphores_to_wait_on, + Anvil::Semaphore* const* in_semaphore_to_wait_on_ptrs_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr = nullptr); + + static SubmitInfo create_wait_execute(const CommandBufferMGPUSubmission* in_cmd_buffer_submissions_ptr, + uint32_t in_n_command_buffer_submissions, + uint32_t in_n_wait_semaphore_submissions, + const SemaphoreMGPUSubmission* in_wait_semaphore_submissions_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr = nullptr); + + static SubmitInfo create_wait_execute_signal(Anvil::CommandBufferBase* in_cmd_buffer_ptr, + uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_semaphore_to_signal_ptrs_ptr, + uint32_t in_n_semaphores_to_wait_on, + Anvil::Semaphore* const* in_semaphore_to_wait_on_ptrs_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr = nullptr); + static SubmitInfo create_wait_execute_signal(Anvil::CommandBufferBase* const* in_cmd_buffer_ptrs_ptr, + uint32_t in_n_cmd_buffers, + uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_semaphore_to_signal_ptrs_ptr, + uint32_t in_n_semaphores_to_wait_on, + 
Anvil::Semaphore* const* in_semaphore_to_wait_on_ptrs_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr = nullptr); + + static SubmitInfo create_wait_execute_signal(const CommandBufferMGPUSubmission* in_cmd_buffer_submissions_ptr, + uint32_t in_n_command_buffer_submissions, + uint32_t in_n_signal_semaphore_submissions, + const SemaphoreMGPUSubmission* in_signal_semaphore_submissions_ptr, + uint32_t in_n_wait_semaphore_submissions, + const SemaphoreMGPUSubmission* in_wait_semaphore_submissions_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr = nullptr); + + const CommandBufferMGPUSubmission* get_command_buffers_mgpu() const + { + return command_buffers_mgpu_ptr; + } + + Anvil::CommandBufferBase* const* get_command_buffers_sgpu() const + { + return command_buffers_sgpu_ptr; + } + + #if defined(_WIN32) + bool get_d3d12_fence_semaphore_values(const uint64_t** out_d3d12_fence_signal_semaphore_values_ptr_ptr, + const uint64_t** out_d3d12_fence_wait_semaphore_values_ptr_ptr) const + { + bool result = (d3d12_fence_signal_semaphore_values_ptr != nullptr && n_signal_semaphores != 0) || + (d3d12_fence_wait_semaphore_values_ptr != nullptr && n_wait_semaphores != 0); + + *out_d3d12_fence_signal_semaphore_values_ptr_ptr = d3d12_fence_signal_semaphore_values_ptr; + *out_d3d12_fence_wait_semaphore_values_ptr_ptr = d3d12_fence_wait_semaphore_values_ptr; + + return result; + } + #endif + + const VkPipelineStageFlags* get_destination_stage_wait_masks() const + { + return (dst_stage_wait_masks.size() > 0) ? &dst_stage_wait_masks.at(0) : nullptr; + } + + Anvil::Fence* get_fence() const + { + return fence_ptr; + } + + #if defined(_WIN32) + bool get_keyed_mutex_acquire_release_info(uint32_t* out_n_acquire_keys_ptr, + const Anvil::MemoryBlock*** out_acquire_d3d11_memory_block_ptrs_ptr, + const uint64_t** out_acquire_mutex_key_value_ptrs_ptr, + const uint32_t** out_acquire_timeout_ptrs_ptr, + uint32_t* out_n_release_keys_ptr, + const Anvil::MemoryBlock*** out_release_d3d11_memory_block_ptrs_ptr, + const uint64_t** out_release_mutex_key_value_ptrs_ptr) const + { + *out_n_acquire_keys_ptr = keyed_mutex_n_acquire_keys; + *out_acquire_d3d11_memory_block_ptrs_ptr = keyed_mutex_acquire_d3d11_memory_block_ptrs_ptr; + *out_acquire_mutex_key_value_ptrs_ptr = keyed_mutex_acquire_mutex_key_value_ptrs; + *out_acquire_timeout_ptrs_ptr = keyed_mutex_acquire_timeout_ptrs; + *out_n_release_keys_ptr = keyed_mutex_n_release_keys; + *out_release_d3d11_memory_block_ptrs_ptr = keyed_mutex_release_d3d11_memory_block_ptrs_ptr; + *out_release_mutex_key_value_ptrs_ptr = keyed_mutex_release_mutex_key_value_ptrs; + + return (keyed_mutex_n_acquire_keys != 0 && (keyed_mutex_acquire_d3d11_memory_block_ptrs_ptr != nullptr && + keyed_mutex_acquire_mutex_key_value_ptrs != nullptr && + keyed_mutex_acquire_timeout_ptrs != nullptr)) || + (keyed_mutex_n_release_keys != 0 && (keyed_mutex_release_d3d11_memory_block_ptrs_ptr != nullptr && + keyed_mutex_release_mutex_key_value_ptrs != nullptr)); + } + #endif + + const uint32_t& get_n_command_buffers() const + { + return n_command_buffers; + } + + const uint32_t& get_n_signal_semaphores() const + { + return n_signal_semaphores; + } + + const uint32_t& get_n_wait_semaphores() const + { + return n_wait_semaphores; + } + + const Anvil::SemaphoreMGPUSubmission* get_signal_semaphores_mgpu() const + { + return signal_semaphores_mgpu_ptr; + } + + 
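/* Usage sketch: assuming a valid Anvil::Queue* (queue_ptr) and a fully recorded Anvil::CommandBufferBase* (cmd_buffer_ptr), a blocking single-GPU submission could look roughly as follows: + * + * Anvil::SubmitInfo submit_info = Anvil::SubmitInfo::create_execute(cmd_buffer_ptr, true); // in_should_block + * + * queue_ptr->submit(submit_info); + * + * Optional state (protected submissions, D3D12 fence values, keyed mutex acquire/release info) is applied via the set_..() functions below, prior to the Queue::submit() call. + */ + +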
Anvil::Semaphore* const* get_signal_semaphores_sgpu() const + { + return signal_semaphores_sgpu_ptr; + } + + const bool& get_should_block() const + { + return should_block; + } + + const uint64_t& get_timeout() const + { + return timeout; + } + + const SubmissionType& get_type() const + { + return type; + } + + const Anvil::SemaphoreMGPUSubmission* get_wait_semaphores_mgpu() const + { + return wait_semaphores_mgpu_ptr; + } + + Anvil::Semaphore* const* get_wait_semaphores_sgpu() const + { + return wait_semaphores_sgpu_ptr; + } + + const bool& is_protected_submission() const + { + return is_protected; + } + + #if defined(_WIN32) + /* Calling this function will make Anvil fill & chain a VkD3D12FenceSubmitInfoKHR struct at queue submission time. + * + * Requires VK_KHR_external_semaphore_win32 support. + * + * NOTE: The structure caches the provided pointers, not the contents available under derefs! Make sure the pointers remain valid + * for the time of the Queue::submit() call. + * + * @param in_signal_semaphore_values_ptr An array of exactly n_signal_semaphores values. Usage as per VkD3D12FenceSubmitInfoKHR::pSignalSemaphoreValues. + * Must not be nullptr unless n_signal_semaphores is 0. + * @param in_wait_semaphore_values_ptr An array of exactly n_wait_semaphores values. Usage as per VkD3D12FenceSubmitInfoKHR::pWaitSemaphoreValues. + * Must not be nullptr unless n_wait_semaphores is 0. + **/ + void set_d3d12_fence_semaphore_values(const uint64_t* in_signal_semaphore_values_ptr, + const uint32_t& in_n_signal_semaphore_values, + const uint64_t* in_wait_semaphore_values_ptr, + const uint32_t& in_n_wait_semaphore_values) + { + ANVIL_REDUNDANT_ARGUMENT_CONST(in_n_signal_semaphore_values); + ANVIL_REDUNDANT_ARGUMENT_CONST(in_n_wait_semaphore_values); + + anvil_assert((n_signal_semaphores != 0 && in_signal_semaphore_values_ptr != nullptr) || + (n_signal_semaphores == 0) ); + anvil_assert((n_wait_semaphores != 0 && in_wait_semaphore_values_ptr != nullptr) || + (n_wait_semaphores == 0) ); + + anvil_assert(in_n_signal_semaphore_values == n_signal_semaphores); + anvil_assert(in_n_wait_semaphore_values == n_wait_semaphores); + + d3d12_fence_signal_semaphore_values_ptr = in_signal_semaphore_values_ptr; + d3d12_fence_wait_semaphore_values_ptr = in_wait_semaphore_values_ptr; + } + + /* Calling this function will make Anvil fill & chain a VkWin32KeyedMutexAcquireReleaseInfoKHR struct at queue submission time. + * + * Requires VK_KHR_win32_keyed_mutex support. + * + * NOTE: The structure caches the provided pointers, not the contents available under derefs! Make sure the pointers remain valid + * for the time of the Queue::submit() call. + * + * @param in_opt_n_acquire_keys Number of items available for reading from @param in_opt_acquire_* params. + * May be 0. + * @param in_opt_acquire_d3d11_memory_block_ptrs_ptr If @param in_opt_n_acquire_keys is > 0, the array holds ptrs to memory blocks, + * whose payloads have been imported from D3D11 resources created with the "keyed mutex" flag. + * This info, together with other @param in_opt_* parameters will be used for acquisition purposes + * at submission time. + * @param in_opt_acquire_mutex_key_value_ptrs If @param in_opt_n_acquire_keys is > 0, the array holds mutex key values to wait + * for, prior to beginning the submitted work. + * @param in_opt_acquire_timeout_ptrs If @param in_opt_n_acquire_keys is > 0, the array holds timeout values to use for corresponding + * acquire requests. 
+ * @param in_opt_n_release_keys Number of items available for reading from @param in_opt_release_* params. May be 0. + * @param in_opt_release_d3d11_memory_block_ptrs_ptr If @param in_opt_n_release_keys is > 0, the array holds ptrs to memory blocks, + * whose payloads have been imported from D3D11 resources created with the "keyed mutex" flag. + * This info, together with other @param in_opt_* parameters will be used for release purposes + * at submission time. + * @param in_opt_release_mutex_key_value_ptrs If @param in_opt_n_release_keys is > 0, the array holds mutex key values to set when the submitted work + * has been completed. + */ + void set_keyed_mutex_acquire_release_info(const uint32_t& in_opt_n_acquire_keys, + const Anvil::MemoryBlock** in_opt_acquire_d3d11_memory_block_ptrs_ptr, + const uint64_t* in_opt_acquire_mutex_key_value_ptrs, + const uint32_t* in_opt_acquire_timeout_ptrs, + const uint32_t& in_opt_n_release_keys, + const Anvil::MemoryBlock** in_opt_release_d3d11_memory_block_ptrs_ptr, + const uint64_t* in_opt_release_mutex_key_value_ptrs) + { + keyed_mutex_n_acquire_keys = in_opt_n_acquire_keys; + keyed_mutex_acquire_d3d11_memory_block_ptrs_ptr = in_opt_acquire_d3d11_memory_block_ptrs_ptr; + keyed_mutex_acquire_mutex_key_value_ptrs = in_opt_acquire_mutex_key_value_ptrs; + keyed_mutex_acquire_timeout_ptrs = in_opt_acquire_timeout_ptrs; + keyed_mutex_n_release_keys = in_opt_n_release_keys; + keyed_mutex_release_d3d11_memory_block_ptrs_ptr = in_opt_release_d3d11_memory_block_ptrs_ptr; + keyed_mutex_release_mutex_key_value_ptrs = in_opt_release_mutex_key_value_ptrs; + + anvil_assert(keyed_mutex_n_acquire_keys != 0 || + keyed_mutex_n_release_keys != 0); + + if (keyed_mutex_n_acquire_keys != 0) + { + anvil_assert(keyed_mutex_acquire_d3d11_memory_block_ptrs_ptr != nullptr && + keyed_mutex_acquire_mutex_key_value_ptrs != nullptr && + keyed_mutex_acquire_timeout_ptrs != nullptr); + } + + if (keyed_mutex_n_release_keys != 0) + { + anvil_assert(keyed_mutex_release_d3d11_memory_block_ptrs_ptr != nullptr && + keyed_mutex_release_mutex_key_value_ptrs != nullptr); + } + } + #endif + + /* Marks (or unmarks) the submission as protected, which is required when submitting protected command buffers + * + * NOTE: Requires core VK 1.1 device or newer. + * + * @param in_should_enable True to mark the submission as protected, false to unmark. + **/ + void set_protected_submission(const bool& in_should_enable) + { + is_protected = in_should_enable; + } + + /* Sets a timeout which is used when waiting on a fence that the submission is associated with. + * + * If your submission times out, you're likely about to experience a TDR and lose the device. 
+ */ + void set_timeout(const uint64_t& in_timeout) + { + anvil_assert(should_block); + + timeout = in_timeout; + } + + private: + SubmitInfo(uint32_t in_n_command_buffers, + Anvil::CommandBufferBase* in_opt_single_cmd_buffer_ptr, /* to support n=1 helper functions */ + Anvil::CommandBufferBase* const* in_opt_cmd_buffer_ptrs_ptr, + uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_opt_semaphore_to_signal_ptrs_ptr, + uint32_t in_n_semaphores_to_wait_on, + Anvil::Semaphore* const* in_opt_semaphore_to_wait_on_ptrs_ptr, + const Anvil::PipelineStageFlags* in_opt_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr); + + SubmitInfo(uint32_t in_n_command_buffer_submissions, + const CommandBufferMGPUSubmission* in_opt_command_buffer_submissions_ptr, + uint32_t in_n_signal_semaphore_submissions, + const SemaphoreMGPUSubmission* in_opt_signal_semaphore_submissions_ptr, + uint32_t in_n_wait_semaphore_submissions, + const SemaphoreMGPUSubmission* in_opt_wait_semaphore_submissions_ptr, + const Anvil::PipelineStageFlags* in_opt_dst_stage_masks_to_wait_on_ptr, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr); + + Anvil::CommandBufferBase* helper_cmd_buffer_raw_ptr; + + const CommandBufferMGPUSubmission* command_buffers_mgpu_ptr; + Anvil::CommandBufferBase* const* command_buffers_sgpu_ptr; + uint32_t n_command_buffers; + + const SemaphoreMGPUSubmission* signal_semaphores_mgpu_ptr; + Anvil::Semaphore* const* signal_semaphores_sgpu_ptr; + uint32_t n_signal_semaphores; + + std::vector<VkPipelineStageFlags> dst_stage_wait_masks; + const SemaphoreMGPUSubmission* wait_semaphores_mgpu_ptr; + Anvil::Semaphore* const* wait_semaphores_sgpu_ptr; + uint32_t n_wait_semaphores; + + Anvil::Fence* fence_ptr; + + #if defined(_WIN32) + const uint64_t* d3d12_fence_signal_semaphore_values_ptr; + const uint64_t* d3d12_fence_wait_semaphore_values_ptr; + + uint32_t keyed_mutex_n_acquire_keys; + const Anvil::MemoryBlock** keyed_mutex_acquire_d3d11_memory_block_ptrs_ptr; + const uint64_t* keyed_mutex_acquire_mutex_key_value_ptrs; + const uint32_t* keyed_mutex_acquire_timeout_ptrs; + uint32_t keyed_mutex_n_release_keys; + const Anvil::MemoryBlock** keyed_mutex_release_d3d11_memory_block_ptrs_ptr; + const uint64_t* keyed_mutex_release_mutex_key_value_ptrs; + #endif + + bool is_protected; + bool should_block; + uint64_t timeout; + const SubmissionType type; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(SubmitInfo); + } SubmitInfo; + + /* NOTE: Maps 1:1 to VkSurfaceFormatKHR */ + typedef struct SurfaceFormatKHR + { + Anvil::Format format; + Anvil::ColorSpaceKHR color_space; + + SurfaceFormatKHR() + { + color_space = Anvil::ColorSpaceKHR::UNKNOWN; + format = Anvil::Format::UNKNOWN; + } + } SurfaceFormatKHR; + + static_assert(sizeof (SurfaceFormatKHR) == sizeof (VkSurfaceFormatKHR), "Struct sizes must match"); + static_assert(offsetof(SurfaceFormatKHR, format) == offsetof(VkSurfaceFormatKHR, format), "Member offsets must match"); + static_assert(offsetof(SurfaceFormatKHR, color_space) == offsetof(VkSurfaceFormatKHR, colorSpace), "Member offsets must match"); + + typedef struct VertexInputAttribute + { + Anvil::Format format; + uint32_t location; + uint32_t offset_in_bytes; + + VertexInputAttribute() + :format (Anvil::Format::UNKNOWN), + location (UINT32_MAX), + offset_in_bytes(UINT32_MAX) + { + /* Stub */ + } + + VertexInputAttribute(const uint32_t& in_location, + const Anvil::Format& in_format, + const uint32_t& in_offset_in_bytes) + :format (in_format), + location (in_location), + 
offset_in_bytes(in_offset_in_bytes) + { + /* Stub */ + } + } VertexInputAttribute; + + /* Represents a Vulkan structure header */ + typedef struct + { + VkStructureType type; + const void* next_ptr; + } VkStructHeader; + + typedef std::function& in_queue_labels, + const std::vector& in_cmd_buffer_labels, + const std::vector& in_objects)> DebugMessengerCallbackFunction; + + +}; /* namespace Anvil */ +#endif diff --git a/include/misc/types_utils.h b/include/misc/types_utils.h new file mode 100644 index 00000000..d8649428 --- /dev/null +++ b/include/misc/types_utils.h @@ -0,0 +1,331 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef TYPES_UTILS_H +#define TYPES_UTILS_H + +namespace Anvil +{ + namespace Utils + { + MTSafety convert_boolean_to_mt_safety_enum(bool in_mt_safe); + + bool convert_mt_safety_enum_to_boolean(MTSafety in_mt_safety, + const Anvil::BaseDevice* in_device_ptr); + + /* NOTE: in_api_version must NOT be Anvil::APIVersion::UNKNOWN */ + void get_version_chunks_for_api_version(const Anvil::APIVersion& in_api_version, + uint32_t* out_major_version_ptr, + uint32_t* out_minor_version_ptr); + + Anvil::QueueFamilyFlags get_queue_family_flags_from_queue_family_type(Anvil::QueueFamilyType in_queue_family_type); + + /** Converts a queue family bitfield value to an array of queue family indices. + * + * @param in_queue_families Input value to convert from. + * @param out_opt_queue_family_indices_ptr If not NULL, deref will be updated with *@param out_opt_n_queue_family_indices_ptr + * values, corresponding to queue family indices, as specified under @param in_queue_families. + * @param out_opt_n_queue_family_indices_ptr If not NULL, deref will be set to the number of items that would be or were written + * under @param out_opt_queue_family_indices_ptr. + * + **/ + void convert_queue_family_bits_to_family_indices(const Anvil::BaseDevice* in_device_ptr, + Anvil::QueueFamilyFlags in_queue_families, + uint32_t* out_opt_queue_family_indices_ptr, + uint32_t* out_opt_n_queue_family_indices_ptr); + + /** Count the number of set bits in an unsigned value (popcount). + * Algorithm taken from Software Optimization Guide for AMD64 Processors, p. 179. 
+ **/ + static inline uint32_t count_set_bits(uint32_t x) + { + x = x - ((x >> 1) & 0x55555555); + x = (x & 0x33333333) + ((x >> 2) & 0x33333333); + x = (((x + (x >> 4)) & 0x0F0F0F0F) * 0x01010101) >> ((sizeof(uint32_t) - 1) << 3); + + return x; + } + + /** Returns an access mask which has all the access bits, relevant to the user-specified image layout, + * enabled. + * + * The access mask can be further restricted to the specified queue family type. + */ + Anvil::AccessFlags get_access_mask_from_image_layout(Anvil::ImageLayout in_layout, + Anvil::QueueFamilyType in_queue_family_type = Anvil::QueueFamilyType::UNDEFINED); + + + /** Converts a pair of Anvil::MemoryPropertyFlags and Anvil::MemoryHeapFlags bitfields to a corresponding Anvil::MemoryFeatureFlags + * enum. + * + * @param in_opt_mem_type_flags Vulkan memory property flags. May be 0. + * @param in_opt_mem_heap_flags Vulkan memory heap flags. May be 0. + * + * @return Result bitfield. + */ + Anvil::MemoryFeatureFlags get_memory_feature_flags_from_vk_property_flags(Anvil::MemoryPropertyFlags in_opt_mem_type_flags, + Anvil::MemoryHeapFlags in_opt_mem_heap_flags); + + /** Converts the specified queue family type to a raw string + * + * @param in_queue_family_type Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(Anvil::QueueFamilyType in_queue_family_type); + + /** Converts the specified VkAttachmentLoadOp value to a raw string + * + * @param in_load_op Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(VkAttachmentLoadOp in_load_op); + + /** Converts the specified VkAttachmentStoreOp value to a raw string + * + * @param in_store_op Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(VkAttachmentStoreOp in_store_op); + + /** Converts the specified VkBlendFactor value to a raw string + * + * @param in_blend_factor Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(VkBlendFactor in_blend_factor); + + /** Converts the specified VkBlendOp value to a raw string + * + * @param in_blend_op Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(VkBlendOp in_blend_op); + + /** Converts the specified VkCompareOp value to a raw string + * + * @param in_compare_op Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(VkCompareOp in_compare_op); + + /** Converts the specified VkCullModeFlagBits value to a raw string + * + * @param in_cull_mode Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(VkCullModeFlagBits in_cull_mode); + + /** Converts the specified VkDescriptorType value to a raw string + * + * @param in_descriptor_type Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(VkDescriptorType in_descriptor_type); + + /** Converts the specified VkFrontFace value to a raw string + * + * @param in_front_face Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(VkFrontFace in_front_face); + + /** Converts the specified VkImageAspectFlagBits value to a raw string + * + * @param in_image_aspect_flag Input value. + * + * @return Non-NULL value if successful, NULL otherwise. 
+ */ + const char* get_raw_string(VkImageAspectFlagBits in_image_aspect_flag); + + /** Converts the specified VkImageLayout value to a raw string + * + * @param in_image_layout Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(VkImageLayout in_image_layout); + + /** Converts the specified VkImageTiling value to a raw string + * + * @param in_image_tiling Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(VkImageTiling in_image_tiling); + + /** Converts the specified VkImageType value to a raw string + * + * @param in_image_type Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(VkImageType in_image_type); + + /** Converts the specified VkImageViewType value to a raw string + * + * @param in_image_view_type Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(VkImageViewType in_image_view_type); + + /** Converts the specified VkLogicOp value to a raw string + * + * @param in_logic_op Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(VkLogicOp in_logic_op); + + /** Converts the specified VkPolygonMode value to a raw string + * + * @param in_polygon_mode Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(VkPolygonMode in_polygon_mode); + + /** Converts the specified VkPrimitiveTopology value to a raw string + * + * @param in_topology Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(VkPrimitiveTopology in_topology); + + /** Converts the specified VkSampleCountFlagBits value to a raw string + * + * @param in_sample_count Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(VkSampleCountFlagBits in_sample_count); + + /** Converts the specified Anvil::ShaderStage value to a raw string + * + * @param in_shader_stage Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(Anvil::ShaderStage in_shader_stage); + + /** Converts the specified VkShaderStageFlagBits value to a raw string + * + * @param in_shader_stage Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(VkShaderStageFlagBits in_shader_stage); + + /** Converts the specified VkSharingMode value to a raw string + * + * @param in_sharing_mode Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(VkSharingMode in_sharing_mode); + + /** Converts the specified VkStencilOp value to a raw string + * + * @param in_stencil_op Input value. + * + * @return Non-NULL value if successful, NULL otherwise. + */ + const char* get_raw_string(VkStencilOp in_stencil_op); + + /* Returns Vulkan equivalent of @param in_shader_stage */ + Anvil::ShaderStageFlagBits get_shader_stage_flag_bits_from_shader_stage(Anvil::ShaderStage in_shader_stage); + + /* TODO + * + * get_vk_object_type_for_*() return VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT if no enum exists for @param in_object_type. Since EXT_debug_utils + * has been deprecated, this is not an unlikely scenario. 
+ */ + Anvil::ObjectType get_object_type_for_vk_object_type (const VkObjectType& in_object_type); + Anvil::ObjectType get_object_type_for_vk_debug_report_object_type (const VkDebugReportObjectTypeEXT& in_object_type); + VkDebugReportObjectTypeEXT get_vk_debug_report_object_type_ext_from_object_type(const Anvil::ObjectType& in_object_type); + VkObjectType get_vk_object_type_for_object_type (const Anvil::ObjectType& in_object_type); + + /** Converts an Anvil::MemoryFeatureFlags value to a pair of corresponding Vulkan enum values. + * + * @param in_mem_feature_flags Anvil memory feature flags to use for conversion. + * @param out_mem_type_flags_ptr Deref will be set to a corresponding Anvil::MemoryPropertyFlags value. Must not + * be nullptr. + * @param out_mem_heap_flags_ptr Deref will be set to a corresponding Anvil::MemoryHeapFlags value. Must not + * be nullptr. + **/ + void get_vk_property_flags_from_memory_feature_flags(Anvil::MemoryFeatureFlags in_mem_feature_flags, + Anvil::MemoryPropertyFlags* out_mem_type_flags_ptr, + Anvil::MemoryHeapFlags* out_mem_heap_flags_ptr); + + #ifdef _WIN32 + bool is_nt_handle(const Anvil::ExternalFenceHandleTypeFlagBits& in_type); + bool is_nt_handle(const Anvil::ExternalMemoryHandleTypeFlagBits& in_type); + bool is_nt_handle(const Anvil::ExternalSemaphoreHandleTypeFlagBits& in_type); + #endif + + /** Tells whether @param in_value is a power-of-two. */ + template<typename type> + bool is_pow2(const type in_value) + { + return ((in_value & (in_value - 1)) == 0); + } + + /** Rounds down @param in_value to a multiple of @param in_base */ + template<typename type> + type round_down(const type in_value, const type in_base) + { + if ((in_value % in_base) == 0) + { + return in_value; + } + else + { + return in_value - (in_value % in_base); + } + } + + /** Rounds up @param in_value to a multiple of @param in_base */ + template<typename type> + type round_up(const type in_value, const type in_base) + { + if ((in_value % in_base) == 0) + { + return in_value; + } + else + { + return in_value + (in_base - in_value % in_base); + } + } + }; +}; /* Anvil namespace */ + +#endif /* TYPES_UTILS_H */ \ No newline at end of file diff --git a/include/misc/vulkan.h b/include/misc/vulkan.h new file mode 100644 index 00000000..f3c36065 --- /dev/null +++ b/include/misc/vulkan.h @@ -0,0 +1,213 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +#ifndef MISC_VULKAN_H +#define MISC_VULKAN_H + +#include +#include "vulkan/vulkan.h" + +namespace Anvil +{ + /* Anvil::Vulkan exposes raw pointers to Vulkan entrypoints. + * + * These func ptrs are initialized, first time a Vulkan instance is created. Applications MUST NOT + * assume the entrypoints are available prior to the time. + */ + namespace Vulkan + { + /* VK 1.0 core */ + extern PFN_vkCreateInstance vkCreateInstance; + extern PFN_vkDestroyInstance vkDestroyInstance; + extern PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices; + extern PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures; + extern PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties; + extern PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties; + extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties; + extern PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties; + extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties; + extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr; + extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr; + extern PFN_vkCreateDevice vkCreateDevice; + extern PFN_vkDestroyDevice vkDestroyDevice; + extern PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties; + extern PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties; + extern PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties; + extern PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties; + extern PFN_vkGetDeviceQueue vkGetDeviceQueue; + extern PFN_vkQueueSubmit vkQueueSubmit; + extern PFN_vkQueueWaitIdle vkQueueWaitIdle; + extern PFN_vkDeviceWaitIdle vkDeviceWaitIdle; + extern PFN_vkAllocateMemory vkAllocateMemory; + extern PFN_vkFreeMemory vkFreeMemory; + extern PFN_vkMapMemory vkMapMemory; + extern PFN_vkUnmapMemory vkUnmapMemory; + extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges; + extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges; + extern PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment; + extern PFN_vkBindBufferMemory vkBindBufferMemory; + extern PFN_vkBindImageMemory vkBindImageMemory; + extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements; + extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements; + extern PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements; + extern PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties; + extern PFN_vkQueueBindSparse vkQueueBindSparse; + extern PFN_vkCreateFence vkCreateFence; + extern PFN_vkDestroyFence vkDestroyFence; + extern PFN_vkResetFences vkResetFences; + extern PFN_vkGetFenceStatus vkGetFenceStatus; + extern PFN_vkWaitForFences vkWaitForFences; + extern PFN_vkCreateSemaphore vkCreateSemaphore; + extern PFN_vkDestroySemaphore vkDestroySemaphore; + extern PFN_vkCreateEvent vkCreateEvent; + extern PFN_vkDestroyEvent vkDestroyEvent; + extern PFN_vkGetEventStatus vkGetEventStatus; + extern PFN_vkSetEvent vkSetEvent; + extern PFN_vkResetEvent vkResetEvent; + extern PFN_vkCreateQueryPool vkCreateQueryPool; + extern PFN_vkDestroyQueryPool vkDestroyQueryPool; + extern PFN_vkGetQueryPoolResults vkGetQueryPoolResults; + extern PFN_vkCreateBuffer vkCreateBuffer; + extern PFN_vkDestroyBuffer vkDestroyBuffer; + extern PFN_vkCreateBufferView vkCreateBufferView; + extern PFN_vkDestroyBufferView vkDestroyBufferView; + 
extern PFN_vkCreateImage vkCreateImage; + extern PFN_vkDestroyImage vkDestroyImage; + extern PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout; + extern PFN_vkCreateImageView vkCreateImageView; + extern PFN_vkDestroyImageView vkDestroyImageView; + extern PFN_vkCreateShaderModule vkCreateShaderModule; + extern PFN_vkDestroyShaderModule vkDestroyShaderModule; + extern PFN_vkCreatePipelineCache vkCreatePipelineCache; + extern PFN_vkDestroyPipelineCache vkDestroyPipelineCache; + extern PFN_vkGetPipelineCacheData vkGetPipelineCacheData; + extern PFN_vkMergePipelineCaches vkMergePipelineCaches; + extern PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines; + extern PFN_vkCreateComputePipelines vkCreateComputePipelines; + extern PFN_vkDestroyPipeline vkDestroyPipeline; + extern PFN_vkCreatePipelineLayout vkCreatePipelineLayout; + extern PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout; + extern PFN_vkCreateSampler vkCreateSampler; + extern PFN_vkDestroySampler vkDestroySampler; + extern PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout; + extern PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout; + extern PFN_vkCreateDescriptorPool vkCreateDescriptorPool; + extern PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool; + extern PFN_vkResetDescriptorPool vkResetDescriptorPool; + extern PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets; + extern PFN_vkFreeDescriptorSets vkFreeDescriptorSets; + extern PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets; + extern PFN_vkCreateFramebuffer vkCreateFramebuffer; + extern PFN_vkDestroyFramebuffer vkDestroyFramebuffer; + extern PFN_vkCreateRenderPass vkCreateRenderPass; + extern PFN_vkDestroyRenderPass vkDestroyRenderPass; + extern PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity; + extern PFN_vkCreateCommandPool vkCreateCommandPool; + extern PFN_vkDestroyCommandPool vkDestroyCommandPool; + extern PFN_vkResetCommandPool vkResetCommandPool; + extern PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers; + extern PFN_vkFreeCommandBuffers vkFreeCommandBuffers; + extern PFN_vkBeginCommandBuffer vkBeginCommandBuffer; + extern PFN_vkEndCommandBuffer vkEndCommandBuffer; + extern PFN_vkResetCommandBuffer vkResetCommandBuffer; + extern PFN_vkCmdBindPipeline vkCmdBindPipeline; + extern PFN_vkCmdSetViewport vkCmdSetViewport; + extern PFN_vkCmdSetScissor vkCmdSetScissor; + extern PFN_vkCmdSetLineWidth vkCmdSetLineWidth; + extern PFN_vkCmdSetDepthBias vkCmdSetDepthBias; + extern PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants; + extern PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds; + extern PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask; + extern PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask; + extern PFN_vkCmdSetStencilReference vkCmdSetStencilReference; + extern PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets; + extern PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer; + extern PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers; + extern PFN_vkCmdDraw vkCmdDraw; + extern PFN_vkCmdDrawIndexed vkCmdDrawIndexed; + extern PFN_vkCmdDrawIndirect vkCmdDrawIndirect; + extern PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect; + extern PFN_vkCmdDispatch vkCmdDispatch; + extern PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect; + extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer; + extern PFN_vkCmdCopyImage vkCmdCopyImage; + extern PFN_vkCmdBlitImage vkCmdBlitImage; + extern PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage; + extern PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer; + extern PFN_vkCmdUpdateBuffer 
vkCmdUpdateBuffer; + extern PFN_vkCmdFillBuffer vkCmdFillBuffer; + extern PFN_vkCmdClearColorImage vkCmdClearColorImage; + extern PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage; + extern PFN_vkCmdClearAttachments vkCmdClearAttachments; + extern PFN_vkCmdResolveImage vkCmdResolveImage; + extern PFN_vkCmdSetEvent vkCmdSetEvent; + extern PFN_vkCmdResetEvent vkCmdResetEvent; + extern PFN_vkCmdWaitEvents vkCmdWaitEvents; + extern PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier; + extern PFN_vkCmdBeginQuery vkCmdBeginQuery; + extern PFN_vkCmdEndQuery vkCmdEndQuery; + extern PFN_vkCmdResetQueryPool vkCmdResetQueryPool; + extern PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp; + extern PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults; + extern PFN_vkCmdPushConstants vkCmdPushConstants; + extern PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass; + extern PFN_vkCmdNextSubpass vkCmdNextSubpass; + extern PFN_vkCmdEndRenderPass vkCmdEndRenderPass; + extern PFN_vkCmdExecuteCommands vkCmdExecuteCommands; + + /* VK 1.1 core - only available if implementation reports VK 1.1 support! + * + * These function pointers are always retrieved at run-time. + */ + extern PFN_vkBindBufferMemory2 vkBindBufferMemory2; + extern PFN_vkBindImageMemory2 vkBindImageMemory2; + extern PFN_vkCmdDispatchBase vkCmdDispatchBase; + extern PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask; + extern PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate; + extern PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion; + extern PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate; + extern PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion; + extern PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion; + extern PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups; + extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2; + extern PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport; + extern PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures; + extern PFN_vkGetDeviceQueue2 vkGetDeviceQueue2; + extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2; + extern PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2; + extern PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties; + extern PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties; + extern PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties; + extern PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2; + extern PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2; + extern PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2; + extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2; + extern PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2; + extern PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2; + extern PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2; + extern PFN_vkTrimCommandPool vkTrimCommandPool; + extern PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate; + + /* Func pointers to extensions are exposed to apps via relevant functions implemented by Anvil::*Device and Anvil::Instance. 
*/ + } +} + +#endif /* MISC_VULKAN_H */ diff --git a/include/misc/window.h b/include/misc/window.h index f2d877f0..0bc4a561 100644 --- a/include/misc/window.h +++ b/include/misc/window.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -66,7 +66,7 @@ namespace Anvil /* Stub window implementation - useful for off-screen rendering. * * This dummy window saves each "presented" frame in a PNG file. For that process to be successful, - * the application MUST ensure the swapchain image is transitioned to VK_IMAGE_LAYOUT_GENERAL before + * the application MUST ensure the swapchain image is transitioned to Anvil::ImageLayout::GENERAL before * it is presented. **/ WINDOW_PLATFORM_DUMMY_WITH_PNG_SNAPSHOTS, @@ -90,6 +90,8 @@ namespace Anvil WINDOW_PLATFORM_WAYLAND, #endif + WINDOW_PLATFORM_GENERIC, + /* Always last */ WINDOW_PLATFORM_COUNT, WINDOW_PLATFORM_UNKNOWN = WINDOW_PLATFORM_COUNT @@ -124,6 +126,8 @@ namespace Anvil bool in_closable, PresentCallbackFunction in_present_callback_func); + virtual ~Window(); + /** Closes the window and unblocks the thread executing the message pump. */ virtual void close() { /* Stub */ } @@ -194,7 +198,6 @@ namespace Anvil bool m_window_owned; /* protected functions */ - virtual ~Window(); private: /* Private functions */ diff --git a/include/misc/window_factory.h b/include/misc/window_factory.h index 0c858b95..cd331057 100644 --- a/include/misc/window_factory.h +++ b/include/misc/window_factory.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -56,15 +56,17 @@ namespace Anvil * @param in_closable Should the "close button" of the window be accesible to the user? Depending on the OS, * this may translate to the button being greyed out or not reacting to the user's requests. * @param in_present_callback_func Callback function to use for rendering frame contents. + * @param in_visible Should the created window be made visible at creation time? * * @return A new Window wrapper instance if successful, null otherwise. **/ - static std::shared_ptr create_window(WindowPlatform in_platform, - const std::string& in_title, - unsigned int in_width, - unsigned int in_height, - bool in_closable, - Anvil::PresentCallbackFunction in_present_callback_func); + static Anvil::WindowUniquePtr create_window(WindowPlatform in_platform, + const std::string& in_title, + unsigned int in_width, + unsigned int in_height, + bool in_closable, + Anvil::PresentCallbackFunction in_present_callback_func, + bool in_visible = true); /* Creates a Window wrapper instance using app-managed window handle. * @@ -79,9 +81,9 @@ namespace Anvil * @param in_handle Valid, existing window handle. * @param in_xcb_connection Only relevant for XCB window platform; Pointer to xcb_connection_t which owns the window. 
*/ - static std::shared_ptr create_window(WindowPlatform in_platform, - WindowHandle in_handle, - void* in_xcb_connection_ptr = nullptr); + static Anvil::WindowUniquePtr create_window(WindowPlatform in_platform, + WindowHandle in_handle, + void* in_xcb_connection_ptr = nullptr); }; }; /* namespace Anvil */ diff --git a/include/misc/window_generic.h b/include/misc/window_generic.h new file mode 100644 index 00000000..80239a62 --- /dev/null +++ b/include/misc/window_generic.h @@ -0,0 +1,117 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// + +/** Implements a generic window wrapper around an app-provided Win32, XCB, Wayland or windowless handle. + * + * NOTE: This wrapper does not support scaling (yet). + **/ +#ifndef WINDOW_GENERIC_H +#define WINDOW_GENERIC_H + +#include "misc/window.h" + +namespace Anvil +{ + class WindowGeneric : public Window + { + public: + enum class Type : uint8_t { + Win32 = 0, + Xcb, + Wayland, + Windowless, + + Count, + }; + union Handle { + void *win32Window; + void *waylandWindow; + uint32_t xcbWindow; + }; + using Connection = void*; + using Display = void*; + static Anvil::WindowUniquePtr create(Type type, Handle handle, Display display, Connection connection, uint32_t width, uint32_t height, bool visible, uint32_t fbWidth, uint32_t fbHeight); + + virtual ~WindowGeneric() { /* Stub */ } + + virtual void close() override; + virtual void run() override; + + /* Returns window's platform */ + WindowPlatform get_platform() const + { + return WINDOW_PLATFORM_GENERIC; + } + + bool is_dummy() + { + return false; + } + + /* Returns the window system connection specified at creation time. May be null for back-ends which do not use one. */ + virtual void* get_connection() const override { return m_connection; } + Type get_type() const { return m_type; } + Display get_display() const { return m_display; } + Handle get_generic_handle() const { return m_handle; } + uint32_t get_framebuffer_width() const { return m_fbWidth; } + uint32_t get_framebuffer_height() const { return m_fbHeight; } + void set_framebuffer_size(uint32_t in_width, uint32_t in_height) + { + m_fbWidth = in_width; + m_fbHeight = in_height; + } + + /** Changes the window title. + * + * @param in_new_title Null-terminated string, holding the new title.
+ */ + void set_title(const std::string& in_new_title) override; + + private: + /* Private functions */ + + WindowGeneric(Type type, + Handle handle, + const std::string &in_title, + Display display, + Connection connection, + unsigned int in_width, + unsigned int in_height, + uint32_t fbWidth, + uint32_t fbHeight, + PresentCallbackFunction in_present_callback_func); + + /** Creates a new system window and prepares it for usage. */ + bool init(const bool& in_visible); + + /* Private variables */ + Type m_type; + Display m_display; + Connection m_connection; + Handle m_handle; + uint32_t m_fbWidth; + uint32_t m_fbHeight; + }; +}; /* namespace Anvil */ + +#endif /* WINDOW_WIN3264_H */ + diff --git a/include/misc/window_win3264.h b/include/misc/window_win3264.h index d161e360..dd37505e 100644 --- a/include/misc/window_win3264.h +++ b/include/misc/window_win3264.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -47,14 +47,16 @@ namespace Anvil * the user from being able to destroy the window on their own. * @param in_present_callback_func Func pointer to a function which is going to render frame contents to * the swapchain image. Must not be null. + * @param in_visible Should the window be made visible at creation time? * * @return New Anvil::Window instance if successful, or null otherwise. */ - static std::shared_ptr create(const std::string& in_title, - unsigned int in_width, - unsigned int in_height, - bool in_closable, - Anvil::PresentCallbackFunction in_present_callback_func); + static Anvil::WindowUniquePtr create(const std::string& in_title, + unsigned int in_width, + unsigned int in_height, + bool in_closable, + Anvil::PresentCallbackFunction in_present_callback_func, + bool in_visible); /* Creates a window wrapper instance from an existing window handle. * @@ -68,7 +70,7 @@ namespace Anvil * * @return New Anvil::Window instance if successful, or null otherwise. */ - static std::shared_ptr create(HWND in_window_handle); + static Anvil::WindowUniquePtr create(HWND in_window_handle); virtual ~WindowWin3264(){ /* Stub */ } @@ -110,7 +112,7 @@ namespace Anvil PresentCallbackFunction in_present_callback_func); /** Creates a new system window and prepares it for usage. 
*/ - bool init(); + bool init(const bool& in_visible); static LRESULT CALLBACK msg_callback_pfn_proc(HWND in_window_handle, UINT in_message_id, diff --git a/include/misc/window_xcb.h b/include/misc/window_xcb.h index 019af2f6..f8019720 100644 --- a/include/misc/window_xcb.h +++ b/include/misc/window_xcb.h @@ -35,14 +35,15 @@ namespace Anvil { public: /* Public functions */ - static std::shared_ptr create(const std::string& in_title, - unsigned int in_width, - unsigned int in_height, - bool in_closable, - Anvil::PresentCallbackFunction in_present_callback_func); + static Anvil::WindowUniquePtr create(const std::string& in_title, + unsigned int in_width, + unsigned int in_height, + bool in_closable, + Anvil::PresentCallbackFunction in_present_callback_func, + bool in_visible); - static std::shared_ptr create(xcb_connection_t* in_connection_ptr, - WindowHandle in_window_handle); + static Anvil::WindowUniquePtr create(xcb_connection_t* in_connection_ptr, + WindowHandle in_window_handle); virtual ~WindowXcb(); @@ -77,7 +78,7 @@ namespace Anvil WindowHandle in_window_handle); /** Creates a new system window and prepares it for usage. */ - bool init(); + bool init (const bool& in_visible); bool init_connection(); /* Private variables */ diff --git a/include/vk_video/vulkan_video_codec_av1std.h b/include/vk_video/vulkan_video_codec_av1std.h new file mode 100644 index 00000000..347e0d2a --- /dev/null +++ b/include/vk_video/vulkan_video_codec_av1std.h @@ -0,0 +1,394 @@ +#ifndef VULKAN_VIDEO_CODEC_AV1STD_H_ +#define VULKAN_VIDEO_CODEC_AV1STD_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_av1std is a preprocessor guard. Do not pass it to API calls. 
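/* Editorial aside, not part of the generated header: the structures declared below are plain
 * C descriptions of AV1 bitstream state which the application fills in and passes to the
 * Vulkan Video extensions. A minimal sketch, using only names defined in this header plus
 * memset() from <string.h> (the 8-bit 4:2:0 BT.709 values are illustrative only):
 *
 *     StdVideoAV1ColorConfig color_config;
 *
 *     memset(&color_config, 0, sizeof(color_config));
 *
 *     color_config.flags.color_description_present_flag = 1;
 *     color_config.BitDepth                              = 8;
 *     color_config.subsampling_x                         = 1;  // 4:2:0 chroma subsampling
 *     color_config.subsampling_y                         = 1;
 *     color_config.color_primaries                       = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_709;
 *     color_config.transfer_characteristics              = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_709;
 *     color_config.matrix_coefficients                   = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_709;
 */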
+#define vulkan_video_codec_av1std 1 +#include "vulkan_video_codecs_common.h" +#define STD_VIDEO_AV1_NUM_REF_FRAMES 8 +#define STD_VIDEO_AV1_REFS_PER_FRAME 7 +#define STD_VIDEO_AV1_TOTAL_REFS_PER_FRAME 8 +#define STD_VIDEO_AV1_MAX_TILE_COLS 64 +#define STD_VIDEO_AV1_MAX_TILE_ROWS 64 +#define STD_VIDEO_AV1_MAX_SEGMENTS 8 +#define STD_VIDEO_AV1_SEG_LVL_MAX 8 +#define STD_VIDEO_AV1_PRIMARY_REF_NONE 7 +#define STD_VIDEO_AV1_SELECT_INTEGER_MV 2 +#define STD_VIDEO_AV1_SELECT_SCREEN_CONTENT_TOOLS 2 +#define STD_VIDEO_AV1_SKIP_MODE_FRAMES 2 +#define STD_VIDEO_AV1_MAX_LOOP_FILTER_STRENGTHS 4 +#define STD_VIDEO_AV1_LOOP_FILTER_ADJUSTMENTS 2 +#define STD_VIDEO_AV1_MAX_CDEF_FILTER_STRENGTHS 8 +#define STD_VIDEO_AV1_MAX_NUM_PLANES 3 +#define STD_VIDEO_AV1_GLOBAL_MOTION_PARAMS 6 +#define STD_VIDEO_AV1_MAX_NUM_Y_POINTS 14 +#define STD_VIDEO_AV1_MAX_NUM_CB_POINTS 10 +#define STD_VIDEO_AV1_MAX_NUM_CR_POINTS 10 +#define STD_VIDEO_AV1_MAX_NUM_POS_LUMA 24 +#define STD_VIDEO_AV1_MAX_NUM_POS_CHROMA 25 + +typedef enum StdVideoAV1Profile { + STD_VIDEO_AV1_PROFILE_MAIN = 0, + STD_VIDEO_AV1_PROFILE_HIGH = 1, + STD_VIDEO_AV1_PROFILE_PROFESSIONAL = 2, + STD_VIDEO_AV1_PROFILE_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_PROFILE_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1Profile; + +typedef enum StdVideoAV1Level { + STD_VIDEO_AV1_LEVEL_2_0 = 0, + STD_VIDEO_AV1_LEVEL_2_1 = 1, + STD_VIDEO_AV1_LEVEL_2_2 = 2, + STD_VIDEO_AV1_LEVEL_2_3 = 3, + STD_VIDEO_AV1_LEVEL_3_0 = 4, + STD_VIDEO_AV1_LEVEL_3_1 = 5, + STD_VIDEO_AV1_LEVEL_3_2 = 6, + STD_VIDEO_AV1_LEVEL_3_3 = 7, + STD_VIDEO_AV1_LEVEL_4_0 = 8, + STD_VIDEO_AV1_LEVEL_4_1 = 9, + STD_VIDEO_AV1_LEVEL_4_2 = 10, + STD_VIDEO_AV1_LEVEL_4_3 = 11, + STD_VIDEO_AV1_LEVEL_5_0 = 12, + STD_VIDEO_AV1_LEVEL_5_1 = 13, + STD_VIDEO_AV1_LEVEL_5_2 = 14, + STD_VIDEO_AV1_LEVEL_5_3 = 15, + STD_VIDEO_AV1_LEVEL_6_0 = 16, + STD_VIDEO_AV1_LEVEL_6_1 = 17, + STD_VIDEO_AV1_LEVEL_6_2 = 18, + STD_VIDEO_AV1_LEVEL_6_3 = 19, + STD_VIDEO_AV1_LEVEL_7_0 = 20, + STD_VIDEO_AV1_LEVEL_7_1 = 21, + STD_VIDEO_AV1_LEVEL_7_2 = 22, + STD_VIDEO_AV1_LEVEL_7_3 = 23, + STD_VIDEO_AV1_LEVEL_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_LEVEL_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1Level; + +typedef enum StdVideoAV1FrameType { + STD_VIDEO_AV1_FRAME_TYPE_KEY = 0, + STD_VIDEO_AV1_FRAME_TYPE_INTER = 1, + STD_VIDEO_AV1_FRAME_TYPE_INTRA_ONLY = 2, + STD_VIDEO_AV1_FRAME_TYPE_SWITCH = 3, + STD_VIDEO_AV1_FRAME_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_FRAME_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1FrameType; + +typedef enum StdVideoAV1ReferenceName { + STD_VIDEO_AV1_REFERENCE_NAME_INTRA_FRAME = 0, + STD_VIDEO_AV1_REFERENCE_NAME_LAST_FRAME = 1, + STD_VIDEO_AV1_REFERENCE_NAME_LAST2_FRAME = 2, + STD_VIDEO_AV1_REFERENCE_NAME_LAST3_FRAME = 3, + STD_VIDEO_AV1_REFERENCE_NAME_GOLDEN_FRAME = 4, + STD_VIDEO_AV1_REFERENCE_NAME_BWDREF_FRAME = 5, + STD_VIDEO_AV1_REFERENCE_NAME_ALTREF2_FRAME = 6, + STD_VIDEO_AV1_REFERENCE_NAME_ALTREF_FRAME = 7, + STD_VIDEO_AV1_REFERENCE_NAME_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_REFERENCE_NAME_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1ReferenceName; + +typedef enum StdVideoAV1InterpolationFilter { + STD_VIDEO_AV1_INTERPOLATION_FILTER_EIGHTTAP = 0, + STD_VIDEO_AV1_INTERPOLATION_FILTER_EIGHTTAP_SMOOTH = 1, + STD_VIDEO_AV1_INTERPOLATION_FILTER_EIGHTTAP_SHARP = 2, + STD_VIDEO_AV1_INTERPOLATION_FILTER_BILINEAR = 3, + STD_VIDEO_AV1_INTERPOLATION_FILTER_SWITCHABLE = 4, + STD_VIDEO_AV1_INTERPOLATION_FILTER_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_INTERPOLATION_FILTER_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1InterpolationFilter; + +typedef enum StdVideoAV1TxMode { + 
STD_VIDEO_AV1_TX_MODE_ONLY_4X4 = 0, + STD_VIDEO_AV1_TX_MODE_LARGEST = 1, + STD_VIDEO_AV1_TX_MODE_SELECT = 2, + STD_VIDEO_AV1_TX_MODE_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_TX_MODE_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1TxMode; + +typedef enum StdVideoAV1FrameRestorationType { + STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_NONE = 0, + STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_WIENER = 1, + STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_SGRPROJ = 2, + STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_SWITCHABLE = 3, + STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1FrameRestorationType; + +typedef enum StdVideoAV1ColorPrimaries { + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_709 = 1, + STD_VIDEO_AV1_COLOR_PRIMARIES_UNSPECIFIED = 2, + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_470_M = 4, + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_470_B_G = 5, + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_601 = 6, + STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_240 = 7, + STD_VIDEO_AV1_COLOR_PRIMARIES_GENERIC_FILM = 8, + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_2020 = 9, + STD_VIDEO_AV1_COLOR_PRIMARIES_XYZ = 10, + STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_431 = 11, + STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_432 = 12, + STD_VIDEO_AV1_COLOR_PRIMARIES_EBU_3213 = 22, + STD_VIDEO_AV1_COLOR_PRIMARIES_INVALID = 0x7FFFFFFF, + // STD_VIDEO_AV1_COLOR_PRIMARIES_BT_UNSPECIFIED is a deprecated alias + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_UNSPECIFIED = STD_VIDEO_AV1_COLOR_PRIMARIES_UNSPECIFIED, + STD_VIDEO_AV1_COLOR_PRIMARIES_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1ColorPrimaries; + +typedef enum StdVideoAV1TransferCharacteristics { + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_RESERVED_0 = 0, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_709 = 1, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_UNSPECIFIED = 2, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_RESERVED_3 = 3, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_470_M = 4, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_470_B_G = 5, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_601 = 6, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SMPTE_240 = 7, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_LINEAR = 8, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_LOG_100 = 9, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_LOG_100_SQRT10 = 10, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_IEC_61966 = 11, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_1361 = 12, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SRGB = 13, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_2020_10_BIT = 14, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_2020_12_BIT = 15, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SMPTE_2084 = 16, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SMPTE_428 = 17, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_HLG = 18, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1TransferCharacteristics; + +typedef enum StdVideoAV1MatrixCoefficients { + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_IDENTITY = 0, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_709 = 1, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_UNSPECIFIED = 2, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_RESERVED_3 = 3, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_FCC = 4, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_470_B_G = 5, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_601 = 6, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_SMPTE_240 = 7, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_SMPTE_YCGCO = 8, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_2020_NCL = 9, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_2020_CL = 10, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_SMPTE_2085 = 11, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_CHROMAT_NCL = 12, + 
STD_VIDEO_AV1_MATRIX_COEFFICIENTS_CHROMAT_CL = 13, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_ICTCP = 14, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1MatrixCoefficients; + +typedef enum StdVideoAV1ChromaSamplePosition { + STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_UNKNOWN = 0, + STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_VERTICAL = 1, + STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_COLOCATED = 2, + STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_RESERVED = 3, + STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1ChromaSamplePosition; +typedef struct StdVideoAV1ColorConfigFlags { + uint32_t mono_chrome : 1; + uint32_t color_range : 1; + uint32_t separate_uv_delta_q : 1; + uint32_t color_description_present_flag : 1; + uint32_t reserved : 28; +} StdVideoAV1ColorConfigFlags; + +typedef struct StdVideoAV1ColorConfig { + StdVideoAV1ColorConfigFlags flags; + uint8_t BitDepth; + uint8_t subsampling_x; + uint8_t subsampling_y; + uint8_t reserved1; + StdVideoAV1ColorPrimaries color_primaries; + StdVideoAV1TransferCharacteristics transfer_characteristics; + StdVideoAV1MatrixCoefficients matrix_coefficients; + StdVideoAV1ChromaSamplePosition chroma_sample_position; +} StdVideoAV1ColorConfig; + +typedef struct StdVideoAV1TimingInfoFlags { + uint32_t equal_picture_interval : 1; + uint32_t reserved : 31; +} StdVideoAV1TimingInfoFlags; + +typedef struct StdVideoAV1TimingInfo { + StdVideoAV1TimingInfoFlags flags; + uint32_t num_units_in_display_tick; + uint32_t time_scale; + uint32_t num_ticks_per_picture_minus_1; +} StdVideoAV1TimingInfo; + +typedef struct StdVideoAV1LoopFilterFlags { + uint32_t loop_filter_delta_enabled : 1; + uint32_t loop_filter_delta_update : 1; + uint32_t reserved : 30; +} StdVideoAV1LoopFilterFlags; + +typedef struct StdVideoAV1LoopFilter { + StdVideoAV1LoopFilterFlags flags; + uint8_t loop_filter_level[STD_VIDEO_AV1_MAX_LOOP_FILTER_STRENGTHS]; + uint8_t loop_filter_sharpness; + uint8_t update_ref_delta; + int8_t loop_filter_ref_deltas[STD_VIDEO_AV1_TOTAL_REFS_PER_FRAME]; + uint8_t update_mode_delta; + int8_t loop_filter_mode_deltas[STD_VIDEO_AV1_LOOP_FILTER_ADJUSTMENTS]; +} StdVideoAV1LoopFilter; + +typedef struct StdVideoAV1QuantizationFlags { + uint32_t using_qmatrix : 1; + uint32_t diff_uv_delta : 1; + uint32_t reserved : 30; +} StdVideoAV1QuantizationFlags; + +typedef struct StdVideoAV1Quantization { + StdVideoAV1QuantizationFlags flags; + uint8_t base_q_idx; + int8_t DeltaQYDc; + int8_t DeltaQUDc; + int8_t DeltaQUAc; + int8_t DeltaQVDc; + int8_t DeltaQVAc; + uint8_t qm_y; + uint8_t qm_u; + uint8_t qm_v; +} StdVideoAV1Quantization; + +typedef struct StdVideoAV1Segmentation { + uint8_t FeatureEnabled[STD_VIDEO_AV1_MAX_SEGMENTS]; + int16_t FeatureData[STD_VIDEO_AV1_MAX_SEGMENTS][STD_VIDEO_AV1_SEG_LVL_MAX]; +} StdVideoAV1Segmentation; + +typedef struct StdVideoAV1TileInfoFlags { + uint32_t uniform_tile_spacing_flag : 1; + uint32_t reserved : 31; +} StdVideoAV1TileInfoFlags; + +typedef struct StdVideoAV1TileInfo { + StdVideoAV1TileInfoFlags flags; + uint8_t TileCols; + uint8_t TileRows; + uint16_t context_update_tile_id; + uint8_t tile_size_bytes_minus_1; + uint8_t reserved1[7]; + const uint16_t* pMiColStarts; + const uint16_t* pMiRowStarts; + const uint16_t* pWidthInSbsMinus1; + const uint16_t* pHeightInSbsMinus1; +} StdVideoAV1TileInfo; + +typedef struct StdVideoAV1CDEF { + uint8_t cdef_damping_minus_3; + uint8_t cdef_bits; + uint8_t 
cdef_y_pri_strength[STD_VIDEO_AV1_MAX_CDEF_FILTER_STRENGTHS]; + uint8_t cdef_y_sec_strength[STD_VIDEO_AV1_MAX_CDEF_FILTER_STRENGTHS]; + uint8_t cdef_uv_pri_strength[STD_VIDEO_AV1_MAX_CDEF_FILTER_STRENGTHS]; + uint8_t cdef_uv_sec_strength[STD_VIDEO_AV1_MAX_CDEF_FILTER_STRENGTHS]; +} StdVideoAV1CDEF; + +typedef struct StdVideoAV1LoopRestoration { + StdVideoAV1FrameRestorationType FrameRestorationType[STD_VIDEO_AV1_MAX_NUM_PLANES]; + uint16_t LoopRestorationSize[STD_VIDEO_AV1_MAX_NUM_PLANES]; +} StdVideoAV1LoopRestoration; + +typedef struct StdVideoAV1GlobalMotion { + uint8_t GmType[STD_VIDEO_AV1_NUM_REF_FRAMES]; + int32_t gm_params[STD_VIDEO_AV1_NUM_REF_FRAMES][STD_VIDEO_AV1_GLOBAL_MOTION_PARAMS]; +} StdVideoAV1GlobalMotion; + +typedef struct StdVideoAV1FilmGrainFlags { + uint32_t chroma_scaling_from_luma : 1; + uint32_t overlap_flag : 1; + uint32_t clip_to_restricted_range : 1; + uint32_t update_grain : 1; + uint32_t reserved : 28; +} StdVideoAV1FilmGrainFlags; + +typedef struct StdVideoAV1FilmGrain { + StdVideoAV1FilmGrainFlags flags; + uint8_t grain_scaling_minus_8; + uint8_t ar_coeff_lag; + uint8_t ar_coeff_shift_minus_6; + uint8_t grain_scale_shift; + uint16_t grain_seed; + uint8_t film_grain_params_ref_idx; + uint8_t num_y_points; + uint8_t point_y_value[STD_VIDEO_AV1_MAX_NUM_Y_POINTS]; + uint8_t point_y_scaling[STD_VIDEO_AV1_MAX_NUM_Y_POINTS]; + uint8_t num_cb_points; + uint8_t point_cb_value[STD_VIDEO_AV1_MAX_NUM_CB_POINTS]; + uint8_t point_cb_scaling[STD_VIDEO_AV1_MAX_NUM_CB_POINTS]; + uint8_t num_cr_points; + uint8_t point_cr_value[STD_VIDEO_AV1_MAX_NUM_CR_POINTS]; + uint8_t point_cr_scaling[STD_VIDEO_AV1_MAX_NUM_CR_POINTS]; + int8_t ar_coeffs_y_plus_128[STD_VIDEO_AV1_MAX_NUM_POS_LUMA]; + int8_t ar_coeffs_cb_plus_128[STD_VIDEO_AV1_MAX_NUM_POS_CHROMA]; + int8_t ar_coeffs_cr_plus_128[STD_VIDEO_AV1_MAX_NUM_POS_CHROMA]; + uint8_t cb_mult; + uint8_t cb_luma_mult; + uint16_t cb_offset; + uint8_t cr_mult; + uint8_t cr_luma_mult; + uint16_t cr_offset; +} StdVideoAV1FilmGrain; + +typedef struct StdVideoAV1SequenceHeaderFlags { + uint32_t still_picture : 1; + uint32_t reduced_still_picture_header : 1; + uint32_t use_128x128_superblock : 1; + uint32_t enable_filter_intra : 1; + uint32_t enable_intra_edge_filter : 1; + uint32_t enable_interintra_compound : 1; + uint32_t enable_masked_compound : 1; + uint32_t enable_warped_motion : 1; + uint32_t enable_dual_filter : 1; + uint32_t enable_order_hint : 1; + uint32_t enable_jnt_comp : 1; + uint32_t enable_ref_frame_mvs : 1; + uint32_t frame_id_numbers_present_flag : 1; + uint32_t enable_superres : 1; + uint32_t enable_cdef : 1; + uint32_t enable_restoration : 1; + uint32_t film_grain_params_present : 1; + uint32_t timing_info_present_flag : 1; + uint32_t initial_display_delay_present_flag : 1; + uint32_t reserved : 13; +} StdVideoAV1SequenceHeaderFlags; + +typedef struct StdVideoAV1SequenceHeader { + StdVideoAV1SequenceHeaderFlags flags; + StdVideoAV1Profile seq_profile; + uint8_t frame_width_bits_minus_1; + uint8_t frame_height_bits_minus_1; + uint16_t max_frame_width_minus_1; + uint16_t max_frame_height_minus_1; + uint8_t delta_frame_id_length_minus_2; + uint8_t additional_frame_id_length_minus_1; + uint8_t order_hint_bits_minus_1; + uint8_t seq_force_integer_mv; + uint8_t seq_force_screen_content_tools; + uint8_t reserved1[5]; + const StdVideoAV1ColorConfig* pColorConfig; + const StdVideoAV1TimingInfo* pTimingInfo; +} StdVideoAV1SequenceHeader; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git 
a/include/vk_video/vulkan_video_codec_av1std_decode.h b/include/vk_video/vulkan_video_codec_av1std_decode.h new file mode 100644 index 00000000..522628e8 --- /dev/null +++ b/include/vk_video/vulkan_video_codec_av1std_decode.h @@ -0,0 +1,109 @@ +#ifndef VULKAN_VIDEO_CODEC_AV1STD_DECODE_H_ +#define VULKAN_VIDEO_CODEC_AV1STD_DECODE_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_av1std_decode is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codec_av1std_decode 1 +#include "vulkan_video_codec_av1std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_AV1_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_AV1_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_AV1_DECODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_AV1_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_av1_decode" +typedef struct StdVideoDecodeAV1PictureInfoFlags { + uint32_t error_resilient_mode : 1; + uint32_t disable_cdf_update : 1; + uint32_t use_superres : 1; + uint32_t render_and_frame_size_different : 1; + uint32_t allow_screen_content_tools : 1; + uint32_t is_filter_switchable : 1; + uint32_t force_integer_mv : 1; + uint32_t frame_size_override_flag : 1; + uint32_t buffer_removal_time_present_flag : 1; + uint32_t allow_intrabc : 1; + uint32_t frame_refs_short_signaling : 1; + uint32_t allow_high_precision_mv : 1; + uint32_t is_motion_mode_switchable : 1; + uint32_t use_ref_frame_mvs : 1; + uint32_t disable_frame_end_update_cdf : 1; + uint32_t allow_warped_motion : 1; + uint32_t reduced_tx_set : 1; + uint32_t reference_select : 1; + uint32_t skip_mode_present : 1; + uint32_t delta_q_present : 1; + uint32_t delta_lf_present : 1; + uint32_t delta_lf_multi : 1; + uint32_t segmentation_enabled : 1; + uint32_t segmentation_update_map : 1; + uint32_t segmentation_temporal_update : 1; + uint32_t segmentation_update_data : 1; + uint32_t UsesLr : 1; + uint32_t usesChromaLr : 1; + uint32_t apply_grain : 1; + uint32_t reserved : 3; +} StdVideoDecodeAV1PictureInfoFlags; + +typedef struct StdVideoDecodeAV1PictureInfo { + StdVideoDecodeAV1PictureInfoFlags flags; + StdVideoAV1FrameType frame_type; + uint32_t current_frame_id; + uint8_t OrderHint; + uint8_t primary_ref_frame; + uint8_t refresh_frame_flags; + uint8_t reserved1; + StdVideoAV1InterpolationFilter interpolation_filter; + StdVideoAV1TxMode TxMode; + uint8_t delta_q_res; + uint8_t delta_lf_res; + uint8_t SkipModeFrame[STD_VIDEO_AV1_SKIP_MODE_FRAMES]; + uint8_t coded_denom; + uint8_t reserved2[3]; + uint8_t OrderHints[STD_VIDEO_AV1_NUM_REF_FRAMES]; + uint32_t expectedFrameId[STD_VIDEO_AV1_NUM_REF_FRAMES]; + const StdVideoAV1TileInfo* pTileInfo; + const StdVideoAV1Quantization* pQuantization; + const StdVideoAV1Segmentation* pSegmentation; + const StdVideoAV1LoopFilter* pLoopFilter; + const StdVideoAV1CDEF* pCDEF; + const StdVideoAV1LoopRestoration* pLoopRestoration; + const StdVideoAV1GlobalMotion* pGlobalMotion; + const StdVideoAV1FilmGrain* pFilmGrain; +} StdVideoDecodeAV1PictureInfo; + +typedef struct StdVideoDecodeAV1ReferenceInfoFlags { + uint32_t disable_frame_end_update_cdf : 1; + uint32_t segmentation_enabled : 1; + uint32_t reserved : 30; +} StdVideoDecodeAV1ReferenceInfoFlags; + +typedef struct StdVideoDecodeAV1ReferenceInfo { + StdVideoDecodeAV1ReferenceInfoFlags flags; + uint8_t frame_type; + 
uint8_t RefFrameSignBias; + uint8_t OrderHint; + uint8_t SavedOrderHints[STD_VIDEO_AV1_NUM_REF_FRAMES]; +} StdVideoDecodeAV1ReferenceInfo; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vk_video/vulkan_video_codec_av1std_encode.h b/include/vk_video/vulkan_video_codec_av1std_encode.h new file mode 100644 index 00000000..ca5f6f47 --- /dev/null +++ b/include/vk_video/vulkan_video_codec_av1std_encode.h @@ -0,0 +1,143 @@ +#ifndef VULKAN_VIDEO_CODEC_AV1STD_ENCODE_H_ +#define VULKAN_VIDEO_CODEC_AV1STD_ENCODE_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_av1std_encode is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codec_av1std_encode 1 +#include "vulkan_video_codec_av1std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_AV1_ENCODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_AV1_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_AV1_ENCODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_AV1_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_av1_encode" +typedef struct StdVideoEncodeAV1DecoderModelInfo { + uint8_t buffer_delay_length_minus_1; + uint8_t buffer_removal_time_length_minus_1; + uint8_t frame_presentation_time_length_minus_1; + uint8_t reserved1; + uint32_t num_units_in_decoding_tick; +} StdVideoEncodeAV1DecoderModelInfo; + +typedef struct StdVideoEncodeAV1ExtensionHeader { + uint8_t temporal_id; + uint8_t spatial_id; +} StdVideoEncodeAV1ExtensionHeader; + +typedef struct StdVideoEncodeAV1OperatingPointInfoFlags { + uint32_t decoder_model_present_for_this_op : 1; + uint32_t low_delay_mode_flag : 1; + uint32_t initial_display_delay_present_for_this_op : 1; + uint32_t reserved : 29; +} StdVideoEncodeAV1OperatingPointInfoFlags; + +typedef struct StdVideoEncodeAV1OperatingPointInfo { + StdVideoEncodeAV1OperatingPointInfoFlags flags; + uint16_t operating_point_idc; + uint8_t seq_level_idx; + uint8_t seq_tier; + uint32_t decoder_buffer_delay; + uint32_t encoder_buffer_delay; + uint8_t initial_display_delay_minus_1; +} StdVideoEncodeAV1OperatingPointInfo; + +typedef struct StdVideoEncodeAV1PictureInfoFlags { + uint32_t error_resilient_mode : 1; + uint32_t disable_cdf_update : 1; + uint32_t use_superres : 1; + uint32_t render_and_frame_size_different : 1; + uint32_t allow_screen_content_tools : 1; + uint32_t is_filter_switchable : 1; + uint32_t force_integer_mv : 1; + uint32_t frame_size_override_flag : 1; + uint32_t buffer_removal_time_present_flag : 1; + uint32_t allow_intrabc : 1; + uint32_t frame_refs_short_signaling : 1; + uint32_t allow_high_precision_mv : 1; + uint32_t is_motion_mode_switchable : 1; + uint32_t use_ref_frame_mvs : 1; + uint32_t disable_frame_end_update_cdf : 1; + uint32_t allow_warped_motion : 1; + uint32_t reduced_tx_set : 1; + uint32_t skip_mode_present : 1; + uint32_t delta_q_present : 1; + uint32_t delta_lf_present : 1; + uint32_t delta_lf_multi : 1; + uint32_t segmentation_enabled : 1; + uint32_t segmentation_update_map : 1; + uint32_t segmentation_temporal_update : 1; + uint32_t segmentation_update_data : 1; + uint32_t UsesLr : 1; + uint32_t usesChromaLr : 1; + uint32_t show_frame : 1; + uint32_t showable_frame : 1; + uint32_t reserved : 3; +} StdVideoEncodeAV1PictureInfoFlags; + +typedef struct StdVideoEncodeAV1PictureInfo { + StdVideoEncodeAV1PictureInfoFlags flags; + 
StdVideoAV1FrameType frame_type; + uint32_t frame_presentation_time; + uint32_t current_frame_id; + uint8_t order_hint; + uint8_t primary_ref_frame; + uint8_t refresh_frame_flags; + uint8_t coded_denom; + uint16_t render_width_minus_1; + uint16_t render_height_minus_1; + StdVideoAV1InterpolationFilter interpolation_filter; + StdVideoAV1TxMode TxMode; + uint8_t delta_q_res; + uint8_t delta_lf_res; + uint8_t ref_order_hint[STD_VIDEO_AV1_NUM_REF_FRAMES]; + int8_t ref_frame_idx[STD_VIDEO_AV1_REFS_PER_FRAME]; + uint8_t reserved1[3]; + uint32_t delta_frame_id_minus_1[STD_VIDEO_AV1_REFS_PER_FRAME]; + const StdVideoAV1TileInfo* pTileInfo; + const StdVideoAV1Quantization* pQuantization; + const StdVideoAV1Segmentation* pSegmentation; + const StdVideoAV1LoopFilter* pLoopFilter; + const StdVideoAV1CDEF* pCDEF; + const StdVideoAV1LoopRestoration* pLoopRestoration; + const StdVideoAV1GlobalMotion* pGlobalMotion; + const StdVideoEncodeAV1ExtensionHeader* pExtensionHeader; + const uint32_t* pBufferRemovalTimes; +} StdVideoEncodeAV1PictureInfo; + +typedef struct StdVideoEncodeAV1ReferenceInfoFlags { + uint32_t disable_frame_end_update_cdf : 1; + uint32_t segmentation_enabled : 1; + uint32_t reserved : 30; +} StdVideoEncodeAV1ReferenceInfoFlags; + +typedef struct StdVideoEncodeAV1ReferenceInfo { + StdVideoEncodeAV1ReferenceInfoFlags flags; + uint32_t RefFrameId; + StdVideoAV1FrameType frame_type; + uint8_t OrderHint; + uint8_t reserved1[3]; + const StdVideoEncodeAV1ExtensionHeader* pExtensionHeader; +} StdVideoEncodeAV1ReferenceInfo; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vk_video/vulkan_video_codec_h264std.h b/include/vk_video/vulkan_video_codec_h264std.h new file mode 100644 index 00000000..6fd38106 --- /dev/null +++ b/include/vk_video/vulkan_video_codec_h264std.h @@ -0,0 +1,312 @@ +#ifndef VULKAN_VIDEO_CODEC_H264STD_H_ +#define VULKAN_VIDEO_CODEC_H264STD_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h264std is a preprocessor guard. Do not pass it to API calls. 
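/* Editorial aside, not part of the generated header: the H.264 parameter-set structures
 * declared below mirror the bitstream syntax, so values are filled in exactly as they would
 * appear in an SPS. A minimal 1080p sketch, using only names defined in this header plus
 * memset() from <string.h>:
 *
 *     StdVideoH264SequenceParameterSet sps;
 *
 *     memset(&sps, 0, sizeof(sps));
 *
 *     sps.profile_idc                     = STD_VIDEO_H264_PROFILE_IDC_HIGH;
 *     sps.level_idc                       = STD_VIDEO_H264_LEVEL_IDC_4_0;
 *     sps.chroma_format_idc               = STD_VIDEO_H264_CHROMA_FORMAT_IDC_420;
 *     sps.max_num_ref_frames              = 4;
 *     sps.flags.frame_mbs_only_flag       = 1;
 *     sps.flags.direct_8x8_inference_flag = 1;
 *
 *     // 1920 x 1080: width is 120 macroblocks, height is coded as 68 macroblocks (1088 luma
 *     // rows) with the bottom 8 rows cropped away (crop unit is 2 luma rows for 4:2:0 frames).
 *     sps.pic_width_in_mbs_minus1         = 119;
 *     sps.pic_height_in_map_units_minus1  = 67;
 *     sps.flags.frame_cropping_flag       = 1;
 *     sps.frame_crop_bottom_offset        = 4;
 */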
+#define vulkan_video_codec_h264std 1 +#include "vulkan_video_codecs_common.h" +#define STD_VIDEO_H264_CPB_CNT_LIST_SIZE 32 +#define STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS 6 +#define STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS 16 +#define STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS 6 +#define STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS 64 +#define STD_VIDEO_H264_MAX_NUM_LIST_REF 32 +#define STD_VIDEO_H264_MAX_CHROMA_PLANES 2 +#define STD_VIDEO_H264_NO_REFERENCE_PICTURE 0xFF + +typedef enum StdVideoH264ChromaFormatIdc { + STD_VIDEO_H264_CHROMA_FORMAT_IDC_MONOCHROME = 0, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_420 = 1, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_422 = 2, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_444 = 3, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264ChromaFormatIdc; + +typedef enum StdVideoH264ProfileIdc { + STD_VIDEO_H264_PROFILE_IDC_BASELINE = 66, + STD_VIDEO_H264_PROFILE_IDC_MAIN = 77, + STD_VIDEO_H264_PROFILE_IDC_HIGH = 100, + STD_VIDEO_H264_PROFILE_IDC_HIGH_444_PREDICTIVE = 244, + STD_VIDEO_H264_PROFILE_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_PROFILE_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264ProfileIdc; + +typedef enum StdVideoH264LevelIdc { + STD_VIDEO_H264_LEVEL_IDC_1_0 = 0, + STD_VIDEO_H264_LEVEL_IDC_1_1 = 1, + STD_VIDEO_H264_LEVEL_IDC_1_2 = 2, + STD_VIDEO_H264_LEVEL_IDC_1_3 = 3, + STD_VIDEO_H264_LEVEL_IDC_2_0 = 4, + STD_VIDEO_H264_LEVEL_IDC_2_1 = 5, + STD_VIDEO_H264_LEVEL_IDC_2_2 = 6, + STD_VIDEO_H264_LEVEL_IDC_3_0 = 7, + STD_VIDEO_H264_LEVEL_IDC_3_1 = 8, + STD_VIDEO_H264_LEVEL_IDC_3_2 = 9, + STD_VIDEO_H264_LEVEL_IDC_4_0 = 10, + STD_VIDEO_H264_LEVEL_IDC_4_1 = 11, + STD_VIDEO_H264_LEVEL_IDC_4_2 = 12, + STD_VIDEO_H264_LEVEL_IDC_5_0 = 13, + STD_VIDEO_H264_LEVEL_IDC_5_1 = 14, + STD_VIDEO_H264_LEVEL_IDC_5_2 = 15, + STD_VIDEO_H264_LEVEL_IDC_6_0 = 16, + STD_VIDEO_H264_LEVEL_IDC_6_1 = 17, + STD_VIDEO_H264_LEVEL_IDC_6_2 = 18, + STD_VIDEO_H264_LEVEL_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_LEVEL_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264LevelIdc; + +typedef enum StdVideoH264PocType { + STD_VIDEO_H264_POC_TYPE_0 = 0, + STD_VIDEO_H264_POC_TYPE_1 = 1, + STD_VIDEO_H264_POC_TYPE_2 = 2, + STD_VIDEO_H264_POC_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_POC_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264PocType; + +typedef enum StdVideoH264AspectRatioIdc { + STD_VIDEO_H264_ASPECT_RATIO_IDC_UNSPECIFIED = 0, + STD_VIDEO_H264_ASPECT_RATIO_IDC_SQUARE = 1, + STD_VIDEO_H264_ASPECT_RATIO_IDC_12_11 = 2, + STD_VIDEO_H264_ASPECT_RATIO_IDC_10_11 = 3, + STD_VIDEO_H264_ASPECT_RATIO_IDC_16_11 = 4, + STD_VIDEO_H264_ASPECT_RATIO_IDC_40_33 = 5, + STD_VIDEO_H264_ASPECT_RATIO_IDC_24_11 = 6, + STD_VIDEO_H264_ASPECT_RATIO_IDC_20_11 = 7, + STD_VIDEO_H264_ASPECT_RATIO_IDC_32_11 = 8, + STD_VIDEO_H264_ASPECT_RATIO_IDC_80_33 = 9, + STD_VIDEO_H264_ASPECT_RATIO_IDC_18_11 = 10, + STD_VIDEO_H264_ASPECT_RATIO_IDC_15_11 = 11, + STD_VIDEO_H264_ASPECT_RATIO_IDC_64_33 = 12, + STD_VIDEO_H264_ASPECT_RATIO_IDC_160_99 = 13, + STD_VIDEO_H264_ASPECT_RATIO_IDC_4_3 = 14, + STD_VIDEO_H264_ASPECT_RATIO_IDC_3_2 = 15, + STD_VIDEO_H264_ASPECT_RATIO_IDC_2_1 = 16, + STD_VIDEO_H264_ASPECT_RATIO_IDC_EXTENDED_SAR = 255, + STD_VIDEO_H264_ASPECT_RATIO_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_ASPECT_RATIO_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264AspectRatioIdc; + +typedef enum StdVideoH264WeightedBipredIdc { + STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_DEFAULT = 0, + STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_EXPLICIT = 1, + STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_IMPLICIT = 2, + 
STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264WeightedBipredIdc; + +typedef enum StdVideoH264ModificationOfPicNumsIdc { + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_SUBTRACT = 0, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_ADD = 1, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_LONG_TERM = 2, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_END = 3, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264ModificationOfPicNumsIdc; + +typedef enum StdVideoH264MemMgmtControlOp { + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_END = 0, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_SHORT_TERM = 1, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_LONG_TERM = 2, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_LONG_TERM = 3, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_SET_MAX_LONG_TERM_INDEX = 4, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_ALL = 5, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_CURRENT_AS_LONG_TERM = 6, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264MemMgmtControlOp; + +typedef enum StdVideoH264CabacInitIdc { + STD_VIDEO_H264_CABAC_INIT_IDC_0 = 0, + STD_VIDEO_H264_CABAC_INIT_IDC_1 = 1, + STD_VIDEO_H264_CABAC_INIT_IDC_2 = 2, + STD_VIDEO_H264_CABAC_INIT_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_CABAC_INIT_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264CabacInitIdc; + +typedef enum StdVideoH264DisableDeblockingFilterIdc { + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_DISABLED = 0, + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_ENABLED = 1, + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_PARTIAL = 2, + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264DisableDeblockingFilterIdc; + +typedef enum StdVideoH264SliceType { + STD_VIDEO_H264_SLICE_TYPE_P = 0, + STD_VIDEO_H264_SLICE_TYPE_B = 1, + STD_VIDEO_H264_SLICE_TYPE_I = 2, + STD_VIDEO_H264_SLICE_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_SLICE_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264SliceType; + +typedef enum StdVideoH264PictureType { + STD_VIDEO_H264_PICTURE_TYPE_P = 0, + STD_VIDEO_H264_PICTURE_TYPE_B = 1, + STD_VIDEO_H264_PICTURE_TYPE_I = 2, + STD_VIDEO_H264_PICTURE_TYPE_IDR = 5, + STD_VIDEO_H264_PICTURE_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_PICTURE_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264PictureType; + +typedef enum StdVideoH264NonVclNaluType { + STD_VIDEO_H264_NON_VCL_NALU_TYPE_SPS = 0, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_PPS = 1, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_AUD = 2, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_PREFIX = 3, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_SEQUENCE = 4, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_STREAM = 5, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_PRECODED = 6, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264NonVclNaluType; +typedef struct StdVideoH264SpsVuiFlags { + uint32_t aspect_ratio_info_present_flag : 1; + uint32_t overscan_info_present_flag : 1; + uint32_t overscan_appropriate_flag : 1; + uint32_t video_signal_type_present_flag : 1; + uint32_t video_full_range_flag : 1; + uint32_t color_description_present_flag : 1; + uint32_t chroma_loc_info_present_flag : 1; + uint32_t timing_info_present_flag : 1; + uint32_t fixed_frame_rate_flag : 1; + uint32_t bitstream_restriction_flag : 1; 
+ uint32_t nal_hrd_parameters_present_flag : 1; + uint32_t vcl_hrd_parameters_present_flag : 1; +} StdVideoH264SpsVuiFlags; + +typedef struct StdVideoH264HrdParameters { + uint8_t cpb_cnt_minus1; + uint8_t bit_rate_scale; + uint8_t cpb_size_scale; + uint8_t reserved1; + uint32_t bit_rate_value_minus1[STD_VIDEO_H264_CPB_CNT_LIST_SIZE]; + uint32_t cpb_size_value_minus1[STD_VIDEO_H264_CPB_CNT_LIST_SIZE]; + uint8_t cbr_flag[STD_VIDEO_H264_CPB_CNT_LIST_SIZE]; + uint32_t initial_cpb_removal_delay_length_minus1; + uint32_t cpb_removal_delay_length_minus1; + uint32_t dpb_output_delay_length_minus1; + uint32_t time_offset_length; +} StdVideoH264HrdParameters; + +typedef struct StdVideoH264SequenceParameterSetVui { + StdVideoH264SpsVuiFlags flags; + StdVideoH264AspectRatioIdc aspect_ratio_idc; + uint16_t sar_width; + uint16_t sar_height; + uint8_t video_format; + uint8_t colour_primaries; + uint8_t transfer_characteristics; + uint8_t matrix_coefficients; + uint32_t num_units_in_tick; + uint32_t time_scale; + uint8_t max_num_reorder_frames; + uint8_t max_dec_frame_buffering; + uint8_t chroma_sample_loc_type_top_field; + uint8_t chroma_sample_loc_type_bottom_field; + uint32_t reserved1; + const StdVideoH264HrdParameters* pHrdParameters; +} StdVideoH264SequenceParameterSetVui; + +typedef struct StdVideoH264SpsFlags { + uint32_t constraint_set0_flag : 1; + uint32_t constraint_set1_flag : 1; + uint32_t constraint_set2_flag : 1; + uint32_t constraint_set3_flag : 1; + uint32_t constraint_set4_flag : 1; + uint32_t constraint_set5_flag : 1; + uint32_t direct_8x8_inference_flag : 1; + uint32_t mb_adaptive_frame_field_flag : 1; + uint32_t frame_mbs_only_flag : 1; + uint32_t delta_pic_order_always_zero_flag : 1; + uint32_t separate_colour_plane_flag : 1; + uint32_t gaps_in_frame_num_value_allowed_flag : 1; + uint32_t qpprime_y_zero_transform_bypass_flag : 1; + uint32_t frame_cropping_flag : 1; + uint32_t seq_scaling_matrix_present_flag : 1; + uint32_t vui_parameters_present_flag : 1; +} StdVideoH264SpsFlags; + +typedef struct StdVideoH264ScalingLists { + uint16_t scaling_list_present_mask; + uint16_t use_default_scaling_matrix_mask; + uint8_t ScalingList4x4[STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS][STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS]; + uint8_t ScalingList8x8[STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS][STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS]; +} StdVideoH264ScalingLists; + +typedef struct StdVideoH264SequenceParameterSet { + StdVideoH264SpsFlags flags; + StdVideoH264ProfileIdc profile_idc; + StdVideoH264LevelIdc level_idc; + StdVideoH264ChromaFormatIdc chroma_format_idc; + uint8_t seq_parameter_set_id; + uint8_t bit_depth_luma_minus8; + uint8_t bit_depth_chroma_minus8; + uint8_t log2_max_frame_num_minus4; + StdVideoH264PocType pic_order_cnt_type; + int32_t offset_for_non_ref_pic; + int32_t offset_for_top_to_bottom_field; + uint8_t log2_max_pic_order_cnt_lsb_minus4; + uint8_t num_ref_frames_in_pic_order_cnt_cycle; + uint8_t max_num_ref_frames; + uint8_t reserved1; + uint32_t pic_width_in_mbs_minus1; + uint32_t pic_height_in_map_units_minus1; + uint32_t frame_crop_left_offset; + uint32_t frame_crop_right_offset; + uint32_t frame_crop_top_offset; + uint32_t frame_crop_bottom_offset; + uint32_t reserved2; + const int32_t* pOffsetForRefFrame; + const StdVideoH264ScalingLists* pScalingLists; + const StdVideoH264SequenceParameterSetVui* pSequenceParameterSetVui; +} StdVideoH264SequenceParameterSet; + +typedef struct StdVideoH264PpsFlags { + uint32_t transform_8x8_mode_flag : 1; + uint32_t 
redundant_pic_cnt_present_flag : 1; + uint32_t constrained_intra_pred_flag : 1; + uint32_t deblocking_filter_control_present_flag : 1; + uint32_t weighted_pred_flag : 1; + uint32_t bottom_field_pic_order_in_frame_present_flag : 1; + uint32_t entropy_coding_mode_flag : 1; + uint32_t pic_scaling_matrix_present_flag : 1; +} StdVideoH264PpsFlags; + +typedef struct StdVideoH264PictureParameterSet { + StdVideoH264PpsFlags flags; + uint8_t seq_parameter_set_id; + uint8_t pic_parameter_set_id; + uint8_t num_ref_idx_l0_default_active_minus1; + uint8_t num_ref_idx_l1_default_active_minus1; + StdVideoH264WeightedBipredIdc weighted_bipred_idc; + int8_t pic_init_qp_minus26; + int8_t pic_init_qs_minus26; + int8_t chroma_qp_index_offset; + int8_t second_chroma_qp_index_offset; + const StdVideoH264ScalingLists* pScalingLists; +} StdVideoH264PictureParameterSet; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vk_video/vulkan_video_codec_h264std_decode.h b/include/vk_video/vulkan_video_codec_h264std_decode.h new file mode 100644 index 00000000..d6a90b49 --- /dev/null +++ b/include/vk_video/vulkan_video_codec_h264std_decode.h @@ -0,0 +1,77 @@ +#ifndef VULKAN_VIDEO_CODEC_H264STD_DECODE_H_ +#define VULKAN_VIDEO_CODEC_H264STD_DECODE_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h264std_decode is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codec_h264std_decode 1 +#include "vulkan_video_codec_h264std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264_decode" +#define STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE 2 + +typedef enum StdVideoDecodeH264FieldOrderCount { + STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_TOP = 0, + STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_BOTTOM = 1, + STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_INVALID = 0x7FFFFFFF, + STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_MAX_ENUM = 0x7FFFFFFF +} StdVideoDecodeH264FieldOrderCount; +typedef struct StdVideoDecodeH264PictureInfoFlags { + uint32_t field_pic_flag : 1; + uint32_t is_intra : 1; + uint32_t IdrPicFlag : 1; + uint32_t bottom_field_flag : 1; + uint32_t is_reference : 1; + uint32_t complementary_field_pair : 1; +} StdVideoDecodeH264PictureInfoFlags; + +typedef struct StdVideoDecodeH264PictureInfo { + StdVideoDecodeH264PictureInfoFlags flags; + uint8_t seq_parameter_set_id; + uint8_t pic_parameter_set_id; + uint8_t reserved1; + uint8_t reserved2; + uint16_t frame_num; + uint16_t idr_pic_id; + int32_t PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE]; +} StdVideoDecodeH264PictureInfo; + +typedef struct StdVideoDecodeH264ReferenceInfoFlags { + uint32_t top_field_flag : 1; + uint32_t bottom_field_flag : 1; + uint32_t used_for_long_term_reference : 1; + uint32_t is_non_existing : 1; +} StdVideoDecodeH264ReferenceInfoFlags; + +typedef struct StdVideoDecodeH264ReferenceInfo { + StdVideoDecodeH264ReferenceInfoFlags flags; + uint16_t FrameNum; + uint16_t reserved; + int32_t PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE]; +} StdVideoDecodeH264ReferenceInfo; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git 
a/include/vk_video/vulkan_video_codec_h264std_encode.h b/include/vk_video/vulkan_video_codec_h264std_encode.h new file mode 100644 index 00000000..410b1b25 --- /dev/null +++ b/include/vk_video/vulkan_video_codec_h264std_encode.h @@ -0,0 +1,147 @@ +#ifndef VULKAN_VIDEO_CODEC_H264STD_ENCODE_H_ +#define VULKAN_VIDEO_CODEC_H264STD_ENCODE_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h264std_encode is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codec_h264std_encode 1 +#include "vulkan_video_codec_h264std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264_encode" +typedef struct StdVideoEncodeH264WeightTableFlags { + uint32_t luma_weight_l0_flag; + uint32_t chroma_weight_l0_flag; + uint32_t luma_weight_l1_flag; + uint32_t chroma_weight_l1_flag; +} StdVideoEncodeH264WeightTableFlags; + +typedef struct StdVideoEncodeH264WeightTable { + StdVideoEncodeH264WeightTableFlags flags; + uint8_t luma_log2_weight_denom; + uint8_t chroma_log2_weight_denom; + int8_t luma_weight_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + int8_t luma_offset_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + int8_t chroma_weight_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES]; + int8_t chroma_offset_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES]; + int8_t luma_weight_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + int8_t luma_offset_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + int8_t chroma_weight_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES]; + int8_t chroma_offset_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES]; +} StdVideoEncodeH264WeightTable; + +typedef struct StdVideoEncodeH264SliceHeaderFlags { + uint32_t direct_spatial_mv_pred_flag : 1; + uint32_t num_ref_idx_active_override_flag : 1; + uint32_t reserved : 30; +} StdVideoEncodeH264SliceHeaderFlags; + +typedef struct StdVideoEncodeH264PictureInfoFlags { + uint32_t IdrPicFlag : 1; + uint32_t is_reference : 1; + uint32_t no_output_of_prior_pics_flag : 1; + uint32_t long_term_reference_flag : 1; + uint32_t adaptive_ref_pic_marking_mode_flag : 1; + uint32_t reserved : 27; +} StdVideoEncodeH264PictureInfoFlags; + +typedef struct StdVideoEncodeH264ReferenceInfoFlags { + uint32_t used_for_long_term_reference : 1; + uint32_t reserved : 31; +} StdVideoEncodeH264ReferenceInfoFlags; + +typedef struct StdVideoEncodeH264ReferenceListsInfoFlags { + uint32_t ref_pic_list_modification_flag_l0 : 1; + uint32_t ref_pic_list_modification_flag_l1 : 1; + uint32_t reserved : 30; +} StdVideoEncodeH264ReferenceListsInfoFlags; + +typedef struct StdVideoEncodeH264RefListModEntry { + StdVideoH264ModificationOfPicNumsIdc modification_of_pic_nums_idc; + uint16_t abs_diff_pic_num_minus1; + uint16_t long_term_pic_num; +} StdVideoEncodeH264RefListModEntry; + +typedef struct StdVideoEncodeH264RefPicMarkingEntry { + StdVideoH264MemMgmtControlOp memory_management_control_operation; + uint16_t difference_of_pic_nums_minus1; + uint16_t long_term_pic_num; + uint16_t long_term_frame_idx; + uint16_t max_long_term_frame_idx_plus1; +} 
StdVideoEncodeH264RefPicMarkingEntry; + +typedef struct StdVideoEncodeH264ReferenceListsInfo { + StdVideoEncodeH264ReferenceListsInfoFlags flags; + uint8_t num_ref_idx_l0_active_minus1; + uint8_t num_ref_idx_l1_active_minus1; + uint8_t RefPicList0[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + uint8_t RefPicList1[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + uint8_t refList0ModOpCount; + uint8_t refList1ModOpCount; + uint8_t refPicMarkingOpCount; + uint8_t reserved1[7]; + const StdVideoEncodeH264RefListModEntry* pRefList0ModOperations; + const StdVideoEncodeH264RefListModEntry* pRefList1ModOperations; + const StdVideoEncodeH264RefPicMarkingEntry* pRefPicMarkingOperations; +} StdVideoEncodeH264ReferenceListsInfo; + +typedef struct StdVideoEncodeH264PictureInfo { + StdVideoEncodeH264PictureInfoFlags flags; + uint8_t seq_parameter_set_id; + uint8_t pic_parameter_set_id; + uint16_t idr_pic_id; + StdVideoH264PictureType primary_pic_type; + uint32_t frame_num; + int32_t PicOrderCnt; + uint8_t temporal_id; + uint8_t reserved1[3]; + const StdVideoEncodeH264ReferenceListsInfo* pRefLists; +} StdVideoEncodeH264PictureInfo; + +typedef struct StdVideoEncodeH264ReferenceInfo { + StdVideoEncodeH264ReferenceInfoFlags flags; + StdVideoH264PictureType primary_pic_type; + uint32_t FrameNum; + int32_t PicOrderCnt; + uint16_t long_term_pic_num; + uint16_t long_term_frame_idx; + uint8_t temporal_id; +} StdVideoEncodeH264ReferenceInfo; + +typedef struct StdVideoEncodeH264SliceHeader { + StdVideoEncodeH264SliceHeaderFlags flags; + uint32_t first_mb_in_slice; + StdVideoH264SliceType slice_type; + int8_t slice_alpha_c0_offset_div2; + int8_t slice_beta_offset_div2; + int8_t slice_qp_delta; + uint8_t reserved1; + StdVideoH264CabacInitIdc cabac_init_idc; + StdVideoH264DisableDeblockingFilterIdc disable_deblocking_filter_idc; + const StdVideoEncodeH264WeightTable* pWeightTable; +} StdVideoEncodeH264SliceHeader; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vk_video/vulkan_video_codec_h265std.h b/include/vk_video/vulkan_video_codec_h265std.h new file mode 100644 index 00000000..3eecd601 --- /dev/null +++ b/include/vk_video/vulkan_video_codec_h265std.h @@ -0,0 +1,446 @@ +#ifndef VULKAN_VIDEO_CODEC_H265STD_H_ +#define VULKAN_VIDEO_CODEC_H265STD_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h265std is a preprocessor guard. Do not pass it to API calls. 
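/* Editorial aside, not part of the generated header: as with the H.264 headers above, the
 * application fills these H.265 structures with the values it would otherwise encode into
 * VPS/SPS/PPS syntax. A minimal profile/tier/level sketch, using only names defined in this
 * header plus memset() from <string.h> (Main profile, Main tier, level 5.1 are illustrative):
 *
 *     StdVideoH265ProfileTierLevel ptl;
 *
 *     memset(&ptl, 0, sizeof(ptl));
 *
 *     ptl.flags.general_progressive_source_flag    = 1;
 *     ptl.flags.general_frame_only_constraint_flag = 1;
 *     ptl.general_profile_idc                      = STD_VIDEO_H265_PROFILE_IDC_MAIN;
 *     ptl.general_level_idc                        = STD_VIDEO_H265_LEVEL_IDC_5_1;
 *     // general_tier_flag is left at 0, i.e. the Main tier.
 */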
+#define vulkan_video_codec_h265std 1 +#include "vulkan_video_codecs_common.h" +#define STD_VIDEO_H265_CPB_CNT_LIST_SIZE 32 +#define STD_VIDEO_H265_SUBLAYERS_LIST_SIZE 7 +#define STD_VIDEO_H265_SCALING_LIST_4X4_NUM_LISTS 6 +#define STD_VIDEO_H265_SCALING_LIST_4X4_NUM_ELEMENTS 16 +#define STD_VIDEO_H265_SCALING_LIST_8X8_NUM_LISTS 6 +#define STD_VIDEO_H265_SCALING_LIST_8X8_NUM_ELEMENTS 64 +#define STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS 6 +#define STD_VIDEO_H265_SCALING_LIST_16X16_NUM_ELEMENTS 64 +#define STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS 2 +#define STD_VIDEO_H265_SCALING_LIST_32X32_NUM_ELEMENTS 64 +#define STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE 6 +#define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE 19 +#define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE 21 +#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMPONENTS_LIST_SIZE 3 +#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE 128 +#define STD_VIDEO_H265_MAX_NUM_LIST_REF 15 +#define STD_VIDEO_H265_MAX_CHROMA_PLANES 2 +#define STD_VIDEO_H265_MAX_SHORT_TERM_REF_PIC_SETS 64 +#define STD_VIDEO_H265_MAX_DPB_SIZE 16 +#define STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS 32 +#define STD_VIDEO_H265_MAX_LONG_TERM_PICS 16 +#define STD_VIDEO_H265_MAX_DELTA_POC 48 +#define STD_VIDEO_H265_NO_REFERENCE_PICTURE 0xFF + +typedef enum StdVideoH265ChromaFormatIdc { + STD_VIDEO_H265_CHROMA_FORMAT_IDC_MONOCHROME = 0, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_420 = 1, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_422 = 2, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_444 = 3, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265ChromaFormatIdc; + +typedef enum StdVideoH265ProfileIdc { + STD_VIDEO_H265_PROFILE_IDC_MAIN = 1, + STD_VIDEO_H265_PROFILE_IDC_MAIN_10 = 2, + STD_VIDEO_H265_PROFILE_IDC_MAIN_STILL_PICTURE = 3, + STD_VIDEO_H265_PROFILE_IDC_FORMAT_RANGE_EXTENSIONS = 4, + STD_VIDEO_H265_PROFILE_IDC_SCC_EXTENSIONS = 9, + STD_VIDEO_H265_PROFILE_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_PROFILE_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265ProfileIdc; + +typedef enum StdVideoH265LevelIdc { + STD_VIDEO_H265_LEVEL_IDC_1_0 = 0, + STD_VIDEO_H265_LEVEL_IDC_2_0 = 1, + STD_VIDEO_H265_LEVEL_IDC_2_1 = 2, + STD_VIDEO_H265_LEVEL_IDC_3_0 = 3, + STD_VIDEO_H265_LEVEL_IDC_3_1 = 4, + STD_VIDEO_H265_LEVEL_IDC_4_0 = 5, + STD_VIDEO_H265_LEVEL_IDC_4_1 = 6, + STD_VIDEO_H265_LEVEL_IDC_5_0 = 7, + STD_VIDEO_H265_LEVEL_IDC_5_1 = 8, + STD_VIDEO_H265_LEVEL_IDC_5_2 = 9, + STD_VIDEO_H265_LEVEL_IDC_6_0 = 10, + STD_VIDEO_H265_LEVEL_IDC_6_1 = 11, + STD_VIDEO_H265_LEVEL_IDC_6_2 = 12, + STD_VIDEO_H265_LEVEL_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_LEVEL_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265LevelIdc; + +typedef enum StdVideoH265SliceType { + STD_VIDEO_H265_SLICE_TYPE_B = 0, + STD_VIDEO_H265_SLICE_TYPE_P = 1, + STD_VIDEO_H265_SLICE_TYPE_I = 2, + STD_VIDEO_H265_SLICE_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_SLICE_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265SliceType; + +typedef enum StdVideoH265PictureType { + STD_VIDEO_H265_PICTURE_TYPE_P = 0, + STD_VIDEO_H265_PICTURE_TYPE_B = 1, + STD_VIDEO_H265_PICTURE_TYPE_I = 2, + STD_VIDEO_H265_PICTURE_TYPE_IDR = 3, + STD_VIDEO_H265_PICTURE_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_PICTURE_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265PictureType; + +typedef enum StdVideoH265AspectRatioIdc { + STD_VIDEO_H265_ASPECT_RATIO_IDC_UNSPECIFIED = 0, + STD_VIDEO_H265_ASPECT_RATIO_IDC_SQUARE = 1, + STD_VIDEO_H265_ASPECT_RATIO_IDC_12_11 = 2, + STD_VIDEO_H265_ASPECT_RATIO_IDC_10_11 = 3, + 
STD_VIDEO_H265_ASPECT_RATIO_IDC_16_11 = 4, + STD_VIDEO_H265_ASPECT_RATIO_IDC_40_33 = 5, + STD_VIDEO_H265_ASPECT_RATIO_IDC_24_11 = 6, + STD_VIDEO_H265_ASPECT_RATIO_IDC_20_11 = 7, + STD_VIDEO_H265_ASPECT_RATIO_IDC_32_11 = 8, + STD_VIDEO_H265_ASPECT_RATIO_IDC_80_33 = 9, + STD_VIDEO_H265_ASPECT_RATIO_IDC_18_11 = 10, + STD_VIDEO_H265_ASPECT_RATIO_IDC_15_11 = 11, + STD_VIDEO_H265_ASPECT_RATIO_IDC_64_33 = 12, + STD_VIDEO_H265_ASPECT_RATIO_IDC_160_99 = 13, + STD_VIDEO_H265_ASPECT_RATIO_IDC_4_3 = 14, + STD_VIDEO_H265_ASPECT_RATIO_IDC_3_2 = 15, + STD_VIDEO_H265_ASPECT_RATIO_IDC_2_1 = 16, + STD_VIDEO_H265_ASPECT_RATIO_IDC_EXTENDED_SAR = 255, + STD_VIDEO_H265_ASPECT_RATIO_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_ASPECT_RATIO_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265AspectRatioIdc; +typedef struct StdVideoH265DecPicBufMgr { + uint32_t max_latency_increase_plus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; + uint8_t max_dec_pic_buffering_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; + uint8_t max_num_reorder_pics[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; +} StdVideoH265DecPicBufMgr; + +typedef struct StdVideoH265SubLayerHrdParameters { + uint32_t bit_rate_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE]; + uint32_t cpb_size_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE]; + uint32_t cpb_size_du_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE]; + uint32_t bit_rate_du_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE]; + uint32_t cbr_flag; +} StdVideoH265SubLayerHrdParameters; + +typedef struct StdVideoH265HrdFlags { + uint32_t nal_hrd_parameters_present_flag : 1; + uint32_t vcl_hrd_parameters_present_flag : 1; + uint32_t sub_pic_hrd_params_present_flag : 1; + uint32_t sub_pic_cpb_params_in_pic_timing_sei_flag : 1; + uint32_t fixed_pic_rate_general_flag : 8; + uint32_t fixed_pic_rate_within_cvs_flag : 8; + uint32_t low_delay_hrd_flag : 8; +} StdVideoH265HrdFlags; + +typedef struct StdVideoH265HrdParameters { + StdVideoH265HrdFlags flags; + uint8_t tick_divisor_minus2; + uint8_t du_cpb_removal_delay_increment_length_minus1; + uint8_t dpb_output_delay_du_length_minus1; + uint8_t bit_rate_scale; + uint8_t cpb_size_scale; + uint8_t cpb_size_du_scale; + uint8_t initial_cpb_removal_delay_length_minus1; + uint8_t au_cpb_removal_delay_length_minus1; + uint8_t dpb_output_delay_length_minus1; + uint8_t cpb_cnt_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; + uint16_t elemental_duration_in_tc_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; + uint16_t reserved[3]; + const StdVideoH265SubLayerHrdParameters* pSubLayerHrdParametersNal; + const StdVideoH265SubLayerHrdParameters* pSubLayerHrdParametersVcl; +} StdVideoH265HrdParameters; + +typedef struct StdVideoH265VpsFlags { + uint32_t vps_temporal_id_nesting_flag : 1; + uint32_t vps_sub_layer_ordering_info_present_flag : 1; + uint32_t vps_timing_info_present_flag : 1; + uint32_t vps_poc_proportional_to_timing_flag : 1; +} StdVideoH265VpsFlags; + +typedef struct StdVideoH265ProfileTierLevelFlags { + uint32_t general_tier_flag : 1; + uint32_t general_progressive_source_flag : 1; + uint32_t general_interlaced_source_flag : 1; + uint32_t general_non_packed_constraint_flag : 1; + uint32_t general_frame_only_constraint_flag : 1; +} StdVideoH265ProfileTierLevelFlags; + +typedef struct StdVideoH265ProfileTierLevel { + StdVideoH265ProfileTierLevelFlags flags; + StdVideoH265ProfileIdc general_profile_idc; + StdVideoH265LevelIdc general_level_idc; +} StdVideoH265ProfileTierLevel; + +typedef struct StdVideoH265VideoParameterSet { + StdVideoH265VpsFlags flags; + uint8_t vps_video_parameter_set_id; + uint8_t 
vps_max_sub_layers_minus1; + uint8_t reserved1; + uint8_t reserved2; + uint32_t vps_num_units_in_tick; + uint32_t vps_time_scale; + uint32_t vps_num_ticks_poc_diff_one_minus1; + uint32_t reserved3; + const StdVideoH265DecPicBufMgr* pDecPicBufMgr; + const StdVideoH265HrdParameters* pHrdParameters; + const StdVideoH265ProfileTierLevel* pProfileTierLevel; +} StdVideoH265VideoParameterSet; + +typedef struct StdVideoH265ScalingLists { + uint8_t ScalingList4x4[STD_VIDEO_H265_SCALING_LIST_4X4_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_4X4_NUM_ELEMENTS]; + uint8_t ScalingList8x8[STD_VIDEO_H265_SCALING_LIST_8X8_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_8X8_NUM_ELEMENTS]; + uint8_t ScalingList16x16[STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_16X16_NUM_ELEMENTS]; + uint8_t ScalingList32x32[STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_32X32_NUM_ELEMENTS]; + uint8_t ScalingListDCCoef16x16[STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS]; + uint8_t ScalingListDCCoef32x32[STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS]; +} StdVideoH265ScalingLists; + +typedef struct StdVideoH265SpsVuiFlags { + uint32_t aspect_ratio_info_present_flag : 1; + uint32_t overscan_info_present_flag : 1; + uint32_t overscan_appropriate_flag : 1; + uint32_t video_signal_type_present_flag : 1; + uint32_t video_full_range_flag : 1; + uint32_t colour_description_present_flag : 1; + uint32_t chroma_loc_info_present_flag : 1; + uint32_t neutral_chroma_indication_flag : 1; + uint32_t field_seq_flag : 1; + uint32_t frame_field_info_present_flag : 1; + uint32_t default_display_window_flag : 1; + uint32_t vui_timing_info_present_flag : 1; + uint32_t vui_poc_proportional_to_timing_flag : 1; + uint32_t vui_hrd_parameters_present_flag : 1; + uint32_t bitstream_restriction_flag : 1; + uint32_t tiles_fixed_structure_flag : 1; + uint32_t motion_vectors_over_pic_boundaries_flag : 1; + uint32_t restricted_ref_pic_lists_flag : 1; +} StdVideoH265SpsVuiFlags; + +typedef struct StdVideoH265SequenceParameterSetVui { + StdVideoH265SpsVuiFlags flags; + StdVideoH265AspectRatioIdc aspect_ratio_idc; + uint16_t sar_width; + uint16_t sar_height; + uint8_t video_format; + uint8_t colour_primaries; + uint8_t transfer_characteristics; + uint8_t matrix_coeffs; + uint8_t chroma_sample_loc_type_top_field; + uint8_t chroma_sample_loc_type_bottom_field; + uint8_t reserved1; + uint8_t reserved2; + uint16_t def_disp_win_left_offset; + uint16_t def_disp_win_right_offset; + uint16_t def_disp_win_top_offset; + uint16_t def_disp_win_bottom_offset; + uint32_t vui_num_units_in_tick; + uint32_t vui_time_scale; + uint32_t vui_num_ticks_poc_diff_one_minus1; + uint16_t min_spatial_segmentation_idc; + uint16_t reserved3; + uint8_t max_bytes_per_pic_denom; + uint8_t max_bits_per_min_cu_denom; + uint8_t log2_max_mv_length_horizontal; + uint8_t log2_max_mv_length_vertical; + const StdVideoH265HrdParameters* pHrdParameters; +} StdVideoH265SequenceParameterSetVui; + +typedef struct StdVideoH265PredictorPaletteEntries { + uint16_t PredictorPaletteEntries[STD_VIDEO_H265_PREDICTOR_PALETTE_COMPONENTS_LIST_SIZE][STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE]; +} StdVideoH265PredictorPaletteEntries; + +typedef struct StdVideoH265SpsFlags { + uint32_t sps_temporal_id_nesting_flag : 1; + uint32_t separate_colour_plane_flag : 1; + uint32_t conformance_window_flag : 1; + uint32_t sps_sub_layer_ordering_info_present_flag : 1; + uint32_t scaling_list_enabled_flag : 1; + uint32_t sps_scaling_list_data_present_flag : 1; + uint32_t 
amp_enabled_flag : 1; + uint32_t sample_adaptive_offset_enabled_flag : 1; + uint32_t pcm_enabled_flag : 1; + uint32_t pcm_loop_filter_disabled_flag : 1; + uint32_t long_term_ref_pics_present_flag : 1; + uint32_t sps_temporal_mvp_enabled_flag : 1; + uint32_t strong_intra_smoothing_enabled_flag : 1; + uint32_t vui_parameters_present_flag : 1; + uint32_t sps_extension_present_flag : 1; + uint32_t sps_range_extension_flag : 1; + uint32_t transform_skip_rotation_enabled_flag : 1; + uint32_t transform_skip_context_enabled_flag : 1; + uint32_t implicit_rdpcm_enabled_flag : 1; + uint32_t explicit_rdpcm_enabled_flag : 1; + uint32_t extended_precision_processing_flag : 1; + uint32_t intra_smoothing_disabled_flag : 1; + uint32_t high_precision_offsets_enabled_flag : 1; + uint32_t persistent_rice_adaptation_enabled_flag : 1; + uint32_t cabac_bypass_alignment_enabled_flag : 1; + uint32_t sps_scc_extension_flag : 1; + uint32_t sps_curr_pic_ref_enabled_flag : 1; + uint32_t palette_mode_enabled_flag : 1; + uint32_t sps_palette_predictor_initializers_present_flag : 1; + uint32_t intra_boundary_filtering_disabled_flag : 1; +} StdVideoH265SpsFlags; + +typedef struct StdVideoH265ShortTermRefPicSetFlags { + uint32_t inter_ref_pic_set_prediction_flag : 1; + uint32_t delta_rps_sign : 1; +} StdVideoH265ShortTermRefPicSetFlags; + +typedef struct StdVideoH265ShortTermRefPicSet { + StdVideoH265ShortTermRefPicSetFlags flags; + uint32_t delta_idx_minus1; + uint16_t use_delta_flag; + uint16_t abs_delta_rps_minus1; + uint16_t used_by_curr_pic_flag; + uint16_t used_by_curr_pic_s0_flag; + uint16_t used_by_curr_pic_s1_flag; + uint16_t reserved1; + uint8_t reserved2; + uint8_t reserved3; + uint8_t num_negative_pics; + uint8_t num_positive_pics; + uint16_t delta_poc_s0_minus1[STD_VIDEO_H265_MAX_DPB_SIZE]; + uint16_t delta_poc_s1_minus1[STD_VIDEO_H265_MAX_DPB_SIZE]; +} StdVideoH265ShortTermRefPicSet; + +typedef struct StdVideoH265LongTermRefPicsSps { + uint32_t used_by_curr_pic_lt_sps_flag; + uint32_t lt_ref_pic_poc_lsb_sps[STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS]; +} StdVideoH265LongTermRefPicsSps; + +typedef struct StdVideoH265SequenceParameterSet { + StdVideoH265SpsFlags flags; + StdVideoH265ChromaFormatIdc chroma_format_idc; + uint32_t pic_width_in_luma_samples; + uint32_t pic_height_in_luma_samples; + uint8_t sps_video_parameter_set_id; + uint8_t sps_max_sub_layers_minus1; + uint8_t sps_seq_parameter_set_id; + uint8_t bit_depth_luma_minus8; + uint8_t bit_depth_chroma_minus8; + uint8_t log2_max_pic_order_cnt_lsb_minus4; + uint8_t log2_min_luma_coding_block_size_minus3; + uint8_t log2_diff_max_min_luma_coding_block_size; + uint8_t log2_min_luma_transform_block_size_minus2; + uint8_t log2_diff_max_min_luma_transform_block_size; + uint8_t max_transform_hierarchy_depth_inter; + uint8_t max_transform_hierarchy_depth_intra; + uint8_t num_short_term_ref_pic_sets; + uint8_t num_long_term_ref_pics_sps; + uint8_t pcm_sample_bit_depth_luma_minus1; + uint8_t pcm_sample_bit_depth_chroma_minus1; + uint8_t log2_min_pcm_luma_coding_block_size_minus3; + uint8_t log2_diff_max_min_pcm_luma_coding_block_size; + uint8_t reserved1; + uint8_t reserved2; + uint8_t palette_max_size; + uint8_t delta_palette_max_predictor_size; + uint8_t motion_vector_resolution_control_idc; + uint8_t sps_num_palette_predictor_initializers_minus1; + uint32_t conf_win_left_offset; + uint32_t conf_win_right_offset; + uint32_t conf_win_top_offset; + uint32_t conf_win_bottom_offset; + const StdVideoH265ProfileTierLevel* pProfileTierLevel; + const 
StdVideoH265DecPicBufMgr* pDecPicBufMgr; + const StdVideoH265ScalingLists* pScalingLists; + const StdVideoH265ShortTermRefPicSet* pShortTermRefPicSet; + const StdVideoH265LongTermRefPicsSps* pLongTermRefPicsSps; + const StdVideoH265SequenceParameterSetVui* pSequenceParameterSetVui; + const StdVideoH265PredictorPaletteEntries* pPredictorPaletteEntries; +} StdVideoH265SequenceParameterSet; + +typedef struct StdVideoH265PpsFlags { + uint32_t dependent_slice_segments_enabled_flag : 1; + uint32_t output_flag_present_flag : 1; + uint32_t sign_data_hiding_enabled_flag : 1; + uint32_t cabac_init_present_flag : 1; + uint32_t constrained_intra_pred_flag : 1; + uint32_t transform_skip_enabled_flag : 1; + uint32_t cu_qp_delta_enabled_flag : 1; + uint32_t pps_slice_chroma_qp_offsets_present_flag : 1; + uint32_t weighted_pred_flag : 1; + uint32_t weighted_bipred_flag : 1; + uint32_t transquant_bypass_enabled_flag : 1; + uint32_t tiles_enabled_flag : 1; + uint32_t entropy_coding_sync_enabled_flag : 1; + uint32_t uniform_spacing_flag : 1; + uint32_t loop_filter_across_tiles_enabled_flag : 1; + uint32_t pps_loop_filter_across_slices_enabled_flag : 1; + uint32_t deblocking_filter_control_present_flag : 1; + uint32_t deblocking_filter_override_enabled_flag : 1; + uint32_t pps_deblocking_filter_disabled_flag : 1; + uint32_t pps_scaling_list_data_present_flag : 1; + uint32_t lists_modification_present_flag : 1; + uint32_t slice_segment_header_extension_present_flag : 1; + uint32_t pps_extension_present_flag : 1; + uint32_t cross_component_prediction_enabled_flag : 1; + uint32_t chroma_qp_offset_list_enabled_flag : 1; + uint32_t pps_curr_pic_ref_enabled_flag : 1; + uint32_t residual_adaptive_colour_transform_enabled_flag : 1; + uint32_t pps_slice_act_qp_offsets_present_flag : 1; + uint32_t pps_palette_predictor_initializers_present_flag : 1; + uint32_t monochrome_palette_flag : 1; + uint32_t pps_range_extension_flag : 1; +} StdVideoH265PpsFlags; + +typedef struct StdVideoH265PictureParameterSet { + StdVideoH265PpsFlags flags; + uint8_t pps_pic_parameter_set_id; + uint8_t pps_seq_parameter_set_id; + uint8_t sps_video_parameter_set_id; + uint8_t num_extra_slice_header_bits; + uint8_t num_ref_idx_l0_default_active_minus1; + uint8_t num_ref_idx_l1_default_active_minus1; + int8_t init_qp_minus26; + uint8_t diff_cu_qp_delta_depth; + int8_t pps_cb_qp_offset; + int8_t pps_cr_qp_offset; + int8_t pps_beta_offset_div2; + int8_t pps_tc_offset_div2; + uint8_t log2_parallel_merge_level_minus2; + uint8_t log2_max_transform_skip_block_size_minus2; + uint8_t diff_cu_chroma_qp_offset_depth; + uint8_t chroma_qp_offset_list_len_minus1; + int8_t cb_qp_offset_list[STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE]; + int8_t cr_qp_offset_list[STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE]; + uint8_t log2_sao_offset_scale_luma; + uint8_t log2_sao_offset_scale_chroma; + int8_t pps_act_y_qp_offset_plus5; + int8_t pps_act_cb_qp_offset_plus5; + int8_t pps_act_cr_qp_offset_plus3; + uint8_t pps_num_palette_predictor_initializers; + uint8_t luma_bit_depth_entry_minus8; + uint8_t chroma_bit_depth_entry_minus8; + uint8_t num_tile_columns_minus1; + uint8_t num_tile_rows_minus1; + uint8_t reserved1; + uint8_t reserved2; + uint16_t column_width_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE]; + uint16_t row_height_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE]; + uint32_t reserved3; + const StdVideoH265ScalingLists* pScalingLists; + const StdVideoH265PredictorPaletteEntries* pPredictorPaletteEntries; +} 
StdVideoH265PictureParameterSet; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vk_video/vulkan_video_codec_h265std_decode.h b/include/vk_video/vulkan_video_codec_h265std_decode.h new file mode 100644 index 00000000..a9e1a096 --- /dev/null +++ b/include/vk_video/vulkan_video_codec_h265std_decode.h @@ -0,0 +1,67 @@ +#ifndef VULKAN_VIDEO_CODEC_H265STD_DECODE_H_ +#define VULKAN_VIDEO_CODEC_H265STD_DECODE_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h265std_decode is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codec_h265std_decode 1 +#include "vulkan_video_codec_h265std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265_decode" +#define STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE 8 +typedef struct StdVideoDecodeH265PictureInfoFlags { + uint32_t IrapPicFlag : 1; + uint32_t IdrPicFlag : 1; + uint32_t IsReference : 1; + uint32_t short_term_ref_pic_set_sps_flag : 1; +} StdVideoDecodeH265PictureInfoFlags; + +typedef struct StdVideoDecodeH265PictureInfo { + StdVideoDecodeH265PictureInfoFlags flags; + uint8_t sps_video_parameter_set_id; + uint8_t pps_seq_parameter_set_id; + uint8_t pps_pic_parameter_set_id; + uint8_t NumDeltaPocsOfRefRpsIdx; + int32_t PicOrderCntVal; + uint16_t NumBitsForSTRefPicSetInSlice; + uint16_t reserved; + uint8_t RefPicSetStCurrBefore[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE]; + uint8_t RefPicSetStCurrAfter[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE]; + uint8_t RefPicSetLtCurr[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE]; +} StdVideoDecodeH265PictureInfo; + +typedef struct StdVideoDecodeH265ReferenceInfoFlags { + uint32_t used_for_long_term_reference : 1; + uint32_t unused_for_reference : 1; +} StdVideoDecodeH265ReferenceInfoFlags; + +typedef struct StdVideoDecodeH265ReferenceInfo { + StdVideoDecodeH265ReferenceInfoFlags flags; + int32_t PicOrderCntVal; +} StdVideoDecodeH265ReferenceInfo; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vk_video/vulkan_video_codec_h265std_encode.h b/include/vk_video/vulkan_video_codec_h265std_encode.h new file mode 100644 index 00000000..fe2f28d5 --- /dev/null +++ b/include/vk_video/vulkan_video_codec_h265std_encode.h @@ -0,0 +1,157 @@ +#ifndef VULKAN_VIDEO_CODEC_H265STD_ENCODE_H_ +#define VULKAN_VIDEO_CODEC_H265STD_ENCODE_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h265std_encode is a preprocessor guard. Do not pass it to API calls. 
+#define vulkan_video_codec_h265std_encode 1 +#include "vulkan_video_codec_h265std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265_encode" +typedef struct StdVideoEncodeH265WeightTableFlags { + uint16_t luma_weight_l0_flag; + uint16_t chroma_weight_l0_flag; + uint16_t luma_weight_l1_flag; + uint16_t chroma_weight_l1_flag; +} StdVideoEncodeH265WeightTableFlags; + +typedef struct StdVideoEncodeH265WeightTable { + StdVideoEncodeH265WeightTableFlags flags; + uint8_t luma_log2_weight_denom; + int8_t delta_chroma_log2_weight_denom; + int8_t delta_luma_weight_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + int8_t luma_offset_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + int8_t delta_chroma_weight_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES]; + int8_t delta_chroma_offset_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES]; + int8_t delta_luma_weight_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + int8_t luma_offset_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + int8_t delta_chroma_weight_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES]; + int8_t delta_chroma_offset_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES]; +} StdVideoEncodeH265WeightTable; + +typedef struct StdVideoEncodeH265SliceSegmentHeaderFlags { + uint32_t first_slice_segment_in_pic_flag : 1; + uint32_t dependent_slice_segment_flag : 1; + uint32_t slice_sao_luma_flag : 1; + uint32_t slice_sao_chroma_flag : 1; + uint32_t num_ref_idx_active_override_flag : 1; + uint32_t mvd_l1_zero_flag : 1; + uint32_t cabac_init_flag : 1; + uint32_t cu_chroma_qp_offset_enabled_flag : 1; + uint32_t deblocking_filter_override_flag : 1; + uint32_t slice_deblocking_filter_disabled_flag : 1; + uint32_t collocated_from_l0_flag : 1; + uint32_t slice_loop_filter_across_slices_enabled_flag : 1; + uint32_t reserved : 20; +} StdVideoEncodeH265SliceSegmentHeaderFlags; + +typedef struct StdVideoEncodeH265SliceSegmentHeader { + StdVideoEncodeH265SliceSegmentHeaderFlags flags; + StdVideoH265SliceType slice_type; + uint32_t slice_segment_address; + uint8_t collocated_ref_idx; + uint8_t MaxNumMergeCand; + int8_t slice_cb_qp_offset; + int8_t slice_cr_qp_offset; + int8_t slice_beta_offset_div2; + int8_t slice_tc_offset_div2; + int8_t slice_act_y_qp_offset; + int8_t slice_act_cb_qp_offset; + int8_t slice_act_cr_qp_offset; + int8_t slice_qp_delta; + uint16_t reserved1; + const StdVideoEncodeH265WeightTable* pWeightTable; +} StdVideoEncodeH265SliceSegmentHeader; + +typedef struct StdVideoEncodeH265ReferenceListsInfoFlags { + uint32_t ref_pic_list_modification_flag_l0 : 1; + uint32_t ref_pic_list_modification_flag_l1 : 1; + uint32_t reserved : 30; +} StdVideoEncodeH265ReferenceListsInfoFlags; + +typedef struct StdVideoEncodeH265ReferenceListsInfo { + StdVideoEncodeH265ReferenceListsInfoFlags flags; + uint8_t num_ref_idx_l0_active_minus1; + uint8_t num_ref_idx_l1_active_minus1; + uint8_t RefPicList0[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + uint8_t RefPicList1[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + uint8_t list_entry_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + uint8_t list_entry_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF]; +} StdVideoEncodeH265ReferenceListsInfo; + +typedef struct StdVideoEncodeH265PictureInfoFlags { + uint32_t is_reference : 1; + uint32_t IrapPicFlag : 1; + 
uint32_t used_for_long_term_reference : 1; + uint32_t discardable_flag : 1; + uint32_t cross_layer_bla_flag : 1; + uint32_t pic_output_flag : 1; + uint32_t no_output_of_prior_pics_flag : 1; + uint32_t short_term_ref_pic_set_sps_flag : 1; + uint32_t slice_temporal_mvp_enabled_flag : 1; + uint32_t reserved : 23; +} StdVideoEncodeH265PictureInfoFlags; + +typedef struct StdVideoEncodeH265LongTermRefPics { + uint8_t num_long_term_sps; + uint8_t num_long_term_pics; + uint8_t lt_idx_sps[STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS]; + uint8_t poc_lsb_lt[STD_VIDEO_H265_MAX_LONG_TERM_PICS]; + uint16_t used_by_curr_pic_lt_flag; + uint8_t delta_poc_msb_present_flag[STD_VIDEO_H265_MAX_DELTA_POC]; + uint8_t delta_poc_msb_cycle_lt[STD_VIDEO_H265_MAX_DELTA_POC]; +} StdVideoEncodeH265LongTermRefPics; + +typedef struct StdVideoEncodeH265PictureInfo { + StdVideoEncodeH265PictureInfoFlags flags; + StdVideoH265PictureType pic_type; + uint8_t sps_video_parameter_set_id; + uint8_t pps_seq_parameter_set_id; + uint8_t pps_pic_parameter_set_id; + uint8_t short_term_ref_pic_set_idx; + int32_t PicOrderCntVal; + uint8_t TemporalId; + uint8_t reserved1[7]; + const StdVideoEncodeH265ReferenceListsInfo* pRefLists; + const StdVideoH265ShortTermRefPicSet* pShortTermRefPicSet; + const StdVideoEncodeH265LongTermRefPics* pLongTermRefPics; +} StdVideoEncodeH265PictureInfo; + +typedef struct StdVideoEncodeH265ReferenceInfoFlags { + uint32_t used_for_long_term_reference : 1; + uint32_t unused_for_reference : 1; + uint32_t reserved : 30; +} StdVideoEncodeH265ReferenceInfoFlags; + +typedef struct StdVideoEncodeH265ReferenceInfo { + StdVideoEncodeH265ReferenceInfoFlags flags; + StdVideoH265PictureType pic_type; + int32_t PicOrderCntVal; + uint8_t TemporalId; +} StdVideoEncodeH265ReferenceInfo; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vk_video/vulkan_video_codecs_common.h b/include/vk_video/vulkan_video_codecs_common.h new file mode 100644 index 00000000..a5f0f3d9 --- /dev/null +++ b/include/vk_video/vulkan_video_codecs_common.h @@ -0,0 +1,36 @@ +#ifndef VULKAN_VIDEO_CODECS_COMMON_H_ +#define VULKAN_VIDEO_CODECS_COMMON_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codecs_common is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codecs_common 1 +#if !defined(VK_NO_STDINT_H) + #include <stdint.h> +#endif + +#define VK_MAKE_VIDEO_STD_VERSION(major, minor, patch) \ + ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch))) + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vk_enum_string_helper.h b/include/vulkan/vk_enum_string_helper.h new file mode 100644 index 00000000..5b1e5842 --- /dev/null +++ b/include/vulkan/vk_enum_string_helper.h @@ -0,0 +1,10355 @@ +// *** THIS FILE IS GENERATED - DO NOT EDIT *** +// See enum_string_helper_generator.py for modifications +// Copyright 2023 The Khronos Group Inc. +// Copyright 2023 Valve Corporation +// Copyright 2023 LunarG, Inc.
+// +// SPDX-License-Identifier: Apache-2.0 + +#pragma once + +// clang-format off + +#ifdef __cplusplus +#include <string> +#endif +#include <vulkan/vulkan.h> +static inline const char* string_VkResult(VkResult input_value) { + switch (input_value) { + case VK_SUCCESS: + return "VK_SUCCESS"; + case VK_NOT_READY: + return "VK_NOT_READY"; + case VK_TIMEOUT: + return "VK_TIMEOUT"; + case VK_EVENT_SET: + return "VK_EVENT_SET"; + case VK_EVENT_RESET: + return "VK_EVENT_RESET"; + case VK_INCOMPLETE: + return "VK_INCOMPLETE"; + case VK_ERROR_OUT_OF_HOST_MEMORY: + return "VK_ERROR_OUT_OF_HOST_MEMORY"; + case VK_ERROR_OUT_OF_DEVICE_MEMORY: + return "VK_ERROR_OUT_OF_DEVICE_MEMORY"; + case VK_ERROR_INITIALIZATION_FAILED: + return "VK_ERROR_INITIALIZATION_FAILED"; + case VK_ERROR_DEVICE_LOST: + return "VK_ERROR_DEVICE_LOST"; + case VK_ERROR_MEMORY_MAP_FAILED: + return "VK_ERROR_MEMORY_MAP_FAILED"; + case VK_ERROR_LAYER_NOT_PRESENT: + return "VK_ERROR_LAYER_NOT_PRESENT"; + case VK_ERROR_EXTENSION_NOT_PRESENT: + return "VK_ERROR_EXTENSION_NOT_PRESENT"; + case VK_ERROR_FEATURE_NOT_PRESENT: + return "VK_ERROR_FEATURE_NOT_PRESENT"; + case VK_ERROR_INCOMPATIBLE_DRIVER: + return "VK_ERROR_INCOMPATIBLE_DRIVER"; + case VK_ERROR_TOO_MANY_OBJECTS: + return "VK_ERROR_TOO_MANY_OBJECTS"; + case VK_ERROR_FORMAT_NOT_SUPPORTED: + return "VK_ERROR_FORMAT_NOT_SUPPORTED"; + case VK_ERROR_FRAGMENTED_POOL: + return "VK_ERROR_FRAGMENTED_POOL"; + case VK_ERROR_UNKNOWN: + return "VK_ERROR_UNKNOWN"; + case VK_ERROR_OUT_OF_POOL_MEMORY: + return "VK_ERROR_OUT_OF_POOL_MEMORY"; + case VK_ERROR_INVALID_EXTERNAL_HANDLE: + return "VK_ERROR_INVALID_EXTERNAL_HANDLE"; + case VK_ERROR_FRAGMENTATION: + return "VK_ERROR_FRAGMENTATION"; + case VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS: + return "VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS"; + case VK_PIPELINE_COMPILE_REQUIRED: + return "VK_PIPELINE_COMPILE_REQUIRED"; + case VK_ERROR_NOT_PERMITTED: + return "VK_ERROR_NOT_PERMITTED"; + case VK_ERROR_SURFACE_LOST_KHR: + return "VK_ERROR_SURFACE_LOST_KHR"; + case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR: + return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR"; + case VK_SUBOPTIMAL_KHR: + return "VK_SUBOPTIMAL_KHR"; + case VK_ERROR_OUT_OF_DATE_KHR: + return "VK_ERROR_OUT_OF_DATE_KHR"; + case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR: + return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR"; + case VK_ERROR_VALIDATION_FAILED_EXT: + return "VK_ERROR_VALIDATION_FAILED_EXT"; + case VK_ERROR_INVALID_SHADER_NV: + return "VK_ERROR_INVALID_SHADER_NV"; + case VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR: + return "VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR"; + case VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR: + return "VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR"; + case VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR: + return "VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR"; + case VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR: + return "VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR"; + case VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR: + return "VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR"; + case VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR: + return "VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR"; + case VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT: + return "VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT"; + case VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT: + return "VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT"; + case VK_THREAD_IDLE_KHR: + return "VK_THREAD_IDLE_KHR"; + case VK_THREAD_DONE_KHR: + return "VK_THREAD_DONE_KHR"; + case VK_OPERATION_DEFERRED_KHR: + return
"VK_OPERATION_DEFERRED_KHR"; + case VK_OPERATION_NOT_DEFERRED_KHR: + return "VK_OPERATION_NOT_DEFERRED_KHR"; + case VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR: + return "VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR"; + case VK_ERROR_COMPRESSION_EXHAUSTED_EXT: + return "VK_ERROR_COMPRESSION_EXHAUSTED_EXT"; + case VK_INCOMPATIBLE_SHADER_BINARY_EXT: + return "VK_INCOMPATIBLE_SHADER_BINARY_EXT"; + case VK_PIPELINE_BINARY_MISSING_KHR: + return "VK_PIPELINE_BINARY_MISSING_KHR"; + case VK_ERROR_NOT_ENOUGH_SPACE_KHR: + return "VK_ERROR_NOT_ENOUGH_SPACE_KHR"; + default: + return "Unhandled VkResult"; + } +} +static inline const char* string_VkStructureType(VkStructureType input_value) { + switch (input_value) { + case VK_STRUCTURE_TYPE_APPLICATION_INFO: + return "VK_STRUCTURE_TYPE_APPLICATION_INFO"; + case VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_SUBMIT_INFO: + return "VK_STRUCTURE_TYPE_SUBMIT_INFO"; + case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO: + return "VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO"; + case VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE: + return "VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE"; + case VK_STRUCTURE_TYPE_BIND_SPARSE_INFO: + return "VK_STRUCTURE_TYPE_BIND_SPARSE_INFO"; + case VK_STRUCTURE_TYPE_FENCE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_FENCE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_EVENT_CREATE_INFO: + return "VK_STRUCTURE_TYPE_EVENT_CREATE_INFO"; + case VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO: + return "VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO"; + case VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO: + return "VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO"; + case VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO: + return "VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO"; + case VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO: + return "VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO"; + case VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO"; + case 
VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO"; + case VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO: + return "VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO"; + case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET: + return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET"; + case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET: + return "VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET"; + case VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO: + return "VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO"; + case VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO: + return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO"; + case VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO: + return "VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO"; + case VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO: + return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO"; + case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO: + return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO"; + case VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO: + return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO"; + case VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO: + return "VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO"; + case VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER: + return "VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER"; + case VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER: + return "VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER"; + case VK_STRUCTURE_TYPE_MEMORY_BARRIER: + return "VK_STRUCTURE_TYPE_MEMORY_BARRIER"; + case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES"; + case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO: + return "VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO"; + case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO: + return "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES"; + case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS: + return "VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS"; + case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO: + return "VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO"; + case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO: + return "VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO"; + case VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO: + return "VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO"; + case VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO: + return 
"VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO"; + case VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO: + return "VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO"; + case VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO: + return "VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO"; + case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO: + return "VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO"; + case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO: + return "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES"; + case VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2: + return "VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2"; + case VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2: + return "VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2"; + case VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2: + return "VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2"; + case VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2: + return "VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2"; + case VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2: + return "VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2"; + case VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2: + return "VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2"; + case VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2: + return "VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2"; + case VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2: + return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2"; + case VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2: + return "VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES"; + case VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO: + return "VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO"; + case VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO: + return "VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES"; + case 
VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO: + return "VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES"; + case VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2: + return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2"; + case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO: + return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO"; + case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO: + return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO"; + case VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO: + return "VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO"; + case VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO: + return "VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES"; + case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES: + return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO"; + case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES: + return "VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO"; + case VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES: + return "VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES"; + case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO: + return "VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO"; + case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO: + return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO"; + case VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES: + return "VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES"; + case VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO"; + case VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES: + return "VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES"; + case 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES"; + case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO: + return "VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO"; + case VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2: + return "VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2"; + case VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2: + return "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2"; + case VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2: + return "VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2"; + case VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2: + return "VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2"; + case VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2: + return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2"; + case VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO: + return "VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO"; + case VK_STRUCTURE_TYPE_SUBPASS_END_INFO: + return "VK_STRUCTURE_TYPE_SUBPASS_END_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES"; + case VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE: + return "VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES"; + case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES"; + case 
VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES"; + case VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO: + return "VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO"; + case VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO: + return "VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO"; + case VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO: + return "VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES"; + case VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT: + return "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT"; + case VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT: + return "VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES"; + case VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO: + return "VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO"; + case VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO: + return "VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO"; + case VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO: + return "VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES"; + case VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO: + return "VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO"; + case VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO: + return "VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO"; + case VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO: + return "VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO"; + case VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO: + return "VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES"; + case VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO"; + case 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES"; + case VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO: + return "VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES"; + case VK_STRUCTURE_TYPE_MEMORY_BARRIER_2: + return "VK_STRUCTURE_TYPE_MEMORY_BARRIER_2"; + case VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2: + return "VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2"; + case VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2: + return "VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2"; + case VK_STRUCTURE_TYPE_DEPENDENCY_INFO: + return "VK_STRUCTURE_TYPE_DEPENDENCY_INFO"; + case VK_STRUCTURE_TYPE_SUBMIT_INFO_2: + return "VK_STRUCTURE_TYPE_SUBMIT_INFO_2"; + case VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO: + return "VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO"; + case VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO: + return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES"; + case VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2: + return "VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2"; + case VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2: + return "VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2"; + case VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2: + return "VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2"; + case VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2: + return "VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2"; + case VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2: + return "VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2"; + case VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2: + return "VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2"; + case VK_STRUCTURE_TYPE_BUFFER_COPY_2: + return "VK_STRUCTURE_TYPE_BUFFER_COPY_2"; + case VK_STRUCTURE_TYPE_IMAGE_COPY_2: + return "VK_STRUCTURE_TYPE_IMAGE_COPY_2"; + case VK_STRUCTURE_TYPE_IMAGE_BLIT_2: + return "VK_STRUCTURE_TYPE_IMAGE_BLIT_2"; + case VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2: + return "VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2"; + case VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2: + return "VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES"; + case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES: + 
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES"; + case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK: + return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES"; + case VK_STRUCTURE_TYPE_RENDERING_INFO: + return "VK_STRUCTURE_TYPE_RENDERING_INFO"; + case VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO: + return "VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES"; + case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO: + return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES"; + case VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3: + return "VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES"; + case VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS: + return "VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS"; + case VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS: + return "VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_4_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_4_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_4_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_4_PROPERTIES"; + case VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO: + return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES"; + case VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES: + return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES"; + case 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES"; + case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES"; + case VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES"; + case VK_STRUCTURE_TYPE_MEMORY_MAP_INFO: + return "VK_STRUCTURE_TYPE_MEMORY_MAP_INFO"; + case VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO: + return "VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES"; + case VK_STRUCTURE_TYPE_RENDERING_AREA_INFO: + return "VK_STRUCTURE_TYPE_RENDERING_AREA_INFO"; + case VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO: + return "VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO"; + case VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2: + return "VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2"; + case VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2: + return "VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2"; + case VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO"; + case VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO: + return "VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES"; + case VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO: + return "VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO"; + case VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO: + return "VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES"; + case VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS: + return "VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS"; + case VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO: + return "VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO"; + case VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO: + return "VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO"; + case VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO: + return "VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO"; + case 
VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO: + return "VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES"; + case VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES"; + case VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY: + return "VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY"; + case VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY: + return "VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY"; + case VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO: + return "VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO"; + case VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO: + return "VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO"; + case VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO: + return "VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO"; + case VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO: + return "VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO"; + case VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE: + return "VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE"; + case VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY: + return "VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY"; + case VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PRESENT_INFO_KHR: + return "VK_STRUCTURE_TYPE_PRESENT_INFO_KHR"; + case VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR: + return "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR"; + case VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR: + return "VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR"; + case VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR: + return "VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR"; + case VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR: + return "VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR"; + case VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR"; + case 
VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD: + return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD"; + case VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT"; + case VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT"; + case VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT"; + case VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR"; + case VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR: + return "VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR"; + case VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR"; + case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV"; + 
case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV: + return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX: + return "VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX"; + case VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX: + return "VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX"; + case VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX: + return "VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX"; + case VK_STRUCTURE_TYPE_CU_MODULE_TEXTURING_MODE_CREATE_INFO_NVX: + return "VK_STRUCTURE_TYPE_CU_MODULE_TEXTURING_MODE_CREATE_INFO_NVX"; + case VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX: + return "VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX"; + case VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX: + return "VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PICTURE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PICTURE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_GOP_REMAINING_FRAME_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_GOP_REMAINING_FRAME_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUALITY_LEVEL_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUALITY_LEVEL_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_GET_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_GET_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_FEEDBACK_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_FEEDBACK_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR: + return 
"VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PICTURE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PICTURE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_GOP_REMAINING_FRAME_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_GOP_REMAINING_FRAME_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUALITY_LEVEL_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUALITY_LEVEL_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_GET_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_GET_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_FEEDBACK_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_FEEDBACK_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR"; + case VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD: + return "VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD"; + case VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP: + return "VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV"; + case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV: + return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV"; + case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV: + return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV"; + case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV: + return 
"VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV"; + case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV: + return "VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV"; + case VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT: + return "VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT"; + case VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN: + return "VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN"; + case VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR: + return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR"; + case VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR: + return "VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR"; + case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR: + return "VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR"; + case VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR: + return "VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR"; + case VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR: + return "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR"; + case VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR: + return "VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR"; + case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT: + return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT: + return "VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT"; + case VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR: + return "VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT: + return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT"; + case VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT: + return "VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT"; + case VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT"; + case VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT: + return "VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT"; + case VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT: + return 
"VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE: + return "VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX"; + case VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX: + return "VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX"; + case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_HDR_METADATA_EXT: + return "VK_STRUCTURE_TYPE_HDR_METADATA_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RELAXED_LINE_RASTERIZATION_FEATURES_IMG: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RELAXED_LINE_RASTERIZATION_FEATURES_IMG"; + case VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR: + return "VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR"; + case VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR: + return "VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR: + return "VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR"; + case VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR: + return "VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR"; + case VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR: + return "VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR"; + case VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR: + return "VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR: + return 
"VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR"; + case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR: + return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR"; + case VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR: + return "VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR"; + case VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR: + return "VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR"; + case VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR: + return "VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR"; + case VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR: + return "VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR"; + case VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR: + return "VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR"; + case VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR: + return "VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR"; + case VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK: + return "VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK"; + case VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK: + return "VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK"; + case VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT"; + case VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT"; + case VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT: + return "VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT"; + case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT: + return "VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT"; + case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID: + return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID"; + case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID: + return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID"; + case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID: + return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID"; + case VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID: + return "VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID"; + case VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID: + return "VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID"; + case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID: + return "VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID"; + case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID: + return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX"; + case VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX: + return "VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX"; + case VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX: + return "VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX"; + case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX: + return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX"; +#endif // VK_ENABLE_BETA_EXTENSIONS + case VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD: + return 
"VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_BFLOAT16_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_BFLOAT16_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT: + return "VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT"; + case VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT: + return "VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR: + return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR"; + case VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR: + return "VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR"; + case VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR: + return "VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR"; + case VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR: + return "VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR: + return 
"VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT: + return "VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT"; + case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT: + return "VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT"; + case VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR"; +#endif // VK_ENABLE_BETA_EXTENSIONS + case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV"; + case 
VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_GEOMETRY_NV: + return "VK_STRUCTURE_TYPE_GEOMETRY_NV"; + case VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV: + return "VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV"; + case VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV: + return "VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV"; + case VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV: + return "VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV"; + case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV: + return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT"; + case VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT"; + case VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD: + return "VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR: + return 
"VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR"; + case VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD: + return "VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP: + return "VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV"; + case VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV: + return "VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV"; + case VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV: + return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV"; + case VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV: + return "VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL"; + case VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL: + return "VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL"; + case VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL: + return "VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL"; + case VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL: + return "VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL"; + case VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL: + return "VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL"; + case VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL: + return "VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL"; + case VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL: + return "VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD: + return "VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD"; + case VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD: + return "VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD"; + case VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT: + return 
"VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT: + return "VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT"; + case VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR: + return "VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR"; + case VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR: + return "VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_QUAD_CONTROL_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_QUAD_CONTROL_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV: + return 
"VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV"; + case VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV: + return "VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT"; + case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT: + return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT"; + case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT: + return "VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT"; + case VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_PROPERTIES_EXT"; + case 
VK_STRUCTURE_TYPE_MEMORY_MAP_PLACED_INFO_EXT: + return "VK_STRUCTURE_TYPE_MEMORY_MAP_PLACED_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT: + return "VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT"; + case VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT: + return "VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT"; + case VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_COMPATIBILITY_EXT: + return "VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_COMPATIBILITY_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT"; + case VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODE_INFO_EXT"; + case VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_RELEASE_SWAPCHAIN_IMAGES_INFO_EXT: + return "VK_STRUCTURE_TYPE_RELEASE_SWAPCHAIN_IMAGES_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV: + return "VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV"; + case VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV: + return "VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV"; + case VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV: + return "VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV"; + case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV: + return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM: + return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM"; + case VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM: + return "VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_BIAS_CONTROL_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_BIAS_CONTROL_FEATURES_EXT"; + case 
VK_STRUCTURE_TYPE_DEPTH_BIAS_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEPTH_BIAS_INFO_EXT"; + case VK_STRUCTURE_TYPE_DEPTH_BIAS_REPRESENTATION_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEPTH_BIAS_REPRESENTATION_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT: + return "VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV"; + case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV: + return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV"; + case VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_PRESENT_ID_KHR: + return "VK_STRUCTURE_TYPE_PRESENT_ID_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR"; + case VK_STRUCTURE_TYPE_QUERY_POOL_VIDEO_ENCODE_FEEDBACK_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_QUERY_POOL_VIDEO_ENCODE_FEEDBACK_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_GET_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_GET_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_FEEDBACK_INFO_KHR: + 
return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_FEEDBACK_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV"; + case VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + case VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV: + return "VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV"; +#endif // VK_ENABLE_BETA_EXTENSIONS + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_SHADING_FEATURES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_SHADING_FEATURES_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_SHADING_PROPERTIES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_SHADING_PROPERTIES_QCOM"; + case VK_STRUCTURE_TYPE_RENDER_PASS_TILE_SHADING_CREATE_INFO_QCOM: + return "VK_STRUCTURE_TYPE_RENDER_PASS_TILE_SHADING_CREATE_INFO_QCOM"; + case VK_STRUCTURE_TYPE_PER_TILE_BEGIN_INFO_QCOM: + return "VK_STRUCTURE_TYPE_PER_TILE_BEGIN_INFO_QCOM"; + case VK_STRUCTURE_TYPE_PER_TILE_END_INFO_QCOM: + return "VK_STRUCTURE_TYPE_PER_TILE_END_INFO_QCOM"; + case VK_STRUCTURE_TYPE_DISPATCH_TILE_INFO_QCOM: + return "VK_STRUCTURE_TYPE_DISPATCH_TILE_INFO_QCOM"; + case VK_STRUCTURE_TYPE_QUERY_LOW_LATENCY_SUPPORT_NV: + return "VK_STRUCTURE_TYPE_QUERY_LOW_LATENCY_SUPPORT_NV"; + case VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT: + return "VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT"; + case VK_STRUCTURE_TYPE_EXPORT_METAL_DEVICE_INFO_EXT: + return "VK_STRUCTURE_TYPE_EXPORT_METAL_DEVICE_INFO_EXT"; + case VK_STRUCTURE_TYPE_EXPORT_METAL_COMMAND_QUEUE_INFO_EXT: + return "VK_STRUCTURE_TYPE_EXPORT_METAL_COMMAND_QUEUE_INFO_EXT"; + case VK_STRUCTURE_TYPE_EXPORT_METAL_BUFFER_INFO_EXT: + return "VK_STRUCTURE_TYPE_EXPORT_METAL_BUFFER_INFO_EXT"; + case VK_STRUCTURE_TYPE_IMPORT_METAL_BUFFER_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMPORT_METAL_BUFFER_INFO_EXT"; + case VK_STRUCTURE_TYPE_EXPORT_METAL_TEXTURE_INFO_EXT: + return "VK_STRUCTURE_TYPE_EXPORT_METAL_TEXTURE_INFO_EXT"; + case VK_STRUCTURE_TYPE_IMPORT_METAL_TEXTURE_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMPORT_METAL_TEXTURE_INFO_EXT"; + case VK_STRUCTURE_TYPE_EXPORT_METAL_IO_SURFACE_INFO_EXT: + return "VK_STRUCTURE_TYPE_EXPORT_METAL_IO_SURFACE_INFO_EXT"; + case VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT"; + case VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT: + return "VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT"; + case VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT"; + case 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT"; + case VK_STRUCTURE_TYPE_BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: + return "VK_STRUCTURE_TYPE_BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT"; + case VK_STRUCTURE_TYPE_IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT"; + case VK_STRUCTURE_TYPE_IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT"; + case VK_STRUCTURE_TYPE_SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: + return "VK_STRUCTURE_TYPE_SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT"; + case VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV: + return 
"VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM: + return "VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT"; + case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT: + return "VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT"; + case VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT: + return "VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT"; + case VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT: + return "VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT"; + case 
VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT: + return "VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_MODE_FIFO_LATEST_READY_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_MODE_FIFO_LATEST_READY_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA: + return "VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA"; + case VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CREATE_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CREATE_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA: + return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA"; + case VK_STRUCTURE_TYPE_BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_BUFFER_COLLECTION_PROPERTIES_FUCHSIA: + return "VK_STRUCTURE_TYPE_BUFFER_COLLECTION_PROPERTIES_FUCHSIA"; + case VK_STRUCTURE_TYPE_BUFFER_CONSTRAINTS_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_BUFFER_CONSTRAINTS_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_IMAGE_CONSTRAINTS_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_IMAGE_CONSTRAINTS_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_SYSMEM_COLOR_SPACE_FUCHSIA: + return "VK_STRUCTURE_TYPE_SYSMEM_COLOR_SPACE_FUCHSIA"; + case VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI: + return "VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI"; + case VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV: + return "VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV"; + case 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PIPELINE_PROPERTIES_IDENTIFIER_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_PROPERTIES_IDENTIFIER_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAME_BOUNDARY_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAME_BOUNDARY_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_FRAME_BOUNDARY_EXT: + return "VK_STRUCTURE_TYPE_FRAME_BOUNDARY_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT: + return "VK_STRUCTURE_TYPE_SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT"; + case VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT: + return "VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX: + return "VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT: + return "VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT"; + case VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT: + return "VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT"; + case VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT: + return "VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT"; + case VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT: + return "VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT"; + case VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT: + return 
"VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT: + return "VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_DISPLACEMENT_MICROMAP_NV: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_DISPLACEMENT_MICROMAP_NV"; +#endif // VK_ENABLE_BETA_EXTENSIONS + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_VRS_FEATURES_HUAWEI: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_VRS_FEATURES_HUAWEI"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM"; + case VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM: + return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_IMAGE_VIEW_SLICED_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_VIEW_SLICED_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE"; + case 
VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_FEATURES_ARM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_FEATURES_ARM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_PROPERTIES_ARM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_PROPERTIES_ARM"; + case VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_BEGIN_INFO_ARM: + return "VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_BEGIN_INFO_ARM"; + case VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_INFO_ARM: + return "VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_INFO_ARM"; + case VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_SUBMIT_INFO_ARM: + return "VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_SUBMIT_INFO_ARM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV"; + case VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV: + return "VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV"; + case VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_LINEAR_SWEPT_SPHERES_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_LINEAR_SWEPT_SPHERES_FEATURES_NV"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_LINEAR_SWEPT_SPHERES_DATA_NV: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_LINEAR_SWEPT_SPHERES_DATA_NV"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_SPHERES_DATA_NV: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_SPHERES_DATA_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MAXIMAL_RECONVERGENCE_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MAXIMAL_RECONVERGENCE_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM"; + case VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM: + return "VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM"; + case 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT: + return "VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT: + return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT"; + case VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG: + return "VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG"; + case VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG: + return "VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT: + return "VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV: + return "VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV"; + case VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV: + return "VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV"; + case VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV: + return "VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT"; + case 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_PROPERTIES_ANDROID: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_PROPERTIES_ANDROID"; + case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_RESOLVE_PROPERTIES_ANDROID: + return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_RESOLVE_PROPERTIES_ANDROID"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ANTI_LAG_FEATURES_AMD: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ANTI_LAG_FEATURES_AMD"; + case VK_STRUCTURE_TYPE_ANTI_LAG_DATA_AMD: + return "VK_STRUCTURE_TYPE_ANTI_LAG_DATA_AMD"; + case VK_STRUCTURE_TYPE_ANTI_LAG_PRESENTATION_INFO_AMD: + return "VK_STRUCTURE_TYPE_ANTI_LAG_PRESENTATION_INFO_AMD"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_BINARY_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_BINARY_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_BINARY_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_BINARY_INFO_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_BINARY_KEY_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_BINARY_KEY_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_RELEASE_CAPTURED_PIPELINE_DATA_INFO_KHR: + return "VK_STRUCTURE_TYPE_RELEASE_CAPTURED_PIPELINE_DATA_INFO_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_BINARY_DATA_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_BINARY_DATA_INFO_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_DEVICE_PIPELINE_BINARY_INTERNAL_CACHE_CONTROL_KHR: + return "VK_STRUCTURE_TYPE_DEVICE_PIPELINE_BINARY_INTERNAL_CACHE_CONTROL_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_BINARY_HANDLES_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_BINARY_HANDLES_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM"; + case VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM: + return "VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC"; + case VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC: + return "VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV"; + case 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_VECTOR_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_VECTOR_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_VECTOR_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_VECTOR_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_COOPERATIVE_VECTOR_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_COOPERATIVE_VECTOR_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_CONVERT_COOPERATIVE_VECTOR_MATRIX_INFO_NV: + return "VK_STRUCTURE_TYPE_CONVERT_COOPERATIVE_VECTOR_MATRIX_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_LIBRARY_GROUP_HANDLES_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_LIBRARY_GROUP_HANDLES_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_LATENCY_SLEEP_MODE_INFO_NV: + return "VK_STRUCTURE_TYPE_LATENCY_SLEEP_MODE_INFO_NV"; + case VK_STRUCTURE_TYPE_LATENCY_SLEEP_INFO_NV: + return "VK_STRUCTURE_TYPE_LATENCY_SLEEP_INFO_NV"; + case VK_STRUCTURE_TYPE_SET_LATENCY_MARKER_INFO_NV: + return "VK_STRUCTURE_TYPE_SET_LATENCY_MARKER_INFO_NV"; + case VK_STRUCTURE_TYPE_GET_LATENCY_MARKER_INFO_NV: + return "VK_STRUCTURE_TYPE_GET_LATENCY_MARKER_INFO_NV"; + case VK_STRUCTURE_TYPE_LATENCY_TIMINGS_FRAME_REPORT_NV: + return "VK_STRUCTURE_TYPE_LATENCY_TIMINGS_FRAME_REPORT_NV"; + case VK_STRUCTURE_TYPE_LATENCY_SUBMISSION_PRESENT_ID_NV: + return "VK_STRUCTURE_TYPE_LATENCY_SUBMISSION_PRESENT_ID_NV"; + case VK_STRUCTURE_TYPE_OUT_OF_BAND_QUEUE_TYPE_INFO_NV: + return "VK_STRUCTURE_TYPE_OUT_OF_BAND_QUEUE_TYPE_INFO_NV"; + case VK_STRUCTURE_TYPE_SWAPCHAIN_LATENCY_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_SWAPCHAIN_LATENCY_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_LATENCY_SURFACE_CAPABILITIES_NV: + return "VK_STRUCTURE_TYPE_LATENCY_SURFACE_CAPABILITIES_NV"; + case 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_RENDER_AREAS_FEATURES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_RENDER_AREAS_FEATURES_QCOM"; + case VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_RENDER_AREAS_RENDER_PASS_BEGIN_INFO_QCOM: + return "VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_RENDER_AREAS_RENDER_PASS_BEGIN_INFO_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PICTURE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PICTURE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PROFILE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PROFILE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_DPB_SLOT_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_DPB_SLOT_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_PICTURE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_PICTURE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_DPB_SLOT_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_DPB_SLOT_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_AV1_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_AV1_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_PROFILE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_PROFILE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_RATE_CONTROL_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_RATE_CONTROL_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_RATE_CONTROL_LAYER_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_RATE_CONTROL_LAYER_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_QUALITY_LEVEL_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_QUALITY_LEVEL_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_SESSION_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_SESSION_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_GOP_REMAINING_FRAME_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_GOP_REMAINING_FRAME_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_INLINE_QUERY_INFO_KHR: + return 
"VK_STRUCTURE_TYPE_VIDEO_INLINE_QUERY_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PER_STAGE_DESCRIPTOR_SET_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PER_STAGE_DESCRIPTOR_SET_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_FEATURES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_FEATURES_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_PROPERTIES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_PROPERTIES_QCOM"; + case VK_STRUCTURE_TYPE_SAMPLER_BLOCK_MATCH_WINDOW_CREATE_INFO_QCOM: + return "VK_STRUCTURE_TYPE_SAMPLER_BLOCK_MATCH_WINDOW_CREATE_INFO_QCOM"; + case VK_STRUCTURE_TYPE_SAMPLER_CUBIC_WEIGHTS_CREATE_INFO_QCOM: + return "VK_STRUCTURE_TYPE_SAMPLER_CUBIC_WEIGHTS_CREATE_INFO_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_WEIGHTS_FEATURES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_WEIGHTS_FEATURES_QCOM"; + case VK_STRUCTURE_TYPE_BLIT_IMAGE_CUBIC_WEIGHTS_INFO_QCOM: + return "VK_STRUCTURE_TYPE_BLIT_IMAGE_CUBIC_WEIGHTS_INFO_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_DEGAMMA_FEATURES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_DEGAMMA_FEATURES_QCOM"; + case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_YCBCR_DEGAMMA_CREATE_INFO_QCOM: + return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_YCBCR_DEGAMMA_CREATE_INFO_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_SCREEN_BUFFER_PROPERTIES_QNX: + return "VK_STRUCTURE_TYPE_SCREEN_BUFFER_PROPERTIES_QNX"; + case VK_STRUCTURE_TYPE_SCREEN_BUFFER_FORMAT_PROPERTIES_QNX: + return "VK_STRUCTURE_TYPE_SCREEN_BUFFER_FORMAT_PROPERTIES_QNX"; + case VK_STRUCTURE_TYPE_IMPORT_SCREEN_BUFFER_INFO_QNX: + return "VK_STRUCTURE_TYPE_IMPORT_SCREEN_BUFFER_INFO_QNX"; + case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_QNX: + return "VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_QNX"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_SCREEN_BUFFER_FEATURES_QNX: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_SCREEN_BUFFER_FEATURES_QNX"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT"; + case VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR: + return "VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR"; + case VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT: + return "VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT"; + case VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT: + return "VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_MEMORY_HEAP_FEATURES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_MEMORY_HEAP_FEATURES_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_MEMORY_HEAP_PROPERTIES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_MEMORY_HEAP_PROPERTIES_QCOM"; + case VK_STRUCTURE_TYPE_TILE_MEMORY_REQUIREMENTS_QCOM: + return "VK_STRUCTURE_TYPE_TILE_MEMORY_REQUIREMENTS_QCOM"; + case VK_STRUCTURE_TYPE_TILE_MEMORY_BIND_INFO_QCOM: + return 
"VK_STRUCTURE_TYPE_TILE_MEMORY_BIND_INFO_QCOM"; + case VK_STRUCTURE_TYPE_TILE_MEMORY_SIZE_INFO_QCOM: + return "VK_STRUCTURE_TYPE_TILE_MEMORY_SIZE_INFO_QCOM"; + case VK_STRUCTURE_TYPE_DISPLAY_SURFACE_STEREO_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_DISPLAY_SURFACE_STEREO_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_DISPLAY_MODE_STEREO_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_DISPLAY_MODE_STEREO_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_FORMAT_QUANTIZATION_MAP_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_FORMAT_QUANTIZATION_MAP_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_SESSION_PARAMETERS_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_SESSION_PARAMETERS_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUANTIZATION_MAP_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUANTIZATION_MAP_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUANTIZATION_MAP_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUANTIZATION_MAP_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUANTIZATION_MAP_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUANTIZATION_MAP_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_FORMAT_H265_QUANTIZATION_MAP_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_FORMAT_H265_QUANTIZATION_MAP_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_QUANTIZATION_MAP_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_QUANTIZATION_MAP_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_FORMAT_AV1_QUANTIZATION_MAP_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_FORMAT_AV1_QUANTIZATION_MAP_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV"; + case VK_STRUCTURE_TYPE_EXTERNAL_COMPUTE_QUEUE_DEVICE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_EXTERNAL_COMPUTE_QUEUE_DEVICE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_EXTERNAL_COMPUTE_QUEUE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_EXTERNAL_COMPUTE_QUEUE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_EXTERNAL_COMPUTE_QUEUE_DATA_PARAMS_NV: + return "VK_STRUCTURE_TYPE_EXTERNAL_COMPUTE_QUEUE_DATA_PARAMS_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_COMPUTE_QUEUE_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_COMPUTE_QUEUE_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_RELAXED_EXTENDED_INSTRUCTION_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_RELAXED_EXTENDED_INSTRUCTION_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMMAND_BUFFER_INHERITANCE_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMMAND_BUFFER_INHERITANCE_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_LIST_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_LIST_KHR"; + case 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_VULKAN_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_VULKAN_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_REPLICATED_COMPOSITES_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_REPLICATED_COMPOSITES_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_ACCELERATION_STRUCTURE_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_ACCELERATION_STRUCTURE_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_ACCELERATION_STRUCTURE_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_ACCELERATION_STRUCTURE_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_CLUSTERS_BOTTOM_LEVEL_INPUT_NV: + return "VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_CLUSTERS_BOTTOM_LEVEL_INPUT_NV"; + case VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_TRIANGLE_CLUSTER_INPUT_NV: + return "VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_TRIANGLE_CLUSTER_INPUT_NV"; + case VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_MOVE_OBJECTS_INPUT_NV: + return "VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_MOVE_OBJECTS_INPUT_NV"; + case VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_INPUT_INFO_NV: + return "VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_INPUT_INFO_NV"; + case VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_COMMANDS_INFO_NV: + return "VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_COMMANDS_INFO_NV"; + case VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CLUSTER_ACCELERATION_STRUCTURE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CLUSTER_ACCELERATION_STRUCTURE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PARTITIONED_ACCELERATION_STRUCTURE_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PARTITIONED_ACCELERATION_STRUCTURE_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PARTITIONED_ACCELERATION_STRUCTURE_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PARTITIONED_ACCELERATION_STRUCTURE_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_PARTITIONED_ACCELERATION_STRUCTURE_NV: + return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_PARTITIONED_ACCELERATION_STRUCTURE_NV"; + case VK_STRUCTURE_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCES_INPUT_NV: + return "VK_STRUCTURE_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCES_INPUT_NV"; + case VK_STRUCTURE_TYPE_BUILD_PARTITIONED_ACCELERATION_STRUCTURE_INFO_NV: + return "VK_STRUCTURE_TYPE_BUILD_PARTITIONED_ACCELERATION_STRUCTURE_INFO_NV"; + case VK_STRUCTURE_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_FLAGS_NV: + return "VK_STRUCTURE_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_FLAGS_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_EXT"; + case 
VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_EXT: + return "VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_EXT"; + case VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_EXT: + return "VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_EXT"; + case VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_EXT: + return "VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_EXT"; + case VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_PIPELINE_EXT: + return "VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_PIPELINE_EXT"; + case VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_SHADER_EXT: + return "VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_SHADER_EXT"; + case VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_PIPELINE_INFO_EXT: + return "VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_PIPELINE_INFO_EXT"; + case VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_INFO_EXT: + return "VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_INFO_EXT"; + case VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_LAYOUT_INFO_EXT: + return "VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_LAYOUT_INFO_EXT"; + case VK_STRUCTURE_TYPE_GENERATED_COMMANDS_PIPELINE_INFO_EXT: + return "VK_STRUCTURE_TYPE_GENERATED_COMMANDS_PIPELINE_INFO_EXT"; + case VK_STRUCTURE_TYPE_GENERATED_COMMANDS_SHADER_INFO_EXT: + return "VK_STRUCTURE_TYPE_GENERATED_COMMANDS_SHADER_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_8_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_8_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_MEMORY_BARRIER_ACCESS_FLAGS_3_KHR: + return "VK_STRUCTURE_TYPE_MEMORY_BARRIER_ACCESS_FLAGS_3_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_FEATURES_MESA: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_FEATURES_MESA"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_PROPERTIES_MESA: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_PROPERTIES_MESA"; + case VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA: + return "VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_CONTROL_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_CONTROL_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLAMP_CONTROL_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLAMP_CONTROL_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_2_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_2_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_INLINE_SESSION_PARAMETERS_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_INLINE_SESSION_PARAMETERS_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_INLINE_SESSION_PARAMETERS_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_INLINE_SESSION_PARAMETERS_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_INLINE_SESSION_PARAMETERS_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_INLINE_SESSION_PARAMETERS_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HDR_VIVID_FEATURES_HUAWEI: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HDR_VIVID_FEATURES_HUAWEI"; + case VK_STRUCTURE_TYPE_HDR_VIVID_DYNAMIC_METADATA_HUAWEI: + return 
"VK_STRUCTURE_TYPE_HDR_VIVID_DYNAMIC_METADATA_HUAWEI"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_FEATURES_NV"; + case VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_FLEXIBLE_DIMENSIONS_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_FLEXIBLE_DIMENSIONS_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_OPACITY_MICROMAP_FEATURES_ARM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_OPACITY_MICROMAP_FEATURES_ARM"; + case VK_STRUCTURE_TYPE_IMPORT_MEMORY_METAL_HANDLE_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_METAL_HANDLE_INFO_EXT"; + case VK_STRUCTURE_TYPE_MEMORY_METAL_HANDLE_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_MEMORY_METAL_HANDLE_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_MEMORY_GET_METAL_HANDLE_INFO_EXT: + return "VK_STRUCTURE_TYPE_MEMORY_GET_METAL_HANDLE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_ROBUSTNESS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_ROBUSTNESS_FEATURES_EXT"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + case VK_STRUCTURE_TYPE_SET_PRESENT_CONFIG_NV: + return "VK_STRUCTURE_TYPE_SET_PRESENT_CONFIG_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_METERING_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_METERING_FEATURES_NV"; +#endif // VK_ENABLE_BETA_EXTENSIONS + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_EXT: + return "VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_EXT"; + case VK_STRUCTURE_TYPE_RENDERING_END_INFO_EXT: + return "VK_STRUCTURE_TYPE_RENDERING_END_INFO_EXT"; + default: + return "Unhandled VkStructureType"; + } +} +static inline const char* string_VkPipelineCacheHeaderVersion(VkPipelineCacheHeaderVersion input_value) { + switch (input_value) { + case VK_PIPELINE_CACHE_HEADER_VERSION_ONE: + return "VK_PIPELINE_CACHE_HEADER_VERSION_ONE"; + default: + return "Unhandled VkPipelineCacheHeaderVersion"; + } +} +static inline const char* string_VkImageLayout(VkImageLayout input_value) { + switch (input_value) { + case VK_IMAGE_LAYOUT_UNDEFINED: + return "VK_IMAGE_LAYOUT_UNDEFINED"; + case VK_IMAGE_LAYOUT_GENERAL: + return "VK_IMAGE_LAYOUT_GENERAL"; + case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: + return "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL"; + case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: + return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL"; + case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: + return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL"; + case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: + return "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL"; + case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: + return "VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL"; + case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: + return "VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL"; + case 
VK_IMAGE_LAYOUT_PREINITIALIZED: + return "VK_IMAGE_LAYOUT_PREINITIALIZED"; + case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL: + return "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL"; + case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL: + return "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL"; + case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL: + return "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL"; + case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL: + return "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL"; + case VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL: + return "VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL"; + case VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL: + return "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL"; + case VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL: + return "VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL"; + case VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL: + return "VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL"; + case VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ: + return "VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ"; + case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR: + return "VK_IMAGE_LAYOUT_PRESENT_SRC_KHR"; + case VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR: + return "VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR"; + case VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR: + return "VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR"; + case VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR: + return "VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR"; + case VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR: + return "VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR"; + case VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT: + return "VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT"; + case VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR: + return "VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR"; + case VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR: + return "VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR"; + case VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR: + return "VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR"; + case VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR: + return "VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR"; + case VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT: + return "VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT"; + case VK_IMAGE_LAYOUT_VIDEO_ENCODE_QUANTIZATION_MAP_KHR: + return "VK_IMAGE_LAYOUT_VIDEO_ENCODE_QUANTIZATION_MAP_KHR"; + default: + return "Unhandled VkImageLayout"; + } +} +static inline const char* string_VkObjectType(VkObjectType input_value) { + switch (input_value) { + case VK_OBJECT_TYPE_UNKNOWN: + return "VK_OBJECT_TYPE_UNKNOWN"; + case VK_OBJECT_TYPE_INSTANCE: + return "VK_OBJECT_TYPE_INSTANCE"; + case VK_OBJECT_TYPE_PHYSICAL_DEVICE: + return "VK_OBJECT_TYPE_PHYSICAL_DEVICE"; + case VK_OBJECT_TYPE_DEVICE: + return "VK_OBJECT_TYPE_DEVICE"; + case VK_OBJECT_TYPE_QUEUE: + return "VK_OBJECT_TYPE_QUEUE"; + case VK_OBJECT_TYPE_SEMAPHORE: + return "VK_OBJECT_TYPE_SEMAPHORE"; + case VK_OBJECT_TYPE_COMMAND_BUFFER: + return "VK_OBJECT_TYPE_COMMAND_BUFFER"; + case VK_OBJECT_TYPE_FENCE: + return "VK_OBJECT_TYPE_FENCE"; + case VK_OBJECT_TYPE_DEVICE_MEMORY: + return "VK_OBJECT_TYPE_DEVICE_MEMORY"; + case VK_OBJECT_TYPE_BUFFER: + return "VK_OBJECT_TYPE_BUFFER"; + case VK_OBJECT_TYPE_IMAGE: + return "VK_OBJECT_TYPE_IMAGE"; + case VK_OBJECT_TYPE_EVENT: + return "VK_OBJECT_TYPE_EVENT"; + case VK_OBJECT_TYPE_QUERY_POOL: + return "VK_OBJECT_TYPE_QUERY_POOL"; + case VK_OBJECT_TYPE_BUFFER_VIEW: + return "VK_OBJECT_TYPE_BUFFER_VIEW"; + case VK_OBJECT_TYPE_IMAGE_VIEW: + return "VK_OBJECT_TYPE_IMAGE_VIEW"; + case VK_OBJECT_TYPE_SHADER_MODULE: + return "VK_OBJECT_TYPE_SHADER_MODULE"; + 
case VK_OBJECT_TYPE_PIPELINE_CACHE: + return "VK_OBJECT_TYPE_PIPELINE_CACHE"; + case VK_OBJECT_TYPE_PIPELINE_LAYOUT: + return "VK_OBJECT_TYPE_PIPELINE_LAYOUT"; + case VK_OBJECT_TYPE_RENDER_PASS: + return "VK_OBJECT_TYPE_RENDER_PASS"; + case VK_OBJECT_TYPE_PIPELINE: + return "VK_OBJECT_TYPE_PIPELINE"; + case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT: + return "VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT"; + case VK_OBJECT_TYPE_SAMPLER: + return "VK_OBJECT_TYPE_SAMPLER"; + case VK_OBJECT_TYPE_DESCRIPTOR_POOL: + return "VK_OBJECT_TYPE_DESCRIPTOR_POOL"; + case VK_OBJECT_TYPE_DESCRIPTOR_SET: + return "VK_OBJECT_TYPE_DESCRIPTOR_SET"; + case VK_OBJECT_TYPE_FRAMEBUFFER: + return "VK_OBJECT_TYPE_FRAMEBUFFER"; + case VK_OBJECT_TYPE_COMMAND_POOL: + return "VK_OBJECT_TYPE_COMMAND_POOL"; + case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION: + return "VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION"; + case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE: + return "VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE"; + case VK_OBJECT_TYPE_PRIVATE_DATA_SLOT: + return "VK_OBJECT_TYPE_PRIVATE_DATA_SLOT"; + case VK_OBJECT_TYPE_SURFACE_KHR: + return "VK_OBJECT_TYPE_SURFACE_KHR"; + case VK_OBJECT_TYPE_SWAPCHAIN_KHR: + return "VK_OBJECT_TYPE_SWAPCHAIN_KHR"; + case VK_OBJECT_TYPE_DISPLAY_KHR: + return "VK_OBJECT_TYPE_DISPLAY_KHR"; + case VK_OBJECT_TYPE_DISPLAY_MODE_KHR: + return "VK_OBJECT_TYPE_DISPLAY_MODE_KHR"; + case VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT: + return "VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT"; + case VK_OBJECT_TYPE_VIDEO_SESSION_KHR: + return "VK_OBJECT_TYPE_VIDEO_SESSION_KHR"; + case VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR: + return "VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR"; + case VK_OBJECT_TYPE_CU_MODULE_NVX: + return "VK_OBJECT_TYPE_CU_MODULE_NVX"; + case VK_OBJECT_TYPE_CU_FUNCTION_NVX: + return "VK_OBJECT_TYPE_CU_FUNCTION_NVX"; + case VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT: + return "VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT"; + case VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR: + return "VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR"; + case VK_OBJECT_TYPE_VALIDATION_CACHE_EXT: + return "VK_OBJECT_TYPE_VALIDATION_CACHE_EXT"; + case VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV: + return "VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV"; + case VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL: + return "VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL"; + case VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR: + return "VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR"; + case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV: + return "VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + case VK_OBJECT_TYPE_CUDA_MODULE_NV: + return "VK_OBJECT_TYPE_CUDA_MODULE_NV"; + case VK_OBJECT_TYPE_CUDA_FUNCTION_NV: + return "VK_OBJECT_TYPE_CUDA_FUNCTION_NV"; +#endif // VK_ENABLE_BETA_EXTENSIONS + case VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA: + return "VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA"; + case VK_OBJECT_TYPE_MICROMAP_EXT: + return "VK_OBJECT_TYPE_MICROMAP_EXT"; + case VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV: + return "VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV"; + case VK_OBJECT_TYPE_SHADER_EXT: + return "VK_OBJECT_TYPE_SHADER_EXT"; + case VK_OBJECT_TYPE_PIPELINE_BINARY_KHR: + return "VK_OBJECT_TYPE_PIPELINE_BINARY_KHR"; + case VK_OBJECT_TYPE_EXTERNAL_COMPUTE_QUEUE_NV: + return "VK_OBJECT_TYPE_EXTERNAL_COMPUTE_QUEUE_NV"; + case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_EXT: + return "VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_EXT"; + case VK_OBJECT_TYPE_INDIRECT_EXECUTION_SET_EXT: + return "VK_OBJECT_TYPE_INDIRECT_EXECUTION_SET_EXT"; + default: + return 
"Unhandled VkObjectType"; + } +} +static inline const char* string_VkVendorId(VkVendorId input_value) { + switch (input_value) { + case VK_VENDOR_ID_KHRONOS: + return "VK_VENDOR_ID_KHRONOS"; + case VK_VENDOR_ID_VIV: + return "VK_VENDOR_ID_VIV"; + case VK_VENDOR_ID_VSI: + return "VK_VENDOR_ID_VSI"; + case VK_VENDOR_ID_KAZAN: + return "VK_VENDOR_ID_KAZAN"; + case VK_VENDOR_ID_CODEPLAY: + return "VK_VENDOR_ID_CODEPLAY"; + case VK_VENDOR_ID_MESA: + return "VK_VENDOR_ID_MESA"; + case VK_VENDOR_ID_POCL: + return "VK_VENDOR_ID_POCL"; + case VK_VENDOR_ID_MOBILEYE: + return "VK_VENDOR_ID_MOBILEYE"; + default: + return "Unhandled VkVendorId"; + } +} +static inline const char* string_VkSystemAllocationScope(VkSystemAllocationScope input_value) { + switch (input_value) { + case VK_SYSTEM_ALLOCATION_SCOPE_COMMAND: + return "VK_SYSTEM_ALLOCATION_SCOPE_COMMAND"; + case VK_SYSTEM_ALLOCATION_SCOPE_OBJECT: + return "VK_SYSTEM_ALLOCATION_SCOPE_OBJECT"; + case VK_SYSTEM_ALLOCATION_SCOPE_CACHE: + return "VK_SYSTEM_ALLOCATION_SCOPE_CACHE"; + case VK_SYSTEM_ALLOCATION_SCOPE_DEVICE: + return "VK_SYSTEM_ALLOCATION_SCOPE_DEVICE"; + case VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE: + return "VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE"; + default: + return "Unhandled VkSystemAllocationScope"; + } +} +static inline const char* string_VkInternalAllocationType(VkInternalAllocationType input_value) { + switch (input_value) { + case VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE: + return "VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE"; + default: + return "Unhandled VkInternalAllocationType"; + } +} +static inline const char* string_VkFormat(VkFormat input_value) { + switch (input_value) { + case VK_FORMAT_UNDEFINED: + return "VK_FORMAT_UNDEFINED"; + case VK_FORMAT_R4G4_UNORM_PACK8: + return "VK_FORMAT_R4G4_UNORM_PACK8"; + case VK_FORMAT_R4G4B4A4_UNORM_PACK16: + return "VK_FORMAT_R4G4B4A4_UNORM_PACK16"; + case VK_FORMAT_B4G4R4A4_UNORM_PACK16: + return "VK_FORMAT_B4G4R4A4_UNORM_PACK16"; + case VK_FORMAT_R5G6B5_UNORM_PACK16: + return "VK_FORMAT_R5G6B5_UNORM_PACK16"; + case VK_FORMAT_B5G6R5_UNORM_PACK16: + return "VK_FORMAT_B5G6R5_UNORM_PACK16"; + case VK_FORMAT_R5G5B5A1_UNORM_PACK16: + return "VK_FORMAT_R5G5B5A1_UNORM_PACK16"; + case VK_FORMAT_B5G5R5A1_UNORM_PACK16: + return "VK_FORMAT_B5G5R5A1_UNORM_PACK16"; + case VK_FORMAT_A1R5G5B5_UNORM_PACK16: + return "VK_FORMAT_A1R5G5B5_UNORM_PACK16"; + case VK_FORMAT_R8_UNORM: + return "VK_FORMAT_R8_UNORM"; + case VK_FORMAT_R8_SNORM: + return "VK_FORMAT_R8_SNORM"; + case VK_FORMAT_R8_USCALED: + return "VK_FORMAT_R8_USCALED"; + case VK_FORMAT_R8_SSCALED: + return "VK_FORMAT_R8_SSCALED"; + case VK_FORMAT_R8_UINT: + return "VK_FORMAT_R8_UINT"; + case VK_FORMAT_R8_SINT: + return "VK_FORMAT_R8_SINT"; + case VK_FORMAT_R8_SRGB: + return "VK_FORMAT_R8_SRGB"; + case VK_FORMAT_R8G8_UNORM: + return "VK_FORMAT_R8G8_UNORM"; + case VK_FORMAT_R8G8_SNORM: + return "VK_FORMAT_R8G8_SNORM"; + case VK_FORMAT_R8G8_USCALED: + return "VK_FORMAT_R8G8_USCALED"; + case VK_FORMAT_R8G8_SSCALED: + return "VK_FORMAT_R8G8_SSCALED"; + case VK_FORMAT_R8G8_UINT: + return "VK_FORMAT_R8G8_UINT"; + case VK_FORMAT_R8G8_SINT: + return "VK_FORMAT_R8G8_SINT"; + case VK_FORMAT_R8G8_SRGB: + return "VK_FORMAT_R8G8_SRGB"; + case VK_FORMAT_R8G8B8_UNORM: + return "VK_FORMAT_R8G8B8_UNORM"; + case VK_FORMAT_R8G8B8_SNORM: + return "VK_FORMAT_R8G8B8_SNORM"; + case VK_FORMAT_R8G8B8_USCALED: + return "VK_FORMAT_R8G8B8_USCALED"; + case VK_FORMAT_R8G8B8_SSCALED: + return "VK_FORMAT_R8G8B8_SSCALED"; + case VK_FORMAT_R8G8B8_UINT: + return "VK_FORMAT_R8G8B8_UINT"; + 
case VK_FORMAT_R8G8B8_SINT: + return "VK_FORMAT_R8G8B8_SINT"; + case VK_FORMAT_R8G8B8_SRGB: + return "VK_FORMAT_R8G8B8_SRGB"; + case VK_FORMAT_B8G8R8_UNORM: + return "VK_FORMAT_B8G8R8_UNORM"; + case VK_FORMAT_B8G8R8_SNORM: + return "VK_FORMAT_B8G8R8_SNORM"; + case VK_FORMAT_B8G8R8_USCALED: + return "VK_FORMAT_B8G8R8_USCALED"; + case VK_FORMAT_B8G8R8_SSCALED: + return "VK_FORMAT_B8G8R8_SSCALED"; + case VK_FORMAT_B8G8R8_UINT: + return "VK_FORMAT_B8G8R8_UINT"; + case VK_FORMAT_B8G8R8_SINT: + return "VK_FORMAT_B8G8R8_SINT"; + case VK_FORMAT_B8G8R8_SRGB: + return "VK_FORMAT_B8G8R8_SRGB"; + case VK_FORMAT_R8G8B8A8_UNORM: + return "VK_FORMAT_R8G8B8A8_UNORM"; + case VK_FORMAT_R8G8B8A8_SNORM: + return "VK_FORMAT_R8G8B8A8_SNORM"; + case VK_FORMAT_R8G8B8A8_USCALED: + return "VK_FORMAT_R8G8B8A8_USCALED"; + case VK_FORMAT_R8G8B8A8_SSCALED: + return "VK_FORMAT_R8G8B8A8_SSCALED"; + case VK_FORMAT_R8G8B8A8_UINT: + return "VK_FORMAT_R8G8B8A8_UINT"; + case VK_FORMAT_R8G8B8A8_SINT: + return "VK_FORMAT_R8G8B8A8_SINT"; + case VK_FORMAT_R8G8B8A8_SRGB: + return "VK_FORMAT_R8G8B8A8_SRGB"; + case VK_FORMAT_B8G8R8A8_UNORM: + return "VK_FORMAT_B8G8R8A8_UNORM"; + case VK_FORMAT_B8G8R8A8_SNORM: + return "VK_FORMAT_B8G8R8A8_SNORM"; + case VK_FORMAT_B8G8R8A8_USCALED: + return "VK_FORMAT_B8G8R8A8_USCALED"; + case VK_FORMAT_B8G8R8A8_SSCALED: + return "VK_FORMAT_B8G8R8A8_SSCALED"; + case VK_FORMAT_B8G8R8A8_UINT: + return "VK_FORMAT_B8G8R8A8_UINT"; + case VK_FORMAT_B8G8R8A8_SINT: + return "VK_FORMAT_B8G8R8A8_SINT"; + case VK_FORMAT_B8G8R8A8_SRGB: + return "VK_FORMAT_B8G8R8A8_SRGB"; + case VK_FORMAT_A8B8G8R8_UNORM_PACK32: + return "VK_FORMAT_A8B8G8R8_UNORM_PACK32"; + case VK_FORMAT_A8B8G8R8_SNORM_PACK32: + return "VK_FORMAT_A8B8G8R8_SNORM_PACK32"; + case VK_FORMAT_A8B8G8R8_USCALED_PACK32: + return "VK_FORMAT_A8B8G8R8_USCALED_PACK32"; + case VK_FORMAT_A8B8G8R8_SSCALED_PACK32: + return "VK_FORMAT_A8B8G8R8_SSCALED_PACK32"; + case VK_FORMAT_A8B8G8R8_UINT_PACK32: + return "VK_FORMAT_A8B8G8R8_UINT_PACK32"; + case VK_FORMAT_A8B8G8R8_SINT_PACK32: + return "VK_FORMAT_A8B8G8R8_SINT_PACK32"; + case VK_FORMAT_A8B8G8R8_SRGB_PACK32: + return "VK_FORMAT_A8B8G8R8_SRGB_PACK32"; + case VK_FORMAT_A2R10G10B10_UNORM_PACK32: + return "VK_FORMAT_A2R10G10B10_UNORM_PACK32"; + case VK_FORMAT_A2R10G10B10_SNORM_PACK32: + return "VK_FORMAT_A2R10G10B10_SNORM_PACK32"; + case VK_FORMAT_A2R10G10B10_USCALED_PACK32: + return "VK_FORMAT_A2R10G10B10_USCALED_PACK32"; + case VK_FORMAT_A2R10G10B10_SSCALED_PACK32: + return "VK_FORMAT_A2R10G10B10_SSCALED_PACK32"; + case VK_FORMAT_A2R10G10B10_UINT_PACK32: + return "VK_FORMAT_A2R10G10B10_UINT_PACK32"; + case VK_FORMAT_A2R10G10B10_SINT_PACK32: + return "VK_FORMAT_A2R10G10B10_SINT_PACK32"; + case VK_FORMAT_A2B10G10R10_UNORM_PACK32: + return "VK_FORMAT_A2B10G10R10_UNORM_PACK32"; + case VK_FORMAT_A2B10G10R10_SNORM_PACK32: + return "VK_FORMAT_A2B10G10R10_SNORM_PACK32"; + case VK_FORMAT_A2B10G10R10_USCALED_PACK32: + return "VK_FORMAT_A2B10G10R10_USCALED_PACK32"; + case VK_FORMAT_A2B10G10R10_SSCALED_PACK32: + return "VK_FORMAT_A2B10G10R10_SSCALED_PACK32"; + case VK_FORMAT_A2B10G10R10_UINT_PACK32: + return "VK_FORMAT_A2B10G10R10_UINT_PACK32"; + case VK_FORMAT_A2B10G10R10_SINT_PACK32: + return "VK_FORMAT_A2B10G10R10_SINT_PACK32"; + case VK_FORMAT_R16_UNORM: + return "VK_FORMAT_R16_UNORM"; + case VK_FORMAT_R16_SNORM: + return "VK_FORMAT_R16_SNORM"; + case VK_FORMAT_R16_USCALED: + return "VK_FORMAT_R16_USCALED"; + case VK_FORMAT_R16_SSCALED: + return "VK_FORMAT_R16_SSCALED"; + case VK_FORMAT_R16_UINT: + return 
"VK_FORMAT_R16_UINT"; + case VK_FORMAT_R16_SINT: + return "VK_FORMAT_R16_SINT"; + case VK_FORMAT_R16_SFLOAT: + return "VK_FORMAT_R16_SFLOAT"; + case VK_FORMAT_R16G16_UNORM: + return "VK_FORMAT_R16G16_UNORM"; + case VK_FORMAT_R16G16_SNORM: + return "VK_FORMAT_R16G16_SNORM"; + case VK_FORMAT_R16G16_USCALED: + return "VK_FORMAT_R16G16_USCALED"; + case VK_FORMAT_R16G16_SSCALED: + return "VK_FORMAT_R16G16_SSCALED"; + case VK_FORMAT_R16G16_UINT: + return "VK_FORMAT_R16G16_UINT"; + case VK_FORMAT_R16G16_SINT: + return "VK_FORMAT_R16G16_SINT"; + case VK_FORMAT_R16G16_SFLOAT: + return "VK_FORMAT_R16G16_SFLOAT"; + case VK_FORMAT_R16G16B16_UNORM: + return "VK_FORMAT_R16G16B16_UNORM"; + case VK_FORMAT_R16G16B16_SNORM: + return "VK_FORMAT_R16G16B16_SNORM"; + case VK_FORMAT_R16G16B16_USCALED: + return "VK_FORMAT_R16G16B16_USCALED"; + case VK_FORMAT_R16G16B16_SSCALED: + return "VK_FORMAT_R16G16B16_SSCALED"; + case VK_FORMAT_R16G16B16_UINT: + return "VK_FORMAT_R16G16B16_UINT"; + case VK_FORMAT_R16G16B16_SINT: + return "VK_FORMAT_R16G16B16_SINT"; + case VK_FORMAT_R16G16B16_SFLOAT: + return "VK_FORMAT_R16G16B16_SFLOAT"; + case VK_FORMAT_R16G16B16A16_UNORM: + return "VK_FORMAT_R16G16B16A16_UNORM"; + case VK_FORMAT_R16G16B16A16_SNORM: + return "VK_FORMAT_R16G16B16A16_SNORM"; + case VK_FORMAT_R16G16B16A16_USCALED: + return "VK_FORMAT_R16G16B16A16_USCALED"; + case VK_FORMAT_R16G16B16A16_SSCALED: + return "VK_FORMAT_R16G16B16A16_SSCALED"; + case VK_FORMAT_R16G16B16A16_UINT: + return "VK_FORMAT_R16G16B16A16_UINT"; + case VK_FORMAT_R16G16B16A16_SINT: + return "VK_FORMAT_R16G16B16A16_SINT"; + case VK_FORMAT_R16G16B16A16_SFLOAT: + return "VK_FORMAT_R16G16B16A16_SFLOAT"; + case VK_FORMAT_R32_UINT: + return "VK_FORMAT_R32_UINT"; + case VK_FORMAT_R32_SINT: + return "VK_FORMAT_R32_SINT"; + case VK_FORMAT_R32_SFLOAT: + return "VK_FORMAT_R32_SFLOAT"; + case VK_FORMAT_R32G32_UINT: + return "VK_FORMAT_R32G32_UINT"; + case VK_FORMAT_R32G32_SINT: + return "VK_FORMAT_R32G32_SINT"; + case VK_FORMAT_R32G32_SFLOAT: + return "VK_FORMAT_R32G32_SFLOAT"; + case VK_FORMAT_R32G32B32_UINT: + return "VK_FORMAT_R32G32B32_UINT"; + case VK_FORMAT_R32G32B32_SINT: + return "VK_FORMAT_R32G32B32_SINT"; + case VK_FORMAT_R32G32B32_SFLOAT: + return "VK_FORMAT_R32G32B32_SFLOAT"; + case VK_FORMAT_R32G32B32A32_UINT: + return "VK_FORMAT_R32G32B32A32_UINT"; + case VK_FORMAT_R32G32B32A32_SINT: + return "VK_FORMAT_R32G32B32A32_SINT"; + case VK_FORMAT_R32G32B32A32_SFLOAT: + return "VK_FORMAT_R32G32B32A32_SFLOAT"; + case VK_FORMAT_R64_UINT: + return "VK_FORMAT_R64_UINT"; + case VK_FORMAT_R64_SINT: + return "VK_FORMAT_R64_SINT"; + case VK_FORMAT_R64_SFLOAT: + return "VK_FORMAT_R64_SFLOAT"; + case VK_FORMAT_R64G64_UINT: + return "VK_FORMAT_R64G64_UINT"; + case VK_FORMAT_R64G64_SINT: + return "VK_FORMAT_R64G64_SINT"; + case VK_FORMAT_R64G64_SFLOAT: + return "VK_FORMAT_R64G64_SFLOAT"; + case VK_FORMAT_R64G64B64_UINT: + return "VK_FORMAT_R64G64B64_UINT"; + case VK_FORMAT_R64G64B64_SINT: + return "VK_FORMAT_R64G64B64_SINT"; + case VK_FORMAT_R64G64B64_SFLOAT: + return "VK_FORMAT_R64G64B64_SFLOAT"; + case VK_FORMAT_R64G64B64A64_UINT: + return "VK_FORMAT_R64G64B64A64_UINT"; + case VK_FORMAT_R64G64B64A64_SINT: + return "VK_FORMAT_R64G64B64A64_SINT"; + case VK_FORMAT_R64G64B64A64_SFLOAT: + return "VK_FORMAT_R64G64B64A64_SFLOAT"; + case VK_FORMAT_B10G11R11_UFLOAT_PACK32: + return "VK_FORMAT_B10G11R11_UFLOAT_PACK32"; + case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32: + return "VK_FORMAT_E5B9G9R9_UFLOAT_PACK32"; + case VK_FORMAT_D16_UNORM: + return "VK_FORMAT_D16_UNORM"; + case 
VK_FORMAT_X8_D24_UNORM_PACK32: + return "VK_FORMAT_X8_D24_UNORM_PACK32"; + case VK_FORMAT_D32_SFLOAT: + return "VK_FORMAT_D32_SFLOAT"; + case VK_FORMAT_S8_UINT: + return "VK_FORMAT_S8_UINT"; + case VK_FORMAT_D16_UNORM_S8_UINT: + return "VK_FORMAT_D16_UNORM_S8_UINT"; + case VK_FORMAT_D24_UNORM_S8_UINT: + return "VK_FORMAT_D24_UNORM_S8_UINT"; + case VK_FORMAT_D32_SFLOAT_S8_UINT: + return "VK_FORMAT_D32_SFLOAT_S8_UINT"; + case VK_FORMAT_BC1_RGB_UNORM_BLOCK: + return "VK_FORMAT_BC1_RGB_UNORM_BLOCK"; + case VK_FORMAT_BC1_RGB_SRGB_BLOCK: + return "VK_FORMAT_BC1_RGB_SRGB_BLOCK"; + case VK_FORMAT_BC1_RGBA_UNORM_BLOCK: + return "VK_FORMAT_BC1_RGBA_UNORM_BLOCK"; + case VK_FORMAT_BC1_RGBA_SRGB_BLOCK: + return "VK_FORMAT_BC1_RGBA_SRGB_BLOCK"; + case VK_FORMAT_BC2_UNORM_BLOCK: + return "VK_FORMAT_BC2_UNORM_BLOCK"; + case VK_FORMAT_BC2_SRGB_BLOCK: + return "VK_FORMAT_BC2_SRGB_BLOCK"; + case VK_FORMAT_BC3_UNORM_BLOCK: + return "VK_FORMAT_BC3_UNORM_BLOCK"; + case VK_FORMAT_BC3_SRGB_BLOCK: + return "VK_FORMAT_BC3_SRGB_BLOCK"; + case VK_FORMAT_BC4_UNORM_BLOCK: + return "VK_FORMAT_BC4_UNORM_BLOCK"; + case VK_FORMAT_BC4_SNORM_BLOCK: + return "VK_FORMAT_BC4_SNORM_BLOCK"; + case VK_FORMAT_BC5_UNORM_BLOCK: + return "VK_FORMAT_BC5_UNORM_BLOCK"; + case VK_FORMAT_BC5_SNORM_BLOCK: + return "VK_FORMAT_BC5_SNORM_BLOCK"; + case VK_FORMAT_BC6H_UFLOAT_BLOCK: + return "VK_FORMAT_BC6H_UFLOAT_BLOCK"; + case VK_FORMAT_BC6H_SFLOAT_BLOCK: + return "VK_FORMAT_BC6H_SFLOAT_BLOCK"; + case VK_FORMAT_BC7_UNORM_BLOCK: + return "VK_FORMAT_BC7_UNORM_BLOCK"; + case VK_FORMAT_BC7_SRGB_BLOCK: + return "VK_FORMAT_BC7_SRGB_BLOCK"; + case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK: + return "VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK"; + case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK: + return "VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK"; + case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK: + return "VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK"; + case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK: + return "VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK"; + case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK: + return "VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK"; + case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK: + return "VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK"; + case VK_FORMAT_EAC_R11_UNORM_BLOCK: + return "VK_FORMAT_EAC_R11_UNORM_BLOCK"; + case VK_FORMAT_EAC_R11_SNORM_BLOCK: + return "VK_FORMAT_EAC_R11_SNORM_BLOCK"; + case VK_FORMAT_EAC_R11G11_UNORM_BLOCK: + return "VK_FORMAT_EAC_R11G11_UNORM_BLOCK"; + case VK_FORMAT_EAC_R11G11_SNORM_BLOCK: + return "VK_FORMAT_EAC_R11G11_SNORM_BLOCK"; + case VK_FORMAT_ASTC_4x4_UNORM_BLOCK: + return "VK_FORMAT_ASTC_4x4_UNORM_BLOCK"; + case VK_FORMAT_ASTC_4x4_SRGB_BLOCK: + return "VK_FORMAT_ASTC_4x4_SRGB_BLOCK"; + case VK_FORMAT_ASTC_5x4_UNORM_BLOCK: + return "VK_FORMAT_ASTC_5x4_UNORM_BLOCK"; + case VK_FORMAT_ASTC_5x4_SRGB_BLOCK: + return "VK_FORMAT_ASTC_5x4_SRGB_BLOCK"; + case VK_FORMAT_ASTC_5x5_UNORM_BLOCK: + return "VK_FORMAT_ASTC_5x5_UNORM_BLOCK"; + case VK_FORMAT_ASTC_5x5_SRGB_BLOCK: + return "VK_FORMAT_ASTC_5x5_SRGB_BLOCK"; + case VK_FORMAT_ASTC_6x5_UNORM_BLOCK: + return "VK_FORMAT_ASTC_6x5_UNORM_BLOCK"; + case VK_FORMAT_ASTC_6x5_SRGB_BLOCK: + return "VK_FORMAT_ASTC_6x5_SRGB_BLOCK"; + case VK_FORMAT_ASTC_6x6_UNORM_BLOCK: + return "VK_FORMAT_ASTC_6x6_UNORM_BLOCK"; + case VK_FORMAT_ASTC_6x6_SRGB_BLOCK: + return "VK_FORMAT_ASTC_6x6_SRGB_BLOCK"; + case VK_FORMAT_ASTC_8x5_UNORM_BLOCK: + return "VK_FORMAT_ASTC_8x5_UNORM_BLOCK"; + case VK_FORMAT_ASTC_8x5_SRGB_BLOCK: + return "VK_FORMAT_ASTC_8x5_SRGB_BLOCK"; + case VK_FORMAT_ASTC_8x6_UNORM_BLOCK: + return "VK_FORMAT_ASTC_8x6_UNORM_BLOCK"; + case 
VK_FORMAT_ASTC_8x6_SRGB_BLOCK: + return "VK_FORMAT_ASTC_8x6_SRGB_BLOCK"; + case VK_FORMAT_ASTC_8x8_UNORM_BLOCK: + return "VK_FORMAT_ASTC_8x8_UNORM_BLOCK"; + case VK_FORMAT_ASTC_8x8_SRGB_BLOCK: + return "VK_FORMAT_ASTC_8x8_SRGB_BLOCK"; + case VK_FORMAT_ASTC_10x5_UNORM_BLOCK: + return "VK_FORMAT_ASTC_10x5_UNORM_BLOCK"; + case VK_FORMAT_ASTC_10x5_SRGB_BLOCK: + return "VK_FORMAT_ASTC_10x5_SRGB_BLOCK"; + case VK_FORMAT_ASTC_10x6_UNORM_BLOCK: + return "VK_FORMAT_ASTC_10x6_UNORM_BLOCK"; + case VK_FORMAT_ASTC_10x6_SRGB_BLOCK: + return "VK_FORMAT_ASTC_10x6_SRGB_BLOCK"; + case VK_FORMAT_ASTC_10x8_UNORM_BLOCK: + return "VK_FORMAT_ASTC_10x8_UNORM_BLOCK"; + case VK_FORMAT_ASTC_10x8_SRGB_BLOCK: + return "VK_FORMAT_ASTC_10x8_SRGB_BLOCK"; + case VK_FORMAT_ASTC_10x10_UNORM_BLOCK: + return "VK_FORMAT_ASTC_10x10_UNORM_BLOCK"; + case VK_FORMAT_ASTC_10x10_SRGB_BLOCK: + return "VK_FORMAT_ASTC_10x10_SRGB_BLOCK"; + case VK_FORMAT_ASTC_12x10_UNORM_BLOCK: + return "VK_FORMAT_ASTC_12x10_UNORM_BLOCK"; + case VK_FORMAT_ASTC_12x10_SRGB_BLOCK: + return "VK_FORMAT_ASTC_12x10_SRGB_BLOCK"; + case VK_FORMAT_ASTC_12x12_UNORM_BLOCK: + return "VK_FORMAT_ASTC_12x12_UNORM_BLOCK"; + case VK_FORMAT_ASTC_12x12_SRGB_BLOCK: + return "VK_FORMAT_ASTC_12x12_SRGB_BLOCK"; + case VK_FORMAT_G8B8G8R8_422_UNORM: + return "VK_FORMAT_G8B8G8R8_422_UNORM"; + case VK_FORMAT_B8G8R8G8_422_UNORM: + return "VK_FORMAT_B8G8R8G8_422_UNORM"; + case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM: + return "VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM"; + case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM: + return "VK_FORMAT_G8_B8R8_2PLANE_420_UNORM"; + case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM: + return "VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM"; + case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM: + return "VK_FORMAT_G8_B8R8_2PLANE_422_UNORM"; + case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM: + return "VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM"; + case VK_FORMAT_R10X6_UNORM_PACK16: + return "VK_FORMAT_R10X6_UNORM_PACK16"; + case VK_FORMAT_R10X6G10X6_UNORM_2PACK16: + return "VK_FORMAT_R10X6G10X6_UNORM_2PACK16"; + case VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16: + return "VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16"; + case VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16: + return "VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16"; + case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16: + return "VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16"; + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16: + return "VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16"; + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16: + return "VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16"; + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16: + return "VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16"; + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16: + return "VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16"; + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16: + return "VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16"; + case VK_FORMAT_R12X4_UNORM_PACK16: + return "VK_FORMAT_R12X4_UNORM_PACK16"; + case VK_FORMAT_R12X4G12X4_UNORM_2PACK16: + return "VK_FORMAT_R12X4G12X4_UNORM_2PACK16"; + case VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16: + return "VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16"; + case VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16: + return "VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16"; + case VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16: + return "VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16"; + case 
VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16: + return "VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16"; + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16: + return "VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16"; + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16: + return "VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16"; + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16: + return "VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16"; + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16: + return "VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16"; + case VK_FORMAT_G16B16G16R16_422_UNORM: + return "VK_FORMAT_G16B16G16R16_422_UNORM"; + case VK_FORMAT_B16G16R16G16_422_UNORM: + return "VK_FORMAT_B16G16R16G16_422_UNORM"; + case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM: + return "VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM"; + case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM: + return "VK_FORMAT_G16_B16R16_2PLANE_420_UNORM"; + case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM: + return "VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM"; + case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM: + return "VK_FORMAT_G16_B16R16_2PLANE_422_UNORM"; + case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM: + return "VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM"; + case VK_FORMAT_G8_B8R8_2PLANE_444_UNORM: + return "VK_FORMAT_G8_B8R8_2PLANE_444_UNORM"; + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16: + return "VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16"; + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16: + return "VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16"; + case VK_FORMAT_G16_B16R16_2PLANE_444_UNORM: + return "VK_FORMAT_G16_B16R16_2PLANE_444_UNORM"; + case VK_FORMAT_A4R4G4B4_UNORM_PACK16: + return "VK_FORMAT_A4R4G4B4_UNORM_PACK16"; + case VK_FORMAT_A4B4G4R4_UNORM_PACK16: + return "VK_FORMAT_A4B4G4R4_UNORM_PACK16"; + case VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK"; + case VK_FORMAT_A1B5G5R5_UNORM_PACK16: + return "VK_FORMAT_A1B5G5R5_UNORM_PACK16"; + case VK_FORMAT_A8_UNORM: + return "VK_FORMAT_A8_UNORM"; + case VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG: + return "VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG"; + case VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG: + return "VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG"; + case VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG: + return "VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG"; + case VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG: + 
return "VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG"; + case VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG: + return "VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG"; + case VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG: + return "VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG"; + case VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG: + return "VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG"; + case VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG: + return "VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG"; + case VK_FORMAT_R16G16_SFIXED5_NV: + return "VK_FORMAT_R16G16_SFIXED5_NV"; + default: + return "Unhandled VkFormat"; + } +} +static inline const char* string_VkImageTiling(VkImageTiling input_value) { + switch (input_value) { + case VK_IMAGE_TILING_OPTIMAL: + return "VK_IMAGE_TILING_OPTIMAL"; + case VK_IMAGE_TILING_LINEAR: + return "VK_IMAGE_TILING_LINEAR"; + case VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT: + return "VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT"; + default: + return "Unhandled VkImageTiling"; + } +} +static inline const char* string_VkImageType(VkImageType input_value) { + switch (input_value) { + case VK_IMAGE_TYPE_1D: + return "VK_IMAGE_TYPE_1D"; + case VK_IMAGE_TYPE_2D: + return "VK_IMAGE_TYPE_2D"; + case VK_IMAGE_TYPE_3D: + return "VK_IMAGE_TYPE_3D"; + default: + return "Unhandled VkImageType"; + } +} +static inline const char* string_VkPhysicalDeviceType(VkPhysicalDeviceType input_value) { + switch (input_value) { + case VK_PHYSICAL_DEVICE_TYPE_OTHER: + return "VK_PHYSICAL_DEVICE_TYPE_OTHER"; + case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU: + return "VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU"; + case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU: + return "VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU"; + case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU: + return "VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU"; + case VK_PHYSICAL_DEVICE_TYPE_CPU: + return "VK_PHYSICAL_DEVICE_TYPE_CPU"; + default: + return "Unhandled VkPhysicalDeviceType"; + } +} +static inline const char* string_VkQueryType(VkQueryType input_value) { + switch (input_value) { + case VK_QUERY_TYPE_OCCLUSION: + return "VK_QUERY_TYPE_OCCLUSION"; + case VK_QUERY_TYPE_PIPELINE_STATISTICS: + return "VK_QUERY_TYPE_PIPELINE_STATISTICS"; + case VK_QUERY_TYPE_TIMESTAMP: + return "VK_QUERY_TYPE_TIMESTAMP"; + case VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR: + return "VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR"; + case VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT: + return "VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT"; + case VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR: + return "VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR"; + case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR: + return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR"; + case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR: + return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR"; + case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV: + return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV"; + case VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL: + return "VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL"; + case VK_QUERY_TYPE_VIDEO_ENCODE_FEEDBACK_KHR: + return "VK_QUERY_TYPE_VIDEO_ENCODE_FEEDBACK_KHR"; + case VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT: + return "VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT"; + case VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT: + return "VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT"; + case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR: + return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR"; + case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR: + return 
"VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR"; + case VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT: + return "VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT"; + case VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT: + return "VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT"; + default: + return "Unhandled VkQueryType"; + } +} +static inline const char* string_VkSharingMode(VkSharingMode input_value) { + switch (input_value) { + case VK_SHARING_MODE_EXCLUSIVE: + return "VK_SHARING_MODE_EXCLUSIVE"; + case VK_SHARING_MODE_CONCURRENT: + return "VK_SHARING_MODE_CONCURRENT"; + default: + return "Unhandled VkSharingMode"; + } +} +static inline const char* string_VkComponentSwizzle(VkComponentSwizzle input_value) { + switch (input_value) { + case VK_COMPONENT_SWIZZLE_IDENTITY: + return "VK_COMPONENT_SWIZZLE_IDENTITY"; + case VK_COMPONENT_SWIZZLE_ZERO: + return "VK_COMPONENT_SWIZZLE_ZERO"; + case VK_COMPONENT_SWIZZLE_ONE: + return "VK_COMPONENT_SWIZZLE_ONE"; + case VK_COMPONENT_SWIZZLE_R: + return "VK_COMPONENT_SWIZZLE_R"; + case VK_COMPONENT_SWIZZLE_G: + return "VK_COMPONENT_SWIZZLE_G"; + case VK_COMPONENT_SWIZZLE_B: + return "VK_COMPONENT_SWIZZLE_B"; + case VK_COMPONENT_SWIZZLE_A: + return "VK_COMPONENT_SWIZZLE_A"; + default: + return "Unhandled VkComponentSwizzle"; + } +} +static inline const char* string_VkImageViewType(VkImageViewType input_value) { + switch (input_value) { + case VK_IMAGE_VIEW_TYPE_1D: + return "VK_IMAGE_VIEW_TYPE_1D"; + case VK_IMAGE_VIEW_TYPE_2D: + return "VK_IMAGE_VIEW_TYPE_2D"; + case VK_IMAGE_VIEW_TYPE_3D: + return "VK_IMAGE_VIEW_TYPE_3D"; + case VK_IMAGE_VIEW_TYPE_CUBE: + return "VK_IMAGE_VIEW_TYPE_CUBE"; + case VK_IMAGE_VIEW_TYPE_1D_ARRAY: + return "VK_IMAGE_VIEW_TYPE_1D_ARRAY"; + case VK_IMAGE_VIEW_TYPE_2D_ARRAY: + return "VK_IMAGE_VIEW_TYPE_2D_ARRAY"; + case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY: + return "VK_IMAGE_VIEW_TYPE_CUBE_ARRAY"; + default: + return "Unhandled VkImageViewType"; + } +} +static inline const char* string_VkBlendFactor(VkBlendFactor input_value) { + switch (input_value) { + case VK_BLEND_FACTOR_ZERO: + return "VK_BLEND_FACTOR_ZERO"; + case VK_BLEND_FACTOR_ONE: + return "VK_BLEND_FACTOR_ONE"; + case VK_BLEND_FACTOR_SRC_COLOR: + return "VK_BLEND_FACTOR_SRC_COLOR"; + case VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR: + return "VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR"; + case VK_BLEND_FACTOR_DST_COLOR: + return "VK_BLEND_FACTOR_DST_COLOR"; + case VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR: + return "VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR"; + case VK_BLEND_FACTOR_SRC_ALPHA: + return "VK_BLEND_FACTOR_SRC_ALPHA"; + case VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA: + return "VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA"; + case VK_BLEND_FACTOR_DST_ALPHA: + return "VK_BLEND_FACTOR_DST_ALPHA"; + case VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA: + return "VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA"; + case VK_BLEND_FACTOR_CONSTANT_COLOR: + return "VK_BLEND_FACTOR_CONSTANT_COLOR"; + case VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR: + return "VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR"; + case VK_BLEND_FACTOR_CONSTANT_ALPHA: + return "VK_BLEND_FACTOR_CONSTANT_ALPHA"; + case VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA: + return "VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA"; + case VK_BLEND_FACTOR_SRC_ALPHA_SATURATE: + return "VK_BLEND_FACTOR_SRC_ALPHA_SATURATE"; + case VK_BLEND_FACTOR_SRC1_COLOR: + return "VK_BLEND_FACTOR_SRC1_COLOR"; + case VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR: + return "VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR"; + case VK_BLEND_FACTOR_SRC1_ALPHA: + return "VK_BLEND_FACTOR_SRC1_ALPHA"; + case VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA: 
+ return "VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA"; + default: + return "Unhandled VkBlendFactor"; + } +} +static inline const char* string_VkBlendOp(VkBlendOp input_value) { + switch (input_value) { + case VK_BLEND_OP_ADD: + return "VK_BLEND_OP_ADD"; + case VK_BLEND_OP_SUBTRACT: + return "VK_BLEND_OP_SUBTRACT"; + case VK_BLEND_OP_REVERSE_SUBTRACT: + return "VK_BLEND_OP_REVERSE_SUBTRACT"; + case VK_BLEND_OP_MIN: + return "VK_BLEND_OP_MIN"; + case VK_BLEND_OP_MAX: + return "VK_BLEND_OP_MAX"; + case VK_BLEND_OP_ZERO_EXT: + return "VK_BLEND_OP_ZERO_EXT"; + case VK_BLEND_OP_SRC_EXT: + return "VK_BLEND_OP_SRC_EXT"; + case VK_BLEND_OP_DST_EXT: + return "VK_BLEND_OP_DST_EXT"; + case VK_BLEND_OP_SRC_OVER_EXT: + return "VK_BLEND_OP_SRC_OVER_EXT"; + case VK_BLEND_OP_DST_OVER_EXT: + return "VK_BLEND_OP_DST_OVER_EXT"; + case VK_BLEND_OP_SRC_IN_EXT: + return "VK_BLEND_OP_SRC_IN_EXT"; + case VK_BLEND_OP_DST_IN_EXT: + return "VK_BLEND_OP_DST_IN_EXT"; + case VK_BLEND_OP_SRC_OUT_EXT: + return "VK_BLEND_OP_SRC_OUT_EXT"; + case VK_BLEND_OP_DST_OUT_EXT: + return "VK_BLEND_OP_DST_OUT_EXT"; + case VK_BLEND_OP_SRC_ATOP_EXT: + return "VK_BLEND_OP_SRC_ATOP_EXT"; + case VK_BLEND_OP_DST_ATOP_EXT: + return "VK_BLEND_OP_DST_ATOP_EXT"; + case VK_BLEND_OP_XOR_EXT: + return "VK_BLEND_OP_XOR_EXT"; + case VK_BLEND_OP_MULTIPLY_EXT: + return "VK_BLEND_OP_MULTIPLY_EXT"; + case VK_BLEND_OP_SCREEN_EXT: + return "VK_BLEND_OP_SCREEN_EXT"; + case VK_BLEND_OP_OVERLAY_EXT: + return "VK_BLEND_OP_OVERLAY_EXT"; + case VK_BLEND_OP_DARKEN_EXT: + return "VK_BLEND_OP_DARKEN_EXT"; + case VK_BLEND_OP_LIGHTEN_EXT: + return "VK_BLEND_OP_LIGHTEN_EXT"; + case VK_BLEND_OP_COLORDODGE_EXT: + return "VK_BLEND_OP_COLORDODGE_EXT"; + case VK_BLEND_OP_COLORBURN_EXT: + return "VK_BLEND_OP_COLORBURN_EXT"; + case VK_BLEND_OP_HARDLIGHT_EXT: + return "VK_BLEND_OP_HARDLIGHT_EXT"; + case VK_BLEND_OP_SOFTLIGHT_EXT: + return "VK_BLEND_OP_SOFTLIGHT_EXT"; + case VK_BLEND_OP_DIFFERENCE_EXT: + return "VK_BLEND_OP_DIFFERENCE_EXT"; + case VK_BLEND_OP_EXCLUSION_EXT: + return "VK_BLEND_OP_EXCLUSION_EXT"; + case VK_BLEND_OP_INVERT_EXT: + return "VK_BLEND_OP_INVERT_EXT"; + case VK_BLEND_OP_INVERT_RGB_EXT: + return "VK_BLEND_OP_INVERT_RGB_EXT"; + case VK_BLEND_OP_LINEARDODGE_EXT: + return "VK_BLEND_OP_LINEARDODGE_EXT"; + case VK_BLEND_OP_LINEARBURN_EXT: + return "VK_BLEND_OP_LINEARBURN_EXT"; + case VK_BLEND_OP_VIVIDLIGHT_EXT: + return "VK_BLEND_OP_VIVIDLIGHT_EXT"; + case VK_BLEND_OP_LINEARLIGHT_EXT: + return "VK_BLEND_OP_LINEARLIGHT_EXT"; + case VK_BLEND_OP_PINLIGHT_EXT: + return "VK_BLEND_OP_PINLIGHT_EXT"; + case VK_BLEND_OP_HARDMIX_EXT: + return "VK_BLEND_OP_HARDMIX_EXT"; + case VK_BLEND_OP_HSL_HUE_EXT: + return "VK_BLEND_OP_HSL_HUE_EXT"; + case VK_BLEND_OP_HSL_SATURATION_EXT: + return "VK_BLEND_OP_HSL_SATURATION_EXT"; + case VK_BLEND_OP_HSL_COLOR_EXT: + return "VK_BLEND_OP_HSL_COLOR_EXT"; + case VK_BLEND_OP_HSL_LUMINOSITY_EXT: + return "VK_BLEND_OP_HSL_LUMINOSITY_EXT"; + case VK_BLEND_OP_PLUS_EXT: + return "VK_BLEND_OP_PLUS_EXT"; + case VK_BLEND_OP_PLUS_CLAMPED_EXT: + return "VK_BLEND_OP_PLUS_CLAMPED_EXT"; + case VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT: + return "VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT"; + case VK_BLEND_OP_PLUS_DARKER_EXT: + return "VK_BLEND_OP_PLUS_DARKER_EXT"; + case VK_BLEND_OP_MINUS_EXT: + return "VK_BLEND_OP_MINUS_EXT"; + case VK_BLEND_OP_MINUS_CLAMPED_EXT: + return "VK_BLEND_OP_MINUS_CLAMPED_EXT"; + case VK_BLEND_OP_CONTRAST_EXT: + return "VK_BLEND_OP_CONTRAST_EXT"; + case VK_BLEND_OP_INVERT_OVG_EXT: + return "VK_BLEND_OP_INVERT_OVG_EXT"; + case 
VK_BLEND_OP_RED_EXT: + return "VK_BLEND_OP_RED_EXT"; + case VK_BLEND_OP_GREEN_EXT: + return "VK_BLEND_OP_GREEN_EXT"; + case VK_BLEND_OP_BLUE_EXT: + return "VK_BLEND_OP_BLUE_EXT"; + default: + return "Unhandled VkBlendOp"; + } +} +static inline const char* string_VkCompareOp(VkCompareOp input_value) { + switch (input_value) { + case VK_COMPARE_OP_NEVER: + return "VK_COMPARE_OP_NEVER"; + case VK_COMPARE_OP_LESS: + return "VK_COMPARE_OP_LESS"; + case VK_COMPARE_OP_EQUAL: + return "VK_COMPARE_OP_EQUAL"; + case VK_COMPARE_OP_LESS_OR_EQUAL: + return "VK_COMPARE_OP_LESS_OR_EQUAL"; + case VK_COMPARE_OP_GREATER: + return "VK_COMPARE_OP_GREATER"; + case VK_COMPARE_OP_NOT_EQUAL: + return "VK_COMPARE_OP_NOT_EQUAL"; + case VK_COMPARE_OP_GREATER_OR_EQUAL: + return "VK_COMPARE_OP_GREATER_OR_EQUAL"; + case VK_COMPARE_OP_ALWAYS: + return "VK_COMPARE_OP_ALWAYS"; + default: + return "Unhandled VkCompareOp"; + } +} +static inline const char* string_VkDynamicState(VkDynamicState input_value) { + switch (input_value) { + case VK_DYNAMIC_STATE_VIEWPORT: + return "VK_DYNAMIC_STATE_VIEWPORT"; + case VK_DYNAMIC_STATE_SCISSOR: + return "VK_DYNAMIC_STATE_SCISSOR"; + case VK_DYNAMIC_STATE_LINE_WIDTH: + return "VK_DYNAMIC_STATE_LINE_WIDTH"; + case VK_DYNAMIC_STATE_DEPTH_BIAS: + return "VK_DYNAMIC_STATE_DEPTH_BIAS"; + case VK_DYNAMIC_STATE_BLEND_CONSTANTS: + return "VK_DYNAMIC_STATE_BLEND_CONSTANTS"; + case VK_DYNAMIC_STATE_DEPTH_BOUNDS: + return "VK_DYNAMIC_STATE_DEPTH_BOUNDS"; + case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK: + return "VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK"; + case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK: + return "VK_DYNAMIC_STATE_STENCIL_WRITE_MASK"; + case VK_DYNAMIC_STATE_STENCIL_REFERENCE: + return "VK_DYNAMIC_STATE_STENCIL_REFERENCE"; + case VK_DYNAMIC_STATE_CULL_MODE: + return "VK_DYNAMIC_STATE_CULL_MODE"; + case VK_DYNAMIC_STATE_FRONT_FACE: + return "VK_DYNAMIC_STATE_FRONT_FACE"; + case VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY: + return "VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY"; + case VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT: + return "VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT"; + case VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT: + return "VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT"; + case VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE: + return "VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE"; + case VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE: + return "VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE"; + case VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE: + return "VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE"; + case VK_DYNAMIC_STATE_DEPTH_COMPARE_OP: + return "VK_DYNAMIC_STATE_DEPTH_COMPARE_OP"; + case VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE: + return "VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE"; + case VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE: + return "VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE"; + case VK_DYNAMIC_STATE_STENCIL_OP: + return "VK_DYNAMIC_STATE_STENCIL_OP"; + case VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE: + return "VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE"; + case VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE: + return "VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE"; + case VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE: + return "VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE"; + case VK_DYNAMIC_STATE_LINE_STIPPLE: + return "VK_DYNAMIC_STATE_LINE_STIPPLE"; + case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV: + return "VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV"; + case VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT: + return "VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT"; + case VK_DYNAMIC_STATE_DISCARD_RECTANGLE_ENABLE_EXT: + return "VK_DYNAMIC_STATE_DISCARD_RECTANGLE_ENABLE_EXT"; + case 
VK_DYNAMIC_STATE_DISCARD_RECTANGLE_MODE_EXT: + return "VK_DYNAMIC_STATE_DISCARD_RECTANGLE_MODE_EXT"; + case VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT: + return "VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT"; + case VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR: + return "VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR"; + case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV: + return "VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV"; + case VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV: + return "VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV"; + case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_ENABLE_NV: + return "VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_ENABLE_NV"; + case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV: + return "VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV"; + case VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR: + return "VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR"; + case VK_DYNAMIC_STATE_VERTEX_INPUT_EXT: + return "VK_DYNAMIC_STATE_VERTEX_INPUT_EXT"; + case VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT: + return "VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT"; + case VK_DYNAMIC_STATE_LOGIC_OP_EXT: + return "VK_DYNAMIC_STATE_LOGIC_OP_EXT"; + case VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT: + return "VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT"; + case VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT: + return "VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT"; + case VK_DYNAMIC_STATE_POLYGON_MODE_EXT: + return "VK_DYNAMIC_STATE_POLYGON_MODE_EXT"; + case VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT: + return "VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT"; + case VK_DYNAMIC_STATE_SAMPLE_MASK_EXT: + return "VK_DYNAMIC_STATE_SAMPLE_MASK_EXT"; + case VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT: + return "VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT"; + case VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT: + return "VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT"; + case VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT: + return "VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT"; + case VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT: + return "VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT"; + case VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT: + return "VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT"; + case VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT: + return "VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT"; + case VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT: + return "VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT"; + case VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT: + return "VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT"; + case VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT: + return "VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT"; + case VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT: + return "VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT"; + case VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT: + return "VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT"; + case VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT: + return "VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT"; + case VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT: + return "VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT"; + case VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT: + return "VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT"; + case VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT: + return "VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT"; + case VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT: + return "VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT"; + case VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT: + return "VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT"; + case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV: + return 
"VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV"; + case VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV: + return "VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV"; + case VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV: + return "VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV"; + case VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV: + return "VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV"; + case VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV: + return "VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV"; + case VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV: + return "VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV"; + case VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV: + return "VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV"; + case VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV: + return "VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV"; + case VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV: + return "VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV"; + case VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV: + return "VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV"; + case VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT: + return "VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT"; + case VK_DYNAMIC_STATE_DEPTH_CLAMP_RANGE_EXT: + return "VK_DYNAMIC_STATE_DEPTH_CLAMP_RANGE_EXT"; + default: + return "Unhandled VkDynamicState"; + } +} +static inline const char* string_VkFrontFace(VkFrontFace input_value) { + switch (input_value) { + case VK_FRONT_FACE_COUNTER_CLOCKWISE: + return "VK_FRONT_FACE_COUNTER_CLOCKWISE"; + case VK_FRONT_FACE_CLOCKWISE: + return "VK_FRONT_FACE_CLOCKWISE"; + default: + return "Unhandled VkFrontFace"; + } +} +static inline const char* string_VkVertexInputRate(VkVertexInputRate input_value) { + switch (input_value) { + case VK_VERTEX_INPUT_RATE_VERTEX: + return "VK_VERTEX_INPUT_RATE_VERTEX"; + case VK_VERTEX_INPUT_RATE_INSTANCE: + return "VK_VERTEX_INPUT_RATE_INSTANCE"; + default: + return "Unhandled VkVertexInputRate"; + } +} +static inline const char* string_VkPrimitiveTopology(VkPrimitiveTopology input_value) { + switch (input_value) { + case VK_PRIMITIVE_TOPOLOGY_POINT_LIST: + return "VK_PRIMITIVE_TOPOLOGY_POINT_LIST"; + case VK_PRIMITIVE_TOPOLOGY_LINE_LIST: + return "VK_PRIMITIVE_TOPOLOGY_LINE_LIST"; + case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP: + return "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP"; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST: + return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST"; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP: + return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP"; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN: + return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN"; + case VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY: + return "VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY"; + case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY: + return "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY"; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY: + return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY"; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY: + return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY"; + case VK_PRIMITIVE_TOPOLOGY_PATCH_LIST: + return "VK_PRIMITIVE_TOPOLOGY_PATCH_LIST"; + default: + return "Unhandled VkPrimitiveTopology"; + } +} +static inline const char* string_VkPolygonMode(VkPolygonMode input_value) { + switch (input_value) { + case VK_POLYGON_MODE_FILL: + return "VK_POLYGON_MODE_FILL"; + case VK_POLYGON_MODE_LINE: + return "VK_POLYGON_MODE_LINE"; + case VK_POLYGON_MODE_POINT: + return "VK_POLYGON_MODE_POINT"; + case 
VK_POLYGON_MODE_FILL_RECTANGLE_NV: + return "VK_POLYGON_MODE_FILL_RECTANGLE_NV"; + default: + return "Unhandled VkPolygonMode"; + } +} +static inline const char* string_VkStencilOp(VkStencilOp input_value) { + switch (input_value) { + case VK_STENCIL_OP_KEEP: + return "VK_STENCIL_OP_KEEP"; + case VK_STENCIL_OP_ZERO: + return "VK_STENCIL_OP_ZERO"; + case VK_STENCIL_OP_REPLACE: + return "VK_STENCIL_OP_REPLACE"; + case VK_STENCIL_OP_INCREMENT_AND_CLAMP: + return "VK_STENCIL_OP_INCREMENT_AND_CLAMP"; + case VK_STENCIL_OP_DECREMENT_AND_CLAMP: + return "VK_STENCIL_OP_DECREMENT_AND_CLAMP"; + case VK_STENCIL_OP_INVERT: + return "VK_STENCIL_OP_INVERT"; + case VK_STENCIL_OP_INCREMENT_AND_WRAP: + return "VK_STENCIL_OP_INCREMENT_AND_WRAP"; + case VK_STENCIL_OP_DECREMENT_AND_WRAP: + return "VK_STENCIL_OP_DECREMENT_AND_WRAP"; + default: + return "Unhandled VkStencilOp"; + } +} +static inline const char* string_VkLogicOp(VkLogicOp input_value) { + switch (input_value) { + case VK_LOGIC_OP_CLEAR: + return "VK_LOGIC_OP_CLEAR"; + case VK_LOGIC_OP_AND: + return "VK_LOGIC_OP_AND"; + case VK_LOGIC_OP_AND_REVERSE: + return "VK_LOGIC_OP_AND_REVERSE"; + case VK_LOGIC_OP_COPY: + return "VK_LOGIC_OP_COPY"; + case VK_LOGIC_OP_AND_INVERTED: + return "VK_LOGIC_OP_AND_INVERTED"; + case VK_LOGIC_OP_NO_OP: + return "VK_LOGIC_OP_NO_OP"; + case VK_LOGIC_OP_XOR: + return "VK_LOGIC_OP_XOR"; + case VK_LOGIC_OP_OR: + return "VK_LOGIC_OP_OR"; + case VK_LOGIC_OP_NOR: + return "VK_LOGIC_OP_NOR"; + case VK_LOGIC_OP_EQUIVALENT: + return "VK_LOGIC_OP_EQUIVALENT"; + case VK_LOGIC_OP_INVERT: + return "VK_LOGIC_OP_INVERT"; + case VK_LOGIC_OP_OR_REVERSE: + return "VK_LOGIC_OP_OR_REVERSE"; + case VK_LOGIC_OP_COPY_INVERTED: + return "VK_LOGIC_OP_COPY_INVERTED"; + case VK_LOGIC_OP_OR_INVERTED: + return "VK_LOGIC_OP_OR_INVERTED"; + case VK_LOGIC_OP_NAND: + return "VK_LOGIC_OP_NAND"; + case VK_LOGIC_OP_SET: + return "VK_LOGIC_OP_SET"; + default: + return "Unhandled VkLogicOp"; + } +} +static inline const char* string_VkBorderColor(VkBorderColor input_value) { + switch (input_value) { + case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK: + return "VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK"; + case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK: + return "VK_BORDER_COLOR_INT_TRANSPARENT_BLACK"; + case VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK: + return "VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK"; + case VK_BORDER_COLOR_INT_OPAQUE_BLACK: + return "VK_BORDER_COLOR_INT_OPAQUE_BLACK"; + case VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE: + return "VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE"; + case VK_BORDER_COLOR_INT_OPAQUE_WHITE: + return "VK_BORDER_COLOR_INT_OPAQUE_WHITE"; + case VK_BORDER_COLOR_FLOAT_CUSTOM_EXT: + return "VK_BORDER_COLOR_FLOAT_CUSTOM_EXT"; + case VK_BORDER_COLOR_INT_CUSTOM_EXT: + return "VK_BORDER_COLOR_INT_CUSTOM_EXT"; + default: + return "Unhandled VkBorderColor"; + } +} +static inline const char* string_VkFilter(VkFilter input_value) { + switch (input_value) { + case VK_FILTER_NEAREST: + return "VK_FILTER_NEAREST"; + case VK_FILTER_LINEAR: + return "VK_FILTER_LINEAR"; + case VK_FILTER_CUBIC_EXT: + return "VK_FILTER_CUBIC_EXT"; + default: + return "Unhandled VkFilter"; + } +} +static inline const char* string_VkSamplerAddressMode(VkSamplerAddressMode input_value) { + switch (input_value) { + case VK_SAMPLER_ADDRESS_MODE_REPEAT: + return "VK_SAMPLER_ADDRESS_MODE_REPEAT"; + case VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT: + return "VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT"; + case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE: + return "VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE"; + case 
VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER: + return "VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER"; + case VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE: + return "VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE"; + default: + return "Unhandled VkSamplerAddressMode"; + } +} +static inline const char* string_VkSamplerMipmapMode(VkSamplerMipmapMode input_value) { + switch (input_value) { + case VK_SAMPLER_MIPMAP_MODE_NEAREST: + return "VK_SAMPLER_MIPMAP_MODE_NEAREST"; + case VK_SAMPLER_MIPMAP_MODE_LINEAR: + return "VK_SAMPLER_MIPMAP_MODE_LINEAR"; + default: + return "Unhandled VkSamplerMipmapMode"; + } +} +static inline const char* string_VkDescriptorType(VkDescriptorType input_value) { + switch (input_value) { + case VK_DESCRIPTOR_TYPE_SAMPLER: + return "VK_DESCRIPTOR_TYPE_SAMPLER"; + case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: + return "VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER"; + case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: + return "VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE"; + case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: + return "VK_DESCRIPTOR_TYPE_STORAGE_IMAGE"; + case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: + return "VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER"; + case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: + return "VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER"; + case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: + return "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER"; + case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: + return "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER"; + case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: + return "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC"; + case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: + return "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC"; + case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: + return "VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT"; + case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK: + return "VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK"; + case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: + return "VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR"; + case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV: + return "VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV"; + case VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM: + return "VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM"; + case VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM: + return "VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM"; + case VK_DESCRIPTOR_TYPE_MUTABLE_EXT: + return "VK_DESCRIPTOR_TYPE_MUTABLE_EXT"; + case VK_DESCRIPTOR_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_NV: + return "VK_DESCRIPTOR_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_NV"; + default: + return "Unhandled VkDescriptorType"; + } +} +static inline const char* string_VkAttachmentLoadOp(VkAttachmentLoadOp input_value) { + switch (input_value) { + case VK_ATTACHMENT_LOAD_OP_LOAD: + return "VK_ATTACHMENT_LOAD_OP_LOAD"; + case VK_ATTACHMENT_LOAD_OP_CLEAR: + return "VK_ATTACHMENT_LOAD_OP_CLEAR"; + case VK_ATTACHMENT_LOAD_OP_DONT_CARE: + return "VK_ATTACHMENT_LOAD_OP_DONT_CARE"; + case VK_ATTACHMENT_LOAD_OP_NONE: + return "VK_ATTACHMENT_LOAD_OP_NONE"; + default: + return "Unhandled VkAttachmentLoadOp"; + } +} +static inline const char* string_VkAttachmentStoreOp(VkAttachmentStoreOp input_value) { + switch (input_value) { + case VK_ATTACHMENT_STORE_OP_STORE: + return "VK_ATTACHMENT_STORE_OP_STORE"; + case VK_ATTACHMENT_STORE_OP_DONT_CARE: + return "VK_ATTACHMENT_STORE_OP_DONT_CARE"; + case VK_ATTACHMENT_STORE_OP_NONE: + return "VK_ATTACHMENT_STORE_OP_NONE"; + default: + return "Unhandled VkAttachmentStoreOp"; + } +} +static inline const char* string_VkPipelineBindPoint(VkPipelineBindPoint input_value) { + switch (input_value) { + case 
VK_PIPELINE_BIND_POINT_GRAPHICS: + return "VK_PIPELINE_BIND_POINT_GRAPHICS"; + case VK_PIPELINE_BIND_POINT_COMPUTE: + return "VK_PIPELINE_BIND_POINT_COMPUTE"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + case VK_PIPELINE_BIND_POINT_EXECUTION_GRAPH_AMDX: + return "VK_PIPELINE_BIND_POINT_EXECUTION_GRAPH_AMDX"; +#endif // VK_ENABLE_BETA_EXTENSIONS + case VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR: + return "VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR"; + case VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI: + return "VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI"; + default: + return "Unhandled VkPipelineBindPoint"; + } +} +static inline const char* string_VkCommandBufferLevel(VkCommandBufferLevel input_value) { + switch (input_value) { + case VK_COMMAND_BUFFER_LEVEL_PRIMARY: + return "VK_COMMAND_BUFFER_LEVEL_PRIMARY"; + case VK_COMMAND_BUFFER_LEVEL_SECONDARY: + return "VK_COMMAND_BUFFER_LEVEL_SECONDARY"; + default: + return "Unhandled VkCommandBufferLevel"; + } +} +static inline const char* string_VkIndexType(VkIndexType input_value) { + switch (input_value) { + case VK_INDEX_TYPE_UINT16: + return "VK_INDEX_TYPE_UINT16"; + case VK_INDEX_TYPE_UINT32: + return "VK_INDEX_TYPE_UINT32"; + case VK_INDEX_TYPE_UINT8: + return "VK_INDEX_TYPE_UINT8"; + case VK_INDEX_TYPE_NONE_KHR: + return "VK_INDEX_TYPE_NONE_KHR"; + default: + return "Unhandled VkIndexType"; + } +} +static inline const char* string_VkSubpassContents(VkSubpassContents input_value) { + switch (input_value) { + case VK_SUBPASS_CONTENTS_INLINE: + return "VK_SUBPASS_CONTENTS_INLINE"; + case VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS: + return "VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS"; + case VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR: + return "VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR"; + default: + return "Unhandled VkSubpassContents"; + } +} +static inline const char* string_VkPointClippingBehavior(VkPointClippingBehavior input_value) { + switch (input_value) { + case VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES: + return "VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES"; + case VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY: + return "VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY"; + default: + return "Unhandled VkPointClippingBehavior"; + } +} +static inline const char* string_VkTessellationDomainOrigin(VkTessellationDomainOrigin input_value) { + switch (input_value) { + case VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT: + return "VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT"; + case VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT: + return "VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT"; + default: + return "Unhandled VkTessellationDomainOrigin"; + } +} +static inline const char* string_VkSamplerYcbcrModelConversion(VkSamplerYcbcrModelConversion input_value) { + switch (input_value) { + case VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY: + return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY"; + case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY: + return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY"; + case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709: + return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709"; + case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601: + return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601"; + case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020: + return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020"; + default: + return "Unhandled VkSamplerYcbcrModelConversion"; + } +} +static inline const char* string_VkSamplerYcbcrRange(VkSamplerYcbcrRange input_value) { + switch (input_value) { + case 
VK_SAMPLER_YCBCR_RANGE_ITU_FULL: + return "VK_SAMPLER_YCBCR_RANGE_ITU_FULL"; + case VK_SAMPLER_YCBCR_RANGE_ITU_NARROW: + return "VK_SAMPLER_YCBCR_RANGE_ITU_NARROW"; + default: + return "Unhandled VkSamplerYcbcrRange"; + } +} +static inline const char* string_VkChromaLocation(VkChromaLocation input_value) { + switch (input_value) { + case VK_CHROMA_LOCATION_COSITED_EVEN: + return "VK_CHROMA_LOCATION_COSITED_EVEN"; + case VK_CHROMA_LOCATION_MIDPOINT: + return "VK_CHROMA_LOCATION_MIDPOINT"; + default: + return "Unhandled VkChromaLocation"; + } +} +static inline const char* string_VkDescriptorUpdateTemplateType(VkDescriptorUpdateTemplateType input_value) { + switch (input_value) { + case VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET: + return "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET"; + case VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS: + return "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS"; + default: + return "Unhandled VkDescriptorUpdateTemplateType"; + } +} +static inline const char* string_VkDriverId(VkDriverId input_value) { + switch (input_value) { + case VK_DRIVER_ID_AMD_PROPRIETARY: + return "VK_DRIVER_ID_AMD_PROPRIETARY"; + case VK_DRIVER_ID_AMD_OPEN_SOURCE: + return "VK_DRIVER_ID_AMD_OPEN_SOURCE"; + case VK_DRIVER_ID_MESA_RADV: + return "VK_DRIVER_ID_MESA_RADV"; + case VK_DRIVER_ID_NVIDIA_PROPRIETARY: + return "VK_DRIVER_ID_NVIDIA_PROPRIETARY"; + case VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS: + return "VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS"; + case VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA: + return "VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA"; + case VK_DRIVER_ID_IMAGINATION_PROPRIETARY: + return "VK_DRIVER_ID_IMAGINATION_PROPRIETARY"; + case VK_DRIVER_ID_QUALCOMM_PROPRIETARY: + return "VK_DRIVER_ID_QUALCOMM_PROPRIETARY"; + case VK_DRIVER_ID_ARM_PROPRIETARY: + return "VK_DRIVER_ID_ARM_PROPRIETARY"; + case VK_DRIVER_ID_GOOGLE_SWIFTSHADER: + return "VK_DRIVER_ID_GOOGLE_SWIFTSHADER"; + case VK_DRIVER_ID_GGP_PROPRIETARY: + return "VK_DRIVER_ID_GGP_PROPRIETARY"; + case VK_DRIVER_ID_BROADCOM_PROPRIETARY: + return "VK_DRIVER_ID_BROADCOM_PROPRIETARY"; + case VK_DRIVER_ID_MESA_LLVMPIPE: + return "VK_DRIVER_ID_MESA_LLVMPIPE"; + case VK_DRIVER_ID_MOLTENVK: + return "VK_DRIVER_ID_MOLTENVK"; + case VK_DRIVER_ID_COREAVI_PROPRIETARY: + return "VK_DRIVER_ID_COREAVI_PROPRIETARY"; + case VK_DRIVER_ID_JUICE_PROPRIETARY: + return "VK_DRIVER_ID_JUICE_PROPRIETARY"; + case VK_DRIVER_ID_VERISILICON_PROPRIETARY: + return "VK_DRIVER_ID_VERISILICON_PROPRIETARY"; + case VK_DRIVER_ID_MESA_TURNIP: + return "VK_DRIVER_ID_MESA_TURNIP"; + case VK_DRIVER_ID_MESA_V3DV: + return "VK_DRIVER_ID_MESA_V3DV"; + case VK_DRIVER_ID_MESA_PANVK: + return "VK_DRIVER_ID_MESA_PANVK"; + case VK_DRIVER_ID_SAMSUNG_PROPRIETARY: + return "VK_DRIVER_ID_SAMSUNG_PROPRIETARY"; + case VK_DRIVER_ID_MESA_VENUS: + return "VK_DRIVER_ID_MESA_VENUS"; + case VK_DRIVER_ID_MESA_DOZEN: + return "VK_DRIVER_ID_MESA_DOZEN"; + case VK_DRIVER_ID_MESA_NVK: + return "VK_DRIVER_ID_MESA_NVK"; + case VK_DRIVER_ID_IMAGINATION_OPEN_SOURCE_MESA: + return "VK_DRIVER_ID_IMAGINATION_OPEN_SOURCE_MESA"; + case VK_DRIVER_ID_MESA_HONEYKRISP: + return "VK_DRIVER_ID_MESA_HONEYKRISP"; + case VK_DRIVER_ID_VULKAN_SC_EMULATION_ON_VULKAN: + return "VK_DRIVER_ID_VULKAN_SC_EMULATION_ON_VULKAN"; + default: + return "Unhandled VkDriverId"; + } +} +static inline const char* string_VkShaderFloatControlsIndependence(VkShaderFloatControlsIndependence input_value) { + switch (input_value) { + case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY: + return 
"VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY"; + case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL: + return "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL"; + case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE: + return "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE"; + default: + return "Unhandled VkShaderFloatControlsIndependence"; + } +} +static inline const char* string_VkSamplerReductionMode(VkSamplerReductionMode input_value) { + switch (input_value) { + case VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE: + return "VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE"; + case VK_SAMPLER_REDUCTION_MODE_MIN: + return "VK_SAMPLER_REDUCTION_MODE_MIN"; + case VK_SAMPLER_REDUCTION_MODE_MAX: + return "VK_SAMPLER_REDUCTION_MODE_MAX"; + case VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_RANGECLAMP_QCOM: + return "VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_RANGECLAMP_QCOM"; + default: + return "Unhandled VkSamplerReductionMode"; + } +} +static inline const char* string_VkSemaphoreType(VkSemaphoreType input_value) { + switch (input_value) { + case VK_SEMAPHORE_TYPE_BINARY: + return "VK_SEMAPHORE_TYPE_BINARY"; + case VK_SEMAPHORE_TYPE_TIMELINE: + return "VK_SEMAPHORE_TYPE_TIMELINE"; + default: + return "Unhandled VkSemaphoreType"; + } +} +static inline const char* string_VkPipelineRobustnessBufferBehavior(VkPipelineRobustnessBufferBehavior input_value) { + switch (input_value) { + case VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT: + return "VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT"; + case VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED: + return "VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED"; + case VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS: + return "VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS"; + case VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2: + return "VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2"; + default: + return "Unhandled VkPipelineRobustnessBufferBehavior"; + } +} +static inline const char* string_VkPipelineRobustnessImageBehavior(VkPipelineRobustnessImageBehavior input_value) { + switch (input_value) { + case VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT: + return "VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT"; + case VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED: + return "VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED"; + case VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS: + return "VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS"; + case VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2: + return "VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2"; + default: + return "Unhandled VkPipelineRobustnessImageBehavior"; + } +} +static inline const char* string_VkQueueGlobalPriority(VkQueueGlobalPriority input_value) { + switch (input_value) { + case VK_QUEUE_GLOBAL_PRIORITY_LOW: + return "VK_QUEUE_GLOBAL_PRIORITY_LOW"; + case VK_QUEUE_GLOBAL_PRIORITY_MEDIUM: + return "VK_QUEUE_GLOBAL_PRIORITY_MEDIUM"; + case VK_QUEUE_GLOBAL_PRIORITY_HIGH: + return "VK_QUEUE_GLOBAL_PRIORITY_HIGH"; + case VK_QUEUE_GLOBAL_PRIORITY_REALTIME: + return "VK_QUEUE_GLOBAL_PRIORITY_REALTIME"; + default: + return "Unhandled VkQueueGlobalPriority"; + } +} +static inline const char* string_VkLineRasterizationMode(VkLineRasterizationMode input_value) { + switch (input_value) { + case VK_LINE_RASTERIZATION_MODE_DEFAULT: + return "VK_LINE_RASTERIZATION_MODE_DEFAULT"; + case VK_LINE_RASTERIZATION_MODE_RECTANGULAR: + return "VK_LINE_RASTERIZATION_MODE_RECTANGULAR"; + case 
VK_LINE_RASTERIZATION_MODE_BRESENHAM: + return "VK_LINE_RASTERIZATION_MODE_BRESENHAM"; + case VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH: + return "VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH"; + default: + return "Unhandled VkLineRasterizationMode"; + } +} +static inline const char* string_VkPresentModeKHR(VkPresentModeKHR input_value) { + switch (input_value) { + case VK_PRESENT_MODE_IMMEDIATE_KHR: + return "VK_PRESENT_MODE_IMMEDIATE_KHR"; + case VK_PRESENT_MODE_MAILBOX_KHR: + return "VK_PRESENT_MODE_MAILBOX_KHR"; + case VK_PRESENT_MODE_FIFO_KHR: + return "VK_PRESENT_MODE_FIFO_KHR"; + case VK_PRESENT_MODE_FIFO_RELAXED_KHR: + return "VK_PRESENT_MODE_FIFO_RELAXED_KHR"; + case VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR: + return "VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR"; + case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR: + return "VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR"; + case VK_PRESENT_MODE_FIFO_LATEST_READY_EXT: + return "VK_PRESENT_MODE_FIFO_LATEST_READY_EXT"; + default: + return "Unhandled VkPresentModeKHR"; + } +} +static inline const char* string_VkColorSpaceKHR(VkColorSpaceKHR input_value) { + switch (input_value) { + case VK_COLOR_SPACE_SRGB_NONLINEAR_KHR: + return "VK_COLOR_SPACE_SRGB_NONLINEAR_KHR"; + case VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT: + return "VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT"; + case VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT: + return "VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT"; + case VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT: + return "VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT"; + case VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT: + return "VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT"; + case VK_COLOR_SPACE_BT709_LINEAR_EXT: + return "VK_COLOR_SPACE_BT709_LINEAR_EXT"; + case VK_COLOR_SPACE_BT709_NONLINEAR_EXT: + return "VK_COLOR_SPACE_BT709_NONLINEAR_EXT"; + case VK_COLOR_SPACE_BT2020_LINEAR_EXT: + return "VK_COLOR_SPACE_BT2020_LINEAR_EXT"; + case VK_COLOR_SPACE_HDR10_ST2084_EXT: + return "VK_COLOR_SPACE_HDR10_ST2084_EXT"; + case VK_COLOR_SPACE_DOLBYVISION_EXT: + return "VK_COLOR_SPACE_DOLBYVISION_EXT"; + case VK_COLOR_SPACE_HDR10_HLG_EXT: + return "VK_COLOR_SPACE_HDR10_HLG_EXT"; + case VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT: + return "VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT"; + case VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT: + return "VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT"; + case VK_COLOR_SPACE_PASS_THROUGH_EXT: + return "VK_COLOR_SPACE_PASS_THROUGH_EXT"; + case VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT: + return "VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT"; + case VK_COLOR_SPACE_DISPLAY_NATIVE_AMD: + return "VK_COLOR_SPACE_DISPLAY_NATIVE_AMD"; + default: + return "Unhandled VkColorSpaceKHR"; + } +} +static inline const char* string_VkQueryResultStatusKHR(VkQueryResultStatusKHR input_value) { + switch (input_value) { + case VK_QUERY_RESULT_STATUS_ERROR_KHR: + return "VK_QUERY_RESULT_STATUS_ERROR_KHR"; + case VK_QUERY_RESULT_STATUS_NOT_READY_KHR: + return "VK_QUERY_RESULT_STATUS_NOT_READY_KHR"; + case VK_QUERY_RESULT_STATUS_COMPLETE_KHR: + return "VK_QUERY_RESULT_STATUS_COMPLETE_KHR"; + case VK_QUERY_RESULT_STATUS_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_KHR: + return "VK_QUERY_RESULT_STATUS_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_KHR"; + default: + return "Unhandled VkQueryResultStatusKHR"; + } +} +static inline const char* string_VkPerformanceCounterUnitKHR(VkPerformanceCounterUnitKHR input_value) { + switch (input_value) { + case VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR"; + case VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR: + return 
"VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR"; + case VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR"; + case VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR"; + case VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR"; + case VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR"; + case VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR"; + case VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR"; + case VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR"; + case VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR"; + case VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR"; + default: + return "Unhandled VkPerformanceCounterUnitKHR"; + } +} +static inline const char* string_VkPerformanceCounterScopeKHR(VkPerformanceCounterScopeKHR input_value) { + switch (input_value) { + case VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR: + return "VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR"; + case VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR: + return "VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR"; + case VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR: + return "VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR"; + default: + return "Unhandled VkPerformanceCounterScopeKHR"; + } +} +static inline const char* string_VkPerformanceCounterStorageKHR(VkPerformanceCounterStorageKHR input_value) { + switch (input_value) { + case VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR: + return "VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR"; + case VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR: + return "VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR"; + case VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR: + return "VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR"; + case VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR: + return "VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR"; + case VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR: + return "VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR"; + case VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR: + return "VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR"; + default: + return "Unhandled VkPerformanceCounterStorageKHR"; + } +} +static inline const char* string_VkFragmentShadingRateCombinerOpKHR(VkFragmentShadingRateCombinerOpKHR input_value) { + switch (input_value) { + case VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR: + return "VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR"; + case VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR: + return "VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR"; + case VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR: + return "VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR"; + case VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR: + return "VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR"; + case VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR: + return "VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR"; + default: + return "Unhandled VkFragmentShadingRateCombinerOpKHR"; + } +} +static inline const char* string_VkPipelineExecutableStatisticFormatKHR(VkPipelineExecutableStatisticFormatKHR input_value) { + switch (input_value) { + case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR: + return "VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR"; + case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR: + return "VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR"; + case 
VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR: + return "VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR"; + case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR: + return "VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR"; + default: + return "Unhandled VkPipelineExecutableStatisticFormatKHR"; + } +} +static inline const char* string_VkVideoEncodeTuningModeKHR(VkVideoEncodeTuningModeKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR: + return "VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR"; + case VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR: + return "VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR"; + case VK_VIDEO_ENCODE_TUNING_MODE_LOW_LATENCY_KHR: + return "VK_VIDEO_ENCODE_TUNING_MODE_LOW_LATENCY_KHR"; + case VK_VIDEO_ENCODE_TUNING_MODE_ULTRA_LOW_LATENCY_KHR: + return "VK_VIDEO_ENCODE_TUNING_MODE_ULTRA_LOW_LATENCY_KHR"; + case VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR: + return "VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR"; + default: + return "Unhandled VkVideoEncodeTuningModeKHR"; + } +} +static inline const char* string_VkComponentTypeKHR(VkComponentTypeKHR input_value) { + switch (input_value) { + case VK_COMPONENT_TYPE_FLOAT16_KHR: + return "VK_COMPONENT_TYPE_FLOAT16_KHR"; + case VK_COMPONENT_TYPE_FLOAT32_KHR: + return "VK_COMPONENT_TYPE_FLOAT32_KHR"; + case VK_COMPONENT_TYPE_FLOAT64_KHR: + return "VK_COMPONENT_TYPE_FLOAT64_KHR"; + case VK_COMPONENT_TYPE_SINT8_KHR: + return "VK_COMPONENT_TYPE_SINT8_KHR"; + case VK_COMPONENT_TYPE_SINT16_KHR: + return "VK_COMPONENT_TYPE_SINT16_KHR"; + case VK_COMPONENT_TYPE_SINT32_KHR: + return "VK_COMPONENT_TYPE_SINT32_KHR"; + case VK_COMPONENT_TYPE_SINT64_KHR: + return "VK_COMPONENT_TYPE_SINT64_KHR"; + case VK_COMPONENT_TYPE_UINT8_KHR: + return "VK_COMPONENT_TYPE_UINT8_KHR"; + case VK_COMPONENT_TYPE_UINT16_KHR: + return "VK_COMPONENT_TYPE_UINT16_KHR"; + case VK_COMPONENT_TYPE_UINT32_KHR: + return "VK_COMPONENT_TYPE_UINT32_KHR"; + case VK_COMPONENT_TYPE_UINT64_KHR: + return "VK_COMPONENT_TYPE_UINT64_KHR"; + case VK_COMPONENT_TYPE_BFLOAT16_KHR: + return "VK_COMPONENT_TYPE_BFLOAT16_KHR"; + case VK_COMPONENT_TYPE_SINT8_PACKED_NV: + return "VK_COMPONENT_TYPE_SINT8_PACKED_NV"; + case VK_COMPONENT_TYPE_UINT8_PACKED_NV: + return "VK_COMPONENT_TYPE_UINT8_PACKED_NV"; + case VK_COMPONENT_TYPE_FLOAT_E4M3_NV: + return "VK_COMPONENT_TYPE_FLOAT_E4M3_NV"; + case VK_COMPONENT_TYPE_FLOAT_E5M2_NV: + return "VK_COMPONENT_TYPE_FLOAT_E5M2_NV"; + default: + return "Unhandled VkComponentTypeKHR"; + } +} +static inline const char* string_VkScopeKHR(VkScopeKHR input_value) { + switch (input_value) { + case VK_SCOPE_DEVICE_KHR: + return "VK_SCOPE_DEVICE_KHR"; + case VK_SCOPE_WORKGROUP_KHR: + return "VK_SCOPE_WORKGROUP_KHR"; + case VK_SCOPE_SUBGROUP_KHR: + return "VK_SCOPE_SUBGROUP_KHR"; + case VK_SCOPE_QUEUE_FAMILY_KHR: + return "VK_SCOPE_QUEUE_FAMILY_KHR"; + default: + return "Unhandled VkScopeKHR"; + } +} +static inline const char* string_VkVideoEncodeAV1PredictionModeKHR(VkVideoEncodeAV1PredictionModeKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_INTRA_ONLY_KHR: + return "VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_INTRA_ONLY_KHR"; + case VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_SINGLE_REFERENCE_KHR: + return "VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_SINGLE_REFERENCE_KHR"; + case VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_UNIDIRECTIONAL_COMPOUND_KHR: + return "VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_UNIDIRECTIONAL_COMPOUND_KHR"; + case VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_BIDIRECTIONAL_COMPOUND_KHR: + return 
"VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_BIDIRECTIONAL_COMPOUND_KHR"; + default: + return "Unhandled VkVideoEncodeAV1PredictionModeKHR"; + } +} +static inline const char* string_VkVideoEncodeAV1RateControlGroupKHR(VkVideoEncodeAV1RateControlGroupKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_INTRA_KHR: + return "VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_INTRA_KHR"; + case VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_PREDICTIVE_KHR: + return "VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_PREDICTIVE_KHR"; + case VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_BIPREDICTIVE_KHR: + return "VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_BIPREDICTIVE_KHR"; + default: + return "Unhandled VkVideoEncodeAV1RateControlGroupKHR"; + } +} +static inline const char* string_VkTimeDomainKHR(VkTimeDomainKHR input_value) { + switch (input_value) { + case VK_TIME_DOMAIN_DEVICE_KHR: + return "VK_TIME_DOMAIN_DEVICE_KHR"; + case VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR: + return "VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR"; + case VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR: + return "VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR"; + case VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR: + return "VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR"; + default: + return "Unhandled VkTimeDomainKHR"; + } +} +static inline const char* string_VkPhysicalDeviceLayeredApiKHR(VkPhysicalDeviceLayeredApiKHR input_value) { + switch (input_value) { + case VK_PHYSICAL_DEVICE_LAYERED_API_VULKAN_KHR: + return "VK_PHYSICAL_DEVICE_LAYERED_API_VULKAN_KHR"; + case VK_PHYSICAL_DEVICE_LAYERED_API_D3D12_KHR: + return "VK_PHYSICAL_DEVICE_LAYERED_API_D3D12_KHR"; + case VK_PHYSICAL_DEVICE_LAYERED_API_METAL_KHR: + return "VK_PHYSICAL_DEVICE_LAYERED_API_METAL_KHR"; + case VK_PHYSICAL_DEVICE_LAYERED_API_OPENGL_KHR: + return "VK_PHYSICAL_DEVICE_LAYERED_API_OPENGL_KHR"; + case VK_PHYSICAL_DEVICE_LAYERED_API_OPENGLES_KHR: + return "VK_PHYSICAL_DEVICE_LAYERED_API_OPENGLES_KHR"; + default: + return "Unhandled VkPhysicalDeviceLayeredApiKHR"; + } +} +static inline const char* string_VkDebugReportObjectTypeEXT(VkDebugReportObjectTypeEXT input_value) { + switch (input_value) { + case VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT: + return 
"VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_MODULE_NV_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_MODULE_NV_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_FUNCTION_NV_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_FUNCTION_NV_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT"; + default: + return "Unhandled VkDebugReportObjectTypeEXT"; + } +} +static inline const char* string_VkRasterizationOrderAMD(VkRasterizationOrderAMD input_value) { + switch (input_value) { + case VK_RASTERIZATION_ORDER_STRICT_AMD: + return "VK_RASTERIZATION_ORDER_STRICT_AMD"; + case VK_RASTERIZATION_ORDER_RELAXED_AMD: + 
return "VK_RASTERIZATION_ORDER_RELAXED_AMD"; + default: + return "Unhandled VkRasterizationOrderAMD"; + } +} +static inline const char* string_VkShaderInfoTypeAMD(VkShaderInfoTypeAMD input_value) { + switch (input_value) { + case VK_SHADER_INFO_TYPE_STATISTICS_AMD: + return "VK_SHADER_INFO_TYPE_STATISTICS_AMD"; + case VK_SHADER_INFO_TYPE_BINARY_AMD: + return "VK_SHADER_INFO_TYPE_BINARY_AMD"; + case VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD: + return "VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD"; + default: + return "Unhandled VkShaderInfoTypeAMD"; + } +} +static inline const char* string_VkValidationCheckEXT(VkValidationCheckEXT input_value) { + switch (input_value) { + case VK_VALIDATION_CHECK_ALL_EXT: + return "VK_VALIDATION_CHECK_ALL_EXT"; + case VK_VALIDATION_CHECK_SHADERS_EXT: + return "VK_VALIDATION_CHECK_SHADERS_EXT"; + default: + return "Unhandled VkValidationCheckEXT"; + } +} +static inline const char* string_VkDisplayPowerStateEXT(VkDisplayPowerStateEXT input_value) { + switch (input_value) { + case VK_DISPLAY_POWER_STATE_OFF_EXT: + return "VK_DISPLAY_POWER_STATE_OFF_EXT"; + case VK_DISPLAY_POWER_STATE_SUSPEND_EXT: + return "VK_DISPLAY_POWER_STATE_SUSPEND_EXT"; + case VK_DISPLAY_POWER_STATE_ON_EXT: + return "VK_DISPLAY_POWER_STATE_ON_EXT"; + default: + return "Unhandled VkDisplayPowerStateEXT"; + } +} +static inline const char* string_VkDeviceEventTypeEXT(VkDeviceEventTypeEXT input_value) { + switch (input_value) { + case VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT: + return "VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT"; + default: + return "Unhandled VkDeviceEventTypeEXT"; + } +} +static inline const char* string_VkDisplayEventTypeEXT(VkDisplayEventTypeEXT input_value) { + switch (input_value) { + case VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT: + return "VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT"; + default: + return "Unhandled VkDisplayEventTypeEXT"; + } +} +static inline const char* string_VkViewportCoordinateSwizzleNV(VkViewportCoordinateSwizzleNV input_value) { + switch (input_value) { + case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV: + return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV"; + case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV: + return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV"; + case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV: + return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV"; + case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV: + return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV"; + case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV: + return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV"; + case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV: + return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV"; + case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV: + return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV"; + case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV: + return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV"; + default: + return "Unhandled VkViewportCoordinateSwizzleNV"; + } +} +static inline const char* string_VkDiscardRectangleModeEXT(VkDiscardRectangleModeEXT input_value) { + switch (input_value) { + case VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT: + return "VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT"; + case VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT: + return "VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT"; + default: + return "Unhandled VkDiscardRectangleModeEXT"; + } +} +static inline const char* string_VkConservativeRasterizationModeEXT(VkConservativeRasterizationModeEXT input_value) { + switch (input_value) { + case 
VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT: + return "VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT"; + case VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT: + return "VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT"; + case VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT: + return "VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT"; + default: + return "Unhandled VkConservativeRasterizationModeEXT"; + } +} +static inline const char* string_VkBlendOverlapEXT(VkBlendOverlapEXT input_value) { + switch (input_value) { + case VK_BLEND_OVERLAP_UNCORRELATED_EXT: + return "VK_BLEND_OVERLAP_UNCORRELATED_EXT"; + case VK_BLEND_OVERLAP_DISJOINT_EXT: + return "VK_BLEND_OVERLAP_DISJOINT_EXT"; + case VK_BLEND_OVERLAP_CONJOINT_EXT: + return "VK_BLEND_OVERLAP_CONJOINT_EXT"; + default: + return "Unhandled VkBlendOverlapEXT"; + } +} +static inline const char* string_VkCoverageModulationModeNV(VkCoverageModulationModeNV input_value) { + switch (input_value) { + case VK_COVERAGE_MODULATION_MODE_NONE_NV: + return "VK_COVERAGE_MODULATION_MODE_NONE_NV"; + case VK_COVERAGE_MODULATION_MODE_RGB_NV: + return "VK_COVERAGE_MODULATION_MODE_RGB_NV"; + case VK_COVERAGE_MODULATION_MODE_ALPHA_NV: + return "VK_COVERAGE_MODULATION_MODE_ALPHA_NV"; + case VK_COVERAGE_MODULATION_MODE_RGBA_NV: + return "VK_COVERAGE_MODULATION_MODE_RGBA_NV"; + default: + return "Unhandled VkCoverageModulationModeNV"; + } +} +static inline const char* string_VkValidationCacheHeaderVersionEXT(VkValidationCacheHeaderVersionEXT input_value) { + switch (input_value) { + case VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT: + return "VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT"; + default: + return "Unhandled VkValidationCacheHeaderVersionEXT"; + } +} +static inline const char* string_VkShadingRatePaletteEntryNV(VkShadingRatePaletteEntryNV input_value) { + switch (input_value) { + case VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV"; + default: + return "Unhandled 
VkShadingRatePaletteEntryNV"; + } +} +static inline const char* string_VkCoarseSampleOrderTypeNV(VkCoarseSampleOrderTypeNV input_value) { + switch (input_value) { + case VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV: + return "VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV"; + case VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV: + return "VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV"; + case VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV: + return "VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV"; + case VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV: + return "VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV"; + default: + return "Unhandled VkCoarseSampleOrderTypeNV"; + } +} +static inline const char* string_VkRayTracingShaderGroupTypeKHR(VkRayTracingShaderGroupTypeKHR input_value) { + switch (input_value) { + case VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR: + return "VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR"; + case VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR: + return "VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR"; + case VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR: + return "VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR"; + default: + return "Unhandled VkRayTracingShaderGroupTypeKHR"; + } +} +static inline const char* string_VkGeometryTypeKHR(VkGeometryTypeKHR input_value) { + switch (input_value) { + case VK_GEOMETRY_TYPE_TRIANGLES_KHR: + return "VK_GEOMETRY_TYPE_TRIANGLES_KHR"; + case VK_GEOMETRY_TYPE_AABBS_KHR: + return "VK_GEOMETRY_TYPE_AABBS_KHR"; + case VK_GEOMETRY_TYPE_INSTANCES_KHR: + return "VK_GEOMETRY_TYPE_INSTANCES_KHR"; + case VK_GEOMETRY_TYPE_SPHERES_NV: + return "VK_GEOMETRY_TYPE_SPHERES_NV"; + case VK_GEOMETRY_TYPE_LINEAR_SWEPT_SPHERES_NV: + return "VK_GEOMETRY_TYPE_LINEAR_SWEPT_SPHERES_NV"; + default: + return "Unhandled VkGeometryTypeKHR"; + } +} +static inline const char* string_VkAccelerationStructureTypeKHR(VkAccelerationStructureTypeKHR input_value) { + switch (input_value) { + case VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR: + return "VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR"; + case VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR: + return "VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR"; + case VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR: + return "VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR"; + default: + return "Unhandled VkAccelerationStructureTypeKHR"; + } +} +static inline const char* string_VkCopyAccelerationStructureModeKHR(VkCopyAccelerationStructureModeKHR input_value) { + switch (input_value) { + case VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR: + return "VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR"; + case VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR: + return "VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR"; + case VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR: + return "VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR"; + case VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR: + return "VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR"; + default: + return "Unhandled VkCopyAccelerationStructureModeKHR"; + } +} +static inline const char* string_VkAccelerationStructureMemoryRequirementsTypeNV(VkAccelerationStructureMemoryRequirementsTypeNV input_value) { + switch (input_value) { + case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV: + return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV"; + case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV: + return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV"; + case 
VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV: + return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV"; + default: + return "Unhandled VkAccelerationStructureMemoryRequirementsTypeNV"; + } +} +static inline const char* string_VkMemoryOverallocationBehaviorAMD(VkMemoryOverallocationBehaviorAMD input_value) { + switch (input_value) { + case VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD: + return "VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD"; + case VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD: + return "VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD"; + case VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD: + return "VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD"; + default: + return "Unhandled VkMemoryOverallocationBehaviorAMD"; + } +} +static inline const char* string_VkPerformanceConfigurationTypeINTEL(VkPerformanceConfigurationTypeINTEL input_value) { + switch (input_value) { + case VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL: + return "VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL"; + default: + return "Unhandled VkPerformanceConfigurationTypeINTEL"; + } +} +static inline const char* string_VkQueryPoolSamplingModeINTEL(VkQueryPoolSamplingModeINTEL input_value) { + switch (input_value) { + case VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL: + return "VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL"; + default: + return "Unhandled VkQueryPoolSamplingModeINTEL"; + } +} +static inline const char* string_VkPerformanceOverrideTypeINTEL(VkPerformanceOverrideTypeINTEL input_value) { + switch (input_value) { + case VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL: + return "VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL"; + case VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL: + return "VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL"; + default: + return "Unhandled VkPerformanceOverrideTypeINTEL"; + } +} +static inline const char* string_VkPerformanceParameterTypeINTEL(VkPerformanceParameterTypeINTEL input_value) { + switch (input_value) { + case VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL: + return "VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL"; + case VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL: + return "VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL"; + default: + return "Unhandled VkPerformanceParameterTypeINTEL"; + } +} +static inline const char* string_VkPerformanceValueTypeINTEL(VkPerformanceValueTypeINTEL input_value) { + switch (input_value) { + case VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL: + return "VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL"; + case VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL: + return "VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL"; + case VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL: + return "VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL"; + case VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL: + return "VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL"; + case VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL: + return "VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL"; + default: + return "Unhandled VkPerformanceValueTypeINTEL"; + } +} +static inline const char* string_VkValidationFeatureEnableEXT(VkValidationFeatureEnableEXT input_value) { + switch (input_value) { + case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT: + return "VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT"; + case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT: + return "VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT"; + case 
VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT: + return "VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT"; + case VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT: + return "VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT"; + case VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT: + return "VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT"; + default: + return "Unhandled VkValidationFeatureEnableEXT"; + } +} +static inline const char* string_VkValidationFeatureDisableEXT(VkValidationFeatureDisableEXT input_value) { + switch (input_value) { + case VK_VALIDATION_FEATURE_DISABLE_ALL_EXT: + return "VK_VALIDATION_FEATURE_DISABLE_ALL_EXT"; + case VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT: + return "VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT"; + case VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT: + return "VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT"; + case VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT: + return "VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT"; + case VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT: + return "VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT"; + case VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT: + return "VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT"; + case VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT: + return "VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT"; + case VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT: + return "VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT"; + default: + return "Unhandled VkValidationFeatureDisableEXT"; + } +} +static inline const char* string_VkCoverageReductionModeNV(VkCoverageReductionModeNV input_value) { + switch (input_value) { + case VK_COVERAGE_REDUCTION_MODE_MERGE_NV: + return "VK_COVERAGE_REDUCTION_MODE_MERGE_NV"; + case VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV: + return "VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV"; + default: + return "Unhandled VkCoverageReductionModeNV"; + } +} +static inline const char* string_VkProvokingVertexModeEXT(VkProvokingVertexModeEXT input_value) { + switch (input_value) { + case VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT: + return "VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT"; + case VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT: + return "VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT"; + default: + return "Unhandled VkProvokingVertexModeEXT"; + } +} +#ifdef VK_USE_PLATFORM_WIN32_KHR +static inline const char* string_VkFullScreenExclusiveEXT(VkFullScreenExclusiveEXT input_value) { + switch (input_value) { + case VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT: + return "VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT"; + case VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT: + return "VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT"; + case VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT: + return "VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT"; + case VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT: + return "VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT"; + default: + return "Unhandled VkFullScreenExclusiveEXT"; + } +} +#endif // VK_USE_PLATFORM_WIN32_KHR +static inline const char* string_VkIndirectCommandsTokenTypeNV(VkIndirectCommandsTokenTypeNV input_value) { + switch (input_value) { + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV: + return 
"VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NV"; + default: + return "Unhandled VkIndirectCommandsTokenTypeNV"; + } +} +static inline const char* string_VkDepthBiasRepresentationEXT(VkDepthBiasRepresentationEXT input_value) { + switch (input_value) { + case VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORMAT_EXT: + return "VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORMAT_EXT"; + case VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORCE_UNORM_EXT: + return "VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORCE_UNORM_EXT"; + case VK_DEPTH_BIAS_REPRESENTATION_FLOAT_EXT: + return "VK_DEPTH_BIAS_REPRESENTATION_FLOAT_EXT"; + default: + return "Unhandled VkDepthBiasRepresentationEXT"; + } +} +static inline const char* string_VkDeviceMemoryReportEventTypeEXT(VkDeviceMemoryReportEventTypeEXT input_value) { + switch (input_value) { + case VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT: + return "VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT"; + case VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT: + return "VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT"; + case VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT: + return "VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT"; + case VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT: + return "VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT"; + case VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT: + return "VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT"; + default: + return "Unhandled VkDeviceMemoryReportEventTypeEXT"; + } +} +static inline const char* string_VkFragmentShadingRateTypeNV(VkFragmentShadingRateTypeNV input_value) { + switch (input_value) { + case VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV: + return "VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV"; + case VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV: + return "VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV"; + default: + return "Unhandled VkFragmentShadingRateTypeNV"; + } +} +static inline const char* string_VkFragmentShadingRateNV(VkFragmentShadingRateNV input_value) { + switch (input_value) { + case VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV: + return "VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV"; + case VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV: + return "VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV"; + case VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV: + return "VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV"; + case VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV: + return "VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV"; + case VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV: + return "VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV"; + case 
VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV: + return "VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV"; + case VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV: + return "VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV"; + case VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV: + return "VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV"; + case VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV: + return "VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV"; + case VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV: + return "VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV"; + case VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV: + return "VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV"; + case VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV: + return "VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV"; + default: + return "Unhandled VkFragmentShadingRateNV"; + } +} +static inline const char* string_VkAccelerationStructureMotionInstanceTypeNV(VkAccelerationStructureMotionInstanceTypeNV input_value) { + switch (input_value) { + case VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_STATIC_NV: + return "VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_STATIC_NV"; + case VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MATRIX_MOTION_NV: + return "VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MATRIX_MOTION_NV"; + case VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_SRT_MOTION_NV: + return "VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_SRT_MOTION_NV"; + default: + return "Unhandled VkAccelerationStructureMotionInstanceTypeNV"; + } +} +static inline const char* string_VkDeviceFaultAddressTypeEXT(VkDeviceFaultAddressTypeEXT input_value) { + switch (input_value) { + case VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT: + return "VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT"; + case VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT: + return "VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT"; + case VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT: + return "VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT"; + case VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT: + return "VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT"; + case VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT: + return "VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT"; + case VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT: + return "VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT"; + case VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT: + return "VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT"; + default: + return "Unhandled VkDeviceFaultAddressTypeEXT"; + } +} +static inline const char* string_VkDeviceFaultVendorBinaryHeaderVersionEXT(VkDeviceFaultVendorBinaryHeaderVersionEXT input_value) { + switch (input_value) { + case VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT: + return "VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT"; + default: + return "Unhandled VkDeviceFaultVendorBinaryHeaderVersionEXT"; + } +} +static inline const char* string_VkDeviceAddressBindingTypeEXT(VkDeviceAddressBindingTypeEXT input_value) { + switch (input_value) { + case VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT: + return "VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT"; + case VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT: + return "VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT"; + default: + return "Unhandled VkDeviceAddressBindingTypeEXT"; + } +} +static inline const char* string_VkMicromapTypeEXT(VkMicromapTypeEXT input_value) { + switch (input_value) { + case 
VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT: + return "VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + case VK_MICROMAP_TYPE_DISPLACEMENT_MICROMAP_NV: + return "VK_MICROMAP_TYPE_DISPLACEMENT_MICROMAP_NV"; +#endif // VK_ENABLE_BETA_EXTENSIONS + default: + return "Unhandled VkMicromapTypeEXT"; + } +} +static inline const char* string_VkBuildMicromapModeEXT(VkBuildMicromapModeEXT input_value) { + switch (input_value) { + case VK_BUILD_MICROMAP_MODE_BUILD_EXT: + return "VK_BUILD_MICROMAP_MODE_BUILD_EXT"; + default: + return "Unhandled VkBuildMicromapModeEXT"; + } +} +static inline const char* string_VkCopyMicromapModeEXT(VkCopyMicromapModeEXT input_value) { + switch (input_value) { + case VK_COPY_MICROMAP_MODE_CLONE_EXT: + return "VK_COPY_MICROMAP_MODE_CLONE_EXT"; + case VK_COPY_MICROMAP_MODE_SERIALIZE_EXT: + return "VK_COPY_MICROMAP_MODE_SERIALIZE_EXT"; + case VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT: + return "VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT"; + case VK_COPY_MICROMAP_MODE_COMPACT_EXT: + return "VK_COPY_MICROMAP_MODE_COMPACT_EXT"; + default: + return "Unhandled VkCopyMicromapModeEXT"; + } +} +static inline const char* string_VkOpacityMicromapFormatEXT(VkOpacityMicromapFormatEXT input_value) { + switch (input_value) { + case VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT: + return "VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT"; + case VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT: + return "VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT"; + default: + return "Unhandled VkOpacityMicromapFormatEXT"; + } +} +static inline const char* string_VkOpacityMicromapSpecialIndexEXT(VkOpacityMicromapSpecialIndexEXT input_value) { + switch (input_value) { + case VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT: + return "VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT"; + case VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT: + return "VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT"; + case VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT: + return "VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT"; + case VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT: + return "VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT"; + case VK_OPACITY_MICROMAP_SPECIAL_INDEX_CLUSTER_GEOMETRY_DISABLE_OPACITY_MICROMAP_NV: + return "VK_OPACITY_MICROMAP_SPECIAL_INDEX_CLUSTER_GEOMETRY_DISABLE_OPACITY_MICROMAP_NV"; + default: + return "Unhandled VkOpacityMicromapSpecialIndexEXT"; + } +} +static inline const char* string_VkAccelerationStructureCompatibilityKHR(VkAccelerationStructureCompatibilityKHR input_value) { + switch (input_value) { + case VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR: + return "VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR"; + case VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR: + return "VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR"; + default: + return "Unhandled VkAccelerationStructureCompatibilityKHR"; + } +} +static inline const char* string_VkAccelerationStructureBuildTypeKHR(VkAccelerationStructureBuildTypeKHR input_value) { + switch (input_value) { + case VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR: + return "VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR"; + case VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR: + return "VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR"; + case VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR: + return "VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR"; + default: + return "Unhandled VkAccelerationStructureBuildTypeKHR"; + } +} +#ifdef 
VK_ENABLE_BETA_EXTENSIONS +static inline const char* string_VkDisplacementMicromapFormatNV(VkDisplacementMicromapFormatNV input_value) { + switch (input_value) { + case VK_DISPLACEMENT_MICROMAP_FORMAT_64_TRIANGLES_64_BYTES_NV: + return "VK_DISPLACEMENT_MICROMAP_FORMAT_64_TRIANGLES_64_BYTES_NV"; + case VK_DISPLACEMENT_MICROMAP_FORMAT_256_TRIANGLES_128_BYTES_NV: + return "VK_DISPLACEMENT_MICROMAP_FORMAT_256_TRIANGLES_128_BYTES_NV"; + case VK_DISPLACEMENT_MICROMAP_FORMAT_1024_TRIANGLES_128_BYTES_NV: + return "VK_DISPLACEMENT_MICROMAP_FORMAT_1024_TRIANGLES_128_BYTES_NV"; + default: + return "Unhandled VkDisplacementMicromapFormatNV"; + } +} +#endif // VK_ENABLE_BETA_EXTENSIONS +static inline const char* string_VkRayTracingLssIndexingModeNV(VkRayTracingLssIndexingModeNV input_value) { + switch (input_value) { + case VK_RAY_TRACING_LSS_INDEXING_MODE_LIST_NV: + return "VK_RAY_TRACING_LSS_INDEXING_MODE_LIST_NV"; + case VK_RAY_TRACING_LSS_INDEXING_MODE_SUCCESSIVE_NV: + return "VK_RAY_TRACING_LSS_INDEXING_MODE_SUCCESSIVE_NV"; + default: + return "Unhandled VkRayTracingLssIndexingModeNV"; + } +} +static inline const char* string_VkRayTracingLssPrimitiveEndCapsModeNV(VkRayTracingLssPrimitiveEndCapsModeNV input_value) { + switch (input_value) { + case VK_RAY_TRACING_LSS_PRIMITIVE_END_CAPS_MODE_NONE_NV: + return "VK_RAY_TRACING_LSS_PRIMITIVE_END_CAPS_MODE_NONE_NV"; + case VK_RAY_TRACING_LSS_PRIMITIVE_END_CAPS_MODE_CHAINED_NV: + return "VK_RAY_TRACING_LSS_PRIMITIVE_END_CAPS_MODE_CHAINED_NV"; + default: + return "Unhandled VkRayTracingLssPrimitiveEndCapsModeNV"; + } +} +static inline const char* string_VkSubpassMergeStatusEXT(VkSubpassMergeStatusEXT input_value) { + switch (input_value) { + case VK_SUBPASS_MERGE_STATUS_MERGED_EXT: + return "VK_SUBPASS_MERGE_STATUS_MERGED_EXT"; + case VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT: + return "VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT"; + default: + return "Unhandled VkSubpassMergeStatusEXT"; + } +} 
+static inline const char* string_VkDirectDriverLoadingModeLUNARG(VkDirectDriverLoadingModeLUNARG input_value) { + switch (input_value) { + case VK_DIRECT_DRIVER_LOADING_MODE_EXCLUSIVE_LUNARG: + return "VK_DIRECT_DRIVER_LOADING_MODE_EXCLUSIVE_LUNARG"; + case VK_DIRECT_DRIVER_LOADING_MODE_INCLUSIVE_LUNARG: + return "VK_DIRECT_DRIVER_LOADING_MODE_INCLUSIVE_LUNARG"; + default: + return "Unhandled VkDirectDriverLoadingModeLUNARG"; + } +} +static inline const char* string_VkOpticalFlowPerformanceLevelNV(VkOpticalFlowPerformanceLevelNV input_value) { + switch (input_value) { + case VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV: + return "VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV"; + case VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV: + return "VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV"; + case VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV: + return "VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV"; + case VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV: + return "VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV"; + default: + return "Unhandled VkOpticalFlowPerformanceLevelNV"; + } +} +static inline const char* string_VkOpticalFlowSessionBindingPointNV(VkOpticalFlowSessionBindingPointNV input_value) { + switch (input_value) { + case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV: + return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV"; + case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV: + return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV"; + case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV: + return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV"; + case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV: + return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV"; + case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV: + return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV"; + case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV: + return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV"; + case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV: + return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV"; + case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV: + return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV"; + case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV: + return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV"; + default: + return "Unhandled VkOpticalFlowSessionBindingPointNV"; + } +} +static inline const char* string_VkAntiLagModeAMD(VkAntiLagModeAMD input_value) { + switch (input_value) { + case VK_ANTI_LAG_MODE_DRIVER_CONTROL_AMD: + return "VK_ANTI_LAG_MODE_DRIVER_CONTROL_AMD"; + case VK_ANTI_LAG_MODE_ON_AMD: + return "VK_ANTI_LAG_MODE_ON_AMD"; + case VK_ANTI_LAG_MODE_OFF_AMD: + return "VK_ANTI_LAG_MODE_OFF_AMD"; + default: + return "Unhandled VkAntiLagModeAMD"; + } +} +static inline const char* string_VkAntiLagStageAMD(VkAntiLagStageAMD input_value) { + switch (input_value) { + case VK_ANTI_LAG_STAGE_INPUT_AMD: + return "VK_ANTI_LAG_STAGE_INPUT_AMD"; + case VK_ANTI_LAG_STAGE_PRESENT_AMD: + return "VK_ANTI_LAG_STAGE_PRESENT_AMD"; + default: + return "Unhandled VkAntiLagStageAMD"; + } +} +static inline const char* string_VkShaderCodeTypeEXT(VkShaderCodeTypeEXT input_value) { + switch (input_value) { + case VK_SHADER_CODE_TYPE_BINARY_EXT: + return "VK_SHADER_CODE_TYPE_BINARY_EXT"; + case VK_SHADER_CODE_TYPE_SPIRV_EXT: + return "VK_SHADER_CODE_TYPE_SPIRV_EXT"; + default: + return "Unhandled VkShaderCodeTypeEXT"; + } +} +static inline const char* string_VkDepthClampModeEXT(VkDepthClampModeEXT input_value) { + switch 
(input_value) { + case VK_DEPTH_CLAMP_MODE_VIEWPORT_RANGE_EXT: + return "VK_DEPTH_CLAMP_MODE_VIEWPORT_RANGE_EXT"; + case VK_DEPTH_CLAMP_MODE_USER_DEFINED_RANGE_EXT: + return "VK_DEPTH_CLAMP_MODE_USER_DEFINED_RANGE_EXT"; + default: + return "Unhandled VkDepthClampModeEXT"; + } +} +static inline const char* string_VkRayTracingInvocationReorderModeNV(VkRayTracingInvocationReorderModeNV input_value) { + switch (input_value) { + case VK_RAY_TRACING_INVOCATION_REORDER_MODE_NONE_NV: + return "VK_RAY_TRACING_INVOCATION_REORDER_MODE_NONE_NV"; + case VK_RAY_TRACING_INVOCATION_REORDER_MODE_REORDER_NV: + return "VK_RAY_TRACING_INVOCATION_REORDER_MODE_REORDER_NV"; + default: + return "Unhandled VkRayTracingInvocationReorderModeNV"; + } +} +static inline const char* string_VkCooperativeVectorMatrixLayoutNV(VkCooperativeVectorMatrixLayoutNV input_value) { + switch (input_value) { + case VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_ROW_MAJOR_NV: + return "VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_ROW_MAJOR_NV"; + case VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_COLUMN_MAJOR_NV: + return "VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_COLUMN_MAJOR_NV"; + case VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_INFERENCING_OPTIMAL_NV: + return "VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_INFERENCING_OPTIMAL_NV"; + case VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_TRAINING_OPTIMAL_NV: + return "VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_TRAINING_OPTIMAL_NV"; + default: + return "Unhandled VkCooperativeVectorMatrixLayoutNV"; + } +} +static inline const char* string_VkLayerSettingTypeEXT(VkLayerSettingTypeEXT input_value) { + switch (input_value) { + case VK_LAYER_SETTING_TYPE_BOOL32_EXT: + return "VK_LAYER_SETTING_TYPE_BOOL32_EXT"; + case VK_LAYER_SETTING_TYPE_INT32_EXT: + return "VK_LAYER_SETTING_TYPE_INT32_EXT"; + case VK_LAYER_SETTING_TYPE_INT64_EXT: + return "VK_LAYER_SETTING_TYPE_INT64_EXT"; + case VK_LAYER_SETTING_TYPE_UINT32_EXT: + return "VK_LAYER_SETTING_TYPE_UINT32_EXT"; + case VK_LAYER_SETTING_TYPE_UINT64_EXT: + return "VK_LAYER_SETTING_TYPE_UINT64_EXT"; + case VK_LAYER_SETTING_TYPE_FLOAT32_EXT: + return "VK_LAYER_SETTING_TYPE_FLOAT32_EXT"; + case VK_LAYER_SETTING_TYPE_FLOAT64_EXT: + return "VK_LAYER_SETTING_TYPE_FLOAT64_EXT"; + case VK_LAYER_SETTING_TYPE_STRING_EXT: + return "VK_LAYER_SETTING_TYPE_STRING_EXT"; + default: + return "Unhandled VkLayerSettingTypeEXT"; + } +} +static inline const char* string_VkLatencyMarkerNV(VkLatencyMarkerNV input_value) { + switch (input_value) { + case VK_LATENCY_MARKER_SIMULATION_START_NV: + return "VK_LATENCY_MARKER_SIMULATION_START_NV"; + case VK_LATENCY_MARKER_SIMULATION_END_NV: + return "VK_LATENCY_MARKER_SIMULATION_END_NV"; + case VK_LATENCY_MARKER_RENDERSUBMIT_START_NV: + return "VK_LATENCY_MARKER_RENDERSUBMIT_START_NV"; + case VK_LATENCY_MARKER_RENDERSUBMIT_END_NV: + return "VK_LATENCY_MARKER_RENDERSUBMIT_END_NV"; + case VK_LATENCY_MARKER_PRESENT_START_NV: + return "VK_LATENCY_MARKER_PRESENT_START_NV"; + case VK_LATENCY_MARKER_PRESENT_END_NV: + return "VK_LATENCY_MARKER_PRESENT_END_NV"; + case VK_LATENCY_MARKER_INPUT_SAMPLE_NV: + return "VK_LATENCY_MARKER_INPUT_SAMPLE_NV"; + case VK_LATENCY_MARKER_TRIGGER_FLASH_NV: + return "VK_LATENCY_MARKER_TRIGGER_FLASH_NV"; + case VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_START_NV: + return "VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_START_NV"; + case VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_END_NV: + return "VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_END_NV"; + case VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_START_NV: + return "VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_START_NV"; + case 
VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_END_NV: + return "VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_END_NV"; + default: + return "Unhandled VkLatencyMarkerNV"; + } +} +static inline const char* string_VkOutOfBandQueueTypeNV(VkOutOfBandQueueTypeNV input_value) { + switch (input_value) { + case VK_OUT_OF_BAND_QUEUE_TYPE_RENDER_NV: + return "VK_OUT_OF_BAND_QUEUE_TYPE_RENDER_NV"; + case VK_OUT_OF_BAND_QUEUE_TYPE_PRESENT_NV: + return "VK_OUT_OF_BAND_QUEUE_TYPE_PRESENT_NV"; + default: + return "Unhandled VkOutOfBandQueueTypeNV"; + } +} +static inline const char* string_VkBlockMatchWindowCompareModeQCOM(VkBlockMatchWindowCompareModeQCOM input_value) { + switch (input_value) { + case VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MIN_QCOM: + return "VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MIN_QCOM"; + case VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MAX_QCOM: + return "VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MAX_QCOM"; + default: + return "Unhandled VkBlockMatchWindowCompareModeQCOM"; + } +} +static inline const char* string_VkCubicFilterWeightsQCOM(VkCubicFilterWeightsQCOM input_value) { + switch (input_value) { + case VK_CUBIC_FILTER_WEIGHTS_CATMULL_ROM_QCOM: + return "VK_CUBIC_FILTER_WEIGHTS_CATMULL_ROM_QCOM"; + case VK_CUBIC_FILTER_WEIGHTS_ZERO_TANGENT_CARDINAL_QCOM: + return "VK_CUBIC_FILTER_WEIGHTS_ZERO_TANGENT_CARDINAL_QCOM"; + case VK_CUBIC_FILTER_WEIGHTS_B_SPLINE_QCOM: + return "VK_CUBIC_FILTER_WEIGHTS_B_SPLINE_QCOM"; + case VK_CUBIC_FILTER_WEIGHTS_MITCHELL_NETRAVALI_QCOM: + return "VK_CUBIC_FILTER_WEIGHTS_MITCHELL_NETRAVALI_QCOM"; + default: + return "Unhandled VkCubicFilterWeightsQCOM"; + } +} +static inline const char* string_VkLayeredDriverUnderlyingApiMSFT(VkLayeredDriverUnderlyingApiMSFT input_value) { + switch (input_value) { + case VK_LAYERED_DRIVER_UNDERLYING_API_NONE_MSFT: + return "VK_LAYERED_DRIVER_UNDERLYING_API_NONE_MSFT"; + case VK_LAYERED_DRIVER_UNDERLYING_API_D3D12_MSFT: + return "VK_LAYERED_DRIVER_UNDERLYING_API_D3D12_MSFT"; + default: + return "Unhandled VkLayeredDriverUnderlyingApiMSFT"; + } +} +static inline const char* string_VkDisplaySurfaceStereoTypeNV(VkDisplaySurfaceStereoTypeNV input_value) { + switch (input_value) { + case VK_DISPLAY_SURFACE_STEREO_TYPE_NONE_NV: + return "VK_DISPLAY_SURFACE_STEREO_TYPE_NONE_NV"; + case VK_DISPLAY_SURFACE_STEREO_TYPE_ONBOARD_DIN_NV: + return "VK_DISPLAY_SURFACE_STEREO_TYPE_ONBOARD_DIN_NV"; + case VK_DISPLAY_SURFACE_STEREO_TYPE_HDMI_3D_NV: + return "VK_DISPLAY_SURFACE_STEREO_TYPE_HDMI_3D_NV"; + case VK_DISPLAY_SURFACE_STEREO_TYPE_INBAND_DISPLAYPORT_NV: + return "VK_DISPLAY_SURFACE_STEREO_TYPE_INBAND_DISPLAYPORT_NV"; + default: + return "Unhandled VkDisplaySurfaceStereoTypeNV"; + } +} +static inline const char* string_VkClusterAccelerationStructureTypeNV(VkClusterAccelerationStructureTypeNV input_value) { + switch (input_value) { + case VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_CLUSTERS_BOTTOM_LEVEL_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_CLUSTERS_BOTTOM_LEVEL_NV"; + case VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_TRIANGLE_CLUSTER_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_TRIANGLE_CLUSTER_NV"; + case VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_TRIANGLE_CLUSTER_TEMPLATE_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_TRIANGLE_CLUSTER_TEMPLATE_NV"; + default: + return "Unhandled VkClusterAccelerationStructureTypeNV"; + } +} +static inline const char* string_VkClusterAccelerationStructureOpTypeNV(VkClusterAccelerationStructureOpTypeNV input_value) { + switch (input_value) { + case VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_MOVE_OBJECTS_NV: + return 
"VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_MOVE_OBJECTS_NV"; + case VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_BUILD_CLUSTERS_BOTTOM_LEVEL_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_BUILD_CLUSTERS_BOTTOM_LEVEL_NV"; + case VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_BUILD_TRIANGLE_CLUSTER_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_BUILD_TRIANGLE_CLUSTER_NV"; + case VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_BUILD_TRIANGLE_CLUSTER_TEMPLATE_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_BUILD_TRIANGLE_CLUSTER_TEMPLATE_NV"; + case VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_INSTANTIATE_TRIANGLE_CLUSTER_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_INSTANTIATE_TRIANGLE_CLUSTER_NV"; + default: + return "Unhandled VkClusterAccelerationStructureOpTypeNV"; + } +} +static inline const char* string_VkClusterAccelerationStructureOpModeNV(VkClusterAccelerationStructureOpModeNV input_value) { + switch (input_value) { + case VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_IMPLICIT_DESTINATIONS_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_IMPLICIT_DESTINATIONS_NV"; + case VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_EXPLICIT_DESTINATIONS_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_EXPLICIT_DESTINATIONS_NV"; + case VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_COMPUTE_SIZES_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_COMPUTE_SIZES_NV"; + default: + return "Unhandled VkClusterAccelerationStructureOpModeNV"; + } +} +static inline const char* string_VkPartitionedAccelerationStructureOpTypeNV(VkPartitionedAccelerationStructureOpTypeNV input_value) { + switch (input_value) { + case VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_WRITE_INSTANCE_NV: + return "VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_WRITE_INSTANCE_NV"; + case VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_UPDATE_INSTANCE_NV: + return "VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_UPDATE_INSTANCE_NV"; + case VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_WRITE_PARTITION_TRANSLATION_NV: + return "VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_WRITE_PARTITION_TRANSLATION_NV"; + default: + return "Unhandled VkPartitionedAccelerationStructureOpTypeNV"; + } +} +static inline const char* string_VkIndirectExecutionSetInfoTypeEXT(VkIndirectExecutionSetInfoTypeEXT input_value) { + switch (input_value) { + case VK_INDIRECT_EXECUTION_SET_INFO_TYPE_PIPELINES_EXT: + return "VK_INDIRECT_EXECUTION_SET_INFO_TYPE_PIPELINES_EXT"; + case VK_INDIRECT_EXECUTION_SET_INFO_TYPE_SHADER_OBJECTS_EXT: + return "VK_INDIRECT_EXECUTION_SET_INFO_TYPE_SHADER_OBJECTS_EXT"; + default: + return "Unhandled VkIndirectExecutionSetInfoTypeEXT"; + } +} +static inline const char* string_VkIndirectCommandsTokenTypeEXT(VkIndirectCommandsTokenTypeEXT input_value) { + switch (input_value) { + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_EXECUTION_SET_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_EXECUTION_SET_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_SEQUENCE_INDEX_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_SEQUENCE_INDEX_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_EXT"; + case 
VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_COUNT_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_COUNT_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_COUNT_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_COUNT_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_NV_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_NV_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_TRACE_RAYS2_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_TRACE_RAYS2_EXT"; + default: + return "Unhandled VkIndirectCommandsTokenTypeEXT"; + } +} +static inline const char* string_VkBuildAccelerationStructureModeKHR(VkBuildAccelerationStructureModeKHR input_value) { + switch (input_value) { + case VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR"; + case VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR"; + default: + return "Unhandled VkBuildAccelerationStructureModeKHR"; + } +} +static inline const char* string_VkShaderGroupShaderKHR(VkShaderGroupShaderKHR input_value) { + switch (input_value) { + case VK_SHADER_GROUP_SHADER_GENERAL_KHR: + return "VK_SHADER_GROUP_SHADER_GENERAL_KHR"; + case VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR: + return "VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR"; + case VK_SHADER_GROUP_SHADER_ANY_HIT_KHR: + return "VK_SHADER_GROUP_SHADER_ANY_HIT_KHR"; + case VK_SHADER_GROUP_SHADER_INTERSECTION_KHR: + return "VK_SHADER_GROUP_SHADER_INTERSECTION_KHR"; + default: + return "Unhandled VkShaderGroupShaderKHR"; + } +} + +static inline const char* string_VkAccessFlagBits(VkAccessFlagBits input_value) { + switch (input_value) { + case VK_ACCESS_INDIRECT_COMMAND_READ_BIT: + return "VK_ACCESS_INDIRECT_COMMAND_READ_BIT"; + case VK_ACCESS_INDEX_READ_BIT: + return "VK_ACCESS_INDEX_READ_BIT"; + case VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT: + return "VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT"; + case VK_ACCESS_UNIFORM_READ_BIT: + return "VK_ACCESS_UNIFORM_READ_BIT"; + case VK_ACCESS_INPUT_ATTACHMENT_READ_BIT: + return "VK_ACCESS_INPUT_ATTACHMENT_READ_BIT"; + case VK_ACCESS_SHADER_READ_BIT: + return "VK_ACCESS_SHADER_READ_BIT"; + case VK_ACCESS_SHADER_WRITE_BIT: + return "VK_ACCESS_SHADER_WRITE_BIT"; + case VK_ACCESS_COLOR_ATTACHMENT_READ_BIT: + return "VK_ACCESS_COLOR_ATTACHMENT_READ_BIT"; + case VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT: + return "VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT"; + case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT: + return "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT"; + case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT: + return "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT"; + case VK_ACCESS_TRANSFER_READ_BIT: + return "VK_ACCESS_TRANSFER_READ_BIT"; + case VK_ACCESS_TRANSFER_WRITE_BIT: + return "VK_ACCESS_TRANSFER_WRITE_BIT"; + case VK_ACCESS_HOST_READ_BIT: + return "VK_ACCESS_HOST_READ_BIT"; + case VK_ACCESS_HOST_WRITE_BIT: + return "VK_ACCESS_HOST_WRITE_BIT"; + 
case VK_ACCESS_MEMORY_READ_BIT: + return "VK_ACCESS_MEMORY_READ_BIT"; + case VK_ACCESS_MEMORY_WRITE_BIT: + return "VK_ACCESS_MEMORY_WRITE_BIT"; + case VK_ACCESS_NONE: + return "VK_ACCESS_NONE"; + case VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT: + return "VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT"; + case VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT: + return "VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT"; + case VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT: + return "VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT"; + case VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT: + return "VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT"; + case VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT: + return "VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT"; + case VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR: + return "VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR"; + case VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR: + return "VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR"; + case VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT: + return "VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT"; + case VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR: + return "VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR"; + case VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_EXT: + return "VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_EXT"; + case VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_EXT: + return "VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_EXT"; + default: + return "Unhandled VkAccessFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkAccessFlags(VkAccessFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkAccessFlagBits(static_cast<VkAccessFlagBits>(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkAccessFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkImageAspectFlagBits(VkImageAspectFlagBits input_value) { + switch (input_value) { + case VK_IMAGE_ASPECT_COLOR_BIT: + return "VK_IMAGE_ASPECT_COLOR_BIT"; + case VK_IMAGE_ASPECT_DEPTH_BIT: + return "VK_IMAGE_ASPECT_DEPTH_BIT"; + case VK_IMAGE_ASPECT_STENCIL_BIT: + return "VK_IMAGE_ASPECT_STENCIL_BIT"; + case VK_IMAGE_ASPECT_METADATA_BIT: + return "VK_IMAGE_ASPECT_METADATA_BIT"; + case VK_IMAGE_ASPECT_PLANE_0_BIT: + return "VK_IMAGE_ASPECT_PLANE_0_BIT"; + case VK_IMAGE_ASPECT_PLANE_1_BIT: + return "VK_IMAGE_ASPECT_PLANE_1_BIT"; + case VK_IMAGE_ASPECT_PLANE_2_BIT: + return "VK_IMAGE_ASPECT_PLANE_2_BIT"; + case VK_IMAGE_ASPECT_NONE: + return "VK_IMAGE_ASPECT_NONE"; + case VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT: + return "VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT"; + case VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT: + return "VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT"; + case VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT: + return "VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT"; + case VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT: + return "VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT"; + default: + return "Unhandled VkImageAspectFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkImageAspectFlags(VkImageAspectFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkImageAspectFlagBits(static_cast<VkImageAspectFlagBits>(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkImageAspectFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* 
string_VkFormatFeatureFlagBits(VkFormatFeatureFlagBits input_value) { + switch (input_value) { + case VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT: + return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT"; + case VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT: + return "VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT"; + case VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT: + return "VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT"; + case VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT: + return "VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT"; + case VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT: + return "VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT"; + case VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT: + return "VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT"; + case VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT: + return "VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT"; + case VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT: + return "VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT"; + case VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT: + return "VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT"; + case VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT: + return "VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT"; + case VK_FORMAT_FEATURE_BLIT_SRC_BIT: + return "VK_FORMAT_FEATURE_BLIT_SRC_BIT"; + case VK_FORMAT_FEATURE_BLIT_DST_BIT: + return "VK_FORMAT_FEATURE_BLIT_DST_BIT"; + case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT: + return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT"; + case VK_FORMAT_FEATURE_TRANSFER_SRC_BIT: + return "VK_FORMAT_FEATURE_TRANSFER_SRC_BIT"; + case VK_FORMAT_FEATURE_TRANSFER_DST_BIT: + return "VK_FORMAT_FEATURE_TRANSFER_DST_BIT"; + case VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT: + return "VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT"; + case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT: + return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT"; + case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT: + return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT"; + case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT: + return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT"; + case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT: + return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT"; + case VK_FORMAT_FEATURE_DISJOINT_BIT: + return "VK_FORMAT_FEATURE_DISJOINT_BIT"; + case VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT: + return "VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT"; + case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT: + return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT"; + case VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR: + return "VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR"; + case VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR: + return "VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR"; + case VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR: + return "VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR"; + case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT: + return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT"; + case VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT: + return "VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT"; + case VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR: + return "VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR"; + case VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR: + 
return "VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR"; + case VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR: + return "VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR"; + default: + return "Unhandled VkFormatFeatureFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkFormatFeatureFlags(VkFormatFeatureFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkFormatFeatureFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkFormatFeatureFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkImageCreateFlagBits(VkImageCreateFlagBits input_value) { + switch (input_value) { + case VK_IMAGE_CREATE_SPARSE_BINDING_BIT: + return "VK_IMAGE_CREATE_SPARSE_BINDING_BIT"; + case VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT: + return "VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT"; + case VK_IMAGE_CREATE_SPARSE_ALIASED_BIT: + return "VK_IMAGE_CREATE_SPARSE_ALIASED_BIT"; + case VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT: + return "VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT"; + case VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT: + return "VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT"; + case VK_IMAGE_CREATE_ALIAS_BIT: + return "VK_IMAGE_CREATE_ALIAS_BIT"; + case VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT: + return "VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT"; + case VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT: + return "VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT"; + case VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT: + return "VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT"; + case VK_IMAGE_CREATE_EXTENDED_USAGE_BIT: + return "VK_IMAGE_CREATE_EXTENDED_USAGE_BIT"; + case VK_IMAGE_CREATE_PROTECTED_BIT: + return "VK_IMAGE_CREATE_PROTECTED_BIT"; + case VK_IMAGE_CREATE_DISJOINT_BIT: + return "VK_IMAGE_CREATE_DISJOINT_BIT"; + case VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV: + return "VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV"; + case VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT: + return "VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT"; + case VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT: + return "VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT"; + case VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT: + return "VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT"; + case VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT: + return "VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT"; + case VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT: + return "VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT"; + case VK_IMAGE_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR: + return "VK_IMAGE_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR"; + case VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_EXT: + return "VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_EXT"; + default: + return "Unhandled VkImageCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkImageCreateFlags(VkImageCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkImageCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkImageCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSampleCountFlagBits(VkSampleCountFlagBits input_value) { + switch (input_value) { + case VK_SAMPLE_COUNT_1_BIT: + return "VK_SAMPLE_COUNT_1_BIT"; + case VK_SAMPLE_COUNT_2_BIT: 
+ return "VK_SAMPLE_COUNT_2_BIT"; + case VK_SAMPLE_COUNT_4_BIT: + return "VK_SAMPLE_COUNT_4_BIT"; + case VK_SAMPLE_COUNT_8_BIT: + return "VK_SAMPLE_COUNT_8_BIT"; + case VK_SAMPLE_COUNT_16_BIT: + return "VK_SAMPLE_COUNT_16_BIT"; + case VK_SAMPLE_COUNT_32_BIT: + return "VK_SAMPLE_COUNT_32_BIT"; + case VK_SAMPLE_COUNT_64_BIT: + return "VK_SAMPLE_COUNT_64_BIT"; + default: + return "Unhandled VkSampleCountFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSampleCountFlags(VkSampleCountFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSampleCountFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSampleCountFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkImageUsageFlagBits(VkImageUsageFlagBits input_value) { + switch (input_value) { + case VK_IMAGE_USAGE_TRANSFER_SRC_BIT: + return "VK_IMAGE_USAGE_TRANSFER_SRC_BIT"; + case VK_IMAGE_USAGE_TRANSFER_DST_BIT: + return "VK_IMAGE_USAGE_TRANSFER_DST_BIT"; + case VK_IMAGE_USAGE_SAMPLED_BIT: + return "VK_IMAGE_USAGE_SAMPLED_BIT"; + case VK_IMAGE_USAGE_STORAGE_BIT: + return "VK_IMAGE_USAGE_STORAGE_BIT"; + case VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT: + return "VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT"; + case VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT: + return "VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT"; + case VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT: + return "VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT"; + case VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT: + return "VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT"; + case VK_IMAGE_USAGE_HOST_TRANSFER_BIT: + return "VK_IMAGE_USAGE_HOST_TRANSFER_BIT"; + case VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR: + return "VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR"; + case VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR: + return "VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR"; + case VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR: + return "VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR"; + case VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT: + return "VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT"; + case VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR: + return "VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR"; + case VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR: + return "VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR"; + case VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR: + return "VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR"; + case VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR: + return "VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR"; + case VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT: + return "VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT"; + case VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI: + return "VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI"; + case VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM: + return "VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM"; + case VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM: + return "VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM"; + case VK_IMAGE_USAGE_TILE_MEMORY_QCOM: + return "VK_IMAGE_USAGE_TILE_MEMORY_QCOM"; + case VK_IMAGE_USAGE_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR: + return "VK_IMAGE_USAGE_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR"; + case VK_IMAGE_USAGE_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR: + return "VK_IMAGE_USAGE_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR"; + default: + return "Unhandled VkImageUsageFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkImageUsageFlags(VkImageUsageFlags input_value) { + std::string 
ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkImageUsageFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkImageUsageFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkInstanceCreateFlagBits(VkInstanceCreateFlagBits input_value) { + switch (input_value) { + case VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR: + return "VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR"; + default: + return "Unhandled VkInstanceCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkInstanceCreateFlags(VkInstanceCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkInstanceCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkInstanceCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkMemoryHeapFlagBits(VkMemoryHeapFlagBits input_value) { + switch (input_value) { + case VK_MEMORY_HEAP_DEVICE_LOCAL_BIT: + return "VK_MEMORY_HEAP_DEVICE_LOCAL_BIT"; + case VK_MEMORY_HEAP_MULTI_INSTANCE_BIT: + return "VK_MEMORY_HEAP_MULTI_INSTANCE_BIT"; + case VK_MEMORY_HEAP_TILE_MEMORY_BIT_QCOM: + return "VK_MEMORY_HEAP_TILE_MEMORY_BIT_QCOM"; + default: + return "Unhandled VkMemoryHeapFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkMemoryHeapFlags(VkMemoryHeapFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkMemoryHeapFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkMemoryHeapFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkMemoryPropertyFlagBits(VkMemoryPropertyFlagBits input_value) { + switch (input_value) { + case VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT: + return "VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT"; + case VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT: + return "VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT"; + case VK_MEMORY_PROPERTY_HOST_COHERENT_BIT: + return "VK_MEMORY_PROPERTY_HOST_COHERENT_BIT"; + case VK_MEMORY_PROPERTY_HOST_CACHED_BIT: + return "VK_MEMORY_PROPERTY_HOST_CACHED_BIT"; + case VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT: + return "VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT"; + case VK_MEMORY_PROPERTY_PROTECTED_BIT: + return "VK_MEMORY_PROPERTY_PROTECTED_BIT"; + case VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD: + return "VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD"; + case VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD: + return "VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD"; + case VK_MEMORY_PROPERTY_RDMA_CAPABLE_BIT_NV: + return "VK_MEMORY_PROPERTY_RDMA_CAPABLE_BIT_NV"; + default: + return "Unhandled VkMemoryPropertyFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkMemoryPropertyFlags(VkMemoryPropertyFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkMemoryPropertyFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkMemoryPropertyFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkQueueFlagBits(VkQueueFlagBits input_value) { + switch (input_value) { + case 
VK_QUEUE_GRAPHICS_BIT: + return "VK_QUEUE_GRAPHICS_BIT"; + case VK_QUEUE_COMPUTE_BIT: + return "VK_QUEUE_COMPUTE_BIT"; + case VK_QUEUE_TRANSFER_BIT: + return "VK_QUEUE_TRANSFER_BIT"; + case VK_QUEUE_SPARSE_BINDING_BIT: + return "VK_QUEUE_SPARSE_BINDING_BIT"; + case VK_QUEUE_PROTECTED_BIT: + return "VK_QUEUE_PROTECTED_BIT"; + case VK_QUEUE_VIDEO_DECODE_BIT_KHR: + return "VK_QUEUE_VIDEO_DECODE_BIT_KHR"; + case VK_QUEUE_VIDEO_ENCODE_BIT_KHR: + return "VK_QUEUE_VIDEO_ENCODE_BIT_KHR"; + case VK_QUEUE_OPTICAL_FLOW_BIT_NV: + return "VK_QUEUE_OPTICAL_FLOW_BIT_NV"; + default: + return "Unhandled VkQueueFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkQueueFlags(VkQueueFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkQueueFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkQueueFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDeviceQueueCreateFlagBits(VkDeviceQueueCreateFlagBits input_value) { + switch (input_value) { + case VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT: + return "VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT"; + default: + return "Unhandled VkDeviceQueueCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDeviceQueueCreateFlags(VkDeviceQueueCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDeviceQueueCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDeviceQueueCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPipelineStageFlagBits(VkPipelineStageFlagBits input_value) { + switch (input_value) { + case VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT: + return "VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT"; + case VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT: + return "VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT"; + case VK_PIPELINE_STAGE_VERTEX_INPUT_BIT: + return "VK_PIPELINE_STAGE_VERTEX_INPUT_BIT"; + case VK_PIPELINE_STAGE_VERTEX_SHADER_BIT: + return "VK_PIPELINE_STAGE_VERTEX_SHADER_BIT"; + case VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT: + return "VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT"; + case VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT: + return "VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT"; + case VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT: + return "VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT"; + case VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT: + return "VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT"; + case VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT: + return "VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT"; + case VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT: + return "VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT"; + case VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT: + return "VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT"; + case VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT: + return "VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT"; + case VK_PIPELINE_STAGE_TRANSFER_BIT: + return "VK_PIPELINE_STAGE_TRANSFER_BIT"; + case VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT: + return "VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT"; + case VK_PIPELINE_STAGE_HOST_BIT: + return "VK_PIPELINE_STAGE_HOST_BIT"; + case VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT: + return "VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT"; + case VK_PIPELINE_STAGE_ALL_COMMANDS_BIT: + return "VK_PIPELINE_STAGE_ALL_COMMANDS_BIT"; + 
case VK_PIPELINE_STAGE_NONE: + return "VK_PIPELINE_STAGE_NONE"; + case VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT: + return "VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT"; + case VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT: + return "VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT"; + case VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR: + return "VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR"; + case VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR: + return "VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR"; + case VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT: + return "VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT"; + case VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR: + return "VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR"; + case VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT: + return "VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT"; + case VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT: + return "VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT"; + case VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_EXT: + return "VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_EXT"; + default: + return "Unhandled VkPipelineStageFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPipelineStageFlags(VkPipelineStageFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineStageFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineStageFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkMemoryMapFlagBits(VkMemoryMapFlagBits input_value) { + switch (input_value) { + case VK_MEMORY_MAP_PLACED_BIT_EXT: + return "VK_MEMORY_MAP_PLACED_BIT_EXT"; + default: + return "Unhandled VkMemoryMapFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkMemoryMapFlags(VkMemoryMapFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkMemoryMapFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkMemoryMapFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSparseMemoryBindFlagBits(VkSparseMemoryBindFlagBits input_value) { + switch (input_value) { + case VK_SPARSE_MEMORY_BIND_METADATA_BIT: + return "VK_SPARSE_MEMORY_BIND_METADATA_BIT"; + default: + return "Unhandled VkSparseMemoryBindFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSparseMemoryBindFlags(VkSparseMemoryBindFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSparseMemoryBindFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSparseMemoryBindFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSparseImageFormatFlagBits(VkSparseImageFormatFlagBits input_value) { + switch (input_value) { + case VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT: + return "VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT"; + case VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT: + return "VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT"; + case VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT: + return "VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT"; + default: + return "Unhandled VkSparseImageFormatFlagBits"; + } +} + 
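The helpers above all follow one pattern: a string_Vk*FlagBits overload names a single bit through a switch, and the companion string_Vk*Flags overload walks the mask bit by bit, casts each isolated bit back to the *FlagBits type (e.g. static_cast<VkImageUsageFlagBits>(1U << index)), and joins the names with "|". A minimal usage sketch under that assumption is shown below; it presumes <vulkan/vulkan.h> plus the generated helper header in this diff are included, and log_image_create_info is a purely illustrative name, not part of Anvil or the Vulkan headers.

#include <cstdio>
#include <vulkan/vulkan.h>
// ...plus the generated string-helper header shown in this diff.

// Illustrative only: print the bitmask/enum fields of a VkImageCreateInfo.
// string_VkImageCreateFlags/string_VkImageUsageFlags return std::string
// (they decode flag masks); string_VkSampleCountFlagBits returns const char*
// because VkImageCreateInfo::samples holds a single bit.
static void log_image_create_info(const VkImageCreateInfo& info)
{
    std::printf("flags:   %s\n", string_VkImageCreateFlags(info.flags).c_str());
    std::printf("usage:   %s\n", string_VkImageUsageFlags(info.usage).c_str());
    std::printf("samples: %s\n", string_VkSampleCountFlagBits(info.samples));
}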
+#ifdef __cplusplus +static inline std::string string_VkSparseImageFormatFlags(VkSparseImageFormatFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSparseImageFormatFlagBits(static_cast<VkSparseImageFormatFlagBits>(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSparseImageFormatFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkFenceCreateFlagBits(VkFenceCreateFlagBits input_value) { + switch (input_value) { + case VK_FENCE_CREATE_SIGNALED_BIT: + return "VK_FENCE_CREATE_SIGNALED_BIT"; + default: + return "Unhandled VkFenceCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkFenceCreateFlags(VkFenceCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkFenceCreateFlagBits(static_cast<VkFenceCreateFlagBits>(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkFenceCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkEventCreateFlagBits(VkEventCreateFlagBits input_value) { + switch (input_value) { + case VK_EVENT_CREATE_DEVICE_ONLY_BIT: + return "VK_EVENT_CREATE_DEVICE_ONLY_BIT"; + default: + return "Unhandled VkEventCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkEventCreateFlags(VkEventCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkEventCreateFlagBits(static_cast<VkEventCreateFlagBits>(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkEventCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkQueryPipelineStatisticFlagBits(VkQueryPipelineStatisticFlagBits input_value) { + switch (input_value) { + case VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT"; + case 
VK_QUERY_PIPELINE_STATISTIC_TASK_SHADER_INVOCATIONS_BIT_EXT: + return "VK_QUERY_PIPELINE_STATISTIC_TASK_SHADER_INVOCATIONS_BIT_EXT"; + case VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT: + return "VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT"; + case VK_QUERY_PIPELINE_STATISTIC_CLUSTER_CULLING_SHADER_INVOCATIONS_BIT_HUAWEI: + return "VK_QUERY_PIPELINE_STATISTIC_CLUSTER_CULLING_SHADER_INVOCATIONS_BIT_HUAWEI"; + default: + return "Unhandled VkQueryPipelineStatisticFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkQueryPipelineStatisticFlags(VkQueryPipelineStatisticFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkQueryPipelineStatisticFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkQueryPipelineStatisticFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkQueryResultFlagBits(VkQueryResultFlagBits input_value) { + switch (input_value) { + case VK_QUERY_RESULT_64_BIT: + return "VK_QUERY_RESULT_64_BIT"; + case VK_QUERY_RESULT_WAIT_BIT: + return "VK_QUERY_RESULT_WAIT_BIT"; + case VK_QUERY_RESULT_WITH_AVAILABILITY_BIT: + return "VK_QUERY_RESULT_WITH_AVAILABILITY_BIT"; + case VK_QUERY_RESULT_PARTIAL_BIT: + return "VK_QUERY_RESULT_PARTIAL_BIT"; + case VK_QUERY_RESULT_WITH_STATUS_BIT_KHR: + return "VK_QUERY_RESULT_WITH_STATUS_BIT_KHR"; + default: + return "Unhandled VkQueryResultFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkQueryResultFlags(VkQueryResultFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkQueryResultFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkQueryResultFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkBufferCreateFlagBits(VkBufferCreateFlagBits input_value) { + switch (input_value) { + case VK_BUFFER_CREATE_SPARSE_BINDING_BIT: + return "VK_BUFFER_CREATE_SPARSE_BINDING_BIT"; + case VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT: + return "VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT"; + case VK_BUFFER_CREATE_SPARSE_ALIASED_BIT: + return "VK_BUFFER_CREATE_SPARSE_ALIASED_BIT"; + case VK_BUFFER_CREATE_PROTECTED_BIT: + return "VK_BUFFER_CREATE_PROTECTED_BIT"; + case VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT: + return "VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT"; + case VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT: + return "VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT"; + case VK_BUFFER_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR: + return "VK_BUFFER_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR"; + default: + return "Unhandled VkBufferCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkBufferCreateFlags(VkBufferCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkBufferCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkBufferCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkBufferUsageFlagBits(VkBufferUsageFlagBits input_value) { + switch (input_value) { + case VK_BUFFER_USAGE_TRANSFER_SRC_BIT: + return 
"VK_BUFFER_USAGE_TRANSFER_SRC_BIT"; + case VK_BUFFER_USAGE_TRANSFER_DST_BIT: + return "VK_BUFFER_USAGE_TRANSFER_DST_BIT"; + case VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT: + return "VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT"; + case VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT: + return "VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT"; + case VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT: + return "VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT"; + case VK_BUFFER_USAGE_STORAGE_BUFFER_BIT: + return "VK_BUFFER_USAGE_STORAGE_BUFFER_BIT"; + case VK_BUFFER_USAGE_INDEX_BUFFER_BIT: + return "VK_BUFFER_USAGE_INDEX_BUFFER_BIT"; + case VK_BUFFER_USAGE_VERTEX_BUFFER_BIT: + return "VK_BUFFER_USAGE_VERTEX_BUFFER_BIT"; + case VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT: + return "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT"; + case VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT: + return "VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT"; + case VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR: + return "VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR"; + case VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR: + return "VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR"; + case VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT: + return "VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT"; + case VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT: + return "VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT"; + case VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT: + return "VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + case VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX: + return "VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX"; +#endif // VK_ENABLE_BETA_EXTENSIONS + case VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR: + return "VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR"; + case VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR: + return "VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR"; + case VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR: + return "VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR"; + case VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR: + return "VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR"; + case VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR: + return "VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR"; + case VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT: + return "VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT"; + case VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT: + return "VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT"; + case VK_BUFFER_USAGE_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT: + return "VK_BUFFER_USAGE_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT"; + case VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT: + return "VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT"; + case VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT: + return "VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT"; + case VK_BUFFER_USAGE_TILE_MEMORY_QCOM: + return "VK_BUFFER_USAGE_TILE_MEMORY_QCOM"; + default: + return "Unhandled VkBufferUsageFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkBufferUsageFlags(VkBufferUsageFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkBufferUsageFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkBufferUsageFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkImageViewCreateFlagBits(VkImageViewCreateFlagBits input_value) { + switch 
(input_value) { + case VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT: + return "VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT"; + case VK_IMAGE_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT: + return "VK_IMAGE_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT"; + case VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT: + return "VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT"; + default: + return "Unhandled VkImageViewCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkImageViewCreateFlags(VkImageViewCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkImageViewCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkImageViewCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPipelineCacheCreateFlagBits(VkPipelineCacheCreateFlagBits input_value) { + switch (input_value) { + case VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT: + return "VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT"; + case VK_PIPELINE_CACHE_CREATE_INTERNALLY_SYNCHRONIZED_MERGE_BIT_KHR: + return "VK_PIPELINE_CACHE_CREATE_INTERNALLY_SYNCHRONIZED_MERGE_BIT_KHR"; + default: + return "Unhandled VkPipelineCacheCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPipelineCacheCreateFlags(VkPipelineCacheCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineCacheCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineCacheCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkColorComponentFlagBits(VkColorComponentFlagBits input_value) { + switch (input_value) { + case VK_COLOR_COMPONENT_R_BIT: + return "VK_COLOR_COMPONENT_R_BIT"; + case VK_COLOR_COMPONENT_G_BIT: + return "VK_COLOR_COMPONENT_G_BIT"; + case VK_COLOR_COMPONENT_B_BIT: + return "VK_COLOR_COMPONENT_B_BIT"; + case VK_COLOR_COMPONENT_A_BIT: + return "VK_COLOR_COMPONENT_A_BIT"; + default: + return "Unhandled VkColorComponentFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkColorComponentFlags(VkColorComponentFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkColorComponentFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkColorComponentFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPipelineCreateFlagBits(VkPipelineCreateFlagBits input_value) { + switch (input_value) { + case VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT: + return "VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT"; + case VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT: + return "VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT"; + case VK_PIPELINE_CREATE_DERIVATIVE_BIT: + return "VK_PIPELINE_CREATE_DERIVATIVE_BIT"; + case VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT: + return "VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT"; + case VK_PIPELINE_CREATE_DISPATCH_BASE_BIT: + return "VK_PIPELINE_CREATE_DISPATCH_BASE_BIT"; + case VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT: + return 
"VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT"; + case VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT: + return "VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT"; + case VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT: + return "VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT"; + case VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT: + return "VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT"; + case VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR: + return "VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR"; + case VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR: + return "VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR"; + case VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR: + return "VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR"; + case VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR: + return "VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR"; + case VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR: + return "VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR"; + case VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR: + return "VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR"; + case VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR: + return "VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR"; + case VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV: + return "VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV"; + case VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT: + return "VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT"; + case VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR: + return "VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR"; + case VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR: + return "VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR"; + case VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR: + return "VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR"; + case VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV: + return "VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV"; + case VK_PIPELINE_CREATE_LIBRARY_BIT_KHR: + return "VK_PIPELINE_CREATE_LIBRARY_BIT_KHR"; + case VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT: + return "VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT"; + case VK_PIPELINE_CREATE_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT: + return "VK_PIPELINE_CREATE_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT"; + case VK_PIPELINE_CREATE_LINK_TIME_OPTIMIZATION_BIT_EXT: + return "VK_PIPELINE_CREATE_LINK_TIME_OPTIMIZATION_BIT_EXT"; + case VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV: + return "VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV"; + case VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT: + return "VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT"; + case VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT: + return "VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT"; + case VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT: + return "VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + case VK_PIPELINE_CREATE_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV: + return "VK_PIPELINE_CREATE_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV"; +#endif // VK_ENABLE_BETA_EXTENSIONS + default: + return "Unhandled VkPipelineCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string 
string_VkPipelineCreateFlags(VkPipelineCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPipelineShaderStageCreateFlagBits(VkPipelineShaderStageCreateFlagBits input_value) { + switch (input_value) { + case VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT: + return "VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT"; + case VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT: + return "VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT"; + default: + return "Unhandled VkPipelineShaderStageCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPipelineShaderStageCreateFlags(VkPipelineShaderStageCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineShaderStageCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineShaderStageCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkShaderStageFlagBits(VkShaderStageFlagBits input_value) { + switch (input_value) { + case VK_SHADER_STAGE_VERTEX_BIT: + return "VK_SHADER_STAGE_VERTEX_BIT"; + case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT: + return "VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT"; + case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT: + return "VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT"; + case VK_SHADER_STAGE_GEOMETRY_BIT: + return "VK_SHADER_STAGE_GEOMETRY_BIT"; + case VK_SHADER_STAGE_FRAGMENT_BIT: + return "VK_SHADER_STAGE_FRAGMENT_BIT"; + case VK_SHADER_STAGE_COMPUTE_BIT: + return "VK_SHADER_STAGE_COMPUTE_BIT"; + case VK_SHADER_STAGE_RAYGEN_BIT_KHR: + return "VK_SHADER_STAGE_RAYGEN_BIT_KHR"; + case VK_SHADER_STAGE_ANY_HIT_BIT_KHR: + return "VK_SHADER_STAGE_ANY_HIT_BIT_KHR"; + case VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR: + return "VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR"; + case VK_SHADER_STAGE_MISS_BIT_KHR: + return "VK_SHADER_STAGE_MISS_BIT_KHR"; + case VK_SHADER_STAGE_INTERSECTION_BIT_KHR: + return "VK_SHADER_STAGE_INTERSECTION_BIT_KHR"; + case VK_SHADER_STAGE_CALLABLE_BIT_KHR: + return "VK_SHADER_STAGE_CALLABLE_BIT_KHR"; + case VK_SHADER_STAGE_TASK_BIT_EXT: + return "VK_SHADER_STAGE_TASK_BIT_EXT"; + case VK_SHADER_STAGE_MESH_BIT_EXT: + return "VK_SHADER_STAGE_MESH_BIT_EXT"; + case VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI: + return "VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI"; + case VK_SHADER_STAGE_CLUSTER_CULLING_BIT_HUAWEI: + return "VK_SHADER_STAGE_CLUSTER_CULLING_BIT_HUAWEI"; + default: + return "Unhandled VkShaderStageFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkShaderStageFlags(VkShaderStageFlags input_value) { + if (input_value == VK_SHADER_STAGE_ALL_GRAPHICS) { return "VK_SHADER_STAGE_ALL_GRAPHICS"; } + if (input_value == VK_SHADER_STAGE_ALL) { return "VK_SHADER_STAGE_ALL"; } + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkShaderStageFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkShaderStageFlags(0)"); + return 
ret; +} +#endif // __cplusplus +static inline const char* string_VkCullModeFlagBits(VkCullModeFlagBits input_value) { + switch (input_value) { + case VK_CULL_MODE_NONE: + return "VK_CULL_MODE_NONE"; + case VK_CULL_MODE_FRONT_BIT: + return "VK_CULL_MODE_FRONT_BIT"; + case VK_CULL_MODE_BACK_BIT: + return "VK_CULL_MODE_BACK_BIT"; + default: + return "Unhandled VkCullModeFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkCullModeFlags(VkCullModeFlags input_value) { + if (input_value == VK_CULL_MODE_FRONT_AND_BACK) { return "VK_CULL_MODE_FRONT_AND_BACK"; } + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkCullModeFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkCullModeFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPipelineDepthStencilStateCreateFlagBits(VkPipelineDepthStencilStateCreateFlagBits input_value) { + switch (input_value) { + case VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT: + return "VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT"; + case VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT: + return "VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT"; + default: + return "Unhandled VkPipelineDepthStencilStateCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPipelineDepthStencilStateCreateFlags(VkPipelineDepthStencilStateCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineDepthStencilStateCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineDepthStencilStateCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPipelineColorBlendStateCreateFlagBits(VkPipelineColorBlendStateCreateFlagBits input_value) { + switch (input_value) { + case VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT: + return "VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT"; + default: + return "Unhandled VkPipelineColorBlendStateCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPipelineColorBlendStateCreateFlags(VkPipelineColorBlendStateCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineColorBlendStateCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineColorBlendStateCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPipelineLayoutCreateFlagBits(VkPipelineLayoutCreateFlagBits input_value) { + switch (input_value) { + case VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT: + return "VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT"; + default: + return "Unhandled VkPipelineLayoutCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPipelineLayoutCreateFlags(VkPipelineLayoutCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( 
!ret.empty()) ret.append("|"); + ret.append(string_VkPipelineLayoutCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineLayoutCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSamplerCreateFlagBits(VkSamplerCreateFlagBits input_value) { + switch (input_value) { + case VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT: + return "VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT"; + case VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT: + return "VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT"; + case VK_SAMPLER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT: + return "VK_SAMPLER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT"; + case VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT: + return "VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT"; + case VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM: + return "VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM"; + default: + return "Unhandled VkSamplerCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSamplerCreateFlags(VkSamplerCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSamplerCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSamplerCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDescriptorPoolCreateFlagBits(VkDescriptorPoolCreateFlagBits input_value) { + switch (input_value) { + case VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT: + return "VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT"; + case VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT: + return "VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT"; + case VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT: + return "VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT"; + case VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_SETS_BIT_NV: + return "VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_SETS_BIT_NV"; + case VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_POOLS_BIT_NV: + return "VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_POOLS_BIT_NV"; + default: + return "Unhandled VkDescriptorPoolCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDescriptorPoolCreateFlags(VkDescriptorPoolCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDescriptorPoolCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDescriptorPoolCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDescriptorSetLayoutCreateFlagBits(VkDescriptorSetLayoutCreateFlagBits input_value) { + switch (input_value) { + case VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT: + return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT"; + case VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT: + return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT"; + case VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT: + return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT"; + case VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT: + return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT"; + case VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV: + return 
"VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV"; + case VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT: + return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT"; + case VK_DESCRIPTOR_SET_LAYOUT_CREATE_PER_STAGE_BIT_NV: + return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PER_STAGE_BIT_NV"; + default: + return "Unhandled VkDescriptorSetLayoutCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDescriptorSetLayoutCreateFlags(VkDescriptorSetLayoutCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDescriptorSetLayoutCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDescriptorSetLayoutCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkAttachmentDescriptionFlagBits(VkAttachmentDescriptionFlagBits input_value) { + switch (input_value) { + case VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT: + return "VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT"; + default: + return "Unhandled VkAttachmentDescriptionFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkAttachmentDescriptionFlags(VkAttachmentDescriptionFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkAttachmentDescriptionFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkAttachmentDescriptionFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDependencyFlagBits(VkDependencyFlagBits input_value) { + switch (input_value) { + case VK_DEPENDENCY_BY_REGION_BIT: + return "VK_DEPENDENCY_BY_REGION_BIT"; + case VK_DEPENDENCY_DEVICE_GROUP_BIT: + return "VK_DEPENDENCY_DEVICE_GROUP_BIT"; + case VK_DEPENDENCY_VIEW_LOCAL_BIT: + return "VK_DEPENDENCY_VIEW_LOCAL_BIT"; + case VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT: + return "VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT"; + case VK_DEPENDENCY_QUEUE_FAMILY_OWNERSHIP_TRANSFER_USE_ALL_STAGES_BIT_KHR: + return "VK_DEPENDENCY_QUEUE_FAMILY_OWNERSHIP_TRANSFER_USE_ALL_STAGES_BIT_KHR"; + default: + return "Unhandled VkDependencyFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDependencyFlags(VkDependencyFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDependencyFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDependencyFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkFramebufferCreateFlagBits(VkFramebufferCreateFlagBits input_value) { + switch (input_value) { + case VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT: + return "VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT"; + default: + return "Unhandled VkFramebufferCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkFramebufferCreateFlags(VkFramebufferCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkFramebufferCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkFramebufferCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* 
string_VkRenderPassCreateFlagBits(VkRenderPassCreateFlagBits input_value) { + switch (input_value) { + case VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM: + return "VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM"; + default: + return "Unhandled VkRenderPassCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkRenderPassCreateFlags(VkRenderPassCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkRenderPassCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkRenderPassCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSubpassDescriptionFlagBits(VkSubpassDescriptionFlagBits input_value) { + switch (input_value) { + case VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX: + return "VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX"; + case VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX: + return "VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX"; + case VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM: + return "VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM"; + case VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM: + return "VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM"; + case VK_SUBPASS_DESCRIPTION_TILE_SHADING_APRON_BIT_QCOM: + return "VK_SUBPASS_DESCRIPTION_TILE_SHADING_APRON_BIT_QCOM"; + case VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT: + return "VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT"; + case VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT: + return "VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT"; + case VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT: + return "VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT"; + case VK_SUBPASS_DESCRIPTION_ENABLE_LEGACY_DITHERING_BIT_EXT: + return "VK_SUBPASS_DESCRIPTION_ENABLE_LEGACY_DITHERING_BIT_EXT"; + default: + return "Unhandled VkSubpassDescriptionFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSubpassDescriptionFlags(VkSubpassDescriptionFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSubpassDescriptionFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSubpassDescriptionFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkCommandPoolCreateFlagBits(VkCommandPoolCreateFlagBits input_value) { + switch (input_value) { + case VK_COMMAND_POOL_CREATE_TRANSIENT_BIT: + return "VK_COMMAND_POOL_CREATE_TRANSIENT_BIT"; + case VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT: + return "VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT"; + case VK_COMMAND_POOL_CREATE_PROTECTED_BIT: + return "VK_COMMAND_POOL_CREATE_PROTECTED_BIT"; + default: + return "Unhandled VkCommandPoolCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkCommandPoolCreateFlags(VkCommandPoolCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkCommandPoolCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) 
ret.append("VkCommandPoolCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkCommandPoolResetFlagBits(VkCommandPoolResetFlagBits input_value) { + switch (input_value) { + case VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT: + return "VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT"; + default: + return "Unhandled VkCommandPoolResetFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkCommandPoolResetFlags(VkCommandPoolResetFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkCommandPoolResetFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkCommandPoolResetFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkCommandBufferUsageFlagBits(VkCommandBufferUsageFlagBits input_value) { + switch (input_value) { + case VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT: + return "VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT"; + case VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT: + return "VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT"; + case VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT: + return "VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT"; + default: + return "Unhandled VkCommandBufferUsageFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkCommandBufferUsageFlags(VkCommandBufferUsageFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkCommandBufferUsageFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkCommandBufferUsageFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkQueryControlFlagBits(VkQueryControlFlagBits input_value) { + switch (input_value) { + case VK_QUERY_CONTROL_PRECISE_BIT: + return "VK_QUERY_CONTROL_PRECISE_BIT"; + default: + return "Unhandled VkQueryControlFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkQueryControlFlags(VkQueryControlFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkQueryControlFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkQueryControlFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkCommandBufferResetFlagBits(VkCommandBufferResetFlagBits input_value) { + switch (input_value) { + case VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT: + return "VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT"; + default: + return "Unhandled VkCommandBufferResetFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkCommandBufferResetFlags(VkCommandBufferResetFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkCommandBufferResetFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkCommandBufferResetFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkStencilFaceFlagBits(VkStencilFaceFlagBits input_value) { + switch (input_value) { + case VK_STENCIL_FACE_FRONT_BIT: + return "VK_STENCIL_FACE_FRONT_BIT"; + case VK_STENCIL_FACE_BACK_BIT: + return 
"VK_STENCIL_FACE_BACK_BIT"; + default: + return "Unhandled VkStencilFaceFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkStencilFaceFlags(VkStencilFaceFlags input_value) { + if (input_value == VK_STENCIL_FACE_FRONT_AND_BACK) { return "VK_STENCIL_FACE_FRONT_AND_BACK"; } + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkStencilFaceFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkStencilFaceFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSubgroupFeatureFlagBits(VkSubgroupFeatureFlagBits input_value) { + switch (input_value) { + case VK_SUBGROUP_FEATURE_BASIC_BIT: + return "VK_SUBGROUP_FEATURE_BASIC_BIT"; + case VK_SUBGROUP_FEATURE_VOTE_BIT: + return "VK_SUBGROUP_FEATURE_VOTE_BIT"; + case VK_SUBGROUP_FEATURE_ARITHMETIC_BIT: + return "VK_SUBGROUP_FEATURE_ARITHMETIC_BIT"; + case VK_SUBGROUP_FEATURE_BALLOT_BIT: + return "VK_SUBGROUP_FEATURE_BALLOT_BIT"; + case VK_SUBGROUP_FEATURE_SHUFFLE_BIT: + return "VK_SUBGROUP_FEATURE_SHUFFLE_BIT"; + case VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT: + return "VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT"; + case VK_SUBGROUP_FEATURE_CLUSTERED_BIT: + return "VK_SUBGROUP_FEATURE_CLUSTERED_BIT"; + case VK_SUBGROUP_FEATURE_QUAD_BIT: + return "VK_SUBGROUP_FEATURE_QUAD_BIT"; + case VK_SUBGROUP_FEATURE_ROTATE_BIT: + return "VK_SUBGROUP_FEATURE_ROTATE_BIT"; + case VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT: + return "VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT"; + case VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV: + return "VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV"; + default: + return "Unhandled VkSubgroupFeatureFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSubgroupFeatureFlags(VkSubgroupFeatureFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSubgroupFeatureFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSubgroupFeatureFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPeerMemoryFeatureFlagBits(VkPeerMemoryFeatureFlagBits input_value) { + switch (input_value) { + case VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT: + return "VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT"; + case VK_PEER_MEMORY_FEATURE_COPY_DST_BIT: + return "VK_PEER_MEMORY_FEATURE_COPY_DST_BIT"; + case VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT: + return "VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT"; + case VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT: + return "VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT"; + default: + return "Unhandled VkPeerMemoryFeatureFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPeerMemoryFeatureFlags(VkPeerMemoryFeatureFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPeerMemoryFeatureFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPeerMemoryFeatureFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkMemoryAllocateFlagBits(VkMemoryAllocateFlagBits input_value) { + switch (input_value) { + case VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT: + return "VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT"; + case VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT: + return 
"VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT"; + case VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT: + return "VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT"; + default: + return "Unhandled VkMemoryAllocateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkMemoryAllocateFlags(VkMemoryAllocateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkMemoryAllocateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkMemoryAllocateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkExternalMemoryHandleTypeFlagBits(VkExternalMemoryHandleTypeFlagBits input_value) { + switch (input_value) { + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_SCREEN_BUFFER_BIT_QNX: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_SCREEN_BUFFER_BIT_QNX"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLBUFFER_BIT_EXT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLBUFFER_BIT_EXT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLTEXTURE_BIT_EXT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLTEXTURE_BIT_EXT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLHEAP_BIT_EXT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLHEAP_BIT_EXT"; + default: + return "Unhandled VkExternalMemoryHandleTypeFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkExternalMemoryHandleTypeFlags(VkExternalMemoryHandleTypeFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkExternalMemoryHandleTypeFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkExternalMemoryHandleTypeFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* 
string_VkExternalMemoryFeatureFlagBits(VkExternalMemoryFeatureFlagBits input_value) { + switch (input_value) { + case VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT: + return "VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT"; + case VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT: + return "VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT"; + case VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT: + return "VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT"; + default: + return "Unhandled VkExternalMemoryFeatureFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkExternalMemoryFeatureFlags(VkExternalMemoryFeatureFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkExternalMemoryFeatureFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkExternalMemoryFeatureFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkExternalFenceHandleTypeFlagBits(VkExternalFenceHandleTypeFlagBits input_value) { + switch (input_value) { + case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT: + return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT"; + case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT: + return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT"; + case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT: + return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT"; + case VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT: + return "VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT"; + default: + return "Unhandled VkExternalFenceHandleTypeFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkExternalFenceHandleTypeFlags(VkExternalFenceHandleTypeFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkExternalFenceHandleTypeFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkExternalFenceHandleTypeFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkExternalFenceFeatureFlagBits(VkExternalFenceFeatureFlagBits input_value) { + switch (input_value) { + case VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT: + return "VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT"; + case VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT: + return "VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT"; + default: + return "Unhandled VkExternalFenceFeatureFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkExternalFenceFeatureFlags(VkExternalFenceFeatureFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkExternalFenceFeatureFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkExternalFenceFeatureFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkFenceImportFlagBits(VkFenceImportFlagBits input_value) { + switch (input_value) { + case VK_FENCE_IMPORT_TEMPORARY_BIT: + return "VK_FENCE_IMPORT_TEMPORARY_BIT"; + default: + return "Unhandled VkFenceImportFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkFenceImportFlags(VkFenceImportFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + 
ret.append(string_VkFenceImportFlagBits(static_cast<VkFenceImportFlagBits>(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkFenceImportFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSemaphoreImportFlagBits(VkSemaphoreImportFlagBits input_value) { + switch (input_value) { + case VK_SEMAPHORE_IMPORT_TEMPORARY_BIT: + return "VK_SEMAPHORE_IMPORT_TEMPORARY_BIT"; + default: + return "Unhandled VkSemaphoreImportFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSemaphoreImportFlags(VkSemaphoreImportFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSemaphoreImportFlagBits(static_cast<VkSemaphoreImportFlagBits>(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSemaphoreImportFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkExternalSemaphoreHandleTypeFlagBits(VkExternalSemaphoreHandleTypeFlagBits input_value) { + switch (input_value) { + case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT: + return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT"; + case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT: + return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT"; + case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT: + return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT"; + case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT: + return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT"; + case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT: + return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT"; + case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA: + return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA"; + default: + return "Unhandled VkExternalSemaphoreHandleTypeFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkExternalSemaphoreHandleTypeFlags(VkExternalSemaphoreHandleTypeFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkExternalSemaphoreHandleTypeFlagBits(static_cast<VkExternalSemaphoreHandleTypeFlagBits>(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkExternalSemaphoreHandleTypeFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkExternalSemaphoreFeatureFlagBits(VkExternalSemaphoreFeatureFlagBits input_value) { + switch (input_value) { + case VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT: + return "VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT"; + case VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT: + return "VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT"; + default: + return "Unhandled VkExternalSemaphoreFeatureFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkExternalSemaphoreFeatureFlags(VkExternalSemaphoreFeatureFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkExternalSemaphoreFeatureFlagBits(static_cast<VkExternalSemaphoreFeatureFlagBits>(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkExternalSemaphoreFeatureFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkResolveModeFlagBits(VkResolveModeFlagBits input_value) { + switch (input_value) { + case VK_RESOLVE_MODE_NONE: + return "VK_RESOLVE_MODE_NONE"; + case VK_RESOLVE_MODE_SAMPLE_ZERO_BIT: +
return "VK_RESOLVE_MODE_SAMPLE_ZERO_BIT"; + case VK_RESOLVE_MODE_AVERAGE_BIT: + return "VK_RESOLVE_MODE_AVERAGE_BIT"; + case VK_RESOLVE_MODE_MIN_BIT: + return "VK_RESOLVE_MODE_MIN_BIT"; + case VK_RESOLVE_MODE_MAX_BIT: + return "VK_RESOLVE_MODE_MAX_BIT"; + case VK_RESOLVE_MODE_EXTERNAL_FORMAT_DOWNSAMPLE_ANDROID: + return "VK_RESOLVE_MODE_EXTERNAL_FORMAT_DOWNSAMPLE_ANDROID"; + default: + return "Unhandled VkResolveModeFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkResolveModeFlags(VkResolveModeFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkResolveModeFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkResolveModeFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDescriptorBindingFlagBits(VkDescriptorBindingFlagBits input_value) { + switch (input_value) { + case VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT: + return "VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT"; + case VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT: + return "VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT"; + case VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT: + return "VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT"; + case VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT: + return "VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT"; + default: + return "Unhandled VkDescriptorBindingFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDescriptorBindingFlags(VkDescriptorBindingFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDescriptorBindingFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDescriptorBindingFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSemaphoreWaitFlagBits(VkSemaphoreWaitFlagBits input_value) { + switch (input_value) { + case VK_SEMAPHORE_WAIT_ANY_BIT: + return "VK_SEMAPHORE_WAIT_ANY_BIT"; + default: + return "Unhandled VkSemaphoreWaitFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSemaphoreWaitFlags(VkSemaphoreWaitFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSemaphoreWaitFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSemaphoreWaitFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPipelineCreationFeedbackFlagBits(VkPipelineCreationFeedbackFlagBits input_value) { + switch (input_value) { + case VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT: + return "VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT"; + case VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT: + return "VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT"; + case VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT: + return "VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT"; + default: + return "Unhandled VkPipelineCreationFeedbackFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPipelineCreationFeedbackFlags(VkPipelineCreationFeedbackFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) 
ret.append("|"); + ret.append(string_VkPipelineCreationFeedbackFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineCreationFeedbackFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkToolPurposeFlagBits(VkToolPurposeFlagBits input_value) { + switch (input_value) { + case VK_TOOL_PURPOSE_VALIDATION_BIT: + return "VK_TOOL_PURPOSE_VALIDATION_BIT"; + case VK_TOOL_PURPOSE_PROFILING_BIT: + return "VK_TOOL_PURPOSE_PROFILING_BIT"; + case VK_TOOL_PURPOSE_TRACING_BIT: + return "VK_TOOL_PURPOSE_TRACING_BIT"; + case VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT: + return "VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT"; + case VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT: + return "VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT"; + case VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT: + return "VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT"; + case VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT: + return "VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT"; + default: + return "Unhandled VkToolPurposeFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkToolPurposeFlags(VkToolPurposeFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkToolPurposeFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkToolPurposeFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPipelineStageFlagBits2(uint64_t input_value) { + if (input_value == VK_PIPELINE_STAGE_2_NONE) return "VK_PIPELINE_STAGE_2_NONE"; + if (input_value == VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT) return "VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT) return "VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT) return "VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT) return "VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT) return "VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT) return "VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT) return "VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT) return "VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT) return "VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT) return "VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT) return "VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT) return "VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT) return "VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT) return "VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_HOST_BIT) return "VK_PIPELINE_STAGE_2_HOST_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT) return "VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT) return 
"VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_COPY_BIT) return "VK_PIPELINE_STAGE_2_COPY_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_RESOLVE_BIT) return "VK_PIPELINE_STAGE_2_RESOLVE_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_BLIT_BIT) return "VK_PIPELINE_STAGE_2_BLIT_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_CLEAR_BIT) return "VK_PIPELINE_STAGE_2_CLEAR_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT) return "VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT) return "VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT) return "VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR) return "VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR"; + if (input_value == VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR) return "VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR"; + if (input_value == VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT) return "VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT"; + if (input_value == VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT) return "VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT"; + if (input_value == VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_EXT) return "VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_EXT"; + if (input_value == VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR) return "VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR"; + if (input_value == VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR) return "VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR"; + if (input_value == VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR) return "VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR"; + if (input_value == VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT) return "VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT"; + if (input_value == VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT) return "VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT"; + if (input_value == VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT) return "VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT"; + if (input_value == VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI) return "VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI"; + if (input_value == VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI) return "VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI"; + if (input_value == VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR) return "VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR"; + if (input_value == VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT) return "VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT"; + if (input_value == VK_PIPELINE_STAGE_2_CLUSTER_CULLING_SHADER_BIT_HUAWEI) return "VK_PIPELINE_STAGE_2_CLUSTER_CULLING_SHADER_BIT_HUAWEI"; + if (input_value == VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV) return "VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV"; + if (input_value == VK_PIPELINE_STAGE_2_CONVERT_COOPERATIVE_VECTOR_MATRIX_BIT_NV) return "VK_PIPELINE_STAGE_2_CONVERT_COOPERATIVE_VECTOR_MATRIX_BIT_NV"; + return "Unhandled VkPipelineStageFlagBits2"; +} + +#ifdef __cplusplus +static inline std::string string_VkPipelineStageFlags2(VkPipelineStageFlags2 input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineStageFlagBits2(static_cast(1ULL << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineStageFlags2(0)"); + 
return ret; +} +#endif // __cplusplus +static inline const char* string_VkAccessFlagBits2(uint64_t input_value) { + if (input_value == VK_ACCESS_2_NONE) return "VK_ACCESS_2_NONE"; + if (input_value == VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT) return "VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT"; + if (input_value == VK_ACCESS_2_INDEX_READ_BIT) return "VK_ACCESS_2_INDEX_READ_BIT"; + if (input_value == VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT) return "VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT"; + if (input_value == VK_ACCESS_2_UNIFORM_READ_BIT) return "VK_ACCESS_2_UNIFORM_READ_BIT"; + if (input_value == VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT) return "VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT"; + if (input_value == VK_ACCESS_2_SHADER_READ_BIT) return "VK_ACCESS_2_SHADER_READ_BIT"; + if (input_value == VK_ACCESS_2_SHADER_WRITE_BIT) return "VK_ACCESS_2_SHADER_WRITE_BIT"; + if (input_value == VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT) return "VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT"; + if (input_value == VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT) return "VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT"; + if (input_value == VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT) return "VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT"; + if (input_value == VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT) return "VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT"; + if (input_value == VK_ACCESS_2_TRANSFER_READ_BIT) return "VK_ACCESS_2_TRANSFER_READ_BIT"; + if (input_value == VK_ACCESS_2_TRANSFER_WRITE_BIT) return "VK_ACCESS_2_TRANSFER_WRITE_BIT"; + if (input_value == VK_ACCESS_2_HOST_READ_BIT) return "VK_ACCESS_2_HOST_READ_BIT"; + if (input_value == VK_ACCESS_2_HOST_WRITE_BIT) return "VK_ACCESS_2_HOST_WRITE_BIT"; + if (input_value == VK_ACCESS_2_MEMORY_READ_BIT) return "VK_ACCESS_2_MEMORY_READ_BIT"; + if (input_value == VK_ACCESS_2_MEMORY_WRITE_BIT) return "VK_ACCESS_2_MEMORY_WRITE_BIT"; + if (input_value == VK_ACCESS_2_SHADER_SAMPLED_READ_BIT) return "VK_ACCESS_2_SHADER_SAMPLED_READ_BIT"; + if (input_value == VK_ACCESS_2_SHADER_STORAGE_READ_BIT) return "VK_ACCESS_2_SHADER_STORAGE_READ_BIT"; + if (input_value == VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT) return "VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT"; + if (input_value == VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR) return "VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR"; + if (input_value == VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR) return "VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR"; + if (input_value == VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR) return "VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR"; + if (input_value == VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR) return "VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR"; + if (input_value == VK_ACCESS_2_SHADER_TILE_ATTACHMENT_READ_BIT_QCOM) return "VK_ACCESS_2_SHADER_TILE_ATTACHMENT_READ_BIT_QCOM"; + if (input_value == VK_ACCESS_2_SHADER_TILE_ATTACHMENT_WRITE_BIT_QCOM) return "VK_ACCESS_2_SHADER_TILE_ATTACHMENT_WRITE_BIT_QCOM"; + if (input_value == VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT) return "VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT"; + if (input_value == VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT) return "VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT"; + if (input_value == VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT) return "VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT"; + if (input_value == VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT) return "VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT"; + if (input_value == VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_EXT) return "VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_EXT"; + if (input_value == 
VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_EXT) return "VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_EXT"; + if (input_value == VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR) return "VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR"; + if (input_value == VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR) return "VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR"; + if (input_value == VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR) return "VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR"; + if (input_value == VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT) return "VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT"; + if (input_value == VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT) return "VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT"; + if (input_value == VK_ACCESS_2_DESCRIPTOR_BUFFER_READ_BIT_EXT) return "VK_ACCESS_2_DESCRIPTOR_BUFFER_READ_BIT_EXT"; + if (input_value == VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI) return "VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI"; + if (input_value == VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR) return "VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR"; + if (input_value == VK_ACCESS_2_MICROMAP_READ_BIT_EXT) return "VK_ACCESS_2_MICROMAP_READ_BIT_EXT"; + if (input_value == VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT) return "VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT"; + if (input_value == VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV) return "VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV"; + if (input_value == VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV) return "VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV"; + return "Unhandled VkAccessFlagBits2"; +} + +#ifdef __cplusplus +static inline std::string string_VkAccessFlags2(VkAccessFlags2 input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkAccessFlagBits2(static_cast(1ULL << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkAccessFlags2(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSubmitFlagBits(VkSubmitFlagBits input_value) { + switch (input_value) { + case VK_SUBMIT_PROTECTED_BIT: + return "VK_SUBMIT_PROTECTED_BIT"; + default: + return "Unhandled VkSubmitFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSubmitFlags(VkSubmitFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSubmitFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSubmitFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkRenderingFlagBits(VkRenderingFlagBits input_value) { + switch (input_value) { + case VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT: + return "VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT"; + case VK_RENDERING_SUSPENDING_BIT: + return "VK_RENDERING_SUSPENDING_BIT"; + case VK_RENDERING_RESUMING_BIT: + return "VK_RENDERING_RESUMING_BIT"; + case VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT: + return "VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT"; + case VK_RENDERING_CONTENTS_INLINE_BIT_KHR: + return "VK_RENDERING_CONTENTS_INLINE_BIT_KHR"; + default: + return "Unhandled VkRenderingFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkRenderingFlags(VkRenderingFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + 
ret.append(string_VkRenderingFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkRenderingFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkFormatFeatureFlagBits2(uint64_t input_value) { + if (input_value == VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT) return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT) return "VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT) return "VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT) return "VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT) return "VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT) return "VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT) return "VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT) return "VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT) return "VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT) return "VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_BLIT_SRC_BIT) return "VK_FORMAT_FEATURE_2_BLIT_SRC_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_BLIT_DST_BIT) return "VK_FORMAT_FEATURE_2_BLIT_DST_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT) return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT) return "VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT) return "VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT) return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT) return "VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT) return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT) return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT) return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT) return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_DISJOINT_BIT) return "VK_FORMAT_FEATURE_2_DISJOINT_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT) return "VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT) return "VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT) return 
"VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT) return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT) return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT) return "VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR) return "VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR"; + if (input_value == VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR) return "VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR"; + if (input_value == VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR) return "VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR"; + if (input_value == VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT) return "VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT"; + if (input_value == VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR) return "VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR"; + if (input_value == VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR) return "VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR"; + if (input_value == VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR) return "VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR"; + if (input_value == VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_RADIUS_BUFFER_BIT_NV) return "VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_RADIUS_BUFFER_BIT_NV"; + if (input_value == VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV) return "VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV"; + if (input_value == VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM) return "VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM"; + if (input_value == VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM) return "VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM"; + if (input_value == VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM) return "VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM"; + if (input_value == VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM) return "VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM"; + if (input_value == VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV) return "VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV"; + if (input_value == VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV) return "VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV"; + if (input_value == VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV) return "VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV"; + if (input_value == VK_FORMAT_FEATURE_2_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR) return "VK_FORMAT_FEATURE_2_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR"; + if (input_value == VK_FORMAT_FEATURE_2_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR) return "VK_FORMAT_FEATURE_2_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR"; + return "Unhandled VkFormatFeatureFlagBits2"; +} + +#ifdef __cplusplus +static inline std::string string_VkFormatFeatureFlags2(VkFormatFeatureFlags2 input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkFormatFeatureFlagBits2(static_cast(1ULL << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkFormatFeatureFlags2(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkMemoryUnmapFlagBits(VkMemoryUnmapFlagBits input_value) { + switch (input_value) { + case VK_MEMORY_UNMAP_RESERVE_BIT_EXT: + return 
"VK_MEMORY_UNMAP_RESERVE_BIT_EXT"; + default: + return "Unhandled VkMemoryUnmapFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkMemoryUnmapFlags(VkMemoryUnmapFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkMemoryUnmapFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkMemoryUnmapFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPipelineCreateFlagBits2(uint64_t input_value) { + if (input_value == VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT) return "VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT"; + if (input_value == VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT) return "VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT"; + if (input_value == VK_PIPELINE_CREATE_2_DERIVATIVE_BIT) return "VK_PIPELINE_CREATE_2_DERIVATIVE_BIT"; + if (input_value == VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT) return "VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT"; + if (input_value == VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT) return "VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT"; + if (input_value == VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT) return "VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT"; + if (input_value == VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT) return "VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT"; + if (input_value == VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT) return "VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT"; + if (input_value == VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT) return "VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (input_value == VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX) return "VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX"; +#endif // VK_ENABLE_BETA_EXTENSIONS + if (input_value == VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_SPHERES_AND_LINEAR_SWEPT_SPHERES_BIT_NV) return "VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_SPHERES_AND_LINEAR_SWEPT_SPHERES_BIT_NV"; + if (input_value == VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT) return "VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT"; + if (input_value == VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV) return "VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV"; + if (input_value == VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR) return "VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR) return "VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_EXT) return "VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_EXT"; + if (input_value == VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT) return "VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT"; + if (input_value == VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR) return "VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR) return "VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR) return "VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR) return "VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR"; + if (input_value == 
VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR) return "VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR) return "VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR) return "VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR) return "VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_NV) return "VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_NV"; + if (input_value == VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_NV) return "VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_NV"; + if (input_value == VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR) return "VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT) return "VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT"; + if (input_value == VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT) return "VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT"; + if (input_value == VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT) return "VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT"; + if (input_value == VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT) return "VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT"; + if (input_value == VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV) return "VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV"; + if (input_value == VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT) return "VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT"; + if (input_value == VK_PIPELINE_CREATE_2_DISALLOW_OPACITY_MICROMAP_BIT_ARM) return "VK_PIPELINE_CREATE_2_DISALLOW_OPACITY_MICROMAP_BIT_ARM"; + if (input_value == VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR) return "VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_EXT) return "VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_EXT"; + return "Unhandled VkPipelineCreateFlagBits2"; +} + +#ifdef __cplusplus +static inline std::string string_VkPipelineCreateFlags2(VkPipelineCreateFlags2 input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineCreateFlagBits2(static_cast(1ULL << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineCreateFlags2(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkBufferUsageFlagBits2(uint64_t input_value) { + if (input_value == VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT) return "VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT"; + if (input_value == VK_BUFFER_USAGE_2_TRANSFER_DST_BIT) return "VK_BUFFER_USAGE_2_TRANSFER_DST_BIT"; + if (input_value == VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT) return "VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT"; + if (input_value == VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT) return "VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT"; + if (input_value == VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT) return "VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT"; + if 
(input_value == VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT) return "VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT"; + if (input_value == VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT) return "VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT"; + if (input_value == VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT) return "VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT"; + if (input_value == VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT) return "VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT"; + if (input_value == VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT) return "VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (input_value == VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX) return "VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX"; +#endif // VK_ENABLE_BETA_EXTENSIONS + if (input_value == VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT) return "VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT"; + if (input_value == VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR) return "VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT) return "VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT"; + if (input_value == VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT) return "VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT"; + if (input_value == VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR) return "VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR) return "VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR) return "VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR) return "VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR) return "VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR) return "VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT) return "VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT"; + if (input_value == VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT) return "VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT"; + if (input_value == VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT) return "VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT"; + if (input_value == VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT) return "VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT"; + if (input_value == VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT) return "VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT"; + if (input_value == VK_BUFFER_USAGE_2_TILE_MEMORY_QCOM) return "VK_BUFFER_USAGE_2_TILE_MEMORY_QCOM"; + if (input_value == VK_BUFFER_USAGE_2_PREPROCESS_BUFFER_BIT_EXT) return "VK_BUFFER_USAGE_2_PREPROCESS_BUFFER_BIT_EXT"; + return "Unhandled VkBufferUsageFlagBits2"; +} + +#ifdef __cplusplus +static inline std::string string_VkBufferUsageFlags2(VkBufferUsageFlags2 input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkBufferUsageFlagBits2(static_cast(1ULL << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkBufferUsageFlags2(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* 
string_VkHostImageCopyFlagBits(VkHostImageCopyFlagBits input_value) { + switch (input_value) { + case VK_HOST_IMAGE_COPY_MEMCPY: + return "VK_HOST_IMAGE_COPY_MEMCPY"; + default: + return "Unhandled VkHostImageCopyFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkHostImageCopyFlags(VkHostImageCopyFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkHostImageCopyFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkHostImageCopyFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSurfaceTransformFlagBitsKHR(VkSurfaceTransformFlagBitsKHR input_value) { + switch (input_value) { + case VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR: + return "VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR"; + case VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR: + return "VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR"; + case VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR: + return "VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR"; + case VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR: + return "VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR"; + case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR: + return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR"; + case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR: + return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR"; + case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR: + return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR"; + case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR: + return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR"; + case VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR: + return "VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR"; + default: + return "Unhandled VkSurfaceTransformFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSurfaceTransformFlagsKHR(VkSurfaceTransformFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSurfaceTransformFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSurfaceTransformFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkCompositeAlphaFlagBitsKHR(VkCompositeAlphaFlagBitsKHR input_value) { + switch (input_value) { + case VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR: + return "VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR"; + case VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR: + return "VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR"; + case VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR: + return "VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR"; + case VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR: + return "VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR"; + default: + return "Unhandled VkCompositeAlphaFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkCompositeAlphaFlagsKHR(VkCompositeAlphaFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkCompositeAlphaFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkCompositeAlphaFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSwapchainCreateFlagBitsKHR(VkSwapchainCreateFlagBitsKHR input_value) { + switch (input_value) { + case 
VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR: + return "VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR"; + case VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR: + return "VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR"; + case VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR: + return "VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR"; + case VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT: + return "VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT"; + default: + return "Unhandled VkSwapchainCreateFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSwapchainCreateFlagsKHR(VkSwapchainCreateFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSwapchainCreateFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSwapchainCreateFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDeviceGroupPresentModeFlagBitsKHR(VkDeviceGroupPresentModeFlagBitsKHR input_value) { + switch (input_value) { + case VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR: + return "VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR"; + case VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR: + return "VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR"; + case VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR: + return "VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR"; + case VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR: + return "VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR"; + default: + return "Unhandled VkDeviceGroupPresentModeFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDeviceGroupPresentModeFlagsKHR(VkDeviceGroupPresentModeFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDeviceGroupPresentModeFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDeviceGroupPresentModeFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDisplayPlaneAlphaFlagBitsKHR(VkDisplayPlaneAlphaFlagBitsKHR input_value) { + switch (input_value) { + case VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR: + return "VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR"; + case VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR: + return "VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR"; + case VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR: + return "VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR"; + case VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR: + return "VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR"; + default: + return "Unhandled VkDisplayPlaneAlphaFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDisplayPlaneAlphaFlagsKHR(VkDisplayPlaneAlphaFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDisplayPlaneAlphaFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDisplayPlaneAlphaFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoCodecOperationFlagBitsKHR(VkVideoCodecOperationFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_CODEC_OPERATION_NONE_KHR: + return "VK_VIDEO_CODEC_OPERATION_NONE_KHR"; + case VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR: + return 
"VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR"; + case VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR: + return "VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR"; + case VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR: + return "VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR"; + case VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR: + return "VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR"; + case VK_VIDEO_CODEC_OPERATION_DECODE_AV1_BIT_KHR: + return "VK_VIDEO_CODEC_OPERATION_DECODE_AV1_BIT_KHR"; + case VK_VIDEO_CODEC_OPERATION_ENCODE_AV1_BIT_KHR: + return "VK_VIDEO_CODEC_OPERATION_ENCODE_AV1_BIT_KHR"; + default: + return "Unhandled VkVideoCodecOperationFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoCodecOperationFlagsKHR(VkVideoCodecOperationFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoCodecOperationFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoCodecOperationFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoChromaSubsamplingFlagBitsKHR(VkVideoChromaSubsamplingFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_KHR: + return "VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_KHR"; + case VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR: + return "VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR"; + case VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR: + return "VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR"; + case VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR: + return "VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR"; + case VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR: + return "VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR"; + default: + return "Unhandled VkVideoChromaSubsamplingFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoChromaSubsamplingFlagsKHR(VkVideoChromaSubsamplingFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoChromaSubsamplingFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoChromaSubsamplingFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoComponentBitDepthFlagBitsKHR(VkVideoComponentBitDepthFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR: + return "VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR"; + case VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR: + return "VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR"; + case VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR: + return "VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR"; + case VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR: + return "VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR"; + default: + return "Unhandled VkVideoComponentBitDepthFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoComponentBitDepthFlagsKHR(VkVideoComponentBitDepthFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoComponentBitDepthFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoComponentBitDepthFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* 
string_VkVideoCapabilityFlagBitsKHR(VkVideoCapabilityFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_CAPABILITY_PROTECTED_CONTENT_BIT_KHR: + return "VK_VIDEO_CAPABILITY_PROTECTED_CONTENT_BIT_KHR"; + case VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR: + return "VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR"; + default: + return "Unhandled VkVideoCapabilityFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoCapabilityFlagsKHR(VkVideoCapabilityFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoCapabilityFlagBitsKHR(static_cast<VkVideoCapabilityFlagBitsKHR>(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoCapabilityFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoSessionCreateFlagBitsKHR(VkVideoSessionCreateFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR: + return "VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR"; + case VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_PARAMETER_OPTIMIZATIONS_BIT_KHR: + return "VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_PARAMETER_OPTIMIZATIONS_BIT_KHR"; + case VK_VIDEO_SESSION_CREATE_INLINE_QUERIES_BIT_KHR: + return "VK_VIDEO_SESSION_CREATE_INLINE_QUERIES_BIT_KHR"; + case VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR: + return "VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR"; + case VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_EMPHASIS_MAP_BIT_KHR: + return "VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_EMPHASIS_MAP_BIT_KHR"; + case VK_VIDEO_SESSION_CREATE_INLINE_SESSION_PARAMETERS_BIT_KHR: + return "VK_VIDEO_SESSION_CREATE_INLINE_SESSION_PARAMETERS_BIT_KHR"; + default: + return "Unhandled VkVideoSessionCreateFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoSessionCreateFlagsKHR(VkVideoSessionCreateFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoSessionCreateFlagBitsKHR(static_cast<VkVideoSessionCreateFlagBitsKHR>(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoSessionCreateFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoSessionParametersCreateFlagBitsKHR(VkVideoSessionParametersCreateFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_SESSION_PARAMETERS_CREATE_QUANTIZATION_MAP_COMPATIBLE_BIT_KHR: + return "VK_VIDEO_SESSION_PARAMETERS_CREATE_QUANTIZATION_MAP_COMPATIBLE_BIT_KHR"; + default: + return "Unhandled VkVideoSessionParametersCreateFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoSessionParametersCreateFlagsKHR(VkVideoSessionParametersCreateFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoSessionParametersCreateFlagBitsKHR(static_cast<VkVideoSessionParametersCreateFlagBitsKHR>(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoSessionParametersCreateFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoCodingControlFlagBitsKHR(VkVideoCodingControlFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR: + return "VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR"; + case 
VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR: + return "VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR"; + case VK_VIDEO_CODING_CONTROL_ENCODE_QUALITY_LEVEL_BIT_KHR: + return "VK_VIDEO_CODING_CONTROL_ENCODE_QUALITY_LEVEL_BIT_KHR"; + default: + return "Unhandled VkVideoCodingControlFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoCodingControlFlagsKHR(VkVideoCodingControlFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoCodingControlFlagBitsKHR(static_cast<VkVideoCodingControlFlagBitsKHR>(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoCodingControlFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoDecodeCapabilityFlagBitsKHR(VkVideoDecodeCapabilityFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_COINCIDE_BIT_KHR: + return "VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_COINCIDE_BIT_KHR"; + case VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR: + return "VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR"; + default: + return "Unhandled VkVideoDecodeCapabilityFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoDecodeCapabilityFlagsKHR(VkVideoDecodeCapabilityFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoDecodeCapabilityFlagBitsKHR(static_cast<VkVideoDecodeCapabilityFlagBitsKHR>(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoDecodeCapabilityFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoDecodeUsageFlagBitsKHR(VkVideoDecodeUsageFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_DECODE_USAGE_DEFAULT_KHR: + return "VK_VIDEO_DECODE_USAGE_DEFAULT_KHR"; + case VK_VIDEO_DECODE_USAGE_TRANSCODING_BIT_KHR: + return "VK_VIDEO_DECODE_USAGE_TRANSCODING_BIT_KHR"; + case VK_VIDEO_DECODE_USAGE_OFFLINE_BIT_KHR: + return "VK_VIDEO_DECODE_USAGE_OFFLINE_BIT_KHR"; + case VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR: + return "VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR"; + default: + return "Unhandled VkVideoDecodeUsageFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoDecodeUsageFlagsKHR(VkVideoDecodeUsageFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoDecodeUsageFlagBitsKHR(static_cast<VkVideoDecodeUsageFlagBitsKHR>(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoDecodeUsageFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeH264CapabilityFlagBitsKHR(VkVideoEncodeH264CapabilityFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_KHR: + return 
"VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_CAPABILITY_PER_SLICE_CONSTANT_QP_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_CAPABILITY_PER_SLICE_CONSTANT_QP_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_CAPABILITY_GENERATE_PREFIX_NALU_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_CAPABILITY_GENERATE_PREFIX_NALU_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_CAPABILITY_MB_QP_DIFF_WRAPAROUND_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_CAPABILITY_MB_QP_DIFF_WRAPAROUND_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeH264CapabilityFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeH264CapabilityFlagsKHR(VkVideoEncodeH264CapabilityFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeH264CapabilityFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeH264CapabilityFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeH264StdFlagBitsKHR(VkVideoEncodeH264StdFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_H264_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_SCALING_MATRIX_PRESENT_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_SCALING_MATRIX_PRESENT_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_CHROMA_QP_INDEX_OFFSET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_CHROMA_QP_INDEX_OFFSET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_SECOND_CHROMA_QP_INDEX_OFFSET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_SECOND_CHROMA_QP_INDEX_OFFSET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_PIC_INIT_QP_MINUS26_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_PIC_INIT_QP_MINUS26_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_EXPLICIT_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_EXPLICIT_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_IMPLICIT_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_IMPLICIT_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_TRANSFORM_8X8_MODE_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_TRANSFORM_8X8_MODE_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_DIRECT_SPATIAL_MV_PRED_FLAG_UNSET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_DIRECT_SPATIAL_MV_PRED_FLAG_UNSET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_UNSET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_UNSET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_SET_BIT_KHR"; + case 
VK_VIDEO_ENCODE_H264_STD_DIRECT_8X8_INFERENCE_FLAG_UNSET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_DIRECT_8X8_INFERENCE_FLAG_UNSET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_DISABLED_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_DISABLED_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_ENABLED_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_ENABLED_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_PARTIAL_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_PARTIAL_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_SLICE_QP_DELTA_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_SLICE_QP_DELTA_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeH264StdFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeH264StdFlagsKHR(VkVideoEncodeH264StdFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeH264StdFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeH264StdFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeH264RateControlFlagBitsKHR(VkVideoEncodeH264RateControlFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_H264_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_RATE_CONTROL_REGULAR_GOP_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_RATE_CONTROL_REGULAR_GOP_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_RATE_CONTROL_TEMPORAL_LAYER_PATTERN_DYADIC_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_RATE_CONTROL_TEMPORAL_LAYER_PATTERN_DYADIC_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeH264RateControlFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeH264RateControlFlagsKHR(VkVideoEncodeH264RateControlFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeH264RateControlFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeH264RateControlFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeH265CapabilityFlagBitsKHR(VkVideoEncodeH265CapabilityFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_KHR: + return 
"VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_SEGMENT_TYPE_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_SEGMENT_TYPE_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CAPABILITY_PER_SLICE_SEGMENT_CONSTANT_QP_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_PER_SLICE_SEGMENT_CONSTANT_QP_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILES_PER_SLICE_SEGMENT_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILES_PER_SLICE_SEGMENT_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_SEGMENTS_PER_TILE_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_SEGMENTS_PER_TILE_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CAPABILITY_CU_QP_DIFF_WRAPAROUND_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_CU_QP_DIFF_WRAPAROUND_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeH265CapabilityFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeH265CapabilityFlagsKHR(VkVideoEncodeH265CapabilityFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeH265CapabilityFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeH265CapabilityFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeH265StdFlagBitsKHR(VkVideoEncodeH265StdFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_H265_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_SAMPLE_ADAPTIVE_OFFSET_ENABLED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_SAMPLE_ADAPTIVE_OFFSET_ENABLED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_SCALING_LIST_DATA_PRESENT_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_SCALING_LIST_DATA_PRESENT_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_PCM_ENABLED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_PCM_ENABLED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_SPS_TEMPORAL_MVP_ENABLED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_SPS_TEMPORAL_MVP_ENABLED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_INIT_QP_MINUS26_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_INIT_QP_MINUS26_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_WEIGHTED_BIPRED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_WEIGHTED_BIPRED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_SIGN_DATA_HIDING_ENABLED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_SIGN_DATA_HIDING_ENABLED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_SET_BIT_KHR: + return 
"VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_UNSET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_UNSET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_TRANSQUANT_BYPASS_ENABLED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_TRANSQUANT_BYPASS_ENABLED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_ENTROPY_CODING_SYNC_ENABLED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_ENTROPY_CODING_SYNC_ENABLED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENTS_ENABLED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENTS_ENABLED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENT_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENT_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_SLICE_QP_DELTA_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_SLICE_QP_DELTA_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeH265StdFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeH265StdFlagsKHR(VkVideoEncodeH265StdFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeH265StdFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeH265StdFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeH265CtbSizeFlagBitsKHR(VkVideoEncodeH265CtbSizeFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeH265CtbSizeFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeH265CtbSizeFlagsKHR(VkVideoEncodeH265CtbSizeFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeH265CtbSizeFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeH265CtbSizeFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeH265TransformBlockSizeFlagBitsKHR(VkVideoEncodeH265TransformBlockSizeFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_KHR: + return 
"VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeH265TransformBlockSizeFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeH265TransformBlockSizeFlagsKHR(VkVideoEncodeH265TransformBlockSizeFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeH265TransformBlockSizeFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeH265TransformBlockSizeFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeH265RateControlFlagBitsKHR(VkVideoEncodeH265RateControlFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_H265_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_RATE_CONTROL_REGULAR_GOP_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_RATE_CONTROL_REGULAR_GOP_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_RATE_CONTROL_TEMPORAL_SUB_LAYER_PATTERN_DYADIC_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_RATE_CONTROL_TEMPORAL_SUB_LAYER_PATTERN_DYADIC_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeH265RateControlFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeH265RateControlFlagsKHR(VkVideoEncodeH265RateControlFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeH265RateControlFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeH265RateControlFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoDecodeH264PictureLayoutFlagBitsKHR(VkVideoDecodeH264PictureLayoutFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_KHR: + return "VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_KHR"; + case VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_KHR: + return "VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_KHR"; + case VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_KHR: + return "VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_KHR"; + default: + return "Unhandled VkVideoDecodeH264PictureLayoutFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoDecodeH264PictureLayoutFlagsKHR(VkVideoDecodeH264PictureLayoutFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoDecodeH264PictureLayoutFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) 
ret.append("VkVideoDecodeH264PictureLayoutFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPerformanceCounterDescriptionFlagBitsKHR(VkPerformanceCounterDescriptionFlagBitsKHR input_value) { + switch (input_value) { + case VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR: + return "VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR"; + case VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR: + return "VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR"; + default: + return "Unhandled VkPerformanceCounterDescriptionFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPerformanceCounterDescriptionFlagsKHR(VkPerformanceCounterDescriptionFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPerformanceCounterDescriptionFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPerformanceCounterDescriptionFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeFlagBitsKHR(VkVideoEncodeFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_WITH_QUANTIZATION_DELTA_MAP_BIT_KHR: + return "VK_VIDEO_ENCODE_WITH_QUANTIZATION_DELTA_MAP_BIT_KHR"; + case VK_VIDEO_ENCODE_WITH_EMPHASIS_MAP_BIT_KHR: + return "VK_VIDEO_ENCODE_WITH_EMPHASIS_MAP_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeFlagsKHR(VkVideoEncodeFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeCapabilityFlagBitsKHR(VkVideoEncodeCapabilityFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR: + return "VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR"; + case VK_VIDEO_ENCODE_CAPABILITY_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_DETECTION_BIT_KHR: + return "VK_VIDEO_ENCODE_CAPABILITY_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_DETECTION_BIT_KHR"; + case VK_VIDEO_ENCODE_CAPABILITY_QUANTIZATION_DELTA_MAP_BIT_KHR: + return "VK_VIDEO_ENCODE_CAPABILITY_QUANTIZATION_DELTA_MAP_BIT_KHR"; + case VK_VIDEO_ENCODE_CAPABILITY_EMPHASIS_MAP_BIT_KHR: + return "VK_VIDEO_ENCODE_CAPABILITY_EMPHASIS_MAP_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeCapabilityFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeCapabilityFlagsKHR(VkVideoEncodeCapabilityFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeCapabilityFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeCapabilityFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeRateControlModeFlagBitsKHR(VkVideoEncodeRateControlModeFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DEFAULT_KHR: + return 
"VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DEFAULT_KHR"; + case VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DISABLED_BIT_KHR: + return "VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DISABLED_BIT_KHR"; + case VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR: + return "VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR"; + case VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR: + return "VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeRateControlModeFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeRateControlModeFlagsKHR(VkVideoEncodeRateControlModeFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeRateControlModeFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeRateControlModeFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeFeedbackFlagBitsKHR(VkVideoEncodeFeedbackFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BUFFER_OFFSET_BIT_KHR: + return "VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BUFFER_OFFSET_BIT_KHR"; + case VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BYTES_WRITTEN_BIT_KHR: + return "VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BYTES_WRITTEN_BIT_KHR"; + case VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_HAS_OVERRIDES_BIT_KHR: + return "VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_HAS_OVERRIDES_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeFeedbackFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeFeedbackFlagsKHR(VkVideoEncodeFeedbackFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeFeedbackFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeFeedbackFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeUsageFlagBitsKHR(VkVideoEncodeUsageFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR: + return "VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR"; + case VK_VIDEO_ENCODE_USAGE_TRANSCODING_BIT_KHR: + return "VK_VIDEO_ENCODE_USAGE_TRANSCODING_BIT_KHR"; + case VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR: + return "VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR"; + case VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR: + return "VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR"; + case VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR: + return "VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeUsageFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeUsageFlagsKHR(VkVideoEncodeUsageFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeUsageFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeUsageFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeContentFlagBitsKHR(VkVideoEncodeContentFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR: + return "VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR"; + case VK_VIDEO_ENCODE_CONTENT_CAMERA_BIT_KHR: + return 
"VK_VIDEO_ENCODE_CONTENT_CAMERA_BIT_KHR"; + case VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR: + return "VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR"; + case VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR: + return "VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeContentFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeContentFlagsKHR(VkVideoEncodeContentFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeContentFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeContentFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeAV1CapabilityFlagBitsKHR(VkVideoEncodeAV1CapabilityFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_AV1_CAPABILITY_PER_RATE_CONTROL_GROUP_MIN_MAX_Q_INDEX_BIT_KHR: + return "VK_VIDEO_ENCODE_AV1_CAPABILITY_PER_RATE_CONTROL_GROUP_MIN_MAX_Q_INDEX_BIT_KHR"; + case VK_VIDEO_ENCODE_AV1_CAPABILITY_GENERATE_OBU_EXTENSION_HEADER_BIT_KHR: + return "VK_VIDEO_ENCODE_AV1_CAPABILITY_GENERATE_OBU_EXTENSION_HEADER_BIT_KHR"; + case VK_VIDEO_ENCODE_AV1_CAPABILITY_PRIMARY_REFERENCE_CDF_ONLY_BIT_KHR: + return "VK_VIDEO_ENCODE_AV1_CAPABILITY_PRIMARY_REFERENCE_CDF_ONLY_BIT_KHR"; + case VK_VIDEO_ENCODE_AV1_CAPABILITY_FRAME_SIZE_OVERRIDE_BIT_KHR: + return "VK_VIDEO_ENCODE_AV1_CAPABILITY_FRAME_SIZE_OVERRIDE_BIT_KHR"; + case VK_VIDEO_ENCODE_AV1_CAPABILITY_MOTION_VECTOR_SCALING_BIT_KHR: + return "VK_VIDEO_ENCODE_AV1_CAPABILITY_MOTION_VECTOR_SCALING_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeAV1CapabilityFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeAV1CapabilityFlagsKHR(VkVideoEncodeAV1CapabilityFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeAV1CapabilityFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeAV1CapabilityFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeAV1StdFlagBitsKHR(VkVideoEncodeAV1StdFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_AV1_STD_UNIFORM_TILE_SPACING_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_AV1_STD_UNIFORM_TILE_SPACING_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_AV1_STD_SKIP_MODE_PRESENT_UNSET_BIT_KHR: + return "VK_VIDEO_ENCODE_AV1_STD_SKIP_MODE_PRESENT_UNSET_BIT_KHR"; + case VK_VIDEO_ENCODE_AV1_STD_PRIMARY_REF_FRAME_BIT_KHR: + return "VK_VIDEO_ENCODE_AV1_STD_PRIMARY_REF_FRAME_BIT_KHR"; + case VK_VIDEO_ENCODE_AV1_STD_DELTA_Q_BIT_KHR: + return "VK_VIDEO_ENCODE_AV1_STD_DELTA_Q_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeAV1StdFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeAV1StdFlagsKHR(VkVideoEncodeAV1StdFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeAV1StdFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeAV1StdFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* 
string_VkVideoEncodeAV1SuperblockSizeFlagBitsKHR(VkVideoEncodeAV1SuperblockSizeFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_AV1_SUPERBLOCK_SIZE_64_BIT_KHR: + return "VK_VIDEO_ENCODE_AV1_SUPERBLOCK_SIZE_64_BIT_KHR"; + case VK_VIDEO_ENCODE_AV1_SUPERBLOCK_SIZE_128_BIT_KHR: + return "VK_VIDEO_ENCODE_AV1_SUPERBLOCK_SIZE_128_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeAV1SuperblockSizeFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeAV1SuperblockSizeFlagsKHR(VkVideoEncodeAV1SuperblockSizeFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeAV1SuperblockSizeFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeAV1SuperblockSizeFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeAV1RateControlFlagBitsKHR(VkVideoEncodeAV1RateControlFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REGULAR_GOP_BIT_KHR: + return "VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REGULAR_GOP_BIT_KHR"; + case VK_VIDEO_ENCODE_AV1_RATE_CONTROL_TEMPORAL_LAYER_PATTERN_DYADIC_BIT_KHR: + return "VK_VIDEO_ENCODE_AV1_RATE_CONTROL_TEMPORAL_LAYER_PATTERN_DYADIC_BIT_KHR"; + case VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR: + return "VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR"; + case VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR: + return "VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeAV1RateControlFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeAV1RateControlFlagsKHR(VkVideoEncodeAV1RateControlFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeAV1RateControlFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeAV1RateControlFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkAccessFlagBits3KHR(uint64_t input_value) { + if (input_value == VK_ACCESS_3_NONE_KHR) return "VK_ACCESS_3_NONE_KHR"; + return "Unhandled VkAccessFlagBits3KHR"; +} + +#ifdef __cplusplus +static inline std::string string_VkAccessFlags3KHR(VkAccessFlags3KHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkAccessFlagBits3KHR(static_cast(1ULL << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkAccessFlags3KHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDebugReportFlagBitsEXT(VkDebugReportFlagBitsEXT input_value) { + switch (input_value) { + case VK_DEBUG_REPORT_INFORMATION_BIT_EXT: + return "VK_DEBUG_REPORT_INFORMATION_BIT_EXT"; + case VK_DEBUG_REPORT_WARNING_BIT_EXT: + return "VK_DEBUG_REPORT_WARNING_BIT_EXT"; + case VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT: + return "VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT"; + case VK_DEBUG_REPORT_ERROR_BIT_EXT: + return "VK_DEBUG_REPORT_ERROR_BIT_EXT"; + case VK_DEBUG_REPORT_DEBUG_BIT_EXT: + return "VK_DEBUG_REPORT_DEBUG_BIT_EXT"; + default: + return "Unhandled VkDebugReportFlagBitsEXT"; + } +} + 
+#ifdef __cplusplus +static inline std::string string_VkDebugReportFlagsEXT(VkDebugReportFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDebugReportFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDebugReportFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkExternalMemoryHandleTypeFlagBitsNV(VkExternalMemoryHandleTypeFlagBitsNV input_value) { + switch (input_value) { + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV"; + default: + return "Unhandled VkExternalMemoryHandleTypeFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkExternalMemoryHandleTypeFlagsNV(VkExternalMemoryHandleTypeFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkExternalMemoryHandleTypeFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkExternalMemoryHandleTypeFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkExternalMemoryFeatureFlagBitsNV(VkExternalMemoryFeatureFlagBitsNV input_value) { + switch (input_value) { + case VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV: + return "VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV"; + case VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV: + return "VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV"; + case VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV: + return "VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV"; + default: + return "Unhandled VkExternalMemoryFeatureFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkExternalMemoryFeatureFlagsNV(VkExternalMemoryFeatureFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkExternalMemoryFeatureFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkExternalMemoryFeatureFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkConditionalRenderingFlagBitsEXT(VkConditionalRenderingFlagBitsEXT input_value) { + switch (input_value) { + case VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT: + return "VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT"; + default: + return "Unhandled VkConditionalRenderingFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkConditionalRenderingFlagsEXT(VkConditionalRenderingFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkConditionalRenderingFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkConditionalRenderingFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* 
string_VkSurfaceCounterFlagBitsEXT(VkSurfaceCounterFlagBitsEXT input_value) { + switch (input_value) { + case VK_SURFACE_COUNTER_VBLANK_BIT_EXT: + return "VK_SURFACE_COUNTER_VBLANK_BIT_EXT"; + default: + return "Unhandled VkSurfaceCounterFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSurfaceCounterFlagsEXT(VkSurfaceCounterFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSurfaceCounterFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSurfaceCounterFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDebugUtilsMessageSeverityFlagBitsEXT(VkDebugUtilsMessageSeverityFlagBitsEXT input_value) { + switch (input_value) { + case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT: + return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT"; + case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT: + return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT"; + case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT: + return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT"; + case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT: + return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT"; + default: + return "Unhandled VkDebugUtilsMessageSeverityFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDebugUtilsMessageSeverityFlagsEXT(VkDebugUtilsMessageSeverityFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDebugUtilsMessageSeverityFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDebugUtilsMessageSeverityFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDebugUtilsMessageTypeFlagBitsEXT(VkDebugUtilsMessageTypeFlagBitsEXT input_value) { + switch (input_value) { + case VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT: + return "VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT"; + case VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT: + return "VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT"; + case VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT: + return "VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT"; + case VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT: + return "VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT"; + default: + return "Unhandled VkDebugUtilsMessageTypeFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDebugUtilsMessageTypeFlagsEXT(VkDebugUtilsMessageTypeFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDebugUtilsMessageTypeFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDebugUtilsMessageTypeFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkGeometryFlagBitsKHR(VkGeometryFlagBitsKHR input_value) { + switch (input_value) { + case VK_GEOMETRY_OPAQUE_BIT_KHR: + return "VK_GEOMETRY_OPAQUE_BIT_KHR"; + case VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR: + return "VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR"; + default: + return "Unhandled VkGeometryFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string 
string_VkGeometryFlagsKHR(VkGeometryFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkGeometryFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkGeometryFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkGeometryInstanceFlagBitsKHR(VkGeometryInstanceFlagBitsKHR input_value) { + switch (input_value) { + case VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR: + return "VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR"; + case VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR: + return "VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR"; + case VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR: + return "VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR"; + case VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR: + return "VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR"; + case VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT: + return "VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT"; + case VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT: + return "VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT"; + default: + return "Unhandled VkGeometryInstanceFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkGeometryInstanceFlagsKHR(VkGeometryInstanceFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkGeometryInstanceFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkGeometryInstanceFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkBuildAccelerationStructureFlagBitsKHR(VkBuildAccelerationStructureFlagBitsKHR input_value) { + switch (input_value) { + case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR"; + case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR"; + case VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR"; + case VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR"; + case VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR"; + case VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV: + return "VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV"; + case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT: + return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT"; + case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT: + return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT"; + case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT: + return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISPLACEMENT_MICROMAP_UPDATE_NV: + return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISPLACEMENT_MICROMAP_UPDATE_NV"; +#endif // VK_ENABLE_BETA_EXTENSIONS + case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DATA_ACCESS_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DATA_ACCESS_KHR"; + default: + return 
"Unhandled VkBuildAccelerationStructureFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkBuildAccelerationStructureFlagsKHR(VkBuildAccelerationStructureFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkBuildAccelerationStructureFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkBuildAccelerationStructureFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPresentScalingFlagBitsEXT(VkPresentScalingFlagBitsEXT input_value) { + switch (input_value) { + case VK_PRESENT_SCALING_ONE_TO_ONE_BIT_EXT: + return "VK_PRESENT_SCALING_ONE_TO_ONE_BIT_EXT"; + case VK_PRESENT_SCALING_ASPECT_RATIO_STRETCH_BIT_EXT: + return "VK_PRESENT_SCALING_ASPECT_RATIO_STRETCH_BIT_EXT"; + case VK_PRESENT_SCALING_STRETCH_BIT_EXT: + return "VK_PRESENT_SCALING_STRETCH_BIT_EXT"; + default: + return "Unhandled VkPresentScalingFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPresentScalingFlagsEXT(VkPresentScalingFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPresentScalingFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPresentScalingFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPresentGravityFlagBitsEXT(VkPresentGravityFlagBitsEXT input_value) { + switch (input_value) { + case VK_PRESENT_GRAVITY_MIN_BIT_EXT: + return "VK_PRESENT_GRAVITY_MIN_BIT_EXT"; + case VK_PRESENT_GRAVITY_MAX_BIT_EXT: + return "VK_PRESENT_GRAVITY_MAX_BIT_EXT"; + case VK_PRESENT_GRAVITY_CENTERED_BIT_EXT: + return "VK_PRESENT_GRAVITY_CENTERED_BIT_EXT"; + default: + return "Unhandled VkPresentGravityFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPresentGravityFlagsEXT(VkPresentGravityFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPresentGravityFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPresentGravityFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkIndirectStateFlagBitsNV(VkIndirectStateFlagBitsNV input_value) { + switch (input_value) { + case VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV: + return "VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV"; + default: + return "Unhandled VkIndirectStateFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkIndirectStateFlagsNV(VkIndirectStateFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkIndirectStateFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkIndirectStateFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkIndirectCommandsLayoutUsageFlagBitsNV(VkIndirectCommandsLayoutUsageFlagBitsNV input_value) { + switch (input_value) { + case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV: + return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV"; + case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV: + 
return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV"; + case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV: + return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV"; + default: + return "Unhandled VkIndirectCommandsLayoutUsageFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkIndirectCommandsLayoutUsageFlagsNV(VkIndirectCommandsLayoutUsageFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkIndirectCommandsLayoutUsageFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkIndirectCommandsLayoutUsageFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDeviceDiagnosticsConfigFlagBitsNV(VkDeviceDiagnosticsConfigFlagBitsNV input_value) { + switch (input_value) { + case VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV: + return "VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV"; + case VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV: + return "VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV"; + case VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV: + return "VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV"; + case VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV: + return "VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV"; + default: + return "Unhandled VkDeviceDiagnosticsConfigFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDeviceDiagnosticsConfigFlagsNV(VkDeviceDiagnosticsConfigFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDeviceDiagnosticsConfigFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDeviceDiagnosticsConfigFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkTileShadingRenderPassFlagBitsQCOM(VkTileShadingRenderPassFlagBitsQCOM input_value) { + switch (input_value) { + case VK_TILE_SHADING_RENDER_PASS_ENABLE_BIT_QCOM: + return "VK_TILE_SHADING_RENDER_PASS_ENABLE_BIT_QCOM"; + case VK_TILE_SHADING_RENDER_PASS_PER_TILE_EXECUTION_BIT_QCOM: + return "VK_TILE_SHADING_RENDER_PASS_PER_TILE_EXECUTION_BIT_QCOM"; + default: + return "Unhandled VkTileShadingRenderPassFlagBitsQCOM"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkTileShadingRenderPassFlagsQCOM(VkTileShadingRenderPassFlagsQCOM input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkTileShadingRenderPassFlagBitsQCOM(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkTileShadingRenderPassFlagsQCOM(0)"); + return ret; +} +#endif // __cplusplus +#ifdef VK_USE_PLATFORM_METAL_EXT +static inline const char* string_VkExportMetalObjectTypeFlagBitsEXT(VkExportMetalObjectTypeFlagBitsEXT input_value) { + switch (input_value) { + case VK_EXPORT_METAL_OBJECT_TYPE_METAL_DEVICE_BIT_EXT: + return "VK_EXPORT_METAL_OBJECT_TYPE_METAL_DEVICE_BIT_EXT"; + case VK_EXPORT_METAL_OBJECT_TYPE_METAL_COMMAND_QUEUE_BIT_EXT: + return "VK_EXPORT_METAL_OBJECT_TYPE_METAL_COMMAND_QUEUE_BIT_EXT"; + case VK_EXPORT_METAL_OBJECT_TYPE_METAL_BUFFER_BIT_EXT: + return 
"VK_EXPORT_METAL_OBJECT_TYPE_METAL_BUFFER_BIT_EXT"; + case VK_EXPORT_METAL_OBJECT_TYPE_METAL_TEXTURE_BIT_EXT: + return "VK_EXPORT_METAL_OBJECT_TYPE_METAL_TEXTURE_BIT_EXT"; + case VK_EXPORT_METAL_OBJECT_TYPE_METAL_IOSURFACE_BIT_EXT: + return "VK_EXPORT_METAL_OBJECT_TYPE_METAL_IOSURFACE_BIT_EXT"; + case VK_EXPORT_METAL_OBJECT_TYPE_METAL_SHARED_EVENT_BIT_EXT: + return "VK_EXPORT_METAL_OBJECT_TYPE_METAL_SHARED_EVENT_BIT_EXT"; + default: + return "Unhandled VkExportMetalObjectTypeFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkExportMetalObjectTypeFlagsEXT(VkExportMetalObjectTypeFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkExportMetalObjectTypeFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkExportMetalObjectTypeFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +#endif // VK_USE_PLATFORM_METAL_EXT +static inline const char* string_VkGraphicsPipelineLibraryFlagBitsEXT(VkGraphicsPipelineLibraryFlagBitsEXT input_value) { + switch (input_value) { + case VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT: + return "VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT"; + case VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT: + return "VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT"; + case VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT: + return "VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT"; + case VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT: + return "VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT"; + default: + return "Unhandled VkGraphicsPipelineLibraryFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkGraphicsPipelineLibraryFlagsEXT(VkGraphicsPipelineLibraryFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkGraphicsPipelineLibraryFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkGraphicsPipelineLibraryFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkImageCompressionFlagBitsEXT(VkImageCompressionFlagBitsEXT input_value) { + switch (input_value) { + case VK_IMAGE_COMPRESSION_DEFAULT_EXT: + return "VK_IMAGE_COMPRESSION_DEFAULT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT"; + case VK_IMAGE_COMPRESSION_DISABLED_EXT: + return "VK_IMAGE_COMPRESSION_DISABLED_EXT"; + default: + return "Unhandled VkImageCompressionFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkImageCompressionFlagsEXT(VkImageCompressionFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkImageCompressionFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkImageCompressionFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkImageCompressionFixedRateFlagBitsEXT(VkImageCompressionFixedRateFlagBitsEXT input_value) { + switch (input_value) { + case 
VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_13BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_13BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_14BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_14BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_15BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_15BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_16BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_16BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_17BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_17BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_18BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_18BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_19BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_19BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_20BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_20BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_21BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_21BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_22BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_22BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_23BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_23BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT"; + default: + return "Unhandled VkImageCompressionFixedRateFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkImageCompressionFixedRateFlagsEXT(VkImageCompressionFixedRateFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkImageCompressionFixedRateFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkImageCompressionFixedRateFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDeviceAddressBindingFlagBitsEXT(VkDeviceAddressBindingFlagBitsEXT input_value) { + switch (input_value) { + case VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT: + 
return "VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT"; + default: + return "Unhandled VkDeviceAddressBindingFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDeviceAddressBindingFlagsEXT(VkDeviceAddressBindingFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDeviceAddressBindingFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDeviceAddressBindingFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +#ifdef VK_USE_PLATFORM_FUCHSIA +static inline const char* string_VkImageConstraintsInfoFlagBitsFUCHSIA(VkImageConstraintsInfoFlagBitsFUCHSIA input_value) { + switch (input_value) { + case VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_RARELY_FUCHSIA: + return "VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_RARELY_FUCHSIA"; + case VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_OFTEN_FUCHSIA: + return "VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_OFTEN_FUCHSIA"; + case VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_RARELY_FUCHSIA: + return "VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_RARELY_FUCHSIA"; + case VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_OFTEN_FUCHSIA: + return "VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_OFTEN_FUCHSIA"; + case VK_IMAGE_CONSTRAINTS_INFO_PROTECTED_OPTIONAL_FUCHSIA: + return "VK_IMAGE_CONSTRAINTS_INFO_PROTECTED_OPTIONAL_FUCHSIA"; + default: + return "Unhandled VkImageConstraintsInfoFlagBitsFUCHSIA"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkImageConstraintsInfoFlagsFUCHSIA(VkImageConstraintsInfoFlagsFUCHSIA input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkImageConstraintsInfoFlagBitsFUCHSIA(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkImageConstraintsInfoFlagsFUCHSIA(0)"); + return ret; +} +#endif // __cplusplus +#endif // VK_USE_PLATFORM_FUCHSIA +static inline const char* string_VkFrameBoundaryFlagBitsEXT(VkFrameBoundaryFlagBitsEXT input_value) { + switch (input_value) { + case VK_FRAME_BOUNDARY_FRAME_END_BIT_EXT: + return "VK_FRAME_BOUNDARY_FRAME_END_BIT_EXT"; + default: + return "Unhandled VkFrameBoundaryFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkFrameBoundaryFlagsEXT(VkFrameBoundaryFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkFrameBoundaryFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkFrameBoundaryFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkBuildMicromapFlagBitsEXT(VkBuildMicromapFlagBitsEXT input_value) { + switch (input_value) { + case VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT: + return "VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT"; + case VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT: + return "VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT"; + case VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT: + return "VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT"; + default: + return "Unhandled VkBuildMicromapFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkBuildMicromapFlagsEXT(VkBuildMicromapFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + 
ret.append(string_VkBuildMicromapFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkBuildMicromapFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkMicromapCreateFlagBitsEXT(VkMicromapCreateFlagBitsEXT input_value) { + switch (input_value) { + case VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT: + return "VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT"; + default: + return "Unhandled VkMicromapCreateFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkMicromapCreateFlagsEXT(VkMicromapCreateFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkMicromapCreateFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkMicromapCreateFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPhysicalDeviceSchedulingControlsFlagBitsARM(uint64_t input_value) { + if (input_value == VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_SHADER_CORE_COUNT_ARM) return "VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_SHADER_CORE_COUNT_ARM"; + return "Unhandled VkPhysicalDeviceSchedulingControlsFlagBitsARM"; +} + +#ifdef __cplusplus +static inline std::string string_VkPhysicalDeviceSchedulingControlsFlagsARM(VkPhysicalDeviceSchedulingControlsFlagsARM input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPhysicalDeviceSchedulingControlsFlagBitsARM(static_cast(1ULL << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPhysicalDeviceSchedulingControlsFlagsARM(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkMemoryDecompressionMethodFlagBitsNV(uint64_t input_value) { + if (input_value == VK_MEMORY_DECOMPRESSION_METHOD_GDEFLATE_1_0_BIT_NV) return "VK_MEMORY_DECOMPRESSION_METHOD_GDEFLATE_1_0_BIT_NV"; + return "Unhandled VkMemoryDecompressionMethodFlagBitsNV"; +} + +#ifdef __cplusplus +static inline std::string string_VkMemoryDecompressionMethodFlagsNV(VkMemoryDecompressionMethodFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkMemoryDecompressionMethodFlagBitsNV(static_cast(1ULL << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkMemoryDecompressionMethodFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkOpticalFlowGridSizeFlagBitsNV(VkOpticalFlowGridSizeFlagBitsNV input_value) { + switch (input_value) { + case VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV: + return "VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV"; + case VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV: + return "VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV"; + case VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV: + return "VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV"; + case VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV: + return "VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV"; + case VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV: + return "VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV"; + default: + return "Unhandled VkOpticalFlowGridSizeFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkOpticalFlowGridSizeFlagsNV(VkOpticalFlowGridSizeFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) 
{ + if( !ret.empty()) ret.append("|"); + ret.append(string_VkOpticalFlowGridSizeFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkOpticalFlowGridSizeFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkOpticalFlowUsageFlagBitsNV(VkOpticalFlowUsageFlagBitsNV input_value) { + switch (input_value) { + case VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV: + return "VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV"; + case VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV: + return "VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV"; + case VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV: + return "VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV"; + case VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV: + return "VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV"; + case VK_OPTICAL_FLOW_USAGE_COST_BIT_NV: + return "VK_OPTICAL_FLOW_USAGE_COST_BIT_NV"; + case VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV: + return "VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV"; + default: + return "Unhandled VkOpticalFlowUsageFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkOpticalFlowUsageFlagsNV(VkOpticalFlowUsageFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkOpticalFlowUsageFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkOpticalFlowUsageFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkOpticalFlowSessionCreateFlagBitsNV(VkOpticalFlowSessionCreateFlagBitsNV input_value) { + switch (input_value) { + case VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV: + return "VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV"; + case VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV: + return "VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV"; + case VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV: + return "VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV"; + case VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV: + return "VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV"; + case VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV: + return "VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV"; + default: + return "Unhandled VkOpticalFlowSessionCreateFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkOpticalFlowSessionCreateFlagsNV(VkOpticalFlowSessionCreateFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkOpticalFlowSessionCreateFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkOpticalFlowSessionCreateFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkOpticalFlowExecuteFlagBitsNV(VkOpticalFlowExecuteFlagBitsNV input_value) { + switch (input_value) { + case VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV: + return "VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV"; + default: + return "Unhandled VkOpticalFlowExecuteFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkOpticalFlowExecuteFlagsNV(VkOpticalFlowExecuteFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkOpticalFlowExecuteFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) 
ret.append("VkOpticalFlowExecuteFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkShaderCreateFlagBitsEXT(VkShaderCreateFlagBitsEXT input_value) { + switch (input_value) { + case VK_SHADER_CREATE_LINK_STAGE_BIT_EXT: + return "VK_SHADER_CREATE_LINK_STAGE_BIT_EXT"; + case VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT: + return "VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT"; + case VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT: + return "VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT"; + case VK_SHADER_CREATE_NO_TASK_SHADER_BIT_EXT: + return "VK_SHADER_CREATE_NO_TASK_SHADER_BIT_EXT"; + case VK_SHADER_CREATE_DISPATCH_BASE_BIT_EXT: + return "VK_SHADER_CREATE_DISPATCH_BASE_BIT_EXT"; + case VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT: + return "VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT"; + case VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT: + return "VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT"; + case VK_SHADER_CREATE_INDIRECT_BINDABLE_BIT_EXT: + return "VK_SHADER_CREATE_INDIRECT_BINDABLE_BIT_EXT"; + default: + return "Unhandled VkShaderCreateFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkShaderCreateFlagsEXT(VkShaderCreateFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkShaderCreateFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkShaderCreateFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkClusterAccelerationStructureAddressResolutionFlagBitsNV(VkClusterAccelerationStructureAddressResolutionFlagBitsNV input_value) { + switch (input_value) { + case VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_DST_IMPLICIT_DATA_BIT_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_DST_IMPLICIT_DATA_BIT_NV"; + case VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_SCRATCH_DATA_BIT_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_SCRATCH_DATA_BIT_NV"; + case VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_DST_ADDRESS_ARRAY_BIT_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_DST_ADDRESS_ARRAY_BIT_NV"; + case VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_DST_SIZES_ARRAY_BIT_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_DST_SIZES_ARRAY_BIT_NV"; + case VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_SRC_INFOS_ARRAY_BIT_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_SRC_INFOS_ARRAY_BIT_NV"; + case VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_SRC_INFOS_COUNT_BIT_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_SRC_INFOS_COUNT_BIT_NV"; + default: + return "Unhandled VkClusterAccelerationStructureAddressResolutionFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkClusterAccelerationStructureAddressResolutionFlagsNV(VkClusterAccelerationStructureAddressResolutionFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkClusterAccelerationStructureAddressResolutionFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if 
(ret.empty()) ret.append("VkClusterAccelerationStructureAddressResolutionFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkClusterAccelerationStructureClusterFlagBitsNV(VkClusterAccelerationStructureClusterFlagBitsNV input_value) { + switch (input_value) { + case VK_CLUSTER_ACCELERATION_STRUCTURE_CLUSTER_ALLOW_DISABLE_OPACITY_MICROMAPS_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_CLUSTER_ALLOW_DISABLE_OPACITY_MICROMAPS_NV"; + default: + return "Unhandled VkClusterAccelerationStructureClusterFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkClusterAccelerationStructureClusterFlagsNV(VkClusterAccelerationStructureClusterFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkClusterAccelerationStructureClusterFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkClusterAccelerationStructureClusterFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkClusterAccelerationStructureGeometryFlagBitsNV(VkClusterAccelerationStructureGeometryFlagBitsNV input_value) { + switch (input_value) { + case VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_CULL_DISABLE_BIT_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_CULL_DISABLE_BIT_NV"; + case VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_NO_DUPLICATE_ANYHIT_INVOCATION_BIT_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_NO_DUPLICATE_ANYHIT_INVOCATION_BIT_NV"; + case VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_OPAQUE_BIT_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_OPAQUE_BIT_NV"; + default: + return "Unhandled VkClusterAccelerationStructureGeometryFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkClusterAccelerationStructureGeometryFlagsNV(VkClusterAccelerationStructureGeometryFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkClusterAccelerationStructureGeometryFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkClusterAccelerationStructureGeometryFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkClusterAccelerationStructureIndexFormatFlagBitsNV(VkClusterAccelerationStructureIndexFormatFlagBitsNV input_value) { + switch (input_value) { + case VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_8BIT_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_8BIT_NV"; + case VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_16BIT_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_16BIT_NV"; + case VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_32BIT_NV: + return "VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_32BIT_NV"; + default: + return "Unhandled VkClusterAccelerationStructureIndexFormatFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkClusterAccelerationStructureIndexFormatFlagsNV(VkClusterAccelerationStructureIndexFormatFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkClusterAccelerationStructureIndexFormatFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkClusterAccelerationStructureIndexFormatFlagsNV(0)"); + return 
ret; +} +#endif // __cplusplus +static inline const char* string_VkPartitionedAccelerationStructureInstanceFlagBitsNV(VkPartitionedAccelerationStructureInstanceFlagBitsNV input_value) { + switch (input_value) { + case VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_TRIANGLE_FACING_CULL_DISABLE_BIT_NV: + return "VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_TRIANGLE_FACING_CULL_DISABLE_BIT_NV"; + case VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_TRIANGLE_FLIP_FACING_BIT_NV: + return "VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_TRIANGLE_FLIP_FACING_BIT_NV"; + case VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_FORCE_OPAQUE_BIT_NV: + return "VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_FORCE_OPAQUE_BIT_NV"; + case VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_FORCE_NO_OPAQUE_BIT_NV: + return "VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_FORCE_NO_OPAQUE_BIT_NV"; + case VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_ENABLE_EXPLICIT_BOUNDING_BOX_NV: + return "VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_ENABLE_EXPLICIT_BOUNDING_BOX_NV"; + default: + return "Unhandled VkPartitionedAccelerationStructureInstanceFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPartitionedAccelerationStructureInstanceFlagsNV(VkPartitionedAccelerationStructureInstanceFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPartitionedAccelerationStructureInstanceFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPartitionedAccelerationStructureInstanceFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkIndirectCommandsInputModeFlagBitsEXT(VkIndirectCommandsInputModeFlagBitsEXT input_value) { + switch (input_value) { + case VK_INDIRECT_COMMANDS_INPUT_MODE_VULKAN_INDEX_BUFFER_EXT: + return "VK_INDIRECT_COMMANDS_INPUT_MODE_VULKAN_INDEX_BUFFER_EXT"; + case VK_INDIRECT_COMMANDS_INPUT_MODE_DXGI_INDEX_BUFFER_EXT: + return "VK_INDIRECT_COMMANDS_INPUT_MODE_DXGI_INDEX_BUFFER_EXT"; + default: + return "Unhandled VkIndirectCommandsInputModeFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkIndirectCommandsInputModeFlagsEXT(VkIndirectCommandsInputModeFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkIndirectCommandsInputModeFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkIndirectCommandsInputModeFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkIndirectCommandsLayoutUsageFlagBitsEXT(VkIndirectCommandsLayoutUsageFlagBitsEXT input_value) { + switch (input_value) { + case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_EXT: + return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_EXT"; + case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_EXT: + return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_EXT"; + default: + return "Unhandled VkIndirectCommandsLayoutUsageFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkIndirectCommandsLayoutUsageFlagsEXT(VkIndirectCommandsLayoutUsageFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + 
ret.append(string_VkIndirectCommandsLayoutUsageFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkIndirectCommandsLayoutUsageFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkAccelerationStructureCreateFlagBitsKHR(VkAccelerationStructureCreateFlagBitsKHR input_value) { + switch (input_value) { + case VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR: + return "VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR"; + case VK_ACCELERATION_STRUCTURE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT: + return "VK_ACCELERATION_STRUCTURE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT"; + case VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV: + return "VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV"; + default: + return "Unhandled VkAccelerationStructureCreateFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkAccelerationStructureCreateFlagsKHR(VkAccelerationStructureCreateFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkAccelerationStructureCreateFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkAccelerationStructureCreateFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +// clang-format on diff --git a/include/vulkan/vk_icd.h b/include/vulkan/vk_icd.h new file mode 100644 index 00000000..59204a34 --- /dev/null +++ b/include/vulkan/vk_icd.h @@ -0,0 +1,244 @@ +/* + * Copyright 2015-2023 The Khronos Group Inc. + * Copyright 2015-2023 Valve Corporation + * Copyright 2015-2023 LunarG, Inc. + * + * SPDX-License-Identifier: Apache-2.0 + */ +#pragma once + +#include "vulkan.h" +#include + +// Loader-ICD version negotiation API. Versions add the following features: +// Version 0 - Initial. Doesn't support vk_icdGetInstanceProcAddr +// or vk_icdNegotiateLoaderICDInterfaceVersion. +// Version 1 - Add support for vk_icdGetInstanceProcAddr. +// Version 2 - Add Loader/ICD Interface version negotiation +// via vk_icdNegotiateLoaderICDInterfaceVersion. +// Version 3 - Add ICD creation/destruction of KHR_surface objects. +// Version 4 - Add unknown physical device extension querying via +// vk_icdGetPhysicalDeviceProcAddr. +// Version 5 - Tells ICDs that the loader is now paying attention to the +// application version of Vulkan passed into the ApplicationInfo +// structure during vkCreateInstance. This will tell the ICD +// that if the loader is older, it should automatically fail a +// call for any API version > 1.0. Otherwise, the loader will +// manually determine if it can support the expected version. +// Version 6 - Add support for vk_icdEnumerateAdapterPhysicalDevices. +// Version 7 - If an ICD supports any of the following functions, they must be +// queryable with vk_icdGetInstanceProcAddr: +// vk_icdNegotiateLoaderICDInterfaceVersion +// vk_icdGetPhysicalDeviceProcAddr +// vk_icdEnumerateAdapterPhysicalDevices (Windows only) +// In addition, these functions no longer need to be exported directly. +// This version allows drivers provided through the extension +// VK_LUNARG_direct_driver_loading be able to support the entire +// Driver-Loader interface. 
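// --- Illustrative sketch (editor's addition, not part of vk_icd.h) ---
// The version history above describes how the loader and an ICD agree on a
// Loader-ICD interface version. A driver targeting version 7 might implement
// the negotiation entry point roughly as follows. MY_ICD_INTERFACE_VERSION and
// MY_ICD_MIN_INTERFACE_VERSION are hypothetical driver-side constants, and the
// sketch assumes the surrounding header's includes (vulkan.h for VKAPI_ATTR,
// VKAPI_CALL and VkResult) are already in effect.
#define MY_ICD_INTERFACE_VERSION     7u   /* highest version this driver supports */
#define MY_ICD_MIN_INTERFACE_VERSION 2u   /* lowest version this driver supports  */

VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pVersion) {
    /* The loader passes in the highest interface version it supports;
     * the ICD lowers it to its own maximum if necessary. */
    if (*pVersion > MY_ICD_INTERFACE_VERSION) {
        *pVersion = MY_ICD_INTERFACE_VERSION;
    }
    /* If the resulting version is older than anything the driver can serve,
     * the two cannot work together. */
    if (*pVersion < MY_ICD_MIN_INTERFACE_VERSION) {
        return VK_ERROR_INCOMPATIBLE_DRIVER;
    }
    return VK_SUCCESS;
}
// --- end of illustrative sketch ---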
+ +#define CURRENT_LOADER_ICD_INTERFACE_VERSION 7 +#define MIN_SUPPORTED_LOADER_ICD_INTERFACE_VERSION 0 +#define MIN_PHYS_DEV_EXTENSION_ICD_INTERFACE_VERSION 4 + +// Old typedefs that don't follow a proper naming convention but are preserved for compatibility +typedef VkResult(VKAPI_PTR *PFN_vkNegotiateLoaderICDInterfaceVersion)(uint32_t *pVersion); +// This is defined in vk_layer.h which will be found by the loader, but if an ICD is building against this +// file directly, it won't be found. +#ifndef PFN_GetPhysicalDeviceProcAddr +typedef PFN_vkVoidFunction(VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char *pName); +#endif + +// Typedefs for loader/ICD interface +typedef VkResult (VKAPI_PTR *PFN_vk_icdNegotiateLoaderICDInterfaceVersion)(uint32_t* pVersion); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetInstanceProcAddr)(VkInstance instance, const char* pName); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName); +#if defined(VK_USE_PLATFORM_WIN32_KHR) +typedef VkResult (VKAPI_PTR *PFN_vk_icdEnumerateAdapterPhysicalDevices)(VkInstance instance, LUID adapterLUID, + uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +#endif + +// Prototypes for loader/ICD interface +#if !defined(VK_NO_PROTOTYPES) +#ifdef __cplusplus +extern "C" { +#endif + VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pVersion); + VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName); + VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance instance, const char* pName); +#if defined(VK_USE_PLATFORM_WIN32_KHR) + VKAPI_ATTR VkResult VKAPI_CALL vk_icdEnumerateAdapterPhysicalDevices(VkInstance instance, LUID adapterLUID, + uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +#endif +#ifdef __cplusplus +} +#endif +#endif + +/* + * The ICD must reserve space for a pointer for the loader's dispatch + * table, at the start of . + * The ICD must initialize this variable using the SET_LOADER_MAGIC_VALUE macro. + */ + +#define ICD_LOADER_MAGIC 0x01CDC0DE + +typedef union { + uintptr_t loaderMagic; + void *loaderData; +} VK_LOADER_DATA; + +static inline void set_loader_magic_value(void *pNewObject) { + VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject; + loader_info->loaderMagic = ICD_LOADER_MAGIC; +} + +static inline bool valid_loader_magic_value(void *pNewObject) { + const VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject; + return (loader_info->loaderMagic & 0xffffffff) == ICD_LOADER_MAGIC; +} + +/* + * Windows and Linux ICDs will treat VkSurfaceKHR as a pointer to a struct that + * contains the platform-specific connection and surface information. 
+ */ +typedef enum { + VK_ICD_WSI_PLATFORM_MIR, + VK_ICD_WSI_PLATFORM_WAYLAND, + VK_ICD_WSI_PLATFORM_WIN32, + VK_ICD_WSI_PLATFORM_XCB, + VK_ICD_WSI_PLATFORM_XLIB, + VK_ICD_WSI_PLATFORM_ANDROID, + VK_ICD_WSI_PLATFORM_MACOS, + VK_ICD_WSI_PLATFORM_IOS, + VK_ICD_WSI_PLATFORM_DISPLAY, + VK_ICD_WSI_PLATFORM_HEADLESS, + VK_ICD_WSI_PLATFORM_METAL, + VK_ICD_WSI_PLATFORM_DIRECTFB, + VK_ICD_WSI_PLATFORM_VI, + VK_ICD_WSI_PLATFORM_GGP, + VK_ICD_WSI_PLATFORM_SCREEN, + VK_ICD_WSI_PLATFORM_FUCHSIA, +} VkIcdWsiPlatform; + +typedef struct { + VkIcdWsiPlatform platform; +} VkIcdSurfaceBase; + +#ifdef VK_USE_PLATFORM_MIR_KHR +typedef struct { + VkIcdSurfaceBase base; + MirConnection *connection; + MirSurface *mirSurface; +} VkIcdSurfaceMir; +#endif // VK_USE_PLATFORM_MIR_KHR + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR +typedef struct { + VkIcdSurfaceBase base; + struct wl_display *display; + struct wl_surface *surface; +} VkIcdSurfaceWayland; +#endif // VK_USE_PLATFORM_WAYLAND_KHR + +#ifdef VK_USE_PLATFORM_WIN32_KHR +typedef struct { + VkIcdSurfaceBase base; + HINSTANCE hinstance; + HWND hwnd; +} VkIcdSurfaceWin32; +#endif // VK_USE_PLATFORM_WIN32_KHR + +#ifdef VK_USE_PLATFORM_XCB_KHR +typedef struct { + VkIcdSurfaceBase base; + xcb_connection_t *connection; + xcb_window_t window; +} VkIcdSurfaceXcb; +#endif // VK_USE_PLATFORM_XCB_KHR + +#ifdef VK_USE_PLATFORM_XLIB_KHR +typedef struct { + VkIcdSurfaceBase base; + Display *dpy; + Window window; +} VkIcdSurfaceXlib; +#endif // VK_USE_PLATFORM_XLIB_KHR + +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT +typedef struct { + VkIcdSurfaceBase base; + IDirectFB *dfb; + IDirectFBSurface *surface; +} VkIcdSurfaceDirectFB; +#endif // VK_USE_PLATFORM_DIRECTFB_EXT + +#ifdef VK_USE_PLATFORM_ANDROID_KHR +typedef struct { + VkIcdSurfaceBase base; + struct ANativeWindow *window; +} VkIcdSurfaceAndroid; +#endif // VK_USE_PLATFORM_ANDROID_KHR + +#ifdef VK_USE_PLATFORM_MACOS_MVK +typedef struct { + VkIcdSurfaceBase base; + const void *pView; +} VkIcdSurfaceMacOS; +#endif // VK_USE_PLATFORM_MACOS_MVK + +#ifdef VK_USE_PLATFORM_IOS_MVK +typedef struct { + VkIcdSurfaceBase base; + const void *pView; +} VkIcdSurfaceIOS; +#endif // VK_USE_PLATFORM_IOS_MVK + +#ifdef VK_USE_PLATFORM_GGP +typedef struct { + VkIcdSurfaceBase base; + GgpStreamDescriptor streamDescriptor; +} VkIcdSurfaceGgp; +#endif // VK_USE_PLATFORM_GGP + +typedef struct { + VkIcdSurfaceBase base; + VkDisplayModeKHR displayMode; + uint32_t planeIndex; + uint32_t planeStackIndex; + VkSurfaceTransformFlagBitsKHR transform; + float globalAlpha; + VkDisplayPlaneAlphaFlagBitsKHR alphaMode; + VkExtent2D imageExtent; +} VkIcdSurfaceDisplay; + +typedef struct { + VkIcdSurfaceBase base; +} VkIcdSurfaceHeadless; + +#ifdef VK_USE_PLATFORM_METAL_EXT +typedef struct { + VkIcdSurfaceBase base; + const CAMetalLayer *pLayer; +} VkIcdSurfaceMetal; +#endif // VK_USE_PLATFORM_METAL_EXT + +#ifdef VK_USE_PLATFORM_VI_NN +typedef struct { + VkIcdSurfaceBase base; + void *window; +} VkIcdSurfaceVi; +#endif // VK_USE_PLATFORM_VI_NN + +#ifdef VK_USE_PLATFORM_SCREEN_QNX +typedef struct { + VkIcdSurfaceBase base; + struct _screen_context *context; + struct _screen_window *window; +} VkIcdSurfaceScreen; +#endif // VK_USE_PLATFORM_SCREEN_QNX + +#ifdef VK_USE_PLATFORM_FUCHSIA +typedef struct { + VkIcdSurfaceBase base; +} VkIcdSurfaceImagePipe; +#endif // VK_USE_PLATFORM_FUCHSIA diff --git a/include/vulkan/vk_layer.h b/include/vulkan/vk_layer.h new file mode 100644 index 00000000..19d88fce --- /dev/null +++ b/include/vulkan/vk_layer.h @@ -0,0 +1,189 @@ +/* + * Copyright 
2015-2023 The Khronos Group Inc. + * Copyright 2015-2023 Valve Corporation + * Copyright 2015-2023 LunarG, Inc. + * + * SPDX-License-Identifier: Apache-2.0 + */ +#pragma once + +/* Need to define dispatch table + * Core struct can then have ptr to dispatch table at the top + * Along with object ptrs for current and next OBJ + */ + +#include "vulkan_core.h" + +#define MAX_NUM_UNKNOWN_EXTS 250 + + // Loader-Layer version negotiation API. Versions add the following features: + // Versions 0/1 - Initial. Doesn't support vk_layerGetPhysicalDeviceProcAddr + // or vk_icdNegotiateLoaderLayerInterfaceVersion. + // Version 2 - Add support for vk_layerGetPhysicalDeviceProcAddr and + // vk_icdNegotiateLoaderLayerInterfaceVersion. +#define CURRENT_LOADER_LAYER_INTERFACE_VERSION 2 +#define MIN_SUPPORTED_LOADER_LAYER_INTERFACE_VERSION 1 + +#define VK_CURRENT_CHAIN_VERSION 1 + +// Typedef for use in the interfaces below +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName); + +// Version negotiation values +typedef enum VkNegotiateLayerStructType { + LAYER_NEGOTIATE_UNINTIALIZED = 0, + LAYER_NEGOTIATE_INTERFACE_STRUCT = 1, +} VkNegotiateLayerStructType; + +// Version negotiation structures +typedef struct VkNegotiateLayerInterface { + VkNegotiateLayerStructType sType; + void *pNext; + uint32_t loaderLayerInterfaceVersion; + PFN_vkGetInstanceProcAddr pfnGetInstanceProcAddr; + PFN_vkGetDeviceProcAddr pfnGetDeviceProcAddr; + PFN_GetPhysicalDeviceProcAddr pfnGetPhysicalDeviceProcAddr; +} VkNegotiateLayerInterface; + +// Version negotiation functions +typedef VkResult (VKAPI_PTR *PFN_vkNegotiateLoaderLayerInterfaceVersion)(VkNegotiateLayerInterface *pVersionStruct); + +// Function prototype for unknown physical device extension command +typedef VkResult(VKAPI_PTR *PFN_PhysDevExt)(VkPhysicalDevice phys_device); + +// ------------------------------------------------------------------------------------------------ +// CreateInstance and CreateDevice support structures + +/* Sub type of structure for instance and device loader ext of CreateInfo. + * When sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO + * or sType == VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO + * then VkLayerFunction indicates struct type pointed to by pNext + */ +typedef enum VkLayerFunction_ { + VK_LAYER_LINK_INFO = 0, + VK_LOADER_DATA_CALLBACK = 1, + VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK = 2, + VK_LOADER_FEATURES = 3, +} VkLayerFunction; + +typedef struct VkLayerInstanceLink_ { + struct VkLayerInstanceLink_ *pNext; + PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr; + PFN_GetPhysicalDeviceProcAddr pfnNextGetPhysicalDeviceProcAddr; +} VkLayerInstanceLink; + +/* + * When creating the device chain the loader needs to pass + * down information about it's device structure needed at + * the end of the chain. Passing the data via the + * VkLayerDeviceInfo avoids issues with finding the + * exact instance being used. 
+ */ +typedef struct VkLayerDeviceInfo_ { + void *device_info; + PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr; +} VkLayerDeviceInfo; + +typedef VkResult (VKAPI_PTR *PFN_vkSetInstanceLoaderData)(VkInstance instance, + void *object); +typedef VkResult (VKAPI_PTR *PFN_vkSetDeviceLoaderData)(VkDevice device, + void *object); +typedef VkResult (VKAPI_PTR *PFN_vkLayerCreateDevice)(VkInstance instance, VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, PFN_vkGetInstanceProcAddr layerGIPA, PFN_vkGetDeviceProcAddr *nextGDPA); +typedef void (VKAPI_PTR *PFN_vkLayerDestroyDevice)(VkDevice physicalDevice, const VkAllocationCallbacks *pAllocator, PFN_vkDestroyDevice destroyFunction); + +typedef enum VkLoaderFeastureFlagBits { + VK_LOADER_FEATURE_PHYSICAL_DEVICE_SORTING = 0x00000001, +} VkLoaderFlagBits; +typedef VkFlags VkLoaderFeatureFlags; + +typedef struct { + VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO + const void *pNext; + VkLayerFunction function; + union { + VkLayerInstanceLink *pLayerInfo; + PFN_vkSetInstanceLoaderData pfnSetInstanceLoaderData; + struct { + PFN_vkLayerCreateDevice pfnLayerCreateDevice; + PFN_vkLayerDestroyDevice pfnLayerDestroyDevice; + } layerDevice; + VkLoaderFeatureFlags loaderFeatures; + } u; +} VkLayerInstanceCreateInfo; + +typedef struct VkLayerDeviceLink_ { + struct VkLayerDeviceLink_ *pNext; + PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr; + PFN_vkGetDeviceProcAddr pfnNextGetDeviceProcAddr; +} VkLayerDeviceLink; + +typedef struct { + VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO + const void *pNext; + VkLayerFunction function; + union { + VkLayerDeviceLink *pLayerInfo; + PFN_vkSetDeviceLoaderData pfnSetDeviceLoaderData; + } u; +} VkLayerDeviceCreateInfo; + +#ifdef __cplusplus +extern "C" { +#endif + +VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct); + +typedef enum VkChainType { + VK_CHAIN_TYPE_UNKNOWN = 0, + VK_CHAIN_TYPE_ENUMERATE_INSTANCE_EXTENSION_PROPERTIES = 1, + VK_CHAIN_TYPE_ENUMERATE_INSTANCE_LAYER_PROPERTIES = 2, + VK_CHAIN_TYPE_ENUMERATE_INSTANCE_VERSION = 3, +} VkChainType; + +typedef struct VkChainHeader { + VkChainType type; + uint32_t version; + uint32_t size; +} VkChainHeader; + +typedef struct VkEnumerateInstanceExtensionPropertiesChain { + VkChainHeader header; + VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceExtensionPropertiesChain *, const char *, uint32_t *, + VkExtensionProperties *); + const struct VkEnumerateInstanceExtensionPropertiesChain *pNextLink; + +#if defined(__cplusplus) + inline VkResult CallDown(const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties) const { + return pfnNextLayer(pNextLink, pLayerName, pPropertyCount, pProperties); + } +#endif +} VkEnumerateInstanceExtensionPropertiesChain; + +typedef struct VkEnumerateInstanceLayerPropertiesChain { + VkChainHeader header; + VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceLayerPropertiesChain *, uint32_t *, VkLayerProperties *); + const struct VkEnumerateInstanceLayerPropertiesChain *pNextLink; + +#if defined(__cplusplus) + inline VkResult CallDown(uint32_t *pPropertyCount, VkLayerProperties *pProperties) const { + return pfnNextLayer(pNextLink, pPropertyCount, pProperties); + } +#endif +} VkEnumerateInstanceLayerPropertiesChain; + +typedef struct VkEnumerateInstanceVersionChain { + VkChainHeader header; + 
VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceVersionChain *, uint32_t *); + const struct VkEnumerateInstanceVersionChain *pNextLink; + +#if defined(__cplusplus) + inline VkResult CallDown(uint32_t *pApiVersion) const { + return pfnNextLayer(pNextLink, pApiVersion); + } +#endif +} VkEnumerateInstanceVersionChain; + +#ifdef __cplusplus +} +#endif diff --git a/include/vulkan/vk_platform.h b/include/vulkan/vk_platform.h index 72f80493..18e5ca34 100644 --- a/include/vulkan/vk_platform.h +++ b/include/vulkan/vk_platform.h @@ -2,19 +2,9 @@ // File: vk_platform.h // /* -** Copyright (c) 2014-2017 The Khronos Group Inc. +** Copyright 2014-2025 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ @@ -52,7 +42,7 @@ extern "C" #define VKAPI_CALL __stdcall #define VKAPI_PTR VKAPI_CALL #elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH < 7 - #error "Vulkan isn't supported for the 'armeabi' NDK ABI" + #error "Vulkan is not supported for the 'armeabi' NDK ABI" #elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH >= 7 && defined(__ARM_32BIT_STATE) // On Android 32-bit ARM targets, Vulkan functions use the "hardfloat" // calling convention, i.e. float parameters are passed in registers. This @@ -68,7 +58,9 @@ extern "C" #define VKAPI_PTR #endif -#include +#if !defined(VK_NO_STDDEF_H) + #include +#endif // !defined(VK_NO_STDDEF_H) #if !defined(VK_NO_STDINT_H) #if defined(_MSC_VER) && (_MSC_VER < 1600) @@ -89,32 +81,4 @@ extern "C" } // extern "C" #endif // __cplusplus -// Platform-specific headers required by platform window system extensions. -// These are enabled prior to #including "vulkan.h". The same enable then -// controls inclusion of the extension interfaces in vulkan.h. - -#ifdef VK_USE_PLATFORM_ANDROID_KHR -#include -#endif - -#ifdef VK_USE_PLATFORM_MIR_KHR -#include -#endif - -#ifdef VK_USE_PLATFORM_WAYLAND_KHR -#include -#endif - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#include -#endif - -#ifdef VK_USE_PLATFORM_XLIB_KHR -#include -#endif - -#ifdef VK_USE_PLATFORM_XCB_KHR -#include -#endif - #endif diff --git a/include/vulkan/vulkan.cppm b/include/vulkan/vulkan.cppm new file mode 100644 index 00000000..781a93d3 --- /dev/null +++ b/include/vulkan/vulkan.cppm @@ -0,0 +1,8630 @@ +// Copyright 2015-2025 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +// Note: This module is still in an experimental state. +// Any feedback is welcome on https://github.com/KhronosGroup/Vulkan-Hpp/issues. 
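// --- Illustrative sketch (editor's addition, not part of vulkan.cppm) ---
// A consumer translation unit built with C++20 modules could import this
// experimental vulkan_hpp module instead of including <vulkan/vulkan.hpp>.
// Minimal sketch assuming the default VULKAN_HPP_NAMESPACE (vk), a build
// system that compiles vulkan.cppm as a named module, and exception-based
// error handling; the application/engine names and versions are placeholders.
//
// import vulkan_hpp;
//
// int main() {
//     vk::ApplicationInfo    app_info( "example-app", 1, "no-engine", 1, vk::ApiVersion13 );
//     vk::InstanceCreateInfo create_info( {}, &app_info );
//     vk::Instance           instance = vk::createInstance( create_info );
//     // ... use the instance ...
//     instance.destroy();
// }
// --- end of illustrative sketch ---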
+ +module; + +#include + +#if defined( __cpp_lib_modules ) && !defined( VULKAN_HPP_ENABLE_STD_MODULE ) +# define VULKAN_HPP_ENABLE_STD_MODULE +#endif + +#include +#include +#include +#include +#include +#include +#ifndef VULKAN_HPP_NO_TO_STRING +# include +#endif + +export module vulkan_hpp; + +export namespace VULKAN_HPP_NAMESPACE +{ + //===================================== + //=== HARDCODED TYPEs AND FUNCTIONs === + //===================================== + using VULKAN_HPP_NAMESPACE::ArrayWrapper1D; + using VULKAN_HPP_NAMESPACE::ArrayWrapper2D; + using VULKAN_HPP_NAMESPACE::Flags; + using VULKAN_HPP_NAMESPACE::FlagTraits; + + namespace detail + { + using VULKAN_HPP_NAMESPACE::detail::DispatchLoaderBase; + using VULKAN_HPP_NAMESPACE::detail::DispatchLoaderDynamic; +#if !defined( VK_NO_PROTOTYPES ) + using VULKAN_HPP_NAMESPACE::detail::DispatchLoaderStatic; + using VULKAN_HPP_NAMESPACE::detail::getDispatchLoaderStatic; +#endif /*VK_NO_PROTOTYPES*/ + } // namespace detail + + using VULKAN_HPP_NAMESPACE::operator&; + using VULKAN_HPP_NAMESPACE::operator|; + using VULKAN_HPP_NAMESPACE::operator^; + using VULKAN_HPP_NAMESPACE::operator~; + using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE; + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + using VULKAN_HPP_NAMESPACE::ArrayProxy; + using VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries; + using VULKAN_HPP_NAMESPACE::Optional; + using VULKAN_HPP_NAMESPACE::StridedArrayProxy; + using VULKAN_HPP_NAMESPACE::StructureChain; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) + namespace detail + { + using VULKAN_HPP_NAMESPACE::detail::ObjectDestroy; + using VULKAN_HPP_NAMESPACE::detail::ObjectDestroyShared; + using VULKAN_HPP_NAMESPACE::detail::ObjectFree; + using VULKAN_HPP_NAMESPACE::detail::ObjectFreeShared; + using VULKAN_HPP_NAMESPACE::detail::ObjectRelease; + using VULKAN_HPP_NAMESPACE::detail::ObjectReleaseShared; + using VULKAN_HPP_NAMESPACE::detail::PoolFree; + using VULKAN_HPP_NAMESPACE::detail::PoolFreeShared; + } // namespace detail + + using VULKAN_HPP_NAMESPACE::SharedHandle; + using VULKAN_HPP_NAMESPACE::UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + using VULKAN_HPP_NAMESPACE::exchange; + + //================== + //=== BASE TYPEs === + //================== + using VULKAN_HPP_NAMESPACE::Bool32; + using VULKAN_HPP_NAMESPACE::DeviceAddress; + using VULKAN_HPP_NAMESPACE::DeviceSize; + using VULKAN_HPP_NAMESPACE::RemoteAddressNV; + using VULKAN_HPP_NAMESPACE::SampleMask; + + //============= + //=== ENUMs === + //============= + using VULKAN_HPP_NAMESPACE::CppType; + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_NAMESPACE::AccessFlagBits; + using VULKAN_HPP_NAMESPACE::AccessFlags; + using VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlagBits; + using VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags; + using VULKAN_HPP_NAMESPACE::AttachmentLoadOp; + using VULKAN_HPP_NAMESPACE::AttachmentStoreOp; + using VULKAN_HPP_NAMESPACE::BlendFactor; + using VULKAN_HPP_NAMESPACE::BlendOp; + using VULKAN_HPP_NAMESPACE::BorderColor; + using VULKAN_HPP_NAMESPACE::BufferCreateFlagBits; + using VULKAN_HPP_NAMESPACE::BufferCreateFlags; + using VULKAN_HPP_NAMESPACE::BufferUsageFlagBits; + using VULKAN_HPP_NAMESPACE::BufferUsageFlags; + using VULKAN_HPP_NAMESPACE::BufferViewCreateFlagBits; + using VULKAN_HPP_NAMESPACE::BufferViewCreateFlags; + using VULKAN_HPP_NAMESPACE::ColorComponentFlagBits; + using VULKAN_HPP_NAMESPACE::ColorComponentFlags; + using VULKAN_HPP_NAMESPACE::CommandBufferLevel; + using 
VULKAN_HPP_NAMESPACE::CommandBufferResetFlagBits; + using VULKAN_HPP_NAMESPACE::CommandBufferResetFlags; + using VULKAN_HPP_NAMESPACE::CommandBufferUsageFlagBits; + using VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags; + using VULKAN_HPP_NAMESPACE::CommandPoolCreateFlagBits; + using VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags; + using VULKAN_HPP_NAMESPACE::CommandPoolResetFlagBits; + using VULKAN_HPP_NAMESPACE::CommandPoolResetFlags; + using VULKAN_HPP_NAMESPACE::CompareOp; + using VULKAN_HPP_NAMESPACE::ComponentSwizzle; + using VULKAN_HPP_NAMESPACE::CullModeFlagBits; + using VULKAN_HPP_NAMESPACE::CullModeFlags; + using VULKAN_HPP_NAMESPACE::DependencyFlagBits; + using VULKAN_HPP_NAMESPACE::DependencyFlags; + using VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlagBits; + using VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags; + using VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlagBits; + using VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlagBits; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags; + using VULKAN_HPP_NAMESPACE::DescriptorType; + using VULKAN_HPP_NAMESPACE::DeviceCreateFlagBits; + using VULKAN_HPP_NAMESPACE::DeviceCreateFlags; + using VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlagBits; + using VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags; + using VULKAN_HPP_NAMESPACE::DynamicState; + using VULKAN_HPP_NAMESPACE::EventCreateFlagBits; + using VULKAN_HPP_NAMESPACE::EventCreateFlags; + using VULKAN_HPP_NAMESPACE::FenceCreateFlagBits; + using VULKAN_HPP_NAMESPACE::FenceCreateFlags; + using VULKAN_HPP_NAMESPACE::Filter; + using VULKAN_HPP_NAMESPACE::Format; + using VULKAN_HPP_NAMESPACE::FormatFeatureFlagBits; + using VULKAN_HPP_NAMESPACE::FormatFeatureFlags; + using VULKAN_HPP_NAMESPACE::FramebufferCreateFlagBits; + using VULKAN_HPP_NAMESPACE::FramebufferCreateFlags; + using VULKAN_HPP_NAMESPACE::FrontFace; + using VULKAN_HPP_NAMESPACE::ImageAspectFlagBits; + using VULKAN_HPP_NAMESPACE::ImageAspectFlags; + using VULKAN_HPP_NAMESPACE::ImageCreateFlagBits; + using VULKAN_HPP_NAMESPACE::ImageCreateFlags; + using VULKAN_HPP_NAMESPACE::ImageLayout; + using VULKAN_HPP_NAMESPACE::ImageTiling; + using VULKAN_HPP_NAMESPACE::ImageType; + using VULKAN_HPP_NAMESPACE::ImageUsageFlagBits; + using VULKAN_HPP_NAMESPACE::ImageUsageFlags; + using VULKAN_HPP_NAMESPACE::ImageViewCreateFlagBits; + using VULKAN_HPP_NAMESPACE::ImageViewCreateFlags; + using VULKAN_HPP_NAMESPACE::ImageViewType; + using VULKAN_HPP_NAMESPACE::IndexType; + using VULKAN_HPP_NAMESPACE::InstanceCreateFlagBits; + using VULKAN_HPP_NAMESPACE::InstanceCreateFlags; + using VULKAN_HPP_NAMESPACE::InternalAllocationType; + using VULKAN_HPP_NAMESPACE::LogicOp; + using VULKAN_HPP_NAMESPACE::MemoryHeapFlagBits; + using VULKAN_HPP_NAMESPACE::MemoryHeapFlags; + using VULKAN_HPP_NAMESPACE::MemoryMapFlagBits; + using VULKAN_HPP_NAMESPACE::MemoryMapFlags; + using VULKAN_HPP_NAMESPACE::MemoryPropertyFlagBits; + using VULKAN_HPP_NAMESPACE::MemoryPropertyFlags; + using VULKAN_HPP_NAMESPACE::ObjectType; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceType; + using VULKAN_HPP_NAMESPACE::PipelineBindPoint; + using VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion; + using VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlagBits; + using 
VULKAN_HPP_NAMESPACE::PipelineCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineStageFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineStageFlags; + using VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PolygonMode; + using VULKAN_HPP_NAMESPACE::PrimitiveTopology; + using VULKAN_HPP_NAMESPACE::QueryControlFlagBits; + using VULKAN_HPP_NAMESPACE::QueryControlFlags; + using VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlagBits; + using VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags; + using VULKAN_HPP_NAMESPACE::QueryPoolCreateFlagBits; + using VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags; + using VULKAN_HPP_NAMESPACE::QueryResultFlagBits; + using VULKAN_HPP_NAMESPACE::QueryResultFlags; + using VULKAN_HPP_NAMESPACE::QueryType; + using VULKAN_HPP_NAMESPACE::QueueFlagBits; + using VULKAN_HPP_NAMESPACE::QueueFlags; + using VULKAN_HPP_NAMESPACE::RenderPassCreateFlagBits; + using VULKAN_HPP_NAMESPACE::RenderPassCreateFlags; + using VULKAN_HPP_NAMESPACE::Result; + using VULKAN_HPP_NAMESPACE::SampleCountFlagBits; + using VULKAN_HPP_NAMESPACE::SampleCountFlags; + using VULKAN_HPP_NAMESPACE::SamplerAddressMode; + using VULKAN_HPP_NAMESPACE::SamplerCreateFlagBits; + using VULKAN_HPP_NAMESPACE::SamplerCreateFlags; + using VULKAN_HPP_NAMESPACE::SamplerMipmapMode; + using VULKAN_HPP_NAMESPACE::SemaphoreCreateFlagBits; + using VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags; + using VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlagBits; + using VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags; + using VULKAN_HPP_NAMESPACE::ShaderStageFlagBits; + using VULKAN_HPP_NAMESPACE::ShaderStageFlags; + using VULKAN_HPP_NAMESPACE::SharingMode; + using VULKAN_HPP_NAMESPACE::SparseImageFormatFlagBits; + using VULKAN_HPP_NAMESPACE::SparseImageFormatFlags; + using VULKAN_HPP_NAMESPACE::SparseMemoryBindFlagBits; + using VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags; + using VULKAN_HPP_NAMESPACE::StencilFaceFlagBits; + using VULKAN_HPP_NAMESPACE::StencilFaceFlags; + using VULKAN_HPP_NAMESPACE::StencilOp; + using VULKAN_HPP_NAMESPACE::StructureType; + using VULKAN_HPP_NAMESPACE::SubpassContents; + using VULKAN_HPP_NAMESPACE::SubpassDescriptionFlagBits; + using VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags; + using 
VULKAN_HPP_NAMESPACE::SystemAllocationScope; + using VULKAN_HPP_NAMESPACE::VendorId; + using VULKAN_HPP_NAMESPACE::VertexInputRate; + + //=== VK_VERSION_1_1 === + using VULKAN_HPP_NAMESPACE::ChromaLocation; + using VULKAN_HPP_NAMESPACE::ChromaLocationKHR; + using VULKAN_HPP_NAMESPACE::CommandPoolTrimFlagBits; + using VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags; + using VULKAN_HPP_NAMESPACE::CommandPoolTrimFlagsKHR; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlagsKHR; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateTypeKHR; + using VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlagBits; + using VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags; + using VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlagsKHR; + using VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits; + using VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags; + using VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagsKHR; + using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagBits; + using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags; + using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsKHR; + using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits; + using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags; + using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsKHR; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlagBits; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlagsKHR; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagsKHR; + using VULKAN_HPP_NAMESPACE::FenceImportFlagBits; + using VULKAN_HPP_NAMESPACE::FenceImportFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::FenceImportFlags; + using VULKAN_HPP_NAMESPACE::FenceImportFlagsKHR; + using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagBits; + using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::MemoryAllocateFlags; + using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsKHR; + using VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlagBits; + using VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags; + using VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlagsKHR; + using VULKAN_HPP_NAMESPACE::PointClippingBehavior; + using VULKAN_HPP_NAMESPACE::PointClippingBehaviorKHR; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversionKHR; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrRange; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrRangeKHR; + using VULKAN_HPP_NAMESPACE::SemaphoreImportFlagBits; + using VULKAN_HPP_NAMESPACE::SemaphoreImportFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::SemaphoreImportFlags; + using VULKAN_HPP_NAMESPACE::SemaphoreImportFlagsKHR; + using VULKAN_HPP_NAMESPACE::SubgroupFeatureFlagBits; + using 
VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags; + using VULKAN_HPP_NAMESPACE::TessellationDomainOrigin; + using VULKAN_HPP_NAMESPACE::TessellationDomainOriginKHR; + + //=== VK_VERSION_1_2 === + using VULKAN_HPP_NAMESPACE::DescriptorBindingFlagBits; + using VULKAN_HPP_NAMESPACE::DescriptorBindingFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DescriptorBindingFlags; + using VULKAN_HPP_NAMESPACE::DescriptorBindingFlagsEXT; + using VULKAN_HPP_NAMESPACE::DriverId; + using VULKAN_HPP_NAMESPACE::DriverIdKHR; + using VULKAN_HPP_NAMESPACE::ResolveModeFlagBits; + using VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ResolveModeFlags; + using VULKAN_HPP_NAMESPACE::ResolveModeFlagsKHR; + using VULKAN_HPP_NAMESPACE::SamplerReductionMode; + using VULKAN_HPP_NAMESPACE::SamplerReductionModeEXT; + using VULKAN_HPP_NAMESPACE::SemaphoreType; + using VULKAN_HPP_NAMESPACE::SemaphoreTypeKHR; + using VULKAN_HPP_NAMESPACE::SemaphoreWaitFlagBits; + using VULKAN_HPP_NAMESPACE::SemaphoreWaitFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags; + using VULKAN_HPP_NAMESPACE::SemaphoreWaitFlagsKHR; + using VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence; + using VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR; + + //=== VK_VERSION_1_3 === + using VULKAN_HPP_NAMESPACE::AccessFlagBits2; + using VULKAN_HPP_NAMESPACE::AccessFlagBits2KHR; + using VULKAN_HPP_NAMESPACE::AccessFlags2; + using VULKAN_HPP_NAMESPACE::AccessFlags2KHR; + using VULKAN_HPP_NAMESPACE::FormatFeatureFlagBits2; + using VULKAN_HPP_NAMESPACE::FormatFeatureFlagBits2KHR; + using VULKAN_HPP_NAMESPACE::FormatFeatureFlags2; + using VULKAN_HPP_NAMESPACE::FormatFeatureFlags2KHR; + using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags; + using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT; + using VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2; + using VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2KHR; + using VULKAN_HPP_NAMESPACE::PipelineStageFlags2; + using VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR; + using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags; + using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT; + using VULKAN_HPP_NAMESPACE::RenderingFlagBits; + using VULKAN_HPP_NAMESPACE::RenderingFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::RenderingFlags; + using VULKAN_HPP_NAMESPACE::RenderingFlagsKHR; + using VULKAN_HPP_NAMESPACE::SubmitFlagBits; + using VULKAN_HPP_NAMESPACE::SubmitFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::SubmitFlags; + using VULKAN_HPP_NAMESPACE::SubmitFlagsKHR; + using VULKAN_HPP_NAMESPACE::ToolPurposeFlagBits; + using VULKAN_HPP_NAMESPACE::ToolPurposeFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ToolPurposeFlags; + using VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT; + + //=== VK_VERSION_1_4 === + using VULKAN_HPP_NAMESPACE::BufferUsageFlagBits2; + using VULKAN_HPP_NAMESPACE::BufferUsageFlagBits2KHR; + using VULKAN_HPP_NAMESPACE::BufferUsageFlags2; + using VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR; + using VULKAN_HPP_NAMESPACE::HostImageCopyFlagBits; + using VULKAN_HPP_NAMESPACE::HostImageCopyFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::HostImageCopyFlags; + using VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT; + using VULKAN_HPP_NAMESPACE::LineRasterizationMode; + using VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT; + using 
VULKAN_HPP_NAMESPACE::LineRasterizationModeKHR; + using VULKAN_HPP_NAMESPACE::MemoryUnmapFlagBits; + using VULKAN_HPP_NAMESPACE::MemoryUnmapFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::MemoryUnmapFlags; + using VULKAN_HPP_NAMESPACE::MemoryUnmapFlagsKHR; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlagBits2; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlagBits2KHR; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR; + using VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior; + using VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT; + using VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior; + using VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT; + using VULKAN_HPP_NAMESPACE::QueueGlobalPriority; + using VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT; + using VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR; + + //=== VK_KHR_surface === + using VULKAN_HPP_NAMESPACE::ColorSpaceKHR; + using VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR; + using VULKAN_HPP_NAMESPACE::PresentModeKHR; + using VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR; + + //=== VK_KHR_swapchain === + using VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR; + using VULKAN_HPP_NAMESPACE::SwapchainCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR; + + //=== VK_KHR_display === + using VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR; + using VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR; + using VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR; + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + using VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + using VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + using VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + using VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + using VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + using VULKAN_HPP_NAMESPACE::DebugReportFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT; + using VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT; + + //=== VK_AMD_rasterization_order === + using VULKAN_HPP_NAMESPACE::RasterizationOrderAMD; + + //=== VK_KHR_video_queue === + using VULKAN_HPP_NAMESPACE::QueryResultStatusKHR; + using VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagBitsKHR; + using 
VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoCapabilityFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoCodingControlFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEndCodingFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR; + + //=== VK_KHR_video_decode_queue === + using VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR; + + //=== VK_EXT_transform_feedback === + using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT; + + //=== VK_KHR_video_encode_h264 === + using VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264StdFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264StdFlagsKHR; + + //=== VK_KHR_video_encode_h265 === + using VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265StdFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265StdFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsKHR; + + //=== VK_KHR_video_decode_h264 === + using VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsKHR; + + //=== VK_AMD_shader_info === + using VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + using VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagBitsGGP; + using VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagBitsNV; + using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV; + using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBitsNV; + using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV; + 
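For orientation, the core enum and flag types re-exported above become usable through the short namespace once a client translation unit imports the module. A minimal consumer sketch, assuming the surrounding file is built as the vulkan_hpp module interface with the default vk namespace (neither the module name nor the enumerator spellings appear in this diff; they follow upstream Vulkan-Hpp conventions):

import vulkan_hpp;   // assumed module name; the default VULKAN_HPP_NAMESPACE is vk

int main()
{
    // ImageUsageFlagBits/ImageUsageFlags and Format are among the VK_VERSION_1_0
    // re-exports above, so client code can refer to them as vk:: members directly.
    vk::ImageUsageFlags usage = vk::ImageUsageFlagBits::eColorAttachment |
                                vk::ImageUsageFlagBits::eSampled;
    vk::Format          fmt   = vk::Format::eR8G8B8A8Unorm;

    bool ok = static_cast<bool>(usage & vk::ImageUsageFlagBits::eSampled) &&
              fmt == vk::Format::eR8G8B8A8Unorm;
    return ok ? 0 : 1;   // 0 on the expected path
}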
+ //=== VK_EXT_validation_flags === + using VULKAN_HPP_NAMESPACE::ValidationCheckEXT; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + using VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagBitsNN; + using VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN; +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_EXT_conditional_rendering === + using VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT; + + //=== VK_EXT_display_surface_counter === + using VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT; + + //=== VK_EXT_display_control === + using VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT; + using VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT; + using VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT; + + //=== VK_NV_viewport_swizzle === + using VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagBitsNV; + using VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV; + using VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV; + + //=== VK_EXT_discard_rectangles === + using VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT; + using VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT; + + //=== VK_EXT_conservative_rasterization === + using VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT; + + //=== VK_EXT_depth_clip_enable === + using VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT; + + //=== VK_KHR_performance_query === + using VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + using VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagBitsMVK; + using VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + using VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagBitsMVK; + using VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + using VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT; + + //=== VK_EXT_blend_operation_advanced === + using VULKAN_HPP_NAMESPACE::BlendOverlapEXT; + + //=== VK_NV_fragment_coverage_to_color === + using VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagBitsNV; + using 
VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV; + + //=== VK_KHR_acceleration_structure === + using VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV; + using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagBitsNV; + using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR; + using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV; + using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR; + using VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR; + using VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV; + using VULKAN_HPP_NAMESPACE::GeometryFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::GeometryFlagBitsNV; + using VULKAN_HPP_NAMESPACE::GeometryFlagsKHR; + using VULKAN_HPP_NAMESPACE::GeometryFlagsNV; + using VULKAN_HPP_NAMESPACE::GeometryInstanceFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::GeometryInstanceFlagBitsNV; + using VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR; + using VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsNV; + using VULKAN_HPP_NAMESPACE::GeometryTypeKHR; + using VULKAN_HPP_NAMESPACE::GeometryTypeNV; + + //=== VK_KHR_ray_tracing_pipeline === + using VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR; + using VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV; + using VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR; + + //=== VK_NV_framebuffer_mixed_samples === + using VULKAN_HPP_NAMESPACE::CoverageModulationModeNV; + using VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagBitsNV; + using VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV; + + //=== VK_EXT_validation_cache === + using VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT; + using VULKAN_HPP_NAMESPACE::ValidationCacheHeaderVersionEXT; + + //=== VK_NV_shading_rate_image === + using VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV; + using VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV; + + //=== VK_NV_ray_tracing === + using VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV; + + //=== VK_AMD_pipeline_compiler_control === + using VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagBitsAMD; + using VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD; + + //=== VK_AMD_memory_overallocation_behavior === + using VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD; + + //=== VK_INTEL_performance_query === + using VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL; + using VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + using VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagBitsFUCHSIA; + using VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + using VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + 
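The ray-tracing block above deliberately re-exports both the KHR enums and their NV-suffixed spellings. In upstream Vulkan-Hpp the NV spellings are plain type aliases of the KHR types, so the two sets are interchangeable when composing flag masks; the sketch below leans on that aliasing as an assumption (the alias relationship itself is not visible in this diff):

import vulkan_hpp;   // assumed module name; default vk namespace

int main()
{
    // Typical build-flag composition for an acceleration structure build.
    vk::BuildAccelerationStructureFlagsKHR buildFlags =
        vk::BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace |
        vk::BuildAccelerationStructureFlagBitsKHR::eAllowCompaction;

    // Assumed: GeometryFlagBitsNV aliases GeometryFlagBitsKHR, so an NV-spelled bit
    // can initialise a KHR-typed flags value.
    vk::GeometryFlagsKHR geometryFlags = vk::GeometryFlagBitsNV::eOpaque;

    return (geometryFlags == vk::GeometryFlagBitsKHR::eOpaque) &&
           static_cast<bool>(buildFlags & vk::BuildAccelerationStructureFlagBitsKHR::eAllowCompaction)
               ? 0 : 1;
}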
+ //=== VK_KHR_fragment_shading_rate === + using VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR; + + //=== VK_AMD_shader_core_properties2 === + using VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagBitsAMD; + using VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD; + + //=== VK_EXT_validation_features === + using VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT; + using VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT; + + //=== VK_NV_coverage_reduction_mode === + using VULKAN_HPP_NAMESPACE::CoverageReductionModeNV; + using VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagBitsNV; + using VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV; + + //=== VK_EXT_provoking_vertex === + using VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + using VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + using VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT; + + //=== VK_KHR_pipeline_executable_properties === + using VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR; + + //=== VK_EXT_surface_maintenance1 === + using VULKAN_HPP_NAMESPACE::PresentGravityFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT; + using VULKAN_HPP_NAMESPACE::PresentScalingFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT; + + //=== VK_NV_device_generated_commands === + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagBitsNV; + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV; + using VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV; + using VULKAN_HPP_NAMESPACE::IndirectStateFlagBitsNV; + using VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV; + + //=== VK_EXT_depth_bias_control === + using VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT; + + //=== VK_EXT_device_memory_report === + using VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT; + using VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT; + + //=== VK_KHR_video_encode_queue === + using VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR; + + //=== VK_NV_device_diagnostics_config === + using VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagBitsNV; + using VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV; + + //=== VK_QCOM_tile_shading === + using VULKAN_HPP_NAMESPACE::TileShadingRenderPassFlagBitsQCOM; + using VULKAN_HPP_NAMESPACE::TileShadingRenderPassFlagsQCOM; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + using 
VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagsEXT; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_graphics_pipeline_library === + using VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT; + + //=== VK_NV_fragment_shading_rate_enums === + using VULKAN_HPP_NAMESPACE::FragmentShadingRateNV; + using VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV; + + //=== VK_NV_ray_tracing_motion_blur === + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagBitsNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagBitsNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV; + + //=== VK_EXT_image_compression_control === + using VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT; + using VULKAN_HPP_NAMESPACE::ImageCompressionFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT; + + //=== VK_EXT_device_fault === + using VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT; + using VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT; + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + using VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_device_address_binding_report === + using VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT; + using VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + using VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagBitsFUCHSIA; + using VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA; + using VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagBitsFUCHSIA; + using VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_frame_boundary === + using VULKAN_HPP_NAMESPACE::FrameBoundaryFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + using VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagBitsQNX; + using VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_opacity_micromap === + using VULKAN_HPP_NAMESPACE::BuildMicromapFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT; + using VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT; + using VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT; + using VULKAN_HPP_NAMESPACE::MicromapCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT; + using VULKAN_HPP_NAMESPACE::MicromapTypeEXT; + using VULKAN_HPP_NAMESPACE::OpacityMicromapFormatEXT; + using VULKAN_HPP_NAMESPACE::OpacityMicromapSpecialIndexEXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === + using VULKAN_HPP_NAMESPACE::DisplacementMicromapFormatNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_ARM_scheduling_controls === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagBitsARM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM; + + //=== VK_NV_memory_decompression === + using 
VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagBitsNV; + using VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV; + + //=== VK_NV_ray_tracing_linear_swept_spheres === + using VULKAN_HPP_NAMESPACE::RayTracingLssIndexingModeNV; + using VULKAN_HPP_NAMESPACE::RayTracingLssPrimitiveEndCapsModeNV; + + //=== VK_EXT_subpass_merge_feedback === + using VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT; + + //=== VK_LUNARG_direct_driver_loading === + using VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagBitsLUNARG; + using VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG; + using VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG; + + //=== VK_NV_optical_flow === + using VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagBitsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagBitsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagBitsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagBitsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV; + + //=== VK_AMD_anti_lag === + using VULKAN_HPP_NAMESPACE::AntiLagModeAMD; + using VULKAN_HPP_NAMESPACE::AntiLagStageAMD; + + //=== VK_EXT_shader_object === + using VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT; + using VULKAN_HPP_NAMESPACE::ShaderCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT; + + //=== VK_NV_ray_tracing_invocation_reorder === + using VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV; + + //=== VK_NV_cooperative_vector === + using VULKAN_HPP_NAMESPACE::ComponentTypeKHR; + using VULKAN_HPP_NAMESPACE::ComponentTypeNV; + using VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV; + + //=== VK_EXT_layer_settings === + using VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT; + + //=== VK_NV_low_latency2 === + using VULKAN_HPP_NAMESPACE::LatencyMarkerNV; + using VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV; + + //=== VK_KHR_cooperative_matrix === + using VULKAN_HPP_NAMESPACE::ScopeKHR; + using VULKAN_HPP_NAMESPACE::ScopeNV; + + //=== VK_KHR_video_encode_av1 === + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilityFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilityFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1StdFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1StdFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR; + + //=== VK_QCOM_image_processing2 === + using VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM; + + //=== VK_QCOM_filter_cubic_weights === + using VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM; + + //=== VK_MSFT_layered_driver === + using VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT; + + //=== VK_KHR_calibrated_timestamps === + using VULKAN_HPP_NAMESPACE::TimeDomainEXT; + using VULKAN_HPP_NAMESPACE::TimeDomainKHR; + + //=== VK_NV_display_stereo === + using VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV; + + //=== VK_KHR_maintenance7 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR; + + //=== 
VK_NV_cluster_acceleration_structure === + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureAddressResolutionFlagBitsNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureAddressResolutionFlagsNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagBitsNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryFlagBitsNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryFlagsNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureIndexFormatFlagBitsNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureIndexFormatFlagsNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpModeNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpTypeNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTypeNV; + + //=== VK_NV_partitioned_acceleration_structure === + using VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstanceFlagBitsNV; + using VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstanceFlagsNV; + using VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureOpTypeNV; + + //=== VK_EXT_device_generated_commands === + using VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagsEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT; + using VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT; + + //=== VK_KHR_maintenance8 === + using VULKAN_HPP_NAMESPACE::AccessFlagBits3KHR; + using VULKAN_HPP_NAMESPACE::AccessFlags3KHR; + + //=== VK_EXT_depth_clamp_control === + using VULKAN_HPP_NAMESPACE::DepthClampModeEXT; + + //========================= + //=== Index Type Traits === + //========================= + using VULKAN_HPP_NAMESPACE::IndexTypeValue; + + //====================== + //=== ENUM to_string === + //====================== +#if !defined( VULKAN_HPP_NO_TO_STRING ) + using VULKAN_HPP_NAMESPACE::to_string; + using VULKAN_HPP_NAMESPACE::toHexString; +#endif /*VULKAN_HPP_NO_TO_STRING*/ + + //============================= + //=== EXCEPTIONs AND ERRORs === + //============================= +#if !defined( VULKAN_HPP_NO_EXCEPTIONS ) + using VULKAN_HPP_NAMESPACE::DeviceLostError; + using VULKAN_HPP_NAMESPACE::Error; + using VULKAN_HPP_NAMESPACE::errorCategory; + using VULKAN_HPP_NAMESPACE::ErrorCategoryImpl; + using VULKAN_HPP_NAMESPACE::ExtensionNotPresentError; + using VULKAN_HPP_NAMESPACE::FeatureNotPresentError; + using VULKAN_HPP_NAMESPACE::FormatNotSupportedError; + using VULKAN_HPP_NAMESPACE::FragmentationError; + using VULKAN_HPP_NAMESPACE::FragmentedPoolError; + using VULKAN_HPP_NAMESPACE::ImageUsageNotSupportedKHRError; + using VULKAN_HPP_NAMESPACE::IncompatibleDisplayKHRError; + using VULKAN_HPP_NAMESPACE::IncompatibleDriverError; + using VULKAN_HPP_NAMESPACE::InitializationFailedError; + using VULKAN_HPP_NAMESPACE::InvalidDrmFormatModifierPlaneLayoutEXTError; + using VULKAN_HPP_NAMESPACE::InvalidExternalHandleError; + using VULKAN_HPP_NAMESPACE::InvalidOpaqueCaptureAddressError; + using VULKAN_HPP_NAMESPACE::InvalidShaderNVError; + using VULKAN_HPP_NAMESPACE::LayerNotPresentError; + using VULKAN_HPP_NAMESPACE::LogicError; + using VULKAN_HPP_NAMESPACE::make_error_code; + using VULKAN_HPP_NAMESPACE::make_error_condition; + using VULKAN_HPP_NAMESPACE::MemoryMapFailedError; + using 
VULKAN_HPP_NAMESPACE::NativeWindowInUseKHRError; + using VULKAN_HPP_NAMESPACE::NotPermittedError; + using VULKAN_HPP_NAMESPACE::OutOfDateKHRError; + using VULKAN_HPP_NAMESPACE::OutOfDeviceMemoryError; + using VULKAN_HPP_NAMESPACE::OutOfHostMemoryError; + using VULKAN_HPP_NAMESPACE::OutOfPoolMemoryError; + using VULKAN_HPP_NAMESPACE::SurfaceLostKHRError; + using VULKAN_HPP_NAMESPACE::SystemError; + using VULKAN_HPP_NAMESPACE::TooManyObjectsError; + using VULKAN_HPP_NAMESPACE::UnknownError; + using VULKAN_HPP_NAMESPACE::ValidationFailedEXTError; + using VULKAN_HPP_NAMESPACE::VideoPictureLayoutNotSupportedKHRError; + using VULKAN_HPP_NAMESPACE::VideoProfileCodecNotSupportedKHRError; + using VULKAN_HPP_NAMESPACE::VideoProfileFormatNotSupportedKHRError; + using VULKAN_HPP_NAMESPACE::VideoProfileOperationNotSupportedKHRError; + using VULKAN_HPP_NAMESPACE::VideoStdVersionNotSupportedKHRError; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + using VULKAN_HPP_NAMESPACE::FullScreenExclusiveModeLostEXTError; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + using VULKAN_HPP_NAMESPACE::CompressionExhaustedEXTError; + using VULKAN_HPP_NAMESPACE::InvalidVideoStdParametersKHRError; + using VULKAN_HPP_NAMESPACE::NotEnoughSpaceKHRError; +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + using VULKAN_HPP_NAMESPACE::ResultValue; + using VULKAN_HPP_NAMESPACE::ResultValueType; + + //=========================== + //=== CONSTEXPR CONSTANTs === + //=========================== + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_NAMESPACE::AttachmentUnused; + using VULKAN_HPP_NAMESPACE::False; + using VULKAN_HPP_NAMESPACE::LodClampNone; + using VULKAN_HPP_NAMESPACE::MaxDescriptionSize; + using VULKAN_HPP_NAMESPACE::MaxExtensionNameSize; + using VULKAN_HPP_NAMESPACE::MaxMemoryHeaps; + using VULKAN_HPP_NAMESPACE::MaxMemoryTypes; + using VULKAN_HPP_NAMESPACE::MaxPhysicalDeviceNameSize; + using VULKAN_HPP_NAMESPACE::QueueFamilyIgnored; + using VULKAN_HPP_NAMESPACE::RemainingArrayLayers; + using VULKAN_HPP_NAMESPACE::RemainingMipLevels; + using VULKAN_HPP_NAMESPACE::SubpassExternal; + using VULKAN_HPP_NAMESPACE::True; + using VULKAN_HPP_NAMESPACE::UuidSize; + using VULKAN_HPP_NAMESPACE::WholeSize; + + //=== VK_VERSION_1_1 === + using VULKAN_HPP_NAMESPACE::LuidSize; + using VULKAN_HPP_NAMESPACE::MaxDeviceGroupSize; + using VULKAN_HPP_NAMESPACE::QueueFamilyExternal; + + //=== VK_VERSION_1_2 === + using VULKAN_HPP_NAMESPACE::MaxDriverInfoSize; + using VULKAN_HPP_NAMESPACE::MaxDriverNameSize; + + //=== VK_VERSION_1_4 === + using VULKAN_HPP_NAMESPACE::MaxGlobalPrioritySize; + + //=== VK_KHR_surface === + using VULKAN_HPP_NAMESPACE::KHRSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSurfaceSpecVersion; + + //=== VK_KHR_swapchain === + using VULKAN_HPP_NAMESPACE::KHRSwapchainExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSwapchainSpecVersion; + + //=== VK_KHR_display === + using VULKAN_HPP_NAMESPACE::KHRDisplayExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDisplaySpecVersion; + + //=== VK_KHR_display_swapchain === + using VULKAN_HPP_NAMESPACE::KHRDisplaySwapchainExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDisplaySwapchainSpecVersion; + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + using VULKAN_HPP_NAMESPACE::KHRXlibSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::KHRXlibSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + using VULKAN_HPP_NAMESPACE::KHRXcbSurfaceExtensionName; + using 
VULKAN_HPP_NAMESPACE::KHRXcbSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + using VULKAN_HPP_NAMESPACE::KHRWaylandSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::KHRWaylandSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + using VULKAN_HPP_NAMESPACE::KHRAndroidSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::KHRAndroidSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + using VULKAN_HPP_NAMESPACE::KHRWin32SurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::KHRWin32SurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + using VULKAN_HPP_NAMESPACE::EXTDebugReportExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDebugReportSpecVersion; + + //=== VK_NV_glsl_shader === + using VULKAN_HPP_NAMESPACE::NVGlslShaderExtensionName; + using VULKAN_HPP_NAMESPACE::NVGlslShaderSpecVersion; + + //=== VK_EXT_depth_range_unrestricted === + using VULKAN_HPP_NAMESPACE::EXTDepthRangeUnrestrictedExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDepthRangeUnrestrictedSpecVersion; + + //=== VK_KHR_sampler_mirror_clamp_to_edge === + using VULKAN_HPP_NAMESPACE::KHRSamplerMirrorClampToEdgeExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSamplerMirrorClampToEdgeSpecVersion; + + //=== VK_IMG_filter_cubic === + using VULKAN_HPP_NAMESPACE::IMGFilterCubicExtensionName; + using VULKAN_HPP_NAMESPACE::IMGFilterCubicSpecVersion; + + //=== VK_AMD_rasterization_order === + using VULKAN_HPP_NAMESPACE::AMDRasterizationOrderExtensionName; + using VULKAN_HPP_NAMESPACE::AMDRasterizationOrderSpecVersion; + + //=== VK_AMD_shader_trinary_minmax === + using VULKAN_HPP_NAMESPACE::AMDShaderTrinaryMinmaxExtensionName; + using VULKAN_HPP_NAMESPACE::AMDShaderTrinaryMinmaxSpecVersion; + + //=== VK_AMD_shader_explicit_vertex_parameter === + using VULKAN_HPP_NAMESPACE::AMDShaderExplicitVertexParameterExtensionName; + using VULKAN_HPP_NAMESPACE::AMDShaderExplicitVertexParameterSpecVersion; + + //=== VK_EXT_debug_marker === + using VULKAN_HPP_NAMESPACE::EXTDebugMarkerExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDebugMarkerSpecVersion; + + //=== VK_KHR_video_queue === + using VULKAN_HPP_NAMESPACE::KHRVideoQueueExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoQueueSpecVersion; + + //=== VK_KHR_video_decode_queue === + using VULKAN_HPP_NAMESPACE::KHRVideoDecodeQueueExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoDecodeQueueSpecVersion; + + //=== VK_AMD_gcn_shader === + using VULKAN_HPP_NAMESPACE::AMDGcnShaderExtensionName; + using VULKAN_HPP_NAMESPACE::AMDGcnShaderSpecVersion; + + //=== VK_NV_dedicated_allocation === + using VULKAN_HPP_NAMESPACE::NVDedicatedAllocationExtensionName; + using VULKAN_HPP_NAMESPACE::NVDedicatedAllocationSpecVersion; + + //=== VK_EXT_transform_feedback === + using VULKAN_HPP_NAMESPACE::EXTTransformFeedbackExtensionName; + using VULKAN_HPP_NAMESPACE::EXTTransformFeedbackSpecVersion; + + //=== VK_NVX_binary_import === + using VULKAN_HPP_NAMESPACE::NVXBinaryImportExtensionName; + using VULKAN_HPP_NAMESPACE::NVXBinaryImportSpecVersion; + + //=== VK_NVX_image_view_handle === + using VULKAN_HPP_NAMESPACE::NVXImageViewHandleExtensionName; + using VULKAN_HPP_NAMESPACE::NVXImageViewHandleSpecVersion; + + //=== VK_AMD_draw_indirect_count === + using VULKAN_HPP_NAMESPACE::AMDDrawIndirectCountExtensionName; + using 
VULKAN_HPP_NAMESPACE::AMDDrawIndirectCountSpecVersion; + + //=== VK_AMD_negative_viewport_height === + using VULKAN_HPP_NAMESPACE::AMDNegativeViewportHeightExtensionName; + using VULKAN_HPP_NAMESPACE::AMDNegativeViewportHeightSpecVersion; + + //=== VK_AMD_gpu_shader_half_float === + using VULKAN_HPP_NAMESPACE::AMDGpuShaderHalfFloatExtensionName; + using VULKAN_HPP_NAMESPACE::AMDGpuShaderHalfFloatSpecVersion; + + //=== VK_AMD_shader_ballot === + using VULKAN_HPP_NAMESPACE::AMDShaderBallotExtensionName; + using VULKAN_HPP_NAMESPACE::AMDShaderBallotSpecVersion; + + //=== VK_KHR_video_encode_h264 === + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeH264ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeH264SpecVersion; + + //=== VK_KHR_video_encode_h265 === + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeH265ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeH265SpecVersion; + + //=== VK_KHR_video_decode_h264 === + using VULKAN_HPP_NAMESPACE::KHRVideoDecodeH264ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoDecodeH264SpecVersion; + + //=== VK_AMD_texture_gather_bias_lod === + using VULKAN_HPP_NAMESPACE::AMDTextureGatherBiasLodExtensionName; + using VULKAN_HPP_NAMESPACE::AMDTextureGatherBiasLodSpecVersion; + + //=== VK_AMD_shader_info === + using VULKAN_HPP_NAMESPACE::AMDShaderInfoExtensionName; + using VULKAN_HPP_NAMESPACE::AMDShaderInfoSpecVersion; + + //=== VK_KHR_dynamic_rendering === + using VULKAN_HPP_NAMESPACE::KHRDynamicRenderingExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDynamicRenderingSpecVersion; + + //=== VK_AMD_shader_image_load_store_lod === + using VULKAN_HPP_NAMESPACE::AMDShaderImageLoadStoreLodExtensionName; + using VULKAN_HPP_NAMESPACE::AMDShaderImageLoadStoreLodSpecVersion; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + using VULKAN_HPP_NAMESPACE::GGPStreamDescriptorSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::GGPStreamDescriptorSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_corner_sampled_image === + using VULKAN_HPP_NAMESPACE::NVCornerSampledImageExtensionName; + using VULKAN_HPP_NAMESPACE::NVCornerSampledImageSpecVersion; + + //=== VK_KHR_multiview === + using VULKAN_HPP_NAMESPACE::KHRMultiviewExtensionName; + using VULKAN_HPP_NAMESPACE::KHRMultiviewSpecVersion; + + //=== VK_IMG_format_pvrtc === + using VULKAN_HPP_NAMESPACE::IMGFormatPvrtcExtensionName; + using VULKAN_HPP_NAMESPACE::IMGFormatPvrtcSpecVersion; + + //=== VK_NV_external_memory_capabilities === + using VULKAN_HPP_NAMESPACE::NVExternalMemoryCapabilitiesExtensionName; + using VULKAN_HPP_NAMESPACE::NVExternalMemoryCapabilitiesSpecVersion; + + //=== VK_NV_external_memory === + using VULKAN_HPP_NAMESPACE::NVExternalMemoryExtensionName; + using VULKAN_HPP_NAMESPACE::NVExternalMemorySpecVersion; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + using VULKAN_HPP_NAMESPACE::NVExternalMemoryWin32ExtensionName; + using VULKAN_HPP_NAMESPACE::NVExternalMemoryWin32SpecVersion; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_win32_keyed_mutex === + using VULKAN_HPP_NAMESPACE::NVWin32KeyedMutexExtensionName; + using VULKAN_HPP_NAMESPACE::NVWin32KeyedMutexSpecVersion; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + using VULKAN_HPP_NAMESPACE::KHRGetPhysicalDeviceProperties2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRGetPhysicalDeviceProperties2SpecVersion; + + //=== VK_KHR_device_group === + using 
VULKAN_HPP_NAMESPACE::KHRDeviceGroupExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDeviceGroupSpecVersion; + + //=== VK_EXT_validation_flags === + using VULKAN_HPP_NAMESPACE::EXTValidationFlagsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTValidationFlagsSpecVersion; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + using VULKAN_HPP_NAMESPACE::NNViSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::NNViSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_shader_draw_parameters === + using VULKAN_HPP_NAMESPACE::KHRShaderDrawParametersExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderDrawParametersSpecVersion; + + //=== VK_EXT_shader_subgroup_ballot === + using VULKAN_HPP_NAMESPACE::EXTShaderSubgroupBallotExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderSubgroupBallotSpecVersion; + + //=== VK_EXT_shader_subgroup_vote === + using VULKAN_HPP_NAMESPACE::EXTShaderSubgroupVoteExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderSubgroupVoteSpecVersion; + + //=== VK_EXT_texture_compression_astc_hdr === + using VULKAN_HPP_NAMESPACE::EXTTextureCompressionAstcHdrExtensionName; + using VULKAN_HPP_NAMESPACE::EXTTextureCompressionAstcHdrSpecVersion; + + //=== VK_EXT_astc_decode_mode === + using VULKAN_HPP_NAMESPACE::EXTAstcDecodeModeExtensionName; + using VULKAN_HPP_NAMESPACE::EXTAstcDecodeModeSpecVersion; + + //=== VK_EXT_pipeline_robustness === + using VULKAN_HPP_NAMESPACE::EXTPipelineRobustnessExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPipelineRobustnessSpecVersion; + + //=== VK_KHR_maintenance1 === + using VULKAN_HPP_NAMESPACE::KHRMaintenance1ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRMaintenance1SpecVersion; + + //=== VK_KHR_device_group_creation === + using VULKAN_HPP_NAMESPACE::KHRDeviceGroupCreationExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDeviceGroupCreationSpecVersion; + using VULKAN_HPP_NAMESPACE::MaxDeviceGroupSizeKHR; + + //=== VK_KHR_external_memory_capabilities === + using VULKAN_HPP_NAMESPACE::KHRExternalMemoryCapabilitiesExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalMemoryCapabilitiesSpecVersion; + using VULKAN_HPP_NAMESPACE::LuidSizeKHR; + + //=== VK_KHR_external_memory === + using VULKAN_HPP_NAMESPACE::KHRExternalMemoryExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalMemorySpecVersion; + using VULKAN_HPP_NAMESPACE::QueueFamilyExternalKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + using VULKAN_HPP_NAMESPACE::KHRExternalMemoryWin32ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalMemoryWin32SpecVersion; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + using VULKAN_HPP_NAMESPACE::KHRExternalMemoryFdExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalMemoryFdSpecVersion; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_keyed_mutex === + using VULKAN_HPP_NAMESPACE::KHRWin32KeyedMutexExtensionName; + using VULKAN_HPP_NAMESPACE::KHRWin32KeyedMutexSpecVersion; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_capabilities === + using VULKAN_HPP_NAMESPACE::KHRExternalSemaphoreCapabilitiesExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalSemaphoreCapabilitiesSpecVersion; + + //=== VK_KHR_external_semaphore === + using VULKAN_HPP_NAMESPACE::KHRExternalSemaphoreExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalSemaphoreSpecVersion; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + using 
VULKAN_HPP_NAMESPACE::KHRExternalSemaphoreWin32ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalSemaphoreWin32SpecVersion; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + using VULKAN_HPP_NAMESPACE::KHRExternalSemaphoreFdExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalSemaphoreFdSpecVersion; + + //=== VK_KHR_push_descriptor === + using VULKAN_HPP_NAMESPACE::KHRPushDescriptorExtensionName; + using VULKAN_HPP_NAMESPACE::KHRPushDescriptorSpecVersion; + + //=== VK_EXT_conditional_rendering === + using VULKAN_HPP_NAMESPACE::EXTConditionalRenderingExtensionName; + using VULKAN_HPP_NAMESPACE::EXTConditionalRenderingSpecVersion; + + //=== VK_KHR_shader_float16_int8 === + using VULKAN_HPP_NAMESPACE::KHRShaderFloat16Int8ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderFloat16Int8SpecVersion; + + //=== VK_KHR_16bit_storage === + using VULKAN_HPP_NAMESPACE::KHR16BitStorageExtensionName; + using VULKAN_HPP_NAMESPACE::KHR16BitStorageSpecVersion; + + //=== VK_KHR_incremental_present === + using VULKAN_HPP_NAMESPACE::KHRIncrementalPresentExtensionName; + using VULKAN_HPP_NAMESPACE::KHRIncrementalPresentSpecVersion; + + //=== VK_KHR_descriptor_update_template === + using VULKAN_HPP_NAMESPACE::KHRDescriptorUpdateTemplateExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDescriptorUpdateTemplateSpecVersion; + + //=== VK_NV_clip_space_w_scaling === + using VULKAN_HPP_NAMESPACE::NVClipSpaceWScalingExtensionName; + using VULKAN_HPP_NAMESPACE::NVClipSpaceWScalingSpecVersion; + + //=== VK_EXT_direct_mode_display === + using VULKAN_HPP_NAMESPACE::EXTDirectModeDisplayExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDirectModeDisplaySpecVersion; + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + using VULKAN_HPP_NAMESPACE::EXTAcquireXlibDisplayExtensionName; + using VULKAN_HPP_NAMESPACE::EXTAcquireXlibDisplaySpecVersion; +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + using VULKAN_HPP_NAMESPACE::EXTDisplaySurfaceCounterExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDisplaySurfaceCounterSpecVersion; + + //=== VK_EXT_display_control === + using VULKAN_HPP_NAMESPACE::EXTDisplayControlExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDisplayControlSpecVersion; + + //=== VK_GOOGLE_display_timing === + using VULKAN_HPP_NAMESPACE::GOOGLEDisplayTimingExtensionName; + using VULKAN_HPP_NAMESPACE::GOOGLEDisplayTimingSpecVersion; + + //=== VK_NV_sample_mask_override_coverage === + using VULKAN_HPP_NAMESPACE::NVSampleMaskOverrideCoverageExtensionName; + using VULKAN_HPP_NAMESPACE::NVSampleMaskOverrideCoverageSpecVersion; + + //=== VK_NV_geometry_shader_passthrough === + using VULKAN_HPP_NAMESPACE::NVGeometryShaderPassthroughExtensionName; + using VULKAN_HPP_NAMESPACE::NVGeometryShaderPassthroughSpecVersion; + + //=== VK_NV_viewport_array2 === + using VULKAN_HPP_NAMESPACE::NVViewportArray2ExtensionName; + using VULKAN_HPP_NAMESPACE::NVViewportArray2SpecVersion; + + //=== VK_NVX_multiview_per_view_attributes === + using VULKAN_HPP_NAMESPACE::NVXMultiviewPerViewAttributesExtensionName; + using VULKAN_HPP_NAMESPACE::NVXMultiviewPerViewAttributesSpecVersion; + + //=== VK_NV_viewport_swizzle === + using VULKAN_HPP_NAMESPACE::NVViewportSwizzleExtensionName; + using VULKAN_HPP_NAMESPACE::NVViewportSwizzleSpecVersion; + + //=== VK_EXT_discard_rectangles === + using VULKAN_HPP_NAMESPACE::EXTDiscardRectanglesExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDiscardRectanglesSpecVersion; + + //=== 
VK_EXT_conservative_rasterization === + using VULKAN_HPP_NAMESPACE::EXTConservativeRasterizationExtensionName; + using VULKAN_HPP_NAMESPACE::EXTConservativeRasterizationSpecVersion; + + //=== VK_EXT_depth_clip_enable === + using VULKAN_HPP_NAMESPACE::EXTDepthClipEnableExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDepthClipEnableSpecVersion; + + //=== VK_EXT_swapchain_colorspace === + using VULKAN_HPP_NAMESPACE::EXTSwapchainColorSpaceExtensionName; + using VULKAN_HPP_NAMESPACE::EXTSwapchainColorSpaceSpecVersion; + + //=== VK_EXT_hdr_metadata === + using VULKAN_HPP_NAMESPACE::EXTHdrMetadataExtensionName; + using VULKAN_HPP_NAMESPACE::EXTHdrMetadataSpecVersion; + + //=== VK_KHR_imageless_framebuffer === + using VULKAN_HPP_NAMESPACE::KHRImagelessFramebufferExtensionName; + using VULKAN_HPP_NAMESPACE::KHRImagelessFramebufferSpecVersion; + + //=== VK_KHR_create_renderpass2 === + using VULKAN_HPP_NAMESPACE::KHRCreateRenderpass2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRCreateRenderpass2SpecVersion; + + //=== VK_IMG_relaxed_line_rasterization === + using VULKAN_HPP_NAMESPACE::IMGRelaxedLineRasterizationExtensionName; + using VULKAN_HPP_NAMESPACE::IMGRelaxedLineRasterizationSpecVersion; + + //=== VK_KHR_shared_presentable_image === + using VULKAN_HPP_NAMESPACE::KHRSharedPresentableImageExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSharedPresentableImageSpecVersion; + + //=== VK_KHR_external_fence_capabilities === + using VULKAN_HPP_NAMESPACE::KHRExternalFenceCapabilitiesExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalFenceCapabilitiesSpecVersion; + + //=== VK_KHR_external_fence === + using VULKAN_HPP_NAMESPACE::KHRExternalFenceExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalFenceSpecVersion; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + using VULKAN_HPP_NAMESPACE::KHRExternalFenceWin32ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalFenceWin32SpecVersion; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + using VULKAN_HPP_NAMESPACE::KHRExternalFenceFdExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalFenceFdSpecVersion; + + //=== VK_KHR_performance_query === + using VULKAN_HPP_NAMESPACE::KHRPerformanceQueryExtensionName; + using VULKAN_HPP_NAMESPACE::KHRPerformanceQuerySpecVersion; + + //=== VK_KHR_maintenance2 === + using VULKAN_HPP_NAMESPACE::KHRMaintenance2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRMaintenance2SpecVersion; + + //=== VK_KHR_get_surface_capabilities2 === + using VULKAN_HPP_NAMESPACE::KHRGetSurfaceCapabilities2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRGetSurfaceCapabilities2SpecVersion; + + //=== VK_KHR_variable_pointers === + using VULKAN_HPP_NAMESPACE::KHRVariablePointersExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVariablePointersSpecVersion; + + //=== VK_KHR_get_display_properties2 === + using VULKAN_HPP_NAMESPACE::KHRGetDisplayProperties2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRGetDisplayProperties2SpecVersion; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + using VULKAN_HPP_NAMESPACE::MVKIosSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::MVKIosSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + using VULKAN_HPP_NAMESPACE::MVKMacosSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::MVKMacosSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_external_memory_dma_buf === + using 
VULKAN_HPP_NAMESPACE::EXTExternalMemoryDmaBufExtensionName; + using VULKAN_HPP_NAMESPACE::EXTExternalMemoryDmaBufSpecVersion; + + //=== VK_EXT_queue_family_foreign === + using VULKAN_HPP_NAMESPACE::EXTQueueFamilyForeignExtensionName; + using VULKAN_HPP_NAMESPACE::EXTQueueFamilyForeignSpecVersion; + using VULKAN_HPP_NAMESPACE::QueueFamilyForeignEXT; + + //=== VK_KHR_dedicated_allocation === + using VULKAN_HPP_NAMESPACE::KHRDedicatedAllocationExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDedicatedAllocationSpecVersion; + + //=== VK_EXT_debug_utils === + using VULKAN_HPP_NAMESPACE::EXTDebugUtilsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDebugUtilsSpecVersion; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + using VULKAN_HPP_NAMESPACE::ANDROIDExternalMemoryAndroidHardwareBufferExtensionName; + using VULKAN_HPP_NAMESPACE::ANDROIDExternalMemoryAndroidHardwareBufferSpecVersion; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_EXT_sampler_filter_minmax === + using VULKAN_HPP_NAMESPACE::EXTSamplerFilterMinmaxExtensionName; + using VULKAN_HPP_NAMESPACE::EXTSamplerFilterMinmaxSpecVersion; + + //=== VK_KHR_storage_buffer_storage_class === + using VULKAN_HPP_NAMESPACE::KHRStorageBufferStorageClassExtensionName; + using VULKAN_HPP_NAMESPACE::KHRStorageBufferStorageClassSpecVersion; + + //=== VK_AMD_gpu_shader_int16 === + using VULKAN_HPP_NAMESPACE::AMDGpuShaderInt16ExtensionName; + using VULKAN_HPP_NAMESPACE::AMDGpuShaderInt16SpecVersion; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + using VULKAN_HPP_NAMESPACE::AMDXShaderEnqueueExtensionName; + using VULKAN_HPP_NAMESPACE::AMDXShaderEnqueueSpecVersion; + using VULKAN_HPP_NAMESPACE::ShaderIndexUnusedAMDX; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_AMD_mixed_attachment_samples === + using VULKAN_HPP_NAMESPACE::AMDMixedAttachmentSamplesExtensionName; + using VULKAN_HPP_NAMESPACE::AMDMixedAttachmentSamplesSpecVersion; + + //=== VK_AMD_shader_fragment_mask === + using VULKAN_HPP_NAMESPACE::AMDShaderFragmentMaskExtensionName; + using VULKAN_HPP_NAMESPACE::AMDShaderFragmentMaskSpecVersion; + + //=== VK_EXT_inline_uniform_block === + using VULKAN_HPP_NAMESPACE::EXTInlineUniformBlockExtensionName; + using VULKAN_HPP_NAMESPACE::EXTInlineUniformBlockSpecVersion; + + //=== VK_EXT_shader_stencil_export === + using VULKAN_HPP_NAMESPACE::EXTShaderStencilExportExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderStencilExportSpecVersion; + + //=== VK_KHR_shader_bfloat16 === + using VULKAN_HPP_NAMESPACE::KHRShaderBfloat16ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderBfloat16SpecVersion; + + //=== VK_EXT_sample_locations === + using VULKAN_HPP_NAMESPACE::EXTSampleLocationsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTSampleLocationsSpecVersion; + + //=== VK_KHR_relaxed_block_layout === + using VULKAN_HPP_NAMESPACE::KHRRelaxedBlockLayoutExtensionName; + using VULKAN_HPP_NAMESPACE::KHRRelaxedBlockLayoutSpecVersion; + + //=== VK_KHR_get_memory_requirements2 === + using VULKAN_HPP_NAMESPACE::KHRGetMemoryRequirements2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRGetMemoryRequirements2SpecVersion; + + //=== VK_KHR_image_format_list === + using VULKAN_HPP_NAMESPACE::KHRImageFormatListExtensionName; + using VULKAN_HPP_NAMESPACE::KHRImageFormatListSpecVersion; + + //=== VK_EXT_blend_operation_advanced === + using VULKAN_HPP_NAMESPACE::EXTBlendOperationAdvancedExtensionName; + using VULKAN_HPP_NAMESPACE::EXTBlendOperationAdvancedSpecVersion; + + //=== 
VK_NV_fragment_coverage_to_color === + using VULKAN_HPP_NAMESPACE::NVFragmentCoverageToColorExtensionName; + using VULKAN_HPP_NAMESPACE::NVFragmentCoverageToColorSpecVersion; + + //=== VK_KHR_acceleration_structure === + using VULKAN_HPP_NAMESPACE::KHRAccelerationStructureExtensionName; + using VULKAN_HPP_NAMESPACE::KHRAccelerationStructureSpecVersion; + + //=== VK_KHR_ray_tracing_pipeline === + using VULKAN_HPP_NAMESPACE::KHRRayTracingPipelineExtensionName; + using VULKAN_HPP_NAMESPACE::KHRRayTracingPipelineSpecVersion; + using VULKAN_HPP_NAMESPACE::ShaderUnusedKHR; + + //=== VK_KHR_ray_query === + using VULKAN_HPP_NAMESPACE::KHRRayQueryExtensionName; + using VULKAN_HPP_NAMESPACE::KHRRayQuerySpecVersion; + + //=== VK_NV_framebuffer_mixed_samples === + using VULKAN_HPP_NAMESPACE::NVFramebufferMixedSamplesExtensionName; + using VULKAN_HPP_NAMESPACE::NVFramebufferMixedSamplesSpecVersion; + + //=== VK_NV_fill_rectangle === + using VULKAN_HPP_NAMESPACE::NVFillRectangleExtensionName; + using VULKAN_HPP_NAMESPACE::NVFillRectangleSpecVersion; + + //=== VK_NV_shader_sm_builtins === + using VULKAN_HPP_NAMESPACE::NVShaderSmBuiltinsExtensionName; + using VULKAN_HPP_NAMESPACE::NVShaderSmBuiltinsSpecVersion; + + //=== VK_EXT_post_depth_coverage === + using VULKAN_HPP_NAMESPACE::EXTPostDepthCoverageExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPostDepthCoverageSpecVersion; + + //=== VK_KHR_sampler_ycbcr_conversion === + using VULKAN_HPP_NAMESPACE::KHRSamplerYcbcrConversionExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSamplerYcbcrConversionSpecVersion; + + //=== VK_KHR_bind_memory2 === + using VULKAN_HPP_NAMESPACE::KHRBindMemory2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRBindMemory2SpecVersion; + + //=== VK_EXT_image_drm_format_modifier === + using VULKAN_HPP_NAMESPACE::EXTImageDrmFormatModifierExtensionName; + using VULKAN_HPP_NAMESPACE::EXTImageDrmFormatModifierSpecVersion; + + //=== VK_EXT_validation_cache === + using VULKAN_HPP_NAMESPACE::EXTValidationCacheExtensionName; + using VULKAN_HPP_NAMESPACE::EXTValidationCacheSpecVersion; + + //=== VK_EXT_descriptor_indexing === + using VULKAN_HPP_NAMESPACE::EXTDescriptorIndexingExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDescriptorIndexingSpecVersion; + + //=== VK_EXT_shader_viewport_index_layer === + using VULKAN_HPP_NAMESPACE::EXTShaderViewportIndexLayerExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderViewportIndexLayerSpecVersion; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_portability_subset === + using VULKAN_HPP_NAMESPACE::KHRPortabilitySubsetExtensionName; + using VULKAN_HPP_NAMESPACE::KHRPortabilitySubsetSpecVersion; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_shading_rate_image === + using VULKAN_HPP_NAMESPACE::NVShadingRateImageExtensionName; + using VULKAN_HPP_NAMESPACE::NVShadingRateImageSpecVersion; + + //=== VK_NV_ray_tracing === + using VULKAN_HPP_NAMESPACE::NVRayTracingExtensionName; + using VULKAN_HPP_NAMESPACE::NVRayTracingSpecVersion; + using VULKAN_HPP_NAMESPACE::ShaderUnusedNV; + + //=== VK_NV_representative_fragment_test === + using VULKAN_HPP_NAMESPACE::NVRepresentativeFragmentTestExtensionName; + using VULKAN_HPP_NAMESPACE::NVRepresentativeFragmentTestSpecVersion; + + //=== VK_KHR_maintenance3 === + using VULKAN_HPP_NAMESPACE::KHRMaintenance3ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRMaintenance3SpecVersion; + + //=== VK_KHR_draw_indirect_count === + using VULKAN_HPP_NAMESPACE::KHRDrawIndirectCountExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDrawIndirectCountSpecVersion; + + //=== 
VK_EXT_filter_cubic === + using VULKAN_HPP_NAMESPACE::EXTFilterCubicExtensionName; + using VULKAN_HPP_NAMESPACE::EXTFilterCubicSpecVersion; + + //=== VK_QCOM_render_pass_shader_resolve === + using VULKAN_HPP_NAMESPACE::QCOMRenderPassShaderResolveExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMRenderPassShaderResolveSpecVersion; + + //=== VK_EXT_global_priority === + using VULKAN_HPP_NAMESPACE::EXTGlobalPriorityExtensionName; + using VULKAN_HPP_NAMESPACE::EXTGlobalPrioritySpecVersion; + + //=== VK_KHR_shader_subgroup_extended_types === + using VULKAN_HPP_NAMESPACE::KHRShaderSubgroupExtendedTypesExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderSubgroupExtendedTypesSpecVersion; + + //=== VK_KHR_8bit_storage === + using VULKAN_HPP_NAMESPACE::KHR8BitStorageExtensionName; + using VULKAN_HPP_NAMESPACE::KHR8BitStorageSpecVersion; + + //=== VK_EXT_external_memory_host === + using VULKAN_HPP_NAMESPACE::EXTExternalMemoryHostExtensionName; + using VULKAN_HPP_NAMESPACE::EXTExternalMemoryHostSpecVersion; + + //=== VK_AMD_buffer_marker === + using VULKAN_HPP_NAMESPACE::AMDBufferMarkerExtensionName; + using VULKAN_HPP_NAMESPACE::AMDBufferMarkerSpecVersion; + + //=== VK_KHR_shader_atomic_int64 === + using VULKAN_HPP_NAMESPACE::KHRShaderAtomicInt64ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderAtomicInt64SpecVersion; + + //=== VK_KHR_shader_clock === + using VULKAN_HPP_NAMESPACE::KHRShaderClockExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderClockSpecVersion; + + //=== VK_AMD_pipeline_compiler_control === + using VULKAN_HPP_NAMESPACE::AMDPipelineCompilerControlExtensionName; + using VULKAN_HPP_NAMESPACE::AMDPipelineCompilerControlSpecVersion; + + //=== VK_EXT_calibrated_timestamps === + using VULKAN_HPP_NAMESPACE::EXTCalibratedTimestampsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTCalibratedTimestampsSpecVersion; + + //=== VK_AMD_shader_core_properties === + using VULKAN_HPP_NAMESPACE::AMDShaderCorePropertiesExtensionName; + using VULKAN_HPP_NAMESPACE::AMDShaderCorePropertiesSpecVersion; + + //=== VK_KHR_video_decode_h265 === + using VULKAN_HPP_NAMESPACE::KHRVideoDecodeH265ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoDecodeH265SpecVersion; + + //=== VK_KHR_global_priority === + using VULKAN_HPP_NAMESPACE::KHRGlobalPriorityExtensionName; + using VULKAN_HPP_NAMESPACE::KHRGlobalPrioritySpecVersion; + using VULKAN_HPP_NAMESPACE::MaxGlobalPrioritySizeKHR; + + //=== VK_AMD_memory_overallocation_behavior === + using VULKAN_HPP_NAMESPACE::AMDMemoryOverallocationBehaviorExtensionName; + using VULKAN_HPP_NAMESPACE::AMDMemoryOverallocationBehaviorSpecVersion; + + //=== VK_EXT_vertex_attribute_divisor === + using VULKAN_HPP_NAMESPACE::EXTVertexAttributeDivisorExtensionName; + using VULKAN_HPP_NAMESPACE::EXTVertexAttributeDivisorSpecVersion; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_frame_token === + using VULKAN_HPP_NAMESPACE::GGPFrameTokenExtensionName; + using VULKAN_HPP_NAMESPACE::GGPFrameTokenSpecVersion; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_EXT_pipeline_creation_feedback === + using VULKAN_HPP_NAMESPACE::EXTPipelineCreationFeedbackExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPipelineCreationFeedbackSpecVersion; + + //=== VK_KHR_driver_properties === + using VULKAN_HPP_NAMESPACE::KHRDriverPropertiesExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDriverPropertiesSpecVersion; + using VULKAN_HPP_NAMESPACE::MaxDriverInfoSizeKHR; + using VULKAN_HPP_NAMESPACE::MaxDriverNameSizeKHR; + + //=== VK_KHR_shader_float_controls === + using 
VULKAN_HPP_NAMESPACE::KHRShaderFloatControlsExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderFloatControlsSpecVersion; + + //=== VK_NV_shader_subgroup_partitioned === + using VULKAN_HPP_NAMESPACE::NVShaderSubgroupPartitionedExtensionName; + using VULKAN_HPP_NAMESPACE::NVShaderSubgroupPartitionedSpecVersion; + + //=== VK_KHR_depth_stencil_resolve === + using VULKAN_HPP_NAMESPACE::KHRDepthStencilResolveExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDepthStencilResolveSpecVersion; + + //=== VK_KHR_swapchain_mutable_format === + using VULKAN_HPP_NAMESPACE::KHRSwapchainMutableFormatExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSwapchainMutableFormatSpecVersion; + + //=== VK_NV_compute_shader_derivatives === + using VULKAN_HPP_NAMESPACE::NVComputeShaderDerivativesExtensionName; + using VULKAN_HPP_NAMESPACE::NVComputeShaderDerivativesSpecVersion; + + //=== VK_NV_mesh_shader === + using VULKAN_HPP_NAMESPACE::NVMeshShaderExtensionName; + using VULKAN_HPP_NAMESPACE::NVMeshShaderSpecVersion; + + //=== VK_NV_fragment_shader_barycentric === + using VULKAN_HPP_NAMESPACE::NVFragmentShaderBarycentricExtensionName; + using VULKAN_HPP_NAMESPACE::NVFragmentShaderBarycentricSpecVersion; + + //=== VK_NV_shader_image_footprint === + using VULKAN_HPP_NAMESPACE::NVShaderImageFootprintExtensionName; + using VULKAN_HPP_NAMESPACE::NVShaderImageFootprintSpecVersion; + + //=== VK_NV_scissor_exclusive === + using VULKAN_HPP_NAMESPACE::NVScissorExclusiveExtensionName; + using VULKAN_HPP_NAMESPACE::NVScissorExclusiveSpecVersion; + + //=== VK_NV_device_diagnostic_checkpoints === + using VULKAN_HPP_NAMESPACE::NVDeviceDiagnosticCheckpointsExtensionName; + using VULKAN_HPP_NAMESPACE::NVDeviceDiagnosticCheckpointsSpecVersion; + + //=== VK_KHR_timeline_semaphore === + using VULKAN_HPP_NAMESPACE::KHRTimelineSemaphoreExtensionName; + using VULKAN_HPP_NAMESPACE::KHRTimelineSemaphoreSpecVersion; + + //=== VK_INTEL_shader_integer_functions2 === + using VULKAN_HPP_NAMESPACE::INTELShaderIntegerFunctions2ExtensionName; + using VULKAN_HPP_NAMESPACE::INTELShaderIntegerFunctions2SpecVersion; + + //=== VK_INTEL_performance_query === + using VULKAN_HPP_NAMESPACE::INTELPerformanceQueryExtensionName; + using VULKAN_HPP_NAMESPACE::INTELPerformanceQuerySpecVersion; + + //=== VK_KHR_vulkan_memory_model === + using VULKAN_HPP_NAMESPACE::KHRVulkanMemoryModelExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVulkanMemoryModelSpecVersion; + + //=== VK_EXT_pci_bus_info === + using VULKAN_HPP_NAMESPACE::EXTPciBusInfoExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPciBusInfoSpecVersion; + + //=== VK_AMD_display_native_hdr === + using VULKAN_HPP_NAMESPACE::AMDDisplayNativeHdrExtensionName; + using VULKAN_HPP_NAMESPACE::AMDDisplayNativeHdrSpecVersion; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + using VULKAN_HPP_NAMESPACE::FUCHSIAImagepipeSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::FUCHSIAImagepipeSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_KHR_shader_terminate_invocation === + using VULKAN_HPP_NAMESPACE::KHRShaderTerminateInvocationExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderTerminateInvocationSpecVersion; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + using VULKAN_HPP_NAMESPACE::EXTMetalSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::EXTMetalSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_fragment_density_map === + using VULKAN_HPP_NAMESPACE::EXTFragmentDensityMapExtensionName; + using 
VULKAN_HPP_NAMESPACE::EXTFragmentDensityMapSpecVersion; + + //=== VK_EXT_scalar_block_layout === + using VULKAN_HPP_NAMESPACE::EXTScalarBlockLayoutExtensionName; + using VULKAN_HPP_NAMESPACE::EXTScalarBlockLayoutSpecVersion; + + //=== VK_GOOGLE_hlsl_functionality1 === + using VULKAN_HPP_NAMESPACE::GOOGLEHlslFunctionality1ExtensionName; + using VULKAN_HPP_NAMESPACE::GOOGLEHlslFunctionality1SpecVersion; + + //=== VK_GOOGLE_decorate_string === + using VULKAN_HPP_NAMESPACE::GOOGLEDecorateStringExtensionName; + using VULKAN_HPP_NAMESPACE::GOOGLEDecorateStringSpecVersion; + + //=== VK_EXT_subgroup_size_control === + using VULKAN_HPP_NAMESPACE::EXTSubgroupSizeControlExtensionName; + using VULKAN_HPP_NAMESPACE::EXTSubgroupSizeControlSpecVersion; + + //=== VK_KHR_fragment_shading_rate === + using VULKAN_HPP_NAMESPACE::KHRFragmentShadingRateExtensionName; + using VULKAN_HPP_NAMESPACE::KHRFragmentShadingRateSpecVersion; + + //=== VK_AMD_shader_core_properties2 === + using VULKAN_HPP_NAMESPACE::AMDShaderCoreProperties2ExtensionName; + using VULKAN_HPP_NAMESPACE::AMDShaderCoreProperties2SpecVersion; + + //=== VK_AMD_device_coherent_memory === + using VULKAN_HPP_NAMESPACE::AMDDeviceCoherentMemoryExtensionName; + using VULKAN_HPP_NAMESPACE::AMDDeviceCoherentMemorySpecVersion; + + //=== VK_KHR_dynamic_rendering_local_read === + using VULKAN_HPP_NAMESPACE::KHRDynamicRenderingLocalReadExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDynamicRenderingLocalReadSpecVersion; + + //=== VK_EXT_shader_image_atomic_int64 === + using VULKAN_HPP_NAMESPACE::EXTShaderImageAtomicInt64ExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderImageAtomicInt64SpecVersion; + + //=== VK_KHR_shader_quad_control === + using VULKAN_HPP_NAMESPACE::KHRShaderQuadControlExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderQuadControlSpecVersion; + + //=== VK_KHR_spirv_1_4 === + using VULKAN_HPP_NAMESPACE::KHRSpirv14ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSpirv14SpecVersion; + + //=== VK_EXT_memory_budget === + using VULKAN_HPP_NAMESPACE::EXTMemoryBudgetExtensionName; + using VULKAN_HPP_NAMESPACE::EXTMemoryBudgetSpecVersion; + + //=== VK_EXT_memory_priority === + using VULKAN_HPP_NAMESPACE::EXTMemoryPriorityExtensionName; + using VULKAN_HPP_NAMESPACE::EXTMemoryPrioritySpecVersion; + + //=== VK_KHR_surface_protected_capabilities === + using VULKAN_HPP_NAMESPACE::KHRSurfaceProtectedCapabilitiesExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSurfaceProtectedCapabilitiesSpecVersion; + + //=== VK_NV_dedicated_allocation_image_aliasing === + using VULKAN_HPP_NAMESPACE::NVDedicatedAllocationImageAliasingExtensionName; + using VULKAN_HPP_NAMESPACE::NVDedicatedAllocationImageAliasingSpecVersion; + + //=== VK_KHR_separate_depth_stencil_layouts === + using VULKAN_HPP_NAMESPACE::KHRSeparateDepthStencilLayoutsExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSeparateDepthStencilLayoutsSpecVersion; + + //=== VK_EXT_buffer_device_address === + using VULKAN_HPP_NAMESPACE::EXTBufferDeviceAddressExtensionName; + using VULKAN_HPP_NAMESPACE::EXTBufferDeviceAddressSpecVersion; + + //=== VK_EXT_tooling_info === + using VULKAN_HPP_NAMESPACE::EXTToolingInfoExtensionName; + using VULKAN_HPP_NAMESPACE::EXTToolingInfoSpecVersion; + + //=== VK_EXT_separate_stencil_usage === + using VULKAN_HPP_NAMESPACE::EXTSeparateStencilUsageExtensionName; + using VULKAN_HPP_NAMESPACE::EXTSeparateStencilUsageSpecVersion; + + //=== VK_EXT_validation_features === + using VULKAN_HPP_NAMESPACE::EXTValidationFeaturesExtensionName; + using 
VULKAN_HPP_NAMESPACE::EXTValidationFeaturesSpecVersion; + + //=== VK_KHR_present_wait === + using VULKAN_HPP_NAMESPACE::KHRPresentWaitExtensionName; + using VULKAN_HPP_NAMESPACE::KHRPresentWaitSpecVersion; + + //=== VK_NV_cooperative_matrix === + using VULKAN_HPP_NAMESPACE::NVCooperativeMatrixExtensionName; + using VULKAN_HPP_NAMESPACE::NVCooperativeMatrixSpecVersion; + + //=== VK_NV_coverage_reduction_mode === + using VULKAN_HPP_NAMESPACE::NVCoverageReductionModeExtensionName; + using VULKAN_HPP_NAMESPACE::NVCoverageReductionModeSpecVersion; + + //=== VK_EXT_fragment_shader_interlock === + using VULKAN_HPP_NAMESPACE::EXTFragmentShaderInterlockExtensionName; + using VULKAN_HPP_NAMESPACE::EXTFragmentShaderInterlockSpecVersion; + + //=== VK_EXT_ycbcr_image_arrays === + using VULKAN_HPP_NAMESPACE::EXTYcbcrImageArraysExtensionName; + using VULKAN_HPP_NAMESPACE::EXTYcbcrImageArraysSpecVersion; + + //=== VK_KHR_uniform_buffer_standard_layout === + using VULKAN_HPP_NAMESPACE::KHRUniformBufferStandardLayoutExtensionName; + using VULKAN_HPP_NAMESPACE::KHRUniformBufferStandardLayoutSpecVersion; + + //=== VK_EXT_provoking_vertex === + using VULKAN_HPP_NAMESPACE::EXTProvokingVertexExtensionName; + using VULKAN_HPP_NAMESPACE::EXTProvokingVertexSpecVersion; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + using VULKAN_HPP_NAMESPACE::EXTFullScreenExclusiveExtensionName; + using VULKAN_HPP_NAMESPACE::EXTFullScreenExclusiveSpecVersion; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + using VULKAN_HPP_NAMESPACE::EXTHeadlessSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::EXTHeadlessSurfaceSpecVersion; + + //=== VK_KHR_buffer_device_address === + using VULKAN_HPP_NAMESPACE::KHRBufferDeviceAddressExtensionName; + using VULKAN_HPP_NAMESPACE::KHRBufferDeviceAddressSpecVersion; + + //=== VK_EXT_line_rasterization === + using VULKAN_HPP_NAMESPACE::EXTLineRasterizationExtensionName; + using VULKAN_HPP_NAMESPACE::EXTLineRasterizationSpecVersion; + + //=== VK_EXT_shader_atomic_float === + using VULKAN_HPP_NAMESPACE::EXTShaderAtomicFloatExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderAtomicFloatSpecVersion; + + //=== VK_EXT_host_query_reset === + using VULKAN_HPP_NAMESPACE::EXTHostQueryResetExtensionName; + using VULKAN_HPP_NAMESPACE::EXTHostQueryResetSpecVersion; + + //=== VK_EXT_index_type_uint8 === + using VULKAN_HPP_NAMESPACE::EXTIndexTypeUint8ExtensionName; + using VULKAN_HPP_NAMESPACE::EXTIndexTypeUint8SpecVersion; + + //=== VK_EXT_extended_dynamic_state === + using VULKAN_HPP_NAMESPACE::EXTExtendedDynamicStateExtensionName; + using VULKAN_HPP_NAMESPACE::EXTExtendedDynamicStateSpecVersion; + + //=== VK_KHR_deferred_host_operations === + using VULKAN_HPP_NAMESPACE::KHRDeferredHostOperationsExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDeferredHostOperationsSpecVersion; + + //=== VK_KHR_pipeline_executable_properties === + using VULKAN_HPP_NAMESPACE::KHRPipelineExecutablePropertiesExtensionName; + using VULKAN_HPP_NAMESPACE::KHRPipelineExecutablePropertiesSpecVersion; + + //=== VK_EXT_host_image_copy === + using VULKAN_HPP_NAMESPACE::EXTHostImageCopyExtensionName; + using VULKAN_HPP_NAMESPACE::EXTHostImageCopySpecVersion; + + //=== VK_KHR_map_memory2 === + using VULKAN_HPP_NAMESPACE::KHRMapMemory2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRMapMemory2SpecVersion; + + //=== VK_EXT_map_memory_placed === + using VULKAN_HPP_NAMESPACE::EXTMapMemoryPlacedExtensionName; + using VULKAN_HPP_NAMESPACE::EXTMapMemoryPlacedSpecVersion; + + //=== 
VK_EXT_shader_atomic_float2 === + using VULKAN_HPP_NAMESPACE::EXTShaderAtomicFloat2ExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderAtomicFloat2SpecVersion; + + //=== VK_EXT_surface_maintenance1 === + using VULKAN_HPP_NAMESPACE::EXTSurfaceMaintenance1ExtensionName; + using VULKAN_HPP_NAMESPACE::EXTSurfaceMaintenance1SpecVersion; + + //=== VK_EXT_swapchain_maintenance1 === + using VULKAN_HPP_NAMESPACE::EXTSwapchainMaintenance1ExtensionName; + using VULKAN_HPP_NAMESPACE::EXTSwapchainMaintenance1SpecVersion; + + //=== VK_EXT_shader_demote_to_helper_invocation === + using VULKAN_HPP_NAMESPACE::EXTShaderDemoteToHelperInvocationExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderDemoteToHelperInvocationSpecVersion; + + //=== VK_NV_device_generated_commands === + using VULKAN_HPP_NAMESPACE::NVDeviceGeneratedCommandsExtensionName; + using VULKAN_HPP_NAMESPACE::NVDeviceGeneratedCommandsSpecVersion; + + //=== VK_NV_inherited_viewport_scissor === + using VULKAN_HPP_NAMESPACE::NVInheritedViewportScissorExtensionName; + using VULKAN_HPP_NAMESPACE::NVInheritedViewportScissorSpecVersion; + + //=== VK_KHR_shader_integer_dot_product === + using VULKAN_HPP_NAMESPACE::KHRShaderIntegerDotProductExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderIntegerDotProductSpecVersion; + + //=== VK_EXT_texel_buffer_alignment === + using VULKAN_HPP_NAMESPACE::EXTTexelBufferAlignmentExtensionName; + using VULKAN_HPP_NAMESPACE::EXTTexelBufferAlignmentSpecVersion; + + //=== VK_QCOM_render_pass_transform === + using VULKAN_HPP_NAMESPACE::QCOMRenderPassTransformExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMRenderPassTransformSpecVersion; + + //=== VK_EXT_depth_bias_control === + using VULKAN_HPP_NAMESPACE::EXTDepthBiasControlExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDepthBiasControlSpecVersion; + + //=== VK_EXT_device_memory_report === + using VULKAN_HPP_NAMESPACE::EXTDeviceMemoryReportExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDeviceMemoryReportSpecVersion; + + //=== VK_EXT_acquire_drm_display === + using VULKAN_HPP_NAMESPACE::EXTAcquireDrmDisplayExtensionName; + using VULKAN_HPP_NAMESPACE::EXTAcquireDrmDisplaySpecVersion; + + //=== VK_EXT_robustness2 === + using VULKAN_HPP_NAMESPACE::EXTRobustness2ExtensionName; + using VULKAN_HPP_NAMESPACE::EXTRobustness2SpecVersion; + + //=== VK_EXT_custom_border_color === + using VULKAN_HPP_NAMESPACE::EXTCustomBorderColorExtensionName; + using VULKAN_HPP_NAMESPACE::EXTCustomBorderColorSpecVersion; + + //=== VK_GOOGLE_user_type === + using VULKAN_HPP_NAMESPACE::GOOGLEUserTypeExtensionName; + using VULKAN_HPP_NAMESPACE::GOOGLEUserTypeSpecVersion; + + //=== VK_KHR_pipeline_library === + using VULKAN_HPP_NAMESPACE::KHRPipelineLibraryExtensionName; + using VULKAN_HPP_NAMESPACE::KHRPipelineLibrarySpecVersion; + + //=== VK_NV_present_barrier === + using VULKAN_HPP_NAMESPACE::NVPresentBarrierExtensionName; + using VULKAN_HPP_NAMESPACE::NVPresentBarrierSpecVersion; + + //=== VK_KHR_shader_non_semantic_info === + using VULKAN_HPP_NAMESPACE::KHRShaderNonSemanticInfoExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderNonSemanticInfoSpecVersion; + + //=== VK_KHR_present_id === + using VULKAN_HPP_NAMESPACE::KHRPresentIdExtensionName; + using VULKAN_HPP_NAMESPACE::KHRPresentIdSpecVersion; + + //=== VK_EXT_private_data === + using VULKAN_HPP_NAMESPACE::EXTPrivateDataExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPrivateDataSpecVersion; + + //=== VK_EXT_pipeline_creation_cache_control === + using VULKAN_HPP_NAMESPACE::EXTPipelineCreationCacheControlExtensionName; + using 
VULKAN_HPP_NAMESPACE::EXTPipelineCreationCacheControlSpecVersion; + + //=== VK_KHR_video_encode_queue === + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeQueueExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeQueueSpecVersion; + + //=== VK_NV_device_diagnostics_config === + using VULKAN_HPP_NAMESPACE::NVDeviceDiagnosticsConfigExtensionName; + using VULKAN_HPP_NAMESPACE::NVDeviceDiagnosticsConfigSpecVersion; + + //=== VK_QCOM_render_pass_store_ops === + using VULKAN_HPP_NAMESPACE::QCOMRenderPassStoreOpsExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMRenderPassStoreOpsSpecVersion; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + using VULKAN_HPP_NAMESPACE::NVCudaKernelLaunchExtensionName; + using VULKAN_HPP_NAMESPACE::NVCudaKernelLaunchSpecVersion; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_QCOM_tile_shading === + using VULKAN_HPP_NAMESPACE::QCOMTileShadingExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMTileShadingSpecVersion; + + //=== VK_NV_low_latency === + using VULKAN_HPP_NAMESPACE::NVLowLatencyExtensionName; + using VULKAN_HPP_NAMESPACE::NVLowLatencySpecVersion; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + using VULKAN_HPP_NAMESPACE::EXTMetalObjectsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTMetalObjectsSpecVersion; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + using VULKAN_HPP_NAMESPACE::KHRSynchronization2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSynchronization2SpecVersion; + + //=== VK_EXT_descriptor_buffer === + using VULKAN_HPP_NAMESPACE::EXTDescriptorBufferExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDescriptorBufferSpecVersion; + + //=== VK_EXT_graphics_pipeline_library === + using VULKAN_HPP_NAMESPACE::EXTGraphicsPipelineLibraryExtensionName; + using VULKAN_HPP_NAMESPACE::EXTGraphicsPipelineLibrarySpecVersion; + + //=== VK_AMD_shader_early_and_late_fragment_tests === + using VULKAN_HPP_NAMESPACE::AMDShaderEarlyAndLateFragmentTestsExtensionName; + using VULKAN_HPP_NAMESPACE::AMDShaderEarlyAndLateFragmentTestsSpecVersion; + + //=== VK_KHR_fragment_shader_barycentric === + using VULKAN_HPP_NAMESPACE::KHRFragmentShaderBarycentricExtensionName; + using VULKAN_HPP_NAMESPACE::KHRFragmentShaderBarycentricSpecVersion; + + //=== VK_KHR_shader_subgroup_uniform_control_flow === + using VULKAN_HPP_NAMESPACE::KHRShaderSubgroupUniformControlFlowExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderSubgroupUniformControlFlowSpecVersion; + + //=== VK_KHR_zero_initialize_workgroup_memory === + using VULKAN_HPP_NAMESPACE::KHRZeroInitializeWorkgroupMemoryExtensionName; + using VULKAN_HPP_NAMESPACE::KHRZeroInitializeWorkgroupMemorySpecVersion; + + //=== VK_NV_fragment_shading_rate_enums === + using VULKAN_HPP_NAMESPACE::NVFragmentShadingRateEnumsExtensionName; + using VULKAN_HPP_NAMESPACE::NVFragmentShadingRateEnumsSpecVersion; + + //=== VK_NV_ray_tracing_motion_blur === + using VULKAN_HPP_NAMESPACE::NVRayTracingMotionBlurExtensionName; + using VULKAN_HPP_NAMESPACE::NVRayTracingMotionBlurSpecVersion; + + //=== VK_EXT_mesh_shader === + using VULKAN_HPP_NAMESPACE::EXTMeshShaderExtensionName; + using VULKAN_HPP_NAMESPACE::EXTMeshShaderSpecVersion; + + //=== VK_EXT_ycbcr_2plane_444_formats === + using VULKAN_HPP_NAMESPACE::EXTYcbcr2Plane444FormatsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTYcbcr2Plane444FormatsSpecVersion; + + //=== VK_EXT_fragment_density_map2 === + using VULKAN_HPP_NAMESPACE::EXTFragmentDensityMap2ExtensionName; + using 
VULKAN_HPP_NAMESPACE::EXTFragmentDensityMap2SpecVersion; + + //=== VK_QCOM_rotated_copy_commands === + using VULKAN_HPP_NAMESPACE::QCOMRotatedCopyCommandsExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMRotatedCopyCommandsSpecVersion; + + //=== VK_EXT_image_robustness === + using VULKAN_HPP_NAMESPACE::EXTImageRobustnessExtensionName; + using VULKAN_HPP_NAMESPACE::EXTImageRobustnessSpecVersion; + + //=== VK_KHR_workgroup_memory_explicit_layout === + using VULKAN_HPP_NAMESPACE::KHRWorkgroupMemoryExplicitLayoutExtensionName; + using VULKAN_HPP_NAMESPACE::KHRWorkgroupMemoryExplicitLayoutSpecVersion; + + //=== VK_KHR_copy_commands2 === + using VULKAN_HPP_NAMESPACE::KHRCopyCommands2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRCopyCommands2SpecVersion; + + //=== VK_EXT_image_compression_control === + using VULKAN_HPP_NAMESPACE::EXTImageCompressionControlExtensionName; + using VULKAN_HPP_NAMESPACE::EXTImageCompressionControlSpecVersion; + + //=== VK_EXT_attachment_feedback_loop_layout === + using VULKAN_HPP_NAMESPACE::EXTAttachmentFeedbackLoopLayoutExtensionName; + using VULKAN_HPP_NAMESPACE::EXTAttachmentFeedbackLoopLayoutSpecVersion; + + //=== VK_EXT_4444_formats === + using VULKAN_HPP_NAMESPACE::EXT4444FormatsExtensionName; + using VULKAN_HPP_NAMESPACE::EXT4444FormatsSpecVersion; + + //=== VK_EXT_device_fault === + using VULKAN_HPP_NAMESPACE::EXTDeviceFaultExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDeviceFaultSpecVersion; + + //=== VK_ARM_rasterization_order_attachment_access === + using VULKAN_HPP_NAMESPACE::ARMRasterizationOrderAttachmentAccessExtensionName; + using VULKAN_HPP_NAMESPACE::ARMRasterizationOrderAttachmentAccessSpecVersion; + + //=== VK_EXT_rgba10x6_formats === + using VULKAN_HPP_NAMESPACE::EXTRgba10X6FormatsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTRgba10X6FormatsSpecVersion; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + using VULKAN_HPP_NAMESPACE::NVAcquireWinrtDisplayExtensionName; + using VULKAN_HPP_NAMESPACE::NVAcquireWinrtDisplaySpecVersion; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + using VULKAN_HPP_NAMESPACE::EXTDirectfbSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDirectfbSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_VALVE_mutable_descriptor_type === + using VULKAN_HPP_NAMESPACE::VALVEMutableDescriptorTypeExtensionName; + using VULKAN_HPP_NAMESPACE::VALVEMutableDescriptorTypeSpecVersion; + + //=== VK_EXT_vertex_input_dynamic_state === + using VULKAN_HPP_NAMESPACE::EXTVertexInputDynamicStateExtensionName; + using VULKAN_HPP_NAMESPACE::EXTVertexInputDynamicStateSpecVersion; + + //=== VK_EXT_physical_device_drm === + using VULKAN_HPP_NAMESPACE::EXTPhysicalDeviceDrmExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPhysicalDeviceDrmSpecVersion; + + //=== VK_EXT_device_address_binding_report === + using VULKAN_HPP_NAMESPACE::EXTDeviceAddressBindingReportExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDeviceAddressBindingReportSpecVersion; + + //=== VK_EXT_depth_clip_control === + using VULKAN_HPP_NAMESPACE::EXTDepthClipControlExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDepthClipControlSpecVersion; + + //=== VK_EXT_primitive_topology_list_restart === + using VULKAN_HPP_NAMESPACE::EXTPrimitiveTopologyListRestartExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPrimitiveTopologyListRestartSpecVersion; + + //=== VK_KHR_format_feature_flags2 === + using VULKAN_HPP_NAMESPACE::KHRFormatFeatureFlags2ExtensionName; + 
using VULKAN_HPP_NAMESPACE::KHRFormatFeatureFlags2SpecVersion; + + //=== VK_EXT_present_mode_fifo_latest_ready === + using VULKAN_HPP_NAMESPACE::EXTPresentModeFifoLatestReadyExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPresentModeFifoLatestReadySpecVersion; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + using VULKAN_HPP_NAMESPACE::FUCHSIAExternalMemoryExtensionName; + using VULKAN_HPP_NAMESPACE::FUCHSIAExternalMemorySpecVersion; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + using VULKAN_HPP_NAMESPACE::FUCHSIAExternalSemaphoreExtensionName; + using VULKAN_HPP_NAMESPACE::FUCHSIAExternalSemaphoreSpecVersion; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + using VULKAN_HPP_NAMESPACE::FUCHSIABufferCollectionExtensionName; + using VULKAN_HPP_NAMESPACE::FUCHSIABufferCollectionSpecVersion; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + using VULKAN_HPP_NAMESPACE::HUAWEISubpassShadingExtensionName; + using VULKAN_HPP_NAMESPACE::HUAWEISubpassShadingSpecVersion; + + //=== VK_HUAWEI_invocation_mask === + using VULKAN_HPP_NAMESPACE::HUAWEIInvocationMaskExtensionName; + using VULKAN_HPP_NAMESPACE::HUAWEIInvocationMaskSpecVersion; + + //=== VK_NV_external_memory_rdma === + using VULKAN_HPP_NAMESPACE::NVExternalMemoryRdmaExtensionName; + using VULKAN_HPP_NAMESPACE::NVExternalMemoryRdmaSpecVersion; + + //=== VK_EXT_pipeline_properties === + using VULKAN_HPP_NAMESPACE::EXTPipelinePropertiesExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPipelinePropertiesSpecVersion; + + //=== VK_EXT_frame_boundary === + using VULKAN_HPP_NAMESPACE::EXTFrameBoundaryExtensionName; + using VULKAN_HPP_NAMESPACE::EXTFrameBoundarySpecVersion; + + //=== VK_EXT_multisampled_render_to_single_sampled === + using VULKAN_HPP_NAMESPACE::EXTMultisampledRenderToSingleSampledExtensionName; + using VULKAN_HPP_NAMESPACE::EXTMultisampledRenderToSingleSampledSpecVersion; + + //=== VK_EXT_extended_dynamic_state2 === + using VULKAN_HPP_NAMESPACE::EXTExtendedDynamicState2ExtensionName; + using VULKAN_HPP_NAMESPACE::EXTExtendedDynamicState2SpecVersion; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + using VULKAN_HPP_NAMESPACE::QNXScreenSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::QNXScreenSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + using VULKAN_HPP_NAMESPACE::EXTColorWriteEnableExtensionName; + using VULKAN_HPP_NAMESPACE::EXTColorWriteEnableSpecVersion; + + //=== VK_EXT_primitives_generated_query === + using VULKAN_HPP_NAMESPACE::EXTPrimitivesGeneratedQueryExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPrimitivesGeneratedQuerySpecVersion; + + //=== VK_KHR_ray_tracing_maintenance1 === + using VULKAN_HPP_NAMESPACE::KHRRayTracingMaintenance1ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRRayTracingMaintenance1SpecVersion; + + //=== VK_EXT_global_priority_query === + using VULKAN_HPP_NAMESPACE::EXTGlobalPriorityQueryExtensionName; + using VULKAN_HPP_NAMESPACE::EXTGlobalPriorityQuerySpecVersion; + using VULKAN_HPP_NAMESPACE::MaxGlobalPrioritySizeEXT; + + //=== VK_EXT_image_view_min_lod === + using VULKAN_HPP_NAMESPACE::EXTImageViewMinLodExtensionName; + using VULKAN_HPP_NAMESPACE::EXTImageViewMinLodSpecVersion; + + //=== VK_EXT_multi_draw === + using VULKAN_HPP_NAMESPACE::EXTMultiDrawExtensionName; + using 
VULKAN_HPP_NAMESPACE::EXTMultiDrawSpecVersion; + + //=== VK_EXT_image_2d_view_of_3d === + using VULKAN_HPP_NAMESPACE::EXTImage2DViewOf3DExtensionName; + using VULKAN_HPP_NAMESPACE::EXTImage2DViewOf3DSpecVersion; + + //=== VK_KHR_portability_enumeration === + using VULKAN_HPP_NAMESPACE::KHRPortabilityEnumerationExtensionName; + using VULKAN_HPP_NAMESPACE::KHRPortabilityEnumerationSpecVersion; + + //=== VK_EXT_shader_tile_image === + using VULKAN_HPP_NAMESPACE::EXTShaderTileImageExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderTileImageSpecVersion; + + //=== VK_EXT_opacity_micromap === + using VULKAN_HPP_NAMESPACE::EXTOpacityMicromapExtensionName; + using VULKAN_HPP_NAMESPACE::EXTOpacityMicromapSpecVersion; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === + using VULKAN_HPP_NAMESPACE::NVDisplacementMicromapExtensionName; + using VULKAN_HPP_NAMESPACE::NVDisplacementMicromapSpecVersion; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_load_store_op_none === + using VULKAN_HPP_NAMESPACE::EXTLoadStoreOpNoneExtensionName; + using VULKAN_HPP_NAMESPACE::EXTLoadStoreOpNoneSpecVersion; + + //=== VK_HUAWEI_cluster_culling_shader === + using VULKAN_HPP_NAMESPACE::HUAWEIClusterCullingShaderExtensionName; + using VULKAN_HPP_NAMESPACE::HUAWEIClusterCullingShaderSpecVersion; + + //=== VK_EXT_border_color_swizzle === + using VULKAN_HPP_NAMESPACE::EXTBorderColorSwizzleExtensionName; + using VULKAN_HPP_NAMESPACE::EXTBorderColorSwizzleSpecVersion; + + //=== VK_EXT_pageable_device_local_memory === + using VULKAN_HPP_NAMESPACE::EXTPageableDeviceLocalMemoryExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPageableDeviceLocalMemorySpecVersion; + + //=== VK_KHR_maintenance4 === + using VULKAN_HPP_NAMESPACE::KHRMaintenance4ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRMaintenance4SpecVersion; + + //=== VK_ARM_shader_core_properties === + using VULKAN_HPP_NAMESPACE::ARMShaderCorePropertiesExtensionName; + using VULKAN_HPP_NAMESPACE::ARMShaderCorePropertiesSpecVersion; + + //=== VK_KHR_shader_subgroup_rotate === + using VULKAN_HPP_NAMESPACE::KHRShaderSubgroupRotateExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderSubgroupRotateSpecVersion; + + //=== VK_ARM_scheduling_controls === + using VULKAN_HPP_NAMESPACE::ARMSchedulingControlsExtensionName; + using VULKAN_HPP_NAMESPACE::ARMSchedulingControlsSpecVersion; + + //=== VK_EXT_image_sliced_view_of_3d === + using VULKAN_HPP_NAMESPACE::EXTImageSlicedViewOf3DExtensionName; + using VULKAN_HPP_NAMESPACE::EXTImageSlicedViewOf3DSpecVersion; + using VULKAN_HPP_NAMESPACE::Remaining3DSlicesEXT; + + //=== VK_VALVE_descriptor_set_host_mapping === + using VULKAN_HPP_NAMESPACE::VALVEDescriptorSetHostMappingExtensionName; + using VULKAN_HPP_NAMESPACE::VALVEDescriptorSetHostMappingSpecVersion; + + //=== VK_EXT_depth_clamp_zero_one === + using VULKAN_HPP_NAMESPACE::EXTDepthClampZeroOneExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDepthClampZeroOneSpecVersion; + + //=== VK_EXT_non_seamless_cube_map === + using VULKAN_HPP_NAMESPACE::EXTNonSeamlessCubeMapExtensionName; + using VULKAN_HPP_NAMESPACE::EXTNonSeamlessCubeMapSpecVersion; + + //=== VK_ARM_render_pass_striped === + using VULKAN_HPP_NAMESPACE::ARMRenderPassStripedExtensionName; + using VULKAN_HPP_NAMESPACE::ARMRenderPassStripedSpecVersion; + + //=== VK_QCOM_fragment_density_map_offset === + using VULKAN_HPP_NAMESPACE::QCOMFragmentDensityMapOffsetExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMFragmentDensityMapOffsetSpecVersion; + + //=== VK_NV_copy_memory_indirect === + using 
VULKAN_HPP_NAMESPACE::NVCopyMemoryIndirectExtensionName; + using VULKAN_HPP_NAMESPACE::NVCopyMemoryIndirectSpecVersion; + + //=== VK_NV_memory_decompression === + using VULKAN_HPP_NAMESPACE::NVMemoryDecompressionExtensionName; + using VULKAN_HPP_NAMESPACE::NVMemoryDecompressionSpecVersion; + + //=== VK_NV_device_generated_commands_compute === + using VULKAN_HPP_NAMESPACE::NVDeviceGeneratedCommandsComputeExtensionName; + using VULKAN_HPP_NAMESPACE::NVDeviceGeneratedCommandsComputeSpecVersion; + + //=== VK_NV_ray_tracing_linear_swept_spheres === + using VULKAN_HPP_NAMESPACE::NVRayTracingLinearSweptSpheresExtensionName; + using VULKAN_HPP_NAMESPACE::NVRayTracingLinearSweptSpheresSpecVersion; + + //=== VK_NV_linear_color_attachment === + using VULKAN_HPP_NAMESPACE::NVLinearColorAttachmentExtensionName; + using VULKAN_HPP_NAMESPACE::NVLinearColorAttachmentSpecVersion; + + //=== VK_GOOGLE_surfaceless_query === + using VULKAN_HPP_NAMESPACE::GOOGLESurfacelessQueryExtensionName; + using VULKAN_HPP_NAMESPACE::GOOGLESurfacelessQuerySpecVersion; + + //=== VK_KHR_shader_maximal_reconvergence === + using VULKAN_HPP_NAMESPACE::KHRShaderMaximalReconvergenceExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderMaximalReconvergenceSpecVersion; + + //=== VK_EXT_image_compression_control_swapchain === + using VULKAN_HPP_NAMESPACE::EXTImageCompressionControlSwapchainExtensionName; + using VULKAN_HPP_NAMESPACE::EXTImageCompressionControlSwapchainSpecVersion; + + //=== VK_QCOM_image_processing === + using VULKAN_HPP_NAMESPACE::QCOMImageProcessingExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMImageProcessingSpecVersion; + + //=== VK_EXT_nested_command_buffer === + using VULKAN_HPP_NAMESPACE::EXTNestedCommandBufferExtensionName; + using VULKAN_HPP_NAMESPACE::EXTNestedCommandBufferSpecVersion; + + //=== VK_EXT_external_memory_acquire_unmodified === + using VULKAN_HPP_NAMESPACE::EXTExternalMemoryAcquireUnmodifiedExtensionName; + using VULKAN_HPP_NAMESPACE::EXTExternalMemoryAcquireUnmodifiedSpecVersion; + + //=== VK_EXT_extended_dynamic_state3 === + using VULKAN_HPP_NAMESPACE::EXTExtendedDynamicState3ExtensionName; + using VULKAN_HPP_NAMESPACE::EXTExtendedDynamicState3SpecVersion; + + //=== VK_EXT_subpass_merge_feedback === + using VULKAN_HPP_NAMESPACE::EXTSubpassMergeFeedbackExtensionName; + using VULKAN_HPP_NAMESPACE::EXTSubpassMergeFeedbackSpecVersion; + + //=== VK_LUNARG_direct_driver_loading === + using VULKAN_HPP_NAMESPACE::LUNARGDirectDriverLoadingExtensionName; + using VULKAN_HPP_NAMESPACE::LUNARGDirectDriverLoadingSpecVersion; + + //=== VK_EXT_shader_module_identifier === + using VULKAN_HPP_NAMESPACE::EXTShaderModuleIdentifierExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderModuleIdentifierSpecVersion; + using VULKAN_HPP_NAMESPACE::MaxShaderModuleIdentifierSizeEXT; + + //=== VK_EXT_rasterization_order_attachment_access === + using VULKAN_HPP_NAMESPACE::EXTRasterizationOrderAttachmentAccessExtensionName; + using VULKAN_HPP_NAMESPACE::EXTRasterizationOrderAttachmentAccessSpecVersion; + + //=== VK_NV_optical_flow === + using VULKAN_HPP_NAMESPACE::NVOpticalFlowExtensionName; + using VULKAN_HPP_NAMESPACE::NVOpticalFlowSpecVersion; + + //=== VK_EXT_legacy_dithering === + using VULKAN_HPP_NAMESPACE::EXTLegacyDitheringExtensionName; + using VULKAN_HPP_NAMESPACE::EXTLegacyDitheringSpecVersion; + + //=== VK_EXT_pipeline_protected_access === + using VULKAN_HPP_NAMESPACE::EXTPipelineProtectedAccessExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPipelineProtectedAccessSpecVersion; + +#if defined( 
VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_format_resolve === + using VULKAN_HPP_NAMESPACE::ANDROIDExternalFormatResolveExtensionName; + using VULKAN_HPP_NAMESPACE::ANDROIDExternalFormatResolveSpecVersion; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_KHR_maintenance5 === + using VULKAN_HPP_NAMESPACE::KHRMaintenance5ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRMaintenance5SpecVersion; + + //=== VK_AMD_anti_lag === + using VULKAN_HPP_NAMESPACE::AMDAntiLagExtensionName; + using VULKAN_HPP_NAMESPACE::AMDAntiLagSpecVersion; + + //=== VK_KHR_ray_tracing_position_fetch === + using VULKAN_HPP_NAMESPACE::KHRRayTracingPositionFetchExtensionName; + using VULKAN_HPP_NAMESPACE::KHRRayTracingPositionFetchSpecVersion; + + //=== VK_EXT_shader_object === + using VULKAN_HPP_NAMESPACE::EXTShaderObjectExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderObjectSpecVersion; + + //=== VK_KHR_pipeline_binary === + using VULKAN_HPP_NAMESPACE::KHRPipelineBinaryExtensionName; + using VULKAN_HPP_NAMESPACE::KHRPipelineBinarySpecVersion; + using VULKAN_HPP_NAMESPACE::MaxPipelineBinaryKeySizeKHR; + + //=== VK_QCOM_tile_properties === + using VULKAN_HPP_NAMESPACE::QCOMTilePropertiesExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMTilePropertiesSpecVersion; + + //=== VK_SEC_amigo_profiling === + using VULKAN_HPP_NAMESPACE::SECAmigoProfilingExtensionName; + using VULKAN_HPP_NAMESPACE::SECAmigoProfilingSpecVersion; + + //=== VK_QCOM_multiview_per_view_viewports === + using VULKAN_HPP_NAMESPACE::QCOMMultiviewPerViewViewportsExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMMultiviewPerViewViewportsSpecVersion; + + //=== VK_NV_ray_tracing_invocation_reorder === + using VULKAN_HPP_NAMESPACE::NVRayTracingInvocationReorderExtensionName; + using VULKAN_HPP_NAMESPACE::NVRayTracingInvocationReorderSpecVersion; + + //=== VK_NV_cooperative_vector === + using VULKAN_HPP_NAMESPACE::NVCooperativeVectorExtensionName; + using VULKAN_HPP_NAMESPACE::NVCooperativeVectorSpecVersion; + + //=== VK_NV_extended_sparse_address_space === + using VULKAN_HPP_NAMESPACE::NVExtendedSparseAddressSpaceExtensionName; + using VULKAN_HPP_NAMESPACE::NVExtendedSparseAddressSpaceSpecVersion; + + //=== VK_EXT_mutable_descriptor_type === + using VULKAN_HPP_NAMESPACE::EXTMutableDescriptorTypeExtensionName; + using VULKAN_HPP_NAMESPACE::EXTMutableDescriptorTypeSpecVersion; + + //=== VK_EXT_legacy_vertex_attributes === + using VULKAN_HPP_NAMESPACE::EXTLegacyVertexAttributesExtensionName; + using VULKAN_HPP_NAMESPACE::EXTLegacyVertexAttributesSpecVersion; + + //=== VK_EXT_layer_settings === + using VULKAN_HPP_NAMESPACE::EXTLayerSettingsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTLayerSettingsSpecVersion; + + //=== VK_ARM_shader_core_builtins === + using VULKAN_HPP_NAMESPACE::ARMShaderCoreBuiltinsExtensionName; + using VULKAN_HPP_NAMESPACE::ARMShaderCoreBuiltinsSpecVersion; + + //=== VK_EXT_pipeline_library_group_handles === + using VULKAN_HPP_NAMESPACE::EXTPipelineLibraryGroupHandlesExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPipelineLibraryGroupHandlesSpecVersion; + + //=== VK_EXT_dynamic_rendering_unused_attachments === + using VULKAN_HPP_NAMESPACE::EXTDynamicRenderingUnusedAttachmentsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDynamicRenderingUnusedAttachmentsSpecVersion; + + //=== VK_NV_low_latency2 === + using VULKAN_HPP_NAMESPACE::NVLowLatency2ExtensionName; + using VULKAN_HPP_NAMESPACE::NVLowLatency2SpecVersion; + + //=== VK_KHR_cooperative_matrix === + using VULKAN_HPP_NAMESPACE::KHRCooperativeMatrixExtensionName; + using 
VULKAN_HPP_NAMESPACE::KHRCooperativeMatrixSpecVersion; + + //=== VK_QCOM_multiview_per_view_render_areas === + using VULKAN_HPP_NAMESPACE::QCOMMultiviewPerViewRenderAreasExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMMultiviewPerViewRenderAreasSpecVersion; + + //=== VK_KHR_compute_shader_derivatives === + using VULKAN_HPP_NAMESPACE::KHRComputeShaderDerivativesExtensionName; + using VULKAN_HPP_NAMESPACE::KHRComputeShaderDerivativesSpecVersion; + + //=== VK_KHR_video_decode_av1 === + using VULKAN_HPP_NAMESPACE::KHRVideoDecodeAv1ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoDecodeAv1SpecVersion; + using VULKAN_HPP_NAMESPACE::MaxVideoAv1ReferencesPerFrameKHR; + + //=== VK_KHR_video_encode_av1 === + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeAv1ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeAv1SpecVersion; + + //=== VK_KHR_video_maintenance1 === + using VULKAN_HPP_NAMESPACE::KHRVideoMaintenance1ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoMaintenance1SpecVersion; + + //=== VK_NV_per_stage_descriptor_set === + using VULKAN_HPP_NAMESPACE::NVPerStageDescriptorSetExtensionName; + using VULKAN_HPP_NAMESPACE::NVPerStageDescriptorSetSpecVersion; + + //=== VK_QCOM_image_processing2 === + using VULKAN_HPP_NAMESPACE::QCOMImageProcessing2ExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMImageProcessing2SpecVersion; + + //=== VK_QCOM_filter_cubic_weights === + using VULKAN_HPP_NAMESPACE::QCOMFilterCubicWeightsExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMFilterCubicWeightsSpecVersion; + + //=== VK_QCOM_ycbcr_degamma === + using VULKAN_HPP_NAMESPACE::QCOMYcbcrDegammaExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMYcbcrDegammaSpecVersion; + + //=== VK_QCOM_filter_cubic_clamp === + using VULKAN_HPP_NAMESPACE::QCOMFilterCubicClampExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMFilterCubicClampSpecVersion; + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + using VULKAN_HPP_NAMESPACE::EXTAttachmentFeedbackLoopDynamicStateExtensionName; + using VULKAN_HPP_NAMESPACE::EXTAttachmentFeedbackLoopDynamicStateSpecVersion; + + //=== VK_KHR_vertex_attribute_divisor === + using VULKAN_HPP_NAMESPACE::KHRVertexAttributeDivisorExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVertexAttributeDivisorSpecVersion; + + //=== VK_KHR_load_store_op_none === + using VULKAN_HPP_NAMESPACE::KHRLoadStoreOpNoneExtensionName; + using VULKAN_HPP_NAMESPACE::KHRLoadStoreOpNoneSpecVersion; + + //=== VK_KHR_shader_float_controls2 === + using VULKAN_HPP_NAMESPACE::KHRShaderFloatControls2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderFloatControls2SpecVersion; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + using VULKAN_HPP_NAMESPACE::QNXExternalMemoryScreenBufferExtensionName; + using VULKAN_HPP_NAMESPACE::QNXExternalMemoryScreenBufferSpecVersion; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_MSFT_layered_driver === + using VULKAN_HPP_NAMESPACE::MSFTLayeredDriverExtensionName; + using VULKAN_HPP_NAMESPACE::MSFTLayeredDriverSpecVersion; + + //=== VK_KHR_index_type_uint8 === + using VULKAN_HPP_NAMESPACE::KHRIndexTypeUint8ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRIndexTypeUint8SpecVersion; + + //=== VK_KHR_line_rasterization === + using VULKAN_HPP_NAMESPACE::KHRLineRasterizationExtensionName; + using VULKAN_HPP_NAMESPACE::KHRLineRasterizationSpecVersion; + + //=== VK_KHR_calibrated_timestamps === + using VULKAN_HPP_NAMESPACE::KHRCalibratedTimestampsExtensionName; + using VULKAN_HPP_NAMESPACE::KHRCalibratedTimestampsSpecVersion; + + //=== 
VK_KHR_shader_expect_assume === + using VULKAN_HPP_NAMESPACE::KHRShaderExpectAssumeExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderExpectAssumeSpecVersion; + + //=== VK_KHR_maintenance6 === + using VULKAN_HPP_NAMESPACE::KHRMaintenance6ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRMaintenance6SpecVersion; + + //=== VK_NV_descriptor_pool_overallocation === + using VULKAN_HPP_NAMESPACE::NVDescriptorPoolOverallocationExtensionName; + using VULKAN_HPP_NAMESPACE::NVDescriptorPoolOverallocationSpecVersion; + + //=== VK_QCOM_tile_memory_heap === + using VULKAN_HPP_NAMESPACE::QCOMTileMemoryHeapExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMTileMemoryHeapSpecVersion; + + //=== VK_NV_display_stereo === + using VULKAN_HPP_NAMESPACE::NVDisplayStereoExtensionName; + using VULKAN_HPP_NAMESPACE::NVDisplayStereoSpecVersion; + + //=== VK_KHR_video_encode_quantization_map === + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeQuantizationMapExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeQuantizationMapSpecVersion; + + //=== VK_NV_raw_access_chains === + using VULKAN_HPP_NAMESPACE::NVRawAccessChainsExtensionName; + using VULKAN_HPP_NAMESPACE::NVRawAccessChainsSpecVersion; + + //=== VK_NV_external_compute_queue === + using VULKAN_HPP_NAMESPACE::NVExternalComputeQueueExtensionName; + using VULKAN_HPP_NAMESPACE::NVExternalComputeQueueSpecVersion; + + //=== VK_KHR_shader_relaxed_extended_instruction === + using VULKAN_HPP_NAMESPACE::KHRShaderRelaxedExtendedInstructionExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderRelaxedExtendedInstructionSpecVersion; + + //=== VK_NV_command_buffer_inheritance === + using VULKAN_HPP_NAMESPACE::NVCommandBufferInheritanceExtensionName; + using VULKAN_HPP_NAMESPACE::NVCommandBufferInheritanceSpecVersion; + + //=== VK_KHR_maintenance7 === + using VULKAN_HPP_NAMESPACE::KHRMaintenance7ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRMaintenance7SpecVersion; + + //=== VK_NV_shader_atomic_float16_vector === + using VULKAN_HPP_NAMESPACE::NVShaderAtomicFloat16VectorExtensionName; + using VULKAN_HPP_NAMESPACE::NVShaderAtomicFloat16VectorSpecVersion; + + //=== VK_EXT_shader_replicated_composites === + using VULKAN_HPP_NAMESPACE::EXTShaderReplicatedCompositesExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderReplicatedCompositesSpecVersion; + + //=== VK_NV_ray_tracing_validation === + using VULKAN_HPP_NAMESPACE::NVRayTracingValidationExtensionName; + using VULKAN_HPP_NAMESPACE::NVRayTracingValidationSpecVersion; + + //=== VK_NV_cluster_acceleration_structure === + using VULKAN_HPP_NAMESPACE::NVClusterAccelerationStructureExtensionName; + using VULKAN_HPP_NAMESPACE::NVClusterAccelerationStructureSpecVersion; + + //=== VK_NV_partitioned_acceleration_structure === + using VULKAN_HPP_NAMESPACE::NVPartitionedAccelerationStructureExtensionName; + using VULKAN_HPP_NAMESPACE::NVPartitionedAccelerationStructureSpecVersion; + using VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructurePartitionIndexGlobalNV; + + //=== VK_EXT_device_generated_commands === + using VULKAN_HPP_NAMESPACE::EXTDeviceGeneratedCommandsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDeviceGeneratedCommandsSpecVersion; + + //=== VK_KHR_maintenance8 === + using VULKAN_HPP_NAMESPACE::KHRMaintenance8ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRMaintenance8SpecVersion; + + //=== VK_MESA_image_alignment_control === + using VULKAN_HPP_NAMESPACE::MESAImageAlignmentControlExtensionName; + using VULKAN_HPP_NAMESPACE::MESAImageAlignmentControlSpecVersion; + + //=== VK_EXT_depth_clamp_control === + using 
VULKAN_HPP_NAMESPACE::EXTDepthClampControlExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDepthClampControlSpecVersion; + + //=== VK_KHR_video_maintenance2 === + using VULKAN_HPP_NAMESPACE::KHRVideoMaintenance2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoMaintenance2SpecVersion; + + //=== VK_HUAWEI_hdr_vivid === + using VULKAN_HPP_NAMESPACE::HUAWEIHdrVividExtensionName; + using VULKAN_HPP_NAMESPACE::HUAWEIHdrVividSpecVersion; + + //=== VK_NV_cooperative_matrix2 === + using VULKAN_HPP_NAMESPACE::NVCooperativeMatrix2ExtensionName; + using VULKAN_HPP_NAMESPACE::NVCooperativeMatrix2SpecVersion; + + //=== VK_ARM_pipeline_opacity_micromap === + using VULKAN_HPP_NAMESPACE::ARMPipelineOpacityMicromapExtensionName; + using VULKAN_HPP_NAMESPACE::ARMPipelineOpacityMicromapSpecVersion; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + using VULKAN_HPP_NAMESPACE::EXTExternalMemoryMetalExtensionName; + using VULKAN_HPP_NAMESPACE::EXTExternalMemoryMetalSpecVersion; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_depth_clamp_zero_one === + using VULKAN_HPP_NAMESPACE::KHRDepthClampZeroOneExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDepthClampZeroOneSpecVersion; + + //=== VK_EXT_vertex_attribute_robustness === + using VULKAN_HPP_NAMESPACE::EXTVertexAttributeRobustnessExtensionName; + using VULKAN_HPP_NAMESPACE::EXTVertexAttributeRobustnessSpecVersion; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_present_metering === + using VULKAN_HPP_NAMESPACE::NVPresentMeteringExtensionName; + using VULKAN_HPP_NAMESPACE::NVPresentMeteringSpecVersion; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_fragment_density_map_offset === + using VULKAN_HPP_NAMESPACE::EXTFragmentDensityMapOffsetExtensionName; + using VULKAN_HPP_NAMESPACE::EXTFragmentDensityMapOffsetSpecVersion; + + //======================== + //=== CONSTEXPR VALUEs === + //======================== + using VULKAN_HPP_NAMESPACE::HeaderVersion; + using VULKAN_HPP_NAMESPACE::Use64BitPtrDefines; + + //========================= + //=== CONSTEXPR CALLEEs === + //========================= + using VULKAN_HPP_NAMESPACE::apiVersionMajor; + using VULKAN_HPP_NAMESPACE::apiVersionMinor; + using VULKAN_HPP_NAMESPACE::apiVersionPatch; + using VULKAN_HPP_NAMESPACE::apiVersionVariant; + using VULKAN_HPP_NAMESPACE::makeApiVersion; + using VULKAN_HPP_NAMESPACE::makeVersion; + using VULKAN_HPP_NAMESPACE::versionMajor; + using VULKAN_HPP_NAMESPACE::versionMinor; + using VULKAN_HPP_NAMESPACE::versionPatch; + + //========================== + //=== CONSTEXPR CALLERs === + //========================== + using VULKAN_HPP_NAMESPACE::ApiVersion; + using VULKAN_HPP_NAMESPACE::ApiVersion10; + using VULKAN_HPP_NAMESPACE::ApiVersion11; + using VULKAN_HPP_NAMESPACE::ApiVersion12; + using VULKAN_HPP_NAMESPACE::ApiVersion13; + using VULKAN_HPP_NAMESPACE::ApiVersion14; + using VULKAN_HPP_NAMESPACE::HeaderVersionComplete; + + //==================== + //=== FUNCPOINTERs === + //==================== + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_NAMESPACE::PFN_AllocationFunction; + using VULKAN_HPP_NAMESPACE::PFN_FreeFunction; + using VULKAN_HPP_NAMESPACE::PFN_InternalAllocationNotification; + using VULKAN_HPP_NAMESPACE::PFN_InternalFreeNotification; + using VULKAN_HPP_NAMESPACE::PFN_ReallocationFunction; + using VULKAN_HPP_NAMESPACE::PFN_VoidFunction; + + //=== VK_EXT_debug_report === + using VULKAN_HPP_NAMESPACE::PFN_DebugReportCallbackEXT; + + //=== VK_EXT_debug_utils === + using 
VULKAN_HPP_NAMESPACE::PFN_DebugUtilsMessengerCallbackEXT; + + //=== VK_EXT_device_memory_report === + using VULKAN_HPP_NAMESPACE::PFN_DeviceMemoryReportCallbackEXT; + + //=== VK_LUNARG_direct_driver_loading === + using VULKAN_HPP_NAMESPACE::PFN_GetInstanceProcAddrLUNARG; + + //=============== + //=== STRUCTs === + //=============== + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_NAMESPACE::AllocationCallbacks; + using VULKAN_HPP_NAMESPACE::ApplicationInfo; + using VULKAN_HPP_NAMESPACE::AttachmentDescription; + using VULKAN_HPP_NAMESPACE::AttachmentReference; + using VULKAN_HPP_NAMESPACE::BaseInStructure; + using VULKAN_HPP_NAMESPACE::BaseOutStructure; + using VULKAN_HPP_NAMESPACE::BindSparseInfo; + using VULKAN_HPP_NAMESPACE::BufferCopy; + using VULKAN_HPP_NAMESPACE::BufferCreateInfo; + using VULKAN_HPP_NAMESPACE::BufferImageCopy; + using VULKAN_HPP_NAMESPACE::BufferMemoryBarrier; + using VULKAN_HPP_NAMESPACE::BufferViewCreateInfo; + using VULKAN_HPP_NAMESPACE::ClearAttachment; + using VULKAN_HPP_NAMESPACE::ClearColorValue; + using VULKAN_HPP_NAMESPACE::ClearDepthStencilValue; + using VULKAN_HPP_NAMESPACE::ClearRect; + using VULKAN_HPP_NAMESPACE::ClearValue; + using VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo; + using VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo; + using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo; + using VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo; + using VULKAN_HPP_NAMESPACE::ComponentMapping; + using VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo; + using VULKAN_HPP_NAMESPACE::CopyDescriptorSet; + using VULKAN_HPP_NAMESPACE::DescriptorBufferInfo; + using VULKAN_HPP_NAMESPACE::DescriptorImageInfo; + using VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo; + using VULKAN_HPP_NAMESPACE::DescriptorPoolSize; + using VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo; + using VULKAN_HPP_NAMESPACE::DeviceCreateInfo; + using VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo; + using VULKAN_HPP_NAMESPACE::DispatchIndirectCommand; + using VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand; + using VULKAN_HPP_NAMESPACE::DrawIndirectCommand; + using VULKAN_HPP_NAMESPACE::EventCreateInfo; + using VULKAN_HPP_NAMESPACE::ExtensionProperties; + using VULKAN_HPP_NAMESPACE::Extent2D; + using VULKAN_HPP_NAMESPACE::Extent3D; + using VULKAN_HPP_NAMESPACE::FenceCreateInfo; + using VULKAN_HPP_NAMESPACE::FormatProperties; + using VULKAN_HPP_NAMESPACE::FramebufferCreateInfo; + using VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo; + using VULKAN_HPP_NAMESPACE::ImageBlit; + using VULKAN_HPP_NAMESPACE::ImageCopy; + using VULKAN_HPP_NAMESPACE::ImageCreateInfo; + using VULKAN_HPP_NAMESPACE::ImageFormatProperties; + using VULKAN_HPP_NAMESPACE::ImageMemoryBarrier; + using VULKAN_HPP_NAMESPACE::ImageResolve; + using VULKAN_HPP_NAMESPACE::ImageSubresource; + using VULKAN_HPP_NAMESPACE::ImageSubresourceLayers; + using VULKAN_HPP_NAMESPACE::ImageSubresourceRange; + using VULKAN_HPP_NAMESPACE::ImageViewCreateInfo; + using VULKAN_HPP_NAMESPACE::InstanceCreateInfo; + using VULKAN_HPP_NAMESPACE::LayerProperties; + using VULKAN_HPP_NAMESPACE::MappedMemoryRange; + using VULKAN_HPP_NAMESPACE::MemoryAllocateInfo; + using VULKAN_HPP_NAMESPACE::MemoryBarrier; + using VULKAN_HPP_NAMESPACE::MemoryHeap; + using VULKAN_HPP_NAMESPACE::MemoryRequirements; + using VULKAN_HPP_NAMESPACE::MemoryType; + using VULKAN_HPP_NAMESPACE::Offset2D; + using VULKAN_HPP_NAMESPACE::Offset3D; + using 
VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties; + using VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne; + using VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState; + using VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PushConstantRange; + using VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo; + using VULKAN_HPP_NAMESPACE::QueueFamilyProperties; + using VULKAN_HPP_NAMESPACE::Rect2D; + using VULKAN_HPP_NAMESPACE::RenderPassBeginInfo; + using VULKAN_HPP_NAMESPACE::RenderPassCreateInfo; + using VULKAN_HPP_NAMESPACE::SamplerCreateInfo; + using VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo; + using VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo; + using VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo; + using VULKAN_HPP_NAMESPACE::SparseImageFormatProperties; + using VULKAN_HPP_NAMESPACE::SparseImageMemoryBind; + using VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo; + using VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements; + using VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo; + using VULKAN_HPP_NAMESPACE::SparseMemoryBind; + using VULKAN_HPP_NAMESPACE::SpecializationInfo; + using VULKAN_HPP_NAMESPACE::SpecializationMapEntry; + using VULKAN_HPP_NAMESPACE::StencilOpState; + using VULKAN_HPP_NAMESPACE::SubmitInfo; + using VULKAN_HPP_NAMESPACE::SubpassDependency; + using VULKAN_HPP_NAMESPACE::SubpassDescription; + using VULKAN_HPP_NAMESPACE::SubresourceLayout; + using VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription; + using VULKAN_HPP_NAMESPACE::VertexInputBindingDescription; + using VULKAN_HPP_NAMESPACE::Viewport; + using VULKAN_HPP_NAMESPACE::WriteDescriptorSet; + + //=== VK_VERSION_1_1 === + using VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo; + using VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfoKHR; + using VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo; + using VULKAN_HPP_NAMESPACE::BindBufferMemoryInfoKHR; + using VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo; + using VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfoKHR; + using VULKAN_HPP_NAMESPACE::BindImageMemoryInfo; + using VULKAN_HPP_NAMESPACE::BindImageMemoryInfoKHR; + using VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo; + using VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfoKHR; + using VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2; + using VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2KHR; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupportKHR; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo; + using 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntryKHR; + using VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo; + using VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfoKHR; + using VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo; + using VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfoKHR; + using VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo; + using VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo; + using VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfoKHR; + using VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo; + using VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfoKHR; + using VULKAN_HPP_NAMESPACE::DeviceQueueInfo2; + using VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo; + using VULKAN_HPP_NAMESPACE::ExportFenceCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo; + using VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoKHR; + using VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo; + using VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ExternalBufferProperties; + using VULKAN_HPP_NAMESPACE::ExternalBufferPropertiesKHR; + using VULKAN_HPP_NAMESPACE::ExternalFenceProperties; + using VULKAN_HPP_NAMESPACE::ExternalFencePropertiesKHR; + using VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties; + using VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesKHR; + using VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo; + using VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo; + using VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ExternalMemoryProperties; + using VULKAN_HPP_NAMESPACE::ExternalMemoryPropertiesKHR; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties; + using VULKAN_HPP_NAMESPACE::ExternalSemaphorePropertiesKHR; + using VULKAN_HPP_NAMESPACE::FormatProperties2; + using VULKAN_HPP_NAMESPACE::FormatProperties2KHR; + using VULKAN_HPP_NAMESPACE::ImageFormatProperties2; + using VULKAN_HPP_NAMESPACE::ImageFormatProperties2KHR; + using VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2; + using VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2KHR; + using VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo; + using VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfoKHR; + using VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2; + using VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2KHR; + using VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo; + using VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference; + using VULKAN_HPP_NAMESPACE::InputAttachmentAspectReferenceKHR; + using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo; + using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfoKHR; + using VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo; + using VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfoKHR; + using VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements; + using VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirementsKHR; + using VULKAN_HPP_NAMESPACE::MemoryRequirements2; + using VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR; + using VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo; + using 
VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfoKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfoKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfoKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfoKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2KHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceIDPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2KHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3PropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2KHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2KHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParameterFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2KHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointerFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointerFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo; + using VULKAN_HPP_NAMESPACE::QueueFamilyProperties2; + using VULKAN_HPP_NAMESPACE::QueueFamilyProperties2KHR; + using VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo; + using VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo; + using VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatPropertiesKHR; + 
using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfoKHR; + using VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2; + using VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2KHR; + using VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2; + using VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2KHR; + + //=== VK_VERSION_1_2 === + using VULKAN_HPP_NAMESPACE::AttachmentDescription2; + using VULKAN_HPP_NAMESPACE::AttachmentDescription2KHR; + using VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout; + using VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayoutKHR; + using VULKAN_HPP_NAMESPACE::AttachmentReference2; + using VULKAN_HPP_NAMESPACE::AttachmentReference2KHR; + using VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout; + using VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayoutKHR; + using VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo; + using VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoEXT; + using VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoKHR; + using VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo; + using VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ConformanceVersion; + using VULKAN_HPP_NAMESPACE::ConformanceVersionKHR; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo; + using VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfoEXT; + using VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport; + using VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupportEXT; + using VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo; + using VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfoKHR; + using VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo; + using VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfoKHR; + using VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo; + using VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo; + using VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo; + using VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo; + using VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfoKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolvePropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFloat16Int8FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties; + using 
VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphorePropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeaturesKHR; + using VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo; + using VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfoKHR; + using VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2; + using VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2KHR; + using VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo; + using VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo; + using VULKAN_HPP_NAMESPACE::SemaphoreSignalInfoKHR; + using VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo; + using VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo; + using VULKAN_HPP_NAMESPACE::SemaphoreWaitInfoKHR; + using VULKAN_HPP_NAMESPACE::SubpassBeginInfo; + using VULKAN_HPP_NAMESPACE::SubpassBeginInfoKHR; + using VULKAN_HPP_NAMESPACE::SubpassDependency2; + using VULKAN_HPP_NAMESPACE::SubpassDependency2KHR; + using VULKAN_HPP_NAMESPACE::SubpassDescription2; + using VULKAN_HPP_NAMESPACE::SubpassDescription2KHR; + using VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve; + using VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolveKHR; + using VULKAN_HPP_NAMESPACE::SubpassEndInfo; + using VULKAN_HPP_NAMESPACE::SubpassEndInfoKHR; + using VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo; + using VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfoKHR; + + //=== VK_VERSION_1_3 === + using VULKAN_HPP_NAMESPACE::BlitImageInfo2; + using 
VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR; + using VULKAN_HPP_NAMESPACE::BufferCopy2; + using VULKAN_HPP_NAMESPACE::BufferCopy2KHR; + using VULKAN_HPP_NAMESPACE::BufferImageCopy2; + using VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR; + using VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2; + using VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR; + using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo; + using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfoKHR; + using VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo; + using VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR; + using VULKAN_HPP_NAMESPACE::CopyBufferInfo2; + using VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR; + using VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2; + using VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR; + using VULKAN_HPP_NAMESPACE::CopyImageInfo2; + using VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR; + using VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2; + using VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR; + using VULKAN_HPP_NAMESPACE::DependencyInfo; + using VULKAN_HPP_NAMESPACE::DependencyInfoKHR; + using VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo; + using VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements; + using VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirementsKHR; + using VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements; + using VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirementsKHR; + using VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo; + using VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::FormatProperties3; + using VULKAN_HPP_NAMESPACE::FormatProperties3KHR; + using VULKAN_HPP_NAMESPACE::ImageBlit2; + using VULKAN_HPP_NAMESPACE::ImageBlit2KHR; + using VULKAN_HPP_NAMESPACE::ImageCopy2; + using VULKAN_HPP_NAMESPACE::ImageCopy2KHR; + using VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2; + using VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR; + using VULKAN_HPP_NAMESPACE::ImageResolve2; + using VULKAN_HPP_NAMESPACE::ImageResolve2KHR; + using VULKAN_HPP_NAMESPACE::MemoryBarrier2; + using VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4PropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + using 
VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PipelineCreationFeedback; + using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT; + using VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo; + using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo; + using VULKAN_HPP_NAMESPACE::RenderingAttachmentInfoKHR; + using VULKAN_HPP_NAMESPACE::RenderingInfo; + using VULKAN_HPP_NAMESPACE::RenderingInfoKHR; + using VULKAN_HPP_NAMESPACE::ResolveImageInfo2; + using VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR; + using VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo; + using VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR; + using VULKAN_HPP_NAMESPACE::ShaderRequiredSubgroupSizeCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::SubmitInfo2; + using VULKAN_HPP_NAMESPACE::SubmitInfo2KHR; + using VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock; + using VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT; + + //=== VK_VERSION_1_4 === + using VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo; + using VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR; + using VULKAN_HPP_NAMESPACE::BindMemoryStatus; + using VULKAN_HPP_NAMESPACE::BindMemoryStatusKHR; + using VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfo; + using VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR; + using VULKAN_HPP_NAMESPACE::CopyImageToImageInfo; + using VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT; + using VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo; + using 
VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT; + using VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo; + using VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT; + using VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo; + using VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR; + using VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfo; + using VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQuery; + using VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT; + using VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo; + using VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT; + using VULKAN_HPP_NAMESPACE::ImageSubresource2; + using VULKAN_HPP_NAMESPACE::ImageSubresource2EXT; + using VULKAN_HPP_NAMESPACE::ImageSubresource2KHR; + using VULKAN_HPP_NAMESPACE::ImageToMemoryCopy; + using VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT; + using VULKAN_HPP_NAMESPACE::MemoryMapInfo; + using VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR; + using VULKAN_HPP_NAMESPACE::MemoryToImageCopy; + using VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT; + using VULKAN_HPP_NAMESPACE::MemoryUnmapInfo; + using VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingLocalReadFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Properties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Properties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6PropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessProperties; + using 
VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderExpectAssumeFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderExpectAssumeFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloatControls2Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloatControls2FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupRotateFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupRotateFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Properties; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::PushConstantsInfo; + using VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR; + using VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo; + using VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR; + using VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo; + using VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR; + using VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityProperties; + using VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesEXT; + using VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR; + using VULKAN_HPP_NAMESPACE::RenderingAreaInfo; + using VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR; + using VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo; + using VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfoKHR; + using VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo; + using VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR; + using VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySize; + using VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT; + using VULKAN_HPP_NAMESPACE::SubresourceLayout2; + using VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT; + using VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR; + using VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription; + using VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT; + using VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionKHR; + + //=== VK_KHR_surface === + using VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::SurfaceFormatKHR; + + //=== VK_KHR_swapchain === + using VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR; + using VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR; + 
using VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR; + using VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::PresentInfoKHR; + using VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR; + + //=== VK_KHR_display === + using VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR; + using VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR; + using VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR; + using VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR; + using VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR; + + //=== VK_KHR_display_swapchain === + using VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR; + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + using VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + using VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + using VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + using VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + using VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + using VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT; + + //=== VK_AMD_rasterization_order === + using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD; + + //=== VK_EXT_debug_marker === + using VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT; + using VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT; + using VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT; + + //=== VK_KHR_video_queue === + using VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR; + using VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR; + using VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR; + using VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR; + using VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR; + + //=== VK_KHR_video_decode_queue === + using VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR; + + //=== VK_NV_dedicated_allocation === + using VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV; + using 
VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV; + using VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV; + + //=== VK_EXT_transform_feedback === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT; + + //=== VK_NVX_binary_import === + using VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX; + using VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX; + using VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX; + using VULKAN_HPP_NAMESPACE::CuModuleTexturingModeCreateInfoNVX; + + //=== VK_NVX_image_view_handle === + using VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX; + using VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX; + + //=== VK_KHR_video_encode_h264 === + using VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264GopRemainingFrameInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264PictureInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264QualityLevelPropertiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersFeedbackInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersGetInfoKHR; + + //=== VK_KHR_video_encode_h265 === + using VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265GopRemainingFrameInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265PictureInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265QualityLevelPropertiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersFeedbackInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersGetInfoKHR; + + //=== VK_KHR_video_decode_h264 === + using VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR; + + //=== VK_AMD_texture_gather_bias_lod === + using 
VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD; + + //=== VK_AMD_shader_info === + using VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD; + using VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + using VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_corner_sampled_image === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV; + + //=== VK_NV_external_memory_capabilities === + using VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV; + + //=== VK_NV_external_memory === + using VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV; + using VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + using VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV; + using VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_win32_keyed_mutex === + using VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_validation_flags === + using VULKAN_HPP_NAMESPACE::ValidationFlagsEXT; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + using VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN; +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_EXT_astc_decode_mode === + using VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + using VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR; + using VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR; + using VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR; + using VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + using VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR; + using VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR; + using VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_keyed_mutex === + using VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + using VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR; + using VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR; + using VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR; + using VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + using VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR; + using VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR; + + //=== VK_EXT_conditional_rendering === + using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT; + using VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT; + + //=== VK_KHR_incremental_present === + using VULKAN_HPP_NAMESPACE::PresentRegionKHR; + using VULKAN_HPP_NAMESPACE::PresentRegionsKHR; + using VULKAN_HPP_NAMESPACE::RectLayerKHR; + + //=== VK_NV_clip_space_w_scaling === + using VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV; + using VULKAN_HPP_NAMESPACE::ViewportWScalingNV; + + //=== VK_EXT_display_surface_counter === + using 
VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT; + + //=== VK_EXT_display_control === + using VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT; + using VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT; + using VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT; + using VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT; + + //=== VK_GOOGLE_display_timing === + using VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE; + using VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE; + using VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE; + using VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE; + + //=== VK_NVX_multiview_per_view_attributes === + using VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + + //=== VK_NV_viewport_swizzle === + using VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV; + using VULKAN_HPP_NAMESPACE::ViewportSwizzleNV; + + //=== VK_EXT_discard_rectangles === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT; + using VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT; + + //=== VK_EXT_conservative_rasterization === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT; + + //=== VK_EXT_depth_clip_enable === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT; + + //=== VK_EXT_hdr_metadata === + using VULKAN_HPP_NAMESPACE::HdrMetadataEXT; + using VULKAN_HPP_NAMESPACE::XYColorEXT; + + //=== VK_IMG_relaxed_line_rasterization === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRelaxedLineRasterizationFeaturesIMG; + + //=== VK_KHR_shared_presentable_image === + using VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + using VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR; + using VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR; + using VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + using VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR; + using VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR; + + //=== VK_KHR_performance_query === + using VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR; + using VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR; + using VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR; + + //=== VK_KHR_get_surface_capabilities2 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR; + using VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR; + using VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR; + + //=== VK_KHR_get_display_properties2 === + using VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR; + using VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR; + using VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR; + using VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR; + using VULKAN_HPP_NAMESPACE::DisplayProperties2KHR; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + using VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK; +#endif 
/*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + using VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + using VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID; + using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID; + using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID; + using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID; + using VULKAN_HPP_NAMESPACE::ExternalFormatANDROID; + using VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID; + using VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + using VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX; + using VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX; + using VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX; + using VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX; + using VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX; + using VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_AMD_mixed_attachment_samples === + using VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD; + using VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoNV; + + //=== VK_KHR_shader_bfloat16 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderBfloat16FeaturesKHR; + + //=== VK_EXT_sample_locations === + using VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT; + using VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT; + using VULKAN_HPP_NAMESPACE::SampleLocationEXT; + using VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT; + using VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT; + + //=== VK_EXT_blend_operation_advanced === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT; + + //=== VK_NV_fragment_coverage_to_color === + using VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV; + + //=== VK_KHR_acceleration_structure === + using VULKAN_HPP_NAMESPACE::AabbPositionsKHR; + using VULKAN_HPP_NAMESPACE::AabbPositionsNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR; + using 
VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR; + using VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR; + using VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR; + using VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR; + using VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR; + using VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR; + using VULKAN_HPP_NAMESPACE::TransformMatrixKHR; + using VULKAN_HPP_NAMESPACE::TransformMatrixNV; + using VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR; + + //=== VK_KHR_ray_tracing_pipeline === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR; + using VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR; + using VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR; + + //=== VK_KHR_ray_query === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR; + + //=== VK_NV_framebuffer_mixed_samples === + using VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV; + + //=== VK_NV_shader_sm_builtins === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV; + + //=== VK_EXT_image_drm_format_modifier === + using VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT; + using VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT; + using VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT; + using VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT; + using VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT; + + //=== VK_EXT_validation_cache === + using VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_portability_subset === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_shading_rate_image === + using VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV; + using VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV; + using VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV; + using 
VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV; + using VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV; + + //=== VK_NV_ray_tracing === + using VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV; + using VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV; + using VULKAN_HPP_NAMESPACE::GeometryAABBNV; + using VULKAN_HPP_NAMESPACE::GeometryDataNV; + using VULKAN_HPP_NAMESPACE::GeometryNV; + using VULKAN_HPP_NAMESPACE::GeometryTrianglesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV; + using VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV; + using VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV; + using VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV; + + //=== VK_NV_representative_fragment_test === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV; + using VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV; + + //=== VK_EXT_filter_cubic === + using VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT; + + //=== VK_EXT_external_memory_host === + using VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT; + using VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT; + + //=== VK_KHR_shader_clock === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR; + + //=== VK_AMD_pipeline_compiler_control === + using VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD; + + //=== VK_AMD_shader_core_properties === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD; + + //=== VK_KHR_video_decode_h265 === + using VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR; + + //=== VK_AMD_memory_overallocation_behavior === + using VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD; + + //=== VK_EXT_vertex_attribute_divisor === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_frame_token === + using VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_mesh_shader === + using VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV; + + //=== VK_NV_shader_image_footprint === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV; + + //=== VK_NV_scissor_exclusive === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV; + using VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV; + + //=== VK_NV_device_diagnostic_checkpoints === + using VULKAN_HPP_NAMESPACE::CheckpointData2NV; + using VULKAN_HPP_NAMESPACE::CheckpointDataNV; + using VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV; + using VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV; + + //=== VK_INTEL_shader_integer_functions2 === + 
using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + + //=== VK_INTEL_performance_query === + using VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceValueINTEL; + using VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL; + using VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL; + + //=== VK_EXT_pci_bus_info === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT; + + //=== VK_AMD_display_native_hdr === + using VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD; + using VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + using VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + using VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_fragment_density_map === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT; + using VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT; + using VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT; + + //=== VK_KHR_fragment_shading_rate === + using VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR; + using VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR; + + //=== VK_AMD_shader_core_properties2 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD; + + //=== VK_AMD_device_coherent_memory === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD; + + //=== VK_EXT_shader_image_atomic_int64 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + + //=== VK_KHR_shader_quad_control === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderQuadControlFeaturesKHR; + + //=== VK_EXT_memory_budget === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT; + + //=== VK_EXT_memory_priority === + using VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT; + + //=== VK_KHR_surface_protected_capabilities === + using VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR; + + //=== VK_NV_dedicated_allocation_image_aliasing === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + + //=== VK_EXT_buffer_device_address === + using VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferAddressFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT; + + //=== VK_EXT_validation_features === + using VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT; + + //=== VK_KHR_present_wait === + using 
VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR; + + //=== VK_NV_cooperative_matrix === + using VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV; + + //=== VK_NV_coverage_reduction_mode === + using VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV; + using VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV; + + //=== VK_EXT_fragment_shader_interlock === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT; + + //=== VK_EXT_ycbcr_image_arrays === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT; + + //=== VK_EXT_provoking_vertex === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + using VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT; + using VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT; + using VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + using VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT; + + //=== VK_EXT_shader_atomic_float === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT; + + //=== VK_EXT_extended_dynamic_state === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT; + + //=== VK_KHR_pipeline_executable_properties === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR; + using VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR; + using VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR; + using VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR; + using VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR; + using VULKAN_HPP_NAMESPACE::PipelineInfoEXT; + using VULKAN_HPP_NAMESPACE::PipelineInfoKHR; + + //=== VK_EXT_map_memory_placed === + using VULKAN_HPP_NAMESPACE::MemoryMapPlacedInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedPropertiesEXT; + + //=== VK_EXT_shader_atomic_float2 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT; + + //=== VK_EXT_surface_maintenance1 === + using VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT; + using VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT; + using VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT; + + //=== VK_EXT_swapchain_maintenance1 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT; + using VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT; + using VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT; + using VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT; + using VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT; + + //=== VK_NV_device_generated_commands === + using VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV; + using VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV; + using 
VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV; + using VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV; + using VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV; + using VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV; + using VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV; + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV; + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV; + using VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + using VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV; + + //=== VK_NV_inherited_viewport_scissor === + using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV; + + //=== VK_EXT_texel_buffer_alignment === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT; + + //=== VK_QCOM_render_pass_transform === + using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM; + using VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM; + + //=== VK_EXT_depth_bias_control === + using VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT; + using VULKAN_HPP_NAMESPACE::DepthBiasRepresentationInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthBiasControlFeaturesEXT; + + //=== VK_EXT_device_memory_report === + using VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT; + + //=== VK_EXT_robustness2 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT; + + //=== VK_EXT_custom_border_color === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT; + using VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT; + + //=== VK_KHR_pipeline_library === + using VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR; + + //=== VK_NV_present_barrier === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV; + using VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV; + using VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV; + + //=== VK_KHR_present_id === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PresentIdKHR; + + //=== VK_KHR_video_encode_queue === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR; + using VULKAN_HPP_NAMESPACE::QueryPoolVideoEncodeFeedbackCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR; + + //=== VK_NV_device_diagnostics_config === + using VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV; + using 
VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + using VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV; + using VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV; + using VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_QCOM_tile_shading === + using VULKAN_HPP_NAMESPACE::DispatchTileInfoQCOM; + using VULKAN_HPP_NAMESPACE::PerTileBeginInfoQCOM; + using VULKAN_HPP_NAMESPACE::PerTileEndInfoQCOM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTileShadingFeaturesQCOM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTileShadingPropertiesQCOM; + using VULKAN_HPP_NAMESPACE::RenderPassTileShadingCreateInfoQCOM; + + //=== VK_NV_low_latency === + using VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + using VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT; + using VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT; + using VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT; + using VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT; + using VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT; + using VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT; + using VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT; + using VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT; + using VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT; + using VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT; + using VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_descriptor_buffer === + using VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT; + using VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT; + using VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT; + using VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT; + using VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT; + using VULKAN_HPP_NAMESPACE::DescriptorDataEXT; + using VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT; + using VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT; + using VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT; + using VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT; + using VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT; + + //=== VK_EXT_graphics_pipeline_library === + using VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; + + //=== VK_AMD_shader_early_and_late_fragment_tests === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + + //=== VK_KHR_fragment_shader_barycentric === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + + //=== 
VK_KHR_shader_subgroup_uniform_control_flow === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + + //=== VK_NV_fragment_shading_rate_enums === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + using VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV; + + //=== VK_NV_ray_tracing_motion_blur === + using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV; + using VULKAN_HPP_NAMESPACE::SRTDataNV; + + //=== VK_EXT_mesh_shader === + using VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT; + + //=== VK_EXT_ycbcr_2plane_444_formats === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + + //=== VK_EXT_fragment_density_map2 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT; + + //=== VK_QCOM_rotated_copy_commands === + using VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM; + + //=== VK_KHR_workgroup_memory_explicit_layout === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + + //=== VK_EXT_image_compression_control === + using VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT; + using VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT; + + //=== VK_EXT_attachment_feedback_loop_layout === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; + + //=== VK_EXT_4444_formats === + using VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT; + + //=== VK_EXT_device_fault === + using VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT; + using VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT; + using VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT; + using VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT; + using VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT; + + //=== VK_EXT_rgba10x6_formats === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT; + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + using VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_vertex_input_dynamic_state === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT; + using VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT; + using VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT; + + //=== VK_EXT_physical_device_drm === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT; + + //=== VK_EXT_device_address_binding_report === + using VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT; + + //=== 
VK_EXT_depth_clip_control === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT; + + //=== VK_EXT_primitive_topology_list_restart === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + + //=== VK_EXT_present_mode_fifo_latest_ready === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + using VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + using VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + using VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA; + using VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA; + using VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI; + using VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI; + + //=== VK_HUAWEI_invocation_mask === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI; + + //=== VK_NV_external_memory_rdma === + using VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV; + + //=== VK_EXT_pipeline_properties === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT; + + //=== VK_EXT_frame_boundary === + using VULKAN_HPP_NAMESPACE::FrameBoundaryEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFrameBoundaryFeaturesEXT; + + //=== VK_EXT_multisampled_render_to_single_sampled === + using VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + using VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT; + + //=== VK_EXT_extended_dynamic_state2 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + using VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT; + + //=== VK_EXT_primitives_generated_query === + using 
VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; + + //=== VK_KHR_ray_tracing_maintenance1 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR; + using VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR; + + //=== VK_EXT_image_view_min_lod === + using VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT; + + //=== VK_EXT_multi_draw === + using VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT; + using VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT; + + //=== VK_EXT_image_2d_view_of_3d === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT; + + //=== VK_EXT_shader_tile_image === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT; + + //=== VK_EXT_opacity_micromap === + using VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT; + using VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT; + using VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT; + using VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT; + using VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT; + using VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT; + using VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::MicromapTriangleEXT; + using VULKAN_HPP_NAMESPACE::MicromapUsageEXT; + using VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === + using VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesDisplacementMicromapNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_HUAWEI_cluster_culling_shader === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; + + //=== VK_EXT_border_color_swizzle === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT; + using VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT; + + //=== VK_EXT_pageable_device_local_memory === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + + //=== VK_ARM_shader_core_properties === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM; + + //=== VK_ARM_scheduling_controls === + using VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM; + + //=== VK_EXT_image_sliced_view_of_3d === + using VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + + //=== VK_VALVE_descriptor_set_host_mapping === + using VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE; + using 
VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; + + //=== VK_EXT_non_seamless_cube_map === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + + //=== VK_ARM_render_pass_striped === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedFeaturesARM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedPropertiesARM; + using VULKAN_HPP_NAMESPACE::RenderPassStripeBeginInfoARM; + using VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM; + using VULKAN_HPP_NAMESPACE::RenderPassStripeSubmitInfoARM; + + //=== VK_NV_copy_memory_indirect === + using VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV; + using VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV; + + //=== VK_NV_memory_decompression === + using VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV; + + //=== VK_NV_device_generated_commands_compute === + using VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV; + using VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + using VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV; + + //=== VK_NV_ray_tracing_linear_swept_spheres === + using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryLinearSweptSpheresDataNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometrySpheresDataNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV; + + //=== VK_NV_linear_color_attachment === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV; + + //=== VK_KHR_shader_maximal_reconvergence === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR; + + //=== VK_EXT_image_compression_control_swapchain === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + + //=== VK_QCOM_image_processing === + using VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM; + + //=== VK_EXT_nested_command_buffer === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferPropertiesEXT; + + //=== VK_EXT_external_memory_acquire_unmodified === + using VULKAN_HPP_NAMESPACE::ExternalMemoryAcquireUnmodifiedEXT; + + //=== VK_EXT_extended_dynamic_state3 === + using VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT; + using VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT; + + //=== VK_EXT_subpass_merge_feedback === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + using VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT; + using VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT; + using VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT; + + //=== VK_LUNARG_direct_driver_loading === + using 
VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG; + using VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG; + + //=== VK_EXT_shader_module_identifier === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT; + + //=== VK_EXT_rasterization_order_attachment_access === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + + //=== VK_NV_optical_flow === + using VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV; + + //=== VK_EXT_legacy_dithering === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_format_resolve === + using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatResolvePropertiesANDROID; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolveFeaturesANDROID; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolvePropertiesANDROID; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_AMD_anti_lag === + using VULKAN_HPP_NAMESPACE::AntiLagDataAMD; + using VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceAntiLagFeaturesAMD; + + //=== VK_KHR_ray_tracing_position_fetch === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR; + + //=== VK_EXT_shader_object === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT; + using VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT; + + //=== VK_KHR_pipeline_binary === + using VULKAN_HPP_NAMESPACE::DevicePipelineBinaryInternalCacheControlKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR; + using VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR; + using VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR; + using VULKAN_HPP_NAMESPACE::PipelineBinaryInfoKHR; + using VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR; + using VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR; + using VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR; + + //=== VK_QCOM_tile_properties === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM; + using VULKAN_HPP_NAMESPACE::TilePropertiesQCOM; + + //=== VK_SEC_amigo_profiling === + using VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC; + + //=== VK_QCOM_multiview_per_view_viewports === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + + //=== VK_NV_ray_tracing_invocation_reorder === + using 
VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV; + + //=== VK_NV_cooperative_vector === + using VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV; + using VULKAN_HPP_NAMESPACE::CooperativeVectorPropertiesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeVectorFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeVectorPropertiesNV; + + //=== VK_NV_extended_sparse_address_space === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpacePropertiesNV; + + //=== VK_EXT_mutable_descriptor_type === + using VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE; + using VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT; + using VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE; + + //=== VK_EXT_legacy_vertex_attributes === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesPropertiesEXT; + + //=== VK_EXT_layer_settings === + using VULKAN_HPP_NAMESPACE::LayerSettingEXT; + using VULKAN_HPP_NAMESPACE::LayerSettingsCreateInfoEXT; + + //=== VK_ARM_shader_core_builtins === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM; + + //=== VK_EXT_pipeline_library_group_handles === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + + //=== VK_EXT_dynamic_rendering_unused_attachments === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + + //=== VK_NV_low_latency2 === + using VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV; + using VULKAN_HPP_NAMESPACE::LatencySleepInfoNV; + using VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV; + using VULKAN_HPP_NAMESPACE::LatencySubmissionPresentIdNV; + using VULKAN_HPP_NAMESPACE::LatencySurfaceCapabilitiesNV; + using VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV; + using VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV; + using VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV; + using VULKAN_HPP_NAMESPACE::SwapchainLatencyCreateInfoNV; + + //=== VK_KHR_cooperative_matrix === + using VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesKHR; + + //=== VK_QCOM_multiview_per_view_render_areas === + using VULKAN_HPP_NAMESPACE::MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + + //=== VK_KHR_compute_shader_derivatives === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesPropertiesKHR; + + //=== VK_KHR_video_decode_av1 === + using VULKAN_HPP_NAMESPACE::VideoDecodeAV1CapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeAV1DpbSlotInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeAV1PictureInfoKHR; + using 
VULKAN_HPP_NAMESPACE::VideoDecodeAV1ProfileInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeAV1SessionParametersCreateInfoKHR; + + //=== VK_KHR_video_encode_av1 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeAV1FeaturesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1DpbSlotInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1GopRemainingFrameInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1PictureInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1ProfileInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1QualityLevelPropertiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlLayerInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionParametersCreateInfoKHR; + + //=== VK_KHR_video_maintenance1 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance1FeaturesKHR; + using VULKAN_HPP_NAMESPACE::VideoInlineQueryInfoKHR; + + //=== VK_NV_per_stage_descriptor_set === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePerStageDescriptorSetFeaturesNV; + + //=== VK_QCOM_image_processing2 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2FeaturesQCOM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2PropertiesQCOM; + using VULKAN_HPP_NAMESPACE::SamplerBlockMatchWindowCreateInfoQCOM; + + //=== VK_QCOM_filter_cubic_weights === + using VULKAN_HPP_NAMESPACE::BlitImageCubicWeightsInfoQCOM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicWeightsFeaturesQCOM; + using VULKAN_HPP_NAMESPACE::SamplerCubicWeightsCreateInfoQCOM; + + //=== VK_QCOM_ycbcr_degamma === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrDegammaFeaturesQCOM; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM; + + //=== VK_QCOM_filter_cubic_clamp === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicClampFeaturesQCOM; + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + using VULKAN_HPP_NAMESPACE::ExternalFormatQNX; + using VULKAN_HPP_NAMESPACE::ImportScreenBufferInfoQNX; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; + using VULKAN_HPP_NAMESPACE::ScreenBufferFormatPropertiesQNX; + using VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_MSFT_layered_driver === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredDriverPropertiesMSFT; + + //=== VK_KHR_calibrated_timestamps === + using VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT; + using VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR; + + //=== VK_KHR_maintenance6 === + using VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT; + using VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT; + + //=== VK_NV_descriptor_pool_overallocation === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + + //=== VK_QCOM_tile_memory_heap === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTileMemoryHeapFeaturesQCOM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTileMemoryHeapPropertiesQCOM; + using VULKAN_HPP_NAMESPACE::TileMemoryBindInfoQCOM; + using 
VULKAN_HPP_NAMESPACE::TileMemoryRequirementsQCOM; + using VULKAN_HPP_NAMESPACE::TileMemorySizeInfoQCOM; + + //=== VK_NV_display_stereo === + using VULKAN_HPP_NAMESPACE::DisplayModeStereoPropertiesNV; + using VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoCreateInfoNV; + + //=== VK_KHR_video_encode_quantization_map === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1QuantizationMapCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264QuantizationMapCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265QuantizationMapCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapSessionParametersCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoFormatAV1QuantizationMapPropertiesKHR; + using VULKAN_HPP_NAMESPACE::VideoFormatH265QuantizationMapPropertiesKHR; + using VULKAN_HPP_NAMESPACE::VideoFormatQuantizationMapPropertiesKHR; + + //=== VK_NV_raw_access_chains === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRawAccessChainsFeaturesNV; + + //=== VK_NV_external_compute_queue === + using VULKAN_HPP_NAMESPACE::ExternalComputeQueueCreateInfoNV; + using VULKAN_HPP_NAMESPACE::ExternalComputeQueueDataParamsNV; + using VULKAN_HPP_NAMESPACE::ExternalComputeQueueDeviceCreateInfoNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalComputeQueuePropertiesNV; + + //=== VK_KHR_shader_relaxed_extended_instruction === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + + //=== VK_NV_command_buffer_inheritance === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCommandBufferInheritanceFeaturesNV; + + //=== VK_KHR_maintenance7 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesListKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiVulkanPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance7FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance7PropertiesKHR; + + //=== VK_NV_shader_atomic_float16_vector === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; + + //=== VK_EXT_shader_replicated_composites === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + + //=== VK_NV_ray_tracing_validation === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingValidationFeaturesNV; + + //=== VK_NV_cluster_acceleration_structure === + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildClustersBottomLevelInfoNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildTriangleClusterInfoNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClustersBottomLevelInputNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureCommandsInfoNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInstantiateClusterInfoNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInfoNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInputNV; + using VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpInputNV; + using 
VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTriangleClusterInputNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterAccelerationStructureFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterAccelerationStructurePropertiesNV; + using VULKAN_HPP_NAMESPACE::RayTracingPipelineClusterAccelerationStructureCreateInfoNV; + using VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV; + + //=== VK_NV_partitioned_acceleration_structure === + using VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureIndirectCommandNV; + using VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureInfoNV; + using VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureFlagsNV; + using VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV; + using VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureUpdateInstanceDataNV; + using VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureWriteInstanceDataNV; + using VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureWritePartitionTranslationDataNV; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePartitionedAccelerationStructureFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePartitionedAccelerationStructurePropertiesNV; + using VULKAN_HPP_NAMESPACE::WriteDescriptorSetPartitionedAccelerationStructureNV; + + //=== VK_EXT_device_generated_commands === + using VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandEXT; + using VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandEXT; + using VULKAN_HPP_NAMESPACE::DrawIndirectCountIndirectCommandEXT; + using VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT; + using VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT; + using VULKAN_HPP_NAMESPACE::GeneratedCommandsPipelineInfoEXT; + using VULKAN_HPP_NAMESPACE::GeneratedCommandsShaderInfoEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsTokenDataEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT; + using VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT; + using VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT; + using VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT; + using VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + using VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT; + using VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT; + + //=== VK_KHR_maintenance8 === + using VULKAN_HPP_NAMESPACE::MemoryBarrierAccessFlags3KHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance8FeaturesKHR; + + //=== VK_MESA_image_alignment_control === + using VULKAN_HPP_NAMESPACE::ImageAlignmentControlCreateInfoMESA; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlFeaturesMESA; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlPropertiesMESA; + + //=== VK_EXT_depth_clamp_control === + using VULKAN_HPP_NAMESPACE::DepthClampRangeEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampControlFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PipelineViewportDepthClampControlCreateInfoEXT; + + //=== 
VK_KHR_video_maintenance2 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance2FeaturesKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeAV1InlineSessionParametersInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH264InlineSessionParametersInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH265InlineSessionParametersInfoKHR; + + //=== VK_HUAWEI_hdr_vivid === + using VULKAN_HPP_NAMESPACE::HdrVividDynamicMetadataHUAWEI; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceHdrVividFeaturesHUAWEI; + + //=== VK_NV_cooperative_matrix2 === + using VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2FeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2PropertiesNV; + + //=== VK_ARM_pipeline_opacity_micromap === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineOpacityMicromapFeaturesARM; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + using VULKAN_HPP_NAMESPACE::ImportMemoryMetalHandleInfoEXT; + using VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT; + using VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_depth_clamp_zero_one === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesKHR; + + //=== VK_EXT_vertex_attribute_robustness === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeRobustnessFeaturesEXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_present_metering === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePresentMeteringFeaturesNV; + using VULKAN_HPP_NAMESPACE::SetPresentConfigNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_fragment_density_map_offset === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + using VULKAN_HPP_NAMESPACE::RenderingEndInfoEXT; + using VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapOffsetEndInfoEXT; + using VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM; + + //=============== + //=== HANDLEs === + //=============== + + using VULKAN_HPP_NAMESPACE::isVulkanHandleType; + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_NAMESPACE::Buffer; + using VULKAN_HPP_NAMESPACE::BufferView; + using VULKAN_HPP_NAMESPACE::CommandBuffer; + using VULKAN_HPP_NAMESPACE::CommandPool; + using VULKAN_HPP_NAMESPACE::DescriptorPool; + using VULKAN_HPP_NAMESPACE::DescriptorSet; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + using VULKAN_HPP_NAMESPACE::Device; + using VULKAN_HPP_NAMESPACE::DeviceMemory; + using VULKAN_HPP_NAMESPACE::Event; + using VULKAN_HPP_NAMESPACE::Fence; + using VULKAN_HPP_NAMESPACE::Framebuffer; + using VULKAN_HPP_NAMESPACE::Image; + using VULKAN_HPP_NAMESPACE::ImageView; + using VULKAN_HPP_NAMESPACE::Instance; + using VULKAN_HPP_NAMESPACE::PhysicalDevice; + using VULKAN_HPP_NAMESPACE::Pipeline; + using VULKAN_HPP_NAMESPACE::PipelineCache; + using VULKAN_HPP_NAMESPACE::PipelineLayout; + using VULKAN_HPP_NAMESPACE::QueryPool; + using VULKAN_HPP_NAMESPACE::Queue; + using VULKAN_HPP_NAMESPACE::RenderPass; + using VULKAN_HPP_NAMESPACE::Sampler; + using VULKAN_HPP_NAMESPACE::Semaphore; + using VULKAN_HPP_NAMESPACE::ShaderModule; + + //=== VK_VERSION_1_1 
=== + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + + //=== VK_VERSION_1_3 === + using VULKAN_HPP_NAMESPACE::PrivateDataSlot; + + //=== VK_KHR_surface === + using VULKAN_HPP_NAMESPACE::SurfaceKHR; + + //=== VK_KHR_swapchain === + using VULKAN_HPP_NAMESPACE::SwapchainKHR; + + //=== VK_KHR_display === + using VULKAN_HPP_NAMESPACE::DisplayKHR; + using VULKAN_HPP_NAMESPACE::DisplayModeKHR; + + //=== VK_EXT_debug_report === + using VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + + //=== VK_KHR_video_queue === + using VULKAN_HPP_NAMESPACE::VideoSessionKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR; + + //=== VK_NVX_binary_import === + using VULKAN_HPP_NAMESPACE::CuFunctionNVX; + using VULKAN_HPP_NAMESPACE::CuModuleNVX; + + //=== VK_EXT_debug_utils === + using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; + + //=== VK_KHR_acceleration_structure === + using VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + + //=== VK_EXT_validation_cache === + using VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + + //=== VK_NV_ray_tracing === + using VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + + //=== VK_INTEL_performance_query === + using VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; + + //=== VK_KHR_deferred_host_operations === + using VULKAN_HPP_NAMESPACE::DeferredOperationKHR; + + //=== VK_NV_device_generated_commands === + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + using VULKAN_HPP_NAMESPACE::CudaFunctionNV; + using VULKAN_HPP_NAMESPACE::CudaModuleNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + using VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + using VULKAN_HPP_NAMESPACE::MicromapEXT; + + //=== VK_NV_optical_flow === + using VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV; + + //=== VK_EXT_shader_object === + using VULKAN_HPP_NAMESPACE::ShaderEXT; + + //=== VK_KHR_pipeline_binary === + using VULKAN_HPP_NAMESPACE::PipelineBinaryKHR; + + //=== VK_NV_external_compute_queue === + using VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV; + + //=== VK_EXT_device_generated_commands === + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT; + using VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT; + + //====================== + //=== UNIQUE HANDLEs === + //====================== + +#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_NAMESPACE::UniqueBuffer; + using VULKAN_HPP_NAMESPACE::UniqueBufferView; + using VULKAN_HPP_NAMESPACE::UniqueCommandBuffer; + using VULKAN_HPP_NAMESPACE::UniqueCommandPool; + using VULKAN_HPP_NAMESPACE::UniqueDescriptorPool; + using VULKAN_HPP_NAMESPACE::UniqueDescriptorSet; + using VULKAN_HPP_NAMESPACE::UniqueDescriptorSetLayout; + using VULKAN_HPP_NAMESPACE::UniqueDevice; + using VULKAN_HPP_NAMESPACE::UniqueDeviceMemory; + using VULKAN_HPP_NAMESPACE::UniqueEvent; + using VULKAN_HPP_NAMESPACE::UniqueFence; + using VULKAN_HPP_NAMESPACE::UniqueFramebuffer; + using VULKAN_HPP_NAMESPACE::UniqueImage; + using VULKAN_HPP_NAMESPACE::UniqueImageView; + using VULKAN_HPP_NAMESPACE::UniqueInstance; + using VULKAN_HPP_NAMESPACE::UniquePipeline; + using VULKAN_HPP_NAMESPACE::UniquePipelineCache; + using VULKAN_HPP_NAMESPACE::UniquePipelineLayout; + using VULKAN_HPP_NAMESPACE::UniqueQueryPool; + using VULKAN_HPP_NAMESPACE::UniqueRenderPass; + 
using VULKAN_HPP_NAMESPACE::UniqueSampler; + using VULKAN_HPP_NAMESPACE::UniqueSemaphore; + using VULKAN_HPP_NAMESPACE::UniqueShaderModule; + + //=== VK_VERSION_1_1 === + using VULKAN_HPP_NAMESPACE::UniqueDescriptorUpdateTemplate; + using VULKAN_HPP_NAMESPACE::UniqueSamplerYcbcrConversion; + + //=== VK_VERSION_1_3 === + using VULKAN_HPP_NAMESPACE::UniquePrivateDataSlot; + + //=== VK_KHR_surface === + using VULKAN_HPP_NAMESPACE::UniqueSurfaceKHR; + + //=== VK_KHR_swapchain === + using VULKAN_HPP_NAMESPACE::UniqueSwapchainKHR; + + //=== VK_KHR_display === + using VULKAN_HPP_NAMESPACE::UniqueDisplayKHR; + + //=== VK_EXT_debug_report === + using VULKAN_HPP_NAMESPACE::UniqueDebugReportCallbackEXT; + + //=== VK_KHR_video_queue === + using VULKAN_HPP_NAMESPACE::UniqueVideoSessionKHR; + using VULKAN_HPP_NAMESPACE::UniqueVideoSessionParametersKHR; + + //=== VK_NVX_binary_import === + using VULKAN_HPP_NAMESPACE::UniqueCuFunctionNVX; + using VULKAN_HPP_NAMESPACE::UniqueCuModuleNVX; + + //=== VK_EXT_debug_utils === + using VULKAN_HPP_NAMESPACE::UniqueDebugUtilsMessengerEXT; + + //=== VK_KHR_acceleration_structure === + using VULKAN_HPP_NAMESPACE::UniqueAccelerationStructureKHR; + + //=== VK_EXT_validation_cache === + using VULKAN_HPP_NAMESPACE::UniqueValidationCacheEXT; + + //=== VK_NV_ray_tracing === + using VULKAN_HPP_NAMESPACE::UniqueAccelerationStructureNV; + + //=== VK_INTEL_performance_query === + using VULKAN_HPP_NAMESPACE::UniquePerformanceConfigurationINTEL; + + //=== VK_KHR_deferred_host_operations === + using VULKAN_HPP_NAMESPACE::UniqueDeferredOperationKHR; + + //=== VK_NV_device_generated_commands === + using VULKAN_HPP_NAMESPACE::UniqueIndirectCommandsLayoutNV; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + using VULKAN_HPP_NAMESPACE::UniqueCudaFunctionNV; + using VULKAN_HPP_NAMESPACE::UniqueCudaModuleNV; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + using VULKAN_HPP_NAMESPACE::UniqueBufferCollectionFUCHSIA; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + using VULKAN_HPP_NAMESPACE::UniqueMicromapEXT; + + //=== VK_NV_optical_flow === + using VULKAN_HPP_NAMESPACE::UniqueOpticalFlowSessionNV; + + //=== VK_EXT_shader_object === + using VULKAN_HPP_NAMESPACE::UniqueShaderEXT; + + //=== VK_KHR_pipeline_binary === + using VULKAN_HPP_NAMESPACE::UniquePipelineBinaryKHR; + + //=== VK_NV_external_compute_queue === + using VULKAN_HPP_NAMESPACE::UniqueExternalComputeQueueNV; + + //=== VK_EXT_device_generated_commands === + using VULKAN_HPP_NAMESPACE::UniqueHandleTraits; + using VULKAN_HPP_NAMESPACE::UniqueIndirectCommandsLayoutEXT; + using VULKAN_HPP_NAMESPACE::UniqueIndirectExecutionSetEXT; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + //====================== + //=== SHARED HANDLEs === + //====================== + +#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_NAMESPACE::SharedBuffer; + using VULKAN_HPP_NAMESPACE::SharedBufferView; + using VULKAN_HPP_NAMESPACE::SharedCommandBuffer; + using VULKAN_HPP_NAMESPACE::SharedCommandPool; + using VULKAN_HPP_NAMESPACE::SharedDescriptorPool; + using VULKAN_HPP_NAMESPACE::SharedDescriptorSet; + using VULKAN_HPP_NAMESPACE::SharedDescriptorSetLayout; + using VULKAN_HPP_NAMESPACE::SharedDevice; + using VULKAN_HPP_NAMESPACE::SharedDeviceMemory; + using VULKAN_HPP_NAMESPACE::SharedEvent; + using VULKAN_HPP_NAMESPACE::SharedFence; + using VULKAN_HPP_NAMESPACE::SharedFramebuffer; + 
using VULKAN_HPP_NAMESPACE::SharedImage; + using VULKAN_HPP_NAMESPACE::SharedImageView; + using VULKAN_HPP_NAMESPACE::SharedInstance; + using VULKAN_HPP_NAMESPACE::SharedPhysicalDevice; + using VULKAN_HPP_NAMESPACE::SharedPipeline; + using VULKAN_HPP_NAMESPACE::SharedPipelineCache; + using VULKAN_HPP_NAMESPACE::SharedPipelineLayout; + using VULKAN_HPP_NAMESPACE::SharedQueryPool; + using VULKAN_HPP_NAMESPACE::SharedQueue; + using VULKAN_HPP_NAMESPACE::SharedRenderPass; + using VULKAN_HPP_NAMESPACE::SharedSampler; + using VULKAN_HPP_NAMESPACE::SharedSemaphore; + using VULKAN_HPP_NAMESPACE::SharedShaderModule; + + //=== VK_VERSION_1_1 === + using VULKAN_HPP_NAMESPACE::SharedDescriptorUpdateTemplate; + using VULKAN_HPP_NAMESPACE::SharedSamplerYcbcrConversion; + + //=== VK_VERSION_1_3 === + using VULKAN_HPP_NAMESPACE::SharedPrivateDataSlot; + + //=== VK_KHR_surface === + using VULKAN_HPP_NAMESPACE::SharedSurfaceKHR; + + //=== VK_KHR_swapchain === + using VULKAN_HPP_NAMESPACE::SharedSwapchainKHR; + + //=== VK_KHR_display === + using VULKAN_HPP_NAMESPACE::SharedDisplayKHR; + using VULKAN_HPP_NAMESPACE::SharedDisplayModeKHR; + + //=== VK_EXT_debug_report === + using VULKAN_HPP_NAMESPACE::SharedDebugReportCallbackEXT; + + //=== VK_KHR_video_queue === + using VULKAN_HPP_NAMESPACE::SharedVideoSessionKHR; + using VULKAN_HPP_NAMESPACE::SharedVideoSessionParametersKHR; + + //=== VK_NVX_binary_import === + using VULKAN_HPP_NAMESPACE::SharedCuFunctionNVX; + using VULKAN_HPP_NAMESPACE::SharedCuModuleNVX; + + //=== VK_EXT_debug_utils === + using VULKAN_HPP_NAMESPACE::SharedDebugUtilsMessengerEXT; + + //=== VK_KHR_acceleration_structure === + using VULKAN_HPP_NAMESPACE::SharedAccelerationStructureKHR; + + //=== VK_EXT_validation_cache === + using VULKAN_HPP_NAMESPACE::SharedValidationCacheEXT; + + //=== VK_NV_ray_tracing === + using VULKAN_HPP_NAMESPACE::SharedAccelerationStructureNV; + + //=== VK_INTEL_performance_query === + using VULKAN_HPP_NAMESPACE::SharedPerformanceConfigurationINTEL; + + //=== VK_KHR_deferred_host_operations === + using VULKAN_HPP_NAMESPACE::SharedDeferredOperationKHR; + + //=== VK_NV_device_generated_commands === + using VULKAN_HPP_NAMESPACE::SharedIndirectCommandsLayoutNV; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + using VULKAN_HPP_NAMESPACE::SharedCudaFunctionNV; + using VULKAN_HPP_NAMESPACE::SharedCudaModuleNV; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + using VULKAN_HPP_NAMESPACE::SharedBufferCollectionFUCHSIA; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + using VULKAN_HPP_NAMESPACE::SharedMicromapEXT; + + //=== VK_NV_optical_flow === + using VULKAN_HPP_NAMESPACE::SharedOpticalFlowSessionNV; + + //=== VK_EXT_shader_object === + using VULKAN_HPP_NAMESPACE::SharedShaderEXT; + + //=== VK_KHR_pipeline_binary === + using VULKAN_HPP_NAMESPACE::SharedPipelineBinaryKHR; + + //=== VK_NV_external_compute_queue === + using VULKAN_HPP_NAMESPACE::SharedExternalComputeQueueNV; + + //=== VK_EXT_device_generated_commands === + using VULKAN_HPP_NAMESPACE::SharedHandleTraits; + using VULKAN_HPP_NAMESPACE::SharedIndirectCommandsLayoutEXT; + using VULKAN_HPP_NAMESPACE::SharedIndirectExecutionSetEXT; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + //=========================== + //=== COMMAND Definitions === + //=========================== + using VULKAN_HPP_NAMESPACE::createInstance; + using VULKAN_HPP_NAMESPACE::enumerateInstanceExtensionProperties; + 
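+  // --- Illustrative note (editorial, not part of the generated module) ---
+  // The "COMMAND Definitions" group that starts here re-exports Vulkan-Hpp's
+  // free, instance-independent entry points. A minimal sketch of how they are
+  // typically called once the module is imported (error handling omitted;
+  // "appInfo" is a placeholder name, not something defined by this file):
+  //
+  //   uint32_t apiVersion = vk::enumerateInstanceVersion();
+  //   auto     layers     = vk::enumerateInstanceLayerProperties();
+  //   auto     extensions = vk::enumerateInstanceExtensionProperties();
+  //
+  //   vk::ApplicationInfo    appInfo( "demo", 1, "engine", 1, apiVersion );
+  //   vk::InstanceCreateInfo createInfo( {}, &appInfo );
+  //   vk::Instance           instance = vk::createInstance( createInfo );
+  //   // ... use the instance ...
+  //   instance.destroy();
+  // ------------------------------------------------------------------------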
using VULKAN_HPP_NAMESPACE::enumerateInstanceLayerProperties; + using VULKAN_HPP_NAMESPACE::enumerateInstanceVersion; + +#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) + using VULKAN_HPP_NAMESPACE::createInstanceUnique; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + using VULKAN_HPP_NAMESPACE::StructExtends; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL + namespace detail + { + using VULKAN_HPP_NAMESPACE::detail::DynamicLoader; + } // namespace detail +#endif /*VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL*/ + + //===================== + //=== Format Traits === + //===================== + using VULKAN_HPP_NAMESPACE::blockExtent; + using VULKAN_HPP_NAMESPACE::blockSize; + using VULKAN_HPP_NAMESPACE::compatibilityClass; + using VULKAN_HPP_NAMESPACE::componentBits; + using VULKAN_HPP_NAMESPACE::componentCount; + using VULKAN_HPP_NAMESPACE::componentName; + using VULKAN_HPP_NAMESPACE::componentNumericFormat; + using VULKAN_HPP_NAMESPACE::componentPlaneIndex; + using VULKAN_HPP_NAMESPACE::componentsAreCompressed; + using VULKAN_HPP_NAMESPACE::compressionScheme; + using VULKAN_HPP_NAMESPACE::isCompressed; + using VULKAN_HPP_NAMESPACE::packed; + using VULKAN_HPP_NAMESPACE::planeCompatibleFormat; + using VULKAN_HPP_NAMESPACE::planeCount; + using VULKAN_HPP_NAMESPACE::planeHeightDivisor; + using VULKAN_HPP_NAMESPACE::planeWidthDivisor; + using VULKAN_HPP_NAMESPACE::texelsPerBlock; + + //====================================== + //=== Extension inspection functions === + //====================================== + using VULKAN_HPP_NAMESPACE::getDeprecatedExtensions; + using VULKAN_HPP_NAMESPACE::getDeviceExtensions; + using VULKAN_HPP_NAMESPACE::getExtensionDepends; + using VULKAN_HPP_NAMESPACE::getExtensionDeprecatedBy; + using VULKAN_HPP_NAMESPACE::getExtensionObsoletedBy; + using VULKAN_HPP_NAMESPACE::getExtensionPromotedTo; + using VULKAN_HPP_NAMESPACE::getInstanceExtensions; + using VULKAN_HPP_NAMESPACE::getObsoletedExtensions; + using VULKAN_HPP_NAMESPACE::getPromotedExtensions; + using VULKAN_HPP_NAMESPACE::isDeprecatedExtension; + using VULKAN_HPP_NAMESPACE::isDeviceExtension; + using VULKAN_HPP_NAMESPACE::isInstanceExtension; + using VULKAN_HPP_NAMESPACE::isObsoletedExtension; + using VULKAN_HPP_NAMESPACE::isPromotedExtension; + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + namespace VULKAN_HPP_RAII_NAMESPACE + { + //====================== + //=== RAII HARDCODED === + //====================== + + using VULKAN_HPP_RAII_NAMESPACE::Context; + + namespace detail + { + using VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher; + using VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher; + using VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher; + } // namespace detail + + //==================== + //=== RAII HANDLEs === + //==================== + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_RAII_NAMESPACE::Buffer; + using VULKAN_HPP_RAII_NAMESPACE::BufferView; + using VULKAN_HPP_RAII_NAMESPACE::CommandBuffer; + using VULKAN_HPP_RAII_NAMESPACE::CommandBuffers; + using VULKAN_HPP_RAII_NAMESPACE::CommandPool; + using VULKAN_HPP_RAII_NAMESPACE::DescriptorPool; + using VULKAN_HPP_RAII_NAMESPACE::DescriptorSet; + using VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout; + using VULKAN_HPP_RAII_NAMESPACE::DescriptorSets; + using VULKAN_HPP_RAII_NAMESPACE::Device; + using VULKAN_HPP_RAII_NAMESPACE::DeviceMemory; + using VULKAN_HPP_RAII_NAMESPACE::Event; + using VULKAN_HPP_RAII_NAMESPACE::Fence; + using 
VULKAN_HPP_RAII_NAMESPACE::Framebuffer; + using VULKAN_HPP_RAII_NAMESPACE::Image; + using VULKAN_HPP_RAII_NAMESPACE::ImageView; + using VULKAN_HPP_RAII_NAMESPACE::Instance; + using VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice; + using VULKAN_HPP_RAII_NAMESPACE::PhysicalDevices; + using VULKAN_HPP_RAII_NAMESPACE::Pipeline; + using VULKAN_HPP_RAII_NAMESPACE::PipelineCache; + using VULKAN_HPP_RAII_NAMESPACE::PipelineLayout; + using VULKAN_HPP_RAII_NAMESPACE::Pipelines; + using VULKAN_HPP_RAII_NAMESPACE::QueryPool; + using VULKAN_HPP_RAII_NAMESPACE::Queue; + using VULKAN_HPP_RAII_NAMESPACE::RenderPass; + using VULKAN_HPP_RAII_NAMESPACE::Sampler; + using VULKAN_HPP_RAII_NAMESPACE::Semaphore; + using VULKAN_HPP_RAII_NAMESPACE::ShaderModule; + + //=== VK_VERSION_1_1 === + using VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate; + using VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion; + + //=== VK_VERSION_1_3 === + using VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot; + + //=== VK_KHR_surface === + using VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR; + + //=== VK_KHR_swapchain === + using VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR; + using VULKAN_HPP_RAII_NAMESPACE::SwapchainKHRs; + + //=== VK_KHR_display === + using VULKAN_HPP_RAII_NAMESPACE::DisplayKHR; + using VULKAN_HPP_RAII_NAMESPACE::DisplayKHRs; + using VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR; + + //=== VK_EXT_debug_report === + using VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT; + + //=== VK_KHR_video_queue === + using VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR; + using VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR; + + //=== VK_NVX_binary_import === + using VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX; + using VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX; + + //=== VK_EXT_debug_utils === + using VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT; + + //=== VK_KHR_acceleration_structure === + using VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR; + + //=== VK_EXT_validation_cache === + using VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT; + + //=== VK_NV_ray_tracing === + using VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV; + + //=== VK_INTEL_performance_query === + using VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL; + + //=== VK_KHR_deferred_host_operations === + using VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR; + + //=== VK_NV_device_generated_commands === + using VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + using VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV; + using VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + using VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + using VULKAN_HPP_RAII_NAMESPACE::MicromapEXT; + + //=== VK_NV_optical_flow === + using VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV; + + //=== VK_EXT_shader_object === + using VULKAN_HPP_RAII_NAMESPACE::ShaderEXT; + using VULKAN_HPP_RAII_NAMESPACE::ShaderEXTs; + + //=== VK_KHR_pipeline_binary === + using VULKAN_HPP_RAII_NAMESPACE::PipelineBinaryKHR; + using VULKAN_HPP_RAII_NAMESPACE::PipelineBinaryKHRs; + + //=== VK_NV_external_compute_queue === + using VULKAN_HPP_RAII_NAMESPACE::ExternalComputeQueueNV; + + //=== VK_EXT_device_generated_commands === + using VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT; + using VULKAN_HPP_RAII_NAMESPACE::IndirectExecutionSetEXT; + + } 
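+  // --- Illustrative note (editorial, not part of the generated module) ---
+  // The RAII handle types re-exported in the block above wrap the plain
+  // handles with automatic destruction. A minimal usage sketch, assuming the
+  // "vulkan_hpp" module (or vulkan_raii.hpp) is available; identifiers such
+  // as "appInfo" and "instanceCI" are placeholders:
+  //
+  //   vk::raii::Context         context;           // loads the loader entry point
+  //   vk::ApplicationInfo       appInfo( "demo", 1, "engine", 1, vk::ApiVersion13 );
+  //   vk::InstanceCreateInfo    instanceCI( {}, &appInfo );
+  //   vk::raii::Instance        instance( context, instanceCI );
+  //   vk::raii::PhysicalDevices gpus( instance );   // enumerates all physical devices
+  //   // ... select a queue family, build a vk::raii::Device, etc.; every
+  //   // vk::raii::* object destroys its handle when it goes out of scope.
+  // ------------------------------------------------------------------------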
// namespace VULKAN_HPP_RAII_NAMESPACE +#endif +} // namespace VULKAN_HPP_NAMESPACE + +export namespace std +{ + + //======================================= + //=== HASH specialization for Flags types === + //======================================= + + template + struct hash>; + + //======================================== + //=== HASH specializations for handles === + //======================================== + + //=== VK_VERSION_1_0 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_VERSION_1_1 === + template <> + struct hash; + template <> + struct hash; + + //=== VK_VERSION_1_3 === + template <> + struct hash; + + //=== VK_KHR_surface === + template <> + struct hash; + + //=== VK_KHR_swapchain === + template <> + struct hash; + + //=== VK_KHR_display === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_debug_report === + template <> + struct hash; + + //=== VK_KHR_video_queue === + template <> + struct hash; + template <> + struct hash; + + //=== VK_NVX_binary_import === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_debug_utils === + template <> + struct hash; + + //=== VK_KHR_acceleration_structure === + template <> + struct hash; + + //=== VK_EXT_validation_cache === + template <> + struct hash; + + //=== VK_NV_ray_tracing === + template <> + struct hash; + + //=== VK_INTEL_performance_query === + template <> + struct hash; + + //=== VK_KHR_deferred_host_operations === + template <> + struct hash; + + //=== VK_NV_device_generated_commands === + template <> + struct hash; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + template <> + struct hash; + template <> + struct hash; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + template <> + struct hash; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + template <> + struct hash; + + //=== VK_NV_optical_flow === + template <> + struct hash; + + //=== VK_EXT_shader_object === + template <> + struct hash; + + //=== VK_KHR_pipeline_binary === + template <> + struct hash; + + //=== VK_NV_external_compute_queue === + template <> + struct hash; + + //=== VK_EXT_device_generated_commands === + template <> + struct hash; + template <> + struct hash; + + //======================================== + //=== HASH specializations for structs === + //======================================== + + //=== VK_VERSION_1_0 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + 
template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_VERSION_1_1 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + 
struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_VERSION_1_2 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_VERSION_1_3 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + 
template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_VERSION_1_4 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_surface === + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_swapchain === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_display === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_display_swapchain === + template <> + struct hash; + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + template <> + struct hash; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + template <> + struct hash; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + template <> + struct hash; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + template <> + struct hash; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( 
VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + template <> + struct hash; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + template <> + struct hash; + + //=== VK_AMD_rasterization_order === + template <> + struct hash; + + //=== VK_EXT_debug_marker === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_video_queue === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_video_decode_queue === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_dedicated_allocation === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_transform_feedback === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NVX_binary_import === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NVX_image_view_handle === + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_video_encode_h264 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_video_encode_h265 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_video_decode_h264 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_AMD_texture_gather_bias_lod === + template <> + struct hash; + + //=== VK_AMD_shader_info === + template <> + struct hash; + template <> + struct hash; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + template <> + struct hash; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_corner_sampled_image === + template <> + struct hash; + + //=== VK_NV_external_memory_capabilities === + template <> + struct hash; + + //=== VK_NV_external_memory === + template <> + struct hash; + template <> + struct hash; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + template <> + struct hash; + template <> + struct hash; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== 
VK_NV_win32_keyed_mutex === + template <> + struct hash; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_device_group === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_validation_flags === + template <> + struct hash; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + template <> + struct hash; +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_EXT_astc_decode_mode === + template <> + struct hash; + template <> + struct hash; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_keyed_mutex === + template <> + struct hash; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_conditional_rendering === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_incremental_present === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_clip_space_w_scaling === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_display_surface_counter === + template <> + struct hash; + + //=== VK_EXT_display_control === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_GOOGLE_display_timing === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NVX_multiview_per_view_attributes === + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_viewport_swizzle === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_discard_rectangles === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_conservative_rasterization === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_depth_clip_enable === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_hdr_metadata === + template <> + struct hash; + template <> + struct hash; + + //=== VK_IMG_relaxed_line_rasterization === + template <> + struct hash; + + //=== VK_KHR_shared_presentable_image === + template <> + struct hash; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_performance_query === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== 
VK_KHR_get_surface_capabilities2 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_get_display_properties2 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + template <> + struct hash; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + template <> + struct hash; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_AMD_mixed_attachment_samples === + template <> + struct hash; + + //=== VK_KHR_shader_bfloat16 === + template <> + struct hash; + + //=== VK_EXT_sample_locations === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_blend_operation_advanced === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_fragment_coverage_to_color === + template <> + struct hash; + + //=== VK_KHR_acceleration_structure === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_ray_tracing_pipeline === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_ray_query === + template <> + struct hash; + + //=== VK_NV_framebuffer_mixed_samples === + template <> + struct hash; + + //=== VK_NV_shader_sm_builtins === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_image_drm_format_modifier === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== 
VK_EXT_validation_cache === + template <> + struct hash; + template <> + struct hash; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_portability_subset === + template <> + struct hash; + template <> + struct hash; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_shading_rate_image === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_ray_tracing === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_representative_fragment_test === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_filter_cubic === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_external_memory_host === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_shader_clock === + template <> + struct hash; + + //=== VK_AMD_pipeline_compiler_control === + template <> + struct hash; + + //=== VK_AMD_shader_core_properties === + template <> + struct hash; + + //=== VK_KHR_video_decode_h265 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_AMD_memory_overallocation_behavior === + template <> + struct hash; + + //=== VK_EXT_vertex_attribute_divisor === + template <> + struct hash; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_frame_token === + template <> + struct hash; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_mesh_shader === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_shader_image_footprint === + template <> + struct hash; + + //=== VK_NV_scissor_exclusive === + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_device_diagnostic_checkpoints === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_INTEL_shader_integer_functions2 === + template <> + struct hash; + + //=== VK_INTEL_performance_query === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_pci_bus_info === + template <> + struct hash; + + //=== VK_AMD_display_native_hdr === + template <> + struct hash; + template <> + struct hash; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + template <> + struct hash; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + template <> + struct hash; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_fragment_density_map === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_fragment_shading_rate === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct 
hash; + + //=== VK_AMD_shader_core_properties2 === + template <> + struct hash; + + //=== VK_AMD_device_coherent_memory === + template <> + struct hash; + + //=== VK_EXT_shader_image_atomic_int64 === + template <> + struct hash; + + //=== VK_KHR_shader_quad_control === + template <> + struct hash; + + //=== VK_EXT_memory_budget === + template <> + struct hash; + + //=== VK_EXT_memory_priority === + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_surface_protected_capabilities === + template <> + struct hash; + + //=== VK_NV_dedicated_allocation_image_aliasing === + template <> + struct hash; + + //=== VK_EXT_buffer_device_address === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_validation_features === + template <> + struct hash; + + //=== VK_KHR_present_wait === + template <> + struct hash; + + //=== VK_NV_cooperative_matrix === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_coverage_reduction_mode === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_fragment_shader_interlock === + template <> + struct hash; + + //=== VK_EXT_ycbcr_image_arrays === + template <> + struct hash; + + //=== VK_EXT_provoking_vertex === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + template <> + struct hash; + + //=== VK_EXT_shader_atomic_float === + template <> + struct hash; + + //=== VK_EXT_extended_dynamic_state === + template <> + struct hash; + + //=== VK_KHR_pipeline_executable_properties === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_map_memory_placed === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_shader_atomic_float2 === + template <> + struct hash; + + //=== VK_EXT_surface_maintenance1 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_swapchain_maintenance1 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_device_generated_commands === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_inherited_viewport_scissor === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_texel_buffer_alignment === + template <> + struct hash; + + //=== VK_QCOM_render_pass_transform === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_depth_bias_control === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_device_memory_report === + template <> + struct hash; + template <> + struct hash; + template <> + 
struct hash; + + //=== VK_EXT_robustness2 === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_custom_border_color === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_pipeline_library === + template <> + struct hash; + + //=== VK_NV_present_barrier === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_present_id === + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_video_encode_queue === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_device_diagnostics_config === + template <> + struct hash; + template <> + struct hash; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_QCOM_tile_shading === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_low_latency === + template <> + struct hash; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_descriptor_buffer === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_graphics_pipeline_library === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_AMD_shader_early_and_late_fragment_tests === + template <> + struct hash; + + //=== VK_KHR_fragment_shader_barycentric === + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_shader_subgroup_uniform_control_flow === + template <> + struct hash; + + //=== VK_NV_fragment_shading_rate_enums === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_ray_tracing_motion_blur === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_mesh_shader === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_ycbcr_2plane_444_formats === + template <> + struct hash; + + //=== VK_EXT_fragment_density_map2 === + template <> + struct hash; + template <> + struct hash; + + //=== 
VK_QCOM_rotated_copy_commands === + template <> + struct hash; + + //=== VK_KHR_workgroup_memory_explicit_layout === + template <> + struct hash; + + //=== VK_EXT_image_compression_control === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_attachment_feedback_loop_layout === + template <> + struct hash; + + //=== VK_EXT_4444_formats === + template <> + struct hash; + + //=== VK_EXT_device_fault === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_rgba10x6_formats === + template <> + struct hash; + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + template <> + struct hash; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_vertex_input_dynamic_state === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_physical_device_drm === + template <> + struct hash; + + //=== VK_EXT_device_address_binding_report === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_depth_clip_control === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_primitive_topology_list_restart === + template <> + struct hash; + + //=== VK_EXT_present_mode_fifo_latest_ready === + template <> + struct hash; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + template <> + struct hash; + template <> + struct hash; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_HUAWEI_invocation_mask === + template <> + struct hash; + + //=== VK_NV_external_memory_rdma === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_pipeline_properties === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_frame_boundary === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_multisampled_render_to_single_sampled === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_extended_dynamic_state2 === + template <> + struct hash; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + template <> + struct hash; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_primitives_generated_query === + template <> + struct hash; + + //=== VK_KHR_ray_tracing_maintenance1 === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_image_view_min_lod === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_multi_draw === + template <> + struct hash; + template <> + struct hash; + template <> 
+ struct hash; + template <> + struct hash; + + //=== VK_EXT_image_2d_view_of_3d === + template <> + struct hash; + + //=== VK_EXT_shader_tile_image === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_opacity_micromap === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_HUAWEI_cluster_culling_shader === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_border_color_swizzle === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_pageable_device_local_memory === + template <> + struct hash; + + //=== VK_ARM_shader_core_properties === + template <> + struct hash; + + //=== VK_ARM_scheduling_controls === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_image_sliced_view_of_3d === + template <> + struct hash; + template <> + struct hash; + + //=== VK_VALVE_descriptor_set_host_mapping === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_non_seamless_cube_map === + template <> + struct hash; + + //=== VK_ARM_render_pass_striped === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_copy_memory_indirect === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_memory_decompression === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_device_generated_commands_compute === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_ray_tracing_linear_swept_spheres === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_linear_color_attachment === + template <> + struct hash; + + //=== VK_KHR_shader_maximal_reconvergence === + template <> + struct hash; + + //=== VK_EXT_image_compression_control_swapchain === + template <> + struct hash; + + //=== VK_QCOM_image_processing === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_nested_command_buffer === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_external_memory_acquire_unmodified === + template <> + struct hash; + + //=== VK_EXT_extended_dynamic_state3 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_subpass_merge_feedback === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_LUNARG_direct_driver_loading === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_shader_module_identifier === + template <> + struct hash; + template <> + struct hash; + 
template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_rasterization_order_attachment_access === + template <> + struct hash; + + //=== VK_NV_optical_flow === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_legacy_dithering === + template <> + struct hash; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_format_resolve === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_AMD_anti_lag === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_ray_tracing_position_fetch === + template <> + struct hash; + + //=== VK_EXT_shader_object === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_pipeline_binary === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_QCOM_tile_properties === + template <> + struct hash; + template <> + struct hash; + + //=== VK_SEC_amigo_profiling === + template <> + struct hash; + template <> + struct hash; + + //=== VK_QCOM_multiview_per_view_viewports === + template <> + struct hash; + + //=== VK_NV_ray_tracing_invocation_reorder === + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_cooperative_vector === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_extended_sparse_address_space === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_mutable_descriptor_type === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_legacy_vertex_attributes === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_layer_settings === + template <> + struct hash; + template <> + struct hash; + + //=== VK_ARM_shader_core_builtins === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_pipeline_library_group_handles === + template <> + struct hash; + + //=== VK_EXT_dynamic_rendering_unused_attachments === + template <> + struct hash; + + //=== VK_NV_low_latency2 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_cooperative_matrix === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_QCOM_multiview_per_view_render_areas === + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_compute_shader_derivatives === + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_video_decode_av1 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + 
template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_video_encode_av1 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_video_maintenance1 === + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_per_stage_descriptor_set === + template <> + struct hash; + + //=== VK_QCOM_image_processing2 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_QCOM_filter_cubic_weights === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_QCOM_ycbcr_degamma === + template <> + struct hash; + template <> + struct hash; + + //=== VK_QCOM_filter_cubic_clamp === + template <> + struct hash; + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + template <> + struct hash; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_MSFT_layered_driver === + template <> + struct hash; + + //=== VK_KHR_calibrated_timestamps === + template <> + struct hash; + + //=== VK_KHR_maintenance6 === + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_descriptor_pool_overallocation === + template <> + struct hash; + + //=== VK_QCOM_tile_memory_heap === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_display_stereo === + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_video_encode_quantization_map === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_raw_access_chains === + template <> + struct hash; + + //=== VK_NV_external_compute_queue === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_shader_relaxed_extended_instruction === + template <> + struct hash; + + //=== VK_NV_command_buffer_inheritance === + template <> + struct hash; + + //=== VK_KHR_maintenance7 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_shader_atomic_float16_vector === + template <> + struct hash; + + //=== VK_EXT_shader_replicated_composites === + template <> + struct hash; + + //=== VK_NV_ray_tracing_validation === + template <> + struct hash; + + //=== VK_NV_cluster_acceleration_structure === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; 
+ template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_partitioned_acceleration_structure === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_device_generated_commands === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_maintenance8 === + template <> + struct hash; + template <> + struct hash; + + //=== VK_MESA_image_alignment_control === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_depth_clamp_control === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_KHR_video_maintenance2 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_HUAWEI_hdr_vivid === + template <> + struct hash; + template <> + struct hash; + + //=== VK_NV_cooperative_matrix2 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_ARM_pipeline_opacity_micromap === + template <> + struct hash; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_depth_clamp_zero_one === + template <> + struct hash; + + //=== VK_EXT_vertex_attribute_robustness === + template <> + struct hash; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_present_metering === + template <> + struct hash; + template <> + struct hash; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_fragment_density_map_offset === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //================================================================= + //=== Required exports for VULKAN_HPP_NAMESPACE::StructureChain === + //================================================================= + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + using std::tuple_element; + using std::tuple_size; +#endif +} // namespace std + +// This VkFlags type is used as part of a bitfield in some structure. +// As it can't be mimicked by vk-data types, we need to export just that!! 
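The std::hash forward declarations above let every Vulkan-Hpp handle and Flags type act as a key in the standard unordered containers; the lone raw re-export just below (::VkGeometryInstanceFlagsKHR) is the exception called out in the comment, a plain C typedef that has to be exported as-is because it is used inside a bitfield. A minimal usage sketch for the hash exports (the vulkan_hpp module name, map, and function names are illustrative assumptions, not part of the diff):

#include <string>
#include <unordered_map>

import vulkan_hpp;   // assumed module name

// std::hash<vk::Image> is one of the specializations exported above, so the
// handle can serve directly as an unordered_map key.
std::unordered_map<vk::Image, std::string> g_debugNames;

void rememberName( vk::Image image, std::string name )
{
    g_debugNames[image] = std::move( name );
}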
+export using ::VkGeometryInstanceFlagsKHR; \ No newline at end of file diff --git a/include/vulkan/vulkan.h b/include/vulkan/vulkan.h index ae039725..90605819 100644 --- a/include/vulkan/vulkan.h +++ b/include/vulkan/vulkan.h @@ -1,6838 +1,99 @@ #ifndef VULKAN_H_ #define VULKAN_H_ 1 -#ifdef __cplusplus -extern "C" { -#endif - -/* -** Copyright (c) 2015-2017 The Khronos Group Inc. -** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. -*/ - -/* -** This header is generated from the Khronos Vulkan XML API Registry. -** -*/ - - -#define VK_VERSION_1_0 1 -#include "vk_platform.h" - -#define VK_MAKE_VERSION(major, minor, patch) \ - (((major) << 22) | ((minor) << 12) | (patch)) - -// DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead. -//#define VK_API_VERSION VK_MAKE_VERSION(1, 0, 0) // Patch version should always be set to 0 - -// Vulkan 1.0 version number -#define VK_API_VERSION_1_0 VK_MAKE_VERSION(1, 0, 0)// Patch version should always be set to 0 - -#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22) -#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3ff) -#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xfff) -// Version of this file -#define VK_HEADER_VERSION 57 - - -#define VK_NULL_HANDLE 0 - - - -#define VK_DEFINE_HANDLE(object) typedef struct object##_T* object; - - -#if !defined(VK_DEFINE_NON_DISPATCHABLE_HANDLE) -#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) - #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object; -#else - #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object; -#endif -#endif - - - -typedef uint32_t VkFlags; -typedef uint32_t VkBool32; -typedef uint64_t VkDeviceSize; -typedef uint32_t VkSampleMask; - -VK_DEFINE_HANDLE(VkInstance) -VK_DEFINE_HANDLE(VkPhysicalDevice) -VK_DEFINE_HANDLE(VkDevice) -VK_DEFINE_HANDLE(VkQueue) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore) -VK_DEFINE_HANDLE(VkCommandBuffer) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet) 
-VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool) - -#define VK_LOD_CLAMP_NONE 1000.0f -#define VK_REMAINING_MIP_LEVELS (~0U) -#define VK_REMAINING_ARRAY_LAYERS (~0U) -#define VK_WHOLE_SIZE (~0ULL) -#define VK_ATTACHMENT_UNUSED (~0U) -#define VK_TRUE 1 -#define VK_FALSE 0 -#define VK_QUEUE_FAMILY_IGNORED (~0U) -#define VK_SUBPASS_EXTERNAL (~0U) -#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE 256 -#define VK_UUID_SIZE 16 -#define VK_MAX_MEMORY_TYPES 32 -#define VK_MAX_MEMORY_HEAPS 16 -#define VK_MAX_EXTENSION_NAME_SIZE 256 -#define VK_MAX_DESCRIPTION_SIZE 256 - - -typedef enum VkPipelineCacheHeaderVersion { - VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1, - VK_PIPELINE_CACHE_HEADER_VERSION_BEGIN_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE, - VK_PIPELINE_CACHE_HEADER_VERSION_END_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE, - VK_PIPELINE_CACHE_HEADER_VERSION_RANGE_SIZE = (VK_PIPELINE_CACHE_HEADER_VERSION_ONE - VK_PIPELINE_CACHE_HEADER_VERSION_ONE + 1), - VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF -} VkPipelineCacheHeaderVersion; - -typedef enum VkResult { - VK_SUCCESS = 0, - VK_NOT_READY = 1, - VK_TIMEOUT = 2, - VK_EVENT_SET = 3, - VK_EVENT_RESET = 4, - VK_INCOMPLETE = 5, - VK_ERROR_OUT_OF_HOST_MEMORY = -1, - VK_ERROR_OUT_OF_DEVICE_MEMORY = -2, - VK_ERROR_INITIALIZATION_FAILED = -3, - VK_ERROR_DEVICE_LOST = -4, - VK_ERROR_MEMORY_MAP_FAILED = -5, - VK_ERROR_LAYER_NOT_PRESENT = -6, - VK_ERROR_EXTENSION_NOT_PRESENT = -7, - VK_ERROR_FEATURE_NOT_PRESENT = -8, - VK_ERROR_INCOMPATIBLE_DRIVER = -9, - VK_ERROR_TOO_MANY_OBJECTS = -10, - VK_ERROR_FORMAT_NOT_SUPPORTED = -11, - VK_ERROR_FRAGMENTED_POOL = -12, - VK_ERROR_SURFACE_LOST_KHR = -1000000000, - VK_ERROR_NATIVE_WINDOW_IN_USE_KHR = -1000000001, - VK_SUBOPTIMAL_KHR = 1000001003, - VK_ERROR_OUT_OF_DATE_KHR = -1000001004, - VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001, - VK_ERROR_VALIDATION_FAILED_EXT = -1000011001, - VK_ERROR_INVALID_SHADER_NV = -1000012000, - VK_ERROR_OUT_OF_POOL_MEMORY_KHR = -1000069000, - VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = -1000072003, - VK_RESULT_BEGIN_RANGE = VK_ERROR_FRAGMENTED_POOL, - VK_RESULT_END_RANGE = VK_INCOMPLETE, - VK_RESULT_RANGE_SIZE = (VK_INCOMPLETE - VK_ERROR_FRAGMENTED_POOL + 1), - VK_RESULT_MAX_ENUM = 0x7FFFFFFF -} VkResult; - -typedef enum VkStructureType { - VK_STRUCTURE_TYPE_APPLICATION_INFO = 0, - VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO = 1, - VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO = 2, - VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO = 3, - VK_STRUCTURE_TYPE_SUBMIT_INFO = 4, - VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO = 5, - VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE = 6, - VK_STRUCTURE_TYPE_BIND_SPARSE_INFO = 7, - VK_STRUCTURE_TYPE_FENCE_CREATE_INFO = 8, - VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO = 9, - VK_STRUCTURE_TYPE_EVENT_CREATE_INFO = 10, - VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO = 11, - VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO = 12, - VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO = 13, - VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO = 14, - VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO = 15, - VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO = 16, - VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO = 17, - VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO = 18, - VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO = 19, - VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO = 20, - VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO = 21, - VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO = 22, - VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO = 23, - 
VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO = 24, - VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO = 25, - VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO = 26, - VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO = 27, - VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO = 28, - VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO = 29, - VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO = 30, - VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO = 31, - VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO = 32, - VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO = 33, - VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO = 34, - VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET = 35, - VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET = 36, - VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO = 37, - VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO = 38, - VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO = 39, - VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO = 40, - VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO = 41, - VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO = 42, - VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO = 43, - VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER = 44, - VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER = 45, - VK_STRUCTURE_TYPE_MEMORY_BARRIER = 46, - VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO = 47, - VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO = 48, - VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR = 1000001000, - VK_STRUCTURE_TYPE_PRESENT_INFO_KHR = 1000001001, - VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR = 1000002000, - VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR = 1000002001, - VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR = 1000003000, - VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR = 1000004000, - VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR = 1000005000, - VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR = 1000006000, - VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR = 1000007000, - VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR = 1000008000, - VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR = 1000009000, - VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT = 1000011000, - VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD = 1000018000, - VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT = 1000022000, - VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT = 1000022001, - VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT = 1000022002, - VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV = 1000026000, - VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV = 1000026001, - VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV = 1000026002, - VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000, - VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHX = 1000053000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHX = 1000053001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHX = 1000053002, - VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV = 1000056000, - VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV = 1000056001, - VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057000, - VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057001, - VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV = 1000058000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR = 1000059000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR = 1000059001, - VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR = 1000059002, - VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR = 1000059003, - 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR = 1000059004, - VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR = 1000059005, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR = 1000059006, - VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR = 1000059007, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR = 1000059008, - VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHX = 1000060000, - VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHX = 1000060003, - VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHX = 1000060004, - VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHX = 1000060005, - VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHX = 1000060006, - VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHX = 1000060007, - VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHX = 1000060008, - VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHX = 1000060009, - VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHX = 1000060010, - VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHX = 1000060011, - VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHX = 1000060012, - VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHX = 1000060013, - VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHX = 1000060014, - VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT = 1000061000, - VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN = 1000062000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHX = 1000070000, - VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHX = 1000070001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR = 1000071000, - VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR = 1000071001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR = 1000071002, - VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR = 1000071003, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR = 1000071004, - VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR = 1000072000, - VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR = 1000072001, - VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR = 1000072002, - VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073000, - VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073001, - VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR = 1000073002, - VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR = 1000073003, - VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR = 1000074000, - VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR = 1000074001, - VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR = 1000074002, - VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR = 1000075000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR = 1000076000, - VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR = 1000076001, - VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR = 1000077000, - VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078000, - VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078001, - VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR = 1000078002, - VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR = 1000078003, - VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR = 1000079000, - VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR = 1000079001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR = 1000080000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR = 1000083000, - VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR = 1000084000, - VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR = 1000085000, - 
VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX = 1000086000, - VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX = 1000086001, - VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX = 1000086002, - VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX = 1000086003, - VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX = 1000086004, - VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX = 1000086005, - VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV = 1000087000, - VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT = 1000090000, - VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT = 1000091000, - VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT = 1000091001, - VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT = 1000091002, - VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT = 1000091003, - VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE = 1000092000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES_KHX = 1000093000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX = 1000097000, - VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV = 1000098000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT = 1000099000, - VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT = 1000099001, - VK_STRUCTURE_TYPE_HDR_METADATA_EXT = 1000105000, - VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR = 1000111000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR = 1000112000, - VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR = 1000112001, - VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR = 1000113000, - VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114000, - VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114001, - VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR = 1000114002, - VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR = 1000115000, - VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR = 1000115001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR = 1000117000, - VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR = 1000117001, - VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR = 1000117002, - VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR = 1000117003, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR = 1000119000, - VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR = 1000119001, - VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR = 1000119002, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = 1000120000, - VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR = 1000121000, - VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR = 1000121001, - VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR = 1000121002, - VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR = 1000121003, - VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR = 1000121004, - VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK = 1000122000, - VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK = 1000123000, - VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR = 1000127000, - VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR = 1000127001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = 1000130000, - VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT = 1000130001, - VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO_KHR = 1000145000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES_KHR = 1000145001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES_KHR = 1000145002, - VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR = 1000146000, - 
VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR = 1000146001, - VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR = 1000146002, - VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR = 1000146003, - VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = 1000146004, - VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR = 1000147000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT = 1000148000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001, - VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002, - VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV = 1000149000, - VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV = 1000152000, - VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR = 1000156000, - VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR = 1000156001, - VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR = 1000156002, - VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR = 1000156003, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR = 1000156004, - VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR = 1000156005, - VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR = 1000157000, - VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR = 1000157001, - VK_STRUCTURE_TYPE_BEGIN_RANGE = VK_STRUCTURE_TYPE_APPLICATION_INFO, - VK_STRUCTURE_TYPE_END_RANGE = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, - VK_STRUCTURE_TYPE_RANGE_SIZE = (VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO - VK_STRUCTURE_TYPE_APPLICATION_INFO + 1), - VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkStructureType; - -typedef enum VkSystemAllocationScope { - VK_SYSTEM_ALLOCATION_SCOPE_COMMAND = 0, - VK_SYSTEM_ALLOCATION_SCOPE_OBJECT = 1, - VK_SYSTEM_ALLOCATION_SCOPE_CACHE = 2, - VK_SYSTEM_ALLOCATION_SCOPE_DEVICE = 3, - VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE = 4, - VK_SYSTEM_ALLOCATION_SCOPE_BEGIN_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, - VK_SYSTEM_ALLOCATION_SCOPE_END_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE, - VK_SYSTEM_ALLOCATION_SCOPE_RANGE_SIZE = (VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE - VK_SYSTEM_ALLOCATION_SCOPE_COMMAND + 1), - VK_SYSTEM_ALLOCATION_SCOPE_MAX_ENUM = 0x7FFFFFFF -} VkSystemAllocationScope; - -typedef enum VkInternalAllocationType { - VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE = 0, - VK_INTERNAL_ALLOCATION_TYPE_BEGIN_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, - VK_INTERNAL_ALLOCATION_TYPE_END_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, - VK_INTERNAL_ALLOCATION_TYPE_RANGE_SIZE = (VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE - VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE + 1), - VK_INTERNAL_ALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkInternalAllocationType; - -typedef enum VkFormat { - VK_FORMAT_UNDEFINED = 0, - VK_FORMAT_R4G4_UNORM_PACK8 = 1, - VK_FORMAT_R4G4B4A4_UNORM_PACK16 = 2, - VK_FORMAT_B4G4R4A4_UNORM_PACK16 = 3, - VK_FORMAT_R5G6B5_UNORM_PACK16 = 4, - VK_FORMAT_B5G6R5_UNORM_PACK16 = 5, - VK_FORMAT_R5G5B5A1_UNORM_PACK16 = 6, - VK_FORMAT_B5G5R5A1_UNORM_PACK16 = 7, - VK_FORMAT_A1R5G5B5_UNORM_PACK16 = 8, - VK_FORMAT_R8_UNORM = 9, - VK_FORMAT_R8_SNORM = 10, - VK_FORMAT_R8_USCALED = 11, - VK_FORMAT_R8_SSCALED = 12, - VK_FORMAT_R8_UINT = 13, - VK_FORMAT_R8_SINT = 14, - VK_FORMAT_R8_SRGB = 15, - VK_FORMAT_R8G8_UNORM = 16, - VK_FORMAT_R8G8_SNORM = 17, - VK_FORMAT_R8G8_USCALED = 18, - VK_FORMAT_R8G8_SSCALED = 19, - VK_FORMAT_R8G8_UINT = 20, - VK_FORMAT_R8G8_SINT = 21, - VK_FORMAT_R8G8_SRGB = 22, - VK_FORMAT_R8G8B8_UNORM = 23, - 
VK_FORMAT_R8G8B8_SNORM = 24, - VK_FORMAT_R8G8B8_USCALED = 25, - VK_FORMAT_R8G8B8_SSCALED = 26, - VK_FORMAT_R8G8B8_UINT = 27, - VK_FORMAT_R8G8B8_SINT = 28, - VK_FORMAT_R8G8B8_SRGB = 29, - VK_FORMAT_B8G8R8_UNORM = 30, - VK_FORMAT_B8G8R8_SNORM = 31, - VK_FORMAT_B8G8R8_USCALED = 32, - VK_FORMAT_B8G8R8_SSCALED = 33, - VK_FORMAT_B8G8R8_UINT = 34, - VK_FORMAT_B8G8R8_SINT = 35, - VK_FORMAT_B8G8R8_SRGB = 36, - VK_FORMAT_R8G8B8A8_UNORM = 37, - VK_FORMAT_R8G8B8A8_SNORM = 38, - VK_FORMAT_R8G8B8A8_USCALED = 39, - VK_FORMAT_R8G8B8A8_SSCALED = 40, - VK_FORMAT_R8G8B8A8_UINT = 41, - VK_FORMAT_R8G8B8A8_SINT = 42, - VK_FORMAT_R8G8B8A8_SRGB = 43, - VK_FORMAT_B8G8R8A8_UNORM = 44, - VK_FORMAT_B8G8R8A8_SNORM = 45, - VK_FORMAT_B8G8R8A8_USCALED = 46, - VK_FORMAT_B8G8R8A8_SSCALED = 47, - VK_FORMAT_B8G8R8A8_UINT = 48, - VK_FORMAT_B8G8R8A8_SINT = 49, - VK_FORMAT_B8G8R8A8_SRGB = 50, - VK_FORMAT_A8B8G8R8_UNORM_PACK32 = 51, - VK_FORMAT_A8B8G8R8_SNORM_PACK32 = 52, - VK_FORMAT_A8B8G8R8_USCALED_PACK32 = 53, - VK_FORMAT_A8B8G8R8_SSCALED_PACK32 = 54, - VK_FORMAT_A8B8G8R8_UINT_PACK32 = 55, - VK_FORMAT_A8B8G8R8_SINT_PACK32 = 56, - VK_FORMAT_A8B8G8R8_SRGB_PACK32 = 57, - VK_FORMAT_A2R10G10B10_UNORM_PACK32 = 58, - VK_FORMAT_A2R10G10B10_SNORM_PACK32 = 59, - VK_FORMAT_A2R10G10B10_USCALED_PACK32 = 60, - VK_FORMAT_A2R10G10B10_SSCALED_PACK32 = 61, - VK_FORMAT_A2R10G10B10_UINT_PACK32 = 62, - VK_FORMAT_A2R10G10B10_SINT_PACK32 = 63, - VK_FORMAT_A2B10G10R10_UNORM_PACK32 = 64, - VK_FORMAT_A2B10G10R10_SNORM_PACK32 = 65, - VK_FORMAT_A2B10G10R10_USCALED_PACK32 = 66, - VK_FORMAT_A2B10G10R10_SSCALED_PACK32 = 67, - VK_FORMAT_A2B10G10R10_UINT_PACK32 = 68, - VK_FORMAT_A2B10G10R10_SINT_PACK32 = 69, - VK_FORMAT_R16_UNORM = 70, - VK_FORMAT_R16_SNORM = 71, - VK_FORMAT_R16_USCALED = 72, - VK_FORMAT_R16_SSCALED = 73, - VK_FORMAT_R16_UINT = 74, - VK_FORMAT_R16_SINT = 75, - VK_FORMAT_R16_SFLOAT = 76, - VK_FORMAT_R16G16_UNORM = 77, - VK_FORMAT_R16G16_SNORM = 78, - VK_FORMAT_R16G16_USCALED = 79, - VK_FORMAT_R16G16_SSCALED = 80, - VK_FORMAT_R16G16_UINT = 81, - VK_FORMAT_R16G16_SINT = 82, - VK_FORMAT_R16G16_SFLOAT = 83, - VK_FORMAT_R16G16B16_UNORM = 84, - VK_FORMAT_R16G16B16_SNORM = 85, - VK_FORMAT_R16G16B16_USCALED = 86, - VK_FORMAT_R16G16B16_SSCALED = 87, - VK_FORMAT_R16G16B16_UINT = 88, - VK_FORMAT_R16G16B16_SINT = 89, - VK_FORMAT_R16G16B16_SFLOAT = 90, - VK_FORMAT_R16G16B16A16_UNORM = 91, - VK_FORMAT_R16G16B16A16_SNORM = 92, - VK_FORMAT_R16G16B16A16_USCALED = 93, - VK_FORMAT_R16G16B16A16_SSCALED = 94, - VK_FORMAT_R16G16B16A16_UINT = 95, - VK_FORMAT_R16G16B16A16_SINT = 96, - VK_FORMAT_R16G16B16A16_SFLOAT = 97, - VK_FORMAT_R32_UINT = 98, - VK_FORMAT_R32_SINT = 99, - VK_FORMAT_R32_SFLOAT = 100, - VK_FORMAT_R32G32_UINT = 101, - VK_FORMAT_R32G32_SINT = 102, - VK_FORMAT_R32G32_SFLOAT = 103, - VK_FORMAT_R32G32B32_UINT = 104, - VK_FORMAT_R32G32B32_SINT = 105, - VK_FORMAT_R32G32B32_SFLOAT = 106, - VK_FORMAT_R32G32B32A32_UINT = 107, - VK_FORMAT_R32G32B32A32_SINT = 108, - VK_FORMAT_R32G32B32A32_SFLOAT = 109, - VK_FORMAT_R64_UINT = 110, - VK_FORMAT_R64_SINT = 111, - VK_FORMAT_R64_SFLOAT = 112, - VK_FORMAT_R64G64_UINT = 113, - VK_FORMAT_R64G64_SINT = 114, - VK_FORMAT_R64G64_SFLOAT = 115, - VK_FORMAT_R64G64B64_UINT = 116, - VK_FORMAT_R64G64B64_SINT = 117, - VK_FORMAT_R64G64B64_SFLOAT = 118, - VK_FORMAT_R64G64B64A64_UINT = 119, - VK_FORMAT_R64G64B64A64_SINT = 120, - VK_FORMAT_R64G64B64A64_SFLOAT = 121, - VK_FORMAT_B10G11R11_UFLOAT_PACK32 = 122, - VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 = 123, - VK_FORMAT_D16_UNORM = 124, - VK_FORMAT_X8_D24_UNORM_PACK32 = 125, - 
VK_FORMAT_D32_SFLOAT = 126, - VK_FORMAT_S8_UINT = 127, - VK_FORMAT_D16_UNORM_S8_UINT = 128, - VK_FORMAT_D24_UNORM_S8_UINT = 129, - VK_FORMAT_D32_SFLOAT_S8_UINT = 130, - VK_FORMAT_BC1_RGB_UNORM_BLOCK = 131, - VK_FORMAT_BC1_RGB_SRGB_BLOCK = 132, - VK_FORMAT_BC1_RGBA_UNORM_BLOCK = 133, - VK_FORMAT_BC1_RGBA_SRGB_BLOCK = 134, - VK_FORMAT_BC2_UNORM_BLOCK = 135, - VK_FORMAT_BC2_SRGB_BLOCK = 136, - VK_FORMAT_BC3_UNORM_BLOCK = 137, - VK_FORMAT_BC3_SRGB_BLOCK = 138, - VK_FORMAT_BC4_UNORM_BLOCK = 139, - VK_FORMAT_BC4_SNORM_BLOCK = 140, - VK_FORMAT_BC5_UNORM_BLOCK = 141, - VK_FORMAT_BC5_SNORM_BLOCK = 142, - VK_FORMAT_BC6H_UFLOAT_BLOCK = 143, - VK_FORMAT_BC6H_SFLOAT_BLOCK = 144, - VK_FORMAT_BC7_UNORM_BLOCK = 145, - VK_FORMAT_BC7_SRGB_BLOCK = 146, - VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK = 147, - VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK = 148, - VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK = 149, - VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK = 150, - VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK = 151, - VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK = 152, - VK_FORMAT_EAC_R11_UNORM_BLOCK = 153, - VK_FORMAT_EAC_R11_SNORM_BLOCK = 154, - VK_FORMAT_EAC_R11G11_UNORM_BLOCK = 155, - VK_FORMAT_EAC_R11G11_SNORM_BLOCK = 156, - VK_FORMAT_ASTC_4x4_UNORM_BLOCK = 157, - VK_FORMAT_ASTC_4x4_SRGB_BLOCK = 158, - VK_FORMAT_ASTC_5x4_UNORM_BLOCK = 159, - VK_FORMAT_ASTC_5x4_SRGB_BLOCK = 160, - VK_FORMAT_ASTC_5x5_UNORM_BLOCK = 161, - VK_FORMAT_ASTC_5x5_SRGB_BLOCK = 162, - VK_FORMAT_ASTC_6x5_UNORM_BLOCK = 163, - VK_FORMAT_ASTC_6x5_SRGB_BLOCK = 164, - VK_FORMAT_ASTC_6x6_UNORM_BLOCK = 165, - VK_FORMAT_ASTC_6x6_SRGB_BLOCK = 166, - VK_FORMAT_ASTC_8x5_UNORM_BLOCK = 167, - VK_FORMAT_ASTC_8x5_SRGB_BLOCK = 168, - VK_FORMAT_ASTC_8x6_UNORM_BLOCK = 169, - VK_FORMAT_ASTC_8x6_SRGB_BLOCK = 170, - VK_FORMAT_ASTC_8x8_UNORM_BLOCK = 171, - VK_FORMAT_ASTC_8x8_SRGB_BLOCK = 172, - VK_FORMAT_ASTC_10x5_UNORM_BLOCK = 173, - VK_FORMAT_ASTC_10x5_SRGB_BLOCK = 174, - VK_FORMAT_ASTC_10x6_UNORM_BLOCK = 175, - VK_FORMAT_ASTC_10x6_SRGB_BLOCK = 176, - VK_FORMAT_ASTC_10x8_UNORM_BLOCK = 177, - VK_FORMAT_ASTC_10x8_SRGB_BLOCK = 178, - VK_FORMAT_ASTC_10x10_UNORM_BLOCK = 179, - VK_FORMAT_ASTC_10x10_SRGB_BLOCK = 180, - VK_FORMAT_ASTC_12x10_UNORM_BLOCK = 181, - VK_FORMAT_ASTC_12x10_SRGB_BLOCK = 182, - VK_FORMAT_ASTC_12x12_UNORM_BLOCK = 183, - VK_FORMAT_ASTC_12x12_SRGB_BLOCK = 184, - VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG = 1000054000, - VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG = 1000054001, - VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG = 1000054002, - VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG = 1000054003, - VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG = 1000054004, - VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005, - VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006, - VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007, - VK_FORMAT_G8B8G8R8_422_UNORM_KHR = 1000156000, - VK_FORMAT_B8G8R8G8_422_UNORM_KHR = 1000156001, - VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR = 1000156002, - VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR = 1000156003, - VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR = 1000156004, - VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR = 1000156005, - VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR = 1000156006, - VK_FORMAT_R10X6_UNORM_PACK16_KHR = 1000156007, - VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR = 1000156008, - VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR = 1000156009, - VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR = 1000156010, - VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR = 1000156011, - VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR = 1000156012, - VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR = 1000156013, - 
VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR = 1000156014, - VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR = 1000156015, - VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR = 1000156016, - VK_FORMAT_R12X4_UNORM_PACK16_KHR = 1000156017, - VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR = 1000156018, - VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR = 1000156019, - VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR = 1000156020, - VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR = 1000156021, - VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR = 1000156022, - VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR = 1000156023, - VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR = 1000156024, - VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR = 1000156025, - VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR = 1000156026, - VK_FORMAT_G16B16G16R16_422_UNORM_KHR = 1000156027, - VK_FORMAT_B16G16R16G16_422_UNORM_KHR = 1000156028, - VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR = 1000156029, - VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR = 1000156030, - VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR = 1000156031, - VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR = 1000156032, - VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR = 1000156033, - VK_FORMAT_BEGIN_RANGE = VK_FORMAT_UNDEFINED, - VK_FORMAT_END_RANGE = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, - VK_FORMAT_RANGE_SIZE = (VK_FORMAT_ASTC_12x12_SRGB_BLOCK - VK_FORMAT_UNDEFINED + 1), - VK_FORMAT_MAX_ENUM = 0x7FFFFFFF -} VkFormat; - -typedef enum VkImageType { - VK_IMAGE_TYPE_1D = 0, - VK_IMAGE_TYPE_2D = 1, - VK_IMAGE_TYPE_3D = 2, - VK_IMAGE_TYPE_BEGIN_RANGE = VK_IMAGE_TYPE_1D, - VK_IMAGE_TYPE_END_RANGE = VK_IMAGE_TYPE_3D, - VK_IMAGE_TYPE_RANGE_SIZE = (VK_IMAGE_TYPE_3D - VK_IMAGE_TYPE_1D + 1), - VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkImageType; - -typedef enum VkImageTiling { - VK_IMAGE_TILING_OPTIMAL = 0, - VK_IMAGE_TILING_LINEAR = 1, - VK_IMAGE_TILING_BEGIN_RANGE = VK_IMAGE_TILING_OPTIMAL, - VK_IMAGE_TILING_END_RANGE = VK_IMAGE_TILING_LINEAR, - VK_IMAGE_TILING_RANGE_SIZE = (VK_IMAGE_TILING_LINEAR - VK_IMAGE_TILING_OPTIMAL + 1), - VK_IMAGE_TILING_MAX_ENUM = 0x7FFFFFFF -} VkImageTiling; - -typedef enum VkPhysicalDeviceType { - VK_PHYSICAL_DEVICE_TYPE_OTHER = 0, - VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU = 1, - VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU = 2, - VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU = 3, - VK_PHYSICAL_DEVICE_TYPE_CPU = 4, - VK_PHYSICAL_DEVICE_TYPE_BEGIN_RANGE = VK_PHYSICAL_DEVICE_TYPE_OTHER, - VK_PHYSICAL_DEVICE_TYPE_END_RANGE = VK_PHYSICAL_DEVICE_TYPE_CPU, - VK_PHYSICAL_DEVICE_TYPE_RANGE_SIZE = (VK_PHYSICAL_DEVICE_TYPE_CPU - VK_PHYSICAL_DEVICE_TYPE_OTHER + 1), - VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkPhysicalDeviceType; - -typedef enum VkQueryType { - VK_QUERY_TYPE_OCCLUSION = 0, - VK_QUERY_TYPE_PIPELINE_STATISTICS = 1, - VK_QUERY_TYPE_TIMESTAMP = 2, - VK_QUERY_TYPE_BEGIN_RANGE = VK_QUERY_TYPE_OCCLUSION, - VK_QUERY_TYPE_END_RANGE = VK_QUERY_TYPE_TIMESTAMP, - VK_QUERY_TYPE_RANGE_SIZE = (VK_QUERY_TYPE_TIMESTAMP - VK_QUERY_TYPE_OCCLUSION + 1), - VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkQueryType; - -typedef enum VkSharingMode { - VK_SHARING_MODE_EXCLUSIVE = 0, - VK_SHARING_MODE_CONCURRENT = 1, - VK_SHARING_MODE_BEGIN_RANGE = VK_SHARING_MODE_EXCLUSIVE, - VK_SHARING_MODE_END_RANGE = VK_SHARING_MODE_CONCURRENT, - VK_SHARING_MODE_RANGE_SIZE = (VK_SHARING_MODE_CONCURRENT - VK_SHARING_MODE_EXCLUSIVE + 1), - VK_SHARING_MODE_MAX_ENUM = 0x7FFFFFFF -} VkSharingMode; - -typedef enum VkImageLayout { - 
VK_IMAGE_LAYOUT_UNDEFINED = 0, - VK_IMAGE_LAYOUT_GENERAL = 1, - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2, - VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3, - VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4, - VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5, - VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6, - VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7, - VK_IMAGE_LAYOUT_PREINITIALIZED = 8, - VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002, - VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000, - VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = 1000117000, - VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = 1000117001, - VK_IMAGE_LAYOUT_BEGIN_RANGE = VK_IMAGE_LAYOUT_UNDEFINED, - VK_IMAGE_LAYOUT_END_RANGE = VK_IMAGE_LAYOUT_PREINITIALIZED, - VK_IMAGE_LAYOUT_RANGE_SIZE = (VK_IMAGE_LAYOUT_PREINITIALIZED - VK_IMAGE_LAYOUT_UNDEFINED + 1), - VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF -} VkImageLayout; - -typedef enum VkImageViewType { - VK_IMAGE_VIEW_TYPE_1D = 0, - VK_IMAGE_VIEW_TYPE_2D = 1, - VK_IMAGE_VIEW_TYPE_3D = 2, - VK_IMAGE_VIEW_TYPE_CUBE = 3, - VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4, - VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5, - VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6, - VK_IMAGE_VIEW_TYPE_BEGIN_RANGE = VK_IMAGE_VIEW_TYPE_1D, - VK_IMAGE_VIEW_TYPE_END_RANGE = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, - VK_IMAGE_VIEW_TYPE_RANGE_SIZE = (VK_IMAGE_VIEW_TYPE_CUBE_ARRAY - VK_IMAGE_VIEW_TYPE_1D + 1), - VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkImageViewType; - -typedef enum VkComponentSwizzle { - VK_COMPONENT_SWIZZLE_IDENTITY = 0, - VK_COMPONENT_SWIZZLE_ZERO = 1, - VK_COMPONENT_SWIZZLE_ONE = 2, - VK_COMPONENT_SWIZZLE_R = 3, - VK_COMPONENT_SWIZZLE_G = 4, - VK_COMPONENT_SWIZZLE_B = 5, - VK_COMPONENT_SWIZZLE_A = 6, - VK_COMPONENT_SWIZZLE_BEGIN_RANGE = VK_COMPONENT_SWIZZLE_IDENTITY, - VK_COMPONENT_SWIZZLE_END_RANGE = VK_COMPONENT_SWIZZLE_A, - VK_COMPONENT_SWIZZLE_RANGE_SIZE = (VK_COMPONENT_SWIZZLE_A - VK_COMPONENT_SWIZZLE_IDENTITY + 1), - VK_COMPONENT_SWIZZLE_MAX_ENUM = 0x7FFFFFFF -} VkComponentSwizzle; - -typedef enum VkVertexInputRate { - VK_VERTEX_INPUT_RATE_VERTEX = 0, - VK_VERTEX_INPUT_RATE_INSTANCE = 1, - VK_VERTEX_INPUT_RATE_BEGIN_RANGE = VK_VERTEX_INPUT_RATE_VERTEX, - VK_VERTEX_INPUT_RATE_END_RANGE = VK_VERTEX_INPUT_RATE_INSTANCE, - VK_VERTEX_INPUT_RATE_RANGE_SIZE = (VK_VERTEX_INPUT_RATE_INSTANCE - VK_VERTEX_INPUT_RATE_VERTEX + 1), - VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF -} VkVertexInputRate; - -typedef enum VkPrimitiveTopology { - VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0, - VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1, - VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5, - VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6, - VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9, - VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10, - VK_PRIMITIVE_TOPOLOGY_BEGIN_RANGE = VK_PRIMITIVE_TOPOLOGY_POINT_LIST, - VK_PRIMITIVE_TOPOLOGY_END_RANGE = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, - VK_PRIMITIVE_TOPOLOGY_RANGE_SIZE = (VK_PRIMITIVE_TOPOLOGY_PATCH_LIST - VK_PRIMITIVE_TOPOLOGY_POINT_LIST + 1), - VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF -} VkPrimitiveTopology; - -typedef enum VkPolygonMode { - VK_POLYGON_MODE_FILL = 0, - VK_POLYGON_MODE_LINE = 1, - VK_POLYGON_MODE_POINT = 2, - VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000, - VK_POLYGON_MODE_BEGIN_RANGE = VK_POLYGON_MODE_FILL, - VK_POLYGON_MODE_END_RANGE 
= VK_POLYGON_MODE_POINT, - VK_POLYGON_MODE_RANGE_SIZE = (VK_POLYGON_MODE_POINT - VK_POLYGON_MODE_FILL + 1), - VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF -} VkPolygonMode; - -typedef enum VkFrontFace { - VK_FRONT_FACE_COUNTER_CLOCKWISE = 0, - VK_FRONT_FACE_CLOCKWISE = 1, - VK_FRONT_FACE_BEGIN_RANGE = VK_FRONT_FACE_COUNTER_CLOCKWISE, - VK_FRONT_FACE_END_RANGE = VK_FRONT_FACE_CLOCKWISE, - VK_FRONT_FACE_RANGE_SIZE = (VK_FRONT_FACE_CLOCKWISE - VK_FRONT_FACE_COUNTER_CLOCKWISE + 1), - VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF -} VkFrontFace; - -typedef enum VkCompareOp { - VK_COMPARE_OP_NEVER = 0, - VK_COMPARE_OP_LESS = 1, - VK_COMPARE_OP_EQUAL = 2, - VK_COMPARE_OP_LESS_OR_EQUAL = 3, - VK_COMPARE_OP_GREATER = 4, - VK_COMPARE_OP_NOT_EQUAL = 5, - VK_COMPARE_OP_GREATER_OR_EQUAL = 6, - VK_COMPARE_OP_ALWAYS = 7, - VK_COMPARE_OP_BEGIN_RANGE = VK_COMPARE_OP_NEVER, - VK_COMPARE_OP_END_RANGE = VK_COMPARE_OP_ALWAYS, - VK_COMPARE_OP_RANGE_SIZE = (VK_COMPARE_OP_ALWAYS - VK_COMPARE_OP_NEVER + 1), - VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF -} VkCompareOp; - -typedef enum VkStencilOp { - VK_STENCIL_OP_KEEP = 0, - VK_STENCIL_OP_ZERO = 1, - VK_STENCIL_OP_REPLACE = 2, - VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3, - VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4, - VK_STENCIL_OP_INVERT = 5, - VK_STENCIL_OP_INCREMENT_AND_WRAP = 6, - VK_STENCIL_OP_DECREMENT_AND_WRAP = 7, - VK_STENCIL_OP_BEGIN_RANGE = VK_STENCIL_OP_KEEP, - VK_STENCIL_OP_END_RANGE = VK_STENCIL_OP_DECREMENT_AND_WRAP, - VK_STENCIL_OP_RANGE_SIZE = (VK_STENCIL_OP_DECREMENT_AND_WRAP - VK_STENCIL_OP_KEEP + 1), - VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF -} VkStencilOp; - -typedef enum VkLogicOp { - VK_LOGIC_OP_CLEAR = 0, - VK_LOGIC_OP_AND = 1, - VK_LOGIC_OP_AND_REVERSE = 2, - VK_LOGIC_OP_COPY = 3, - VK_LOGIC_OP_AND_INVERTED = 4, - VK_LOGIC_OP_NO_OP = 5, - VK_LOGIC_OP_XOR = 6, - VK_LOGIC_OP_OR = 7, - VK_LOGIC_OP_NOR = 8, - VK_LOGIC_OP_EQUIVALENT = 9, - VK_LOGIC_OP_INVERT = 10, - VK_LOGIC_OP_OR_REVERSE = 11, - VK_LOGIC_OP_COPY_INVERTED = 12, - VK_LOGIC_OP_OR_INVERTED = 13, - VK_LOGIC_OP_NAND = 14, - VK_LOGIC_OP_SET = 15, - VK_LOGIC_OP_BEGIN_RANGE = VK_LOGIC_OP_CLEAR, - VK_LOGIC_OP_END_RANGE = VK_LOGIC_OP_SET, - VK_LOGIC_OP_RANGE_SIZE = (VK_LOGIC_OP_SET - VK_LOGIC_OP_CLEAR + 1), - VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF -} VkLogicOp; - -typedef enum VkBlendFactor { - VK_BLEND_FACTOR_ZERO = 0, - VK_BLEND_FACTOR_ONE = 1, - VK_BLEND_FACTOR_SRC_COLOR = 2, - VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR = 3, - VK_BLEND_FACTOR_DST_COLOR = 4, - VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR = 5, - VK_BLEND_FACTOR_SRC_ALPHA = 6, - VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA = 7, - VK_BLEND_FACTOR_DST_ALPHA = 8, - VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA = 9, - VK_BLEND_FACTOR_CONSTANT_COLOR = 10, - VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR = 11, - VK_BLEND_FACTOR_CONSTANT_ALPHA = 12, - VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA = 13, - VK_BLEND_FACTOR_SRC_ALPHA_SATURATE = 14, - VK_BLEND_FACTOR_SRC1_COLOR = 15, - VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR = 16, - VK_BLEND_FACTOR_SRC1_ALPHA = 17, - VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA = 18, - VK_BLEND_FACTOR_BEGIN_RANGE = VK_BLEND_FACTOR_ZERO, - VK_BLEND_FACTOR_END_RANGE = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA, - VK_BLEND_FACTOR_RANGE_SIZE = (VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA - VK_BLEND_FACTOR_ZERO + 1), - VK_BLEND_FACTOR_MAX_ENUM = 0x7FFFFFFF -} VkBlendFactor; - -typedef enum VkBlendOp { - VK_BLEND_OP_ADD = 0, - VK_BLEND_OP_SUBTRACT = 1, - VK_BLEND_OP_REVERSE_SUBTRACT = 2, - VK_BLEND_OP_MIN = 3, - VK_BLEND_OP_MAX = 4, - VK_BLEND_OP_ZERO_EXT = 1000148000, - VK_BLEND_OP_SRC_EXT = 1000148001, - 
VK_BLEND_OP_DST_EXT = 1000148002, - VK_BLEND_OP_SRC_OVER_EXT = 1000148003, - VK_BLEND_OP_DST_OVER_EXT = 1000148004, - VK_BLEND_OP_SRC_IN_EXT = 1000148005, - VK_BLEND_OP_DST_IN_EXT = 1000148006, - VK_BLEND_OP_SRC_OUT_EXT = 1000148007, - VK_BLEND_OP_DST_OUT_EXT = 1000148008, - VK_BLEND_OP_SRC_ATOP_EXT = 1000148009, - VK_BLEND_OP_DST_ATOP_EXT = 1000148010, - VK_BLEND_OP_XOR_EXT = 1000148011, - VK_BLEND_OP_MULTIPLY_EXT = 1000148012, - VK_BLEND_OP_SCREEN_EXT = 1000148013, - VK_BLEND_OP_OVERLAY_EXT = 1000148014, - VK_BLEND_OP_DARKEN_EXT = 1000148015, - VK_BLEND_OP_LIGHTEN_EXT = 1000148016, - VK_BLEND_OP_COLORDODGE_EXT = 1000148017, - VK_BLEND_OP_COLORBURN_EXT = 1000148018, - VK_BLEND_OP_HARDLIGHT_EXT = 1000148019, - VK_BLEND_OP_SOFTLIGHT_EXT = 1000148020, - VK_BLEND_OP_DIFFERENCE_EXT = 1000148021, - VK_BLEND_OP_EXCLUSION_EXT = 1000148022, - VK_BLEND_OP_INVERT_EXT = 1000148023, - VK_BLEND_OP_INVERT_RGB_EXT = 1000148024, - VK_BLEND_OP_LINEARDODGE_EXT = 1000148025, - VK_BLEND_OP_LINEARBURN_EXT = 1000148026, - VK_BLEND_OP_VIVIDLIGHT_EXT = 1000148027, - VK_BLEND_OP_LINEARLIGHT_EXT = 1000148028, - VK_BLEND_OP_PINLIGHT_EXT = 1000148029, - VK_BLEND_OP_HARDMIX_EXT = 1000148030, - VK_BLEND_OP_HSL_HUE_EXT = 1000148031, - VK_BLEND_OP_HSL_SATURATION_EXT = 1000148032, - VK_BLEND_OP_HSL_COLOR_EXT = 1000148033, - VK_BLEND_OP_HSL_LUMINOSITY_EXT = 1000148034, - VK_BLEND_OP_PLUS_EXT = 1000148035, - VK_BLEND_OP_PLUS_CLAMPED_EXT = 1000148036, - VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT = 1000148037, - VK_BLEND_OP_PLUS_DARKER_EXT = 1000148038, - VK_BLEND_OP_MINUS_EXT = 1000148039, - VK_BLEND_OP_MINUS_CLAMPED_EXT = 1000148040, - VK_BLEND_OP_CONTRAST_EXT = 1000148041, - VK_BLEND_OP_INVERT_OVG_EXT = 1000148042, - VK_BLEND_OP_RED_EXT = 1000148043, - VK_BLEND_OP_GREEN_EXT = 1000148044, - VK_BLEND_OP_BLUE_EXT = 1000148045, - VK_BLEND_OP_BEGIN_RANGE = VK_BLEND_OP_ADD, - VK_BLEND_OP_END_RANGE = VK_BLEND_OP_MAX, - VK_BLEND_OP_RANGE_SIZE = (VK_BLEND_OP_MAX - VK_BLEND_OP_ADD + 1), - VK_BLEND_OP_MAX_ENUM = 0x7FFFFFFF -} VkBlendOp; - -typedef enum VkDynamicState { - VK_DYNAMIC_STATE_VIEWPORT = 0, - VK_DYNAMIC_STATE_SCISSOR = 1, - VK_DYNAMIC_STATE_LINE_WIDTH = 2, - VK_DYNAMIC_STATE_DEPTH_BIAS = 3, - VK_DYNAMIC_STATE_BLEND_CONSTANTS = 4, - VK_DYNAMIC_STATE_DEPTH_BOUNDS = 5, - VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK = 6, - VK_DYNAMIC_STATE_STENCIL_WRITE_MASK = 7, - VK_DYNAMIC_STATE_STENCIL_REFERENCE = 8, - VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV = 1000087000, - VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT = 1000099000, - VK_DYNAMIC_STATE_BEGIN_RANGE = VK_DYNAMIC_STATE_VIEWPORT, - VK_DYNAMIC_STATE_END_RANGE = VK_DYNAMIC_STATE_STENCIL_REFERENCE, - VK_DYNAMIC_STATE_RANGE_SIZE = (VK_DYNAMIC_STATE_STENCIL_REFERENCE - VK_DYNAMIC_STATE_VIEWPORT + 1), - VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF -} VkDynamicState; - -typedef enum VkFilter { - VK_FILTER_NEAREST = 0, - VK_FILTER_LINEAR = 1, - VK_FILTER_CUBIC_IMG = 1000015000, - VK_FILTER_BEGIN_RANGE = VK_FILTER_NEAREST, - VK_FILTER_END_RANGE = VK_FILTER_LINEAR, - VK_FILTER_RANGE_SIZE = (VK_FILTER_LINEAR - VK_FILTER_NEAREST + 1), - VK_FILTER_MAX_ENUM = 0x7FFFFFFF -} VkFilter; - -typedef enum VkSamplerMipmapMode { - VK_SAMPLER_MIPMAP_MODE_NEAREST = 0, - VK_SAMPLER_MIPMAP_MODE_LINEAR = 1, - VK_SAMPLER_MIPMAP_MODE_BEGIN_RANGE = VK_SAMPLER_MIPMAP_MODE_NEAREST, - VK_SAMPLER_MIPMAP_MODE_END_RANGE = VK_SAMPLER_MIPMAP_MODE_LINEAR, - VK_SAMPLER_MIPMAP_MODE_RANGE_SIZE = (VK_SAMPLER_MIPMAP_MODE_LINEAR - VK_SAMPLER_MIPMAP_MODE_NEAREST + 1), - VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF -} VkSamplerMipmapMode; - -typedef 
enum VkSamplerAddressMode { - VK_SAMPLER_ADDRESS_MODE_REPEAT = 0, - VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1, - VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2, - VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3, - VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4, - VK_SAMPLER_ADDRESS_MODE_BEGIN_RANGE = VK_SAMPLER_ADDRESS_MODE_REPEAT, - VK_SAMPLER_ADDRESS_MODE_END_RANGE = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, - VK_SAMPLER_ADDRESS_MODE_RANGE_SIZE = (VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER - VK_SAMPLER_ADDRESS_MODE_REPEAT + 1), - VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF -} VkSamplerAddressMode; - -typedef enum VkBorderColor { - VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0, - VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1, - VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2, - VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3, - VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4, - VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5, - VK_BORDER_COLOR_BEGIN_RANGE = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, - VK_BORDER_COLOR_END_RANGE = VK_BORDER_COLOR_INT_OPAQUE_WHITE, - VK_BORDER_COLOR_RANGE_SIZE = (VK_BORDER_COLOR_INT_OPAQUE_WHITE - VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK + 1), - VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF -} VkBorderColor; - -typedef enum VkDescriptorType { - VK_DESCRIPTOR_TYPE_SAMPLER = 0, - VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER = 1, - VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE = 2, - VK_DESCRIPTOR_TYPE_STORAGE_IMAGE = 3, - VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER = 4, - VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER = 5, - VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER = 6, - VK_DESCRIPTOR_TYPE_STORAGE_BUFFER = 7, - VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC = 8, - VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC = 9, - VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10, - VK_DESCRIPTOR_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_TYPE_SAMPLER, - VK_DESCRIPTOR_TYPE_END_RANGE = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, - VK_DESCRIPTOR_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT - VK_DESCRIPTOR_TYPE_SAMPLER + 1), - VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkDescriptorType; - -typedef enum VkAttachmentLoadOp { - VK_ATTACHMENT_LOAD_OP_LOAD = 0, - VK_ATTACHMENT_LOAD_OP_CLEAR = 1, - VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2, - VK_ATTACHMENT_LOAD_OP_BEGIN_RANGE = VK_ATTACHMENT_LOAD_OP_LOAD, - VK_ATTACHMENT_LOAD_OP_END_RANGE = VK_ATTACHMENT_LOAD_OP_DONT_CARE, - VK_ATTACHMENT_LOAD_OP_RANGE_SIZE = (VK_ATTACHMENT_LOAD_OP_DONT_CARE - VK_ATTACHMENT_LOAD_OP_LOAD + 1), - VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF -} VkAttachmentLoadOp; - -typedef enum VkAttachmentStoreOp { - VK_ATTACHMENT_STORE_OP_STORE = 0, - VK_ATTACHMENT_STORE_OP_DONT_CARE = 1, - VK_ATTACHMENT_STORE_OP_BEGIN_RANGE = VK_ATTACHMENT_STORE_OP_STORE, - VK_ATTACHMENT_STORE_OP_END_RANGE = VK_ATTACHMENT_STORE_OP_DONT_CARE, - VK_ATTACHMENT_STORE_OP_RANGE_SIZE = (VK_ATTACHMENT_STORE_OP_DONT_CARE - VK_ATTACHMENT_STORE_OP_STORE + 1), - VK_ATTACHMENT_STORE_OP_MAX_ENUM = 0x7FFFFFFF -} VkAttachmentStoreOp; - -typedef enum VkPipelineBindPoint { - VK_PIPELINE_BIND_POINT_GRAPHICS = 0, - VK_PIPELINE_BIND_POINT_COMPUTE = 1, - VK_PIPELINE_BIND_POINT_BEGIN_RANGE = VK_PIPELINE_BIND_POINT_GRAPHICS, - VK_PIPELINE_BIND_POINT_END_RANGE = VK_PIPELINE_BIND_POINT_COMPUTE, - VK_PIPELINE_BIND_POINT_RANGE_SIZE = (VK_PIPELINE_BIND_POINT_COMPUTE - VK_PIPELINE_BIND_POINT_GRAPHICS + 1), - VK_PIPELINE_BIND_POINT_MAX_ENUM = 0x7FFFFFFF -} VkPipelineBindPoint; - -typedef enum VkCommandBufferLevel { - VK_COMMAND_BUFFER_LEVEL_PRIMARY = 0, - VK_COMMAND_BUFFER_LEVEL_SECONDARY = 1, - VK_COMMAND_BUFFER_LEVEL_BEGIN_RANGE = VK_COMMAND_BUFFER_LEVEL_PRIMARY, - 
VK_COMMAND_BUFFER_LEVEL_END_RANGE = VK_COMMAND_BUFFER_LEVEL_SECONDARY, - VK_COMMAND_BUFFER_LEVEL_RANGE_SIZE = (VK_COMMAND_BUFFER_LEVEL_SECONDARY - VK_COMMAND_BUFFER_LEVEL_PRIMARY + 1), - VK_COMMAND_BUFFER_LEVEL_MAX_ENUM = 0x7FFFFFFF -} VkCommandBufferLevel; - -typedef enum VkIndexType { - VK_INDEX_TYPE_UINT16 = 0, - VK_INDEX_TYPE_UINT32 = 1, - VK_INDEX_TYPE_BEGIN_RANGE = VK_INDEX_TYPE_UINT16, - VK_INDEX_TYPE_END_RANGE = VK_INDEX_TYPE_UINT32, - VK_INDEX_TYPE_RANGE_SIZE = (VK_INDEX_TYPE_UINT32 - VK_INDEX_TYPE_UINT16 + 1), - VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkIndexType; - -typedef enum VkSubpassContents { - VK_SUBPASS_CONTENTS_INLINE = 0, - VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1, - VK_SUBPASS_CONTENTS_BEGIN_RANGE = VK_SUBPASS_CONTENTS_INLINE, - VK_SUBPASS_CONTENTS_END_RANGE = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS, - VK_SUBPASS_CONTENTS_RANGE_SIZE = (VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS - VK_SUBPASS_CONTENTS_INLINE + 1), - VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF -} VkSubpassContents; - -typedef enum VkObjectType { - VK_OBJECT_TYPE_UNKNOWN = 0, - VK_OBJECT_TYPE_INSTANCE = 1, - VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2, - VK_OBJECT_TYPE_DEVICE = 3, - VK_OBJECT_TYPE_QUEUE = 4, - VK_OBJECT_TYPE_SEMAPHORE = 5, - VK_OBJECT_TYPE_COMMAND_BUFFER = 6, - VK_OBJECT_TYPE_FENCE = 7, - VK_OBJECT_TYPE_DEVICE_MEMORY = 8, - VK_OBJECT_TYPE_BUFFER = 9, - VK_OBJECT_TYPE_IMAGE = 10, - VK_OBJECT_TYPE_EVENT = 11, - VK_OBJECT_TYPE_QUERY_POOL = 12, - VK_OBJECT_TYPE_BUFFER_VIEW = 13, - VK_OBJECT_TYPE_IMAGE_VIEW = 14, - VK_OBJECT_TYPE_SHADER_MODULE = 15, - VK_OBJECT_TYPE_PIPELINE_CACHE = 16, - VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17, - VK_OBJECT_TYPE_RENDER_PASS = 18, - VK_OBJECT_TYPE_PIPELINE = 19, - VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20, - VK_OBJECT_TYPE_SAMPLER = 21, - VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22, - VK_OBJECT_TYPE_DESCRIPTOR_SET = 23, - VK_OBJECT_TYPE_FRAMEBUFFER = 24, - VK_OBJECT_TYPE_COMMAND_POOL = 25, - VK_OBJECT_TYPE_SURFACE_KHR = 1000000000, - VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000, - VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000, - VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001, - VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000, - VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = 1000085000, - VK_OBJECT_TYPE_OBJECT_TABLE_NVX = 1000086000, - VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX = 1000086001, - VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = 1000156000, - VK_OBJECT_TYPE_BEGIN_RANGE = VK_OBJECT_TYPE_UNKNOWN, - VK_OBJECT_TYPE_END_RANGE = VK_OBJECT_TYPE_COMMAND_POOL, - VK_OBJECT_TYPE_RANGE_SIZE = (VK_OBJECT_TYPE_COMMAND_POOL - VK_OBJECT_TYPE_UNKNOWN + 1), - VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkObjectType; - -typedef VkFlags VkInstanceCreateFlags; - -typedef enum VkFormatFeatureFlagBits { - VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001, - VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT = 0x00000002, - VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004, - VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008, - VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT = 0x00000010, - VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020, - VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT = 0x00000040, - VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT = 0x00000080, - VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100, - VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200, - VK_FORMAT_FEATURE_BLIT_SRC_BIT = 0x00000400, - VK_FORMAT_FEATURE_BLIT_DST_BIT = 0x00000800, - VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000, - VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = 
0x00002000, - VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = 0x00004000, - VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = 0x00008000, - VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = 0x00010000, - VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = 0x00020000, - VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = 0x00040000, - VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = 0x00080000, - VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = 0x00100000, - VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = 0x00200000, - VK_FORMAT_FEATURE_DISJOINT_BIT_KHR = 0x00400000, - VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR = 0x00800000, - VK_FORMAT_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkFormatFeatureFlagBits; -typedef VkFlags VkFormatFeatureFlags; - -typedef enum VkImageUsageFlagBits { - VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001, - VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002, - VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004, - VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008, - VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010, - VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020, - VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040, - VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080, - VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkImageUsageFlagBits; -typedef VkFlags VkImageUsageFlags; - -typedef enum VkImageCreateFlagBits { - VK_IMAGE_CREATE_SPARSE_BINDING_BIT = 0x00000001, - VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, - VK_IMAGE_CREATE_SPARSE_ALIASED_BIT = 0x00000004, - VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT = 0x00000008, - VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT = 0x00000010, - VK_IMAGE_CREATE_BIND_SFR_BIT_KHX = 0x00000040, - VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR = 0x00000020, - VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR = 0x00000080, - VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR = 0x00000100, - VK_IMAGE_CREATE_PROTECTED_BIT_KHR = 0x00000800, - VK_IMAGE_CREATE_DISJOINT_BIT_KHR = 0x00000200, - VK_IMAGE_CREATE_ALIAS_BIT_KHR = 0x00000400, - VK_IMAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkImageCreateFlagBits; -typedef VkFlags VkImageCreateFlags; - -typedef enum VkSampleCountFlagBits { - VK_SAMPLE_COUNT_1_BIT = 0x00000001, - VK_SAMPLE_COUNT_2_BIT = 0x00000002, - VK_SAMPLE_COUNT_4_BIT = 0x00000004, - VK_SAMPLE_COUNT_8_BIT = 0x00000008, - VK_SAMPLE_COUNT_16_BIT = 0x00000010, - VK_SAMPLE_COUNT_32_BIT = 0x00000020, - VK_SAMPLE_COUNT_64_BIT = 0x00000040, - VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkSampleCountFlagBits; -typedef VkFlags VkSampleCountFlags; - -typedef enum VkQueueFlagBits { - VK_QUEUE_GRAPHICS_BIT = 0x00000001, - VK_QUEUE_COMPUTE_BIT = 0x00000002, - VK_QUEUE_TRANSFER_BIT = 0x00000004, - VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008, - VK_QUEUE_PROTECTED_BIT_KHR = 0x00000010, - VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkQueueFlagBits; -typedef VkFlags VkQueueFlags; - -typedef enum VkMemoryPropertyFlagBits { - VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT = 0x00000001, - VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT = 0x00000002, - VK_MEMORY_PROPERTY_HOST_COHERENT_BIT = 0x00000004, - VK_MEMORY_PROPERTY_HOST_CACHED_BIT = 0x00000008, - VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT = 0x00000010, - VK_MEMORY_PROPERTY_PROTECTED_BIT_KHR = 0x00000020, - VK_MEMORY_PROPERTY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkMemoryPropertyFlagBits; -typedef VkFlags VkMemoryPropertyFlags; - -typedef enum VkMemoryHeapFlagBits { - VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 
0x00000001, - VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHX = 0x00000002, - VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkMemoryHeapFlagBits; -typedef VkFlags VkMemoryHeapFlags; -typedef VkFlags VkDeviceCreateFlags; - -typedef enum VkDeviceQueueCreateFlagBits { - VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT_KHR = 0x00000001, - VK_DEVICE_QUEUE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkDeviceQueueCreateFlagBits; -typedef VkFlags VkDeviceQueueCreateFlags; - -typedef enum VkPipelineStageFlagBits { - VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT = 0x00000001, - VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT = 0x00000002, - VK_PIPELINE_STAGE_VERTEX_INPUT_BIT = 0x00000004, - VK_PIPELINE_STAGE_VERTEX_SHADER_BIT = 0x00000008, - VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010, - VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020, - VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT = 0x00000040, - VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT = 0x00000080, - VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT = 0x00000100, - VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT = 0x00000200, - VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400, - VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT = 0x00000800, - VK_PIPELINE_STAGE_TRANSFER_BIT = 0x00001000, - VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT = 0x00002000, - VK_PIPELINE_STAGE_HOST_BIT = 0x00004000, - VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT = 0x00008000, - VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000, - VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX = 0x00020000, - VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkPipelineStageFlagBits; -typedef VkFlags VkPipelineStageFlags; -typedef VkFlags VkMemoryMapFlags; - -typedef enum VkImageAspectFlagBits { - VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001, - VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002, - VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, - VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008, - VK_IMAGE_ASPECT_PLANE_0_BIT_KHR = 0x00000010, - VK_IMAGE_ASPECT_PLANE_1_BIT_KHR = 0x00000020, - VK_IMAGE_ASPECT_PLANE_2_BIT_KHR = 0x00000040, - VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkImageAspectFlagBits; -typedef VkFlags VkImageAspectFlags; - -typedef enum VkSparseImageFormatFlagBits { - VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT = 0x00000001, - VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT = 0x00000002, - VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT = 0x00000004, - VK_SPARSE_IMAGE_FORMAT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkSparseImageFormatFlagBits; -typedef VkFlags VkSparseImageFormatFlags; - -typedef enum VkSparseMemoryBindFlagBits { - VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001, - VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkSparseMemoryBindFlagBits; -typedef VkFlags VkSparseMemoryBindFlags; - -typedef enum VkFenceCreateFlagBits { - VK_FENCE_CREATE_SIGNALED_BIT = 0x00000001, - VK_FENCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkFenceCreateFlagBits; -typedef VkFlags VkFenceCreateFlags; -typedef VkFlags VkSemaphoreCreateFlags; -typedef VkFlags VkEventCreateFlags; -typedef VkFlags VkQueryPoolCreateFlags; - -typedef enum VkQueryPipelineStatisticFlagBits { - VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT = 0x00000001, - VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT = 0x00000002, - VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT = 0x00000004, - VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT = 0x00000008, - VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT = 0x00000010, - VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT = 0x00000020, - VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT = 
0x00000040, - VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT = 0x00000080, - VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT = 0x00000100, - VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT = 0x00000200, - VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT = 0x00000400, - VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkQueryPipelineStatisticFlagBits; -typedef VkFlags VkQueryPipelineStatisticFlags; - -typedef enum VkQueryResultFlagBits { - VK_QUERY_RESULT_64_BIT = 0x00000001, - VK_QUERY_RESULT_WAIT_BIT = 0x00000002, - VK_QUERY_RESULT_WITH_AVAILABILITY_BIT = 0x00000004, - VK_QUERY_RESULT_PARTIAL_BIT = 0x00000008, - VK_QUERY_RESULT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkQueryResultFlagBits; -typedef VkFlags VkQueryResultFlags; - -typedef enum VkBufferCreateFlagBits { - VK_BUFFER_CREATE_SPARSE_BINDING_BIT = 0x00000001, - VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, - VK_BUFFER_CREATE_SPARSE_ALIASED_BIT = 0x00000004, - VK_BUFFER_CREATE_PROTECTED_BIT_KHR = 0x00000008, - VK_BUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkBufferCreateFlagBits; -typedef VkFlags VkBufferCreateFlags; - -typedef enum VkBufferUsageFlagBits { - VK_BUFFER_USAGE_TRANSFER_SRC_BIT = 0x00000001, - VK_BUFFER_USAGE_TRANSFER_DST_BIT = 0x00000002, - VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000004, - VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT = 0x00000008, - VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT = 0x00000010, - VK_BUFFER_USAGE_STORAGE_BUFFER_BIT = 0x00000020, - VK_BUFFER_USAGE_INDEX_BUFFER_BIT = 0x00000040, - VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080, - VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100, - VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkBufferUsageFlagBits; -typedef VkFlags VkBufferUsageFlags; -typedef VkFlags VkBufferViewCreateFlags; -typedef VkFlags VkImageViewCreateFlags; -typedef VkFlags VkShaderModuleCreateFlags; -typedef VkFlags VkPipelineCacheCreateFlags; - -typedef enum VkPipelineCreateFlagBits { - VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT = 0x00000001, - VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 0x00000002, - VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004, - VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHX = 0x00000008, - VK_PIPELINE_CREATE_DISPATCH_BASE_KHX = 0x00000010, - VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkPipelineCreateFlagBits; -typedef VkFlags VkPipelineCreateFlags; -typedef VkFlags VkPipelineShaderStageCreateFlags; - -typedef enum VkShaderStageFlagBits { - VK_SHADER_STAGE_VERTEX_BIT = 0x00000001, - VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT = 0x00000002, - VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT = 0x00000004, - VK_SHADER_STAGE_GEOMETRY_BIT = 0x00000008, - VK_SHADER_STAGE_FRAGMENT_BIT = 0x00000010, - VK_SHADER_STAGE_COMPUTE_BIT = 0x00000020, - VK_SHADER_STAGE_ALL_GRAPHICS = 0x0000001F, - VK_SHADER_STAGE_ALL = 0x7FFFFFFF, - VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkShaderStageFlagBits; -typedef VkFlags VkPipelineVertexInputStateCreateFlags; -typedef VkFlags VkPipelineInputAssemblyStateCreateFlags; -typedef VkFlags VkPipelineTessellationStateCreateFlags; -typedef VkFlags VkPipelineViewportStateCreateFlags; -typedef VkFlags VkPipelineRasterizationStateCreateFlags; - -typedef enum VkCullModeFlagBits { - VK_CULL_MODE_NONE = 0, - VK_CULL_MODE_FRONT_BIT = 0x00000001, - VK_CULL_MODE_BACK_BIT = 0x00000002, - VK_CULL_MODE_FRONT_AND_BACK = 0x00000003, - VK_CULL_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkCullModeFlagBits; -typedef VkFlags VkCullModeFlags; -typedef 
VkFlags VkPipelineMultisampleStateCreateFlags; -typedef VkFlags VkPipelineDepthStencilStateCreateFlags; -typedef VkFlags VkPipelineColorBlendStateCreateFlags; - -typedef enum VkColorComponentFlagBits { - VK_COLOR_COMPONENT_R_BIT = 0x00000001, - VK_COLOR_COMPONENT_G_BIT = 0x00000002, - VK_COLOR_COMPONENT_B_BIT = 0x00000004, - VK_COLOR_COMPONENT_A_BIT = 0x00000008, - VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkColorComponentFlagBits; -typedef VkFlags VkColorComponentFlags; -typedef VkFlags VkPipelineDynamicStateCreateFlags; -typedef VkFlags VkPipelineLayoutCreateFlags; -typedef VkFlags VkShaderStageFlags; -typedef VkFlags VkSamplerCreateFlags; - -typedef enum VkDescriptorSetLayoutCreateFlagBits { - VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001, - VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkDescriptorSetLayoutCreateFlagBits; -typedef VkFlags VkDescriptorSetLayoutCreateFlags; - -typedef enum VkDescriptorPoolCreateFlagBits { - VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001, - VK_DESCRIPTOR_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkDescriptorPoolCreateFlagBits; -typedef VkFlags VkDescriptorPoolCreateFlags; -typedef VkFlags VkDescriptorPoolResetFlags; -typedef VkFlags VkFramebufferCreateFlags; -typedef VkFlags VkRenderPassCreateFlags; - -typedef enum VkAttachmentDescriptionFlagBits { - VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT = 0x00000001, - VK_ATTACHMENT_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkAttachmentDescriptionFlagBits; -typedef VkFlags VkAttachmentDescriptionFlags; - -typedef enum VkSubpassDescriptionFlagBits { - VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX = 0x00000001, - VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002, - VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkSubpassDescriptionFlagBits; -typedef VkFlags VkSubpassDescriptionFlags; - -typedef enum VkAccessFlagBits { - VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001, - VK_ACCESS_INDEX_READ_BIT = 0x00000002, - VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004, - VK_ACCESS_UNIFORM_READ_BIT = 0x00000008, - VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010, - VK_ACCESS_SHADER_READ_BIT = 0x00000020, - VK_ACCESS_SHADER_WRITE_BIT = 0x00000040, - VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080, - VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100, - VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200, - VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400, - VK_ACCESS_TRANSFER_READ_BIT = 0x00000800, - VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000, - VK_ACCESS_HOST_READ_BIT = 0x00002000, - VK_ACCESS_HOST_WRITE_BIT = 0x00004000, - VK_ACCESS_MEMORY_READ_BIT = 0x00008000, - VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000, - VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX = 0x00020000, - VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX = 0x00040000, - VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000, - VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkAccessFlagBits; -typedef VkFlags VkAccessFlags; - -typedef enum VkDependencyFlagBits { - VK_DEPENDENCY_BY_REGION_BIT = 0x00000001, - VK_DEPENDENCY_VIEW_LOCAL_BIT_KHX = 0x00000002, - VK_DEPENDENCY_DEVICE_GROUP_BIT_KHX = 0x00000004, - VK_DEPENDENCY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkDependencyFlagBits; -typedef VkFlags VkDependencyFlags; - -typedef enum VkCommandPoolCreateFlagBits { - VK_COMMAND_POOL_CREATE_TRANSIENT_BIT = 0x00000001, - VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT = 0x00000002, - VK_COMMAND_POOL_CREATE_PROTECTED_BIT_KHR = 0x00000004, - 
VK_COMMAND_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkCommandPoolCreateFlagBits; -typedef VkFlags VkCommandPoolCreateFlags; - -typedef enum VkCommandPoolResetFlagBits { - VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT = 0x00000001, - VK_COMMAND_POOL_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkCommandPoolResetFlagBits; -typedef VkFlags VkCommandPoolResetFlags; - -typedef enum VkCommandBufferUsageFlagBits { - VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT = 0x00000001, - VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT = 0x00000002, - VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT = 0x00000004, - VK_COMMAND_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkCommandBufferUsageFlagBits; -typedef VkFlags VkCommandBufferUsageFlags; - -typedef enum VkQueryControlFlagBits { - VK_QUERY_CONTROL_PRECISE_BIT = 0x00000001, - VK_QUERY_CONTROL_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkQueryControlFlagBits; -typedef VkFlags VkQueryControlFlags; - -typedef enum VkCommandBufferResetFlagBits { - VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT = 0x00000001, - VK_COMMAND_BUFFER_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkCommandBufferResetFlagBits; -typedef VkFlags VkCommandBufferResetFlags; - -typedef enum VkStencilFaceFlagBits { - VK_STENCIL_FACE_FRONT_BIT = 0x00000001, - VK_STENCIL_FACE_BACK_BIT = 0x00000002, - VK_STENCIL_FRONT_AND_BACK = 0x00000003, - VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkStencilFaceFlagBits; -typedef VkFlags VkStencilFaceFlags; - -typedef struct VkApplicationInfo { - VkStructureType sType; - const void* pNext; - const char* pApplicationName; - uint32_t applicationVersion; - const char* pEngineName; - uint32_t engineVersion; - uint32_t apiVersion; -} VkApplicationInfo; - -typedef struct VkInstanceCreateInfo { - VkStructureType sType; - const void* pNext; - VkInstanceCreateFlags flags; - const VkApplicationInfo* pApplicationInfo; - uint32_t enabledLayerCount; - const char* const* ppEnabledLayerNames; - uint32_t enabledExtensionCount; - const char* const* ppEnabledExtensionNames; -} VkInstanceCreateInfo; - -typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)( - void* pUserData, - size_t size, - size_t alignment, - VkSystemAllocationScope allocationScope); - -typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)( - void* pUserData, - void* pOriginal, - size_t size, - size_t alignment, - VkSystemAllocationScope allocationScope); - -typedef void (VKAPI_PTR *PFN_vkFreeFunction)( - void* pUserData, - void* pMemory); - -typedef void (VKAPI_PTR *PFN_vkInternalAllocationNotification)( - void* pUserData, - size_t size, - VkInternalAllocationType allocationType, - VkSystemAllocationScope allocationScope); - -typedef void (VKAPI_PTR *PFN_vkInternalFreeNotification)( - void* pUserData, - size_t size, - VkInternalAllocationType allocationType, - VkSystemAllocationScope allocationScope); - -typedef struct VkAllocationCallbacks { - void* pUserData; - PFN_vkAllocationFunction pfnAllocation; - PFN_vkReallocationFunction pfnReallocation; - PFN_vkFreeFunction pfnFree; - PFN_vkInternalAllocationNotification pfnInternalAllocation; - PFN_vkInternalFreeNotification pfnInternalFree; -} VkAllocationCallbacks; - -typedef struct VkPhysicalDeviceFeatures { - VkBool32 robustBufferAccess; - VkBool32 fullDrawIndexUint32; - VkBool32 imageCubeArray; - VkBool32 independentBlend; - VkBool32 geometryShader; - VkBool32 tessellationShader; - VkBool32 sampleRateShading; - VkBool32 dualSrcBlend; - VkBool32 logicOp; - VkBool32 multiDrawIndirect; - VkBool32 drawIndirectFirstInstance; - VkBool32 depthClamp; - VkBool32 
depthBiasClamp; - VkBool32 fillModeNonSolid; - VkBool32 depthBounds; - VkBool32 wideLines; - VkBool32 largePoints; - VkBool32 alphaToOne; - VkBool32 multiViewport; - VkBool32 samplerAnisotropy; - VkBool32 textureCompressionETC2; - VkBool32 textureCompressionASTC_LDR; - VkBool32 textureCompressionBC; - VkBool32 occlusionQueryPrecise; - VkBool32 pipelineStatisticsQuery; - VkBool32 vertexPipelineStoresAndAtomics; - VkBool32 fragmentStoresAndAtomics; - VkBool32 shaderTessellationAndGeometryPointSize; - VkBool32 shaderImageGatherExtended; - VkBool32 shaderStorageImageExtendedFormats; - VkBool32 shaderStorageImageMultisample; - VkBool32 shaderStorageImageReadWithoutFormat; - VkBool32 shaderStorageImageWriteWithoutFormat; - VkBool32 shaderUniformBufferArrayDynamicIndexing; - VkBool32 shaderSampledImageArrayDynamicIndexing; - VkBool32 shaderStorageBufferArrayDynamicIndexing; - VkBool32 shaderStorageImageArrayDynamicIndexing; - VkBool32 shaderClipDistance; - VkBool32 shaderCullDistance; - VkBool32 shaderFloat64; - VkBool32 shaderInt64; - VkBool32 shaderInt16; - VkBool32 shaderResourceResidency; - VkBool32 shaderResourceMinLod; - VkBool32 sparseBinding; - VkBool32 sparseResidencyBuffer; - VkBool32 sparseResidencyImage2D; - VkBool32 sparseResidencyImage3D; - VkBool32 sparseResidency2Samples; - VkBool32 sparseResidency4Samples; - VkBool32 sparseResidency8Samples; - VkBool32 sparseResidency16Samples; - VkBool32 sparseResidencyAliased; - VkBool32 variableMultisampleRate; - VkBool32 inheritedQueries; -} VkPhysicalDeviceFeatures; - -typedef struct VkFormatProperties { - VkFormatFeatureFlags linearTilingFeatures; - VkFormatFeatureFlags optimalTilingFeatures; - VkFormatFeatureFlags bufferFeatures; -} VkFormatProperties; - -typedef struct VkExtent3D { - uint32_t width; - uint32_t height; - uint32_t depth; -} VkExtent3D; - -typedef struct VkImageFormatProperties { - VkExtent3D maxExtent; - uint32_t maxMipLevels; - uint32_t maxArrayLayers; - VkSampleCountFlags sampleCounts; - VkDeviceSize maxResourceSize; -} VkImageFormatProperties; - -typedef struct VkPhysicalDeviceLimits { - uint32_t maxImageDimension1D; - uint32_t maxImageDimension2D; - uint32_t maxImageDimension3D; - uint32_t maxImageDimensionCube; - uint32_t maxImageArrayLayers; - uint32_t maxTexelBufferElements; - uint32_t maxUniformBufferRange; - uint32_t maxStorageBufferRange; - uint32_t maxPushConstantsSize; - uint32_t maxMemoryAllocationCount; - uint32_t maxSamplerAllocationCount; - VkDeviceSize bufferImageGranularity; - VkDeviceSize sparseAddressSpaceSize; - uint32_t maxBoundDescriptorSets; - uint32_t maxPerStageDescriptorSamplers; - uint32_t maxPerStageDescriptorUniformBuffers; - uint32_t maxPerStageDescriptorStorageBuffers; - uint32_t maxPerStageDescriptorSampledImages; - uint32_t maxPerStageDescriptorStorageImages; - uint32_t maxPerStageDescriptorInputAttachments; - uint32_t maxPerStageResources; - uint32_t maxDescriptorSetSamplers; - uint32_t maxDescriptorSetUniformBuffers; - uint32_t maxDescriptorSetUniformBuffersDynamic; - uint32_t maxDescriptorSetStorageBuffers; - uint32_t maxDescriptorSetStorageBuffersDynamic; - uint32_t maxDescriptorSetSampledImages; - uint32_t maxDescriptorSetStorageImages; - uint32_t maxDescriptorSetInputAttachments; - uint32_t maxVertexInputAttributes; - uint32_t maxVertexInputBindings; - uint32_t maxVertexInputAttributeOffset; - uint32_t maxVertexInputBindingStride; - uint32_t maxVertexOutputComponents; - uint32_t maxTessellationGenerationLevel; - uint32_t maxTessellationPatchSize; - uint32_t 
maxTessellationControlPerVertexInputComponents; - uint32_t maxTessellationControlPerVertexOutputComponents; - uint32_t maxTessellationControlPerPatchOutputComponents; - uint32_t maxTessellationControlTotalOutputComponents; - uint32_t maxTessellationEvaluationInputComponents; - uint32_t maxTessellationEvaluationOutputComponents; - uint32_t maxGeometryShaderInvocations; - uint32_t maxGeometryInputComponents; - uint32_t maxGeometryOutputComponents; - uint32_t maxGeometryOutputVertices; - uint32_t maxGeometryTotalOutputComponents; - uint32_t maxFragmentInputComponents; - uint32_t maxFragmentOutputAttachments; - uint32_t maxFragmentDualSrcAttachments; - uint32_t maxFragmentCombinedOutputResources; - uint32_t maxComputeSharedMemorySize; - uint32_t maxComputeWorkGroupCount[3]; - uint32_t maxComputeWorkGroupInvocations; - uint32_t maxComputeWorkGroupSize[3]; - uint32_t subPixelPrecisionBits; - uint32_t subTexelPrecisionBits; - uint32_t mipmapPrecisionBits; - uint32_t maxDrawIndexedIndexValue; - uint32_t maxDrawIndirectCount; - float maxSamplerLodBias; - float maxSamplerAnisotropy; - uint32_t maxViewports; - uint32_t maxViewportDimensions[2]; - float viewportBoundsRange[2]; - uint32_t viewportSubPixelBits; - size_t minMemoryMapAlignment; - VkDeviceSize minTexelBufferOffsetAlignment; - VkDeviceSize minUniformBufferOffsetAlignment; - VkDeviceSize minStorageBufferOffsetAlignment; - int32_t minTexelOffset; - uint32_t maxTexelOffset; - int32_t minTexelGatherOffset; - uint32_t maxTexelGatherOffset; - float minInterpolationOffset; - float maxInterpolationOffset; - uint32_t subPixelInterpolationOffsetBits; - uint32_t maxFramebufferWidth; - uint32_t maxFramebufferHeight; - uint32_t maxFramebufferLayers; - VkSampleCountFlags framebufferColorSampleCounts; - VkSampleCountFlags framebufferDepthSampleCounts; - VkSampleCountFlags framebufferStencilSampleCounts; - VkSampleCountFlags framebufferNoAttachmentsSampleCounts; - uint32_t maxColorAttachments; - VkSampleCountFlags sampledImageColorSampleCounts; - VkSampleCountFlags sampledImageIntegerSampleCounts; - VkSampleCountFlags sampledImageDepthSampleCounts; - VkSampleCountFlags sampledImageStencilSampleCounts; - VkSampleCountFlags storageImageSampleCounts; - uint32_t maxSampleMaskWords; - VkBool32 timestampComputeAndGraphics; - float timestampPeriod; - uint32_t maxClipDistances; - uint32_t maxCullDistances; - uint32_t maxCombinedClipAndCullDistances; - uint32_t discreteQueuePriorities; - float pointSizeRange[2]; - float lineWidthRange[2]; - float pointSizeGranularity; - float lineWidthGranularity; - VkBool32 strictLines; - VkBool32 standardSampleLocations; - VkDeviceSize optimalBufferCopyOffsetAlignment; - VkDeviceSize optimalBufferCopyRowPitchAlignment; - VkDeviceSize nonCoherentAtomSize; -} VkPhysicalDeviceLimits; - -typedef struct VkPhysicalDeviceSparseProperties { - VkBool32 residencyStandard2DBlockShape; - VkBool32 residencyStandard2DMultisampleBlockShape; - VkBool32 residencyStandard3DBlockShape; - VkBool32 residencyAlignedMipSize; - VkBool32 residencyNonResidentStrict; -} VkPhysicalDeviceSparseProperties; - -typedef struct VkPhysicalDeviceProperties { - uint32_t apiVersion; - uint32_t driverVersion; - uint32_t vendorID; - uint32_t deviceID; - VkPhysicalDeviceType deviceType; - char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]; - uint8_t pipelineCacheUUID[VK_UUID_SIZE]; - VkPhysicalDeviceLimits limits; - VkPhysicalDeviceSparseProperties sparseProperties; -} VkPhysicalDeviceProperties; - -typedef struct VkQueueFamilyProperties { - VkQueueFlags queueFlags; - 
uint32_t queueCount; - uint32_t timestampValidBits; - VkExtent3D minImageTransferGranularity; -} VkQueueFamilyProperties; - -typedef struct VkMemoryType { - VkMemoryPropertyFlags propertyFlags; - uint32_t heapIndex; -} VkMemoryType; - -typedef struct VkMemoryHeap { - VkDeviceSize size; - VkMemoryHeapFlags flags; -} VkMemoryHeap; - -typedef struct VkPhysicalDeviceMemoryProperties { - uint32_t memoryTypeCount; - VkMemoryType memoryTypes[VK_MAX_MEMORY_TYPES]; - uint32_t memoryHeapCount; - VkMemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS]; -} VkPhysicalDeviceMemoryProperties; - -typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void); -typedef struct VkDeviceQueueCreateInfo { - VkStructureType sType; - const void* pNext; - VkDeviceQueueCreateFlags flags; - uint32_t queueFamilyIndex; - uint32_t queueCount; - const float* pQueuePriorities; -} VkDeviceQueueCreateInfo; - -typedef struct VkDeviceCreateInfo { - VkStructureType sType; - const void* pNext; - VkDeviceCreateFlags flags; - uint32_t queueCreateInfoCount; - const VkDeviceQueueCreateInfo* pQueueCreateInfos; - uint32_t enabledLayerCount; - const char* const* ppEnabledLayerNames; - uint32_t enabledExtensionCount; - const char* const* ppEnabledExtensionNames; - const VkPhysicalDeviceFeatures* pEnabledFeatures; -} VkDeviceCreateInfo; - -typedef struct VkExtensionProperties { - char extensionName[VK_MAX_EXTENSION_NAME_SIZE]; - uint32_t specVersion; -} VkExtensionProperties; - -typedef struct VkLayerProperties { - char layerName[VK_MAX_EXTENSION_NAME_SIZE]; - uint32_t specVersion; - uint32_t implementationVersion; - char description[VK_MAX_DESCRIPTION_SIZE]; -} VkLayerProperties; - -typedef struct VkSubmitInfo { - VkStructureType sType; - const void* pNext; - uint32_t waitSemaphoreCount; - const VkSemaphore* pWaitSemaphores; - const VkPipelineStageFlags* pWaitDstStageMask; - uint32_t commandBufferCount; - const VkCommandBuffer* pCommandBuffers; - uint32_t signalSemaphoreCount; - const VkSemaphore* pSignalSemaphores; -} VkSubmitInfo; - -typedef struct VkMemoryAllocateInfo { - VkStructureType sType; - const void* pNext; - VkDeviceSize allocationSize; - uint32_t memoryTypeIndex; -} VkMemoryAllocateInfo; - -typedef struct VkMappedMemoryRange { - VkStructureType sType; - const void* pNext; - VkDeviceMemory memory; - VkDeviceSize offset; - VkDeviceSize size; -} VkMappedMemoryRange; - -typedef struct VkMemoryRequirements { - VkDeviceSize size; - VkDeviceSize alignment; - uint32_t memoryTypeBits; -} VkMemoryRequirements; - -typedef struct VkSparseImageFormatProperties { - VkImageAspectFlags aspectMask; - VkExtent3D imageGranularity; - VkSparseImageFormatFlags flags; -} VkSparseImageFormatProperties; - -typedef struct VkSparseImageMemoryRequirements { - VkSparseImageFormatProperties formatProperties; - uint32_t imageMipTailFirstLod; - VkDeviceSize imageMipTailSize; - VkDeviceSize imageMipTailOffset; - VkDeviceSize imageMipTailStride; -} VkSparseImageMemoryRequirements; - -typedef struct VkSparseMemoryBind { - VkDeviceSize resourceOffset; - VkDeviceSize size; - VkDeviceMemory memory; - VkDeviceSize memoryOffset; - VkSparseMemoryBindFlags flags; -} VkSparseMemoryBind; - -typedef struct VkSparseBufferMemoryBindInfo { - VkBuffer buffer; - uint32_t bindCount; - const VkSparseMemoryBind* pBinds; -} VkSparseBufferMemoryBindInfo; - -typedef struct VkSparseImageOpaqueMemoryBindInfo { - VkImage image; - uint32_t bindCount; - const VkSparseMemoryBind* pBinds; -} VkSparseImageOpaqueMemoryBindInfo; - -typedef struct VkImageSubresource { - VkImageAspectFlags aspectMask; - 
uint32_t mipLevel; - uint32_t arrayLayer; -} VkImageSubresource; - -typedef struct VkOffset3D { - int32_t x; - int32_t y; - int32_t z; -} VkOffset3D; - -typedef struct VkSparseImageMemoryBind { - VkImageSubresource subresource; - VkOffset3D offset; - VkExtent3D extent; - VkDeviceMemory memory; - VkDeviceSize memoryOffset; - VkSparseMemoryBindFlags flags; -} VkSparseImageMemoryBind; - -typedef struct VkSparseImageMemoryBindInfo { - VkImage image; - uint32_t bindCount; - const VkSparseImageMemoryBind* pBinds; -} VkSparseImageMemoryBindInfo; - -typedef struct VkBindSparseInfo { - VkStructureType sType; - const void* pNext; - uint32_t waitSemaphoreCount; - const VkSemaphore* pWaitSemaphores; - uint32_t bufferBindCount; - const VkSparseBufferMemoryBindInfo* pBufferBinds; - uint32_t imageOpaqueBindCount; - const VkSparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds; - uint32_t imageBindCount; - const VkSparseImageMemoryBindInfo* pImageBinds; - uint32_t signalSemaphoreCount; - const VkSemaphore* pSignalSemaphores; -} VkBindSparseInfo; - -typedef struct VkFenceCreateInfo { - VkStructureType sType; - const void* pNext; - VkFenceCreateFlags flags; -} VkFenceCreateInfo; - -typedef struct VkSemaphoreCreateInfo { - VkStructureType sType; - const void* pNext; - VkSemaphoreCreateFlags flags; -} VkSemaphoreCreateInfo; - -typedef struct VkEventCreateInfo { - VkStructureType sType; - const void* pNext; - VkEventCreateFlags flags; -} VkEventCreateInfo; - -typedef struct VkQueryPoolCreateInfo { - VkStructureType sType; - const void* pNext; - VkQueryPoolCreateFlags flags; - VkQueryType queryType; - uint32_t queryCount; - VkQueryPipelineStatisticFlags pipelineStatistics; -} VkQueryPoolCreateInfo; - -typedef struct VkBufferCreateInfo { - VkStructureType sType; - const void* pNext; - VkBufferCreateFlags flags; - VkDeviceSize size; - VkBufferUsageFlags usage; - VkSharingMode sharingMode; - uint32_t queueFamilyIndexCount; - const uint32_t* pQueueFamilyIndices; -} VkBufferCreateInfo; - -typedef struct VkBufferViewCreateInfo { - VkStructureType sType; - const void* pNext; - VkBufferViewCreateFlags flags; - VkBuffer buffer; - VkFormat format; - VkDeviceSize offset; - VkDeviceSize range; -} VkBufferViewCreateInfo; - -typedef struct VkImageCreateInfo { - VkStructureType sType; - const void* pNext; - VkImageCreateFlags flags; - VkImageType imageType; - VkFormat format; - VkExtent3D extent; - uint32_t mipLevels; - uint32_t arrayLayers; - VkSampleCountFlagBits samples; - VkImageTiling tiling; - VkImageUsageFlags usage; - VkSharingMode sharingMode; - uint32_t queueFamilyIndexCount; - const uint32_t* pQueueFamilyIndices; - VkImageLayout initialLayout; -} VkImageCreateInfo; - -typedef struct VkSubresourceLayout { - VkDeviceSize offset; - VkDeviceSize size; - VkDeviceSize rowPitch; - VkDeviceSize arrayPitch; - VkDeviceSize depthPitch; -} VkSubresourceLayout; - -typedef struct VkComponentMapping { - VkComponentSwizzle r; - VkComponentSwizzle g; - VkComponentSwizzle b; - VkComponentSwizzle a; -} VkComponentMapping; - -typedef struct VkImageSubresourceRange { - VkImageAspectFlags aspectMask; - uint32_t baseMipLevel; - uint32_t levelCount; - uint32_t baseArrayLayer; - uint32_t layerCount; -} VkImageSubresourceRange; - -typedef struct VkImageViewCreateInfo { - VkStructureType sType; - const void* pNext; - VkImageViewCreateFlags flags; - VkImage image; - VkImageViewType viewType; - VkFormat format; - VkComponentMapping components; - VkImageSubresourceRange subresourceRange; -} VkImageViewCreateInfo; - -typedef struct 
VkShaderModuleCreateInfo { - VkStructureType sType; - const void* pNext; - VkShaderModuleCreateFlags flags; - size_t codeSize; - const uint32_t* pCode; -} VkShaderModuleCreateInfo; - -typedef struct VkPipelineCacheCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineCacheCreateFlags flags; - size_t initialDataSize; - const void* pInitialData; -} VkPipelineCacheCreateInfo; - -typedef struct VkSpecializationMapEntry { - uint32_t constantID; - uint32_t offset; - size_t size; -} VkSpecializationMapEntry; - -typedef struct VkSpecializationInfo { - uint32_t mapEntryCount; - const VkSpecializationMapEntry* pMapEntries; - size_t dataSize; - const void* pData; -} VkSpecializationInfo; - -typedef struct VkPipelineShaderStageCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineShaderStageCreateFlags flags; - VkShaderStageFlagBits stage; - VkShaderModule module; - const char* pName; - const VkSpecializationInfo* pSpecializationInfo; -} VkPipelineShaderStageCreateInfo; - -typedef struct VkVertexInputBindingDescription { - uint32_t binding; - uint32_t stride; - VkVertexInputRate inputRate; -} VkVertexInputBindingDescription; - -typedef struct VkVertexInputAttributeDescription { - uint32_t location; - uint32_t binding; - VkFormat format; - uint32_t offset; -} VkVertexInputAttributeDescription; - -typedef struct VkPipelineVertexInputStateCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineVertexInputStateCreateFlags flags; - uint32_t vertexBindingDescriptionCount; - const VkVertexInputBindingDescription* pVertexBindingDescriptions; - uint32_t vertexAttributeDescriptionCount; - const VkVertexInputAttributeDescription* pVertexAttributeDescriptions; -} VkPipelineVertexInputStateCreateInfo; - -typedef struct VkPipelineInputAssemblyStateCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineInputAssemblyStateCreateFlags flags; - VkPrimitiveTopology topology; - VkBool32 primitiveRestartEnable; -} VkPipelineInputAssemblyStateCreateInfo; - -typedef struct VkPipelineTessellationStateCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineTessellationStateCreateFlags flags; - uint32_t patchControlPoints; -} VkPipelineTessellationStateCreateInfo; - -typedef struct VkViewport { - float x; - float y; - float width; - float height; - float minDepth; - float maxDepth; -} VkViewport; - -typedef struct VkOffset2D { - int32_t x; - int32_t y; -} VkOffset2D; - -typedef struct VkExtent2D { - uint32_t width; - uint32_t height; -} VkExtent2D; - -typedef struct VkRect2D { - VkOffset2D offset; - VkExtent2D extent; -} VkRect2D; - -typedef struct VkPipelineViewportStateCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineViewportStateCreateFlags flags; - uint32_t viewportCount; - const VkViewport* pViewports; - uint32_t scissorCount; - const VkRect2D* pScissors; -} VkPipelineViewportStateCreateInfo; - -typedef struct VkPipelineRasterizationStateCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineRasterizationStateCreateFlags flags; - VkBool32 depthClampEnable; - VkBool32 rasterizerDiscardEnable; - VkPolygonMode polygonMode; - VkCullModeFlags cullMode; - VkFrontFace frontFace; - VkBool32 depthBiasEnable; - float depthBiasConstantFactor; - float depthBiasClamp; - float depthBiasSlopeFactor; - float lineWidth; -} VkPipelineRasterizationStateCreateInfo; - -typedef struct VkPipelineMultisampleStateCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineMultisampleStateCreateFlags flags; - 
VkSampleCountFlagBits rasterizationSamples; - VkBool32 sampleShadingEnable; - float minSampleShading; - const VkSampleMask* pSampleMask; - VkBool32 alphaToCoverageEnable; - VkBool32 alphaToOneEnable; -} VkPipelineMultisampleStateCreateInfo; - -typedef struct VkStencilOpState { - VkStencilOp failOp; - VkStencilOp passOp; - VkStencilOp depthFailOp; - VkCompareOp compareOp; - uint32_t compareMask; - uint32_t writeMask; - uint32_t reference; -} VkStencilOpState; - -typedef struct VkPipelineDepthStencilStateCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineDepthStencilStateCreateFlags flags; - VkBool32 depthTestEnable; - VkBool32 depthWriteEnable; - VkCompareOp depthCompareOp; - VkBool32 depthBoundsTestEnable; - VkBool32 stencilTestEnable; - VkStencilOpState front; - VkStencilOpState back; - float minDepthBounds; - float maxDepthBounds; -} VkPipelineDepthStencilStateCreateInfo; - -typedef struct VkPipelineColorBlendAttachmentState { - VkBool32 blendEnable; - VkBlendFactor srcColorBlendFactor; - VkBlendFactor dstColorBlendFactor; - VkBlendOp colorBlendOp; - VkBlendFactor srcAlphaBlendFactor; - VkBlendFactor dstAlphaBlendFactor; - VkBlendOp alphaBlendOp; - VkColorComponentFlags colorWriteMask; -} VkPipelineColorBlendAttachmentState; - -typedef struct VkPipelineColorBlendStateCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineColorBlendStateCreateFlags flags; - VkBool32 logicOpEnable; - VkLogicOp logicOp; - uint32_t attachmentCount; - const VkPipelineColorBlendAttachmentState* pAttachments; - float blendConstants[4]; -} VkPipelineColorBlendStateCreateInfo; - -typedef struct VkPipelineDynamicStateCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineDynamicStateCreateFlags flags; - uint32_t dynamicStateCount; - const VkDynamicState* pDynamicStates; -} VkPipelineDynamicStateCreateInfo; - -typedef struct VkGraphicsPipelineCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineCreateFlags flags; - uint32_t stageCount; - const VkPipelineShaderStageCreateInfo* pStages; - const VkPipelineVertexInputStateCreateInfo* pVertexInputState; - const VkPipelineInputAssemblyStateCreateInfo* pInputAssemblyState; - const VkPipelineTessellationStateCreateInfo* pTessellationState; - const VkPipelineViewportStateCreateInfo* pViewportState; - const VkPipelineRasterizationStateCreateInfo* pRasterizationState; - const VkPipelineMultisampleStateCreateInfo* pMultisampleState; - const VkPipelineDepthStencilStateCreateInfo* pDepthStencilState; - const VkPipelineColorBlendStateCreateInfo* pColorBlendState; - const VkPipelineDynamicStateCreateInfo* pDynamicState; - VkPipelineLayout layout; - VkRenderPass renderPass; - uint32_t subpass; - VkPipeline basePipelineHandle; - int32_t basePipelineIndex; -} VkGraphicsPipelineCreateInfo; - -typedef struct VkComputePipelineCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineCreateFlags flags; - VkPipelineShaderStageCreateInfo stage; - VkPipelineLayout layout; - VkPipeline basePipelineHandle; - int32_t basePipelineIndex; -} VkComputePipelineCreateInfo; - -typedef struct VkPushConstantRange { - VkShaderStageFlags stageFlags; - uint32_t offset; - uint32_t size; -} VkPushConstantRange; - -typedef struct VkPipelineLayoutCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineLayoutCreateFlags flags; - uint32_t setLayoutCount; - const VkDescriptorSetLayout* pSetLayouts; - uint32_t pushConstantRangeCount; - const VkPushConstantRange* pPushConstantRanges; -} VkPipelineLayoutCreateInfo; - 
-typedef struct VkSamplerCreateInfo { - VkStructureType sType; - const void* pNext; - VkSamplerCreateFlags flags; - VkFilter magFilter; - VkFilter minFilter; - VkSamplerMipmapMode mipmapMode; - VkSamplerAddressMode addressModeU; - VkSamplerAddressMode addressModeV; - VkSamplerAddressMode addressModeW; - float mipLodBias; - VkBool32 anisotropyEnable; - float maxAnisotropy; - VkBool32 compareEnable; - VkCompareOp compareOp; - float minLod; - float maxLod; - VkBorderColor borderColor; - VkBool32 unnormalizedCoordinates; -} VkSamplerCreateInfo; - -typedef struct VkDescriptorSetLayoutBinding { - uint32_t binding; - VkDescriptorType descriptorType; - uint32_t descriptorCount; - VkShaderStageFlags stageFlags; - const VkSampler* pImmutableSamplers; -} VkDescriptorSetLayoutBinding; - -typedef struct VkDescriptorSetLayoutCreateInfo { - VkStructureType sType; - const void* pNext; - VkDescriptorSetLayoutCreateFlags flags; - uint32_t bindingCount; - const VkDescriptorSetLayoutBinding* pBindings; -} VkDescriptorSetLayoutCreateInfo; - -typedef struct VkDescriptorPoolSize { - VkDescriptorType type; - uint32_t descriptorCount; -} VkDescriptorPoolSize; - -typedef struct VkDescriptorPoolCreateInfo { - VkStructureType sType; - const void* pNext; - VkDescriptorPoolCreateFlags flags; - uint32_t maxSets; - uint32_t poolSizeCount; - const VkDescriptorPoolSize* pPoolSizes; -} VkDescriptorPoolCreateInfo; - -typedef struct VkDescriptorSetAllocateInfo { - VkStructureType sType; - const void* pNext; - VkDescriptorPool descriptorPool; - uint32_t descriptorSetCount; - const VkDescriptorSetLayout* pSetLayouts; -} VkDescriptorSetAllocateInfo; - -typedef struct VkDescriptorImageInfo { - VkSampler sampler; - VkImageView imageView; - VkImageLayout imageLayout; -} VkDescriptorImageInfo; - -typedef struct VkDescriptorBufferInfo { - VkBuffer buffer; - VkDeviceSize offset; - VkDeviceSize range; -} VkDescriptorBufferInfo; - -typedef struct VkWriteDescriptorSet { - VkStructureType sType; - const void* pNext; - VkDescriptorSet dstSet; - uint32_t dstBinding; - uint32_t dstArrayElement; - uint32_t descriptorCount; - VkDescriptorType descriptorType; - const VkDescriptorImageInfo* pImageInfo; - const VkDescriptorBufferInfo* pBufferInfo; - const VkBufferView* pTexelBufferView; -} VkWriteDescriptorSet; - -typedef struct VkCopyDescriptorSet { - VkStructureType sType; - const void* pNext; - VkDescriptorSet srcSet; - uint32_t srcBinding; - uint32_t srcArrayElement; - VkDescriptorSet dstSet; - uint32_t dstBinding; - uint32_t dstArrayElement; - uint32_t descriptorCount; -} VkCopyDescriptorSet; - -typedef struct VkFramebufferCreateInfo { - VkStructureType sType; - const void* pNext; - VkFramebufferCreateFlags flags; - VkRenderPass renderPass; - uint32_t attachmentCount; - const VkImageView* pAttachments; - uint32_t width; - uint32_t height; - uint32_t layers; -} VkFramebufferCreateInfo; - -typedef struct VkAttachmentDescription { - VkAttachmentDescriptionFlags flags; - VkFormat format; - VkSampleCountFlagBits samples; - VkAttachmentLoadOp loadOp; - VkAttachmentStoreOp storeOp; - VkAttachmentLoadOp stencilLoadOp; - VkAttachmentStoreOp stencilStoreOp; - VkImageLayout initialLayout; - VkImageLayout finalLayout; -} VkAttachmentDescription; - -typedef struct VkAttachmentReference { - uint32_t attachment; - VkImageLayout layout; -} VkAttachmentReference; - -typedef struct VkSubpassDescription { - VkSubpassDescriptionFlags flags; - VkPipelineBindPoint pipelineBindPoint; - uint32_t inputAttachmentCount; - const VkAttachmentReference* 
pInputAttachments; - uint32_t colorAttachmentCount; - const VkAttachmentReference* pColorAttachments; - const VkAttachmentReference* pResolveAttachments; - const VkAttachmentReference* pDepthStencilAttachment; - uint32_t preserveAttachmentCount; - const uint32_t* pPreserveAttachments; -} VkSubpassDescription; - -typedef struct VkSubpassDependency { - uint32_t srcSubpass; - uint32_t dstSubpass; - VkPipelineStageFlags srcStageMask; - VkPipelineStageFlags dstStageMask; - VkAccessFlags srcAccessMask; - VkAccessFlags dstAccessMask; - VkDependencyFlags dependencyFlags; -} VkSubpassDependency; - -typedef struct VkRenderPassCreateInfo { - VkStructureType sType; - const void* pNext; - VkRenderPassCreateFlags flags; - uint32_t attachmentCount; - const VkAttachmentDescription* pAttachments; - uint32_t subpassCount; - const VkSubpassDescription* pSubpasses; - uint32_t dependencyCount; - const VkSubpassDependency* pDependencies; -} VkRenderPassCreateInfo; - -typedef struct VkCommandPoolCreateInfo { - VkStructureType sType; - const void* pNext; - VkCommandPoolCreateFlags flags; - uint32_t queueFamilyIndex; -} VkCommandPoolCreateInfo; - -typedef struct VkCommandBufferAllocateInfo { - VkStructureType sType; - const void* pNext; - VkCommandPool commandPool; - VkCommandBufferLevel level; - uint32_t commandBufferCount; -} VkCommandBufferAllocateInfo; - -typedef struct VkCommandBufferInheritanceInfo { - VkStructureType sType; - const void* pNext; - VkRenderPass renderPass; - uint32_t subpass; - VkFramebuffer framebuffer; - VkBool32 occlusionQueryEnable; - VkQueryControlFlags queryFlags; - VkQueryPipelineStatisticFlags pipelineStatistics; -} VkCommandBufferInheritanceInfo; - -typedef struct VkCommandBufferBeginInfo { - VkStructureType sType; - const void* pNext; - VkCommandBufferUsageFlags flags; - const VkCommandBufferInheritanceInfo* pInheritanceInfo; -} VkCommandBufferBeginInfo; - -typedef struct VkBufferCopy { - VkDeviceSize srcOffset; - VkDeviceSize dstOffset; - VkDeviceSize size; -} VkBufferCopy; - -typedef struct VkImageSubresourceLayers { - VkImageAspectFlags aspectMask; - uint32_t mipLevel; - uint32_t baseArrayLayer; - uint32_t layerCount; -} VkImageSubresourceLayers; - -typedef struct VkImageCopy { - VkImageSubresourceLayers srcSubresource; - VkOffset3D srcOffset; - VkImageSubresourceLayers dstSubresource; - VkOffset3D dstOffset; - VkExtent3D extent; -} VkImageCopy; - -typedef struct VkImageBlit { - VkImageSubresourceLayers srcSubresource; - VkOffset3D srcOffsets[2]; - VkImageSubresourceLayers dstSubresource; - VkOffset3D dstOffsets[2]; -} VkImageBlit; - -typedef struct VkBufferImageCopy { - VkDeviceSize bufferOffset; - uint32_t bufferRowLength; - uint32_t bufferImageHeight; - VkImageSubresourceLayers imageSubresource; - VkOffset3D imageOffset; - VkExtent3D imageExtent; -} VkBufferImageCopy; - -typedef union VkClearColorValue { - float float32[4]; - int32_t int32[4]; - uint32_t uint32[4]; -} VkClearColorValue; - -typedef struct VkClearDepthStencilValue { - float depth; - uint32_t stencil; -} VkClearDepthStencilValue; - -typedef union VkClearValue { - VkClearColorValue color; - VkClearDepthStencilValue depthStencil; -} VkClearValue; - -typedef struct VkClearAttachment { - VkImageAspectFlags aspectMask; - uint32_t colorAttachment; - VkClearValue clearValue; -} VkClearAttachment; - -typedef struct VkClearRect { - VkRect2D rect; - uint32_t baseArrayLayer; - uint32_t layerCount; -} VkClearRect; - -typedef struct VkImageResolve { - VkImageSubresourceLayers srcSubresource; - VkOffset3D srcOffset; - 
VkImageSubresourceLayers dstSubresource; - VkOffset3D dstOffset; - VkExtent3D extent; -} VkImageResolve; - -typedef struct VkMemoryBarrier { - VkStructureType sType; - const void* pNext; - VkAccessFlags srcAccessMask; - VkAccessFlags dstAccessMask; -} VkMemoryBarrier; - -typedef struct VkBufferMemoryBarrier { - VkStructureType sType; - const void* pNext; - VkAccessFlags srcAccessMask; - VkAccessFlags dstAccessMask; - uint32_t srcQueueFamilyIndex; - uint32_t dstQueueFamilyIndex; - VkBuffer buffer; - VkDeviceSize offset; - VkDeviceSize size; -} VkBufferMemoryBarrier; - -typedef struct VkImageMemoryBarrier { - VkStructureType sType; - const void* pNext; - VkAccessFlags srcAccessMask; - VkAccessFlags dstAccessMask; - VkImageLayout oldLayout; - VkImageLayout newLayout; - uint32_t srcQueueFamilyIndex; - uint32_t dstQueueFamilyIndex; - VkImage image; - VkImageSubresourceRange subresourceRange; -} VkImageMemoryBarrier; - -typedef struct VkRenderPassBeginInfo { - VkStructureType sType; - const void* pNext; - VkRenderPass renderPass; - VkFramebuffer framebuffer; - VkRect2D renderArea; - uint32_t clearValueCount; - const VkClearValue* pClearValues; -} VkRenderPassBeginInfo; - -typedef struct VkDispatchIndirectCommand { - uint32_t x; - uint32_t y; - uint32_t z; -} VkDispatchIndirectCommand; - -typedef struct VkDrawIndexedIndirectCommand { - uint32_t indexCount; - uint32_t instanceCount; - uint32_t firstIndex; - int32_t vertexOffset; - uint32_t firstInstance; -} VkDrawIndexedIndirectCommand; - -typedef struct VkDrawIndirectCommand { - uint32_t vertexCount; - uint32_t instanceCount; - uint32_t firstVertex; - uint32_t firstInstance; -} VkDrawIndirectCommand; - - -typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceVersion)(uint32_t* pApiVersion); -typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance); -typedef void (VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDevices)(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties); -typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddr)(VkInstance instance, const char* pName); -typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetDeviceProcAddr)(VkDevice device, const char* pName); -typedef VkResult (VKAPI_PTR *PFN_vkCreateDevice)(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, 
const VkAllocationCallbacks* pAllocator, VkDevice* pDevice); -typedef void (VKAPI_PTR *PFN_vkDestroyDevice)(VkDevice device, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceExtensionProperties)(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceExtensionProperties)(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceLayerProperties)(uint32_t* pPropertyCount, VkLayerProperties* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceLayerProperties)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties); -typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue)(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue); -typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence); -typedef VkResult (VKAPI_PTR *PFN_vkQueueWaitIdle)(VkQueue queue); -typedef VkResult (VKAPI_PTR *PFN_vkDeviceWaitIdle)(VkDevice device); -typedef VkResult (VKAPI_PTR *PFN_vkAllocateMemory)(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory); -typedef void (VKAPI_PTR *PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkMapMemory)(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData); -typedef void (VKAPI_PTR *PFN_vkUnmapMemory)(VkDevice device, VkDeviceMemory memory); -typedef VkResult (VKAPI_PTR *PFN_vkFlushMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); -typedef VkResult (VKAPI_PTR *PFN_vkInvalidateMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); -typedef void (VKAPI_PTR *PFN_vkGetDeviceMemoryCommitment)(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes); -typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory)(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset); -typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory)(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset); -typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements)(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements); -typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements)(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements); -typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements)(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkQueueBindSparse)(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence); -typedef VkResult (VKAPI_PTR *PFN_vkCreateFence)(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* 
pFence); -typedef void (VKAPI_PTR *PFN_vkDestroyFence)(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkResetFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences); -typedef VkResult (VKAPI_PTR *PFN_vkGetFenceStatus)(VkDevice device, VkFence fence); -typedef VkResult (VKAPI_PTR *PFN_vkWaitForFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout); -typedef VkResult (VKAPI_PTR *PFN_vkCreateSemaphore)(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore); -typedef void (VKAPI_PTR *PFN_vkDestroySemaphore)(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateEvent)(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent); -typedef void (VKAPI_PTR *PFN_vkDestroyEvent)(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkGetEventStatus)(VkDevice device, VkEvent event); -typedef VkResult (VKAPI_PTR *PFN_vkSetEvent)(VkDevice device, VkEvent event); -typedef VkResult (VKAPI_PTR *PFN_vkResetEvent)(VkDevice device, VkEvent event); -typedef VkResult (VKAPI_PTR *PFN_vkCreateQueryPool)(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool); -typedef void (VKAPI_PTR *PFN_vkDestroyQueryPool)(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkGetQueryPoolResults)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags); -typedef VkResult (VKAPI_PTR *PFN_vkCreateBuffer)(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer); -typedef void (VKAPI_PTR *PFN_vkDestroyBuffer)(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferView)(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView); -typedef void (VKAPI_PTR *PFN_vkDestroyBufferView)(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateImage)(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage); -typedef void (VKAPI_PTR *PFN_vkDestroyImage)(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout)(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout); -typedef VkResult (VKAPI_PTR *PFN_vkCreateImageView)(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView); -typedef void (VKAPI_PTR *PFN_vkDestroyImageView)(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateShaderModule)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule); -typedef void (VKAPI_PTR *PFN_vkDestroyShaderModule)(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator); -typedef VkResult 
(VKAPI_PTR *PFN_vkCreatePipelineCache)(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache); -typedef void (VKAPI_PTR *PFN_vkDestroyPipelineCache)(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineCacheData)(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData); -typedef VkResult (VKAPI_PTR *PFN_vkMergePipelineCaches)(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches); -typedef VkResult (VKAPI_PTR *PFN_vkCreateGraphicsPipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); -typedef VkResult (VKAPI_PTR *PFN_vkCreateComputePipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); -typedef void (VKAPI_PTR *PFN_vkDestroyPipeline)(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineLayout)(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout); -typedef void (VKAPI_PTR *PFN_vkDestroyPipelineLayout)(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateSampler)(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler); -typedef void (VKAPI_PTR *PFN_vkDestroySampler)(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorSetLayout)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout); -typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorSetLayout)(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorPool)(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool); -typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkResetDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags); -typedef VkResult (VKAPI_PTR *PFN_vkAllocateDescriptorSets)(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets); -typedef VkResult (VKAPI_PTR *PFN_vkFreeDescriptorSets)(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets); -typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSets)(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies); -typedef VkResult (VKAPI_PTR *PFN_vkCreateFramebuffer)(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer); -typedef void (VKAPI_PTR 
*PFN_vkDestroyFramebuffer)(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass)(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); -typedef void (VKAPI_PTR *PFN_vkDestroyRenderPass)(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkGetRenderAreaGranularity)(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity); -typedef VkResult (VKAPI_PTR *PFN_vkCreateCommandPool)(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool); -typedef void (VKAPI_PTR *PFN_vkDestroyCommandPool)(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkResetCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags); -typedef VkResult (VKAPI_PTR *PFN_vkAllocateCommandBuffers)(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers); -typedef void (VKAPI_PTR *PFN_vkFreeCommandBuffers)(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers); -typedef VkResult (VKAPI_PTR *PFN_vkBeginCommandBuffer)(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo); -typedef VkResult (VKAPI_PTR *PFN_vkEndCommandBuffer)(VkCommandBuffer commandBuffer); -typedef VkResult (VKAPI_PTR *PFN_vkResetCommandBuffer)(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags); -typedef void (VKAPI_PTR *PFN_vkCmdBindPipeline)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline); -typedef void (VKAPI_PTR *PFN_vkCmdSetViewport)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports); -typedef void (VKAPI_PTR *PFN_vkCmdSetScissor)(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors); -typedef void (VKAPI_PTR *PFN_vkCmdSetLineWidth)(VkCommandBuffer commandBuffer, float lineWidth); -typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBias)(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor); -typedef void (VKAPI_PTR *PFN_vkCmdSetBlendConstants)(VkCommandBuffer commandBuffer, const float blendConstants[4]); -typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBounds)(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds); -typedef void (VKAPI_PTR *PFN_vkCmdSetStencilCompareMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask); -typedef void (VKAPI_PTR *PFN_vkCmdSetStencilWriteMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask); -typedef void (VKAPI_PTR *PFN_vkCmdSetStencilReference)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference); -typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets); -typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType); -typedef void (VKAPI_PTR 
*PFN_vkCmdBindVertexBuffers)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets); -typedef void (VKAPI_PTR *PFN_vkCmdDraw)(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance); -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexed)(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance); -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); -typedef void (VKAPI_PTR *PFN_vkCmdDispatch)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); -typedef void (VKAPI_PTR *PFN_vkCmdDispatchIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset); -typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions); -typedef void (VKAPI_PTR *PFN_vkCmdCopyImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions); -typedef void (VKAPI_PTR *PFN_vkCmdBlitImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter); -typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions); -typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions); -typedef void (VKAPI_PTR *PFN_vkCmdUpdateBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData); -typedef void (VKAPI_PTR *PFN_vkCmdFillBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data); -typedef void (VKAPI_PTR *PFN_vkCmdClearColorImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges); -typedef void (VKAPI_PTR *PFN_vkCmdClearDepthStencilImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges); -typedef void (VKAPI_PTR *PFN_vkCmdClearAttachments)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects); -typedef void (VKAPI_PTR *PFN_vkCmdResolveImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions); -typedef void (VKAPI_PTR *PFN_vkCmdSetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); -typedef void (VKAPI_PTR 
*PFN_vkCmdResetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); -typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); -typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier)(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); -typedef void (VKAPI_PTR *PFN_vkCmdBeginQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags); -typedef void (VKAPI_PTR *PFN_vkCmdEndQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query); -typedef void (VKAPI_PTR *PFN_vkCmdResetQueryPool)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); -typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query); -typedef void (VKAPI_PTR *PFN_vkCmdCopyQueryPoolResults)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags); -typedef void (VKAPI_PTR *PFN_vkCmdPushConstants)(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues); -typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents); -typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass)(VkCommandBuffer commandBuffer, VkSubpassContents contents); -typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass)(VkCommandBuffer commandBuffer); -typedef void (VKAPI_PTR *PFN_vkCmdExecuteCommands)(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion( - uint32_t* pApiVersion); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance( - const VkInstanceCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkInstance* pInstance); - -VKAPI_ATTR void VKAPI_CALL vkDestroyInstance( - VkInstance instance, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices( - VkInstance instance, - uint32_t* pPhysicalDeviceCount, - VkPhysicalDevice* pPhysicalDevices); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceFeatures* pFeatures); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties( - VkPhysicalDevice physicalDevice, - VkFormat format, - VkFormatProperties* pFormatProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties( - VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkImageTiling tiling, - VkImageUsageFlags usage, - VkImageCreateFlags flags, - 
VkImageFormatProperties* pImageFormatProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceProperties* pProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties( - VkPhysicalDevice physicalDevice, - uint32_t* pQueueFamilyPropertyCount, - VkQueueFamilyProperties* pQueueFamilyProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties* pMemoryProperties); - -VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr( - VkInstance instance, - const char* pName); - -VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr( - VkDevice device, - const char* pName); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice( - VkPhysicalDevice physicalDevice, - const VkDeviceCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDevice* pDevice); - -VKAPI_ATTR void VKAPI_CALL vkDestroyDevice( - VkDevice device, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties( - const char* pLayerName, - uint32_t* pPropertyCount, - VkExtensionProperties* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties( - VkPhysicalDevice physicalDevice, - const char* pLayerName, - uint32_t* pPropertyCount, - VkExtensionProperties* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties( - uint32_t* pPropertyCount, - VkLayerProperties* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties( - VkPhysicalDevice physicalDevice, - uint32_t* pPropertyCount, - VkLayerProperties* pProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue( - VkDevice device, - uint32_t queueFamilyIndex, - uint32_t queueIndex, - VkQueue* pQueue); - -VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit( - VkQueue queue, - uint32_t submitCount, - const VkSubmitInfo* pSubmits, - VkFence fence); - -VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle( - VkQueue queue); - -VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle( - VkDevice device); - -VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory( - VkDevice device, - const VkMemoryAllocateInfo* pAllocateInfo, - const VkAllocationCallbacks* pAllocator, - VkDeviceMemory* pMemory); - -VKAPI_ATTR void VKAPI_CALL vkFreeMemory( - VkDevice device, - VkDeviceMemory memory, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory( - VkDevice device, - VkDeviceMemory memory, - VkDeviceSize offset, - VkDeviceSize size, - VkMemoryMapFlags flags, - void** ppData); - -VKAPI_ATTR void VKAPI_CALL vkUnmapMemory( - VkDevice device, - VkDeviceMemory memory); - -VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges( - VkDevice device, - uint32_t memoryRangeCount, - const VkMappedMemoryRange* pMemoryRanges); - -VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges( - VkDevice device, - uint32_t memoryRangeCount, - const VkMappedMemoryRange* pMemoryRanges); - -VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment( - VkDevice device, - VkDeviceMemory memory, - VkDeviceSize* pCommittedMemoryInBytes); - -VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory( - VkDevice device, - VkBuffer buffer, - VkDeviceMemory memory, - VkDeviceSize memoryOffset); - -VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory( - VkDevice device, - VkImage image, - VkDeviceMemory memory, - VkDeviceSize memoryOffset); - -VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements( - 
VkDevice device, - VkBuffer buffer, - VkMemoryRequirements* pMemoryRequirements); - -VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements( - VkDevice device, - VkImage image, - VkMemoryRequirements* pMemoryRequirements); - -VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements( - VkDevice device, - VkImage image, - uint32_t* pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements* pSparseMemoryRequirements); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties( - VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkSampleCountFlagBits samples, - VkImageUsageFlags usage, - VkImageTiling tiling, - uint32_t* pPropertyCount, - VkSparseImageFormatProperties* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse( - VkQueue queue, - uint32_t bindInfoCount, - const VkBindSparseInfo* pBindInfo, - VkFence fence); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence( - VkDevice device, - const VkFenceCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkFence* pFence); - -VKAPI_ATTR void VKAPI_CALL vkDestroyFence( - VkDevice device, - VkFence fence, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkResetFences( - VkDevice device, - uint32_t fenceCount, - const VkFence* pFences); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus( - VkDevice device, - VkFence fence); - -VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences( - VkDevice device, - uint32_t fenceCount, - const VkFence* pFences, - VkBool32 waitAll, - uint64_t timeout); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore( - VkDevice device, - const VkSemaphoreCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSemaphore* pSemaphore); - -VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore( - VkDevice device, - VkSemaphore semaphore, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent( - VkDevice device, - const VkEventCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkEvent* pEvent); - -VKAPI_ATTR void VKAPI_CALL vkDestroyEvent( - VkDevice device, - VkEvent event, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus( - VkDevice device, - VkEvent event); - -VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent( - VkDevice device, - VkEvent event); - -VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent( - VkDevice device, - VkEvent event); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool( - VkDevice device, - const VkQueryPoolCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkQueryPool* pQueryPool); - -VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool( - VkDevice device, - VkQueryPool queryPool, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults( - VkDevice device, - VkQueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - size_t dataSize, - void* pData, - VkDeviceSize stride, - VkQueryResultFlags flags); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer( - VkDevice device, - const VkBufferCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkBuffer* pBuffer); - -VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer( - VkDevice device, - VkBuffer buffer, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView( - VkDevice device, - const VkBufferViewCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkBufferView* pView); - -VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView( - VkDevice device, 
- VkBufferView bufferView, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage( - VkDevice device, - const VkImageCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkImage* pImage); - -VKAPI_ATTR void VKAPI_CALL vkDestroyImage( - VkDevice device, - VkImage image, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout( - VkDevice device, - VkImage image, - const VkImageSubresource* pSubresource, - VkSubresourceLayout* pLayout); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView( - VkDevice device, - const VkImageViewCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkImageView* pView); - -VKAPI_ATTR void VKAPI_CALL vkDestroyImageView( - VkDevice device, - VkImageView imageView, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule( - VkDevice device, - const VkShaderModuleCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkShaderModule* pShaderModule); - -VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule( - VkDevice device, - VkShaderModule shaderModule, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache( - VkDevice device, - const VkPipelineCacheCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkPipelineCache* pPipelineCache); - -VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache( - VkDevice device, - VkPipelineCache pipelineCache, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData( - VkDevice device, - VkPipelineCache pipelineCache, - size_t* pDataSize, - void* pData); - -VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches( - VkDevice device, - VkPipelineCache dstCache, - uint32_t srcCacheCount, - const VkPipelineCache* pSrcCaches); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines( - VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkGraphicsPipelineCreateInfo* pCreateInfos, - const VkAllocationCallbacks* pAllocator, - VkPipeline* pPipelines); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines( - VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkComputePipelineCreateInfo* pCreateInfos, - const VkAllocationCallbacks* pAllocator, - VkPipeline* pPipelines); - -VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline( - VkDevice device, - VkPipeline pipeline, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout( - VkDevice device, - const VkPipelineLayoutCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkPipelineLayout* pPipelineLayout); - -VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout( - VkDevice device, - VkPipelineLayout pipelineLayout, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler( - VkDevice device, - const VkSamplerCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSampler* pSampler); - -VKAPI_ATTR void VKAPI_CALL vkDestroySampler( - VkDevice device, - VkSampler sampler, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout( - VkDevice device, - const VkDescriptorSetLayoutCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDescriptorSetLayout* pSetLayout); - -VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout( - VkDevice device, - VkDescriptorSetLayout descriptorSetLayout, - const 
VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool( - VkDevice device, - const VkDescriptorPoolCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDescriptorPool* pDescriptorPool); - -VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool( - VkDevice device, - VkDescriptorPool descriptorPool, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool( - VkDevice device, - VkDescriptorPool descriptorPool, - VkDescriptorPoolResetFlags flags); - -VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets( - VkDevice device, - const VkDescriptorSetAllocateInfo* pAllocateInfo, - VkDescriptorSet* pDescriptorSets); - -VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets( - VkDevice device, - VkDescriptorPool descriptorPool, - uint32_t descriptorSetCount, - const VkDescriptorSet* pDescriptorSets); - -VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets( - VkDevice device, - uint32_t descriptorWriteCount, - const VkWriteDescriptorSet* pDescriptorWrites, - uint32_t descriptorCopyCount, - const VkCopyDescriptorSet* pDescriptorCopies); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer( - VkDevice device, - const VkFramebufferCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkFramebuffer* pFramebuffer); - -VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer( - VkDevice device, - VkFramebuffer framebuffer, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass( - VkDevice device, - const VkRenderPassCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkRenderPass* pRenderPass); - -VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass( - VkDevice device, - VkRenderPass renderPass, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity( - VkDevice device, - VkRenderPass renderPass, - VkExtent2D* pGranularity); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool( - VkDevice device, - const VkCommandPoolCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkCommandPool* pCommandPool); - -VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool( - VkDevice device, - VkCommandPool commandPool, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool( - VkDevice device, - VkCommandPool commandPool, - VkCommandPoolResetFlags flags); - -VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers( - VkDevice device, - const VkCommandBufferAllocateInfo* pAllocateInfo, - VkCommandBuffer* pCommandBuffers); - -VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers( - VkDevice device, - VkCommandPool commandPool, - uint32_t commandBufferCount, - const VkCommandBuffer* pCommandBuffers); - -VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer( - VkCommandBuffer commandBuffer, - const VkCommandBufferBeginInfo* pBeginInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer( - VkCommandBuffer commandBuffer); - -VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer( - VkCommandBuffer commandBuffer, - VkCommandBufferResetFlags flags); - -VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline( - VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipeline pipeline); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport( - VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkViewport* pViewports); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor( - VkCommandBuffer commandBuffer, - uint32_t firstScissor, - uint32_t scissorCount, - const 
VkRect2D* pScissors); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth( - VkCommandBuffer commandBuffer, - float lineWidth); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias( - VkCommandBuffer commandBuffer, - float depthBiasConstantFactor, - float depthBiasClamp, - float depthBiasSlopeFactor); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants( - VkCommandBuffer commandBuffer, - const float blendConstants[4]); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds( - VkCommandBuffer commandBuffer, - float minDepthBounds, - float maxDepthBounds); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask( - VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - uint32_t compareMask); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask( - VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - uint32_t writeMask); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference( - VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - uint32_t reference); - -VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets( - VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t firstSet, - uint32_t descriptorSetCount, - const VkDescriptorSet* pDescriptorSets, - uint32_t dynamicOffsetCount, - const uint32_t* pDynamicOffsets); - -VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkIndexType indexType); - -VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers( - VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer* pBuffers, - const VkDeviceSize* pOffsets); - -VKAPI_ATTR void VKAPI_CALL vkCmdDraw( - VkCommandBuffer commandBuffer, - uint32_t vertexCount, - uint32_t instanceCount, - uint32_t firstVertex, - uint32_t firstInstance); - -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed( - VkCommandBuffer commandBuffer, - uint32_t indexCount, - uint32_t instanceCount, - uint32_t firstIndex, - int32_t vertexOffset, - uint32_t firstInstance); - -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - uint32_t drawCount, - uint32_t stride); - -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - uint32_t drawCount, - uint32_t stride); - -VKAPI_ATTR void VKAPI_CALL vkCmdDispatch( - VkCommandBuffer commandBuffer, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ); - -VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset); - -VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer( - VkCommandBuffer commandBuffer, - VkBuffer srcBuffer, - VkBuffer dstBuffer, - uint32_t regionCount, - const VkBufferCopy* pRegions); - -VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage( - VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageCopy* pRegions); - -VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage( - VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageBlit* pRegions, - VkFilter filter); - -VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage( - VkCommandBuffer commandBuffer, - VkBuffer srcBuffer, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkBufferImageCopy* pRegions); - 
-VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer( - VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkBuffer dstBuffer, - uint32_t regionCount, - const VkBufferImageCopy* pRegions); - -VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer( - VkCommandBuffer commandBuffer, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - VkDeviceSize dataSize, - const void* pData); - -VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer( - VkCommandBuffer commandBuffer, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - VkDeviceSize size, - uint32_t data); - -VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage( - VkCommandBuffer commandBuffer, - VkImage image, - VkImageLayout imageLayout, - const VkClearColorValue* pColor, - uint32_t rangeCount, - const VkImageSubresourceRange* pRanges); - -VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage( - VkCommandBuffer commandBuffer, - VkImage image, - VkImageLayout imageLayout, - const VkClearDepthStencilValue* pDepthStencil, - uint32_t rangeCount, - const VkImageSubresourceRange* pRanges); - -VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments( - VkCommandBuffer commandBuffer, - uint32_t attachmentCount, - const VkClearAttachment* pAttachments, - uint32_t rectCount, - const VkClearRect* pRects); - -VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage( - VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageResolve* pRegions); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent( - VkCommandBuffer commandBuffer, - VkEvent event, - VkPipelineStageFlags stageMask); - -VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent( - VkCommandBuffer commandBuffer, - VkEvent event, - VkPipelineStageFlags stageMask); - -VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents( - VkCommandBuffer commandBuffer, - uint32_t eventCount, - const VkEvent* pEvents, - VkPipelineStageFlags srcStageMask, - VkPipelineStageFlags dstStageMask, - uint32_t memoryBarrierCount, - const VkMemoryBarrier* pMemoryBarriers, - uint32_t bufferMemoryBarrierCount, - const VkBufferMemoryBarrier* pBufferMemoryBarriers, - uint32_t imageMemoryBarrierCount, - const VkImageMemoryBarrier* pImageMemoryBarriers); - -VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier( - VkCommandBuffer commandBuffer, - VkPipelineStageFlags srcStageMask, - VkPipelineStageFlags dstStageMask, - VkDependencyFlags dependencyFlags, - uint32_t memoryBarrierCount, - const VkMemoryBarrier* pMemoryBarriers, - uint32_t bufferMemoryBarrierCount, - const VkBufferMemoryBarrier* pBufferMemoryBarriers, - uint32_t imageMemoryBarrierCount, - const VkImageMemoryBarrier* pImageMemoryBarriers); - -VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery( - VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t query, - VkQueryControlFlags flags); - -VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery( - VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t query); - -VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool( - VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount); - -VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp( - VkCommandBuffer commandBuffer, - VkPipelineStageFlagBits pipelineStage, - VkQueryPool queryPool, - uint32_t query); - -VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults( - VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - VkDeviceSize stride, - VkQueryResultFlags flags); - -VKAPI_ATTR void 
VKAPI_CALL vkCmdPushConstants( - VkCommandBuffer commandBuffer, - VkPipelineLayout layout, - VkShaderStageFlags stageFlags, - uint32_t offset, - uint32_t size, - const void* pValues); - -VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass( - VkCommandBuffer commandBuffer, - const VkRenderPassBeginInfo* pRenderPassBegin, - VkSubpassContents contents); - -VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass( - VkCommandBuffer commandBuffer, - VkSubpassContents contents); - -VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass( - VkCommandBuffer commandBuffer); - -VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands( - VkCommandBuffer commandBuffer, - uint32_t commandBufferCount, - const VkCommandBuffer* pCommandBuffers); -#endif - -#define VK_KHR_surface 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) - -#define VK_KHR_SURFACE_SPEC_VERSION 25 -#define VK_KHR_SURFACE_EXTENSION_NAME "VK_KHR_surface" -#define VK_COLORSPACE_SRGB_NONLINEAR_KHR VK_COLOR_SPACE_SRGB_NONLINEAR_KHR - - -typedef enum VkColorSpaceKHR { - VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0, - VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001, - VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1000104002, - VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = 1000104003, - VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT = 1000104004, - VK_COLOR_SPACE_BT709_LINEAR_EXT = 1000104005, - VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006, - VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007, - VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008, - VK_COLOR_SPACE_DOLBYVISION_EXT = 1000104009, - VK_COLOR_SPACE_HDR10_HLG_EXT = 1000104010, - VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT = 1000104011, - VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT = 1000104012, - VK_COLOR_SPACE_PASS_THROUGH_EXT = 1000104013, - VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT = 1000104014, - VK_COLOR_SPACE_BEGIN_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, - VK_COLOR_SPACE_END_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, - VK_COLOR_SPACE_RANGE_SIZE_KHR = (VK_COLOR_SPACE_SRGB_NONLINEAR_KHR - VK_COLOR_SPACE_SRGB_NONLINEAR_KHR + 1), - VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkColorSpaceKHR; - -typedef enum VkPresentModeKHR { - VK_PRESENT_MODE_IMMEDIATE_KHR = 0, - VK_PRESENT_MODE_MAILBOX_KHR = 1, - VK_PRESENT_MODE_FIFO_KHR = 2, - VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3, - VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000, - VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001, - VK_PRESENT_MODE_BEGIN_RANGE_KHR = VK_PRESENT_MODE_IMMEDIATE_KHR, - VK_PRESENT_MODE_END_RANGE_KHR = VK_PRESENT_MODE_FIFO_RELAXED_KHR, - VK_PRESENT_MODE_RANGE_SIZE_KHR = (VK_PRESENT_MODE_FIFO_RELAXED_KHR - VK_PRESENT_MODE_IMMEDIATE_KHR + 1), - VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkPresentModeKHR; - - -typedef enum VkSurfaceTransformFlagBitsKHR { - VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001, - VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002, - VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR = 0x00000004, - VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR = 0x00000008, - VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR = 0x00000010, - VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR = 0x00000020, - VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = 0x00000040, - VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = 0x00000080, - VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100, - VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkSurfaceTransformFlagBitsKHR; -typedef VkFlags VkSurfaceTransformFlagsKHR; - -typedef enum VkCompositeAlphaFlagBitsKHR { - VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, - 
VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR = 0x00000002, - VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR = 0x00000004, - VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR = 0x00000008, - VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkCompositeAlphaFlagBitsKHR; -typedef VkFlags VkCompositeAlphaFlagsKHR; - -typedef struct VkSurfaceCapabilitiesKHR { - uint32_t minImageCount; - uint32_t maxImageCount; - VkExtent2D currentExtent; - VkExtent2D minImageExtent; - VkExtent2D maxImageExtent; - uint32_t maxImageArrayLayers; - VkSurfaceTransformFlagsKHR supportedTransforms; - VkSurfaceTransformFlagBitsKHR currentTransform; - VkCompositeAlphaFlagsKHR supportedCompositeAlpha; - VkImageUsageFlags supportedUsageFlags; -} VkSurfaceCapabilitiesKHR; - -typedef struct VkSurfaceFormatKHR { - VkFormat format; - VkColorSpaceKHR colorSpace; -} VkSurfaceFormatKHR; - - -typedef void (VKAPI_PTR *PFN_vkDestroySurfaceKHR)(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR( - VkInstance instance, - VkSurfaceKHR surface, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR( - VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - VkSurfaceKHR surface, - VkBool32* pSupported); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t* pSurfaceFormatCount, - VkSurfaceFormatKHR* pSurfaceFormats); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t* pPresentModeCount, - VkPresentModeKHR* pPresentModes); -#endif - -#define VK_KHR_swapchain 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR) - -#define VK_KHR_SWAPCHAIN_SPEC_VERSION 68 -#define VK_KHR_SWAPCHAIN_EXTENSION_NAME "VK_KHR_swapchain" - - -typedef enum VkSwapchainCreateFlagBitsKHR { - VK_SWAPCHAIN_CREATE_BIND_SFR_BIT_KHX = 0x00000001, - VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR = 0x00000002, - VK_SWAPCHAIN_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkSwapchainCreateFlagBitsKHR; -typedef VkFlags VkSwapchainCreateFlagsKHR; - -typedef struct VkSwapchainCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkSwapchainCreateFlagsKHR flags; - VkSurfaceKHR surface; - uint32_t minImageCount; - VkFormat imageFormat; - VkColorSpaceKHR imageColorSpace; - VkExtent2D imageExtent; - uint32_t imageArrayLayers; - VkImageUsageFlags imageUsage; - VkSharingMode imageSharingMode; - uint32_t 
queueFamilyIndexCount; - const uint32_t* pQueueFamilyIndices; - VkSurfaceTransformFlagBitsKHR preTransform; - VkCompositeAlphaFlagBitsKHR compositeAlpha; - VkPresentModeKHR presentMode; - VkBool32 clipped; - VkSwapchainKHR oldSwapchain; -} VkSwapchainCreateInfoKHR; - -typedef struct VkPresentInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t waitSemaphoreCount; - const VkSemaphore* pWaitSemaphores; - uint32_t swapchainCount; - const VkSwapchainKHR* pSwapchains; - const uint32_t* pImageIndices; - VkResult* pResults; -} VkPresentInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateSwapchainKHR)(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain); -typedef void (VKAPI_PTR *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages); -typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex); -typedef VkResult (VKAPI_PTR *PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR* pPresentInfo); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR( - VkDevice device, - const VkSwapchainCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSwapchainKHR* pSwapchain); - -VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR( - VkDevice device, - VkSwapchainKHR swapchain, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR( - VkDevice device, - VkSwapchainKHR swapchain, - uint32_t* pSwapchainImageCount, - VkImage* pSwapchainImages); - -VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR( - VkDevice device, - VkSwapchainKHR swapchain, - uint64_t timeout, - VkSemaphore semaphore, - VkFence fence, - uint32_t* pImageIndex); - -VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR( - VkQueue queue, - const VkPresentInfoKHR* pPresentInfo); -#endif - -#define VK_KHR_display 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) - -#define VK_KHR_DISPLAY_SPEC_VERSION 21 -#define VK_KHR_DISPLAY_EXTENSION_NAME "VK_KHR_display" - - -typedef enum VkDisplayPlaneAlphaFlagBitsKHR { - VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, - VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR = 0x00000002, - VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR = 0x00000004, - VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR = 0x00000008, - VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkDisplayPlaneAlphaFlagBitsKHR; -typedef VkFlags VkDisplayPlaneAlphaFlagsKHR; -typedef VkFlags VkDisplayModeCreateFlagsKHR; -typedef VkFlags VkDisplaySurfaceCreateFlagsKHR; - -typedef struct VkDisplayPropertiesKHR { - VkDisplayKHR display; - const char* displayName; - VkExtent2D physicalDimensions; - VkExtent2D physicalResolution; - VkSurfaceTransformFlagsKHR supportedTransforms; - VkBool32 planeReorderPossible; - VkBool32 persistentContent; -} VkDisplayPropertiesKHR; - -typedef struct VkDisplayModeParametersKHR { - VkExtent2D visibleRegion; - uint32_t refreshRate; -} VkDisplayModeParametersKHR; - -typedef struct VkDisplayModePropertiesKHR { - VkDisplayModeKHR displayMode; - VkDisplayModeParametersKHR parameters; -} VkDisplayModePropertiesKHR; - -typedef struct 
VkDisplayModeCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkDisplayModeCreateFlagsKHR flags; - VkDisplayModeParametersKHR parameters; -} VkDisplayModeCreateInfoKHR; - -typedef struct VkDisplayPlaneCapabilitiesKHR { - VkDisplayPlaneAlphaFlagsKHR supportedAlpha; - VkOffset2D minSrcPosition; - VkOffset2D maxSrcPosition; - VkExtent2D minSrcExtent; - VkExtent2D maxSrcExtent; - VkOffset2D minDstPosition; - VkOffset2D maxDstPosition; - VkExtent2D minDstExtent; - VkExtent2D maxDstExtent; -} VkDisplayPlaneCapabilitiesKHR; - -typedef struct VkDisplayPlanePropertiesKHR { - VkDisplayKHR currentDisplay; - uint32_t currentStackIndex; -} VkDisplayPlanePropertiesKHR; - -typedef struct VkDisplaySurfaceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkDisplaySurfaceCreateFlagsKHR flags; - VkDisplayModeKHR displayMode; - uint32_t planeIndex; - uint32_t planeStackIndex; - VkSurfaceTransformFlagBitsKHR transform; - float globalAlpha; - VkDisplayPlaneAlphaFlagBitsKHR alphaMode; - VkExtent2D imageExtent; -} VkDisplaySurfaceCreateInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneSupportedDisplaysKHR)(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays); -typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModePropertiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayModeKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode); -typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities); -typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayPlaneSurfaceKHR)(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR( - VkPhysicalDevice physicalDevice, - uint32_t* pPropertyCount, - VkDisplayPropertiesKHR* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR( - VkPhysicalDevice physicalDevice, - uint32_t* pPropertyCount, - VkDisplayPlanePropertiesKHR* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR( - VkPhysicalDevice physicalDevice, - uint32_t planeIndex, - uint32_t* pDisplayCount, - VkDisplayKHR* pDisplays); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR( - VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - uint32_t* pPropertyCount, - VkDisplayModePropertiesKHR* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR( - VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - const VkDisplayModeCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDisplayModeKHR* pMode); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR( - VkPhysicalDevice physicalDevice, - VkDisplayModeKHR mode, - 
uint32_t planeIndex, - VkDisplayPlaneCapabilitiesKHR* pCapabilities); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR( - VkInstance instance, - const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); -#endif - -#define VK_KHR_display_swapchain 1 -#define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 9 -#define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain" - -typedef struct VkDisplayPresentInfoKHR { - VkStructureType sType; - const void* pNext; - VkRect2D srcRect; - VkRect2D dstRect; - VkBool32 persistent; -} VkDisplayPresentInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR( - VkDevice device, - uint32_t swapchainCount, - const VkSwapchainCreateInfoKHR* pCreateInfos, - const VkAllocationCallbacks* pAllocator, - VkSwapchainKHR* pSwapchains); -#endif - -#ifdef VK_USE_PLATFORM_XLIB_KHR -#define VK_KHR_xlib_surface 1 -#include <X11/Xlib.h> - -#define VK_KHR_XLIB_SURFACE_SPEC_VERSION 6 -#define VK_KHR_XLIB_SURFACE_EXTENSION_NAME "VK_KHR_xlib_surface" - -typedef VkFlags VkXlibSurfaceCreateFlagsKHR; - -typedef struct VkXlibSurfaceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkXlibSurfaceCreateFlagsKHR flags; - Display* dpy; - Window window; -} VkXlibSurfaceCreateInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateXlibSurfaceKHR)(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); -typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR( - VkInstance instance, - const VkXlibSurfaceCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); - -VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXlibPresentationSupportKHR( - VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - Display* dpy, - VisualID visualID); -#endif -#endif /* VK_USE_PLATFORM_XLIB_KHR */ - -#ifdef VK_USE_PLATFORM_XCB_KHR -#define VK_KHR_xcb_surface 1 -#include <xcb/xcb.h> - -#define VK_KHR_XCB_SURFACE_SPEC_VERSION 6 -#define VK_KHR_XCB_SURFACE_EXTENSION_NAME "VK_KHR_xcb_surface" - -typedef VkFlags VkXcbSurfaceCreateFlagsKHR; - -typedef struct VkXcbSurfaceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkXcbSurfaceCreateFlagsKHR flags; - xcb_connection_t* connection; - xcb_window_t window; -} VkXcbSurfaceCreateInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateXcbSurfaceKHR)(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); -typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR( - VkInstance instance, - const VkXcbSurfaceCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); - -VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR( - VkPhysicalDevice physicalDevice, 
- uint32_t queueFamilyIndex, - xcb_connection_t* connection, - xcb_visualid_t visual_id); -#endif -#endif /* VK_USE_PLATFORM_XCB_KHR */ - -#ifdef VK_USE_PLATFORM_WAYLAND_KHR -#define VK_KHR_wayland_surface 1 -#include <wayland-client.h> - -#define VK_KHR_WAYLAND_SURFACE_SPEC_VERSION 6 -#define VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME "VK_KHR_wayland_surface" - -typedef VkFlags VkWaylandSurfaceCreateFlagsKHR; - -typedef struct VkWaylandSurfaceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkWaylandSurfaceCreateFlagsKHR flags; - struct wl_display* display; - struct wl_surface* surface; -} VkWaylandSurfaceCreateInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateWaylandSurfaceKHR)(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); -typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR( - VkInstance instance, - const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); - -VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR( - VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - struct wl_display* display); -#endif -#endif /* VK_USE_PLATFORM_WAYLAND_KHR */ - -#ifdef VK_USE_PLATFORM_MIR_KHR -#define VK_KHR_mir_surface 1 -#include <mir_toolkit/client_types.h> - -#define VK_KHR_MIR_SURFACE_SPEC_VERSION 4 -#define VK_KHR_MIR_SURFACE_EXTENSION_NAME "VK_KHR_mir_surface" - -typedef VkFlags VkMirSurfaceCreateFlagsKHR; - -typedef struct VkMirSurfaceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkMirSurfaceCreateFlagsKHR flags; - MirConnection* connection; - MirSurface* mirSurface; -} VkMirSurfaceCreateInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateMirSurfaceKHR)(VkInstance instance, const VkMirSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); -typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceMirPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, MirConnection* connection); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateMirSurfaceKHR( - VkInstance instance, - const VkMirSurfaceCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); - -VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceMirPresentationSupportKHR( - VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - MirConnection* connection); -#endif -#endif /* VK_USE_PLATFORM_MIR_KHR */ - -#ifdef VK_USE_PLATFORM_ANDROID_KHR -#define VK_KHR_android_surface 1 -#include <android/native_window.h> - -#define VK_KHR_ANDROID_SURFACE_SPEC_VERSION 6 -#define VK_KHR_ANDROID_SURFACE_EXTENSION_NAME "VK_KHR_android_surface" - -typedef VkFlags VkAndroidSurfaceCreateFlagsKHR; - -typedef struct VkAndroidSurfaceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkAndroidSurfaceCreateFlagsKHR flags; - ANativeWindow* window; -} VkAndroidSurfaceCreateInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateAndroidSurfaceKHR)(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR( - VkInstance instance, - const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* 
pAllocator, - VkSurfaceKHR* pSurface); -#endif -#endif /* VK_USE_PLATFORM_ANDROID_KHR */ - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#define VK_KHR_win32_surface 1 -#include <windows.h> - -#define VK_KHR_WIN32_SURFACE_SPEC_VERSION 6 -#define VK_KHR_WIN32_SURFACE_EXTENSION_NAME "VK_KHR_win32_surface" - -typedef VkFlags VkWin32SurfaceCreateFlagsKHR; - -typedef struct VkWin32SurfaceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkWin32SurfaceCreateFlagsKHR flags; - HINSTANCE hinstance; - HWND hwnd; -} VkWin32SurfaceCreateInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateWin32SurfaceKHR)(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); -typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR( - VkInstance instance, - const VkWin32SurfaceCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); - -VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR( - VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex); -#endif -#endif /* VK_USE_PLATFORM_WIN32_KHR */ - -#define VK_KHR_sampler_mirror_clamp_to_edge 1 -#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION 1 -#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME "VK_KHR_sampler_mirror_clamp_to_edge" - - -#define VK_KHR_get_physical_device_properties2 1 -#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION 1 -#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_physical_device_properties2" - -typedef struct VkPhysicalDeviceFeatures2KHR { - VkStructureType sType; - void* pNext; - VkPhysicalDeviceFeatures features; -} VkPhysicalDeviceFeatures2KHR; - -typedef struct VkPhysicalDeviceProperties2KHR { - VkStructureType sType; - void* pNext; - VkPhysicalDeviceProperties properties; -} VkPhysicalDeviceProperties2KHR; - -typedef struct VkFormatProperties2KHR { - VkStructureType sType; - void* pNext; - VkFormatProperties formatProperties; -} VkFormatProperties2KHR; - -typedef struct VkImageFormatProperties2KHR { - VkStructureType sType; - void* pNext; - VkImageFormatProperties imageFormatProperties; -} VkImageFormatProperties2KHR; - -typedef struct VkPhysicalDeviceImageFormatInfo2KHR { - VkStructureType sType; - const void* pNext; - VkFormat format; - VkImageType type; - VkImageTiling tiling; - VkImageUsageFlags usage; - VkImageCreateFlags flags; -} VkPhysicalDeviceImageFormatInfo2KHR; - -typedef struct VkQueueFamilyProperties2KHR { - VkStructureType sType; - void* pNext; - VkQueueFamilyProperties queueFamilyProperties; -} VkQueueFamilyProperties2KHR; - -typedef struct VkPhysicalDeviceMemoryProperties2KHR { - VkStructureType sType; - void* pNext; - VkPhysicalDeviceMemoryProperties memoryProperties; -} VkPhysicalDeviceMemoryProperties2KHR; - -typedef struct VkSparseImageFormatProperties2KHR { - VkStructureType sType; - void* pNext; - VkSparseImageFormatProperties properties; -} VkSparseImageFormatProperties2KHR; - -typedef struct VkPhysicalDeviceSparseImageFormatInfo2KHR { - VkStructureType sType; - const void* pNext; - VkFormat format; - VkImageType type; - VkSampleCountFlagBits samples; - VkImageUsageFlags usage; - VkImageTiling tiling; -} VkPhysicalDeviceSparseImageFormatInfo2KHR; - - -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2KHR)(VkPhysicalDevice physicalDevice, 
VkPhysicalDeviceFeatures2KHR* pFeatures); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2KHR* pProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2KHR)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2KHR* pFormatProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2KHR* pImageFormatInfo, VkImageFormatProperties2KHR* pImageFormatProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR* pQueueFamilyProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2KHR* pMemoryProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2KHR* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2KHR* pProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2KHR( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceFeatures2KHR* pFeatures); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2KHR( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceProperties2KHR* pProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2KHR( - VkPhysicalDevice physicalDevice, - VkFormat format, - VkFormatProperties2KHR* pFormatProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2KHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceImageFormatInfo2KHR* pImageFormatInfo, - VkImageFormatProperties2KHR* pImageFormatProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2KHR( - VkPhysicalDevice physicalDevice, - uint32_t* pQueueFamilyPropertyCount, - VkQueueFamilyProperties2KHR* pQueueFamilyProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2KHR( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties2KHR* pMemoryProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2KHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSparseImageFormatInfo2KHR* pFormatInfo, - uint32_t* pPropertyCount, - VkSparseImageFormatProperties2KHR* pProperties); -#endif - -#define VK_KHR_shader_draw_parameters 1 -#define VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION 1 -#define VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME "VK_KHR_shader_draw_parameters" - - -#define VK_KHR_maintenance1 1 -#define VK_KHR_MAINTENANCE1_SPEC_VERSION 1 -#define VK_KHR_MAINTENANCE1_EXTENSION_NAME "VK_KHR_maintenance1" - -typedef VkFlags VkCommandPoolTrimFlagsKHR; - -typedef void (VKAPI_PTR *PFN_vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlagsKHR flags); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkTrimCommandPoolKHR( - VkDevice device, - VkCommandPool commandPool, - VkCommandPoolTrimFlagsKHR flags); -#endif - -#define VK_KHR_external_memory_capabilities 1 -#define VK_LUID_SIZE_KHR 8 -#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_memory_capabilities" - - -typedef enum VkExternalMemoryHandleTypeFlagBitsKHR { - 
VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = 0x00000001, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = 0x00000002, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = 0x00000004, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR = 0x00000008, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR = 0x00000010, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR = 0x00000020, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR = 0x00000040, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkExternalMemoryHandleTypeFlagBitsKHR; -typedef VkFlags VkExternalMemoryHandleTypeFlagsKHR; - -typedef enum VkExternalMemoryFeatureFlagBitsKHR { - VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR = 0x00000001, - VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR = 0x00000002, - VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR = 0x00000004, - VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkExternalMemoryFeatureFlagBitsKHR; -typedef VkFlags VkExternalMemoryFeatureFlagsKHR; - -typedef struct VkExternalMemoryPropertiesKHR { - VkExternalMemoryFeatureFlagsKHR externalMemoryFeatures; - VkExternalMemoryHandleTypeFlagsKHR exportFromImportedHandleTypes; - VkExternalMemoryHandleTypeFlagsKHR compatibleHandleTypes; -} VkExternalMemoryPropertiesKHR; - -typedef struct VkPhysicalDeviceExternalImageFormatInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagBitsKHR handleType; -} VkPhysicalDeviceExternalImageFormatInfoKHR; - -typedef struct VkExternalImageFormatPropertiesKHR { - VkStructureType sType; - void* pNext; - VkExternalMemoryPropertiesKHR externalMemoryProperties; -} VkExternalImageFormatPropertiesKHR; - -typedef struct VkPhysicalDeviceExternalBufferInfoKHR { - VkStructureType sType; - const void* pNext; - VkBufferCreateFlags flags; - VkBufferUsageFlags usage; - VkExternalMemoryHandleTypeFlagBitsKHR handleType; -} VkPhysicalDeviceExternalBufferInfoKHR; - -typedef struct VkExternalBufferPropertiesKHR { - VkStructureType sType; - void* pNext; - VkExternalMemoryPropertiesKHR externalMemoryProperties; -} VkExternalBufferPropertiesKHR; - -typedef struct VkPhysicalDeviceIDPropertiesKHR { - VkStructureType sType; - void* pNext; - uint8_t deviceUUID[VK_UUID_SIZE]; - uint8_t driverUUID[VK_UUID_SIZE]; - uint8_t deviceLUID[VK_LUID_SIZE_KHR]; - uint32_t deviceNodeMask; - VkBool32 deviceLUIDValid; -} VkPhysicalDeviceIDPropertiesKHR; - - -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfoKHR* pExternalBufferInfo, VkExternalBufferPropertiesKHR* pExternalBufferProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferPropertiesKHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalBufferInfoKHR* pExternalBufferInfo, - VkExternalBufferPropertiesKHR* pExternalBufferProperties); -#endif - -#define VK_KHR_external_memory 1 -#define VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME "VK_KHR_external_memory" -#define VK_QUEUE_FAMILY_EXTERNAL_KHR (~0U-1) - -typedef struct VkExternalMemoryImageCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagsKHR handleTypes; -} VkExternalMemoryImageCreateInfoKHR; - -typedef struct VkExternalMemoryBufferCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagsKHR handleTypes; -} VkExternalMemoryBufferCreateInfoKHR; - -typedef 
struct VkExportMemoryAllocateInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagsKHR handleTypes; -} VkExportMemoryAllocateInfoKHR; - - - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#define VK_KHR_external_memory_win32 1 -#define VK_KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_KHR_external_memory_win32" - -typedef struct VkImportMemoryWin32HandleInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagBitsKHR handleType; - HANDLE handle; - LPCWSTR name; -} VkImportMemoryWin32HandleInfoKHR; - -typedef struct VkExportMemoryWin32HandleInfoKHR { - VkStructureType sType; - const void* pNext; - const SECURITY_ATTRIBUTES* pAttributes; - DWORD dwAccess; - LPCWSTR name; -} VkExportMemoryWin32HandleInfoKHR; - -typedef struct VkMemoryWin32HandlePropertiesKHR { - VkStructureType sType; - void* pNext; - uint32_t memoryTypeBits; -} VkMemoryWin32HandlePropertiesKHR; - -typedef struct VkMemoryGetWin32HandleInfoKHR { - VkStructureType sType; - const void* pNext; - VkDeviceMemory memory; - VkExternalMemoryHandleTypeFlagBitsKHR handleType; -} VkMemoryGetWin32HandleInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleKHR)(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); -typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandlePropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBitsKHR handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleKHR( - VkDevice device, - const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, - HANDLE* pHandle); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandlePropertiesKHR( - VkDevice device, - VkExternalMemoryHandleTypeFlagBitsKHR handleType, - HANDLE handle, - VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties); -#endif -#endif /* VK_USE_PLATFORM_WIN32_KHR */ - -#define VK_KHR_external_memory_fd 1 -#define VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME "VK_KHR_external_memory_fd" - -typedef struct VkImportMemoryFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagBitsKHR handleType; - int fd; -} VkImportMemoryFdInfoKHR; - -typedef struct VkMemoryFdPropertiesKHR { - VkStructureType sType; - void* pNext; - uint32_t memoryTypeBits; -} VkMemoryFdPropertiesKHR; - -typedef struct VkMemoryGetFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkDeviceMemory memory; - VkExternalMemoryHandleTypeFlagBitsKHR handleType; -} VkMemoryGetFdInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd); -typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdPropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBitsKHR handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR( - VkDevice device, - const VkMemoryGetFdInfoKHR* pGetFdInfo, - int* pFd); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR( - VkDevice device, - VkExternalMemoryHandleTypeFlagBitsKHR handleType, - int fd, - VkMemoryFdPropertiesKHR* pMemoryFdProperties); -#endif - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#define VK_KHR_win32_keyed_mutex 1 -#define VK_KHR_WIN32_KEYED_MUTEX_SPEC_VERSION 1 -#define VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_KHR_win32_keyed_mutex" - 
-typedef struct VkWin32KeyedMutexAcquireReleaseInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t acquireCount; - const VkDeviceMemory* pAcquireSyncs; - const uint64_t* pAcquireKeys; - const uint32_t* pAcquireTimeouts; - uint32_t releaseCount; - const VkDeviceMemory* pReleaseSyncs; - const uint64_t* pReleaseKeys; -} VkWin32KeyedMutexAcquireReleaseInfoKHR; - - -#endif /* VK_USE_PLATFORM_WIN32_KHR */ - -#define VK_KHR_external_semaphore_capabilities 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_semaphore_capabilities" - - -typedef enum VkExternalSemaphoreHandleTypeFlagBitsKHR { - VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = 0x00000001, - VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = 0x00000002, - VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = 0x00000004, - VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR = 0x00000008, - VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR = 0x00000010, - VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkExternalSemaphoreHandleTypeFlagBitsKHR; -typedef VkFlags VkExternalSemaphoreHandleTypeFlagsKHR; - -typedef enum VkExternalSemaphoreFeatureFlagBitsKHR { - VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR = 0x00000001, - VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR = 0x00000002, - VK_EXTERNAL_SEMAPHORE_FEATURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkExternalSemaphoreFeatureFlagBitsKHR; -typedef VkFlags VkExternalSemaphoreFeatureFlagsKHR; - -typedef struct VkPhysicalDeviceExternalSemaphoreInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalSemaphoreHandleTypeFlagBitsKHR handleType; -} VkPhysicalDeviceExternalSemaphoreInfoKHR; - -typedef struct VkExternalSemaphorePropertiesKHR { - VkStructureType sType; - void* pNext; - VkExternalSemaphoreHandleTypeFlagsKHR exportFromImportedHandleTypes; - VkExternalSemaphoreHandleTypeFlagsKHR compatibleHandleTypes; - VkExternalSemaphoreFeatureFlagsKHR externalSemaphoreFeatures; -} VkExternalSemaphorePropertiesKHR; - - -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfoKHR* pExternalSemaphoreInfo, VkExternalSemaphorePropertiesKHR* pExternalSemaphoreProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalSemaphoreInfoKHR* pExternalSemaphoreInfo, - VkExternalSemaphorePropertiesKHR* pExternalSemaphoreProperties); -#endif - -#define VK_KHR_external_semaphore 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_KHR_external_semaphore" - - -typedef enum VkSemaphoreImportFlagBitsKHR { - VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR = 0x00000001, - VK_SEMAPHORE_IMPORT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkSemaphoreImportFlagBitsKHR; -typedef VkFlags VkSemaphoreImportFlagsKHR; - -typedef struct VkExportSemaphoreCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalSemaphoreHandleTypeFlagsKHR handleTypes; -} VkExportSemaphoreCreateInfoKHR; - - - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#define VK_KHR_external_semaphore_win32 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME "VK_KHR_external_semaphore_win32" - -typedef struct VkImportSemaphoreWin32HandleInfoKHR { - VkStructureType sType; - 
const void* pNext; - VkSemaphore semaphore; - VkSemaphoreImportFlagsKHR flags; - VkExternalSemaphoreHandleTypeFlagBitsKHR handleType; - HANDLE handle; - LPCWSTR name; -} VkImportSemaphoreWin32HandleInfoKHR; - -typedef struct VkExportSemaphoreWin32HandleInfoKHR { - VkStructureType sType; - const void* pNext; - const SECURITY_ATTRIBUTES* pAttributes; - DWORD dwAccess; - LPCWSTR name; -} VkExportSemaphoreWin32HandleInfoKHR; - -typedef struct VkD3D12FenceSubmitInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t waitSemaphoreValuesCount; - const uint64_t* pWaitSemaphoreValues; - uint32_t signalSemaphoreValuesCount; - const uint64_t* pSignalSemaphoreValues; -} VkD3D12FenceSubmitInfoKHR; - -typedef struct VkSemaphoreGetWin32HandleInfoKHR { - VkStructureType sType; - const void* pNext; - VkSemaphore semaphore; - VkExternalSemaphoreHandleTypeFlagBitsKHR handleType; -} VkSemaphoreGetWin32HandleInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreWin32HandleKHR)(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo); -typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreWin32HandleKHR)(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreWin32HandleKHR( - VkDevice device, - const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreWin32HandleKHR( - VkDevice device, - const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, - HANDLE* pHandle); -#endif -#endif /* VK_USE_PLATFORM_WIN32_KHR */ - -#define VK_KHR_external_semaphore_fd 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "VK_KHR_external_semaphore_fd" - -typedef struct VkImportSemaphoreFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkSemaphore semaphore; - VkSemaphoreImportFlagsKHR flags; - VkExternalSemaphoreHandleTypeFlagBitsKHR handleType; - int fd; -} VkImportSemaphoreFdInfoKHR; - -typedef struct VkSemaphoreGetFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkSemaphore semaphore; - VkExternalSemaphoreHandleTypeFlagBitsKHR handleType; -} VkSemaphoreGetFdInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreFdKHR)(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); -typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreFdKHR)(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR( - VkDevice device, - const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR( - VkDevice device, - const VkSemaphoreGetFdInfoKHR* pGetFdInfo, - int* pFd); -#endif - -#define VK_KHR_push_descriptor 1 -#define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 1 -#define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor" - -typedef struct VkPhysicalDevicePushDescriptorPropertiesKHR { - VkStructureType sType; - void* pNext; - uint32_t maxPushDescriptors; -} VkPhysicalDevicePushDescriptorPropertiesKHR; - - -typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetKHR( - VkCommandBuffer commandBuffer, - 
VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t set, - uint32_t descriptorWriteCount, - const VkWriteDescriptorSet* pDescriptorWrites); -#endif - -#define VK_KHR_16bit_storage 1 -#define VK_KHR_16BIT_STORAGE_SPEC_VERSION 1 -#define VK_KHR_16BIT_STORAGE_EXTENSION_NAME "VK_KHR_16bit_storage" - -typedef struct VkPhysicalDevice16BitStorageFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 storageBuffer16BitAccess; - VkBool32 uniformAndStorageBuffer16BitAccess; - VkBool32 storagePushConstant16; - VkBool32 storageInputOutput16; -} VkPhysicalDevice16BitStorageFeaturesKHR; - - - -#define VK_KHR_incremental_present 1 -#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 1 -#define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present" - -typedef struct VkRectLayerKHR { - VkOffset2D offset; - VkExtent2D extent; - uint32_t layer; -} VkRectLayerKHR; - -typedef struct VkPresentRegionKHR { - uint32_t rectangleCount; - const VkRectLayerKHR* pRectangles; -} VkPresentRegionKHR; - -typedef struct VkPresentRegionsKHR { - VkStructureType sType; - const void* pNext; - uint32_t swapchainCount; - const VkPresentRegionKHR* pRegions; -} VkPresentRegionsKHR; - - - -#define VK_KHR_descriptor_update_template 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplateKHR) - -#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION 1 -#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME "VK_KHR_descriptor_update_template" - - -typedef enum VkDescriptorUpdateTemplateTypeKHR { - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR = 0, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = 1, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_BEGIN_RANGE_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_END_RANGE_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_RANGE_SIZE_KHR = (VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR + 1), - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkDescriptorUpdateTemplateTypeKHR; - -typedef VkFlags VkDescriptorUpdateTemplateCreateFlagsKHR; - -typedef struct VkDescriptorUpdateTemplateEntryKHR { - uint32_t dstBinding; - uint32_t dstArrayElement; - uint32_t descriptorCount; - VkDescriptorType descriptorType; - size_t offset; - size_t stride; -} VkDescriptorUpdateTemplateEntryKHR; - -typedef struct VkDescriptorUpdateTemplateCreateInfoKHR { - VkStructureType sType; - void* pNext; - VkDescriptorUpdateTemplateCreateFlagsKHR flags; - uint32_t descriptorUpdateEntryCount; - const VkDescriptorUpdateTemplateEntryKHR* pDescriptorUpdateEntries; - VkDescriptorUpdateTemplateTypeKHR templateType; - VkDescriptorSetLayout descriptorSetLayout; - VkPipelineBindPoint pipelineBindPoint; - VkPipelineLayout pipelineLayout; - uint32_t set; -} VkDescriptorUpdateTemplateCreateInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplateKHR)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate); -typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplateKHR)(VkDevice device, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const 
void* pData); -typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplateKHR( - VkDevice device, - const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate); - -VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplateKHR( - VkDevice device, - VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR( - VkDevice device, - VkDescriptorSet descriptorSet, - VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, - const void* pData); - -VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR( - VkCommandBuffer commandBuffer, - VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, - VkPipelineLayout layout, - uint32_t set, - const void* pData); -#endif - -#define VK_KHR_shared_presentable_image 1 -#define VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION 1 -#define VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME "VK_KHR_shared_presentable_image" - -typedef struct VkSharedPresentSurfaceCapabilitiesKHR { - VkStructureType sType; - void* pNext; - VkImageUsageFlags sharedPresentSupportedUsageFlags; -} VkSharedPresentSurfaceCapabilitiesKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR swapchain); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainStatusKHR( - VkDevice device, - VkSwapchainKHR swapchain); -#endif - -#define VK_KHR_external_fence_capabilities 1 -#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_fence_capabilities" - - -typedef enum VkExternalFenceHandleTypeFlagBitsKHR { - VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = 0x00000001, - VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = 0x00000002, - VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = 0x00000004, - VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR = 0x00000008, - VK_EXTERNAL_FENCE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkExternalFenceHandleTypeFlagBitsKHR; -typedef VkFlags VkExternalFenceHandleTypeFlagsKHR; - -typedef enum VkExternalFenceFeatureFlagBitsKHR { - VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR = 0x00000001, - VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR = 0x00000002, - VK_EXTERNAL_FENCE_FEATURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkExternalFenceFeatureFlagBitsKHR; -typedef VkFlags VkExternalFenceFeatureFlagsKHR; - -typedef struct VkPhysicalDeviceExternalFenceInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalFenceHandleTypeFlagBitsKHR handleType; -} VkPhysicalDeviceExternalFenceInfoKHR; - -typedef struct VkExternalFencePropertiesKHR { - VkStructureType sType; - void* pNext; - VkExternalFenceHandleTypeFlagsKHR exportFromImportedHandleTypes; - VkExternalFenceHandleTypeFlagsKHR compatibleHandleTypes; - VkExternalFenceFeatureFlagsKHR externalFenceFeatures; -} VkExternalFencePropertiesKHR; - - -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfoKHR* pExternalFenceInfo, VkExternalFencePropertiesKHR* pExternalFenceProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void 
VKAPI_CALL vkGetPhysicalDeviceExternalFencePropertiesKHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalFenceInfoKHR* pExternalFenceInfo, - VkExternalFencePropertiesKHR* pExternalFenceProperties); -#endif - -#define VK_KHR_external_fence 1 -#define VK_KHR_EXTERNAL_FENCE_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME "VK_KHR_external_fence" - - -typedef enum VkFenceImportFlagBitsKHR { - VK_FENCE_IMPORT_TEMPORARY_BIT_KHR = 0x00000001, - VK_FENCE_IMPORT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkFenceImportFlagBitsKHR; -typedef VkFlags VkFenceImportFlagsKHR; - -typedef struct VkExportFenceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalFenceHandleTypeFlagsKHR handleTypes; -} VkExportFenceCreateInfoKHR; - - - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#define VK_KHR_external_fence_win32 1 -#define VK_KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME "VK_KHR_external_fence_win32" - -typedef struct VkImportFenceWin32HandleInfoKHR { - VkStructureType sType; - const void* pNext; - VkFence fence; - VkFenceImportFlagsKHR flags; - VkExternalFenceHandleTypeFlagBitsKHR handleType; - HANDLE handle; - LPCWSTR name; -} VkImportFenceWin32HandleInfoKHR; - -typedef struct VkExportFenceWin32HandleInfoKHR { - VkStructureType sType; - const void* pNext; - const SECURITY_ATTRIBUTES* pAttributes; - DWORD dwAccess; - LPCWSTR name; -} VkExportFenceWin32HandleInfoKHR; - -typedef struct VkFenceGetWin32HandleInfoKHR { - VkStructureType sType; - const void* pNext; - VkFence fence; - VkExternalFenceHandleTypeFlagBitsKHR handleType; -} VkFenceGetWin32HandleInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkImportFenceWin32HandleKHR)(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo); -typedef VkResult (VKAPI_PTR *PFN_vkGetFenceWin32HandleKHR)(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceWin32HandleKHR( - VkDevice device, - const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceWin32HandleKHR( - VkDevice device, - const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, - HANDLE* pHandle); -#endif -#endif /* VK_USE_PLATFORM_WIN32_KHR */ - -#define VK_KHR_external_fence_fd 1 -#define VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME "VK_KHR_external_fence_fd" - -typedef struct VkImportFenceFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkFence fence; - VkFenceImportFlagsKHR flags; - VkExternalFenceHandleTypeFlagBitsKHR handleType; - int fd; -} VkImportFenceFdInfoKHR; - -typedef struct VkFenceGetFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkFence fence; - VkExternalFenceHandleTypeFlagBitsKHR handleType; -} VkFenceGetFdInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo); -typedef VkResult (VKAPI_PTR *PFN_vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceFdKHR( - VkDevice device, - const VkImportFenceFdInfoKHR* pImportFenceFdInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR( - VkDevice device, - const VkFenceGetFdInfoKHR* pGetFdInfo, - int* pFd); -#endif - -#define VK_KHR_maintenance2 1 -#define VK_KHR_MAINTENANCE2_SPEC_VERSION 1 -#define 
VK_KHR_MAINTENANCE2_EXTENSION_NAME "VK_KHR_maintenance2" - - -typedef enum VkPointClippingBehaviorKHR { - VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR = 0, - VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR = 1, - VK_POINT_CLIPPING_BEHAVIOR_BEGIN_RANGE_KHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR, - VK_POINT_CLIPPING_BEHAVIOR_END_RANGE_KHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR, - VK_POINT_CLIPPING_BEHAVIOR_RANGE_SIZE_KHR = (VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR - VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR + 1), - VK_POINT_CLIPPING_BEHAVIOR_MAX_ENUM_KHR = 0x7FFFFFFF -} VkPointClippingBehaviorKHR; - -typedef enum VkTessellationDomainOriginKHR { - VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR = 0, - VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR = 1, - VK_TESSELLATION_DOMAIN_ORIGIN_BEGIN_RANGE_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR, - VK_TESSELLATION_DOMAIN_ORIGIN_END_RANGE_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR, - VK_TESSELLATION_DOMAIN_ORIGIN_RANGE_SIZE_KHR = (VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR - VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR + 1), - VK_TESSELLATION_DOMAIN_ORIGIN_MAX_ENUM_KHR = 0x7FFFFFFF -} VkTessellationDomainOriginKHR; - -typedef struct VkPhysicalDevicePointClippingPropertiesKHR { - VkStructureType sType; - void* pNext; - VkPointClippingBehaviorKHR pointClippingBehavior; -} VkPhysicalDevicePointClippingPropertiesKHR; - -typedef struct VkInputAttachmentAspectReferenceKHR { - uint32_t subpass; - uint32_t inputAttachmentIndex; - VkImageAspectFlags aspectMask; -} VkInputAttachmentAspectReferenceKHR; - -typedef struct VkRenderPassInputAttachmentAspectCreateInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t aspectReferenceCount; - const VkInputAttachmentAspectReferenceKHR* pAspectReferences; -} VkRenderPassInputAttachmentAspectCreateInfoKHR; - -typedef struct VkImageViewUsageCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkImageUsageFlags usage; -} VkImageViewUsageCreateInfoKHR; - -typedef struct VkPipelineTessellationDomainOriginStateCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkTessellationDomainOriginKHR domainOrigin; -} VkPipelineTessellationDomainOriginStateCreateInfoKHR; - - - -#define VK_KHR_get_surface_capabilities2 1 -#define VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION 1 -#define VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME "VK_KHR_get_surface_capabilities2" - -typedef struct VkPhysicalDeviceSurfaceInfo2KHR { - VkStructureType sType; - const void* pNext; - VkSurfaceKHR surface; -} VkPhysicalDeviceSurfaceInfo2KHR; - -typedef struct VkSurfaceCapabilities2KHR { - VkStructureType sType; - void* pNext; - VkSurfaceCapabilitiesKHR surfaceCapabilities; -} VkSurfaceCapabilities2KHR; - -typedef struct VkSurfaceFormat2KHR { - VkStructureType sType; - void* pNext; - VkSurfaceFormatKHR surfaceFormat; -} VkSurfaceFormat2KHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR* 
pSurfaceInfo, - VkSurfaceCapabilities2KHR* pSurfaceCapabilities); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, - uint32_t* pSurfaceFormatCount, - VkSurfaceFormat2KHR* pSurfaceFormats); -#endif - -#define VK_KHR_variable_pointers 1 -#define VK_KHR_VARIABLE_POINTERS_SPEC_VERSION 1 -#define VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME "VK_KHR_variable_pointers" - -typedef struct VkPhysicalDeviceVariablePointerFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 variablePointersStorageBuffer; - VkBool32 variablePointers; -} VkPhysicalDeviceVariablePointerFeaturesKHR; - - - -#define VK_KHR_get_display_properties2 1 -#define VK_KHR_GET_DISPLAY_PROPERTIES_2_SPEC_VERSION 1 -#define VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_display_properties2" - -typedef struct VkDisplayProperties2KHR { - VkStructureType sType; - void* pNext; - VkDisplayPropertiesKHR displayProperties; -} VkDisplayProperties2KHR; - -typedef struct VkDisplayPlaneProperties2KHR { - VkStructureType sType; - void* pNext; - VkDisplayPlanePropertiesKHR displayPlaneProperties; -} VkDisplayPlaneProperties2KHR; - -typedef struct VkDisplayModeProperties2KHR { - VkStructureType sType; - void* pNext; - VkDisplayModePropertiesKHR displayModeProperties; -} VkDisplayModeProperties2KHR; - -typedef struct VkDisplayPlaneInfo2KHR { - VkStructureType sType; - const void* pNext; - VkDisplayModeKHR mode; - uint32_t planeIndex; -} VkDisplayPlaneInfo2KHR; - -typedef struct VkDisplayPlaneCapabilities2KHR { - VkStructureType sType; - void* pNext; - VkDisplayPlaneCapabilitiesKHR capabilities; -} VkDisplayPlaneCapabilities2KHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModeProperties2KHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayProperties2KHR( - VkPhysicalDevice physicalDevice, - uint32_t* pPropertyCount, - VkDisplayProperties2KHR* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlaneProperties2KHR( - VkPhysicalDevice physicalDevice, - uint32_t* pPropertyCount, - VkDisplayPlaneProperties2KHR* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModeProperties2KHR( - VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - uint32_t* pPropertyCount, - VkDisplayModeProperties2KHR* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilities2KHR( - VkPhysicalDevice physicalDevice, - const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, - VkDisplayPlaneCapabilities2KHR* pCapabilities); -#endif - -#define VK_KHR_dedicated_allocation 1 -#define VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION 1 -#define VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_KHR_dedicated_allocation" - -typedef struct VkMemoryDedicatedRequirementsKHR { - VkStructureType sType; - void* pNext; 
- VkBool32 prefersDedicatedAllocation; - VkBool32 requiresDedicatedAllocation; -} VkMemoryDedicatedRequirementsKHR; - -typedef struct VkMemoryDedicatedAllocateInfoKHR { - VkStructureType sType; - const void* pNext; - VkImage image; - VkBuffer buffer; -} VkMemoryDedicatedAllocateInfoKHR; - - - -#define VK_KHR_storage_buffer_storage_class 1 -#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION 1 -#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME "VK_KHR_storage_buffer_storage_class" - - -#define VK_KHR_relaxed_block_layout 1 -#define VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION 1 -#define VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME "VK_KHR_relaxed_block_layout" - - -#define VK_KHR_protected_memory 1 -#define VK_KHR_PROTECTED_MEMORY_SPEC_VERSION 2 -#define VK_KHR_PROTECTED_MEMORY_EXTENSION_NAME "VK_KHR_protected_memory" - -typedef struct VkProtectedSubmitInfoKHR { - VkStructureType sType; - const void* pNext; - VkBool32 protectedSubmit; -} VkProtectedSubmitInfoKHR; - -typedef struct VkPhysicalDeviceProtectedMemoryFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 protectedMemory; -} VkPhysicalDeviceProtectedMemoryFeaturesKHR; - -typedef struct VkPhysicalDeviceProtectedMemoryPropertiesKHR { - VkStructureType sType; - void* pNext; - VkBool32 protectedNoFault; -} VkPhysicalDeviceProtectedMemoryPropertiesKHR; - - - -#define VK_KHR_get_memory_requirements2 1 -#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION 1 -#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME "VK_KHR_get_memory_requirements2" - -typedef struct VkBufferMemoryRequirementsInfo2KHR { - VkStructureType sType; - const void* pNext; - VkBuffer buffer; -} VkBufferMemoryRequirementsInfo2KHR; - -typedef struct VkImageMemoryRequirementsInfo2KHR { - VkStructureType sType; - const void* pNext; - VkImage image; -} VkImageMemoryRequirementsInfo2KHR; - -typedef struct VkImageSparseMemoryRequirementsInfo2KHR { - VkStructureType sType; - const void* pNext; - VkImage image; -} VkImageSparseMemoryRequirementsInfo2KHR; - -typedef struct VkMemoryRequirements2KHR { - VkStructureType sType; - void* pNext; - VkMemoryRequirements memoryRequirements; -} VkMemoryRequirements2KHR; - -typedef struct VkSparseImageMemoryRequirements2KHR { - VkStructureType sType; - void* pNext; - VkSparseImageMemoryRequirements memoryRequirements; -} VkSparseImageMemoryRequirements2KHR; - - -typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const VkImageMemoryRequirementsInfo2KHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements); -typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2KHR)(VkDevice device, const VkBufferMemoryRequirementsInfo2KHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements); -typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2KHR)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2KHR( - VkDevice device, - const VkImageMemoryRequirementsInfo2KHR* pInfo, - VkMemoryRequirements2KHR* pMemoryRequirements); - -VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2KHR( - VkDevice device, - const VkBufferMemoryRequirementsInfo2KHR* pInfo, - VkMemoryRequirements2KHR* pMemoryRequirements); - -VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR( - VkDevice device, - const VkImageSparseMemoryRequirementsInfo2KHR* pInfo, - uint32_t* 
pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements); -#endif - -#define VK_KHR_image_format_list 1 -#define VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION 1 -#define VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME "VK_KHR_image_format_list" - -typedef struct VkImageFormatListCreateInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t viewFormatCount; - const VkFormat* pViewFormats; -} VkImageFormatListCreateInfoKHR; - - - -#define VK_KHR_sampler_ycbcr_conversion 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSamplerYcbcrConversionKHR) - -#define VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION 1 -#define VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME "VK_KHR_sampler_ycbcr_conversion" - - -typedef enum VkSamplerYcbcrModelConversionKHR { - VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR = 0, - VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR = 1, - VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR = 2, - VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR = 3, - VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR = 4, - VK_SAMPLER_YCBCR_MODEL_CONVERSION_BEGIN_RANGE_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR, - VK_SAMPLER_YCBCR_MODEL_CONVERSION_END_RANGE_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR, - VK_SAMPLER_YCBCR_MODEL_CONVERSION_RANGE_SIZE_KHR = (VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR - VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR + 1), - VK_SAMPLER_YCBCR_MODEL_CONVERSION_MAX_ENUM_KHR = 0x7FFFFFFF -} VkSamplerYcbcrModelConversionKHR; - -typedef enum VkSamplerYcbcrRangeKHR { - VK_SAMPLER_YCBCR_RANGE_KHRONOS_KHR = 0, - VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR = 1, - VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR = 2, - VK_SAMPLER_YCBCR_RANGE_BEGIN_RANGE_KHR = VK_SAMPLER_YCBCR_RANGE_KHRONOS_KHR, - VK_SAMPLER_YCBCR_RANGE_END_RANGE_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR, - VK_SAMPLER_YCBCR_RANGE_RANGE_SIZE_KHR = (VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR - VK_SAMPLER_YCBCR_RANGE_KHRONOS_KHR + 1), - VK_SAMPLER_YCBCR_RANGE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkSamplerYcbcrRangeKHR; - -typedef enum VkChromaLocationKHR { - VK_CHROMA_LOCATION_COSITED_EVEN_KHR = 0, - VK_CHROMA_LOCATION_MIDPOINT_KHR = 1, - VK_CHROMA_LOCATION_BEGIN_RANGE_KHR = VK_CHROMA_LOCATION_COSITED_EVEN_KHR, - VK_CHROMA_LOCATION_END_RANGE_KHR = VK_CHROMA_LOCATION_MIDPOINT_KHR, - VK_CHROMA_LOCATION_RANGE_SIZE_KHR = (VK_CHROMA_LOCATION_MIDPOINT_KHR - VK_CHROMA_LOCATION_COSITED_EVEN_KHR + 1), - VK_CHROMA_LOCATION_MAX_ENUM_KHR = 0x7FFFFFFF -} VkChromaLocationKHR; - -typedef struct VkSamplerYcbcrConversionCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkFormat format; - VkSamplerYcbcrModelConversionKHR YcbcrModel; - VkSamplerYcbcrRangeKHR YcbcrRange; - VkComponentMapping components; - VkChromaLocationKHR xChromaOffset; - VkChromaLocationKHR yChromaOffset; - VkFilter chromaFilter; - VkBool32 forceExplicitReconstruction; -} VkSamplerYcbcrConversionCreateInfoKHR; - -typedef struct VkSamplerYcbcrConversionInfoKHR { - VkStructureType sType; - const void* pNext; - VkSamplerYcbcrConversionKHR conversion; -} VkSamplerYcbcrConversionInfoKHR; - -typedef struct VkBindImagePlaneMemoryInfoKHR { - VkStructureType sType; - const void* pNext; - VkImageAspectFlagBits planeAspect; -} VkBindImagePlaneMemoryInfoKHR; - -typedef struct VkImagePlaneMemoryRequirementsInfoKHR { - VkStructureType sType; - const void* pNext; - VkImageAspectFlagBits planeAspect; -} VkImagePlaneMemoryRequirementsInfoKHR; - -typedef struct VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR { - VkStructureType sType; 
- void* pNext; - VkBool32 samplerYcbcrConversion; -} VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR; - -typedef struct VkSamplerYcbcrConversionImageFormatPropertiesKHR { - VkStructureType sType; - void* pNext; - uint32_t combinedImageSamplerDescriptorCount; -} VkSamplerYcbcrConversionImageFormatPropertiesKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversionKHR)(VkDevice device, const VkSamplerYcbcrConversionCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversionKHR* pYcbcrConversion); -typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversionKHR)(VkDevice device, VkSamplerYcbcrConversionKHR YcbcrConversion, const VkAllocationCallbacks* pAllocator); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversionKHR( - VkDevice device, - const VkSamplerYcbcrConversionCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSamplerYcbcrConversionKHR* pYcbcrConversion); - -VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversionKHR( - VkDevice device, - VkSamplerYcbcrConversionKHR YcbcrConversion, - const VkAllocationCallbacks* pAllocator); -#endif - -#define VK_KHR_bind_memory2 1 -#define VK_KHR_BIND_MEMORY_2_SPEC_VERSION 1 -#define VK_KHR_BIND_MEMORY_2_EXTENSION_NAME "VK_KHR_bind_memory2" - -typedef struct VkBindBufferMemoryInfoKHR { - VkStructureType sType; - const void* pNext; - VkBuffer buffer; - VkDeviceMemory memory; - VkDeviceSize memoryOffset; -} VkBindBufferMemoryInfoKHR; - -typedef struct VkBindImageMemoryInfoKHR { - VkStructureType sType; - const void* pNext; - VkImage image; - VkDeviceMemory memory; - VkDeviceSize memoryOffset; -} VkBindImageMemoryInfoKHR; - - -typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfoKHR* pBindInfos); -typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfoKHR* pBindInfos); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2KHR( - VkDevice device, - uint32_t bindInfoCount, - const VkBindBufferMemoryInfoKHR* pBindInfos); - -VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHR( - VkDevice device, - uint32_t bindInfoCount, - const VkBindImageMemoryInfoKHR* pBindInfos); -#endif - -#define VK_EXT_debug_report 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) - -#define VK_EXT_DEBUG_REPORT_SPEC_VERSION 8 -#define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report" -#define VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT -#define VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT - - -typedef enum VkDebugReportObjectTypeEXT { - VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT = 0, - VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT = 1, - VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT = 2, - VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT = 3, - VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT = 4, - VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT = 5, - VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT = 6, - VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT = 7, - VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT = 8, - VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT = 9, - VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT = 10, - VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT = 11, - VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT = 12, - VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT = 13, - VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT = 14, - 
VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT = 15, - VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT = 16, - VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT = 17, - VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT = 18, - VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT = 19, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT = 20, - VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT = 21, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT = 22, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT = 23, - VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT = 24, - VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT = 25, - VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26, - VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27, - VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28, - VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29, - VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30, - VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT = 31, - VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT = 32, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = 1000085000, - VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = 1000156000, - VK_DEBUG_REPORT_OBJECT_TYPE_BEGIN_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_END_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_RANGE_SIZE_EXT = (VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT - VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT + 1), - VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDebugReportObjectTypeEXT; - - -typedef enum VkDebugReportFlagBitsEXT { - VK_DEBUG_REPORT_INFORMATION_BIT_EXT = 0x00000001, - VK_DEBUG_REPORT_WARNING_BIT_EXT = 0x00000002, - VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT = 0x00000004, - VK_DEBUG_REPORT_ERROR_BIT_EXT = 0x00000008, - VK_DEBUG_REPORT_DEBUG_BIT_EXT = 0x00000010, - VK_DEBUG_REPORT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDebugReportFlagBitsEXT; -typedef VkFlags VkDebugReportFlagsEXT; - -typedef VkBool32 (VKAPI_PTR *PFN_vkDebugReportCallbackEXT)( - VkDebugReportFlagsEXT flags, - VkDebugReportObjectTypeEXT objectType, - uint64_t object, - size_t location, - int32_t messageCode, - const char* pLayerPrefix, - const char* pMessage, - void* pUserData); - -typedef struct VkDebugReportCallbackCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkDebugReportFlagsEXT flags; - PFN_vkDebugReportCallbackEXT pfnCallback; - void* pUserData; -} VkDebugReportCallbackCreateInfoEXT; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugReportCallbackEXT)(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback); -typedef void (VKAPI_PTR *PFN_vkDestroyDebugReportCallbackEXT)(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkDebugReportMessageEXT)(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT( - VkInstance instance, - const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDebugReportCallbackEXT* pCallback); - -VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT( - VkInstance instance, - VkDebugReportCallbackEXT callback, - const 
VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT( - VkInstance instance, - VkDebugReportFlagsEXT flags, - VkDebugReportObjectTypeEXT objectType, - uint64_t object, - size_t location, - int32_t messageCode, - const char* pLayerPrefix, - const char* pMessage); -#endif - -#define VK_NV_glsl_shader 1 -#define VK_NV_GLSL_SHADER_SPEC_VERSION 1 -#define VK_NV_GLSL_SHADER_EXTENSION_NAME "VK_NV_glsl_shader" - - -#define VK_EXT_depth_range_unrestricted 1 -#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION 1 -#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "VK_EXT_depth_range_unrestricted" - - -#define VK_IMG_filter_cubic 1 -#define VK_IMG_FILTER_CUBIC_SPEC_VERSION 1 -#define VK_IMG_FILTER_CUBIC_EXTENSION_NAME "VK_IMG_filter_cubic" - - -#define VK_AMD_rasterization_order 1 -#define VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION 1 -#define VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME "VK_AMD_rasterization_order" - - -typedef enum VkRasterizationOrderAMD { - VK_RASTERIZATION_ORDER_STRICT_AMD = 0, - VK_RASTERIZATION_ORDER_RELAXED_AMD = 1, - VK_RASTERIZATION_ORDER_BEGIN_RANGE_AMD = VK_RASTERIZATION_ORDER_STRICT_AMD, - VK_RASTERIZATION_ORDER_END_RANGE_AMD = VK_RASTERIZATION_ORDER_RELAXED_AMD, - VK_RASTERIZATION_ORDER_RANGE_SIZE_AMD = (VK_RASTERIZATION_ORDER_RELAXED_AMD - VK_RASTERIZATION_ORDER_STRICT_AMD + 1), - VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF -} VkRasterizationOrderAMD; - -typedef struct VkPipelineRasterizationStateRasterizationOrderAMD { - VkStructureType sType; - const void* pNext; - VkRasterizationOrderAMD rasterizationOrder; -} VkPipelineRasterizationStateRasterizationOrderAMD; - - - -#define VK_AMD_shader_trinary_minmax 1 -#define VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION 1 -#define VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME "VK_AMD_shader_trinary_minmax" - - -#define VK_AMD_shader_explicit_vertex_parameter 1 -#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION 1 -#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME "VK_AMD_shader_explicit_vertex_parameter" - - -#define VK_EXT_debug_marker 1 -#define VK_EXT_DEBUG_MARKER_SPEC_VERSION 4 -#define VK_EXT_DEBUG_MARKER_EXTENSION_NAME "VK_EXT_debug_marker" - -typedef struct VkDebugMarkerObjectNameInfoEXT { - VkStructureType sType; - const void* pNext; - VkDebugReportObjectTypeEXT objectType; - uint64_t object; - const char* pObjectName; -} VkDebugMarkerObjectNameInfoEXT; - -typedef struct VkDebugMarkerObjectTagInfoEXT { - VkStructureType sType; - const void* pNext; - VkDebugReportObjectTypeEXT objectType; - uint64_t object; - uint64_t tagName; - size_t tagSize; - const void* pTag; -} VkDebugMarkerObjectTagInfoEXT; - -typedef struct VkDebugMarkerMarkerInfoEXT { - VkStructureType sType; - const void* pNext; - const char* pMarkerName; - float color[4]; -} VkDebugMarkerMarkerInfoEXT; - - -typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectTagEXT)(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo); -typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectNameEXT)(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo); -typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerBeginEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); -typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerEndEXT)(VkCommandBuffer commandBuffer); -typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerInsertEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectTagEXT( - 
VkDevice device, - const VkDebugMarkerObjectTagInfoEXT* pTagInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectNameEXT( - VkDevice device, - const VkDebugMarkerObjectNameInfoEXT* pNameInfo); - -VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerBeginEXT( - VkCommandBuffer commandBuffer, - const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); - -VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerEndEXT( - VkCommandBuffer commandBuffer); - -VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerInsertEXT( - VkCommandBuffer commandBuffer, - const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); -#endif - -#define VK_AMD_gcn_shader 1 -#define VK_AMD_GCN_SHADER_SPEC_VERSION 1 -#define VK_AMD_GCN_SHADER_EXTENSION_NAME "VK_AMD_gcn_shader" - - -#define VK_NV_dedicated_allocation 1 -#define VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION 1 -#define VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_NV_dedicated_allocation" - -typedef struct VkDedicatedAllocationImageCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkBool32 dedicatedAllocation; -} VkDedicatedAllocationImageCreateInfoNV; - -typedef struct VkDedicatedAllocationBufferCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkBool32 dedicatedAllocation; -} VkDedicatedAllocationBufferCreateInfoNV; - -typedef struct VkDedicatedAllocationMemoryAllocateInfoNV { - VkStructureType sType; - const void* pNext; - VkImage image; - VkBuffer buffer; -} VkDedicatedAllocationMemoryAllocateInfoNV; - - - -#define VK_AMD_draw_indirect_count 1 -#define VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION 1 -#define VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_AMD_draw_indirect_count" - -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountAMD( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride); - -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountAMD( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride); -#endif - -#define VK_AMD_negative_viewport_height 1 -#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION 1 -#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME "VK_AMD_negative_viewport_height" - - -#define VK_AMD_gpu_shader_half_float 1 -#define VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION 1 -#define VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME "VK_AMD_gpu_shader_half_float" - - -#define VK_AMD_shader_ballot 1 -#define VK_AMD_SHADER_BALLOT_SPEC_VERSION 1 -#define VK_AMD_SHADER_BALLOT_EXTENSION_NAME "VK_AMD_shader_ballot" - - -#define VK_AMD_texture_gather_bias_lod 1 -#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION 1 -#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME "VK_AMD_texture_gather_bias_lod" - -typedef struct VkTextureLODGatherFormatPropertiesAMD { - VkStructureType sType; - void* pNext; - VkBool32 supportsTextureGatherLODBiasAMD; -} VkTextureLODGatherFormatPropertiesAMD; - - - -#define VK_KHX_multiview 1 -#define VK_KHX_MULTIVIEW_SPEC_VERSION 1 -#define 
VK_KHX_MULTIVIEW_EXTENSION_NAME "VK_KHX_multiview" - -typedef struct VkRenderPassMultiviewCreateInfoKHX { - VkStructureType sType; - const void* pNext; - uint32_t subpassCount; - const uint32_t* pViewMasks; - uint32_t dependencyCount; - const int32_t* pViewOffsets; - uint32_t correlationMaskCount; - const uint32_t* pCorrelationMasks; -} VkRenderPassMultiviewCreateInfoKHX; - -typedef struct VkPhysicalDeviceMultiviewFeaturesKHX { - VkStructureType sType; - void* pNext; - VkBool32 multiview; - VkBool32 multiviewGeometryShader; - VkBool32 multiviewTessellationShader; -} VkPhysicalDeviceMultiviewFeaturesKHX; - -typedef struct VkPhysicalDeviceMultiviewPropertiesKHX { - VkStructureType sType; - void* pNext; - uint32_t maxMultiviewViewCount; - uint32_t maxMultiviewInstanceIndex; -} VkPhysicalDeviceMultiviewPropertiesKHX; - - - -#define VK_IMG_format_pvrtc 1 -#define VK_IMG_FORMAT_PVRTC_SPEC_VERSION 1 -#define VK_IMG_FORMAT_PVRTC_EXTENSION_NAME "VK_IMG_format_pvrtc" - - -#define VK_NV_external_memory_capabilities 1 -#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 -#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_NV_external_memory_capabilities" - - -typedef enum VkExternalMemoryHandleTypeFlagBitsNV { - VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV = 0x00000001, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV = 0x00000002, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV = 0x00000004, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV = 0x00000008, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkExternalMemoryHandleTypeFlagBitsNV; -typedef VkFlags VkExternalMemoryHandleTypeFlagsNV; - -typedef enum VkExternalMemoryFeatureFlagBitsNV { - VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV = 0x00000001, - VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV = 0x00000002, - VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV = 0x00000004, - VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkExternalMemoryFeatureFlagBitsNV; -typedef VkFlags VkExternalMemoryFeatureFlagsNV; - -typedef struct VkExternalImageFormatPropertiesNV { - VkImageFormatProperties imageFormatProperties; - VkExternalMemoryFeatureFlagsNV externalMemoryFeatures; - VkExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes; - VkExternalMemoryHandleTypeFlagsNV compatibleHandleTypes; -} VkExternalImageFormatPropertiesNV; - - -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceExternalImageFormatPropertiesNV( - VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkImageTiling tiling, - VkImageUsageFlags usage, - VkImageCreateFlags flags, - VkExternalMemoryHandleTypeFlagsNV externalHandleType, - VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); -#endif - -#define VK_NV_external_memory 1 -#define VK_NV_EXTERNAL_MEMORY_SPEC_VERSION 1 -#define VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME "VK_NV_external_memory" - -typedef struct VkExternalMemoryImageCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagsNV handleTypes; -} VkExternalMemoryImageCreateInfoNV; - -typedef struct VkExportMemoryAllocateInfoNV { - VkStructureType sType; - const 
void* pNext; - VkExternalMemoryHandleTypeFlagsNV handleTypes; -} VkExportMemoryAllocateInfoNV; - - - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#define VK_NV_external_memory_win32 1 -#define VK_NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 -#define VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_NV_external_memory_win32" - -typedef struct VkImportMemoryWin32HandleInfoNV { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagsNV handleType; - HANDLE handle; -} VkImportMemoryWin32HandleInfoNV; - -typedef struct VkExportMemoryWin32HandleInfoNV { - VkStructureType sType; - const void* pNext; - const SECURITY_ATTRIBUTES* pAttributes; - DWORD dwAccess; -} VkExportMemoryWin32HandleInfoNV; - +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ -typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleNV)(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle); +#include "vk_platform.h" +#include "vulkan_core.h" -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleNV( - VkDevice device, - VkDeviceMemory memory, - VkExternalMemoryHandleTypeFlagsNV handleType, - HANDLE* pHandle); +#ifdef VK_USE_PLATFORM_ANDROID_KHR +#include "vulkan_android.h" #endif -#endif /* VK_USE_PLATFORM_WIN32_KHR */ - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#define VK_NV_win32_keyed_mutex 1 -#define VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION 1 -#define VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_NV_win32_keyed_mutex" - -typedef struct VkWin32KeyedMutexAcquireReleaseInfoNV { - VkStructureType sType; - const void* pNext; - uint32_t acquireCount; - const VkDeviceMemory* pAcquireSyncs; - const uint64_t* pAcquireKeys; - const uint32_t* pAcquireTimeoutMilliseconds; - uint32_t releaseCount; - const VkDeviceMemory* pReleaseSyncs; - const uint64_t* pReleaseKeys; -} VkWin32KeyedMutexAcquireReleaseInfoNV; - - -#endif /* VK_USE_PLATFORM_WIN32_KHR */ - -#define VK_KHX_device_group 1 -#define VK_MAX_DEVICE_GROUP_SIZE_KHX 32 -#define VK_KHX_DEVICE_GROUP_SPEC_VERSION 2 -#define VK_KHX_DEVICE_GROUP_EXTENSION_NAME "VK_KHX_device_group" - - -typedef enum VkPeerMemoryFeatureFlagBitsKHX { - VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHX = 0x00000001, - VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHX = 0x00000002, - VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHX = 0x00000004, - VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHX = 0x00000008, - VK_PEER_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_KHX = 0x7FFFFFFF -} VkPeerMemoryFeatureFlagBitsKHX; -typedef VkFlags VkPeerMemoryFeatureFlagsKHX; - -typedef enum VkMemoryAllocateFlagBitsKHX { - VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHX = 0x00000001, - VK_MEMORY_ALLOCATE_FLAG_BITS_MAX_ENUM_KHX = 0x7FFFFFFF -} VkMemoryAllocateFlagBitsKHX; -typedef VkFlags VkMemoryAllocateFlagsKHX; - -typedef enum VkDeviceGroupPresentModeFlagBitsKHX { - VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHX = 0x00000001, - VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHX = 0x00000002, - VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHX = 0x00000004, - VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHX = 0x00000008, - VK_DEVICE_GROUP_PRESENT_MODE_FLAG_BITS_MAX_ENUM_KHX = 0x7FFFFFFF -} VkDeviceGroupPresentModeFlagBitsKHX; -typedef VkFlags VkDeviceGroupPresentModeFlagsKHX; - -typedef struct VkMemoryAllocateFlagsInfoKHX { - VkStructureType sType; - const void* pNext; - VkMemoryAllocateFlagsKHX flags; - uint32_t deviceMask; -} VkMemoryAllocateFlagsInfoKHX; - -typedef struct VkDeviceGroupRenderPassBeginInfoKHX { - VkStructureType sType; - const void* pNext; - uint32_t 
deviceMask; - uint32_t deviceRenderAreaCount; - const VkRect2D* pDeviceRenderAreas; -} VkDeviceGroupRenderPassBeginInfoKHX; - -typedef struct VkDeviceGroupCommandBufferBeginInfoKHX { - VkStructureType sType; - const void* pNext; - uint32_t deviceMask; -} VkDeviceGroupCommandBufferBeginInfoKHX; - -typedef struct VkDeviceGroupSubmitInfoKHX { - VkStructureType sType; - const void* pNext; - uint32_t waitSemaphoreCount; - const uint32_t* pWaitSemaphoreDeviceIndices; - uint32_t commandBufferCount; - const uint32_t* pCommandBufferDeviceMasks; - uint32_t signalSemaphoreCount; - const uint32_t* pSignalSemaphoreDeviceIndices; -} VkDeviceGroupSubmitInfoKHX; - -typedef struct VkDeviceGroupBindSparseInfoKHX { - VkStructureType sType; - const void* pNext; - uint32_t resourceDeviceIndex; - uint32_t memoryDeviceIndex; -} VkDeviceGroupBindSparseInfoKHX; - -typedef struct VkDeviceGroupPresentCapabilitiesKHX { - VkStructureType sType; - const void* pNext; - uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE_KHX]; - VkDeviceGroupPresentModeFlagsKHX modes; -} VkDeviceGroupPresentCapabilitiesKHX; - -typedef struct VkImageSwapchainCreateInfoKHX { - VkStructureType sType; - const void* pNext; - VkSwapchainKHR swapchain; -} VkImageSwapchainCreateInfoKHX; - -typedef struct VkBindBufferMemoryDeviceGroupInfoKHX { - VkStructureType sType; - const void* pNext; - uint32_t deviceIndexCount; - const uint32_t* pDeviceIndices; -} VkBindBufferMemoryDeviceGroupInfoKHX; - -typedef struct VkBindImageMemoryDeviceGroupInfoKHX { - VkStructureType sType; - const void* pNext; - uint32_t deviceIndexCount; - const uint32_t* pDeviceIndices; - uint32_t SFRRectCount; - const VkRect2D* pSFRRects; -} VkBindImageMemoryDeviceGroupInfoKHX; - -typedef struct VkBindImageMemorySwapchainInfoKHX { - VkStructureType sType; - const void* pNext; - VkSwapchainKHR swapchain; - uint32_t imageIndex; -} VkBindImageMemorySwapchainInfoKHX; - -typedef struct VkAcquireNextImageInfoKHX { - VkStructureType sType; - const void* pNext; - VkSwapchainKHR swapchain; - uint64_t timeout; - VkSemaphore semaphore; - VkFence fence; - uint32_t deviceMask; -} VkAcquireNextImageInfoKHX; - -typedef struct VkDeviceGroupPresentInfoKHX { - VkStructureType sType; - const void* pNext; - uint32_t swapchainCount; - const uint32_t* pDeviceMasks; - VkDeviceGroupPresentModeFlagBitsKHX mode; -} VkDeviceGroupPresentInfoKHX; -typedef struct VkDeviceGroupSwapchainCreateInfoKHX { - VkStructureType sType; - const void* pNext; - VkDeviceGroupPresentModeFlagsKHX modes; -} VkDeviceGroupSwapchainCreateInfoKHX; - - -typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeaturesKHX)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlagsKHX* pPeerMemoryFeatures); -typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMaskKHX)(VkCommandBuffer commandBuffer, uint32_t deviceMask); -typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupPresentCapabilitiesKHX)(VkDevice device, VkDeviceGroupPresentCapabilitiesKHX* pDeviceGroupPresentCapabilities); -typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModesKHX)(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHX* pModes); -typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImage2KHX)(VkDevice device, const VkAcquireNextImageInfoKHX* pAcquireInfo, uint32_t* pImageIndex); -typedef void (VKAPI_PTR *PFN_vkCmdDispatchBaseKHX)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); -typedef 
VkResult (VKAPI_PTR *PFN_vkGetPhysicalDevicePresentRectanglesKHX)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeaturesKHX( - VkDevice device, - uint32_t heapIndex, - uint32_t localDeviceIndex, - uint32_t remoteDeviceIndex, - VkPeerMemoryFeatureFlagsKHX* pPeerMemoryFeatures); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMaskKHX( - VkCommandBuffer commandBuffer, - uint32_t deviceMask); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHX( - VkDevice device, - VkDeviceGroupPresentCapabilitiesKHX* pDeviceGroupPresentCapabilities); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHX( - VkDevice device, - VkSurfaceKHR surface, - VkDeviceGroupPresentModeFlagsKHX* pModes); - -VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHX( - VkDevice device, - const VkAcquireNextImageInfoKHX* pAcquireInfo, - uint32_t* pImageIndex); - -VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHX( - VkCommandBuffer commandBuffer, - uint32_t baseGroupX, - uint32_t baseGroupY, - uint32_t baseGroupZ, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHX( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t* pRectCount, - VkRect2D* pRects); +#ifdef VK_USE_PLATFORM_FUCHSIA +#include <zircon/types.h> +#include "vulkan_fuchsia.h" #endif -#define VK_EXT_validation_flags 1 -#define VK_EXT_VALIDATION_FLAGS_SPEC_VERSION 1 -#define VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME "VK_EXT_validation_flags" - - -typedef enum VkValidationCheckEXT { - VK_VALIDATION_CHECK_ALL_EXT = 0, - VK_VALIDATION_CHECK_SHADERS_EXT = 1, - VK_VALIDATION_CHECK_BEGIN_RANGE_EXT = VK_VALIDATION_CHECK_ALL_EXT, - VK_VALIDATION_CHECK_END_RANGE_EXT = VK_VALIDATION_CHECK_SHADERS_EXT, - VK_VALIDATION_CHECK_RANGE_SIZE_EXT = (VK_VALIDATION_CHECK_SHADERS_EXT - VK_VALIDATION_CHECK_ALL_EXT + 1), - VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF -} VkValidationCheckEXT; - -typedef struct VkValidationFlagsEXT { - VkStructureType sType; - const void* pNext; - uint32_t disabledValidationCheckCount; - VkValidationCheckEXT* pDisabledValidationChecks; -} VkValidationFlagsEXT; - - - -#ifdef VK_USE_PLATFORM_VI_NN -#define VK_NN_vi_surface 1 -#define VK_NN_VI_SURFACE_SPEC_VERSION 1 -#define VK_NN_VI_SURFACE_EXTENSION_NAME "VK_NN_vi_surface" - -typedef VkFlags VkViSurfaceCreateFlagsNN; - -typedef struct VkViSurfaceCreateInfoNN { - VkStructureType sType; - const void* pNext; - VkViSurfaceCreateFlagsNN flags; - void* window; -} VkViSurfaceCreateInfoNN; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateViSurfaceNN)(VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN( - VkInstance instance, - const VkViSurfaceCreateInfoNN* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); +#ifdef VK_USE_PLATFORM_IOS_MVK +#include "vulkan_ios.h" #endif -#endif /* VK_USE_PLATFORM_VI_NN */ - -#define VK_EXT_shader_subgroup_ballot 1 -#define VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION 1 -#define VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME "VK_EXT_shader_subgroup_ballot" - -#define VK_EXT_shader_subgroup_vote 1 -#define VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION 1 -#define VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME "VK_EXT_shader_subgroup_vote" - -#define
VK_KHX_device_group_creation 1 -#define VK_KHX_DEVICE_GROUP_CREATION_SPEC_VERSION 1 -#define VK_KHX_DEVICE_GROUP_CREATION_EXTENSION_NAME "VK_KHX_device_group_creation" - -typedef struct VkPhysicalDeviceGroupPropertiesKHX { - VkStructureType sType; - void* pNext; - uint32_t physicalDeviceCount; - VkPhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE_KHX]; - VkBool32 subsetAllocation; -} VkPhysicalDeviceGroupPropertiesKHX; - -typedef struct VkDeviceGroupDeviceCreateInfoKHX { - VkStructureType sType; - const void* pNext; - uint32_t physicalDeviceCount; - const VkPhysicalDevice* pPhysicalDevices; -} VkDeviceGroupDeviceCreateInfoKHX; - - -typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroupsKHX)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHX* pPhysicalDeviceGroupProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroupsKHX( - VkInstance instance, - uint32_t* pPhysicalDeviceGroupCount, - VkPhysicalDeviceGroupPropertiesKHX* pPhysicalDeviceGroupProperties); +#ifdef VK_USE_PLATFORM_MACOS_MVK +#include "vulkan_macos.h" #endif -#define VK_NVX_device_generated_commands 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkObjectTableNVX) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNVX) - -#define VK_NVX_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 1 -#define VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NVX_device_generated_commands" - - -typedef enum VkIndirectCommandsTokenTypeNVX { - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX = 0, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX = 1, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX = 2, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX = 3, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX = 4, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX = 5, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX = 6, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX = 7, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_BEGIN_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_END_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_RANGE_SIZE_NVX = (VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX + 1), - VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF -} VkIndirectCommandsTokenTypeNVX; - -typedef enum VkObjectEntryTypeNVX { - VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX = 0, - VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX = 1, - VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX = 2, - VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX = 3, - VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX = 4, - VK_OBJECT_ENTRY_TYPE_BEGIN_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX, - VK_OBJECT_ENTRY_TYPE_END_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX, - VK_OBJECT_ENTRY_TYPE_RANGE_SIZE_NVX = (VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX - VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX + 1), - VK_OBJECT_ENTRY_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF -} VkObjectEntryTypeNVX; - - -typedef enum VkIndirectCommandsLayoutUsageFlagBitsNVX { - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX = 0x00000001, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX = 0x00000002, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX = 0x00000004, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX = 0x00000008, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF -} VkIndirectCommandsLayoutUsageFlagBitsNVX; -typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNVX; - -typedef 
enum VkObjectEntryUsageFlagBitsNVX { - VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX = 0x00000001, - VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX = 0x00000002, - VK_OBJECT_ENTRY_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF -} VkObjectEntryUsageFlagBitsNVX; -typedef VkFlags VkObjectEntryUsageFlagsNVX; - -typedef struct VkDeviceGeneratedCommandsFeaturesNVX { - VkStructureType sType; - const void* pNext; - VkBool32 computeBindingPointSupport; -} VkDeviceGeneratedCommandsFeaturesNVX; - -typedef struct VkDeviceGeneratedCommandsLimitsNVX { - VkStructureType sType; - const void* pNext; - uint32_t maxIndirectCommandsLayoutTokenCount; - uint32_t maxObjectEntryCounts; - uint32_t minSequenceCountBufferOffsetAlignment; - uint32_t minSequenceIndexBufferOffsetAlignment; - uint32_t minCommandsTokenBufferOffsetAlignment; -} VkDeviceGeneratedCommandsLimitsNVX; - -typedef struct VkIndirectCommandsTokenNVX { - VkIndirectCommandsTokenTypeNVX tokenType; - VkBuffer buffer; - VkDeviceSize offset; -} VkIndirectCommandsTokenNVX; - -typedef struct VkIndirectCommandsLayoutTokenNVX { - VkIndirectCommandsTokenTypeNVX tokenType; - uint32_t bindingUnit; - uint32_t dynamicCount; - uint32_t divisor; -} VkIndirectCommandsLayoutTokenNVX; - -typedef struct VkIndirectCommandsLayoutCreateInfoNVX { - VkStructureType sType; - const void* pNext; - VkPipelineBindPoint pipelineBindPoint; - VkIndirectCommandsLayoutUsageFlagsNVX flags; - uint32_t tokenCount; - const VkIndirectCommandsLayoutTokenNVX* pTokens; -} VkIndirectCommandsLayoutCreateInfoNVX; - -typedef struct VkCmdProcessCommandsInfoNVX { - VkStructureType sType; - const void* pNext; - VkObjectTableNVX objectTable; - VkIndirectCommandsLayoutNVX indirectCommandsLayout; - uint32_t indirectCommandsTokenCount; - const VkIndirectCommandsTokenNVX* pIndirectCommandsTokens; - uint32_t maxSequencesCount; - VkCommandBuffer targetCommandBuffer; - VkBuffer sequencesCountBuffer; - VkDeviceSize sequencesCountOffset; - VkBuffer sequencesIndexBuffer; - VkDeviceSize sequencesIndexOffset; -} VkCmdProcessCommandsInfoNVX; - -typedef struct VkCmdReserveSpaceForCommandsInfoNVX { - VkStructureType sType; - const void* pNext; - VkObjectTableNVX objectTable; - VkIndirectCommandsLayoutNVX indirectCommandsLayout; - uint32_t maxSequencesCount; -} VkCmdReserveSpaceForCommandsInfoNVX; - -typedef struct VkObjectTableCreateInfoNVX { - VkStructureType sType; - const void* pNext; - uint32_t objectCount; - const VkObjectEntryTypeNVX* pObjectEntryTypes; - const uint32_t* pObjectEntryCounts; - const VkObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags; - uint32_t maxUniformBuffersPerDescriptor; - uint32_t maxStorageBuffersPerDescriptor; - uint32_t maxStorageImagesPerDescriptor; - uint32_t maxSampledImagesPerDescriptor; - uint32_t maxPipelineLayouts; -} VkObjectTableCreateInfoNVX; - -typedef struct VkObjectTableEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; -} VkObjectTableEntryNVX; - -typedef struct VkObjectTablePipelineEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkPipeline pipeline; -} VkObjectTablePipelineEntryNVX; - -typedef struct VkObjectTableDescriptorSetEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkPipelineLayout pipelineLayout; - VkDescriptorSet descriptorSet; -} VkObjectTableDescriptorSetEntryNVX; - -typedef struct VkObjectTableVertexBufferEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkBuffer buffer; -} VkObjectTableVertexBufferEntryNVX; - -typedef struct VkObjectTableIndexBufferEntryNVX { - 
VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkBuffer buffer; - VkIndexType indexType; -} VkObjectTableIndexBufferEntryNVX; - -typedef struct VkObjectTablePushConstantEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkPipelineLayout pipelineLayout; - VkShaderStageFlags stageFlags; -} VkObjectTablePushConstantEntryNVX; - - -typedef void (VKAPI_PTR *PFN_vkCmdProcessCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo); -typedef void (VKAPI_PTR *PFN_vkCmdReserveSpaceForCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo); -typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNVX)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout); -typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNVX)(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateObjectTableNVX)(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable); -typedef void (VKAPI_PTR *PFN_vkDestroyObjectTableNVX)(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkRegisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices); -typedef VkResult (VKAPI_PTR *PFN_vkUnregisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX)(VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdProcessCommandsNVX( - VkCommandBuffer commandBuffer, - const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo); - -VKAPI_ATTR void VKAPI_CALL vkCmdReserveSpaceForCommandsNVX( - VkCommandBuffer commandBuffer, - const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNVX( - VkDevice device, - const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout); - -VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNVX( - VkDevice device, - VkIndirectCommandsLayoutNVX indirectCommandsLayout, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateObjectTableNVX( - VkDevice device, - const VkObjectTableCreateInfoNVX* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkObjectTableNVX* pObjectTable); - -VKAPI_ATTR void VKAPI_CALL vkDestroyObjectTableNVX( - VkDevice device, - VkObjectTableNVX objectTable, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkRegisterObjectsNVX( - VkDevice device, - VkObjectTableNVX objectTable, - uint32_t objectCount, - const VkObjectTableEntryNVX* const* ppObjectTableEntries, - const uint32_t* pObjectIndices); - -VKAPI_ATTR VkResult VKAPI_CALL vkUnregisterObjectsNVX( - VkDevice device, - VkObjectTableNVX objectTable, - uint32_t objectCount, - const 
VkObjectEntryTypeNVX* pObjectEntryTypes, - const uint32_t* pObjectIndices); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( - VkPhysicalDevice physicalDevice, - VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, - VkDeviceGeneratedCommandsLimitsNVX* pLimits); +#ifdef VK_USE_PLATFORM_METAL_EXT +#include "vulkan_metal.h" #endif -#define VK_NV_clip_space_w_scaling 1 -#define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1 -#define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling" - -typedef struct VkViewportWScalingNV { - float xcoeff; - float ycoeff; -} VkViewportWScalingNV; - -typedef struct VkPipelineViewportWScalingStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkBool32 viewportWScalingEnable; - uint32_t viewportCount; - const VkViewportWScalingNV* pViewportWScalings; -} VkPipelineViewportWScalingStateCreateInfoNV; - - -typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingNV( - VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkViewportWScalingNV* pViewportWScalings); +#ifdef VK_USE_PLATFORM_VI_NN +#include "vulkan_vi.h" #endif -#define VK_EXT_direct_mode_display 1 -#define VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION 1 -#define VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME "VK_EXT_direct_mode_display" - -typedef VkResult (VKAPI_PTR *PFN_vkReleaseDisplayEXT)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT( - VkPhysicalDevice physicalDevice, - VkDisplayKHR display); +#ifdef VK_USE_PLATFORM_WAYLAND_KHR +#include "vulkan_wayland.h" #endif -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT -#define VK_EXT_acquire_xlib_display 1 -#include - -#define VK_EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION 1 -#define VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_xlib_display" - -typedef VkResult (VKAPI_PTR *PFN_vkAcquireXlibDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display); -typedef VkResult (VKAPI_PTR *PFN_vkGetRandROutputDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkAcquireXlibDisplayEXT( - VkPhysicalDevice physicalDevice, - Display* dpy, - VkDisplayKHR display); -VKAPI_ATTR VkResult VKAPI_CALL vkGetRandROutputDisplayEXT( - VkPhysicalDevice physicalDevice, - Display* dpy, - RROutput rrOutput, - VkDisplayKHR* pDisplay); +#ifdef VK_USE_PLATFORM_WIN32_KHR +#include +#include "vulkan_win32.h" #endif -#endif /* VK_USE_PLATFORM_XLIB_XRANDR_EXT */ - -#define VK_EXT_display_surface_counter 1 -#define VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION 1 -#define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter" - -typedef enum VkSurfaceCounterFlagBitsEXT { - VK_SURFACE_COUNTER_VBLANK_EXT = 0x00000001, - VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkSurfaceCounterFlagBitsEXT; -typedef VkFlags VkSurfaceCounterFlagsEXT; -typedef struct VkSurfaceCapabilities2EXT { - VkStructureType sType; - void* pNext; - uint32_t minImageCount; - uint32_t maxImageCount; - VkExtent2D currentExtent; - VkExtent2D minImageExtent; - VkExtent2D maxImageExtent; - uint32_t maxImageArrayLayers; - VkSurfaceTransformFlagsKHR supportedTransforms; - 
VkSurfaceTransformFlagBitsKHR currentTransform; - VkCompositeAlphaFlagsKHR supportedCompositeAlpha; - VkImageUsageFlags supportedUsageFlags; - VkSurfaceCounterFlagsEXT supportedSurfaceCounters; -} VkSurfaceCapabilities2EXT; - - -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - VkSurfaceCapabilities2EXT* pSurfaceCapabilities); +#ifdef VK_USE_PLATFORM_XCB_KHR +#include +#include "vulkan_xcb.h" #endif -#define VK_EXT_display_control 1 -#define VK_EXT_DISPLAY_CONTROL_SPEC_VERSION 1 -#define VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME "VK_EXT_display_control" - - -typedef enum VkDisplayPowerStateEXT { - VK_DISPLAY_POWER_STATE_OFF_EXT = 0, - VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1, - VK_DISPLAY_POWER_STATE_ON_EXT = 2, - VK_DISPLAY_POWER_STATE_BEGIN_RANGE_EXT = VK_DISPLAY_POWER_STATE_OFF_EXT, - VK_DISPLAY_POWER_STATE_END_RANGE_EXT = VK_DISPLAY_POWER_STATE_ON_EXT, - VK_DISPLAY_POWER_STATE_RANGE_SIZE_EXT = (VK_DISPLAY_POWER_STATE_ON_EXT - VK_DISPLAY_POWER_STATE_OFF_EXT + 1), - VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDisplayPowerStateEXT; - -typedef enum VkDeviceEventTypeEXT { - VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0, - VK_DEVICE_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT, - VK_DEVICE_EVENT_TYPE_END_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT, - VK_DEVICE_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT - VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT + 1), - VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDeviceEventTypeEXT; - -typedef enum VkDisplayEventTypeEXT { - VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0, - VK_DISPLAY_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT, - VK_DISPLAY_EVENT_TYPE_END_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT, - VK_DISPLAY_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT - VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT + 1), - VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDisplayEventTypeEXT; - -typedef struct VkDisplayPowerInfoEXT { - VkStructureType sType; - const void* pNext; - VkDisplayPowerStateEXT powerState; -} VkDisplayPowerInfoEXT; - -typedef struct VkDeviceEventInfoEXT { - VkStructureType sType; - const void* pNext; - VkDeviceEventTypeEXT deviceEvent; -} VkDeviceEventInfoEXT; - -typedef struct VkDisplayEventInfoEXT { - VkStructureType sType; - const void* pNext; - VkDisplayEventTypeEXT displayEvent; -} VkDisplayEventInfoEXT; - -typedef struct VkSwapchainCounterCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkSurfaceCounterFlagsEXT surfaceCounters; -} VkSwapchainCounterCreateInfoEXT; - - -typedef VkResult (VKAPI_PTR *PFN_vkDisplayPowerControlEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo); -typedef VkResult (VKAPI_PTR *PFN_vkRegisterDeviceEventEXT)(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); -typedef VkResult (VKAPI_PTR *PFN_vkRegisterDisplayEventEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); -typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainCounterEXT)(VkDevice device, VkSwapchainKHR swapchain, 
VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkDisplayPowerControlEXT( - VkDevice device, - VkDisplayKHR display, - const VkDisplayPowerInfoEXT* pDisplayPowerInfo); -VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDeviceEventEXT( - VkDevice device, - const VkDeviceEventInfoEXT* pDeviceEventInfo, - const VkAllocationCallbacks* pAllocator, - VkFence* pFence); - -VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDisplayEventEXT( - VkDevice device, - VkDisplayKHR display, - const VkDisplayEventInfoEXT* pDisplayEventInfo, - const VkAllocationCallbacks* pAllocator, - VkFence* pFence); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainCounterEXT( - VkDevice device, - VkSwapchainKHR swapchain, - VkSurfaceCounterFlagBitsEXT counter, - uint64_t* pCounterValue); +#ifdef VK_USE_PLATFORM_XLIB_KHR +#include +#include "vulkan_xlib.h" #endif -#define VK_GOOGLE_display_timing 1 -#define VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION 1 -#define VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME "VK_GOOGLE_display_timing" - -typedef struct VkRefreshCycleDurationGOOGLE { - uint64_t refreshDuration; -} VkRefreshCycleDurationGOOGLE; - -typedef struct VkPastPresentationTimingGOOGLE { - uint32_t presentID; - uint64_t desiredPresentTime; - uint64_t actualPresentTime; - uint64_t earliestPresentTime; - uint64_t presentMargin; -} VkPastPresentationTimingGOOGLE; - -typedef struct VkPresentTimeGOOGLE { - uint32_t presentID; - uint64_t desiredPresentTime; -} VkPresentTimeGOOGLE; - -typedef struct VkPresentTimesInfoGOOGLE { - VkStructureType sType; - const void* pNext; - uint32_t swapchainCount; - const VkPresentTimeGOOGLE* pTimes; -} VkPresentTimesInfoGOOGLE; - -typedef VkResult (VKAPI_PTR *PFN_vkGetRefreshCycleDurationGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetPastPresentationTimingGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetRefreshCycleDurationGOOGLE( - VkDevice device, - VkSwapchainKHR swapchain, - VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE( - VkDevice device, - VkSwapchainKHR swapchain, - uint32_t* pPresentationTimingCount, - VkPastPresentationTimingGOOGLE* pPresentationTimings); +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT +#include +#include "vulkan_directfb.h" #endif -#define VK_KHX_subgroup 1 -#define VK_KHX_SUBGROUP_SPEC_VERSION 1 -#define VK_KHX_SUBGROUP_EXTENSION_NAME "VK_KHX_subgroup" - - -typedef enum VkSubgroupFeatureFlagBitsKHX { - VK_SUBGROUP_FEATURE_BIT_BASIC = 0x00000001, - VK_SUBGROUP_FEATURE_BIT_VOTE = 0x00000002, - VK_SUBGROUP_FEATURE_BIT_ARITHMETIC = 0x00000004, - VK_SUBGROUP_FEATURE_BIT_BALLOT = 0x00000008, - VK_SUBGROUP_FEATURE_BIT_SHUFFLE = 0x00000010, - VK_SUBGROUP_FEATURE_BIT_SHUFFLE_RELATIVE = 0x00000020, - VK_SUBGROUP_FEATURE_BIT_CLUSTERED = 0x00000040, - VK_SUBGROUP_FEATURE_BIT_QUAD = 0x00000080, - VK_SUBGROUP_FEATURE_FLAG_BITS_MAX_ENUM_KHX = 0x7FFFFFFF -} VkSubgroupFeatureFlagBitsKHX; -typedef VkFlags VkSubgroupFeatureFlagsKHX; - -typedef struct VkPhysicalDeviceSubgroupPropertiesKHX { - VkStructureType sType; - void* pNext; - uint32_t subgroupSize; - VkShaderStageFlags supportedStages; - VkSubgroupFeatureFlagsKHX supportedOperations; - VkBool32 quadOperationsInAllStages; -} VkPhysicalDeviceSubgroupPropertiesKHX; - - 
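The VK_GOOGLE_display_timing entry points removed above follow Vulkan's usual two-call enumeration idiom (query the count, then fill a caller-allocated array). A minimal sketch, assuming the extension is enabled and prototypes are available (otherwise the function pointer would be fetched via vkGetDeviceProcAddr):

#include <vector>
#include <vulkan/vulkan.h>

std::vector<VkPastPresentationTimingGOOGLE> queryPastTimings(VkDevice device, VkSwapchainKHR swapchain)
{
    uint32_t count = 0;
    vkGetPastPresentationTimingGOOGLE(device, swapchain, &count, nullptr);        // first call: obtain the count
    std::vector<VkPastPresentationTimingGOOGLE> timings(count);
    vkGetPastPresentationTimingGOOGLE(device, swapchain, &count, timings.data()); // second call: fill the array
    return timings;
}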
- -#define VK_NV_sample_mask_override_coverage 1 -#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION 1 -#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME "VK_NV_sample_mask_override_coverage" - - -#define VK_NV_geometry_shader_passthrough 1 -#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION 1 -#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME "VK_NV_geometry_shader_passthrough" - - -#define VK_NV_viewport_array2 1 -#define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION 1 -#define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME "VK_NV_viewport_array2" - - -#define VK_NVX_multiview_per_view_attributes 1 -#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION 1 -#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "VK_NVX_multiview_per_view_attributes" - -typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { - VkStructureType sType; - void* pNext; - VkBool32 perViewPositionAllComponents; -} VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; - - - -#define VK_NV_viewport_swizzle 1 -#define VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION 1 -#define VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME "VK_NV_viewport_swizzle" - - -typedef enum VkViewportCoordinateSwizzleNV { - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV = 0, - VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV = 1, - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV = 2, - VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV = 3, - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV = 4, - VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5, - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6, - VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7, - VK_VIEWPORT_COORDINATE_SWIZZLE_BEGIN_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, - VK_VIEWPORT_COORDINATE_SWIZZLE_END_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV, - VK_VIEWPORT_COORDINATE_SWIZZLE_RANGE_SIZE_NV = (VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV + 1), - VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF -} VkViewportCoordinateSwizzleNV; - -typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV; - -typedef struct VkViewportSwizzleNV { - VkViewportCoordinateSwizzleNV x; - VkViewportCoordinateSwizzleNV y; - VkViewportCoordinateSwizzleNV z; - VkViewportCoordinateSwizzleNV w; -} VkViewportSwizzleNV; - -typedef struct VkPipelineViewportSwizzleStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkPipelineViewportSwizzleStateCreateFlagsNV flags; - uint32_t viewportCount; - const VkViewportSwizzleNV* pViewportSwizzles; -} VkPipelineViewportSwizzleStateCreateInfoNV; - - -#define VK_EXT_discard_rectangles 1 -#define VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION 1 -#define VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME "VK_EXT_discard_rectangles" - - -typedef enum VkDiscardRectangleModeEXT { - VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0, - VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1, - VK_DISCARD_RECTANGLE_MODE_BEGIN_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, - VK_DISCARD_RECTANGLE_MODE_END_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT, - VK_DISCARD_RECTANGLE_MODE_RANGE_SIZE_EXT = (VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT - VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT + 1), - VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDiscardRectangleModeEXT; - -typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT; - -typedef struct VkPhysicalDeviceDiscardRectanglePropertiesEXT { - VkStructureType sType; - void* pNext; - uint32_t maxDiscardRectangles; -} VkPhysicalDeviceDiscardRectanglePropertiesEXT; - 
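The discard-rectangle limit declared just above is retrieved by chaining the EXT properties struct into a VkPhysicalDeviceProperties2 query. A hedged sketch (physicalDevice is assumed to exist; pre-1.1 implementations would use the KHR-suffixed vkGetPhysicalDeviceProperties2KHR instead):

VkPhysicalDeviceDiscardRectanglePropertiesEXT discardProps = {};
discardProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT;

VkPhysicalDeviceProperties2 props2 = {};
props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
props2.pNext = &discardProps;

vkGetPhysicalDeviceProperties2(physicalDevice, &props2);
// discardProps.maxDiscardRectangles now holds the implementation limit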
-typedef struct VkPipelineDiscardRectangleStateCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkPipelineDiscardRectangleStateCreateFlagsEXT flags; - VkDiscardRectangleModeEXT discardRectangleMode; - uint32_t discardRectangleCount; - const VkRect2D* pDiscardRectangles; -} VkPipelineDiscardRectangleStateCreateInfoEXT; - - -typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEXT)(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEXT( - VkCommandBuffer commandBuffer, - uint32_t firstDiscardRectangle, - uint32_t discardRectangleCount, - const VkRect2D* pDiscardRectangles); +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT +#include +#include +#include "vulkan_xlib_xrandr.h" #endif -#define VK_EXT_swapchain_colorspace 1 -#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 3 -#define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace" - - -#define VK_EXT_hdr_metadata 1 -#define VK_EXT_HDR_METADATA_SPEC_VERSION 1 -#define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata" - -typedef struct VkXYColorEXT { - float x; - float y; -} VkXYColorEXT; -typedef struct VkHdrMetadataEXT { - VkStructureType sType; - const void* pNext; - VkXYColorEXT displayPrimaryRed; - VkXYColorEXT displayPrimaryGreen; - VkXYColorEXT displayPrimaryBlue; - VkXYColorEXT whitePoint; - float maxLuminance; - float minLuminance; - float maxContentLightLevel; - float maxFrameAverageLightLevel; -} VkHdrMetadataEXT; - - -typedef void (VKAPI_PTR *PFN_vkSetHdrMetadataEXT)(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT( - VkDevice device, - uint32_t swapchainCount, - const VkSwapchainKHR* pSwapchains, - const VkHdrMetadataEXT* pMetadata); +#ifdef VK_USE_PLATFORM_GGP +#include +#include "vulkan_ggp.h" #endif -#ifdef VK_USE_PLATFORM_IOS_MVK -#define VK_MVK_ios_surface 1 -#define VK_MVK_IOS_SURFACE_SPEC_VERSION 2 -#define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface" - -typedef VkFlags VkIOSSurfaceCreateFlagsMVK; - -typedef struct VkIOSSurfaceCreateInfoMVK { - VkStructureType sType; - const void* pNext; - VkIOSSurfaceCreateFlagsMVK flags; - const void* pView; -} VkIOSSurfaceCreateInfoMVK; - - -typedef VkResult (VKAPI_PTR *PFN_vkCreateIOSSurfaceMVK)(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK( - VkInstance instance, - const VkIOSSurfaceCreateInfoMVK* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); +#ifdef VK_USE_PLATFORM_SCREEN_QNX +#include +#include "vulkan_screen.h" #endif -#endif /* VK_USE_PLATFORM_IOS_MVK */ - -#ifdef VK_USE_PLATFORM_MACOS_MVK -#define VK_MVK_macos_surface 1 -#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 2 -#define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface" - -typedef VkFlags VkMacOSSurfaceCreateFlagsMVK; - -typedef struct VkMacOSSurfaceCreateInfoMVK { - VkStructureType sType; - const void* pNext; - VkMacOSSurfaceCreateFlagsMVK flags; - const void* pView; -} VkMacOSSurfaceCreateInfoMVK; -typedef VkResult (VKAPI_PTR *PFN_vkCreateMacOSSurfaceMVK)(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* 
pSurface); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK( - VkInstance instance, - const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); +#ifdef VK_USE_PLATFORM_SCI +#include +#include +#include "vulkan_sci.h" #endif -#endif /* VK_USE_PLATFORM_MACOS_MVK */ - -#define VK_EXT_sampler_filter_minmax 1 -#define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 1 -#define VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax" - - -typedef enum VkSamplerReductionModeEXT { - VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = 0, - VK_SAMPLER_REDUCTION_MODE_MIN_EXT = 1, - VK_SAMPLER_REDUCTION_MODE_MAX_EXT = 2, - VK_SAMPLER_REDUCTION_MODE_BEGIN_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT, - VK_SAMPLER_REDUCTION_MODE_END_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_MAX_EXT, - VK_SAMPLER_REDUCTION_MODE_RANGE_SIZE_EXT = (VK_SAMPLER_REDUCTION_MODE_MAX_EXT - VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT + 1), - VK_SAMPLER_REDUCTION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkSamplerReductionModeEXT; -typedef struct VkSamplerReductionModeCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkSamplerReductionModeEXT reductionMode; -} VkSamplerReductionModeCreateInfoEXT; -typedef struct VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT { - VkStructureType sType; - void* pNext; - VkBool32 filterMinmaxSingleComponentFormats; - VkBool32 filterMinmaxImageComponentMapping; -} VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT; - - - -#define VK_AMD_gpu_shader_int16 1 -#define VK_AMD_GPU_SHADER_INT16_SPEC_VERSION 1 -#define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16" - - -#define VK_AMD_mixed_attachment_samples 1 -#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1 -#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples" - - -#define VK_EXT_blend_operation_advanced 1 -#define VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION 2 -#define VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME "VK_EXT_blend_operation_advanced" - - -typedef enum VkBlendOverlapEXT { - VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0, - VK_BLEND_OVERLAP_DISJOINT_EXT = 1, - VK_BLEND_OVERLAP_CONJOINT_EXT = 2, - VK_BLEND_OVERLAP_BEGIN_RANGE_EXT = VK_BLEND_OVERLAP_UNCORRELATED_EXT, - VK_BLEND_OVERLAP_END_RANGE_EXT = VK_BLEND_OVERLAP_CONJOINT_EXT, - VK_BLEND_OVERLAP_RANGE_SIZE_EXT = (VK_BLEND_OVERLAP_CONJOINT_EXT - VK_BLEND_OVERLAP_UNCORRELATED_EXT + 1), - VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF -} VkBlendOverlapEXT; - -typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 advancedBlendCoherentOperations; -} VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; - -typedef struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT { - VkStructureType sType; - void* pNext; - uint32_t advancedBlendMaxColorAttachments; - VkBool32 advancedBlendIndependentBlend; - VkBool32 advancedBlendNonPremultipliedSrcColor; - VkBool32 advancedBlendNonPremultipliedDstColor; - VkBool32 advancedBlendCorrelatedOverlap; - VkBool32 advancedBlendAllOperations; -} VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; - -typedef struct VkPipelineColorBlendAdvancedStateCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkBool32 srcPremultiplied; - VkBool32 dstPremultiplied; - VkBlendOverlapEXT blendOverlap; -} VkPipelineColorBlendAdvancedStateCreateInfoEXT; - - - -#define VK_NV_fragment_coverage_to_color 1 -#define 
VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION 1 -#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME "VK_NV_fragment_coverage_to_color" - -typedef VkFlags VkPipelineCoverageToColorStateCreateFlagsNV; - -typedef struct VkPipelineCoverageToColorStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkPipelineCoverageToColorStateCreateFlagsNV flags; - VkBool32 coverageToColorEnable; - uint32_t coverageToColorLocation; -} VkPipelineCoverageToColorStateCreateInfoNV; - - - -#define VK_NV_framebuffer_mixed_samples 1 -#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION 1 -#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME "VK_NV_framebuffer_mixed_samples" - - -typedef enum VkCoverageModulationModeNV { - VK_COVERAGE_MODULATION_MODE_NONE_NV = 0, - VK_COVERAGE_MODULATION_MODE_RGB_NV = 1, - VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2, - VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3, - VK_COVERAGE_MODULATION_MODE_BEGIN_RANGE_NV = VK_COVERAGE_MODULATION_MODE_NONE_NV, - VK_COVERAGE_MODULATION_MODE_END_RANGE_NV = VK_COVERAGE_MODULATION_MODE_RGBA_NV, - VK_COVERAGE_MODULATION_MODE_RANGE_SIZE_NV = (VK_COVERAGE_MODULATION_MODE_RGBA_NV - VK_COVERAGE_MODULATION_MODE_NONE_NV + 1), - VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF -} VkCoverageModulationModeNV; - -typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV; - -typedef struct VkPipelineCoverageModulationStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkPipelineCoverageModulationStateCreateFlagsNV flags; - VkCoverageModulationModeNV coverageModulationMode; - VkBool32 coverageModulationTableEnable; - uint32_t coverageModulationTableCount; - const float* pCoverageModulationTable; -} VkPipelineCoverageModulationStateCreateInfoNV; - - - -#define VK_NV_fill_rectangle 1 -#define VK_NV_FILL_RECTANGLE_SPEC_VERSION 1 -#define VK_NV_FILL_RECTANGLE_EXTENSION_NAME "VK_NV_fill_rectangle" - - -#define VK_EXT_post_depth_coverage 1 -#define VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION 1 -#define VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME "VK_EXT_post_depth_coverage" - - -#ifdef __cplusplus -} +#ifdef VK_ENABLE_BETA_EXTENSIONS +#include "vulkan_beta.h" #endif -#endif +#endif // VULKAN_H_ diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp new file mode 100644 index 00000000..1dc3da01 --- /dev/null +++ b/include/vulkan/vulkan.hpp @@ -0,0 +1,22371 @@ +// Copyright 2015-2025 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
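The rewritten vulkan.h above no longer declares every platform extension inline: surface types now live in per-platform headers (vulkan_win32.h, vulkan_xcb.h, vulkan_wayland.h, ...) that are only pulled in when the matching VK_USE_PLATFORM_* macro is defined before inclusion. The bare "#include" lines in the hunk appear to have lost their angle-bracketed system headers (windows.h, xcb/xcb.h, X11/Xlib.h, and so on) in transcription. A minimal consumer sketch for the XCB path, assuming existing xcb connection/window handles:

#define VK_USE_PLATFORM_XCB_KHR
#include <vulkan/vulkan.h>

VkSurfaceKHR createXcbSurface(VkInstance instance, xcb_connection_t* connection, xcb_window_t window)
{
    VkXcbSurfaceCreateInfoKHR info = {};
    info.sType      = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
    info.connection = connection;
    info.window     = window;

    VkSurfaceKHR surface = VK_NULL_HANDLE;
    vkCreateXcbSurfaceKHR(instance, &info, nullptr, &surface);
    return surface;
}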
+ +#ifndef VULKAN_HPP +#define VULKAN_HPP + +#include + +#if defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) +# include +import VULKAN_HPP_STD_MODULE; +#else +# include +# include // ArrayWrapperND +# include +# include // strnlen +# include // std::string +# include // std::exchange +# if 17 <= VULKAN_HPP_CPP_VERSION +# include +# endif +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) +# include // std::tie +# include // std::vector +# endif +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) +# include +# endif +# if !defined( VULKAN_HPP_NO_EXCEPTIONS ) +# include // std::is_error_code_enum +# endif +# if defined( VULKAN_HPP_SUPPORT_SPAN ) +# include +# endif +#endif +#include + +#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1 +# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) +# include +# elif defined( _WIN32 ) && !defined( VULKAN_HPP_NO_WIN32_PROTOTYPES ) +using HINSTANCE = struct HINSTANCE__ *; +# if defined( _WIN64 ) +using FARPROC = int64_t( __stdcall * )(); +# else +using FARPROC = int( __stdcall * )(); +# endif +extern "C" __declspec( dllimport ) HINSTANCE __stdcall LoadLibraryA( char const * lpLibFileName ); +extern "C" __declspec( dllimport ) int __stdcall FreeLibrary( HINSTANCE hLibModule ); +extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE hModule, const char * lpProcName ); +# endif +#endif + +static_assert( VK_HEADER_VERSION == 313, "Wrong VK_HEADER_VERSION!" ); + +// includes through some other header +// this results in major(x) being resolved to gnu_dev_major(x) +// which is an expression in a constructor initializer list. +#if defined( major ) +# undef major +#endif +#if defined( minor ) +# undef minor +#endif + +// Windows defines MemoryBarrier which is deprecated and collides +// with the VULKAN_HPP_NAMESPACE::MemoryBarrier struct. 
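When VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL is enabled, the block above forward-declares LoadLibraryA/FreeLibrary/GetProcAddress on Windows (and includes dlfcn.h on POSIX-like systems) so the header can bootstrap the Vulkan loader without dragging in windows.h; the static_assert pins this copy of the bindings to VK_HEADER_VERSION 313, so mixing it with a mismatched vulkan.h fails at compile time. A hedged sketch of what those declarations enable, mirroring the spirit (not the exact code) of vulkan.hpp's dynamic loader:

// Resolve vkGetInstanceProcAddr from the Vulkan loader DLL without <windows.h>.
PFN_vkGetInstanceProcAddr loadVkGetInstanceProcAddr()
{
    HINSTANCE vulkanLib = LoadLibraryA( "vulkan-1.dll" );
    if ( !vulkanLib )
        return nullptr;
    return reinterpret_cast<PFN_vkGetInstanceProcAddr>( GetProcAddress( vulkanLib, "vkGetInstanceProcAddr" ) );
    // FreeLibrary( vulkanLib ) would be issued when the loader object is torn down.
}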
+#if defined( MemoryBarrier ) +# undef MemoryBarrier +#endif + +// XLib.h defines True/False, which collides with our VULKAN_HPP_NAMESPACE::True/VULKAN_HPP_NAMESPACE::False +// -> undef them and provide some namepace-secure constexpr +#if defined( True ) +# undef True +constexpr int True = 1; +#endif +#if defined( False ) +# undef False +constexpr int False = 0; +#endif + +namespace VULKAN_HPP_NAMESPACE +{ + template + class ArrayWrapper1D : public std::array + { + public: + VULKAN_HPP_CONSTEXPR ArrayWrapper1D() VULKAN_HPP_NOEXCEPT : std::array() {} + + VULKAN_HPP_CONSTEXPR ArrayWrapper1D( std::array const & data ) VULKAN_HPP_NOEXCEPT : std::array( data ) {} + + template ::value, int>::type = 0> + VULKAN_HPP_CONSTEXPR_14 ArrayWrapper1D( std::string const & data ) VULKAN_HPP_NOEXCEPT + { + copy( data.data(), data.length() ); + } + +#if 17 <= VULKAN_HPP_CPP_VERSION + template ::value, int>::type = 0> + VULKAN_HPP_CONSTEXPR_14 ArrayWrapper1D( std::string_view data ) VULKAN_HPP_NOEXCEPT + { + copy( data.data(), data.length() ); + } +#endif + +#if ( VK_USE_64_BIT_PTR_DEFINES == 0 ) + // on 32 bit compiles, needs overloads on index type int to resolve ambiguities + VULKAN_HPP_CONSTEXPR T const & operator[]( int index ) const VULKAN_HPP_NOEXCEPT + { + return std::array::operator[]( index ); + } + + T & operator[]( int index ) VULKAN_HPP_NOEXCEPT + { + return std::array::operator[]( index ); + } +#endif + + operator T const *() const VULKAN_HPP_NOEXCEPT + { + return this->data(); + } + + operator T *() VULKAN_HPP_NOEXCEPT + { + return this->data(); + } + + template ::value, int>::type = 0> + operator std::string() const + { + return std::string( this->data(), strnlen( this->data(), N ) ); + } + +#if 17 <= VULKAN_HPP_CPP_VERSION + template ::value, int>::type = 0> + operator std::string_view() const + { + return std::string_view( this->data(), strnlen( this->data(), N ) ); + } +#endif + + private: + VULKAN_HPP_CONSTEXPR_14 void copy( char const * data, size_t len ) VULKAN_HPP_NOEXCEPT + { + size_t n = ( std::min )( N - 1, len ); + for ( size_t i = 0; i < n; ++i ) + { + ( *this )[i] = data[i]; + } + ( *this )[n] = 0; + } + }; + +// relational operators between ArrayWrapper1D of chars with potentially different sizes +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + template + std::strong_ordering operator<=>( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + int result = strcmp( lhs.data(), rhs.data() ); + return ( result < 0 ) ? std::strong_ordering::less : ( ( result > 0 ) ? 
std::strong_ordering::greater : std::strong_ordering::equal ); + } +#else + template + bool operator<( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return strcmp( lhs.data(), rhs.data() ) < 0; + } + + template + bool operator<=( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return strcmp( lhs.data(), rhs.data() ) <= 0; + } + + template + bool operator>( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return strcmp( lhs.data(), rhs.data() ) > 0; + } + + template + bool operator>=( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return strcmp( lhs.data(), rhs.data() ) >= 0; + } +#endif + + template + bool operator==( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return strcmp( lhs.data(), rhs.data() ) == 0; + } + + template + bool operator!=( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return strcmp( lhs.data(), rhs.data() ) != 0; + } + +// specialization of relational operators between std::string and arrays of chars +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + template + std::strong_ordering operator<=>( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return lhs <=> rhs.data(); + } +#else + template + bool operator<( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return lhs < rhs.data(); + } + + template + bool operator<=( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return lhs <= rhs.data(); + } + + template + bool operator>( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return lhs > rhs.data(); + } + + template + bool operator>=( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return lhs >= rhs.data(); + } +#endif + + template + bool operator==( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return lhs == rhs.data(); + } + + template + bool operator!=( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return lhs != rhs.data(); + } + + template + class ArrayWrapper2D : public std::array, N> + { + public: + VULKAN_HPP_CONSTEXPR ArrayWrapper2D() VULKAN_HPP_NOEXCEPT : std::array, N>() {} + + VULKAN_HPP_CONSTEXPR ArrayWrapper2D( std::array, N> const & data ) VULKAN_HPP_NOEXCEPT + : std::array, N>( *reinterpret_cast, N> const *>( &data ) ) + { + } + }; + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + class ArrayProxy + { + public: + VULKAN_HPP_CONSTEXPR ArrayProxy() VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) + { + } + + VULKAN_HPP_CONSTEXPR ArrayProxy( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) + { + } + + ArrayProxy( T const & value ) VULKAN_HPP_NOEXCEPT + : m_count( 1 ) + , m_ptr( &value ) + { + } + + ArrayProxy( uint32_t count, T const * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) + { + } + + template + ArrayProxy( T const ( &ptr )[C] ) VULKAN_HPP_NOEXCEPT + : m_count( C ) + , m_ptr( ptr ) + { + } + +# if __GNUC__ >= 9 +# pragma GCC diagnostic push +# pragma GCC diagnostic ignored "-Winit-list-lifetime" +# endif + + ArrayProxy( std::initializer_list const & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + { + } + + template ::value, int>::type = 0> + ArrayProxy( std::initializer_list::type> const & list ) 
VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + { + } + +# if __GNUC__ >= 9 +# pragma GCC diagnostic pop +# endif + + // Any type with a .data() return type implicitly convertible to T*, and a .size() return type implicitly + // convertible to size_t. The const version can capture temporaries, with lifetime ending at end of statement. + template ().data() ), T *>::value && + std::is_convertible().size() ), std::size_t>::value>::type * = nullptr> + ArrayProxy( V const & v ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( v.size() ) ) + , m_ptr( v.data() ) + { + } + + const T * begin() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + + const T * end() const VULKAN_HPP_NOEXCEPT + { + return m_ptr + m_count; + } + + const T & front() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_count && m_ptr ); + return *m_ptr; + } + + const T & back() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_count && m_ptr ); + return *( m_ptr + m_count - 1 ); + } + + bool empty() const VULKAN_HPP_NOEXCEPT + { + return ( m_count == 0 ); + } + + uint32_t size() const VULKAN_HPP_NOEXCEPT + { + return m_count; + } + + T const * data() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + + private: + uint32_t m_count; + T const * m_ptr; + }; + + template + class ArrayProxyNoTemporaries + { + public: + VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries() VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) + { + } + + VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) + { + } + + template ::value && std::is_lvalue_reference::value, int>::type = 0> + ArrayProxyNoTemporaries( B && value ) VULKAN_HPP_NOEXCEPT + : m_count( 1 ) + , m_ptr( &value ) + { + } + + ArrayProxyNoTemporaries( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) + { + } + + template + ArrayProxyNoTemporaries( T ( &ptr )[C] ) VULKAN_HPP_NOEXCEPT + : m_count( C ) + , m_ptr( ptr ) + { + } + + template + ArrayProxyNoTemporaries( T ( &&ptr )[C] ) = delete; + + // Any l-value reference with a .data() return type implicitly convertible to T*, and a .size() return type implicitly convertible to size_t. + template ().begin() ), T *>::value && + std::is_convertible().data() ), T *>::value && + std::is_convertible().size() ), std::size_t>::value && std::is_lvalue_reference::value, + int>::type = 0> + ArrayProxyNoTemporaries( V && v ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( v.size() ) ) + , m_ptr( v.data() ) + { + } + + // Any l-value reference with a .begin() return type implicitly convertible to T*, and a .size() return type implicitly convertible to size_t. 
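ArrayProxy (and the no-temporaries variant being defined here) lets a single parameter accept a lone element, a count/pointer pair, a C array, an initializer list, or any contiguous container exposing data()/size(). A small sketch with a hypothetical helper; submitValues is illustrative and not part of this header:

#include <vector>
#include <vulkan/vulkan.hpp>

void submitValues( vk::ArrayProxy<const uint32_t> values )
{
    for ( uint32_t v : values ) { /* consume v */ }
}

void caller()
{
    uint32_t              single = 1;
    std::vector<uint32_t> many   = { 2, 3, 4 };

    submitValues( single );      // one element
    submitValues( { 5, 6, 7 } ); // initializer list; lifetime ends with the statement
    submitValues( many );        // anything with data()/size()
}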
+ template ().begin() ), T *>::value && + std::is_convertible().size() ), std::size_t>::value && std::is_lvalue_reference::value, + int>::type = 0> + ArrayProxyNoTemporaries( V && v ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( v.size() ) ) + , m_ptr( v.begin() ) + { + } + + const T * begin() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + + const T * end() const VULKAN_HPP_NOEXCEPT + { + return m_ptr + m_count; + } + + const T & front() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_count && m_ptr ); + return *m_ptr; + } + + const T & back() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_count && m_ptr ); + return *( m_ptr + m_count - 1 ); + } + + bool empty() const VULKAN_HPP_NOEXCEPT + { + return ( m_count == 0 ); + } + + uint32_t size() const VULKAN_HPP_NOEXCEPT + { + return m_count; + } + + T * data() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + + private: + uint32_t m_count; + T * m_ptr; + }; + + template + class StridedArrayProxy : protected ArrayProxy + { + public: + using ArrayProxy::ArrayProxy; + + StridedArrayProxy( uint32_t count, T const * ptr, uint32_t stride ) VULKAN_HPP_NOEXCEPT + : ArrayProxy( count, ptr ) + , m_stride( stride ) + { + VULKAN_HPP_ASSERT( sizeof( T ) <= stride ); + } + + using ArrayProxy::begin; + + const T * end() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( static_cast( begin() ) + size() * m_stride ); + } + + using ArrayProxy::front; + + const T & back() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( begin() && size() ); + return *reinterpret_cast( static_cast( begin() ) + ( size() - 1 ) * m_stride ); + } + + using ArrayProxy::empty; + using ArrayProxy::size; + using ArrayProxy::data; + + uint32_t stride() const + { + return m_stride; + } + + private: + uint32_t m_stride = sizeof( T ); + }; + + template + class Optional + { + public: + Optional( RefType & reference ) VULKAN_HPP_NOEXCEPT + { + m_ptr = &reference; + } + + Optional( RefType * ptr ) VULKAN_HPP_NOEXCEPT + { + m_ptr = ptr; + } + + Optional( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_ptr = nullptr; + } + + operator RefType *() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + + RefType const * operator->() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return !!m_ptr; + } + + private: + RefType * m_ptr; + }; + + template + struct StructExtends + { + enum + { + value = false + }; + }; + + template + struct IsPartOfStructureChain + { + static const bool valid = false; + }; + + template + struct IsPartOfStructureChain + { + static const bool valid = std::is_same::value || IsPartOfStructureChain::valid; + }; + + template + struct StructureChainContains + { + static const bool value = std::is_same>::type>::value || + StructureChainContains::value; + }; + + template + struct StructureChainContains<0, T, ChainElements...> + { + static const bool value = std::is_same>::type>::value; + }; + + template + struct StructureChainValidation + { + using TestType = typename std::tuple_element>::type; + static const bool valid = StructExtends>::type>::value && + ( TestType::allowDuplicate || !StructureChainContains::value ) && + StructureChainValidation::valid; + }; + + template + struct StructureChainValidation<0, ChainElements...> + { + static const bool valid = true; + }; + + template + class StructureChain : public std::tuple + { + // Note: StructureChain has no move constructor or move assignment operator, as it is not supposed to contain movable containers. 
+ // In order to get a copy-operation on a move-operations, those functions are neither deleted nor defaulted. + public: + StructureChain() VULKAN_HPP_NOEXCEPT + { + static_assert( StructureChainValidation::valid, "The structure chain is not valid!" ); + link(); + } + + StructureChain( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT : std::tuple( rhs ) + { + static_assert( StructureChainValidation::valid, "The structure chain is not valid!" ); + link( &std::get<0>( *this ), + &std::get<0>( rhs ), + reinterpret_cast( &std::get<0>( *this ) ), + reinterpret_cast( &std::get<0>( rhs ) ) ); + } + + StructureChain( ChainElements const &... elems ) VULKAN_HPP_NOEXCEPT : std::tuple( elems... ) + { + static_assert( StructureChainValidation::valid, "The structure chain is not valid!" ); + link(); + } + + StructureChain & operator=( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT + { + std::tuple::operator=( rhs ); + link( &std::get<0>( *this ), + &std::get<0>( rhs ), + reinterpret_cast( &std::get<0>( *this ) ), + reinterpret_cast( &std::get<0>( rhs ) ) ); + return *this; + } + + template >::type, size_t Which = 0> + T & get() VULKAN_HPP_NOEXCEPT + { + return std::get::value>( static_cast &>( *this ) ); + } + + template >::type, size_t Which = 0> + T const & get() const VULKAN_HPP_NOEXCEPT + { + return std::get::value>( static_cast const &>( *this ) ); + } + + template + std::tuple get() VULKAN_HPP_NOEXCEPT + { + return std::tie( get(), get(), get()... ); + } + + template + std::tuple get() const VULKAN_HPP_NOEXCEPT + { + return std::tie( get(), get(), get()... ); + } + + // assign a complete structure to the StructureChain without modifying the chaining + template >::type, size_t Which = 0> + StructureChain & assign( const T & rhs ) VULKAN_HPP_NOEXCEPT + { + T & lhs = get(); + auto pNext = lhs.pNext; + lhs = rhs; + lhs.pNext = pNext; + return *this; + } + + template + typename std::enable_if>::type>::value && ( Which == 0 ), bool>::type + isLinked() const VULKAN_HPP_NOEXCEPT + { + return true; + } + + template + typename std::enable_if>::type>::value || ( Which != 0 ), bool>::type + isLinked() const VULKAN_HPP_NOEXCEPT + { + static_assert( IsPartOfStructureChain::valid, "Can't unlink Structure that's not part of this StructureChain!" ); + return isLinked( reinterpret_cast( &get() ) ); + } + + template + typename std::enable_if>::type>::value || ( Which != 0 ), void>::type + relink() VULKAN_HPP_NOEXCEPT + { + static_assert( IsPartOfStructureChain::valid, "Can't relink Structure that's not part of this StructureChain!" ); + auto pNext = reinterpret_cast( &get() ); + VULKAN_HPP_ASSERT( !isLinked( pNext ) ); + auto & headElement = std::get<0>( static_cast &>( *this ) ); + pNext->pNext = reinterpret_cast( headElement.pNext ); + headElement.pNext = pNext; + } + + template + typename std::enable_if>::type>::value || ( Which != 0 ), void>::type + unlink() VULKAN_HPP_NOEXCEPT + { + static_assert( IsPartOfStructureChain::valid, "Can't unlink Structure that's not part of this StructureChain!" 
); + unlink( reinterpret_cast( &get() ) ); + } + + private: + template + struct ChainElementIndex : ChainElementIndex + { + }; + + template + struct ChainElementIndex::value, void>::type, First, Types...> + : ChainElementIndex + { + }; + + template + struct ChainElementIndex::value, void>::type, First, Types...> + : ChainElementIndex + { + }; + + template + struct ChainElementIndex::value, void>::type, First, Types...> + : std::integral_constant + { + }; + + bool isLinked( VkBaseInStructure const * pNext ) const VULKAN_HPP_NOEXCEPT + { + VkBaseInStructure const * elementPtr = + reinterpret_cast( &std::get<0>( static_cast const &>( *this ) ) ); + while ( elementPtr ) + { + if ( elementPtr->pNext == pNext ) + { + return true; + } + elementPtr = elementPtr->pNext; + } + return false; + } + + template + typename std::enable_if::type link() VULKAN_HPP_NOEXCEPT + { + auto & x = std::get( static_cast &>( *this ) ); + x.pNext = &std::get( static_cast &>( *this ) ); + link(); + } + + template + typename std::enable_if::type link() VULKAN_HPP_NOEXCEPT + { + } + + void link( void * dstBase, void const * srcBase, VkBaseOutStructure * dst, VkBaseInStructure const * src ) + { + while ( src->pNext ) + { + std::ptrdiff_t offset = reinterpret_cast( src->pNext ) - reinterpret_cast( srcBase ); + dst->pNext = reinterpret_cast( reinterpret_cast( dstBase ) + offset ); + dst = dst->pNext; + src = src->pNext; + } + dst->pNext = nullptr; + } + + void unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT + { + VkBaseOutStructure * elementPtr = reinterpret_cast( &std::get<0>( static_cast &>( *this ) ) ); + while ( elementPtr && ( elementPtr->pNext != pNext ) ) + { + elementPtr = elementPtr->pNext; + } + if ( elementPtr ) + { + elementPtr->pNext = pNext->pNext; + } + else + { + VULKAN_HPP_ASSERT( false ); // fires, if the ClassType member has already been unlinked ! + } + } + }; + + // interupt the VULKAN_HPP_NAMESPACE for a moment to add specializations of std::tuple_size and std::tuple_element for the StructureChain! 
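StructureChain uses the StructExtends traits above to check at compile time that every element may legally extend the head structure, and its constructor wires up the pNext pointers automatically. A hedged usage sketch, assuming the vk::PhysicalDeviceFeatures2 / vk::PhysicalDeviceVulkan12Features wrappers and the getFeatures2 overloads generated later in this header:

vk::StructureChain<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features> chain;

// pNext links were already set up by the StructureChain constructor.
physicalDevice.getFeatures2( &chain.get<vk::PhysicalDeviceFeatures2>() );

vk::Bool32 hasTimelineSemaphores = chain.get<vk::PhysicalDeviceVulkan12Features>().timelineSemaphore;

// A structure that turns out to be irrelevant can be dropped from the chain:
chain.unlink<vk::PhysicalDeviceVulkan12Features>();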
+} + +namespace std +{ + template + struct tuple_size> + { + static constexpr size_t value = std::tuple_size>::value; + }; + + template + struct tuple_element> + { + using type = typename std::tuple_element>::type; + }; +} // namespace std + +namespace VULKAN_HPP_NAMESPACE +{ + +# if !defined( VULKAN_HPP_NO_SMART_HANDLE ) + template + class UniqueHandleTraits; + + template + class UniqueHandle : public UniqueHandleTraits::deleter + { + private: + using Deleter = typename UniqueHandleTraits::deleter; + + public: + using element_type = Type; + + UniqueHandle() : Deleter(), m_value() {} + + explicit UniqueHandle( Type const & value, Deleter const & deleter = Deleter() ) VULKAN_HPP_NOEXCEPT + : Deleter( deleter ) + , m_value( value ) + { + } + + UniqueHandle( UniqueHandle const & ) = delete; + + UniqueHandle( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT + : Deleter( std::move( static_cast( other ) ) ) + , m_value( other.release() ) + { + } + + ~UniqueHandle() VULKAN_HPP_NOEXCEPT + { + if ( m_value ) + { + this->destroy( m_value ); + } + } + + UniqueHandle & operator=( UniqueHandle const & ) = delete; + + UniqueHandle & operator=( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT + { + reset( other.release() ); + *static_cast( this ) = std::move( static_cast( other ) ); + return *this; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_value.operator bool(); + } + +# if defined( VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST ) + operator Type() const VULKAN_HPP_NOEXCEPT + { + return m_value; + } +# endif + + Type const * operator->() const VULKAN_HPP_NOEXCEPT + { + return &m_value; + } + + Type * operator->() VULKAN_HPP_NOEXCEPT + { + return &m_value; + } + + Type const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_value; + } + + Type & operator*() VULKAN_HPP_NOEXCEPT + { + return m_value; + } + + const Type & get() const VULKAN_HPP_NOEXCEPT + { + return m_value; + } + + Type & get() VULKAN_HPP_NOEXCEPT + { + return m_value; + } + + void reset( Type const & value = Type() ) VULKAN_HPP_NOEXCEPT + { + if ( m_value != value ) + { + if ( m_value ) + { + this->destroy( m_value ); + } + m_value = value; + } + } + + Type release() VULKAN_HPP_NOEXCEPT + { + Type value = m_value; + m_value = nullptr; + return value; + } + + void swap( UniqueHandle & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_value, rhs.m_value ); + std::swap( static_cast( *this ), static_cast( rhs ) ); + } + + private: + Type m_value; + }; + + template + VULKAN_HPP_INLINE std::vector uniqueToRaw( std::vector const & handles ) + { + std::vector newBuffer( handles.size() ); + std::transform( handles.begin(), handles.end(), newBuffer.begin(), []( UniqueType const & handle ) { return handle.get(); } ); + return newBuffer; + } + + template + VULKAN_HPP_INLINE void swap( UniqueHandle & lhs, UniqueHandle & rhs ) VULKAN_HPP_NOEXCEPT + { + lhs.swap( rhs ); + } +# endif +#endif // VULKAN_HPP_DISABLE_ENHANCED_MODE + + namespace detail + { + class DispatchLoaderBase + { + public: + DispatchLoaderBase() = default; + DispatchLoaderBase( std::nullptr_t ) +#if !defined( NDEBUG ) + : m_valid( false ) +#endif + { + } + +#if !defined( NDEBUG ) + size_t getVkHeaderVersion() const + { + VULKAN_HPP_ASSERT( m_valid ); + return vkHeaderVersion; + } + + private: + size_t vkHeaderVersion = VK_HEADER_VERSION; + bool m_valid = true; +#endif + }; + +#if !defined( VK_NO_PROTOTYPES ) || ( defined( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC ) && ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 0 ) ) + class DispatchLoaderStatic : public DispatchLoaderBase + { + 
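UniqueHandle, defined just above, gives the generated handle types unique_ptr-like ownership: the deleter comes from UniqueHandleTraits, operator-> exposes the wrapped handle, and release() hands ownership back to the caller. A hedged sketch, assuming smart handles are enabled (VULKAN_HPP_NO_SMART_HANDLE not defined) and the createInstanceUnique helper generated later in this header:

vk::ApplicationInfo    appInfo( "demo", 1, nullptr, 0, VK_API_VERSION_1_1 );
vk::InstanceCreateInfo createInfo( {}, &appInfo );

vk::UniqueInstance instance = vk::createInstanceUnique( createInfo );

auto physicalDevices = instance->enumeratePhysicalDevices(); // acts like a pointer to vk::Instance

// The VkInstance is destroyed when 'instance' goes out of scope, unless released:
vk::Instance raw = instance.release();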
public: + //=== VK_VERSION_1_0 === + + VkResult + vkCreateInstance( const VkInstanceCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkInstance * pInstance ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance ); + } + + void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyInstance( instance, pAllocator ); + } + + VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t * pPhysicalDeviceCount, VkPhysicalDevice * pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices ); + } + + void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures * pFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures ); + } + + void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties * pFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties ); + } + + VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkImageFormatProperties * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties ); + } + + void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties ); + } + + void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } + + void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties ); + } + + PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char * pName ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetInstanceProcAddr( instance, pName ); + } + + PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char * pName ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceProcAddr( device, pName ); + } + + VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDevice * pDevice ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice ); + } + + void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDevice( device, pAllocator ); + } + + VkResult vkEnumerateInstanceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties ); + } + + VkResult vkEnumerateDeviceExtensionProperties( 
VkPhysicalDevice physicalDevice, + const char * pLayerName, + uint32_t * pPropertyCount, + VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties ); + } + + VkResult vkEnumerateInstanceLayerProperties( uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties ); + } + + VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties ); + } + + void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue ); + } + + VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSubmit( queue, submitCount, pSubmits, fence ); + } + + VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueWaitIdle( queue ); + } + + VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDeviceWaitIdle( device ); + } + + VkResult vkAllocateMemory( VkDevice device, + const VkMemoryAllocateInfo * pAllocateInfo, + const VkAllocationCallbacks * pAllocator, + VkDeviceMemory * pMemory ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory ); + } + + void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeMemory( device, memory, pAllocator ); + } + + VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void ** ppData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkMapMemory( device, memory, offset, size, flags, ppData ); + } + + void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUnmapMemory( device, memory ); + } + + VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); + } + + VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); + } + + void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize * pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes ); + } + + VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory( device, buffer, memory, memoryOffset ); + } + + VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory( device, image, memory, memoryOffset ); + } + + void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, 
VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements ); + } + + void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements ); + } + + void vkGetImageSparseMemoryRequirements( VkDevice device, + VkImage image, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkSampleCountFlagBits samples, + VkImageUsageFlags usage, + VkImageTiling tiling, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties ); + } + + VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo * pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence ); + } + + VkResult vkCreateFence( VkDevice device, + const VkFenceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence ); + } + + void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyFence( device, fence, pAllocator ); + } + + VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetFences( device, fenceCount, pFences ); + } + + VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceStatus( device, fence ); + } + + VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout ); + } + + VkResult vkCreateSemaphore( VkDevice device, + const VkSemaphoreCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSemaphore * pSemaphore ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore ); + } + + void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySemaphore( device, semaphore, pAllocator ); + } + + VkResult vkCreateEvent( VkDevice device, + const VkEventCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkEvent * pEvent ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent ); + } + + void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyEvent( device, event, pAllocator ); + } + + VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetEventStatus( device, event ); + } + + VkResult vkSetEvent( VkDevice 
device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetEvent( device, event ); + } + + VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetEvent( device, event ); + } + + VkResult vkCreateQueryPool( VkDevice device, + const VkQueryPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkQueryPool * pQueryPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool ); + } + + void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyQueryPool( device, queryPool, pAllocator ); + } + + VkResult vkGetQueryPoolResults( VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + VkDeviceSize stride, + VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags ); + } + + VkResult vkCreateBuffer( VkDevice device, + const VkBufferCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBuffer * pBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer ); + } + + void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBuffer( device, buffer, pAllocator ); + } + + VkResult vkCreateBufferView( VkDevice device, + const VkBufferViewCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBufferView * pView ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView ); + } + + void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBufferView( device, bufferView, pAllocator ); + } + + VkResult vkCreateImage( VkDevice device, + const VkImageCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkImage * pImage ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage ); + } + + void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyImage( device, image, pAllocator ); + } + + void vkGetImageSubresourceLayout( VkDevice device, + VkImage image, + const VkImageSubresource * pSubresource, + VkSubresourceLayout * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout ); + } + + VkResult vkCreateImageView( VkDevice device, + const VkImageViewCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkImageView * pView ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView ); + } + + void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyImageView( device, imageView, pAllocator ); + } + + VkResult vkCreateShaderModule( VkDevice device, + const VkShaderModuleCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkShaderModule * pShaderModule ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule ); + } + + void vkDestroyShaderModule( VkDevice device, 
VkShaderModule shaderModule, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyShaderModule( device, shaderModule, pAllocator ); + } + + VkResult vkCreatePipelineCache( VkDevice device, + const VkPipelineCacheCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineCache * pPipelineCache ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache ); + } + + void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator ); + } + + VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData ); + } + + VkResult + vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches ); + } + + VkResult vkCreateGraphicsPipelines( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkCreateComputePipelines( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkComputePipelineCreateInfo * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipeline( device, pipeline, pAllocator ); + } + + VkResult vkCreatePipelineLayout( VkDevice device, + const VkPipelineLayoutCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineLayout * pPipelineLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout ); + } + + void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator ); + } + + VkResult vkCreateSampler( VkDevice device, + const VkSamplerCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSampler * pSampler ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler ); + } + + void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySampler( device, sampler, pAllocator ); + } + + VkResult vkCreateDescriptorSetLayout( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorSetLayout * pSetLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout ); + } + + void vkDestroyDescriptorSetLayout( 
VkDevice device, + VkDescriptorSetLayout descriptorSetLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator ); + } + + VkResult vkCreateDescriptorPool( VkDevice device, + const VkDescriptorPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorPool * pDescriptorPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool ); + } + + void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator ); + } + + VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetDescriptorPool( device, descriptorPool, flags ); + } + + VkResult vkAllocateDescriptorSets( VkDevice device, + const VkDescriptorSetAllocateInfo * pAllocateInfo, + VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets ); + } + + VkResult vkFreeDescriptorSets( VkDevice device, + VkDescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets ); + } + + void vkUpdateDescriptorSets( VkDevice device, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const VkCopyDescriptorSet * pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); + } + + VkResult vkCreateFramebuffer( VkDevice device, + const VkFramebufferCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkFramebuffer * pFramebuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer ); + } + + void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyFramebuffer( device, framebuffer, pAllocator ); + } + + VkResult vkCreateRenderPass( VkDevice device, + const VkRenderPassCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass ); + } + + void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyRenderPass( device, renderPass, pAllocator ); + } + + void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity ); + } + + VkResult vkCreateCommandPool( VkDevice device, + const VkCommandPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCommandPool * pCommandPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool ); + } + + void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks * pAllocator ) 
const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCommandPool( device, commandPool, pAllocator ); + } + + VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetCommandPool( device, commandPool, flags ); + } + + VkResult vkAllocateCommandBuffers( VkDevice device, + const VkCommandBufferAllocateInfo * pAllocateInfo, + VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers ); + } + + void vkFreeCommandBuffers( VkDevice device, + VkCommandPool commandPool, + uint32_t commandBufferCount, + const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers ); + } + + VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo ); + } + + VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEndCommandBuffer( commandBuffer ); + } + + VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetCommandBuffer( commandBuffer, flags ); + } + + void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline ); + } + + void vkCmdSetViewport( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports ); + } + + void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors ); + } + + void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineWidth( commandBuffer, lineWidth ); + } + + void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, + float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + } + + void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetBlendConstants( commandBuffer, blendConstants ); + } + + void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds ); + } + + void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask ); + } + + void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask ); + } + + void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, 
VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference ); + } + + void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VkDescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorSets( + commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets ); + } + + void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType ); + } + + void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets ); + } + + void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); + } + + void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, + uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } + + void + vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ ); + } + + void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset ); + } + + void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy * pRegions ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions ); + } + + void vkCmdCopyImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } + + void vkCmdBlitImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + 
VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageBlit * pRegions, + VkFilter filter ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); + } + + void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); + } + + void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); + } + + void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void * pData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData ); + } + + void + vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data ); + } + + void vkCmdClearColorImage( VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearColorValue * pColor, + uint32_t rangeCount, + const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges ); + } + + void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearDepthStencilValue * pDepthStencil, + uint32_t rangeCount, + const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges ); + } + + void vkCmdClearAttachments( VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkClearAttachment * pAttachments, + uint32_t rectCount, + const VkClearRect * pRects ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects ); + } + + void vkCmdResolveImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageResolve * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } + + void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetEvent( commandBuffer, event, stageMask ); + } + + void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetEvent( commandBuffer, event, stageMask ); + } + + void vkCmdWaitEvents( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VkMemoryBarrier * 
pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWaitEvents( commandBuffer, + eventCount, + pEvents, + srcStageMask, + dstStageMask, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers ); + } + + void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + VkDependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VkMemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier( commandBuffer, + srcStageMask, + dstStageMask, + dependencyFlags, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers ); + } + + void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags ); + } + + void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndQuery( commandBuffer, queryPool, query ); + } + + void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount ); + } + + void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkQueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query ); + } + + void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize stride, + VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); + } + + void vkCmdPushConstants( VkCommandBuffer commandBuffer, + VkPipelineLayout layout, + VkShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void * pValues ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues ); + } + + void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents ); + } + + void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass( commandBuffer, contents ); + } + + void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass( commandBuffer ); + } + + void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + 
{ + return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers ); + } + + //=== VK_VERSION_1_1 === + + VkResult vkEnumerateInstanceVersion( uint32_t * pApiVersion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceVersion( pApiVersion ); + } + + VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos ); + } + + VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos ); + } + + void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); + } + + void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDeviceMask( commandBuffer, deviceMask ); + } + + void vkCmdDispatchBase( VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + + VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, + uint32_t * pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); + } + + void vkGetImageMemoryRequirements2( VkDevice device, + const VkImageMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements ); + } + + void vkGetBufferMemoryRequirements2( VkDevice device, + const VkBufferMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements ); + } + + void vkGetImageSparseMemoryRequirements2( VkDevice device, + const VkImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures ); + } + + void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties ); + } + + void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties 
); + } + + VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties ); + } + + void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } + + void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties ); + } + + void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); + } + + void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTrimCommandPool( device, commandPool, flags ); + } + + void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2 * pQueueInfo, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue ); + } + + VkResult vkCreateSamplerYcbcrConversion( VkDevice device, + const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion ); + } + + void vkDestroySamplerYcbcrConversion( VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator ); + } + + VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); + } + + void vkDestroyDescriptorUpdateTemplate( VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator ); + } + + void vkUpdateDescriptorSetWithTemplate( VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData ); + } + + void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT + { + 
return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); + } + + void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); + } + + void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); + } + + void vkGetDescriptorSetLayoutSupport( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport ); + } + + //=== VK_VERSION_1_2 === + + void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + VkResult vkCreateRenderPass2( VkDevice device, + const VkRenderPassCreateInfo2 * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass ); + } + + void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); + } + + void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo * pSubpassBeginInfo, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); + } + + void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo ); + } + + void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount ); + } + + VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreCounterValue( device, semaphore, pValue ); + } + + VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitSemaphores( 
device, pWaitInfo, timeout ); + } + + VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSignalSemaphore( device, pSignalInfo ); + } + + VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddress( device, pInfo ); + } + + uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferOpaqueCaptureAddress( device, pInfo ); + } + + uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo ); + } + + //=== VK_VERSION_1_3 === + + VkResult vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice, + uint32_t * pToolCount, + VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceToolProperties( physicalDevice, pToolCount, pToolProperties ); + } + + VkResult vkCreatePrivateDataSlot( VkDevice device, + const VkPrivateDataSlotCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePrivateDataSlot( device, pCreateInfo, pAllocator, pPrivateDataSlot ); + } + + void vkDestroyPrivateDataSlot( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPrivateDataSlot( device, privateDataSlot, pAllocator ); + } + + VkResult vkSetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkSetPrivateData( device, objectType, objectHandle, privateDataSlot, data ); + } + + void vkGetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPrivateData( device, objectType, objectHandle, privateDataSlot, pData ); + } + + void vkCmdSetEvent2( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetEvent2( commandBuffer, event, pDependencyInfo ); + } + + void vkCmdResetEvent2( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetEvent2( commandBuffer, event, stageMask ); + } + + void vkCmdWaitEvents2( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWaitEvents2( commandBuffer, eventCount, pEvents, pDependencyInfos ); + } + + void vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier2( commandBuffer, pDependencyInfo ); + } + + void vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteTimestamp2( commandBuffer, stage, queryPool, query ); + } + + VkResult vkQueueSubmit2( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkQueueSubmit2( queue, submitCount, pSubmits, fence ); + } + + void vkCmdCopyBuffer2( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer2( commandBuffer, pCopyBufferInfo ); + } + + void vkCmdCopyImage2( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage2( commandBuffer, pCopyImageInfo ); + } + + void vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage2( commandBuffer, pCopyBufferToImageInfo ); + } + + void vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer2( commandBuffer, pCopyImageToBufferInfo ); + } + + void vkCmdBlitImage2( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage2( commandBuffer, pBlitImageInfo ); + } + + void vkCmdResolveImage2( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResolveImage2( commandBuffer, pResolveImageInfo ); + } + + void vkCmdBeginRendering( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRendering( commandBuffer, pRenderingInfo ); + } + + void vkCmdEndRendering( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRendering( commandBuffer ); + } + + void vkCmdSetCullMode( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCullMode( commandBuffer, cullMode ); + } + + void vkCmdSetFrontFace( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFrontFace( commandBuffer, frontFace ); + } + + void vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveTopology( commandBuffer, primitiveTopology ); + } + + void vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWithCount( commandBuffer, viewportCount, pViewports ); + } + + void vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetScissorWithCount( commandBuffer, scissorCount, pScissors ); + } + + void vkCmdBindVertexBuffers2( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes, + const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers2( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); + } + + void vkCmdSetDepthTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthTestEnable( commandBuffer, depthTestEnable ); + } + + void vkCmdSetDepthWriteEnable( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthWriteEnable( commandBuffer, depthWriteEnable ); + } + + void vkCmdSetDepthCompareOp( VkCommandBuffer 
commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthCompareOp( commandBuffer, depthCompareOp ); + } + + void vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBoundsTestEnable( commandBuffer, depthBoundsTestEnable ); + } + + void vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilTestEnable( commandBuffer, stencilTestEnable ); + } + + void vkCmdSetStencilOp( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilOp( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); + } + + void vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizerDiscardEnable( commandBuffer, rasterizerDiscardEnable ); + } + + void vkCmdSetDepthBiasEnable( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBiasEnable( commandBuffer, depthBiasEnable ); + } + + void vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveRestartEnable( commandBuffer, primitiveRestartEnable ); + } + + void vkGetDeviceBufferMemoryRequirements( VkDevice device, + const VkDeviceBufferMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceBufferMemoryRequirements( device, pInfo, pMemoryRequirements ); + } + + void vkGetDeviceImageMemoryRequirements( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageMemoryRequirements( device, pInfo, pMemoryRequirements ); + } + + void vkGetDeviceImageSparseMemoryRequirements( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageSparseMemoryRequirements( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + //=== VK_VERSION_1_4 === + + void vkCmdSetLineStipple( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineStipple( commandBuffer, lineStippleFactor, lineStipplePattern ); + } + + VkResult vkMapMemory2( VkDevice device, const VkMemoryMapInfo * pMemoryMapInfo, void ** ppData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMapMemory2( device, pMemoryMapInfo, ppData ); + } + + VkResult vkUnmapMemory2( VkDevice device, const VkMemoryUnmapInfo * pMemoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUnmapMemory2( device, pMemoryUnmapInfo ); + } + + void vkCmdBindIndexBuffer2( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindIndexBuffer2( commandBuffer, buffer, offset, size, indexType ); + } + + void vkGetRenderingAreaGranularity( VkDevice device, const VkRenderingAreaInfo * pRenderingAreaInfo, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT + { 
+ return ::vkGetRenderingAreaGranularity( device, pRenderingAreaInfo, pGranularity ); + } + + void vkGetDeviceImageSubresourceLayout( VkDevice device, + const VkDeviceImageSubresourceInfo * pInfo, + VkSubresourceLayout2 * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageSubresourceLayout( device, pInfo, pLayout ); + } + + void vkGetImageSubresourceLayout2( VkDevice device, + VkImage image, + const VkImageSubresource2 * pSubresource, + VkSubresourceLayout2 * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout2( device, image, pSubresource, pLayout ); + } + + void vkCmdPushDescriptorSet( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSet( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites ); + } + + void vkCmdPushDescriptorSetWithTemplate( VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetWithTemplate( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); + } + + void vkCmdSetRenderingAttachmentLocations( VkCommandBuffer commandBuffer, + const VkRenderingAttachmentLocationInfo * pLocationInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRenderingAttachmentLocations( commandBuffer, pLocationInfo ); + } + + void vkCmdSetRenderingInputAttachmentIndices( VkCommandBuffer commandBuffer, + const VkRenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRenderingInputAttachmentIndices( commandBuffer, pInputAttachmentIndexInfo ); + } + + void vkCmdBindDescriptorSets2( VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfo * pBindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorSets2( commandBuffer, pBindDescriptorSetsInfo ); + } + + void vkCmdPushConstants2( VkCommandBuffer commandBuffer, const VkPushConstantsInfo * pPushConstantsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushConstants2( commandBuffer, pPushConstantsInfo ); + } + + void vkCmdPushDescriptorSet2( VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfo * pPushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSet2( commandBuffer, pPushDescriptorSetInfo ); + } + + void vkCmdPushDescriptorSetWithTemplate2( VkCommandBuffer commandBuffer, + const VkPushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetWithTemplate2( commandBuffer, pPushDescriptorSetWithTemplateInfo ); + } + + VkResult vkCopyMemoryToImage( VkDevice device, const VkCopyMemoryToImageInfo * pCopyMemoryToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToImage( device, pCopyMemoryToImageInfo ); + } + + VkResult vkCopyImageToMemory( VkDevice device, const VkCopyImageToMemoryInfo * pCopyImageToMemoryInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToMemory( device, pCopyImageToMemoryInfo ); + } + + VkResult vkCopyImageToImage( VkDevice device, const VkCopyImageToImageInfo * pCopyImageToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToImage( device, pCopyImageToImageInfo ); + } + + VkResult + vkTransitionImageLayout( VkDevice device, uint32_t 
transitionCount, const VkHostImageLayoutTransitionInfo * pTransitions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTransitionImageLayout( device, transitionCount, pTransitions ); + } + + //=== VK_KHR_surface === + + void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySurfaceKHR( instance, surface, pAllocator ); + } + + VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32 * pSupported ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported ); + } + + VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities ); + } + + VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VkSurfaceFormatKHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats ); + } + + VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pPresentModeCount, + VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes ); + } + + //=== VK_KHR_swapchain === + + VkResult vkCreateSwapchainKHR( VkDevice device, + const VkSwapchainCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSwapchainKHR * pSwapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain ); + } + + void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySwapchainKHR( device, swapchain, pAllocator ); + } + + VkResult vkGetSwapchainImagesKHR( VkDevice device, + VkSwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VkImage * pSwapchainImages ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages ); + } + + VkResult vkAcquireNextImageKHR( + VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex ); + } + + VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR * pPresentInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueuePresentKHR( queue, pPresentInfo ); + } + + VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, + VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities ); + } + + VkResult + vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes ); + } + + VkResult vkGetPhysicalDevicePresentRectanglesKHR( 
VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pRectCount, + VkRect2D * pRects ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects ); + } + + VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex ); + } + + //=== VK_KHR_display === + + VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPlanePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, + uint32_t planeIndex, + uint32_t * pDisplayCount, + VkDisplayKHR * pDisplays ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays ); + } + + VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t * pPropertyCount, + VkDisplayModePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties ); + } + + VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + const VkDisplayModeCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDisplayModeKHR * pMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode ); + } + + VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, + VkDisplayModeKHR mode, + uint32_t planeIndex, + VkDisplayPlaneCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities ); + } + + VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, + const VkDisplaySurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } + + //=== VK_KHR_display_swapchain === + + VkResult vkCreateSharedSwapchainsKHR( VkDevice device, + uint32_t swapchainCount, + const VkSwapchainCreateInfoKHR * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkSwapchainKHR * pSwapchains ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains ); + } + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + VkResult vkCreateXlibSurfaceKHR( VkInstance instance, + const VkXlibSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t 
queueFamilyIndex, + Display * dpy, + VisualID visualID ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID ); + } +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + VkResult vkCreateXcbSurfaceKHR( VkInstance instance, + const VkXcbSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id ); + } +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, + const VkWaylandSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct wl_display * display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display ); + } +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, + const VkAndroidSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + VkResult vkCreateWin32SurfaceKHR( VkInstance instance, + const VkWin32SurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, + const VkDebugReportCallbackCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDebugReportCallbackEXT * pCallback ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback ); + } + + void vkDestroyDebugReportCallbackEXT( VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator ); + } + + void vkDebugReportMessageEXT( VkInstance instance, + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + 
uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage ); + } + + //=== VK_EXT_debug_marker === + + VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo ); + } + + VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo ); + } + + void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo ); + } + + void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerEndEXT( commandBuffer ); + } + + void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo ); + } + + //=== VK_KHR_video_queue === + + VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice physicalDevice, + const VkVideoProfileInfoKHR * pVideoProfile, + VkVideoCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, pVideoProfile, pCapabilities ); + } + + VkResult vkGetPhysicalDeviceVideoFormatPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, + uint32_t * pVideoFormatPropertyCount, + VkVideoFormatPropertiesKHR * pVideoFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceVideoFormatPropertiesKHR( physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties ); + } + + VkResult vkCreateVideoSessionKHR( VkDevice device, + const VkVideoSessionCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkVideoSessionKHR * pVideoSession ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateVideoSessionKHR( device, pCreateInfo, pAllocator, pVideoSession ); + } + + void vkDestroyVideoSessionKHR( VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyVideoSessionKHR( device, videoSession, pAllocator ); + } + + VkResult vkGetVideoSessionMemoryRequirementsKHR( VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t * pMemoryRequirementsCount, + VkVideoSessionMemoryRequirementsKHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetVideoSessionMemoryRequirementsKHR( device, videoSession, pMemoryRequirementsCount, pMemoryRequirements ); + } + + VkResult vkBindVideoSessionMemoryKHR( VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t bindSessionMemoryInfoCount, + const VkBindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindVideoSessionMemoryKHR( device, videoSession, bindSessionMemoryInfoCount, pBindSessionMemoryInfos ); + } + + VkResult vkCreateVideoSessionParametersKHR( VkDevice device, + const VkVideoSessionParametersCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkVideoSessionParametersKHR * pVideoSessionParameters ) 
const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateVideoSessionParametersKHR( device, pCreateInfo, pAllocator, pVideoSessionParameters ); + } + + VkResult vkUpdateVideoSessionParametersKHR( VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkVideoSessionParametersUpdateInfoKHR * pUpdateInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateVideoSessionParametersKHR( device, videoSessionParameters, pUpdateInfo ); + } + + void vkDestroyVideoSessionParametersKHR( VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyVideoSessionParametersKHR( device, videoSessionParameters, pAllocator ); + } + + void vkCmdBeginVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR * pBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginVideoCodingKHR( commandBuffer, pBeginInfo ); + } + + void vkCmdEndVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR * pEndCodingInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndVideoCodingKHR( commandBuffer, pEndCodingInfo ); + } + + void vkCmdControlVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR * pCodingControlInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdControlVideoCodingKHR( commandBuffer, pCodingControlInfo ); + } + + //=== VK_KHR_video_decode_queue === + + void vkCmdDecodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR * pDecodeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDecodeVideoKHR( commandBuffer, pDecodeInfo ); + } + + //=== VK_EXT_transform_feedback === + + void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes ); + } + + void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer * pCounterBuffers, + const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); + } + + void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer * pCounterBuffers, + const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); + } + + void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index ); + } + + void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index ); + } + + void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, + uint32_t instanceCount, + uint32_t firstInstance, + VkBuffer counterBuffer, + VkDeviceSize counterBufferOffset, + uint32_t counterOffset, + 
uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride ); + } + + //=== VK_NVX_binary_import === + + VkResult vkCreateCuModuleNVX( VkDevice device, + const VkCuModuleCreateInfoNVX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCuModuleNVX * pModule ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCuModuleNVX( device, pCreateInfo, pAllocator, pModule ); + } + + VkResult vkCreateCuFunctionNVX( VkDevice device, + const VkCuFunctionCreateInfoNVX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCuFunctionNVX * pFunction ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCuFunctionNVX( device, pCreateInfo, pAllocator, pFunction ); + } + + void vkDestroyCuModuleNVX( VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCuModuleNVX( device, module, pAllocator ); + } + + void vkDestroyCuFunctionNVX( VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCuFunctionNVX( device, function, pAllocator ); + } + + void vkCmdCuLaunchKernelNVX( VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCuLaunchKernelNVX( commandBuffer, pLaunchInfo ); + } + + //=== VK_NVX_image_view_handle === + + uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewHandleNVX( device, pInfo ); + } + + uint64_t vkGetImageViewHandle64NVX( VkDevice device, const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewHandle64NVX( device, pInfo ); + } + + VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewAddressNVX( device, imageView, pProperties ); + } + + //=== VK_AMD_draw_indirect_count === + + void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + //=== VK_AMD_shader_info === + + VkResult vkGetShaderInfoAMD( VkDevice device, + VkPipeline pipeline, + VkShaderStageFlagBits shaderStage, + VkShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo ); + } + + //=== VK_KHR_dynamic_rendering === + + void vkCmdBeginRenderingKHR( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderingKHR( commandBuffer, pRenderingInfo ); + } + + void vkCmdEndRenderingKHR( VkCommandBuffer commandBuffer ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderingKHR( commandBuffer ); + } + +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, + const VkStreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + + VkResult + vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkExternalMemoryHandleTypeFlagsNV externalHandleType, + VkExternalImageFormatPropertiesNV * pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + + VkResult vkGetMemoryWin32HandleNV( VkDevice device, + VkDeviceMemory memory, + VkExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + + void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures ); + } + + void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties ); + } + + void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties ); + } + + VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties ); + } + + void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } + + void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties ); + } + + void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); + } + + //=== VK_KHR_device_group === + + void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); + } + + void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask ); + } + + void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + VkResult vkCreateViSurfaceNN( VkInstance instance, + const VkViSurfaceCreateInfoNN * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_maintenance1 === + + void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTrimCommandPoolKHR( device, commandPool, flags ); + } + + //=== VK_KHR_device_group_creation === + + VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, + uint32_t * pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); + } + + //=== VK_KHR_external_memory_capabilities === + + void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + + VkResult + vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } + + VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VkMemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + + VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd ); + } + + VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + int fd, + 
VkMemoryFdPropertiesKHR * pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties ); + } + + //=== VK_KHR_external_semaphore_capabilities === + + void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + + VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, + const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo ); + } + + VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, + const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + + VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo ); + } + + VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd ); + } + + //=== VK_KHR_push_descriptor === + + void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites ); + } + + void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); + } + + //=== VK_EXT_conditional_rendering === + + void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, + const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin ); + } + + void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndConditionalRenderingEXT( commandBuffer ); + } + + //=== VK_KHR_descriptor_update_template === + + VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); + } + + void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const 
VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator ); + } + + void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData ); + } + + //=== VK_NV_clip_space_w_scaling === + + void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportWScalingNV * pViewportWScalings ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings ); + } + + //=== VK_EXT_direct_mode_display === + + VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseDisplayEXT( physicalDevice, display ); + } + +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display ); + } + + VkResult + vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, RROutput rrOutput, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay ); + } +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + + VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities ); + } + + //=== VK_EXT_display_control === + + VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo ); + } + + VkResult vkRegisterDeviceEventEXT( VkDevice device, + const VkDeviceEventInfoEXT * pDeviceEventInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence ); + } + + VkResult vkRegisterDisplayEventEXT( VkDevice device, + VkDisplayKHR display, + const VkDisplayEventInfoEXT * pDisplayEventInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence ); + } + + VkResult vkGetSwapchainCounterEXT( VkDevice device, + VkSwapchainKHR swapchain, + VkSurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue ); + } + + //=== VK_GOOGLE_display_timing === + + VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, + VkSwapchainKHR swapchain, + VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties ); + } + + VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, + 
VkSwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + VkPastPresentationTimingGOOGLE * pPresentationTimings ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings ); + } + + //=== VK_EXT_discard_rectangles === + + void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D * pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles ); + } + + void vkCmdSetDiscardRectangleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 discardRectangleEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDiscardRectangleEnableEXT( commandBuffer, discardRectangleEnable ); + } + + void vkCmdSetDiscardRectangleModeEXT( VkCommandBuffer commandBuffer, VkDiscardRectangleModeEXT discardRectangleMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDiscardRectangleModeEXT( commandBuffer, discardRectangleMode ); + } + + //=== VK_EXT_hdr_metadata === + + void vkSetHdrMetadataEXT( VkDevice device, + uint32_t swapchainCount, + const VkSwapchainKHR * pSwapchains, + const VkHdrMetadataEXT * pMetadata ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata ); + } + + //=== VK_KHR_create_renderpass2 === + + VkResult vkCreateRenderPass2KHR( VkDevice device, + const VkRenderPassCreateInfo2 * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass ); + } + + void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); + } + + void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo * pSubpassBeginInfo, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); + } + + void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo ); + } + + //=== VK_KHR_shared_presentable_image === + + VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainStatusKHR( device, swapchain ); + } + + //=== VK_KHR_external_fence_capabilities === + + void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + + VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo ); + } + + VkResult vkGetFenceWin32HandleKHR( VkDevice device, const 
VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + + VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportFenceFdKHR( device, pImportFenceFdInfo ); + } + + VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd ); + } + + //=== VK_KHR_performance_query === + + VkResult + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VkPerformanceCounterKHR * pCounters, + VkPerformanceCounterDescriptionKHR * pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions ); + } + + void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, + const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses ); + } + + VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireProfilingLockKHR( device, pInfo ); + } + + void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseProfilingLockKHR( device ); + } + + //=== VK_KHR_get_surface_capabilities2 === + + VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities ); + } + + VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VkSurfaceFormat2KHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats ); + } + + //=== VK_KHR_get_display_properties2 === + + VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPlaneProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t * pPropertyCount, + VkDisplayModeProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties ); + 
} + + VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, + const VkDisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VkDisplayPlaneCapabilities2KHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities ); + } + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + VkResult vkCreateIOSSurfaceMVK( VkInstance instance, + const VkIOSSurfaceCreateInfoMVK * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, + const VkMacOSSurfaceCreateInfoMVK * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo ); + } + + VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo ); + } + + void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo ); + } + + void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueEndDebugUtilsLabelEXT( queue ); + } + + void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo ); + } + + void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + } + + void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer ); + } + + void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + } + + VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, + const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDebugUtilsMessengerEXT * pMessenger ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger ); + } + + void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, + VkDebugUtilsMessengerEXT messenger, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator ); + } + + void vkSubmitDebugUtilsMessageEXT( VkInstance instance, + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData ) const VULKAN_HPP_NOEXCEPT 
+ { + return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData ); + } + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + + VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, + const struct AHardwareBuffer * buffer, + VkAndroidHardwareBufferPropertiesANDROID * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties ); + } + + VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, + const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer ); + } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + VkResult vkCreateExecutionGraphPipelinesAMDX( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateExecutionGraphPipelinesAMDX( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkGetExecutionGraphPipelineScratchSizeAMDX( VkDevice device, + VkPipeline executionGraph, + VkExecutionGraphPipelineScratchSizeAMDX * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetExecutionGraphPipelineScratchSizeAMDX( device, executionGraph, pSizeInfo ); + } + + VkResult vkGetExecutionGraphPipelineNodeIndexAMDX( VkDevice device, + VkPipeline executionGraph, + const VkPipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, + uint32_t * pNodeIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetExecutionGraphPipelineNodeIndexAMDX( device, executionGraph, pNodeInfo, pNodeIndex ); + } + + void vkCmdInitializeGraphScratchMemoryAMDX( VkCommandBuffer commandBuffer, + VkPipeline executionGraph, + VkDeviceAddress scratch, + VkDeviceSize scratchSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdInitializeGraphScratchMemoryAMDX( commandBuffer, executionGraph, scratch, scratchSize ); + } + + void vkCmdDispatchGraphAMDX( VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceSize scratchSize, + const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchGraphAMDX( commandBuffer, scratch, scratchSize, pCountInfo ); + } + + void vkCmdDispatchGraphIndirectAMDX( VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceSize scratchSize, + const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchGraphIndirectAMDX( commandBuffer, scratch, scratchSize, pCountInfo ); + } + + void vkCmdDispatchGraphIndirectCountAMDX( VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceSize scratchSize, + VkDeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchGraphIndirectCountAMDX( commandBuffer, scratch, scratchSize, countInfo ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + + void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo ); + } + + void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, + 
VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT * pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties ); + } + + //=== VK_KHR_get_memory_requirements2 === + + void vkGetImageMemoryRequirements2KHR( VkDevice device, + const VkImageMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetBufferMemoryRequirements2KHR( VkDevice device, + const VkBufferMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, + const VkImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + //=== VK_KHR_acceleration_structure === + + VkResult vkCreateAccelerationStructureKHR( VkDevice device, + const VkAccelerationStructureCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkAccelerationStructureKHR * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure ); + } + + void vkDestroyAccelerationStructureKHR( VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator ); + } + + void vkCmdBuildAccelerationStructuresKHR( VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructuresKHR( commandBuffer, infoCount, pInfos, ppBuildRangeInfos ); + } + + void vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkDeviceAddress * pIndirectDeviceAddresses, + const uint32_t * pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructuresIndirectKHR( + commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts ); + } + + VkResult vkBuildAccelerationStructuresKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBuildAccelerationStructuresKHR( device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos ); + } + + VkResult vkCopyAccelerationStructureKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyAccelerationStructureKHR( device, deferredOperation, pInfo ); + } + + VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, + 
VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyAccelerationStructureToMemoryKHR( device, deferredOperation, pInfo ); + } + + VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToAccelerationStructureKHR( device, deferredOperation, pInfo ); + } + + VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR * pAccelerationStructures, + VkQueryType queryType, + size_t dataSize, + void * pData, + size_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride ); + } + + void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo ); + } + + void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo ); + } + + void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, + const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo ); + } + + VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, + const VkAccelerationStructureDeviceAddressInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo ); + } + + void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR * pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteAccelerationStructuresPropertiesKHR( + commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + } + + void vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, + const VkAccelerationStructureVersionInfoKHR * pVersionInfo, + VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, pVersionInfo, pCompatibility ); + } + + void vkGetAccelerationStructureBuildSizesKHR( VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkAccelerationStructureBuildGeometryInfoKHR * pBuildInfo, + const uint32_t * pMaxPrimitiveCounts, + VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureBuildSizesKHR( device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo ); + } + + //=== VK_KHR_ray_tracing_pipeline === + + void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + 
uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysKHR( + commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth ); + } + + VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoKHR * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRayTracingPipelinesKHR( device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkGetRayTracingShaderGroupHandlesKHR( + VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } + + VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } + + void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysIndirectKHR( + commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, indirectDeviceAddress ); + } + + VkDeviceSize vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device, + VkPipeline pipeline, + uint32_t group, + VkShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupStackSizeKHR( device, pipeline, group, groupShader ); + } + + void vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRayTracingPipelineStackSizeKHR( commandBuffer, pipelineStackSize ); + } + + //=== VK_KHR_sampler_ycbcr_conversion === + + VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, + const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion ); + } + + void vkDestroySamplerYcbcrConversionKHR( VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator ); + } + + //=== VK_KHR_bind_memory2 === + + VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos ); + } + + VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos ); + } + + //=== VK_EXT_image_drm_format_modifier === + + VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, + VkImage image, + VkImageDrmFormatModifierPropertiesEXT * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties ); + } + + //=== VK_EXT_validation_cache === + + VkResult vkCreateValidationCacheEXT( VkDevice device, + const VkValidationCacheCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkValidationCacheEXT * pValidationCache ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache ); + } + + void + vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator ); + } + + VkResult vkMergeValidationCachesEXT( VkDevice device, + VkValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VkValidationCacheEXT * pSrcCaches ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches ); + } + + VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData ); + } + + //=== VK_NV_shading_rate_image === + + void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout ); + } + + void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkShadingRatePaletteNV * pShadingRatePalettes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes ); + } + + void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, + VkCoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VkCoarseSampleOrderCustomNV * pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); + } + + //=== VK_NV_ray_tracing === + + VkResult vkCreateAccelerationStructureNV( VkDevice device, + const VkAccelerationStructureCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkAccelerationStructureNV * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure ); + } + + void vkDestroyAccelerationStructureNV( VkDevice device, + VkAccelerationStructureNV accelerationStructure, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator ); + } + + void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, + const VkAccelerationStructureMemoryRequirementsInfoNV * pInfo, + VkMemoryRequirements2KHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); + } + + VkResult 
vkBindAccelerationStructureMemoryNV( VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoNV * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos ); + } + + void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, + const VkAccelerationStructureInfoNV * pInfo, + VkBuffer instanceData, + VkDeviceSize instanceOffset, + VkBool32 update, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkBuffer scratch, + VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset ); + } + + void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode ); + } + + void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, + VkBuffer raygenShaderBindingTableBuffer, + VkDeviceSize raygenShaderBindingOffset, + VkBuffer missShaderBindingTableBuffer, + VkDeviceSize missShaderBindingOffset, + VkDeviceSize missShaderBindingStride, + VkBuffer hitShaderBindingTableBuffer, + VkDeviceSize hitShaderBindingOffset, + VkDeviceSize hitShaderBindingStride, + VkBuffer callableShaderBindingTableBuffer, + VkDeviceSize callableShaderBindingOffset, + VkDeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysNV( commandBuffer, + raygenShaderBindingTableBuffer, + raygenShaderBindingOffset, + missShaderBindingTableBuffer, + missShaderBindingOffset, + missShaderBindingStride, + hitShaderBindingTableBuffer, + hitShaderBindingOffset, + hitShaderBindingStride, + callableShaderBindingTableBuffer, + callableShaderBindingOffset, + callableShaderBindingStride, + width, + height, + depth ); + } + + VkResult vkCreateRayTracingPipelinesNV( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoNV * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkGetRayTracingShaderGroupHandlesNV( + VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } + + VkResult vkGetAccelerationStructureHandleNV( VkDevice device, + VkAccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData ); + } + + void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureNV * pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteAccelerationStructuresPropertiesNV( + commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + } + + VkResult vkCompileDeferredNV( VkDevice 
device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCompileDeferredNV( device, pipeline, shader ); + } + + //=== VK_KHR_maintenance3 === + + void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport ); + } + + //=== VK_KHR_draw_indirect_count === + + void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + //=== VK_EXT_external_memory_host === + + VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties ); + } + + //=== VK_AMD_buffer_marker === + + void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker ); + } + + void vkCmdWriteBufferMarker2AMD( + VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker ); + } + + //=== VK_EXT_calibrated_timestamps === + + VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, + uint32_t * pTimeDomainCount, + VkTimeDomainKHR * pTimeDomains ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains ); + } + + VkResult vkGetCalibratedTimestampsEXT( VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); + } + + //=== VK_NV_mesh_shader === + + void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask ); + } + + void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer 
commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + //=== VK_NV_scissor_exclusive === + + void vkCmdSetExclusiveScissorEnableNV( VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkBool32 * pExclusiveScissorEnables ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetExclusiveScissorEnableNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissorEnables ); + } + + void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkRect2D * pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors ); + } + + //=== VK_NV_device_diagnostic_checkpoints === + + void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void * pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker ); + } + + void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointDataNV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); + } + + void vkGetQueueCheckpointData2NV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData ); + } + + //=== VK_KHR_timeline_semaphore === + + VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue ); + } + + VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout ); + } + + VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSignalSemaphoreKHR( device, pSignalInfo ); + } + + //=== VK_INTEL_performance_query === + + VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL * pInitializeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo ); + } + + void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUninitializePerformanceApiINTEL( device ); + } + + VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo ); + } + + VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer, + const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo ); + } + + VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceOverrideINTEL( 
commandBuffer, pOverrideInfo ); + } + + VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, + const VkPerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + VkPerformanceConfigurationINTEL * pConfiguration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration ); + } + + VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleasePerformanceConfigurationINTEL( device, configuration ); + } + + VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration ); + } + + VkResult + vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPerformanceParameterINTEL( device, parameter, pValue ); + } + + //=== VK_AMD_display_native_hdr === + + void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable ); + } + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, + const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + VkResult vkCreateMetalSurfaceEXT( VkInstance instance, + const VkMetalSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + + VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pFragmentShadingRateCount, + VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFragmentShadingRatesKHR( physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates ); + } + + void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, + const VkExtent2D * pFragmentSize, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps ); + } + + //=== VK_KHR_dynamic_rendering_local_read === + + void vkCmdSetRenderingAttachmentLocationsKHR( VkCommandBuffer commandBuffer, + const VkRenderingAttachmentLocationInfo * pLocationInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRenderingAttachmentLocationsKHR( commandBuffer, pLocationInfo ); + } + + void vkCmdSetRenderingInputAttachmentIndicesKHR( VkCommandBuffer commandBuffer, + const VkRenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRenderingInputAttachmentIndicesKHR( commandBuffer, pInputAttachmentIndexInfo ); + } + + //=== VK_EXT_buffer_device_address === + + VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice 
device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddressEXT( device, pInfo ); + } + + //=== VK_EXT_tooling_info === + + VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, + uint32_t * pToolCount, + VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties ); + } + + //=== VK_KHR_present_wait === + + VkResult vkWaitForPresentKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitForPresentKHR( device, swapchain, presentId, timeout ); + } + + //=== VK_NV_cooperative_matrix === + + VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkCooperativeMatrixPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties ); + } + + //=== VK_NV_coverage_reduction_mode === + + VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + VkPhysicalDevice physicalDevice, uint32_t * pCombinationCount, VkFramebufferMixedSamplesCombinationNV * pCombinations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes ); + } + + VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain ); + } + + VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain ); + } + + VkResult vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + + VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, + const VkHeadlessSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } + + //=== VK_KHR_buffer_device_address === + + VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddressKHR( device, pInfo ); + } + + uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo ); + } + + uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const 
VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo ); + } + + //=== VK_EXT_line_rasterization === + + void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern ); + } + + //=== VK_EXT_host_query_reset === + + void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount ); + } + + //=== VK_EXT_extended_dynamic_state === + + void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCullModeEXT( commandBuffer, cullMode ); + } + + void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace ); + } + + void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology ); + } + + void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports ); + } + + void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors ); + } + + void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes, + const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers2EXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); + } + + void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable ); + } + + void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable ); + } + + void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp ); + } + + void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable ); + } + + void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable ); + } + + void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, 
depthFailOp, compareOp ); + } + + //=== VK_KHR_deferred_host_operations === + + VkResult vkCreateDeferredOperationKHR( VkDevice device, + const VkAllocationCallbacks * pAllocator, + VkDeferredOperationKHR * pDeferredOperation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation ); + } + + void + vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator ); + } + + uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation ); + } + + VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeferredOperationResultKHR( device, operation ); + } + + VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDeferredOperationJoinKHR( device, operation ); + } + + //=== VK_KHR_pipeline_executable_properties === + + VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, + const VkPipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + VkPipelineExecutablePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties ); + } + + VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, + const VkPipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + VkPipelineExecutableStatisticKHR * pStatistics ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics ); + } + + VkResult + vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, + const VkPipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + VkPipelineExecutableInternalRepresentationKHR * pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); + } + + //=== VK_EXT_host_image_copy === + + VkResult vkCopyMemoryToImageEXT( VkDevice device, const VkCopyMemoryToImageInfo * pCopyMemoryToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToImageEXT( device, pCopyMemoryToImageInfo ); + } + + VkResult vkCopyImageToMemoryEXT( VkDevice device, const VkCopyImageToMemoryInfo * pCopyImageToMemoryInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToMemoryEXT( device, pCopyImageToMemoryInfo ); + } + + VkResult vkCopyImageToImageEXT( VkDevice device, const VkCopyImageToImageInfo * pCopyImageToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToImageEXT( device, pCopyImageToImageInfo ); + } + + VkResult + vkTransitionImageLayoutEXT( VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfo * pTransitions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTransitionImageLayoutEXT( device, transitionCount, pTransitions ); + } + + void vkGetImageSubresourceLayout2EXT( VkDevice device, + VkImage image, + const VkImageSubresource2 * pSubresource, + VkSubresourceLayout2 * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout ); + } 
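// --- Editorial illustration (not part of the diff above) -------------------
// Every wrapper in this dispatcher is a thin pass-through to the identically
// named global C entry point, so a call made through the dispatcher behaves
// exactly like calling ::vkFoo directly. A minimal sketch, assuming a Vulkan
// 1.4-era <vulkan/vulkan.h> and an already-created VkDevice/VkImage; the
// Dispatch template parameter stands in for whatever dispatcher class these
// member functions belong to, and query_first_mip_layout is a hypothetical
// helper used only for illustration.
#include <vulkan/vulkan.h>

template <typename Dispatch>
void query_first_mip_layout( VkDevice device, VkImage image, Dispatch const & d )
{
    // Describe the subresource we want the layout of (mip 0, layer 0, color).
    VkImageSubresource2 subresource{};
    subresource.sType            = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2;
    subresource.imageSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0 };

    VkSubresourceLayout2 layout{};
    layout.sType = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2;

    // Through the dispatcher wrapper shown in the diff ...
    d.vkGetImageSubresourceLayout2EXT( device, image, &subresource, &layout );

    // ... which simply forwards to the same global entry point:
    ::vkGetImageSubresourceLayout2EXT( device, image, &subresource, &layout );
}
// ---------------------------------------------------------------------------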
+ + //=== VK_KHR_map_memory2 === + + VkResult vkMapMemory2KHR( VkDevice device, const VkMemoryMapInfo * pMemoryMapInfo, void ** ppData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMapMemory2KHR( device, pMemoryMapInfo, ppData ); + } + + VkResult vkUnmapMemory2KHR( VkDevice device, const VkMemoryUnmapInfo * pMemoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUnmapMemory2KHR( device, pMemoryUnmapInfo ); + } + + //=== VK_EXT_swapchain_maintenance1 === + + VkResult vkReleaseSwapchainImagesEXT( VkDevice device, const VkReleaseSwapchainImagesInfoEXT * pReleaseInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseSwapchainImagesEXT( device, pReleaseInfo ); + } + + //=== VK_NV_device_generated_commands === + + void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoNV * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); + } + + void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo ); + } + + void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); + } + + void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline, + uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex ); + } + + VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkIndirectCommandsLayoutNV * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); + } + + void vkDestroyIndirectCommandsLayoutNV( VkDevice device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator ); + } + + //=== VK_EXT_depth_bias_control === + + void vkCmdSetDepthBias2EXT( VkCommandBuffer commandBuffer, const VkDepthBiasInfoEXT * pDepthBiasInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBias2EXT( commandBuffer, pDepthBiasInfo ); + } + + //=== VK_EXT_acquire_drm_display === + + VkResult vkAcquireDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireDrmDisplayEXT( physicalDevice, drmFd, display ); + } + + VkResult vkGetDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR * display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDrmDisplayEXT( physicalDevice, drmFd, connectorId, display ); + } + + //=== VK_EXT_private_data === + + VkResult vkCreatePrivateDataSlotEXT( VkDevice device, + const VkPrivateDataSlotCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot ); + } + + void vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator ); + } + + VkResult vkSetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data ); + } + + void vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData ); + } + + //=== VK_KHR_video_encode_queue === + + VkResult + vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo, + VkVideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( physicalDevice, pQualityLevelInfo, pQualityLevelProperties ); + } + + VkResult vkGetEncodedVideoSessionParametersKHR( VkDevice device, + const VkVideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo, + VkVideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo, + size_t * pDataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetEncodedVideoSessionParametersKHR( device, pVideoSessionParametersInfo, pFeedbackInfo, pDataSize, pData ); + } + + void vkCmdEncodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR * pEncodeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEncodeVideoKHR( commandBuffer, pEncodeInfo ); + } + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + VkResult vkCreateCudaModuleNV( VkDevice device, + const VkCudaModuleCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCudaModuleNV * pModule ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCudaModuleNV( device, pCreateInfo, pAllocator, pModule ); + } + + VkResult vkGetCudaModuleCacheNV( VkDevice device, VkCudaModuleNV module, size_t * pCacheSize, void * pCacheData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetCudaModuleCacheNV( device, module, pCacheSize, pCacheData ); + } + + VkResult vkCreateCudaFunctionNV( VkDevice device, + const VkCudaFunctionCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCudaFunctionNV * pFunction ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCudaFunctionNV( device, pCreateInfo, pAllocator, pFunction ); + } + + void vkDestroyCudaModuleNV( VkDevice device, VkCudaModuleNV module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCudaModuleNV( device, module, pAllocator ); + } + + void vkDestroyCudaFunctionNV( VkDevice device, VkCudaFunctionNV function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCudaFunctionNV( device, function, pAllocator ); + } + + void vkCmdCudaLaunchKernelNV( VkCommandBuffer commandBuffer, const VkCudaLaunchInfoNV * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCudaLaunchKernelNV( commandBuffer, pLaunchInfo ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_QCOM_tile_shading 
=== + + void vkCmdDispatchTileQCOM( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchTileQCOM( commandBuffer ); + } + + void vkCmdBeginPerTileExecutionQCOM( VkCommandBuffer commandBuffer, const VkPerTileBeginInfoQCOM * pPerTileBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginPerTileExecutionQCOM( commandBuffer, pPerTileBeginInfo ); + } + + void vkCmdEndPerTileExecutionQCOM( VkCommandBuffer commandBuffer, const VkPerTileEndInfoQCOM * pPerTileEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndPerTileExecutionQCOM( commandBuffer, pPerTileEndInfo ); + } + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + + void vkExportMetalObjectsEXT( VkDevice device, VkExportMetalObjectsInfoEXT * pMetalObjectsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkExportMetalObjectsEXT( device, pMetalObjectsInfo ); + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + + void vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetEvent2KHR( commandBuffer, event, pDependencyInfo ); + } + + void vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetEvent2KHR( commandBuffer, event, stageMask ); + } + + void vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWaitEvents2KHR( commandBuffer, eventCount, pEvents, pDependencyInfos ); + } + + void vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier2KHR( commandBuffer, pDependencyInfo ); + } + + void + vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteTimestamp2KHR( commandBuffer, stage, queryPool, query ); + } + + VkResult vkQueueSubmit2KHR( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence ); + } + + //=== VK_EXT_descriptor_buffer === + + void vkGetDescriptorSetLayoutSizeEXT( VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize * pLayoutSizeInBytes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutSizeEXT( device, layout, pLayoutSizeInBytes ); + } + + void vkGetDescriptorSetLayoutBindingOffsetEXT( VkDevice device, + VkDescriptorSetLayout layout, + uint32_t binding, + VkDeviceSize * pOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutBindingOffsetEXT( device, layout, binding, pOffset ); + } + + void vkGetDescriptorEXT( VkDevice device, const VkDescriptorGetInfoEXT * pDescriptorInfo, size_t dataSize, void * pDescriptor ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorEXT( device, pDescriptorInfo, dataSize, pDescriptor ); + } + + void vkCmdBindDescriptorBuffersEXT( VkCommandBuffer commandBuffer, + uint32_t bufferCount, + const VkDescriptorBufferBindingInfoEXT * pBindingInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorBuffersEXT( commandBuffer, bufferCount, pBindingInfos ); + } + + void vkCmdSetDescriptorBufferOffsetsEXT( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + 
VkPipelineLayout layout, + uint32_t firstSet, + uint32_t setCount, + const uint32_t * pBufferIndices, + const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDescriptorBufferOffsetsEXT( commandBuffer, pipelineBindPoint, layout, firstSet, setCount, pBufferIndices, pOffsets ); + } + + void vkCmdBindDescriptorBufferEmbeddedSamplersEXT( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorBufferEmbeddedSamplersEXT( commandBuffer, pipelineBindPoint, layout, set ); + } + + VkResult + vkGetBufferOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } + + VkResult + vkGetImageOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkImageCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } + + VkResult vkGetImageViewOpaqueCaptureDescriptorDataEXT( VkDevice device, + const VkImageViewCaptureDescriptorDataInfoEXT * pInfo, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } + + VkResult vkGetSamplerOpaqueCaptureDescriptorDataEXT( VkDevice device, + const VkSamplerCaptureDescriptorDataInfoEXT * pInfo, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSamplerOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } + + VkResult vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( VkDevice device, + const VkAccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } + + //=== VK_NV_fragment_shading_rate_enums === + + void vkCmdSetFragmentShadingRateEnumNV( VkCommandBuffer commandBuffer, + VkFragmentShadingRateNV shadingRate, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFragmentShadingRateEnumNV( commandBuffer, shadingRate, combinerOps ); + } + + //=== VK_EXT_mesh_shader === + + void vkCmdDrawMeshTasksEXT( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksEXT( commandBuffer, groupCountX, groupCountY, groupCountZ ); + } + + void vkCmdDrawMeshTasksIndirectEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectEXT( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDrawMeshTasksIndirectCountEXT( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectCountEXT( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + //=== VK_KHR_copy_commands2 === + + void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo ); + } + + void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, const 
VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo ); + } + + void vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo ); + } + + void vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo ); + } + + void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo ); + } + + void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo ); + } + + //=== VK_EXT_device_fault === + + VkResult vkGetDeviceFaultInfoEXT( VkDevice device, VkDeviceFaultCountsEXT * pFaultCounts, VkDeviceFaultInfoEXT * pFaultInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceFaultInfoEXT( device, pFaultCounts, pFaultInfo ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + + VkResult vkAcquireWinrtDisplayNV( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireWinrtDisplayNV( physicalDevice, display ); + } + + VkResult vkGetWinrtDisplayNV( VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetWinrtDisplayNV( physicalDevice, deviceRelativeId, pDisplay ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance, + const VkDirectFBSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + IDirectFB * dfb ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb ); + } +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_vertex_input_dynamic_state === + + void vkCmdSetVertexInputEXT( VkCommandBuffer commandBuffer, + uint32_t vertexBindingDescriptionCount, + const VkVertexInputBindingDescription2EXT * pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VkVertexInputAttributeDescription2EXT * pVertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetVertexInputEXT( + commandBuffer, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions ); + } + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + + VkResult vkGetMemoryZirconHandleFUCHSIA( VkDevice device, + const VkMemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); + } + + VkResult vkGetMemoryZirconHandlePropertiesFUCHSIA( VkDevice 
device, + VkExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VkMemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryZirconHandlePropertiesFUCHSIA( device, handleType, zirconHandle, pMemoryZirconHandleProperties ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + + VkResult + vkImportSemaphoreZirconHandleFUCHSIA( VkDevice device, + const VkImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreZirconHandleFUCHSIA( device, pImportSemaphoreZirconHandleInfo ); + } + + VkResult vkGetSemaphoreZirconHandleFUCHSIA( VkDevice device, + const VkSemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + VkResult vkCreateBufferCollectionFUCHSIA( VkDevice device, + const VkBufferCollectionCreateInfoFUCHSIA * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBufferCollectionFUCHSIA * pCollection ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateBufferCollectionFUCHSIA( device, pCreateInfo, pAllocator, pCollection ); + } + + VkResult vkSetBufferCollectionImageConstraintsFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkImageConstraintsInfoFUCHSIA * pImageConstraintsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetBufferCollectionImageConstraintsFUCHSIA( device, collection, pImageConstraintsInfo ); + } + + VkResult vkSetBufferCollectionBufferConstraintsFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkBufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetBufferCollectionBufferConstraintsFUCHSIA( device, collection, pBufferConstraintsInfo ); + } + + void vkDestroyBufferCollectionFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBufferCollectionFUCHSIA( device, collection, pAllocator ); + } + + VkResult vkGetBufferCollectionPropertiesFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + VkBufferCollectionPropertiesFUCHSIA * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferCollectionPropertiesFUCHSIA( device, collection, pProperties ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + + VkResult + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( VkDevice device, VkRenderPass renderpass, VkExtent2D * pMaxWorkgroupSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( device, renderpass, pMaxWorkgroupSize ); + } + + void vkCmdSubpassShadingHUAWEI( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSubpassShadingHUAWEI( commandBuffer ); + } + + //=== VK_HUAWEI_invocation_mask === + + void vkCmdBindInvocationMaskHUAWEI( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindInvocationMaskHUAWEI( commandBuffer, imageView, imageLayout ); + } + + //=== VK_NV_external_memory_rdma === + + VkResult vkGetMemoryRemoteAddressNV( VkDevice device, + const 
VkMemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, + VkRemoteAddressNV * pAddress ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryRemoteAddressNV( device, pMemoryGetRemoteAddressInfo, pAddress ); + } + + //=== VK_EXT_pipeline_properties === + + VkResult vkGetPipelinePropertiesEXT( VkDevice device, + const VkPipelineInfoEXT * pPipelineInfo, + VkBaseOutStructure * pPipelineProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelinePropertiesEXT( device, pPipelineInfo, pPipelineProperties ); + } + + //=== VK_EXT_extended_dynamic_state2 === + + void vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer, uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPatchControlPointsEXT( commandBuffer, patchControlPoints ); + } + + void vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizerDiscardEnableEXT( commandBuffer, rasterizerDiscardEnable ); + } + + void vkCmdSetDepthBiasEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBiasEnableEXT( commandBuffer, depthBiasEnable ); + } + + void vkCmdSetLogicOpEXT( VkCommandBuffer commandBuffer, VkLogicOp logicOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLogicOpEXT( commandBuffer, logicOp ); + } + + void vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveRestartEnableEXT( commandBuffer, primitiveRestartEnable ); + } + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + VkResult vkCreateScreenSurfaceQNX( VkInstance instance, + const VkScreenSurfaceCreateInfoQNX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateScreenSurfaceQNX( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceScreenPresentationSupportQNX( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct _screen_window * window ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceScreenPresentationSupportQNX( physicalDevice, queueFamilyIndex, window ); + } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + + void vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32 * pColorWriteEnables ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorWriteEnableEXT( commandBuffer, attachmentCount, pColorWriteEnables ); + } + + //=== VK_KHR_ray_tracing_maintenance1 === + + void vkCmdTraceRaysIndirect2KHR( VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysIndirect2KHR( commandBuffer, indirectDeviceAddress ); + } + + //=== VK_EXT_multi_draw === + + void vkCmdDrawMultiEXT( VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawInfoEXT * pVertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMultiEXT( commandBuffer, drawCount, pVertexInfo, instanceCount, firstInstance, stride ); + } + + void vkCmdDrawMultiIndexedEXT( VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawIndexedInfoEXT * pIndexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + const int32_t * pVertexOffset ) const VULKAN_HPP_NOEXCEPT + { + 
return ::vkCmdDrawMultiIndexedEXT( commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset ); + } + + //=== VK_EXT_opacity_micromap === + + VkResult vkCreateMicromapEXT( VkDevice device, + const VkMicromapCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkMicromapEXT * pMicromap ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMicromapEXT( device, pCreateInfo, pAllocator, pMicromap ); + } + + void vkDestroyMicromapEXT( VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyMicromapEXT( device, micromap, pAllocator ); + } + + void vkCmdBuildMicromapsEXT( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildMicromapsEXT( commandBuffer, infoCount, pInfos ); + } + + VkResult vkBuildMicromapsEXT( VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBuildMicromapsEXT( device, deferredOperation, infoCount, pInfos ); + } + + VkResult vkCopyMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMicromapEXT( device, deferredOperation, pInfo ); + } + + VkResult vkCopyMicromapToMemoryEXT( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMicromapToMemoryEXT( device, deferredOperation, pInfo ); + } + + VkResult vkCopyMemoryToMicromapEXT( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToMicromapEXT( device, deferredOperation, pInfo ); + } + + VkResult vkWriteMicromapsPropertiesEXT( VkDevice device, + uint32_t micromapCount, + const VkMicromapEXT * pMicromaps, + VkQueryType queryType, + size_t dataSize, + void * pData, + size_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWriteMicromapsPropertiesEXT( device, micromapCount, pMicromaps, queryType, dataSize, pData, stride ); + } + + void vkCmdCopyMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMicromapEXT( commandBuffer, pInfo ); + } + + void vkCmdCopyMicromapToMemoryEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMicromapToMemoryEXT( commandBuffer, pInfo ); + } + + void vkCmdCopyMemoryToMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToMicromapEXT( commandBuffer, pInfo ); + } + + void vkCmdWriteMicromapsPropertiesEXT( VkCommandBuffer commandBuffer, + uint32_t micromapCount, + const VkMicromapEXT * pMicromaps, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteMicromapsPropertiesEXT( commandBuffer, micromapCount, pMicromaps, queryType, queryPool, firstQuery ); + } + + void vkGetDeviceMicromapCompatibilityEXT( VkDevice device, + const VkMicromapVersionInfoEXT * pVersionInfo, + VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMicromapCompatibilityEXT( device, pVersionInfo, pCompatibility ); + } + + void 
vkGetMicromapBuildSizesEXT( VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkMicromapBuildInfoEXT * pBuildInfo, + VkMicromapBuildSizesInfoEXT * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMicromapBuildSizesEXT( device, buildType, pBuildInfo, pSizeInfo ); + } + + //=== VK_HUAWEI_cluster_culling_shader === + + void vkCmdDrawClusterHUAWEI( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawClusterHUAWEI( commandBuffer, groupCountX, groupCountY, groupCountZ ); + } + + void vkCmdDrawClusterIndirectHUAWEI( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawClusterIndirectHUAWEI( commandBuffer, buffer, offset ); + } + + //=== VK_EXT_pageable_device_local_memory === + + void vkSetDeviceMemoryPriorityEXT( VkDevice device, VkDeviceMemory memory, float priority ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDeviceMemoryPriorityEXT( device, memory, priority ); + } + + //=== VK_KHR_maintenance4 === + + void vkGetDeviceBufferMemoryRequirementsKHR( VkDevice device, + const VkDeviceBufferMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceBufferMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetDeviceImageMemoryRequirementsKHR( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetDeviceImageSparseMemoryRequirementsKHR( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageSparseMemoryRequirementsKHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + //=== VK_VALVE_descriptor_set_host_mapping === + + void vkGetDescriptorSetLayoutHostMappingInfoVALVE( VkDevice device, + const VkDescriptorSetBindingReferenceVALVE * pBindingReference, + VkDescriptorSetLayoutHostMappingInfoVALVE * pHostMapping ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutHostMappingInfoVALVE( device, pBindingReference, pHostMapping ); + } + + void vkGetDescriptorSetHostMappingVALVE( VkDevice device, VkDescriptorSet descriptorSet, void ** ppData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetHostMappingVALVE( device, descriptorSet, ppData ); + } + + //=== VK_NV_copy_memory_indirect === + + void vkCmdCopyMemoryIndirectNV( VkCommandBuffer commandBuffer, + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride ); + } + + void vkCmdCopyMemoryToImageIndirectNV( VkCommandBuffer commandBuffer, + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + VkImage dstImage, + VkImageLayout dstImageLayout, + const VkImageSubresourceLayers * pImageSubresources ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToImageIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride, dstImage, dstImageLayout, pImageSubresources ); + } + + //=== VK_NV_memory_decompression === + + void vkCmdDecompressMemoryNV( VkCommandBuffer commandBuffer, + uint32_t 
decompressRegionCount, + const VkDecompressMemoryRegionNV * pDecompressMemoryRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDecompressMemoryNV( commandBuffer, decompressRegionCount, pDecompressMemoryRegions ); + } + + void vkCmdDecompressMemoryIndirectCountNV( VkCommandBuffer commandBuffer, + VkDeviceAddress indirectCommandsAddress, + VkDeviceAddress indirectCommandsCountAddress, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDecompressMemoryIndirectCountNV( commandBuffer, indirectCommandsAddress, indirectCommandsCountAddress, stride ); + } + + //=== VK_NV_device_generated_commands_compute === + + void vkGetPipelineIndirectMemoryRequirementsNV( VkDevice device, + const VkComputePipelineCreateInfo * pCreateInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineIndirectMemoryRequirementsNV( device, pCreateInfo, pMemoryRequirements ); + } + + void vkCmdUpdatePipelineIndirectBufferNV( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdUpdatePipelineIndirectBufferNV( commandBuffer, pipelineBindPoint, pipeline ); + } + + VkDeviceAddress vkGetPipelineIndirectDeviceAddressNV( VkDevice device, const VkPipelineIndirectDeviceAddressInfoNV * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineIndirectDeviceAddressNV( device, pInfo ); + } + + //=== VK_EXT_extended_dynamic_state3 === + + void vkCmdSetDepthClampEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthClampEnableEXT( commandBuffer, depthClampEnable ); + } + + void vkCmdSetPolygonModeEXT( VkCommandBuffer commandBuffer, VkPolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPolygonModeEXT( commandBuffer, polygonMode ); + } + + void vkCmdSetRasterizationSamplesEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizationSamplesEXT( commandBuffer, rasterizationSamples ); + } + + void vkCmdSetSampleMaskEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask * pSampleMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetSampleMaskEXT( commandBuffer, samples, pSampleMask ); + } + + void vkCmdSetAlphaToCoverageEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetAlphaToCoverageEnableEXT( commandBuffer, alphaToCoverageEnable ); + } + + void vkCmdSetAlphaToOneEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetAlphaToOneEnableEXT( commandBuffer, alphaToOneEnable ); + } + + void vkCmdSetLogicOpEnableEXT( VkCommandBuffer commandBuffer, VkBool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLogicOpEnableEXT( commandBuffer, logicOpEnable ); + } + + void vkCmdSetColorBlendEnableEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkBool32 * pColorBlendEnables ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorBlendEnableEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEnables ); + } + + void vkCmdSetColorBlendEquationEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorBlendEquationEXT * pColorBlendEquations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorBlendEquationEXT( commandBuffer, 
firstAttachment, attachmentCount, pColorBlendEquations ); + } + + void vkCmdSetColorWriteMaskEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorComponentFlags * pColorWriteMasks ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorWriteMaskEXT( commandBuffer, firstAttachment, attachmentCount, pColorWriteMasks ); + } + + void vkCmdSetTessellationDomainOriginEXT( VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetTessellationDomainOriginEXT( commandBuffer, domainOrigin ); + } + + void vkCmdSetRasterizationStreamEXT( VkCommandBuffer commandBuffer, uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizationStreamEXT( commandBuffer, rasterizationStream ); + } + + void vkCmdSetConservativeRasterizationModeEXT( VkCommandBuffer commandBuffer, + VkConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetConservativeRasterizationModeEXT( commandBuffer, conservativeRasterizationMode ); + } + + void vkCmdSetExtraPrimitiveOverestimationSizeEXT( VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetExtraPrimitiveOverestimationSizeEXT( commandBuffer, extraPrimitiveOverestimationSize ); + } + + void vkCmdSetDepthClipEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthClipEnableEXT( commandBuffer, depthClipEnable ); + } + + void vkCmdSetSampleLocationsEnableEXT( VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetSampleLocationsEnableEXT( commandBuffer, sampleLocationsEnable ); + } + + void vkCmdSetColorBlendAdvancedEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorBlendAdvancedEXT * pColorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorBlendAdvancedEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendAdvanced ); + } + + void vkCmdSetProvokingVertexModeEXT( VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetProvokingVertexModeEXT( commandBuffer, provokingVertexMode ); + } + + void vkCmdSetLineRasterizationModeEXT( VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineRasterizationModeEXT( commandBuffer, lineRasterizationMode ); + } + + void vkCmdSetLineStippleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineStippleEnableEXT( commandBuffer, stippledLineEnable ); + } + + void vkCmdSetDepthClipNegativeOneToOneEXT( VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthClipNegativeOneToOneEXT( commandBuffer, negativeOneToOne ); + } + + void vkCmdSetViewportWScalingEnableNV( VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWScalingEnableNV( commandBuffer, viewportWScalingEnable ); + } + + void vkCmdSetViewportSwizzleNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportSwizzleNV * pViewportSwizzles ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportSwizzleNV( commandBuffer, firstViewport, 
viewportCount, pViewportSwizzles ); + } + + void vkCmdSetCoverageToColorEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageToColorEnableNV( commandBuffer, coverageToColorEnable ); + } + + void vkCmdSetCoverageToColorLocationNV( VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageToColorLocationNV( commandBuffer, coverageToColorLocation ); + } + + void vkCmdSetCoverageModulationModeNV( VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageModulationModeNV( commandBuffer, coverageModulationMode ); + } + + void vkCmdSetCoverageModulationTableEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageModulationTableEnableNV( commandBuffer, coverageModulationTableEnable ); + } + + void vkCmdSetCoverageModulationTableNV( VkCommandBuffer commandBuffer, + uint32_t coverageModulationTableCount, + const float * pCoverageModulationTable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageModulationTableNV( commandBuffer, coverageModulationTableCount, pCoverageModulationTable ); + } + + void vkCmdSetShadingRateImageEnableNV( VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetShadingRateImageEnableNV( commandBuffer, shadingRateImageEnable ); + } + + void vkCmdSetRepresentativeFragmentTestEnableNV( VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRepresentativeFragmentTestEnableNV( commandBuffer, representativeFragmentTestEnable ); + } + + void vkCmdSetCoverageReductionModeNV( VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageReductionModeNV( commandBuffer, coverageReductionMode ); + } + + //=== VK_EXT_shader_module_identifier === + + void vkGetShaderModuleIdentifierEXT( VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderModuleIdentifierEXT( device, shaderModule, pIdentifier ); + } + + void vkGetShaderModuleCreateInfoIdentifierEXT( VkDevice device, + const VkShaderModuleCreateInfo * pCreateInfo, + VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderModuleCreateInfoIdentifierEXT( device, pCreateInfo, pIdentifier ); + } + + //=== VK_NV_optical_flow === + + VkResult vkGetPhysicalDeviceOpticalFlowImageFormatsNV( VkPhysicalDevice physicalDevice, + const VkOpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, + uint32_t * pFormatCount, + VkOpticalFlowImageFormatPropertiesNV * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceOpticalFlowImageFormatsNV( physicalDevice, pOpticalFlowImageFormatInfo, pFormatCount, pImageFormatProperties ); + } + + VkResult vkCreateOpticalFlowSessionNV( VkDevice device, + const VkOpticalFlowSessionCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkOpticalFlowSessionNV * pSession ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateOpticalFlowSessionNV( device, pCreateInfo, pAllocator, pSession ); + } + + void vkDestroyOpticalFlowSessionNV( VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks * pAllocator ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyOpticalFlowSessionNV( device, session, pAllocator ); + } + + VkResult vkBindOpticalFlowSessionImageNV( VkDevice device, + VkOpticalFlowSessionNV session, + VkOpticalFlowSessionBindingPointNV bindingPoint, + VkImageView view, + VkImageLayout layout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindOpticalFlowSessionImageNV( device, session, bindingPoint, view, layout ); + } + + void vkCmdOpticalFlowExecuteNV( VkCommandBuffer commandBuffer, + VkOpticalFlowSessionNV session, + const VkOpticalFlowExecuteInfoNV * pExecuteInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdOpticalFlowExecuteNV( commandBuffer, session, pExecuteInfo ); + } + + //=== VK_KHR_maintenance5 === + + void vkCmdBindIndexBuffer2KHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindIndexBuffer2KHR( commandBuffer, buffer, offset, size, indexType ); + } + + void + vkGetRenderingAreaGranularityKHR( VkDevice device, const VkRenderingAreaInfo * pRenderingAreaInfo, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRenderingAreaGranularityKHR( device, pRenderingAreaInfo, pGranularity ); + } + + void vkGetDeviceImageSubresourceLayoutKHR( VkDevice device, + const VkDeviceImageSubresourceInfo * pInfo, + VkSubresourceLayout2 * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageSubresourceLayoutKHR( device, pInfo, pLayout ); + } + + void vkGetImageSubresourceLayout2KHR( VkDevice device, + VkImage image, + const VkImageSubresource2 * pSubresource, + VkSubresourceLayout2 * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout2KHR( device, image, pSubresource, pLayout ); + } + + //=== VK_AMD_anti_lag === + + void vkAntiLagUpdateAMD( VkDevice device, const VkAntiLagDataAMD * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAntiLagUpdateAMD( device, pData ); + } + + //=== VK_EXT_shader_object === + + VkResult vkCreateShadersEXT( VkDevice device, + uint32_t createInfoCount, + const VkShaderCreateInfoEXT * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateShadersEXT( device, createInfoCount, pCreateInfos, pAllocator, pShaders ); + } + + void vkDestroyShaderEXT( VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyShaderEXT( device, shader, pAllocator ); + } + + VkResult vkGetShaderBinaryDataEXT( VkDevice device, VkShaderEXT shader, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderBinaryDataEXT( device, shader, pDataSize, pData ); + } + + void vkCmdBindShadersEXT( VkCommandBuffer commandBuffer, + uint32_t stageCount, + const VkShaderStageFlagBits * pStages, + const VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindShadersEXT( commandBuffer, stageCount, pStages, pShaders ); + } + + void vkCmdSetDepthClampRangeEXT( VkCommandBuffer commandBuffer, + VkDepthClampModeEXT depthClampMode, + const VkDepthClampRangeEXT * pDepthClampRange ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthClampRangeEXT( commandBuffer, depthClampMode, pDepthClampRange ); + } + + //=== VK_KHR_pipeline_binary === + + VkResult vkCreatePipelineBinariesKHR( VkDevice device, + const VkPipelineBinaryCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineBinaryHandlesInfoKHR * pBinaries ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePipelineBinariesKHR( device, pCreateInfo, pAllocator, pBinaries ); + } + + void vkDestroyPipelineBinaryKHR( VkDevice device, VkPipelineBinaryKHR pipelineBinary, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipelineBinaryKHR( device, pipelineBinary, pAllocator ); + } + + VkResult vkGetPipelineKeyKHR( VkDevice device, + const VkPipelineCreateInfoKHR * pPipelineCreateInfo, + VkPipelineBinaryKeyKHR * pPipelineKey ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineKeyKHR( device, pPipelineCreateInfo, pPipelineKey ); + } + + VkResult vkGetPipelineBinaryDataKHR( VkDevice device, + const VkPipelineBinaryDataInfoKHR * pInfo, + VkPipelineBinaryKeyKHR * pPipelineBinaryKey, + size_t * pPipelineBinaryDataSize, + void * pPipelineBinaryData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineBinaryDataKHR( device, pInfo, pPipelineBinaryKey, pPipelineBinaryDataSize, pPipelineBinaryData ); + } + + VkResult vkReleaseCapturedPipelineDataKHR( VkDevice device, + const VkReleaseCapturedPipelineDataInfoKHR * pInfo, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseCapturedPipelineDataKHR( device, pInfo, pAllocator ); + } + + //=== VK_QCOM_tile_properties === + + VkResult vkGetFramebufferTilePropertiesQCOM( VkDevice device, + VkFramebuffer framebuffer, + uint32_t * pPropertiesCount, + VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFramebufferTilePropertiesQCOM( device, framebuffer, pPropertiesCount, pProperties ); + } + + VkResult vkGetDynamicRenderingTilePropertiesQCOM( VkDevice device, + const VkRenderingInfo * pRenderingInfo, + VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDynamicRenderingTilePropertiesQCOM( device, pRenderingInfo, pProperties ); + } + + //=== VK_NV_cooperative_vector === + + VkResult vkGetPhysicalDeviceCooperativeVectorPropertiesNV( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkCooperativeVectorPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCooperativeVectorPropertiesNV( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkConvertCooperativeVectorMatrixNV( VkDevice device, const VkConvertCooperativeVectorMatrixInfoNV * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkConvertCooperativeVectorMatrixNV( device, pInfo ); + } + + void vkCmdConvertCooperativeVectorMatrixNV( VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkConvertCooperativeVectorMatrixInfoNV * pInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdConvertCooperativeVectorMatrixNV( commandBuffer, infoCount, pInfos ); + } + + //=== VK_NV_low_latency2 === + + VkResult vkSetLatencySleepModeNV( VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepModeInfoNV * pSleepModeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetLatencySleepModeNV( device, swapchain, pSleepModeInfo ); + } + + VkResult vkLatencySleepNV( VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepInfoNV * pSleepInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkLatencySleepNV( device, swapchain, pSleepInfo ); + } + + void vkSetLatencyMarkerNV( VkDevice device, VkSwapchainKHR swapchain, const VkSetLatencyMarkerInfoNV * pLatencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetLatencyMarkerNV( device, swapchain, pLatencyMarkerInfo ); + } + + void vkGetLatencyTimingsNV( VkDevice device, VkSwapchainKHR swapchain, VkGetLatencyMarkerInfoNV * 
pLatencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetLatencyTimingsNV( device, swapchain, pLatencyMarkerInfo ); + } + + void vkQueueNotifyOutOfBandNV( VkQueue queue, const VkOutOfBandQueueTypeInfoNV * pQueueTypeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueNotifyOutOfBandNV( queue, pQueueTypeInfo ); + } + + //=== VK_KHR_cooperative_matrix === + + VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkCooperativeMatrixPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); + } + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + + void vkCmdSetAttachmentFeedbackLoopEnableEXT( VkCommandBuffer commandBuffer, VkImageAspectFlags aspectMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetAttachmentFeedbackLoopEnableEXT( commandBuffer, aspectMask ); + } + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + + VkResult vkGetScreenBufferPropertiesQNX( VkDevice device, + const struct _screen_buffer * buffer, + VkScreenBufferPropertiesQNX * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetScreenBufferPropertiesQNX( device, buffer, pProperties ); + } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_line_rasterization === + + void vkCmdSetLineStippleKHR( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineStippleKHR( commandBuffer, lineStippleFactor, lineStipplePattern ); + } + + //=== VK_KHR_calibrated_timestamps === + + VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( VkPhysicalDevice physicalDevice, + uint32_t * pTimeDomainCount, + VkTimeDomainKHR * pTimeDomains ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( physicalDevice, pTimeDomainCount, pTimeDomains ); + } + + VkResult vkGetCalibratedTimestampsKHR( VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetCalibratedTimestampsKHR( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); + } + + //=== VK_KHR_maintenance6 === + + void vkCmdBindDescriptorSets2KHR( VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfo * pBindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorSets2KHR( commandBuffer, pBindDescriptorSetsInfo ); + } + + void vkCmdPushConstants2KHR( VkCommandBuffer commandBuffer, const VkPushConstantsInfo * pPushConstantsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushConstants2KHR( commandBuffer, pPushConstantsInfo ); + } + + void vkCmdPushDescriptorSet2KHR( VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfo * pPushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSet2KHR( commandBuffer, pPushDescriptorSetInfo ); + } + + void vkCmdPushDescriptorSetWithTemplate2KHR( VkCommandBuffer commandBuffer, + const VkPushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetWithTemplate2KHR( commandBuffer, pPushDescriptorSetWithTemplateInfo ); + } + + void vkCmdSetDescriptorBufferOffsets2EXT( VkCommandBuffer commandBuffer, + const VkSetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDescriptorBufferOffsets2EXT( commandBuffer, pSetDescriptorBufferOffsetsInfo ); + } + + void vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( + VkCommandBuffer commandBuffer, + const VkBindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( commandBuffer, pBindDescriptorBufferEmbeddedSamplersInfo ); + } + + //=== VK_QCOM_tile_memory_heap === + + void vkCmdBindTileMemoryQCOM( VkCommandBuffer commandBuffer, const VkTileMemoryBindInfoQCOM * pTileMemoryBindInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindTileMemoryQCOM( commandBuffer, pTileMemoryBindInfo ); + } + + //=== VK_NV_external_compute_queue === + + VkResult vkCreateExternalComputeQueueNV( VkDevice device, + const VkExternalComputeQueueCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkExternalComputeQueueNV * pExternalQueue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateExternalComputeQueueNV( device, pCreateInfo, pAllocator, pExternalQueue ); + } + + void vkDestroyExternalComputeQueueNV( VkDevice device, + VkExternalComputeQueueNV externalQueue, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyExternalComputeQueueNV( device, externalQueue, pAllocator ); + } + + void vkGetExternalComputeQueueDataNV( VkExternalComputeQueueNV externalQueue, + VkExternalComputeQueueDataParamsNV * params, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetExternalComputeQueueDataNV( externalQueue, params, pData ); + } + + //=== VK_NV_cluster_acceleration_structure === + + void vkGetClusterAccelerationStructureBuildSizesNV( VkDevice device, + const VkClusterAccelerationStructureInputInfoNV * pInfo, + VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetClusterAccelerationStructureBuildSizesNV( device, pInfo, pSizeInfo ); + } + + void vkCmdBuildClusterAccelerationStructureIndirectNV( VkCommandBuffer commandBuffer, + const VkClusterAccelerationStructureCommandsInfoNV * pCommandInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildClusterAccelerationStructureIndirectNV( commandBuffer, pCommandInfos ); + } + + //=== VK_NV_partitioned_acceleration_structure === + + void vkGetPartitionedAccelerationStructuresBuildSizesNV( VkDevice device, + const VkPartitionedAccelerationStructureInstancesInputNV * pInfo, + VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPartitionedAccelerationStructuresBuildSizesNV( device, pInfo, pSizeInfo ); + } + + void vkCmdBuildPartitionedAccelerationStructuresNV( VkCommandBuffer commandBuffer, + const VkBuildPartitionedAccelerationStructureInfoNV * pBuildInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildPartitionedAccelerationStructuresNV( commandBuffer, pBuildInfo ); + } + + //=== VK_EXT_device_generated_commands === + + void vkGetGeneratedCommandsMemoryRequirementsEXT( VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoEXT * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetGeneratedCommandsMemoryRequirementsEXT( device, pInfo, pMemoryRequirements ); + } + + void vkCmdPreprocessGeneratedCommandsEXT( VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoEXT * pGeneratedCommandsInfo, + VkCommandBuffer stateCommandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPreprocessGeneratedCommandsEXT( 
commandBuffer, pGeneratedCommandsInfo, stateCommandBuffer ); + } + + void vkCmdExecuteGeneratedCommandsEXT( VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoEXT * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteGeneratedCommandsEXT( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); + } + + VkResult vkCreateIndirectCommandsLayoutEXT( VkDevice device, + const VkIndirectCommandsLayoutCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkIndirectCommandsLayoutEXT * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIndirectCommandsLayoutEXT( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); + } + + void vkDestroyIndirectCommandsLayoutEXT( VkDevice device, + VkIndirectCommandsLayoutEXT indirectCommandsLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyIndirectCommandsLayoutEXT( device, indirectCommandsLayout, pAllocator ); + } + + VkResult vkCreateIndirectExecutionSetEXT( VkDevice device, + const VkIndirectExecutionSetCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkIndirectExecutionSetEXT * pIndirectExecutionSet ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIndirectExecutionSetEXT( device, pCreateInfo, pAllocator, pIndirectExecutionSet ); + } + + void vkDestroyIndirectExecutionSetEXT( VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyIndirectExecutionSetEXT( device, indirectExecutionSet, pAllocator ); + } + + void vkUpdateIndirectExecutionSetPipelineEXT( VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VkWriteIndirectExecutionSetPipelineEXT * pExecutionSetWrites ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateIndirectExecutionSetPipelineEXT( device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites ); + } + + void vkUpdateIndirectExecutionSetShaderEXT( VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VkWriteIndirectExecutionSetShaderEXT * pExecutionSetWrites ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateIndirectExecutionSetShaderEXT( device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites ); + } + + //=== VK_NV_cooperative_matrix2 === + + VkResult vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkCooperativeMatrixFlexibleDimensionsPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( physicalDevice, pPropertyCount, pProperties ); + } + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + + VkResult + vkGetMemoryMetalHandleEXT( VkDevice device, const VkMemoryGetMetalHandleInfoEXT * pGetMetalHandleInfo, void ** pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryMetalHandleEXT( device, pGetMetalHandleInfo, pHandle ); + } + + VkResult vkGetMemoryMetalHandlePropertiesEXT( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void * pHandle, + VkMemoryMetalHandlePropertiesEXT * pMemoryMetalHandleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryMetalHandlePropertiesEXT( device, handleType, pHandle, pMemoryMetalHandleProperties ); + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + 
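    // Annotation (not generated code): every wrapper in this static dispatcher has the same shape,
    // a member function with the raw C signature that forwards to the identically named global
    // entry point from vulkan.h, so it resolves only when the application links the Vulkan loader
    // directly. A minimal, hypothetical usage sketch (handle values assumed to exist elsewhere):
    //
    //   VkCommandBuffer cmd = /* ... */;
    //   vk::detail::DispatchLoaderStatic & d = vk::detail::getDispatchLoaderStatic();
    //   d.vkCmdSetDepthClipEnableEXT( cmd, VK_TRUE );   // forwards to ::vkCmdSetDepthClipEnableEXT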
+
+    //=== VK_EXT_fragment_density_map_offset ===
+
+    void vkCmdEndRendering2EXT( VkCommandBuffer commandBuffer, const VkRenderingEndInfoEXT * pRenderingEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndRendering2EXT( commandBuffer, pRenderingEndInfo );
+    }
+  };
+
+  inline DispatchLoaderStatic & getDispatchLoaderStatic()
+  {
+    static DispatchLoaderStatic dls;
+    return dls;
+  }
+#endif
+
+  }  // namespace detail
+#if ( 14 <= VULKAN_HPP_CPP_VERSION )
+  using std::exchange;
+#else
+  template <typename T, typename U = T>
+  VULKAN_HPP_CONSTEXPR_14 VULKAN_HPP_INLINE T exchange( T & obj, U && newValue )
+  {
+    T oldValue = std::move( obj );
+    obj        = std::forward<U>( newValue );
+    return oldValue;
+  }
+#endif
+
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+  struct AllocationCallbacks;
+
+  namespace detail
+  {
+    template <typename OwnerType, typename Dispatch>
+    class ObjectDestroy
+    {
+    public:
+      ObjectDestroy() = default;
+
+      ObjectDestroy( OwnerType owner,
+                     Optional<const AllocationCallbacks> allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                     Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+        : m_owner( owner )
+        , m_allocationCallbacks( allocationCallbacks )
+        , m_dispatch( &dispatch )
+      {
+      }
+
+      OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_owner;
+      }
+
+      Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_allocationCallbacks;
+      }
+
+      Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT
+      {
+        return *m_dispatch;
+      }
+
+    protected:
+      template <typename T>
+      void destroy( T t ) VULKAN_HPP_NOEXCEPT
+      {
+        VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+        m_owner.destroy( t, m_allocationCallbacks, *m_dispatch );
+      }
+
+    private:
+      OwnerType                           m_owner               = {};
+      Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+      Dispatch const *                    m_dispatch            = nullptr;
+    };
+
+    class NoParent;
+
+    template <typename Dispatch>
+    class ObjectDestroy<NoParent, Dispatch>
+    {
+    public:
+      ObjectDestroy() = default;
+
+      ObjectDestroy( Optional<const AllocationCallbacks> allocationCallbacks,
+                     Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+        : m_allocationCallbacks( allocationCallbacks )
+        , m_dispatch( &dispatch )
+      {
+      }
+
+      Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_allocationCallbacks;
+      }
+
+      Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT
+      {
+        return *m_dispatch;
+      }
+
+    protected:
+      template <typename T>
+      void destroy( T t ) VULKAN_HPP_NOEXCEPT
+      {
+        VULKAN_HPP_ASSERT( m_dispatch );
+        t.destroy( m_allocationCallbacks, *m_dispatch );
+      }
+
+    private:
+      Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+      Dispatch const *                    m_dispatch            = nullptr;
+    };
+
+    template <typename OwnerType, typename Dispatch>
+    class ObjectFree
+    {
+    public:
+      ObjectFree() = default;
+
+      ObjectFree( OwnerType owner,
+                  Optional<const AllocationCallbacks> allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                  Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+        : m_owner( owner )
+        , m_allocationCallbacks( allocationCallbacks )
+        , m_dispatch( &dispatch )
+      {
+      }
+
+      OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_owner;
+      }
+
+      Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_allocationCallbacks;
+      }
+
+      Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT
+      {
+        return *m_dispatch;
+      }
+
+    protected:
+      template <typename T>
+      void destroy( T t ) VULKAN_HPP_NOEXCEPT
+      {
+        VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+        ( m_owner.free )( t, m_allocationCallbacks, *m_dispatch );
+      }
+
+    private:
+      OwnerType                           m_owner               = {};
+      Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+      Dispatch const *                    m_dispatch            = nullptr;
+    };
+
+    template <typename OwnerType, typename Dispatch>
+    class ObjectRelease
+    {
+    public:
+      ObjectRelease() = default;
+
+      ObjectRelease( OwnerType owner, Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+        : m_owner( owner )
+        , m_dispatch( &dispatch )
+      {
+      }
+
+      OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_owner;
+      }
+
+      Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT
+      {
+        return *m_dispatch;
+      }
+
+    protected:
+      template <typename T>
+      void destroy( T t ) VULKAN_HPP_NOEXCEPT
+      {
+        VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+        m_owner.release( t, *m_dispatch );
+      }
+
+    private:
+      OwnerType        m_owner    = {};
+      Dispatch const * m_dispatch = nullptr;
+    };
+
+    template <typename OwnerType, typename PoolType, typename Dispatch>
+    class PoolFree
+    {
+    public:
+      PoolFree() = default;
+
+      PoolFree( OwnerType owner, PoolType pool, Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+        : m_owner( owner )
+        , m_pool( pool )
+        , m_dispatch( &dispatch )
+      {
+      }
+
+      OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_owner;
+      }
+
+      PoolType getPool() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_pool;
+      }
+
+      Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT
+      {
+        return *m_dispatch;
+      }
+
+    protected:
+      template <typename T>
+      void destroy( T t ) VULKAN_HPP_NOEXCEPT
+      {
+        ( m_owner.free )( m_pool, t, *m_dispatch );
+      }
+
+    private:
+      OwnerType        m_owner    = OwnerType();
+      PoolType         m_pool     = PoolType();
+      Dispatch const * m_dispatch = nullptr;
+    };
+
+  }  // namespace detail
+#endif  // !VULKAN_HPP_NO_SMART_HANDLE
+
+  //==================
+  //=== BASE TYPEs ===
+  //==================
+
+  using Bool32          = uint32_t;
+  using DeviceAddress   = uint64_t;
+  using DeviceSize      = uint64_t;
+  using RemoteAddressNV = void *;
+  using SampleMask      = uint32_t;
+
+  template <typename EnumType, EnumType value>
+  struct CppType
+  {
+  };
+}  // namespace VULKAN_HPP_NAMESPACE
+
+#include <vulkan/vulkan_enums.hpp>
+#if !defined( VULKAN_HPP_NO_TO_STRING )
+#  include <vulkan/vulkan_to_string.hpp>
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+namespace std
+{
+  template <>
+  struct is_error_code_enum<VULKAN_HPP_NAMESPACE::Result> : public true_type
+  {
+  };
+}  // namespace std
+#endif
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  class ErrorCategoryImpl : public std::error_category
+  {
+  public:
+    virtual const char * name() const VULKAN_HPP_NOEXCEPT override
+    {
+      return VULKAN_HPP_NAMESPACE_STRING "::Result";
+    }
+
+    virtual std::string message( int ev ) const override
+    {
+#  if defined( VULKAN_HPP_NO_TO_STRING )
+      return std::to_string( ev );
+#  else
+      return VULKAN_HPP_NAMESPACE::to_string( static_cast<VULKAN_HPP_NAMESPACE::Result>( ev ) );
+#  endif
+    }
+  };
+
+  class Error
+  {
+  public:
+    Error() VULKAN_HPP_NOEXCEPT                = default;
+    Error( const Error & ) VULKAN_HPP_NOEXCEPT = default;
+    virtual ~Error() VULKAN_HPP_NOEXCEPT       = default;
+
+    virtual const char * what() const VULKAN_HPP_NOEXCEPT = 0;
+  };
+
+  class LogicError
+    : public Error
+    , public std::logic_error
+  {
+  public:
+    explicit LogicError( const std::string & what ) : Error(), std::logic_error( what ) {}
+
+    explicit LogicError( char const * what ) : Error(), std::logic_error( what ) {}
+
+    virtual const char * what() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::logic_error::what();
+    }
+  };
+
+  class SystemError
+    : public Error
+    , public std::system_error
+  {
+  public:
+    SystemError( std::error_code ec ) : Error(), std::system_error( ec ) {}
+
+    SystemError( std::error_code ec, std::string const & what ) : Error(), std::system_error( ec, what ) {}
+
+    SystemError( std::error_code ec, char const * what ) : Error(), std::system_error( ec, what ) {}
+
+    SystemError( int ev, std::error_category const & ecat ) : Error(), std::system_error( ev, ecat ) {}
+
+    SystemError( int ev, std::error_category const & ecat, std::string const & what ) : Error(), std::system_error( ev, ecat, what ) {}
+
+    SystemError( int ev, std::error_category const & ecat, char const * what ) : Error(), std::system_error( ev, ecat, what ) {}
+
+    virtual const char * what() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::system_error::what();
+    }
+  };
+
+  VULKAN_HPP_INLINE const std::error_category & errorCategory() VULKAN_HPP_NOEXCEPT
+  {
+    static ErrorCategoryImpl instance;
+    return instance;
+  }
+
+  VULKAN_HPP_INLINE std::error_code make_error_code( Result e ) VULKAN_HPP_NOEXCEPT
+  {
+    return std::error_code( static_cast<int>( e ), errorCategory() );
+  }
+
+  VULKAN_HPP_INLINE std::error_condition make_error_condition( Result e ) VULKAN_HPP_NOEXCEPT
+  {
+    return std::error_condition( static_cast<int>( e ), errorCategory() );
+  }
+
+  class OutOfHostMemoryError : public SystemError
+  {
+  public:
+    OutOfHostMemoryError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+
+    OutOfHostMemoryError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+  };
+
+  class OutOfDeviceMemoryError : public SystemError
+  {
+  public:
+    OutOfDeviceMemoryError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+
+    OutOfDeviceMemoryError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+  };
+
+  class InitializationFailedError : public SystemError
+  {
+  public:
+    InitializationFailedError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+
+    InitializationFailedError( char const * message ) : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+  };
+
+  class DeviceLostError : public SystemError
+  {
+  public:
+    DeviceLostError( std::string const & message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
+
+    DeviceLostError( char const * message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
+  };
+
+  class MemoryMapFailedError : public SystemError
+  {
+  public:
+    MemoryMapFailedError( std::string const & message ) : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+
+    MemoryMapFailedError( char const * message ) : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+  };
+
+  class LayerNotPresentError : public SystemError
+  {
+  public:
+    LayerNotPresentError( std::string const & message ) : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
+
+    LayerNotPresentError( char const * message ) : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
+  };
+
+  class ExtensionNotPresentError : public SystemError
+  {
+  public:
+    ExtensionNotPresentError( std::string const & message ) : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
+
+    ExtensionNotPresentError( char const * message ) : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
+  };
+
+  class FeatureNotPresentError : public SystemError
+  {
+  public:
+    FeatureNotPresentError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
+
+    FeatureNotPresentError( char const * message ) : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
+  };
+
+  class IncompatibleDriverError : public
SystemError + { + public: + IncompatibleDriverError( std::string const & message ) : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {} + + IncompatibleDriverError( char const * message ) : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {} + }; + + class TooManyObjectsError : public SystemError + { + public: + TooManyObjectsError( std::string const & message ) : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {} + + TooManyObjectsError( char const * message ) : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {} + }; + + class FormatNotSupportedError : public SystemError + { + public: + FormatNotSupportedError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {} + + FormatNotSupportedError( char const * message ) : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {} + }; + + class FragmentedPoolError : public SystemError + { + public: + FragmentedPoolError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {} + + FragmentedPoolError( char const * message ) : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {} + }; + + class UnknownError : public SystemError + { + public: + UnknownError( std::string const & message ) : SystemError( make_error_code( Result::eErrorUnknown ), message ) {} + + UnknownError( char const * message ) : SystemError( make_error_code( Result::eErrorUnknown ), message ) {} + }; + + class OutOfPoolMemoryError : public SystemError + { + public: + OutOfPoolMemoryError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {} + + OutOfPoolMemoryError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {} + }; + + class InvalidExternalHandleError : public SystemError + { + public: + InvalidExternalHandleError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {} + + InvalidExternalHandleError( char const * message ) : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {} + }; + + class FragmentationError : public SystemError + { + public: + FragmentationError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {} + + FragmentationError( char const * message ) : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {} + }; + + class InvalidOpaqueCaptureAddressError : public SystemError + { + public: + InvalidOpaqueCaptureAddressError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {} + + InvalidOpaqueCaptureAddressError( char const * message ) : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {} + }; + + class NotPermittedError : public SystemError + { + public: + NotPermittedError( std::string const & message ) : SystemError( make_error_code( Result::eErrorNotPermitted ), message ) {} + + NotPermittedError( char const * message ) : SystemError( make_error_code( Result::eErrorNotPermitted ), message ) {} + }; + + class SurfaceLostKHRError : public SystemError + { + public: + SurfaceLostKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {} + + SurfaceLostKHRError( char const * message ) : 
SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {} + }; + + class NativeWindowInUseKHRError : public SystemError + { + public: + NativeWindowInUseKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} + + NativeWindowInUseKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} + }; + + class OutOfDateKHRError : public SystemError + { + public: + OutOfDateKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} + + OutOfDateKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} + }; + + class IncompatibleDisplayKHRError : public SystemError + { + public: + IncompatibleDisplayKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} + + IncompatibleDisplayKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} + }; + + class ValidationFailedEXTError : public SystemError + { + public: + ValidationFailedEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {} + + ValidationFailedEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {} + }; + + class InvalidShaderNVError : public SystemError + { + public: + InvalidShaderNVError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} + + InvalidShaderNVError( char const * message ) : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} + }; + + class ImageUsageNotSupportedKHRError : public SystemError + { + public: + ImageUsageNotSupportedKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorImageUsageNotSupportedKHR ), message ) {} + + ImageUsageNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorImageUsageNotSupportedKHR ), message ) {} + }; + + class VideoPictureLayoutNotSupportedKHRError : public SystemError + { + public: + VideoPictureLayoutNotSupportedKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorVideoPictureLayoutNotSupportedKHR ), message ) + { + } + + VideoPictureLayoutNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoPictureLayoutNotSupportedKHR ), message ) + { + } + }; + + class VideoProfileOperationNotSupportedKHRError : public SystemError + { + public: + VideoProfileOperationNotSupportedKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorVideoProfileOperationNotSupportedKHR ), message ) + { + } + + VideoProfileOperationNotSupportedKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorVideoProfileOperationNotSupportedKHR ), message ) + { + } + }; + + class VideoProfileFormatNotSupportedKHRError : public SystemError + { + public: + VideoProfileFormatNotSupportedKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorVideoProfileFormatNotSupportedKHR ), message ) + { + } + + VideoProfileFormatNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoProfileFormatNotSupportedKHR ), message ) + { + } + }; + + class VideoProfileCodecNotSupportedKHRError : public 
SystemError + { + public: + VideoProfileCodecNotSupportedKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorVideoProfileCodecNotSupportedKHR ), message ) + { + } + + VideoProfileCodecNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoProfileCodecNotSupportedKHR ), message ) {} + }; + + class VideoStdVersionNotSupportedKHRError : public SystemError + { + public: + VideoStdVersionNotSupportedKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorVideoStdVersionNotSupportedKHR ), message ) + { + } + + VideoStdVersionNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoStdVersionNotSupportedKHR ), message ) {} + }; + + class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError + { + public: + InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) + { + } + + InvalidDrmFormatModifierPlaneLayoutEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) + { + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + class FullScreenExclusiveModeLostEXTError : public SystemError + { + public: + FullScreenExclusiveModeLostEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) + { + } + + FullScreenExclusiveModeLostEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {} + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + class InvalidVideoStdParametersKHRError : public SystemError + { + public: + InvalidVideoStdParametersKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInvalidVideoStdParametersKHR ), message ) {} + + InvalidVideoStdParametersKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorInvalidVideoStdParametersKHR ), message ) {} + }; + + class CompressionExhaustedEXTError : public SystemError + { + public: + CompressionExhaustedEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {} + + CompressionExhaustedEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {} + }; + + class NotEnoughSpaceKHRError : public SystemError + { + public: + NotEnoughSpaceKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorNotEnoughSpaceKHR ), message ) {} + + NotEnoughSpaceKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorNotEnoughSpaceKHR ), message ) {} + }; + + namespace detail + { + [[noreturn]] VULKAN_HPP_INLINE void throwResultException( Result result, char const * message ) + { + switch ( result ) + { + case Result::eErrorOutOfHostMemory: throw OutOfHostMemoryError( message ); + case Result::eErrorOutOfDeviceMemory: throw OutOfDeviceMemoryError( message ); + case Result::eErrorInitializationFailed: throw InitializationFailedError( message ); + case Result::eErrorDeviceLost: throw DeviceLostError( message ); + case Result::eErrorMemoryMapFailed: throw MemoryMapFailedError( message ); + case Result::eErrorLayerNotPresent: throw LayerNotPresentError( message ); + case Result::eErrorExtensionNotPresent: throw ExtensionNotPresentError( message ); + case 
Result::eErrorFeatureNotPresent: throw FeatureNotPresentError( message );
+        case Result::eErrorIncompatibleDriver: throw IncompatibleDriverError( message );
+        case Result::eErrorTooManyObjects: throw TooManyObjectsError( message );
+        case Result::eErrorFormatNotSupported: throw FormatNotSupportedError( message );
+        case Result::eErrorFragmentedPool: throw FragmentedPoolError( message );
+        case Result::eErrorUnknown: throw UnknownError( message );
+        case Result::eErrorOutOfPoolMemory: throw OutOfPoolMemoryError( message );
+        case Result::eErrorInvalidExternalHandle: throw InvalidExternalHandleError( message );
+        case Result::eErrorFragmentation: throw FragmentationError( message );
+        case Result::eErrorInvalidOpaqueCaptureAddress: throw InvalidOpaqueCaptureAddressError( message );
+        case Result::eErrorNotPermitted: throw NotPermittedError( message );
+        case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError( message );
+        case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError( message );
+        case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError( message );
+        case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError( message );
+        case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message );
+        case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message );
+        case Result::eErrorImageUsageNotSupportedKHR: throw ImageUsageNotSupportedKHRError( message );
+        case Result::eErrorVideoPictureLayoutNotSupportedKHR: throw VideoPictureLayoutNotSupportedKHRError( message );
+        case Result::eErrorVideoProfileOperationNotSupportedKHR: throw VideoProfileOperationNotSupportedKHRError( message );
+        case Result::eErrorVideoProfileFormatNotSupportedKHR: throw VideoProfileFormatNotSupportedKHRError( message );
+        case Result::eErrorVideoProfileCodecNotSupportedKHR: throw VideoProfileCodecNotSupportedKHRError( message );
+        case Result::eErrorVideoStdVersionNotSupportedKHR: throw VideoStdVersionNotSupportedKHRError( message );
+        case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError( message );
+#  if defined( VK_USE_PLATFORM_WIN32_KHR )
+        case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message );
+#  endif /*VK_USE_PLATFORM_WIN32_KHR*/
+        case Result::eErrorInvalidVideoStdParametersKHR: throw InvalidVideoStdParametersKHRError( message );
+        case Result::eErrorCompressionExhaustedEXT: throw CompressionExhaustedEXTError( message );
+        case Result::eErrorNotEnoughSpaceKHR: throw NotEnoughSpaceKHRError( message );
+        default: throw SystemError( make_error_code( result ), message );
+      }
+    }
+  }  // namespace detail
+#endif
+
+  template <typename T>
+  struct ResultValue
+  {
+#ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue( Result r, T & v ) VULKAN_HPP_NOEXCEPT( VULKAN_HPP_NOEXCEPT( T( v ) ) )
+#else
+    ResultValue( Result r, T & v )
+#endif
+      : result( r ), value( v )
+    {
+    }
+
+#ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue( Result r, T && v ) VULKAN_HPP_NOEXCEPT( VULKAN_HPP_NOEXCEPT( T( std::move( v ) ) ) )
+#else
+    ResultValue( Result r, T && v )
+#endif
+      : result( r ), value( std::move( v ) )
+    {
+    }
+
+    Result result;
+    T      value;
+
+    operator std::tuple<Result &, T &>() VULKAN_HPP_NOEXCEPT
+    {
+      return std::tuple<Result &, T &>( result, value );
+    }
+  };
+
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+  template <typename Type, typename Dispatch>
+  struct ResultValue<UniqueHandle<Type, Dispatch>>
+  {
+#  ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue( Result r, UniqueHandle<Type, Dispatch> && v ) VULKAN_HPP_NOEXCEPT
+#  else
+    ResultValue( Result r, UniqueHandle<Type, Dispatch> && v )
+#  endif
+      : result( r )
+      , value( std::move( v ) )
+    {
+    }
+
+    VULKAN_HPP_DEPRECATED(
+      "asTuple() on an l-value is deprecated, as it implicitly moves the UniqueHandle out of the ResultValue. Use asTuple() on an r-value instead, requiring to explicitly move the UniqueHandle." )
+
+    std::tuple<Result, UniqueHandle<Type, Dispatch>> asTuple() &
+    {
+      return std::make_tuple( result, std::move( value ) );
+    }
+
+    std::tuple<Result, UniqueHandle<Type, Dispatch>> asTuple() &&
+    {
+      return std::make_tuple( result, std::move( value ) );
+    }
+
+    Result                       result;
+    UniqueHandle<Type, Dispatch> value;
+  };
+
+  template <typename Type, typename Dispatch>
+  struct ResultValue<std::vector<UniqueHandle<Type, Dispatch>>>
+  {
+#  ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v ) VULKAN_HPP_NOEXCEPT
+#  else
+    ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v )
+#  endif
+      : result( r )
+      , value( std::move( v ) )
+    {
+    }
+
+    VULKAN_HPP_DEPRECATED(
+      "asTuple() on an l-value is deprecated, as it implicitly moves the UniqueHandle out of the ResultValue. Use asTuple() on an r-value instead, requiring to explicitly move the UniqueHandle." )
+
+    std::tuple<Result, std::vector<UniqueHandle<Type, Dispatch>>> asTuple() &
+    {
+      return std::make_tuple( result, std::move( value ) );
+    }
+
+    std::tuple<Result, std::vector<UniqueHandle<Type, Dispatch>>> asTuple() &&
+    {
+      return std::make_tuple( result, std::move( value ) );
+    }
+
+    Result                                    result;
+    std::vector<UniqueHandle<Type, Dispatch>> value;
+  };
+#endif
+
+  template <typename T>
+  struct ResultValueType
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    using type = ResultValue<T>;
+#else
+    using type = T;
+#endif
+  };
+
+  template <>
+  struct ResultValueType<void>
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    using type = Result;
+#else
+    using type = void;
+#endif
+  };
+
+  namespace detail
+  {
+    template <typename T>
+    void ignore( T const & ) VULKAN_HPP_NOEXCEPT
+    {
+    }
+
+    VULKAN_HPP_INLINE typename VULKAN_HPP_NAMESPACE::ResultValueType<void>::type createResultValueType( VULKAN_HPP_NAMESPACE::Result result )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      return result;
+#else
+      VULKAN_HPP_NAMESPACE::detail::ignore( result );
+#endif
+    }
+
+    template <typename T>
+    VULKAN_HPP_INLINE typename VULKAN_HPP_NAMESPACE::ResultValueType<T>::type createResultValueType( VULKAN_HPP_NAMESPACE::Result result, T & data )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      return ResultValue<T>( result, data );
+#else
+      VULKAN_HPP_NAMESPACE::detail::ignore( result );
+      return data;
+#endif
+    }
+
+    template <typename T>
+    VULKAN_HPP_INLINE typename VULKAN_HPP_NAMESPACE::ResultValueType<T>::type createResultValueType( VULKAN_HPP_NAMESPACE::Result result, T && data )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      return ResultValue<T>( result, std::move( data ) );
+#else
+      VULKAN_HPP_NAMESPACE::detail::ignore( result );
+      return std::move( data );
+#endif
+    }
+  }  // namespace detail
+
+  namespace detail
+  {
+    VULKAN_HPP_INLINE void resultCheck( Result result, char const * message )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_NAMESPACE::detail::ignore( result );  // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
+      VULKAN_HPP_NAMESPACE::detail::ignore( message );
+      VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
+#else
+      if ( result != Result::eSuccess )
+      {
+        VULKAN_HPP_NAMESPACE::detail::throwResultException( result, message );
+      }
+#endif
+    }
+
+    VULKAN_HPP_INLINE void resultCheck( Result result, char const * message, std::initializer_list<Result> successCodes )
+    {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_NAMESPACE::detail::ignore( result );  // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
+      VULKAN_HPP_NAMESPACE::detail::ignore( message );
+      VULKAN_HPP_NAMESPACE::detail::ignore( successCodes );  // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
+      VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#else + if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) + { + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, message ); + } +#endif + } + } // namespace detail + + //=========================== + //=== CONSTEXPR CONSTANTs === + //=========================== + + //=== VK_VERSION_1_0 === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t AttachmentUnused = VK_ATTACHMENT_UNUSED; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t False = VK_FALSE; + VULKAN_HPP_CONSTEXPR_INLINE float LodClampNone = VK_LOD_CLAMP_NONE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyIgnored = VK_QUEUE_FAMILY_IGNORED; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t RemainingArrayLayers = VK_REMAINING_ARRAY_LAYERS; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t RemainingMipLevels = VK_REMAINING_MIP_LEVELS; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t SubpassExternal = VK_SUBPASS_EXTERNAL; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t True = VK_TRUE; + VULKAN_HPP_CONSTEXPR_INLINE uint64_t WholeSize = VK_WHOLE_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxMemoryTypes = VK_MAX_MEMORY_TYPES; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxPhysicalDeviceNameSize = VK_MAX_PHYSICAL_DEVICE_NAME_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t UuidSize = VK_UUID_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxExtensionNameSize = VK_MAX_EXTENSION_NAME_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDescriptionSize = VK_MAX_DESCRIPTION_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxMemoryHeaps = VK_MAX_MEMORY_HEAPS; + + //=== VK_VERSION_1_1 === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDeviceGroupSize = VK_MAX_DEVICE_GROUP_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t LuidSize = VK_LUID_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyExternal = VK_QUEUE_FAMILY_EXTERNAL; + + //=== VK_VERSION_1_2 === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverNameSize = VK_MAX_DRIVER_NAME_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverInfoSize = VK_MAX_DRIVER_INFO_SIZE; + + //=== VK_VERSION_1_4 === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySize = VK_MAX_GLOBAL_PRIORITY_SIZE; + + //=== VK_KHR_device_group_creation === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDeviceGroupSizeKHR = VK_MAX_DEVICE_GROUP_SIZE_KHR; + + //=== VK_KHR_external_memory_capabilities === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t LuidSizeKHR = VK_LUID_SIZE_KHR; + + //=== VK_KHR_external_memory === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyExternalKHR = VK_QUEUE_FAMILY_EXTERNAL_KHR; + + //=== VK_EXT_queue_family_foreign === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyForeignEXT = VK_QUEUE_FAMILY_FOREIGN_EXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderIndexUnusedAMDX = VK_SHADER_INDEX_UNUSED_AMDX; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_ray_tracing_pipeline === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderUnusedKHR = VK_SHADER_UNUSED_KHR; + + //=== VK_NV_ray_tracing === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderUnusedNV = VK_SHADER_UNUSED_NV; + + //=== VK_KHR_global_priority === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySizeKHR = VK_MAX_GLOBAL_PRIORITY_SIZE_KHR; + + //=== VK_KHR_driver_properties === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverNameSizeKHR = VK_MAX_DRIVER_NAME_SIZE_KHR; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverInfoSizeKHR = VK_MAX_DRIVER_INFO_SIZE_KHR; + + //=== VK_EXT_global_priority_query === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySizeEXT = VK_MAX_GLOBAL_PRIORITY_SIZE_EXT; + + 
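  // Annotation (not generated code): the pieces above fit together as follows. Every throwing
  // wrapper funnels its VkResult through detail::resultCheck(), which either asserts under
  // VULKAN_HPP_NO_EXCEPTIONS or calls detail::throwResultException() to map the code onto one of
  // the vk::*Error classes, all of which derive from vk::SystemError. A hedged call-site sketch,
  // assuming a vk::Device named device and a buffer create info named createInfo exist elsewhere:
  //
  //   try
  //   {
  //     vk::Buffer buffer = device.createBuffer( createInfo );   // throws on any error result
  //   }
  //   catch ( vk::OutOfDeviceMemoryError const & e )
  //   {
  //     // fall back to a smaller allocation; e.what() carries the failing call's name
  //   }
  //   catch ( vk::SystemError const & e )
  //   {
  //     // any other non-success VkResult ends up here
  //   }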
//=== VK_EXT_image_sliced_view_of_3d ===
+  VULKAN_HPP_CONSTEXPR_INLINE uint32_t Remaining3DSlicesEXT = VK_REMAINING_3D_SLICES_EXT;
+
+  //=== VK_EXT_shader_module_identifier ===
+  VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxShaderModuleIdentifierSizeEXT = VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT;
+
+  //=== VK_KHR_pipeline_binary ===
+  VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxPipelineBinaryKeySizeKHR = VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR;
+
+  //=== VK_KHR_video_decode_av1 ===
+  VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxVideoAv1ReferencesPerFrameKHR = VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR;
+
+  //=== VK_NV_partitioned_acceleration_structure ===
+  VULKAN_HPP_CONSTEXPR_INLINE uint32_t PartitionedAccelerationStructurePartitionIndexGlobalNV = VK_PARTITIONED_ACCELERATION_STRUCTURE_PARTITION_INDEX_GLOBAL_NV;
+
+  //========================
+  //=== CONSTEXPR VALUEs ===
+  //========================
+  VULKAN_HPP_CONSTEXPR_INLINE uint32_t HeaderVersion      = VK_HEADER_VERSION;
+  VULKAN_HPP_CONSTEXPR_INLINE uint32_t Use64BitPtrDefines = VK_USE_64_BIT_PTR_DEFINES;
+
+  //=========================
+  //=== CONSTEXPR CALLEEs ===
+  //=========================
+  template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
+  VULKAN_HPP_CONSTEXPR uint32_t apiVersionMajor( T const version )
+  {
+    return ( ( (uint32_t)( version ) >> 22U ) & 0x7FU );
+  }
+
+  template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
+  VULKAN_HPP_CONSTEXPR uint32_t apiVersionMinor( T const version )
+  {
+    return ( ( (uint32_t)( version ) >> 12U ) & 0x3FFU );
+  }
+
+  template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
+  VULKAN_HPP_CONSTEXPR uint32_t apiVersionPatch( T const version )
+  {
+    return ( (uint32_t)(version)&0xFFFU );
+  }
+
+  template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
+  VULKAN_HPP_CONSTEXPR uint32_t apiVersionVariant( T const version )
+  {
+    return ( (uint32_t)( version ) >> 29U );
+  }
+
+  template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
+  VULKAN_HPP_CONSTEXPR uint32_t makeApiVersion( T const variant, T const major, T const minor, T const patch )
+  {
+    return ( ( ( (uint32_t)( variant ) ) << 29U ) | ( ( (uint32_t)( major ) ) << 22U ) | ( ( (uint32_t)( minor ) ) << 12U ) | ( (uint32_t)( patch ) ) );
+  }
+
+  template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
+  VULKAN_HPP_DEPRECATED( "This define is deprecated. VK_MAKE_API_VERSION should be used instead." )
+  VULKAN_HPP_CONSTEXPR uint32_t makeVersion( T const major, T const minor, T const patch )
+  {
+    return ( ( ( (uint32_t)( major ) ) << 22U ) | ( ( (uint32_t)( minor ) ) << 12U ) | ( (uint32_t)( patch ) ) );
+  }
+
+  template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
+  VULKAN_HPP_DEPRECATED( "This define is deprecated. VK_API_VERSION_MAJOR should be used instead." )
+  VULKAN_HPP_CONSTEXPR uint32_t versionMajor( T const version )
+  {
+    return ( (uint32_t)( version ) >> 22U );
+  }
+
+  template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
+  VULKAN_HPP_DEPRECATED( "This define is deprecated. VK_API_VERSION_MINOR should be used instead." )
+  VULKAN_HPP_CONSTEXPR uint32_t versionMinor( T const version )
+  {
+    return ( ( (uint32_t)( version ) >> 12U ) & 0x3FFU );
+  }
+
+  template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
+  VULKAN_HPP_DEPRECATED( "This define is deprecated. VK_API_VERSION_PATCH should be used instead."
) + VULKAN_HPP_CONSTEXPR uint32_t versionPatch( T const version ) + { + return ( (uint32_t)(version)&0xFFFU ); + } + + //========================= + //=== CONSTEXPR CALLERs === + //========================= + VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion = makeApiVersion( 0, 1, 0, 0 ); + VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion10 = makeApiVersion( 0, 1, 0, 0 ); + VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion11 = makeApiVersion( 0, 1, 1, 0 ); + VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion12 = makeApiVersion( 0, 1, 2, 0 ); + VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion13 = makeApiVersion( 0, 1, 3, 0 ); + VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion14 = makeApiVersion( 0, 1, 4, 0 ); + VULKAN_HPP_CONSTEXPR_INLINE auto HeaderVersionComplete = makeApiVersion( 0, 1, 4, VK_HEADER_VERSION ); + + //================================= + //=== CONSTEXPR EXTENSION NAMEs === + //================================= + + //=== VK_KHR_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSurfaceExtensionName = VK_KHR_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSurfaceSpecVersion = VK_KHR_SURFACE_SPEC_VERSION; + + //=== VK_KHR_swapchain === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSwapchainExtensionName = VK_KHR_SWAPCHAIN_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSwapchainSpecVersion = VK_KHR_SWAPCHAIN_SPEC_VERSION; + + //=== VK_KHR_display === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDisplayExtensionName = VK_KHR_DISPLAY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDisplaySpecVersion = VK_KHR_DISPLAY_SPEC_VERSION; + + //=== VK_KHR_display_swapchain === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDisplaySwapchainExtensionName = VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDisplaySwapchainSpecVersion = VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRXlibSurfaceExtensionName = VK_KHR_XLIB_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRXlibSurfaceSpecVersion = VK_KHR_XLIB_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRXcbSurfaceExtensionName = VK_KHR_XCB_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRXcbSurfaceSpecVersion = VK_KHR_XCB_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWaylandSurfaceExtensionName = VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWaylandSurfaceSpecVersion = VK_KHR_WAYLAND_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRAndroidSurfaceExtensionName = VK_KHR_ANDROID_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRAndroidSurfaceSpecVersion = VK_KHR_ANDROID_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWin32SurfaceExtensionName = VK_KHR_WIN32_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWin32SurfaceSpecVersion = VK_KHR_WIN32_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + VULKAN_HPP_DEPRECATED( "The VK_EXT_debug_report extension has been deprecated by VK_EXT_debug_utils." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugReportExtensionName = VK_EXT_DEBUG_REPORT_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_debug_report extension has been deprecated by VK_EXT_debug_utils." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugReportSpecVersion = VK_EXT_DEBUG_REPORT_SPEC_VERSION; + + //=== VK_NV_glsl_shader === + VULKAN_HPP_DEPRECATED( "The VK_NV_glsl_shader extension has been deprecated." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVGlslShaderExtensionName = VK_NV_GLSL_SHADER_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_glsl_shader extension has been deprecated." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVGlslShaderSpecVersion = VK_NV_GLSL_SHADER_SPEC_VERSION; + + //=== VK_EXT_depth_range_unrestricted === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthRangeUnrestrictedExtensionName = VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthRangeUnrestrictedSpecVersion = VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION; + + //=== VK_KHR_sampler_mirror_clamp_to_edge === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSamplerMirrorClampToEdgeExtensionName = VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSamplerMirrorClampToEdgeSpecVersion = VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION; + + //=== VK_IMG_filter_cubic === + VULKAN_HPP_CONSTEXPR_INLINE auto IMGFilterCubicExtensionName = VK_IMG_FILTER_CUBIC_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto IMGFilterCubicSpecVersion = VK_IMG_FILTER_CUBIC_SPEC_VERSION; + + //=== VK_AMD_rasterization_order === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDRasterizationOrderExtensionName = VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDRasterizationOrderSpecVersion = VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION; + + //=== VK_AMD_shader_trinary_minmax === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderTrinaryMinmaxExtensionName = VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderTrinaryMinmaxSpecVersion = VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION; + + //=== VK_AMD_shader_explicit_vertex_parameter === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderExplicitVertexParameterExtensionName = VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderExplicitVertexParameterSpecVersion = VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION; + + //=== VK_EXT_debug_marker === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugMarkerExtensionName = VK_EXT_DEBUG_MARKER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugMarkerSpecVersion = VK_EXT_DEBUG_MARKER_SPEC_VERSION; + + //=== VK_KHR_video_queue === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoQueueExtensionName = VK_KHR_VIDEO_QUEUE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoQueueSpecVersion = VK_KHR_VIDEO_QUEUE_SPEC_VERSION; + + //=== VK_KHR_video_decode_queue === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeQueueExtensionName = VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeQueueSpecVersion = VK_KHR_VIDEO_DECODE_QUEUE_SPEC_VERSION; + + //=== VK_AMD_gcn_shader === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDGcnShaderExtensionName = VK_AMD_GCN_SHADER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDGcnShaderSpecVersion = VK_AMD_GCN_SHADER_SPEC_VERSION; + + //=== VK_NV_dedicated_allocation === + VULKAN_HPP_DEPRECATED( "The VK_NV_dedicated_allocation extension has been deprecated by VK_KHR_dedicated_allocation." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto NVDedicatedAllocationExtensionName = VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_dedicated_allocation extension has been deprecated by VK_KHR_dedicated_allocation." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVDedicatedAllocationSpecVersion = VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION; + + //=== VK_EXT_transform_feedback === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTransformFeedbackExtensionName = VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTransformFeedbackSpecVersion = VK_EXT_TRANSFORM_FEEDBACK_SPEC_VERSION; + + //=== VK_NVX_binary_import === + VULKAN_HPP_CONSTEXPR_INLINE auto NVXBinaryImportExtensionName = VK_NVX_BINARY_IMPORT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVXBinaryImportSpecVersion = VK_NVX_BINARY_IMPORT_SPEC_VERSION; + + //=== VK_NVX_image_view_handle === + VULKAN_HPP_CONSTEXPR_INLINE auto NVXImageViewHandleExtensionName = VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVXImageViewHandleSpecVersion = VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION; + + //=== VK_AMD_draw_indirect_count === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDDrawIndirectCountExtensionName = VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDDrawIndirectCountSpecVersion = VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION; + + //=== VK_AMD_negative_viewport_height === + VULKAN_HPP_DEPRECATED( "The VK_AMD_negative_viewport_height extension has been obsoleted by VK_KHR_maintenance1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDNegativeViewportHeightExtensionName = VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_AMD_negative_viewport_height extension has been obsoleted by VK_KHR_maintenance1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDNegativeViewportHeightSpecVersion = VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION; + + //=== VK_AMD_gpu_shader_half_float === + VULKAN_HPP_DEPRECATED( "The VK_AMD_gpu_shader_half_float extension has been deprecated by VK_KHR_shader_float16_int8." ) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDGpuShaderHalfFloatExtensionName = VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_AMD_gpu_shader_half_float extension has been deprecated by VK_KHR_shader_float16_int8." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDGpuShaderHalfFloatSpecVersion = VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION; + + //=== VK_AMD_shader_ballot === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderBallotExtensionName = VK_AMD_SHADER_BALLOT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderBallotSpecVersion = VK_AMD_SHADER_BALLOT_SPEC_VERSION; + + //=== VK_KHR_video_encode_h264 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeH264ExtensionName = VK_KHR_VIDEO_ENCODE_H264_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeH264SpecVersion = VK_KHR_VIDEO_ENCODE_H264_SPEC_VERSION; + + //=== VK_KHR_video_encode_h265 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeH265ExtensionName = VK_KHR_VIDEO_ENCODE_H265_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeH265SpecVersion = VK_KHR_VIDEO_ENCODE_H265_SPEC_VERSION; + + //=== VK_KHR_video_decode_h264 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeH264ExtensionName = VK_KHR_VIDEO_DECODE_H264_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeH264SpecVersion = VK_KHR_VIDEO_DECODE_H264_SPEC_VERSION; + + //=== VK_AMD_texture_gather_bias_lod === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDTextureGatherBiasLodExtensionName = VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDTextureGatherBiasLodSpecVersion = VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION; + + //=== VK_AMD_shader_info === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderInfoExtensionName = VK_AMD_SHADER_INFO_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderInfoSpecVersion = VK_AMD_SHADER_INFO_SPEC_VERSION; + + //=== VK_KHR_dynamic_rendering === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDynamicRenderingExtensionName = VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDynamicRenderingSpecVersion = VK_KHR_DYNAMIC_RENDERING_SPEC_VERSION; + + //=== VK_AMD_shader_image_load_store_lod === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderImageLoadStoreLodExtensionName = VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderImageLoadStoreLodSpecVersion = VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto GGPStreamDescriptorSurfaceExtensionName = VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GGPStreamDescriptorSurfaceSpecVersion = VK_GGP_STREAM_DESCRIPTOR_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_corner_sampled_image === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCornerSampledImageExtensionName = VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCornerSampledImageSpecVersion = VK_NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION; + + //=== VK_KHR_multiview === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMultiviewExtensionName = VK_KHR_MULTIVIEW_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMultiviewSpecVersion = VK_KHR_MULTIVIEW_SPEC_VERSION; + + //=== VK_IMG_format_pvrtc === + VULKAN_HPP_DEPRECATED( "The VK_IMG_format_pvrtc extension has been deprecated." ) + VULKAN_HPP_CONSTEXPR_INLINE auto IMGFormatPvrtcExtensionName = VK_IMG_FORMAT_PVRTC_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_IMG_format_pvrtc extension has been deprecated." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto IMGFormatPvrtcSpecVersion = VK_IMG_FORMAT_PVRTC_SPEC_VERSION; + + //=== VK_NV_external_memory_capabilities === + VULKAN_HPP_DEPRECATED( "The VK_NV_external_memory_capabilities extension has been deprecated by VK_KHR_external_memory_capabilities." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryCapabilitiesExtensionName = VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_external_memory_capabilities extension has been deprecated by VK_KHR_external_memory_capabilities." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryCapabilitiesSpecVersion = VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION; + + //=== VK_NV_external_memory === + VULKAN_HPP_DEPRECATED( "The VK_NV_external_memory extension has been deprecated by VK_KHR_external_memory." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryExtensionName = VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_external_memory extension has been deprecated by VK_KHR_external_memory." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemorySpecVersion = VK_NV_EXTERNAL_MEMORY_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + VULKAN_HPP_DEPRECATED( "The VK_NV_external_memory_win32 extension has been deprecated by VK_KHR_external_memory_win32." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryWin32ExtensionName = VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_external_memory_win32 extension has been deprecated by VK_KHR_external_memory_win32." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryWin32SpecVersion = VK_NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_win32_keyed_mutex === + VULKAN_HPP_CONSTEXPR_INLINE auto NVWin32KeyedMutexExtensionName = VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVWin32KeyedMutexSpecVersion = VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetPhysicalDeviceProperties2ExtensionName = VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetPhysicalDeviceProperties2SpecVersion = VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION; + + //=== VK_KHR_device_group === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeviceGroupExtensionName = VK_KHR_DEVICE_GROUP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeviceGroupSpecVersion = VK_KHR_DEVICE_GROUP_SPEC_VERSION; + + //=== VK_EXT_validation_flags === + VULKAN_HPP_DEPRECATED( "The VK_EXT_validation_flags extension has been deprecated by VK_EXT_layer_settings." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTValidationFlagsExtensionName = VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_validation_flags extension has been deprecated by VK_EXT_layer_settings." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTValidationFlagsSpecVersion = VK_EXT_VALIDATION_FLAGS_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto NNViSurfaceExtensionName = VK_NN_VI_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NNViSurfaceSpecVersion = VK_NN_VI_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_shader_draw_parameters === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderDrawParametersExtensionName = VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderDrawParametersSpecVersion = VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION; + + //=== VK_EXT_shader_subgroup_ballot === + VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_subgroup_ballot extension has been deprecated by VK_VERSION_1_2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderSubgroupBallotExtensionName = VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_subgroup_ballot extension has been deprecated by VK_VERSION_1_2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderSubgroupBallotSpecVersion = VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION; + + //=== VK_EXT_shader_subgroup_vote === + VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_subgroup_vote extension has been deprecated by VK_VERSION_1_1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderSubgroupVoteExtensionName = VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_subgroup_vote extension has been deprecated by VK_VERSION_1_1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderSubgroupVoteSpecVersion = VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION; + + //=== VK_EXT_texture_compression_astc_hdr === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTextureCompressionAstcHdrExtensionName = VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTextureCompressionAstcHdrSpecVersion = VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION; + + //=== VK_EXT_astc_decode_mode === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAstcDecodeModeExtensionName = VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAstcDecodeModeSpecVersion = VK_EXT_ASTC_DECODE_MODE_SPEC_VERSION; + + //=== VK_EXT_pipeline_robustness === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineRobustnessExtensionName = VK_EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineRobustnessSpecVersion = VK_EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION; + + //=== VK_KHR_maintenance1 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance1ExtensionName = VK_KHR_MAINTENANCE_1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance1SpecVersion = VK_KHR_MAINTENANCE_1_SPEC_VERSION; + + //=== VK_KHR_device_group_creation === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeviceGroupCreationExtensionName = VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeviceGroupCreationSpecVersion = VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION; + + //=== VK_KHR_external_memory_capabilities === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryCapabilitiesExtensionName = VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryCapabilitiesSpecVersion = VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION; + + //=== VK_KHR_external_memory === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryExtensionName = VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemorySpecVersion = 
VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryWin32ExtensionName = VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryWin32SpecVersion = VK_KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryFdExtensionName = VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryFdSpecVersion = VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_keyed_mutex === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWin32KeyedMutexExtensionName = VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWin32KeyedMutexSpecVersion = VK_KHR_WIN32_KEYED_MUTEX_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_capabilities === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreCapabilitiesExtensionName = VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreCapabilitiesSpecVersion = VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION; + + //=== VK_KHR_external_semaphore === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreExtensionName = VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreSpecVersion = VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreWin32ExtensionName = VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreWin32SpecVersion = VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreFdExtensionName = VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreFdSpecVersion = VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION; + + //=== VK_KHR_push_descriptor === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPushDescriptorExtensionName = VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPushDescriptorSpecVersion = VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION; + + //=== VK_EXT_conditional_rendering === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTConditionalRenderingExtensionName = VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTConditionalRenderingSpecVersion = VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION; + + //=== VK_KHR_shader_float16_int8 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloat16Int8ExtensionName = VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloat16Int8SpecVersion = VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION; + + //=== VK_KHR_16bit_storage === + VULKAN_HPP_CONSTEXPR_INLINE auto KHR16BitStorageExtensionName = VK_KHR_16BIT_STORAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHR16BitStorageSpecVersion = VK_KHR_16BIT_STORAGE_SPEC_VERSION; + + //=== VK_KHR_incremental_present === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRIncrementalPresentExtensionName = VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRIncrementalPresentSpecVersion = VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION; + + //=== 
VK_KHR_descriptor_update_template === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDescriptorUpdateTemplateExtensionName = VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDescriptorUpdateTemplateSpecVersion = VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION; + + //=== VK_NV_clip_space_w_scaling === + VULKAN_HPP_CONSTEXPR_INLINE auto NVClipSpaceWScalingExtensionName = VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVClipSpaceWScalingSpecVersion = VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION; + + //=== VK_EXT_direct_mode_display === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDirectModeDisplayExtensionName = VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDirectModeDisplaySpecVersion = VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAcquireXlibDisplayExtensionName = VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAcquireXlibDisplaySpecVersion = VK_EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDisplaySurfaceCounterExtensionName = VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDisplaySurfaceCounterSpecVersion = VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION; + + //=== VK_EXT_display_control === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDisplayControlExtensionName = VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDisplayControlSpecVersion = VK_EXT_DISPLAY_CONTROL_SPEC_VERSION; + + //=== VK_GOOGLE_display_timing === + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEDisplayTimingExtensionName = VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEDisplayTimingSpecVersion = VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION; + + //=== VK_NV_sample_mask_override_coverage === + VULKAN_HPP_CONSTEXPR_INLINE auto NVSampleMaskOverrideCoverageExtensionName = VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVSampleMaskOverrideCoverageSpecVersion = VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION; + + //=== VK_NV_geometry_shader_passthrough === + VULKAN_HPP_CONSTEXPR_INLINE auto NVGeometryShaderPassthroughExtensionName = VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVGeometryShaderPassthroughSpecVersion = VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION; + + //=== VK_NV_viewport_array2 === + VULKAN_HPP_CONSTEXPR_INLINE auto NVViewportArray2ExtensionName = VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVViewportArray2SpecVersion = VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION; + + //=== VK_NVX_multiview_per_view_attributes === + VULKAN_HPP_CONSTEXPR_INLINE auto NVXMultiviewPerViewAttributesExtensionName = VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVXMultiviewPerViewAttributesSpecVersion = VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION; + + //=== VK_NV_viewport_swizzle === + VULKAN_HPP_CONSTEXPR_INLINE auto NVViewportSwizzleExtensionName = VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVViewportSwizzleSpecVersion = VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION; + + //=== VK_EXT_discard_rectangles === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDiscardRectanglesExtensionName = VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME; + 
VULKAN_HPP_CONSTEXPR_INLINE auto EXTDiscardRectanglesSpecVersion = VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION; + + //=== VK_EXT_conservative_rasterization === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTConservativeRasterizationExtensionName = VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTConservativeRasterizationSpecVersion = VK_EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION; + + //=== VK_EXT_depth_clip_enable === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClipEnableExtensionName = VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClipEnableSpecVersion = VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION; + + //=== VK_EXT_swapchain_colorspace === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSwapchainColorSpaceExtensionName = VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSwapchainColorSpaceSpecVersion = VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION; + + //=== VK_EXT_hdr_metadata === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHdrMetadataExtensionName = VK_EXT_HDR_METADATA_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHdrMetadataSpecVersion = VK_EXT_HDR_METADATA_SPEC_VERSION; + + //=== VK_KHR_imageless_framebuffer === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRImagelessFramebufferExtensionName = VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRImagelessFramebufferSpecVersion = VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION; + + //=== VK_KHR_create_renderpass2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCreateRenderpass2ExtensionName = VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCreateRenderpass2SpecVersion = VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION; + + //=== VK_IMG_relaxed_line_rasterization === + VULKAN_HPP_CONSTEXPR_INLINE auto IMGRelaxedLineRasterizationExtensionName = VK_IMG_RELAXED_LINE_RASTERIZATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto IMGRelaxedLineRasterizationSpecVersion = VK_IMG_RELAXED_LINE_RASTERIZATION_SPEC_VERSION; + + //=== VK_KHR_shared_presentable_image === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSharedPresentableImageExtensionName = VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSharedPresentableImageSpecVersion = VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION; + + //=== VK_KHR_external_fence_capabilities === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceCapabilitiesExtensionName = VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceCapabilitiesSpecVersion = VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION; + + //=== VK_KHR_external_fence === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceExtensionName = VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceSpecVersion = VK_KHR_EXTERNAL_FENCE_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceWin32ExtensionName = VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceWin32SpecVersion = VK_KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceFdExtensionName = VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceFdSpecVersion = VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION; + + //=== VK_KHR_performance_query === + VULKAN_HPP_CONSTEXPR_INLINE auto 
KHRPerformanceQueryExtensionName = VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPerformanceQuerySpecVersion = VK_KHR_PERFORMANCE_QUERY_SPEC_VERSION; + + //=== VK_KHR_maintenance2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance2ExtensionName = VK_KHR_MAINTENANCE_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance2SpecVersion = VK_KHR_MAINTENANCE_2_SPEC_VERSION; + + //=== VK_KHR_get_surface_capabilities2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetSurfaceCapabilities2ExtensionName = VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetSurfaceCapabilities2SpecVersion = VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION; + + //=== VK_KHR_variable_pointers === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVariablePointersExtensionName = VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVariablePointersSpecVersion = VK_KHR_VARIABLE_POINTERS_SPEC_VERSION; + + //=== VK_KHR_get_display_properties2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetDisplayProperties2ExtensionName = VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetDisplayProperties2SpecVersion = VK_KHR_GET_DISPLAY_PROPERTIES_2_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + VULKAN_HPP_DEPRECATED( "The VK_MVK_ios_surface extension has been deprecated by VK_EXT_metal_surface." ) + VULKAN_HPP_CONSTEXPR_INLINE auto MVKIosSurfaceExtensionName = VK_MVK_IOS_SURFACE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_MVK_ios_surface extension has been deprecated by VK_EXT_metal_surface." ) + VULKAN_HPP_CONSTEXPR_INLINE auto MVKIosSurfaceSpecVersion = VK_MVK_IOS_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + VULKAN_HPP_DEPRECATED( "The VK_MVK_macos_surface extension has been deprecated by VK_EXT_metal_surface." ) + VULKAN_HPP_CONSTEXPR_INLINE auto MVKMacosSurfaceExtensionName = VK_MVK_MACOS_SURFACE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_MVK_macos_surface extension has been deprecated by VK_EXT_metal_surface." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto MVKMacosSurfaceSpecVersion = VK_MVK_MACOS_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_external_memory_dma_buf === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryDmaBufExtensionName = VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryDmaBufSpecVersion = VK_EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION; + + //=== VK_EXT_queue_family_foreign === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTQueueFamilyForeignExtensionName = VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTQueueFamilyForeignSpecVersion = VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION; + + //=== VK_KHR_dedicated_allocation === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDedicatedAllocationExtensionName = VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDedicatedAllocationSpecVersion = VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION; + + //=== VK_EXT_debug_utils === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugUtilsExtensionName = VK_EXT_DEBUG_UTILS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugUtilsSpecVersion = VK_EXT_DEBUG_UTILS_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + VULKAN_HPP_CONSTEXPR_INLINE auto ANDROIDExternalMemoryAndroidHardwareBufferExtensionName = VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ANDROIDExternalMemoryAndroidHardwareBufferSpecVersion = VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_EXT_sampler_filter_minmax === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSamplerFilterMinmaxExtensionName = VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSamplerFilterMinmaxSpecVersion = VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION; + + //=== VK_KHR_storage_buffer_storage_class === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRStorageBufferStorageClassExtensionName = VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRStorageBufferStorageClassSpecVersion = VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION; + + //=== VK_AMD_gpu_shader_int16 === + VULKAN_HPP_DEPRECATED( "The VK_AMD_gpu_shader_int16 extension has been deprecated by VK_KHR_shader_float16_int8." ) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDGpuShaderInt16ExtensionName = VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_AMD_gpu_shader_int16 extension has been deprecated by VK_KHR_shader_float16_int8." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDGpuShaderInt16SpecVersion = VK_AMD_GPU_SHADER_INT16_SPEC_VERSION; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDXShaderEnqueueExtensionName = VK_AMDX_SHADER_ENQUEUE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDXShaderEnqueueSpecVersion = VK_AMDX_SHADER_ENQUEUE_SPEC_VERSION; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_AMD_mixed_attachment_samples === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDMixedAttachmentSamplesExtensionName = VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDMixedAttachmentSamplesSpecVersion = VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION; + + //=== VK_AMD_shader_fragment_mask === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderFragmentMaskExtensionName = VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderFragmentMaskSpecVersion = VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION; + + //=== VK_EXT_inline_uniform_block === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTInlineUniformBlockExtensionName = VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTInlineUniformBlockSpecVersion = VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION; + + //=== VK_EXT_shader_stencil_export === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderStencilExportExtensionName = VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderStencilExportSpecVersion = VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION; + + //=== VK_KHR_shader_bfloat16 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderBfloat16ExtensionName = VK_KHR_SHADER_BFLOAT16_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderBfloat16SpecVersion = VK_KHR_SHADER_BFLOAT16_SPEC_VERSION; + + //=== VK_EXT_sample_locations === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSampleLocationsExtensionName = VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSampleLocationsSpecVersion = VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION; + + //=== VK_KHR_relaxed_block_layout === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRelaxedBlockLayoutExtensionName = VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRelaxedBlockLayoutSpecVersion = VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION; + + //=== VK_KHR_get_memory_requirements2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetMemoryRequirements2ExtensionName = VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetMemoryRequirements2SpecVersion = VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION; + + //=== VK_KHR_image_format_list === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRImageFormatListExtensionName = VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRImageFormatListSpecVersion = VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION; + + //=== VK_EXT_blend_operation_advanced === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTBlendOperationAdvancedExtensionName = VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTBlendOperationAdvancedSpecVersion = VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION; + + //=== VK_NV_fragment_coverage_to_color === + VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentCoverageToColorExtensionName = VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentCoverageToColorSpecVersion = VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION; + + //=== VK_KHR_acceleration_structure === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRAccelerationStructureExtensionName 
= VK_KHR_ACCELERATION_STRUCTURE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRAccelerationStructureSpecVersion = VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION; + + //=== VK_KHR_ray_tracing_pipeline === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingPipelineExtensionName = VK_KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingPipelineSpecVersion = VK_KHR_RAY_TRACING_PIPELINE_SPEC_VERSION; + + //=== VK_KHR_ray_query === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayQueryExtensionName = VK_KHR_RAY_QUERY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayQuerySpecVersion = VK_KHR_RAY_QUERY_SPEC_VERSION; + + //=== VK_NV_framebuffer_mixed_samples === + VULKAN_HPP_CONSTEXPR_INLINE auto NVFramebufferMixedSamplesExtensionName = VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVFramebufferMixedSamplesSpecVersion = VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION; + + //=== VK_NV_fill_rectangle === + VULKAN_HPP_CONSTEXPR_INLINE auto NVFillRectangleExtensionName = VK_NV_FILL_RECTANGLE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVFillRectangleSpecVersion = VK_NV_FILL_RECTANGLE_SPEC_VERSION; + + //=== VK_NV_shader_sm_builtins === + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderSmBuiltinsExtensionName = VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderSmBuiltinsSpecVersion = VK_NV_SHADER_SM_BUILTINS_SPEC_VERSION; + + //=== VK_EXT_post_depth_coverage === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPostDepthCoverageExtensionName = VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPostDepthCoverageSpecVersion = VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION; + + //=== VK_KHR_sampler_ycbcr_conversion === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSamplerYcbcrConversionExtensionName = VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSamplerYcbcrConversionSpecVersion = VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION; + + //=== VK_KHR_bind_memory2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRBindMemory2ExtensionName = VK_KHR_BIND_MEMORY_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRBindMemory2SpecVersion = VK_KHR_BIND_MEMORY_2_SPEC_VERSION; + + //=== VK_EXT_image_drm_format_modifier === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageDrmFormatModifierExtensionName = VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageDrmFormatModifierSpecVersion = VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION; + + //=== VK_EXT_validation_cache === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTValidationCacheExtensionName = VK_EXT_VALIDATION_CACHE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTValidationCacheSpecVersion = VK_EXT_VALIDATION_CACHE_SPEC_VERSION; + + //=== VK_EXT_descriptor_indexing === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorIndexingExtensionName = VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorIndexingSpecVersion = VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION; + + //=== VK_EXT_shader_viewport_index_layer === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderViewportIndexLayerExtensionName = VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderViewportIndexLayerSpecVersion = VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_portability_subset === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPortabilitySubsetExtensionName = VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME; + 
VULKAN_HPP_CONSTEXPR_INLINE auto KHRPortabilitySubsetSpecVersion = VK_KHR_PORTABILITY_SUBSET_SPEC_VERSION; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_shading_rate_image === + VULKAN_HPP_CONSTEXPR_INLINE auto NVShadingRateImageExtensionName = VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVShadingRateImageSpecVersion = VK_NV_SHADING_RATE_IMAGE_SPEC_VERSION; + + //=== VK_NV_ray_tracing === + VULKAN_HPP_DEPRECATED( "The VK_NV_ray_tracing extension has been deprecated by VK_KHR_ray_tracing_pipeline." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingExtensionName = VK_NV_RAY_TRACING_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_ray_tracing extension has been deprecated by VK_KHR_ray_tracing_pipeline." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingSpecVersion = VK_NV_RAY_TRACING_SPEC_VERSION; + + //=== VK_NV_representative_fragment_test === + VULKAN_HPP_CONSTEXPR_INLINE auto NVRepresentativeFragmentTestExtensionName = VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVRepresentativeFragmentTestSpecVersion = VK_NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION; + + //=== VK_KHR_maintenance3 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance3ExtensionName = VK_KHR_MAINTENANCE_3_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance3SpecVersion = VK_KHR_MAINTENANCE_3_SPEC_VERSION; + + //=== VK_KHR_draw_indirect_count === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDrawIndirectCountExtensionName = VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDrawIndirectCountSpecVersion = VK_KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION; + + //=== VK_EXT_filter_cubic === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFilterCubicExtensionName = VK_EXT_FILTER_CUBIC_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFilterCubicSpecVersion = VK_EXT_FILTER_CUBIC_SPEC_VERSION; + + //=== VK_QCOM_render_pass_shader_resolve === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassShaderResolveExtensionName = VK_QCOM_RENDER_PASS_SHADER_RESOLVE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassShaderResolveSpecVersion = VK_QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION; + + //=== VK_EXT_global_priority === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPriorityExtensionName = VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPrioritySpecVersion = VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION; + + //=== VK_KHR_shader_subgroup_extended_types === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupExtendedTypesExtensionName = VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupExtendedTypesSpecVersion = VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION; + + //=== VK_KHR_8bit_storage === + VULKAN_HPP_CONSTEXPR_INLINE auto KHR8BitStorageExtensionName = VK_KHR_8BIT_STORAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHR8BitStorageSpecVersion = VK_KHR_8BIT_STORAGE_SPEC_VERSION; + + //=== VK_EXT_external_memory_host === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryHostExtensionName = VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryHostSpecVersion = VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION; + + //=== VK_AMD_buffer_marker === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDBufferMarkerExtensionName = VK_AMD_BUFFER_MARKER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDBufferMarkerSpecVersion = VK_AMD_BUFFER_MARKER_SPEC_VERSION; + + //=== VK_KHR_shader_atomic_int64 === + 
VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderAtomicInt64ExtensionName = VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderAtomicInt64SpecVersion = VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION; + + //=== VK_KHR_shader_clock === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderClockExtensionName = VK_KHR_SHADER_CLOCK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderClockSpecVersion = VK_KHR_SHADER_CLOCK_SPEC_VERSION; + + //=== VK_AMD_pipeline_compiler_control === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDPipelineCompilerControlExtensionName = VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDPipelineCompilerControlSpecVersion = VK_AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION; + + //=== VK_EXT_calibrated_timestamps === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTCalibratedTimestampsExtensionName = VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTCalibratedTimestampsSpecVersion = VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION; + + //=== VK_AMD_shader_core_properties === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderCorePropertiesExtensionName = VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderCorePropertiesSpecVersion = VK_AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION; + + //=== VK_KHR_video_decode_h265 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeH265ExtensionName = VK_KHR_VIDEO_DECODE_H265_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeH265SpecVersion = VK_KHR_VIDEO_DECODE_H265_SPEC_VERSION; + + //=== VK_KHR_global_priority === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGlobalPriorityExtensionName = VK_KHR_GLOBAL_PRIORITY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGlobalPrioritySpecVersion = VK_KHR_GLOBAL_PRIORITY_SPEC_VERSION; + + //=== VK_AMD_memory_overallocation_behavior === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDMemoryOverallocationBehaviorExtensionName = VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDMemoryOverallocationBehaviorSpecVersion = VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION; + + //=== VK_EXT_vertex_attribute_divisor === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexAttributeDivisorExtensionName = VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexAttributeDivisorSpecVersion = VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_frame_token === + VULKAN_HPP_CONSTEXPR_INLINE auto GGPFrameTokenExtensionName = VK_GGP_FRAME_TOKEN_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GGPFrameTokenSpecVersion = VK_GGP_FRAME_TOKEN_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_EXT_pipeline_creation_feedback === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineCreationFeedbackExtensionName = VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineCreationFeedbackSpecVersion = VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION; + + //=== VK_KHR_driver_properties === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDriverPropertiesExtensionName = VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDriverPropertiesSpecVersion = VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION; + + //=== VK_KHR_shader_float_controls === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloatControlsExtensionName = VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloatControlsSpecVersion = VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION; + + //=== 
VK_NV_shader_subgroup_partitioned === + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderSubgroupPartitionedExtensionName = VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderSubgroupPartitionedSpecVersion = VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION; + + //=== VK_KHR_depth_stencil_resolve === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDepthStencilResolveExtensionName = VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDepthStencilResolveSpecVersion = VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION; + + //=== VK_KHR_swapchain_mutable_format === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSwapchainMutableFormatExtensionName = VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSwapchainMutableFormatSpecVersion = VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION; + + //=== VK_NV_compute_shader_derivatives === + VULKAN_HPP_CONSTEXPR_INLINE auto NVComputeShaderDerivativesExtensionName = VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVComputeShaderDerivativesSpecVersion = VK_NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION; + + //=== VK_NV_mesh_shader === + VULKAN_HPP_CONSTEXPR_INLINE auto NVMeshShaderExtensionName = VK_NV_MESH_SHADER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVMeshShaderSpecVersion = VK_NV_MESH_SHADER_SPEC_VERSION; + + //=== VK_NV_fragment_shader_barycentric === + VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentShaderBarycentricExtensionName = VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentShaderBarycentricSpecVersion = VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION; + + //=== VK_NV_shader_image_footprint === + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderImageFootprintExtensionName = VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderImageFootprintSpecVersion = VK_NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION; + + //=== VK_NV_scissor_exclusive === + VULKAN_HPP_CONSTEXPR_INLINE auto NVScissorExclusiveExtensionName = VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVScissorExclusiveSpecVersion = VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION; + + //=== VK_NV_device_diagnostic_checkpoints === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceDiagnosticCheckpointsExtensionName = VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceDiagnosticCheckpointsSpecVersion = VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION; + + //=== VK_KHR_timeline_semaphore === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRTimelineSemaphoreExtensionName = VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRTimelineSemaphoreSpecVersion = VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION; + + //=== VK_INTEL_shader_integer_functions2 === + VULKAN_HPP_CONSTEXPR_INLINE auto INTELShaderIntegerFunctions2ExtensionName = VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto INTELShaderIntegerFunctions2SpecVersion = VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_SPEC_VERSION; + + //=== VK_INTEL_performance_query === + VULKAN_HPP_CONSTEXPR_INLINE auto INTELPerformanceQueryExtensionName = VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto INTELPerformanceQuerySpecVersion = VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION; + + //=== VK_KHR_vulkan_memory_model === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVulkanMemoryModelExtensionName = VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto 
KHRVulkanMemoryModelSpecVersion = VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION; + + //=== VK_EXT_pci_bus_info === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPciBusInfoExtensionName = VK_EXT_PCI_BUS_INFO_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPciBusInfoSpecVersion = VK_EXT_PCI_BUS_INFO_SPEC_VERSION; + + //=== VK_AMD_display_native_hdr === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDDisplayNativeHdrExtensionName = VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDDisplayNativeHdrSpecVersion = VK_AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIAImagepipeSurfaceExtensionName = VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIAImagepipeSurfaceSpecVersion = VK_FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_KHR_shader_terminate_invocation === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderTerminateInvocationExtensionName = VK_KHR_SHADER_TERMINATE_INVOCATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderTerminateInvocationSpecVersion = VK_KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMetalSurfaceExtensionName = VK_EXT_METAL_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMetalSurfaceSpecVersion = VK_EXT_METAL_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_fragment_density_map === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentDensityMapExtensionName = VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentDensityMapSpecVersion = VK_EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION; + + //=== VK_EXT_scalar_block_layout === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTScalarBlockLayoutExtensionName = VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTScalarBlockLayoutSpecVersion = VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION; + + //=== VK_GOOGLE_hlsl_functionality1 === + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEHlslFunctionality1ExtensionName = VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEHlslFunctionality1SpecVersion = VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION; + + //=== VK_GOOGLE_decorate_string === + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEDecorateStringExtensionName = VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEDecorateStringSpecVersion = VK_GOOGLE_DECORATE_STRING_SPEC_VERSION; + + //=== VK_EXT_subgroup_size_control === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSubgroupSizeControlExtensionName = VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSubgroupSizeControlSpecVersion = VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION; + + //=== VK_KHR_fragment_shading_rate === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRFragmentShadingRateExtensionName = VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRFragmentShadingRateSpecVersion = VK_KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION; + + //=== VK_AMD_shader_core_properties2 === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderCoreProperties2ExtensionName = VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderCoreProperties2SpecVersion = VK_AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION; + + //=== VK_AMD_device_coherent_memory === + VULKAN_HPP_CONSTEXPR_INLINE auto 
AMDDeviceCoherentMemoryExtensionName = VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDDeviceCoherentMemorySpecVersion = VK_AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION; + + //=== VK_KHR_dynamic_rendering_local_read === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDynamicRenderingLocalReadExtensionName = VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDynamicRenderingLocalReadSpecVersion = VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_SPEC_VERSION; + + //=== VK_EXT_shader_image_atomic_int64 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderImageAtomicInt64ExtensionName = VK_EXT_SHADER_IMAGE_ATOMIC_INT64_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderImageAtomicInt64SpecVersion = VK_EXT_SHADER_IMAGE_ATOMIC_INT64_SPEC_VERSION; + + //=== VK_KHR_shader_quad_control === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderQuadControlExtensionName = VK_KHR_SHADER_QUAD_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderQuadControlSpecVersion = VK_KHR_SHADER_QUAD_CONTROL_SPEC_VERSION; + + //=== VK_KHR_spirv_1_4 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSpirv14ExtensionName = VK_KHR_SPIRV_1_4_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSpirv14SpecVersion = VK_KHR_SPIRV_1_4_SPEC_VERSION; + + //=== VK_EXT_memory_budget === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMemoryBudgetExtensionName = VK_EXT_MEMORY_BUDGET_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMemoryBudgetSpecVersion = VK_EXT_MEMORY_BUDGET_SPEC_VERSION; + + //=== VK_EXT_memory_priority === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMemoryPriorityExtensionName = VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMemoryPrioritySpecVersion = VK_EXT_MEMORY_PRIORITY_SPEC_VERSION; + + //=== VK_KHR_surface_protected_capabilities === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSurfaceProtectedCapabilitiesExtensionName = VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSurfaceProtectedCapabilitiesSpecVersion = VK_KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION; + + //=== VK_NV_dedicated_allocation_image_aliasing === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDedicatedAllocationImageAliasingExtensionName = VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDedicatedAllocationImageAliasingSpecVersion = VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION; + + //=== VK_KHR_separate_depth_stencil_layouts === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSeparateDepthStencilLayoutsExtensionName = VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSeparateDepthStencilLayoutsSpecVersion = VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION; + + //=== VK_EXT_buffer_device_address === + VULKAN_HPP_DEPRECATED( "The VK_EXT_buffer_device_address extension has been deprecated by VK_KHR_buffer_device_address." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTBufferDeviceAddressExtensionName = VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_buffer_device_address extension has been deprecated by VK_KHR_buffer_device_address." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTBufferDeviceAddressSpecVersion = VK_EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION; + + //=== VK_EXT_tooling_info === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTToolingInfoExtensionName = VK_EXT_TOOLING_INFO_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTToolingInfoSpecVersion = VK_EXT_TOOLING_INFO_SPEC_VERSION; + + //=== VK_EXT_separate_stencil_usage === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSeparateStencilUsageExtensionName = VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSeparateStencilUsageSpecVersion = VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION; + + //=== VK_EXT_validation_features === + VULKAN_HPP_DEPRECATED( "The VK_EXT_validation_features extension has been deprecated by VK_EXT_layer_settings." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTValidationFeaturesExtensionName = VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_validation_features extension has been deprecated by VK_EXT_layer_settings." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTValidationFeaturesSpecVersion = VK_EXT_VALIDATION_FEATURES_SPEC_VERSION; + + //=== VK_KHR_present_wait === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPresentWaitExtensionName = VK_KHR_PRESENT_WAIT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPresentWaitSpecVersion = VK_KHR_PRESENT_WAIT_SPEC_VERSION; + + //=== VK_NV_cooperative_matrix === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeMatrixExtensionName = VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeMatrixSpecVersion = VK_NV_COOPERATIVE_MATRIX_SPEC_VERSION; + + //=== VK_NV_coverage_reduction_mode === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCoverageReductionModeExtensionName = VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCoverageReductionModeSpecVersion = VK_NV_COVERAGE_REDUCTION_MODE_SPEC_VERSION; + + //=== VK_EXT_fragment_shader_interlock === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentShaderInterlockExtensionName = VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentShaderInterlockSpecVersion = VK_EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION; + + //=== VK_EXT_ycbcr_image_arrays === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTYcbcrImageArraysExtensionName = VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTYcbcrImageArraysSpecVersion = VK_EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION; + + //=== VK_KHR_uniform_buffer_standard_layout === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRUniformBufferStandardLayoutExtensionName = VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRUniformBufferStandardLayoutSpecVersion = VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION; + + //=== VK_EXT_provoking_vertex === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTProvokingVertexExtensionName = VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTProvokingVertexSpecVersion = VK_EXT_PROVOKING_VERTEX_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFullScreenExclusiveExtensionName = VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFullScreenExclusiveSpecVersion = VK_EXT_FULL_SCREEN_EXCLUSIVE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHeadlessSurfaceExtensionName = VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE 
auto EXTHeadlessSurfaceSpecVersion = VK_EXT_HEADLESS_SURFACE_SPEC_VERSION; + + //=== VK_KHR_buffer_device_address === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRBufferDeviceAddressExtensionName = VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRBufferDeviceAddressSpecVersion = VK_KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION; + + //=== VK_EXT_line_rasterization === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLineRasterizationExtensionName = VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLineRasterizationSpecVersion = VK_EXT_LINE_RASTERIZATION_SPEC_VERSION; + + //=== VK_EXT_shader_atomic_float === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderAtomicFloatExtensionName = VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderAtomicFloatSpecVersion = VK_EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION; + + //=== VK_EXT_host_query_reset === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHostQueryResetExtensionName = VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHostQueryResetSpecVersion = VK_EXT_HOST_QUERY_RESET_SPEC_VERSION; + + //=== VK_EXT_index_type_uint8 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTIndexTypeUint8ExtensionName = VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTIndexTypeUint8SpecVersion = VK_EXT_INDEX_TYPE_UINT8_SPEC_VERSION; + + //=== VK_EXT_extended_dynamic_state === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicStateExtensionName = VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicStateSpecVersion = VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION; + + //=== VK_KHR_deferred_host_operations === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeferredHostOperationsExtensionName = VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeferredHostOperationsSpecVersion = VK_KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION; + + //=== VK_KHR_pipeline_executable_properties === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPipelineExecutablePropertiesExtensionName = VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPipelineExecutablePropertiesSpecVersion = VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION; + + //=== VK_EXT_host_image_copy === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHostImageCopyExtensionName = VK_EXT_HOST_IMAGE_COPY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHostImageCopySpecVersion = VK_EXT_HOST_IMAGE_COPY_SPEC_VERSION; + + //=== VK_KHR_map_memory2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMapMemory2ExtensionName = VK_KHR_MAP_MEMORY_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMapMemory2SpecVersion = VK_KHR_MAP_MEMORY_2_SPEC_VERSION; + + //=== VK_EXT_map_memory_placed === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMapMemoryPlacedExtensionName = VK_EXT_MAP_MEMORY_PLACED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMapMemoryPlacedSpecVersion = VK_EXT_MAP_MEMORY_PLACED_SPEC_VERSION; + + //=== VK_EXT_shader_atomic_float2 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderAtomicFloat2ExtensionName = VK_EXT_SHADER_ATOMIC_FLOAT_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderAtomicFloat2SpecVersion = VK_EXT_SHADER_ATOMIC_FLOAT_2_SPEC_VERSION; + + //=== VK_EXT_surface_maintenance1 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSurfaceMaintenance1ExtensionName = VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSurfaceMaintenance1SpecVersion = VK_EXT_SURFACE_MAINTENANCE_1_SPEC_VERSION; + + //=== 
VK_EXT_swapchain_maintenance1 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSwapchainMaintenance1ExtensionName = VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSwapchainMaintenance1SpecVersion = VK_EXT_SWAPCHAIN_MAINTENANCE_1_SPEC_VERSION; + + //=== VK_EXT_shader_demote_to_helper_invocation === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderDemoteToHelperInvocationExtensionName = VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderDemoteToHelperInvocationSpecVersion = VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION; + + //=== VK_NV_device_generated_commands === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceGeneratedCommandsExtensionName = VK_NV_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceGeneratedCommandsSpecVersion = VK_NV_DEVICE_GENERATED_COMMANDS_SPEC_VERSION; + + //=== VK_NV_inherited_viewport_scissor === + VULKAN_HPP_CONSTEXPR_INLINE auto NVInheritedViewportScissorExtensionName = VK_NV_INHERITED_VIEWPORT_SCISSOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVInheritedViewportScissorSpecVersion = VK_NV_INHERITED_VIEWPORT_SCISSOR_SPEC_VERSION; + + //=== VK_KHR_shader_integer_dot_product === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderIntegerDotProductExtensionName = VK_KHR_SHADER_INTEGER_DOT_PRODUCT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderIntegerDotProductSpecVersion = VK_KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION; + + //=== VK_EXT_texel_buffer_alignment === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTexelBufferAlignmentExtensionName = VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTexelBufferAlignmentSpecVersion = VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION; + + //=== VK_QCOM_render_pass_transform === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassTransformExtensionName = VK_QCOM_RENDER_PASS_TRANSFORM_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassTransformSpecVersion = VK_QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION; + + //=== VK_EXT_depth_bias_control === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthBiasControlExtensionName = VK_EXT_DEPTH_BIAS_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthBiasControlSpecVersion = VK_EXT_DEPTH_BIAS_CONTROL_SPEC_VERSION; + + //=== VK_EXT_device_memory_report === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceMemoryReportExtensionName = VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceMemoryReportSpecVersion = VK_EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION; + + //=== VK_EXT_acquire_drm_display === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAcquireDrmDisplayExtensionName = VK_EXT_ACQUIRE_DRM_DISPLAY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAcquireDrmDisplaySpecVersion = VK_EXT_ACQUIRE_DRM_DISPLAY_SPEC_VERSION; + + //=== VK_EXT_robustness2 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTRobustness2ExtensionName = VK_EXT_ROBUSTNESS_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTRobustness2SpecVersion = VK_EXT_ROBUSTNESS_2_SPEC_VERSION; + + //=== VK_EXT_custom_border_color === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTCustomBorderColorExtensionName = VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTCustomBorderColorSpecVersion = VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION; + + //=== VK_GOOGLE_user_type === + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEUserTypeExtensionName = VK_GOOGLE_USER_TYPE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEUserTypeSpecVersion = 
VK_GOOGLE_USER_TYPE_SPEC_VERSION; + + //=== VK_KHR_pipeline_library === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPipelineLibraryExtensionName = VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPipelineLibrarySpecVersion = VK_KHR_PIPELINE_LIBRARY_SPEC_VERSION; + + //=== VK_NV_present_barrier === + VULKAN_HPP_CONSTEXPR_INLINE auto NVPresentBarrierExtensionName = VK_NV_PRESENT_BARRIER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVPresentBarrierSpecVersion = VK_NV_PRESENT_BARRIER_SPEC_VERSION; + + //=== VK_KHR_shader_non_semantic_info === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderNonSemanticInfoExtensionName = VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderNonSemanticInfoSpecVersion = VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION; + + //=== VK_KHR_present_id === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPresentIdExtensionName = VK_KHR_PRESENT_ID_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPresentIdSpecVersion = VK_KHR_PRESENT_ID_SPEC_VERSION; + + //=== VK_EXT_private_data === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrivateDataExtensionName = VK_EXT_PRIVATE_DATA_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrivateDataSpecVersion = VK_EXT_PRIVATE_DATA_SPEC_VERSION; + + //=== VK_EXT_pipeline_creation_cache_control === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineCreationCacheControlExtensionName = VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineCreationCacheControlSpecVersion = VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION; + + //=== VK_KHR_video_encode_queue === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeQueueExtensionName = VK_KHR_VIDEO_ENCODE_QUEUE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeQueueSpecVersion = VK_KHR_VIDEO_ENCODE_QUEUE_SPEC_VERSION; + + //=== VK_NV_device_diagnostics_config === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceDiagnosticsConfigExtensionName = VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceDiagnosticsConfigSpecVersion = VK_NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION; + + //=== VK_QCOM_render_pass_store_ops === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassStoreOpsExtensionName = VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassStoreOpsSpecVersion = VK_QCOM_RENDER_PASS_STORE_OPS_SPEC_VERSION; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCudaKernelLaunchExtensionName = VK_NV_CUDA_KERNEL_LAUNCH_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCudaKernelLaunchSpecVersion = VK_NV_CUDA_KERNEL_LAUNCH_SPEC_VERSION; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_QCOM_tile_shading === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMTileShadingExtensionName = VK_QCOM_TILE_SHADING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMTileShadingSpecVersion = VK_QCOM_TILE_SHADING_SPEC_VERSION; + + //=== VK_NV_low_latency === + VULKAN_HPP_CONSTEXPR_INLINE auto NVLowLatencyExtensionName = VK_NV_LOW_LATENCY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVLowLatencySpecVersion = VK_NV_LOW_LATENCY_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMetalObjectsExtensionName = VK_EXT_METAL_OBJECTS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMetalObjectsSpecVersion = VK_EXT_METAL_OBJECTS_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== 
VK_KHR_synchronization2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSynchronization2ExtensionName = VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSynchronization2SpecVersion = VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION; + + //=== VK_EXT_descriptor_buffer === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorBufferExtensionName = VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorBufferSpecVersion = VK_EXT_DESCRIPTOR_BUFFER_SPEC_VERSION; + + //=== VK_EXT_graphics_pipeline_library === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGraphicsPipelineLibraryExtensionName = VK_EXT_GRAPHICS_PIPELINE_LIBRARY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGraphicsPipelineLibrarySpecVersion = VK_EXT_GRAPHICS_PIPELINE_LIBRARY_SPEC_VERSION; + + //=== VK_AMD_shader_early_and_late_fragment_tests === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderEarlyAndLateFragmentTestsExtensionName = VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderEarlyAndLateFragmentTestsSpecVersion = VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_SPEC_VERSION; + + //=== VK_KHR_fragment_shader_barycentric === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRFragmentShaderBarycentricExtensionName = VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRFragmentShaderBarycentricSpecVersion = VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION; + + //=== VK_KHR_shader_subgroup_uniform_control_flow === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupUniformControlFlowExtensionName = VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupUniformControlFlowSpecVersion = VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION; + + //=== VK_KHR_zero_initialize_workgroup_memory === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRZeroInitializeWorkgroupMemoryExtensionName = VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRZeroInitializeWorkgroupMemorySpecVersion = VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION; + + //=== VK_NV_fragment_shading_rate_enums === + VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentShadingRateEnumsExtensionName = VK_NV_FRAGMENT_SHADING_RATE_ENUMS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentShadingRateEnumsSpecVersion = VK_NV_FRAGMENT_SHADING_RATE_ENUMS_SPEC_VERSION; + + //=== VK_NV_ray_tracing_motion_blur === + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingMotionBlurExtensionName = VK_NV_RAY_TRACING_MOTION_BLUR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingMotionBlurSpecVersion = VK_NV_RAY_TRACING_MOTION_BLUR_SPEC_VERSION; + + //=== VK_EXT_mesh_shader === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMeshShaderExtensionName = VK_EXT_MESH_SHADER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMeshShaderSpecVersion = VK_EXT_MESH_SHADER_SPEC_VERSION; + + //=== VK_EXT_ycbcr_2plane_444_formats === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTYcbcr2Plane444FormatsExtensionName = VK_EXT_YCBCR_2PLANE_444_FORMATS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTYcbcr2Plane444FormatsSpecVersion = VK_EXT_YCBCR_2PLANE_444_FORMATS_SPEC_VERSION; + + //=== VK_EXT_fragment_density_map2 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentDensityMap2ExtensionName = VK_EXT_FRAGMENT_DENSITY_MAP_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentDensityMap2SpecVersion = VK_EXT_FRAGMENT_DENSITY_MAP_2_SPEC_VERSION; + + //=== VK_QCOM_rotated_copy_commands === + 
VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRotatedCopyCommandsExtensionName = VK_QCOM_ROTATED_COPY_COMMANDS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRotatedCopyCommandsSpecVersion = VK_QCOM_ROTATED_COPY_COMMANDS_SPEC_VERSION; + + //=== VK_EXT_image_robustness === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageRobustnessExtensionName = VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageRobustnessSpecVersion = VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION; + + //=== VK_KHR_workgroup_memory_explicit_layout === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWorkgroupMemoryExplicitLayoutExtensionName = VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWorkgroupMemoryExplicitLayoutSpecVersion = VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_SPEC_VERSION; + + //=== VK_KHR_copy_commands2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCopyCommands2ExtensionName = VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCopyCommands2SpecVersion = VK_KHR_COPY_COMMANDS_2_SPEC_VERSION; + + //=== VK_EXT_image_compression_control === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageCompressionControlExtensionName = VK_EXT_IMAGE_COMPRESSION_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageCompressionControlSpecVersion = VK_EXT_IMAGE_COMPRESSION_CONTROL_SPEC_VERSION; + + //=== VK_EXT_attachment_feedback_loop_layout === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAttachmentFeedbackLoopLayoutExtensionName = VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAttachmentFeedbackLoopLayoutSpecVersion = VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_SPEC_VERSION; + + //=== VK_EXT_4444_formats === + VULKAN_HPP_CONSTEXPR_INLINE auto EXT4444FormatsExtensionName = VK_EXT_4444_FORMATS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXT4444FormatsSpecVersion = VK_EXT_4444_FORMATS_SPEC_VERSION; + + //=== VK_EXT_device_fault === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceFaultExtensionName = VK_EXT_DEVICE_FAULT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceFaultSpecVersion = VK_EXT_DEVICE_FAULT_SPEC_VERSION; + + //=== VK_ARM_rasterization_order_attachment_access === + VULKAN_HPP_CONSTEXPR_INLINE auto ARMRasterizationOrderAttachmentAccessExtensionName = VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ARMRasterizationOrderAttachmentAccessSpecVersion = VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION; + + //=== VK_EXT_rgba10x6_formats === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTRgba10X6FormatsExtensionName = VK_EXT_RGBA10X6_FORMATS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTRgba10X6FormatsSpecVersion = VK_EXT_RGBA10X6_FORMATS_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + VULKAN_HPP_CONSTEXPR_INLINE auto NVAcquireWinrtDisplayExtensionName = VK_NV_ACQUIRE_WINRT_DISPLAY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVAcquireWinrtDisplaySpecVersion = VK_NV_ACQUIRE_WINRT_DISPLAY_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDirectfbSurfaceExtensionName = VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDirectfbSurfaceSpecVersion = VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_VALVE_mutable_descriptor_type === + VULKAN_HPP_CONSTEXPR_INLINE auto VALVEMutableDescriptorTypeExtensionName 
= VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto VALVEMutableDescriptorTypeSpecVersion = VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION; + + //=== VK_EXT_vertex_input_dynamic_state === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexInputDynamicStateExtensionName = VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexInputDynamicStateSpecVersion = VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_SPEC_VERSION; + + //=== VK_EXT_physical_device_drm === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPhysicalDeviceDrmExtensionName = VK_EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPhysicalDeviceDrmSpecVersion = VK_EXT_PHYSICAL_DEVICE_DRM_SPEC_VERSION; + + //=== VK_EXT_device_address_binding_report === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceAddressBindingReportExtensionName = VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceAddressBindingReportSpecVersion = VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_SPEC_VERSION; + + //=== VK_EXT_depth_clip_control === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClipControlExtensionName = VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClipControlSpecVersion = VK_EXT_DEPTH_CLIP_CONTROL_SPEC_VERSION; + + //=== VK_EXT_primitive_topology_list_restart === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrimitiveTopologyListRestartExtensionName = VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrimitiveTopologyListRestartSpecVersion = VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_SPEC_VERSION; + + //=== VK_KHR_format_feature_flags2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRFormatFeatureFlags2ExtensionName = VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRFormatFeatureFlags2SpecVersion = VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION; + + //=== VK_EXT_present_mode_fifo_latest_ready === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPresentModeFifoLatestReadyExtensionName = VK_EXT_PRESENT_MODE_FIFO_LATEST_READY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPresentModeFifoLatestReadySpecVersion = VK_EXT_PRESENT_MODE_FIFO_LATEST_READY_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIAExternalMemoryExtensionName = VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIAExternalMemorySpecVersion = VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIAExternalSemaphoreExtensionName = VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIAExternalSemaphoreSpecVersion = VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIABufferCollectionExtensionName = VK_FUCHSIA_BUFFER_COLLECTION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIABufferCollectionSpecVersion = VK_FUCHSIA_BUFFER_COLLECTION_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEISubpassShadingExtensionName = VK_HUAWEI_SUBPASS_SHADING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEISubpassShadingSpecVersion = VK_HUAWEI_SUBPASS_SHADING_SPEC_VERSION; + + //=== 
VK_HUAWEI_invocation_mask === + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIInvocationMaskExtensionName = VK_HUAWEI_INVOCATION_MASK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIInvocationMaskSpecVersion = VK_HUAWEI_INVOCATION_MASK_SPEC_VERSION; + + //=== VK_NV_external_memory_rdma === + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryRdmaExtensionName = VK_NV_EXTERNAL_MEMORY_RDMA_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryRdmaSpecVersion = VK_NV_EXTERNAL_MEMORY_RDMA_SPEC_VERSION; + + //=== VK_EXT_pipeline_properties === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelinePropertiesExtensionName = VK_EXT_PIPELINE_PROPERTIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelinePropertiesSpecVersion = VK_EXT_PIPELINE_PROPERTIES_SPEC_VERSION; + + //=== VK_EXT_frame_boundary === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFrameBoundaryExtensionName = VK_EXT_FRAME_BOUNDARY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFrameBoundarySpecVersion = VK_EXT_FRAME_BOUNDARY_SPEC_VERSION; + + //=== VK_EXT_multisampled_render_to_single_sampled === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMultisampledRenderToSingleSampledExtensionName = VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMultisampledRenderToSingleSampledSpecVersion = VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_SPEC_VERSION; + + //=== VK_EXT_extended_dynamic_state2 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicState2ExtensionName = VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicState2SpecVersion = VK_EXT_EXTENDED_DYNAMIC_STATE_2_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto QNXScreenSurfaceExtensionName = VK_QNX_SCREEN_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QNXScreenSurfaceSpecVersion = VK_QNX_SCREEN_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTColorWriteEnableExtensionName = VK_EXT_COLOR_WRITE_ENABLE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTColorWriteEnableSpecVersion = VK_EXT_COLOR_WRITE_ENABLE_SPEC_VERSION; + + //=== VK_EXT_primitives_generated_query === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrimitivesGeneratedQueryExtensionName = VK_EXT_PRIMITIVES_GENERATED_QUERY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrimitivesGeneratedQuerySpecVersion = VK_EXT_PRIMITIVES_GENERATED_QUERY_SPEC_VERSION; + + //=== VK_KHR_ray_tracing_maintenance1 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingMaintenance1ExtensionName = VK_KHR_RAY_TRACING_MAINTENANCE_1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingMaintenance1SpecVersion = VK_KHR_RAY_TRACING_MAINTENANCE_1_SPEC_VERSION; + + //=== VK_EXT_global_priority_query === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPriorityQueryExtensionName = VK_EXT_GLOBAL_PRIORITY_QUERY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPriorityQuerySpecVersion = VK_EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION; + + //=== VK_EXT_image_view_min_lod === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageViewMinLodExtensionName = VK_EXT_IMAGE_VIEW_MIN_LOD_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageViewMinLodSpecVersion = VK_EXT_IMAGE_VIEW_MIN_LOD_SPEC_VERSION; + + //=== VK_EXT_multi_draw === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMultiDrawExtensionName = VK_EXT_MULTI_DRAW_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto 
EXTMultiDrawSpecVersion = VK_EXT_MULTI_DRAW_SPEC_VERSION; + + //=== VK_EXT_image_2d_view_of_3d === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImage2DViewOf3DExtensionName = VK_EXT_IMAGE_2D_VIEW_OF_3D_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImage2DViewOf3DSpecVersion = VK_EXT_IMAGE_2D_VIEW_OF_3D_SPEC_VERSION; + + //=== VK_KHR_portability_enumeration === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPortabilityEnumerationExtensionName = VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPortabilityEnumerationSpecVersion = VK_KHR_PORTABILITY_ENUMERATION_SPEC_VERSION; + + //=== VK_EXT_shader_tile_image === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderTileImageExtensionName = VK_EXT_SHADER_TILE_IMAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderTileImageSpecVersion = VK_EXT_SHADER_TILE_IMAGE_SPEC_VERSION; + + //=== VK_EXT_opacity_micromap === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTOpacityMicromapExtensionName = VK_EXT_OPACITY_MICROMAP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTOpacityMicromapSpecVersion = VK_EXT_OPACITY_MICROMAP_SPEC_VERSION; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === + VULKAN_HPP_DEPRECATED( "The VK_NV_displacement_micromap extension has been deprecated by VK_NV_cluster_acceleration_structure." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVDisplacementMicromapExtensionName = VK_NV_DISPLACEMENT_MICROMAP_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_displacement_micromap extension has been deprecated by VK_NV_cluster_acceleration_structure." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVDisplacementMicromapSpecVersion = VK_NV_DISPLACEMENT_MICROMAP_SPEC_VERSION; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_load_store_op_none === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLoadStoreOpNoneExtensionName = VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLoadStoreOpNoneSpecVersion = VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION; + + //=== VK_HUAWEI_cluster_culling_shader === + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIClusterCullingShaderExtensionName = VK_HUAWEI_CLUSTER_CULLING_SHADER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIClusterCullingShaderSpecVersion = VK_HUAWEI_CLUSTER_CULLING_SHADER_SPEC_VERSION; + + //=== VK_EXT_border_color_swizzle === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTBorderColorSwizzleExtensionName = VK_EXT_BORDER_COLOR_SWIZZLE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTBorderColorSwizzleSpecVersion = VK_EXT_BORDER_COLOR_SWIZZLE_SPEC_VERSION; + + //=== VK_EXT_pageable_device_local_memory === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPageableDeviceLocalMemoryExtensionName = VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPageableDeviceLocalMemorySpecVersion = VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_SPEC_VERSION; + + //=== VK_KHR_maintenance4 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance4ExtensionName = VK_KHR_MAINTENANCE_4_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance4SpecVersion = VK_KHR_MAINTENANCE_4_SPEC_VERSION; + + //=== VK_ARM_shader_core_properties === + VULKAN_HPP_CONSTEXPR_INLINE auto ARMShaderCorePropertiesExtensionName = VK_ARM_SHADER_CORE_PROPERTIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ARMShaderCorePropertiesSpecVersion = VK_ARM_SHADER_CORE_PROPERTIES_SPEC_VERSION; + + //=== VK_KHR_shader_subgroup_rotate === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupRotateExtensionName = VK_KHR_SHADER_SUBGROUP_ROTATE_EXTENSION_NAME; + 
VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupRotateSpecVersion = VK_KHR_SHADER_SUBGROUP_ROTATE_SPEC_VERSION; + + //=== VK_ARM_scheduling_controls === + VULKAN_HPP_CONSTEXPR_INLINE auto ARMSchedulingControlsExtensionName = VK_ARM_SCHEDULING_CONTROLS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ARMSchedulingControlsSpecVersion = VK_ARM_SCHEDULING_CONTROLS_SPEC_VERSION; + + //=== VK_EXT_image_sliced_view_of_3d === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageSlicedViewOf3DExtensionName = VK_EXT_IMAGE_SLICED_VIEW_OF_3D_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageSlicedViewOf3DSpecVersion = VK_EXT_IMAGE_SLICED_VIEW_OF_3D_SPEC_VERSION; + + //=== VK_VALVE_descriptor_set_host_mapping === + VULKAN_HPP_CONSTEXPR_INLINE auto VALVEDescriptorSetHostMappingExtensionName = VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto VALVEDescriptorSetHostMappingSpecVersion = VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_SPEC_VERSION; + + //=== VK_EXT_depth_clamp_zero_one === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClampZeroOneExtensionName = VK_EXT_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClampZeroOneSpecVersion = VK_EXT_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION; + + //=== VK_EXT_non_seamless_cube_map === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTNonSeamlessCubeMapExtensionName = VK_EXT_NON_SEAMLESS_CUBE_MAP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTNonSeamlessCubeMapSpecVersion = VK_EXT_NON_SEAMLESS_CUBE_MAP_SPEC_VERSION; + + //=== VK_ARM_render_pass_striped === + VULKAN_HPP_CONSTEXPR_INLINE auto ARMRenderPassStripedExtensionName = VK_ARM_RENDER_PASS_STRIPED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ARMRenderPassStripedSpecVersion = VK_ARM_RENDER_PASS_STRIPED_SPEC_VERSION; + + //=== VK_QCOM_fragment_density_map_offset === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMFragmentDensityMapOffsetExtensionName = VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMFragmentDensityMapOffsetSpecVersion = VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION; + + //=== VK_NV_copy_memory_indirect === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCopyMemoryIndirectExtensionName = VK_NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCopyMemoryIndirectSpecVersion = VK_NV_COPY_MEMORY_INDIRECT_SPEC_VERSION; + + //=== VK_NV_memory_decompression === + VULKAN_HPP_CONSTEXPR_INLINE auto NVMemoryDecompressionExtensionName = VK_NV_MEMORY_DECOMPRESSION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVMemoryDecompressionSpecVersion = VK_NV_MEMORY_DECOMPRESSION_SPEC_VERSION; + + //=== VK_NV_device_generated_commands_compute === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceGeneratedCommandsComputeExtensionName = VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceGeneratedCommandsComputeSpecVersion = VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_SPEC_VERSION; + + //=== VK_NV_ray_tracing_linear_swept_spheres === + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingLinearSweptSpheresExtensionName = VK_NV_RAY_TRACING_LINEAR_SWEPT_SPHERES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingLinearSweptSpheresSpecVersion = VK_NV_RAY_TRACING_LINEAR_SWEPT_SPHERES_SPEC_VERSION; + + //=== VK_NV_linear_color_attachment === + VULKAN_HPP_CONSTEXPR_INLINE auto NVLinearColorAttachmentExtensionName = VK_NV_LINEAR_COLOR_ATTACHMENT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVLinearColorAttachmentSpecVersion = VK_NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION; + + 
//=== VK_GOOGLE_surfaceless_query === + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLESurfacelessQueryExtensionName = VK_GOOGLE_SURFACELESS_QUERY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLESurfacelessQuerySpecVersion = VK_GOOGLE_SURFACELESS_QUERY_SPEC_VERSION; + + //=== VK_KHR_shader_maximal_reconvergence === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderMaximalReconvergenceExtensionName = VK_KHR_SHADER_MAXIMAL_RECONVERGENCE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderMaximalReconvergenceSpecVersion = VK_KHR_SHADER_MAXIMAL_RECONVERGENCE_SPEC_VERSION; + + //=== VK_EXT_image_compression_control_swapchain === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageCompressionControlSwapchainExtensionName = VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageCompressionControlSwapchainSpecVersion = VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_SPEC_VERSION; + + //=== VK_QCOM_image_processing === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMImageProcessingExtensionName = VK_QCOM_IMAGE_PROCESSING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMImageProcessingSpecVersion = VK_QCOM_IMAGE_PROCESSING_SPEC_VERSION; + + //=== VK_EXT_nested_command_buffer === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTNestedCommandBufferExtensionName = VK_EXT_NESTED_COMMAND_BUFFER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTNestedCommandBufferSpecVersion = VK_EXT_NESTED_COMMAND_BUFFER_SPEC_VERSION; + + //=== VK_EXT_external_memory_acquire_unmodified === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryAcquireUnmodifiedExtensionName = VK_EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryAcquireUnmodifiedSpecVersion = VK_EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_SPEC_VERSION; + + //=== VK_EXT_extended_dynamic_state3 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicState3ExtensionName = VK_EXT_EXTENDED_DYNAMIC_STATE_3_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicState3SpecVersion = VK_EXT_EXTENDED_DYNAMIC_STATE_3_SPEC_VERSION; + + //=== VK_EXT_subpass_merge_feedback === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSubpassMergeFeedbackExtensionName = VK_EXT_SUBPASS_MERGE_FEEDBACK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSubpassMergeFeedbackSpecVersion = VK_EXT_SUBPASS_MERGE_FEEDBACK_SPEC_VERSION; + + //=== VK_LUNARG_direct_driver_loading === + VULKAN_HPP_CONSTEXPR_INLINE auto LUNARGDirectDriverLoadingExtensionName = VK_LUNARG_DIRECT_DRIVER_LOADING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto LUNARGDirectDriverLoadingSpecVersion = VK_LUNARG_DIRECT_DRIVER_LOADING_SPEC_VERSION; + + //=== VK_EXT_shader_module_identifier === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderModuleIdentifierExtensionName = VK_EXT_SHADER_MODULE_IDENTIFIER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderModuleIdentifierSpecVersion = VK_EXT_SHADER_MODULE_IDENTIFIER_SPEC_VERSION; + + //=== VK_EXT_rasterization_order_attachment_access === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTRasterizationOrderAttachmentAccessExtensionName = VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTRasterizationOrderAttachmentAccessSpecVersion = VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION; + + //=== VK_NV_optical_flow === + VULKAN_HPP_CONSTEXPR_INLINE auto NVOpticalFlowExtensionName = VK_NV_OPTICAL_FLOW_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVOpticalFlowSpecVersion = VK_NV_OPTICAL_FLOW_SPEC_VERSION; + + //=== VK_EXT_legacy_dithering 
=== + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLegacyDitheringExtensionName = VK_EXT_LEGACY_DITHERING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLegacyDitheringSpecVersion = VK_EXT_LEGACY_DITHERING_SPEC_VERSION; + + //=== VK_EXT_pipeline_protected_access === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineProtectedAccessExtensionName = VK_EXT_PIPELINE_PROTECTED_ACCESS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineProtectedAccessSpecVersion = VK_EXT_PIPELINE_PROTECTED_ACCESS_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_format_resolve === + VULKAN_HPP_CONSTEXPR_INLINE auto ANDROIDExternalFormatResolveExtensionName = VK_ANDROID_EXTERNAL_FORMAT_RESOLVE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ANDROIDExternalFormatResolveSpecVersion = VK_ANDROID_EXTERNAL_FORMAT_RESOLVE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_KHR_maintenance5 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance5ExtensionName = VK_KHR_MAINTENANCE_5_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance5SpecVersion = VK_KHR_MAINTENANCE_5_SPEC_VERSION; + + //=== VK_AMD_anti_lag === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDAntiLagExtensionName = VK_AMD_ANTI_LAG_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDAntiLagSpecVersion = VK_AMD_ANTI_LAG_SPEC_VERSION; + + //=== VK_KHR_ray_tracing_position_fetch === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingPositionFetchExtensionName = VK_KHR_RAY_TRACING_POSITION_FETCH_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingPositionFetchSpecVersion = VK_KHR_RAY_TRACING_POSITION_FETCH_SPEC_VERSION; + + //=== VK_EXT_shader_object === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderObjectExtensionName = VK_EXT_SHADER_OBJECT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderObjectSpecVersion = VK_EXT_SHADER_OBJECT_SPEC_VERSION; + + //=== VK_KHR_pipeline_binary === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPipelineBinaryExtensionName = VK_KHR_PIPELINE_BINARY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPipelineBinarySpecVersion = VK_KHR_PIPELINE_BINARY_SPEC_VERSION; + + //=== VK_QCOM_tile_properties === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMTilePropertiesExtensionName = VK_QCOM_TILE_PROPERTIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMTilePropertiesSpecVersion = VK_QCOM_TILE_PROPERTIES_SPEC_VERSION; + + //=== VK_SEC_amigo_profiling === + VULKAN_HPP_CONSTEXPR_INLINE auto SECAmigoProfilingExtensionName = VK_SEC_AMIGO_PROFILING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto SECAmigoProfilingSpecVersion = VK_SEC_AMIGO_PROFILING_SPEC_VERSION; + + //=== VK_QCOM_multiview_per_view_viewports === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMMultiviewPerViewViewportsExtensionName = VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMMultiviewPerViewViewportsSpecVersion = VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_SPEC_VERSION; + + //=== VK_NV_ray_tracing_invocation_reorder === + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingInvocationReorderExtensionName = VK_NV_RAY_TRACING_INVOCATION_REORDER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingInvocationReorderSpecVersion = VK_NV_RAY_TRACING_INVOCATION_REORDER_SPEC_VERSION; + + //=== VK_NV_cooperative_vector === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeVectorExtensionName = VK_NV_COOPERATIVE_VECTOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeVectorSpecVersion = VK_NV_COOPERATIVE_VECTOR_SPEC_VERSION; + + //=== 
VK_NV_extended_sparse_address_space === + VULKAN_HPP_CONSTEXPR_INLINE auto NVExtendedSparseAddressSpaceExtensionName = VK_NV_EXTENDED_SPARSE_ADDRESS_SPACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVExtendedSparseAddressSpaceSpecVersion = VK_NV_EXTENDED_SPARSE_ADDRESS_SPACE_SPEC_VERSION; + + //=== VK_EXT_mutable_descriptor_type === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMutableDescriptorTypeExtensionName = VK_EXT_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMutableDescriptorTypeSpecVersion = VK_EXT_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION; + + //=== VK_EXT_legacy_vertex_attributes === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLegacyVertexAttributesExtensionName = VK_EXT_LEGACY_VERTEX_ATTRIBUTES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLegacyVertexAttributesSpecVersion = VK_EXT_LEGACY_VERTEX_ATTRIBUTES_SPEC_VERSION; + + //=== VK_EXT_layer_settings === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLayerSettingsExtensionName = VK_EXT_LAYER_SETTINGS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLayerSettingsSpecVersion = VK_EXT_LAYER_SETTINGS_SPEC_VERSION; + + //=== VK_ARM_shader_core_builtins === + VULKAN_HPP_CONSTEXPR_INLINE auto ARMShaderCoreBuiltinsExtensionName = VK_ARM_SHADER_CORE_BUILTINS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ARMShaderCoreBuiltinsSpecVersion = VK_ARM_SHADER_CORE_BUILTINS_SPEC_VERSION; + + //=== VK_EXT_pipeline_library_group_handles === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineLibraryGroupHandlesExtensionName = VK_EXT_PIPELINE_LIBRARY_GROUP_HANDLES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineLibraryGroupHandlesSpecVersion = VK_EXT_PIPELINE_LIBRARY_GROUP_HANDLES_SPEC_VERSION; + + //=== VK_EXT_dynamic_rendering_unused_attachments === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDynamicRenderingUnusedAttachmentsExtensionName = VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDynamicRenderingUnusedAttachmentsSpecVersion = VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_SPEC_VERSION; + + //=== VK_NV_low_latency2 === + VULKAN_HPP_CONSTEXPR_INLINE auto NVLowLatency2ExtensionName = VK_NV_LOW_LATENCY_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVLowLatency2SpecVersion = VK_NV_LOW_LATENCY_2_SPEC_VERSION; + + //=== VK_KHR_cooperative_matrix === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCooperativeMatrixExtensionName = VK_KHR_COOPERATIVE_MATRIX_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCooperativeMatrixSpecVersion = VK_KHR_COOPERATIVE_MATRIX_SPEC_VERSION; + + //=== VK_QCOM_multiview_per_view_render_areas === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMMultiviewPerViewRenderAreasExtensionName = VK_QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMMultiviewPerViewRenderAreasSpecVersion = VK_QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_SPEC_VERSION; + + //=== VK_KHR_compute_shader_derivatives === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRComputeShaderDerivativesExtensionName = VK_KHR_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRComputeShaderDerivativesSpecVersion = VK_KHR_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION; + + //=== VK_KHR_video_decode_av1 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeAv1ExtensionName = VK_KHR_VIDEO_DECODE_AV1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeAv1SpecVersion = VK_KHR_VIDEO_DECODE_AV1_SPEC_VERSION; + + //=== VK_KHR_video_encode_av1 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeAv1ExtensionName = 
VK_KHR_VIDEO_ENCODE_AV1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeAv1SpecVersion = VK_KHR_VIDEO_ENCODE_AV1_SPEC_VERSION; + + //=== VK_KHR_video_maintenance1 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoMaintenance1ExtensionName = VK_KHR_VIDEO_MAINTENANCE_1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoMaintenance1SpecVersion = VK_KHR_VIDEO_MAINTENANCE_1_SPEC_VERSION; + + //=== VK_NV_per_stage_descriptor_set === + VULKAN_HPP_CONSTEXPR_INLINE auto NVPerStageDescriptorSetExtensionName = VK_NV_PER_STAGE_DESCRIPTOR_SET_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVPerStageDescriptorSetSpecVersion = VK_NV_PER_STAGE_DESCRIPTOR_SET_SPEC_VERSION; + + //=== VK_QCOM_image_processing2 === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMImageProcessing2ExtensionName = VK_QCOM_IMAGE_PROCESSING_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMImageProcessing2SpecVersion = VK_QCOM_IMAGE_PROCESSING_2_SPEC_VERSION; + + //=== VK_QCOM_filter_cubic_weights === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMFilterCubicWeightsExtensionName = VK_QCOM_FILTER_CUBIC_WEIGHTS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMFilterCubicWeightsSpecVersion = VK_QCOM_FILTER_CUBIC_WEIGHTS_SPEC_VERSION; + + //=== VK_QCOM_ycbcr_degamma === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMYcbcrDegammaExtensionName = VK_QCOM_YCBCR_DEGAMMA_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMYcbcrDegammaSpecVersion = VK_QCOM_YCBCR_DEGAMMA_SPEC_VERSION; + + //=== VK_QCOM_filter_cubic_clamp === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMFilterCubicClampExtensionName = VK_QCOM_FILTER_CUBIC_CLAMP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMFilterCubicClampSpecVersion = VK_QCOM_FILTER_CUBIC_CLAMP_SPEC_VERSION; + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAttachmentFeedbackLoopDynamicStateExtensionName = VK_EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAttachmentFeedbackLoopDynamicStateSpecVersion = VK_EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_SPEC_VERSION; + + //=== VK_KHR_vertex_attribute_divisor === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVertexAttributeDivisorExtensionName = VK_KHR_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVertexAttributeDivisorSpecVersion = VK_KHR_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION; + + //=== VK_KHR_load_store_op_none === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRLoadStoreOpNoneExtensionName = VK_KHR_LOAD_STORE_OP_NONE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRLoadStoreOpNoneSpecVersion = VK_KHR_LOAD_STORE_OP_NONE_SPEC_VERSION; + + //=== VK_KHR_shader_float_controls2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloatControls2ExtensionName = VK_KHR_SHADER_FLOAT_CONTROLS_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloatControls2SpecVersion = VK_KHR_SHADER_FLOAT_CONTROLS_2_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + VULKAN_HPP_CONSTEXPR_INLINE auto QNXExternalMemoryScreenBufferExtensionName = VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QNXExternalMemoryScreenBufferSpecVersion = VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_MSFT_layered_driver === + VULKAN_HPP_CONSTEXPR_INLINE auto MSFTLayeredDriverExtensionName = VK_MSFT_LAYERED_DRIVER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto MSFTLayeredDriverSpecVersion = 
VK_MSFT_LAYERED_DRIVER_SPEC_VERSION; + + //=== VK_KHR_index_type_uint8 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRIndexTypeUint8ExtensionName = VK_KHR_INDEX_TYPE_UINT8_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRIndexTypeUint8SpecVersion = VK_KHR_INDEX_TYPE_UINT8_SPEC_VERSION; + + //=== VK_KHR_line_rasterization === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRLineRasterizationExtensionName = VK_KHR_LINE_RASTERIZATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRLineRasterizationSpecVersion = VK_KHR_LINE_RASTERIZATION_SPEC_VERSION; + + //=== VK_KHR_calibrated_timestamps === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCalibratedTimestampsExtensionName = VK_KHR_CALIBRATED_TIMESTAMPS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCalibratedTimestampsSpecVersion = VK_KHR_CALIBRATED_TIMESTAMPS_SPEC_VERSION; + + //=== VK_KHR_shader_expect_assume === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderExpectAssumeExtensionName = VK_KHR_SHADER_EXPECT_ASSUME_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderExpectAssumeSpecVersion = VK_KHR_SHADER_EXPECT_ASSUME_SPEC_VERSION; + + //=== VK_KHR_maintenance6 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance6ExtensionName = VK_KHR_MAINTENANCE_6_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance6SpecVersion = VK_KHR_MAINTENANCE_6_SPEC_VERSION; + + //=== VK_NV_descriptor_pool_overallocation === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDescriptorPoolOverallocationExtensionName = VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDescriptorPoolOverallocationSpecVersion = VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_SPEC_VERSION; + + //=== VK_QCOM_tile_memory_heap === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMTileMemoryHeapExtensionName = VK_QCOM_TILE_MEMORY_HEAP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMTileMemoryHeapSpecVersion = VK_QCOM_TILE_MEMORY_HEAP_SPEC_VERSION; + + //=== VK_NV_display_stereo === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDisplayStereoExtensionName = VK_NV_DISPLAY_STEREO_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDisplayStereoSpecVersion = VK_NV_DISPLAY_STEREO_SPEC_VERSION; + + //=== VK_KHR_video_encode_quantization_map === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeQuantizationMapExtensionName = VK_KHR_VIDEO_ENCODE_QUANTIZATION_MAP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeQuantizationMapSpecVersion = VK_KHR_VIDEO_ENCODE_QUANTIZATION_MAP_SPEC_VERSION; + + //=== VK_NV_raw_access_chains === + VULKAN_HPP_CONSTEXPR_INLINE auto NVRawAccessChainsExtensionName = VK_NV_RAW_ACCESS_CHAINS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVRawAccessChainsSpecVersion = VK_NV_RAW_ACCESS_CHAINS_SPEC_VERSION; + + //=== VK_NV_external_compute_queue === + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalComputeQueueExtensionName = VK_NV_EXTERNAL_COMPUTE_QUEUE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalComputeQueueSpecVersion = VK_NV_EXTERNAL_COMPUTE_QUEUE_SPEC_VERSION; + + //=== VK_KHR_shader_relaxed_extended_instruction === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderRelaxedExtendedInstructionExtensionName = VK_KHR_SHADER_RELAXED_EXTENDED_INSTRUCTION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderRelaxedExtendedInstructionSpecVersion = VK_KHR_SHADER_RELAXED_EXTENDED_INSTRUCTION_SPEC_VERSION; + + //=== VK_NV_command_buffer_inheritance === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCommandBufferInheritanceExtensionName = VK_NV_COMMAND_BUFFER_INHERITANCE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto 
NVCommandBufferInheritanceSpecVersion = VK_NV_COMMAND_BUFFER_INHERITANCE_SPEC_VERSION; + + //=== VK_KHR_maintenance7 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance7ExtensionName = VK_KHR_MAINTENANCE_7_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance7SpecVersion = VK_KHR_MAINTENANCE_7_SPEC_VERSION; + + //=== VK_NV_shader_atomic_float16_vector === + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderAtomicFloat16VectorExtensionName = VK_NV_SHADER_ATOMIC_FLOAT16_VECTOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderAtomicFloat16VectorSpecVersion = VK_NV_SHADER_ATOMIC_FLOAT16_VECTOR_SPEC_VERSION; + + //=== VK_EXT_shader_replicated_composites === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderReplicatedCompositesExtensionName = VK_EXT_SHADER_REPLICATED_COMPOSITES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderReplicatedCompositesSpecVersion = VK_EXT_SHADER_REPLICATED_COMPOSITES_SPEC_VERSION; + + //=== VK_NV_ray_tracing_validation === + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingValidationExtensionName = VK_NV_RAY_TRACING_VALIDATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingValidationSpecVersion = VK_NV_RAY_TRACING_VALIDATION_SPEC_VERSION; + + //=== VK_NV_cluster_acceleration_structure === + VULKAN_HPP_CONSTEXPR_INLINE auto NVClusterAccelerationStructureExtensionName = VK_NV_CLUSTER_ACCELERATION_STRUCTURE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVClusterAccelerationStructureSpecVersion = VK_NV_CLUSTER_ACCELERATION_STRUCTURE_SPEC_VERSION; + + //=== VK_NV_partitioned_acceleration_structure === + VULKAN_HPP_CONSTEXPR_INLINE auto NVPartitionedAccelerationStructureExtensionName = VK_NV_PARTITIONED_ACCELERATION_STRUCTURE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVPartitionedAccelerationStructureSpecVersion = VK_NV_PARTITIONED_ACCELERATION_STRUCTURE_SPEC_VERSION; + + //=== VK_EXT_device_generated_commands === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceGeneratedCommandsExtensionName = VK_EXT_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceGeneratedCommandsSpecVersion = VK_EXT_DEVICE_GENERATED_COMMANDS_SPEC_VERSION; + + //=== VK_KHR_maintenance8 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance8ExtensionName = VK_KHR_MAINTENANCE_8_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance8SpecVersion = VK_KHR_MAINTENANCE_8_SPEC_VERSION; + + //=== VK_MESA_image_alignment_control === + VULKAN_HPP_CONSTEXPR_INLINE auto MESAImageAlignmentControlExtensionName = VK_MESA_IMAGE_ALIGNMENT_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto MESAImageAlignmentControlSpecVersion = VK_MESA_IMAGE_ALIGNMENT_CONTROL_SPEC_VERSION; + + //=== VK_EXT_depth_clamp_control === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClampControlExtensionName = VK_EXT_DEPTH_CLAMP_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClampControlSpecVersion = VK_EXT_DEPTH_CLAMP_CONTROL_SPEC_VERSION; + + //=== VK_KHR_video_maintenance2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoMaintenance2ExtensionName = VK_KHR_VIDEO_MAINTENANCE_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoMaintenance2SpecVersion = VK_KHR_VIDEO_MAINTENANCE_2_SPEC_VERSION; + + //=== VK_HUAWEI_hdr_vivid === + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIHdrVividExtensionName = VK_HUAWEI_HDR_VIVID_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIHdrVividSpecVersion = VK_HUAWEI_HDR_VIVID_SPEC_VERSION; + + //=== VK_NV_cooperative_matrix2 === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeMatrix2ExtensionName = 
VK_NV_COOPERATIVE_MATRIX_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeMatrix2SpecVersion = VK_NV_COOPERATIVE_MATRIX_2_SPEC_VERSION; + + //=== VK_ARM_pipeline_opacity_micromap === + VULKAN_HPP_CONSTEXPR_INLINE auto ARMPipelineOpacityMicromapExtensionName = VK_ARM_PIPELINE_OPACITY_MICROMAP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ARMPipelineOpacityMicromapSpecVersion = VK_ARM_PIPELINE_OPACITY_MICROMAP_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryMetalExtensionName = VK_EXT_EXTERNAL_MEMORY_METAL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryMetalSpecVersion = VK_EXT_EXTERNAL_MEMORY_METAL_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_depth_clamp_zero_one === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDepthClampZeroOneExtensionName = VK_KHR_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDepthClampZeroOneSpecVersion = VK_KHR_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION; + + //=== VK_EXT_vertex_attribute_robustness === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexAttributeRobustnessExtensionName = VK_EXT_VERTEX_ATTRIBUTE_ROBUSTNESS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexAttributeRobustnessSpecVersion = VK_EXT_VERTEX_ATTRIBUTE_ROBUSTNESS_SPEC_VERSION; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_present_metering === + VULKAN_HPP_CONSTEXPR_INLINE auto NVPresentMeteringExtensionName = VK_NV_PRESENT_METERING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVPresentMeteringSpecVersion = VK_NV_PRESENT_METERING_SPEC_VERSION; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_fragment_density_map_offset === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentDensityMapOffsetExtensionName = VK_EXT_FRAGMENT_DENSITY_MAP_OFFSET_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentDensityMapOffsetSpecVersion = VK_EXT_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION; + +} // namespace VULKAN_HPP_NAMESPACE + +// clang-format off +#include +#include +#include + +// clang-format on + +namespace VULKAN_HPP_NAMESPACE +{ +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + + //======================= + //=== STRUCTS EXTENDS === + //======================= + + //=== VK_VERSION_1_0 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_VERSION_1_1 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + 
struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_VERSION_1_2 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true 
+    };
+  };
+
  [The remainder of this hunk repeats a single compile-time trait over and over: for every extension structure that may legally appear in the pNext chain of a base structure, the header defines a specialization of the form

      template <>
      struct StructExtends<ExtensionStruct, BaseStruct>
      {
        enum
        {
          value = true
        };
      };

  where ExtensionStruct and BaseStruct stand for the concrete structure pair of each block. The specializations are grouped under the following section markers, in order of appearance; groups annotated with a platform or beta macro are wrapped in the matching "#if defined( ... )" / "#endif" guard:

      VK_VERSION_1_3 (one block guarded by VK_ENABLE_BETA_EXTENSIONS), VK_VERSION_1_4,
      VK_KHR_swapchain, VK_KHR_display_swapchain, VK_EXT_debug_report, VK_AMD_rasterization_order,
      VK_KHR_video_queue, VK_KHR_video_decode_queue, VK_NV_dedicated_allocation, VK_EXT_transform_feedback,
      VK_NVX_binary_import, VK_KHR_video_encode_h264, VK_KHR_video_encode_h265, VK_KHR_video_decode_h264,
      VK_AMD_texture_gather_bias_lod, VK_NV_corner_sampled_image, VK_NV_external_memory,
      VK_NV_external_memory_win32 (VK_USE_PLATFORM_WIN32_KHR), VK_NV_win32_keyed_mutex (VK_USE_PLATFORM_WIN32_KHR),
      VK_EXT_validation_flags, VK_EXT_astc_decode_mode, VK_KHR_external_memory_win32 (VK_USE_PLATFORM_WIN32_KHR),
      VK_KHR_external_memory_fd, VK_KHR_win32_keyed_mutex (VK_USE_PLATFORM_WIN32_KHR),
      VK_KHR_external_semaphore_win32 (VK_USE_PLATFORM_WIN32_KHR), VK_EXT_conditional_rendering,
      VK_KHR_incremental_present, VK_NV_clip_space_w_scaling, VK_EXT_display_control, VK_GOOGLE_display_timing,
      VK_NVX_multiview_per_view_attributes, VK_NV_viewport_swizzle, VK_EXT_discard_rectangles,
      VK_EXT_conservative_rasterization, VK_EXT_depth_clip_enable, VK_IMG_relaxed_line_rasterization,
      VK_KHR_shared_presentable_image, VK_KHR_external_fence_win32 (VK_USE_PLATFORM_WIN32_KHR),
      VK_KHR_performance_query, VK_EXT_debug_utils,
      VK_ANDROID_external_memory_android_hardware_buffer (VK_USE_PLATFORM_ANDROID_KHR),
      VK_AMDX_shader_enqueue (VK_ENABLE_BETA_EXTENSIONS), VK_AMD_mixed_attachment_samples, VK_KHR_shader_bfloat16,
      VK_EXT_sample_locations, VK_EXT_blend_operation_advanced, VK_NV_fragment_coverage_to_color,
      VK_KHR_acceleration_structure, VK_KHR_ray_tracing_pipeline, VK_KHR_ray_query, VK_NV_framebuffer_mixed_samples,
      VK_NV_shader_sm_builtins, VK_EXT_image_drm_format_modifier, VK_EXT_validation_cache,
      VK_KHR_portability_subset (VK_ENABLE_BETA_EXTENSIONS), VK_NV_shading_rate_image, VK_NV_ray_tracing,
      VK_NV_representative_fragment_test, VK_EXT_filter_cubic, VK_EXT_external_memory_host, VK_KHR_shader_clock,
      VK_AMD_pipeline_compiler_control (one block guarded by VK_ENABLE_BETA_EXTENSIONS), VK_AMD_shader_core_properties,
      VK_KHR_video_decode_h265, VK_AMD_memory_overallocation_behavior, VK_EXT_vertex_attribute_divisor,
      VK_GGP_frame_token (VK_USE_PLATFORM_GGP), VK_NV_mesh_shader, VK_NV_shader_image_footprint,
      VK_NV_scissor_exclusive, VK_NV_device_diagnostic_checkpoints, VK_INTEL_shader_integer_functions2,
      VK_INTEL_performance_query, VK_EXT_pci_bus_info, VK_AMD_display_native_hdr, VK_EXT_fragment_density_map,
      VK_KHR_fragment_shading_rate, VK_AMD_shader_core_properties2, VK_AMD_device_coherent_memory,
      VK_EXT_shader_image_atomic_int64, VK_KHR_shader_quad_control, VK_EXT_memory_budget, VK_EXT_memory_priority,
      VK_KHR_surface_protected_capabilities, VK_NV_dedicated_allocation_image_aliasing, VK_EXT_buffer_device_address,
      VK_EXT_validation_features, VK_KHR_present_wait, VK_NV_cooperative_matrix, VK_NV_coverage_reduction_mode,
      VK_EXT_fragment_shader_interlock, VK_EXT_ycbcr_image_arrays, VK_EXT_provoking_vertex,
      VK_EXT_full_screen_exclusive (VK_USE_PLATFORM_WIN32_KHR), VK_EXT_shader_atomic_float,
      VK_EXT_extended_dynamic_state, VK_KHR_pipeline_executable_properties, VK_EXT_map_memory_placed,
      VK_EXT_shader_atomic_float2, VK_EXT_surface_maintenance1, VK_EXT_swapchain_maintenance1,
      VK_NV_device_generated_commands, VK_NV_inherited_viewport_scissor, VK_EXT_texel_buffer_alignment,
      VK_QCOM_render_pass_transform, VK_EXT_depth_bias_control, VK_EXT_device_memory_report, VK_EXT_robustness2,
      VK_EXT_custom_border_color, VK_KHR_pipeline_library, VK_NV_present_barrier, VK_KHR_present_id,
      VK_KHR_video_encode_queue, VK_NV_device_diagnostics_config, VK_NV_cuda_kernel_launch (VK_ENABLE_BETA_EXTENSIONS),
      VK_QCOM_tile_shading, VK_NV_low_latency, VK_EXT_metal_objects (VK_USE_PLATFORM_METAL_EXT),
      VK_EXT_descriptor_buffer, VK_EXT_graphics_pipeline_library, VK_AMD_shader_early_and_late_fragment_tests,
      VK_KHR_fragment_shader_barycentric, VK_KHR_shader_subgroup_uniform_control_flow,
      VK_NV_fragment_shading_rate_enums, VK_NV_ray_tracing_motion_blur, VK_EXT_mesh_shader,
      VK_EXT_ycbcr_2plane_444_formats, VK_EXT_fragment_density_map2, VK_QCOM_rotated_copy_commands,
      VK_KHR_workgroup_memory_explicit_layout, VK_EXT_image_compression_control,
      VK_EXT_attachment_feedback_loop_layout, VK_EXT_4444_formats, VK_EXT_device_fault, VK_EXT_rgba10x6_formats,
      VK_EXT_vertex_input_dynamic_state, VK_EXT_physical_device_drm, VK_EXT_device_address_binding_report,
      VK_EXT_depth_clip_control, VK_EXT_primitive_topology_list_restart, VK_EXT_present_mode_fifo_latest_ready,
      VK_FUCHSIA_external_memory (VK_USE_PLATFORM_FUCHSIA), VK_FUCHSIA_buffer_collection (VK_USE_PLATFORM_FUCHSIA),
      VK_HUAWEI_subpass_shading, VK_HUAWEI_invocation_mask, VK_NV_external_memory_rdma, VK_EXT_pipeline_properties,
      VK_EXT_frame_boundary, VK_EXT_multisampled_render_to_single_sampled, VK_EXT_extended_dynamic_state2,
      VK_EXT_color_write_enable, VK_EXT_primitives_generated_query, VK_KHR_ray_tracing_maintenance1,
      VK_EXT_image_view_min_lod, VK_EXT_multi_draw, VK_EXT_image_2d_view_of_3d, VK_EXT_shader_tile_image,
      VK_EXT_opacity_micromap, VK_NV_displacement_micromap (VK_ENABLE_BETA_EXTENSIONS),
      VK_HUAWEI_cluster_culling_shader, VK_EXT_border_color_swizzle, VK_EXT_pageable_device_local_memory,
      VK_ARM_shader_core_properties, VK_ARM_scheduling_controls, VK_EXT_image_sliced_view_of_3d,
      VK_VALVE_descriptor_set_host_mapping, VK_EXT_non_seamless_cube_map, VK_ARM_render_pass_striped,
      VK_NV_copy_memory_indirect, VK_NV_memory_decompression, VK_NV_device_generated_commands_compute,
      VK_NV_ray_tracing_linear_swept_spheres, VK_NV_linear_color_attachment, VK_KHR_shader_maximal_reconvergence,
      VK_EXT_image_compression_control_swapchain, VK_QCOM_image_processing, VK_EXT_nested_command_buffer,
      VK_EXT_external_memory_acquire_unmodified, VK_EXT_extended_dynamic_state3, VK_EXT_subpass_merge_feedback,
      VK_LUNARG_direct_driver_loading, VK_EXT_shader_module_identifier, VK_EXT_rasterization_order_attachment_access,
      VK_NV_optical_flow, VK_EXT_legacy_dithering, VK_ANDROID_external_format_resolve (VK_USE_PLATFORM_ANDROID_KHR),
      VK_AMD_anti_lag, VK_KHR_ray_tracing_position_fetch, VK_EXT_shader_object, VK_KHR_pipeline_binary]
+ + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_QCOM_tile_properties === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_SEC_amigo_profiling === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_QCOM_multiview_per_view_viewports === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NV_ray_tracing_invocation_reorder === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NV_cooperative_vector === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NV_extended_sparse_address_space === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_EXT_mutable_descriptor_type === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_EXT_legacy_vertex_attributes === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_EXT_layer_settings === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_ARM_shader_core_builtins === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_EXT_pipeline_library_group_handles === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_EXT_dynamic_rendering_unused_attachments === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NV_low_latency2 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; 
+ + //=== VK_KHR_cooperative_matrix === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_QCOM_multiview_per_view_render_areas === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_KHR_compute_shader_derivatives === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_KHR_video_decode_av1 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_KHR_video_encode_av1 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_KHR_video_maintenance1 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NV_per_stage_descriptor_set === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_QCOM_image_processing2 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_QCOM_filter_cubic_weights === + template <> + struct 
StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_QCOM_ycbcr_degamma === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_QCOM_filter_cubic_clamp === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_MSFT_layered_driver === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NV_descriptor_pool_overallocation === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_QCOM_tile_memory_heap === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NV_display_stereo === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_KHR_video_encode_quantization_map === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct 
StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NV_raw_access_chains === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NV_external_compute_queue === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_KHR_shader_relaxed_extended_instruction === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NV_command_buffer_inheritance === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_KHR_maintenance7 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NV_shader_atomic_float16_vector === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_EXT_shader_replicated_composites === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NV_ray_tracing_validation === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NV_cluster_acceleration_structure === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NV_partitioned_acceleration_structure === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_EXT_device_generated_commands === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_KHR_maintenance8 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { 
+ value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_MESA_image_alignment_control === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_EXT_depth_clamp_control === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_KHR_video_maintenance2 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_HUAWEI_hdr_vivid === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NV_cooperative_matrix2 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_ARM_pipeline_opacity_micromap === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_depth_clamp_zero_one === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_EXT_vertex_attribute_robustness === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_present_metering === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_fragment_density_map_offset === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + +#endif // VULKAN_HPP_DISABLE_ENHANCED_MODE + + namespace detail + { +#if 
VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+    class DynamicLoader
+    {
+    public:
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+      DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT
+# else
+      DynamicLoader( std::string const & vulkanLibraryName = {} )
+# endif
+      {
+        if ( !vulkanLibraryName.empty() )
+        {
+# if defined( _WIN32 )
+          m_library = ::LoadLibraryA( vulkanLibraryName.c_str() );
+# elif defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ )
+          m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL );
+# else
+# error unsupported platform
+# endif
+        }
+        else
+        {
+# if defined( _WIN32 )
+          m_library = ::LoadLibraryA( "vulkan-1.dll" );
+# elif defined( __APPLE__ )
+          m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL );
+          if ( !m_library )
+          {
+            m_library = dlopen( "libvulkan.1.dylib", RTLD_NOW | RTLD_LOCAL );
+          }
+          if ( !m_library )
+          {
+            m_library = dlopen( "libMoltenVK.dylib", RTLD_NOW | RTLD_LOCAL );
+          }
+          // Add support for using Vulkan and MoltenVK in a Framework. App store rules for iOS
+          // strictly enforce no .dylib's. If they aren't found it just falls through
+          if ( !m_library )
+          {
+            m_library = dlopen( "vulkan.framework/vulkan", RTLD_NOW | RTLD_LOCAL );
+          }
+          if ( !m_library )
+          {
+            m_library = dlopen( "MoltenVK.framework/MoltenVK", RTLD_NOW | RTLD_LOCAL );
+          }
+          // modern versions of macOS don't search /usr/local/lib automatically contrary to what man dlopen says
+          // Vulkan SDK uses this as the system-wide installation location, so we're going to fallback to this if all else fails
+          if ( !m_library && ( getenv( "DYLD_FALLBACK_LIBRARY_PATH" ) == NULL ) )
+          {
+            m_library = dlopen( "/usr/local/lib/libvulkan.dylib", RTLD_NOW | RTLD_LOCAL );
+          }
+# elif defined( __unix__ ) || defined( __QNX__ ) || defined( __Fuchsia__ )
+          m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL );
+          if ( !m_library )
+          {
+            m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL );
+          }
+# else
+# error unsupported platform
+# endif
+        }
+
+# ifndef VULKAN_HPP_NO_EXCEPTIONS
+        if ( !m_library )
+        {
+          // NOTE there should be an InitializationFailedError, but msvc insists on the symbol does not exist within the scope of this function.
+          throw std::runtime_error( "Failed to load vulkan library!" );
+        }
+# endif
+      }
+
+      DynamicLoader( DynamicLoader const & ) = delete;
+
+      DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT : m_library( other.m_library )
+      {
+        other.m_library = nullptr;
+      }
+
+      DynamicLoader & operator=( DynamicLoader const & ) = delete;
+
+      DynamicLoader & operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT
+      {
+        std::swap( m_library, other.m_library );
+        return *this;
+      }
+
+      ~DynamicLoader() VULKAN_HPP_NOEXCEPT
+      {
+        if ( m_library )
+        {
+# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ )
+          dlclose( m_library );
+# elif defined( _WIN32 )
+          ::FreeLibrary( m_library );
+# else
+# error unsupported platform
+# endif
+        }
+      }
+
+      template <typename T>
+      T getProcAddress( const char * function ) const VULKAN_HPP_NOEXCEPT
+      {
+# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ )
+        return (T)dlsym( m_library, function );
+# elif defined( _WIN32 )
+        return ( T )::GetProcAddress( m_library, function );
+# else
+# error unsupported platform
+# endif
+      }
+
+      bool success() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_library != nullptr;
+      }
+
+    private:
+# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ )
+      void * m_library;
+# elif defined( _WIN32 )
+      ::HINSTANCE m_library;
+# else
+# error unsupported platform
+# endif
+    };
+#endif
+
+    using PFN_dummy = void ( * )();
+
+    class DispatchLoaderDynamic : public DispatchLoaderBase
+    {
+    public:
+      //=== VK_VERSION_1_0 ===
+      PFN_vkCreateInstance vkCreateInstance = 0;
+      PFN_vkDestroyInstance vkDestroyInstance = 0;
+      PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0;
+      PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0;
+      PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0;
+      PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0;
+      PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0;
+      PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0;
+      PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0;
+      PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
+      PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
+      PFN_vkCreateDevice vkCreateDevice = 0;
+      PFN_vkDestroyDevice vkDestroyDevice = 0;
+      PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0;
+      PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0;
+      PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0;
+      PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0;
+      PFN_vkGetDeviceQueue vkGetDeviceQueue = 0;
+      PFN_vkQueueSubmit vkQueueSubmit = 0;
+      PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
+      PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0;
+      PFN_vkAllocateMemory vkAllocateMemory = 0;
+      PFN_vkFreeMemory vkFreeMemory = 0;
+      PFN_vkMapMemory vkMapMemory = 0;
+      PFN_vkUnmapMemory vkUnmapMemory = 0;
+      PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0;
+      PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0;
+      PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0;
+      PFN_vkBindBufferMemory vkBindBufferMemory = 0;
+      PFN_vkBindImageMemory vkBindImageMemory = 0;
+      PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0;
+      PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0;
+      PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0;
+
PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; + PFN_vkQueueBindSparse vkQueueBindSparse = 0; + PFN_vkCreateFence vkCreateFence = 0; + PFN_vkDestroyFence vkDestroyFence = 0; + PFN_vkResetFences vkResetFences = 0; + PFN_vkGetFenceStatus vkGetFenceStatus = 0; + PFN_vkWaitForFences vkWaitForFences = 0; + PFN_vkCreateSemaphore vkCreateSemaphore = 0; + PFN_vkDestroySemaphore vkDestroySemaphore = 0; + PFN_vkCreateEvent vkCreateEvent = 0; + PFN_vkDestroyEvent vkDestroyEvent = 0; + PFN_vkGetEventStatus vkGetEventStatus = 0; + PFN_vkSetEvent vkSetEvent = 0; + PFN_vkResetEvent vkResetEvent = 0; + PFN_vkCreateQueryPool vkCreateQueryPool = 0; + PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; + PFN_vkCreateBuffer vkCreateBuffer = 0; + PFN_vkDestroyBuffer vkDestroyBuffer = 0; + PFN_vkCreateBufferView vkCreateBufferView = 0; + PFN_vkDestroyBufferView vkDestroyBufferView = 0; + PFN_vkCreateImage vkCreateImage = 0; + PFN_vkDestroyImage vkDestroyImage = 0; + PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; + PFN_vkCreateImageView vkCreateImageView = 0; + PFN_vkDestroyImageView vkDestroyImageView = 0; + PFN_vkCreateShaderModule vkCreateShaderModule = 0; + PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; + PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; + PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; + PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; + PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; + PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; + PFN_vkDestroyPipeline vkDestroyPipeline = 0; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; + PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; + PFN_vkCreateSampler vkCreateSampler = 0; + PFN_vkDestroySampler vkDestroySampler = 0; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; + PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; + PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; + PFN_vkCreateRenderPass vkCreateRenderPass = 0; + PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; + PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; + PFN_vkCreateCommandPool vkCreateCommandPool = 0; + PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; + PFN_vkResetCommandPool vkResetCommandPool = 0; + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; + PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; + PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; + PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; + PFN_vkCmdSetViewport vkCmdSetViewport = 0; + PFN_vkCmdSetScissor vkCmdSetScissor = 0; + PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; + PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; + PFN_vkCmdSetStencilWriteMask 
vkCmdSetStencilWriteMask = 0; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; + PFN_vkCmdDraw vkCmdDraw = 0; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; + PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; + PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; + PFN_vkCmdDispatch vkCmdDispatch = 0; + PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; + PFN_vkCmdCopyImage vkCmdCopyImage = 0; + PFN_vkCmdBlitImage vkCmdBlitImage = 0; + PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; + PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; + PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; + PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; + PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; + PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; + PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; + PFN_vkCmdResolveImage vkCmdResolveImage = 0; + PFN_vkCmdSetEvent vkCmdSetEvent = 0; + PFN_vkCmdResetEvent vkCmdResetEvent = 0; + PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; + PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; + PFN_vkCmdEndQuery vkCmdEndQuery = 0; + PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; + PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; + PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; + PFN_vkCmdPushConstants vkCmdPushConstants = 0; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; + PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; + PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; + + //=== VK_VERSION_1_1 === + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; + PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; + PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; + PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; + PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; + PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; + PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; + PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; + PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; + PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; + PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; + PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; + PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; + PFN_vkTrimCommandPool vkTrimCommandPool = 0; + PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; + PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; + PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; + PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; + PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; + PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; + 
PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; + PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; + PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; + + //=== VK_VERSION_1_2 === + PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; + PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; + PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; + PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; + PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; + PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; + PFN_vkResetQueryPool vkResetQueryPool = 0; + PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; + PFN_vkWaitSemaphores vkWaitSemaphores = 0; + PFN_vkSignalSemaphore vkSignalSemaphore = 0; + PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; + PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; + + //=== VK_VERSION_1_3 === + PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0; + PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0; + PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0; + PFN_vkSetPrivateData vkSetPrivateData = 0; + PFN_vkGetPrivateData vkGetPrivateData = 0; + PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0; + PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0; + PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0; + PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0; + PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0; + PFN_vkQueueSubmit2 vkQueueSubmit2 = 0; + PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0; + PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0; + PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0; + PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0; + PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0; + PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0; + PFN_vkCmdBeginRendering vkCmdBeginRendering = 0; + PFN_vkCmdEndRendering vkCmdEndRendering = 0; + PFN_vkCmdSetCullMode vkCmdSetCullMode = 0; + PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0; + PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0; + PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0; + PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0; + PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0; + PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0; + PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0; + PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0; + PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0; + PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0; + PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0; + PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0; + PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0; + PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0; + PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0; + PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0; + PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0; + + //=== VK_VERSION_1_4 === + PFN_vkCmdSetLineStipple vkCmdSetLineStipple = 0; + PFN_vkMapMemory2 vkMapMemory2 = 0; + PFN_vkUnmapMemory2 vkUnmapMemory2 = 0; + PFN_vkCmdBindIndexBuffer2 vkCmdBindIndexBuffer2 = 0; + 
PFN_vkGetRenderingAreaGranularity vkGetRenderingAreaGranularity = 0; + PFN_vkGetDeviceImageSubresourceLayout vkGetDeviceImageSubresourceLayout = 0; + PFN_vkGetImageSubresourceLayout2 vkGetImageSubresourceLayout2 = 0; + PFN_vkCmdPushDescriptorSet vkCmdPushDescriptorSet = 0; + PFN_vkCmdPushDescriptorSetWithTemplate vkCmdPushDescriptorSetWithTemplate = 0; + PFN_vkCmdSetRenderingAttachmentLocations vkCmdSetRenderingAttachmentLocations = 0; + PFN_vkCmdSetRenderingInputAttachmentIndices vkCmdSetRenderingInputAttachmentIndices = 0; + PFN_vkCmdBindDescriptorSets2 vkCmdBindDescriptorSets2 = 0; + PFN_vkCmdPushConstants2 vkCmdPushConstants2 = 0; + PFN_vkCmdPushDescriptorSet2 vkCmdPushDescriptorSet2 = 0; + PFN_vkCmdPushDescriptorSetWithTemplate2 vkCmdPushDescriptorSetWithTemplate2 = 0; + PFN_vkCopyMemoryToImage vkCopyMemoryToImage = 0; + PFN_vkCopyImageToMemory vkCopyImageToMemory = 0; + PFN_vkCopyImageToImage vkCopyImageToImage = 0; + PFN_vkTransitionImageLayout vkTransitionImageLayout = 0; + + //=== VK_KHR_surface === + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; + + //=== VK_KHR_swapchain === + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; + PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; + PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; + PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; + PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; + PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; + PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; + + //=== VK_KHR_display === + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; + PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; + PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; + PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; + PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; + PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; + + //=== VK_KHR_display_swapchain === + PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; +#else + PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; +#else + PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0; +#endif 
/*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; +#else + PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; +#else + PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; +#else + PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; + PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; + PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; + + //=== VK_EXT_debug_marker === + PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; + PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; + PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; + PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; + PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; + + //=== VK_KHR_video_queue === + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; + PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; + PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; + PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; + PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; + PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0; + PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; + PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; + PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; + PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; + PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; + + //=== VK_KHR_video_decode_queue === + PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; + + //=== VK_EXT_transform_feedback === + PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; + PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; + PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; + PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; + PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; + PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; + + //=== VK_NVX_binary_import === + PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; + PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; + PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; + PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; + PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; + + //=== VK_NVX_image_view_handle === + 
PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetImageViewHandle64NVX vkGetImageViewHandle64NVX = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; + + //=== VK_AMD_draw_indirect_count === + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; + + //=== VK_AMD_shader_info === + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; + + //=== VK_KHR_dynamic_rendering === + PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; + PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; +#else + PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; +#else + PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; + PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; + PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; + + //=== VK_KHR_device_group === + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; + PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; + PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; +#else + PFN_dummy vkCreateViSurfaceNN_placeholder = 0; +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_maintenance1 === + PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; + + //=== VK_KHR_device_group_creation === + PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; + + //=== VK_KHR_external_memory_capabilities === + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; + PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; +#else + PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; + PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; + + //=== VK_KHR_external_semaphore_capabilities === + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== 
VK_KHR_external_semaphore_win32 === + PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; + PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; +#else + PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; + PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; + + //=== VK_KHR_push_descriptor === + PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; + + //=== VK_EXT_conditional_rendering === + PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; + PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; + + //=== VK_KHR_descriptor_update_template === + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; + + //=== VK_NV_clip_space_w_scaling === + PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; + + //=== VK_EXT_direct_mode_display === + PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; + PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; +#else + PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0; + PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0; +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; + + //=== VK_EXT_display_control === + PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; + PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; + PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; + PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; + + //=== VK_GOOGLE_display_timing === + PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; + PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; + + //=== VK_EXT_discard_rectangles === + PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; + PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT = 0; + PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT = 0; + + //=== VK_EXT_hdr_metadata === + PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; + + //=== VK_KHR_create_renderpass2 === + PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; + PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; + PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; + PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; + + //=== VK_KHR_shared_presentable_image === + PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; + + //=== VK_KHR_external_fence_capabilities === + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; + PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; +#else + PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; + PFN_dummy 
vkGetFenceWin32HandleKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; + PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; + + //=== VK_KHR_performance_query === + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; + PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; + PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; + + //=== VK_KHR_get_surface_capabilities2 === + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; + + //=== VK_KHR_get_display_properties2 === + PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; + PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; + PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; +#else + PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; +#else + PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; + PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; + PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; + PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; + PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; + PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; + PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; + PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; + PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; + PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; + PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; +#else + PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; + PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0; + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0; + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0; + PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0; + PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0; + PFN_vkCmdDispatchGraphIndirectAMDX 
vkCmdDispatchGraphIndirectAMDX = 0; + PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0; +#else + PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0; + PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; + + //=== VK_KHR_get_memory_requirements2 === + PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; + PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; + PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; + + //=== VK_KHR_acceleration_structure === + PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; + PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; + PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; + PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; + PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; + PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; + PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; + PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; + PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; + + //=== VK_KHR_ray_tracing_pipeline === + PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; + PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; + PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; + PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; + PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; + PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; + + //=== VK_KHR_sampler_ycbcr_conversion === + PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; + PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; + + //=== VK_KHR_bind_memory2 === + PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; + PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; + + //=== 
VK_EXT_image_drm_format_modifier === + PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; + + //=== VK_EXT_validation_cache === + PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; + PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; + PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; + PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; + + //=== VK_NV_shading_rate_image === + PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; + PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; + PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; + + //=== VK_NV_ray_tracing === + PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; + PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; + PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; + PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; + PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; + PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; + PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; + PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; + PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; + PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; + PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; + + //=== VK_KHR_maintenance3 === + PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; + + //=== VK_KHR_draw_indirect_count === + PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; + PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; + + //=== VK_EXT_external_memory_host === + PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; + + //=== VK_AMD_buffer_marker === + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; + PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; + + //=== VK_EXT_calibrated_timestamps === + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; + PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; + + //=== VK_NV_mesh_shader === + PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; + PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; + PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; + + //=== VK_NV_scissor_exclusive === + PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV = 0; + PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; + + //=== VK_NV_device_diagnostic_checkpoints === + PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; + PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; + PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; + + //=== VK_KHR_timeline_semaphore === + PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; + PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; + PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; + + //=== VK_INTEL_performance_query === + PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; + PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; + PFN_vkCmdSetPerformanceMarkerINTEL 
vkCmdSetPerformanceMarkerINTEL = 0; + PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; + PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; + PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; + PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; + PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; + PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; + + //=== VK_AMD_display_native_hdr === + PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; +#else + PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; +#else + PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; + PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; + + //=== VK_KHR_dynamic_rendering_local_read === + PFN_vkCmdSetRenderingAttachmentLocationsKHR vkCmdSetRenderingAttachmentLocationsKHR = 0; + PFN_vkCmdSetRenderingInputAttachmentIndicesKHR vkCmdSetRenderingInputAttachmentIndicesKHR = 0; + + //=== VK_EXT_buffer_device_address === + PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; + + //=== VK_EXT_tooling_info === + PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; + + //=== VK_KHR_present_wait === + PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0; + + //=== VK_NV_cooperative_matrix === + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; + + //=== VK_NV_coverage_reduction_mode === + PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; + PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; + PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; + PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; +#else + PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0; + PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0; + PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0; + PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; + + //=== VK_KHR_buffer_device_address === + PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; + PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; + + //=== VK_EXT_line_rasterization === + PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; + + //=== VK_EXT_host_query_reset === + PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 
0; + + //=== VK_EXT_extended_dynamic_state === + PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; + PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; + PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0; + PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0; + PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0; + PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0; + PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0; + PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0; + PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0; + PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0; + PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; + PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0; + + //=== VK_KHR_deferred_host_operations === + PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; + PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; + PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; + PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; + PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; + + //=== VK_KHR_pipeline_executable_properties === + PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; + PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; + PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; + + //=== VK_EXT_host_image_copy === + PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0; + PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0; + PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0; + PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0; + PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; + + //=== VK_KHR_map_memory2 === + PFN_vkMapMemory2KHR vkMapMemory2KHR = 0; + PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0; + + //=== VK_EXT_swapchain_maintenance1 === + PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0; + + //=== VK_NV_device_generated_commands === + PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; + PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; + PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; + PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; + PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; + PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; + + //=== VK_EXT_depth_bias_control === + PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT = 0; + + //=== VK_EXT_acquire_drm_display === + PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0; + PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0; + + //=== VK_EXT_private_data === + PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; + PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; + PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; + PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; + + //=== VK_KHR_video_encode_queue === + PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = 0; + PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR = 0; + PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== 
VK_NV_cuda_kernel_launch === + PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0; + PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0; + PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0; + PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0; + PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0; + PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0; +#else + PFN_dummy vkCreateCudaModuleNV_placeholder = 0; + PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0; + PFN_dummy vkCreateCudaFunctionNV_placeholder = 0; + PFN_dummy vkDestroyCudaModuleNV_placeholder = 0; + PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0; + PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_QCOM_tile_shading === + PFN_vkCmdDispatchTileQCOM vkCmdDispatchTileQCOM = 0; + PFN_vkCmdBeginPerTileExecutionQCOM vkCmdBeginPerTileExecutionQCOM = 0; + PFN_vkCmdEndPerTileExecutionQCOM vkCmdEndPerTileExecutionQCOM = 0; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0; +#else + PFN_dummy vkExportMetalObjectsEXT_placeholder = 0; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; + PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; + PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; + PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; + PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; + PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; + + //=== VK_EXT_descriptor_buffer === + PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0; + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0; + PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0; + PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0; + PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0; + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0; + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0; + + //=== VK_NV_fragment_shading_rate_enums === + PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; + + //=== VK_EXT_mesh_shader === + PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0; + PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0; + PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0; + + //=== VK_KHR_copy_commands2 === + PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; + PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; + PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; + PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; + PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; + PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; + + //=== VK_EXT_device_fault === + PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; + 
PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; +#else + PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0; + PFN_dummy vkGetWinrtDisplayNV_placeholder = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; +#else + PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_vertex_input_dynamic_state === + PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; +#else + PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; + PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; +#else + PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0; + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0; + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0; + PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0; + PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0; +#else + PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0; + PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0; + + //=== VK_HUAWEI_invocation_mask === + PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0; + + //=== VK_NV_external_memory_rdma === + PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0; + + //=== VK_EXT_pipeline_properties === + PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0; + + //=== VK_EXT_extended_dynamic_state2 === + PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0; + PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0; + PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; + PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; + PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 
0; + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; +#else + PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; + + //=== VK_KHR_ray_tracing_maintenance1 === + PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0; + + //=== VK_EXT_multi_draw === + PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0; + PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0; + + //=== VK_EXT_opacity_micromap === + PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0; + PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0; + PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0; + PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0; + PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0; + PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0; + PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0; + PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0; + PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0; + PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0; + PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0; + PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0; + PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0; + PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0; + + //=== VK_HUAWEI_cluster_culling_shader === + PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0; + PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0; + + //=== VK_EXT_pageable_device_local_memory === + PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0; + + //=== VK_KHR_maintenance4 === + PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0; + + //=== VK_VALVE_descriptor_set_host_mapping === + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0; + PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0; + + //=== VK_NV_copy_memory_indirect === + PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0; + PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0; + + //=== VK_NV_memory_decompression === + PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0; + PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0; + + //=== VK_NV_device_generated_commands_compute === + PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0; + PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV = 0; + PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0; + + //=== VK_EXT_extended_dynamic_state3 === + PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0; + PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0; + PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0; + PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0; + PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0; + PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0; + PFN_vkCmdSetLogicOpEnableEXT 
vkCmdSetLogicOpEnableEXT = 0; + PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0; + PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0; + PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0; + PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0; + PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0; + PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0; + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0; + PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0; + PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0; + PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0; + PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0; + PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0; + PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0; + PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0; + PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0; + PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0; + PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0; + PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0; + PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0; + PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0; + PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0; + PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0; + PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0; + PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0; + + //=== VK_EXT_shader_module_identifier === + PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0; + PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0; + + //=== VK_NV_optical_flow === + PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0; + PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0; + PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0; + PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0; + PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0; + + //=== VK_KHR_maintenance5 === + PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0; + PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0; + PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0; + PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0; + + //=== VK_AMD_anti_lag === + PFN_vkAntiLagUpdateAMD vkAntiLagUpdateAMD = 0; + + //=== VK_EXT_shader_object === + PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; + PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; + PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0; + PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0; + PFN_vkCmdSetDepthClampRangeEXT vkCmdSetDepthClampRangeEXT = 0; + + //=== VK_KHR_pipeline_binary === + PFN_vkCreatePipelineBinariesKHR vkCreatePipelineBinariesKHR = 0; + PFN_vkDestroyPipelineBinaryKHR vkDestroyPipelineBinaryKHR = 0; + PFN_vkGetPipelineKeyKHR vkGetPipelineKeyKHR = 0; + PFN_vkGetPipelineBinaryDataKHR vkGetPipelineBinaryDataKHR = 0; + PFN_vkReleaseCapturedPipelineDataKHR 
vkReleaseCapturedPipelineDataKHR = 0; + + //=== VK_QCOM_tile_properties === + PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; + PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; + + //=== VK_NV_cooperative_vector === + PFN_vkGetPhysicalDeviceCooperativeVectorPropertiesNV vkGetPhysicalDeviceCooperativeVectorPropertiesNV = 0; + PFN_vkConvertCooperativeVectorMatrixNV vkConvertCooperativeVectorMatrixNV = 0; + PFN_vkCmdConvertCooperativeVectorMatrixNV vkCmdConvertCooperativeVectorMatrixNV = 0; + + //=== VK_NV_low_latency2 === + PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV = 0; + PFN_vkLatencySleepNV vkLatencySleepNV = 0; + PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV = 0; + PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV = 0; + PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV = 0; + + //=== VK_KHR_cooperative_matrix === + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = 0; + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT = 0; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX = 0; +#else + PFN_dummy vkGetScreenBufferPropertiesQNX_placeholder = 0; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_line_rasterization === + PFN_vkCmdSetLineStippleKHR vkCmdSetLineStippleKHR = 0; + + //=== VK_KHR_calibrated_timestamps === + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = 0; + PFN_vkGetCalibratedTimestampsKHR vkGetCalibratedTimestampsKHR = 0; + + //=== VK_KHR_maintenance6 === + PFN_vkCmdBindDescriptorSets2KHR vkCmdBindDescriptorSets2KHR = 0; + PFN_vkCmdPushConstants2KHR vkCmdPushConstants2KHR = 0; + PFN_vkCmdPushDescriptorSet2KHR vkCmdPushDescriptorSet2KHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplate2KHR vkCmdPushDescriptorSetWithTemplate2KHR = 0; + PFN_vkCmdSetDescriptorBufferOffsets2EXT vkCmdSetDescriptorBufferOffsets2EXT = 0; + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = 0; + + //=== VK_QCOM_tile_memory_heap === + PFN_vkCmdBindTileMemoryQCOM vkCmdBindTileMemoryQCOM = 0; + + //=== VK_NV_external_compute_queue === + PFN_vkCreateExternalComputeQueueNV vkCreateExternalComputeQueueNV = 0; + PFN_vkDestroyExternalComputeQueueNV vkDestroyExternalComputeQueueNV = 0; + PFN_vkGetExternalComputeQueueDataNV vkGetExternalComputeQueueDataNV = 0; + + //=== VK_NV_cluster_acceleration_structure === + PFN_vkGetClusterAccelerationStructureBuildSizesNV vkGetClusterAccelerationStructureBuildSizesNV = 0; + PFN_vkCmdBuildClusterAccelerationStructureIndirectNV vkCmdBuildClusterAccelerationStructureIndirectNV = 0; + + //=== VK_NV_partitioned_acceleration_structure === + PFN_vkGetPartitionedAccelerationStructuresBuildSizesNV vkGetPartitionedAccelerationStructuresBuildSizesNV = 0; + PFN_vkCmdBuildPartitionedAccelerationStructuresNV vkCmdBuildPartitionedAccelerationStructuresNV = 0; + + //=== VK_EXT_device_generated_commands === + PFN_vkGetGeneratedCommandsMemoryRequirementsEXT vkGetGeneratedCommandsMemoryRequirementsEXT = 0; + PFN_vkCmdPreprocessGeneratedCommandsEXT vkCmdPreprocessGeneratedCommandsEXT = 0; + PFN_vkCmdExecuteGeneratedCommandsEXT vkCmdExecuteGeneratedCommandsEXT = 0; + PFN_vkCreateIndirectCommandsLayoutEXT vkCreateIndirectCommandsLayoutEXT = 0; + 
PFN_vkDestroyIndirectCommandsLayoutEXT vkDestroyIndirectCommandsLayoutEXT = 0;
+ PFN_vkCreateIndirectExecutionSetEXT vkCreateIndirectExecutionSetEXT = 0;
+ PFN_vkDestroyIndirectExecutionSetEXT vkDestroyIndirectExecutionSetEXT = 0;
+ PFN_vkUpdateIndirectExecutionSetPipelineEXT vkUpdateIndirectExecutionSetPipelineEXT = 0;
+ PFN_vkUpdateIndirectExecutionSetShaderEXT vkUpdateIndirectExecutionSetShaderEXT = 0;
+
+ //=== VK_NV_cooperative_matrix2 ===
+ PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = 0;
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ //=== VK_EXT_external_memory_metal ===
+ PFN_vkGetMemoryMetalHandleEXT vkGetMemoryMetalHandleEXT = 0;
+ PFN_vkGetMemoryMetalHandlePropertiesEXT vkGetMemoryMetalHandlePropertiesEXT = 0;
+#else
+ PFN_dummy vkGetMemoryMetalHandleEXT_placeholder = 0;
+ PFN_dummy vkGetMemoryMetalHandlePropertiesEXT_placeholder = 0;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+ //=== VK_EXT_fragment_density_map_offset ===
+ PFN_vkCmdEndRendering2EXT vkCmdEndRendering2EXT = 0;
+
+ public:
+ DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default;
+ DispatchLoaderDynamic( DispatchLoaderDynamic const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DispatchLoaderDynamic( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
+ {
+ init( getInstanceProcAddr );
+ }
+
+ // This interface does not require a linked vulkan library.
+ DispatchLoaderDynamic( VkInstance instance,
+ PFN_vkGetInstanceProcAddr getInstanceProcAddr,
+ VkDevice device = {},
+ PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT
+ {
+ init( instance, getInstanceProcAddr, device, getDeviceProcAddr );
+ }
+
+ template <typename DynamicLoader>
+ void init()
+ {
+ static DynamicLoader dl;
+ init( dl );
+ }
+
+ template <typename DynamicLoader>
+ void init( DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT
+ {
+ PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
+ init( getInstanceProcAddr );
+ }
+
+ void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( getInstanceProcAddr );
+
+ vkGetInstanceProcAddr = getInstanceProcAddr;
+
+ //=== VK_VERSION_1_0 ===
+ vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) );
+ vkEnumerateInstanceExtensionProperties =
+ PFN_vkEnumerateInstanceExtensionProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) );
+ vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) );
+
+ //=== VK_VERSION_1_1 ===
+ vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) );
+ }
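// Illustrative note (not part of the diff): the loader above is typically bootstrapped in two
// stages. A first pass with nothing but vkGetInstanceProcAddr resolves the global entry points
// (vkCreateInstance, vkEnumerateInstance*), and a second pass with the freshly created instance
// resolves everything else. A minimal sketch, assuming vulkan.hpp is included with its default
// "vk" namespace; createInstance() is a hypothetical application helper:
//
//   // obtain vkGetInstanceProcAddr, e.g. from the statically linked loader or via dlsym/GetProcAddress
//   vk::DispatchLoaderDynamic dispatcher( vkGetInstanceProcAddr );  // global entry points only
//   VkInstance instance = createInstance( dispatcher );             // hypothetical helper using dispatcher.vkCreateInstance
//   dispatcher.init( vk::Instance( instance ) );                    // resolves the remaining entry points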
+
+ // This interface does not require a linked vulkan library.
+ void init( VkInstance instance,
+ PFN_vkGetInstanceProcAddr getInstanceProcAddr,
+ VkDevice device = {},
+ PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( instance && getInstanceProcAddr );
+ vkGetInstanceProcAddr = getInstanceProcAddr;
+ init( VULKAN_HPP_NAMESPACE::Instance( instance ) );
+ if ( device )
+ {
+ init( VULKAN_HPP_NAMESPACE::Device( device ) );
+ }
+ }
+
+ void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT
+ {
+ VkInstance instance = static_cast<VkInstance>( instanceCpp );
+
+ //=== VK_VERSION_1_0 ===
+ vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
+ vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
+ vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
+ vkGetPhysicalDeviceFormatProperties =
+ PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
+ vkGetPhysicalDeviceImageFormatProperties =
+ PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
+ vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
+ vkGetPhysicalDeviceQueueFamilyProperties =
+ PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
+ vkGetPhysicalDeviceMemoryProperties =
+ PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
+ vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
+ vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
+ vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) );
+ vkEnumerateDeviceExtensionProperties =
+ PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) );
+ vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) );
+ vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) );
+ vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) );
+ vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) );
+ vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) );
+ vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) );
+ vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) );
+ vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) );
+ vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) );
+ vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) );
+ vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) );
+ vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) );
+ vkBindBufferMemory =
PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) ); + vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) ); + vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) ); + vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) ); + vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) ); + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) ); + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) ); + vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) ); + vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) ); + vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) ); + vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) ); + vkCreatePipelineCache 
= PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) ); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) ); + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) ); + vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) ); + vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) ); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) ); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) ); + vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) ); + vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) ); + vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) ); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) ); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) ); + vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) ); + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) ); + vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) ); + vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) ); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) 
); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) ); + vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) ); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) ); + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, 
"vkCmdResetEvent" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) ); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) ); + vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) ); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) ); + + //=== VK_VERSION_1_1 === + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) ); + vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); + vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2 = + PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) ); + vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); + vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); + vkGetPhysicalDeviceFormatProperties2 = + PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); + vkGetPhysicalDeviceImageFormatProperties2 = + PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); + vkGetPhysicalDeviceQueueFamilyProperties2 = + PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); + vkGetPhysicalDeviceMemoryProperties2 = + PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties2 = + 
PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) ); + vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) ); + vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) ); + vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) ); + vkGetPhysicalDeviceExternalBufferProperties = + PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); + vkGetPhysicalDeviceExternalFenceProperties = + PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); + vkGetPhysicalDeviceExternalSemaphoreProperties = + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) ); + + //=== VK_VERSION_1_2 === + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) ); + vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) ); + vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) ); + vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = + PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + + //=== VK_VERSION_1_3 === + vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) ); + 
vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlot" ) ); + vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlot" ) ); + vkSetPrivateData = PFN_vkSetPrivateData( vkGetInstanceProcAddr( instance, "vkSetPrivateData" ) ); + vkGetPrivateData = PFN_vkGetPrivateData( vkGetInstanceProcAddr( instance, "vkGetPrivateData" ) ); + vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2" ) ); + vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2" ) ); + vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2" ) ); + vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2" ) ); + vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2" ) ); + vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetInstanceProcAddr( instance, "vkQueueSubmit2" ) ); + vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2" ) ); + vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2" ) ); + vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2" ) ); + vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2" ) ); + vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2" ) ); + vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2" ) ); + vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetInstanceProcAddr( instance, "vkCmdBeginRendering" ) ); + vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetInstanceProcAddr( instance, "vkCmdEndRendering" ) ); + vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetInstanceProcAddr( instance, "vkCmdSetCullMode" ) ); + vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFace" ) ); + vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopology" ) ); + vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCount" ) ); + vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCount" ) ); + vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2" ) ); + vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnable" ) ); + vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnable" ) ); + vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOp" ) ); + vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnable" ) ); + vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnable" ) ); + vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOp" ) ); + vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnable" ) ); + vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( 
vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnable" ) ); + vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnable" ) ); + vkGetDeviceBufferMemoryRequirements = + PFN_vkGetDeviceBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirements" ) ); + vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirements" ) ); + vkGetDeviceImageSparseMemoryRequirements = + PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirements" ) ); + + //=== VK_VERSION_1_4 === + vkCmdSetLineStipple = PFN_vkCmdSetLineStipple( vkGetInstanceProcAddr( instance, "vkCmdSetLineStipple" ) ); + vkMapMemory2 = PFN_vkMapMemory2( vkGetInstanceProcAddr( instance, "vkMapMemory2" ) ); + vkUnmapMemory2 = PFN_vkUnmapMemory2( vkGetInstanceProcAddr( instance, "vkUnmapMemory2" ) ); + vkCmdBindIndexBuffer2 = PFN_vkCmdBindIndexBuffer2( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer2" ) ); + vkGetRenderingAreaGranularity = PFN_vkGetRenderingAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderingAreaGranularity" ) ); + vkGetDeviceImageSubresourceLayout = PFN_vkGetDeviceImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSubresourceLayout" ) ); + vkGetImageSubresourceLayout2 = PFN_vkGetImageSubresourceLayout2( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2" ) ); + vkCmdPushDescriptorSet = PFN_vkCmdPushDescriptorSet( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSet" ) ); + vkCmdPushDescriptorSetWithTemplate = PFN_vkCmdPushDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplate" ) ); + vkCmdSetRenderingAttachmentLocations = + PFN_vkCmdSetRenderingAttachmentLocations( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingAttachmentLocations" ) ); + vkCmdSetRenderingInputAttachmentIndices = + PFN_vkCmdSetRenderingInputAttachmentIndices( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingInputAttachmentIndices" ) ); + vkCmdBindDescriptorSets2 = PFN_vkCmdBindDescriptorSets2( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets2" ) ); + vkCmdPushConstants2 = PFN_vkCmdPushConstants2( vkGetInstanceProcAddr( instance, "vkCmdPushConstants2" ) ); + vkCmdPushDescriptorSet2 = PFN_vkCmdPushDescriptorSet2( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSet2" ) ); + vkCmdPushDescriptorSetWithTemplate2 = + PFN_vkCmdPushDescriptorSetWithTemplate2( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplate2" ) ); + vkCopyMemoryToImage = PFN_vkCopyMemoryToImage( vkGetInstanceProcAddr( instance, "vkCopyMemoryToImage" ) ); + vkCopyImageToMemory = PFN_vkCopyImageToMemory( vkGetInstanceProcAddr( instance, "vkCopyImageToMemory" ) ); + vkCopyImageToImage = PFN_vkCopyImageToImage( vkGetInstanceProcAddr( instance, "vkCopyImageToImage" ) ); + vkTransitionImageLayout = PFN_vkTransitionImageLayout( vkGetInstanceProcAddr( instance, "vkTransitionImageLayout" ) ); + + //=== VK_KHR_surface === + vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); + vkGetPhysicalDeviceSurfaceSupportKHR = + PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, 
"vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); + vkGetPhysicalDeviceSurfaceFormatsKHR = + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); + vkGetPhysicalDeviceSurfacePresentModesKHR = + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); + + //=== VK_KHR_swapchain === + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) ); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) ); + vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) ); + vkGetDeviceGroupPresentCapabilitiesKHR = + PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); + vkGetDeviceGroupSurfacePresentModesKHR = + PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkGetPhysicalDevicePresentRectanglesKHR = + PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) ); + + //=== VK_KHR_display === + vkGetPhysicalDeviceDisplayPropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); + vkGetPhysicalDeviceDisplayPlanePropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); + vkGetDisplayPlaneSupportedDisplaysKHR = + PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); + vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); + vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); + vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); + vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); + + //=== VK_KHR_display_swapchain === + vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) ); + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); + vkGetPhysicalDeviceXlibPresentationSupportKHR = + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); + vkGetPhysicalDeviceXcbPresentationSupportKHR = + 
PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); + vkGetPhysicalDeviceWaylandPresentationSupportKHR = + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); + vkGetPhysicalDeviceWin32PresentationSupportKHR = + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); + vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); + vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + + //=== VK_EXT_debug_marker === + vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) ); + vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) ); + vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) ); + + //=== VK_KHR_video_queue === + vkGetPhysicalDeviceVideoCapabilitiesKHR = + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); + vkGetPhysicalDeviceVideoFormatPropertiesKHR = + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); + vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionKHR" ) ); + vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionKHR" ) ); + vkGetVideoSessionMemoryRequirementsKHR = + PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetVideoSessionMemoryRequirementsKHR" ) ); + vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindVideoSessionMemoryKHR" ) ); + vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionParametersKHR" ) ); + vkUpdateVideoSessionParametersKHR = 
PFN_vkUpdateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkUpdateVideoSessionParametersKHR" ) ); + vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionParametersKHR" ) ); + vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginVideoCodingKHR" ) ); + vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndVideoCodingKHR" ) ); + vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdControlVideoCodingKHR" ) ); + + //=== VK_KHR_video_decode_queue === + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdDecodeVideoKHR" ) ); + + //=== VK_EXT_transform_feedback === + vkCmdBindTransformFeedbackBuffersEXT = + PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) ); + + //=== VK_NVX_binary_import === + vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetInstanceProcAddr( instance, "vkCreateCuModuleNVX" ) ); + vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkCreateCuFunctionNVX" ) ); + vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuModuleNVX" ) ); + vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuFunctionNVX" ) ); + vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetInstanceProcAddr( instance, "vkCmdCuLaunchKernelNVX" ) ); + + //=== VK_NVX_image_view_handle === + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewHandle64NVX = PFN_vkGetImageViewHandle64NVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandle64NVX" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) ); + + //=== VK_AMD_draw_indirect_count === + vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + + //=== VK_AMD_shader_info === + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) ); + + //=== VK_KHR_dynamic_rendering === + vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderingKHR" ) ); + if ( !vkCmdBeginRendering ) + vkCmdBeginRendering = 
vkCmdBeginRenderingKHR; + vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderingKHR" ) ); + if ( !vkCmdEndRendering ) + vkCmdEndRendering = vkCmdEndRenderingKHR; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + vkGetPhysicalDeviceExternalImageFormatPropertiesNV = + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); + if ( !vkGetPhysicalDeviceFeatures2 ) + vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; + vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceProperties2 ) + vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; + vkGetPhysicalDeviceFormatProperties2KHR = + PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceFormatProperties2 ) + vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; + vkGetPhysicalDeviceImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceImageFormatProperties2 ) + vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; + vkGetPhysicalDeviceQueueFamilyProperties2KHR = + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) + vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; + vkGetPhysicalDeviceMemoryProperties2KHR = + PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceMemoryProperties2 ) + vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; + vkGetPhysicalDeviceSparseImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) + vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + + //=== VK_KHR_device_group === + vkGetDeviceGroupPeerMemoryFeaturesKHR = + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( 
vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_maintenance1 === + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; + + //=== VK_KHR_device_group_creation === + vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); + if ( !vkEnumeratePhysicalDeviceGroups ) + vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; + + //=== VK_KHR_external_memory_capabilities === + vkGetPhysicalDeviceExternalBufferPropertiesKHR = + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalBufferProperties ) + vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = + PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) ); + + //=== VK_KHR_external_semaphore_capabilities === + vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) + vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) ); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) ); + + //=== VK_KHR_push_descriptor === + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) ); + if ( !vkCmdPushDescriptorSet ) + vkCmdPushDescriptorSet = vkCmdPushDescriptorSetKHR; + vkCmdPushDescriptorSetWithTemplateKHR = + 
PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + if ( !vkCmdPushDescriptorSetWithTemplate ) + vkCmdPushDescriptorSetWithTemplate = vkCmdPushDescriptorSetWithTemplateKHR; + + //=== VK_EXT_conditional_rendering === + vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) ); + + //=== VK_KHR_descriptor_update_template === + vkCreateDescriptorUpdateTemplateKHR = + PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; + vkDestroyDescriptorUpdateTemplateKHR = + PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + vkUpdateDescriptorSetWithTemplateKHR = + PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + + //=== VK_NV_clip_space_w_scaling === + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) ); + + //=== VK_EXT_direct_mode_display === + vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); + vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + vkGetPhysicalDeviceSurfaceCapabilities2EXT = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + + //=== VK_EXT_display_control === + vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) ); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) ); + vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) ); + + //=== VK_GOOGLE_display_timing === + vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) ); + + //=== VK_EXT_discard_rectangles === + vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetInstanceProcAddr( 
instance, "vkCmdSetDiscardRectangleEnableEXT" ) ); + vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleModeEXT" ) ); + + //=== VK_EXT_hdr_metadata === + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) ); + + //=== VK_KHR_create_renderpass2 === + vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) ); + if ( !vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + + //=== VK_KHR_shared_presentable_image === + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) ); + + //=== VK_KHR_external_fence_capabilities === + vkGetPhysicalDeviceExternalFencePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalFenceProperties ) + vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) ); + + //=== VK_KHR_performance_query === + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); + vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) ); + vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) ); + + //=== VK_KHR_get_surface_capabilities2 === + vkGetPhysicalDeviceSurfaceCapabilities2KHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); + vkGetPhysicalDeviceSurfaceFormats2KHR = + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + + //=== VK_KHR_get_display_properties2 === + 
vkGetPhysicalDeviceDisplayProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); + vkGetPhysicalDeviceDisplayPlaneProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); + vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); + vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) ); + vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); + vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); + vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + vkGetAndroidHardwareBufferPropertiesANDROID = + PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = + PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + vkCreateExecutionGraphPipelinesAMDX = + PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetInstanceProcAddr( instance, "vkCreateExecutionGraphPipelinesAMDX" ) ); + vkGetExecutionGraphPipelineScratchSizeAMDX = + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetInstanceProcAddr( instance, 
"vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); + vkGetExecutionGraphPipelineNodeIndexAMDX = + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); + vkCmdInitializeGraphScratchMemoryAMDX = + PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetInstanceProcAddr( instance, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); + vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphAMDX" ) ); + vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectAMDX" ) ); + vkCmdDispatchGraphIndirectCountAMDX = + PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectCountAMDX" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) ); + vkGetPhysicalDeviceMultisamplePropertiesEXT = + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); + + //=== VK_KHR_get_memory_requirements2 === + vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) ); + if ( !vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements2KHR = + PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + + //=== VK_KHR_acceleration_structure === + vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = + PFN_vkCmdBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = + PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructuresKHR" ) ); + vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = + PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = + PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkWriteAccelerationStructuresPropertiesKHR = + PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + 
vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = + PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = + PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = + PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetAccelerationStructureBuildSizesKHR = + PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureBuildSizesKHR" ) ); + + //=== VK_KHR_ray_tracing_pipeline === + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) ); + vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = + PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) ); + vkGetRayTracingShaderGroupStackSizeKHR = + PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = + PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + + //=== VK_KHR_sampler_ycbcr_conversion === + vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + + //=== VK_KHR_bind_memory2 === + vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; + + //=== VK_EXT_image_drm_format_modifier === + vkGetImageDrmFormatModifierPropertiesEXT = + PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, 
"vkGetImageDrmFormatModifierPropertiesEXT" ) ); + + //=== VK_EXT_validation_cache === + vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) ); + vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) ); + vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) ); + + //=== VK_NV_shading_rate_image === + vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) ); + vkCmdSetViewportShadingRatePaletteNV = + PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) ); + + //=== VK_NV_ray_tracing === + vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) ); + vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = + PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkBindAccelerationStructureMemoryNV = + PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) ); + vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) ); + vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) ); + vkGetRayTracingShaderGroupHandlesNV = + PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = + PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) ); + + //=== VK_KHR_maintenance3 === + vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + + //=== VK_KHR_draw_indirect_count === + vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount 
= vkCmdDrawIndirectCountKHR; + vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + + //=== VK_EXT_external_memory_host === + vkGetMemoryHostPointerPropertiesEXT = + PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) ); + + //=== VK_AMD_buffer_marker === + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) ); + + //=== VK_EXT_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); + if ( !vkGetPhysicalDeviceCalibrateableTimeDomainsKHR ) + vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = vkGetPhysicalDeviceCalibrateableTimeDomainsEXT; + vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) ); + if ( !vkGetCalibratedTimestampsKHR ) + vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; + + //=== VK_NV_mesh_shader === + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) ); + vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + + //=== VK_NV_scissor_exclusive === + vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorEnableNV" ) ); + vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) ); + + //=== VK_NV_device_diagnostic_checkpoints === + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); + vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) ); + + //=== VK_KHR_timeline_semaphore === + vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + + //=== VK_INTEL_performance_query === + vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) 
); + vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = + PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkAcquirePerformanceConfigurationINTEL = + PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkReleasePerformanceConfigurationINTEL = + PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) ); + vkQueueSetPerformanceConfigurationINTEL = + PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) ); + + //=== VK_AMD_display_native_hdr === + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) ); + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + vkGetPhysicalDeviceFragmentShadingRatesKHR = + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); + vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) ); + + //=== VK_KHR_dynamic_rendering_local_read === + vkCmdSetRenderingAttachmentLocationsKHR = + PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); + if ( !vkCmdSetRenderingAttachmentLocations ) + vkCmdSetRenderingAttachmentLocations = vkCmdSetRenderingAttachmentLocationsKHR; + vkCmdSetRenderingInputAttachmentIndicesKHR = + PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); + if ( !vkCmdSetRenderingInputAttachmentIndices ) + vkCmdSetRenderingInputAttachmentIndices = vkCmdSetRenderingInputAttachmentIndicesKHR; + + //=== VK_EXT_buffer_device_address === + vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + + //=== VK_EXT_tooling_info === + vkGetPhysicalDeviceToolPropertiesEXT = + PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); + if ( !vkGetPhysicalDeviceToolProperties ) + vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT; + + //=== VK_KHR_present_wait === + vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetInstanceProcAddr( instance, "vkWaitForPresentKHR" ) ); + + //=== VK_NV_cooperative_matrix 
=== + vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); + + //=== VK_NV_coverage_reduction_mode === + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + vkGetPhysicalDeviceSurfacePresentModes2EXT = + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); + vkAcquireFullScreenExclusiveModeEXT = + PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) ); + vkReleaseFullScreenExclusiveModeEXT = + PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = + PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); + + //=== VK_KHR_buffer_device_address === + vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetDeviceMemoryOpaqueCaptureAddressKHR = + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + + //=== VK_EXT_line_rasterization === + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) ); + if ( !vkCmdSetLineStipple ) + vkCmdSetLineStipple = vkCmdSetLineStippleEXT; + + //=== VK_EXT_host_query_reset === + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; + + //=== VK_EXT_extended_dynamic_state === + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) ); + if ( !vkCmdSetCullMode ) + vkCmdSetCullMode = vkCmdSetCullModeEXT; + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) ); + if ( !vkCmdSetFrontFace ) + vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; + vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) ); + if ( !vkCmdSetPrimitiveTopology ) + vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; + vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) ); + if ( 
!vkCmdSetViewportWithCount ) + vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; + vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) ); + if ( !vkCmdSetScissorWithCount ) + vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; + vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) ); + if ( !vkCmdBindVertexBuffers2 ) + vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; + vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) ); + if ( !vkCmdSetDepthTestEnable ) + vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; + vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) ); + if ( !vkCmdSetDepthWriteEnable ) + vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; + vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) ); + if ( !vkCmdSetDepthCompareOp ) + vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; + vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + if ( !vkCmdSetDepthBoundsTestEnable ) + vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; + vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) ); + if ( !vkCmdSetStencilTestEnable ) + vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) ); + if ( !vkCmdSetStencilOp ) + vkCmdSetStencilOp = vkCmdSetStencilOpEXT; + + //=== VK_KHR_deferred_host_operations === + vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) ); + vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = + PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) ); + vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) ); + + //=== VK_KHR_pipeline_executable_properties === + vkGetPipelineExecutablePropertiesKHR = + PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = + PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = + PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + + //=== VK_EXT_host_image_copy === + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToImageEXT" ) ); + if ( !vkCopyMemoryToImage ) + vkCopyMemoryToImage = vkCopyMemoryToImageEXT; + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetInstanceProcAddr( instance, 
"vkCopyImageToMemoryEXT" ) ); + if ( !vkCopyImageToMemory ) + vkCopyImageToMemory = vkCopyImageToMemoryEXT; + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToImageEXT" ) ); + if ( !vkCopyImageToImage ) + vkCopyImageToImage = vkCopyImageToImageEXT; + vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetInstanceProcAddr( instance, "vkTransitionImageLayoutEXT" ) ); + if ( !vkTransitionImageLayout ) + vkTransitionImageLayout = vkTransitionImageLayoutEXT; + vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) ); + if ( !vkGetImageSubresourceLayout2 ) + vkGetImageSubresourceLayout2 = vkGetImageSubresourceLayout2EXT; + + //=== VK_KHR_map_memory2 === + vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetInstanceProcAddr( instance, "vkMapMemory2KHR" ) ); + if ( !vkMapMemory2 ) + vkMapMemory2 = vkMapMemory2KHR; + vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetInstanceProcAddr( instance, "vkUnmapMemory2KHR" ) ); + if ( !vkUnmapMemory2 ) + vkUnmapMemory2 = vkUnmapMemory2KHR; + + //=== VK_EXT_swapchain_maintenance1 === + vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetInstanceProcAddr( instance, "vkReleaseSwapchainImagesEXT" ) ); + + //=== VK_NV_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsNV = + PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) ); + vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) ); + + //=== VK_EXT_depth_bias_control === + vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias2EXT" ) ); + + //=== VK_EXT_acquire_drm_display === + vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) ); + vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) ); + + //=== VK_EXT_private_data === + vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) ); + if ( !vkCreatePrivateDataSlot ) + vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; + vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) ); + if ( !vkDestroyPrivateDataSlot ) + vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) ); + if ( !vkSetPrivateData ) + vkSetPrivateData = vkSetPrivateDataEXT; + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) ); + if ( !vkGetPrivateData ) + vkGetPrivateData = vkGetPrivateDataEXT; + + //=== 
VK_KHR_video_encode_queue === + vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR" ) ); + vkGetEncodedVideoSessionParametersKHR = + PFN_vkGetEncodedVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkGetEncodedVideoSessionParametersKHR" ) ); + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdEncodeVideoKHR" ) ); + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetInstanceProcAddr( instance, "vkCreateCudaModuleNV" ) ); + vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetInstanceProcAddr( instance, "vkGetCudaModuleCacheNV" ) ); + vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkCreateCudaFunctionNV" ) ); + vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaModuleNV" ) ); + vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaFunctionNV" ) ); + vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetInstanceProcAddr( instance, "vkCmdCudaLaunchKernelNV" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_QCOM_tile_shading === + vkCmdDispatchTileQCOM = PFN_vkCmdDispatchTileQCOM( vkGetInstanceProcAddr( instance, "vkCmdDispatchTileQCOM" ) ); + vkCmdBeginPerTileExecutionQCOM = PFN_vkCmdBeginPerTileExecutionQCOM( vkGetInstanceProcAddr( instance, "vkCmdBeginPerTileExecutionQCOM" ) ); + vkCmdEndPerTileExecutionQCOM = PFN_vkCmdEndPerTileExecutionQCOM( vkGetInstanceProcAddr( instance, "vkCmdEndPerTileExecutionQCOM" ) ); + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetInstanceProcAddr( instance, "vkExportMetalObjectsEXT" ) ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2KHR" ) ); + if ( !vkCmdSetEvent2 ) + vkCmdSetEvent2 = vkCmdSetEvent2KHR; + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2KHR" ) ); + if ( !vkCmdResetEvent2 ) + vkCmdResetEvent2 = vkCmdResetEvent2KHR; + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2KHR" ) ); + if ( !vkCmdWaitEvents2 ) + vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; + vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2KHR" ) ); + if ( !vkCmdPipelineBarrier2 ) + vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; + vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2KHR" ) ); + if ( !vkCmdWriteTimestamp2 ) + vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) ); + if ( !vkQueueSubmit2 ) + vkQueueSubmit2 = vkQueueSubmit2KHR; + + //=== VK_EXT_descriptor_buffer === + vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSizeEXT" ) ); + vkGetDescriptorSetLayoutBindingOffsetEXT = + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); + vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( 
vkGetInstanceProcAddr( instance, "vkGetDescriptorEXT" ) ); + vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBuffersEXT" ) ); + vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplersEXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); + vkGetBufferOpaqueCaptureDescriptorDataEXT = + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageViewOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); + vkGetSamplerOpaqueCaptureDescriptorDataEXT = + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); + vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); + + //=== VK_NV_fragment_shading_rate_enums === + vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) ); + + //=== VK_EXT_mesh_shader === + vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksEXT" ) ); + vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectEXT" ) ); + vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); + + //=== VK_KHR_copy_commands2 === + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) ); + if ( !vkCmdCopyBuffer2 ) + vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) ); + if ( !vkCmdCopyImage2 ) + vkCmdCopyImage2 = vkCmdCopyImage2KHR; + vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) ); + if ( !vkCmdCopyBufferToImage2 ) + vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; + vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) ); + if ( !vkCmdCopyImageToBuffer2 ) + vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2KHR" ) ); + if ( !vkCmdBlitImage2 ) + vkCmdBlitImage2 = vkCmdBlitImage2KHR; + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) ); + if ( !vkCmdResolveImage2 ) + vkCmdResolveImage2 = vkCmdResolveImage2KHR; + + //=== VK_EXT_device_fault === + vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceFaultInfoEXT" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + 
vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); + vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); + vkGetPhysicalDeviceDirectFBPresentationSupportEXT = + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_vertex_input_dynamic_state === + vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetInstanceProcAddr( instance, "vkCmdSetVertexInputEXT" ) ); + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandleFUCHSIA" ) ); + vkGetMemoryZirconHandlePropertiesFUCHSIA = + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + vkImportSemaphoreZirconHandleFUCHSIA = + PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); + vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateBufferCollectionFUCHSIA" ) ); + vkSetBufferCollectionImageConstraintsFUCHSIA = + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); + vkSetBufferCollectionBufferConstraintsFUCHSIA = + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); + vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkDestroyBufferCollectionFUCHSIA" ) ); + vkGetBufferCollectionPropertiesFUCHSIA = + PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetInstanceProcAddr( instance, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); + vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdSubpassShadingHUAWEI" ) ); + + //=== VK_HUAWEI_invocation_mask === + vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdBindInvocationMaskHUAWEI" ) ); + + //=== VK_NV_external_memory_rdma === + vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetInstanceProcAddr( instance, "vkGetMemoryRemoteAddressNV" ) ); + + //=== VK_EXT_pipeline_properties === + vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetInstanceProcAddr( 
instance, "vkGetPipelinePropertiesEXT" ) ); + + //=== VK_EXT_extended_dynamic_state2 === + vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + if ( !vkCmdSetRasterizerDiscardEnable ) + vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; + vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnableEXT" ) ); + if ( !vkCmdSetDepthBiasEnable ) + vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEXT" ) ); + vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + if ( !vkCmdSetPrimitiveRestartEnable ) + vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); + vkGetPhysicalDeviceScreenPresentationSupportQNX = + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteEnableEXT" ) ); + + //=== VK_KHR_ray_tracing_maintenance1 === + vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirect2KHR" ) ); + + //=== VK_EXT_multi_draw === + vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiEXT" ) ); + vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiIndexedEXT" ) ); + + //=== VK_EXT_opacity_micromap === + vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetInstanceProcAddr( instance, "vkCreateMicromapEXT" ) ); + vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetInstanceProcAddr( instance, "vkDestroyMicromapEXT" ) ); + vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkCmdBuildMicromapsEXT" ) ); + vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkBuildMicromapsEXT" ) ); + vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapEXT" ) ); + vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapToMemoryEXT" ) ); + vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToMicromapEXT" ) ); + vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkWriteMicromapsPropertiesEXT" ) ); + vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapEXT" ) ); + vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapToMemoryEXT" ) ); + vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToMicromapEXT" ) ); + vkCmdWriteMicromapsPropertiesEXT = 
PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkCmdWriteMicromapsPropertiesEXT" ) ); + vkGetDeviceMicromapCompatibilityEXT = + PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceMicromapCompatibilityEXT" ) ); + vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetInstanceProcAddr( instance, "vkGetMicromapBuildSizesEXT" ) ); + + //=== VK_HUAWEI_cluster_culling_shader === + vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdDrawClusterHUAWEI" ) ); + vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdDrawClusterIndirectHUAWEI" ) ); + + //=== VK_EXT_pageable_device_local_memory === + vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetInstanceProcAddr( instance, "vkSetDeviceMemoryPriorityEXT" ) ); + + //=== VK_KHR_maintenance4 === + vkGetDeviceBufferMemoryRequirementsKHR = + PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceBufferMemoryRequirements ) + vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; + vkGetDeviceImageMemoryRequirementsKHR = + PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageMemoryRequirements ) + vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; + vkGetDeviceImageSparseMemoryRequirementsKHR = + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageSparseMemoryRequirements ) + vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; + + //=== VK_VALVE_descriptor_set_host_mapping === + vkGetDescriptorSetLayoutHostMappingInfoVALVE = + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); + vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetHostMappingVALVE" ) ); + + //=== VK_NV_copy_memory_indirect === + vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryIndirectNV" ) ); + vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToImageIndirectNV" ) ); + + //=== VK_NV_memory_decompression === + vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryNV" ) ); + vkCmdDecompressMemoryIndirectCountNV = + PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryIndirectCountNV" ) ); + + //=== VK_NV_device_generated_commands_compute === + vkGetPipelineIndirectMemoryRequirementsNV = + PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); + vkCmdUpdatePipelineIndirectBufferNV = + PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetInstanceProcAddr( instance, "vkCmdUpdatePipelineIndirectBufferNV" ) ); + vkGetPipelineIndirectDeviceAddressNV = + PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetInstanceProcAddr( instance, "vkGetPipelineIndirectDeviceAddressNV" ) ); + + //=== VK_EXT_extended_dynamic_state3 === + vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetInstanceProcAddr( 
instance, "vkCmdSetDepthClampEnableEXT" ) ); + vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPolygonModeEXT" ) ); + vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationSamplesEXT" ) ); + vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleMaskEXT" ) ); + vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToCoverageEnableEXT" ) ); + vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToOneEnableEXT" ) ); + vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEnableEXT" ) ); + vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEnableEXT" ) ); + vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEquationEXT" ) ); + vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteMaskEXT" ) ); + vkCmdSetTessellationDomainOriginEXT = + PFN_vkCmdSetTessellationDomainOriginEXT( vkGetInstanceProcAddr( instance, "vkCmdSetTessellationDomainOriginEXT" ) ); + vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationStreamEXT" ) ); + vkCmdSetConservativeRasterizationModeEXT = + PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetConservativeRasterizationModeEXT" ) ); + vkCmdSetExtraPrimitiveOverestimationSizeEXT = + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) ); + vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipEnableEXT" ) ); + vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEnableEXT" ) ); + vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendAdvancedEXT" ) ); + vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetProvokingVertexModeEXT" ) ); + vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineRasterizationModeEXT" ) ); + vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEnableEXT" ) ); + vkCmdSetDepthClipNegativeOneToOneEXT = + PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipNegativeOneToOneEXT" ) ); + vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingEnableNV" ) ); + vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportSwizzleNV" ) ); + vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorEnableNV" ) ); + vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorLocationNV" ) ); + vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( 
vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationModeNV" ) ); + vkCmdSetCoverageModulationTableEnableNV = + PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationTableEnableNV" ) ); + vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationTableNV" ) ); + vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetShadingRateImageEnableNV" ) ); + vkCmdSetRepresentativeFragmentTestEnableNV = + PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); + vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageReductionModeNV" ) ); + + //=== VK_EXT_shader_module_identifier === + vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleIdentifierEXT" ) ); + vkGetShaderModuleCreateInfoIdentifierEXT = + PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); + + //=== VK_NV_optical_flow === + vkGetPhysicalDeviceOpticalFlowImageFormatsNV = + PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) ); + vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkCreateOpticalFlowSessionNV" ) ); + vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkDestroyOpticalFlowSessionNV" ) ); + vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetInstanceProcAddr( instance, "vkBindOpticalFlowSessionImageNV" ) ); + vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetInstanceProcAddr( instance, "vkCmdOpticalFlowExecuteNV" ) ); + + //=== VK_KHR_maintenance5 === + vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer2KHR" ) ); + if ( !vkCmdBindIndexBuffer2 ) + vkCmdBindIndexBuffer2 = vkCmdBindIndexBuffer2KHR; + vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetInstanceProcAddr( instance, "vkGetRenderingAreaGranularityKHR" ) ); + if ( !vkGetRenderingAreaGranularity ) + vkGetRenderingAreaGranularity = vkGetRenderingAreaGranularityKHR; + vkGetDeviceImageSubresourceLayoutKHR = + PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSubresourceLayoutKHR" ) ); + if ( !vkGetDeviceImageSubresourceLayout ) + vkGetDeviceImageSubresourceLayout = vkGetDeviceImageSubresourceLayoutKHR; + vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2KHR" ) ); + if ( !vkGetImageSubresourceLayout2 ) + vkGetImageSubresourceLayout2 = vkGetImageSubresourceLayout2KHR; + + //=== VK_AMD_anti_lag === + vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetInstanceProcAddr( instance, "vkAntiLagUpdateAMD" ) ); + + //=== VK_EXT_shader_object === + vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetInstanceProcAddr( instance, "vkCreateShadersEXT" ) ); + vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetInstanceProcAddr( instance, "vkDestroyShaderEXT" ) ); + vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetInstanceProcAddr( instance, "vkGetShaderBinaryDataEXT" ) ); + vkCmdBindShadersEXT 
= PFN_vkCmdBindShadersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindShadersEXT" ) ); + vkCmdSetDepthClampRangeEXT = PFN_vkCmdSetDepthClampRangeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClampRangeEXT" ) ); + + //=== VK_KHR_pipeline_binary === + vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetInstanceProcAddr( instance, "vkCreatePipelineBinariesKHR" ) ); + vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetInstanceProcAddr( instance, "vkDestroyPipelineBinaryKHR" ) ); + vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineKeyKHR" ) ); + vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineBinaryDataKHR" ) ); + vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetInstanceProcAddr( instance, "vkReleaseCapturedPipelineDataKHR" ) ); + + //=== VK_QCOM_tile_properties === + vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetFramebufferTilePropertiesQCOM" ) ); + vkGetDynamicRenderingTilePropertiesQCOM = + PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); + + //=== VK_NV_cooperative_vector === + vkGetPhysicalDeviceCooperativeVectorPropertiesNV = + PFN_vkGetPhysicalDeviceCooperativeVectorPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeVectorPropertiesNV" ) ); + vkConvertCooperativeVectorMatrixNV = PFN_vkConvertCooperativeVectorMatrixNV( vkGetInstanceProcAddr( instance, "vkConvertCooperativeVectorMatrixNV" ) ); + vkCmdConvertCooperativeVectorMatrixNV = + PFN_vkCmdConvertCooperativeVectorMatrixNV( vkGetInstanceProcAddr( instance, "vkCmdConvertCooperativeVectorMatrixNV" ) ); + + //=== VK_NV_low_latency2 === + vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetInstanceProcAddr( instance, "vkSetLatencySleepModeNV" ) ); + vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetInstanceProcAddr( instance, "vkLatencySleepNV" ) ); + vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetInstanceProcAddr( instance, "vkSetLatencyMarkerNV" ) ); + vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetInstanceProcAddr( instance, "vkGetLatencyTimingsNV" ) ); + vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetInstanceProcAddr( instance, "vkQueueNotifyOutOfBandNV" ) ); + + //=== VK_KHR_cooperative_matrix === + vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR" ) ); + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + vkCmdSetAttachmentFeedbackLoopEnableEXT = + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetInstanceProcAddr( instance, "vkGetScreenBufferPropertiesQNX" ) ); +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_line_rasterization === + vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleKHR" ) ); + if ( !vkCmdSetLineStipple ) + vkCmdSetLineStipple = vkCmdSetLineStippleKHR; + + //=== VK_KHR_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = + 
PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsKHR" ) ); + vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsKHR" ) ); + + //=== VK_KHR_maintenance6 === + vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets2KHR" ) ); + if ( !vkCmdBindDescriptorSets2 ) + vkCmdBindDescriptorSets2 = vkCmdBindDescriptorSets2KHR; + vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushConstants2KHR" ) ); + if ( !vkCmdPushConstants2 ) + vkCmdPushConstants2 = vkCmdPushConstants2KHR; + vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSet2KHR" ) ); + if ( !vkCmdPushDescriptorSet2 ) + vkCmdPushDescriptorSet2 = vkCmdPushDescriptorSet2KHR; + vkCmdPushDescriptorSetWithTemplate2KHR = + PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); + if ( !vkCmdPushDescriptorSetWithTemplate2 ) + vkCmdPushDescriptorSetWithTemplate2 = vkCmdPushDescriptorSetWithTemplate2KHR; + vkCmdSetDescriptorBufferOffsets2EXT = + PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetInstanceProcAddr( instance, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) ); + + //=== VK_QCOM_tile_memory_heap === + vkCmdBindTileMemoryQCOM = PFN_vkCmdBindTileMemoryQCOM( vkGetInstanceProcAddr( instance, "vkCmdBindTileMemoryQCOM" ) ); + + //=== VK_NV_external_compute_queue === + vkCreateExternalComputeQueueNV = PFN_vkCreateExternalComputeQueueNV( vkGetInstanceProcAddr( instance, "vkCreateExternalComputeQueueNV" ) ); + vkDestroyExternalComputeQueueNV = PFN_vkDestroyExternalComputeQueueNV( vkGetInstanceProcAddr( instance, "vkDestroyExternalComputeQueueNV" ) ); + vkGetExternalComputeQueueDataNV = PFN_vkGetExternalComputeQueueDataNV( vkGetInstanceProcAddr( instance, "vkGetExternalComputeQueueDataNV" ) ); + + //=== VK_NV_cluster_acceleration_structure === + vkGetClusterAccelerationStructureBuildSizesNV = + PFN_vkGetClusterAccelerationStructureBuildSizesNV( vkGetInstanceProcAddr( instance, "vkGetClusterAccelerationStructureBuildSizesNV" ) ); + vkCmdBuildClusterAccelerationStructureIndirectNV = + PFN_vkCmdBuildClusterAccelerationStructureIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdBuildClusterAccelerationStructureIndirectNV" ) ); + + //=== VK_NV_partitioned_acceleration_structure === + vkGetPartitionedAccelerationStructuresBuildSizesNV = + PFN_vkGetPartitionedAccelerationStructuresBuildSizesNV( vkGetInstanceProcAddr( instance, "vkGetPartitionedAccelerationStructuresBuildSizesNV" ) ); + vkCmdBuildPartitionedAccelerationStructuresNV = + PFN_vkCmdBuildPartitionedAccelerationStructuresNV( vkGetInstanceProcAddr( instance, "vkCmdBuildPartitionedAccelerationStructuresNV" ) ); + + //=== VK_EXT_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsEXT = + PFN_vkGetGeneratedCommandsMemoryRequirementsEXT( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsEXT" ) ); + vkCmdPreprocessGeneratedCommandsEXT = + PFN_vkCmdPreprocessGeneratedCommandsEXT( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsEXT" ) ); + vkCmdExecuteGeneratedCommandsEXT = 
PFN_vkCmdExecuteGeneratedCommandsEXT( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsEXT" ) ); + vkCreateIndirectCommandsLayoutEXT = PFN_vkCreateIndirectCommandsLayoutEXT( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutEXT" ) ); + vkDestroyIndirectCommandsLayoutEXT = PFN_vkDestroyIndirectCommandsLayoutEXT( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutEXT" ) ); + vkCreateIndirectExecutionSetEXT = PFN_vkCreateIndirectExecutionSetEXT( vkGetInstanceProcAddr( instance, "vkCreateIndirectExecutionSetEXT" ) ); + vkDestroyIndirectExecutionSetEXT = PFN_vkDestroyIndirectExecutionSetEXT( vkGetInstanceProcAddr( instance, "vkDestroyIndirectExecutionSetEXT" ) ); + vkUpdateIndirectExecutionSetPipelineEXT = + PFN_vkUpdateIndirectExecutionSetPipelineEXT( vkGetInstanceProcAddr( instance, "vkUpdateIndirectExecutionSetPipelineEXT" ) ); + vkUpdateIndirectExecutionSetShaderEXT = + PFN_vkUpdateIndirectExecutionSetShaderEXT( vkGetInstanceProcAddr( instance, "vkUpdateIndirectExecutionSetShaderEXT" ) ); + + //=== VK_NV_cooperative_matrix2 === + vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV" ) ); + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + vkGetMemoryMetalHandleEXT = PFN_vkGetMemoryMetalHandleEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryMetalHandleEXT" ) ); + vkGetMemoryMetalHandlePropertiesEXT = + PFN_vkGetMemoryMetalHandlePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryMetalHandlePropertiesEXT" ) ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_fragment_density_map_offset === + vkCmdEndRendering2EXT = PFN_vkCmdEndRendering2EXT( vkGetInstanceProcAddr( instance, "vkCmdEndRendering2EXT" ) ); + } + + void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT + { + VkDevice device = static_cast<VkDevice>( deviceCpp ); + + //=== VK_VERSION_1_0 === + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) ); + vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) ); + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); + vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); + vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); + vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); + vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device,
"vkBindImageMemory" ) ); + vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); + vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); + vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); + vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); + vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); + vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); + vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); + vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); + vkCreateGraphicsPipelines = 
PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); + vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); + vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); + vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); + vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); + vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); + vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); + vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, 
"vkCmdSetDepthBias" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); + vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); + vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, 
"vkCmdCopyQueryPoolResults" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); + + //=== VK_VERSION_1_1 === + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); + vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); + vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); + vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); + vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); + + //=== VK_VERSION_1_2 === + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); + vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); + 
vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); + vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); + vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = + PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + + //=== VK_VERSION_1_3 === + vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) ); + vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) ); + vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) ); + vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) ); + vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) ); + vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) ); + vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) ); + vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) ); + vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) ); + vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) ); + vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) ); + vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) ); + vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) ); + vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) ); + vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) ); + vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) ); + vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) ); + vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) ); + vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) ); + vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) ); + vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) ); + vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) ); + vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) ); + vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) ); + vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) ); + vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) ); + vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) ); + vkCmdSetDepthBoundsTestEnable = 
PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) ); + vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) ); + vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) ); + vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) ); + vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) ); + vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); + vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); + vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); + vkGetDeviceImageSparseMemoryRequirements = + PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); + + //=== VK_VERSION_1_4 === + vkCmdSetLineStipple = PFN_vkCmdSetLineStipple( vkGetDeviceProcAddr( device, "vkCmdSetLineStipple" ) ); + vkMapMemory2 = PFN_vkMapMemory2( vkGetDeviceProcAddr( device, "vkMapMemory2" ) ); + vkUnmapMemory2 = PFN_vkUnmapMemory2( vkGetDeviceProcAddr( device, "vkUnmapMemory2" ) ); + vkCmdBindIndexBuffer2 = PFN_vkCmdBindIndexBuffer2( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2" ) ); + vkGetRenderingAreaGranularity = PFN_vkGetRenderingAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularity" ) ); + vkGetDeviceImageSubresourceLayout = PFN_vkGetDeviceImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayout" ) ); + vkGetImageSubresourceLayout2 = PFN_vkGetImageSubresourceLayout2( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2" ) ); + vkCmdPushDescriptorSet = PFN_vkCmdPushDescriptorSet( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet" ) ); + vkCmdPushDescriptorSetWithTemplate = PFN_vkCmdPushDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate" ) ); + vkCmdSetRenderingAttachmentLocations = + PFN_vkCmdSetRenderingAttachmentLocations( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocations" ) ); + vkCmdSetRenderingInputAttachmentIndices = + PFN_vkCmdSetRenderingInputAttachmentIndices( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndices" ) ); + vkCmdBindDescriptorSets2 = PFN_vkCmdBindDescriptorSets2( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2" ) ); + vkCmdPushConstants2 = PFN_vkCmdPushConstants2( vkGetDeviceProcAddr( device, "vkCmdPushConstants2" ) ); + vkCmdPushDescriptorSet2 = PFN_vkCmdPushDescriptorSet2( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2" ) ); + vkCmdPushDescriptorSetWithTemplate2 = PFN_vkCmdPushDescriptorSetWithTemplate2( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2" ) ); + vkCopyMemoryToImage = PFN_vkCopyMemoryToImage( vkGetDeviceProcAddr( device, "vkCopyMemoryToImage" ) ); + vkCopyImageToMemory = PFN_vkCopyImageToMemory( vkGetDeviceProcAddr( device, "vkCopyImageToMemory" ) ); + vkCopyImageToImage = PFN_vkCopyImageToImage( vkGetDeviceProcAddr( device, "vkCopyImageToImage" ) ); + vkTransitionImageLayout = PFN_vkTransitionImageLayout( vkGetDeviceProcAddr( device, "vkTransitionImageLayout" ) ); + + //=== 
VK_KHR_swapchain === + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); + vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) ); + vkGetDeviceGroupPresentCapabilitiesKHR = + PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); + vkGetDeviceGroupSurfacePresentModesKHR = + PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); + + //=== VK_KHR_display_swapchain === + vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); + + //=== VK_EXT_debug_marker === + vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); + vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); + vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); + + //=== VK_KHR_video_queue === + vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) ); + vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); + vkGetVideoSessionMemoryRequirementsKHR = + PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) ); + vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) ); + vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) ); + vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) ); + vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) ); + vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); + vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); + vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); + + //=== VK_KHR_video_decode_queue === + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); + + //=== VK_EXT_transform_feedback === + vkCmdBindTransformFeedbackBuffersEXT = + PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + 
vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); + + //=== VK_NVX_binary_import === + vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) ); + vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) ); + vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) ); + vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) ); + vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); + + //=== VK_NVX_image_view_handle === + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewHandle64NVX = PFN_vkGetImageViewHandle64NVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandle64NVX" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + + //=== VK_AMD_draw_indirect_count === + vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + + //=== VK_AMD_shader_info === + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); + + //=== VK_KHR_dynamic_rendering === + vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) ); + if ( !vkCmdBeginRendering ) + vkCmdBeginRendering = vkCmdBeginRenderingKHR; + vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) ); + if ( !vkCmdEndRendering ) + vkCmdEndRendering = vkCmdEndRenderingKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_device_group === + vkGetDeviceGroupPeerMemoryFeaturesKHR = + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); + if ( 
!vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + + //=== VK_KHR_maintenance1 === + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); + + //=== VK_KHR_push_descriptor === + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + if ( !vkCmdPushDescriptorSet ) + vkCmdPushDescriptorSet = vkCmdPushDescriptorSetKHR; + vkCmdPushDescriptorSetWithTemplateKHR = + PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + if ( !vkCmdPushDescriptorSetWithTemplate ) + vkCmdPushDescriptorSetWithTemplate = vkCmdPushDescriptorSetWithTemplateKHR; + + //=== VK_EXT_conditional_rendering === + vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + + //=== VK_KHR_descriptor_update_template === + vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; + vkDestroyDescriptorUpdateTemplateKHR = + PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + vkUpdateDescriptorSetWithTemplateKHR = + PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + + //=== VK_NV_clip_space_w_scaling === + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + + //=== VK_EXT_display_control === + 
vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); + vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); + + //=== VK_GOOGLE_display_timing === + vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); + + //=== VK_EXT_discard_rectangles === + vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEnableEXT" ) ); + vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleModeEXT" ) ); + + //=== VK_EXT_hdr_metadata === + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + + //=== VK_KHR_create_renderpass2 === + vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); + if ( !vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + + //=== VK_KHR_shared_presentable_image === + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); + + //=== VK_KHR_performance_query === + vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); + vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); + + //=== VK_EXT_debug_utils === + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" 
) ); + vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + vkGetAndroidHardwareBufferPropertiesANDROID = + PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = + PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) ); + vkGetExecutionGraphPipelineScratchSizeAMDX = + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); + vkGetExecutionGraphPipelineNodeIndexAMDX = + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); + vkCmdInitializeGraphScratchMemoryAMDX = + PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); + vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) ); + vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) ); + vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + + //=== VK_KHR_get_memory_requirements2 === + vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); + if ( !vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements2KHR = + PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + + //=== VK_KHR_acceleration_structure === + 
vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = + PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); + vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = + PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = + PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkWriteAccelerationStructuresPropertiesKHR = + PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = + PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = + PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = + PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetAccelerationStructureBuildSizesKHR = + PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) ); + + //=== VK_KHR_ray_tracing_pipeline === + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); + vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = + PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); + vkGetRayTracingShaderGroupStackSizeKHR = + PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + 
vkCmdSetRayTracingPipelineStackSizeKHR = + PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + + //=== VK_KHR_sampler_ycbcr_conversion === + vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + + //=== VK_KHR_bind_memory2 === + vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; + + //=== VK_EXT_image_drm_format_modifier === + vkGetImageDrmFormatModifierPropertiesEXT = + PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + + //=== VK_EXT_validation_cache === + vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); + vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); + vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + + //=== VK_NV_shading_rate_image === + vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); + vkCmdSetViewportShadingRatePaletteNV = + PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + + //=== VK_NV_ray_tracing === + vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); + vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = + PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); + vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); + vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); + 
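+      // The NV ray tracing entry points predate VK_KHR_ray_tracing_pipeline; the assignment that
+      // follows also reuses vkGetRayTracingShaderGroupHandlesNV as a fallback for the KHR
+      // group-handle query when the KHR pointer could not be resolved.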
vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = + PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); + + //=== VK_KHR_maintenance3 === + vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + + //=== VK_KHR_draw_indirect_count === + vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + + //=== VK_EXT_external_memory_host === + vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); + + //=== VK_AMD_buffer_marker === + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); + + //=== VK_EXT_calibrated_timestamps === + vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); + if ( !vkGetCalibratedTimestampsKHR ) + vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; + + //=== VK_NV_mesh_shader === + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); + vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + + //=== VK_NV_scissor_exclusive === + vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorEnableNV" ) ); + vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + + //=== VK_NV_device_diagnostic_checkpoints === + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); + + //=== VK_KHR_timeline_semaphore === + vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, 
"vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + + //=== VK_INTEL_performance_query === + vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); + vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = + PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkAcquirePerformanceConfigurationINTEL = + PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkReleasePerformanceConfigurationINTEL = + PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); + vkQueueSetPerformanceConfigurationINTEL = + PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + + //=== VK_AMD_display_native_hdr === + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + + //=== VK_KHR_fragment_shading_rate === + vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); + + //=== VK_KHR_dynamic_rendering_local_read === + vkCmdSetRenderingAttachmentLocationsKHR = + PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); + if ( !vkCmdSetRenderingAttachmentLocations ) + vkCmdSetRenderingAttachmentLocations = vkCmdSetRenderingAttachmentLocationsKHR; + vkCmdSetRenderingInputAttachmentIndicesKHR = + PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); + if ( !vkCmdSetRenderingInputAttachmentIndices ) + vkCmdSetRenderingInputAttachmentIndices = vkCmdSetRenderingInputAttachmentIndicesKHR; + + //=== VK_EXT_buffer_device_address === + vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + + //=== VK_KHR_present_wait === + vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); + 
vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = + PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_buffer_device_address === + vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetDeviceMemoryOpaqueCaptureAddressKHR = + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + + //=== VK_EXT_line_rasterization === + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); + if ( !vkCmdSetLineStipple ) + vkCmdSetLineStipple = vkCmdSetLineStippleEXT; + + //=== VK_EXT_host_query_reset === + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; + + //=== VK_EXT_extended_dynamic_state === + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); + if ( !vkCmdSetCullMode ) + vkCmdSetCullMode = vkCmdSetCullModeEXT; + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); + if ( !vkCmdSetFrontFace ) + vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; + vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); + if ( !vkCmdSetPrimitiveTopology ) + vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; + vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); + if ( !vkCmdSetViewportWithCount ) + vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; + vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); + if ( !vkCmdSetScissorWithCount ) + vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; + vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); + if ( !vkCmdBindVertexBuffers2 ) + vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; + vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); + if ( !vkCmdSetDepthTestEnable ) + vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; + vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); + if ( !vkCmdSetDepthWriteEnable ) + vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; + vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); + if ( !vkCmdSetDepthCompareOp ) + vkCmdSetDepthCompareOp = 
vkCmdSetDepthCompareOpEXT; + vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + if ( !vkCmdSetDepthBoundsTestEnable ) + vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; + vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); + if ( !vkCmdSetStencilTestEnable ) + vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); + if ( !vkCmdSetStencilOp ) + vkCmdSetStencilOp = vkCmdSetStencilOpEXT; + + //=== VK_KHR_deferred_host_operations === + vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); + vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = + PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); + vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); + + //=== VK_KHR_pipeline_executable_properties === + vkGetPipelineExecutablePropertiesKHR = + PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = + PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = + PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + + //=== VK_EXT_host_image_copy === + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); + if ( !vkCopyMemoryToImage ) + vkCopyMemoryToImage = vkCopyMemoryToImageEXT; + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); + if ( !vkCopyImageToMemory ) + vkCopyImageToMemory = vkCopyImageToMemoryEXT; + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); + if ( !vkCopyImageToImage ) + vkCopyImageToImage = vkCopyImageToImageEXT; + vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); + if ( !vkTransitionImageLayout ) + vkTransitionImageLayout = vkTransitionImageLayoutEXT; + vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); + if ( !vkGetImageSubresourceLayout2 ) + vkGetImageSubresourceLayout2 = vkGetImageSubresourceLayout2EXT; + + //=== VK_KHR_map_memory2 === + vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); + if ( !vkMapMemory2 ) + vkMapMemory2 = vkMapMemory2KHR; + vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) ); + if ( !vkUnmapMemory2 ) + vkUnmapMemory2 = vkUnmapMemory2KHR; + + //=== VK_EXT_swapchain_maintenance1 === + vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, 
"vkReleaseSwapchainImagesEXT" ) ); + + //=== VK_NV_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsNV = + PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); + vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); + + //=== VK_EXT_depth_bias_control === + vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias2EXT" ) ); + + //=== VK_EXT_private_data === + vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); + if ( !vkCreatePrivateDataSlot ) + vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; + vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); + if ( !vkDestroyPrivateDataSlot ) + vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); + if ( !vkSetPrivateData ) + vkSetPrivateData = vkSetPrivateDataEXT; + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); + if ( !vkGetPrivateData ) + vkGetPrivateData = vkGetPrivateDataEXT; + + //=== VK_KHR_video_encode_queue === + vkGetEncodedVideoSessionParametersKHR = + PFN_vkGetEncodedVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkGetEncodedVideoSessionParametersKHR" ) ); + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) ); + vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, "vkGetCudaModuleCacheNV" ) ); + vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) ); + vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) ); + vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) ); + vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_QCOM_tile_shading === + vkCmdDispatchTileQCOM = PFN_vkCmdDispatchTileQCOM( vkGetDeviceProcAddr( device, "vkCmdDispatchTileQCOM" ) ); + vkCmdBeginPerTileExecutionQCOM = PFN_vkCmdBeginPerTileExecutionQCOM( vkGetDeviceProcAddr( device, "vkCmdBeginPerTileExecutionQCOM" ) ); + vkCmdEndPerTileExecutionQCOM = PFN_vkCmdEndPerTileExecutionQCOM( vkGetDeviceProcAddr( device, "vkCmdEndPerTileExecutionQCOM" ) ); + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + 
vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); + if ( !vkCmdSetEvent2 ) + vkCmdSetEvent2 = vkCmdSetEvent2KHR; + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); + if ( !vkCmdResetEvent2 ) + vkCmdResetEvent2 = vkCmdResetEvent2KHR; + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); + if ( !vkCmdWaitEvents2 ) + vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; + vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); + if ( !vkCmdPipelineBarrier2 ) + vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; + vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); + if ( !vkCmdWriteTimestamp2 ) + vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); + if ( !vkQueueSubmit2 ) + vkQueueSubmit2 = vkQueueSubmit2KHR; + + //=== VK_EXT_descriptor_buffer === + vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) ); + vkGetDescriptorSetLayoutBindingOffsetEXT = + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); + vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) ); + vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) ); + vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplersEXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); + vkGetBufferOpaqueCaptureDescriptorDataEXT = + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageViewOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); + vkGetSamplerOpaqueCaptureDescriptorDataEXT = + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); + vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); + + //=== VK_NV_fragment_shading_rate_enums === + vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); + + //=== VK_EXT_mesh_shader === + vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) ); + vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, 
"vkCmdDrawMeshTasksIndirectEXT" ) ); + vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); + + //=== VK_KHR_copy_commands2 === + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); + if ( !vkCmdCopyBuffer2 ) + vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); + if ( !vkCmdCopyImage2 ) + vkCmdCopyImage2 = vkCmdCopyImage2KHR; + vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); + if ( !vkCmdCopyBufferToImage2 ) + vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; + vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); + if ( !vkCmdCopyImageToBuffer2 ) + vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); + if ( !vkCmdBlitImage2 ) + vkCmdBlitImage2 = vkCmdBlitImage2KHR; + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); + if ( !vkCmdResolveImage2 ) + vkCmdResolveImage2 = vkCmdResolveImage2KHR; + + //=== VK_EXT_device_fault === + vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) ); + + //=== VK_EXT_vertex_input_dynamic_state === + vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); + vkGetMemoryZirconHandlePropertiesFUCHSIA = + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + vkImportSemaphoreZirconHandleFUCHSIA = + PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); + vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) ); + vkSetBufferCollectionImageConstraintsFUCHSIA = + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); + vkSetBufferCollectionBufferConstraintsFUCHSIA = + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); + vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) ); + vkGetBufferCollectionPropertiesFUCHSIA = + PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = + 
PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); + vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) ); + + //=== VK_HUAWEI_invocation_mask === + vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) ); + + //=== VK_NV_external_memory_rdma === + vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) ); + + //=== VK_EXT_pipeline_properties === + vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) ); + + //=== VK_EXT_extended_dynamic_state2 === + vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + if ( !vkCmdSetRasterizerDiscardEnable ) + vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; + vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); + if ( !vkCmdSetDepthBiasEnable ) + vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) ); + vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + if ( !vkCmdSetPrimitiveRestartEnable ) + vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; + + //=== VK_EXT_color_write_enable === + vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); + + //=== VK_KHR_ray_tracing_maintenance1 === + vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) ); + + //=== VK_EXT_multi_draw === + vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) ); + vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) ); + + //=== VK_EXT_opacity_micromap === + vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) ); + vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) ); + vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) ); + vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) ); + vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) ); + vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) ); + vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) ); + vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) ); + vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) ); + vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, 
"vkCmdCopyMicromapToMemoryEXT" ) ); + vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) ); + vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) ); + vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) ); + vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) ); + + //=== VK_HUAWEI_cluster_culling_shader === + vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) ); + vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) ); + + //=== VK_EXT_pageable_device_local_memory === + vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) ); + + //=== VK_KHR_maintenance4 === + vkGetDeviceBufferMemoryRequirementsKHR = + PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceBufferMemoryRequirements ) + vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; + vkGetDeviceImageMemoryRequirementsKHR = + PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageMemoryRequirements ) + vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; + vkGetDeviceImageSparseMemoryRequirementsKHR = + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageSparseMemoryRequirements ) + vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; + + //=== VK_VALVE_descriptor_set_host_mapping === + vkGetDescriptorSetLayoutHostMappingInfoVALVE = + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); + vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) ); + + //=== VK_NV_copy_memory_indirect === + vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) ); + vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) ); + + //=== VK_NV_memory_decompression === + vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) ); + vkCmdDecompressMemoryIndirectCountNV = + PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) ); + + //=== VK_NV_device_generated_commands_compute === + vkGetPipelineIndirectMemoryRequirementsNV = + PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); + vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) ); + vkGetPipelineIndirectDeviceAddressNV = + PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) ); + + //=== 
VK_EXT_extended_dynamic_state3 === + vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) ); + vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) ); + vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) ); + vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) ); + vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) ); + vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) ); + vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) ); + vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) ); + vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) ); + vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) ); + vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) ); + vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) ); + vkCmdSetConservativeRasterizationModeEXT = + PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) ); + vkCmdSetExtraPrimitiveOverestimationSizeEXT = + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) ); + vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) ); + vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) ); + vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) ); + vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) ); + vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) ); + vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) ); + vkCmdSetDepthClipNegativeOneToOneEXT = + PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) ); + vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) ); + vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) ); + vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) ); + vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) ); + vkCmdSetCoverageModulationModeNV = 
PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) ); + vkCmdSetCoverageModulationTableEnableNV = + PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) ); + vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) ); + vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) ); + vkCmdSetRepresentativeFragmentTestEnableNV = + PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); + vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) ); + + //=== VK_EXT_shader_module_identifier === + vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) ); + vkGetShaderModuleCreateInfoIdentifierEXT = + PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); + + //=== VK_NV_optical_flow === + vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) ); + vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) ); + vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) ); + vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); + + //=== VK_KHR_maintenance5 === + vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) ); + if ( !vkCmdBindIndexBuffer2 ) + vkCmdBindIndexBuffer2 = vkCmdBindIndexBuffer2KHR; + vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) ); + if ( !vkGetRenderingAreaGranularity ) + vkGetRenderingAreaGranularity = vkGetRenderingAreaGranularityKHR; + vkGetDeviceImageSubresourceLayoutKHR = + PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); + if ( !vkGetDeviceImageSubresourceLayout ) + vkGetDeviceImageSubresourceLayout = vkGetDeviceImageSubresourceLayoutKHR; + vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); + if ( !vkGetImageSubresourceLayout2 ) + vkGetImageSubresourceLayout2 = vkGetImageSubresourceLayout2KHR; + + //=== VK_AMD_anti_lag === + vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) ); + + //=== VK_EXT_shader_object === + vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); + vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); + vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) ); + vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) ); + vkCmdSetDepthClampRangeEXT = PFN_vkCmdSetDepthClampRangeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampRangeEXT" ) ); + + //=== 
VK_KHR_pipeline_binary === + vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetDeviceProcAddr( device, "vkCreatePipelineBinariesKHR" ) ); + vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetDeviceProcAddr( device, "vkDestroyPipelineBinaryKHR" ) ); + vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetDeviceProcAddr( device, "vkGetPipelineKeyKHR" ) ); + vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetDeviceProcAddr( device, "vkGetPipelineBinaryDataKHR" ) ); + vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetDeviceProcAddr( device, "vkReleaseCapturedPipelineDataKHR" ) ); + + //=== VK_QCOM_tile_properties === + vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); + vkGetDynamicRenderingTilePropertiesQCOM = + PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); + + //=== VK_NV_cooperative_vector === + vkConvertCooperativeVectorMatrixNV = PFN_vkConvertCooperativeVectorMatrixNV( vkGetDeviceProcAddr( device, "vkConvertCooperativeVectorMatrixNV" ) ); + vkCmdConvertCooperativeVectorMatrixNV = + PFN_vkCmdConvertCooperativeVectorMatrixNV( vkGetDeviceProcAddr( device, "vkCmdConvertCooperativeVectorMatrixNV" ) ); + + //=== VK_NV_low_latency2 === + vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetDeviceProcAddr( device, "vkSetLatencySleepModeNV" ) ); + vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetDeviceProcAddr( device, "vkLatencySleepNV" ) ); + vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetDeviceProcAddr( device, "vkSetLatencyMarkerNV" ) ); + vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetDeviceProcAddr( device, "vkGetLatencyTimingsNV" ) ); + vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetDeviceProcAddr( device, "vkQueueNotifyOutOfBandNV" ) ); + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + vkCmdSetAttachmentFeedbackLoopEnableEXT = + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetDeviceProcAddr( device, "vkGetScreenBufferPropertiesQNX" ) ); +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_line_rasterization === + vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleKHR" ) ); + if ( !vkCmdSetLineStipple ) + vkCmdSetLineStipple = vkCmdSetLineStippleKHR; + + //=== VK_KHR_calibrated_timestamps === + vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsKHR" ) ); + + //=== VK_KHR_maintenance6 === + vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2KHR" ) ); + if ( !vkCmdBindDescriptorSets2 ) + vkCmdBindDescriptorSets2 = vkCmdBindDescriptorSets2KHR; + vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) ); + if ( !vkCmdPushConstants2 ) + vkCmdPushConstants2 = vkCmdPushConstants2KHR; + vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) ); + if ( !vkCmdPushDescriptorSet2 ) + vkCmdPushDescriptorSet2 = vkCmdPushDescriptorSet2KHR; + 
vkCmdPushDescriptorSetWithTemplate2KHR = + PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); + if ( !vkCmdPushDescriptorSetWithTemplate2 ) + vkCmdPushDescriptorSetWithTemplate2 = vkCmdPushDescriptorSetWithTemplate2KHR; + vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) ); + + //=== VK_QCOM_tile_memory_heap === + vkCmdBindTileMemoryQCOM = PFN_vkCmdBindTileMemoryQCOM( vkGetDeviceProcAddr( device, "vkCmdBindTileMemoryQCOM" ) ); + + //=== VK_NV_external_compute_queue === + vkCreateExternalComputeQueueNV = PFN_vkCreateExternalComputeQueueNV( vkGetDeviceProcAddr( device, "vkCreateExternalComputeQueueNV" ) ); + vkDestroyExternalComputeQueueNV = PFN_vkDestroyExternalComputeQueueNV( vkGetDeviceProcAddr( device, "vkDestroyExternalComputeQueueNV" ) ); + vkGetExternalComputeQueueDataNV = PFN_vkGetExternalComputeQueueDataNV( vkGetDeviceProcAddr( device, "vkGetExternalComputeQueueDataNV" ) ); + + //=== VK_NV_cluster_acceleration_structure === + vkGetClusterAccelerationStructureBuildSizesNV = + PFN_vkGetClusterAccelerationStructureBuildSizesNV( vkGetDeviceProcAddr( device, "vkGetClusterAccelerationStructureBuildSizesNV" ) ); + vkCmdBuildClusterAccelerationStructureIndirectNV = + PFN_vkCmdBuildClusterAccelerationStructureIndirectNV( vkGetDeviceProcAddr( device, "vkCmdBuildClusterAccelerationStructureIndirectNV" ) ); + + //=== VK_NV_partitioned_acceleration_structure === + vkGetPartitionedAccelerationStructuresBuildSizesNV = + PFN_vkGetPartitionedAccelerationStructuresBuildSizesNV( vkGetDeviceProcAddr( device, "vkGetPartitionedAccelerationStructuresBuildSizesNV" ) ); + vkCmdBuildPartitionedAccelerationStructuresNV = + PFN_vkCmdBuildPartitionedAccelerationStructuresNV( vkGetDeviceProcAddr( device, "vkCmdBuildPartitionedAccelerationStructuresNV" ) ); + + //=== VK_EXT_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsEXT = + PFN_vkGetGeneratedCommandsMemoryRequirementsEXT( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsEXT" ) ); + vkCmdPreprocessGeneratedCommandsEXT = PFN_vkCmdPreprocessGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsEXT" ) ); + vkCmdExecuteGeneratedCommandsEXT = PFN_vkCmdExecuteGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsEXT" ) ); + vkCreateIndirectCommandsLayoutEXT = PFN_vkCreateIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutEXT" ) ); + vkDestroyIndirectCommandsLayoutEXT = PFN_vkDestroyIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutEXT" ) ); + vkCreateIndirectExecutionSetEXT = PFN_vkCreateIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectExecutionSetEXT" ) ); + vkDestroyIndirectExecutionSetEXT = PFN_vkDestroyIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectExecutionSetEXT" ) ); + vkUpdateIndirectExecutionSetPipelineEXT = + PFN_vkUpdateIndirectExecutionSetPipelineEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetPipelineEXT" ) ); + vkUpdateIndirectExecutionSetShaderEXT = + PFN_vkUpdateIndirectExecutionSetShaderEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetShaderEXT" ) ); + +#if 
defined( VK_USE_PLATFORM_METAL_EXT )
+      //=== VK_EXT_external_memory_metal ===
+      vkGetMemoryMetalHandleEXT           = PFN_vkGetMemoryMetalHandleEXT( vkGetDeviceProcAddr( device, "vkGetMemoryMetalHandleEXT" ) );
+      vkGetMemoryMetalHandlePropertiesEXT = PFN_vkGetMemoryMetalHandlePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryMetalHandlePropertiesEXT" ) );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+      //=== VK_EXT_fragment_density_map_offset ===
+      vkCmdEndRendering2EXT = PFN_vkCmdEndRendering2EXT( vkGetDeviceProcAddr( device, "vkCmdEndRendering2EXT" ) );
+    }
+
+    template <typename DynamicLoader>
+    void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device, DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT
+    {
+      PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
+      PFN_vkGetDeviceProcAddr   getDeviceProcAddr   = dl.template getProcAddress<PFN_vkGetDeviceProcAddr>( "vkGetDeviceProcAddr" );
+      init( static_cast<VkInstance>( instance ), getInstanceProcAddr, static_cast<VkDevice>( device ), device ? getDeviceProcAddr : nullptr );
+    }
+
+    template <typename DynamicLoader>
+    void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device ) VULKAN_HPP_NOEXCEPT
+    {
+      static DynamicLoader dl;
+      init( instance, device, dl );
+    }
+  };
+  } // namespace detail
+} // namespace VULKAN_HPP_NAMESPACE
+#endif
\ No newline at end of file
diff --git a/include/vulkan/vulkan_android.h b/include/vulkan/vulkan_android.h
new file mode 100644
index 00000000..a2ba3add
--- /dev/null
+++ b/include/vulkan/vulkan_android.h
@@ -0,0 +1,153 @@
+#ifndef VULKAN_ANDROID_H_
+#define VULKAN_ANDROID_H_ 1
+
+/*
+** Copyright 2015-2025 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+// VK_KHR_android_surface is a preprocessor guard. Do not pass it to API calls.
+#define VK_KHR_android_surface 1
+struct ANativeWindow;
+#define VK_KHR_ANDROID_SURFACE_SPEC_VERSION 6
+#define VK_KHR_ANDROID_SURFACE_EXTENSION_NAME "VK_KHR_android_surface"
+typedef VkFlags VkAndroidSurfaceCreateFlagsKHR;
+typedef struct VkAndroidSurfaceCreateInfoKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkAndroidSurfaceCreateFlagsKHR    flags;
+    struct ANativeWindow*             window;
+} VkAndroidSurfaceCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateAndroidSurfaceKHR)(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+
+// VK_ANDROID_external_memory_android_hardware_buffer is a preprocessor guard. Do not pass it to API calls.
+#define VK_ANDROID_external_memory_android_hardware_buffer 1 +struct AHardwareBuffer; +#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION 5 +#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME "VK_ANDROID_external_memory_android_hardware_buffer" +typedef struct VkAndroidHardwareBufferUsageANDROID { + VkStructureType sType; + void* pNext; + uint64_t androidHardwareBufferUsage; +} VkAndroidHardwareBufferUsageANDROID; + +typedef struct VkAndroidHardwareBufferPropertiesANDROID { + VkStructureType sType; + void* pNext; + VkDeviceSize allocationSize; + uint32_t memoryTypeBits; +} VkAndroidHardwareBufferPropertiesANDROID; + +typedef struct VkAndroidHardwareBufferFormatPropertiesANDROID { + VkStructureType sType; + void* pNext; + VkFormat format; + uint64_t externalFormat; + VkFormatFeatureFlags formatFeatures; + VkComponentMapping samplerYcbcrConversionComponents; + VkSamplerYcbcrModelConversion suggestedYcbcrModel; + VkSamplerYcbcrRange suggestedYcbcrRange; + VkChromaLocation suggestedXChromaOffset; + VkChromaLocation suggestedYChromaOffset; +} VkAndroidHardwareBufferFormatPropertiesANDROID; + +typedef struct VkImportAndroidHardwareBufferInfoANDROID { + VkStructureType sType; + const void* pNext; + struct AHardwareBuffer* buffer; +} VkImportAndroidHardwareBufferInfoANDROID; + +typedef struct VkMemoryGetAndroidHardwareBufferInfoANDROID { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; +} VkMemoryGetAndroidHardwareBufferInfoANDROID; + +typedef struct VkExternalFormatANDROID { + VkStructureType sType; + void* pNext; + uint64_t externalFormat; +} VkExternalFormatANDROID; + +typedef struct VkAndroidHardwareBufferFormatProperties2ANDROID { + VkStructureType sType; + void* pNext; + VkFormat format; + uint64_t externalFormat; + VkFormatFeatureFlags2 formatFeatures; + VkComponentMapping samplerYcbcrConversionComponents; + VkSamplerYcbcrModelConversion suggestedYcbcrModel; + VkSamplerYcbcrRange suggestedYcbcrRange; + VkChromaLocation suggestedXChromaOffset; + VkChromaLocation suggestedYChromaOffset; +} VkAndroidHardwareBufferFormatProperties2ANDROID; + +typedef VkResult (VKAPI_PTR *PFN_vkGetAndroidHardwareBufferPropertiesANDROID)(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryAndroidHardwareBufferANDROID)(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetAndroidHardwareBufferPropertiesANDROID( + VkDevice device, + const struct AHardwareBuffer* buffer, + VkAndroidHardwareBufferPropertiesANDROID* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryAndroidHardwareBufferANDROID( + VkDevice device, + const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, + struct AHardwareBuffer** pBuffer); +#endif + + +// VK_ANDROID_external_format_resolve is a preprocessor guard. Do not pass it to API calls. 
+#define VK_ANDROID_external_format_resolve 1 +#define VK_ANDROID_EXTERNAL_FORMAT_RESOLVE_SPEC_VERSION 1 +#define VK_ANDROID_EXTERNAL_FORMAT_RESOLVE_EXTENSION_NAME "VK_ANDROID_external_format_resolve" +typedef struct VkPhysicalDeviceExternalFormatResolveFeaturesANDROID { + VkStructureType sType; + void* pNext; + VkBool32 externalFormatResolve; +} VkPhysicalDeviceExternalFormatResolveFeaturesANDROID; + +typedef struct VkPhysicalDeviceExternalFormatResolvePropertiesANDROID { + VkStructureType sType; + void* pNext; + VkBool32 nullColorAttachmentWithExternalFormatResolve; + VkChromaLocation externalFormatResolveChromaOffsetX; + VkChromaLocation externalFormatResolveChromaOffsetY; +} VkPhysicalDeviceExternalFormatResolvePropertiesANDROID; + +typedef struct VkAndroidHardwareBufferFormatResolvePropertiesANDROID { + VkStructureType sType; + void* pNext; + VkFormat colorAttachmentFormat; +} VkAndroidHardwareBufferFormatResolvePropertiesANDROID; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_beta.h b/include/vulkan/vulkan_beta.h new file mode 100644 index 00000000..867483d0 --- /dev/null +++ b/include/vulkan/vulkan_beta.h @@ -0,0 +1,226 @@ +#ifndef VULKAN_BETA_H_ +#define VULKAN_BETA_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_KHR_portability_subset is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_portability_subset 1 +#define VK_KHR_PORTABILITY_SUBSET_SPEC_VERSION 1 +#define VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME "VK_KHR_portability_subset" +typedef struct VkPhysicalDevicePortabilitySubsetFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 constantAlphaColorBlendFactors; + VkBool32 events; + VkBool32 imageViewFormatReinterpretation; + VkBool32 imageViewFormatSwizzle; + VkBool32 imageView2DOn3DImage; + VkBool32 multisampleArrayImage; + VkBool32 mutableComparisonSamplers; + VkBool32 pointPolygons; + VkBool32 samplerMipLodBias; + VkBool32 separateStencilMaskRef; + VkBool32 shaderSampleRateInterpolationFunctions; + VkBool32 tessellationIsolines; + VkBool32 tessellationPointMode; + VkBool32 triangleFans; + VkBool32 vertexAttributeAccessBeyondStride; +} VkPhysicalDevicePortabilitySubsetFeaturesKHR; + +typedef struct VkPhysicalDevicePortabilitySubsetPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t minVertexInputBindingStrideAlignment; +} VkPhysicalDevicePortabilitySubsetPropertiesKHR; + + + +// VK_AMDX_shader_enqueue is a preprocessor guard. Do not pass it to API calls. 
+#define VK_AMDX_shader_enqueue 1 +#define VK_AMDX_SHADER_ENQUEUE_SPEC_VERSION 2 +#define VK_AMDX_SHADER_ENQUEUE_EXTENSION_NAME "VK_AMDX_shader_enqueue" +#define VK_SHADER_INDEX_UNUSED_AMDX (~0U) +typedef struct VkPhysicalDeviceShaderEnqueueFeaturesAMDX { + VkStructureType sType; + void* pNext; + VkBool32 shaderEnqueue; + VkBool32 shaderMeshEnqueue; +} VkPhysicalDeviceShaderEnqueueFeaturesAMDX; + +typedef struct VkPhysicalDeviceShaderEnqueuePropertiesAMDX { + VkStructureType sType; + void* pNext; + uint32_t maxExecutionGraphDepth; + uint32_t maxExecutionGraphShaderOutputNodes; + uint32_t maxExecutionGraphShaderPayloadSize; + uint32_t maxExecutionGraphShaderPayloadCount; + uint32_t executionGraphDispatchAddressAlignment; + uint32_t maxExecutionGraphWorkgroupCount[3]; + uint32_t maxExecutionGraphWorkgroups; +} VkPhysicalDeviceShaderEnqueuePropertiesAMDX; + +typedef struct VkExecutionGraphPipelineScratchSizeAMDX { + VkStructureType sType; + void* pNext; + VkDeviceSize minSize; + VkDeviceSize maxSize; + VkDeviceSize sizeGranularity; +} VkExecutionGraphPipelineScratchSizeAMDX; + +typedef struct VkExecutionGraphPipelineCreateInfoAMDX { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineLibraryCreateInfoKHR* pLibraryInfo; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkExecutionGraphPipelineCreateInfoAMDX; + +typedef union VkDeviceOrHostAddressConstAMDX { + VkDeviceAddress deviceAddress; + const void* hostAddress; +} VkDeviceOrHostAddressConstAMDX; + +typedef struct VkDispatchGraphInfoAMDX { + uint32_t nodeIndex; + uint32_t payloadCount; + VkDeviceOrHostAddressConstAMDX payloads; + uint64_t payloadStride; +} VkDispatchGraphInfoAMDX; + +typedef struct VkDispatchGraphCountInfoAMDX { + uint32_t count; + VkDeviceOrHostAddressConstAMDX infos; + uint64_t stride; +} VkDispatchGraphCountInfoAMDX; + +typedef struct VkPipelineShaderStageNodeCreateInfoAMDX { + VkStructureType sType; + const void* pNext; + const char* pName; + uint32_t index; +} VkPipelineShaderStageNodeCreateInfoAMDX; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateExecutionGraphPipelinesAMDX)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkExecutionGraphPipelineCreateInfoAMDX* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineScratchSizeAMDX)(VkDevice device, VkPipeline executionGraph, VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineNodeIndexAMDX)(VkDevice device, VkPipeline executionGraph, const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo, uint32_t* pNodeIndex); +typedef void (VKAPI_PTR *PFN_vkCmdInitializeGraphScratchMemoryAMDX)(VkCommandBuffer commandBuffer, VkPipeline executionGraph, VkDeviceAddress scratch, VkDeviceSize scratchSize); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceSize scratchSize, const VkDispatchGraphCountInfoAMDX* pCountInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceSize scratchSize, const VkDispatchGraphCountInfoAMDX* pCountInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectCountAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceSize scratchSize, VkDeviceAddress countInfo); + 
+#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateExecutionGraphPipelinesAMDX( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkExecutionGraphPipelineCreateInfoAMDX* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetExecutionGraphPipelineScratchSizeAMDX( + VkDevice device, + VkPipeline executionGraph, + VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetExecutionGraphPipelineNodeIndexAMDX( + VkDevice device, + VkPipeline executionGraph, + const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo, + uint32_t* pNodeIndex); + +VKAPI_ATTR void VKAPI_CALL vkCmdInitializeGraphScratchMemoryAMDX( + VkCommandBuffer commandBuffer, + VkPipeline executionGraph, + VkDeviceAddress scratch, + VkDeviceSize scratchSize); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphAMDX( + VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceSize scratchSize, + const VkDispatchGraphCountInfoAMDX* pCountInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphIndirectAMDX( + VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceSize scratchSize, + const VkDispatchGraphCountInfoAMDX* pCountInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphIndirectCountAMDX( + VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceSize scratchSize, + VkDeviceAddress countInfo); +#endif + + +// VK_NV_displacement_micromap is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_displacement_micromap 1 +#define VK_NV_DISPLACEMENT_MICROMAP_SPEC_VERSION 2 +#define VK_NV_DISPLACEMENT_MICROMAP_EXTENSION_NAME "VK_NV_displacement_micromap" + +typedef enum VkDisplacementMicromapFormatNV { + VK_DISPLACEMENT_MICROMAP_FORMAT_64_TRIANGLES_64_BYTES_NV = 1, + VK_DISPLACEMENT_MICROMAP_FORMAT_256_TRIANGLES_128_BYTES_NV = 2, + VK_DISPLACEMENT_MICROMAP_FORMAT_1024_TRIANGLES_128_BYTES_NV = 3, + VK_DISPLACEMENT_MICROMAP_FORMAT_MAX_ENUM_NV = 0x7FFFFFFF +} VkDisplacementMicromapFormatNV; +typedef struct VkPhysicalDeviceDisplacementMicromapFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 displacementMicromap; +} VkPhysicalDeviceDisplacementMicromapFeaturesNV; + +typedef struct VkPhysicalDeviceDisplacementMicromapPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxDisplacementMicromapSubdivisionLevel; +} VkPhysicalDeviceDisplacementMicromapPropertiesNV; + +typedef struct VkAccelerationStructureTrianglesDisplacementMicromapNV { + VkStructureType sType; + void* pNext; + VkFormat displacementBiasAndScaleFormat; + VkFormat displacementVectorFormat; + VkDeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer; + VkDeviceSize displacementBiasAndScaleStride; + VkDeviceOrHostAddressConstKHR displacementVectorBuffer; + VkDeviceSize displacementVectorStride; + VkDeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags; + VkDeviceSize displacedMicromapPrimitiveFlagsStride; + VkIndexType indexType; + VkDeviceOrHostAddressConstKHR indexBuffer; + VkDeviceSize indexStride; + uint32_t baseTriangle; + uint32_t usageCountsCount; + const VkMicromapUsageEXT* pUsageCounts; + const VkMicromapUsageEXT* const* ppUsageCounts; + VkMicromapEXT micromap; +} VkAccelerationStructureTrianglesDisplacementMicromapNV; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_core.h b/include/vulkan/vulkan_core.h new file mode 100644 index 00000000..b57b3682 --- /dev/null +++ b/include/vulkan/vulkan_core.h @@ -0,0 
+1,22130 @@ +#ifndef VULKAN_CORE_H_ +#define VULKAN_CORE_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_VERSION_1_0 is a preprocessor guard. Do not pass it to API calls. +#define VK_VERSION_1_0 1 +#include "vk_platform.h" + +#define VK_DEFINE_HANDLE(object) typedef struct object##_T* object; + + +#ifndef VK_USE_64_BIT_PTR_DEFINES + #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) || (defined(__riscv) && __riscv_xlen == 64) + #define VK_USE_64_BIT_PTR_DEFINES 1 + #else + #define VK_USE_64_BIT_PTR_DEFINES 0 + #endif +#endif + + +#ifndef VK_DEFINE_NON_DISPATCHABLE_HANDLE + #if (VK_USE_64_BIT_PTR_DEFINES==1) + #if (defined(__cplusplus) && (__cplusplus >= 201103L)) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201103L)) + #define VK_NULL_HANDLE nullptr + #else + #define VK_NULL_HANDLE ((void*)0) + #endif + #else + #define VK_NULL_HANDLE 0ULL + #endif +#endif +#ifndef VK_NULL_HANDLE + #define VK_NULL_HANDLE 0 +#endif + + +#ifndef VK_DEFINE_NON_DISPATCHABLE_HANDLE + #if (VK_USE_64_BIT_PTR_DEFINES==1) + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object; + #else + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object; + #endif +#endif + +#define VK_MAKE_API_VERSION(variant, major, minor, patch) \ + ((((uint32_t)(variant)) << 29U) | (((uint32_t)(major)) << 22U) | (((uint32_t)(minor)) << 12U) | ((uint32_t)(patch))) + +// DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead. +//#define VK_API_VERSION VK_MAKE_API_VERSION(0, 1, 0, 0) // Patch version should always be set to 0 + +// Vulkan 1.0 version number +#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 + +// Version of this file +#define VK_HEADER_VERSION 313 + +// Complete version of this file +#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 4, VK_HEADER_VERSION) + +// VK_MAKE_VERSION is deprecated, but no reason was given in the API XML +// DEPRECATED: This define is deprecated. VK_MAKE_API_VERSION should be used instead. +#define VK_MAKE_VERSION(major, minor, patch) \ + ((((uint32_t)(major)) << 22U) | (((uint32_t)(minor)) << 12U) | ((uint32_t)(patch))) + +// VK_VERSION_MAJOR is deprecated, but no reason was given in the API XML +// DEPRECATED: This define is deprecated. VK_API_VERSION_MAJOR should be used instead. +#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22U) + +// VK_VERSION_MINOR is deprecated, but no reason was given in the API XML +// DEPRECATED: This define is deprecated. VK_API_VERSION_MINOR should be used instead. +#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12U) & 0x3FFU) + +// VK_VERSION_PATCH is deprecated, but no reason was given in the API XML +// DEPRECATED: This define is deprecated. VK_API_VERSION_PATCH should be used instead. 
+#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU) + +#define VK_API_VERSION_VARIANT(version) ((uint32_t)(version) >> 29U) +#define VK_API_VERSION_MAJOR(version) (((uint32_t)(version) >> 22U) & 0x7FU) +#define VK_API_VERSION_MINOR(version) (((uint32_t)(version) >> 12U) & 0x3FFU) +#define VK_API_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU) +typedef uint32_t VkBool32; +typedef uint64_t VkDeviceAddress; +typedef uint64_t VkDeviceSize; +typedef uint32_t VkFlags; +typedef uint32_t VkSampleMask; +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage) +VK_DEFINE_HANDLE(VkInstance) +VK_DEFINE_HANDLE(VkPhysicalDevice) +VK_DEFINE_HANDLE(VkDevice) +VK_DEFINE_HANDLE(VkQueue) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore) +VK_DEFINE_HANDLE(VkCommandBuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool) +#define VK_ATTACHMENT_UNUSED (~0U) +#define VK_FALSE 0U +#define VK_LOD_CLAMP_NONE 1000.0F +#define VK_QUEUE_FAMILY_IGNORED (~0U) +#define VK_REMAINING_ARRAY_LAYERS (~0U) +#define VK_REMAINING_MIP_LEVELS (~0U) +#define VK_SUBPASS_EXTERNAL (~0U) +#define VK_TRUE 1U +#define VK_WHOLE_SIZE (~0ULL) +#define VK_MAX_MEMORY_TYPES 32U +#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE 256U +#define VK_UUID_SIZE 16U +#define VK_MAX_EXTENSION_NAME_SIZE 256U +#define VK_MAX_DESCRIPTION_SIZE 256U +#define VK_MAX_MEMORY_HEAPS 16U + +typedef enum VkResult { + VK_SUCCESS = 0, + VK_NOT_READY = 1, + VK_TIMEOUT = 2, + VK_EVENT_SET = 3, + VK_EVENT_RESET = 4, + VK_INCOMPLETE = 5, + VK_ERROR_OUT_OF_HOST_MEMORY = -1, + VK_ERROR_OUT_OF_DEVICE_MEMORY = -2, + VK_ERROR_INITIALIZATION_FAILED = -3, + VK_ERROR_DEVICE_LOST = -4, + VK_ERROR_MEMORY_MAP_FAILED = -5, + VK_ERROR_LAYER_NOT_PRESENT = -6, + VK_ERROR_EXTENSION_NOT_PRESENT = -7, + VK_ERROR_FEATURE_NOT_PRESENT = -8, + VK_ERROR_INCOMPATIBLE_DRIVER = -9, + VK_ERROR_TOO_MANY_OBJECTS = -10, + VK_ERROR_FORMAT_NOT_SUPPORTED = -11, + VK_ERROR_FRAGMENTED_POOL = -12, + VK_ERROR_UNKNOWN = -13, + VK_ERROR_OUT_OF_POOL_MEMORY = -1000069000, + VK_ERROR_INVALID_EXTERNAL_HANDLE = -1000072003, + VK_ERROR_FRAGMENTATION = -1000161000, + VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS = -1000257000, + VK_PIPELINE_COMPILE_REQUIRED = 1000297000, + VK_ERROR_NOT_PERMITTED = -1000174001, + VK_ERROR_SURFACE_LOST_KHR = -1000000000, + VK_ERROR_NATIVE_WINDOW_IN_USE_KHR = -1000000001, + VK_SUBOPTIMAL_KHR = 1000001003, + VK_ERROR_OUT_OF_DATE_KHR = -1000001004, + VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001, + VK_ERROR_VALIDATION_FAILED_EXT = -1000011001, + VK_ERROR_INVALID_SHADER_NV = -1000012000, + VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR = -1000023000, + VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR = -1000023001, + VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR = 
-1000023002, + VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR = -1000023003, + VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR = -1000023004, + VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR = -1000023005, + VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT = -1000158000, + VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT = -1000255000, + VK_THREAD_IDLE_KHR = 1000268000, + VK_THREAD_DONE_KHR = 1000268001, + VK_OPERATION_DEFERRED_KHR = 1000268002, + VK_OPERATION_NOT_DEFERRED_KHR = 1000268003, + VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR = -1000299000, + VK_ERROR_COMPRESSION_EXHAUSTED_EXT = -1000338000, + VK_INCOMPATIBLE_SHADER_BINARY_EXT = 1000482000, + VK_PIPELINE_BINARY_MISSING_KHR = 1000483000, + VK_ERROR_NOT_ENOUGH_SPACE_KHR = -1000483000, + VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY, + VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE, + VK_ERROR_FRAGMENTATION_EXT = VK_ERROR_FRAGMENTATION, + VK_ERROR_NOT_PERMITTED_EXT = VK_ERROR_NOT_PERMITTED, + VK_ERROR_NOT_PERMITTED_KHR = VK_ERROR_NOT_PERMITTED, + VK_ERROR_INVALID_DEVICE_ADDRESS_EXT = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, + VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, + VK_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED, + VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED, + // VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT is a deprecated alias + VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT = VK_INCOMPATIBLE_SHADER_BINARY_EXT, + VK_RESULT_MAX_ENUM = 0x7FFFFFFF +} VkResult; + +typedef enum VkStructureType { + VK_STRUCTURE_TYPE_APPLICATION_INFO = 0, + VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO = 1, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO = 2, + VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO = 3, + VK_STRUCTURE_TYPE_SUBMIT_INFO = 4, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO = 5, + VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE = 6, + VK_STRUCTURE_TYPE_BIND_SPARSE_INFO = 7, + VK_STRUCTURE_TYPE_FENCE_CREATE_INFO = 8, + VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO = 9, + VK_STRUCTURE_TYPE_EVENT_CREATE_INFO = 10, + VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO = 11, + VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO = 12, + VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO = 13, + VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO = 14, + VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO = 15, + VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO = 16, + VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO = 17, + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO = 18, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO = 19, + VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO = 20, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO = 21, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO = 22, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO = 23, + VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO = 24, + VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO = 25, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO = 26, + VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO = 27, + VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO = 28, + VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO = 29, + VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO = 30, + VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO = 31, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO = 32, + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO = 33, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO = 34, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET = 35, + VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET = 36, + 
VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO = 37, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO = 38, + VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO = 39, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO = 40, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO = 41, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO = 42, + VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO = 43, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER = 44, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER = 45, + VK_STRUCTURE_TYPE_MEMORY_BARRIER = 46, + VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO = 47, + VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO = 48, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES = 1000094000, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO = 1000157000, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO = 1000157001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES = 1000083000, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS = 1000127000, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO = 1000127001, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO = 1000060000, + VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO = 1000060003, + VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO = 1000060004, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO = 1000060005, + VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO = 1000060006, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO = 1000060013, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO = 1000060014, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES = 1000070000, + VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO = 1000070001, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2 = 1000146000, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2 = 1000146001, + VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2 = 1000146002, + VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 = 1000146003, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2 = 1000146004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 = 1000059000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2 = 1000059001, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2 = 1000059002, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2 = 1000059003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2 = 1000059004, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2 = 1000059005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2 = 1000059006, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2 = 1000059007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2 = 1000059008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES = 1000117000, + VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO = 1000117001, + VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO = 1000117002, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO = 1000117003, + VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO = 1000053000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES = 1000053001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES = 1000053002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES = 1000120000, + VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO = 1000145000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES = 1000145001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES = 1000145002, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2 = 1000145003, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO = 1000156000, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO = 1000156001, + VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO = 
1000156002, + VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO = 1000156003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES = 1000156004, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES = 1000156005, + VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO = 1000085000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO = 1000071000, + VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES = 1000071001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO = 1000071002, + VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES = 1000071003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES = 1000071004, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO = 1000072000, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO = 1000072001, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO = 1000072002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO = 1000112000, + VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES = 1000112001, + VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO = 1000113000, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO = 1000077000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO = 1000076000, + VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES = 1000076001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES = 1000168000, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT = 1000168001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES = 1000063000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES = 49, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES = 50, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES = 51, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES = 52, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO = 1000147000, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2 = 1000109000, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2 = 1000109001, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2 = 1000109002, + VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2 = 1000109003, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2 = 1000109004, + VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO = 1000109005, + VK_STRUCTURE_TYPE_SUBPASS_END_INFO = 1000109006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES = 1000177000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES = 1000196000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES = 1000180000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES = 1000082000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES = 1000197000, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO = 1000161000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES = 1000161001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES = 1000161002, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO = 1000161003, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT = 1000161004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES = 1000199000, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE = 1000199001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES = 1000221000, + VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO = 1000246000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES = 1000130000, + VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO = 1000130001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES = 1000211000, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES = 1000108000, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO = 1000108001, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO = 1000108002, + VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO = 1000108003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES = 1000253000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES = 1000175000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES = 1000241000, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT = 1000241001, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT = 1000241002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES = 1000261000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES = 1000207000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES = 1000207001, + VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO = 1000207002, + VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO = 1000207003, + VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO = 1000207004, + VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO = 1000207005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES = 1000257000, + VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO = 1000244001, + VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO = 1000257002, + VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO = 1000257003, + VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO = 1000257004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES = 53, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES = 54, + VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO = 1000192000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES = 1000215000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES = 1000245000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES = 1000276000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES = 1000295000, + VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO = 1000295001, + VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO = 1000295002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES = 1000297000, + VK_STRUCTURE_TYPE_MEMORY_BARRIER_2 = 1000314000, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2 = 1000314001, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2 = 1000314002, + VK_STRUCTURE_TYPE_DEPENDENCY_INFO = 1000314003, + VK_STRUCTURE_TYPE_SUBMIT_INFO_2 = 1000314004, + VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO = 1000314005, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO = 1000314006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES = 1000314007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES = 1000325000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES = 1000335000, + VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2 = 1000337000, + VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2 = 1000337001, + VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2 = 1000337002, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2 = 1000337003, + VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2 = 1000337004, + VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2 = 1000337005, + VK_STRUCTURE_TYPE_BUFFER_COPY_2 = 1000337006, + VK_STRUCTURE_TYPE_IMAGE_COPY_2 = 1000337007, + VK_STRUCTURE_TYPE_IMAGE_BLIT_2 = 1000337008, + VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2 = 1000337009, + VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2 = 1000337010, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES 
= 1000225000, + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO = 1000225001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES = 1000225002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES = 1000138000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES = 1000138001, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK = 1000138002, + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO = 1000138003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES = 1000066000, + VK_STRUCTURE_TYPE_RENDERING_INFO = 1000044000, + VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO = 1000044001, + VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO = 1000044002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES = 1000044003, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO = 1000044004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES = 1000280000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES = 1000280001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES = 1000281001, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3 = 1000360000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES = 1000413000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES = 1000413001, + VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS = 1000413002, + VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS = 1000413003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_4_FEATURES = 55, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_4_PROPERTIES = 56, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO = 1000174000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES = 1000388000, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES = 1000388001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES = 1000416000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES = 1000528000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES = 1000544000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES = 1000259000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO = 1000259001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES = 1000259002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES = 1000525000, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO = 1000190001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES = 1000190002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES = 1000265000, + VK_STRUCTURE_TYPE_MEMORY_MAP_INFO = 1000271000, + VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO = 1000271001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES = 1000470000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES = 1000470001, + VK_STRUCTURE_TYPE_RENDERING_AREA_INFO = 1000470003, + VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO = 1000470004, + VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2 = 1000338002, + VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2 = 1000338003, + VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO = 1000470005, + VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO = 1000470006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES = 1000080000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES = 1000232000, + VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO = 1000232001, + 
VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO = 1000232002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES = 1000545000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES = 1000545001, + VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS = 1000545002, + VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO = 1000545003, + VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO = 1000545004, + VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO = 1000545005, + VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO = 1000545006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES = 1000466000, + VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO = 1000068000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES = 1000068001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES = 1000068002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES = 1000270000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES = 1000270001, + VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY = 1000270002, + VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY = 1000270003, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO = 1000270004, + VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO = 1000270005, + VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO = 1000270006, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO = 1000270007, + VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE = 1000270008, + VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY = 1000270009, + VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR = 1000001000, + VK_STRUCTURE_TYPE_PRESENT_INFO_KHR = 1000001001, + VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR = 1000060007, + VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR = 1000060008, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR = 1000060009, + VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR = 1000060010, + VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR = 1000060011, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR = 1000060012, + VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR = 1000002000, + VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR = 1000002001, + VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR = 1000003000, + VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR = 1000004000, + VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR = 1000005000, + VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR = 1000006000, + VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR = 1000008000, + VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR = 1000009000, + VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT = 1000011000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD = 1000018000, + VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT = 1000022000, + VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT = 1000022001, + VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT = 1000022002, + VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR = 1000023000, + VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR = 1000023001, + VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR = 1000023002, + VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR = 1000023003, + VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR = 1000023004, + VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR = 1000023005, + VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000023006, + VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR = 1000023007, + VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR = 1000023008, + VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR = 1000023009, + 
VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR = 1000023010, + VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR = 1000023011, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR = 1000023012, + VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR = 1000023013, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR = 1000023014, + VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR = 1000023015, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR = 1000023016, + VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR = 1000024000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR = 1000024001, + VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR = 1000024002, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV = 1000026000, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV = 1000026001, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV = 1000026002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT = 1000028000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT = 1000028001, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT = 1000028002, + VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX = 1000029000, + VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX = 1000029001, + VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX = 1000029002, + VK_STRUCTURE_TYPE_CU_MODULE_TEXTURING_MODE_CREATE_INFO_NVX = 1000029004, + VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX = 1000030000, + VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX = 1000030001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_KHR = 1000038000, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000038001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR = 1000038002, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PICTURE_INFO_KHR = 1000038003, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_KHR = 1000038004, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_KHR = 1000038005, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_GOP_REMAINING_FRAME_INFO_KHR = 1000038006, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_KHR = 1000038007, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_KHR = 1000038008, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_KHR = 1000038009, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_KHR = 1000038010, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUALITY_LEVEL_PROPERTIES_KHR = 1000038011, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_GET_INFO_KHR = 1000038012, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_FEEDBACK_INFO_KHR = 1000038013, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_KHR = 1000039000, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000039001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR = 1000039002, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PICTURE_INFO_KHR = 1000039003, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_KHR = 1000039004, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_KHR = 1000039005, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_GOP_REMAINING_FRAME_INFO_KHR = 1000039006, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_KHR = 1000039007, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_KHR = 1000039009, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_KHR = 1000039010, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_CREATE_INFO_KHR = 1000039011, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUALITY_LEVEL_PROPERTIES_KHR = 1000039012, + 
VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_GET_INFO_KHR = 1000039013, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_FEEDBACK_INFO_KHR = 1000039014, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_KHR = 1000040000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_KHR = 1000040001, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_KHR = 1000040003, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000040004, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR = 1000040005, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR = 1000040006, + VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000, + VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP = 1000049000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV = 1000050000, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV = 1000056000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV = 1000056001, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057001, + VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV = 1000058000, + VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT = 1000061000, + VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN = 1000062000, + VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT = 1000067000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT = 1000067001, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073001, + VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR = 1000073002, + VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR = 1000073003, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR = 1000074000, + VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR = 1000074001, + VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR = 1000074002, + VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR = 1000075000, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078000, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078001, + VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR = 1000078002, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR = 1000078003, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR = 1000079000, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR = 1000079001, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT = 1000081000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT = 1000081001, + VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT = 1000081002, + VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR = 1000084000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV = 1000087000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT = 1000090000, + VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT = 1000091000, + VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT = 1000091001, + VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT = 1000091002, + VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT = 1000091003, + VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE = 1000092000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX = 1000097000, + VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX = 1000044009, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV = 1000098000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT = 1000099000, + VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT = 1000099001, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT = 1000101000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT = 1000101001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT = 1000102000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT = 1000102001, + VK_STRUCTURE_TYPE_HDR_METADATA_EXT = 1000105000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RELAXED_LINE_RASTERIZATION_FEATURES_IMG = 1000110000, + VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR = 1000111000, + VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114000, + VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114001, + VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR = 1000114002, + VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR = 1000115000, + VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR = 1000115001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR = 1000116000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR = 1000116001, + VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR = 1000116002, + VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR = 1000116003, + VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR = 1000116004, + VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR = 1000116005, + VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR = 1000116006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR = 1000119000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR = 1000119001, + VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR = 1000119002, + VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR = 1000121000, + VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR = 1000121001, + VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR = 1000121002, + VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR = 1000121003, + VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR = 1000121004, + VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK = 1000122000, + VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK = 1000123000, + VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT = 1000128000, + VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT = 1000128001, + VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT = 1000128002, + VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT = 1000128003, + VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT = 1000128004, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID = 1000129000, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID = 1000129001, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID = 1000129002, + VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129003, + VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129004, + VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID = 1000129005, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID = 1000129006, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX = 1000134000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX = 1000134001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX = 1000134002, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX = 1000134003, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX = 1000134004, +#endif + VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD = 1000044008, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_BFLOAT16_FEATURES_KHR = 1000141000, + VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT = 1000143000, + VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT = 1000143001, + VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT = 1000143002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT = 1000143003, + VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT = 1000143004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT = 1000148000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002, + VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV = 1000149000, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR = 1000150007, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR = 1000150000, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR = 1000150002, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR = 1000150003, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR = 1000150004, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR = 1000150005, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR = 1000150006, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR = 1000150009, + VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR = 1000150010, + VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR = 1000150011, + VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR = 1000150012, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR = 1000150013, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR = 1000150014, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR = 1000150017, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR = 1000150020, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR = 1000347000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR = 1000347001, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR = 1000150015, + VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR = 1000150016, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR = 1000150018, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR = 1000348013, + VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV = 1000152000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV = 1000154000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV = 1000154001, + VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT = 1000158000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT = 1000158002, + VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT = 1000158003, + VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT = 1000158004, + VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT = 1000158005, + VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT = 1000158006, + VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160000, + VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160001, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR = 1000163000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR = 
1000163001, +#endif + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV = 1000164000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV = 1000164001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV = 1000164002, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV = 1000164005, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV = 1000165000, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV = 1000165001, + VK_STRUCTURE_TYPE_GEOMETRY_NV = 1000165003, + VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV = 1000165004, + VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV = 1000165005, + VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV = 1000165006, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV = 1000165007, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV = 1000165008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV = 1000165009, + VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV = 1000165011, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV = 1000165012, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV = 1000166000, + VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV = 1000166001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT = 1000170000, + VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT = 1000170001, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT = 1000178000, + VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT = 1000178001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT = 1000178002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR = 1000181000, + VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD = 1000183000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD = 1000185000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_KHR = 1000187000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000187001, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR = 1000187002, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR = 1000187003, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR = 1000187004, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR = 1000187005, + VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD = 1000189000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT = 1000190000, + VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP = 1000191000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV = 1000202000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV = 1000202001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV = 1000204000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV = 1000205000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV = 1000205002, + VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV = 1000206000, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV = 1000206001, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008, + VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL = 1000209000, + VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL = 1000210000, + VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL = 1000210001, + 
VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL = 1000210002, + VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL = 1000210003, + VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL = 1000210004, + VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL = 1000210005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT = 1000212000, + VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD = 1000213000, + VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD = 1000213001, + VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA = 1000214000, + VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT = 1000217000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT = 1000218000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT = 1000218001, + VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT = 1000218002, + VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT = 1000044007, + VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000226000, + VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR = 1000226001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR = 1000226002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR = 1000226003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR = 1000226004, + VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000044006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD = 1000227000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD = 1000229000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT = 1000234000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_QUAD_CONTROL_FEATURES_KHR = 1000235000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT = 1000237000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT = 1000238000, + VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT = 1000238001, + VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR = 1000239000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV = 1000240000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT = 1000244000, + VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT = 1000244002, + VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT = 1000247000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR = 1000248000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV = 1000249000, + VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV = 1000249001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV = 1000249002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV = 1000250000, + VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV = 1000250001, + VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV = 1000250002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT = 1000251000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT = 1000252000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT = 1000254000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT = 1000254001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT = 1000254002, + VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT = 1000255000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT = 
1000255002, + VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT = 1000255001, + VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT = 1000256000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT = 1000260000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT = 1000267000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR = 1000269000, + VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR = 1000269001, + VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR = 1000269002, + VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR = 1000269003, + VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR = 1000269004, + VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR = 1000269005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_FEATURES_EXT = 1000272000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_PROPERTIES_EXT = 1000272001, + VK_STRUCTURE_TYPE_MEMORY_MAP_PLACED_INFO_EXT = 1000272002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT = 1000273000, + VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT = 1000274000, + VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT = 1000274001, + VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_COMPATIBILITY_EXT = 1000274002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT = 1000275000, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT = 1000275001, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT = 1000275002, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODE_INFO_EXT = 1000275003, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT = 1000275004, + VK_STRUCTURE_TYPE_RELEASE_SWAPCHAIN_IMAGES_INFO_EXT = 1000275005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV = 1000277000, + VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV = 1000277001, + VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV = 1000277002, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV = 1000277003, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV = 1000277004, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV = 1000277005, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV = 1000277006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV = 1000277007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV = 1000278000, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV = 1000278001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT = 1000281000, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM = 1000282000, + VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM = 1000282001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_BIAS_CONTROL_FEATURES_EXT = 1000283000, + VK_STRUCTURE_TYPE_DEPTH_BIAS_INFO_EXT = 1000283001, + VK_STRUCTURE_TYPE_DEPTH_BIAS_REPRESENTATION_INFO_EXT = 1000283002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT = 1000284000, + VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT = 1000284001, + VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT = 1000284002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT = 1000286000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT = 1000286001, + VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT = 1000287000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT = 1000287001, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT = 1000287002, + VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR = 1000290000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV = 1000292000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV = 1000292001, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV = 1000292002, + VK_STRUCTURE_TYPE_PRESENT_ID_KHR = 1000294000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR = 1000294001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR = 1000299000, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR = 1000299001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR = 1000299002, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR = 1000299003, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR = 1000299004, + VK_STRUCTURE_TYPE_QUERY_POOL_VIDEO_ENCODE_FEEDBACK_CREATE_INFO_KHR = 1000299005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR = 1000299006, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_PROPERTIES_KHR = 1000299007, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR = 1000299008, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_GET_INFO_KHR = 1000299009, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_FEEDBACK_INFO_KHR = 1000299010, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV = 1000300000, + VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV = 1000300001, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV = 1000307000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV = 1000307001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV = 1000307002, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV = 1000307003, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV = 1000307004, +#endif + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_SHADING_FEATURES_QCOM = 1000309000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_SHADING_PROPERTIES_QCOM = 1000309001, + VK_STRUCTURE_TYPE_RENDER_PASS_TILE_SHADING_CREATE_INFO_QCOM = 1000309002, + VK_STRUCTURE_TYPE_PER_TILE_BEGIN_INFO_QCOM = 1000309003, + VK_STRUCTURE_TYPE_PER_TILE_END_INFO_QCOM = 1000309004, + VK_STRUCTURE_TYPE_DISPATCH_TILE_INFO_QCOM = 1000309005, + VK_STRUCTURE_TYPE_QUERY_LOW_LATENCY_SUPPORT_NV = 1000310000, + VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT = 1000311000, + VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT = 1000311001, + VK_STRUCTURE_TYPE_EXPORT_METAL_DEVICE_INFO_EXT = 1000311002, + VK_STRUCTURE_TYPE_EXPORT_METAL_COMMAND_QUEUE_INFO_EXT = 1000311003, + VK_STRUCTURE_TYPE_EXPORT_METAL_BUFFER_INFO_EXT = 1000311004, + VK_STRUCTURE_TYPE_IMPORT_METAL_BUFFER_INFO_EXT = 1000311005, + VK_STRUCTURE_TYPE_EXPORT_METAL_TEXTURE_INFO_EXT = 1000311006, + VK_STRUCTURE_TYPE_IMPORT_METAL_TEXTURE_INFO_EXT = 1000311007, + VK_STRUCTURE_TYPE_EXPORT_METAL_IO_SURFACE_INFO_EXT = 1000311008, + VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT = 1000311009, + VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT = 1000311010, + VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT = 1000311011, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT = 1000316000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT = 1000316001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT = 1000316002, + 
VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT = 1000316003, + VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT = 1000316004, + VK_STRUCTURE_TYPE_BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316005, + VK_STRUCTURE_TYPE_IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316006, + VK_STRUCTURE_TYPE_IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316007, + VK_STRUCTURE_TYPE_SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316008, + VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT = 1000316010, + VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT = 1000316011, + VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT = 1000316012, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316009, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT = 1000320000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT = 1000320001, + VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT = 1000320002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD = 1000321000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR = 1000203000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR = 1000322000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR = 1000323000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV = 1000326000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV = 1000326001, + VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV = 1000326002, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV = 1000327000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV = 1000327001, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV = 1000327002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT = 1000328000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT = 1000328001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT = 1000330000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT = 1000332000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT = 1000332001, + VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM = 1000333000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR = 1000336000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT = 1000338000, + VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT = 1000338001, + VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT = 1000338004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT = 1000339000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT = 1000340000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT = 1000341000, + VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT = 1000341001, + VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT = 1000341002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT = 1000344000, + VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT = 1000346000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT = 1000352000, + VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT = 1000352001, + VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT = 1000352002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT = 1000353000, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT = 1000354000, + VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT = 1000354001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT = 1000355000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT = 1000355001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT = 1000356000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_MODE_FIFO_LATEST_READY_FEATURES_EXT = 1000361000, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364000, + VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA = 1000364001, + VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364002, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA = 1000365000, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA = 1000365001, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CREATE_INFO_FUCHSIA = 1000366000, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA = 1000366001, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA = 1000366002, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_PROPERTIES_FUCHSIA = 1000366003, + VK_STRUCTURE_TYPE_BUFFER_CONSTRAINTS_INFO_FUCHSIA = 1000366004, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA = 1000366005, + VK_STRUCTURE_TYPE_IMAGE_CONSTRAINTS_INFO_FUCHSIA = 1000366006, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA = 1000366007, + VK_STRUCTURE_TYPE_SYSMEM_COLOR_SPACE_FUCHSIA = 1000366008, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA = 1000366009, + VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI = 1000369000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI = 1000369001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI = 1000369002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI = 1000370000, + VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV = 1000371000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV = 1000371001, + VK_STRUCTURE_TYPE_PIPELINE_PROPERTIES_IDENTIFIER_EXT = 1000372000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT = 1000372001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAME_BOUNDARY_FEATURES_EXT = 1000375000, + VK_STRUCTURE_TYPE_FRAME_BOUNDARY_EXT = 1000375001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT = 1000376000, + VK_STRUCTURE_TYPE_SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT = 1000376001, + VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT = 1000376002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT = 1000377000, + VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX = 1000378000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT = 1000381000, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT = 1000381001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT = 1000382000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR = 1000386000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT = 1000391000, + VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT = 1000391001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT = 1000392000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT = 1000392001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT = 1000393000, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT = 1000395000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT = 1000395001, + VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT = 1000396000, + VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT = 1000396001, + VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT = 1000396002, + VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT = 1000396003, + VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT = 1000396004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT = 1000396005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT = 1000396006, + VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT = 1000396007, + VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT = 1000396008, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT = 1000396009, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_FEATURES_NV = 1000397000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_PROPERTIES_NV = 1000397001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_DISPLACEMENT_MICROMAP_NV = 1000397002, +#endif + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI = 1000404000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI = 1000404001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_VRS_FEATURES_HUAWEI = 1000404002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT = 1000411000, + VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT = 1000411001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT = 1000412000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM = 1000415000, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM = 1000417000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM = 1000417001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM = 1000417002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT = 1000418000, + VK_STRUCTURE_TYPE_IMAGE_VIEW_SLICED_CREATE_INFO_EXT = 1000418001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE = 1000420000, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE = 1000420001, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE = 1000420002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT = 1000422000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_FEATURES_ARM = 1000424000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_PROPERTIES_ARM = 1000424001, + VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_BEGIN_INFO_ARM = 1000424002, + VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_INFO_ARM = 1000424003, + VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_SUBMIT_INFO_ARM = 1000424004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV = 1000426000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV = 1000426001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV = 1000427000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV = 1000427001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV = 1000428000, + VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV = 1000428001, + VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV = 1000428002, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_LINEAR_SWEPT_SPHERES_FEATURES_NV = 1000429008, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_LINEAR_SWEPT_SPHERES_DATA_NV = 1000429009, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_SPHERES_DATA_NV = 1000429010, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV = 1000430000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MAXIMAL_RECONVERGENCE_FEATURES_KHR = 1000434000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT = 1000437000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM = 1000440000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM = 1000440001, + VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM = 1000440002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_FEATURES_EXT = 1000451000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_PROPERTIES_EXT = 1000451001, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT = 1000453000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT = 1000455000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT = 1000455001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT = 1000458000, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT = 1000458001, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT = 1000458002, + VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT = 1000458003, + VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG = 1000459000, + VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG = 1000459001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT = 1000462000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT = 1000462001, + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT = 1000462002, + VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT = 1000462003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT = 1000342000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV = 1000464000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV = 1000464001, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV = 1000464002, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV = 1000464003, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV = 1000464004, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV = 1000464005, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV = 1000464010, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT = 1000465000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID = 1000468000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_PROPERTIES_ANDROID = 1000468001, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_RESOLVE_PROPERTIES_ANDROID = 1000468002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ANTI_LAG_FEATURES_AMD = 1000476000, + VK_STRUCTURE_TYPE_ANTI_LAG_DATA_AMD = 1000476001, + VK_STRUCTURE_TYPE_ANTI_LAG_PRESENTATION_INFO_AMD = 1000476002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR = 1000481000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT = 1000482000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT = 1000482001, + VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT = 1000482002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_FEATURES_KHR = 1000483000, + 
VK_STRUCTURE_TYPE_PIPELINE_BINARY_CREATE_INFO_KHR = 1000483001, + VK_STRUCTURE_TYPE_PIPELINE_BINARY_INFO_KHR = 1000483002, + VK_STRUCTURE_TYPE_PIPELINE_BINARY_KEY_KHR = 1000483003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_PROPERTIES_KHR = 1000483004, + VK_STRUCTURE_TYPE_RELEASE_CAPTURED_PIPELINE_DATA_INFO_KHR = 1000483005, + VK_STRUCTURE_TYPE_PIPELINE_BINARY_DATA_INFO_KHR = 1000483006, + VK_STRUCTURE_TYPE_PIPELINE_CREATE_INFO_KHR = 1000483007, + VK_STRUCTURE_TYPE_DEVICE_PIPELINE_BINARY_INTERNAL_CACHE_CONTROL_KHR = 1000483008, + VK_STRUCTURE_TYPE_PIPELINE_BINARY_HANDLES_INFO_KHR = 1000483009, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM = 1000484000, + VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM = 1000484001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC = 1000485000, + VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC = 1000485001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM = 1000488000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV = 1000490000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV = 1000490001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_VECTOR_FEATURES_NV = 1000491000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_VECTOR_PROPERTIES_NV = 1000491001, + VK_STRUCTURE_TYPE_COOPERATIVE_VECTOR_PROPERTIES_NV = 1000491002, + VK_STRUCTURE_TYPE_CONVERT_COOPERATIVE_VECTOR_MATRIX_INFO_NV = 1000491004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_FEATURES_NV = 1000492000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_PROPERTIES_NV = 1000492001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT = 1000351000, + VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT = 1000351002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_FEATURES_EXT = 1000495000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_PROPERTIES_EXT = 1000495001, + VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT = 1000496000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM = 1000497000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM = 1000497001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_LIBRARY_GROUP_HANDLES_FEATURES_EXT = 1000498000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT = 1000499000, + VK_STRUCTURE_TYPE_LATENCY_SLEEP_MODE_INFO_NV = 1000505000, + VK_STRUCTURE_TYPE_LATENCY_SLEEP_INFO_NV = 1000505001, + VK_STRUCTURE_TYPE_SET_LATENCY_MARKER_INFO_NV = 1000505002, + VK_STRUCTURE_TYPE_GET_LATENCY_MARKER_INFO_NV = 1000505003, + VK_STRUCTURE_TYPE_LATENCY_TIMINGS_FRAME_REPORT_NV = 1000505004, + VK_STRUCTURE_TYPE_LATENCY_SUBMISSION_PRESENT_ID_NV = 1000505005, + VK_STRUCTURE_TYPE_OUT_OF_BAND_QUEUE_TYPE_INFO_NV = 1000505006, + VK_STRUCTURE_TYPE_SWAPCHAIN_LATENCY_CREATE_INFO_NV = 1000505007, + VK_STRUCTURE_TYPE_LATENCY_SURFACE_CAPABILITIES_NV = 1000505008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_KHR = 1000506000, + VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_KHR = 1000506001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_KHR = 1000506002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_RENDER_AREAS_FEATURES_QCOM = 1000510000, + VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_RENDER_AREAS_RENDER_PASS_BEGIN_INFO_QCOM = 1000510001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR = 1000201000, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_PROPERTIES_KHR = 1000511000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_CAPABILITIES_KHR = 1000512000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PICTURE_INFO_KHR = 1000512001, + VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PROFILE_INFO_KHR = 1000512003, + VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000512004, + VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_DPB_SLOT_INFO_KHR = 1000512005, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_CAPABILITIES_KHR = 1000513000, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000513001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_PICTURE_INFO_KHR = 1000513002, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_DPB_SLOT_INFO_KHR = 1000513003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_AV1_FEATURES_KHR = 1000513004, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_PROFILE_INFO_KHR = 1000513005, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_RATE_CONTROL_INFO_KHR = 1000513006, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_RATE_CONTROL_LAYER_INFO_KHR = 1000513007, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_QUALITY_LEVEL_PROPERTIES_KHR = 1000513008, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_SESSION_CREATE_INFO_KHR = 1000513009, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_GOP_REMAINING_FRAME_INFO_KHR = 1000513010, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR = 1000515000, + VK_STRUCTURE_TYPE_VIDEO_INLINE_QUERY_INFO_KHR = 1000515001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PER_STAGE_DESCRIPTOR_SET_FEATURES_NV = 1000516000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_FEATURES_QCOM = 1000518000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_PROPERTIES_QCOM = 1000518001, + VK_STRUCTURE_TYPE_SAMPLER_BLOCK_MATCH_WINDOW_CREATE_INFO_QCOM = 1000518002, + VK_STRUCTURE_TYPE_SAMPLER_CUBIC_WEIGHTS_CREATE_INFO_QCOM = 1000519000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_WEIGHTS_FEATURES_QCOM = 1000519001, + VK_STRUCTURE_TYPE_BLIT_IMAGE_CUBIC_WEIGHTS_INFO_QCOM = 1000519002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_DEGAMMA_FEATURES_QCOM = 1000520000, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_YCBCR_DEGAMMA_CREATE_INFO_QCOM = 1000520001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM = 1000521000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT = 1000524000, + VK_STRUCTURE_TYPE_SCREEN_BUFFER_PROPERTIES_QNX = 1000529000, + VK_STRUCTURE_TYPE_SCREEN_BUFFER_FORMAT_PROPERTIES_QNX = 1000529001, + VK_STRUCTURE_TYPE_IMPORT_SCREEN_BUFFER_INFO_QNX = 1000529002, + VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_QNX = 1000529003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_SCREEN_BUFFER_FEATURES_QNX = 1000529004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT = 1000530000, + VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR = 1000184000, + VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT = 1000545007, + VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT = 1000545008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV = 1000546000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_MEMORY_HEAP_FEATURES_QCOM = 1000547000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_MEMORY_HEAP_PROPERTIES_QCOM = 1000547001, + VK_STRUCTURE_TYPE_TILE_MEMORY_REQUIREMENTS_QCOM = 1000547002, + VK_STRUCTURE_TYPE_TILE_MEMORY_BIND_INFO_QCOM = 1000547003, + VK_STRUCTURE_TYPE_TILE_MEMORY_SIZE_INFO_QCOM = 1000547004, + VK_STRUCTURE_TYPE_DISPLAY_SURFACE_STEREO_CREATE_INFO_NV = 1000551000, + VK_STRUCTURE_TYPE_DISPLAY_MODE_STEREO_PROPERTIES_NV = 1000551001, + 
VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_CAPABILITIES_KHR = 1000553000, + VK_STRUCTURE_TYPE_VIDEO_FORMAT_QUANTIZATION_MAP_PROPERTIES_KHR = 1000553001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_INFO_KHR = 1000553002, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000553005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUANTIZATION_MAP_FEATURES_KHR = 1000553009, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUANTIZATION_MAP_CAPABILITIES_KHR = 1000553003, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUANTIZATION_MAP_CAPABILITIES_KHR = 1000553004, + VK_STRUCTURE_TYPE_VIDEO_FORMAT_H265_QUANTIZATION_MAP_PROPERTIES_KHR = 1000553006, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_QUANTIZATION_MAP_CAPABILITIES_KHR = 1000553007, + VK_STRUCTURE_TYPE_VIDEO_FORMAT_AV1_QUANTIZATION_MAP_PROPERTIES_KHR = 1000553008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV = 1000555000, + VK_STRUCTURE_TYPE_EXTERNAL_COMPUTE_QUEUE_DEVICE_CREATE_INFO_NV = 1000556000, + VK_STRUCTURE_TYPE_EXTERNAL_COMPUTE_QUEUE_CREATE_INFO_NV = 1000556001, + VK_STRUCTURE_TYPE_EXTERNAL_COMPUTE_QUEUE_DATA_PARAMS_NV = 1000556002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_COMPUTE_QUEUE_PROPERTIES_NV = 1000556003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_RELAXED_EXTENDED_INSTRUCTION_FEATURES_KHR = 1000558000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMMAND_BUFFER_INHERITANCE_FEATURES_NV = 1000559000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_FEATURES_KHR = 1000562000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_PROPERTIES_KHR = 1000562001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_LIST_KHR = 1000562002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_KHR = 1000562003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_VULKAN_PROPERTIES_KHR = 1000562004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV = 1000563000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_REPLICATED_COMPOSITES_FEATURES_EXT = 1000564000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV = 1000568000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_ACCELERATION_STRUCTURE_FEATURES_NV = 1000569000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_ACCELERATION_STRUCTURE_PROPERTIES_NV = 1000569001, + VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_CLUSTERS_BOTTOM_LEVEL_INPUT_NV = 1000569002, + VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_TRIANGLE_CLUSTER_INPUT_NV = 1000569003, + VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_MOVE_OBJECTS_INPUT_NV = 1000569004, + VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_INPUT_INFO_NV = 1000569005, + VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_COMMANDS_INFO_NV = 1000569006, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CLUSTER_ACCELERATION_STRUCTURE_CREATE_INFO_NV = 1000569007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PARTITIONED_ACCELERATION_STRUCTURE_FEATURES_NV = 1000570000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PARTITIONED_ACCELERATION_STRUCTURE_PROPERTIES_NV = 1000570001, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_PARTITIONED_ACCELERATION_STRUCTURE_NV = 1000570002, + VK_STRUCTURE_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCES_INPUT_NV = 1000570003, + VK_STRUCTURE_TYPE_BUILD_PARTITIONED_ACCELERATION_STRUCTURE_INFO_NV = 1000570004, + VK_STRUCTURE_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_FLAGS_NV = 1000570005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_EXT = 1000572000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_EXT = 1000572001, + 
VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_EXT = 1000572002, + VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_CREATE_INFO_EXT = 1000572003, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_EXT = 1000572004, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_EXT = 1000572006, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_EXT = 1000572007, + VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_PIPELINE_EXT = 1000572008, + VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_SHADER_EXT = 1000572009, + VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_PIPELINE_INFO_EXT = 1000572010, + VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_INFO_EXT = 1000572011, + VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_LAYOUT_INFO_EXT = 1000572012, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_PIPELINE_INFO_EXT = 1000572013, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_SHADER_INFO_EXT = 1000572014, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_8_FEATURES_KHR = 1000574000, + VK_STRUCTURE_TYPE_MEMORY_BARRIER_ACCESS_FLAGS_3_KHR = 1000574002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_FEATURES_MESA = 1000575000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_PROPERTIES_MESA = 1000575001, + VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA = 1000575002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_CONTROL_FEATURES_EXT = 1000582000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLAMP_CONTROL_CREATE_INFO_EXT = 1000582001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_2_FEATURES_KHR = 1000586000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_INLINE_SESSION_PARAMETERS_INFO_KHR = 1000586001, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_INLINE_SESSION_PARAMETERS_INFO_KHR = 1000586002, + VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_INLINE_SESSION_PARAMETERS_INFO_KHR = 1000586003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HDR_VIVID_FEATURES_HUAWEI = 1000590000, + VK_STRUCTURE_TYPE_HDR_VIVID_DYNAMIC_METADATA_HUAWEI = 1000590001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_FEATURES_NV = 1000593000, + VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_FLEXIBLE_DIMENSIONS_PROPERTIES_NV = 1000593001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_PROPERTIES_NV = 1000593002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_OPACITY_MICROMAP_FEATURES_ARM = 1000596000, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_METAL_HANDLE_INFO_EXT = 1000602000, + VK_STRUCTURE_TYPE_MEMORY_METAL_HANDLE_PROPERTIES_EXT = 1000602001, + VK_STRUCTURE_TYPE_MEMORY_GET_METAL_HANDLE_INFO_EXT = 1000602002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_KHR = 1000421000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_ROBUSTNESS_FEATURES_EXT = 1000608000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_SET_PRESENT_CONFIG_NV = 1000613000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_METERING_FEATURES_NV = 1000613001, +#endif + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_EXT = 1000425000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_EXT = 1000425001, + VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_EXT = 1000425002, + VK_STRUCTURE_TYPE_RENDERING_END_INFO_EXT = 1000619003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, + // VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT is a deprecated alias + 
VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, + VK_STRUCTURE_TYPE_RENDERING_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_INFO, + VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO, + VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO, + VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES, + VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, + VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, + VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + // VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT is a deprecated alias + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO, + VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2, + VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2, + VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, + VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, + VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, + 
VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, + VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES, + VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK, + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, + VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, + VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO, + VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, + VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, + VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES, + VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES, + VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES, + VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO, + VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, + 
VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO, + VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO, + // VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL is a deprecated alias + VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES, + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES, + VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO, + VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT, + VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES, + VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES, + VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO, + VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO, + VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES, + VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY, + VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY_EXT = VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO_EXT = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO, + VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO, + VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO_EXT = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO, + VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE, + VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT = VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY, + VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO, + VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES, + VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO, + VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES, + VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2, + VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR = VK_STRUCTURE_TYPE_DEPENDENCY_INFO, + VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR = VK_STRUCTURE_TYPE_SUBMIT_INFO_2, + VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES, + VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2, + VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2, + VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2, + VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2, + VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2, + VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2, + VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2, + VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2, + VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2, + VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2, + VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2, + VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT, + VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3, + VK_STRUCTURE_TYPE_PIPELINE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES, + VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS, + VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_EXT, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_EXT, + VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_EXT, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES, + VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_AREA_INFO, + VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO, + VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2, + VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2, + VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO, + VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO, + VK_STRUCTURE_TYPE_SHADER_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES, + VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS_KHR = VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS, + VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO_KHR = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO, + VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO_KHR = VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO, + VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO_KHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO, + VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO, + VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkStructureType; + +typedef enum VkPipelineCacheHeaderVersion { + VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1, + VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCacheHeaderVersion; + +typedef enum VkImageLayout { + VK_IMAGE_LAYOUT_UNDEFINED = 0, + VK_IMAGE_LAYOUT_GENERAL = 1, + VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2, + VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3, + VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4, + VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5, + VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 
6, + VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7, + VK_IMAGE_LAYOUT_PREINITIALIZED = 8, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL = 1000117000, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL = 1000117001, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL = 1000241000, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL = 1000241001, + VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL = 1000241002, + VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL = 1000241003, + VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL = 1000314000, + VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL = 1000314001, + VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ = 1000232000, + VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002, + VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR = 1000024000, + VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR = 1000024001, + VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR = 1000024002, + VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000, + VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT = 1000218000, + VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR = 1000164003, + VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR = 1000299000, + VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR = 1000299001, + VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR = 1000299002, + VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT = 1000339000, + VK_IMAGE_LAYOUT_VIDEO_ENCODE_QUANTIZATION_MAP_KHR = 1000553000, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR, + VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ_KHR = VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF +} VkImageLayout; + +typedef enum VkObjectType { + VK_OBJECT_TYPE_UNKNOWN = 0, + VK_OBJECT_TYPE_INSTANCE = 1, + VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2, + VK_OBJECT_TYPE_DEVICE = 3, + VK_OBJECT_TYPE_QUEUE = 4, + VK_OBJECT_TYPE_SEMAPHORE = 5, + VK_OBJECT_TYPE_COMMAND_BUFFER = 6, + VK_OBJECT_TYPE_FENCE = 7, + VK_OBJECT_TYPE_DEVICE_MEMORY = 8, + VK_OBJECT_TYPE_BUFFER = 9, + VK_OBJECT_TYPE_IMAGE = 10, + VK_OBJECT_TYPE_EVENT = 11, + VK_OBJECT_TYPE_QUERY_POOL = 12, + VK_OBJECT_TYPE_BUFFER_VIEW = 13, + VK_OBJECT_TYPE_IMAGE_VIEW = 14, + VK_OBJECT_TYPE_SHADER_MODULE = 15, + VK_OBJECT_TYPE_PIPELINE_CACHE = 16, + VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17, + VK_OBJECT_TYPE_RENDER_PASS = 18, + VK_OBJECT_TYPE_PIPELINE = 19, + VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20, + VK_OBJECT_TYPE_SAMPLER = 21, + VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22, + VK_OBJECT_TYPE_DESCRIPTOR_SET = 23, + VK_OBJECT_TYPE_FRAMEBUFFER = 24, + VK_OBJECT_TYPE_COMMAND_POOL = 25, + VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION = 1000156000, + VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE = 1000085000, + VK_OBJECT_TYPE_PRIVATE_DATA_SLOT = 1000295000, + VK_OBJECT_TYPE_SURFACE_KHR = 1000000000, + VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000, + VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000, + 
VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001, + VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000, + VK_OBJECT_TYPE_VIDEO_SESSION_KHR = 1000023000, + VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR = 1000023001, + VK_OBJECT_TYPE_CU_MODULE_NVX = 1000029000, + VK_OBJECT_TYPE_CU_FUNCTION_NVX = 1000029001, + VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT = 1000128000, + VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR = 1000150000, + VK_OBJECT_TYPE_VALIDATION_CACHE_EXT = 1000160000, + VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000, + VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL = 1000210000, + VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR = 1000268000, + VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV = 1000277000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_OBJECT_TYPE_CUDA_MODULE_NV = 1000307000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_OBJECT_TYPE_CUDA_FUNCTION_NV = 1000307001, +#endif + VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA = 1000366000, + VK_OBJECT_TYPE_MICROMAP_EXT = 1000396000, + VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV = 1000464000, + VK_OBJECT_TYPE_SHADER_EXT = 1000482000, + VK_OBJECT_TYPE_PIPELINE_BINARY_KHR = 1000483000, + VK_OBJECT_TYPE_EXTERNAL_COMPUTE_QUEUE_NV = 1000556000, + VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_EXT = 1000572000, + VK_OBJECT_TYPE_INDIRECT_EXECUTION_SET_EXT = 1000572001, + VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT, + VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkObjectType; + +typedef enum VkVendorId { + VK_VENDOR_ID_KHRONOS = 0x10000, + VK_VENDOR_ID_VIV = 0x10001, + VK_VENDOR_ID_VSI = 0x10002, + VK_VENDOR_ID_KAZAN = 0x10003, + VK_VENDOR_ID_CODEPLAY = 0x10004, + VK_VENDOR_ID_MESA = 0x10005, + VK_VENDOR_ID_POCL = 0x10006, + VK_VENDOR_ID_MOBILEYE = 0x10007, + VK_VENDOR_ID_MAX_ENUM = 0x7FFFFFFF +} VkVendorId; + +typedef enum VkSystemAllocationScope { + VK_SYSTEM_ALLOCATION_SCOPE_COMMAND = 0, + VK_SYSTEM_ALLOCATION_SCOPE_OBJECT = 1, + VK_SYSTEM_ALLOCATION_SCOPE_CACHE = 2, + VK_SYSTEM_ALLOCATION_SCOPE_DEVICE = 3, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE = 4, + VK_SYSTEM_ALLOCATION_SCOPE_MAX_ENUM = 0x7FFFFFFF +} VkSystemAllocationScope; + +typedef enum VkInternalAllocationType { + VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE = 0, + VK_INTERNAL_ALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkInternalAllocationType; + +typedef enum VkFormat { + VK_FORMAT_UNDEFINED = 0, + VK_FORMAT_R4G4_UNORM_PACK8 = 1, + VK_FORMAT_R4G4B4A4_UNORM_PACK16 = 2, + VK_FORMAT_B4G4R4A4_UNORM_PACK16 = 3, + VK_FORMAT_R5G6B5_UNORM_PACK16 = 4, + VK_FORMAT_B5G6R5_UNORM_PACK16 = 5, + VK_FORMAT_R5G5B5A1_UNORM_PACK16 = 6, + VK_FORMAT_B5G5R5A1_UNORM_PACK16 = 7, + VK_FORMAT_A1R5G5B5_UNORM_PACK16 = 8, + VK_FORMAT_R8_UNORM = 9, + VK_FORMAT_R8_SNORM = 10, + VK_FORMAT_R8_USCALED = 11, + VK_FORMAT_R8_SSCALED = 12, + VK_FORMAT_R8_UINT = 13, + VK_FORMAT_R8_SINT = 14, + VK_FORMAT_R8_SRGB = 15, + VK_FORMAT_R8G8_UNORM = 16, + VK_FORMAT_R8G8_SNORM = 17, + VK_FORMAT_R8G8_USCALED = 18, + VK_FORMAT_R8G8_SSCALED = 19, + VK_FORMAT_R8G8_UINT = 20, + VK_FORMAT_R8G8_SINT = 21, + VK_FORMAT_R8G8_SRGB = 22, + VK_FORMAT_R8G8B8_UNORM = 23, + VK_FORMAT_R8G8B8_SNORM = 24, + VK_FORMAT_R8G8B8_USCALED = 25, + VK_FORMAT_R8G8B8_SSCALED = 26, + VK_FORMAT_R8G8B8_UINT = 27, + VK_FORMAT_R8G8B8_SINT = 28, + VK_FORMAT_R8G8B8_SRGB = 29, + VK_FORMAT_B8G8R8_UNORM = 30, + VK_FORMAT_B8G8R8_SNORM = 31, + VK_FORMAT_B8G8R8_USCALED = 32, + VK_FORMAT_B8G8R8_SSCALED = 33, + 
VK_FORMAT_B8G8R8_UINT = 34, + VK_FORMAT_B8G8R8_SINT = 35, + VK_FORMAT_B8G8R8_SRGB = 36, + VK_FORMAT_R8G8B8A8_UNORM = 37, + VK_FORMAT_R8G8B8A8_SNORM = 38, + VK_FORMAT_R8G8B8A8_USCALED = 39, + VK_FORMAT_R8G8B8A8_SSCALED = 40, + VK_FORMAT_R8G8B8A8_UINT = 41, + VK_FORMAT_R8G8B8A8_SINT = 42, + VK_FORMAT_R8G8B8A8_SRGB = 43, + VK_FORMAT_B8G8R8A8_UNORM = 44, + VK_FORMAT_B8G8R8A8_SNORM = 45, + VK_FORMAT_B8G8R8A8_USCALED = 46, + VK_FORMAT_B8G8R8A8_SSCALED = 47, + VK_FORMAT_B8G8R8A8_UINT = 48, + VK_FORMAT_B8G8R8A8_SINT = 49, + VK_FORMAT_B8G8R8A8_SRGB = 50, + VK_FORMAT_A8B8G8R8_UNORM_PACK32 = 51, + VK_FORMAT_A8B8G8R8_SNORM_PACK32 = 52, + VK_FORMAT_A8B8G8R8_USCALED_PACK32 = 53, + VK_FORMAT_A8B8G8R8_SSCALED_PACK32 = 54, + VK_FORMAT_A8B8G8R8_UINT_PACK32 = 55, + VK_FORMAT_A8B8G8R8_SINT_PACK32 = 56, + VK_FORMAT_A8B8G8R8_SRGB_PACK32 = 57, + VK_FORMAT_A2R10G10B10_UNORM_PACK32 = 58, + VK_FORMAT_A2R10G10B10_SNORM_PACK32 = 59, + VK_FORMAT_A2R10G10B10_USCALED_PACK32 = 60, + VK_FORMAT_A2R10G10B10_SSCALED_PACK32 = 61, + VK_FORMAT_A2R10G10B10_UINT_PACK32 = 62, + VK_FORMAT_A2R10G10B10_SINT_PACK32 = 63, + VK_FORMAT_A2B10G10R10_UNORM_PACK32 = 64, + VK_FORMAT_A2B10G10R10_SNORM_PACK32 = 65, + VK_FORMAT_A2B10G10R10_USCALED_PACK32 = 66, + VK_FORMAT_A2B10G10R10_SSCALED_PACK32 = 67, + VK_FORMAT_A2B10G10R10_UINT_PACK32 = 68, + VK_FORMAT_A2B10G10R10_SINT_PACK32 = 69, + VK_FORMAT_R16_UNORM = 70, + VK_FORMAT_R16_SNORM = 71, + VK_FORMAT_R16_USCALED = 72, + VK_FORMAT_R16_SSCALED = 73, + VK_FORMAT_R16_UINT = 74, + VK_FORMAT_R16_SINT = 75, + VK_FORMAT_R16_SFLOAT = 76, + VK_FORMAT_R16G16_UNORM = 77, + VK_FORMAT_R16G16_SNORM = 78, + VK_FORMAT_R16G16_USCALED = 79, + VK_FORMAT_R16G16_SSCALED = 80, + VK_FORMAT_R16G16_UINT = 81, + VK_FORMAT_R16G16_SINT = 82, + VK_FORMAT_R16G16_SFLOAT = 83, + VK_FORMAT_R16G16B16_UNORM = 84, + VK_FORMAT_R16G16B16_SNORM = 85, + VK_FORMAT_R16G16B16_USCALED = 86, + VK_FORMAT_R16G16B16_SSCALED = 87, + VK_FORMAT_R16G16B16_UINT = 88, + VK_FORMAT_R16G16B16_SINT = 89, + VK_FORMAT_R16G16B16_SFLOAT = 90, + VK_FORMAT_R16G16B16A16_UNORM = 91, + VK_FORMAT_R16G16B16A16_SNORM = 92, + VK_FORMAT_R16G16B16A16_USCALED = 93, + VK_FORMAT_R16G16B16A16_SSCALED = 94, + VK_FORMAT_R16G16B16A16_UINT = 95, + VK_FORMAT_R16G16B16A16_SINT = 96, + VK_FORMAT_R16G16B16A16_SFLOAT = 97, + VK_FORMAT_R32_UINT = 98, + VK_FORMAT_R32_SINT = 99, + VK_FORMAT_R32_SFLOAT = 100, + VK_FORMAT_R32G32_UINT = 101, + VK_FORMAT_R32G32_SINT = 102, + VK_FORMAT_R32G32_SFLOAT = 103, + VK_FORMAT_R32G32B32_UINT = 104, + VK_FORMAT_R32G32B32_SINT = 105, + VK_FORMAT_R32G32B32_SFLOAT = 106, + VK_FORMAT_R32G32B32A32_UINT = 107, + VK_FORMAT_R32G32B32A32_SINT = 108, + VK_FORMAT_R32G32B32A32_SFLOAT = 109, + VK_FORMAT_R64_UINT = 110, + VK_FORMAT_R64_SINT = 111, + VK_FORMAT_R64_SFLOAT = 112, + VK_FORMAT_R64G64_UINT = 113, + VK_FORMAT_R64G64_SINT = 114, + VK_FORMAT_R64G64_SFLOAT = 115, + VK_FORMAT_R64G64B64_UINT = 116, + VK_FORMAT_R64G64B64_SINT = 117, + VK_FORMAT_R64G64B64_SFLOAT = 118, + VK_FORMAT_R64G64B64A64_UINT = 119, + VK_FORMAT_R64G64B64A64_SINT = 120, + VK_FORMAT_R64G64B64A64_SFLOAT = 121, + VK_FORMAT_B10G11R11_UFLOAT_PACK32 = 122, + VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 = 123, + VK_FORMAT_D16_UNORM = 124, + VK_FORMAT_X8_D24_UNORM_PACK32 = 125, + VK_FORMAT_D32_SFLOAT = 126, + VK_FORMAT_S8_UINT = 127, + VK_FORMAT_D16_UNORM_S8_UINT = 128, + VK_FORMAT_D24_UNORM_S8_UINT = 129, + VK_FORMAT_D32_SFLOAT_S8_UINT = 130, + VK_FORMAT_BC1_RGB_UNORM_BLOCK = 131, + VK_FORMAT_BC1_RGB_SRGB_BLOCK = 132, + VK_FORMAT_BC1_RGBA_UNORM_BLOCK = 133, + VK_FORMAT_BC1_RGBA_SRGB_BLOCK = 134, + 
VK_FORMAT_BC2_UNORM_BLOCK = 135, + VK_FORMAT_BC2_SRGB_BLOCK = 136, + VK_FORMAT_BC3_UNORM_BLOCK = 137, + VK_FORMAT_BC3_SRGB_BLOCK = 138, + VK_FORMAT_BC4_UNORM_BLOCK = 139, + VK_FORMAT_BC4_SNORM_BLOCK = 140, + VK_FORMAT_BC5_UNORM_BLOCK = 141, + VK_FORMAT_BC5_SNORM_BLOCK = 142, + VK_FORMAT_BC6H_UFLOAT_BLOCK = 143, + VK_FORMAT_BC6H_SFLOAT_BLOCK = 144, + VK_FORMAT_BC7_UNORM_BLOCK = 145, + VK_FORMAT_BC7_SRGB_BLOCK = 146, + VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK = 147, + VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK = 148, + VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK = 149, + VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK = 150, + VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK = 151, + VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK = 152, + VK_FORMAT_EAC_R11_UNORM_BLOCK = 153, + VK_FORMAT_EAC_R11_SNORM_BLOCK = 154, + VK_FORMAT_EAC_R11G11_UNORM_BLOCK = 155, + VK_FORMAT_EAC_R11G11_SNORM_BLOCK = 156, + VK_FORMAT_ASTC_4x4_UNORM_BLOCK = 157, + VK_FORMAT_ASTC_4x4_SRGB_BLOCK = 158, + VK_FORMAT_ASTC_5x4_UNORM_BLOCK = 159, + VK_FORMAT_ASTC_5x4_SRGB_BLOCK = 160, + VK_FORMAT_ASTC_5x5_UNORM_BLOCK = 161, + VK_FORMAT_ASTC_5x5_SRGB_BLOCK = 162, + VK_FORMAT_ASTC_6x5_UNORM_BLOCK = 163, + VK_FORMAT_ASTC_6x5_SRGB_BLOCK = 164, + VK_FORMAT_ASTC_6x6_UNORM_BLOCK = 165, + VK_FORMAT_ASTC_6x6_SRGB_BLOCK = 166, + VK_FORMAT_ASTC_8x5_UNORM_BLOCK = 167, + VK_FORMAT_ASTC_8x5_SRGB_BLOCK = 168, + VK_FORMAT_ASTC_8x6_UNORM_BLOCK = 169, + VK_FORMAT_ASTC_8x6_SRGB_BLOCK = 170, + VK_FORMAT_ASTC_8x8_UNORM_BLOCK = 171, + VK_FORMAT_ASTC_8x8_SRGB_BLOCK = 172, + VK_FORMAT_ASTC_10x5_UNORM_BLOCK = 173, + VK_FORMAT_ASTC_10x5_SRGB_BLOCK = 174, + VK_FORMAT_ASTC_10x6_UNORM_BLOCK = 175, + VK_FORMAT_ASTC_10x6_SRGB_BLOCK = 176, + VK_FORMAT_ASTC_10x8_UNORM_BLOCK = 177, + VK_FORMAT_ASTC_10x8_SRGB_BLOCK = 178, + VK_FORMAT_ASTC_10x10_UNORM_BLOCK = 179, + VK_FORMAT_ASTC_10x10_SRGB_BLOCK = 180, + VK_FORMAT_ASTC_12x10_UNORM_BLOCK = 181, + VK_FORMAT_ASTC_12x10_SRGB_BLOCK = 182, + VK_FORMAT_ASTC_12x12_UNORM_BLOCK = 183, + VK_FORMAT_ASTC_12x12_SRGB_BLOCK = 184, + VK_FORMAT_G8B8G8R8_422_UNORM = 1000156000, + VK_FORMAT_B8G8R8G8_422_UNORM = 1000156001, + VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM = 1000156002, + VK_FORMAT_G8_B8R8_2PLANE_420_UNORM = 1000156003, + VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM = 1000156004, + VK_FORMAT_G8_B8R8_2PLANE_422_UNORM = 1000156005, + VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM = 1000156006, + VK_FORMAT_R10X6_UNORM_PACK16 = 1000156007, + VK_FORMAT_R10X6G10X6_UNORM_2PACK16 = 1000156008, + VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16 = 1000156009, + VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16 = 1000156010, + VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16 = 1000156011, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16 = 1000156012, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16 = 1000156013, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16 = 1000156014, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16 = 1000156015, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16 = 1000156016, + VK_FORMAT_R12X4_UNORM_PACK16 = 1000156017, + VK_FORMAT_R12X4G12X4_UNORM_2PACK16 = 1000156018, + VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16 = 1000156019, + VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16 = 1000156020, + VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16 = 1000156021, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16 = 1000156022, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16 = 1000156023, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16 = 1000156024, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16 = 1000156025, + 
VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16 = 1000156026, + VK_FORMAT_G16B16G16R16_422_UNORM = 1000156027, + VK_FORMAT_B16G16R16G16_422_UNORM = 1000156028, + VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM = 1000156029, + VK_FORMAT_G16_B16R16_2PLANE_420_UNORM = 1000156030, + VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM = 1000156031, + VK_FORMAT_G16_B16R16_2PLANE_422_UNORM = 1000156032, + VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM = 1000156033, + VK_FORMAT_G8_B8R8_2PLANE_444_UNORM = 1000330000, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16 = 1000330001, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16 = 1000330002, + VK_FORMAT_G16_B16R16_2PLANE_444_UNORM = 1000330003, + VK_FORMAT_A4R4G4B4_UNORM_PACK16 = 1000340000, + VK_FORMAT_A4B4G4R4_UNORM_PACK16 = 1000340001, + VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK = 1000066000, + VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK = 1000066001, + VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK = 1000066002, + VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK = 1000066003, + VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK = 1000066004, + VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK = 1000066005, + VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK = 1000066006, + VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK = 1000066007, + VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK = 1000066008, + VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK = 1000066009, + VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK = 1000066010, + VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK = 1000066011, + VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK = 1000066012, + VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK = 1000066013, + VK_FORMAT_A1B5G5R5_UNORM_PACK16 = 1000470000, + VK_FORMAT_A8_UNORM = 1000470001, + VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG = 1000054000, + VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG = 1000054001, + VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG = 1000054002, + VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG = 1000054003, + VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG = 1000054004, + VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005, + VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006, + VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007, + VK_FORMAT_R16G16_SFIXED5_NV = 1000464000, + VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK, + VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK, + VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK, + VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK, + VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK, + VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK, + VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK, + VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK, + VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK, + VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK, + VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK, + VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK, + VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK, + VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK, + VK_FORMAT_G8B8G8R8_422_UNORM_KHR = VK_FORMAT_G8B8G8R8_422_UNORM, + VK_FORMAT_B8G8R8G8_422_UNORM_KHR = VK_FORMAT_B8G8R8G8_422_UNORM, + VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, + VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, + VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, + VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, + VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR = 
VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, + VK_FORMAT_R10X6_UNORM_PACK16_KHR = VK_FORMAT_R10X6_UNORM_PACK16, + VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16, + VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, + VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, + VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, + VK_FORMAT_R12X4_UNORM_PACK16_KHR = VK_FORMAT_R12X4_UNORM_PACK16, + VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16, + VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, + VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, + VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, + VK_FORMAT_G16B16G16R16_422_UNORM_KHR = VK_FORMAT_G16B16G16R16_422_UNORM, + VK_FORMAT_B16G16R16G16_422_UNORM_KHR = VK_FORMAT_B16G16R16G16_422_UNORM, + VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, + VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, + VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, + VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, + VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, + VK_FORMAT_G8_B8R8_2PLANE_444_UNORM_EXT = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16, + VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM, + VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16, + VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16, + // VK_FORMAT_R16G16_S10_5_NV is a deprecated alias + VK_FORMAT_R16G16_S10_5_NV = VK_FORMAT_R16G16_SFIXED5_NV, + VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR = VK_FORMAT_A1B5G5R5_UNORM_PACK16, + VK_FORMAT_A8_UNORM_KHR = VK_FORMAT_A8_UNORM, + VK_FORMAT_MAX_ENUM = 0x7FFFFFFF +} VkFormat; + +typedef enum VkImageTiling { + VK_IMAGE_TILING_OPTIMAL = 
0, + VK_IMAGE_TILING_LINEAR = 1, + VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT = 1000158000, + VK_IMAGE_TILING_MAX_ENUM = 0x7FFFFFFF +} VkImageTiling; + +typedef enum VkImageType { + VK_IMAGE_TYPE_1D = 0, + VK_IMAGE_TYPE_2D = 1, + VK_IMAGE_TYPE_3D = 2, + VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkImageType; + +typedef enum VkPhysicalDeviceType { + VK_PHYSICAL_DEVICE_TYPE_OTHER = 0, + VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU = 1, + VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU = 2, + VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU = 3, + VK_PHYSICAL_DEVICE_TYPE_CPU = 4, + VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkPhysicalDeviceType; + +typedef enum VkQueryType { + VK_QUERY_TYPE_OCCLUSION = 0, + VK_QUERY_TYPE_PIPELINE_STATISTICS = 1, + VK_QUERY_TYPE_TIMESTAMP = 2, + VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR = 1000023000, + VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT = 1000028004, + VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR = 1000116000, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR = 1000150000, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR = 1000150001, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV = 1000165000, + VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL = 1000210000, + VK_QUERY_TYPE_VIDEO_ENCODE_FEEDBACK_KHR = 1000299000, + VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT = 1000328000, + VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT = 1000382000, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR = 1000386000, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR = 1000386001, + VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT = 1000396000, + VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT = 1000396001, + VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkQueryType; + +typedef enum VkSharingMode { + VK_SHARING_MODE_EXCLUSIVE = 0, + VK_SHARING_MODE_CONCURRENT = 1, + VK_SHARING_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSharingMode; + +typedef enum VkComponentSwizzle { + VK_COMPONENT_SWIZZLE_IDENTITY = 0, + VK_COMPONENT_SWIZZLE_ZERO = 1, + VK_COMPONENT_SWIZZLE_ONE = 2, + VK_COMPONENT_SWIZZLE_R = 3, + VK_COMPONENT_SWIZZLE_G = 4, + VK_COMPONENT_SWIZZLE_B = 5, + VK_COMPONENT_SWIZZLE_A = 6, + VK_COMPONENT_SWIZZLE_MAX_ENUM = 0x7FFFFFFF +} VkComponentSwizzle; + +typedef enum VkImageViewType { + VK_IMAGE_VIEW_TYPE_1D = 0, + VK_IMAGE_VIEW_TYPE_2D = 1, + VK_IMAGE_VIEW_TYPE_3D = 2, + VK_IMAGE_VIEW_TYPE_CUBE = 3, + VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4, + VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5, + VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6, + VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkImageViewType; + +typedef enum VkBlendFactor { + VK_BLEND_FACTOR_ZERO = 0, + VK_BLEND_FACTOR_ONE = 1, + VK_BLEND_FACTOR_SRC_COLOR = 2, + VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR = 3, + VK_BLEND_FACTOR_DST_COLOR = 4, + VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR = 5, + VK_BLEND_FACTOR_SRC_ALPHA = 6, + VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA = 7, + VK_BLEND_FACTOR_DST_ALPHA = 8, + VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA = 9, + VK_BLEND_FACTOR_CONSTANT_COLOR = 10, + VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR = 11, + VK_BLEND_FACTOR_CONSTANT_ALPHA = 12, + VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA = 13, + VK_BLEND_FACTOR_SRC_ALPHA_SATURATE = 14, + VK_BLEND_FACTOR_SRC1_COLOR = 15, + VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR = 16, + VK_BLEND_FACTOR_SRC1_ALPHA = 17, + VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA = 18, + VK_BLEND_FACTOR_MAX_ENUM = 0x7FFFFFFF +} VkBlendFactor; + +typedef enum VkBlendOp { + VK_BLEND_OP_ADD = 0, + VK_BLEND_OP_SUBTRACT = 1, + VK_BLEND_OP_REVERSE_SUBTRACT = 2, + VK_BLEND_OP_MIN = 3, + VK_BLEND_OP_MAX = 4, + VK_BLEND_OP_ZERO_EXT = 1000148000, + VK_BLEND_OP_SRC_EXT = 
1000148001, + VK_BLEND_OP_DST_EXT = 1000148002, + VK_BLEND_OP_SRC_OVER_EXT = 1000148003, + VK_BLEND_OP_DST_OVER_EXT = 1000148004, + VK_BLEND_OP_SRC_IN_EXT = 1000148005, + VK_BLEND_OP_DST_IN_EXT = 1000148006, + VK_BLEND_OP_SRC_OUT_EXT = 1000148007, + VK_BLEND_OP_DST_OUT_EXT = 1000148008, + VK_BLEND_OP_SRC_ATOP_EXT = 1000148009, + VK_BLEND_OP_DST_ATOP_EXT = 1000148010, + VK_BLEND_OP_XOR_EXT = 1000148011, + VK_BLEND_OP_MULTIPLY_EXT = 1000148012, + VK_BLEND_OP_SCREEN_EXT = 1000148013, + VK_BLEND_OP_OVERLAY_EXT = 1000148014, + VK_BLEND_OP_DARKEN_EXT = 1000148015, + VK_BLEND_OP_LIGHTEN_EXT = 1000148016, + VK_BLEND_OP_COLORDODGE_EXT = 1000148017, + VK_BLEND_OP_COLORBURN_EXT = 1000148018, + VK_BLEND_OP_HARDLIGHT_EXT = 1000148019, + VK_BLEND_OP_SOFTLIGHT_EXT = 1000148020, + VK_BLEND_OP_DIFFERENCE_EXT = 1000148021, + VK_BLEND_OP_EXCLUSION_EXT = 1000148022, + VK_BLEND_OP_INVERT_EXT = 1000148023, + VK_BLEND_OP_INVERT_RGB_EXT = 1000148024, + VK_BLEND_OP_LINEARDODGE_EXT = 1000148025, + VK_BLEND_OP_LINEARBURN_EXT = 1000148026, + VK_BLEND_OP_VIVIDLIGHT_EXT = 1000148027, + VK_BLEND_OP_LINEARLIGHT_EXT = 1000148028, + VK_BLEND_OP_PINLIGHT_EXT = 1000148029, + VK_BLEND_OP_HARDMIX_EXT = 1000148030, + VK_BLEND_OP_HSL_HUE_EXT = 1000148031, + VK_BLEND_OP_HSL_SATURATION_EXT = 1000148032, + VK_BLEND_OP_HSL_COLOR_EXT = 1000148033, + VK_BLEND_OP_HSL_LUMINOSITY_EXT = 1000148034, + VK_BLEND_OP_PLUS_EXT = 1000148035, + VK_BLEND_OP_PLUS_CLAMPED_EXT = 1000148036, + VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT = 1000148037, + VK_BLEND_OP_PLUS_DARKER_EXT = 1000148038, + VK_BLEND_OP_MINUS_EXT = 1000148039, + VK_BLEND_OP_MINUS_CLAMPED_EXT = 1000148040, + VK_BLEND_OP_CONTRAST_EXT = 1000148041, + VK_BLEND_OP_INVERT_OVG_EXT = 1000148042, + VK_BLEND_OP_RED_EXT = 1000148043, + VK_BLEND_OP_GREEN_EXT = 1000148044, + VK_BLEND_OP_BLUE_EXT = 1000148045, + VK_BLEND_OP_MAX_ENUM = 0x7FFFFFFF +} VkBlendOp; + +typedef enum VkCompareOp { + VK_COMPARE_OP_NEVER = 0, + VK_COMPARE_OP_LESS = 1, + VK_COMPARE_OP_EQUAL = 2, + VK_COMPARE_OP_LESS_OR_EQUAL = 3, + VK_COMPARE_OP_GREATER = 4, + VK_COMPARE_OP_NOT_EQUAL = 5, + VK_COMPARE_OP_GREATER_OR_EQUAL = 6, + VK_COMPARE_OP_ALWAYS = 7, + VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF +} VkCompareOp; + +typedef enum VkDynamicState { + VK_DYNAMIC_STATE_VIEWPORT = 0, + VK_DYNAMIC_STATE_SCISSOR = 1, + VK_DYNAMIC_STATE_LINE_WIDTH = 2, + VK_DYNAMIC_STATE_DEPTH_BIAS = 3, + VK_DYNAMIC_STATE_BLEND_CONSTANTS = 4, + VK_DYNAMIC_STATE_DEPTH_BOUNDS = 5, + VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK = 6, + VK_DYNAMIC_STATE_STENCIL_WRITE_MASK = 7, + VK_DYNAMIC_STATE_STENCIL_REFERENCE = 8, + VK_DYNAMIC_STATE_CULL_MODE = 1000267000, + VK_DYNAMIC_STATE_FRONT_FACE = 1000267001, + VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY = 1000267002, + VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT = 1000267003, + VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT = 1000267004, + VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE = 1000267005, + VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE = 1000267006, + VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE = 1000267007, + VK_DYNAMIC_STATE_DEPTH_COMPARE_OP = 1000267008, + VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE = 1000267009, + VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE = 1000267010, + VK_DYNAMIC_STATE_STENCIL_OP = 1000267011, + VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE = 1000377001, + VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE = 1000377002, + VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE = 1000377004, + VK_DYNAMIC_STATE_LINE_STIPPLE = 1000259000, + VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV = 1000087000, + VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT = 1000099000, + 
VK_DYNAMIC_STATE_DISCARD_RECTANGLE_ENABLE_EXT = 1000099001, + VK_DYNAMIC_STATE_DISCARD_RECTANGLE_MODE_EXT = 1000099002, + VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT = 1000143000, + VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR = 1000347000, + VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV = 1000164004, + VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV = 1000164006, + VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_ENABLE_NV = 1000205000, + VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV = 1000205001, + VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR = 1000226000, + VK_DYNAMIC_STATE_VERTEX_INPUT_EXT = 1000352000, + VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT = 1000377000, + VK_DYNAMIC_STATE_LOGIC_OP_EXT = 1000377003, + VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT = 1000381000, + VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT = 1000455003, + VK_DYNAMIC_STATE_POLYGON_MODE_EXT = 1000455004, + VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT = 1000455005, + VK_DYNAMIC_STATE_SAMPLE_MASK_EXT = 1000455006, + VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT = 1000455007, + VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT = 1000455008, + VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT = 1000455009, + VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT = 1000455010, + VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT = 1000455011, + VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT = 1000455012, + VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT = 1000455002, + VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT = 1000455013, + VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT = 1000455014, + VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT = 1000455015, + VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT = 1000455016, + VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT = 1000455017, + VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT = 1000455018, + VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT = 1000455019, + VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT = 1000455020, + VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT = 1000455021, + VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT = 1000455022, + VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV = 1000455023, + VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV = 1000455024, + VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV = 1000455025, + VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV = 1000455026, + VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV = 1000455027, + VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV = 1000455028, + VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV = 1000455029, + VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV = 1000455030, + VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV = 1000455031, + VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV = 1000455032, + VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT = 1000524000, + VK_DYNAMIC_STATE_DEPTH_CLAMP_RANGE_EXT = 1000582000, + VK_DYNAMIC_STATE_LINE_STIPPLE_EXT = VK_DYNAMIC_STATE_LINE_STIPPLE, + VK_DYNAMIC_STATE_CULL_MODE_EXT = VK_DYNAMIC_STATE_CULL_MODE, + VK_DYNAMIC_STATE_FRONT_FACE_EXT = VK_DYNAMIC_STATE_FRONT_FACE, + VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY, + VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT, + VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT, + VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE, + VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE, + VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE, + VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP, + 
VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE, + VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE, + VK_DYNAMIC_STATE_STENCIL_OP_EXT = VK_DYNAMIC_STATE_STENCIL_OP, + VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE, + VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE, + VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE, + VK_DYNAMIC_STATE_LINE_STIPPLE_KHR = VK_DYNAMIC_STATE_LINE_STIPPLE, + VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF +} VkDynamicState; + +typedef enum VkFrontFace { + VK_FRONT_FACE_COUNTER_CLOCKWISE = 0, + VK_FRONT_FACE_CLOCKWISE = 1, + VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF +} VkFrontFace; + +typedef enum VkVertexInputRate { + VK_VERTEX_INPUT_RATE_VERTEX = 0, + VK_VERTEX_INPUT_RATE_INSTANCE = 1, + VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF +} VkVertexInputRate; + +typedef enum VkPrimitiveTopology { + VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0, + VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1, + VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5, + VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6, + VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9, + VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10, + VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF +} VkPrimitiveTopology; + +typedef enum VkPolygonMode { + VK_POLYGON_MODE_FILL = 0, + VK_POLYGON_MODE_LINE = 1, + VK_POLYGON_MODE_POINT = 2, + VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000, + VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF +} VkPolygonMode; + +typedef enum VkStencilOp { + VK_STENCIL_OP_KEEP = 0, + VK_STENCIL_OP_ZERO = 1, + VK_STENCIL_OP_REPLACE = 2, + VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3, + VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4, + VK_STENCIL_OP_INVERT = 5, + VK_STENCIL_OP_INCREMENT_AND_WRAP = 6, + VK_STENCIL_OP_DECREMENT_AND_WRAP = 7, + VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF +} VkStencilOp; + +typedef enum VkLogicOp { + VK_LOGIC_OP_CLEAR = 0, + VK_LOGIC_OP_AND = 1, + VK_LOGIC_OP_AND_REVERSE = 2, + VK_LOGIC_OP_COPY = 3, + VK_LOGIC_OP_AND_INVERTED = 4, + VK_LOGIC_OP_NO_OP = 5, + VK_LOGIC_OP_XOR = 6, + VK_LOGIC_OP_OR = 7, + VK_LOGIC_OP_NOR = 8, + VK_LOGIC_OP_EQUIVALENT = 9, + VK_LOGIC_OP_INVERT = 10, + VK_LOGIC_OP_OR_REVERSE = 11, + VK_LOGIC_OP_COPY_INVERTED = 12, + VK_LOGIC_OP_OR_INVERTED = 13, + VK_LOGIC_OP_NAND = 14, + VK_LOGIC_OP_SET = 15, + VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF +} VkLogicOp; + +typedef enum VkBorderColor { + VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0, + VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1, + VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2, + VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3, + VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4, + VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5, + VK_BORDER_COLOR_FLOAT_CUSTOM_EXT = 1000287003, + VK_BORDER_COLOR_INT_CUSTOM_EXT = 1000287004, + VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF +} VkBorderColor; + +typedef enum VkFilter { + VK_FILTER_NEAREST = 0, + VK_FILTER_LINEAR = 1, + VK_FILTER_CUBIC_EXT = 1000015000, + VK_FILTER_CUBIC_IMG = VK_FILTER_CUBIC_EXT, + VK_FILTER_MAX_ENUM = 0x7FFFFFFF +} VkFilter; + +typedef enum VkSamplerAddressMode { + VK_SAMPLER_ADDRESS_MODE_REPEAT = 0, + VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1, + VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2, + VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3, + 
VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4, + // VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR is a deprecated alias + VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, + VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerAddressMode; + +typedef enum VkSamplerMipmapMode { + VK_SAMPLER_MIPMAP_MODE_NEAREST = 0, + VK_SAMPLER_MIPMAP_MODE_LINEAR = 1, + VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerMipmapMode; + +typedef enum VkDescriptorType { + VK_DESCRIPTOR_TYPE_SAMPLER = 0, + VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER = 1, + VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE = 2, + VK_DESCRIPTOR_TYPE_STORAGE_IMAGE = 3, + VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER = 4, + VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER = 5, + VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER = 6, + VK_DESCRIPTOR_TYPE_STORAGE_BUFFER = 7, + VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC = 8, + VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC = 9, + VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10, + VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK = 1000138000, + VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR = 1000150000, + VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000, + VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM = 1000440000, + VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM = 1000440001, + VK_DESCRIPTOR_TYPE_MUTABLE_EXT = 1000351000, + VK_DESCRIPTOR_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_NV = 1000570000, + VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK, + VK_DESCRIPTOR_TYPE_MUTABLE_VALVE = VK_DESCRIPTOR_TYPE_MUTABLE_EXT, + VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorType; + +typedef enum VkAttachmentLoadOp { + VK_ATTACHMENT_LOAD_OP_LOAD = 0, + VK_ATTACHMENT_LOAD_OP_CLEAR = 1, + VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2, + VK_ATTACHMENT_LOAD_OP_NONE = 1000400000, + VK_ATTACHMENT_LOAD_OP_NONE_EXT = VK_ATTACHMENT_LOAD_OP_NONE, + VK_ATTACHMENT_LOAD_OP_NONE_KHR = VK_ATTACHMENT_LOAD_OP_NONE, + VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF +} VkAttachmentLoadOp; + +typedef enum VkAttachmentStoreOp { + VK_ATTACHMENT_STORE_OP_STORE = 0, + VK_ATTACHMENT_STORE_OP_DONT_CARE = 1, + VK_ATTACHMENT_STORE_OP_NONE = 1000301000, + VK_ATTACHMENT_STORE_OP_NONE_KHR = VK_ATTACHMENT_STORE_OP_NONE, + VK_ATTACHMENT_STORE_OP_NONE_QCOM = VK_ATTACHMENT_STORE_OP_NONE, + VK_ATTACHMENT_STORE_OP_NONE_EXT = VK_ATTACHMENT_STORE_OP_NONE, + VK_ATTACHMENT_STORE_OP_MAX_ENUM = 0x7FFFFFFF +} VkAttachmentStoreOp; + +typedef enum VkPipelineBindPoint { + VK_PIPELINE_BIND_POINT_GRAPHICS = 0, + VK_PIPELINE_BIND_POINT_COMPUTE = 1, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_PIPELINE_BIND_POINT_EXECUTION_GRAPH_AMDX = 1000134000, +#endif + VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR = 1000165000, + VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI = 1000369003, + VK_PIPELINE_BIND_POINT_RAY_TRACING_NV = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, + VK_PIPELINE_BIND_POINT_MAX_ENUM = 0x7FFFFFFF +} VkPipelineBindPoint; + +typedef enum VkCommandBufferLevel { + VK_COMMAND_BUFFER_LEVEL_PRIMARY = 0, + VK_COMMAND_BUFFER_LEVEL_SECONDARY = 1, + VK_COMMAND_BUFFER_LEVEL_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferLevel; + +typedef enum VkIndexType { + VK_INDEX_TYPE_UINT16 = 0, + VK_INDEX_TYPE_UINT32 = 1, + VK_INDEX_TYPE_UINT8 = 1000265000, + VK_INDEX_TYPE_NONE_KHR = 1000165000, + VK_INDEX_TYPE_NONE_NV = VK_INDEX_TYPE_NONE_KHR, + VK_INDEX_TYPE_UINT8_EXT = VK_INDEX_TYPE_UINT8, + VK_INDEX_TYPE_UINT8_KHR = VK_INDEX_TYPE_UINT8, + VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkIndexType; + +typedef enum VkSubpassContents { + VK_SUBPASS_CONTENTS_INLINE = 
0, + VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1, + VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR = 1000451000, + VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_EXT = VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR, + VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF +} VkSubpassContents; + +typedef enum VkAccessFlagBits { + VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001, + VK_ACCESS_INDEX_READ_BIT = 0x00000002, + VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004, + VK_ACCESS_UNIFORM_READ_BIT = 0x00000008, + VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010, + VK_ACCESS_SHADER_READ_BIT = 0x00000020, + VK_ACCESS_SHADER_WRITE_BIT = 0x00000040, + VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080, + VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100, + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200, + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400, + VK_ACCESS_TRANSFER_READ_BIT = 0x00000800, + VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000, + VK_ACCESS_HOST_READ_BIT = 0x00002000, + VK_ACCESS_HOST_WRITE_BIT = 0x00004000, + VK_ACCESS_MEMORY_READ_BIT = 0x00008000, + VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000, + VK_ACCESS_NONE = 0, + VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000, + VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000, + VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000, + VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000, + VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000, + VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000, + VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000, + VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000, + VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000, + VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_EXT = 0x00020000, + VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_EXT = 0x00040000, + VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR, + VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, + VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, + VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_EXT, + VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_EXT, + VK_ACCESS_NONE_KHR = VK_ACCESS_NONE, + VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkAccessFlagBits; +typedef VkFlags VkAccessFlags; + +typedef enum VkImageAspectFlagBits { + VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001, + VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002, + VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, + VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008, + VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, + VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, + VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040, + VK_IMAGE_ASPECT_NONE = 0, + VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT = 0x00000080, + VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT = 0x00000100, + VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT = 0x00000200, + VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT = 0x00000400, + VK_IMAGE_ASPECT_PLANE_0_BIT_KHR = VK_IMAGE_ASPECT_PLANE_0_BIT, + VK_IMAGE_ASPECT_PLANE_1_BIT_KHR = VK_IMAGE_ASPECT_PLANE_1_BIT, + VK_IMAGE_ASPECT_PLANE_2_BIT_KHR = VK_IMAGE_ASPECT_PLANE_2_BIT, + VK_IMAGE_ASPECT_NONE_KHR = VK_IMAGE_ASPECT_NONE, + VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageAspectFlagBits; +typedef VkFlags VkImageAspectFlags; + +typedef enum VkFormatFeatureFlagBits { + VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001, + 
VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT = 0x00000002, + VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004, + VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008, + VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT = 0x00000010, + VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020, + VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT = 0x00000040, + VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT = 0x00000080, + VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100, + VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200, + VK_FORMAT_FEATURE_BLIT_SRC_BIT = 0x00000400, + VK_FORMAT_FEATURE_BLIT_DST_BIT = 0x00000800, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000, + VK_FORMAT_FEATURE_TRANSFER_SRC_BIT = 0x00004000, + VK_FORMAT_FEATURE_TRANSFER_DST_BIT = 0x00008000, + VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT = 0x00020000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT = 0x00040000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT = 0x00080000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT = 0x00100000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000, + VK_FORMAT_FEATURE_DISJOINT_BIT = 0x00400000, + VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT = 0x00800000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000, + VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR = 0x02000000, + VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR = 0x04000000, + VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = 0x00002000, + VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000, + VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x40000000, + VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR = 0x08000000, + VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR = 0x10000000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT, + VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, + VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT, + VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, + VK_FORMAT_FEATURE_DISJOINT_BIT_KHR = VK_FORMAT_FEATURE_DISJOINT_BIT, + VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, + VK_FORMAT_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFormatFeatureFlagBits; +typedef VkFlags VkFormatFeatureFlags; + +typedef enum VkImageCreateFlagBits { + VK_IMAGE_CREATE_SPARSE_BINDING_BIT = 0x00000001, + VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, + 
VK_IMAGE_CREATE_SPARSE_ALIASED_BIT = 0x00000004, + VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT = 0x00000008, + VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT = 0x00000010, + VK_IMAGE_CREATE_ALIAS_BIT = 0x00000400, + VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT = 0x00000040, + VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT = 0x00000020, + VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT = 0x00000080, + VK_IMAGE_CREATE_EXTENDED_USAGE_BIT = 0x00000100, + VK_IMAGE_CREATE_PROTECTED_BIT = 0x00000800, + VK_IMAGE_CREATE_DISJOINT_BIT = 0x00000200, + VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV = 0x00002000, + VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT = 0x00001000, + VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT = 0x00004000, + VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00010000, + VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT = 0x00040000, + VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT = 0x00020000, + VK_IMAGE_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR = 0x00100000, + VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_EXT = 0x00008000, + VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, + VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, + VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, + VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, + VK_IMAGE_CREATE_DISJOINT_BIT_KHR = VK_IMAGE_CREATE_DISJOINT_BIT, + VK_IMAGE_CREATE_ALIAS_BIT_KHR = VK_IMAGE_CREATE_ALIAS_BIT, + VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM = VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_EXT, + VK_IMAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageCreateFlagBits; +typedef VkFlags VkImageCreateFlags; + +typedef enum VkSampleCountFlagBits { + VK_SAMPLE_COUNT_1_BIT = 0x00000001, + VK_SAMPLE_COUNT_2_BIT = 0x00000002, + VK_SAMPLE_COUNT_4_BIT = 0x00000004, + VK_SAMPLE_COUNT_8_BIT = 0x00000008, + VK_SAMPLE_COUNT_16_BIT = 0x00000010, + VK_SAMPLE_COUNT_32_BIT = 0x00000020, + VK_SAMPLE_COUNT_64_BIT = 0x00000040, + VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSampleCountFlagBits; +typedef VkFlags VkSampleCountFlags; + +typedef enum VkImageUsageFlagBits { + VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001, + VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002, + VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004, + VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008, + VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010, + VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020, + VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040, + VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080, + VK_IMAGE_USAGE_HOST_TRANSFER_BIT = 0x00400000, + VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR = 0x00000400, + VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR = 0x00000800, + VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR = 0x00001000, + VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x00000200, + VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00000100, + VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 0x00002000, + VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00004000, + VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR = 0x00008000, + VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x00080000, + VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI = 0x00040000, + VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM = 0x00100000, + VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM = 0x00200000, + VK_IMAGE_USAGE_TILE_MEMORY_QCOM = 0x08000000, + VK_IMAGE_USAGE_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR = 0x02000000, + 
VK_IMAGE_USAGE_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR = 0x04000000, + VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT = VK_IMAGE_USAGE_HOST_TRANSFER_BIT, + VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageUsageFlagBits; +typedef VkFlags VkImageUsageFlags; + +typedef enum VkInstanceCreateFlagBits { + VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR = 0x00000001, + VK_INSTANCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkInstanceCreateFlagBits; +typedef VkFlags VkInstanceCreateFlags; + +typedef enum VkMemoryHeapFlagBits { + VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001, + VK_MEMORY_HEAP_MULTI_INSTANCE_BIT = 0x00000002, + VK_MEMORY_HEAP_TILE_MEMORY_BIT_QCOM = 0x00000008, + VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, + VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryHeapFlagBits; +typedef VkFlags VkMemoryHeapFlags; + +typedef enum VkMemoryPropertyFlagBits { + VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT = 0x00000001, + VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT = 0x00000002, + VK_MEMORY_PROPERTY_HOST_COHERENT_BIT = 0x00000004, + VK_MEMORY_PROPERTY_HOST_CACHED_BIT = 0x00000008, + VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT = 0x00000010, + VK_MEMORY_PROPERTY_PROTECTED_BIT = 0x00000020, + VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD = 0x00000040, + VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD = 0x00000080, + VK_MEMORY_PROPERTY_RDMA_CAPABLE_BIT_NV = 0x00000100, + VK_MEMORY_PROPERTY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryPropertyFlagBits; +typedef VkFlags VkMemoryPropertyFlags; + +typedef enum VkQueueFlagBits { + VK_QUEUE_GRAPHICS_BIT = 0x00000001, + VK_QUEUE_COMPUTE_BIT = 0x00000002, + VK_QUEUE_TRANSFER_BIT = 0x00000004, + VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008, + VK_QUEUE_PROTECTED_BIT = 0x00000010, + VK_QUEUE_VIDEO_DECODE_BIT_KHR = 0x00000020, + VK_QUEUE_VIDEO_ENCODE_BIT_KHR = 0x00000040, + VK_QUEUE_OPTICAL_FLOW_BIT_NV = 0x00000100, + VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueueFlagBits; +typedef VkFlags VkQueueFlags; +typedef VkFlags VkDeviceCreateFlags; + +typedef enum VkDeviceQueueCreateFlagBits { + VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT = 0x00000001, + VK_DEVICE_QUEUE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDeviceQueueCreateFlagBits; +typedef VkFlags VkDeviceQueueCreateFlags; + +typedef enum VkPipelineStageFlagBits { + VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT = 0x00000001, + VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT = 0x00000002, + VK_PIPELINE_STAGE_VERTEX_INPUT_BIT = 0x00000004, + VK_PIPELINE_STAGE_VERTEX_SHADER_BIT = 0x00000008, + VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010, + VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020, + VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT = 0x00000040, + VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT = 0x00000080, + VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT = 0x00000100, + VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT = 0x00000200, + VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400, + VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT = 0x00000800, + VK_PIPELINE_STAGE_TRANSFER_BIT = 0x00001000, + VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT = 0x00002000, + VK_PIPELINE_STAGE_HOST_BIT = 0x00004000, + VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT = 0x00008000, + VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000, + VK_PIPELINE_STAGE_NONE = 0, + VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000, + VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000, + VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000, + 
VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR = 0x00200000, + VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000, + VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000, + VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT = 0x00080000, + VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT = 0x00100000, + VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_EXT = 0x00020000, + VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR, + VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, + VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT, + VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT, + VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_EXT, + VK_PIPELINE_STAGE_NONE_KHR = VK_PIPELINE_STAGE_NONE, + VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineStageFlagBits; +typedef VkFlags VkPipelineStageFlags; + +typedef enum VkMemoryMapFlagBits { + VK_MEMORY_MAP_PLACED_BIT_EXT = 0x00000001, + VK_MEMORY_MAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryMapFlagBits; +typedef VkFlags VkMemoryMapFlags; + +typedef enum VkSparseMemoryBindFlagBits { + VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001, + VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSparseMemoryBindFlagBits; +typedef VkFlags VkSparseMemoryBindFlags; + +typedef enum VkSparseImageFormatFlagBits { + VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT = 0x00000001, + VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT = 0x00000002, + VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT = 0x00000004, + VK_SPARSE_IMAGE_FORMAT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSparseImageFormatFlagBits; +typedef VkFlags VkSparseImageFormatFlags; + +typedef enum VkFenceCreateFlagBits { + VK_FENCE_CREATE_SIGNALED_BIT = 0x00000001, + VK_FENCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFenceCreateFlagBits; +typedef VkFlags VkFenceCreateFlags; +typedef VkFlags VkSemaphoreCreateFlags; + +typedef enum VkEventCreateFlagBits { + VK_EVENT_CREATE_DEVICE_ONLY_BIT = 0x00000001, + VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR = VK_EVENT_CREATE_DEVICE_ONLY_BIT, + VK_EVENT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkEventCreateFlagBits; +typedef VkFlags VkEventCreateFlags; + +typedef enum VkQueryPipelineStatisticFlagBits { + VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT = 0x00000001, + VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT = 0x00000002, + VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT = 0x00000004, + VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT = 0x00000008, + VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT = 0x00000010, + VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT = 0x00000020, + VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT = 0x00000040, + VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT = 0x00000080, + VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT = 0x00000100, + VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT = 0x00000200, + VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT = 0x00000400, + VK_QUERY_PIPELINE_STATISTIC_TASK_SHADER_INVOCATIONS_BIT_EXT = 0x00000800, + VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT = 0x00001000, + VK_QUERY_PIPELINE_STATISTIC_CLUSTER_CULLING_SHADER_INVOCATIONS_BIT_HUAWEI = 0x00002000, + 
VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryPipelineStatisticFlagBits; +typedef VkFlags VkQueryPipelineStatisticFlags; +typedef VkFlags VkQueryPoolCreateFlags; + +typedef enum VkQueryResultFlagBits { + VK_QUERY_RESULT_64_BIT = 0x00000001, + VK_QUERY_RESULT_WAIT_BIT = 0x00000002, + VK_QUERY_RESULT_WITH_AVAILABILITY_BIT = 0x00000004, + VK_QUERY_RESULT_PARTIAL_BIT = 0x00000008, + VK_QUERY_RESULT_WITH_STATUS_BIT_KHR = 0x00000010, + VK_QUERY_RESULT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryResultFlagBits; +typedef VkFlags VkQueryResultFlags; + +typedef enum VkBufferCreateFlagBits { + VK_BUFFER_CREATE_SPARSE_BINDING_BIT = 0x00000001, + VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, + VK_BUFFER_CREATE_SPARSE_ALIASED_BIT = 0x00000004, + VK_BUFFER_CREATE_PROTECTED_BIT = 0x00000008, + VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT = 0x00000010, + VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000020, + VK_BUFFER_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR = 0x00000040, + VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, + VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, + VK_BUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkBufferCreateFlagBits; +typedef VkFlags VkBufferCreateFlags; + +typedef enum VkBufferUsageFlagBits { + VK_BUFFER_USAGE_TRANSFER_SRC_BIT = 0x00000001, + VK_BUFFER_USAGE_TRANSFER_DST_BIT = 0x00000002, + VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000004, + VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT = 0x00000008, + VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT = 0x00000010, + VK_BUFFER_USAGE_STORAGE_BUFFER_BIT = 0x00000020, + VK_BUFFER_USAGE_INDEX_BUFFER_BIT = 0x00000040, + VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080, + VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100, + VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT = 0x00020000, + VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR = 0x00002000, + VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR = 0x00004000, + VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800, + VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000, + VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX = 0x02000000, +#endif + VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000, + VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000, + VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400, + VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 0x00008000, + VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00010000, + VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT = 0x00200000, + VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00400000, + VK_BUFFER_USAGE_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT = 0x04000000, + VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT = 0x00800000, + VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT = 0x01000000, + VK_BUFFER_USAGE_TILE_MEMORY_QCOM = 0x08000000, + VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR, + VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, + VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, + VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkBufferUsageFlagBits; +typedef VkFlags VkBufferUsageFlags; +typedef VkFlags VkBufferViewCreateFlags; + +typedef enum VkImageViewCreateFlagBits { + 
VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT = 0x00000001, + VK_IMAGE_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000004, + VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT = 0x00000002, + VK_IMAGE_VIEW_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageViewCreateFlagBits; +typedef VkFlags VkImageViewCreateFlags; +typedef VkFlags VkShaderModuleCreateFlags; + +typedef enum VkPipelineCacheCreateFlagBits { + VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT = 0x00000001, + VK_PIPELINE_CACHE_CREATE_INTERNALLY_SYNCHRONIZED_MERGE_BIT_KHR = 0x00000008, + VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT, + VK_PIPELINE_CACHE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCacheCreateFlagBits; +typedef VkFlags VkPipelineCacheCreateFlags; + +typedef enum VkColorComponentFlagBits { + VK_COLOR_COMPONENT_R_BIT = 0x00000001, + VK_COLOR_COMPONENT_G_BIT = 0x00000002, + VK_COLOR_COMPONENT_B_BIT = 0x00000004, + VK_COLOR_COMPONENT_A_BIT = 0x00000008, + VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkColorComponentFlagBits; +typedef VkFlags VkColorComponentFlags; + +typedef enum VkPipelineCreateFlagBits { + VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT = 0x00000001, + VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 0x00000002, + VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004, + VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT = 0x00000008, + VK_PIPELINE_CREATE_DISPATCH_BASE_BIT = 0x00000010, + VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT = 0x00000100, + VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT = 0x00000200, + VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT = 0x08000000, + VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT = 0x40000000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000, + VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 0x00001000, + VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000, + VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000, + VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV = 0x00000020, + VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000, + VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000, + VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR = 0x00000040, + VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080, + VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00040000, + VK_PIPELINE_CREATE_LIBRARY_BIT_KHR = 0x00000800, + VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT = 0x20000000, + VK_PIPELINE_CREATE_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT = 0x00800000, + VK_PIPELINE_CREATE_LINK_TIME_OPTIMIZATION_BIT_EXT = 0x00000400, + VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV = 0x00100000, + VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x02000000, + VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x04000000, + VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT = 0x01000000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_PIPELINE_CREATE_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV = 0x10000000, +#endif + VK_PIPELINE_CREATE_DISPATCH_BASE = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, + VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = 
VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE, + // VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT is a deprecated alias + VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + // VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR is a deprecated alias + VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT, + VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT, + VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT = VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT, + VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT = VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT, + VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCreateFlagBits; +typedef VkFlags VkPipelineCreateFlags; + +typedef enum VkPipelineShaderStageCreateFlagBits { + VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT = 0x00000001, + VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT = 0x00000002, + VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT, + VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT, + VK_PIPELINE_SHADER_STAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineShaderStageCreateFlagBits; +typedef VkFlags VkPipelineShaderStageCreateFlags; + +typedef enum VkShaderStageFlagBits { + VK_SHADER_STAGE_VERTEX_BIT = 0x00000001, + VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT = 0x00000002, + VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT = 0x00000004, + VK_SHADER_STAGE_GEOMETRY_BIT = 0x00000008, + VK_SHADER_STAGE_FRAGMENT_BIT = 0x00000010, + VK_SHADER_STAGE_COMPUTE_BIT = 0x00000020, + VK_SHADER_STAGE_ALL_GRAPHICS = 0x0000001F, + VK_SHADER_STAGE_ALL = 0x7FFFFFFF, + VK_SHADER_STAGE_RAYGEN_BIT_KHR = 0x00000100, + VK_SHADER_STAGE_ANY_HIT_BIT_KHR = 0x00000200, + VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR = 0x00000400, + VK_SHADER_STAGE_MISS_BIT_KHR = 0x00000800, + VK_SHADER_STAGE_INTERSECTION_BIT_KHR = 0x00001000, + VK_SHADER_STAGE_CALLABLE_BIT_KHR = 0x00002000, + VK_SHADER_STAGE_TASK_BIT_EXT = 0x00000040, + VK_SHADER_STAGE_MESH_BIT_EXT = 0x00000080, + VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI = 0x00004000, + VK_SHADER_STAGE_CLUSTER_CULLING_BIT_HUAWEI = 0x00080000, + VK_SHADER_STAGE_RAYGEN_BIT_NV = VK_SHADER_STAGE_RAYGEN_BIT_KHR, + VK_SHADER_STAGE_ANY_HIT_BIT_NV = VK_SHADER_STAGE_ANY_HIT_BIT_KHR, + VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, + VK_SHADER_STAGE_MISS_BIT_NV = VK_SHADER_STAGE_MISS_BIT_KHR, + VK_SHADER_STAGE_INTERSECTION_BIT_NV = VK_SHADER_STAGE_INTERSECTION_BIT_KHR, + VK_SHADER_STAGE_CALLABLE_BIT_NV = VK_SHADER_STAGE_CALLABLE_BIT_KHR, + VK_SHADER_STAGE_TASK_BIT_NV = VK_SHADER_STAGE_TASK_BIT_EXT, + VK_SHADER_STAGE_MESH_BIT_NV = VK_SHADER_STAGE_MESH_BIT_EXT, + VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkShaderStageFlagBits; + +typedef enum VkCullModeFlagBits { + VK_CULL_MODE_NONE = 0, + VK_CULL_MODE_FRONT_BIT = 0x00000001, + VK_CULL_MODE_BACK_BIT = 0x00000002, + VK_CULL_MODE_FRONT_AND_BACK = 0x00000003, + 
VK_CULL_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCullModeFlagBits; +typedef VkFlags VkCullModeFlags; +typedef VkFlags VkPipelineVertexInputStateCreateFlags; +typedef VkFlags VkPipelineInputAssemblyStateCreateFlags; +typedef VkFlags VkPipelineTessellationStateCreateFlags; +typedef VkFlags VkPipelineViewportStateCreateFlags; +typedef VkFlags VkPipelineRasterizationStateCreateFlags; +typedef VkFlags VkPipelineMultisampleStateCreateFlags; + +typedef enum VkPipelineDepthStencilStateCreateFlagBits { + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT = 0x00000001, + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT = 0x00000002, + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT, + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT, + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineDepthStencilStateCreateFlagBits; +typedef VkFlags VkPipelineDepthStencilStateCreateFlags; + +typedef enum VkPipelineColorBlendStateCreateFlagBits { + VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT = 0x00000001, + VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT, + VK_PIPELINE_COLOR_BLEND_STATE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineColorBlendStateCreateFlagBits; +typedef VkFlags VkPipelineColorBlendStateCreateFlags; +typedef VkFlags VkPipelineDynamicStateCreateFlags; + +typedef enum VkPipelineLayoutCreateFlagBits { + VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT = 0x00000002, + VK_PIPELINE_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineLayoutCreateFlagBits; +typedef VkFlags VkPipelineLayoutCreateFlags; +typedef VkFlags VkShaderStageFlags; + +typedef enum VkSamplerCreateFlagBits { + VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT = 0x00000001, + VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT = 0x00000002, + VK_SAMPLER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000008, + VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT = 0x00000004, + VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM = 0x00000010, + VK_SAMPLER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSamplerCreateFlagBits; +typedef VkFlags VkSamplerCreateFlags; + +typedef enum VkDescriptorPoolCreateFlagBits { + VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001, + VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT = 0x00000002, + VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT = 0x00000004, + VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_SETS_BIT_NV = 0x00000008, + VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_POOLS_BIT_NV = 0x00000010, + VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT, + VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT, + VK_DESCRIPTOR_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorPoolCreateFlagBits; +typedef VkFlags VkDescriptorPoolCreateFlags; +typedef VkFlags VkDescriptorPoolResetFlags; + +typedef enum VkDescriptorSetLayoutCreateFlagBits { + VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT = 0x00000002, + 
VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT = 0x00000001, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00000010, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT = 0x00000020, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00000080, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT = 0x00000004, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_PER_STAGE_BIT_NV = 0x00000040, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorSetLayoutCreateFlagBits; +typedef VkFlags VkDescriptorSetLayoutCreateFlags; + +typedef enum VkAttachmentDescriptionFlagBits { + VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT = 0x00000001, + VK_ATTACHMENT_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkAttachmentDescriptionFlagBits; +typedef VkFlags VkAttachmentDescriptionFlags; + +typedef enum VkDependencyFlagBits { + VK_DEPENDENCY_BY_REGION_BIT = 0x00000001, + VK_DEPENDENCY_DEVICE_GROUP_BIT = 0x00000004, + VK_DEPENDENCY_VIEW_LOCAL_BIT = 0x00000002, + VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT = 0x00000008, + VK_DEPENDENCY_QUEUE_FAMILY_OWNERSHIP_TRANSFER_USE_ALL_STAGES_BIT_KHR = 0x00000020, + VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR = VK_DEPENDENCY_VIEW_LOCAL_BIT, + VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR = VK_DEPENDENCY_DEVICE_GROUP_BIT, + VK_DEPENDENCY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDependencyFlagBits; +typedef VkFlags VkDependencyFlags; + +typedef enum VkFramebufferCreateFlagBits { + VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT = 0x00000001, + VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, + VK_FRAMEBUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFramebufferCreateFlagBits; +typedef VkFlags VkFramebufferCreateFlags; + +typedef enum VkRenderPassCreateFlagBits { + VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM = 0x00000002, + VK_RENDER_PASS_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkRenderPassCreateFlagBits; +typedef VkFlags VkRenderPassCreateFlags; + +typedef enum VkSubpassDescriptionFlagBits { + VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX = 0x00000001, + VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002, + VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM = 0x00000004, + VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM = 0x00000008, + VK_SUBPASS_DESCRIPTION_TILE_SHADING_APRON_BIT_QCOM = 0x00000100, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT = 0x00000010, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT = 0x00000020, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT = 0x00000040, + VK_SUBPASS_DESCRIPTION_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x00000080, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT, + VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} 
VkSubpassDescriptionFlagBits; +typedef VkFlags VkSubpassDescriptionFlags; + +typedef enum VkCommandPoolCreateFlagBits { + VK_COMMAND_POOL_CREATE_TRANSIENT_BIT = 0x00000001, + VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT = 0x00000002, + VK_COMMAND_POOL_CREATE_PROTECTED_BIT = 0x00000004, + VK_COMMAND_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandPoolCreateFlagBits; +typedef VkFlags VkCommandPoolCreateFlags; + +typedef enum VkCommandPoolResetFlagBits { + VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT = 0x00000001, + VK_COMMAND_POOL_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandPoolResetFlagBits; +typedef VkFlags VkCommandPoolResetFlags; + +typedef enum VkCommandBufferUsageFlagBits { + VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT = 0x00000001, + VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT = 0x00000002, + VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT = 0x00000004, + VK_COMMAND_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferUsageFlagBits; +typedef VkFlags VkCommandBufferUsageFlags; + +typedef enum VkQueryControlFlagBits { + VK_QUERY_CONTROL_PRECISE_BIT = 0x00000001, + VK_QUERY_CONTROL_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryControlFlagBits; +typedef VkFlags VkQueryControlFlags; + +typedef enum VkCommandBufferResetFlagBits { + VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT = 0x00000001, + VK_COMMAND_BUFFER_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferResetFlagBits; +typedef VkFlags VkCommandBufferResetFlags; + +typedef enum VkStencilFaceFlagBits { + VK_STENCIL_FACE_FRONT_BIT = 0x00000001, + VK_STENCIL_FACE_BACK_BIT = 0x00000002, + VK_STENCIL_FACE_FRONT_AND_BACK = 0x00000003, + // VK_STENCIL_FRONT_AND_BACK is a deprecated alias + VK_STENCIL_FRONT_AND_BACK = VK_STENCIL_FACE_FRONT_AND_BACK, + VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkStencilFaceFlagBits; +typedef VkFlags VkStencilFaceFlags; +typedef struct VkExtent2D { + uint32_t width; + uint32_t height; +} VkExtent2D; + +typedef struct VkExtent3D { + uint32_t width; + uint32_t height; + uint32_t depth; +} VkExtent3D; + +typedef struct VkOffset2D { + int32_t x; + int32_t y; +} VkOffset2D; + +typedef struct VkOffset3D { + int32_t x; + int32_t y; + int32_t z; +} VkOffset3D; + +typedef struct VkRect2D { + VkOffset2D offset; + VkExtent2D extent; +} VkRect2D; + +typedef struct VkBaseInStructure { + VkStructureType sType; + const struct VkBaseInStructure* pNext; +} VkBaseInStructure; + +typedef struct VkBaseOutStructure { + VkStructureType sType; + struct VkBaseOutStructure* pNext; +} VkBaseOutStructure; + +typedef struct VkBufferMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; +} VkBufferMemoryBarrier; + +typedef struct VkDispatchIndirectCommand { + uint32_t x; + uint32_t y; + uint32_t z; +} VkDispatchIndirectCommand; + +typedef struct VkDrawIndexedIndirectCommand { + uint32_t indexCount; + uint32_t instanceCount; + uint32_t firstIndex; + int32_t vertexOffset; + uint32_t firstInstance; +} VkDrawIndexedIndirectCommand; + +typedef struct VkDrawIndirectCommand { + uint32_t vertexCount; + uint32_t instanceCount; + uint32_t firstVertex; + uint32_t firstInstance; +} VkDrawIndirectCommand; + +typedef struct VkImageSubresourceRange { + VkImageAspectFlags aspectMask; + uint32_t baseMipLevel; + uint32_t levelCount; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkImageSubresourceRange; + +typedef struct 
VkImageMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkImageLayout oldLayout; + VkImageLayout newLayout; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkImage image; + VkImageSubresourceRange subresourceRange; +} VkImageMemoryBarrier; + +typedef struct VkMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; +} VkMemoryBarrier; + +typedef struct VkPipelineCacheHeaderVersionOne { + uint32_t headerSize; + VkPipelineCacheHeaderVersion headerVersion; + uint32_t vendorID; + uint32_t deviceID; + uint8_t pipelineCacheUUID[VK_UUID_SIZE]; +} VkPipelineCacheHeaderVersionOne; + +typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)( + void* pUserData, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkFreeFunction)( + void* pUserData, + void* pMemory); + +typedef void (VKAPI_PTR *PFN_vkInternalAllocationNotification)( + void* pUserData, + size_t size, + VkInternalAllocationType allocationType, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkInternalFreeNotification)( + void* pUserData, + size_t size, + VkInternalAllocationType allocationType, + VkSystemAllocationScope allocationScope); + +typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)( + void* pUserData, + void* pOriginal, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void); +typedef struct VkAllocationCallbacks { + void* pUserData; + PFN_vkAllocationFunction pfnAllocation; + PFN_vkReallocationFunction pfnReallocation; + PFN_vkFreeFunction pfnFree; + PFN_vkInternalAllocationNotification pfnInternalAllocation; + PFN_vkInternalFreeNotification pfnInternalFree; +} VkAllocationCallbacks; + +typedef struct VkApplicationInfo { + VkStructureType sType; + const void* pNext; + const char* pApplicationName; + uint32_t applicationVersion; + const char* pEngineName; + uint32_t engineVersion; + uint32_t apiVersion; +} VkApplicationInfo; + +typedef struct VkFormatProperties { + VkFormatFeatureFlags linearTilingFeatures; + VkFormatFeatureFlags optimalTilingFeatures; + VkFormatFeatureFlags bufferFeatures; +} VkFormatProperties; + +typedef struct VkImageFormatProperties { + VkExtent3D maxExtent; + uint32_t maxMipLevels; + uint32_t maxArrayLayers; + VkSampleCountFlags sampleCounts; + VkDeviceSize maxResourceSize; +} VkImageFormatProperties; + +typedef struct VkInstanceCreateInfo { + VkStructureType sType; + const void* pNext; + VkInstanceCreateFlags flags; + const VkApplicationInfo* pApplicationInfo; + uint32_t enabledLayerCount; + const char* const* ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; +} VkInstanceCreateInfo; + +typedef struct VkMemoryHeap { + VkDeviceSize size; + VkMemoryHeapFlags flags; +} VkMemoryHeap; + +typedef struct VkMemoryType { + VkMemoryPropertyFlags propertyFlags; + uint32_t heapIndex; +} VkMemoryType; + +typedef struct VkPhysicalDeviceFeatures { + VkBool32 robustBufferAccess; + VkBool32 fullDrawIndexUint32; + VkBool32 imageCubeArray; + VkBool32 independentBlend; + VkBool32 geometryShader; + VkBool32 tessellationShader; + VkBool32 sampleRateShading; + VkBool32 dualSrcBlend; + VkBool32 logicOp; + VkBool32 multiDrawIndirect; + VkBool32 drawIndirectFirstInstance; + VkBool32 depthClamp; + VkBool32 depthBiasClamp; + VkBool32 fillModeNonSolid; + VkBool32 
depthBounds; + VkBool32 wideLines; + VkBool32 largePoints; + VkBool32 alphaToOne; + VkBool32 multiViewport; + VkBool32 samplerAnisotropy; + VkBool32 textureCompressionETC2; + VkBool32 textureCompressionASTC_LDR; + VkBool32 textureCompressionBC; + VkBool32 occlusionQueryPrecise; + VkBool32 pipelineStatisticsQuery; + VkBool32 vertexPipelineStoresAndAtomics; + VkBool32 fragmentStoresAndAtomics; + VkBool32 shaderTessellationAndGeometryPointSize; + VkBool32 shaderImageGatherExtended; + VkBool32 shaderStorageImageExtendedFormats; + VkBool32 shaderStorageImageMultisample; + VkBool32 shaderStorageImageReadWithoutFormat; + VkBool32 shaderStorageImageWriteWithoutFormat; + VkBool32 shaderUniformBufferArrayDynamicIndexing; + VkBool32 shaderSampledImageArrayDynamicIndexing; + VkBool32 shaderStorageBufferArrayDynamicIndexing; + VkBool32 shaderStorageImageArrayDynamicIndexing; + VkBool32 shaderClipDistance; + VkBool32 shaderCullDistance; + VkBool32 shaderFloat64; + VkBool32 shaderInt64; + VkBool32 shaderInt16; + VkBool32 shaderResourceResidency; + VkBool32 shaderResourceMinLod; + VkBool32 sparseBinding; + VkBool32 sparseResidencyBuffer; + VkBool32 sparseResidencyImage2D; + VkBool32 sparseResidencyImage3D; + VkBool32 sparseResidency2Samples; + VkBool32 sparseResidency4Samples; + VkBool32 sparseResidency8Samples; + VkBool32 sparseResidency16Samples; + VkBool32 sparseResidencyAliased; + VkBool32 variableMultisampleRate; + VkBool32 inheritedQueries; +} VkPhysicalDeviceFeatures; + +typedef struct VkPhysicalDeviceLimits { + uint32_t maxImageDimension1D; + uint32_t maxImageDimension2D; + uint32_t maxImageDimension3D; + uint32_t maxImageDimensionCube; + uint32_t maxImageArrayLayers; + uint32_t maxTexelBufferElements; + uint32_t maxUniformBufferRange; + uint32_t maxStorageBufferRange; + uint32_t maxPushConstantsSize; + uint32_t maxMemoryAllocationCount; + uint32_t maxSamplerAllocationCount; + VkDeviceSize bufferImageGranularity; + VkDeviceSize sparseAddressSpaceSize; + uint32_t maxBoundDescriptorSets; + uint32_t maxPerStageDescriptorSamplers; + uint32_t maxPerStageDescriptorUniformBuffers; + uint32_t maxPerStageDescriptorStorageBuffers; + uint32_t maxPerStageDescriptorSampledImages; + uint32_t maxPerStageDescriptorStorageImages; + uint32_t maxPerStageDescriptorInputAttachments; + uint32_t maxPerStageResources; + uint32_t maxDescriptorSetSamplers; + uint32_t maxDescriptorSetUniformBuffers; + uint32_t maxDescriptorSetUniformBuffersDynamic; + uint32_t maxDescriptorSetStorageBuffers; + uint32_t maxDescriptorSetStorageBuffersDynamic; + uint32_t maxDescriptorSetSampledImages; + uint32_t maxDescriptorSetStorageImages; + uint32_t maxDescriptorSetInputAttachments; + uint32_t maxVertexInputAttributes; + uint32_t maxVertexInputBindings; + uint32_t maxVertexInputAttributeOffset; + uint32_t maxVertexInputBindingStride; + uint32_t maxVertexOutputComponents; + uint32_t maxTessellationGenerationLevel; + uint32_t maxTessellationPatchSize; + uint32_t maxTessellationControlPerVertexInputComponents; + uint32_t maxTessellationControlPerVertexOutputComponents; + uint32_t maxTessellationControlPerPatchOutputComponents; + uint32_t maxTessellationControlTotalOutputComponents; + uint32_t maxTessellationEvaluationInputComponents; + uint32_t maxTessellationEvaluationOutputComponents; + uint32_t maxGeometryShaderInvocations; + uint32_t maxGeometryInputComponents; + uint32_t maxGeometryOutputComponents; + uint32_t maxGeometryOutputVertices; + uint32_t maxGeometryTotalOutputComponents; + uint32_t maxFragmentInputComponents; + uint32_t 
maxFragmentOutputAttachments; + uint32_t maxFragmentDualSrcAttachments; + uint32_t maxFragmentCombinedOutputResources; + uint32_t maxComputeSharedMemorySize; + uint32_t maxComputeWorkGroupCount[3]; + uint32_t maxComputeWorkGroupInvocations; + uint32_t maxComputeWorkGroupSize[3]; + uint32_t subPixelPrecisionBits; + uint32_t subTexelPrecisionBits; + uint32_t mipmapPrecisionBits; + uint32_t maxDrawIndexedIndexValue; + uint32_t maxDrawIndirectCount; + float maxSamplerLodBias; + float maxSamplerAnisotropy; + uint32_t maxViewports; + uint32_t maxViewportDimensions[2]; + float viewportBoundsRange[2]; + uint32_t viewportSubPixelBits; + size_t minMemoryMapAlignment; + VkDeviceSize minTexelBufferOffsetAlignment; + VkDeviceSize minUniformBufferOffsetAlignment; + VkDeviceSize minStorageBufferOffsetAlignment; + int32_t minTexelOffset; + uint32_t maxTexelOffset; + int32_t minTexelGatherOffset; + uint32_t maxTexelGatherOffset; + float minInterpolationOffset; + float maxInterpolationOffset; + uint32_t subPixelInterpolationOffsetBits; + uint32_t maxFramebufferWidth; + uint32_t maxFramebufferHeight; + uint32_t maxFramebufferLayers; + VkSampleCountFlags framebufferColorSampleCounts; + VkSampleCountFlags framebufferDepthSampleCounts; + VkSampleCountFlags framebufferStencilSampleCounts; + VkSampleCountFlags framebufferNoAttachmentsSampleCounts; + uint32_t maxColorAttachments; + VkSampleCountFlags sampledImageColorSampleCounts; + VkSampleCountFlags sampledImageIntegerSampleCounts; + VkSampleCountFlags sampledImageDepthSampleCounts; + VkSampleCountFlags sampledImageStencilSampleCounts; + VkSampleCountFlags storageImageSampleCounts; + uint32_t maxSampleMaskWords; + VkBool32 timestampComputeAndGraphics; + float timestampPeriod; + uint32_t maxClipDistances; + uint32_t maxCullDistances; + uint32_t maxCombinedClipAndCullDistances; + uint32_t discreteQueuePriorities; + float pointSizeRange[2]; + float lineWidthRange[2]; + float pointSizeGranularity; + float lineWidthGranularity; + VkBool32 strictLines; + VkBool32 standardSampleLocations; + VkDeviceSize optimalBufferCopyOffsetAlignment; + VkDeviceSize optimalBufferCopyRowPitchAlignment; + VkDeviceSize nonCoherentAtomSize; +} VkPhysicalDeviceLimits; + +typedef struct VkPhysicalDeviceMemoryProperties { + uint32_t memoryTypeCount; + VkMemoryType memoryTypes[VK_MAX_MEMORY_TYPES]; + uint32_t memoryHeapCount; + VkMemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS]; +} VkPhysicalDeviceMemoryProperties; + +typedef struct VkPhysicalDeviceSparseProperties { + VkBool32 residencyStandard2DBlockShape; + VkBool32 residencyStandard2DMultisampleBlockShape; + VkBool32 residencyStandard3DBlockShape; + VkBool32 residencyAlignedMipSize; + VkBool32 residencyNonResidentStrict; +} VkPhysicalDeviceSparseProperties; + +typedef struct VkPhysicalDeviceProperties { + uint32_t apiVersion; + uint32_t driverVersion; + uint32_t vendorID; + uint32_t deviceID; + VkPhysicalDeviceType deviceType; + char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]; + uint8_t pipelineCacheUUID[VK_UUID_SIZE]; + VkPhysicalDeviceLimits limits; + VkPhysicalDeviceSparseProperties sparseProperties; +} VkPhysicalDeviceProperties; + +typedef struct VkQueueFamilyProperties { + VkQueueFlags queueFlags; + uint32_t queueCount; + uint32_t timestampValidBits; + VkExtent3D minImageTransferGranularity; +} VkQueueFamilyProperties; + +typedef struct VkDeviceQueueCreateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceQueueCreateFlags flags; + uint32_t queueFamilyIndex; + uint32_t queueCount; + const float* pQueuePriorities; +} 
VkDeviceQueueCreateInfo; + +typedef struct VkDeviceCreateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceCreateFlags flags; + uint32_t queueCreateInfoCount; + const VkDeviceQueueCreateInfo* pQueueCreateInfos; + // enabledLayerCount is deprecated and should not be used + uint32_t enabledLayerCount; + // ppEnabledLayerNames is deprecated and should not be used + const char* const* ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; + const VkPhysicalDeviceFeatures* pEnabledFeatures; +} VkDeviceCreateInfo; + +typedef struct VkExtensionProperties { + char extensionName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; +} VkExtensionProperties; + +typedef struct VkLayerProperties { + char layerName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; + uint32_t implementationVersion; + char description[VK_MAX_DESCRIPTION_SIZE]; +} VkLayerProperties; + +typedef struct VkSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + const VkPipelineStageFlags* pWaitDstStageMask; + uint32_t commandBufferCount; + const VkCommandBuffer* pCommandBuffers; + uint32_t signalSemaphoreCount; + const VkSemaphore* pSignalSemaphores; +} VkSubmitInfo; + +typedef struct VkMappedMemoryRange { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkDeviceSize offset; + VkDeviceSize size; +} VkMappedMemoryRange; + +typedef struct VkMemoryAllocateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceSize allocationSize; + uint32_t memoryTypeIndex; +} VkMemoryAllocateInfo; + +typedef struct VkMemoryRequirements { + VkDeviceSize size; + VkDeviceSize alignment; + uint32_t memoryTypeBits; +} VkMemoryRequirements; + +typedef struct VkSparseMemoryBind { + VkDeviceSize resourceOffset; + VkDeviceSize size; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkSparseMemoryBindFlags flags; +} VkSparseMemoryBind; + +typedef struct VkSparseBufferMemoryBindInfo { + VkBuffer buffer; + uint32_t bindCount; + const VkSparseMemoryBind* pBinds; +} VkSparseBufferMemoryBindInfo; + +typedef struct VkSparseImageOpaqueMemoryBindInfo { + VkImage image; + uint32_t bindCount; + const VkSparseMemoryBind* pBinds; +} VkSparseImageOpaqueMemoryBindInfo; + +typedef struct VkImageSubresource { + VkImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t arrayLayer; +} VkImageSubresource; + +typedef struct VkSparseImageMemoryBind { + VkImageSubresource subresource; + VkOffset3D offset; + VkExtent3D extent; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkSparseMemoryBindFlags flags; +} VkSparseImageMemoryBind; + +typedef struct VkSparseImageMemoryBindInfo { + VkImage image; + uint32_t bindCount; + const VkSparseImageMemoryBind* pBinds; +} VkSparseImageMemoryBindInfo; + +typedef struct VkBindSparseInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + uint32_t bufferBindCount; + const VkSparseBufferMemoryBindInfo* pBufferBinds; + uint32_t imageOpaqueBindCount; + const VkSparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds; + uint32_t imageBindCount; + const VkSparseImageMemoryBindInfo* pImageBinds; + uint32_t signalSemaphoreCount; + const VkSemaphore* pSignalSemaphores; +} VkBindSparseInfo; + +typedef struct VkSparseImageFormatProperties { + VkImageAspectFlags aspectMask; + VkExtent3D imageGranularity; + VkSparseImageFormatFlags flags; +} VkSparseImageFormatProperties; + +typedef struct 
VkSparseImageMemoryRequirements { + VkSparseImageFormatProperties formatProperties; + uint32_t imageMipTailFirstLod; + VkDeviceSize imageMipTailSize; + VkDeviceSize imageMipTailOffset; + VkDeviceSize imageMipTailStride; +} VkSparseImageMemoryRequirements; + +typedef struct VkFenceCreateInfo { + VkStructureType sType; + const void* pNext; + VkFenceCreateFlags flags; +} VkFenceCreateInfo; + +typedef struct VkSemaphoreCreateInfo { + VkStructureType sType; + const void* pNext; + VkSemaphoreCreateFlags flags; +} VkSemaphoreCreateInfo; + +typedef struct VkEventCreateInfo { + VkStructureType sType; + const void* pNext; + VkEventCreateFlags flags; +} VkEventCreateInfo; + +typedef struct VkQueryPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkQueryPoolCreateFlags flags; + VkQueryType queryType; + uint32_t queryCount; + VkQueryPipelineStatisticFlags pipelineStatistics; +} VkQueryPoolCreateInfo; + +typedef struct VkBufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkBufferCreateFlags flags; + VkDeviceSize size; + VkBufferUsageFlags usage; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; +} VkBufferCreateInfo; + +typedef struct VkBufferViewCreateInfo { + VkStructureType sType; + const void* pNext; + VkBufferViewCreateFlags flags; + VkBuffer buffer; + VkFormat format; + VkDeviceSize offset; + VkDeviceSize range; +} VkBufferViewCreateInfo; + +typedef struct VkImageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageCreateFlags flags; + VkImageType imageType; + VkFormat format; + VkExtent3D extent; + uint32_t mipLevels; + uint32_t arrayLayers; + VkSampleCountFlagBits samples; + VkImageTiling tiling; + VkImageUsageFlags usage; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + VkImageLayout initialLayout; +} VkImageCreateInfo; + +typedef struct VkSubresourceLayout { + VkDeviceSize offset; + VkDeviceSize size; + VkDeviceSize rowPitch; + VkDeviceSize arrayPitch; + VkDeviceSize depthPitch; +} VkSubresourceLayout; + +typedef struct VkComponentMapping { + VkComponentSwizzle r; + VkComponentSwizzle g; + VkComponentSwizzle b; + VkComponentSwizzle a; +} VkComponentMapping; + +typedef struct VkImageViewCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageViewCreateFlags flags; + VkImage image; + VkImageViewType viewType; + VkFormat format; + VkComponentMapping components; + VkImageSubresourceRange subresourceRange; +} VkImageViewCreateInfo; + +typedef struct VkShaderModuleCreateInfo { + VkStructureType sType; + const void* pNext; + VkShaderModuleCreateFlags flags; + size_t codeSize; + const uint32_t* pCode; +} VkShaderModuleCreateInfo; + +typedef struct VkPipelineCacheCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCacheCreateFlags flags; + size_t initialDataSize; + const void* pInitialData; +} VkPipelineCacheCreateInfo; + +typedef struct VkSpecializationMapEntry { + uint32_t constantID; + uint32_t offset; + size_t size; +} VkSpecializationMapEntry; + +typedef struct VkSpecializationInfo { + uint32_t mapEntryCount; + const VkSpecializationMapEntry* pMapEntries; + size_t dataSize; + const void* pData; +} VkSpecializationInfo; + +typedef struct VkPipelineShaderStageCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineShaderStageCreateFlags flags; + VkShaderStageFlagBits stage; + VkShaderModule module; + const char* pName; + const VkSpecializationInfo* pSpecializationInfo; +} VkPipelineShaderStageCreateInfo; 
+ +typedef struct VkComputePipelineCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + VkPipelineShaderStageCreateInfo stage; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkComputePipelineCreateInfo; + +typedef struct VkVertexInputBindingDescription { + uint32_t binding; + uint32_t stride; + VkVertexInputRate inputRate; +} VkVertexInputBindingDescription; + +typedef struct VkVertexInputAttributeDescription { + uint32_t location; + uint32_t binding; + VkFormat format; + uint32_t offset; +} VkVertexInputAttributeDescription; + +typedef struct VkPipelineVertexInputStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineVertexInputStateCreateFlags flags; + uint32_t vertexBindingDescriptionCount; + const VkVertexInputBindingDescription* pVertexBindingDescriptions; + uint32_t vertexAttributeDescriptionCount; + const VkVertexInputAttributeDescription* pVertexAttributeDescriptions; +} VkPipelineVertexInputStateCreateInfo; + +typedef struct VkPipelineInputAssemblyStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineInputAssemblyStateCreateFlags flags; + VkPrimitiveTopology topology; + VkBool32 primitiveRestartEnable; +} VkPipelineInputAssemblyStateCreateInfo; + +typedef struct VkPipelineTessellationStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineTessellationStateCreateFlags flags; + uint32_t patchControlPoints; +} VkPipelineTessellationStateCreateInfo; + +typedef struct VkViewport { + float x; + float y; + float width; + float height; + float minDepth; + float maxDepth; +} VkViewport; + +typedef struct VkPipelineViewportStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineViewportStateCreateFlags flags; + uint32_t viewportCount; + const VkViewport* pViewports; + uint32_t scissorCount; + const VkRect2D* pScissors; +} VkPipelineViewportStateCreateInfo; + +typedef struct VkPipelineRasterizationStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationStateCreateFlags flags; + VkBool32 depthClampEnable; + VkBool32 rasterizerDiscardEnable; + VkPolygonMode polygonMode; + VkCullModeFlags cullMode; + VkFrontFace frontFace; + VkBool32 depthBiasEnable; + float depthBiasConstantFactor; + float depthBiasClamp; + float depthBiasSlopeFactor; + float lineWidth; +} VkPipelineRasterizationStateCreateInfo; + +typedef struct VkPipelineMultisampleStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineMultisampleStateCreateFlags flags; + VkSampleCountFlagBits rasterizationSamples; + VkBool32 sampleShadingEnable; + float minSampleShading; + const VkSampleMask* pSampleMask; + VkBool32 alphaToCoverageEnable; + VkBool32 alphaToOneEnable; +} VkPipelineMultisampleStateCreateInfo; + +typedef struct VkStencilOpState { + VkStencilOp failOp; + VkStencilOp passOp; + VkStencilOp depthFailOp; + VkCompareOp compareOp; + uint32_t compareMask; + uint32_t writeMask; + uint32_t reference; +} VkStencilOpState; + +typedef struct VkPipelineDepthStencilStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineDepthStencilStateCreateFlags flags; + VkBool32 depthTestEnable; + VkBool32 depthWriteEnable; + VkCompareOp depthCompareOp; + VkBool32 depthBoundsTestEnable; + VkBool32 stencilTestEnable; + VkStencilOpState front; + VkStencilOpState back; + float minDepthBounds; + float maxDepthBounds; +} VkPipelineDepthStencilStateCreateInfo; + +typedef struct VkPipelineColorBlendAttachmentState { + 
VkBool32 blendEnable; + VkBlendFactor srcColorBlendFactor; + VkBlendFactor dstColorBlendFactor; + VkBlendOp colorBlendOp; + VkBlendFactor srcAlphaBlendFactor; + VkBlendFactor dstAlphaBlendFactor; + VkBlendOp alphaBlendOp; + VkColorComponentFlags colorWriteMask; +} VkPipelineColorBlendAttachmentState; + +typedef struct VkPipelineColorBlendStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineColorBlendStateCreateFlags flags; + VkBool32 logicOpEnable; + VkLogicOp logicOp; + uint32_t attachmentCount; + const VkPipelineColorBlendAttachmentState* pAttachments; + float blendConstants[4]; +} VkPipelineColorBlendStateCreateInfo; + +typedef struct VkPipelineDynamicStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineDynamicStateCreateFlags flags; + uint32_t dynamicStateCount; + const VkDynamicState* pDynamicStates; +} VkPipelineDynamicStateCreateInfo; + +typedef struct VkGraphicsPipelineCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineVertexInputStateCreateInfo* pVertexInputState; + const VkPipelineInputAssemblyStateCreateInfo* pInputAssemblyState; + const VkPipelineTessellationStateCreateInfo* pTessellationState; + const VkPipelineViewportStateCreateInfo* pViewportState; + const VkPipelineRasterizationStateCreateInfo* pRasterizationState; + const VkPipelineMultisampleStateCreateInfo* pMultisampleState; + const VkPipelineDepthStencilStateCreateInfo* pDepthStencilState; + const VkPipelineColorBlendStateCreateInfo* pColorBlendState; + const VkPipelineDynamicStateCreateInfo* pDynamicState; + VkPipelineLayout layout; + VkRenderPass renderPass; + uint32_t subpass; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkGraphicsPipelineCreateInfo; + +typedef struct VkPushConstantRange { + VkShaderStageFlags stageFlags; + uint32_t offset; + uint32_t size; +} VkPushConstantRange; + +typedef struct VkPipelineLayoutCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineLayoutCreateFlags flags; + uint32_t setLayoutCount; + const VkDescriptorSetLayout* pSetLayouts; + uint32_t pushConstantRangeCount; + const VkPushConstantRange* pPushConstantRanges; +} VkPipelineLayoutCreateInfo; + +typedef struct VkSamplerCreateInfo { + VkStructureType sType; + const void* pNext; + VkSamplerCreateFlags flags; + VkFilter magFilter; + VkFilter minFilter; + VkSamplerMipmapMode mipmapMode; + VkSamplerAddressMode addressModeU; + VkSamplerAddressMode addressModeV; + VkSamplerAddressMode addressModeW; + float mipLodBias; + VkBool32 anisotropyEnable; + float maxAnisotropy; + VkBool32 compareEnable; + VkCompareOp compareOp; + float minLod; + float maxLod; + VkBorderColor borderColor; + VkBool32 unnormalizedCoordinates; +} VkSamplerCreateInfo; + +typedef struct VkCopyDescriptorSet { + VkStructureType sType; + const void* pNext; + VkDescriptorSet srcSet; + uint32_t srcBinding; + uint32_t srcArrayElement; + VkDescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; +} VkCopyDescriptorSet; + +typedef struct VkDescriptorBufferInfo { + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize range; +} VkDescriptorBufferInfo; + +typedef struct VkDescriptorImageInfo { + VkSampler sampler; + VkImageView imageView; + VkImageLayout imageLayout; +} VkDescriptorImageInfo; + +typedef struct VkDescriptorPoolSize { + VkDescriptorType type; + uint32_t descriptorCount; +} VkDescriptorPoolSize; + +typedef struct 
VkDescriptorPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorPoolCreateFlags flags; + uint32_t maxSets; + uint32_t poolSizeCount; + const VkDescriptorPoolSize* pPoolSizes; +} VkDescriptorPoolCreateInfo; + +typedef struct VkDescriptorSetAllocateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorPool descriptorPool; + uint32_t descriptorSetCount; + const VkDescriptorSetLayout* pSetLayouts; +} VkDescriptorSetAllocateInfo; + +typedef struct VkDescriptorSetLayoutBinding { + uint32_t binding; + VkDescriptorType descriptorType; + uint32_t descriptorCount; + VkShaderStageFlags stageFlags; + const VkSampler* pImmutableSamplers; +} VkDescriptorSetLayoutBinding; + +typedef struct VkDescriptorSetLayoutCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorSetLayoutCreateFlags flags; + uint32_t bindingCount; + const VkDescriptorSetLayoutBinding* pBindings; +} VkDescriptorSetLayoutCreateInfo; + +typedef struct VkWriteDescriptorSet { + VkStructureType sType; + const void* pNext; + VkDescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + VkDescriptorType descriptorType; + const VkDescriptorImageInfo* pImageInfo; + const VkDescriptorBufferInfo* pBufferInfo; + const VkBufferView* pTexelBufferView; +} VkWriteDescriptorSet; + +typedef struct VkAttachmentDescription { + VkAttachmentDescriptionFlags flags; + VkFormat format; + VkSampleCountFlagBits samples; + VkAttachmentLoadOp loadOp; + VkAttachmentStoreOp storeOp; + VkAttachmentLoadOp stencilLoadOp; + VkAttachmentStoreOp stencilStoreOp; + VkImageLayout initialLayout; + VkImageLayout finalLayout; +} VkAttachmentDescription; + +typedef struct VkAttachmentReference { + uint32_t attachment; + VkImageLayout layout; +} VkAttachmentReference; + +typedef struct VkFramebufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkFramebufferCreateFlags flags; + VkRenderPass renderPass; + uint32_t attachmentCount; + const VkImageView* pAttachments; + uint32_t width; + uint32_t height; + uint32_t layers; +} VkFramebufferCreateInfo; + +typedef struct VkSubpassDescription { + VkSubpassDescriptionFlags flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t inputAttachmentCount; + const VkAttachmentReference* pInputAttachments; + uint32_t colorAttachmentCount; + const VkAttachmentReference* pColorAttachments; + const VkAttachmentReference* pResolveAttachments; + const VkAttachmentReference* pDepthStencilAttachment; + uint32_t preserveAttachmentCount; + const uint32_t* pPreserveAttachments; +} VkSubpassDescription; + +typedef struct VkSubpassDependency { + uint32_t srcSubpass; + uint32_t dstSubpass; + VkPipelineStageFlags srcStageMask; + VkPipelineStageFlags dstStageMask; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkDependencyFlags dependencyFlags; +} VkSubpassDependency; + +typedef struct VkRenderPassCreateInfo { + VkStructureType sType; + const void* pNext; + VkRenderPassCreateFlags flags; + uint32_t attachmentCount; + const VkAttachmentDescription* pAttachments; + uint32_t subpassCount; + const VkSubpassDescription* pSubpasses; + uint32_t dependencyCount; + const VkSubpassDependency* pDependencies; +} VkRenderPassCreateInfo; + +typedef struct VkCommandPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkCommandPoolCreateFlags flags; + uint32_t queueFamilyIndex; +} VkCommandPoolCreateInfo; + +typedef struct VkCommandBufferAllocateInfo { + VkStructureType sType; + const void* pNext; + VkCommandPool commandPool; + 
VkCommandBufferLevel level; + uint32_t commandBufferCount; +} VkCommandBufferAllocateInfo; + +typedef struct VkCommandBufferInheritanceInfo { + VkStructureType sType; + const void* pNext; + VkRenderPass renderPass; + uint32_t subpass; + VkFramebuffer framebuffer; + VkBool32 occlusionQueryEnable; + VkQueryControlFlags queryFlags; + VkQueryPipelineStatisticFlags pipelineStatistics; +} VkCommandBufferInheritanceInfo; + +typedef struct VkCommandBufferBeginInfo { + VkStructureType sType; + const void* pNext; + VkCommandBufferUsageFlags flags; + const VkCommandBufferInheritanceInfo* pInheritanceInfo; +} VkCommandBufferBeginInfo; + +typedef struct VkBufferCopy { + VkDeviceSize srcOffset; + VkDeviceSize dstOffset; + VkDeviceSize size; +} VkBufferCopy; + +typedef struct VkImageSubresourceLayers { + VkImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkImageSubresourceLayers; + +typedef struct VkBufferImageCopy { + VkDeviceSize bufferOffset; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkBufferImageCopy; + +typedef union VkClearColorValue { + float float32[4]; + int32_t int32[4]; + uint32_t uint32[4]; +} VkClearColorValue; + +typedef struct VkClearDepthStencilValue { + float depth; + uint32_t stencil; +} VkClearDepthStencilValue; + +typedef union VkClearValue { + VkClearColorValue color; + VkClearDepthStencilValue depthStencil; +} VkClearValue; + +typedef struct VkClearAttachment { + VkImageAspectFlags aspectMask; + uint32_t colorAttachment; + VkClearValue clearValue; +} VkClearAttachment; + +typedef struct VkClearRect { + VkRect2D rect; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkClearRect; + +typedef struct VkImageBlit { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffsets[2]; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffsets[2]; +} VkImageBlit; + +typedef struct VkImageCopy { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageCopy; + +typedef struct VkImageResolve { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageResolve; + +typedef struct VkRenderPassBeginInfo { + VkStructureType sType; + const void* pNext; + VkRenderPass renderPass; + VkFramebuffer framebuffer; + VkRect2D renderArea; + uint32_t clearValueCount; + const VkClearValue* pClearValues; +} VkRenderPassBeginInfo; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance); +typedef void (VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDevices)(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, 
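A sketch of how the command-buffer and render-pass structures above are commonly combined when recording a frame; commandBuffer, renderPass, framebuffer, pipeline, width and height are assumed to exist already.

    VkCommandBufferBeginInfo beginInfo = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
    };
    vkBeginCommandBuffer(commandBuffer, &beginInfo);

    VkClearValue clearColor = { .color = { .float32 = { 0.0f, 0.0f, 0.0f, 1.0f } } };
    VkRenderPassBeginInfo rpBegin = {
        .sType           = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
        .renderPass      = renderPass,
        .framebuffer     = framebuffer,
        .renderArea      = { .offset = { 0, 0 }, .extent = { width, height } },
        .clearValueCount = 1,
        .pClearValues    = &clearColor,
    };
    vkCmdBeginRenderPass(commandBuffer, &rpBegin, VK_SUBPASS_CONTENTS_INLINE);
    vkCmdBindPipeline(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
    vkCmdDraw(commandBuffer, 3, 1, 0, 0);              /* one triangle, one instance */
    vkCmdEndRenderPass(commandBuffer);
    vkEndCommandBuffer(commandBuffer);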
VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddr)(VkInstance instance, const char* pName); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetDeviceProcAddr)(VkDevice device, const char* pName); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDevice)(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice); +typedef void (VKAPI_PTR *PFN_vkDestroyDevice)(VkDevice device, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceExtensionProperties)(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceExtensionProperties)(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceLayerProperties)(uint32_t* pPropertyCount, VkLayerProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceLayerProperties)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue)(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkQueueWaitIdle)(VkQueue queue); +typedef VkResult (VKAPI_PTR *PFN_vkDeviceWaitIdle)(VkDevice device); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateMemory)(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory); +typedef void (VKAPI_PTR *PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkMapMemory)(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData); +typedef void (VKAPI_PTR *PFN_vkUnmapMemory)(VkDevice device, VkDeviceMemory memory); +typedef VkResult (VKAPI_PTR *PFN_vkFlushMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); +typedef VkResult (VKAPI_PTR *PFN_vkInvalidateMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); +typedef void (VKAPI_PTR *PFN_vkGetDeviceMemoryCommitment)(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes); +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory)(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory)(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements)(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements); +typedef void (VKAPI_PTR 
*PFN_vkGetImageMemoryRequirements)(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements)(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkQueueBindSparse)(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkCreateFence)(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef void (VKAPI_PTR *PFN_vkDestroyFence)(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceStatus)(VkDevice device, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkWaitForFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSemaphore)(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore); +typedef void (VKAPI_PTR *PFN_vkDestroySemaphore)(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateEvent)(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent); +typedef void (VKAPI_PTR *PFN_vkDestroyEvent)(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetEventStatus)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkSetEvent)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkResetEvent)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkCreateQueryPool)(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool); +typedef void (VKAPI_PTR *PFN_vkDestroyQueryPool)(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetQueryPoolResults)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkCreateBuffer)(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer); +typedef void (VKAPI_PTR *PFN_vkDestroyBuffer)(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferView)(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView); +typedef void (VKAPI_PTR *PFN_vkDestroyBufferView)(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateImage)(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage); +typedef void (VKAPI_PTR 
*PFN_vkDestroyImage)(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout)(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout); +typedef VkResult (VKAPI_PTR *PFN_vkCreateImageView)(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView); +typedef void (VKAPI_PTR *PFN_vkDestroyImageView)(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateShaderModule)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule); +typedef void (VKAPI_PTR *PFN_vkDestroyShaderModule)(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineCache)(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineCache)(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineCacheData)(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkMergePipelineCaches)(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches); +typedef VkResult (VKAPI_PTR *PFN_vkCreateGraphicsPipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkCreateComputePipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef void (VKAPI_PTR *PFN_vkDestroyPipeline)(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineLayout)(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineLayout)(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSampler)(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler); +typedef void (VKAPI_PTR *PFN_vkDestroySampler)(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorSetLayout)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorSetLayout)(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorPool)(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR 
*PFN_vkResetDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateDescriptorSets)(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets); +typedef VkResult (VKAPI_PTR *PFN_vkFreeDescriptorSets)(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSets)(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies); +typedef VkResult (VKAPI_PTR *PFN_vkCreateFramebuffer)(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer); +typedef void (VKAPI_PTR *PFN_vkDestroyFramebuffer)(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass)(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkDestroyRenderPass)(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetRenderAreaGranularity)(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity); +typedef VkResult (VKAPI_PTR *PFN_vkCreateCommandPool)(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool); +typedef void (VKAPI_PTR *PFN_vkDestroyCommandPool)(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateCommandBuffers)(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers); +typedef void (VKAPI_PTR *PFN_vkFreeCommandBuffers)(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers); +typedef VkResult (VKAPI_PTR *PFN_vkBeginCommandBuffer)(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo); +typedef VkResult (VKAPI_PTR *PFN_vkEndCommandBuffer)(VkCommandBuffer commandBuffer); +typedef VkResult (VKAPI_PTR *PFN_vkResetCommandBuffer)(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdBindPipeline)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewport)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports); +typedef void (VKAPI_PTR *PFN_vkCmdSetScissor)(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors); +typedef void (VKAPI_PTR *PFN_vkCmdSetLineWidth)(VkCommandBuffer commandBuffer, float lineWidth); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBias)(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor); +typedef void (VKAPI_PTR *PFN_vkCmdSetBlendConstants)(VkCommandBuffer commandBuffer, const float blendConstants[4]); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBounds)(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds); +typedef void (VKAPI_PTR 
*PFN_vkCmdSetStencilCompareMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilWriteMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilReference)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType); +typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdDraw)(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexed)(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDispatch)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdUpdateBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdFillBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data); +typedef void (VKAPI_PTR *PFN_vkCmdClearColorImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const 
VkImageSubresourceRange* pRanges); +typedef void (VKAPI_PTR *PFN_vkCmdClearDepthStencilImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges); +typedef void (VKAPI_PTR *PFN_vkCmdClearAttachments)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdSetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdResetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); +typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier)(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); +typedef void (VKAPI_PTR *PFN_vkCmdBeginQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdEndQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query); +typedef void (VKAPI_PTR *PFN_vkCmdResetQueryPool)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); +typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query); +typedef void (VKAPI_PTR *PFN_vkCmdCopyQueryPoolResults)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdPushConstants)(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass)(VkCommandBuffer commandBuffer, VkSubpassContents contents); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteCommands)(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance( + const VkInstanceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkInstance* pInstance); + +VKAPI_ATTR void VKAPI_CALL 
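The prototypes below are only declared when VK_NO_PROTOTYPES is left undefined, i.e. when the application links against a Vulkan loader. A build that defines VK_NO_PROTOTYPES instead resolves entry points through the PFN_* typedefs above, roughly as sketched here; how pfnGetInstanceProcAddr itself is obtained from the loader library (dlopen/LoadLibrary) is platform specific and omitted.

    /* pfnGetInstanceProcAddr: assumed to have been fetched from the loader DLL/SO;
     * instance: assumed to have been created earlier.                             */
    PFN_vkGetDeviceProcAddr pfnGetDeviceProcAddr =
        (PFN_vkGetDeviceProcAddr)pfnGetInstanceProcAddr(instance, "vkGetDeviceProcAddr");
    PFN_vkCreateDevice pfnCreateDevice =
        (PFN_vkCreateDevice)pfnGetInstanceProcAddr(instance, "vkCreateDevice");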
vkDestroyInstance( + VkInstance instance, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices( + VkInstance instance, + uint32_t* pPhysicalDeviceCount, + VkPhysicalDevice* pPhysicalDevices); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkImageFormatProperties* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties* pMemoryProperties); + +VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr( + VkInstance instance, + const char* pName); + +VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr( + VkDevice device, + const char* pName); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice( + VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDevice* pDevice); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDevice( + VkDevice device, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties( + const char* pLayerName, + uint32_t* pPropertyCount, + VkExtensionProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties( + VkPhysicalDevice physicalDevice, + const char* pLayerName, + uint32_t* pPropertyCount, + VkExtensionProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties( + uint32_t* pPropertyCount, + VkLayerProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkLayerProperties* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue( + VkDevice device, + uint32_t queueFamilyIndex, + uint32_t queueIndex, + VkQueue* pQueue); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit( + VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo* pSubmits, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle( + VkQueue queue); + +VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle( + VkDevice device); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory( + VkDevice device, + const VkMemoryAllocateInfo* pAllocateInfo, + const VkAllocationCallbacks* pAllocator, + VkDeviceMemory* pMemory); + +VKAPI_ATTR void VKAPI_CALL vkFreeMemory( + VkDevice device, + VkDeviceMemory memory, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory( + VkDevice device, + VkDeviceMemory memory, + VkDeviceSize offset, + VkDeviceSize size, + VkMemoryMapFlags flags, + void** ppData); + +VKAPI_ATTR void VKAPI_CALL vkUnmapMemory( + VkDevice device, + VkDeviceMemory memory); + +VKAPI_ATTR VkResult VKAPI_CALL 
vkFlushMappedMemoryRanges( + VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange* pMemoryRanges); + +VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges( + VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange* pMemoryRanges); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment( + VkDevice device, + VkDeviceMemory memory, + VkDeviceSize* pCommittedMemoryInBytes); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory( + VkDevice device, + VkBuffer buffer, + VkDeviceMemory memory, + VkDeviceSize memoryOffset); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory( + VkDevice device, + VkImage image, + VkDeviceMemory memory, + VkDeviceSize memoryOffset); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements( + VkDevice device, + VkBuffer buffer, + VkMemoryRequirements* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements( + VkDevice device, + VkImage image, + VkMemoryRequirements* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements( + VkDevice device, + VkImage image, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements* pSparseMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkSampleCountFlagBits samples, + VkImageUsageFlags usage, + VkImageTiling tiling, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse( + VkQueue queue, + uint32_t bindInfoCount, + const VkBindSparseInfo* pBindInfo, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence( + VkDevice device, + const VkFenceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR void VKAPI_CALL vkDestroyFence( + VkDevice device, + VkFence fence, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetFences( + VkDevice device, + uint32_t fenceCount, + const VkFence* pFences); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus( + VkDevice device, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences( + VkDevice device, + uint32_t fenceCount, + const VkFence* pFences, + VkBool32 waitAll, + uint64_t timeout); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore( + VkDevice device, + const VkSemaphoreCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSemaphore* pSemaphore); + +VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore( + VkDevice device, + VkSemaphore semaphore, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent( + VkDevice device, + const VkEventCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkEvent* pEvent); + +VKAPI_ATTR void VKAPI_CALL vkDestroyEvent( + VkDevice device, + VkEvent event, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool( + VkDevice device, + const VkQueryPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkQueryPool* pQueryPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool( + VkDevice device, + VkQueryPool queryPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR 
VkResult VKAPI_CALL vkGetQueryPoolResults( + VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void* pData, + VkDeviceSize stride, + VkQueryResultFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer( + VkDevice device, + const VkBufferCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBuffer* pBuffer); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer( + VkDevice device, + VkBuffer buffer, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView( + VkDevice device, + const VkBufferViewCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBufferView* pView); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView( + VkDevice device, + VkBufferView bufferView, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage( + VkDevice device, + const VkImageCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkImage* pImage); + +VKAPI_ATTR void VKAPI_CALL vkDestroyImage( + VkDevice device, + VkImage image, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout( + VkDevice device, + VkImage image, + const VkImageSubresource* pSubresource, + VkSubresourceLayout* pLayout); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView( + VkDevice device, + const VkImageViewCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkImageView* pView); + +VKAPI_ATTR void VKAPI_CALL vkDestroyImageView( + VkDevice device, + VkImageView imageView, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule( + VkDevice device, + const VkShaderModuleCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkShaderModule* pShaderModule); + +VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule( + VkDevice device, + VkShaderModule shaderModule, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache( + VkDevice device, + const VkPipelineCacheCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineCache* pPipelineCache); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache( + VkDevice device, + VkPipelineCache pipelineCache, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData( + VkDevice device, + VkPipelineCache pipelineCache, + size_t* pDataSize, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches( + VkDevice device, + VkPipelineCache dstCache, + uint32_t srcCacheCount, + const VkPipelineCache* pSrcCaches); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkComputePipelineCreateInfo* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline( + VkDevice device, + VkPipeline pipeline, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout( + VkDevice device, + const VkPipelineLayoutCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineLayout* pPipelineLayout); + +VKAPI_ATTR void VKAPI_CALL 
vkDestroyPipelineLayout( + VkDevice device, + VkPipelineLayout pipelineLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler( + VkDevice device, + const VkSamplerCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSampler* pSampler); + +VKAPI_ATTR void VKAPI_CALL vkDestroySampler( + VkDevice device, + VkSampler sampler, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorSetLayout* pSetLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout( + VkDevice device, + VkDescriptorSetLayout descriptorSetLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool( + VkDevice device, + const VkDescriptorPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorPool* pDescriptorPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool( + VkDevice device, + VkDescriptorPool descriptorPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool( + VkDevice device, + VkDescriptorPool descriptorPool, + VkDescriptorPoolResetFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets( + VkDevice device, + const VkDescriptorSetAllocateInfo* pAllocateInfo, + VkDescriptorSet* pDescriptorSets); + +VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets( + VkDevice device, + VkDescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VkDescriptorSet* pDescriptorSets); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets( + VkDevice device, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites, + uint32_t descriptorCopyCount, + const VkCopyDescriptorSet* pDescriptorCopies); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer( + VkDevice device, + const VkFramebufferCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkFramebuffer* pFramebuffer); + +VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer( + VkDevice device, + VkFramebuffer framebuffer, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass( + VkDevice device, + const VkRenderPassCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass); + +VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass( + VkDevice device, + VkRenderPass renderPass, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity( + VkDevice device, + VkRenderPass renderPass, + VkExtent2D* pGranularity); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool( + VkDevice device, + const VkCommandPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCommandPool* pCommandPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool( + VkDevice device, + VkCommandPool commandPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolResetFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers( + VkDevice device, + const VkCommandBufferAllocateInfo* pAllocateInfo, + VkCommandBuffer* pCommandBuffers); + +VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers( + VkDevice device, + VkCommandPool commandPool, + uint32_t commandBufferCount, + const VkCommandBuffer* pCommandBuffers); + +VKAPI_ATTR 
VkResult VKAPI_CALL vkBeginCommandBuffer( + VkCommandBuffer commandBuffer, + const VkCommandBufferBeginInfo* pBeginInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer( + VkCommandBuffer commandBuffer, + VkCommandBufferResetFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewport* pViewports); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor( + VkCommandBuffer commandBuffer, + uint32_t firstScissor, + uint32_t scissorCount, + const VkRect2D* pScissors); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth( + VkCommandBuffer commandBuffer, + float lineWidth); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias( + VkCommandBuffer commandBuffer, + float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants( + VkCommandBuffer commandBuffer, + const float blendConstants[4]); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds( + VkCommandBuffer commandBuffer, + float minDepthBounds, + float maxDepthBounds); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t compareMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t writeMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t reference); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VkDescriptorSet* pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t* pDynamicOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkIndexType indexType); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdDraw( + VkCommandBuffer commandBuffer, + uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed( + VkCommandBuffer commandBuffer, + uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatch( + VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer( + VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkBuffer 
dstBuffer, + uint32_t regionCount, + const VkBufferCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageBlit* pRegions, + VkFilter filter); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage( + VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkBufferImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer( + VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize dataSize, + const void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer( + VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize size, + uint32_t data); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage( + VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearColorValue* pColor, + uint32_t rangeCount, + const VkImageSubresourceRange* pRanges); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage( + VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearDepthStencilValue* pDepthStencil, + uint32_t rangeCount, + const VkImageSubresourceRange* pRanges); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments( + VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkClearAttachment* pAttachments, + uint32_t rectCount, + const VkClearRect* pRects); + +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageResolve* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents( + VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VkMemoryBarrier* pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier* pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier* pImageMemoryBarriers); + +VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + VkDependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VkMemoryBarrier* pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier* pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier* pImageMemoryBarriers); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery( + VkCommandBuffer commandBuffer, + VkQueryPool 
queryPool, + uint32_t query, + VkQueryControlFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp( + VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize stride, + VkQueryResultFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants( + VkCommandBuffer commandBuffer, + VkPipelineLayout layout, + VkShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void* pValues); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass( + VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + VkSubpassContents contents); + +VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass( + VkCommandBuffer commandBuffer, + VkSubpassContents contents); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands( + VkCommandBuffer commandBuffer, + uint32_t commandBufferCount, + const VkCommandBuffer* pCommandBuffers); +#endif + + +// VK_VERSION_1_1 is a preprocessor guard. Do not pass it to API calls. +#define VK_VERSION_1_1 1 +// Vulkan 1.1 version number +#define VK_API_VERSION_1_1 VK_MAKE_API_VERSION(0, 1, 1, 0)// Patch version should always be set to 0 + +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSamplerYcbcrConversion) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplate) +#define VK_MAX_DEVICE_GROUP_SIZE 32U +#define VK_LUID_SIZE 8U +#define VK_QUEUE_FAMILY_EXTERNAL (~1U) + +typedef enum VkPointClippingBehavior { + VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES = 0, + VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY = 1, + VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, + VK_POINT_CLIPPING_BEHAVIOR_MAX_ENUM = 0x7FFFFFFF +} VkPointClippingBehavior; + +typedef enum VkTessellationDomainOrigin { + VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT = 0, + VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT = 1, + VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_MAX_ENUM = 0x7FFFFFFF +} VkTessellationDomainOrigin; + +typedef enum VkSamplerYcbcrModelConversion { + VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY = 0, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY = 1, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709 = 2, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601 = 3, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 = 4, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, + 
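To illustrate the comments above: VK_API_VERSION_1_1 (a value packed by VK_MAKE_API_VERSION) is what gets passed to the API, while VK_VERSION_1_1 only guards this section of the header. A minimal sketch using the VkApplicationInfo and VkInstanceCreateInfo structures declared earlier in the header:

    VkApplicationInfo appInfo = {
        .sType      = VK_STRUCTURE_TYPE_APPLICATION_INFO,
        .apiVersion = VK_API_VERSION_1_1,              /* not VK_VERSION_1_1 */
    };
    VkInstanceCreateInfo instanceInfo = {
        .sType            = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        .pApplicationInfo = &appInfo,
    };
    VkInstance instance;
    vkCreateInstance(&instanceInfo, NULL, &instance);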
VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_MAX_ENUM = 0x7FFFFFFF +} VkSamplerYcbcrModelConversion; + +typedef enum VkSamplerYcbcrRange { + VK_SAMPLER_YCBCR_RANGE_ITU_FULL = 0, + VK_SAMPLER_YCBCR_RANGE_ITU_NARROW = 1, + VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, + VK_SAMPLER_YCBCR_RANGE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerYcbcrRange; + +typedef enum VkChromaLocation { + VK_CHROMA_LOCATION_COSITED_EVEN = 0, + VK_CHROMA_LOCATION_MIDPOINT = 1, + VK_CHROMA_LOCATION_COSITED_EVEN_KHR = VK_CHROMA_LOCATION_COSITED_EVEN, + VK_CHROMA_LOCATION_MIDPOINT_KHR = VK_CHROMA_LOCATION_MIDPOINT, + VK_CHROMA_LOCATION_MAX_ENUM = 0x7FFFFFFF +} VkChromaLocation; + +typedef enum VkDescriptorUpdateTemplateType { + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET = 0, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS = 1, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorUpdateTemplateType; + +typedef enum VkSubgroupFeatureFlagBits { + VK_SUBGROUP_FEATURE_BASIC_BIT = 0x00000001, + VK_SUBGROUP_FEATURE_VOTE_BIT = 0x00000002, + VK_SUBGROUP_FEATURE_ARITHMETIC_BIT = 0x00000004, + VK_SUBGROUP_FEATURE_BALLOT_BIT = 0x00000008, + VK_SUBGROUP_FEATURE_SHUFFLE_BIT = 0x00000010, + VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT = 0x00000020, + VK_SUBGROUP_FEATURE_CLUSTERED_BIT = 0x00000040, + VK_SUBGROUP_FEATURE_QUAD_BIT = 0x00000080, + VK_SUBGROUP_FEATURE_ROTATE_BIT = 0x00000200, + VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT = 0x00000400, + VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV = 0x00000100, + VK_SUBGROUP_FEATURE_ROTATE_BIT_KHR = VK_SUBGROUP_FEATURE_ROTATE_BIT, + VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT_KHR = VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT, + VK_SUBGROUP_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSubgroupFeatureFlagBits; +typedef VkFlags VkSubgroupFeatureFlags; + +typedef enum VkPeerMemoryFeatureFlagBits { + VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT = 0x00000001, + VK_PEER_MEMORY_FEATURE_COPY_DST_BIT = 0x00000002, + VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT = 0x00000004, + VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT = 0x00000008, + VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, + VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, + VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT, + VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT, + VK_PEER_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPeerMemoryFeatureFlagBits; +typedef VkFlags VkPeerMemoryFeatureFlags; + +typedef enum VkMemoryAllocateFlagBits { + VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT = 0x00000001, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT = 0x00000002, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT = 0x00000004, + VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, + VK_MEMORY_ALLOCATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryAllocateFlagBits; +typedef VkFlags VkMemoryAllocateFlags; +typedef 
VkFlags VkCommandPoolTrimFlags; +typedef VkFlags VkDescriptorUpdateTemplateCreateFlags; + +typedef enum VkExternalMemoryHandleTypeFlagBits { + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT = 0x00000008, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT = 0x00000010, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT = 0x00000020, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT = 0x00000040, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT = 0x00000200, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID = 0x00000400, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT = 0x00000080, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT = 0x00000100, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA = 0x00000800, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV = 0x00001000, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_SCREEN_BUFFER_BIT_QNX = 0x00004000, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLBUFFER_BIT_EXT = 0x00010000, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLTEXTURE_BIT_EXT = 0x00020000, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLHEAP_BIT_EXT = 0x00040000, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalMemoryHandleTypeFlagBits; +typedef VkFlags VkExternalMemoryHandleTypeFlags; + +typedef enum VkExternalMemoryFeatureFlagBits { + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT = 0x00000001, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT = 0x00000004, + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalMemoryFeatureFlagBits; +typedef VkFlags VkExternalMemoryFeatureFlags; + +typedef enum VkExternalFenceHandleTypeFlagBits { + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000008, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR = 
VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalFenceHandleTypeFlagBits; +typedef VkFlags VkExternalFenceHandleTypeFlags; + +typedef enum VkExternalFenceFeatureFlagBits { + VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT = 0x00000001, + VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_FENCE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalFenceFeatureFlagBits; +typedef VkFlags VkExternalFenceFeatureFlags; + +typedef enum VkFenceImportFlagBits { + VK_FENCE_IMPORT_TEMPORARY_BIT = 0x00000001, + VK_FENCE_IMPORT_TEMPORARY_BIT_KHR = VK_FENCE_IMPORT_TEMPORARY_BIT, + VK_FENCE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFenceImportFlagBits; +typedef VkFlags VkFenceImportFlags; + +typedef enum VkSemaphoreImportFlagBits { + VK_SEMAPHORE_IMPORT_TEMPORARY_BIT = 0x00000001, + VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT, + VK_SEMAPHORE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSemaphoreImportFlagBits; +typedef VkFlags VkSemaphoreImportFlags; + +typedef enum VkExternalSemaphoreHandleTypeFlagBits { + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT = 0x00000008, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000010, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA = 0x00000080, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalSemaphoreHandleTypeFlagBits; +typedef VkFlags VkExternalSemaphoreHandleTypeFlags; + +typedef enum VkExternalSemaphoreFeatureFlagBits { + VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT = 0x00000001, + VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_SEMAPHORE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalSemaphoreFeatureFlagBits; +typedef VkFlags VkExternalSemaphoreFeatureFlags; +typedef struct VkPhysicalDeviceSubgroupProperties { + VkStructureType sType; + void* pNext; + uint32_t subgroupSize; + VkShaderStageFlags supportedStages; + VkSubgroupFeatureFlags supportedOperations; + VkBool32 quadOperationsInAllStages; +} VkPhysicalDeviceSubgroupProperties; + +typedef struct VkBindBufferMemoryInfo { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; +} VkBindBufferMemoryInfo; + +typedef struct VkBindImageMemoryInfo { 
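A usage sketch for the batched binding path introduced in Vulkan 1.1: VkBindBufferMemoryInfo feeds vkBindBufferMemory2, whose prototype appears later in this header. The device, buffer and memory handles are assumed to have been created beforehand.

    VkBindBufferMemoryInfo bindInfo = {
        .sType        = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
        .buffer       = buffer,                        /* from vkCreateBuffer    */
        .memory       = memory,                        /* from vkAllocateMemory  */
        .memoryOffset = 0,
    };
    vkBindBufferMemory2(device, 1, &bindInfo);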
+ VkStructureType sType; + const void* pNext; + VkImage image; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; +} VkBindImageMemoryInfo; + +typedef struct VkPhysicalDevice16BitStorageFeatures { + VkStructureType sType; + void* pNext; + VkBool32 storageBuffer16BitAccess; + VkBool32 uniformAndStorageBuffer16BitAccess; + VkBool32 storagePushConstant16; + VkBool32 storageInputOutput16; +} VkPhysicalDevice16BitStorageFeatures; + +typedef struct VkMemoryDedicatedRequirements { + VkStructureType sType; + void* pNext; + VkBool32 prefersDedicatedAllocation; + VkBool32 requiresDedicatedAllocation; +} VkMemoryDedicatedRequirements; + +typedef struct VkMemoryDedicatedAllocateInfo { + VkStructureType sType; + const void* pNext; + VkImage image; + VkBuffer buffer; +} VkMemoryDedicatedAllocateInfo; + +typedef struct VkMemoryAllocateFlagsInfo { + VkStructureType sType; + const void* pNext; + VkMemoryAllocateFlags flags; + uint32_t deviceMask; +} VkMemoryAllocateFlagsInfo; + +typedef struct VkDeviceGroupRenderPassBeginInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceMask; + uint32_t deviceRenderAreaCount; + const VkRect2D* pDeviceRenderAreas; +} VkDeviceGroupRenderPassBeginInfo; + +typedef struct VkDeviceGroupCommandBufferBeginInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceMask; +} VkDeviceGroupCommandBufferBeginInfo; + +typedef struct VkDeviceGroupSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const uint32_t* pWaitSemaphoreDeviceIndices; + uint32_t commandBufferCount; + const uint32_t* pCommandBufferDeviceMasks; + uint32_t signalSemaphoreCount; + const uint32_t* pSignalSemaphoreDeviceIndices; +} VkDeviceGroupSubmitInfo; + +typedef struct VkDeviceGroupBindSparseInfo { + VkStructureType sType; + const void* pNext; + uint32_t resourceDeviceIndex; + uint32_t memoryDeviceIndex; +} VkDeviceGroupBindSparseInfo; + +typedef struct VkBindBufferMemoryDeviceGroupInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; +} VkBindBufferMemoryDeviceGroupInfo; + +typedef struct VkBindImageMemoryDeviceGroupInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; + uint32_t splitInstanceBindRegionCount; + const VkRect2D* pSplitInstanceBindRegions; +} VkBindImageMemoryDeviceGroupInfo; + +typedef struct VkPhysicalDeviceGroupProperties { + VkStructureType sType; + void* pNext; + uint32_t physicalDeviceCount; + VkPhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE]; + VkBool32 subsetAllocation; +} VkPhysicalDeviceGroupProperties; + +typedef struct VkDeviceGroupDeviceCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t physicalDeviceCount; + const VkPhysicalDevice* pPhysicalDevices; +} VkDeviceGroupDeviceCreateInfo; + +typedef struct VkBufferMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; +} VkBufferMemoryRequirementsInfo2; + +typedef struct VkImageMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkImage image; +} VkImageMemoryRequirementsInfo2; + +typedef struct VkImageSparseMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkImage image; +} VkImageSparseMemoryRequirementsInfo2; + +typedef struct VkMemoryRequirements2 { + VkStructureType sType; + void* pNext; + VkMemoryRequirements memoryRequirements; +} VkMemoryRequirements2; + +typedef struct VkSparseImageMemoryRequirements2 { + VkStructureType sType; + 
void* pNext; + VkSparseImageMemoryRequirements memoryRequirements; +} VkSparseImageMemoryRequirements2; + +typedef struct VkPhysicalDeviceFeatures2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceFeatures features; +} VkPhysicalDeviceFeatures2; + +typedef struct VkPhysicalDeviceProperties2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceProperties properties; +} VkPhysicalDeviceProperties2; + +typedef struct VkFormatProperties2 { + VkStructureType sType; + void* pNext; + VkFormatProperties formatProperties; +} VkFormatProperties2; + +typedef struct VkImageFormatProperties2 { + VkStructureType sType; + void* pNext; + VkImageFormatProperties imageFormatProperties; +} VkImageFormatProperties2; + +typedef struct VkPhysicalDeviceImageFormatInfo2 { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkImageType type; + VkImageTiling tiling; + VkImageUsageFlags usage; + VkImageCreateFlags flags; +} VkPhysicalDeviceImageFormatInfo2; + +typedef struct VkQueueFamilyProperties2 { + VkStructureType sType; + void* pNext; + VkQueueFamilyProperties queueFamilyProperties; +} VkQueueFamilyProperties2; + +typedef struct VkPhysicalDeviceMemoryProperties2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceMemoryProperties memoryProperties; +} VkPhysicalDeviceMemoryProperties2; + +typedef struct VkSparseImageFormatProperties2 { + VkStructureType sType; + void* pNext; + VkSparseImageFormatProperties properties; +} VkSparseImageFormatProperties2; + +typedef struct VkPhysicalDeviceSparseImageFormatInfo2 { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkImageType type; + VkSampleCountFlagBits samples; + VkImageUsageFlags usage; + VkImageTiling tiling; +} VkPhysicalDeviceSparseImageFormatInfo2; + +typedef struct VkPhysicalDevicePointClippingProperties { + VkStructureType sType; + void* pNext; + VkPointClippingBehavior pointClippingBehavior; +} VkPhysicalDevicePointClippingProperties; + +typedef struct VkInputAttachmentAspectReference { + uint32_t subpass; + uint32_t inputAttachmentIndex; + VkImageAspectFlags aspectMask; +} VkInputAttachmentAspectReference; + +typedef struct VkRenderPassInputAttachmentAspectCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t aspectReferenceCount; + const VkInputAttachmentAspectReference* pAspectReferences; +} VkRenderPassInputAttachmentAspectCreateInfo; + +typedef struct VkImageViewUsageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageUsageFlags usage; +} VkImageViewUsageCreateInfo; + +typedef struct VkPipelineTessellationDomainOriginStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkTessellationDomainOrigin domainOrigin; +} VkPipelineTessellationDomainOriginStateCreateInfo; + +typedef struct VkRenderPassMultiviewCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t subpassCount; + const uint32_t* pViewMasks; + uint32_t dependencyCount; + const int32_t* pViewOffsets; + uint32_t correlationMaskCount; + const uint32_t* pCorrelationMasks; +} VkRenderPassMultiviewCreateInfo; + +typedef struct VkPhysicalDeviceMultiviewFeatures { + VkStructureType sType; + void* pNext; + VkBool32 multiview; + VkBool32 multiviewGeometryShader; + VkBool32 multiviewTessellationShader; +} VkPhysicalDeviceMultiviewFeatures; + +typedef struct VkPhysicalDeviceMultiviewProperties { + VkStructureType sType; + void* pNext; + uint32_t maxMultiviewViewCount; + uint32_t maxMultiviewInstanceIndex; +} VkPhysicalDeviceMultiviewProperties; + +typedef struct 
VkPhysicalDeviceVariablePointersFeatures { + VkStructureType sType; + void* pNext; + VkBool32 variablePointersStorageBuffer; + VkBool32 variablePointers; +} VkPhysicalDeviceVariablePointersFeatures; + +typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointerFeatures; + +typedef struct VkPhysicalDeviceProtectedMemoryFeatures { + VkStructureType sType; + void* pNext; + VkBool32 protectedMemory; +} VkPhysicalDeviceProtectedMemoryFeatures; + +typedef struct VkPhysicalDeviceProtectedMemoryProperties { + VkStructureType sType; + void* pNext; + VkBool32 protectedNoFault; +} VkPhysicalDeviceProtectedMemoryProperties; + +typedef struct VkDeviceQueueInfo2 { + VkStructureType sType; + const void* pNext; + VkDeviceQueueCreateFlags flags; + uint32_t queueFamilyIndex; + uint32_t queueIndex; +} VkDeviceQueueInfo2; + +typedef struct VkProtectedSubmitInfo { + VkStructureType sType; + const void* pNext; + VkBool32 protectedSubmit; +} VkProtectedSubmitInfo; + +typedef struct VkSamplerYcbcrConversionCreateInfo { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkSamplerYcbcrModelConversion ycbcrModel; + VkSamplerYcbcrRange ycbcrRange; + VkComponentMapping components; + VkChromaLocation xChromaOffset; + VkChromaLocation yChromaOffset; + VkFilter chromaFilter; + VkBool32 forceExplicitReconstruction; +} VkSamplerYcbcrConversionCreateInfo; + +typedef struct VkSamplerYcbcrConversionInfo { + VkStructureType sType; + const void* pNext; + VkSamplerYcbcrConversion conversion; +} VkSamplerYcbcrConversionInfo; + +typedef struct VkBindImagePlaneMemoryInfo { + VkStructureType sType; + const void* pNext; + VkImageAspectFlagBits planeAspect; +} VkBindImagePlaneMemoryInfo; + +typedef struct VkImagePlaneMemoryRequirementsInfo { + VkStructureType sType; + const void* pNext; + VkImageAspectFlagBits planeAspect; +} VkImagePlaneMemoryRequirementsInfo; + +typedef struct VkPhysicalDeviceSamplerYcbcrConversionFeatures { + VkStructureType sType; + void* pNext; + VkBool32 samplerYcbcrConversion; +} VkPhysicalDeviceSamplerYcbcrConversionFeatures; + +typedef struct VkSamplerYcbcrConversionImageFormatProperties { + VkStructureType sType; + void* pNext; + uint32_t combinedImageSamplerDescriptorCount; +} VkSamplerYcbcrConversionImageFormatProperties; + +typedef struct VkDescriptorUpdateTemplateEntry { + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + VkDescriptorType descriptorType; + size_t offset; + size_t stride; +} VkDescriptorUpdateTemplateEntry; + +typedef struct VkDescriptorUpdateTemplateCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorUpdateTemplateCreateFlags flags; + uint32_t descriptorUpdateEntryCount; + const VkDescriptorUpdateTemplateEntry* pDescriptorUpdateEntries; + VkDescriptorUpdateTemplateType templateType; + VkDescriptorSetLayout descriptorSetLayout; + VkPipelineBindPoint pipelineBindPoint; + VkPipelineLayout pipelineLayout; + uint32_t set; +} VkDescriptorUpdateTemplateCreateInfo; + +typedef struct VkExternalMemoryProperties { + VkExternalMemoryFeatureFlags externalMemoryFeatures; + VkExternalMemoryHandleTypeFlags exportFromImportedHandleTypes; + VkExternalMemoryHandleTypeFlags compatibleHandleTypes; +} VkExternalMemoryProperties; + +typedef struct VkPhysicalDeviceExternalImageFormatInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalImageFormatInfo; + +typedef struct VkExternalImageFormatProperties { + VkStructureType sType; + void* pNext; + 
VkExternalMemoryProperties externalMemoryProperties; +} VkExternalImageFormatProperties; + +typedef struct VkPhysicalDeviceExternalBufferInfo { + VkStructureType sType; + const void* pNext; + VkBufferCreateFlags flags; + VkBufferUsageFlags usage; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalBufferInfo; + +typedef struct VkExternalBufferProperties { + VkStructureType sType; + void* pNext; + VkExternalMemoryProperties externalMemoryProperties; +} VkExternalBufferProperties; + +typedef struct VkPhysicalDeviceIDProperties { + VkStructureType sType; + void* pNext; + uint8_t deviceUUID[VK_UUID_SIZE]; + uint8_t driverUUID[VK_UUID_SIZE]; + uint8_t deviceLUID[VK_LUID_SIZE]; + uint32_t deviceNodeMask; + VkBool32 deviceLUIDValid; +} VkPhysicalDeviceIDProperties; + +typedef struct VkExternalMemoryImageCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExternalMemoryImageCreateInfo; + +typedef struct VkExternalMemoryBufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExternalMemoryBufferCreateInfo; + +typedef struct VkExportMemoryAllocateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExportMemoryAllocateInfo; + +typedef struct VkPhysicalDeviceExternalFenceInfo { + VkStructureType sType; + const void* pNext; + VkExternalFenceHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalFenceInfo; + +typedef struct VkExternalFenceProperties { + VkStructureType sType; + void* pNext; + VkExternalFenceHandleTypeFlags exportFromImportedHandleTypes; + VkExternalFenceHandleTypeFlags compatibleHandleTypes; + VkExternalFenceFeatureFlags externalFenceFeatures; +} VkExternalFenceProperties; + +typedef struct VkExportFenceCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalFenceHandleTypeFlags handleTypes; +} VkExportFenceCreateInfo; + +typedef struct VkExportSemaphoreCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalSemaphoreHandleTypeFlags handleTypes; +} VkExportSemaphoreCreateInfo; + +typedef struct VkPhysicalDeviceExternalSemaphoreInfo { + VkStructureType sType; + const void* pNext; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalSemaphoreInfo; + +typedef struct VkExternalSemaphoreProperties { + VkStructureType sType; + void* pNext; + VkExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes; + VkExternalSemaphoreHandleTypeFlags compatibleHandleTypes; + VkExternalSemaphoreFeatureFlags externalSemaphoreFeatures; +} VkExternalSemaphoreProperties; + +typedef struct VkPhysicalDeviceMaintenance3Properties { + VkStructureType sType; + void* pNext; + uint32_t maxPerSetDescriptors; + VkDeviceSize maxMemoryAllocationSize; +} VkPhysicalDeviceMaintenance3Properties; + +typedef struct VkDescriptorSetLayoutSupport { + VkStructureType sType; + void* pNext; + VkBool32 supported; +} VkDescriptorSetLayoutSupport; + +typedef struct VkPhysicalDeviceShaderDrawParametersFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderDrawParameters; +} VkPhysicalDeviceShaderDrawParametersFeatures; + +typedef VkPhysicalDeviceShaderDrawParametersFeatures VkPhysicalDeviceShaderDrawParameterFeatures; + +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceVersion)(uint32_t* pApiVersion); +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos); +typedef VkResult 
(VKAPI_PTR *PFN_vkBindImageMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos); +typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeatures)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); +typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMask)(VkCommandBuffer commandBuffer, uint32_t deviceMask); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchBase)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroups)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkTrimCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags); +typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue2)(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversion)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion); +typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversion)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplate)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); +typedef void (VKAPI_PTR 
*PFN_vkDestroyDescriptorUpdateTemplate)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplate)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFenceProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphoreProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupport)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion( + uint32_t* pApiVersion); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2( + VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo* pBindInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2( + VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo* pBindInfos); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeatures( + VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMask( + VkCommandBuffer commandBuffer, + uint32_t deviceMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBase( + VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroups( + VkInstance instance, + uint32_t* pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2( + VkDevice device, + const VkImageMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2( + VkDevice device, + const VkBufferMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2( + VkDevice device, + const VkImageSparseMemoryRequirementsInfo2* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2* 
pImageFormatInfo, + VkImageFormatProperties2* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties2* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2* pMemoryProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkTrimCommandPool( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue2( + VkDevice device, + const VkDeviceQueueInfo2* pQueueInfo, + VkQueue* pQueue); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversion( + VkDevice device, + const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSamplerYcbcrConversion* pYcbcrConversion); + +VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversion( + VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplate( + VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplate( + VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplate( + VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void* pData); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, + VkExternalBufferProperties* pExternalBufferProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFenceProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, + VkExternalFenceProperties* pExternalFenceProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphoreProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, + VkExternalSemaphoreProperties* pExternalSemaphoreProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + VkDescriptorSetLayoutSupport* pSupport); +#endif + + +// VK_VERSION_1_2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_VERSION_1_2 1 +// Vulkan 1.2 version number +#define VK_API_VERSION_1_2 VK_MAKE_API_VERSION(0, 1, 2, 0)// Patch version should always be set to 0 + +#define VK_MAX_DRIVER_NAME_SIZE 256U +#define VK_MAX_DRIVER_INFO_SIZE 256U + +typedef enum VkDriverId { + VK_DRIVER_ID_AMD_PROPRIETARY = 1, + VK_DRIVER_ID_AMD_OPEN_SOURCE = 2, + VK_DRIVER_ID_MESA_RADV = 3, + VK_DRIVER_ID_NVIDIA_PROPRIETARY = 4, + VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS = 5, + VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA = 6, + VK_DRIVER_ID_IMAGINATION_PROPRIETARY = 7, + VK_DRIVER_ID_QUALCOMM_PROPRIETARY = 8, + VK_DRIVER_ID_ARM_PROPRIETARY = 9, + VK_DRIVER_ID_GOOGLE_SWIFTSHADER = 10, + VK_DRIVER_ID_GGP_PROPRIETARY = 11, + VK_DRIVER_ID_BROADCOM_PROPRIETARY = 12, + VK_DRIVER_ID_MESA_LLVMPIPE = 13, + VK_DRIVER_ID_MOLTENVK = 14, + VK_DRIVER_ID_COREAVI_PROPRIETARY = 15, + VK_DRIVER_ID_JUICE_PROPRIETARY = 16, + VK_DRIVER_ID_VERISILICON_PROPRIETARY = 17, + VK_DRIVER_ID_MESA_TURNIP = 18, + VK_DRIVER_ID_MESA_V3DV = 19, + VK_DRIVER_ID_MESA_PANVK = 20, + VK_DRIVER_ID_SAMSUNG_PROPRIETARY = 21, + VK_DRIVER_ID_MESA_VENUS = 22, + VK_DRIVER_ID_MESA_DOZEN = 23, + VK_DRIVER_ID_MESA_NVK = 24, + VK_DRIVER_ID_IMAGINATION_OPEN_SOURCE_MESA = 25, + VK_DRIVER_ID_MESA_HONEYKRISP = 26, + VK_DRIVER_ID_VULKAN_SC_EMULATION_ON_VULKAN = 27, + VK_DRIVER_ID_AMD_PROPRIETARY_KHR = VK_DRIVER_ID_AMD_PROPRIETARY, + VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = VK_DRIVER_ID_AMD_OPEN_SOURCE, + VK_DRIVER_ID_MESA_RADV_KHR = VK_DRIVER_ID_MESA_RADV, + VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR = VK_DRIVER_ID_NVIDIA_PROPRIETARY, + VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS, + VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA, + VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR = VK_DRIVER_ID_IMAGINATION_PROPRIETARY, + VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR = VK_DRIVER_ID_QUALCOMM_PROPRIETARY, + VK_DRIVER_ID_ARM_PROPRIETARY_KHR = VK_DRIVER_ID_ARM_PROPRIETARY, + VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR = VK_DRIVER_ID_GOOGLE_SWIFTSHADER, + VK_DRIVER_ID_GGP_PROPRIETARY_KHR = VK_DRIVER_ID_GGP_PROPRIETARY, + VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR = VK_DRIVER_ID_BROADCOM_PROPRIETARY, + VK_DRIVER_ID_MAX_ENUM = 0x7FFFFFFF +} VkDriverId; + +typedef enum VkShaderFloatControlsIndependence { + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY = 0, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL = 1, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE = 2, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_MAX_ENUM = 0x7FFFFFFF +} VkShaderFloatControlsIndependence; + +typedef enum VkSamplerReductionMode { + VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE = 0, + VK_SAMPLER_REDUCTION_MODE_MIN = 1, + VK_SAMPLER_REDUCTION_MODE_MAX = 2, + VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_RANGECLAMP_QCOM = 1000521000, + VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE, + VK_SAMPLER_REDUCTION_MODE_MIN_EXT = VK_SAMPLER_REDUCTION_MODE_MIN, + VK_SAMPLER_REDUCTION_MODE_MAX_EXT = VK_SAMPLER_REDUCTION_MODE_MAX, + VK_SAMPLER_REDUCTION_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerReductionMode; + +typedef enum VkSemaphoreType { + VK_SEMAPHORE_TYPE_BINARY = 0, + VK_SEMAPHORE_TYPE_TIMELINE = 1, + VK_SEMAPHORE_TYPE_BINARY_KHR = VK_SEMAPHORE_TYPE_BINARY, + 
VK_SEMAPHORE_TYPE_TIMELINE_KHR = VK_SEMAPHORE_TYPE_TIMELINE, + VK_SEMAPHORE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkSemaphoreType; + +typedef enum VkResolveModeFlagBits { + VK_RESOLVE_MODE_NONE = 0, + VK_RESOLVE_MODE_SAMPLE_ZERO_BIT = 0x00000001, + VK_RESOLVE_MODE_AVERAGE_BIT = 0x00000002, + VK_RESOLVE_MODE_MIN_BIT = 0x00000004, + VK_RESOLVE_MODE_MAX_BIT = 0x00000008, + VK_RESOLVE_MODE_EXTERNAL_FORMAT_DOWNSAMPLE_ANDROID = 0x00000010, + VK_RESOLVE_MODE_NONE_KHR = VK_RESOLVE_MODE_NONE, + VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT, + VK_RESOLVE_MODE_AVERAGE_BIT_KHR = VK_RESOLVE_MODE_AVERAGE_BIT, + VK_RESOLVE_MODE_MIN_BIT_KHR = VK_RESOLVE_MODE_MIN_BIT, + VK_RESOLVE_MODE_MAX_BIT_KHR = VK_RESOLVE_MODE_MAX_BIT, + VK_RESOLVE_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkResolveModeFlagBits; +typedef VkFlags VkResolveModeFlags; + +typedef enum VkDescriptorBindingFlagBits { + VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT = 0x00000001, + VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT = 0x00000002, + VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT = 0x00000004, + VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT = 0x00000008, + VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT, + VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, + VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT, + VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT, + VK_DESCRIPTOR_BINDING_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorBindingFlagBits; +typedef VkFlags VkDescriptorBindingFlags; + +typedef enum VkSemaphoreWaitFlagBits { + VK_SEMAPHORE_WAIT_ANY_BIT = 0x00000001, + VK_SEMAPHORE_WAIT_ANY_BIT_KHR = VK_SEMAPHORE_WAIT_ANY_BIT, + VK_SEMAPHORE_WAIT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSemaphoreWaitFlagBits; +typedef VkFlags VkSemaphoreWaitFlags; +typedef struct VkPhysicalDeviceVulkan11Features { + VkStructureType sType; + void* pNext; + VkBool32 storageBuffer16BitAccess; + VkBool32 uniformAndStorageBuffer16BitAccess; + VkBool32 storagePushConstant16; + VkBool32 storageInputOutput16; + VkBool32 multiview; + VkBool32 multiviewGeometryShader; + VkBool32 multiviewTessellationShader; + VkBool32 variablePointersStorageBuffer; + VkBool32 variablePointers; + VkBool32 protectedMemory; + VkBool32 samplerYcbcrConversion; + VkBool32 shaderDrawParameters; +} VkPhysicalDeviceVulkan11Features; + +typedef struct VkPhysicalDeviceVulkan11Properties { + VkStructureType sType; + void* pNext; + uint8_t deviceUUID[VK_UUID_SIZE]; + uint8_t driverUUID[VK_UUID_SIZE]; + uint8_t deviceLUID[VK_LUID_SIZE]; + uint32_t deviceNodeMask; + VkBool32 deviceLUIDValid; + uint32_t subgroupSize; + VkShaderStageFlags subgroupSupportedStages; + VkSubgroupFeatureFlags subgroupSupportedOperations; + VkBool32 subgroupQuadOperationsInAllStages; + VkPointClippingBehavior pointClippingBehavior; + uint32_t maxMultiviewViewCount; + uint32_t maxMultiviewInstanceIndex; + VkBool32 protectedNoFault; + uint32_t maxPerSetDescriptors; + VkDeviceSize maxMemoryAllocationSize; +} VkPhysicalDeviceVulkan11Properties; + +typedef struct VkPhysicalDeviceVulkan12Features { + VkStructureType sType; + void* pNext; + VkBool32 samplerMirrorClampToEdge; + VkBool32 drawIndirectCount; + VkBool32 storageBuffer8BitAccess; + VkBool32 uniformAndStorageBuffer8BitAccess; + VkBool32 storagePushConstant8; + VkBool32 shaderBufferInt64Atomics; + VkBool32 shaderSharedInt64Atomics; + 
VkBool32 shaderFloat16; + VkBool32 shaderInt8; + VkBool32 descriptorIndexing; + VkBool32 shaderInputAttachmentArrayDynamicIndexing; + VkBool32 shaderUniformTexelBufferArrayDynamicIndexing; + VkBool32 shaderStorageTexelBufferArrayDynamicIndexing; + VkBool32 shaderUniformBufferArrayNonUniformIndexing; + VkBool32 shaderSampledImageArrayNonUniformIndexing; + VkBool32 shaderStorageBufferArrayNonUniformIndexing; + VkBool32 shaderStorageImageArrayNonUniformIndexing; + VkBool32 shaderInputAttachmentArrayNonUniformIndexing; + VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing; + VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing; + VkBool32 descriptorBindingUniformBufferUpdateAfterBind; + VkBool32 descriptorBindingSampledImageUpdateAfterBind; + VkBool32 descriptorBindingStorageImageUpdateAfterBind; + VkBool32 descriptorBindingStorageBufferUpdateAfterBind; + VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingStorageTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingUpdateUnusedWhilePending; + VkBool32 descriptorBindingPartiallyBound; + VkBool32 descriptorBindingVariableDescriptorCount; + VkBool32 runtimeDescriptorArray; + VkBool32 samplerFilterMinmax; + VkBool32 scalarBlockLayout; + VkBool32 imagelessFramebuffer; + VkBool32 uniformBufferStandardLayout; + VkBool32 shaderSubgroupExtendedTypes; + VkBool32 separateDepthStencilLayouts; + VkBool32 hostQueryReset; + VkBool32 timelineSemaphore; + VkBool32 bufferDeviceAddress; + VkBool32 bufferDeviceAddressCaptureReplay; + VkBool32 bufferDeviceAddressMultiDevice; + VkBool32 vulkanMemoryModel; + VkBool32 vulkanMemoryModelDeviceScope; + VkBool32 vulkanMemoryModelAvailabilityVisibilityChains; + VkBool32 shaderOutputViewportIndex; + VkBool32 shaderOutputLayer; + VkBool32 subgroupBroadcastDynamicId; +} VkPhysicalDeviceVulkan12Features; + +typedef struct VkConformanceVersion { + uint8_t major; + uint8_t minor; + uint8_t subminor; + uint8_t patch; +} VkConformanceVersion; + +typedef struct VkPhysicalDeviceVulkan12Properties { + VkStructureType sType; + void* pNext; + VkDriverId driverID; + char driverName[VK_MAX_DRIVER_NAME_SIZE]; + char driverInfo[VK_MAX_DRIVER_INFO_SIZE]; + VkConformanceVersion conformanceVersion; + VkShaderFloatControlsIndependence denormBehaviorIndependence; + VkShaderFloatControlsIndependence roundingModeIndependence; + VkBool32 shaderSignedZeroInfNanPreserveFloat16; + VkBool32 shaderSignedZeroInfNanPreserveFloat32; + VkBool32 shaderSignedZeroInfNanPreserveFloat64; + VkBool32 shaderDenormPreserveFloat16; + VkBool32 shaderDenormPreserveFloat32; + VkBool32 shaderDenormPreserveFloat64; + VkBool32 shaderDenormFlushToZeroFloat16; + VkBool32 shaderDenormFlushToZeroFloat32; + VkBool32 shaderDenormFlushToZeroFloat64; + VkBool32 shaderRoundingModeRTEFloat16; + VkBool32 shaderRoundingModeRTEFloat32; + VkBool32 shaderRoundingModeRTEFloat64; + VkBool32 shaderRoundingModeRTZFloat16; + VkBool32 shaderRoundingModeRTZFloat32; + VkBool32 shaderRoundingModeRTZFloat64; + uint32_t maxUpdateAfterBindDescriptorsInAllPools; + VkBool32 shaderUniformBufferArrayNonUniformIndexingNative; + VkBool32 shaderSampledImageArrayNonUniformIndexingNative; + VkBool32 shaderStorageBufferArrayNonUniformIndexingNative; + VkBool32 shaderStorageImageArrayNonUniformIndexingNative; + VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative; + VkBool32 robustBufferAccessUpdateAfterBind; + VkBool32 quadDivergentImplicitLod; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers; + uint32_t 
maxPerStageDescriptorUpdateAfterBindUniformBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments; + uint32_t maxPerStageUpdateAfterBindResources; + uint32_t maxDescriptorSetUpdateAfterBindSamplers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments; + VkResolveModeFlags supportedDepthResolveModes; + VkResolveModeFlags supportedStencilResolveModes; + VkBool32 independentResolveNone; + VkBool32 independentResolve; + VkBool32 filterMinmaxSingleComponentFormats; + VkBool32 filterMinmaxImageComponentMapping; + uint64_t maxTimelineSemaphoreValueDifference; + VkSampleCountFlags framebufferIntegerColorSampleCounts; +} VkPhysicalDeviceVulkan12Properties; + +typedef struct VkImageFormatListCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t viewFormatCount; + const VkFormat* pViewFormats; +} VkImageFormatListCreateInfo; + +typedef struct VkAttachmentDescription2 { + VkStructureType sType; + const void* pNext; + VkAttachmentDescriptionFlags flags; + VkFormat format; + VkSampleCountFlagBits samples; + VkAttachmentLoadOp loadOp; + VkAttachmentStoreOp storeOp; + VkAttachmentLoadOp stencilLoadOp; + VkAttachmentStoreOp stencilStoreOp; + VkImageLayout initialLayout; + VkImageLayout finalLayout; +} VkAttachmentDescription2; + +typedef struct VkAttachmentReference2 { + VkStructureType sType; + const void* pNext; + uint32_t attachment; + VkImageLayout layout; + VkImageAspectFlags aspectMask; +} VkAttachmentReference2; + +typedef struct VkSubpassDescription2 { + VkStructureType sType; + const void* pNext; + VkSubpassDescriptionFlags flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t viewMask; + uint32_t inputAttachmentCount; + const VkAttachmentReference2* pInputAttachments; + uint32_t colorAttachmentCount; + const VkAttachmentReference2* pColorAttachments; + const VkAttachmentReference2* pResolveAttachments; + const VkAttachmentReference2* pDepthStencilAttachment; + uint32_t preserveAttachmentCount; + const uint32_t* pPreserveAttachments; +} VkSubpassDescription2; + +typedef struct VkSubpassDependency2 { + VkStructureType sType; + const void* pNext; + uint32_t srcSubpass; + uint32_t dstSubpass; + VkPipelineStageFlags srcStageMask; + VkPipelineStageFlags dstStageMask; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkDependencyFlags dependencyFlags; + int32_t viewOffset; +} VkSubpassDependency2; + +typedef struct VkRenderPassCreateInfo2 { + VkStructureType sType; + const void* pNext; + VkRenderPassCreateFlags flags; + uint32_t attachmentCount; + const VkAttachmentDescription2* pAttachments; + uint32_t subpassCount; + const VkSubpassDescription2* pSubpasses; + uint32_t dependencyCount; + const VkSubpassDependency2* pDependencies; + uint32_t correlatedViewMaskCount; + const uint32_t* pCorrelatedViewMasks; +} VkRenderPassCreateInfo2; + +typedef struct VkSubpassBeginInfo { + VkStructureType sType; + const void* pNext; + VkSubpassContents contents; +} VkSubpassBeginInfo; + +typedef struct VkSubpassEndInfo { + 
VkStructureType sType; + const void* pNext; +} VkSubpassEndInfo; + +typedef struct VkPhysicalDevice8BitStorageFeatures { + VkStructureType sType; + void* pNext; + VkBool32 storageBuffer8BitAccess; + VkBool32 uniformAndStorageBuffer8BitAccess; + VkBool32 storagePushConstant8; +} VkPhysicalDevice8BitStorageFeatures; + +typedef struct VkPhysicalDeviceDriverProperties { + VkStructureType sType; + void* pNext; + VkDriverId driverID; + char driverName[VK_MAX_DRIVER_NAME_SIZE]; + char driverInfo[VK_MAX_DRIVER_INFO_SIZE]; + VkConformanceVersion conformanceVersion; +} VkPhysicalDeviceDriverProperties; + +typedef struct VkPhysicalDeviceShaderAtomicInt64Features { + VkStructureType sType; + void* pNext; + VkBool32 shaderBufferInt64Atomics; + VkBool32 shaderSharedInt64Atomics; +} VkPhysicalDeviceShaderAtomicInt64Features; + +typedef struct VkPhysicalDeviceShaderFloat16Int8Features { + VkStructureType sType; + void* pNext; + VkBool32 shaderFloat16; + VkBool32 shaderInt8; +} VkPhysicalDeviceShaderFloat16Int8Features; + +typedef struct VkPhysicalDeviceFloatControlsProperties { + VkStructureType sType; + void* pNext; + VkShaderFloatControlsIndependence denormBehaviorIndependence; + VkShaderFloatControlsIndependence roundingModeIndependence; + VkBool32 shaderSignedZeroInfNanPreserveFloat16; + VkBool32 shaderSignedZeroInfNanPreserveFloat32; + VkBool32 shaderSignedZeroInfNanPreserveFloat64; + VkBool32 shaderDenormPreserveFloat16; + VkBool32 shaderDenormPreserveFloat32; + VkBool32 shaderDenormPreserveFloat64; + VkBool32 shaderDenormFlushToZeroFloat16; + VkBool32 shaderDenormFlushToZeroFloat32; + VkBool32 shaderDenormFlushToZeroFloat64; + VkBool32 shaderRoundingModeRTEFloat16; + VkBool32 shaderRoundingModeRTEFloat32; + VkBool32 shaderRoundingModeRTEFloat64; + VkBool32 shaderRoundingModeRTZFloat16; + VkBool32 shaderRoundingModeRTZFloat32; + VkBool32 shaderRoundingModeRTZFloat64; +} VkPhysicalDeviceFloatControlsProperties; + +typedef struct VkDescriptorSetLayoutBindingFlagsCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t bindingCount; + const VkDescriptorBindingFlags* pBindingFlags; +} VkDescriptorSetLayoutBindingFlagsCreateInfo; + +typedef struct VkPhysicalDeviceDescriptorIndexingFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderInputAttachmentArrayDynamicIndexing; + VkBool32 shaderUniformTexelBufferArrayDynamicIndexing; + VkBool32 shaderStorageTexelBufferArrayDynamicIndexing; + VkBool32 shaderUniformBufferArrayNonUniformIndexing; + VkBool32 shaderSampledImageArrayNonUniformIndexing; + VkBool32 shaderStorageBufferArrayNonUniformIndexing; + VkBool32 shaderStorageImageArrayNonUniformIndexing; + VkBool32 shaderInputAttachmentArrayNonUniformIndexing; + VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing; + VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing; + VkBool32 descriptorBindingUniformBufferUpdateAfterBind; + VkBool32 descriptorBindingSampledImageUpdateAfterBind; + VkBool32 descriptorBindingStorageImageUpdateAfterBind; + VkBool32 descriptorBindingStorageBufferUpdateAfterBind; + VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingStorageTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingUpdateUnusedWhilePending; + VkBool32 descriptorBindingPartiallyBound; + VkBool32 descriptorBindingVariableDescriptorCount; + VkBool32 runtimeDescriptorArray; +} VkPhysicalDeviceDescriptorIndexingFeatures; + +typedef struct VkPhysicalDeviceDescriptorIndexingProperties { + VkStructureType sType; + void* pNext; + uint32_t 
maxUpdateAfterBindDescriptorsInAllPools; + VkBool32 shaderUniformBufferArrayNonUniformIndexingNative; + VkBool32 shaderSampledImageArrayNonUniformIndexingNative; + VkBool32 shaderStorageBufferArrayNonUniformIndexingNative; + VkBool32 shaderStorageImageArrayNonUniformIndexingNative; + VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative; + VkBool32 robustBufferAccessUpdateAfterBind; + VkBool32 quadDivergentImplicitLod; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments; + uint32_t maxPerStageUpdateAfterBindResources; + uint32_t maxDescriptorSetUpdateAfterBindSamplers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments; +} VkPhysicalDeviceDescriptorIndexingProperties; + +typedef struct VkDescriptorSetVariableDescriptorCountAllocateInfo { + VkStructureType sType; + const void* pNext; + uint32_t descriptorSetCount; + const uint32_t* pDescriptorCounts; +} VkDescriptorSetVariableDescriptorCountAllocateInfo; + +typedef struct VkDescriptorSetVariableDescriptorCountLayoutSupport { + VkStructureType sType; + void* pNext; + uint32_t maxVariableDescriptorCount; +} VkDescriptorSetVariableDescriptorCountLayoutSupport; + +typedef struct VkSubpassDescriptionDepthStencilResolve { + VkStructureType sType; + const void* pNext; + VkResolveModeFlagBits depthResolveMode; + VkResolveModeFlagBits stencilResolveMode; + const VkAttachmentReference2* pDepthStencilResolveAttachment; +} VkSubpassDescriptionDepthStencilResolve; + +typedef struct VkPhysicalDeviceDepthStencilResolveProperties { + VkStructureType sType; + void* pNext; + VkResolveModeFlags supportedDepthResolveModes; + VkResolveModeFlags supportedStencilResolveModes; + VkBool32 independentResolveNone; + VkBool32 independentResolve; +} VkPhysicalDeviceDepthStencilResolveProperties; + +typedef struct VkPhysicalDeviceScalarBlockLayoutFeatures { + VkStructureType sType; + void* pNext; + VkBool32 scalarBlockLayout; +} VkPhysicalDeviceScalarBlockLayoutFeatures; + +typedef struct VkImageStencilUsageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageUsageFlags stencilUsage; +} VkImageStencilUsageCreateInfo; + +typedef struct VkSamplerReductionModeCreateInfo { + VkStructureType sType; + const void* pNext; + VkSamplerReductionMode reductionMode; +} VkSamplerReductionModeCreateInfo; + +typedef struct VkPhysicalDeviceSamplerFilterMinmaxProperties { + VkStructureType sType; + void* pNext; + VkBool32 filterMinmaxSingleComponentFormats; + VkBool32 filterMinmaxImageComponentMapping; +} VkPhysicalDeviceSamplerFilterMinmaxProperties; + +typedef struct VkPhysicalDeviceVulkanMemoryModelFeatures { + VkStructureType sType; + void* pNext; + VkBool32 vulkanMemoryModel; + VkBool32 vulkanMemoryModelDeviceScope; + VkBool32 vulkanMemoryModelAvailabilityVisibilityChains; +} VkPhysicalDeviceVulkanMemoryModelFeatures; + +typedef struct VkPhysicalDeviceImagelessFramebufferFeatures { + 
VkStructureType sType; + void* pNext; + VkBool32 imagelessFramebuffer; +} VkPhysicalDeviceImagelessFramebufferFeatures; + +typedef struct VkFramebufferAttachmentImageInfo { + VkStructureType sType; + const void* pNext; + VkImageCreateFlags flags; + VkImageUsageFlags usage; + uint32_t width; + uint32_t height; + uint32_t layerCount; + uint32_t viewFormatCount; + const VkFormat* pViewFormats; +} VkFramebufferAttachmentImageInfo; + +typedef struct VkFramebufferAttachmentsCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t attachmentImageInfoCount; + const VkFramebufferAttachmentImageInfo* pAttachmentImageInfos; +} VkFramebufferAttachmentsCreateInfo; + +typedef struct VkRenderPassAttachmentBeginInfo { + VkStructureType sType; + const void* pNext; + uint32_t attachmentCount; + const VkImageView* pAttachments; +} VkRenderPassAttachmentBeginInfo; + +typedef struct VkPhysicalDeviceUniformBufferStandardLayoutFeatures { + VkStructureType sType; + void* pNext; + VkBool32 uniformBufferStandardLayout; +} VkPhysicalDeviceUniformBufferStandardLayoutFeatures; + +typedef struct VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupExtendedTypes; +} VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures; + +typedef struct VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures { + VkStructureType sType; + void* pNext; + VkBool32 separateDepthStencilLayouts; +} VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures; + +typedef struct VkAttachmentReferenceStencilLayout { + VkStructureType sType; + void* pNext; + VkImageLayout stencilLayout; +} VkAttachmentReferenceStencilLayout; + +typedef struct VkAttachmentDescriptionStencilLayout { + VkStructureType sType; + void* pNext; + VkImageLayout stencilInitialLayout; + VkImageLayout stencilFinalLayout; +} VkAttachmentDescriptionStencilLayout; + +typedef struct VkPhysicalDeviceHostQueryResetFeatures { + VkStructureType sType; + void* pNext; + VkBool32 hostQueryReset; +} VkPhysicalDeviceHostQueryResetFeatures; + +typedef struct VkPhysicalDeviceTimelineSemaphoreFeatures { + VkStructureType sType; + void* pNext; + VkBool32 timelineSemaphore; +} VkPhysicalDeviceTimelineSemaphoreFeatures; + +typedef struct VkPhysicalDeviceTimelineSemaphoreProperties { + VkStructureType sType; + void* pNext; + uint64_t maxTimelineSemaphoreValueDifference; +} VkPhysicalDeviceTimelineSemaphoreProperties; + +typedef struct VkSemaphoreTypeCreateInfo { + VkStructureType sType; + const void* pNext; + VkSemaphoreType semaphoreType; + uint64_t initialValue; +} VkSemaphoreTypeCreateInfo; + +typedef struct VkTimelineSemaphoreSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreValueCount; + const uint64_t* pWaitSemaphoreValues; + uint32_t signalSemaphoreValueCount; + const uint64_t* pSignalSemaphoreValues; +} VkTimelineSemaphoreSubmitInfo; + +typedef struct VkSemaphoreWaitInfo { + VkStructureType sType; + const void* pNext; + VkSemaphoreWaitFlags flags; + uint32_t semaphoreCount; + const VkSemaphore* pSemaphores; + const uint64_t* pValues; +} VkSemaphoreWaitInfo; + +typedef struct VkSemaphoreSignalInfo { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + uint64_t value; +} VkSemaphoreSignalInfo; + +typedef struct VkPhysicalDeviceBufferDeviceAddressFeatures { + VkStructureType sType; + void* pNext; + VkBool32 bufferDeviceAddress; + VkBool32 bufferDeviceAddressCaptureReplay; + VkBool32 bufferDeviceAddressMultiDevice; +} VkPhysicalDeviceBufferDeviceAddressFeatures; + +typedef 
struct VkBufferDeviceAddressInfo { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; +} VkBufferDeviceAddressInfo; + +typedef struct VkBufferOpaqueCaptureAddressCreateInfo { + VkStructureType sType; + const void* pNext; + uint64_t opaqueCaptureAddress; +} VkBufferOpaqueCaptureAddressCreateInfo; + +typedef struct VkMemoryOpaqueCaptureAddressAllocateInfo { + VkStructureType sType; + const void* pNext; + uint64_t opaqueCaptureAddress; +} VkMemoryOpaqueCaptureAddressAllocateInfo; + +typedef struct VkDeviceMemoryOpaqueCaptureAddressInfo { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; +} VkDeviceMemoryOpaqueCaptureAddressInfo; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCount)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCount)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2)(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2)(VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo); +typedef void (VKAPI_PTR *PFN_vkResetQueryPool)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreCounterValue)(VkDevice device, VkSemaphore semaphore, uint64_t* pValue); +typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphores)(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout); +typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphore)(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo); +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddress)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureAddress)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceMemoryOpaqueCaptureAddress)(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCount( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCount( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2( + VkDevice device, + const VkRenderPassCreateInfo2* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2( + VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + const VkSubpassBeginInfo* pSubpassBeginInfo); + +VKAPI_ATTR void VKAPI_CALL 
vkCmdNextSubpass2( + VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo* pSubpassBeginInfo, + const VkSubpassEndInfo* pSubpassEndInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2( + VkCommandBuffer commandBuffer, + const VkSubpassEndInfo* pSubpassEndInfo); + +VKAPI_ATTR void VKAPI_CALL vkResetQueryPool( + VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValue( + VkDevice device, + VkSemaphore semaphore, + uint64_t* pValue); + +VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphores( + VkDevice device, + const VkSemaphoreWaitInfo* pWaitInfo, + uint64_t timeout); + +VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphore( + VkDevice device, + const VkSemaphoreSignalInfo* pSignalInfo); + +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddress( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddress( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddress( + VkDevice device, + const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); +#endif + + +// VK_VERSION_1_3 is a preprocessor guard. Do not pass it to API calls. +#define VK_VERSION_1_3 1 +// Vulkan 1.3 version number +#define VK_API_VERSION_1_3 VK_MAKE_API_VERSION(0, 1, 3, 0)// Patch version should always be set to 0 + +typedef uint64_t VkFlags64; +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPrivateDataSlot) + +typedef enum VkPipelineCreationFeedbackFlagBits { + VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT = 0x00000001, + VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT = 0x00000002, + VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT = 0x00000004, + VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT, + VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT, + VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT, + VK_PIPELINE_CREATION_FEEDBACK_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCreationFeedbackFlagBits; +typedef VkFlags VkPipelineCreationFeedbackFlags; + +typedef enum VkToolPurposeFlagBits { + VK_TOOL_PURPOSE_VALIDATION_BIT = 0x00000001, + VK_TOOL_PURPOSE_PROFILING_BIT = 0x00000002, + VK_TOOL_PURPOSE_TRACING_BIT = 0x00000004, + VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT = 0x00000008, + VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT = 0x00000010, + VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT = 0x00000020, + VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT = 0x00000040, + VK_TOOL_PURPOSE_VALIDATION_BIT_EXT = VK_TOOL_PURPOSE_VALIDATION_BIT, + VK_TOOL_PURPOSE_PROFILING_BIT_EXT = VK_TOOL_PURPOSE_PROFILING_BIT, + VK_TOOL_PURPOSE_TRACING_BIT_EXT = VK_TOOL_PURPOSE_TRACING_BIT, + VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT, + VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT, + VK_TOOL_PURPOSE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkToolPurposeFlagBits; +typedef VkFlags VkToolPurposeFlags; +typedef VkFlags VkPrivateDataSlotCreateFlags; +typedef VkFlags64 VkPipelineStageFlags2; + +// Flag bits for VkPipelineStageFlagBits2 +typedef VkFlags64 VkPipelineStageFlagBits2; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_NONE = 0ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT = 0x00000001ULL; +static const 
VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT = 0x00000002ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT = 0x00000004ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT = 0x00000008ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT = 0x00000040ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT = 0x00000080ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT = 0x00000100ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT = 0x00000200ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT = 0x00000800ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT = 0x00001000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFER_BIT = 0x00001000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT = 0x00002000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_HOST_BIT = 0x00004000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT = 0x00008000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT = 0x00010000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COPY_BIT = 0x100000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RESOLVE_BIT = 0x200000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BLIT_BIT = 0x400000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLEAR_BIT = 0x800000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT = 0x1000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT = 0x2000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT = 0x4000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR = 0x04000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR = 0x08000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_NONE_KHR = 0ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR = 0x00000001ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR = 0x00000002ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR = 0x00000004ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR = 0x00000008ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR = 0x00000010ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR = 0x00000020ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR = 0x00000040ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR = 0x00000080ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR = 0x00000100ULL; +static const VkPipelineStageFlagBits2 
VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR = 0x00000200ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR = 0x00000400ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR = 0x00000800ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR = 0x00001000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR = 0x00001000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR = 0x00002000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_HOST_BIT_KHR = 0x00004000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR = 0x00008000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR = 0x00010000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COPY_BIT_KHR = 0x100000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR = 0x200000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BLIT_BIT_KHR = 0x400000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR = 0x800000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR = 0x1000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR = 0x2000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR = 0x4000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV = 0x00020000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_EXT = 0x00020000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV = 0x00400000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR = 0x00200000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV = 0x00200000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV = 0x02000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV = 0x00080000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV = 0x00100000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT = 0x00080000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT = 0x00100000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI = 0x8000000000ULL; +// VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI is a deprecated alias +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI = 0x8000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI = 0x10000000000ULL; +static const VkPipelineStageFlagBits2 
VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR = 0x10000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT = 0x40000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLUSTER_CULLING_SHADER_BIT_HUAWEI = 0x20000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV = 0x20000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CONVERT_COOPERATIVE_VECTOR_MATRIX_BIT_NV = 0x100000000000ULL; + +typedef VkFlags64 VkAccessFlags2; + +// Flag bits for VkAccessFlagBits2 +typedef VkFlags64 VkAccessFlagBits2; +static const VkAccessFlagBits2 VK_ACCESS_2_NONE = 0ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT = 0x00000001ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDEX_READ_BIT = 0x00000002ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_UNIFORM_READ_BIT = 0x00000008ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT = 0x00000010ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_READ_BIT = 0x00000020ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_WRITE_BIT = 0x00000040ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT = 0x00000080ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_READ_BIT = 0x00000800ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_WRITE_BIT = 0x00001000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_READ_BIT = 0x00002000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_WRITE_BIT = 0x00004000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_READ_BIT = 0x00008000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_WRITE_BIT = 0x00010000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_SAMPLED_READ_BIT = 0x100000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_READ_BIT = 0x200000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT = 0x400000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR = 0x800000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR = 0x1000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR = 0x2000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR = 0x4000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_TILE_ATTACHMENT_READ_BIT_QCOM = 0x8000000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_TILE_ATTACHMENT_WRITE_BIT_QCOM = 0x10000000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_NONE_KHR = 0ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR = 0x00000001ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDEX_READ_BIT_KHR = 0x00000002ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR = 0x00000004ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_UNIFORM_READ_BIT_KHR = 0x00000008ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR = 0x00000010ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_READ_BIT_KHR = 0x00000020ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_WRITE_BIT_KHR = 
0x00000040ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR = 0x00000080ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR = 0x00000100ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR = 0x00000200ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR = 0x00000400ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_READ_BIT_KHR = 0x00000800ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR = 0x00001000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_READ_BIT_KHR = 0x00002000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_WRITE_BIT_KHR = 0x00004000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_READ_BIT_KHR = 0x00008000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_WRITE_BIT_KHR = 0x00010000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR = 0x100000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR = 0x200000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR = 0x400000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_EXT = 0x00020000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_EXT = 0x00040000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV = 0x00200000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 0x00400000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DESCRIPTOR_BUFFER_READ_BIT_EXT = 0x20000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI = 0x8000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR = 0x10000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MICROMAP_READ_BIT_EXT = 0x100000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT = 0x200000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV = 0x40000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV = 0x80000000000ULL; + + +typedef enum VkSubmitFlagBits { + VK_SUBMIT_PROTECTED_BIT = 0x00000001, + VK_SUBMIT_PROTECTED_BIT_KHR = VK_SUBMIT_PROTECTED_BIT, + VK_SUBMIT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSubmitFlagBits; 
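/*
 * Illustrative sketch only; not part of vulkan_core.h. It shows how the
 * 64-bit VkPipelineStageFlags2 / VkAccessFlags2 values listed above are
 * typically combined in a VkMemoryBarrier2 and recorded through
 * vkCmdPipelineBarrier2 (both declared further below in this header).
 * `cmd` is assumed to be a command buffer in the recording state.
 *
 *   VkMemoryBarrier2 barrier = {
 *       .sType         = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2,
 *       .srcStageMask  = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT,
 *       .srcAccessMask = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT,
 *       .dstStageMask  = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT,
 *       .dstAccessMask = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT,
 *   };
 *   VkDependencyInfo dep = {
 *       .sType              = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
 *       .memoryBarrierCount = 1,
 *       .pMemoryBarriers    = &barrier,
 *   };
 *   vkCmdPipelineBarrier2(cmd, &dep);
 */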
+typedef VkFlags VkSubmitFlags; + +typedef enum VkRenderingFlagBits { + VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT = 0x00000001, + VK_RENDERING_SUSPENDING_BIT = 0x00000002, + VK_RENDERING_RESUMING_BIT = 0x00000004, + VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x00000008, + VK_RENDERING_CONTENTS_INLINE_BIT_KHR = 0x00000010, + VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT_KHR = VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT, + VK_RENDERING_SUSPENDING_BIT_KHR = VK_RENDERING_SUSPENDING_BIT, + VK_RENDERING_RESUMING_BIT_KHR = VK_RENDERING_RESUMING_BIT, + VK_RENDERING_CONTENTS_INLINE_BIT_EXT = VK_RENDERING_CONTENTS_INLINE_BIT_KHR, + VK_RENDERING_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkRenderingFlagBits; +typedef VkFlags VkRenderingFlags; +typedef VkFlags64 VkFormatFeatureFlags2; + +// Flag bits for VkFormatFeatureFlagBits2 +typedef VkFlags64 VkFormatFeatureFlagBits2; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT = 0x00000001ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT = 0x00000002ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT = 0x00000010ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT = 0x00000040ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT = 0x00000080ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_SRC_BIT = 0x00000400ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_DST_BIT = 0x00000800ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT = 0x00004000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT = 0x00008000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT = 0x00020000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT = 0x00040000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT = 0x00080000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT = 0x00100000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DISJOINT_BIT = 0x00400000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT = 0x00800000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT = 0x80000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT = 0x100000000ULL; +static 
const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT = 0x200000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT = 0x00002000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT = 0x400000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR = 0x02000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR = 0x04000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x40000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT = 0x400000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR = 0x08000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR = 0x10000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT_KHR = 0x00000001ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT_KHR = 0x00000002ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT_KHR = 0x00000004ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR = 0x00000008ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT_KHR = 0x00000010ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT_KHR = 0x00000020ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT_KHR = 0x00000040ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT_KHR = 0x00000080ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT_KHR = 0x00000100ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT_KHR = 0x00000200ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_SRC_BIT_KHR = 0x00000400ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_DST_BIT_KHR = 0x00000800ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT_KHR = 0x00001000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT_KHR = 0x00004000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT_KHR = 0x00008000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = 0x00020000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = 0x00040000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = 0x00080000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = 0x00100000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = 0x00200000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DISJOINT_BIT_KHR = 0x00400000ULL; +static const VkFormatFeatureFlagBits2 
VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT_KHR = 0x00800000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT_KHR = 0x80000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT_KHR = 0x100000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT_KHR = 0x200000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT_KHR = 0x00010000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = 0x00002000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_RADIUS_BUFFER_BIT_NV = 0x8000000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV = 0x4000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM = 0x400000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM = 0x800000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM = 0x1000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM = 0x2000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV = 0x10000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV = 0x20000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV = 0x40000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR = 0x2000000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR = 0x4000000000000ULL; + +typedef struct VkPhysicalDeviceVulkan13Features { + VkStructureType sType; + void* pNext; + VkBool32 robustImageAccess; + VkBool32 inlineUniformBlock; + VkBool32 descriptorBindingInlineUniformBlockUpdateAfterBind; + VkBool32 pipelineCreationCacheControl; + VkBool32 privateData; + VkBool32 shaderDemoteToHelperInvocation; + VkBool32 shaderTerminateInvocation; + VkBool32 subgroupSizeControl; + VkBool32 computeFullSubgroups; + VkBool32 synchronization2; + VkBool32 textureCompressionASTC_HDR; + VkBool32 shaderZeroInitializeWorkgroupMemory; + VkBool32 dynamicRendering; + VkBool32 shaderIntegerDotProduct; + VkBool32 maintenance4; +} VkPhysicalDeviceVulkan13Features; + +typedef struct VkPhysicalDeviceVulkan13Properties { + VkStructureType sType; + void* pNext; + uint32_t minSubgroupSize; + uint32_t maxSubgroupSize; + uint32_t maxComputeWorkgroupSubgroups; + VkShaderStageFlags requiredSubgroupSizeStages; + uint32_t maxInlineUniformBlockSize; + uint32_t maxPerStageDescriptorInlineUniformBlocks; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks; + uint32_t maxDescriptorSetInlineUniformBlocks; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks; + uint32_t maxInlineUniformTotalSize; + VkBool32 integerDotProduct8BitUnsignedAccelerated; + VkBool32 integerDotProduct8BitSignedAccelerated; + VkBool32 integerDotProduct8BitMixedSignednessAccelerated; + VkBool32 integerDotProduct4x8BitPackedUnsignedAccelerated; + VkBool32 integerDotProduct4x8BitPackedSignedAccelerated; + VkBool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated; + VkBool32 integerDotProduct16BitUnsignedAccelerated; + VkBool32 integerDotProduct16BitSignedAccelerated; + VkBool32 
integerDotProduct16BitMixedSignednessAccelerated; + VkBool32 integerDotProduct32BitUnsignedAccelerated; + VkBool32 integerDotProduct32BitSignedAccelerated; + VkBool32 integerDotProduct32BitMixedSignednessAccelerated; + VkBool32 integerDotProduct64BitUnsignedAccelerated; + VkBool32 integerDotProduct64BitSignedAccelerated; + VkBool32 integerDotProduct64BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated; + VkDeviceSize storageTexelBufferOffsetAlignmentBytes; + VkBool32 storageTexelBufferOffsetSingleTexelAlignment; + VkDeviceSize uniformTexelBufferOffsetAlignmentBytes; + VkBool32 uniformTexelBufferOffsetSingleTexelAlignment; + VkDeviceSize maxBufferSize; +} VkPhysicalDeviceVulkan13Properties; + +typedef struct VkPipelineCreationFeedback { + VkPipelineCreationFeedbackFlags flags; + uint64_t duration; +} VkPipelineCreationFeedback; + +typedef struct VkPipelineCreationFeedbackCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreationFeedback* pPipelineCreationFeedback; + uint32_t pipelineStageCreationFeedbackCount; + VkPipelineCreationFeedback* pPipelineStageCreationFeedbacks; +} VkPipelineCreationFeedbackCreateInfo; + +typedef struct VkPhysicalDeviceShaderTerminateInvocationFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderTerminateInvocation; +} VkPhysicalDeviceShaderTerminateInvocationFeatures; + +typedef struct VkPhysicalDeviceToolProperties { + VkStructureType sType; + void* pNext; + char name[VK_MAX_EXTENSION_NAME_SIZE]; + char version[VK_MAX_EXTENSION_NAME_SIZE]; + VkToolPurposeFlags purposes; + char description[VK_MAX_DESCRIPTION_SIZE]; + char layer[VK_MAX_EXTENSION_NAME_SIZE]; +} VkPhysicalDeviceToolProperties; + +typedef struct VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderDemoteToHelperInvocation; +} VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures; + +typedef struct VkPhysicalDevicePrivateDataFeatures { + VkStructureType sType; + void* pNext; + VkBool32 privateData; +} VkPhysicalDevicePrivateDataFeatures; + +typedef struct VkDevicePrivateDataCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t privateDataSlotRequestCount; +} VkDevicePrivateDataCreateInfo; + +typedef struct VkPrivateDataSlotCreateInfo { + VkStructureType sType; + const void* pNext; + VkPrivateDataSlotCreateFlags flags; +} VkPrivateDataSlotCreateInfo; + 
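/*
 * Illustrative sketch only; not part of vulkan_core.h. It shows the minimal
 * private-data flow built from the types above: create a VkPrivateDataSlot,
 * attach a 64-bit value to an object handle, read it back, then destroy the
 * slot (the corresponding entry points are declared further below).
 * `device` and `image` are assumed to be valid handles.
 *
 *   VkPrivateDataSlotCreateInfo slot_ci = {
 *       .sType = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO,
 *   };
 *   VkPrivateDataSlot slot;
 *   vkCreatePrivateDataSlot(device, &slot_ci, NULL, &slot);
 *   vkSetPrivateData(device, VK_OBJECT_TYPE_IMAGE, (uint64_t)image, slot, 0xC0FFEEULL);
 *   uint64_t value = 0;
 *   vkGetPrivateData(device, VK_OBJECT_TYPE_IMAGE, (uint64_t)image, slot, &value);
 *   vkDestroyPrivateDataSlot(device, slot, NULL);
 */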
+typedef struct VkPhysicalDevicePipelineCreationCacheControlFeatures { + VkStructureType sType; + void* pNext; + VkBool32 pipelineCreationCacheControl; +} VkPhysicalDevicePipelineCreationCacheControlFeatures; + +typedef struct VkMemoryBarrier2 { + VkStructureType sType; + const void* pNext; + VkPipelineStageFlags2 srcStageMask; + VkAccessFlags2 srcAccessMask; + VkPipelineStageFlags2 dstStageMask; + VkAccessFlags2 dstAccessMask; +} VkMemoryBarrier2; + +typedef struct VkBufferMemoryBarrier2 { + VkStructureType sType; + const void* pNext; + VkPipelineStageFlags2 srcStageMask; + VkAccessFlags2 srcAccessMask; + VkPipelineStageFlags2 dstStageMask; + VkAccessFlags2 dstAccessMask; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; +} VkBufferMemoryBarrier2; + +typedef struct VkImageMemoryBarrier2 { + VkStructureType sType; + const void* pNext; + VkPipelineStageFlags2 srcStageMask; + VkAccessFlags2 srcAccessMask; + VkPipelineStageFlags2 dstStageMask; + VkAccessFlags2 dstAccessMask; + VkImageLayout oldLayout; + VkImageLayout newLayout; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkImage image; + VkImageSubresourceRange subresourceRange; +} VkImageMemoryBarrier2; + +typedef struct VkDependencyInfo { + VkStructureType sType; + const void* pNext; + VkDependencyFlags dependencyFlags; + uint32_t memoryBarrierCount; + const VkMemoryBarrier2* pMemoryBarriers; + uint32_t bufferMemoryBarrierCount; + const VkBufferMemoryBarrier2* pBufferMemoryBarriers; + uint32_t imageMemoryBarrierCount; + const VkImageMemoryBarrier2* pImageMemoryBarriers; +} VkDependencyInfo; + +typedef struct VkSemaphoreSubmitInfo { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + uint64_t value; + VkPipelineStageFlags2 stageMask; + uint32_t deviceIndex; +} VkSemaphoreSubmitInfo; + +typedef struct VkCommandBufferSubmitInfo { + VkStructureType sType; + const void* pNext; + VkCommandBuffer commandBuffer; + uint32_t deviceMask; +} VkCommandBufferSubmitInfo; + +typedef struct VkSubmitInfo2 { + VkStructureType sType; + const void* pNext; + VkSubmitFlags flags; + uint32_t waitSemaphoreInfoCount; + const VkSemaphoreSubmitInfo* pWaitSemaphoreInfos; + uint32_t commandBufferInfoCount; + const VkCommandBufferSubmitInfo* pCommandBufferInfos; + uint32_t signalSemaphoreInfoCount; + const VkSemaphoreSubmitInfo* pSignalSemaphoreInfos; +} VkSubmitInfo2; + +typedef struct VkPhysicalDeviceSynchronization2Features { + VkStructureType sType; + void* pNext; + VkBool32 synchronization2; +} VkPhysicalDeviceSynchronization2Features; + +typedef struct VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderZeroInitializeWorkgroupMemory; +} VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + +typedef struct VkPhysicalDeviceImageRobustnessFeatures { + VkStructureType sType; + void* pNext; + VkBool32 robustImageAccess; +} VkPhysicalDeviceImageRobustnessFeatures; + +typedef struct VkBufferCopy2 { + VkStructureType sType; + const void* pNext; + VkDeviceSize srcOffset; + VkDeviceSize dstOffset; + VkDeviceSize size; +} VkBufferCopy2; + +typedef struct VkCopyBufferInfo2 { + VkStructureType sType; + const void* pNext; + VkBuffer srcBuffer; + VkBuffer dstBuffer; + uint32_t regionCount; + const VkBufferCopy2* pRegions; +} VkCopyBufferInfo2; + +typedef struct VkImageCopy2 { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + 
VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageCopy2; + +typedef struct VkCopyImageInfo2 { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageCopy2* pRegions; +} VkCopyImageInfo2; + +typedef struct VkBufferImageCopy2 { + VkStructureType sType; + const void* pNext; + VkDeviceSize bufferOffset; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkBufferImageCopy2; + +typedef struct VkCopyBufferToImageInfo2 { + VkStructureType sType; + const void* pNext; + VkBuffer srcBuffer; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkBufferImageCopy2* pRegions; +} VkCopyBufferToImageInfo2; + +typedef struct VkCopyImageToBufferInfo2 { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkBuffer dstBuffer; + uint32_t regionCount; + const VkBufferImageCopy2* pRegions; +} VkCopyImageToBufferInfo2; + +typedef struct VkImageBlit2 { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffsets[2]; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffsets[2]; +} VkImageBlit2; + +typedef struct VkBlitImageInfo2 { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageBlit2* pRegions; + VkFilter filter; +} VkBlitImageInfo2; + +typedef struct VkImageResolve2 { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageResolve2; + +typedef struct VkResolveImageInfo2 { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageResolve2* pRegions; +} VkResolveImageInfo2; + +typedef struct VkPhysicalDeviceSubgroupSizeControlFeatures { + VkStructureType sType; + void* pNext; + VkBool32 subgroupSizeControl; + VkBool32 computeFullSubgroups; +} VkPhysicalDeviceSubgroupSizeControlFeatures; + +typedef struct VkPhysicalDeviceSubgroupSizeControlProperties { + VkStructureType sType; + void* pNext; + uint32_t minSubgroupSize; + uint32_t maxSubgroupSize; + uint32_t maxComputeWorkgroupSubgroups; + VkShaderStageFlags requiredSubgroupSizeStages; +} VkPhysicalDeviceSubgroupSizeControlProperties; + +typedef struct VkPipelineShaderStageRequiredSubgroupSizeCreateInfo { + VkStructureType sType; + void* pNext; + uint32_t requiredSubgroupSize; +} VkPipelineShaderStageRequiredSubgroupSizeCreateInfo; + +typedef struct VkPhysicalDeviceInlineUniformBlockFeatures { + VkStructureType sType; + void* pNext; + VkBool32 inlineUniformBlock; + VkBool32 descriptorBindingInlineUniformBlockUpdateAfterBind; +} VkPhysicalDeviceInlineUniformBlockFeatures; + +typedef struct VkPhysicalDeviceInlineUniformBlockProperties { + VkStructureType sType; + void* pNext; + uint32_t maxInlineUniformBlockSize; + uint32_t maxPerStageDescriptorInlineUniformBlocks; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks; + uint32_t maxDescriptorSetInlineUniformBlocks; + uint32_t 
maxDescriptorSetUpdateAfterBindInlineUniformBlocks; +} VkPhysicalDeviceInlineUniformBlockProperties; + +typedef struct VkWriteDescriptorSetInlineUniformBlock { + VkStructureType sType; + const void* pNext; + uint32_t dataSize; + const void* pData; +} VkWriteDescriptorSetInlineUniformBlock; + +typedef struct VkDescriptorPoolInlineUniformBlockCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t maxInlineUniformBlockBindings; +} VkDescriptorPoolInlineUniformBlockCreateInfo; + +typedef struct VkPhysicalDeviceTextureCompressionASTCHDRFeatures { + VkStructureType sType; + void* pNext; + VkBool32 textureCompressionASTC_HDR; +} VkPhysicalDeviceTextureCompressionASTCHDRFeatures; + +typedef struct VkRenderingAttachmentInfo { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkImageLayout imageLayout; + VkResolveModeFlagBits resolveMode; + VkImageView resolveImageView; + VkImageLayout resolveImageLayout; + VkAttachmentLoadOp loadOp; + VkAttachmentStoreOp storeOp; + VkClearValue clearValue; +} VkRenderingAttachmentInfo; + +typedef struct VkRenderingInfo { + VkStructureType sType; + const void* pNext; + VkRenderingFlags flags; + VkRect2D renderArea; + uint32_t layerCount; + uint32_t viewMask; + uint32_t colorAttachmentCount; + const VkRenderingAttachmentInfo* pColorAttachments; + const VkRenderingAttachmentInfo* pDepthAttachment; + const VkRenderingAttachmentInfo* pStencilAttachment; +} VkRenderingInfo; + +typedef struct VkPipelineRenderingCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t viewMask; + uint32_t colorAttachmentCount; + const VkFormat* pColorAttachmentFormats; + VkFormat depthAttachmentFormat; + VkFormat stencilAttachmentFormat; +} VkPipelineRenderingCreateInfo; + +typedef struct VkPhysicalDeviceDynamicRenderingFeatures { + VkStructureType sType; + void* pNext; + VkBool32 dynamicRendering; +} VkPhysicalDeviceDynamicRenderingFeatures; + +typedef struct VkCommandBufferInheritanceRenderingInfo { + VkStructureType sType; + const void* pNext; + VkRenderingFlags flags; + uint32_t viewMask; + uint32_t colorAttachmentCount; + const VkFormat* pColorAttachmentFormats; + VkFormat depthAttachmentFormat; + VkFormat stencilAttachmentFormat; + VkSampleCountFlagBits rasterizationSamples; +} VkCommandBufferInheritanceRenderingInfo; + +typedef struct VkPhysicalDeviceShaderIntegerDotProductFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderIntegerDotProduct; +} VkPhysicalDeviceShaderIntegerDotProductFeatures; + +typedef struct VkPhysicalDeviceShaderIntegerDotProductProperties { + VkStructureType sType; + void* pNext; + VkBool32 integerDotProduct8BitUnsignedAccelerated; + VkBool32 integerDotProduct8BitSignedAccelerated; + VkBool32 integerDotProduct8BitMixedSignednessAccelerated; + VkBool32 integerDotProduct4x8BitPackedUnsignedAccelerated; + VkBool32 integerDotProduct4x8BitPackedSignedAccelerated; + VkBool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated; + VkBool32 integerDotProduct16BitUnsignedAccelerated; + VkBool32 integerDotProduct16BitSignedAccelerated; + VkBool32 integerDotProduct16BitMixedSignednessAccelerated; + VkBool32 integerDotProduct32BitUnsignedAccelerated; + VkBool32 integerDotProduct32BitSignedAccelerated; + VkBool32 integerDotProduct32BitMixedSignednessAccelerated; + VkBool32 integerDotProduct64BitUnsignedAccelerated; + VkBool32 integerDotProduct64BitSignedAccelerated; + VkBool32 integerDotProduct64BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated; + 
VkBool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated; +} VkPhysicalDeviceShaderIntegerDotProductProperties; + +typedef struct VkPhysicalDeviceTexelBufferAlignmentProperties { + VkStructureType sType; + void* pNext; + VkDeviceSize storageTexelBufferOffsetAlignmentBytes; + VkBool32 storageTexelBufferOffsetSingleTexelAlignment; + VkDeviceSize uniformTexelBufferOffsetAlignmentBytes; + VkBool32 uniformTexelBufferOffsetSingleTexelAlignment; +} VkPhysicalDeviceTexelBufferAlignmentProperties; + +typedef struct VkFormatProperties3 { + VkStructureType sType; + void* pNext; + VkFormatFeatureFlags2 linearTilingFeatures; + VkFormatFeatureFlags2 optimalTilingFeatures; + VkFormatFeatureFlags2 bufferFeatures; +} VkFormatProperties3; + +typedef struct VkPhysicalDeviceMaintenance4Features { + VkStructureType sType; + void* pNext; + VkBool32 maintenance4; +} VkPhysicalDeviceMaintenance4Features; + +typedef struct VkPhysicalDeviceMaintenance4Properties { + VkStructureType sType; + void* pNext; + VkDeviceSize maxBufferSize; +} VkPhysicalDeviceMaintenance4Properties; + +typedef struct VkDeviceBufferMemoryRequirements { + VkStructureType sType; + const void* pNext; + const VkBufferCreateInfo* pCreateInfo; +} VkDeviceBufferMemoryRequirements; + +typedef struct VkDeviceImageMemoryRequirements { + VkStructureType sType; + const void* pNext; + const VkImageCreateInfo* pCreateInfo; + VkImageAspectFlagBits planeAspect; +} VkDeviceImageMemoryRequirements; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceToolProperties)(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolProperties* pToolProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePrivateDataSlot)(VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlot* pPrivateDataSlot); +typedef void (VKAPI_PTR *PFN_vkDestroyPrivateDataSlot)(VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkSetPrivateData)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data); +typedef void (VKAPI_PTR *PFN_vkGetPrivateData)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t* pData); +typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2)(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo* pDependencyInfo); +typedef void (VKAPI_PTR 
*PFN_vkCmdResetEvent2)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfo* pDependencyInfos); +typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2)(VkCommandBuffer commandBuffer, const VkDependencyInfo* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer2)(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImage2)(VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage2)(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer2)(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage2)(VkCommandBuffer commandBuffer, const VkBlitImageInfo2* pBlitImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage2)(VkCommandBuffer commandBuffer, const VkResolveImageInfo2* pResolveImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRendering)(VkCommandBuffer commandBuffer, const VkRenderingInfo* pRenderingInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRendering)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdSetCullMode)(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetFrontFace)(VkCommandBuffer commandBuffer, VkFrontFace frontFace); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveTopology)(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWithCount)(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports); +typedef void (VKAPI_PTR *PFN_vkCmdSetScissorWithCount)(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors); +typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers2)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthTestEnable)(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthWriteEnable)(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthCompareOp)(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBoundsTestEnable)(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilTestEnable)(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilOp)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizerDiscardEnable)(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBiasEnable)(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable); 
+typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveRestartEnable)(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable); +typedef void (VKAPI_PTR *PFN_vkGetDeviceBufferMemoryRequirements)(VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageMemoryRequirements)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSparseMemoryRequirements)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolProperties( + VkPhysicalDevice physicalDevice, + uint32_t* pToolCount, + VkPhysicalDeviceToolProperties* pToolProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlot( + VkDevice device, + const VkPrivateDataSlotCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPrivateDataSlot* pPrivateDataSlot); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlot( + VkDevice device, + VkPrivateDataSlot privateDataSlot, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateData( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t data); + +VKAPI_ATTR void VKAPI_CALL vkGetPrivateData( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2( + VkCommandBuffer commandBuffer, + VkEvent event, + const VkDependencyInfo* pDependencyInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags2 stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2( + VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + const VkDependencyInfo* pDependencyInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2( + VkCommandBuffer commandBuffer, + const VkDependencyInfo* pDependencyInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2( + VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo2* pSubmits, + VkFence fence); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2( + VkCommandBuffer commandBuffer, + const VkCopyBufferInfo2* pCopyBufferInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2( + VkCommandBuffer commandBuffer, + const VkCopyImageInfo2* pCopyImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2( + VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2( + VkCommandBuffer commandBuffer, + const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2( + VkCommandBuffer commandBuffer, + const VkBlitImageInfo2* pBlitImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2( + VkCommandBuffer commandBuffer, + const VkResolveImageInfo2* pResolveImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRendering( + VkCommandBuffer commandBuffer, + const VkRenderingInfo* pRenderingInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRendering( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void 
VKAPI_CALL vkCmdSetCullMode( + VkCommandBuffer commandBuffer, + VkCullModeFlags cullMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFace( + VkCommandBuffer commandBuffer, + VkFrontFace frontFace); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopology( + VkCommandBuffer commandBuffer, + VkPrimitiveTopology primitiveTopology); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCount( + VkCommandBuffer commandBuffer, + uint32_t viewportCount, + const VkViewport* pViewports); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCount( + VkCommandBuffer commandBuffer, + uint32_t scissorCount, + const VkRect2D* pScissors); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes, + const VkDeviceSize* pStrides); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnable( + VkCommandBuffer commandBuffer, + VkBool32 depthTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnable( + VkCommandBuffer commandBuffer, + VkBool32 depthWriteEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOp( + VkCommandBuffer commandBuffer, + VkCompareOp depthCompareOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnable( + VkCommandBuffer commandBuffer, + VkBool32 depthBoundsTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilTestEnable( + VkCommandBuffer commandBuffer, + VkBool32 stencilTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOp( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizerDiscardEnable( + VkCommandBuffer commandBuffer, + VkBool32 rasterizerDiscardEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBiasEnable( + VkCommandBuffer commandBuffer, + VkBool32 depthBiasEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnable( + VkCommandBuffer commandBuffer, + VkBool32 primitiveRestartEnable); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceBufferMemoryRequirements( + VkDevice device, + const VkDeviceBufferMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageMemoryRequirements( + VkDevice device, + const VkDeviceImageMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirements( + VkDevice device, + const VkDeviceImageMemoryRequirements* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +#endif + + +// VK_VERSION_1_4 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_VERSION_1_4 1 +// Vulkan 1.4 version number +#define VK_API_VERSION_1_4 VK_MAKE_API_VERSION(0, 1, 4, 0)// Patch version should always be set to 0 + +#define VK_MAX_GLOBAL_PRIORITY_SIZE 16U + +typedef enum VkPipelineRobustnessBufferBehavior { + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT = 0, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED = 1, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS = 2, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2 = 3, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_MAX_ENUM = 0x7FFFFFFF +} VkPipelineRobustnessBufferBehavior; + +typedef enum VkPipelineRobustnessImageBehavior { + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT = 0, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED = 1, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS = 2, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2 = 3, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_MAX_ENUM = 0x7FFFFFFF +} VkPipelineRobustnessImageBehavior; + +typedef enum VkQueueGlobalPriority { + VK_QUEUE_GLOBAL_PRIORITY_LOW = 128, + VK_QUEUE_GLOBAL_PRIORITY_MEDIUM = 256, + VK_QUEUE_GLOBAL_PRIORITY_HIGH = 512, + VK_QUEUE_GLOBAL_PRIORITY_REALTIME = 1024, + VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT = VK_QUEUE_GLOBAL_PRIORITY_LOW, + VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM, + VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT = VK_QUEUE_GLOBAL_PRIORITY_HIGH, + VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT = VK_QUEUE_GLOBAL_PRIORITY_REALTIME, + VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR = VK_QUEUE_GLOBAL_PRIORITY_LOW, + VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM, + VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR = VK_QUEUE_GLOBAL_PRIORITY_HIGH, + VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR = VK_QUEUE_GLOBAL_PRIORITY_REALTIME, + VK_QUEUE_GLOBAL_PRIORITY_MAX_ENUM = 0x7FFFFFFF +} VkQueueGlobalPriority; + +typedef enum VkLineRasterizationMode { + VK_LINE_RASTERIZATION_MODE_DEFAULT = 0, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR = 1, + VK_LINE_RASTERIZATION_MODE_BRESENHAM = 2, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH = 3, + VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT = VK_LINE_RASTERIZATION_MODE_DEFAULT, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT = VK_LINE_RASTERIZATION_MODE_RECTANGULAR, + VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT = VK_LINE_RASTERIZATION_MODE_BRESENHAM, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH, + VK_LINE_RASTERIZATION_MODE_DEFAULT_KHR = VK_LINE_RASTERIZATION_MODE_DEFAULT, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_KHR = VK_LINE_RASTERIZATION_MODE_RECTANGULAR, 
+ VK_LINE_RASTERIZATION_MODE_BRESENHAM_KHR = VK_LINE_RASTERIZATION_MODE_BRESENHAM, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_KHR = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH, + VK_LINE_RASTERIZATION_MODE_MAX_ENUM = 0x7FFFFFFF +} VkLineRasterizationMode; + +typedef enum VkMemoryUnmapFlagBits { + VK_MEMORY_UNMAP_RESERVE_BIT_EXT = 0x00000001, + VK_MEMORY_UNMAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryUnmapFlagBits; +typedef VkFlags VkMemoryUnmapFlags; +typedef VkFlags64 VkPipelineCreateFlags2; + +// Flag bits for VkPipelineCreateFlagBits2 +typedef VkFlags64 VkPipelineCreateFlagBits2; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT = 0x00000001ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT = 0x00000002ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DERIVATIVE_BIT = 0x00000004ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT = 0x00000008ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT = 0x00000010ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT = 0x00000100ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT = 0x00000200ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT = 0x08000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT = 0x40000000ULL; +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX = 0x100000000ULL; +#endif +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_BUILT_IN_PRIMITIVES_BIT_KHR = 0x00001000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_SPHERES_AND_LINEAR_SWEPT_SPHERES_BIT_NV = 0x200000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x400000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR = 0x00000001ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR = 0x00000002ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR = 0x00000004ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = 0x00000008ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR = 0x00000010ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV = 0x00000020ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR = 0x00000040ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_KHR = 0x00000100ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT_KHR = 0x00000200ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_EXT = 0x00000400ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT = 0x00800000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR = 0x00000800ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 
0x00001000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_NV = 0x00040000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_NV = 0x00100000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT = 0x01000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x02000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x04000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_EXT = 0x08000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_EXT = 0x40000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV = 0x10000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT = 0x20000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DISALLOW_OPACITY_MICROMAP_BIT_ARM = 0x2000000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR = 0x80000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_EXT = 0x4000000000ULL; + +typedef VkFlags64 VkBufferUsageFlags2; + +// Flag bits for VkBufferUsageFlagBits2 +typedef VkFlags64 VkBufferUsageFlagBits2; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT = 0x00000001ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFER_DST_BIT = 0x00000002ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT = 0x00000004ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT = 0x00000008ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT = 0x00000010ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT = 0x00000020ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT = 0x00000040ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT = 0x00000080ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT = 0x00000100ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT = 0x00020000ULL; +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX = 0x02000000ULL; +#endif +static const 
VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT_KHR = 0x00000001ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFER_DST_BIT_KHR = 0x00000002ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR = 0x00000004ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT_KHR = 0x00000008ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT_KHR = 0x00000010ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR = 0x00000020ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR = 0x00000040ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR = 0x00000080ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR = 0x00000100ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_RAY_TRACING_BIT_NV = 0x00000400ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR = 0x00002000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR = 0x00004000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR = 0x00008000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR = 0x00010000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT_KHR = 0x00020000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT = 0x00200000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00400000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT = 0x04000000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT = 0x00800000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT = 0x01000000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TILE_MEMORY_QCOM = 0x08000000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_PREPROCESS_BUFFER_BIT_EXT = 0x80000000ULL; + + +typedef enum VkHostImageCopyFlagBits { + VK_HOST_IMAGE_COPY_MEMCPY = 0x00000001, + VK_HOST_IMAGE_COPY_MEMCPY_EXT = VK_HOST_IMAGE_COPY_MEMCPY, + VK_HOST_IMAGE_COPY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkHostImageCopyFlagBits; +typedef VkFlags VkHostImageCopyFlags; +typedef struct VkPhysicalDeviceVulkan14Features { + VkStructureType sType; + void* pNext; + VkBool32 globalPriorityQuery; + VkBool32 shaderSubgroupRotate; + VkBool32 shaderSubgroupRotateClustered; + VkBool32 shaderFloatControls2; + VkBool32 shaderExpectAssume; + VkBool32 rectangularLines; + VkBool32 bresenhamLines; + VkBool32 smoothLines; + VkBool32 stippledRectangularLines; + VkBool32 stippledBresenhamLines; + VkBool32 stippledSmoothLines; + VkBool32 
vertexAttributeInstanceRateDivisor; + VkBool32 vertexAttributeInstanceRateZeroDivisor; + VkBool32 indexTypeUint8; + VkBool32 dynamicRenderingLocalRead; + VkBool32 maintenance5; + VkBool32 maintenance6; + VkBool32 pipelineProtectedAccess; + VkBool32 pipelineRobustness; + VkBool32 hostImageCopy; + VkBool32 pushDescriptor; +} VkPhysicalDeviceVulkan14Features; + +typedef struct VkPhysicalDeviceVulkan14Properties { + VkStructureType sType; + void* pNext; + uint32_t lineSubPixelPrecisionBits; + uint32_t maxVertexAttribDivisor; + VkBool32 supportsNonZeroFirstInstance; + uint32_t maxPushDescriptors; + VkBool32 dynamicRenderingLocalReadDepthStencilAttachments; + VkBool32 dynamicRenderingLocalReadMultisampledAttachments; + VkBool32 earlyFragmentMultisampleCoverageAfterSampleCounting; + VkBool32 earlyFragmentSampleMaskTestBeforeSampleCounting; + VkBool32 depthStencilSwizzleOneSupport; + VkBool32 polygonModePointSize; + VkBool32 nonStrictSinglePixelWideLinesUseParallelogram; + VkBool32 nonStrictWideLinesUseParallelogram; + VkBool32 blockTexelViewCompatibleMultipleLayers; + uint32_t maxCombinedImageSamplerDescriptorCount; + VkBool32 fragmentShadingRateClampCombinerInputs; + VkPipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers; + VkPipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers; + VkPipelineRobustnessBufferBehavior defaultRobustnessVertexInputs; + VkPipelineRobustnessImageBehavior defaultRobustnessImages; + uint32_t copySrcLayoutCount; + VkImageLayout* pCopySrcLayouts; + uint32_t copyDstLayoutCount; + VkImageLayout* pCopyDstLayouts; + uint8_t optimalTilingLayoutUUID[VK_UUID_SIZE]; + VkBool32 identicalMemoryTypeRequirements; +} VkPhysicalDeviceVulkan14Properties; + +typedef struct VkDeviceQueueGlobalPriorityCreateInfo { + VkStructureType sType; + const void* pNext; + VkQueueGlobalPriority globalPriority; +} VkDeviceQueueGlobalPriorityCreateInfo; + +typedef struct VkPhysicalDeviceGlobalPriorityQueryFeatures { + VkStructureType sType; + void* pNext; + VkBool32 globalPriorityQuery; +} VkPhysicalDeviceGlobalPriorityQueryFeatures; + +typedef struct VkQueueFamilyGlobalPriorityProperties { + VkStructureType sType; + void* pNext; + uint32_t priorityCount; + VkQueueGlobalPriority priorities[VK_MAX_GLOBAL_PRIORITY_SIZE]; +} VkQueueFamilyGlobalPriorityProperties; + +typedef struct VkPhysicalDeviceShaderSubgroupRotateFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupRotate; + VkBool32 shaderSubgroupRotateClustered; +} VkPhysicalDeviceShaderSubgroupRotateFeatures; + +typedef struct VkPhysicalDeviceShaderFloatControls2Features { + VkStructureType sType; + void* pNext; + VkBool32 shaderFloatControls2; +} VkPhysicalDeviceShaderFloatControls2Features; + +typedef struct VkPhysicalDeviceShaderExpectAssumeFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderExpectAssume; +} VkPhysicalDeviceShaderExpectAssumeFeatures; + +typedef struct VkPhysicalDeviceLineRasterizationFeatures { + VkStructureType sType; + void* pNext; + VkBool32 rectangularLines; + VkBool32 bresenhamLines; + VkBool32 smoothLines; + VkBool32 stippledRectangularLines; + VkBool32 stippledBresenhamLines; + VkBool32 stippledSmoothLines; +} VkPhysicalDeviceLineRasterizationFeatures; + +typedef struct VkPhysicalDeviceLineRasterizationProperties { + VkStructureType sType; + void* pNext; + uint32_t lineSubPixelPrecisionBits; +} VkPhysicalDeviceLineRasterizationProperties; + +typedef struct VkPipelineRasterizationLineStateCreateInfo { + VkStructureType sType; + const void* pNext; + 
VkLineRasterizationMode lineRasterizationMode; + VkBool32 stippledLineEnable; + uint32_t lineStippleFactor; + uint16_t lineStipplePattern; +} VkPipelineRasterizationLineStateCreateInfo; + +typedef struct VkPhysicalDeviceVertexAttributeDivisorProperties { + VkStructureType sType; + void* pNext; + uint32_t maxVertexAttribDivisor; + VkBool32 supportsNonZeroFirstInstance; +} VkPhysicalDeviceVertexAttributeDivisorProperties; + +typedef struct VkVertexInputBindingDivisorDescription { + uint32_t binding; + uint32_t divisor; +} VkVertexInputBindingDivisorDescription; + +typedef struct VkPipelineVertexInputDivisorStateCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t vertexBindingDivisorCount; + const VkVertexInputBindingDivisorDescription* pVertexBindingDivisors; +} VkPipelineVertexInputDivisorStateCreateInfo; + +typedef struct VkPhysicalDeviceVertexAttributeDivisorFeatures { + VkStructureType sType; + void* pNext; + VkBool32 vertexAttributeInstanceRateDivisor; + VkBool32 vertexAttributeInstanceRateZeroDivisor; +} VkPhysicalDeviceVertexAttributeDivisorFeatures; + +typedef struct VkPhysicalDeviceIndexTypeUint8Features { + VkStructureType sType; + void* pNext; + VkBool32 indexTypeUint8; +} VkPhysicalDeviceIndexTypeUint8Features; + +typedef struct VkMemoryMapInfo { + VkStructureType sType; + const void* pNext; + VkMemoryMapFlags flags; + VkDeviceMemory memory; + VkDeviceSize offset; + VkDeviceSize size; +} VkMemoryMapInfo; + +typedef struct VkMemoryUnmapInfo { + VkStructureType sType; + const void* pNext; + VkMemoryUnmapFlags flags; + VkDeviceMemory memory; +} VkMemoryUnmapInfo; + +typedef struct VkPhysicalDeviceMaintenance5Features { + VkStructureType sType; + void* pNext; + VkBool32 maintenance5; +} VkPhysicalDeviceMaintenance5Features; + +typedef struct VkPhysicalDeviceMaintenance5Properties { + VkStructureType sType; + void* pNext; + VkBool32 earlyFragmentMultisampleCoverageAfterSampleCounting; + VkBool32 earlyFragmentSampleMaskTestBeforeSampleCounting; + VkBool32 depthStencilSwizzleOneSupport; + VkBool32 polygonModePointSize; + VkBool32 nonStrictSinglePixelWideLinesUseParallelogram; + VkBool32 nonStrictWideLinesUseParallelogram; +} VkPhysicalDeviceMaintenance5Properties; + +typedef struct VkRenderingAreaInfo { + VkStructureType sType; + const void* pNext; + uint32_t viewMask; + uint32_t colorAttachmentCount; + const VkFormat* pColorAttachmentFormats; + VkFormat depthAttachmentFormat; + VkFormat stencilAttachmentFormat; +} VkRenderingAreaInfo; + +typedef struct VkImageSubresource2 { + VkStructureType sType; + void* pNext; + VkImageSubresource imageSubresource; +} VkImageSubresource2; + +typedef struct VkDeviceImageSubresourceInfo { + VkStructureType sType; + const void* pNext; + const VkImageCreateInfo* pCreateInfo; + const VkImageSubresource2* pSubresource; +} VkDeviceImageSubresourceInfo; + +typedef struct VkSubresourceLayout2 { + VkStructureType sType; + void* pNext; + VkSubresourceLayout subresourceLayout; +} VkSubresourceLayout2; + +typedef struct VkPipelineCreateFlags2CreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags2 flags; +} VkPipelineCreateFlags2CreateInfo; + +typedef struct VkBufferUsageFlags2CreateInfo { + VkStructureType sType; + const void* pNext; + VkBufferUsageFlags2 usage; +} VkBufferUsageFlags2CreateInfo; + +typedef struct VkPhysicalDevicePushDescriptorProperties { + VkStructureType sType; + void* pNext; + uint32_t maxPushDescriptors; +} VkPhysicalDevicePushDescriptorProperties; + +typedef struct 
VkPhysicalDeviceDynamicRenderingLocalReadFeatures { + VkStructureType sType; + void* pNext; + VkBool32 dynamicRenderingLocalRead; +} VkPhysicalDeviceDynamicRenderingLocalReadFeatures; + +typedef struct VkRenderingAttachmentLocationInfo { + VkStructureType sType; + const void* pNext; + uint32_t colorAttachmentCount; + const uint32_t* pColorAttachmentLocations; +} VkRenderingAttachmentLocationInfo; + +typedef struct VkRenderingInputAttachmentIndexInfo { + VkStructureType sType; + const void* pNext; + uint32_t colorAttachmentCount; + const uint32_t* pColorAttachmentInputIndices; + const uint32_t* pDepthInputAttachmentIndex; + const uint32_t* pStencilInputAttachmentIndex; +} VkRenderingInputAttachmentIndexInfo; + +typedef struct VkPhysicalDeviceMaintenance6Features { + VkStructureType sType; + void* pNext; + VkBool32 maintenance6; +} VkPhysicalDeviceMaintenance6Features; + +typedef struct VkPhysicalDeviceMaintenance6Properties { + VkStructureType sType; + void* pNext; + VkBool32 blockTexelViewCompatibleMultipleLayers; + uint32_t maxCombinedImageSamplerDescriptorCount; + VkBool32 fragmentShadingRateClampCombinerInputs; +} VkPhysicalDeviceMaintenance6Properties; + +typedef struct VkBindMemoryStatus { + VkStructureType sType; + const void* pNext; + VkResult* pResult; +} VkBindMemoryStatus; + +typedef struct VkBindDescriptorSetsInfo { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags stageFlags; + VkPipelineLayout layout; + uint32_t firstSet; + uint32_t descriptorSetCount; + const VkDescriptorSet* pDescriptorSets; + uint32_t dynamicOffsetCount; + const uint32_t* pDynamicOffsets; +} VkBindDescriptorSetsInfo; + +typedef struct VkPushConstantsInfo { + VkStructureType sType; + const void* pNext; + VkPipelineLayout layout; + VkShaderStageFlags stageFlags; + uint32_t offset; + uint32_t size; + const void* pValues; +} VkPushConstantsInfo; + +typedef struct VkPushDescriptorSetInfo { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags stageFlags; + VkPipelineLayout layout; + uint32_t set; + uint32_t descriptorWriteCount; + const VkWriteDescriptorSet* pDescriptorWrites; +} VkPushDescriptorSetInfo; + +typedef struct VkPushDescriptorSetWithTemplateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorUpdateTemplate descriptorUpdateTemplate; + VkPipelineLayout layout; + uint32_t set; + const void* pData; +} VkPushDescriptorSetWithTemplateInfo; + +typedef struct VkPhysicalDevicePipelineProtectedAccessFeatures { + VkStructureType sType; + void* pNext; + VkBool32 pipelineProtectedAccess; +} VkPhysicalDevicePipelineProtectedAccessFeatures; + +typedef struct VkPhysicalDevicePipelineRobustnessFeatures { + VkStructureType sType; + void* pNext; + VkBool32 pipelineRobustness; +} VkPhysicalDevicePipelineRobustnessFeatures; + +typedef struct VkPhysicalDevicePipelineRobustnessProperties { + VkStructureType sType; + void* pNext; + VkPipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers; + VkPipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers; + VkPipelineRobustnessBufferBehavior defaultRobustnessVertexInputs; + VkPipelineRobustnessImageBehavior defaultRobustnessImages; +} VkPhysicalDevicePipelineRobustnessProperties; + +typedef struct VkPipelineRobustnessCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineRobustnessBufferBehavior storageBuffers; + VkPipelineRobustnessBufferBehavior uniformBuffers; + VkPipelineRobustnessBufferBehavior vertexInputs; + VkPipelineRobustnessImageBehavior images; +} VkPipelineRobustnessCreateInfo; + +typedef 
struct VkPhysicalDeviceHostImageCopyFeatures { + VkStructureType sType; + void* pNext; + VkBool32 hostImageCopy; +} VkPhysicalDeviceHostImageCopyFeatures; + +typedef struct VkPhysicalDeviceHostImageCopyProperties { + VkStructureType sType; + void* pNext; + uint32_t copySrcLayoutCount; + VkImageLayout* pCopySrcLayouts; + uint32_t copyDstLayoutCount; + VkImageLayout* pCopyDstLayouts; + uint8_t optimalTilingLayoutUUID[VK_UUID_SIZE]; + VkBool32 identicalMemoryTypeRequirements; +} VkPhysicalDeviceHostImageCopyProperties; + +typedef struct VkMemoryToImageCopy { + VkStructureType sType; + const void* pNext; + const void* pHostPointer; + uint32_t memoryRowLength; + uint32_t memoryImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkMemoryToImageCopy; + +typedef struct VkImageToMemoryCopy { + VkStructureType sType; + const void* pNext; + void* pHostPointer; + uint32_t memoryRowLength; + uint32_t memoryImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkImageToMemoryCopy; + +typedef struct VkCopyMemoryToImageInfo { + VkStructureType sType; + const void* pNext; + VkHostImageCopyFlags flags; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkMemoryToImageCopy* pRegions; +} VkCopyMemoryToImageInfo; + +typedef struct VkCopyImageToMemoryInfo { + VkStructureType sType; + const void* pNext; + VkHostImageCopyFlags flags; + VkImage srcImage; + VkImageLayout srcImageLayout; + uint32_t regionCount; + const VkImageToMemoryCopy* pRegions; +} VkCopyImageToMemoryInfo; + +typedef struct VkCopyImageToImageInfo { + VkStructureType sType; + const void* pNext; + VkHostImageCopyFlags flags; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageCopy2* pRegions; +} VkCopyImageToImageInfo; + +typedef struct VkHostImageLayoutTransitionInfo { + VkStructureType sType; + const void* pNext; + VkImage image; + VkImageLayout oldLayout; + VkImageLayout newLayout; + VkImageSubresourceRange subresourceRange; +} VkHostImageLayoutTransitionInfo; + +typedef struct VkSubresourceHostMemcpySize { + VkStructureType sType; + void* pNext; + VkDeviceSize size; +} VkSubresourceHostMemcpySize; + +typedef struct VkHostImageCopyDevicePerformanceQuery { + VkStructureType sType; + void* pNext; + VkBool32 optimalDeviceAccess; + VkBool32 identicalMemoryLayout; +} VkHostImageCopyDevicePerformanceQuery; + +typedef void (VKAPI_PTR *PFN_vkCmdSetLineStipple)(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern); +typedef VkResult (VKAPI_PTR *PFN_vkMapMemory2)(VkDevice device, const VkMemoryMapInfo* pMemoryMapInfo, void** ppData); +typedef VkResult (VKAPI_PTR *PFN_vkUnmapMemory2)(VkDevice device, const VkMemoryUnmapInfo* pMemoryUnmapInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer2)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType); +typedef void (VKAPI_PTR *PFN_vkGetRenderingAreaGranularity)(VkDevice device, const VkRenderingAreaInfo* pRenderingAreaInfo, VkExtent2D* pGranularity); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSubresourceLayout)(VkDevice device, const VkDeviceImageSubresourceInfo* pInfo, VkSubresourceLayout2* pLayout); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2)(VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, VkSubresourceLayout2* pLayout); +typedef 
void (VKAPI_PTR *PFN_vkCmdPushDescriptorSet)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplate)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingAttachmentLocations)(VkCommandBuffer commandBuffer, const VkRenderingAttachmentLocationInfo* pLocationInfo); +typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingInputAttachmentIndices)(VkCommandBuffer commandBuffer, const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets2)(VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushConstants2)(VkCommandBuffer commandBuffer, const VkPushConstantsInfo* pPushConstantsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSet2)(VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfo* pPushDescriptorSetInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplate2)(VkCommandBuffer commandBuffer, const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToImage)(VkDevice device, const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToMemory)(VkDevice device, const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToImage)(VkDevice device, const VkCopyImageToImageInfo* pCopyImageToImageInfo); +typedef VkResult (VKAPI_PTR *PFN_vkTransitionImageLayout)(VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfo* pTransitions); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStipple( + VkCommandBuffer commandBuffer, + uint32_t lineStippleFactor, + uint16_t lineStipplePattern); + +VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory2( + VkDevice device, + const VkMemoryMapInfo* pMemoryMapInfo, + void** ppData); + +VKAPI_ATTR VkResult VKAPI_CALL vkUnmapMemory2( + VkDevice device, + const VkMemoryUnmapInfo* pMemoryUnmapInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer2( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkDeviceSize size, + VkIndexType indexType); + +VKAPI_ATTR void VKAPI_CALL vkGetRenderingAreaGranularity( + VkDevice device, + const VkRenderingAreaInfo* pRenderingAreaInfo, + VkExtent2D* pGranularity); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSubresourceLayout( + VkDevice device, + const VkDeviceImageSubresourceInfo* pInfo, + VkSubresourceLayout2* pLayout); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2( + VkDevice device, + VkImage image, + const VkImageSubresource2* pSubresource, + VkSubresourceLayout2* pLayout); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSet( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplate( + VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRenderingAttachmentLocations( + VkCommandBuffer 
commandBuffer, + const VkRenderingAttachmentLocationInfo* pLocationInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRenderingInputAttachmentIndices( + VkCommandBuffer commandBuffer, + const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets2( + VkCommandBuffer commandBuffer, + const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants2( + VkCommandBuffer commandBuffer, + const VkPushConstantsInfo* pPushConstantsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSet2( + VkCommandBuffer commandBuffer, + const VkPushDescriptorSetInfo* pPushDescriptorSetInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplate2( + VkCommandBuffer commandBuffer, + const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToImage( + VkDevice device, + const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToMemory( + VkDevice device, + const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToImage( + VkDevice device, + const VkCopyImageToImageInfo* pCopyImageToImageInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkTransitionImageLayout( + VkDevice device, + uint32_t transitionCount, + const VkHostImageLayoutTransitionInfo* pTransitions); +#endif + + +// VK_KHR_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_surface 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) +#define VK_KHR_SURFACE_SPEC_VERSION 25 +#define VK_KHR_SURFACE_EXTENSION_NAME "VK_KHR_surface" + +typedef enum VkPresentModeKHR { + VK_PRESENT_MODE_IMMEDIATE_KHR = 0, + VK_PRESENT_MODE_MAILBOX_KHR = 1, + VK_PRESENT_MODE_FIFO_KHR = 2, + VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3, + VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000, + VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001, + VK_PRESENT_MODE_FIFO_LATEST_READY_EXT = 1000361000, + VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPresentModeKHR; + +typedef enum VkColorSpaceKHR { + VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0, + VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001, + VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1000104002, + VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT = 1000104003, + VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT = 1000104004, + VK_COLOR_SPACE_BT709_LINEAR_EXT = 1000104005, + VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006, + VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007, + VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008, + // VK_COLOR_SPACE_DOLBYVISION_EXT is deprecated, but no reason was given in the API XML + VK_COLOR_SPACE_DOLBYVISION_EXT = 1000104009, + VK_COLOR_SPACE_HDR10_HLG_EXT = 1000104010, + VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT = 1000104011, + VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT = 1000104012, + VK_COLOR_SPACE_PASS_THROUGH_EXT = 1000104013, + VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT = 1000104014, + VK_COLOR_SPACE_DISPLAY_NATIVE_AMD = 1000213000, + // VK_COLORSPACE_SRGB_NONLINEAR_KHR is a deprecated alias + VK_COLORSPACE_SRGB_NONLINEAR_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + // VK_COLOR_SPACE_DCI_P3_LINEAR_EXT is a deprecated alias + VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, + VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkColorSpaceKHR; + +typedef enum VkSurfaceTransformFlagBitsKHR { + VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001, + VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002, + VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR = 
0x00000004, + VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR = 0x00000008, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR = 0x00000010, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR = 0x00000020, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = 0x00000040, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = 0x00000080, + VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100, + VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSurfaceTransformFlagBitsKHR; + +typedef enum VkCompositeAlphaFlagBitsKHR { + VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR = 0x00000002, + VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR = 0x00000004, + VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR = 0x00000008, + VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkCompositeAlphaFlagBitsKHR; +typedef VkFlags VkCompositeAlphaFlagsKHR; +typedef VkFlags VkSurfaceTransformFlagsKHR; +typedef struct VkSurfaceCapabilitiesKHR { + uint32_t minImageCount; + uint32_t maxImageCount; + VkExtent2D currentExtent; + VkExtent2D minImageExtent; + VkExtent2D maxImageExtent; + uint32_t maxImageArrayLayers; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkSurfaceTransformFlagBitsKHR currentTransform; + VkCompositeAlphaFlagsKHR supportedCompositeAlpha; + VkImageUsageFlags supportedUsageFlags; +} VkSurfaceCapabilitiesKHR; + +typedef struct VkSurfaceFormatKHR { + VkFormat format; + VkColorSpaceKHR colorSpace; +} VkSurfaceFormatKHR; + +typedef void (VKAPI_PTR *PFN_vkDestroySurfaceKHR)(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR( + VkInstance instance, + VkSurfaceKHR surface, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32* pSupported); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormatKHR* pSurfaceFormats); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pPresentModeCount, + VkPresentModeKHR* pPresentModes); +#endif + + +// VK_KHR_swapchain is a preprocessor guard. Do not pass it to API calls. 
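Editor's note on the VK_KHR_surface declarations above: the pCount-style queries follow Vulkan's usual two-call enumeration idiom (first call returns the element count, second call fills a caller-allocated array). A minimal C sketch under that assumption; pick_surface_format is a hypothetical helper, not part of Anvil or of the header being diffed, and it assumes a valid VkPhysicalDevice and VkSurfaceKHR whose queue-family support was already confirmed with vkGetPhysicalDeviceSurfaceSupportKHR.

#include <stdlib.h>
#include <vulkan/vulkan.h>

/* Hypothetical helper: prefers B8G8R8A8_SRGB in the sRGB color space and
 * falls back to the first reported format otherwise. */
static VkSurfaceFormatKHR pick_surface_format(VkPhysicalDevice gpu,
                                              VkSurfaceKHR     surface)
{
    uint32_t count = 0;
    vkGetPhysicalDeviceSurfaceFormatsKHR(gpu, surface, &count, NULL);

    VkSurfaceFormatKHR* formats = malloc(count * sizeof(*formats));
    vkGetPhysicalDeviceSurfaceFormatsKHR(gpu, surface, &count, formats);

    VkSurfaceFormatKHR chosen = formats[0]; /* spec guarantees count >= 1 */
    for (uint32_t i = 0; i < count; ++i)
    {
        if (formats[i].format     == VK_FORMAT_B8G8R8A8_SRGB &&
            formats[i].colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR)
        {
            chosen = formats[i];
            break;
        }
    }

    free(formats);
    return chosen;
}

The same two-call pattern applies to vkGetPhysicalDeviceSurfacePresentModesKHR.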
+#define VK_KHR_swapchain 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR) +#define VK_KHR_SWAPCHAIN_SPEC_VERSION 70 +#define VK_KHR_SWAPCHAIN_EXTENSION_NAME "VK_KHR_swapchain" + +typedef enum VkSwapchainCreateFlagBitsKHR { + VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = 0x00000001, + VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR = 0x00000002, + VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR = 0x00000004, + VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT = 0x00000008, + VK_SWAPCHAIN_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSwapchainCreateFlagBitsKHR; +typedef VkFlags VkSwapchainCreateFlagsKHR; + +typedef enum VkDeviceGroupPresentModeFlagBitsKHR { + VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR = 0x00000001, + VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR = 0x00000002, + VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR = 0x00000004, + VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR = 0x00000008, + VK_DEVICE_GROUP_PRESENT_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkDeviceGroupPresentModeFlagBitsKHR; +typedef VkFlags VkDeviceGroupPresentModeFlagsKHR; +typedef struct VkSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainCreateFlagsKHR flags; + VkSurfaceKHR surface; + uint32_t minImageCount; + VkFormat imageFormat; + VkColorSpaceKHR imageColorSpace; + VkExtent2D imageExtent; + uint32_t imageArrayLayers; + VkImageUsageFlags imageUsage; + VkSharingMode imageSharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + VkSurfaceTransformFlagBitsKHR preTransform; + VkCompositeAlphaFlagBitsKHR compositeAlpha; + VkPresentModeKHR presentMode; + VkBool32 clipped; + VkSwapchainKHR oldSwapchain; +} VkSwapchainCreateInfoKHR; + +typedef struct VkPresentInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + uint32_t swapchainCount; + const VkSwapchainKHR* pSwapchains; + const uint32_t* pImageIndices; + VkResult* pResults; +} VkPresentInfoKHR; + +typedef struct VkImageSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; +} VkImageSwapchainCreateInfoKHR; + +typedef struct VkBindImageMemorySwapchainInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint32_t imageIndex; +} VkBindImageMemorySwapchainInfoKHR; + +typedef struct VkAcquireNextImageInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint64_t timeout; + VkSemaphore semaphore; + VkFence fence; + uint32_t deviceMask; +} VkAcquireNextImageInfoKHR; + +typedef struct VkDeviceGroupPresentCapabilitiesKHR { + VkStructureType sType; + void* pNext; + uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE]; + VkDeviceGroupPresentModeFlagsKHR modes; +} VkDeviceGroupPresentCapabilitiesKHR; + +typedef struct VkDeviceGroupPresentInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const uint32_t* pDeviceMasks; + VkDeviceGroupPresentModeFlagBitsKHR mode; +} VkDeviceGroupPresentInfoKHR; + +typedef struct VkDeviceGroupSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceGroupPresentModeFlagsKHR modes; +} VkDeviceGroupSwapchainCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSwapchainKHR)(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain); +typedef void (VKAPI_PTR *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator); 
+typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex); +typedef VkResult (VKAPI_PTR *PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR* pPresentInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupPresentCapabilitiesKHR)(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModesKHR)(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDevicePresentRectanglesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImage2KHR)(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR( + VkDevice device, + const VkSwapchainCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchain); + +VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR( + VkDevice device, + VkSwapchainKHR swapchain, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint32_t* pSwapchainImageCount, + VkImage* pSwapchainImages); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint64_t timeout, + VkSemaphore semaphore, + VkFence fence, + uint32_t* pImageIndex); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR( + VkQueue queue, + const VkPresentInfoKHR* pPresentInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR( + VkDevice device, + VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR( + VkDevice device, + VkSurfaceKHR surface, + VkDeviceGroupPresentModeFlagsKHR* pModes); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pRectCount, + VkRect2D* pRects); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR( + VkDevice device, + const VkAcquireNextImageInfoKHR* pAcquireInfo, + uint32_t* pImageIndex); +#endif + + +// VK_KHR_display is a preprocessor guard. Do not pass it to API calls. 
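Editor's note on the VK_KHR_swapchain declarations above: acquire, submit and present form the typical per-frame sequence. A hedged sketch, assuming the device, swapchain, queue and both semaphores were created elsewhere and that the submission waiting on imageAcquired and signalling renderDone happens between the two calls; present_one_frame is a hypothetical helper, not an Anvil API.

#include <stdint.h>
#include <vulkan/vulkan.h>

/* Hypothetical per-frame present step; the rendering submission is elided. */
static VkResult present_one_frame(VkDevice       device,
                                  VkSwapchainKHR swapchain,
                                  VkQueue        queue,
                                  VkSemaphore    imageAcquired,
                                  VkSemaphore    renderDone)
{
    uint32_t image_index = 0;
    VkResult result      = vkAcquireNextImageKHR(device,
                                                 swapchain,
                                                 UINT64_MAX,     /* no timeout */
                                                 imageAcquired,
                                                 VK_NULL_HANDLE, /* no fence   */
                                                &image_index);

    if (result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR)
        return result; /* e.g. VK_ERROR_OUT_OF_DATE_KHR => recreate swapchain */

    /* ... submit rendering work targeting image_index here ... */

    VkPresentInfoKHR present_info = {0};
    present_info.sType              = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
    present_info.waitSemaphoreCount = 1;
    present_info.pWaitSemaphores    = &renderDone;
    present_info.swapchainCount     = 1;
    present_info.pSwapchains        = &swapchain;
    present_info.pImageIndices      = &image_index;

    return vkQueuePresentKHR(queue, &present_info);
}

VK_ERROR_OUT_OF_DATE_KHR (and optionally VK_SUBOPTIMAL_KHR) is the usual cue to recreate the swapchain, passing the retired handle as oldSwapchain in VkSwapchainCreateInfoKHR.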
+#define VK_KHR_display 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) +#define VK_KHR_DISPLAY_SPEC_VERSION 23 +#define VK_KHR_DISPLAY_EXTENSION_NAME "VK_KHR_display" +typedef VkFlags VkDisplayModeCreateFlagsKHR; + +typedef enum VkDisplayPlaneAlphaFlagBitsKHR { + VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR = 0x00000002, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR = 0x00000004, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR = 0x00000008, + VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkDisplayPlaneAlphaFlagBitsKHR; +typedef VkFlags VkDisplayPlaneAlphaFlagsKHR; +typedef VkFlags VkDisplaySurfaceCreateFlagsKHR; +typedef struct VkDisplayModeParametersKHR { + VkExtent2D visibleRegion; + uint32_t refreshRate; +} VkDisplayModeParametersKHR; + +typedef struct VkDisplayModeCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplayModeCreateFlagsKHR flags; + VkDisplayModeParametersKHR parameters; +} VkDisplayModeCreateInfoKHR; + +typedef struct VkDisplayModePropertiesKHR { + VkDisplayModeKHR displayMode; + VkDisplayModeParametersKHR parameters; +} VkDisplayModePropertiesKHR; + +typedef struct VkDisplayPlaneCapabilitiesKHR { + VkDisplayPlaneAlphaFlagsKHR supportedAlpha; + VkOffset2D minSrcPosition; + VkOffset2D maxSrcPosition; + VkExtent2D minSrcExtent; + VkExtent2D maxSrcExtent; + VkOffset2D minDstPosition; + VkOffset2D maxDstPosition; + VkExtent2D minDstExtent; + VkExtent2D maxDstExtent; +} VkDisplayPlaneCapabilitiesKHR; + +typedef struct VkDisplayPlanePropertiesKHR { + VkDisplayKHR currentDisplay; + uint32_t currentStackIndex; +} VkDisplayPlanePropertiesKHR; + +typedef struct VkDisplayPropertiesKHR { + VkDisplayKHR display; + const char* displayName; + VkExtent2D physicalDimensions; + VkExtent2D physicalResolution; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkBool32 planeReorderPossible; + VkBool32 persistentContent; +} VkDisplayPropertiesKHR; + +typedef struct VkDisplaySurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplaySurfaceCreateFlagsKHR flags; + VkDisplayModeKHR displayMode; + uint32_t planeIndex; + uint32_t planeStackIndex; + VkSurfaceTransformFlagBitsKHR transform; + float globalAlpha; + VkDisplayPlaneAlphaFlagBitsKHR alphaMode; + VkExtent2D imageExtent; +} VkDisplaySurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneSupportedDisplaysKHR)(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModePropertiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayModeKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities); 
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayPlaneSurfaceKHR)(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPlanePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR( + VkPhysicalDevice physicalDevice, + uint32_t planeIndex, + uint32_t* pDisplayCount, + VkDisplayKHR* pDisplays); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t* pPropertyCount, + VkDisplayModePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + const VkDisplayModeCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDisplayModeKHR* pMode); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + VkDisplayModeKHR mode, + uint32_t planeIndex, + VkDisplayPlaneCapabilitiesKHR* pCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR( + VkInstance instance, + const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +// VK_KHR_display_swapchain is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_display_swapchain 1 +#define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 10 +#define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain" +typedef struct VkDisplayPresentInfoKHR { + VkStructureType sType; + const void* pNext; + VkRect2D srcRect; + VkRect2D dstRect; + VkBool32 persistent; +} VkDisplayPresentInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR( + VkDevice device, + uint32_t swapchainCount, + const VkSwapchainCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchains); +#endif + + +// VK_KHR_sampler_mirror_clamp_to_edge is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_sampler_mirror_clamp_to_edge 1 +#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION 3 +#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME "VK_KHR_sampler_mirror_clamp_to_edge" + + +// VK_KHR_video_queue is a preprocessor guard. Do not pass it to API calls. 
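Editor's note on the VK_KHR_display declarations above: displays are enumerated per physical device with the same two-call idiom. A minimal sketch; find_first_display is a hypothetical helper and error handling is elided.

#include <stdlib.h>
#include <vulkan/vulkan.h>

/* Hypothetical helper: returns the first display reported for the GPU, or
 * VK_NULL_HANDLE if none is attached. */
static VkDisplayKHR find_first_display(VkPhysicalDevice gpu)
{
    uint32_t count = 0;
    vkGetPhysicalDeviceDisplayPropertiesKHR(gpu, &count, NULL);

    if (count == 0)
        return VK_NULL_HANDLE;

    VkDisplayPropertiesKHR* props = malloc(count * sizeof(*props));
    vkGetPhysicalDeviceDisplayPropertiesKHR(gpu, &count, props);

    VkDisplayKHR display = props[0].display;
    free(props);
    return display;
}

The returned VkDisplayKHR can then feed vkGetDisplayModePropertiesKHR and, eventually, vkCreateDisplayPlaneSurfaceKHR.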
+#define VK_KHR_video_queue 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionKHR) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionParametersKHR) +#define VK_KHR_VIDEO_QUEUE_SPEC_VERSION 8 +#define VK_KHR_VIDEO_QUEUE_EXTENSION_NAME "VK_KHR_video_queue" + +typedef enum VkQueryResultStatusKHR { + VK_QUERY_RESULT_STATUS_ERROR_KHR = -1, + VK_QUERY_RESULT_STATUS_NOT_READY_KHR = 0, + VK_QUERY_RESULT_STATUS_COMPLETE_KHR = 1, + VK_QUERY_RESULT_STATUS_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_KHR = -1000299000, + VK_QUERY_RESULT_STATUS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkQueryResultStatusKHR; + +typedef enum VkVideoCodecOperationFlagBitsKHR { + VK_VIDEO_CODEC_OPERATION_NONE_KHR = 0, + VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR = 0x00010000, + VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR = 0x00020000, + VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR = 0x00000001, + VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR = 0x00000002, + VK_VIDEO_CODEC_OPERATION_DECODE_AV1_BIT_KHR = 0x00000004, + VK_VIDEO_CODEC_OPERATION_ENCODE_AV1_BIT_KHR = 0x00040000, + VK_VIDEO_CODEC_OPERATION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoCodecOperationFlagBitsKHR; +typedef VkFlags VkVideoCodecOperationFlagsKHR; + +typedef enum VkVideoChromaSubsamplingFlagBitsKHR { + VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_KHR = 0, + VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR = 0x00000001, + VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR = 0x00000002, + VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR = 0x00000004, + VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR = 0x00000008, + VK_VIDEO_CHROMA_SUBSAMPLING_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoChromaSubsamplingFlagBitsKHR; +typedef VkFlags VkVideoChromaSubsamplingFlagsKHR; + +typedef enum VkVideoComponentBitDepthFlagBitsKHR { + VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR = 0, + VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR = 0x00000001, + VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR = 0x00000004, + VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR = 0x00000010, + VK_VIDEO_COMPONENT_BIT_DEPTH_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoComponentBitDepthFlagBitsKHR; +typedef VkFlags VkVideoComponentBitDepthFlagsKHR; + +typedef enum VkVideoCapabilityFlagBitsKHR { + VK_VIDEO_CAPABILITY_PROTECTED_CONTENT_BIT_KHR = 0x00000001, + VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR = 0x00000002, + VK_VIDEO_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoCapabilityFlagBitsKHR; +typedef VkFlags VkVideoCapabilityFlagsKHR; + +typedef enum VkVideoSessionCreateFlagBitsKHR { + VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR = 0x00000001, + VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_PARAMETER_OPTIMIZATIONS_BIT_KHR = 0x00000002, + VK_VIDEO_SESSION_CREATE_INLINE_QUERIES_BIT_KHR = 0x00000004, + VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR = 0x00000008, + VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_EMPHASIS_MAP_BIT_KHR = 0x00000010, + VK_VIDEO_SESSION_CREATE_INLINE_SESSION_PARAMETERS_BIT_KHR = 0x00000020, + VK_VIDEO_SESSION_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoSessionCreateFlagBitsKHR; +typedef VkFlags VkVideoSessionCreateFlagsKHR; + +typedef enum VkVideoSessionParametersCreateFlagBitsKHR { + VK_VIDEO_SESSION_PARAMETERS_CREATE_QUANTIZATION_MAP_COMPATIBLE_BIT_KHR = 0x00000001, + VK_VIDEO_SESSION_PARAMETERS_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoSessionParametersCreateFlagBitsKHR; +typedef VkFlags VkVideoSessionParametersCreateFlagsKHR; +typedef VkFlags VkVideoBeginCodingFlagsKHR; +typedef VkFlags VkVideoEndCodingFlagsKHR; + +typedef enum VkVideoCodingControlFlagBitsKHR { + 
VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR = 0x00000001, + VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR = 0x00000002, + VK_VIDEO_CODING_CONTROL_ENCODE_QUALITY_LEVEL_BIT_KHR = 0x00000004, + VK_VIDEO_CODING_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoCodingControlFlagBitsKHR; +typedef VkFlags VkVideoCodingControlFlagsKHR; +typedef struct VkQueueFamilyQueryResultStatusPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 queryResultStatusSupport; +} VkQueueFamilyQueryResultStatusPropertiesKHR; + +typedef struct VkQueueFamilyVideoPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoCodecOperationFlagsKHR videoCodecOperations; +} VkQueueFamilyVideoPropertiesKHR; + +typedef struct VkVideoProfileInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoCodecOperationFlagBitsKHR videoCodecOperation; + VkVideoChromaSubsamplingFlagsKHR chromaSubsampling; + VkVideoComponentBitDepthFlagsKHR lumaBitDepth; + VkVideoComponentBitDepthFlagsKHR chromaBitDepth; +} VkVideoProfileInfoKHR; + +typedef struct VkVideoProfileListInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t profileCount; + const VkVideoProfileInfoKHR* pProfiles; +} VkVideoProfileListInfoKHR; + +typedef struct VkVideoCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoCapabilityFlagsKHR flags; + VkDeviceSize minBitstreamBufferOffsetAlignment; + VkDeviceSize minBitstreamBufferSizeAlignment; + VkExtent2D pictureAccessGranularity; + VkExtent2D minCodedExtent; + VkExtent2D maxCodedExtent; + uint32_t maxDpbSlots; + uint32_t maxActiveReferencePictures; + VkExtensionProperties stdHeaderVersion; +} VkVideoCapabilitiesKHR; + +typedef struct VkPhysicalDeviceVideoFormatInfoKHR { + VkStructureType sType; + const void* pNext; + VkImageUsageFlags imageUsage; +} VkPhysicalDeviceVideoFormatInfoKHR; + +typedef struct VkVideoFormatPropertiesKHR { + VkStructureType sType; + void* pNext; + VkFormat format; + VkComponentMapping componentMapping; + VkImageCreateFlags imageCreateFlags; + VkImageType imageType; + VkImageTiling imageTiling; + VkImageUsageFlags imageUsageFlags; +} VkVideoFormatPropertiesKHR; + +typedef struct VkVideoPictureResourceInfoKHR { + VkStructureType sType; + const void* pNext; + VkOffset2D codedOffset; + VkExtent2D codedExtent; + uint32_t baseArrayLayer; + VkImageView imageViewBinding; +} VkVideoPictureResourceInfoKHR; + +typedef struct VkVideoReferenceSlotInfoKHR { + VkStructureType sType; + const void* pNext; + int32_t slotIndex; + const VkVideoPictureResourceInfoKHR* pPictureResource; +} VkVideoReferenceSlotInfoKHR; + +typedef struct VkVideoSessionMemoryRequirementsKHR { + VkStructureType sType; + void* pNext; + uint32_t memoryBindIndex; + VkMemoryRequirements memoryRequirements; +} VkVideoSessionMemoryRequirementsKHR; + +typedef struct VkBindVideoSessionMemoryInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t memoryBindIndex; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkDeviceSize memorySize; +} VkBindVideoSessionMemoryInfoKHR; + +typedef struct VkVideoSessionCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t queueFamilyIndex; + VkVideoSessionCreateFlagsKHR flags; + const VkVideoProfileInfoKHR* pVideoProfile; + VkFormat pictureFormat; + VkExtent2D maxCodedExtent; + VkFormat referencePictureFormat; + uint32_t maxDpbSlots; + uint32_t maxActiveReferencePictures; + const VkExtensionProperties* pStdHeaderVersion; +} VkVideoSessionCreateInfoKHR; + +typedef struct VkVideoSessionParametersCreateInfoKHR { + VkStructureType 
sType; + const void* pNext; + VkVideoSessionParametersCreateFlagsKHR flags; + VkVideoSessionParametersKHR videoSessionParametersTemplate; + VkVideoSessionKHR videoSession; +} VkVideoSessionParametersCreateInfoKHR; + +typedef struct VkVideoSessionParametersUpdateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t updateSequenceCount; +} VkVideoSessionParametersUpdateInfoKHR; + +typedef struct VkVideoBeginCodingInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoBeginCodingFlagsKHR flags; + VkVideoSessionKHR videoSession; + VkVideoSessionParametersKHR videoSessionParameters; + uint32_t referenceSlotCount; + const VkVideoReferenceSlotInfoKHR* pReferenceSlots; +} VkVideoBeginCodingInfoKHR; + +typedef struct VkVideoEndCodingInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEndCodingFlagsKHR flags; +} VkVideoEndCodingInfoKHR; + +typedef struct VkVideoCodingControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoCodingControlFlagsKHR flags; +} VkVideoCodingControlInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR)(VkPhysicalDevice physicalDevice, const VkVideoProfileInfoKHR* pVideoProfile, VkVideoCapabilitiesKHR* pCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, uint32_t* pVideoFormatPropertyCount, VkVideoFormatPropertiesKHR* pVideoFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionKHR)(VkDevice device, const VkVideoSessionCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionKHR* pVideoSession); +typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionKHR)(VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetVideoSessionMemoryRequirementsKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t* pMemoryRequirementsCount, VkVideoSessionMemoryRequirementsKHR* pMemoryRequirements); +typedef VkResult (VKAPI_PTR *PFN_vkBindVideoSessionMemoryKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t bindSessionMemoryInfoCount, const VkBindVideoSessionMemoryInfoKHR* pBindSessionMemoryInfos); +typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionParametersKHR)(VkDevice device, const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionParametersKHR* pVideoSessionParameters); +typedef VkResult (VKAPI_PTR *PFN_vkUpdateVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo); +typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdBeginVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR* pBeginInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR* pEndCodingInfo); +typedef void (VKAPI_PTR *PFN_vkCmdControlVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR* pCodingControlInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + const VkVideoProfileInfoKHR* pVideoProfile, + VkVideoCapabilitiesKHR* pCapabilities); + +VKAPI_ATTR VkResult 
VKAPI_CALL vkGetPhysicalDeviceVideoFormatPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, + uint32_t* pVideoFormatPropertyCount, + VkVideoFormatPropertiesKHR* pVideoFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionKHR( + VkDevice device, + const VkVideoSessionCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkVideoSessionKHR* pVideoSession); + +VKAPI_ATTR void VKAPI_CALL vkDestroyVideoSessionKHR( + VkDevice device, + VkVideoSessionKHR videoSession, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetVideoSessionMemoryRequirementsKHR( + VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t* pMemoryRequirementsCount, + VkVideoSessionMemoryRequirementsKHR* pMemoryRequirements); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindVideoSessionMemoryKHR( + VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t bindSessionMemoryInfoCount, + const VkBindVideoSessionMemoryInfoKHR* pBindSessionMemoryInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionParametersKHR( + VkDevice device, + const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkVideoSessionParametersKHR* pVideoSessionParameters); + +VKAPI_ATTR VkResult VKAPI_CALL vkUpdateVideoSessionParametersKHR( + VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo); + +VKAPI_ATTR void VKAPI_CALL vkDestroyVideoSessionParametersKHR( + VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginVideoCodingKHR( + VkCommandBuffer commandBuffer, + const VkVideoBeginCodingInfoKHR* pBeginInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndVideoCodingKHR( + VkCommandBuffer commandBuffer, + const VkVideoEndCodingInfoKHR* pEndCodingInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdControlVideoCodingKHR( + VkCommandBuffer commandBuffer, + const VkVideoCodingControlInfoKHR* pCodingControlInfo); +#endif + + +// VK_KHR_video_decode_queue is a preprocessor guard. Do not pass it to API calls. 
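Editor's note on the VK_KHR_video_queue declarations above: a video session owns several memory bindings that must be queried and bound before it can be used. A hedged sketch, assuming the extension's entry points were already fetched (the loader does not export them statically) and the session was created with vkCreateVideoSessionKHR; query_video_session_bindings is a hypothetical helper.

#include <stdlib.h>
#include <vulkan/vulkan.h>

/* Hypothetical sketch: enumerates the memory bindings required by a video
 * session. Allocating VkDeviceMemory for each entry and calling
 * vkBindVideoSessionMemoryKHR is left out. */
static uint32_t query_video_session_bindings(VkDevice          device,
                                             VkVideoSessionKHR session)
{
    uint32_t count = 0;
    vkGetVideoSessionMemoryRequirementsKHR(device, session, &count, NULL);

    VkVideoSessionMemoryRequirementsKHR* reqs = calloc(count, sizeof(*reqs));
    for (uint32_t i = 0; i < count; ++i)
        reqs[i].sType = VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR;

    vkGetVideoSessionMemoryRequirementsKHR(device, session, &count, reqs);
    /* reqs[i].memoryBindIndex and reqs[i].memoryRequirements drive the
     * subsequent allocations. */

    free(reqs);
    return count;
}

Each entry's memoryBindIndex and memoryRequirements then drive a vkAllocateMemory plus vkBindVideoSessionMemoryKHR pass, which is omitted here.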
+#define VK_KHR_video_decode_queue 1 +#define VK_KHR_VIDEO_DECODE_QUEUE_SPEC_VERSION 8 +#define VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME "VK_KHR_video_decode_queue" + +typedef enum VkVideoDecodeCapabilityFlagBitsKHR { + VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_COINCIDE_BIT_KHR = 0x00000001, + VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR = 0x00000002, + VK_VIDEO_DECODE_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoDecodeCapabilityFlagBitsKHR; +typedef VkFlags VkVideoDecodeCapabilityFlagsKHR; + +typedef enum VkVideoDecodeUsageFlagBitsKHR { + VK_VIDEO_DECODE_USAGE_DEFAULT_KHR = 0, + VK_VIDEO_DECODE_USAGE_TRANSCODING_BIT_KHR = 0x00000001, + VK_VIDEO_DECODE_USAGE_OFFLINE_BIT_KHR = 0x00000002, + VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR = 0x00000004, + VK_VIDEO_DECODE_USAGE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoDecodeUsageFlagBitsKHR; +typedef VkFlags VkVideoDecodeUsageFlagsKHR; +typedef VkFlags VkVideoDecodeFlagsKHR; +typedef struct VkVideoDecodeCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoDecodeCapabilityFlagsKHR flags; +} VkVideoDecodeCapabilitiesKHR; + +typedef struct VkVideoDecodeUsageInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoDecodeUsageFlagsKHR videoUsageHints; +} VkVideoDecodeUsageInfoKHR; + +typedef struct VkVideoDecodeInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoDecodeFlagsKHR flags; + VkBuffer srcBuffer; + VkDeviceSize srcBufferOffset; + VkDeviceSize srcBufferRange; + VkVideoPictureResourceInfoKHR dstPictureResource; + const VkVideoReferenceSlotInfoKHR* pSetupReferenceSlot; + uint32_t referenceSlotCount; + const VkVideoReferenceSlotInfoKHR* pReferenceSlots; +} VkVideoDecodeInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdDecodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR* pDecodeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDecodeVideoKHR( + VkCommandBuffer commandBuffer, + const VkVideoDecodeInfoKHR* pDecodeInfo); +#endif + + +// VK_KHR_video_encode_h264 is a preprocessor guard. Do not pass it to API calls. 
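Editor's note on the VK_KHR_video_decode_queue declaration above: decode work is recorded inside a video coding scope opened with vkCmdBeginVideoCodingKHR. A minimal sketch, assuming the caller filled in the begin and decode info structures (video session, session parameters, DPB slots, bitstream buffer) and that the extension's commands have been loaded; record_decode is a hypothetical helper.

#include <vulkan/vulkan.h>

/* Hypothetical sketch of the command-buffer bracketing for one decode op. */
static void record_decode(VkCommandBuffer                  cmd,
                          const VkVideoBeginCodingInfoKHR* beginInfo,
                          const VkVideoDecodeInfoKHR*      decodeInfo)
{
    VkVideoEndCodingInfoKHR end_info = {0};
    end_info.sType = VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR;

    vkCmdBeginVideoCodingKHR(cmd, beginInfo);  /* opens the video coding scope */
    vkCmdDecodeVideoKHR(cmd, decodeInfo);      /* decodes one picture          */
    vkCmdEndVideoCodingKHR(cmd, &end_info);    /* closes the scope             */
}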
+#define VK_KHR_video_encode_h264 1 +#include "vk_video/vulkan_video_codec_h264std.h" +#include "vk_video/vulkan_video_codec_h264std_encode.h" +#define VK_KHR_VIDEO_ENCODE_H264_SPEC_VERSION 14 +#define VK_KHR_VIDEO_ENCODE_H264_EXTENSION_NAME "VK_KHR_video_encode_h264" + +typedef enum VkVideoEncodeH264CapabilityFlagBitsKHR { + VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H264_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR = 0x00000020, + VK_VIDEO_ENCODE_H264_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR = 0x00000040, + VK_VIDEO_ENCODE_H264_CAPABILITY_PER_SLICE_CONSTANT_QP_BIT_KHR = 0x00000080, + VK_VIDEO_ENCODE_H264_CAPABILITY_GENERATE_PREFIX_NALU_BIT_KHR = 0x00000100, + VK_VIDEO_ENCODE_H264_CAPABILITY_MB_QP_DIFF_WRAPAROUND_BIT_KHR = 0x00000200, + VK_VIDEO_ENCODE_H264_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH264CapabilityFlagBitsKHR; +typedef VkFlags VkVideoEncodeH264CapabilityFlagsKHR; + +typedef enum VkVideoEncodeH264StdFlagBitsKHR { + VK_VIDEO_ENCODE_H264_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H264_STD_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_FLAG_SET_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H264_STD_SCALING_MATRIX_PRESENT_FLAG_SET_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H264_STD_CHROMA_QP_INDEX_OFFSET_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H264_STD_SECOND_CHROMA_QP_INDEX_OFFSET_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H264_STD_PIC_INIT_QP_MINUS26_BIT_KHR = 0x00000020, + VK_VIDEO_ENCODE_H264_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR = 0x00000040, + VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_EXPLICIT_BIT_KHR = 0x00000080, + VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_IMPLICIT_BIT_KHR = 0x00000100, + VK_VIDEO_ENCODE_H264_STD_TRANSFORM_8X8_MODE_FLAG_SET_BIT_KHR = 0x00000200, + VK_VIDEO_ENCODE_H264_STD_DIRECT_SPATIAL_MV_PRED_FLAG_UNSET_BIT_KHR = 0x00000400, + VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_UNSET_BIT_KHR = 0x00000800, + VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_SET_BIT_KHR = 0x00001000, + VK_VIDEO_ENCODE_H264_STD_DIRECT_8X8_INFERENCE_FLAG_UNSET_BIT_KHR = 0x00002000, + VK_VIDEO_ENCODE_H264_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR = 0x00004000, + VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_DISABLED_BIT_KHR = 0x00008000, + VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_ENABLED_BIT_KHR = 0x00010000, + VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_PARTIAL_BIT_KHR = 0x00020000, + VK_VIDEO_ENCODE_H264_STD_SLICE_QP_DELTA_BIT_KHR = 0x00080000, + VK_VIDEO_ENCODE_H264_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR = 0x00100000, + VK_VIDEO_ENCODE_H264_STD_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH264StdFlagBitsKHR; +typedef VkFlags VkVideoEncodeH264StdFlagsKHR; + +typedef enum VkVideoEncodeH264RateControlFlagBitsKHR { + VK_VIDEO_ENCODE_H264_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_REGULAR_GOP_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_TEMPORAL_LAYER_PATTERN_DYADIC_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} 
VkVideoEncodeH264RateControlFlagBitsKHR; +typedef VkFlags VkVideoEncodeH264RateControlFlagsKHR; +typedef struct VkVideoEncodeH264CapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeH264CapabilityFlagsKHR flags; + StdVideoH264LevelIdc maxLevelIdc; + uint32_t maxSliceCount; + uint32_t maxPPictureL0ReferenceCount; + uint32_t maxBPictureL0ReferenceCount; + uint32_t maxL1ReferenceCount; + uint32_t maxTemporalLayerCount; + VkBool32 expectDyadicTemporalLayerPattern; + int32_t minQp; + int32_t maxQp; + VkBool32 prefersGopRemainingFrames; + VkBool32 requiresGopRemainingFrames; + VkVideoEncodeH264StdFlagsKHR stdSyntaxFlags; +} VkVideoEncodeH264CapabilitiesKHR; + +typedef struct VkVideoEncodeH264QpKHR { + int32_t qpI; + int32_t qpP; + int32_t qpB; +} VkVideoEncodeH264QpKHR; + +typedef struct VkVideoEncodeH264QualityLevelPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeH264RateControlFlagsKHR preferredRateControlFlags; + uint32_t preferredGopFrameCount; + uint32_t preferredIdrPeriod; + uint32_t preferredConsecutiveBFrameCount; + uint32_t preferredTemporalLayerCount; + VkVideoEncodeH264QpKHR preferredConstantQp; + uint32_t preferredMaxL0ReferenceCount; + uint32_t preferredMaxL1ReferenceCount; + VkBool32 preferredStdEntropyCodingModeFlag; +} VkVideoEncodeH264QualityLevelPropertiesKHR; + +typedef struct VkVideoEncodeH264SessionCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMaxLevelIdc; + StdVideoH264LevelIdc maxLevelIdc; +} VkVideoEncodeH264SessionCreateInfoKHR; + +typedef struct VkVideoEncodeH264SessionParametersAddInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t stdSPSCount; + const StdVideoH264SequenceParameterSet* pStdSPSs; + uint32_t stdPPSCount; + const StdVideoH264PictureParameterSet* pStdPPSs; +} VkVideoEncodeH264SessionParametersAddInfoKHR; + +typedef struct VkVideoEncodeH264SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxStdSPSCount; + uint32_t maxStdPPSCount; + const VkVideoEncodeH264SessionParametersAddInfoKHR* pParametersAddInfo; +} VkVideoEncodeH264SessionParametersCreateInfoKHR; + +typedef struct VkVideoEncodeH264SessionParametersGetInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 writeStdSPS; + VkBool32 writeStdPPS; + uint32_t stdSPSId; + uint32_t stdPPSId; +} VkVideoEncodeH264SessionParametersGetInfoKHR; + +typedef struct VkVideoEncodeH264SessionParametersFeedbackInfoKHR { + VkStructureType sType; + void* pNext; + VkBool32 hasStdSPSOverrides; + VkBool32 hasStdPPSOverrides; +} VkVideoEncodeH264SessionParametersFeedbackInfoKHR; + +typedef struct VkVideoEncodeH264NaluSliceInfoKHR { + VkStructureType sType; + const void* pNext; + int32_t constantQp; + const StdVideoEncodeH264SliceHeader* pStdSliceHeader; +} VkVideoEncodeH264NaluSliceInfoKHR; + +typedef struct VkVideoEncodeH264PictureInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t naluSliceEntryCount; + const VkVideoEncodeH264NaluSliceInfoKHR* pNaluSliceEntries; + const StdVideoEncodeH264PictureInfo* pStdPictureInfo; + VkBool32 generatePrefixNalu; +} VkVideoEncodeH264PictureInfoKHR; + +typedef struct VkVideoEncodeH264DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoEncodeH264ReferenceInfo* pStdReferenceInfo; +} VkVideoEncodeH264DpbSlotInfoKHR; + +typedef struct VkVideoEncodeH264ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoH264ProfileIdc stdProfileIdc; +} VkVideoEncodeH264ProfileInfoKHR; + +typedef struct 
VkVideoEncodeH264RateControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeH264RateControlFlagsKHR flags; + uint32_t gopFrameCount; + uint32_t idrPeriod; + uint32_t consecutiveBFrameCount; + uint32_t temporalLayerCount; +} VkVideoEncodeH264RateControlInfoKHR; + +typedef struct VkVideoEncodeH264FrameSizeKHR { + uint32_t frameISize; + uint32_t framePSize; + uint32_t frameBSize; +} VkVideoEncodeH264FrameSizeKHR; + +typedef struct VkVideoEncodeH264RateControlLayerInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMinQp; + VkVideoEncodeH264QpKHR minQp; + VkBool32 useMaxQp; + VkVideoEncodeH264QpKHR maxQp; + VkBool32 useMaxFrameSize; + VkVideoEncodeH264FrameSizeKHR maxFrameSize; +} VkVideoEncodeH264RateControlLayerInfoKHR; + +typedef struct VkVideoEncodeH264GopRemainingFrameInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useGopRemainingFrames; + uint32_t gopRemainingI; + uint32_t gopRemainingP; + uint32_t gopRemainingB; +} VkVideoEncodeH264GopRemainingFrameInfoKHR; + + + +// VK_KHR_video_encode_h265 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_encode_h265 1 +#include "vk_video/vulkan_video_codec_h265std.h" +#include "vk_video/vulkan_video_codec_h265std_encode.h" +#define VK_KHR_VIDEO_ENCODE_H265_SPEC_VERSION 14 +#define VK_KHR_VIDEO_ENCODE_H265_EXTENSION_NAME "VK_KHR_video_encode_h265" + +typedef enum VkVideoEncodeH265CapabilityFlagBitsKHR { + VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_SEGMENT_TYPE_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR = 0x00000020, + VK_VIDEO_ENCODE_H265_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR = 0x00000040, + VK_VIDEO_ENCODE_H265_CAPABILITY_PER_SLICE_SEGMENT_CONSTANT_QP_BIT_KHR = 0x00000080, + VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILES_PER_SLICE_SEGMENT_BIT_KHR = 0x00000100, + VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_SEGMENTS_PER_TILE_BIT_KHR = 0x00000200, + VK_VIDEO_ENCODE_H265_CAPABILITY_CU_QP_DIFF_WRAPAROUND_BIT_KHR = 0x00000400, + VK_VIDEO_ENCODE_H265_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265CapabilityFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265CapabilityFlagsKHR; + +typedef enum VkVideoEncodeH265StdFlagBitsKHR { + VK_VIDEO_ENCODE_H265_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_STD_SAMPLE_ADAPTIVE_OFFSET_ENABLED_FLAG_SET_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_STD_SCALING_LIST_DATA_PRESENT_FLAG_SET_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H265_STD_PCM_ENABLED_FLAG_SET_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H265_STD_SPS_TEMPORAL_MVP_ENABLED_FLAG_SET_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H265_STD_INIT_QP_MINUS26_BIT_KHR = 0x00000020, + VK_VIDEO_ENCODE_H265_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR = 0x00000040, + VK_VIDEO_ENCODE_H265_STD_WEIGHTED_BIPRED_FLAG_SET_BIT_KHR = 0x00000080, + VK_VIDEO_ENCODE_H265_STD_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_KHR = 0x00000100, + VK_VIDEO_ENCODE_H265_STD_SIGN_DATA_HIDING_ENABLED_FLAG_SET_BIT_KHR = 0x00000200, + VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_SET_BIT_KHR = 0x00000400, + VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_UNSET_BIT_KHR = 0x00000800, + 
VK_VIDEO_ENCODE_H265_STD_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_FLAG_SET_BIT_KHR = 0x00001000, + VK_VIDEO_ENCODE_H265_STD_TRANSQUANT_BYPASS_ENABLED_FLAG_SET_BIT_KHR = 0x00002000, + VK_VIDEO_ENCODE_H265_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR = 0x00004000, + VK_VIDEO_ENCODE_H265_STD_ENTROPY_CODING_SYNC_ENABLED_FLAG_SET_BIT_KHR = 0x00008000, + VK_VIDEO_ENCODE_H265_STD_DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_SET_BIT_KHR = 0x00010000, + VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENTS_ENABLED_FLAG_SET_BIT_KHR = 0x00020000, + VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENT_FLAG_SET_BIT_KHR = 0x00040000, + VK_VIDEO_ENCODE_H265_STD_SLICE_QP_DELTA_BIT_KHR = 0x00080000, + VK_VIDEO_ENCODE_H265_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR = 0x00100000, + VK_VIDEO_ENCODE_H265_STD_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265StdFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265StdFlagsKHR; + +typedef enum VkVideoEncodeH265CtbSizeFlagBitsKHR { + VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H265_CTB_SIZE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265CtbSizeFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265CtbSizeFlagsKHR; + +typedef enum VkVideoEncodeH265TransformBlockSizeFlagBitsKHR { + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265TransformBlockSizeFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265TransformBlockSizeFlagsKHR; + +typedef enum VkVideoEncodeH265RateControlFlagBitsKHR { + VK_VIDEO_ENCODE_H265_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_REGULAR_GOP_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_TEMPORAL_SUB_LAYER_PATTERN_DYADIC_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265RateControlFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265RateControlFlagsKHR; +typedef struct VkVideoEncodeH265CapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeH265CapabilityFlagsKHR flags; + StdVideoH265LevelIdc maxLevelIdc; + uint32_t maxSliceSegmentCount; + VkExtent2D maxTiles; + VkVideoEncodeH265CtbSizeFlagsKHR ctbSizes; + VkVideoEncodeH265TransformBlockSizeFlagsKHR transformBlockSizes; + uint32_t maxPPictureL0ReferenceCount; + uint32_t maxBPictureL0ReferenceCount; + uint32_t maxL1ReferenceCount; + uint32_t maxSubLayerCount; + VkBool32 expectDyadicTemporalSubLayerPattern; + int32_t minQp; + int32_t maxQp; + VkBool32 prefersGopRemainingFrames; + VkBool32 requiresGopRemainingFrames; + VkVideoEncodeH265StdFlagsKHR stdSyntaxFlags; +} VkVideoEncodeH265CapabilitiesKHR; + +typedef struct VkVideoEncodeH265SessionCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMaxLevelIdc; + StdVideoH265LevelIdc maxLevelIdc; +} VkVideoEncodeH265SessionCreateInfoKHR; + +typedef struct VkVideoEncodeH265QpKHR { + int32_t qpI; + int32_t qpP; + int32_t qpB; +} VkVideoEncodeH265QpKHR; + +typedef struct VkVideoEncodeH265QualityLevelPropertiesKHR 
{ + VkStructureType sType; + void* pNext; + VkVideoEncodeH265RateControlFlagsKHR preferredRateControlFlags; + uint32_t preferredGopFrameCount; + uint32_t preferredIdrPeriod; + uint32_t preferredConsecutiveBFrameCount; + uint32_t preferredSubLayerCount; + VkVideoEncodeH265QpKHR preferredConstantQp; + uint32_t preferredMaxL0ReferenceCount; + uint32_t preferredMaxL1ReferenceCount; +} VkVideoEncodeH265QualityLevelPropertiesKHR; + +typedef struct VkVideoEncodeH265SessionParametersAddInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t stdVPSCount; + const StdVideoH265VideoParameterSet* pStdVPSs; + uint32_t stdSPSCount; + const StdVideoH265SequenceParameterSet* pStdSPSs; + uint32_t stdPPSCount; + const StdVideoH265PictureParameterSet* pStdPPSs; +} VkVideoEncodeH265SessionParametersAddInfoKHR; + +typedef struct VkVideoEncodeH265SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxStdVPSCount; + uint32_t maxStdSPSCount; + uint32_t maxStdPPSCount; + const VkVideoEncodeH265SessionParametersAddInfoKHR* pParametersAddInfo; +} VkVideoEncodeH265SessionParametersCreateInfoKHR; + +typedef struct VkVideoEncodeH265SessionParametersGetInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 writeStdVPS; + VkBool32 writeStdSPS; + VkBool32 writeStdPPS; + uint32_t stdVPSId; + uint32_t stdSPSId; + uint32_t stdPPSId; +} VkVideoEncodeH265SessionParametersGetInfoKHR; + +typedef struct VkVideoEncodeH265SessionParametersFeedbackInfoKHR { + VkStructureType sType; + void* pNext; + VkBool32 hasStdVPSOverrides; + VkBool32 hasStdSPSOverrides; + VkBool32 hasStdPPSOverrides; +} VkVideoEncodeH265SessionParametersFeedbackInfoKHR; + +typedef struct VkVideoEncodeH265NaluSliceSegmentInfoKHR { + VkStructureType sType; + const void* pNext; + int32_t constantQp; + const StdVideoEncodeH265SliceSegmentHeader* pStdSliceSegmentHeader; +} VkVideoEncodeH265NaluSliceSegmentInfoKHR; + +typedef struct VkVideoEncodeH265PictureInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t naluSliceSegmentEntryCount; + const VkVideoEncodeH265NaluSliceSegmentInfoKHR* pNaluSliceSegmentEntries; + const StdVideoEncodeH265PictureInfo* pStdPictureInfo; +} VkVideoEncodeH265PictureInfoKHR; + +typedef struct VkVideoEncodeH265DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoEncodeH265ReferenceInfo* pStdReferenceInfo; +} VkVideoEncodeH265DpbSlotInfoKHR; + +typedef struct VkVideoEncodeH265ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoH265ProfileIdc stdProfileIdc; +} VkVideoEncodeH265ProfileInfoKHR; + +typedef struct VkVideoEncodeH265RateControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeH265RateControlFlagsKHR flags; + uint32_t gopFrameCount; + uint32_t idrPeriod; + uint32_t consecutiveBFrameCount; + uint32_t subLayerCount; +} VkVideoEncodeH265RateControlInfoKHR; + +typedef struct VkVideoEncodeH265FrameSizeKHR { + uint32_t frameISize; + uint32_t framePSize; + uint32_t frameBSize; +} VkVideoEncodeH265FrameSizeKHR; + +typedef struct VkVideoEncodeH265RateControlLayerInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMinQp; + VkVideoEncodeH265QpKHR minQp; + VkBool32 useMaxQp; + VkVideoEncodeH265QpKHR maxQp; + VkBool32 useMaxFrameSize; + VkVideoEncodeH265FrameSizeKHR maxFrameSize; +} VkVideoEncodeH265RateControlLayerInfoKHR; + +typedef struct VkVideoEncodeH265GopRemainingFrameInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useGopRemainingFrames; + uint32_t gopRemainingI; + 
uint32_t gopRemainingP; + uint32_t gopRemainingB; +} VkVideoEncodeH265GopRemainingFrameInfoKHR; + + + +// VK_KHR_video_decode_h264 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_decode_h264 1 +#include "vk_video/vulkan_video_codec_h264std_decode.h" +#define VK_KHR_VIDEO_DECODE_H264_SPEC_VERSION 9 +#define VK_KHR_VIDEO_DECODE_H264_EXTENSION_NAME "VK_KHR_video_decode_h264" + +typedef enum VkVideoDecodeH264PictureLayoutFlagBitsKHR { + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_KHR = 0, + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_KHR = 0x00000001, + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_KHR = 0x00000002, + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoDecodeH264PictureLayoutFlagBitsKHR; +typedef VkFlags VkVideoDecodeH264PictureLayoutFlagsKHR; +typedef struct VkVideoDecodeH264ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoH264ProfileIdc stdProfileIdc; + VkVideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout; +} VkVideoDecodeH264ProfileInfoKHR; + +typedef struct VkVideoDecodeH264CapabilitiesKHR { + VkStructureType sType; + void* pNext; + StdVideoH264LevelIdc maxLevelIdc; + VkOffset2D fieldOffsetGranularity; +} VkVideoDecodeH264CapabilitiesKHR; + +typedef struct VkVideoDecodeH264SessionParametersAddInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t stdSPSCount; + const StdVideoH264SequenceParameterSet* pStdSPSs; + uint32_t stdPPSCount; + const StdVideoH264PictureParameterSet* pStdPPSs; +} VkVideoDecodeH264SessionParametersAddInfoKHR; + +typedef struct VkVideoDecodeH264SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxStdSPSCount; + uint32_t maxStdPPSCount; + const VkVideoDecodeH264SessionParametersAddInfoKHR* pParametersAddInfo; +} VkVideoDecodeH264SessionParametersCreateInfoKHR; + +typedef struct VkVideoDecodeH264PictureInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH264PictureInfo* pStdPictureInfo; + uint32_t sliceCount; + const uint32_t* pSliceOffsets; +} VkVideoDecodeH264PictureInfoKHR; + +typedef struct VkVideoDecodeH264DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH264ReferenceInfo* pStdReferenceInfo; +} VkVideoDecodeH264DpbSlotInfoKHR; + + + +// VK_KHR_dynamic_rendering is a preprocessor guard. Do not pass it to API calls. 
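The VK_KHR_dynamic_rendering declarations below allow a command buffer to render directly to image views without creating VkRenderPass or VkFramebuffer objects. A minimal sketch of a single-color-attachment rendering scope (illustrative only; cmdBuf and swapchainView are hypothetical handles created elsewhere):

    VkRenderingAttachmentInfoKHR colorAttachment = {};
    colorAttachment.sType       = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR;
    colorAttachment.imageView   = swapchainView;
    colorAttachment.imageLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    colorAttachment.loadOp      = VK_ATTACHMENT_LOAD_OP_CLEAR;
    colorAttachment.storeOp     = VK_ATTACHMENT_STORE_OP_STORE;

    VkRenderingInfoKHR renderingInfo = {};
    renderingInfo.sType                    = VK_STRUCTURE_TYPE_RENDERING_INFO_KHR;
    renderingInfo.renderArea.extent.width  = 1280;
    renderingInfo.renderArea.extent.height = 720;
    renderingInfo.layerCount               = 1;
    renderingInfo.colorAttachmentCount     = 1;
    renderingInfo.pColorAttachments        = &colorAttachment;

    vkCmdBeginRenderingKHR(cmdBuf, &renderingInfo);
    // ... bind pipeline and record draw calls ...
    vkCmdEndRenderingKHR(cmdBuf);

The bound graphics pipeline must have been created with a matching VkPipelineRenderingCreateInfoKHR in its pNext chain instead of a render pass.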
+#define VK_KHR_dynamic_rendering 1 +#define VK_KHR_DYNAMIC_RENDERING_SPEC_VERSION 1 +#define VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME "VK_KHR_dynamic_rendering" +typedef VkRenderingFlags VkRenderingFlagsKHR; + +typedef VkRenderingFlagBits VkRenderingFlagBitsKHR; + +typedef VkRenderingInfo VkRenderingInfoKHR; + +typedef VkRenderingAttachmentInfo VkRenderingAttachmentInfoKHR; + +typedef VkPipelineRenderingCreateInfo VkPipelineRenderingCreateInfoKHR; + +typedef VkPhysicalDeviceDynamicRenderingFeatures VkPhysicalDeviceDynamicRenderingFeaturesKHR; + +typedef VkCommandBufferInheritanceRenderingInfo VkCommandBufferInheritanceRenderingInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderingKHR)(VkCommandBuffer commandBuffer, const VkRenderingInfo* pRenderingInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderingKHR)(VkCommandBuffer commandBuffer); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderingKHR( + VkCommandBuffer commandBuffer, + const VkRenderingInfo* pRenderingInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderingKHR( + VkCommandBuffer commandBuffer); +#endif + + +// VK_KHR_multiview is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_multiview 1 +#define VK_KHR_MULTIVIEW_SPEC_VERSION 1 +#define VK_KHR_MULTIVIEW_EXTENSION_NAME "VK_KHR_multiview" +typedef VkRenderPassMultiviewCreateInfo VkRenderPassMultiviewCreateInfoKHR; + +typedef VkPhysicalDeviceMultiviewFeatures VkPhysicalDeviceMultiviewFeaturesKHR; + +typedef VkPhysicalDeviceMultiviewProperties VkPhysicalDeviceMultiviewPropertiesKHR; + + + +// VK_KHR_get_physical_device_properties2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_get_physical_device_properties2 1 +#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION 2 +#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_physical_device_properties2" +typedef VkPhysicalDeviceFeatures2 VkPhysicalDeviceFeatures2KHR; + +typedef VkPhysicalDeviceProperties2 VkPhysicalDeviceProperties2KHR; + +typedef VkFormatProperties2 VkFormatProperties2KHR; + +typedef VkImageFormatProperties2 VkImageFormatProperties2KHR; + +typedef VkPhysicalDeviceImageFormatInfo2 VkPhysicalDeviceImageFormatInfo2KHR; + +typedef VkQueueFamilyProperties2 VkQueueFamilyProperties2KHR; + +typedef VkPhysicalDeviceMemoryProperties2 VkPhysicalDeviceMemoryProperties2KHR; + +typedef VkSparseImageFormatProperties2 VkSparseImageFormatProperties2KHR; + +typedef VkPhysicalDeviceSparseImageFormatInfo2 VkPhysicalDeviceSparseImageFormatInfo2KHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2KHR)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties); 
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, + VkImageFormatProperties2* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties2* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2* pMemoryProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties2* pProperties); +#endif + + +// VK_KHR_device_group is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_device_group 1 +#define VK_KHR_DEVICE_GROUP_SPEC_VERSION 4 +#define VK_KHR_DEVICE_GROUP_EXTENSION_NAME "VK_KHR_device_group" +typedef VkPeerMemoryFeatureFlags VkPeerMemoryFeatureFlagsKHR; + +typedef VkPeerMemoryFeatureFlagBits VkPeerMemoryFeatureFlagBitsKHR; + +typedef VkMemoryAllocateFlags VkMemoryAllocateFlagsKHR; + +typedef VkMemoryAllocateFlagBits VkMemoryAllocateFlagBitsKHR; + +typedef VkMemoryAllocateFlagsInfo VkMemoryAllocateFlagsInfoKHR; + +typedef VkDeviceGroupRenderPassBeginInfo VkDeviceGroupRenderPassBeginInfoKHR; + +typedef VkDeviceGroupCommandBufferBeginInfo VkDeviceGroupCommandBufferBeginInfoKHR; + +typedef VkDeviceGroupSubmitInfo VkDeviceGroupSubmitInfoKHR; + +typedef VkDeviceGroupBindSparseInfo VkDeviceGroupBindSparseInfoKHR; + +typedef VkBindBufferMemoryDeviceGroupInfo VkBindBufferMemoryDeviceGroupInfoKHR; + +typedef VkBindImageMemoryDeviceGroupInfo VkBindImageMemoryDeviceGroupInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); +typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMaskKHR)(VkCommandBuffer commandBuffer, uint32_t deviceMask); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchBaseKHR)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeaturesKHR( + VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMaskKHR( + VkCommandBuffer commandBuffer, + uint32_t deviceMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHR( + VkCommandBuffer 
commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); +#endif + + +// VK_KHR_shader_draw_parameters is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_draw_parameters 1 +#define VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION 1 +#define VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME "VK_KHR_shader_draw_parameters" + + +// VK_KHR_maintenance1 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_maintenance1 1 +#define VK_KHR_MAINTENANCE_1_SPEC_VERSION 2 +#define VK_KHR_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_maintenance1" +// VK_KHR_MAINTENANCE1_SPEC_VERSION is a deprecated alias +#define VK_KHR_MAINTENANCE1_SPEC_VERSION VK_KHR_MAINTENANCE_1_SPEC_VERSION +// VK_KHR_MAINTENANCE1_EXTENSION_NAME is a deprecated alias +#define VK_KHR_MAINTENANCE1_EXTENSION_NAME VK_KHR_MAINTENANCE_1_EXTENSION_NAME +typedef VkCommandPoolTrimFlags VkCommandPoolTrimFlagsKHR; + +typedef void (VKAPI_PTR *PFN_vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkTrimCommandPoolKHR( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags); +#endif + + +// VK_KHR_device_group_creation is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_device_group_creation 1 +#define VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION 1 +#define VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME "VK_KHR_device_group_creation" +#define VK_MAX_DEVICE_GROUP_SIZE_KHR VK_MAX_DEVICE_GROUP_SIZE +typedef VkPhysicalDeviceGroupProperties VkPhysicalDeviceGroupPropertiesKHR; + +typedef VkDeviceGroupDeviceCreateInfo VkDeviceGroupDeviceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroupsKHR)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroupsKHR( + VkInstance instance, + uint32_t* pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); +#endif + + +// VK_KHR_external_memory_capabilities is a preprocessor guard. Do not pass it to API calls. 
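The VK_KHR_external_memory_capabilities declarations below are typically used to check, up front, whether a buffer with a given usage can be exported through a particular handle type. A small sketch under that assumption (physicalDevice is a hypothetical handle obtained elsewhere):

    VkPhysicalDeviceExternalBufferInfoKHR externalBufferInfo = {};
    externalBufferInfo.sType      = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO;
    externalBufferInfo.usage      = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    externalBufferInfo.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;

    VkExternalBufferPropertiesKHR externalBufferProps = {};
    externalBufferProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES;

    vkGetPhysicalDeviceExternalBufferPropertiesKHR(physicalDevice,
                                                   &externalBufferInfo,
                                                   &externalBufferProps);

    if (externalBufferProps.externalMemoryProperties.externalMemoryFeatures &
        VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT)
    {
        // Opaque-FD export is supported for this buffer usage.
    }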
+#define VK_KHR_external_memory_capabilities 1 +#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_memory_capabilities" +#define VK_LUID_SIZE_KHR VK_LUID_SIZE +typedef VkExternalMemoryHandleTypeFlags VkExternalMemoryHandleTypeFlagsKHR; + +typedef VkExternalMemoryHandleTypeFlagBits VkExternalMemoryHandleTypeFlagBitsKHR; + +typedef VkExternalMemoryFeatureFlags VkExternalMemoryFeatureFlagsKHR; + +typedef VkExternalMemoryFeatureFlagBits VkExternalMemoryFeatureFlagBitsKHR; + +typedef VkExternalMemoryProperties VkExternalMemoryPropertiesKHR; + +typedef VkPhysicalDeviceExternalImageFormatInfo VkPhysicalDeviceExternalImageFormatInfoKHR; + +typedef VkExternalImageFormatProperties VkExternalImageFormatPropertiesKHR; + +typedef VkPhysicalDeviceExternalBufferInfo VkPhysicalDeviceExternalBufferInfoKHR; + +typedef VkExternalBufferProperties VkExternalBufferPropertiesKHR; + +typedef VkPhysicalDeviceIDProperties VkPhysicalDeviceIDPropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, + VkExternalBufferProperties* pExternalBufferProperties); +#endif + + +// VK_KHR_external_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_memory 1 +#define VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME "VK_KHR_external_memory" +#define VK_QUEUE_FAMILY_EXTERNAL_KHR VK_QUEUE_FAMILY_EXTERNAL +typedef VkExternalMemoryImageCreateInfo VkExternalMemoryImageCreateInfoKHR; + +typedef VkExternalMemoryBufferCreateInfo VkExternalMemoryBufferCreateInfoKHR; + +typedef VkExportMemoryAllocateInfo VkExportMemoryAllocateInfoKHR; + + + +// VK_KHR_external_memory_fd is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_memory_fd 1 +#define VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME "VK_KHR_external_memory_fd" +typedef struct VkImportMemoryFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + int fd; +} VkImportMemoryFdInfoKHR; + +typedef struct VkMemoryFdPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryFdPropertiesKHR; + +typedef struct VkMemoryGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetFdInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdPropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR( + VkDevice device, + const VkMemoryGetFdInfoKHR* pGetFdInfo, + int* pFd); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + int fd, + VkMemoryFdPropertiesKHR* pMemoryFdProperties); +#endif + + +// VK_KHR_external_semaphore_capabilities is a preprocessor guard. 
Do not pass it to API calls. +#define VK_KHR_external_semaphore_capabilities 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_semaphore_capabilities" +typedef VkExternalSemaphoreHandleTypeFlags VkExternalSemaphoreHandleTypeFlagsKHR; + +typedef VkExternalSemaphoreHandleTypeFlagBits VkExternalSemaphoreHandleTypeFlagBitsKHR; + +typedef VkExternalSemaphoreFeatureFlags VkExternalSemaphoreFeatureFlagsKHR; + +typedef VkExternalSemaphoreFeatureFlagBits VkExternalSemaphoreFeatureFlagBitsKHR; + +typedef VkPhysicalDeviceExternalSemaphoreInfo VkPhysicalDeviceExternalSemaphoreInfoKHR; + +typedef VkExternalSemaphoreProperties VkExternalSemaphorePropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, + VkExternalSemaphoreProperties* pExternalSemaphoreProperties); +#endif + + +// VK_KHR_external_semaphore is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_semaphore 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_KHR_external_semaphore" +typedef VkSemaphoreImportFlags VkSemaphoreImportFlagsKHR; + +typedef VkSemaphoreImportFlagBits VkSemaphoreImportFlagBitsKHR; + +typedef VkExportSemaphoreCreateInfo VkExportSemaphoreCreateInfoKHR; + + + +// VK_KHR_external_semaphore_fd is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_semaphore_fd 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "VK_KHR_external_semaphore_fd" +typedef struct VkImportSemaphoreFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + int fd; +} VkImportSemaphoreFdInfoKHR; + +typedef struct VkSemaphoreGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetFdInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreFdKHR)(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreFdKHR)(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR( + VkDevice device, + const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR( + VkDevice device, + const VkSemaphoreGetFdInfoKHR* pGetFdInfo, + int* pFd); +#endif + + +// VK_KHR_push_descriptor is a preprocessor guard. Do not pass it to API calls. 
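The VK_KHR_push_descriptor declarations below let descriptors be recorded straight into a command buffer instead of being written into an allocated descriptor set. A minimal sketch (illustrative only; cmdBuf, uniformBuffer and pipelineLayout are hypothetical, and the pipeline layout is assumed to have been created with a push-descriptor set layout):

    VkDescriptorBufferInfo bufferInfo = {};
    bufferInfo.buffer = uniformBuffer;
    bufferInfo.range  = VK_WHOLE_SIZE;

    VkWriteDescriptorSet write = {};
    write.sType           = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    write.dstBinding      = 0;
    write.descriptorCount = 1;
    write.descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    write.pBufferInfo     = &bufferInfo;
    // dstSet is ignored for push descriptors; the data is consumed by the command buffer.

    vkCmdPushDescriptorSetKHR(cmdBuf,
                              VK_PIPELINE_BIND_POINT_GRAPHICS,
                              pipelineLayout,
                              0,            // set number
                              1,            // descriptorWriteCount
                              &write);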
+#define VK_KHR_push_descriptor 1 +#define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 2 +#define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor" +typedef VkPhysicalDevicePushDescriptorProperties VkPhysicalDevicePushDescriptorPropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetKHR( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR( + VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void* pData); +#endif + + +// VK_KHR_shader_float16_int8 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_float16_int8 1 +#define VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION 1 +#define VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME "VK_KHR_shader_float16_int8" +typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceShaderFloat16Int8FeaturesKHR; + +typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceFloat16Int8FeaturesKHR; + + + +// VK_KHR_16bit_storage is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_16bit_storage 1 +#define VK_KHR_16BIT_STORAGE_SPEC_VERSION 1 +#define VK_KHR_16BIT_STORAGE_EXTENSION_NAME "VK_KHR_16bit_storage" +typedef VkPhysicalDevice16BitStorageFeatures VkPhysicalDevice16BitStorageFeaturesKHR; + + + +// VK_KHR_incremental_present is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_incremental_present 1 +#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 2 +#define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present" +typedef struct VkRectLayerKHR { + VkOffset2D offset; + VkExtent2D extent; + uint32_t layer; +} VkRectLayerKHR; + +typedef struct VkPresentRegionKHR { + uint32_t rectangleCount; + const VkRectLayerKHR* pRectangles; +} VkPresentRegionKHR; + +typedef struct VkPresentRegionsKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentRegionKHR* pRegions; +} VkPresentRegionsKHR; + + + +// VK_KHR_descriptor_update_template is a preprocessor guard. Do not pass it to API calls. 
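The VK_KHR_descriptor_update_template declarations below describe descriptor writes once, as offsets into an application-provided memory blob, so repeated updates avoid rebuilding VkWriteDescriptorSet arrays. A sketch of one possible use (illustrative only; device, setLayout, descriptorSet and uniformBuffer are hypothetical objects created elsewhere):

    VkDescriptorUpdateTemplateEntryKHR entry = {};
    entry.dstBinding      = 0;
    entry.descriptorCount = 1;
    entry.descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    entry.offset          = 0;                                // offset of the data in the user blob
    entry.stride          = sizeof(VkDescriptorBufferInfo);

    VkDescriptorUpdateTemplateCreateInfoKHR templateInfo = {};
    templateInfo.sType                      = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO;
    templateInfo.descriptorUpdateEntryCount = 1;
    templateInfo.pDescriptorUpdateEntries   = &entry;
    templateInfo.templateType               = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET;
    templateInfo.descriptorSetLayout        = setLayout;

    VkDescriptorUpdateTemplateKHR updateTemplate = VK_NULL_HANDLE;
    vkCreateDescriptorUpdateTemplateKHR(device, &templateInfo, NULL, &updateTemplate);

    // Later, each update only needs the raw data blob:
    VkDescriptorBufferInfo bufferInfo = { uniformBuffer, 0, VK_WHOLE_SIZE };
    vkUpdateDescriptorSetWithTemplateKHR(device, descriptorSet, updateTemplate, &bufferInfo);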
+#define VK_KHR_descriptor_update_template 1 +typedef VkDescriptorUpdateTemplate VkDescriptorUpdateTemplateKHR; + +#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION 1 +#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME "VK_KHR_descriptor_update_template" +typedef VkDescriptorUpdateTemplateType VkDescriptorUpdateTemplateTypeKHR; + +typedef VkDescriptorUpdateTemplateCreateFlags VkDescriptorUpdateTemplateCreateFlagsKHR; + +typedef VkDescriptorUpdateTemplateEntry VkDescriptorUpdateTemplateEntryKHR; + +typedef VkDescriptorUpdateTemplateCreateInfo VkDescriptorUpdateTemplateCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplateKHR)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplateKHR)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplateKHR( + VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplateKHR( + VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR( + VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void* pData); +#endif + + +// VK_KHR_imageless_framebuffer is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_imageless_framebuffer 1 +#define VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION 1 +#define VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME "VK_KHR_imageless_framebuffer" +typedef VkPhysicalDeviceImagelessFramebufferFeatures VkPhysicalDeviceImagelessFramebufferFeaturesKHR; + +typedef VkFramebufferAttachmentsCreateInfo VkFramebufferAttachmentsCreateInfoKHR; + +typedef VkFramebufferAttachmentImageInfo VkFramebufferAttachmentImageInfoKHR; + +typedef VkRenderPassAttachmentBeginInfo VkRenderPassAttachmentBeginInfoKHR; + + + +// VK_KHR_create_renderpass2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_create_renderpass2 1 +#define VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION 1 +#define VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME "VK_KHR_create_renderpass2" +typedef VkRenderPassCreateInfo2 VkRenderPassCreateInfo2KHR; + +typedef VkAttachmentDescription2 VkAttachmentDescription2KHR; + +typedef VkAttachmentReference2 VkAttachmentReference2KHR; + +typedef VkSubpassDescription2 VkSubpassDescription2KHR; + +typedef VkSubpassDependency2 VkSubpassDependency2KHR; + +typedef VkSubpassBeginInfo VkSubpassBeginInfoKHR; + +typedef VkSubpassEndInfo VkSubpassEndInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2KHR)(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2KHR( + VkDevice device, + const VkRenderPassCreateInfo2* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2KHR( + VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + const VkSubpassBeginInfo* pSubpassBeginInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2KHR( + VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo* pSubpassBeginInfo, + const VkSubpassEndInfo* pSubpassEndInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2KHR( + VkCommandBuffer commandBuffer, + const VkSubpassEndInfo* pSubpassEndInfo); +#endif + + +// VK_KHR_shared_presentable_image is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shared_presentable_image 1 +#define VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION 1 +#define VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME "VK_KHR_shared_presentable_image" +typedef struct VkSharedPresentSurfaceCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkImageUsageFlags sharedPresentSupportedUsageFlags; +} VkSharedPresentSurfaceCapabilitiesKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR swapchain); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainStatusKHR( + VkDevice device, + VkSwapchainKHR swapchain); +#endif + + +// VK_KHR_external_fence_capabilities is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_external_fence_capabilities 1 +#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_fence_capabilities" +typedef VkExternalFenceHandleTypeFlags VkExternalFenceHandleTypeFlagsKHR; + +typedef VkExternalFenceHandleTypeFlagBits VkExternalFenceHandleTypeFlagBitsKHR; + +typedef VkExternalFenceFeatureFlags VkExternalFenceFeatureFlagsKHR; + +typedef VkExternalFenceFeatureFlagBits VkExternalFenceFeatureFlagBitsKHR; + +typedef VkPhysicalDeviceExternalFenceInfo VkPhysicalDeviceExternalFenceInfoKHR; + +typedef VkExternalFenceProperties VkExternalFencePropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFencePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, + VkExternalFenceProperties* pExternalFenceProperties); +#endif + + +// VK_KHR_external_fence is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_fence 1 +#define VK_KHR_EXTERNAL_FENCE_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME "VK_KHR_external_fence" +typedef VkFenceImportFlags VkFenceImportFlagsKHR; + +typedef VkFenceImportFlagBits VkFenceImportFlagBitsKHR; + +typedef VkExportFenceCreateInfo VkExportFenceCreateInfoKHR; + + + +// VK_KHR_external_fence_fd is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_fence_fd 1 +#define VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME "VK_KHR_external_fence_fd" +typedef struct VkImportFenceFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkFenceImportFlags flags; + VkExternalFenceHandleTypeFlagBits handleType; + int fd; +} VkImportFenceFdInfoKHR; + +typedef struct VkFenceGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkExternalFenceHandleTypeFlagBits handleType; +} VkFenceGetFdInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceFdKHR( + VkDevice device, + const VkImportFenceFdInfoKHR* pImportFenceFdInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR( + VkDevice device, + const VkFenceGetFdInfoKHR* pGetFdInfo, + int* pFd); +#endif + + +// VK_KHR_performance_query is a preprocessor guard. Do not pass it to API calls. 
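The VK_KHR_performance_query declarations below follow the usual two-call enumeration idiom for counters, and sampling them requires holding the profiling lock. A sketch of the expected call sequence (illustrative only; physicalDevice, device and queueFamilyIndex are hypothetical values obtained elsewhere):

    uint32_t counterCount = 0;
    vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
        physicalDevice, queueFamilyIndex, &counterCount, NULL, NULL);
    // ... allocate counterCount VkPerformanceCounterKHR / VkPerformanceCounterDescriptionKHR
    //     elements (sType set, pNext NULL) and call the function again to fill them ...

    VkAcquireProfilingLockInfoKHR lockInfo = {};
    lockInfo.sType   = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR;
    lockInfo.timeout = UINT64_MAX;   // block until the lock is granted
    vkAcquireProfilingLockKHR(device, &lockInfo);

    // ... submit work that samples a performance query pool, one submission per
    //     counter pass as reported by vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR ...

    vkReleaseProfilingLockKHR(device);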
+#define VK_KHR_performance_query 1 +#define VK_KHR_PERFORMANCE_QUERY_SPEC_VERSION 1 +#define VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME "VK_KHR_performance_query" + +typedef enum VkPerformanceCounterUnitKHR { + VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR = 0, + VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR = 1, + VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR = 2, + VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR = 3, + VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR = 4, + VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR = 5, + VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR = 6, + VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR = 7, + VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR = 8, + VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR = 9, + VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR = 10, + VK_PERFORMANCE_COUNTER_UNIT_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterUnitKHR; + +typedef enum VkPerformanceCounterScopeKHR { + VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR = 0, + VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR = 1, + VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR = 2, + // VK_QUERY_SCOPE_COMMAND_BUFFER_KHR is a deprecated alias + VK_QUERY_SCOPE_COMMAND_BUFFER_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, + // VK_QUERY_SCOPE_RENDER_PASS_KHR is a deprecated alias + VK_QUERY_SCOPE_RENDER_PASS_KHR = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, + // VK_QUERY_SCOPE_COMMAND_KHR is a deprecated alias + VK_QUERY_SCOPE_COMMAND_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, + VK_PERFORMANCE_COUNTER_SCOPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterScopeKHR; + +typedef enum VkPerformanceCounterStorageKHR { + VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR = 0, + VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR = 1, + VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR = 2, + VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR = 3, + VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR = 4, + VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR = 5, + VK_PERFORMANCE_COUNTER_STORAGE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterStorageKHR; + +typedef enum VkPerformanceCounterDescriptionFlagBitsKHR { + VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR = 0x00000001, + VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR = 0x00000002, + // VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR is a deprecated alias + VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR, + // VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR is a deprecated alias + VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR, + VK_PERFORMANCE_COUNTER_DESCRIPTION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterDescriptionFlagBitsKHR; +typedef VkFlags VkPerformanceCounterDescriptionFlagsKHR; + +typedef enum VkAcquireProfilingLockFlagBitsKHR { + VK_ACQUIRE_PROFILING_LOCK_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAcquireProfilingLockFlagBitsKHR; +typedef VkFlags VkAcquireProfilingLockFlagsKHR; +typedef struct VkPhysicalDevicePerformanceQueryFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 performanceCounterQueryPools; + VkBool32 performanceCounterMultipleQueryPools; +} VkPhysicalDevicePerformanceQueryFeaturesKHR; + +typedef struct VkPhysicalDevicePerformanceQueryPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 allowCommandBufferQueryCopies; +} VkPhysicalDevicePerformanceQueryPropertiesKHR; + +typedef struct VkPerformanceCounterKHR { + VkStructureType sType; + void* pNext; + VkPerformanceCounterUnitKHR 
unit; + VkPerformanceCounterScopeKHR scope; + VkPerformanceCounterStorageKHR storage; + uint8_t uuid[VK_UUID_SIZE]; +} VkPerformanceCounterKHR; + +typedef struct VkPerformanceCounterDescriptionKHR { + VkStructureType sType; + void* pNext; + VkPerformanceCounterDescriptionFlagsKHR flags; + char name[VK_MAX_DESCRIPTION_SIZE]; + char category[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; +} VkPerformanceCounterDescriptionKHR; + +typedef struct VkQueryPoolPerformanceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t queueFamilyIndex; + uint32_t counterIndexCount; + const uint32_t* pCounterIndices; +} VkQueryPoolPerformanceCreateInfoKHR; + +typedef union VkPerformanceCounterResultKHR { + int32_t int32; + int64_t int64; + uint32_t uint32; + uint64_t uint64; + float float32; + double float64; +} VkPerformanceCounterResultKHR; + +typedef struct VkAcquireProfilingLockInfoKHR { + VkStructureType sType; + const void* pNext; + VkAcquireProfilingLockFlagsKHR flags; + uint64_t timeout; +} VkAcquireProfilingLockInfoKHR; + +typedef struct VkPerformanceQuerySubmitInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t counterPassIndex; +} VkPerformanceQuerySubmitInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR)(VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireProfilingLockKHR)(VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkReleaseProfilingLockKHR)(VkDevice device); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + uint32_t* pCounterCount, + VkPerformanceCounterKHR* pCounters, + VkPerformanceCounterDescriptionKHR* pCounterDescriptions); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + VkPhysicalDevice physicalDevice, + const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, + uint32_t* pNumPasses); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireProfilingLockKHR( + VkDevice device, + const VkAcquireProfilingLockInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkReleaseProfilingLockKHR( + VkDevice device); +#endif + + +// VK_KHR_maintenance2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_maintenance2 1 +#define VK_KHR_MAINTENANCE_2_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_2_EXTENSION_NAME "VK_KHR_maintenance2" +// VK_KHR_MAINTENANCE2_SPEC_VERSION is a deprecated alias +#define VK_KHR_MAINTENANCE2_SPEC_VERSION VK_KHR_MAINTENANCE_2_SPEC_VERSION +// VK_KHR_MAINTENANCE2_EXTENSION_NAME is a deprecated alias +#define VK_KHR_MAINTENANCE2_EXTENSION_NAME VK_KHR_MAINTENANCE_2_EXTENSION_NAME +typedef VkPointClippingBehavior VkPointClippingBehaviorKHR; + +typedef VkTessellationDomainOrigin VkTessellationDomainOriginKHR; + +typedef VkPhysicalDevicePointClippingProperties VkPhysicalDevicePointClippingPropertiesKHR; + +typedef VkRenderPassInputAttachmentAspectCreateInfo VkRenderPassInputAttachmentAspectCreateInfoKHR; + +typedef VkInputAttachmentAspectReference VkInputAttachmentAspectReferenceKHR; + +typedef VkImageViewUsageCreateInfo VkImageViewUsageCreateInfoKHR; + +typedef VkPipelineTessellationDomainOriginStateCreateInfo VkPipelineTessellationDomainOriginStateCreateInfoKHR; + + + +// VK_KHR_get_surface_capabilities2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_get_surface_capabilities2 1 +#define VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION 1 +#define VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME "VK_KHR_get_surface_capabilities2" +typedef struct VkPhysicalDeviceSurfaceInfo2KHR { + VkStructureType sType; + const void* pNext; + VkSurfaceKHR surface; +} VkPhysicalDeviceSurfaceInfo2KHR; + +typedef struct VkSurfaceCapabilities2KHR { + VkStructureType sType; + void* pNext; + VkSurfaceCapabilitiesKHR surfaceCapabilities; +} VkSurfaceCapabilities2KHR; + +typedef struct VkSurfaceFormat2KHR { + VkStructureType sType; + void* pNext; + VkSurfaceFormatKHR surfaceFormat; +} VkSurfaceFormat2KHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + VkSurfaceCapabilities2KHR* pSurfaceCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormat2KHR* pSurfaceFormats); +#endif + + +// VK_KHR_variable_pointers is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_variable_pointers 1 +#define VK_KHR_VARIABLE_POINTERS_SPEC_VERSION 1 +#define VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME "VK_KHR_variable_pointers" +typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointerFeaturesKHR; + +typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointersFeaturesKHR; + + + +// VK_KHR_get_display_properties2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_get_display_properties2 1 +#define VK_KHR_GET_DISPLAY_PROPERTIES_2_SPEC_VERSION 1 +#define VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_display_properties2" +typedef struct VkDisplayProperties2KHR { + VkStructureType sType; + void* pNext; + VkDisplayPropertiesKHR displayProperties; +} VkDisplayProperties2KHR; + +typedef struct VkDisplayPlaneProperties2KHR { + VkStructureType sType; + void* pNext; + VkDisplayPlanePropertiesKHR displayPlaneProperties; +} VkDisplayPlaneProperties2KHR; + +typedef struct VkDisplayModeProperties2KHR { + VkStructureType sType; + void* pNext; + VkDisplayModePropertiesKHR displayModeProperties; +} VkDisplayModeProperties2KHR; + +typedef struct VkDisplayPlaneInfo2KHR { + VkStructureType sType; + const void* pNext; + VkDisplayModeKHR mode; + uint32_t planeIndex; +} VkDisplayPlaneInfo2KHR; + +typedef struct VkDisplayPlaneCapabilities2KHR { + VkStructureType sType; + void* pNext; + VkDisplayPlaneCapabilitiesKHR capabilities; +} VkDisplayPlaneCapabilities2KHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModeProperties2KHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayProperties2KHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPlaneProperties2KHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModeProperties2KHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t* pPropertyCount, + VkDisplayModeProperties2KHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilities2KHR( + VkPhysicalDevice physicalDevice, + const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, + VkDisplayPlaneCapabilities2KHR* pCapabilities); +#endif + + +// VK_KHR_dedicated_allocation is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_dedicated_allocation 1 +#define VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION 3 +#define VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_KHR_dedicated_allocation" +typedef VkMemoryDedicatedRequirements VkMemoryDedicatedRequirementsKHR; + +typedef VkMemoryDedicatedAllocateInfo VkMemoryDedicatedAllocateInfoKHR; + + + +// VK_KHR_storage_buffer_storage_class is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_storage_buffer_storage_class 1 +#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION 1 +#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME "VK_KHR_storage_buffer_storage_class" + + +// VK_KHR_shader_bfloat16 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_shader_bfloat16 1 +#define VK_KHR_SHADER_BFLOAT16_SPEC_VERSION 1 +#define VK_KHR_SHADER_BFLOAT16_EXTENSION_NAME "VK_KHR_shader_bfloat16" +typedef struct VkPhysicalDeviceShaderBfloat16FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderBFloat16Type; + VkBool32 shaderBFloat16DotProduct; + VkBool32 shaderBFloat16CooperativeMatrix; +} VkPhysicalDeviceShaderBfloat16FeaturesKHR; + + + +// VK_KHR_relaxed_block_layout is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_relaxed_block_layout 1 +#define VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME "VK_KHR_relaxed_block_layout" + + +// VK_KHR_get_memory_requirements2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_get_memory_requirements2 1 +#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION 1 +#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME "VK_KHR_get_memory_requirements2" +typedef VkBufferMemoryRequirementsInfo2 VkBufferMemoryRequirementsInfo2KHR; + +typedef VkImageMemoryRequirementsInfo2 VkImageMemoryRequirementsInfo2KHR; + +typedef VkImageSparseMemoryRequirementsInfo2 VkImageSparseMemoryRequirementsInfo2KHR; + +typedef VkMemoryRequirements2 VkMemoryRequirements2KHR; + +typedef VkSparseImageMemoryRequirements2 VkSparseImageMemoryRequirements2KHR; + +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2KHR)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2KHR)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2KHR( + VkDevice device, + const VkImageMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2KHR( + VkDevice device, + const VkBufferMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR( + VkDevice device, + const VkImageSparseMemoryRequirementsInfo2* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +#endif + + +// VK_KHR_image_format_list is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_image_format_list 1 +#define VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION 1 +#define VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME "VK_KHR_image_format_list" +typedef VkImageFormatListCreateInfo VkImageFormatListCreateInfoKHR; + + + +// VK_KHR_sampler_ycbcr_conversion is a preprocessor guard. Do not pass it to API calls. 
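The VK_KHR_sampler_ycbcr_conversion declarations below create a conversion object that is then chained into sampler and image-view creation. A sketch for a common two-plane 4:2:0 format (illustrative only; device is a hypothetical handle, and linear chroma filtering is assumed to be supported by the format):

    VkSamplerYcbcrConversionCreateInfoKHR conversionInfo = {};
    conversionInfo.sType         = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
    conversionInfo.format        = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
    conversionInfo.ycbcrModel    = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709;
    conversionInfo.ycbcrRange    = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW;
    conversionInfo.xChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;
    conversionInfo.yChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;
    conversionInfo.chromaFilter  = VK_FILTER_LINEAR;
    // components is left at identity swizzle.

    VkSamplerYcbcrConversionKHR conversion = VK_NULL_HANDLE;
    vkCreateSamplerYcbcrConversionKHR(device, &conversionInfo, NULL, &conversion);

    // The resulting handle is passed to VkSamplerCreateInfo / VkImageViewCreateInfo
    // through a VkSamplerYcbcrConversionInfo structure in their pNext chains.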
+#define VK_KHR_sampler_ycbcr_conversion 1 +typedef VkSamplerYcbcrConversion VkSamplerYcbcrConversionKHR; + +#define VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION 14 +#define VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME "VK_KHR_sampler_ycbcr_conversion" +typedef VkSamplerYcbcrModelConversion VkSamplerYcbcrModelConversionKHR; + +typedef VkSamplerYcbcrRange VkSamplerYcbcrRangeKHR; + +typedef VkChromaLocation VkChromaLocationKHR; + +typedef VkSamplerYcbcrConversionCreateInfo VkSamplerYcbcrConversionCreateInfoKHR; + +typedef VkSamplerYcbcrConversionInfo VkSamplerYcbcrConversionInfoKHR; + +typedef VkBindImagePlaneMemoryInfo VkBindImagePlaneMemoryInfoKHR; + +typedef VkImagePlaneMemoryRequirementsInfo VkImagePlaneMemoryRequirementsInfoKHR; + +typedef VkPhysicalDeviceSamplerYcbcrConversionFeatures VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR; + +typedef VkSamplerYcbcrConversionImageFormatProperties VkSamplerYcbcrConversionImageFormatPropertiesKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversionKHR)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion); +typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversionKHR)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversionKHR( + VkDevice device, + const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSamplerYcbcrConversion* pYcbcrConversion); + +VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversionKHR( + VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks* pAllocator); +#endif + + +// VK_KHR_bind_memory2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_bind_memory2 1 +#define VK_KHR_BIND_MEMORY_2_SPEC_VERSION 1 +#define VK_KHR_BIND_MEMORY_2_EXTENSION_NAME "VK_KHR_bind_memory2" +typedef VkBindBufferMemoryInfo VkBindBufferMemoryInfoKHR; + +typedef VkBindImageMemoryInfo VkBindImageMemoryInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2KHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo* pBindInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo* pBindInfos); +#endif + + +// VK_KHR_maintenance3 is a preprocessor guard. Do not pass it to API calls. 
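/*
 * [Editor's note - hedged sketch only; not part of the vendored header and not a patch line.]
 * One way the VK_KHR_maintenance3 query declared below could be used: ask the implementation whether
 * a descriptor set layout would be supported before attempting to create it. `device` and the binding
 * array are assumed to exist; error handling is omitted.
 */
static VkBool32 layout_is_supported(VkDevice                          device,
                                    const VkDescriptorSetLayoutBinding* bindings,
                                    uint32_t                          binding_count)
{
    VkDescriptorSetLayoutCreateInfo create_info = { 0 };
    create_info.sType        = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
    create_info.bindingCount = binding_count;
    create_info.pBindings    = bindings;

    VkDescriptorSetLayoutSupport support = { 0 };
    support.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT;

    vkGetDescriptorSetLayoutSupportKHR(device, &create_info, &support);
    return support.supported;
}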
+#define VK_KHR_maintenance3 1 +#define VK_KHR_MAINTENANCE_3_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_3_EXTENSION_NAME "VK_KHR_maintenance3" +// VK_KHR_MAINTENANCE3_SPEC_VERSION is a deprecated alias +#define VK_KHR_MAINTENANCE3_SPEC_VERSION VK_KHR_MAINTENANCE_3_SPEC_VERSION +// VK_KHR_MAINTENANCE3_EXTENSION_NAME is a deprecated alias +#define VK_KHR_MAINTENANCE3_EXTENSION_NAME VK_KHR_MAINTENANCE_3_EXTENSION_NAME +typedef VkPhysicalDeviceMaintenance3Properties VkPhysicalDeviceMaintenance3PropertiesKHR; + +typedef VkDescriptorSetLayoutSupport VkDescriptorSetLayoutSupportKHR; + +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupportKHR)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupportKHR( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + VkDescriptorSetLayoutSupport* pSupport); +#endif + + +// VK_KHR_draw_indirect_count is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_draw_indirect_count 1 +#define VK_KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION 1 +#define VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_KHR_draw_indirect_count" +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountKHR( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountKHR( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + + +// VK_KHR_shader_subgroup_extended_types is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_subgroup_extended_types 1 +#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION 1 +#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME "VK_KHR_shader_subgroup_extended_types" +typedef VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR; + + + +// VK_KHR_8bit_storage is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_8bit_storage 1 +#define VK_KHR_8BIT_STORAGE_SPEC_VERSION 1 +#define VK_KHR_8BIT_STORAGE_EXTENSION_NAME "VK_KHR_8bit_storage" +typedef VkPhysicalDevice8BitStorageFeatures VkPhysicalDevice8BitStorageFeaturesKHR; + + + +// VK_KHR_shader_atomic_int64 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_atomic_int64 1 +#define VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION 1 +#define VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME "VK_KHR_shader_atomic_int64" +typedef VkPhysicalDeviceShaderAtomicInt64Features VkPhysicalDeviceShaderAtomicInt64FeaturesKHR; + + + +// VK_KHR_shader_clock is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_shader_clock 1 +#define VK_KHR_SHADER_CLOCK_SPEC_VERSION 1 +#define VK_KHR_SHADER_CLOCK_EXTENSION_NAME "VK_KHR_shader_clock" +typedef struct VkPhysicalDeviceShaderClockFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupClock; + VkBool32 shaderDeviceClock; +} VkPhysicalDeviceShaderClockFeaturesKHR; + + + +// VK_KHR_video_decode_h265 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_decode_h265 1 +#include "vk_video/vulkan_video_codec_h265std_decode.h" +#define VK_KHR_VIDEO_DECODE_H265_SPEC_VERSION 8 +#define VK_KHR_VIDEO_DECODE_H265_EXTENSION_NAME "VK_KHR_video_decode_h265" +typedef struct VkVideoDecodeH265ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoH265ProfileIdc stdProfileIdc; +} VkVideoDecodeH265ProfileInfoKHR; + +typedef struct VkVideoDecodeH265CapabilitiesKHR { + VkStructureType sType; + void* pNext; + StdVideoH265LevelIdc maxLevelIdc; +} VkVideoDecodeH265CapabilitiesKHR; + +typedef struct VkVideoDecodeH265SessionParametersAddInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t stdVPSCount; + const StdVideoH265VideoParameterSet* pStdVPSs; + uint32_t stdSPSCount; + const StdVideoH265SequenceParameterSet* pStdSPSs; + uint32_t stdPPSCount; + const StdVideoH265PictureParameterSet* pStdPPSs; +} VkVideoDecodeH265SessionParametersAddInfoKHR; + +typedef struct VkVideoDecodeH265SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxStdVPSCount; + uint32_t maxStdSPSCount; + uint32_t maxStdPPSCount; + const VkVideoDecodeH265SessionParametersAddInfoKHR* pParametersAddInfo; +} VkVideoDecodeH265SessionParametersCreateInfoKHR; + +typedef struct VkVideoDecodeH265PictureInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH265PictureInfo* pStdPictureInfo; + uint32_t sliceSegmentCount; + const uint32_t* pSliceSegmentOffsets; +} VkVideoDecodeH265PictureInfoKHR; + +typedef struct VkVideoDecodeH265DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH265ReferenceInfo* pStdReferenceInfo; +} VkVideoDecodeH265DpbSlotInfoKHR; + + + +// VK_KHR_global_priority is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_global_priority 1 +#define VK_KHR_GLOBAL_PRIORITY_SPEC_VERSION 1 +#define VK_KHR_GLOBAL_PRIORITY_EXTENSION_NAME "VK_KHR_global_priority" +#define VK_MAX_GLOBAL_PRIORITY_SIZE_KHR VK_MAX_GLOBAL_PRIORITY_SIZE +typedef VkQueueGlobalPriority VkQueueGlobalPriorityKHR; + +typedef VkDeviceQueueGlobalPriorityCreateInfo VkDeviceQueueGlobalPriorityCreateInfoKHR; + +typedef VkPhysicalDeviceGlobalPriorityQueryFeatures VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR; + +typedef VkQueueFamilyGlobalPriorityProperties VkQueueFamilyGlobalPriorityPropertiesKHR; + + + +// VK_KHR_driver_properties is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_driver_properties 1 +#define VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION 1 +#define VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME "VK_KHR_driver_properties" +#define VK_MAX_DRIVER_NAME_SIZE_KHR VK_MAX_DRIVER_NAME_SIZE +#define VK_MAX_DRIVER_INFO_SIZE_KHR VK_MAX_DRIVER_INFO_SIZE +typedef VkDriverId VkDriverIdKHR; + +typedef VkConformanceVersion VkConformanceVersionKHR; + +typedef VkPhysicalDeviceDriverProperties VkPhysicalDeviceDriverPropertiesKHR; + + + +// VK_KHR_shader_float_controls is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_shader_float_controls 1 +#define VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION 4 +#define VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME "VK_KHR_shader_float_controls" +typedef VkShaderFloatControlsIndependence VkShaderFloatControlsIndependenceKHR; + +typedef VkPhysicalDeviceFloatControlsProperties VkPhysicalDeviceFloatControlsPropertiesKHR; + + + +// VK_KHR_depth_stencil_resolve is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_depth_stencil_resolve 1 +#define VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION 1 +#define VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME "VK_KHR_depth_stencil_resolve" +typedef VkResolveModeFlagBits VkResolveModeFlagBitsKHR; + +typedef VkResolveModeFlags VkResolveModeFlagsKHR; + +typedef VkSubpassDescriptionDepthStencilResolve VkSubpassDescriptionDepthStencilResolveKHR; + +typedef VkPhysicalDeviceDepthStencilResolveProperties VkPhysicalDeviceDepthStencilResolvePropertiesKHR; + + + +// VK_KHR_swapchain_mutable_format is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_swapchain_mutable_format 1 +#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION 1 +#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME "VK_KHR_swapchain_mutable_format" + + +// VK_KHR_timeline_semaphore is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_timeline_semaphore 1 +#define VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION 2 +#define VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME "VK_KHR_timeline_semaphore" +typedef VkSemaphoreType VkSemaphoreTypeKHR; + +typedef VkSemaphoreWaitFlagBits VkSemaphoreWaitFlagBitsKHR; + +typedef VkSemaphoreWaitFlags VkSemaphoreWaitFlagsKHR; + +typedef VkPhysicalDeviceTimelineSemaphoreFeatures VkPhysicalDeviceTimelineSemaphoreFeaturesKHR; + +typedef VkPhysicalDeviceTimelineSemaphoreProperties VkPhysicalDeviceTimelineSemaphorePropertiesKHR; + +typedef VkSemaphoreTypeCreateInfo VkSemaphoreTypeCreateInfoKHR; + +typedef VkTimelineSemaphoreSubmitInfo VkTimelineSemaphoreSubmitInfoKHR; + +typedef VkSemaphoreWaitInfo VkSemaphoreWaitInfoKHR; + +typedef VkSemaphoreSignalInfo VkSemaphoreSignalInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreCounterValueKHR)(VkDevice device, VkSemaphore semaphore, uint64_t* pValue); +typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphoresKHR)(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout); +typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphoreKHR)(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValueKHR( + VkDevice device, + VkSemaphore semaphore, + uint64_t* pValue); + +VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphoresKHR( + VkDevice device, + const VkSemaphoreWaitInfo* pWaitInfo, + uint64_t timeout); + +VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphoreKHR( + VkDevice device, + const VkSemaphoreSignalInfo* pSignalInfo); +#endif + + +// VK_KHR_vulkan_memory_model is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_vulkan_memory_model 1 +#define VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION 3 +#define VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME "VK_KHR_vulkan_memory_model" +typedef VkPhysicalDeviceVulkanMemoryModelFeatures VkPhysicalDeviceVulkanMemoryModelFeaturesKHR; + + + +// VK_KHR_shader_terminate_invocation is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_shader_terminate_invocation 1 +#define VK_KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION 1 +#define VK_KHR_SHADER_TERMINATE_INVOCATION_EXTENSION_NAME "VK_KHR_shader_terminate_invocation" +typedef VkPhysicalDeviceShaderTerminateInvocationFeatures VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR; + + + +// VK_KHR_fragment_shading_rate is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_fragment_shading_rate 1 +#define VK_KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION 2 +#define VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME "VK_KHR_fragment_shading_rate" + +typedef enum VkFragmentShadingRateCombinerOpKHR { + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR = 0, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR = 1, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR = 2, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR = 3, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR = 4, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_ENUM_KHR = 0x7FFFFFFF +} VkFragmentShadingRateCombinerOpKHR; +typedef struct VkFragmentShadingRateAttachmentInfoKHR { + VkStructureType sType; + const void* pNext; + const VkAttachmentReference2* pFragmentShadingRateAttachment; + VkExtent2D shadingRateAttachmentTexelSize; +} VkFragmentShadingRateAttachmentInfoKHR; + +typedef struct VkPipelineFragmentShadingRateStateCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkExtent2D fragmentSize; + VkFragmentShadingRateCombinerOpKHR combinerOps[2]; +} VkPipelineFragmentShadingRateStateCreateInfoKHR; + +typedef struct VkPhysicalDeviceFragmentShadingRateFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 pipelineFragmentShadingRate; + VkBool32 primitiveFragmentShadingRate; + VkBool32 attachmentFragmentShadingRate; +} VkPhysicalDeviceFragmentShadingRateFeaturesKHR; + +typedef struct VkPhysicalDeviceFragmentShadingRatePropertiesKHR { + VkStructureType sType; + void* pNext; + VkExtent2D minFragmentShadingRateAttachmentTexelSize; + VkExtent2D maxFragmentShadingRateAttachmentTexelSize; + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio; + VkBool32 primitiveFragmentShadingRateWithMultipleViewports; + VkBool32 layeredShadingRateAttachments; + VkBool32 fragmentShadingRateNonTrivialCombinerOps; + VkExtent2D maxFragmentSize; + uint32_t maxFragmentSizeAspectRatio; + uint32_t maxFragmentShadingRateCoverageSamples; + VkSampleCountFlagBits maxFragmentShadingRateRasterizationSamples; + VkBool32 fragmentShadingRateWithShaderDepthStencilWrites; + VkBool32 fragmentShadingRateWithSampleMask; + VkBool32 fragmentShadingRateWithShaderSampleMask; + VkBool32 fragmentShadingRateWithConservativeRasterization; + VkBool32 fragmentShadingRateWithFragmentShaderInterlock; + VkBool32 fragmentShadingRateWithCustomSampleLocations; + VkBool32 fragmentShadingRateStrictMultiplyCombiner; +} VkPhysicalDeviceFragmentShadingRatePropertiesKHR; + +typedef struct VkPhysicalDeviceFragmentShadingRateKHR { + VkStructureType sType; + void* pNext; + VkSampleCountFlags sampleCounts; + VkExtent2D fragmentSize; +} VkPhysicalDeviceFragmentShadingRateKHR; + +typedef struct VkRenderingFragmentShadingRateAttachmentInfoKHR { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkImageLayout imageLayout; + VkExtent2D shadingRateAttachmentTexelSize; +} VkRenderingFragmentShadingRateAttachmentInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); +typedef 
void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateKHR)(VkCommandBuffer commandBuffer, const VkExtent2D* pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceFragmentShadingRatesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pFragmentShadingRateCount, + VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateKHR( + VkCommandBuffer commandBuffer, + const VkExtent2D* pFragmentSize, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); +#endif + + +// VK_KHR_dynamic_rendering_local_read is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_dynamic_rendering_local_read 1 +#define VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_SPEC_VERSION 1 +#define VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_EXTENSION_NAME "VK_KHR_dynamic_rendering_local_read" +typedef VkPhysicalDeviceDynamicRenderingLocalReadFeatures VkPhysicalDeviceDynamicRenderingLocalReadFeaturesKHR; + +typedef VkRenderingAttachmentLocationInfo VkRenderingAttachmentLocationInfoKHR; + +typedef VkRenderingInputAttachmentIndexInfo VkRenderingInputAttachmentIndexInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingAttachmentLocationsKHR)(VkCommandBuffer commandBuffer, const VkRenderingAttachmentLocationInfo* pLocationInfo); +typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingInputAttachmentIndicesKHR)(VkCommandBuffer commandBuffer, const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetRenderingAttachmentLocationsKHR( + VkCommandBuffer commandBuffer, + const VkRenderingAttachmentLocationInfo* pLocationInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRenderingInputAttachmentIndicesKHR( + VkCommandBuffer commandBuffer, + const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo); +#endif + + +// VK_KHR_shader_quad_control is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_quad_control 1 +#define VK_KHR_SHADER_QUAD_CONTROL_SPEC_VERSION 1 +#define VK_KHR_SHADER_QUAD_CONTROL_EXTENSION_NAME "VK_KHR_shader_quad_control" +typedef struct VkPhysicalDeviceShaderQuadControlFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderQuadControl; +} VkPhysicalDeviceShaderQuadControlFeaturesKHR; + + + +// VK_KHR_spirv_1_4 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_spirv_1_4 1 +#define VK_KHR_SPIRV_1_4_SPEC_VERSION 1 +#define VK_KHR_SPIRV_1_4_EXTENSION_NAME "VK_KHR_spirv_1_4" + + +// VK_KHR_surface_protected_capabilities is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_surface_protected_capabilities 1 +#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME "VK_KHR_surface_protected_capabilities" +typedef struct VkSurfaceProtectedCapabilitiesKHR { + VkStructureType sType; + const void* pNext; + VkBool32 supportsProtected; +} VkSurfaceProtectedCapabilitiesKHR; + + + +// VK_KHR_separate_depth_stencil_layouts is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_separate_depth_stencil_layouts 1 +#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION 1 +#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME "VK_KHR_separate_depth_stencil_layouts" +typedef VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR; + +typedef VkAttachmentReferenceStencilLayout VkAttachmentReferenceStencilLayoutKHR; + +typedef VkAttachmentDescriptionStencilLayout VkAttachmentDescriptionStencilLayoutKHR; + + + +// VK_KHR_present_wait is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_present_wait 1 +#define VK_KHR_PRESENT_WAIT_SPEC_VERSION 1 +#define VK_KHR_PRESENT_WAIT_EXTENSION_NAME "VK_KHR_present_wait" +typedef struct VkPhysicalDevicePresentWaitFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 presentWait; +} VkPhysicalDevicePresentWaitFeaturesKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkWaitForPresentKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkWaitForPresentKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint64_t presentId, + uint64_t timeout); +#endif + + +// VK_KHR_uniform_buffer_standard_layout is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_uniform_buffer_standard_layout 1 +#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME "VK_KHR_uniform_buffer_standard_layout" +typedef VkPhysicalDeviceUniformBufferStandardLayoutFeatures VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR; + + + +// VK_KHR_buffer_device_address is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_buffer_device_address 1 +#define VK_KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 1 +#define VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_KHR_buffer_device_address" +typedef VkPhysicalDeviceBufferDeviceAddressFeatures VkPhysicalDeviceBufferDeviceAddressFeaturesKHR; + +typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoKHR; + +typedef VkBufferOpaqueCaptureAddressCreateInfo VkBufferOpaqueCaptureAddressCreateInfoKHR; + +typedef VkMemoryOpaqueCaptureAddressAllocateInfo VkMemoryOpaqueCaptureAddressAllocateInfoKHR; + +typedef VkDeviceMemoryOpaqueCaptureAddressInfo VkDeviceMemoryOpaqueCaptureAddressInfoKHR; + +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressKHR( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddressKHR( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddressKHR( + VkDevice device, + const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); +#endif + + +// VK_KHR_deferred_host_operations is a preprocessor guard. Do not pass it to API calls. 
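/*
 * [Editor's note - hedged sketch only; not part of the vendored header and not a patch line.]
 * Simplified single-threaded use of the VK_KHR_deferred_host_operations entry points declared below.
 * `op` is assumed to have already been created with vkCreateDeferredOperationKHR and handed to a
 * deferrable command (e.g. a ray tracing pipeline build); a real multi-threaded driver loop would
 * treat VK_THREAD_DONE_KHR differently instead of simply re-joining.
 */
static VkResult drive_deferred_operation(VkDevice device, VkDeferredOperationKHR op)
{
    VkResult join_result;
    do {
        join_result = vkDeferredOperationJoinKHR(device, op);
    } while (join_result == VK_THREAD_IDLE_KHR || join_result == VK_THREAD_DONE_KHR);

    /* VK_SUCCESS from the join means the operation has completed; fetch the operation's result. */
    VkResult op_result = vkGetDeferredOperationResultKHR(device, op);
    vkDestroyDeferredOperationKHR(device, op, NULL);
    return op_result;
}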
+#define VK_KHR_deferred_host_operations 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeferredOperationKHR) +#define VK_KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION 4 +#define VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME "VK_KHR_deferred_host_operations" +typedef VkResult (VKAPI_PTR *PFN_vkCreateDeferredOperationKHR)(VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation); +typedef void (VKAPI_PTR *PFN_vkDestroyDeferredOperationKHR)(VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator); +typedef uint32_t (VKAPI_PTR *PFN_vkGetDeferredOperationMaxConcurrencyKHR)(VkDevice device, VkDeferredOperationKHR operation); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeferredOperationResultKHR)(VkDevice device, VkDeferredOperationKHR operation); +typedef VkResult (VKAPI_PTR *PFN_vkDeferredOperationJoinKHR)(VkDevice device, VkDeferredOperationKHR operation); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDeferredOperationKHR( + VkDevice device, + const VkAllocationCallbacks* pAllocator, + VkDeferredOperationKHR* pDeferredOperation); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDeferredOperationKHR( + VkDevice device, + VkDeferredOperationKHR operation, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR uint32_t VKAPI_CALL vkGetDeferredOperationMaxConcurrencyKHR( + VkDevice device, + VkDeferredOperationKHR operation); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeferredOperationResultKHR( + VkDevice device, + VkDeferredOperationKHR operation); + +VKAPI_ATTR VkResult VKAPI_CALL vkDeferredOperationJoinKHR( + VkDevice device, + VkDeferredOperationKHR operation); +#endif + + +// VK_KHR_pipeline_executable_properties is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_pipeline_executable_properties 1 +#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION 1 +#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME "VK_KHR_pipeline_executable_properties" + +typedef enum VkPipelineExecutableStatisticFormatKHR { + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR = 0, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR = 1, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR = 2, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR = 3, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPipelineExecutableStatisticFormatKHR; +typedef struct VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 pipelineExecutableInfo; +} VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + +typedef struct VkPipelineInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipeline pipeline; +} VkPipelineInfoKHR; + +typedef struct VkPipelineExecutablePropertiesKHR { + VkStructureType sType; + void* pNext; + VkShaderStageFlags stages; + char name[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; + uint32_t subgroupSize; +} VkPipelineExecutablePropertiesKHR; + +typedef struct VkPipelineExecutableInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipeline pipeline; + uint32_t executableIndex; +} VkPipelineExecutableInfoKHR; + +typedef union VkPipelineExecutableStatisticValueKHR { + VkBool32 b32; + int64_t i64; + uint64_t u64; + double f64; +} VkPipelineExecutableStatisticValueKHR; + +typedef struct VkPipelineExecutableStatisticKHR { + VkStructureType sType; + void* pNext; + char name[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; + VkPipelineExecutableStatisticFormatKHR format; + 
VkPipelineExecutableStatisticValueKHR value; +} VkPipelineExecutableStatisticKHR; + +typedef struct VkPipelineExecutableInternalRepresentationKHR { + VkStructureType sType; + void* pNext; + char name[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; + VkBool32 isText; + size_t dataSize; + void* pData; +} VkPipelineExecutableInternalRepresentationKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutablePropertiesKHR)(VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableStatisticsKHR)(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableInternalRepresentationsKHR)(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutablePropertiesKHR( + VkDevice device, + const VkPipelineInfoKHR* pPipelineInfo, + uint32_t* pExecutableCount, + VkPipelineExecutablePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableStatisticsKHR( + VkDevice device, + const VkPipelineExecutableInfoKHR* pExecutableInfo, + uint32_t* pStatisticCount, + VkPipelineExecutableStatisticKHR* pStatistics); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableInternalRepresentationsKHR( + VkDevice device, + const VkPipelineExecutableInfoKHR* pExecutableInfo, + uint32_t* pInternalRepresentationCount, + VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations); +#endif + + +// VK_KHR_map_memory2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_map_memory2 1 +#define VK_KHR_MAP_MEMORY_2_SPEC_VERSION 1 +#define VK_KHR_MAP_MEMORY_2_EXTENSION_NAME "VK_KHR_map_memory2" +typedef VkMemoryUnmapFlagBits VkMemoryUnmapFlagBitsKHR; + +typedef VkMemoryUnmapFlags VkMemoryUnmapFlagsKHR; + +typedef VkMemoryMapInfo VkMemoryMapInfoKHR; + +typedef VkMemoryUnmapInfo VkMemoryUnmapInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkMapMemory2KHR)(VkDevice device, const VkMemoryMapInfo* pMemoryMapInfo, void** ppData); +typedef VkResult (VKAPI_PTR *PFN_vkUnmapMemory2KHR)(VkDevice device, const VkMemoryUnmapInfo* pMemoryUnmapInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory2KHR( + VkDevice device, + const VkMemoryMapInfo* pMemoryMapInfo, + void** ppData); + +VKAPI_ATTR VkResult VKAPI_CALL vkUnmapMemory2KHR( + VkDevice device, + const VkMemoryUnmapInfo* pMemoryUnmapInfo); +#endif + + +// VK_KHR_shader_integer_dot_product is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_integer_dot_product 1 +#define VK_KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION 1 +#define VK_KHR_SHADER_INTEGER_DOT_PRODUCT_EXTENSION_NAME "VK_KHR_shader_integer_dot_product" +typedef VkPhysicalDeviceShaderIntegerDotProductFeatures VkPhysicalDeviceShaderIntegerDotProductFeaturesKHR; + +typedef VkPhysicalDeviceShaderIntegerDotProductProperties VkPhysicalDeviceShaderIntegerDotProductPropertiesKHR; + + + +// VK_KHR_pipeline_library is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_pipeline_library 1 +#define VK_KHR_PIPELINE_LIBRARY_SPEC_VERSION 1 +#define VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME "VK_KHR_pipeline_library" +typedef struct VkPipelineLibraryCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t libraryCount; + const VkPipeline* pLibraries; +} VkPipelineLibraryCreateInfoKHR; + + + +// VK_KHR_shader_non_semantic_info is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_non_semantic_info 1 +#define VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION 1 +#define VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME "VK_KHR_shader_non_semantic_info" + + +// VK_KHR_present_id is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_present_id 1 +#define VK_KHR_PRESENT_ID_SPEC_VERSION 1 +#define VK_KHR_PRESENT_ID_EXTENSION_NAME "VK_KHR_present_id" +typedef struct VkPresentIdKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const uint64_t* pPresentIds; +} VkPresentIdKHR; + +typedef struct VkPhysicalDevicePresentIdFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 presentId; +} VkPhysicalDevicePresentIdFeaturesKHR; + + + +// VK_KHR_video_encode_queue is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_encode_queue 1 +#define VK_KHR_VIDEO_ENCODE_QUEUE_SPEC_VERSION 12 +#define VK_KHR_VIDEO_ENCODE_QUEUE_EXTENSION_NAME "VK_KHR_video_encode_queue" + +typedef enum VkVideoEncodeTuningModeKHR { + VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR = 1, + VK_VIDEO_ENCODE_TUNING_MODE_LOW_LATENCY_KHR = 2, + VK_VIDEO_ENCODE_TUNING_MODE_ULTRA_LOW_LATENCY_KHR = 3, + VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR = 4, + VK_VIDEO_ENCODE_TUNING_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeTuningModeKHR; + +typedef enum VkVideoEncodeFlagBitsKHR { + VK_VIDEO_ENCODE_WITH_QUANTIZATION_DELTA_MAP_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_WITH_EMPHASIS_MAP_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeFlagBitsKHR; +typedef VkFlags VkVideoEncodeFlagsKHR; + +typedef enum VkVideoEncodeCapabilityFlagBitsKHR { + VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_CAPABILITY_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_DETECTION_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_CAPABILITY_QUANTIZATION_DELTA_MAP_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_CAPABILITY_EMPHASIS_MAP_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeCapabilityFlagBitsKHR; +typedef VkFlags VkVideoEncodeCapabilityFlagsKHR; + +typedef enum VkVideoEncodeRateControlModeFlagBitsKHR { + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DISABLED_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeRateControlModeFlagBitsKHR; +typedef VkFlags VkVideoEncodeRateControlModeFlagsKHR; + +typedef enum VkVideoEncodeFeedbackFlagBitsKHR { + VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BUFFER_OFFSET_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BYTES_WRITTEN_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_HAS_OVERRIDES_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_FEEDBACK_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeFeedbackFlagBitsKHR; +typedef VkFlags VkVideoEncodeFeedbackFlagsKHR; + +typedef enum 
VkVideoEncodeUsageFlagBitsKHR { + VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_USAGE_TRANSCODING_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_USAGE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeUsageFlagBitsKHR; +typedef VkFlags VkVideoEncodeUsageFlagsKHR; + +typedef enum VkVideoEncodeContentFlagBitsKHR { + VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_CONTENT_CAMERA_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_CONTENT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeContentFlagBitsKHR; +typedef VkFlags VkVideoEncodeContentFlagsKHR; +typedef VkFlags VkVideoEncodeRateControlFlagsKHR; +typedef struct VkVideoEncodeInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeFlagsKHR flags; + VkBuffer dstBuffer; + VkDeviceSize dstBufferOffset; + VkDeviceSize dstBufferRange; + VkVideoPictureResourceInfoKHR srcPictureResource; + const VkVideoReferenceSlotInfoKHR* pSetupReferenceSlot; + uint32_t referenceSlotCount; + const VkVideoReferenceSlotInfoKHR* pReferenceSlots; + uint32_t precedingExternallyEncodedBytes; +} VkVideoEncodeInfoKHR; + +typedef struct VkVideoEncodeCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeCapabilityFlagsKHR flags; + VkVideoEncodeRateControlModeFlagsKHR rateControlModes; + uint32_t maxRateControlLayers; + uint64_t maxBitrate; + uint32_t maxQualityLevels; + VkExtent2D encodeInputPictureGranularity; + VkVideoEncodeFeedbackFlagsKHR supportedEncodeFeedbackFlags; +} VkVideoEncodeCapabilitiesKHR; + +typedef struct VkQueryPoolVideoEncodeFeedbackCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeFeedbackFlagsKHR encodeFeedbackFlags; +} VkQueryPoolVideoEncodeFeedbackCreateInfoKHR; + +typedef struct VkVideoEncodeUsageInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeUsageFlagsKHR videoUsageHints; + VkVideoEncodeContentFlagsKHR videoContentHints; + VkVideoEncodeTuningModeKHR tuningMode; +} VkVideoEncodeUsageInfoKHR; + +typedef struct VkVideoEncodeRateControlLayerInfoKHR { + VkStructureType sType; + const void* pNext; + uint64_t averageBitrate; + uint64_t maxBitrate; + uint32_t frameRateNumerator; + uint32_t frameRateDenominator; +} VkVideoEncodeRateControlLayerInfoKHR; + +typedef struct VkVideoEncodeRateControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeRateControlFlagsKHR flags; + VkVideoEncodeRateControlModeFlagBitsKHR rateControlMode; + uint32_t layerCount; + const VkVideoEncodeRateControlLayerInfoKHR* pLayers; + uint32_t virtualBufferSizeInMs; + uint32_t initialVirtualBufferSizeInMs; +} VkVideoEncodeRateControlInfoKHR; + +typedef struct VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR { + VkStructureType sType; + const void* pNext; + const VkVideoProfileInfoKHR* pVideoProfile; + uint32_t qualityLevel; +} VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR; + +typedef struct VkVideoEncodeQualityLevelPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeRateControlModeFlagBitsKHR preferredRateControlMode; + uint32_t preferredRateControlLayerCount; +} VkVideoEncodeQualityLevelPropertiesKHR; + +typedef struct VkVideoEncodeQualityLevelInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t qualityLevel; +} VkVideoEncodeQualityLevelInfoKHR; + +typedef 
struct VkVideoEncodeSessionParametersGetInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoSessionParametersKHR videoSessionParameters; +} VkVideoEncodeSessionParametersGetInfoKHR; + +typedef struct VkVideoEncodeSessionParametersFeedbackInfoKHR { + VkStructureType sType; + void* pNext; + VkBool32 hasOverrides; +} VkVideoEncodeSessionParametersFeedbackInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR* pQualityLevelInfo, VkVideoEncodeQualityLevelPropertiesKHR* pQualityLevelProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetEncodedVideoSessionParametersKHR)(VkDevice device, const VkVideoEncodeSessionParametersGetInfoKHR* pVideoSessionParametersInfo, VkVideoEncodeSessionParametersFeedbackInfoKHR* pFeedbackInfo, size_t* pDataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdEncodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR* pEncodeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR* pQualityLevelInfo, + VkVideoEncodeQualityLevelPropertiesKHR* pQualityLevelProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetEncodedVideoSessionParametersKHR( + VkDevice device, + const VkVideoEncodeSessionParametersGetInfoKHR* pVideoSessionParametersInfo, + VkVideoEncodeSessionParametersFeedbackInfoKHR* pFeedbackInfo, + size_t* pDataSize, + void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdEncodeVideoKHR( + VkCommandBuffer commandBuffer, + const VkVideoEncodeInfoKHR* pEncodeInfo); +#endif + + +// VK_KHR_synchronization2 is a preprocessor guard. Do not pass it to API calls. 
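/*
 * [Editor's note - hedged sketch only; not part of the vendored header and not a patch line.]
 * Example of recording an image layout transition with the VK_KHR_synchronization2 commands declared
 * below. `cmd` is assumed to be a command buffer in the recording state and `image` a valid
 * single-mip, single-layer color image on a device with the extension enabled.
 */
static void transition_to_shader_read(VkCommandBuffer cmd, VkImage image)
{
    VkImageMemoryBarrier2 barrier = { 0 };
    barrier.sType               = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2;
    barrier.srcStageMask        = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT;
    barrier.srcAccessMask       = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT;
    barrier.dstStageMask        = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT;
    barrier.dstAccessMask       = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT;
    barrier.oldLayout           = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    barrier.newLayout           = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.image               = image;
    barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    barrier.subresourceRange.levelCount = 1;
    barrier.subresourceRange.layerCount = 1;

    VkDependencyInfo dep = { 0 };
    dep.sType                   = VK_STRUCTURE_TYPE_DEPENDENCY_INFO;
    dep.imageMemoryBarrierCount = 1;
    dep.pImageMemoryBarriers    = &barrier;

    vkCmdPipelineBarrier2KHR(cmd, &dep);
}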
+#define VK_KHR_synchronization2 1 +#define VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION 1 +#define VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME "VK_KHR_synchronization2" +typedef VkPipelineStageFlags2 VkPipelineStageFlags2KHR; + +typedef VkPipelineStageFlagBits2 VkPipelineStageFlagBits2KHR; + +typedef VkAccessFlags2 VkAccessFlags2KHR; + +typedef VkAccessFlagBits2 VkAccessFlagBits2KHR; + +typedef VkSubmitFlagBits VkSubmitFlagBitsKHR; + +typedef VkSubmitFlags VkSubmitFlagsKHR; + +typedef VkMemoryBarrier2 VkMemoryBarrier2KHR; + +typedef VkBufferMemoryBarrier2 VkBufferMemoryBarrier2KHR; + +typedef VkImageMemoryBarrier2 VkImageMemoryBarrier2KHR; + +typedef VkDependencyInfo VkDependencyInfoKHR; + +typedef VkSubmitInfo2 VkSubmitInfo2KHR; + +typedef VkSemaphoreSubmitInfo VkSemaphoreSubmitInfoKHR; + +typedef VkCommandBufferSubmitInfo VkCommandBufferSubmitInfoKHR; + +typedef VkPhysicalDeviceSynchronization2Features VkPhysicalDeviceSynchronization2FeaturesKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2KHR)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfo* pDependencyInfos); +typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2KHR)(VkCommandBuffer commandBuffer, const VkDependencyInfo* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2KHR)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2KHR)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2KHR( + VkCommandBuffer commandBuffer, + VkEvent event, + const VkDependencyInfo* pDependencyInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2KHR( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags2 stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2KHR( + VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + const VkDependencyInfo* pDependencyInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2KHR( + VkCommandBuffer commandBuffer, + const VkDependencyInfo* pDependencyInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2KHR( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2KHR( + VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo2* pSubmits, + VkFence fence); +#endif + + +// VK_KHR_fragment_shader_barycentric is a preprocessor guard. Do not pass it to API calls. 
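/*
 * [Editor's note - hedged sketch only; not part of the vendored header and not a patch line.]
 * Feature structures such as the VK_KHR_fragment_shader_barycentric one declared below are typically
 * chained into VkPhysicalDeviceFeatures2 to query (and later enable) support. `physical_device` is
 * assumed valid, and Vulkan 1.1 or VK_KHR_get_physical_device_properties2 is assumed available so
 * that vkGetPhysicalDeviceFeatures2 can be called.
 */
static VkBool32 supports_barycentrics(VkPhysicalDevice physical_device)
{
    VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR bary = { 0 };
    bary.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR;

    VkPhysicalDeviceFeatures2 features2 = { 0 };
    features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    features2.pNext = &bary;

    vkGetPhysicalDeviceFeatures2(physical_device, &features2);
    return bary.fragmentShaderBarycentric;
}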
+#define VK_KHR_fragment_shader_barycentric 1 +#define VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1 +#define VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_KHR_fragment_shader_barycentric" +typedef struct VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 fragmentShaderBarycentric; +} VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + +typedef struct VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 triStripVertexOrderIndependentOfProvokingVertex; +} VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + + + +// VK_KHR_shader_subgroup_uniform_control_flow is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_subgroup_uniform_control_flow 1 +#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION 1 +#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_EXTENSION_NAME "VK_KHR_shader_subgroup_uniform_control_flow" +typedef struct VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupUniformControlFlow; +} VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + + + +// VK_KHR_zero_initialize_workgroup_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_zero_initialize_workgroup_memory 1 +#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION 1 +#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME "VK_KHR_zero_initialize_workgroup_memory" +typedef VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR; + + + +// VK_KHR_workgroup_memory_explicit_layout is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_workgroup_memory_explicit_layout 1 +#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME "VK_KHR_workgroup_memory_explicit_layout" +typedef struct VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 workgroupMemoryExplicitLayout; + VkBool32 workgroupMemoryExplicitLayoutScalarBlockLayout; + VkBool32 workgroupMemoryExplicitLayout8BitAccess; + VkBool32 workgroupMemoryExplicitLayout16BitAccess; +} VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + + + +// VK_KHR_copy_commands2 is a preprocessor guard. Do not pass it to API calls. 
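/*
 * [Editor's note - hedged sketch only; not part of the vendored header and not a patch line.]
 * The VK_KHR_copy_commands2 entry points declared below take "2" structures instead of loose
 * parameters. A buffer-to-buffer copy recorded through the extension might look as follows;
 * `cmd`, `src` and `dst` are assumed valid and `size` bytes are assumed to fit in both buffers.
 */
static void copy_buffer2(VkCommandBuffer cmd, VkBuffer src, VkBuffer dst, VkDeviceSize size)
{
    VkBufferCopy2 region = { 0 };
    region.sType = VK_STRUCTURE_TYPE_BUFFER_COPY_2;
    region.size  = size;

    VkCopyBufferInfo2 info = { 0 };
    info.sType       = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2;
    info.srcBuffer   = src;
    info.dstBuffer   = dst;
    info.regionCount = 1;
    info.pRegions    = &region;

    vkCmdCopyBuffer2KHR(cmd, &info);
}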
+#define VK_KHR_copy_commands2 1 +#define VK_KHR_COPY_COMMANDS_2_SPEC_VERSION 1 +#define VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME "VK_KHR_copy_commands2" +typedef VkCopyBufferInfo2 VkCopyBufferInfo2KHR; + +typedef VkCopyImageInfo2 VkCopyImageInfo2KHR; + +typedef VkCopyBufferToImageInfo2 VkCopyBufferToImageInfo2KHR; + +typedef VkCopyImageToBufferInfo2 VkCopyImageToBufferInfo2KHR; + +typedef VkBlitImageInfo2 VkBlitImageInfo2KHR; + +typedef VkResolveImageInfo2 VkResolveImageInfo2KHR; + +typedef VkBufferCopy2 VkBufferCopy2KHR; + +typedef VkImageCopy2 VkImageCopy2KHR; + +typedef VkImageBlit2 VkImageBlit2KHR; + +typedef VkBufferImageCopy2 VkBufferImageCopy2KHR; + +typedef VkImageResolve2 VkImageResolve2KHR; + +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage2KHR)(VkCommandBuffer commandBuffer, const VkBlitImageInfo2* pBlitImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage2KHR)(VkCommandBuffer commandBuffer, const VkResolveImageInfo2* pResolveImageInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferInfo2* pCopyBufferInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyImageInfo2* pCopyImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2KHR( + VkCommandBuffer commandBuffer, + const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2KHR( + VkCommandBuffer commandBuffer, + const VkBlitImageInfo2* pBlitImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2KHR( + VkCommandBuffer commandBuffer, + const VkResolveImageInfo2* pResolveImageInfo); +#endif + + +// VK_KHR_format_feature_flags2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_format_feature_flags2 1 +#define VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION 2 +#define VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME "VK_KHR_format_feature_flags2" +typedef VkFormatFeatureFlags2 VkFormatFeatureFlags2KHR; + +typedef VkFormatFeatureFlagBits2 VkFormatFeatureFlagBits2KHR; + +typedef VkFormatProperties3 VkFormatProperties3KHR; + + + +// VK_KHR_ray_tracing_maintenance1 is a preprocessor guard. Do not pass it to API calls. 
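/*
 * [Editor's note - hedged sketch only; not part of the vendored header and not a patch line.]
 * VK_KHR_ray_tracing_maintenance1 (declared below) adds vkCmdTraceRaysIndirect2KHR, which reads a
 * VkTraceRaysIndirectCommand2KHR from a device address. `params_buffer` is assumed to have been
 * created with device-address and indirect usage flags and to already contain that structure, and a
 * ray tracing pipeline is assumed to be bound on `cmd`.
 */
static void trace_rays_indirect2(VkCommandBuffer cmd, VkDevice device, VkBuffer params_buffer)
{
    VkBufferDeviceAddressInfo addr_info = { 0 };
    addr_info.sType  = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO;
    addr_info.buffer = params_buffer;

    VkDeviceAddress address = vkGetBufferDeviceAddressKHR(device, &addr_info);
    vkCmdTraceRaysIndirect2KHR(cmd, address);
}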
+#define VK_KHR_ray_tracing_maintenance1 1 +#define VK_KHR_RAY_TRACING_MAINTENANCE_1_SPEC_VERSION 1 +#define VK_KHR_RAY_TRACING_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_ray_tracing_maintenance1" +typedef struct VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingMaintenance1; + VkBool32 rayTracingPipelineTraceRaysIndirect2; +} VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR; + +typedef struct VkTraceRaysIndirectCommand2KHR { + VkDeviceAddress raygenShaderRecordAddress; + VkDeviceSize raygenShaderRecordSize; + VkDeviceAddress missShaderBindingTableAddress; + VkDeviceSize missShaderBindingTableSize; + VkDeviceSize missShaderBindingTableStride; + VkDeviceAddress hitShaderBindingTableAddress; + VkDeviceSize hitShaderBindingTableSize; + VkDeviceSize hitShaderBindingTableStride; + VkDeviceAddress callableShaderBindingTableAddress; + VkDeviceSize callableShaderBindingTableSize; + VkDeviceSize callableShaderBindingTableStride; + uint32_t width; + uint32_t height; + uint32_t depth; +} VkTraceRaysIndirectCommand2KHR; + +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysIndirect2KHR)(VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirect2KHR( + VkCommandBuffer commandBuffer, + VkDeviceAddress indirectDeviceAddress); +#endif + + +// VK_KHR_portability_enumeration is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_portability_enumeration 1 +#define VK_KHR_PORTABILITY_ENUMERATION_SPEC_VERSION 1 +#define VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME "VK_KHR_portability_enumeration" + + +// VK_KHR_maintenance4 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_maintenance4 1 +#define VK_KHR_MAINTENANCE_4_SPEC_VERSION 2 +#define VK_KHR_MAINTENANCE_4_EXTENSION_NAME "VK_KHR_maintenance4" +typedef VkPhysicalDeviceMaintenance4Features VkPhysicalDeviceMaintenance4FeaturesKHR; + +typedef VkPhysicalDeviceMaintenance4Properties VkPhysicalDeviceMaintenance4PropertiesKHR; + +typedef VkDeviceBufferMemoryRequirements VkDeviceBufferMemoryRequirementsKHR; + +typedef VkDeviceImageMemoryRequirements VkDeviceImageMemoryRequirementsKHR; + +typedef void (VKAPI_PTR *PFN_vkGetDeviceBufferMemoryRequirementsKHR)(VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageMemoryRequirementsKHR)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSparseMemoryRequirementsKHR)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDeviceBufferMemoryRequirementsKHR( + VkDevice device, + const VkDeviceBufferMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageMemoryRequirementsKHR( + VkDevice device, + const VkDeviceImageMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirementsKHR( + VkDevice device, + const VkDeviceImageMemoryRequirements* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +#endif + + +// VK_KHR_shader_subgroup_rotate is a preprocessor guard. 
Do not pass it to API calls. +#define VK_KHR_shader_subgroup_rotate 1 +#define VK_KHR_SHADER_SUBGROUP_ROTATE_SPEC_VERSION 2 +#define VK_KHR_SHADER_SUBGROUP_ROTATE_EXTENSION_NAME "VK_KHR_shader_subgroup_rotate" +typedef VkPhysicalDeviceShaderSubgroupRotateFeatures VkPhysicalDeviceShaderSubgroupRotateFeaturesKHR; + + + +// VK_KHR_shader_maximal_reconvergence is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_maximal_reconvergence 1 +#define VK_KHR_SHADER_MAXIMAL_RECONVERGENCE_SPEC_VERSION 1 +#define VK_KHR_SHADER_MAXIMAL_RECONVERGENCE_EXTENSION_NAME "VK_KHR_shader_maximal_reconvergence" +typedef struct VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderMaximalReconvergence; +} VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR; + + + +// VK_KHR_maintenance5 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_maintenance5 1 +#define VK_KHR_MAINTENANCE_5_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_5_EXTENSION_NAME "VK_KHR_maintenance5" +typedef VkPipelineCreateFlags2 VkPipelineCreateFlags2KHR; + +typedef VkPipelineCreateFlagBits2 VkPipelineCreateFlagBits2KHR; + +typedef VkBufferUsageFlags2 VkBufferUsageFlags2KHR; + +typedef VkBufferUsageFlagBits2 VkBufferUsageFlagBits2KHR; + +typedef VkPhysicalDeviceMaintenance5Features VkPhysicalDeviceMaintenance5FeaturesKHR; + +typedef VkPhysicalDeviceMaintenance5Properties VkPhysicalDeviceMaintenance5PropertiesKHR; + +typedef VkRenderingAreaInfo VkRenderingAreaInfoKHR; + +typedef VkDeviceImageSubresourceInfo VkDeviceImageSubresourceInfoKHR; + +typedef VkImageSubresource2 VkImageSubresource2KHR; + +typedef VkSubresourceLayout2 VkSubresourceLayout2KHR; + +typedef VkPipelineCreateFlags2CreateInfo VkPipelineCreateFlags2CreateInfoKHR; + +typedef VkBufferUsageFlags2CreateInfo VkBufferUsageFlags2CreateInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer2KHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType); +typedef void (VKAPI_PTR *PFN_vkGetRenderingAreaGranularityKHR)(VkDevice device, const VkRenderingAreaInfo* pRenderingAreaInfo, VkExtent2D* pGranularity); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSubresourceLayoutKHR)(VkDevice device, const VkDeviceImageSubresourceInfo* pInfo, VkSubresourceLayout2* pLayout); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2KHR)(VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, VkSubresourceLayout2* pLayout); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer2KHR( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkDeviceSize size, + VkIndexType indexType); + +VKAPI_ATTR void VKAPI_CALL vkGetRenderingAreaGranularityKHR( + VkDevice device, + const VkRenderingAreaInfo* pRenderingAreaInfo, + VkExtent2D* pGranularity); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSubresourceLayoutKHR( + VkDevice device, + const VkDeviceImageSubresourceInfo* pInfo, + VkSubresourceLayout2* pLayout); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2KHR( + VkDevice device, + VkImage image, + const VkImageSubresource2* pSubresource, + VkSubresourceLayout2* pLayout); +#endif + + +// VK_KHR_ray_tracing_position_fetch is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_ray_tracing_position_fetch 1 +#define VK_KHR_RAY_TRACING_POSITION_FETCH_SPEC_VERSION 1 +#define VK_KHR_RAY_TRACING_POSITION_FETCH_EXTENSION_NAME "VK_KHR_ray_tracing_position_fetch" +typedef struct VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingPositionFetch; +} VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR; + + + +// VK_KHR_pipeline_binary is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_pipeline_binary 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineBinaryKHR) +#define VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR 32U +#define VK_KHR_PIPELINE_BINARY_SPEC_VERSION 1 +#define VK_KHR_PIPELINE_BINARY_EXTENSION_NAME "VK_KHR_pipeline_binary" +typedef struct VkPhysicalDevicePipelineBinaryFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 pipelineBinaries; +} VkPhysicalDevicePipelineBinaryFeaturesKHR; + +typedef struct VkPhysicalDevicePipelineBinaryPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 pipelineBinaryInternalCache; + VkBool32 pipelineBinaryInternalCacheControl; + VkBool32 pipelineBinaryPrefersInternalCache; + VkBool32 pipelineBinaryPrecompiledInternalCache; + VkBool32 pipelineBinaryCompressedData; +} VkPhysicalDevicePipelineBinaryPropertiesKHR; + +typedef struct VkDevicePipelineBinaryInternalCacheControlKHR { + VkStructureType sType; + const void* pNext; + VkBool32 disableInternalCache; +} VkDevicePipelineBinaryInternalCacheControlKHR; + +typedef struct VkPipelineBinaryKeyKHR { + VkStructureType sType; + void* pNext; + uint32_t keySize; + uint8_t key[VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR]; +} VkPipelineBinaryKeyKHR; + +typedef struct VkPipelineBinaryDataKHR { + size_t dataSize; + void* pData; +} VkPipelineBinaryDataKHR; + +typedef struct VkPipelineBinaryKeysAndDataKHR { + uint32_t binaryCount; + const VkPipelineBinaryKeyKHR* pPipelineBinaryKeys; + const VkPipelineBinaryDataKHR* pPipelineBinaryData; +} VkPipelineBinaryKeysAndDataKHR; + +typedef struct VkPipelineCreateInfoKHR { + VkStructureType sType; + void* pNext; +} VkPipelineCreateInfoKHR; + +typedef struct VkPipelineBinaryCreateInfoKHR { + VkStructureType sType; + const void* pNext; + const VkPipelineBinaryKeysAndDataKHR* pKeysAndDataInfo; + VkPipeline pipeline; + const VkPipelineCreateInfoKHR* pPipelineCreateInfo; +} VkPipelineBinaryCreateInfoKHR; + +typedef struct VkPipelineBinaryInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t binaryCount; + const VkPipelineBinaryKHR* pPipelineBinaries; +} VkPipelineBinaryInfoKHR; + +typedef struct VkReleaseCapturedPipelineDataInfoKHR { + VkStructureType sType; + void* pNext; + VkPipeline pipeline; +} VkReleaseCapturedPipelineDataInfoKHR; + +typedef struct VkPipelineBinaryDataInfoKHR { + VkStructureType sType; + void* pNext; + VkPipelineBinaryKHR pipelineBinary; +} VkPipelineBinaryDataInfoKHR; + +typedef struct VkPipelineBinaryHandlesInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t pipelineBinaryCount; + VkPipelineBinaryKHR* pPipelineBinaries; +} VkPipelineBinaryHandlesInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineBinariesKHR)(VkDevice device, const VkPipelineBinaryCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineBinaryHandlesInfoKHR* pBinaries); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineBinaryKHR)(VkDevice device, VkPipelineBinaryKHR pipelineBinary, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineKeyKHR)(VkDevice device, const 
VkPipelineCreateInfoKHR* pPipelineCreateInfo, VkPipelineBinaryKeyKHR* pPipelineKey); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineBinaryDataKHR)(VkDevice device, const VkPipelineBinaryDataInfoKHR* pInfo, VkPipelineBinaryKeyKHR* pPipelineBinaryKey, size_t* pPipelineBinaryDataSize, void* pPipelineBinaryData); +typedef VkResult (VKAPI_PTR *PFN_vkReleaseCapturedPipelineDataKHR)(VkDevice device, const VkReleaseCapturedPipelineDataInfoKHR* pInfo, const VkAllocationCallbacks* pAllocator); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineBinariesKHR( + VkDevice device, + const VkPipelineBinaryCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineBinaryHandlesInfoKHR* pBinaries); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineBinaryKHR( + VkDevice device, + VkPipelineBinaryKHR pipelineBinary, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineKeyKHR( + VkDevice device, + const VkPipelineCreateInfoKHR* pPipelineCreateInfo, + VkPipelineBinaryKeyKHR* pPipelineKey); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineBinaryDataKHR( + VkDevice device, + const VkPipelineBinaryDataInfoKHR* pInfo, + VkPipelineBinaryKeyKHR* pPipelineBinaryKey, + size_t* pPipelineBinaryDataSize, + void* pPipelineBinaryData); + +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseCapturedPipelineDataKHR( + VkDevice device, + const VkReleaseCapturedPipelineDataInfoKHR* pInfo, + const VkAllocationCallbacks* pAllocator); +#endif + + +// VK_KHR_cooperative_matrix is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_cooperative_matrix 1 +#define VK_KHR_COOPERATIVE_MATRIX_SPEC_VERSION 2 +#define VK_KHR_COOPERATIVE_MATRIX_EXTENSION_NAME "VK_KHR_cooperative_matrix" + +typedef enum VkComponentTypeKHR { + VK_COMPONENT_TYPE_FLOAT16_KHR = 0, + VK_COMPONENT_TYPE_FLOAT32_KHR = 1, + VK_COMPONENT_TYPE_FLOAT64_KHR = 2, + VK_COMPONENT_TYPE_SINT8_KHR = 3, + VK_COMPONENT_TYPE_SINT16_KHR = 4, + VK_COMPONENT_TYPE_SINT32_KHR = 5, + VK_COMPONENT_TYPE_SINT64_KHR = 6, + VK_COMPONENT_TYPE_UINT8_KHR = 7, + VK_COMPONENT_TYPE_UINT16_KHR = 8, + VK_COMPONENT_TYPE_UINT32_KHR = 9, + VK_COMPONENT_TYPE_UINT64_KHR = 10, + VK_COMPONENT_TYPE_BFLOAT16_KHR = 1000141000, + VK_COMPONENT_TYPE_SINT8_PACKED_NV = 1000491000, + VK_COMPONENT_TYPE_UINT8_PACKED_NV = 1000491001, + VK_COMPONENT_TYPE_FLOAT_E4M3_NV = 1000491002, + VK_COMPONENT_TYPE_FLOAT_E5M2_NV = 1000491003, + VK_COMPONENT_TYPE_FLOAT16_NV = VK_COMPONENT_TYPE_FLOAT16_KHR, + VK_COMPONENT_TYPE_FLOAT32_NV = VK_COMPONENT_TYPE_FLOAT32_KHR, + VK_COMPONENT_TYPE_FLOAT64_NV = VK_COMPONENT_TYPE_FLOAT64_KHR, + VK_COMPONENT_TYPE_SINT8_NV = VK_COMPONENT_TYPE_SINT8_KHR, + VK_COMPONENT_TYPE_SINT16_NV = VK_COMPONENT_TYPE_SINT16_KHR, + VK_COMPONENT_TYPE_SINT32_NV = VK_COMPONENT_TYPE_SINT32_KHR, + VK_COMPONENT_TYPE_SINT64_NV = VK_COMPONENT_TYPE_SINT64_KHR, + VK_COMPONENT_TYPE_UINT8_NV = VK_COMPONENT_TYPE_UINT8_KHR, + VK_COMPONENT_TYPE_UINT16_NV = VK_COMPONENT_TYPE_UINT16_KHR, + VK_COMPONENT_TYPE_UINT32_NV = VK_COMPONENT_TYPE_UINT32_KHR, + VK_COMPONENT_TYPE_UINT64_NV = VK_COMPONENT_TYPE_UINT64_KHR, + VK_COMPONENT_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkComponentTypeKHR; + +typedef enum VkScopeKHR { + VK_SCOPE_DEVICE_KHR = 1, + VK_SCOPE_WORKGROUP_KHR = 2, + VK_SCOPE_SUBGROUP_KHR = 3, + VK_SCOPE_QUEUE_FAMILY_KHR = 5, + VK_SCOPE_DEVICE_NV = VK_SCOPE_DEVICE_KHR, + VK_SCOPE_WORKGROUP_NV = VK_SCOPE_WORKGROUP_KHR, + VK_SCOPE_SUBGROUP_NV = VK_SCOPE_SUBGROUP_KHR, + VK_SCOPE_QUEUE_FAMILY_NV = VK_SCOPE_QUEUE_FAMILY_KHR, + VK_SCOPE_MAX_ENUM_KHR = 
0x7FFFFFFF +} VkScopeKHR; +typedef struct VkCooperativeMatrixPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t MSize; + uint32_t NSize; + uint32_t KSize; + VkComponentTypeKHR AType; + VkComponentTypeKHR BType; + VkComponentTypeKHR CType; + VkComponentTypeKHR ResultType; + VkBool32 saturatingAccumulation; + VkScopeKHR scope; +} VkCooperativeMatrixPropertiesKHR; + +typedef struct VkPhysicalDeviceCooperativeMatrixFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 cooperativeMatrix; + VkBool32 cooperativeMatrixRobustBufferAccess; +} VkPhysicalDeviceCooperativeMatrixFeaturesKHR; + +typedef struct VkPhysicalDeviceCooperativeMatrixPropertiesKHR { + VkStructureType sType; + void* pNext; + VkShaderStageFlags cooperativeMatrixSupportedStages; +} VkPhysicalDeviceCooperativeMatrixPropertiesKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesKHR* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkCooperativeMatrixPropertiesKHR* pProperties); +#endif + + +// VK_KHR_compute_shader_derivatives is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_compute_shader_derivatives 1 +#define VK_KHR_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1 +#define VK_KHR_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_KHR_compute_shader_derivatives" +typedef struct VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 computeDerivativeGroupQuads; + VkBool32 computeDerivativeGroupLinear; +} VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR; + +typedef struct VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 meshAndTaskShaderDerivatives; +} VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR; + + + +// VK_KHR_video_decode_av1 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_video_decode_av1 1 +#include "vk_video/vulkan_video_codec_av1std.h" +#include "vk_video/vulkan_video_codec_av1std_decode.h" +#define VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR 7U +#define VK_KHR_VIDEO_DECODE_AV1_SPEC_VERSION 1 +#define VK_KHR_VIDEO_DECODE_AV1_EXTENSION_NAME "VK_KHR_video_decode_av1" +typedef struct VkVideoDecodeAV1ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoAV1Profile stdProfile; + VkBool32 filmGrainSupport; +} VkVideoDecodeAV1ProfileInfoKHR; + +typedef struct VkVideoDecodeAV1CapabilitiesKHR { + VkStructureType sType; + void* pNext; + StdVideoAV1Level maxLevel; +} VkVideoDecodeAV1CapabilitiesKHR; + +typedef struct VkVideoDecodeAV1SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoAV1SequenceHeader* pStdSequenceHeader; +} VkVideoDecodeAV1SessionParametersCreateInfoKHR; + +typedef struct VkVideoDecodeAV1PictureInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeAV1PictureInfo* pStdPictureInfo; + int32_t referenceNameSlotIndices[VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR]; + uint32_t frameHeaderOffset; + uint32_t tileCount; + const uint32_t* pTileOffsets; + const uint32_t* pTileSizes; +} VkVideoDecodeAV1PictureInfoKHR; + +typedef struct VkVideoDecodeAV1DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeAV1ReferenceInfo* pStdReferenceInfo; +} VkVideoDecodeAV1DpbSlotInfoKHR; + + + +// VK_KHR_video_encode_av1 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_encode_av1 1 +#include "vk_video/vulkan_video_codec_av1std_encode.h" +#define VK_KHR_VIDEO_ENCODE_AV1_SPEC_VERSION 1 +#define VK_KHR_VIDEO_ENCODE_AV1_EXTENSION_NAME "VK_KHR_video_encode_av1" + +typedef enum VkVideoEncodeAV1PredictionModeKHR { + VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_INTRA_ONLY_KHR = 0, + VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_SINGLE_REFERENCE_KHR = 1, + VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_UNIDIRECTIONAL_COMPOUND_KHR = 2, + VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_BIDIRECTIONAL_COMPOUND_KHR = 3, + VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeAV1PredictionModeKHR; + +typedef enum VkVideoEncodeAV1RateControlGroupKHR { + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_INTRA_KHR = 0, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_PREDICTIVE_KHR = 1, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_BIPREDICTIVE_KHR = 2, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeAV1RateControlGroupKHR; + +typedef enum VkVideoEncodeAV1CapabilityFlagBitsKHR { + VK_VIDEO_ENCODE_AV1_CAPABILITY_PER_RATE_CONTROL_GROUP_MIN_MAX_Q_INDEX_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_AV1_CAPABILITY_GENERATE_OBU_EXTENSION_HEADER_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_AV1_CAPABILITY_PRIMARY_REFERENCE_CDF_ONLY_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_AV1_CAPABILITY_FRAME_SIZE_OVERRIDE_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_AV1_CAPABILITY_MOTION_VECTOR_SCALING_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_AV1_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeAV1CapabilityFlagBitsKHR; +typedef VkFlags VkVideoEncodeAV1CapabilityFlagsKHR; + +typedef enum VkVideoEncodeAV1StdFlagBitsKHR { + VK_VIDEO_ENCODE_AV1_STD_UNIFORM_TILE_SPACING_FLAG_SET_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_AV1_STD_SKIP_MODE_PRESENT_UNSET_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_AV1_STD_PRIMARY_REF_FRAME_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_AV1_STD_DELTA_Q_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_AV1_STD_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} 
VkVideoEncodeAV1StdFlagBitsKHR; +typedef VkFlags VkVideoEncodeAV1StdFlagsKHR; + +typedef enum VkVideoEncodeAV1SuperblockSizeFlagBitsKHR { + VK_VIDEO_ENCODE_AV1_SUPERBLOCK_SIZE_64_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_AV1_SUPERBLOCK_SIZE_128_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_AV1_SUPERBLOCK_SIZE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeAV1SuperblockSizeFlagBitsKHR; +typedef VkFlags VkVideoEncodeAV1SuperblockSizeFlagsKHR; + +typedef enum VkVideoEncodeAV1RateControlFlagBitsKHR { + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REGULAR_GOP_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_TEMPORAL_LAYER_PATTERN_DYADIC_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeAV1RateControlFlagBitsKHR; +typedef VkFlags VkVideoEncodeAV1RateControlFlagsKHR; +typedef struct VkPhysicalDeviceVideoEncodeAV1FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 videoEncodeAV1; +} VkPhysicalDeviceVideoEncodeAV1FeaturesKHR; + +typedef struct VkVideoEncodeAV1CapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeAV1CapabilityFlagsKHR flags; + StdVideoAV1Level maxLevel; + VkExtent2D codedPictureAlignment; + VkExtent2D maxTiles; + VkExtent2D minTileSize; + VkExtent2D maxTileSize; + VkVideoEncodeAV1SuperblockSizeFlagsKHR superblockSizes; + uint32_t maxSingleReferenceCount; + uint32_t singleReferenceNameMask; + uint32_t maxUnidirectionalCompoundReferenceCount; + uint32_t maxUnidirectionalCompoundGroup1ReferenceCount; + uint32_t unidirectionalCompoundReferenceNameMask; + uint32_t maxBidirectionalCompoundReferenceCount; + uint32_t maxBidirectionalCompoundGroup1ReferenceCount; + uint32_t maxBidirectionalCompoundGroup2ReferenceCount; + uint32_t bidirectionalCompoundReferenceNameMask; + uint32_t maxTemporalLayerCount; + uint32_t maxSpatialLayerCount; + uint32_t maxOperatingPoints; + uint32_t minQIndex; + uint32_t maxQIndex; + VkBool32 prefersGopRemainingFrames; + VkBool32 requiresGopRemainingFrames; + VkVideoEncodeAV1StdFlagsKHR stdSyntaxFlags; +} VkVideoEncodeAV1CapabilitiesKHR; + +typedef struct VkVideoEncodeAV1QIndexKHR { + uint32_t intraQIndex; + uint32_t predictiveQIndex; + uint32_t bipredictiveQIndex; +} VkVideoEncodeAV1QIndexKHR; + +typedef struct VkVideoEncodeAV1QualityLevelPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeAV1RateControlFlagsKHR preferredRateControlFlags; + uint32_t preferredGopFrameCount; + uint32_t preferredKeyFramePeriod; + uint32_t preferredConsecutiveBipredictiveFrameCount; + uint32_t preferredTemporalLayerCount; + VkVideoEncodeAV1QIndexKHR preferredConstantQIndex; + uint32_t preferredMaxSingleReferenceCount; + uint32_t preferredSingleReferenceNameMask; + uint32_t preferredMaxUnidirectionalCompoundReferenceCount; + uint32_t preferredMaxUnidirectionalCompoundGroup1ReferenceCount; + uint32_t preferredUnidirectionalCompoundReferenceNameMask; + uint32_t preferredMaxBidirectionalCompoundReferenceCount; + uint32_t preferredMaxBidirectionalCompoundGroup1ReferenceCount; + uint32_t preferredMaxBidirectionalCompoundGroup2ReferenceCount; + uint32_t preferredBidirectionalCompoundReferenceNameMask; +} VkVideoEncodeAV1QualityLevelPropertiesKHR; + +typedef struct VkVideoEncodeAV1SessionCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMaxLevel; + StdVideoAV1Level maxLevel; +} 
VkVideoEncodeAV1SessionCreateInfoKHR; + +typedef struct VkVideoEncodeAV1SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoAV1SequenceHeader* pStdSequenceHeader; + const StdVideoEncodeAV1DecoderModelInfo* pStdDecoderModelInfo; + uint32_t stdOperatingPointCount; + const StdVideoEncodeAV1OperatingPointInfo* pStdOperatingPoints; +} VkVideoEncodeAV1SessionParametersCreateInfoKHR; + +typedef struct VkVideoEncodeAV1PictureInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeAV1PredictionModeKHR predictionMode; + VkVideoEncodeAV1RateControlGroupKHR rateControlGroup; + uint32_t constantQIndex; + const StdVideoEncodeAV1PictureInfo* pStdPictureInfo; + int32_t referenceNameSlotIndices[VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR]; + VkBool32 primaryReferenceCdfOnly; + VkBool32 generateObuExtensionHeader; +} VkVideoEncodeAV1PictureInfoKHR; + +typedef struct VkVideoEncodeAV1DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoEncodeAV1ReferenceInfo* pStdReferenceInfo; +} VkVideoEncodeAV1DpbSlotInfoKHR; + +typedef struct VkVideoEncodeAV1ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoAV1Profile stdProfile; +} VkVideoEncodeAV1ProfileInfoKHR; + +typedef struct VkVideoEncodeAV1FrameSizeKHR { + uint32_t intraFrameSize; + uint32_t predictiveFrameSize; + uint32_t bipredictiveFrameSize; +} VkVideoEncodeAV1FrameSizeKHR; + +typedef struct VkVideoEncodeAV1GopRemainingFrameInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useGopRemainingFrames; + uint32_t gopRemainingIntra; + uint32_t gopRemainingPredictive; + uint32_t gopRemainingBipredictive; +} VkVideoEncodeAV1GopRemainingFrameInfoKHR; + +typedef struct VkVideoEncodeAV1RateControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeAV1RateControlFlagsKHR flags; + uint32_t gopFrameCount; + uint32_t keyFramePeriod; + uint32_t consecutiveBipredictiveFrameCount; + uint32_t temporalLayerCount; +} VkVideoEncodeAV1RateControlInfoKHR; + +typedef struct VkVideoEncodeAV1RateControlLayerInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMinQIndex; + VkVideoEncodeAV1QIndexKHR minQIndex; + VkBool32 useMaxQIndex; + VkVideoEncodeAV1QIndexKHR maxQIndex; + VkBool32 useMaxFrameSize; + VkVideoEncodeAV1FrameSizeKHR maxFrameSize; +} VkVideoEncodeAV1RateControlLayerInfoKHR; + + + +// VK_KHR_video_maintenance1 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_maintenance1 1 +#define VK_KHR_VIDEO_MAINTENANCE_1_SPEC_VERSION 1 +#define VK_KHR_VIDEO_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_video_maintenance1" +typedef struct VkPhysicalDeviceVideoMaintenance1FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 videoMaintenance1; +} VkPhysicalDeviceVideoMaintenance1FeaturesKHR; + +typedef struct VkVideoInlineQueryInfoKHR { + VkStructureType sType; + const void* pNext; + VkQueryPool queryPool; + uint32_t firstQuery; + uint32_t queryCount; +} VkVideoInlineQueryInfoKHR; + + + +// VK_KHR_vertex_attribute_divisor is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_vertex_attribute_divisor 1 +#define VK_KHR_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 1 +#define VK_KHR_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_KHR_vertex_attribute_divisor" +typedef VkPhysicalDeviceVertexAttributeDivisorProperties VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR; + +typedef VkVertexInputBindingDivisorDescription VkVertexInputBindingDivisorDescriptionKHR; + +typedef VkPipelineVertexInputDivisorStateCreateInfo VkPipelineVertexInputDivisorStateCreateInfoKHR; + +typedef VkPhysicalDeviceVertexAttributeDivisorFeatures VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR; + + + +// VK_KHR_load_store_op_none is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_load_store_op_none 1 +#define VK_KHR_LOAD_STORE_OP_NONE_SPEC_VERSION 1 +#define VK_KHR_LOAD_STORE_OP_NONE_EXTENSION_NAME "VK_KHR_load_store_op_none" + + +// VK_KHR_shader_float_controls2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_float_controls2 1 +#define VK_KHR_SHADER_FLOAT_CONTROLS_2_SPEC_VERSION 1 +#define VK_KHR_SHADER_FLOAT_CONTROLS_2_EXTENSION_NAME "VK_KHR_shader_float_controls2" +typedef VkPhysicalDeviceShaderFloatControls2Features VkPhysicalDeviceShaderFloatControls2FeaturesKHR; + + + +// VK_KHR_index_type_uint8 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_index_type_uint8 1 +#define VK_KHR_INDEX_TYPE_UINT8_SPEC_VERSION 1 +#define VK_KHR_INDEX_TYPE_UINT8_EXTENSION_NAME "VK_KHR_index_type_uint8" +typedef VkPhysicalDeviceIndexTypeUint8Features VkPhysicalDeviceIndexTypeUint8FeaturesKHR; + + + +// VK_KHR_line_rasterization is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_line_rasterization 1 +#define VK_KHR_LINE_RASTERIZATION_SPEC_VERSION 1 +#define VK_KHR_LINE_RASTERIZATION_EXTENSION_NAME "VK_KHR_line_rasterization" +typedef VkLineRasterizationMode VkLineRasterizationModeKHR; + +typedef VkPhysicalDeviceLineRasterizationFeatures VkPhysicalDeviceLineRasterizationFeaturesKHR; + +typedef VkPhysicalDeviceLineRasterizationProperties VkPhysicalDeviceLineRasterizationPropertiesKHR; + +typedef VkPipelineRasterizationLineStateCreateInfo VkPipelineRasterizationLineStateCreateInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleKHR)(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleKHR( + VkCommandBuffer commandBuffer, + uint32_t lineStippleFactor, + uint16_t lineStipplePattern); +#endif + + +// VK_KHR_calibrated_timestamps is a preprocessor guard. Do not pass it to API calls. 
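The next block declares VK_KHR_calibrated_timestamps. A minimal usage sketch of its device-level entry point (assumes the extension is enabled on the device and that prototypes are compiled in, i.e. VK_NO_PROTOTYPES is not defined; the helper name and the choice of time domains are illustrative only):

#include <vulkan/vulkan.h>
#include <stdint.h>

/* Illustrative helper: reads one device timestamp and one host (POSIX
 * monotonic) timestamp in a single call. The supported domains should
 * first be enumerated with vkGetPhysicalDeviceCalibrateableTimeDomainsKHR. */
static VkResult read_calibrated_pair(VkDevice device, uint64_t timestamps[2], uint64_t* max_deviation)
{
    VkCalibratedTimestampInfoKHR infos[2];

    infos[0].sType      = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR;
    infos[0].pNext      = NULL;
    infos[0].timeDomain = VK_TIME_DOMAIN_DEVICE_KHR;

    infos[1].sType      = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR;
    infos[1].pNext      = NULL;
    infos[1].timeDomain = VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR; /* POSIX hosts */

    return vkGetCalibratedTimestampsKHR(device, 2, infos, timestamps, max_deviation);
}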
+#define VK_KHR_calibrated_timestamps 1 +#define VK_KHR_CALIBRATED_TIMESTAMPS_SPEC_VERSION 1 +#define VK_KHR_CALIBRATED_TIMESTAMPS_EXTENSION_NAME "VK_KHR_calibrated_timestamps" + +typedef enum VkTimeDomainKHR { + VK_TIME_DOMAIN_DEVICE_KHR = 0, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR = 1, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR = 2, + VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR = 3, + VK_TIME_DOMAIN_DEVICE_EXT = VK_TIME_DOMAIN_DEVICE_KHR, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT = VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR, + VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR, + VK_TIME_DOMAIN_MAX_ENUM_KHR = 0x7FFFFFFF +} VkTimeDomainKHR; +typedef struct VkCalibratedTimestampInfoKHR { + VkStructureType sType; + const void* pNext; + VkTimeDomainKHR timeDomain; +} VkCalibratedTimestampInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR)(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainKHR* pTimeDomains); +typedef VkResult (VKAPI_PTR *PFN_vkGetCalibratedTimestampsKHR)(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoKHR* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pTimeDomainCount, + VkTimeDomainKHR* pTimeDomains); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetCalibratedTimestampsKHR( + VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoKHR* pTimestampInfos, + uint64_t* pTimestamps, + uint64_t* pMaxDeviation); +#endif + + +// VK_KHR_shader_expect_assume is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_expect_assume 1 +#define VK_KHR_SHADER_EXPECT_ASSUME_SPEC_VERSION 1 +#define VK_KHR_SHADER_EXPECT_ASSUME_EXTENSION_NAME "VK_KHR_shader_expect_assume" +typedef VkPhysicalDeviceShaderExpectAssumeFeatures VkPhysicalDeviceShaderExpectAssumeFeaturesKHR; + + + +// VK_KHR_maintenance6 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_maintenance6 1 +#define VK_KHR_MAINTENANCE_6_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_6_EXTENSION_NAME "VK_KHR_maintenance6" +typedef VkPhysicalDeviceMaintenance6Features VkPhysicalDeviceMaintenance6FeaturesKHR; + +typedef VkPhysicalDeviceMaintenance6Properties VkPhysicalDeviceMaintenance6PropertiesKHR; + +typedef VkBindMemoryStatus VkBindMemoryStatusKHR; + +typedef VkBindDescriptorSetsInfo VkBindDescriptorSetsInfoKHR; + +typedef VkPushConstantsInfo VkPushConstantsInfoKHR; + +typedef VkPushDescriptorSetInfo VkPushDescriptorSetInfoKHR; + +typedef VkPushDescriptorSetWithTemplateInfo VkPushDescriptorSetWithTemplateInfoKHR; + +typedef struct VkSetDescriptorBufferOffsetsInfoEXT { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags stageFlags; + VkPipelineLayout layout; + uint32_t firstSet; + uint32_t setCount; + const uint32_t* pBufferIndices; + const VkDeviceSize* pOffsets; +} VkSetDescriptorBufferOffsetsInfoEXT; + +typedef struct VkBindDescriptorBufferEmbeddedSamplersInfoEXT { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags stageFlags; + VkPipelineLayout layout; + uint32_t set; +} VkBindDescriptorBufferEmbeddedSamplersInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets2KHR)(VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushConstants2KHR)(VkCommandBuffer commandBuffer, const VkPushConstantsInfo* pPushConstantsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSet2KHR)(VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfo* pPushDescriptorSetInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplate2KHR)(VkCommandBuffer commandBuffer, const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo); +typedef void (VKAPI_PTR *PFN_vkCmdSetDescriptorBufferOffsets2EXT)(VkCommandBuffer commandBuffer, const VkSetDescriptorBufferOffsetsInfoEXT* pSetDescriptorBufferOffsetsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT)(VkCommandBuffer commandBuffer, const VkBindDescriptorBufferEmbeddedSamplersInfoEXT* pBindDescriptorBufferEmbeddedSamplersInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets2KHR( + VkCommandBuffer commandBuffer, + const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants2KHR( + VkCommandBuffer commandBuffer, + const VkPushConstantsInfo* pPushConstantsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSet2KHR( + VkCommandBuffer commandBuffer, + const VkPushDescriptorSetInfo* pPushDescriptorSetInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplate2KHR( + VkCommandBuffer commandBuffer, + const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDescriptorBufferOffsets2EXT( + VkCommandBuffer commandBuffer, + const VkSetDescriptorBufferOffsetsInfoEXT* pSetDescriptorBufferOffsetsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( + VkCommandBuffer commandBuffer, + const VkBindDescriptorBufferEmbeddedSamplersInfoEXT* pBindDescriptorBufferEmbeddedSamplersInfo); +#endif + + +// VK_KHR_video_encode_quantization_map is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_video_encode_quantization_map 1 +#define VK_KHR_VIDEO_ENCODE_QUANTIZATION_MAP_SPEC_VERSION 2 +#define VK_KHR_VIDEO_ENCODE_QUANTIZATION_MAP_EXTENSION_NAME "VK_KHR_video_encode_quantization_map" +typedef struct VkVideoEncodeQuantizationMapCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkExtent2D maxQuantizationMapExtent; +} VkVideoEncodeQuantizationMapCapabilitiesKHR; + +typedef struct VkVideoFormatQuantizationMapPropertiesKHR { + VkStructureType sType; + void* pNext; + VkExtent2D quantizationMapTexelSize; +} VkVideoFormatQuantizationMapPropertiesKHR; + +typedef struct VkVideoEncodeQuantizationMapInfoKHR { + VkStructureType sType; + const void* pNext; + VkImageView quantizationMap; + VkExtent2D quantizationMapExtent; +} VkVideoEncodeQuantizationMapInfoKHR; + +typedef struct VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkExtent2D quantizationMapTexelSize; +} VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR; + +typedef struct VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 videoEncodeQuantizationMap; +} VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; + +typedef struct VkVideoEncodeH264QuantizationMapCapabilitiesKHR { + VkStructureType sType; + void* pNext; + int32_t minQpDelta; + int32_t maxQpDelta; +} VkVideoEncodeH264QuantizationMapCapabilitiesKHR; + +typedef struct VkVideoEncodeH265QuantizationMapCapabilitiesKHR { + VkStructureType sType; + void* pNext; + int32_t minQpDelta; + int32_t maxQpDelta; +} VkVideoEncodeH265QuantizationMapCapabilitiesKHR; + +typedef struct VkVideoFormatH265QuantizationMapPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeH265CtbSizeFlagsKHR compatibleCtbSizes; +} VkVideoFormatH265QuantizationMapPropertiesKHR; + +typedef struct VkVideoEncodeAV1QuantizationMapCapabilitiesKHR { + VkStructureType sType; + void* pNext; + int32_t minQIndexDelta; + int32_t maxQIndexDelta; +} VkVideoEncodeAV1QuantizationMapCapabilitiesKHR; + +typedef struct VkVideoFormatAV1QuantizationMapPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeAV1SuperblockSizeFlagsKHR compatibleSuperblockSizes; +} VkVideoFormatAV1QuantizationMapPropertiesKHR; + + + +// VK_KHR_shader_relaxed_extended_instruction is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_relaxed_extended_instruction 1 +#define VK_KHR_SHADER_RELAXED_EXTENDED_INSTRUCTION_SPEC_VERSION 1 +#define VK_KHR_SHADER_RELAXED_EXTENDED_INSTRUCTION_EXTENSION_NAME "VK_KHR_shader_relaxed_extended_instruction" +typedef struct VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderRelaxedExtendedInstruction; +} VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + + + +// VK_KHR_maintenance7 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_maintenance7 1 +#define VK_KHR_MAINTENANCE_7_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_7_EXTENSION_NAME "VK_KHR_maintenance7" + +typedef enum VkPhysicalDeviceLayeredApiKHR { + VK_PHYSICAL_DEVICE_LAYERED_API_VULKAN_KHR = 0, + VK_PHYSICAL_DEVICE_LAYERED_API_D3D12_KHR = 1, + VK_PHYSICAL_DEVICE_LAYERED_API_METAL_KHR = 2, + VK_PHYSICAL_DEVICE_LAYERED_API_OPENGL_KHR = 3, + VK_PHYSICAL_DEVICE_LAYERED_API_OPENGLES_KHR = 4, + VK_PHYSICAL_DEVICE_LAYERED_API_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPhysicalDeviceLayeredApiKHR; +typedef struct VkPhysicalDeviceMaintenance7FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 maintenance7; +} VkPhysicalDeviceMaintenance7FeaturesKHR; + +typedef struct VkPhysicalDeviceMaintenance7PropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 robustFragmentShadingRateAttachmentAccess; + VkBool32 separateDepthStencilAttachmentAccess; + uint32_t maxDescriptorSetTotalUniformBuffersDynamic; + uint32_t maxDescriptorSetTotalStorageBuffersDynamic; + uint32_t maxDescriptorSetTotalBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindTotalBuffersDynamic; +} VkPhysicalDeviceMaintenance7PropertiesKHR; + +typedef struct VkPhysicalDeviceLayeredApiPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t vendorID; + uint32_t deviceID; + VkPhysicalDeviceLayeredApiKHR layeredAPI; + char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]; +} VkPhysicalDeviceLayeredApiPropertiesKHR; + +typedef struct VkPhysicalDeviceLayeredApiPropertiesListKHR { + VkStructureType sType; + void* pNext; + uint32_t layeredApiCount; + VkPhysicalDeviceLayeredApiPropertiesKHR* pLayeredApis; +} VkPhysicalDeviceLayeredApiPropertiesListKHR; + +typedef struct VkPhysicalDeviceLayeredApiVulkanPropertiesKHR { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceProperties2 properties; +} VkPhysicalDeviceLayeredApiVulkanPropertiesKHR; + + + +// VK_KHR_maintenance8 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_maintenance8 1 +#define VK_KHR_MAINTENANCE_8_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_8_EXTENSION_NAME "VK_KHR_maintenance8" +typedef VkFlags64 VkAccessFlags3KHR; + +// Flag bits for VkAccessFlagBits3KHR +typedef VkFlags64 VkAccessFlagBits3KHR; +static const VkAccessFlagBits3KHR VK_ACCESS_3_NONE_KHR = 0ULL; + +typedef struct VkPhysicalDeviceMaintenance8FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 maintenance8; +} VkPhysicalDeviceMaintenance8FeaturesKHR; + +typedef struct VkMemoryBarrierAccessFlags3KHR { + VkStructureType sType; + const void* pNext; + VkAccessFlags3KHR srcAccessMask3; + VkAccessFlags3KHR dstAccessMask3; +} VkMemoryBarrierAccessFlags3KHR; + + + +// VK_KHR_video_maintenance2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_video_maintenance2 1 +#define VK_KHR_VIDEO_MAINTENANCE_2_SPEC_VERSION 1 +#define VK_KHR_VIDEO_MAINTENANCE_2_EXTENSION_NAME "VK_KHR_video_maintenance2" +typedef struct VkPhysicalDeviceVideoMaintenance2FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 videoMaintenance2; +} VkPhysicalDeviceVideoMaintenance2FeaturesKHR; + +typedef struct VkVideoDecodeH264InlineSessionParametersInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoH264SequenceParameterSet* pStdSPS; + const StdVideoH264PictureParameterSet* pStdPPS; +} VkVideoDecodeH264InlineSessionParametersInfoKHR; + +typedef struct VkVideoDecodeH265InlineSessionParametersInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoH265VideoParameterSet* pStdVPS; + const StdVideoH265SequenceParameterSet* pStdSPS; + const StdVideoH265PictureParameterSet* pStdPPS; +} VkVideoDecodeH265InlineSessionParametersInfoKHR; + +typedef struct VkVideoDecodeAV1InlineSessionParametersInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoAV1SequenceHeader* pStdSequenceHeader; +} VkVideoDecodeAV1InlineSessionParametersInfoKHR; + + + +// VK_KHR_depth_clamp_zero_one is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_depth_clamp_zero_one 1 +#define VK_KHR_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION 1 +#define VK_KHR_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME "VK_KHR_depth_clamp_zero_one" +typedef struct VkPhysicalDeviceDepthClampZeroOneFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 depthClampZeroOne; +} VkPhysicalDeviceDepthClampZeroOneFeaturesKHR; + + + +// VK_EXT_debug_report is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_debug_report 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) +#define VK_EXT_DEBUG_REPORT_SPEC_VERSION 10 +#define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report" + +typedef enum VkDebugReportObjectTypeEXT { + VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT = 0, + VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT = 1, + VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT = 2, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT = 3, + VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT = 4, + VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT = 5, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT = 6, + VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT = 7, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT = 8, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT = 9, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT = 10, + VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT = 11, + VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT = 12, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT = 13, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT = 14, + VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT = 15, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT = 16, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT = 17, + VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT = 18, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT = 19, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT = 20, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT = 21, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT = 22, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT = 23, + VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT = 24, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT = 25, + VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26, + VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27, + VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28, + VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29, + VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30, + 
VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT = 33, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT = 1000156000, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT = 1000085000, + VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT = 1000029000, + VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT = 1000029001, + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT = 1000150000, + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = 1000165000, + VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_MODULE_NV_EXT = 1000307000, + VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_FUNCTION_NV_EXT = 1000307001, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT = 1000366000, + // VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT is a deprecated alias + VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, + // VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT is a deprecated alias + VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugReportObjectTypeEXT; + +typedef enum VkDebugReportFlagBitsEXT { + VK_DEBUG_REPORT_INFORMATION_BIT_EXT = 0x00000001, + VK_DEBUG_REPORT_WARNING_BIT_EXT = 0x00000002, + VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT = 0x00000004, + VK_DEBUG_REPORT_ERROR_BIT_EXT = 0x00000008, + VK_DEBUG_REPORT_DEBUG_BIT_EXT = 0x00000010, + VK_DEBUG_REPORT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugReportFlagBitsEXT; +typedef VkFlags VkDebugReportFlagsEXT; +typedef VkBool32 (VKAPI_PTR *PFN_vkDebugReportCallbackEXT)( + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage, + void* pUserData); + +typedef struct VkDebugReportCallbackCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportFlagsEXT flags; + PFN_vkDebugReportCallbackEXT pfnCallback; + void* pUserData; +} VkDebugReportCallbackCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugReportCallbackEXT)(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback); +typedef void (VKAPI_PTR *PFN_vkDestroyDebugReportCallbackEXT)(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDebugReportMessageEXT)(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT( + VkInstance instance, + const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugReportCallbackEXT* pCallback); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT( + VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT( + VkInstance instance, + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t 
messageCode, + const char* pLayerPrefix, + const char* pMessage); +#endif + + +// VK_NV_glsl_shader is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_glsl_shader 1 +#define VK_NV_GLSL_SHADER_SPEC_VERSION 1 +#define VK_NV_GLSL_SHADER_EXTENSION_NAME "VK_NV_glsl_shader" + + +// VK_EXT_depth_range_unrestricted is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_depth_range_unrestricted 1 +#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION 1 +#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "VK_EXT_depth_range_unrestricted" + + +// VK_IMG_filter_cubic is a preprocessor guard. Do not pass it to API calls. +#define VK_IMG_filter_cubic 1 +#define VK_IMG_FILTER_CUBIC_SPEC_VERSION 1 +#define VK_IMG_FILTER_CUBIC_EXTENSION_NAME "VK_IMG_filter_cubic" + + +// VK_AMD_rasterization_order is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_rasterization_order 1 +#define VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION 1 +#define VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME "VK_AMD_rasterization_order" + +typedef enum VkRasterizationOrderAMD { + VK_RASTERIZATION_ORDER_STRICT_AMD = 0, + VK_RASTERIZATION_ORDER_RELAXED_AMD = 1, + VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF +} VkRasterizationOrderAMD; +typedef struct VkPipelineRasterizationStateRasterizationOrderAMD { + VkStructureType sType; + const void* pNext; + VkRasterizationOrderAMD rasterizationOrder; +} VkPipelineRasterizationStateRasterizationOrderAMD; + + + +// VK_AMD_shader_trinary_minmax is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_trinary_minmax 1 +#define VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION 1 +#define VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME "VK_AMD_shader_trinary_minmax" + + +// VK_AMD_shader_explicit_vertex_parameter is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_explicit_vertex_parameter 1 +#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION 1 +#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME "VK_AMD_shader_explicit_vertex_parameter" + + +// VK_EXT_debug_marker is a preprocessor guard. Do not pass it to API calls. 
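The following block declares VK_EXT_debug_marker. A small sketch of bracketing a region of a command buffer with a named, colored marker (assumes the extension is enabled, typically under a debugging layer; the function name and label are illustrative):

#include <vulkan/vulkan.h>

/* Illustrative helper: wraps the commands recorded between Begin/End
 * in a marker region that tools such as RenderDoc can display. */
static void annotate_pass(VkCommandBuffer cmd)
{
    VkDebugMarkerMarkerInfoEXT marker;

    marker.sType       = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT;
    marker.pNext       = NULL;
    marker.pMarkerName = "Example pass";
    marker.color[0]    = 1.0f;
    marker.color[1]    = 0.0f;
    marker.color[2]    = 0.0f;
    marker.color[3]    = 1.0f;

    vkCmdDebugMarkerBeginEXT(cmd, &marker);
    /* ... record the pass ... */
    vkCmdDebugMarkerEndEXT(cmd);
}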
+#define VK_EXT_debug_marker 1 +#define VK_EXT_DEBUG_MARKER_SPEC_VERSION 4 +#define VK_EXT_DEBUG_MARKER_EXTENSION_NAME "VK_EXT_debug_marker" +typedef struct VkDebugMarkerObjectNameInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportObjectTypeEXT objectType; + uint64_t object; + const char* pObjectName; +} VkDebugMarkerObjectNameInfoEXT; + +typedef struct VkDebugMarkerObjectTagInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportObjectTypeEXT objectType; + uint64_t object; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkDebugMarkerObjectTagInfoEXT; + +typedef struct VkDebugMarkerMarkerInfoEXT { + VkStructureType sType; + const void* pNext; + const char* pMarkerName; + float color[4]; +} VkDebugMarkerMarkerInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectTagEXT)(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo); +typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectNameEXT)(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerBeginEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerEndEXT)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerInsertEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectTagEXT( + VkDevice device, + const VkDebugMarkerObjectTagInfoEXT* pTagInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectNameEXT( + VkDevice device, + const VkDebugMarkerObjectNameInfoEXT* pNameInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerBeginEXT( + VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerEndEXT( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerInsertEXT( + VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +#endif + + +// VK_AMD_gcn_shader is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_gcn_shader 1 +#define VK_AMD_GCN_SHADER_SPEC_VERSION 1 +#define VK_AMD_GCN_SHADER_EXTENSION_NAME "VK_AMD_gcn_shader" + + +// VK_NV_dedicated_allocation is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_dedicated_allocation 1 +#define VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION 1 +#define VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_NV_dedicated_allocation" +typedef struct VkDedicatedAllocationImageCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 dedicatedAllocation; +} VkDedicatedAllocationImageCreateInfoNV; + +typedef struct VkDedicatedAllocationBufferCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 dedicatedAllocation; +} VkDedicatedAllocationBufferCreateInfoNV; + +typedef struct VkDedicatedAllocationMemoryAllocateInfoNV { + VkStructureType sType; + const void* pNext; + VkImage image; + VkBuffer buffer; +} VkDedicatedAllocationMemoryAllocateInfoNV; + + + +// VK_EXT_transform_feedback is a preprocessor guard. Do not pass it to API calls. 
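The next block declares VK_EXT_transform_feedback. A compact sketch of the capture bracket placed around draw calls (assumes the extension is enabled and that xfb_buffer was created with VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT; no counter buffers are used, and the helper name is illustrative):

#include <vulkan/vulkan.h>

/* Illustrative helper: binds one capture buffer to binding 0 and records
 * the begin/end pair around the draws whose vertex output is captured. */
static void draw_with_xfb(VkCommandBuffer cmd, VkBuffer xfb_buffer, VkDeviceSize size)
{
    const VkDeviceSize offset = 0;

    vkCmdBindTransformFeedbackBuffersEXT(cmd, 0 /* firstBinding */, 1, &xfb_buffer, &offset, &size);
    vkCmdBeginTransformFeedbackEXT      (cmd, 0 /* firstCounterBuffer */, 0, NULL, NULL);
    /* ... vkCmdDraw*() calls whose vertex outputs are captured ... */
    vkCmdEndTransformFeedbackEXT        (cmd, 0, 0, NULL, NULL);
}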
+#define VK_EXT_transform_feedback 1 +#define VK_EXT_TRANSFORM_FEEDBACK_SPEC_VERSION 1 +#define VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME "VK_EXT_transform_feedback" +typedef VkFlags VkPipelineRasterizationStateStreamCreateFlagsEXT; +typedef struct VkPhysicalDeviceTransformFeedbackFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 transformFeedback; + VkBool32 geometryStreams; +} VkPhysicalDeviceTransformFeedbackFeaturesEXT; + +typedef struct VkPhysicalDeviceTransformFeedbackPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxTransformFeedbackStreams; + uint32_t maxTransformFeedbackBuffers; + VkDeviceSize maxTransformFeedbackBufferSize; + uint32_t maxTransformFeedbackStreamDataSize; + uint32_t maxTransformFeedbackBufferDataSize; + uint32_t maxTransformFeedbackBufferDataStride; + VkBool32 transformFeedbackQueries; + VkBool32 transformFeedbackStreamsLinesTriangles; + VkBool32 transformFeedbackRasterizationStreamSelect; + VkBool32 transformFeedbackDraw; +} VkPhysicalDeviceTransformFeedbackPropertiesEXT; + +typedef struct VkPipelineRasterizationStateStreamCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationStateStreamCreateFlagsEXT flags; + uint32_t rasterizationStream; +} VkPipelineRasterizationStateStreamCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdBindTransformFeedbackBuffersEXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes); +typedef void (VKAPI_PTR *PFN_vkCmdBeginTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdEndTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdBeginQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index); +typedef void (VKAPI_PTR *PFN_vkCmdEndQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectByteCountEXT)(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindTransformFeedbackBuffersEXT( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginTransformFeedbackEXT( + VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer* pCounterBuffers, + const VkDeviceSize* pCounterBufferOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndTransformFeedbackEXT( + VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer* pCounterBuffers, + const VkDeviceSize* pCounterBufferOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginQueryIndexedEXT( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags, + uint32_t index); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndQueryIndexedEXT( + VkCommandBuffer 
commandBuffer, + VkQueryPool queryPool, + uint32_t query, + uint32_t index); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectByteCountEXT( + VkCommandBuffer commandBuffer, + uint32_t instanceCount, + uint32_t firstInstance, + VkBuffer counterBuffer, + VkDeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride); +#endif + + +// VK_NVX_binary_import is a preprocessor guard. Do not pass it to API calls. +#define VK_NVX_binary_import 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuModuleNVX) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuFunctionNVX) +#define VK_NVX_BINARY_IMPORT_SPEC_VERSION 2 +#define VK_NVX_BINARY_IMPORT_EXTENSION_NAME "VK_NVX_binary_import" +typedef struct VkCuModuleCreateInfoNVX { + VkStructureType sType; + const void* pNext; + size_t dataSize; + const void* pData; +} VkCuModuleCreateInfoNVX; + +typedef struct VkCuModuleTexturingModeCreateInfoNVX { + VkStructureType sType; + const void* pNext; + VkBool32 use64bitTexturing; +} VkCuModuleTexturingModeCreateInfoNVX; + +typedef struct VkCuFunctionCreateInfoNVX { + VkStructureType sType; + const void* pNext; + VkCuModuleNVX module; + const char* pName; +} VkCuFunctionCreateInfoNVX; + +typedef struct VkCuLaunchInfoNVX { + VkStructureType sType; + const void* pNext; + VkCuFunctionNVX function; + uint32_t gridDimX; + uint32_t gridDimY; + uint32_t gridDimZ; + uint32_t blockDimX; + uint32_t blockDimY; + uint32_t blockDimZ; + uint32_t sharedMemBytes; + size_t paramCount; + const void* const * pParams; + size_t extraCount; + const void* const * pExtras; +} VkCuLaunchInfoNVX; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateCuModuleNVX)(VkDevice device, const VkCuModuleCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuModuleNVX* pModule); +typedef VkResult (VKAPI_PTR *PFN_vkCreateCuFunctionNVX)(VkDevice device, const VkCuFunctionCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuFunctionNVX* pFunction); +typedef void (VKAPI_PTR *PFN_vkDestroyCuModuleNVX)(VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDestroyCuFunctionNVX)(VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdCuLaunchKernelNVX)(VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX* pLaunchInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuModuleNVX( + VkDevice device, + const VkCuModuleCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCuModuleNVX* pModule); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuFunctionNVX( + VkDevice device, + const VkCuFunctionCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCuFunctionNVX* pFunction); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCuModuleNVX( + VkDevice device, + VkCuModuleNVX module, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCuFunctionNVX( + VkDevice device, + VkCuFunctionNVX function, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdCuLaunchKernelNVX( + VkCommandBuffer commandBuffer, + const VkCuLaunchInfoNVX* pLaunchInfo); +#endif + + +// VK_NVX_image_view_handle is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NVX_image_view_handle 1 +#define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 3 +#define VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME "VK_NVX_image_view_handle" +typedef struct VkImageViewHandleInfoNVX { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkDescriptorType descriptorType; + VkSampler sampler; +} VkImageViewHandleInfoNVX; + +typedef struct VkImageViewAddressPropertiesNVX { + VkStructureType sType; + void* pNext; + VkDeviceAddress deviceAddress; + VkDeviceSize size; +} VkImageViewAddressPropertiesNVX; + +typedef uint32_t (VKAPI_PTR *PFN_vkGetImageViewHandleNVX)(VkDevice device, const VkImageViewHandleInfoNVX* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetImageViewHandle64NVX)(VkDevice device, const VkImageViewHandleInfoNVX* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetImageViewAddressNVX)(VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR uint32_t VKAPI_CALL vkGetImageViewHandleNVX( + VkDevice device, + const VkImageViewHandleInfoNVX* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetImageViewHandle64NVX( + VkDevice device, + const VkImageViewHandleInfoNVX* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewAddressNVX( + VkDevice device, + VkImageView imageView, + VkImageViewAddressPropertiesNVX* pProperties); +#endif + + +// VK_AMD_draw_indirect_count is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_draw_indirect_count 1 +#define VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION 2 +#define VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_AMD_draw_indirect_count" +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountAMD( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountAMD( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + + +// VK_AMD_negative_viewport_height is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_negative_viewport_height 1 +#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION 1 +#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME "VK_AMD_negative_viewport_height" + + +// VK_AMD_gpu_shader_half_float is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_gpu_shader_half_float 1 +#define VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION 2 +#define VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME "VK_AMD_gpu_shader_half_float" + + +// VK_AMD_shader_ballot is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_ballot 1 +#define VK_AMD_SHADER_BALLOT_SPEC_VERSION 1 +#define VK_AMD_SHADER_BALLOT_EXTENSION_NAME "VK_AMD_shader_ballot" + + +// VK_AMD_texture_gather_bias_lod is a preprocessor guard. Do not pass it to API calls. 
+#define VK_AMD_texture_gather_bias_lod 1 +#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION 1 +#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME "VK_AMD_texture_gather_bias_lod" +typedef struct VkTextureLODGatherFormatPropertiesAMD { + VkStructureType sType; + void* pNext; + VkBool32 supportsTextureGatherLODBiasAMD; +} VkTextureLODGatherFormatPropertiesAMD; + + + +// VK_AMD_shader_info is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_info 1 +#define VK_AMD_SHADER_INFO_SPEC_VERSION 1 +#define VK_AMD_SHADER_INFO_EXTENSION_NAME "VK_AMD_shader_info" + +typedef enum VkShaderInfoTypeAMD { + VK_SHADER_INFO_TYPE_STATISTICS_AMD = 0, + VK_SHADER_INFO_TYPE_BINARY_AMD = 1, + VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD = 2, + VK_SHADER_INFO_TYPE_MAX_ENUM_AMD = 0x7FFFFFFF +} VkShaderInfoTypeAMD; +typedef struct VkShaderResourceUsageAMD { + uint32_t numUsedVgprs; + uint32_t numUsedSgprs; + uint32_t ldsSizePerLocalWorkGroup; + size_t ldsUsageSizeInBytes; + size_t scratchMemUsageInBytes; +} VkShaderResourceUsageAMD; + +typedef struct VkShaderStatisticsInfoAMD { + VkShaderStageFlags shaderStageMask; + VkShaderResourceUsageAMD resourceUsage; + uint32_t numPhysicalVgprs; + uint32_t numPhysicalSgprs; + uint32_t numAvailableVgprs; + uint32_t numAvailableSgprs; + uint32_t computeWorkGroupSize[3]; +} VkShaderStatisticsInfoAMD; + +typedef VkResult (VKAPI_PTR *PFN_vkGetShaderInfoAMD)(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderInfoAMD( + VkDevice device, + VkPipeline pipeline, + VkShaderStageFlagBits shaderStage, + VkShaderInfoTypeAMD infoType, + size_t* pInfoSize, + void* pInfo); +#endif + + +// VK_AMD_shader_image_load_store_lod is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_image_load_store_lod 1 +#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION 1 +#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME "VK_AMD_shader_image_load_store_lod" + + +// VK_NV_corner_sampled_image is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_corner_sampled_image 1 +#define VK_NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION 2 +#define VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME "VK_NV_corner_sampled_image" +typedef struct VkPhysicalDeviceCornerSampledImageFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cornerSampledImage; +} VkPhysicalDeviceCornerSampledImageFeaturesNV; + + + +// VK_IMG_format_pvrtc is a preprocessor guard. Do not pass it to API calls. +#define VK_IMG_format_pvrtc 1 +#define VK_IMG_FORMAT_PVRTC_SPEC_VERSION 1 +#define VK_IMG_FORMAT_PVRTC_EXTENSION_NAME "VK_IMG_format_pvrtc" + + +// VK_NV_external_memory_capabilities is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_external_memory_capabilities 1 +#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_NV_external_memory_capabilities" + +typedef enum VkExternalMemoryHandleTypeFlagBitsNV { + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV = 0x00000001, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV = 0x00000002, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV = 0x00000004, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV = 0x00000008, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkExternalMemoryHandleTypeFlagBitsNV; +typedef VkFlags VkExternalMemoryHandleTypeFlagsNV; + +typedef enum VkExternalMemoryFeatureFlagBitsNV { + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV = 0x00000001, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV = 0x00000002, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV = 0x00000004, + VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkExternalMemoryFeatureFlagBitsNV; +typedef VkFlags VkExternalMemoryFeatureFlagsNV; +typedef struct VkExternalImageFormatPropertiesNV { + VkImageFormatProperties imageFormatProperties; + VkExternalMemoryFeatureFlagsNV externalMemoryFeatures; + VkExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes; + VkExternalMemoryHandleTypeFlagsNV compatibleHandleTypes; +} VkExternalImageFormatPropertiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkExternalMemoryHandleTypeFlagsNV externalHandleType, + VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); +#endif + + +// VK_NV_external_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_external_memory 1 +#define VK_NV_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME "VK_NV_external_memory" +typedef struct VkExternalMemoryImageCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleTypes; +} VkExternalMemoryImageCreateInfoNV; + +typedef struct VkExportMemoryAllocateInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleTypes; +} VkExportMemoryAllocateInfoNV; + + + +// VK_EXT_validation_flags is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_validation_flags 1 +#define VK_EXT_VALIDATION_FLAGS_SPEC_VERSION 3 +#define VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME "VK_EXT_validation_flags" + +typedef enum VkValidationCheckEXT { + VK_VALIDATION_CHECK_ALL_EXT = 0, + VK_VALIDATION_CHECK_SHADERS_EXT = 1, + VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationCheckEXT; +typedef struct VkValidationFlagsEXT { + VkStructureType sType; + const void* pNext; + uint32_t disabledValidationCheckCount; + const VkValidationCheckEXT* pDisabledValidationChecks; +} VkValidationFlagsEXT; + + + +// VK_EXT_shader_subgroup_ballot is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_shader_subgroup_ballot 1 +#define VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION 1 +#define VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME "VK_EXT_shader_subgroup_ballot" + + +// VK_EXT_shader_subgroup_vote is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_subgroup_vote 1 +#define VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION 1 +#define VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME "VK_EXT_shader_subgroup_vote" + + +// VK_EXT_texture_compression_astc_hdr is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_texture_compression_astc_hdr 1 +#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION 1 +#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME "VK_EXT_texture_compression_astc_hdr" +typedef VkPhysicalDeviceTextureCompressionASTCHDRFeatures VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT; + + + +// VK_EXT_astc_decode_mode is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_astc_decode_mode 1 +#define VK_EXT_ASTC_DECODE_MODE_SPEC_VERSION 1 +#define VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME "VK_EXT_astc_decode_mode" +typedef struct VkImageViewASTCDecodeModeEXT { + VkStructureType sType; + const void* pNext; + VkFormat decodeMode; +} VkImageViewASTCDecodeModeEXT; + +typedef struct VkPhysicalDeviceASTCDecodeFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 decodeModeSharedExponent; +} VkPhysicalDeviceASTCDecodeFeaturesEXT; + + + +// VK_EXT_pipeline_robustness is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_robustness 1 +#define VK_EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_pipeline_robustness" +typedef VkPipelineRobustnessBufferBehavior VkPipelineRobustnessBufferBehaviorEXT; + +typedef VkPipelineRobustnessImageBehavior VkPipelineRobustnessImageBehaviorEXT; + +typedef VkPhysicalDevicePipelineRobustnessFeatures VkPhysicalDevicePipelineRobustnessFeaturesEXT; + +typedef VkPhysicalDevicePipelineRobustnessProperties VkPhysicalDevicePipelineRobustnessPropertiesEXT; + +typedef VkPipelineRobustnessCreateInfo VkPipelineRobustnessCreateInfoEXT; + + + +// VK_EXT_conditional_rendering is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_conditional_rendering 1 +#define VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION 2 +#define VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME "VK_EXT_conditional_rendering" + +typedef enum VkConditionalRenderingFlagBitsEXT { + VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT = 0x00000001, + VK_CONDITIONAL_RENDERING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkConditionalRenderingFlagBitsEXT; +typedef VkFlags VkConditionalRenderingFlagsEXT; +typedef struct VkConditionalRenderingBeginInfoEXT { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; + VkDeviceSize offset; + VkConditionalRenderingFlagsEXT flags; +} VkConditionalRenderingBeginInfoEXT; + +typedef struct VkPhysicalDeviceConditionalRenderingFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 conditionalRendering; + VkBool32 inheritedConditionalRendering; +} VkPhysicalDeviceConditionalRenderingFeaturesEXT; + +typedef struct VkCommandBufferInheritanceConditionalRenderingInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 conditionalRenderingEnable; +} VkCommandBufferInheritanceConditionalRenderingInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdBeginConditionalRenderingEXT)(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); +typedef void (VKAPI_PTR *PFN_vkCmdEndConditionalRenderingEXT)(VkCommandBuffer commandBuffer); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBeginConditionalRenderingEXT( + VkCommandBuffer commandBuffer, + const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndConditionalRenderingEXT( + VkCommandBuffer commandBuffer); +#endif + + +// VK_NV_clip_space_w_scaling is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_clip_space_w_scaling 1 +#define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1 +#define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling" +typedef struct VkViewportWScalingNV { + float xcoeff; + float ycoeff; +} VkViewportWScalingNV; + +typedef struct VkPipelineViewportWScalingStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 viewportWScalingEnable; + uint32_t viewportCount; + const VkViewportWScalingNV* pViewportWScalings; +} VkPipelineViewportWScalingStateCreateInfoNV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingNV( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportWScalingNV* pViewportWScalings); +#endif + + +// VK_EXT_direct_mode_display is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_direct_mode_display 1 +#define VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION 1 +#define VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME "VK_EXT_direct_mode_display" +typedef VkResult (VKAPI_PTR *PFN_vkReleaseDisplayEXT)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display); +#endif + + +// VK_EXT_display_surface_counter is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_display_surface_counter 1 +#define VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION 1 +#define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter" + +typedef enum VkSurfaceCounterFlagBitsEXT { + VK_SURFACE_COUNTER_VBLANK_BIT_EXT = 0x00000001, + // VK_SURFACE_COUNTER_VBLANK_EXT is a deprecated alias + VK_SURFACE_COUNTER_VBLANK_EXT = VK_SURFACE_COUNTER_VBLANK_BIT_EXT, + VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkSurfaceCounterFlagBitsEXT; +typedef VkFlags VkSurfaceCounterFlagsEXT; +typedef struct VkSurfaceCapabilities2EXT { + VkStructureType sType; + void* pNext; + uint32_t minImageCount; + uint32_t maxImageCount; + VkExtent2D currentExtent; + VkExtent2D minImageExtent; + VkExtent2D maxImageExtent; + uint32_t maxImageArrayLayers; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkSurfaceTransformFlagBitsKHR currentTransform; + VkCompositeAlphaFlagsKHR supportedCompositeAlpha; + VkImageUsageFlags supportedUsageFlags; + VkSurfaceCounterFlagsEXT supportedSurfaceCounters; +} VkSurfaceCapabilities2EXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT* pSurfaceCapabilities); +#endif + + +// VK_EXT_display_control is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_display_control 1 +#define VK_EXT_DISPLAY_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME "VK_EXT_display_control" + +typedef enum VkDisplayPowerStateEXT { + VK_DISPLAY_POWER_STATE_OFF_EXT = 0, + VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1, + VK_DISPLAY_POWER_STATE_ON_EXT = 2, + VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDisplayPowerStateEXT; + +typedef enum VkDeviceEventTypeEXT { + VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0, + VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceEventTypeEXT; + +typedef enum VkDisplayEventTypeEXT { + VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0, + VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDisplayEventTypeEXT; +typedef struct VkDisplayPowerInfoEXT { + VkStructureType sType; + const void* pNext; + VkDisplayPowerStateEXT powerState; +} VkDisplayPowerInfoEXT; + +typedef struct VkDeviceEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceEventTypeEXT deviceEvent; +} VkDeviceEventInfoEXT; + +typedef struct VkDisplayEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkDisplayEventTypeEXT displayEvent; +} VkDisplayEventInfoEXT; + +typedef struct VkSwapchainCounterCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkSurfaceCounterFlagsEXT surfaceCounters; +} VkSwapchainCounterCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkDisplayPowerControlEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterDeviceEventEXT)(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterDisplayEventEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainCounterEXT)(VkDevice device, VkSwapchainKHR swapchain, 
VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkDisplayPowerControlEXT( + VkDevice device, + VkDisplayKHR display, + const VkDisplayPowerInfoEXT* pDisplayPowerInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDeviceEventEXT( + VkDevice device, + const VkDeviceEventInfoEXT* pDeviceEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDisplayEventEXT( + VkDevice device, + VkDisplayKHR display, + const VkDisplayEventInfoEXT* pDisplayEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainCounterEXT( + VkDevice device, + VkSwapchainKHR swapchain, + VkSurfaceCounterFlagBitsEXT counter, + uint64_t* pCounterValue); +#endif + + +// VK_GOOGLE_display_timing is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_display_timing 1 +#define VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION 1 +#define VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME "VK_GOOGLE_display_timing" +typedef struct VkRefreshCycleDurationGOOGLE { + uint64_t refreshDuration; +} VkRefreshCycleDurationGOOGLE; + +typedef struct VkPastPresentationTimingGOOGLE { + uint32_t presentID; + uint64_t desiredPresentTime; + uint64_t actualPresentTime; + uint64_t earliestPresentTime; + uint64_t presentMargin; +} VkPastPresentationTimingGOOGLE; + +typedef struct VkPresentTimeGOOGLE { + uint32_t presentID; + uint64_t desiredPresentTime; +} VkPresentTimeGOOGLE; + +typedef struct VkPresentTimesInfoGOOGLE { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentTimeGOOGLE* pTimes; +} VkPresentTimesInfoGOOGLE; + +typedef VkResult (VKAPI_PTR *PFN_vkGetRefreshCycleDurationGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPastPresentationTimingGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetRefreshCycleDurationGOOGLE( + VkDevice device, + VkSwapchainKHR swapchain, + VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE( + VkDevice device, + VkSwapchainKHR swapchain, + uint32_t* pPresentationTimingCount, + VkPastPresentationTimingGOOGLE* pPresentationTimings); +#endif + + +// VK_NV_sample_mask_override_coverage is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_sample_mask_override_coverage 1 +#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION 1 +#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME "VK_NV_sample_mask_override_coverage" + + +// VK_NV_geometry_shader_passthrough is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_geometry_shader_passthrough 1 +#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION 1 +#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME "VK_NV_geometry_shader_passthrough" + + +// VK_NV_viewport_array2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_viewport_array2 1 +#define VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION 1 +#define VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME "VK_NV_viewport_array2" +// VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION is a deprecated alias +#define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION +// VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME is a deprecated alias +#define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME + + +// VK_NVX_multiview_per_view_attributes is a preprocessor guard. Do not pass it to API calls. +#define VK_NVX_multiview_per_view_attributes 1 +#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION 1 +#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "VK_NVX_multiview_per_view_attributes" +typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { + VkStructureType sType; + void* pNext; + VkBool32 perViewPositionAllComponents; +} VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + +typedef struct VkMultiviewPerViewAttributesInfoNVX { + VkStructureType sType; + const void* pNext; + VkBool32 perViewAttributes; + VkBool32 perViewAttributesPositionXOnly; +} VkMultiviewPerViewAttributesInfoNVX; + + + +// VK_NV_viewport_swizzle is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_viewport_swizzle 1 +#define VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION 1 +#define VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME "VK_NV_viewport_swizzle" + +typedef enum VkViewportCoordinateSwizzleNV { + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV = 0, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV = 1, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV = 2, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV = 3, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV = 4, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7, + VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF +} VkViewportCoordinateSwizzleNV; +typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV; +typedef struct VkViewportSwizzleNV { + VkViewportCoordinateSwizzleNV x; + VkViewportCoordinateSwizzleNV y; + VkViewportCoordinateSwizzleNV z; + VkViewportCoordinateSwizzleNV w; +} VkViewportSwizzleNV; + +typedef struct VkPipelineViewportSwizzleStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineViewportSwizzleStateCreateFlagsNV flags; + uint32_t viewportCount; + const VkViewportSwizzleNV* pViewportSwizzles; +} VkPipelineViewportSwizzleStateCreateInfoNV; + + + +// VK_EXT_discard_rectangles is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_discard_rectangles 1 +#define VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION 2 +#define VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME "VK_EXT_discard_rectangles" + +typedef enum VkDiscardRectangleModeEXT { + VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0, + VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1, + VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDiscardRectangleModeEXT; +typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT; +typedef struct VkPhysicalDeviceDiscardRectanglePropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxDiscardRectangles; +} VkPhysicalDeviceDiscardRectanglePropertiesEXT; + +typedef struct VkPipelineDiscardRectangleStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineDiscardRectangleStateCreateFlagsEXT flags; + VkDiscardRectangleModeEXT discardRectangleMode; + uint32_t discardRectangleCount; + const VkRect2D* pDiscardRectangles; +} VkPipelineDiscardRectangleStateCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEXT)(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles); +typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 discardRectangleEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleModeEXT)(VkCommandBuffer commandBuffer, VkDiscardRectangleModeEXT discardRectangleMode); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEXT( + VkCommandBuffer commandBuffer, + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D* pDiscardRectangles); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 discardRectangleEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleModeEXT( + VkCommandBuffer commandBuffer, + VkDiscardRectangleModeEXT discardRectangleMode); +#endif + + +// VK_EXT_conservative_rasterization is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_conservative_rasterization 1 +#define VK_EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION 1 +#define VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME "VK_EXT_conservative_rasterization" + +typedef enum VkConservativeRasterizationModeEXT { + VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT = 0, + VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT = 1, + VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT = 2, + VK_CONSERVATIVE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkConservativeRasterizationModeEXT; +typedef VkFlags VkPipelineRasterizationConservativeStateCreateFlagsEXT; +typedef struct VkPhysicalDeviceConservativeRasterizationPropertiesEXT { + VkStructureType sType; + void* pNext; + float primitiveOverestimationSize; + float maxExtraPrimitiveOverestimationSize; + float extraPrimitiveOverestimationSizeGranularity; + VkBool32 primitiveUnderestimation; + VkBool32 conservativePointAndLineRasterization; + VkBool32 degenerateTrianglesRasterized; + VkBool32 degenerateLinesRasterized; + VkBool32 fullyCoveredFragmentShaderInputVariable; + VkBool32 conservativeRasterizationPostDepthCoverage; +} VkPhysicalDeviceConservativeRasterizationPropertiesEXT; + +typedef struct VkPipelineRasterizationConservativeStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationConservativeStateCreateFlagsEXT flags; + VkConservativeRasterizationModeEXT conservativeRasterizationMode; + float extraPrimitiveOverestimationSize; +} VkPipelineRasterizationConservativeStateCreateInfoEXT; + + + +// VK_EXT_depth_clip_enable is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_depth_clip_enable 1 +#define VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION 1 +#define VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME "VK_EXT_depth_clip_enable" +typedef VkFlags VkPipelineRasterizationDepthClipStateCreateFlagsEXT; +typedef struct VkPhysicalDeviceDepthClipEnableFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthClipEnable; +} VkPhysicalDeviceDepthClipEnableFeaturesEXT; + +typedef struct VkPipelineRasterizationDepthClipStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationDepthClipStateCreateFlagsEXT flags; + VkBool32 depthClipEnable; +} VkPipelineRasterizationDepthClipStateCreateInfoEXT; + + + +// VK_EXT_swapchain_colorspace is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_swapchain_colorspace 1 +#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 5 +#define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace" + + +// VK_EXT_hdr_metadata is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_hdr_metadata 1 +#define VK_EXT_HDR_METADATA_SPEC_VERSION 3 +#define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata" +typedef struct VkXYColorEXT { + float x; + float y; +} VkXYColorEXT; + +typedef struct VkHdrMetadataEXT { + VkStructureType sType; + const void* pNext; + VkXYColorEXT displayPrimaryRed; + VkXYColorEXT displayPrimaryGreen; + VkXYColorEXT displayPrimaryBlue; + VkXYColorEXT whitePoint; + float maxLuminance; + float minLuminance; + float maxContentLightLevel; + float maxFrameAverageLightLevel; +} VkHdrMetadataEXT; + +typedef void (VKAPI_PTR *PFN_vkSetHdrMetadataEXT)(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT( + VkDevice device, + uint32_t swapchainCount, + const VkSwapchainKHR* pSwapchains, + const VkHdrMetadataEXT* pMetadata); +#endif + + +// VK_IMG_relaxed_line_rasterization is a preprocessor guard. Do not pass it to API calls. +#define VK_IMG_relaxed_line_rasterization 1 +#define VK_IMG_RELAXED_LINE_RASTERIZATION_SPEC_VERSION 1 +#define VK_IMG_RELAXED_LINE_RASTERIZATION_EXTENSION_NAME "VK_IMG_relaxed_line_rasterization" +typedef struct VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG { + VkStructureType sType; + void* pNext; + VkBool32 relaxedLineRasterization; +} VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG; + + + +// VK_EXT_external_memory_dma_buf is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_external_memory_dma_buf 1 +#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME "VK_EXT_external_memory_dma_buf" + + +// VK_EXT_queue_family_foreign is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_queue_family_foreign 1 +#define VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION 1 +#define VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME "VK_EXT_queue_family_foreign" +#define VK_QUEUE_FAMILY_FOREIGN_EXT (~2U) + + +// VK_EXT_debug_utils is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_debug_utils 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT) +#define VK_EXT_DEBUG_UTILS_SPEC_VERSION 2 +#define VK_EXT_DEBUG_UTILS_EXTENSION_NAME "VK_EXT_debug_utils" +typedef VkFlags VkDebugUtilsMessengerCallbackDataFlagsEXT; + +typedef enum VkDebugUtilsMessageSeverityFlagBitsEXT { + VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT = 0x00000001, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT = 0x00000010, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT = 0x00000100, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT = 0x00001000, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugUtilsMessageSeverityFlagBitsEXT; + +typedef enum VkDebugUtilsMessageTypeFlagBitsEXT { + VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT = 0x00000001, + VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT = 0x00000002, + VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT = 0x00000004, + VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT = 0x00000008, + VK_DEBUG_UTILS_MESSAGE_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugUtilsMessageTypeFlagBitsEXT; +typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT; +typedef VkFlags VkDebugUtilsMessageSeverityFlagsEXT; +typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT; +typedef struct VkDebugUtilsLabelEXT { + VkStructureType sType; + const void* pNext; + const char* pLabelName; + float color[4]; +} VkDebugUtilsLabelEXT; + +typedef struct VkDebugUtilsObjectNameInfoEXT { + VkStructureType sType; + const void* pNext; + VkObjectType objectType; + uint64_t objectHandle; + const char* pObjectName; +} VkDebugUtilsObjectNameInfoEXT; + +typedef struct VkDebugUtilsMessengerCallbackDataEXT { + VkStructureType sType; + const void* pNext; + VkDebugUtilsMessengerCallbackDataFlagsEXT flags; + const char* pMessageIdName; + int32_t messageIdNumber; + const char* pMessage; + uint32_t queueLabelCount; + const VkDebugUtilsLabelEXT* pQueueLabels; + uint32_t cmdBufLabelCount; + const VkDebugUtilsLabelEXT* pCmdBufLabels; + uint32_t objectCount; + const VkDebugUtilsObjectNameInfoEXT* pObjects; +} VkDebugUtilsMessengerCallbackDataEXT; + +typedef VkBool32 (VKAPI_PTR *PFN_vkDebugUtilsMessengerCallbackEXT)( + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, + void* pUserData); + +typedef struct VkDebugUtilsMessengerCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugUtilsMessengerCreateFlagsEXT flags; + VkDebugUtilsMessageSeverityFlagsEXT messageSeverity; + VkDebugUtilsMessageTypeFlagsEXT messageType; + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback; + void* pUserData; +} VkDebugUtilsMessengerCreateInfoEXT; + +typedef struct VkDebugUtilsObjectTagInfoEXT { + VkStructureType sType; + const void* pNext; + VkObjectType objectType; + uint64_t objectHandle; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkDebugUtilsObjectTagInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectNameEXT)(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo); +typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectTagEXT)(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo); +typedef void (VKAPI_PTR *PFN_vkQueueBeginDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkQueueEndDebugUtilsLabelEXT)(VkQueue queue); +typedef void (VKAPI_PTR *PFN_vkQueueInsertDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef 
void (VKAPI_PTR *PFN_vkCmdBeginDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdInsertDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugUtilsMessengerEXT)(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger); +typedef void (VKAPI_PTR *PFN_vkDestroyDebugUtilsMessengerEXT)(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkSubmitDebugUtilsMessageEXT)(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectNameEXT( + VkDevice device, + const VkDebugUtilsObjectNameInfoEXT* pNameInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectTagEXT( + VkDevice device, + const VkDebugUtilsObjectTagInfoEXT* pTagInfo); + +VKAPI_ATTR void VKAPI_CALL vkQueueBeginDebugUtilsLabelEXT( + VkQueue queue, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkQueueEndDebugUtilsLabelEXT( + VkQueue queue); + +VKAPI_ATTR void VKAPI_CALL vkQueueInsertDebugUtilsLabelEXT( + VkQueue queue, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdInsertDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT( + VkInstance instance, + const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugUtilsMessengerEXT* pMessenger); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT( + VkInstance instance, + VkDebugUtilsMessengerEXT messenger, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT( + VkInstance instance, + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); +#endif + + +// VK_EXT_sampler_filter_minmax is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_sampler_filter_minmax 1 +#define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 2 +#define VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax" +typedef VkSamplerReductionMode VkSamplerReductionModeEXT; + +typedef VkSamplerReductionModeCreateInfo VkSamplerReductionModeCreateInfoEXT; + +typedef VkPhysicalDeviceSamplerFilterMinmaxProperties VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT; + + + +// VK_AMD_gpu_shader_int16 is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_gpu_shader_int16 1 +#define VK_AMD_GPU_SHADER_INT16_SPEC_VERSION 2 +#define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16" + + +// VK_AMD_mixed_attachment_samples is a preprocessor guard. Do not pass it to API calls. 
+#define VK_AMD_mixed_attachment_samples 1 +#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1 +#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples" +typedef struct VkAttachmentSampleCountInfoAMD { + VkStructureType sType; + const void* pNext; + uint32_t colorAttachmentCount; + const VkSampleCountFlagBits* pColorAttachmentSamples; + VkSampleCountFlagBits depthStencilAttachmentSamples; +} VkAttachmentSampleCountInfoAMD; + + + +// VK_AMD_shader_fragment_mask is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_fragment_mask 1 +#define VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION 1 +#define VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME "VK_AMD_shader_fragment_mask" + + +// VK_EXT_inline_uniform_block is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_inline_uniform_block 1 +#define VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION 1 +#define VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME "VK_EXT_inline_uniform_block" +typedef VkPhysicalDeviceInlineUniformBlockFeatures VkPhysicalDeviceInlineUniformBlockFeaturesEXT; + +typedef VkPhysicalDeviceInlineUniformBlockProperties VkPhysicalDeviceInlineUniformBlockPropertiesEXT; + +typedef VkWriteDescriptorSetInlineUniformBlock VkWriteDescriptorSetInlineUniformBlockEXT; + +typedef VkDescriptorPoolInlineUniformBlockCreateInfo VkDescriptorPoolInlineUniformBlockCreateInfoEXT; + + + +// VK_EXT_shader_stencil_export is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_stencil_export 1 +#define VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION 1 +#define VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME "VK_EXT_shader_stencil_export" + + +// VK_EXT_sample_locations is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_sample_locations 1 +#define VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION 1 +#define VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME "VK_EXT_sample_locations" +typedef struct VkSampleLocationEXT { + float x; + float y; +} VkSampleLocationEXT; + +typedef struct VkSampleLocationsInfoEXT { + VkStructureType sType; + const void* pNext; + VkSampleCountFlagBits sampleLocationsPerPixel; + VkExtent2D sampleLocationGridSize; + uint32_t sampleLocationsCount; + const VkSampleLocationEXT* pSampleLocations; +} VkSampleLocationsInfoEXT; + +typedef struct VkAttachmentSampleLocationsEXT { + uint32_t attachmentIndex; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkAttachmentSampleLocationsEXT; + +typedef struct VkSubpassSampleLocationsEXT { + uint32_t subpassIndex; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkSubpassSampleLocationsEXT; + +typedef struct VkRenderPassSampleLocationsBeginInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t attachmentInitialSampleLocationsCount; + const VkAttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations; + uint32_t postSubpassSampleLocationsCount; + const VkSubpassSampleLocationsEXT* pPostSubpassSampleLocations; +} VkRenderPassSampleLocationsBeginInfoEXT; + +typedef struct VkPipelineSampleLocationsStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 sampleLocationsEnable; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkPipelineSampleLocationsStateCreateInfoEXT; + +typedef struct VkPhysicalDeviceSampleLocationsPropertiesEXT { + VkStructureType sType; + void* pNext; + VkSampleCountFlags sampleLocationSampleCounts; + VkExtent2D maxSampleLocationGridSize; + float sampleLocationCoordinateRange[2]; + uint32_t sampleLocationSubPixelBits; + VkBool32 variableSampleLocations; +} 
VkPhysicalDeviceSampleLocationsPropertiesEXT; + +typedef struct VkMultisamplePropertiesEXT { + VkStructureType sType; + void* pNext; + VkExtent2D maxSampleLocationGridSize; +} VkMultisamplePropertiesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEXT)(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT)(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEXT( + VkCommandBuffer commandBuffer, + const VkSampleLocationsInfoEXT* pSampleLocationsInfo); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMultisamplePropertiesEXT( + VkPhysicalDevice physicalDevice, + VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT* pMultisampleProperties); +#endif + + +// VK_EXT_blend_operation_advanced is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_blend_operation_advanced 1 +#define VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION 2 +#define VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME "VK_EXT_blend_operation_advanced" + +typedef enum VkBlendOverlapEXT { + VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0, + VK_BLEND_OVERLAP_DISJOINT_EXT = 1, + VK_BLEND_OVERLAP_CONJOINT_EXT = 2, + VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF +} VkBlendOverlapEXT; +typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 advancedBlendCoherentOperations; +} VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; + +typedef struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t advancedBlendMaxColorAttachments; + VkBool32 advancedBlendIndependentBlend; + VkBool32 advancedBlendNonPremultipliedSrcColor; + VkBool32 advancedBlendNonPremultipliedDstColor; + VkBool32 advancedBlendCorrelatedOverlap; + VkBool32 advancedBlendAllOperations; +} VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; + +typedef struct VkPipelineColorBlendAdvancedStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 srcPremultiplied; + VkBool32 dstPremultiplied; + VkBlendOverlapEXT blendOverlap; +} VkPipelineColorBlendAdvancedStateCreateInfoEXT; + + + +// VK_NV_fragment_coverage_to_color is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_fragment_coverage_to_color 1 +#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME "VK_NV_fragment_coverage_to_color" +typedef VkFlags VkPipelineCoverageToColorStateCreateFlagsNV; +typedef struct VkPipelineCoverageToColorStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageToColorStateCreateFlagsNV flags; + VkBool32 coverageToColorEnable; + uint32_t coverageToColorLocation; +} VkPipelineCoverageToColorStateCreateInfoNV; + + + +// VK_NV_framebuffer_mixed_samples is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_framebuffer_mixed_samples 1 +#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION 1 +#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME "VK_NV_framebuffer_mixed_samples" + +typedef enum VkCoverageModulationModeNV { + VK_COVERAGE_MODULATION_MODE_NONE_NV = 0, + VK_COVERAGE_MODULATION_MODE_RGB_NV = 1, + VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2, + VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3, + VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoverageModulationModeNV; +typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV; +typedef struct VkPipelineCoverageModulationStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageModulationStateCreateFlagsNV flags; + VkCoverageModulationModeNV coverageModulationMode; + VkBool32 coverageModulationTableEnable; + uint32_t coverageModulationTableCount; + const float* pCoverageModulationTable; +} VkPipelineCoverageModulationStateCreateInfoNV; + +typedef VkAttachmentSampleCountInfoAMD VkAttachmentSampleCountInfoNV; + + + +// VK_NV_fill_rectangle is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_fill_rectangle 1 +#define VK_NV_FILL_RECTANGLE_SPEC_VERSION 1 +#define VK_NV_FILL_RECTANGLE_EXTENSION_NAME "VK_NV_fill_rectangle" + + +// VK_NV_shader_sm_builtins is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_shader_sm_builtins 1 +#define VK_NV_SHADER_SM_BUILTINS_SPEC_VERSION 1 +#define VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME "VK_NV_shader_sm_builtins" +typedef struct VkPhysicalDeviceShaderSMBuiltinsPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t shaderSMCount; + uint32_t shaderWarpsPerSM; +} VkPhysicalDeviceShaderSMBuiltinsPropertiesNV; + +typedef struct VkPhysicalDeviceShaderSMBuiltinsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 shaderSMBuiltins; +} VkPhysicalDeviceShaderSMBuiltinsFeaturesNV; + + + +// VK_EXT_post_depth_coverage is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_post_depth_coverage 1 +#define VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION 1 +#define VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME "VK_EXT_post_depth_coverage" + + +// VK_EXT_image_drm_format_modifier is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_image_drm_format_modifier 1 +#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION 2 +#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME "VK_EXT_image_drm_format_modifier" +typedef struct VkDrmFormatModifierPropertiesEXT { + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + VkFormatFeatureFlags drmFormatModifierTilingFeatures; +} VkDrmFormatModifierPropertiesEXT; + +typedef struct VkDrmFormatModifierPropertiesListEXT { + VkStructureType sType; + void* pNext; + uint32_t drmFormatModifierCount; + VkDrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties; +} VkDrmFormatModifierPropertiesListEXT; + +typedef struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT { + VkStructureType sType; + const void* pNext; + uint64_t drmFormatModifier; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; +} VkPhysicalDeviceImageDrmFormatModifierInfoEXT; + +typedef struct VkImageDrmFormatModifierListCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t drmFormatModifierCount; + const uint64_t* pDrmFormatModifiers; +} VkImageDrmFormatModifierListCreateInfoEXT; + +typedef struct VkImageDrmFormatModifierExplicitCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + const VkSubresourceLayout* pPlaneLayouts; +} VkImageDrmFormatModifierExplicitCreateInfoEXT; + +typedef struct VkImageDrmFormatModifierPropertiesEXT { + VkStructureType sType; + void* pNext; + uint64_t drmFormatModifier; +} VkImageDrmFormatModifierPropertiesEXT; + +typedef struct VkDrmFormatModifierProperties2EXT { + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + VkFormatFeatureFlags2 drmFormatModifierTilingFeatures; +} VkDrmFormatModifierProperties2EXT; + +typedef struct VkDrmFormatModifierPropertiesList2EXT { + VkStructureType sType; + void* pNext; + uint32_t drmFormatModifierCount; + VkDrmFormatModifierProperties2EXT* pDrmFormatModifierProperties; +} VkDrmFormatModifierPropertiesList2EXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetImageDrmFormatModifierPropertiesEXT)(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageDrmFormatModifierPropertiesEXT( + VkDevice device, + VkImage image, + VkImageDrmFormatModifierPropertiesEXT* pProperties); +#endif + + +// VK_EXT_validation_cache is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_validation_cache 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT) +#define VK_EXT_VALIDATION_CACHE_SPEC_VERSION 1 +#define VK_EXT_VALIDATION_CACHE_EXTENSION_NAME "VK_EXT_validation_cache" + +typedef enum VkValidationCacheHeaderVersionEXT { + VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT = 1, + VK_VALIDATION_CACHE_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationCacheHeaderVersionEXT; +typedef VkFlags VkValidationCacheCreateFlagsEXT; +typedef struct VkValidationCacheCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkValidationCacheCreateFlagsEXT flags; + size_t initialDataSize; + const void* pInitialData; +} VkValidationCacheCreateInfoEXT; + +typedef struct VkShaderModuleValidationCacheCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkValidationCacheEXT validationCache; +} VkShaderModuleValidationCacheCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateValidationCacheEXT)(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache); +typedef void (VKAPI_PTR *PFN_vkDestroyValidationCacheEXT)(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkMergeValidationCachesEXT)(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches); +typedef VkResult (VKAPI_PTR *PFN_vkGetValidationCacheDataEXT)(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateValidationCacheEXT( + VkDevice device, + const VkValidationCacheCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkValidationCacheEXT* pValidationCache); + +VKAPI_ATTR void VKAPI_CALL vkDestroyValidationCacheEXT( + VkDevice device, + VkValidationCacheEXT validationCache, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkMergeValidationCachesEXT( + VkDevice device, + VkValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VkValidationCacheEXT* pSrcCaches); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetValidationCacheDataEXT( + VkDevice device, + VkValidationCacheEXT validationCache, + size_t* pDataSize, + void* pData); +#endif + + +// VK_EXT_descriptor_indexing is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_descriptor_indexing 1 +#define VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION 2 +#define VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME "VK_EXT_descriptor_indexing" +typedef VkDescriptorBindingFlagBits VkDescriptorBindingFlagBitsEXT; + +typedef VkDescriptorBindingFlags VkDescriptorBindingFlagsEXT; + +typedef VkDescriptorSetLayoutBindingFlagsCreateInfo VkDescriptorSetLayoutBindingFlagsCreateInfoEXT; + +typedef VkPhysicalDeviceDescriptorIndexingFeatures VkPhysicalDeviceDescriptorIndexingFeaturesEXT; + +typedef VkPhysicalDeviceDescriptorIndexingProperties VkPhysicalDeviceDescriptorIndexingPropertiesEXT; + +typedef VkDescriptorSetVariableDescriptorCountAllocateInfo VkDescriptorSetVariableDescriptorCountAllocateInfoEXT; + +typedef VkDescriptorSetVariableDescriptorCountLayoutSupport VkDescriptorSetVariableDescriptorCountLayoutSupportEXT; + + + +// VK_EXT_shader_viewport_index_layer is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_shader_viewport_index_layer 1 +#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION 1 +#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME "VK_EXT_shader_viewport_index_layer" + + +// VK_NV_shading_rate_image is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_shading_rate_image 1 +#define VK_NV_SHADING_RATE_IMAGE_SPEC_VERSION 3 +#define VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME "VK_NV_shading_rate_image" + +typedef enum VkShadingRatePaletteEntryNV { + VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV = 0, + VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV = 1, + VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV = 2, + VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV = 3, + VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV = 4, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV = 5, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV = 6, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV = 7, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV = 8, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV = 9, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV = 10, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV = 11, + VK_SHADING_RATE_PALETTE_ENTRY_MAX_ENUM_NV = 0x7FFFFFFF +} VkShadingRatePaletteEntryNV; + +typedef enum VkCoarseSampleOrderTypeNV { + VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV = 0, + VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV = 1, + VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV = 2, + VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV = 3, + VK_COARSE_SAMPLE_ORDER_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoarseSampleOrderTypeNV; +typedef struct VkShadingRatePaletteNV { + uint32_t shadingRatePaletteEntryCount; + const VkShadingRatePaletteEntryNV* pShadingRatePaletteEntries; +} VkShadingRatePaletteNV; + +typedef struct VkPipelineViewportShadingRateImageStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 shadingRateImageEnable; + uint32_t viewportCount; + const VkShadingRatePaletteNV* pShadingRatePalettes; +} VkPipelineViewportShadingRateImageStateCreateInfoNV; + +typedef struct VkPhysicalDeviceShadingRateImageFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 shadingRateImage; + VkBool32 shadingRateCoarseSampleOrder; +} VkPhysicalDeviceShadingRateImageFeaturesNV; + +typedef struct VkPhysicalDeviceShadingRateImagePropertiesNV { + VkStructureType sType; + void* pNext; + VkExtent2D shadingRateTexelSize; + uint32_t shadingRatePaletteSize; + uint32_t shadingRateMaxCoarseSamples; +} VkPhysicalDeviceShadingRateImagePropertiesNV; + +typedef struct VkCoarseSampleLocationNV { + uint32_t pixelX; + uint32_t pixelY; + uint32_t sample; +} VkCoarseSampleLocationNV; + +typedef struct VkCoarseSampleOrderCustomNV { + VkShadingRatePaletteEntryNV shadingRate; + uint32_t sampleCount; + uint32_t sampleLocationCount; + const VkCoarseSampleLocationNV* pSampleLocations; +} VkCoarseSampleOrderCustomNV; + +typedef struct VkPipelineViewportCoarseSampleOrderStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkCoarseSampleOrderTypeNV sampleOrderType; + uint32_t customSampleOrderCount; + const VkCoarseSampleOrderCustomNV* pCustomSampleOrders; +} VkPipelineViewportCoarseSampleOrderStateCreateInfoNV; + +typedef void (VKAPI_PTR *PFN_vkCmdBindShadingRateImageNV)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportShadingRatePaletteNV)(VkCommandBuffer commandBuffer, 
uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoarseSampleOrderNV)(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindShadingRateImageNV( + VkCommandBuffer commandBuffer, + VkImageView imageView, + VkImageLayout imageLayout); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportShadingRatePaletteNV( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkShadingRatePaletteNV* pShadingRatePalettes); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoarseSampleOrderNV( + VkCommandBuffer commandBuffer, + VkCoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VkCoarseSampleOrderCustomNV* pCustomSampleOrders); +#endif + + +// VK_NV_ray_tracing is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_ray_tracing 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureNV) +#define VK_NV_RAY_TRACING_SPEC_VERSION 3 +#define VK_NV_RAY_TRACING_EXTENSION_NAME "VK_NV_ray_tracing" +#define VK_SHADER_UNUSED_KHR (~0U) +#define VK_SHADER_UNUSED_NV VK_SHADER_UNUSED_KHR + +typedef enum VkRayTracingShaderGroupTypeKHR { + VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR = 0, + VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR = 1, + VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR = 2, + VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkRayTracingShaderGroupTypeKHR; +typedef VkRayTracingShaderGroupTypeKHR VkRayTracingShaderGroupTypeNV; + + +typedef enum VkGeometryTypeKHR { + VK_GEOMETRY_TYPE_TRIANGLES_KHR = 0, + VK_GEOMETRY_TYPE_AABBS_KHR = 1, + VK_GEOMETRY_TYPE_INSTANCES_KHR = 2, + VK_GEOMETRY_TYPE_SPHERES_NV = 1000429004, + VK_GEOMETRY_TYPE_LINEAR_SWEPT_SPHERES_NV = 1000429005, + VK_GEOMETRY_TYPE_TRIANGLES_NV = VK_GEOMETRY_TYPE_TRIANGLES_KHR, + VK_GEOMETRY_TYPE_AABBS_NV = VK_GEOMETRY_TYPE_AABBS_KHR, + VK_GEOMETRY_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryTypeKHR; +typedef VkGeometryTypeKHR VkGeometryTypeNV; + + +typedef enum VkAccelerationStructureTypeKHR { + VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR = 0, + VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR = 1, + VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR = 2, + VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, + VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR, + VK_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureTypeKHR; +typedef VkAccelerationStructureTypeKHR VkAccelerationStructureTypeNV; + + +typedef enum VkCopyAccelerationStructureModeKHR { + VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR = 0, + VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR = 1, + VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR = 2, + VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR = 3, + VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR, + VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV = 
VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR, + VK_COPY_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkCopyAccelerationStructureModeKHR; +typedef VkCopyAccelerationStructureModeKHR VkCopyAccelerationStructureModeNV; + + +typedef enum VkAccelerationStructureMemoryRequirementsTypeNV { + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = 0, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = 1, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = 2, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkAccelerationStructureMemoryRequirementsTypeNV; + +typedef enum VkGeometryFlagBitsKHR { + VK_GEOMETRY_OPAQUE_BIT_KHR = 0x00000001, + VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR = 0x00000002, + VK_GEOMETRY_OPAQUE_BIT_NV = VK_GEOMETRY_OPAQUE_BIT_KHR, + VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR, + VK_GEOMETRY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryFlagBitsKHR; +typedef VkFlags VkGeometryFlagsKHR; +typedef VkGeometryFlagsKHR VkGeometryFlagsNV; + +typedef VkGeometryFlagBitsKHR VkGeometryFlagBitsNV; + + +typedef enum VkGeometryInstanceFlagBitsKHR { + VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR = 0x00000001, + VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR = 0x00000002, + VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR = 0x00000004, + VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR = 0x00000008, + VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT = 0x00000010, + VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT = 0x00000020, + VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR, + VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR, + VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryInstanceFlagBitsKHR; +typedef VkFlags VkGeometryInstanceFlagsKHR; +typedef VkGeometryInstanceFlagsKHR VkGeometryInstanceFlagsNV; + +typedef VkGeometryInstanceFlagBitsKHR VkGeometryInstanceFlagBitsNV; + + +typedef enum VkBuildAccelerationStructureFlagBitsKHR { + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR = 0x00000001, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR = 0x00000002, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR = 0x00000004, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR = 0x00000008, + VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR = 0x00000010, + VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV = 0x00000020, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT = 0x00000040, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT = 0x00000080, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT = 0x00000100, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISPLACEMENT_MICROMAP_UPDATE_NV = 0x00000200, +#endif + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DATA_ACCESS_KHR = 0x00000800, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = 
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkBuildAccelerationStructureFlagBitsKHR; +typedef VkFlags VkBuildAccelerationStructureFlagsKHR; +typedef VkBuildAccelerationStructureFlagsKHR VkBuildAccelerationStructureFlagsNV; + +typedef VkBuildAccelerationStructureFlagBitsKHR VkBuildAccelerationStructureFlagBitsNV; + +typedef struct VkRayTracingShaderGroupCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkRayTracingShaderGroupTypeKHR type; + uint32_t generalShader; + uint32_t closestHitShader; + uint32_t anyHitShader; + uint32_t intersectionShader; +} VkRayTracingShaderGroupCreateInfoNV; + +typedef struct VkRayTracingPipelineCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + uint32_t groupCount; + const VkRayTracingShaderGroupCreateInfoNV* pGroups; + uint32_t maxRecursionDepth; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkRayTracingPipelineCreateInfoNV; + +typedef struct VkGeometryTrianglesNV { + VkStructureType sType; + const void* pNext; + VkBuffer vertexData; + VkDeviceSize vertexOffset; + uint32_t vertexCount; + VkDeviceSize vertexStride; + VkFormat vertexFormat; + VkBuffer indexData; + VkDeviceSize indexOffset; + uint32_t indexCount; + VkIndexType indexType; + VkBuffer transformData; + VkDeviceSize transformOffset; +} VkGeometryTrianglesNV; + +typedef struct VkGeometryAABBNV { + VkStructureType sType; + const void* pNext; + VkBuffer aabbData; + uint32_t numAABBs; + uint32_t stride; + VkDeviceSize offset; +} VkGeometryAABBNV; + +typedef struct VkGeometryDataNV { + VkGeometryTrianglesNV triangles; + VkGeometryAABBNV aabbs; +} VkGeometryDataNV; + +typedef struct VkGeometryNV { + VkStructureType sType; + const void* pNext; + VkGeometryTypeKHR geometryType; + VkGeometryDataNV geometry; + VkGeometryFlagsKHR flags; +} VkGeometryNV; + +typedef struct VkAccelerationStructureInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureTypeNV type; + VkBuildAccelerationStructureFlagsNV flags; + uint32_t instanceCount; + uint32_t geometryCount; + const VkGeometryNV* pGeometries; +} VkAccelerationStructureInfoNV; + +typedef struct VkAccelerationStructureCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceSize compactedSize; + VkAccelerationStructureInfoNV info; +} VkAccelerationStructureCreateInfoNV; + +typedef struct VkBindAccelerationStructureMemoryInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureNV accelerationStructure; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; +} VkBindAccelerationStructureMemoryInfoNV; + +typedef struct VkWriteDescriptorSetAccelerationStructureNV { + VkStructureType sType; + const void* pNext; + uint32_t accelerationStructureCount; + const VkAccelerationStructureNV* pAccelerationStructures; +} VkWriteDescriptorSetAccelerationStructureNV; + +typedef struct VkAccelerationStructureMemoryRequirementsInfoNV { + VkStructureType sType; + const 
void* pNext; + VkAccelerationStructureMemoryRequirementsTypeNV type; + VkAccelerationStructureNV accelerationStructure; +} VkAccelerationStructureMemoryRequirementsInfoNV; + +typedef struct VkPhysicalDeviceRayTracingPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t shaderGroupHandleSize; + uint32_t maxRecursionDepth; + uint32_t maxShaderGroupStride; + uint32_t shaderGroupBaseAlignment; + uint64_t maxGeometryCount; + uint64_t maxInstanceCount; + uint64_t maxTriangleCount; + uint32_t maxDescriptorSetAccelerationStructures; +} VkPhysicalDeviceRayTracingPropertiesNV; + +typedef struct VkTransformMatrixKHR { + float matrix[3][4]; +} VkTransformMatrixKHR; + +typedef VkTransformMatrixKHR VkTransformMatrixNV; + +typedef struct VkAabbPositionsKHR { + float minX; + float minY; + float minZ; + float maxX; + float maxY; + float maxZ; +} VkAabbPositionsKHR; + +typedef VkAabbPositionsKHR VkAabbPositionsNV; + +typedef struct VkAccelerationStructureInstanceKHR { + VkTransformMatrixKHR transform; + uint32_t instanceCustomIndex:24; + uint32_t mask:8; + uint32_t instanceShaderBindingTableRecordOffset:24; + VkGeometryInstanceFlagsKHR flags:8; + uint64_t accelerationStructureReference; +} VkAccelerationStructureInstanceKHR; + +typedef VkAccelerationStructureInstanceKHR VkAccelerationStructureInstanceNV; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureNV)(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure); +typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureMemoryRequirementsNV)(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements); +typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeKHR mode); +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysNV)(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesNV)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, 
uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesNV)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef VkResult (VKAPI_PTR *PFN_vkCompileDeferredNV)(VkDevice device, VkPipeline pipeline, uint32_t shader); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureNV( + VkDevice device, + const VkAccelerationStructureCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkAccelerationStructureNV* pAccelerationStructure); + +VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureNV( + VkDevice device, + VkAccelerationStructureNV accelerationStructure, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV( + VkDevice device, + const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, + VkMemoryRequirements2KHR* pMemoryRequirements); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryNV( + VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureNV( + VkCommandBuffer commandBuffer, + const VkAccelerationStructureInfoNV* pInfo, + VkBuffer instanceData, + VkDeviceSize instanceOffset, + VkBool32 update, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkBuffer scratch, + VkDeviceSize scratchOffset); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureNV( + VkCommandBuffer commandBuffer, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkCopyAccelerationStructureModeKHR mode); + +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysNV( + VkCommandBuffer commandBuffer, + VkBuffer raygenShaderBindingTableBuffer, + VkDeviceSize raygenShaderBindingOffset, + VkBuffer missShaderBindingTableBuffer, + VkDeviceSize missShaderBindingOffset, + VkDeviceSize missShaderBindingStride, + VkBuffer hitShaderBindingTableBuffer, + VkDeviceSize hitShaderBindingOffset, + VkDeviceSize hitShaderBindingStride, + VkBuffer callableShaderBindingTableBuffer, + VkDeviceSize callableShaderBindingOffset, + VkDeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesNV( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoNV* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesNV( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureHandleNV( + VkDevice device, + VkAccelerationStructureNV 
accelerationStructure, + size_t dataSize, + void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesNV( + VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureNV* pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery); + +VKAPI_ATTR VkResult VKAPI_CALL vkCompileDeferredNV( + VkDevice device, + VkPipeline pipeline, + uint32_t shader); +#endif + + +// VK_NV_representative_fragment_test is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_representative_fragment_test 1 +#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION 2 +#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME "VK_NV_representative_fragment_test" +typedef struct VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 representativeFragmentTest; +} VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV; + +typedef struct VkPipelineRepresentativeFragmentTestStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 representativeFragmentTestEnable; +} VkPipelineRepresentativeFragmentTestStateCreateInfoNV; + + + +// VK_EXT_filter_cubic is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_filter_cubic 1 +#define VK_EXT_FILTER_CUBIC_SPEC_VERSION 3 +#define VK_EXT_FILTER_CUBIC_EXTENSION_NAME "VK_EXT_filter_cubic" +typedef struct VkPhysicalDeviceImageViewImageFormatInfoEXT { + VkStructureType sType; + void* pNext; + VkImageViewType imageViewType; +} VkPhysicalDeviceImageViewImageFormatInfoEXT; + +typedef struct VkFilterCubicImageViewImageFormatPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 filterCubic; + VkBool32 filterCubicMinmax; +} VkFilterCubicImageViewImageFormatPropertiesEXT; + + + +// VK_QCOM_render_pass_shader_resolve is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_render_pass_shader_resolve 1 +#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION 4 +#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_EXTENSION_NAME "VK_QCOM_render_pass_shader_resolve" + + +// VK_EXT_global_priority is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_global_priority 1 +#define VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION 2 +#define VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME "VK_EXT_global_priority" +typedef VkQueueGlobalPriority VkQueueGlobalPriorityEXT; + +typedef VkDeviceQueueGlobalPriorityCreateInfo VkDeviceQueueGlobalPriorityCreateInfoEXT; + + + +// VK_EXT_external_memory_host is a preprocessor guard. Do not pass it to API calls. 
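// [Editorial sketch, not part of the vendored Khronos header] Illustrative use of the
// VK_NV_ray_tracing entry points declared above: create a top-level acceleration structure
// and query its memory requirements. The anvil_example_* helper name is hypothetical; a
// valid VkDevice is assumed and prototypes are assumed available (VK_NO_PROTOTYPES undefined).
static inline VkResult anvil_example_create_tlas_nv(VkDevice                   device,
                                                    uint32_t                   instance_count,
                                                    VkAccelerationStructureNV* out_as,
                                                    VkMemoryRequirements2KHR*  out_reqs)
{
    VkAccelerationStructureCreateInfoNV create_info;
    create_info.sType              = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
    create_info.pNext              = NULL;
    create_info.compactedSize      = 0;
    create_info.info.sType         = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
    create_info.info.pNext         = NULL;
    create_info.info.type          = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV;
    create_info.info.flags         = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV;
    create_info.info.instanceCount = instance_count;
    create_info.info.geometryCount = 0;
    create_info.info.pGeometries   = NULL;

    VkResult result = vkCreateAccelerationStructureNV(device, &create_info, NULL, out_as);
    if (result != VK_SUCCESS)
    {
        return result;
    }

    // Ask how much memory the object itself needs before binding it with
    // vkBindAccelerationStructureMemoryNV.
    VkAccelerationStructureMemoryRequirementsInfoNV reqs_info;
    reqs_info.sType                 = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    reqs_info.pNext                 = NULL;
    reqs_info.type                  = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
    reqs_info.accelerationStructure = *out_as;

    out_reqs->sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
    out_reqs->pNext = NULL;

    vkGetAccelerationStructureMemoryRequirementsNV(device, &reqs_info, out_reqs);
    return VK_SUCCESS;
}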
+#define VK_EXT_external_memory_host 1 +#define VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME "VK_EXT_external_memory_host" +typedef struct VkImportMemoryHostPointerInfoEXT { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + void* pHostPointer; +} VkImportMemoryHostPointerInfoEXT; + +typedef struct VkMemoryHostPointerPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryHostPointerPropertiesEXT; + +typedef struct VkPhysicalDeviceExternalMemoryHostPropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize minImportedHostPointerAlignment; +} VkPhysicalDeviceExternalMemoryHostPropertiesEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryHostPointerPropertiesEXT)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void* pHostPointer, + VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); +#endif + + +// VK_AMD_buffer_marker is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_buffer_marker 1 +#define VK_AMD_BUFFER_MARKER_SPEC_VERSION 1 +#define VK_AMD_BUFFER_MARKER_EXTENSION_NAME "VK_AMD_buffer_marker" +typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarkerAMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); +typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarker2AMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD( + VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarker2AMD( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker); +#endif + + +// VK_AMD_pipeline_compiler_control is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_pipeline_compiler_control 1 +#define VK_AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION 1 +#define VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME "VK_AMD_pipeline_compiler_control" + +typedef enum VkPipelineCompilerControlFlagBitsAMD { + VK_PIPELINE_COMPILER_CONTROL_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF +} VkPipelineCompilerControlFlagBitsAMD; +typedef VkFlags VkPipelineCompilerControlFlagsAMD; +typedef struct VkPipelineCompilerControlCreateInfoAMD { + VkStructureType sType; + const void* pNext; + VkPipelineCompilerControlFlagsAMD compilerControlFlags; +} VkPipelineCompilerControlCreateInfoAMD; + + + +// VK_EXT_calibrated_timestamps is a preprocessor guard. Do not pass it to API calls. 
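// [Editorial sketch, not part of the vendored Khronos header] Illustrative use of
// VK_EXT_external_memory_host (declared above): query which memory types can import a
// host allocation before building a VkImportMemoryHostPointerInfoEXT chain. The
// anvil_example_* helper name is hypothetical; host_ptr is assumed to be aligned to
// minImportedHostPointerAlignment.
static inline VkResult anvil_example_query_host_pointer_memory_types(VkDevice  device,
                                                                     void*     host_ptr,
                                                                     uint32_t* out_memory_type_bits)
{
    VkMemoryHostPointerPropertiesEXT props;
    props.sType = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT;
    props.pNext = NULL;

    VkResult result = vkGetMemoryHostPointerPropertiesEXT(device,
                                                          VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
                                                          host_ptr,
                                                          &props);
    if (result == VK_SUCCESS)
    {
        *out_memory_type_bits = props.memoryTypeBits;
    }
    return result;
}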
+#define VK_EXT_calibrated_timestamps 1 +#define VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION 2 +#define VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME "VK_EXT_calibrated_timestamps" +typedef VkTimeDomainKHR VkTimeDomainEXT; + +typedef VkCalibratedTimestampInfoKHR VkCalibratedTimestampInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT)(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainKHR* pTimeDomains); +typedef VkResult (VKAPI_PTR *PFN_vkGetCalibratedTimestampsEXT)(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoKHR* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + VkPhysicalDevice physicalDevice, + uint32_t* pTimeDomainCount, + VkTimeDomainKHR* pTimeDomains); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetCalibratedTimestampsEXT( + VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoKHR* pTimestampInfos, + uint64_t* pTimestamps, + uint64_t* pMaxDeviation); +#endif + + +// VK_AMD_shader_core_properties is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_core_properties 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION 2 +#define VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_AMD_shader_core_properties" +typedef struct VkPhysicalDeviceShaderCorePropertiesAMD { + VkStructureType sType; + void* pNext; + uint32_t shaderEngineCount; + uint32_t shaderArraysPerEngineCount; + uint32_t computeUnitsPerShaderArray; + uint32_t simdPerComputeUnit; + uint32_t wavefrontsPerSimd; + uint32_t wavefrontSize; + uint32_t sgprsPerSimd; + uint32_t minSgprAllocation; + uint32_t maxSgprAllocation; + uint32_t sgprAllocationGranularity; + uint32_t vgprsPerSimd; + uint32_t minVgprAllocation; + uint32_t maxVgprAllocation; + uint32_t vgprAllocationGranularity; +} VkPhysicalDeviceShaderCorePropertiesAMD; + + + +// VK_AMD_memory_overallocation_behavior is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_memory_overallocation_behavior 1 +#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION 1 +#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME "VK_AMD_memory_overallocation_behavior" + +typedef enum VkMemoryOverallocationBehaviorAMD { + VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD = 0, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD = 1, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD = 2, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_MAX_ENUM_AMD = 0x7FFFFFFF +} VkMemoryOverallocationBehaviorAMD; +typedef struct VkDeviceMemoryOverallocationCreateInfoAMD { + VkStructureType sType; + const void* pNext; + VkMemoryOverallocationBehaviorAMD overallocationBehavior; +} VkDeviceMemoryOverallocationCreateInfoAMD; + + + +// VK_EXT_vertex_attribute_divisor is a preprocessor guard. Do not pass it to API calls. 
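// [Editorial sketch, not part of the vendored Khronos header] Illustrative use of
// VK_EXT_calibrated_timestamps (declared above): sample the device and host clocks in one
// call. The anvil_example_* helper name is hypothetical; it assumes the implementation
// reported both VK_TIME_DOMAIN_DEVICE_KHR and VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR via
// vkGetPhysicalDeviceCalibrateableTimeDomainsEXT.
static inline VkResult anvil_example_get_calibrated_timestamps(VkDevice  device,
                                                                uint64_t* out_device_ticks,
                                                                uint64_t* out_host_ns,
                                                                uint64_t* out_max_deviation)
{
    VkCalibratedTimestampInfoKHR infos[2];
    infos[0].sType      = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR;
    infos[0].pNext      = NULL;
    infos[0].timeDomain = VK_TIME_DOMAIN_DEVICE_KHR;
    infos[1].sType      = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR;
    infos[1].pNext      = NULL;
    infos[1].timeDomain = VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR;

    uint64_t timestamps[2];
    VkResult result = vkGetCalibratedTimestampsEXT(device, 2, infos, timestamps, out_max_deviation);

    if (result == VK_SUCCESS)
    {
        *out_device_ticks = timestamps[0];
        *out_host_ns      = timestamps[1];
    }
    return result;
}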
+#define VK_EXT_vertex_attribute_divisor 1 +#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 3 +#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_EXT_vertex_attribute_divisor" +typedef struct VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxVertexAttribDivisor; +} VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT; + +typedef VkVertexInputBindingDivisorDescription VkVertexInputBindingDivisorDescriptionEXT; + +typedef VkPipelineVertexInputDivisorStateCreateInfo VkPipelineVertexInputDivisorStateCreateInfoEXT; + +typedef VkPhysicalDeviceVertexAttributeDivisorFeatures VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT; + + + +// VK_EXT_pipeline_creation_feedback is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_creation_feedback 1 +#define VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME "VK_EXT_pipeline_creation_feedback" +typedef VkPipelineCreationFeedbackFlagBits VkPipelineCreationFeedbackFlagBitsEXT; + +typedef VkPipelineCreationFeedbackFlags VkPipelineCreationFeedbackFlagsEXT; + +typedef VkPipelineCreationFeedbackCreateInfo VkPipelineCreationFeedbackCreateInfoEXT; + +typedef VkPipelineCreationFeedback VkPipelineCreationFeedbackEXT; + + + +// VK_NV_shader_subgroup_partitioned is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_shader_subgroup_partitioned 1 +#define VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION 1 +#define VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME "VK_NV_shader_subgroup_partitioned" + + +// VK_NV_compute_shader_derivatives is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_compute_shader_derivatives 1 +#define VK_NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1 +#define VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_NV_compute_shader_derivatives" +typedef VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR VkPhysicalDeviceComputeShaderDerivativesFeaturesNV; + + + +// VK_NV_mesh_shader is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_mesh_shader 1 +#define VK_NV_MESH_SHADER_SPEC_VERSION 1 +#define VK_NV_MESH_SHADER_EXTENSION_NAME "VK_NV_mesh_shader" +typedef struct VkPhysicalDeviceMeshShaderFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 taskShader; + VkBool32 meshShader; +} VkPhysicalDeviceMeshShaderFeaturesNV; + +typedef struct VkPhysicalDeviceMeshShaderPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxDrawMeshTasksCount; + uint32_t maxTaskWorkGroupInvocations; + uint32_t maxTaskWorkGroupSize[3]; + uint32_t maxTaskTotalMemorySize; + uint32_t maxTaskOutputCount; + uint32_t maxMeshWorkGroupInvocations; + uint32_t maxMeshWorkGroupSize[3]; + uint32_t maxMeshTotalMemorySize; + uint32_t maxMeshOutputVertices; + uint32_t maxMeshOutputPrimitives; + uint32_t maxMeshMultiviewViewCount; + uint32_t meshOutputPerVertexGranularity; + uint32_t meshOutputPerPrimitiveGranularity; +} VkPhysicalDeviceMeshShaderPropertiesNV; + +typedef struct VkDrawMeshTasksIndirectCommandNV { + uint32_t taskCount; + uint32_t firstTask; +} VkDrawMeshTasksIndirectCommandNV; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksNV)(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectCountNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksNV( + VkCommandBuffer commandBuffer, + uint32_t taskCount, + uint32_t firstTask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectNV( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountNV( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + + +// VK_NV_fragment_shader_barycentric is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_fragment_shader_barycentric 1 +#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_NV_fragment_shader_barycentric" +typedef VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV; + + + +// VK_NV_shader_image_footprint is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_shader_image_footprint 1 +#define VK_NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION 2 +#define VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME "VK_NV_shader_image_footprint" +typedef struct VkPhysicalDeviceShaderImageFootprintFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 imageFootprint; +} VkPhysicalDeviceShaderImageFootprintFeaturesNV; + + + +// VK_NV_scissor_exclusive is a preprocessor guard. Do not pass it to API calls. 
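// [Editorial sketch, not part of the vendored Khronos header] Illustrative use of
// VK_NV_mesh_shader (declared above): record a direct and an indirect mesh-task draw.
// The anvil_example_* helper name is hypothetical; it assumes a command buffer in the
// recording state with a task/mesh graphics pipeline bound, and an indirect buffer holding
// VkDrawMeshTasksIndirectCommandNV entries.
static inline void anvil_example_record_mesh_draws_nv(VkCommandBuffer cmd_buffer,
                                                       uint32_t        task_count,
                                                       VkBuffer        indirect_buffer,
                                                       VkDeviceSize    indirect_offset,
                                                       uint32_t        draw_count)
{
    // Launch task_count task-shader workgroups starting at index 0.
    vkCmdDrawMeshTasksNV(cmd_buffer, task_count, 0);

    // Same launch, but with the parameters sourced from GPU memory.
    vkCmdDrawMeshTasksIndirectNV(cmd_buffer,
                                 indirect_buffer,
                                 indirect_offset,
                                 draw_count,
                                 sizeof(VkDrawMeshTasksIndirectCommandNV));
}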
+#define VK_NV_scissor_exclusive 1 +#define VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION 2 +#define VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME "VK_NV_scissor_exclusive" +typedef struct VkPipelineViewportExclusiveScissorStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t exclusiveScissorCount; + const VkRect2D* pExclusiveScissors; +} VkPipelineViewportExclusiveScissorStateCreateInfoNV; + +typedef struct VkPhysicalDeviceExclusiveScissorFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 exclusiveScissor; +} VkPhysicalDeviceExclusiveScissorFeaturesNV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetExclusiveScissorEnableNV)(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkBool32* pExclusiveScissorEnables); +typedef void (VKAPI_PTR *PFN_vkCmdSetExclusiveScissorNV)(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetExclusiveScissorEnableNV( + VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkBool32* pExclusiveScissorEnables); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetExclusiveScissorNV( + VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkRect2D* pExclusiveScissors); +#endif + + +// VK_NV_device_diagnostic_checkpoints is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_device_diagnostic_checkpoints 1 +#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION 2 +#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME "VK_NV_device_diagnostic_checkpoints" +typedef struct VkQueueFamilyCheckpointPropertiesNV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags checkpointExecutionStageMask; +} VkQueueFamilyCheckpointPropertiesNV; + +typedef struct VkCheckpointDataNV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlagBits stage; + void* pCheckpointMarker; +} VkCheckpointDataNV; + +typedef struct VkQueueFamilyCheckpointProperties2NV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags2 checkpointExecutionStageMask; +} VkQueueFamilyCheckpointProperties2NV; + +typedef struct VkCheckpointData2NV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags2 stage; + void* pCheckpointMarker; +} VkCheckpointData2NV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetCheckpointNV)(VkCommandBuffer commandBuffer, const void* pCheckpointMarker); +typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointDataNV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData); +typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointData2NV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetCheckpointNV( + VkCommandBuffer commandBuffer, + const void* pCheckpointMarker); + +VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointDataNV( + VkQueue queue, + uint32_t* pCheckpointDataCount, + VkCheckpointDataNV* pCheckpointData); + +VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV( + VkQueue queue, + uint32_t* pCheckpointDataCount, + VkCheckpointData2NV* pCheckpointData); +#endif + + +// VK_INTEL_shader_integer_functions2 is a preprocessor guard. Do not pass it to API calls. 
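// [Editorial sketch, not part of the vendored Khronos header] Illustrative use of
// VK_NV_device_diagnostic_checkpoints (declared above): tag a command buffer with an
// application-chosen marker, then, typically after VK_ERROR_DEVICE_LOST, query how many
// checkpoints the queue recorded. The anvil_example_* helper names are hypothetical.
static inline void anvil_example_set_checkpoint_nv(VkCommandBuffer cmd_buffer,
                                                   const char*     marker_string)
{
    // The marker is an opaque pointer; a pointer to a static string is a common choice.
    vkCmdSetCheckpointNV(cmd_buffer, marker_string);
}

static inline uint32_t anvil_example_count_checkpoints_nv(VkQueue queue)
{
    uint32_t count = 0;

    // First call with a NULL data pointer only returns the checkpoint count.
    vkGetQueueCheckpointDataNV(queue, &count, NULL);
    return count;
}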
+#define VK_INTEL_shader_integer_functions2 1 +#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_SPEC_VERSION 1 +#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME "VK_INTEL_shader_integer_functions2" +typedef struct VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL { + VkStructureType sType; + void* pNext; + VkBool32 shaderIntegerFunctions2; +} VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + + + +// VK_INTEL_performance_query is a preprocessor guard. Do not pass it to API calls. +#define VK_INTEL_performance_query 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPerformanceConfigurationINTEL) +#define VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION 2 +#define VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME "VK_INTEL_performance_query" + +typedef enum VkPerformanceConfigurationTypeINTEL { + VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL = 0, + VK_PERFORMANCE_CONFIGURATION_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceConfigurationTypeINTEL; + +typedef enum VkQueryPoolSamplingModeINTEL { + VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL = 0, + VK_QUERY_POOL_SAMPLING_MODE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkQueryPoolSamplingModeINTEL; + +typedef enum VkPerformanceOverrideTypeINTEL { + VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL = 0, + VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL = 1, + VK_PERFORMANCE_OVERRIDE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceOverrideTypeINTEL; + +typedef enum VkPerformanceParameterTypeINTEL { + VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL = 0, + VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL = 1, + VK_PERFORMANCE_PARAMETER_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceParameterTypeINTEL; + +typedef enum VkPerformanceValueTypeINTEL { + VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL = 0, + VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL = 1, + VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL = 2, + VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL = 3, + VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL = 4, + VK_PERFORMANCE_VALUE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceValueTypeINTEL; +typedef union VkPerformanceValueDataINTEL { + uint32_t value32; + uint64_t value64; + float valueFloat; + VkBool32 valueBool; + const char* valueString; +} VkPerformanceValueDataINTEL; + +typedef struct VkPerformanceValueINTEL { + VkPerformanceValueTypeINTEL type; + VkPerformanceValueDataINTEL data; +} VkPerformanceValueINTEL; + +typedef struct VkInitializePerformanceApiInfoINTEL { + VkStructureType sType; + const void* pNext; + void* pUserData; +} VkInitializePerformanceApiInfoINTEL; + +typedef struct VkQueryPoolPerformanceQueryCreateInfoINTEL { + VkStructureType sType; + const void* pNext; + VkQueryPoolSamplingModeINTEL performanceCountersSampling; +} VkQueryPoolPerformanceQueryCreateInfoINTEL; + +typedef VkQueryPoolPerformanceQueryCreateInfoINTEL VkQueryPoolCreateInfoINTEL; + +typedef struct VkPerformanceMarkerInfoINTEL { + VkStructureType sType; + const void* pNext; + uint64_t marker; +} VkPerformanceMarkerInfoINTEL; + +typedef struct VkPerformanceStreamMarkerInfoINTEL { + VkStructureType sType; + const void* pNext; + uint32_t marker; +} VkPerformanceStreamMarkerInfoINTEL; + +typedef struct VkPerformanceOverrideInfoINTEL { + VkStructureType sType; + const void* pNext; + VkPerformanceOverrideTypeINTEL type; + VkBool32 enable; + uint64_t parameter; +} VkPerformanceOverrideInfoINTEL; + +typedef struct VkPerformanceConfigurationAcquireInfoINTEL { + VkStructureType sType; + const void* pNext; + VkPerformanceConfigurationTypeINTEL type; +} 
VkPerformanceConfigurationAcquireInfoINTEL; + +typedef VkResult (VKAPI_PTR *PFN_vkInitializePerformanceApiINTEL)(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo); +typedef void (VKAPI_PTR *PFN_vkUninitializePerformanceApiINTEL)(VkDevice device); +typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceStreamMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceOverrideINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo); +typedef VkResult (VKAPI_PTR *PFN_vkAcquirePerformanceConfigurationINTEL)(VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration); +typedef VkResult (VKAPI_PTR *PFN_vkReleasePerformanceConfigurationINTEL)(VkDevice device, VkPerformanceConfigurationINTEL configuration); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSetPerformanceConfigurationINTEL)(VkQueue queue, VkPerformanceConfigurationINTEL configuration); +typedef VkResult (VKAPI_PTR *PFN_vkGetPerformanceParameterINTEL)(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkInitializePerformanceApiINTEL( + VkDevice device, + const VkInitializePerformanceApiInfoINTEL* pInitializeInfo); + +VKAPI_ATTR void VKAPI_CALL vkUninitializePerformanceApiINTEL( + VkDevice device); + +VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceMarkerINTEL( + VkCommandBuffer commandBuffer, + const VkPerformanceMarkerInfoINTEL* pMarkerInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceStreamMarkerINTEL( + VkCommandBuffer commandBuffer, + const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceOverrideINTEL( + VkCommandBuffer commandBuffer, + const VkPerformanceOverrideInfoINTEL* pOverrideInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquirePerformanceConfigurationINTEL( + VkDevice device, + const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, + VkPerformanceConfigurationINTEL* pConfiguration); + +VKAPI_ATTR VkResult VKAPI_CALL vkReleasePerformanceConfigurationINTEL( + VkDevice device, + VkPerformanceConfigurationINTEL configuration); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSetPerformanceConfigurationINTEL( + VkQueue queue, + VkPerformanceConfigurationINTEL configuration); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPerformanceParameterINTEL( + VkDevice device, + VkPerformanceParameterTypeINTEL parameter, + VkPerformanceValueINTEL* pValue); +#endif + + +// VK_EXT_pci_bus_info is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pci_bus_info 1 +#define VK_EXT_PCI_BUS_INFO_SPEC_VERSION 2 +#define VK_EXT_PCI_BUS_INFO_EXTENSION_NAME "VK_EXT_pci_bus_info" +typedef struct VkPhysicalDevicePCIBusInfoPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t pciDomain; + uint32_t pciBus; + uint32_t pciDevice; + uint32_t pciFunction; +} VkPhysicalDevicePCIBusInfoPropertiesEXT; + + + +// VK_AMD_display_native_hdr is a preprocessor guard. Do not pass it to API calls. 
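// [Editorial sketch, not part of the vendored Khronos header] Illustrative use of
// VK_INTEL_performance_query (declared above): initialize the performance API and ask
// whether hardware counters are supported. The anvil_example_* helper name is hypothetical;
// a VkDevice created with the extension enabled is assumed.
static inline VkResult anvil_example_init_intel_perf_query(VkDevice  device,
                                                            VkBool32* out_hw_counters_supported)
{
    VkInitializePerformanceApiInfoINTEL init_info;
    init_info.sType     = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL;
    init_info.pNext     = NULL;
    init_info.pUserData = NULL;

    VkResult result = vkInitializePerformanceApiINTEL(device, &init_info);
    if (result != VK_SUCCESS)
    {
        return result;
    }

    VkPerformanceValueINTEL value;
    result = vkGetPerformanceParameterINTEL(device,
                                            VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL,
                                            &value);
    if (result == VK_SUCCESS)
    {
        *out_hw_counters_supported = value.data.valueBool;
    }
    return result;
}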
+#define VK_AMD_display_native_hdr 1 +#define VK_AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION 1 +#define VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME "VK_AMD_display_native_hdr" +typedef struct VkDisplayNativeHdrSurfaceCapabilitiesAMD { + VkStructureType sType; + void* pNext; + VkBool32 localDimmingSupport; +} VkDisplayNativeHdrSurfaceCapabilitiesAMD; + +typedef struct VkSwapchainDisplayNativeHdrCreateInfoAMD { + VkStructureType sType; + const void* pNext; + VkBool32 localDimmingEnable; +} VkSwapchainDisplayNativeHdrCreateInfoAMD; + +typedef void (VKAPI_PTR *PFN_vkSetLocalDimmingAMD)(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkSetLocalDimmingAMD( + VkDevice device, + VkSwapchainKHR swapChain, + VkBool32 localDimmingEnable); +#endif + + +// VK_EXT_fragment_density_map is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_fragment_density_map 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION 2 +#define VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME "VK_EXT_fragment_density_map" +typedef struct VkPhysicalDeviceFragmentDensityMapFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 fragmentDensityMap; + VkBool32 fragmentDensityMapDynamic; + VkBool32 fragmentDensityMapNonSubsampledImages; +} VkPhysicalDeviceFragmentDensityMapFeaturesEXT; + +typedef struct VkPhysicalDeviceFragmentDensityMapPropertiesEXT { + VkStructureType sType; + void* pNext; + VkExtent2D minFragmentDensityTexelSize; + VkExtent2D maxFragmentDensityTexelSize; + VkBool32 fragmentDensityInvocations; +} VkPhysicalDeviceFragmentDensityMapPropertiesEXT; + +typedef struct VkRenderPassFragmentDensityMapCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkAttachmentReference fragmentDensityMapAttachment; +} VkRenderPassFragmentDensityMapCreateInfoEXT; + +typedef struct VkRenderingFragmentDensityMapAttachmentInfoEXT { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkImageLayout imageLayout; +} VkRenderingFragmentDensityMapAttachmentInfoEXT; + + + +// VK_EXT_scalar_block_layout is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_scalar_block_layout 1 +#define VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION 1 +#define VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME "VK_EXT_scalar_block_layout" +typedef VkPhysicalDeviceScalarBlockLayoutFeatures VkPhysicalDeviceScalarBlockLayoutFeaturesEXT; + + + +// VK_GOOGLE_hlsl_functionality1 is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_hlsl_functionality1 1 +#define VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION 1 +#define VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME "VK_GOOGLE_hlsl_functionality1" +// VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION is a deprecated alias +#define VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION +// VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME is a deprecated alias +#define VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME + + +// VK_GOOGLE_decorate_string is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_decorate_string 1 +#define VK_GOOGLE_DECORATE_STRING_SPEC_VERSION 1 +#define VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME "VK_GOOGLE_decorate_string" + + +// VK_EXT_subgroup_size_control is a preprocessor guard. Do not pass it to API calls. 
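// [Editorial sketch, not part of the vendored Khronos header] Illustrative use of
// VK_AMD_display_native_hdr (declared above): toggle local dimming on a swapchain whose
// surface reported localDimmingSupport in VkDisplayNativeHdrSurfaceCapabilitiesAMD. The
// anvil_example_* helper name is hypothetical.
static inline void anvil_example_set_local_dimming_amd(VkDevice       device,
                                                       VkSwapchainKHR swapchain,
                                                       VkBool32       enable)
{
    vkSetLocalDimmingAMD(device, swapchain, enable);
}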
+#define VK_EXT_subgroup_size_control 1 +#define VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION 2 +#define VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME "VK_EXT_subgroup_size_control" +typedef VkPhysicalDeviceSubgroupSizeControlFeatures VkPhysicalDeviceSubgroupSizeControlFeaturesEXT; + +typedef VkPhysicalDeviceSubgroupSizeControlProperties VkPhysicalDeviceSubgroupSizeControlPropertiesEXT; + +typedef VkPipelineShaderStageRequiredSubgroupSizeCreateInfo VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; + + + +// VK_AMD_shader_core_properties2 is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_core_properties2 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME "VK_AMD_shader_core_properties2" + +typedef enum VkShaderCorePropertiesFlagBitsAMD { + VK_SHADER_CORE_PROPERTIES_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF +} VkShaderCorePropertiesFlagBitsAMD; +typedef VkFlags VkShaderCorePropertiesFlagsAMD; +typedef struct VkPhysicalDeviceShaderCoreProperties2AMD { + VkStructureType sType; + void* pNext; + VkShaderCorePropertiesFlagsAMD shaderCoreFeatures; + uint32_t activeComputeUnitCount; +} VkPhysicalDeviceShaderCoreProperties2AMD; + + + +// VK_AMD_device_coherent_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_device_coherent_memory 1 +#define VK_AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION 1 +#define VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME "VK_AMD_device_coherent_memory" +typedef struct VkPhysicalDeviceCoherentMemoryFeaturesAMD { + VkStructureType sType; + void* pNext; + VkBool32 deviceCoherentMemory; +} VkPhysicalDeviceCoherentMemoryFeaturesAMD; + + + +// VK_EXT_shader_image_atomic_int64 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_image_atomic_int64 1 +#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_SPEC_VERSION 1 +#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_EXTENSION_NAME "VK_EXT_shader_image_atomic_int64" +typedef struct VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderImageInt64Atomics; + VkBool32 sparseImageInt64Atomics; +} VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + + + +// VK_EXT_memory_budget is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_memory_budget 1 +#define VK_EXT_MEMORY_BUDGET_SPEC_VERSION 1 +#define VK_EXT_MEMORY_BUDGET_EXTENSION_NAME "VK_EXT_memory_budget" +typedef struct VkPhysicalDeviceMemoryBudgetPropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize heapBudget[VK_MAX_MEMORY_HEAPS]; + VkDeviceSize heapUsage[VK_MAX_MEMORY_HEAPS]; +} VkPhysicalDeviceMemoryBudgetPropertiesEXT; + + + +// VK_EXT_memory_priority is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_memory_priority 1 +#define VK_EXT_MEMORY_PRIORITY_SPEC_VERSION 1 +#define VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME "VK_EXT_memory_priority" +typedef struct VkPhysicalDeviceMemoryPriorityFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 memoryPriority; +} VkPhysicalDeviceMemoryPriorityFeaturesEXT; + +typedef struct VkMemoryPriorityAllocateInfoEXT { + VkStructureType sType; + const void* pNext; + float priority; +} VkMemoryPriorityAllocateInfoEXT; + + + +// VK_NV_dedicated_allocation_image_aliasing is a preprocessor guard. Do not pass it to API calls. 
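// [Editorial sketch, not part of the vendored Khronos header] Illustrative use of
// VK_EXT_memory_budget (declared above): chain the budget structure into
// vkGetPhysicalDeviceMemoryProperties2 and read the current budget of one heap. The
// anvil_example_* helper name is hypothetical; a Vulkan 1.1+ instance is assumed.
static inline VkDeviceSize anvil_example_get_heap_budget(VkPhysicalDevice physical_device,
                                                         uint32_t         heap_index)
{
    VkPhysicalDeviceMemoryBudgetPropertiesEXT budget_props;
    budget_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT;
    budget_props.pNext = NULL;

    VkPhysicalDeviceMemoryProperties2 mem_props;
    mem_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
    mem_props.pNext = &budget_props;

    vkGetPhysicalDeviceMemoryProperties2(physical_device, &mem_props);

    return (heap_index < mem_props.memoryProperties.memoryHeapCount) ? budget_props.heapBudget[heap_index]
                                                                     : 0;
}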
+#define VK_NV_dedicated_allocation_image_aliasing 1 +#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION 1 +#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME "VK_NV_dedicated_allocation_image_aliasing" +typedef struct VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 dedicatedAllocationImageAliasing; +} VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + + + +// VK_EXT_buffer_device_address is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_buffer_device_address 1 +#define VK_EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 2 +#define VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_EXT_buffer_device_address" +typedef struct VkPhysicalDeviceBufferDeviceAddressFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 bufferDeviceAddress; + VkBool32 bufferDeviceAddressCaptureReplay; + VkBool32 bufferDeviceAddressMultiDevice; +} VkPhysicalDeviceBufferDeviceAddressFeaturesEXT; + +typedef VkPhysicalDeviceBufferDeviceAddressFeaturesEXT VkPhysicalDeviceBufferAddressFeaturesEXT; + +typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoEXT; + +typedef struct VkBufferDeviceAddressCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceAddress deviceAddress; +} VkBufferDeviceAddressCreateInfoEXT; + +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressEXT)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressEXT( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); +#endif + + +// VK_EXT_tooling_info is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_tooling_info 1 +#define VK_EXT_TOOLING_INFO_SPEC_VERSION 1 +#define VK_EXT_TOOLING_INFO_EXTENSION_NAME "VK_EXT_tooling_info" +typedef VkToolPurposeFlagBits VkToolPurposeFlagBitsEXT; + +typedef VkToolPurposeFlags VkToolPurposeFlagsEXT; + +typedef VkPhysicalDeviceToolProperties VkPhysicalDeviceToolPropertiesEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceToolPropertiesEXT)(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolProperties* pToolProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolPropertiesEXT( + VkPhysicalDevice physicalDevice, + uint32_t* pToolCount, + VkPhysicalDeviceToolProperties* pToolProperties); +#endif + + +// VK_EXT_separate_stencil_usage is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_separate_stencil_usage 1 +#define VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION 1 +#define VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME "VK_EXT_separate_stencil_usage" +typedef VkImageStencilUsageCreateInfo VkImageStencilUsageCreateInfoEXT; + + + +// VK_EXT_validation_features is a preprocessor guard. Do not pass it to API calls. 
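// [Editorial sketch, not part of the vendored Khronos header] Illustrative use of
// VK_EXT_buffer_device_address (declared above): retrieve the GPU virtual address of a
// buffer created with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT. The anvil_example_*
// helper name is hypothetical.
static inline VkDeviceAddress anvil_example_get_buffer_address_ext(VkDevice device,
                                                                   VkBuffer buffer)
{
    VkBufferDeviceAddressInfo info;
    info.sType  = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO;
    info.pNext  = NULL;
    info.buffer = buffer;

    return vkGetBufferDeviceAddressEXT(device, &info);
}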
+#define VK_EXT_validation_features 1 +#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 6 +#define VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME "VK_EXT_validation_features" + +typedef enum VkValidationFeatureEnableEXT { + VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT = 0, + VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT = 1, + VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT = 2, + VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT = 3, + VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT = 4, + VK_VALIDATION_FEATURE_ENABLE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationFeatureEnableEXT; + +typedef enum VkValidationFeatureDisableEXT { + VK_VALIDATION_FEATURE_DISABLE_ALL_EXT = 0, + VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT = 1, + VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT = 2, + VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT = 3, + VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT = 4, + VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT = 5, + VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT = 6, + VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT = 7, + VK_VALIDATION_FEATURE_DISABLE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationFeatureDisableEXT; +typedef struct VkValidationFeaturesEXT { + VkStructureType sType; + const void* pNext; + uint32_t enabledValidationFeatureCount; + const VkValidationFeatureEnableEXT* pEnabledValidationFeatures; + uint32_t disabledValidationFeatureCount; + const VkValidationFeatureDisableEXT* pDisabledValidationFeatures; +} VkValidationFeaturesEXT; + + + +// VK_NV_cooperative_matrix is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_cooperative_matrix 1 +#define VK_NV_COOPERATIVE_MATRIX_SPEC_VERSION 1 +#define VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME "VK_NV_cooperative_matrix" +typedef VkComponentTypeKHR VkComponentTypeNV; + +typedef VkScopeKHR VkScopeNV; + +typedef struct VkCooperativeMatrixPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t MSize; + uint32_t NSize; + uint32_t KSize; + VkComponentTypeNV AType; + VkComponentTypeNV BType; + VkComponentTypeNV CType; + VkComponentTypeNV DType; + VkScopeNV scope; +} VkCooperativeMatrixPropertiesNV; + +typedef struct VkPhysicalDeviceCooperativeMatrixFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cooperativeMatrix; + VkBool32 cooperativeMatrixRobustBufferAccess; +} VkPhysicalDeviceCooperativeMatrixFeaturesNV; + +typedef struct VkPhysicalDeviceCooperativeMatrixPropertiesNV { + VkStructureType sType; + void* pNext; + VkShaderStageFlags cooperativeMatrixSupportedStages; +} VkPhysicalDeviceCooperativeMatrixPropertiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkCooperativeMatrixPropertiesNV* pProperties); +#endif + + +// VK_NV_coverage_reduction_mode is a preprocessor guard. Do not pass it to API calls. 
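// [Editorial sketch, not part of the vendored Khronos header] Illustrative use of
// VK_NV_cooperative_matrix (declared above): the standard two-call enumeration of
// supported matrix configurations. The anvil_example_* helper name is hypothetical; the
// caller provides storage for at most max_properties entries.
static inline VkResult anvil_example_enumerate_coop_matrix_nv(VkPhysicalDevice                 physical_device,
                                                              uint32_t                         max_properties,
                                                              uint32_t*                        out_count,
                                                              VkCooperativeMatrixPropertiesNV* out_properties)
{
    uint32_t count  = 0;
    VkResult result = vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(physical_device, &count, NULL);

    if (result != VK_SUCCESS || count == 0)
    {
        *out_count = 0;
        return result;
    }

    if (count > max_properties)
    {
        count = max_properties;
    }

    // Each output structure must have its sType/pNext initialized before the second call.
    for (uint32_t i = 0; i < count; ++i)
    {
        out_properties[i].sType = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV;
        out_properties[i].pNext = NULL;
    }

    *out_count = count;
    return vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(physical_device, &count, out_properties);
}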
+#define VK_NV_coverage_reduction_mode 1 +#define VK_NV_COVERAGE_REDUCTION_MODE_SPEC_VERSION 1 +#define VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME "VK_NV_coverage_reduction_mode" + +typedef enum VkCoverageReductionModeNV { + VK_COVERAGE_REDUCTION_MODE_MERGE_NV = 0, + VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV = 1, + VK_COVERAGE_REDUCTION_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoverageReductionModeNV; +typedef VkFlags VkPipelineCoverageReductionStateCreateFlagsNV; +typedef struct VkPhysicalDeviceCoverageReductionModeFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 coverageReductionMode; +} VkPhysicalDeviceCoverageReductionModeFeaturesNV; + +typedef struct VkPipelineCoverageReductionStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageReductionStateCreateFlagsNV flags; + VkCoverageReductionModeNV coverageReductionMode; +} VkPipelineCoverageReductionStateCreateInfoNV; + +typedef struct VkFramebufferMixedSamplesCombinationNV { + VkStructureType sType; + void* pNext; + VkCoverageReductionModeNV coverageReductionMode; + VkSampleCountFlagBits rasterizationSamples; + VkSampleCountFlags depthStencilSamples; + VkSampleCountFlags colorSamples; +} VkFramebufferMixedSamplesCombinationNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV)(VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + VkPhysicalDevice physicalDevice, + uint32_t* pCombinationCount, + VkFramebufferMixedSamplesCombinationNV* pCombinations); +#endif + + +// VK_EXT_fragment_shader_interlock is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_fragment_shader_interlock 1 +#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION 1 +#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME "VK_EXT_fragment_shader_interlock" +typedef struct VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 fragmentShaderSampleInterlock; + VkBool32 fragmentShaderPixelInterlock; + VkBool32 fragmentShaderShadingRateInterlock; +} VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT; + + + +// VK_EXT_ycbcr_image_arrays is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_ycbcr_image_arrays 1 +#define VK_EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION 1 +#define VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME "VK_EXT_ycbcr_image_arrays" +typedef struct VkPhysicalDeviceYcbcrImageArraysFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 ycbcrImageArrays; +} VkPhysicalDeviceYcbcrImageArraysFeaturesEXT; + + + +// VK_EXT_provoking_vertex is a preprocessor guard. Do not pass it to API calls. 
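// [Editorial sketch, not part of the vendored Khronos header] Illustrative use of
// VK_NV_coverage_reduction_mode (declared above): query how many mixed-sample framebuffer
// combinations the implementation supports. The anvil_example_* helper name is hypothetical.
static inline uint32_t anvil_example_count_mixed_samples_combinations_nv(VkPhysicalDevice physical_device)
{
    uint32_t count = 0;

    // First call with a NULL combinations pointer only returns the count.
    vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(physical_device, &count, NULL);
    return count;
}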
+#define VK_EXT_provoking_vertex 1 +#define VK_EXT_PROVOKING_VERTEX_SPEC_VERSION 1 +#define VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME "VK_EXT_provoking_vertex" + +typedef enum VkProvokingVertexModeEXT { + VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT = 0, + VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT = 1, + VK_PROVOKING_VERTEX_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkProvokingVertexModeEXT; +typedef struct VkPhysicalDeviceProvokingVertexFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 provokingVertexLast; + VkBool32 transformFeedbackPreservesProvokingVertex; +} VkPhysicalDeviceProvokingVertexFeaturesEXT; + +typedef struct VkPhysicalDeviceProvokingVertexPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 provokingVertexModePerPipeline; + VkBool32 transformFeedbackPreservesTriangleFanProvokingVertex; +} VkPhysicalDeviceProvokingVertexPropertiesEXT; + +typedef struct VkPipelineRasterizationProvokingVertexStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkProvokingVertexModeEXT provokingVertexMode; +} VkPipelineRasterizationProvokingVertexStateCreateInfoEXT; + + + +// VK_EXT_headless_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_headless_surface 1 +#define VK_EXT_HEADLESS_SURFACE_SPEC_VERSION 1 +#define VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME "VK_EXT_headless_surface" +typedef VkFlags VkHeadlessSurfaceCreateFlagsEXT; +typedef struct VkHeadlessSurfaceCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkHeadlessSurfaceCreateFlagsEXT flags; +} VkHeadlessSurfaceCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateHeadlessSurfaceEXT)(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateHeadlessSurfaceEXT( + VkInstance instance, + const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +// VK_EXT_line_rasterization is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_line_rasterization 1 +#define VK_EXT_LINE_RASTERIZATION_SPEC_VERSION 1 +#define VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME "VK_EXT_line_rasterization" +typedef VkLineRasterizationMode VkLineRasterizationModeEXT; + +typedef VkPhysicalDeviceLineRasterizationFeatures VkPhysicalDeviceLineRasterizationFeaturesEXT; + +typedef VkPhysicalDeviceLineRasterizationProperties VkPhysicalDeviceLineRasterizationPropertiesEXT; + +typedef VkPipelineRasterizationLineStateCreateInfo VkPipelineRasterizationLineStateCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEXT)(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT( + VkCommandBuffer commandBuffer, + uint32_t lineStippleFactor, + uint16_t lineStipplePattern); +#endif + + +// VK_EXT_shader_atomic_float is a preprocessor guard. Do not pass it to API calls. 
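// [Editorial sketch, not part of the vendored Khronos header] Illustrative use of
// VK_EXT_headless_surface (declared above): create a surface with no window system
// attached, e.g. for automated testing. The anvil_example_* helper name is hypothetical;
// the instance is assumed to have VK_KHR_surface and VK_EXT_headless_surface enabled.
static inline VkResult anvil_example_create_headless_surface(VkInstance    instance,
                                                             VkSurfaceKHR* out_surface)
{
    VkHeadlessSurfaceCreateInfoEXT create_info;
    create_info.sType = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT;
    create_info.pNext = NULL;
    create_info.flags = 0;

    return vkCreateHeadlessSurfaceEXT(instance, &create_info, NULL, out_surface);
}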
+#define VK_EXT_shader_atomic_float 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME "VK_EXT_shader_atomic_float" +typedef struct VkPhysicalDeviceShaderAtomicFloatFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderBufferFloat32Atomics; + VkBool32 shaderBufferFloat32AtomicAdd; + VkBool32 shaderBufferFloat64Atomics; + VkBool32 shaderBufferFloat64AtomicAdd; + VkBool32 shaderSharedFloat32Atomics; + VkBool32 shaderSharedFloat32AtomicAdd; + VkBool32 shaderSharedFloat64Atomics; + VkBool32 shaderSharedFloat64AtomicAdd; + VkBool32 shaderImageFloat32Atomics; + VkBool32 shaderImageFloat32AtomicAdd; + VkBool32 sparseImageFloat32Atomics; + VkBool32 sparseImageFloat32AtomicAdd; +} VkPhysicalDeviceShaderAtomicFloatFeaturesEXT; + + + +// VK_EXT_host_query_reset is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_host_query_reset 1 +#define VK_EXT_HOST_QUERY_RESET_SPEC_VERSION 1 +#define VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME "VK_EXT_host_query_reset" +typedef VkPhysicalDeviceHostQueryResetFeatures VkPhysicalDeviceHostQueryResetFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkResetQueryPoolEXT)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkResetQueryPoolEXT( + VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount); +#endif + + +// VK_EXT_index_type_uint8 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_index_type_uint8 1 +#define VK_EXT_INDEX_TYPE_UINT8_SPEC_VERSION 1 +#define VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME "VK_EXT_index_type_uint8" +typedef VkPhysicalDeviceIndexTypeUint8Features VkPhysicalDeviceIndexTypeUint8FeaturesEXT; + + + +// VK_EXT_extended_dynamic_state is a preprocessor guard. Do not pass it to API calls. 
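// [Editorial sketch, not part of the vendored Khronos header] Illustrative use of
// VK_EXT_host_query_reset (declared above): reset a range of queries from the host instead
// of recording vkCmdResetQueryPool. The anvil_example_* helper name is hypothetical; the
// queries in the range must not be in use by pending command buffers.
static inline void anvil_example_reset_queries_on_host(VkDevice    device,
                                                       VkQueryPool query_pool,
                                                       uint32_t    first_query,
                                                       uint32_t    query_count)
{
    vkResetQueryPoolEXT(device, query_pool, first_query, query_count);
}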
+#define VK_EXT_extended_dynamic_state 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_extended_dynamic_state" +typedef struct VkPhysicalDeviceExtendedDynamicStateFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 extendedDynamicState; +} VkPhysicalDeviceExtendedDynamicStateFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetCullModeEXT)(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetFrontFaceEXT)(VkCommandBuffer commandBuffer, VkFrontFace frontFace); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveTopologyEXT)(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports); +typedef void (VKAPI_PTR *PFN_vkCmdSetScissorWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors); +typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers2EXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthWriteEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthCompareOpEXT)(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBoundsTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilOpEXT)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetCullModeEXT( + VkCommandBuffer commandBuffer, + VkCullModeFlags cullMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFaceEXT( + VkCommandBuffer commandBuffer, + VkFrontFace frontFace); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopologyEXT( + VkCommandBuffer commandBuffer, + VkPrimitiveTopology primitiveTopology); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCountEXT( + VkCommandBuffer commandBuffer, + uint32_t viewportCount, + const VkViewport* pViewports); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCountEXT( + VkCommandBuffer commandBuffer, + uint32_t scissorCount, + const VkRect2D* pScissors); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2EXT( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes, + const VkDeviceSize* pStrides); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthWriteEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOpEXT( + VkCommandBuffer commandBuffer, + VkCompareOp depthCompareOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthBoundsTestEnable); + +VKAPI_ATTR void VKAPI_CALL 
vkCmdSetStencilTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 stencilTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOpEXT( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp); +#endif + + +// VK_EXT_host_image_copy is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_host_image_copy 1 +#define VK_EXT_HOST_IMAGE_COPY_SPEC_VERSION 1 +#define VK_EXT_HOST_IMAGE_COPY_EXTENSION_NAME "VK_EXT_host_image_copy" +typedef VkHostImageCopyFlagBits VkHostImageCopyFlagBitsEXT; + +typedef VkHostImageCopyFlags VkHostImageCopyFlagsEXT; + +typedef VkPhysicalDeviceHostImageCopyFeatures VkPhysicalDeviceHostImageCopyFeaturesEXT; + +typedef VkPhysicalDeviceHostImageCopyProperties VkPhysicalDeviceHostImageCopyPropertiesEXT; + +typedef VkMemoryToImageCopy VkMemoryToImageCopyEXT; + +typedef VkImageToMemoryCopy VkImageToMemoryCopyEXT; + +typedef VkCopyMemoryToImageInfo VkCopyMemoryToImageInfoEXT; + +typedef VkCopyImageToMemoryInfo VkCopyImageToMemoryInfoEXT; + +typedef VkCopyImageToImageInfo VkCopyImageToImageInfoEXT; + +typedef VkHostImageLayoutTransitionInfo VkHostImageLayoutTransitionInfoEXT; + +typedef VkSubresourceHostMemcpySize VkSubresourceHostMemcpySizeEXT; + +typedef VkHostImageCopyDevicePerformanceQuery VkHostImageCopyDevicePerformanceQueryEXT; + +typedef VkSubresourceLayout2 VkSubresourceLayout2EXT; + +typedef VkImageSubresource2 VkImageSubresource2EXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToImageEXT)(VkDevice device, const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToMemoryEXT)(VkDevice device, const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToImageEXT)(VkDevice device, const VkCopyImageToImageInfo* pCopyImageToImageInfo); +typedef VkResult (VKAPI_PTR *PFN_vkTransitionImageLayoutEXT)(VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfo* pTransitions); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2EXT)(VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, VkSubresourceLayout2* pLayout); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToImageEXT( + VkDevice device, + const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToMemoryEXT( + VkDevice device, + const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToImageEXT( + VkDevice device, + const VkCopyImageToImageInfo* pCopyImageToImageInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkTransitionImageLayoutEXT( + VkDevice device, + uint32_t transitionCount, + const VkHostImageLayoutTransitionInfo* pTransitions); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2EXT( + VkDevice device, + VkImage image, + const VkImageSubresource2* pSubresource, + VkSubresourceLayout2* pLayout); +#endif + + +// VK_EXT_map_memory_placed is a preprocessor guard. Do not pass it to API calls. 
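// [Editorial sketch, not part of the vendored Khronos header] Illustrative use of
// VK_EXT_extended_dynamic_state (declared above): set at record time state that would
// otherwise be baked into the pipeline. The anvil_example_* helper name is hypothetical;
// the bound graphics pipeline is assumed to declare the corresponding dynamic states.
static inline void anvil_example_set_extended_dynamic_state(VkCommandBuffer cmd_buffer)
{
    vkCmdSetCullModeEXT          (cmd_buffer, VK_CULL_MODE_BACK_BIT);
    vkCmdSetFrontFaceEXT         (cmd_buffer, VK_FRONT_FACE_COUNTER_CLOCKWISE);
    vkCmdSetPrimitiveTopologyEXT (cmd_buffer, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST);
    vkCmdSetDepthTestEnableEXT   (cmd_buffer, VK_TRUE);
    vkCmdSetDepthWriteEnableEXT  (cmd_buffer, VK_TRUE);
    vkCmdSetDepthCompareOpEXT    (cmd_buffer, VK_COMPARE_OP_LESS_OR_EQUAL);
    vkCmdSetStencilTestEnableEXT (cmd_buffer, VK_FALSE);
}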
+#define VK_EXT_map_memory_placed 1 +#define VK_EXT_MAP_MEMORY_PLACED_SPEC_VERSION 1 +#define VK_EXT_MAP_MEMORY_PLACED_EXTENSION_NAME "VK_EXT_map_memory_placed" +typedef struct VkPhysicalDeviceMapMemoryPlacedFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 memoryMapPlaced; + VkBool32 memoryMapRangePlaced; + VkBool32 memoryUnmapReserve; +} VkPhysicalDeviceMapMemoryPlacedFeaturesEXT; + +typedef struct VkPhysicalDeviceMapMemoryPlacedPropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize minPlacedMemoryMapAlignment; +} VkPhysicalDeviceMapMemoryPlacedPropertiesEXT; + +typedef struct VkMemoryMapPlacedInfoEXT { + VkStructureType sType; + const void* pNext; + void* pPlacedAddress; +} VkMemoryMapPlacedInfoEXT; + + + +// VK_EXT_shader_atomic_float2 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_atomic_float2 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_2_SPEC_VERSION 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_2_EXTENSION_NAME "VK_EXT_shader_atomic_float2" +typedef struct VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderBufferFloat16Atomics; + VkBool32 shaderBufferFloat16AtomicAdd; + VkBool32 shaderBufferFloat16AtomicMinMax; + VkBool32 shaderBufferFloat32AtomicMinMax; + VkBool32 shaderBufferFloat64AtomicMinMax; + VkBool32 shaderSharedFloat16Atomics; + VkBool32 shaderSharedFloat16AtomicAdd; + VkBool32 shaderSharedFloat16AtomicMinMax; + VkBool32 shaderSharedFloat32AtomicMinMax; + VkBool32 shaderSharedFloat64AtomicMinMax; + VkBool32 shaderImageFloat32AtomicMinMax; + VkBool32 sparseImageFloat32AtomicMinMax; +} VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT; + + + +// VK_EXT_surface_maintenance1 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_surface_maintenance1 1 +#define VK_EXT_SURFACE_MAINTENANCE_1_SPEC_VERSION 1 +#define VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME "VK_EXT_surface_maintenance1" + +typedef enum VkPresentScalingFlagBitsEXT { + VK_PRESENT_SCALING_ONE_TO_ONE_BIT_EXT = 0x00000001, + VK_PRESENT_SCALING_ASPECT_RATIO_STRETCH_BIT_EXT = 0x00000002, + VK_PRESENT_SCALING_STRETCH_BIT_EXT = 0x00000004, + VK_PRESENT_SCALING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkPresentScalingFlagBitsEXT; +typedef VkFlags VkPresentScalingFlagsEXT; + +typedef enum VkPresentGravityFlagBitsEXT { + VK_PRESENT_GRAVITY_MIN_BIT_EXT = 0x00000001, + VK_PRESENT_GRAVITY_MAX_BIT_EXT = 0x00000002, + VK_PRESENT_GRAVITY_CENTERED_BIT_EXT = 0x00000004, + VK_PRESENT_GRAVITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkPresentGravityFlagBitsEXT; +typedef VkFlags VkPresentGravityFlagsEXT; +typedef struct VkSurfacePresentModeEXT { + VkStructureType sType; + void* pNext; + VkPresentModeKHR presentMode; +} VkSurfacePresentModeEXT; + +typedef struct VkSurfacePresentScalingCapabilitiesEXT { + VkStructureType sType; + void* pNext; + VkPresentScalingFlagsEXT supportedPresentScaling; + VkPresentGravityFlagsEXT supportedPresentGravityX; + VkPresentGravityFlagsEXT supportedPresentGravityY; + VkExtent2D minScaledImageExtent; + VkExtent2D maxScaledImageExtent; +} VkSurfacePresentScalingCapabilitiesEXT; + +typedef struct VkSurfacePresentModeCompatibilityEXT { + VkStructureType sType; + void* pNext; + uint32_t presentModeCount; + VkPresentModeKHR* pPresentModes; +} VkSurfacePresentModeCompatibilityEXT; + + + +// VK_EXT_swapchain_maintenance1 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_swapchain_maintenance1 1 +#define VK_EXT_SWAPCHAIN_MAINTENANCE_1_SPEC_VERSION 1 +#define VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME "VK_EXT_swapchain_maintenance1" +typedef struct VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 swapchainMaintenance1; +} VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT; + +typedef struct VkSwapchainPresentFenceInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkFence* pFences; +} VkSwapchainPresentFenceInfoEXT; + +typedef struct VkSwapchainPresentModesCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t presentModeCount; + const VkPresentModeKHR* pPresentModes; +} VkSwapchainPresentModesCreateInfoEXT; + +typedef struct VkSwapchainPresentModeInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentModeKHR* pPresentModes; +} VkSwapchainPresentModeInfoEXT; + +typedef struct VkSwapchainPresentScalingCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPresentScalingFlagsEXT scalingBehavior; + VkPresentGravityFlagsEXT presentGravityX; + VkPresentGravityFlagsEXT presentGravityY; +} VkSwapchainPresentScalingCreateInfoEXT; + +typedef struct VkReleaseSwapchainImagesInfoEXT { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint32_t imageIndexCount; + const uint32_t* pImageIndices; +} VkReleaseSwapchainImagesInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkReleaseSwapchainImagesEXT)(VkDevice device, const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseSwapchainImagesEXT( + VkDevice device, + const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo); +#endif + + +// VK_EXT_shader_demote_to_helper_invocation is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_demote_to_helper_invocation 1 +#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION 1 +#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME "VK_EXT_shader_demote_to_helper_invocation" +typedef VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + + + +// VK_NV_device_generated_commands is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_device_generated_commands 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNV) +#define VK_NV_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3 +#define VK_NV_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NV_device_generated_commands" + +typedef enum VkIndirectCommandsTokenTypeNV { + VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV = 0, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV = 1, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV = 2, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV = 3, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV = 4, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV = 5, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV = 6, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV = 7, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV = 1000328000, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV = 1000428003, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NV = 1000428004, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectCommandsTokenTypeNV; + +typedef enum VkIndirectStateFlagBitsNV { + VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV = 0x00000001, + VK_INDIRECT_STATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectStateFlagBitsNV; +typedef VkFlags VkIndirectStateFlagsNV; + +typedef enum VkIndirectCommandsLayoutUsageFlagBitsNV { + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV = 0x00000001, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV = 0x00000002, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV = 0x00000004, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectCommandsLayoutUsageFlagBitsNV; +typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNV; +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxGraphicsShaderGroupCount; + uint32_t maxIndirectSequenceCount; + uint32_t maxIndirectCommandsTokenCount; + uint32_t maxIndirectCommandsStreamCount; + uint32_t maxIndirectCommandsTokenOffset; + uint32_t maxIndirectCommandsStreamStride; + uint32_t minSequencesCountBufferOffsetAlignment; + uint32_t minSequencesIndexBufferOffsetAlignment; + uint32_t minIndirectCommandsBufferOffsetAlignment; +} VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 deviceGeneratedCommands; +} VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + +typedef struct VkGraphicsShaderGroupCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineVertexInputStateCreateInfo* pVertexInputState; + const VkPipelineTessellationStateCreateInfo* pTessellationState; +} VkGraphicsShaderGroupCreateInfoNV; + +typedef struct VkGraphicsPipelineShaderGroupsCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t groupCount; + const VkGraphicsShaderGroupCreateInfoNV* pGroups; + uint32_t pipelineCount; + const VkPipeline* pPipelines; +} VkGraphicsPipelineShaderGroupsCreateInfoNV; + +typedef struct VkBindShaderGroupIndirectCommandNV { + uint32_t groupIndex; +} VkBindShaderGroupIndirectCommandNV; + +typedef struct VkBindIndexBufferIndirectCommandNV { + VkDeviceAddress bufferAddress; + uint32_t size; + VkIndexType indexType; +} VkBindIndexBufferIndirectCommandNV; + +typedef struct VkBindVertexBufferIndirectCommandNV { + VkDeviceAddress bufferAddress; + uint32_t size; + uint32_t stride; +} VkBindVertexBufferIndirectCommandNV; + 
+typedef struct VkSetStateFlagsIndirectCommandNV { + uint32_t data; +} VkSetStateFlagsIndirectCommandNV; + +typedef struct VkIndirectCommandsStreamNV { + VkBuffer buffer; + VkDeviceSize offset; +} VkIndirectCommandsStreamNV; + +typedef struct VkIndirectCommandsLayoutTokenNV { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsTokenTypeNV tokenType; + uint32_t stream; + uint32_t offset; + uint32_t vertexBindingUnit; + VkBool32 vertexDynamicStride; + VkPipelineLayout pushconstantPipelineLayout; + VkShaderStageFlags pushconstantShaderStageFlags; + uint32_t pushconstantOffset; + uint32_t pushconstantSize; + VkIndirectStateFlagsNV indirectStateFlags; + uint32_t indexTypeCount; + const VkIndexType* pIndexTypes; + const uint32_t* pIndexTypeValues; +} VkIndirectCommandsLayoutTokenNV; + +typedef struct VkIndirectCommandsLayoutCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsLayoutUsageFlagsNV flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t tokenCount; + const VkIndirectCommandsLayoutTokenNV* pTokens; + uint32_t streamCount; + const uint32_t* pStreamStrides; +} VkIndirectCommandsLayoutCreateInfoNV; + +typedef struct VkGeneratedCommandsInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; + VkIndirectCommandsLayoutNV indirectCommandsLayout; + uint32_t streamCount; + const VkIndirectCommandsStreamNV* pStreams; + uint32_t sequencesCount; + VkBuffer preprocessBuffer; + VkDeviceSize preprocessOffset; + VkDeviceSize preprocessSize; + VkBuffer sequencesCountBuffer; + VkDeviceSize sequencesCountOffset; + VkBuffer sequencesIndexBuffer; + VkDeviceSize sequencesIndexOffset; +} VkGeneratedCommandsInfoNV; + +typedef struct VkGeneratedCommandsMemoryRequirementsInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; + VkIndirectCommandsLayoutNV indirectCommandsLayout; + uint32_t maxSequencesCount; +} VkGeneratedCommandsMemoryRequirementsInfoNV; + +typedef void (VKAPI_PTR *PFN_vkGetGeneratedCommandsMemoryRequirementsNV)(VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkCmdPreprocessGeneratedCommandsNV)(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteGeneratedCommandsNV)(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindPipelineShaderGroupNV)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex); +typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNV)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNV)(VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetGeneratedCommandsMemoryRequirementsNV( + VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkCmdPreprocessGeneratedCommandsNV( + VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); + +VKAPI_ATTR 
void VKAPI_CALL vkCmdExecuteGeneratedCommandsNV( + VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindPipelineShaderGroupNV( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline, + uint32_t groupIndex); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNV( + VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectCommandsLayoutNV* pIndirectCommandsLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNV( + VkDevice device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + const VkAllocationCallbacks* pAllocator); +#endif + + +// VK_NV_inherited_viewport_scissor is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_inherited_viewport_scissor 1 +#define VK_NV_INHERITED_VIEWPORT_SCISSOR_SPEC_VERSION 1 +#define VK_NV_INHERITED_VIEWPORT_SCISSOR_EXTENSION_NAME "VK_NV_inherited_viewport_scissor" +typedef struct VkPhysicalDeviceInheritedViewportScissorFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 inheritedViewportScissor2D; +} VkPhysicalDeviceInheritedViewportScissorFeaturesNV; + +typedef struct VkCommandBufferInheritanceViewportScissorInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 viewportScissor2D; + uint32_t viewportDepthCount; + const VkViewport* pViewportDepths; +} VkCommandBufferInheritanceViewportScissorInfoNV; + + + +// VK_EXT_texel_buffer_alignment is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_texel_buffer_alignment 1 +#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION 1 +#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME "VK_EXT_texel_buffer_alignment" +typedef struct VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 texelBufferAlignment; +} VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT; + +typedef VkPhysicalDeviceTexelBufferAlignmentProperties VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT; + + + +// VK_QCOM_render_pass_transform is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_render_pass_transform 1 +#define VK_QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION 4 +#define VK_QCOM_RENDER_PASS_TRANSFORM_EXTENSION_NAME "VK_QCOM_render_pass_transform" +typedef struct VkRenderPassTransformBeginInfoQCOM { + VkStructureType sType; + void* pNext; + VkSurfaceTransformFlagBitsKHR transform; +} VkRenderPassTransformBeginInfoQCOM; + +typedef struct VkCommandBufferInheritanceRenderPassTransformInfoQCOM { + VkStructureType sType; + void* pNext; + VkSurfaceTransformFlagBitsKHR transform; + VkRect2D renderArea; +} VkCommandBufferInheritanceRenderPassTransformInfoQCOM; + + + +// VK_EXT_depth_bias_control is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_depth_bias_control 1 +#define VK_EXT_DEPTH_BIAS_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DEPTH_BIAS_CONTROL_EXTENSION_NAME "VK_EXT_depth_bias_control" + +typedef enum VkDepthBiasRepresentationEXT { + VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORMAT_EXT = 0, + VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORCE_UNORM_EXT = 1, + VK_DEPTH_BIAS_REPRESENTATION_FLOAT_EXT = 2, + VK_DEPTH_BIAS_REPRESENTATION_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDepthBiasRepresentationEXT; +typedef struct VkPhysicalDeviceDepthBiasControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthBiasControl; + VkBool32 leastRepresentableValueForceUnormRepresentation; + VkBool32 floatRepresentation; + VkBool32 depthBiasExact; +} VkPhysicalDeviceDepthBiasControlFeaturesEXT; + +typedef struct VkDepthBiasInfoEXT { + VkStructureType sType; + const void* pNext; + float depthBiasConstantFactor; + float depthBiasClamp; + float depthBiasSlopeFactor; +} VkDepthBiasInfoEXT; + +typedef struct VkDepthBiasRepresentationInfoEXT { + VkStructureType sType; + const void* pNext; + VkDepthBiasRepresentationEXT depthBiasRepresentation; + VkBool32 depthBiasExact; +} VkDepthBiasRepresentationInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBias2EXT)(VkCommandBuffer commandBuffer, const VkDepthBiasInfoEXT* pDepthBiasInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias2EXT( + VkCommandBuffer commandBuffer, + const VkDepthBiasInfoEXT* pDepthBiasInfo); +#endif + + +// VK_EXT_device_memory_report is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_device_memory_report 1 +#define VK_EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION 2 +#define VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME "VK_EXT_device_memory_report" + +typedef enum VkDeviceMemoryReportEventTypeEXT { + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT = 0, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT = 1, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT = 2, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT = 3, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT = 4, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceMemoryReportEventTypeEXT; +typedef VkFlags VkDeviceMemoryReportFlagsEXT; +typedef struct VkPhysicalDeviceDeviceMemoryReportFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 deviceMemoryReport; +} VkPhysicalDeviceDeviceMemoryReportFeaturesEXT; + +typedef struct VkDeviceMemoryReportCallbackDataEXT { + VkStructureType sType; + void* pNext; + VkDeviceMemoryReportFlagsEXT flags; + VkDeviceMemoryReportEventTypeEXT type; + uint64_t memoryObjectId; + VkDeviceSize size; + VkObjectType objectType; + uint64_t objectHandle; + uint32_t heapIndex; +} VkDeviceMemoryReportCallbackDataEXT; + +typedef void (VKAPI_PTR *PFN_vkDeviceMemoryReportCallbackEXT)( + const VkDeviceMemoryReportCallbackDataEXT* pCallbackData, + void* pUserData); + +typedef struct VkDeviceDeviceMemoryReportCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceMemoryReportFlagsEXT flags; + PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback; + void* pUserData; +} VkDeviceDeviceMemoryReportCreateInfoEXT; + + + +// VK_EXT_acquire_drm_display is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_acquire_drm_display 1 +#define VK_EXT_ACQUIRE_DRM_DISPLAY_SPEC_VERSION 1 +#define VK_EXT_ACQUIRE_DRM_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_drm_display" +typedef VkResult (VKAPI_PTR *PFN_vkAcquireDrmDisplayEXT)(VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display); +typedef VkResult (VKAPI_PTR *PFN_vkGetDrmDisplayEXT)(VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR* display); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireDrmDisplayEXT( + VkPhysicalDevice physicalDevice, + int32_t drmFd, + VkDisplayKHR display); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDrmDisplayEXT( + VkPhysicalDevice physicalDevice, + int32_t drmFd, + uint32_t connectorId, + VkDisplayKHR* display); +#endif + + +// VK_EXT_robustness2 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_robustness2 1 +#define VK_EXT_ROBUSTNESS_2_SPEC_VERSION 1 +#define VK_EXT_ROBUSTNESS_2_EXTENSION_NAME "VK_EXT_robustness2" +typedef struct VkPhysicalDeviceRobustness2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 robustBufferAccess2; + VkBool32 robustImageAccess2; + VkBool32 nullDescriptor; +} VkPhysicalDeviceRobustness2FeaturesEXT; + +typedef struct VkPhysicalDeviceRobustness2PropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize robustStorageBufferAccessSizeAlignment; + VkDeviceSize robustUniformBufferAccessSizeAlignment; +} VkPhysicalDeviceRobustness2PropertiesEXT; + + + +// VK_EXT_custom_border_color is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_custom_border_color 1 +#define VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION 12 +#define VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME "VK_EXT_custom_border_color" +typedef struct VkSamplerCustomBorderColorCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkClearColorValue customBorderColor; + VkFormat format; +} VkSamplerCustomBorderColorCreateInfoEXT; + +typedef struct VkPhysicalDeviceCustomBorderColorPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxCustomBorderColorSamplers; +} VkPhysicalDeviceCustomBorderColorPropertiesEXT; + +typedef struct VkPhysicalDeviceCustomBorderColorFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 customBorderColors; + VkBool32 customBorderColorWithoutFormat; +} VkPhysicalDeviceCustomBorderColorFeaturesEXT; + + + +// VK_GOOGLE_user_type is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_user_type 1 +#define VK_GOOGLE_USER_TYPE_SPEC_VERSION 1 +#define VK_GOOGLE_USER_TYPE_EXTENSION_NAME "VK_GOOGLE_user_type" + + +// VK_NV_present_barrier is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_present_barrier 1 +#define VK_NV_PRESENT_BARRIER_SPEC_VERSION 1 +#define VK_NV_PRESENT_BARRIER_EXTENSION_NAME "VK_NV_present_barrier" +typedef struct VkPhysicalDevicePresentBarrierFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 presentBarrier; +} VkPhysicalDevicePresentBarrierFeaturesNV; + +typedef struct VkSurfaceCapabilitiesPresentBarrierNV { + VkStructureType sType; + void* pNext; + VkBool32 presentBarrierSupported; +} VkSurfaceCapabilitiesPresentBarrierNV; + +typedef struct VkSwapchainPresentBarrierCreateInfoNV { + VkStructureType sType; + void* pNext; + VkBool32 presentBarrierEnable; +} VkSwapchainPresentBarrierCreateInfoNV; + + + +// VK_EXT_private_data is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_private_data 1 +typedef VkPrivateDataSlot VkPrivateDataSlotEXT; + +#define VK_EXT_PRIVATE_DATA_SPEC_VERSION 1 +#define VK_EXT_PRIVATE_DATA_EXTENSION_NAME "VK_EXT_private_data" +typedef VkPrivateDataSlotCreateFlags VkPrivateDataSlotCreateFlagsEXT; + +typedef VkPhysicalDevicePrivateDataFeatures VkPhysicalDevicePrivateDataFeaturesEXT; + +typedef VkDevicePrivateDataCreateInfo VkDevicePrivateDataCreateInfoEXT; + +typedef VkPrivateDataSlotCreateInfo VkPrivateDataSlotCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreatePrivateDataSlotEXT)(VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlot* pPrivateDataSlot); +typedef void (VKAPI_PTR *PFN_vkDestroyPrivateDataSlotEXT)(VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkSetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data); +typedef void (VKAPI_PTR *PFN_vkGetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlotEXT( + VkDevice device, + const VkPrivateDataSlotCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPrivateDataSlot* pPrivateDataSlot); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlotEXT( + VkDevice device, + VkPrivateDataSlot privateDataSlot, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateDataEXT( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t data); + +VKAPI_ATTR void VKAPI_CALL vkGetPrivateDataEXT( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t* pData); +#endif + + +// VK_EXT_pipeline_creation_cache_control is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_creation_cache_control 1 +#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION 3 +#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME "VK_EXT_pipeline_creation_cache_control" +typedef VkPhysicalDevicePipelineCreationCacheControlFeatures VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT; + + + +// VK_NV_device_diagnostics_config is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_device_diagnostics_config 1 +#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION 2 +#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME "VK_NV_device_diagnostics_config" + +typedef enum VkDeviceDiagnosticsConfigFlagBitsNV { + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV = 0x00000001, + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV = 0x00000002, + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV = 0x00000004, + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV = 0x00000008, + VK_DEVICE_DIAGNOSTICS_CONFIG_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkDeviceDiagnosticsConfigFlagBitsNV; +typedef VkFlags VkDeviceDiagnosticsConfigFlagsNV; +typedef struct VkPhysicalDeviceDiagnosticsConfigFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 diagnosticsConfig; +} VkPhysicalDeviceDiagnosticsConfigFeaturesNV; + +typedef struct VkDeviceDiagnosticsConfigCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceDiagnosticsConfigFlagsNV flags; +} VkDeviceDiagnosticsConfigCreateInfoNV; + + + +// VK_QCOM_render_pass_store_ops is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_render_pass_store_ops 1 +#define VK_QCOM_RENDER_PASS_STORE_OPS_SPEC_VERSION 2 +#define VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME "VK_QCOM_render_pass_store_ops" + + +// VK_NV_cuda_kernel_launch is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_cuda_kernel_launch 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCudaModuleNV) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCudaFunctionNV) +#define VK_NV_CUDA_KERNEL_LAUNCH_SPEC_VERSION 2 +#define VK_NV_CUDA_KERNEL_LAUNCH_EXTENSION_NAME "VK_NV_cuda_kernel_launch" +typedef struct VkCudaModuleCreateInfoNV { + VkStructureType sType; + const void* pNext; + size_t dataSize; + const void* pData; +} VkCudaModuleCreateInfoNV; + +typedef struct VkCudaFunctionCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkCudaModuleNV module; + const char* pName; +} VkCudaFunctionCreateInfoNV; + +typedef struct VkCudaLaunchInfoNV { + VkStructureType sType; + const void* pNext; + VkCudaFunctionNV function; + uint32_t gridDimX; + uint32_t gridDimY; + uint32_t gridDimZ; + uint32_t blockDimX; + uint32_t blockDimY; + uint32_t blockDimZ; + uint32_t sharedMemBytes; + size_t paramCount; + const void* const * pParams; + size_t extraCount; + const void* const * pExtras; +} VkCudaLaunchInfoNV; + +typedef struct VkPhysicalDeviceCudaKernelLaunchFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cudaKernelLaunchFeatures; +} VkPhysicalDeviceCudaKernelLaunchFeaturesNV; + +typedef struct VkPhysicalDeviceCudaKernelLaunchPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t computeCapabilityMinor; + uint32_t computeCapabilityMajor; +} VkPhysicalDeviceCudaKernelLaunchPropertiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateCudaModuleNV)(VkDevice device, const VkCudaModuleCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCudaModuleNV* pModule); +typedef VkResult (VKAPI_PTR *PFN_vkGetCudaModuleCacheNV)(VkDevice device, VkCudaModuleNV module, size_t* pCacheSize, void* pCacheData); +typedef VkResult (VKAPI_PTR *PFN_vkCreateCudaFunctionNV)(VkDevice device, const VkCudaFunctionCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCudaFunctionNV* pFunction); +typedef void (VKAPI_PTR *PFN_vkDestroyCudaModuleNV)(VkDevice device, VkCudaModuleNV module, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR 
*PFN_vkDestroyCudaFunctionNV)(VkDevice device, VkCudaFunctionNV function, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdCudaLaunchKernelNV)(VkCommandBuffer commandBuffer, const VkCudaLaunchInfoNV* pLaunchInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCudaModuleNV( + VkDevice device, + const VkCudaModuleCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCudaModuleNV* pModule); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetCudaModuleCacheNV( + VkDevice device, + VkCudaModuleNV module, + size_t* pCacheSize, + void* pCacheData); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCudaFunctionNV( + VkDevice device, + const VkCudaFunctionCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCudaFunctionNV* pFunction); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCudaModuleNV( + VkDevice device, + VkCudaModuleNV module, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCudaFunctionNV( + VkDevice device, + VkCudaFunctionNV function, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdCudaLaunchKernelNV( + VkCommandBuffer commandBuffer, + const VkCudaLaunchInfoNV* pLaunchInfo); +#endif + + +// VK_QCOM_tile_shading is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_tile_shading 1 +#define VK_QCOM_TILE_SHADING_SPEC_VERSION 1 +#define VK_QCOM_TILE_SHADING_EXTENSION_NAME "VK_QCOM_tile_shading" + +typedef enum VkTileShadingRenderPassFlagBitsQCOM { + VK_TILE_SHADING_RENDER_PASS_ENABLE_BIT_QCOM = 0x00000001, + VK_TILE_SHADING_RENDER_PASS_PER_TILE_EXECUTION_BIT_QCOM = 0x00000002, + VK_TILE_SHADING_RENDER_PASS_FLAG_BITS_MAX_ENUM_QCOM = 0x7FFFFFFF +} VkTileShadingRenderPassFlagBitsQCOM; +typedef VkFlags VkTileShadingRenderPassFlagsQCOM; +typedef struct VkPhysicalDeviceTileShadingFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 tileShading; + VkBool32 tileShadingFragmentStage; + VkBool32 tileShadingColorAttachments; + VkBool32 tileShadingDepthAttachments; + VkBool32 tileShadingStencilAttachments; + VkBool32 tileShadingInputAttachments; + VkBool32 tileShadingSampledAttachments; + VkBool32 tileShadingPerTileDraw; + VkBool32 tileShadingPerTileDispatch; + VkBool32 tileShadingDispatchTile; + VkBool32 tileShadingApron; + VkBool32 tileShadingAnisotropicApron; + VkBool32 tileShadingAtomicOps; + VkBool32 tileShadingImageProcessing; +} VkPhysicalDeviceTileShadingFeaturesQCOM; + +typedef struct VkPhysicalDeviceTileShadingPropertiesQCOM { + VkStructureType sType; + void* pNext; + uint32_t maxApronSize; + VkBool32 preferNonCoherent; + VkExtent2D tileGranularity; + VkExtent2D maxTileShadingRate; +} VkPhysicalDeviceTileShadingPropertiesQCOM; + +typedef struct VkRenderPassTileShadingCreateInfoQCOM { + VkStructureType sType; + const void* pNext; + VkTileShadingRenderPassFlagsQCOM flags; + VkExtent2D tileApronSize; +} VkRenderPassTileShadingCreateInfoQCOM; + +typedef struct VkPerTileBeginInfoQCOM { + VkStructureType sType; + const void* pNext; +} VkPerTileBeginInfoQCOM; + +typedef struct VkPerTileEndInfoQCOM { + VkStructureType sType; + const void* pNext; +} VkPerTileEndInfoQCOM; + +typedef struct VkDispatchTileInfoQCOM { + VkStructureType sType; + const void* pNext; +} VkDispatchTileInfoQCOM; + +typedef void (VKAPI_PTR *PFN_vkCmdDispatchTileQCOM)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdBeginPerTileExecutionQCOM)(VkCommandBuffer commandBuffer, const VkPerTileBeginInfoQCOM* pPerTileBeginInfo); +typedef void (VKAPI_PTR 
*PFN_vkCmdEndPerTileExecutionQCOM)(VkCommandBuffer commandBuffer, const VkPerTileEndInfoQCOM* pPerTileEndInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchTileQCOM( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginPerTileExecutionQCOM( + VkCommandBuffer commandBuffer, + const VkPerTileBeginInfoQCOM* pPerTileBeginInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndPerTileExecutionQCOM( + VkCommandBuffer commandBuffer, + const VkPerTileEndInfoQCOM* pPerTileEndInfo); +#endif + + +// VK_NV_low_latency is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_low_latency 1 +#define VK_NV_LOW_LATENCY_SPEC_VERSION 1 +#define VK_NV_LOW_LATENCY_EXTENSION_NAME "VK_NV_low_latency" +typedef struct VkQueryLowLatencySupportNV { + VkStructureType sType; + const void* pNext; + void* pQueriedLowLatencyData; +} VkQueryLowLatencySupportNV; + + + +// VK_EXT_descriptor_buffer is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_descriptor_buffer 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR) +#define VK_EXT_DESCRIPTOR_BUFFER_SPEC_VERSION 1 +#define VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME "VK_EXT_descriptor_buffer" +typedef struct VkPhysicalDeviceDescriptorBufferPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 combinedImageSamplerDescriptorSingleArray; + VkBool32 bufferlessPushDescriptors; + VkBool32 allowSamplerImageViewPostSubmitCreation; + VkDeviceSize descriptorBufferOffsetAlignment; + uint32_t maxDescriptorBufferBindings; + uint32_t maxResourceDescriptorBufferBindings; + uint32_t maxSamplerDescriptorBufferBindings; + uint32_t maxEmbeddedImmutableSamplerBindings; + uint32_t maxEmbeddedImmutableSamplers; + size_t bufferCaptureReplayDescriptorDataSize; + size_t imageCaptureReplayDescriptorDataSize; + size_t imageViewCaptureReplayDescriptorDataSize; + size_t samplerCaptureReplayDescriptorDataSize; + size_t accelerationStructureCaptureReplayDescriptorDataSize; + size_t samplerDescriptorSize; + size_t combinedImageSamplerDescriptorSize; + size_t sampledImageDescriptorSize; + size_t storageImageDescriptorSize; + size_t uniformTexelBufferDescriptorSize; + size_t robustUniformTexelBufferDescriptorSize; + size_t storageTexelBufferDescriptorSize; + size_t robustStorageTexelBufferDescriptorSize; + size_t uniformBufferDescriptorSize; + size_t robustUniformBufferDescriptorSize; + size_t storageBufferDescriptorSize; + size_t robustStorageBufferDescriptorSize; + size_t inputAttachmentDescriptorSize; + size_t accelerationStructureDescriptorSize; + VkDeviceSize maxSamplerDescriptorBufferRange; + VkDeviceSize maxResourceDescriptorBufferRange; + VkDeviceSize samplerDescriptorBufferAddressSpaceSize; + VkDeviceSize resourceDescriptorBufferAddressSpaceSize; + VkDeviceSize descriptorBufferAddressSpaceSize; +} VkPhysicalDeviceDescriptorBufferPropertiesEXT; + +typedef struct VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT { + VkStructureType sType; + void* pNext; + size_t combinedImageSamplerDensityMapDescriptorSize; +} VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + +typedef struct VkPhysicalDeviceDescriptorBufferFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 descriptorBuffer; + VkBool32 descriptorBufferCaptureReplay; + VkBool32 descriptorBufferImageLayoutIgnored; + VkBool32 descriptorBufferPushDescriptors; +} VkPhysicalDeviceDescriptorBufferFeaturesEXT; + +typedef struct VkDescriptorAddressInfoEXT { + VkStructureType sType; + void* pNext; + VkDeviceAddress address; + VkDeviceSize range; 
+ VkFormat format; +} VkDescriptorAddressInfoEXT; + +typedef struct VkDescriptorBufferBindingInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceAddress address; + VkBufferUsageFlags usage; +} VkDescriptorBufferBindingInfoEXT; + +typedef struct VkDescriptorBufferBindingPushDescriptorBufferHandleEXT { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; +} VkDescriptorBufferBindingPushDescriptorBufferHandleEXT; + +typedef union VkDescriptorDataEXT { + const VkSampler* pSampler; + const VkDescriptorImageInfo* pCombinedImageSampler; + const VkDescriptorImageInfo* pInputAttachmentImage; + const VkDescriptorImageInfo* pSampledImage; + const VkDescriptorImageInfo* pStorageImage; + const VkDescriptorAddressInfoEXT* pUniformTexelBuffer; + const VkDescriptorAddressInfoEXT* pStorageTexelBuffer; + const VkDescriptorAddressInfoEXT* pUniformBuffer; + const VkDescriptorAddressInfoEXT* pStorageBuffer; + VkDeviceAddress accelerationStructure; +} VkDescriptorDataEXT; + +typedef struct VkDescriptorGetInfoEXT { + VkStructureType sType; + const void* pNext; + VkDescriptorType type; + VkDescriptorDataEXT data; +} VkDescriptorGetInfoEXT; + +typedef struct VkBufferCaptureDescriptorDataInfoEXT { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; +} VkBufferCaptureDescriptorDataInfoEXT; + +typedef struct VkImageCaptureDescriptorDataInfoEXT { + VkStructureType sType; + const void* pNext; + VkImage image; +} VkImageCaptureDescriptorDataInfoEXT; + +typedef struct VkImageViewCaptureDescriptorDataInfoEXT { + VkStructureType sType; + const void* pNext; + VkImageView imageView; +} VkImageViewCaptureDescriptorDataInfoEXT; + +typedef struct VkSamplerCaptureDescriptorDataInfoEXT { + VkStructureType sType; + const void* pNext; + VkSampler sampler; +} VkSamplerCaptureDescriptorDataInfoEXT; + +typedef struct VkOpaqueCaptureDescriptorDataCreateInfoEXT { + VkStructureType sType; + const void* pNext; + const void* opaqueCaptureDescriptorData; +} VkOpaqueCaptureDescriptorDataCreateInfoEXT; + +typedef struct VkAccelerationStructureCaptureDescriptorDataInfoEXT { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR accelerationStructure; + VkAccelerationStructureNV accelerationStructureNV; +} VkAccelerationStructureCaptureDescriptorDataInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSizeEXT)(VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize* pLayoutSizeInBytes); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutBindingOffsetEXT)(VkDevice device, VkDescriptorSetLayout layout, uint32_t binding, VkDeviceSize* pOffset); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorEXT)(VkDevice device, const VkDescriptorGetInfoEXT* pDescriptorInfo, size_t dataSize, void* pDescriptor); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorBuffersEXT)(VkCommandBuffer commandBuffer, uint32_t bufferCount, const VkDescriptorBufferBindingInfoEXT* pBindingInfos); +typedef void (VKAPI_PTR *PFN_vkCmdSetDescriptorBufferOffsetsEXT)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const uint32_t* pBufferIndices, const VkDeviceSize* pOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set); +typedef VkResult (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT* pInfo, void* pData); +typedef 
VkResult (VKAPI_PTR *PFN_vkGetImageOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkImageCaptureDescriptorDataInfoEXT* pInfo, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkImageViewCaptureDescriptorDataInfoEXT* pInfo, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkSamplerCaptureDescriptorDataInfoEXT* pInfo, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo, void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSizeEXT( + VkDevice device, + VkDescriptorSetLayout layout, + VkDeviceSize* pLayoutSizeInBytes); + +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutBindingOffsetEXT( + VkDevice device, + VkDescriptorSetLayout layout, + uint32_t binding, + VkDeviceSize* pOffset); + +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorEXT( + VkDevice device, + const VkDescriptorGetInfoEXT* pDescriptorInfo, + size_t dataSize, + void* pDescriptor); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorBuffersEXT( + VkCommandBuffer commandBuffer, + uint32_t bufferCount, + const VkDescriptorBufferBindingInfoEXT* pBindingInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDescriptorBufferOffsetsEXT( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t setCount, + const uint32_t* pBufferIndices, + const VkDeviceSize* pOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorBufferEmbeddedSamplersEXT( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetBufferOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkBufferCaptureDescriptorDataInfoEXT* pInfo, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkImageCaptureDescriptorDataInfoEXT* pInfo, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkImageViewCaptureDescriptorDataInfoEXT* pInfo, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSamplerOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkSamplerCaptureDescriptorDataInfoEXT* pInfo, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo, + void* pData); +#endif + + +// VK_EXT_graphics_pipeline_library is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_graphics_pipeline_library 1 +#define VK_EXT_GRAPHICS_PIPELINE_LIBRARY_SPEC_VERSION 1 +#define VK_EXT_GRAPHICS_PIPELINE_LIBRARY_EXTENSION_NAME "VK_EXT_graphics_pipeline_library" + +typedef enum VkGraphicsPipelineLibraryFlagBitsEXT { + VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT = 0x00000001, + VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT = 0x00000002, + VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT = 0x00000004, + VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT = 0x00000008, + VK_GRAPHICS_PIPELINE_LIBRARY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkGraphicsPipelineLibraryFlagBitsEXT; +typedef VkFlags VkGraphicsPipelineLibraryFlagsEXT; +typedef struct VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 graphicsPipelineLibrary; +} VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + +typedef struct VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 graphicsPipelineLibraryFastLinking; + VkBool32 graphicsPipelineLibraryIndependentInterpolationDecoration; +} VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; + +typedef struct VkGraphicsPipelineLibraryCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkGraphicsPipelineLibraryFlagsEXT flags; +} VkGraphicsPipelineLibraryCreateInfoEXT; + + + +// VK_AMD_shader_early_and_late_fragment_tests is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_early_and_late_fragment_tests 1 +#define VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_SPEC_VERSION 1 +#define VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_EXTENSION_NAME "VK_AMD_shader_early_and_late_fragment_tests" +typedef struct VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD { + VkStructureType sType; + void* pNext; + VkBool32 shaderEarlyAndLateFragmentTests; +} VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + + + +// VK_NV_fragment_shading_rate_enums is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_fragment_shading_rate_enums 1 +#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_EXTENSION_NAME "VK_NV_fragment_shading_rate_enums" + +typedef enum VkFragmentShadingRateTypeNV { + VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV = 0, + VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV = 1, + VK_FRAGMENT_SHADING_RATE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkFragmentShadingRateTypeNV; + +typedef enum VkFragmentShadingRateNV { + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV = 0, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV = 1, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV = 4, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV = 5, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV = 6, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV = 9, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV = 10, + VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV = 11, + VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV = 12, + VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV = 13, + VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV = 14, + VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV = 15, + VK_FRAGMENT_SHADING_RATE_MAX_ENUM_NV = 0x7FFFFFFF +} VkFragmentShadingRateNV; +typedef struct VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 fragmentShadingRateEnums; + VkBool32 supersampleFragmentShadingRates; + VkBool32 noInvocationFragmentShadingRates; +} VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + +typedef struct VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV { + VkStructureType sType; + void* pNext; + VkSampleCountFlagBits maxFragmentShadingRateInvocationCount; +} VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + +typedef struct VkPipelineFragmentShadingRateEnumStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkFragmentShadingRateTypeNV shadingRateType; + VkFragmentShadingRateNV shadingRate; + VkFragmentShadingRateCombinerOpKHR combinerOps[2]; +} VkPipelineFragmentShadingRateEnumStateCreateInfoNV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateEnumNV)(VkCommandBuffer commandBuffer, VkFragmentShadingRateNV shadingRate, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateEnumNV( + VkCommandBuffer commandBuffer, + VkFragmentShadingRateNV shadingRate, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); +#endif + + +// VK_NV_ray_tracing_motion_blur is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_ray_tracing_motion_blur 1 +#define VK_NV_RAY_TRACING_MOTION_BLUR_SPEC_VERSION 1 +#define VK_NV_RAY_TRACING_MOTION_BLUR_EXTENSION_NAME "VK_NV_ray_tracing_motion_blur" + +typedef enum VkAccelerationStructureMotionInstanceTypeNV { + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_STATIC_NV = 0, + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MATRIX_MOTION_NV = 1, + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_SRT_MOTION_NV = 2, + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkAccelerationStructureMotionInstanceTypeNV; +typedef VkFlags VkAccelerationStructureMotionInfoFlagsNV; +typedef VkFlags VkAccelerationStructureMotionInstanceFlagsNV; +typedef union VkDeviceOrHostAddressConstKHR { + VkDeviceAddress deviceAddress; + const void* hostAddress; +} VkDeviceOrHostAddressConstKHR; + +typedef struct VkAccelerationStructureGeometryMotionTrianglesDataNV { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR vertexData; +} VkAccelerationStructureGeometryMotionTrianglesDataNV; + +typedef struct VkAccelerationStructureMotionInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t maxInstances; + VkAccelerationStructureMotionInfoFlagsNV flags; +} VkAccelerationStructureMotionInfoNV; + +typedef struct VkAccelerationStructureMatrixMotionInstanceNV { + VkTransformMatrixKHR transformT0; + VkTransformMatrixKHR transformT1; + uint32_t instanceCustomIndex:24; + uint32_t mask:8; + uint32_t instanceShaderBindingTableRecordOffset:24; + VkGeometryInstanceFlagsKHR flags:8; + uint64_t accelerationStructureReference; +} VkAccelerationStructureMatrixMotionInstanceNV; + +typedef struct VkSRTDataNV { + float sx; + float a; + float b; + float pvx; + float sy; + float c; + float pvy; + float sz; + float pvz; + float qx; + float qy; + float qz; + float qw; + float tx; + float ty; + float tz; +} VkSRTDataNV; + +typedef struct VkAccelerationStructureSRTMotionInstanceNV { + VkSRTDataNV transformT0; + VkSRTDataNV transformT1; + uint32_t instanceCustomIndex:24; + uint32_t mask:8; + uint32_t instanceShaderBindingTableRecordOffset:24; + VkGeometryInstanceFlagsKHR flags:8; + uint64_t accelerationStructureReference; +} VkAccelerationStructureSRTMotionInstanceNV; + +typedef union VkAccelerationStructureMotionInstanceDataNV { + VkAccelerationStructureInstanceKHR staticInstance; + VkAccelerationStructureMatrixMotionInstanceNV matrixMotionInstance; + VkAccelerationStructureSRTMotionInstanceNV srtMotionInstance; +} VkAccelerationStructureMotionInstanceDataNV; + +typedef struct VkAccelerationStructureMotionInstanceNV { + VkAccelerationStructureMotionInstanceTypeNV type; + VkAccelerationStructureMotionInstanceFlagsNV flags; + VkAccelerationStructureMotionInstanceDataNV data; +} VkAccelerationStructureMotionInstanceNV; + +typedef struct VkPhysicalDeviceRayTracingMotionBlurFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingMotionBlur; + VkBool32 rayTracingMotionBlurPipelineTraceRaysIndirect; +} VkPhysicalDeviceRayTracingMotionBlurFeaturesNV; + + + +// VK_EXT_ycbcr_2plane_444_formats is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_ycbcr_2plane_444_formats 1 +#define VK_EXT_YCBCR_2PLANE_444_FORMATS_SPEC_VERSION 1 +#define VK_EXT_YCBCR_2PLANE_444_FORMATS_EXTENSION_NAME "VK_EXT_ycbcr_2plane_444_formats" +typedef struct VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 ycbcr2plane444Formats; +} VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + + + +// VK_EXT_fragment_density_map2 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_fragment_density_map2 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_2_SPEC_VERSION 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_2_EXTENSION_NAME "VK_EXT_fragment_density_map2" +typedef struct VkPhysicalDeviceFragmentDensityMap2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 fragmentDensityMapDeferred; +} VkPhysicalDeviceFragmentDensityMap2FeaturesEXT; + +typedef struct VkPhysicalDeviceFragmentDensityMap2PropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 subsampledLoads; + VkBool32 subsampledCoarseReconstructionEarlyAccess; + uint32_t maxSubsampledArrayLayers; + uint32_t maxDescriptorSetSubsampledSamplers; +} VkPhysicalDeviceFragmentDensityMap2PropertiesEXT; + + + +// VK_QCOM_rotated_copy_commands is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_rotated_copy_commands 1 +#define VK_QCOM_ROTATED_COPY_COMMANDS_SPEC_VERSION 2 +#define VK_QCOM_ROTATED_COPY_COMMANDS_EXTENSION_NAME "VK_QCOM_rotated_copy_commands" +typedef struct VkCopyCommandTransformInfoQCOM { + VkStructureType sType; + const void* pNext; + VkSurfaceTransformFlagBitsKHR transform; +} VkCopyCommandTransformInfoQCOM; + + + +// VK_EXT_image_robustness is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_image_robustness 1 +#define VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION 1 +#define VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_image_robustness" +typedef VkPhysicalDeviceImageRobustnessFeatures VkPhysicalDeviceImageRobustnessFeaturesEXT; + + + +// VK_EXT_image_compression_control is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_image_compression_control 1 +#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SPEC_VERSION 1 +#define VK_EXT_IMAGE_COMPRESSION_CONTROL_EXTENSION_NAME "VK_EXT_image_compression_control" + +typedef enum VkImageCompressionFlagBitsEXT { + VK_IMAGE_COMPRESSION_DEFAULT_EXT = 0, + VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT = 0x00000001, + VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT = 0x00000002, + VK_IMAGE_COMPRESSION_DISABLED_EXT = 0x00000004, + VK_IMAGE_COMPRESSION_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkImageCompressionFlagBitsEXT; +typedef VkFlags VkImageCompressionFlagsEXT; + +typedef enum VkImageCompressionFixedRateFlagBitsEXT { + VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT = 0, + VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT = 0x00000001, + VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT = 0x00000002, + VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT = 0x00000004, + VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT = 0x00000008, + VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT = 0x00000010, + VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT = 0x00000020, + VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT = 0x00000040, + VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT = 0x00000080, + VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT = 0x00000100, + VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT = 0x00000200, + VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT = 0x00000400, + VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT = 0x00000800, + VK_IMAGE_COMPRESSION_FIXED_RATE_13BPC_BIT_EXT = 0x00001000, + VK_IMAGE_COMPRESSION_FIXED_RATE_14BPC_BIT_EXT = 0x00002000, + VK_IMAGE_COMPRESSION_FIXED_RATE_15BPC_BIT_EXT = 0x00004000, + VK_IMAGE_COMPRESSION_FIXED_RATE_16BPC_BIT_EXT = 0x00008000, + VK_IMAGE_COMPRESSION_FIXED_RATE_17BPC_BIT_EXT = 0x00010000, + VK_IMAGE_COMPRESSION_FIXED_RATE_18BPC_BIT_EXT = 0x00020000, + VK_IMAGE_COMPRESSION_FIXED_RATE_19BPC_BIT_EXT = 0x00040000, + VK_IMAGE_COMPRESSION_FIXED_RATE_20BPC_BIT_EXT = 0x00080000, + VK_IMAGE_COMPRESSION_FIXED_RATE_21BPC_BIT_EXT = 0x00100000, + VK_IMAGE_COMPRESSION_FIXED_RATE_22BPC_BIT_EXT = 0x00200000, + VK_IMAGE_COMPRESSION_FIXED_RATE_23BPC_BIT_EXT = 0x00400000, + VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT = 0x00800000, + VK_IMAGE_COMPRESSION_FIXED_RATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkImageCompressionFixedRateFlagBitsEXT; +typedef VkFlags VkImageCompressionFixedRateFlagsEXT; +typedef struct VkPhysicalDeviceImageCompressionControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 imageCompressionControl; +} VkPhysicalDeviceImageCompressionControlFeaturesEXT; + +typedef struct VkImageCompressionControlEXT { + VkStructureType sType; + const void* pNext; + VkImageCompressionFlagsEXT flags; + uint32_t compressionControlPlaneCount; + VkImageCompressionFixedRateFlagsEXT* pFixedRateFlags; +} VkImageCompressionControlEXT; + +typedef struct VkImageCompressionPropertiesEXT { + VkStructureType sType; + void* pNext; + VkImageCompressionFlagsEXT imageCompressionFlags; + VkImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags; +} VkImageCompressionPropertiesEXT; + + + +// VK_EXT_attachment_feedback_loop_layout is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_attachment_feedback_loop_layout 1 +#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_SPEC_VERSION 2 +#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_EXTENSION_NAME "VK_EXT_attachment_feedback_loop_layout" +typedef struct VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 attachmentFeedbackLoopLayout; +} VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; + + + +// VK_EXT_4444_formats is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_4444_formats 1 +#define VK_EXT_4444_FORMATS_SPEC_VERSION 1 +#define VK_EXT_4444_FORMATS_EXTENSION_NAME "VK_EXT_4444_formats" +typedef struct VkPhysicalDevice4444FormatsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 formatA4R4G4B4; + VkBool32 formatA4B4G4R4; +} VkPhysicalDevice4444FormatsFeaturesEXT; + + + +// VK_EXT_device_fault is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_device_fault 1 +#define VK_EXT_DEVICE_FAULT_SPEC_VERSION 2 +#define VK_EXT_DEVICE_FAULT_EXTENSION_NAME "VK_EXT_device_fault" + +typedef enum VkDeviceFaultAddressTypeEXT { + VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT = 0, + VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT = 1, + VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT = 2, + VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT = 3, + VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT = 4, + VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT = 5, + VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT = 6, + VK_DEVICE_FAULT_ADDRESS_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceFaultAddressTypeEXT; + +typedef enum VkDeviceFaultVendorBinaryHeaderVersionEXT { + VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT = 1, + VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceFaultVendorBinaryHeaderVersionEXT; +typedef struct VkPhysicalDeviceFaultFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 deviceFault; + VkBool32 deviceFaultVendorBinary; +} VkPhysicalDeviceFaultFeaturesEXT; + +typedef struct VkDeviceFaultCountsEXT { + VkStructureType sType; + void* pNext; + uint32_t addressInfoCount; + uint32_t vendorInfoCount; + VkDeviceSize vendorBinarySize; +} VkDeviceFaultCountsEXT; + +typedef struct VkDeviceFaultAddressInfoEXT { + VkDeviceFaultAddressTypeEXT addressType; + VkDeviceAddress reportedAddress; + VkDeviceSize addressPrecision; +} VkDeviceFaultAddressInfoEXT; + +typedef struct VkDeviceFaultVendorInfoEXT { + char description[VK_MAX_DESCRIPTION_SIZE]; + uint64_t vendorFaultCode; + uint64_t vendorFaultData; +} VkDeviceFaultVendorInfoEXT; + +typedef struct VkDeviceFaultInfoEXT { + VkStructureType sType; + void* pNext; + char description[VK_MAX_DESCRIPTION_SIZE]; + VkDeviceFaultAddressInfoEXT* pAddressInfos; + VkDeviceFaultVendorInfoEXT* pVendorInfos; + void* pVendorBinaryData; +} VkDeviceFaultInfoEXT; + +typedef struct VkDeviceFaultVendorBinaryHeaderVersionOneEXT { + uint32_t headerSize; + VkDeviceFaultVendorBinaryHeaderVersionEXT headerVersion; + uint32_t vendorID; + uint32_t deviceID; + uint32_t driverVersion; + uint8_t pipelineCacheUUID[VK_UUID_SIZE]; + uint32_t applicationNameOffset; + uint32_t applicationVersion; + uint32_t engineNameOffset; + uint32_t engineVersion; + uint32_t apiVersion; +} VkDeviceFaultVendorBinaryHeaderVersionOneEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceFaultInfoEXT)(VkDevice device, VkDeviceFaultCountsEXT* pFaultCounts, VkDeviceFaultInfoEXT* pFaultInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL 
vkGetDeviceFaultInfoEXT( + VkDevice device, + VkDeviceFaultCountsEXT* pFaultCounts, + VkDeviceFaultInfoEXT* pFaultInfo); +#endif + + +// VK_ARM_rasterization_order_attachment_access is a preprocessor guard. Do not pass it to API calls. +#define VK_ARM_rasterization_order_attachment_access 1 +#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1 +#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_ARM_rasterization_order_attachment_access" +typedef struct VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 rasterizationOrderColorAttachmentAccess; + VkBool32 rasterizationOrderDepthAttachmentAccess; + VkBool32 rasterizationOrderStencilAttachmentAccess; +} VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + +typedef VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM; + + + +// VK_EXT_rgba10x6_formats is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_rgba10x6_formats 1 +#define VK_EXT_RGBA10X6_FORMATS_SPEC_VERSION 1 +#define VK_EXT_RGBA10X6_FORMATS_EXTENSION_NAME "VK_EXT_rgba10x6_formats" +typedef struct VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 formatRgba10x6WithoutYCbCrSampler; +} VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT; + + + +// VK_VALVE_mutable_descriptor_type is a preprocessor guard. Do not pass it to API calls. +#define VK_VALVE_mutable_descriptor_type 1 +#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1 +#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_VALVE_mutable_descriptor_type" +typedef struct VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 mutableDescriptorType; +} VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT; + +typedef VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE; + +typedef struct VkMutableDescriptorTypeListEXT { + uint32_t descriptorTypeCount; + const VkDescriptorType* pDescriptorTypes; +} VkMutableDescriptorTypeListEXT; + +typedef VkMutableDescriptorTypeListEXT VkMutableDescriptorTypeListVALVE; + +typedef struct VkMutableDescriptorTypeCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t mutableDescriptorTypeListCount; + const VkMutableDescriptorTypeListEXT* pMutableDescriptorTypeLists; +} VkMutableDescriptorTypeCreateInfoEXT; + +typedef VkMutableDescriptorTypeCreateInfoEXT VkMutableDescriptorTypeCreateInfoVALVE; + + + +// VK_EXT_vertex_input_dynamic_state is a preprocessor guard. Do not pass it to API calls. 
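+// A minimal, illustrative usage sketch for the declarations below, assuming `cmd` is
+// a VkCommandBuffer in the recording state, the vertexInputDynamicState feature is
+// enabled, and the bound pipeline uses VK_DYNAMIC_STATE_VERTEX_INPUT_EXT:
+//
+//     VkVertexInputBindingDescription2EXT binding = {
+//         .sType     = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT,
+//         .binding   = 0,
+//         .stride    = sizeof(float) * 3,            // tightly packed vec3 positions
+//         .inputRate = VK_VERTEX_INPUT_RATE_VERTEX,
+//         .divisor   = 1,
+//     };
+//     VkVertexInputAttributeDescription2EXT attribute = {
+//         .sType    = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT,
+//         .location = 0,
+//         .binding  = 0,
+//         .format   = VK_FORMAT_R32G32B32_SFLOAT,
+//         .offset   = 0,
+//     };
+//     vkCmdSetVertexInputEXT(cmd, 1, &binding, 1, &attribute);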
+#define VK_EXT_vertex_input_dynamic_state 1 +#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_SPEC_VERSION 2 +#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_vertex_input_dynamic_state" +typedef struct VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 vertexInputDynamicState; +} VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT; + +typedef struct VkVertexInputBindingDescription2EXT { + VkStructureType sType; + void* pNext; + uint32_t binding; + uint32_t stride; + VkVertexInputRate inputRate; + uint32_t divisor; +} VkVertexInputBindingDescription2EXT; + +typedef struct VkVertexInputAttributeDescription2EXT { + VkStructureType sType; + void* pNext; + uint32_t location; + uint32_t binding; + VkFormat format; + uint32_t offset; +} VkVertexInputAttributeDescription2EXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetVertexInputEXT)(VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount, const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetVertexInputEXT( + VkCommandBuffer commandBuffer, + uint32_t vertexBindingDescriptionCount, + const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions); +#endif + + +// VK_EXT_physical_device_drm is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_physical_device_drm 1 +#define VK_EXT_PHYSICAL_DEVICE_DRM_SPEC_VERSION 1 +#define VK_EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME "VK_EXT_physical_device_drm" +typedef struct VkPhysicalDeviceDrmPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 hasPrimary; + VkBool32 hasRender; + int64_t primaryMajor; + int64_t primaryMinor; + int64_t renderMajor; + int64_t renderMinor; +} VkPhysicalDeviceDrmPropertiesEXT; + + + +// VK_EXT_device_address_binding_report is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_device_address_binding_report 1 +#define VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_SPEC_VERSION 1 +#define VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_EXTENSION_NAME "VK_EXT_device_address_binding_report" + +typedef enum VkDeviceAddressBindingTypeEXT { + VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT = 0, + VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT = 1, + VK_DEVICE_ADDRESS_BINDING_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceAddressBindingTypeEXT; + +typedef enum VkDeviceAddressBindingFlagBitsEXT { + VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT = 0x00000001, + VK_DEVICE_ADDRESS_BINDING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceAddressBindingFlagBitsEXT; +typedef VkFlags VkDeviceAddressBindingFlagsEXT; +typedef struct VkPhysicalDeviceAddressBindingReportFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 reportAddressBinding; +} VkPhysicalDeviceAddressBindingReportFeaturesEXT; + +typedef struct VkDeviceAddressBindingCallbackDataEXT { + VkStructureType sType; + void* pNext; + VkDeviceAddressBindingFlagsEXT flags; + VkDeviceAddress baseAddress; + VkDeviceSize size; + VkDeviceAddressBindingTypeEXT bindingType; +} VkDeviceAddressBindingCallbackDataEXT; + + + +// VK_EXT_depth_clip_control is a preprocessor guard. Do not pass it to API calls. 
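+// A minimal, illustrative sketch of how the create-info below is chained when
+// building a graphics pipeline, assuming the depthClipControl feature is enabled:
+//
+//     VkPipelineViewportDepthClipControlCreateInfoEXT depthClipControl = {
+//         .sType            = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT,
+//         .negativeOneToOne = VK_TRUE,   // OpenGL-style [-1, 1] clip-space depth
+//     };
+//     VkPipelineViewportStateCreateInfo viewportState = {
+//         .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
+//         .pNext = &depthClipControl,
+//         // viewport/scissor members as usual
+//     };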
+#define VK_EXT_depth_clip_control 1 +#define VK_EXT_DEPTH_CLIP_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME "VK_EXT_depth_clip_control" +typedef struct VkPhysicalDeviceDepthClipControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthClipControl; +} VkPhysicalDeviceDepthClipControlFeaturesEXT; + +typedef struct VkPipelineViewportDepthClipControlCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 negativeOneToOne; +} VkPipelineViewportDepthClipControlCreateInfoEXT; + + + +// VK_EXT_primitive_topology_list_restart is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_primitive_topology_list_restart 1 +#define VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_SPEC_VERSION 1 +#define VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME "VK_EXT_primitive_topology_list_restart" +typedef struct VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 primitiveTopologyListRestart; + VkBool32 primitiveTopologyPatchListRestart; +} VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + + + +// VK_EXT_present_mode_fifo_latest_ready is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_present_mode_fifo_latest_ready 1 +#define VK_EXT_PRESENT_MODE_FIFO_LATEST_READY_SPEC_VERSION 1 +#define VK_EXT_PRESENT_MODE_FIFO_LATEST_READY_EXTENSION_NAME "VK_EXT_present_mode_fifo_latest_ready" +typedef struct VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 presentModeFifoLatestReady; +} VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; + + + +// VK_HUAWEI_subpass_shading is a preprocessor guard. Do not pass it to API calls. +#define VK_HUAWEI_subpass_shading 1 +#define VK_HUAWEI_SUBPASS_SHADING_SPEC_VERSION 3 +#define VK_HUAWEI_SUBPASS_SHADING_EXTENSION_NAME "VK_HUAWEI_subpass_shading" +typedef struct VkSubpassShadingPipelineCreateInfoHUAWEI { + VkStructureType sType; + void* pNext; + VkRenderPass renderPass; + uint32_t subpass; +} VkSubpassShadingPipelineCreateInfoHUAWEI; + +typedef struct VkPhysicalDeviceSubpassShadingFeaturesHUAWEI { + VkStructureType sType; + void* pNext; + VkBool32 subpassShading; +} VkPhysicalDeviceSubpassShadingFeaturesHUAWEI; + +typedef struct VkPhysicalDeviceSubpassShadingPropertiesHUAWEI { + VkStructureType sType; + void* pNext; + uint32_t maxSubpassShadingWorkgroupSizeAspectRatio; +} VkPhysicalDeviceSubpassShadingPropertiesHUAWEI; + +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI)(VkDevice device, VkRenderPass renderpass, VkExtent2D* pMaxWorkgroupSize); +typedef void (VKAPI_PTR *PFN_vkCmdSubpassShadingHUAWEI)(VkCommandBuffer commandBuffer); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( + VkDevice device, + VkRenderPass renderpass, + VkExtent2D* pMaxWorkgroupSize); + +VKAPI_ATTR void VKAPI_CALL vkCmdSubpassShadingHUAWEI( + VkCommandBuffer commandBuffer); +#endif + + +// VK_HUAWEI_invocation_mask is a preprocessor guard. Do not pass it to API calls. 
+#define VK_HUAWEI_invocation_mask 1 +#define VK_HUAWEI_INVOCATION_MASK_SPEC_VERSION 1 +#define VK_HUAWEI_INVOCATION_MASK_EXTENSION_NAME "VK_HUAWEI_invocation_mask" +typedef struct VkPhysicalDeviceInvocationMaskFeaturesHUAWEI { + VkStructureType sType; + void* pNext; + VkBool32 invocationMask; +} VkPhysicalDeviceInvocationMaskFeaturesHUAWEI; + +typedef void (VKAPI_PTR *PFN_vkCmdBindInvocationMaskHUAWEI)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindInvocationMaskHUAWEI( + VkCommandBuffer commandBuffer, + VkImageView imageView, + VkImageLayout imageLayout); +#endif + + +// VK_NV_external_memory_rdma is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_external_memory_rdma 1 +typedef void* VkRemoteAddressNV; +#define VK_NV_EXTERNAL_MEMORY_RDMA_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_RDMA_EXTENSION_NAME "VK_NV_external_memory_rdma" +typedef struct VkMemoryGetRemoteAddressInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetRemoteAddressInfoNV; + +typedef struct VkPhysicalDeviceExternalMemoryRDMAFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 externalMemoryRDMA; +} VkPhysicalDeviceExternalMemoryRDMAFeaturesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryRemoteAddressNV)(VkDevice device, const VkMemoryGetRemoteAddressInfoNV* pMemoryGetRemoteAddressInfo, VkRemoteAddressNV* pAddress); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryRemoteAddressNV( + VkDevice device, + const VkMemoryGetRemoteAddressInfoNV* pMemoryGetRemoteAddressInfo, + VkRemoteAddressNV* pAddress); +#endif + + +// VK_EXT_pipeline_properties is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_properties 1 +#define VK_EXT_PIPELINE_PROPERTIES_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_PROPERTIES_EXTENSION_NAME "VK_EXT_pipeline_properties" +typedef VkPipelineInfoKHR VkPipelineInfoEXT; + +typedef struct VkPipelinePropertiesIdentifierEXT { + VkStructureType sType; + void* pNext; + uint8_t pipelineIdentifier[VK_UUID_SIZE]; +} VkPipelinePropertiesIdentifierEXT; + +typedef struct VkPhysicalDevicePipelinePropertiesFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 pipelinePropertiesIdentifier; +} VkPhysicalDevicePipelinePropertiesFeaturesEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelinePropertiesEXT)(VkDevice device, const VkPipelineInfoEXT* pPipelineInfo, VkBaseOutStructure* pPipelineProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelinePropertiesEXT( + VkDevice device, + const VkPipelineInfoEXT* pPipelineInfo, + VkBaseOutStructure* pPipelineProperties); +#endif + + +// VK_EXT_frame_boundary is a preprocessor guard. Do not pass it to API calls. 
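+// A minimal, illustrative sketch of marking a frame boundary for tools, assuming
+// `queue` is a valid VkQueue, `swapchainImage` is the image presented this frame,
+// `frameIndex` is a hypothetical per-frame counter, and the frameBoundary feature is
+// enabled:
+//
+//     VkFrameBoundaryEXT frameBoundary = {
+//         .sType      = VK_STRUCTURE_TYPE_FRAME_BOUNDARY_EXT,
+//         .flags      = VK_FRAME_BOUNDARY_FRAME_END_BIT_EXT,
+//         .frameID    = frameIndex,
+//         .imageCount = 1,
+//         .pImages    = &swapchainImage,
+//     };
+//     VkSubmitInfo2 submit = {
+//         .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO_2,
+//         .pNext = &frameBoundary,
+//         // command-buffer and semaphore submit infos as usual
+//     };
+//     vkQueueSubmit2(queue, 1, &submit, VK_NULL_HANDLE);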
+#define VK_EXT_frame_boundary 1 +#define VK_EXT_FRAME_BOUNDARY_SPEC_VERSION 1 +#define VK_EXT_FRAME_BOUNDARY_EXTENSION_NAME "VK_EXT_frame_boundary" + +typedef enum VkFrameBoundaryFlagBitsEXT { + VK_FRAME_BOUNDARY_FRAME_END_BIT_EXT = 0x00000001, + VK_FRAME_BOUNDARY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkFrameBoundaryFlagBitsEXT; +typedef VkFlags VkFrameBoundaryFlagsEXT; +typedef struct VkPhysicalDeviceFrameBoundaryFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 frameBoundary; +} VkPhysicalDeviceFrameBoundaryFeaturesEXT; + +typedef struct VkFrameBoundaryEXT { + VkStructureType sType; + const void* pNext; + VkFrameBoundaryFlagsEXT flags; + uint64_t frameID; + uint32_t imageCount; + const VkImage* pImages; + uint32_t bufferCount; + const VkBuffer* pBuffers; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkFrameBoundaryEXT; + + + +// VK_EXT_multisampled_render_to_single_sampled is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_multisampled_render_to_single_sampled 1 +#define VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_SPEC_VERSION 1 +#define VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXTENSION_NAME "VK_EXT_multisampled_render_to_single_sampled" +typedef struct VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 multisampledRenderToSingleSampled; +} VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + +typedef struct VkSubpassResolvePerformanceQueryEXT { + VkStructureType sType; + void* pNext; + VkBool32 optimal; +} VkSubpassResolvePerformanceQueryEXT; + +typedef struct VkMultisampledRenderToSingleSampledInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 multisampledRenderToSingleSampledEnable; + VkSampleCountFlagBits rasterizationSamples; +} VkMultisampledRenderToSingleSampledInfoEXT; + + + +// VK_EXT_extended_dynamic_state2 is a preprocessor guard. Do not pass it to API calls. 
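+// A minimal, illustrative sketch of the commands declared below, assuming `cmd` is a
+// VkCommandBuffer in the recording state, the corresponding extendedDynamicState2*
+// features are enabled, and the bound pipeline declared the matching dynamic states:
+//
+//     vkCmdSetRasterizerDiscardEnableEXT(cmd, VK_FALSE);
+//     vkCmdSetDepthBiasEnableEXT(cmd, VK_TRUE);
+//     vkCmdSetPrimitiveRestartEnableEXT(cmd, VK_FALSE);
+//     vkCmdSetPatchControlPointsEXT(cmd, 3);          // requires extendedDynamicState2PatchControlPoints
+//     vkCmdSetLogicOpEXT(cmd, VK_LOGIC_OP_COPY);      // requires extendedDynamicState2LogicOp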
+#define VK_EXT_extended_dynamic_state2 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_SPEC_VERSION 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME "VK_EXT_extended_dynamic_state2" +typedef struct VkPhysicalDeviceExtendedDynamicState2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 extendedDynamicState2; + VkBool32 extendedDynamicState2LogicOp; + VkBool32 extendedDynamicState2PatchControlPoints; +} VkPhysicalDeviceExtendedDynamicState2FeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetPatchControlPointsEXT)(VkCommandBuffer commandBuffer, uint32_t patchControlPoints); +typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizerDiscardEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBiasEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetLogicOpEXT)(VkCommandBuffer commandBuffer, VkLogicOp logicOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveRestartEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetPatchControlPointsEXT( + VkCommandBuffer commandBuffer, + uint32_t patchControlPoints); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizerDiscardEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 rasterizerDiscardEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBiasEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthBiasEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLogicOpEXT( + VkCommandBuffer commandBuffer, + VkLogicOp logicOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 primitiveRestartEnable); +#endif + + +// VK_EXT_color_write_enable is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_color_write_enable 1 +#define VK_EXT_COLOR_WRITE_ENABLE_SPEC_VERSION 1 +#define VK_EXT_COLOR_WRITE_ENABLE_EXTENSION_NAME "VK_EXT_color_write_enable" +typedef struct VkPhysicalDeviceColorWriteEnableFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 colorWriteEnable; +} VkPhysicalDeviceColorWriteEnableFeaturesEXT; + +typedef struct VkPipelineColorWriteCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t attachmentCount; + const VkBool32* pColorWriteEnables; +} VkPipelineColorWriteCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetColorWriteEnableEXT)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32* pColorWriteEnables); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWriteEnableEXT( + VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkBool32* pColorWriteEnables); +#endif + + +// VK_EXT_primitives_generated_query is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_primitives_generated_query 1 +#define VK_EXT_PRIMITIVES_GENERATED_QUERY_SPEC_VERSION 1 +#define VK_EXT_PRIMITIVES_GENERATED_QUERY_EXTENSION_NAME "VK_EXT_primitives_generated_query" +typedef struct VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 primitivesGeneratedQuery; + VkBool32 primitivesGeneratedQueryWithRasterizerDiscard; + VkBool32 primitivesGeneratedQueryWithNonZeroStreams; +} VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; + + + +// VK_EXT_global_priority_query is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_global_priority_query 1 +#define VK_EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION 1 +#define VK_EXT_GLOBAL_PRIORITY_QUERY_EXTENSION_NAME "VK_EXT_global_priority_query" +#define VK_MAX_GLOBAL_PRIORITY_SIZE_EXT VK_MAX_GLOBAL_PRIORITY_SIZE +typedef VkPhysicalDeviceGlobalPriorityQueryFeatures VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT; + +typedef VkQueueFamilyGlobalPriorityProperties VkQueueFamilyGlobalPriorityPropertiesEXT; + + + +// VK_EXT_image_view_min_lod is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_image_view_min_lod 1 +#define VK_EXT_IMAGE_VIEW_MIN_LOD_SPEC_VERSION 1 +#define VK_EXT_IMAGE_VIEW_MIN_LOD_EXTENSION_NAME "VK_EXT_image_view_min_lod" +typedef struct VkPhysicalDeviceImageViewMinLodFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 minLod; +} VkPhysicalDeviceImageViewMinLodFeaturesEXT; + +typedef struct VkImageViewMinLodCreateInfoEXT { + VkStructureType sType; + const void* pNext; + float minLod; +} VkImageViewMinLodCreateInfoEXT; + + + +// VK_EXT_multi_draw is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_multi_draw 1 +#define VK_EXT_MULTI_DRAW_SPEC_VERSION 1 +#define VK_EXT_MULTI_DRAW_EXTENSION_NAME "VK_EXT_multi_draw" +typedef struct VkPhysicalDeviceMultiDrawFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 multiDraw; +} VkPhysicalDeviceMultiDrawFeaturesEXT; + +typedef struct VkPhysicalDeviceMultiDrawPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxMultiDrawCount; +} VkPhysicalDeviceMultiDrawPropertiesEXT; + +typedef struct VkMultiDrawInfoEXT { + uint32_t firstVertex; + uint32_t vertexCount; +} VkMultiDrawInfoEXT; + +typedef struct VkMultiDrawIndexedInfoEXT { + uint32_t firstIndex; + uint32_t indexCount; + int32_t vertexOffset; +} VkMultiDrawIndexedInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawMultiEXT)(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawInfoEXT* pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMultiIndexedEXT)(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawIndexedInfoEXT* pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, const int32_t* pVertexOffset); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMultiEXT( + VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawInfoEXT* pVertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMultiIndexedEXT( + VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawIndexedInfoEXT* pIndexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + const int32_t* pVertexOffset); +#endif + + +// VK_EXT_image_2d_view_of_3d is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_image_2d_view_of_3d 1 +#define VK_EXT_IMAGE_2D_VIEW_OF_3D_SPEC_VERSION 1 +#define VK_EXT_IMAGE_2D_VIEW_OF_3D_EXTENSION_NAME "VK_EXT_image_2d_view_of_3d" +typedef struct VkPhysicalDeviceImage2DViewOf3DFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 image2DViewOf3D; + VkBool32 sampler2DViewOf3D; +} VkPhysicalDeviceImage2DViewOf3DFeaturesEXT; + + + +// VK_EXT_shader_tile_image is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_shader_tile_image 1 +#define VK_EXT_SHADER_TILE_IMAGE_SPEC_VERSION 1 +#define VK_EXT_SHADER_TILE_IMAGE_EXTENSION_NAME "VK_EXT_shader_tile_image" +typedef struct VkPhysicalDeviceShaderTileImageFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderTileImageColorReadAccess; + VkBool32 shaderTileImageDepthReadAccess; + VkBool32 shaderTileImageStencilReadAccess; +} VkPhysicalDeviceShaderTileImageFeaturesEXT; + +typedef struct VkPhysicalDeviceShaderTileImagePropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderTileImageCoherentReadAccelerated; + VkBool32 shaderTileImageReadSampleFromPixelRateInvocation; + VkBool32 shaderTileImageReadFromHelperInvocation; +} VkPhysicalDeviceShaderTileImagePropertiesEXT; + + + +// VK_EXT_opacity_micromap is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_opacity_micromap 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkMicromapEXT) +#define VK_EXT_OPACITY_MICROMAP_SPEC_VERSION 2 +#define VK_EXT_OPACITY_MICROMAP_EXTENSION_NAME "VK_EXT_opacity_micromap" + +typedef enum VkMicromapTypeEXT { + VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT = 0, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_MICROMAP_TYPE_DISPLACEMENT_MICROMAP_NV = 1000397000, +#endif + VK_MICROMAP_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkMicromapTypeEXT; + +typedef enum VkBuildMicromapModeEXT { + VK_BUILD_MICROMAP_MODE_BUILD_EXT = 0, + VK_BUILD_MICROMAP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkBuildMicromapModeEXT; + +typedef enum VkCopyMicromapModeEXT { + VK_COPY_MICROMAP_MODE_CLONE_EXT = 0, + VK_COPY_MICROMAP_MODE_SERIALIZE_EXT = 1, + VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT = 2, + VK_COPY_MICROMAP_MODE_COMPACT_EXT = 3, + VK_COPY_MICROMAP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkCopyMicromapModeEXT; + +typedef enum VkOpacityMicromapFormatEXT { + VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT = 1, + VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT = 2, + VK_OPACITY_MICROMAP_FORMAT_MAX_ENUM_EXT = 0x7FFFFFFF +} VkOpacityMicromapFormatEXT; + +typedef enum VkOpacityMicromapSpecialIndexEXT { + VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT = -1, + VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT = -2, + VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT = -3, + VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT = -4, + VK_OPACITY_MICROMAP_SPECIAL_INDEX_CLUSTER_GEOMETRY_DISABLE_OPACITY_MICROMAP_NV = -5, + VK_OPACITY_MICROMAP_SPECIAL_INDEX_MAX_ENUM_EXT = 0x7FFFFFFF +} VkOpacityMicromapSpecialIndexEXT; + +typedef enum VkAccelerationStructureCompatibilityKHR { + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR = 0, + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR = 1, + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureCompatibilityKHR; + +typedef enum VkAccelerationStructureBuildTypeKHR { + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR = 0, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR = 1, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR = 2, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureBuildTypeKHR; + +typedef enum VkBuildMicromapFlagBitsEXT { + VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT = 0x00000001, + VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT = 0x00000002, + VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT = 0x00000004, + VK_BUILD_MICROMAP_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkBuildMicromapFlagBitsEXT; +typedef VkFlags VkBuildMicromapFlagsEXT; + +typedef enum VkMicromapCreateFlagBitsEXT { + VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = 
0x00000001, + VK_MICROMAP_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkMicromapCreateFlagBitsEXT; +typedef VkFlags VkMicromapCreateFlagsEXT; +typedef struct VkMicromapUsageEXT { + uint32_t count; + uint32_t subdivisionLevel; + uint32_t format; +} VkMicromapUsageEXT; + +typedef union VkDeviceOrHostAddressKHR { + VkDeviceAddress deviceAddress; + void* hostAddress; +} VkDeviceOrHostAddressKHR; + +typedef struct VkMicromapBuildInfoEXT { + VkStructureType sType; + const void* pNext; + VkMicromapTypeEXT type; + VkBuildMicromapFlagsEXT flags; + VkBuildMicromapModeEXT mode; + VkMicromapEXT dstMicromap; + uint32_t usageCountsCount; + const VkMicromapUsageEXT* pUsageCounts; + const VkMicromapUsageEXT* const* ppUsageCounts; + VkDeviceOrHostAddressConstKHR data; + VkDeviceOrHostAddressKHR scratchData; + VkDeviceOrHostAddressConstKHR triangleArray; + VkDeviceSize triangleArrayStride; +} VkMicromapBuildInfoEXT; + +typedef struct VkMicromapCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkMicromapCreateFlagsEXT createFlags; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; + VkMicromapTypeEXT type; + VkDeviceAddress deviceAddress; +} VkMicromapCreateInfoEXT; + +typedef struct VkPhysicalDeviceOpacityMicromapFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 micromap; + VkBool32 micromapCaptureReplay; + VkBool32 micromapHostCommands; +} VkPhysicalDeviceOpacityMicromapFeaturesEXT; + +typedef struct VkPhysicalDeviceOpacityMicromapPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxOpacity2StateSubdivisionLevel; + uint32_t maxOpacity4StateSubdivisionLevel; +} VkPhysicalDeviceOpacityMicromapPropertiesEXT; + +typedef struct VkMicromapVersionInfoEXT { + VkStructureType sType; + const void* pNext; + const uint8_t* pVersionData; +} VkMicromapVersionInfoEXT; + +typedef struct VkCopyMicromapToMemoryInfoEXT { + VkStructureType sType; + const void* pNext; + VkMicromapEXT src; + VkDeviceOrHostAddressKHR dst; + VkCopyMicromapModeEXT mode; +} VkCopyMicromapToMemoryInfoEXT; + +typedef struct VkCopyMemoryToMicromapInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR src; + VkMicromapEXT dst; + VkCopyMicromapModeEXT mode; +} VkCopyMemoryToMicromapInfoEXT; + +typedef struct VkCopyMicromapInfoEXT { + VkStructureType sType; + const void* pNext; + VkMicromapEXT src; + VkMicromapEXT dst; + VkCopyMicromapModeEXT mode; +} VkCopyMicromapInfoEXT; + +typedef struct VkMicromapBuildSizesInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceSize micromapSize; + VkDeviceSize buildScratchSize; + VkBool32 discardable; +} VkMicromapBuildSizesInfoEXT; + +typedef struct VkAccelerationStructureTrianglesOpacityMicromapEXT { + VkStructureType sType; + void* pNext; + VkIndexType indexType; + VkDeviceOrHostAddressConstKHR indexBuffer; + VkDeviceSize indexStride; + uint32_t baseTriangle; + uint32_t usageCountsCount; + const VkMicromapUsageEXT* pUsageCounts; + const VkMicromapUsageEXT* const* ppUsageCounts; + VkMicromapEXT micromap; +} VkAccelerationStructureTrianglesOpacityMicromapEXT; + +typedef struct VkMicromapTriangleEXT { + uint32_t dataOffset; + uint16_t subdivisionLevel; + uint16_t format; +} VkMicromapTriangleEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateMicromapEXT)(VkDevice device, const VkMicromapCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkMicromapEXT* pMicromap); +typedef void (VKAPI_PTR *PFN_vkDestroyMicromapEXT)(VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks* pAllocator); 
+typedef void (VKAPI_PTR *PFN_vkCmdBuildMicromapsEXT)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBuildMicromapsEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMicromapEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMicromapToMemoryEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapToMemoryInfoEXT* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToMicromapEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToMicromapInfoEXT* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkWriteMicromapsPropertiesEXT)(VkDevice device, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, size_t dataSize, void* pData, size_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMicromapEXT)(VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMicromapToMemoryEXT)(VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToMicromapEXT)(VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteMicromapsPropertiesEXT)(VkCommandBuffer commandBuffer, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef void (VKAPI_PTR *PFN_vkGetDeviceMicromapCompatibilityEXT)(VkDevice device, const VkMicromapVersionInfoEXT* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility); +typedef void (VKAPI_PTR *PFN_vkGetMicromapBuildSizesEXT)(VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkMicromapBuildInfoEXT* pBuildInfo, VkMicromapBuildSizesInfoEXT* pSizeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateMicromapEXT( + VkDevice device, + const VkMicromapCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkMicromapEXT* pMicromap); + +VKAPI_ATTR void VKAPI_CALL vkDestroyMicromapEXT( + VkDevice device, + VkMicromapEXT micromap, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildMicromapsEXT( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkMicromapBuildInfoEXT* pInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkBuildMicromapsEXT( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkMicromapBuildInfoEXT* pInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMicromapEXT( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMicromapInfoEXT* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMicromapToMemoryEXT( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMicromapToMemoryInfoEXT* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToMicromapEXT( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToMicromapInfoEXT* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkWriteMicromapsPropertiesEXT( + VkDevice device, + uint32_t micromapCount, + const VkMicromapEXT* pMicromaps, + VkQueryType queryType, + size_t dataSize, + void* pData, + size_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMicromapEXT( + VkCommandBuffer commandBuffer, + const VkCopyMicromapInfoEXT* pInfo); + 
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMicromapToMemoryEXT( + VkCommandBuffer commandBuffer, + const VkCopyMicromapToMemoryInfoEXT* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToMicromapEXT( + VkCommandBuffer commandBuffer, + const VkCopyMemoryToMicromapInfoEXT* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteMicromapsPropertiesEXT( + VkCommandBuffer commandBuffer, + uint32_t micromapCount, + const VkMicromapEXT* pMicromaps, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceMicromapCompatibilityEXT( + VkDevice device, + const VkMicromapVersionInfoEXT* pVersionInfo, + VkAccelerationStructureCompatibilityKHR* pCompatibility); + +VKAPI_ATTR void VKAPI_CALL vkGetMicromapBuildSizesEXT( + VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkMicromapBuildInfoEXT* pBuildInfo, + VkMicromapBuildSizesInfoEXT* pSizeInfo); +#endif + + +// VK_EXT_load_store_op_none is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_load_store_op_none 1 +#define VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION 1 +#define VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME "VK_EXT_load_store_op_none" + + +// VK_HUAWEI_cluster_culling_shader is a preprocessor guard. Do not pass it to API calls. +#define VK_HUAWEI_cluster_culling_shader 1 +#define VK_HUAWEI_CLUSTER_CULLING_SHADER_SPEC_VERSION 3 +#define VK_HUAWEI_CLUSTER_CULLING_SHADER_EXTENSION_NAME "VK_HUAWEI_cluster_culling_shader" +typedef struct VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI { + VkStructureType sType; + void* pNext; + VkBool32 clustercullingShader; + VkBool32 multiviewClusterCullingShader; +} VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI; + +typedef struct VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI { + VkStructureType sType; + void* pNext; + uint32_t maxWorkGroupCount[3]; + uint32_t maxWorkGroupSize[3]; + uint32_t maxOutputClusterCount; + VkDeviceSize indirectBufferOffsetAlignment; +} VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI; + +typedef struct VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI { + VkStructureType sType; + void* pNext; + VkBool32 clusterShadingRate; +} VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawClusterHUAWEI)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef void (VKAPI_PTR *PFN_vkCmdDrawClusterIndirectHUAWEI)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawClusterHUAWEI( + VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawClusterIndirectHUAWEI( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset); +#endif + + +// VK_EXT_border_color_swizzle is a preprocessor guard. Do not pass it to API calls. 
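+// A minimal, illustrative sketch of chaining the mapping below into sampler creation,
+// assuming the borderColorSwizzle feature is enabled:
+//
+//     VkSamplerBorderColorComponentMappingCreateInfoEXT borderMapping = {
+//         .sType      = VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT,
+//         .components = { VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_G,
+//                         VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_A },
+//         .srgb       = VK_FALSE,
+//     };
+//     VkSamplerCreateInfo samplerInfo = {
+//         .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
+//         .pNext = &borderMapping,
+//         // remaining sampler state as usual
+//     };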
+#define VK_EXT_border_color_swizzle 1 +#define VK_EXT_BORDER_COLOR_SWIZZLE_SPEC_VERSION 1 +#define VK_EXT_BORDER_COLOR_SWIZZLE_EXTENSION_NAME "VK_EXT_border_color_swizzle" +typedef struct VkPhysicalDeviceBorderColorSwizzleFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 borderColorSwizzle; + VkBool32 borderColorSwizzleFromImage; +} VkPhysicalDeviceBorderColorSwizzleFeaturesEXT; + +typedef struct VkSamplerBorderColorComponentMappingCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkComponentMapping components; + VkBool32 srgb; +} VkSamplerBorderColorComponentMappingCreateInfoEXT; + + + +// VK_EXT_pageable_device_local_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pageable_device_local_memory 1 +#define VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_SPEC_VERSION 1 +#define VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_EXTENSION_NAME "VK_EXT_pageable_device_local_memory" +typedef struct VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 pageableDeviceLocalMemory; +} VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkSetDeviceMemoryPriorityEXT)(VkDevice device, VkDeviceMemory memory, float priority); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkSetDeviceMemoryPriorityEXT( + VkDevice device, + VkDeviceMemory memory, + float priority); +#endif + + +// VK_ARM_shader_core_properties is a preprocessor guard. Do not pass it to API calls. +#define VK_ARM_shader_core_properties 1 +#define VK_ARM_SHADER_CORE_PROPERTIES_SPEC_VERSION 1 +#define VK_ARM_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_ARM_shader_core_properties" +typedef struct VkPhysicalDeviceShaderCorePropertiesARM { + VkStructureType sType; + void* pNext; + uint32_t pixelRate; + uint32_t texelRate; + uint32_t fmaRate; +} VkPhysicalDeviceShaderCorePropertiesARM; + + + +// VK_ARM_scheduling_controls is a preprocessor guard. Do not pass it to API calls. +#define VK_ARM_scheduling_controls 1 +#define VK_ARM_SCHEDULING_CONTROLS_SPEC_VERSION 1 +#define VK_ARM_SCHEDULING_CONTROLS_EXTENSION_NAME "VK_ARM_scheduling_controls" +typedef VkFlags64 VkPhysicalDeviceSchedulingControlsFlagsARM; + +// Flag bits for VkPhysicalDeviceSchedulingControlsFlagBitsARM +typedef VkFlags64 VkPhysicalDeviceSchedulingControlsFlagBitsARM; +static const VkPhysicalDeviceSchedulingControlsFlagBitsARM VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_SHADER_CORE_COUNT_ARM = 0x00000001ULL; + +typedef struct VkDeviceQueueShaderCoreControlCreateInfoARM { + VkStructureType sType; + void* pNext; + uint32_t shaderCoreCount; +} VkDeviceQueueShaderCoreControlCreateInfoARM; + +typedef struct VkPhysicalDeviceSchedulingControlsFeaturesARM { + VkStructureType sType; + void* pNext; + VkBool32 schedulingControls; +} VkPhysicalDeviceSchedulingControlsFeaturesARM; + +typedef struct VkPhysicalDeviceSchedulingControlsPropertiesARM { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags; +} VkPhysicalDeviceSchedulingControlsPropertiesARM; + + + +// VK_EXT_image_sliced_view_of_3d is a preprocessor guard. Do not pass it to API calls. 
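+// A minimal, illustrative sketch of creating a sliced view of a 3D image, assuming
+// `image3d` is a valid VK_IMAGE_TYPE_3D image and the imageSlicedViewOf3D feature is
+// enabled:
+//
+//     VkImageViewSlicedCreateInfoEXT slicedInfo = {
+//         .sType       = VK_STRUCTURE_TYPE_IMAGE_VIEW_SLICED_CREATE_INFO_EXT,
+//         .sliceOffset = 0,
+//         .sliceCount  = VK_REMAINING_3D_SLICES_EXT,
+//     };
+//     VkImageViewCreateInfo viewInfo = {
+//         .sType    = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+//         .pNext    = &slicedInfo,
+//         .image    = image3d,
+//         .viewType = VK_IMAGE_VIEW_TYPE_3D,
+//         // format and subresource range as usual
+//     };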
+#define VK_EXT_image_sliced_view_of_3d 1 +#define VK_EXT_IMAGE_SLICED_VIEW_OF_3D_SPEC_VERSION 1 +#define VK_EXT_IMAGE_SLICED_VIEW_OF_3D_EXTENSION_NAME "VK_EXT_image_sliced_view_of_3d" +#define VK_REMAINING_3D_SLICES_EXT (~0U) +typedef struct VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 imageSlicedViewOf3D; +} VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + +typedef struct VkImageViewSlicedCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t sliceOffset; + uint32_t sliceCount; +} VkImageViewSlicedCreateInfoEXT; + + + +// VK_VALVE_descriptor_set_host_mapping is a preprocessor guard. Do not pass it to API calls. +#define VK_VALVE_descriptor_set_host_mapping 1 +#define VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_SPEC_VERSION 1 +#define VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_EXTENSION_NAME "VK_VALVE_descriptor_set_host_mapping" +typedef struct VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE { + VkStructureType sType; + void* pNext; + VkBool32 descriptorSetHostMapping; +} VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; + +typedef struct VkDescriptorSetBindingReferenceVALVE { + VkStructureType sType; + const void* pNext; + VkDescriptorSetLayout descriptorSetLayout; + uint32_t binding; +} VkDescriptorSetBindingReferenceVALVE; + +typedef struct VkDescriptorSetLayoutHostMappingInfoVALVE { + VkStructureType sType; + void* pNext; + size_t descriptorOffset; + uint32_t descriptorSize; +} VkDescriptorSetLayoutHostMappingInfoVALVE; + +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE)(VkDevice device, const VkDescriptorSetBindingReferenceVALVE* pBindingReference, VkDescriptorSetLayoutHostMappingInfoVALVE* pHostMapping); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetHostMappingVALVE)(VkDevice device, VkDescriptorSet descriptorSet, void** ppData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutHostMappingInfoVALVE( + VkDevice device, + const VkDescriptorSetBindingReferenceVALVE* pBindingReference, + VkDescriptorSetLayoutHostMappingInfoVALVE* pHostMapping); + +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetHostMappingVALVE( + VkDevice device, + VkDescriptorSet descriptorSet, + void** ppData); +#endif + + +// VK_EXT_depth_clamp_zero_one is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_depth_clamp_zero_one 1 +#define VK_EXT_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION 1 +#define VK_EXT_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME "VK_EXT_depth_clamp_zero_one" +typedef VkPhysicalDeviceDepthClampZeroOneFeaturesKHR VkPhysicalDeviceDepthClampZeroOneFeaturesEXT; + + + +// VK_EXT_non_seamless_cube_map is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_non_seamless_cube_map 1 +#define VK_EXT_NON_SEAMLESS_CUBE_MAP_SPEC_VERSION 1 +#define VK_EXT_NON_SEAMLESS_CUBE_MAP_EXTENSION_NAME "VK_EXT_non_seamless_cube_map" +typedef struct VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 nonSeamlessCubeMap; +} VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + + + +// VK_ARM_render_pass_striped is a preprocessor guard. Do not pass it to API calls. 
+#define VK_ARM_render_pass_striped 1 +#define VK_ARM_RENDER_PASS_STRIPED_SPEC_VERSION 1 +#define VK_ARM_RENDER_PASS_STRIPED_EXTENSION_NAME "VK_ARM_render_pass_striped" +typedef struct VkPhysicalDeviceRenderPassStripedFeaturesARM { + VkStructureType sType; + void* pNext; + VkBool32 renderPassStriped; +} VkPhysicalDeviceRenderPassStripedFeaturesARM; + +typedef struct VkPhysicalDeviceRenderPassStripedPropertiesARM { + VkStructureType sType; + void* pNext; + VkExtent2D renderPassStripeGranularity; + uint32_t maxRenderPassStripes; +} VkPhysicalDeviceRenderPassStripedPropertiesARM; + +typedef struct VkRenderPassStripeInfoARM { + VkStructureType sType; + const void* pNext; + VkRect2D stripeArea; +} VkRenderPassStripeInfoARM; + +typedef struct VkRenderPassStripeBeginInfoARM { + VkStructureType sType; + const void* pNext; + uint32_t stripeInfoCount; + const VkRenderPassStripeInfoARM* pStripeInfos; +} VkRenderPassStripeBeginInfoARM; + +typedef struct VkRenderPassStripeSubmitInfoARM { + VkStructureType sType; + const void* pNext; + uint32_t stripeSemaphoreInfoCount; + const VkSemaphoreSubmitInfo* pStripeSemaphoreInfos; +} VkRenderPassStripeSubmitInfoARM; + + + +// VK_QCOM_fragment_density_map_offset is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_fragment_density_map_offset 1 +#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION 3 +#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_EXTENSION_NAME "VK_QCOM_fragment_density_map_offset" +typedef struct VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 fragmentDensityMapOffset; +} VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT; + +typedef VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + +typedef struct VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT { + VkStructureType sType; + void* pNext; + VkExtent2D fragmentDensityOffsetGranularity; +} VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT; + +typedef VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + +typedef struct VkRenderPassFragmentDensityMapOffsetEndInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t fragmentDensityOffsetCount; + const VkOffset2D* pFragmentDensityOffsets; +} VkRenderPassFragmentDensityMapOffsetEndInfoEXT; + +typedef VkRenderPassFragmentDensityMapOffsetEndInfoEXT VkSubpassFragmentDensityMapOffsetEndInfoQCOM; + + + +// VK_NV_copy_memory_indirect is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_copy_memory_indirect 1 +#define VK_NV_COPY_MEMORY_INDIRECT_SPEC_VERSION 1 +#define VK_NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME "VK_NV_copy_memory_indirect" +typedef struct VkCopyMemoryIndirectCommandNV { + VkDeviceAddress srcAddress; + VkDeviceAddress dstAddress; + VkDeviceSize size; +} VkCopyMemoryIndirectCommandNV; + +typedef struct VkCopyMemoryToImageIndirectCommandNV { + VkDeviceAddress srcAddress; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkCopyMemoryToImageIndirectCommandNV; + +typedef struct VkPhysicalDeviceCopyMemoryIndirectFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 indirectCopy; +} VkPhysicalDeviceCopyMemoryIndirectFeaturesNV; + +typedef struct VkPhysicalDeviceCopyMemoryIndirectPropertiesNV { + VkStructureType sType; + void* pNext; + VkQueueFlags supportedQueues; +} VkPhysicalDeviceCopyMemoryIndirectPropertiesNV; + +typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryIndirectNV)(VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToImageIndirectNV)(VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride, VkImage dstImage, VkImageLayout dstImageLayout, const VkImageSubresourceLayers* pImageSubresources); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryIndirectNV( + VkCommandBuffer commandBuffer, + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToImageIndirectNV( + VkCommandBuffer commandBuffer, + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + VkImage dstImage, + VkImageLayout dstImageLayout, + const VkImageSubresourceLayers* pImageSubresources); +#endif + + +// VK_NV_memory_decompression is a preprocessor guard. Do not pass it to API calls. 
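+// A minimal, illustrative sketch of recording a GDeflate decompression with the
+// declarations below, assuming `cmd` is recording, the memoryDecompression feature is
+// enabled, and `compressedAddr`/`decompressedAddr` are hypothetical VkDeviceAddress
+// values obtained via vkGetBufferDeviceAddress:
+//
+//     VkDecompressMemoryRegionNV region = {
+//         .srcAddress          = compressedAddr,
+//         .dstAddress          = decompressedAddr,
+//         .compressedSize      = compressedSize,
+//         .decompressedSize    = decompressedSize,
+//         .decompressionMethod = VK_MEMORY_DECOMPRESSION_METHOD_GDEFLATE_1_0_BIT_NV,
+//     };
+//     vkCmdDecompressMemoryNV(cmd, 1, &region);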
+#define VK_NV_memory_decompression 1 +#define VK_NV_MEMORY_DECOMPRESSION_SPEC_VERSION 1 +#define VK_NV_MEMORY_DECOMPRESSION_EXTENSION_NAME "VK_NV_memory_decompression" + +// Flag bits for VkMemoryDecompressionMethodFlagBitsNV +typedef VkFlags64 VkMemoryDecompressionMethodFlagBitsNV; +static const VkMemoryDecompressionMethodFlagBitsNV VK_MEMORY_DECOMPRESSION_METHOD_GDEFLATE_1_0_BIT_NV = 0x00000001ULL; + +typedef VkFlags64 VkMemoryDecompressionMethodFlagsNV; +typedef struct VkDecompressMemoryRegionNV { + VkDeviceAddress srcAddress; + VkDeviceAddress dstAddress; + VkDeviceSize compressedSize; + VkDeviceSize decompressedSize; + VkMemoryDecompressionMethodFlagsNV decompressionMethod; +} VkDecompressMemoryRegionNV; + +typedef struct VkPhysicalDeviceMemoryDecompressionFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 memoryDecompression; +} VkPhysicalDeviceMemoryDecompressionFeaturesNV; + +typedef struct VkPhysicalDeviceMemoryDecompressionPropertiesNV { + VkStructureType sType; + void* pNext; + VkMemoryDecompressionMethodFlagsNV decompressionMethods; + uint64_t maxDecompressionIndirectCount; +} VkPhysicalDeviceMemoryDecompressionPropertiesNV; + +typedef void (VKAPI_PTR *PFN_vkCmdDecompressMemoryNV)(VkCommandBuffer commandBuffer, uint32_t decompressRegionCount, const VkDecompressMemoryRegionNV* pDecompressMemoryRegions); +typedef void (VKAPI_PTR *PFN_vkCmdDecompressMemoryIndirectCountNV)(VkCommandBuffer commandBuffer, VkDeviceAddress indirectCommandsAddress, VkDeviceAddress indirectCommandsCountAddress, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDecompressMemoryNV( + VkCommandBuffer commandBuffer, + uint32_t decompressRegionCount, + const VkDecompressMemoryRegionNV* pDecompressMemoryRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdDecompressMemoryIndirectCountNV( + VkCommandBuffer commandBuffer, + VkDeviceAddress indirectCommandsAddress, + VkDeviceAddress indirectCommandsCountAddress, + uint32_t stride); +#endif + + +// VK_NV_device_generated_commands_compute is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_device_generated_commands_compute 1 +#define VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_SPEC_VERSION 2 +#define VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_EXTENSION_NAME "VK_NV_device_generated_commands_compute" +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 deviceGeneratedCompute; + VkBool32 deviceGeneratedComputePipelines; + VkBool32 deviceGeneratedComputeCaptureReplay; +} VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + +typedef struct VkComputePipelineIndirectBufferInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceAddress deviceAddress; + VkDeviceSize size; + VkDeviceAddress pipelineDeviceAddressCaptureReplay; +} VkComputePipelineIndirectBufferInfoNV; + +typedef struct VkPipelineIndirectDeviceAddressInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; +} VkPipelineIndirectDeviceAddressInfoNV; + +typedef struct VkBindPipelineIndirectCommandNV { + VkDeviceAddress pipelineAddress; +} VkBindPipelineIndirectCommandNV; + +typedef void (VKAPI_PTR *PFN_vkGetPipelineIndirectMemoryRequirementsNV)(VkDevice device, const VkComputePipelineCreateInfo* pCreateInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkCmdUpdatePipelineIndirectBufferNV)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline); +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetPipelineIndirectDeviceAddressNV)(VkDevice device, const VkPipelineIndirectDeviceAddressInfoNV* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPipelineIndirectMemoryRequirementsNV( + VkDevice device, + const VkComputePipelineCreateInfo* pCreateInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkCmdUpdatePipelineIndirectBufferNV( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline); + +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetPipelineIndirectDeviceAddressNV( + VkDevice device, + const VkPipelineIndirectDeviceAddressInfoNV* pInfo); +#endif + + +// VK_NV_ray_tracing_linear_swept_spheres is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_ray_tracing_linear_swept_spheres 1 +#define VK_NV_RAY_TRACING_LINEAR_SWEPT_SPHERES_SPEC_VERSION 1 +#define VK_NV_RAY_TRACING_LINEAR_SWEPT_SPHERES_EXTENSION_NAME "VK_NV_ray_tracing_linear_swept_spheres" + +typedef enum VkRayTracingLssIndexingModeNV { + VK_RAY_TRACING_LSS_INDEXING_MODE_LIST_NV = 0, + VK_RAY_TRACING_LSS_INDEXING_MODE_SUCCESSIVE_NV = 1, + VK_RAY_TRACING_LSS_INDEXING_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkRayTracingLssIndexingModeNV; + +typedef enum VkRayTracingLssPrimitiveEndCapsModeNV { + VK_RAY_TRACING_LSS_PRIMITIVE_END_CAPS_MODE_NONE_NV = 0, + VK_RAY_TRACING_LSS_PRIMITIVE_END_CAPS_MODE_CHAINED_NV = 1, + VK_RAY_TRACING_LSS_PRIMITIVE_END_CAPS_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkRayTracingLssPrimitiveEndCapsModeNV; +typedef struct VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 spheres; + VkBool32 linearSweptSpheres; +} VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV; + +typedef struct VkAccelerationStructureGeometryLinearSweptSpheresDataNV { + VkStructureType sType; + const void* pNext; + VkFormat vertexFormat; + VkDeviceOrHostAddressConstKHR vertexData; + VkDeviceSize vertexStride; + VkFormat radiusFormat; + VkDeviceOrHostAddressConstKHR radiusData; + VkDeviceSize radiusStride; + VkIndexType indexType; + VkDeviceOrHostAddressConstKHR indexData; + VkDeviceSize indexStride; + VkRayTracingLssIndexingModeNV indexingMode; + VkRayTracingLssPrimitiveEndCapsModeNV endCapsMode; +} VkAccelerationStructureGeometryLinearSweptSpheresDataNV; + +typedef struct VkAccelerationStructureGeometrySpheresDataNV { + VkStructureType sType; + const void* pNext; + VkFormat vertexFormat; + VkDeviceOrHostAddressConstKHR vertexData; + VkDeviceSize vertexStride; + VkFormat radiusFormat; + VkDeviceOrHostAddressConstKHR radiusData; + VkDeviceSize radiusStride; + VkIndexType indexType; + VkDeviceOrHostAddressConstKHR indexData; + VkDeviceSize indexStride; +} VkAccelerationStructureGeometrySpheresDataNV; + + + +// VK_NV_linear_color_attachment is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_linear_color_attachment 1 +#define VK_NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION 1 +#define VK_NV_LINEAR_COLOR_ATTACHMENT_EXTENSION_NAME "VK_NV_linear_color_attachment" +typedef struct VkPhysicalDeviceLinearColorAttachmentFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 linearColorAttachment; +} VkPhysicalDeviceLinearColorAttachmentFeaturesNV; + + + +// VK_GOOGLE_surfaceless_query is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_surfaceless_query 1 +#define VK_GOOGLE_SURFACELESS_QUERY_SPEC_VERSION 2 +#define VK_GOOGLE_SURFACELESS_QUERY_EXTENSION_NAME "VK_GOOGLE_surfaceless_query" + + +// VK_EXT_image_compression_control_swapchain is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_image_compression_control_swapchain 1 +#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_SPEC_VERSION 1 +#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_EXTENSION_NAME "VK_EXT_image_compression_control_swapchain" +typedef struct VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 imageCompressionControlSwapchain; +} VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + + + +// VK_QCOM_image_processing is a preprocessor guard. Do not pass it to API calls. 
+#define VK_QCOM_image_processing 1 +#define VK_QCOM_IMAGE_PROCESSING_SPEC_VERSION 1 +#define VK_QCOM_IMAGE_PROCESSING_EXTENSION_NAME "VK_QCOM_image_processing" +typedef struct VkImageViewSampleWeightCreateInfoQCOM { + VkStructureType sType; + const void* pNext; + VkOffset2D filterCenter; + VkExtent2D filterSize; + uint32_t numPhases; +} VkImageViewSampleWeightCreateInfoQCOM; + +typedef struct VkPhysicalDeviceImageProcessingFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 textureSampleWeighted; + VkBool32 textureBoxFilter; + VkBool32 textureBlockMatch; +} VkPhysicalDeviceImageProcessingFeaturesQCOM; + +typedef struct VkPhysicalDeviceImageProcessingPropertiesQCOM { + VkStructureType sType; + void* pNext; + uint32_t maxWeightFilterPhases; + VkExtent2D maxWeightFilterDimension; + VkExtent2D maxBlockMatchRegion; + VkExtent2D maxBoxFilterBlockSize; +} VkPhysicalDeviceImageProcessingPropertiesQCOM; + + + +// VK_EXT_nested_command_buffer is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_nested_command_buffer 1 +#define VK_EXT_NESTED_COMMAND_BUFFER_SPEC_VERSION 1 +#define VK_EXT_NESTED_COMMAND_BUFFER_EXTENSION_NAME "VK_EXT_nested_command_buffer" +typedef struct VkPhysicalDeviceNestedCommandBufferFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 nestedCommandBuffer; + VkBool32 nestedCommandBufferRendering; + VkBool32 nestedCommandBufferSimultaneousUse; +} VkPhysicalDeviceNestedCommandBufferFeaturesEXT; + +typedef struct VkPhysicalDeviceNestedCommandBufferPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxCommandBufferNestingLevel; +} VkPhysicalDeviceNestedCommandBufferPropertiesEXT; + + + +// VK_EXT_external_memory_acquire_unmodified is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_external_memory_acquire_unmodified 1 +#define VK_EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXTENSION_NAME "VK_EXT_external_memory_acquire_unmodified" +typedef struct VkExternalMemoryAcquireUnmodifiedEXT { + VkStructureType sType; + const void* pNext; + VkBool32 acquireUnmodifiedMemory; +} VkExternalMemoryAcquireUnmodifiedEXT; + + + +// VK_EXT_extended_dynamic_state3 is a preprocessor guard. Do not pass it to API calls. 
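+// A minimal, illustrative sketch of a few of the state-setting commands declared
+// below, assuming `cmd` is a VkCommandBuffer in the recording state, the matching
+// extendedDynamicState3* features are enabled, and the bound pipeline declared the
+// corresponding dynamic states:
+//
+//     vkCmdSetPolygonModeEXT(cmd, VK_POLYGON_MODE_FILL);
+//     vkCmdSetRasterizationSamplesEXT(cmd, VK_SAMPLE_COUNT_4_BIT);
+//     VkSampleMask sampleMask = 0xFu;
+//     vkCmdSetSampleMaskEXT(cmd, VK_SAMPLE_COUNT_4_BIT, &sampleMask);
+//     VkBool32 blendEnable = VK_TRUE;
+//     vkCmdSetColorBlendEnableEXT(cmd, 0, 1, &blendEnable);
+//     VkColorComponentFlags writeMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |
+//                                       VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT;
+//     vkCmdSetColorWriteMaskEXT(cmd, 0, 1, &writeMask);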
+#define VK_EXT_extended_dynamic_state3 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_3_SPEC_VERSION 2 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_3_EXTENSION_NAME "VK_EXT_extended_dynamic_state3" +typedef struct VkPhysicalDeviceExtendedDynamicState3FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 extendedDynamicState3TessellationDomainOrigin; + VkBool32 extendedDynamicState3DepthClampEnable; + VkBool32 extendedDynamicState3PolygonMode; + VkBool32 extendedDynamicState3RasterizationSamples; + VkBool32 extendedDynamicState3SampleMask; + VkBool32 extendedDynamicState3AlphaToCoverageEnable; + VkBool32 extendedDynamicState3AlphaToOneEnable; + VkBool32 extendedDynamicState3LogicOpEnable; + VkBool32 extendedDynamicState3ColorBlendEnable; + VkBool32 extendedDynamicState3ColorBlendEquation; + VkBool32 extendedDynamicState3ColorWriteMask; + VkBool32 extendedDynamicState3RasterizationStream; + VkBool32 extendedDynamicState3ConservativeRasterizationMode; + VkBool32 extendedDynamicState3ExtraPrimitiveOverestimationSize; + VkBool32 extendedDynamicState3DepthClipEnable; + VkBool32 extendedDynamicState3SampleLocationsEnable; + VkBool32 extendedDynamicState3ColorBlendAdvanced; + VkBool32 extendedDynamicState3ProvokingVertexMode; + VkBool32 extendedDynamicState3LineRasterizationMode; + VkBool32 extendedDynamicState3LineStippleEnable; + VkBool32 extendedDynamicState3DepthClipNegativeOneToOne; + VkBool32 extendedDynamicState3ViewportWScalingEnable; + VkBool32 extendedDynamicState3ViewportSwizzle; + VkBool32 extendedDynamicState3CoverageToColorEnable; + VkBool32 extendedDynamicState3CoverageToColorLocation; + VkBool32 extendedDynamicState3CoverageModulationMode; + VkBool32 extendedDynamicState3CoverageModulationTableEnable; + VkBool32 extendedDynamicState3CoverageModulationTable; + VkBool32 extendedDynamicState3CoverageReductionMode; + VkBool32 extendedDynamicState3RepresentativeFragmentTestEnable; + VkBool32 extendedDynamicState3ShadingRateImageEnable; +} VkPhysicalDeviceExtendedDynamicState3FeaturesEXT; + +typedef struct VkPhysicalDeviceExtendedDynamicState3PropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 dynamicPrimitiveTopologyUnrestricted; +} VkPhysicalDeviceExtendedDynamicState3PropertiesEXT; + +typedef struct VkColorBlendEquationEXT { + VkBlendFactor srcColorBlendFactor; + VkBlendFactor dstColorBlendFactor; + VkBlendOp colorBlendOp; + VkBlendFactor srcAlphaBlendFactor; + VkBlendFactor dstAlphaBlendFactor; + VkBlendOp alphaBlendOp; +} VkColorBlendEquationEXT; + +typedef struct VkColorBlendAdvancedEXT { + VkBlendOp advancedBlendOp; + VkBool32 srcPremultiplied; + VkBool32 dstPremultiplied; + VkBlendOverlapEXT blendOverlap; + VkBool32 clampResults; +} VkColorBlendAdvancedEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClampEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthClampEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetPolygonModeEXT)(VkCommandBuffer commandBuffer, VkPolygonMode polygonMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizationSamplesEXT)(VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples); +typedef void (VKAPI_PTR *PFN_vkCmdSetSampleMaskEXT)(VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask* pSampleMask); +typedef void (VKAPI_PTR *PFN_vkCmdSetAlphaToCoverageEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetAlphaToOneEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable); +typedef void (VKAPI_PTR 
*PFN_vkCmdSetLogicOpEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 logicOpEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendEnableEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkBool32* pColorBlendEnables); +typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendEquationEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendEquationEXT* pColorBlendEquations); +typedef void (VKAPI_PTR *PFN_vkCmdSetColorWriteMaskEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorComponentFlags* pColorWriteMasks); +typedef void (VKAPI_PTR *PFN_vkCmdSetTessellationDomainOriginEXT)(VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin); +typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizationStreamEXT)(VkCommandBuffer commandBuffer, uint32_t rasterizationStream); +typedef void (VKAPI_PTR *PFN_vkCmdSetConservativeRasterizationModeEXT)(VkCommandBuffer commandBuffer, VkConservativeRasterizationModeEXT conservativeRasterizationMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT)(VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClipEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthClipEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendAdvancedEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendAdvancedEXT* pColorBlendAdvanced); +typedef void (VKAPI_PTR *PFN_vkCmdSetProvokingVertexModeEXT)(VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetLineRasterizationModeEXT)(VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClipNegativeOneToOneEXT)(VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingEnableNV)(VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportSwizzleNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportSwizzleNV* pViewportSwizzles); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageToColorEnableNV)(VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageToColorLocationNV)(VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationModeNV)(VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationTableEnableNV)(VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationTableNV)(VkCommandBuffer commandBuffer, uint32_t coverageModulationTableCount, const float* pCoverageModulationTable); +typedef void (VKAPI_PTR *PFN_vkCmdSetShadingRateImageEnableNV)(VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetRepresentativeFragmentTestEnableNV)(VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable); +typedef void (VKAPI_PTR 
*PFN_vkCmdSetCoverageReductionModeNV)(VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClampEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthClampEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPolygonModeEXT( + VkCommandBuffer commandBuffer, + VkPolygonMode polygonMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizationSamplesEXT( + VkCommandBuffer commandBuffer, + VkSampleCountFlagBits rasterizationSamples); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleMaskEXT( + VkCommandBuffer commandBuffer, + VkSampleCountFlagBits samples, + const VkSampleMask* pSampleMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetAlphaToCoverageEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 alphaToCoverageEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetAlphaToOneEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 alphaToOneEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLogicOpEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 logicOpEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendEnableEXT( + VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkBool32* pColorBlendEnables); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendEquationEXT( + VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorBlendEquationEXT* pColorBlendEquations); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWriteMaskEXT( + VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorComponentFlags* pColorWriteMasks); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetTessellationDomainOriginEXT( + VkCommandBuffer commandBuffer, + VkTessellationDomainOrigin domainOrigin); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizationStreamEXT( + VkCommandBuffer commandBuffer, + uint32_t rasterizationStream); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetConservativeRasterizationModeEXT( + VkCommandBuffer commandBuffer, + VkConservativeRasterizationModeEXT conservativeRasterizationMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetExtraPrimitiveOverestimationSizeEXT( + VkCommandBuffer commandBuffer, + float extraPrimitiveOverestimationSize); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClipEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthClipEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 sampleLocationsEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendAdvancedEXT( + VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorBlendAdvancedEXT* pColorBlendAdvanced); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetProvokingVertexModeEXT( + VkCommandBuffer commandBuffer, + VkProvokingVertexModeEXT provokingVertexMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineRasterizationModeEXT( + VkCommandBuffer commandBuffer, + VkLineRasterizationModeEXT lineRasterizationMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 stippledLineEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClipNegativeOneToOneEXT( + VkCommandBuffer commandBuffer, + VkBool32 negativeOneToOne); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingEnableNV( + VkCommandBuffer commandBuffer, + VkBool32 viewportWScalingEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportSwizzleNV( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportSwizzleNV* 
pViewportSwizzles); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageToColorEnableNV( + VkCommandBuffer commandBuffer, + VkBool32 coverageToColorEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageToColorLocationNV( + VkCommandBuffer commandBuffer, + uint32_t coverageToColorLocation); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationModeNV( + VkCommandBuffer commandBuffer, + VkCoverageModulationModeNV coverageModulationMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationTableEnableNV( + VkCommandBuffer commandBuffer, + VkBool32 coverageModulationTableEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationTableNV( + VkCommandBuffer commandBuffer, + uint32_t coverageModulationTableCount, + const float* pCoverageModulationTable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetShadingRateImageEnableNV( + VkCommandBuffer commandBuffer, + VkBool32 shadingRateImageEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRepresentativeFragmentTestEnableNV( + VkCommandBuffer commandBuffer, + VkBool32 representativeFragmentTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageReductionModeNV( + VkCommandBuffer commandBuffer, + VkCoverageReductionModeNV coverageReductionMode); +#endif + + +// VK_EXT_subpass_merge_feedback is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_subpass_merge_feedback 1 +#define VK_EXT_SUBPASS_MERGE_FEEDBACK_SPEC_VERSION 2 +#define VK_EXT_SUBPASS_MERGE_FEEDBACK_EXTENSION_NAME "VK_EXT_subpass_merge_feedback" + +typedef enum VkSubpassMergeStatusEXT { + VK_SUBPASS_MERGE_STATUS_MERGED_EXT = 0, + VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT = 1, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT = 2, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT = 3, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT = 4, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT = 5, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT = 6, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT = 7, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT = 8, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT = 9, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT = 10, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT = 11, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT = 12, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT = 13, + VK_SUBPASS_MERGE_STATUS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkSubpassMergeStatusEXT; +typedef struct VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 subpassMergeFeedback; +} VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + +typedef struct VkRenderPassCreationControlEXT { + VkStructureType sType; + const void* pNext; + VkBool32 disallowMerging; +} VkRenderPassCreationControlEXT; + +typedef struct VkRenderPassCreationFeedbackInfoEXT { + uint32_t postMergeSubpassCount; +} VkRenderPassCreationFeedbackInfoEXT; + +typedef struct VkRenderPassCreationFeedbackCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkRenderPassCreationFeedbackInfoEXT* pRenderPassFeedback; +} VkRenderPassCreationFeedbackCreateInfoEXT; + +typedef struct VkRenderPassSubpassFeedbackInfoEXT { + VkSubpassMergeStatusEXT subpassMergeStatus; + char description[VK_MAX_DESCRIPTION_SIZE]; + uint32_t postMergeIndex; +} VkRenderPassSubpassFeedbackInfoEXT; + +typedef struct VkRenderPassSubpassFeedbackCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkRenderPassSubpassFeedbackInfoEXT* pSubpassFeedback; +} 
VkRenderPassSubpassFeedbackCreateInfoEXT; + + + +// VK_LUNARG_direct_driver_loading is a preprocessor guard. Do not pass it to API calls. +#define VK_LUNARG_direct_driver_loading 1 +#define VK_LUNARG_DIRECT_DRIVER_LOADING_SPEC_VERSION 1 +#define VK_LUNARG_DIRECT_DRIVER_LOADING_EXTENSION_NAME "VK_LUNARG_direct_driver_loading" + +typedef enum VkDirectDriverLoadingModeLUNARG { + VK_DIRECT_DRIVER_LOADING_MODE_EXCLUSIVE_LUNARG = 0, + VK_DIRECT_DRIVER_LOADING_MODE_INCLUSIVE_LUNARG = 1, + VK_DIRECT_DRIVER_LOADING_MODE_MAX_ENUM_LUNARG = 0x7FFFFFFF +} VkDirectDriverLoadingModeLUNARG; +typedef VkFlags VkDirectDriverLoadingFlagsLUNARG; +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddrLUNARG)( + VkInstance instance, const char* pName); + +typedef struct VkDirectDriverLoadingInfoLUNARG { + VkStructureType sType; + void* pNext; + VkDirectDriverLoadingFlagsLUNARG flags; + PFN_vkGetInstanceProcAddrLUNARG pfnGetInstanceProcAddr; +} VkDirectDriverLoadingInfoLUNARG; + +typedef struct VkDirectDriverLoadingListLUNARG { + VkStructureType sType; + const void* pNext; + VkDirectDriverLoadingModeLUNARG mode; + uint32_t driverCount; + const VkDirectDriverLoadingInfoLUNARG* pDrivers; +} VkDirectDriverLoadingListLUNARG; + + + +// VK_EXT_shader_module_identifier is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_module_identifier 1 +#define VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT 32U +#define VK_EXT_SHADER_MODULE_IDENTIFIER_SPEC_VERSION 1 +#define VK_EXT_SHADER_MODULE_IDENTIFIER_EXTENSION_NAME "VK_EXT_shader_module_identifier" +typedef struct VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderModuleIdentifier; +} VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT; + +typedef struct VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT { + VkStructureType sType; + void* pNext; + uint8_t shaderModuleIdentifierAlgorithmUUID[VK_UUID_SIZE]; +} VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT; + +typedef struct VkPipelineShaderStageModuleIdentifierCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t identifierSize; + const uint8_t* pIdentifier; +} VkPipelineShaderStageModuleIdentifierCreateInfoEXT; + +typedef struct VkShaderModuleIdentifierEXT { + VkStructureType sType; + void* pNext; + uint32_t identifierSize; + uint8_t identifier[VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT]; +} VkShaderModuleIdentifierEXT; + +typedef void (VKAPI_PTR *PFN_vkGetShaderModuleIdentifierEXT)(VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT* pIdentifier); +typedef void (VKAPI_PTR *PFN_vkGetShaderModuleCreateInfoIdentifierEXT)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, VkShaderModuleIdentifierEXT* pIdentifier); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetShaderModuleIdentifierEXT( + VkDevice device, + VkShaderModule shaderModule, + VkShaderModuleIdentifierEXT* pIdentifier); + +VKAPI_ATTR void VKAPI_CALL vkGetShaderModuleCreateInfoIdentifierEXT( + VkDevice device, + const VkShaderModuleCreateInfo* pCreateInfo, + VkShaderModuleIdentifierEXT* pIdentifier); +#endif + + +// VK_EXT_rasterization_order_attachment_access is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_rasterization_order_attachment_access 1 +#define VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1 +#define VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_EXT_rasterization_order_attachment_access" + + +// VK_NV_optical_flow is a preprocessor guard. 
Do not pass it to API calls. +#define VK_NV_optical_flow 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkOpticalFlowSessionNV) +#define VK_NV_OPTICAL_FLOW_SPEC_VERSION 1 +#define VK_NV_OPTICAL_FLOW_EXTENSION_NAME "VK_NV_optical_flow" + +typedef enum VkOpticalFlowPerformanceLevelNV { + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV = 0, + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV = 1, + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV = 2, + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV = 3, + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowPerformanceLevelNV; + +typedef enum VkOpticalFlowSessionBindingPointNV { + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV = 0, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV = 1, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV = 2, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV = 3, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV = 4, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV = 5, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV = 6, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV = 7, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV = 8, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowSessionBindingPointNV; + +typedef enum VkOpticalFlowGridSizeFlagBitsNV { + VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV = 0, + VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV = 0x00000001, + VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV = 0x00000002, + VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV = 0x00000004, + VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV = 0x00000008, + VK_OPTICAL_FLOW_GRID_SIZE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowGridSizeFlagBitsNV; +typedef VkFlags VkOpticalFlowGridSizeFlagsNV; + +typedef enum VkOpticalFlowUsageFlagBitsNV { + VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV = 0, + VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV = 0x00000001, + VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV = 0x00000002, + VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV = 0x00000004, + VK_OPTICAL_FLOW_USAGE_COST_BIT_NV = 0x00000008, + VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV = 0x00000010, + VK_OPTICAL_FLOW_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowUsageFlagBitsNV; +typedef VkFlags VkOpticalFlowUsageFlagsNV; + +typedef enum VkOpticalFlowSessionCreateFlagBitsNV { + VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV = 0x00000001, + VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV = 0x00000002, + VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV = 0x00000004, + VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV = 0x00000008, + VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV = 0x00000010, + VK_OPTICAL_FLOW_SESSION_CREATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowSessionCreateFlagBitsNV; +typedef VkFlags VkOpticalFlowSessionCreateFlagsNV; + +typedef enum VkOpticalFlowExecuteFlagBitsNV { + VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV = 0x00000001, + VK_OPTICAL_FLOW_EXECUTE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowExecuteFlagBitsNV; +typedef VkFlags VkOpticalFlowExecuteFlagsNV; +typedef struct VkPhysicalDeviceOpticalFlowFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 opticalFlow; +} VkPhysicalDeviceOpticalFlowFeaturesNV; + +typedef struct VkPhysicalDeviceOpticalFlowPropertiesNV { + VkStructureType sType; + void* pNext; + VkOpticalFlowGridSizeFlagsNV supportedOutputGridSizes; + VkOpticalFlowGridSizeFlagsNV supportedHintGridSizes; + VkBool32 hintSupported; + VkBool32 costSupported; + VkBool32 bidirectionalFlowSupported; + VkBool32 globalFlowSupported; + uint32_t minWidth; + uint32_t minHeight; + 
uint32_t maxWidth; + uint32_t maxHeight; + uint32_t maxNumRegionsOfInterest; +} VkPhysicalDeviceOpticalFlowPropertiesNV; + +typedef struct VkOpticalFlowImageFormatInfoNV { + VkStructureType sType; + const void* pNext; + VkOpticalFlowUsageFlagsNV usage; +} VkOpticalFlowImageFormatInfoNV; + +typedef struct VkOpticalFlowImageFormatPropertiesNV { + VkStructureType sType; + const void* pNext; + VkFormat format; +} VkOpticalFlowImageFormatPropertiesNV; + +typedef struct VkOpticalFlowSessionCreateInfoNV { + VkStructureType sType; + void* pNext; + uint32_t width; + uint32_t height; + VkFormat imageFormat; + VkFormat flowVectorFormat; + VkFormat costFormat; + VkOpticalFlowGridSizeFlagsNV outputGridSize; + VkOpticalFlowGridSizeFlagsNV hintGridSize; + VkOpticalFlowPerformanceLevelNV performanceLevel; + VkOpticalFlowSessionCreateFlagsNV flags; +} VkOpticalFlowSessionCreateInfoNV; + +typedef struct VkOpticalFlowSessionCreatePrivateDataInfoNV { + VkStructureType sType; + void* pNext; + uint32_t id; + uint32_t size; + const void* pPrivateData; +} VkOpticalFlowSessionCreatePrivateDataInfoNV; + +typedef struct VkOpticalFlowExecuteInfoNV { + VkStructureType sType; + void* pNext; + VkOpticalFlowExecuteFlagsNV flags; + uint32_t regionCount; + const VkRect2D* pRegions; +} VkOpticalFlowExecuteInfoNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV)(VkPhysicalDevice physicalDevice, const VkOpticalFlowImageFormatInfoNV* pOpticalFlowImageFormatInfo, uint32_t* pFormatCount, VkOpticalFlowImageFormatPropertiesNV* pImageFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateOpticalFlowSessionNV)(VkDevice device, const VkOpticalFlowSessionCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkOpticalFlowSessionNV* pSession); +typedef void (VKAPI_PTR *PFN_vkDestroyOpticalFlowSessionNV)(VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkBindOpticalFlowSessionImageNV)(VkDevice device, VkOpticalFlowSessionNV session, VkOpticalFlowSessionBindingPointNV bindingPoint, VkImageView view, VkImageLayout layout); +typedef void (VKAPI_PTR *PFN_vkCmdOpticalFlowExecuteNV)(VkCommandBuffer commandBuffer, VkOpticalFlowSessionNV session, const VkOpticalFlowExecuteInfoNV* pExecuteInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceOpticalFlowImageFormatsNV( + VkPhysicalDevice physicalDevice, + const VkOpticalFlowImageFormatInfoNV* pOpticalFlowImageFormatInfo, + uint32_t* pFormatCount, + VkOpticalFlowImageFormatPropertiesNV* pImageFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateOpticalFlowSessionNV( + VkDevice device, + const VkOpticalFlowSessionCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkOpticalFlowSessionNV* pSession); + +VKAPI_ATTR void VKAPI_CALL vkDestroyOpticalFlowSessionNV( + VkDevice device, + VkOpticalFlowSessionNV session, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindOpticalFlowSessionImageNV( + VkDevice device, + VkOpticalFlowSessionNV session, + VkOpticalFlowSessionBindingPointNV bindingPoint, + VkImageView view, + VkImageLayout layout); + +VKAPI_ATTR void VKAPI_CALL vkCmdOpticalFlowExecuteNV( + VkCommandBuffer commandBuffer, + VkOpticalFlowSessionNV session, + const VkOpticalFlowExecuteInfoNV* pExecuteInfo); +#endif + + +// VK_EXT_legacy_dithering is a preprocessor guard. Do not pass it to API calls. 
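// Illustrative sketch for VK_NV_optical_flow (declared above): creating a flow session.
// Assumes the extension is enabled on "device"; the resolution and formats are placeholders
// and would normally be chosen from what vkGetPhysicalDeviceOpticalFlowImageFormatsNV reports.
static VkOpticalFlowSessionNV create_optical_flow_session(VkDevice device)
{
    VkOpticalFlowSessionCreateInfoNV session_info = {
        .sType            = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV,
        .width            = 1920,
        .height           = 1080,
        .imageFormat      = VK_FORMAT_B8G8R8A8_UNORM,
        .flowVectorFormat = VK_FORMAT_R16G16_S10_5_NV,
        .outputGridSize   = VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV,
        .performanceLevel = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV,
    };
    /* Extension entry points are not exported by the loader; fetch them explicitly. */
    PFN_vkCreateOpticalFlowSessionNV create_session =
        (PFN_vkCreateOpticalFlowSessionNV)vkGetDeviceProcAddr(device, "vkCreateOpticalFlowSessionNV");
    VkOpticalFlowSessionNV session = VK_NULL_HANDLE;
    create_session(device, &session_info, NULL, &session);
    return session;
}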
+#define VK_EXT_legacy_dithering 1 +#define VK_EXT_LEGACY_DITHERING_SPEC_VERSION 2 +#define VK_EXT_LEGACY_DITHERING_EXTENSION_NAME "VK_EXT_legacy_dithering" +typedef struct VkPhysicalDeviceLegacyDitheringFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 legacyDithering; +} VkPhysicalDeviceLegacyDitheringFeaturesEXT; + + + +// VK_EXT_pipeline_protected_access is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_protected_access 1 +#define VK_EXT_PIPELINE_PROTECTED_ACCESS_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_PROTECTED_ACCESS_EXTENSION_NAME "VK_EXT_pipeline_protected_access" +typedef VkPhysicalDevicePipelineProtectedAccessFeatures VkPhysicalDevicePipelineProtectedAccessFeaturesEXT; + + + +// VK_AMD_anti_lag is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_anti_lag 1 +#define VK_AMD_ANTI_LAG_SPEC_VERSION 1 +#define VK_AMD_ANTI_LAG_EXTENSION_NAME "VK_AMD_anti_lag" + +typedef enum VkAntiLagModeAMD { + VK_ANTI_LAG_MODE_DRIVER_CONTROL_AMD = 0, + VK_ANTI_LAG_MODE_ON_AMD = 1, + VK_ANTI_LAG_MODE_OFF_AMD = 2, + VK_ANTI_LAG_MODE_MAX_ENUM_AMD = 0x7FFFFFFF +} VkAntiLagModeAMD; + +typedef enum VkAntiLagStageAMD { + VK_ANTI_LAG_STAGE_INPUT_AMD = 0, + VK_ANTI_LAG_STAGE_PRESENT_AMD = 1, + VK_ANTI_LAG_STAGE_MAX_ENUM_AMD = 0x7FFFFFFF +} VkAntiLagStageAMD; +typedef struct VkPhysicalDeviceAntiLagFeaturesAMD { + VkStructureType sType; + void* pNext; + VkBool32 antiLag; +} VkPhysicalDeviceAntiLagFeaturesAMD; + +typedef struct VkAntiLagPresentationInfoAMD { + VkStructureType sType; + void* pNext; + VkAntiLagStageAMD stage; + uint64_t frameIndex; +} VkAntiLagPresentationInfoAMD; + +typedef struct VkAntiLagDataAMD { + VkStructureType sType; + const void* pNext; + VkAntiLagModeAMD mode; + uint32_t maxFPS; + const VkAntiLagPresentationInfoAMD* pPresentationInfo; +} VkAntiLagDataAMD; + +typedef void (VKAPI_PTR *PFN_vkAntiLagUpdateAMD)(VkDevice device, const VkAntiLagDataAMD* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkAntiLagUpdateAMD( + VkDevice device, + const VkAntiLagDataAMD* pData); +#endif + + +// VK_EXT_shader_object is a preprocessor guard. Do not pass it to API calls. 
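// Illustrative per-frame sketch for VK_AMD_anti_lag (declared above). Assumes the antiLag
// feature was enabled at device creation and that the vkAntiLagUpdateAMD entry point has
// been resolved (e.g. via vkGetDeviceProcAddr); frame_index is the application's frame counter.
static void anti_lag_mark_input(VkDevice device, uint64_t frame_index)
{
    VkAntiLagPresentationInfoAMD presentation_info = {
        .sType      = VK_STRUCTURE_TYPE_ANTI_LAG_PRESENTATION_INFO_AMD,
        .stage      = VK_ANTI_LAG_STAGE_INPUT_AMD,
        .frameIndex = frame_index,
    };
    VkAntiLagDataAMD anti_lag_data = {
        .sType             = VK_STRUCTURE_TYPE_ANTI_LAG_DATA_AMD,
        .mode              = VK_ANTI_LAG_MODE_ON_AMD,
        .maxFPS            = 0, /* 0 = no explicit framerate cap */
        .pPresentationInfo = &presentation_info,
    };
    vkAntiLagUpdateAMD(device, &anti_lag_data);
}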
+#define VK_EXT_shader_object 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderEXT) +#define VK_EXT_SHADER_OBJECT_SPEC_VERSION 1 +#define VK_EXT_SHADER_OBJECT_EXTENSION_NAME "VK_EXT_shader_object" + +typedef enum VkShaderCodeTypeEXT { + VK_SHADER_CODE_TYPE_BINARY_EXT = 0, + VK_SHADER_CODE_TYPE_SPIRV_EXT = 1, + VK_SHADER_CODE_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkShaderCodeTypeEXT; + +typedef enum VkDepthClampModeEXT { + VK_DEPTH_CLAMP_MODE_VIEWPORT_RANGE_EXT = 0, + VK_DEPTH_CLAMP_MODE_USER_DEFINED_RANGE_EXT = 1, + VK_DEPTH_CLAMP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDepthClampModeEXT; + +typedef enum VkShaderCreateFlagBitsEXT { + VK_SHADER_CREATE_LINK_STAGE_BIT_EXT = 0x00000001, + VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT = 0x00000002, + VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT = 0x00000004, + VK_SHADER_CREATE_NO_TASK_SHADER_BIT_EXT = 0x00000008, + VK_SHADER_CREATE_DISPATCH_BASE_BIT_EXT = 0x00000010, + VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT = 0x00000020, + VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00000040, + VK_SHADER_CREATE_INDIRECT_BINDABLE_BIT_EXT = 0x00000080, + VK_SHADER_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkShaderCreateFlagBitsEXT; +typedef VkFlags VkShaderCreateFlagsEXT; +typedef struct VkPhysicalDeviceShaderObjectFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderObject; +} VkPhysicalDeviceShaderObjectFeaturesEXT; + +typedef struct VkPhysicalDeviceShaderObjectPropertiesEXT { + VkStructureType sType; + void* pNext; + uint8_t shaderBinaryUUID[VK_UUID_SIZE]; + uint32_t shaderBinaryVersion; +} VkPhysicalDeviceShaderObjectPropertiesEXT; + +typedef struct VkShaderCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkShaderCreateFlagsEXT flags; + VkShaderStageFlagBits stage; + VkShaderStageFlags nextStage; + VkShaderCodeTypeEXT codeType; + size_t codeSize; + const void* pCode; + const char* pName; + uint32_t setLayoutCount; + const VkDescriptorSetLayout* pSetLayouts; + uint32_t pushConstantRangeCount; + const VkPushConstantRange* pPushConstantRanges; + const VkSpecializationInfo* pSpecializationInfo; +} VkShaderCreateInfoEXT; + +typedef VkPipelineShaderStageRequiredSubgroupSizeCreateInfo VkShaderRequiredSubgroupSizeCreateInfoEXT; + +typedef struct VkDepthClampRangeEXT { + float minDepthClamp; + float maxDepthClamp; +} VkDepthClampRangeEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateShadersEXT)(VkDevice device, uint32_t createInfoCount, const VkShaderCreateInfoEXT* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkShaderEXT* pShaders); +typedef void (VKAPI_PTR *PFN_vkDestroyShaderEXT)(VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetShaderBinaryDataEXT)(VkDevice device, VkShaderEXT shader, size_t* pDataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdBindShadersEXT)(VkCommandBuffer commandBuffer, uint32_t stageCount, const VkShaderStageFlagBits* pStages, const VkShaderEXT* pShaders); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClampRangeEXT)(VkCommandBuffer commandBuffer, VkDepthClampModeEXT depthClampMode, const VkDepthClampRangeEXT* pDepthClampRange); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateShadersEXT( + VkDevice device, + uint32_t createInfoCount, + const VkShaderCreateInfoEXT* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkShaderEXT* pShaders); + +VKAPI_ATTR void VKAPI_CALL vkDestroyShaderEXT( + VkDevice device, + VkShaderEXT shader, + const VkAllocationCallbacks* 
pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderBinaryDataEXT( + VkDevice device, + VkShaderEXT shader, + size_t* pDataSize, + void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindShadersEXT( + VkCommandBuffer commandBuffer, + uint32_t stageCount, + const VkShaderStageFlagBits* pStages, + const VkShaderEXT* pShaders); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClampRangeEXT( + VkCommandBuffer commandBuffer, + VkDepthClampModeEXT depthClampMode, + const VkDepthClampRangeEXT* pDepthClampRange); +#endif + + +// VK_QCOM_tile_properties is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_tile_properties 1 +#define VK_QCOM_TILE_PROPERTIES_SPEC_VERSION 1 +#define VK_QCOM_TILE_PROPERTIES_EXTENSION_NAME "VK_QCOM_tile_properties" +typedef struct VkPhysicalDeviceTilePropertiesFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 tileProperties; +} VkPhysicalDeviceTilePropertiesFeaturesQCOM; + +typedef struct VkTilePropertiesQCOM { + VkStructureType sType; + void* pNext; + VkExtent3D tileSize; + VkExtent2D apronSize; + VkOffset2D origin; +} VkTilePropertiesQCOM; + +typedef VkResult (VKAPI_PTR *PFN_vkGetFramebufferTilePropertiesQCOM)(VkDevice device, VkFramebuffer framebuffer, uint32_t* pPropertiesCount, VkTilePropertiesQCOM* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDynamicRenderingTilePropertiesQCOM)(VkDevice device, const VkRenderingInfo* pRenderingInfo, VkTilePropertiesQCOM* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetFramebufferTilePropertiesQCOM( + VkDevice device, + VkFramebuffer framebuffer, + uint32_t* pPropertiesCount, + VkTilePropertiesQCOM* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDynamicRenderingTilePropertiesQCOM( + VkDevice device, + const VkRenderingInfo* pRenderingInfo, + VkTilePropertiesQCOM* pProperties); +#endif + + +// VK_SEC_amigo_profiling is a preprocessor guard. Do not pass it to API calls. +#define VK_SEC_amigo_profiling 1 +#define VK_SEC_AMIGO_PROFILING_SPEC_VERSION 1 +#define VK_SEC_AMIGO_PROFILING_EXTENSION_NAME "VK_SEC_amigo_profiling" +typedef struct VkPhysicalDeviceAmigoProfilingFeaturesSEC { + VkStructureType sType; + void* pNext; + VkBool32 amigoProfiling; +} VkPhysicalDeviceAmigoProfilingFeaturesSEC; + +typedef struct VkAmigoProfilingSubmitInfoSEC { + VkStructureType sType; + const void* pNext; + uint64_t firstDrawTimestamp; + uint64_t swapBufferTimestamp; +} VkAmigoProfilingSubmitInfoSEC; + + + +// VK_QCOM_multiview_per_view_viewports is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_multiview_per_view_viewports 1 +#define VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_SPEC_VERSION 1 +#define VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_EXTENSION_NAME "VK_QCOM_multiview_per_view_viewports" +typedef struct VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 multiviewPerViewViewports; +} VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + + + +// VK_NV_ray_tracing_invocation_reorder is a preprocessor guard. Do not pass it to API calls. 
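// Illustrative sketch for VK_EXT_shader_object (declared above): building a linked
// vertex/fragment pair from SPIR-V with vkCreateShadersEXT. Assumes the shaderObject
// feature is enabled and the entry point is resolved; descriptor set layouts and push
// constants are omitted for brevity.
static VkResult create_linked_vs_fs(VkDevice device,
                                    const void* vs_spirv, size_t vs_size,
                                    const void* fs_spirv, size_t fs_size,
                                    VkShaderEXT out_shaders[2])
{
    VkShaderCreateInfoEXT create_infos[2] = {
        {
            .sType     = VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT,
            .flags     = VK_SHADER_CREATE_LINK_STAGE_BIT_EXT,
            .stage     = VK_SHADER_STAGE_VERTEX_BIT,
            .nextStage = VK_SHADER_STAGE_FRAGMENT_BIT,
            .codeType  = VK_SHADER_CODE_TYPE_SPIRV_EXT,
            .codeSize  = vs_size,
            .pCode     = vs_spirv,
            .pName     = "main",
        },
        {
            .sType    = VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT,
            .flags    = VK_SHADER_CREATE_LINK_STAGE_BIT_EXT,
            .stage    = VK_SHADER_STAGE_FRAGMENT_BIT,
            .codeType = VK_SHADER_CODE_TYPE_SPIRV_EXT,
            .codeSize = fs_size,
            .pCode    = fs_spirv,
            .pName    = "main",
        },
    };
    return vkCreateShadersEXT(device, 2, create_infos, NULL, out_shaders);
}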
+#define VK_NV_ray_tracing_invocation_reorder 1 +#define VK_NV_RAY_TRACING_INVOCATION_REORDER_SPEC_VERSION 1 +#define VK_NV_RAY_TRACING_INVOCATION_REORDER_EXTENSION_NAME "VK_NV_ray_tracing_invocation_reorder" + +typedef enum VkRayTracingInvocationReorderModeNV { + VK_RAY_TRACING_INVOCATION_REORDER_MODE_NONE_NV = 0, + VK_RAY_TRACING_INVOCATION_REORDER_MODE_REORDER_NV = 1, + VK_RAY_TRACING_INVOCATION_REORDER_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkRayTracingInvocationReorderModeNV; +typedef struct VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV { + VkStructureType sType; + void* pNext; + VkRayTracingInvocationReorderModeNV rayTracingInvocationReorderReorderingHint; +} VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV; + +typedef struct VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingInvocationReorder; +} VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV; + + + +// VK_NV_cooperative_vector is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_cooperative_vector 1 +#define VK_NV_COOPERATIVE_VECTOR_SPEC_VERSION 4 +#define VK_NV_COOPERATIVE_VECTOR_EXTENSION_NAME "VK_NV_cooperative_vector" + +typedef enum VkCooperativeVectorMatrixLayoutNV { + VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_ROW_MAJOR_NV = 0, + VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_COLUMN_MAJOR_NV = 1, + VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_INFERENCING_OPTIMAL_NV = 2, + VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_TRAINING_OPTIMAL_NV = 3, + VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_MAX_ENUM_NV = 0x7FFFFFFF +} VkCooperativeVectorMatrixLayoutNV; +typedef struct VkPhysicalDeviceCooperativeVectorPropertiesNV { + VkStructureType sType; + void* pNext; + VkShaderStageFlags cooperativeVectorSupportedStages; + VkBool32 cooperativeVectorTrainingFloat16Accumulation; + VkBool32 cooperativeVectorTrainingFloat32Accumulation; + uint32_t maxCooperativeVectorComponents; +} VkPhysicalDeviceCooperativeVectorPropertiesNV; + +typedef struct VkPhysicalDeviceCooperativeVectorFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cooperativeVector; + VkBool32 cooperativeVectorTraining; +} VkPhysicalDeviceCooperativeVectorFeaturesNV; + +typedef struct VkCooperativeVectorPropertiesNV { + VkStructureType sType; + void* pNext; + VkComponentTypeKHR inputType; + VkComponentTypeKHR inputInterpretation; + VkComponentTypeKHR matrixInterpretation; + VkComponentTypeKHR biasInterpretation; + VkComponentTypeKHR resultType; + VkBool32 transpose; +} VkCooperativeVectorPropertiesNV; + +typedef struct VkConvertCooperativeVectorMatrixInfoNV { + VkStructureType sType; + const void* pNext; + size_t srcSize; + VkDeviceOrHostAddressConstKHR srcData; + size_t* pDstSize; + VkDeviceOrHostAddressKHR dstData; + VkComponentTypeKHR srcComponentType; + VkComponentTypeKHR dstComponentType; + uint32_t numRows; + uint32_t numColumns; + VkCooperativeVectorMatrixLayoutNV srcLayout; + size_t srcStride; + VkCooperativeVectorMatrixLayoutNV dstLayout; + size_t dstStride; +} VkConvertCooperativeVectorMatrixInfoNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeVectorPropertiesNV)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeVectorPropertiesNV* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkConvertCooperativeVectorMatrixNV)(VkDevice device, const VkConvertCooperativeVectorMatrixInfoNV* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdConvertCooperativeVectorMatrixNV)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkConvertCooperativeVectorMatrixInfoNV* pInfos); + 
+#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeVectorPropertiesNV( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkCooperativeVectorPropertiesNV* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkConvertCooperativeVectorMatrixNV( + VkDevice device, + const VkConvertCooperativeVectorMatrixInfoNV* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdConvertCooperativeVectorMatrixNV( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkConvertCooperativeVectorMatrixInfoNV* pInfos); +#endif + + +// VK_NV_extended_sparse_address_space is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_extended_sparse_address_space 1 +#define VK_NV_EXTENDED_SPARSE_ADDRESS_SPACE_SPEC_VERSION 1 +#define VK_NV_EXTENDED_SPARSE_ADDRESS_SPACE_EXTENSION_NAME "VK_NV_extended_sparse_address_space" +typedef struct VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 extendedSparseAddressSpace; +} VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; + +typedef struct VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV { + VkStructureType sType; + void* pNext; + VkDeviceSize extendedSparseAddressSpaceSize; + VkImageUsageFlags extendedSparseImageUsageFlags; + VkBufferUsageFlags extendedSparseBufferUsageFlags; +} VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV; + + + +// VK_EXT_mutable_descriptor_type is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_mutable_descriptor_type 1 +#define VK_EXT_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1 +#define VK_EXT_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_EXT_mutable_descriptor_type" + + +// VK_EXT_legacy_vertex_attributes is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_legacy_vertex_attributes 1 +#define VK_EXT_LEGACY_VERTEX_ATTRIBUTES_SPEC_VERSION 1 +#define VK_EXT_LEGACY_VERTEX_ATTRIBUTES_EXTENSION_NAME "VK_EXT_legacy_vertex_attributes" +typedef struct VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 legacyVertexAttributes; +} VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT; + +typedef struct VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 nativeUnalignedPerformance; +} VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT; + + + +// VK_EXT_layer_settings is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_layer_settings 1 +#define VK_EXT_LAYER_SETTINGS_SPEC_VERSION 2 +#define VK_EXT_LAYER_SETTINGS_EXTENSION_NAME "VK_EXT_layer_settings" + +typedef enum VkLayerSettingTypeEXT { + VK_LAYER_SETTING_TYPE_BOOL32_EXT = 0, + VK_LAYER_SETTING_TYPE_INT32_EXT = 1, + VK_LAYER_SETTING_TYPE_INT64_EXT = 2, + VK_LAYER_SETTING_TYPE_UINT32_EXT = 3, + VK_LAYER_SETTING_TYPE_UINT64_EXT = 4, + VK_LAYER_SETTING_TYPE_FLOAT32_EXT = 5, + VK_LAYER_SETTING_TYPE_FLOAT64_EXT = 6, + VK_LAYER_SETTING_TYPE_STRING_EXT = 7, + VK_LAYER_SETTING_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkLayerSettingTypeEXT; +typedef struct VkLayerSettingEXT { + const char* pLayerName; + const char* pSettingName; + VkLayerSettingTypeEXT type; + uint32_t valueCount; + const void* pValues; +} VkLayerSettingEXT; + +typedef struct VkLayerSettingsCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t settingCount; + const VkLayerSettingEXT* pSettings; +} VkLayerSettingsCreateInfoEXT; + + + +// VK_ARM_shader_core_builtins is a preprocessor guard. Do not pass it to API calls. 
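// Illustrative sketch for VK_EXT_layer_settings (declared above): passing a boolean
// setting to a layer at instance creation. The layer and setting names are examples
// only; real names come from the layer's documentation or manifest.
static VkResult create_instance_with_layer_setting(VkInstance* out_instance)
{
    const VkBool32 enable_sync = VK_TRUE;
    VkLayerSettingEXT setting = {
        .pLayerName   = "VK_LAYER_KHRONOS_validation",
        .pSettingName = "validate_sync",
        .type         = VK_LAYER_SETTING_TYPE_BOOL32_EXT,
        .valueCount   = 1,
        .pValues      = &enable_sync,
    };
    VkLayerSettingsCreateInfoEXT settings_info = {
        .sType        = VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT,
        .settingCount = 1,
        .pSettings    = &setting,
    };
    VkInstanceCreateInfo instance_info = {
        .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        .pNext = &settings_info,
    };
    return vkCreateInstance(&instance_info, NULL, out_instance);
}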
+#define VK_ARM_shader_core_builtins 1 +#define VK_ARM_SHADER_CORE_BUILTINS_SPEC_VERSION 2 +#define VK_ARM_SHADER_CORE_BUILTINS_EXTENSION_NAME "VK_ARM_shader_core_builtins" +typedef struct VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM { + VkStructureType sType; + void* pNext; + VkBool32 shaderCoreBuiltins; +} VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM; + +typedef struct VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM { + VkStructureType sType; + void* pNext; + uint64_t shaderCoreMask; + uint32_t shaderCoreCount; + uint32_t shaderWarpsPerCore; +} VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM; + + + +// VK_EXT_pipeline_library_group_handles is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_library_group_handles 1 +#define VK_EXT_PIPELINE_LIBRARY_GROUP_HANDLES_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_LIBRARY_GROUP_HANDLES_EXTENSION_NAME "VK_EXT_pipeline_library_group_handles" +typedef struct VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 pipelineLibraryGroupHandles; +} VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + + + +// VK_EXT_dynamic_rendering_unused_attachments is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_dynamic_rendering_unused_attachments 1 +#define VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_SPEC_VERSION 1 +#define VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_EXTENSION_NAME "VK_EXT_dynamic_rendering_unused_attachments" +typedef struct VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 dynamicRenderingUnusedAttachments; +} VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + + + +// VK_NV_low_latency2 is a preprocessor guard. Do not pass it to API calls. 
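// Illustrative sketch: reading the VK_ARM_shader_core_builtins properties (declared above)
// through the standard vkGetPhysicalDeviceProperties2 pNext chain. Assumes the extension
// is supported by "physical_device".
static void query_shader_core_builtins(VkPhysicalDevice physical_device)
{
    VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM core_props = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM,
    };
    VkPhysicalDeviceProperties2 props2 = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
        .pNext = &core_props,
    };
    vkGetPhysicalDeviceProperties2(physical_device, &props2);
    /* core_props.shaderCoreCount and core_props.shaderWarpsPerCore are now filled in. */
}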
+#define VK_NV_low_latency2 1 +#define VK_NV_LOW_LATENCY_2_SPEC_VERSION 2 +#define VK_NV_LOW_LATENCY_2_EXTENSION_NAME "VK_NV_low_latency2" + +typedef enum VkLatencyMarkerNV { + VK_LATENCY_MARKER_SIMULATION_START_NV = 0, + VK_LATENCY_MARKER_SIMULATION_END_NV = 1, + VK_LATENCY_MARKER_RENDERSUBMIT_START_NV = 2, + VK_LATENCY_MARKER_RENDERSUBMIT_END_NV = 3, + VK_LATENCY_MARKER_PRESENT_START_NV = 4, + VK_LATENCY_MARKER_PRESENT_END_NV = 5, + VK_LATENCY_MARKER_INPUT_SAMPLE_NV = 6, + VK_LATENCY_MARKER_TRIGGER_FLASH_NV = 7, + VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_START_NV = 8, + VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_END_NV = 9, + VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_START_NV = 10, + VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_END_NV = 11, + VK_LATENCY_MARKER_MAX_ENUM_NV = 0x7FFFFFFF +} VkLatencyMarkerNV; + +typedef enum VkOutOfBandQueueTypeNV { + VK_OUT_OF_BAND_QUEUE_TYPE_RENDER_NV = 0, + VK_OUT_OF_BAND_QUEUE_TYPE_PRESENT_NV = 1, + VK_OUT_OF_BAND_QUEUE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkOutOfBandQueueTypeNV; +typedef struct VkLatencySleepModeInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 lowLatencyMode; + VkBool32 lowLatencyBoost; + uint32_t minimumIntervalUs; +} VkLatencySleepModeInfoNV; + +typedef struct VkLatencySleepInfoNV { + VkStructureType sType; + const void* pNext; + VkSemaphore signalSemaphore; + uint64_t value; +} VkLatencySleepInfoNV; + +typedef struct VkSetLatencyMarkerInfoNV { + VkStructureType sType; + const void* pNext; + uint64_t presentID; + VkLatencyMarkerNV marker; +} VkSetLatencyMarkerInfoNV; + +typedef struct VkLatencyTimingsFrameReportNV { + VkStructureType sType; + const void* pNext; + uint64_t presentID; + uint64_t inputSampleTimeUs; + uint64_t simStartTimeUs; + uint64_t simEndTimeUs; + uint64_t renderSubmitStartTimeUs; + uint64_t renderSubmitEndTimeUs; + uint64_t presentStartTimeUs; + uint64_t presentEndTimeUs; + uint64_t driverStartTimeUs; + uint64_t driverEndTimeUs; + uint64_t osRenderQueueStartTimeUs; + uint64_t osRenderQueueEndTimeUs; + uint64_t gpuRenderStartTimeUs; + uint64_t gpuRenderEndTimeUs; +} VkLatencyTimingsFrameReportNV; + +typedef struct VkGetLatencyMarkerInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t timingCount; + VkLatencyTimingsFrameReportNV* pTimings; +} VkGetLatencyMarkerInfoNV; + +typedef struct VkLatencySubmissionPresentIdNV { + VkStructureType sType; + const void* pNext; + uint64_t presentID; +} VkLatencySubmissionPresentIdNV; + +typedef struct VkSwapchainLatencyCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 latencyModeEnable; +} VkSwapchainLatencyCreateInfoNV; + +typedef struct VkOutOfBandQueueTypeInfoNV { + VkStructureType sType; + const void* pNext; + VkOutOfBandQueueTypeNV queueType; +} VkOutOfBandQueueTypeInfoNV; + +typedef struct VkLatencySurfaceCapabilitiesNV { + VkStructureType sType; + const void* pNext; + uint32_t presentModeCount; + VkPresentModeKHR* pPresentModes; +} VkLatencySurfaceCapabilitiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkSetLatencySleepModeNV)(VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepModeInfoNV* pSleepModeInfo); +typedef VkResult (VKAPI_PTR *PFN_vkLatencySleepNV)(VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepInfoNV* pSleepInfo); +typedef void (VKAPI_PTR *PFN_vkSetLatencyMarkerNV)(VkDevice device, VkSwapchainKHR swapchain, const VkSetLatencyMarkerInfoNV* pLatencyMarkerInfo); +typedef void (VKAPI_PTR *PFN_vkGetLatencyTimingsNV)(VkDevice device, VkSwapchainKHR swapchain, VkGetLatencyMarkerInfoNV* pLatencyMarkerInfo); 
+typedef void (VKAPI_PTR *PFN_vkQueueNotifyOutOfBandNV)(VkQueue queue, const VkOutOfBandQueueTypeInfoNV* pQueueTypeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkSetLatencySleepModeNV( + VkDevice device, + VkSwapchainKHR swapchain, + const VkLatencySleepModeInfoNV* pSleepModeInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkLatencySleepNV( + VkDevice device, + VkSwapchainKHR swapchain, + const VkLatencySleepInfoNV* pSleepInfo); + +VKAPI_ATTR void VKAPI_CALL vkSetLatencyMarkerNV( + VkDevice device, + VkSwapchainKHR swapchain, + const VkSetLatencyMarkerInfoNV* pLatencyMarkerInfo); + +VKAPI_ATTR void VKAPI_CALL vkGetLatencyTimingsNV( + VkDevice device, + VkSwapchainKHR swapchain, + VkGetLatencyMarkerInfoNV* pLatencyMarkerInfo); + +VKAPI_ATTR void VKAPI_CALL vkQueueNotifyOutOfBandNV( + VkQueue queue, + const VkOutOfBandQueueTypeInfoNV* pQueueTypeInfo); +#endif + + +// VK_QCOM_multiview_per_view_render_areas is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_multiview_per_view_render_areas 1 +#define VK_QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_SPEC_VERSION 1 +#define VK_QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_EXTENSION_NAME "VK_QCOM_multiview_per_view_render_areas" +typedef struct VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 multiviewPerViewRenderAreas; +} VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + +typedef struct VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM { + VkStructureType sType; + const void* pNext; + uint32_t perViewRenderAreaCount; + const VkRect2D* pPerViewRenderAreas; +} VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + + + +// VK_NV_per_stage_descriptor_set is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_per_stage_descriptor_set 1 +#define VK_NV_PER_STAGE_DESCRIPTOR_SET_SPEC_VERSION 1 +#define VK_NV_PER_STAGE_DESCRIPTOR_SET_EXTENSION_NAME "VK_NV_per_stage_descriptor_set" +typedef struct VkPhysicalDevicePerStageDescriptorSetFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 perStageDescriptorSet; + VkBool32 dynamicPipelineLayout; +} VkPhysicalDevicePerStageDescriptorSetFeaturesNV; + + + +// VK_QCOM_image_processing2 is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_image_processing2 1 +#define VK_QCOM_IMAGE_PROCESSING_2_SPEC_VERSION 1 +#define VK_QCOM_IMAGE_PROCESSING_2_EXTENSION_NAME "VK_QCOM_image_processing2" + +typedef enum VkBlockMatchWindowCompareModeQCOM { + VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MIN_QCOM = 0, + VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MAX_QCOM = 1, + VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MAX_ENUM_QCOM = 0x7FFFFFFF +} VkBlockMatchWindowCompareModeQCOM; +typedef struct VkPhysicalDeviceImageProcessing2FeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 textureBlockMatch2; +} VkPhysicalDeviceImageProcessing2FeaturesQCOM; + +typedef struct VkPhysicalDeviceImageProcessing2PropertiesQCOM { + VkStructureType sType; + void* pNext; + VkExtent2D maxBlockMatchWindow; +} VkPhysicalDeviceImageProcessing2PropertiesQCOM; + +typedef struct VkSamplerBlockMatchWindowCreateInfoQCOM { + VkStructureType sType; + const void* pNext; + VkExtent2D windowExtent; + VkBlockMatchWindowCompareModeQCOM windowCompareMode; +} VkSamplerBlockMatchWindowCreateInfoQCOM; + + + +// VK_QCOM_filter_cubic_weights is a preprocessor guard. Do not pass it to API calls. 
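// Illustrative per-frame sketch for VK_NV_low_latency2 (declared above). Assumes the
// swapchain was created with VkSwapchainLatencyCreateInfoNV::latencyModeEnable set and
// that the entry points are resolved; timeline_sem, wait_value and present_id are
// supplied by the application.
static void low_latency_frame_begin(VkDevice device, VkSwapchainKHR swapchain,
                                    VkSemaphore timeline_sem, uint64_t wait_value,
                                    uint64_t present_id)
{
    VkLatencySleepModeInfoNV mode_info = {
        .sType          = VK_STRUCTURE_TYPE_LATENCY_SLEEP_MODE_INFO_NV,
        .lowLatencyMode = VK_TRUE,
    };
    vkSetLatencySleepModeNV(device, swapchain, &mode_info);

    /* The driver signals timeline_sem at wait_value when the CPU should start the next
       frame; the application then waits on that semaphore (e.g. with vkWaitSemaphores). */
    VkLatencySleepInfoNV sleep_info = {
        .sType           = VK_STRUCTURE_TYPE_LATENCY_SLEEP_INFO_NV,
        .signalSemaphore = timeline_sem,
        .value           = wait_value,
    };
    vkLatencySleepNV(device, swapchain, &sleep_info);

    VkSetLatencyMarkerInfoNV marker_info = {
        .sType     = VK_STRUCTURE_TYPE_SET_LATENCY_MARKER_INFO_NV,
        .presentID = present_id,
        .marker    = VK_LATENCY_MARKER_SIMULATION_START_NV,
    };
    vkSetLatencyMarkerNV(device, swapchain, &marker_info);
}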
+#define VK_QCOM_filter_cubic_weights 1 +#define VK_QCOM_FILTER_CUBIC_WEIGHTS_SPEC_VERSION 1 +#define VK_QCOM_FILTER_CUBIC_WEIGHTS_EXTENSION_NAME "VK_QCOM_filter_cubic_weights" + +typedef enum VkCubicFilterWeightsQCOM { + VK_CUBIC_FILTER_WEIGHTS_CATMULL_ROM_QCOM = 0, + VK_CUBIC_FILTER_WEIGHTS_ZERO_TANGENT_CARDINAL_QCOM = 1, + VK_CUBIC_FILTER_WEIGHTS_B_SPLINE_QCOM = 2, + VK_CUBIC_FILTER_WEIGHTS_MITCHELL_NETRAVALI_QCOM = 3, + VK_CUBIC_FILTER_WEIGHTS_MAX_ENUM_QCOM = 0x7FFFFFFF +} VkCubicFilterWeightsQCOM; +typedef struct VkPhysicalDeviceCubicWeightsFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 selectableCubicWeights; +} VkPhysicalDeviceCubicWeightsFeaturesQCOM; + +typedef struct VkSamplerCubicWeightsCreateInfoQCOM { + VkStructureType sType; + const void* pNext; + VkCubicFilterWeightsQCOM cubicWeights; +} VkSamplerCubicWeightsCreateInfoQCOM; + +typedef struct VkBlitImageCubicWeightsInfoQCOM { + VkStructureType sType; + const void* pNext; + VkCubicFilterWeightsQCOM cubicWeights; +} VkBlitImageCubicWeightsInfoQCOM; + + + +// VK_QCOM_ycbcr_degamma is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_ycbcr_degamma 1 +#define VK_QCOM_YCBCR_DEGAMMA_SPEC_VERSION 1 +#define VK_QCOM_YCBCR_DEGAMMA_EXTENSION_NAME "VK_QCOM_ycbcr_degamma" +typedef struct VkPhysicalDeviceYcbcrDegammaFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 ycbcrDegamma; +} VkPhysicalDeviceYcbcrDegammaFeaturesQCOM; + +typedef struct VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM { + VkStructureType sType; + void* pNext; + VkBool32 enableYDegamma; + VkBool32 enableCbCrDegamma; +} VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM; + + + +// VK_QCOM_filter_cubic_clamp is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_filter_cubic_clamp 1 +#define VK_QCOM_FILTER_CUBIC_CLAMP_SPEC_VERSION 1 +#define VK_QCOM_FILTER_CUBIC_CLAMP_EXTENSION_NAME "VK_QCOM_filter_cubic_clamp" +typedef struct VkPhysicalDeviceCubicClampFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 cubicRangeClamp; +} VkPhysicalDeviceCubicClampFeaturesQCOM; + + + +// VK_EXT_attachment_feedback_loop_dynamic_state is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_attachment_feedback_loop_dynamic_state 1 +#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_SPEC_VERSION 1 +#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_attachment_feedback_loop_dynamic_state" +typedef struct VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 attachmentFeedbackLoopDynamicState; +} VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT)(VkCommandBuffer commandBuffer, VkImageAspectFlags aspectMask); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetAttachmentFeedbackLoopEnableEXT( + VkCommandBuffer commandBuffer, + VkImageAspectFlags aspectMask); +#endif + + +// VK_MSFT_layered_driver is a preprocessor guard. Do not pass it to API calls. 
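// Illustrative sketch for VK_QCOM_filter_cubic_weights (declared above): selecting
// B-spline weights for a cubic-filtering sampler. Assumes the selectableCubicWeights
// feature is enabled and that cubic filtering (VK_FILTER_CUBIC_EXT) is supported for
// the formats this sampler will be used with.
static VkSampler create_bspline_cubic_sampler(VkDevice device)
{
    VkSamplerCubicWeightsCreateInfoQCOM weights_info = {
        .sType        = VK_STRUCTURE_TYPE_SAMPLER_CUBIC_WEIGHTS_CREATE_INFO_QCOM,
        .cubicWeights = VK_CUBIC_FILTER_WEIGHTS_B_SPLINE_QCOM,
    };
    VkSamplerCreateInfo sampler_info = {
        .sType     = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
        .pNext     = &weights_info,
        .magFilter = VK_FILTER_CUBIC_EXT,
        .minFilter = VK_FILTER_CUBIC_EXT,
    };
    VkSampler sampler = VK_NULL_HANDLE;
    vkCreateSampler(device, &sampler_info, NULL, &sampler);
    return sampler;
}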
+#define VK_MSFT_layered_driver 1 +#define VK_MSFT_LAYERED_DRIVER_SPEC_VERSION 1 +#define VK_MSFT_LAYERED_DRIVER_EXTENSION_NAME "VK_MSFT_layered_driver" + +typedef enum VkLayeredDriverUnderlyingApiMSFT { + VK_LAYERED_DRIVER_UNDERLYING_API_NONE_MSFT = 0, + VK_LAYERED_DRIVER_UNDERLYING_API_D3D12_MSFT = 1, + VK_LAYERED_DRIVER_UNDERLYING_API_MAX_ENUM_MSFT = 0x7FFFFFFF +} VkLayeredDriverUnderlyingApiMSFT; +typedef struct VkPhysicalDeviceLayeredDriverPropertiesMSFT { + VkStructureType sType; + void* pNext; + VkLayeredDriverUnderlyingApiMSFT underlyingAPI; +} VkPhysicalDeviceLayeredDriverPropertiesMSFT; + + + +// VK_NV_descriptor_pool_overallocation is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_descriptor_pool_overallocation 1 +#define VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_SPEC_VERSION 1 +#define VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_EXTENSION_NAME "VK_NV_descriptor_pool_overallocation" +typedef struct VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 descriptorPoolOverallocation; +} VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + + + +// VK_QCOM_tile_memory_heap is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_tile_memory_heap 1 +#define VK_QCOM_TILE_MEMORY_HEAP_SPEC_VERSION 1 +#define VK_QCOM_TILE_MEMORY_HEAP_EXTENSION_NAME "VK_QCOM_tile_memory_heap" +typedef struct VkPhysicalDeviceTileMemoryHeapFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 tileMemoryHeap; +} VkPhysicalDeviceTileMemoryHeapFeaturesQCOM; + +typedef struct VkPhysicalDeviceTileMemoryHeapPropertiesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 queueSubmitBoundary; + VkBool32 tileBufferTransfers; +} VkPhysicalDeviceTileMemoryHeapPropertiesQCOM; + +typedef struct VkTileMemoryRequirementsQCOM { + VkStructureType sType; + void* pNext; + VkDeviceSize size; + VkDeviceSize alignment; +} VkTileMemoryRequirementsQCOM; + +typedef struct VkTileMemoryBindInfoQCOM { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; +} VkTileMemoryBindInfoQCOM; + +typedef struct VkTileMemorySizeInfoQCOM { + VkStructureType sType; + const void* pNext; + VkDeviceSize size; +} VkTileMemorySizeInfoQCOM; + +typedef void (VKAPI_PTR *PFN_vkCmdBindTileMemoryQCOM)(VkCommandBuffer commandBuffer, const VkTileMemoryBindInfoQCOM* pTileMemoryBindInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindTileMemoryQCOM( + VkCommandBuffer commandBuffer, + const VkTileMemoryBindInfoQCOM* pTileMemoryBindInfo); +#endif + + +// VK_NV_display_stereo is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_display_stereo 1 +#define VK_NV_DISPLAY_STEREO_SPEC_VERSION 1 +#define VK_NV_DISPLAY_STEREO_EXTENSION_NAME "VK_NV_display_stereo" + +typedef enum VkDisplaySurfaceStereoTypeNV { + VK_DISPLAY_SURFACE_STEREO_TYPE_NONE_NV = 0, + VK_DISPLAY_SURFACE_STEREO_TYPE_ONBOARD_DIN_NV = 1, + VK_DISPLAY_SURFACE_STEREO_TYPE_HDMI_3D_NV = 2, + VK_DISPLAY_SURFACE_STEREO_TYPE_INBAND_DISPLAYPORT_NV = 3, + VK_DISPLAY_SURFACE_STEREO_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkDisplaySurfaceStereoTypeNV; +typedef struct VkDisplaySurfaceStereoCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkDisplaySurfaceStereoTypeNV stereoType; +} VkDisplaySurfaceStereoCreateInfoNV; + +typedef struct VkDisplayModeStereoPropertiesNV { + VkStructureType sType; + const void* pNext; + VkBool32 hdmi3DSupported; +} VkDisplayModeStereoPropertiesNV; + + + +// VK_NV_raw_access_chains is a preprocessor guard. Do not pass it to API calls. 
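// Illustrative sketch: detecting a layered implementation via VK_MSFT_layered_driver
// (declared above), using the standard properties2 pNext chain. Assumes the extension
// is supported by "physical_device".
static VkBool32 runs_on_layered_d3d12_driver(VkPhysicalDevice physical_device)
{
    VkPhysicalDeviceLayeredDriverPropertiesMSFT layered_props = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT,
    };
    VkPhysicalDeviceProperties2 props2 = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
        .pNext = &layered_props,
    };
    vkGetPhysicalDeviceProperties2(physical_device, &props2);
    return layered_props.underlyingAPI == VK_LAYERED_DRIVER_UNDERLYING_API_D3D12_MSFT;
}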
+#define VK_NV_raw_access_chains 1 +#define VK_NV_RAW_ACCESS_CHAINS_SPEC_VERSION 1 +#define VK_NV_RAW_ACCESS_CHAINS_EXTENSION_NAME "VK_NV_raw_access_chains" +typedef struct VkPhysicalDeviceRawAccessChainsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 shaderRawAccessChains; +} VkPhysicalDeviceRawAccessChainsFeaturesNV; + + + +// VK_NV_external_compute_queue is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_external_compute_queue 1 +VK_DEFINE_HANDLE(VkExternalComputeQueueNV) +#define VK_NV_EXTERNAL_COMPUTE_QUEUE_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_COMPUTE_QUEUE_EXTENSION_NAME "VK_NV_external_compute_queue" +typedef struct VkExternalComputeQueueDeviceCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t reservedExternalQueues; +} VkExternalComputeQueueDeviceCreateInfoNV; + +typedef struct VkExternalComputeQueueCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkQueue preferredQueue; +} VkExternalComputeQueueCreateInfoNV; + +typedef struct VkExternalComputeQueueDataParamsNV { + VkStructureType sType; + const void* pNext; + uint32_t deviceIndex; +} VkExternalComputeQueueDataParamsNV; + +typedef struct VkPhysicalDeviceExternalComputeQueuePropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t externalDataSize; + uint32_t maxExternalQueues; +} VkPhysicalDeviceExternalComputeQueuePropertiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateExternalComputeQueueNV)(VkDevice device, const VkExternalComputeQueueCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkExternalComputeQueueNV* pExternalQueue); +typedef void (VKAPI_PTR *PFN_vkDestroyExternalComputeQueueNV)(VkDevice device, VkExternalComputeQueueNV externalQueue, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetExternalComputeQueueDataNV)(VkExternalComputeQueueNV externalQueue, VkExternalComputeQueueDataParamsNV* params, void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateExternalComputeQueueNV( + VkDevice device, + const VkExternalComputeQueueCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkExternalComputeQueueNV* pExternalQueue); + +VKAPI_ATTR void VKAPI_CALL vkDestroyExternalComputeQueueNV( + VkDevice device, + VkExternalComputeQueueNV externalQueue, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetExternalComputeQueueDataNV( + VkExternalComputeQueueNV externalQueue, + VkExternalComputeQueueDataParamsNV* params, + void* pData); +#endif + + +// VK_NV_command_buffer_inheritance is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_command_buffer_inheritance 1 +#define VK_NV_COMMAND_BUFFER_INHERITANCE_SPEC_VERSION 1 +#define VK_NV_COMMAND_BUFFER_INHERITANCE_EXTENSION_NAME "VK_NV_command_buffer_inheritance" +typedef struct VkPhysicalDeviceCommandBufferInheritanceFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 commandBufferInheritance; +} VkPhysicalDeviceCommandBufferInheritanceFeaturesNV; + + + +// VK_NV_shader_atomic_float16_vector is a preprocessor guard. Do not pass it to API calls. 
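// Illustrative sketch for VK_NV_external_compute_queue (declared above). Assumes the
// device was created with VkExternalComputeQueueDeviceCreateInfoNV reserving at least
// one external queue, and that the structure-type token follows the usual
// VK_STRUCTURE_TYPE_* naming pattern for this extension.
static VkExternalComputeQueueNV create_external_compute_queue(VkDevice device, VkQueue preferred_queue)
{
    VkExternalComputeQueueCreateInfoNV create_info = {
        .sType          = VK_STRUCTURE_TYPE_EXTERNAL_COMPUTE_QUEUE_CREATE_INFO_NV,
        .preferredQueue = preferred_queue,
    };
    PFN_vkCreateExternalComputeQueueNV create_queue =
        (PFN_vkCreateExternalComputeQueueNV)vkGetDeviceProcAddr(device, "vkCreateExternalComputeQueueNV");
    VkExternalComputeQueueNV external_queue = VK_NULL_HANDLE;
    create_queue(device, &create_info, NULL, &external_queue);
    return external_queue;
}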
+#define VK_NV_shader_atomic_float16_vector 1 +#define VK_NV_SHADER_ATOMIC_FLOAT16_VECTOR_SPEC_VERSION 1 +#define VK_NV_SHADER_ATOMIC_FLOAT16_VECTOR_EXTENSION_NAME "VK_NV_shader_atomic_float16_vector" +typedef struct VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 shaderFloat16VectorAtomics; +} VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; + + + +// VK_EXT_shader_replicated_composites is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_replicated_composites 1 +#define VK_EXT_SHADER_REPLICATED_COMPOSITES_SPEC_VERSION 1 +#define VK_EXT_SHADER_REPLICATED_COMPOSITES_EXTENSION_NAME "VK_EXT_shader_replicated_composites" +typedef struct VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderReplicatedComposites; +} VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + + + +// VK_NV_ray_tracing_validation is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_ray_tracing_validation 1 +#define VK_NV_RAY_TRACING_VALIDATION_SPEC_VERSION 1 +#define VK_NV_RAY_TRACING_VALIDATION_EXTENSION_NAME "VK_NV_ray_tracing_validation" +typedef struct VkPhysicalDeviceRayTracingValidationFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingValidation; +} VkPhysicalDeviceRayTracingValidationFeaturesNV; + + + +// VK_NV_cluster_acceleration_structure is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_cluster_acceleration_structure 1 +#define VK_NV_CLUSTER_ACCELERATION_STRUCTURE_SPEC_VERSION 2 +#define VK_NV_CLUSTER_ACCELERATION_STRUCTURE_EXTENSION_NAME "VK_NV_cluster_acceleration_structure" + +typedef enum VkClusterAccelerationStructureTypeNV { + VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_CLUSTERS_BOTTOM_LEVEL_NV = 0, + VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_TRIANGLE_CLUSTER_NV = 1, + VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_TRIANGLE_CLUSTER_TEMPLATE_NV = 2, + VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkClusterAccelerationStructureTypeNV; + +typedef enum VkClusterAccelerationStructureOpTypeNV { + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_MOVE_OBJECTS_NV = 0, + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_BUILD_CLUSTERS_BOTTOM_LEVEL_NV = 1, + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_BUILD_TRIANGLE_CLUSTER_NV = 2, + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_BUILD_TRIANGLE_CLUSTER_TEMPLATE_NV = 3, + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_INSTANTIATE_TRIANGLE_CLUSTER_NV = 4, + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkClusterAccelerationStructureOpTypeNV; + +typedef enum VkClusterAccelerationStructureOpModeNV { + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_IMPLICIT_DESTINATIONS_NV = 0, + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_EXPLICIT_DESTINATIONS_NV = 1, + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_COMPUTE_SIZES_NV = 2, + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkClusterAccelerationStructureOpModeNV; + +typedef enum VkClusterAccelerationStructureAddressResolutionFlagBitsNV { + VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_DST_IMPLICIT_DATA_BIT_NV = 0x00000001, + VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_SCRATCH_DATA_BIT_NV = 0x00000002, + VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_DST_ADDRESS_ARRAY_BIT_NV = 0x00000004, + VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_DST_SIZES_ARRAY_BIT_NV = 0x00000008, + 
VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_SRC_INFOS_ARRAY_BIT_NV = 0x00000010, + VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_SRC_INFOS_COUNT_BIT_NV = 0x00000020, + VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkClusterAccelerationStructureAddressResolutionFlagBitsNV; +typedef VkFlags VkClusterAccelerationStructureAddressResolutionFlagsNV; + +typedef enum VkClusterAccelerationStructureClusterFlagBitsNV { + VK_CLUSTER_ACCELERATION_STRUCTURE_CLUSTER_ALLOW_DISABLE_OPACITY_MICROMAPS_NV = 0x00000001, + VK_CLUSTER_ACCELERATION_STRUCTURE_CLUSTER_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkClusterAccelerationStructureClusterFlagBitsNV; +typedef VkFlags VkClusterAccelerationStructureClusterFlagsNV; + +typedef enum VkClusterAccelerationStructureGeometryFlagBitsNV { + VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_CULL_DISABLE_BIT_NV = 0x00000001, + VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_NO_DUPLICATE_ANYHIT_INVOCATION_BIT_NV = 0x00000002, + VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_OPAQUE_BIT_NV = 0x00000004, + VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkClusterAccelerationStructureGeometryFlagBitsNV; +typedef VkFlags VkClusterAccelerationStructureGeometryFlagsNV; + +typedef enum VkClusterAccelerationStructureIndexFormatFlagBitsNV { + VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_8BIT_NV = 0x00000001, + VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_16BIT_NV = 0x00000002, + VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_32BIT_NV = 0x00000004, + VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkClusterAccelerationStructureIndexFormatFlagBitsNV; +typedef VkFlags VkClusterAccelerationStructureIndexFormatFlagsNV; +typedef struct VkPhysicalDeviceClusterAccelerationStructureFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 clusterAccelerationStructure; +} VkPhysicalDeviceClusterAccelerationStructureFeaturesNV; + +typedef struct VkPhysicalDeviceClusterAccelerationStructurePropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxVerticesPerCluster; + uint32_t maxTrianglesPerCluster; + uint32_t clusterScratchByteAlignment; + uint32_t clusterByteAlignment; + uint32_t clusterTemplateByteAlignment; + uint32_t clusterBottomLevelByteAlignment; + uint32_t clusterTemplateBoundsByteAlignment; + uint32_t maxClusterGeometryIndex; +} VkPhysicalDeviceClusterAccelerationStructurePropertiesNV; + +typedef struct VkClusterAccelerationStructureClustersBottomLevelInputNV { + VkStructureType sType; + void* pNext; + uint32_t maxTotalClusterCount; + uint32_t maxClusterCountPerAccelerationStructure; +} VkClusterAccelerationStructureClustersBottomLevelInputNV; + +typedef struct VkClusterAccelerationStructureTriangleClusterInputNV { + VkStructureType sType; + void* pNext; + VkFormat vertexFormat; + uint32_t maxGeometryIndexValue; + uint32_t maxClusterUniqueGeometryCount; + uint32_t maxClusterTriangleCount; + uint32_t maxClusterVertexCount; + uint32_t maxTotalTriangleCount; + uint32_t maxTotalVertexCount; + uint32_t minPositionTruncateBitCount; +} VkClusterAccelerationStructureTriangleClusterInputNV; + +typedef struct VkClusterAccelerationStructureMoveObjectsInputNV { + VkStructureType sType; + void* pNext; + VkClusterAccelerationStructureTypeNV type; + VkBool32 noMoveOverlap; + VkDeviceSize maxMovedBytes; +} VkClusterAccelerationStructureMoveObjectsInputNV; + +typedef union VkClusterAccelerationStructureOpInputNV { + 
VkClusterAccelerationStructureClustersBottomLevelInputNV* pClustersBottomLevel; + VkClusterAccelerationStructureTriangleClusterInputNV* pTriangleClusters; + VkClusterAccelerationStructureMoveObjectsInputNV* pMoveObjects; +} VkClusterAccelerationStructureOpInputNV; + +typedef struct VkClusterAccelerationStructureInputInfoNV { + VkStructureType sType; + void* pNext; + uint32_t maxAccelerationStructureCount; + VkBuildAccelerationStructureFlagsKHR flags; + VkClusterAccelerationStructureOpTypeNV opType; + VkClusterAccelerationStructureOpModeNV opMode; + VkClusterAccelerationStructureOpInputNV opInput; +} VkClusterAccelerationStructureInputInfoNV; + +typedef struct VkStridedDeviceAddressRegionKHR { + VkDeviceAddress deviceAddress; + VkDeviceSize stride; + VkDeviceSize size; +} VkStridedDeviceAddressRegionKHR; + +typedef struct VkClusterAccelerationStructureCommandsInfoNV { + VkStructureType sType; + void* pNext; + VkClusterAccelerationStructureInputInfoNV input; + VkDeviceAddress dstImplicitData; + VkDeviceAddress scratchData; + VkStridedDeviceAddressRegionKHR dstAddressesArray; + VkStridedDeviceAddressRegionKHR dstSizesArray; + VkStridedDeviceAddressRegionKHR srcInfosArray; + VkDeviceAddress srcInfosCount; + VkClusterAccelerationStructureAddressResolutionFlagsNV addressResolutionFlags; +} VkClusterAccelerationStructureCommandsInfoNV; + +typedef struct VkStridedDeviceAddressNV { + VkDeviceAddress startAddress; + VkDeviceSize strideInBytes; +} VkStridedDeviceAddressNV; + +typedef struct VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV { + uint32_t geometryIndex:24; + uint32_t reserved:5; + uint32_t geometryFlags:3; +} VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV; + +typedef struct VkClusterAccelerationStructureMoveObjectsInfoNV { + VkDeviceAddress srcAccelerationStructure; +} VkClusterAccelerationStructureMoveObjectsInfoNV; + +typedef struct VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV { + uint32_t clusterReferencesCount; + uint32_t clusterReferencesStride; + VkDeviceAddress clusterReferences; +} VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV; + +typedef struct VkClusterAccelerationStructureBuildTriangleClusterInfoNV { + uint32_t clusterID; + VkClusterAccelerationStructureClusterFlagsNV clusterFlags; + uint32_t triangleCount:9; + uint32_t vertexCount:9; + uint32_t positionTruncateBitCount:6; + uint32_t indexType:4; + uint32_t opacityMicromapIndexType:4; + VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags; + uint16_t indexBufferStride; + uint16_t vertexBufferStride; + uint16_t geometryIndexAndFlagsBufferStride; + uint16_t opacityMicromapIndexBufferStride; + VkDeviceAddress indexBuffer; + VkDeviceAddress vertexBuffer; + VkDeviceAddress geometryIndexAndFlagsBuffer; + VkDeviceAddress opacityMicromapArray; + VkDeviceAddress opacityMicromapIndexBuffer; +} VkClusterAccelerationStructureBuildTriangleClusterInfoNV; + +typedef struct VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV { + uint32_t clusterID; + VkClusterAccelerationStructureClusterFlagsNV clusterFlags; + uint32_t triangleCount:9; + uint32_t vertexCount:9; + uint32_t positionTruncateBitCount:6; + uint32_t indexType:4; + uint32_t opacityMicromapIndexType:4; + VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags; + uint16_t indexBufferStride; + uint16_t vertexBufferStride; + uint16_t geometryIndexAndFlagsBufferStride; + uint16_t opacityMicromapIndexBufferStride; + VkDeviceAddress 
indexBuffer; + VkDeviceAddress vertexBuffer; + VkDeviceAddress geometryIndexAndFlagsBuffer; + VkDeviceAddress opacityMicromapArray; + VkDeviceAddress opacityMicromapIndexBuffer; + VkDeviceAddress instantiationBoundingBoxLimit; +} VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV; + +typedef struct VkClusterAccelerationStructureInstantiateClusterInfoNV { + uint32_t clusterIdOffset; + uint32_t geometryIndexOffset:24; + uint32_t reserved:8; + VkDeviceAddress clusterTemplateAddress; + VkStridedDeviceAddressNV vertexBuffer; +} VkClusterAccelerationStructureInstantiateClusterInfoNV; + +typedef struct VkAccelerationStructureBuildSizesInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceSize accelerationStructureSize; + VkDeviceSize updateScratchSize; + VkDeviceSize buildScratchSize; +} VkAccelerationStructureBuildSizesInfoKHR; + +typedef struct VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV { + VkStructureType sType; + void* pNext; + VkBool32 allowClusterAccelerationStructure; +} VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV; + +typedef void (VKAPI_PTR *PFN_vkGetClusterAccelerationStructureBuildSizesNV)(VkDevice device, const VkClusterAccelerationStructureInputInfoNV* pInfo, VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBuildClusterAccelerationStructureIndirectNV)(VkCommandBuffer commandBuffer, const VkClusterAccelerationStructureCommandsInfoNV* pCommandInfos); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetClusterAccelerationStructureBuildSizesNV( + VkDevice device, + const VkClusterAccelerationStructureInputInfoNV* pInfo, + VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildClusterAccelerationStructureIndirectNV( + VkCommandBuffer commandBuffer, + const VkClusterAccelerationStructureCommandsInfoNV* pCommandInfos); +#endif + + +// VK_NV_partitioned_acceleration_structure is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_partitioned_acceleration_structure 1 +#define VK_NV_PARTITIONED_ACCELERATION_STRUCTURE_SPEC_VERSION 1 +#define VK_NV_PARTITIONED_ACCELERATION_STRUCTURE_EXTENSION_NAME "VK_NV_partitioned_acceleration_structure" +#define VK_PARTITIONED_ACCELERATION_STRUCTURE_PARTITION_INDEX_GLOBAL_NV (~0U) + +typedef enum VkPartitionedAccelerationStructureOpTypeNV { + VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_WRITE_INSTANCE_NV = 0, + VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_UPDATE_INSTANCE_NV = 1, + VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_WRITE_PARTITION_TRANSLATION_NV = 2, + VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkPartitionedAccelerationStructureOpTypeNV; + +typedef enum VkPartitionedAccelerationStructureInstanceFlagBitsNV { + VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_TRIANGLE_FACING_CULL_DISABLE_BIT_NV = 0x00000001, + VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_TRIANGLE_FLIP_FACING_BIT_NV = 0x00000002, + VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_FORCE_OPAQUE_BIT_NV = 0x00000004, + VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_FORCE_NO_OPAQUE_BIT_NV = 0x00000008, + VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_ENABLE_EXPLICIT_BOUNDING_BOX_NV = 0x00000010, + VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkPartitionedAccelerationStructureInstanceFlagBitsNV; +typedef VkFlags VkPartitionedAccelerationStructureInstanceFlagsNV; +typedef struct VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 partitionedAccelerationStructure; +} VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV; + +typedef struct VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxPartitionCount; +} VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV; + +typedef struct VkPartitionedAccelerationStructureFlagsNV { + VkStructureType sType; + void* pNext; + VkBool32 enablePartitionTranslation; +} VkPartitionedAccelerationStructureFlagsNV; + +typedef struct VkBuildPartitionedAccelerationStructureIndirectCommandNV { + VkPartitionedAccelerationStructureOpTypeNV opType; + uint32_t argCount; + VkStridedDeviceAddressNV argData; +} VkBuildPartitionedAccelerationStructureIndirectCommandNV; + +typedef struct VkPartitionedAccelerationStructureWriteInstanceDataNV { + VkTransformMatrixKHR transform; + float explicitAABB[6]; + uint32_t instanceID; + uint32_t instanceMask; + uint32_t instanceContributionToHitGroupIndex; + VkPartitionedAccelerationStructureInstanceFlagsNV instanceFlags; + uint32_t instanceIndex; + uint32_t partitionIndex; + VkDeviceAddress accelerationStructure; +} VkPartitionedAccelerationStructureWriteInstanceDataNV; + +typedef struct VkPartitionedAccelerationStructureUpdateInstanceDataNV { + uint32_t instanceIndex; + uint32_t instanceContributionToHitGroupIndex; + VkDeviceAddress accelerationStructure; +} VkPartitionedAccelerationStructureUpdateInstanceDataNV; + +typedef struct VkPartitionedAccelerationStructureWritePartitionTranslationDataNV { + uint32_t partitionIndex; + float partitionTranslation[3]; +} VkPartitionedAccelerationStructureWritePartitionTranslationDataNV; + +typedef struct VkWriteDescriptorSetPartitionedAccelerationStructureNV { + VkStructureType sType; + void* pNext; + uint32_t accelerationStructureCount; + const VkDeviceAddress* pAccelerationStructures; +} VkWriteDescriptorSetPartitionedAccelerationStructureNV; + +typedef struct 
VkPartitionedAccelerationStructureInstancesInputNV { + VkStructureType sType; + void* pNext; + VkBuildAccelerationStructureFlagsKHR flags; + uint32_t instanceCount; + uint32_t maxInstancePerPartitionCount; + uint32_t partitionCount; + uint32_t maxInstanceInGlobalPartitionCount; +} VkPartitionedAccelerationStructureInstancesInputNV; + +typedef struct VkBuildPartitionedAccelerationStructureInfoNV { + VkStructureType sType; + void* pNext; + VkPartitionedAccelerationStructureInstancesInputNV input; + VkDeviceAddress srcAccelerationStructureData; + VkDeviceAddress dstAccelerationStructureData; + VkDeviceAddress scratchData; + VkDeviceAddress srcInfos; + VkDeviceAddress srcInfosCount; +} VkBuildPartitionedAccelerationStructureInfoNV; + +typedef void (VKAPI_PTR *PFN_vkGetPartitionedAccelerationStructuresBuildSizesNV)(VkDevice device, const VkPartitionedAccelerationStructureInstancesInputNV* pInfo, VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBuildPartitionedAccelerationStructuresNV)(VkCommandBuffer commandBuffer, const VkBuildPartitionedAccelerationStructureInfoNV* pBuildInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPartitionedAccelerationStructuresBuildSizesNV( + VkDevice device, + const VkPartitionedAccelerationStructureInstancesInputNV* pInfo, + VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildPartitionedAccelerationStructuresNV( + VkCommandBuffer commandBuffer, + const VkBuildPartitionedAccelerationStructureInfoNV* pBuildInfo); +#endif + + +// VK_EXT_device_generated_commands is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_device_generated_commands 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectExecutionSetEXT) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutEXT) +#define VK_EXT_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 1 +#define VK_EXT_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_EXT_device_generated_commands" + +typedef enum VkIndirectExecutionSetInfoTypeEXT { + VK_INDIRECT_EXECUTION_SET_INFO_TYPE_PIPELINES_EXT = 0, + VK_INDIRECT_EXECUTION_SET_INFO_TYPE_SHADER_OBJECTS_EXT = 1, + VK_INDIRECT_EXECUTION_SET_INFO_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkIndirectExecutionSetInfoTypeEXT; + +typedef enum VkIndirectCommandsTokenTypeEXT { + VK_INDIRECT_COMMANDS_TOKEN_TYPE_EXECUTION_SET_EXT = 0, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_EXT = 1, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_SEQUENCE_INDEX_EXT = 2, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_EXT = 3, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_EXT = 4, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_EXT = 5, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_EXT = 6, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_COUNT_EXT = 7, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_COUNT_EXT = 8, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_EXT = 9, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV_EXT = 1000202002, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_NV_EXT = 1000202003, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_EXT = 1000328000, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_EXT = 1000328001, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_TRACE_RAYS2_EXT = 1000386004, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkIndirectCommandsTokenTypeEXT; + +typedef enum VkIndirectCommandsInputModeFlagBitsEXT { + VK_INDIRECT_COMMANDS_INPUT_MODE_VULKAN_INDEX_BUFFER_EXT = 0x00000001, + VK_INDIRECT_COMMANDS_INPUT_MODE_DXGI_INDEX_BUFFER_EXT = 0x00000002, + 
VK_INDIRECT_COMMANDS_INPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkIndirectCommandsInputModeFlagBitsEXT; +typedef VkFlags VkIndirectCommandsInputModeFlagsEXT; + +typedef enum VkIndirectCommandsLayoutUsageFlagBitsEXT { + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_EXT = 0x00000001, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_EXT = 0x00000002, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkIndirectCommandsLayoutUsageFlagBitsEXT; +typedef VkFlags VkIndirectCommandsLayoutUsageFlagsEXT; +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 deviceGeneratedCommands; + VkBool32 dynamicGeneratedPipelineLayout; +} VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxIndirectPipelineCount; + uint32_t maxIndirectShaderObjectCount; + uint32_t maxIndirectSequenceCount; + uint32_t maxIndirectCommandsTokenCount; + uint32_t maxIndirectCommandsTokenOffset; + uint32_t maxIndirectCommandsIndirectStride; + VkIndirectCommandsInputModeFlagsEXT supportedIndirectCommandsInputModes; + VkShaderStageFlags supportedIndirectCommandsShaderStages; + VkShaderStageFlags supportedIndirectCommandsShaderStagesPipelineBinding; + VkShaderStageFlags supportedIndirectCommandsShaderStagesShaderBinding; + VkBool32 deviceGeneratedCommandsTransformFeedback; + VkBool32 deviceGeneratedCommandsMultiDrawIndirectCount; +} VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + +typedef struct VkGeneratedCommandsMemoryRequirementsInfoEXT { + VkStructureType sType; + const void* pNext; + VkIndirectExecutionSetEXT indirectExecutionSet; + VkIndirectCommandsLayoutEXT indirectCommandsLayout; + uint32_t maxSequenceCount; + uint32_t maxDrawCount; +} VkGeneratedCommandsMemoryRequirementsInfoEXT; + +typedef struct VkIndirectExecutionSetPipelineInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipeline initialPipeline; + uint32_t maxPipelineCount; +} VkIndirectExecutionSetPipelineInfoEXT; + +typedef struct VkIndirectExecutionSetShaderLayoutInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t setLayoutCount; + const VkDescriptorSetLayout* pSetLayouts; +} VkIndirectExecutionSetShaderLayoutInfoEXT; + +typedef struct VkIndirectExecutionSetShaderInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t shaderCount; + const VkShaderEXT* pInitialShaders; + const VkIndirectExecutionSetShaderLayoutInfoEXT* pSetLayoutInfos; + uint32_t maxShaderCount; + uint32_t pushConstantRangeCount; + const VkPushConstantRange* pPushConstantRanges; +} VkIndirectExecutionSetShaderInfoEXT; + +typedef union VkIndirectExecutionSetInfoEXT { + const VkIndirectExecutionSetPipelineInfoEXT* pPipelineInfo; + const VkIndirectExecutionSetShaderInfoEXT* pShaderInfo; +} VkIndirectExecutionSetInfoEXT; + +typedef struct VkIndirectExecutionSetCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkIndirectExecutionSetInfoTypeEXT type; + VkIndirectExecutionSetInfoEXT info; +} VkIndirectExecutionSetCreateInfoEXT; + +typedef struct VkGeneratedCommandsInfoEXT { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags shaderStages; + VkIndirectExecutionSetEXT indirectExecutionSet; + VkIndirectCommandsLayoutEXT indirectCommandsLayout; + VkDeviceAddress indirectAddress; + VkDeviceSize indirectAddressSize; + VkDeviceAddress preprocessAddress; + VkDeviceSize preprocessSize; + uint32_t 
maxSequenceCount; + VkDeviceAddress sequenceCountAddress; + uint32_t maxDrawCount; +} VkGeneratedCommandsInfoEXT; + +typedef struct VkWriteIndirectExecutionSetPipelineEXT { + VkStructureType sType; + const void* pNext; + uint32_t index; + VkPipeline pipeline; +} VkWriteIndirectExecutionSetPipelineEXT; + +typedef struct VkIndirectCommandsPushConstantTokenEXT { + VkPushConstantRange updateRange; +} VkIndirectCommandsPushConstantTokenEXT; + +typedef struct VkIndirectCommandsVertexBufferTokenEXT { + uint32_t vertexBindingUnit; +} VkIndirectCommandsVertexBufferTokenEXT; + +typedef struct VkIndirectCommandsIndexBufferTokenEXT { + VkIndirectCommandsInputModeFlagBitsEXT mode; +} VkIndirectCommandsIndexBufferTokenEXT; + +typedef struct VkIndirectCommandsExecutionSetTokenEXT { + VkIndirectExecutionSetInfoTypeEXT type; + VkShaderStageFlags shaderStages; +} VkIndirectCommandsExecutionSetTokenEXT; + +typedef union VkIndirectCommandsTokenDataEXT { + const VkIndirectCommandsPushConstantTokenEXT* pPushConstant; + const VkIndirectCommandsVertexBufferTokenEXT* pVertexBuffer; + const VkIndirectCommandsIndexBufferTokenEXT* pIndexBuffer; + const VkIndirectCommandsExecutionSetTokenEXT* pExecutionSet; +} VkIndirectCommandsTokenDataEXT; + +typedef struct VkIndirectCommandsLayoutTokenEXT { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsTokenTypeEXT type; + VkIndirectCommandsTokenDataEXT data; + uint32_t offset; +} VkIndirectCommandsLayoutTokenEXT; + +typedef struct VkIndirectCommandsLayoutCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsLayoutUsageFlagsEXT flags; + VkShaderStageFlags shaderStages; + uint32_t indirectStride; + VkPipelineLayout pipelineLayout; + uint32_t tokenCount; + const VkIndirectCommandsLayoutTokenEXT* pTokens; +} VkIndirectCommandsLayoutCreateInfoEXT; + +typedef struct VkDrawIndirectCountIndirectCommandEXT { + VkDeviceAddress bufferAddress; + uint32_t stride; + uint32_t commandCount; +} VkDrawIndirectCountIndirectCommandEXT; + +typedef struct VkBindVertexBufferIndirectCommandEXT { + VkDeviceAddress bufferAddress; + uint32_t size; + uint32_t stride; +} VkBindVertexBufferIndirectCommandEXT; + +typedef struct VkBindIndexBufferIndirectCommandEXT { + VkDeviceAddress bufferAddress; + uint32_t size; + VkIndexType indexType; +} VkBindIndexBufferIndirectCommandEXT; + +typedef struct VkGeneratedCommandsPipelineInfoEXT { + VkStructureType sType; + void* pNext; + VkPipeline pipeline; +} VkGeneratedCommandsPipelineInfoEXT; + +typedef struct VkGeneratedCommandsShaderInfoEXT { + VkStructureType sType; + void* pNext; + uint32_t shaderCount; + const VkShaderEXT* pShaders; +} VkGeneratedCommandsShaderInfoEXT; + +typedef struct VkWriteIndirectExecutionSetShaderEXT { + VkStructureType sType; + const void* pNext; + uint32_t index; + VkShaderEXT shader; +} VkWriteIndirectExecutionSetShaderEXT; + +typedef void (VKAPI_PTR *PFN_vkGetGeneratedCommandsMemoryRequirementsEXT)(VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoEXT* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkCmdPreprocessGeneratedCommandsEXT)(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo, VkCommandBuffer stateCommandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteGeneratedCommandsEXT)(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutEXT)(VkDevice device, const 
VkIndirectCommandsLayoutCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutEXT* pIndirectCommandsLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutEXT)(VkDevice device, VkIndirectCommandsLayoutEXT indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectExecutionSetEXT)(VkDevice device, const VkIndirectExecutionSetCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectExecutionSetEXT* pIndirectExecutionSet); +typedef void (VKAPI_PTR *PFN_vkDestroyIndirectExecutionSetEXT)(VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateIndirectExecutionSetPipelineEXT)(VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, const VkWriteIndirectExecutionSetPipelineEXT* pExecutionSetWrites); +typedef void (VKAPI_PTR *PFN_vkUpdateIndirectExecutionSetShaderEXT)(VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, const VkWriteIndirectExecutionSetShaderEXT* pExecutionSetWrites); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetGeneratedCommandsMemoryRequirementsEXT( + VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoEXT* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkCmdPreprocessGeneratedCommandsEXT( + VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo, + VkCommandBuffer stateCommandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdExecuteGeneratedCommandsEXT( + VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutEXT( + VkDevice device, + const VkIndirectCommandsLayoutCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectCommandsLayoutEXT* pIndirectCommandsLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutEXT( + VkDevice device, + VkIndirectCommandsLayoutEXT indirectCommandsLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectExecutionSetEXT( + VkDevice device, + const VkIndirectExecutionSetCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectExecutionSetEXT* pIndirectExecutionSet); + +VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectExecutionSetEXT( + VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkUpdateIndirectExecutionSetPipelineEXT( + VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VkWriteIndirectExecutionSetPipelineEXT* pExecutionSetWrites); + +VKAPI_ATTR void VKAPI_CALL vkUpdateIndirectExecutionSetShaderEXT( + VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VkWriteIndirectExecutionSetShaderEXT* pExecutionSetWrites); +#endif + + +// VK_MESA_image_alignment_control is a preprocessor guard. Do not pass it to API calls. 
+#define VK_MESA_image_alignment_control 1 +#define VK_MESA_IMAGE_ALIGNMENT_CONTROL_SPEC_VERSION 1 +#define VK_MESA_IMAGE_ALIGNMENT_CONTROL_EXTENSION_NAME "VK_MESA_image_alignment_control" +typedef struct VkPhysicalDeviceImageAlignmentControlFeaturesMESA { + VkStructureType sType; + void* pNext; + VkBool32 imageAlignmentControl; +} VkPhysicalDeviceImageAlignmentControlFeaturesMESA; + +typedef struct VkPhysicalDeviceImageAlignmentControlPropertiesMESA { + VkStructureType sType; + void* pNext; + uint32_t supportedImageAlignmentMask; +} VkPhysicalDeviceImageAlignmentControlPropertiesMESA; + +typedef struct VkImageAlignmentControlCreateInfoMESA { + VkStructureType sType; + const void* pNext; + uint32_t maximumRequestedAlignment; +} VkImageAlignmentControlCreateInfoMESA; + + + +// VK_EXT_depth_clamp_control is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_depth_clamp_control 1 +#define VK_EXT_DEPTH_CLAMP_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DEPTH_CLAMP_CONTROL_EXTENSION_NAME "VK_EXT_depth_clamp_control" +typedef struct VkPhysicalDeviceDepthClampControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthClampControl; +} VkPhysicalDeviceDepthClampControlFeaturesEXT; + +typedef struct VkPipelineViewportDepthClampControlCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDepthClampModeEXT depthClampMode; + const VkDepthClampRangeEXT* pDepthClampRange; +} VkPipelineViewportDepthClampControlCreateInfoEXT; + + + +// VK_HUAWEI_hdr_vivid is a preprocessor guard. Do not pass it to API calls. +#define VK_HUAWEI_hdr_vivid 1 +#define VK_HUAWEI_HDR_VIVID_SPEC_VERSION 1 +#define VK_HUAWEI_HDR_VIVID_EXTENSION_NAME "VK_HUAWEI_hdr_vivid" +typedef struct VkPhysicalDeviceHdrVividFeaturesHUAWEI { + VkStructureType sType; + void* pNext; + VkBool32 hdrVivid; +} VkPhysicalDeviceHdrVividFeaturesHUAWEI; + +typedef struct VkHdrVividDynamicMetadataHUAWEI { + VkStructureType sType; + const void* pNext; + size_t dynamicMetadataSize; + const void* pDynamicMetadata; +} VkHdrVividDynamicMetadataHUAWEI; + + + +// VK_NV_cooperative_matrix2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_cooperative_matrix2 1 +#define VK_NV_COOPERATIVE_MATRIX_2_SPEC_VERSION 1 +#define VK_NV_COOPERATIVE_MATRIX_2_EXTENSION_NAME "VK_NV_cooperative_matrix2" +typedef struct VkCooperativeMatrixFlexibleDimensionsPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t MGranularity; + uint32_t NGranularity; + uint32_t KGranularity; + VkComponentTypeKHR AType; + VkComponentTypeKHR BType; + VkComponentTypeKHR CType; + VkComponentTypeKHR ResultType; + VkBool32 saturatingAccumulation; + VkScopeKHR scope; + uint32_t workgroupInvocations; +} VkCooperativeMatrixFlexibleDimensionsPropertiesNV; + +typedef struct VkPhysicalDeviceCooperativeMatrix2FeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cooperativeMatrixWorkgroupScope; + VkBool32 cooperativeMatrixFlexibleDimensions; + VkBool32 cooperativeMatrixReductions; + VkBool32 cooperativeMatrixConversions; + VkBool32 cooperativeMatrixPerElementOperations; + VkBool32 cooperativeMatrixTensorAddressing; + VkBool32 cooperativeMatrixBlockLoads; +} VkPhysicalDeviceCooperativeMatrix2FeaturesNV; + +typedef struct VkPhysicalDeviceCooperativeMatrix2PropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t cooperativeMatrixWorkgroupScopeMaxWorkgroupSize; + uint32_t cooperativeMatrixFlexibleDimensionsMaxDimension; + uint32_t cooperativeMatrixWorkgroupScopeReservedSharedMemory; +} VkPhysicalDeviceCooperativeMatrix2PropertiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixFlexibleDimensionsPropertiesNV* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkCooperativeMatrixFlexibleDimensionsPropertiesNV* pProperties); +#endif + + +// VK_ARM_pipeline_opacity_micromap is a preprocessor guard. Do not pass it to API calls. +#define VK_ARM_pipeline_opacity_micromap 1 +#define VK_ARM_PIPELINE_OPACITY_MICROMAP_SPEC_VERSION 1 +#define VK_ARM_PIPELINE_OPACITY_MICROMAP_EXTENSION_NAME "VK_ARM_pipeline_opacity_micromap" +typedef struct VkPhysicalDevicePipelineOpacityMicromapFeaturesARM { + VkStructureType sType; + void* pNext; + VkBool32 pipelineOpacityMicromap; +} VkPhysicalDevicePipelineOpacityMicromapFeaturesARM; + + + +// VK_EXT_vertex_attribute_robustness is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_vertex_attribute_robustness 1 +#define VK_EXT_VERTEX_ATTRIBUTE_ROBUSTNESS_SPEC_VERSION 1 +#define VK_EXT_VERTEX_ATTRIBUTE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_vertex_attribute_robustness" +typedef struct VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 vertexAttributeRobustness; +} VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT; + + + +// VK_NV_present_metering is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_present_metering 1 +#define VK_NV_PRESENT_METERING_SPEC_VERSION 1 +#define VK_NV_PRESENT_METERING_EXTENSION_NAME "VK_NV_present_metering" +typedef struct VkSetPresentConfigNV { + VkStructureType sType; + const void* pNext; + uint32_t numFramesPerBatch; + uint32_t presentConfigFeedback; +} VkSetPresentConfigNV; + +typedef struct VkPhysicalDevicePresentMeteringFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 presentMetering; +} VkPhysicalDevicePresentMeteringFeaturesNV; + + + +// VK_EXT_fragment_density_map_offset is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_fragment_density_map_offset 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_OFFSET_EXTENSION_NAME "VK_EXT_fragment_density_map_offset" +typedef struct VkRenderingEndInfoEXT { + VkStructureType sType; + const void* pNext; +} VkRenderingEndInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdEndRendering2EXT)(VkCommandBuffer commandBuffer, const VkRenderingEndInfoEXT* pRenderingEndInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdEndRendering2EXT( + VkCommandBuffer commandBuffer, + const VkRenderingEndInfoEXT* pRenderingEndInfo); +#endif + + +// VK_KHR_acceleration_structure is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_acceleration_structure 1 +#define VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION 13 +#define VK_KHR_ACCELERATION_STRUCTURE_EXTENSION_NAME "VK_KHR_acceleration_structure" + +typedef enum VkBuildAccelerationStructureModeKHR { + VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR = 0, + VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR = 1, + VK_BUILD_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkBuildAccelerationStructureModeKHR; + +typedef enum VkAccelerationStructureCreateFlagBitsKHR { + VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = 0x00000001, + VK_ACCELERATION_STRUCTURE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000008, + VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV = 0x00000004, + VK_ACCELERATION_STRUCTURE_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureCreateFlagBitsKHR; +typedef VkFlags VkAccelerationStructureCreateFlagsKHR; +typedef struct VkAccelerationStructureBuildRangeInfoKHR { + uint32_t primitiveCount; + uint32_t primitiveOffset; + uint32_t firstVertex; + uint32_t transformOffset; +} VkAccelerationStructureBuildRangeInfoKHR; + +typedef struct VkAccelerationStructureGeometryTrianglesDataKHR { + VkStructureType sType; + const void* pNext; + VkFormat vertexFormat; + VkDeviceOrHostAddressConstKHR vertexData; + VkDeviceSize vertexStride; + uint32_t maxVertex; + VkIndexType indexType; + VkDeviceOrHostAddressConstKHR indexData; + VkDeviceOrHostAddressConstKHR transformData; +} VkAccelerationStructureGeometryTrianglesDataKHR; + +typedef struct VkAccelerationStructureGeometryAabbsDataKHR { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR data; + VkDeviceSize stride; +} VkAccelerationStructureGeometryAabbsDataKHR; + +typedef struct VkAccelerationStructureGeometryInstancesDataKHR { + VkStructureType sType; + const void* pNext; + VkBool32 arrayOfPointers; + VkDeviceOrHostAddressConstKHR data; +} VkAccelerationStructureGeometryInstancesDataKHR; + +typedef union VkAccelerationStructureGeometryDataKHR { + VkAccelerationStructureGeometryTrianglesDataKHR triangles; + VkAccelerationStructureGeometryAabbsDataKHR aabbs; + VkAccelerationStructureGeometryInstancesDataKHR instances; +} 
VkAccelerationStructureGeometryDataKHR; + +typedef struct VkAccelerationStructureGeometryKHR { + VkStructureType sType; + const void* pNext; + VkGeometryTypeKHR geometryType; + VkAccelerationStructureGeometryDataKHR geometry; + VkGeometryFlagsKHR flags; +} VkAccelerationStructureGeometryKHR; + +typedef struct VkAccelerationStructureBuildGeometryInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureTypeKHR type; + VkBuildAccelerationStructureFlagsKHR flags; + VkBuildAccelerationStructureModeKHR mode; + VkAccelerationStructureKHR srcAccelerationStructure; + VkAccelerationStructureKHR dstAccelerationStructure; + uint32_t geometryCount; + const VkAccelerationStructureGeometryKHR* pGeometries; + const VkAccelerationStructureGeometryKHR* const* ppGeometries; + VkDeviceOrHostAddressKHR scratchData; +} VkAccelerationStructureBuildGeometryInfoKHR; + +typedef struct VkAccelerationStructureCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureCreateFlagsKHR createFlags; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; + VkAccelerationStructureTypeKHR type; + VkDeviceAddress deviceAddress; +} VkAccelerationStructureCreateInfoKHR; + +typedef struct VkWriteDescriptorSetAccelerationStructureKHR { + VkStructureType sType; + const void* pNext; + uint32_t accelerationStructureCount; + const VkAccelerationStructureKHR* pAccelerationStructures; +} VkWriteDescriptorSetAccelerationStructureKHR; + +typedef struct VkPhysicalDeviceAccelerationStructureFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 accelerationStructure; + VkBool32 accelerationStructureCaptureReplay; + VkBool32 accelerationStructureIndirectBuild; + VkBool32 accelerationStructureHostCommands; + VkBool32 descriptorBindingAccelerationStructureUpdateAfterBind; +} VkPhysicalDeviceAccelerationStructureFeaturesKHR; + +typedef struct VkPhysicalDeviceAccelerationStructurePropertiesKHR { + VkStructureType sType; + void* pNext; + uint64_t maxGeometryCount; + uint64_t maxInstanceCount; + uint64_t maxPrimitiveCount; + uint32_t maxPerStageDescriptorAccelerationStructures; + uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures; + uint32_t maxDescriptorSetAccelerationStructures; + uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures; + uint32_t minAccelerationStructureScratchOffsetAlignment; +} VkPhysicalDeviceAccelerationStructurePropertiesKHR; + +typedef struct VkAccelerationStructureDeviceAddressInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR accelerationStructure; +} VkAccelerationStructureDeviceAddressInfoKHR; + +typedef struct VkAccelerationStructureVersionInfoKHR { + VkStructureType sType; + const void* pNext; + const uint8_t* pVersionData; +} VkAccelerationStructureVersionInfoKHR; + +typedef struct VkCopyAccelerationStructureToMemoryInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR src; + VkDeviceOrHostAddressKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyAccelerationStructureToMemoryInfoKHR; + +typedef struct VkCopyMemoryToAccelerationStructureInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR src; + VkAccelerationStructureKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyMemoryToAccelerationStructureInfoKHR; + +typedef struct VkCopyAccelerationStructureInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR src; + VkAccelerationStructureKHR dst; + 
VkCopyAccelerationStructureModeKHR mode; +} VkCopyAccelerationStructureInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureKHR)(VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure); +typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureKHR)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructuresKHR)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructuresIndirectKHR)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkDeviceAddress* pIndirectDeviceAddresses, const uint32_t* pIndirectStrides, const uint32_t* const* ppMaxPrimitiveCounts); +typedef VkResult (VKAPI_PTR *PFN_vkBuildAccelerationStructuresKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); +typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureToMemoryKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToAccelerationStructureKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkWriteAccelerationStructuresPropertiesKHR)(VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureToMemoryKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetAccelerationStructureDeviceAddressKHR)(VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesKHR)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef void (VKAPI_PTR *PFN_vkGetDeviceAccelerationStructureCompatibilityKHR)(VkDevice device, const VkAccelerationStructureVersionInfoKHR* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility); +typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureBuildSizesKHR)(VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, const uint32_t* pMaxPrimitiveCounts, VkAccelerationStructureBuildSizesInfoKHR* 
pSizeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureKHR( + VkDevice device, + const VkAccelerationStructureCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkAccelerationStructureKHR* pAccelerationStructure); + +VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureKHR( + VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructuresKHR( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructuresIndirectKHR( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkDeviceAddress* pIndirectDeviceAddresses, + const uint32_t* pIndirectStrides, + const uint32_t* const* ppMaxPrimitiveCounts); + +VKAPI_ATTR VkResult VKAPI_CALL vkBuildAccelerationStructuresKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureToMemoryKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToAccelerationStructureKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkWriteAccelerationStructuresPropertiesKHR( + VkDevice device, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR* pAccelerationStructures, + VkQueryType queryType, + size_t dataSize, + void* pData, + size_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureKHR( + VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureToMemoryKHR( + VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToAccelerationStructureKHR( + VkCommandBuffer commandBuffer, + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetAccelerationStructureDeviceAddressKHR( + VkDevice device, + const VkAccelerationStructureDeviceAddressInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesKHR( + VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR* pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceAccelerationStructureCompatibilityKHR( + VkDevice device, + const VkAccelerationStructureVersionInfoKHR* pVersionInfo, + VkAccelerationStructureCompatibilityKHR* pCompatibility); + +VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureBuildSizesKHR( + VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, + const 
uint32_t* pMaxPrimitiveCounts, + VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); +#endif + + +// VK_KHR_ray_tracing_pipeline is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_ray_tracing_pipeline 1 +#define VK_KHR_RAY_TRACING_PIPELINE_SPEC_VERSION 1 +#define VK_KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME "VK_KHR_ray_tracing_pipeline" + +typedef enum VkShaderGroupShaderKHR { + VK_SHADER_GROUP_SHADER_GENERAL_KHR = 0, + VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR = 1, + VK_SHADER_GROUP_SHADER_ANY_HIT_KHR = 2, + VK_SHADER_GROUP_SHADER_INTERSECTION_KHR = 3, + VK_SHADER_GROUP_SHADER_MAX_ENUM_KHR = 0x7FFFFFFF +} VkShaderGroupShaderKHR; +typedef struct VkRayTracingShaderGroupCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkRayTracingShaderGroupTypeKHR type; + uint32_t generalShader; + uint32_t closestHitShader; + uint32_t anyHitShader; + uint32_t intersectionShader; + const void* pShaderGroupCaptureReplayHandle; +} VkRayTracingShaderGroupCreateInfoKHR; + +typedef struct VkRayTracingPipelineInterfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxPipelineRayPayloadSize; + uint32_t maxPipelineRayHitAttributeSize; +} VkRayTracingPipelineInterfaceCreateInfoKHR; + +typedef struct VkRayTracingPipelineCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + uint32_t groupCount; + const VkRayTracingShaderGroupCreateInfoKHR* pGroups; + uint32_t maxPipelineRayRecursionDepth; + const VkPipelineLibraryCreateInfoKHR* pLibraryInfo; + const VkRayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface; + const VkPipelineDynamicStateCreateInfo* pDynamicState; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkRayTracingPipelineCreateInfoKHR; + +typedef struct VkPhysicalDeviceRayTracingPipelineFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingPipeline; + VkBool32 rayTracingPipelineShaderGroupHandleCaptureReplay; + VkBool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed; + VkBool32 rayTracingPipelineTraceRaysIndirect; + VkBool32 rayTraversalPrimitiveCulling; +} VkPhysicalDeviceRayTracingPipelineFeaturesKHR; + +typedef struct VkPhysicalDeviceRayTracingPipelinePropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t shaderGroupHandleSize; + uint32_t maxRayRecursionDepth; + uint32_t maxShaderGroupStride; + uint32_t shaderGroupBaseAlignment; + uint32_t shaderGroupHandleCaptureReplaySize; + uint32_t maxRayDispatchInvocationCount; + uint32_t shaderGroupHandleAlignment; + uint32_t maxRayHitAttributeSize; +} VkPhysicalDeviceRayTracingPipelinePropertiesKHR; + +typedef struct VkTraceRaysIndirectCommandKHR { + uint32_t width; + uint32_t height; + uint32_t depth; +} VkTraceRaysIndirectCommandKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysKHR)(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* 
pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysIndirectKHR)(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, VkDeviceAddress indirectDeviceAddress); +typedef VkDeviceSize (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupStackSizeKHR)(VkDevice device, VkPipeline pipeline, uint32_t group, VkShaderGroupShaderKHR groupShader); +typedef void (VKAPI_PTR *PFN_vkCmdSetRayTracingPipelineStackSizeKHR)(VkCommandBuffer commandBuffer, uint32_t pipelineStackSize); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysKHR( + VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirectKHR( + VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, + VkDeviceAddress indirectDeviceAddress); + +VKAPI_ATTR VkDeviceSize VKAPI_CALL vkGetRayTracingShaderGroupStackSizeKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t group, + VkShaderGroupShaderKHR groupShader); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRayTracingPipelineStackSizeKHR( + VkCommandBuffer commandBuffer, + uint32_t pipelineStackSize); +#endif + + +// VK_KHR_ray_query is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_ray_query 1 +#define VK_KHR_RAY_QUERY_SPEC_VERSION 1 +#define VK_KHR_RAY_QUERY_EXTENSION_NAME "VK_KHR_ray_query" +typedef struct VkPhysicalDeviceRayQueryFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rayQuery; +} VkPhysicalDeviceRayQueryFeaturesKHR; + + + +// VK_EXT_mesh_shader is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_mesh_shader 1 +#define VK_EXT_MESH_SHADER_SPEC_VERSION 1 +#define VK_EXT_MESH_SHADER_EXTENSION_NAME "VK_EXT_mesh_shader" +typedef struct VkPhysicalDeviceMeshShaderFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 taskShader; + VkBool32 meshShader; + VkBool32 multiviewMeshShader; + VkBool32 primitiveFragmentShadingRateMeshShader; + VkBool32 meshShaderQueries; +} VkPhysicalDeviceMeshShaderFeaturesEXT; + +typedef struct VkPhysicalDeviceMeshShaderPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxTaskWorkGroupTotalCount; + uint32_t maxTaskWorkGroupCount[3]; + uint32_t maxTaskWorkGroupInvocations; + uint32_t maxTaskWorkGroupSize[3]; + uint32_t maxTaskPayloadSize; + uint32_t maxTaskSharedMemorySize; + uint32_t maxTaskPayloadAndSharedMemorySize; + uint32_t maxMeshWorkGroupTotalCount; + uint32_t maxMeshWorkGroupCount[3]; + uint32_t maxMeshWorkGroupInvocations; + uint32_t maxMeshWorkGroupSize[3]; + uint32_t maxMeshSharedMemorySize; + uint32_t maxMeshPayloadAndSharedMemorySize; + uint32_t maxMeshOutputMemorySize; + uint32_t maxMeshPayloadAndOutputMemorySize; + uint32_t maxMeshOutputComponents; + uint32_t maxMeshOutputVertices; + uint32_t maxMeshOutputPrimitives; + uint32_t maxMeshOutputLayers; + uint32_t maxMeshMultiviewViewCount; + uint32_t meshOutputPerVertexGranularity; + uint32_t meshOutputPerPrimitiveGranularity; + uint32_t maxPreferredTaskWorkGroupInvocations; + uint32_t maxPreferredMeshWorkGroupInvocations; + VkBool32 prefersLocalInvocationVertexOutput; + VkBool32 prefersLocalInvocationPrimitiveOutput; + VkBool32 prefersCompactVertexOutput; + VkBool32 prefersCompactPrimitiveOutput; +} VkPhysicalDeviceMeshShaderPropertiesEXT; + +typedef struct VkDrawMeshTasksIndirectCommandEXT { + uint32_t groupCountX; + uint32_t groupCountY; + uint32_t groupCountZ; +} VkDrawMeshTasksIndirectCommandEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksEXT)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectEXT)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectCountEXT)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksEXT( + VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectEXT( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountEXT( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_directfb.h b/include/vulkan/vulkan_directfb.h new file mode 100644 index 00000000..26a43631 --- /dev/null +++ b/include/vulkan/vulkan_directfb.h @@ -0,0 +1,55 @@ +#ifndef VULKAN_DIRECTFB_H_ +#define VULKAN_DIRECTFB_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_EXT_directfb_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_directfb_surface 1 +#define VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION 1 +#define VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME "VK_EXT_directfb_surface" +typedef VkFlags VkDirectFBSurfaceCreateFlagsEXT; +typedef struct VkDirectFBSurfaceCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDirectFBSurfaceCreateFlagsEXT flags; + IDirectFB* dfb; + IDirectFBSurface* surface; +} VkDirectFBSurfaceCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDirectFBSurfaceEXT)(VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB* dfb); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDirectFBSurfaceEXT( + VkInstance instance, + const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceDirectFBPresentationSupportEXT( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + IDirectFB* dfb); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_enums.hpp b/include/vulkan/vulkan_enums.hpp new file mode 100644 index 00000000..d315e45a --- /dev/null +++ b/include/vulkan/vulkan_enums.hpp @@ -0,0 +1,8971 @@ +// Copyright 2015-2025 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#ifndef VULKAN_ENUMS_HPP +#define VULKAN_ENUMS_HPP + +// include-what-you-use: make sure, vulkan.hpp is used by code-completers +// IWYU pragma: private; include "vulkan.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + template <typename FlagBitsType> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = false; + }; + + template <typename BitType> + class Flags + { + public: + using BitsType = BitType; + using MaskType = typename std::underlying_type<BitType>::type; + + // constructors + VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT : m_mask( 0 ) {} + + VULKAN_HPP_CONSTEXPR Flags( BitType bit ) VULKAN_HPP_NOEXCEPT : m_mask( static_cast<MaskType>( bit ) ) {} + + VULKAN_HPP_CONSTEXPR Flags( Flags const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VULKAN_HPP_CONSTEXPR explicit Flags( MaskType flags ) VULKAN_HPP_NOEXCEPT : m_mask( flags ) {} + + // relational operators +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Flags const & ) const = default; +#else + VULKAN_HPP_CONSTEXPR bool operator<( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_mask < rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator<=( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_mask <= rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator>( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_mask > rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator>=( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_mask >= rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator==( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_mask == rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator!=( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_mask != rhs.m_mask; + } +#endif + + // logical operator + VULKAN_HPP_CONSTEXPR bool operator!() const 
VULKAN_HPP_NOEXCEPT + { + return !m_mask; + } + + // bitwise operators + VULKAN_HPP_CONSTEXPR Flags operator&( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return Flags( m_mask & rhs.m_mask ); + } + + VULKAN_HPP_CONSTEXPR Flags operator|( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return Flags( m_mask | rhs.m_mask ); + } + + VULKAN_HPP_CONSTEXPR Flags operator^( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return Flags( m_mask ^ rhs.m_mask ); + } + + VULKAN_HPP_CONSTEXPR Flags operator~() const VULKAN_HPP_NOEXCEPT + { + return Flags( m_mask ^ FlagTraits<BitType>::allFlags.m_mask ); + } + + // assignment operators + VULKAN_HPP_CONSTEXPR_14 Flags & operator=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VULKAN_HPP_CONSTEXPR_14 Flags & operator|=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT + { + m_mask |= rhs.m_mask; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Flags & operator&=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT + { + m_mask &= rhs.m_mask; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Flags & operator^=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT + { + m_mask ^= rhs.m_mask; + return *this; + } + + // cast operators + explicit VULKAN_HPP_CONSTEXPR operator bool() const VULKAN_HPP_NOEXCEPT + { + return !!m_mask; + } + + explicit VULKAN_HPP_CONSTEXPR operator MaskType() const VULKAN_HPP_NOEXCEPT + { + return m_mask; + } + +#if defined( VULKAN_HPP_FLAGS_MASK_TYPE_AS_PUBLIC ) + public: +#else + private: +#endif + MaskType m_mask; + }; + +#if !defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + // relational operators only needed for pre C++20 + template <typename BitType> + VULKAN_HPP_CONSTEXPR bool operator<( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator>( bit ); + } + + template <typename BitType> + VULKAN_HPP_CONSTEXPR bool operator<=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator>=( bit ); + } + + template <typename BitType> + VULKAN_HPP_CONSTEXPR bool operator>( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator<( bit ); + } + + template <typename BitType> + VULKAN_HPP_CONSTEXPR bool operator>=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator<=( bit ); + } + + template <typename BitType> + VULKAN_HPP_CONSTEXPR bool operator==( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator==( bit ); + } + + template <typename BitType> + VULKAN_HPP_CONSTEXPR bool operator!=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator!=( bit ); + } +#endif + + // bitwise operators + template <typename BitType> + VULKAN_HPP_CONSTEXPR Flags<BitType> operator&( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator&( bit ); + } + + template <typename BitType> + VULKAN_HPP_CONSTEXPR Flags<BitType> operator|( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator|( bit ); + } + + template <typename BitType> + VULKAN_HPP_CONSTEXPR Flags<BitType> operator^( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator^( bit ); + } + + // bitwise operators on BitType + template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true> + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator&( BitType lhs, BitType rhs ) VULKAN_HPP_NOEXCEPT + { + return Flags<BitType>( lhs ) & rhs; + } + + template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true> + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator|( BitType lhs, BitType rhs ) VULKAN_HPP_NOEXCEPT + { + return Flags<BitType>( lhs ) | rhs; + } + + template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true> + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator^( BitType lhs, BitType rhs ) VULKAN_HPP_NOEXCEPT + { + return Flags<BitType>( lhs ) ^ rhs; +
} + + template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true> + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator~( BitType bit ) VULKAN_HPP_NOEXCEPT + { + return ~( Flags<BitType>( bit ) ); + } + + //============= + //=== ENUMs === + //============= + + //=== VK_VERSION_1_0 === + + // wrapper class for enum VkResult, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkResult.html + enum class Result + { + eSuccess = VK_SUCCESS, + eNotReady = VK_NOT_READY, + eTimeout = VK_TIMEOUT, + eEventSet = VK_EVENT_SET, + eEventReset = VK_EVENT_RESET, + eIncomplete = VK_INCOMPLETE, + eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY, + eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY, + eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED, + eErrorDeviceLost = VK_ERROR_DEVICE_LOST, + eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED, + eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT, + eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT, + eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT, + eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER, + eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS, + eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED, + eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL, + eErrorUnknown = VK_ERROR_UNKNOWN, + eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY, + eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR, + eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE, + eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR, + eErrorFragmentation = VK_ERROR_FRAGMENTATION, + eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT, + eErrorInvalidOpaqueCaptureAddress = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, + eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT, + eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR, + ePipelineCompileRequired = VK_PIPELINE_COMPILE_REQUIRED, + ePipelineCompileRequiredEXT = VK_PIPELINE_COMPILE_REQUIRED_EXT, + eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT, + eErrorNotPermitted = VK_ERROR_NOT_PERMITTED, + eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT, + eErrorNotPermittedKHR = VK_ERROR_NOT_PERMITTED_KHR, + eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR, + eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR, + eSuboptimalKHR = VK_SUBOPTIMAL_KHR, + eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR, + eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR, + eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT, + eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV, + eErrorImageUsageNotSupportedKHR = VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR, + eErrorVideoPictureLayoutNotSupportedKHR = VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR, + eErrorVideoProfileOperationNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR, + eErrorVideoProfileFormatNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR, + eErrorVideoProfileCodecNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR, + eErrorVideoStdVersionNotSupportedKHR = VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR, + eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eThreadIdleKHR = VK_THREAD_IDLE_KHR, + eThreadDoneKHR = VK_THREAD_DONE_KHR, + eOperationDeferredKHR = 
VK_OPERATION_DEFERRED_KHR, + eOperationNotDeferredKHR = VK_OPERATION_NOT_DEFERRED_KHR, + eErrorInvalidVideoStdParametersKHR = VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR, + eErrorCompressionExhaustedEXT = VK_ERROR_COMPRESSION_EXHAUSTED_EXT, + eIncompatibleShaderBinaryEXT = VK_INCOMPATIBLE_SHADER_BINARY_EXT, + eErrorIncompatibleShaderBinaryEXT = VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT, + ePipelineBinaryMissingKHR = VK_PIPELINE_BINARY_MISSING_KHR, + eErrorNotEnoughSpaceKHR = VK_ERROR_NOT_ENOUGH_SPACE_KHR + }; + + // wrapper class for enum VkStructureType, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkStructureType.html + enum class StructureType + { + eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO, + eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, + eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, + eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, + eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO, + eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, + eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, + eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, + eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, + eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, + eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, + eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, + eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, + eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO, + eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, + eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, + eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, + ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO, + ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, + ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO, + ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, + ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, + ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO, + ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, + ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO, + ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO, + ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO, + ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO, + eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO, + eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, + ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, + eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, + eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, + eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, + eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, + eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, + eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET, + eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, + eRenderPassCreateInfo = 
VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, + eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, + eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, + eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, + eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, + eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, + eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, + eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, + eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER, + eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO, + eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, + ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES, + eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, + eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR, + eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, + eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR, + ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, + ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR, + eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, + eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR, + eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, + eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR, + eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, + eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR, + eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, + eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR, + eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, + eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR, + eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, + eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR, + eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, + eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR, + eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, + eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR, + eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, + eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR, + ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, + ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR, + eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, + eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR, + eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, + eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR, + eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, + 
eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR, + eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, + eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR, + eMemoryRequirements2 = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, + eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR, + eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, + eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR, + ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, + ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR, + ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, + ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR, + eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, + eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR, + eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, + eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR, + ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, + ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR, + eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, + eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR, + ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, + ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR, + eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, + eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR, + ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, + ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR, + ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, + ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR, + eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, + eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR, + eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, + eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, + ePipelineTessellationDomainOriginStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, + ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR, + eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, + eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR, + ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, + ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR, + ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, + ePhysicalDeviceMultiviewPropertiesKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR, + ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, + ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, + ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, + ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR, + eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO, + ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES, + ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES, + eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2, + eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, + eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR, + eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, + eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR, + eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, + eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR, + eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, + eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR, + ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, + ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR, + eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, + eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR, + eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR, + ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, + ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR, + eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, + eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR, + ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, + ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR, + eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, + eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR, + ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, + ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR, + eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, + eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR, + eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, + eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR, + 
eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, + eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR, + ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, + ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, + eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, + eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, + eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, + eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, + eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, + ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, + ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, + eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, + eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, + ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, + ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR, + eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, + eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR, + ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, + ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES, + ePhysicalDeviceVulkan11Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES, + ePhysicalDeviceVulkan11Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES, + ePhysicalDeviceVulkan12Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES, + ePhysicalDeviceVulkan12Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES, + eImageFormatListCreateInfo = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO, + eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, + eAttachmentDescription2 = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2, + eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR, + eAttachmentReference2 = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2, + eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, + eSubpassDescription2 = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2, + eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR, + eSubpassDependency2 = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2, + eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR, + eRenderPassCreateInfo2 = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2, + eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR, + eSubpassBeginInfo = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, + eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, + eSubpassEndInfo = VK_STRUCTURE_TYPE_SUBPASS_END_INFO, + eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, + ePhysicalDevice8BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES, + ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR, + ePhysicalDeviceDriverProperties = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES, + ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR, + ePhysicalDeviceShaderAtomicInt64Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES, + ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR, + ePhysicalDeviceShaderFloat16Int8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, + ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR, + ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR, + ePhysicalDeviceFloatControlsProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES, + ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR, + eDescriptorSetLayoutBindingFlagsCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO, + eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT, + ePhysicalDeviceDescriptorIndexingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES, + ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT, + ePhysicalDeviceDescriptorIndexingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES, + ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT, + eDescriptorSetVariableDescriptorCountAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO, + eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT, + eDescriptorSetVariableDescriptorCountLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT, + eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT, + ePhysicalDeviceDepthStencilResolveProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES, + ePhysicalDeviceDepthStencilResolvePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR, + eSubpassDescriptionDepthStencilResolve = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE, + eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR, + ePhysicalDeviceScalarBlockLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES, + ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT, + eImageStencilUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO, + eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT, + ePhysicalDeviceSamplerFilterMinmaxProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES, + ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT, + eSamplerReductionModeCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO, + eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT, + ePhysicalDeviceVulkanMemoryModelFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES, + 
ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR, + ePhysicalDeviceImagelessFramebufferFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES, + ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR, + eFramebufferAttachmentsCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO, + eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR, + eFramebufferAttachmentImageInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO, + eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR, + eRenderPassAttachmentBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO, + eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR, + ePhysicalDeviceUniformBufferStandardLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES, + ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR, + ePhysicalDeviceShaderSubgroupExtendedTypesFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, + ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR, + ePhysicalDeviceSeparateDepthStencilLayoutsFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES, + ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR, + eAttachmentReferenceStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT, + eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR, + eAttachmentDescriptionStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT, + eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR, + ePhysicalDeviceHostQueryResetFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES, + ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT, + ePhysicalDeviceTimelineSemaphoreFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES, + ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR, + ePhysicalDeviceTimelineSemaphoreProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES, + ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR, + eSemaphoreTypeCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO, + eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR, + eTimelineSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, + eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR, + eSemaphoreWaitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO, + eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR, + eSemaphoreSignalInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO, + eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR, + ePhysicalDeviceBufferDeviceAddressFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES, + ePhysicalDeviceBufferDeviceAddressFeaturesKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR, + eBufferDeviceAddressInfo = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO, + eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT, + eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR, + eBufferOpaqueCaptureAddressCreateInfo = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO, + eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR, + eMemoryOpaqueCaptureAddressAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO, + eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR, + eDeviceMemoryOpaqueCaptureAddressInfo = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO, + eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR, + ePhysicalDeviceVulkan13Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES, + ePhysicalDeviceVulkan13Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES, + ePipelineCreationFeedbackCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO, + ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT, + ePhysicalDeviceShaderTerminateInvocationFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES, + ePhysicalDeviceShaderTerminateInvocationFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR, + ePhysicalDeviceToolProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES, + ePhysicalDeviceToolPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT, + ePhysicalDeviceShaderDemoteToHelperInvocationFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES, + ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT, + ePhysicalDevicePrivateDataFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES, + ePhysicalDevicePrivateDataFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT, + eDevicePrivateDataCreateInfo = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO, + eDevicePrivateDataCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT, + ePrivateDataSlotCreateInfo = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO, + ePrivateDataSlotCreateInfoEXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT, + ePhysicalDevicePipelineCreationCacheControlFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES, + ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT, + eMemoryBarrier2 = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2, + eMemoryBarrier2KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR, + eBufferMemoryBarrier2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2, + eBufferMemoryBarrier2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR, + eImageMemoryBarrier2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2, + eImageMemoryBarrier2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR, + eDependencyInfo = VK_STRUCTURE_TYPE_DEPENDENCY_INFO, + eDependencyInfoKHR = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR, + eSubmitInfo2 = VK_STRUCTURE_TYPE_SUBMIT_INFO_2, + eSubmitInfo2KHR = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR, + eSemaphoreSubmitInfo = 
VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO, + eSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR, + eCommandBufferSubmitInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO, + eCommandBufferSubmitInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR, + ePhysicalDeviceSynchronization2Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES, + ePhysicalDeviceSynchronization2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR, + ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES, + ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR, + ePhysicalDeviceImageRobustnessFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES, + ePhysicalDeviceImageRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT, + eCopyBufferInfo2 = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2, + eCopyBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR, + eCopyImageInfo2 = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2, + eCopyImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR, + eCopyBufferToImageInfo2 = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2, + eCopyBufferToImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR, + eCopyImageToBufferInfo2 = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2, + eCopyImageToBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR, + eBlitImageInfo2 = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2, + eBlitImageInfo2KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR, + eResolveImageInfo2 = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2, + eResolveImageInfo2KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR, + eBufferCopy2 = VK_STRUCTURE_TYPE_BUFFER_COPY_2, + eBufferCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR, + eImageCopy2 = VK_STRUCTURE_TYPE_IMAGE_COPY_2, + eImageCopy2KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR, + eImageBlit2 = VK_STRUCTURE_TYPE_IMAGE_BLIT_2, + eImageBlit2KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR, + eBufferImageCopy2 = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2, + eBufferImageCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR, + eImageResolve2 = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2, + eImageResolve2KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR, + ePhysicalDeviceSubgroupSizeControlProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES, + ePhysicalDeviceSubgroupSizeControlPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT, + ePipelineShaderStageRequiredSubgroupSizeCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO, + ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT, + eShaderRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT, + ePhysicalDeviceSubgroupSizeControlFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES, + ePhysicalDeviceSubgroupSizeControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT, + ePhysicalDeviceInlineUniformBlockFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES, + ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT, + ePhysicalDeviceInlineUniformBlockProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES, + 
ePhysicalDeviceInlineUniformBlockPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT, + eWriteDescriptorSetInlineUniformBlock = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK, + eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT, + eDescriptorPoolInlineUniformBlockCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO, + eDescriptorPoolInlineUniformBlockCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT, + ePhysicalDeviceTextureCompressionAstcHdrFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES, + ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT, + eRenderingInfo = VK_STRUCTURE_TYPE_RENDERING_INFO, + eRenderingInfoKHR = VK_STRUCTURE_TYPE_RENDERING_INFO_KHR, + eRenderingAttachmentInfo = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO, + eRenderingAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR, + ePipelineRenderingCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO, + ePipelineRenderingCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO_KHR, + ePhysicalDeviceDynamicRenderingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES, + ePhysicalDeviceDynamicRenderingFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR, + eCommandBufferInheritanceRenderingInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO, + eCommandBufferInheritanceRenderingInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR, + ePhysicalDeviceShaderIntegerDotProductFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES, + ePhysicalDeviceShaderIntegerDotProductFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR, + ePhysicalDeviceShaderIntegerDotProductProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES, + ePhysicalDeviceShaderIntegerDotProductPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR, + ePhysicalDeviceTexelBufferAlignmentProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES, + ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT, + eFormatProperties3 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3, + eFormatProperties3KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR, + ePhysicalDeviceMaintenance4Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES, + ePhysicalDeviceMaintenance4FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR, + ePhysicalDeviceMaintenance4Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES, + ePhysicalDeviceMaintenance4PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR, + eDeviceBufferMemoryRequirements = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS, + eDeviceBufferMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR, + eDeviceImageMemoryRequirements = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS, + eDeviceImageMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR, + ePhysicalDeviceVulkan14Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_4_FEATURES, + ePhysicalDeviceVulkan14Properties = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_4_PROPERTIES, + eDeviceQueueGlobalPriorityCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO, + eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, + eDeviceQueueGlobalPriorityCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR, + ePhysicalDeviceGlobalPriorityQueryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES, + ePhysicalDeviceGlobalPriorityQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR, + ePhysicalDeviceGlobalPriorityQueryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT, + eQueueFamilyGlobalPriorityProperties = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES, + eQueueFamilyGlobalPriorityPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR, + eQueueFamilyGlobalPriorityPropertiesEXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT, + ePhysicalDeviceShaderSubgroupRotateFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES, + ePhysicalDeviceShaderSubgroupRotateFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR, + ePhysicalDeviceShaderFloatControls2Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES, + ePhysicalDeviceShaderFloatControls2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES_KHR, + ePhysicalDeviceShaderExpectAssumeFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES, + ePhysicalDeviceShaderExpectAssumeFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES_KHR, + ePhysicalDeviceLineRasterizationFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES, + ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, + ePhysicalDeviceLineRasterizationFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR, + ePipelineRasterizationLineStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO, + ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, + ePipelineRasterizationLineStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR, + ePhysicalDeviceLineRasterizationProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES, + ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT, + ePhysicalDeviceLineRasterizationPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR, + ePhysicalDeviceVertexAttributeDivisorProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES, + ePhysicalDeviceVertexAttributeDivisorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_KHR, + ePipelineVertexInputDivisorStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO, + ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT, + ePipelineVertexInputDivisorStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR, + ePhysicalDeviceVertexAttributeDivisorFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES, + 
ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, + ePhysicalDeviceVertexAttributeDivisorFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR, + ePhysicalDeviceIndexTypeUint8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES, + ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, + ePhysicalDeviceIndexTypeUint8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR, + eMemoryMapInfo = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO, + eMemoryMapInfoKHR = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR, + eMemoryUnmapInfo = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO, + eMemoryUnmapInfoKHR = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR, + ePhysicalDeviceMaintenance5Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES, + ePhysicalDeviceMaintenance5FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR, + ePhysicalDeviceMaintenance5Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES, + ePhysicalDeviceMaintenance5PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR, + eRenderingAreaInfo = VK_STRUCTURE_TYPE_RENDERING_AREA_INFO, + eRenderingAreaInfoKHR = VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR, + eDeviceImageSubresourceInfo = VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO, + eDeviceImageSubresourceInfoKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR, + eSubresourceLayout2 = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2, + eSubresourceLayout2EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT, + eSubresourceLayout2KHR = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR, + eImageSubresource2 = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2, + eImageSubresource2EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT, + eImageSubresource2KHR = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR, + ePipelineCreateFlags2CreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO, + ePipelineCreateFlags2CreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR, + eBufferUsageFlags2CreateInfo = VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO, + eBufferUsageFlags2CreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR, + ePhysicalDevicePushDescriptorProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES, + ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR, + ePhysicalDeviceDynamicRenderingLocalReadFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES, + ePhysicalDeviceDynamicRenderingLocalReadFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR, + eRenderingAttachmentLocationInfo = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO, + eRenderingAttachmentLocationInfoKHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO_KHR, + eRenderingInputAttachmentIndexInfo = VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO, + eRenderingInputAttachmentIndexInfoKHR = VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO_KHR, + ePhysicalDeviceMaintenance6Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES, + ePhysicalDeviceMaintenance6FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES_KHR, + ePhysicalDeviceMaintenance6Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES, + ePhysicalDeviceMaintenance6PropertiesKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES_KHR, + eBindMemoryStatus = VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS, + eBindMemoryStatusKHR = VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS_KHR, + eBindDescriptorSetsInfo = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO, + eBindDescriptorSetsInfoKHR = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO_KHR, + ePushConstantsInfo = VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO, + ePushConstantsInfoKHR = VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO_KHR, + ePushDescriptorSetInfo = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO, + ePushDescriptorSetInfoKHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO_KHR, + ePushDescriptorSetWithTemplateInfo = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO, + ePushDescriptorSetWithTemplateInfoKHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR, + ePhysicalDevicePipelineProtectedAccessFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES, + ePhysicalDevicePipelineProtectedAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT, + ePipelineRobustnessCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO, + ePipelineRobustnessCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT, + ePhysicalDevicePipelineRobustnessFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES, + ePhysicalDevicePipelineRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT, + ePhysicalDevicePipelineRobustnessProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES, + ePhysicalDevicePipelineRobustnessPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT, + ePhysicalDeviceHostImageCopyFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES, + ePhysicalDeviceHostImageCopyFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT, + ePhysicalDeviceHostImageCopyProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES, + ePhysicalDeviceHostImageCopyPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT, + eMemoryToImageCopy = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY, + eMemoryToImageCopyEXT = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT, + eImageToMemoryCopy = VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY, + eImageToMemoryCopyEXT = VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY_EXT, + eCopyImageToMemoryInfo = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO, + eCopyImageToMemoryInfoEXT = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO_EXT, + eCopyMemoryToImageInfo = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO, + eCopyMemoryToImageInfoEXT = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT, + eHostImageLayoutTransitionInfo = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO, + eHostImageLayoutTransitionInfoEXT = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT, + eCopyImageToImageInfo = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO, + eCopyImageToImageInfoEXT = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO_EXT, + eSubresourceHostMemcpySize = VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE, + eSubresourceHostMemcpySizeEXT = VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT, + eHostImageCopyDevicePerformanceQuery = VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY, + eHostImageCopyDevicePerformanceQueryEXT = VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT, + eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, + ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, + 
eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR, + eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR, + eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR, + eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR, + eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR, + eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR, + eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR, + eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR, + eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR, +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, + eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT, + ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD, + eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT, + eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT, + eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT, + eVideoProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR, + eVideoCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR, + eVideoPictureResourceInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR, + eVideoSessionMemoryRequirementsKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR, + eBindVideoSessionMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR, + eVideoSessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR, + eVideoSessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoSessionParametersUpdateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR, + eVideoBeginCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR, + eVideoEndCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR, + eVideoCodingControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR, + eVideoReferenceSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR, + eQueueFamilyVideoPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR, + eVideoProfileListInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR, + ePhysicalDeviceVideoFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR, + eVideoFormatPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR, + eQueueFamilyQueryResultStatusPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR, + 
eVideoDecodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR, + eVideoDecodeCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR, + eVideoDecodeUsageInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR, + eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV, + eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV, + eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV, + ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT, + ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT, + ePipelineRasterizationStateStreamCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT, + eCuModuleCreateInfoNVX = VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX, + eCuFunctionCreateInfoNVX = VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX, + eCuLaunchInfoNVX = VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX, + eCuModuleTexturingModeCreateInfoNVX = VK_STRUCTURE_TYPE_CU_MODULE_TEXTURING_MODE_CREATE_INFO_NVX, + eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX, + eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX, + eVideoEncodeH264CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_KHR, + eVideoEncodeH264SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoEncodeH264SessionParametersAddInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR, + eVideoEncodeH264PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PICTURE_INFO_KHR, + eVideoEncodeH264DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_KHR, + eVideoEncodeH264NaluSliceInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_KHR, + eVideoEncodeH264GopRemainingFrameInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_GOP_REMAINING_FRAME_INFO_KHR, + eVideoEncodeH264ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_KHR, + eVideoEncodeH264RateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_KHR, + eVideoEncodeH264RateControlLayerInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_KHR, + eVideoEncodeH264SessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_KHR, + eVideoEncodeH264QualityLevelPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUALITY_LEVEL_PROPERTIES_KHR, + eVideoEncodeH264SessionParametersGetInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_GET_INFO_KHR, + eVideoEncodeH264SessionParametersFeedbackInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_FEEDBACK_INFO_KHR, + eVideoEncodeH265CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_KHR, + eVideoEncodeH265SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoEncodeH265SessionParametersAddInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR, + eVideoEncodeH265PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PICTURE_INFO_KHR, + eVideoEncodeH265DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_KHR, + eVideoEncodeH265NaluSliceSegmentInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_KHR, + eVideoEncodeH265GopRemainingFrameInfoKHR = 
VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_GOP_REMAINING_FRAME_INFO_KHR, + eVideoEncodeH265ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_KHR, + eVideoEncodeH265RateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_KHR, + eVideoEncodeH265RateControlLayerInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_KHR, + eVideoEncodeH265SessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_CREATE_INFO_KHR, + eVideoEncodeH265QualityLevelPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUALITY_LEVEL_PROPERTIES_KHR, + eVideoEncodeH265SessionParametersGetInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_GET_INFO_KHR, + eVideoEncodeH265SessionParametersFeedbackInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_FEEDBACK_INFO_KHR, + eVideoDecodeH264CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_KHR, + eVideoDecodeH264PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_KHR, + eVideoDecodeH264ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_KHR, + eVideoDecodeH264SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoDecodeH264SessionParametersAddInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR, + eVideoDecodeH264DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR, + eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD, +#if defined( VK_USE_PLATFORM_GGP ) + eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP, +#endif /*VK_USE_PLATFORM_GGP*/ + ePhysicalDeviceCornerSampledImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV, + eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV, + eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV, + eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV, + eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT, +#if defined( VK_USE_PLATFORM_VI_NN ) + eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN, +#endif /*VK_USE_PLATFORM_VI_NN*/ + eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT, + ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, + eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR, + eMemoryWin32HandlePropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR, + eMemoryGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eImportMemoryFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR, + eMemoryFdPropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR, + eMemoryGetFdInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eWin32KeyedMutexAcquireReleaseInfoKHR = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR, + 
eImportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, + eExportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, + eD3D12FenceSubmitInfoKHR = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR, + eSemaphoreGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, + eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, + eCommandBufferInheritanceConditionalRenderingInfoEXT = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT, + ePhysicalDeviceConditionalRenderingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT, + eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT, + ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR, + ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV, + eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, + eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT, + eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT, + eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT, + eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT, + ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE, + ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX, + eMultiviewPerViewAttributesInfoNVX = VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX, + ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV, + ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT, + ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT, + ePhysicalDeviceConservativeRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT, + ePipelineRasterizationConservativeStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT, + ePhysicalDeviceDepthClipEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT, + ePipelineRasterizationDepthClipStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT, + eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT, + ePhysicalDeviceRelaxedLineRasterizationFeaturesIMG = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RELAXED_LINE_RASTERIZATION_FEATURES_IMG, + eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR, + eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR, + eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR, + eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, + ePhysicalDevicePerformanceQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR, + ePhysicalDevicePerformanceQueryPropertiesKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR, + eQueryPoolPerformanceCreateInfoKHR = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR, + ePerformanceQuerySubmitInfoKHR = VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR, + eAcquireProfilingLockInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR, + ePerformanceCounterKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR, + ePerformanceCounterDescriptionKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR, + ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, + eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR, + eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR, + eDisplayProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR, + eDisplayPlaneProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR, + eDisplayModeProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR, + eDisplayPlaneInfo2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR, + eDisplayPlaneCapabilities2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR, +#if defined( VK_USE_PLATFORM_IOS_MVK ) + eIosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK, +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + eMacosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK, +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + eDebugUtilsObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT, + eDebugUtilsObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT, + eDebugUtilsLabelEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, + eDebugUtilsMessengerCallbackDataEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT, + eDebugUtilsMessengerCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + eAndroidHardwareBufferUsageANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID, + eAndroidHardwareBufferPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID, + eAndroidHardwareBufferFormatPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID, + eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, + eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, + eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, + eAndroidHardwareBufferFormatProperties2ANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + ePhysicalDeviceShaderEnqueueFeaturesAMDX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX, + ePhysicalDeviceShaderEnqueuePropertiesAMDX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX, + eExecutionGraphPipelineScratchSizeAMDX = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX, + eExecutionGraphPipelineCreateInfoAMDX = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX, + ePipelineShaderStageNodeCreateInfoAMDX = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eAttachmentSampleCountInfoAMD = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, + eAttachmentSampleCountInfoNV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV, + ePhysicalDeviceShaderBfloat16FeaturesKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_BFLOAT16_FEATURES_KHR, + eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, + eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT, + ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT, + ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT, + eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT, + ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT, + ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT, + ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT, + ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV, + eWriteDescriptorSetAccelerationStructureKHR = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR, + eAccelerationStructureBuildGeometryInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR, + eAccelerationStructureDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR, + eAccelerationStructureGeometryAabbsDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR, + eAccelerationStructureGeometryInstancesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR, + eAccelerationStructureGeometryTrianglesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR, + eAccelerationStructureGeometryKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR, + eAccelerationStructureVersionInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR, + eCopyAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR, + eCopyAccelerationStructureToMemoryInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR, + eCopyMemoryToAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR, + ePhysicalDeviceAccelerationStructureFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR, + ePhysicalDeviceAccelerationStructurePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR, + eAccelerationStructureCreateInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR, + eAccelerationStructureBuildSizesInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR, + ePhysicalDeviceRayTracingPipelineFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR, + ePhysicalDeviceRayTracingPipelinePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR, + eRayTracingPipelineCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR, + eRayTracingShaderGroupCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR, + eRayTracingPipelineInterfaceCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR, + ePhysicalDeviceRayQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR, + ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV, + ePhysicalDeviceShaderSmBuiltinsFeaturesNV = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV, + ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV, + eDrmFormatModifierPropertiesListEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT, + ePhysicalDeviceImageDrmFormatModifierInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT, + eImageDrmFormatModifierListCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT, + eImageDrmFormatModifierExplicitCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT, + eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, + eDrmFormatModifierPropertiesList2EXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT, + eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT, + eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + ePhysicalDevicePortabilitySubsetFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR, + ePhysicalDevicePortabilitySubsetPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + ePipelineViewportShadingRateImageStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV, + ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV, + ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV, + ePipelineViewportCoarseSampleOrderStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV, + eRayTracingPipelineCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV, + eAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV, + eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV, + eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV, + eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV, + eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV, + eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV, + eAccelerationStructureMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV, + ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV, + eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV, + eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV, + ePhysicalDeviceRepresentativeFragmentTestFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV, + ePipelineRepresentativeFragmentTestStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV, + ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT, + eFilterCubicImageViewImageFormatPropertiesEXT = VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT, + eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT, + eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT, + ePhysicalDeviceExternalMemoryHostPropertiesEXT = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT, + ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR, + ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD, + ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD, + eVideoDecodeH265CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_KHR, + eVideoDecodeH265SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoDecodeH265SessionParametersAddInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR, + eVideoDecodeH265ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR, + eVideoDecodeH265PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR, + eVideoDecodeH265DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR, + eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD, + ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, +#if defined( VK_USE_PLATFORM_GGP ) + ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP, +#endif /*VK_USE_PLATFORM_GGP*/ + ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV, + ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV, + ePhysicalDeviceShaderImageFootprintFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV, + ePipelineViewportExclusiveScissorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV, + ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV, + eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV, + eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV, + eQueueFamilyCheckpointProperties2NV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV, + eCheckpointData2NV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV, + ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL, + eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, + eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL, + eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL, + ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL, + ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL, + ePerformanceOverrideInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL, + ePerformanceConfigurationAcquireInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL, + ePhysicalDevicePciBusInfoPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT, + eDisplayNativeHdrSurfaceCapabilitiesAMD = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD, + eSwapchainDisplayNativeHdrCreateInfoAMD = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eImagepipeSurfaceCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ +#if defined( 
VK_USE_PLATFORM_METAL_EXT ) + eMetalSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT, +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT, + eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT, + eRenderingFragmentDensityMapAttachmentInfoEXT = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT, + eFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, + ePipelineFragmentShadingRateStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR, + ePhysicalDeviceFragmentShadingRatePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR, + ePhysicalDeviceFragmentShadingRateFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR, + ePhysicalDeviceFragmentShadingRateKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR, + eRenderingFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, + ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD, + ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD, + ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT, + ePhysicalDeviceShaderQuadControlFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_QUAD_CONTROL_FEATURES_KHR, + ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT, + ePhysicalDeviceMemoryPriorityFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT, + eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT, + eSurfaceProtectedCapabilitiesKHR = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR, + ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV, + ePhysicalDeviceBufferDeviceAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT, + ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT, + eBufferDeviceAddressCreateInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT, + eValidationFeaturesEXT = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT, + ePhysicalDevicePresentWaitFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR, + ePhysicalDeviceCooperativeMatrixFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV, + eCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV, + ePhysicalDeviceCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV, + ePhysicalDeviceCoverageReductionModeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV, + ePipelineCoverageReductionStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV, + eFramebufferMixedSamplesCombinationNV = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV, + ePhysicalDeviceFragmentShaderInterlockFeaturesEXT = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT, + ePhysicalDeviceYcbcrImageArraysFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT, + ePhysicalDeviceProvokingVertexFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT, + ePipelineRasterizationProvokingVertexStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT, + ePhysicalDeviceProvokingVertexPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eSurfaceFullScreenExclusiveInfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT, + eSurfaceCapabilitiesFullScreenExclusiveEXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT, + eSurfaceFullScreenExclusiveWin32InfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eHeadlessSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT, + ePhysicalDeviceShaderAtomicFloatFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT, + ePhysicalDeviceExtendedDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT, + ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR, + ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, + ePipelineInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_INFO_EXT, + ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR, + ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR, + ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR, + ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR, + ePhysicalDeviceMapMemoryPlacedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_FEATURES_EXT, + ePhysicalDeviceMapMemoryPlacedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_PROPERTIES_EXT, + eMemoryMapPlacedInfoEXT = VK_STRUCTURE_TYPE_MEMORY_MAP_PLACED_INFO_EXT, + ePhysicalDeviceShaderAtomicFloat2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT, + eSurfacePresentModeEXT = VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT, + eSurfacePresentScalingCapabilitiesEXT = VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT, + eSurfacePresentModeCompatibilityEXT = VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_COMPATIBILITY_EXT, + ePhysicalDeviceSwapchainMaintenance1FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT, + eSwapchainPresentFenceInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT, + eSwapchainPresentModesCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT, + eSwapchainPresentModeInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODE_INFO_EXT, + eSwapchainPresentScalingCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT, + eReleaseSwapchainImagesInfoEXT = VK_STRUCTURE_TYPE_RELEASE_SWAPCHAIN_IMAGES_INFO_EXT, + ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV, + eGraphicsShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV, + eGraphicsPipelineShaderGroupsCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV, + eIndirectCommandsLayoutTokenNV = 
VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV, + eIndirectCommandsLayoutCreateInfoNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV, + eGeneratedCommandsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV, + eGeneratedCommandsMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV, + ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV, + ePhysicalDeviceInheritedViewportScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV, + eCommandBufferInheritanceViewportScissorInfoNV = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV, + ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT, + eCommandBufferInheritanceRenderPassTransformInfoQCOM = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM, + eRenderPassTransformBeginInfoQCOM = VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM, + ePhysicalDeviceDepthBiasControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_BIAS_CONTROL_FEATURES_EXT, + eDepthBiasInfoEXT = VK_STRUCTURE_TYPE_DEPTH_BIAS_INFO_EXT, + eDepthBiasRepresentationInfoEXT = VK_STRUCTURE_TYPE_DEPTH_BIAS_REPRESENTATION_INFO_EXT, + ePhysicalDeviceDeviceMemoryReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT, + eDeviceDeviceMemoryReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT, + eDeviceMemoryReportCallbackDataEXT = VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT, + ePhysicalDeviceRobustness2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT, + ePhysicalDeviceRobustness2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT, + eSamplerCustomBorderColorCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT, + ePhysicalDeviceCustomBorderColorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT, + ePhysicalDeviceCustomBorderColorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT, + ePipelineLibraryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR, + ePhysicalDevicePresentBarrierFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV, + eSurfaceCapabilitiesPresentBarrierNV = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV, + eSwapchainPresentBarrierCreateInfoNV = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV, + ePresentIdKHR = VK_STRUCTURE_TYPE_PRESENT_ID_KHR, + ePhysicalDevicePresentIdFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR, + eVideoEncodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR, + eVideoEncodeRateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR, + eVideoEncodeRateControlLayerInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR, + eVideoEncodeCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR, + eVideoEncodeUsageInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR, + eQueryPoolVideoEncodeFeedbackCreateInfoKHR = VK_STRUCTURE_TYPE_QUERY_POOL_VIDEO_ENCODE_FEEDBACK_CREATE_INFO_KHR, + ePhysicalDeviceVideoEncodeQualityLevelInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR, + eVideoEncodeQualityLevelPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_PROPERTIES_KHR, + eVideoEncodeQualityLevelInfoKHR = 
VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR, + eVideoEncodeSessionParametersGetInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_GET_INFO_KHR, + eVideoEncodeSessionParametersFeedbackInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_FEEDBACK_INFO_KHR, + ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV, + eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eCudaModuleCreateInfoNV = VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV, + eCudaFunctionCreateInfoNV = VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV, + eCudaLaunchInfoNV = VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV, + ePhysicalDeviceCudaKernelLaunchFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV, + ePhysicalDeviceCudaKernelLaunchPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + ePhysicalDeviceTileShadingFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_SHADING_FEATURES_QCOM, + ePhysicalDeviceTileShadingPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_SHADING_PROPERTIES_QCOM, + eRenderPassTileShadingCreateInfoQCOM = VK_STRUCTURE_TYPE_RENDER_PASS_TILE_SHADING_CREATE_INFO_QCOM, + ePerTileBeginInfoQCOM = VK_STRUCTURE_TYPE_PER_TILE_BEGIN_INFO_QCOM, + ePerTileEndInfoQCOM = VK_STRUCTURE_TYPE_PER_TILE_END_INFO_QCOM, + eDispatchTileInfoQCOM = VK_STRUCTURE_TYPE_DISPATCH_TILE_INFO_QCOM, + eQueryLowLatencySupportNV = VK_STRUCTURE_TYPE_QUERY_LOW_LATENCY_SUPPORT_NV, +#if defined( VK_USE_PLATFORM_METAL_EXT ) + eExportMetalObjectCreateInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT, + eExportMetalObjectsInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT, + eExportMetalDeviceInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_DEVICE_INFO_EXT, + eExportMetalCommandQueueInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_COMMAND_QUEUE_INFO_EXT, + eExportMetalBufferInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_BUFFER_INFO_EXT, + eImportMetalBufferInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_BUFFER_INFO_EXT, + eExportMetalTextureInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_TEXTURE_INFO_EXT, + eImportMetalTextureInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_TEXTURE_INFO_EXT, + eExportMetalIoSurfaceInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_IO_SURFACE_INFO_EXT, + eImportMetalIoSurfaceInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT, + eExportMetalSharedEventInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT, + eImportMetalSharedEventInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT, +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + ePhysicalDeviceDescriptorBufferPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT, + ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT, + ePhysicalDeviceDescriptorBufferFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT, + eDescriptorAddressInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT, + eDescriptorGetInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT, + eBufferCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, + eImageCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, + eImageViewCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, + eSamplerCaptureDescriptorDataInfoEXT 
= VK_STRUCTURE_TYPE_SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, + eOpaqueCaptureDescriptorDataCreateInfoEXT = VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT, + eDescriptorBufferBindingInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT, + eDescriptorBufferBindingPushDescriptorBufferHandleEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT, + eAccelerationStructureCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, + ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT, + ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT, + eGraphicsPipelineLibraryCreateInfoEXT = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT, + ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD, + ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR, + ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV, + ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR, + ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR, + ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV, + ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV, + ePipelineFragmentShadingRateEnumStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV, + eAccelerationStructureGeometryMotionTrianglesDataNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV, + ePhysicalDeviceRayTracingMotionBlurFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV, + eAccelerationStructureMotionInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV, + ePhysicalDeviceMeshShaderFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT, + ePhysicalDeviceMeshShaderPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT, + ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMap2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMap2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT, + eCopyCommandTransformInfoQCOM = VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM, + ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR, + ePhysicalDeviceImageCompressionControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT, + eImageCompressionControlEXT = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT, + eImageCompressionPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT, + ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT, + ePhysicalDevice4444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT, + ePhysicalDeviceFaultFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT, + eDeviceFaultCountsEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT, + eDeviceFaultInfoEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT, + ePhysicalDeviceRgba10X6FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + eDirectfbSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT, +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + ePhysicalDeviceVertexInputDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT, + eVertexInputBindingDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT, + eVertexInputAttributeDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT, + ePhysicalDeviceDrmPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT, + ePhysicalDeviceAddressBindingReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT, + eDeviceAddressBindingCallbackDataEXT = VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT, + ePhysicalDeviceDepthClipControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT, + ePipelineViewportDepthClipControlCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT, + ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT, + ePhysicalDevicePresentModeFifoLatestReadyFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_MODE_FIFO_LATEST_READY_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eImportMemoryZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA, + eMemoryZirconHandlePropertiesFUCHSIA = VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA, + eMemoryGetZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA, + eImportSemaphoreZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA, + eSemaphoreGetZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA, + eBufferCollectionCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CREATE_INFO_FUCHSIA, + eImportMemoryBufferCollectionFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA, + eBufferCollectionImageCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA, + eBufferCollectionPropertiesFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_PROPERTIES_FUCHSIA, + eBufferConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_CONSTRAINTS_INFO_FUCHSIA, + eBufferCollectionBufferCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA, + eImageConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGE_CONSTRAINTS_INFO_FUCHSIA, + eImageFormatConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA, + eSysmemColorSpaceFUCHSIA = VK_STRUCTURE_TYPE_SYSMEM_COLOR_SPACE_FUCHSIA, + eBufferCollectionConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + eSubpassShadingPipelineCreateInfoHUAWEI = VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI, + ePhysicalDeviceSubpassShadingFeaturesHUAWEI = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI, + ePhysicalDeviceSubpassShadingPropertiesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI, + ePhysicalDeviceInvocationMaskFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI, + eMemoryGetRemoteAddressInfoNV = VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV, + ePhysicalDeviceExternalMemoryRdmaFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV, + ePipelinePropertiesIdentifierEXT = VK_STRUCTURE_TYPE_PIPELINE_PROPERTIES_IDENTIFIER_EXT, + ePhysicalDevicePipelinePropertiesFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT, + ePhysicalDeviceFrameBoundaryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAME_BOUNDARY_FEATURES_EXT, + eFrameBoundaryEXT = VK_STRUCTURE_TYPE_FRAME_BOUNDARY_EXT, + ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT, + eSubpassResolvePerformanceQueryEXT = VK_STRUCTURE_TYPE_SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT, + eMultisampledRenderToSingleSampledInfoEXT = VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT, + ePhysicalDeviceExtendedDynamicState2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + eScreenSurfaceCreateInfoQNX = VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX, +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + ePhysicalDeviceColorWriteEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT, + ePipelineColorWriteCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT, + ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT, + ePhysicalDeviceRayTracingMaintenance1FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR, + ePhysicalDeviceImageViewMinLodFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT, + eImageViewMinLodCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT, + ePhysicalDeviceMultiDrawFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT, + ePhysicalDeviceMultiDrawPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT, + ePhysicalDeviceImage2DViewOf3DFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT, + ePhysicalDeviceShaderTileImageFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT, + ePhysicalDeviceShaderTileImagePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT, + eMicromapBuildInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT, + eMicromapVersionInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT, + eCopyMicromapInfoEXT = VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT, + eCopyMicromapToMemoryInfoEXT = VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT, + eCopyMemoryToMicromapInfoEXT = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT, + ePhysicalDeviceOpacityMicromapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT, + ePhysicalDeviceOpacityMicromapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT, + eMicromapCreateInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT, + eMicromapBuildSizesInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT, + eAccelerationStructureTrianglesOpacityMicromapEXT = 
VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + ePhysicalDeviceDisplacementMicromapFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_FEATURES_NV, + ePhysicalDeviceDisplacementMicromapPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_PROPERTIES_NV, + eAccelerationStructureTrianglesDisplacementMicromapNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_DISPLACEMENT_MICROMAP_NV, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI, + ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI, + ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_VRS_FEATURES_HUAWEI, + ePhysicalDeviceBorderColorSwizzleFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT, + eSamplerBorderColorComponentMappingCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT, + ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT, + ePhysicalDeviceShaderCorePropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM, + eDeviceQueueShaderCoreControlCreateInfoARM = VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM, + ePhysicalDeviceSchedulingControlsFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM, + ePhysicalDeviceSchedulingControlsPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM, + ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT, + eImageViewSlicedCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_SLICED_CREATE_INFO_EXT, + ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE, + eDescriptorSetBindingReferenceVALVE = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE, + eDescriptorSetLayoutHostMappingInfoVALVE = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE, + ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT, + ePhysicalDeviceRenderPassStripedFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_FEATURES_ARM, + ePhysicalDeviceRenderPassStripedPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_PROPERTIES_ARM, + eRenderPassStripeBeginInfoARM = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_BEGIN_INFO_ARM, + eRenderPassStripeInfoARM = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_INFO_ARM, + eRenderPassStripeSubmitInfoARM = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_SUBMIT_INFO_ARM, + ePhysicalDeviceCopyMemoryIndirectFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV, + ePhysicalDeviceCopyMemoryIndirectPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV, + ePhysicalDeviceMemoryDecompressionFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV, + ePhysicalDeviceMemoryDecompressionPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV, + ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV, + 
eComputePipelineIndirectBufferInfoNV = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV, + ePipelineIndirectDeviceAddressInfoNV = VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV, + ePhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_LINEAR_SWEPT_SPHERES_FEATURES_NV, + eAccelerationStructureGeometryLinearSweptSpheresDataNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_LINEAR_SWEPT_SPHERES_DATA_NV, + eAccelerationStructureGeometrySpheresDataNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_SPHERES_DATA_NV, + ePhysicalDeviceLinearColorAttachmentFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV, + ePhysicalDeviceShaderMaximalReconvergenceFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MAXIMAL_RECONVERGENCE_FEATURES_KHR, + ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT, + ePhysicalDeviceImageProcessingFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM, + ePhysicalDeviceImageProcessingPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM, + eImageViewSampleWeightCreateInfoQCOM = VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM, + ePhysicalDeviceNestedCommandBufferFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_FEATURES_EXT, + ePhysicalDeviceNestedCommandBufferPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_PROPERTIES_EXT, + eExternalMemoryAcquireUnmodifiedEXT = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT, + ePhysicalDeviceExtendedDynamicState3FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT, + ePhysicalDeviceExtendedDynamicState3PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT, + ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT, + eRenderPassCreationControlEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT, + eRenderPassCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT, + eRenderPassSubpassFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT, + eDirectDriverLoadingInfoLUNARG = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG, + eDirectDriverLoadingListLUNARG = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG, + ePhysicalDeviceShaderModuleIdentifierFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT, + ePhysicalDeviceShaderModuleIdentifierPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT, + ePipelineShaderStageModuleIdentifierCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT, + eShaderModuleIdentifierEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT, + ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT, + ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM, + ePhysicalDeviceOpticalFlowFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV, + ePhysicalDeviceOpticalFlowPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV, + eOpticalFlowImageFormatInfoNV = 
VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV, + eOpticalFlowImageFormatPropertiesNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV, + eOpticalFlowSessionCreateInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV, + eOpticalFlowExecuteInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV, + eOpticalFlowSessionCreatePrivateDataInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV, + ePhysicalDeviceLegacyDitheringFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + ePhysicalDeviceExternalFormatResolveFeaturesANDROID = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID, + ePhysicalDeviceExternalFormatResolvePropertiesANDROID = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_PROPERTIES_ANDROID, + eAndroidHardwareBufferFormatResolvePropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_RESOLVE_PROPERTIES_ANDROID, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + ePhysicalDeviceAntiLagFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ANTI_LAG_FEATURES_AMD, + eAntiLagDataAMD = VK_STRUCTURE_TYPE_ANTI_LAG_DATA_AMD, + eAntiLagPresentationInfoAMD = VK_STRUCTURE_TYPE_ANTI_LAG_PRESENTATION_INFO_AMD, + ePhysicalDeviceRayTracingPositionFetchFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR, + ePhysicalDeviceShaderObjectFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT, + ePhysicalDeviceShaderObjectPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT, + eShaderCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT, + ePhysicalDevicePipelineBinaryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_FEATURES_KHR, + ePipelineBinaryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_CREATE_INFO_KHR, + ePipelineBinaryInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_INFO_KHR, + ePipelineBinaryKeyKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_KEY_KHR, + ePhysicalDevicePipelineBinaryPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_PROPERTIES_KHR, + eReleaseCapturedPipelineDataInfoKHR = VK_STRUCTURE_TYPE_RELEASE_CAPTURED_PIPELINE_DATA_INFO_KHR, + ePipelineBinaryDataInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_DATA_INFO_KHR, + ePipelineCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_CREATE_INFO_KHR, + eDevicePipelineBinaryInternalCacheControlKHR = VK_STRUCTURE_TYPE_DEVICE_PIPELINE_BINARY_INTERNAL_CACHE_CONTROL_KHR, + ePipelineBinaryHandlesInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_HANDLES_INFO_KHR, + ePhysicalDeviceTilePropertiesFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM, + eTilePropertiesQCOM = VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM, + ePhysicalDeviceAmigoProfilingFeaturesSEC = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC, + eAmigoProfilingSubmitInfoSEC = VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC, + ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM, + ePhysicalDeviceRayTracingInvocationReorderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV, + ePhysicalDeviceRayTracingInvocationReorderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV, + ePhysicalDeviceCooperativeVectorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_VECTOR_FEATURES_NV, + ePhysicalDeviceCooperativeVectorPropertiesNV = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_VECTOR_PROPERTIES_NV, + eCooperativeVectorPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_VECTOR_PROPERTIES_NV, + eConvertCooperativeVectorMatrixInfoNV = VK_STRUCTURE_TYPE_CONVERT_COOPERATIVE_VECTOR_MATRIX_INFO_NV, + ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_FEATURES_NV, + ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_PROPERTIES_NV, + ePhysicalDeviceMutableDescriptorTypeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT, + ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE, + eMutableDescriptorTypeCreateInfoEXT = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT, + eMutableDescriptorTypeCreateInfoVALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE, + ePhysicalDeviceLegacyVertexAttributesFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_FEATURES_EXT, + ePhysicalDeviceLegacyVertexAttributesPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_PROPERTIES_EXT, + eLayerSettingsCreateInfoEXT = VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT, + ePhysicalDeviceShaderCoreBuiltinsFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM, + ePhysicalDeviceShaderCoreBuiltinsPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM, + ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_LIBRARY_GROUP_HANDLES_FEATURES_EXT, + ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT, + eLatencySleepModeInfoNV = VK_STRUCTURE_TYPE_LATENCY_SLEEP_MODE_INFO_NV, + eLatencySleepInfoNV = VK_STRUCTURE_TYPE_LATENCY_SLEEP_INFO_NV, + eSetLatencyMarkerInfoNV = VK_STRUCTURE_TYPE_SET_LATENCY_MARKER_INFO_NV, + eGetLatencyMarkerInfoNV = VK_STRUCTURE_TYPE_GET_LATENCY_MARKER_INFO_NV, + eLatencyTimingsFrameReportNV = VK_STRUCTURE_TYPE_LATENCY_TIMINGS_FRAME_REPORT_NV, + eLatencySubmissionPresentIdNV = VK_STRUCTURE_TYPE_LATENCY_SUBMISSION_PRESENT_ID_NV, + eOutOfBandQueueTypeInfoNV = VK_STRUCTURE_TYPE_OUT_OF_BAND_QUEUE_TYPE_INFO_NV, + eSwapchainLatencyCreateInfoNV = VK_STRUCTURE_TYPE_SWAPCHAIN_LATENCY_CREATE_INFO_NV, + eLatencySurfaceCapabilitiesNV = VK_STRUCTURE_TYPE_LATENCY_SURFACE_CAPABILITIES_NV, + ePhysicalDeviceCooperativeMatrixFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_KHR, + eCooperativeMatrixPropertiesKHR = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_KHR, + ePhysicalDeviceCooperativeMatrixPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_KHR, + ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_RENDER_AREAS_FEATURES_QCOM, + eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM = VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_RENDER_AREAS_RENDER_PASS_BEGIN_INFO_QCOM, + ePhysicalDeviceComputeShaderDerivativesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR, + ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV, + ePhysicalDeviceComputeShaderDerivativesPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_PROPERTIES_KHR, + 
eVideoDecodeAv1CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_CAPABILITIES_KHR, + eVideoDecodeAv1PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PICTURE_INFO_KHR, + eVideoDecodeAv1ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PROFILE_INFO_KHR, + eVideoDecodeAv1SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoDecodeAv1DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_DPB_SLOT_INFO_KHR, + eVideoEncodeAv1CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_CAPABILITIES_KHR, + eVideoEncodeAv1SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoEncodeAv1PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_PICTURE_INFO_KHR, + eVideoEncodeAv1DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_DPB_SLOT_INFO_KHR, + ePhysicalDeviceVideoEncodeAv1FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_AV1_FEATURES_KHR, + eVideoEncodeAv1ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_PROFILE_INFO_KHR, + eVideoEncodeAv1RateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_RATE_CONTROL_INFO_KHR, + eVideoEncodeAv1RateControlLayerInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_RATE_CONTROL_LAYER_INFO_KHR, + eVideoEncodeAv1QualityLevelPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_QUALITY_LEVEL_PROPERTIES_KHR, + eVideoEncodeAv1SessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_SESSION_CREATE_INFO_KHR, + eVideoEncodeAv1GopRemainingFrameInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_GOP_REMAINING_FRAME_INFO_KHR, + ePhysicalDeviceVideoMaintenance1FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR, + eVideoInlineQueryInfoKHR = VK_STRUCTURE_TYPE_VIDEO_INLINE_QUERY_INFO_KHR, + ePhysicalDevicePerStageDescriptorSetFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PER_STAGE_DESCRIPTOR_SET_FEATURES_NV, + ePhysicalDeviceImageProcessing2FeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_FEATURES_QCOM, + ePhysicalDeviceImageProcessing2PropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_PROPERTIES_QCOM, + eSamplerBlockMatchWindowCreateInfoQCOM = VK_STRUCTURE_TYPE_SAMPLER_BLOCK_MATCH_WINDOW_CREATE_INFO_QCOM, + eSamplerCubicWeightsCreateInfoQCOM = VK_STRUCTURE_TYPE_SAMPLER_CUBIC_WEIGHTS_CREATE_INFO_QCOM, + ePhysicalDeviceCubicWeightsFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_WEIGHTS_FEATURES_QCOM, + eBlitImageCubicWeightsInfoQCOM = VK_STRUCTURE_TYPE_BLIT_IMAGE_CUBIC_WEIGHTS_INFO_QCOM, + ePhysicalDeviceYcbcrDegammaFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_DEGAMMA_FEATURES_QCOM, + eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_YCBCR_DEGAMMA_CREATE_INFO_QCOM, + ePhysicalDeviceCubicClampFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM, + ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + eScreenBufferPropertiesQNX = VK_STRUCTURE_TYPE_SCREEN_BUFFER_PROPERTIES_QNX, + eScreenBufferFormatPropertiesQNX = VK_STRUCTURE_TYPE_SCREEN_BUFFER_FORMAT_PROPERTIES_QNX, + eImportScreenBufferInfoQNX = VK_STRUCTURE_TYPE_IMPORT_SCREEN_BUFFER_INFO_QNX, + eExternalFormatQNX = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_QNX, + ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_SCREEN_BUFFER_FEATURES_QNX, +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + 
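// A minimal usage sketch for the StructureType wrapper shown above (illustration only): it assumes
// <vulkan/vulkan.hpp> is on the include path and the default VULKAN_HPP_NAMESPACE alias `vk`.
// The wrapper enumerators reuse the C token values, and vulkan.hpp structs pre-fill their sType
// member with the matching enumerator.
#include <vulkan/vulkan.hpp>

static_assert( static_cast<VkStructureType>( vk::StructureType::eApplicationInfo ) == VK_STRUCTURE_TYPE_APPLICATION_INFO,
               "wrapper enumerators carry the same values as the C enum" );

bool sTypeIsPreFilled()
{
  vk::ApplicationInfo appInfo;                                    // default-constructed wrapper struct
  return appInfo.sType == vk::StructureType::eApplicationInfo;   // sType is already set by vulkan.hpp
}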
ePhysicalDeviceLayeredDriverPropertiesMSFT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT, + eCalibratedTimestampInfoKHR = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR, + eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, + eSetDescriptorBufferOffsetsInfoEXT = VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT, + eBindDescriptorBufferEmbeddedSamplersInfoEXT = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT, + ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV, + ePhysicalDeviceTileMemoryHeapFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_MEMORY_HEAP_FEATURES_QCOM, + ePhysicalDeviceTileMemoryHeapPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_MEMORY_HEAP_PROPERTIES_QCOM, + eTileMemoryRequirementsQCOM = VK_STRUCTURE_TYPE_TILE_MEMORY_REQUIREMENTS_QCOM, + eTileMemoryBindInfoQCOM = VK_STRUCTURE_TYPE_TILE_MEMORY_BIND_INFO_QCOM, + eTileMemorySizeInfoQCOM = VK_STRUCTURE_TYPE_TILE_MEMORY_SIZE_INFO_QCOM, + eDisplaySurfaceStereoCreateInfoNV = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_STEREO_CREATE_INFO_NV, + eDisplayModeStereoPropertiesNV = VK_STRUCTURE_TYPE_DISPLAY_MODE_STEREO_PROPERTIES_NV, + eVideoEncodeQuantizationMapCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_CAPABILITIES_KHR, + eVideoFormatQuantizationMapPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_QUANTIZATION_MAP_PROPERTIES_KHR, + eVideoEncodeQuantizationMapInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_INFO_KHR, + eVideoEncodeQuantizationMapSessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_SESSION_PARAMETERS_CREATE_INFO_KHR, + ePhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUANTIZATION_MAP_FEATURES_KHR, + eVideoEncodeH264QuantizationMapCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUANTIZATION_MAP_CAPABILITIES_KHR, + eVideoEncodeH265QuantizationMapCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUANTIZATION_MAP_CAPABILITIES_KHR, + eVideoFormatH265QuantizationMapPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_H265_QUANTIZATION_MAP_PROPERTIES_KHR, + eVideoEncodeAv1QuantizationMapCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_QUANTIZATION_MAP_CAPABILITIES_KHR, + eVideoFormatAv1QuantizationMapPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_AV1_QUANTIZATION_MAP_PROPERTIES_KHR, + ePhysicalDeviceRawAccessChainsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV, + eExternalComputeQueueDeviceCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_COMPUTE_QUEUE_DEVICE_CREATE_INFO_NV, + eExternalComputeQueueCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_COMPUTE_QUEUE_CREATE_INFO_NV, + eExternalComputeQueueDataParamsNV = VK_STRUCTURE_TYPE_EXTERNAL_COMPUTE_QUEUE_DATA_PARAMS_NV, + ePhysicalDeviceExternalComputeQueuePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_COMPUTE_QUEUE_PROPERTIES_NV, + ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_RELAXED_EXTENDED_INSTRUCTION_FEATURES_KHR, + ePhysicalDeviceCommandBufferInheritanceFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMMAND_BUFFER_INHERITANCE_FEATURES_NV, + ePhysicalDeviceMaintenance7FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_FEATURES_KHR, + ePhysicalDeviceMaintenance7PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_PROPERTIES_KHR, + ePhysicalDeviceLayeredApiPropertiesListKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_LIST_KHR, + ePhysicalDeviceLayeredApiPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_KHR, + ePhysicalDeviceLayeredApiVulkanPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_VULKAN_PROPERTIES_KHR, + ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV, + ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_REPLICATED_COMPOSITES_FEATURES_EXT, + ePhysicalDeviceRayTracingValidationFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV, + ePhysicalDeviceClusterAccelerationStructureFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_ACCELERATION_STRUCTURE_FEATURES_NV, + ePhysicalDeviceClusterAccelerationStructurePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_ACCELERATION_STRUCTURE_PROPERTIES_NV, + eClusterAccelerationStructureClustersBottomLevelInputNV = VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_CLUSTERS_BOTTOM_LEVEL_INPUT_NV, + eClusterAccelerationStructureTriangleClusterInputNV = VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_TRIANGLE_CLUSTER_INPUT_NV, + eClusterAccelerationStructureMoveObjectsInputNV = VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_MOVE_OBJECTS_INPUT_NV, + eClusterAccelerationStructureInputInfoNV = VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_INPUT_INFO_NV, + eClusterAccelerationStructureCommandsInfoNV = VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_COMMANDS_INFO_NV, + eRayTracingPipelineClusterAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CLUSTER_ACCELERATION_STRUCTURE_CREATE_INFO_NV, + ePhysicalDevicePartitionedAccelerationStructureFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PARTITIONED_ACCELERATION_STRUCTURE_FEATURES_NV, + ePhysicalDevicePartitionedAccelerationStructurePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PARTITIONED_ACCELERATION_STRUCTURE_PROPERTIES_NV, + eWriteDescriptorSetPartitionedAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_PARTITIONED_ACCELERATION_STRUCTURE_NV, + ePartitionedAccelerationStructureInstancesInputNV = VK_STRUCTURE_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCES_INPUT_NV, + eBuildPartitionedAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_BUILD_PARTITIONED_ACCELERATION_STRUCTURE_INFO_NV, + ePartitionedAccelerationStructureFlagsNV = VK_STRUCTURE_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_FLAGS_NV, + ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_EXT, + ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_EXT, + eGeneratedCommandsMemoryRequirementsInfoEXT = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_EXT, + eIndirectExecutionSetCreateInfoEXT = VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_CREATE_INFO_EXT, + eGeneratedCommandsInfoEXT = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_EXT, + eIndirectCommandsLayoutCreateInfoEXT = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_EXT, + eIndirectCommandsLayoutTokenEXT = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_EXT, + eWriteIndirectExecutionSetPipelineEXT = VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_PIPELINE_EXT, + eWriteIndirectExecutionSetShaderEXT = VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_SHADER_EXT, + eIndirectExecutionSetPipelineInfoEXT = VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_PIPELINE_INFO_EXT, + 
eIndirectExecutionSetShaderInfoEXT = VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_INFO_EXT, + eIndirectExecutionSetShaderLayoutInfoEXT = VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_LAYOUT_INFO_EXT, + eGeneratedCommandsPipelineInfoEXT = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_PIPELINE_INFO_EXT, + eGeneratedCommandsShaderInfoEXT = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_SHADER_INFO_EXT, + ePhysicalDeviceMaintenance8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_8_FEATURES_KHR, + eMemoryBarrierAccessFlags3KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_ACCESS_FLAGS_3_KHR, + ePhysicalDeviceImageAlignmentControlFeaturesMESA = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_FEATURES_MESA, + ePhysicalDeviceImageAlignmentControlPropertiesMESA = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_PROPERTIES_MESA, + eImageAlignmentControlCreateInfoMESA = VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA, + ePhysicalDeviceDepthClampControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_CONTROL_FEATURES_EXT, + ePipelineViewportDepthClampControlCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLAMP_CONTROL_CREATE_INFO_EXT, + ePhysicalDeviceVideoMaintenance2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_2_FEATURES_KHR, + eVideoDecodeH264InlineSessionParametersInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_INLINE_SESSION_PARAMETERS_INFO_KHR, + eVideoDecodeH265InlineSessionParametersInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_INLINE_SESSION_PARAMETERS_INFO_KHR, + eVideoDecodeAv1InlineSessionParametersInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_INLINE_SESSION_PARAMETERS_INFO_KHR, + ePhysicalDeviceHdrVividFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HDR_VIVID_FEATURES_HUAWEI, + eHdrVividDynamicMetadataHUAWEI = VK_STRUCTURE_TYPE_HDR_VIVID_DYNAMIC_METADATA_HUAWEI, + ePhysicalDeviceCooperativeMatrix2FeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_FEATURES_NV, + eCooperativeMatrixFlexibleDimensionsPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_FLEXIBLE_DIMENSIONS_PROPERTIES_NV, + ePhysicalDeviceCooperativeMatrix2PropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_PROPERTIES_NV, + ePhysicalDevicePipelineOpacityMicromapFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_OPACITY_MICROMAP_FEATURES_ARM, +#if defined( VK_USE_PLATFORM_METAL_EXT ) + eImportMemoryMetalHandleInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_METAL_HANDLE_INFO_EXT, + eMemoryMetalHandlePropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_METAL_HANDLE_PROPERTIES_EXT, + eMemoryGetMetalHandleInfoEXT = VK_STRUCTURE_TYPE_MEMORY_GET_METAL_HANDLE_INFO_EXT, +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + ePhysicalDeviceDepthClampZeroOneFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_KHR, + ePhysicalDeviceDepthClampZeroOneFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT, + ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_ROBUSTNESS_FEATURES_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eSetPresentConfigNV = VK_STRUCTURE_TYPE_SET_PRESENT_CONFIG_NV, + ePhysicalDevicePresentMeteringFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_METERING_FEATURES_NV, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + ePhysicalDeviceFragmentDensityMapOffsetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM, + ePhysicalDeviceFragmentDensityMapOffsetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_EXT, + ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM, + eRenderPassFragmentDensityMapOffsetEndInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_EXT, + eSubpassFragmentDensityMapOffsetEndInfoQCOM = VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM, + eRenderingEndInfoEXT = VK_STRUCTURE_TYPE_RENDERING_END_INFO_EXT + }; + + // wrapper class for enum VkPipelineCacheHeaderVersion, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCacheHeaderVersion.html + enum class PipelineCacheHeaderVersion + { + eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE + }; + + // wrapper class for enum VkObjectType, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkObjectType.html + enum class ObjectType + { + eUnknown = VK_OBJECT_TYPE_UNKNOWN, + eInstance = VK_OBJECT_TYPE_INSTANCE, + ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE, + eDevice = VK_OBJECT_TYPE_DEVICE, + eQueue = VK_OBJECT_TYPE_QUEUE, + eSemaphore = VK_OBJECT_TYPE_SEMAPHORE, + eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER, + eFence = VK_OBJECT_TYPE_FENCE, + eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY, + eBuffer = VK_OBJECT_TYPE_BUFFER, + eImage = VK_OBJECT_TYPE_IMAGE, + eEvent = VK_OBJECT_TYPE_EVENT, + eQueryPool = VK_OBJECT_TYPE_QUERY_POOL, + eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW, + eImageView = VK_OBJECT_TYPE_IMAGE_VIEW, + eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE, + ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE, + ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT, + eRenderPass = VK_OBJECT_TYPE_RENDER_PASS, + ePipeline = VK_OBJECT_TYPE_PIPELINE, + eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, + eSampler = VK_OBJECT_TYPE_SAMPLER, + eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL, + eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET, + eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER, + eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL, + eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR, + eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR, + ePrivateDataSlot = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT, + ePrivateDataSlotEXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT, + eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR, + eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR, + eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR, + eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR, + eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT, + eVideoSessionKHR = VK_OBJECT_TYPE_VIDEO_SESSION_KHR, + eVideoSessionParametersKHR = VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR, + eCuModuleNVX = VK_OBJECT_TYPE_CU_MODULE_NVX, + eCuFunctionNVX = VK_OBJECT_TYPE_CU_FUNCTION_NVX, + eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT, + eAccelerationStructureKHR = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR, + eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT, + eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV, + ePerformanceConfigurationINTEL = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL, + eDeferredOperationKHR = VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR, + eIndirectCommandsLayoutNV = 
VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eCudaModuleNV = VK_OBJECT_TYPE_CUDA_MODULE_NV, + eCudaFunctionNV = VK_OBJECT_TYPE_CUDA_FUNCTION_NV, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eBufferCollectionFUCHSIA = VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + eMicromapEXT = VK_OBJECT_TYPE_MICROMAP_EXT, + eOpticalFlowSessionNV = VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV, + eShaderEXT = VK_OBJECT_TYPE_SHADER_EXT, + ePipelineBinaryKHR = VK_OBJECT_TYPE_PIPELINE_BINARY_KHR, + eExternalComputeQueueNV = VK_OBJECT_TYPE_EXTERNAL_COMPUTE_QUEUE_NV, + eIndirectCommandsLayoutEXT = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_EXT, + eIndirectExecutionSetEXT = VK_OBJECT_TYPE_INDIRECT_EXECUTION_SET_EXT + }; + + // wrapper class for enum VkVendorId, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVendorId.html + enum class VendorId + { + eKhronos = VK_VENDOR_ID_KHRONOS, + eVIV = VK_VENDOR_ID_VIV, + eVSI = VK_VENDOR_ID_VSI, + eKazan = VK_VENDOR_ID_KAZAN, + eCodeplay = VK_VENDOR_ID_CODEPLAY, + eMESA = VK_VENDOR_ID_MESA, + ePocl = VK_VENDOR_ID_POCL, + eMobileye = VK_VENDOR_ID_MOBILEYE + }; + + // wrapper class for enum VkFormat, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFormat.html + enum class Format + { + eUndefined = VK_FORMAT_UNDEFINED, + eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8, + eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16, + eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16, + eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16, + eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16, + eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16, + eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16, + eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16, + eR8Unorm = VK_FORMAT_R8_UNORM, + eR8Snorm = VK_FORMAT_R8_SNORM, + eR8Uscaled = VK_FORMAT_R8_USCALED, + eR8Sscaled = VK_FORMAT_R8_SSCALED, + eR8Uint = VK_FORMAT_R8_UINT, + eR8Sint = VK_FORMAT_R8_SINT, + eR8Srgb = VK_FORMAT_R8_SRGB, + eR8G8Unorm = VK_FORMAT_R8G8_UNORM, + eR8G8Snorm = VK_FORMAT_R8G8_SNORM, + eR8G8Uscaled = VK_FORMAT_R8G8_USCALED, + eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED, + eR8G8Uint = VK_FORMAT_R8G8_UINT, + eR8G8Sint = VK_FORMAT_R8G8_SINT, + eR8G8Srgb = VK_FORMAT_R8G8_SRGB, + eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM, + eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM, + eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED, + eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED, + eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT, + eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT, + eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB, + eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM, + eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM, + eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED, + eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED, + eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT, + eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT, + eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB, + eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM, + eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM, + eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED, + eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED, + eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT, + eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT, + eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB, + eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM, + eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM, + eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED, + eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED, + eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT, + eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT, + eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB, + 
eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32, + eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32, + eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32, + eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32, + eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32, + eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32, + eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32, + eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32, + eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32, + eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32, + eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32, + eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32, + eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32, + eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32, + eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32, + eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32, + eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32, + eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32, + eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32, + eR16Unorm = VK_FORMAT_R16_UNORM, + eR16Snorm = VK_FORMAT_R16_SNORM, + eR16Uscaled = VK_FORMAT_R16_USCALED, + eR16Sscaled = VK_FORMAT_R16_SSCALED, + eR16Uint = VK_FORMAT_R16_UINT, + eR16Sint = VK_FORMAT_R16_SINT, + eR16Sfloat = VK_FORMAT_R16_SFLOAT, + eR16G16Unorm = VK_FORMAT_R16G16_UNORM, + eR16G16Snorm = VK_FORMAT_R16G16_SNORM, + eR16G16Uscaled = VK_FORMAT_R16G16_USCALED, + eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED, + eR16G16Uint = VK_FORMAT_R16G16_UINT, + eR16G16Sint = VK_FORMAT_R16G16_SINT, + eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT, + eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM, + eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM, + eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED, + eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED, + eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT, + eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT, + eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT, + eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM, + eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM, + eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED, + eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED, + eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT, + eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT, + eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT, + eR32Uint = VK_FORMAT_R32_UINT, + eR32Sint = VK_FORMAT_R32_SINT, + eR32Sfloat = VK_FORMAT_R32_SFLOAT, + eR32G32Uint = VK_FORMAT_R32G32_UINT, + eR32G32Sint = VK_FORMAT_R32G32_SINT, + eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT, + eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT, + eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT, + eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT, + eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT, + eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT, + eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT, + eR64Uint = VK_FORMAT_R64_UINT, + eR64Sint = VK_FORMAT_R64_SINT, + eR64Sfloat = VK_FORMAT_R64_SFLOAT, + eR64G64Uint = VK_FORMAT_R64G64_UINT, + eR64G64Sint = VK_FORMAT_R64G64_SINT, + eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT, + eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT, + eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT, + eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT, + eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT, + eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT, + eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT, + eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32, + 
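// A small interop sketch for the Format wrapper (illustration only), again assuming
// <vulkan/vulkan.hpp> and the default `vk` namespace: the scoped wrapper carries exactly the
// VkFormat values, so a static_cast moves between the C++ and C enums losslessly.
#include <vulkan/vulkan.hpp>

VkFormat toCFormat( vk::Format format )     // hand a wrapper value to a plain C entry point
{
  return static_cast<VkFormat>( format );
}

vk::Format fromCFormat( VkFormat format )   // and wrap a value coming back from the C API
{
  return static_cast<vk::Format>( format );
}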
eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, + eD16Unorm = VK_FORMAT_D16_UNORM, + eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32, + eD32Sfloat = VK_FORMAT_D32_SFLOAT, + eS8Uint = VK_FORMAT_S8_UINT, + eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT, + eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT, + eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT, + eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK, + eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK, + eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK, + eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK, + eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK, + eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK, + eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK, + eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK, + eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK, + eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK, + eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK, + eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK, + eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK, + eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK, + eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK, + eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK, + eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, + eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK, + eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, + eEtc2R8G8B8A1SrgbBlock = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, + eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, + eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, + eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK, + eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK, + eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK, + eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK, + eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK, + eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK, + eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK, + eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK, + eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK, + eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK, + eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK, + eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK, + eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK, + eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK, + eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK, + eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK, + eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK, + eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK, + eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK, + eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK, + eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK, + eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK, + eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK, + eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK, + eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK, + eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK, + eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK, + eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK, + eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK, + eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK, + eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK, + eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, + eG8B8G8R8422Unorm = VK_FORMAT_G8B8G8R8_422_UNORM, + eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR, + eB8G8R8G8422Unorm = VK_FORMAT_B8G8R8G8_422_UNORM, + eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR, + eG8B8R83Plane420Unorm = 
VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, + eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR, + eG8B8R82Plane420Unorm = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, + eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR, + eG8B8R83Plane422Unorm = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, + eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR, + eG8B8R82Plane422Unorm = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, + eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR, + eG8B8R83Plane444Unorm = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, + eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR, + eR10X6UnormPack16 = VK_FORMAT_R10X6_UNORM_PACK16, + eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR, + eR10X6G10X6Unorm2Pack16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16, + eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR, + eR10X6G10X6B10X6A10X6Unorm4Pack16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, + eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR, + eG10X6B10X6G10X6R10X6422Unorm4Pack16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, + eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR, + eB10X6G10X6R10X6G10X6422Unorm4Pack16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, + eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR, + eG10X6B10X6R10X63Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, + eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR, + eG10X6B10X6R10X62Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, + eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR, + eG10X6B10X6R10X63Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR, + eG10X6B10X6R10X62Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR, + eG10X6B10X6R10X63Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, + eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR, + eR12X4UnormPack16 = VK_FORMAT_R12X4_UNORM_PACK16, + eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR, + eR12X4G12X4Unorm2Pack16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16, + eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR, + eR12X4G12X4B12X4A12X4Unorm4Pack16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, + eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR, + eG12X4B12X4G12X4R12X4422Unorm4Pack16 = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, + eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR, + eB12X4G12X4R12X4G12X4422Unorm4Pack16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, + eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR, + eG12X4B12X4R12X43Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, + eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR, + eG12X4B12X4R12X42Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, + 
eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR, + eG12X4B12X4R12X43Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR, + eG12X4B12X4R12X42Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR, + eG12X4B12X4R12X43Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, + eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR, + eG16B16G16R16422Unorm = VK_FORMAT_G16B16G16R16_422_UNORM, + eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM_KHR, + eB16G16R16G16422Unorm = VK_FORMAT_B16G16R16G16_422_UNORM, + eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR, + eG16B16R163Plane420Unorm = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, + eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR, + eG16B16R162Plane420Unorm = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, + eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR, + eG16B16R163Plane422Unorm = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, + eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR, + eG16B16R162Plane422Unorm = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, + eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR, + eG16B16R163Plane444Unorm = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, + eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR, + eG8B8R82Plane444Unorm = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM, + eG8B8R82Plane444UnormEXT = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM_EXT, + eG10X6B10X6R10X62Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16, + eG10X6B10X6R10X62Plane444Unorm3Pack16EXT = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT, + eG12X4B12X4R12X42Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16, + eG12X4B12X4R12X42Plane444Unorm3Pack16EXT = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT, + eG16B16R162Plane444Unorm = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM, + eG16B16R162Plane444UnormEXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT, + eA4R4G4B4UnormPack16 = VK_FORMAT_A4R4G4B4_UNORM_PACK16, + eA4R4G4B4UnormPack16EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT, + eA4B4G4R4UnormPack16 = VK_FORMAT_A4B4G4R4_UNORM_PACK16, + eA4B4G4R4UnormPack16EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT, + eAstc4x4SfloatBlock = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK, + eAstc4x4SfloatBlockEXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT, + eAstc5x4SfloatBlock = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK, + eAstc5x4SfloatBlockEXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT, + eAstc5x5SfloatBlock = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK, + eAstc5x5SfloatBlockEXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT, + eAstc6x5SfloatBlock = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK, + eAstc6x5SfloatBlockEXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT, + eAstc6x6SfloatBlock = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK, + eAstc6x6SfloatBlockEXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT, + eAstc8x5SfloatBlock = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK, + eAstc8x5SfloatBlockEXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT, + eAstc8x6SfloatBlock = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK, + eAstc8x6SfloatBlockEXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT, + eAstc8x8SfloatBlock = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK, + eAstc8x8SfloatBlockEXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT, + 
eAstc10x5SfloatBlock = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK, + eAstc10x5SfloatBlockEXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT, + eAstc10x6SfloatBlock = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK, + eAstc10x6SfloatBlockEXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT, + eAstc10x8SfloatBlock = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK, + eAstc10x8SfloatBlockEXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT, + eAstc10x10SfloatBlock = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK, + eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT, + eAstc12x10SfloatBlock = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK, + eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT, + eAstc12x12SfloatBlock = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK, + eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT, + eA1B5G5R5UnormPack16 = VK_FORMAT_A1B5G5R5_UNORM_PACK16, + eA1B5G5R5UnormPack16KHR = VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR, + eA8Unorm = VK_FORMAT_A8_UNORM, + eA8UnormKHR = VK_FORMAT_A8_UNORM_KHR, + ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG, + ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG, + ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG, + ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG, + ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG, + ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG, + ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG, + ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG, + eR16G16Sfixed5NV = VK_FORMAT_R16G16_SFIXED5_NV, + eR16G16S105NV = VK_FORMAT_R16G16_S10_5_NV + }; + + // wrapper class for enum VkFormatFeatureFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFormatFeatureFlagBits.html + enum class FormatFeatureFlagBits : VkFormatFeatureFlags + { + eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT, + eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT, + eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT, + eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT, + eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT, + eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT, + eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT, + eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, + eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT, + eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT, + eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT, + eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT, + eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, + eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR, + eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, + eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR, + eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, + eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR, + eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, + eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR, + eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, + eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR, + 
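// A usage sketch for the FlagBits/Flags pattern that follows (illustration only), assuming
// <vulkan/vulkan.hpp>, the default `vk` namespace, and an already-selected vk::PhysicalDevice:
// individual FlagBits combine with | into a Flags value, & masks bits, and comparing the masked
// result against the required set checks that every required bit is present.
#include <vulkan/vulkan.hpp>

bool supportsLinearSampling( vk::PhysicalDevice physicalDevice, vk::Format format )
{
  vk::FormatProperties properties = physicalDevice.getFormatProperties( format );

  vk::FormatFeatureFlags required = vk::FormatFeatureFlagBits::eSampledImage |
                                    vk::FormatFeatureFlagBits::eSampledImageFilterLinear;

  return ( properties.optimalTilingFeatures & required ) == required;
}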
eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR, + eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR, + eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT, + eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR, + eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, + eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR, + eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT, + eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT, + eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR, + eVideoDecodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR, + eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR, + eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT, + eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, + eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT, + eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eVideoEncodeInputKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR, + eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR + }; + + // wrapper using for bitmask VkFormatFeatureFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFormatFeatureFlags.html + using FormatFeatureFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkFormatFeatureFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR FormatFeatureFlags allFlags = + FormatFeatureFlagBits::eSampledImage | FormatFeatureFlagBits::eStorageImage | FormatFeatureFlagBits::eStorageImageAtomic | + FormatFeatureFlagBits::eUniformTexelBuffer | FormatFeatureFlagBits::eStorageTexelBuffer | FormatFeatureFlagBits::eStorageTexelBufferAtomic | + FormatFeatureFlagBits::eVertexBuffer | FormatFeatureFlagBits::eColorAttachment | FormatFeatureFlagBits::eColorAttachmentBlend | + FormatFeatureFlagBits::eDepthStencilAttachment | FormatFeatureFlagBits::eBlitSrc | FormatFeatureFlagBits::eBlitDst | + FormatFeatureFlagBits::eSampledImageFilterLinear | FormatFeatureFlagBits::eTransferSrc | FormatFeatureFlagBits::eTransferDst | + FormatFeatureFlagBits::eMidpointChromaSamples | FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter | + FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter | + FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit | + FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable | FormatFeatureFlagBits::eDisjoint | + FormatFeatureFlagBits::eCositedChromaSamples | FormatFeatureFlagBits::eSampledImageFilterMinmax | FormatFeatureFlagBits::eVideoDecodeOutputKHR | + FormatFeatureFlagBits::eVideoDecodeDpbKHR | FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR | + FormatFeatureFlagBits::eSampledImageFilterCubicEXT 
| FormatFeatureFlagBits::eFragmentDensityMapEXT | + FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR | FormatFeatureFlagBits::eVideoEncodeInputKHR | FormatFeatureFlagBits::eVideoEncodeDpbKHR; + }; + + // wrapper class for enum VkImageCreateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCreateFlagBits.html + enum class ImageCreateFlagBits : VkImageCreateFlags + { + eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT, + eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, + eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT, + eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, + eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, + eAlias = VK_IMAGE_CREATE_ALIAS_BIT, + eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR, + eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, + eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, + e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, + e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR, + eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, + eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR, + eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, + eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR, + eProtected = VK_IMAGE_CREATE_PROTECTED_BIT, + eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT, + eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR, + eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV, + eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT, + eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT, + eDescriptorBufferCaptureReplayEXT = VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT, + eMultisampledRenderToSingleSampledEXT = VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT, + e2DViewCompatibleEXT = VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT, + eVideoProfileIndependentKHR = VK_IMAGE_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR, + eFragmentDensityMapOffsetEXT = VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_EXT, + eFragmentDensityMapOffsetQCOM = VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM + }; + + // wrapper using for bitmask VkImageCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCreateFlags.html + using ImageCreateFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkImageCreateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImageCreateFlags allFlags = + ImageCreateFlagBits::eSparseBinding | ImageCreateFlagBits::eSparseResidency | ImageCreateFlagBits::eSparseAliased | ImageCreateFlagBits::eMutableFormat | + ImageCreateFlagBits::eCubeCompatible | ImageCreateFlagBits::eAlias | ImageCreateFlagBits::eSplitInstanceBindRegions | + ImageCreateFlagBits::e2DArrayCompatible | ImageCreateFlagBits::eBlockTexelViewCompatible | ImageCreateFlagBits::eExtendedUsage | + ImageCreateFlagBits::eProtected | ImageCreateFlagBits::eDisjoint | ImageCreateFlagBits::eCornerSampledNV | + ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT | ImageCreateFlagBits::eSubsampledEXT | ImageCreateFlagBits::eDescriptorBufferCaptureReplayEXT | + ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT | ImageCreateFlagBits::e2DViewCompatibleEXT | + ImageCreateFlagBits::eVideoProfileIndependentKHR | ImageCreateFlagBits::eFragmentDensityMapOffsetEXT; + }; + + // wrapper class for enum VkImageTiling, 
see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageTiling.html + enum class ImageTiling + { + eOptimal = VK_IMAGE_TILING_OPTIMAL, + eLinear = VK_IMAGE_TILING_LINEAR, + eDrmFormatModifierEXT = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT + }; + + // wrapper class for enum VkImageType, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageType.html + enum class ImageType + { + e1D = VK_IMAGE_TYPE_1D, + e2D = VK_IMAGE_TYPE_2D, + e3D = VK_IMAGE_TYPE_3D + }; + + // wrapper class for enum VkImageUsageFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageUsageFlagBits.html + enum class ImageUsageFlagBits : VkImageUsageFlags + { + eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT, + eSampled = VK_IMAGE_USAGE_SAMPLED_BIT, + eStorage = VK_IMAGE_USAGE_STORAGE_BIT, + eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, + eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, + eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, + eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, + eHostTransfer = VK_IMAGE_USAGE_HOST_TRANSFER_BIT, + eHostTransferEXT = VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT, + eVideoDecodeDstKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR, + eVideoDecodeSrcKHR = VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR, + eVideoDecodeDpbKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR, + eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, + eFragmentShadingRateAttachmentKHR = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, + eVideoEncodeDstKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR, + eVideoEncodeSrcKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, + eVideoEncodeDpbKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR, + eAttachmentFeedbackLoopEXT = VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, + eInvocationMaskHUAWEI = VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI, + eSampleWeightQCOM = VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM, + eSampleBlockMatchQCOM = VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM, + eTileMemoryQCOM = VK_IMAGE_USAGE_TILE_MEMORY_QCOM, + eVideoEncodeQuantizationDeltaMapKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR, + eVideoEncodeEmphasisMapKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR + }; + + // wrapper using for bitmask VkImageUsageFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageUsageFlags.html + using ImageUsageFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkImageUsageFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImageUsageFlags allFlags = + ImageUsageFlagBits::eTransferSrc | ImageUsageFlagBits::eTransferDst | ImageUsageFlagBits::eSampled | ImageUsageFlagBits::eStorage | + ImageUsageFlagBits::eColorAttachment | ImageUsageFlagBits::eDepthStencilAttachment | ImageUsageFlagBits::eTransientAttachment | + ImageUsageFlagBits::eInputAttachment | ImageUsageFlagBits::eHostTransfer | ImageUsageFlagBits::eVideoDecodeDstKHR | + ImageUsageFlagBits::eVideoDecodeSrcKHR | ImageUsageFlagBits::eVideoDecodeDpbKHR | ImageUsageFlagBits::eFragmentDensityMapEXT | + ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR | ImageUsageFlagBits::eVideoEncodeDstKHR | ImageUsageFlagBits::eVideoEncodeSrcKHR | + ImageUsageFlagBits::eVideoEncodeDpbKHR | ImageUsageFlagBits::eAttachmentFeedbackLoopEXT | ImageUsageFlagBits::eInvocationMaskHUAWEI | + 
ImageUsageFlagBits::eSampleWeightQCOM | ImageUsageFlagBits::eSampleBlockMatchQCOM | ImageUsageFlagBits::eTileMemoryQCOM | + ImageUsageFlagBits::eVideoEncodeQuantizationDeltaMapKHR | ImageUsageFlagBits::eVideoEncodeEmphasisMapKHR; + }; + + // wrapper class for enum VkInstanceCreateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkInstanceCreateFlagBits.html + enum class InstanceCreateFlagBits : VkInstanceCreateFlags + { + eEnumeratePortabilityKHR = VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR + }; + + // wrapper using for bitmask VkInstanceCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkInstanceCreateFlags.html + using InstanceCreateFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkInstanceCreateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR InstanceCreateFlags allFlags = InstanceCreateFlagBits::eEnumeratePortabilityKHR; + }; + + // wrapper class for enum VkInternalAllocationType, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkInternalAllocationType.html + enum class InternalAllocationType + { + eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE + }; + + // wrapper class for enum VkMemoryHeapFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryHeapFlagBits.html + enum class MemoryHeapFlagBits : VkMemoryHeapFlags + { + eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT, + eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, + eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR, + eTileMemoryQCOM = VK_MEMORY_HEAP_TILE_MEMORY_BIT_QCOM + }; + + // wrapper using for bitmask VkMemoryHeapFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryHeapFlags.html + using MemoryHeapFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkMemoryHeapFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryHeapFlags allFlags = + MemoryHeapFlagBits::eDeviceLocal | MemoryHeapFlagBits::eMultiInstance | MemoryHeapFlagBits::eTileMemoryQCOM; + }; + + // wrapper class for enum VkMemoryPropertyFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryPropertyFlagBits.html + enum class MemoryPropertyFlagBits : VkMemoryPropertyFlags + { + eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, + eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, + eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, + eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT, + eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT, + eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT, + eDeviceCoherentAMD = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD, + eDeviceUncachedAMD = VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD, + eRdmaCapableNV = VK_MEMORY_PROPERTY_RDMA_CAPABLE_BIT_NV + }; + + // wrapper using for bitmask VkMemoryPropertyFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryPropertyFlags.html + using MemoryPropertyFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkMemoryPropertyFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryPropertyFlags allFlags = + MemoryPropertyFlagBits::eDeviceLocal | MemoryPropertyFlagBits::eHostVisible | MemoryPropertyFlagBits::eHostCoherent | + MemoryPropertyFlagBits::eHostCached | MemoryPropertyFlagBits::eLazilyAllocated | MemoryPropertyFlagBits::eProtected | + 
MemoryPropertyFlagBits::eDeviceCoherentAMD | MemoryPropertyFlagBits::eDeviceUncachedAMD | MemoryPropertyFlagBits::eRdmaCapableNV; + }; + + // wrapper class for enum VkPhysicalDeviceType, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceType.html + enum class PhysicalDeviceType + { + eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER, + eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU, + eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU, + eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU, + eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU + }; + + // wrapper class for enum VkQueueFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFlagBits.html + enum class QueueFlagBits : VkQueueFlags + { + eGraphics = VK_QUEUE_GRAPHICS_BIT, + eCompute = VK_QUEUE_COMPUTE_BIT, + eTransfer = VK_QUEUE_TRANSFER_BIT, + eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT, + eProtected = VK_QUEUE_PROTECTED_BIT, + eVideoDecodeKHR = VK_QUEUE_VIDEO_DECODE_BIT_KHR, + eVideoEncodeKHR = VK_QUEUE_VIDEO_ENCODE_BIT_KHR, + eOpticalFlowNV = VK_QUEUE_OPTICAL_FLOW_BIT_NV + }; + + // wrapper using for bitmask VkQueueFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFlags.html + using QueueFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkQueueFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR QueueFlags allFlags = QueueFlagBits::eGraphics | QueueFlagBits::eCompute | QueueFlagBits::eTransfer | + QueueFlagBits::eSparseBinding | QueueFlagBits::eProtected | QueueFlagBits::eVideoDecodeKHR | + QueueFlagBits::eVideoEncodeKHR | QueueFlagBits::eOpticalFlowNV; + }; + + // wrapper class for enum VkSampleCountFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSampleCountFlagBits.html + enum class SampleCountFlagBits : VkSampleCountFlags + { + e1 = VK_SAMPLE_COUNT_1_BIT, + e2 = VK_SAMPLE_COUNT_2_BIT, + e4 = VK_SAMPLE_COUNT_4_BIT, + e8 = VK_SAMPLE_COUNT_8_BIT, + e16 = VK_SAMPLE_COUNT_16_BIT, + e32 = VK_SAMPLE_COUNT_32_BIT, + e64 = VK_SAMPLE_COUNT_64_BIT + }; + + // wrapper using for bitmask VkSampleCountFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSampleCountFlags.html + using SampleCountFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkSampleCountFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SampleCountFlags allFlags = SampleCountFlagBits::e1 | SampleCountFlagBits::e2 | SampleCountFlagBits::e4 | + SampleCountFlagBits::e8 | SampleCountFlagBits::e16 | SampleCountFlagBits::e32 | + SampleCountFlagBits::e64; + }; + + // wrapper class for enum VkSystemAllocationScope, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSystemAllocationScope.html + enum class SystemAllocationScope + { + eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, + eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT, + eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE, + eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE, + eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE + }; + + enum class DeviceCreateFlagBits : VkDeviceCreateFlags + { + }; + + // wrapper using for bitmask VkDeviceCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceCreateFlags.html + using DeviceCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceCreateFlags allFlags = {}; + }; + + // 
wrapper class for enum VkDeviceQueueCreateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueCreateFlagBits.html + enum class DeviceQueueCreateFlagBits : VkDeviceQueueCreateFlags + { + eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT + }; + + // wrapper using for bitmask VkDeviceQueueCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueCreateFlags.html + using DeviceQueueCreateFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkDeviceQueueCreateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceQueueCreateFlags allFlags = DeviceQueueCreateFlagBits::eProtected; + }; + + // wrapper class for enum VkPipelineStageFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineStageFlagBits.html + enum class PipelineStageFlagBits : VkPipelineStageFlags + { + eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, + eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT, + eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, + eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, + eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT, + eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT, + eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, + eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, + eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT, + eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, + eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, + eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, + eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT, + eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, + eHost = VK_PIPELINE_STAGE_HOST_BIT, + eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, + eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, + eNone = VK_PIPELINE_STAGE_NONE, + eNoneKHR = VK_PIPELINE_STAGE_NONE_KHR, + eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT, + eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT, + eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, + eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV, + eRayTracingShaderKHR = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR, + eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV, + eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT, + eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV, + eTaskShaderEXT = VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT, + eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, + eMeshShaderEXT = VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT, + eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, + eCommandPreprocessEXT = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_EXT, + eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV + }; + + // wrapper using for bitmask VkPipelineStageFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineStageFlags.html + using PipelineStageFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkPipelineStageFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineStageFlags allFlags = + 
PipelineStageFlagBits::eTopOfPipe | PipelineStageFlagBits::eDrawIndirect | PipelineStageFlagBits::eVertexInput | PipelineStageFlagBits::eVertexShader | + PipelineStageFlagBits::eTessellationControlShader | PipelineStageFlagBits::eTessellationEvaluationShader | PipelineStageFlagBits::eGeometryShader | + PipelineStageFlagBits::eFragmentShader | PipelineStageFlagBits::eEarlyFragmentTests | PipelineStageFlagBits::eLateFragmentTests | + PipelineStageFlagBits::eColorAttachmentOutput | PipelineStageFlagBits::eComputeShader | PipelineStageFlagBits::eTransfer | + PipelineStageFlagBits::eBottomOfPipe | PipelineStageFlagBits::eHost | PipelineStageFlagBits::eAllGraphics | PipelineStageFlagBits::eAllCommands | + PipelineStageFlagBits::eNone | PipelineStageFlagBits::eTransformFeedbackEXT | PipelineStageFlagBits::eConditionalRenderingEXT | + PipelineStageFlagBits::eAccelerationStructureBuildKHR | PipelineStageFlagBits::eRayTracingShaderKHR | PipelineStageFlagBits::eFragmentDensityProcessEXT | + PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR | PipelineStageFlagBits::eTaskShaderEXT | PipelineStageFlagBits::eMeshShaderEXT | + PipelineStageFlagBits::eCommandPreprocessEXT; + }; + + // wrapper class for enum VkMemoryMapFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryMapFlagBits.html + enum class MemoryMapFlagBits : VkMemoryMapFlags + { + ePlacedEXT = VK_MEMORY_MAP_PLACED_BIT_EXT + }; + + // wrapper using for bitmask VkMemoryMapFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryMapFlags.html + using MemoryMapFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkMemoryMapFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryMapFlags allFlags = MemoryMapFlagBits::ePlacedEXT; + }; + + // wrapper class for enum VkImageAspectFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageAspectFlagBits.html + enum class ImageAspectFlagBits : VkImageAspectFlags + { + eColor = VK_IMAGE_ASPECT_COLOR_BIT, + eDepth = VK_IMAGE_ASPECT_DEPTH_BIT, + eStencil = VK_IMAGE_ASPECT_STENCIL_BIT, + eMetadata = VK_IMAGE_ASPECT_METADATA_BIT, + ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT, + ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR, + ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT, + ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR, + ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT, + ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR, + eNone = VK_IMAGE_ASPECT_NONE, + eNoneKHR = VK_IMAGE_ASPECT_NONE_KHR, + eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT, + eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT, + eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT, + eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT + }; + + // wrapper using for bitmask VkImageAspectFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageAspectFlags.html + using ImageAspectFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkImageAspectFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImageAspectFlags allFlags = ImageAspectFlagBits::eColor | ImageAspectFlagBits::eDepth | ImageAspectFlagBits::eStencil | + ImageAspectFlagBits::eMetadata | ImageAspectFlagBits::ePlane0 | + ImageAspectFlagBits::ePlane1 | ImageAspectFlagBits::ePlane2 | ImageAspectFlagBits::eNone | + ImageAspectFlagBits::eMemoryPlane0EXT | ImageAspectFlagBits::eMemoryPlane1EXT | + ImageAspectFlagBits::eMemoryPlane2EXT | 
ImageAspectFlagBits::eMemoryPlane3EXT; + }; + + // wrapper class for enum VkSparseImageFormatFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageFormatFlagBits.html + enum class SparseImageFormatFlagBits : VkSparseImageFormatFlags + { + eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT, + eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT, + eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT + }; + + // wrapper using for bitmask VkSparseImageFormatFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageFormatFlags.html + using SparseImageFormatFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkSparseImageFormatFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SparseImageFormatFlags allFlags = + SparseImageFormatFlagBits::eSingleMiptail | SparseImageFormatFlagBits::eAlignedMipSize | SparseImageFormatFlagBits::eNonstandardBlockSize; + }; + + // wrapper class for enum VkSparseMemoryBindFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseMemoryBindFlagBits.html + enum class SparseMemoryBindFlagBits : VkSparseMemoryBindFlags + { + eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT + }; + + // wrapper using for bitmask VkSparseMemoryBindFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseMemoryBindFlags.html + using SparseMemoryBindFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkSparseMemoryBindFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SparseMemoryBindFlags allFlags = SparseMemoryBindFlagBits::eMetadata; + }; + + // wrapper class for enum VkFenceCreateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFenceCreateFlagBits.html + enum class FenceCreateFlagBits : VkFenceCreateFlags + { + eSignaled = VK_FENCE_CREATE_SIGNALED_BIT + }; + + // wrapper using for bitmask VkFenceCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFenceCreateFlags.html + using FenceCreateFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkFenceCreateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR FenceCreateFlags allFlags = FenceCreateFlagBits::eSignaled; + }; + + enum class SemaphoreCreateFlagBits : VkSemaphoreCreateFlags + { + }; + + // wrapper using for bitmask VkSemaphoreCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreCreateFlags.html + using SemaphoreCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SemaphoreCreateFlags allFlags = {}; + }; + + // wrapper class for enum VkEventCreateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkEventCreateFlagBits.html + enum class EventCreateFlagBits : VkEventCreateFlags + { + eDeviceOnly = VK_EVENT_CREATE_DEVICE_ONLY_BIT, + eDeviceOnlyKHR = VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR + }; + + // wrapper using for bitmask VkEventCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkEventCreateFlags.html + using EventCreateFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkEventCreateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
EventCreateFlags allFlags = EventCreateFlagBits::eDeviceOnly; + }; + + // wrapper class for enum VkQueryPipelineStatisticFlagBits, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPipelineStatisticFlagBits.html + enum class QueryPipelineStatisticFlagBits : VkQueryPipelineStatisticFlags + { + eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT, + eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT, + eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT, + eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT, + eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT, + eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT, + eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT, + eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT, + eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT, + eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT, + eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT, + eTaskShaderInvocationsEXT = VK_QUERY_PIPELINE_STATISTIC_TASK_SHADER_INVOCATIONS_BIT_EXT, + eMeshShaderInvocationsEXT = VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT, + eClusterCullingShaderInvocationsHUAWEI = VK_QUERY_PIPELINE_STATISTIC_CLUSTER_CULLING_SHADER_INVOCATIONS_BIT_HUAWEI + }; + + // wrapper using for bitmask VkQueryPipelineStatisticFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPipelineStatisticFlags.html + using QueryPipelineStatisticFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkQueryPipelineStatisticFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR QueryPipelineStatisticFlags allFlags = + QueryPipelineStatisticFlagBits::eInputAssemblyVertices | QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives | + QueryPipelineStatisticFlagBits::eVertexShaderInvocations | QueryPipelineStatisticFlagBits::eGeometryShaderInvocations | + QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives | QueryPipelineStatisticFlagBits::eClippingInvocations | + QueryPipelineStatisticFlagBits::eClippingPrimitives | QueryPipelineStatisticFlagBits::eFragmentShaderInvocations | + QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches | QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations | + QueryPipelineStatisticFlagBits::eComputeShaderInvocations | QueryPipelineStatisticFlagBits::eTaskShaderInvocationsEXT | + QueryPipelineStatisticFlagBits::eMeshShaderInvocationsEXT | QueryPipelineStatisticFlagBits::eClusterCullingShaderInvocationsHUAWEI; + }; + + // wrapper class for enum VkQueryResultFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryResultFlagBits.html + enum class QueryResultFlagBits : VkQueryResultFlags + { + e64 = VK_QUERY_RESULT_64_BIT, + eWait = VK_QUERY_RESULT_WAIT_BIT, + eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, + ePartial = VK_QUERY_RESULT_PARTIAL_BIT, + eWithStatusKHR = VK_QUERY_RESULT_WITH_STATUS_BIT_KHR + }; + + // wrapper using for bitmask VkQueryResultFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryResultFlags.html + using QueryResultFlags = Flags; 
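+
+  // NOTE: as in vulkan.hpp, every Flags alias above is parameterized on its FlagBits type (e.g. Flags<QueryResultFlagBits>)
+  // and every FlagTraits specialization is likewise FlagTraits<...FlagBits>; the bitmask wrapper supplies the usual
+  // bitwise operators. A minimal, illustrative-only usage sketch:
+  //
+  //   QueryResultFlags query_flags = QueryResultFlagBits::e64 | QueryResultFlagBits::eWait;
+  //
+  //   if ( query_flags & QueryResultFlagBits::eWait )
+  //   {
+  //     // with VK_QUERY_RESULT_WAIT_BIT set, vkGetQueryPoolResults() blocks until the requested results are available
+  //   }
+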
+ + template <> + struct FlagTraits + { + using WrappedType = VkQueryResultFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR QueryResultFlags allFlags = QueryResultFlagBits::e64 | QueryResultFlagBits::eWait | + QueryResultFlagBits::eWithAvailability | QueryResultFlagBits::ePartial | + QueryResultFlagBits::eWithStatusKHR; + }; + + // wrapper class for enum VkQueryType, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryType.html + enum class QueryType + { + eOcclusion = VK_QUERY_TYPE_OCCLUSION, + ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS, + eTimestamp = VK_QUERY_TYPE_TIMESTAMP, + eResultStatusOnlyKHR = VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR, + eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT, + ePerformanceQueryKHR = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, + eAccelerationStructureCompactedSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, + eAccelerationStructureSerializationSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR, + eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV, + ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL, + eVideoEncodeFeedbackKHR = VK_QUERY_TYPE_VIDEO_ENCODE_FEEDBACK_KHR, + eMeshPrimitivesGeneratedEXT = VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT, + ePrimitivesGeneratedEXT = VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT, + eAccelerationStructureSerializationBottomLevelPointersKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR, + eAccelerationStructureSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR, + eMicromapSerializationSizeEXT = VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT, + eMicromapCompactedSizeEXT = VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT + }; + + enum class QueryPoolCreateFlagBits : VkQueryPoolCreateFlags + { + }; + + // wrapper using for bitmask VkQueryPoolCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPoolCreateFlags.html + using QueryPoolCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR QueryPoolCreateFlags allFlags = {}; + }; + + // wrapper class for enum VkBufferCreateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCreateFlagBits.html + enum class BufferCreateFlagBits : VkBufferCreateFlags + { + eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT, + eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT, + eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT, + eProtected = VK_BUFFER_CREATE_PROTECTED_BIT, + eDeviceAddressCaptureReplay = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, + eDeviceAddressCaptureReplayEXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT, + eDeviceAddressCaptureReplayKHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR, + eDescriptorBufferCaptureReplayEXT = VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT, + eVideoProfileIndependentKHR = VK_BUFFER_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR + }; + + // wrapper using for bitmask VkBufferCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCreateFlags.html + using BufferCreateFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkBufferCreateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR BufferCreateFlags allFlags = + 
BufferCreateFlagBits::eSparseBinding | BufferCreateFlagBits::eSparseResidency | BufferCreateFlagBits::eSparseAliased | BufferCreateFlagBits::eProtected | + BufferCreateFlagBits::eDeviceAddressCaptureReplay | BufferCreateFlagBits::eDescriptorBufferCaptureReplayEXT | + BufferCreateFlagBits::eVideoProfileIndependentKHR; + }; + + // wrapper class for enum VkBufferUsageFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferUsageFlagBits.html + enum class BufferUsageFlagBits : VkBufferUsageFlags + { + eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT, + eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, + eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, + eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, + eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT, + eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, + eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, + eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, + eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT, + eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR, + eVideoDecodeSrcKHR = VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR, + eVideoDecodeDstKHR = VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR, + eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, + eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT, + eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eExecutionGraphScratchAMDX = VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eAccelerationStructureBuildInputReadOnlyKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR, + eAccelerationStructureStorageKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR, + eShaderBindingTableKHR = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR, + eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, + eVideoEncodeDstKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR, + eVideoEncodeSrcKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, + eSamplerDescriptorBufferEXT = VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT, + eResourceDescriptorBufferEXT = VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT, + ePushDescriptorsDescriptorBufferEXT = VK_BUFFER_USAGE_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT, + eMicromapBuildInputReadOnlyEXT = VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT, + eMicromapStorageEXT = VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT, + eTileMemoryQCOM = VK_BUFFER_USAGE_TILE_MEMORY_QCOM + }; + + // wrapper using for bitmask VkBufferUsageFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferUsageFlags.html + using BufferUsageFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkBufferUsageFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR BufferUsageFlags allFlags = + BufferUsageFlagBits::eTransferSrc | BufferUsageFlagBits::eTransferDst | BufferUsageFlagBits::eUniformTexelBuffer | + BufferUsageFlagBits::eStorageTexelBuffer | BufferUsageFlagBits::eUniformBuffer | BufferUsageFlagBits::eStorageBuffer | BufferUsageFlagBits::eIndexBuffer | + BufferUsageFlagBits::eVertexBuffer | BufferUsageFlagBits::eIndirectBuffer | BufferUsageFlagBits::eShaderDeviceAddress | + 
BufferUsageFlagBits::eVideoDecodeSrcKHR | BufferUsageFlagBits::eVideoDecodeDstKHR | BufferUsageFlagBits::eTransformFeedbackBufferEXT | + BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT | BufferUsageFlagBits::eConditionalRenderingEXT +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | BufferUsageFlagBits::eExecutionGraphScratchAMDX +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR | BufferUsageFlagBits::eAccelerationStructureStorageKHR | + BufferUsageFlagBits::eShaderBindingTableKHR | BufferUsageFlagBits::eVideoEncodeDstKHR | BufferUsageFlagBits::eVideoEncodeSrcKHR | + BufferUsageFlagBits::eSamplerDescriptorBufferEXT | BufferUsageFlagBits::eResourceDescriptorBufferEXT | + BufferUsageFlagBits::ePushDescriptorsDescriptorBufferEXT | BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT | + BufferUsageFlagBits::eMicromapStorageEXT | BufferUsageFlagBits::eTileMemoryQCOM; + }; + + // wrapper class for enum VkSharingMode, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSharingMode.html + enum class SharingMode + { + eExclusive = VK_SHARING_MODE_EXCLUSIVE, + eConcurrent = VK_SHARING_MODE_CONCURRENT + }; + + enum class BufferViewCreateFlagBits : VkBufferViewCreateFlags + { + }; + + // wrapper using for bitmask VkBufferViewCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferViewCreateFlags.html + using BufferViewCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR BufferViewCreateFlags allFlags = {}; + }; + + // wrapper class for enum VkImageLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageLayout.html + enum class ImageLayout + { + eUndefined = VK_IMAGE_LAYOUT_UNDEFINED, + eGeneral = VK_IMAGE_LAYOUT_GENERAL, + eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, + eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, + eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, + eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, + eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, + ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED, + eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, + eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR, + eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, + eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR, + eDepthAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, + eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, + eDepthReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, + eDepthReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, + eStencilAttachmentOptimal = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, + eStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, + eStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL, + eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR, + eReadOnlyOptimal = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL, + eReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR, + eAttachmentOptimal = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL, + 
eAttachmentOptimalKHR = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR, + eRenderingLocalRead = VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ, + eRenderingLocalReadKHR = VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ_KHR, + ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, + eVideoDecodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR, + eVideoDecodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR, + eVideoDecodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR, + eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, + eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, + eFragmentShadingRateAttachmentOptimalKHR = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR, + eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV, + eVideoEncodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR, + eVideoEncodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR, + eVideoEncodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR, + eAttachmentFeedbackLoopOptimalEXT = VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT, + eVideoEncodeQuantizationMapKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_QUANTIZATION_MAP_KHR + }; + + // wrapper class for enum VkComponentSwizzle, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkComponentSwizzle.html + enum class ComponentSwizzle + { + eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY, + eZero = VK_COMPONENT_SWIZZLE_ZERO, + eOne = VK_COMPONENT_SWIZZLE_ONE, + eR = VK_COMPONENT_SWIZZLE_R, + eG = VK_COMPONENT_SWIZZLE_G, + eB = VK_COMPONENT_SWIZZLE_B, + eA = VK_COMPONENT_SWIZZLE_A + }; + + // wrapper class for enum VkImageViewCreateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewCreateFlagBits.html + enum class ImageViewCreateFlagBits : VkImageViewCreateFlags + { + eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT, + eDescriptorBufferCaptureReplayEXT = VK_IMAGE_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT, + eFragmentDensityMapDeferredEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT + }; + + // wrapper using for bitmask VkImageViewCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewCreateFlags.html + using ImageViewCreateFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkImageViewCreateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImageViewCreateFlags allFlags = ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT | + ImageViewCreateFlagBits::eDescriptorBufferCaptureReplayEXT | + ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT; + }; + + // wrapper class for enum VkImageViewType, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewType.html + enum class ImageViewType + { + e1D = VK_IMAGE_VIEW_TYPE_1D, + e2D = VK_IMAGE_VIEW_TYPE_2D, + e3D = VK_IMAGE_VIEW_TYPE_3D, + eCube = VK_IMAGE_VIEW_TYPE_CUBE, + e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY, + e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY, + eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY + }; + + enum class ShaderModuleCreateFlagBits : VkShaderModuleCreateFlags + { + }; + + // wrapper using for bitmask VkShaderModuleCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderModuleCreateFlags.html + using ShaderModuleCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderModuleCreateFlags allFlags = {}; + }; + + // wrapper class for enum 
VkPipelineCacheCreateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCacheCreateFlagBits.html + enum class PipelineCacheCreateFlagBits : VkPipelineCacheCreateFlags + { + eExternallySynchronized = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT, + eExternallySynchronizedEXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT, + eInternallySynchronizedMergeKHR = VK_PIPELINE_CACHE_CREATE_INTERNALLY_SYNCHRONIZED_MERGE_BIT_KHR + }; + + // wrapper using for bitmask VkPipelineCacheCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCacheCreateFlags.html + using PipelineCacheCreateFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkPipelineCacheCreateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCacheCreateFlags allFlags = + PipelineCacheCreateFlagBits::eExternallySynchronized | PipelineCacheCreateFlagBits::eInternallySynchronizedMergeKHR; + }; + + // wrapper class for enum VkBlendFactor, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBlendFactor.html + enum class BlendFactor + { + eZero = VK_BLEND_FACTOR_ZERO, + eOne = VK_BLEND_FACTOR_ONE, + eSrcColor = VK_BLEND_FACTOR_SRC_COLOR, + eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR, + eDstColor = VK_BLEND_FACTOR_DST_COLOR, + eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR, + eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA, + eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA, + eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA, + eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA, + eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR, + eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR, + eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA, + eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA, + eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE, + eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR, + eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, + eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA, + eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA + }; + + // wrapper class for enum VkBlendOp, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBlendOp.html + enum class BlendOp + { + eAdd = VK_BLEND_OP_ADD, + eSubtract = VK_BLEND_OP_SUBTRACT, + eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT, + eMin = VK_BLEND_OP_MIN, + eMax = VK_BLEND_OP_MAX, + eZeroEXT = VK_BLEND_OP_ZERO_EXT, + eSrcEXT = VK_BLEND_OP_SRC_EXT, + eDstEXT = VK_BLEND_OP_DST_EXT, + eSrcOverEXT = VK_BLEND_OP_SRC_OVER_EXT, + eDstOverEXT = VK_BLEND_OP_DST_OVER_EXT, + eSrcInEXT = VK_BLEND_OP_SRC_IN_EXT, + eDstInEXT = VK_BLEND_OP_DST_IN_EXT, + eSrcOutEXT = VK_BLEND_OP_SRC_OUT_EXT, + eDstOutEXT = VK_BLEND_OP_DST_OUT_EXT, + eSrcAtopEXT = VK_BLEND_OP_SRC_ATOP_EXT, + eDstAtopEXT = VK_BLEND_OP_DST_ATOP_EXT, + eXorEXT = VK_BLEND_OP_XOR_EXT, + eMultiplyEXT = VK_BLEND_OP_MULTIPLY_EXT, + eScreenEXT = VK_BLEND_OP_SCREEN_EXT, + eOverlayEXT = VK_BLEND_OP_OVERLAY_EXT, + eDarkenEXT = VK_BLEND_OP_DARKEN_EXT, + eLightenEXT = VK_BLEND_OP_LIGHTEN_EXT, + eColordodgeEXT = VK_BLEND_OP_COLORDODGE_EXT, + eColorburnEXT = VK_BLEND_OP_COLORBURN_EXT, + eHardlightEXT = VK_BLEND_OP_HARDLIGHT_EXT, + eSoftlightEXT = VK_BLEND_OP_SOFTLIGHT_EXT, + eDifferenceEXT = VK_BLEND_OP_DIFFERENCE_EXT, + eExclusionEXT = VK_BLEND_OP_EXCLUSION_EXT, + eInvertEXT = VK_BLEND_OP_INVERT_EXT, + eInvertRgbEXT = VK_BLEND_OP_INVERT_RGB_EXT, + eLineardodgeEXT = VK_BLEND_OP_LINEARDODGE_EXT, + eLinearburnEXT = 
VK_BLEND_OP_LINEARBURN_EXT, + eVividlightEXT = VK_BLEND_OP_VIVIDLIGHT_EXT, + eLinearlightEXT = VK_BLEND_OP_LINEARLIGHT_EXT, + ePinlightEXT = VK_BLEND_OP_PINLIGHT_EXT, + eHardmixEXT = VK_BLEND_OP_HARDMIX_EXT, + eHslHueEXT = VK_BLEND_OP_HSL_HUE_EXT, + eHslSaturationEXT = VK_BLEND_OP_HSL_SATURATION_EXT, + eHslColorEXT = VK_BLEND_OP_HSL_COLOR_EXT, + eHslLuminosityEXT = VK_BLEND_OP_HSL_LUMINOSITY_EXT, + ePlusEXT = VK_BLEND_OP_PLUS_EXT, + ePlusClampedEXT = VK_BLEND_OP_PLUS_CLAMPED_EXT, + ePlusClampedAlphaEXT = VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT, + ePlusDarkerEXT = VK_BLEND_OP_PLUS_DARKER_EXT, + eMinusEXT = VK_BLEND_OP_MINUS_EXT, + eMinusClampedEXT = VK_BLEND_OP_MINUS_CLAMPED_EXT, + eContrastEXT = VK_BLEND_OP_CONTRAST_EXT, + eInvertOvgEXT = VK_BLEND_OP_INVERT_OVG_EXT, + eRedEXT = VK_BLEND_OP_RED_EXT, + eGreenEXT = VK_BLEND_OP_GREEN_EXT, + eBlueEXT = VK_BLEND_OP_BLUE_EXT + }; + + // wrapper class for enum VkColorComponentFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkColorComponentFlagBits.html + enum class ColorComponentFlagBits : VkColorComponentFlags + { + eR = VK_COLOR_COMPONENT_R_BIT, + eG = VK_COLOR_COMPONENT_G_BIT, + eB = VK_COLOR_COMPONENT_B_BIT, + eA = VK_COLOR_COMPONENT_A_BIT + }; + + // wrapper using for bitmask VkColorComponentFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkColorComponentFlags.html + using ColorComponentFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkColorComponentFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ColorComponentFlags allFlags = + ColorComponentFlagBits::eR | ColorComponentFlagBits::eG | ColorComponentFlagBits::eB | ColorComponentFlagBits::eA; + }; + + // wrapper class for enum VkCompareOp, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCompareOp.html + enum class CompareOp + { + eNever = VK_COMPARE_OP_NEVER, + eLess = VK_COMPARE_OP_LESS, + eEqual = VK_COMPARE_OP_EQUAL, + eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL, + eGreater = VK_COMPARE_OP_GREATER, + eNotEqual = VK_COMPARE_OP_NOT_EQUAL, + eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL, + eAlways = VK_COMPARE_OP_ALWAYS + }; + + // wrapper class for enum VkCullModeFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCullModeFlagBits.html + enum class CullModeFlagBits : VkCullModeFlags + { + eNone = VK_CULL_MODE_NONE, + eFront = VK_CULL_MODE_FRONT_BIT, + eBack = VK_CULL_MODE_BACK_BIT, + eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK + }; + + // wrapper using for bitmask VkCullModeFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCullModeFlags.html + using CullModeFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkCullModeFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CullModeFlags allFlags = + CullModeFlagBits::eNone | CullModeFlagBits::eFront | CullModeFlagBits::eBack | CullModeFlagBits::eFrontAndBack; + }; + + // wrapper class for enum VkDynamicState, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDynamicState.html + enum class DynamicState + { + eViewport = VK_DYNAMIC_STATE_VIEWPORT, + eScissor = VK_DYNAMIC_STATE_SCISSOR, + eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH, + eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS, + eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS, + eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS, + eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, + eStencilWriteMask = 
VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, + eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE, + eCullMode = VK_DYNAMIC_STATE_CULL_MODE, + eCullModeEXT = VK_DYNAMIC_STATE_CULL_MODE_EXT, + eFrontFace = VK_DYNAMIC_STATE_FRONT_FACE, + eFrontFaceEXT = VK_DYNAMIC_STATE_FRONT_FACE_EXT, + ePrimitiveTopology = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY, + ePrimitiveTopologyEXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT, + eViewportWithCount = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT, + eViewportWithCountEXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT, + eScissorWithCount = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT, + eScissorWithCountEXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT, + eVertexInputBindingStride = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE, + eVertexInputBindingStrideEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT, + eDepthTestEnable = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE, + eDepthTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT, + eDepthWriteEnable = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE, + eDepthWriteEnableEXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT, + eDepthCompareOp = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP, + eDepthCompareOpEXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT, + eDepthBoundsTestEnable = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE, + eDepthBoundsTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT, + eStencilTestEnable = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE, + eStencilTestEnableEXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT, + eStencilOp = VK_DYNAMIC_STATE_STENCIL_OP, + eStencilOpEXT = VK_DYNAMIC_STATE_STENCIL_OP_EXT, + eRasterizerDiscardEnable = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE, + eRasterizerDiscardEnableEXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT, + eDepthBiasEnable = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE, + eDepthBiasEnableEXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT, + ePrimitiveRestartEnable = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE, + ePrimitiveRestartEnableEXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT, + eLineStipple = VK_DYNAMIC_STATE_LINE_STIPPLE, + eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, + eLineStippleKHR = VK_DYNAMIC_STATE_LINE_STIPPLE_KHR, + eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, + eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, + eDiscardRectangleEnableEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_ENABLE_EXT, + eDiscardRectangleModeEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_MODE_EXT, + eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, + eRayTracingPipelineStackSizeKHR = VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR, + eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV, + eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV, + eExclusiveScissorEnableNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_ENABLE_NV, + eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV, + eFragmentShadingRateKHR = VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR, + eVertexInputEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_EXT, + ePatchControlPointsEXT = VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT, + eLogicOpEXT = VK_DYNAMIC_STATE_LOGIC_OP_EXT, + eColorWriteEnableEXT = VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT, + eDepthClampEnableEXT = VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT, + ePolygonModeEXT = VK_DYNAMIC_STATE_POLYGON_MODE_EXT, + eRasterizationSamplesEXT = VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT, + eSampleMaskEXT = VK_DYNAMIC_STATE_SAMPLE_MASK_EXT, + eAlphaToCoverageEnableEXT = VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT, + eAlphaToOneEnableEXT = 
VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT, + eLogicOpEnableEXT = VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT, + eColorBlendEnableEXT = VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT, + eColorBlendEquationEXT = VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT, + eColorWriteMaskEXT = VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, + eTessellationDomainOriginEXT = VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT, + eRasterizationStreamEXT = VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT, + eConservativeRasterizationModeEXT = VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT, + eExtraPrimitiveOverestimationSizeEXT = VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT, + eDepthClipEnableEXT = VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT, + eSampleLocationsEnableEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT, + eColorBlendAdvancedEXT = VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT, + eProvokingVertexModeEXT = VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT, + eLineRasterizationModeEXT = VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT, + eLineStippleEnableEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT, + eDepthClipNegativeOneToOneEXT = VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT, + eViewportWScalingEnableNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV, + eViewportSwizzleNV = VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV, + eCoverageToColorEnableNV = VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV, + eCoverageToColorLocationNV = VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV, + eCoverageModulationModeNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV, + eCoverageModulationTableEnableNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV, + eCoverageModulationTableNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV, + eShadingRateImageEnableNV = VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV, + eRepresentativeFragmentTestEnableNV = VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV, + eCoverageReductionModeNV = VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV, + eAttachmentFeedbackLoopEnableEXT = VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT, + eDepthClampRangeEXT = VK_DYNAMIC_STATE_DEPTH_CLAMP_RANGE_EXT + }; + + // wrapper class for enum VkFrontFace, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFrontFace.html + enum class FrontFace + { + eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE, + eClockwise = VK_FRONT_FACE_CLOCKWISE + }; + + // wrapper class for enum VkLogicOp, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLogicOp.html + enum class LogicOp + { + eClear = VK_LOGIC_OP_CLEAR, + eAnd = VK_LOGIC_OP_AND, + eAndReverse = VK_LOGIC_OP_AND_REVERSE, + eCopy = VK_LOGIC_OP_COPY, + eAndInverted = VK_LOGIC_OP_AND_INVERTED, + eNoOp = VK_LOGIC_OP_NO_OP, + eXor = VK_LOGIC_OP_XOR, + eOr = VK_LOGIC_OP_OR, + eNor = VK_LOGIC_OP_NOR, + eEquivalent = VK_LOGIC_OP_EQUIVALENT, + eInvert = VK_LOGIC_OP_INVERT, + eOrReverse = VK_LOGIC_OP_OR_REVERSE, + eCopyInverted = VK_LOGIC_OP_COPY_INVERTED, + eOrInverted = VK_LOGIC_OP_OR_INVERTED, + eNand = VK_LOGIC_OP_NAND, + eSet = VK_LOGIC_OP_SET + }; + + // wrapper class for enum VkPipelineCreateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCreateFlagBits.html + enum class PipelineCreateFlagBits : VkPipelineCreateFlags + { + eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT, + eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT, + eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT, + eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + eViewIndexFromDeviceIndexKHR = 
VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR, + eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, + eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR, + eFailOnPipelineCompileRequired = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT, + eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT, + eEarlyReturnOnFailure = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT, + eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT, + eNoProtectedAccess = VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT, + eNoProtectedAccessEXT = VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT, + eProtectedAccessOnly = VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT, + eProtectedAccessOnlyEXT = VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT, + eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, + eRayTracingNoNullIntersectionShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, + eRayTracingSkipTrianglesKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR, + eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR, + eRayTracingShaderGroupHandleCaptureReplayKHR = VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR, + eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV, + eRenderingFragmentDensityMapAttachmentEXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eVkPipelineRasterizationStateCreateFragmentDensityMapAttachmentEXT = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eRenderingFragmentShadingRateAttachmentKHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eVkPipelineRasterizationStateCreateFragmentShadingRateAttachmentKHR = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR, + eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, + eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, + eLibraryKHR = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR, + eDescriptorBufferEXT = VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT, + eRetainLinkTimeOptimizationInfoEXT = VK_PIPELINE_CREATE_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT, + eLinkTimeOptimizationEXT = VK_PIPELINE_CREATE_LINK_TIME_OPTIMIZATION_BIT_EXT, + eRayTracingAllowMotionNV = VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV, + eColorAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, + eDepthStencilAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, + eRayTracingOpacityMicromapEXT = VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eRayTracingDisplacementMicromapNV = VK_PIPELINE_CREATE_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + + // wrapper using for bitmask VkPipelineCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCreateFlags.html + using PipelineCreateFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkPipelineCreateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = 
true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreateFlags allFlags = + PipelineCreateFlagBits::eDisableOptimization | PipelineCreateFlagBits::eAllowDerivatives | PipelineCreateFlagBits::eDerivative | + PipelineCreateFlagBits::eViewIndexFromDeviceIndex | PipelineCreateFlagBits::eDispatchBase | PipelineCreateFlagBits::eFailOnPipelineCompileRequired | + PipelineCreateFlagBits::eEarlyReturnOnFailure | PipelineCreateFlagBits::eNoProtectedAccess | PipelineCreateFlagBits::eProtectedAccessOnly | + PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR | PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR | + PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR | PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR | + PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR | PipelineCreateFlagBits::eRayTracingSkipAabbsKHR | + PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR | PipelineCreateFlagBits::eDeferCompileNV | + PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT | PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR | + PipelineCreateFlagBits::eCaptureStatisticsKHR | PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR | PipelineCreateFlagBits::eIndirectBindableNV | + PipelineCreateFlagBits::eLibraryKHR | PipelineCreateFlagBits::eDescriptorBufferEXT | PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT | + PipelineCreateFlagBits::eLinkTimeOptimizationEXT | PipelineCreateFlagBits::eRayTracingAllowMotionNV | + PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT | PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT | + PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | PipelineCreateFlagBits::eRayTracingDisplacementMicromapNV +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + ; + }; + + // wrapper class for enum VkPipelineShaderStageCreateFlagBits, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineShaderStageCreateFlagBits.html + enum class PipelineShaderStageCreateFlagBits : VkPipelineShaderStageCreateFlags + { + eAllowVaryingSubgroupSize = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT, + eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT, + eRequireFullSubgroups = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT, + eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT + }; + + // wrapper using for bitmask VkPipelineShaderStageCreateFlags, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineShaderStageCreateFlags.html + using PipelineShaderStageCreateFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkPipelineShaderStageCreateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineShaderStageCreateFlags allFlags = + PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize | PipelineShaderStageCreateFlagBits::eRequireFullSubgroups; + }; + + // wrapper class for enum VkPolygonMode, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPolygonMode.html + enum class PolygonMode + { + eFill = VK_POLYGON_MODE_FILL, + eLine = VK_POLYGON_MODE_LINE, + ePoint = VK_POLYGON_MODE_POINT, + eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV + }; + + // wrapper class for enum VkPrimitiveTopology, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPrimitiveTopology.html + enum class 
PrimitiveTopology + { + ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST, + eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST, + eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP, + eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, + eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP, + eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN, + eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY, + eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY, + eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY, + eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY, + ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST + }; + + // wrapper class for enum VkShaderStageFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderStageFlagBits.html + enum class ShaderStageFlagBits : VkShaderStageFlags + { + eVertex = VK_SHADER_STAGE_VERTEX_BIT, + eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, + eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, + eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT, + eFragment = VK_SHADER_STAGE_FRAGMENT_BIT, + eCompute = VK_SHADER_STAGE_COMPUTE_BIT, + eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS, + eAll = VK_SHADER_STAGE_ALL, + eRaygenKHR = VK_SHADER_STAGE_RAYGEN_BIT_KHR, + eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV, + eAnyHitKHR = VK_SHADER_STAGE_ANY_HIT_BIT_KHR, + eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV, + eClosestHitKHR = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, + eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, + eMissKHR = VK_SHADER_STAGE_MISS_BIT_KHR, + eMissNV = VK_SHADER_STAGE_MISS_BIT_NV, + eIntersectionKHR = VK_SHADER_STAGE_INTERSECTION_BIT_KHR, + eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV, + eCallableKHR = VK_SHADER_STAGE_CALLABLE_BIT_KHR, + eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV, + eTaskEXT = VK_SHADER_STAGE_TASK_BIT_EXT, + eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV, + eMeshEXT = VK_SHADER_STAGE_MESH_BIT_EXT, + eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV, + eSubpassShadingHUAWEI = VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI, + eClusterCullingHUAWEI = VK_SHADER_STAGE_CLUSTER_CULLING_BIT_HUAWEI + }; + + // wrapper using for bitmask VkShaderStageFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderStageFlags.html + using ShaderStageFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkShaderStageFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderStageFlags allFlags = + ShaderStageFlagBits::eVertex | ShaderStageFlagBits::eTessellationControl | ShaderStageFlagBits::eTessellationEvaluation | ShaderStageFlagBits::eGeometry | + ShaderStageFlagBits::eFragment | ShaderStageFlagBits::eCompute | ShaderStageFlagBits::eAllGraphics | ShaderStageFlagBits::eAll | + ShaderStageFlagBits::eRaygenKHR | ShaderStageFlagBits::eAnyHitKHR | ShaderStageFlagBits::eClosestHitKHR | ShaderStageFlagBits::eMissKHR | + ShaderStageFlagBits::eIntersectionKHR | ShaderStageFlagBits::eCallableKHR | ShaderStageFlagBits::eTaskEXT | ShaderStageFlagBits::eMeshEXT | + ShaderStageFlagBits::eSubpassShadingHUAWEI | ShaderStageFlagBits::eClusterCullingHUAWEI; + }; + + // wrapper class for enum VkStencilOp, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkStencilOp.html + enum class StencilOp + { + eKeep = VK_STENCIL_OP_KEEP, + eZero = VK_STENCIL_OP_ZERO, + eReplace = VK_STENCIL_OP_REPLACE, + eIncrementAndClamp = 
VK_STENCIL_OP_INCREMENT_AND_CLAMP, + eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP, + eInvert = VK_STENCIL_OP_INVERT, + eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP, + eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP + }; + + // wrapper class for enum VkVertexInputRate, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVertexInputRate.html + enum class VertexInputRate + { + eVertex = VK_VERTEX_INPUT_RATE_VERTEX, + eInstance = VK_VERTEX_INPUT_RATE_INSTANCE + }; + + // wrapper class for enum VkPipelineColorBlendStateCreateFlagBits, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineColorBlendStateCreateFlagBits.html + enum class PipelineColorBlendStateCreateFlagBits : VkPipelineColorBlendStateCreateFlags + { + eRasterizationOrderAttachmentAccessEXT = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT, + eRasterizationOrderAttachmentAccessARM = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM + }; + + // wrapper using for bitmask VkPipelineColorBlendStateCreateFlags, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineColorBlendStateCreateFlags.html + using PipelineColorBlendStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkPipelineColorBlendStateCreateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineColorBlendStateCreateFlags allFlags = + PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessEXT; + }; + + // wrapper class for enum VkPipelineDepthStencilStateCreateFlagBits, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineDepthStencilStateCreateFlagBits.html + enum class PipelineDepthStencilStateCreateFlagBits : VkPipelineDepthStencilStateCreateFlags + { + eRasterizationOrderAttachmentDepthAccessEXT = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT, + eRasterizationOrderAttachmentDepthAccessARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM, + eRasterizationOrderAttachmentStencilAccessEXT = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT, + eRasterizationOrderAttachmentStencilAccessARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM + }; + + // wrapper using for bitmask VkPipelineDepthStencilStateCreateFlags, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineDepthStencilStateCreateFlags.html + using PipelineDepthStencilStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkPipelineDepthStencilStateCreateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineDepthStencilStateCreateFlags allFlags = + PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessEXT | + PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessEXT; + }; + + enum class PipelineDynamicStateCreateFlagBits : VkPipelineDynamicStateCreateFlags + { + }; + + // wrapper using for bitmask VkPipelineDynamicStateCreateFlags, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineDynamicStateCreateFlags.html + using PipelineDynamicStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = 
true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineDynamicStateCreateFlags allFlags = {}; + }; + + enum class PipelineInputAssemblyStateCreateFlagBits : VkPipelineInputAssemblyStateCreateFlags + { + }; + + // wrapper using for bitmask VkPipelineInputAssemblyStateCreateFlags, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineInputAssemblyStateCreateFlags.html + using PipelineInputAssemblyStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineInputAssemblyStateCreateFlags allFlags = {}; + }; + + // wrapper class for enum VkPipelineLayoutCreateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineLayoutCreateFlagBits.html + enum class PipelineLayoutCreateFlagBits : VkPipelineLayoutCreateFlags + { + eIndependentSetsEXT = VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT + }; + + // wrapper using for bitmask VkPipelineLayoutCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineLayoutCreateFlags.html + using PipelineLayoutCreateFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkPipelineLayoutCreateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineLayoutCreateFlags allFlags = PipelineLayoutCreateFlagBits::eIndependentSetsEXT; + }; + + enum class PipelineMultisampleStateCreateFlagBits : VkPipelineMultisampleStateCreateFlags + { + }; + + // wrapper using for bitmask VkPipelineMultisampleStateCreateFlags, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineMultisampleStateCreateFlags.html + using PipelineMultisampleStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineMultisampleStateCreateFlags allFlags = {}; + }; + + enum class PipelineRasterizationStateCreateFlagBits : VkPipelineRasterizationStateCreateFlags + { + }; + + // wrapper using for bitmask VkPipelineRasterizationStateCreateFlags, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationStateCreateFlags.html + using PipelineRasterizationStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationStateCreateFlags allFlags = {}; + }; + + enum class PipelineTessellationStateCreateFlagBits : VkPipelineTessellationStateCreateFlags + { + }; + + // wrapper using for bitmask VkPipelineTessellationStateCreateFlags, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineTessellationStateCreateFlags.html + using PipelineTessellationStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineTessellationStateCreateFlags allFlags = {}; + }; + + enum class PipelineVertexInputStateCreateFlagBits : VkPipelineVertexInputStateCreateFlags + { + }; + + // wrapper using for bitmask VkPipelineVertexInputStateCreateFlags, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineVertexInputStateCreateFlags.html + using PipelineVertexInputStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
PipelineVertexInputStateCreateFlags allFlags = {}; + }; + + enum class PipelineViewportStateCreateFlagBits : VkPipelineViewportStateCreateFlags + { + }; + + // wrapper using for bitmask VkPipelineViewportStateCreateFlags, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportStateCreateFlags.html + using PipelineViewportStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineViewportStateCreateFlags allFlags = {}; + }; + + // wrapper class for enum VkBorderColor, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBorderColor.html + enum class BorderColor + { + eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, + eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK, + eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK, + eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK, + eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE, + eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE, + eFloatCustomEXT = VK_BORDER_COLOR_FLOAT_CUSTOM_EXT, + eIntCustomEXT = VK_BORDER_COLOR_INT_CUSTOM_EXT + }; + + // wrapper class for enum VkFilter, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFilter.html + enum class Filter + { + eNearest = VK_FILTER_NEAREST, + eLinear = VK_FILTER_LINEAR, + eCubicEXT = VK_FILTER_CUBIC_EXT, + eCubicIMG = VK_FILTER_CUBIC_IMG + }; + + // wrapper class for enum VkSamplerAddressMode, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerAddressMode.html + enum class SamplerAddressMode + { + eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT, + eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT, + eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, + eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, + eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, + eMirrorClampToEdgeKHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR + }; + + // wrapper class for enum VkSamplerCreateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerCreateFlagBits.html + enum class SamplerCreateFlagBits : VkSamplerCreateFlags + { + eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT, + eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT, + eDescriptorBufferCaptureReplayEXT = VK_SAMPLER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT, + eNonSeamlessCubeMapEXT = VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT, + eImageProcessingQCOM = VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM + }; + + // wrapper using for bitmask VkSamplerCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerCreateFlags.html + using SamplerCreateFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkSamplerCreateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SamplerCreateFlags allFlags = + SamplerCreateFlagBits::eSubsampledEXT | SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT | + SamplerCreateFlagBits::eDescriptorBufferCaptureReplayEXT | SamplerCreateFlagBits::eNonSeamlessCubeMapEXT | SamplerCreateFlagBits::eImageProcessingQCOM; + }; + + // wrapper class for enum VkSamplerMipmapMode, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerMipmapMode.html + enum class SamplerMipmapMode + { + eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST, + eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR + }; + + 
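+
+  // NOTE: illustrative sketch only - each wrapped enumerator carries the same numeric value as its Vk counterpart,
+  // so a wrapped value can be cast back to the C enum when filling a plain Vulkan create-info structure:
+  //
+  //   VkSamplerCreateInfo sampler_info = {};
+  //
+  //   sampler_info.sType        = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
+  //   sampler_info.magFilter    = static_cast<VkFilter>            ( Filter::eLinear );
+  //   sampler_info.minFilter    = static_cast<VkFilter>            ( Filter::eLinear );
+  //   sampler_info.mipmapMode   = static_cast<VkSamplerMipmapMode> ( SamplerMipmapMode::eLinear );
+  //   sampler_info.addressModeU = static_cast<VkSamplerAddressMode>( SamplerAddressMode::eClampToEdge );
+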
// wrapper class for enum VkDescriptorPoolCreateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorPoolCreateFlagBits.html + enum class DescriptorPoolCreateFlagBits : VkDescriptorPoolCreateFlags + { + eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, + eUpdateAfterBind = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT, + eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT, + eHostOnlyEXT = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT, + eHostOnlyVALVE = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE, + eAllowOverallocationSetsNV = VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_SETS_BIT_NV, + eAllowOverallocationPoolsNV = VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_POOLS_BIT_NV + }; + + // wrapper using for bitmask VkDescriptorPoolCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorPoolCreateFlags.html + using DescriptorPoolCreateFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkDescriptorPoolCreateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorPoolCreateFlags allFlags = + DescriptorPoolCreateFlagBits::eFreeDescriptorSet | DescriptorPoolCreateFlagBits::eUpdateAfterBind | DescriptorPoolCreateFlagBits::eHostOnlyEXT | + DescriptorPoolCreateFlagBits::eAllowOverallocationSetsNV | DescriptorPoolCreateFlagBits::eAllowOverallocationPoolsNV; + }; + + // wrapper class for enum VkDescriptorSetLayoutCreateFlagBits, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutCreateFlagBits.html + enum class DescriptorSetLayoutCreateFlagBits : VkDescriptorSetLayoutCreateFlags + { + eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, + eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT, + ePushDescriptor = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT, + ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, + eDescriptorBufferEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT, + eEmbeddedImmutableSamplersEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT, + eIndirectBindableNV = VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV, + eHostOnlyPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT, + eHostOnlyPoolVALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE, + ePerStageNV = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PER_STAGE_BIT_NV + }; + + // wrapper using for bitmask VkDescriptorSetLayoutCreateFlags, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutCreateFlags.html + using DescriptorSetLayoutCreateFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkDescriptorSetLayoutCreateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorSetLayoutCreateFlags allFlags = + DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool | DescriptorSetLayoutCreateFlagBits::ePushDescriptor | + DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT | DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT | + DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV | DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT | + DescriptorSetLayoutCreateFlagBits::ePerStageNV; + }; + + // wrapper class for enum VkDescriptorType, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorType.html + enum class DescriptorType + { + eSampler = VK_DESCRIPTOR_TYPE_SAMPLER, + eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, + eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, + eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, + eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, + eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, + eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, + eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, + eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, + eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, + eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, + eInlineUniformBlock = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK, + eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, + eAccelerationStructureKHR = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, + eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, + eSampleWeightImageQCOM = VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM, + eBlockMatchImageQCOM = VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM, + eMutableEXT = VK_DESCRIPTOR_TYPE_MUTABLE_EXT, + eMutableVALVE = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, + ePartitionedAccelerationStructureNV = VK_DESCRIPTOR_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_NV + }; + + enum class DescriptorPoolResetFlagBits : VkDescriptorPoolResetFlags + { + }; + + // wrapper using for bitmask VkDescriptorPoolResetFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorPoolResetFlags.html + using DescriptorPoolResetFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorPoolResetFlags allFlags = {}; + }; + + // wrapper class for enum VkAccessFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccessFlagBits.html + enum class AccessFlagBits : VkAccessFlags + { + eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT, + eIndexRead = VK_ACCESS_INDEX_READ_BIT, + eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, + eUniformRead = VK_ACCESS_UNIFORM_READ_BIT, + eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, + eShaderRead = VK_ACCESS_SHADER_READ_BIT, + eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT, + eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, + eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, + eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT, + eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, + eTransferRead = VK_ACCESS_TRANSFER_READ_BIT, + eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT, + eHostRead = VK_ACCESS_HOST_READ_BIT, + eHostWrite = VK_ACCESS_HOST_WRITE_BIT, + eMemoryRead = VK_ACCESS_MEMORY_READ_BIT, + eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT, + eNone = VK_ACCESS_NONE, + eNoneKHR = VK_ACCESS_NONE_KHR, + eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, + eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, + eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, + eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT, + eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, + eAccelerationStructureReadKHR = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, + eAccelerationStructureReadNV = 
VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV, + eAccelerationStructureWriteKHR = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, + eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV, + eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, + eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR, + eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV, + eCommandPreprocessReadEXT = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_EXT, + eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV, + eCommandPreprocessWriteEXT = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_EXT, + eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV + }; + + // wrapper using for bitmask VkAccessFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccessFlags.html + using AccessFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkAccessFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AccessFlags allFlags = + AccessFlagBits::eIndirectCommandRead | AccessFlagBits::eIndexRead | AccessFlagBits::eVertexAttributeRead | AccessFlagBits::eUniformRead | + AccessFlagBits::eInputAttachmentRead | AccessFlagBits::eShaderRead | AccessFlagBits::eShaderWrite | AccessFlagBits::eColorAttachmentRead | + AccessFlagBits::eColorAttachmentWrite | AccessFlagBits::eDepthStencilAttachmentRead | AccessFlagBits::eDepthStencilAttachmentWrite | + AccessFlagBits::eTransferRead | AccessFlagBits::eTransferWrite | AccessFlagBits::eHostRead | AccessFlagBits::eHostWrite | AccessFlagBits::eMemoryRead | + AccessFlagBits::eMemoryWrite | AccessFlagBits::eNone | AccessFlagBits::eTransformFeedbackWriteEXT | AccessFlagBits::eTransformFeedbackCounterReadEXT | + AccessFlagBits::eTransformFeedbackCounterWriteEXT | AccessFlagBits::eConditionalRenderingReadEXT | AccessFlagBits::eColorAttachmentReadNoncoherentEXT | + AccessFlagBits::eAccelerationStructureReadKHR | AccessFlagBits::eAccelerationStructureWriteKHR | AccessFlagBits::eFragmentDensityMapReadEXT | + AccessFlagBits::eFragmentShadingRateAttachmentReadKHR | AccessFlagBits::eCommandPreprocessReadEXT | AccessFlagBits::eCommandPreprocessWriteEXT; + }; + + // wrapper class for enum VkAttachmentDescriptionFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentDescriptionFlagBits.html + enum class AttachmentDescriptionFlagBits : VkAttachmentDescriptionFlags + { + eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT + }; + + // wrapper using for bitmask VkAttachmentDescriptionFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentDescriptionFlags.html + using AttachmentDescriptionFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkAttachmentDescriptionFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AttachmentDescriptionFlags allFlags = AttachmentDescriptionFlagBits::eMayAlias; + }; + + // wrapper class for enum VkAttachmentLoadOp, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentLoadOp.html + enum class AttachmentLoadOp + { + eLoad = VK_ATTACHMENT_LOAD_OP_LOAD, + eClear = VK_ATTACHMENT_LOAD_OP_CLEAR, + eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE, + eNone = VK_ATTACHMENT_LOAD_OP_NONE, + eNoneEXT = VK_ATTACHMENT_LOAD_OP_NONE_EXT, + eNoneKHR = VK_ATTACHMENT_LOAD_OP_NONE_KHR + }; + + // wrapper class for enum VkAttachmentStoreOp, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentStoreOp.html + enum class AttachmentStoreOp + { + eStore = VK_ATTACHMENT_STORE_OP_STORE, + eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE, + eNone = VK_ATTACHMENT_STORE_OP_NONE, + eNoneKHR = VK_ATTACHMENT_STORE_OP_NONE_KHR, + eNoneQCOM = VK_ATTACHMENT_STORE_OP_NONE_QCOM, + eNoneEXT = VK_ATTACHMENT_STORE_OP_NONE_EXT + }; + + // wrapper class for enum VkDependencyFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDependencyFlagBits.html + enum class DependencyFlagBits : VkDependencyFlags + { + eByRegion = VK_DEPENDENCY_BY_REGION_BIT, + eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT, + eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR, + eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT, + eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR, + eFeedbackLoopEXT = VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT, + eQueueFamilyOwnershipTransferUseAllStagesKHR = VK_DEPENDENCY_QUEUE_FAMILY_OWNERSHIP_TRANSFER_USE_ALL_STAGES_BIT_KHR + }; + + // wrapper using for bitmask VkDependencyFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDependencyFlags.html + using DependencyFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkDependencyFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DependencyFlags allFlags = DependencyFlagBits::eByRegion | DependencyFlagBits::eDeviceGroup | + DependencyFlagBits::eViewLocal | DependencyFlagBits::eFeedbackLoopEXT | + DependencyFlagBits::eQueueFamilyOwnershipTransferUseAllStagesKHR; + }; + + // wrapper class for enum VkFramebufferCreateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebufferCreateFlagBits.html + enum class FramebufferCreateFlagBits : VkFramebufferCreateFlags + { + eImageless = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, + eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR + }; + + // wrapper using for bitmask VkFramebufferCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebufferCreateFlags.html + using FramebufferCreateFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkFramebufferCreateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR FramebufferCreateFlags allFlags = FramebufferCreateFlagBits::eImageless; + }; + + // wrapper class for enum VkPipelineBindPoint, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBindPoint.html + enum class PipelineBindPoint + { + eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS, + eCompute = VK_PIPELINE_BIND_POINT_COMPUTE, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eExecutionGraphAMDX = VK_PIPELINE_BIND_POINT_EXECUTION_GRAPH_AMDX, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eRayTracingKHR = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, + eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, + eSubpassShadingHUAWEI = VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI + }; + + // wrapper class for enum VkRenderPassCreateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassCreateFlagBits.html + enum class RenderPassCreateFlagBits : VkRenderPassCreateFlags + { + eTransformQCOM = VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM + }; + + // wrapper using for bitmask VkRenderPassCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassCreateFlags.html + using RenderPassCreateFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = 
VkRenderPassCreateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR RenderPassCreateFlags allFlags = RenderPassCreateFlagBits::eTransformQCOM; + }; + + // wrapper class for enum VkSubpassDescriptionFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassDescriptionFlagBits.html + enum class SubpassDescriptionFlagBits : VkSubpassDescriptionFlags + { + ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX, + ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX, + eFragmentRegionQCOM = VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM, + eShaderResolveQCOM = VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM, + eTileShadingApronQCOM = VK_SUBPASS_DESCRIPTION_TILE_SHADING_APRON_BIT_QCOM, + eRasterizationOrderAttachmentColorAccessEXT = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT, + eRasterizationOrderAttachmentColorAccessARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM, + eRasterizationOrderAttachmentDepthAccessEXT = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT, + eRasterizationOrderAttachmentDepthAccessARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM, + eRasterizationOrderAttachmentStencilAccessEXT = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT, + eRasterizationOrderAttachmentStencilAccessARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM, + eEnableLegacyDitheringEXT = VK_SUBPASS_DESCRIPTION_ENABLE_LEGACY_DITHERING_BIT_EXT + }; + + // wrapper using for bitmask VkSubpassDescriptionFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassDescriptionFlags.html + using SubpassDescriptionFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkSubpassDescriptionFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SubpassDescriptionFlags allFlags = + SubpassDescriptionFlagBits::ePerViewAttributesNVX | SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX | + SubpassDescriptionFlagBits::eFragmentRegionQCOM | SubpassDescriptionFlagBits::eShaderResolveQCOM | SubpassDescriptionFlagBits::eTileShadingApronQCOM | + SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessEXT | SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessEXT | + SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessEXT | SubpassDescriptionFlagBits::eEnableLegacyDitheringEXT; + }; + + // wrapper class for enum VkCommandPoolCreateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandPoolCreateFlagBits.html + enum class CommandPoolCreateFlagBits : VkCommandPoolCreateFlags + { + eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, + eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, + eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT + }; + + // wrapper using for bitmask VkCommandPoolCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandPoolCreateFlags.html + using CommandPoolCreateFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkCommandPoolCreateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CommandPoolCreateFlags allFlags = + CommandPoolCreateFlagBits::eTransient | CommandPoolCreateFlagBits::eResetCommandBuffer | 
CommandPoolCreateFlagBits::eProtected; + }; + + // wrapper class for enum VkCommandPoolResetFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandPoolResetFlagBits.html + enum class CommandPoolResetFlagBits : VkCommandPoolResetFlags + { + eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT + }; + + // wrapper using for bitmask VkCommandPoolResetFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandPoolResetFlags.html + using CommandPoolResetFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkCommandPoolResetFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CommandPoolResetFlags allFlags = CommandPoolResetFlagBits::eReleaseResources; + }; + + // wrapper class for enum VkCommandBufferLevel, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferLevel.html + enum class CommandBufferLevel + { + ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY, + eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY + }; + + // wrapper class for enum VkCommandBufferResetFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferResetFlagBits.html + enum class CommandBufferResetFlagBits : VkCommandBufferResetFlags + { + eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT + }; + + // wrapper using for bitmask VkCommandBufferResetFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferResetFlags.html + using CommandBufferResetFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkCommandBufferResetFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CommandBufferResetFlags allFlags = CommandBufferResetFlagBits::eReleaseResources; + }; + + // wrapper class for enum VkCommandBufferUsageFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferUsageFlagBits.html + enum class CommandBufferUsageFlagBits : VkCommandBufferUsageFlags + { + eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, + eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, + eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT + }; + + // wrapper using for bitmask VkCommandBufferUsageFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferUsageFlags.html + using CommandBufferUsageFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkCommandBufferUsageFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CommandBufferUsageFlags allFlags = + CommandBufferUsageFlagBits::eOneTimeSubmit | CommandBufferUsageFlagBits::eRenderPassContinue | CommandBufferUsageFlagBits::eSimultaneousUse; + }; + + // wrapper class for enum VkQueryControlFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryControlFlagBits.html + enum class QueryControlFlagBits : VkQueryControlFlags + { + ePrecise = VK_QUERY_CONTROL_PRECISE_BIT + }; + + // wrapper using for bitmask VkQueryControlFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryControlFlags.html + using QueryControlFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkQueryControlFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR QueryControlFlags allFlags = QueryControlFlagBits::ePrecise; + }; + + // wrapper 
class for enum VkIndexType, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndexType.html
+  enum class IndexType
+  {
+    eUint16 = VK_INDEX_TYPE_UINT16,
+    eUint32 = VK_INDEX_TYPE_UINT32,
+    eUint8 = VK_INDEX_TYPE_UINT8,
+    eUint8EXT = VK_INDEX_TYPE_UINT8_EXT,
+    eUint8KHR = VK_INDEX_TYPE_UINT8_KHR,
+    eNoneKHR = VK_INDEX_TYPE_NONE_KHR,
+    eNoneNV = VK_INDEX_TYPE_NONE_NV
+  };
+
+  //=========================
+  //=== Index Type Traits ===
+  //=========================
+
+  template <typename T>
+  struct IndexTypeValue
+  {
+  };
+
+  template <>
+  struct IndexTypeValue<uint16_t>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint16;
+  };
+
+  template <>
+  struct CppType<IndexType, IndexType::eUint16>
+  {
+    using Type = uint16_t;
+  };
+
+  template <>
+  struct IndexTypeValue<uint32_t>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint32;
+  };
+
+  template <>
+  struct CppType<IndexType, IndexType::eUint32>
+  {
+    using Type = uint32_t;
+  };
+
+  template <>
+  struct IndexTypeValue<uint8_t>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint8;
+  };
+
+  template <>
+  struct CppType<IndexType, IndexType::eUint8>
+  {
+    using Type = uint8_t;
+  };
+
+  // wrapper class for enum VkStencilFaceFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkStencilFaceFlagBits.html
+  enum class StencilFaceFlagBits : VkStencilFaceFlags
+  {
+    eFront = VK_STENCIL_FACE_FRONT_BIT,
+    eBack = VK_STENCIL_FACE_BACK_BIT,
+    eFrontAndBack = VK_STENCIL_FACE_FRONT_AND_BACK,
+    eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK
+  };
+
+  // wrapper using for bitmask VkStencilFaceFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkStencilFaceFlags.html
+  using StencilFaceFlags = Flags<StencilFaceFlagBits>;
+
+  template <>
+  struct FlagTraits<StencilFaceFlagBits>
+  {
+    using WrappedType = VkStencilFaceFlagBits;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StencilFaceFlags allFlags =
+      StencilFaceFlagBits::eFront | StencilFaceFlagBits::eBack | StencilFaceFlagBits::eFrontAndBack;
+  };
+
+  // wrapper class for enum VkSubpassContents, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassContents.html
+  enum class SubpassContents
+  {
+    eInline = VK_SUBPASS_CONTENTS_INLINE,
+    eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS,
+    eInlineAndSecondaryCommandBuffersKHR = VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR,
+    eInlineAndSecondaryCommandBuffersEXT = VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_EXT
+  };
+
+  //=== VK_VERSION_1_1 ===
+
+  // wrapper class for enum VkSubgroupFeatureFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubgroupFeatureFlagBits.html
+  enum class SubgroupFeatureFlagBits : VkSubgroupFeatureFlags
+  {
+    eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT,
+    eVote = VK_SUBGROUP_FEATURE_VOTE_BIT,
+    eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT,
+    eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT,
+    eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT,
+    eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT,
+    eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
+    eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT,
+    eRotate = VK_SUBGROUP_FEATURE_ROTATE_BIT,
+    eRotateKHR = VK_SUBGROUP_FEATURE_ROTATE_BIT_KHR,
+    eRotateClustered = VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT,
+    eRotateClusteredKHR = VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT_KHR,
+    ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV
+  };
+
+  // wrapper using for bitmask VkSubgroupFeatureFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubgroupFeatureFlags.html
+  using
SubgroupFeatureFlags = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkSubgroupFeatureFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SubgroupFeatureFlags allFlags = + SubgroupFeatureFlagBits::eBasic | SubgroupFeatureFlagBits::eVote | SubgroupFeatureFlagBits::eArithmetic | SubgroupFeatureFlagBits::eBallot | + SubgroupFeatureFlagBits::eShuffle | SubgroupFeatureFlagBits::eShuffleRelative | SubgroupFeatureFlagBits::eClustered | SubgroupFeatureFlagBits::eQuad | + SubgroupFeatureFlagBits::eRotate | SubgroupFeatureFlagBits::eRotateClustered | SubgroupFeatureFlagBits::ePartitionedNV; + }; + + // wrapper class for enum VkPeerMemoryFeatureFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPeerMemoryFeatureFlagBits.html + enum class PeerMemoryFeatureFlagBits : VkPeerMemoryFeatureFlags + { + eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, + eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, + eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT, + eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT + }; + + using PeerMemoryFeatureFlagBitsKHR = PeerMemoryFeatureFlagBits; + + // wrapper using for bitmask VkPeerMemoryFeatureFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPeerMemoryFeatureFlags.html + using PeerMemoryFeatureFlags = Flags; + using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags; + + template <> + struct FlagTraits + { + using WrappedType = VkPeerMemoryFeatureFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PeerMemoryFeatureFlags allFlags = PeerMemoryFeatureFlagBits::eCopySrc | PeerMemoryFeatureFlagBits::eCopyDst | + PeerMemoryFeatureFlagBits::eGenericSrc | PeerMemoryFeatureFlagBits::eGenericDst; + }; + + // wrapper class for enum VkMemoryAllocateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryAllocateFlagBits.html + enum class MemoryAllocateFlagBits : VkMemoryAllocateFlags + { + eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, + eDeviceAddress = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT, + eDeviceAddressCaptureReplay = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT + }; + + using MemoryAllocateFlagBitsKHR = MemoryAllocateFlagBits; + + // wrapper using for bitmask VkMemoryAllocateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryAllocateFlags.html + using MemoryAllocateFlags = Flags; + using MemoryAllocateFlagsKHR = MemoryAllocateFlags; + + template <> + struct FlagTraits + { + using WrappedType = VkMemoryAllocateFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryAllocateFlags allFlags = + MemoryAllocateFlagBits::eDeviceMask | MemoryAllocateFlagBits::eDeviceAddress | MemoryAllocateFlagBits::eDeviceAddressCaptureReplay; + }; + + enum class CommandPoolTrimFlagBits : VkCommandPoolTrimFlags + { + }; + + // wrapper using for bitmask VkCommandPoolTrimFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandPoolTrimFlags.html + using CommandPoolTrimFlags = Flags; + using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CommandPoolTrimFlags allFlags = {}; + }; + + // wrapper class for enum VkPointClippingBehavior, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPointClippingBehavior.html + enum class 
PointClippingBehavior + { + eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY + }; + + using PointClippingBehaviorKHR = PointClippingBehavior; + + // wrapper class for enum VkTessellationDomainOrigin, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTessellationDomainOrigin.html + enum class TessellationDomainOrigin + { + eUpperLeft = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, + eLowerLeft = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT + }; + + using TessellationDomainOriginKHR = TessellationDomainOrigin; + + // wrapper class for enum VkSamplerYcbcrModelConversion, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerYcbcrModelConversion.html + enum class SamplerYcbcrModelConversion + { + eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, + eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, + eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, + eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 + }; + + using SamplerYcbcrModelConversionKHR = SamplerYcbcrModelConversion; + + // wrapper class for enum VkSamplerYcbcrRange, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerYcbcrRange.html + enum class SamplerYcbcrRange + { + eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW + }; + + using SamplerYcbcrRangeKHR = SamplerYcbcrRange; + + // wrapper class for enum VkChromaLocation, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkChromaLocation.html + enum class ChromaLocation + { + eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN, + eMidpoint = VK_CHROMA_LOCATION_MIDPOINT + }; + + using ChromaLocationKHR = ChromaLocation; + + // wrapper class for enum VkDescriptorUpdateTemplateType, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorUpdateTemplateType.html + enum class DescriptorUpdateTemplateType + { + eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + ePushDescriptors = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS + }; + + using DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType; + + enum class DescriptorUpdateTemplateCreateFlagBits : VkDescriptorUpdateTemplateCreateFlags + { + }; + + // wrapper using for bitmask VkDescriptorUpdateTemplateCreateFlags, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorUpdateTemplateCreateFlags.html + using DescriptorUpdateTemplateCreateFlags = Flags; + using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorUpdateTemplateCreateFlags allFlags = {}; + }; + + // wrapper class for enum VkExternalMemoryHandleTypeFlagBits, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryHandleTypeFlagBits.html + enum class ExternalMemoryHandleTypeFlagBits : VkExternalMemoryHandleTypeFlags + { + eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, + eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, + eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, + 
eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, + eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, + eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eZirconVmoFUCHSIA = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + eRdmaAddressNV = VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV, +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + eScreenBufferQNX = VK_EXTERNAL_MEMORY_HANDLE_TYPE_SCREEN_BUFFER_BIT_QNX, +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + eMtlbufferEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLBUFFER_BIT_EXT, + eMtltextureEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLTEXTURE_BIT_EXT, + eMtlheapEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLHEAP_BIT_EXT +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + }; + + using ExternalMemoryHandleTypeFlagBitsKHR = ExternalMemoryHandleTypeFlagBits; + + // wrapper using for bitmask VkExternalMemoryHandleTypeFlags, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryHandleTypeFlags.html + using ExternalMemoryHandleTypeFlags = Flags; + using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags; + + template <> + struct FlagTraits + { + using WrappedType = VkExternalMemoryHandleTypeFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryHandleTypeFlags allFlags = + ExternalMemoryHandleTypeFlagBits::eOpaqueFd | ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 | ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt | + ExternalMemoryHandleTypeFlagBits::eD3D11Texture | ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt | ExternalMemoryHandleTypeFlagBits::eD3D12Heap | + ExternalMemoryHandleTypeFlagBits::eD3D12Resource | ExternalMemoryHandleTypeFlagBits::eDmaBufEXT +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + | ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + | ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT | ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT +#if defined( VK_USE_PLATFORM_FUCHSIA ) + | ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + | ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + | ExternalMemoryHandleTypeFlagBits::eScreenBufferQNX +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + | ExternalMemoryHandleTypeFlagBits::eMtlbufferEXT | ExternalMemoryHandleTypeFlagBits::eMtltextureEXT | ExternalMemoryHandleTypeFlagBits::eMtlheapEXT +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + ; + }; + + // wrapper class for enum VkExternalMemoryFeatureFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryFeatureFlagBits.html + enum class ExternalMemoryFeatureFlagBits : VkExternalMemoryFeatureFlags + { + eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, + eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT + }; + + using ExternalMemoryFeatureFlagBitsKHR = ExternalMemoryFeatureFlagBits; + + // wrapper using for bitmask 
VkExternalMemoryFeatureFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryFeatureFlags.html + using ExternalMemoryFeatureFlags = Flags; + using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags; + + template <> + struct FlagTraits + { + using WrappedType = VkExternalMemoryFeatureFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryFeatureFlags allFlags = + ExternalMemoryFeatureFlagBits::eDedicatedOnly | ExternalMemoryFeatureFlagBits::eExportable | ExternalMemoryFeatureFlagBits::eImportable; + }; + + // wrapper class for enum VkExternalFenceHandleTypeFlagBits, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalFenceHandleTypeFlagBits.html + enum class ExternalFenceHandleTypeFlagBits : VkExternalFenceHandleTypeFlags + { + eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT + }; + + using ExternalFenceHandleTypeFlagBitsKHR = ExternalFenceHandleTypeFlagBits; + + // wrapper using for bitmask VkExternalFenceHandleTypeFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalFenceHandleTypeFlags.html + using ExternalFenceHandleTypeFlags = Flags; + using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags; + + template <> + struct FlagTraits + { + using WrappedType = VkExternalFenceHandleTypeFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalFenceHandleTypeFlags allFlags = + ExternalFenceHandleTypeFlagBits::eOpaqueFd | ExternalFenceHandleTypeFlagBits::eOpaqueWin32 | ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt | + ExternalFenceHandleTypeFlagBits::eSyncFd; + }; + + // wrapper class for enum VkExternalFenceFeatureFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalFenceFeatureFlagBits.html + enum class ExternalFenceFeatureFlagBits : VkExternalFenceFeatureFlags + { + eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT + }; + + using ExternalFenceFeatureFlagBitsKHR = ExternalFenceFeatureFlagBits; + + // wrapper using for bitmask VkExternalFenceFeatureFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalFenceFeatureFlags.html + using ExternalFenceFeatureFlags = Flags; + using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags; + + template <> + struct FlagTraits + { + using WrappedType = VkExternalFenceFeatureFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalFenceFeatureFlags allFlags = + ExternalFenceFeatureFlagBits::eExportable | ExternalFenceFeatureFlagBits::eImportable; + }; + + // wrapper class for enum VkFenceImportFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFenceImportFlagBits.html + enum class FenceImportFlagBits : VkFenceImportFlags + { + eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT + }; + + using FenceImportFlagBitsKHR = FenceImportFlagBits; + + // wrapper using for bitmask VkFenceImportFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFenceImportFlags.html + using FenceImportFlags = Flags; + using FenceImportFlagsKHR = FenceImportFlags; + + template <> + struct FlagTraits + { + using WrappedType = VkFenceImportFlagBits; + 
static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR FenceImportFlags allFlags = FenceImportFlagBits::eTemporary; + }; + + // wrapper class for enum VkSemaphoreImportFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreImportFlagBits.html + enum class SemaphoreImportFlagBits : VkSemaphoreImportFlags + { + eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT + }; + + using SemaphoreImportFlagBitsKHR = SemaphoreImportFlagBits; + + // wrapper using for bitmask VkSemaphoreImportFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreImportFlags.html + using SemaphoreImportFlags = Flags; + using SemaphoreImportFlagsKHR = SemaphoreImportFlags; + + template <> + struct FlagTraits + { + using WrappedType = VkSemaphoreImportFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SemaphoreImportFlags allFlags = SemaphoreImportFlagBits::eTemporary; + }; + + // wrapper class for enum VkExternalSemaphoreHandleTypeFlagBits, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalSemaphoreHandleTypeFlagBits.html + enum class ExternalSemaphoreHandleTypeFlagBits : VkExternalSemaphoreHandleTypeFlags + { + eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + eD3D11Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT, + eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eZirconEventFUCHSIA = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + }; + + using ExternalSemaphoreHandleTypeFlagBitsKHR = ExternalSemaphoreHandleTypeFlagBits; + + // wrapper using for bitmask VkExternalSemaphoreHandleTypeFlags, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalSemaphoreHandleTypeFlags.html + using ExternalSemaphoreHandleTypeFlags = Flags; + using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags; + + template <> + struct FlagTraits + { + using WrappedType = VkExternalSemaphoreHandleTypeFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalSemaphoreHandleTypeFlags allFlags = + ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd | ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 | + ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt | ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence | ExternalSemaphoreHandleTypeFlagBits::eSyncFd +#if defined( VK_USE_PLATFORM_FUCHSIA ) + | ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + ; + }; + + // wrapper class for enum VkExternalSemaphoreFeatureFlagBits, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalSemaphoreFeatureFlagBits.html + enum class ExternalSemaphoreFeatureFlagBits : VkExternalSemaphoreFeatureFlags + { + eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT + }; + + using ExternalSemaphoreFeatureFlagBitsKHR = ExternalSemaphoreFeatureFlagBits; + + // wrapper using for bitmask VkExternalSemaphoreFeatureFlags, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalSemaphoreFeatureFlags.html + using ExternalSemaphoreFeatureFlags 
= Flags; + using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags; + + template <> + struct FlagTraits + { + using WrappedType = VkExternalSemaphoreFeatureFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalSemaphoreFeatureFlags allFlags = + ExternalSemaphoreFeatureFlagBits::eExportable | ExternalSemaphoreFeatureFlagBits::eImportable; + }; + + //=== VK_VERSION_1_2 === + + // wrapper class for enum VkDriverId, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDriverId.html + enum class DriverId + { + eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY, + eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE, + eMesaRadv = VK_DRIVER_ID_MESA_RADV, + eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY, + eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS, + eIntelOpenSourceMESA = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA, + eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY, + eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY, + eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY, + eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER, + eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY, + eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY, + eMesaLlvmpipe = VK_DRIVER_ID_MESA_LLVMPIPE, + eMoltenvk = VK_DRIVER_ID_MOLTENVK, + eCoreaviProprietary = VK_DRIVER_ID_COREAVI_PROPRIETARY, + eJuiceProprietary = VK_DRIVER_ID_JUICE_PROPRIETARY, + eVerisiliconProprietary = VK_DRIVER_ID_VERISILICON_PROPRIETARY, + eMesaTurnip = VK_DRIVER_ID_MESA_TURNIP, + eMesaV3Dv = VK_DRIVER_ID_MESA_V3DV, + eMesaPanvk = VK_DRIVER_ID_MESA_PANVK, + eSamsungProprietary = VK_DRIVER_ID_SAMSUNG_PROPRIETARY, + eMesaVenus = VK_DRIVER_ID_MESA_VENUS, + eMesaDozen = VK_DRIVER_ID_MESA_DOZEN, + eMesaNvk = VK_DRIVER_ID_MESA_NVK, + eImaginationOpenSourceMESA = VK_DRIVER_ID_IMAGINATION_OPEN_SOURCE_MESA, + eMesaHoneykrisp = VK_DRIVER_ID_MESA_HONEYKRISP, + eVulkanScEmulationOnVulkan = VK_DRIVER_ID_VULKAN_SC_EMULATION_ON_VULKAN + }; + + using DriverIdKHR = DriverId; + + // wrapper class for enum VkShaderFloatControlsIndependence, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderFloatControlsIndependence.html + enum class ShaderFloatControlsIndependence + { + e32BitOnly = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY, + eAll = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL, + eNone = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE + }; + + using ShaderFloatControlsIndependenceKHR = ShaderFloatControlsIndependence; + + // wrapper class for enum VkDescriptorBindingFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorBindingFlagBits.html + enum class DescriptorBindingFlagBits : VkDescriptorBindingFlags + { + eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT, + eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, + ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT, + eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT + }; + + using DescriptorBindingFlagBitsEXT = DescriptorBindingFlagBits; + + // wrapper using for bitmask VkDescriptorBindingFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorBindingFlags.html + using DescriptorBindingFlags = Flags; + using DescriptorBindingFlagsEXT = DescriptorBindingFlags; + + template <> + struct FlagTraits + { + using WrappedType = VkDescriptorBindingFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorBindingFlags allFlags = + DescriptorBindingFlagBits::eUpdateAfterBind | DescriptorBindingFlagBits::eUpdateUnusedWhilePending | DescriptorBindingFlagBits::ePartiallyBound | + DescriptorBindingFlagBits::eVariableDescriptorCount; + }; + + // wrapper class for enum VkResolveModeFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkResolveModeFlagBits.html + enum class ResolveModeFlagBits : VkResolveModeFlags + { + eNone = VK_RESOLVE_MODE_NONE, + eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT, + eAverage = VK_RESOLVE_MODE_AVERAGE_BIT, + eMin = VK_RESOLVE_MODE_MIN_BIT, + eMax = VK_RESOLVE_MODE_MAX_BIT, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + eExternalFormatDownsampleANDROID = VK_RESOLVE_MODE_EXTERNAL_FORMAT_DOWNSAMPLE_ANDROID +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + }; + + using ResolveModeFlagBitsKHR = ResolveModeFlagBits; + + // wrapper using for bitmask VkResolveModeFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkResolveModeFlags.html + using ResolveModeFlags = Flags; + using ResolveModeFlagsKHR = ResolveModeFlags; + + template <> + struct FlagTraits + { + using WrappedType = VkResolveModeFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ResolveModeFlags allFlags = ResolveModeFlagBits::eNone | ResolveModeFlagBits::eSampleZero | + ResolveModeFlagBits::eAverage | ResolveModeFlagBits::eMin | ResolveModeFlagBits::eMax +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + | ResolveModeFlagBits::eExternalFormatDownsampleANDROID +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + ; + }; + + // wrapper class for enum VkSamplerReductionMode, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerReductionMode.html + enum class SamplerReductionMode + { + eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE, + eMin = VK_SAMPLER_REDUCTION_MODE_MIN, + eMax = VK_SAMPLER_REDUCTION_MODE_MAX, + eWeightedAverageRangeclampQCOM = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_RANGECLAMP_QCOM + }; + + using SamplerReductionModeEXT = SamplerReductionMode; + + // wrapper class for enum VkSemaphoreType, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreType.html + enum class SemaphoreType + { + eBinary = VK_SEMAPHORE_TYPE_BINARY, + eTimeline = VK_SEMAPHORE_TYPE_TIMELINE + }; + + using SemaphoreTypeKHR = SemaphoreType; + + // wrapper class for enum VkSemaphoreWaitFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreWaitFlagBits.html + enum class SemaphoreWaitFlagBits : VkSemaphoreWaitFlags + { + eAny = VK_SEMAPHORE_WAIT_ANY_BIT + }; + + using SemaphoreWaitFlagBitsKHR = SemaphoreWaitFlagBits; + + // wrapper using for bitmask VkSemaphoreWaitFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreWaitFlags.html + using SemaphoreWaitFlags = Flags; + using SemaphoreWaitFlagsKHR = SemaphoreWaitFlags; + + template <> + struct FlagTraits + { + using WrappedType = VkSemaphoreWaitFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SemaphoreWaitFlags allFlags = SemaphoreWaitFlagBits::eAny; + }; + + //=== VK_VERSION_1_3 === + + // wrapper class for enum VkPipelineCreationFeedbackFlagBits, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCreationFeedbackFlagBits.html + enum class PipelineCreationFeedbackFlagBits : VkPipelineCreationFeedbackFlags + { + eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT, + 
eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT, + eBasePipelineAcceleration = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT + }; + + using PipelineCreationFeedbackFlagBitsEXT = PipelineCreationFeedbackFlagBits; + + // wrapper using for bitmask VkPipelineCreationFeedbackFlags, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCreationFeedbackFlags.html + using PipelineCreationFeedbackFlags = Flags; + using PipelineCreationFeedbackFlagsEXT = PipelineCreationFeedbackFlags; + + template <> + struct FlagTraits + { + using WrappedType = VkPipelineCreationFeedbackFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreationFeedbackFlags allFlags = PipelineCreationFeedbackFlagBits::eValid | + PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit | + PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration; + }; + + // wrapper class for enum VkToolPurposeFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkToolPurposeFlagBits.html + enum class ToolPurposeFlagBits : VkToolPurposeFlags + { + eValidation = VK_TOOL_PURPOSE_VALIDATION_BIT, + eProfiling = VK_TOOL_PURPOSE_PROFILING_BIT, + eTracing = VK_TOOL_PURPOSE_TRACING_BIT, + eAdditionalFeatures = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT, + eModifyingFeatures = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT, + eDebugReportingEXT = VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT, + eDebugMarkersEXT = VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT + }; + + using ToolPurposeFlagBitsEXT = ToolPurposeFlagBits; + + // wrapper using for bitmask VkToolPurposeFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkToolPurposeFlags.html + using ToolPurposeFlags = Flags; + using ToolPurposeFlagsEXT = ToolPurposeFlags; + + template <> + struct FlagTraits + { + using WrappedType = VkToolPurposeFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ToolPurposeFlags allFlags = + ToolPurposeFlagBits::eValidation | ToolPurposeFlagBits::eProfiling | ToolPurposeFlagBits::eTracing | ToolPurposeFlagBits::eAdditionalFeatures | + ToolPurposeFlagBits::eModifyingFeatures | ToolPurposeFlagBits::eDebugReportingEXT | ToolPurposeFlagBits::eDebugMarkersEXT; + }; + + enum class PrivateDataSlotCreateFlagBits : VkPrivateDataSlotCreateFlags + { + }; + + using PrivateDataSlotCreateFlagBitsEXT = PrivateDataSlotCreateFlagBits; + + // wrapper using for bitmask VkPrivateDataSlotCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPrivateDataSlotCreateFlags.html + using PrivateDataSlotCreateFlags = Flags; + using PrivateDataSlotCreateFlagsEXT = PrivateDataSlotCreateFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PrivateDataSlotCreateFlags allFlags = {}; + }; + + // wrapper class for enum VkPipelineStageFlagBits2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineStageFlagBits2.html + enum class PipelineStageFlagBits2 : VkPipelineStageFlags2 + { + eNone = VK_PIPELINE_STAGE_2_NONE, + eTopOfPipe = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT, + eDrawIndirect = VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT, + eVertexInput = VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT, + eVertexShader = VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT, + eTessellationControlShader = VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT, + eTessellationEvaluationShader = 
VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT, + eGeometryShader = VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT, + eFragmentShader = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT, + eEarlyFragmentTests = VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT, + eLateFragmentTests = VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT, + eColorAttachmentOutput = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT, + eComputeShader = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT, + eAllTransfer = VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT, + eTransfer = VK_PIPELINE_STAGE_2_TRANSFER_BIT, + eBottomOfPipe = VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT, + eHost = VK_PIPELINE_STAGE_2_HOST_BIT, + eAllGraphics = VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT, + eAllCommands = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT, + eCopy = VK_PIPELINE_STAGE_2_COPY_BIT, + eResolve = VK_PIPELINE_STAGE_2_RESOLVE_BIT, + eBlit = VK_PIPELINE_STAGE_2_BLIT_BIT, + eClear = VK_PIPELINE_STAGE_2_CLEAR_BIT, + eIndexInput = VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT, + eVertexAttributeInput = VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT, + ePreRasterizationShaders = VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT, + eVideoDecodeKHR = VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR, + eVideoEncodeKHR = VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR, + eTransformFeedbackEXT = VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, + eConditionalRenderingEXT = VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT, + eCommandPreprocessEXT = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_EXT, + eCommandPreprocessNV = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV, + eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eShadingRateImageNV = VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV, + eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, + eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV, + eRayTracingShaderKHR = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR, + eRayTracingShaderNV = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV, + eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT, + eTaskShaderEXT = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT, + eTaskShaderNV = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV, + eMeshShaderEXT = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT, + eMeshShaderNV = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV, + eSubpassShaderHUAWEI = VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI, + eSubpassShadingHUAWEI = VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI, + eInvocationMaskHUAWEI = VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI, + eAccelerationStructureCopyKHR = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR, + eMicromapBuildEXT = VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT, + eClusterCullingShaderHUAWEI = VK_PIPELINE_STAGE_2_CLUSTER_CULLING_SHADER_BIT_HUAWEI, + eOpticalFlowNV = VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV, + eConvertCooperativeVectorMatrixNV = VK_PIPELINE_STAGE_2_CONVERT_COOPERATIVE_VECTOR_MATRIX_BIT_NV + }; + + using PipelineStageFlagBits2KHR = PipelineStageFlagBits2; + + // wrapper using for bitmask VkPipelineStageFlags2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineStageFlags2.html + using PipelineStageFlags2 = Flags; + using PipelineStageFlags2KHR = PipelineStageFlags2; + + template <> + struct FlagTraits + { + using WrappedType = VkPipelineStageFlagBits2; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineStageFlags2 allFlags = + 
PipelineStageFlagBits2::eNone | PipelineStageFlagBits2::eTopOfPipe | PipelineStageFlagBits2::eDrawIndirect | PipelineStageFlagBits2::eVertexInput | + PipelineStageFlagBits2::eVertexShader | PipelineStageFlagBits2::eTessellationControlShader | PipelineStageFlagBits2::eTessellationEvaluationShader | + PipelineStageFlagBits2::eGeometryShader | PipelineStageFlagBits2::eFragmentShader | PipelineStageFlagBits2::eEarlyFragmentTests | + PipelineStageFlagBits2::eLateFragmentTests | PipelineStageFlagBits2::eColorAttachmentOutput | PipelineStageFlagBits2::eComputeShader | + PipelineStageFlagBits2::eAllTransfer | PipelineStageFlagBits2::eBottomOfPipe | PipelineStageFlagBits2::eHost | PipelineStageFlagBits2::eAllGraphics | + PipelineStageFlagBits2::eAllCommands | PipelineStageFlagBits2::eCopy | PipelineStageFlagBits2::eResolve | PipelineStageFlagBits2::eBlit | + PipelineStageFlagBits2::eClear | PipelineStageFlagBits2::eIndexInput | PipelineStageFlagBits2::eVertexAttributeInput | + PipelineStageFlagBits2::ePreRasterizationShaders | PipelineStageFlagBits2::eVideoDecodeKHR | PipelineStageFlagBits2::eVideoEncodeKHR | + PipelineStageFlagBits2::eTransformFeedbackEXT | PipelineStageFlagBits2::eConditionalRenderingEXT | PipelineStageFlagBits2::eCommandPreprocessEXT | + PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR | PipelineStageFlagBits2::eAccelerationStructureBuildKHR | + PipelineStageFlagBits2::eRayTracingShaderKHR | PipelineStageFlagBits2::eFragmentDensityProcessEXT | PipelineStageFlagBits2::eTaskShaderEXT | + PipelineStageFlagBits2::eMeshShaderEXT | PipelineStageFlagBits2::eSubpassShaderHUAWEI | PipelineStageFlagBits2::eInvocationMaskHUAWEI | + PipelineStageFlagBits2::eAccelerationStructureCopyKHR | PipelineStageFlagBits2::eMicromapBuildEXT | PipelineStageFlagBits2::eClusterCullingShaderHUAWEI | + PipelineStageFlagBits2::eOpticalFlowNV | PipelineStageFlagBits2::eConvertCooperativeVectorMatrixNV; + }; + + // wrapper class for enum VkAccessFlagBits2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccessFlagBits2.html + enum class AccessFlagBits2 : VkAccessFlags2 + { + eNone = VK_ACCESS_2_NONE, + eIndirectCommandRead = VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT, + eIndexRead = VK_ACCESS_2_INDEX_READ_BIT, + eVertexAttributeRead = VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT, + eUniformRead = VK_ACCESS_2_UNIFORM_READ_BIT, + eInputAttachmentRead = VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT, + eShaderRead = VK_ACCESS_2_SHADER_READ_BIT, + eShaderWrite = VK_ACCESS_2_SHADER_WRITE_BIT, + eColorAttachmentRead = VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT, + eColorAttachmentWrite = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT, + eDepthStencilAttachmentRead = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT, + eDepthStencilAttachmentWrite = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, + eTransferRead = VK_ACCESS_2_TRANSFER_READ_BIT, + eTransferWrite = VK_ACCESS_2_TRANSFER_WRITE_BIT, + eHostRead = VK_ACCESS_2_HOST_READ_BIT, + eHostWrite = VK_ACCESS_2_HOST_WRITE_BIT, + eMemoryRead = VK_ACCESS_2_MEMORY_READ_BIT, + eMemoryWrite = VK_ACCESS_2_MEMORY_WRITE_BIT, + eShaderSampledRead = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT, + eShaderStorageRead = VK_ACCESS_2_SHADER_STORAGE_READ_BIT, + eShaderStorageWrite = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT, + eVideoDecodeReadKHR = VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR, + eVideoDecodeWriteKHR = VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR, + eVideoEncodeReadKHR = VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR, + eVideoEncodeWriteKHR = VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR, + eShaderTileAttachmentReadQCOM = 
VK_ACCESS_2_SHADER_TILE_ATTACHMENT_READ_BIT_QCOM, + eShaderTileAttachmentWriteQCOM = VK_ACCESS_2_SHADER_TILE_ATTACHMENT_WRITE_BIT_QCOM, + eTransformFeedbackWriteEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, + eTransformFeedbackCounterReadEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, + eTransformFeedbackCounterWriteEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, + eConditionalRenderingReadEXT = VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT, + eCommandPreprocessReadEXT = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_EXT, + eCommandPreprocessReadNV = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV, + eCommandPreprocessWriteEXT = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_EXT, + eCommandPreprocessWriteNV = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV, + eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR, + eShadingRateImageReadNV = VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV, + eAccelerationStructureReadKHR = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR, + eAccelerationStructureReadNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV, + eAccelerationStructureWriteKHR = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, + eAccelerationStructureWriteNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV, + eFragmentDensityMapReadEXT = VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, + eColorAttachmentReadNoncoherentEXT = VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, + eDescriptorBufferReadEXT = VK_ACCESS_2_DESCRIPTOR_BUFFER_READ_BIT_EXT, + eInvocationMaskReadHUAWEI = VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI, + eShaderBindingTableReadKHR = VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR, + eMicromapReadEXT = VK_ACCESS_2_MICROMAP_READ_BIT_EXT, + eMicromapWriteEXT = VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT, + eOpticalFlowReadNV = VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV, + eOpticalFlowWriteNV = VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV + }; + + using AccessFlagBits2KHR = AccessFlagBits2; + + // wrapper using for bitmask VkAccessFlags2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccessFlags2.html + using AccessFlags2 = Flags; + using AccessFlags2KHR = AccessFlags2; + + template <> + struct FlagTraits + { + using WrappedType = VkAccessFlagBits2; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AccessFlags2 allFlags = + AccessFlagBits2::eNone | AccessFlagBits2::eIndirectCommandRead | AccessFlagBits2::eIndexRead | AccessFlagBits2::eVertexAttributeRead | + AccessFlagBits2::eUniformRead | AccessFlagBits2::eInputAttachmentRead | AccessFlagBits2::eShaderRead | AccessFlagBits2::eShaderWrite | + AccessFlagBits2::eColorAttachmentRead | AccessFlagBits2::eColorAttachmentWrite | AccessFlagBits2::eDepthStencilAttachmentRead | + AccessFlagBits2::eDepthStencilAttachmentWrite | AccessFlagBits2::eTransferRead | AccessFlagBits2::eTransferWrite | AccessFlagBits2::eHostRead | + AccessFlagBits2::eHostWrite | AccessFlagBits2::eMemoryRead | AccessFlagBits2::eMemoryWrite | AccessFlagBits2::eShaderSampledRead | + AccessFlagBits2::eShaderStorageRead | AccessFlagBits2::eShaderStorageWrite | AccessFlagBits2::eVideoDecodeReadKHR | + AccessFlagBits2::eVideoDecodeWriteKHR | AccessFlagBits2::eVideoEncodeReadKHR | AccessFlagBits2::eVideoEncodeWriteKHR | + AccessFlagBits2::eShaderTileAttachmentReadQCOM | AccessFlagBits2::eShaderTileAttachmentWriteQCOM | AccessFlagBits2::eTransformFeedbackWriteEXT | + AccessFlagBits2::eTransformFeedbackCounterReadEXT | AccessFlagBits2::eTransformFeedbackCounterWriteEXT | 
AccessFlagBits2::eConditionalRenderingReadEXT | + AccessFlagBits2::eCommandPreprocessReadEXT | AccessFlagBits2::eCommandPreprocessWriteEXT | AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR | + AccessFlagBits2::eAccelerationStructureReadKHR | AccessFlagBits2::eAccelerationStructureWriteKHR | AccessFlagBits2::eFragmentDensityMapReadEXT | + AccessFlagBits2::eColorAttachmentReadNoncoherentEXT | AccessFlagBits2::eDescriptorBufferReadEXT | AccessFlagBits2::eInvocationMaskReadHUAWEI | + AccessFlagBits2::eShaderBindingTableReadKHR | AccessFlagBits2::eMicromapReadEXT | AccessFlagBits2::eMicromapWriteEXT | + AccessFlagBits2::eOpticalFlowReadNV | AccessFlagBits2::eOpticalFlowWriteNV; + }; + + // wrapper class for enum VkSubmitFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubmitFlagBits.html + enum class SubmitFlagBits : VkSubmitFlags + { + eProtected = VK_SUBMIT_PROTECTED_BIT + }; + + using SubmitFlagBitsKHR = SubmitFlagBits; + + // wrapper using for bitmask VkSubmitFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubmitFlags.html + using SubmitFlags = Flags; + using SubmitFlagsKHR = SubmitFlags; + + template <> + struct FlagTraits + { + using WrappedType = VkSubmitFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SubmitFlags allFlags = SubmitFlagBits::eProtected; + }; + + // wrapper class for enum VkRenderingFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingFlagBits.html + enum class RenderingFlagBits : VkRenderingFlags + { + eContentsSecondaryCommandBuffers = VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT, + eSuspending = VK_RENDERING_SUSPENDING_BIT, + eResuming = VK_RENDERING_RESUMING_BIT, + eEnableLegacyDitheringEXT = VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT, + eContentsInlineKHR = VK_RENDERING_CONTENTS_INLINE_BIT_KHR, + eContentsInlineEXT = VK_RENDERING_CONTENTS_INLINE_BIT_EXT + }; + + using RenderingFlagBitsKHR = RenderingFlagBits; + + // wrapper using for bitmask VkRenderingFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingFlags.html + using RenderingFlags = Flags; + using RenderingFlagsKHR = RenderingFlags; + + template <> + struct FlagTraits + { + using WrappedType = VkRenderingFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR RenderingFlags allFlags = RenderingFlagBits::eContentsSecondaryCommandBuffers | RenderingFlagBits::eSuspending | + RenderingFlagBits::eResuming | RenderingFlagBits::eEnableLegacyDitheringEXT | + RenderingFlagBits::eContentsInlineKHR; + }; + + // wrapper class for enum VkFormatFeatureFlagBits2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFormatFeatureFlagBits2.html + enum class FormatFeatureFlagBits2 : VkFormatFeatureFlags2 + { + eSampledImage = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT, + eStorageImage = VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT, + eStorageImageAtomic = VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT, + eUniformTexelBuffer = VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT, + eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT, + eVertexBuffer = VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT, + eColorAttachment = VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT, + eColorAttachmentBlend = VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT, + eDepthStencilAttachment = VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT, + eBlitSrc 
= VK_FORMAT_FEATURE_2_BLIT_SRC_BIT, + eBlitDst = VK_FORMAT_FEATURE_2_BLIT_DST_BIT, + eSampledImageFilterLinear = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT, + eTransferSrc = VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT, + eTransferDst = VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT, + eSampledImageFilterMinmax = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT, + eMidpointChromaSamples = VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT, + eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, + eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = + VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, + eDisjoint = VK_FORMAT_FEATURE_2_DISJOINT_BIT, + eCositedChromaSamples = VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT, + eStorageReadWithoutFormat = VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT, + eStorageWriteWithoutFormat = VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT, + eSampledImageDepthComparison = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT, + eSampledImageFilterCubic = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT, + eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT, + eHostImageTransfer = VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT, + eHostImageTransferEXT = VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT, + eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR, + eVideoDecodeDpbKHR = VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR, + eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR, + eFragmentDensityMapEXT = VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT, + eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eVideoEncodeInputKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR, + eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR, + eAccelerationStructureRadiusBufferNV = VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_RADIUS_BUFFER_BIT_NV, + eLinearColorAttachmentNV = VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV, + eWeightImageQCOM = VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM, + eWeightSampledImageQCOM = VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM, + eBlockMatchingQCOM = VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM, + eBoxFilterSampledQCOM = VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM, + eOpticalFlowImageNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV, + eOpticalFlowVectorNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV, + eOpticalFlowCostNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV, + eVideoEncodeQuantizationDeltaMapKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR, + eVideoEncodeEmphasisMapKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR + }; + + using FormatFeatureFlagBits2KHR = FormatFeatureFlagBits2; + + // wrapper using for bitmask VkFormatFeatureFlags2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFormatFeatureFlags2.html + using FormatFeatureFlags2 = Flags; + using FormatFeatureFlags2KHR = FormatFeatureFlags2; + + template <> + struct FlagTraits + { + using WrappedType = VkFormatFeatureFlagBits2; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR FormatFeatureFlags2 allFlags = + FormatFeatureFlagBits2::eSampledImage | FormatFeatureFlagBits2::eStorageImage | FormatFeatureFlagBits2::eStorageImageAtomic | + FormatFeatureFlagBits2::eUniformTexelBuffer | FormatFeatureFlagBits2::eStorageTexelBuffer | FormatFeatureFlagBits2::eStorageTexelBufferAtomic | + FormatFeatureFlagBits2::eVertexBuffer | FormatFeatureFlagBits2::eColorAttachment | FormatFeatureFlagBits2::eColorAttachmentBlend | + FormatFeatureFlagBits2::eDepthStencilAttachment | FormatFeatureFlagBits2::eBlitSrc | FormatFeatureFlagBits2::eBlitDst | + FormatFeatureFlagBits2::eSampledImageFilterLinear | FormatFeatureFlagBits2::eTransferSrc | FormatFeatureFlagBits2::eTransferDst | + FormatFeatureFlagBits2::eSampledImageFilterMinmax | FormatFeatureFlagBits2::eMidpointChromaSamples | + FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter | FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter | + FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit | + FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable | FormatFeatureFlagBits2::eDisjoint | + FormatFeatureFlagBits2::eCositedChromaSamples | FormatFeatureFlagBits2::eStorageReadWithoutFormat | FormatFeatureFlagBits2::eStorageWriteWithoutFormat | + FormatFeatureFlagBits2::eSampledImageDepthComparison | FormatFeatureFlagBits2::eSampledImageFilterCubic | FormatFeatureFlagBits2::eHostImageTransfer | + FormatFeatureFlagBits2::eVideoDecodeOutputKHR | FormatFeatureFlagBits2::eVideoDecodeDpbKHR | + FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR | FormatFeatureFlagBits2::eFragmentDensityMapEXT | + FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR | FormatFeatureFlagBits2::eVideoEncodeInputKHR | FormatFeatureFlagBits2::eVideoEncodeDpbKHR | + FormatFeatureFlagBits2::eAccelerationStructureRadiusBufferNV | FormatFeatureFlagBits2::eLinearColorAttachmentNV | + FormatFeatureFlagBits2::eWeightImageQCOM | FormatFeatureFlagBits2::eWeightSampledImageQCOM | FormatFeatureFlagBits2::eBlockMatchingQCOM | + FormatFeatureFlagBits2::eBoxFilterSampledQCOM | FormatFeatureFlagBits2::eOpticalFlowImageNV | FormatFeatureFlagBits2::eOpticalFlowVectorNV | + FormatFeatureFlagBits2::eOpticalFlowCostNV | FormatFeatureFlagBits2::eVideoEncodeQuantizationDeltaMapKHR | + FormatFeatureFlagBits2::eVideoEncodeEmphasisMapKHR; + }; + + //=== VK_VERSION_1_4 === + + // wrapper class for enum VkQueueGlobalPriority, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueGlobalPriority.html + enum class QueueGlobalPriority + { + eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW, + eLowKHR = VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR, + eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM, + eMediumKHR = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR, + eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH, + eHighKHR = VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR, + eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME, + eRealtimeKHR = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR + }; + + using QueueGlobalPriorityEXT = QueueGlobalPriority; + using QueueGlobalPriorityKHR = QueueGlobalPriority; + + // wrapper class for enum VkLineRasterizationMode, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLineRasterizationMode.html + enum class LineRasterizationMode + { + eDefault = VK_LINE_RASTERIZATION_MODE_DEFAULT, + eDefaultKHR = VK_LINE_RASTERIZATION_MODE_DEFAULT_KHR, + eRectangular = VK_LINE_RASTERIZATION_MODE_RECTANGULAR, + 
eRectangularKHR = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_KHR, + eBresenham = VK_LINE_RASTERIZATION_MODE_BRESENHAM, + eBresenhamKHR = VK_LINE_RASTERIZATION_MODE_BRESENHAM_KHR, + eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH, + eRectangularSmoothKHR = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_KHR + }; + + using LineRasterizationModeEXT = LineRasterizationMode; + using LineRasterizationModeKHR = LineRasterizationMode; + + // wrapper class for enum VkMemoryUnmapFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryUnmapFlagBits.html + enum class MemoryUnmapFlagBits : VkMemoryUnmapFlags + { + eReserveEXT = VK_MEMORY_UNMAP_RESERVE_BIT_EXT + }; + + using MemoryUnmapFlagBitsKHR = MemoryUnmapFlagBits; + + // wrapper using for bitmask VkMemoryUnmapFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryUnmapFlags.html + using MemoryUnmapFlags = Flags; + using MemoryUnmapFlagsKHR = MemoryUnmapFlags; + + template <> + struct FlagTraits + { + using WrappedType = VkMemoryUnmapFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryUnmapFlags allFlags = MemoryUnmapFlagBits::eReserveEXT; + }; + + // wrapper class for enum VkPipelineCreateFlagBits2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCreateFlagBits2.html + enum class PipelineCreateFlagBits2 : VkPipelineCreateFlags2 + { + eDisableOptimization = VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT, + eAllowDerivatives = VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT, + eDerivative = VK_PIPELINE_CREATE_2_DERIVATIVE_BIT, + eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + eDispatchBase = VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT, + eFailOnPipelineCompileRequired = VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT, + eEarlyReturnOnFailure = VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT, + eNoProtectedAccess = VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT, + eNoProtectedAccessEXT = VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_EXT, + eProtectedAccessOnly = VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT, + eProtectedAccessOnlyEXT = VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eExecutionGraphAMDX = VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eRayTracingAllowSpheresAndLinearSweptSpheresNV = VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_SPHERES_AND_LINEAR_SWEPT_SPHERES_BIT_NV, + eEnableLegacyDitheringEXT = VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT, + eDeferCompileNV = VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV, + eCaptureStatisticsKHR = VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR, + eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, + eLinkTimeOptimizationEXT = VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_EXT, + eRetainLinkTimeOptimizationInfoEXT = VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT, + eLibraryKHR = VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR, + eRayTracingSkipTrianglesKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR, + eRayTracingSkipBuiltInPrimitives = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_BUILT_IN_PRIMITIVES_BIT_KHR, + eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR, + eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullClosestHitShadersKHR = 
VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, + eRayTracingNoNullIntersectionShadersKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, + eRayTracingShaderGroupHandleCaptureReplayKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR, + eIndirectBindableNV = VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_NV, + eRayTracingAllowMotionNV = VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_NV, + eRenderingFragmentShadingRateAttachmentKHR = VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eRenderingFragmentDensityMapAttachmentEXT = VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eRayTracingOpacityMicromapEXT = VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT, + eColorAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, + eDepthStencilAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, + eRayTracingDisplacementMicromapNV = VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV, + eDescriptorBufferEXT = VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT, + eDisallowOpacityMicromapARM = VK_PIPELINE_CREATE_2_DISALLOW_OPACITY_MICROMAP_BIT_ARM, + eCaptureDataKHR = VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR, + eIndirectBindableEXT = VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_EXT + }; + + using PipelineCreateFlagBits2KHR = PipelineCreateFlagBits2; + + // wrapper using for bitmask VkPipelineCreateFlags2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCreateFlags2.html + using PipelineCreateFlags2 = Flags; + using PipelineCreateFlags2KHR = PipelineCreateFlags2; + + template <> + struct FlagTraits + { + using WrappedType = VkPipelineCreateFlagBits2; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreateFlags2 allFlags = + PipelineCreateFlagBits2::eDisableOptimization | PipelineCreateFlagBits2::eAllowDerivatives | PipelineCreateFlagBits2::eDerivative | + PipelineCreateFlagBits2::eViewIndexFromDeviceIndex | PipelineCreateFlagBits2::eDispatchBase | PipelineCreateFlagBits2::eFailOnPipelineCompileRequired | + PipelineCreateFlagBits2::eEarlyReturnOnFailure | PipelineCreateFlagBits2::eNoProtectedAccess | PipelineCreateFlagBits2::eProtectedAccessOnly +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | PipelineCreateFlagBits2::eExecutionGraphAMDX +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | PipelineCreateFlagBits2::eRayTracingAllowSpheresAndLinearSweptSpheresNV | PipelineCreateFlagBits2::eEnableLegacyDitheringEXT | + PipelineCreateFlagBits2::eDeferCompileNV | PipelineCreateFlagBits2::eCaptureStatisticsKHR | PipelineCreateFlagBits2::eCaptureInternalRepresentationsKHR | + PipelineCreateFlagBits2::eLinkTimeOptimizationEXT | PipelineCreateFlagBits2::eRetainLinkTimeOptimizationInfoEXT | PipelineCreateFlagBits2::eLibraryKHR | + PipelineCreateFlagBits2::eRayTracingSkipTrianglesKHR | PipelineCreateFlagBits2::eRayTracingSkipAabbsKHR | + PipelineCreateFlagBits2::eRayTracingNoNullAnyHitShadersKHR | PipelineCreateFlagBits2::eRayTracingNoNullClosestHitShadersKHR | + PipelineCreateFlagBits2::eRayTracingNoNullMissShadersKHR | PipelineCreateFlagBits2::eRayTracingNoNullIntersectionShadersKHR | + PipelineCreateFlagBits2::eRayTracingShaderGroupHandleCaptureReplayKHR | PipelineCreateFlagBits2::eIndirectBindableNV | + 
PipelineCreateFlagBits2::eRayTracingAllowMotionNV | PipelineCreateFlagBits2::eRenderingFragmentShadingRateAttachmentKHR | + PipelineCreateFlagBits2::eRenderingFragmentDensityMapAttachmentEXT | PipelineCreateFlagBits2::eRayTracingOpacityMicromapEXT | + PipelineCreateFlagBits2::eColorAttachmentFeedbackLoopEXT | PipelineCreateFlagBits2::eDepthStencilAttachmentFeedbackLoopEXT | + PipelineCreateFlagBits2::eRayTracingDisplacementMicromapNV | PipelineCreateFlagBits2::eDescriptorBufferEXT | + PipelineCreateFlagBits2::eDisallowOpacityMicromapARM | PipelineCreateFlagBits2::eCaptureDataKHR | PipelineCreateFlagBits2::eIndirectBindableEXT; + }; + + // wrapper class for enum VkBufferUsageFlagBits2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferUsageFlagBits2.html + enum class BufferUsageFlagBits2 : VkBufferUsageFlags2 + { + eTransferSrc = VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT, + eTransferDst = VK_BUFFER_USAGE_2_TRANSFER_DST_BIT, + eUniformTexelBuffer = VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT, + eUniformBuffer = VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT, + eStorageBuffer = VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT, + eIndexBuffer = VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT, + eVertexBuffer = VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT, + eIndirectBuffer = VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT, + eShaderDeviceAddress = VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eExecutionGraphScratchAMDX = VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eConditionalRenderingEXT = VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT, + eShaderBindingTableKHR = VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR, + eRayTracingNV = VK_BUFFER_USAGE_2_RAY_TRACING_BIT_NV, + eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, + eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT, + eVideoDecodeSrcKHR = VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR, + eVideoDecodeDstKHR = VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR, + eVideoEncodeDstKHR = VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR, + eVideoEncodeSrcKHR = VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR, + eAccelerationStructureBuildInputReadOnlyKHR = VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR, + eAccelerationStructureStorageKHR = VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR, + eSamplerDescriptorBufferEXT = VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT, + eResourceDescriptorBufferEXT = VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT, + ePushDescriptorsDescriptorBufferEXT = VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT, + eMicromapBuildInputReadOnlyEXT = VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT, + eMicromapStorageEXT = VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT, + eTileMemoryQCOM = VK_BUFFER_USAGE_2_TILE_MEMORY_QCOM, + ePreprocessBufferEXT = VK_BUFFER_USAGE_2_PREPROCESS_BUFFER_BIT_EXT + }; + + using BufferUsageFlagBits2KHR = BufferUsageFlagBits2; + + // wrapper using for bitmask VkBufferUsageFlags2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferUsageFlags2.html + using BufferUsageFlags2 = Flags; + using BufferUsageFlags2KHR = BufferUsageFlags2; + + template <> + struct FlagTraits + { + using WrappedType = VkBufferUsageFlagBits2; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR BufferUsageFlags2 allFlags = + 
BufferUsageFlagBits2::eTransferSrc | BufferUsageFlagBits2::eTransferDst | BufferUsageFlagBits2::eUniformTexelBuffer | + BufferUsageFlagBits2::eStorageTexelBuffer | BufferUsageFlagBits2::eUniformBuffer | BufferUsageFlagBits2::eStorageBuffer | + BufferUsageFlagBits2::eIndexBuffer | BufferUsageFlagBits2::eVertexBuffer | BufferUsageFlagBits2::eIndirectBuffer | + BufferUsageFlagBits2::eShaderDeviceAddress +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | BufferUsageFlagBits2::eExecutionGraphScratchAMDX +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | BufferUsageFlagBits2::eConditionalRenderingEXT | BufferUsageFlagBits2::eShaderBindingTableKHR | BufferUsageFlagBits2::eTransformFeedbackBufferEXT | + BufferUsageFlagBits2::eTransformFeedbackCounterBufferEXT | BufferUsageFlagBits2::eVideoDecodeSrcKHR | BufferUsageFlagBits2::eVideoDecodeDstKHR | + BufferUsageFlagBits2::eVideoEncodeDstKHR | BufferUsageFlagBits2::eVideoEncodeSrcKHR | BufferUsageFlagBits2::eAccelerationStructureBuildInputReadOnlyKHR | + BufferUsageFlagBits2::eAccelerationStructureStorageKHR | BufferUsageFlagBits2::eSamplerDescriptorBufferEXT | + BufferUsageFlagBits2::eResourceDescriptorBufferEXT | BufferUsageFlagBits2::ePushDescriptorsDescriptorBufferEXT | + BufferUsageFlagBits2::eMicromapBuildInputReadOnlyEXT | BufferUsageFlagBits2::eMicromapStorageEXT | BufferUsageFlagBits2::eTileMemoryQCOM | + BufferUsageFlagBits2::ePreprocessBufferEXT; + }; + + // wrapper class for enum VkPipelineRobustnessBufferBehavior, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRobustnessBufferBehavior.html + enum class PipelineRobustnessBufferBehavior + { + eDeviceDefault = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT, + eDisabled = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED, + eRobustBufferAccess = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS, + eRobustBufferAccess2 = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2 + }; + + using PipelineRobustnessBufferBehaviorEXT = PipelineRobustnessBufferBehavior; + + // wrapper class for enum VkPipelineRobustnessImageBehavior, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRobustnessImageBehavior.html + enum class PipelineRobustnessImageBehavior + { + eDeviceDefault = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT, + eDisabled = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED, + eRobustImageAccess = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS, + eRobustImageAccess2 = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2 + }; + + using PipelineRobustnessImageBehaviorEXT = PipelineRobustnessImageBehavior; + + // wrapper class for enum VkHostImageCopyFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkHostImageCopyFlagBits.html + enum class HostImageCopyFlagBits : VkHostImageCopyFlags + { + eMemcpy = VK_HOST_IMAGE_COPY_MEMCPY + }; + + using HostImageCopyFlagBitsEXT = HostImageCopyFlagBits; + + // wrapper using for bitmask VkHostImageCopyFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkHostImageCopyFlags.html + using HostImageCopyFlags = Flags; + using HostImageCopyFlagsEXT = HostImageCopyFlags; + + template <> + struct FlagTraits + { + using WrappedType = VkHostImageCopyFlagBits; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR HostImageCopyFlags allFlags = HostImageCopyFlagBits::eMemcpy; + }; + + //=== VK_KHR_surface === + + // wrapper class for enum VkSurfaceTransformFlagBitsKHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceTransformFlagBitsKHR.html + enum class SurfaceTransformFlagBitsKHR : VkSurfaceTransformFlagsKHR + { + eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR, + eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR, + eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR, + eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR, + eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR, + eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR, + eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR, + eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR, + eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR + }; + + // wrapper using for bitmask VkSurfaceTransformFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceTransformFlagsKHR.html + using SurfaceTransformFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkSurfaceTransformFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SurfaceTransformFlagsKHR allFlags = + SurfaceTransformFlagBitsKHR::eIdentity | SurfaceTransformFlagBitsKHR::eRotate90 | SurfaceTransformFlagBitsKHR::eRotate180 | + SurfaceTransformFlagBitsKHR::eRotate270 | SurfaceTransformFlagBitsKHR::eHorizontalMirror | SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 | + SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 | SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 | SurfaceTransformFlagBitsKHR::eInherit; + }; + + // wrapper class for enum VkPresentModeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentModeKHR.html + enum class PresentModeKHR + { + eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR, + eMailbox = VK_PRESENT_MODE_MAILBOX_KHR, + eFifo = VK_PRESENT_MODE_FIFO_KHR, + eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR, + eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR, + eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR, + eFifoLatestReadyEXT = VK_PRESENT_MODE_FIFO_LATEST_READY_EXT + }; + + // wrapper class for enum VkColorSpaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkColorSpaceKHR.html + enum class ColorSpaceKHR + { + eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + eVkColorspaceSrgbNonlinear = VK_COLORSPACE_SRGB_NONLINEAR_KHR, + eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT, + eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT, + eDisplayP3LinearEXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, + eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT, + eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT, + eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT, + eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT, + eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT, + eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT, + eDolbyvisionEXT VULKAN_HPP_DEPRECATED_17( "eDolbyvisionEXT is deprecated, but no reason was given in the API XML" ) = VK_COLOR_SPACE_DOLBYVISION_EXT, + eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT, + eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT, + eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT, + ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT, + eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT, + eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD + }; + + // wrapper class for enum 
VkCompositeAlphaFlagBitsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCompositeAlphaFlagBitsKHR.html + enum class CompositeAlphaFlagBitsKHR : VkCompositeAlphaFlagsKHR + { + eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR, + ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR, + ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR, + eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR + }; + + // wrapper using for bitmask VkCompositeAlphaFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCompositeAlphaFlagsKHR.html + using CompositeAlphaFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkCompositeAlphaFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CompositeAlphaFlagsKHR allFlags = CompositeAlphaFlagBitsKHR::eOpaque | CompositeAlphaFlagBitsKHR::ePreMultiplied | + CompositeAlphaFlagBitsKHR::ePostMultiplied | CompositeAlphaFlagBitsKHR::eInherit; + }; + + //=== VK_KHR_swapchain === + + // wrapper class for enum VkSwapchainCreateFlagBitsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainCreateFlagBitsKHR.html + enum class SwapchainCreateFlagBitsKHR : VkSwapchainCreateFlagsKHR + { + eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, + eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR, + eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR, + eDeferredMemoryAllocationEXT = VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT + }; + + // wrapper using for bitmask VkSwapchainCreateFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainCreateFlagsKHR.html + using SwapchainCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkSwapchainCreateFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SwapchainCreateFlagsKHR allFlags = + SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions | SwapchainCreateFlagBitsKHR::eProtected | SwapchainCreateFlagBitsKHR::eMutableFormat | + SwapchainCreateFlagBitsKHR::eDeferredMemoryAllocationEXT; + }; + + // wrapper class for enum VkDeviceGroupPresentModeFlagBitsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupPresentModeFlagBitsKHR.html + enum class DeviceGroupPresentModeFlagBitsKHR : VkDeviceGroupPresentModeFlagsKHR + { + eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR, + eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR, + eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR, + eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR + }; + + // wrapper using for bitmask VkDeviceGroupPresentModeFlagsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupPresentModeFlagsKHR.html + using DeviceGroupPresentModeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkDeviceGroupPresentModeFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceGroupPresentModeFlagsKHR allFlags = + DeviceGroupPresentModeFlagBitsKHR::eLocal | DeviceGroupPresentModeFlagBitsKHR::eRemote | DeviceGroupPresentModeFlagBitsKHR::eSum | + DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice; + }; + + //=== VK_KHR_display === + + // wrapper class for enum VkDisplayPlaneAlphaFlagBitsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlaneAlphaFlagBitsKHR.html + enum 
class DisplayPlaneAlphaFlagBitsKHR : VkDisplayPlaneAlphaFlagsKHR
+  {
+    eOpaque                = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
+    eGlobal                = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
+    ePerPixel              = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
+    ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
+  };
+
+  // wrapper using for bitmask VkDisplayPlaneAlphaFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlaneAlphaFlagsKHR.html
+  using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
+  {
+    using WrappedType = VkDisplayPlaneAlphaFlagBitsKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DisplayPlaneAlphaFlagsKHR allFlags = DisplayPlaneAlphaFlagBitsKHR::eOpaque | DisplayPlaneAlphaFlagBitsKHR::eGlobal |
+                                                                              DisplayPlaneAlphaFlagBitsKHR::ePerPixel |
+                                                                              DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied;
+  };
+
+  enum class DisplayModeCreateFlagBitsKHR : VkDisplayModeCreateFlagsKHR
+  {
+  };
+
+  // wrapper using for bitmask VkDisplayModeCreateFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModeCreateFlagsKHR.html
+  using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<DisplayModeCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DisplayModeCreateFlagsKHR allFlags = {};
+  };
+
+  enum class DisplaySurfaceCreateFlagBitsKHR : VkDisplaySurfaceCreateFlagsKHR
+  {
+  };
+
+  // wrapper using for bitmask VkDisplaySurfaceCreateFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplaySurfaceCreateFlagsKHR.html
+  using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<DisplaySurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DisplaySurfaceCreateFlagsKHR allFlags = {};
+  };
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+
+  enum class XlibSurfaceCreateFlagBitsKHR : VkXlibSurfaceCreateFlagsKHR
+  {
+  };
+
+  // wrapper using for bitmask VkXlibSurfaceCreateFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkXlibSurfaceCreateFlagsKHR.html
+  using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<XlibSurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR XlibSurfaceCreateFlagsKHR allFlags = {};
+  };
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+
+  enum class XcbSurfaceCreateFlagBitsKHR : VkXcbSurfaceCreateFlagsKHR
+  {
+  };
+
+  // wrapper using for bitmask VkXcbSurfaceCreateFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkXcbSurfaceCreateFlagsKHR.html
+  using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<XcbSurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR XcbSurfaceCreateFlagsKHR allFlags = {};
+  };
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+
+  enum class WaylandSurfaceCreateFlagBitsKHR : VkWaylandSurfaceCreateFlagsKHR
+  {
+  };
+
+  // wrapper using for bitmask VkWaylandSurfaceCreateFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWaylandSurfaceCreateFlagsKHR.html
+  using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<WaylandSurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static
VULKAN_HPP_CONST_OR_CONSTEXPR WaylandSurfaceCreateFlagsKHR allFlags = {}; + }; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + enum class AndroidSurfaceCreateFlagBitsKHR : VkAndroidSurfaceCreateFlagsKHR + { + }; + + // wrapper using for bitmask VkAndroidSurfaceCreateFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidSurfaceCreateFlagsKHR.html + using AndroidSurfaceCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AndroidSurfaceCreateFlagsKHR allFlags = {}; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + enum class Win32SurfaceCreateFlagBitsKHR : VkWin32SurfaceCreateFlagsKHR + { + }; + + // wrapper using for bitmask VkWin32SurfaceCreateFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWin32SurfaceCreateFlagsKHR.html + using Win32SurfaceCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR Win32SurfaceCreateFlagsKHR allFlags = {}; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + // wrapper class for enum VkDebugReportFlagBitsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugReportFlagBitsEXT.html + enum class DebugReportFlagBitsEXT : VkDebugReportFlagsEXT + { + eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT, + eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT, + ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, + eError = VK_DEBUG_REPORT_ERROR_BIT_EXT, + eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT + }; + + // wrapper using for bitmask VkDebugReportFlagsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugReportFlagsEXT.html + using DebugReportFlagsEXT = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkDebugReportFlagBitsEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DebugReportFlagsEXT allFlags = DebugReportFlagBitsEXT::eInformation | DebugReportFlagBitsEXT::eWarning | + DebugReportFlagBitsEXT::ePerformanceWarning | DebugReportFlagBitsEXT::eError | + DebugReportFlagBitsEXT::eDebug; + }; + + // wrapper class for enum VkDebugReportObjectTypeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugReportObjectTypeEXT.html + enum class DebugReportObjectTypeEXT + { + eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, + eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, + ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, + eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, + eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, + eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, + eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, + eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, + eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, + eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, + eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, + eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, + eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, + eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, + eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, + eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, + ePipelineCache = 
VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, + ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, + eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, + ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, + eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, + eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, + eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, + eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, + eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, + eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, + eSurfaceKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, + eSwapchainKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, + eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, + eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, + eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, + eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, + eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, + eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT, + eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, + eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT, + eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, + eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT, + eCuModuleNVX = VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT, + eCuFunctionNVX = VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT, + eAccelerationStructureKHR = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT, + eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eCudaModuleNV = VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_MODULE_NV_EXT, + eCudaFunctionNV = VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_FUNCTION_NV_EXT, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eBufferCollectionFUCHSIA = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + }; + + //=== VK_AMD_rasterization_order === + + // wrapper class for enum VkRasterizationOrderAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRasterizationOrderAMD.html + enum class RasterizationOrderAMD + { + eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD, + eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD + }; + + //=== VK_KHR_video_queue === + + // wrapper class for enum VkVideoCodecOperationFlagBitsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoCodecOperationFlagBitsKHR.html + enum class VideoCodecOperationFlagBitsKHR : VkVideoCodecOperationFlagsKHR + { + eNone = VK_VIDEO_CODEC_OPERATION_NONE_KHR, + eEncodeH264 = VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR, + eEncodeH265 = VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR, + eDecodeH264 = VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR, + eDecodeH265 = VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR, + eDecodeAv1 = VK_VIDEO_CODEC_OPERATION_DECODE_AV1_BIT_KHR, + eEncodeAv1 = VK_VIDEO_CODEC_OPERATION_ENCODE_AV1_BIT_KHR + }; + + // wrapper using for bitmask VkVideoCodecOperationFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoCodecOperationFlagsKHR.html + using VideoCodecOperationFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = 
VkVideoCodecOperationFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoCodecOperationFlagsKHR allFlags = + VideoCodecOperationFlagBitsKHR::eNone | VideoCodecOperationFlagBitsKHR::eEncodeH264 | VideoCodecOperationFlagBitsKHR::eEncodeH265 | + VideoCodecOperationFlagBitsKHR::eDecodeH264 | VideoCodecOperationFlagBitsKHR::eDecodeH265 | VideoCodecOperationFlagBitsKHR::eDecodeAv1 | + VideoCodecOperationFlagBitsKHR::eEncodeAv1; + }; + + // wrapper class for enum VkVideoChromaSubsamplingFlagBitsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoChromaSubsamplingFlagBitsKHR.html + enum class VideoChromaSubsamplingFlagBitsKHR : VkVideoChromaSubsamplingFlagsKHR + { + eInvalid = VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_KHR, + eMonochrome = VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR, + e420 = VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR, + e422 = VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR, + e444 = VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR + }; + + // wrapper using for bitmask VkVideoChromaSubsamplingFlagsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoChromaSubsamplingFlagsKHR.html + using VideoChromaSubsamplingFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoChromaSubsamplingFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoChromaSubsamplingFlagsKHR allFlags = + VideoChromaSubsamplingFlagBitsKHR::eInvalid | VideoChromaSubsamplingFlagBitsKHR::eMonochrome | VideoChromaSubsamplingFlagBitsKHR::e420 | + VideoChromaSubsamplingFlagBitsKHR::e422 | VideoChromaSubsamplingFlagBitsKHR::e444; + }; + + // wrapper class for enum VkVideoComponentBitDepthFlagBitsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoComponentBitDepthFlagBitsKHR.html + enum class VideoComponentBitDepthFlagBitsKHR : VkVideoComponentBitDepthFlagsKHR + { + eInvalid = VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR, + e8 = VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR, + e10 = VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR, + e12 = VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR + }; + + // wrapper using for bitmask VkVideoComponentBitDepthFlagsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoComponentBitDepthFlagsKHR.html + using VideoComponentBitDepthFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoComponentBitDepthFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoComponentBitDepthFlagsKHR allFlags = + VideoComponentBitDepthFlagBitsKHR::eInvalid | VideoComponentBitDepthFlagBitsKHR::e8 | VideoComponentBitDepthFlagBitsKHR::e10 | + VideoComponentBitDepthFlagBitsKHR::e12; + }; + + // wrapper class for enum VkVideoCapabilityFlagBitsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoCapabilityFlagBitsKHR.html + enum class VideoCapabilityFlagBitsKHR : VkVideoCapabilityFlagsKHR + { + eProtectedContent = VK_VIDEO_CAPABILITY_PROTECTED_CONTENT_BIT_KHR, + eSeparateReferenceImages = VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR + }; + + // wrapper using for bitmask VkVideoCapabilityFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoCapabilityFlagsKHR.html + using VideoCapabilityFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoCapabilityFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = 
true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoCapabilityFlagsKHR allFlags = + VideoCapabilityFlagBitsKHR::eProtectedContent | VideoCapabilityFlagBitsKHR::eSeparateReferenceImages; + }; + + // wrapper class for enum VkVideoSessionCreateFlagBitsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionCreateFlagBitsKHR.html + enum class VideoSessionCreateFlagBitsKHR : VkVideoSessionCreateFlagsKHR + { + eProtectedContent = VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR, + eAllowEncodeParameterOptimizations = VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_PARAMETER_OPTIMIZATIONS_BIT_KHR, + eInlineQueries = VK_VIDEO_SESSION_CREATE_INLINE_QUERIES_BIT_KHR, + eAllowEncodeQuantizationDeltaMap = VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR, + eAllowEncodeEmphasisMap = VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_EMPHASIS_MAP_BIT_KHR, + eInlineSessionParameters = VK_VIDEO_SESSION_CREATE_INLINE_SESSION_PARAMETERS_BIT_KHR + }; + + // wrapper using for bitmask VkVideoSessionCreateFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionCreateFlagsKHR.html + using VideoSessionCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoSessionCreateFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoSessionCreateFlagsKHR allFlags = + VideoSessionCreateFlagBitsKHR::eProtectedContent | VideoSessionCreateFlagBitsKHR::eAllowEncodeParameterOptimizations | + VideoSessionCreateFlagBitsKHR::eInlineQueries | VideoSessionCreateFlagBitsKHR::eAllowEncodeQuantizationDeltaMap | + VideoSessionCreateFlagBitsKHR::eAllowEncodeEmphasisMap | VideoSessionCreateFlagBitsKHR::eInlineSessionParameters; + }; + + // wrapper class for enum VkVideoCodingControlFlagBitsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoCodingControlFlagBitsKHR.html + enum class VideoCodingControlFlagBitsKHR : VkVideoCodingControlFlagsKHR + { + eReset = VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR, + eEncodeRateControl = VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR, + eEncodeQualityLevel = VK_VIDEO_CODING_CONTROL_ENCODE_QUALITY_LEVEL_BIT_KHR + }; + + // wrapper using for bitmask VkVideoCodingControlFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoCodingControlFlagsKHR.html + using VideoCodingControlFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoCodingControlFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoCodingControlFlagsKHR allFlags = + VideoCodingControlFlagBitsKHR::eReset | VideoCodingControlFlagBitsKHR::eEncodeRateControl | VideoCodingControlFlagBitsKHR::eEncodeQualityLevel; + }; + + // wrapper class for enum VkQueryResultStatusKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryResultStatusKHR.html + enum class QueryResultStatusKHR + { + eError = VK_QUERY_RESULT_STATUS_ERROR_KHR, + eNotReady = VK_QUERY_RESULT_STATUS_NOT_READY_KHR, + eComplete = VK_QUERY_RESULT_STATUS_COMPLETE_KHR, + eInsufficientBitstreamBufferRange = VK_QUERY_RESULT_STATUS_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_KHR + }; + + // wrapper class for enum VkVideoSessionParametersCreateFlagBitsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionParametersCreateFlagBitsKHR.html + enum class VideoSessionParametersCreateFlagBitsKHR : VkVideoSessionParametersCreateFlagsKHR + { + eQuantizationMapCompatible = 
VK_VIDEO_SESSION_PARAMETERS_CREATE_QUANTIZATION_MAP_COMPATIBLE_BIT_KHR
+  };
+
+  // wrapper using for bitmask VkVideoSessionParametersCreateFlagsKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionParametersCreateFlagsKHR.html
+  using VideoSessionParametersCreateFlagsKHR = Flags<VideoSessionParametersCreateFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoSessionParametersCreateFlagBitsKHR>
+  {
+    using WrappedType = VkVideoSessionParametersCreateFlagBitsKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoSessionParametersCreateFlagsKHR allFlags = VideoSessionParametersCreateFlagBitsKHR::eQuantizationMapCompatible;
+  };
+
+  enum class VideoBeginCodingFlagBitsKHR : VkVideoBeginCodingFlagsKHR
+  {
+  };
+
+  // wrapper using for bitmask VkVideoBeginCodingFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoBeginCodingFlagsKHR.html
+  using VideoBeginCodingFlagsKHR = Flags<VideoBeginCodingFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoBeginCodingFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoBeginCodingFlagsKHR allFlags = {};
+  };
+
+  enum class VideoEndCodingFlagBitsKHR : VkVideoEndCodingFlagsKHR
+  {
+  };
+
+  // wrapper using for bitmask VkVideoEndCodingFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEndCodingFlagsKHR.html
+  using VideoEndCodingFlagsKHR = Flags<VideoEndCodingFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoEndCodingFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEndCodingFlagsKHR allFlags = {};
+  };
+
+  //=== VK_KHR_video_decode_queue ===
+
+  // wrapper class for enum VkVideoDecodeCapabilityFlagBitsKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeCapabilityFlagBitsKHR.html
+  enum class VideoDecodeCapabilityFlagBitsKHR : VkVideoDecodeCapabilityFlagsKHR
+  {
+    eDpbAndOutputCoincide = VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_COINCIDE_BIT_KHR,
+    eDpbAndOutputDistinct = VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR
+  };
+
+  // wrapper using for bitmask VkVideoDecodeCapabilityFlagsKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeCapabilityFlagsKHR.html
+  using VideoDecodeCapabilityFlagsKHR = Flags<VideoDecodeCapabilityFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoDecodeCapabilityFlagBitsKHR>
+  {
+    using WrappedType = VkVideoDecodeCapabilityFlagBitsKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoDecodeCapabilityFlagsKHR allFlags =
+      VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputCoincide | VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputDistinct;
+  };
+
+  // wrapper class for enum VkVideoDecodeUsageFlagBitsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeUsageFlagBitsKHR.html
+  enum class VideoDecodeUsageFlagBitsKHR : VkVideoDecodeUsageFlagsKHR
+  {
+    eDefault = VK_VIDEO_DECODE_USAGE_DEFAULT_KHR,
+    eTranscoding = VK_VIDEO_DECODE_USAGE_TRANSCODING_BIT_KHR,
+    eOffline = VK_VIDEO_DECODE_USAGE_OFFLINE_BIT_KHR,
+    eStreaming = VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR
+  };
+
+  // wrapper using for bitmask VkVideoDecodeUsageFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeUsageFlagsKHR.html
+  using VideoDecodeUsageFlagsKHR = Flags<VideoDecodeUsageFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoDecodeUsageFlagBitsKHR>
+  {
+    using WrappedType = VkVideoDecodeUsageFlagBitsKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoDecodeUsageFlagsKHR allFlags = VideoDecodeUsageFlagBitsKHR::eDefault |
VideoDecodeUsageFlagBitsKHR::eTranscoding | + VideoDecodeUsageFlagBitsKHR::eOffline | VideoDecodeUsageFlagBitsKHR::eStreaming; + }; + + enum class VideoDecodeFlagBitsKHR : VkVideoDecodeFlagsKHR + { + }; + + // wrapper using for bitmask VkVideoDecodeFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeFlagsKHR.html + using VideoDecodeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoDecodeFlagsKHR allFlags = {}; + }; + + //=== VK_EXT_transform_feedback === + + enum class PipelineRasterizationStateStreamCreateFlagBitsEXT : VkPipelineRasterizationStateStreamCreateFlagsEXT + { + }; + + // wrapper using for bitmask VkPipelineRasterizationStateStreamCreateFlagsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationStateStreamCreateFlagsEXT.html + using PipelineRasterizationStateStreamCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationStateStreamCreateFlagsEXT allFlags = {}; + }; + + //=== VK_KHR_video_encode_h264 === + + // wrapper class for enum VkVideoEncodeH264CapabilityFlagBitsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264CapabilityFlagBitsKHR.html + enum class VideoEncodeH264CapabilityFlagBitsKHR : VkVideoEncodeH264CapabilityFlagsKHR + { + eHrdCompliance = VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_KHR, + ePredictionWeightTableGenerated = VK_VIDEO_ENCODE_H264_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR, + eRowUnalignedSlice = VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_KHR, + eDifferentSliceType = VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_KHR, + eBFrameInL0List = VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR, + eBFrameInL1List = VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR, + ePerPictureTypeMinMaxQp = VK_VIDEO_ENCODE_H264_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR, + ePerSliceConstantQp = VK_VIDEO_ENCODE_H264_CAPABILITY_PER_SLICE_CONSTANT_QP_BIT_KHR, + eGeneratePrefixNalu = VK_VIDEO_ENCODE_H264_CAPABILITY_GENERATE_PREFIX_NALU_BIT_KHR, + eMbQpDiffWraparound = VK_VIDEO_ENCODE_H264_CAPABILITY_MB_QP_DIFF_WRAPAROUND_BIT_KHR + }; + + // wrapper using for bitmask VkVideoEncodeH264CapabilityFlagsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264CapabilityFlagsKHR.html + using VideoEncodeH264CapabilityFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoEncodeH264CapabilityFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH264CapabilityFlagsKHR allFlags = + VideoEncodeH264CapabilityFlagBitsKHR::eHrdCompliance | VideoEncodeH264CapabilityFlagBitsKHR::ePredictionWeightTableGenerated | + VideoEncodeH264CapabilityFlagBitsKHR::eRowUnalignedSlice | VideoEncodeH264CapabilityFlagBitsKHR::eDifferentSliceType | + VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL0List | VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL1List | + VideoEncodeH264CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp | VideoEncodeH264CapabilityFlagBitsKHR::ePerSliceConstantQp | + VideoEncodeH264CapabilityFlagBitsKHR::eGeneratePrefixNalu | VideoEncodeH264CapabilityFlagBitsKHR::eMbQpDiffWraparound; + }; + + // wrapper class for enum VkVideoEncodeH264StdFlagBitsKHR, 
see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264StdFlagBitsKHR.html + enum class VideoEncodeH264StdFlagBitsKHR : VkVideoEncodeH264StdFlagsKHR + { + eSeparateColorPlaneFlagSet = VK_VIDEO_ENCODE_H264_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR, + eQpprimeYZeroTransformBypassFlagSet = VK_VIDEO_ENCODE_H264_STD_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_FLAG_SET_BIT_KHR, + eScalingMatrixPresentFlagSet = VK_VIDEO_ENCODE_H264_STD_SCALING_MATRIX_PRESENT_FLAG_SET_BIT_KHR, + eChromaQpIndexOffset = VK_VIDEO_ENCODE_H264_STD_CHROMA_QP_INDEX_OFFSET_BIT_KHR, + eSecondChromaQpIndexOffset = VK_VIDEO_ENCODE_H264_STD_SECOND_CHROMA_QP_INDEX_OFFSET_BIT_KHR, + ePicInitQpMinus26 = VK_VIDEO_ENCODE_H264_STD_PIC_INIT_QP_MINUS26_BIT_KHR, + eWeightedPredFlagSet = VK_VIDEO_ENCODE_H264_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR, + eWeightedBipredIdcExplicit = VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_EXPLICIT_BIT_KHR, + eWeightedBipredIdcImplicit = VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_IMPLICIT_BIT_KHR, + eTransform8X8ModeFlagSet = VK_VIDEO_ENCODE_H264_STD_TRANSFORM_8X8_MODE_FLAG_SET_BIT_KHR, + eDirectSpatialMvPredFlagUnset = VK_VIDEO_ENCODE_H264_STD_DIRECT_SPATIAL_MV_PRED_FLAG_UNSET_BIT_KHR, + eEntropyCodingModeFlagUnset = VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_UNSET_BIT_KHR, + eEntropyCodingModeFlagSet = VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_SET_BIT_KHR, + eDirect8X8InferenceFlagUnset = VK_VIDEO_ENCODE_H264_STD_DIRECT_8X8_INFERENCE_FLAG_UNSET_BIT_KHR, + eConstrainedIntraPredFlagSet = VK_VIDEO_ENCODE_H264_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR, + eDeblockingFilterDisabled = VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_DISABLED_BIT_KHR, + eDeblockingFilterEnabled = VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_ENABLED_BIT_KHR, + eDeblockingFilterPartial = VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_PARTIAL_BIT_KHR, + eSliceQpDelta = VK_VIDEO_ENCODE_H264_STD_SLICE_QP_DELTA_BIT_KHR, + eDifferentSliceQpDelta = VK_VIDEO_ENCODE_H264_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR + }; + + // wrapper using for bitmask VkVideoEncodeH264StdFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264StdFlagsKHR.html + using VideoEncodeH264StdFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoEncodeH264StdFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH264StdFlagsKHR allFlags = + VideoEncodeH264StdFlagBitsKHR::eSeparateColorPlaneFlagSet | VideoEncodeH264StdFlagBitsKHR::eQpprimeYZeroTransformBypassFlagSet | + VideoEncodeH264StdFlagBitsKHR::eScalingMatrixPresentFlagSet | VideoEncodeH264StdFlagBitsKHR::eChromaQpIndexOffset | + VideoEncodeH264StdFlagBitsKHR::eSecondChromaQpIndexOffset | VideoEncodeH264StdFlagBitsKHR::ePicInitQpMinus26 | + VideoEncodeH264StdFlagBitsKHR::eWeightedPredFlagSet | VideoEncodeH264StdFlagBitsKHR::eWeightedBipredIdcExplicit | + VideoEncodeH264StdFlagBitsKHR::eWeightedBipredIdcImplicit | VideoEncodeH264StdFlagBitsKHR::eTransform8X8ModeFlagSet | + VideoEncodeH264StdFlagBitsKHR::eDirectSpatialMvPredFlagUnset | VideoEncodeH264StdFlagBitsKHR::eEntropyCodingModeFlagUnset | + VideoEncodeH264StdFlagBitsKHR::eEntropyCodingModeFlagSet | VideoEncodeH264StdFlagBitsKHR::eDirect8X8InferenceFlagUnset | + VideoEncodeH264StdFlagBitsKHR::eConstrainedIntraPredFlagSet | VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterDisabled | + VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterEnabled | VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterPartial | + 
VideoEncodeH264StdFlagBitsKHR::eSliceQpDelta | VideoEncodeH264StdFlagBitsKHR::eDifferentSliceQpDelta; + }; + + // wrapper class for enum VkVideoEncodeH264RateControlFlagBitsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264RateControlFlagBitsKHR.html + enum class VideoEncodeH264RateControlFlagBitsKHR : VkVideoEncodeH264RateControlFlagsKHR + { + eAttemptHrdCompliance = VK_VIDEO_ENCODE_H264_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR, + eRegularGop = VK_VIDEO_ENCODE_H264_RATE_CONTROL_REGULAR_GOP_BIT_KHR, + eReferencePatternFlat = VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR, + eReferencePatternDyadic = VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR, + eTemporalLayerPatternDyadic = VK_VIDEO_ENCODE_H264_RATE_CONTROL_TEMPORAL_LAYER_PATTERN_DYADIC_BIT_KHR + }; + + // wrapper using for bitmask VkVideoEncodeH264RateControlFlagsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264RateControlFlagsKHR.html + using VideoEncodeH264RateControlFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoEncodeH264RateControlFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH264RateControlFlagsKHR allFlags = + VideoEncodeH264RateControlFlagBitsKHR::eAttemptHrdCompliance | VideoEncodeH264RateControlFlagBitsKHR::eRegularGop | + VideoEncodeH264RateControlFlagBitsKHR::eReferencePatternFlat | VideoEncodeH264RateControlFlagBitsKHR::eReferencePatternDyadic | + VideoEncodeH264RateControlFlagBitsKHR::eTemporalLayerPatternDyadic; + }; + + //=== VK_KHR_video_encode_h265 === + + // wrapper class for enum VkVideoEncodeH265CapabilityFlagBitsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265CapabilityFlagBitsKHR.html + enum class VideoEncodeH265CapabilityFlagBitsKHR : VkVideoEncodeH265CapabilityFlagsKHR + { + eHrdCompliance = VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_KHR, + ePredictionWeightTableGenerated = VK_VIDEO_ENCODE_H265_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR, + eRowUnalignedSliceSegment = VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_KHR, + eDifferentSliceSegmentType = VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_SEGMENT_TYPE_BIT_KHR, + eBFrameInL0List = VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR, + eBFrameInL1List = VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR, + ePerPictureTypeMinMaxQp = VK_VIDEO_ENCODE_H265_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR, + ePerSliceSegmentConstantQp = VK_VIDEO_ENCODE_H265_CAPABILITY_PER_SLICE_SEGMENT_CONSTANT_QP_BIT_KHR, + eMultipleTilesPerSliceSegment = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILES_PER_SLICE_SEGMENT_BIT_KHR, + eMultipleSliceSegmentsPerTile = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_SEGMENTS_PER_TILE_BIT_KHR, + eCuQpDiffWraparound = VK_VIDEO_ENCODE_H265_CAPABILITY_CU_QP_DIFF_WRAPAROUND_BIT_KHR + }; + + // wrapper using for bitmask VkVideoEncodeH265CapabilityFlagsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265CapabilityFlagsKHR.html + using VideoEncodeH265CapabilityFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoEncodeH265CapabilityFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265CapabilityFlagsKHR allFlags = + VideoEncodeH265CapabilityFlagBitsKHR::eHrdCompliance | 
VideoEncodeH265CapabilityFlagBitsKHR::ePredictionWeightTableGenerated | + VideoEncodeH265CapabilityFlagBitsKHR::eRowUnalignedSliceSegment | VideoEncodeH265CapabilityFlagBitsKHR::eDifferentSliceSegmentType | + VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL0List | VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL1List | + VideoEncodeH265CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp | VideoEncodeH265CapabilityFlagBitsKHR::ePerSliceSegmentConstantQp | + VideoEncodeH265CapabilityFlagBitsKHR::eMultipleTilesPerSliceSegment | VideoEncodeH265CapabilityFlagBitsKHR::eMultipleSliceSegmentsPerTile | + VideoEncodeH265CapabilityFlagBitsKHR::eCuQpDiffWraparound; + }; + + // wrapper class for enum VkVideoEncodeH265StdFlagBitsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265StdFlagBitsKHR.html + enum class VideoEncodeH265StdFlagBitsKHR : VkVideoEncodeH265StdFlagsKHR + { + eSeparateColorPlaneFlagSet = VK_VIDEO_ENCODE_H265_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR, + eSampleAdaptiveOffsetEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_SAMPLE_ADAPTIVE_OFFSET_ENABLED_FLAG_SET_BIT_KHR, + eScalingListDataPresentFlagSet = VK_VIDEO_ENCODE_H265_STD_SCALING_LIST_DATA_PRESENT_FLAG_SET_BIT_KHR, + ePcmEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_PCM_ENABLED_FLAG_SET_BIT_KHR, + eSpsTemporalMvpEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_SPS_TEMPORAL_MVP_ENABLED_FLAG_SET_BIT_KHR, + eInitQpMinus26 = VK_VIDEO_ENCODE_H265_STD_INIT_QP_MINUS26_BIT_KHR, + eWeightedPredFlagSet = VK_VIDEO_ENCODE_H265_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR, + eWeightedBipredFlagSet = VK_VIDEO_ENCODE_H265_STD_WEIGHTED_BIPRED_FLAG_SET_BIT_KHR, + eLog2ParallelMergeLevelMinus2 = VK_VIDEO_ENCODE_H265_STD_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_KHR, + eSignDataHidingEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_SIGN_DATA_HIDING_ENABLED_FLAG_SET_BIT_KHR, + eTransformSkipEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_SET_BIT_KHR, + eTransformSkipEnabledFlagUnset = VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_UNSET_BIT_KHR, + ePpsSliceChromaQpOffsetsPresentFlagSet = VK_VIDEO_ENCODE_H265_STD_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_FLAG_SET_BIT_KHR, + eTransquantBypassEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_TRANSQUANT_BYPASS_ENABLED_FLAG_SET_BIT_KHR, + eConstrainedIntraPredFlagSet = VK_VIDEO_ENCODE_H265_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR, + eEntropyCodingSyncEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_ENTROPY_CODING_SYNC_ENABLED_FLAG_SET_BIT_KHR, + eDeblockingFilterOverrideEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_SET_BIT_KHR, + eDependentSliceSegmentsEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENTS_ENABLED_FLAG_SET_BIT_KHR, + eDependentSliceSegmentFlagSet = VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENT_FLAG_SET_BIT_KHR, + eSliceQpDelta = VK_VIDEO_ENCODE_H265_STD_SLICE_QP_DELTA_BIT_KHR, + eDifferentSliceQpDelta = VK_VIDEO_ENCODE_H265_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR + }; + + // wrapper using for bitmask VkVideoEncodeH265StdFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265StdFlagsKHR.html + using VideoEncodeH265StdFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoEncodeH265StdFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265StdFlagsKHR allFlags = + VideoEncodeH265StdFlagBitsKHR::eSeparateColorPlaneFlagSet | VideoEncodeH265StdFlagBitsKHR::eSampleAdaptiveOffsetEnabledFlagSet | + 
VideoEncodeH265StdFlagBitsKHR::eScalingListDataPresentFlagSet | VideoEncodeH265StdFlagBitsKHR::ePcmEnabledFlagSet | + VideoEncodeH265StdFlagBitsKHR::eSpsTemporalMvpEnabledFlagSet | VideoEncodeH265StdFlagBitsKHR::eInitQpMinus26 | + VideoEncodeH265StdFlagBitsKHR::eWeightedPredFlagSet | VideoEncodeH265StdFlagBitsKHR::eWeightedBipredFlagSet | + VideoEncodeH265StdFlagBitsKHR::eLog2ParallelMergeLevelMinus2 | VideoEncodeH265StdFlagBitsKHR::eSignDataHidingEnabledFlagSet | + VideoEncodeH265StdFlagBitsKHR::eTransformSkipEnabledFlagSet | VideoEncodeH265StdFlagBitsKHR::eTransformSkipEnabledFlagUnset | + VideoEncodeH265StdFlagBitsKHR::ePpsSliceChromaQpOffsetsPresentFlagSet | VideoEncodeH265StdFlagBitsKHR::eTransquantBypassEnabledFlagSet | + VideoEncodeH265StdFlagBitsKHR::eConstrainedIntraPredFlagSet | VideoEncodeH265StdFlagBitsKHR::eEntropyCodingSyncEnabledFlagSet | + VideoEncodeH265StdFlagBitsKHR::eDeblockingFilterOverrideEnabledFlagSet | VideoEncodeH265StdFlagBitsKHR::eDependentSliceSegmentsEnabledFlagSet | + VideoEncodeH265StdFlagBitsKHR::eDependentSliceSegmentFlagSet | VideoEncodeH265StdFlagBitsKHR::eSliceQpDelta | + VideoEncodeH265StdFlagBitsKHR::eDifferentSliceQpDelta; + }; + + // wrapper class for enum VkVideoEncodeH265CtbSizeFlagBitsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265CtbSizeFlagBitsKHR.html + enum class VideoEncodeH265CtbSizeFlagBitsKHR : VkVideoEncodeH265CtbSizeFlagsKHR + { + e16 = VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_KHR, + e32 = VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_KHR, + e64 = VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_KHR + }; + + // wrapper using for bitmask VkVideoEncodeH265CtbSizeFlagsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265CtbSizeFlagsKHR.html + using VideoEncodeH265CtbSizeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoEncodeH265CtbSizeFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265CtbSizeFlagsKHR allFlags = + VideoEncodeH265CtbSizeFlagBitsKHR::e16 | VideoEncodeH265CtbSizeFlagBitsKHR::e32 | VideoEncodeH265CtbSizeFlagBitsKHR::e64; + }; + + // wrapper class for enum VkVideoEncodeH265TransformBlockSizeFlagBitsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265TransformBlockSizeFlagBitsKHR.html + enum class VideoEncodeH265TransformBlockSizeFlagBitsKHR : VkVideoEncodeH265TransformBlockSizeFlagsKHR + { + e4 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_KHR, + e8 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_KHR, + e16 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_KHR, + e32 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_KHR + }; + + // wrapper using for bitmask VkVideoEncodeH265TransformBlockSizeFlagsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265TransformBlockSizeFlagsKHR.html + using VideoEncodeH265TransformBlockSizeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoEncodeH265TransformBlockSizeFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265TransformBlockSizeFlagsKHR allFlags = + VideoEncodeH265TransformBlockSizeFlagBitsKHR::e4 | VideoEncodeH265TransformBlockSizeFlagBitsKHR::e8 | VideoEncodeH265TransformBlockSizeFlagBitsKHR::e16 | + VideoEncodeH265TransformBlockSizeFlagBitsKHR::e32; + }; + + // wrapper class for enum VkVideoEncodeH265RateControlFlagBitsKHR, see + 
// https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265RateControlFlagBitsKHR.html + enum class VideoEncodeH265RateControlFlagBitsKHR : VkVideoEncodeH265RateControlFlagsKHR + { + eAttemptHrdCompliance = VK_VIDEO_ENCODE_H265_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR, + eRegularGop = VK_VIDEO_ENCODE_H265_RATE_CONTROL_REGULAR_GOP_BIT_KHR, + eReferencePatternFlat = VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR, + eReferencePatternDyadic = VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR, + eTemporalSubLayerPatternDyadic = VK_VIDEO_ENCODE_H265_RATE_CONTROL_TEMPORAL_SUB_LAYER_PATTERN_DYADIC_BIT_KHR + }; + + // wrapper using for bitmask VkVideoEncodeH265RateControlFlagsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265RateControlFlagsKHR.html + using VideoEncodeH265RateControlFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoEncodeH265RateControlFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265RateControlFlagsKHR allFlags = + VideoEncodeH265RateControlFlagBitsKHR::eAttemptHrdCompliance | VideoEncodeH265RateControlFlagBitsKHR::eRegularGop | + VideoEncodeH265RateControlFlagBitsKHR::eReferencePatternFlat | VideoEncodeH265RateControlFlagBitsKHR::eReferencePatternDyadic | + VideoEncodeH265RateControlFlagBitsKHR::eTemporalSubLayerPatternDyadic; + }; + + //=== VK_KHR_video_decode_h264 === + + // wrapper class for enum VkVideoDecodeH264PictureLayoutFlagBitsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264PictureLayoutFlagBitsKHR.html + enum class VideoDecodeH264PictureLayoutFlagBitsKHR : VkVideoDecodeH264PictureLayoutFlagsKHR + { + eProgressive = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_KHR, + eInterlacedInterleavedLines = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_KHR, + eInterlacedSeparatePlanes = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_KHR + }; + + // wrapper using for bitmask VkVideoDecodeH264PictureLayoutFlagsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264PictureLayoutFlagsKHR.html + using VideoDecodeH264PictureLayoutFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoDecodeH264PictureLayoutFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoDecodeH264PictureLayoutFlagsKHR allFlags = VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive | + VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedInterleavedLines | + VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedSeparatePlanes; + }; + + //=== VK_AMD_shader_info === + + // wrapper class for enum VkShaderInfoTypeAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderInfoTypeAMD.html + enum class ShaderInfoTypeAMD + { + eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD, + eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD, + eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD + }; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + enum class StreamDescriptorSurfaceCreateFlagBitsGGP : VkStreamDescriptorSurfaceCreateFlagsGGP + { + }; + + // wrapper using for bitmask VkStreamDescriptorSurfaceCreateFlagsGGP, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkStreamDescriptorSurfaceCreateFlagsGGP.html + using 
StreamDescriptorSurfaceCreateFlagsGGP = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StreamDescriptorSurfaceCreateFlagsGGP allFlags = {}; + }; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + + // wrapper class for enum VkExternalMemoryHandleTypeFlagBitsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryHandleTypeFlagBitsNV.html + enum class ExternalMemoryHandleTypeFlagBitsNV : VkExternalMemoryHandleTypeFlagsNV + { + eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV, + eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV, + eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV, + eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV + }; + + // wrapper using for bitmask VkExternalMemoryHandleTypeFlagsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryHandleTypeFlagsNV.html + using ExternalMemoryHandleTypeFlagsNV = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkExternalMemoryHandleTypeFlagBitsNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryHandleTypeFlagsNV allFlags = + ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 | ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt | ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image | + ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt; + }; + + // wrapper class for enum VkExternalMemoryFeatureFlagBitsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryFeatureFlagBitsNV.html + enum class ExternalMemoryFeatureFlagBitsNV : VkExternalMemoryFeatureFlagsNV + { + eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV, + eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV, + eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV + }; + + // wrapper using for bitmask VkExternalMemoryFeatureFlagsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryFeatureFlagsNV.html + using ExternalMemoryFeatureFlagsNV = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkExternalMemoryFeatureFlagBitsNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryFeatureFlagsNV allFlags = + ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly | ExternalMemoryFeatureFlagBitsNV::eExportable | ExternalMemoryFeatureFlagBitsNV::eImportable; + }; + + //=== VK_EXT_validation_flags === + + // wrapper class for enum VkValidationCheckEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkValidationCheckEXT.html + enum class ValidationCheckEXT + { + eAll = VK_VALIDATION_CHECK_ALL_EXT, + eShaders = VK_VALIDATION_CHECK_SHADERS_EXT + }; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + enum class ViSurfaceCreateFlagBitsNN : VkViSurfaceCreateFlagsNN + { + }; + + // wrapper using for bitmask VkViSurfaceCreateFlagsNN, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkViSurfaceCreateFlagsNN.html + using ViSurfaceCreateFlagsNN = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ViSurfaceCreateFlagsNN allFlags = {}; + }; +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_EXT_conditional_rendering === + + // wrapper class for enum 
VkConditionalRenderingFlagBitsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkConditionalRenderingFlagBitsEXT.html + enum class ConditionalRenderingFlagBitsEXT : VkConditionalRenderingFlagsEXT + { + eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT + }; + + // wrapper using for bitmask VkConditionalRenderingFlagsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkConditionalRenderingFlagsEXT.html + using ConditionalRenderingFlagsEXT = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkConditionalRenderingFlagBitsEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ConditionalRenderingFlagsEXT allFlags = ConditionalRenderingFlagBitsEXT::eInverted; + }; + + //=== VK_EXT_display_surface_counter === + + // wrapper class for enum VkSurfaceCounterFlagBitsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCounterFlagBitsEXT.html + enum class SurfaceCounterFlagBitsEXT : VkSurfaceCounterFlagsEXT + { + eVblank = VK_SURFACE_COUNTER_VBLANK_BIT_EXT + }; + + // wrapper using for bitmask VkSurfaceCounterFlagsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCounterFlagsEXT.html + using SurfaceCounterFlagsEXT = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkSurfaceCounterFlagBitsEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SurfaceCounterFlagsEXT allFlags = SurfaceCounterFlagBitsEXT::eVblank; + }; + + //=== VK_EXT_display_control === + + // wrapper class for enum VkDisplayPowerStateEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPowerStateEXT.html + enum class DisplayPowerStateEXT + { + eOff = VK_DISPLAY_POWER_STATE_OFF_EXT, + eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT, + eOn = VK_DISPLAY_POWER_STATE_ON_EXT + }; + + // wrapper class for enum VkDeviceEventTypeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceEventTypeEXT.html + enum class DeviceEventTypeEXT + { + eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT + }; + + // wrapper class for enum VkDisplayEventTypeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayEventTypeEXT.html + enum class DisplayEventTypeEXT + { + eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT + }; + + //=== VK_NV_viewport_swizzle === + + // wrapper class for enum VkViewportCoordinateSwizzleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkViewportCoordinateSwizzleNV.html + enum class ViewportCoordinateSwizzleNV + { + ePositiveX = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, + eNegativeX = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV, + ePositiveY = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV, + eNegativeY = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV, + ePositiveZ = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV, + eNegativeZ = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV, + ePositiveW = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV, + eNegativeW = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV + }; + + enum class PipelineViewportSwizzleStateCreateFlagBitsNV : VkPipelineViewportSwizzleStateCreateFlagsNV + { + }; + + // wrapper using for bitmask VkPipelineViewportSwizzleStateCreateFlagsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportSwizzleStateCreateFlagsNV.html + using PipelineViewportSwizzleStateCreateFlagsNV = Flags; + + template <> + struct FlagTraits + { + static 
VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineViewportSwizzleStateCreateFlagsNV allFlags = {}; + }; + + //=== VK_EXT_discard_rectangles === + + // wrapper class for enum VkDiscardRectangleModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDiscardRectangleModeEXT.html + enum class DiscardRectangleModeEXT + { + eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, + eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT + }; + + enum class PipelineDiscardRectangleStateCreateFlagBitsEXT : VkPipelineDiscardRectangleStateCreateFlagsEXT + { + }; + + // wrapper using for bitmask VkPipelineDiscardRectangleStateCreateFlagsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineDiscardRectangleStateCreateFlagsEXT.html + using PipelineDiscardRectangleStateCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineDiscardRectangleStateCreateFlagsEXT allFlags = {}; + }; + + //=== VK_EXT_conservative_rasterization === + + // wrapper class for enum VkConservativeRasterizationModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkConservativeRasterizationModeEXT.html + enum class ConservativeRasterizationModeEXT + { + eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT, + eOverestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT, + eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT + }; + + enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT : VkPipelineRasterizationConservativeStateCreateFlagsEXT + { + }; + + // wrapper using for bitmask VkPipelineRasterizationConservativeStateCreateFlagsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationConservativeStateCreateFlagsEXT.html + using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationConservativeStateCreateFlagsEXT allFlags = {}; + }; + + //=== VK_EXT_depth_clip_enable === + + enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT : VkPipelineRasterizationDepthClipStateCreateFlagsEXT + { + }; + + // wrapper using for bitmask VkPipelineRasterizationDepthClipStateCreateFlagsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationDepthClipStateCreateFlagsEXT.html + using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationDepthClipStateCreateFlagsEXT allFlags = {}; + }; + + //=== VK_KHR_performance_query === + + // wrapper class for enum VkPerformanceCounterDescriptionFlagBitsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceCounterDescriptionFlagBitsKHR.html + enum class PerformanceCounterDescriptionFlagBitsKHR : VkPerformanceCounterDescriptionFlagsKHR + { + ePerformanceImpacting = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR, + eConcurrentlyImpacted = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR + }; + + // wrapper using for bitmask VkPerformanceCounterDescriptionFlagsKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceCounterDescriptionFlagsKHR.html + using PerformanceCounterDescriptionFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkPerformanceCounterDescriptionFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PerformanceCounterDescriptionFlagsKHR allFlags = + PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting | PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted; + }; + + // wrapper class for enum VkPerformanceCounterScopeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceCounterScopeKHR.html + enum class PerformanceCounterScopeKHR + { + eCommandBuffer = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, + eVkQueryScopeCommandBuffer = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR, + eRenderPass = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, + eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR, + eCommand = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, + eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR + }; + + // wrapper class for enum VkPerformanceCounterStorageKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceCounterStorageKHR.html + enum class PerformanceCounterStorageKHR + { + eInt32 = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR, + eInt64 = VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR, + eUint32 = VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR, + eUint64 = VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR, + eFloat32 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR, + eFloat64 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR + }; + + // wrapper class for enum VkPerformanceCounterUnitKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceCounterUnitKHR.html + enum class PerformanceCounterUnitKHR + { + eGeneric = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR, + ePercentage = VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR, + eNanoseconds = VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR, + eBytes = VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR, + eBytesPerSecond = VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR, + eKelvin = VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR, + eWatts = VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR, + eVolts = VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR, + eAmps = VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR, + eHertz = VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR, + eCycles = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR + }; + + enum class AcquireProfilingLockFlagBitsKHR : VkAcquireProfilingLockFlagsKHR + { + }; + + // wrapper using for bitmask VkAcquireProfilingLockFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAcquireProfilingLockFlagsKHR.html + using AcquireProfilingLockFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AcquireProfilingLockFlagsKHR allFlags = {}; + }; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + enum class IOSSurfaceCreateFlagBitsMVK : VkIOSSurfaceCreateFlagsMVK + { + }; + + // wrapper using for bitmask VkIOSSurfaceCreateFlagsMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIOSSurfaceCreateFlagsMVK.html + using IOSSurfaceCreateFlagsMVK = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR IOSSurfaceCreateFlagsMVK allFlags = {}; + }; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( 
VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+
+  enum class MacOSSurfaceCreateFlagBitsMVK : VkMacOSSurfaceCreateFlagsMVK
+  {
+  };
+
+  // wrapper using for bitmask VkMacOSSurfaceCreateFlagsMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMacOSSurfaceCreateFlagsMVK.html
+  using MacOSSurfaceCreateFlagsMVK = Flags<MacOSSurfaceCreateFlagBitsMVK>;
+
+  template <>
+  struct FlagTraits<MacOSSurfaceCreateFlagBitsMVK>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MacOSSurfaceCreateFlagsMVK allFlags = {};
+  };
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+
+  // wrapper class for enum VkDebugUtilsMessageSeverityFlagBitsEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsMessageSeverityFlagBitsEXT.html
+  enum class DebugUtilsMessageSeverityFlagBitsEXT : VkDebugUtilsMessageSeverityFlagsEXT
+  {
+    eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT,
+    eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT,
+    eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT,
+    eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT
+  };
+
+  // wrapper using for bitmask VkDebugUtilsMessageSeverityFlagsEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsMessageSeverityFlagsEXT.html
+  using DebugUtilsMessageSeverityFlagsEXT = Flags<DebugUtilsMessageSeverityFlagBitsEXT>;
+
+  template <>
+  struct FlagTraits<DebugUtilsMessageSeverityFlagBitsEXT>
+  {
+    using WrappedType = VkDebugUtilsMessageSeverityFlagBitsEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT allFlags =
+      DebugUtilsMessageSeverityFlagBitsEXT::eVerbose | DebugUtilsMessageSeverityFlagBitsEXT::eInfo | DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
+      DebugUtilsMessageSeverityFlagBitsEXT::eError;
+  };
+
+  // wrapper class for enum VkDebugUtilsMessageTypeFlagBitsEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsMessageTypeFlagBitsEXT.html
+  enum class DebugUtilsMessageTypeFlagBitsEXT : VkDebugUtilsMessageTypeFlagsEXT
+  {
+    eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT,
+    eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
+    ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT,
+    eDeviceAddressBinding = VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT
+  };
+
+  // wrapper using for bitmask VkDebugUtilsMessageTypeFlagsEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsMessageTypeFlagsEXT.html
+  using DebugUtilsMessageTypeFlagsEXT = Flags<DebugUtilsMessageTypeFlagBitsEXT>;
+
+  template <>
+  struct FlagTraits<DebugUtilsMessageTypeFlagBitsEXT>
+  {
+    using WrappedType = VkDebugUtilsMessageTypeFlagBitsEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessageTypeFlagsEXT allFlags =
+      DebugUtilsMessageTypeFlagBitsEXT::eGeneral | DebugUtilsMessageTypeFlagBitsEXT::eValidation | DebugUtilsMessageTypeFlagBitsEXT::ePerformance |
+      DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding;
+  };
+
+  enum class DebugUtilsMessengerCallbackDataFlagBitsEXT : VkDebugUtilsMessengerCallbackDataFlagsEXT
+  {
+  };
+
+  // wrapper using for bitmask VkDebugUtilsMessengerCallbackDataFlagsEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsMessengerCallbackDataFlagsEXT.html
+  using DebugUtilsMessengerCallbackDataFlagsEXT = Flags<DebugUtilsMessengerCallbackDataFlagBitsEXT>;
+
+  template <>
+  struct FlagTraits<DebugUtilsMessengerCallbackDataFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessengerCallbackDataFlagsEXT allFlags = {};
+
}; + + enum class DebugUtilsMessengerCreateFlagBitsEXT : VkDebugUtilsMessengerCreateFlagsEXT + { + }; + + // wrapper using for bitmask VkDebugUtilsMessengerCreateFlagsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsMessengerCreateFlagsEXT.html + using DebugUtilsMessengerCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessengerCreateFlagsEXT allFlags = {}; + }; + + //=== VK_EXT_blend_operation_advanced === + + // wrapper class for enum VkBlendOverlapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBlendOverlapEXT.html + enum class BlendOverlapEXT + { + eUncorrelated = VK_BLEND_OVERLAP_UNCORRELATED_EXT, + eDisjoint = VK_BLEND_OVERLAP_DISJOINT_EXT, + eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT + }; + + //=== VK_NV_fragment_coverage_to_color === + + enum class PipelineCoverageToColorStateCreateFlagBitsNV : VkPipelineCoverageToColorStateCreateFlagsNV + { + }; + + // wrapper using for bitmask VkPipelineCoverageToColorStateCreateFlagsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCoverageToColorStateCreateFlagsNV.html + using PipelineCoverageToColorStateCreateFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCoverageToColorStateCreateFlagsNV allFlags = {}; + }; + + //=== VK_KHR_acceleration_structure === + + // wrapper class for enum VkAccelerationStructureTypeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureTypeKHR.html + enum class AccelerationStructureTypeKHR + { + eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, + eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR, + eGeneric = VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR + }; + + using AccelerationStructureTypeNV = AccelerationStructureTypeKHR; + + // wrapper class for enum VkAccelerationStructureBuildTypeKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureBuildTypeKHR.html + enum class AccelerationStructureBuildTypeKHR + { + eHost = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR, + eDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR, + eHostOrDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR + }; + + // wrapper class for enum VkGeometryFlagBitsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryFlagBitsKHR.html + enum class GeometryFlagBitsKHR : VkGeometryFlagsKHR + { + eOpaque = VK_GEOMETRY_OPAQUE_BIT_KHR, + eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR + }; + + using GeometryFlagBitsNV = GeometryFlagBitsKHR; + + // wrapper using for bitmask VkGeometryFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryFlagsKHR.html + using GeometryFlagsKHR = Flags; + using GeometryFlagsNV = GeometryFlagsKHR; + + template <> + struct FlagTraits + { + using WrappedType = VkGeometryFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR GeometryFlagsKHR allFlags = GeometryFlagBitsKHR::eOpaque | GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation; + }; + + // wrapper class for enum VkGeometryInstanceFlagBitsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryInstanceFlagBitsKHR.html + enum class GeometryInstanceFlagBitsKHR : 
VkGeometryInstanceFlagsKHR + { + eTriangleFacingCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR, + eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV, + eTriangleFlipFacing = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR, + eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR, + eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR, + eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, + eForceOpacityMicromap2StateEXT = VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT, + eDisableOpacityMicromapsEXT = VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT + }; + + using GeometryInstanceFlagBitsNV = GeometryInstanceFlagBitsKHR; + + // wrapper using for bitmask VkGeometryInstanceFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryInstanceFlagsKHR.html + using GeometryInstanceFlagsKHR = Flags; + using GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR; + + template <> + struct FlagTraits + { + using WrappedType = VkGeometryInstanceFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR GeometryInstanceFlagsKHR allFlags = + GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable | GeometryInstanceFlagBitsKHR::eTriangleFlipFacing | GeometryInstanceFlagBitsKHR::eForceOpaque | + GeometryInstanceFlagBitsKHR::eForceNoOpaque | GeometryInstanceFlagBitsKHR::eForceOpacityMicromap2StateEXT | + GeometryInstanceFlagBitsKHR::eDisableOpacityMicromapsEXT; + }; + + // wrapper class for enum VkBuildAccelerationStructureFlagBitsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBuildAccelerationStructureFlagBitsKHR.html + enum class BuildAccelerationStructureFlagBitsKHR : VkBuildAccelerationStructureFlagsKHR + { + eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, + eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, + ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR, + ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR, + eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR, + eMotionNV = VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV, + eAllowOpacityMicromapUpdateEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT, + eAllowDisableOpacityMicromapsEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT, + eAllowOpacityMicromapDataUpdateEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eAllowDisplacementMicromapUpdateNV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISPLACEMENT_MICROMAP_UPDATE_NV, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eAllowDataAccess = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DATA_ACCESS_KHR + }; + + using BuildAccelerationStructureFlagBitsNV = BuildAccelerationStructureFlagBitsKHR; + + // wrapper using for bitmask VkBuildAccelerationStructureFlagsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBuildAccelerationStructureFlagsKHR.html + using BuildAccelerationStructureFlagsKHR = Flags; + using BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR; + + template <> + struct FlagTraits + { + using WrappedType = VkBuildAccelerationStructureFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR BuildAccelerationStructureFlagsKHR allFlags = + 
BuildAccelerationStructureFlagBitsKHR::eAllowUpdate | BuildAccelerationStructureFlagBitsKHR::eAllowCompaction | + BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace | BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild | + BuildAccelerationStructureFlagBitsKHR::eLowMemory | BuildAccelerationStructureFlagBitsKHR::eMotionNV | + BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapUpdateEXT | BuildAccelerationStructureFlagBitsKHR::eAllowDisableOpacityMicromapsEXT | + BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapDataUpdateEXT +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | BuildAccelerationStructureFlagBitsKHR::eAllowDisplacementMicromapUpdateNV +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | BuildAccelerationStructureFlagBitsKHR::eAllowDataAccess; + }; + + // wrapper class for enum VkCopyAccelerationStructureModeKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyAccelerationStructureModeKHR.html + enum class CopyAccelerationStructureModeKHR + { + eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR, + eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR, + eSerialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR, + eDeserialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR + }; + + using CopyAccelerationStructureModeNV = CopyAccelerationStructureModeKHR; + + // wrapper class for enum VkGeometryTypeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryTypeKHR.html + enum class GeometryTypeKHR + { + eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_KHR, + eAabbs = VK_GEOMETRY_TYPE_AABBS_KHR, + eInstances = VK_GEOMETRY_TYPE_INSTANCES_KHR, + eSpheresNV = VK_GEOMETRY_TYPE_SPHERES_NV, + eLinearSweptSpheresNV = VK_GEOMETRY_TYPE_LINEAR_SWEPT_SPHERES_NV + }; + + using GeometryTypeNV = GeometryTypeKHR; + + // wrapper class for enum VkAccelerationStructureCompatibilityKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureCompatibilityKHR.html + enum class AccelerationStructureCompatibilityKHR + { + eCompatible = VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR, + eIncompatible = VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR + }; + + // wrapper class for enum VkAccelerationStructureCreateFlagBitsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureCreateFlagBitsKHR.html + enum class AccelerationStructureCreateFlagBitsKHR : VkAccelerationStructureCreateFlagsKHR + { + eDeviceAddressCaptureReplay = VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR, + eDescriptorBufferCaptureReplayEXT = VK_ACCELERATION_STRUCTURE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT, + eMotionNV = VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV + }; + + // wrapper using for bitmask VkAccelerationStructureCreateFlagsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureCreateFlagsKHR.html + using AccelerationStructureCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkAccelerationStructureCreateFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AccelerationStructureCreateFlagsKHR allFlags = + AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay | AccelerationStructureCreateFlagBitsKHR::eDescriptorBufferCaptureReplayEXT | + AccelerationStructureCreateFlagBitsKHR::eMotionNV; + }; + + // wrapper class for enum VkBuildAccelerationStructureModeKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkBuildAccelerationStructureModeKHR.html + enum class BuildAccelerationStructureModeKHR + { + eBuild = VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR, + eUpdate = VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR + }; + + //=== VK_KHR_ray_tracing_pipeline === + + // wrapper class for enum VkRayTracingShaderGroupTypeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingShaderGroupTypeKHR.html + enum class RayTracingShaderGroupTypeKHR + { + eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR, + eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR, + eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR + }; + + using RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR; + + // wrapper class for enum VkShaderGroupShaderKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderGroupShaderKHR.html + enum class ShaderGroupShaderKHR + { + eGeneral = VK_SHADER_GROUP_SHADER_GENERAL_KHR, + eClosestHit = VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR, + eAnyHit = VK_SHADER_GROUP_SHADER_ANY_HIT_KHR, + eIntersection = VK_SHADER_GROUP_SHADER_INTERSECTION_KHR + }; + + //=== VK_NV_framebuffer_mixed_samples === + + // wrapper class for enum VkCoverageModulationModeNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCoverageModulationModeNV.html + enum class CoverageModulationModeNV + { + eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV, + eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV, + eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV, + eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV + }; + + enum class PipelineCoverageModulationStateCreateFlagBitsNV : VkPipelineCoverageModulationStateCreateFlagsNV + { + }; + + // wrapper using for bitmask VkPipelineCoverageModulationStateCreateFlagsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCoverageModulationStateCreateFlagsNV.html + using PipelineCoverageModulationStateCreateFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCoverageModulationStateCreateFlagsNV allFlags = {}; + }; + + //=== VK_EXT_validation_cache === + + // wrapper class for enum VkValidationCacheHeaderVersionEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkValidationCacheHeaderVersionEXT.html + enum class ValidationCacheHeaderVersionEXT + { + eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT + }; + + enum class ValidationCacheCreateFlagBitsEXT : VkValidationCacheCreateFlagsEXT + { + }; + + // wrapper using for bitmask VkValidationCacheCreateFlagsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkValidationCacheCreateFlagsEXT.html + using ValidationCacheCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ValidationCacheCreateFlagsEXT allFlags = {}; + }; + + //=== VK_NV_shading_rate_image === + + // wrapper class for enum VkShadingRatePaletteEntryNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShadingRatePaletteEntryNV.html + enum class ShadingRatePaletteEntryNV + { + eNoInvocations = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV, + e16InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV, + e8InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV, + e4InvocationsPerPixel = 
VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV, + e2InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV, + e1InvocationPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV, + e1InvocationPer2X1Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV, + e1InvocationPer1X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, + e1InvocationPer2X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV, + e1InvocationPer4X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV, + e1InvocationPer2X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV, + e1InvocationPer4X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV + }; + + // wrapper class for enum VkCoarseSampleOrderTypeNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCoarseSampleOrderTypeNV.html + enum class CoarseSampleOrderTypeNV + { + eDefault = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV, + eCustom = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, + ePixelMajor = VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV, + eSampleMajor = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV + }; + + //=== VK_NV_ray_tracing === + + // wrapper class for enum VkAccelerationStructureMemoryRequirementsTypeNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMemoryRequirementsTypeNV.html + enum class AccelerationStructureMemoryRequirementsTypeNV + { + eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV, + eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV, + eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV + }; + + //=== VK_AMD_pipeline_compiler_control === + + enum class PipelineCompilerControlFlagBitsAMD : VkPipelineCompilerControlFlagsAMD + { + }; + + // wrapper using for bitmask VkPipelineCompilerControlFlagsAMD, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCompilerControlFlagsAMD.html + using PipelineCompilerControlFlagsAMD = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCompilerControlFlagsAMD allFlags = {}; + }; + + //=== VK_AMD_memory_overallocation_behavior === + + // wrapper class for enum VkMemoryOverallocationBehaviorAMD, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryOverallocationBehaviorAMD.html + enum class MemoryOverallocationBehaviorAMD + { + eDefault = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD, + eAllowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD, + eDisallowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD + }; + + //=== VK_INTEL_performance_query === + + // wrapper class for enum VkPerformanceConfigurationTypeINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceConfigurationTypeINTEL.html + enum class PerformanceConfigurationTypeINTEL + { + eCommandQueueMetricsDiscoveryActivated = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL + }; + + // wrapper class for enum VkQueryPoolSamplingModeINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPoolSamplingModeINTEL.html + enum class QueryPoolSamplingModeINTEL + { + eManual = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL + }; + + // wrapper class for enum VkPerformanceOverrideTypeINTEL, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceOverrideTypeINTEL.html + enum class PerformanceOverrideTypeINTEL + { + eNullHardware = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL, + eFlushGpuCaches = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL + }; + + // wrapper class for enum VkPerformanceParameterTypeINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceParameterTypeINTEL.html + enum class PerformanceParameterTypeINTEL + { + eHwCountersSupported = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL, + eStreamMarkerValidBits = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL + }; + + // wrapper class for enum VkPerformanceValueTypeINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceValueTypeINTEL.html + enum class PerformanceValueTypeINTEL + { + eUint32 = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL, + eUint64 = VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL, + eFloat = VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL, + eBool = VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL, + eString = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL + }; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA : VkImagePipeSurfaceCreateFlagsFUCHSIA + { + }; + + // wrapper using for bitmask VkImagePipeSurfaceCreateFlagsFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImagePipeSurfaceCreateFlagsFUCHSIA.html + using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImagePipeSurfaceCreateFlagsFUCHSIA allFlags = {}; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + enum class MetalSurfaceCreateFlagBitsEXT : VkMetalSurfaceCreateFlagsEXT + { + }; + + // wrapper using for bitmask VkMetalSurfaceCreateFlagsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMetalSurfaceCreateFlagsEXT.html + using MetalSurfaceCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MetalSurfaceCreateFlagsEXT allFlags = {}; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + + // wrapper class for enum VkFragmentShadingRateCombinerOpKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkFragmentShadingRateCombinerOpKHR.html + enum class FragmentShadingRateCombinerOpKHR + { + eKeep = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR, + eReplace = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR, + eMin = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR, + eMax = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR, + eMul = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR + }; + + //=== VK_AMD_shader_core_properties2 === + + enum class ShaderCorePropertiesFlagBitsAMD : VkShaderCorePropertiesFlagsAMD + { + }; + + // wrapper using for bitmask VkShaderCorePropertiesFlagsAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderCorePropertiesFlagsAMD.html + using ShaderCorePropertiesFlagsAMD = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderCorePropertiesFlagsAMD allFlags = {}; + }; + + //=== VK_EXT_validation_features === + + // wrapper class for enum VkValidationFeatureEnableEXT, 
see https://registry.khronos.org/vulkan/specs/latest/man/html/VkValidationFeatureEnableEXT.html + enum class ValidationFeatureEnableEXT + { + eGpuAssisted = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT, + eGpuAssistedReserveBindingSlot = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT, + eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT, + eDebugPrintf = VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT, + eSynchronizationValidation = VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT + }; + + // wrapper class for enum VkValidationFeatureDisableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkValidationFeatureDisableEXT.html + enum class ValidationFeatureDisableEXT + { + eAll = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT, + eShaders = VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT, + eThreadSafety = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT, + eApiParameters = VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT, + eObjectLifetimes = VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT, + eCoreChecks = VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT, + eUniqueHandles = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT, + eShaderValidationCache = VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT + }; + + //=== VK_NV_coverage_reduction_mode === + + // wrapper class for enum VkCoverageReductionModeNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCoverageReductionModeNV.html + enum class CoverageReductionModeNV + { + eMerge = VK_COVERAGE_REDUCTION_MODE_MERGE_NV, + eTruncate = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV + }; + + enum class PipelineCoverageReductionStateCreateFlagBitsNV : VkPipelineCoverageReductionStateCreateFlagsNV + { + }; + + // wrapper using for bitmask VkPipelineCoverageReductionStateCreateFlagsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCoverageReductionStateCreateFlagsNV.html + using PipelineCoverageReductionStateCreateFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCoverageReductionStateCreateFlagsNV allFlags = {}; + }; + + //=== VK_EXT_provoking_vertex === + + // wrapper class for enum VkProvokingVertexModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkProvokingVertexModeEXT.html + enum class ProvokingVertexModeEXT + { + eFirstVertex = VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT, + eLastVertex = VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + // wrapper class for enum VkFullScreenExclusiveEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFullScreenExclusiveEXT.html + enum class FullScreenExclusiveEXT + { + eDefault = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT, + eAllowed = VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT, + eDisallowed = VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT, + eApplicationControlled = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT + }; + +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + + enum class HeadlessSurfaceCreateFlagBitsEXT : VkHeadlessSurfaceCreateFlagsEXT + { + }; + + // wrapper using for bitmask VkHeadlessSurfaceCreateFlagsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkHeadlessSurfaceCreateFlagsEXT.html + using HeadlessSurfaceCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + 
static VULKAN_HPP_CONST_OR_CONSTEXPR HeadlessSurfaceCreateFlagsEXT allFlags = {}; + }; + + //=== VK_KHR_pipeline_executable_properties === + + // wrapper class for enum VkPipelineExecutableStatisticFormatKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineExecutableStatisticFormatKHR.html + enum class PipelineExecutableStatisticFormatKHR + { + eBool32 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR, + eInt64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR, + eUint64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR, + eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR + }; + + //=== VK_EXT_surface_maintenance1 === + + // wrapper class for enum VkPresentScalingFlagBitsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentScalingFlagBitsEXT.html + enum class PresentScalingFlagBitsEXT : VkPresentScalingFlagsEXT + { + eOneToOne = VK_PRESENT_SCALING_ONE_TO_ONE_BIT_EXT, + eAspectRatioStretch = VK_PRESENT_SCALING_ASPECT_RATIO_STRETCH_BIT_EXT, + eStretch = VK_PRESENT_SCALING_STRETCH_BIT_EXT + }; + + // wrapper using for bitmask VkPresentScalingFlagsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentScalingFlagsEXT.html + using PresentScalingFlagsEXT = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkPresentScalingFlagBitsEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PresentScalingFlagsEXT allFlags = + PresentScalingFlagBitsEXT::eOneToOne | PresentScalingFlagBitsEXT::eAspectRatioStretch | PresentScalingFlagBitsEXT::eStretch; + }; + + // wrapper class for enum VkPresentGravityFlagBitsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentGravityFlagBitsEXT.html + enum class PresentGravityFlagBitsEXT : VkPresentGravityFlagsEXT + { + eMin = VK_PRESENT_GRAVITY_MIN_BIT_EXT, + eMax = VK_PRESENT_GRAVITY_MAX_BIT_EXT, + eCentered = VK_PRESENT_GRAVITY_CENTERED_BIT_EXT + }; + + // wrapper using for bitmask VkPresentGravityFlagsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentGravityFlagsEXT.html + using PresentGravityFlagsEXT = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkPresentGravityFlagBitsEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PresentGravityFlagsEXT allFlags = + PresentGravityFlagBitsEXT::eMin | PresentGravityFlagBitsEXT::eMax | PresentGravityFlagBitsEXT::eCentered; + }; + + //=== VK_NV_device_generated_commands === + + // wrapper class for enum VkIndirectStateFlagBitsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectStateFlagBitsNV.html + enum class IndirectStateFlagBitsNV : VkIndirectStateFlagsNV + { + eFlagFrontface = VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV + }; + + // wrapper using for bitmask VkIndirectStateFlagsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectStateFlagsNV.html + using IndirectStateFlagsNV = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkIndirectStateFlagBitsNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR IndirectStateFlagsNV allFlags = IndirectStateFlagBitsNV::eFlagFrontface; + }; + + // wrapper class for enum VkIndirectCommandsTokenTypeNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsTokenTypeNV.html + enum class IndirectCommandsTokenTypeNV + { + eShaderGroup = 
VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV, + eStateFlags = VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV, + eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV, + eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV, + ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV, + eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV, + eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV, + eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV, + eDrawMeshTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV, + ePipeline = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV, + eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NV + }; + + // wrapper class for enum VkIndirectCommandsLayoutUsageFlagBitsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutUsageFlagBitsNV.html + enum class IndirectCommandsLayoutUsageFlagBitsNV : VkIndirectCommandsLayoutUsageFlagsNV + { + eExplicitPreprocess = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV, + eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV, + eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV + }; + + // wrapper using for bitmask VkIndirectCommandsLayoutUsageFlagsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutUsageFlagsNV.html + using IndirectCommandsLayoutUsageFlagsNV = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkIndirectCommandsLayoutUsageFlagBitsNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV allFlags = IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess | + IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences | + IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences; + }; + + //=== VK_EXT_depth_bias_control === + + // wrapper class for enum VkDepthBiasRepresentationEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDepthBiasRepresentationEXT.html + enum class DepthBiasRepresentationEXT + { + eLeastRepresentableValueFormat = VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORMAT_EXT, + eLeastRepresentableValueForceUnorm = VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORCE_UNORM_EXT, + eFloat = VK_DEPTH_BIAS_REPRESENTATION_FLOAT_EXT + }; + + //=== VK_EXT_device_memory_report === + + // wrapper class for enum VkDeviceMemoryReportEventTypeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceMemoryReportEventTypeEXT.html + enum class DeviceMemoryReportEventTypeEXT + { + eAllocate = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT, + eFree = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT, + eImport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT, + eUnimport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT, + eAllocationFailed = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT + }; + + enum class DeviceMemoryReportFlagBitsEXT : VkDeviceMemoryReportFlagsEXT + { + }; + + // wrapper using for bitmask VkDeviceMemoryReportFlagsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceMemoryReportFlagsEXT.html + using DeviceMemoryReportFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceMemoryReportFlagsEXT allFlags = {}; + }; + + //=== VK_KHR_video_encode_queue === + + // wrapper class for enum 
VkVideoEncodeCapabilityFlagBitsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeCapabilityFlagBitsKHR.html + enum class VideoEncodeCapabilityFlagBitsKHR : VkVideoEncodeCapabilityFlagsKHR + { + ePrecedingExternallyEncodedBytes = VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR, + eInsufficientBitstreamBufferRangeDetection = VK_VIDEO_ENCODE_CAPABILITY_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_DETECTION_BIT_KHR, + eQuantizationDeltaMap = VK_VIDEO_ENCODE_CAPABILITY_QUANTIZATION_DELTA_MAP_BIT_KHR, + eEmphasisMap = VK_VIDEO_ENCODE_CAPABILITY_EMPHASIS_MAP_BIT_KHR + }; + + // wrapper using for bitmask VkVideoEncodeCapabilityFlagsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeCapabilityFlagsKHR.html + using VideoEncodeCapabilityFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoEncodeCapabilityFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeCapabilityFlagsKHR allFlags = + VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes | VideoEncodeCapabilityFlagBitsKHR::eInsufficientBitstreamBufferRangeDetection | + VideoEncodeCapabilityFlagBitsKHR::eQuantizationDeltaMap | VideoEncodeCapabilityFlagBitsKHR::eEmphasisMap; + }; + + // wrapper class for enum VkVideoEncodeFeedbackFlagBitsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeFeedbackFlagBitsKHR.html + enum class VideoEncodeFeedbackFlagBitsKHR : VkVideoEncodeFeedbackFlagsKHR + { + eBitstreamBufferOffset = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BUFFER_OFFSET_BIT_KHR, + eBitstreamBytesWritten = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BYTES_WRITTEN_BIT_KHR, + eBitstreamHasOverrides = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_HAS_OVERRIDES_BIT_KHR + }; + + // wrapper using for bitmask VkVideoEncodeFeedbackFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeFeedbackFlagsKHR.html + using VideoEncodeFeedbackFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoEncodeFeedbackFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeFeedbackFlagsKHR allFlags = VideoEncodeFeedbackFlagBitsKHR::eBitstreamBufferOffset | + VideoEncodeFeedbackFlagBitsKHR::eBitstreamBytesWritten | + VideoEncodeFeedbackFlagBitsKHR::eBitstreamHasOverrides; + }; + + // wrapper class for enum VkVideoEncodeUsageFlagBitsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeUsageFlagBitsKHR.html + enum class VideoEncodeUsageFlagBitsKHR : VkVideoEncodeUsageFlagsKHR + { + eDefault = VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR, + eTranscoding = VK_VIDEO_ENCODE_USAGE_TRANSCODING_BIT_KHR, + eStreaming = VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR, + eRecording = VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR, + eConferencing = VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR + }; + + // wrapper using for bitmask VkVideoEncodeUsageFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeUsageFlagsKHR.html + using VideoEncodeUsageFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoEncodeUsageFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeUsageFlagsKHR allFlags = VideoEncodeUsageFlagBitsKHR::eDefault | VideoEncodeUsageFlagBitsKHR::eTranscoding | + VideoEncodeUsageFlagBitsKHR::eStreaming | 
VideoEncodeUsageFlagBitsKHR::eRecording | + VideoEncodeUsageFlagBitsKHR::eConferencing; + }; + + // wrapper class for enum VkVideoEncodeContentFlagBitsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeContentFlagBitsKHR.html + enum class VideoEncodeContentFlagBitsKHR : VkVideoEncodeContentFlagsKHR + { + eDefault = VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR, + eCamera = VK_VIDEO_ENCODE_CONTENT_CAMERA_BIT_KHR, + eDesktop = VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR, + eRendered = VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR + }; + + // wrapper using for bitmask VkVideoEncodeContentFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeContentFlagsKHR.html + using VideoEncodeContentFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoEncodeContentFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeContentFlagsKHR allFlags = + VideoEncodeContentFlagBitsKHR::eDefault | VideoEncodeContentFlagBitsKHR::eCamera | VideoEncodeContentFlagBitsKHR::eDesktop | + VideoEncodeContentFlagBitsKHR::eRendered; + }; + + // wrapper class for enum VkVideoEncodeTuningModeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeTuningModeKHR.html + enum class VideoEncodeTuningModeKHR + { + eDefault = VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR, + eHighQuality = VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR, + eLowLatency = VK_VIDEO_ENCODE_TUNING_MODE_LOW_LATENCY_KHR, + eUltraLowLatency = VK_VIDEO_ENCODE_TUNING_MODE_ULTRA_LOW_LATENCY_KHR, + eLossless = VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR + }; + + // wrapper class for enum VkVideoEncodeRateControlModeFlagBitsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeRateControlModeFlagBitsKHR.html + enum class VideoEncodeRateControlModeFlagBitsKHR : VkVideoEncodeRateControlModeFlagsKHR + { + eDefault = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DEFAULT_KHR, + eDisabled = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DISABLED_BIT_KHR, + eCbr = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR, + eVbr = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR + }; + + // wrapper using for bitmask VkVideoEncodeRateControlModeFlagsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeRateControlModeFlagsKHR.html + using VideoEncodeRateControlModeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoEncodeRateControlModeFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeRateControlModeFlagsKHR allFlags = + VideoEncodeRateControlModeFlagBitsKHR::eDefault | VideoEncodeRateControlModeFlagBitsKHR::eDisabled | VideoEncodeRateControlModeFlagBitsKHR::eCbr | + VideoEncodeRateControlModeFlagBitsKHR::eVbr; + }; + + // wrapper class for enum VkVideoEncodeFlagBitsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeFlagBitsKHR.html + enum class VideoEncodeFlagBitsKHR : VkVideoEncodeFlagsKHR + { + eWithQuantizationDeltaMap = VK_VIDEO_ENCODE_WITH_QUANTIZATION_DELTA_MAP_BIT_KHR, + eWithEmphasisMap = VK_VIDEO_ENCODE_WITH_EMPHASIS_MAP_BIT_KHR + }; + + // wrapper using for bitmask VkVideoEncodeFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeFlagsKHR.html + using VideoEncodeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoEncodeFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask 
= true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeFlagsKHR allFlags = + VideoEncodeFlagBitsKHR::eWithQuantizationDeltaMap | VideoEncodeFlagBitsKHR::eWithEmphasisMap; + }; + + enum class VideoEncodeRateControlFlagBitsKHR : VkVideoEncodeRateControlFlagsKHR + { + }; + + // wrapper using for bitmask VkVideoEncodeRateControlFlagsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeRateControlFlagsKHR.html + using VideoEncodeRateControlFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeRateControlFlagsKHR allFlags = {}; + }; + + //=== VK_NV_device_diagnostics_config === + + // wrapper class for enum VkDeviceDiagnosticsConfigFlagBitsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceDiagnosticsConfigFlagBitsNV.html + enum class DeviceDiagnosticsConfigFlagBitsNV : VkDeviceDiagnosticsConfigFlagsNV + { + eEnableShaderDebugInfo = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV, + eEnableResourceTracking = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV, + eEnableAutomaticCheckpoints = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV, + eEnableShaderErrorReporting = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV + }; + + // wrapper using for bitmask VkDeviceDiagnosticsConfigFlagsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceDiagnosticsConfigFlagsNV.html + using DeviceDiagnosticsConfigFlagsNV = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkDeviceDiagnosticsConfigFlagBitsNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceDiagnosticsConfigFlagsNV allFlags = + DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo | DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking | + DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints | DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderErrorReporting; + }; + + //=== VK_QCOM_tile_shading === + + // wrapper class for enum VkTileShadingRenderPassFlagBitsQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkTileShadingRenderPassFlagBitsQCOM.html + enum class TileShadingRenderPassFlagBitsQCOM : VkTileShadingRenderPassFlagsQCOM + { + eEnable = VK_TILE_SHADING_RENDER_PASS_ENABLE_BIT_QCOM, + ePerTileExecution = VK_TILE_SHADING_RENDER_PASS_PER_TILE_EXECUTION_BIT_QCOM + }; + + // wrapper using for bitmask VkTileShadingRenderPassFlagsQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkTileShadingRenderPassFlagsQCOM.html + using TileShadingRenderPassFlagsQCOM = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkTileShadingRenderPassFlagBitsQCOM; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR TileShadingRenderPassFlagsQCOM allFlags = + TileShadingRenderPassFlagBitsQCOM::eEnable | TileShadingRenderPassFlagBitsQCOM::ePerTileExecution; + }; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + + // wrapper class for enum VkExportMetalObjectTypeFlagBitsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalObjectTypeFlagBitsEXT.html + enum class ExportMetalObjectTypeFlagBitsEXT : VkExportMetalObjectTypeFlagsEXT + { + eMetalDevice = VK_EXPORT_METAL_OBJECT_TYPE_METAL_DEVICE_BIT_EXT, + eMetalCommandQueue = 
VK_EXPORT_METAL_OBJECT_TYPE_METAL_COMMAND_QUEUE_BIT_EXT, + eMetalBuffer = VK_EXPORT_METAL_OBJECT_TYPE_METAL_BUFFER_BIT_EXT, + eMetalTexture = VK_EXPORT_METAL_OBJECT_TYPE_METAL_TEXTURE_BIT_EXT, + eMetalIosurface = VK_EXPORT_METAL_OBJECT_TYPE_METAL_IOSURFACE_BIT_EXT, + eMetalSharedEvent = VK_EXPORT_METAL_OBJECT_TYPE_METAL_SHARED_EVENT_BIT_EXT + }; + + // wrapper using for bitmask VkExportMetalObjectTypeFlagsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalObjectTypeFlagsEXT.html + using ExportMetalObjectTypeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkExportMetalObjectTypeFlagBitsEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExportMetalObjectTypeFlagsEXT allFlags = + ExportMetalObjectTypeFlagBitsEXT::eMetalDevice | ExportMetalObjectTypeFlagBitsEXT::eMetalCommandQueue | ExportMetalObjectTypeFlagBitsEXT::eMetalBuffer | + ExportMetalObjectTypeFlagBitsEXT::eMetalTexture | ExportMetalObjectTypeFlagBitsEXT::eMetalIosurface | ExportMetalObjectTypeFlagBitsEXT::eMetalSharedEvent; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_graphics_pipeline_library === + + // wrapper class for enum VkGraphicsPipelineLibraryFlagBitsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGraphicsPipelineLibraryFlagBitsEXT.html + enum class GraphicsPipelineLibraryFlagBitsEXT : VkGraphicsPipelineLibraryFlagsEXT + { + eVertexInputInterface = VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT, + ePreRasterizationShaders = VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT, + eFragmentShader = VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT, + eFragmentOutputInterface = VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT + }; + + // wrapper using for bitmask VkGraphicsPipelineLibraryFlagsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGraphicsPipelineLibraryFlagsEXT.html + using GraphicsPipelineLibraryFlagsEXT = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkGraphicsPipelineLibraryFlagBitsEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR GraphicsPipelineLibraryFlagsEXT allFlags = + GraphicsPipelineLibraryFlagBitsEXT::eVertexInputInterface | GraphicsPipelineLibraryFlagBitsEXT::ePreRasterizationShaders | + GraphicsPipelineLibraryFlagBitsEXT::eFragmentShader | GraphicsPipelineLibraryFlagBitsEXT::eFragmentOutputInterface; + }; + + //=== VK_NV_fragment_shading_rate_enums === + + // wrapper class for enum VkFragmentShadingRateNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFragmentShadingRateNV.html + enum class FragmentShadingRateNV + { + e1InvocationPerPixel = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV, + e1InvocationPer1X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV, + e1InvocationPer2X1Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV, + e1InvocationPer2X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV, + e1InvocationPer2X4Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV, + e1InvocationPer4X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV, + e1InvocationPer4X4Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV, + e2InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV, + e4InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV, + e8InvocationsPerPixel = 
VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV, + e16InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV, + eNoInvocations = VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV + }; + + // wrapper class for enum VkFragmentShadingRateTypeNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFragmentShadingRateTypeNV.html + enum class FragmentShadingRateTypeNV + { + eFragmentSize = VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV, + eEnums = VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV + }; + + //=== VK_NV_ray_tracing_motion_blur === + + // wrapper class for enum VkAccelerationStructureMotionInstanceTypeNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMotionInstanceTypeNV.html + enum class AccelerationStructureMotionInstanceTypeNV + { + eStatic = VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_STATIC_NV, + eMatrixMotion = VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MATRIX_MOTION_NV, + eSrtMotion = VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_SRT_MOTION_NV + }; + + enum class AccelerationStructureMotionInfoFlagBitsNV : VkAccelerationStructureMotionInfoFlagsNV + { + }; + + // wrapper using for bitmask VkAccelerationStructureMotionInfoFlagsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMotionInfoFlagsNV.html + using AccelerationStructureMotionInfoFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AccelerationStructureMotionInfoFlagsNV allFlags = {}; + }; + + enum class AccelerationStructureMotionInstanceFlagBitsNV : VkAccelerationStructureMotionInstanceFlagsNV + { + }; + + // wrapper using for bitmask VkAccelerationStructureMotionInstanceFlagsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMotionInstanceFlagsNV.html + using AccelerationStructureMotionInstanceFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AccelerationStructureMotionInstanceFlagsNV allFlags = {}; + }; + + //=== VK_EXT_image_compression_control === + + // wrapper class for enum VkImageCompressionFlagBitsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCompressionFlagBitsEXT.html + enum class ImageCompressionFlagBitsEXT : VkImageCompressionFlagsEXT + { + eDefault = VK_IMAGE_COMPRESSION_DEFAULT_EXT, + eFixedRateDefault = VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT, + eFixedRateExplicit = VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT, + eDisabled = VK_IMAGE_COMPRESSION_DISABLED_EXT + }; + + // wrapper using for bitmask VkImageCompressionFlagsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCompressionFlagsEXT.html + using ImageCompressionFlagsEXT = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkImageCompressionFlagBitsEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImageCompressionFlagsEXT allFlags = + ImageCompressionFlagBitsEXT::eDefault | ImageCompressionFlagBitsEXT::eFixedRateDefault | ImageCompressionFlagBitsEXT::eFixedRateExplicit | + ImageCompressionFlagBitsEXT::eDisabled; + }; + + // wrapper class for enum VkImageCompressionFixedRateFlagBitsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCompressionFixedRateFlagBitsEXT.html + enum class ImageCompressionFixedRateFlagBitsEXT : 
VkImageCompressionFixedRateFlagsEXT + { + eNone = VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT, + e1Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT, + e2Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT, + e3Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT, + e4Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT, + e5Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT, + e6Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT, + e7Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT, + e8Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT, + e9Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT, + e10Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT, + e11Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT, + e12Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT, + e13Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_13BPC_BIT_EXT, + e14Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_14BPC_BIT_EXT, + e15Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_15BPC_BIT_EXT, + e16Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_16BPC_BIT_EXT, + e17Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_17BPC_BIT_EXT, + e18Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_18BPC_BIT_EXT, + e19Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_19BPC_BIT_EXT, + e20Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_20BPC_BIT_EXT, + e21Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_21BPC_BIT_EXT, + e22Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_22BPC_BIT_EXT, + e23Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_23BPC_BIT_EXT, + e24Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT + }; + + // wrapper using for bitmask VkImageCompressionFixedRateFlagsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCompressionFixedRateFlagsEXT.html + using ImageCompressionFixedRateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkImageCompressionFixedRateFlagBitsEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImageCompressionFixedRateFlagsEXT allFlags = + ImageCompressionFixedRateFlagBitsEXT::eNone | ImageCompressionFixedRateFlagBitsEXT::e1Bpc | ImageCompressionFixedRateFlagBitsEXT::e2Bpc | + ImageCompressionFixedRateFlagBitsEXT::e3Bpc | ImageCompressionFixedRateFlagBitsEXT::e4Bpc | ImageCompressionFixedRateFlagBitsEXT::e5Bpc | + ImageCompressionFixedRateFlagBitsEXT::e6Bpc | ImageCompressionFixedRateFlagBitsEXT::e7Bpc | ImageCompressionFixedRateFlagBitsEXT::e8Bpc | + ImageCompressionFixedRateFlagBitsEXT::e9Bpc | ImageCompressionFixedRateFlagBitsEXT::e10Bpc | ImageCompressionFixedRateFlagBitsEXT::e11Bpc | + ImageCompressionFixedRateFlagBitsEXT::e12Bpc | ImageCompressionFixedRateFlagBitsEXT::e13Bpc | ImageCompressionFixedRateFlagBitsEXT::e14Bpc | + ImageCompressionFixedRateFlagBitsEXT::e15Bpc | ImageCompressionFixedRateFlagBitsEXT::e16Bpc | ImageCompressionFixedRateFlagBitsEXT::e17Bpc | + ImageCompressionFixedRateFlagBitsEXT::e18Bpc | ImageCompressionFixedRateFlagBitsEXT::e19Bpc | ImageCompressionFixedRateFlagBitsEXT::e20Bpc | + ImageCompressionFixedRateFlagBitsEXT::e21Bpc | ImageCompressionFixedRateFlagBitsEXT::e22Bpc | ImageCompressionFixedRateFlagBitsEXT::e23Bpc | + ImageCompressionFixedRateFlagBitsEXT::e24Bpc; + }; + + //=== VK_EXT_device_fault === + + // wrapper class for enum VkDeviceFaultAddressTypeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultAddressTypeEXT.html + enum class DeviceFaultAddressTypeEXT + { + eNone = VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT, + eReadInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT, + eWriteInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT, + 
eExecuteInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT, + eInstructionPointerUnknown = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT, + eInstructionPointerInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT, + eInstructionPointerFault = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT + }; + + // wrapper class for enum VkDeviceFaultVendorBinaryHeaderVersionEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultVendorBinaryHeaderVersionEXT.html + enum class DeviceFaultVendorBinaryHeaderVersionEXT + { + eOne = VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT + }; + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + enum class DirectFBSurfaceCreateFlagBitsEXT : VkDirectFBSurfaceCreateFlagsEXT + { + }; + + // wrapper using for bitmask VkDirectFBSurfaceCreateFlagsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDirectFBSurfaceCreateFlagsEXT.html + using DirectFBSurfaceCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DirectFBSurfaceCreateFlagsEXT allFlags = {}; + }; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_device_address_binding_report === + + // wrapper class for enum VkDeviceAddressBindingFlagBitsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceAddressBindingFlagBitsEXT.html + enum class DeviceAddressBindingFlagBitsEXT : VkDeviceAddressBindingFlagsEXT + { + eInternalObject = VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT + }; + + // wrapper using for bitmask VkDeviceAddressBindingFlagsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceAddressBindingFlagsEXT.html + using DeviceAddressBindingFlagsEXT = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkDeviceAddressBindingFlagBitsEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceAddressBindingFlagsEXT allFlags = DeviceAddressBindingFlagBitsEXT::eInternalObject; + }; + + // wrapper class for enum VkDeviceAddressBindingTypeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceAddressBindingTypeEXT.html + enum class DeviceAddressBindingTypeEXT + { + eBind = VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT, + eUnbind = VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT + }; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + // wrapper class for enum VkImageConstraintsInfoFlagBitsFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageConstraintsInfoFlagBitsFUCHSIA.html + enum class ImageConstraintsInfoFlagBitsFUCHSIA : VkImageConstraintsInfoFlagsFUCHSIA + { + eCpuReadRarely = VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_RARELY_FUCHSIA, + eCpuReadOften = VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_OFTEN_FUCHSIA, + eCpuWriteRarely = VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_RARELY_FUCHSIA, + eCpuWriteOften = VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_OFTEN_FUCHSIA, + eProtectedOptional = VK_IMAGE_CONSTRAINTS_INFO_PROTECTED_OPTIONAL_FUCHSIA + }; + + // wrapper using for bitmask VkImageConstraintsInfoFlagsFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageConstraintsInfoFlagsFUCHSIA.html + using ImageConstraintsInfoFlagsFUCHSIA = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkImageConstraintsInfoFlagBitsFUCHSIA; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImageConstraintsInfoFlagsFUCHSIA allFlags = + ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely | ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften | + ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely | ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften | + ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional; + }; + + enum class ImageFormatConstraintsFlagBitsFUCHSIA : VkImageFormatConstraintsFlagsFUCHSIA + { + }; + + // wrapper using for bitmask VkImageFormatConstraintsFlagsFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageFormatConstraintsFlagsFUCHSIA.html + using ImageFormatConstraintsFlagsFUCHSIA = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImageFormatConstraintsFlagsFUCHSIA allFlags = {}; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_frame_boundary === + + // wrapper class for enum VkFrameBoundaryFlagBitsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFrameBoundaryFlagBitsEXT.html + enum class FrameBoundaryFlagBitsEXT : VkFrameBoundaryFlagsEXT + { + eFrameEnd = VK_FRAME_BOUNDARY_FRAME_END_BIT_EXT + }; + + // wrapper using for bitmask VkFrameBoundaryFlagsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFrameBoundaryFlagsEXT.html + using FrameBoundaryFlagsEXT = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkFrameBoundaryFlagBitsEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR FrameBoundaryFlagsEXT allFlags = FrameBoundaryFlagBitsEXT::eFrameEnd; + }; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + enum class ScreenSurfaceCreateFlagBitsQNX : VkScreenSurfaceCreateFlagsQNX + { + }; + + // wrapper using for bitmask VkScreenSurfaceCreateFlagsQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkScreenSurfaceCreateFlagsQNX.html + using ScreenSurfaceCreateFlagsQNX = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ScreenSurfaceCreateFlagsQNX allFlags = {}; + }; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_opacity_micromap === + + // wrapper class for enum VkMicromapTypeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapTypeEXT.html + enum class MicromapTypeEXT + { + eOpacityMicromap = VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eDisplacementMicromapNV = VK_MICROMAP_TYPE_DISPLACEMENT_MICROMAP_NV +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + + // wrapper class for enum VkBuildMicromapFlagBitsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBuildMicromapFlagBitsEXT.html + enum class BuildMicromapFlagBitsEXT : VkBuildMicromapFlagsEXT + { + ePreferFastTrace = VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT, + ePreferFastBuild = VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT, + eAllowCompaction = VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT + }; + + // wrapper using for bitmask VkBuildMicromapFlagsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBuildMicromapFlagsEXT.html + using BuildMicromapFlagsEXT = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkBuildMicromapFlagBitsEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
BuildMicromapFlagsEXT allFlags = + BuildMicromapFlagBitsEXT::ePreferFastTrace | BuildMicromapFlagBitsEXT::ePreferFastBuild | BuildMicromapFlagBitsEXT::eAllowCompaction; + }; + + // wrapper class for enum VkCopyMicromapModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMicromapModeEXT.html + enum class CopyMicromapModeEXT + { + eClone = VK_COPY_MICROMAP_MODE_CLONE_EXT, + eSerialize = VK_COPY_MICROMAP_MODE_SERIALIZE_EXT, + eDeserialize = VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT, + eCompact = VK_COPY_MICROMAP_MODE_COMPACT_EXT + }; + + // wrapper class for enum VkMicromapCreateFlagBitsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapCreateFlagBitsEXT.html + enum class MicromapCreateFlagBitsEXT : VkMicromapCreateFlagsEXT + { + eDeviceAddressCaptureReplay = VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT + }; + + // wrapper using for bitmask VkMicromapCreateFlagsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapCreateFlagsEXT.html + using MicromapCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkMicromapCreateFlagBitsEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MicromapCreateFlagsEXT allFlags = MicromapCreateFlagBitsEXT::eDeviceAddressCaptureReplay; + }; + + // wrapper class for enum VkBuildMicromapModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBuildMicromapModeEXT.html + enum class BuildMicromapModeEXT + { + eBuild = VK_BUILD_MICROMAP_MODE_BUILD_EXT + }; + + // wrapper class for enum VkOpacityMicromapFormatEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpacityMicromapFormatEXT.html + enum class OpacityMicromapFormatEXT + { + e2State = VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT, + e4State = VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT + }; + + // wrapper class for enum VkOpacityMicromapSpecialIndexEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpacityMicromapSpecialIndexEXT.html + enum class OpacityMicromapSpecialIndexEXT + { + eFullyTransparent = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT, + eFullyOpaque = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT, + eFullyUnknownTransparent = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT, + eFullyUnknownOpaque = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT, + eClusterGeometryDisableOpacityMicromapNV = VK_OPACITY_MICROMAP_SPECIAL_INDEX_CLUSTER_GEOMETRY_DISABLE_OPACITY_MICROMAP_NV + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === + + // wrapper class for enum VkDisplacementMicromapFormatNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplacementMicromapFormatNV.html + enum class DisplacementMicromapFormatNV + { + e64Triangles64Bytes = VK_DISPLACEMENT_MICROMAP_FORMAT_64_TRIANGLES_64_BYTES_NV, + e256Triangles128Bytes = VK_DISPLACEMENT_MICROMAP_FORMAT_256_TRIANGLES_128_BYTES_NV, + e1024Triangles128Bytes = VK_DISPLACEMENT_MICROMAP_FORMAT_1024_TRIANGLES_128_BYTES_NV + }; + +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_ARM_scheduling_controls === + + // wrapper class for enum VkPhysicalDeviceSchedulingControlsFlagBitsARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSchedulingControlsFlagBitsARM.html + enum class PhysicalDeviceSchedulingControlsFlagBitsARM : VkPhysicalDeviceSchedulingControlsFlagsARM + { + eShaderCoreCount = 
VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_SHADER_CORE_COUNT_ARM + }; + + // wrapper using for bitmask VkPhysicalDeviceSchedulingControlsFlagsARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSchedulingControlsFlagsARM.html + using PhysicalDeviceSchedulingControlsFlagsARM = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkPhysicalDeviceSchedulingControlsFlagBitsARM; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PhysicalDeviceSchedulingControlsFlagsARM allFlags = PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount; + }; + + //=== VK_NV_memory_decompression === + + // wrapper class for enum VkMemoryDecompressionMethodFlagBitsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryDecompressionMethodFlagBitsNV.html + enum class MemoryDecompressionMethodFlagBitsNV : VkMemoryDecompressionMethodFlagsNV + { + eGdeflate10 = VK_MEMORY_DECOMPRESSION_METHOD_GDEFLATE_1_0_BIT_NV + }; + + // wrapper using for bitmask VkMemoryDecompressionMethodFlagsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryDecompressionMethodFlagsNV.html + using MemoryDecompressionMethodFlagsNV = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkMemoryDecompressionMethodFlagBitsNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryDecompressionMethodFlagsNV allFlags = MemoryDecompressionMethodFlagBitsNV::eGdeflate10; + }; + + //=== VK_NV_ray_tracing_linear_swept_spheres === + + // wrapper class for enum VkRayTracingLssIndexingModeNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingLssIndexingModeNV.html + enum class RayTracingLssIndexingModeNV + { + eList = VK_RAY_TRACING_LSS_INDEXING_MODE_LIST_NV, + eSuccessive = VK_RAY_TRACING_LSS_INDEXING_MODE_SUCCESSIVE_NV + }; + + // wrapper class for enum VkRayTracingLssPrimitiveEndCapsModeNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingLssPrimitiveEndCapsModeNV.html + enum class RayTracingLssPrimitiveEndCapsModeNV + { + eNone = VK_RAY_TRACING_LSS_PRIMITIVE_END_CAPS_MODE_NONE_NV, + eChained = VK_RAY_TRACING_LSS_PRIMITIVE_END_CAPS_MODE_CHAINED_NV + }; + + //=== VK_EXT_subpass_merge_feedback === + + // wrapper class for enum VkSubpassMergeStatusEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassMergeStatusEXT.html + enum class SubpassMergeStatusEXT + { + eMerged = VK_SUBPASS_MERGE_STATUS_MERGED_EXT, + eDisallowed = VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT, + eNotMergedSideEffects = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT, + eNotMergedSamplesMismatch = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT, + eNotMergedViewsMismatch = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT, + eNotMergedAliasing = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT, + eNotMergedDependencies = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT, + eNotMergedIncompatibleInputAttachment = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT, + eNotMergedTooManyAttachments = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT, + eNotMergedInsufficientStorage = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT, + eNotMergedDepthStencilCount = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT, + eNotMergedResolveAttachmentReuse = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT, + eNotMergedSingleSubpass = 
VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT, + eNotMergedUnspecified = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT + }; + + //=== VK_LUNARG_direct_driver_loading === + + // wrapper class for enum VkDirectDriverLoadingModeLUNARG, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDirectDriverLoadingModeLUNARG.html + enum class DirectDriverLoadingModeLUNARG + { + eExclusive = VK_DIRECT_DRIVER_LOADING_MODE_EXCLUSIVE_LUNARG, + eInclusive = VK_DIRECT_DRIVER_LOADING_MODE_INCLUSIVE_LUNARG + }; + + enum class DirectDriverLoadingFlagBitsLUNARG : VkDirectDriverLoadingFlagsLUNARG + { + }; + + // wrapper using for bitmask VkDirectDriverLoadingFlagsLUNARG, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDirectDriverLoadingFlagsLUNARG.html + using DirectDriverLoadingFlagsLUNARG = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DirectDriverLoadingFlagsLUNARG allFlags = {}; + }; + + //=== VK_NV_optical_flow === + + // wrapper class for enum VkOpticalFlowUsageFlagBitsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowUsageFlagBitsNV.html + enum class OpticalFlowUsageFlagBitsNV : VkOpticalFlowUsageFlagsNV + { + eUnknown = VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV, + eInput = VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV, + eOutput = VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV, + eHint = VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV, + eCost = VK_OPTICAL_FLOW_USAGE_COST_BIT_NV, + eGlobalFlow = VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV + }; + + // wrapper using for bitmask VkOpticalFlowUsageFlagsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowUsageFlagsNV.html + using OpticalFlowUsageFlagsNV = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkOpticalFlowUsageFlagBitsNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowUsageFlagsNV allFlags = OpticalFlowUsageFlagBitsNV::eUnknown | OpticalFlowUsageFlagBitsNV::eInput | + OpticalFlowUsageFlagBitsNV::eOutput | OpticalFlowUsageFlagBitsNV::eHint | + OpticalFlowUsageFlagBitsNV::eCost | OpticalFlowUsageFlagBitsNV::eGlobalFlow; + }; + + // wrapper class for enum VkOpticalFlowGridSizeFlagBitsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowGridSizeFlagBitsNV.html + enum class OpticalFlowGridSizeFlagBitsNV : VkOpticalFlowGridSizeFlagsNV + { + eUnknown = VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV, + e1X1 = VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV, + e2X2 = VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV, + e4X4 = VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV, + e8X8 = VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV + }; + + // wrapper using for bitmask VkOpticalFlowGridSizeFlagsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowGridSizeFlagsNV.html + using OpticalFlowGridSizeFlagsNV = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkOpticalFlowGridSizeFlagBitsNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowGridSizeFlagsNV allFlags = OpticalFlowGridSizeFlagBitsNV::eUnknown | OpticalFlowGridSizeFlagBitsNV::e1X1 | + OpticalFlowGridSizeFlagBitsNV::e2X2 | OpticalFlowGridSizeFlagBitsNV::e4X4 | + OpticalFlowGridSizeFlagBitsNV::e8X8; + }; + + // wrapper class for enum VkOpticalFlowPerformanceLevelNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowPerformanceLevelNV.html + enum class 
OpticalFlowPerformanceLevelNV + { + eUnknown = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV, + eSlow = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV, + eMedium = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV, + eFast = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV + }; + + // wrapper class for enum VkOpticalFlowSessionBindingPointNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowSessionBindingPointNV.html + enum class OpticalFlowSessionBindingPointNV + { + eUnknown = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV, + eInput = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV, + eReference = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV, + eHint = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV, + eFlowVector = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV, + eBackwardFlowVector = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV, + eCost = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV, + eBackwardCost = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV, + eGlobalFlow = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV + }; + + // wrapper class for enum VkOpticalFlowSessionCreateFlagBitsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowSessionCreateFlagBitsNV.html + enum class OpticalFlowSessionCreateFlagBitsNV : VkOpticalFlowSessionCreateFlagsNV + { + eEnableHint = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV, + eEnableCost = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV, + eEnableGlobalFlow = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV, + eAllowRegions = VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV, + eBothDirections = VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV + }; + + // wrapper using for bitmask VkOpticalFlowSessionCreateFlagsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowSessionCreateFlagsNV.html + using OpticalFlowSessionCreateFlagsNV = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkOpticalFlowSessionCreateFlagBitsNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowSessionCreateFlagsNV allFlags = + OpticalFlowSessionCreateFlagBitsNV::eEnableHint | OpticalFlowSessionCreateFlagBitsNV::eEnableCost | + OpticalFlowSessionCreateFlagBitsNV::eEnableGlobalFlow | OpticalFlowSessionCreateFlagBitsNV::eAllowRegions | + OpticalFlowSessionCreateFlagBitsNV::eBothDirections; + }; + + // wrapper class for enum VkOpticalFlowExecuteFlagBitsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowExecuteFlagBitsNV.html + enum class OpticalFlowExecuteFlagBitsNV : VkOpticalFlowExecuteFlagsNV + { + eDisableTemporalHints = VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV + }; + + // wrapper using for bitmask VkOpticalFlowExecuteFlagsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowExecuteFlagsNV.html + using OpticalFlowExecuteFlagsNV = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkOpticalFlowExecuteFlagBitsNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowExecuteFlagsNV allFlags = OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints; + }; + + //=== VK_AMD_anti_lag === + + // wrapper class for enum VkAntiLagModeAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAntiLagModeAMD.html + enum class AntiLagModeAMD + { + eDriverControl = VK_ANTI_LAG_MODE_DRIVER_CONTROL_AMD, + eOn = VK_ANTI_LAG_MODE_ON_AMD, + 
eOff = VK_ANTI_LAG_MODE_OFF_AMD + }; + + // wrapper class for enum VkAntiLagStageAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAntiLagStageAMD.html + enum class AntiLagStageAMD + { + eInput = VK_ANTI_LAG_STAGE_INPUT_AMD, + ePresent = VK_ANTI_LAG_STAGE_PRESENT_AMD + }; + + //=== VK_EXT_shader_object === + + // wrapper class for enum VkShaderCreateFlagBitsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderCreateFlagBitsEXT.html + enum class ShaderCreateFlagBitsEXT : VkShaderCreateFlagsEXT + { + eLinkStage = VK_SHADER_CREATE_LINK_STAGE_BIT_EXT, + eAllowVaryingSubgroupSize = VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT, + eRequireFullSubgroups = VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT, + eNoTaskShader = VK_SHADER_CREATE_NO_TASK_SHADER_BIT_EXT, + eDispatchBase = VK_SHADER_CREATE_DISPATCH_BASE_BIT_EXT, + eFragmentShadingRateAttachment = VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT, + eFragmentDensityMapAttachment = VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eIndirectBindable = VK_SHADER_CREATE_INDIRECT_BINDABLE_BIT_EXT + }; + + // wrapper using for bitmask VkShaderCreateFlagsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderCreateFlagsEXT.html + using ShaderCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkShaderCreateFlagBitsEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderCreateFlagsEXT allFlags = + ShaderCreateFlagBitsEXT::eLinkStage | ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize | ShaderCreateFlagBitsEXT::eRequireFullSubgroups | + ShaderCreateFlagBitsEXT::eNoTaskShader | ShaderCreateFlagBitsEXT::eDispatchBase | ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment | + ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment | ShaderCreateFlagBitsEXT::eIndirectBindable; + }; + + // wrapper class for enum VkShaderCodeTypeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderCodeTypeEXT.html + enum class ShaderCodeTypeEXT + { + eBinary = VK_SHADER_CODE_TYPE_BINARY_EXT, + eSpirv = VK_SHADER_CODE_TYPE_SPIRV_EXT + }; + + //=== VK_NV_ray_tracing_invocation_reorder === + + // wrapper class for enum VkRayTracingInvocationReorderModeNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingInvocationReorderModeNV.html + enum class RayTracingInvocationReorderModeNV + { + eNone = VK_RAY_TRACING_INVOCATION_REORDER_MODE_NONE_NV, + eReorder = VK_RAY_TRACING_INVOCATION_REORDER_MODE_REORDER_NV + }; + + //=== VK_NV_cooperative_vector === + + // wrapper class for enum VkCooperativeVectorMatrixLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCooperativeVectorMatrixLayoutNV.html + enum class CooperativeVectorMatrixLayoutNV + { + eRowMajor = VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_ROW_MAJOR_NV, + eColumnMajor = VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_COLUMN_MAJOR_NV, + eInferencingOptimal = VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_INFERENCING_OPTIMAL_NV, + eTrainingOptimal = VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_TRAINING_OPTIMAL_NV + }; + + // wrapper class for enum VkComponentTypeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkComponentTypeKHR.html + enum class ComponentTypeKHR + { + eFloat16 = VK_COMPONENT_TYPE_FLOAT16_KHR, + eFloat32 = VK_COMPONENT_TYPE_FLOAT32_KHR, + eFloat64 = VK_COMPONENT_TYPE_FLOAT64_KHR, + eSint8 = VK_COMPONENT_TYPE_SINT8_KHR, + eSint16 = VK_COMPONENT_TYPE_SINT16_KHR, + eSint32 = 
VK_COMPONENT_TYPE_SINT32_KHR,
+    eSint64        = VK_COMPONENT_TYPE_SINT64_KHR,
+    eUint8         = VK_COMPONENT_TYPE_UINT8_KHR,
+    eUint16        = VK_COMPONENT_TYPE_UINT16_KHR,
+    eUint32        = VK_COMPONENT_TYPE_UINT32_KHR,
+    eUint64        = VK_COMPONENT_TYPE_UINT64_KHR,
+    eBfloat16      = VK_COMPONENT_TYPE_BFLOAT16_KHR,
+    eSint8PackedNV = VK_COMPONENT_TYPE_SINT8_PACKED_NV,
+    eUint8PackedNV = VK_COMPONENT_TYPE_UINT8_PACKED_NV,
+    eFloatE4M3NV   = VK_COMPONENT_TYPE_FLOAT_E4M3_NV,
+    eFloatE5M2NV   = VK_COMPONENT_TYPE_FLOAT_E5M2_NV
+  };
+
+  using ComponentTypeNV = ComponentTypeKHR;
+
+  //=== VK_EXT_layer_settings ===
+
+  // wrapper class for enum VkLayerSettingTypeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLayerSettingTypeEXT.html
+  enum class LayerSettingTypeEXT
+  {
+    eBool32  = VK_LAYER_SETTING_TYPE_BOOL32_EXT,
+    eInt32   = VK_LAYER_SETTING_TYPE_INT32_EXT,
+    eInt64   = VK_LAYER_SETTING_TYPE_INT64_EXT,
+    eUint32  = VK_LAYER_SETTING_TYPE_UINT32_EXT,
+    eUint64  = VK_LAYER_SETTING_TYPE_UINT64_EXT,
+    eFloat32 = VK_LAYER_SETTING_TYPE_FLOAT32_EXT,
+    eFloat64 = VK_LAYER_SETTING_TYPE_FLOAT64_EXT,
+    eString  = VK_LAYER_SETTING_TYPE_STRING_EXT
+  };
+
+  //=================================
+  //=== Layer Setting Type Traits ===
+  //=================================
+
+  template <>
+  struct CppType<LayerSettingTypeEXT, LayerSettingTypeEXT::eBool32>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Bool32;
+  };
+
+  template <>
+  struct CppType<LayerSettingTypeEXT, LayerSettingTypeEXT::eInt32>
+  {
+    using Type = int32_t;
+  };
+
+  template <>
+  struct CppType<LayerSettingTypeEXT, LayerSettingTypeEXT::eInt64>
+  {
+    using Type = int64_t;
+  };
+
+  template <>
+  struct CppType<LayerSettingTypeEXT, LayerSettingTypeEXT::eUint32>
+  {
+    using Type = uint32_t;
+  };
+
+  template <>
+  struct CppType<LayerSettingTypeEXT, LayerSettingTypeEXT::eUint64>
+  {
+    using Type = uint64_t;
+  };
+
+  template <>
+  struct CppType<LayerSettingTypeEXT, LayerSettingTypeEXT::eFloat32>
+  {
+    using Type = float;
+  };
+
+  template <>
+  struct CppType<LayerSettingTypeEXT, LayerSettingTypeEXT::eFloat64>
+  {
+    using Type = double;
+  };
+
+  template <>
+  struct CppType<LayerSettingTypeEXT, LayerSettingTypeEXT::eString>
+  {
+    using Type = char *;
+  };
+
+  template <typename T>
+  bool isSameType( LayerSettingTypeEXT layerSettingType )
+  {
+    switch ( layerSettingType )
+    {
+      case LayerSettingTypeEXT::eBool32: return std::is_same<T, VULKAN_HPP_NAMESPACE::Bool32>::value;
+      case LayerSettingTypeEXT::eInt32: return std::is_same<T, int32_t>::value;
+      case LayerSettingTypeEXT::eInt64: return std::is_same<T, int64_t>::value;
+      case LayerSettingTypeEXT::eUint32: return std::is_same<T, uint32_t>::value;
+      case LayerSettingTypeEXT::eUint64: return std::is_same<T, uint64_t>::value;
+      case LayerSettingTypeEXT::eFloat32: return std::is_same<T, float>::value;
+      case LayerSettingTypeEXT::eFloat64: return std::is_same<T, double>::value;
+      case LayerSettingTypeEXT::eString: return std::is_same<T, char *>::value;
+      default: return false;
+    }
+  }
+
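Reviewer note: LayerSettingTypeEXT, the CppType<> specializations and isSameType() above exist so that values passed through VK_EXT_layer_settings carry the matching C++ type. Below is a rough sketch of feeding one boolean setting to a layer at instance creation; it is illustrative only, not part of this patch, and the layer/setting names are placeholders (consult the target layer's documentation for real keys).

#include <vulkan/vulkan.hpp>

// Hedged sketch: pass a single Bool32 setting to a layer via VkLayerSettingsCreateInfoEXT.
void createInstanceWithLayerSetting()
{
  vk::Bool32 enableSetting = VK_TRUE;

  vk::LayerSettingEXT setting{};
  setting.pLayerName   = "VK_LAYER_KHRONOS_validation";     // placeholder layer name
  setting.pSettingName = "validate_sync";                   // placeholder setting key
  setting.type         = vk::LayerSettingTypeEXT::eBool32;  // must match what pValues points at
  setting.valueCount   = 1;
  setting.pValues      = &enableSetting;

  vk::LayerSettingsCreateInfoEXT layerSettings{};
  layerSettings.settingCount = 1;
  layerSettings.pSettings    = &setting;

  vk::InstanceCreateInfo instanceCreateInfo{};
  instanceCreateInfo.pNext = &layerSettings;  // consumed by vkCreateInstance

  vk::Instance instance = vk::createInstance( instanceCreateInfo );
  instance.destroy();
}

The type tag has to agree with the pointed-to value type, which is exactly the mapping the CppType specializations above encode (eBool32 -> Bool32, eString -> char *, and so on).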
+  //=== VK_NV_low_latency2 ===
+
+  // wrapper class for enum VkLatencyMarkerNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencyMarkerNV.html
+  enum class LatencyMarkerNV
+  {
+    eSimulationStart            = VK_LATENCY_MARKER_SIMULATION_START_NV,
+    eSimulationEnd              = VK_LATENCY_MARKER_SIMULATION_END_NV,
+    eRendersubmitStart          = VK_LATENCY_MARKER_RENDERSUBMIT_START_NV,
+    eRendersubmitEnd            = VK_LATENCY_MARKER_RENDERSUBMIT_END_NV,
+    ePresentStart               = VK_LATENCY_MARKER_PRESENT_START_NV,
+    ePresentEnd                 = VK_LATENCY_MARKER_PRESENT_END_NV,
+    eInputSample                = VK_LATENCY_MARKER_INPUT_SAMPLE_NV,
+    eTriggerFlash               = VK_LATENCY_MARKER_TRIGGER_FLASH_NV,
+    eOutOfBandRendersubmitStart = VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_START_NV,
+    eOutOfBandRendersubmitEnd   = VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_END_NV,
+    eOutOfBandPresentStart      = VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_START_NV,
+    eOutOfBandPresentEnd        = VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_END_NV
+  };
+
+  // wrapper class for enum VkOutOfBandQueueTypeNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOutOfBandQueueTypeNV.html
+  enum class OutOfBandQueueTypeNV
+  {
+    eRender  = VK_OUT_OF_BAND_QUEUE_TYPE_RENDER_NV,
+    ePresent = VK_OUT_OF_BAND_QUEUE_TYPE_PRESENT_NV
+  };
+
+  //=== VK_KHR_cooperative_matrix ===
+
+  // wrapper class for enum VkScopeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkScopeKHR.html
+  enum class ScopeKHR
+  {
+    eDevice      = VK_SCOPE_DEVICE_KHR,
+    eWorkgroup   = VK_SCOPE_WORKGROUP_KHR,
+    eSubgroup    = VK_SCOPE_SUBGROUP_KHR,
+    eQueueFamily = VK_SCOPE_QUEUE_FAMILY_KHR
+  };
+
+  using ScopeNV = ScopeKHR;
+
+  //=== VK_KHR_video_encode_av1 ===
+
+  // wrapper class for enum VkVideoEncodeAV1PredictionModeKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1PredictionModeKHR.html
+  enum class VideoEncodeAV1PredictionModeKHR
+  {
+    eIntraOnly              = VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_INTRA_ONLY_KHR,
+    eSingleReference        = VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_SINGLE_REFERENCE_KHR,
+    eUnidirectionalCompound = VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_UNIDIRECTIONAL_COMPOUND_KHR,
+    eBidirectionalCompound  = VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_BIDIRECTIONAL_COMPOUND_KHR
+  };
+
+  // wrapper class for enum VkVideoEncodeAV1RateControlGroupKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1RateControlGroupKHR.html
+  enum class VideoEncodeAV1RateControlGroupKHR
+  {
+    eIntra        = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_INTRA_KHR,
+    ePredictive   = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_PREDICTIVE_KHR,
+    eBipredictive = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_BIPREDICTIVE_KHR
+  };
+
+  // wrapper class for enum VkVideoEncodeAV1CapabilityFlagBitsKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1CapabilityFlagBitsKHR.html
+  enum class VideoEncodeAV1CapabilityFlagBitsKHR : VkVideoEncodeAV1CapabilityFlagsKHR
+  {
+    ePerRateControlGroupMinMaxQIndex = VK_VIDEO_ENCODE_AV1_CAPABILITY_PER_RATE_CONTROL_GROUP_MIN_MAX_Q_INDEX_BIT_KHR,
+    eGenerateObuExtensionHeader      = VK_VIDEO_ENCODE_AV1_CAPABILITY_GENERATE_OBU_EXTENSION_HEADER_BIT_KHR,
+    ePrimaryReferenceCdfOnly         = VK_VIDEO_ENCODE_AV1_CAPABILITY_PRIMARY_REFERENCE_CDF_ONLY_BIT_KHR,
+    eFrameSizeOverride               = VK_VIDEO_ENCODE_AV1_CAPABILITY_FRAME_SIZE_OVERRIDE_BIT_KHR,
+    eMotionVectorScaling             = VK_VIDEO_ENCODE_AV1_CAPABILITY_MOTION_VECTOR_SCALING_BIT_KHR
+  };
+
+  // wrapper using for bitmask VkVideoEncodeAV1CapabilityFlagsKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1CapabilityFlagsKHR.html
+  using VideoEncodeAV1CapabilityFlagsKHR = Flags<VideoEncodeAV1CapabilityFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoEncodeAV1CapabilityFlagBitsKHR>
+  {
+    using WrappedType = VkVideoEncodeAV1CapabilityFlagBitsKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool                             isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeAV1CapabilityFlagsKHR allFlags  =
+      VideoEncodeAV1CapabilityFlagBitsKHR::ePerRateControlGroupMinMaxQIndex | VideoEncodeAV1CapabilityFlagBitsKHR::eGenerateObuExtensionHeader |
+      VideoEncodeAV1CapabilityFlagBitsKHR::ePrimaryReferenceCdfOnly | VideoEncodeAV1CapabilityFlagBitsKHR::eFrameSizeOverride |
+      VideoEncodeAV1CapabilityFlagBitsKHR::eMotionVectorScaling;
+  };
+
+  // wrapper class for enum VkVideoEncodeAV1StdFlagBitsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1StdFlagBitsKHR.html
+  enum class VideoEncodeAV1StdFlagBitsKHR : VkVideoEncodeAV1StdFlagsKHR
+  {
+    eUniformTileSpacingFlagSet = VK_VIDEO_ENCODE_AV1_STD_UNIFORM_TILE_SPACING_FLAG_SET_BIT_KHR,
+    eSkipModePresentUnset      =
VK_VIDEO_ENCODE_AV1_STD_SKIP_MODE_PRESENT_UNSET_BIT_KHR, + ePrimaryRefFrame = VK_VIDEO_ENCODE_AV1_STD_PRIMARY_REF_FRAME_BIT_KHR, + eDeltaQ = VK_VIDEO_ENCODE_AV1_STD_DELTA_Q_BIT_KHR + }; + + // wrapper using for bitmask VkVideoEncodeAV1StdFlagsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1StdFlagsKHR.html + using VideoEncodeAV1StdFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoEncodeAV1StdFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeAV1StdFlagsKHR allFlags = + VideoEncodeAV1StdFlagBitsKHR::eUniformTileSpacingFlagSet | VideoEncodeAV1StdFlagBitsKHR::eSkipModePresentUnset | + VideoEncodeAV1StdFlagBitsKHR::ePrimaryRefFrame | VideoEncodeAV1StdFlagBitsKHR::eDeltaQ; + }; + + // wrapper class for enum VkVideoEncodeAV1SuperblockSizeFlagBitsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1SuperblockSizeFlagBitsKHR.html + enum class VideoEncodeAV1SuperblockSizeFlagBitsKHR : VkVideoEncodeAV1SuperblockSizeFlagsKHR + { + e64 = VK_VIDEO_ENCODE_AV1_SUPERBLOCK_SIZE_64_BIT_KHR, + e128 = VK_VIDEO_ENCODE_AV1_SUPERBLOCK_SIZE_128_BIT_KHR + }; + + // wrapper using for bitmask VkVideoEncodeAV1SuperblockSizeFlagsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1SuperblockSizeFlagsKHR.html + using VideoEncodeAV1SuperblockSizeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoEncodeAV1SuperblockSizeFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeAV1SuperblockSizeFlagsKHR allFlags = + VideoEncodeAV1SuperblockSizeFlagBitsKHR::e64 | VideoEncodeAV1SuperblockSizeFlagBitsKHR::e128; + }; + + // wrapper class for enum VkVideoEncodeAV1RateControlFlagBitsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1RateControlFlagBitsKHR.html + enum class VideoEncodeAV1RateControlFlagBitsKHR : VkVideoEncodeAV1RateControlFlagsKHR + { + eRegularGop = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REGULAR_GOP_BIT_KHR, + eTemporalLayerPatternDyadic = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_TEMPORAL_LAYER_PATTERN_DYADIC_BIT_KHR, + eReferencePatternFlat = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR, + eReferencePatternDyadic = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR + }; + + // wrapper using for bitmask VkVideoEncodeAV1RateControlFlagsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1RateControlFlagsKHR.html + using VideoEncodeAV1RateControlFlagsKHR = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkVideoEncodeAV1RateControlFlagBitsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeAV1RateControlFlagsKHR allFlags = + VideoEncodeAV1RateControlFlagBitsKHR::eRegularGop | VideoEncodeAV1RateControlFlagBitsKHR::eTemporalLayerPatternDyadic | + VideoEncodeAV1RateControlFlagBitsKHR::eReferencePatternFlat | VideoEncodeAV1RateControlFlagBitsKHR::eReferencePatternDyadic; + }; + + //=== VK_QCOM_image_processing2 === + + // wrapper class for enum VkBlockMatchWindowCompareModeQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBlockMatchWindowCompareModeQCOM.html + enum class BlockMatchWindowCompareModeQCOM + { + eMin = VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MIN_QCOM, + eMax = 
VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MAX_QCOM + }; + + //=== VK_QCOM_filter_cubic_weights === + + // wrapper class for enum VkCubicFilterWeightsQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCubicFilterWeightsQCOM.html + enum class CubicFilterWeightsQCOM + { + eCatmullRom = VK_CUBIC_FILTER_WEIGHTS_CATMULL_ROM_QCOM, + eZeroTangentCardinal = VK_CUBIC_FILTER_WEIGHTS_ZERO_TANGENT_CARDINAL_QCOM, + eBSpline = VK_CUBIC_FILTER_WEIGHTS_B_SPLINE_QCOM, + eMitchellNetravali = VK_CUBIC_FILTER_WEIGHTS_MITCHELL_NETRAVALI_QCOM + }; + + //=== VK_MSFT_layered_driver === + + // wrapper class for enum VkLayeredDriverUnderlyingApiMSFT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkLayeredDriverUnderlyingApiMSFT.html + enum class LayeredDriverUnderlyingApiMSFT + { + eNone = VK_LAYERED_DRIVER_UNDERLYING_API_NONE_MSFT, + eD3D12 = VK_LAYERED_DRIVER_UNDERLYING_API_D3D12_MSFT + }; + + //=== VK_KHR_calibrated_timestamps === + + // wrapper class for enum VkTimeDomainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTimeDomainKHR.html + enum class TimeDomainKHR + { + eDevice = VK_TIME_DOMAIN_DEVICE_KHR, + eClockMonotonic = VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR, + eClockMonotonicRaw = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR, + eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR + }; + + using TimeDomainEXT = TimeDomainKHR; + + //=== VK_NV_display_stereo === + + // wrapper class for enum VkDisplaySurfaceStereoTypeNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplaySurfaceStereoTypeNV.html + enum class DisplaySurfaceStereoTypeNV + { + eNone = VK_DISPLAY_SURFACE_STEREO_TYPE_NONE_NV, + eOnboardDin = VK_DISPLAY_SURFACE_STEREO_TYPE_ONBOARD_DIN_NV, + eHdmi3D = VK_DISPLAY_SURFACE_STEREO_TYPE_HDMI_3D_NV, + eInbandDisplayport = VK_DISPLAY_SURFACE_STEREO_TYPE_INBAND_DISPLAYPORT_NV + }; + + //=== VK_KHR_maintenance7 === + + // wrapper class for enum VkPhysicalDeviceLayeredApiKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLayeredApiKHR.html + enum class PhysicalDeviceLayeredApiKHR + { + eVulkan = VK_PHYSICAL_DEVICE_LAYERED_API_VULKAN_KHR, + eD3D12 = VK_PHYSICAL_DEVICE_LAYERED_API_D3D12_KHR, + eMetal = VK_PHYSICAL_DEVICE_LAYERED_API_METAL_KHR, + eOpengl = VK_PHYSICAL_DEVICE_LAYERED_API_OPENGL_KHR, + eOpengles = VK_PHYSICAL_DEVICE_LAYERED_API_OPENGLES_KHR + }; + + //=== VK_NV_cluster_acceleration_structure === + + // wrapper class for enum VkClusterAccelerationStructureClusterFlagBitsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureClusterFlagBitsNV.html + enum class ClusterAccelerationStructureClusterFlagBitsNV : VkClusterAccelerationStructureClusterFlagsNV + { + eAllowDisableOpacityMicromaps = VK_CLUSTER_ACCELERATION_STRUCTURE_CLUSTER_ALLOW_DISABLE_OPACITY_MICROMAPS_NV + }; + + // wrapper using for bitmask VkClusterAccelerationStructureClusterFlagsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureClusterFlagsNV.html + using ClusterAccelerationStructureClusterFlagsNV = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkClusterAccelerationStructureClusterFlagBitsNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ClusterAccelerationStructureClusterFlagsNV allFlags = + ClusterAccelerationStructureClusterFlagBitsNV::eAllowDisableOpacityMicromaps; + }; + + // wrapper class for enum 
VkClusterAccelerationStructureGeometryFlagBitsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureGeometryFlagBitsNV.html + enum class ClusterAccelerationStructureGeometryFlagBitsNV : VkClusterAccelerationStructureGeometryFlagsNV + { + eCullDisable = VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_CULL_DISABLE_BIT_NV, + eNoDuplicateAnyhitInvocation = VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_NO_DUPLICATE_ANYHIT_INVOCATION_BIT_NV, + eOpaque = VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_OPAQUE_BIT_NV + }; + + // wrapper using for bitmask VkClusterAccelerationStructureGeometryFlagsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureGeometryFlagsNV.html + using ClusterAccelerationStructureGeometryFlagsNV = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkClusterAccelerationStructureGeometryFlagBitsNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ClusterAccelerationStructureGeometryFlagsNV allFlags = + ClusterAccelerationStructureGeometryFlagBitsNV::eCullDisable | ClusterAccelerationStructureGeometryFlagBitsNV::eNoDuplicateAnyhitInvocation | + ClusterAccelerationStructureGeometryFlagBitsNV::eOpaque; + }; + + // wrapper class for enum VkClusterAccelerationStructureAddressResolutionFlagBitsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureAddressResolutionFlagBitsNV.html + enum class ClusterAccelerationStructureAddressResolutionFlagBitsNV : VkClusterAccelerationStructureAddressResolutionFlagsNV + { + eIndirectedDstImplicitData = VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_DST_IMPLICIT_DATA_BIT_NV, + eIndirectedScratchData = VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_SCRATCH_DATA_BIT_NV, + eIndirectedDstAddressArray = VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_DST_ADDRESS_ARRAY_BIT_NV, + eIndirectedDstSizesArray = VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_DST_SIZES_ARRAY_BIT_NV, + eIndirectedSrcInfosArray = VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_SRC_INFOS_ARRAY_BIT_NV, + eIndirectedSrcInfosCount = VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_SRC_INFOS_COUNT_BIT_NV + }; + + // wrapper using for bitmask VkClusterAccelerationStructureAddressResolutionFlagsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureAddressResolutionFlagsNV.html + using ClusterAccelerationStructureAddressResolutionFlagsNV = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkClusterAccelerationStructureAddressResolutionFlagBitsNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ClusterAccelerationStructureAddressResolutionFlagsNV allFlags = + ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedDstImplicitData | + ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedScratchData | + ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedDstAddressArray | + ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedDstSizesArray | + ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedSrcInfosArray | + ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedSrcInfosCount; + }; + + // wrapper class for enum VkClusterAccelerationStructureIndexFormatFlagBitsNV, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureIndexFormatFlagBitsNV.html + enum class ClusterAccelerationStructureIndexFormatFlagBitsNV : VkClusterAccelerationStructureIndexFormatFlagsNV + { + e8 = VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_8BIT_NV, + e16 = VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_16BIT_NV, + e32 = VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_32BIT_NV + }; + + // wrapper using for bitmask VkClusterAccelerationStructureIndexFormatFlagsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureIndexFormatFlagsNV.html + using ClusterAccelerationStructureIndexFormatFlagsNV = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkClusterAccelerationStructureIndexFormatFlagBitsNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ClusterAccelerationStructureIndexFormatFlagsNV allFlags = ClusterAccelerationStructureIndexFormatFlagBitsNV::e8 | + ClusterAccelerationStructureIndexFormatFlagBitsNV::e16 | + ClusterAccelerationStructureIndexFormatFlagBitsNV::e32; + }; + + // wrapper class for enum VkClusterAccelerationStructureTypeNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureTypeNV.html + enum class ClusterAccelerationStructureTypeNV + { + eClustersBottomLevel = VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_CLUSTERS_BOTTOM_LEVEL_NV, + eTriangleCluster = VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_TRIANGLE_CLUSTER_NV, + eTriangleClusterTemplate = VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_TRIANGLE_CLUSTER_TEMPLATE_NV + }; + + // wrapper class for enum VkClusterAccelerationStructureOpTypeNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureOpTypeNV.html + enum class ClusterAccelerationStructureOpTypeNV + { + eMoveObjects = VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_MOVE_OBJECTS_NV, + eBuildClustersBottomLevel = VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_BUILD_CLUSTERS_BOTTOM_LEVEL_NV, + eBuildTriangleCluster = VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_BUILD_TRIANGLE_CLUSTER_NV, + eBuildTriangleClusterTemplate = VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_BUILD_TRIANGLE_CLUSTER_TEMPLATE_NV, + eInstantiateTriangleCluster = VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_INSTANTIATE_TRIANGLE_CLUSTER_NV + }; + + // wrapper class for enum VkClusterAccelerationStructureOpModeNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureOpModeNV.html + enum class ClusterAccelerationStructureOpModeNV + { + eImplicitDestinations = VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_IMPLICIT_DESTINATIONS_NV, + eExplicitDestinations = VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_EXPLICIT_DESTINATIONS_NV, + eComputeSizes = VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_COMPUTE_SIZES_NV + }; + + //=== VK_NV_partitioned_acceleration_structure === + + // wrapper class for enum VkPartitionedAccelerationStructureOpTypeNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureOpTypeNV.html + enum class PartitionedAccelerationStructureOpTypeNV + { + eWriteInstance = VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_WRITE_INSTANCE_NV, + eUpdateInstance = VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_UPDATE_INSTANCE_NV, + eWritePartitionTranslation = VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_WRITE_PARTITION_TRANSLATION_NV + }; + + // wrapper class for enum 
VkPartitionedAccelerationStructureInstanceFlagBitsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureInstanceFlagBitsNV.html + enum class PartitionedAccelerationStructureInstanceFlagBitsNV : VkPartitionedAccelerationStructureInstanceFlagsNV + { + eFlagTriangleFacingCullDisable = VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_TRIANGLE_FACING_CULL_DISABLE_BIT_NV, + eFlagTriangleFlipFacing = VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_TRIANGLE_FLIP_FACING_BIT_NV, + eFlagForceOpaque = VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_FORCE_OPAQUE_BIT_NV, + eFlagForceNoOpaque = VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_FORCE_NO_OPAQUE_BIT_NV, + eFlagEnableExplicitBoundingBox = VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_ENABLE_EXPLICIT_BOUNDING_BOX_NV + }; + + // wrapper using for bitmask VkPartitionedAccelerationStructureInstanceFlagsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureInstanceFlagsNV.html + using PartitionedAccelerationStructureInstanceFlagsNV = Flags; + + template <> + struct FlagTraits + { + using WrappedType = VkPartitionedAccelerationStructureInstanceFlagBitsNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PartitionedAccelerationStructureInstanceFlagsNV allFlags = + PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagTriangleFacingCullDisable | + PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagTriangleFlipFacing | PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagForceOpaque | + PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagForceNoOpaque | + PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagEnableExplicitBoundingBox; + }; + + //=== VK_EXT_device_generated_commands === + + // wrapper class for enum VkIndirectCommandsTokenTypeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsTokenTypeEXT.html + enum class IndirectCommandsTokenTypeEXT + { + eExecutionSet = VK_INDIRECT_COMMANDS_TOKEN_TYPE_EXECUTION_SET_EXT, + ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_EXT, + eSequenceIndex = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SEQUENCE_INDEX_EXT, + eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_EXT, + eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_EXT, + eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_EXT, + eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_EXT, + eDrawIndexedCount = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_COUNT_EXT, + eDrawCount = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_COUNT_EXT, + eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_EXT, + eDrawMeshTasksNV = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV_EXT, + eDrawMeshTasksCountNV = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_NV_EXT, + eDrawMeshTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_EXT, + eDrawMeshTasksCount = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_EXT, + eTraceRays2 = VK_INDIRECT_COMMANDS_TOKEN_TYPE_TRACE_RAYS2_EXT + }; + + // wrapper class for enum VkIndirectExecutionSetInfoTypeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetInfoTypeEXT.html + enum class IndirectExecutionSetInfoTypeEXT + { + ePipelines = VK_INDIRECT_EXECUTION_SET_INFO_TYPE_PIPELINES_EXT, + eShaderObjects = VK_INDIRECT_EXECUTION_SET_INFO_TYPE_SHADER_OBJECTS_EXT + }; + + // wrapper class for enum VkIndirectCommandsLayoutUsageFlagBitsEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutUsageFlagBitsEXT.html
+  enum class IndirectCommandsLayoutUsageFlagBitsEXT : VkIndirectCommandsLayoutUsageFlagsEXT
+  {
+    eExplicitPreprocess = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_EXT,
+    eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_EXT
+  };
+
+  // wrapper using for bitmask VkIndirectCommandsLayoutUsageFlagsEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutUsageFlagsEXT.html
+  using IndirectCommandsLayoutUsageFlagsEXT = Flags<IndirectCommandsLayoutUsageFlagBitsEXT>;
+
+  template <>
+  struct FlagTraits<IndirectCommandsLayoutUsageFlagBitsEXT>
+  {
+    using WrappedType = VkIndirectCommandsLayoutUsageFlagBitsEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndirectCommandsLayoutUsageFlagsEXT allFlags =
+      IndirectCommandsLayoutUsageFlagBitsEXT::eExplicitPreprocess | IndirectCommandsLayoutUsageFlagBitsEXT::eUnorderedSequences;
+  };
+
+  // wrapper class for enum VkIndirectCommandsInputModeFlagBitsEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsInputModeFlagBitsEXT.html
+  enum class IndirectCommandsInputModeFlagBitsEXT : VkIndirectCommandsInputModeFlagsEXT
+  {
+    eVulkanIndexBuffer = VK_INDIRECT_COMMANDS_INPUT_MODE_VULKAN_INDEX_BUFFER_EXT,
+    eDxgiIndexBuffer   = VK_INDIRECT_COMMANDS_INPUT_MODE_DXGI_INDEX_BUFFER_EXT
+  };
+
+  // wrapper using for bitmask VkIndirectCommandsInputModeFlagsEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsInputModeFlagsEXT.html
+  using IndirectCommandsInputModeFlagsEXT = Flags<IndirectCommandsInputModeFlagBitsEXT>;
+
+  template <>
+  struct FlagTraits<IndirectCommandsInputModeFlagBitsEXT>
+  {
+    using WrappedType = VkIndirectCommandsInputModeFlagBitsEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndirectCommandsInputModeFlagsEXT allFlags =
+      IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer | IndirectCommandsInputModeFlagBitsEXT::eDxgiIndexBuffer;
+  };
+
+  //=== VK_KHR_maintenance8 ===
+
+  // wrapper class for enum VkAccessFlagBits3KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccessFlagBits3KHR.html
+  enum class AccessFlagBits3KHR : VkAccessFlags3KHR
+  {
+    eNone = VK_ACCESS_3_NONE_KHR
+  };
+
+  // wrapper using for bitmask VkAccessFlags3KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccessFlags3KHR.html
+  using AccessFlags3KHR = Flags<AccessFlagBits3KHR>;
+
+  template <>
+  struct FlagTraits<AccessFlagBits3KHR>
+  {
+    using WrappedType = VkAccessFlagBits3KHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AccessFlags3KHR allFlags = AccessFlagBits3KHR::eNone;
+  };
+
+  //=== VK_EXT_depth_clamp_control ===
+
+  // wrapper class for enum VkDepthClampModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDepthClampModeEXT.html
+  enum class DepthClampModeEXT
+  {
+    eViewportRange    = VK_DEPTH_CLAMP_MODE_VIEWPORT_RANGE_EXT,
+    eUserDefinedRange = VK_DEPTH_CLAMP_MODE_USER_DEFINED_RANGE_EXT
+  };
+
+  //===========================================================
+  //=== Mapping from ObjectType to DebugReportObjectTypeEXT ===
+  //===========================================================
+
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType )
+  {
+    switch ( objectType )
+    {
+      //=== VK_VERSION_1_0 ===
+      case VULKAN_HPP_NAMESPACE::ObjectType::eInstance: return
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance; + case VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice; + case VULKAN_HPP_NAMESPACE::ObjectType::eDevice: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice; + case VULKAN_HPP_NAMESPACE::ObjectType::eQueue: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue; + case VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory; + case VULKAN_HPP_NAMESPACE::ObjectType::eFence: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence; + case VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore; + case VULKAN_HPP_NAMESPACE::ObjectType::eEvent: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent; + case VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool; + case VULKAN_HPP_NAMESPACE::ObjectType::eBuffer: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer; + case VULKAN_HPP_NAMESPACE::ObjectType::eBufferView: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView; + case VULKAN_HPP_NAMESPACE::ObjectType::eImage: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage; + case VULKAN_HPP_NAMESPACE::ObjectType::eImageView: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView; + case VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule; + case VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache; + case VULKAN_HPP_NAMESPACE::ObjectType::ePipeline: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline; + case VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout; + case VULKAN_HPP_NAMESPACE::ObjectType::eSampler: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler; + case VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool; + case VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet; + case VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout; + case VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer; + case VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass; + case VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool; + case VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer; + + //=== VK_VERSION_1_1 === + case VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion; + case VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate; + + //=== VK_VERSION_1_3 === + case VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_KHR_surface === + case VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR: 
+ return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR; + + //=== VK_KHR_swapchain === + case VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR; + + //=== VK_KHR_display === + case VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR; + case VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR; + + //=== VK_EXT_debug_report === + case VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT; + + //=== VK_KHR_video_queue === + case VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + case VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_NVX_binary_import === + case VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX; + case VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX; + + //=== VK_EXT_debug_utils === + case VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_KHR_acceleration_structure === + case VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR; + + //=== VK_EXT_validation_cache === + case VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT; + + //=== VK_NV_ray_tracing === + case VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV; + + //=== VK_INTEL_performance_query === + case VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_KHR_deferred_host_operations === + case VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_NV_device_generated_commands === + case VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + case VULKAN_HPP_NAMESPACE::ObjectType::eCudaModuleNV: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaModuleNV; + case VULKAN_HPP_NAMESPACE::ObjectType::eCudaFunctionNV: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaFunctionNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + case VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + case VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_NV_optical_flow === + case VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_EXT_shader_object === + case 
VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT:
+        return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+      //=== VK_KHR_pipeline_binary ===
+      case VULKAN_HPP_NAMESPACE::ObjectType::ePipelineBinaryKHR:
+        return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+      //=== VK_NV_external_compute_queue ===
+      case VULKAN_HPP_NAMESPACE::ObjectType::eExternalComputeQueueNV:
+        return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+      //=== VK_EXT_device_generated_commands ===
+      case VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutEXT: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+      case VULKAN_HPP_NAMESPACE::ObjectType::eIndirectExecutionSetEXT: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+      default: VULKAN_HPP_ASSERT( false && "unknown ObjectType" ); return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+    }
+  }
+
+}  // namespace VULKAN_HPP_NAMESPACE
+#endif
diff --git a/include/vulkan/vulkan_extension_inspection.hpp b/include/vulkan/vulkan_extension_inspection.hpp
new file mode 100644
index 00000000..2f901e99
--- /dev/null
+++ b/include/vulkan/vulkan_extension_inspection.hpp
@@ -0,0 +1,3616 @@
+// Copyright 2015-2025 The Khronos Group Inc.
+//
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_EXTENSION_INSPECTION_HPP
+#define VULKAN_EXTENSION_INSPECTION_HPP
+
+#if defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE )
+import VULKAN_HPP_STD_MODULE;
+#else
+#  include <map>
+#  include <set>
+#  include <string>
+#  include <vector>
+#  include <vulkan/vulkan.hpp>
+#endif
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  //======================================
+  //=== Extension inspection functions ===
+  //======================================
+
+  std::set<std::string> const &                                        getDeviceExtensions();
+  std::set<std::string> const &                                        getInstanceExtensions();
+  std::map<std::string, std::string> const &                           getDeprecatedExtensions();
+  std::map<std::string, std::vector<std::vector<std::string>>> const & getExtensionDepends( std::string const & extension );
+  std::pair<bool, std::vector<std::vector<std::string>> const &>       getExtensionDepends( std::string const & version, std::string const & extension );
+  std::map<std::string, std::string> const &                           getObsoletedExtensions();
+  std::map<std::string, std::string> const &                           getPromotedExtensions();
+  VULKAN_HPP_CONSTEXPR_20 std::string getExtensionDeprecatedBy( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 std::string getExtensionObsoletedBy( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 std::string getExtensionPromotedTo( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 bool isDeprecatedExtension( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 bool isDeviceExtension( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 bool isInstanceExtension( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 bool isObsoletedExtension( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 bool isPromotedExtension( std::string const & extension );
+
+  //=====================================================
+  //=== Extension inspection function implementations ===
+  //=====================================================
+
+  VULKAN_HPP_INLINE std::map<std::string, std::string> const & getDeprecatedExtensions()
+  {
+    static const std::map<std::string, std::string> deprecatedExtensions = {
+      { "VK_EXT_debug_report", "VK_EXT_debug_utils" },
+      { "VK_NV_glsl_shader", "" },
+      { "VK_NV_dedicated_allocation", "VK_KHR_dedicated_allocation" },
+      { "VK_AMD_gpu_shader_half_float", "VK_KHR_shader_float16_int8" },
+      { "VK_IMG_format_pvrtc", "" },
+      { "VK_NV_external_memory_capabilities", "VK_KHR_external_memory_capabilities" },
+      {
"VK_NV_external_memory", "VK_KHR_external_memory" }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_NV_external_memory_win32", "VK_KHR_external_memory_win32" }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_EXT_validation_flags", "VK_EXT_layer_settings" }, + { "VK_EXT_shader_subgroup_ballot", "VK_VERSION_1_2" }, + { "VK_EXT_shader_subgroup_vote", "VK_VERSION_1_1" }, +#if defined( VK_USE_PLATFORM_IOS_MVK ) + { "VK_MVK_ios_surface", "VK_EXT_metal_surface" }, +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + { "VK_MVK_macos_surface", "VK_EXT_metal_surface" }, +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + { "VK_AMD_gpu_shader_int16", "VK_KHR_shader_float16_int8" }, + { "VK_NV_ray_tracing", "VK_KHR_ray_tracing_pipeline" }, + { "VK_EXT_buffer_device_address", "VK_KHR_buffer_device_address" }, + { "VK_EXT_validation_features", "VK_EXT_layer_settings" }, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + { "VK_NV_displacement_micromap", "VK_NV_cluster_acceleration_structure" } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + return deprecatedExtensions; + } + + VULKAN_HPP_INLINE std::set const & getDeviceExtensions() + { + static const std::set deviceExtensions = { + "VK_KHR_swapchain", + "VK_KHR_display_swapchain", + "VK_NV_glsl_shader", + "VK_EXT_depth_range_unrestricted", + "VK_KHR_sampler_mirror_clamp_to_edge", + "VK_IMG_filter_cubic", + "VK_AMD_rasterization_order", + "VK_AMD_shader_trinary_minmax", + "VK_AMD_shader_explicit_vertex_parameter", + "VK_EXT_debug_marker", + "VK_KHR_video_queue", + "VK_KHR_video_decode_queue", + "VK_AMD_gcn_shader", + "VK_NV_dedicated_allocation", + "VK_EXT_transform_feedback", + "VK_NVX_binary_import", + "VK_NVX_image_view_handle", + "VK_AMD_draw_indirect_count", + "VK_AMD_negative_viewport_height", + "VK_AMD_gpu_shader_half_float", + "VK_AMD_shader_ballot", + "VK_KHR_video_encode_h264", + "VK_KHR_video_encode_h265", + "VK_KHR_video_decode_h264", + "VK_AMD_texture_gather_bias_lod", + "VK_AMD_shader_info", + "VK_KHR_dynamic_rendering", + "VK_AMD_shader_image_load_store_lod", + "VK_NV_corner_sampled_image", + "VK_KHR_multiview", + "VK_IMG_format_pvrtc", + "VK_NV_external_memory", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_NV_external_memory_win32", + "VK_NV_win32_keyed_mutex", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_KHR_device_group", + "VK_KHR_shader_draw_parameters", + "VK_EXT_shader_subgroup_ballot", + "VK_EXT_shader_subgroup_vote", + "VK_EXT_texture_compression_astc_hdr", + "VK_EXT_astc_decode_mode", + "VK_EXT_pipeline_robustness", + "VK_KHR_maintenance1", + "VK_KHR_external_memory", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_KHR_external_memory_win32", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_KHR_external_memory_fd", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_KHR_win32_keyed_mutex", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_KHR_external_semaphore", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_KHR_external_semaphore_win32", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_KHR_external_semaphore_fd", + "VK_KHR_push_descriptor", + "VK_EXT_conditional_rendering", + "VK_KHR_shader_float16_int8", + "VK_KHR_16bit_storage", + "VK_KHR_incremental_present", + "VK_KHR_descriptor_update_template", + "VK_NV_clip_space_w_scaling", + "VK_EXT_display_control", + "VK_GOOGLE_display_timing", + "VK_NV_sample_mask_override_coverage", + "VK_NV_geometry_shader_passthrough", + "VK_NV_viewport_array2", + "VK_NVX_multiview_per_view_attributes", + "VK_NV_viewport_swizzle", + "VK_EXT_discard_rectangles", + "VK_EXT_conservative_rasterization", + 
"VK_EXT_depth_clip_enable", + "VK_EXT_hdr_metadata", + "VK_KHR_imageless_framebuffer", + "VK_KHR_create_renderpass2", + "VK_IMG_relaxed_line_rasterization", + "VK_KHR_shared_presentable_image", + "VK_KHR_external_fence", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_KHR_external_fence_win32", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_KHR_external_fence_fd", + "VK_KHR_performance_query", + "VK_KHR_maintenance2", + "VK_KHR_variable_pointers", + "VK_EXT_external_memory_dma_buf", + "VK_EXT_queue_family_foreign", + "VK_KHR_dedicated_allocation", +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + "VK_ANDROID_external_memory_android_hardware_buffer", +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + "VK_EXT_sampler_filter_minmax", + "VK_KHR_storage_buffer_storage_class", + "VK_AMD_gpu_shader_int16", +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + "VK_AMDX_shader_enqueue", +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + "VK_AMD_mixed_attachment_samples", + "VK_AMD_shader_fragment_mask", + "VK_EXT_inline_uniform_block", + "VK_EXT_shader_stencil_export", + "VK_KHR_shader_bfloat16", + "VK_EXT_sample_locations", + "VK_KHR_relaxed_block_layout", + "VK_KHR_get_memory_requirements2", + "VK_KHR_image_format_list", + "VK_EXT_blend_operation_advanced", + "VK_NV_fragment_coverage_to_color", + "VK_KHR_acceleration_structure", + "VK_KHR_ray_tracing_pipeline", + "VK_KHR_ray_query", + "VK_NV_framebuffer_mixed_samples", + "VK_NV_fill_rectangle", + "VK_NV_shader_sm_builtins", + "VK_EXT_post_depth_coverage", + "VK_KHR_sampler_ycbcr_conversion", + "VK_KHR_bind_memory2", + "VK_EXT_image_drm_format_modifier", + "VK_EXT_validation_cache", + "VK_EXT_descriptor_indexing", + "VK_EXT_shader_viewport_index_layer", +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + "VK_KHR_portability_subset", +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + "VK_NV_shading_rate_image", + "VK_NV_ray_tracing", + "VK_NV_representative_fragment_test", + "VK_KHR_maintenance3", + "VK_KHR_draw_indirect_count", + "VK_EXT_filter_cubic", + "VK_QCOM_render_pass_shader_resolve", + "VK_EXT_global_priority", + "VK_KHR_shader_subgroup_extended_types", + "VK_KHR_8bit_storage", + "VK_EXT_external_memory_host", + "VK_AMD_buffer_marker", + "VK_KHR_shader_atomic_int64", + "VK_KHR_shader_clock", + "VK_AMD_pipeline_compiler_control", + "VK_EXT_calibrated_timestamps", + "VK_AMD_shader_core_properties", + "VK_KHR_video_decode_h265", + "VK_KHR_global_priority", + "VK_AMD_memory_overallocation_behavior", + "VK_EXT_vertex_attribute_divisor", +#if defined( VK_USE_PLATFORM_GGP ) + "VK_GGP_frame_token", +#endif /*VK_USE_PLATFORM_GGP*/ + "VK_EXT_pipeline_creation_feedback", + "VK_KHR_driver_properties", + "VK_KHR_shader_float_controls", + "VK_NV_shader_subgroup_partitioned", + "VK_KHR_depth_stencil_resolve", + "VK_KHR_swapchain_mutable_format", + "VK_NV_compute_shader_derivatives", + "VK_NV_mesh_shader", + "VK_NV_fragment_shader_barycentric", + "VK_NV_shader_image_footprint", + "VK_NV_scissor_exclusive", + "VK_NV_device_diagnostic_checkpoints", + "VK_KHR_timeline_semaphore", + "VK_INTEL_shader_integer_functions2", + "VK_INTEL_performance_query", + "VK_KHR_vulkan_memory_model", + "VK_EXT_pci_bus_info", + "VK_AMD_display_native_hdr", + "VK_KHR_shader_terminate_invocation", + "VK_EXT_fragment_density_map", + "VK_EXT_scalar_block_layout", + "VK_GOOGLE_hlsl_functionality1", + "VK_GOOGLE_decorate_string", + "VK_EXT_subgroup_size_control", + "VK_KHR_fragment_shading_rate", + "VK_AMD_shader_core_properties2", + "VK_AMD_device_coherent_memory", + "VK_KHR_dynamic_rendering_local_read", + 
"VK_EXT_shader_image_atomic_int64", + "VK_KHR_shader_quad_control", + "VK_KHR_spirv_1_4", + "VK_EXT_memory_budget", + "VK_EXT_memory_priority", + "VK_NV_dedicated_allocation_image_aliasing", + "VK_KHR_separate_depth_stencil_layouts", + "VK_EXT_buffer_device_address", + "VK_EXT_tooling_info", + "VK_EXT_separate_stencil_usage", + "VK_KHR_present_wait", + "VK_NV_cooperative_matrix", + "VK_NV_coverage_reduction_mode", + "VK_EXT_fragment_shader_interlock", + "VK_EXT_ycbcr_image_arrays", + "VK_KHR_uniform_buffer_standard_layout", + "VK_EXT_provoking_vertex", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_EXT_full_screen_exclusive", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_KHR_buffer_device_address", + "VK_EXT_line_rasterization", + "VK_EXT_shader_atomic_float", + "VK_EXT_host_query_reset", + "VK_EXT_index_type_uint8", + "VK_EXT_extended_dynamic_state", + "VK_KHR_deferred_host_operations", + "VK_KHR_pipeline_executable_properties", + "VK_EXT_host_image_copy", + "VK_KHR_map_memory2", + "VK_EXT_map_memory_placed", + "VK_EXT_shader_atomic_float2", + "VK_EXT_swapchain_maintenance1", + "VK_EXT_shader_demote_to_helper_invocation", + "VK_NV_device_generated_commands", + "VK_NV_inherited_viewport_scissor", + "VK_KHR_shader_integer_dot_product", + "VK_EXT_texel_buffer_alignment", + "VK_QCOM_render_pass_transform", + "VK_EXT_depth_bias_control", + "VK_EXT_device_memory_report", + "VK_EXT_robustness2", + "VK_EXT_custom_border_color", + "VK_GOOGLE_user_type", + "VK_KHR_pipeline_library", + "VK_NV_present_barrier", + "VK_KHR_shader_non_semantic_info", + "VK_KHR_present_id", + "VK_EXT_private_data", + "VK_EXT_pipeline_creation_cache_control", + "VK_KHR_video_encode_queue", + "VK_NV_device_diagnostics_config", + "VK_QCOM_render_pass_store_ops", +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + "VK_NV_cuda_kernel_launch", +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + "VK_QCOM_tile_shading", + "VK_NV_low_latency", +#if defined( VK_USE_PLATFORM_METAL_EXT ) + "VK_EXT_metal_objects", +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + "VK_KHR_synchronization2", + "VK_EXT_descriptor_buffer", + "VK_EXT_graphics_pipeline_library", + "VK_AMD_shader_early_and_late_fragment_tests", + "VK_KHR_fragment_shader_barycentric", + "VK_KHR_shader_subgroup_uniform_control_flow", + "VK_KHR_zero_initialize_workgroup_memory", + "VK_NV_fragment_shading_rate_enums", + "VK_NV_ray_tracing_motion_blur", + "VK_EXT_mesh_shader", + "VK_EXT_ycbcr_2plane_444_formats", + "VK_EXT_fragment_density_map2", + "VK_QCOM_rotated_copy_commands", + "VK_EXT_image_robustness", + "VK_KHR_workgroup_memory_explicit_layout", + "VK_KHR_copy_commands2", + "VK_EXT_image_compression_control", + "VK_EXT_attachment_feedback_loop_layout", + "VK_EXT_4444_formats", + "VK_EXT_device_fault", + "VK_ARM_rasterization_order_attachment_access", + "VK_EXT_rgba10x6_formats", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_NV_acquire_winrt_display", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_VALVE_mutable_descriptor_type", + "VK_EXT_vertex_input_dynamic_state", + "VK_EXT_physical_device_drm", + "VK_EXT_device_address_binding_report", + "VK_EXT_depth_clip_control", + "VK_EXT_primitive_topology_list_restart", + "VK_KHR_format_feature_flags2", + "VK_EXT_present_mode_fifo_latest_ready", +#if defined( VK_USE_PLATFORM_FUCHSIA ) + "VK_FUCHSIA_external_memory", + "VK_FUCHSIA_external_semaphore", + "VK_FUCHSIA_buffer_collection", +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + "VK_HUAWEI_subpass_shading", + "VK_HUAWEI_invocation_mask", + "VK_NV_external_memory_rdma", + "VK_EXT_pipeline_properties", + 
"VK_EXT_frame_boundary", + "VK_EXT_multisampled_render_to_single_sampled", + "VK_EXT_extended_dynamic_state2", + "VK_EXT_color_write_enable", + "VK_EXT_primitives_generated_query", + "VK_KHR_ray_tracing_maintenance1", + "VK_EXT_global_priority_query", + "VK_EXT_image_view_min_lod", + "VK_EXT_multi_draw", + "VK_EXT_image_2d_view_of_3d", + "VK_EXT_shader_tile_image", + "VK_EXT_opacity_micromap", +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + "VK_NV_displacement_micromap", +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + "VK_EXT_load_store_op_none", + "VK_HUAWEI_cluster_culling_shader", + "VK_EXT_border_color_swizzle", + "VK_EXT_pageable_device_local_memory", + "VK_KHR_maintenance4", + "VK_ARM_shader_core_properties", + "VK_KHR_shader_subgroup_rotate", + "VK_ARM_scheduling_controls", + "VK_EXT_image_sliced_view_of_3d", + "VK_VALVE_descriptor_set_host_mapping", + "VK_EXT_depth_clamp_zero_one", + "VK_EXT_non_seamless_cube_map", + "VK_ARM_render_pass_striped", + "VK_QCOM_fragment_density_map_offset", + "VK_NV_copy_memory_indirect", + "VK_NV_memory_decompression", + "VK_NV_device_generated_commands_compute", + "VK_NV_ray_tracing_linear_swept_spheres", + "VK_NV_linear_color_attachment", + "VK_KHR_shader_maximal_reconvergence", + "VK_EXT_image_compression_control_swapchain", + "VK_QCOM_image_processing", + "VK_EXT_nested_command_buffer", + "VK_EXT_external_memory_acquire_unmodified", + "VK_EXT_extended_dynamic_state3", + "VK_EXT_subpass_merge_feedback", + "VK_EXT_shader_module_identifier", + "VK_EXT_rasterization_order_attachment_access", + "VK_NV_optical_flow", + "VK_EXT_legacy_dithering", + "VK_EXT_pipeline_protected_access", +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + "VK_ANDROID_external_format_resolve", +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + "VK_KHR_maintenance5", + "VK_AMD_anti_lag", + "VK_KHR_ray_tracing_position_fetch", + "VK_EXT_shader_object", + "VK_KHR_pipeline_binary", + "VK_QCOM_tile_properties", + "VK_SEC_amigo_profiling", + "VK_QCOM_multiview_per_view_viewports", + "VK_NV_ray_tracing_invocation_reorder", + "VK_NV_cooperative_vector", + "VK_NV_extended_sparse_address_space", + "VK_EXT_mutable_descriptor_type", + "VK_EXT_legacy_vertex_attributes", + "VK_ARM_shader_core_builtins", + "VK_EXT_pipeline_library_group_handles", + "VK_EXT_dynamic_rendering_unused_attachments", + "VK_NV_low_latency2", + "VK_KHR_cooperative_matrix", + "VK_QCOM_multiview_per_view_render_areas", + "VK_KHR_compute_shader_derivatives", + "VK_KHR_video_decode_av1", + "VK_KHR_video_encode_av1", + "VK_KHR_video_maintenance1", + "VK_NV_per_stage_descriptor_set", + "VK_QCOM_image_processing2", + "VK_QCOM_filter_cubic_weights", + "VK_QCOM_ycbcr_degamma", + "VK_QCOM_filter_cubic_clamp", + "VK_EXT_attachment_feedback_loop_dynamic_state", + "VK_KHR_vertex_attribute_divisor", + "VK_KHR_load_store_op_none", + "VK_KHR_shader_float_controls2", +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + "VK_QNX_external_memory_screen_buffer", +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + "VK_MSFT_layered_driver", + "VK_KHR_index_type_uint8", + "VK_KHR_line_rasterization", + "VK_KHR_calibrated_timestamps", + "VK_KHR_shader_expect_assume", + "VK_KHR_maintenance6", + "VK_NV_descriptor_pool_overallocation", + "VK_QCOM_tile_memory_heap", + "VK_KHR_video_encode_quantization_map", + "VK_NV_raw_access_chains", + "VK_NV_external_compute_queue", + "VK_KHR_shader_relaxed_extended_instruction", + "VK_NV_command_buffer_inheritance", + "VK_KHR_maintenance7", + "VK_NV_shader_atomic_float16_vector", + "VK_EXT_shader_replicated_composites", + 
"VK_NV_ray_tracing_validation", + "VK_NV_cluster_acceleration_structure", + "VK_NV_partitioned_acceleration_structure", + "VK_EXT_device_generated_commands", + "VK_KHR_maintenance8", + "VK_MESA_image_alignment_control", + "VK_EXT_depth_clamp_control", + "VK_KHR_video_maintenance2", + "VK_HUAWEI_hdr_vivid", + "VK_NV_cooperative_matrix2", + "VK_ARM_pipeline_opacity_micromap", +#if defined( VK_USE_PLATFORM_METAL_EXT ) + "VK_EXT_external_memory_metal", +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + "VK_KHR_depth_clamp_zero_one", + "VK_EXT_vertex_attribute_robustness", +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + "VK_NV_present_metering", +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + "VK_EXT_fragment_density_map_offset" + }; + return deviceExtensions; + } + + VULKAN_HPP_INLINE std::set const & getInstanceExtensions() + { + static const std::set instanceExtensions = { + "VK_KHR_surface", + "VK_KHR_display", +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + "VK_KHR_xlib_surface", +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + "VK_KHR_xcb_surface", +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + "VK_KHR_wayland_surface", +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + "VK_KHR_android_surface", +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_KHR_win32_surface", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_EXT_debug_report", +#if defined( VK_USE_PLATFORM_GGP ) + "VK_GGP_stream_descriptor_surface", +#endif /*VK_USE_PLATFORM_GGP*/ + "VK_NV_external_memory_capabilities", + "VK_KHR_get_physical_device_properties2", + "VK_EXT_validation_flags", +#if defined( VK_USE_PLATFORM_VI_NN ) + "VK_NN_vi_surface", +#endif /*VK_USE_PLATFORM_VI_NN*/ + "VK_KHR_device_group_creation", + "VK_KHR_external_memory_capabilities", + "VK_KHR_external_semaphore_capabilities", + "VK_EXT_direct_mode_display", +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + "VK_EXT_acquire_xlib_display", +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + "VK_EXT_display_surface_counter", + "VK_EXT_swapchain_colorspace", + "VK_KHR_external_fence_capabilities", + "VK_KHR_get_surface_capabilities2", + "VK_KHR_get_display_properties2", +#if defined( VK_USE_PLATFORM_IOS_MVK ) + "VK_MVK_ios_surface", +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + "VK_MVK_macos_surface", +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + "VK_EXT_debug_utils", +#if defined( VK_USE_PLATFORM_FUCHSIA ) + "VK_FUCHSIA_imagepipe_surface", +#endif /*VK_USE_PLATFORM_FUCHSIA*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + "VK_EXT_metal_surface", +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + "VK_KHR_surface_protected_capabilities", + "VK_EXT_validation_features", + "VK_EXT_headless_surface", + "VK_EXT_surface_maintenance1", + "VK_EXT_acquire_drm_display", +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + "VK_EXT_directfb_surface", +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + "VK_QNX_screen_surface", +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + "VK_KHR_portability_enumeration", + "VK_GOOGLE_surfaceless_query", + "VK_LUNARG_direct_driver_loading", + "VK_EXT_layer_settings", + "VK_NV_display_stereo" + }; + return instanceExtensions; + } + + VULKAN_HPP_INLINE std::map>> const & getExtensionDepends( std::string const & extension ) + { + static const std::map>> noDependencies; + static const std::map>>> dependencies = { + { "VK_KHR_swapchain", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, + { 
"VK_KHR_display", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, + { "VK_KHR_display_swapchain", + { { "VK_VERSION_1_0", + { { + "VK_KHR_display", + "VK_KHR_swapchain", + } } } } }, +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + { "VK_KHR_xlib_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + { "VK_KHR_xcb_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + { "VK_KHR_wayland_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + { "VK_KHR_android_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_KHR_win32_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_EXT_debug_marker", + { { "VK_VERSION_1_0", + { { + "VK_EXT_debug_report", + } } } } }, + { "VK_KHR_video_queue", + { { "VK_VERSION_1_1", + { { + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_KHR_video_decode_queue", + { { "VK_VERSION_1_0", + { { + "VK_KHR_synchronization2", + "VK_KHR_video_queue", + } } }, + { "VK_VERSION_1_3", + { { + "VK_KHR_video_queue", + } } } } }, + { "VK_EXT_transform_feedback", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_video_encode_h264", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_encode_queue", + } } } } }, + { "VK_KHR_video_encode_h265", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_encode_queue", + } } } } }, + { "VK_KHR_video_decode_h264", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_decode_queue", + } } } } }, + { "VK_AMD_texture_gather_bias_lod", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_dynamic_rendering", + { { "VK_VERSION_1_0", + { { + "VK_KHR_depth_stencil_resolve", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_depth_stencil_resolve", + } } }, + { "VK_VERSION_1_2", { {} } } } }, +#if defined( VK_USE_PLATFORM_GGP ) + { "VK_GGP_stream_descriptor_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_GGP*/ + { "VK_NV_corner_sampled_image", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_multiview", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_external_memory", + { { "VK_VERSION_1_0", + { { + "VK_NV_external_memory_capabilities", + } } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_NV_external_memory_win32", + { { "VK_VERSION_1_0", + { { + "VK_NV_external_memory", + } } } } }, + { "VK_NV_win32_keyed_mutex", + { { "VK_VERSION_1_0", + { { + "VK_NV_external_memory_win32", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_KHR_device_group", + { { "VK_VERSION_1_0", + { { + "VK_KHR_device_group_creation", + } } } } }, +#if defined( VK_USE_PLATFORM_VI_NN ) + { "VK_NN_vi_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_VI_NN*/ + { "VK_EXT_texture_compression_astc_hdr", + { { "VK_VERSION_1_0", 
+ { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_astc_decode_mode", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_pipeline_robustness", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_external_memory_capabilities", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_external_memory", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory_capabilities", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_KHR_external_memory_win32", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_KHR_external_memory_fd", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_KHR_win32_keyed_mutex", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory_win32", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_KHR_external_semaphore_capabilities", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_external_semaphore", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_semaphore_capabilities", + } } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_KHR_external_semaphore_win32", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_semaphore", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_KHR_external_semaphore_fd", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_semaphore", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_push_descriptor", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_conditional_rendering", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_float16_int8", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_16bit_storage", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_storage_buffer_storage_class", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_incremental_present", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + } } } } }, + { "VK_EXT_direct_mode_display", + { { "VK_VERSION_1_0", + { { + "VK_KHR_display", + } } } } }, +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + { "VK_EXT_acquire_xlib_display", + { { "VK_VERSION_1_0", + { { + "VK_EXT_direct_mode_display", + } } } } }, +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + { "VK_EXT_display_surface_counter", + { { "VK_VERSION_1_0", + { { + "VK_KHR_display", + } } } } }, + { "VK_EXT_display_control", + { { "VK_VERSION_1_0", + { { + "VK_EXT_display_surface_counter", + "VK_KHR_swapchain", + } } } } }, + { "VK_GOOGLE_display_timing", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + } } } } }, + { "VK_NVX_multiview_per_view_attributes", + { { "VK_VERSION_1_0", + { { + "VK_KHR_multiview", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_discard_rectangles", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { 
"VK_EXT_conservative_rasterization", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_depth_clip_enable", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_swapchain_colorspace", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, + { "VK_EXT_hdr_metadata", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + } } } } }, + { "VK_KHR_imageless_framebuffer", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_image_format_list", + "VK_KHR_maintenance2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_image_format_list", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_KHR_create_renderpass2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance2", + "VK_KHR_multiview", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_IMG_relaxed_line_rasterization", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shared_presentable_image", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_get_surface_capabilities2", + "VK_KHR_swapchain", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_get_surface_capabilities2", + "VK_KHR_swapchain", + } } } } }, + { "VK_KHR_external_fence_capabilities", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_external_fence", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_fence_capabilities", + } } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_KHR_external_fence_win32", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_fence", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_KHR_external_fence_fd", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_fence", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_performance_query", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_get_surface_capabilities2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, + { "VK_KHR_variable_pointers", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_storage_buffer_storage_class", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_get_display_properties2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_display", + } } } } }, +#if defined( VK_USE_PLATFORM_IOS_MVK ) + { "VK_MVK_ios_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + { "VK_MVK_macos_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + { "VK_EXT_external_memory_dma_buf", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory_fd", + } } } } }, + { "VK_EXT_queue_family_foreign", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_dedicated_allocation", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_memory_requirements2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + { "VK_ANDROID_external_memory_android_hardware_buffer", + { { "VK_VERSION_1_0", + { { + "VK_EXT_queue_family_foreign", + "VK_KHR_dedicated_allocation", + "VK_KHR_external_memory", + "VK_KHR_sampler_ycbcr_conversion", + } } }, + { "VK_VERSION_1_1", + { { + 
"VK_EXT_queue_family_foreign", + } } } } }, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + { "VK_EXT_sampler_filter_minmax", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + { "VK_AMDX_shader_enqueue", + { { "VK_VERSION_1_0", + { { + "VK_EXT_extended_dynamic_state", + "VK_KHR_maintenance5", + "VK_KHR_pipeline_library", + "VK_KHR_spirv_1_4", + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_3", + { { + "VK_KHR_maintenance5", + "VK_KHR_pipeline_library", + } } } } }, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + { "VK_EXT_inline_uniform_block", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_maintenance1", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_bfloat16", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_sample_locations", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_blend_operation_advanced", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_acceleration_structure", + { { "VK_VERSION_1_1", + { { + "VK_EXT_descriptor_indexing", + "VK_KHR_buffer_device_address", + "VK_KHR_deferred_host_operations", + } } }, + { "VK_VERSION_1_2", + { { + "VK_KHR_deferred_host_operations", + } } } } }, + { "VK_KHR_ray_tracing_pipeline", + { { "VK_VERSION_1_0", + { { + "VK_KHR_acceleration_structure", + "VK_KHR_spirv_1_4", + } } }, + { "VK_VERSION_1_2", + { { + "VK_KHR_acceleration_structure", + } } } } }, + { "VK_KHR_ray_query", + { { "VK_VERSION_1_0", + { { + "VK_KHR_acceleration_structure", + "VK_KHR_spirv_1_4", + } } }, + { "VK_VERSION_1_2", + { { + "VK_KHR_acceleration_structure", + } } } } }, + { "VK_NV_shader_sm_builtins", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_sampler_ycbcr_conversion", + { { "VK_VERSION_1_0", + { { + "VK_KHR_bind_memory2", + "VK_KHR_get_memory_requirements2", + "VK_KHR_get_physical_device_properties2", + "VK_KHR_maintenance1", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_image_drm_format_modifier", + { { "VK_VERSION_1_0", + { { + "VK_KHR_bind_memory2", + "VK_KHR_get_physical_device_properties2", + "VK_KHR_image_format_list", + "VK_KHR_sampler_ycbcr_conversion", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_image_format_list", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_EXT_descriptor_indexing", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_maintenance3", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + { "VK_KHR_portability_subset", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + { "VK_NV_shading_rate_image", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_ray_tracing", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_memory_requirements2", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_representative_fragment_test", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_maintenance3", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", 
+ } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_subgroup_extended_types", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_8bit_storage", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_storage_buffer_storage_class", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_external_memory_host", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_atomic_int64", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_clock", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_calibrated_timestamps", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_AMD_shader_core_properties", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_video_decode_h265", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_decode_queue", + } } } } }, + { "VK_KHR_global_priority", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_vertex_attribute_divisor", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_GGP ) + { "VK_GGP_frame_token", + { { "VK_VERSION_1_0", + { { + "VK_GGP_stream_descriptor_surface", + "VK_KHR_swapchain", + } } } } }, +#endif /*VK_USE_PLATFORM_GGP*/ + { "VK_KHR_driver_properties", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_float_controls", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_shader_subgroup_partitioned", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_depth_stencil_resolve", + { { "VK_VERSION_1_0", + { { + "VK_KHR_create_renderpass2", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_KHR_swapchain_mutable_format", + { { "VK_VERSION_1_0", + { { + "VK_KHR_image_format_list", + "VK_KHR_maintenance2", + "VK_KHR_swapchain", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_image_format_list", + "VK_KHR_swapchain", + } } }, + { "VK_VERSION_1_2", + { { + "VK_KHR_swapchain", + } } } } }, + { "VK_NV_compute_shader_derivatives", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_mesh_shader", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_fragment_shader_barycentric", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_shader_image_footprint", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_scissor_exclusive", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_device_diagnostic_checkpoints", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_timeline_semaphore", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { 
"VK_VERSION_1_1", { {} } } } }, + { "VK_INTEL_shader_integer_functions2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_vulkan_memory_model", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_pci_bus_info", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_AMD_display_native_hdr", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_get_surface_capabilities2", + "VK_KHR_swapchain", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_get_surface_capabilities2", + "VK_KHR_swapchain", + } } } } }, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + { "VK_FUCHSIA_imagepipe_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + { "VK_KHR_shader_terminate_invocation", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_METAL_EXT ) + { "VK_EXT_metal_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + { "VK_EXT_fragment_density_map", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_scalar_block_layout", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_subgroup_size_control", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_fragment_shading_rate", + { { "VK_VERSION_1_0", + { { + "VK_KHR_create_renderpass2", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_create_renderpass2", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_AMD_shader_core_properties2", + { { "VK_VERSION_1_0", + { { + "VK_AMD_shader_core_properties", + } } } } }, + { "VK_AMD_device_coherent_memory", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_dynamic_rendering_local_read", + { { "VK_VERSION_1_0", + { { + "VK_KHR_dynamic_rendering", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_EXT_shader_image_atomic_int64", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_quad_control", + { { "VK_VERSION_1_1", + { { + "VK_KHR_shader_maximal_reconvergence", + "VK_KHR_vulkan_memory_model", + } } }, + { "VK_VERSION_1_2", + { { + "VK_KHR_shader_maximal_reconvergence", + } } } } }, + { "VK_KHR_spirv_1_4", + { { "VK_VERSION_1_1", + { { + "VK_KHR_shader_float_controls", + } } } } }, + { "VK_EXT_memory_budget", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_memory_priority", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_surface_protected_capabilities", + { { "VK_VERSION_1_1", + { { + "VK_KHR_get_surface_capabilities2", + } } } } }, + { "VK_NV_dedicated_allocation_image_aliasing", + { { "VK_VERSION_1_0", + { { + "VK_KHR_dedicated_allocation", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_separate_depth_stencil_layouts", + { { "VK_VERSION_1_0", + { { + "VK_KHR_create_renderpass2", + 
"VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_create_renderpass2", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_EXT_buffer_device_address", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_present_wait", + { { "VK_VERSION_1_0", + { { + "VK_KHR_present_id", + "VK_KHR_swapchain", + } } } } }, + { "VK_NV_cooperative_matrix", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_coverage_reduction_mode", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_NV_framebuffer_mixed_samples", + } } }, + { "VK_VERSION_1_1", + { { + "VK_NV_framebuffer_mixed_samples", + } } } } }, + { "VK_EXT_fragment_shader_interlock", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_ycbcr_image_arrays", + { { "VK_VERSION_1_0", + { { + "VK_KHR_sampler_ycbcr_conversion", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_uniform_buffer_standard_layout", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_provoking_vertex", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_EXT_full_screen_exclusive", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_get_surface_capabilities2", + "VK_KHR_surface", + "VK_KHR_swapchain", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_get_surface_capabilities2", + "VK_KHR_surface", + "VK_KHR_swapchain", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_EXT_headless_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, + { "VK_KHR_buffer_device_address", + { { "VK_VERSION_1_0", + { { + "VK_KHR_device_group", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_line_rasterization", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_shader_atomic_float", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_host_query_reset", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_index_type_uint8", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_extended_dynamic_state", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_pipeline_executable_properties", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_host_image_copy", + { { "VK_VERSION_1_0", + { { + "VK_KHR_copy_commands2", + "VK_KHR_format_feature_flags2", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_copy_commands2", + "VK_KHR_format_feature_flags2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_EXT_map_memory_placed", + { { "VK_VERSION_1_0", + { { + "VK_KHR_map_memory2", + } } }, + { "VK_VERSION_1_4", { {} } } } }, + { "VK_EXT_shader_atomic_float2", + { { "VK_VERSION_1_0", + { { + 
"VK_EXT_shader_atomic_float", + } } } } }, + { "VK_EXT_surface_maintenance1", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_surface_capabilities2", + "VK_KHR_surface", + } } } } }, + { "VK_EXT_swapchain_maintenance1", + { { "VK_VERSION_1_0", + { { + "VK_EXT_surface_maintenance1", + "VK_KHR_get_physical_device_properties2", + "VK_KHR_swapchain", + } } }, + { "VK_VERSION_1_1", + { { + "VK_EXT_surface_maintenance1", + "VK_KHR_swapchain", + } } } } }, + { "VK_EXT_shader_demote_to_helper_invocation", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_device_generated_commands", + { { "VK_VERSION_1_1", + { { + "VK_KHR_buffer_device_address", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_NV_inherited_viewport_scissor", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_integer_dot_product", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_texel_buffer_alignment", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_depth_bias_control", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_device_memory_report", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_acquire_drm_display", + { { "VK_VERSION_1_0", + { { + "VK_EXT_direct_mode_display", + } } } } }, + { "VK_EXT_robustness2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_custom_border_color", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_present_barrier", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_get_surface_capabilities2", + "VK_KHR_surface", + "VK_KHR_swapchain", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_get_surface_capabilities2", + "VK_KHR_surface", + "VK_KHR_swapchain", + } } } } }, + { "VK_KHR_present_id", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_swapchain", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_private_data", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_pipeline_creation_cache_control", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_video_encode_queue", + { { "VK_VERSION_1_0", + { { + "VK_KHR_synchronization2", + "VK_KHR_video_queue", + } } }, + { "VK_VERSION_1_3", + { { + "VK_KHR_video_queue", + } } } } }, + { "VK_NV_device_diagnostics_config", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_QCOM_tile_shading", + { { "VK_VERSION_1_0", + { { + "VK_QCOM_tile_properties", + }, + { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_synchronization2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_descriptor_buffer", + { { "VK_VERSION_1_0", + { { + "VK_EXT_descriptor_indexing", + "VK_KHR_buffer_device_address", + 
"VK_KHR_get_physical_device_properties2", + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_EXT_descriptor_indexing", + "VK_KHR_buffer_device_address", + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_2", + { { + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_EXT_graphics_pipeline_library", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_pipeline_library", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_pipeline_library", + } } } } }, + { "VK_AMD_shader_early_and_late_fragment_tests", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_fragment_shader_barycentric", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_subgroup_uniform_control_flow", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_zero_initialize_workgroup_memory", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_fragment_shading_rate_enums", + { { "VK_VERSION_1_0", + { { + "VK_KHR_fragment_shading_rate", + } } } } }, + { "VK_NV_ray_tracing_motion_blur", + { { "VK_VERSION_1_0", + { { + "VK_KHR_ray_tracing_pipeline", + } } } } }, + { "VK_EXT_mesh_shader", + { { "VK_VERSION_1_0", + { { + "VK_KHR_spirv_1_4", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_EXT_ycbcr_2plane_444_formats", + { { "VK_VERSION_1_0", + { { + "VK_KHR_sampler_ycbcr_conversion", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_fragment_density_map2", + { { "VK_VERSION_1_0", + { { + "VK_EXT_fragment_density_map", + } } } } }, + { "VK_QCOM_rotated_copy_commands", + { { "VK_VERSION_1_0", + { { + "VK_KHR_copy_commands2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_EXT_image_robustness", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_workgroup_memory_explicit_layout", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_copy_commands2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_image_compression_control", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_attachment_feedback_loop_layout", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_4444_formats", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_device_fault", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_ARM_rasterization_order_attachment_access", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_rgba10x6_formats", + { { "VK_VERSION_1_0", + { { + "VK_KHR_sampler_ycbcr_conversion", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_NV_acquire_winrt_display", + { { "VK_VERSION_1_0", + { { + "VK_EXT_direct_mode_display", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + { "VK_EXT_directfb_surface", + { { 
"VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + { "VK_VALVE_mutable_descriptor_type", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance3", + } } } } }, + { "VK_EXT_vertex_input_dynamic_state", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_physical_device_drm", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_device_address_binding_report", + { { "VK_VERSION_1_0", + { { + "VK_EXT_debug_utils", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_EXT_debug_utils", + } } } } }, + { "VK_EXT_depth_clip_control", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_primitive_topology_list_restart", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_format_feature_flags2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_present_mode_fifo_latest_ready", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + } } } } }, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + { "VK_FUCHSIA_external_memory", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + "VK_KHR_external_memory_capabilities", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_FUCHSIA_external_semaphore", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_semaphore", + "VK_KHR_external_semaphore_capabilities", + } } } } }, + { "VK_FUCHSIA_buffer_collection", + { { "VK_VERSION_1_0", + { { + "VK_FUCHSIA_external_memory", + "VK_KHR_sampler_ycbcr_conversion", + } } }, + { "VK_VERSION_1_1", + { { + "VK_FUCHSIA_external_memory", + } } } } }, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + { "VK_HUAWEI_subpass_shading", + { { "VK_VERSION_1_0", + { { + "VK_KHR_create_renderpass2", + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_2", + { { + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_HUAWEI_invocation_mask", + { { "VK_VERSION_1_0", + { { + "VK_KHR_ray_tracing_pipeline", + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_3", + { { + "VK_KHR_ray_tracing_pipeline", + } } } } }, + { "VK_NV_external_memory_rdma", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_pipeline_properties", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_multisampled_render_to_single_sampled", + { { "VK_VERSION_1_0", + { { + "VK_KHR_create_renderpass2", + "VK_KHR_depth_stencil_resolve", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_EXT_extended_dynamic_state2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + { "VK_QNX_screen_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + { "VK_EXT_color_write_enable", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_primitives_generated_query", + { { "VK_VERSION_1_0", + { { + "VK_EXT_transform_feedback", + } } } } }, + { "VK_KHR_ray_tracing_maintenance1", + { { "VK_VERSION_1_0", + { { + 
"VK_KHR_acceleration_structure", + } } } } }, + { "VK_EXT_global_priority_query", + { { "VK_VERSION_1_0", + { { + "VK_EXT_global_priority", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_EXT_global_priority", + } } } } }, + { "VK_EXT_image_view_min_lod", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_multi_draw", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_image_2d_view_of_3d", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_maintenance1", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_shader_tile_image", { { "VK_VERSION_1_3", { {} } } } }, + { "VK_EXT_opacity_micromap", + { { "VK_VERSION_1_0", + { { + "VK_KHR_acceleration_structure", + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_3", + { { + "VK_KHR_acceleration_structure", + } } } } }, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + { "VK_NV_displacement_micromap", + { { "VK_VERSION_1_0", + { { + "VK_EXT_opacity_micromap", + } } } } }, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + { "VK_HUAWEI_cluster_culling_shader", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_border_color_swizzle", + { { "VK_VERSION_1_0", + { { + "VK_EXT_custom_border_color", + } } } } }, + { "VK_EXT_pageable_device_local_memory", + { { "VK_VERSION_1_0", + { { + "VK_EXT_memory_priority", + } } } } }, + { "VK_KHR_maintenance4", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_ARM_shader_core_properties", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_ARM_scheduling_controls", + { { "VK_VERSION_1_0", + { { + "VK_ARM_shader_core_builtins", + } } } } }, + { "VK_EXT_image_sliced_view_of_3d", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_maintenance1", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_VALVE_descriptor_set_host_mapping", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_depth_clamp_zero_one", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_non_seamless_cube_map", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_ARM_render_pass_striped", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_QCOM_fragment_density_map_offset", + { { "VK_VERSION_1_0", + { { + "VK_EXT_fragment_density_map", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_EXT_fragment_density_map", + } } } } }, + { "VK_NV_copy_memory_indirect", + { { "VK_VERSION_1_0", + { { + "VK_KHR_buffer_device_address", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_buffer_device_address", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_NV_memory_decompression", + { { "VK_VERSION_1_0", + { { + "VK_KHR_buffer_device_address", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_buffer_device_address", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_NV_device_generated_commands_compute", + { { "VK_VERSION_1_0", + 
{ { + "VK_NV_device_generated_commands", + } } } } }, + { "VK_NV_ray_tracing_linear_swept_spheres", + { { "VK_VERSION_1_0", + { { + "VK_KHR_ray_tracing_pipeline", + } } } } }, + { "VK_NV_linear_color_attachment", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_GOOGLE_surfaceless_query", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, + { "VK_KHR_shader_maximal_reconvergence", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_image_compression_control_swapchain", + { { "VK_VERSION_1_0", + { { + "VK_EXT_image_compression_control", + } } } } }, + { "VK_QCOM_image_processing", + { { "VK_VERSION_1_0", + { { + "VK_KHR_format_feature_flags2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_EXT_nested_command_buffer", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_external_memory_acquire_unmodified", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_extended_dynamic_state3", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_subpass_merge_feedback", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_shader_module_identifier", + { { "VK_VERSION_1_0", + { { + "VK_EXT_pipeline_creation_cache_control", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_EXT_pipeline_creation_cache_control", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_EXT_rasterization_order_attachment_access", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_optical_flow", + { { "VK_VERSION_1_0", + { { + "VK_KHR_format_feature_flags2", + "VK_KHR_get_physical_device_properties2", + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_format_feature_flags2", + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_EXT_legacy_dithering", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_pipeline_protected_access", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + { "VK_ANDROID_external_format_resolve", + { { "VK_VERSION_1_0", + { { + "VK_ANDROID_external_memory_android_hardware_buffer", + } } } } }, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + { "VK_KHR_maintenance5", + { { "VK_VERSION_1_1", + { { + "VK_KHR_dynamic_rendering", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_KHR_ray_tracing_position_fetch", + { { "VK_VERSION_1_0", + { { + "VK_KHR_acceleration_structure", + } } } } }, + { "VK_EXT_shader_object", + { { "VK_VERSION_1_0", + { { + "VK_KHR_dynamic_rendering", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_dynamic_rendering", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_KHR_pipeline_binary", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance5", + } } }, + { "VK_VERSION_1_4", { {} } } } }, + { "VK_QCOM_tile_properties", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_SEC_amigo_profiling", + { { "VK_VERSION_1_0", + { { + 
"VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_QCOM_multiview_per_view_viewports", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_ray_tracing_invocation_reorder", + { { "VK_VERSION_1_0", + { { + "VK_KHR_ray_tracing_pipeline", + } } } } }, + { "VK_EXT_mutable_descriptor_type", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance3", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_legacy_vertex_attributes", + { { "VK_VERSION_1_0", + { { + "VK_EXT_vertex_input_dynamic_state", + } } } } }, + { "VK_ARM_shader_core_builtins", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_pipeline_library_group_handles", + { { "VK_VERSION_1_0", + { { + "VK_KHR_pipeline_library", + "VK_KHR_ray_tracing_pipeline", + } } } } }, + { "VK_EXT_dynamic_rendering_unused_attachments", + { { "VK_VERSION_1_0", + { { + "VK_KHR_dynamic_rendering", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_dynamic_rendering", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_NV_low_latency2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_timeline_semaphore", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_KHR_cooperative_matrix", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_compute_shader_derivatives", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_video_decode_av1", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_decode_queue", + } } } } }, + { "VK_KHR_video_encode_av1", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_encode_queue", + } } } } }, + { "VK_KHR_video_maintenance1", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_queue", + } } } } }, + { "VK_NV_per_stage_descriptor_set", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance6", + } } }, + { "VK_VERSION_1_4", { {} } } } }, + { "VK_QCOM_image_processing2", + { { "VK_VERSION_1_0", + { { + "VK_QCOM_image_processing", + } } } } }, + { "VK_QCOM_filter_cubic_weights", + { { "VK_VERSION_1_0", + { { + "VK_EXT_filter_cubic", + } } } } }, + { "VK_QCOM_filter_cubic_clamp", + { { "VK_VERSION_1_0", + { { + "VK_EXT_filter_cubic", + "VK_EXT_sampler_filter_minmax", + } } }, + { "VK_VERSION_1_2", + { { + "VK_EXT_filter_cubic", + } } } } }, + { "VK_EXT_attachment_feedback_loop_dynamic_state", + { { "VK_VERSION_1_0", + { { + "VK_EXT_attachment_feedback_loop_layout", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_EXT_attachment_feedback_loop_layout", + } } } } }, + { "VK_KHR_vertex_attribute_divisor", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_float_controls2", + { { "VK_VERSION_1_1", + { { + "VK_KHR_shader_float_controls", + } } } } }, +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + { "VK_QNX_external_memory_screen_buffer", + { { "VK_VERSION_1_0", + { { + "VK_EXT_queue_family_foreign", + "VK_KHR_dedicated_allocation", + "VK_KHR_external_memory", + "VK_KHR_sampler_ycbcr_conversion", + } } }, + { "VK_VERSION_1_1", + { { + "VK_EXT_queue_family_foreign", + } } } } }, +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + { "VK_MSFT_layered_driver", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } 
}, + { "VK_KHR_index_type_uint8", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_line_rasterization", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_calibrated_timestamps", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_expect_assume", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_maintenance6", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_descriptor_pool_overallocation", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_QCOM_tile_memory_heap", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_memory_requirements2", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_display_stereo", + { { "VK_VERSION_1_0", + { { + "VK_KHR_display", + "VK_KHR_get_display_properties2", + } } } } }, + { "VK_KHR_video_encode_quantization_map", + { { "VK_VERSION_1_0", + { { + "VK_KHR_format_feature_flags2", + "VK_KHR_video_encode_queue", + } } }, + { "VK_VERSION_1_3", + { { + "VK_KHR_video_encode_queue", + } } } } }, + { "VK_KHR_maintenance7", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_cluster_acceleration_structure", + { { "VK_VERSION_1_0", + { { + "VK_KHR_acceleration_structure", + } } } } }, + { "VK_NV_partitioned_acceleration_structure", + { { "VK_VERSION_1_0", + { { + "VK_KHR_acceleration_structure", + } } } } }, + { "VK_EXT_device_generated_commands", + { { "VK_VERSION_1_0", + { { + "VK_KHR_buffer_device_address", + "VK_KHR_maintenance5", + } } }, + { "VK_VERSION_1_2", + { { + "VK_KHR_maintenance5", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_KHR_maintenance8", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_MESA_image_alignment_control", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_depth_clamp_control", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_video_maintenance2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_queue", + } } } } }, + { "VK_HUAWEI_hdr_vivid", + { { "VK_VERSION_1_0", + { { + "VK_EXT_hdr_metadata", + "VK_KHR_get_physical_device_properties2", + "VK_KHR_swapchain", + } } }, + { "VK_VERSION_1_1", + { { + "VK_EXT_hdr_metadata", + "VK_KHR_swapchain", + } } } } }, + { "VK_NV_cooperative_matrix2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_cooperative_matrix", + } } } } }, + { "VK_ARM_pipeline_opacity_micromap", + { { "VK_VERSION_1_0", + { { + "VK_EXT_opacity_micromap", + } } } } }, +#if defined( VK_USE_PLATFORM_METAL_EXT ) + { "VK_EXT_external_memory_metal", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + { "VK_KHR_depth_clamp_zero_one", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_vertex_attribute_robustness", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_fragment_density_map_offset", + { { "VK_VERSION_1_0", + { { + "VK_EXT_fragment_density_map", + "VK_KHR_create_renderpass2", + "VK_KHR_dynamic_rendering", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + 
{ { + "VK_EXT_fragment_density_map", + "VK_KHR_create_renderpass2", + "VK_KHR_dynamic_rendering", + } } }, + { "VK_VERSION_1_2", + { { + "VK_EXT_fragment_density_map", + "VK_KHR_dynamic_rendering", + } } }, + { "VK_VERSION_1_3", + { { + "VK_EXT_fragment_density_map", + } } } } } + }; + auto depIt = dependencies.find( extension ); + return ( depIt != dependencies.end() ) ? depIt->second : noDependencies; + } + + VULKAN_HPP_INLINE std::pair<bool, std::vector<std::vector<std::string>> const &> getExtensionDepends( std::string const & version, + std::string const & extension ) + { +#if !defined( NDEBUG ) + static std::set<std::string> versions = { "VK_VERSION_1_0", "VK_VERSION_1_1", "VK_VERSION_1_2", "VK_VERSION_1_3", "VK_VERSION_1_4" }; + assert( versions.find( version ) != versions.end() ); +#endif + static std::vector<std::vector<std::string>> noDependencies; + + std::map<std::string, std::vector<std::vector<std::string>>> const & dependencies = getExtensionDepends( extension ); + if ( dependencies.empty() ) + { + return { true, noDependencies }; + } + auto depIt = dependencies.lower_bound( version ); + if ( ( depIt == dependencies.end() ) || ( depIt->first != version ) ) + { + depIt = std::prev( depIt ); + } + if ( depIt == dependencies.end() ) + { + return { false, noDependencies }; + } + else + { + return { true, depIt->second }; + } + } + + VULKAN_HPP_INLINE std::map<std::string, std::string> const & getObsoletedExtensions() + { + static const std::map<std::string, std::string> obsoletedExtensions = { { "VK_AMD_negative_viewport_height", "VK_KHR_maintenance1" } }; + return obsoletedExtensions; + } + + VULKAN_HPP_INLINE std::map<std::string, std::string> const & getPromotedExtensions() + { + static const std::map<std::string, std::string> promotedExtensions = { + { "VK_KHR_sampler_mirror_clamp_to_edge", "VK_VERSION_1_2" }, + { "VK_EXT_debug_marker", "VK_EXT_debug_utils" }, + { "VK_AMD_draw_indirect_count", "VK_KHR_draw_indirect_count" }, + { "VK_KHR_dynamic_rendering", "VK_VERSION_1_3" }, + { "VK_KHR_multiview", "VK_VERSION_1_1" }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_NV_win32_keyed_mutex", "VK_KHR_win32_keyed_mutex" }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_KHR_get_physical_device_properties2", "VK_VERSION_1_1" }, + { "VK_KHR_device_group", "VK_VERSION_1_1" }, + { "VK_KHR_shader_draw_parameters", "VK_VERSION_1_1" }, + { "VK_EXT_texture_compression_astc_hdr", "VK_VERSION_1_3" }, + { "VK_EXT_pipeline_robustness", "VK_VERSION_1_4" }, + { "VK_KHR_maintenance1", "VK_VERSION_1_1" }, + { "VK_KHR_device_group_creation", "VK_VERSION_1_1" }, + { "VK_KHR_external_memory_capabilities", "VK_VERSION_1_1" }, + { "VK_KHR_external_memory", "VK_VERSION_1_1" }, + { "VK_KHR_external_semaphore_capabilities", "VK_VERSION_1_1" }, + { "VK_KHR_external_semaphore", "VK_VERSION_1_1" }, + { "VK_KHR_push_descriptor", "VK_VERSION_1_4" }, + { "VK_KHR_shader_float16_int8", "VK_VERSION_1_2" }, + { "VK_KHR_16bit_storage", "VK_VERSION_1_1" }, + { "VK_KHR_descriptor_update_template", "VK_VERSION_1_1" }, + { "VK_KHR_imageless_framebuffer", "VK_VERSION_1_2" }, + { "VK_KHR_create_renderpass2", "VK_VERSION_1_2" }, + { "VK_KHR_external_fence_capabilities", "VK_VERSION_1_1" }, + { "VK_KHR_external_fence", "VK_VERSION_1_1" }, + { "VK_KHR_maintenance2", "VK_VERSION_1_1" }, + { "VK_KHR_variable_pointers", "VK_VERSION_1_1" }, + { "VK_KHR_dedicated_allocation", "VK_VERSION_1_1" }, + { "VK_EXT_sampler_filter_minmax", "VK_VERSION_1_2" }, + { "VK_KHR_storage_buffer_storage_class", "VK_VERSION_1_1" }, + { "VK_EXT_inline_uniform_block", "VK_VERSION_1_3" }, + { "VK_KHR_relaxed_block_layout", "VK_VERSION_1_1" }, + { "VK_KHR_get_memory_requirements2", "VK_VERSION_1_1" }, + { "VK_KHR_image_format_list", "VK_VERSION_1_2" }, + {
"VK_KHR_sampler_ycbcr_conversion", "VK_VERSION_1_1" }, + { "VK_KHR_bind_memory2", "VK_VERSION_1_1" }, + { "VK_EXT_descriptor_indexing", "VK_VERSION_1_2" }, + { "VK_EXT_shader_viewport_index_layer", "VK_VERSION_1_2" }, + { "VK_KHR_maintenance3", "VK_VERSION_1_1" }, + { "VK_KHR_draw_indirect_count", "VK_VERSION_1_2" }, + { "VK_EXT_global_priority", "VK_KHR_global_priority" }, + { "VK_KHR_shader_subgroup_extended_types", "VK_VERSION_1_2" }, + { "VK_KHR_8bit_storage", "VK_VERSION_1_2" }, + { "VK_KHR_shader_atomic_int64", "VK_VERSION_1_2" }, + { "VK_EXT_calibrated_timestamps", "VK_KHR_calibrated_timestamps" }, + { "VK_KHR_global_priority", "VK_VERSION_1_4" }, + { "VK_EXT_vertex_attribute_divisor", "VK_KHR_vertex_attribute_divisor" }, + { "VK_EXT_pipeline_creation_feedback", "VK_VERSION_1_3" }, + { "VK_KHR_driver_properties", "VK_VERSION_1_2" }, + { "VK_KHR_shader_float_controls", "VK_VERSION_1_2" }, + { "VK_KHR_depth_stencil_resolve", "VK_VERSION_1_2" }, + { "VK_NV_compute_shader_derivatives", "VK_KHR_compute_shader_derivatives" }, + { "VK_NV_fragment_shader_barycentric", "VK_KHR_fragment_shader_barycentric" }, + { "VK_KHR_timeline_semaphore", "VK_VERSION_1_2" }, + { "VK_KHR_vulkan_memory_model", "VK_VERSION_1_2" }, + { "VK_KHR_shader_terminate_invocation", "VK_VERSION_1_3" }, + { "VK_EXT_scalar_block_layout", "VK_VERSION_1_2" }, + { "VK_EXT_subgroup_size_control", "VK_VERSION_1_3" }, + { "VK_KHR_dynamic_rendering_local_read", "VK_VERSION_1_4" }, + { "VK_KHR_spirv_1_4", "VK_VERSION_1_2" }, + { "VK_KHR_separate_depth_stencil_layouts", "VK_VERSION_1_2" }, + { "VK_EXT_tooling_info", "VK_VERSION_1_3" }, + { "VK_EXT_separate_stencil_usage", "VK_VERSION_1_2" }, + { "VK_KHR_uniform_buffer_standard_layout", "VK_VERSION_1_2" }, + { "VK_KHR_buffer_device_address", "VK_VERSION_1_2" }, + { "VK_EXT_line_rasterization", "VK_KHR_line_rasterization" }, + { "VK_EXT_host_query_reset", "VK_VERSION_1_2" }, + { "VK_EXT_index_type_uint8", "VK_KHR_index_type_uint8" }, + { "VK_EXT_extended_dynamic_state", "VK_VERSION_1_3" }, + { "VK_EXT_host_image_copy", "VK_VERSION_1_4" }, + { "VK_KHR_map_memory2", "VK_VERSION_1_4" }, + { "VK_EXT_shader_demote_to_helper_invocation", "VK_VERSION_1_3" }, + { "VK_KHR_shader_integer_dot_product", "VK_VERSION_1_3" }, + { "VK_EXT_texel_buffer_alignment", "VK_VERSION_1_3" }, + { "VK_KHR_shader_non_semantic_info", "VK_VERSION_1_3" }, + { "VK_EXT_private_data", "VK_VERSION_1_3" }, + { "VK_EXT_pipeline_creation_cache_control", "VK_VERSION_1_3" }, + { "VK_KHR_synchronization2", "VK_VERSION_1_3" }, + { "VK_KHR_zero_initialize_workgroup_memory", "VK_VERSION_1_3" }, + { "VK_EXT_ycbcr_2plane_444_formats", "VK_VERSION_1_3" }, + { "VK_EXT_image_robustness", "VK_VERSION_1_3" }, + { "VK_KHR_copy_commands2", "VK_VERSION_1_3" }, + { "VK_EXT_4444_formats", "VK_VERSION_1_3" }, + { "VK_ARM_rasterization_order_attachment_access", "VK_EXT_rasterization_order_attachment_access" }, + { "VK_VALVE_mutable_descriptor_type", "VK_EXT_mutable_descriptor_type" }, + { "VK_KHR_format_feature_flags2", "VK_VERSION_1_3" }, + { "VK_EXT_extended_dynamic_state2", "VK_VERSION_1_3" }, + { "VK_EXT_global_priority_query", "VK_KHR_global_priority" }, + { "VK_EXT_load_store_op_none", "VK_KHR_load_store_op_none" }, + { "VK_KHR_maintenance4", "VK_VERSION_1_3" }, + { "VK_KHR_shader_subgroup_rotate", "VK_VERSION_1_4" }, + { "VK_EXT_depth_clamp_zero_one", "VK_KHR_depth_clamp_zero_one" }, + { "VK_QCOM_fragment_density_map_offset", "VK_EXT_fragment_density_map_offset" }, + { "VK_EXT_pipeline_protected_access", "VK_VERSION_1_4" }, + { 
"VK_KHR_maintenance5", "VK_VERSION_1_4" }, + { "VK_KHR_vertex_attribute_divisor", "VK_VERSION_1_4" }, + { "VK_KHR_load_store_op_none", "VK_VERSION_1_4" }, + { "VK_KHR_shader_float_controls2", "VK_VERSION_1_4" }, + { "VK_KHR_index_type_uint8", "VK_VERSION_1_4" }, + { "VK_KHR_line_rasterization", "VK_VERSION_1_4" }, + { "VK_KHR_shader_expect_assume", "VK_VERSION_1_4" }, + { "VK_KHR_maintenance6", "VK_VERSION_1_4" } + }; + return promotedExtensions; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string getExtensionDeprecatedBy( std::string const & extension ) + { + if ( extension == "VK_EXT_debug_report" ) + { + return "VK_EXT_debug_utils"; + } + if ( extension == "VK_NV_glsl_shader" ) + { + return ""; + } + if ( extension == "VK_NV_dedicated_allocation" ) + { + return "VK_KHR_dedicated_allocation"; + } + if ( extension == "VK_AMD_gpu_shader_half_float" ) + { + return "VK_KHR_shader_float16_int8"; + } + if ( extension == "VK_IMG_format_pvrtc" ) + { + return ""; + } + if ( extension == "VK_NV_external_memory_capabilities" ) + { + return "VK_KHR_external_memory_capabilities"; + } + if ( extension == "VK_NV_external_memory" ) + { + return "VK_KHR_external_memory"; + } +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + if ( extension == "VK_NV_external_memory_win32" ) + { + return "VK_KHR_external_memory_win32"; + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + if ( extension == "VK_EXT_validation_flags" ) + { + return "VK_EXT_layer_settings"; + } + if ( extension == "VK_EXT_shader_subgroup_ballot" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_shader_subgroup_vote" ) + { + return "VK_VERSION_1_1"; + } +#if defined( VK_USE_PLATFORM_IOS_MVK ) + if ( extension == "VK_MVK_ios_surface" ) + { + return "VK_EXT_metal_surface"; + } +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + if ( extension == "VK_MVK_macos_surface" ) + { + return "VK_EXT_metal_surface"; + } +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + if ( extension == "VK_AMD_gpu_shader_int16" ) + { + return "VK_KHR_shader_float16_int8"; + } + if ( extension == "VK_NV_ray_tracing" ) + { + return "VK_KHR_ray_tracing_pipeline"; + } + if ( extension == "VK_EXT_buffer_device_address" ) + { + return "VK_KHR_buffer_device_address"; + } + if ( extension == "VK_EXT_validation_features" ) + { + return "VK_EXT_layer_settings"; + } +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( extension == "VK_NV_displacement_micromap" ) + { + return "VK_NV_cluster_acceleration_structure"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + return ""; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string getExtensionObsoletedBy( std::string const & extension ) + { + if ( extension == "VK_AMD_negative_viewport_height" ) + { + return "VK_KHR_maintenance1"; + } + return ""; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string getExtensionPromotedTo( std::string const & extension ) + { + if ( extension == "VK_KHR_sampler_mirror_clamp_to_edge" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_debug_marker" ) + { + return "VK_EXT_debug_utils"; + } + if ( extension == "VK_AMD_draw_indirect_count" ) + { + return "VK_KHR_draw_indirect_count"; + } + if ( extension == "VK_KHR_dynamic_rendering" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_multiview" ) + { + return "VK_VERSION_1_1"; + } +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + if ( extension == "VK_NV_win32_keyed_mutex" ) + { + return "VK_KHR_win32_keyed_mutex"; + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + if ( extension == 
"VK_KHR_get_physical_device_properties2" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_device_group" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_shader_draw_parameters" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_EXT_texture_compression_astc_hdr" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_pipeline_robustness" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_maintenance1" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_device_group_creation" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_external_memory_capabilities" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_external_memory" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_external_semaphore_capabilities" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_external_semaphore" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_push_descriptor" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_shader_float16_int8" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_16bit_storage" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_descriptor_update_template" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_imageless_framebuffer" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_create_renderpass2" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_external_fence_capabilities" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_external_fence" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_maintenance2" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_variable_pointers" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_dedicated_allocation" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_EXT_sampler_filter_minmax" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_storage_buffer_storage_class" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_EXT_inline_uniform_block" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_relaxed_block_layout" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_get_memory_requirements2" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_image_format_list" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_sampler_ycbcr_conversion" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_bind_memory2" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_EXT_descriptor_indexing" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_shader_viewport_index_layer" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_maintenance3" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_draw_indirect_count" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_global_priority" ) + { + return "VK_KHR_global_priority"; + } + if ( extension == "VK_KHR_shader_subgroup_extended_types" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_8bit_storage" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_shader_atomic_int64" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_calibrated_timestamps" ) + { + return "VK_KHR_calibrated_timestamps"; + } + if ( extension == "VK_KHR_global_priority" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_EXT_vertex_attribute_divisor" ) + { + return 
"VK_KHR_vertex_attribute_divisor"; + } + if ( extension == "VK_EXT_pipeline_creation_feedback" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_driver_properties" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_shader_float_controls" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_depth_stencil_resolve" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_NV_compute_shader_derivatives" ) + { + return "VK_KHR_compute_shader_derivatives"; + } + if ( extension == "VK_NV_fragment_shader_barycentric" ) + { + return "VK_KHR_fragment_shader_barycentric"; + } + if ( extension == "VK_KHR_timeline_semaphore" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_vulkan_memory_model" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_shader_terminate_invocation" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_scalar_block_layout" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_subgroup_size_control" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_dynamic_rendering_local_read" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_spirv_1_4" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_separate_depth_stencil_layouts" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_tooling_info" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_separate_stencil_usage" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_uniform_buffer_standard_layout" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_buffer_device_address" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_line_rasterization" ) + { + return "VK_KHR_line_rasterization"; + } + if ( extension == "VK_EXT_host_query_reset" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_index_type_uint8" ) + { + return "VK_KHR_index_type_uint8"; + } + if ( extension == "VK_EXT_extended_dynamic_state" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_host_image_copy" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_map_memory2" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_shader_integer_dot_product" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_texel_buffer_alignment" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_shader_non_semantic_info" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_private_data" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_pipeline_creation_cache_control" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_synchronization2" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_zero_initialize_workgroup_memory" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_ycbcr_2plane_444_formats" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_image_robustness" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_copy_commands2" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_4444_formats" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_ARM_rasterization_order_attachment_access" ) + { + return "VK_EXT_rasterization_order_attachment_access"; + } + if ( extension == "VK_VALVE_mutable_descriptor_type" ) + { + return "VK_EXT_mutable_descriptor_type"; + } + if ( extension == 
"VK_KHR_format_feature_flags2" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_extended_dynamic_state2" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_global_priority_query" ) + { + return "VK_KHR_global_priority"; + } + if ( extension == "VK_EXT_load_store_op_none" ) + { + return "VK_KHR_load_store_op_none"; + } + if ( extension == "VK_KHR_maintenance4" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_shader_subgroup_rotate" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_EXT_depth_clamp_zero_one" ) + { + return "VK_KHR_depth_clamp_zero_one"; + } + if ( extension == "VK_QCOM_fragment_density_map_offset" ) + { + return "VK_EXT_fragment_density_map_offset"; + } + if ( extension == "VK_EXT_pipeline_protected_access" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_maintenance5" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_vertex_attribute_divisor" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_load_store_op_none" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_shader_float_controls2" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_index_type_uint8" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_line_rasterization" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_shader_expect_assume" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_maintenance6" ) + { + return "VK_VERSION_1_4"; + } + return ""; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isDeprecatedExtension( std::string const & extension ) + { + return ( extension == "VK_EXT_debug_report" ) || ( extension == "VK_NV_glsl_shader" ) || ( extension == "VK_NV_dedicated_allocation" ) || + ( extension == "VK_AMD_gpu_shader_half_float" ) || ( extension == "VK_IMG_format_pvrtc" ) || ( extension == "VK_NV_external_memory_capabilities" ) || + ( extension == "VK_NV_external_memory" ) || +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + ( extension == "VK_NV_external_memory_win32" ) || +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + ( extension == "VK_EXT_validation_flags" ) || ( extension == "VK_EXT_shader_subgroup_ballot" ) || ( extension == "VK_EXT_shader_subgroup_vote" ) || +#if defined( VK_USE_PLATFORM_IOS_MVK ) + ( extension == "VK_MVK_ios_surface" ) || +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + ( extension == "VK_MVK_macos_surface" ) || +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + ( extension == "VK_AMD_gpu_shader_int16" ) || ( extension == "VK_NV_ray_tracing" ) || ( extension == "VK_EXT_buffer_device_address" ) || + ( extension == "VK_EXT_validation_features" ) || +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + ( extension == "VK_NV_displacement_micromap" ) || +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + false; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isDeviceExtension( std::string const & extension ) + { + return ( extension == "VK_KHR_swapchain" ) || ( extension == "VK_KHR_display_swapchain" ) || ( extension == "VK_NV_glsl_shader" ) || + ( extension == "VK_EXT_depth_range_unrestricted" ) || ( extension == "VK_KHR_sampler_mirror_clamp_to_edge" ) || + ( extension == "VK_IMG_filter_cubic" ) || ( extension == "VK_AMD_rasterization_order" ) || ( extension == "VK_AMD_shader_trinary_minmax" ) || + ( extension == "VK_AMD_shader_explicit_vertex_parameter" ) || ( extension == "VK_EXT_debug_marker" ) || ( extension == "VK_KHR_video_queue" ) || + ( extension == "VK_KHR_video_decode_queue" ) || ( extension == 
"VK_AMD_gcn_shader" ) || ( extension == "VK_NV_dedicated_allocation" ) || + ( extension == "VK_EXT_transform_feedback" ) || ( extension == "VK_NVX_binary_import" ) || ( extension == "VK_NVX_image_view_handle" ) || + ( extension == "VK_AMD_draw_indirect_count" ) || ( extension == "VK_AMD_negative_viewport_height" ) || + ( extension == "VK_AMD_gpu_shader_half_float" ) || ( extension == "VK_AMD_shader_ballot" ) || ( extension == "VK_KHR_video_encode_h264" ) || + ( extension == "VK_KHR_video_encode_h265" ) || ( extension == "VK_KHR_video_decode_h264" ) || ( extension == "VK_AMD_texture_gather_bias_lod" ) || + ( extension == "VK_AMD_shader_info" ) || ( extension == "VK_KHR_dynamic_rendering" ) || ( extension == "VK_AMD_shader_image_load_store_lod" ) || + ( extension == "VK_NV_corner_sampled_image" ) || ( extension == "VK_KHR_multiview" ) || ( extension == "VK_IMG_format_pvrtc" ) || + ( extension == "VK_NV_external_memory" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_NV_external_memory_win32" ) || ( extension == "VK_NV_win32_keyed_mutex" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_KHR_device_group" ) || ( extension == "VK_KHR_shader_draw_parameters" ) || ( extension == "VK_EXT_shader_subgroup_ballot" ) || + ( extension == "VK_EXT_shader_subgroup_vote" ) || ( extension == "VK_EXT_texture_compression_astc_hdr" ) || + ( extension == "VK_EXT_astc_decode_mode" ) || ( extension == "VK_EXT_pipeline_robustness" ) || ( extension == "VK_KHR_maintenance1" ) || + ( extension == "VK_KHR_external_memory" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_KHR_external_memory_win32" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_KHR_external_memory_fd" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_KHR_win32_keyed_mutex" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_KHR_external_semaphore" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_KHR_external_semaphore_win32" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_KHR_external_semaphore_fd" ) || ( extension == "VK_KHR_push_descriptor" ) || ( extension == "VK_EXT_conditional_rendering" ) || + ( extension == "VK_KHR_shader_float16_int8" ) || ( extension == "VK_KHR_16bit_storage" ) || ( extension == "VK_KHR_incremental_present" ) || + ( extension == "VK_KHR_descriptor_update_template" ) || ( extension == "VK_NV_clip_space_w_scaling" ) || ( extension == "VK_EXT_display_control" ) || + ( extension == "VK_GOOGLE_display_timing" ) || ( extension == "VK_NV_sample_mask_override_coverage" ) || + ( extension == "VK_NV_geometry_shader_passthrough" ) || ( extension == "VK_NV_viewport_array2" ) || + ( extension == "VK_NVX_multiview_per_view_attributes" ) || ( extension == "VK_NV_viewport_swizzle" ) || + ( extension == "VK_EXT_discard_rectangles" ) || ( extension == "VK_EXT_conservative_rasterization" ) || + ( extension == "VK_EXT_depth_clip_enable" ) || ( extension == "VK_EXT_hdr_metadata" ) || ( extension == "VK_KHR_imageless_framebuffer" ) || + ( extension == "VK_KHR_create_renderpass2" ) || ( extension == "VK_IMG_relaxed_line_rasterization" ) || + ( extension == "VK_KHR_shared_presentable_image" ) || ( extension == "VK_KHR_external_fence" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_KHR_external_fence_win32" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_KHR_external_fence_fd" ) || ( extension == "VK_KHR_performance_query" ) || ( extension == "VK_KHR_maintenance2" ) || + ( extension 
== "VK_KHR_variable_pointers" ) || ( extension == "VK_EXT_external_memory_dma_buf" ) || ( extension == "VK_EXT_queue_family_foreign" ) || + ( extension == "VK_KHR_dedicated_allocation" ) +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + || ( extension == "VK_ANDROID_external_memory_android_hardware_buffer" ) +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + || ( extension == "VK_EXT_sampler_filter_minmax" ) || ( extension == "VK_KHR_storage_buffer_storage_class" ) || + ( extension == "VK_AMD_gpu_shader_int16" ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + || ( extension == "VK_AMDX_shader_enqueue" ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + || ( extension == "VK_AMD_mixed_attachment_samples" ) || ( extension == "VK_AMD_shader_fragment_mask" ) || + ( extension == "VK_EXT_inline_uniform_block" ) || ( extension == "VK_EXT_shader_stencil_export" ) || ( extension == "VK_KHR_shader_bfloat16" ) || + ( extension == "VK_EXT_sample_locations" ) || ( extension == "VK_KHR_relaxed_block_layout" ) || ( extension == "VK_KHR_get_memory_requirements2" ) || + ( extension == "VK_KHR_image_format_list" ) || ( extension == "VK_EXT_blend_operation_advanced" ) || + ( extension == "VK_NV_fragment_coverage_to_color" ) || ( extension == "VK_KHR_acceleration_structure" ) || + ( extension == "VK_KHR_ray_tracing_pipeline" ) || ( extension == "VK_KHR_ray_query" ) || ( extension == "VK_NV_framebuffer_mixed_samples" ) || + ( extension == "VK_NV_fill_rectangle" ) || ( extension == "VK_NV_shader_sm_builtins" ) || ( extension == "VK_EXT_post_depth_coverage" ) || + ( extension == "VK_KHR_sampler_ycbcr_conversion" ) || ( extension == "VK_KHR_bind_memory2" ) || + ( extension == "VK_EXT_image_drm_format_modifier" ) || ( extension == "VK_EXT_validation_cache" ) || ( extension == "VK_EXT_descriptor_indexing" ) || + ( extension == "VK_EXT_shader_viewport_index_layer" ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + || ( extension == "VK_KHR_portability_subset" ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + || ( extension == "VK_NV_shading_rate_image" ) || ( extension == "VK_NV_ray_tracing" ) || ( extension == "VK_NV_representative_fragment_test" ) || + ( extension == "VK_KHR_maintenance3" ) || ( extension == "VK_KHR_draw_indirect_count" ) || ( extension == "VK_EXT_filter_cubic" ) || + ( extension == "VK_QCOM_render_pass_shader_resolve" ) || ( extension == "VK_EXT_global_priority" ) || + ( extension == "VK_KHR_shader_subgroup_extended_types" ) || ( extension == "VK_KHR_8bit_storage" ) || + ( extension == "VK_EXT_external_memory_host" ) || ( extension == "VK_AMD_buffer_marker" ) || ( extension == "VK_KHR_shader_atomic_int64" ) || + ( extension == "VK_KHR_shader_clock" ) || ( extension == "VK_AMD_pipeline_compiler_control" ) || ( extension == "VK_EXT_calibrated_timestamps" ) || + ( extension == "VK_AMD_shader_core_properties" ) || ( extension == "VK_KHR_video_decode_h265" ) || ( extension == "VK_KHR_global_priority" ) || + ( extension == "VK_AMD_memory_overallocation_behavior" ) || ( extension == "VK_EXT_vertex_attribute_divisor" ) +#if defined( VK_USE_PLATFORM_GGP ) + || ( extension == "VK_GGP_frame_token" ) +#endif /*VK_USE_PLATFORM_GGP*/ + || ( extension == "VK_EXT_pipeline_creation_feedback" ) || ( extension == "VK_KHR_driver_properties" ) || + ( extension == "VK_KHR_shader_float_controls" ) || ( extension == "VK_NV_shader_subgroup_partitioned" ) || + ( extension == "VK_KHR_depth_stencil_resolve" ) || ( extension == "VK_KHR_swapchain_mutable_format" ) || + ( extension == "VK_NV_compute_shader_derivatives" ) || ( extension == "VK_NV_mesh_shader" ) || + 
( extension == "VK_NV_fragment_shader_barycentric" ) || ( extension == "VK_NV_shader_image_footprint" ) || + ( extension == "VK_NV_scissor_exclusive" ) || ( extension == "VK_NV_device_diagnostic_checkpoints" ) || + ( extension == "VK_KHR_timeline_semaphore" ) || ( extension == "VK_INTEL_shader_integer_functions2" ) || + ( extension == "VK_INTEL_performance_query" ) || ( extension == "VK_KHR_vulkan_memory_model" ) || ( extension == "VK_EXT_pci_bus_info" ) || + ( extension == "VK_AMD_display_native_hdr" ) || ( extension == "VK_KHR_shader_terminate_invocation" ) || + ( extension == "VK_EXT_fragment_density_map" ) || ( extension == "VK_EXT_scalar_block_layout" ) || + ( extension == "VK_GOOGLE_hlsl_functionality1" ) || ( extension == "VK_GOOGLE_decorate_string" ) || + ( extension == "VK_EXT_subgroup_size_control" ) || ( extension == "VK_KHR_fragment_shading_rate" ) || + ( extension == "VK_AMD_shader_core_properties2" ) || ( extension == "VK_AMD_device_coherent_memory" ) || + ( extension == "VK_KHR_dynamic_rendering_local_read" ) || ( extension == "VK_EXT_shader_image_atomic_int64" ) || + ( extension == "VK_KHR_shader_quad_control" ) || ( extension == "VK_KHR_spirv_1_4" ) || ( extension == "VK_EXT_memory_budget" ) || + ( extension == "VK_EXT_memory_priority" ) || ( extension == "VK_NV_dedicated_allocation_image_aliasing" ) || + ( extension == "VK_KHR_separate_depth_stencil_layouts" ) || ( extension == "VK_EXT_buffer_device_address" ) || + ( extension == "VK_EXT_tooling_info" ) || ( extension == "VK_EXT_separate_stencil_usage" ) || ( extension == "VK_KHR_present_wait" ) || + ( extension == "VK_NV_cooperative_matrix" ) || ( extension == "VK_NV_coverage_reduction_mode" ) || + ( extension == "VK_EXT_fragment_shader_interlock" ) || ( extension == "VK_EXT_ycbcr_image_arrays" ) || + ( extension == "VK_KHR_uniform_buffer_standard_layout" ) || ( extension == "VK_EXT_provoking_vertex" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_EXT_full_screen_exclusive" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_KHR_buffer_device_address" ) || ( extension == "VK_EXT_line_rasterization" ) || ( extension == "VK_EXT_shader_atomic_float" ) || + ( extension == "VK_EXT_host_query_reset" ) || ( extension == "VK_EXT_index_type_uint8" ) || ( extension == "VK_EXT_extended_dynamic_state" ) || + ( extension == "VK_KHR_deferred_host_operations" ) || ( extension == "VK_KHR_pipeline_executable_properties" ) || + ( extension == "VK_EXT_host_image_copy" ) || ( extension == "VK_KHR_map_memory2" ) || ( extension == "VK_EXT_map_memory_placed" ) || + ( extension == "VK_EXT_shader_atomic_float2" ) || ( extension == "VK_EXT_swapchain_maintenance1" ) || + ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) || ( extension == "VK_NV_device_generated_commands" ) || + ( extension == "VK_NV_inherited_viewport_scissor" ) || ( extension == "VK_KHR_shader_integer_dot_product" ) || + ( extension == "VK_EXT_texel_buffer_alignment" ) || ( extension == "VK_QCOM_render_pass_transform" ) || + ( extension == "VK_EXT_depth_bias_control" ) || ( extension == "VK_EXT_device_memory_report" ) || ( extension == "VK_EXT_robustness2" ) || + ( extension == "VK_EXT_custom_border_color" ) || ( extension == "VK_GOOGLE_user_type" ) || ( extension == "VK_KHR_pipeline_library" ) || + ( extension == "VK_NV_present_barrier" ) || ( extension == "VK_KHR_shader_non_semantic_info" ) || ( extension == "VK_KHR_present_id" ) || + ( extension == "VK_EXT_private_data" ) || ( extension == "VK_EXT_pipeline_creation_cache_control" ) || 
+ ( extension == "VK_KHR_video_encode_queue" ) || ( extension == "VK_NV_device_diagnostics_config" ) || + ( extension == "VK_QCOM_render_pass_store_ops" ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + || ( extension == "VK_NV_cuda_kernel_launch" ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + || ( extension == "VK_QCOM_tile_shading" ) || ( extension == "VK_NV_low_latency" ) +#if defined( VK_USE_PLATFORM_METAL_EXT ) + || ( extension == "VK_EXT_metal_objects" ) +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + || ( extension == "VK_KHR_synchronization2" ) || ( extension == "VK_EXT_descriptor_buffer" ) || ( extension == "VK_EXT_graphics_pipeline_library" ) || + ( extension == "VK_AMD_shader_early_and_late_fragment_tests" ) || ( extension == "VK_KHR_fragment_shader_barycentric" ) || + ( extension == "VK_KHR_shader_subgroup_uniform_control_flow" ) || ( extension == "VK_KHR_zero_initialize_workgroup_memory" ) || + ( extension == "VK_NV_fragment_shading_rate_enums" ) || ( extension == "VK_NV_ray_tracing_motion_blur" ) || ( extension == "VK_EXT_mesh_shader" ) || + ( extension == "VK_EXT_ycbcr_2plane_444_formats" ) || ( extension == "VK_EXT_fragment_density_map2" ) || + ( extension == "VK_QCOM_rotated_copy_commands" ) || ( extension == "VK_EXT_image_robustness" ) || + ( extension == "VK_KHR_workgroup_memory_explicit_layout" ) || ( extension == "VK_KHR_copy_commands2" ) || + ( extension == "VK_EXT_image_compression_control" ) || ( extension == "VK_EXT_attachment_feedback_loop_layout" ) || + ( extension == "VK_EXT_4444_formats" ) || ( extension == "VK_EXT_device_fault" ) || + ( extension == "VK_ARM_rasterization_order_attachment_access" ) || ( extension == "VK_EXT_rgba10x6_formats" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_NV_acquire_winrt_display" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_VALVE_mutable_descriptor_type" ) || ( extension == "VK_EXT_vertex_input_dynamic_state" ) || + ( extension == "VK_EXT_physical_device_drm" ) || ( extension == "VK_EXT_device_address_binding_report" ) || + ( extension == "VK_EXT_depth_clip_control" ) || ( extension == "VK_EXT_primitive_topology_list_restart" ) || + ( extension == "VK_KHR_format_feature_flags2" ) || ( extension == "VK_EXT_present_mode_fifo_latest_ready" ) +#if defined( VK_USE_PLATFORM_FUCHSIA ) + || ( extension == "VK_FUCHSIA_external_memory" ) || ( extension == "VK_FUCHSIA_external_semaphore" ) || ( extension == "VK_FUCHSIA_buffer_collection" ) +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + || ( extension == "VK_HUAWEI_subpass_shading" ) || ( extension == "VK_HUAWEI_invocation_mask" ) || ( extension == "VK_NV_external_memory_rdma" ) || + ( extension == "VK_EXT_pipeline_properties" ) || ( extension == "VK_EXT_frame_boundary" ) || + ( extension == "VK_EXT_multisampled_render_to_single_sampled" ) || ( extension == "VK_EXT_extended_dynamic_state2" ) || + ( extension == "VK_EXT_color_write_enable" ) || ( extension == "VK_EXT_primitives_generated_query" ) || + ( extension == "VK_KHR_ray_tracing_maintenance1" ) || ( extension == "VK_EXT_global_priority_query" ) || + ( extension == "VK_EXT_image_view_min_lod" ) || ( extension == "VK_EXT_multi_draw" ) || ( extension == "VK_EXT_image_2d_view_of_3d" ) || + ( extension == "VK_EXT_shader_tile_image" ) || ( extension == "VK_EXT_opacity_micromap" ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + || ( extension == "VK_NV_displacement_micromap" ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + || ( extension == "VK_EXT_load_store_op_none" ) || ( extension == "VK_HUAWEI_cluster_culling_shader" ) || + ( 
extension == "VK_EXT_border_color_swizzle" ) || ( extension == "VK_EXT_pageable_device_local_memory" ) || ( extension == "VK_KHR_maintenance4" ) || + ( extension == "VK_ARM_shader_core_properties" ) || ( extension == "VK_KHR_shader_subgroup_rotate" ) || + ( extension == "VK_ARM_scheduling_controls" ) || ( extension == "VK_EXT_image_sliced_view_of_3d" ) || + ( extension == "VK_VALVE_descriptor_set_host_mapping" ) || ( extension == "VK_EXT_depth_clamp_zero_one" ) || + ( extension == "VK_EXT_non_seamless_cube_map" ) || ( extension == "VK_ARM_render_pass_striped" ) || + ( extension == "VK_QCOM_fragment_density_map_offset" ) || ( extension == "VK_NV_copy_memory_indirect" ) || + ( extension == "VK_NV_memory_decompression" ) || ( extension == "VK_NV_device_generated_commands_compute" ) || + ( extension == "VK_NV_ray_tracing_linear_swept_spheres" ) || ( extension == "VK_NV_linear_color_attachment" ) || + ( extension == "VK_KHR_shader_maximal_reconvergence" ) || ( extension == "VK_EXT_image_compression_control_swapchain" ) || + ( extension == "VK_QCOM_image_processing" ) || ( extension == "VK_EXT_nested_command_buffer" ) || + ( extension == "VK_EXT_external_memory_acquire_unmodified" ) || ( extension == "VK_EXT_extended_dynamic_state3" ) || + ( extension == "VK_EXT_subpass_merge_feedback" ) || ( extension == "VK_EXT_shader_module_identifier" ) || + ( extension == "VK_EXT_rasterization_order_attachment_access" ) || ( extension == "VK_NV_optical_flow" ) || + ( extension == "VK_EXT_legacy_dithering" ) || ( extension == "VK_EXT_pipeline_protected_access" ) +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + || ( extension == "VK_ANDROID_external_format_resolve" ) +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + || ( extension == "VK_KHR_maintenance5" ) || ( extension == "VK_AMD_anti_lag" ) || ( extension == "VK_KHR_ray_tracing_position_fetch" ) || + ( extension == "VK_EXT_shader_object" ) || ( extension == "VK_KHR_pipeline_binary" ) || ( extension == "VK_QCOM_tile_properties" ) || + ( extension == "VK_SEC_amigo_profiling" ) || ( extension == "VK_QCOM_multiview_per_view_viewports" ) || + ( extension == "VK_NV_ray_tracing_invocation_reorder" ) || ( extension == "VK_NV_cooperative_vector" ) || + ( extension == "VK_NV_extended_sparse_address_space" ) || ( extension == "VK_EXT_mutable_descriptor_type" ) || + ( extension == "VK_EXT_legacy_vertex_attributes" ) || ( extension == "VK_ARM_shader_core_builtins" ) || + ( extension == "VK_EXT_pipeline_library_group_handles" ) || ( extension == "VK_EXT_dynamic_rendering_unused_attachments" ) || + ( extension == "VK_NV_low_latency2" ) || ( extension == "VK_KHR_cooperative_matrix" ) || + ( extension == "VK_QCOM_multiview_per_view_render_areas" ) || ( extension == "VK_KHR_compute_shader_derivatives" ) || + ( extension == "VK_KHR_video_decode_av1" ) || ( extension == "VK_KHR_video_encode_av1" ) || ( extension == "VK_KHR_video_maintenance1" ) || + ( extension == "VK_NV_per_stage_descriptor_set" ) || ( extension == "VK_QCOM_image_processing2" ) || + ( extension == "VK_QCOM_filter_cubic_weights" ) || ( extension == "VK_QCOM_ycbcr_degamma" ) || ( extension == "VK_QCOM_filter_cubic_clamp" ) || + ( extension == "VK_EXT_attachment_feedback_loop_dynamic_state" ) || ( extension == "VK_KHR_vertex_attribute_divisor" ) || + ( extension == "VK_KHR_load_store_op_none" ) || ( extension == "VK_KHR_shader_float_controls2" ) +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + || ( extension == "VK_QNX_external_memory_screen_buffer" ) +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + || ( extension == 
"VK_MSFT_layered_driver" ) || ( extension == "VK_KHR_index_type_uint8" ) || ( extension == "VK_KHR_line_rasterization" ) || + ( extension == "VK_KHR_calibrated_timestamps" ) || ( extension == "VK_KHR_shader_expect_assume" ) || ( extension == "VK_KHR_maintenance6" ) || + ( extension == "VK_NV_descriptor_pool_overallocation" ) || ( extension == "VK_QCOM_tile_memory_heap" ) || + ( extension == "VK_KHR_video_encode_quantization_map" ) || ( extension == "VK_NV_raw_access_chains" ) || + ( extension == "VK_NV_external_compute_queue" ) || ( extension == "VK_KHR_shader_relaxed_extended_instruction" ) || + ( extension == "VK_NV_command_buffer_inheritance" ) || ( extension == "VK_KHR_maintenance7" ) || + ( extension == "VK_NV_shader_atomic_float16_vector" ) || ( extension == "VK_EXT_shader_replicated_composites" ) || + ( extension == "VK_NV_ray_tracing_validation" ) || ( extension == "VK_NV_cluster_acceleration_structure" ) || + ( extension == "VK_NV_partitioned_acceleration_structure" ) || ( extension == "VK_EXT_device_generated_commands" ) || + ( extension == "VK_KHR_maintenance8" ) || ( extension == "VK_MESA_image_alignment_control" ) || ( extension == "VK_EXT_depth_clamp_control" ) || + ( extension == "VK_KHR_video_maintenance2" ) || ( extension == "VK_HUAWEI_hdr_vivid" ) || ( extension == "VK_NV_cooperative_matrix2" ) || + ( extension == "VK_ARM_pipeline_opacity_micromap" ) +#if defined( VK_USE_PLATFORM_METAL_EXT ) + || ( extension == "VK_EXT_external_memory_metal" ) +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + || ( extension == "VK_KHR_depth_clamp_zero_one" ) || ( extension == "VK_EXT_vertex_attribute_robustness" ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + || ( extension == "VK_NV_present_metering" ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + || ( extension == "VK_EXT_fragment_density_map_offset" ); + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isInstanceExtension( std::string const & extension ) + { + return ( extension == "VK_KHR_surface" ) || ( extension == "VK_KHR_display" ) +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + || ( extension == "VK_KHR_xlib_surface" ) +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + || ( extension == "VK_KHR_xcb_surface" ) +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + || ( extension == "VK_KHR_wayland_surface" ) +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + || ( extension == "VK_KHR_android_surface" ) +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_KHR_win32_surface" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_EXT_debug_report" ) +#if defined( VK_USE_PLATFORM_GGP ) + || ( extension == "VK_GGP_stream_descriptor_surface" ) +#endif /*VK_USE_PLATFORM_GGP*/ + || ( extension == "VK_NV_external_memory_capabilities" ) || ( extension == "VK_KHR_get_physical_device_properties2" ) || + ( extension == "VK_EXT_validation_flags" ) +#if defined( VK_USE_PLATFORM_VI_NN ) + || ( extension == "VK_NN_vi_surface" ) +#endif /*VK_USE_PLATFORM_VI_NN*/ + || ( extension == "VK_KHR_device_group_creation" ) || ( extension == "VK_KHR_external_memory_capabilities" ) || + ( extension == "VK_KHR_external_semaphore_capabilities" ) || ( extension == "VK_EXT_direct_mode_display" ) +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + || ( extension == "VK_EXT_acquire_xlib_display" ) +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + || ( extension == "VK_EXT_display_surface_counter" ) || ( extension == 
"VK_EXT_swapchain_colorspace" ) || + ( extension == "VK_KHR_external_fence_capabilities" ) || ( extension == "VK_KHR_get_surface_capabilities2" ) || + ( extension == "VK_KHR_get_display_properties2" ) +#if defined( VK_USE_PLATFORM_IOS_MVK ) + || ( extension == "VK_MVK_ios_surface" ) +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + || ( extension == "VK_MVK_macos_surface" ) +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + || ( extension == "VK_EXT_debug_utils" ) +#if defined( VK_USE_PLATFORM_FUCHSIA ) + || ( extension == "VK_FUCHSIA_imagepipe_surface" ) +#endif /*VK_USE_PLATFORM_FUCHSIA*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + || ( extension == "VK_EXT_metal_surface" ) +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + || ( extension == "VK_KHR_surface_protected_capabilities" ) || ( extension == "VK_EXT_validation_features" ) || + ( extension == "VK_EXT_headless_surface" ) || ( extension == "VK_EXT_surface_maintenance1" ) || ( extension == "VK_EXT_acquire_drm_display" ) +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + || ( extension == "VK_EXT_directfb_surface" ) +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + || ( extension == "VK_QNX_screen_surface" ) +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + || ( extension == "VK_KHR_portability_enumeration" ) || ( extension == "VK_GOOGLE_surfaceless_query" ) || + ( extension == "VK_LUNARG_direct_driver_loading" ) || ( extension == "VK_EXT_layer_settings" ) || ( extension == "VK_NV_display_stereo" ); + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isObsoletedExtension( std::string const & extension ) + { + return ( extension == "VK_AMD_negative_viewport_height" ); + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isPromotedExtension( std::string const & extension ) + { + return ( extension == "VK_KHR_sampler_mirror_clamp_to_edge" ) || ( extension == "VK_EXT_debug_marker" ) || ( extension == "VK_AMD_draw_indirect_count" ) || + ( extension == "VK_KHR_dynamic_rendering" ) || ( extension == "VK_KHR_multiview" ) || +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + ( extension == "VK_NV_win32_keyed_mutex" ) || +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + ( extension == "VK_KHR_get_physical_device_properties2" ) || ( extension == "VK_KHR_device_group" ) || + ( extension == "VK_KHR_shader_draw_parameters" ) || ( extension == "VK_EXT_texture_compression_astc_hdr" ) || + ( extension == "VK_EXT_pipeline_robustness" ) || ( extension == "VK_KHR_maintenance1" ) || ( extension == "VK_KHR_device_group_creation" ) || + ( extension == "VK_KHR_external_memory_capabilities" ) || ( extension == "VK_KHR_external_memory" ) || + ( extension == "VK_KHR_external_semaphore_capabilities" ) || ( extension == "VK_KHR_external_semaphore" ) || + ( extension == "VK_KHR_push_descriptor" ) || ( extension == "VK_KHR_shader_float16_int8" ) || ( extension == "VK_KHR_16bit_storage" ) || + ( extension == "VK_KHR_descriptor_update_template" ) || ( extension == "VK_KHR_imageless_framebuffer" ) || + ( extension == "VK_KHR_create_renderpass2" ) || ( extension == "VK_KHR_external_fence_capabilities" ) || ( extension == "VK_KHR_external_fence" ) || + ( extension == "VK_KHR_maintenance2" ) || ( extension == "VK_KHR_variable_pointers" ) || ( extension == "VK_KHR_dedicated_allocation" ) || + ( extension == "VK_EXT_sampler_filter_minmax" ) || ( extension == "VK_KHR_storage_buffer_storage_class" ) || + ( extension == "VK_EXT_inline_uniform_block" ) || ( extension == "VK_KHR_relaxed_block_layout" ) || + ( extension == "VK_KHR_get_memory_requirements2" ) 
|| ( extension == "VK_KHR_image_format_list" ) || + ( extension == "VK_KHR_sampler_ycbcr_conversion" ) || ( extension == "VK_KHR_bind_memory2" ) || ( extension == "VK_EXT_descriptor_indexing" ) || + ( extension == "VK_EXT_shader_viewport_index_layer" ) || ( extension == "VK_KHR_maintenance3" ) || ( extension == "VK_KHR_draw_indirect_count" ) || + ( extension == "VK_EXT_global_priority" ) || ( extension == "VK_KHR_shader_subgroup_extended_types" ) || ( extension == "VK_KHR_8bit_storage" ) || + ( extension == "VK_KHR_shader_atomic_int64" ) || ( extension == "VK_EXT_calibrated_timestamps" ) || ( extension == "VK_KHR_global_priority" ) || + ( extension == "VK_EXT_vertex_attribute_divisor" ) || ( extension == "VK_EXT_pipeline_creation_feedback" ) || + ( extension == "VK_KHR_driver_properties" ) || ( extension == "VK_KHR_shader_float_controls" ) || ( extension == "VK_KHR_depth_stencil_resolve" ) || + ( extension == "VK_NV_compute_shader_derivatives" ) || ( extension == "VK_NV_fragment_shader_barycentric" ) || + ( extension == "VK_KHR_timeline_semaphore" ) || ( extension == "VK_KHR_vulkan_memory_model" ) || + ( extension == "VK_KHR_shader_terminate_invocation" ) || ( extension == "VK_EXT_scalar_block_layout" ) || + ( extension == "VK_EXT_subgroup_size_control" ) || ( extension == "VK_KHR_dynamic_rendering_local_read" ) || ( extension == "VK_KHR_spirv_1_4" ) || + ( extension == "VK_KHR_separate_depth_stencil_layouts" ) || ( extension == "VK_EXT_tooling_info" ) || + ( extension == "VK_EXT_separate_stencil_usage" ) || ( extension == "VK_KHR_uniform_buffer_standard_layout" ) || + ( extension == "VK_KHR_buffer_device_address" ) || ( extension == "VK_EXT_line_rasterization" ) || ( extension == "VK_EXT_host_query_reset" ) || + ( extension == "VK_EXT_index_type_uint8" ) || ( extension == "VK_EXT_extended_dynamic_state" ) || ( extension == "VK_EXT_host_image_copy" ) || + ( extension == "VK_KHR_map_memory2" ) || ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) || + ( extension == "VK_KHR_shader_integer_dot_product" ) || ( extension == "VK_EXT_texel_buffer_alignment" ) || + ( extension == "VK_KHR_shader_non_semantic_info" ) || ( extension == "VK_EXT_private_data" ) || + ( extension == "VK_EXT_pipeline_creation_cache_control" ) || ( extension == "VK_KHR_synchronization2" ) || + ( extension == "VK_KHR_zero_initialize_workgroup_memory" ) || ( extension == "VK_EXT_ycbcr_2plane_444_formats" ) || + ( extension == "VK_EXT_image_robustness" ) || ( extension == "VK_KHR_copy_commands2" ) || ( extension == "VK_EXT_4444_formats" ) || + ( extension == "VK_ARM_rasterization_order_attachment_access" ) || ( extension == "VK_VALVE_mutable_descriptor_type" ) || + ( extension == "VK_KHR_format_feature_flags2" ) || ( extension == "VK_EXT_extended_dynamic_state2" ) || + ( extension == "VK_EXT_global_priority_query" ) || ( extension == "VK_EXT_load_store_op_none" ) || ( extension == "VK_KHR_maintenance4" ) || + ( extension == "VK_KHR_shader_subgroup_rotate" ) || ( extension == "VK_EXT_depth_clamp_zero_one" ) || + ( extension == "VK_QCOM_fragment_density_map_offset" ) || ( extension == "VK_EXT_pipeline_protected_access" ) || + ( extension == "VK_KHR_maintenance5" ) || ( extension == "VK_KHR_vertex_attribute_divisor" ) || ( extension == "VK_KHR_load_store_op_none" ) || + ( extension == "VK_KHR_shader_float_controls2" ) || ( extension == "VK_KHR_index_type_uint8" ) || ( extension == "VK_KHR_line_rasterization" ) || + ( extension == "VK_KHR_shader_expect_assume" ) || ( extension == "VK_KHR_maintenance6" ); + } +} // 
namespace VULKAN_HPP_NAMESPACE + +#endif diff --git a/include/vulkan/vulkan_format_traits.hpp b/include/vulkan/vulkan_format_traits.hpp new file mode 100644 index 00000000..d5802f58 --- /dev/null +++ b/include/vulkan/vulkan_format_traits.hpp @@ -0,0 +1,7669 @@ +// Copyright 2015-2025 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#ifndef VULKAN_FORMAT_TRAITS_HPP +#define VULKAN_FORMAT_TRAITS_HPP + +#include <vulkan/vulkan.hpp> + +namespace VULKAN_HPP_NAMESPACE +{ + + //===================== + //=== Format Traits === + //===================== + + // The three-dimensional extent of a texel block. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 std::array<uint8_t, 3> blockExtent( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return { { 5, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return { { 5, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return { { 5, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return { { 5, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return { { 6, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return { { 6, 5, 1 } }; + case 
VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return { { 6, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return { { 6, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return { { 8, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return { { 8, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return { { 8, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return { { 8, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return { { 8, 8, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return { { 8, 8, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return { { 10, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return { { 10, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return { { 10, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return { { 10, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return { { 10, 8, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return { { 10, 8, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return { { 10, 10, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return { { 10, 10, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return { { 12, 10, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return { { 12, 10, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return { { 12, 12, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return { { 12, 12, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return { { 5, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return { { 5, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return { { 6, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return { { 6, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return { { 8, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return { { 8, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return { { 8, 8, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return { { 10, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return { { 10, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return { { 10, 8, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return { { 10, 10, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return { { 12, 10, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return { { 12, 12, 1 } }; + case 
VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return { { 8, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return { { 8, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return { { 8, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return { { 8, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return { { 4, 4, 1 } }; + + default: return { { 1, 1, 1 } }; + } + } + + // The texel block size in bytes. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t blockSize( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 4; + 
case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 8; 
+ case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 12; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 12; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 12; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 24; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 24; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 24; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 32; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 32; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 32; + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 4; + case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 5; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 16; + case 
VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 8; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 8; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 8; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 2; + case 
VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 8; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 8; + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 8; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 8; + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 8; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: return 4; + + default: VULKAN_HPP_ASSERT( false ); return 0; + } + } + + // The class of the format (can't be 
just named "class"!) + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * compatibilityClass( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return "32-bit"; + case 
VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32Uint: 
return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return "96-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return "96-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return "96-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return "128-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return "128-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return "128-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return "128-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return "128-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return "128-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return "192-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return "192-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return "192-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return "256-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return "256-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return "256-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return "D16"; + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return "D24"; + case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return "D32"; + case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return "S8"; + case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return "D16S8"; + case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return "D24S8"; + case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return "D32S8"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return "BC1_RGB"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return "BC1_RGB"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return "BC1_RGBA"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return "BC1_RGBA"; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return "BC2"; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return "BC2"; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return "BC3"; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return "BC3"; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return "BC4"; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return "BC4"; + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return "BC5"; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return "BC5"; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return "BC6H"; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return "BC6H"; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return "BC7"; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return "BC7"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return "ETC2_RGB"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return "ETC2_RGB"; + case 
VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return "ETC2_RGBA"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return "ETC2_RGBA"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return "ETC2_EAC_RGBA"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return "ETC2_EAC_RGBA"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return "EAC_R"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return "EAC_R"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return "EAC_RG"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return "EAC_RG"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return "ASTC_4x4"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return "ASTC_4x4"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return "ASTC_5x4"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return "ASTC_5x4"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return "ASTC_5x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return "ASTC_5x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return "ASTC_6x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return "ASTC_6x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return "ASTC_6x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return "ASTC_6x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return "ASTC_8x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return "ASTC_8x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return "ASTC_8x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return "ASTC_8x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return "ASTC_8x8"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return "ASTC_8x8"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return "ASTC_10x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return "ASTC_10x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return "ASTC_10x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return "ASTC_10x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return "ASTC_10x8"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return "ASTC_10x8"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return "ASTC_10x10"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return "ASTC_10x10"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return "ASTC_12x10"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return "ASTC_12x10"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return "ASTC_12x12"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return "ASTC_12x12"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return "32-bit G8B8G8R8"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return "32-bit B8G8R8G8"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return "8-bit 3-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return "8-bit 2-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return "8-bit 3-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return "8-bit 2-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return "8-bit 3-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return "32-bit"; + case 
VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return "64-bit R10G10B10A10"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return "64-bit G10B10G10R10"; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return "64-bit B10G10R10G10"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return "10-bit 3-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return "10-bit 2-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return "10-bit 3-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return "10-bit 2-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return "10-bit 3-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return "64-bit R12G12B12A12"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return "64-bit G12B12G12R12"; + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return "64-bit B12G12R12G12"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return "12-bit 3-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return "12-bit 2-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return "12-bit 3-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return "12-bit 2-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return "12-bit 3-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return "64-bit G16B16G16R16"; + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return "64-bit B16G16R16G16"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return "16-bit 3-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return "16-bit 2-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return "16-bit 3-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return "16-bit 2-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return "16-bit 3-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return "8-bit 2-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return "10-bit 2-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return "12-bit 2-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return "16-bit 2-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return "ASTC_4x4"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return "ASTC_5x4"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return "ASTC_5x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return "ASTC_6x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return "ASTC_6x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return "ASTC_8x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return "ASTC_8x6"; + case 
VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return "ASTC_8x8"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return "ASTC_10x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return "ASTC_10x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return "ASTC_10x8"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return "ASTC_10x10"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return "ASTC_12x10"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return "ASTC_12x12"; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: return "8-bit alpha"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return "PVRTC1_2BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return "PVRTC1_4BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return "PVRTC2_2BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return "PVRTC2_4BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return "PVRTC1_2BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return "PVRTC1_4BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return "PVRTC2_2BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return "PVRTC2_4BPP"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: return "32-bit"; + + default: VULKAN_HPP_ASSERT( false ); return ""; + } + } + + // The number of bits in this component, if not compressed, otherwise 0. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentBits( VULKAN_HPP_NAMESPACE::Format format, uint8_t component ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: + switch ( component ) + { + case 0: return 4; + case 1: return 4; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: + switch ( component ) + { + case 0: return 4; + case 1: return 4; + case 2: return 4; + case 3: return 4; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: + switch ( component ) + { + case 0: return 4; + case 1: return 4; + case 2: return 4; + case 3: return 4; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: + switch ( component ) + { + case 0: return 5; + case 1: return 6; + case 2: return 5; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: + switch ( component ) + { + case 0: return 5; + case 1: return 6; + case 2: return 5; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: + switch ( component ) + { + case 0: return 5; + case 1: return 5; + case 2: return 5; + case 3: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: + switch ( component ) + { + case 0: return 5; + case 1: return 5; + case 2: return 5; + case 3: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: + switch ( component ) + { + case 0: return 1; + case 1: return 5; + case 2: return 5; + case 3: return 5; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case 
VULKAN_HPP_NAMESPACE::Format::eR8Snorm: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: 
VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case 
VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case 
VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + 
default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Uint: + switch ( component ) + { + case 0: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Sint: + switch ( component ) + { + case 0: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: + switch ( component ) + { + case 0: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + 
default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + case 2: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + case 2: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + case 2: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + case 2: return 32; + case 3: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + case 2: return 32; + case 3: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + case 2: return 32; + case 3: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Uint: + switch ( component ) + { + case 0: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Sint: + switch ( component ) + { + case 0: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: + switch ( component ) + { + case 0: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + case 2: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + case 2: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + case 2: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + case 2: return 64; + case 3: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + case 
2: return 64; + case 3: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + case 2: return 64; + case 3: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: + switch ( component ) + { + case 0: return 10; + case 1: return 11; + case 2: return 11; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: + switch ( component ) + { + case 0: return 9; + case 1: return 9; + case 2: return 9; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: + switch ( component ) + { + case 0: return 24; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: + switch ( component ) + { + case 0: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eS8Uint: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: + switch ( component ) + { + case 0: return 16; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: + switch ( component ) + { + case 0: return 24; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: + switch ( component ) + { + case 0: return 32; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: + switch ( component ) + { + case 0: return 11; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: + switch ( component ) + { + case 0: return 11; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: + switch ( component ) + { + case 0: return 11; + case 1: return 11; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: + switch ( component ) + { + case 0: return 11; + case 1: return 11; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + 
case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: + switch ( component ) + { + case 0: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: + switch ( component ) + { + case 0: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + case 3: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + case 3: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + case 3: return 12; + 
default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: + switch ( component ) + { 
+ case 0: return 4; + case 1: return 4; + case 2: return 4; + case 3: return 4; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: + switch ( component ) + { + case 0: return 4; + case 1: return 4; + case 2: return 4; + case 3: return 4; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: + switch ( component ) + { + case 0: return 1; + case 1: return 5; + case 2: return 5; + case 3: return 5; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + + default: return 0; + } + } + + // The number of components of this format. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentCount( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 4; + case 
VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 4; + case 
VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 4;
+      case
VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: return 2; + + default: return 0; + } + } + + // The name of the component + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * componentName( VULKAN_HPP_NAMESPACE::Format format, uint8_t component ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: + switch ( component ) + { + case 0: return "A"; + case 1: return "R"; + case 2: return "G"; + case 3: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: + switch ( component ) + { 
+ case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); 
return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: 
VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "R"; + case 2: return "G"; + case 3: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "R"; + case 2: return "G"; + case 3: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "R"; + case 2: return "G"; + case 3: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "R"; + case 2: return "G"; + case 3: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "R"; + case 2: return "G"; + case 3: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "R"; + case 2: return "G"; + case 3: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case 
VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + 
} + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Uint: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Sint: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); 
return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Uint: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Sint: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); 
return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: + switch ( component ) + { + case 0: return "D"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: + switch ( component ) + { + case 0: return "D"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: + switch ( component ) + { + case 0: return "D"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eS8Uint: + switch ( component ) + { + case 0: return "S"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: + switch ( component ) + { + case 0: return "D"; + case 1: return "S"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: + switch ( component ) + { + case 0: return "D"; + case 1: return "S"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: + switch ( component ) + { + case 0: return "D"; + case 1: return "S"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( 
false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); 
return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + 
case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + 
default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case 
VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: + switch ( component ) + { + case 0: return "A"; + case 1: return "R"; + case 2: return "G"; + case 3: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: + switch ( 
component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false 
); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: + switch ( component ) + { + case 0: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + + default: return ""; + } + } + + // The numeric format of the component + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * componentNumericFormat( VULKAN_HPP_NAMESPACE::Format format, uint8_t component ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case 
VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: + switch ( component ) + { + case 0: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: + switch ( component ) + { + case 0: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: + switch ( component ) + { + case 0: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: + switch ( component ) + { + case 0: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: + switch ( component ) + { + case 0: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: + switch ( component ) + { + case 0: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + case 2: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: 
+ switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + case 2: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + case 2: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + case 2: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + case 2: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + case 2: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + case 2: return "SNORM"; + case 3: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + case 2: return "USCALED"; + case 3: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + case 2: return "SSCALED"; + case 3: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: + switch ( component ) + { + case 0: return "UINT"; + 
case 1: return "UINT"; + case 2: return "UINT"; + case 3: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + case 3: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + case 2: return "SNORM"; + case 3: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + case 2: return "USCALED"; + case 3: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + case 2: return "SSCALED"; + case 3: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + case 3: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + case 3: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + case 2: return "SNORM"; + case 3: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + case 2: return "USCALED"; + case 3: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + case 2: return "SSCALED"; + case 3: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + case 3: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case 
VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + case 3: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + case 2: return "SNORM"; + case 3: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + case 2: return "USCALED"; + case 3: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + case 2: return "SSCALED"; + case 3: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + case 3: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + case 3: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + case 2: return "SNORM"; + case 3: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + case 2: return "USCALED"; + case 3: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + case 2: return "SSCALED"; + case 3: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + case 3: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + case 3: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: + switch ( component ) + { + case 0: return 
"UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: + switch ( component ) + { + case 0: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: + switch ( component ) + { + case 0: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: + switch ( component ) + { + case 0: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: + switch ( component ) + { + case 0: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + case 2: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + case 2: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + case 2: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + default: 
VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + case 2: return "SNORM"; + case 3: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + case 2: return "USCALED"; + case 3: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + case 2: return "SSCALED"; + case 3: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + case 3: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + case 3: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Uint: + switch ( component ) + { + case 0: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Sint: + switch ( component ) + { + case 0: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return 
"SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + case 3: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + case 3: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Uint: + switch ( component ) + { + case 0: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Sint: + switch ( component ) + { + case 0: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + case 3: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + case 3: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: + switch ( component ) + { + case 0: return "UFLOAT"; + case 1: return "UFLOAT"; + case 2: return "UFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: + switch ( component ) + { + case 0: return "UFLOAT"; + case 1: return "UFLOAT"; + case 2: return "UFLOAT"; + default: 
VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: + switch ( component ) + { + case 0: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: + switch ( component ) + { + case 0: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eS8Uint: + switch ( component ) + { + case 0: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: + switch ( component ) + { + case 0: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + default: 
VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: + switch ( component ) + { + case 0: return "UFLOAT"; + case 1: return "UFLOAT"; + case 2: return "UFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: + switch ( component ) + { + case 0: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case 
VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: 
VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return 
"UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: + switch ( component ) + { + case 0: return "UNORM"; + 
case 1: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + 
case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: + switch ( component ) + { + case 0: 
return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: + switch ( component ) + { + case 0: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case 
VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: + switch ( component ) + { + case 0: return "SFIXED5"; + case 1: return "SFIXED5"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + + default: return ""; + } + } + + // The plane this component lies in. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentPlaneIndex( VULKAN_HPP_NAMESPACE::Format format, uint8_t component ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + 
default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + + default: return 0; + } + } + + // True, if the components of this format are compressed, otherwise false. 
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool componentsAreCompressed( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: + case 
VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return true; + default: return false; + } + } + + // A textual description of the compression scheme, or an empty string if it is not compressed + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * compressionScheme( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return "EAC"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return "EAC"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return "EAC"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return "EAC"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return "ASTC LDR"; + case 
VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return "PVRTC"; + + default: return ""; + } + } + + // True, if this format is a compressed one. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool isCompressed( VULKAN_HPP_NAMESPACE::Format format ) + { + return ( *VULKAN_HPP_NAMESPACE::compressionScheme( format ) != 0 ); + } + + // The number of bits into which the format is packed. 
A single image element in this format + // can be stored in the same space as a scalar type of this bit width. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t packed( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 16; + case 
VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: return 16; + + default: return 0; + } + } + + // The single-plane format that this plane is compatible with. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 VULKAN_HPP_NAMESPACE::Format planeCompatibleFormat( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case 
VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm; + default: VULKAN_HPP_ASSERT( false ); return 
VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + + default: VULKAN_HPP_ASSERT( plane == 0 ); return format; + } + } + + // The number of image planes of this format. 
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeCount( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 2; + + default: return 1; + } + } + + // The relative height of this plane. A value of k means that this plane is 1/k the height of the overall format. 
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeHeightDivisor( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case 
VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + + default: VULKAN_HPP_ASSERT( plane == 0 ); return 1; + } + } + + // The relative width of this plane. A value of k means that this plane is 1/k the width of the overall format. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeWidthDivisor( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + 
default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + + default: VULKAN_HPP_ASSERT( plane == 0 ); return 1; + } + } + + // The number of texels in a texel block. 
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t texelsPerBlock( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 1; + case 
VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 1; + case 
VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 20; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 20; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 25; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 25; + case 
VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 30; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 30; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 36; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 36; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 40; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 40; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 48; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 48; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 64; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 64; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 50; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 50; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 60; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 60; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 80; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 80; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 100; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 100; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 120; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 120; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 144; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 144; + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 1; + case 
VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 20; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 25; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 30; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 36; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 40; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 48; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 64; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 50; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 60; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 80; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 100; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 120; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 144; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: return 1; + + default: VULKAN_HPP_ASSERT( false ); return 0; + } + } + +} // namespace VULKAN_HPP_NAMESPACE +#endif diff --git a/include/vulkan/vulkan_fuchsia.h b/include/vulkan/vulkan_fuchsia.h new file mode 100644 index 00000000..0af61bda --- /dev/null +++ b/include/vulkan/vulkan_fuchsia.h @@ -0,0 +1,262 @@ +#ifndef VULKAN_FUCHSIA_H_ +#define VULKAN_FUCHSIA_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_FUCHSIA_imagepipe_surface is a preprocessor guard. Do not pass it to API calls. 
+#define VK_FUCHSIA_imagepipe_surface 1 +#define VK_FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION 1 +#define VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME "VK_FUCHSIA_imagepipe_surface" +typedef VkFlags VkImagePipeSurfaceCreateFlagsFUCHSIA; +typedef struct VkImagePipeSurfaceCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkImagePipeSurfaceCreateFlagsFUCHSIA flags; + zx_handle_t imagePipeHandle; +} VkImagePipeSurfaceCreateInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateImagePipeSurfaceFUCHSIA)(VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImagePipeSurfaceFUCHSIA( + VkInstance instance, + const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +// VK_FUCHSIA_external_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_FUCHSIA_external_memory 1 +#define VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME "VK_FUCHSIA_external_memory" +typedef struct VkImportMemoryZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + zx_handle_t handle; +} VkImportMemoryZirconHandleInfoFUCHSIA; + +typedef struct VkMemoryZirconHandlePropertiesFUCHSIA { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryZirconHandlePropertiesFUCHSIA; + +typedef struct VkMemoryGetZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetZirconHandleInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryZirconHandleFUCHSIA)(VkDevice device, const VkMemoryGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle, VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandleFUCHSIA( + VkDevice device, + const VkMemoryGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, + zx_handle_t* pZirconHandle); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandlePropertiesFUCHSIA( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties); +#endif + + +// VK_FUCHSIA_external_semaphore is a preprocessor guard. Do not pass it to API calls. 
+#define VK_FUCHSIA_external_semaphore 1 +#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 +#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_FUCHSIA_external_semaphore" +typedef struct VkImportSemaphoreZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + zx_handle_t zirconHandle; +} VkImportSemaphoreZirconHandleInfoFUCHSIA; + +typedef struct VkSemaphoreGetZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetZirconHandleInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreZirconHandleFUCHSIA)(VkDevice device, const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreZirconHandleFUCHSIA)(VkDevice device, const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreZirconHandleFUCHSIA( + VkDevice device, + const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreZirconHandleFUCHSIA( + VkDevice device, + const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, + zx_handle_t* pZirconHandle); +#endif + + +// VK_FUCHSIA_buffer_collection is a preprocessor guard. Do not pass it to API calls. +#define VK_FUCHSIA_buffer_collection 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferCollectionFUCHSIA) +#define VK_FUCHSIA_BUFFER_COLLECTION_SPEC_VERSION 2 +#define VK_FUCHSIA_BUFFER_COLLECTION_EXTENSION_NAME "VK_FUCHSIA_buffer_collection" +typedef VkFlags VkImageFormatConstraintsFlagsFUCHSIA; + +typedef enum VkImageConstraintsInfoFlagBitsFUCHSIA { + VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_RARELY_FUCHSIA = 0x00000001, + VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_OFTEN_FUCHSIA = 0x00000002, + VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_RARELY_FUCHSIA = 0x00000004, + VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_OFTEN_FUCHSIA = 0x00000008, + VK_IMAGE_CONSTRAINTS_INFO_PROTECTED_OPTIONAL_FUCHSIA = 0x00000010, + VK_IMAGE_CONSTRAINTS_INFO_FLAG_BITS_MAX_ENUM_FUCHSIA = 0x7FFFFFFF +} VkImageConstraintsInfoFlagBitsFUCHSIA; +typedef VkFlags VkImageConstraintsInfoFlagsFUCHSIA; +typedef struct VkBufferCollectionCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + zx_handle_t collectionToken; +} VkBufferCollectionCreateInfoFUCHSIA; + +typedef struct VkImportMemoryBufferCollectionFUCHSIA { + VkStructureType sType; + const void* pNext; + VkBufferCollectionFUCHSIA collection; + uint32_t index; +} VkImportMemoryBufferCollectionFUCHSIA; + +typedef struct VkBufferCollectionImageCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkBufferCollectionFUCHSIA collection; + uint32_t index; +} VkBufferCollectionImageCreateInfoFUCHSIA; + +typedef struct VkBufferCollectionConstraintsInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + uint32_t minBufferCount; + uint32_t maxBufferCount; + uint32_t minBufferCountForCamping; + uint32_t minBufferCountForDedicatedSlack; + uint32_t minBufferCountForSharedSlack; +} VkBufferCollectionConstraintsInfoFUCHSIA; + +typedef struct VkBufferConstraintsInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkBufferCreateInfo createInfo; + VkFormatFeatureFlags requiredFormatFeatures; + VkBufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints; +} 
VkBufferConstraintsInfoFUCHSIA; + +typedef struct VkBufferCollectionBufferCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkBufferCollectionFUCHSIA collection; + uint32_t index; +} VkBufferCollectionBufferCreateInfoFUCHSIA; + +typedef struct VkSysmemColorSpaceFUCHSIA { + VkStructureType sType; + const void* pNext; + uint32_t colorSpace; +} VkSysmemColorSpaceFUCHSIA; + +typedef struct VkBufferCollectionPropertiesFUCHSIA { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; + uint32_t bufferCount; + uint32_t createInfoIndex; + uint64_t sysmemPixelFormat; + VkFormatFeatureFlags formatFeatures; + VkSysmemColorSpaceFUCHSIA sysmemColorSpaceIndex; + VkComponentMapping samplerYcbcrConversionComponents; + VkSamplerYcbcrModelConversion suggestedYcbcrModel; + VkSamplerYcbcrRange suggestedYcbcrRange; + VkChromaLocation suggestedXChromaOffset; + VkChromaLocation suggestedYChromaOffset; +} VkBufferCollectionPropertiesFUCHSIA; + +typedef struct VkImageFormatConstraintsInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkImageCreateInfo imageCreateInfo; + VkFormatFeatureFlags requiredFormatFeatures; + VkImageFormatConstraintsFlagsFUCHSIA flags; + uint64_t sysmemPixelFormat; + uint32_t colorSpaceCount; + const VkSysmemColorSpaceFUCHSIA* pColorSpaces; +} VkImageFormatConstraintsInfoFUCHSIA; + +typedef struct VkImageConstraintsInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + uint32_t formatConstraintsCount; + const VkImageFormatConstraintsInfoFUCHSIA* pFormatConstraints; + VkBufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints; + VkImageConstraintsInfoFlagsFUCHSIA flags; +} VkImageConstraintsInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferCollectionFUCHSIA)(VkDevice device, const VkBufferCollectionCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferCollectionFUCHSIA* pCollection); +typedef VkResult (VKAPI_PTR *PFN_vkSetBufferCollectionImageConstraintsFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkImageConstraintsInfoFUCHSIA* pImageConstraintsInfo); +typedef VkResult (VKAPI_PTR *PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkBufferConstraintsInfoFUCHSIA* pBufferConstraintsInfo); +typedef void (VKAPI_PTR *PFN_vkDestroyBufferCollectionFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetBufferCollectionPropertiesFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, VkBufferCollectionPropertiesFUCHSIA* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferCollectionFUCHSIA( + VkDevice device, + const VkBufferCollectionCreateInfoFUCHSIA* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBufferCollectionFUCHSIA* pCollection); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetBufferCollectionImageConstraintsFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkImageConstraintsInfoFUCHSIA* pImageConstraintsInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetBufferCollectionBufferConstraintsFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkBufferConstraintsInfoFUCHSIA* pBufferConstraintsInfo); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBufferCollectionFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetBufferCollectionPropertiesFUCHSIA( + 
VkDevice device, + VkBufferCollectionFUCHSIA collection, + VkBufferCollectionPropertiesFUCHSIA* pProperties); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_funcs.hpp b/include/vulkan/vulkan_funcs.hpp new file mode 100644 index 00000000..e033d3ba --- /dev/null +++ b/include/vulkan/vulkan_funcs.hpp @@ -0,0 +1,31791 @@ +// Copyright 2015-2025 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#ifndef VULKAN_FUNCS_HPP +#define VULKAN_FUNCS_HPP + +// include-what-you-use: make sure, vulkan.hpp is used by code-completers +// IWYU pragma: private; include "vulkan.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + + //=========================== + //=== COMMAND Definitions === + //=========================== + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkCreateInstance, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateInstance.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Instance * pInstance, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateInstance( reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pInstance ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateInstance, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateInstance.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type createInstance( + const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional allocator, Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateInstance && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Instance instance; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateInstance( reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &instance ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstance" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( instance ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateInstance, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateInstance.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type createInstanceUnique( + const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional allocator, Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateInstance && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Instance instance; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateInstance( reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &instance ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, 
UniqueHandle( instance, detail::ObjectDestroy( allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyInstance, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyInstance.html + template + VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyInstance( static_cast( m_instance ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyInstance, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyInstance.html + template + VULKAN_HPP_INLINE void Instance::destroy( Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyInstance && "Function requires " ); +# endif + + d.vkDestroyInstance( m_instance, + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkEnumeratePhysicalDevices, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDevices.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, + VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkEnumeratePhysicalDevices( static_cast( m_instance ), pPhysicalDeviceCount, reinterpret_cast( pPhysicalDevices ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumeratePhysicalDevices, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDevices.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::enumeratePhysicalDevices( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDevices && "Function requires " ); +# endif + + std::vector physicalDevices; + uint32_t physicalDeviceCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount ) + { + physicalDevices.resize( physicalDeviceCount ); + result = static_cast( + d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); + VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); + if ( physicalDeviceCount < physicalDevices.size() ) + { + physicalDevices.resize( physicalDeviceCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDevices ) ); + } + + // wrapper function for command vkEnumeratePhysicalDevices, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDevices.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDevices && "Function requires " ); +# endif + + std::vector physicalDevices( physicalDeviceAllocator ); + uint32_t physicalDeviceCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount ) + { + physicalDevices.resize( physicalDeviceCount ); + result = static_cast( + d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); + VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); + if ( physicalDeviceCount < physicalDevices.size() ) + { + physicalDevices.resize( physicalDeviceCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDevices ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFeatures( static_cast( m_physicalDevice ), reinterpret_cast( pFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures + PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features; + d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast( &features ) ); + + return features; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFormatProperties( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( pFormatProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function 
for command vkGetPhysicalDeviceFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties + PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::FormatProperties formatProperties; + d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return formatProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties( static_cast( m_physicalDevice ), + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + reinterpret_cast( pImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + reinterpret_cast( &imageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getProperties( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceProperties( static_cast( m_physicalDevice ), reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties + PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties; + d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast( &properties ) ); + + return properties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceQueueFamilyProperties( + static_cast( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties.html + template < + typename QueueFamilyPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties && "Function requires " ); +# endif + + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties.html + template < + typename QueueFamilyPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & 
queueFamilyPropertiesAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties && "Function requires " ); +# endif + + std::vector queueFamilyProperties( queueFamilyPropertiesAllocator ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceMemoryProperties( static_cast( m_physicalDevice ), + reinterpret_cast( pMemoryProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceMemoryProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties + PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + + return memoryProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetInstanceProcAddr, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetInstanceProcAddr.html + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetInstanceProcAddr( static_cast( m_instance ), pName ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetInstanceProcAddr, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetInstanceProcAddr.html + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PFN_VoidFunction Instance::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetInstanceProcAddr && "Function requires " ); +# endif + + PFN_vkVoidFunction result = d.vkGetInstanceProcAddr( m_instance, name.c_str() ); + + return result; + } 
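The wrappers above replace the C API's count/pointer and VkResult plumbing with calls that return their results directly and throw vk::SystemError on failure when exceptions are enabled. A minimal caller-side sketch of that enhanced-mode path follows; it is an editor-added illustration, not part of the generated header, and it assumes the default dispatcher, an installed Vulkan loader, and hypothetical names such as "AnvilSketch":

    #include <vulkan/vulkan.hpp>
    #include <iostream>

    int main()
    {
      // createInstanceUnique() throws on failure and returns a smart handle that
      // destroys the VkInstance automatically when it goes out of scope.
      vk::ApplicationInfo    appInfo( "AnvilSketch", 1, "NoEngine", 1, VK_API_VERSION_1_0 );
      vk::InstanceCreateInfo instanceInfo( {}, &appInfo );
      vk::UniqueInstance     instance = vk::createInstanceUnique( instanceInfo );

      // enumeratePhysicalDevices() hides the count/fill two-call protocol and the
      // VK_INCOMPLETE retry loop implemented above, returning a std::vector directly.
      for ( vk::PhysicalDevice const & gpu : instance->enumeratePhysicalDevices() )
      {
        vk::PhysicalDeviceProperties props = gpu.getProperties();
        std::cout << props.deviceName.data() << "\n";
      }
      return 0;
    }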
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceProcAddr, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceProcAddr.html + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetDeviceProcAddr( static_cast( m_device ), pName ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceProcAddr, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceProcAddr.html + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PFN_VoidFunction Device::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceProcAddr && "Function requires " ); +# endif + + PFN_vkVoidFunction result = d.vkGetDeviceProcAddr( m_device, name.c_str() ); + + return result; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDevice.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Device * pDevice, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDevice( static_cast( m_physicalDevice ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDevice ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDevice.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::createDevice( + const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDevice && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Device device; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateDevice( m_physicalDevice, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &device ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( device ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDevice.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDevice && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Device 
device; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateDevice( m_physicalDevice, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &device ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( device, detail::ObjectDestroy( allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDevice.html + template + VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDevice( static_cast( m_device ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDevice.html + template + VULKAN_HPP_INLINE void Device::destroy( Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDevice && "Function requires " ); +# endif + + d.vkDestroyDevice( m_device, + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkEnumerateInstanceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceExtensionProperties.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumerateInstanceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceExtensionProperties.html + template < + typename ExtensionPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName, Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateInstanceExtensionProperties && "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateInstanceExtensionProperties( + layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkEnumerateInstanceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceExtensionProperties.html + template < + typename ExtensionPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateInstanceExtensionProperties && "Function requires " ); +# endif + + std::vector properties( extensionPropertiesAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateInstanceExtensionProperties( + layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkEnumerateDeviceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceExtensionProperties.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumerateDeviceExtensionProperties( + static_cast( m_physicalDevice ), pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumerateDeviceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceExtensionProperties.html + template < + typename ExtensionPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, Dispatch const & d ) const + { 
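    // Editor's illustrative note (not generated code): the loop below implements Vulkan's standard
    // count/fill enumeration protocol, retrying while the driver returns VK_INCOMPLETE because the
    // property count changed between the two calls. Caller-side, the whole dance collapses to, e.g.
    // (assuming exceptions are enabled; <cstring> and <algorithm> provide strcmp and any_of):
    //
    //   auto exts         = physicalDevice.enumerateDeviceExtensionProperties();   // no layer filter
    //   bool hasSwapchain = std::any_of( exts.begin(), exts.end(),
    //                                    []( vk::ExtensionProperties const & e )
    //                                    { return std::strcmp( e.extensionName, VK_KHR_SWAPCHAIN_EXTENSION_NAME ) == 0; } );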
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateDeviceExtensionProperties && "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateDeviceExtensionProperties( + m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkEnumerateDeviceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceExtensionProperties.html + template < + typename ExtensionPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateDeviceExtensionProperties && "Function requires " ); +# endif + + std::vector properties( extensionPropertiesAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateDeviceExtensionProperties( + m_physicalDevice, layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkEnumerateInstanceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceLayerProperties.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumerateInstanceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceLayerProperties.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + enumerateInstanceLayerProperties( Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateInstanceLayerProperties && "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkEnumerateInstanceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceLayerProperties.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateInstanceLayerProperties && "Function requires " ); +# endif + + std::vector properties( layerPropertiesAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && 
propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkEnumerateDeviceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceLayerProperties.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumerateDeviceLayerProperties( + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumerateDeviceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceLayerProperties.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateDeviceLayerProperties && "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkEnumerateDeviceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceLayerProperties.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateDeviceLayerProperties && "Function requires " ); +# endif + + std::vector properties( layerPropertiesAllocator ); + uint32_t propertyCount; + 
VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceQueue, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceQueue.html + template + VULKAN_HPP_INLINE void + Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceQueue( static_cast( m_device ), queueFamilyIndex, queueIndex, reinterpret_cast( pQueue ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceQueue, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceQueue.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue + Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceQueue && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Queue queue; + d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast( &queue ) ); + + return queue; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkQueueSubmit, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkQueueSubmit( static_cast( m_queue ), submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueSubmit, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Queue::submit( + VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueSubmit && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ) ); + 
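    // Editor's illustrative note (not generated code): thanks to the ArrayProxy parameter above, callers
    // can pass a single vk::SubmitInfo, an initializer list, or a std::vector without spelling out
    // count/pointer pairs, e.g. (cmdBuffers being a hypothetical std::vector<vk::CommandBuffer>):
    //
    //   vk::SubmitInfo submitInfo( {}, {}, cmdBuffers );
    //   queue.submit( submitInfo, fence );   // the resultCheck() below throws vk::SystemError on error when exceptions are enabled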
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueWaitIdle, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueWaitIdle.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkQueueWaitIdle( static_cast( m_queue ) ) ); + } +#else + // wrapper function for command vkQueueWaitIdle, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueWaitIdle.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Queue::waitIdle( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueWaitIdle && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkQueueWaitIdle( m_queue ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDeviceWaitIdle, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDeviceWaitIdle.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkDeviceWaitIdle( static_cast( m_device ) ) ); + } +#else + // wrapper function for command vkDeviceWaitIdle, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDeviceWaitIdle.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::waitIdle( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDeviceWaitIdle && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkDeviceWaitIdle( m_device ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkAllocateMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateMemory.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkAllocateMemory( static_cast( m_device ), + reinterpret_cast( pAllocateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pMemory ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAllocateMemory, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateMemory.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateMemory && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DeviceMemory memory; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkAllocateMemory( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &memory ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memory ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkAllocateMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateMemory.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateMemory && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DeviceMemory memory; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkAllocateMemory( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &memory ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( memory, detail::ObjectFree( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkFreeMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeMemory.html + template + VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkFreeMemory( static_cast( m_device ), static_cast( memory ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFreeMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeMemory.html + template + VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFreeMemory && "Function requires " ); +# endif + + d.vkFreeMemory( m_device, + static_cast( memory ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkFreeMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeMemory.html + template + VULKAN_HPP_INLINE void( 
Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkFreeMemory( static_cast( m_device ), static_cast( memory ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFreeMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeMemory.html + template + VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFreeMemory && "Function requires " ); +# endif + + d.vkFreeMemory( m_device, + static_cast( memory ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkMapMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, + void ** ppData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkMapMemory( static_cast( m_device ), + static_cast( memory ), + static_cast( offset ), + static_cast( size ), + static_cast( flags ), + ppData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkMapMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkMapMemory && "Function requires " ); +# endif + + void * pData; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkMapMemory( m_device, + static_cast( memory ), + static_cast( offset ), + static_cast( size ), + static_cast( flags ), + &pData ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUnmapMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory.html + template + VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUnmapMemory( static_cast( m_device ), static_cast( memory ) ); + } + + // wrapper function for command vkFlushMappedMemoryRanges, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFlushMappedMemoryRanges.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t 
memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkFlushMappedMemoryRanges( static_cast( m_device ), memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFlushMappedMemoryRanges, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFlushMappedMemoryRanges.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFlushMappedMemoryRanges && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkInvalidateMappedMemoryRanges, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkInvalidateMappedMemoryRanges.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkInvalidateMappedMemoryRanges( static_cast( m_device ), memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkInvalidateMappedMemoryRanges, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkInvalidateMappedMemoryRanges.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkInvalidateMappedMemoryRanges && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceMemoryCommitment, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryCommitment.html + template + VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceMemoryCommitment( + 
+      static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkGetDeviceMemoryCommitment, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryCommitment.html
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+                                                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryCommitment && "Function <vkGetDeviceMemoryCommitment> requires <VK_VERSION_1_0>" );
+#  endif
+
+    VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
+    d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );
+
+    return committedMemoryInBytes;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkBindBufferMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory.html
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer       buffer,
+                                                                          VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+                                                                          VULKAN_HPP_NAMESPACE::DeviceSize   memoryOffset,
+                                                                          Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBindBufferMemory(
+      static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+  }
+#else
+  // wrapper function for command vkBindBufferMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory.html
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory(
+    VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkBindBufferMemory && "Function <vkBindBufferMemory> requires <VK_VERSION_1_0>" );
+#  endif
+
+    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+      d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" );
+
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkBindImageMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory.html
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image        image,
+                                                                         VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+                                                                         VULKAN_HPP_NAMESPACE::DeviceSize   memoryOffset,
+                                                                         Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBindImageMemory(
+      static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+  }
+#else
+  // wrapper function for command vkBindImageMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory.html
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory(
+    VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkBindImageMemory && "Function <vkBindImageMemory> requires <VK_VERSION_1_0>" );
+#  endif
+
+    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+      d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" );
+
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  // wrapper function for command vkGetBufferMemoryRequirements, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer               buffer,
+                                                              VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
+                                                              Dispatch const &                           d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetBufferMemoryRequirements(
+      static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkGetBufferMemoryRequirements, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements.html
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
+    Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements && "Function <vkGetBufferMemoryRequirements> requires <VK_VERSION_1_0>" );
+#  endif
+
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
+    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
+
+    return memoryRequirements;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkGetImageMemoryRequirements, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image                image,
+                                                             VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
+                                                             Dispatch const &                           d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageMemoryRequirements(
+      static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkGetImageMemoryRequirements, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements.html
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
+    Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements && "Function <vkGetImageMemoryRequirements> requires <VK_VERSION_1_0>" );
+#  endif
+
+
VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; + d.vkGetImageMemoryRequirements( m_device, static_cast( image ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements.html + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSparseMemoryRequirements( static_cast( m_device ), + static_cast( image ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements && "Function requires " ); +# endif + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements( m_device, + static_cast( image ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + // wrapper function for command vkGetImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements && "Function requires " ); +# endif + + std::vector sparseMemoryRequirements( + sparseImageMemoryRequirementsAllocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements( m_device, + static_cast( image ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( 
sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceSparseImageFormatProperties( static_cast( m_physicalDevice ), + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + pPropertyCount, + reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties.html + template < + typename SparseImageFormatPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties && + "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties.html + template < + typename SparseImageFormatPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + 
VULKAN_HPP_NAMESPACE::ImageTiling tiling, + SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties && + "Function requires " ); +# endif + + std::vector properties( sparseImageFormatPropertiesAllocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkQueueBindSparse, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueBindSparse.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkQueueBindSparse( + static_cast( m_queue ), bindInfoCount, reinterpret_cast( pBindInfo ), static_cast( fence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueBindSparse, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueBindSparse.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Queue::bindSparse( + VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueBindSparse && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast( bindInfo.data() ), static_cast( fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFence.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateFence( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper 
function for command vkCreateFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFence.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::createFence( + const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateFence && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Fence fence; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateFence( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFence.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::createFenceUnique( + const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateFence && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Fence fence; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateFence( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( fence, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFence.html + template + VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyFence( static_cast( m_device ), static_cast( fence ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFence.html + template + VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyFence && "Function requires " ); +# endif + + d.vkDestroyFence( m_device, + static_cast( fence ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFence.html + template + VULKAN_HPP_INLINE void Device::destroy( 
VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyFence( static_cast( m_device ), static_cast( fence ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFence.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyFence && "Function requires " ); +# endif + + d.vkDestroyFence( m_device, + static_cast( fence ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkResetFences, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetFences.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkResetFences( static_cast( m_device ), fenceCount, reinterpret_cast( pFences ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkResetFences, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetFences.html + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkResetFences && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkResetFences( m_device, fences.size(), reinterpret_cast( fences.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetFenceStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceStatus.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetFenceStatus( static_cast( m_device ), static_cast( fence ) ) ); + } +#else + // wrapper function for command vkGetFenceStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceStatus.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetFenceStatus && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetFenceStatus( m_device, static_cast( fence ) 
) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + + return static_cast( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkWaitForFences, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForFences.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkWaitForFences( + static_cast( m_device ), fenceCount, reinterpret_cast( pFences ), static_cast( waitAll ), timeout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWaitForFences, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForFences.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWaitForFences && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkWaitForFences( m_device, fences.size(), reinterpret_cast( fences.data() ), static_cast( waitAll ), timeout ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSemaphore.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateSemaphore( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSemaphore ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSemaphore.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSemaphore && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Semaphore semaphore; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateSemaphore( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &semaphore ) ) ); + 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( semaphore ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSemaphore.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSemaphore && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Semaphore semaphore; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateSemaphore( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &semaphore ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( semaphore, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySemaphore.html + template + VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySemaphore( + static_cast( m_device ), static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySemaphore.html + template + VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySemaphore && "Function requires " ); +# endif + + d.vkDestroySemaphore( m_device, + static_cast( semaphore ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySemaphore.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySemaphore( + static_cast( m_device ), static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySemaphore.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Optional allocator, + Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySemaphore && "Function requires " ); +# endif + + d.vkDestroySemaphore( m_device, + static_cast( semaphore ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateEvent.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Event * pEvent, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateEvent( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pEvent ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateEvent.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::createEvent( + const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateEvent && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Event event; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateEvent( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &event ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( event ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateEvent.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::createEventUnique( + const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateEvent && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Event event; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateEvent( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &event ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( event, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyEvent.html + template + VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyEvent( static_cast( m_device ), static_cast( event ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyEvent.html + template + VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyEvent && "Function requires " ); +# endif + + d.vkDestroyEvent( m_device, + static_cast( event ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyEvent.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyEvent( static_cast( m_device ), static_cast( event ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyEvent.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyEvent && "Function requires " ); +# endif + + d.vkDestroyEvent( m_device, + static_cast( event ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetEventStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEventStatus.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetEventStatus( static_cast( m_device ), static_cast( event ) ) ); + } +#else + // wrapper function for command vkGetEventStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEventStatus.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetEventStatus && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetEventStatus( m_device, static_cast( event ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } ); + + return static_cast( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + 
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetEvent.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkSetEvent( static_cast( m_device ), static_cast( event ) ) ); + } +#else + // wrapper function for command vkSetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetEvent.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetEvent && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkResetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetEvent.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkResetEvent( static_cast( m_device ), static_cast( event ) ) ); + } +#else + // wrapper function for command vkResetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetEvent.html + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkResetEvent && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkCreateQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateQueryPool.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateQueryPool( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pQueryPool ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateQueryPool.html + template + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE typename ResultValueType::type + Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateQueryPool && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::QueryPool queryPool; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateQueryPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &queryPool ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( queryPool ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateQueryPool.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateQueryPool && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::QueryPool queryPool; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateQueryPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &queryPool ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( queryPool, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyQueryPool.html + template + VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyQueryPool( + static_cast( m_device ), static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyQueryPool.html + template + VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyQueryPool && "Function requires " ); +# endif + + d.vkDestroyQueryPool( m_device, + static_cast( queryPool ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyQueryPool.html + template + VULKAN_HPP_INLINE void Device::destroy( 
VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyQueryPool( + static_cast( m_device ), static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyQueryPool.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyQueryPool && "Function requires " ); +# endif + + d.vkDestroyQueryPool( m_device, + static_cast( queryPool ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetQueryPoolResults, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueryPoolResults.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetQueryPoolResults( static_cast( m_device ), + static_cast( queryPool ), + firstQuery, + queryCount, + dataSize, + pData, + static_cast( stride ), + static_cast( flags ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetQueryPoolResults, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueryPoolResults.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetQueryPoolResults && "Function requires " ); +# endif + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetQueryPoolResults( m_device, + static_cast( queryPool ), + firstQuery, + queryCount, + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ), + static_cast( stride ), + static_cast( flags ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + + return ResultValue>( result, std::move( data ) ); + } + + // wrapper function for command vkGetQueryPoolResults, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueryPoolResults.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + 
VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetQueryPoolResults && "Function requires " ); +# endif + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetQueryPoolResults( m_device, + static_cast( queryPool ), + firstQuery, + queryCount, + sizeof( DataType ), + reinterpret_cast( &data ), + static_cast( stride ), + static_cast( flags ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + + return ResultValue( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBuffer.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Buffer * pBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateBuffer( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pBuffer ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBuffer.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::createBuffer( + const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateBuffer && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Buffer buffer; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateBuffer( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &buffer ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( buffer ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBuffer.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::createBufferUnique( + const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateBuffer && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Buffer buffer; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateBuffer( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &buffer ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" ); + + 
return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( buffer, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBuffer.html + template + VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBuffer( static_cast( m_device ), static_cast( buffer ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBuffer.html + template + VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyBuffer && "Function requires " ); +# endif + + d.vkDestroyBuffer( m_device, + static_cast( buffer ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBuffer.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBuffer( static_cast( m_device ), static_cast( buffer ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBuffer.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyBuffer && "Function requires " ); +# endif + + d.vkDestroyBuffer( m_device, + static_cast( buffer ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferView.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::BufferView * pView, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateBufferView( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pView ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferView.html + 
template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateBufferView && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::BufferView view; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateBufferView( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( view ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferView.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateBufferView && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::BufferView view; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateBufferView( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( view, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferView.html + template + VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBufferView( + static_cast( m_device ), static_cast( bufferView ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferView.html + template + VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyBufferView && "Function requires " ); +# endif + + d.vkDestroyBufferView( m_device, + static_cast( bufferView ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferView.html + template + VULKAN_HPP_INLINE 
void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBufferView( + static_cast( m_device ), static_cast( bufferView ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferView.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyBufferView && "Function requires " ); +# endif + + d.vkDestroyBufferView( m_device, + static_cast( bufferView ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImage.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Image * pImage, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateImage( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pImage ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImage.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::createImage( + const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateImage && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Image image; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateImage( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &image ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( image ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImage.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::createImageUnique( + const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateImage && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Image image; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateImage( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( 
allocator ) ), + reinterpret_cast( &image ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( image, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImage.html + template + VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyImage( static_cast( m_device ), static_cast( image ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImage.html + template + VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyImage && "Function requires " ); +# endif + + d.vkDestroyImage( m_device, + static_cast( image ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImage.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyImage( static_cast( m_device ), static_cast( image ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImage.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyImage && "Function requires " ); +# endif + + d.vkDestroyImage( m_device, + static_cast( image ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSubresourceLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout.html + template + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSubresourceLayout( static_cast( m_device ), + static_cast( image ), + reinterpret_cast( pSubresource ), + reinterpret_cast( pLayout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command 
vkGetImageSubresourceLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout( + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSubresourceLayout && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SubresourceLayout layout; + d.vkGetImageSubresourceLayout( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return layout; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImageView.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ImageView * pView, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateImageView( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pView ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImageView.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateImageView && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ImageView view; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateImageView( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( view ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImageView.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateImageView && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ImageView view; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateImageView( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
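// Illustrative usage of the image wrappers above (a sketch, not part of the vendored header):
// creates a small linearly tiled staging image and queries its row pitch through the enhanced
// getImageSubresourceLayout() overload, which returns the SubresourceLayout by value. Linear
// tiling is required for this query; `device` and format support for linear R8G8B8A8 are assumed.
#include <vulkan/vulkan.hpp>

vk::DeviceSize queryLinearImageRowPitch( vk::Device device, uint32_t width, uint32_t height )
{
  vk::ImageCreateInfo imageInfo = vk::ImageCreateInfo{}
                                    .setImageType( vk::ImageType::e2D )
                                    .setFormat( vk::Format::eR8G8B8A8Unorm )
                                    .setExtent( { width, height, 1 } )
                                    .setMipLevels( 1 )
                                    .setArrayLayers( 1 )
                                    .setSamples( vk::SampleCountFlagBits::e1 )
                                    .setTiling( vk::ImageTiling::eLinear )
                                    .setUsage( vk::ImageUsageFlagBits::eTransferSrc )
                                    .setInitialLayout( vk::ImageLayout::ePreinitialized );
  vk::UniqueImage image = device.createImageUnique( imageInfo );

  vk::ImageSubresource  subresource( vk::ImageAspectFlagBits::eColor, 0, 0 );  // aspect, mip, layer
  vk::SubresourceLayout layout = device.getImageSubresourceLayout( image.get(), subresource );
  return layout.rowPitch;
}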
"::Device::createImageViewUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( view, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImageView.html + template + VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyImageView( + static_cast( m_device ), static_cast( imageView ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImageView.html + template + VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyImageView && "Function requires " ); +# endif + + d.vkDestroyImageView( m_device, + static_cast( imageView ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImageView.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyImageView( + static_cast( m_device ), static_cast( imageView ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImageView.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyImageView && "Function requires " ); +# endif + + d.vkDestroyImageView( m_device, + static_cast( imageView ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShaderModule.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateShaderModule( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pShaderModule ) ) ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShaderModule.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShaderModule && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateShaderModule( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &shaderModule ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( shaderModule ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShaderModule.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShaderModule && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateShaderModule( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &shaderModule ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( shaderModule, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderModule.html + template + VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyShaderModule( + static_cast( m_device ), static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderModule.html + template + VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyShaderModule && "Function requires " ); +# endif + + d.vkDestroyShaderModule( m_device, + static_cast( 
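// Illustrative usage of the shader-module wrappers above (a sketch, not part of the vendored
// header): builds a module from SPIR-V words already loaded into memory. Note that codeSize is
// given in bytes even though pCode points at 32-bit words; `device` and `spirv` come from the caller.
#include <vulkan/vulkan.hpp>
#include <vector>

vk::UniqueShaderModule makeShaderModule( vk::Device device, std::vector<uint32_t> const & spirv )
{
  vk::ShaderModuleCreateInfo createInfo( {}, spirv.size() * sizeof( uint32_t ), spirv.data() );
  return device.createShaderModuleUnique( createInfo );
}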
shaderModule ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderModule.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyShaderModule( + static_cast( m_device ), static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderModule.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyShaderModule && "Function requires " ); +# endif + + d.vkDestroyShaderModule( m_device, + static_cast( shaderModule ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreatePipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineCache.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreatePipelineCache( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelineCache ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreatePipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineCache.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineCache && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreatePipelineCache( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipelineCache ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineCache ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreatePipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineCache.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + 
Device::createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineCache && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreatePipelineCache( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipelineCache ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( pipelineCache, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineCache.html + template + VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineCache( + static_cast( m_device ), static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineCache.html + template + VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineCache && "Function requires " ); +# endif + + d.vkDestroyPipelineCache( m_device, + static_cast( pipelineCache ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineCache.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineCache( + static_cast( m_device ), static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineCache.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineCache && "Function requires " ); +# endif + + d.vkDestroyPipelineCache( m_device, + static_cast( pipelineCache ), + reinterpret_cast( 
static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPipelineCacheData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineCacheData.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + size_t * pDataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPipelineCacheData( static_cast( m_device ), static_cast( pipelineCache ), pDataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineCacheData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineCacheData.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineCacheData && "Function requires " ); +# endif + + std::vector data; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = + static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } + + // wrapper function for command vkGetPipelineCacheData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineCacheData.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineCacheData && "Function requires " ); +# endif + + std::vector data( uint8_tAllocator ); + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = + static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkMergePipelineCaches, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMergePipelineCaches.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkMergePipelineCaches( + static_cast( m_device ), static_cast( dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkMergePipelineCaches, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMergePipelineCaches.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkMergePipelineCaches && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkMergePipelineCaches( + m_device, static_cast( dstCache ), srcCaches.size(), reinterpret_cast( srcCaches.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateGraphicsPipelines( static_cast( m_device ), + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateGraphicsPipelines( + m_device, + 
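// Illustrative usage of the pipeline-cache wrappers above (a sketch, not part of the vendored
// header): the enhanced getPipelineCacheData() overload performs the usual two-call size query
// internally and retries on eIncomplete, so the caller simply receives the serialized blob.
// `device` and the cache handles are assumed to exist; persisting the blob to disk is up to the app.
#include <vulkan/vulkan.hpp>
#include <cstdint>
#include <vector>

std::vector<uint8_t> snapshotPipelineCache( vk::Device device, vk::PipelineCache cache,
                                            vk::PipelineCache workerCacheA, vk::PipelineCache workerCacheB )
{
  // Fold per-thread caches into the main one, then serialize it.
  device.mergePipelineCaches( cache, { workerCacheA, workerCacheB } );
  return device.getPipelineCacheData( cache );
}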
static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size(), pipelineAllocator ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateGraphicsPipelines( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateGraphicsPipelines( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue( result, std::move( pipeline ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional 
allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateGraphicsPipelines( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines; + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateGraphicsPipelines( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function requires " ); +# endif + + 
VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateGraphicsPipelines( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( + result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateComputePipelines( static_cast( m_device ), + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateComputePipelines( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function 
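// Illustrative usage of the graphics-pipeline wrappers above (a sketch, not part of the vendored
// header): unlike most create*Unique() calls, the pipeline variants return a ResultValue because
// both eSuccess and ePipelineCompileRequiredEXT are accepted, so the result code is still worth
// inspecting. `device`, `cache` and a fully populated `createInfo` are assumed to exist.
#include <vulkan/vulkan.hpp>
#include <utility>

vk::UniquePipeline makeGraphicsPipeline( vk::Device device, vk::PipelineCache cache,
                                         vk::GraphicsPipelineCreateInfo const & createInfo )
{
  auto rv = device.createGraphicsPipelineUnique( cache, createInfo );
  // rv.result is eSuccess or ePipelineCompileRequiredEXT; other codes throw (with exceptions enabled).
  return std::move( rv.value );
}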
requires " ); +# endif + + std::vector pipelines( createInfos.size(), pipelineAllocator ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateComputePipelines( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateComputePipelines( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue( result, std::move( pipeline ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateComputePipelines( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines; + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( result, std::move( 
uniquePipelines ) ); + } + + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateComputePipelines( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateComputePipelines( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( + result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipeline, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipeline.html + template + VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
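// Illustrative usage of the compute-pipeline wrappers above (a sketch, not part of the vendored
// header): a compute pipeline needs a single shader stage and a pipeline layout. As with the
// graphics variant, the call returns a ResultValue so ePipelineCompileRequiredEXT can be observed.
// `device`, `layout` and `shaderModule` are assumed valid; the entry point is assumed to be "main".
#include <vulkan/vulkan.hpp>
#include <utility>

vk::UniquePipeline makeComputePipeline( vk::Device device, vk::PipelineLayout layout,
                                        vk::ShaderModule shaderModule )
{
  vk::PipelineShaderStageCreateInfo stage( {}, vk::ShaderStageFlagBits::eCompute, shaderModule, "main" );
  vk::ComputePipelineCreateInfo     createInfo( {}, stage, layout );
  auto rv = device.createComputePipelineUnique( nullptr, createInfo );  // no pipeline cache
  return std::move( rv.value );
}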
d.vkDestroyPipeline( + static_cast( m_device ), static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipeline, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipeline.html + template + VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipeline && "Function requires " ); +# endif + + d.vkDestroyPipeline( m_device, + static_cast( pipeline ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipeline, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipeline.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipeline( + static_cast( m_device ), static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipeline, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipeline.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipeline && "Function requires " ); +# endif + + d.vkDestroyPipeline( m_device, + static_cast( pipeline ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreatePipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineLayout.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreatePipelineLayout( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelineLayout ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreatePipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineLayout.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineLayout && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + 
d.vkCreatePipelineLayout( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipelineLayout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineLayout ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreatePipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineLayout.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineLayout && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreatePipelineLayout( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipelineLayout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( pipelineLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineLayout.html + template + VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineLayout( + static_cast( m_device ), static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineLayout.html + template + VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineLayout && "Function requires " ); +# endif + + d.vkDestroyPipelineLayout( m_device, + static_cast( pipelineLayout ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineLayout.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineLayout( + static_cast( m_device ), static_cast( pipelineLayout ), 
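// Illustrative usage of the pipeline-layout wrappers above (a sketch, not part of the vendored
// header): a layout referencing one descriptor set layout and one push-constant range.
// `device` and `setLayout` are assumed to be created elsewhere.
#include <vulkan/vulkan.hpp>

vk::UniquePipelineLayout makePipelineLayout( vk::Device device, vk::DescriptorSetLayout setLayout )
{
  vk::PushConstantRange        pushRange( vk::ShaderStageFlagBits::eVertex, 0, 64 );  // 64 bytes
  vk::PipelineLayoutCreateInfo createInfo( {}, 1, &setLayout, 1, &pushRange );
  return device.createPipelineLayoutUnique( createInfo );
}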
reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineLayout.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineLayout && "Function requires " ); +# endif + + d.vkDestroyPipelineLayout( m_device, + static_cast( pipelineLayout ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateSampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSampler.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Sampler * pSampler, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateSampler( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSampler ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateSampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSampler.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::createSampler( + const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSampler && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Sampler sampler; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateSampler( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &sampler ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( sampler ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateSampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSampler.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerUnique( + const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSampler && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Sampler sampler; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateSampler( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &sampler ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, 
UniqueHandle( sampler, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySampler.html + template + VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySampler( static_cast( m_device ), static_cast( sampler ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySampler.html + template + VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySampler && "Function requires " ); +# endif + + d.vkDestroySampler( m_device, + static_cast( sampler ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySampler.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySampler( static_cast( m_device ), static_cast( sampler ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySampler.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySampler && "Function requires " ); +# endif + + d.vkDestroySampler( m_device, + static_cast( sampler ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateDescriptorSetLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorSetLayout.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDescriptorSetLayout( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSetLayout ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDescriptorSetLayout, see 
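  // Illustrative sketch (editorial example) for the createSamplerUnique wrapper above; enhanced mode with
  // exceptions is assumed and the helper name is hypothetical.
  #include <vulkan/vulkan.hpp>

  vk::UniqueSampler exampleSampler( vk::Device device )
  {
    vk::SamplerCreateInfo createInfo;
    createInfo.magFilter    = vk::Filter::eLinear;
    createInfo.minFilter    = vk::Filter::eLinear;
    createInfo.mipmapMode   = vk::SamplerMipmapMode::eLinear;
    createInfo.addressModeU = vk::SamplerAddressMode::eRepeat;
    createInfo.addressModeV = vk::SamplerAddressMode::eRepeat;
    createInfo.addressModeW = vk::SamplerAddressMode::eRepeat;
    createInfo.maxLod       = VK_LOD_CLAMP_NONE;

    // The Unique variant wraps the new sampler in a RAII handle whose deleter calls vkDestroySampler.
    return device.createSamplerUnique( createInfo );
  }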
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorSetLayout.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorSetLayout && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDescriptorSetLayout( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &setLayout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( setLayout ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDescriptorSetLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorSetLayout.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorSetLayout && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDescriptorSetLayout( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &setLayout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( setLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorSetLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorSetLayout.html + template + VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorSetLayout( static_cast( m_device ), + static_cast( descriptorSetLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDescriptorSetLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorSetLayout.html + template + VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorSetLayout && 
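  // Illustrative sketch (editorial example) for createDescriptorSetLayout; enhanced mode with exceptions
  // is assumed, and the helper name and binding layout are hypothetical.
  #include <vulkan/vulkan.hpp>
  #include <array>

  vk::DescriptorSetLayout exampleSetLayout( vk::Device device )
  {
    // Binding 0: uniform buffer for the vertex stage; binding 1: combined image sampler for the fragment stage.
    std::array<vk::DescriptorSetLayoutBinding, 2> bindings = { {
      { 0, vk::DescriptorType::eUniformBuffer,        1, vk::ShaderStageFlagBits::eVertex },
      { 1, vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment }
    } };

    vk::DescriptorSetLayoutCreateInfo createInfo;
    createInfo.bindingCount = static_cast<uint32_t>( bindings.size() );
    createInfo.pBindings    = bindings.data();

    return device.createDescriptorSetLayout( createInfo );
  }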
"Function requires " ); +# endif + + d.vkDestroyDescriptorSetLayout( + m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorSetLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorSetLayout.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorSetLayout( static_cast( m_device ), + static_cast( descriptorSetLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDescriptorSetLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorSetLayout.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorSetLayout && "Function requires " ); +# endif + + d.vkDestroyDescriptorSetLayout( + m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorPool.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDescriptorPool( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDescriptorPool ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorPool.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorPool && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateDescriptorPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &descriptorPool ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorPool ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command 
vkCreateDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorPool.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorPool && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateDescriptorPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &descriptorPool ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( descriptorPool, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorPool.html + template + VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorPool( + static_cast( m_device ), static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorPool.html + template + VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorPool && "Function requires " ); +# endif + + d.vkDestroyDescriptorPool( m_device, + static_cast( descriptorPool ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorPool.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorPool( + static_cast( m_device ), static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorPool.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorPool && "Function requires " ); +# endif + + d.vkDestroyDescriptorPool( m_device, + static_cast( descriptorPool ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkResetDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetDescriptorPool.html + template + VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkResetDescriptorPool( + static_cast( m_device ), static_cast( descriptorPool ), static_cast( flags ) ) ); + } +#else + // wrapper function for command vkResetDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetDescriptorPool.html + template + VULKAN_HPP_INLINE void Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkResetDescriptorPool && "Function requires " ); +# endif + + d.vkResetDescriptorPool( m_device, static_cast( descriptorPool ), static_cast( flags ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkAllocateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateDescriptorSets.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkAllocateDescriptorSets( static_cast( m_device ), + reinterpret_cast( pAllocateInfo ), + reinterpret_cast( pDescriptorSets ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAllocateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateDescriptorSets.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function requires " ); +# endif + + std::vector descriptorSets( allocateInfo.descriptorSetCount ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateDescriptorSets( + m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorSets ) ); + } + + // wrapper function for command vkAllocateDescriptorSets, see 
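  // Illustrative sketch (editorial example) for createDescriptorPool and resetDescriptorPool; enhanced mode
  // with exceptions is assumed, and the helper name and pool sizes are hypothetical.
  #include <vulkan/vulkan.hpp>
  #include <array>

  vk::DescriptorPool exampleDescriptorPool( vk::Device device )
  {
    std::array<vk::DescriptorPoolSize, 2> poolSizes = { {
      { vk::DescriptorType::eUniformBuffer,        16 },
      { vk::DescriptorType::eCombinedImageSampler, 16 }
    } };

    vk::DescriptorPoolCreateInfo createInfo;
    createInfo.flags         = vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet;  // allow explicit freeDescriptorSets()
    createInfo.maxSets       = 16;
    createInfo.poolSizeCount = static_cast<uint32_t>( poolSizes.size() );
    createInfo.pPoolSizes    = poolSizes.data();

    vk::DescriptorPool pool = device.createDescriptorPool( createInfo );

    // resetDescriptorPool() recycles every set allocated from the pool; the enhanced overload returns void
    // because vkResetDescriptorPool can only report VK_SUCCESS.
    device.resetDescriptorPool( pool );
    return pool;
  }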
https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateDescriptorSets.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, + DescriptorSetAllocator & descriptorSetAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function requires " ); +# endif + + std::vector descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateDescriptorSets( + m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorSets ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkAllocateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateDescriptorSets.html + template < + typename Dispatch, + typename DescriptorSetAllocator, + typename std::enable_if>::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, DescriptorSetAllocator>>::type + Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function requires " ); +# endif + + std::vector descriptorSets( allocateInfo.descriptorSetCount ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateDescriptorSets( + m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); + std::vector, DescriptorSetAllocator> uniqueDescriptorSets; + uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); + detail::PoolFree deleter( *this, allocateInfo.descriptorPool, d ); + for ( auto const & descriptorSet : descriptorSets ) + { + uniqueDescriptorSets.push_back( UniqueHandle( descriptorSet, deleter ) ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueDescriptorSets ) ); + } + + // wrapper function for command vkAllocateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateDescriptorSets.html + template < + typename Dispatch, + typename DescriptorSetAllocator, + typename std::enable_if>::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, DescriptorSetAllocator>>::type + Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, + DescriptorSetAllocator & descriptorSetAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function requires " ); +# endif + + std::vector descriptorSets( allocateInfo.descriptorSetCount ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( 
d.vkAllocateDescriptorSets( + m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); + std::vector, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator ); + uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); + detail::PoolFree deleter( *this, allocateInfo.descriptorPool, d ); + for ( auto const & descriptorSet : descriptorSets ) + { + uniqueDescriptorSets.push_back( UniqueHandle( descriptorSet, deleter ) ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueDescriptorSets ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkFreeDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeDescriptorSets.html + template + VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkFreeDescriptorSets( static_cast( m_device ), + static_cast( descriptorPool ), + descriptorSetCount, + reinterpret_cast( pDescriptorSets ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFreeDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeDescriptorSets.html + template + VULKAN_HPP_INLINE void Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorSets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFreeDescriptorSets && "Function requires " ); +# endif + + d.vkFreeDescriptorSets( + m_device, static_cast( descriptorPool ), descriptorSets.size(), reinterpret_cast( descriptorSets.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkFreeDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeDescriptorSets.html + template + VULKAN_HPP_INLINE Result( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkFreeDescriptorSets( static_cast( m_device ), + static_cast( descriptorPool ), + descriptorSetCount, + reinterpret_cast( pDescriptorSets ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFreeDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeDescriptorSets.html + template + VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorSets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFreeDescriptorSets && "Function requires " ); +# endif + + d.vkFreeDescriptorSets( + m_device, 
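  // Illustrative sketch (editorial example) for allocateDescriptorSets / freeDescriptorSets; enhanced mode
  // with exceptions is assumed and the helper name is hypothetical.
  #include <vulkan/vulkan.hpp>
  #include <vector>

  void exampleDescriptorSets( vk::Device device, vk::DescriptorPool pool, vk::DescriptorSetLayout layout )
  {
    vk::DescriptorSetAllocateInfo allocateInfo;
    allocateInfo.descriptorPool     = pool;
    allocateInfo.descriptorSetCount = 1;
    allocateInfo.pSetLayouts        = &layout;

    // The enhanced overload sizes the result vector from descriptorSetCount and returns it by value;
    // allocateDescriptorSetsUnique() instead returns RAII handles that free themselves back into the pool.
    std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocateInfo );

    // Explicit release; this requires the pool to have been created with eFreeDescriptorSet.
    device.freeDescriptorSets( pool, sets );
  }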
static_cast( descriptorPool ), descriptorSets.size(), reinterpret_cast( descriptorSets.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUpdateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSets.html + template + VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUpdateDescriptorSets( static_cast( m_device ), + descriptorWriteCount, + reinterpret_cast( pDescriptorWrites ), + descriptorCopyCount, + reinterpret_cast( pDescriptorCopies ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUpdateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSets.html + template + VULKAN_HPP_INLINE void + Device::updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorCopies, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSets && "Function requires " ); +# endif + + d.vkUpdateDescriptorSets( m_device, + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ), + descriptorCopies.size(), + reinterpret_cast( descriptorCopies.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFramebuffer.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateFramebuffer( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFramebuffer ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFramebuffer.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateFramebuffer && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateFramebuffer( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &framebuffer ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( framebuffer ) 
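  // Illustrative sketch (editorial example) for the ArrayProxy overload of updateDescriptorSets; the helper
  // name and the binding/buffer parameters are hypothetical.
  #include <vulkan/vulkan.hpp>

  void exampleWriteUniformBuffer( vk::Device device, vk::DescriptorSet set, vk::Buffer buffer )
  {
    vk::DescriptorBufferInfo bufferInfo( buffer, 0, VK_WHOLE_SIZE );

    vk::WriteDescriptorSet write;
    write.dstSet          = set;
    write.dstBinding      = 0;
    write.descriptorCount = 1;
    write.descriptorType  = vk::DescriptorType::eUniformBuffer;
    write.pBufferInfo     = &bufferInfo;

    // ArrayProxy accepts a single element for the writes; nullptr stands in for an empty copy list.
    device.updateDescriptorSets( write, nullptr );
  }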
); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFramebuffer.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateFramebuffer && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateFramebuffer( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &framebuffer ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( framebuffer, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFramebuffer.html + template + VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyFramebuffer( + static_cast( m_device ), static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFramebuffer.html + template + VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyFramebuffer && "Function requires " ); +# endif + + d.vkDestroyFramebuffer( m_device, + static_cast( framebuffer ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFramebuffer.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyFramebuffer( + static_cast( m_device ), static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFramebuffer.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
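  // Illustrative sketch (editorial example) for createFramebufferUnique; enhanced mode with exceptions is
  // assumed, and the helper name and single-attachment setup are hypothetical.
  #include <vulkan/vulkan.hpp>

  vk::UniqueFramebuffer exampleFramebuffer( vk::Device device, vk::RenderPass renderPass, vk::ImageView colorView, vk::Extent2D extent )
  {
    vk::FramebufferCreateInfo createInfo;
    createInfo.renderPass      = renderPass;
    createInfo.attachmentCount = 1;
    createInfo.pAttachments    = &colorView;
    createInfo.width           = extent.width;
    createInfo.height          = extent.height;
    createInfo.layers          = 1;

    // The Unique variant destroys the framebuffer automatically; the attachment view must outlive it.
    return device.createFramebufferUnique( createInfo );
  }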
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyFramebuffer && "Function requires " ); +# endif + + d.vkDestroyFramebuffer( m_device, + static_cast( framebuffer ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateRenderPass( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pRenderPass ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRenderPass && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateRenderPass( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRenderPass && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateRenderPass( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( renderPass, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyRenderPass.html + template + VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass 
renderPass, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyRenderPass( + static_cast( m_device ), static_cast( renderPass ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyRenderPass.html + template + VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyRenderPass && "Function requires " ); +# endif + + d.vkDestroyRenderPass( m_device, + static_cast( renderPass ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyRenderPass.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyRenderPass( + static_cast( m_device ), static_cast( renderPass ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyRenderPass.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyRenderPass && "Function requires " ); +# endif + + d.vkDestroyRenderPass( m_device, + static_cast( renderPass ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetRenderAreaGranularity, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderAreaGranularity.html + template + VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetRenderAreaGranularity( static_cast( m_device ), static_cast( renderPass ), reinterpret_cast( pGranularity ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRenderAreaGranularity, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderAreaGranularity.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRenderAreaGranularity && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Extent2D granularity; + 
d.vkGetRenderAreaGranularity( m_device, static_cast( renderPass ), reinterpret_cast( &granularity ) ); + + return granularity; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCommandPool.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateCommandPool( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pCommandPool ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCommandPool.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCommandPool && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::CommandPool commandPool; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateCommandPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &commandPool ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandPool ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCommandPool.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCommandPool && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::CommandPool commandPool; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateCommandPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &commandPool ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( commandPool, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCommandPool.html + template + VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + 
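  // Illustrative sketch (editorial example) for createRenderPass and the value-returning
  // getRenderAreaGranularity overload; enhanced mode with exceptions is assumed, and the helper name and
  // single-subpass setup are hypothetical.
  #include <vulkan/vulkan.hpp>

  vk::RenderPass exampleRenderPass( vk::Device device, vk::Format colorFormat )
  {
    vk::AttachmentDescription color;
    color.format        = colorFormat;
    color.samples       = vk::SampleCountFlagBits::e1;
    color.loadOp        = vk::AttachmentLoadOp::eClear;
    color.storeOp       = vk::AttachmentStoreOp::eStore;
    color.initialLayout = vk::ImageLayout::eUndefined;
    color.finalLayout   = vk::ImageLayout::ePresentSrcKHR;

    vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );

    vk::SubpassDescription subpass;
    subpass.pipelineBindPoint    = vk::PipelineBindPoint::eGraphics;
    subpass.colorAttachmentCount = 1;
    subpass.pColorAttachments    = &colorRef;

    vk::RenderPassCreateInfo createInfo;
    createInfo.attachmentCount = 1;
    createInfo.pAttachments    = &color;
    createInfo.subpassCount    = 1;
    createInfo.pSubpasses      = &subpass;

    vk::RenderPass renderPass = device.createRenderPass( createInfo );

    // The enhanced overload returns the vk::Extent2D by value instead of filling an out-parameter.
    vk::Extent2D granularity = device.getRenderAreaGranularity( renderPass );
    static_cast<void>( granularity );

    return renderPass;
  }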
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCommandPool( + static_cast( m_device ), static_cast( commandPool ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCommandPool.html + template + VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCommandPool && "Function requires " ); +# endif + + d.vkDestroyCommandPool( m_device, + static_cast( commandPool ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCommandPool.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCommandPool( + static_cast( m_device ), static_cast( commandPool ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCommandPool.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCommandPool && "Function requires " ); +# endif + + d.vkDestroyCommandPool( m_device, + static_cast( commandPool ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkResetCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetCommandPool.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkResetCommandPool( static_cast( m_device ), static_cast( commandPool ), static_cast( flags ) ) ); + } +#else + // wrapper function for command vkResetCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetCommandPool.html + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkResetCommandPool && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkResetCommandPool( m_device, static_cast( commandPool 
), static_cast( flags ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkAllocateCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateCommandBuffers.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkAllocateCommandBuffers( static_cast( m_device ), + reinterpret_cast( pAllocateInfo ), + reinterpret_cast( pCommandBuffers ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAllocateCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateCommandBuffers.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function requires " ); +# endif + + std::vector commandBuffers( allocateInfo.commandBufferCount ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateCommandBuffers( + m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandBuffers ) ); + } + + // wrapper function for command vkAllocateCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateCommandBuffers.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, + CommandBufferAllocator & commandBufferAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function requires " ); +# endif + + std::vector commandBuffers( allocateInfo.commandBufferCount, commandBufferAllocator ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateCommandBuffers( + m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandBuffers ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkAllocateCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateCommandBuffers.html + template < + typename Dispatch, + typename CommandBufferAllocator, + typename std::enable_if>::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, 
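  // Illustrative sketch (editorial example) for createCommandPoolUnique and resetCommandPool; enhanced mode
  // with exceptions is assumed and the helper name is hypothetical.
  #include <vulkan/vulkan.hpp>

  vk::UniqueCommandPool exampleCommandPool( vk::Device device, uint32_t queueFamilyIndex )
  {
    vk::CommandPoolCreateInfo createInfo;
    createInfo.flags            = vk::CommandPoolCreateFlagBits::eResetCommandBuffer;
    createInfo.queueFamilyIndex = queueFamilyIndex;

    vk::UniqueCommandPool pool = device.createCommandPoolUnique( createInfo );

    // resetCommandPool() recycles every command buffer allocated from the pool; the enhanced overload checks
    // the VkResult and throws on failure.
    device.resetCommandPool( pool.get() );
    return pool;
  }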
CommandBufferAllocator>>::type + Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function requires " ); +# endif + + std::vector commandBuffers( allocateInfo.commandBufferCount ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateCommandBuffers( + m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); + std::vector, CommandBufferAllocator> uniqueCommandBuffers; + uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); + detail::PoolFree deleter( *this, allocateInfo.commandPool, d ); + for ( auto const & commandBuffer : commandBuffers ) + { + uniqueCommandBuffers.push_back( UniqueHandle( commandBuffer, deleter ) ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueCommandBuffers ) ); + } + + // wrapper function for command vkAllocateCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateCommandBuffers.html + template < + typename Dispatch, + typename CommandBufferAllocator, + typename std::enable_if>::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, CommandBufferAllocator>>::type + Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, + CommandBufferAllocator & commandBufferAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function requires " ); +# endif + + std::vector commandBuffers( allocateInfo.commandBufferCount ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateCommandBuffers( + m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); + std::vector, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator ); + uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); + detail::PoolFree deleter( *this, allocateInfo.commandPool, d ); + for ( auto const & commandBuffer : commandBuffers ) + { + uniqueCommandBuffers.push_back( UniqueHandle( commandBuffer, deleter ) ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueCommandBuffers ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkFreeCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeCommandBuffers.html + template + VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkFreeCommandBuffers( static_cast( m_device ), + static_cast( commandPool ), + commandBufferCount, + reinterpret_cast( pCommandBuffers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper 
function for command vkFreeCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeCommandBuffers.html + template + VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::ArrayProxy const & commandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFreeCommandBuffers && "Function requires " ); +# endif + + d.vkFreeCommandBuffers( + m_device, static_cast( commandPool ), commandBuffers.size(), reinterpret_cast( commandBuffers.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkFreeCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeCommandBuffers.html + template + VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkFreeCommandBuffers( static_cast( m_device ), + static_cast( commandPool ), + commandBufferCount, + reinterpret_cast( pCommandBuffers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFreeCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeCommandBuffers.html + template + VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::ArrayProxy const & commandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFreeCommandBuffers && "Function requires " ); +# endif + + d.vkFreeCommandBuffers( + m_device, static_cast( commandPool ), commandBuffers.size(), reinterpret_cast( commandBuffers.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBeginCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBeginCommandBuffer.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkBeginCommandBuffer( static_cast( m_commandBuffer ), reinterpret_cast( pBeginInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBeginCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBeginCommandBuffer.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBeginCommandBuffer && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( &beginInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); + + return 
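  // Illustrative sketch (editorial example) for allocateCommandBuffersUnique; enhanced mode with exceptions
  // is assumed and the helper name is hypothetical.
  #include <vulkan/vulkan.hpp>
  #include <vector>

  std::vector<vk::UniqueCommandBuffer> exampleCommandBuffers( vk::Device device, vk::CommandPool pool )
  {
    vk::CommandBufferAllocateInfo allocateInfo;
    allocateInfo.commandPool        = pool;
    allocateInfo.level              = vk::CommandBufferLevel::ePrimary;
    allocateInfo.commandBufferCount = 2;

    // The Unique variant returns handles whose deleter hands the buffers back to the originating pool via
    // vkFreeCommandBuffers; the plain overload returns vk::CommandBuffer values that can be released
    // explicitly with device.freeCommandBuffers( pool, buffers ).
    return device.allocateCommandBuffersUnique( allocateInfo );
  }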
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEndCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEndCommandBuffer.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEndCommandBuffer( static_cast( m_commandBuffer ) ) ); + } +#else + // wrapper function for command vkEndCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEndCommandBuffer.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::end( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEndCommandBuffer && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkEndCommandBuffer( m_commandBuffer ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkResetCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetCommandBuffer.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkResetCommandBuffer( static_cast( m_commandBuffer ), static_cast( flags ) ) ); + } +#else + // wrapper function for command vkResetCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetCommandBuffer.html + template + VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkResetCommandBuffer && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkResetCommandBuffer( m_commandBuffer, static_cast( flags ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkCmdBindPipeline, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindPipeline.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindPipeline( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + } + + // wrapper function for command vkCmdSetViewport, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewport.html + template + VULKAN_HPP_INLINE void 
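  // Illustrative sketch (editorial example) for the begin / bindPipeline / end wrappers; enhanced mode with
  // exceptions is assumed, and the helper name and recording content are hypothetical.
  #include <vulkan/vulkan.hpp>

  void exampleRecord( vk::CommandBuffer cmd, vk::Pipeline pipeline )
  {
    vk::CommandBufferBeginInfo beginInfo;
    beginInfo.flags = vk::CommandBufferUsageFlagBits::eOneTimeSubmit;

    cmd.begin( beginInfo );                                          // vkBeginCommandBuffer, result checked
    cmd.bindPipeline( vk::PipelineBindPoint::eGraphics, pipeline );  // vkCmdBindPipeline
    cmd.end();                                                       // vkEndCommandBuffer, result checked

    // cmd.reset() would return the buffer to the initial state; that requires the pool to have been
    // created with eResetCommandBuffer.
  }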
+  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t                               firstViewport,
+                                                     uint32_t                               viewportCount,
+                                                     const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetViewport( static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdSetViewport, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewport.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport,
+                                                     VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdSetViewport && "Function <vkCmdSetViewport> requires <VK_VERSION_1_0>" );
+#  endif
+
+    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkCmdSetScissor, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissor.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t                             firstScissor,
+                                                    uint32_t                             scissorCount,
+                                                    const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
+                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetScissor( static_cast<VkCommandBuffer>( m_commandBuffer ), firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdSetScissor, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissor.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor,
+                                                    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
+                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdSetScissor && "Function <vkCmdSetScissor> requires <VK_VERSION_1_0>" );
+#  endif
+
+    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkCmdSetLineWidth, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineWidth.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetLineWidth( static_cast<VkCommandBuffer>( m_commandBuffer ), lineWidth );
+  }
+
+  // wrapper function for command vkCmdSetDepthBias, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBias.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthBias( static_cast<VkCommandBuffer>( m_commandBuffer ), depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+  }
+
+  // wrapper function for command vkCmdSetBlendConstants, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetBlendConstants.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float
blendConstants[4], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetBlendConstants( static_cast( m_commandBuffer ), blendConstants ); + } + + // wrapper function for command vkCmdSetDepthBounds, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBounds.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBounds( static_cast( m_commandBuffer ), minDepthBounds, maxDepthBounds ); + } + + // wrapper function for command vkCmdSetStencilCompareMask, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilCompareMask.html + template + VULKAN_HPP_INLINE void + CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilCompareMask( static_cast( m_commandBuffer ), static_cast( faceMask ), compareMask ); + } + + // wrapper function for command vkCmdSetStencilWriteMask, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilWriteMask.html + template + VULKAN_HPP_INLINE void + CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilWriteMask( static_cast( m_commandBuffer ), static_cast( faceMask ), writeMask ); + } + + // wrapper function for command vkCmdSetStencilReference, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilReference.html + template + VULKAN_HPP_INLINE void + CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilReference( static_cast( m_commandBuffer ), static_cast( faceMask ), reference ); + } + + // wrapper function for command vkCmdBindDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorSets( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + descriptorSetCount, + reinterpret_cast( pDescriptorSets ), + dynamicOffsetCount, + pDynamicOffsets ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + 
VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorSets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dynamicOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets && "Function requires " ); +# endif + + d.vkCmdBindDescriptorSets( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + descriptorSets.size(), + reinterpret_cast( descriptorSets.data() ), + dynamicOffsets.size(), + dynamicOffsets.data() ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindIndexBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindIndexBuffer.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindIndexBuffer( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( indexType ) ); + } + + // wrapper function for command vkCmdBindVertexBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindVertexBuffers( static_cast( m_commandBuffer ), + firstBinding, + bindingCount, + reinterpret_cast( pBuffers ), + reinterpret_cast( pOffsets ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindVertexBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers && "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindVertexBuffers( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDraw, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDraw.html + template + VULKAN_HPP_INLINE void CommandBuffer::draw( + uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDraw( static_cast( m_commandBuffer ), vertexCount, 
instanceCount, firstVertex, firstInstance ); + } + + // wrapper function for command vkCmdDrawIndexed, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexed.html + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndexed( static_cast( m_commandBuffer ), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } + + // wrapper function for command vkCmdDrawIndirect, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirect.html + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndirect( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } + + // wrapper function for command vkCmdDrawIndexedIndirect, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirect.html + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndexedIndirect( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } + + // wrapper function for command vkCmdDispatch, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatch.html + template + VULKAN_HPP_INLINE void + CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatch( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); + } + + // wrapper function for command vkCmdDispatchIndirect, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchIndirect.html + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchIndirect( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ) ); + } + + // wrapper function for command vkCmdCopyBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBuffer( static_cast( m_commandBuffer ), + static_cast( srcBuffer ), + static_cast( dstBuffer ), + regionCount, + reinterpret_cast( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyBuffer, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer && "Function requires " ); +# endif + + d.vkCmdCopyBuffer( m_commandBuffer, + static_cast( srcBuffer ), + static_cast( dstBuffer ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImage( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regionCount, + reinterpret_cast( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyImage && "Function requires " ); +# endif + + d.vkCmdCopyImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBlitImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage.html + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBlitImage( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regionCount, + reinterpret_cast( pRegions ), + static_cast( filter ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBlitImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage.html + 
template + VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBlitImage && "Function requires " ); +# endif + + d.vkCmdBlitImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ), + static_cast( filter ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyBufferToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBufferToImage( static_cast( m_commandBuffer ), + static_cast( srcBuffer ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regionCount, + reinterpret_cast( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyBufferToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage && "Function requires " ); +# endif + + d.vkCmdCopyBufferToImage( m_commandBuffer, + static_cast( srcBuffer ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyImageToBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImageToBuffer( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstBuffer ), + regionCount, + reinterpret_cast( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyImageToBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer.html + template + VULKAN_HPP_INLINE void 
CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer && "Function requires " ); +# endif + + d.vkCmdCopyImageToBuffer( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstBuffer ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdUpdateBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdUpdateBuffer.html + template + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize dataSize, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdUpdateBuffer( static_cast( m_commandBuffer ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( dataSize ), + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdUpdateBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdUpdateBuffer.html + template + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::ArrayProxy const & data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdUpdateBuffer && "Function requires " ); +# endif + + d.vkCmdUpdateBuffer( m_commandBuffer, + static_cast( dstBuffer ), + static_cast( dstOffset ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdFillBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdFillBuffer.html + template + VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + uint32_t data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdFillBuffer( static_cast( m_commandBuffer ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( size ), + data ); + } + + // wrapper function for command vkCmdClearColorImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearColorImage.html + template + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdClearColorImage( static_cast( m_commandBuffer ), + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( pColor ), + rangeCount, + reinterpret_cast( pRanges 
) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdClearColorImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearColorImage.html + template + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue & color, + VULKAN_HPP_NAMESPACE::ArrayProxy const & ranges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdClearColorImage && "Function requires " ); +# endif + + d.vkCmdClearColorImage( m_commandBuffer, + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( &color ), + ranges.size(), + reinterpret_cast( ranges.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdClearDepthStencilImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearDepthStencilImage.html + template + VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdClearDepthStencilImage( static_cast( m_commandBuffer ), + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( pDepthStencil ), + rangeCount, + reinterpret_cast( pRanges ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdClearDepthStencilImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearDepthStencilImage.html + template + VULKAN_HPP_INLINE void + CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, + VULKAN_HPP_NAMESPACE::ArrayProxy const & ranges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdClearDepthStencilImage && "Function requires " ); +# endif + + d.vkCmdClearDepthStencilImage( m_commandBuffer, + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( &depthStencil ), + ranges.size(), + reinterpret_cast( ranges.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdClearAttachments, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearAttachments.html + template + VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, + uint32_t rectCount, + const VULKAN_HPP_NAMESPACE::ClearRect * pRects, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdClearAttachments( static_cast( m_commandBuffer ), + attachmentCount, + reinterpret_cast( pAttachments ), + rectCount, + reinterpret_cast( pRects ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdClearAttachments, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearAttachments.html + template + VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy const & attachments, + VULKAN_HPP_NAMESPACE::ArrayProxy const & rects, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdClearAttachments && "Function requires " ); +# endif + + d.vkCmdClearAttachments( m_commandBuffer, + attachments.size(), + reinterpret_cast( attachments.data() ), + rects.size(), + reinterpret_cast( rects.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdResolveImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage.html + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResolveImage( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regionCount, + reinterpret_cast( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdResolveImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage.html + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdResolveImage && "Function requires " ); +# endif + + d.vkCmdResolveImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent.html + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetEvent( static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); + } + + // wrapper function for command vkCmdResetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetEvent.html + template + VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResetEvent( static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask 
) ); + } + + // wrapper function for command vkCmdWaitEvents, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents.html + template + VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWaitEvents( static_cast( m_commandBuffer ), + eventCount, + reinterpret_cast( pEvents ), + static_cast( srcStageMask ), + static_cast( dstStageMask ), + memoryBarrierCount, + reinterpret_cast( pMemoryBarriers ), + bufferMemoryBarrierCount, + reinterpret_cast( pBufferMemoryBarriers ), + imageMemoryBarrierCount, + reinterpret_cast( pImageMemoryBarriers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdWaitEvents, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents.html + template + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdWaitEvents && "Function requires " ); +# endif + + d.vkCmdWaitEvents( m_commandBuffer, + events.size(), + reinterpret_cast( events.data() ), + static_cast( srcStageMask ), + static_cast( dstStageMask ), + memoryBarriers.size(), + reinterpret_cast( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast( bufferMemoryBarriers.data() ), + imageMemoryBarriers.size(), + reinterpret_cast( imageMemoryBarriers.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPipelineBarrier, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier.html + template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPipelineBarrier( static_cast( m_commandBuffer ), + static_cast( srcStageMask ), + static_cast( dstStageMask ), + static_cast( dependencyFlags ), + memoryBarrierCount, + reinterpret_cast( pMemoryBarriers ), + bufferMemoryBarrierCount, + 
reinterpret_cast( pBufferMemoryBarriers ), + imageMemoryBarrierCount, + reinterpret_cast( pImageMemoryBarriers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPipelineBarrier, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier.html + template + VULKAN_HPP_INLINE void + CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier && "Function requires " ); +# endif + + d.vkCmdPipelineBarrier( m_commandBuffer, + static_cast( srcStageMask ), + static_cast( dstStageMask ), + static_cast( dependencyFlags ), + memoryBarriers.size(), + reinterpret_cast( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast( bufferMemoryBarriers.data() ), + imageMemoryBarriers.size(), + reinterpret_cast( imageMemoryBarriers.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginQuery, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginQuery.html + template + VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginQuery( + static_cast( m_commandBuffer ), static_cast( queryPool ), query, static_cast( flags ) ); + } + + // wrapper function for command vkCmdEndQuery, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndQuery.html + template + VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndQuery( static_cast( m_commandBuffer ), static_cast( queryPool ), query ); + } + + // wrapper function for command vkCmdResetQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetQueryPool.html + template + VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResetQueryPool( static_cast( m_commandBuffer ), static_cast( queryPool ), firstQuery, queryCount ); + } + + // wrapper function for command vkCmdWriteTimestamp, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteTimestamp.html + template + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteTimestamp( + static_cast( m_commandBuffer ), static_cast( pipelineStage ), static_cast( queryPool ), query ); + } + + // wrapper 
function for command vkCmdCopyQueryPoolResults, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyQueryPoolResults.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyQueryPoolResults( static_cast( m_commandBuffer ), + static_cast( queryPool ), + firstQuery, + queryCount, + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( stride ), + static_cast( flags ) ); + } + + // wrapper function for command vkCmdPushConstants, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants.html + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void * pValues, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushConstants( static_cast( m_commandBuffer ), + static_cast( layout ), + static_cast( stageFlags ), + offset, + size, + pValues ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushConstants, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants.html + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + VULKAN_HPP_NAMESPACE::ArrayProxy const & values, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushConstants && "Function requires " ); +# endif + + d.vkCmdPushConstants( m_commandBuffer, + static_cast( layout ), + static_cast( stageFlags ), + offset, + values.size() * sizeof( ValuesType ), + reinterpret_cast( values.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass.html + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginRenderPass( static_cast( m_commandBuffer ), + reinterpret_cast( pRenderPassBegin ), + static_cast( contents ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass.html + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass && "Function 
requires " ); +# endif + + d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( &renderPassBegin ), static_cast( contents ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdNextSubpass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass.html + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdNextSubpass( static_cast( m_commandBuffer ), static_cast( contents ) ); + } + + // wrapper function for command vkCmdEndRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass.html + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRenderPass( static_cast( m_commandBuffer ) ); + } + + // wrapper function for command vkCmdExecuteCommands, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteCommands.html + template + VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdExecuteCommands( static_cast( m_commandBuffer ), commandBufferCount, reinterpret_cast( pCommandBuffers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdExecuteCommands, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteCommands.html + template + VULKAN_HPP_INLINE void CommandBuffer::executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy const & commandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdExecuteCommands && "Function requires " ); +# endif + + d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast( commandBuffers.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_1 === + + // wrapper function for command vkEnumerateInstanceVersion, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceVersion.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion, Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumerateInstanceVersion( pApiVersion ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumerateInstanceVersion, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceVersion.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type enumerateInstanceVersion( Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateInstanceVersion && "Function requires " ); +# endif + + uint32_t apiVersion; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkEnumerateInstanceVersion( &apiVersion ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::enumerateInstanceVersion" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( apiVersion ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBindBufferMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkBindBufferMemory2( static_cast( m_device ), bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindBufferMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory2.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindBufferMemory2 && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBindImageMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkBindImageMemory2( static_cast( m_device ), bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindImageMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory2.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindImageMemory2 && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceGroupPeerMemoryFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPeerMemoryFeatures.html + template + VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, 
+ uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceGroupPeerMemoryFeatures( + static_cast( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceGroupPeerMemoryFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPeerMemoryFeatures.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( + uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPeerMemoryFeatures && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; + d.vkGetDeviceGroupPeerMemoryFeatures( + m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( &peerMemoryFeatures ) ); + + return peerMemoryFeatures; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDeviceMask, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDeviceMask.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDeviceMask( static_cast( m_commandBuffer ), deviceMask ); + } + + // wrapper function for command vkCmdDispatchBase, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchBase.html + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchBase( static_cast( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + + // wrapper function for command vkEnumeratePhysicalDeviceGroups, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroups.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::enumeratePhysicalDeviceGroups( uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumeratePhysicalDeviceGroups( static_cast( m_instance ), + pPhysicalDeviceGroupCount, + reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumeratePhysicalDeviceGroups, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroups.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroups &&
+                       "Function <vkEnumeratePhysicalDeviceGroups> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" );
+#  endif
+
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
+    uint32_t                                                                                                 physicalDeviceGroupCount;
+    VULKAN_HPP_NAMESPACE::Result                                                                             result;
+    do
+    {
+      // two-call idiom: first query the count, then fill the vector; repeat if the count changed in between (VK_INCOMPLETE)
+      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
+      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups(
+          m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
+      }
+    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
+    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
+    {
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) );
+  }
+
+  // wrapper function for command vkEnumeratePhysicalDeviceGroups, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroups.html
+  template <typename PhysicalDeviceGroupPropertiesAllocator,
+            typename Dispatch,
+            typename std::enable_if<
+              std::is_same<typename PhysicalDeviceGroupPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value,
+              int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+    Instance::enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroups &&
+                       "Function <vkEnumeratePhysicalDeviceGroups> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" );
+#  endif
+
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
+      physicalDeviceGroupPropertiesAllocator );
+    uint32_t                     physicalDeviceGroupCount;
+    VULKAN_HPP_NAMESPACE::Result result;
+    do
+    {
+      result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
+      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkEnumeratePhysicalDeviceGroups(
+          m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
+      }
+    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
+    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
+    {
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkGetImageMemoryRequirements2, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
+                                                              VULKAN_HPP_NAMESPACE::MemoryRequirements2 *
pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2 && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetImageMemoryRequirements2( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetImageMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2 && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetImageMemoryRequirements2( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetBufferMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2.html + template + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetBufferMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2 && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + 
d.vkGetBufferMemoryRequirements2( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetBufferMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2 && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetBufferMemoryRequirements2( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSparseMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2.html + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSparseMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSparseMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2 && + "Function requires or " ); +# endif + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2( + m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + // wrapper function for command vkGetImageSparseMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
std::vector + Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2 && + "Function requires or " ); +# endif + + std::vector sparseMemoryRequirements( + sparseImageMemoryRequirements2Allocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2( + m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceFeatures2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFeatures2( static_cast( m_physicalDevice ), reinterpret_cast( pFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceFeatures2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2 && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( &features ) ); + + return features; + } + + // wrapper function for command vkGetPhysicalDeviceFeatures2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2 && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get(); + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( &features ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceProperties2, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceProperties2( static_cast( m_physicalDevice ), reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2 && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; + d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast( &properties ) ); + + return properties; + } + + // wrapper function for command vkGetPhysicalDeviceProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2 && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get(); + d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast( &properties ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFormatProperties2( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( pFormatProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2 && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; + d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast( format ), 
reinterpret_cast( &formatProperties ) ); + + return formatProperties; + } + + // wrapper function for command vkGetPhysicalDeviceFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2 && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get(); + d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( pImageFormatInfo ), + reinterpret_cast( pImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2 && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2 && + "Function requires or " ); +# endif + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceQueueFamilyProperties2( + static_cast( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html + template < + typename QueueFamilyProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && + "Function requires or " ); +# endif + + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html + template < + typename QueueFamilyProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && + "Function requires or " ); +# endif + + std::vector 
queueFamilyProperties( queueFamilyProperties2Allocator ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && + "Function requires or " ); +# endif + + std::vector structureChains; + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + structureChains.resize( queueFamilyPropertyCount ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = structureChains[i].template get().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + structureChains.resize( queueFamilyPropertyCount ); + } + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + structureChains[i].template get() = queueFamilyProperties[i]; + } + return structureChains; + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && + "Function requires or " ); +# endif + + std::vector structureChains( structureChainAllocator ); + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + structureChains.resize( queueFamilyPropertyCount ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = structureChains[i].template get().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= 
queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + structureChains.resize( queueFamilyPropertyCount ); + } + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + structureChains[i].template get() = queueFamilyProperties[i]; + } + return structureChains; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceMemoryProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( pMemoryProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2 && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + + return memoryProperties; + } + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2 && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = + structureChain.template get(); + d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( pFormatInfo ), + pPropertyCount, + reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper 
function for command vkGetPhysicalDeviceSparseImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2.html + template < + typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2 && + "Function requires or " ); +# endif + + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2.html + template < + typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2 && + "Function requires or " ); +# endif + + std::vector properties( sparseImageFormatProperties2Allocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkTrimCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTrimCommandPool.html + template + VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkTrimCommandPool( static_cast( m_device ), static_cast( commandPool ), static_cast( flags ) ); + } + + // wrapper function for command vkGetDeviceQueue2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceQueue2.html + template + VULKAN_HPP_INLINE void 
Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo, + VULKAN_HPP_NAMESPACE::Queue * pQueue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceQueue2( static_cast( m_device ), reinterpret_cast( pQueueInfo ), reinterpret_cast( pQueue ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceQueue2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceQueue2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceQueue2 && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Queue queue; + d.vkGetDeviceQueue2( m_device, reinterpret_cast( &queueInfo ), reinterpret_cast( &queue ) ); + + return queue; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateSamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversion.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateSamplerYcbcrConversion( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pYcbcrConversion ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateSamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversion.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversion && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSamplerYcbcrConversion( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( ycbcrConversion ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateSamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversion.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & 
createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversion && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSamplerYcbcrConversion( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( ycbcrConversion, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversion.html + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySamplerYcbcrConversion( static_cast( m_device ), + static_cast( ycbcrConversion ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversion.html + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversion && + "Function requires or " ); +# endif + + d.vkDestroySamplerYcbcrConversion( + m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversion.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySamplerYcbcrConversion( static_cast( m_device ), + static_cast( ycbcrConversion ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversion.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + 
VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversion && + "Function requires or " ); +# endif + + d.vkDestroySamplerYcbcrConversion( + m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplate.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDescriptorUpdateTemplate( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDescriptorUpdateTemplate ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplate.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplate && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorUpdateTemplate ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplate.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplate && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 
result, + UniqueHandle( + descriptorUpdateTemplate, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplate.html + template + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorUpdateTemplate( static_cast( m_device ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplate.html + template + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplate && + "Function requires or " ); +# endif + + d.vkDestroyDescriptorUpdateTemplate( + m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplate.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorUpdateTemplate( static_cast( m_device ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplate.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplate && + "Function requires or " ); +# endif + + d.vkDestroyDescriptorUpdateTemplate( + m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUpdateDescriptorSetWithTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSetWithTemplate.html + template + VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate 
descriptorUpdateTemplate, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUpdateDescriptorSetWithTemplate( static_cast( m_device ), + static_cast( descriptorSet ), + static_cast( descriptorUpdateTemplate ), + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUpdateDescriptorSetWithTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSetWithTemplate.html + template + VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSetWithTemplate && + "Function requires or " ); +# endif + + d.vkUpdateDescriptorSetWithTemplate( m_device, + static_cast( descriptorSet ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( &data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceExternalBufferProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalBufferProperties.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceExternalBufferProperties( static_cast( m_physicalDevice ), + reinterpret_cast( pExternalBufferInfo ), + reinterpret_cast( pExternalBufferProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalBufferProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalBufferProperties.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties + PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalBufferProperties && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; + d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, + reinterpret_cast( &externalBufferInfo ), + reinterpret_cast( &externalBufferProperties ) ); + + return externalBufferProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceExternalFenceProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalFenceProperties.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceExternalFenceProperties( static_cast( m_physicalDevice ), + reinterpret_cast( pExternalFenceInfo ), + reinterpret_cast( pExternalFenceProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalFenceProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalFenceProperties.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties + PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalFenceProperties && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; + d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, + reinterpret_cast( &externalFenceInfo ), + reinterpret_cast( &externalFenceProperties ) ); + + return externalFenceProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceExternalSemaphoreProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalSemaphoreProperties.html + template + VULKAN_HPP_INLINE void + PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceExternalSemaphoreProperties( static_cast( m_physicalDevice ), + reinterpret_cast( pExternalSemaphoreInfo ), + reinterpret_cast( pExternalSemaphoreProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalSemaphoreProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalSemaphoreProperties.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalSemaphoreProperties && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; + d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, + reinterpret_cast( &externalSemaphoreInfo ), + reinterpret_cast( &externalSemaphoreProperties ) ); + + return externalSemaphoreProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDescriptorSetLayoutSupport, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupport.html + template + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + 
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDescriptorSetLayoutSupport( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pSupport ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDescriptorSetLayoutSupport, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupport.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupport && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; + d.vkGetDescriptorSetLayoutSupport( + m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + + return support; + } + + // wrapper function for command vkGetDescriptorSetLayoutSupport, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupport.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupport && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get(); + d.vkGetDescriptorSetLayoutSupport( + m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_2 === + + // wrapper function for command vkCmdDrawIndirectCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectCount.html + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndirectCount( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + // wrapper function for command vkCmdDrawIndexedIndirectCount, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirectCount.html + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndexedIndirectCount( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + // wrapper function for command vkCreateRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateRenderPass2( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pRenderPass ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRenderPass2 && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateRenderPass2( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRenderPass2 && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateRenderPass2( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( renderPass, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass2.html + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const 
VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
+                                                          const VULKAN_HPP_NAMESPACE::SubpassBeginInfo *    pSubpassBeginInfo,
+                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginRenderPass2( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                             reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
+                             reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdBeginRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass2.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
+                                                          const VULKAN_HPP_NAMESPACE::SubpassBeginInfo &    subpassBeginInfo,
+                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass2 && "Function requires <VK_VERSION_1_2> or <VK_KHR_create_renderpass2>" );
+# endif
+
+    d.vkCmdBeginRenderPass2(
+      m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkCmdNextSubpass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass2.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
+                                                      const VULKAN_HPP_NAMESPACE::SubpassEndInfo *   pSubpassEndInfo,
+                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdNextSubpass2( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                         reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ),
+                         reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdNextSubpass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass2.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
+                                                      const VULKAN_HPP_NAMESPACE::SubpassEndInfo &   subpassEndInfo,
+                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdNextSubpass2 && "Function requires <VK_VERSION_1_2> or <VK_KHR_create_renderpass2>" );
+# endif
+
+    d.vkCmdNextSubpass2(
+      m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkCmdEndRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass2.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndRenderPass2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdEndRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass2.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdEndRenderPass2 && "Function requires <VK_VERSION_1_2> or <VK_KHR_create_renderpass2>" );
+# endif
+
+    d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkResetQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetQueryPool.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkResetQueryPool( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+  }
+
+  // wrapper function for command vkGetSemaphoreCounterValue, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreCounterValue.html
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+                                                                                  uint64_t *                      pValue,
+                                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetSemaphoreCounterValue( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( semaphore ), pValue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkGetSemaphoreCounterValue, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreCounterValue.html
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+                                                                                                                    Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetSemaphoreCounterValue && "Function requires <VK_VERSION_1_2> or <VK_KHR_timeline_semaphore>" );
+# endif
+
+    uint64_t value;
+    VULKAN_HPP_NAMESPACE::Result result =
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
+
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkWaitSemaphores, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitSemaphores.html
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
+                                                                        uint64_t                                        timeout,
+                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkWaitSemaphores( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkWaitSemaphores, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitSemaphores.html
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+    Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkWaitSemaphores && "Function requires <VK_VERSION_1_2> or <VK_KHR_timeline_semaphore>" );
+# endif
+
+    VULKAN_HPP_NAMESPACE::Result result =
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWaitSemaphores( m_device,
reinterpret_cast( &waitInfo ), timeout ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSignalSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSignalSemaphore.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkSignalSemaphore( static_cast( m_device ), reinterpret_cast( pSignalInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSignalSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSignalSemaphore.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSignalSemaphore && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkSignalSemaphore( m_device, reinterpret_cast( &signalInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetBufferDeviceAddress, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddress.html + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetBufferDeviceAddress( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferDeviceAddress, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddress.html + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddress && + "Function requires or or " ); +# endif + + VkDeviceAddress result = d.vkGetBufferDeviceAddress( m_device, reinterpret_cast( &info ) ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetBufferOpaqueCaptureAddress, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureAddress.html + template + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
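  // --------------------------------------------------------------------------------------------
  // Editorial note, kept entirely inside a C++ comment so it does not alter the surrounding code:
  // a minimal usage sketch for the VK_VERSION_1_2 timeline-semaphore wrappers defined above
  // (waitSemaphores, signalSemaphore, getSemaphoreCounterValue). The handles `device` and
  // `timelineSemaphore` and the value `waitValue` are assumptions made only for the example.
  //
  //   uint64_t waitValue = 2;
  //   vk::SemaphoreWaitInfo waitInfo{};
  //   waitInfo.semaphoreCount = 1;
  //   waitInfo.pSemaphores    = &timelineSemaphore;
  //   waitInfo.pValues        = &waitValue;
  //   vk::Result waitResult = device.waitSemaphores( waitInfo, UINT64_MAX );   // eSuccess or eTimeout
  //   uint64_t   counter    = device.getSemaphoreCounterValue( timelineSemaphore );
  //   device.signalSemaphore( vk::SemaphoreSignalInfo( timelineSemaphore, counter + 1 ) );
  // --------------------------------------------------------------------------------------------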
return d.vkGetBufferOpaqueCaptureAddress( static_cast( m_device ), reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferOpaqueCaptureAddress, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureAddress.html + template + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureAddress && + "Function requires or " ); +# endif + + uint64_t result = d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast( &info ) ); + + return result; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceMemoryOpaqueCaptureAddress, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryOpaqueCaptureAddress.html + template + VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetDeviceMemoryOpaqueCaptureAddress( static_cast( m_device ), + reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceMemoryOpaqueCaptureAddress, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryOpaqueCaptureAddress.html + template + VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryOpaqueCaptureAddress && + "Function requires or " ); +# endif + + uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast( &info ) ); + + return result; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_3 === + + // wrapper function for command vkGetPhysicalDeviceToolProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolProperties.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolProperties( uint32_t * pToolCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceToolProperties( + static_cast( m_physicalDevice ), pToolCount, reinterpret_cast( pToolProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceToolProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolProperties.html + template < + typename PhysicalDeviceToolPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getToolProperties( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 
1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolProperties && + "Function requires or " ); +# endif + + std::vector toolProperties; + uint32_t toolCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = static_cast( + d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + if ( toolCount < toolProperties.size() ) + { + toolProperties.resize( toolCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceToolProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolProperties.html + template < + typename PhysicalDeviceToolPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolProperties && + "Function requires or " ); +# endif + + std::vector toolProperties( + physicalDeviceToolPropertiesAllocator ); + uint32_t toolCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = static_cast( + d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + if ( toolCount < toolProperties.size() ) + { + toolProperties.resize( toolCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreatePrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlot.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreatePrivateDataSlot( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPrivateDataSlot ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // 
wrapper function for command vkCreatePrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlot.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlot && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreatePrivateDataSlot( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &privateDataSlot ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( privateDataSlot ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreatePrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlot.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlot && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreatePrivateDataSlot( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &privateDataSlot ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( privateDataSlot, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlot.html + template + VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPrivateDataSlot( + static_cast( m_device ), static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlot.html + template + VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlot && "Function requires 
or " ); +# endif + + d.vkDestroyPrivateDataSlot( + m_device, + static_cast( privateDataSlot ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlot.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPrivateDataSlot( + static_cast( m_device ), static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlot.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlot && "Function requires or " ); +# endif + + d.vkDestroyPrivateDataSlot( + m_device, + static_cast( privateDataSlot ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetPrivateData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetPrivateData.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkSetPrivateData( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); + } +#else + // wrapper function for command vkSetPrivateData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetPrivateData.html + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetPrivateData && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkSetPrivateData( m_device, static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkGetPrivateData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPrivateData.html + template + VULKAN_HPP_INLINE void Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + 
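  // --------------------------------------------------------------------------------------------
  // Editorial note, kept inside a C++ comment so the surrounding declaration is unaffected:
  // a minimal usage sketch for the VK_VERSION_1_3 private-data wrappers (createPrivateDataSlot,
  // setPrivateData, getPrivateData, destroyPrivateDataSlot). `device` and `image` are assumed
  // handles introduced only for the example.
  //
  //   vk::PrivateDataSlot slot        = device.createPrivateDataSlot( vk::PrivateDataSlotCreateInfo{} );
  //   uint64_t            imageHandle = uint64_t( static_cast<VkImage>( image ) );  // raw handle value
  //   device.setPrivateData( vk::ObjectType::eImage, imageHandle, slot, 42u );
  //   uint64_t stored = device.getPrivateData( vk::ObjectType::eImage, imageHandle, slot );
  //   device.destroyPrivateDataSlot( slot );
  // --------------------------------------------------------------------------------------------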
uint64_t * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPrivateData( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPrivateData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPrivateData.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPrivateData && "Function requires or " ); +# endif + + uint64_t data; + d.vkGetPrivateData( m_device, static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), &data ); + + return data; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetEvent2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent2.html + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetEvent2( + static_cast( m_commandBuffer ), static_cast( event ), reinterpret_cast( pDependencyInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetEvent2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent2.html + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetEvent2 && "Function requires or " ); +# endif + + d.vkCmdSetEvent2( m_commandBuffer, static_cast( event ), reinterpret_cast( &dependencyInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdResetEvent2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetEvent2.html + template + VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResetEvent2( static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); + } + + // wrapper function for command vkCmdWaitEvents2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents2.html + template + VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWaitEvents2( static_cast( m_commandBuffer ), + eventCount, + reinterpret_cast( pEvents ), + reinterpret_cast( pDependencyInfos ) ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdWaitEvents2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents2.html + template + VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdWaitEvents2 && "Function requires or " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); +# else + if ( events.size() != dependencyInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdWaitEvents2( m_commandBuffer, + events.size(), + reinterpret_cast( events.data() ), + reinterpret_cast( dependencyInfos.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPipelineBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier2.html + template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPipelineBarrier2( static_cast( m_commandBuffer ), reinterpret_cast( pDependencyInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPipelineBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier2.html + template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier2 && "Function requires or " ); +# endif + + d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast( &dependencyInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdWriteTimestamp2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteTimestamp2.html + template + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteTimestamp2( + static_cast( m_commandBuffer ), static_cast( stage ), static_cast( queryPool ), query ); + } + + // wrapper function for command vkQueueSubmit2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkQueueSubmit2( static_cast( m_queue ), submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper 
function for command vkQueueSubmit2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit2.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Queue::submit2( + VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueSubmit2 && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkQueueSubmit2( m_queue, submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer2.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBuffer2( static_cast( m_commandBuffer ), reinterpret_cast( pCopyBufferInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer2.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer2 && "Function requires or " ); +# endif + + d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast( ©BufferInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage2.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImage2( static_cast( m_commandBuffer ), reinterpret_cast( pCopyImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage2.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyImage2 && "Function requires or " ); +# endif + + d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast( ©ImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyBufferToImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage2.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, + 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBufferToImage2( static_cast( m_commandBuffer ), reinterpret_cast( pCopyBufferToImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyBufferToImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage2.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage2 && "Function requires or " ); +# endif + + d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast( ©BufferToImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyImageToBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer2.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImageToBuffer2( static_cast( m_commandBuffer ), reinterpret_cast( pCopyImageToBufferInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyImageToBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer2.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer2 && "Function requires or " ); +# endif + + d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast( ©ImageToBufferInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBlitImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage2.html + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBlitImage2( static_cast( m_commandBuffer ), reinterpret_cast( pBlitImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBlitImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage2.html + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBlitImage2 && "Function requires or " ); +# endif + + d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast( &blitImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdResolveImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage2.html + template + 
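  // --------------------------------------------------------------------------------------------
  // Editorial note, kept inside a C++ comment so the surrounding declarations are unaffected:
  // a minimal usage sketch for the VK_VERSION_1_3 "2" copy commands wrapped above (copyBuffer2,
  // copyImage2, copyBufferToImage2, copyImageToBuffer2, blitImage2). `cmd`, `srcBuffer` and
  // `dstBuffer` are assumed handles introduced only for the example.
  //
  //   vk::BufferCopy2     region( /* srcOffset */ 0, /* dstOffset */ 0, /* size */ 256 );
  //   vk::CopyBufferInfo2 copyInfo( srcBuffer, dstBuffer, region );
  //   cmd.copyBuffer2( copyInfo );   // records vkCmdCopyBuffer2 through the wrapper above
  // --------------------------------------------------------------------------------------------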
VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResolveImage2( static_cast( m_commandBuffer ), reinterpret_cast( pResolveImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdResolveImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage2.html + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdResolveImage2 && "Function requires or " ); +# endif + + d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast( &resolveImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginRendering, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRendering.html + template + VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginRendering( static_cast( m_commandBuffer ), reinterpret_cast( pRenderingInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginRendering, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRendering.html + template + VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginRendering && "Function requires or " ); +# endif + + d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast( &renderingInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndRendering, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRendering.html + template + VULKAN_HPP_INLINE void CommandBuffer::endRendering( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRendering( static_cast( m_commandBuffer ) ); + } + + // wrapper function for command vkCmdSetCullMode, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCullMode.html + template + VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCullMode( static_cast( m_commandBuffer ), static_cast( cullMode ) ); + } + + // wrapper function for command vkCmdSetFrontFace, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFrontFace.html + template + VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetFrontFace( static_cast( m_commandBuffer ), static_cast( frontFace ) ); + } + + // wrapper function for 
command vkCmdSetPrimitiveTopology, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveTopology.html + template + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetPrimitiveTopology( static_cast( m_commandBuffer ), static_cast( primitiveTopology ) ); + } + + // wrapper function for command vkCmdSetViewportWithCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWithCount.html + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportWithCount( static_cast( m_commandBuffer ), viewportCount, reinterpret_cast( pViewports ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetViewportWithCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWithCount.html + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetViewportWithCount && + "Function requires or or " ); +# endif + + d.vkCmdSetViewportWithCount( m_commandBuffer, viewports.size(), reinterpret_cast( viewports.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetScissorWithCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissorWithCount.html + template + VULKAN_HPP_INLINE void + CommandBuffer::setScissorWithCount( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetScissorWithCount( static_cast( m_commandBuffer ), scissorCount, reinterpret_cast( pScissors ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetScissorWithCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissorWithCount.html + template + VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetScissorWithCount && + "Function requires or or " ); +# endif + + d.vkCmdSetScissorWithCount( m_commandBuffer, scissors.size(), reinterpret_cast( scissors.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindVertexBuffers2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers2.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindVertexBuffers2( static_cast( m_commandBuffer ), + firstBinding, + bindingCount, + reinterpret_cast( pBuffers ), + reinterpret_cast( pOffsets ), + reinterpret_cast( pSizes ), + reinterpret_cast( pStrides ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindVertexBuffers2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers2.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes, + VULKAN_HPP_NAMESPACE::ArrayProxy const & strides, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers2 && + "Function requires or or " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); + VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" ); + } + if ( !strides.empty() && buffers.size() != strides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindVertexBuffers2( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ), + reinterpret_cast( strides.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDepthTestEnable, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthTestEnable.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthTestEnable( static_cast( m_commandBuffer ), static_cast( depthTestEnable ) ); + } + + // wrapper function for command vkCmdSetDepthWriteEnable, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthWriteEnable.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthWriteEnable( static_cast( m_commandBuffer ), static_cast( depthWriteEnable ) ); + } + + // wrapper function for command vkCmdSetDepthCompareOp, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthCompareOp.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthCompareOp( static_cast( m_commandBuffer ), static_cast( depthCompareOp ) ); + } + + // wrapper function for command vkCmdSetDepthBoundsTestEnable, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBoundsTestEnable.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBoundsTestEnable( static_cast( m_commandBuffer ), static_cast( depthBoundsTestEnable ) ); + } + + // wrapper function for command vkCmdSetStencilTestEnable, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilTestEnable.html + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilTestEnable( static_cast( m_commandBuffer ), static_cast( stencilTestEnable ) ); + } + + // wrapper function for command vkCmdSetStencilOp, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilOp.html + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilOp( static_cast( m_commandBuffer ), + static_cast( faceMask ), + static_cast( failOp ), + static_cast( passOp ), + static_cast( depthFailOp ), + static_cast( compareOp ) ); + } + + // wrapper function for command vkCmdSetRasterizerDiscardEnable, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizerDiscardEnable.html + template + VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRasterizerDiscardEnable( static_cast( m_commandBuffer ), static_cast( rasterizerDiscardEnable ) ); + } + + // wrapper function for command vkCmdSetDepthBiasEnable, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBiasEnable.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBiasEnable( static_cast( m_commandBuffer ), static_cast( depthBiasEnable ) ); + } + + // wrapper function for command vkCmdSetPrimitiveRestartEnable, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveRestartEnable.html + template + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetPrimitiveRestartEnable( static_cast( m_commandBuffer ), static_cast( primitiveRestartEnable ) ); + } + + // wrapper function for command 
vkGetDeviceBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirements.html + template + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceBufferMemoryRequirements( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirements.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirements && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetDeviceBufferMemoryRequirements( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetDeviceBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirements.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirements && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetDeviceBufferMemoryRequirements( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceImageMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirements.html + template + VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageMemoryRequirements( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceImageMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirements.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements( const 
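  // --------------------------------------------------------------------------------------------
  // Editorial note, kept inside a C++ comment so the surrounding declaration is unaffected:
  // a minimal usage sketch for the VK_VERSION_1_3 creation-time memory-requirement queries
  // wrapped here; they take a create-info instead of an already created resource. `device` is
  // an assumed handle introduced only for the example.
  //
  //   vk::BufferCreateInfo bufferCI( {}, 1024, vk::BufferUsageFlagBits::eStorageBuffer );
  //   vk::DeviceBufferMemoryRequirements query( &bufferCI );
  //   vk::MemoryRequirements2 reqs = device.getBufferMemoryRequirements( query );
  // --------------------------------------------------------------------------------------------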
VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirements && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetDeviceImageMemoryRequirements( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetDeviceImageMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirements.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirements && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetDeviceImageMemoryRequirements( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirements.html + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageSparseMemoryRequirements( static_cast( m_device ), + reinterpret_cast( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirements.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirements && + "Function requires or " ); +# endif + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetDeviceImageSparseMemoryRequirements( + m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetDeviceImageSparseMemoryRequirements( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( 
sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + // wrapper function for command vkGetDeviceImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirements.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirements && + "Function requires or " ); +# endif + + std::vector sparseMemoryRequirements( + sparseImageMemoryRequirements2Allocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetDeviceImageSparseMemoryRequirements( + m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetDeviceImageSparseMemoryRequirements( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_4 === + + // wrapper function for command vkCmdSetLineStipple, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStipple.html + template + VULKAN_HPP_INLINE void CommandBuffer::setLineStipple( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLineStipple( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); + } + + // wrapper function for command vkMapMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo, + void ** ppData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkMapMemory2( static_cast( m_device ), reinterpret_cast( pMemoryMapInfo ), ppData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkMapMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkMapMemory2 && "Function requires or " ); +# endif + + void * pData; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkMapMemory2( m_device, reinterpret_cast( &memoryMapInfo ), &pData ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
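  // --------------------------------------------------------------------------------------------
  // Editorial note, kept inside a C++ comment so the surrounding statement is unaffected:
  // a minimal usage sketch for the VK_VERSION_1_4 mapMemory2 / unmapMemory2 wrappers defined in
  // this block. `device` and `memory` are assumed handles introduced only for the example.
  //
  //   vk::MemoryMapInfo mapInfo( {}, memory, /* offset */ 0, /* size */ VK_WHOLE_SIZE );
  //   void * mapped = device.mapMemory2( mapInfo );
  //   // ... write through `mapped` ...
  //   device.unmapMemory2( vk::MemoryUnmapInfo( {}, memory ) );
  // --------------------------------------------------------------------------------------------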
VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUnmapMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkUnmapMemory2( static_cast( m_device ), reinterpret_cast( pMemoryUnmapInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUnmapMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory2.html + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUnmapMemory2 && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkUnmapMemory2( m_device, reinterpret_cast( &memoryUnmapInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindIndexBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindIndexBuffer2.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindIndexBuffer2( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( size ), + static_cast( indexType ) ); + } + + // wrapper function for command vkGetRenderingAreaGranularity, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderingAreaGranularity.html + template + VULKAN_HPP_INLINE void Device::getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetRenderingAreaGranularity( + static_cast( m_device ), reinterpret_cast( pRenderingAreaInfo ), reinterpret_cast( pGranularity ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRenderingAreaGranularity, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderingAreaGranularity.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D + Device::getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRenderingAreaGranularity && "Function 
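A short sketch, not from the header, of the enhanced-mode mapMemory2 / unmapMemory2 wrappers defined above; `memory`, `data` and `size` are assumed to refer to a host-visible allocation and a source buffer created elsewhere.

#include <cstring>
#include <vulkan/vulkan.hpp>

// Illustrative sketch: upload data through the VK_VERSION_1_4 map/unmap entry points.
void uploadToHostVisibleMemory( vk::Device device, vk::DeviceMemory memory, const void * data, size_t size )
{
  vk::MemoryMapInfo mapInfo{};
  mapInfo.memory = memory;
  mapInfo.offset = 0;
  mapInfo.size   = vk::WholeSize;

  // Enhanced mode returns the mapped pointer directly and throws on failure.
  void * mapped = device.mapMemory2( mapInfo );
  std::memcpy( mapped, data, size );

  vk::MemoryUnmapInfo unmapInfo{};
  unmapInfo.memory = memory;
  device.unmapMemory2( unmapInfo );
}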
requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Extent2D granularity; + d.vkGetRenderingAreaGranularity( + m_device, reinterpret_cast( &renderingAreaInfo ), reinterpret_cast( &granularity ) ); + + return granularity; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceImageSubresourceLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayout.html + template + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageSubresourceLayout( + static_cast( m_device ), reinterpret_cast( pInfo ), reinterpret_cast( pLayout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceImageSubresourceLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayout.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayout && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + d.vkGetDeviceImageSubresourceLayout( + m_device, reinterpret_cast( &info ), reinterpret_cast( &layout ) ); + + return layout; + } + + // wrapper function for command vkGetDeviceImageSubresourceLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayout.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayout && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + d.vkGetDeviceImageSubresourceLayout( + m_device, reinterpret_cast( &info ), reinterpret_cast( &layout ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSubresourceLayout2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2.html + template + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSubresourceLayout2( static_cast( m_device ), + static_cast( image ), + reinterpret_cast( pSubresource ), + reinterpret_cast( pLayout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSubresourceLayout2, see 
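An illustrative fragment for the enhanced getRenderingAreaGranularity wrapper above, querying the recommended render-area granularity for a dynamic-rendering pass; `device` and `colorFormat` are assumed to come from the application.

#include <vulkan/vulkan.hpp>

// Illustrative sketch: query render-area granularity for a pass with one color attachment.
vk::Extent2D queryRenderAreaGranularity( vk::Device device, vk::Format colorFormat )
{
  vk::RenderingAreaInfo areaInfo{};
  areaInfo.colorAttachmentCount    = 1;
  areaInfo.pColorAttachmentFormats = &colorFormat;
  areaInfo.depthAttachmentFormat   = vk::Format::eUndefined;
  areaInfo.stencilAttachmentFormat = vk::Format::eUndefined;

  return device.getRenderingAreaGranularity( areaInfo );
}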
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 Device::getImageSubresourceLayout2( + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkGetImageSubresourceLayout2 && + "Function requires or or or " ); +# endif + + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + d.vkGetImageSubresourceLayout2( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return layout; + } + + // wrapper function for command vkGetImageSubresourceLayout2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getImageSubresourceLayout2( + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkGetImageSubresourceLayout2 && + "Function requires or or or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + d.vkGetImageSubresourceLayout2( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDescriptorSet, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet.html + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSet( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWriteCount, + reinterpret_cast( pDescriptorWrites ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSet, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet.html + template + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSet( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet && "Function requires or " ); +# endif + + d.vkCmdPushDescriptorSet( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for 
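A hedged usage sketch for the ArrayProxy overload of CommandBuffer::pushDescriptorSet wrapped above; `pipelineLayout` and `uniformBuffer` are assumed handles, and binding 0 of set 0 is assumed to belong to a descriptor set layout created with the push-descriptor flag.

#include <vulkan/vulkan.hpp>

// Illustrative sketch: push a single uniform-buffer descriptor without allocating
// a descriptor set.
void pushUniformBuffer( vk::CommandBuffer commandBuffer, vk::PipelineLayout pipelineLayout, vk::Buffer uniformBuffer )
{
  vk::DescriptorBufferInfo bufferInfo{ uniformBuffer, 0, vk::WholeSize };

  vk::WriteDescriptorSet write{};
  write.dstBinding      = 0;
  write.descriptorCount = 1;
  write.descriptorType  = vk::DescriptorType::eUniformBuffer;
  write.pBufferInfo     = &bufferInfo;

  // ArrayProxy overload: a single WriteDescriptorSet can be passed directly.
  commandBuffer.pushDescriptorSet( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, write );
}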
command vkCmdPushDescriptorSetWithTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate.html + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSetWithTemplate( static_cast( m_commandBuffer ), + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSetWithTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate.html + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkCmdPushDescriptorSetWithTemplate && + "Function requires or or " ); +# endif + + d.vkCmdPushDescriptorSetWithTemplate( m_commandBuffer, + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + reinterpret_cast( &data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetRenderingAttachmentLocations, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingAttachmentLocations.html + template + VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo * pLocationInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRenderingAttachmentLocations( static_cast( m_commandBuffer ), + reinterpret_cast( pLocationInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetRenderingAttachmentLocations, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingAttachmentLocations.html + template + VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetRenderingAttachmentLocations && + "Function requires or " ); +# endif + + d.vkCmdSetRenderingAttachmentLocations( m_commandBuffer, reinterpret_cast( &locationInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetRenderingInputAttachmentIndices, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingInputAttachmentIndices.html + template + VULKAN_HPP_INLINE void + CommandBuffer::setRenderingInputAttachmentIndices( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRenderingInputAttachmentIndices( 
static_cast( m_commandBuffer ), + reinterpret_cast( pInputAttachmentIndexInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetRenderingInputAttachmentIndices, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingInputAttachmentIndices.html + template + VULKAN_HPP_INLINE void + CommandBuffer::setRenderingInputAttachmentIndices( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetRenderingInputAttachmentIndices && + "Function requires or " ); +# endif + + d.vkCmdSetRenderingInputAttachmentIndices( m_commandBuffer, reinterpret_cast( &inputAttachmentIndexInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindDescriptorSets2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets2.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo * pBindDescriptorSetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorSets2( static_cast( m_commandBuffer ), + reinterpret_cast( pBindDescriptorSetsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindDescriptorSets2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets2.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets2 && "Function requires or " ); +# endif + + d.vkCmdBindDescriptorSets2( m_commandBuffer, reinterpret_cast( &bindDescriptorSetsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushConstants2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants2.html + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo * pPushConstantsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushConstants2( static_cast( m_commandBuffer ), reinterpret_cast( pPushConstantsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushConstants2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants2.html + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushConstants2 && "Function requires or " ); +# endif + + d.vkCmdPushConstants2( m_commandBuffer, reinterpret_cast( &pushConstantsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDescriptorSet2, see 
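A sketch (not part of the header) combining the bindDescriptorSets2 and pushConstants2 wrappers above; the handles and the push-constant block layout are assumptions of the example.

#include <vulkan/vulkan.hpp>

// Illustrative sketch: the *2 binding commands take a single *Info structure that
// carries the stage flags explicitly instead of relying on the bound pipeline.
void bindAndPush( vk::CommandBuffer commandBuffer, vk::PipelineLayout pipelineLayout, vk::DescriptorSet descriptorSet )
{
  vk::BindDescriptorSetsInfo bindInfo{};
  bindInfo.stageFlags         = vk::ShaderStageFlagBits::eVertex | vk::ShaderStageFlagBits::eFragment;
  bindInfo.layout             = pipelineLayout;
  bindInfo.firstSet           = 0;
  bindInfo.descriptorSetCount = 1;
  bindInfo.pDescriptorSets    = &descriptorSet;
  commandBuffer.bindDescriptorSets2( bindInfo );

  const float tint[4] = { 1.0f, 0.5f, 0.25f, 1.0f };  // assumed push-constant block
  vk::PushConstantsInfo pushInfo{};
  pushInfo.layout     = pipelineLayout;
  pushInfo.stageFlags = vk::ShaderStageFlagBits::eFragment;
  pushInfo.offset     = 0;
  pushInfo.size       = sizeof( tint );
  pushInfo.pValues    = tint;
  commandBuffer.pushConstants2( pushInfo );
}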
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet2.html + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo * pPushDescriptorSetInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSet2( static_cast( m_commandBuffer ), reinterpret_cast( pPushDescriptorSetInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSet2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet2.html + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet2 && "Function requires or " ); +# endif + + d.vkCmdPushDescriptorSet2( m_commandBuffer, reinterpret_cast( &pushDescriptorSetInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDescriptorSetWithTemplate2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate2.html + template + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSetWithTemplate2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSetWithTemplate2( static_cast( m_commandBuffer ), + reinterpret_cast( pPushDescriptorSetWithTemplateInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSetWithTemplate2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate2.html + template + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSetWithTemplate2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetWithTemplate2 && + "Function requires or " ); +# endif + + d.vkCmdPushDescriptorSetWithTemplate2( m_commandBuffer, + reinterpret_cast( &pushDescriptorSetWithTemplateInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyMemoryToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToImage.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCopyMemoryToImage( static_cast( m_device ), reinterpret_cast( pCopyMemoryToImageInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyMemoryToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToImage.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::copyMemoryToImage( const 
VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyMemoryToImage && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCopyMemoryToImage( m_device, reinterpret_cast( ©MemoryToImageInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImage" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyImageToMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToMemory.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCopyImageToMemory( static_cast( m_device ), reinterpret_cast( pCopyImageToMemoryInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyImageToMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToMemory.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyImageToMemory && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCopyImageToMemory( m_device, reinterpret_cast( ©ImageToMemoryInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemory" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyImageToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToImage.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCopyImageToImage( static_cast( m_device ), reinterpret_cast( pCopyImageToImageInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyImageToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToImage.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyImageToImage && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkCopyImageToImage( m_device, reinterpret_cast( ©ImageToImageInfo ) ) ); + 
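An illustrative host-image-copy sketch for the copyMemoryToImage wrapper above; it assumes the hostImageCopy feature is enabled and that `image` was created with host-transfer usage and is already in the general layout.

#include <cstdint>
#include <vulkan/vulkan.hpp>

// Illustrative sketch: copy tightly packed RGBA8 pixels from host memory into an
// image without a staging buffer or a command buffer (VK_VERSION_1_4 host image copy).
void copyPixelsToImage( vk::Device device, vk::Image image, const void * pixels, uint32_t width, uint32_t height )
{
  vk::MemoryToImageCopy region{};
  region.pHostPointer     = pixels;  // rowLength/imageHeight of 0 mean tightly packed
  region.imageSubresource = vk::ImageSubresourceLayers{ vk::ImageAspectFlagBits::eColor, 0, 0, 1 };
  region.imageExtent      = vk::Extent3D{ width, height, 1 };

  vk::CopyMemoryToImageInfo copyInfo{};
  copyInfo.dstImage       = image;
  copyInfo.dstImageLayout = vk::ImageLayout::eGeneral;
  copyInfo.regionCount    = 1;
  copyInfo.pRegions       = &region;

  device.copyMemoryToImage( copyInfo );  // enhanced mode throws on failure
}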
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImage" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkTransitionImageLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTransitionImageLayout.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::transitionImageLayout( uint32_t transitionCount, + const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkTransitionImageLayout( + static_cast( m_device ), transitionCount, reinterpret_cast( pTransitions ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkTransitionImageLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTransitionImageLayout.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::transitionImageLayout( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkTransitionImageLayout && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkTransitionImageLayout( m_device, transitions.size(), reinterpret_cast( transitions.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayout" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_surface === + + // wrapper function for command vkDestroySurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySurfaceKHR.html + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySurfaceKHR( + static_cast( m_instance ), static_cast( surface ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySurfaceKHR.html + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySurfaceKHR && "Function requires " ); +# endif + + d.vkDestroySurfaceKHR( m_instance, + static_cast( surface ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySurfaceKHR.html + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
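A small sketch for the transitionImageLayout wrapper above (a host-side layout transition, again assuming the hostImageCopy feature); a single HostImageLayoutTransitionInfo is passed through the ArrayProxy overload.

#include <vulkan/vulkan.hpp>

// Illustrative sketch: transition an image to the general layout from the host,
// without recording a pipeline barrier.
void transitionToGeneral( vk::Device device, vk::Image image )
{
  vk::HostImageLayoutTransitionInfo transition{};
  transition.image            = image;
  transition.oldLayout        = vk::ImageLayout::eUndefined;
  transition.newLayout        = vk::ImageLayout::eGeneral;
  transition.subresourceRange = vk::ImageSubresourceRange{ vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 };

  device.transitionImageLayout( transition );
}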
d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySurfaceKHR( + static_cast( m_instance ), static_cast( surface ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySurfaceKHR.html + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySurfaceKHR && "Function requires " ); +# endif + + d.vkDestroySurfaceKHR( m_instance, + static_cast( surface ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSurfaceSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceSupportKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::Bool32 * pSupported, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( + static_cast( m_physicalDevice ), queueFamilyIndex, static_cast( surface ), reinterpret_cast( pSupported ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfaceSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceSupportKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceSupportKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Bool32 supported; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( + m_physicalDevice, queueFamilyIndex, static_cast( surface ), reinterpret_cast( &supported ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( supported ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + reinterpret_cast( pSurfaceCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command 
vkGetPhysicalDeviceSurfaceCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSurfaceFormatsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormatsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + pSurfaceFormatCount, + reinterpret_cast( pSurfaceFormats ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfaceFormatsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormatsKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormatsKHR && "Function requires " ); +# endif + + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( + m_physicalDevice, static_cast( surface ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); + } + + 
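A sketch of typical VK_KHR_surface queries through the enhanced wrappers above; `physicalDevice`, `surface` and `queueFamilyIndex` are assumed to come from earlier setup, and the preferred format is an arbitrary choice for the example.

#include <stdexcept>
#include <vector>
#include <vulkan/vulkan.hpp>

// Illustrative sketch: check presentation support, then pick a surface format.
vk::SurfaceFormatKHR pickSurfaceFormat( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface, uint32_t queueFamilyIndex )
{
  if ( !physicalDevice.getSurfaceSupportKHR( queueFamilyIndex, surface ) )
  {
    throw std::runtime_error( "queue family cannot present to this surface" );
  }

  vk::SurfaceCapabilitiesKHR capabilities = physicalDevice.getSurfaceCapabilitiesKHR( surface );
  (void)capabilities;  // typically used later for image count and extent

  std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
  for ( const auto & format : formats )
  {
    if ( format.format == vk::Format::eB8G8R8A8Unorm && format.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear )
    {
      return format;
    }
  }
  return formats.front();  // the list is never empty for a supported surface
}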
// wrapper function for command vkGetPhysicalDeviceSurfaceFormatsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormatsKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormatsKHR && "Function requires " ); +# endif + + std::vector surfaceFormats( surfaceFormatKHRAllocator ); + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( + m_physicalDevice, static_cast( surface ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + pPresentModeCount, + reinterpret_cast( pPresentModes ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModesKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModesKHR && "Function requires " ); +# endif + + std::vector presentModes; + uint32_t presentModeCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) + { + 
presentModes.resize( presentModeCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( + m_physicalDevice, static_cast( surface ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); + } + + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModesKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModesKHR && "Function requires " ); +# endif + + std::vector presentModes( presentModeKHRAllocator ); + uint32_t presentModeCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( + m_physicalDevice, static_cast( surface ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_swapchain === + + // wrapper function for command vkCreateSwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSwapchainKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateSwapchainKHR( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSwapchain ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateSwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSwapchainKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createSwapchainKHR( const 
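A sketch for choosing a present mode with the enhanced getSurfacePresentModesKHR wrapper above; mailbox is preferred when available, with FIFO as the fallback that is always supported.

#include <vector>
#include <vulkan/vulkan.hpp>

// Illustrative sketch: prefer mailbox, fall back to FIFO.
vk::PresentModeKHR pickPresentMode( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface )
{
  std::vector<vk::PresentModeKHR> modes = physicalDevice.getSurfacePresentModesKHR( surface );
  for ( vk::PresentModeKHR mode : modes )
  {
    if ( mode == vk::PresentModeKHR::eMailbox )
    {
      return mode;
    }
  }
  return vk::PresentModeKHR::eFifo;
}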
VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSwapchainKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateSwapchainKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchain ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateSwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSwapchainKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSwapchainKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateSwapchainKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( swapchain, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySwapchainKHR.html + template + VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySwapchainKHR( + static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySwapchainKHR.html + template + VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySwapchainKHR && "Function requires " ); +# endif + + d.vkDestroySwapchainKHR( m_device, + static_cast( swapchain ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySwapchainKHR.html + template + VULKAN_HPP_INLINE void Device::destroy( 
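A sketch using the createSwapchainKHR / createSwapchainKHRUnique wrappers above; the surface format, present mode and capabilities are assumed to have been selected as in the earlier sketches.

#include <vulkan/vulkan.hpp>

// Illustrative sketch: create a swapchain; the Unique variant destroys it automatically.
vk::UniqueSwapchainKHR createSwapchain( vk::Device                         device,
                                        vk::SurfaceKHR                     surface,
                                        const vk::SurfaceCapabilitiesKHR & capabilities,
                                        vk::SurfaceFormatKHR               surfaceFormat,
                                        vk::PresentModeKHR                 presentMode )
{
  vk::SwapchainCreateInfoKHR createInfo{};
  createInfo.surface          = surface;
  createInfo.minImageCount    = capabilities.minImageCount + 1;  // a production path would clamp against maxImageCount
  createInfo.imageFormat      = surfaceFormat.format;
  createInfo.imageColorSpace  = surfaceFormat.colorSpace;
  createInfo.imageExtent      = capabilities.currentExtent;
  createInfo.imageArrayLayers = 1;
  createInfo.imageUsage       = vk::ImageUsageFlagBits::eColorAttachment;
  createInfo.imageSharingMode = vk::SharingMode::eExclusive;
  createInfo.preTransform     = capabilities.currentTransform;
  createInfo.compositeAlpha   = vk::CompositeAlphaFlagBitsKHR::eOpaque;
  createInfo.presentMode      = presentMode;
  createInfo.clipped          = VK_TRUE;

  return device.createSwapchainKHRUnique( createInfo );
}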
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySwapchainKHR( + static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySwapchainKHR.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySwapchainKHR && "Function requires " ); +# endif + + d.vkDestroySwapchainKHR( m_device, + static_cast( swapchain ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetSwapchainImagesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainImagesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetSwapchainImagesKHR( + static_cast( m_device ), static_cast( swapchain ), pSwapchainImageCount, reinterpret_cast( pSwapchainImages ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSwapchainImagesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainImagesKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSwapchainImagesKHR && "Function requires " ); +# endif + + std::vector swapchainImages; + uint32_t swapchainImageCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && swapchainImageCount ) + { + swapchainImages.resize( swapchainImageCount ); + result = static_cast( d.vkGetSwapchainImagesKHR( + m_device, static_cast( swapchain ), &swapchainImageCount, reinterpret_cast( swapchainImages.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + if ( swapchainImageCount < swapchainImages.size() ) + { + swapchainImages.resize( swapchainImageCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchainImages ) ); + } + + // wrapper function for command vkGetSwapchainImagesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainImagesKHR.html + template ::value, int>::type> + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSwapchainImagesKHR && "Function requires " ); +# endif + + std::vector swapchainImages( imageAllocator ); + uint32_t swapchainImageCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && swapchainImageCount ) + { + swapchainImages.resize( swapchainImageCount ); + result = static_cast( d.vkGetSwapchainImagesKHR( + m_device, static_cast( swapchain ), &swapchainImageCount, reinterpret_cast( swapchainImages.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + if ( swapchainImageCount < swapchainImages.size() ) + { + swapchainImages.resize( swapchainImageCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchainImages ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkAcquireNextImageKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireNextImageKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore, + VULKAN_HPP_NAMESPACE::Fence fence, + uint32_t * pImageIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkAcquireNextImageKHR( static_cast( m_device ), + static_cast( swapchain ), + timeout, + static_cast( semaphore ), + static_cast( fence ), + pImageIndex ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireNextImageKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireNextImageKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireNextImageKHR && "Function requires " ); +# endif + + uint32_t imageIndex; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAcquireNextImageKHR( + m_device, static_cast( swapchain ), timeout, static_cast( semaphore ), static_cast( fence ), &imageIndex ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eTimeout, + VULKAN_HPP_NAMESPACE::Result::eNotReady, + VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + + return ResultValue( result, std::move( imageIndex ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkQueuePresentKHR, see 
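A sketch around getSwapchainImagesKHR and the enhanced acquireNextImageKHR wrapper above, which returns a ResultValue so that eSuboptimalKHR can be inspected rather than thrown; the semaphore is an assumed, previously created handle.

#include <cstdint>
#include <vector>
#include <vulkan/vulkan.hpp>

// Illustrative sketch: fetch the swapchain images once, then acquire the next one.
uint32_t acquireImage( vk::Device device, vk::SwapchainKHR swapchain, vk::Semaphore imageAvailable )
{
  std::vector<vk::Image> images = device.getSwapchainImagesKHR( swapchain );
  (void)images;  // typically used to build per-image views and framebuffers

  vk::ResultValue<uint32_t> acquired = device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable, {} );
  if ( acquired.result == vk::Result::eSuboptimalKHR )
  {
    // still usable this frame; schedule a swapchain recreation afterwards
  }
  return acquired.value;
}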
https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueuePresentKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkQueuePresentKHR( static_cast( m_queue ), reinterpret_cast( pPresentInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueuePresentKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueuePresentKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueuePresentKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkQueuePresentKHR( m_queue, reinterpret_cast( &presentInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceGroupPresentCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPresentCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDeviceGroupPresentCapabilitiesKHR( + static_cast( m_device ), reinterpret_cast( pDeviceGroupPresentCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceGroupPresentCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPresentCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPresentCapabilitiesKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast( &deviceGroupPresentCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( deviceGroupPresentCapabilities ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceGroupSurfacePresentModesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupSurfacePresentModesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR 
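A sketch for the enhanced Queue::presentKHR wrapper above, which returns vk::Result because eSuboptimalKHR is a valid, non-throwing outcome; the handles are assumed to come from the acquisition sketch earlier.

#include <vulkan/vulkan.hpp>

// Illustrative sketch: present the acquired image and report whether the swapchain
// should be recreated.
bool presentImage( vk::Queue presentQueue, vk::SwapchainKHR swapchain, uint32_t imageIndex, vk::Semaphore renderFinished )
{
  vk::PresentInfoKHR presentInfo{};
  presentInfo.waitSemaphoreCount = 1;
  presentInfo.pWaitSemaphores    = &renderFinished;
  presentInfo.swapchainCount     = 1;
  presentInfo.pSwapchains        = &swapchain;
  presentInfo.pImageIndices      = &imageIndex;

  vk::Result result = presentQueue.presentKHR( presentInfo );
  return result == vk::Result::eSuboptimalKHR;  // caller recreates the swapchain if true
}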
surface, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDeviceGroupSurfacePresentModesKHR( + static_cast( m_device ), static_cast( surface ), reinterpret_cast( pModes ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceGroupSurfacePresentModesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupSurfacePresentModesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceGroupSurfacePresentModesKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModesKHR( + m_device, static_cast( surface ), reinterpret_cast( &modes ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( modes ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDevicePresentRectanglesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDevicePresentRectanglesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pRectCount, + VULKAN_HPP_NAMESPACE::Rect2D * pRects, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( + static_cast( m_physicalDevice ), static_cast( surface ), pRectCount, reinterpret_cast( pRects ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDevicePresentRectanglesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDevicePresentRectanglesKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDevicePresentRectanglesKHR && + "Function requires or " ); +# endif + + std::vector rects; + uint32_t rectCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), &rectCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && rectCount ) + { + rects.resize( rectCount ); + result = static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( + m_physicalDevice, static_cast( surface ), &rectCount, reinterpret_cast( rects.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); 
+ VULKAN_HPP_ASSERT( rectCount <= rects.size() ); + if ( rectCount < rects.size() ) + { + rects.resize( rectCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( rects ) ); + } + + // wrapper function for command vkGetPhysicalDevicePresentRectanglesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDevicePresentRectanglesKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDevicePresentRectanglesKHR && + "Function requires or " ); +# endif + + std::vector rects( rect2DAllocator ); + uint32_t rectCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), &rectCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && rectCount ) + { + rects.resize( rectCount ); + result = static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( + m_physicalDevice, static_cast( surface ), &rectCount, reinterpret_cast( rects.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); + VULKAN_HPP_ASSERT( rectCount <= rects.size() ); + if ( rectCount < rects.size() ) + { + rects.resize( rectCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( rects ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkAcquireNextImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireNextImage2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, + uint32_t * pImageIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkAcquireNextImage2KHR( static_cast( m_device ), reinterpret_cast( pAcquireInfo ), pImageIndex ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireNextImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireNextImage2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireNextImage2KHR && "Function requires or " ); +# endif + + uint32_t imageIndex; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkAcquireNextImage2KHR( m_device, reinterpret_cast( &acquireInfo ), &imageIndex ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eTimeout, + VULKAN_HPP_NAMESPACE::Result::eNotReady, + VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + + return ResultValue( result, std::move( 
imageIndex ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_display === + + // wrapper function for command vkGetPhysicalDeviceDisplayPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceDisplayPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPropertiesKHR.html + template < + typename DisplayPropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPropertiesKHR && "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceDisplayPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPropertiesKHR.html + template < + typename DisplayPropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPropertiesKHR && "Function requires " ); +# endif + + std::vector properties( displayPropertiesKHRAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + 
d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceDisplayPlanePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlanePropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceDisplayPlanePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlanePropertiesKHR.html + template < + typename DisplayPlanePropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR && "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceDisplayPlanePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlanePropertiesKHR.html + template < + typename DisplayPlanePropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const + { + 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR && "Function requires " ); +# endif + + std::vector properties( displayPlanePropertiesKHRAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDisplayPlaneSupportedDisplaysKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneSupportedDisplaysKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + uint32_t * pDisplayCount, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( + static_cast( m_physicalDevice ), planeIndex, pDisplayCount, reinterpret_cast( pDisplays ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDisplayPlaneSupportedDisplaysKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneSupportedDisplaysKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneSupportedDisplaysKHR && "Function requires " ); +# endif + + std::vector displays; + uint32_t displayCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount ) + { + displays.resize( displayCount ); + result = static_cast( + d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + VULKAN_HPP_ASSERT( displayCount <= displays.size() ); + if ( displayCount < displays.size() ) + { + displays.resize( displayCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displays ) ); + } + + // wrapper function for 
command vkGetDisplayPlaneSupportedDisplaysKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneSupportedDisplaysKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneSupportedDisplaysKHR && "Function requires " ); +# endif + + std::vector displays( displayKHRAllocator ); + uint32_t displayCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount ) + { + displays.resize( displayCount ); + result = static_cast( + d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + VULKAN_HPP_ASSERT( displayCount <= displays.size() ); + if ( displayCount < displays.size() ) + { + displays.resize( displayCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displays ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDisplayModePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModePropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDisplayModePropertiesKHR( static_cast( m_physicalDevice ), + static_cast( display ), + pPropertyCount, + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDisplayModePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModePropertiesKHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModePropertiesKHR && "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetDisplayModePropertiesKHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == 
VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetDisplayModePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModePropertiesKHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModePropertiesKHR && "Function requires " ); +# endif + + std::vector properties( displayModePropertiesKHRAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetDisplayModePropertiesKHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateDisplayModeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayModeKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDisplayModeKHR( static_cast( m_physicalDevice ), + static_cast( display ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pMode ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDisplayModeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayModeKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 
1 ) + VULKAN_HPP_ASSERT( d.vkCreateDisplayModeKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateDisplayModeKHR( m_physicalDevice, + static_cast( display ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &mode ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( mode ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDisplayModeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayModeKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDisplayModeKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateDisplayModeKHR( m_physicalDevice, + static_cast( display ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &mode ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( mode, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDisplayPlaneCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, + uint32_t planeIndex, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDisplayPlaneCapabilitiesKHR( static_cast( m_physicalDevice ), + static_cast( mode ), + planeIndex, + reinterpret_cast( pCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDisplayPlaneCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneCapabilitiesKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDisplayPlaneCapabilitiesKHR( + m_physicalDevice, static_cast( mode ), planeIndex, reinterpret_cast( &capabilities ) ) ); + 
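  // Illustrative VK_KHR_display sketch: enumerate displays and modes, then ask what display
  // plane 0 can do with the first reported mode. Assumes enhanced mode with exceptions, an
  // instance created with VK_KHR_display, and that at least one display and mode are reported;
  // all names below are illustrative and not part of the generated header.
  inline VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR firstPlaneCapabilities( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice )
  {
    std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>     displays = physicalDevice.getDisplayPropertiesKHR();
    std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> modes    = physicalDevice.getDisplayModePropertiesKHR( displays.front().display );
    return physicalDevice.getDisplayPlaneCapabilitiesKHR( modes.front().displayMode, 0 /* planeIndex */ );
  }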
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateDisplayPlaneSurfaceKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayPlaneSurfaceKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDisplayPlaneSurfaceKHR( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDisplayPlaneSurfaceKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayPlaneSurfaceKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDisplayPlaneSurfaceKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDisplayPlaneSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDisplayPlaneSurfaceKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayPlaneSurfaceKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDisplayPlaneSurfaceKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDisplayPlaneSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== 
VK_KHR_display_swapchain === + + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateSharedSwapchainsKHR( static_cast( m_device ), + swapchainCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSwapchains ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function requires " ); +# endif + + std::vector swapchains( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSharedSwapchainsKHR( + m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchains ) ); + } + + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function requires " ); +# endif + + std::vector swapchains( createInfos.size(), swapchainKHRAllocator ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSharedSwapchainsKHR( + m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchains ) ); + } + + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR 
& createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSharedSwapchainsKHR( + m_device, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchain ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template >::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, SwapchainKHRAllocator>>::type + Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function requires " ); +# endif + + std::vector swapchains( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSharedSwapchainsKHR( + m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); + std::vector, SwapchainKHRAllocator> uniqueSwapchains; + uniqueSwapchains.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & swapchain : swapchains ) + { + uniqueSwapchains.push_back( UniqueHandle( swapchain, deleter ) ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueSwapchains ) ); + } + + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template >::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, SwapchainKHRAllocator>>::type + Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function requires " ); +# endif + + std::vector swapchains( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSharedSwapchainsKHR( + m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); + std::vector, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator ); + uniqueSwapchains.reserve( createInfos.size() ); + 
detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & swapchain : swapchains ) + { + uniqueSwapchains.push_back( UniqueHandle( swapchain, deleter ) ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueSwapchains ) ); + } + + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSharedSwapchainsKHR( + m_device, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( swapchain, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + // wrapper function for command vkCreateXlibSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXlibSurfaceKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateXlibSurfaceKHR( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateXlibSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXlibSurfaceKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateXlibSurfaceKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateXlibSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateXlibSurfaceKHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXlibSurfaceKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateXlibSurfaceKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateXlibSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceXlibPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceXlibPresentationSupportKHR.html + template + VULKAN_HPP_INLINE Bool32 + PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceXlibPresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex, dpy, visualID ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceXlibPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceXlibPresentationSupportKHR.html + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceXlibPresentationSupportKHR && + "Function requires " ); +# endif + + VkBool32 result = d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID ); + + return static_cast( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + // wrapper function for command vkCreateXcbSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXcbSurfaceKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateXcbSurfaceKHR( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // 
wrapper function for command vkCreateXcbSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXcbSurfaceKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateXcbSurfaceKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateXcbSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateXcbSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXcbSurfaceKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateXcbSurfaceKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateXcbSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceXcbPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceXcbPresentationSupportKHR.html + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceXcbPresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex, connection, visual_id ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceXcbPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceXcbPresentationSupportKHR.html + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t & connection, + xcb_visualid_t visual_id, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 
+ VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceXcbPresentationSupportKHR && + "Function requires " ); +# endif + + VkBool32 result = d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id ); + + return static_cast( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + // wrapper function for command vkCreateWaylandSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWaylandSurfaceKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateWaylandSurfaceKHR( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateWaylandSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWaylandSurfaceKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateWaylandSurfaceKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateWaylandSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateWaylandSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWaylandSurfaceKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateWaylandSurfaceKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateWaylandSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceWaylandPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceWaylandPresentationSupportKHR.html + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + struct wl_display * display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex, display ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceWaylandPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceWaylandPresentationSupportKHR.html + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR && + "Function requires " ); +# endif + + VkBool32 result = d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display ); + + return static_cast( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + // wrapper function for command vkCreateAndroidSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAndroidSurfaceKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateAndroidSurfaceKHR( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateAndroidSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAndroidSurfaceKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateAndroidSurfaceKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateAndroidSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); + } + +# 
ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateAndroidSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAndroidSurfaceKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateAndroidSurfaceKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateAndroidSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + // wrapper function for command vkCreateWin32SurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWin32SurfaceKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateWin32SurfaceKHR( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateWin32SurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWin32SurfaceKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateWin32SurfaceKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateWin32SurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateWin32SurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWin32SurfaceKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createWin32SurfaceKHRUnique( 
const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateWin32SurfaceKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateWin32SurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceWin32PresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceWin32PresentationSupportKHR.html + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex ) ); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + // wrapper function for command vkCreateDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugReportCallbackEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDebugReportCallbackEXT( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pCallback ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugReportCallbackEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDebugReportCallbackEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDebugReportCallbackEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &callback ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( callback ) ); 
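  // Illustrative VK_EXT_debug_report sketch (the extension is deprecated in favour of
  // VK_EXT_debug_utils, but is shown here because the wrappers above expose it). Assumes the
  // instance enabled VK_EXT_debug_report, enhanced mode with exceptions and smart handles,
  // and that <cstdio> is available; all names are illustrative, not part of the generated header.
  static VKAPI_ATTR VkBool32 VKAPI_CALL reportToStderr( VkDebugReportFlagsEXT /*flags*/,
                                                        VkDebugReportObjectTypeEXT /*objectType*/,
                                                        uint64_t /*object*/,
                                                        size_t /*location*/,
                                                        int32_t /*messageCode*/,
                                                        const char * /*pLayerPrefix*/,
                                                        const char * pMessage,
                                                        void * /*pUserData*/ )
  {
    std::fprintf( stderr, "debug report: %s\n", pMessage );
    return VK_FALSE;  // do not abort the Vulkan call that triggered the report
  }

  inline VULKAN_HPP_NAMESPACE::UniqueDebugReportCallbackEXT registerDebugReport( VULKAN_HPP_NAMESPACE::Instance instance )
  {
    // The returned handle must stay alive as long as reports are wanted and be destroyed before the instance.
    VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT createInfo(
      VULKAN_HPP_NAMESPACE::DebugReportFlagBitsEXT::eError | VULKAN_HPP_NAMESPACE::DebugReportFlagBitsEXT::eWarning, &reportToStderr );
    return instance.createDebugReportCallbackEXTUnique( createInfo );
  }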
+ } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugReportCallbackEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDebugReportCallbackEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDebugReportCallbackEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &callback ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( callback, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugReportCallbackEXT.html + template + VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugReportCallbackEXT( + static_cast( m_instance ), static_cast( callback ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugReportCallbackEXT.html + template + VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDebugReportCallbackEXT && "Function requires " ); +# endif + + d.vkDestroyDebugReportCallbackEXT( + m_instance, + static_cast( callback ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugReportCallbackEXT.html + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugReportCallbackEXT( + static_cast( m_instance ), static_cast( callback ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDebugReportCallbackEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugReportCallbackEXT.html + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDebugReportCallbackEXT && "Function requires " ); +# endif + + d.vkDestroyDebugReportCallbackEXT( + m_instance, + static_cast( callback ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDebugReportMessageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugReportMessageEXT.html + template + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDebugReportMessageEXT( static_cast( m_instance ), + static_cast( flags ), + static_cast( objectType_ ), + object, + location, + messageCode, + pLayerPrefix, + pMessage ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDebugReportMessageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugReportMessageEXT.html + template + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, + uint64_t object, + size_t location, + int32_t messageCode, + const std::string & layerPrefix, + const std::string & message, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDebugReportMessageEXT && "Function requires " ); +# endif + + d.vkDebugReportMessageEXT( m_instance, + static_cast( flags ), + static_cast( objectType_ ), + object, + location, + messageCode, + layerPrefix.c_str(), + message.c_str() ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_debug_marker === + + // wrapper function for command vkDebugMarkerSetObjectTagEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugMarkerSetObjectTagEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkDebugMarkerSetObjectTagEXT( static_cast( m_device ), reinterpret_cast( pTagInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDebugMarkerSetObjectTagEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugMarkerSetObjectTagEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 
== 1 ) + VULKAN_HPP_ASSERT( d.vkDebugMarkerSetObjectTagEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDebugMarkerSetObjectNameEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugMarkerSetObjectNameEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkDebugMarkerSetObjectNameEXT( static_cast( m_device ), reinterpret_cast( pNameInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDebugMarkerSetObjectNameEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugMarkerSetObjectNameEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDebugMarkerSetObjectNameEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDebugMarkerBeginEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerBeginEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDebugMarkerBeginEXT( static_cast( m_commandBuffer ), reinterpret_cast( pMarkerInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDebugMarkerBeginEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerBeginEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDebugMarkerBeginEXT && "Function requires " ); +# endif + + d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDebugMarkerEndEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerEndEXT.html + template + VULKAN_HPP_INLINE void 
CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDebugMarkerEndEXT( static_cast( m_commandBuffer ) ); + } + + // wrapper function for command vkCmdDebugMarkerInsertEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerInsertEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDebugMarkerInsertEXT( static_cast( m_commandBuffer ), reinterpret_cast( pMarkerInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDebugMarkerInsertEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerInsertEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDebugMarkerInsertEXT && "Function requires " ); +# endif + + d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_video_queue === + + // wrapper function for command vkGetPhysicalDeviceVideoCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile, + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( pVideoProfile ), + reinterpret_cast( pCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceVideoCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoCapabilitiesKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( + m_physicalDevice, reinterpret_cast( &videoProfile ), reinterpret_cast( &capabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) ); + } + + // wrapper function for command vkGetPhysicalDeviceVideoCapabilitiesKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoCapabilitiesKHR && "Function requires " ); +# endif + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( + m_physicalDevice, reinterpret_cast( &videoProfile ), reinterpret_cast( &capabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceVideoFormatPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, + uint32_t * pVideoFormatPropertyCount, + VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( pVideoFormatInfo ), + pVideoFormatPropertyCount, + reinterpret_cast( pVideoFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceVideoFormatPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function requires " ); +# endif + + std::vector videoFormatProperties; + uint32_t videoFormatPropertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, reinterpret_cast( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + result = static_cast( + d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoFormatProperties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceVideoFormatPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function requires " ); +# endif + + std::vector videoFormatProperties( videoFormatPropertiesKHRAllocator ); + uint32_t videoFormatPropertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, reinterpret_cast( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + result = static_cast( + d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoFormatProperties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceVideoFormatPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function requires " ); +# endif + + std::vector structureChains; + std::vector videoFormatProperties; + uint32_t videoFormatPropertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, reinterpret_cast( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) + { + structureChains.resize( videoFormatPropertyCount ); + videoFormatProperties.resize( 
videoFormatPropertyCount ); + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + videoFormatProperties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + structureChains.resize( videoFormatPropertyCount ); + } + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + structureChains[i].template get() = videoFormatProperties[i]; + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); + } + + // wrapper function for command vkGetPhysicalDeviceVideoFormatPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function requires " ); +# endif + + std::vector structureChains( structureChainAllocator ); + std::vector videoFormatProperties; + uint32_t videoFormatPropertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, reinterpret_cast( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) + { + structureChains.resize( videoFormatPropertyCount ); + videoFormatProperties.resize( videoFormatPropertyCount ); + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + videoFormatProperties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + structureChains.resize( videoFormatPropertyCount ); + } + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + structureChains[i].template get() = videoFormatProperties[i]; + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionKHR.html + template + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateVideoSessionKHR( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pVideoSession ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateVideoSessionKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateVideoSessionKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &videoSession ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoSession ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateVideoSessionKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateVideoSessionKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &videoSession ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( videoSession, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionKHR.html + template + VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyVideoSessionKHR( + static_cast( m_device ), static_cast( videoSession ), reinterpret_cast( 
pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionKHR.html + template + VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionKHR && "Function requires " ); +# endif + + d.vkDestroyVideoSessionKHR( + m_device, + static_cast( videoSession ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionKHR.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyVideoSessionKHR( + static_cast( m_device ), static_cast( videoSession ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionKHR.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionKHR && "Function requires " ); +# endif + + d.vkDestroyVideoSessionKHR( + m_device, + static_cast( videoSession ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetVideoSessionMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetVideoSessionMemoryRequirementsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t * pMemoryRequirementsCount, + VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetVideoSessionMemoryRequirementsKHR( static_cast( m_device ), + static_cast( videoSession ), + pMemoryRequirementsCount, + reinterpret_cast( pMemoryRequirements ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetVideoSessionMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetVideoSessionMemoryRequirementsKHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetVideoSessionMemoryRequirementsKHR && 
"Function requires " ); +# endif + + std::vector memoryRequirements; + uint32_t memoryRequirementsCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast( videoSession ), &memoryRequirementsCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && memoryRequirementsCount ) + { + memoryRequirements.resize( memoryRequirementsCount ); + result = static_cast( + d.vkGetVideoSessionMemoryRequirementsKHR( m_device, + static_cast( videoSession ), + &memoryRequirementsCount, + reinterpret_cast( memoryRequirements.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + + VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); + if ( memoryRequirementsCount < memoryRequirements.size() ) + { + memoryRequirements.resize( memoryRequirementsCount ); + } + return memoryRequirements; + } + + // wrapper function for command vkGetVideoSessionMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetVideoSessionMemoryRequirementsKHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetVideoSessionMemoryRequirementsKHR && "Function requires " ); +# endif + + std::vector memoryRequirements( + videoSessionMemoryRequirementsKHRAllocator ); + uint32_t memoryRequirementsCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast( videoSession ), &memoryRequirementsCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && memoryRequirementsCount ) + { + memoryRequirements.resize( memoryRequirementsCount ); + result = static_cast( + d.vkGetVideoSessionMemoryRequirementsKHR( m_device, + static_cast( videoSession ), + &memoryRequirementsCount, + reinterpret_cast( memoryRequirements.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + + VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); + if ( memoryRequirementsCount < memoryRequirements.size() ) + { + memoryRequirements.resize( memoryRequirementsCount ); + } + return memoryRequirements; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBindVideoSessionMemoryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindVideoSessionMemoryKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t bindSessionMemoryInfoCount, + const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkBindVideoSessionMemoryKHR( static_cast( m_device ), + static_cast( videoSession ), + bindSessionMemoryInfoCount, + reinterpret_cast( pBindSessionMemoryInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindVideoSessionMemoryKHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindVideoSessionMemoryKHR.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::bindVideoSessionMemoryKHR( + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bindSessionMemoryInfos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindVideoSessionMemoryKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkBindVideoSessionMemoryKHR( m_device, + static_cast( videoSession ), + bindSessionMemoryInfos.size(), + reinterpret_cast( bindSessionMemoryInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionParametersKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateVideoSessionParametersKHR( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pVideoSessionParameters ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionParametersKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateVideoSessionParametersKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateVideoSessionParametersKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &videoSessionParameters ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoSessionParameters ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionParametersKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, + 
Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateVideoSessionParametersKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateVideoSessionParametersKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &videoSessionParameters ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + videoSessionParameters, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUpdateVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateVideoSessionParametersKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkUpdateVideoSessionParametersKHR( static_cast( m_device ), + static_cast( videoSessionParameters ), + reinterpret_cast( pUpdateInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUpdateVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateVideoSessionParametersKHR.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateVideoSessionParametersKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkUpdateVideoSessionParametersKHR( m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( &updateInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionParametersKHR.html + template + VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyVideoSessionParametersKHR( static_cast( m_device ), + static_cast( videoSessionParameters ), + reinterpret_cast( pAllocator ) ); + } + 
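For orientation, here is a minimal usage sketch of the video-session wrappers added above (create the session, query and bind its memory, then hand it back to the caller). It assumes <vulkan/vulkan.hpp> is included and the default dispatcher is initialized; `pickMemoryType` is a hypothetical application-side helper, not part of this header.

  // Illustrative only, not part of the generated header.
  uint32_t pickMemoryType( uint32_t memoryTypeBits );  // hypothetical helper: choose a memory type index from memoryTypeBits

  vk::VideoSessionKHR createBoundVideoSession( vk::Device device, const vk::VideoSessionCreateInfoKHR & sessionCreateInfo )
  {
    // enhanced-mode wrappers throw vk::SystemError on failure (unless VULKAN_HPP_NO_EXCEPTIONS is defined)
    vk::VideoSessionKHR session = device.createVideoSessionKHR( sessionCreateInfo );

    // one entry per memory binding the implementation requires for this session
    std::vector<vk::VideoSessionMemoryRequirementsKHR> requirements = device.getVideoSessionMemoryRequirementsKHR( session );

    std::vector<vk::BindVideoSessionMemoryInfoKHR> bindInfos;
    bindInfos.reserve( requirements.size() );
    for ( const vk::VideoSessionMemoryRequirementsKHR & req : requirements )
    {
      vk::DeviceMemory memory =
        device.allocateMemory( vk::MemoryAllocateInfo( req.memoryRequirements.size, pickMemoryType( req.memoryRequirements.memoryTypeBits ) ) );
      bindInfos.emplace_back( req.memoryBindIndex, memory, vk::DeviceSize( 0 ), req.memoryRequirements.size );
    }

    // bind all required memory in a single call
    device.bindVideoSessionMemoryKHR( session, bindInfos );
    return session;
  }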
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkDestroyVideoSessionParametersKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionParametersKHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR            videoSessionParameters,
+                                                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                                                   Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionParametersKHR && "Function <vkDestroyVideoSessionParametersKHR> requires <VK_KHR_video_queue>" );
+#  endif
+
+    d.vkDestroyVideoSessionParametersKHR(
+      m_device,
+      static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkDestroyVideoSessionParametersKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionParametersKHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR   videoSessionParameters,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyVideoSessionParametersKHR( static_cast<VkDevice>( m_device ),
+                                          static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
+                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkDestroyVideoSessionParametersKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionParametersKHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR            videoSessionParameters,
+                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                          Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionParametersKHR && "Function <vkDestroyVideoSessionParametersKHR> requires <VK_KHR_video_queue>" );
+#  endif
+
+    d.vkDestroyVideoSessionParametersKHR(
+      m_device,
+      static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkCmdBeginVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginVideoCodingKHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,
+                                                             Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdBeginVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginVideoCodingKHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo,
+                                                             Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdBeginVideoCodingKHR && "Function <vkCmdBeginVideoCodingKHR> requires <VK_KHR_video_queue>" );
+#  endif
+
+    d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
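A corresponding sketch of the command-buffer side, recording a decode scope with the begin/control/end video-coding wrappers of this section. The reference-slot setup and the contents of `decodeInfo` are application-specific and omitted here; this is illustrative only, not part of the generated header.

  void recordVideoDecode( vk::CommandBuffer              cmd,
                          vk::VideoSessionKHR            session,
                          vk::VideoSessionParametersKHR  parameters,
                          const vk::VideoDecodeInfoKHR & decodeInfo )
  {
    // enter a video coding scope bound to the session (no reference slots in this simplified sketch)
    cmd.beginVideoCodingKHR( vk::VideoBeginCodingInfoKHR( {}, session, parameters ) );

    // reset the session before its first use
    cmd.controlVideoCodingKHR( vk::VideoCodingControlInfoKHR( vk::VideoCodingControlFlagBitsKHR::eReset ) );

    // issue the decode operation, then leave the coding scope
    cmd.decodeVideoKHR( decodeInfo );
    cmd.endVideoCodingKHR( vk::VideoEndCodingInfoKHR() );
  }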
// wrapper function for command vkCmdEndVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndVideoCodingKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndVideoCodingKHR( static_cast( m_commandBuffer ), reinterpret_cast( pEndCodingInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEndVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndVideoCodingKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdEndVideoCodingKHR && "Function requires " ); +# endif + + d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast( &endCodingInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdControlVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdControlVideoCodingKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdControlVideoCodingKHR( static_cast( m_commandBuffer ), + reinterpret_cast( pCodingControlInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdControlVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdControlVideoCodingKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdControlVideoCodingKHR && "Function requires " ); +# endif + + d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast( &codingControlInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_video_decode_queue === + + // wrapper function for command vkCmdDecodeVideoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecodeVideoKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDecodeVideoKHR( static_cast( m_commandBuffer ), reinterpret_cast( pDecodeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDecodeVideoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecodeVideoKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( 
d.vkCmdDecodeVideoKHR && "Function requires " ); +# endif + + d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast( &decodeInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_transform_feedback === + + // wrapper function for command vkCmdBindTransformFeedbackBuffersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindTransformFeedbackBuffersEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindTransformFeedbackBuffersEXT( static_cast( m_commandBuffer ), + firstBinding, + bindingCount, + reinterpret_cast( pBuffers ), + reinterpret_cast( pOffsets ), + reinterpret_cast( pSizes ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindTransformFeedbackBuffersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindTransformFeedbackBuffersEXT.html + template + VULKAN_HPP_INLINE void + CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindTransformFeedbackBuffersEXT && "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginTransformFeedbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginTransformFeedbackEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginTransformFeedbackEXT( static_cast( m_commandBuffer ), + firstCounterBuffer, + counterBufferCount, + reinterpret_cast( pCounterBuffers ), + reinterpret_cast( pCounterBufferOffsets ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginTransformFeedbackEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginTransformFeedbackEXT.html + template + VULKAN_HPP_INLINE void + CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginTransformFeedbackEXT && "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); +# else + if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, + firstCounterBuffer, + counterBuffers.size(), + reinterpret_cast( counterBuffers.data() ), + reinterpret_cast( counterBufferOffsets.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndTransformFeedbackEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndTransformFeedbackEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndTransformFeedbackEXT( static_cast( m_commandBuffer ), + firstCounterBuffer, + counterBufferCount, + reinterpret_cast( pCounterBuffers ), + reinterpret_cast( pCounterBufferOffsets ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEndTransformFeedbackEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndTransformFeedbackEXT.html + template + VULKAN_HPP_INLINE void + CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdEndTransformFeedbackEXT && "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); +# else + if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, + firstCounterBuffer, + counterBuffers.size(), + reinterpret_cast( counterBuffers.data() ), + reinterpret_cast( counterBufferOffsets.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginQueryIndexedEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginQueryIndexedEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + uint32_t index, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginQueryIndexedEXT( + static_cast( m_commandBuffer ), static_cast( queryPool ), query, static_cast( flags ), index ); + } + + // wrapper function for command vkCmdEndQueryIndexedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndQueryIndexedEXT.html + template + VULKAN_HPP_INLINE void + CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndQueryIndexedEXT( static_cast( m_commandBuffer ), static_cast( queryPool ), query, index ); + } + + // wrapper function for command vkCmdDrawIndirectByteCountEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectByteCountEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, + uint32_t firstInstance, + VULKAN_HPP_NAMESPACE::Buffer counterBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndirectByteCountEXT( static_cast( m_commandBuffer ), + instanceCount, + firstInstance, + static_cast( counterBuffer ), + static_cast( counterBufferOffset ), + counterOffset, + vertexStride ); + } + + //=== VK_NVX_binary_import === + + // wrapper function for command vkCreateCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuModuleNVX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateCuModuleNVX( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pModule ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuModuleNVX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCuModuleNVX && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::CuModuleNVX module; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateCuModuleNVX( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &module ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::createCuModuleNVX" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( module ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuModuleNVX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCuModuleNVX && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::CuModuleNVX module; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateCuModuleNVX( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &module ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( module, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuFunctionNVX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateCuFunctionNVX( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFunction ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuFunctionNVX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCuFunctionNVX && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::CuFunctionNVX function; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateCuFunctionNVX( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &function ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( function ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuFunctionNVX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, + 
Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCuFunctionNVX && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::CuFunctionNVX function; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateCuFunctionNVX( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &function ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( function, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuModuleNVX.html + template + VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCuModuleNVX( + static_cast( m_device ), static_cast( module ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuModuleNVX.html + template + VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCuModuleNVX && "Function requires " ); +# endif + + d.vkDestroyCuModuleNVX( m_device, + static_cast( module ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuModuleNVX.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCuModuleNVX( + static_cast( m_device ), static_cast( module ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuModuleNVX.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCuModuleNVX && "Function requires " ); +# endif + + d.vkDestroyCuModuleNVX( m_device, + static_cast( module ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCuFunctionNVX, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuFunctionNVX.html + template + VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCuFunctionNVX( + static_cast( m_device ), static_cast( function ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuFunctionNVX.html + template + VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCuFunctionNVX && "Function requires " ); +# endif + + d.vkDestroyCuFunctionNVX( m_device, + static_cast( function ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuFunctionNVX.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCuFunctionNVX( + static_cast( m_device ), static_cast( function ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuFunctionNVX.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCuFunctionNVX && "Function requires " ); +# endif + + d.vkDestroyCuFunctionNVX( m_device, + static_cast( function ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCuLaunchKernelNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCuLaunchKernelNVX.html + template + VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCuLaunchKernelNVX( static_cast( m_commandBuffer ), reinterpret_cast( pLaunchInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCuLaunchKernelNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCuLaunchKernelNVX.html + template + VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( 
d.vkCmdCuLaunchKernelNVX && "Function requires " ); +# endif + + d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast( &launchInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NVX_image_view_handle === + + // wrapper function for command vkGetImageViewHandleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewHandleNVX.html + template + VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetImageViewHandleNVX( static_cast( m_device ), reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageViewHandleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewHandleNVX.html + template + VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageViewHandleNVX && "Function requires " ); +# endif + + uint32_t result = d.vkGetImageViewHandleNVX( m_device, reinterpret_cast( &info ) ); + + return result; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageViewHandle64NVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewHandle64NVX.html + template + VULKAN_HPP_INLINE uint64_t Device::getImageViewHandle64NVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetImageViewHandle64NVX( static_cast( m_device ), reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageViewHandle64NVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewHandle64NVX.html + template + VULKAN_HPP_INLINE uint64_t Device::getImageViewHandle64NVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageViewHandle64NVX && "Function requires " ); +# endif + + uint64_t result = d.vkGetImageViewHandle64NVX( m_device, reinterpret_cast( &info ) ); + + return result; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageViewAddressNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewAddressNVX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetImageViewAddressNVX( + static_cast( m_device ), static_cast( imageView ), reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageViewAddressNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewAddressNVX.html + template + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageViewAddressNVX && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetImageViewAddressNVX( m_device, static_cast( imageView ), reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_AMD_draw_indirect_count === + + // wrapper function for command vkCmdDrawIndirectCountAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectCountAMD.html + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndirectCountAMD( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + // wrapper function for command vkCmdDrawIndexedIndirectCountAMD, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirectCountAMD.html + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndexedIndirectCountAMD( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + //=== VK_AMD_shader_info === + + // wrapper function for command vkGetShaderInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderInfoAMD.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetShaderInfoAMD( static_cast( m_device ), + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + pInfoSize, + pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetShaderInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderInfoAMD.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type 
+ Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetShaderInfoAMD && "Function requires " ); +# endif + + std::vector info; + size_t infoSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + reinterpret_cast( info.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + if ( infoSize < info.size() ) + { + info.resize( infoSize ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( info ) ); + } + + // wrapper function for command vkGetShaderInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderInfoAMD.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetShaderInfoAMD && "Function requires " ); +# endif + + std::vector info( uint8_tAllocator ); + size_t infoSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + reinterpret_cast( info.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + if ( infoSize < info.size() ) + { + info.resize( infoSize ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_dynamic_rendering === + + // wrapper function for command vkCmdBeginRenderingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderingKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginRenderingKHR( static_cast( 
m_commandBuffer ), reinterpret_cast( pRenderingInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginRenderingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderingKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginRenderingKHR && "Function requires or " ); +# endif + + d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast( &renderingInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndRenderingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderingKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRenderingKHR( static_cast( m_commandBuffer ) ); + } + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + // wrapper function for command vkCreateStreamDescriptorSurfaceGGP, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateStreamDescriptorSurfaceGGP.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateStreamDescriptorSurfaceGGP( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateStreamDescriptorSurfaceGGP, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateStreamDescriptorSurfaceGGP.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateStreamDescriptorSurfaceGGP && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateStreamDescriptorSurfaceGGP, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateStreamDescriptorSurfaceGGP.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + 
Instance::createStreamDescriptorSurfaceGGPUnique( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateStreamDescriptorSurfaceGGP && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalImageFormatPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalImageFormatPropertiesNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( static_cast( m_physicalDevice ), + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + static_cast( externalHandleType ), + reinterpret_cast( pExternalImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalImageFormatPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalImageFormatPropertiesNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + 
static_cast( flags ), + static_cast( externalHandleType ), + reinterpret_cast( &externalImageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( externalImageFormatProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + + // wrapper function for command vkGetMemoryWin32HandleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandleNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryWin32HandleNV( + static_cast( m_device ), static_cast( memory ), static_cast( handleType ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryWin32HandleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandleNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryWin32HandleNV( + VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandleNV && "Function requires " ); +# endif + + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetMemoryWin32HandleNV( m_device, static_cast( memory ), static_cast( handleType ), &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + + // wrapper function for command vkGetPhysicalDeviceFeatures2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2KHR.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFeatures2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceFeatures2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; + 
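    // Illustrative usage sketch (editorial example under assumptions, not generated code): callers
    // typically use this enhanced overload to fetch the feature struct by value, or the StructureChain
    // overload below to query extension feature structs in the same call. Assuming a valid
    // vk::PhysicalDevice named `physicalDevice` (hypothetical) with VK_KHR_get_physical_device_properties2
    // available:
    //
    //   vk::PhysicalDeviceFeatures2 features = physicalDevice.getFeatures2KHR();
    //   auto chain = physicalDevice.getFeatures2KHR<vk::PhysicalDeviceFeatures2,
    //                                               vk::PhysicalDevice16BitStorageFeatures>();
    //   bool storage16 = chain.get<vk::PhysicalDevice16BitStorageFeatures>().storageBuffer16BitAccess;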
d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( &features ) ); + + return features; + } + + // wrapper function for command vkGetPhysicalDeviceFeatures2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get(); + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( &features ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2KHR.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceProperties2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast( &properties ) ); + + return properties; + } + + // wrapper function for command vkGetPhysicalDeviceProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get(); + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast( &properties ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2KHR.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + 
VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFormatProperties2KHR( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( pFormatProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; + d.vkGetPhysicalDeviceFormatProperties2KHR( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return formatProperties; + } + + // wrapper function for command vkGetPhysicalDeviceFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get(); + d.vkGetPhysicalDeviceFormatProperties2KHR( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( pImageFormatInfo ), + reinterpret_cast( pImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2KHR && + "Function requires or " ); +# endif + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + static_cast( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html + template < + typename QueueFamilyProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && + "Function requires or " ); +# endif + + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, 
nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html + template < + typename QueueFamilyProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && + "Function requires or " ); +# endif + + std::vector queueFamilyProperties( queueFamilyProperties2Allocator ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && + "Function requires or " ); +# endif + + std::vector structureChains; + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + structureChains.resize( queueFamilyPropertyCount ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = structureChains[i].template get().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + structureChains.resize( queueFamilyPropertyCount ); + } + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + structureChains[i].template get() = queueFamilyProperties[i]; + } + return structureChains; + } + + // wrapper function for command 
vkGetPhysicalDeviceQueueFamilyProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && + "Function requires or " ); +# endif + + std::vector structureChains( structureChainAllocator ); + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + structureChains.resize( queueFamilyPropertyCount ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = structureChains[i].template get().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + structureChains.resize( queueFamilyPropertyCount ); + } + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + structureChains[i].template get() = queueFamilyProperties[i]; + } + return structureChains; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2KHR.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceMemoryProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( pMemoryProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + + return memoryProperties; + } + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = + structureChain.template get(); + d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2KHR.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( pFormatInfo ), + pPropertyCount, + reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2KHR.html + template < + typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR && + "Function requires or " ); +# endif + + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2KHR.html + template < + typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR && + 
"Function requires or " ); +# endif + + std::vector properties( sparseImageFormatProperties2Allocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_device_group === + + // wrapper function for command vkGetDeviceGroupPeerMemoryFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPeerMemoryFeaturesKHR.html + template + VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceGroupPeerMemoryFeaturesKHR( + static_cast( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceGroupPeerMemoryFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPeerMemoryFeaturesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( + uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPeerMemoryFeaturesKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; + d.vkGetDeviceGroupPeerMemoryFeaturesKHR( + m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( &peerMemoryFeatures ) ); + + return peerMemoryFeatures; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDeviceMaskKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDeviceMaskKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDeviceMaskKHR( static_cast( m_commandBuffer ), deviceMask ); + } + + // wrapper function for command vkCmdDispatchBaseKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchBaseKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchBaseKHR( static_cast( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + 
// wrapper function for command vkCreateViSurfaceNN, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateViSurfaceNN.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateViSurfaceNN( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateViSurfaceNN, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateViSurfaceNN.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateViSurfaceNN && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateViSurfaceNN( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateViSurfaceNN, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateViSurfaceNN.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateViSurfaceNN && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateViSurfaceNN( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_maintenance1 === + + // wrapper function for command vkTrimCommandPoolKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTrimCommandPoolKHR.html + template + VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkTrimCommandPoolKHR( 
static_cast( m_device ), static_cast( commandPool ), static_cast( flags ) ); + } + + //=== VK_KHR_device_group_creation === + + // wrapper function for command vkEnumeratePhysicalDeviceGroupsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroupsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( static_cast( m_instance ), + pPhysicalDeviceGroupCount, + reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumeratePhysicalDeviceGroupsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroupsKHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroupsKHR && + "Function requires or " ); +# endif + + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( + m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); + } + + // wrapper function for command vkEnumeratePhysicalDeviceGroupsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroupsKHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroupsKHR && + "Function requires or " ); +# endif + + std::vector physicalDeviceGroupProperties( + physicalDeviceGroupPropertiesAllocator ); + uint32_t physicalDeviceGroupCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( 
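  // --- Illustrative usage sketch (editor addition; not generated code and not part of this patch). ---
  // Typical call of the enhanced enumeratePhysicalDeviceGroupsKHR wrapper above: the VK_INCOMPLETE
  // retry loop is handled internally and a vector is returned. Assumes an `instance` created with
  // VK_KHR_device_group_creation enabled and exceptions enabled.
  //
  //   void listDeviceGroups( vk::Instance instance )
  //   {
  //     std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroupsKHR();
  //     for ( vk::PhysicalDeviceGroupProperties const & g : groups )
  //     {
  //       uint32_t devicesInGroup = g.physicalDeviceCount;   // devices that can form one logical device
  //       (void) devicesInGroup;
  //     }
  //   }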
result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( + m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_external_memory_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalBufferPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalBufferPropertiesKHR.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( pExternalBufferInfo ), + reinterpret_cast( pExternalBufferProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalBufferPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalBufferPropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties + PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalBufferPropertiesKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; + d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, + reinterpret_cast( &externalBufferInfo ), + reinterpret_cast( &externalBufferProperties ) ); + + return externalBufferProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + + // wrapper function for command vkGetMemoryWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryWin32HandleKHR( + static_cast( m_device ), reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // 
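  // --- Illustrative usage sketch (editor addition; not generated code and not part of this patch). ---
  // Typical call of the getExternalBufferPropertiesKHR wrapper above, querying whether a buffer
  // usage/handle-type combination is exportable. `physicalDevice` is an assumed, pre-existing handle.
  //
  //   void queryExternalBufferSupport( vk::PhysicalDevice physicalDevice )
  //   {
  //     vk::PhysicalDeviceExternalBufferInfo info;
  //     info.usage      = vk::BufferUsageFlagBits::eTransferSrc;
  //     info.handleType = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  //
  //     vk::ExternalBufferProperties props = physicalDevice.getExternalBufferPropertiesKHR( info );
  //     if ( props.externalMemoryProperties.externalMemoryFeatures & vk::ExternalMemoryFeatureFlagBits::eExportable )
  //     {
  //       // memory bound to such a buffer can be exported through VK_KHR_external_memory_fd / _win32
  //     }
  //   }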
wrapper function for command vkGetMemoryWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandleKHR && "Function requires " ); +# endif + + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetMemoryWin32HandlePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandlePropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( static_cast( m_device ), + static_cast( handleType ), + handle, + reinterpret_cast( pMemoryWin32HandleProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryWin32HandlePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandlePropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandlePropertiesKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetMemoryWin32HandlePropertiesKHR( m_device, + static_cast( handleType ), + handle, + reinterpret_cast( &memoryWin32HandleProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryWin32HandleProperties ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + + // wrapper function for command vkGetMemoryFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryFdKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( 
d.vkGetMemoryFdKHR( static_cast( m_device ), reinterpret_cast( pGetFdInfo ), pFd ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryFdKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryFdKHR && "Function requires " ); +# endif + + int fd; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetMemoryFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetMemoryFdPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryFdPropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + int fd, + VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryFdPropertiesKHR( static_cast( m_device ), + static_cast( handleType ), + fd, + reinterpret_cast( pMemoryFdProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryFdPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryFdPropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryFdPropertiesKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetMemoryFdPropertiesKHR( + m_device, static_cast( handleType ), fd, reinterpret_cast( &memoryFdProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryFdProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_external_semaphore_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalSemaphorePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalSemaphorePropertiesKHR.html + template + VULKAN_HPP_INLINE void + PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
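  // --- Illustrative usage sketch (editor addition; not generated code and not part of this patch). ---
  // Typical call of the enhanced getMemoryFdKHR wrapper above. Assumes `memory` was allocated with a
  // vk::ExportMemoryAllocateInfo chained in; the returned fd is owned by the caller and must be closed
  // (or handed to an import) by the application.
  //
  //   int exportMemoryFd( vk::Device device, vk::DeviceMemory memory )
  //   {
  //     vk::MemoryGetFdInfoKHR getInfo( memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //     int fd = device.getMemoryFdKHR( getInfo );   // throws vk::SystemError on failure (exceptions enabled)
  //     return fd;
  //   }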
d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( pExternalSemaphoreInfo ), + reinterpret_cast( pExternalSemaphoreProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalSemaphorePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalSemaphorePropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; + d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, + reinterpret_cast( &externalSemaphoreInfo ), + reinterpret_cast( &externalSemaphoreProperties ) ); + + return externalSemaphoreProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + + // wrapper function for command vkImportSemaphoreWin32HandleKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkImportSemaphoreWin32HandleKHR( + static_cast( m_device ), reinterpret_cast( pImportSemaphoreWin32HandleInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkImportSemaphoreWin32HandleKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkImportSemaphoreWin32HandleKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetSemaphoreWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + 
{ + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetSemaphoreWin32HandleKHR( + static_cast( m_device ), reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSemaphoreWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSemaphoreWin32HandleKHR && "Function requires " ); +# endif + + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + + // wrapper function for command vkImportSemaphoreFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreFdKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkImportSemaphoreFdKHR( static_cast( m_device ), reinterpret_cast( pImportSemaphoreFdInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkImportSemaphoreFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreFdKHR.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkImportSemaphoreFdKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( &importSemaphoreFdInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetSemaphoreFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreFdKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetSemaphoreFdKHR( static_cast( m_device ), reinterpret_cast( pGetFdInfo ), pFd ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // 
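  // --- Illustrative usage sketch (editor addition; not generated code and not part of this patch). ---
  // Typical call of the importSemaphoreFdKHR wrapper defined in this block. `semaphore` and `fd` are
  // assumed to come from a matching export (VK_KHR_external_semaphore_fd, opaque-fd handle type);
  // on success the driver takes ownership of the file descriptor.
  //
  //   void importSemaphore( vk::Device device, vk::Semaphore semaphore, int fd )
  //   {
  //     vk::ImportSemaphoreFdInfoKHR importInfo;
  //     importInfo.semaphore  = semaphore;
  //     importInfo.handleType = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
  //     importInfo.fd         = fd;
  //     device.importSemaphoreFdKHR( importInfo );
  //   }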
wrapper function for command vkGetSemaphoreFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreFdKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSemaphoreFdKHR && "Function requires " ); +# endif + + int fd; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_push_descriptor === + + // wrapper function for command vkCmdPushDescriptorSetKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSetKHR( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWriteCount, + reinterpret_cast( pDescriptorWrites ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSetKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetKHR.html + template + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetKHR && "Function requires or " ); +# endif + + d.vkCmdPushDescriptorSetKHR( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDescriptorSetWithTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplateKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSetWithTemplateKHR( static_cast( m_commandBuffer ), + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSetWithTemplateKHR, see + // 
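  // --- Illustrative usage sketch (editor addition; not generated code and not part of this patch). ---
  // Typical call of the ArrayProxy overload of pushDescriptorSetKHR defined below: descriptors are
  // written directly into the command buffer, so no descriptor set is allocated and dstSet stays null.
  // `cmd`, `layout` and `uniformBuffer` are assumed handles; set 0 of `layout` must have been created
  // with the push-descriptor flag.
  //
  //   void pushUniform( vk::CommandBuffer cmd, vk::PipelineLayout layout, vk::Buffer uniformBuffer )
  //   {
  //     vk::DescriptorBufferInfo bufferInfo( uniformBuffer, 0, VK_WHOLE_SIZE );
  //     vk::WriteDescriptorSet   write;
  //     write.dstBinding      = 0;
  //     write.descriptorCount = 1;
  //     write.descriptorType  = vk::DescriptorType::eUniformBuffer;
  //     write.pBufferInfo     = &bufferInfo;
  //     cmd.pushDescriptorSetKHR( vk::PipelineBindPoint::eGraphics, layout, 0 /*set*/, write );
  //   }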
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplateKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkCmdPushDescriptorSetWithTemplateKHR && + "Function requires or or " ); +# endif + + d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + reinterpret_cast( &data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_conditional_rendering === + + // wrapper function for command vkCmdBeginConditionalRenderingEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginConditionalRenderingEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginConditionalRenderingEXT( static_cast( m_commandBuffer ), + reinterpret_cast( pConditionalRenderingBegin ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginConditionalRenderingEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginConditionalRenderingEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginConditionalRenderingEXT && "Function requires " ); +# endif + + d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast( &conditionalRenderingBegin ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndConditionalRenderingEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndConditionalRenderingEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndConditionalRenderingEXT( static_cast( m_commandBuffer ) ); + } + + //=== VK_KHR_descriptor_update_template === + + // wrapper function for command vkCreateDescriptorUpdateTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplateKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDescriptorUpdateTemplateKHR( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + 
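  // --- Illustrative usage sketch (editor addition; not generated code and not part of this patch). ---
  // Typical begin/end pairing for the VK_EXT_conditional_rendering wrappers defined in this block.
  // `predicateBuffer` is an assumed buffer created with the conditional-rendering usage flag; draws
  // recorded inside the scope are skipped when the 32-bit value at the given offset is zero.
  //
  //   void conditionalDraws( vk::CommandBuffer cmd, vk::Buffer predicateBuffer )
  //   {
  //     vk::ConditionalRenderingBeginInfoEXT begin( predicateBuffer, 0 /*offset*/ );
  //     cmd.beginConditionalRenderingEXT( begin );
  //     // ... record conditional draw calls here ...
  //     cmd.endConditionalRenderingEXT();
  //   }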
reinterpret_cast( pAllocator ), + reinterpret_cast( pDescriptorUpdateTemplate ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDescriptorUpdateTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplateKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplateKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorUpdateTemplate ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDescriptorUpdateTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplateKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplateKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + descriptorUpdateTemplate, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorUpdateTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplateKHR.html + template + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorUpdateTemplateKHR( static_cast( m_device ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command 
vkDestroyDescriptorUpdateTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplateKHR.html + template + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplateKHR && + "Function requires or " ); +# endif + + d.vkDestroyDescriptorUpdateTemplateKHR( + m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUpdateDescriptorSetWithTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSetWithTemplateKHR.html + template + VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUpdateDescriptorSetWithTemplateKHR( static_cast( m_device ), + static_cast( descriptorSet ), + static_cast( descriptorUpdateTemplate ), + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUpdateDescriptorSetWithTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSetWithTemplateKHR.html + template + VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSetWithTemplateKHR && + "Function requires or " ); +# endif + + d.vkUpdateDescriptorSetWithTemplateKHR( m_device, + static_cast( descriptorSet ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( &data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_clip_space_w_scaling === + + // wrapper function for command vkCmdSetViewportWScalingNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWScalingNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportWScalingNV( + static_cast( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast( pViewportWScalings ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetViewportWScalingNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWScalingNV.html + template + VULKAN_HPP_INLINE void + CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportWScalings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
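  // --- Illustrative usage sketch (editor addition; not generated code and not part of this patch). ---
  // Shows the raw-data overload of updateDescriptorSetWithTemplateKHR defined above. `MyEntry` is a
  // hypothetical struct whose layout matches the entries the (assumed, already created) update
  // template was built with; the wrapper passes its address through as the template's source data.
  //
  //   struct MyEntry { vk::DescriptorBufferInfo uniform; };
  //
  //   void updateViaTemplate( vk::Device device, vk::DescriptorSet set,
  //                           vk::DescriptorUpdateTemplate updateTemplate, MyEntry const & entry )
  //   {
  //     device.updateDescriptorSetWithTemplateKHR( set, updateTemplate, entry );
  //     device.destroyDescriptorUpdateTemplateKHR( updateTemplate );   // when no longer needed
  //   }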
VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetViewportWScalingNV && "Function requires " ); +# endif + + d.vkCmdSetViewportWScalingNV( + m_commandBuffer, firstViewport, viewportWScalings.size(), reinterpret_cast( viewportWScalings.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_direct_mode_display === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkReleaseDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseDisplayEXT.html + template + VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkReleaseDisplayEXT( static_cast( m_physicalDevice ), static_cast( display ) ) ); + } +#else + // wrapper function for command vkReleaseDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseDisplayEXT.html + template + VULKAN_HPP_INLINE void PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkReleaseDisplayEXT && "Function requires " ); +# endif + + d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + // wrapper function for command vkAcquireXlibDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireXlibDisplayEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display * dpy, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkAcquireXlibDisplayEXT( static_cast( m_physicalDevice ), dpy, static_cast( display ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireXlibDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireXlibDisplayEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireXlibDisplayEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast( display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetRandROutputDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRandROutputDisplayEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display * dpy, + RROutput rrOutput, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + 
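  // --- Illustrative usage sketch (editor addition; not generated code and not part of this patch). ---
  // Typical call of the ArrayProxy overload of setViewportWScalingNV defined above; a plain C array
  // converts implicitly to ArrayProxy. Assumes the pipeline enables VK_NV_clip_space_w_scaling and
  // has the matching number of viewports.
  //
  //   void setWScaling( vk::CommandBuffer cmd )
  //   {
  //     vk::ViewportWScalingNV scalings[2] = { vk::ViewportWScalingNV( 1.0f, 1.0f ),
  //                                            vk::ViewportWScalingNV( 0.5f, 0.5f ) };
  //     cmd.setViewportWScalingNV( 0 /*firstViewport*/, scalings );
  //   }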
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetRandROutputDisplayEXT( static_cast( m_physicalDevice ), dpy, rrOutput, reinterpret_cast( pDisplay ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRandROutputDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRandROutputDisplayEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRandROutputDisplayEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DisplayKHR display; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast( &display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkGetRandROutputDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRandROutputDisplayEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRandROutputDisplayEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DisplayKHR display; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast( &display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( display, detail::ObjectRelease( *this, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2EXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( static_cast( m_physicalDevice ), + static_cast( surface ), + reinterpret_cast( pSurfaceCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2EXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType::type + PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( + m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_display_control === + + // wrapper function for command vkDisplayPowerControlEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDisplayPowerControlEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkDisplayPowerControlEXT( + static_cast( m_device ), static_cast( display ), reinterpret_cast( pDisplayPowerInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDisplayPowerControlEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDisplayPowerControlEXT.html + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDisplayPowerControlEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkDisplayPowerControlEXT( m_device, static_cast( display ), reinterpret_cast( &displayPowerInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkRegisterDeviceEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDeviceEventEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkRegisterDeviceEventEXT( static_cast( m_device ), + reinterpret_cast( pDeviceEventInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkRegisterDeviceEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDeviceEventEXT.html + template + 
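  // --- Illustrative usage sketch (editor addition; not generated code and not part of this patch). ---
  // Combines the getSurfaceCapabilities2EXT and displayPowerControlEXT wrappers above: query the
  // surface counters supported for a display surface, then drive the display's power state.
  // `gpu`, `device`, `surface` and `display` are assumed, pre-existing handles.
  //
  //   void wakeDisplay( vk::PhysicalDevice gpu, vk::Device device, vk::SurfaceKHR surface, vk::DisplayKHR display )
  //   {
  //     vk::SurfaceCapabilities2EXT caps = gpu.getSurfaceCapabilities2EXT( surface );
  //     if ( caps.supportedSurfaceCounters & vk::SurfaceCounterFlagBitsEXT::eVblank )
  //     {
  //       vk::DisplayPowerInfoEXT powerInfo( vk::DisplayPowerStateEXT::eOn );
  //       device.displayPowerControlEXT( display, powerInfo );
  //     }
  //   }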
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkRegisterDeviceEventEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Fence fence; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkRegisterDeviceEventEXT( + m_device, + reinterpret_cast( &deviceEventInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkRegisterDeviceEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDeviceEventEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkRegisterDeviceEventEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Fence fence; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkRegisterDeviceEventEXT( + m_device, + reinterpret_cast( &deviceEventInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( fence, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkRegisterDisplayEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDisplayEventEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkRegisterDisplayEventEXT( static_cast( m_device ), + static_cast( display ), + reinterpret_cast( pDisplayEventInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkRegisterDisplayEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDisplayEventEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( 
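  // --- Illustrative usage sketch (editor addition; not generated code and not part of this patch). ---
  // Typical call of the registerEventEXTUnique wrapper above: a fence is created that signals on the
  // next display hotplug event and is destroyed automatically by the unique handle. Assumes the
  // default dispatcher and exceptions enabled.
  //
  //   void waitForHotplug( vk::Device device )
  //   {
  //     vk::DeviceEventInfoEXT eventInfo( vk::DeviceEventTypeEXT::eDisplayHotplug );
  //     vk::UniqueFence        fence = device.registerEventEXTUnique( eventInfo );
  //     vk::Result             r     = device.waitForFences( *fence, VK_TRUE, UINT64_MAX );
  //     (void) r;   // eSuccess once a hotplug occurred
  //   }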
d.vkRegisterDisplayEventEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Fence fence; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkRegisterDisplayEventEXT( + m_device, + static_cast( display ), + reinterpret_cast( &displayEventInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkRegisterDisplayEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDisplayEventEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkRegisterDisplayEventEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Fence fence; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkRegisterDisplayEventEXT( + m_device, + static_cast( display ), + reinterpret_cast( &displayEventInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( fence, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetSwapchainCounterEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainCounterEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetSwapchainCounterEXT( + static_cast( m_device ), static_cast( swapchain ), static_cast( counter ), pCounterValue ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSwapchainCounterEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainCounterEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::getSwapchainCounterEXT( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSwapchainCounterEXT && "Function requires " ); +# endif + + uint64_t counterValue; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetSwapchainCounterEXT( m_device, static_cast( swapchain ), static_cast( counter ), &counterValue ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" ); + + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( counterValue ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_GOOGLE_display_timing === + + // wrapper function for command vkGetRefreshCycleDurationGOOGLE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRefreshCycleDurationGOOGLE.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetRefreshCycleDurationGOOGLE( static_cast( m_device ), + static_cast( swapchain ), + reinterpret_cast( pDisplayTimingProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRefreshCycleDurationGOOGLE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRefreshCycleDurationGOOGLE.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRefreshCycleDurationGOOGLE && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetRefreshCycleDurationGOOGLE( + m_device, static_cast( swapchain ), reinterpret_cast( &displayTimingProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displayTimingProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPastPresentationTimingGOOGLE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPastPresentationTimingGOOGLE.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPastPresentationTimingGOOGLE( static_cast( m_device ), + static_cast( swapchain ), + pPresentationTimingCount, + reinterpret_cast( pPresentationTimings ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPastPresentationTimingGOOGLE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPastPresentationTimingGOOGLE.html + template < + typename PastPresentationTimingGOOGLEAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPastPresentationTimingGOOGLE && "Function requires " ); +# endif + + 
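  // --- Illustrative usage sketch (editor addition; not generated code and not part of this patch). ---
  // Typical call of the enhanced getRefreshCycleDurationGOOGLE wrapper above, used by frame pacing
  // code to learn the display's refresh period. `swapchain` is an assumed handle created on a device
  // with VK_GOOGLE_display_timing enabled.
  //
  //   uint64_t refreshPeriodNs( vk::Device device, vk::SwapchainKHR swapchain )
  //   {
  //     vk::RefreshCycleDurationGOOGLE rc = device.getRefreshCycleDurationGOOGLE( swapchain );
  //     return rc.refreshDuration;   // nanoseconds between vertical blanking periods
  //   }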
std::vector presentationTimings; + uint32_t presentationTimingCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentationTimingCount ) + { + presentationTimings.resize( presentationTimingCount ); + result = static_cast( + d.vkGetPastPresentationTimingGOOGLE( m_device, + static_cast( swapchain ), + &presentationTimingCount, + reinterpret_cast( presentationTimings.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); + VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); + if ( presentationTimingCount < presentationTimings.size() ) + { + presentationTimings.resize( presentationTimingCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentationTimings ) ); + } + + // wrapper function for command vkGetPastPresentationTimingGOOGLE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPastPresentationTimingGOOGLE.html + template < + typename PastPresentationTimingGOOGLEAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPastPresentationTimingGOOGLE && "Function requires " ); +# endif + + std::vector presentationTimings( + pastPresentationTimingGOOGLEAllocator ); + uint32_t presentationTimingCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentationTimingCount ) + { + presentationTimings.resize( presentationTimingCount ); + result = static_cast( + d.vkGetPastPresentationTimingGOOGLE( m_device, + static_cast( swapchain ), + &presentationTimingCount, + reinterpret_cast( presentationTimings.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); + VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); + if ( presentationTimingCount < presentationTimings.size() ) + { + presentationTimings.resize( presentationTimingCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentationTimings ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_discard_rectangles === + + // wrapper function for command vkCmdSetDiscardRectangleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDiscardRectangleEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDiscardRectangleEXT( + static_cast( m_commandBuffer ), firstDiscardRectangle, discardRectangleCount, reinterpret_cast( pDiscardRectangles ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetDiscardRectangleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDiscardRectangleEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + VULKAN_HPP_NAMESPACE::ArrayProxy const & discardRectangles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetDiscardRectangleEXT && "Function requires " ); +# endif + + d.vkCmdSetDiscardRectangleEXT( + m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast( discardRectangles.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDiscardRectangleEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDiscardRectangleEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDiscardRectangleEnableEXT( static_cast( m_commandBuffer ), static_cast( discardRectangleEnable ) ); + } + + // wrapper function for command vkCmdSetDiscardRectangleModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDiscardRectangleModeEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDiscardRectangleModeEXT( static_cast( m_commandBuffer ), static_cast( discardRectangleMode ) ); + } + + //=== VK_EXT_hdr_metadata === + + // wrapper function for command vkSetHdrMetadataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetHdrMetadataEXT.html + template + VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkSetHdrMetadataEXT( static_cast( m_device ), + swapchainCount, + reinterpret_cast( pSwapchains ), + reinterpret_cast( pMetadata ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetHdrMetadataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetHdrMetadataEXT.html + template + VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & swapchains, + VULKAN_HPP_NAMESPACE::ArrayProxy const & metadata, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetHdrMetadataEXT && "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); +# else + if ( swapchains.size() 
!= metadata.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkSetHdrMetadataEXT( m_device, + swapchains.size(), + reinterpret_cast( swapchains.data() ), + reinterpret_cast( metadata.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_create_renderpass2 === + + // wrapper function for command vkCreateRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateRenderPass2KHR( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pRenderPass ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRenderPass2KHR && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateRenderPass2KHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRenderPass2KHR && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateRenderPass2KHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( renderPass, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command 
vkCmdBeginRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginRenderPass2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( pRenderPassBegin ), + reinterpret_cast( pSubpassBeginInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass2KHR && "Function requires or " ); +# endif + + d.vkCmdBeginRenderPass2KHR( + m_commandBuffer, reinterpret_cast( &renderPassBegin ), reinterpret_cast( &subpassBeginInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdNextSubpass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdNextSubpass2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( pSubpassBeginInfo ), + reinterpret_cast( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdNextSubpass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdNextSubpass2KHR && "Function requires or " ); +# endif + + d.vkCmdNextSubpass2KHR( + m_commandBuffer, reinterpret_cast( &subpassBeginInfo ), reinterpret_cast( &subpassEndInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRenderPass2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEndRenderPass2KHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdEndRenderPass2KHR && "Function requires or " ); +# endif + + d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( &subpassEndInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_shared_presentable_image === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSwapchainStatusKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainStatusKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetSwapchainStatusKHR( static_cast( m_device ), static_cast( swapchain ) ) ); + } +#else + // wrapper function for command vkGetSwapchainStatusKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainStatusKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSwapchainStatusKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + + return static_cast( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_external_fence_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalFencePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalFencePropertiesKHR.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceExternalFencePropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( pExternalFenceInfo ), + reinterpret_cast( pExternalFenceProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalFencePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalFencePropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties + PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalFencePropertiesKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; + d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, + reinterpret_cast( &externalFenceInfo ), + reinterpret_cast( &externalFenceProperties ) ); + + return externalFenceProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + + // wrapper function for command vkImportFenceWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportFenceWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkImportFenceWin32HandleKHR( static_cast( m_device ), + reinterpret_cast( pImportFenceWin32HandleInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkImportFenceWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportFenceWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkImportFenceWin32HandleKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( &importFenceWin32HandleInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetFenceWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetFenceWin32HandleKHR( static_cast( m_device ), reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetFenceWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetFenceWin32HandleKHR && "Function requires " ); +# endif + + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + 
d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + + // wrapper function for command vkImportFenceFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportFenceFdKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkImportFenceFdKHR( static_cast( m_device ), reinterpret_cast( pImportFenceFdInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkImportFenceFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportFenceFdKHR.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkImportFenceFdKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkImportFenceFdKHR( m_device, reinterpret_cast( &importFenceFdInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetFenceFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceFdKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetFenceFdKHR( static_cast( m_device ), reinterpret_cast( pGetFdInfo ), pFd ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetFenceFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceFdKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetFenceFdKHR && "Function requires " ); +# endif + + int fd; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetFenceFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_performance_query === + + // wrapper function for command 
vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( static_cast( m_physicalDevice ), + queueFamilyIndex, + pCounterCount, + reinterpret_cast( pCounters ), + reinterpret_cast( pCounterDescriptions ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR.html + template ::value && + std::is_same::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, + std::vector>>::type + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR && + "Function requires " ); +# endif + + std::pair, + std::vector> + data_; + std::vector & counters = data_.first; + std::vector & counterDescriptions = data_.second; + uint32_t counterCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && counterCount ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + &counterCount, + reinterpret_cast( counters.data() ), + reinterpret_cast( counterDescriptions.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + VULKAN_HPP_ASSERT( counterCount <= counters.size() ); + if ( counterCount < counters.size() ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } + + // wrapper function for command vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR.html + template ::value && + std::is_same::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, + std::vector>>::type + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, + PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, + 
PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR && + "Function requires " ); +# endif + + std::pair, + std::vector> + data_( + std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) ); + std::vector & counters = data_.first; + std::vector & counterDescriptions = data_.second; + uint32_t counterCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && counterCount ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + &counterCount, + reinterpret_cast( counters.data() ), + reinterpret_cast( counterDescriptions.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + VULKAN_HPP_ASSERT( counterCount <= counters.size() ); + if ( counterCount < counters.size() ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR.html + template + VULKAN_HPP_INLINE void + PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( pPerformanceQueryCreateInfo ), + pNumPasses ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( + const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR && + "Function requires " ); +# endif + + uint32_t numPasses; + d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + m_physicalDevice, reinterpret_cast( &performanceQueryCreateInfo ), &numPasses ); + + return numPasses; + } +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkAcquireProfilingLockKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireProfilingLockKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkAcquireProfilingLockKHR( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireProfilingLockKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireProfilingLockKHR.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireProfilingLockKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkReleaseProfilingLockKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseProfilingLockKHR.html + template + VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkReleaseProfilingLockKHR( static_cast( m_device ) ); + } + + //=== VK_KHR_get_surface_capabilities2 === + + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( pSurfaceInfo ), + reinterpret_cast( pSurfaceCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR 
surfaceCapabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &surfaceCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); + } + + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR && + "Function requires " ); +# endif + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &surfaceCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSurfaceFormats2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( pSurfaceInfo ), + pSurfaceFormatCount, + reinterpret_cast( pSurfaceFormats ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfaceFormats2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR && + "Function requires " ); +# endif + + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && 
surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( + d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); + } + + // wrapper function for command vkGetPhysicalDeviceSurfaceFormats2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR && + "Function requires " ); +# endif + + std::vector surfaceFormats( surfaceFormat2KHRAllocator ); + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( + d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); + } + + // wrapper function for command vkGetPhysicalDeviceSurfaceFormats2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR && + "Function requires " ); +# endif + + std::vector structureChains; + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); + if ( ( result == 
VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) + { + structureChains.resize( surfaceFormatCount ); + surfaceFormats.resize( surfaceFormatCount ); + for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + { + surfaceFormats[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + structureChains.resize( surfaceFormatCount ); + } + for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + { + structureChains[i].template get() = surfaceFormats[i]; + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); + } + + // wrapper function for command vkGetPhysicalDeviceSurfaceFormats2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR && + "Function requires " ); +# endif + + std::vector structureChains( structureChainAllocator ); + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) + { + structureChains.resize( surfaceFormatCount ); + surfaceFormats.resize( surfaceFormatCount ); + for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + { + surfaceFormats[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + structureChains.resize( surfaceFormatCount ); + } + for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + { + structureChains[i].template get() = surfaceFormats[i]; + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_get_display_properties2 === + + // wrapper function for command vkGetPhysicalDeviceDisplayProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
Result PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceDisplayProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayProperties2KHR.html + template < + typename DisplayProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayProperties2KHR && + "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceDisplayProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayProperties2KHR.html + template < + typename DisplayProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayProperties2KHR && + "Function requires " ); +# endif + + std::vector properties( displayProperties2KHRAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); + 
VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceDisplayPlaneProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlaneProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceDisplayPlaneProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlaneProperties2KHR.html + template < + typename DisplayPlaneProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR && + "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceDisplayPlaneProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlaneProperties2KHR.html + template < + typename DisplayPlaneProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR && + "Function requires " ); +# endif + + std::vector properties( displayPlaneProperties2KHRAllocator ); + uint32_t 
propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDisplayModeProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModeProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDisplayModeProperties2KHR( static_cast( m_physicalDevice ), + static_cast( display ), + pPropertyCount, + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDisplayModeProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModeProperties2KHR.html + template < + typename DisplayModeProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetDisplayModeProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModeProperties2KHR.html + 
template < + typename DisplayModeProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function requires " ); +# endif + + std::vector properties( displayModeProperties2KHRAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetDisplayModeProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModeProperties2KHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function requires " ); +# endif + + std::vector structureChains; + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + structureChains.resize( propertyCount ); + properties.resize( propertyCount ); + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + properties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + structureChains.resize( propertyCount ); + } + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + structureChains[i].template get() = properties[i]; + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); + } + + // wrapper function for command 
vkGetDisplayModeProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModeProperties2KHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function requires " ); +# endif + + std::vector structureChains( structureChainAllocator ); + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + structureChains.resize( propertyCount ); + properties.resize( propertyCount ); + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + properties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + structureChains.resize( propertyCount ); + } + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + structureChains[i].template get() = properties[i]; + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDisplayPlaneCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneCapabilities2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDisplayPlaneCapabilities2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( pDisplayPlaneInfo ), + reinterpret_cast( pCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDisplayPlaneCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneCapabilities2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneCapabilities2KHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, + 
reinterpret_cast( &displayPlaneInfo ), + reinterpret_cast( &capabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + // wrapper function for command vkCreateIOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIOSSurfaceMVK.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateIOSSurfaceMVK( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateIOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIOSSurfaceMVK.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIOSSurfaceMVK && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateIOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateIOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIOSSurfaceMVK.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIOSSurfaceMVK && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateIOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + 
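The enhanced-mode wrappers for VK_KHR_get_display_properties2 above collapse the usual query-count / resize / query-data loop into a single call that returns a std::vector, and getDisplayPlaneCapabilities2KHR returns the capabilities structure directly (throwing vk::SystemError on failure unless exceptions are disabled). A minimal usage sketch, assuming a valid vk::PhysicalDevice and vk::DisplayKHR obtained elsewhere and an initialized default dispatcher:

#include <vulkan/vulkan.hpp>

// Sketch only: `physicalDevice` and `display` are assumed to be valid handles.
void listDisplayModes( vk::PhysicalDevice physicalDevice, vk::DisplayKHR display )
{
  // One call instead of the two-call enumeration pattern around vkGetDisplayModeProperties2KHR.
  std::vector<vk::DisplayModeProperties2KHR> modes = physicalDevice.getDisplayModeProperties2KHR( display );

  for ( auto const & mode : modes )
  {
    vk::DisplayPlaneInfo2KHR planeInfo{};
    planeInfo.mode       = mode.displayModeProperties.displayMode;
    planeInfo.planeIndex = 0;  // hypothetical plane index, for illustration only

    // Throws vk::SystemError on failure in the default (exception-enabled) configuration.
    vk::DisplayPlaneCapabilities2KHR caps = physicalDevice.getDisplayPlaneCapabilities2KHR( planeInfo );
    (void)caps;
  }
}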
+#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + // wrapper function for command vkCreateMacOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMacOSSurfaceMVK.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateMacOSSurfaceMVK( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateMacOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMacOSSurfaceMVK.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateMacOSSurfaceMVK && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateMacOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateMacOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMacOSSurfaceMVK.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateMacOSSurfaceMVK && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateMacOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + // wrapper function for command vkSetDebugUtilsObjectNameEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDebugUtilsObjectNameEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const 
VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkSetDebugUtilsObjectNameEXT( static_cast( m_device ), reinterpret_cast( pNameInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetDebugUtilsObjectNameEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDebugUtilsObjectNameEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetDebugUtilsObjectNameEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSetDebugUtilsObjectTagEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDebugUtilsObjectTagEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkSetDebugUtilsObjectTagEXT( static_cast( m_device ), reinterpret_cast( pTagInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetDebugUtilsObjectTagEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDebugUtilsObjectTagEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetDebugUtilsObjectTagEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkQueueBeginDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueBeginDebugUtilsLabelEXT.html + template + VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkQueueBeginDebugUtilsLabelEXT( static_cast( m_queue ), reinterpret_cast( pLabelInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueBeginDebugUtilsLabelEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueBeginDebugUtilsLabelEXT.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkQueueBeginDebugUtilsLabelEXT && "Function requires <VK_EXT_debug_utils>" );
+#  endif
+
+    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkQueueEndDebugUtilsLabelEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueEndDebugUtilsLabelEXT.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkQueueEndDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ) );
+  }
+
+  // wrapper function for command vkQueueInsertDebugUtilsLabelEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueInsertDebugUtilsLabelEXT.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkQueueInsertDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkQueueInsertDebugUtilsLabelEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueInsertDebugUtilsLabelEXT.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkQueueInsertDebugUtilsLabelEXT && "Function requires <VK_EXT_debug_utils>" );
+#  endif
+
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkCmdBeginDebugUtilsLabelEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginDebugUtilsLabelEXT.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdBeginDebugUtilsLabelEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginDebugUtilsLabelEXT.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdBeginDebugUtilsLabelEXT && "Function requires <VK_EXT_debug_utils>" );
+#  endif
+
+    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
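The queue- and command-buffer-level label wrappers above forward straight to the vkQueue*/vkCmd*DebugUtilsLabelEXT entry points; the reference overloads only add the dispatcher assertion. A short sketch of bracketing work with labels, assuming the instance was created with VK_EXT_debug_utils enabled and that `queue` and `cmd` are valid handles:

#include <vulkan/vulkan.hpp>

// Sketch only: assumes VK_EXT_debug_utils is enabled and the handles are valid.
void labelWork( vk::Queue queue, vk::CommandBuffer cmd )
{
  vk::DebugUtilsLabelEXT queueLabel{};
  queueLabel.pLabelName = "frame";          // label shown by tools such as RenderDoc

  queue.beginDebugUtilsLabelEXT( queueLabel );

  vk::DebugUtilsLabelEXT cmdLabel{};
  cmdLabel.pLabelName = "geometry pass";    // hypothetical pass name, for illustration only

  cmd.beginDebugUtilsLabelEXT( cmdLabel );
  // ... record draw commands here ...
  cmd.endDebugUtilsLabelEXT();

  queue.endDebugUtilsLabelEXT();
}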
+  // wrapper function for command vkCmdEndDebugUtilsLabelEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndDebugUtilsLabelEXT.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+  }
+
+  // wrapper function for command vkCmdInsertDebugUtilsLabelEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdInsertDebugUtilsLabelEXT.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdInsertDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdInsertDebugUtilsLabelEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdInsertDebugUtilsLabelEXT.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdInsertDebugUtilsLabelEXT && "Function requires <VK_EXT_debug_utils>" );
+#  endif
+
+    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkCreateDebugUtilsMessengerEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugUtilsMessengerEXT.html
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo,
+                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks *             pAllocator,
+                                            VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT *                pMessenger,
+                                            Dispatch const &                                              d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( static_cast<VkInstance>( m_instance ),
+                                                                  reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ),
+                                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                                  reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCreateDebugUtilsMessengerEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugUtilsMessengerEXT.html
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type
+    Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,
+                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>      allocator,
+                                            Dispatch const &                                                d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCreateDebugUtilsMessengerEXT && "Function requires <VK_EXT_debug_utils>" );
+#  endif
+
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
+    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDebugUtilsMessengerEXT(
+      m_instance,
+      reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Instance::createDebugUtilsMessengerEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( messenger ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugUtilsMessengerEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDebugUtilsMessengerEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDebugUtilsMessengerEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &messenger ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( messenger, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugUtilsMessengerEXT.html + template + VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugUtilsMessengerEXT( static_cast( m_instance ), + static_cast( messenger ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugUtilsMessengerEXT.html + template + VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDebugUtilsMessengerEXT && "Function requires " ); +# endif + + d.vkDestroyDebugUtilsMessengerEXT( + m_instance, + static_cast( messenger ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugUtilsMessengerEXT.html + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugUtilsMessengerEXT( static_cast( m_instance ), + static_cast( messenger ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugUtilsMessengerEXT.html + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDebugUtilsMessengerEXT && "Function requires " ); +# endif + + d.vkDestroyDebugUtilsMessengerEXT( + m_instance, + static_cast( messenger ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSubmitDebugUtilsMessageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSubmitDebugUtilsMessageEXT.html + template + VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkSubmitDebugUtilsMessageEXT( static_cast( m_instance ), + static_cast( messageSeverity ), + static_cast( messageTypes ), + reinterpret_cast( pCallbackData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSubmitDebugUtilsMessageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSubmitDebugUtilsMessageEXT.html + template + VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSubmitDebugUtilsMessageEXT && "Function requires " ); +# endif + + d.vkSubmitDebugUtilsMessageEXT( m_instance, + static_cast( messageSeverity ), + static_cast( messageTypes ), + reinterpret_cast( &callbackData ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + + // wrapper function for command vkGetAndroidHardwareBufferPropertiesANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAndroidHardwareBufferPropertiesANDROID.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer, + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( + static_cast( m_device ), buffer, reinterpret_cast( pProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetAndroidHardwareBufferPropertiesANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAndroidHardwareBufferPropertiesANDROID.html + template + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAndroidHardwareBufferPropertiesANDROID && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetAndroidHardwareBufferPropertiesANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAndroidHardwareBufferPropertiesANDROID.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAndroidHardwareBufferPropertiesANDROID && + "Function requires " ); +# endif + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties = + structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetMemoryAndroidHardwareBufferANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryAndroidHardwareBufferANDROID.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( + static_cast( m_device ), reinterpret_cast( pInfo ), pBuffer ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryAndroidHardwareBufferANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryAndroidHardwareBufferANDROID.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryAndroidHardwareBufferANDROID && + "Function requires " ); +# endif + + struct AHardwareBuffer * buffer; + VULKAN_HPP_NAMESPACE::Result result = 
static_cast( + d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast( &info ), &buffer ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( buffer ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateExecutionGraphPipelinesAMDX( static_cast( m_device ), + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createExecutionGraphPipelinesAMDX( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createExecutionGraphPipelinesAMDX( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function requires " ); +# endif + + std::vector 
pipelines( createInfos.size(), pipelineAllocator ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createExecutionGraphPipelineAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDX", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue( result, std::move( pipeline ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createExecutionGraphPipelinesAMDXUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines; + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { 
+ uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createExecutionGraphPipelinesAMDXUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createExecutionGraphPipelineAMDXUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDXUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( + result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetExecutionGraphPipelineScratchSizeAMDX, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExecutionGraphPipelineScratchSizeAMDX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetExecutionGraphPipelineScratchSizeAMDX( static_cast( m_device ), + static_cast( executionGraph ), + reinterpret_cast( pSizeInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetExecutionGraphPipelineScratchSizeAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExecutionGraphPipelineScratchSizeAMDX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetExecutionGraphPipelineScratchSizeAMDX && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetExecutionGraphPipelineScratchSizeAMDX( + m_device, static_cast( executionGraph ), reinterpret_cast( &sizeInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineScratchSizeAMDX" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( sizeInfo ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetExecutionGraphPipelineNodeIndexAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExecutionGraphPipelineNodeIndexAMDX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, + uint32_t * pNodeIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetExecutionGraphPipelineNodeIndexAMDX( static_cast( m_device ), + static_cast( executionGraph ), + reinterpret_cast( pNodeInfo ), + pNodeIndex ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetExecutionGraphPipelineNodeIndexAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExecutionGraphPipelineNodeIndexAMDX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::getExecutionGraphPipelineNodeIndexAMDX( + VULKAN_HPP_NAMESPACE::Pipeline executionGraph, const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetExecutionGraphPipelineNodeIndexAMDX && "Function requires " ); +# endif + + uint32_t nodeIndex; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetExecutionGraphPipelineNodeIndexAMDX( + m_device, static_cast( executionGraph ), reinterpret_cast( &nodeInfo ), &nodeIndex ) ); + 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineNodeIndexAMDX" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( nodeIndex ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdInitializeGraphScratchMemoryAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdInitializeGraphScratchMemoryAMDX.html + template + VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdInitializeGraphScratchMemoryAMDX( static_cast( m_commandBuffer ), + static_cast( executionGraph ), + static_cast( scratch ), + static_cast( scratchSize ) ); + } + + // wrapper function for command vkCmdDispatchGraphAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphAMDX.html + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchGraphAMDX( static_cast( m_commandBuffer ), + static_cast( scratch ), + static_cast( scratchSize ), + reinterpret_cast( pCountInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDispatchGraphAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphAMDX.html + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDispatchGraphAMDX && "Function requires " ); +# endif + + d.vkCmdDispatchGraphAMDX( m_commandBuffer, + static_cast( scratch ), + static_cast( scratchSize ), + reinterpret_cast( &countInfo ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDispatchGraphIndirectAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphIndirectAMDX.html + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchGraphIndirectAMDX( static_cast( m_commandBuffer ), + static_cast( scratch ), + static_cast( scratchSize ), + reinterpret_cast( pCountInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDispatchGraphIndirectAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphIndirectAMDX.html + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + 
VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDispatchGraphIndirectAMDX && "Function requires " ); +# endif + + d.vkCmdDispatchGraphIndirectAMDX( m_commandBuffer, + static_cast( scratch ), + static_cast( scratchSize ), + reinterpret_cast( &countInfo ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDispatchGraphIndirectCountAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphIndirectCountAMDX.html + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + VULKAN_HPP_NAMESPACE::DeviceAddress countInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchGraphIndirectCountAMDX( static_cast( m_commandBuffer ), + static_cast( scratch ), + static_cast( scratchSize ), + static_cast( countInfo ) ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + + // wrapper function for command vkCmdSetSampleLocationsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleLocationsEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetSampleLocationsEXT( static_cast( m_commandBuffer ), reinterpret_cast( pSampleLocationsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetSampleLocationsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleLocationsEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetSampleLocationsEXT && "Function requires " ); +# endif + + d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast( &sampleLocationsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceMultisamplePropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMultisamplePropertiesEXT.html + template + VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceMultisamplePropertiesEXT( static_cast( m_physicalDevice ), + static_cast( samples ), + reinterpret_cast( pMultisampleProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceMultisamplePropertiesEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMultisamplePropertiesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT + PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMultisamplePropertiesEXT && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties; + d.vkGetPhysicalDeviceMultisamplePropertiesEXT( + m_physicalDevice, static_cast( samples ), reinterpret_cast( &multisampleProperties ) ); + + return multisampleProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_get_memory_requirements2 === + + // wrapper function for command vkGetImageMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2KHR.html + template + VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageMemoryRequirements2KHR( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetImageMemoryRequirements2KHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetImageMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetImageMemoryRequirements2KHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetBufferMemoryRequirements2KHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2KHR.html + template + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetBufferMemoryRequirements2KHR( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetBufferMemoryRequirements2KHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetBufferMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetBufferMemoryRequirements2KHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSparseMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2KHR.html + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSparseMemoryRequirements2KHR( static_cast( m_device ), + reinterpret_cast( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSparseMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2KHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2KHR && + "Function requires or " ); +# endif + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + // wrapper function for command vkGetImageSparseMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2KHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2KHR && + "Function requires or " ); +# endif + + std::vector sparseMemoryRequirements( + sparseImageMemoryRequirements2Allocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_acceleration_structure === + + // wrapper function for command vkCreateAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateAccelerationStructureKHR( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pAccelerationStructure ) ) ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateAccelerationStructureKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( accelerationStructure ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createAccelerationStructureKHRUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateAccelerationStructureKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + accelerationStructure, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureKHR.html + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureKHR( static_cast( m_device ), + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureKHR.html + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( 
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureKHR && "Function requires " ); +# endif + + d.vkDestroyAccelerationStructureKHR( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureKHR.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureKHR( static_cast( m_device ), + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureKHR.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureKHR && "Function requires " ); +# endif + + d.vkDestroyAccelerationStructureKHR( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBuildAccelerationStructuresKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructuresKHR.html + template + VULKAN_HPP_INLINE void + CommandBuffer::buildAccelerationStructuresKHR( uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildAccelerationStructuresKHR( static_cast( m_commandBuffer ), + infoCount, + reinterpret_cast( pInfos ), + reinterpret_cast( ppBuildRangeInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBuildAccelerationStructuresKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructuresKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructuresKHR && "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); +# 
else + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer, + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( pBuildRangeInfos.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBuildAccelerationStructuresIndirectKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructuresIndirectKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses, + const uint32_t * pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildAccelerationStructuresIndirectKHR( static_cast( m_commandBuffer ), + infoCount, + reinterpret_cast( pInfos ), + reinterpret_cast( pIndirectDeviceAddresses ), + pIndirectStrides, + ppMaxPrimitiveCounts ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBuildAccelerationStructuresIndirectKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructuresIndirectKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectDeviceAddresses, + VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectStrides, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pMaxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructuresIndirectKHR && + "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() ); + VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() ); + VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() ); +# else + if ( infos.size() != indirectDeviceAddresses.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" ); + } + if ( infos.size() != indirectStrides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" ); + } + if ( infos.size() != pMaxPrimitiveCounts.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer, + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( indirectDeviceAddresses.data() ), + indirectStrides.data(), + pMaxPrimitiveCounts.data() ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBuildAccelerationStructuresKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkBuildAccelerationStructuresKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkBuildAccelerationStructuresKHR( static_cast( m_device ), + static_cast( deferredOperation ), + infoCount, + reinterpret_cast( pInfos ), + reinterpret_cast( ppBuildRangeInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBuildAccelerationStructuresKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBuildAccelerationStructuresKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBuildAccelerationStructuresKHR && "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); +# else + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkBuildAccelerationStructuresKHR( m_device, + static_cast( deferredOperation ), + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( pBuildRangeInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyAccelerationStructureKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCopyAccelerationStructureKHR( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyAccelerationStructureKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR 
deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyAccelerationStructureKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCopyAccelerationStructureKHR( + m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyAccelerationStructureToMemoryKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyAccelerationStructureToMemoryKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCopyAccelerationStructureToMemoryKHR( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyAccelerationStructureToMemoryKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyAccelerationStructureToMemoryKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyAccelerationStructureToMemoryKHR && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCopyAccelerationStructureToMemoryKHR( + m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyMemoryToAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToAccelerationStructureKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCopyMemoryToAccelerationStructureKHR( static_cast( m_device ), + static_cast( 
deferredOperation ), + reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyMemoryToAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToAccelerationStructureKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyMemoryToAccelerationStructureKHR && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCopyMemoryToAccelerationStructureKHR( + m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteAccelerationStructuresPropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + void * pData, + size_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( static_cast( m_device ), + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + dataSize, + pData, + stride ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteAccelerationStructuresPropertiesKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::writeAccelerationStructuresPropertiesKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWriteAccelerationStructuresPropertiesKHR && + "Function requires " ); +# endif + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkWriteAccelerationStructuresPropertiesKHR( m_device, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ), + stride ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::writeAccelerationStructuresPropertiesKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } + + // wrapper function for command vkWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteAccelerationStructuresPropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::writeAccelerationStructuresPropertyKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWriteAccelerationStructuresPropertiesKHR && + "Function requires " ); +# endif + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkWriteAccelerationStructuresPropertiesKHR( m_device, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + sizeof( DataType ), + reinterpret_cast( &data ), + stride ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyAccelerationStructureKHR( static_cast( m_commandBuffer ), + reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyAccelerationStructureKHR && "Function requires " ); +# endif + + d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyAccelerationStructureToMemoryKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureToMemoryKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyAccelerationStructureToMemoryKHR( static_cast( m_commandBuffer ), + reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyAccelerationStructureToMemoryKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureToMemoryKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyAccelerationStructureToMemoryKHR && + "Function requires " ); +# endif + + d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyMemoryToAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToAccelerationStructureKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMemoryToAccelerationStructureKHR( static_cast( m_commandBuffer ), + reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyMemoryToAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToAccelerationStructureKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToAccelerationStructureKHR && + "Function requires " ); +# endif + + d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetAccelerationStructureDeviceAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureDeviceAddressKHR.html + template + VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetAccelerationStructureDeviceAddressKHR( + static_cast( m_device ), reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetAccelerationStructureDeviceAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureDeviceAddressKHR.html + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureDeviceAddressKHR && + "Function requires " ); +# endif + + VkDeviceAddress result = + d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast( &info ) ); + + return 
static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteAccelerationStructuresPropertiesKHR.html + template + VULKAN_HPP_INLINE void + CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteAccelerationStructuresPropertiesKHR( static_cast( m_commandBuffer ), + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteAccelerationStructuresPropertiesKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdWriteAccelerationStructuresPropertiesKHR && + "Function requires " ); +# endif + + d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceAccelerationStructureCompatibilityKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceAccelerationStructureCompatibilityKHR.html + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceAccelerationStructureCompatibilityKHR( static_cast( m_device ), + reinterpret_cast( pVersionInfo ), + reinterpret_cast( pCompatibility ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceAccelerationStructureCompatibilityKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceAccelerationStructureCompatibilityKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceAccelerationStructureCompatibilityKHR && + "Function requires " ); +# endif + + 
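+    // Ask the implementation whether an acceleration structure serialized with the given version information is compatible with this device.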
VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; + d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, + reinterpret_cast( &versionInfo ), + reinterpret_cast( &compatibility ) ); + + return compatibility; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetAccelerationStructureBuildSizesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureBuildSizesKHR.html + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, + const uint32_t * pMaxPrimitiveCounts, + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetAccelerationStructureBuildSizesKHR( static_cast( m_device ), + static_cast( buildType ), + reinterpret_cast( pBuildInfo ), + pMaxPrimitiveCounts, + reinterpret_cast( pSizeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetAccelerationStructureBuildSizesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureBuildSizesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, + VULKAN_HPP_NAMESPACE::ArrayProxy const & maxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureBuildSizesKHR && + "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount ); +# else + if ( maxPrimitiveCounts.size() != buildInfo.geometryCount ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; + d.vkGetAccelerationStructureBuildSizesKHR( m_device, + static_cast( buildType ), + reinterpret_cast( &buildInfo ), + maxPrimitiveCounts.data(), + reinterpret_cast( &sizeInfo ) ); + + return sizeInfo; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_ray_tracing_pipeline === + + // wrapper function for command vkCmdTraceRaysKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdTraceRaysKHR( 
static_cast( m_commandBuffer ), + reinterpret_cast( pRaygenShaderBindingTable ), + reinterpret_cast( pMissShaderBindingTable ), + reinterpret_cast( pHitShaderBindingTable ), + reinterpret_cast( pCallableShaderBindingTable ), + width, + height, + depth ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdTraceRaysKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdTraceRaysKHR && "Function requires " ); +# endif + + d.vkCmdTraceRaysKHR( m_commandBuffer, + reinterpret_cast( &raygenShaderBindingTable ), + reinterpret_cast( &missShaderBindingTable ), + reinterpret_cast( &hitShaderBindingTable ), + reinterpret_cast( &callableShaderBindingTable ), + width, + height, + depth ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateRayTracingPipelinesKHR( static_cast( m_device ), + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( 
static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size(), pipelineAllocator ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue( result, std::move( pipeline ) ); + } + +# ifndef 
VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesKHRUnique( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines; + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesKHRUnique( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + 
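+    // Wrap each raw pipeline in a UniqueHandle so ObjectDestroy releases it automatically when the handle goes out of scope.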
std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( + result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetRayTracingShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( + static_cast( m_device ), static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRayTracingShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingShaderGroupHandlesKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + 
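+    // Read the requested shader group handles from the pipeline directly into the vector that is returned to the caller.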
VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( + m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } + + // wrapper function for command vkGetRayTracingShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesKHR && + "Function requires or " ); +# endif + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( + m_device, static_cast( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast( &data ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetRayTracingCaptureReplayShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingCaptureReplayShaderGroupHandlesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + static_cast( m_device ), static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRayTracingCaptureReplayShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingCaptureReplayShaderGroupHandlesKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR && + "Function requires " ); +# endif + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } + + // wrapper function for command vkGetRayTracingCaptureReplayShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingCaptureReplayShaderGroupHandlesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingCaptureReplayShaderGroupHandleKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR && + "Function requires " ); +# endif + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + m_device, static_cast( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast( &data ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdTraceRaysIndirectKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysIndirectKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdTraceRaysIndirectKHR( static_cast( m_commandBuffer ), + reinterpret_cast( pRaygenShaderBindingTable ), + reinterpret_cast( pMissShaderBindingTable ), + reinterpret_cast( pHitShaderBindingTable ), + reinterpret_cast( pCallableShaderBindingTable ), + static_cast( indirectDeviceAddress ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdTraceRaysIndirectKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysIndirectKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdTraceRaysIndirectKHR && "Function requires " ); +# endif + + d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, + reinterpret_cast( &raygenShaderBindingTable ), + reinterpret_cast( &missShaderBindingTable ), + reinterpret_cast( 
&hitShaderBindingTable ), + reinterpret_cast( &callableShaderBindingTable ), + static_cast( indirectDeviceAddress ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetRayTracingShaderGroupStackSizeKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupStackSizeKHR.html + template + VULKAN_HPP_INLINE DeviceSize Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t group, + VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetRayTracingShaderGroupStackSizeKHR( + static_cast( m_device ), static_cast( pipeline ), group, static_cast( groupShader ) ) ); + } + + // wrapper function for command vkCmdSetRayTracingPipelineStackSizeKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRayTracingPipelineStackSizeKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRayTracingPipelineStackSizeKHR( static_cast( m_commandBuffer ), pipelineStackSize ); + } + + //=== VK_KHR_sampler_ycbcr_conversion === + + // wrapper function for command vkCreateSamplerYcbcrConversionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversionKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateSamplerYcbcrConversionKHR( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pYcbcrConversion ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateSamplerYcbcrConversionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversionKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversionKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( ycbcrConversion ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateSamplerYcbcrConversionKHR, see + 
// https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversionKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversionKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( ycbcrConversion, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySamplerYcbcrConversionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversionKHR.html + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySamplerYcbcrConversionKHR( static_cast( m_device ), + static_cast( ycbcrConversion ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySamplerYcbcrConversionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversionKHR.html + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversionKHR && + "Function requires or " ); +# endif + + d.vkDestroySamplerYcbcrConversionKHR( + m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_bind_memory2 === + + // wrapper function for command vkBindBufferMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkBindBufferMemory2KHR( static_cast( m_device ), bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindBufferMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory2KHR.html + 
template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindBufferMemory2KHR && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkBindBufferMemory2KHR( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBindImageMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkBindImageMemory2KHR( static_cast( m_device ), bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindImageMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory2KHR.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindImageMemory2KHR && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkBindImageMemory2KHR( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_image_drm_format_modifier === + + // wrapper function for command vkGetImageDrmFormatModifierPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageDrmFormatModifierPropertiesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( + VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( + static_cast( m_device ), static_cast( image ), reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageDrmFormatModifierPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageDrmFormatModifierPropertiesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const + { + 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageDrmFormatModifierPropertiesEXT && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( + m_device, static_cast( image ), reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_validation_cache === + + // wrapper function for command vkCreateValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateValidationCacheEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateValidationCacheEXT( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pValidationCache ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateValidationCacheEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateValidationCacheEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateValidationCacheEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &validationCache ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( validationCache ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateValidationCacheEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateValidationCacheEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateValidationCacheEXT( + m_device, + reinterpret_cast( &createInfo ), + 
reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &validationCache ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( validationCache, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyValidationCacheEXT.html + template + VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyValidationCacheEXT( + static_cast( m_device ), static_cast( validationCache ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyValidationCacheEXT.html + template + VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyValidationCacheEXT && "Function requires " ); +# endif + + d.vkDestroyValidationCacheEXT( + m_device, + static_cast( validationCache ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyValidationCacheEXT.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyValidationCacheEXT( + static_cast( m_device ), static_cast( validationCache ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyValidationCacheEXT.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyValidationCacheEXT && "Function requires " ); +# endif + + d.vkDestroyValidationCacheEXT( + m_device, + static_cast( validationCache ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkMergeValidationCachesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMergeValidationCachesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + uint32_t srcCacheCount, + 
const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkMergeValidationCachesEXT( static_cast( m_device ), + static_cast( dstCache ), + srcCacheCount, + reinterpret_cast( pSrcCaches ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkMergeValidationCachesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMergeValidationCachesEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkMergeValidationCachesEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkMergeValidationCachesEXT( + m_device, static_cast( dstCache ), srcCaches.size(), reinterpret_cast( srcCaches.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetValidationCacheDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetValidationCacheDataEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + size_t * pDataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetValidationCacheDataEXT( static_cast( m_device ), static_cast( validationCache ), pDataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetValidationCacheDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetValidationCacheDataEXT.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetValidationCacheDataEXT && "Function requires " ); +# endif + + std::vector data; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( 
data ) ); + } + + // wrapper function for command vkGetValidationCacheDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetValidationCacheDataEXT.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetValidationCacheDataEXT && "Function requires " ); +# endif + + std::vector data( uint8_tAllocator ); + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_shading_rate_image === + + // wrapper function for command vkCmdBindShadingRateImageNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindShadingRateImageNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindShadingRateImageNV( + static_cast( m_commandBuffer ), static_cast( imageView ), static_cast( imageLayout ) ); + } + + // wrapper function for command vkCmdSetViewportShadingRatePaletteNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportShadingRatePaletteNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportShadingRatePaletteNV( + static_cast( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast( pShadingRatePalettes ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetViewportShadingRatePaletteNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportShadingRatePaletteNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( + uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shadingRatePalettes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetViewportShadingRatePaletteNV && "Function requires " ); +# endif + + 
d.vkCmdSetViewportShadingRatePaletteNV( + m_commandBuffer, firstViewport, shadingRatePalettes.size(), reinterpret_cast( shadingRatePalettes.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetCoarseSampleOrderNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoarseSampleOrderNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCoarseSampleOrderNV( static_cast( m_commandBuffer ), + static_cast( sampleOrderType ), + customSampleOrderCount, + reinterpret_cast( pCustomSampleOrders ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetCoarseSampleOrderNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoarseSampleOrderNV.html + template + VULKAN_HPP_INLINE void + CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + VULKAN_HPP_NAMESPACE::ArrayProxy const & customSampleOrders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetCoarseSampleOrderNV && "Function requires " ); +# endif + + d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, + static_cast( sampleOrderType ), + customSampleOrders.size(), + reinterpret_cast( customSampleOrders.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_ray_tracing === + + // wrapper function for command vkCreateAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateAccelerationStructureNV( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pAccelerationStructure ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateAccelerationStructureNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( 
static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( accelerationStructure ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateAccelerationStructureNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + accelerationStructure, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureNV.html + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureNV( static_cast( m_device ), + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureNV.html + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureNV && "Function requires " ); +# endif + + d.vkDestroyAccelerationStructureNV( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureNV.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureNV( static_cast( m_device ), + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureNV.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureNV && "Function requires " ); +# endif + + d.vkDestroyAccelerationStructureNV( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetAccelerationStructureMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureMemoryRequirementsNV.html + template + VULKAN_HPP_INLINE void + Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetAccelerationStructureMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetAccelerationStructureMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureMemoryRequirementsNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR + Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureMemoryRequirementsNV && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements; + d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetAccelerationStructureMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureMemoryRequirementsNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureMemoryRequirementsNV && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain 
structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get(); + d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBindAccelerationStructureMemoryNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindAccelerationStructureMemoryNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( + uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkBindAccelerationStructureMemoryNV( + static_cast( m_device ), bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindAccelerationStructureMemoryNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindAccelerationStructureMemoryNV.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::bindAccelerationStructureMemoryNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindAccelerationStructureMemoryNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkBindAccelerationStructureMemoryNV( + m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBuildAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructureNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildAccelerationStructureNV( static_cast( m_commandBuffer ), + reinterpret_cast( pInfo ), + static_cast( instanceData ), + static_cast( instanceOffset ), + static_cast( update ), + static_cast( dst ), + static_cast( src ), + static_cast( scratch ), + static_cast( scratchOffset ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBuildAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructureNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + 
VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructureNV && "Function requires " ); +# endif + + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, + reinterpret_cast( &info ), + static_cast( instanceData ), + static_cast( instanceOffset ), + static_cast( update ), + static_cast( dst ), + static_cast( src ), + static_cast( scratch ), + static_cast( scratchOffset ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyAccelerationStructureNV( static_cast( m_commandBuffer ), + static_cast( dst ), + static_cast( src ), + static_cast( mode ) ); + } + + // wrapper function for command vkCmdTraceRaysNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdTraceRaysNV( static_cast( m_commandBuffer ), + static_cast( raygenShaderBindingTableBuffer ), + static_cast( raygenShaderBindingOffset ), + static_cast( missShaderBindingTableBuffer ), + static_cast( missShaderBindingOffset ), + static_cast( missShaderBindingStride ), + static_cast( hitShaderBindingTableBuffer ), + static_cast( hitShaderBindingOffset ), + static_cast( hitShaderBindingStride ), + static_cast( callableShaderBindingTableBuffer ), + static_cast( callableShaderBindingOffset ), + static_cast( callableShaderBindingStride ), + width, + height, + depth ); + } + + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const 
VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateRayTracingPipelinesNV( static_cast( m_device ), + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size(), pipelineAllocator ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + 
const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue( result, std::move( pipeline ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines; + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result 
result = static_cast( d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( + result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetRayTracingShaderGroupHandlesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetRayTracingShaderGroupHandlesNV( + static_cast( m_device ), static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRayTracingShaderGroupHandlesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesNV.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingShaderGroupHandlesNV( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesNV && + "Function requires or " ); +# endif + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( + m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } + + // wrapper function for command vkGetRayTracingShaderGroupHandlesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesNV && + "Function requires or " ); +# endif + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( + m_device, static_cast( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast( &data ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetAccelerationStructureHandleNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureHandleNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetAccelerationStructureHandleNV( + static_cast( m_device ), static_cast( accelerationStructure ), dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetAccelerationStructureHandleNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureHandleNV.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureHandleNV && "Function requires " ); +# endif + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetAccelerationStructureHandleNV( + m_device, static_cast( accelerationStructure ), data.size() * sizeof( DataType ), reinterpret_cast( data.data() ) ) ); + 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } + + // wrapper function for command vkGetAccelerationStructureHandleNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureHandleNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureHandleNV && "Function requires " ); +# endif + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetAccelerationStructureHandleNV( + m_device, static_cast( accelerationStructure ), sizeof( DataType ), reinterpret_cast( &data ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdWriteAccelerationStructuresPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteAccelerationStructuresPropertiesNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteAccelerationStructuresPropertiesNV( static_cast( m_commandBuffer ), + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdWriteAccelerationStructuresPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteAccelerationStructuresPropertiesNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdWriteAccelerationStructuresPropertiesNV && + "Function requires " ); +# endif + + d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCompileDeferredNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCompileDeferredNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( 
VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t shader, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCompileDeferredNV( static_cast( m_device ), static_cast( pipeline ), shader ) ); + } +#else + // wrapper function for command vkCompileDeferredNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCompileDeferredNV.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCompileDeferredNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkCompileDeferredNV( m_device, static_cast( pipeline ), shader ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_maintenance3 === + + // wrapper function for command vkGetDescriptorSetLayoutSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupportKHR.html + template + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDescriptorSetLayoutSupportKHR( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pSupport ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDescriptorSetLayoutSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupportKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupportKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; + d.vkGetDescriptorSetLayoutSupportKHR( + m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + + return support; + } + + // wrapper function for command vkGetDescriptorSetLayoutSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupportKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupportKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + 
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get(); + d.vkGetDescriptorSetLayoutSupportKHR( + m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_draw_indirect_count === + + // wrapper function for command vkCmdDrawIndirectCountKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectCountKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndirectCountKHR( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + // wrapper function for command vkCmdDrawIndexedIndirectCountKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirectCountKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndexedIndirectCountKHR( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + //=== VK_EXT_external_memory_host === + + // wrapper function for command vkGetMemoryHostPointerPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryHostPointerPropertiesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryHostPointerPropertiesEXT( static_cast( m_device ), + static_cast( handleType ), + pHostPointer, + reinterpret_cast( pMemoryHostPointerProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryHostPointerPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryHostPointerPropertiesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryHostPointerPropertiesEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties; + 
VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetMemoryHostPointerPropertiesEXT( m_device, + static_cast( handleType ), + pHostPointer, + reinterpret_cast( &memoryHostPointerProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryHostPointerProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_AMD_buffer_marker === + + // wrapper function for command vkCmdWriteBufferMarkerAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteBufferMarkerAMD.html + template + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteBufferMarkerAMD( static_cast( m_commandBuffer ), + static_cast( pipelineStage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); + } + + // wrapper function for command vkCmdWriteBufferMarker2AMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteBufferMarker2AMD.html + template + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteBufferMarker2AMD( static_cast( m_commandBuffer ), + static_cast( stage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); + } + + //=== VK_EXT_calibrated_timestamps === + + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, + VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + static_cast( m_physicalDevice ), pTimeDomainCount, reinterpret_cast( pTimeDomains ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsEXT.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT && + "Function requires or " ); +# endif + + std::vector timeDomains; + uint32_t timeDomainCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == 
VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); + } + + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsEXT.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT && + "Function requires or " ); +# endif + + std::vector timeDomains( timeDomainKHRAllocator ); + uint32_t timeDomainCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetCalibratedTimestampsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, + const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetCalibratedTimestampsEXT( static_cast( m_device ), + timestampCount, + reinterpret_cast( pTimestampInfos ), + pTimestamps, + pMaxDeviation ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetCalibratedTimestampsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsEXT.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, uint64_t>>::type + Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & 
timestampInfos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT && + "Function requires or " ); +# endif + + std::pair, uint64_t> data_( + std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); + std::vector & timestamps = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetCalibratedTimestampsEXT( + m_device, timestampInfos.size(), reinterpret_cast( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } + + // wrapper function for command vkGetCalibratedTimestampsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsEXT.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, uint64_t>>::type + Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, + Uint64_tAllocator & uint64_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT && + "Function requires or " ); +# endif + + std::pair, uint64_t> data_( + std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) ); + std::vector & timestamps = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetCalibratedTimestampsEXT( + m_device, timestampInfos.size(), reinterpret_cast( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } + + // wrapper function for command vkGetCalibratedTimestampsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT && + "Function requires or " ); +# endif + + std::pair data_; + uint64_t & timestamp = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetCalibratedTimestampsEXT( m_device, 1, reinterpret_cast( ×tampInfo ), ×tamp, &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_mesh_shader === + + // wrapper function for command vkCmdDrawMeshTasksNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksNV.html + template + 
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMeshTasksNV( static_cast<VkCommandBuffer>( m_commandBuffer ), taskCount, firstTask );
+  }
+
+  // wrapper function for command vkCmdDrawMeshTasksIndirectNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectNV.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                                 VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                                 uint32_t drawCount,
+                                                                 uint32_t stride,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMeshTasksIndirectNV(
+      static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+
+  // wrapper function for command vkCmdDrawMeshTasksIndirectCountNV, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectCountNV.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                                      VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                                      VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+                                                                      VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                                                                      uint32_t maxDrawCount,
+                                                                      uint32_t stride,
+                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMeshTasksIndirectCountNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                         static_cast<VkBuffer>( buffer ),
+                                         static_cast<VkDeviceSize>( offset ),
+                                         static_cast<VkBuffer>( countBuffer ),
+                                         static_cast<VkDeviceSize>( countBufferOffset ),
+                                         maxDrawCount,
+                                         stride );
+  }
+
+  //=== VK_NV_scissor_exclusive ===
+
+  // wrapper function for command vkCmdSetExclusiveScissorEnableNV, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExclusiveScissorEnableNV.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor,
+                                                                     uint32_t exclusiveScissorCount,
+                                                                     const VULKAN_HPP_NAMESPACE::Bool32 * pExclusiveScissorEnables,
+                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetExclusiveScissorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                        firstExclusiveScissor,
+                                        exclusiveScissorCount,
+                                        reinterpret_cast<const VkBool32 *>( pExclusiveScissorEnables ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdSetExclusiveScissorEnableNV, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExclusiveScissorEnableNV.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor,
+                                                VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & exclusiveScissorEnables,
+                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdSetExclusiveScissorEnableNV && "Function requires <VK_NV_scissor_exclusive>" );
+#  endif
+
+    d.vkCmdSetExclusiveScissorEnableNV(
+      m_commandBuffer, firstExclusiveScissor, exclusiveScissorEnables.size(), reinterpret_cast<const VkBool32 *>( exclusiveScissorEnables.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkCmdSetExclusiveScissorNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExclusiveScissorNV.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
+                                          uint32_t exclusiveScissorCount,
+                                          const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetExclusiveScissorNV(
+      static_cast<VkCommandBuffer>( m_commandBuffer ), firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdSetExclusiveScissorNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExclusiveScissorNV.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
+                                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdSetExclusiveScissorNV && "Function requires <VK_NV_scissor_exclusive>" );
+#  endif
+
+    d.vkCmdSetExclusiveScissorNV(
+      m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(), reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+
+  // wrapper function for command vkCmdSetCheckpointNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCheckpointNV.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetCheckpointNV( static_cast<VkCommandBuffer>( m_commandBuffer ), pCheckpointMarker );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdSetCheckpointNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCheckpointNV.html
+  template <typename CheckpointMarkerType, typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdSetCheckpointNV && "Function requires <VK_NV_device_diagnostic_checkpoints>" );
+#  endif
+
+    d.vkCmdSetCheckpointNV( m_commandBuffer, reinterpret_cast<const void *>( &checkpointMarker ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkGetQueueCheckpointDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointDataNV.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount,
+                                                     VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetQueueCheckpointDataNV( static_cast<VkQueue>( m_queue ), pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkGetQueueCheckpointDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointDataNV.html
+  template <typename CheckpointDataNVAllocator,
+            typename Dispatch,
+            typename std::enable_if<std::is_same<typename CheckpointDataNVAllocator::value_type, VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator>
+    Queue::getCheckpointDataNV( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointDataNV && "Function requires <VK_NV_device_diagnostic_checkpoints>" );
+# endif + + std::vector checkpointData; + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; + } + + // wrapper function for command vkGetQueueCheckpointDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointDataNV.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointDataNV && "Function requires " ); +# endif + + std::vector checkpointData( checkpointDataNVAllocator ); + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetQueueCheckpointData2NV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointData2NV.html + template + VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetQueueCheckpointData2NV( static_cast( m_queue ), pCheckpointDataCount, reinterpret_cast( pCheckpointData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetQueueCheckpointData2NV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointData2NV.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointData2NV( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function requires " ); +# endif + + std::vector checkpointData; + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; + } + + // wrapper function for command vkGetQueueCheckpointData2NV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointData2NV.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointData2NV( 
CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function requires " ); +# endif + + std::vector checkpointData( checkpointData2NVAllocator ); + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_timeline_semaphore === + + // wrapper function for command vkGetSemaphoreCounterValueKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreCounterValueKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + uint64_t * pValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetSemaphoreCounterValueKHR( static_cast( m_device ), static_cast( semaphore ), pValue ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSemaphoreCounterValueKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreCounterValueKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSemaphoreCounterValueKHR && "Function requires or " ); +# endif + + uint64_t value; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast( semaphore ), &value ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkWaitSemaphoresKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitSemaphoresKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkWaitSemaphoresKHR( static_cast( m_device ), reinterpret_cast( pWaitInfo ), timeout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWaitSemaphoresKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitSemaphoresKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# 
if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWaitSemaphoresKHR && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast( &waitInfo ), timeout ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSignalSemaphoreKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSignalSemaphoreKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkSignalSemaphoreKHR( static_cast( m_device ), reinterpret_cast( pSignalInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSignalSemaphoreKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSignalSemaphoreKHR.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSignalSemaphoreKHR && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( &signalInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_INTEL_performance_query === + + // wrapper function for command vkInitializePerformanceApiINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkInitializePerformanceApiINTEL.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( + const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkInitializePerformanceApiINTEL( static_cast( m_device ), + reinterpret_cast( pInitializeInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkInitializePerformanceApiINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkInitializePerformanceApiINTEL.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkInitializePerformanceApiINTEL && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast( &initializeInfo ) ) ); + 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUninitializePerformanceApiINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUninitializePerformanceApiINTEL.html + template + VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUninitializePerformanceApiINTEL( static_cast( m_device ) ); + } + + // wrapper function for command vkCmdSetPerformanceMarkerINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceMarkerINTEL.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCmdSetPerformanceMarkerINTEL( static_cast( m_commandBuffer ), + reinterpret_cast( pMarkerInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetPerformanceMarkerINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceMarkerINTEL.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceMarkerINTEL && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast( &markerInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetPerformanceStreamMarkerINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceStreamMarkerINTEL.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCmdSetPerformanceStreamMarkerINTEL( static_cast( m_commandBuffer ), + reinterpret_cast( pMarkerInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetPerformanceStreamMarkerINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceStreamMarkerINTEL.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceStreamMarkerINTEL && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast( &markerInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetPerformanceOverrideINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceOverrideINTEL.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCmdSetPerformanceOverrideINTEL( static_cast( m_commandBuffer ), + reinterpret_cast( pOverrideInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetPerformanceOverrideINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceOverrideINTEL.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceOverrideINTEL && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast( &overrideInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkAcquirePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquirePerformanceConfigurationINTEL.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkAcquirePerformanceConfigurationINTEL( static_cast( m_device ), + reinterpret_cast( pAcquireInfo ), + reinterpret_cast( pConfiguration ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquirePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquirePerformanceConfigurationINTEL.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquirePerformanceConfigurationINTEL && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkAcquirePerformanceConfigurationINTEL( m_device, + reinterpret_cast( &acquireInfo ), + reinterpret_cast( &configuration ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( configuration ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkAcquirePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquirePerformanceConfigurationINTEL.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::acquirePerformanceConfigurationINTELUnique( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquirePerformanceConfigurationINTEL && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkAcquirePerformanceConfigurationINTEL( m_device, + reinterpret_cast( &acquireInfo ), + reinterpret_cast( &configuration ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( configuration, detail::ObjectRelease( *this, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkReleasePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleasePerformanceConfigurationINTEL.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkReleasePerformanceConfigurationINTEL( static_cast( m_device ), static_cast( configuration ) ) ); + } +#else + // wrapper function for command vkReleasePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleasePerformanceConfigurationINTEL.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkReleasePerformanceConfigurationINTEL && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" ); + + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkReleasePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleasePerformanceConfigurationINTEL.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkReleasePerformanceConfigurationINTEL( static_cast( m_device ), static_cast( configuration ) ) ); + } +#else + // wrapper function for command vkReleasePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleasePerformanceConfigurationINTEL.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkReleasePerformanceConfigurationINTEL && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::release" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueSetPerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSetPerformanceConfigurationINTEL.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkQueueSetPerformanceConfigurationINTEL( static_cast( m_queue ), static_cast( configuration ) ) ); + } +#else + // wrapper function for command vkQueueSetPerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSetPerformanceConfigurationINTEL.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueSetPerformanceConfigurationINTEL && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast( configuration ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkGetPerformanceParameterINTEL, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPerformanceParameterINTEL.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPerformanceParameterINTEL( + static_cast( m_device ), static_cast( parameter ), reinterpret_cast( pValue ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPerformanceParameterINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPerformanceParameterINTEL.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPerformanceParameterINTEL && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPerformanceParameterINTEL( + m_device, static_cast( parameter ), reinterpret_cast( &value ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_AMD_display_native_hdr === + + // wrapper function for command vkSetLocalDimmingAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLocalDimmingAMD.html + template + VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, + VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkSetLocalDimmingAMD( static_cast( m_device ), static_cast( swapChain ), static_cast( localDimmingEnable ) ); + } + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + // wrapper function for command vkCreateImagePipeSurfaceFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImagePipeSurfaceFUCHSIA.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateImagePipeSurfaceFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImagePipeSurfaceFUCHSIA.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & 
createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateImagePipeSurfaceFUCHSIA && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateImagePipeSurfaceFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImagePipeSurfaceFUCHSIA.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateImagePipeSurfaceFUCHSIA && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + // wrapper function for command vkCreateMetalSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMetalSurfaceEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateMetalSurfaceEXT( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateMetalSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMetalSurfaceEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateMetalSurfaceEXT && "Function 
requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateMetalSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateMetalSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMetalSurfaceEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateMetalSurfaceEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateMetalSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + + // wrapper function for command vkGetPhysicalDeviceFragmentShadingRatesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFragmentShadingRatesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceFragmentShadingRatesKHR( static_cast( m_physicalDevice ), + pFragmentShadingRateCount, + reinterpret_cast( pFragmentShadingRates ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceFragmentShadingRatesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFragmentShadingRatesKHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFragmentShadingRatesKHR && + "Function requires " ); +# endif + + std::vector fragmentShadingRates; + uint32_t fragmentShadingRateCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = + static_cast( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && 
fragmentShadingRateCount ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + result = static_cast( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( + m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast( fragmentShadingRates.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); + VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); + if ( fragmentShadingRateCount < fragmentShadingRates.size() ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fragmentShadingRates ) ); + } + + // wrapper function for command vkGetPhysicalDeviceFragmentShadingRatesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFragmentShadingRatesKHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFragmentShadingRatesKHR && + "Function requires " ); +# endif + + std::vector fragmentShadingRates( + physicalDeviceFragmentShadingRateKHRAllocator ); + uint32_t fragmentShadingRateCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = + static_cast( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && fragmentShadingRateCount ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + result = static_cast( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( + m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast( fragmentShadingRates.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); + VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); + if ( fragmentShadingRateCount < fragmentShadingRates.size() ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fragmentShadingRates ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetFragmentShadingRateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFragmentShadingRateKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetFragmentShadingRateKHR( static_cast( m_commandBuffer ), + reinterpret_cast( pFragmentSize ), + reinterpret_cast( combinerOps ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetFragmentShadingRateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFragmentShadingRateKHR.html + template 
+  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D &                       fragmentSize,
+                                                                   const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
+                                                                   Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdSetFragmentShadingRateKHR && "Function requires <VK_KHR_fragment_shading_rate>" );
+#  endif
+
+    d.vkCmdSetFragmentShadingRateKHR(
+      m_commandBuffer, reinterpret_cast<const VkExtent2D *>( &fragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_dynamic_rendering_local_read ===
+
+  // wrapper function for command vkCmdSetRenderingAttachmentLocationsKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingAttachmentLocationsKHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo * pLocationInfo,
+                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetRenderingAttachmentLocationsKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                               reinterpret_cast<const VkRenderingAttachmentLocationInfo *>( pLocationInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdSetRenderingAttachmentLocationsKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingAttachmentLocationsKHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo,
+                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdSetRenderingAttachmentLocationsKHR &&
+                       "Function requires <VK_VERSION_1_4> or <VK_KHR_dynamic_rendering_local_read>" );
+#  endif
+
+    d.vkCmdSetRenderingAttachmentLocationsKHR( m_commandBuffer, reinterpret_cast<const VkRenderingAttachmentLocationInfo *>( &locationInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkCmdSetRenderingInputAttachmentIndicesKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingInputAttachmentIndicesKHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo,
+                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetRenderingInputAttachmentIndicesKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                  reinterpret_cast<const VkRenderingInputAttachmentIndexInfo *>( pInputAttachmentIndexInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdSetRenderingInputAttachmentIndicesKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingInputAttachmentIndicesKHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo,
+                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdSetRenderingInputAttachmentIndicesKHR &&
+                       "Function requires <VK_VERSION_1_4> or <VK_KHR_dynamic_rendering_local_read>" );
+#  endif
+
+    d.vkCmdSetRenderingInputAttachmentIndicesKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInputAttachmentIndexInfo *>( &inputAttachmentIndexInfo ) );
+  }
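For orientation only, here is a minimal usage sketch (editorial addition, not part of the diffed header) of the enhanced wrappers above. It assumes a `vk::CommandBuffer cmd` in the recording state inside a dynamic rendering pass, on a device with VK_KHR_fragment_shading_rate and VK_KHR_dynamic_rendering_local_read enabled; the variable names are placeholders.

    // Request a 2x2 coarse fragment shading rate; keep the pipeline and primitive combiners as-is.
    vk::Extent2D                         fragmentSize{ 2, 2 };
    vk::FragmentShadingRateCombinerOpKHR combinerOps[2] = { vk::FragmentShadingRateCombinerOpKHR::eKeep,
                                                            vk::FragmentShadingRateCombinerOpKHR::eKeep };
    cmd.setFragmentShadingRateKHR( fragmentSize, combinerOps );

    // Remap the pass's two color attachments to locations 1 and 0 for subsequent draws.
    uint32_t                             locations[2] = { 1, 0 };
    vk::RenderingAttachmentLocationInfo  locationInfo{};
    locationInfo.colorAttachmentCount      = 2;
    locationInfo.pColorAttachmentLocations = locations;
    cmd.setRenderingAttachmentLocationsKHR( locationInfo );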
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_buffer_device_address === + + // wrapper function for command vkGetBufferDeviceAddressEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddressEXT.html + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetBufferDeviceAddressEXT( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferDeviceAddressEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddressEXT.html + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddressEXT && + "Function requires or or " ); +# endif + + VkDeviceAddress result = d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast( &info ) ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_tooling_info === + + // wrapper function for command vkGetPhysicalDeviceToolPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolPropertiesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( + static_cast( m_physicalDevice ), pToolCount, reinterpret_cast( pToolProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceToolPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolPropertiesEXT.html + template < + typename PhysicalDeviceToolPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolPropertiesEXT && + "Function requires or " ); +# endif + + std::vector toolProperties; + uint32_t toolCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = static_cast( + d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); + VULKAN_HPP_ASSERT( toolCount <= 
toolProperties.size() ); + if ( toolCount < toolProperties.size() ) + { + toolProperties.resize( toolCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceToolPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolPropertiesEXT.html + template < + typename PhysicalDeviceToolPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolPropertiesEXT && + "Function requires or " ); +# endif + + std::vector toolProperties( + physicalDeviceToolPropertiesAllocator ); + uint32_t toolCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = static_cast( + d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + if ( toolCount < toolProperties.size() ) + { + toolProperties.resize( toolCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_present_wait === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWaitForPresentKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForPresentKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t presentId, + uint64_t timeout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkWaitForPresentKHR( static_cast( m_device ), static_cast( swapchain ), presentId, timeout ) ); + } +#else + // wrapper function for command vkWaitForPresentKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForPresentKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWaitForPresentKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkWaitForPresentKHR( m_device, static_cast( swapchain ), presentId, timeout ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, 
VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + + return static_cast( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_cooperative_matrix === + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( + uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesNV.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV && + "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesNV.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV && + "Function requires " ); +# endif + + std::vector properties( + cooperativeMatrixPropertiesNVAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) 
+ { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_coverage_reduction_mode === + + // wrapper function for command vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( + uint32_t * pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + static_cast( m_physicalDevice ), pCombinationCount, reinterpret_cast( pCombinations ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV && + "Function requires " ); +# endif + + std::vector combinations; + uint32_t combinationCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && combinationCount ) + { + combinations.resize( combinationCount ); + result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, &combinationCount, reinterpret_cast( combinations.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); + VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); + if ( combinationCount < combinations.size() ) + { + combinations.resize( combinationCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( combinations ) ); + } + + // wrapper function for command vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV.html 
+ template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( + FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV && + "Function requires " ); +# endif + + std::vector combinations( + framebufferMixedSamplesCombinationNVAllocator ); + uint32_t combinationCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && combinationCount ) + { + combinations.resize( combinationCount ); + result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, &combinationCount, reinterpret_cast( combinations.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); + VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); + if ( combinationCount < combinations.size() ) + { + combinations.resize( combinationCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( combinations ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModes2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModes2EXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast( m_physicalDevice ), + reinterpret_cast( pSurfaceInfo ), + pPresentModeCount, + reinterpret_cast( pPresentModes ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModes2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModes2EXT.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModes2EXT && + "Function requires " ); +# endif + + std::vector presentModes; + uint32_t presentModeCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, reinterpret_cast( &surfaceInfo ), &presentModeCount, nullptr ) ); + if ( 
( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast( + d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); + } + + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModes2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModes2EXT.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModes2EXT && + "Function requires " ); +# endif + + std::vector presentModes( presentModeKHRAllocator ); + uint32_t presentModeCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, reinterpret_cast( &surfaceInfo ), &presentModeCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast( + d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireFullScreenExclusiveModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireFullScreenExclusiveModeEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkAcquireFullScreenExclusiveModeEXT( static_cast( m_device ), static_cast( swapchain ) ) ); + } +# else + // wrapper function for command vkAcquireFullScreenExclusiveModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireFullScreenExclusiveModeEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + 
Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireFullScreenExclusiveModeEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkReleaseFullScreenExclusiveModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseFullScreenExclusiveModeEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkReleaseFullScreenExclusiveModeEXT( static_cast( m_device ), static_cast( swapchain ) ) ); + } +# else + // wrapper function for command vkReleaseFullScreenExclusiveModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseFullScreenExclusiveModeEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkReleaseFullScreenExclusiveModeEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkGetDeviceGroupSurfacePresentModes2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupSurfacePresentModes2EXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( static_cast( m_device ), + reinterpret_cast( pSurfaceInfo ), + reinterpret_cast( pModes ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceGroupSurfacePresentModes2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupSurfacePresentModes2EXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceGroupSurfacePresentModes2EXT && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( + m_device, reinterpret_cast( &surfaceInfo ), reinterpret_cast( &modes ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( modes ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + + // wrapper function for command vkCreateHeadlessSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateHeadlessSurfaceEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateHeadlessSurfaceEXT( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateHeadlessSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateHeadlessSurfaceEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateHeadlessSurfaceEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateHeadlessSurfaceEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateHeadlessSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateHeadlessSurfaceEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateHeadlessSurfaceEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateHeadlessSurfaceEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + 
reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_buffer_device_address === + + // wrapper function for command vkGetBufferDeviceAddressKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddressKHR.html + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetBufferDeviceAddressKHR( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferDeviceAddressKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddressKHR.html + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddressKHR && + "Function requires or or " ); +# endif + + VkDeviceAddress result = d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast( &info ) ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetBufferOpaqueCaptureAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureAddressKHR.html + template + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetBufferOpaqueCaptureAddressKHR( static_cast( m_device ), reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferOpaqueCaptureAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureAddressKHR.html + template + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureAddressKHR && + "Function requires or " ); +# endif + + uint64_t result = d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast( &info ) ); + + return result; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceMemoryOpaqueCaptureAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryOpaqueCaptureAddressKHR.html + template + VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( static_cast( m_device ), + reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceMemoryOpaqueCaptureAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryOpaqueCaptureAddressKHR.html + template + VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryOpaqueCaptureAddressKHR && + "Function requires or " ); +# endif + + uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast( &info ) ); + + return result; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_line_rasterization === + + // wrapper function for command vkCmdSetLineStippleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStippleEXT.html + template + VULKAN_HPP_INLINE void + CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLineStippleEXT( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); + } + + //=== VK_EXT_host_query_reset === + + // wrapper function for command vkResetQueryPoolEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetQueryPoolEXT.html + template + VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkResetQueryPoolEXT( static_cast( m_device ), static_cast( queryPool ), firstQuery, queryCount ); + } + + //=== VK_EXT_extended_dynamic_state === + + // wrapper function for command vkCmdSetCullModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCullModeEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCullModeEXT( static_cast( m_commandBuffer ), static_cast( cullMode ) ); + } + + // wrapper function for command vkCmdSetFrontFaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFrontFaceEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetFrontFaceEXT( static_cast( m_commandBuffer ), static_cast( frontFace ) ); + } + + // wrapper function for command vkCmdSetPrimitiveTopologyEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveTopologyEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetPrimitiveTopologyEXT( 
      static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) );
+  }
+
+  // wrapper function for command vkCmdSetViewportWithCountEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWithCountEXT.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t                               viewportCount,
+                                                                 const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
+                                                                 Dispatch const &                       d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetViewportWithCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdSetViewportWithCountEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWithCountEXT.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdSetViewportWithCountEXT &&
+                       "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
+#  endif
+
+    d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkCmdSetScissorWithCountEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissorWithCountEXT.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetScissorWithCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdSetScissorWithCountEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissorWithCountEXT.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
+                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdSetScissorWithCountEXT &&
+                       "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
+#  endif
+
+    d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkCmdBindVertexBuffers2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers2EXT.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t                                 firstBinding,
+                                                               uint32_t                                 bindingCount,
+                                                               const VULKAN_HPP_NAMESPACE::Buffer *     pBuffers,
+                                                               const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
+                                                               const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
+                                                               const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
+                                                               Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindVertexBuffers2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                  firstBinding,
+                                  bindingCount,
+                                  reinterpret_cast<const VkBuffer *>( pBuffers ),
+                                  reinterpret_cast<const VkDeviceSize *>( pOffsets ),
+                                  reinterpret_cast<const VkDeviceSize *>( pSizes ),
+                                  reinterpret_cast<const VkDeviceSize *>( pStrides ) );
+  }
+
+#ifndef
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindVertexBuffers2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers2EXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes, + VULKAN_HPP_NAMESPACE::ArrayProxy const & strides, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers2EXT && + "Function requires or or " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); + VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" ); + } + if ( !strides.empty() && buffers.size() != strides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ), + reinterpret_cast( strides.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDepthTestEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthTestEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthTestEnableEXT( static_cast( m_commandBuffer ), static_cast( depthTestEnable ) ); + } + + // wrapper function for command vkCmdSetDepthWriteEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthWriteEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthWriteEnableEXT( static_cast( m_commandBuffer ), static_cast( depthWriteEnable ) ); + } + + // wrapper function for command vkCmdSetDepthCompareOpEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthCompareOpEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthCompareOpEXT( static_cast( m_commandBuffer ), static_cast( depthCompareOp ) ); + } + + // wrapper function for command vkCmdSetDepthBoundsTestEnableEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBoundsTestEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBoundsTestEnableEXT( static_cast( m_commandBuffer ), static_cast( depthBoundsTestEnable ) ); + } + + // wrapper function for command vkCmdSetStencilTestEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilTestEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilTestEnableEXT( static_cast( m_commandBuffer ), static_cast( stencilTestEnable ) ); + } + + // wrapper function for command vkCmdSetStencilOpEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilOpEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilOpEXT( static_cast( m_commandBuffer ), + static_cast( faceMask ), + static_cast( failOp ), + static_cast( passOp ), + static_cast( depthFailOp ), + static_cast( compareOp ) ); + } + + //=== VK_KHR_deferred_host_operations === + + // wrapper function for command vkCreateDeferredOperationKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDeferredOperationKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDeferredOperationKHR( static_cast( m_device ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDeferredOperation ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDeferredOperationKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDeferredOperationKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createDeferredOperationKHR( Optional allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDeferredOperationKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDeferredOperationKHR( + m_device, + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &deferredOperation ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( deferredOperation ) ); + } + +# ifndef 
VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDeferredOperationKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDeferredOperationKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createDeferredOperationKHRUnique( Optional allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDeferredOperationKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDeferredOperationKHR( + m_device, + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &deferredOperation ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( deferredOperation, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDeferredOperationKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDeferredOperationKHR.html + template + VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDeferredOperationKHR( + static_cast( m_device ), static_cast( operation ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDeferredOperationKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDeferredOperationKHR.html + template + VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDeferredOperationKHR && "Function requires " ); +# endif + + d.vkDestroyDeferredOperationKHR( + m_device, + static_cast( operation ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDeferredOperationKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDeferredOperationKHR.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDeferredOperationKHR( + static_cast( m_device ), static_cast( operation ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDeferredOperationKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDeferredOperationKHR.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Optional allocator, + Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDeferredOperationKHR && "Function requires " ); +# endif + + d.vkDestroyDeferredOperationKHR( + m_device, + static_cast( operation ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeferredOperationMaxConcurrencyKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeferredOperationMaxConcurrencyKHR.html + template + VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetDeferredOperationMaxConcurrencyKHR( static_cast( m_device ), static_cast( operation ) ); + } + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeferredOperationResultKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeferredOperationResultKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDeferredOperationResultKHR( static_cast( m_device ), static_cast( operation ) ) ); + } +#else + // wrapper function for command vkGetDeferredOperationResultKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeferredOperationResultKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeferredOperationResultKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetDeferredOperationResultKHR( m_device, static_cast( operation ) ) ); + + return static_cast( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDeferredOperationJoinKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDeferredOperationJoinKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkDeferredOperationJoinKHR( static_cast( m_device ), static_cast( operation ) ) ); + } +#else + // wrapper function for command vkDeferredOperationJoinKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDeferredOperationJoinKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDeferredOperationJoinKHR && "Function requires " 
); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } ); + + return static_cast( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_pipeline_executable_properties === + + // wrapper function for command vkGetPipelineExecutablePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutablePropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPipelineExecutablePropertiesKHR( static_cast( m_device ), + reinterpret_cast( pPipelineInfo ), + pExecutableCount, + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineExecutablePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutablePropertiesKHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineExecutablePropertiesKHR && + "Function requires " ); +# endif + + std::vector properties; + uint32_t executableCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && executableCount ) + { + properties.resize( executableCount ); + result = static_cast( + d.vkGetPipelineExecutablePropertiesKHR( m_device, + reinterpret_cast( &pipelineInfo ), + &executableCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); + VULKAN_HPP_ASSERT( executableCount <= properties.size() ); + if ( executableCount < properties.size() ) + { + properties.resize( executableCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetPipelineExecutablePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutablePropertiesKHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, + PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineExecutablePropertiesKHR && + "Function requires " ); +# endif + + std::vector properties( + pipelineExecutablePropertiesKHRAllocator ); + uint32_t executableCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && executableCount ) + { + properties.resize( executableCount ); + result = static_cast( + d.vkGetPipelineExecutablePropertiesKHR( m_device, + reinterpret_cast( &pipelineInfo ), + &executableCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); + VULKAN_HPP_ASSERT( executableCount <= properties.size() ); + if ( executableCount < properties.size() ) + { + properties.resize( executableCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPipelineExecutableStatisticsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableStatisticsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPipelineExecutableStatisticsKHR( static_cast( m_device ), + reinterpret_cast( pExecutableInfo ), + pStatisticCount, + reinterpret_cast( pStatistics ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineExecutableStatisticsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableStatisticsKHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableStatisticsKHR && + "Function requires " ); +# endif + + std::vector statistics; + uint32_t statisticCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( + m_device, reinterpret_cast( &executableInfo ), &statisticCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && statisticCount ) + { + statistics.resize( statisticCount ); + result = static_cast( + d.vkGetPipelineExecutableStatisticsKHR( m_device, + reinterpret_cast( &executableInfo ), + &statisticCount, + reinterpret_cast( statistics.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); + VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); + if ( 
statisticCount < statistics.size() ) + { + statistics.resize( statisticCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( statistics ) ); + } + + // wrapper function for command vkGetPipelineExecutableStatisticsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableStatisticsKHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, + PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableStatisticsKHR && + "Function requires " ); +# endif + + std::vector statistics( + pipelineExecutableStatisticKHRAllocator ); + uint32_t statisticCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( + m_device, reinterpret_cast( &executableInfo ), &statisticCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && statisticCount ) + { + statistics.resize( statisticCount ); + result = static_cast( + d.vkGetPipelineExecutableStatisticsKHR( m_device, + reinterpret_cast( &executableInfo ), + &statisticCount, + reinterpret_cast( statistics.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); + VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); + if ( statisticCount < statistics.size() ) + { + statistics.resize( statisticCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( statistics ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPipelineExecutableInternalRepresentationsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableInternalRepresentationsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPipelineExecutableInternalRepresentationsKHR( static_cast( m_device ), + reinterpret_cast( pExecutableInfo ), + pInternalRepresentationCount, + reinterpret_cast( pInternalRepresentations ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineExecutableInternalRepresentationsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableInternalRepresentationsKHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableInternalRepresentationsKHR && + "Function requires " ); +# endif + + std::vector + internalRepresentations; + uint32_t internalRepresentationCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, reinterpret_cast( &executableInfo ), &internalRepresentationCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && internalRepresentationCount ) + { + internalRepresentations.resize( internalRepresentationCount ); + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + reinterpret_cast( internalRepresentations.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); + VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); + if ( internalRepresentationCount < internalRepresentations.size() ) + { + internalRepresentations.resize( internalRepresentationCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( internalRepresentations ) ); + } + + // wrapper function for command vkGetPipelineExecutableInternalRepresentationsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableInternalRepresentationsKHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + Device::getPipelineExecutableInternalRepresentationsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, + PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableInternalRepresentationsKHR && + "Function requires " ); +# endif + + std::vector + internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator ); + uint32_t internalRepresentationCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, reinterpret_cast( &executableInfo ), &internalRepresentationCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && internalRepresentationCount ) + { + internalRepresentations.resize( internalRepresentationCount ); + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + reinterpret_cast( internalRepresentations.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); + VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); + if ( internalRepresentationCount < internalRepresentations.size() ) + { + internalRepresentations.resize( internalRepresentationCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( internalRepresentations ) ); + } +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_host_image_copy === + + // wrapper function for command vkCopyMemoryToImageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToImageEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCopyMemoryToImageEXT( static_cast( m_device ), reinterpret_cast( pCopyMemoryToImageInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyMemoryToImageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToImageEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyMemoryToImageEXT && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast( ©MemoryToImageInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyImageToMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToMemoryEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCopyImageToMemoryEXT( static_cast( m_device ), reinterpret_cast( pCopyImageToMemoryInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyImageToMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToMemoryEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyImageToMemoryEXT && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast( ©ImageToMemoryInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyImageToImageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToImageEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo, + Dispatch const 
& d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCopyImageToImageEXT( static_cast( m_device ), reinterpret_cast( pCopyImageToImageInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyImageToImageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToImageEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyImageToImageEXT && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCopyImageToImageEXT( m_device, reinterpret_cast( ©ImageToImageInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkTransitionImageLayoutEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTransitionImageLayoutEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::transitionImageLayoutEXT( uint32_t transitionCount, + const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkTransitionImageLayoutEXT( + static_cast( m_device ), transitionCount, reinterpret_cast( pTransitions ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkTransitionImageLayoutEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTransitionImageLayoutEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkTransitionImageLayoutEXT && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkTransitionImageLayoutEXT( m_device, transitions.size(), reinterpret_cast( transitions.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSubresourceLayout2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2EXT.html + template + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSubresourceLayout2EXT( static_cast( m_device ), + static_cast( image ), + reinterpret_cast( pSubresource ), + reinterpret_cast( 
pLayout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSubresourceLayout2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2EXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 Device::getImageSubresourceLayout2EXT( + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkGetImageSubresourceLayout2EXT && + "Function requires or or or " ); +# endif + + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + d.vkGetImageSubresourceLayout2EXT( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return layout; + } + + // wrapper function for command vkGetImageSubresourceLayout2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2EXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getImageSubresourceLayout2EXT( + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkGetImageSubresourceLayout2EXT && + "Function requires or or or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + d.vkGetImageSubresourceLayout2EXT( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_map_memory2 === + + // wrapper function for command vkMapMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo, + void ** ppData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkMapMemory2KHR( static_cast( m_device ), reinterpret_cast( pMemoryMapInfo ), ppData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkMapMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkMapMemory2KHR && "Function requires or " ); +# endif + + void * pData; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkMapMemory2KHR( m_device, reinterpret_cast( &memoryMapInfo ), &pData ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function 
for command vkUnmapMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkUnmapMemory2KHR( static_cast( m_device ), reinterpret_cast( pMemoryUnmapInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUnmapMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory2KHR.html + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUnmapMemory2KHR && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkUnmapMemory2KHR( m_device, reinterpret_cast( &memoryUnmapInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_swapchain_maintenance1 === + + // wrapper function for command vkReleaseSwapchainImagesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseSwapchainImagesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT * pReleaseInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkReleaseSwapchainImagesEXT( static_cast( m_device ), reinterpret_cast( pReleaseInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkReleaseSwapchainImagesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseSwapchainImagesEXT.html + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkReleaseSwapchainImagesEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast( &releaseInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_device_generated_commands === + + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsNV.html + template + VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetGeneratedCommandsMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsNV && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsNV && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPreprocessGeneratedCommandsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPreprocessGeneratedCommandsNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPreprocessGeneratedCommandsNV( static_cast( m_commandBuffer ), + reinterpret_cast( pGeneratedCommandsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPreprocessGeneratedCommandsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPreprocessGeneratedCommandsNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPreprocessGeneratedCommandsNV && "Function requires " ); +# endif + + d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, 
reinterpret_cast( &generatedCommandsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdExecuteGeneratedCommandsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteGeneratedCommandsNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdExecuteGeneratedCommandsNV( static_cast( m_commandBuffer ), + static_cast( isPreprocessed ), + reinterpret_cast( pGeneratedCommandsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdExecuteGeneratedCommandsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteGeneratedCommandsNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdExecuteGeneratedCommandsNV && "Function requires " ); +# endif + + d.vkCmdExecuteGeneratedCommandsNV( + m_commandBuffer, static_cast( isPreprocessed ), reinterpret_cast( &generatedCommandsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindPipelineShaderGroupNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindPipelineShaderGroupNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t groupIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindPipelineShaderGroupNV( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ), groupIndex ); + } + + // wrapper function for command vkCreateIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateIndirectCommandsLayoutNV( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pIndirectCommandsLayout ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + 
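// A minimal usage sketch for the VK_NV_device_generated_commands wrappers covered here. This is an
// illustrative outline only, not part of the generated header: it assumes <vulkan/vulkan.hpp> is
// included, the default dispatcher has the extension's entry points loaded, and a vk::Device
// `device` plus a recording vk::CommandBuffer `cmd` were created with
// VK_NV_device_generated_commands enabled; all create-info fields are deliberately left unfilled.
void deviceGeneratedCommandsSketch( vk::Device device, vk::CommandBuffer cmd )
{
  // Query how much preprocess memory the generated commands would need.
  vk::GeneratedCommandsMemoryRequirementsInfoNV memReqInfo{};  // pipeline/layout fields omitted in this sketch
  vk::MemoryRequirements2 memReqs = device.getGeneratedCommandsMemoryRequirementsNV( memReqInfo );
  (void)memReqs;  // a real application would allocate the preprocess buffer from this

  // Create the indirect commands layout; the Unique variant destroys it automatically on scope exit.
  vk::IndirectCommandsLayoutCreateInfoNV layoutInfo{};         // token/stream descriptions omitted in this sketch
  auto indirectCommandsLayout = device.createIndirectCommandsLayoutNVUnique( layoutInfo );

  // Optionally preprocess, then execute the generated commands on the command buffer.
  vk::GeneratedCommandsInfoNV generatedInfo{};                 // pipeline, streams, preprocess buffer omitted
  cmd.preprocessGeneratedCommandsNV( generatedInfo );
  cmd.executeGeneratedCommandsNV( VK_TRUE, generatedInfo );
}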
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectCommandsLayoutNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( indirectCommandsLayout ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectCommandsLayoutNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + indirectCommandsLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutNV.html + template + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectCommandsLayoutNV( static_cast( m_device ), + static_cast( indirectCommandsLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutNV.html + template + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutNV && "Function requires " ); +# endif + + d.vkDestroyIndirectCommandsLayoutNV( + m_device, + static_cast( indirectCommandsLayout ), + 
reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutNV.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectCommandsLayoutNV( static_cast( m_device ), + static_cast( indirectCommandsLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutNV.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutNV && "Function requires " ); +# endif + + d.vkDestroyIndirectCommandsLayoutNV( + m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_depth_bias_control === + + // wrapper function for command vkCmdSetDepthBias2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBias2EXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT * pDepthBiasInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBias2EXT( static_cast( m_commandBuffer ), reinterpret_cast( pDepthBiasInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetDepthBias2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBias2EXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetDepthBias2EXT && "Function requires " ); +# endif + + d.vkCmdSetDepthBias2EXT( m_commandBuffer, reinterpret_cast( &depthBiasInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_acquire_drm_display === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireDrmDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireDrmDisplayEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkAcquireDrmDisplayEXT( static_cast( m_physicalDevice ), drmFd, static_cast( display ) ) ); + } +#else + // wrapper function for command vkAcquireDrmDisplayEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireDrmDisplayEXT.html + template + VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireDrmDisplayEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast( display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkGetDrmDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDrmDisplayEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, + uint32_t connectorId, + VULKAN_HPP_NAMESPACE::DisplayKHR * display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetDrmDisplayEXT( static_cast( m_physicalDevice ), drmFd, connectorId, reinterpret_cast( display ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDrmDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDrmDisplayEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDrmDisplayEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DisplayKHR display; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast( &display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkGetDrmDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDrmDisplayEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDrmDisplayEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DisplayKHR display; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast( &display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( display, detail::ObjectRelease( *this, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== 
VK_EXT_private_data === + + // wrapper function for command vkCreatePrivateDataSlotEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlotEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreatePrivateDataSlotEXT( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPrivateDataSlot ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreatePrivateDataSlotEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlotEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlotEXT && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreatePrivateDataSlotEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &privateDataSlot ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( privateDataSlot ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreatePrivateDataSlotEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlotEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlotEXT && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreatePrivateDataSlotEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &privateDataSlot ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( privateDataSlot, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPrivateDataSlotEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlotEXT.html + template + VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPrivateDataSlotEXT( + static_cast( m_device ), static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPrivateDataSlotEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlotEXT.html + template + VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlotEXT && "Function requires or " ); +# endif + + d.vkDestroyPrivateDataSlotEXT( + m_device, + static_cast( privateDataSlot ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetPrivateDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetPrivateDataEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkSetPrivateDataEXT( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); + } +#else + // wrapper function for command vkSetPrivateDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetPrivateDataEXT.html + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetPrivateDataEXT && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkSetPrivateDataEXT( m_device, static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkGetPrivateDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPrivateDataEXT.html + template + VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPrivateDataEXT( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPrivateDataEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPrivateDataEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPrivateDataEXT && "Function requires or " ); +# endif + + uint64_t data; + d.vkGetPrivateDataEXT( m_device, static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), &data ); + + return data; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_video_encode_queue === + + // wrapper function for command vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo, + VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( pQualityLevelInfo ), + reinterpret_cast( pQualityLevelProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR qualityLevelProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, + reinterpret_cast( &qualityLevelInfo ), + reinterpret_cast( &qualityLevelProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( qualityLevelProperties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR && + "Function requires " ); +# endif + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR & qualityLevelProperties = + structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, + reinterpret_cast( &qualityLevelInfo ), + reinterpret_cast( &qualityLevelProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetEncodedVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEncodedVideoSessionParametersKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo, + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo, + size_t * pDataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetEncodedVideoSessionParametersKHR( static_cast( m_device ), + reinterpret_cast( pVideoSessionParametersInfo ), + reinterpret_cast( pFeedbackInfo ), + pDataSize, + pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetEncodedVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEncodedVideoSessionParametersKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>>::type + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function requires " ); +# endif + + std::pair> data_; + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first; + std::vector & data = data_.second; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } + + // wrapper function for command vkGetEncodedVideoSessionParametersKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEncodedVideoSessionParametersKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>>::type + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function requires " ); +# endif + + std::pair> data_( + std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) ); + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first; + std::vector & data = data_.second; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } + + // wrapper function for command vkGetEncodedVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEncodedVideoSessionParametersKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, std::vector>>::type + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function requires " ); +# endif + + std::pair, std::vector> data_; + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = + data_.first.template get(); + std::vector & data = data_.second; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } + + // 
wrapper function for command vkGetEncodedVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEncodedVideoSessionParametersKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, std::vector>>::type + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function requires " ); +# endif + + std::pair, std::vector> data_( + std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) ); + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = + data_.first.template get(); + std::vector & data = data_.second; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEncodeVideoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEncodeVideoKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEncodeVideoKHR( static_cast( m_commandBuffer ), reinterpret_cast( pEncodeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEncodeVideoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEncodeVideoKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdEncodeVideoKHR && "Function requires " ); +# endif + + d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast( &encodeInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + // wrapper function for command vkCreateCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaModuleNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV * pCreateInfo, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CudaModuleNV * pModule, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateCudaModuleNV( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pModule ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaModuleNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCudaModuleNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::CudaModuleNV module; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateCudaModuleNV( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &module ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( module ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaModuleNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createCudaModuleNVUnique( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCudaModuleNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::CudaModuleNV module; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateCudaModuleNV( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &module ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNVUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( module, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetCudaModuleCacheNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCudaModuleCacheNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + size_t * pCacheSize, + void * pCacheData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetCudaModuleCacheNV( static_cast( m_device ), static_cast( module ), pCacheSize, pCacheData ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetCudaModuleCacheNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCudaModuleCacheNV.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCudaModuleCacheNV && "Function requires " ); +# endif + + std::vector cacheData; + size_t cacheSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetCudaModuleCacheNV( m_device, static_cast( module ), &cacheSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && cacheSize ) + { + cacheData.resize( cacheSize ); + result = static_cast( + d.vkGetCudaModuleCacheNV( m_device, static_cast( module ), &cacheSize, reinterpret_cast( cacheData.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" ); + VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() ); + if ( cacheSize < cacheData.size() ) + { + cacheData.resize( cacheSize ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( cacheData ) ); + } + + // wrapper function for command vkGetCudaModuleCacheNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCudaModuleCacheNV.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCudaModuleCacheNV && "Function requires " ); +# endif + + std::vector cacheData( uint8_tAllocator ); + size_t cacheSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetCudaModuleCacheNV( m_device, static_cast( module ), &cacheSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && cacheSize ) + { + cacheData.resize( cacheSize ); + result = static_cast( + d.vkGetCudaModuleCacheNV( m_device, static_cast( module ), &cacheSize, reinterpret_cast( cacheData.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" ); + VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() ); + if ( cacheSize < cacheData.size() ) + { + cacheData.resize( cacheSize ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( cacheData ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaFunctionNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CudaFunctionNV * pFunction, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateCudaFunctionNV( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFunction ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // 
wrapper function for command vkCreateCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaFunctionNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCudaFunctionNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::CudaFunctionNV function; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateCudaFunctionNV( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &function ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( function ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaFunctionNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createCudaFunctionNVUnique( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCudaFunctionNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::CudaFunctionNV function; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateCudaFunctionNV( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &function ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNVUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( function, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaModuleNV.html + template + VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCudaModuleNV( + static_cast( m_device ), static_cast( module ), reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaModuleNV.html + template + VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCudaModuleNV && "Function requires " ); +# endif + + d.vkDestroyCudaModuleNV( m_device, + static_cast( module ), + reinterpret_cast( static_cast( allocator 
) ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaModuleNV.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCudaModuleNV( + static_cast( m_device ), static_cast( module ), reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaModuleNV.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCudaModuleNV && "Function requires " ); +# endif + + d.vkDestroyCudaModuleNV( m_device, + static_cast( module ), + reinterpret_cast( static_cast( allocator ) ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaFunctionNV.html + template + VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCudaFunctionNV( + static_cast( m_device ), static_cast( function ), reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaFunctionNV.html + template + VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCudaFunctionNV && "Function requires " ); +# endif + + d.vkDestroyCudaFunctionNV( m_device, + static_cast( function ), + reinterpret_cast( static_cast( allocator ) ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaFunctionNV.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCudaFunctionNV( + static_cast( m_device ), static_cast( function ), reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaFunctionNV.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + Optional allocator, + Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkDestroyCudaFunctionNV && "Function requires <VK_NV_cuda_kernel_launch>" );
+# endif
+
+    d.vkDestroyCudaFunctionNV( m_device,
+                               static_cast<VkCudaFunctionNV>( function ),
+                               reinterpret_cast<const VkAllocationCallbacks *>(
+                                 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkCmdCudaLaunchKernelNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCudaLaunchKernelNV.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV * pLaunchInfo,
+                                                            Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCudaLaunchKernelNV( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCudaLaunchInfoNV *>( pLaunchInfo ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdCudaLaunchKernelNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCudaLaunchKernelNV.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo,
+                                                            Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdCudaLaunchKernelNV && "Function requires <VK_NV_cuda_kernel_launch>" );
+# endif
+
+    d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast<const VkCudaLaunchInfoNV *>( &launchInfo ) );
+  }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_QCOM_tile_shading ===
+
+  // wrapper function for command vkCmdDispatchTileQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchTileQCOM.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchTileQCOM( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDispatchTileQCOM( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+  }
+
+  // wrapper function for command vkCmdBeginPerTileExecutionQCOM, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginPerTileExecutionQCOM.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginPerTileExecutionQCOM( const VULKAN_HPP_NAMESPACE::PerTileBeginInfoQCOM * pPerTileBeginInfo,
+                                                                   Dispatch const &                                   d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginPerTileExecutionQCOM( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPerTileBeginInfoQCOM *>( pPerTileBeginInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdBeginPerTileExecutionQCOM, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginPerTileExecutionQCOM.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginPerTileExecutionQCOM( const VULKAN_HPP_NAMESPACE::PerTileBeginInfoQCOM & perTileBeginInfo,
+                                                                   Dispatch const &                                   d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdBeginPerTileExecutionQCOM && "Function requires <VK_QCOM_tile_shading>" );
+# endif
+
+    d.vkCmdBeginPerTileExecutionQCOM( m_commandBuffer, reinterpret_cast<const VkPerTileBeginInfoQCOM *>( &perTileBeginInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkCmdEndPerTileExecutionQCOM, see
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndPerTileExecutionQCOM.html + template + VULKAN_HPP_INLINE void CommandBuffer::endPerTileExecutionQCOM( const VULKAN_HPP_NAMESPACE::PerTileEndInfoQCOM * pPerTileEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndPerTileExecutionQCOM( static_cast( m_commandBuffer ), reinterpret_cast( pPerTileEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEndPerTileExecutionQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndPerTileExecutionQCOM.html + template + VULKAN_HPP_INLINE void CommandBuffer::endPerTileExecutionQCOM( const VULKAN_HPP_NAMESPACE::PerTileEndInfoQCOM & perTileEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdEndPerTileExecutionQCOM && "Function requires " ); +# endif + + d.vkCmdEndPerTileExecutionQCOM( m_commandBuffer, reinterpret_cast( &perTileEndInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + + // wrapper function for command vkExportMetalObjectsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkExportMetalObjectsEXT.html + template + VULKAN_HPP_INLINE void Device::exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkExportMetalObjectsEXT( static_cast( m_device ), reinterpret_cast( pMetalObjectsInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkExportMetalObjectsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkExportMetalObjectsEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT + Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkExportMetalObjectsEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo; + d.vkExportMetalObjectsEXT( m_device, reinterpret_cast( &metalObjectsInfo ) ); + + return metalObjectsInfo; + } + + // wrapper function for command vkExportMetalObjectsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkExportMetalObjectsEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkExportMetalObjectsEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get(); + d.vkExportMetalObjectsEXT( m_device, reinterpret_cast( &metalObjectsInfo ) ); + + return structureChain; + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + + // wrapper function for command vkCmdSetEvent2KHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent2KHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event                  event,
+                                                      const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
+                                                      Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetEvent2KHR(
+      static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdSetEvent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent2KHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event                   event,
+                                                      const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
+                                                      Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdSetEvent2KHR && "Function requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
+# endif
+
+    d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkCmdResetEvent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetEvent2KHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event               event,
+                                                        VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask,
+                                                        Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdResetEvent2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
+  }
+
+  // wrapper function for command vkCmdWaitEvents2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents2KHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( uint32_t                                     eventCount,
+                                                        const VULKAN_HPP_NAMESPACE::Event *          pEvents,
+                                                        const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,
+                                                        Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdWaitEvents2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                           eventCount,
+                           reinterpret_cast<const VkEvent *>( pEvents ),
+                           reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdWaitEvents2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents2KHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const &          events,
+                                                        VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdWaitEvents2KHR && "Function requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
+# endif
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
+# else
+    if ( events.size() != dependencyInfos.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
+    }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+    d.vkCmdWaitEvents2KHR( m_commandBuffer,
+                           events.size(),
+                           reinterpret_cast<const VkEvent *>( events.data() ),
+                           reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkCmdPipelineBarrier2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier2KHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
+                                                             Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdPipelineBarrier2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdPipelineBarrier2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier2KHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
+                                                             Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier2KHR && "Function requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
+# endif
+
+    d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkCmdWriteTimestamp2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteTimestamp2KHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
+                                                            VULKAN_HPP_NAMESPACE::QueryPool           queryPool,
+                                                            uint32_t                                  query,
+                                                            Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdWriteTimestamp2KHR(
+      static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
+  }
+
+  // wrapper function for command vkQueueSubmit2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit2KHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2KHR( uint32_t                                  submitCount,
+                                                                   const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,
+                                                                   VULKAN_HPP_NAMESPACE::Fence               fence,
+                                                                   Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkQueueSubmit2KHR( static_cast<VkQueue>( m_queue ), submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkQueueSubmit2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit2KHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2KHR(
+    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkQueueSubmit2KHR && "Function requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
+# endif
+
+    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+      d.vkQueueSubmit2KHR( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
+
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_descriptor_buffer ===
+
+  // wrapper function for command vkGetDescriptorSetLayoutSizeEXT, see
+  //
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSizeEXT.html + template + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, + VULKAN_HPP_NAMESPACE::DeviceSize * pLayoutSizeInBytes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDescriptorSetLayoutSizeEXT( + static_cast( m_device ), static_cast( layout ), reinterpret_cast( pLayoutSizeInBytes ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDescriptorSetLayoutSizeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSizeEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize + Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSizeEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DeviceSize layoutSizeInBytes; + d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast( layout ), reinterpret_cast( &layoutSizeInBytes ) ); + + return layoutSizeInBytes; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDescriptorSetLayoutBindingOffsetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutBindingOffsetEXT.html + template + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutBindingOffsetEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, + uint32_t binding, + VULKAN_HPP_NAMESPACE::DeviceSize * pOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDescriptorSetLayoutBindingOffsetEXT( + static_cast( m_device ), static_cast( layout ), binding, reinterpret_cast( pOffset ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDescriptorSetLayoutBindingOffsetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutBindingOffsetEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getDescriptorSetLayoutBindingOffsetEXT( + VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, uint32_t binding, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutBindingOffsetEXT && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DeviceSize offset; + d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast( layout ), binding, reinterpret_cast( &offset ) ); + + return offset; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDescriptorEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorEXT.html + template + VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT * pDescriptorInfo, + size_t dataSize, + void * pDescriptor, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDescriptorEXT( static_cast( m_device ), reinterpret_cast( pDescriptorInfo ), dataSize, 
pDescriptor ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDescriptorEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorEXT.html + template + VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, + size_t dataSize, + void * pDescriptor, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorEXT && "Function requires " ); +# endif + + d.vkGetDescriptorEXT( m_device, reinterpret_cast( &descriptorInfo ), dataSize, pDescriptor ); + } + + // wrapper function for command vkGetDescriptorEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DescriptorType Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorEXT && "Function requires " ); +# endif + + DescriptorType descriptor; + d.vkGetDescriptorEXT( + m_device, reinterpret_cast( &descriptorInfo ), sizeof( DescriptorType ), reinterpret_cast( &descriptor ) ); + + return descriptor; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindDescriptorBuffersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBuffersEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBuffersEXT( uint32_t bufferCount, + const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT * pBindingInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorBuffersEXT( + static_cast( m_commandBuffer ), bufferCount, reinterpret_cast( pBindingInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindDescriptorBuffersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBuffersEXT.html + template + VULKAN_HPP_INLINE void + CommandBuffer::bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindingInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorBuffersEXT && "Function requires " ); +# endif + + d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bindingInfos.size(), reinterpret_cast( bindingInfos.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDescriptorBufferOffsetsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDescriptorBufferOffsetsEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + uint32_t setCount, + const uint32_t * pBufferIndices, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDescriptorBufferOffsetsEXT( static_cast( 
m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + setCount, + pBufferIndices, + reinterpret_cast( pOffsets ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetDescriptorBufferOffsetsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDescriptorBufferOffsetsEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferIndices, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetDescriptorBufferOffsetsEXT && "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( bufferIndices.size() == offsets.size() ); +# else + if ( bufferIndices.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setDescriptorBufferOffsetsEXT: bufferIndices.size() != offsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + bufferIndices.size(), + bufferIndices.data(), + reinterpret_cast( offsets.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindDescriptorBufferEmbeddedSamplersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBufferEmbeddedSamplersEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorBufferEmbeddedSamplersEXT( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( layout ), set ); + } + + // wrapper function for command vkGetBufferOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getBufferOpaqueCaptureDescriptorDataEXT( + const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetBufferOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( pInfo ), pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( 
d.vkGetBufferOpaqueCaptureDescriptorDataEXT && + "Function requires " ); +# endif + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageOpaqueCaptureDescriptorDataEXT( + const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetImageOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( pInfo ), pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageOpaqueCaptureDescriptorDataEXT && + "Function requires " ); +# endif + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageViewOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewOpaqueCaptureDescriptorDataEXT( + const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( pInfo ), pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageViewOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 
== 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageViewOpaqueCaptureDescriptorDataEXT && + "Function requires " ); +# endif + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetSamplerOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSamplerOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSamplerOpaqueCaptureDescriptorDataEXT( + const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( pInfo ), pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSamplerOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSamplerOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSamplerOpaqueCaptureDescriptorDataEXT && + "Function requires " ); +# endif + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( + const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( pInfo ), pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const 
VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT && + "Function requires " ); +# endif + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + m_device, reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_fragment_shading_rate_enums === + + // wrapper function for command vkCmdSetFragmentShadingRateEnumNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFragmentShadingRateEnumNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetFragmentShadingRateEnumNV( static_cast( m_commandBuffer ), + static_cast( shadingRate ), + reinterpret_cast( combinerOps ) ); + } + + //=== VK_EXT_mesh_shader === + + // wrapper function for command vkCmdDrawMeshTasksEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksEXT.html + template + VULKAN_HPP_INLINE void + CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawMeshTasksEXT( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); + } + + // wrapper function for command vkCmdDrawMeshTasksIndirectEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawMeshTasksIndirectEXT( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } + + // wrapper function for command vkCmdDrawMeshTasksIndirectCountEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectCountEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawMeshTasksIndirectCountEXT( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + //=== 
VK_KHR_copy_commands2 ===
+
+  // wrapper function for command vkCmdCopyBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer2KHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,
+                                                        Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdCopyBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer2KHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,
+                                                        Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer2KHR && "Function requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
+# endif
+
+    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkCmdCopyImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage2KHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,
+                                                       Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdCopyImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage2KHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,
+                                                       Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdCopyImage2KHR && "Function requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
+# endif
+
+    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkCmdCopyBufferToImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage2KHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,
+                                                               Dispatch const &                                     d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyBufferToImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                  reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCmdCopyBufferToImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage2KHR.html
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,
+                                                               Dispatch const &                                     d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage2KHR && "Function requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
+# endif
+
+ d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast( ©BufferToImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyImageToBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImageToBuffer2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( pCopyImageToBufferInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyImageToBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer2KHR && "Function requires or " ); +# endif + + d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast( ©ImageToBufferInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBlitImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBlitImage2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pBlitImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBlitImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBlitImage2KHR && "Function requires or " ); +# endif + + d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast( &blitImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdResolveImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResolveImage2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pResolveImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdResolveImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() 
== VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdResolveImage2KHR && "Function requires or " ); +# endif + + d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast( &resolveImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_device_fault === + + // wrapper function for command vkGetDeviceFaultInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceFaultInfoEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts, + VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDeviceFaultInfoEXT( + static_cast( m_device ), reinterpret_cast( pFaultCounts ), reinterpret_cast( pFaultInfo ) ) ); + } +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireWinrtDisplayNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireWinrtDisplayNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkAcquireWinrtDisplayNV( static_cast( m_physicalDevice ), static_cast( display ) ) ); + } +# else + // wrapper function for command vkAcquireWinrtDisplayNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireWinrtDisplayNV.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireWinrtDisplayNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast( display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkGetWinrtDisplayNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetWinrtDisplayNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetWinrtDisplayNV( static_cast( m_physicalDevice ), deviceRelativeId, reinterpret_cast( pDisplay ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetWinrtDisplayNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetWinrtDisplayNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetWinrtDisplayNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DisplayKHR display; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast( &display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkGetWinrtDisplayNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetWinrtDisplayNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetWinrtDisplayNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DisplayKHR display; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast( &display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( display, detail::ObjectRelease( *this, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + // wrapper function for command vkCreateDirectFBSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDirectFBSurfaceEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDirectFBSurfaceEXT( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDirectFBSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDirectFBSurfaceEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDirectFBSurfaceEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDirectFBSurfaceEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Instance::createDirectFBSurfaceEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDirectFBSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDirectFBSurfaceEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDirectFBSurfaceEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDirectFBSurfaceEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceDirectFBPresentationSupportEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDirectFBPresentationSupportEXT.html + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, + IDirectFB * dfb, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( static_cast( m_physicalDevice ), queueFamilyIndex, dfb ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceDirectFBPresentationSupportEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDirectFBPresentationSupportEXT.html + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT && + "Function requires " ); +# endif + + VkBool32 result = d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb ); + + return static_cast( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_vertex_input_dynamic_state === + + // wrapper function for command vkCmdSetVertexInputEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetVertexInputEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( uint32_t vertexBindingDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetVertexInputEXT( static_cast( m_commandBuffer ), + vertexBindingDescriptionCount, + reinterpret_cast( pVertexBindingDescriptions ), + vertexAttributeDescriptionCount, + reinterpret_cast( pVertexAttributeDescriptions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetVertexInputEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetVertexInputEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( + VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexBindingDescriptions, + VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexAttributeDescriptions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetVertexInputEXT && "Function requires or " ); +# endif + + d.vkCmdSetVertexInputEXT( m_commandBuffer, + vertexBindingDescriptions.size(), + reinterpret_cast( vertexBindingDescriptions.data() ), + vertexAttributeDescriptions.size(), + reinterpret_cast( vertexAttributeDescriptions.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + + // wrapper function for command vkGetMemoryZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryZirconHandleFUCHSIA.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryZirconHandleFUCHSIA( + static_cast( m_device ), reinterpret_cast( pGetZirconHandleInfo ), pZirconHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryZirconHandleFUCHSIA.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryZirconHandleFUCHSIA && "Function requires " ); +# endif + + zx_handle_t zirconHandle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( zirconHandle ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetMemoryZirconHandlePropertiesFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryZirconHandlePropertiesFUCHSIA.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + 
VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetMemoryZirconHandlePropertiesFUCHSIA( static_cast( m_device ), + static_cast( handleType ), + zirconHandle, + reinterpret_cast( pMemoryZirconHandleProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryZirconHandlePropertiesFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryZirconHandlePropertiesFUCHSIA.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryZirconHandlePropertiesFUCHSIA && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device, + static_cast( handleType ), + zirconHandle, + reinterpret_cast( &memoryZirconHandleProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryZirconHandleProperties ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + + // wrapper function for command vkImportSemaphoreZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreZirconHandleFUCHSIA.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkImportSemaphoreZirconHandleFUCHSIA( + static_cast( m_device ), reinterpret_cast( pImportSemaphoreZirconHandleInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkImportSemaphoreZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreZirconHandleFUCHSIA.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkImportSemaphoreZirconHandleFUCHSIA && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkImportSemaphoreZirconHandleFUCHSIA( + m_device, reinterpret_cast( &importSemaphoreZirconHandleInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" ); + + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetSemaphoreZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreZirconHandleFUCHSIA.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetSemaphoreZirconHandleFUCHSIA( + static_cast( m_device ), reinterpret_cast( pGetZirconHandleInfo ), pZirconHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSemaphoreZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreZirconHandleFUCHSIA.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSemaphoreZirconHandleFUCHSIA && "Function requires " ); +# endif + + zx_handle_t zirconHandle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( zirconHandle ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + // wrapper function for command vkCreateBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferCollectionFUCHSIA.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateBufferCollectionFUCHSIA( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pCollection ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferCollectionFUCHSIA.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateBufferCollectionFUCHSIA && "Function requires " ); +# endif + + 
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateBufferCollectionFUCHSIA( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &collection ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIA" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( collection ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferCollectionFUCHSIA.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateBufferCollectionFUCHSIA && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateBufferCollectionFUCHSIA( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &collection ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( collection, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSetBufferCollectionImageConstraintsFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetBufferCollectionImageConstraintsFUCHSIA.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkSetBufferCollectionImageConstraintsFUCHSIA( static_cast( m_device ), + static_cast( collection ), + reinterpret_cast( pImageConstraintsInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetBufferCollectionImageConstraintsFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetBufferCollectionImageConstraintsFUCHSIA.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetBufferCollectionImageConstraintsFUCHSIA && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSetBufferCollectionImageConstraintsFUCHSIA( + m_device, static_cast( collection ), 
reinterpret_cast( &imageConstraintsInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionImageConstraintsFUCHSIA" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSetBufferCollectionBufferConstraintsFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetBufferCollectionBufferConstraintsFUCHSIA.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkSetBufferCollectionBufferConstraintsFUCHSIA( static_cast( m_device ), + static_cast( collection ), + reinterpret_cast( pBufferConstraintsInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetBufferCollectionBufferConstraintsFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetBufferCollectionBufferConstraintsFUCHSIA.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetBufferCollectionBufferConstraintsFUCHSIA && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSetBufferCollectionBufferConstraintsFUCHSIA( + m_device, static_cast( collection ), reinterpret_cast( &bufferConstraintsInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionBufferConstraintsFUCHSIA" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferCollectionFUCHSIA.html + template + VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBufferCollectionFUCHSIA( + static_cast( m_device ), static_cast( collection ), reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferCollectionFUCHSIA.html + template + VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyBufferCollectionFUCHSIA 
&& "Function requires " ); +# endif + + d.vkDestroyBufferCollectionFUCHSIA( + m_device, + static_cast( collection ), + reinterpret_cast( static_cast( allocator ) ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferCollectionFUCHSIA.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBufferCollectionFUCHSIA( + static_cast( m_device ), static_cast( collection ), reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferCollectionFUCHSIA.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyBufferCollectionFUCHSIA && "Function requires " ); +# endif + + d.vkDestroyBufferCollectionFUCHSIA( + m_device, + static_cast( collection ), + reinterpret_cast( static_cast( allocator ) ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetBufferCollectionPropertiesFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferCollectionPropertiesFUCHSIA.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetBufferCollectionPropertiesFUCHSIA( static_cast( m_device ), + static_cast( collection ), + reinterpret_cast( pProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferCollectionPropertiesFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferCollectionPropertiesFUCHSIA.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferCollectionPropertiesFUCHSIA && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetBufferCollectionPropertiesFUCHSIA( + m_device, static_cast( collection ), reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferCollectionPropertiesFUCHSIA" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ 
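For orientation, a minimal usage sketch of the VK_FUCHSIA_buffer_collection wrappers defined above. It assumes a Fuchsia build with VK_USE_PLATFORM_FUCHSIA defined, exceptions and enhanced mode enabled, a default dispatcher with the device-level entry points loaded, and that `device` and the sysmem `token` handle come from elsewhere in the application; the helper name and the constraint values are illustrative placeholders, not a recommended configuration.

    #include <vulkan/vulkan.hpp>  // built with VK_USE_PLATFORM_FUCHSIA defined

    // Sketch only: create a buffer collection from a sysmem token, impose simple
    // buffer constraints, read back the negotiated properties, then destroy it.
    vk::BufferCollectionPropertiesFUCHSIA negotiateBufferCollection( vk::Device device, zx_handle_t token )
    {
      vk::BufferCollectionCreateInfoFUCHSIA createInfo{};
      createInfo.collectionToken = token;  // sysmem token handle (assumed to be valid)

      vk::BufferCollectionFUCHSIA collection = device.createBufferCollectionFUCHSIA( createInfo );

      // Placeholder constraints: at least one 64 KiB transfer-destination buffer.
      vk::BufferConstraintsInfoFUCHSIA constraints{};
      constraints.createInfo             = vk::BufferCreateInfo{}.setSize( 65536 ).setUsage( vk::BufferUsageFlagBits::eTransferDst );
      constraints.requiredFormatFeatures = vk::FormatFeatureFlagBits::eTransferDst;
      constraints.bufferCollectionConstraints.minBufferCount = 1;
      device.setBufferCollectionBufferConstraintsFUCHSIA( collection, constraints );

      // The negotiated memoryTypeBits / bufferCount drive the later memory allocation.
      vk::BufferCollectionPropertiesFUCHSIA properties = device.getBufferCollectionPropertiesFUCHSIA( collection );

      device.destroyBufferCollectionFUCHSIA( collection );
      return properties;
    }

The same flow applies with setBufferCollectionImageConstraintsFUCHSIA when the collection is meant to back images instead of buffers.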
+ + //=== VK_HUAWEI_subpass_shading === + + // wrapper function for command vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, + VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( + static_cast( m_device ), static_cast( renderpass ), reinterpret_cast( pMaxWorkgroupSize ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( + m_device, static_cast( renderpass ), reinterpret_cast( &maxWorkgroupSize ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( maxWorkgroupSize ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSubpassShadingHUAWEI, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSubpassShadingHUAWEI.html + template + VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSubpassShadingHUAWEI( static_cast( m_commandBuffer ) ); + } + + //=== VK_HUAWEI_invocation_mask === + + // wrapper function for command vkCmdBindInvocationMaskHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindInvocationMaskHUAWEI.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindInvocationMaskHUAWEI( + static_cast( m_commandBuffer ), static_cast( imageView ), static_cast( imageLayout ) ); + } + + //=== VK_NV_external_memory_rdma === + + // wrapper function for command vkGetMemoryRemoteAddressNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryRemoteAddressNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, + VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryRemoteAddressNV( static_cast( m_device ), + reinterpret_cast( pMemoryGetRemoteAddressInfo ), + reinterpret_cast( pAddress ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryRemoteAddressNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryRemoteAddressNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryRemoteAddressNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::RemoteAddressNV address; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetMemoryRemoteAddressNV( + m_device, reinterpret_cast( &memoryGetRemoteAddressInfo ), reinterpret_cast( &address ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( address ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_pipeline_properties === + + // wrapper function for command vkGetPipelinePropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelinePropertiesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo, + VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPipelinePropertiesEXT( static_cast( m_device ), + reinterpret_cast( pPipelineInfo ), + reinterpret_cast( pPipelineProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelinePropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelinePropertiesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelinePropertiesEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPipelinePropertiesEXT( + m_device, reinterpret_cast( &pipelineInfo ), reinterpret_cast( &pipelineProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_extended_dynamic_state2 === + + // wrapper function for command vkCmdSetPatchControlPointsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPatchControlPointsEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetPatchControlPointsEXT( static_cast( m_commandBuffer ), patchControlPoints ); + } + + // wrapper function for command vkCmdSetRasterizerDiscardEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizerDiscardEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRasterizerDiscardEnableEXT( static_cast( m_commandBuffer ), static_cast( rasterizerDiscardEnable ) ); + } + + // wrapper function for command vkCmdSetDepthBiasEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBiasEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBiasEnableEXT( static_cast( m_commandBuffer ), static_cast( depthBiasEnable ) ); + } + + // wrapper function for command vkCmdSetLogicOpEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLogicOpEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLogicOpEXT( static_cast( m_commandBuffer ), static_cast( logicOp ) ); + } + + // wrapper function for command vkCmdSetPrimitiveRestartEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveRestartEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetPrimitiveRestartEnableEXT( static_cast( m_commandBuffer ), static_cast( primitiveRestartEnable ) ); + } + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + // wrapper function for command vkCreateScreenSurfaceQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateScreenSurfaceQNX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateScreenSurfaceQNX( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateScreenSurfaceQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateScreenSurfaceQNX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateScreenSurfaceQNX && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateScreenSurfaceQNX( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateScreenSurfaceQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateScreenSurfaceQNX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateScreenSurfaceQNX && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateScreenSurfaceQNX( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceScreenPresentationSupportQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceScreenPresentationSupportQNX.html + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, + struct _screen_window * window, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceScreenPresentationSupportQNX( static_cast( m_physicalDevice ), queueFamilyIndex, window ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceScreenPresentationSupportQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceScreenPresentationSupportQNX.html + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceScreenPresentationSupportQNX && + "Function requires " ); +# endif + + VkBool32 result = d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window ); + + return static_cast( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + + // wrapper function for command vkCmdSetColorWriteEnableEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorWriteEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetColorWriteEnableEXT( static_cast( m_commandBuffer ), attachmentCount, reinterpret_cast( pColorWriteEnables ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetColorWriteEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorWriteEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetColorWriteEnableEXT && "Function requires " ); +# endif + + d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, colorWriteEnables.size(), reinterpret_cast( colorWriteEnables.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_ray_tracing_maintenance1 === + + // wrapper function for command vkCmdTraceRaysIndirect2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysIndirect2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdTraceRaysIndirect2KHR( static_cast( m_commandBuffer ), static_cast( indirectDeviceAddress ) ); + } + + //=== VK_EXT_multi_draw === + + // wrapper function for command vkCmdDrawMultiEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMultiEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( uint32_t drawCount, + const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawMultiEXT( static_cast( m_commandBuffer ), + drawCount, + reinterpret_cast( pVertexInfo ), + instanceCount, + firstInstance, + stride ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDrawMultiEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMultiEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & vertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDrawMultiEXT && "Function requires " ); +# endif + + d.vkCmdDrawMultiEXT( m_commandBuffer, + vertexInfo.size(), + reinterpret_cast( vertexInfo.data() ), + instanceCount, + firstInstance, + vertexInfo.stride() ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDrawMultiIndexedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMultiIndexedEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( 
uint32_t drawCount, + const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + const int32_t * pVertexOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawMultiIndexedEXT( static_cast( m_commandBuffer ), + drawCount, + reinterpret_cast( pIndexInfo ), + instanceCount, + firstInstance, + stride, + pVertexOffset ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDrawMultiIndexedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMultiIndexedEXT.html + template + VULKAN_HPP_INLINE void + CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & indexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + Optional vertexOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDrawMultiIndexedEXT && "Function requires " ); +# endif + + d.vkCmdDrawMultiIndexedEXT( m_commandBuffer, + indexInfo.size(), + reinterpret_cast( indexInfo.data() ), + instanceCount, + firstInstance, + indexInfo.stride(), + static_cast( vertexOffset ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_opacity_micromap === + + // wrapper function for command vkCreateMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMicromapEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateMicromapEXT( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pMicromap ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMicromapEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateMicromapEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::MicromapEXT micromap; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateMicromapEXT( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( µmap ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( micromap ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMicromapEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createMicromapEXTUnique( const 
VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateMicromapEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::MicromapEXT micromap; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateMicromapEXT( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( µmap ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( micromap, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyMicromapEXT.html + template + VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyMicromapEXT( + static_cast( m_device ), static_cast( micromap ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyMicromapEXT.html + template + VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyMicromapEXT && "Function requires " ); +# endif + + d.vkDestroyMicromapEXT( m_device, + static_cast( micromap ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyMicromapEXT.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyMicromapEXT( + static_cast( m_device ), static_cast( micromap ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyMicromapEXT.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyMicromapEXT && "Function requires " ); +# endif + + d.vkDestroyMicromapEXT( m_device, + static_cast( micromap ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBuildMicromapsEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildMicromapsEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildMicromapsEXT( static_cast( m_commandBuffer ), infoCount, reinterpret_cast( pInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBuildMicromapsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildMicromapsEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBuildMicromapsEXT && "Function requires " ); +# endif + + d.vkCmdBuildMicromapsEXT( m_commandBuffer, infos.size(), reinterpret_cast( infos.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBuildMicromapsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBuildMicromapsEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkBuildMicromapsEXT( static_cast( m_device ), + static_cast( deferredOperation ), + infoCount, + reinterpret_cast( pInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBuildMicromapsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBuildMicromapsEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBuildMicromapsEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkBuildMicromapsEXT( + m_device, static_cast( deferredOperation ), infos.size(), reinterpret_cast( infos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMicromapEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCopyMicromapEXT( + static_cast( m_device ), static_cast( 
deferredOperation ), reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMicromapEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyMicromapEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCopyMicromapEXT( m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyMicromapToMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMicromapToMemoryEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCopyMicromapToMemoryEXT( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyMicromapToMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMicromapToMemoryEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapToMemoryEXT( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyMicromapToMemoryEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCopyMicromapToMemoryEXT( + m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyMemoryToMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToMicromapEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return 
static_cast( d.vkCopyMemoryToMicromapEXT( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyMemoryToMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToMicromapEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMemoryToMicromapEXT( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyMemoryToMicromapEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCopyMemoryToMicromapEXT( + m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkWriteMicromapsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteMicromapsPropertiesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeMicromapsPropertiesEXT( uint32_t micromapCount, + const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + void * pData, + size_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkWriteMicromapsPropertiesEXT( static_cast( m_device ), + micromapCount, + reinterpret_cast( pMicromaps ), + static_cast( queryType ), + dataSize, + pData, + stride ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWriteMicromapsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteMicromapsPropertiesEXT.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWriteMicromapsPropertiesEXT && "Function requires " ); +# endif + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkWriteMicromapsPropertiesEXT( m_device, + micromaps.size(), + reinterpret_cast( micromaps.data() ), + static_cast( queryType ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ), + stride ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } + + // wrapper function for command vkWriteMicromapsPropertiesEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteMicromapsPropertiesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWriteMicromapsPropertiesEXT && "Function requires " ); +# endif + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkWriteMicromapsPropertiesEXT( m_device, + micromaps.size(), + reinterpret_cast( micromaps.data() ), + static_cast( queryType ), + sizeof( DataType ), + reinterpret_cast( &data ), + stride ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMicromapEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMicromapEXT( static_cast( m_commandBuffer ), reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMicromapEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyMicromapEXT && "Function requires " ); +# endif + + d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyMicromapToMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMicromapToMemoryEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMicromapToMemoryEXT( static_cast( m_commandBuffer ), reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyMicromapToMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMicromapToMemoryEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyMicromapToMemoryEXT && "Function requires " ); +# endif + + d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function 
for command vkCmdCopyMemoryToMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToMicromapEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMemoryToMicromapEXT( static_cast( m_commandBuffer ), reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyMemoryToMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToMicromapEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToMicromapEXT && "Function requires " ); +# endif + + d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdWriteMicromapsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteMicromapsPropertiesEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::writeMicromapsPropertiesEXT( uint32_t micromapCount, + const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteMicromapsPropertiesEXT( static_cast( m_commandBuffer ), + micromapCount, + reinterpret_cast( pMicromaps ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdWriteMicromapsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteMicromapsPropertiesEXT.html + template + VULKAN_HPP_INLINE void + CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdWriteMicromapsPropertiesEXT && "Function requires " ); +# endif + + d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer, + micromaps.size(), + reinterpret_cast( micromaps.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceMicromapCompatibilityEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMicromapCompatibilityEXT.html + template + VULKAN_HPP_INLINE void Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT * pVersionInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceMicromapCompatibilityEXT( static_cast( m_device 
), + reinterpret_cast( pVersionInfo ), + reinterpret_cast( pCompatibility ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceMicromapCompatibilityEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMicromapCompatibilityEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceMicromapCompatibilityEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; + d.vkGetDeviceMicromapCompatibilityEXT( m_device, + reinterpret_cast( &versionInfo ), + reinterpret_cast( &compatibility ) ); + + return compatibility; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetMicromapBuildSizesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMicromapBuildSizesEXT.html + template + VULKAN_HPP_INLINE void Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo, + VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetMicromapBuildSizesEXT( static_cast( m_device ), + static_cast( buildType ), + reinterpret_cast( pBuildInfo ), + reinterpret_cast( pSizeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMicromapBuildSizesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMicromapBuildSizesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT + Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMicromapBuildSizesEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo; + d.vkGetMicromapBuildSizesEXT( m_device, + static_cast( buildType ), + reinterpret_cast( &buildInfo ), + reinterpret_cast( &sizeInfo ) ); + + return sizeInfo; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_HUAWEI_cluster_culling_shader === + + // wrapper function for command vkCmdDrawClusterHUAWEI, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawClusterHUAWEI.html + template + VULKAN_HPP_INLINE void + CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawClusterHUAWEI( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); + } + + // wrapper function for command vkCmdDrawClusterIndirectHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawClusterIndirectHUAWEI.html + template + VULKAN_HPP_INLINE void 
CommandBuffer::drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawClusterIndirectHUAWEI( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ) ); + } + + //=== VK_EXT_pageable_device_local_memory === + + // wrapper function for command vkSetDeviceMemoryPriorityEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDeviceMemoryPriorityEXT.html + template + VULKAN_HPP_INLINE void Device::setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, float priority, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkSetDeviceMemoryPriorityEXT( static_cast( m_device ), static_cast( memory ), priority ); + } + + //=== VK_KHR_maintenance4 === + + // wrapper function for command vkGetDeviceBufferMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirementsKHR.html + template + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceBufferMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceBufferMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirementsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirementsKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetDeviceBufferMemoryRequirementsKHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetDeviceBufferMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirementsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirementsKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetDeviceBufferMemoryRequirementsKHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceImageMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirementsKHR.html + template + VULKAN_HPP_INLINE void Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceImageMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirementsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirementsKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetDeviceImageMemoryRequirementsKHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetDeviceImageMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirementsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirementsKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetDeviceImageMemoryRequirementsKHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceImageSparseMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirementsKHR.html + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageSparseMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceImageSparseMemoryRequirementsKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirementsKHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirementsKHR && + "Function requires or " ); +# endif + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetDeviceImageSparseMemoryRequirementsKHR( + m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + // wrapper function for command vkGetDeviceImageSparseMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirementsKHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirementsKHR && + "Function requires or " ); +# endif + + std::vector sparseMemoryRequirements( + sparseImageMemoryRequirements2Allocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetDeviceImageSparseMemoryRequirementsKHR( + m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VALVE_descriptor_set_host_mapping === + + // wrapper function for command vkGetDescriptorSetLayoutHostMappingInfoVALVE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutHostMappingInfoVALVE.html + template + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( static_cast( 
m_device ), + reinterpret_cast( pBindingReference ), + reinterpret_cast( pHostMapping ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDescriptorSetLayoutHostMappingInfoVALVE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutHostMappingInfoVALVE.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE + Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutHostMappingInfoVALVE && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping; + d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device, + reinterpret_cast( &bindingReference ), + reinterpret_cast( &hostMapping ) ); + + return hostMapping; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDescriptorSetHostMappingVALVE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetHostMappingVALVE.html + template + VULKAN_HPP_INLINE void + Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDescriptorSetHostMappingVALVE( static_cast( m_device ), static_cast( descriptorSet ), ppData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDescriptorSetHostMappingVALVE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetHostMappingVALVE.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetHostMappingVALVE && + "Function requires " ); +# endif + + void * pData; + d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast( descriptorSet ), &pData ); + + return pData; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_copy_memory_indirect === + + // wrapper function for command vkCmdCopyMemoryIndirectNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryIndirectNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMemoryIndirectNV( static_cast( m_commandBuffer ), static_cast( copyBufferAddress ), copyCount, stride ); + } + + // wrapper function for command vkCmdCopyMemoryToImageIndirectNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToImageIndirectNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + 
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMemoryToImageIndirectNV( static_cast( m_commandBuffer ), + static_cast( copyBufferAddress ), + copyCount, + stride, + static_cast( dstImage ), + static_cast( dstImageLayout ), + reinterpret_cast( pImageSubresources ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyMemoryToImageIndirectNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToImageIndirectNV.html + template + VULKAN_HPP_INLINE void + CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageSubresources, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToImageIndirectNV && "Function requires " ); +# endif + + d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer, + static_cast( copyBufferAddress ), + imageSubresources.size(), + stride, + static_cast( dstImage ), + static_cast( dstImageLayout ), + reinterpret_cast( imageSubresources.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_memory_decompression === + + // wrapper function for command vkCmdDecompressMemoryNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecompressMemoryNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( uint32_t decompressRegionCount, + const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDecompressMemoryNV( static_cast( m_commandBuffer ), + decompressRegionCount, + reinterpret_cast( pDecompressMemoryRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDecompressMemoryNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecompressMemoryNV.html + template + VULKAN_HPP_INLINE void + CommandBuffer::decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & decompressMemoryRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDecompressMemoryNV && "Function requires " ); +# endif + + d.vkCmdDecompressMemoryNV( + m_commandBuffer, decompressMemoryRegions.size(), reinterpret_cast( decompressMemoryRegions.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDecompressMemoryIndirectCountNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecompressMemoryIndirectCountNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
d.vkCmdDecompressMemoryIndirectCountNV( static_cast( m_commandBuffer ), + static_cast( indirectCommandsAddress ), + static_cast( indirectCommandsCountAddress ), + stride ); + } + + //=== VK_NV_device_generated_commands_compute === + + // wrapper function for command vkGetPipelineIndirectMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectMemoryRequirementsNV.html + template + VULKAN_HPP_INLINE void Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPipelineIndirectMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineIndirectMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectMemoryRequirementsNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectMemoryRequirementsNV && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetPipelineIndirectMemoryRequirementsNV( + m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetPipelineIndirectMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectMemoryRequirementsNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectMemoryRequirementsNV && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetPipelineIndirectMemoryRequirementsNV( + m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdUpdatePipelineIndirectBufferNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdUpdatePipelineIndirectBufferNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdUpdatePipelineIndirectBufferNV( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + } + + 
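  // Editor's note (illustrative sketch, not part of the generated vulkan.hpp header or of this patch): each command
  // above is exposed twice by these wrappers -- a pointer-style overload that mirrors the C entry point, and, unless
  // VULKAN_HPP_DISABLE_ENHANCED_MODE is defined, an enhanced-mode overload that takes reference/ArrayProxy arguments
  // and returns the output value directly. Assuming a valid VULKAN_HPP_NAMESPACE::Device `device` and a filled
  // ComputePipelineCreateInfo `createInfo`, the VK_NV_device_generated_commands_compute query defined above could be
  // called either way:
  //
  //   // enhanced mode: the MemoryRequirements2 result is returned by value
  //   VULKAN_HPP_NAMESPACE::MemoryRequirements2 reqs = device.getPipelineIndirectMemoryRequirementsNV( createInfo );
  //
  //   // pointer style: mirrors vkGetPipelineIndirectMemoryRequirementsNV directly
  //   VULKAN_HPP_NAMESPACE::MemoryRequirements2 reqs2;
  //   device.getPipelineIndirectMemoryRequirementsNV( &createInfo, &reqs2 );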
// wrapper function for command vkGetPipelineIndirectDeviceAddressNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectDeviceAddressNV.html + template + VULKAN_HPP_INLINE DeviceAddress Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPipelineIndirectDeviceAddressNV( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineIndirectDeviceAddressNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectDeviceAddressNV.html + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectDeviceAddressNV && + "Function requires " ); +# endif + + VkDeviceAddress result = d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast( &info ) ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_extended_dynamic_state3 === + + // wrapper function for command vkCmdSetDepthClampEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClampEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthClampEnableEXT( static_cast( m_commandBuffer ), static_cast( depthClampEnable ) ); + } + + // wrapper function for command vkCmdSetPolygonModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPolygonModeEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetPolygonModeEXT( static_cast( m_commandBuffer ), static_cast( polygonMode ) ); + } + + // wrapper function for command vkCmdSetRasterizationSamplesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizationSamplesEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRasterizationSamplesEXT( static_cast( m_commandBuffer ), static_cast( rasterizationSamples ) ); + } + + // wrapper function for command vkCmdSetSampleMaskEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleMaskEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetSampleMaskEXT( + static_cast( m_commandBuffer ), static_cast( samples ), 
reinterpret_cast( pSampleMask ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetSampleMaskEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleMaskEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sampleMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetSampleMaskEXT && "Function requires or " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( sampleMask.size() == ( static_cast( samples ) + 31 ) / 32 ); +# else + if ( sampleMask.size() != ( static_cast( samples ) + 31 ) / 32 ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setSampleMaskEXT: sampleMask.size() != ( static_cast( samples ) + 31 ) / 32" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast( samples ), reinterpret_cast( sampleMask.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetAlphaToCoverageEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetAlphaToCoverageEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetAlphaToCoverageEnableEXT( static_cast( m_commandBuffer ), static_cast( alphaToCoverageEnable ) ); + } + + // wrapper function for command vkCmdSetAlphaToOneEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetAlphaToOneEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetAlphaToOneEnableEXT( static_cast( m_commandBuffer ), static_cast( alphaToOneEnable ) ); + } + + // wrapper function for command vkCmdSetLogicOpEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLogicOpEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLogicOpEnableEXT( static_cast( m_commandBuffer ), static_cast( logicOpEnable ) ); + } + + // wrapper function for command vkCmdSetColorBlendEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetColorBlendEnableEXT( + static_cast( m_commandBuffer ), firstAttachment, attachmentCount, reinterpret_cast( pColorBlendEnables ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetColorBlendEnableEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendEnableEXT && + "Function requires or " ); +# endif + + d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, colorBlendEnables.size(), reinterpret_cast( colorBlendEnables.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetColorBlendEquationEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendEquationEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetColorBlendEquationEXT( static_cast( m_commandBuffer ), + firstAttachment, + attachmentCount, + reinterpret_cast( pColorBlendEquations ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetColorBlendEquationEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendEquationEXT.html + template + VULKAN_HPP_INLINE void + CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEquations, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendEquationEXT && + "Function requires or " ); +# endif + + d.vkCmdSetColorBlendEquationEXT( + m_commandBuffer, firstAttachment, colorBlendEquations.size(), reinterpret_cast( colorBlendEquations.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetColorWriteMaskEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorWriteMaskEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetColorWriteMaskEXT( + static_cast( m_commandBuffer ), firstAttachment, attachmentCount, reinterpret_cast( pColorWriteMasks ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetColorWriteMaskEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorWriteMaskEXT.html + template + VULKAN_HPP_INLINE void + CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteMasks, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetColorWriteMaskEXT && + "Function requires or " ); +# endif + + d.vkCmdSetColorWriteMaskEXT( + m_commandBuffer, firstAttachment, colorWriteMasks.size(), 
reinterpret_cast( colorWriteMasks.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetTessellationDomainOriginEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetTessellationDomainOriginEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetTessellationDomainOriginEXT( static_cast( m_commandBuffer ), static_cast( domainOrigin ) ); + } + + // wrapper function for command vkCmdSetRasterizationStreamEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizationStreamEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRasterizationStreamEXT( static_cast( m_commandBuffer ), rasterizationStream ); + } + + // wrapper function for command vkCmdSetConservativeRasterizationModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetConservativeRasterizationModeEXT.html + template + VULKAN_HPP_INLINE void + CommandBuffer::setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetConservativeRasterizationModeEXT( static_cast( m_commandBuffer ), + static_cast( conservativeRasterizationMode ) ); + } + + // wrapper function for command vkCmdSetExtraPrimitiveOverestimationSizeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExtraPrimitiveOverestimationSizeEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast( m_commandBuffer ), extraPrimitiveOverestimationSize ); + } + + // wrapper function for command vkCmdSetDepthClipEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClipEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthClipEnableEXT( static_cast( m_commandBuffer ), static_cast( depthClipEnable ) ); + } + + // wrapper function for command vkCmdSetSampleLocationsEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleLocationsEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetSampleLocationsEnableEXT( static_cast( m_commandBuffer ), static_cast( sampleLocationsEnable ) ); + } + + // wrapper function for command vkCmdSetColorBlendAdvancedEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendAdvancedEXT.html + template 
+ VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetColorBlendAdvancedEXT( static_cast( m_commandBuffer ), + firstAttachment, + attachmentCount, + reinterpret_cast( pColorBlendAdvanced ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetColorBlendAdvancedEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendAdvancedEXT.html + template + VULKAN_HPP_INLINE void + CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendAdvanced, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendAdvancedEXT && + "Function requires or " ); +# endif + + d.vkCmdSetColorBlendAdvancedEXT( + m_commandBuffer, firstAttachment, colorBlendAdvanced.size(), reinterpret_cast( colorBlendAdvanced.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetProvokingVertexModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetProvokingVertexModeEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetProvokingVertexModeEXT( static_cast( m_commandBuffer ), static_cast( provokingVertexMode ) ); + } + + // wrapper function for command vkCmdSetLineRasterizationModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineRasterizationModeEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLineRasterizationModeEXT( static_cast( m_commandBuffer ), static_cast( lineRasterizationMode ) ); + } + + // wrapper function for command vkCmdSetLineStippleEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStippleEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLineStippleEnableEXT( static_cast( m_commandBuffer ), static_cast( stippledLineEnable ) ); + } + + // wrapper function for command vkCmdSetDepthClipNegativeOneToOneEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClipNegativeOneToOneEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthClipNegativeOneToOneEXT( static_cast( m_commandBuffer ), static_cast( negativeOneToOne ) ); + } + + // wrapper function for command vkCmdSetViewportWScalingEnableNV, see 
+ // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWScalingEnableNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportWScalingEnableNV( static_cast( m_commandBuffer ), static_cast( viewportWScalingEnable ) ); + } + + // wrapper function for command vkCmdSetViewportSwizzleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportSwizzleNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportSwizzleNV( + static_cast( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast( pViewportSwizzles ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetViewportSwizzleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportSwizzleNV.html + template + VULKAN_HPP_INLINE void + CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportSwizzles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetViewportSwizzleNV && + "Function requires or " ); +# endif + + d.vkCmdSetViewportSwizzleNV( + m_commandBuffer, firstViewport, viewportSwizzles.size(), reinterpret_cast( viewportSwizzles.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetCoverageToColorEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageToColorEnableNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCoverageToColorEnableNV( static_cast( m_commandBuffer ), static_cast( coverageToColorEnable ) ); + } + + // wrapper function for command vkCmdSetCoverageToColorLocationNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageToColorLocationNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCoverageToColorLocationNV( static_cast( m_commandBuffer ), coverageToColorLocation ); + } + + // wrapper function for command vkCmdSetCoverageModulationModeNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageModulationModeNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCoverageModulationModeNV( static_cast( m_commandBuffer ), static_cast( coverageModulationMode ) ); + } + + // wrapper function for command 
vkCmdSetCoverageModulationTableEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageModulationTableEnableNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCoverageModulationTableEnableNV( static_cast( m_commandBuffer ), static_cast( coverageModulationTableEnable ) ); + } + + // wrapper function for command vkCmdSetCoverageModulationTableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageModulationTableNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( uint32_t coverageModulationTableCount, + const float * pCoverageModulationTable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCoverageModulationTableNV( static_cast( m_commandBuffer ), coverageModulationTableCount, pCoverageModulationTable ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetCoverageModulationTableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageModulationTableNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & coverageModulationTable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetCoverageModulationTableNV && + "Function requires or " ); +# endif + + d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTable.size(), coverageModulationTable.data() ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetShadingRateImageEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetShadingRateImageEnableNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetShadingRateImageEnableNV( static_cast( m_commandBuffer ), static_cast( shadingRateImageEnable ) ); + } + + // wrapper function for command vkCmdSetRepresentativeFragmentTestEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRepresentativeFragmentTestEnableNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRepresentativeFragmentTestEnableNV( static_cast( m_commandBuffer ), static_cast( representativeFragmentTestEnable ) ); + } + + // wrapper function for command vkCmdSetCoverageReductionModeNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageReductionModeNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
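      // [Editorial note] Usage sketch, not generated code: the ArrayProxy overload of
      // setCoverageModulationTableNV above accepts the modulation table as an initializer list.
      // 'cmd' is an assumed vk::CommandBuffer in the recording state with VK_EXT_extended_dynamic_state3
      // and the relevant NV framebuffer-mixed-samples features enabled:
      //
      //   cmd.setCoverageModulationModeNV( vk::CoverageModulationModeNV::eRgba );
      //   cmd.setCoverageModulationTableEnableNV( VK_TRUE );
      //   cmd.setCoverageModulationTableNV( { 1.0f, 0.75f, 0.5f, 0.25f } );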
d.vkCmdSetCoverageReductionModeNV( static_cast( m_commandBuffer ), static_cast( coverageReductionMode ) ); + } + + //=== VK_EXT_shader_module_identifier === + + // wrapper function for command vkGetShaderModuleIdentifierEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderModuleIdentifierEXT.html + template + VULKAN_HPP_INLINE void Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetShaderModuleIdentifierEXT( + static_cast( m_device ), static_cast( shaderModule ), reinterpret_cast( pIdentifier ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetShaderModuleIdentifierEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderModuleIdentifierEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT + Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetShaderModuleIdentifierEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; + d.vkGetShaderModuleIdentifierEXT( m_device, static_cast( shaderModule ), reinterpret_cast( &identifier ) ); + + return identifier; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetShaderModuleCreateInfoIdentifierEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderModuleCreateInfoIdentifierEXT.html + template + VULKAN_HPP_INLINE void Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetShaderModuleCreateInfoIdentifierEXT( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pIdentifier ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetShaderModuleCreateInfoIdentifierEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderModuleCreateInfoIdentifierEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT + Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetShaderModuleCreateInfoIdentifierEXT && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; + d.vkGetShaderModuleCreateInfoIdentifierEXT( + m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &identifier ) ); + + return identifier; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_optical_flow === + + // wrapper function for command vkGetPhysicalDeviceOpticalFlowImageFormatsNV, see + // 
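  // [Editorial note] Usage sketch for the VK_EXT_shader_module_identifier wrappers above, not generated
  // code: the enhanced-mode overloads return the identifier by value instead of filling an out-parameter.
  // 'device', 'shaderModule' and 'createInfo' (a vk::ShaderModuleCreateInfo) are assumed valid:
  //
  //   vk::ShaderModuleIdentifierEXT id  = device.getShaderModuleIdentifierEXT( shaderModule );
  //   // or, without ever creating a VkShaderModule:
  //   vk::ShaderModuleIdentifierEXT id2 = device.getShaderModuleCreateInfoIdentifierEXT( createInfo );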
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceOpticalFlowImageFormatsNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, + uint32_t * pFormatCount, + VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( static_cast( m_physicalDevice ), + reinterpret_cast( pOpticalFlowImageFormatInfo ), + pFormatCount, + reinterpret_cast( pImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceOpticalFlowImageFormatsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceOpticalFlowImageFormatsNV.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV && + "Function requires " ); +# endif + + std::vector imageFormatProperties; + uint32_t formatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( + m_physicalDevice, reinterpret_cast( &opticalFlowImageFormatInfo ), &formatCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && formatCount ) + { + imageFormatProperties.resize( formatCount ); + result = static_cast( + d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, + reinterpret_cast( &opticalFlowImageFormatInfo ), + &formatCount, + reinterpret_cast( imageFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); + VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); + if ( formatCount < imageFormatProperties.size() ) + { + imageFormatProperties.resize( formatCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceOpticalFlowImageFormatsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceOpticalFlowImageFormatsNV.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, + OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV && + "Function requires " ); +# endif + + std::vector imageFormatProperties( + opticalFlowImageFormatPropertiesNVAllocator ); + uint32_t formatCount; + VULKAN_HPP_NAMESPACE::Result 
result; + do + { + result = static_cast( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( + m_physicalDevice, reinterpret_cast( &opticalFlowImageFormatInfo ), &formatCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && formatCount ) + { + imageFormatProperties.resize( formatCount ); + result = static_cast( + d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, + reinterpret_cast( &opticalFlowImageFormatInfo ), + &formatCount, + reinterpret_cast( imageFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); + VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); + if ( formatCount < imageFormatProperties.size() ) + { + imageFormatProperties.resize( formatCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateOpticalFlowSessionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateOpticalFlowSessionNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateOpticalFlowSessionNV( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSession ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateOpticalFlowSessionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateOpticalFlowSessionNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateOpticalFlowSessionNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateOpticalFlowSessionNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &session ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( session ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateOpticalFlowSessionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateOpticalFlowSessionNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createOpticalFlowSessionNVUnique( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) 
+ VULKAN_HPP_ASSERT( d.vkCreateOpticalFlowSessionNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateOpticalFlowSessionNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &session ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNVUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( session, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyOpticalFlowSessionNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyOpticalFlowSessionNV.html + template + VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyOpticalFlowSessionNV( + static_cast( m_device ), static_cast( session ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyOpticalFlowSessionNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyOpticalFlowSessionNV.html + template + VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyOpticalFlowSessionNV && "Function requires " ); +# endif + + d.vkDestroyOpticalFlowSessionNV( + m_device, + static_cast( session ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyOpticalFlowSessionNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyOpticalFlowSessionNV.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyOpticalFlowSessionNV( + static_cast( m_device ), static_cast( session ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyOpticalFlowSessionNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyOpticalFlowSessionNV.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyOpticalFlowSessionNV && "Function requires " ); +# endif + + d.vkDestroyOpticalFlowSessionNV( + m_device, + static_cast( session ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for 
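  // [Editorial note] Usage sketch for VK_NV_optical_flow session setup with the wrappers above, not
  // generated code; 'physicalDevice' and 'device' are assumed valid handles with the extension enabled,
  // and exceptions are assumed to be enabled so the enhanced-mode calls throw on failure:
  //
  //   vk::OpticalFlowImageFormatInfoNV formatInfo{ vk::OpticalFlowUsageFlagBitsNV::eInput };
  //   std::vector<vk::OpticalFlowImageFormatPropertiesNV> formats =
  //       physicalDevice.getOpticalFlowImageFormatsNV( formatInfo );
  //   vk::OpticalFlowSessionCreateInfoNV sessionInfo{};       // width/height/formats filled in by the caller
  //   vk::UniqueOpticalFlowSessionNV session = device.createOpticalFlowSessionNVUnique( sessionInfo );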
command vkBindOpticalFlowSessionImageNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindOpticalFlowSessionImageNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, + VULKAN_HPP_NAMESPACE::ImageView view, + VULKAN_HPP_NAMESPACE::ImageLayout layout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkBindOpticalFlowSessionImageNV( static_cast( m_device ), + static_cast( session ), + static_cast( bindingPoint ), + static_cast( view ), + static_cast( layout ) ) ); + } +#else + // wrapper function for command vkBindOpticalFlowSessionImageNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindOpticalFlowSessionImageNV.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, + VULKAN_HPP_NAMESPACE::ImageView view, + VULKAN_HPP_NAMESPACE::ImageLayout layout, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindOpticalFlowSessionImageNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkBindOpticalFlowSessionImageNV( m_device, + static_cast( session ), + static_cast( bindingPoint ), + static_cast( view ), + static_cast( layout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindOpticalFlowSessionImageNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkCmdOpticalFlowExecuteNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdOpticalFlowExecuteNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdOpticalFlowExecuteNV( static_cast( m_commandBuffer ), + static_cast( session ), + reinterpret_cast( pExecuteInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdOpticalFlowExecuteNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdOpticalFlowExecuteNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdOpticalFlowExecuteNV && "Function requires " ); +# endif + + d.vkCmdOpticalFlowExecuteNV( + m_commandBuffer, static_cast( session ), reinterpret_cast( &executeInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_maintenance5 === + + // wrapper function for command vkCmdBindIndexBuffer2KHR, see 
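  // [Editorial note] Usage sketch, not generated code: binding views to an optical-flow session and
  // recording the execute command with the wrappers above. 'device', 'session' (a UniqueOpticalFlowSessionNV),
  // 'inputView', 'cmd' and 'executeInfo' are assumed valid; bindOpticalFlowSessionImageNV throws on failure
  // unless VULKAN_HPP_NO_EXCEPTIONS is defined:
  //
  //   device.bindOpticalFlowSessionImageNV( *session, vk::OpticalFlowSessionBindingPointNV::eInput,
  //                                         inputView, vk::ImageLayout::eGeneral );
  //   cmd.opticalFlowExecuteNV( *session, executeInfo );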
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindIndexBuffer2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindIndexBuffer2KHR( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( size ), + static_cast( indexType ) ); + } + + // wrapper function for command vkGetRenderingAreaGranularityKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderingAreaGranularityKHR.html + template + VULKAN_HPP_INLINE void Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetRenderingAreaGranularityKHR( + static_cast( m_device ), reinterpret_cast( pRenderingAreaInfo ), reinterpret_cast( pGranularity ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRenderingAreaGranularityKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderingAreaGranularityKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D + Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRenderingAreaGranularityKHR && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Extent2D granularity; + d.vkGetRenderingAreaGranularityKHR( + m_device, reinterpret_cast( &renderingAreaInfo ), reinterpret_cast( &granularity ) ); + + return granularity; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceImageSubresourceLayoutKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayoutKHR.html + template + VULKAN_HPP_INLINE void Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageSubresourceLayoutKHR( + static_cast( m_device ), reinterpret_cast( pInfo ), reinterpret_cast( pLayout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceImageSubresourceLayoutKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayoutKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayoutKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + 
d.vkGetDeviceImageSubresourceLayoutKHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &layout ) ); + + return layout; + } + + // wrapper function for command vkGetDeviceImageSubresourceLayoutKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayoutKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayoutKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + d.vkGetDeviceImageSubresourceLayoutKHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &layout ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSubresourceLayout2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2KHR.html + template + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSubresourceLayout2KHR( static_cast( m_device ), + static_cast( image ), + reinterpret_cast( pSubresource ), + reinterpret_cast( pLayout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSubresourceLayout2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 Device::getImageSubresourceLayout2KHR( + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkGetImageSubresourceLayout2KHR && + "Function requires or or or " ); +# endif + + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + d.vkGetImageSubresourceLayout2KHR( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return layout; + } + + // wrapper function for command vkGetImageSubresourceLayout2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getImageSubresourceLayout2KHR( + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkGetImageSubresourceLayout2KHR && + "Function requires or or or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + 
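      // [Editorial note] Usage sketch for the VK_KHR_maintenance5 queries above, not generated code: the
      // enhanced-mode overloads return their results by value, and StructureChain forms are provided when
      // additional structures need to be chained onto SubresourceLayout2. 'device', 'image', 'subresource'
      // and 'renderingAreaInfo' are assumed valid:
      //
      //   vk::SubresourceLayout2 layout = device.getImageSubresourceLayout2KHR( image, subresource );
      //   vk::Extent2D granularity      = device.getRenderingAreaGranularityKHR( renderingAreaInfo );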
d.vkGetImageSubresourceLayout2KHR( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_AMD_anti_lag === + + // wrapper function for command vkAntiLagUpdateAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAntiLagUpdateAMD.html + template + VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkAntiLagUpdateAMD( static_cast( m_device ), reinterpret_cast( pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAntiLagUpdateAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAntiLagUpdateAMD.html + template + VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAntiLagUpdateAMD && "Function requires " ); +# endif + + d.vkAntiLagUpdateAMD( m_device, reinterpret_cast( &data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_shader_object === + + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShadersEXT( uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateShadersEXT( static_cast( m_device ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pShaders ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function requires " ); +# endif + + std::vector shaders( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateShadersEXT( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( shaders.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); + + return ResultValue>( result, std::move( shaders ) ); + } + + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + 
Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function requires " ); +# endif + + std::vector shaders( createInfos.size(), shaderEXTAllocator ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateShadersEXT( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( shaders.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); + + return ResultValue>( result, std::move( shaders ) ); + } + + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ShaderEXT shader; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateShadersEXT( m_device, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &shader ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); + + return ResultValue( result, std::move( shader ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template < + typename Dispatch, + typename ShaderEXTAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, ShaderEXTAllocator>> + Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function requires " ); +# endif + + std::vector shaders( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateShadersEXT( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( shaders.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); + std::vector, ShaderEXTAllocator> uniqueShaders; + uniqueShaders.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & shader : shaders ) + { + uniqueShaders.push_back( UniqueHandle( shader, deleter ) ); + } + return ResultValue, 
ShaderEXTAllocator>>( result, std::move( uniqueShaders ) ); + } + + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template < + typename Dispatch, + typename ShaderEXTAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, ShaderEXTAllocator>> + Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function requires " ); +# endif + + std::vector shaders( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateShadersEXT( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( shaders.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); + std::vector, ShaderEXTAllocator> uniqueShaders( shaderEXTAllocator ); + uniqueShaders.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & shader : shaders ) + { + uniqueShaders.push_back( UniqueHandle( shader, deleter ) ); + } + return ResultValue, ShaderEXTAllocator>>( result, std::move( uniqueShaders ) ); + } + + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ShaderEXT shader; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateShadersEXT( m_device, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &shader ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); + + return ResultValue>( + result, UniqueHandle( shader, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyShaderEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderEXT.html + template + VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyShaderEXT( + static_cast( m_device ), static_cast( shader ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyShaderEXT, see 
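  // [Editorial note] Usage sketch for VK_EXT_shader_object creation with the wrappers above, not generated
  // code: because eIncompatibleShaderBinaryEXT is treated as a success code, the enhanced-mode overloads
  // return a ResultValue rather than throwing on it, so the result must be inspected explicitly.
  // 'device' and 'createInfos' (a std::vector<vk::ShaderCreateInfoEXT>) are assumed valid:
  //
  //   vk::ResultValue<std::vector<vk::ShaderEXT>> rv = device.createShadersEXT( createInfos );
  //   if ( rv.result == vk::Result::eIncompatibleShaderBinaryEXT )
  //   {
  //     // a provided shader binary was rejected; fall back to SPIR-V based creation
  //   }
  //   std::vector<vk::ShaderEXT> shaders = std::move( rv.value );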
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderEXT.html + template + VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyShaderEXT && "Function requires " ); +# endif + + d.vkDestroyShaderEXT( m_device, + static_cast( shader ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyShaderEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderEXT.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyShaderEXT( + static_cast( m_device ), static_cast( shader ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyShaderEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderEXT.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyShaderEXT && "Function requires " ); +# endif + + d.vkDestroyShaderEXT( m_device, + static_cast( shader ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetShaderBinaryDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderBinaryDataEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetShaderBinaryDataEXT( static_cast( m_device ), static_cast( shader ), pDataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetShaderBinaryDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderBinaryDataEXT.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetShaderBinaryDataEXT && "Function requires " ); +# endif + + std::vector data; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), &dataSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + 
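      // [Editorial note] Usage sketch, not generated code: reading back a shader binary for caching with
      // getShaderBinaryDataEXT above, and binding shader objects with the bindShadersEXT wrapper further
      // below. 'device', 'shader', 'cmd', 'vertShader' and 'fragShader' are assumed valid, and the stage
      // and shader lists must have matching sizes as bindShadersEXT enforces:
      //
      //   std::vector<uint8_t> binary = device.getShaderBinaryDataEXT( shader );
      //   cmd.bindShadersEXT( { vk::ShaderStageFlagBits::eVertex, vk::ShaderStageFlagBits::eFragment },
      //                       { vertShader, fragShader } );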
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } + + // wrapper function for command vkGetShaderBinaryDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderBinaryDataEXT.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetShaderBinaryDataEXT && "Function requires " ); +# endif + + std::vector data( uint8_tAllocator ); + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), &dataSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindShadersEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( uint32_t stageCount, + const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits * pStages, + const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindShadersEXT( static_cast( m_commandBuffer ), + stageCount, + reinterpret_cast( pStages ), + reinterpret_cast( pShaders ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindShadersEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & stages, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shaders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindShadersEXT && "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( stages.size() == shaders.size() ); +# else + if ( stages.size() != shaders.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindShadersEXT( m_commandBuffer, + stages.size(), + reinterpret_cast( stages.data() ), + reinterpret_cast( shaders.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE 
*/ + + // wrapper function for command vkCmdSetDepthClampRangeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClampRangeEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, + const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthClampRangeEXT( static_cast( m_commandBuffer ), + static_cast( depthClampMode ), + reinterpret_cast( pDepthClampRange ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetDepthClampRangeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClampRangeEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, + Optional depthClampRange, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetDepthClampRangeEXT && + "Function requires or " ); +# endif + + d.vkCmdSetDepthClampRangeEXT( + m_commandBuffer, + static_cast( depthClampMode ), + reinterpret_cast( static_cast( depthClampRange ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_pipeline_binary === + + // wrapper function for command vkCreatePipelineBinariesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineBinariesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR * pBinaries, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreatePipelineBinariesKHR( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pBinaries ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreatePipelineBinariesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineBinariesKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function requires " ); +# endif + + std::vector pipelineBinaries; + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; + VULKAN_HPP_NAMESPACE::Result result; + if ( createInfo.pKeysAndDataInfo ) + { + VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); + pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); + binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + else + { 
+ VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaries.resize( binaries.pipelineBinaryCount ); + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + } + + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); + + return ResultValue>( result, std::move( pipelineBinaries ) ); + } + + // wrapper function for command vkCreatePipelineBinariesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineBinariesKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator, + PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function requires " ); +# endif + + std::vector pipelineBinaries( pipelineBinaryKHRAllocator ); + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; + VULKAN_HPP_NAMESPACE::Result result; + if ( createInfo.pKeysAndDataInfo ) + { + VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); + pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); + binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + else + { + VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaries.resize( binaries.pipelineBinaryCount ); + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + } + + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); + + return ResultValue>( result, std::move( pipelineBinaries ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreatePipelineBinariesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineBinariesKHR.html + template >::value, + 
int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineBinaryKHRAllocator>> + Device::createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function requires " ); +# endif + + std::vector pipelineBinaries; + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; + VULKAN_HPP_NAMESPACE::Result result; + if ( createInfo.pKeysAndDataInfo ) + { + VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); + pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); + binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + else + { + VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaries.resize( binaries.pipelineBinaryCount ); + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + } + + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); + std::vector, PipelineBinaryKHRAllocator> uniquePipelineBinaries; + uniquePipelineBinaries.reserve( pipelineBinaries.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipelineBinary : pipelineBinaries ) + { + uniquePipelineBinaries.push_back( UniqueHandle( pipelineBinary, deleter ) ); + } + return ResultValue, PipelineBinaryKHRAllocator>>( + result, std::move( uniquePipelineBinaries ) ); + } + + // wrapper function for command vkCreatePipelineBinariesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineBinariesKHR.html + template >::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineBinaryKHRAllocator>> + Device::createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator, + PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function requires " ); +# endif + + std::vector pipelineBinaries; + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; + VULKAN_HPP_NAMESPACE::Result result; + if ( createInfo.pKeysAndDataInfo ) + { + VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); + pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); + binaries.pipelineBinaryCount = 
createInfo.pKeysAndDataInfo->binaryCount; + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + else + { + VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaries.resize( binaries.pipelineBinaryCount ); + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + } + + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); + std::vector, PipelineBinaryKHRAllocator> uniquePipelineBinaries( + pipelineBinaryKHRAllocator ); + uniquePipelineBinaries.reserve( pipelineBinaries.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipelineBinary : pipelineBinaries ) + { + uniquePipelineBinaries.push_back( UniqueHandle( pipelineBinary, deleter ) ); + } + return ResultValue, PipelineBinaryKHRAllocator>>( + result, std::move( uniquePipelineBinaries ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipelineBinaryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineBinaryKHR.html + template + VULKAN_HPP_INLINE void Device::destroyPipelineBinaryKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineBinaryKHR( + static_cast( m_device ), static_cast( pipelineBinary ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipelineBinaryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineBinaryKHR.html + template + VULKAN_HPP_INLINE void Device::destroyPipelineBinaryKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineBinaryKHR && "Function requires " ); +# endif + + d.vkDestroyPipelineBinaryKHR( + m_device, + static_cast( pipelineBinary ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipelineBinaryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineBinaryKHR.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineBinaryKHR( + static_cast( m_device ), static_cast( pipelineBinary ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipelineBinaryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineBinaryKHR.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineBinaryKHR && "Function requires " ); +# endif + + d.vkDestroyPipelineBinaryKHR( + m_device, + static_cast( pipelineBinary ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPipelineKeyKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineKeyKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineKeyKHR( const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo, + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineKey, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPipelineKeyKHR( static_cast( m_device ), + reinterpret_cast( pPipelineCreateInfo ), + reinterpret_cast( pPipelineKey ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineKeyKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineKeyKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getPipelineKeyKHR( Optional pipelineCreateInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineKeyKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR pipelineKey; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPipelineKeyKHR( + m_device, + reinterpret_cast( static_cast( pipelineCreateInfo ) ), + reinterpret_cast( &pipelineKey ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineKeyKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineKey ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPipelineBinaryDataKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineBinaryDataKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR * pInfo, + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKey, + size_t * pPipelineBinaryDataSize, + void * pPipelineBinaryData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPipelineBinaryDataKHR( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pPipelineBinaryKey ), + pPipelineBinaryDataSize, + pPipelineBinaryData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineBinaryDataKHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineBinaryDataKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>>::type + Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineBinaryDataKHR && "Function requires " ); +# endif + + std::pair> data_; + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR & pipelineBinaryKey = data_.first; + std::vector & pipelineBinaryData = data_.second; + size_t pipelineBinaryDataSize; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetPipelineBinaryDataKHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + nullptr ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaryData.resize( pipelineBinaryDataSize ); + result = static_cast( d.vkGetPipelineBinaryDataKHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + reinterpret_cast( pipelineBinaryData.data() ) ) ); + } + + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineBinaryDataKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } + + // wrapper function for command vkGetPipelineBinaryDataKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineBinaryDataKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>>::type + Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineBinaryDataKHR && "Function requires " ); +# endif + + std::pair> data_( + std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) ); + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR & pipelineBinaryKey = data_.first; + std::vector & pipelineBinaryData = data_.second; + size_t pipelineBinaryDataSize; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetPipelineBinaryDataKHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + nullptr ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaryData.resize( pipelineBinaryDataSize ); + result = static_cast( d.vkGetPipelineBinaryDataKHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + reinterpret_cast( pipelineBinaryData.data() ) ) ); + } + + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineBinaryDataKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkReleaseCapturedPipelineDataKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseCapturedPipelineDataKHR.html + template + VULKAN_HPP_INLINE Result Device::releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR * pInfo, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkReleaseCapturedPipelineDataKHR( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pAllocator ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkReleaseCapturedPipelineDataKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseCapturedPipelineDataKHR.html + template + VULKAN_HPP_INLINE void Device::releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR & info, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkReleaseCapturedPipelineDataKHR && "Function requires " ); +# endif + + d.vkReleaseCapturedPipelineDataKHR( + m_device, + reinterpret_cast( &info ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_QCOM_tile_properties === + + // wrapper function for command vkGetFramebufferTilePropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFramebufferTilePropertiesQCOM.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + uint32_t * pPropertiesCount, + VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetFramebufferTilePropertiesQCOM( static_cast( m_device ), + static_cast( framebuffer ), + pPropertiesCount, + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetFramebufferTilePropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFramebufferTilePropertiesQCOM.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetFramebufferTilePropertiesQCOM && "Function requires " ); +# endif + + std::vector properties; + uint32_t propertiesCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast( framebuffer ), &propertiesCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertiesCount ) + { + properties.resize( propertiesCount ); + result = static_cast( d.vkGetFramebufferTilePropertiesQCOM( + m_device, static_cast( framebuffer ), &propertiesCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + + VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); + if ( propertiesCount < properties.size() ) + { + properties.resize( propertiesCount ); + } + return properties; + } + + // wrapper function for command vkGetFramebufferTilePropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFramebufferTilePropertiesQCOM.html + template ::value, 
int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetFramebufferTilePropertiesQCOM && "Function requires " ); +# endif + + std::vector properties( tilePropertiesQCOMAllocator ); + uint32_t propertiesCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast( framebuffer ), &propertiesCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertiesCount ) + { + properties.resize( propertiesCount ); + result = static_cast( d.vkGetFramebufferTilePropertiesQCOM( + m_device, static_cast( framebuffer ), &propertiesCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + + VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); + if ( propertiesCount < properties.size() ) + { + properties.resize( propertiesCount ); + } + return properties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDynamicRenderingTilePropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDynamicRenderingTilePropertiesQCOM.html + template + VULKAN_HPP_INLINE Result Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, + VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDynamicRenderingTilePropertiesQCOM( static_cast( m_device ), + reinterpret_cast( pRenderingInfo ), + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDynamicRenderingTilePropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDynamicRenderingTilePropertiesQCOM.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM + Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDynamicRenderingTilePropertiesQCOM && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties; + d.vkGetDynamicRenderingTilePropertiesQCOM( + m_device, reinterpret_cast( &renderingInfo ), reinterpret_cast( &properties ) ); + + return properties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_cooperative_vector === + + // wrapper function for command vkGetPhysicalDeviceCooperativeVectorPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeVectorPropertiesNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeVectorPropertiesNV( + uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeVectorPropertiesNV * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
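+ // The enhanced overloads in this file hide the standard Vulkan count-then-fill enumeration loop
+ // (query the count with a null pointer, resize, query again, retry on eIncomplete). A hypothetical
+ // caller-side sketch of the same pattern, for illustration only (assumes the default "vk"
+ // namespace and a vk::PhysicalDevice "physicalDevice"):
+ //   uint32_t count = 0;
+ //   vk::Result r = physicalDevice.getCooperativeVectorPropertiesNV( &count, nullptr );
+ //   std::vector<vk::CooperativeVectorPropertiesNV> props( count );
+ //   r = physicalDevice.getCooperativeVectorPropertiesNV( &count, props.data() );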
return static_cast( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV( + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceCooperativeVectorPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeVectorPropertiesNV.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCooperativeVectorPropertiesNV( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV && + "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeVectorPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceCooperativeVectorPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeVectorPropertiesNV.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCooperativeVectorPropertiesNV( CooperativeVectorPropertiesNVAllocator & cooperativeVectorPropertiesNVAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV && + "Function requires " ); +# endif + + std::vector properties( + cooperativeVectorPropertiesNVAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeVectorPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + 
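+ // Hypothetical usage sketch for the vkConvertCooperativeVectorMatrixNV wrappers below
+ // (illustration only, not part of the generated header; "info" stands for a filled
+ // vk::ConvertCooperativeVectorMatrixInfoNV): calling with a null destination host address is
+ // expected to report the required destination size, after which the conversion can be performed.
+ //   vk::Result sizeQuery = device.convertCooperativeVectorMatrixNV( info );  // destination address still null
+ //   // allocate the reported number of bytes, point the destination host address at it, then:
+ //   vk::Result converted = device.convertCooperativeVectorMatrixNV( info );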
// wrapper function for command vkConvertCooperativeVectorMatrixNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkConvertCooperativeVectorMatrixNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::convertCooperativeVectorMatrixNV( + const VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkConvertCooperativeVectorMatrixNV( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkConvertCooperativeVectorMatrixNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkConvertCooperativeVectorMatrixNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::convertCooperativeVectorMatrixNV( const VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkConvertCooperativeVectorMatrixNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkConvertCooperativeVectorMatrixNV( m_device, reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::convertCooperativeVectorMatrixNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdConvertCooperativeVectorMatrixNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdConvertCooperativeVectorMatrixNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::convertCooperativeVectorMatrixNV( uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV * pInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdConvertCooperativeVectorMatrixNV( + static_cast( m_commandBuffer ), infoCount, reinterpret_cast( pInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdConvertCooperativeVectorMatrixNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdConvertCooperativeVectorMatrixNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::convertCooperativeVectorMatrixNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdConvertCooperativeVectorMatrixNV && "Function requires " ); +# endif + + d.vkCmdConvertCooperativeVectorMatrixNV( m_commandBuffer, infos.size(), reinterpret_cast( infos.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_low_latency2 === + + // wrapper function for command vkSetLatencySleepModeNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLatencySleepModeNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV * pSleepModeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
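+ // Hypothetical usage sketch for the VK_NV_low_latency2 wrappers in this section (illustration
+ // only, not part of the generated header; assumes the default "vk" namespace, a vk::Device
+ // "device", a vk::SwapchainKHR "swapchain", plus an assumed timeline semaphore
+ // "lowLatencySemaphore" and counter "signalValue"):
+ //   vk::LatencySleepModeInfoNV sleepMode{ VK_TRUE /*lowLatencyMode*/, VK_FALSE /*lowLatencyBoost*/, 0 /*minimumIntervalUs*/ };
+ //   device.setLatencySleepModeNV( swapchain, sleepMode );
+ //   vk::LatencySleepInfoNV sleepInfo{ lowLatencySemaphore, signalValue };
+ //   device.latencySleepNV( swapchain, sleepInfo );  // wait on the semaphore before sampling input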
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkSetLatencySleepModeNV( + static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pSleepModeInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetLatencySleepModeNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLatencySleepModeNV.html + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetLatencySleepModeNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkSetLatencySleepModeNV( m_device, static_cast( swapchain ), reinterpret_cast( &sleepModeInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setLatencySleepModeNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkLatencySleepNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkLatencySleepNV.html + template + VULKAN_HPP_INLINE Result Device::latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV * pSleepInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkLatencySleepNV( + static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pSleepInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkLatencySleepNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkLatencySleepNV.html + template + VULKAN_HPP_INLINE void Device::latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkLatencySleepNV && "Function requires " ); +# endif + + d.vkLatencySleepNV( m_device, static_cast( swapchain ), reinterpret_cast( &sleepInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSetLatencyMarkerNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLatencyMarkerNV.html + template + VULKAN_HPP_INLINE void Device::setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV * pLatencyMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkSetLatencyMarkerNV( + static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pLatencyMarkerInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetLatencyMarkerNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLatencyMarkerNV.html + template + VULKAN_HPP_INLINE void Device::setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
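+ // Hypothetical usage sketch for vkSetLatencyMarkerNV (illustration only, not part of the
+ // generated header; "presentID" is an assumed per-frame counter matching the one used at present
+ // time):
+ //   device.setLatencyMarkerNV( swapchain, vk::SetLatencyMarkerInfoNV{ presentID, vk::LatencyMarkerNV::eSimulationStart } );
+ //   // ... run the frame's simulation work ...
+ //   device.setLatencyMarkerNV( swapchain, vk::SetLatencyMarkerInfoNV{ presentID, vk::LatencyMarkerNV::eSimulationEnd } );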
+ { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetLatencyMarkerNV && "Function requires " ); +# endif + + d.vkSetLatencyMarkerNV( m_device, static_cast( swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetLatencyTimingsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetLatencyTimingsNV.html + template + VULKAN_HPP_INLINE void Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV * pLatencyMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetLatencyTimingsNV( + static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pLatencyMarkerInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetLatencyTimingsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetLatencyTimingsNV.html + template < + typename LatencyTimingsFrameReportNVAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetLatencyTimingsNV && "Function requires " ); +# endif + + std::vector timings; + VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; + d.vkGetLatencyTimingsNV( m_device, static_cast( swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); + timings.resize( latencyMarkerInfo.timingCount ); + latencyMarkerInfo.pTimings = timings.data(); + d.vkGetLatencyTimingsNV( m_device, static_cast( swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); + + return timings; + } + + // wrapper function for command vkGetLatencyTimingsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetLatencyTimingsNV.html + template < + typename LatencyTimingsFrameReportNVAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + LatencyTimingsFrameReportNVAllocator & latencyTimingsFrameReportNVAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetLatencyTimingsNV && "Function requires " ); +# endif + + std::vector timings( latencyTimingsFrameReportNVAllocator ); + VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; + d.vkGetLatencyTimingsNV( m_device, static_cast( swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); + timings.resize( latencyMarkerInfo.timingCount ); + latencyMarkerInfo.pTimings = timings.data(); + d.vkGetLatencyTimingsNV( m_device, static_cast( swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); + + return timings; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkQueueNotifyOutOfBandNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueNotifyOutOfBandNV.html + template + VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV * pQueueTypeInfo, 
+ Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkQueueNotifyOutOfBandNV( static_cast( m_queue ), reinterpret_cast( pQueueTypeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueNotifyOutOfBandNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueNotifyOutOfBandNV.html + template + VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueNotifyOutOfBandNV && "Function requires " ); +# endif + + d.vkQueueNotifyOutOfBandNV( m_queue, reinterpret_cast( &queueTypeInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_cooperative_matrix === + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesKHR( + uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCooperativeMatrixPropertiesKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR && + "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR.html + template ::value, + int>::type> + 
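+ // Hypothetical usage sketch for the VK_KHR_cooperative_matrix property query (illustration only,
+ // not part of the generated header; assumes the default "vk" namespace and a vk::PhysicalDevice
+ // "physicalDevice"):
+ //   auto matrixProps = physicalDevice.getCooperativeMatrixPropertiesKHR();
+ //   for ( vk::CooperativeMatrixPropertiesKHR const & p : matrixProps )
+ //   {
+ //     // e.g. pick an entry whose MSize/NSize/KSize and component types match the shader's needs
+ //   }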
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCooperativeMatrixPropertiesKHR( CooperativeMatrixPropertiesKHRAllocator & cooperativeMatrixPropertiesKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR && + "Function requires " ); +# endif + + std::vector properties( + cooperativeMatrixPropertiesKHRAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + + // wrapper function for command vkCmdSetAttachmentFeedbackLoopEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetAttachmentFeedbackLoopEnableEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetAttachmentFeedbackLoopEnableEXT( static_cast( m_commandBuffer ), static_cast( aspectMask ) ); + } + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + + // wrapper function for command vkGetScreenBufferPropertiesQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetScreenBufferPropertiesQNX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getScreenBufferPropertiesQNX( const struct _screen_buffer * buffer, + VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetScreenBufferPropertiesQNX( static_cast( m_device ), buffer, reinterpret_cast( pProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetScreenBufferPropertiesQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetScreenBufferPropertiesQNX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetScreenBufferPropertiesQNX && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX properties; + 
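+ // Hypothetical usage sketch for vkGetScreenBufferPropertiesQNX (illustration only, not part of the
+ // generated header; only meaningful when VK_USE_PLATFORM_SCREEN_QNX is defined and "screenBuffer"
+ // is an assumed valid _screen_buffer pointer obtained from the QNX Screen API):
+ //   vk::ScreenBufferPropertiesQNX screenProps = device.getScreenBufferPropertiesQNX( *screenBuffer );
+ //   // screenProps.allocationSize and screenProps.memoryTypeBits drive the subsequent memory import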
VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetScreenBufferPropertiesQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetScreenBufferPropertiesQNX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetScreenBufferPropertiesQNX && "Function requires " ); +# endif + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX & properties = structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_line_rasterization === + + // wrapper function for command vkCmdSetLineStippleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStippleKHR.html + template + VULKAN_HPP_INLINE void + CommandBuffer::setLineStippleKHR( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLineStippleKHR( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); + } + + //=== VK_KHR_calibrated_timestamps === + + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsKHR( uint32_t * pTimeDomainCount, + VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( + static_cast( m_physicalDevice ), pTimeDomainCount, reinterpret_cast( pTimeDomains ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getCalibrateableTimeDomainsKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR && + "Function requires or " ); +# endif + + std::vector timeDomains; + uint32_t timeDomainCount; + VULKAN_HPP_NAMESPACE::Result 
result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); + } + + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getCalibrateableTimeDomainsKHR( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR && + "Function requires or " ); +# endif + + std::vector timeDomains( timeDomainKHRAllocator ); + uint32_t timeDomainCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetCalibratedTimestampsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsKHR( uint32_t timestampCount, + const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetCalibratedTimestampsKHR( static_cast( m_device ), + timestampCount, + reinterpret_cast( pTimestampInfos ), + pTimestamps, + pMaxDeviation ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetCalibratedTimestampsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsKHR.html + template ::value, int>::type> + 
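+ // Hypothetical usage sketch for the VK_KHR_calibrated_timestamps wrappers (illustration only, not
+ // part of the generated header; assumes the default "vk" namespace, a vk::PhysicalDevice
+ // "physicalDevice" and a vk::Device "device"):
+ //   auto timeDomains = physicalDevice.getCalibrateableTimeDomainsKHR();
+ //   std::array<vk::CalibratedTimestampInfoKHR, 2> infos = {
+ //     vk::CalibratedTimestampInfoKHR{ vk::TimeDomainKHR::eDevice },
+ //     vk::CalibratedTimestampInfoKHR{ vk::TimeDomainKHR::eClockMonotonic } };
+ //   auto [timestamps, maxDeviation] = device.getCalibratedTimestampsKHR( infos );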
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, uint64_t>>::type + Device::getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR && + "Function requires or " ); +# endif + + std::pair, uint64_t> data_( + std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); + std::vector & timestamps = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetCalibratedTimestampsKHR( + m_device, timestampInfos.size(), reinterpret_cast( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } + + // wrapper function for command vkGetCalibratedTimestampsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsKHR.html + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, uint64_t>>::type + Device::getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, + Uint64_tAllocator & uint64_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR && + "Function requires or " ); +# endif + + std::pair, uint64_t> data_( + std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) ); + std::vector & timestamps = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetCalibratedTimestampsKHR( + m_device, timestampInfos.size(), reinterpret_cast( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } + + // wrapper function for command vkGetCalibratedTimestampsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR && + "Function requires or " ); +# endif + + std::pair data_; + uint64_t & timestamp = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetCalibratedTimestampsKHR( m_device, 1, reinterpret_cast( ×tampInfo ), ×tamp, &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_maintenance6 === + + // 
wrapper function for command vkCmdBindDescriptorSets2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo * pBindDescriptorSetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorSets2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( pBindDescriptorSetsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindDescriptorSets2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets2KHR && "Function requires or " ); +# endif + + d.vkCmdBindDescriptorSets2KHR( m_commandBuffer, reinterpret_cast( &bindDescriptorSetsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushConstants2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo * pPushConstantsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushConstants2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pPushConstantsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushConstants2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushConstants2KHR && "Function requires or " ); +# endif + + d.vkCmdPushConstants2KHR( m_commandBuffer, reinterpret_cast( &pushConstantsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDescriptorSet2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo * pPushDescriptorSetInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSet2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( pPushDescriptorSetInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSet2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet2KHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet2KHR && "Function requires or " ); +# endif + + d.vkCmdPushDescriptorSet2KHR( m_commandBuffer, reinterpret_cast( &pushDescriptorSetInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDescriptorSetWithTemplate2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate2KHR.html + template + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSetWithTemplate2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( pPushDescriptorSetWithTemplateInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSetWithTemplate2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate2KHR.html + template + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetWithTemplate2KHR && + "Function requires or " ); +# endif + + d.vkCmdPushDescriptorSetWithTemplate2KHR( m_commandBuffer, + reinterpret_cast( &pushDescriptorSetWithTemplateInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDescriptorBufferOffsets2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDescriptorBufferOffsets2EXT.html + template + VULKAN_HPP_INLINE void + CommandBuffer::setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDescriptorBufferOffsets2EXT( static_cast( m_commandBuffer ), + reinterpret_cast( pSetDescriptorBufferOffsetsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetDescriptorBufferOffsets2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDescriptorBufferOffsets2EXT.html + template + VULKAN_HPP_INLINE void + CommandBuffer::setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetDescriptorBufferOffsets2EXT && "Function requires " ); +# endif + + d.vkCmdSetDescriptorBufferOffsets2EXT( m_commandBuffer, reinterpret_cast( &setDescriptorBufferOffsetsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindDescriptorBufferEmbeddedSamplers2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBufferEmbeddedSamplers2EXT.html + template + VULKAN_HPP_INLINE void 
CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT( + const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( + static_cast( m_commandBuffer ), + reinterpret_cast( pBindDescriptorBufferEmbeddedSamplersInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindDescriptorBufferEmbeddedSamplers2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBufferEmbeddedSamplers2EXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT( + const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT && + "Function requires " ); +# endif + + d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( + m_commandBuffer, reinterpret_cast( &bindDescriptorBufferEmbeddedSamplersInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_QCOM_tile_memory_heap === + + // wrapper function for command vkCmdBindTileMemoryQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindTileMemoryQCOM.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindTileMemoryQCOM( const VULKAN_HPP_NAMESPACE::TileMemoryBindInfoQCOM * pTileMemoryBindInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindTileMemoryQCOM( static_cast( m_commandBuffer ), reinterpret_cast( pTileMemoryBindInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindTileMemoryQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindTileMemoryQCOM.html + template + VULKAN_HPP_INLINE void CommandBuffer::bindTileMemoryQCOM( Optional tileMemoryBindInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindTileMemoryQCOM && "Function requires " ); +# endif + + d.vkCmdBindTileMemoryQCOM( + m_commandBuffer, + reinterpret_cast( static_cast( tileMemoryBindInfo ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_external_compute_queue === + + // wrapper function for command vkCreateExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExternalComputeQueueNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createExternalComputeQueueNV( const VULKAN_HPP_NAMESPACE::ExternalComputeQueueCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV * pExternalQueue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateExternalComputeQueueNV( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pExternalQueue ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for 
command vkCreateExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExternalComputeQueueNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createExternalComputeQueueNV( const VULKAN_HPP_NAMESPACE::ExternalComputeQueueCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExternalComputeQueueNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV externalQueue; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateExternalComputeQueueNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &externalQueue ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createExternalComputeQueueNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( externalQueue ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExternalComputeQueueNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createExternalComputeQueueNVUnique( const VULKAN_HPP_NAMESPACE::ExternalComputeQueueCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExternalComputeQueueNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV externalQueue; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateExternalComputeQueueNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &externalQueue ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createExternalComputeQueueNVUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( externalQueue, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyExternalComputeQueueNV.html + template + VULKAN_HPP_INLINE void Device::destroyExternalComputeQueueNV( VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV externalQueue, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyExternalComputeQueueNV( static_cast( m_device ), + static_cast( externalQueue ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyExternalComputeQueueNV.html + template + VULKAN_HPP_INLINE void Device::destroyExternalComputeQueueNV( VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV externalQueue, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyExternalComputeQueueNV && "Function requires " ); +# endif + + d.vkDestroyExternalComputeQueueNV( + m_device, + static_cast( externalQueue ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyExternalComputeQueueNV.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV externalQueue, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyExternalComputeQueueNV( static_cast( m_device ), + static_cast( externalQueue ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyExternalComputeQueueNV.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV externalQueue, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyExternalComputeQueueNV && "Function requires " ); +# endif + + d.vkDestroyExternalComputeQueueNV( + m_device, + static_cast( externalQueue ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetExternalComputeQueueDataNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExternalComputeQueueDataNV.html + template + VULKAN_HPP_INLINE void ExternalComputeQueueNV::getData( VULKAN_HPP_NAMESPACE::ExternalComputeQueueDataParamsNV * params, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetExternalComputeQueueDataNV( + static_cast( m_externalComputeQueueNV ), reinterpret_cast( params ), pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetExternalComputeQueueDataNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExternalComputeQueueDataNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair + ExternalComputeQueueNV::getData( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetExternalComputeQueueDataNV && "Function requires " ); +# endif + + std::pair data_; + VULKAN_HPP_NAMESPACE::ExternalComputeQueueDataParamsNV & arams = data_.first; + DataType & data = data_.second; + d.vkGetExternalComputeQueueDataNV( m_externalComputeQueueNV, reinterpret_cast( &arams ), &data ); + + return data_; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_cluster_acceleration_structure === + + // wrapper function for command vkGetClusterAccelerationStructureBuildSizesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetClusterAccelerationStructureBuildSizesNV.html + template + VULKAN_HPP_INLINE void Device::getClusterAccelerationStructureBuildSizesNV( 
const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetClusterAccelerationStructureBuildSizesNV( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pSizeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetClusterAccelerationStructureBuildSizesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetClusterAccelerationStructureBuildSizesNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + Device::getClusterAccelerationStructureBuildSizesNV( const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetClusterAccelerationStructureBuildSizesNV && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; + d.vkGetClusterAccelerationStructureBuildSizesNV( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &sizeInfo ) ); + + return sizeInfo; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBuildClusterAccelerationStructureIndirectNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildClusterAccelerationStructureIndirectNV.html + template + VULKAN_HPP_INLINE void + CommandBuffer::buildClusterAccelerationStructureIndirectNV( const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureCommandsInfoNV * pCommandInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildClusterAccelerationStructureIndirectNV( static_cast( m_commandBuffer ), + reinterpret_cast( pCommandInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBuildClusterAccelerationStructureIndirectNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildClusterAccelerationStructureIndirectNV.html + template + VULKAN_HPP_INLINE void + CommandBuffer::buildClusterAccelerationStructureIndirectNV( const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureCommandsInfoNV & commandInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBuildClusterAccelerationStructureIndirectNV && + "Function requires " ); +# endif + + d.vkCmdBuildClusterAccelerationStructureIndirectNV( m_commandBuffer, + reinterpret_cast( &commandInfos ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_partitioned_acceleration_structure === + + // wrapper function for command vkGetPartitionedAccelerationStructuresBuildSizesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPartitionedAccelerationStructuresBuildSizesNV.html + template + VULKAN_HPP_INLINE void + Device::getPartitionedAccelerationStructuresBuildSizesNV( const VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV * pInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
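// Editorial note (not part of the generated header): as with the other extension wrappers in this
// file, the pointer-based overload here only checks the dispatcher's header version and forwards to
// vkGetPartitionedAccelerationStructuresBuildSizesNV, while the enhanced-mode overload further below
// takes a reference parameter and returns the AccelerationStructureBuildSizesInfoKHR result by value.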
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPartitionedAccelerationStructuresBuildSizesNV( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pSizeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPartitionedAccelerationStructuresBuildSizesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPartitionedAccelerationStructuresBuildSizesNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + Device::getPartitionedAccelerationStructuresBuildSizesNV( const VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPartitionedAccelerationStructuresBuildSizesNV && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; + d.vkGetPartitionedAccelerationStructuresBuildSizesNV( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &sizeInfo ) ); + + return sizeInfo; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBuildPartitionedAccelerationStructuresNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildPartitionedAccelerationStructuresNV.html + template + VULKAN_HPP_INLINE void + CommandBuffer::buildPartitionedAccelerationStructuresNV( const VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureInfoNV * pBuildInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildPartitionedAccelerationStructuresNV( static_cast( m_commandBuffer ), + reinterpret_cast( pBuildInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBuildPartitionedAccelerationStructuresNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildPartitionedAccelerationStructuresNV.html + template + VULKAN_HPP_INLINE void + CommandBuffer::buildPartitionedAccelerationStructuresNV( const VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureInfoNV & buildInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBuildPartitionedAccelerationStructuresNV && + "Function requires " ); +# endif + + d.vkCmdBuildPartitionedAccelerationStructuresNV( m_commandBuffer, reinterpret_cast( &buildInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_device_generated_commands === + + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsEXT.html + template + VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetGeneratedCommandsMemoryRequirementsEXT( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsEXT && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetGeneratedCommandsMemoryRequirementsEXT( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsEXT && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetGeneratedCommandsMemoryRequirementsEXT( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPreprocessGeneratedCommandsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPreprocessGeneratedCommandsEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT * pGeneratedCommandsInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPreprocessGeneratedCommandsEXT( static_cast( m_commandBuffer ), + reinterpret_cast( pGeneratedCommandsInfo ), + static_cast( stateCommandBuffer ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPreprocessGeneratedCommandsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPreprocessGeneratedCommandsEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPreprocessGeneratedCommandsEXT && "Function requires " ); +# endif + + d.vkCmdPreprocessGeneratedCommandsEXT( + m_commandBuffer, reinterpret_cast( &generatedCommandsInfo ), static_cast( stateCommandBuffer 
) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdExecuteGeneratedCommandsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteGeneratedCommandsEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT * pGeneratedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdExecuteGeneratedCommandsEXT( static_cast( m_commandBuffer ), + static_cast( isPreprocessed ), + reinterpret_cast( pGeneratedCommandsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdExecuteGeneratedCommandsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteGeneratedCommandsEXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdExecuteGeneratedCommandsEXT && "Function requires " ); +# endif + + d.vkCmdExecuteGeneratedCommandsEXT( + m_commandBuffer, static_cast( isPreprocessed ), reinterpret_cast( &generatedCommandsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createIndirectCommandsLayoutEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT * pIndirectCommandsLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateIndirectCommandsLayoutEXT( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pIndirectCommandsLayout ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createIndirectCommandsLayoutEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectCommandsLayoutEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutEXT" ); + + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( indirectCommandsLayout ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createIndirectCommandsLayoutEXTUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectCommandsLayoutEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + indirectCommandsLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutEXT.html + template + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectCommandsLayoutEXT( static_cast( m_device ), + static_cast( indirectCommandsLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutEXT.html + template + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutEXT && "Function requires " ); +# endif + + d.vkDestroyIndirectCommandsLayoutEXT( + m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutEXT.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectCommandsLayoutEXT( static_cast( 
m_device ), + static_cast( indirectCommandsLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutEXT.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutEXT && "Function requires " ); +# endif + + d.vkDestroyIndirectCommandsLayoutEXT( + m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectExecutionSetEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createIndirectExecutionSetEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT * pIndirectExecutionSet, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateIndirectExecutionSetEXT( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pIndirectExecutionSet ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectExecutionSetEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createIndirectExecutionSetEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectExecutionSetEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectExecutionSetEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectExecutionSet ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectExecutionSetEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( indirectExecutionSet ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectExecutionSetEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createIndirectExecutionSetEXTUnique( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + 
VULKAN_HPP_ASSERT( d.vkCreateIndirectExecutionSetEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectExecutionSetEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectExecutionSet ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectExecutionSetEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + indirectExecutionSet, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectExecutionSetEXT.html + template + VULKAN_HPP_INLINE void Device::destroyIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectExecutionSetEXT( static_cast( m_device ), + static_cast( indirectExecutionSet ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectExecutionSetEXT.html + template + VULKAN_HPP_INLINE void Device::destroyIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectExecutionSetEXT && "Function requires " ); +# endif + + d.vkDestroyIndirectExecutionSetEXT( + m_device, + static_cast( indirectExecutionSet ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectExecutionSetEXT.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectExecutionSetEXT( static_cast( m_device ), + static_cast( indirectExecutionSet ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectExecutionSetEXT.html + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectExecutionSetEXT && "Function requires " ); +# endif + + d.vkDestroyIndirectExecutionSetEXT( + 
m_device, + static_cast( indirectExecutionSet ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUpdateIndirectExecutionSetPipelineEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateIndirectExecutionSetPipelineEXT.html + template + VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetPipelineEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT * pExecutionSetWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUpdateIndirectExecutionSetPipelineEXT( static_cast( m_device ), + static_cast( indirectExecutionSet ), + executionSetWriteCount, + reinterpret_cast( pExecutionSetWrites ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUpdateIndirectExecutionSetPipelineEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateIndirectExecutionSetPipelineEXT.html + template + VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetPipelineEXT( + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateIndirectExecutionSetPipelineEXT && + "Function requires " ); +# endif + + d.vkUpdateIndirectExecutionSetPipelineEXT( m_device, + static_cast( indirectExecutionSet ), + executionSetWrites.size(), + reinterpret_cast( executionSetWrites.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUpdateIndirectExecutionSetShaderEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateIndirectExecutionSetShaderEXT.html + template + VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetShaderEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT * pExecutionSetWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUpdateIndirectExecutionSetShaderEXT( static_cast( m_device ), + static_cast( indirectExecutionSet ), + executionSetWriteCount, + reinterpret_cast( pExecutionSetWrites ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUpdateIndirectExecutionSetShaderEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateIndirectExecutionSetShaderEXT.html + template + VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetShaderEXT( + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateIndirectExecutionSetShaderEXT && + "Function requires " ); +# endif + + d.vkUpdateIndirectExecutionSetShaderEXT( m_device, + static_cast( indirectExecutionSet ), + executionSetWrites.size(), + reinterpret_cast( executionSetWrites.data() ) ); + } +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_cooperative_matrix2 === + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + static_cast( m_physicalDevice ), + pPropertyCount, + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV && + "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV.html + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( + CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator & cooperativeMatrixFlexibleDimensionsPropertiesNVAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV && + "Function requires " ); +# endif + + std::vector properties( + 
cooperativeMatrixFlexibleDimensionsPropertiesNVAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + + // wrapper function for command vkGetMemoryMetalHandleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryMetalHandleEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryMetalHandleEXT( const VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT * pGetMetalHandleInfo, + void ** pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryMetalHandleEXT( + static_cast( m_device ), reinterpret_cast( pGetMetalHandleInfo ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryMetalHandleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryMetalHandleEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryMetalHandleEXT( const VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT & getMetalHandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryMetalHandleEXT && "Function requires " ); +# endif + + void * handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetMemoryMetalHandleEXT( m_device, reinterpret_cast( &getMetalHandleInfo ), &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryMetalHandleEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetMemoryMetalHandlePropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryMetalHandlePropertiesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryMetalHandlePropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHandle, + VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT * pMemoryMetalHandleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryMetalHandlePropertiesEXT( static_cast( m_device ), + static_cast( handleType ), + pHandle, 
+ reinterpret_cast( pMemoryMetalHandleProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryMetalHandlePropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryMetalHandlePropertiesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryMetalHandlePropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HandleType const & handle, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryMetalHandlePropertiesEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT memoryMetalHandleProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetMemoryMetalHandlePropertiesEXT( m_device, + static_cast( handleType ), + reinterpret_cast( &handle ), + reinterpret_cast( &memoryMetalHandleProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryMetalHandlePropertiesEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryMetalHandleProperties ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_fragment_density_map_offset === + + // wrapper function for command vkCmdEndRendering2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRendering2EXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::endRendering2EXT( const VULKAN_HPP_NAMESPACE::RenderingEndInfoEXT * pRenderingEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRendering2EXT( static_cast( m_commandBuffer ), reinterpret_cast( pRenderingEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEndRendering2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRendering2EXT.html + template + VULKAN_HPP_INLINE void CommandBuffer::endRendering2EXT( Optional renderingEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdEndRendering2EXT && "Function requires " ); +# endif + + d.vkCmdEndRendering2EXT( + m_commandBuffer, reinterpret_cast( static_cast( renderingEndInfo ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +} // namespace VULKAN_HPP_NAMESPACE +#endif diff --git a/include/vulkan/vulkan_ggp.h b/include/vulkan/vulkan_ggp.h new file mode 100644 index 00000000..4b6affc0 --- /dev/null +++ b/include/vulkan/vulkan_ggp.h @@ -0,0 +1,60 @@ +#ifndef VULKAN_GGP_H_ +#define VULKAN_GGP_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_GGP_stream_descriptor_surface is a preprocessor guard. Do not pass it to API calls. 
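// Editorial note (not part of the generated header): a minimal usage sketch for the surface-creation
// entry point declared below. It assumes a VkInstance created with the VK_GGP_stream_descriptor_surface
// extension enabled and a GgpStreamDescriptor obtained from the GGP platform runtime; the variable
// names (instance, streamDescriptor, surface) are illustrative only.
//
//     VkStreamDescriptorSurfaceCreateInfoGGP createInfo = {};
//     createInfo.sType            = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP;
//     createInfo.streamDescriptor = streamDescriptor;  /* assumed to come from the platform layer */
//     VkSurfaceKHR surface = VK_NULL_HANDLE;
//     VkResult     result  = vkCreateStreamDescriptorSurfaceGGP( instance, &createInfo, NULL, &surface );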
+#define VK_GGP_stream_descriptor_surface 1 +#define VK_GGP_STREAM_DESCRIPTOR_SURFACE_SPEC_VERSION 1 +#define VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME "VK_GGP_stream_descriptor_surface" +typedef VkFlags VkStreamDescriptorSurfaceCreateFlagsGGP; +typedef struct VkStreamDescriptorSurfaceCreateInfoGGP { + VkStructureType sType; + const void* pNext; + VkStreamDescriptorSurfaceCreateFlagsGGP flags; + GgpStreamDescriptor streamDescriptor; +} VkStreamDescriptorSurfaceCreateInfoGGP; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateStreamDescriptorSurfaceGGP)(VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateStreamDescriptorSurfaceGGP( + VkInstance instance, + const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +// VK_GGP_frame_token is a preprocessor guard. Do not pass it to API calls. +#define VK_GGP_frame_token 1 +#define VK_GGP_FRAME_TOKEN_SPEC_VERSION 1 +#define VK_GGP_FRAME_TOKEN_EXTENSION_NAME "VK_GGP_frame_token" +typedef struct VkPresentFrameTokenGGP { + VkStructureType sType; + const void* pNext; + GgpFrameToken frameToken; +} VkPresentFrameTokenGGP; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_handles.hpp b/include/vulkan/vulkan_handles.hpp new file mode 100644 index 00000000..a08b8bb4 --- /dev/null +++ b/include/vulkan/vulkan_handles.hpp @@ -0,0 +1,20741 @@ +// Copyright 2015-2025 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#ifndef VULKAN_HANDLES_HPP +#define VULKAN_HANDLES_HPP + +// include-what-you-use: make sure, vulkan.hpp is used by code-completers +// IWYU pragma: private; include "vulkan.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + + //=================================== + //=== STRUCT forward declarations === + //=================================== + + //=== VK_VERSION_1_0 === + struct Extent2D; + struct Extent3D; + struct Offset2D; + struct Offset3D; + struct Rect2D; + struct BaseInStructure; + struct BaseOutStructure; + struct BufferMemoryBarrier; + struct DispatchIndirectCommand; + struct DrawIndexedIndirectCommand; + struct DrawIndirectCommand; + struct ImageMemoryBarrier; + struct MemoryBarrier; + struct PipelineCacheHeaderVersionOne; + struct AllocationCallbacks; + struct ApplicationInfo; + struct FormatProperties; + struct ImageFormatProperties; + struct InstanceCreateInfo; + struct MemoryHeap; + struct MemoryType; + struct PhysicalDeviceFeatures; + struct PhysicalDeviceLimits; + struct PhysicalDeviceMemoryProperties; + struct PhysicalDeviceProperties; + struct PhysicalDeviceSparseProperties; + struct QueueFamilyProperties; + struct DeviceCreateInfo; + struct DeviceQueueCreateInfo; + struct ExtensionProperties; + struct LayerProperties; + struct SubmitInfo; + struct MappedMemoryRange; + struct MemoryAllocateInfo; + struct MemoryRequirements; + struct BindSparseInfo; + struct ImageSubresource; + struct SparseBufferMemoryBindInfo; + struct SparseImageFormatProperties; + struct SparseImageMemoryBind; + struct SparseImageMemoryBindInfo; + struct SparseImageMemoryRequirements; + struct SparseImageOpaqueMemoryBindInfo; + struct SparseMemoryBind; + struct FenceCreateInfo; + struct SemaphoreCreateInfo; + struct EventCreateInfo; + struct QueryPoolCreateInfo; + struct BufferCreateInfo; + 
struct BufferViewCreateInfo; + struct ImageCreateInfo; + struct SubresourceLayout; + struct ComponentMapping; + struct ImageSubresourceRange; + struct ImageViewCreateInfo; + struct ShaderModuleCreateInfo; + struct PipelineCacheCreateInfo; + struct ComputePipelineCreateInfo; + struct GraphicsPipelineCreateInfo; + struct PipelineColorBlendAttachmentState; + struct PipelineColorBlendStateCreateInfo; + struct PipelineDepthStencilStateCreateInfo; + struct PipelineDynamicStateCreateInfo; + struct PipelineInputAssemblyStateCreateInfo; + struct PipelineMultisampleStateCreateInfo; + struct PipelineRasterizationStateCreateInfo; + struct PipelineShaderStageCreateInfo; + struct PipelineTessellationStateCreateInfo; + struct PipelineVertexInputStateCreateInfo; + struct PipelineViewportStateCreateInfo; + struct SpecializationInfo; + struct SpecializationMapEntry; + struct StencilOpState; + struct VertexInputAttributeDescription; + struct VertexInputBindingDescription; + struct Viewport; + struct PipelineLayoutCreateInfo; + struct PushConstantRange; + struct SamplerCreateInfo; + struct CopyDescriptorSet; + struct DescriptorBufferInfo; + struct DescriptorImageInfo; + struct DescriptorPoolCreateInfo; + struct DescriptorPoolSize; + struct DescriptorSetAllocateInfo; + struct DescriptorSetLayoutBinding; + struct DescriptorSetLayoutCreateInfo; + struct WriteDescriptorSet; + struct AttachmentDescription; + struct AttachmentReference; + struct FramebufferCreateInfo; + struct RenderPassCreateInfo; + struct SubpassDependency; + struct SubpassDescription; + struct CommandPoolCreateInfo; + struct CommandBufferAllocateInfo; + struct CommandBufferBeginInfo; + struct CommandBufferInheritanceInfo; + struct BufferCopy; + struct BufferImageCopy; + struct ClearAttachment; + union ClearColorValue; + struct ClearDepthStencilValue; + struct ClearRect; + union ClearValue; + struct ImageBlit; + struct ImageCopy; + struct ImageResolve; + struct ImageSubresourceLayers; + struct RenderPassBeginInfo; + + //=== VK_VERSION_1_1 === + struct PhysicalDeviceSubgroupProperties; + struct BindBufferMemoryInfo; + using BindBufferMemoryInfoKHR = BindBufferMemoryInfo; + struct BindImageMemoryInfo; + using BindImageMemoryInfoKHR = BindImageMemoryInfo; + struct PhysicalDevice16BitStorageFeatures; + using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures; + struct MemoryDedicatedRequirements; + using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements; + struct MemoryDedicatedAllocateInfo; + using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo; + struct MemoryAllocateFlagsInfo; + using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo; + struct DeviceGroupRenderPassBeginInfo; + using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo; + struct DeviceGroupCommandBufferBeginInfo; + using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo; + struct DeviceGroupSubmitInfo; + using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo; + struct DeviceGroupBindSparseInfo; + using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo; + struct BindBufferMemoryDeviceGroupInfo; + using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo; + struct BindImageMemoryDeviceGroupInfo; + using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo; + struct PhysicalDeviceGroupProperties; + using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; + struct DeviceGroupDeviceCreateInfo; + using DeviceGroupDeviceCreateInfoKHR = 
DeviceGroupDeviceCreateInfo; + struct BufferMemoryRequirementsInfo2; + using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2; + struct ImageMemoryRequirementsInfo2; + using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2; + struct ImageSparseMemoryRequirementsInfo2; + using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2; + struct MemoryRequirements2; + using MemoryRequirements2KHR = MemoryRequirements2; + struct SparseImageMemoryRequirements2; + using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2; + struct PhysicalDeviceFeatures2; + using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2; + struct PhysicalDeviceProperties2; + using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2; + struct FormatProperties2; + using FormatProperties2KHR = FormatProperties2; + struct ImageFormatProperties2; + using ImageFormatProperties2KHR = ImageFormatProperties2; + struct PhysicalDeviceImageFormatInfo2; + using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2; + struct QueueFamilyProperties2; + using QueueFamilyProperties2KHR = QueueFamilyProperties2; + struct PhysicalDeviceMemoryProperties2; + using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2; + struct SparseImageFormatProperties2; + using SparseImageFormatProperties2KHR = SparseImageFormatProperties2; + struct PhysicalDeviceSparseImageFormatInfo2; + using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2; + struct PhysicalDevicePointClippingProperties; + using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties; + struct RenderPassInputAttachmentAspectCreateInfo; + using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo; + struct InputAttachmentAspectReference; + using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference; + struct ImageViewUsageCreateInfo; + using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo; + struct PipelineTessellationDomainOriginStateCreateInfo; + using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo; + struct RenderPassMultiviewCreateInfo; + using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo; + struct PhysicalDeviceMultiviewFeatures; + using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures; + struct PhysicalDeviceMultiviewProperties; + using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties; + struct PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures; + struct PhysicalDeviceProtectedMemoryFeatures; + struct PhysicalDeviceProtectedMemoryProperties; + struct DeviceQueueInfo2; + struct ProtectedSubmitInfo; + struct SamplerYcbcrConversionCreateInfo; + using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo; + struct SamplerYcbcrConversionInfo; + using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo; + struct BindImagePlaneMemoryInfo; + using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo; + struct ImagePlaneMemoryRequirementsInfo; + using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo; + struct PhysicalDeviceSamplerYcbcrConversionFeatures; + using 
PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures; + struct SamplerYcbcrConversionImageFormatProperties; + using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties; + struct DescriptorUpdateTemplateEntry; + using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; + struct DescriptorUpdateTemplateCreateInfo; + using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo; + struct ExternalMemoryProperties; + using ExternalMemoryPropertiesKHR = ExternalMemoryProperties; + struct PhysicalDeviceExternalImageFormatInfo; + using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo; + struct ExternalImageFormatProperties; + using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties; + struct PhysicalDeviceExternalBufferInfo; + using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo; + struct ExternalBufferProperties; + using ExternalBufferPropertiesKHR = ExternalBufferProperties; + struct PhysicalDeviceIDProperties; + using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties; + struct ExternalMemoryImageCreateInfo; + using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo; + struct ExternalMemoryBufferCreateInfo; + using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo; + struct ExportMemoryAllocateInfo; + using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo; + struct PhysicalDeviceExternalFenceInfo; + using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo; + struct ExternalFenceProperties; + using ExternalFencePropertiesKHR = ExternalFenceProperties; + struct ExportFenceCreateInfo; + using ExportFenceCreateInfoKHR = ExportFenceCreateInfo; + struct ExportSemaphoreCreateInfo; + using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo; + struct PhysicalDeviceExternalSemaphoreInfo; + using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo; + struct ExternalSemaphoreProperties; + using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties; + struct PhysicalDeviceMaintenance3Properties; + using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties; + struct DescriptorSetLayoutSupport; + using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport; + struct PhysicalDeviceShaderDrawParametersFeatures; + using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures; + + //=== VK_VERSION_1_2 === + struct PhysicalDeviceVulkan11Features; + struct PhysicalDeviceVulkan11Properties; + struct PhysicalDeviceVulkan12Features; + struct PhysicalDeviceVulkan12Properties; + struct ImageFormatListCreateInfo; + using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo; + struct RenderPassCreateInfo2; + using RenderPassCreateInfo2KHR = RenderPassCreateInfo2; + struct AttachmentDescription2; + using AttachmentDescription2KHR = AttachmentDescription2; + struct AttachmentReference2; + using AttachmentReference2KHR = AttachmentReference2; + struct SubpassDescription2; + using SubpassDescription2KHR = SubpassDescription2; + struct SubpassDependency2; + using SubpassDependency2KHR = SubpassDependency2; + struct SubpassBeginInfo; + using SubpassBeginInfoKHR = SubpassBeginInfo; + struct SubpassEndInfo; + using SubpassEndInfoKHR = SubpassEndInfo; + struct PhysicalDevice8BitStorageFeatures; + using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures; + struct 
ConformanceVersion; + using ConformanceVersionKHR = ConformanceVersion; + struct PhysicalDeviceDriverProperties; + using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties; + struct PhysicalDeviceShaderAtomicInt64Features; + using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features; + struct PhysicalDeviceShaderFloat16Int8Features; + using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + struct PhysicalDeviceFloatControlsProperties; + using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties; + struct DescriptorSetLayoutBindingFlagsCreateInfo; + using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo; + struct PhysicalDeviceDescriptorIndexingFeatures; + using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures; + struct PhysicalDeviceDescriptorIndexingProperties; + using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties; + struct DescriptorSetVariableDescriptorCountAllocateInfo; + using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo; + struct DescriptorSetVariableDescriptorCountLayoutSupport; + using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport; + struct SubpassDescriptionDepthStencilResolve; + using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve; + struct PhysicalDeviceDepthStencilResolveProperties; + using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties; + struct PhysicalDeviceScalarBlockLayoutFeatures; + using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures; + struct ImageStencilUsageCreateInfo; + using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo; + struct SamplerReductionModeCreateInfo; + using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo; + struct PhysicalDeviceSamplerFilterMinmaxProperties; + using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties; + struct PhysicalDeviceVulkanMemoryModelFeatures; + using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures; + struct PhysicalDeviceImagelessFramebufferFeatures; + using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures; + struct FramebufferAttachmentsCreateInfo; + using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo; + struct FramebufferAttachmentImageInfo; + using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo; + struct RenderPassAttachmentBeginInfo; + using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo; + struct PhysicalDeviceUniformBufferStandardLayoutFeatures; + using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures; + struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures; + using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; + struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + struct AttachmentReferenceStencilLayout; + using 
AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout; + struct AttachmentDescriptionStencilLayout; + using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout; + struct PhysicalDeviceHostQueryResetFeatures; + using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures; + struct PhysicalDeviceTimelineSemaphoreFeatures; + using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures; + struct PhysicalDeviceTimelineSemaphoreProperties; + using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties; + struct SemaphoreTypeCreateInfo; + using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo; + struct TimelineSemaphoreSubmitInfo; + using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo; + struct SemaphoreWaitInfo; + using SemaphoreWaitInfoKHR = SemaphoreWaitInfo; + struct SemaphoreSignalInfo; + using SemaphoreSignalInfoKHR = SemaphoreSignalInfo; + struct PhysicalDeviceBufferDeviceAddressFeatures; + using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures; + struct BufferDeviceAddressInfo; + using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo; + using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo; + struct BufferOpaqueCaptureAddressCreateInfo; + using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo; + struct MemoryOpaqueCaptureAddressAllocateInfo; + using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo; + struct DeviceMemoryOpaqueCaptureAddressInfo; + using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo; + + //=== VK_VERSION_1_3 === + struct PhysicalDeviceVulkan13Features; + struct PhysicalDeviceVulkan13Properties; + struct PipelineCreationFeedbackCreateInfo; + using PipelineCreationFeedbackCreateInfoEXT = PipelineCreationFeedbackCreateInfo; + struct PipelineCreationFeedback; + using PipelineCreationFeedbackEXT = PipelineCreationFeedback; + struct PhysicalDeviceShaderTerminateInvocationFeatures; + using PhysicalDeviceShaderTerminateInvocationFeaturesKHR = PhysicalDeviceShaderTerminateInvocationFeatures; + struct PhysicalDeviceToolProperties; + using PhysicalDeviceToolPropertiesEXT = PhysicalDeviceToolProperties; + struct PhysicalDeviceShaderDemoteToHelperInvocationFeatures; + using PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = PhysicalDeviceShaderDemoteToHelperInvocationFeatures; + struct PhysicalDevicePrivateDataFeatures; + using PhysicalDevicePrivateDataFeaturesEXT = PhysicalDevicePrivateDataFeatures; + struct DevicePrivateDataCreateInfo; + using DevicePrivateDataCreateInfoEXT = DevicePrivateDataCreateInfo; + struct PrivateDataSlotCreateInfo; + using PrivateDataSlotCreateInfoEXT = PrivateDataSlotCreateInfo; + struct PhysicalDevicePipelineCreationCacheControlFeatures; + using PhysicalDevicePipelineCreationCacheControlFeaturesEXT = PhysicalDevicePipelineCreationCacheControlFeatures; + struct MemoryBarrier2; + using MemoryBarrier2KHR = MemoryBarrier2; + struct BufferMemoryBarrier2; + using BufferMemoryBarrier2KHR = BufferMemoryBarrier2; + struct ImageMemoryBarrier2; + using ImageMemoryBarrier2KHR = ImageMemoryBarrier2; + struct DependencyInfo; + using DependencyInfoKHR = DependencyInfo; + struct SubmitInfo2; + using SubmitInfo2KHR = SubmitInfo2; + struct SemaphoreSubmitInfo; + using SemaphoreSubmitInfoKHR = SemaphoreSubmitInfo; + struct CommandBufferSubmitInfo; + using CommandBufferSubmitInfoKHR = 
CommandBufferSubmitInfo; + struct PhysicalDeviceSynchronization2Features; + using PhysicalDeviceSynchronization2FeaturesKHR = PhysicalDeviceSynchronization2Features; + struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + using PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + struct PhysicalDeviceImageRobustnessFeatures; + using PhysicalDeviceImageRobustnessFeaturesEXT = PhysicalDeviceImageRobustnessFeatures; + struct CopyBufferInfo2; + using CopyBufferInfo2KHR = CopyBufferInfo2; + struct CopyImageInfo2; + using CopyImageInfo2KHR = CopyImageInfo2; + struct CopyBufferToImageInfo2; + using CopyBufferToImageInfo2KHR = CopyBufferToImageInfo2; + struct CopyImageToBufferInfo2; + using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2; + struct BlitImageInfo2; + using BlitImageInfo2KHR = BlitImageInfo2; + struct ResolveImageInfo2; + using ResolveImageInfo2KHR = ResolveImageInfo2; + struct BufferCopy2; + using BufferCopy2KHR = BufferCopy2; + struct ImageCopy2; + using ImageCopy2KHR = ImageCopy2; + struct ImageBlit2; + using ImageBlit2KHR = ImageBlit2; + struct BufferImageCopy2; + using BufferImageCopy2KHR = BufferImageCopy2; + struct ImageResolve2; + using ImageResolve2KHR = ImageResolve2; + struct PhysicalDeviceSubgroupSizeControlFeatures; + using PhysicalDeviceSubgroupSizeControlFeaturesEXT = PhysicalDeviceSubgroupSizeControlFeatures; + struct PhysicalDeviceSubgroupSizeControlProperties; + using PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties; + struct PipelineShaderStageRequiredSubgroupSizeCreateInfo; + using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo; + using ShaderRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo; + struct PhysicalDeviceInlineUniformBlockFeatures; + using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures; + struct PhysicalDeviceInlineUniformBlockProperties; + using PhysicalDeviceInlineUniformBlockPropertiesEXT = PhysicalDeviceInlineUniformBlockProperties; + struct WriteDescriptorSetInlineUniformBlock; + using WriteDescriptorSetInlineUniformBlockEXT = WriteDescriptorSetInlineUniformBlock; + struct DescriptorPoolInlineUniformBlockCreateInfo; + using DescriptorPoolInlineUniformBlockCreateInfoEXT = DescriptorPoolInlineUniformBlockCreateInfo; + struct PhysicalDeviceTextureCompressionASTCHDRFeatures; + using PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT = PhysicalDeviceTextureCompressionASTCHDRFeatures; + struct RenderingInfo; + using RenderingInfoKHR = RenderingInfo; + struct RenderingAttachmentInfo; + using RenderingAttachmentInfoKHR = RenderingAttachmentInfo; + struct PipelineRenderingCreateInfo; + using PipelineRenderingCreateInfoKHR = PipelineRenderingCreateInfo; + struct PhysicalDeviceDynamicRenderingFeatures; + using PhysicalDeviceDynamicRenderingFeaturesKHR = PhysicalDeviceDynamicRenderingFeatures; + struct CommandBufferInheritanceRenderingInfo; + using CommandBufferInheritanceRenderingInfoKHR = CommandBufferInheritanceRenderingInfo; + struct PhysicalDeviceShaderIntegerDotProductFeatures; + using PhysicalDeviceShaderIntegerDotProductFeaturesKHR = PhysicalDeviceShaderIntegerDotProductFeatures; + struct PhysicalDeviceShaderIntegerDotProductProperties; + using PhysicalDeviceShaderIntegerDotProductPropertiesKHR = PhysicalDeviceShaderIntegerDotProductProperties; + struct PhysicalDeviceTexelBufferAlignmentProperties; + using 
PhysicalDeviceTexelBufferAlignmentPropertiesEXT = PhysicalDeviceTexelBufferAlignmentProperties; + struct FormatProperties3; + using FormatProperties3KHR = FormatProperties3; + struct PhysicalDeviceMaintenance4Features; + using PhysicalDeviceMaintenance4FeaturesKHR = PhysicalDeviceMaintenance4Features; + struct PhysicalDeviceMaintenance4Properties; + using PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties; + struct DeviceBufferMemoryRequirements; + using DeviceBufferMemoryRequirementsKHR = DeviceBufferMemoryRequirements; + struct DeviceImageMemoryRequirements; + using DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements; + + //=== VK_VERSION_1_4 === + struct PhysicalDeviceVulkan14Features; + struct PhysicalDeviceVulkan14Properties; + struct DeviceQueueGlobalPriorityCreateInfo; + using DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfo; + using DeviceQueueGlobalPriorityCreateInfoKHR = DeviceQueueGlobalPriorityCreateInfo; + struct PhysicalDeviceGlobalPriorityQueryFeatures; + using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeatures; + using PhysicalDeviceGlobalPriorityQueryFeaturesKHR = PhysicalDeviceGlobalPriorityQueryFeatures; + struct QueueFamilyGlobalPriorityProperties; + using QueueFamilyGlobalPriorityPropertiesEXT = QueueFamilyGlobalPriorityProperties; + using QueueFamilyGlobalPriorityPropertiesKHR = QueueFamilyGlobalPriorityProperties; + struct PhysicalDeviceShaderSubgroupRotateFeatures; + using PhysicalDeviceShaderSubgroupRotateFeaturesKHR = PhysicalDeviceShaderSubgroupRotateFeatures; + struct PhysicalDeviceShaderFloatControls2Features; + using PhysicalDeviceShaderFloatControls2FeaturesKHR = PhysicalDeviceShaderFloatControls2Features; + struct PhysicalDeviceShaderExpectAssumeFeatures; + using PhysicalDeviceShaderExpectAssumeFeaturesKHR = PhysicalDeviceShaderExpectAssumeFeatures; + struct PhysicalDeviceLineRasterizationFeatures; + using PhysicalDeviceLineRasterizationFeaturesEXT = PhysicalDeviceLineRasterizationFeatures; + using PhysicalDeviceLineRasterizationFeaturesKHR = PhysicalDeviceLineRasterizationFeatures; + struct PhysicalDeviceLineRasterizationProperties; + using PhysicalDeviceLineRasterizationPropertiesEXT = PhysicalDeviceLineRasterizationProperties; + using PhysicalDeviceLineRasterizationPropertiesKHR = PhysicalDeviceLineRasterizationProperties; + struct PipelineRasterizationLineStateCreateInfo; + using PipelineRasterizationLineStateCreateInfoEXT = PipelineRasterizationLineStateCreateInfo; + using PipelineRasterizationLineStateCreateInfoKHR = PipelineRasterizationLineStateCreateInfo; + struct PhysicalDeviceVertexAttributeDivisorProperties; + using PhysicalDeviceVertexAttributeDivisorPropertiesKHR = PhysicalDeviceVertexAttributeDivisorProperties; + struct VertexInputBindingDivisorDescription; + using VertexInputBindingDivisorDescriptionEXT = VertexInputBindingDivisorDescription; + using VertexInputBindingDivisorDescriptionKHR = VertexInputBindingDivisorDescription; + struct PipelineVertexInputDivisorStateCreateInfo; + using PipelineVertexInputDivisorStateCreateInfoEXT = PipelineVertexInputDivisorStateCreateInfo; + using PipelineVertexInputDivisorStateCreateInfoKHR = PipelineVertexInputDivisorStateCreateInfo; + struct PhysicalDeviceVertexAttributeDivisorFeatures; + using PhysicalDeviceVertexAttributeDivisorFeaturesEXT = PhysicalDeviceVertexAttributeDivisorFeatures; + using PhysicalDeviceVertexAttributeDivisorFeaturesKHR = PhysicalDeviceVertexAttributeDivisorFeatures; + 
struct PhysicalDeviceIndexTypeUint8Features; + using PhysicalDeviceIndexTypeUint8FeaturesEXT = PhysicalDeviceIndexTypeUint8Features; + using PhysicalDeviceIndexTypeUint8FeaturesKHR = PhysicalDeviceIndexTypeUint8Features; + struct MemoryMapInfo; + using MemoryMapInfoKHR = MemoryMapInfo; + struct MemoryUnmapInfo; + using MemoryUnmapInfoKHR = MemoryUnmapInfo; + struct PhysicalDeviceMaintenance5Features; + using PhysicalDeviceMaintenance5FeaturesKHR = PhysicalDeviceMaintenance5Features; + struct PhysicalDeviceMaintenance5Properties; + using PhysicalDeviceMaintenance5PropertiesKHR = PhysicalDeviceMaintenance5Properties; + struct RenderingAreaInfo; + using RenderingAreaInfoKHR = RenderingAreaInfo; + struct DeviceImageSubresourceInfo; + using DeviceImageSubresourceInfoKHR = DeviceImageSubresourceInfo; + struct ImageSubresource2; + using ImageSubresource2EXT = ImageSubresource2; + using ImageSubresource2KHR = ImageSubresource2; + struct SubresourceLayout2; + using SubresourceLayout2EXT = SubresourceLayout2; + using SubresourceLayout2KHR = SubresourceLayout2; + struct PipelineCreateFlags2CreateInfo; + using PipelineCreateFlags2CreateInfoKHR = PipelineCreateFlags2CreateInfo; + struct BufferUsageFlags2CreateInfo; + using BufferUsageFlags2CreateInfoKHR = BufferUsageFlags2CreateInfo; + struct PhysicalDevicePushDescriptorProperties; + using PhysicalDevicePushDescriptorPropertiesKHR = PhysicalDevicePushDescriptorProperties; + struct PhysicalDeviceDynamicRenderingLocalReadFeatures; + using PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR = PhysicalDeviceDynamicRenderingLocalReadFeatures; + struct RenderingAttachmentLocationInfo; + using RenderingAttachmentLocationInfoKHR = RenderingAttachmentLocationInfo; + struct RenderingInputAttachmentIndexInfo; + using RenderingInputAttachmentIndexInfoKHR = RenderingInputAttachmentIndexInfo; + struct PhysicalDeviceMaintenance6Features; + using PhysicalDeviceMaintenance6FeaturesKHR = PhysicalDeviceMaintenance6Features; + struct PhysicalDeviceMaintenance6Properties; + using PhysicalDeviceMaintenance6PropertiesKHR = PhysicalDeviceMaintenance6Properties; + struct BindMemoryStatus; + using BindMemoryStatusKHR = BindMemoryStatus; + struct BindDescriptorSetsInfo; + using BindDescriptorSetsInfoKHR = BindDescriptorSetsInfo; + struct PushConstantsInfo; + using PushConstantsInfoKHR = PushConstantsInfo; + struct PushDescriptorSetInfo; + using PushDescriptorSetInfoKHR = PushDescriptorSetInfo; + struct PushDescriptorSetWithTemplateInfo; + using PushDescriptorSetWithTemplateInfoKHR = PushDescriptorSetWithTemplateInfo; + struct PhysicalDevicePipelineProtectedAccessFeatures; + using PhysicalDevicePipelineProtectedAccessFeaturesEXT = PhysicalDevicePipelineProtectedAccessFeatures; + struct PhysicalDevicePipelineRobustnessFeatures; + using PhysicalDevicePipelineRobustnessFeaturesEXT = PhysicalDevicePipelineRobustnessFeatures; + struct PhysicalDevicePipelineRobustnessProperties; + using PhysicalDevicePipelineRobustnessPropertiesEXT = PhysicalDevicePipelineRobustnessProperties; + struct PipelineRobustnessCreateInfo; + using PipelineRobustnessCreateInfoEXT = PipelineRobustnessCreateInfo; + struct PhysicalDeviceHostImageCopyFeatures; + using PhysicalDeviceHostImageCopyFeaturesEXT = PhysicalDeviceHostImageCopyFeatures; + struct PhysicalDeviceHostImageCopyProperties; + using PhysicalDeviceHostImageCopyPropertiesEXT = PhysicalDeviceHostImageCopyProperties; + struct MemoryToImageCopy; + using MemoryToImageCopyEXT = MemoryToImageCopy; + struct ImageToMemoryCopy; + using ImageToMemoryCopyEXT = 
ImageToMemoryCopy; + struct CopyMemoryToImageInfo; + using CopyMemoryToImageInfoEXT = CopyMemoryToImageInfo; + struct CopyImageToMemoryInfo; + using CopyImageToMemoryInfoEXT = CopyImageToMemoryInfo; + struct CopyImageToImageInfo; + using CopyImageToImageInfoEXT = CopyImageToImageInfo; + struct HostImageLayoutTransitionInfo; + using HostImageLayoutTransitionInfoEXT = HostImageLayoutTransitionInfo; + struct SubresourceHostMemcpySize; + using SubresourceHostMemcpySizeEXT = SubresourceHostMemcpySize; + struct HostImageCopyDevicePerformanceQuery; + using HostImageCopyDevicePerformanceQueryEXT = HostImageCopyDevicePerformanceQuery; + + //=== VK_KHR_surface === + struct SurfaceCapabilitiesKHR; + struct SurfaceFormatKHR; + + //=== VK_KHR_swapchain === + struct SwapchainCreateInfoKHR; + struct PresentInfoKHR; + struct ImageSwapchainCreateInfoKHR; + struct BindImageMemorySwapchainInfoKHR; + struct AcquireNextImageInfoKHR; + struct DeviceGroupPresentCapabilitiesKHR; + struct DeviceGroupPresentInfoKHR; + struct DeviceGroupSwapchainCreateInfoKHR; + + //=== VK_KHR_display === + struct DisplayModeCreateInfoKHR; + struct DisplayModeParametersKHR; + struct DisplayModePropertiesKHR; + struct DisplayPlaneCapabilitiesKHR; + struct DisplayPlanePropertiesKHR; + struct DisplayPropertiesKHR; + struct DisplaySurfaceCreateInfoKHR; + + //=== VK_KHR_display_swapchain === + struct DisplayPresentInfoKHR; + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + struct XlibSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + struct XcbSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + struct WaylandSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + struct AndroidSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + struct Win32SurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + struct DebugReportCallbackCreateInfoEXT; + + //=== VK_AMD_rasterization_order === + struct PipelineRasterizationStateRasterizationOrderAMD; + + //=== VK_EXT_debug_marker === + struct DebugMarkerObjectNameInfoEXT; + struct DebugMarkerObjectTagInfoEXT; + struct DebugMarkerMarkerInfoEXT; + + //=== VK_KHR_video_queue === + struct QueueFamilyQueryResultStatusPropertiesKHR; + struct QueueFamilyVideoPropertiesKHR; + struct VideoProfileInfoKHR; + struct VideoProfileListInfoKHR; + struct VideoCapabilitiesKHR; + struct PhysicalDeviceVideoFormatInfoKHR; + struct VideoFormatPropertiesKHR; + struct VideoPictureResourceInfoKHR; + struct VideoReferenceSlotInfoKHR; + struct VideoSessionMemoryRequirementsKHR; + struct BindVideoSessionMemoryInfoKHR; + struct VideoSessionCreateInfoKHR; + struct VideoSessionParametersCreateInfoKHR; + struct VideoSessionParametersUpdateInfoKHR; + struct VideoBeginCodingInfoKHR; + struct VideoEndCodingInfoKHR; + struct VideoCodingControlInfoKHR; + + //=== VK_KHR_video_decode_queue === + struct VideoDecodeCapabilitiesKHR; + struct VideoDecodeUsageInfoKHR; + struct VideoDecodeInfoKHR; + + //=== VK_NV_dedicated_allocation === + struct DedicatedAllocationImageCreateInfoNV; + struct DedicatedAllocationBufferCreateInfoNV; + struct DedicatedAllocationMemoryAllocateInfoNV; + + //=== VK_EXT_transform_feedback === + struct 
PhysicalDeviceTransformFeedbackFeaturesEXT; + struct PhysicalDeviceTransformFeedbackPropertiesEXT; + struct PipelineRasterizationStateStreamCreateInfoEXT; + + //=== VK_NVX_binary_import === + struct CuModuleCreateInfoNVX; + struct CuModuleTexturingModeCreateInfoNVX; + struct CuFunctionCreateInfoNVX; + struct CuLaunchInfoNVX; + + //=== VK_NVX_image_view_handle === + struct ImageViewHandleInfoNVX; + struct ImageViewAddressPropertiesNVX; + + //=== VK_KHR_video_encode_h264 === + struct VideoEncodeH264CapabilitiesKHR; + struct VideoEncodeH264QualityLevelPropertiesKHR; + struct VideoEncodeH264SessionCreateInfoKHR; + struct VideoEncodeH264SessionParametersCreateInfoKHR; + struct VideoEncodeH264SessionParametersAddInfoKHR; + struct VideoEncodeH264SessionParametersGetInfoKHR; + struct VideoEncodeH264SessionParametersFeedbackInfoKHR; + struct VideoEncodeH264PictureInfoKHR; + struct VideoEncodeH264DpbSlotInfoKHR; + struct VideoEncodeH264NaluSliceInfoKHR; + struct VideoEncodeH264ProfileInfoKHR; + struct VideoEncodeH264RateControlInfoKHR; + struct VideoEncodeH264RateControlLayerInfoKHR; + struct VideoEncodeH264QpKHR; + struct VideoEncodeH264FrameSizeKHR; + struct VideoEncodeH264GopRemainingFrameInfoKHR; + + //=== VK_KHR_video_encode_h265 === + struct VideoEncodeH265CapabilitiesKHR; + struct VideoEncodeH265SessionCreateInfoKHR; + struct VideoEncodeH265QualityLevelPropertiesKHR; + struct VideoEncodeH265SessionParametersCreateInfoKHR; + struct VideoEncodeH265SessionParametersAddInfoKHR; + struct VideoEncodeH265SessionParametersGetInfoKHR; + struct VideoEncodeH265SessionParametersFeedbackInfoKHR; + struct VideoEncodeH265PictureInfoKHR; + struct VideoEncodeH265DpbSlotInfoKHR; + struct VideoEncodeH265NaluSliceSegmentInfoKHR; + struct VideoEncodeH265ProfileInfoKHR; + struct VideoEncodeH265RateControlInfoKHR; + struct VideoEncodeH265RateControlLayerInfoKHR; + struct VideoEncodeH265QpKHR; + struct VideoEncodeH265FrameSizeKHR; + struct VideoEncodeH265GopRemainingFrameInfoKHR; + + //=== VK_KHR_video_decode_h264 === + struct VideoDecodeH264ProfileInfoKHR; + struct VideoDecodeH264CapabilitiesKHR; + struct VideoDecodeH264SessionParametersCreateInfoKHR; + struct VideoDecodeH264SessionParametersAddInfoKHR; + struct VideoDecodeH264PictureInfoKHR; + struct VideoDecodeH264DpbSlotInfoKHR; + + //=== VK_AMD_texture_gather_bias_lod === + struct TextureLODGatherFormatPropertiesAMD; + + //=== VK_AMD_shader_info === + struct ShaderResourceUsageAMD; + struct ShaderStatisticsInfoAMD; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + struct StreamDescriptorSurfaceCreateInfoGGP; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_corner_sampled_image === + struct PhysicalDeviceCornerSampledImageFeaturesNV; + + //=== VK_NV_external_memory_capabilities === + struct ExternalImageFormatPropertiesNV; + + //=== VK_NV_external_memory === + struct ExternalMemoryImageCreateInfoNV; + struct ExportMemoryAllocateInfoNV; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + struct ImportMemoryWin32HandleInfoNV; + struct ExportMemoryWin32HandleInfoNV; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_win32_keyed_mutex === + struct Win32KeyedMutexAcquireReleaseInfoNV; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_validation_flags === + struct ValidationFlagsEXT; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + struct ViSurfaceCreateInfoNN; +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_EXT_astc_decode_mode 
=== + struct ImageViewASTCDecodeModeEXT; + struct PhysicalDeviceASTCDecodeFeaturesEXT; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + struct ImportMemoryWin32HandleInfoKHR; + struct ExportMemoryWin32HandleInfoKHR; + struct MemoryWin32HandlePropertiesKHR; + struct MemoryGetWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + struct ImportMemoryFdInfoKHR; + struct MemoryFdPropertiesKHR; + struct MemoryGetFdInfoKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_keyed_mutex === + struct Win32KeyedMutexAcquireReleaseInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + struct ImportSemaphoreWin32HandleInfoKHR; + struct ExportSemaphoreWin32HandleInfoKHR; + struct D3D12FenceSubmitInfoKHR; + struct SemaphoreGetWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + struct ImportSemaphoreFdInfoKHR; + struct SemaphoreGetFdInfoKHR; + + //=== VK_EXT_conditional_rendering === + struct ConditionalRenderingBeginInfoEXT; + struct PhysicalDeviceConditionalRenderingFeaturesEXT; + struct CommandBufferInheritanceConditionalRenderingInfoEXT; + + //=== VK_KHR_incremental_present === + struct PresentRegionsKHR; + struct PresentRegionKHR; + struct RectLayerKHR; + + //=== VK_NV_clip_space_w_scaling === + struct ViewportWScalingNV; + struct PipelineViewportWScalingStateCreateInfoNV; + + //=== VK_EXT_display_surface_counter === + struct SurfaceCapabilities2EXT; + + //=== VK_EXT_display_control === + struct DisplayPowerInfoEXT; + struct DeviceEventInfoEXT; + struct DisplayEventInfoEXT; + struct SwapchainCounterCreateInfoEXT; + + //=== VK_GOOGLE_display_timing === + struct RefreshCycleDurationGOOGLE; + struct PastPresentationTimingGOOGLE; + struct PresentTimesInfoGOOGLE; + struct PresentTimeGOOGLE; + + //=== VK_NVX_multiview_per_view_attributes === + struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + struct MultiviewPerViewAttributesInfoNVX; + + //=== VK_NV_viewport_swizzle === + struct ViewportSwizzleNV; + struct PipelineViewportSwizzleStateCreateInfoNV; + + //=== VK_EXT_discard_rectangles === + struct PhysicalDeviceDiscardRectanglePropertiesEXT; + struct PipelineDiscardRectangleStateCreateInfoEXT; + + //=== VK_EXT_conservative_rasterization === + struct PhysicalDeviceConservativeRasterizationPropertiesEXT; + struct PipelineRasterizationConservativeStateCreateInfoEXT; + + //=== VK_EXT_depth_clip_enable === + struct PhysicalDeviceDepthClipEnableFeaturesEXT; + struct PipelineRasterizationDepthClipStateCreateInfoEXT; + + //=== VK_EXT_hdr_metadata === + struct HdrMetadataEXT; + struct XYColorEXT; + + //=== VK_IMG_relaxed_line_rasterization === + struct PhysicalDeviceRelaxedLineRasterizationFeaturesIMG; + + //=== VK_KHR_shared_presentable_image === + struct SharedPresentSurfaceCapabilitiesKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + struct ImportFenceWin32HandleInfoKHR; + struct ExportFenceWin32HandleInfoKHR; + struct FenceGetWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + struct ImportFenceFdInfoKHR; + struct FenceGetFdInfoKHR; + + //=== VK_KHR_performance_query === + struct PhysicalDevicePerformanceQueryFeaturesKHR; + struct PhysicalDevicePerformanceQueryPropertiesKHR; + struct PerformanceCounterKHR; + struct PerformanceCounterDescriptionKHR; + struct 
QueryPoolPerformanceCreateInfoKHR; + union PerformanceCounterResultKHR; + struct AcquireProfilingLockInfoKHR; + struct PerformanceQuerySubmitInfoKHR; + + //=== VK_KHR_get_surface_capabilities2 === + struct PhysicalDeviceSurfaceInfo2KHR; + struct SurfaceCapabilities2KHR; + struct SurfaceFormat2KHR; + + //=== VK_KHR_get_display_properties2 === + struct DisplayProperties2KHR; + struct DisplayPlaneProperties2KHR; + struct DisplayModeProperties2KHR; + struct DisplayPlaneInfo2KHR; + struct DisplayPlaneCapabilities2KHR; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + struct IOSSurfaceCreateInfoMVK; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + struct MacOSSurfaceCreateInfoMVK; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + struct DebugUtilsLabelEXT; + struct DebugUtilsMessengerCallbackDataEXT; + struct DebugUtilsMessengerCreateInfoEXT; + struct DebugUtilsObjectNameInfoEXT; + struct DebugUtilsObjectTagInfoEXT; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + struct AndroidHardwareBufferUsageANDROID; + struct AndroidHardwareBufferPropertiesANDROID; + struct AndroidHardwareBufferFormatPropertiesANDROID; + struct ImportAndroidHardwareBufferInfoANDROID; + struct MemoryGetAndroidHardwareBufferInfoANDROID; + struct ExternalFormatANDROID; + struct AndroidHardwareBufferFormatProperties2ANDROID; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + struct PhysicalDeviceShaderEnqueueFeaturesAMDX; + struct PhysicalDeviceShaderEnqueuePropertiesAMDX; + struct ExecutionGraphPipelineScratchSizeAMDX; + struct ExecutionGraphPipelineCreateInfoAMDX; + struct DispatchGraphInfoAMDX; + struct DispatchGraphCountInfoAMDX; + struct PipelineShaderStageNodeCreateInfoAMDX; + union DeviceOrHostAddressConstAMDX; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_AMD_mixed_attachment_samples === + struct AttachmentSampleCountInfoAMD; + using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD; + + //=== VK_KHR_shader_bfloat16 === + struct PhysicalDeviceShaderBfloat16FeaturesKHR; + + //=== VK_EXT_sample_locations === + struct SampleLocationEXT; + struct SampleLocationsInfoEXT; + struct AttachmentSampleLocationsEXT; + struct SubpassSampleLocationsEXT; + struct RenderPassSampleLocationsBeginInfoEXT; + struct PipelineSampleLocationsStateCreateInfoEXT; + struct PhysicalDeviceSampleLocationsPropertiesEXT; + struct MultisamplePropertiesEXT; + + //=== VK_EXT_blend_operation_advanced === + struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT; + struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT; + struct PipelineColorBlendAdvancedStateCreateInfoEXT; + + //=== VK_NV_fragment_coverage_to_color === + struct PipelineCoverageToColorStateCreateInfoNV; + + //=== VK_KHR_acceleration_structure === + union DeviceOrHostAddressKHR; + union DeviceOrHostAddressConstKHR; + struct AccelerationStructureBuildRangeInfoKHR; + struct AabbPositionsKHR; + using AabbPositionsNV = AabbPositionsKHR; + struct AccelerationStructureGeometryTrianglesDataKHR; + struct TransformMatrixKHR; + using TransformMatrixNV = TransformMatrixKHR; + struct AccelerationStructureBuildGeometryInfoKHR; + struct AccelerationStructureGeometryAabbsDataKHR; + struct AccelerationStructureInstanceKHR; + using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR; + struct 
AccelerationStructureGeometryInstancesDataKHR; + union AccelerationStructureGeometryDataKHR; + struct AccelerationStructureGeometryKHR; + struct AccelerationStructureCreateInfoKHR; + struct WriteDescriptorSetAccelerationStructureKHR; + struct PhysicalDeviceAccelerationStructureFeaturesKHR; + struct PhysicalDeviceAccelerationStructurePropertiesKHR; + struct AccelerationStructureDeviceAddressInfoKHR; + struct AccelerationStructureVersionInfoKHR; + struct CopyAccelerationStructureToMemoryInfoKHR; + struct CopyMemoryToAccelerationStructureInfoKHR; + struct CopyAccelerationStructureInfoKHR; + struct AccelerationStructureBuildSizesInfoKHR; + + //=== VK_KHR_ray_tracing_pipeline === + struct RayTracingShaderGroupCreateInfoKHR; + struct RayTracingPipelineCreateInfoKHR; + struct PhysicalDeviceRayTracingPipelineFeaturesKHR; + struct PhysicalDeviceRayTracingPipelinePropertiesKHR; + struct StridedDeviceAddressRegionKHR; + struct TraceRaysIndirectCommandKHR; + struct RayTracingPipelineInterfaceCreateInfoKHR; + + //=== VK_KHR_ray_query === + struct PhysicalDeviceRayQueryFeaturesKHR; + + //=== VK_NV_framebuffer_mixed_samples === + struct PipelineCoverageModulationStateCreateInfoNV; + + //=== VK_NV_shader_sm_builtins === + struct PhysicalDeviceShaderSMBuiltinsPropertiesNV; + struct PhysicalDeviceShaderSMBuiltinsFeaturesNV; + + //=== VK_EXT_image_drm_format_modifier === + struct DrmFormatModifierPropertiesListEXT; + struct DrmFormatModifierPropertiesEXT; + struct PhysicalDeviceImageDrmFormatModifierInfoEXT; + struct ImageDrmFormatModifierListCreateInfoEXT; + struct ImageDrmFormatModifierExplicitCreateInfoEXT; + struct ImageDrmFormatModifierPropertiesEXT; + struct DrmFormatModifierPropertiesList2EXT; + struct DrmFormatModifierProperties2EXT; + + //=== VK_EXT_validation_cache === + struct ValidationCacheCreateInfoEXT; + struct ShaderModuleValidationCacheCreateInfoEXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_portability_subset === + struct PhysicalDevicePortabilitySubsetFeaturesKHR; + struct PhysicalDevicePortabilitySubsetPropertiesKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_shading_rate_image === + struct ShadingRatePaletteNV; + struct PipelineViewportShadingRateImageStateCreateInfoNV; + struct PhysicalDeviceShadingRateImageFeaturesNV; + struct PhysicalDeviceShadingRateImagePropertiesNV; + struct CoarseSampleLocationNV; + struct CoarseSampleOrderCustomNV; + struct PipelineViewportCoarseSampleOrderStateCreateInfoNV; + + //=== VK_NV_ray_tracing === + struct RayTracingShaderGroupCreateInfoNV; + struct RayTracingPipelineCreateInfoNV; + struct GeometryTrianglesNV; + struct GeometryAABBNV; + struct GeometryDataNV; + struct GeometryNV; + struct AccelerationStructureInfoNV; + struct AccelerationStructureCreateInfoNV; + struct BindAccelerationStructureMemoryInfoNV; + struct WriteDescriptorSetAccelerationStructureNV; + struct AccelerationStructureMemoryRequirementsInfoNV; + struct PhysicalDeviceRayTracingPropertiesNV; + + //=== VK_NV_representative_fragment_test === + struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV; + struct PipelineRepresentativeFragmentTestStateCreateInfoNV; + + //=== VK_EXT_filter_cubic === + struct PhysicalDeviceImageViewImageFormatInfoEXT; + struct FilterCubicImageViewImageFormatPropertiesEXT; + + //=== VK_EXT_external_memory_host === + struct ImportMemoryHostPointerInfoEXT; + struct MemoryHostPointerPropertiesEXT; + struct PhysicalDeviceExternalMemoryHostPropertiesEXT; + + //=== VK_KHR_shader_clock === + struct PhysicalDeviceShaderClockFeaturesKHR; + + 
//=== VK_AMD_pipeline_compiler_control === + struct PipelineCompilerControlCreateInfoAMD; + + //=== VK_AMD_shader_core_properties === + struct PhysicalDeviceShaderCorePropertiesAMD; + + //=== VK_KHR_video_decode_h265 === + struct VideoDecodeH265ProfileInfoKHR; + struct VideoDecodeH265CapabilitiesKHR; + struct VideoDecodeH265SessionParametersCreateInfoKHR; + struct VideoDecodeH265SessionParametersAddInfoKHR; + struct VideoDecodeH265PictureInfoKHR; + struct VideoDecodeH265DpbSlotInfoKHR; + + //=== VK_AMD_memory_overallocation_behavior === + struct DeviceMemoryOverallocationCreateInfoAMD; + + //=== VK_EXT_vertex_attribute_divisor === + struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_frame_token === + struct PresentFrameTokenGGP; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_mesh_shader === + struct PhysicalDeviceMeshShaderFeaturesNV; + struct PhysicalDeviceMeshShaderPropertiesNV; + struct DrawMeshTasksIndirectCommandNV; + + //=== VK_NV_shader_image_footprint === + struct PhysicalDeviceShaderImageFootprintFeaturesNV; + + //=== VK_NV_scissor_exclusive === + struct PipelineViewportExclusiveScissorStateCreateInfoNV; + struct PhysicalDeviceExclusiveScissorFeaturesNV; + + //=== VK_NV_device_diagnostic_checkpoints === + struct QueueFamilyCheckpointPropertiesNV; + struct CheckpointDataNV; + struct QueueFamilyCheckpointProperties2NV; + struct CheckpointData2NV; + + //=== VK_INTEL_shader_integer_functions2 === + struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + + //=== VK_INTEL_performance_query === + union PerformanceValueDataINTEL; + struct PerformanceValueINTEL; + struct InitializePerformanceApiInfoINTEL; + struct QueryPoolPerformanceQueryCreateInfoINTEL; + using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL; + struct PerformanceMarkerInfoINTEL; + struct PerformanceStreamMarkerInfoINTEL; + struct PerformanceOverrideInfoINTEL; + struct PerformanceConfigurationAcquireInfoINTEL; + + //=== VK_EXT_pci_bus_info === + struct PhysicalDevicePCIBusInfoPropertiesEXT; + + //=== VK_AMD_display_native_hdr === + struct DisplayNativeHdrSurfaceCapabilitiesAMD; + struct SwapchainDisplayNativeHdrCreateInfoAMD; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + struct ImagePipeSurfaceCreateInfoFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + struct MetalSurfaceCreateInfoEXT; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_fragment_density_map === + struct PhysicalDeviceFragmentDensityMapFeaturesEXT; + struct PhysicalDeviceFragmentDensityMapPropertiesEXT; + struct RenderPassFragmentDensityMapCreateInfoEXT; + struct RenderingFragmentDensityMapAttachmentInfoEXT; + + //=== VK_KHR_fragment_shading_rate === + struct FragmentShadingRateAttachmentInfoKHR; + struct PipelineFragmentShadingRateStateCreateInfoKHR; + struct PhysicalDeviceFragmentShadingRateFeaturesKHR; + struct PhysicalDeviceFragmentShadingRatePropertiesKHR; + struct PhysicalDeviceFragmentShadingRateKHR; + struct RenderingFragmentShadingRateAttachmentInfoKHR; + + //=== VK_AMD_shader_core_properties2 === + struct PhysicalDeviceShaderCoreProperties2AMD; + + //=== VK_AMD_device_coherent_memory === + struct PhysicalDeviceCoherentMemoryFeaturesAMD; + + //=== VK_EXT_shader_image_atomic_int64 === + struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + + //=== VK_KHR_shader_quad_control === + struct PhysicalDeviceShaderQuadControlFeaturesKHR; + + //=== 
VK_EXT_memory_budget === + struct PhysicalDeviceMemoryBudgetPropertiesEXT; + + //=== VK_EXT_memory_priority === + struct PhysicalDeviceMemoryPriorityFeaturesEXT; + struct MemoryPriorityAllocateInfoEXT; + + //=== VK_KHR_surface_protected_capabilities === + struct SurfaceProtectedCapabilitiesKHR; + + //=== VK_NV_dedicated_allocation_image_aliasing === + struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + + //=== VK_EXT_buffer_device_address === + struct PhysicalDeviceBufferDeviceAddressFeaturesEXT; + using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT; + struct BufferDeviceAddressCreateInfoEXT; + + //=== VK_EXT_validation_features === + struct ValidationFeaturesEXT; + + //=== VK_KHR_present_wait === + struct PhysicalDevicePresentWaitFeaturesKHR; + + //=== VK_NV_cooperative_matrix === + struct CooperativeMatrixPropertiesNV; + struct PhysicalDeviceCooperativeMatrixFeaturesNV; + struct PhysicalDeviceCooperativeMatrixPropertiesNV; + + //=== VK_NV_coverage_reduction_mode === + struct PhysicalDeviceCoverageReductionModeFeaturesNV; + struct PipelineCoverageReductionStateCreateInfoNV; + struct FramebufferMixedSamplesCombinationNV; + + //=== VK_EXT_fragment_shader_interlock === + struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT; + + //=== VK_EXT_ycbcr_image_arrays === + struct PhysicalDeviceYcbcrImageArraysFeaturesEXT; + + //=== VK_EXT_provoking_vertex === + struct PhysicalDeviceProvokingVertexFeaturesEXT; + struct PhysicalDeviceProvokingVertexPropertiesEXT; + struct PipelineRasterizationProvokingVertexStateCreateInfoEXT; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + struct SurfaceFullScreenExclusiveInfoEXT; + struct SurfaceCapabilitiesFullScreenExclusiveEXT; + struct SurfaceFullScreenExclusiveWin32InfoEXT; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + struct HeadlessSurfaceCreateInfoEXT; + + //=== VK_EXT_shader_atomic_float === + struct PhysicalDeviceShaderAtomicFloatFeaturesEXT; + + //=== VK_EXT_extended_dynamic_state === + struct PhysicalDeviceExtendedDynamicStateFeaturesEXT; + + //=== VK_KHR_pipeline_executable_properties === + struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + struct PipelineInfoKHR; + using PipelineInfoEXT = PipelineInfoKHR; + struct PipelineExecutablePropertiesKHR; + struct PipelineExecutableInfoKHR; + union PipelineExecutableStatisticValueKHR; + struct PipelineExecutableStatisticKHR; + struct PipelineExecutableInternalRepresentationKHR; + + //=== VK_EXT_map_memory_placed === + struct PhysicalDeviceMapMemoryPlacedFeaturesEXT; + struct PhysicalDeviceMapMemoryPlacedPropertiesEXT; + struct MemoryMapPlacedInfoEXT; + + //=== VK_EXT_shader_atomic_float2 === + struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT; + + //=== VK_EXT_surface_maintenance1 === + struct SurfacePresentModeEXT; + struct SurfacePresentScalingCapabilitiesEXT; + struct SurfacePresentModeCompatibilityEXT; + + //=== VK_EXT_swapchain_maintenance1 === + struct PhysicalDeviceSwapchainMaintenance1FeaturesEXT; + struct SwapchainPresentFenceInfoEXT; + struct SwapchainPresentModesCreateInfoEXT; + struct SwapchainPresentModeInfoEXT; + struct SwapchainPresentScalingCreateInfoEXT; + struct ReleaseSwapchainImagesInfoEXT; + + //=== VK_NV_device_generated_commands === + struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + struct GraphicsShaderGroupCreateInfoNV; + struct GraphicsPipelineShaderGroupsCreateInfoNV; + struct 
BindShaderGroupIndirectCommandNV; + struct BindIndexBufferIndirectCommandNV; + struct BindVertexBufferIndirectCommandNV; + struct SetStateFlagsIndirectCommandNV; + struct IndirectCommandsStreamNV; + struct IndirectCommandsLayoutTokenNV; + struct IndirectCommandsLayoutCreateInfoNV; + struct GeneratedCommandsInfoNV; + struct GeneratedCommandsMemoryRequirementsInfoNV; + + //=== VK_NV_inherited_viewport_scissor === + struct PhysicalDeviceInheritedViewportScissorFeaturesNV; + struct CommandBufferInheritanceViewportScissorInfoNV; + + //=== VK_EXT_texel_buffer_alignment === + struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT; + + //=== VK_QCOM_render_pass_transform === + struct RenderPassTransformBeginInfoQCOM; + struct CommandBufferInheritanceRenderPassTransformInfoQCOM; + + //=== VK_EXT_depth_bias_control === + struct PhysicalDeviceDepthBiasControlFeaturesEXT; + struct DepthBiasInfoEXT; + struct DepthBiasRepresentationInfoEXT; + + //=== VK_EXT_device_memory_report === + struct PhysicalDeviceDeviceMemoryReportFeaturesEXT; + struct DeviceDeviceMemoryReportCreateInfoEXT; + struct DeviceMemoryReportCallbackDataEXT; + + //=== VK_EXT_robustness2 === + struct PhysicalDeviceRobustness2FeaturesEXT; + struct PhysicalDeviceRobustness2PropertiesEXT; + + //=== VK_EXT_custom_border_color === + struct SamplerCustomBorderColorCreateInfoEXT; + struct PhysicalDeviceCustomBorderColorPropertiesEXT; + struct PhysicalDeviceCustomBorderColorFeaturesEXT; + + //=== VK_KHR_pipeline_library === + struct PipelineLibraryCreateInfoKHR; + + //=== VK_NV_present_barrier === + struct PhysicalDevicePresentBarrierFeaturesNV; + struct SurfaceCapabilitiesPresentBarrierNV; + struct SwapchainPresentBarrierCreateInfoNV; + + //=== VK_KHR_present_id === + struct PresentIdKHR; + struct PhysicalDevicePresentIdFeaturesKHR; + + //=== VK_KHR_video_encode_queue === + struct VideoEncodeInfoKHR; + struct VideoEncodeCapabilitiesKHR; + struct QueryPoolVideoEncodeFeedbackCreateInfoKHR; + struct VideoEncodeUsageInfoKHR; + struct VideoEncodeRateControlInfoKHR; + struct VideoEncodeRateControlLayerInfoKHR; + struct PhysicalDeviceVideoEncodeQualityLevelInfoKHR; + struct VideoEncodeQualityLevelPropertiesKHR; + struct VideoEncodeQualityLevelInfoKHR; + struct VideoEncodeSessionParametersGetInfoKHR; + struct VideoEncodeSessionParametersFeedbackInfoKHR; + + //=== VK_NV_device_diagnostics_config === + struct PhysicalDeviceDiagnosticsConfigFeaturesNV; + struct DeviceDiagnosticsConfigCreateInfoNV; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + struct CudaModuleCreateInfoNV; + struct CudaFunctionCreateInfoNV; + struct CudaLaunchInfoNV; + struct PhysicalDeviceCudaKernelLaunchFeaturesNV; + struct PhysicalDeviceCudaKernelLaunchPropertiesNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_QCOM_tile_shading === + struct PhysicalDeviceTileShadingFeaturesQCOM; + struct PhysicalDeviceTileShadingPropertiesQCOM; + struct RenderPassTileShadingCreateInfoQCOM; + struct PerTileBeginInfoQCOM; + struct PerTileEndInfoQCOM; + struct DispatchTileInfoQCOM; + + //=== VK_NV_low_latency === + struct QueryLowLatencySupportNV; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + struct ExportMetalObjectCreateInfoEXT; + struct ExportMetalObjectsInfoEXT; + struct ExportMetalDeviceInfoEXT; + struct ExportMetalCommandQueueInfoEXT; + struct ExportMetalBufferInfoEXT; + struct ImportMetalBufferInfoEXT; + struct ExportMetalTextureInfoEXT; + struct ImportMetalTextureInfoEXT; + struct ExportMetalIOSurfaceInfoEXT; + struct 
ImportMetalIOSurfaceInfoEXT; + struct ExportMetalSharedEventInfoEXT; + struct ImportMetalSharedEventInfoEXT; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_descriptor_buffer === + struct PhysicalDeviceDescriptorBufferPropertiesEXT; + struct PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + struct PhysicalDeviceDescriptorBufferFeaturesEXT; + struct DescriptorAddressInfoEXT; + struct DescriptorBufferBindingInfoEXT; + struct DescriptorBufferBindingPushDescriptorBufferHandleEXT; + union DescriptorDataEXT; + struct DescriptorGetInfoEXT; + struct BufferCaptureDescriptorDataInfoEXT; + struct ImageCaptureDescriptorDataInfoEXT; + struct ImageViewCaptureDescriptorDataInfoEXT; + struct SamplerCaptureDescriptorDataInfoEXT; + struct OpaqueCaptureDescriptorDataCreateInfoEXT; + struct AccelerationStructureCaptureDescriptorDataInfoEXT; + + //=== VK_EXT_graphics_pipeline_library === + struct PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + struct PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; + struct GraphicsPipelineLibraryCreateInfoEXT; + + //=== VK_AMD_shader_early_and_late_fragment_tests === + struct PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + + //=== VK_KHR_fragment_shader_barycentric === + struct PhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + using PhysicalDeviceFragmentShaderBarycentricFeaturesNV = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + struct PhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + + //=== VK_KHR_shader_subgroup_uniform_control_flow === + struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + + //=== VK_NV_fragment_shading_rate_enums === + struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + struct PipelineFragmentShadingRateEnumStateCreateInfoNV; + + //=== VK_NV_ray_tracing_motion_blur === + struct AccelerationStructureGeometryMotionTrianglesDataNV; + struct AccelerationStructureMotionInfoNV; + struct AccelerationStructureMotionInstanceNV; + union AccelerationStructureMotionInstanceDataNV; + struct AccelerationStructureMatrixMotionInstanceNV; + struct AccelerationStructureSRTMotionInstanceNV; + struct SRTDataNV; + struct PhysicalDeviceRayTracingMotionBlurFeaturesNV; + + //=== VK_EXT_mesh_shader === + struct PhysicalDeviceMeshShaderFeaturesEXT; + struct PhysicalDeviceMeshShaderPropertiesEXT; + struct DrawMeshTasksIndirectCommandEXT; + + //=== VK_EXT_ycbcr_2plane_444_formats === + struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + + //=== VK_EXT_fragment_density_map2 === + struct PhysicalDeviceFragmentDensityMap2FeaturesEXT; + struct PhysicalDeviceFragmentDensityMap2PropertiesEXT; + + //=== VK_QCOM_rotated_copy_commands === + struct CopyCommandTransformInfoQCOM; + + //=== VK_KHR_workgroup_memory_explicit_layout === + struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + + //=== VK_EXT_image_compression_control === + struct PhysicalDeviceImageCompressionControlFeaturesEXT; + struct ImageCompressionControlEXT; + struct ImageCompressionPropertiesEXT; + + //=== VK_EXT_attachment_feedback_loop_layout === + struct PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; + + //=== VK_EXT_4444_formats === + struct PhysicalDevice4444FormatsFeaturesEXT; + + //=== VK_EXT_device_fault === + struct PhysicalDeviceFaultFeaturesEXT; + struct DeviceFaultCountsEXT; + struct DeviceFaultInfoEXT; + struct DeviceFaultAddressInfoEXT; + struct DeviceFaultVendorInfoEXT; + struct DeviceFaultVendorBinaryHeaderVersionOneEXT; + + //=== VK_EXT_rgba10x6_formats 
=== + struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT; + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + struct DirectFBSurfaceCreateInfoEXT; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_vertex_input_dynamic_state === + struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT; + struct VertexInputBindingDescription2EXT; + struct VertexInputAttributeDescription2EXT; + + //=== VK_EXT_physical_device_drm === + struct PhysicalDeviceDrmPropertiesEXT; + + //=== VK_EXT_device_address_binding_report === + struct PhysicalDeviceAddressBindingReportFeaturesEXT; + struct DeviceAddressBindingCallbackDataEXT; + + //=== VK_EXT_depth_clip_control === + struct PhysicalDeviceDepthClipControlFeaturesEXT; + struct PipelineViewportDepthClipControlCreateInfoEXT; + + //=== VK_EXT_primitive_topology_list_restart === + struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + + //=== VK_EXT_present_mode_fifo_latest_ready === + struct PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + struct ImportMemoryZirconHandleInfoFUCHSIA; + struct MemoryZirconHandlePropertiesFUCHSIA; + struct MemoryGetZirconHandleInfoFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + struct ImportSemaphoreZirconHandleInfoFUCHSIA; + struct SemaphoreGetZirconHandleInfoFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + struct BufferCollectionCreateInfoFUCHSIA; + struct ImportMemoryBufferCollectionFUCHSIA; + struct BufferCollectionImageCreateInfoFUCHSIA; + struct BufferConstraintsInfoFUCHSIA; + struct BufferCollectionBufferCreateInfoFUCHSIA; + struct BufferCollectionPropertiesFUCHSIA; + struct SysmemColorSpaceFUCHSIA; + struct ImageConstraintsInfoFUCHSIA; + struct ImageFormatConstraintsInfoFUCHSIA; + struct BufferCollectionConstraintsInfoFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + struct SubpassShadingPipelineCreateInfoHUAWEI; + struct PhysicalDeviceSubpassShadingFeaturesHUAWEI; + struct PhysicalDeviceSubpassShadingPropertiesHUAWEI; + + //=== VK_HUAWEI_invocation_mask === + struct PhysicalDeviceInvocationMaskFeaturesHUAWEI; + + //=== VK_NV_external_memory_rdma === + struct MemoryGetRemoteAddressInfoNV; + struct PhysicalDeviceExternalMemoryRDMAFeaturesNV; + + //=== VK_EXT_pipeline_properties === + struct PipelinePropertiesIdentifierEXT; + struct PhysicalDevicePipelinePropertiesFeaturesEXT; + + //=== VK_EXT_frame_boundary === + struct PhysicalDeviceFrameBoundaryFeaturesEXT; + struct FrameBoundaryEXT; + + //=== VK_EXT_multisampled_render_to_single_sampled === + struct PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + struct SubpassResolvePerformanceQueryEXT; + struct MultisampledRenderToSingleSampledInfoEXT; + + //=== VK_EXT_extended_dynamic_state2 === + struct PhysicalDeviceExtendedDynamicState2FeaturesEXT; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + struct ScreenSurfaceCreateInfoQNX; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + struct PhysicalDeviceColorWriteEnableFeaturesEXT; + struct PipelineColorWriteCreateInfoEXT; + + //=== VK_EXT_primitives_generated_query === + struct PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; + + //=== VK_KHR_ray_tracing_maintenance1 === + struct PhysicalDeviceRayTracingMaintenance1FeaturesKHR; + 
struct TraceRaysIndirectCommand2KHR; + + //=== VK_EXT_image_view_min_lod === + struct PhysicalDeviceImageViewMinLodFeaturesEXT; + struct ImageViewMinLodCreateInfoEXT; + + //=== VK_EXT_multi_draw === + struct PhysicalDeviceMultiDrawFeaturesEXT; + struct PhysicalDeviceMultiDrawPropertiesEXT; + struct MultiDrawInfoEXT; + struct MultiDrawIndexedInfoEXT; + + //=== VK_EXT_image_2d_view_of_3d === + struct PhysicalDeviceImage2DViewOf3DFeaturesEXT; + + //=== VK_EXT_shader_tile_image === + struct PhysicalDeviceShaderTileImageFeaturesEXT; + struct PhysicalDeviceShaderTileImagePropertiesEXT; + + //=== VK_EXT_opacity_micromap === + struct MicromapBuildInfoEXT; + struct MicromapUsageEXT; + struct MicromapCreateInfoEXT; + struct PhysicalDeviceOpacityMicromapFeaturesEXT; + struct PhysicalDeviceOpacityMicromapPropertiesEXT; + struct MicromapVersionInfoEXT; + struct CopyMicromapToMemoryInfoEXT; + struct CopyMemoryToMicromapInfoEXT; + struct CopyMicromapInfoEXT; + struct MicromapBuildSizesInfoEXT; + struct AccelerationStructureTrianglesOpacityMicromapEXT; + struct MicromapTriangleEXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === + struct PhysicalDeviceDisplacementMicromapFeaturesNV; + struct PhysicalDeviceDisplacementMicromapPropertiesNV; + struct AccelerationStructureTrianglesDisplacementMicromapNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_HUAWEI_cluster_culling_shader === + struct PhysicalDeviceClusterCullingShaderFeaturesHUAWEI; + struct PhysicalDeviceClusterCullingShaderPropertiesHUAWEI; + struct PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; + + //=== VK_EXT_border_color_swizzle === + struct PhysicalDeviceBorderColorSwizzleFeaturesEXT; + struct SamplerBorderColorComponentMappingCreateInfoEXT; + + //=== VK_EXT_pageable_device_local_memory === + struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + + //=== VK_ARM_shader_core_properties === + struct PhysicalDeviceShaderCorePropertiesARM; + + //=== VK_ARM_scheduling_controls === + struct DeviceQueueShaderCoreControlCreateInfoARM; + struct PhysicalDeviceSchedulingControlsFeaturesARM; + struct PhysicalDeviceSchedulingControlsPropertiesARM; + + //=== VK_EXT_image_sliced_view_of_3d === + struct PhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + struct ImageViewSlicedCreateInfoEXT; + + //=== VK_VALVE_descriptor_set_host_mapping === + struct PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; + struct DescriptorSetBindingReferenceVALVE; + struct DescriptorSetLayoutHostMappingInfoVALVE; + + //=== VK_EXT_non_seamless_cube_map === + struct PhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + + //=== VK_ARM_render_pass_striped === + struct PhysicalDeviceRenderPassStripedFeaturesARM; + struct PhysicalDeviceRenderPassStripedPropertiesARM; + struct RenderPassStripeBeginInfoARM; + struct RenderPassStripeInfoARM; + struct RenderPassStripeSubmitInfoARM; + + //=== VK_NV_copy_memory_indirect === + struct CopyMemoryIndirectCommandNV; + struct CopyMemoryToImageIndirectCommandNV; + struct PhysicalDeviceCopyMemoryIndirectFeaturesNV; + struct PhysicalDeviceCopyMemoryIndirectPropertiesNV; + + //=== VK_NV_memory_decompression === + struct DecompressMemoryRegionNV; + struct PhysicalDeviceMemoryDecompressionFeaturesNV; + struct PhysicalDeviceMemoryDecompressionPropertiesNV; + + //=== VK_NV_device_generated_commands_compute === + struct PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + struct ComputePipelineIndirectBufferInfoNV; + struct PipelineIndirectDeviceAddressInfoNV; + struct BindPipelineIndirectCommandNV; + + //=== 
VK_NV_ray_tracing_linear_swept_spheres === + struct PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV; + struct AccelerationStructureGeometryLinearSweptSpheresDataNV; + struct AccelerationStructureGeometrySpheresDataNV; + + //=== VK_NV_linear_color_attachment === + struct PhysicalDeviceLinearColorAttachmentFeaturesNV; + + //=== VK_KHR_shader_maximal_reconvergence === + struct PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR; + + //=== VK_EXT_image_compression_control_swapchain === + struct PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + + //=== VK_QCOM_image_processing === + struct ImageViewSampleWeightCreateInfoQCOM; + struct PhysicalDeviceImageProcessingFeaturesQCOM; + struct PhysicalDeviceImageProcessingPropertiesQCOM; + + //=== VK_EXT_nested_command_buffer === + struct PhysicalDeviceNestedCommandBufferFeaturesEXT; + struct PhysicalDeviceNestedCommandBufferPropertiesEXT; + + //=== VK_EXT_external_memory_acquire_unmodified === + struct ExternalMemoryAcquireUnmodifiedEXT; + + //=== VK_EXT_extended_dynamic_state3 === + struct PhysicalDeviceExtendedDynamicState3FeaturesEXT; + struct PhysicalDeviceExtendedDynamicState3PropertiesEXT; + struct ColorBlendEquationEXT; + struct ColorBlendAdvancedEXT; + + //=== VK_EXT_subpass_merge_feedback === + struct PhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + struct RenderPassCreationControlEXT; + struct RenderPassCreationFeedbackInfoEXT; + struct RenderPassCreationFeedbackCreateInfoEXT; + struct RenderPassSubpassFeedbackInfoEXT; + struct RenderPassSubpassFeedbackCreateInfoEXT; + + //=== VK_LUNARG_direct_driver_loading === + struct DirectDriverLoadingInfoLUNARG; + struct DirectDriverLoadingListLUNARG; + + //=== VK_EXT_shader_module_identifier === + struct PhysicalDeviceShaderModuleIdentifierFeaturesEXT; + struct PhysicalDeviceShaderModuleIdentifierPropertiesEXT; + struct PipelineShaderStageModuleIdentifierCreateInfoEXT; + struct ShaderModuleIdentifierEXT; + + //=== VK_EXT_rasterization_order_attachment_access === + struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + using PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + + //=== VK_NV_optical_flow === + struct PhysicalDeviceOpticalFlowFeaturesNV; + struct PhysicalDeviceOpticalFlowPropertiesNV; + struct OpticalFlowImageFormatInfoNV; + struct OpticalFlowImageFormatPropertiesNV; + struct OpticalFlowSessionCreateInfoNV; + struct OpticalFlowSessionCreatePrivateDataInfoNV; + struct OpticalFlowExecuteInfoNV; + + //=== VK_EXT_legacy_dithering === + struct PhysicalDeviceLegacyDitheringFeaturesEXT; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_format_resolve === + struct PhysicalDeviceExternalFormatResolveFeaturesANDROID; + struct PhysicalDeviceExternalFormatResolvePropertiesANDROID; + struct AndroidHardwareBufferFormatResolvePropertiesANDROID; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_AMD_anti_lag === + struct PhysicalDeviceAntiLagFeaturesAMD; + struct AntiLagDataAMD; + struct AntiLagPresentationInfoAMD; + + //=== VK_KHR_ray_tracing_position_fetch === + struct PhysicalDeviceRayTracingPositionFetchFeaturesKHR; + + //=== VK_EXT_shader_object === + struct PhysicalDeviceShaderObjectFeaturesEXT; + struct PhysicalDeviceShaderObjectPropertiesEXT; + struct ShaderCreateInfoEXT; + + //=== VK_KHR_pipeline_binary === + struct PhysicalDevicePipelineBinaryFeaturesKHR; + struct PhysicalDevicePipelineBinaryPropertiesKHR; + struct DevicePipelineBinaryInternalCacheControlKHR; + 
struct PipelineBinaryKeyKHR; + struct PipelineBinaryDataKHR; + struct PipelineBinaryKeysAndDataKHR; + struct PipelineBinaryCreateInfoKHR; + struct PipelineBinaryInfoKHR; + struct ReleaseCapturedPipelineDataInfoKHR; + struct PipelineBinaryDataInfoKHR; + struct PipelineCreateInfoKHR; + struct PipelineBinaryHandlesInfoKHR; + + //=== VK_QCOM_tile_properties === + struct PhysicalDeviceTilePropertiesFeaturesQCOM; + struct TilePropertiesQCOM; + + //=== VK_SEC_amigo_profiling === + struct PhysicalDeviceAmigoProfilingFeaturesSEC; + struct AmigoProfilingSubmitInfoSEC; + + //=== VK_QCOM_multiview_per_view_viewports === + struct PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + + //=== VK_NV_ray_tracing_invocation_reorder === + struct PhysicalDeviceRayTracingInvocationReorderPropertiesNV; + struct PhysicalDeviceRayTracingInvocationReorderFeaturesNV; + + //=== VK_NV_cooperative_vector === + struct PhysicalDeviceCooperativeVectorPropertiesNV; + struct PhysicalDeviceCooperativeVectorFeaturesNV; + struct CooperativeVectorPropertiesNV; + struct ConvertCooperativeVectorMatrixInfoNV; + + //=== VK_NV_extended_sparse_address_space === + struct PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; + struct PhysicalDeviceExtendedSparseAddressSpacePropertiesNV; + + //=== VK_EXT_mutable_descriptor_type === + struct PhysicalDeviceMutableDescriptorTypeFeaturesEXT; + using PhysicalDeviceMutableDescriptorTypeFeaturesVALVE = PhysicalDeviceMutableDescriptorTypeFeaturesEXT; + struct MutableDescriptorTypeListEXT; + using MutableDescriptorTypeListVALVE = MutableDescriptorTypeListEXT; + struct MutableDescriptorTypeCreateInfoEXT; + using MutableDescriptorTypeCreateInfoVALVE = MutableDescriptorTypeCreateInfoEXT; + + //=== VK_EXT_legacy_vertex_attributes === + struct PhysicalDeviceLegacyVertexAttributesFeaturesEXT; + struct PhysicalDeviceLegacyVertexAttributesPropertiesEXT; + + //=== VK_EXT_layer_settings === + struct LayerSettingsCreateInfoEXT; + struct LayerSettingEXT; + + //=== VK_ARM_shader_core_builtins === + struct PhysicalDeviceShaderCoreBuiltinsFeaturesARM; + struct PhysicalDeviceShaderCoreBuiltinsPropertiesARM; + + //=== VK_EXT_pipeline_library_group_handles === + struct PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + + //=== VK_EXT_dynamic_rendering_unused_attachments === + struct PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + + //=== VK_NV_low_latency2 === + struct LatencySleepModeInfoNV; + struct LatencySleepInfoNV; + struct SetLatencyMarkerInfoNV; + struct GetLatencyMarkerInfoNV; + struct LatencyTimingsFrameReportNV; + struct LatencySubmissionPresentIdNV; + struct SwapchainLatencyCreateInfoNV; + struct OutOfBandQueueTypeInfoNV; + struct LatencySurfaceCapabilitiesNV; + + //=== VK_KHR_cooperative_matrix === + struct CooperativeMatrixPropertiesKHR; + struct PhysicalDeviceCooperativeMatrixFeaturesKHR; + struct PhysicalDeviceCooperativeMatrixPropertiesKHR; + + //=== VK_QCOM_multiview_per_view_render_areas === + struct PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + struct MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + + //=== VK_KHR_compute_shader_derivatives === + struct PhysicalDeviceComputeShaderDerivativesFeaturesKHR; + using PhysicalDeviceComputeShaderDerivativesFeaturesNV = PhysicalDeviceComputeShaderDerivativesFeaturesKHR; + struct PhysicalDeviceComputeShaderDerivativesPropertiesKHR; + + //=== VK_KHR_video_decode_av1 === + struct VideoDecodeAV1ProfileInfoKHR; + struct VideoDecodeAV1CapabilitiesKHR; + struct VideoDecodeAV1SessionParametersCreateInfoKHR; + struct 
VideoDecodeAV1PictureInfoKHR; + struct VideoDecodeAV1DpbSlotInfoKHR; + + //=== VK_KHR_video_encode_av1 === + struct PhysicalDeviceVideoEncodeAV1FeaturesKHR; + struct VideoEncodeAV1CapabilitiesKHR; + struct VideoEncodeAV1QualityLevelPropertiesKHR; + struct VideoEncodeAV1SessionCreateInfoKHR; + struct VideoEncodeAV1SessionParametersCreateInfoKHR; + struct VideoEncodeAV1PictureInfoKHR; + struct VideoEncodeAV1DpbSlotInfoKHR; + struct VideoEncodeAV1ProfileInfoKHR; + struct VideoEncodeAV1QIndexKHR; + struct VideoEncodeAV1FrameSizeKHR; + struct VideoEncodeAV1GopRemainingFrameInfoKHR; + struct VideoEncodeAV1RateControlInfoKHR; + struct VideoEncodeAV1RateControlLayerInfoKHR; + + //=== VK_KHR_video_maintenance1 === + struct PhysicalDeviceVideoMaintenance1FeaturesKHR; + struct VideoInlineQueryInfoKHR; + + //=== VK_NV_per_stage_descriptor_set === + struct PhysicalDevicePerStageDescriptorSetFeaturesNV; + + //=== VK_QCOM_image_processing2 === + struct PhysicalDeviceImageProcessing2FeaturesQCOM; + struct PhysicalDeviceImageProcessing2PropertiesQCOM; + struct SamplerBlockMatchWindowCreateInfoQCOM; + + //=== VK_QCOM_filter_cubic_weights === + struct PhysicalDeviceCubicWeightsFeaturesQCOM; + struct SamplerCubicWeightsCreateInfoQCOM; + struct BlitImageCubicWeightsInfoQCOM; + + //=== VK_QCOM_ycbcr_degamma === + struct PhysicalDeviceYcbcrDegammaFeaturesQCOM; + struct SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM; + + //=== VK_QCOM_filter_cubic_clamp === + struct PhysicalDeviceCubicClampFeaturesQCOM; + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + struct PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + struct ScreenBufferPropertiesQNX; + struct ScreenBufferFormatPropertiesQNX; + struct ImportScreenBufferInfoQNX; + struct ExternalFormatQNX; + struct PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_MSFT_layered_driver === + struct PhysicalDeviceLayeredDriverPropertiesMSFT; + + //=== VK_KHR_calibrated_timestamps === + struct CalibratedTimestampInfoKHR; + using CalibratedTimestampInfoEXT = CalibratedTimestampInfoKHR; + + //=== VK_KHR_maintenance6 === + struct SetDescriptorBufferOffsetsInfoEXT; + struct BindDescriptorBufferEmbeddedSamplersInfoEXT; + + //=== VK_NV_descriptor_pool_overallocation === + struct PhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + + //=== VK_QCOM_tile_memory_heap === + struct PhysicalDeviceTileMemoryHeapFeaturesQCOM; + struct PhysicalDeviceTileMemoryHeapPropertiesQCOM; + struct TileMemoryRequirementsQCOM; + struct TileMemoryBindInfoQCOM; + struct TileMemorySizeInfoQCOM; + + //=== VK_NV_display_stereo === + struct DisplaySurfaceStereoCreateInfoNV; + struct DisplayModeStereoPropertiesNV; + + //=== VK_KHR_video_encode_quantization_map === + struct VideoEncodeQuantizationMapCapabilitiesKHR; + struct VideoFormatQuantizationMapPropertiesKHR; + struct VideoEncodeQuantizationMapInfoKHR; + struct VideoEncodeQuantizationMapSessionParametersCreateInfoKHR; + struct PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; + struct VideoEncodeH264QuantizationMapCapabilitiesKHR; + struct VideoEncodeH265QuantizationMapCapabilitiesKHR; + struct VideoFormatH265QuantizationMapPropertiesKHR; + struct VideoEncodeAV1QuantizationMapCapabilitiesKHR; + struct VideoFormatAV1QuantizationMapPropertiesKHR; + + //=== VK_NV_raw_access_chains === + struct PhysicalDeviceRawAccessChainsFeaturesNV; + + //=== VK_NV_external_compute_queue === + 
struct ExternalComputeQueueDeviceCreateInfoNV; + struct ExternalComputeQueueCreateInfoNV; + struct ExternalComputeQueueDataParamsNV; + struct PhysicalDeviceExternalComputeQueuePropertiesNV; + + //=== VK_KHR_shader_relaxed_extended_instruction === + struct PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + + //=== VK_NV_command_buffer_inheritance === + struct PhysicalDeviceCommandBufferInheritanceFeaturesNV; + + //=== VK_KHR_maintenance7 === + struct PhysicalDeviceMaintenance7FeaturesKHR; + struct PhysicalDeviceMaintenance7PropertiesKHR; + struct PhysicalDeviceLayeredApiPropertiesListKHR; + struct PhysicalDeviceLayeredApiPropertiesKHR; + struct PhysicalDeviceLayeredApiVulkanPropertiesKHR; + + //=== VK_NV_shader_atomic_float16_vector === + struct PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; + + //=== VK_EXT_shader_replicated_composites === + struct PhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + + //=== VK_NV_ray_tracing_validation === + struct PhysicalDeviceRayTracingValidationFeaturesNV; + + //=== VK_NV_cluster_acceleration_structure === + struct PhysicalDeviceClusterAccelerationStructureFeaturesNV; + struct PhysicalDeviceClusterAccelerationStructurePropertiesNV; + struct ClusterAccelerationStructureClustersBottomLevelInputNV; + struct ClusterAccelerationStructureTriangleClusterInputNV; + struct ClusterAccelerationStructureMoveObjectsInputNV; + union ClusterAccelerationStructureOpInputNV; + struct ClusterAccelerationStructureInputInfoNV; + struct ClusterAccelerationStructureCommandsInfoNV; + struct StridedDeviceAddressNV; + struct ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV; + struct ClusterAccelerationStructureMoveObjectsInfoNV; + struct ClusterAccelerationStructureBuildClustersBottomLevelInfoNV; + struct ClusterAccelerationStructureBuildTriangleClusterInfoNV; + struct ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV; + struct ClusterAccelerationStructureInstantiateClusterInfoNV; + struct RayTracingPipelineClusterAccelerationStructureCreateInfoNV; + + //=== VK_NV_partitioned_acceleration_structure === + struct PhysicalDevicePartitionedAccelerationStructureFeaturesNV; + struct PhysicalDevicePartitionedAccelerationStructurePropertiesNV; + struct PartitionedAccelerationStructureFlagsNV; + struct BuildPartitionedAccelerationStructureIndirectCommandNV; + struct PartitionedAccelerationStructureWriteInstanceDataNV; + struct PartitionedAccelerationStructureUpdateInstanceDataNV; + struct PartitionedAccelerationStructureWritePartitionTranslationDataNV; + struct WriteDescriptorSetPartitionedAccelerationStructureNV; + struct PartitionedAccelerationStructureInstancesInputNV; + struct BuildPartitionedAccelerationStructureInfoNV; + + //=== VK_EXT_device_generated_commands === + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + struct PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + struct GeneratedCommandsMemoryRequirementsInfoEXT; + struct IndirectExecutionSetCreateInfoEXT; + union IndirectExecutionSetInfoEXT; + struct IndirectExecutionSetPipelineInfoEXT; + struct IndirectExecutionSetShaderInfoEXT; + struct GeneratedCommandsInfoEXT; + struct WriteIndirectExecutionSetPipelineEXT; + struct IndirectCommandsLayoutCreateInfoEXT; + struct IndirectCommandsLayoutTokenEXT; + struct DrawIndirectCountIndirectCommandEXT; + struct IndirectCommandsVertexBufferTokenEXT; + struct BindVertexBufferIndirectCommandEXT; + struct IndirectCommandsIndexBufferTokenEXT; + struct BindIndexBufferIndirectCommandEXT; + struct IndirectCommandsPushConstantTokenEXT; + 
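Once a feature struct such as PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT (declared just above) reports support, it is enabled by chaining it into device creation. A hedged sketch, assuming standard vulkan.hpp device creation; queue setup and extension names are omitted for brevity:

    vk::PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT dgcFeatures{};
    dgcFeatures.deviceGeneratedCommands = VK_TRUE;

    vk::DeviceCreateInfo deviceCreateInfo{};           // queue create infos etc. omitted for brevity
    deviceCreateInfo.pNext = &dgcFeatures;             // extension features are enabled via the pNext chain
    vk::Device device = physicalDevice.createDevice( deviceCreateInfo );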
struct IndirectCommandsExecutionSetTokenEXT; + union IndirectCommandsTokenDataEXT; + struct IndirectExecutionSetShaderLayoutInfoEXT; + struct GeneratedCommandsPipelineInfoEXT; + struct GeneratedCommandsShaderInfoEXT; + struct WriteIndirectExecutionSetShaderEXT; + + //=== VK_KHR_maintenance8 === + struct PhysicalDeviceMaintenance8FeaturesKHR; + struct MemoryBarrierAccessFlags3KHR; + + //=== VK_MESA_image_alignment_control === + struct PhysicalDeviceImageAlignmentControlFeaturesMESA; + struct PhysicalDeviceImageAlignmentControlPropertiesMESA; + struct ImageAlignmentControlCreateInfoMESA; + + //=== VK_EXT_depth_clamp_control === + struct PhysicalDeviceDepthClampControlFeaturesEXT; + struct PipelineViewportDepthClampControlCreateInfoEXT; + struct DepthClampRangeEXT; + + //=== VK_KHR_video_maintenance2 === + struct PhysicalDeviceVideoMaintenance2FeaturesKHR; + struct VideoDecodeH264InlineSessionParametersInfoKHR; + struct VideoDecodeH265InlineSessionParametersInfoKHR; + struct VideoDecodeAV1InlineSessionParametersInfoKHR; + + //=== VK_HUAWEI_hdr_vivid === + struct PhysicalDeviceHdrVividFeaturesHUAWEI; + struct HdrVividDynamicMetadataHUAWEI; + + //=== VK_NV_cooperative_matrix2 === + struct CooperativeMatrixFlexibleDimensionsPropertiesNV; + struct PhysicalDeviceCooperativeMatrix2FeaturesNV; + struct PhysicalDeviceCooperativeMatrix2PropertiesNV; + + //=== VK_ARM_pipeline_opacity_micromap === + struct PhysicalDevicePipelineOpacityMicromapFeaturesARM; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + struct ImportMemoryMetalHandleInfoEXT; + struct MemoryMetalHandlePropertiesEXT; + struct MemoryGetMetalHandleInfoEXT; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_depth_clamp_zero_one === + struct PhysicalDeviceDepthClampZeroOneFeaturesKHR; + using PhysicalDeviceDepthClampZeroOneFeaturesEXT = PhysicalDeviceDepthClampZeroOneFeaturesKHR; + + //=== VK_EXT_vertex_attribute_robustness === + struct PhysicalDeviceVertexAttributeRobustnessFeaturesEXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_present_metering === + struct SetPresentConfigNV; + struct PhysicalDevicePresentMeteringFeaturesNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_fragment_density_map_offset === + struct RenderingEndInfoEXT; + struct PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT; + using PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM = PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT; + struct PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT; + using PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM = PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT; + struct RenderPassFragmentDensityMapOffsetEndInfoEXT; + using SubpassFragmentDensityMapOffsetEndInfoQCOM = RenderPassFragmentDensityMapOffsetEndInfoEXT; + + //=================================== + //=== HANDLE forward declarations === + //=================================== + + //=== VK_VERSION_1_0 === + class Instance; + class PhysicalDevice; + class Device; + class Queue; + class DeviceMemory; + class Fence; + class Semaphore; + class Event; + class QueryPool; + class Buffer; + class BufferView; + class Image; + class ImageView; + class ShaderModule; + class PipelineCache; + class Pipeline; + class PipelineLayout; + class Sampler; + class DescriptorPool; + class DescriptorSet; + class DescriptorSetLayout; + class Framebuffer; + class RenderPass; + class CommandPool; + class CommandBuffer; + + //=== VK_VERSION_1_1 === + class SamplerYcbcrConversion; + class DescriptorUpdateTemplate; + + //=== 
VK_VERSION_1_3 === + class PrivateDataSlot; + + //=== VK_KHR_surface === + class SurfaceKHR; + + //=== VK_KHR_swapchain === + class SwapchainKHR; + + //=== VK_KHR_display === + class DisplayKHR; + class DisplayModeKHR; + + //=== VK_EXT_debug_report === + class DebugReportCallbackEXT; + + //=== VK_KHR_video_queue === + class VideoSessionKHR; + class VideoSessionParametersKHR; + + //=== VK_NVX_binary_import === + class CuModuleNVX; + class CuFunctionNVX; + + //=== VK_EXT_debug_utils === + class DebugUtilsMessengerEXT; + + //=== VK_KHR_acceleration_structure === + class AccelerationStructureKHR; + + //=== VK_EXT_validation_cache === + class ValidationCacheEXT; + + //=== VK_NV_ray_tracing === + class AccelerationStructureNV; + + //=== VK_INTEL_performance_query === + class PerformanceConfigurationINTEL; + + //=== VK_KHR_deferred_host_operations === + class DeferredOperationKHR; + + //=== VK_NV_device_generated_commands === + class IndirectCommandsLayoutNV; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + class CudaModuleNV; + class CudaFunctionNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + class BufferCollectionFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + class MicromapEXT; + + //=== VK_NV_optical_flow === + class OpticalFlowSessionNV; + + //=== VK_EXT_shader_object === + class ShaderEXT; + + //=== VK_KHR_pipeline_binary === + class PipelineBinaryKHR; + + //=== VK_NV_external_compute_queue === + class ExternalComputeQueueNV; + + //=== VK_EXT_device_generated_commands === + class IndirectCommandsLayoutEXT; + class IndirectExecutionSetEXT; + + typedef void( VKAPI_PTR * PFN_VoidFunction )(); + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + //====================== + //=== UNIQUE HANDLEs === + //====================== + + //=== VK_VERSION_1_0 === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueInstance = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueDevice = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectFree; + }; + + using UniqueDeviceMemory = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueFence = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueSemaphore = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueEvent = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueQueryPool = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueBuffer = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueBufferView = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueImage = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueImageView = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + 
}; + + using UniqueShaderModule = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniquePipelineCache = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniquePipeline = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniquePipelineLayout = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueSampler = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueDescriptorPool = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::PoolFree; + }; + + using UniqueDescriptorSet = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueDescriptorSetLayout = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueFramebuffer = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueRenderPass = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueCommandPool = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::PoolFree; + }; + + using UniqueCommandBuffer = UniqueHandle; + + //=== VK_VERSION_1_1 === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueSamplerYcbcrConversion = UniqueHandle; + using UniqueSamplerYcbcrConversionKHR = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueDescriptorUpdateTemplate = UniqueHandle; + using UniqueDescriptorUpdateTemplateKHR = UniqueHandle; + + //=== VK_VERSION_1_3 === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniquePrivateDataSlot = UniqueHandle; + using UniquePrivateDataSlotEXT = UniqueHandle; + + //=== VK_KHR_surface === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueSurfaceKHR = UniqueHandle; + + //=== VK_KHR_swapchain === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueSwapchainKHR = UniqueHandle; + + //=== VK_KHR_display === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueDisplayKHR = UniqueHandle; + + //=== VK_EXT_debug_report === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueDebugReportCallbackEXT = UniqueHandle; + + //=== VK_KHR_video_queue === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueVideoSessionKHR = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueVideoSessionParametersKHR = UniqueHandle; + + //=== VK_NVX_binary_import === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueCuModuleNVX = UniqueHandle; + + template + class 
UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueCuFunctionNVX = UniqueHandle; + + //=== VK_EXT_debug_utils === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueDebugUtilsMessengerEXT = UniqueHandle; + + //=== VK_KHR_acceleration_structure === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueAccelerationStructureKHR = UniqueHandle; + + //=== VK_EXT_validation_cache === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueValidationCacheEXT = UniqueHandle; + + //=== VK_NV_ray_tracing === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueAccelerationStructureNV = UniqueHandle; + + //=== VK_INTEL_performance_query === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniquePerformanceConfigurationINTEL = UniqueHandle; + + //=== VK_KHR_deferred_host_operations === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueDeferredOperationKHR = UniqueHandle; + + //=== VK_NV_device_generated_commands === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueIndirectCommandsLayoutNV = UniqueHandle; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueCudaModuleNV = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueCudaFunctionNV = UniqueHandle; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueBufferCollectionFUCHSIA = UniqueHandle; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueMicromapEXT = UniqueHandle; + + //=== VK_NV_optical_flow === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueOpticalFlowSessionNV = UniqueHandle; + + //=== VK_EXT_shader_object === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueShaderEXT = UniqueHandle; + + //=== VK_KHR_pipeline_binary === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniquePipelineBinaryKHR = UniqueHandle; + + //=== VK_NV_external_compute_queue === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueExternalComputeQueueNV = UniqueHandle; + + //=== VK_EXT_device_generated_commands === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueIndirectCommandsLayoutEXT = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueIndirectExecutionSetEXT = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + //=============== + //=== HANDLEs === + //=============== + + 
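The UniqueHandleTraits specializations above pair each handle type with a deleter (ObjectDestroy, ObjectFree or PoolFree), so the Unique* aliases release their handle automatically on scope exit; the handle classes that follow are the corresponding non-owning wrappers. A minimal sketch, assuming the template parameters follow the usual vulkan.hpp pattern:

    vk::ApplicationInfo    appInfo( "demo", 1, "demo", 1, VK_API_VERSION_1_3 );
    vk::InstanceCreateInfo instanceCreateInfo( {}, &appInfo );
    vk::UniqueInstance     instance = vk::createInstanceUnique( instanceCreateInfo );  // destroyed automatically on scope exit

    VkInstance rawInstance = static_cast<VkInstance>( *instance );                     // explicit conversion to the C handle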
template + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = false; + }; + + // wrapper class for handle VkSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceKHR.html + class SurfaceKHR + { + public: + using CType = VkSurfaceKHR; + using NativeType = VkSurfaceKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR; + + public: + SurfaceKHR() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + SurfaceKHR( SurfaceKHR const & rhs ) = default; + SurfaceKHR & operator=( SurfaceKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + SurfaceKHR( SurfaceKHR && rhs ) = default; + SurfaceKHR & operator=( SurfaceKHR && rhs ) = default; +#else + SurfaceKHR( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT : m_surfaceKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_surfaceKHR, {} ) ) {} + + SurfaceKHR & operator=( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_surfaceKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_surfaceKHR, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT : m_surfaceKHR( surfaceKHR ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + SurfaceKHR & operator=( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT + { + m_surfaceKHR = surfaceKHR; + return *this; + } +#endif + + SurfaceKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_surfaceKHR = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR == VK_NULL_HANDLE; + } + + private: + VkSurfaceKHR m_surfaceKHR = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkDebugReportCallbackEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugReportCallbackEXT.html + class DebugReportCallbackEXT + { + public: + using CType = VkDebugReportCallbackEXT; + using NativeType = VkDebugReportCallbackEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT; + + public: + DebugReportCallbackEXT() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + DebugReportCallbackEXT( DebugReportCallbackEXT const & rhs ) = default; + DebugReportCallbackEXT & operator=( DebugReportCallbackEXT const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + 
DebugReportCallbackEXT( DebugReportCallbackEXT && rhs ) = default; + DebugReportCallbackEXT & operator=( DebugReportCallbackEXT && rhs ) = default; +#else + DebugReportCallbackEXT( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_debugReportCallbackEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_debugReportCallbackEXT, {} ) ) + { + } + + DebugReportCallbackEXT & operator=( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + m_debugReportCallbackEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_debugReportCallbackEXT, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT + : m_debugReportCallbackEXT( debugReportCallbackEXT ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + DebugReportCallbackEXT & operator=( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT + { + m_debugReportCallbackEXT = debugReportCallbackEXT; + return *this; + } +#endif + + DebugReportCallbackEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_debugReportCallbackEXT = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT == VK_NULL_HANDLE; + } + + private: + VkDebugReportCallbackEXT m_debugReportCallbackEXT = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkDebugUtilsMessengerEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsMessengerEXT.html + class DebugUtilsMessengerEXT + { + public: + using CType = VkDebugUtilsMessengerEXT; + using NativeType = VkDebugUtilsMessengerEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + DebugUtilsMessengerEXT() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + DebugUtilsMessengerEXT( DebugUtilsMessengerEXT const & rhs ) = default; + DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DebugUtilsMessengerEXT( DebugUtilsMessengerEXT && rhs ) = default; + DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT && rhs ) = default; +#else + DebugUtilsMessengerEXT( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_debugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_debugUtilsMessengerEXT, {} ) ) + { + } + + DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + m_debugUtilsMessengerEXT = VULKAN_HPP_NAMESPACE::exchange( 
rhs.m_debugUtilsMessengerEXT, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT + : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + DebugUtilsMessengerEXT & operator=( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT + { + m_debugUtilsMessengerEXT = debugUtilsMessengerEXT; + return *this; + } +#endif + + DebugUtilsMessengerEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_debugUtilsMessengerEXT = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT == VK_NULL_HANDLE; + } + + private: + VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkDisplayKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayKHR.html + class DisplayKHR + { + public: + using CType = VkDisplayKHR; + using NativeType = VkDisplayKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR; + + public: + DisplayKHR() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + DisplayKHR( DisplayKHR const & rhs ) = default; + DisplayKHR & operator=( DisplayKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DisplayKHR( DisplayKHR && rhs ) = default; + DisplayKHR & operator=( DisplayKHR && rhs ) = default; +#else + DisplayKHR( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT : m_displayKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_displayKHR, {} ) ) {} + + DisplayKHR & operator=( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_displayKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_displayKHR, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT : m_displayKHR( displayKHR ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + DisplayKHR & operator=( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT + { + m_displayKHR = displayKHR; + return *this; + } +#endif + + DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_displayKHR = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR == VK_NULL_HANDLE; + } + + private: + 
VkDisplayKHR m_displayKHR = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkSwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainKHR.html + class SwapchainKHR + { + public: + using CType = VkSwapchainKHR; + using NativeType = VkSwapchainKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR; + + public: + SwapchainKHR() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + SwapchainKHR( SwapchainKHR const & rhs ) = default; + SwapchainKHR & operator=( SwapchainKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + SwapchainKHR( SwapchainKHR && rhs ) = default; + SwapchainKHR & operator=( SwapchainKHR && rhs ) = default; +#else + SwapchainKHR( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT : m_swapchainKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_swapchainKHR, {} ) ) {} + + SwapchainKHR & operator=( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_swapchainKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_swapchainKHR, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT : m_swapchainKHR( swapchainKHR ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + SwapchainKHR & operator=( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT + { + m_swapchainKHR = swapchainKHR; + return *this; + } +#endif + + SwapchainKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_swapchainKHR = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR == VK_NULL_HANDLE; + } + + private: + VkSwapchainKHR m_swapchainKHR = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphore.html + class Semaphore + { + public: + using CType = VkSemaphore; + using NativeType = VkSemaphore; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + 
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore; + + public: + Semaphore() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + Semaphore( Semaphore const & rhs ) = default; + Semaphore & operator=( Semaphore const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Semaphore( Semaphore && rhs ) = default; + Semaphore & operator=( Semaphore && rhs ) = default; +#else + Semaphore( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT : m_semaphore( VULKAN_HPP_NAMESPACE::exchange( rhs.m_semaphore, {} ) ) {} + + Semaphore & operator=( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT + { + m_semaphore = VULKAN_HPP_NAMESPACE::exchange( rhs.m_semaphore, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT : m_semaphore( semaphore ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + Semaphore & operator=( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT + { + m_semaphore = semaphore; + return *this; + } +#endif + + Semaphore & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_semaphore = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore == VK_NULL_HANDLE; + } + + private: + VkSemaphore m_semaphore = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Semaphore; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Semaphore; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Semaphore; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFence.html + class Fence + { + public: + using CType = VkFence; + using NativeType = VkFence; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFence; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence; + + public: + Fence() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + Fence( Fence const & rhs ) = default; + Fence & operator=( Fence const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Fence( Fence && rhs ) = default; + Fence & operator=( Fence && rhs ) = default; +#else + Fence( Fence && rhs ) VULKAN_HPP_NOEXCEPT : m_fence( VULKAN_HPP_NAMESPACE::exchange( rhs.m_fence, {} ) ) {} + + Fence & operator=( Fence && rhs ) VULKAN_HPP_NOEXCEPT + { + m_fence = VULKAN_HPP_NAMESPACE::exchange( rhs.m_fence, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT : m_fence( fence ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + Fence & operator=( VkFence fence ) VULKAN_HPP_NOEXCEPT + { + m_fence = fence; + return *this; + } +#endif + + Fence & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_fence = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() 
const VULKAN_HPP_NOEXCEPT + { + return m_fence; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_fence != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_fence == VK_NULL_HANDLE; + } + + private: + VkFence m_fence = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Fence; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Fence; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Fence; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkPerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceConfigurationINTEL.html + class PerformanceConfigurationINTEL + { + public: + using CType = VkPerformanceConfigurationINTEL; + using NativeType = VkPerformanceConfigurationINTEL; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + PerformanceConfigurationINTEL() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + PerformanceConfigurationINTEL( PerformanceConfigurationINTEL const & rhs ) = default; + PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + PerformanceConfigurationINTEL( PerformanceConfigurationINTEL && rhs ) = default; + PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL && rhs ) = default; +#else + PerformanceConfigurationINTEL( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT + : m_performanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::exchange( rhs.m_performanceConfigurationINTEL, {} ) ) + { + } + + PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT + { + m_performanceConfigurationINTEL = VULKAN_HPP_NAMESPACE::exchange( rhs.m_performanceConfigurationINTEL, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT + : m_performanceConfigurationINTEL( performanceConfigurationINTEL ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + PerformanceConfigurationINTEL & operator=( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT + { + m_performanceConfigurationINTEL = performanceConfigurationINTEL; + return *this; + } +#endif + + PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_performanceConfigurationINTEL = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL == VK_NULL_HANDLE; + } + + private: + VkPerformanceConfigurationINTEL 
m_performanceConfigurationINTEL = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPool.html + class QueryPool + { + public: + using CType = VkQueryPool; + using NativeType = VkQueryPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool; + + public: + QueryPool() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + QueryPool( QueryPool const & rhs ) = default; + QueryPool & operator=( QueryPool const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + QueryPool( QueryPool && rhs ) = default; + QueryPool & operator=( QueryPool && rhs ) = default; +#else + QueryPool( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT : m_queryPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_queryPool, {} ) ) {} + + QueryPool & operator=( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT + { + m_queryPool = VULKAN_HPP_NAMESPACE::exchange( rhs.m_queryPool, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT : m_queryPool( queryPool ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + QueryPool & operator=( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT + { + m_queryPool = queryPool; + return *this; + } +#endif + + QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_queryPool = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool == VK_NULL_HANDLE; + } + + private: + VkQueryPool m_queryPool = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::QueryPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::QueryPool; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::QueryPool; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBuffer.html + class Buffer + { + public: + using CType = VkBuffer; + using NativeType = VkBuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer; + + public: + Buffer() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + Buffer( Buffer const & rhs ) = default; + Buffer & operator=( Buffer const & 
rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Buffer( Buffer && rhs ) = default; + Buffer & operator=( Buffer && rhs ) = default; +#else + Buffer( Buffer && rhs ) VULKAN_HPP_NOEXCEPT : m_buffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_buffer, {} ) ) {} + + Buffer & operator=( Buffer && rhs ) VULKAN_HPP_NOEXCEPT + { + m_buffer = VULKAN_HPP_NAMESPACE::exchange( rhs.m_buffer, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT : m_buffer( buffer ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + Buffer & operator=( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT + { + m_buffer = buffer; + return *this; + } +#endif + + Buffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_buffer = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const VULKAN_HPP_NOEXCEPT + { + return m_buffer; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_buffer != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_buffer == VK_NULL_HANDLE; + } + + private: + VkBuffer m_buffer = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Buffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Buffer; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Buffer; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkPipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineLayout.html + class PipelineLayout + { + public: + using CType = VkPipelineLayout; + using NativeType = VkPipelineLayout; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout; + + public: + PipelineLayout() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + PipelineLayout( PipelineLayout const & rhs ) = default; + PipelineLayout & operator=( PipelineLayout const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + PipelineLayout( PipelineLayout && rhs ) = default; + PipelineLayout & operator=( PipelineLayout && rhs ) = default; +#else + PipelineLayout( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT : m_pipelineLayout( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineLayout, {} ) ) {} + + PipelineLayout & operator=( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT + { + m_pipelineLayout = VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineLayout, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT : m_pipelineLayout( pipelineLayout ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + PipelineLayout & operator=( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT + { + m_pipelineLayout = pipelineLayout; + return *this; + } +#endif + + PipelineLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipelineLayout = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const 
VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout == VK_NULL_HANDLE; + } + + private: + VkPipelineLayout m_pipelineLayout = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkDescriptorSet, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSet.html + class DescriptorSet + { + public: + using CType = VkDescriptorSet; + using NativeType = VkDescriptorSet; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet; + + public: + DescriptorSet() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + DescriptorSet( DescriptorSet const & rhs ) = default; + DescriptorSet & operator=( DescriptorSet const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DescriptorSet( DescriptorSet && rhs ) = default; + DescriptorSet & operator=( DescriptorSet && rhs ) = default; +#else + DescriptorSet( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT : m_descriptorSet( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSet, {} ) ) {} + + DescriptorSet & operator=( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSet = VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSet, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT : m_descriptorSet( descriptorSet ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + DescriptorSet & operator=( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSet = descriptorSet; + return *this; + } +#endif + + DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSet = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet == VK_NULL_HANDLE; + } + + private: + VkDescriptorSet m_descriptorSet = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageView.html + class ImageView + { + 
public: + using CType = VkImageView; + using NativeType = VkImageView; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView; + + public: + ImageView() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + ImageView( ImageView const & rhs ) = default; + ImageView & operator=( ImageView const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + ImageView( ImageView && rhs ) = default; + ImageView & operator=( ImageView && rhs ) = default; +#else + ImageView( ImageView && rhs ) VULKAN_HPP_NOEXCEPT : m_imageView( VULKAN_HPP_NAMESPACE::exchange( rhs.m_imageView, {} ) ) {} + + ImageView & operator=( ImageView && rhs ) VULKAN_HPP_NOEXCEPT + { + m_imageView = VULKAN_HPP_NAMESPACE::exchange( rhs.m_imageView, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT : m_imageView( imageView ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + ImageView & operator=( VkImageView imageView ) VULKAN_HPP_NOEXCEPT + { + m_imageView = imageView; + return *this; + } +#endif + + ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_imageView = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT + { + return m_imageView; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_imageView != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_imageView == VK_NULL_HANDLE; + } + + private: + VkImageView m_imageView = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ImageView; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ImageView; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ImageView; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkPipeline, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipeline.html + class Pipeline + { + public: + using CType = VkPipeline; + using NativeType = VkPipeline; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline; + + public: + Pipeline() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + Pipeline( Pipeline const & rhs ) = default; + Pipeline & operator=( Pipeline const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Pipeline( Pipeline && rhs ) = default; + Pipeline & operator=( Pipeline && rhs ) = default; +#else + Pipeline( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT : m_pipeline( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipeline, {} ) ) {} + + Pipeline & operator=( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT + { + m_pipeline = VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipeline, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + 
VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT : m_pipeline( pipeline ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + Pipeline & operator=( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT + { + m_pipeline = pipeline; + return *this; + } +#endif + + Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipeline = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline == VK_NULL_HANDLE; + } + + private: + VkPipeline m_pipeline = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Pipeline; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Pipeline; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Pipeline; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkShaderEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderEXT.html + class ShaderEXT + { + public: + using CType = VkShaderEXT; + using NativeType = VkShaderEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + ShaderEXT() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + ShaderEXT( ShaderEXT const & rhs ) = default; + ShaderEXT & operator=( ShaderEXT const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + ShaderEXT( ShaderEXT && rhs ) = default; + ShaderEXT & operator=( ShaderEXT && rhs ) = default; +#else + ShaderEXT( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT : m_shaderEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_shaderEXT, {} ) ) {} + + ShaderEXT & operator=( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + m_shaderEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_shaderEXT, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR ShaderEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ShaderEXT( VkShaderEXT shaderEXT ) VULKAN_HPP_NOEXCEPT : m_shaderEXT( shaderEXT ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + ShaderEXT & operator=( VkShaderEXT shaderEXT ) VULKAN_HPP_NOEXCEPT + { + m_shaderEXT = shaderEXT; + return *this; + } +#endif + + ShaderEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_shaderEXT = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderEXT() const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT == VK_NULL_HANDLE; + } + + private: + VkShaderEXT m_shaderEXT = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderEXT; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderEXT; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for 
handle VkImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImage.html + class Image + { + public: + using CType = VkImage; + using NativeType = VkImage; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImage; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage; + + public: + Image() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + Image( Image const & rhs ) = default; + Image & operator=( Image const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Image( Image && rhs ) = default; + Image & operator=( Image && rhs ) = default; +#else + Image( Image && rhs ) VULKAN_HPP_NOEXCEPT : m_image( VULKAN_HPP_NAMESPACE::exchange( rhs.m_image, {} ) ) {} + + Image & operator=( Image && rhs ) VULKAN_HPP_NOEXCEPT + { + m_image = VULKAN_HPP_NAMESPACE::exchange( rhs.m_image, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT : m_image( image ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + Image & operator=( VkImage image ) VULKAN_HPP_NOEXCEPT + { + m_image = image; + return *this; + } +#endif + + Image & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_image = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const VULKAN_HPP_NOEXCEPT + { + return m_image; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_image != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_image == VK_NULL_HANDLE; + } + + private: + VkImage m_image = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Image; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Image; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Image; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkAccelerationStructureNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureNV.html + class AccelerationStructureNV + { + public: + using CType = VkAccelerationStructureNV; + using NativeType = VkAccelerationStructureNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV; + + public: + AccelerationStructureNV() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + AccelerationStructureNV( AccelerationStructureNV const & rhs ) = default; + AccelerationStructureNV & operator=( AccelerationStructureNV const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + AccelerationStructureNV( AccelerationStructureNV && rhs ) = default; + AccelerationStructureNV & operator=( AccelerationStructureNV && rhs ) = default; +#else + AccelerationStructureNV( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructureNV, {} ) ) + { + } + + 
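// Usage sketch: adopting a VkImage created outside vulkan.hpp (for example by a
// separate allocator or a swapchain query) so it can be passed to the command-buffer
// wrappers declared further below. The static objectType member is part of the wrapper
// shown above; the helper function itself is hypothetical.
#include <vulkan/vulkan.hpp>

vk::Image adoptRawImage( VkImage rawImage )
{
  static_assert( vk::Image::objectType == vk::ObjectType::eImage, "trait sanity check" );
  return vk::Image( rawImage );   // same bit pattern as the C handle, zero overhead
}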
AccelerationStructureNV & operator=( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureNV = VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructureNV, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureNV( accelerationStructureNV ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + AccelerationStructureNV & operator=( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureNV = accelerationStructureNV; + return *this; + } +#endif + + AccelerationStructureNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureNV = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureNV() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV == VK_NULL_HANDLE; + } + + private: + VkAccelerationStructureNV m_accelerationStructureNV = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkOpticalFlowSessionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowSessionNV.html + class OpticalFlowSessionNV + { + public: + using CType = VkOpticalFlowSessionNV; + using NativeType = VkOpticalFlowSessionNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + OpticalFlowSessionNV() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + OpticalFlowSessionNV( OpticalFlowSessionNV const & rhs ) = default; + OpticalFlowSessionNV & operator=( OpticalFlowSessionNV const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + OpticalFlowSessionNV( OpticalFlowSessionNV && rhs ) = default; + OpticalFlowSessionNV & operator=( OpticalFlowSessionNV && rhs ) = default; +#else + OpticalFlowSessionNV( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_opticalFlowSessionNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_opticalFlowSessionNV, {} ) ) + { + } + + OpticalFlowSessionNV & operator=( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT + { + m_opticalFlowSessionNV = VULKAN_HPP_NAMESPACE::exchange( rhs.m_opticalFlowSessionNV, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR OpticalFlowSessionNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT OpticalFlowSessionNV( VkOpticalFlowSessionNV opticalFlowSessionNV ) VULKAN_HPP_NOEXCEPT + : m_opticalFlowSessionNV( opticalFlowSessionNV ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION 
== 1 ) + OpticalFlowSessionNV & operator=( VkOpticalFlowSessionNV opticalFlowSessionNV ) VULKAN_HPP_NOEXCEPT + { + m_opticalFlowSessionNV = opticalFlowSessionNV; + return *this; + } +#endif + + OpticalFlowSessionNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_opticalFlowSessionNV = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkOpticalFlowSessionNV() const VULKAN_HPP_NOEXCEPT + { + return m_opticalFlowSessionNV; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_opticalFlowSessionNV != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_opticalFlowSessionNV == VK_NULL_HANDLE; + } + + private: + VkOpticalFlowSessionNV m_opticalFlowSessionNV = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkDescriptorUpdateTemplate, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorUpdateTemplate.html + class DescriptorUpdateTemplate + { + public: + using CType = VkDescriptorUpdateTemplate; + using NativeType = VkDescriptorUpdateTemplate; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate; + + public: + DescriptorUpdateTemplate() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + DescriptorUpdateTemplate( DescriptorUpdateTemplate const & rhs ) = default; + DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DescriptorUpdateTemplate( DescriptorUpdateTemplate && rhs ) = default; + DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate && rhs ) = default; +#else + DescriptorUpdateTemplate( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT + : m_descriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ) ) + { + } + + DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT + { + m_descriptorUpdateTemplate = VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT + : m_descriptorUpdateTemplate( descriptorUpdateTemplate ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + DescriptorUpdateTemplate & operator=( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT + { + m_descriptorUpdateTemplate = descriptorUpdateTemplate; + return *this; + } +#endif + + DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorUpdateTemplate = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return 
m_descriptorUpdateTemplate != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate == VK_NULL_HANDLE; + } + + private: + VkDescriptorUpdateTemplate m_descriptorUpdateTemplate = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate; + + // wrapper class for handle VkEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkEvent.html + class Event + { + public: + using CType = VkEvent; + using NativeType = VkEvent; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent; + + public: + Event() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + Event( Event const & rhs ) = default; + Event & operator=( Event const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Event( Event && rhs ) = default; + Event & operator=( Event && rhs ) = default; +#else + Event( Event && rhs ) VULKAN_HPP_NOEXCEPT : m_event( VULKAN_HPP_NAMESPACE::exchange( rhs.m_event, {} ) ) {} + + Event & operator=( Event && rhs ) VULKAN_HPP_NOEXCEPT + { + m_event = VULKAN_HPP_NAMESPACE::exchange( rhs.m_event, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT : m_event( event ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + Event & operator=( VkEvent event ) VULKAN_HPP_NOEXCEPT + { + m_event = event; + return *this; + } +#endif + + Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_event = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT + { + return m_event; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_event != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_event == VK_NULL_HANDLE; + } + + private: + VkEvent m_event = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Event; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Event; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Event; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkAccelerationStructureKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureKHR.html + class AccelerationStructureKHR + { + public: + using CType = VkAccelerationStructureKHR; + using NativeType = VkAccelerationStructureKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
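// Usage sketch: the CppType and isVulkanHandleType specializations emitted above let
// generic code map between C and C++ handle types. The helper below is hypothetical and
// only illustrates the trait interface; it assumes the usual two-parameter
// CppType<EnumType, value> primary template of vulkan.hpp.
#include <type_traits>
#include <vulkan/vulkan.hpp>

template <typename HandleType>
typename HandleType::CType toCHandle( HandleType handle )
{
  static_assert( vk::isVulkanHandleType<HandleType>::value,
                 "only Vulkan handle wrappers are accepted here" );
  return static_cast<typename HandleType::CType>( handle );
}

// CppType maps an ObjectType enumerant back to the wrapper class:
static_assert( std::is_same<vk::CppType<vk::ObjectType, vk::ObjectType::eEvent>::Type,
                            vk::Event>::value,
               "VkEvent corresponds to vk::Event" );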
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR; + + public: + AccelerationStructureKHR() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + AccelerationStructureKHR( AccelerationStructureKHR const & rhs ) = default; + AccelerationStructureKHR & operator=( AccelerationStructureKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + AccelerationStructureKHR( AccelerationStructureKHR && rhs ) = default; + AccelerationStructureKHR & operator=( AccelerationStructureKHR && rhs ) = default; +#else + AccelerationStructureKHR( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructureKHR, {} ) ) + { + } + + AccelerationStructureKHR & operator=( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructureKHR, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR AccelerationStructureKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureKHR( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureKHR( accelerationStructureKHR ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + AccelerationStructureKHR & operator=( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureKHR = accelerationStructureKHR; + return *this; + } +#endif + + AccelerationStructureKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureKHR = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR == VK_NULL_HANDLE; + } + + private: + VkAccelerationStructureKHR m_accelerationStructureKHR = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapEXT.html + class MicromapEXT + { + public: + using CType = VkMicromapEXT; + using NativeType = VkMicromapEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + MicromapEXT() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + MicromapEXT( MicromapEXT const & rhs ) = default; + MicromapEXT & operator=( MicromapEXT const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + MicromapEXT( MicromapEXT && rhs ) = default; + MicromapEXT & operator=( 
MicromapEXT && rhs ) = default; +#else + MicromapEXT( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT : m_micromapEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_micromapEXT, {} ) ) {} + + MicromapEXT & operator=( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + m_micromapEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_micromapEXT, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR MicromapEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT MicromapEXT( VkMicromapEXT micromapEXT ) VULKAN_HPP_NOEXCEPT : m_micromapEXT( micromapEXT ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + MicromapEXT & operator=( VkMicromapEXT micromapEXT ) VULKAN_HPP_NOEXCEPT + { + m_micromapEXT = micromapEXT; + return *this; + } +#endif + + MicromapEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_micromapEXT = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkMicromapEXT() const VULKAN_HPP_NOEXCEPT + { + return m_micromapEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_micromapEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_micromapEXT == VK_NULL_HANDLE; + } + + private: + VkMicromapEXT m_micromapEXT = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::MicromapEXT; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::MicromapEXT; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBuffer.html + class CommandBuffer + { + public: + using CType = VkCommandBuffer; + using NativeType = VkCommandBuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer; + + public: + CommandBuffer() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + CommandBuffer( CommandBuffer const & rhs ) = default; + CommandBuffer & operator=( CommandBuffer const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + CommandBuffer( CommandBuffer && rhs ) = default; + CommandBuffer & operator=( CommandBuffer && rhs ) = default; +#else + CommandBuffer( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT : m_commandBuffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandBuffer, {} ) ) {} + + CommandBuffer & operator=( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT + { + m_commandBuffer = VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandBuffer, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT : m_commandBuffer( commandBuffer ) {} + + CommandBuffer & operator=( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT + { + m_commandBuffer = commandBuffer; + return *this; + } + + CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_commandBuffer = {}; + return *this; + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkBeginCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBeginCommandBuffer.html + template + VULKAN_HPP_NODISCARD Result begin( const 
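// Usage sketch: the recording lifecycle exposed by the begin()/end()/reset() wrappers
// declared below. Assumes exceptions are enabled (the enhanced-mode overloads then
// return void and throw vk::SystemError on failure) and that `cmd` was allocated from a
// pool created with eResetCommandBuffer.
#include <vulkan/vulkan.hpp>

void recordOnce( vk::CommandBuffer cmd )
{
  cmd.reset( vk::CommandBufferResetFlags{} );            // optional explicit reset
  vk::CommandBufferBeginInfo beginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit );
  cmd.begin( beginInfo );                                // vkBeginCommandBuffer
  // ... record commands ...
  cmd.end();                                             // vkEndCommandBuffer
}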
VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBeginCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBeginCommandBuffer.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEndCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEndCommandBuffer.html + template + VULKAN_HPP_NODISCARD Result end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkEndCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEndCommandBuffer.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkResetCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetCommandBuffer.html + template + VULKAN_HPP_NODISCARD Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkResetCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetCommandBuffer.html + template + typename ResultValueType::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkCmdBindPipeline, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindPipeline.html + template + void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetViewport, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewport.html + template + void setViewport( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetViewport, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewport.html + template + void setViewport( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetScissor, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissor.html + template + void setScissor( uint32_t firstScissor, + uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
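// Usage sketch: binding a graphics pipeline and setting dynamic viewport / scissor
// state through the wrappers above. The single-element enhanced overloads rely on
// ArrayProxy's conversion from one object; all names are placeholders.
#include <vulkan/vulkan.hpp>

void bindAndSetDynamicState( vk::CommandBuffer cmd, vk::Pipeline pipeline, vk::Extent2D extent )
{
  cmd.bindPipeline( vk::PipelineBindPoint::eGraphics, pipeline );

  vk::Viewport viewport( 0.0f, 0.0f,
                         static_cast<float>( extent.width ),
                         static_cast<float>( extent.height ),
                         0.0f, 1.0f );
  cmd.setViewport( 0, viewport );                        // vkCmdSetViewport, one viewport

  vk::Rect2D scissor( vk::Offset2D( 0, 0 ), extent );
  cmd.setScissor( 0, scissor );                          // vkCmdSetScissor, one scissor
}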
VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetScissor, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissor.html + template + void setScissor( uint32_t firstScissor, + VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetLineWidth, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineWidth.html + template + void setLineWidth( float lineWidth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthBias, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBias.html + template + void setDepthBias( float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetBlendConstants, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetBlendConstants.html + template + void setBlendConstants( const float blendConstants[4], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthBounds, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBounds.html + template + void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetStencilCompareMask, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilCompareMask.html + template + void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t compareMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetStencilWriteMask, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilWriteMask.html + template + void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t writeMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetStencilReference, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilReference.html + template + void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t reference, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBindDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets.html + template + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets.html + template + void 
bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorSets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dynamicOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindIndexBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindIndexBuffer.html + template + void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBindVertexBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers.html + template + void bindVertexBuffers( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindVertexBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers.html + template + void bindVertexBuffers( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDraw, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDraw.html + template + void draw( uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawIndexed, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexed.html + template + void drawIndexed( uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawIndirect, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirect.html + template + void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawIndexedIndirect, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirect.html + template + void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDispatch, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatch.html + template + void dispatch( uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
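// Usage sketch: an indexed draw using the binding wrappers above. The descriptor set,
// buffers and counts are placeholders supplied by the caller.
#include <vulkan/vulkan.hpp>

void drawMesh( vk::CommandBuffer cmd, vk::PipelineLayout layout, vk::DescriptorSet set,
               vk::Buffer vertexBuffer, vk::Buffer indexBuffer, uint32_t indexCount )
{
  cmd.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, layout,
                          0 /*firstSet*/, set, nullptr /*no dynamic offsets*/ );

  vk::DeviceSize vertexOffset = 0;
  cmd.bindVertexBuffers( 0 /*firstBinding*/, vertexBuffer, vertexOffset );
  cmd.bindIndexBuffer( indexBuffer, 0, vk::IndexType::eUint32 );

  cmd.drawIndexed( indexCount, 1 /*instanceCount*/, 0, 0, 0 );
}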
VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDispatchIndirect, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchIndirect.html + template + void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer.html + template + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer.html + template + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage.html + template + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage.html + template + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBlitImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage.html + template + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBlitImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage.html + template + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for 
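// Usage sketch: a buffer-to-buffer copy and a compute dispatch recorded with the
// wrappers above; the 64-wide workgroup size is an assumption about the shader.
#include <vulkan/vulkan.hpp>

void stageCopy( vk::CommandBuffer cmd, vk::Buffer staging, vk::Buffer deviceLocal,
                vk::DeviceSize size )
{
  vk::BufferCopy region( 0 /*srcOffset*/, 0 /*dstOffset*/, size );
  cmd.copyBuffer( staging, deviceLocal, region );        // vkCmdCopyBuffer, one region
}

void runCompute( vk::CommandBuffer cmd, vk::Pipeline computePipeline, uint32_t workItems )
{
  cmd.bindPipeline( vk::PipelineBindPoint::eCompute, computePipeline );
  cmd.dispatch( ( workItems + 63 ) / 64, 1, 1 );         // vkCmdDispatch
}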
command vkCmdCopyBufferToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage.html + template + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyBufferToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage.html + template + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyImageToBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer.html + template + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyImageToBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer.html + template + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdUpdateBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdUpdateBuffer.html + template + void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize dataSize, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdUpdateBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdUpdateBuffer.html + template + void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::ArrayProxy const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdFillBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdFillBuffer.html + template + void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + uint32_t data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdClearColorImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearColorImage.html + template + void clearColorImage( 
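// Usage sketch: uploading pixel data with copyBufferToImage. The image is assumed to
// already be in eTransferDstOptimal layout (see the barrier sketch further below), and
// tightly packed data is assumed (bufferRowLength / bufferImageHeight left at 0).
#include <vulkan/vulkan.hpp>

void uploadPixels( vk::CommandBuffer cmd, vk::Buffer staging, vk::Image image,
                   vk::Extent3D extent )
{
  vk::BufferImageCopy region{};
  region.bufferOffset     = 0;
  region.imageSubresource = vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 );
  region.imageExtent      = extent;
  cmd.copyBufferToImage( staging, image, vk::ImageLayout::eTransferDstOptimal, region );
}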
VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdClearColorImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearColorImage.html + template + void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue & color, + VULKAN_HPP_NAMESPACE::ArrayProxy const & ranges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdClearDepthStencilImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearDepthStencilImage.html + template + void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdClearDepthStencilImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearDepthStencilImage.html + template + void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, + VULKAN_HPP_NAMESPACE::ArrayProxy const & ranges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdClearAttachments, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearAttachments.html + template + void clearAttachments( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, + uint32_t rectCount, + const VULKAN_HPP_NAMESPACE::ClearRect * pRects, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdClearAttachments, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearAttachments.html + template + void clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy const & attachments, + VULKAN_HPP_NAMESPACE::ArrayProxy const & rects, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdResolveImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage.html + template + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdResolveImage, see 
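// Usage sketch: clearing a whole color image outside a render pass with the
// enhanced-mode clearColorImage overload above.
#include <array>
#include <vulkan/vulkan.hpp>

void clearToBlack( vk::CommandBuffer cmd, vk::Image image )
{
  vk::ClearColorValue black( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } );
  vk::ImageSubresourceRange allSubresources( vk::ImageAspectFlagBits::eColor,
                                             0, VK_REMAINING_MIP_LEVELS,
                                             0, VK_REMAINING_ARRAY_LAYERS );
  cmd.clearColorImage( image, vk::ImageLayout::eTransferDstOptimal, black, allSubresources );
}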
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage.html + template + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent.html + template + void setEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdResetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetEvent.html + template + void resetEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdWaitEvents, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents.html + template + void waitEvents( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdWaitEvents, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents.html + template + void waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPipelineBarrier, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier.html + template + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPipelineBarrier, see 
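// Usage sketch: a typical image layout transition recorded through the enhanced-mode
// pipelineBarrier overload declared below (one image barrier, no memory or buffer
// barriers). The stages and access masks assume a transfer-write -> sampled-read hand-off.
#include <vulkan/vulkan.hpp>

void transitionToShaderRead( vk::CommandBuffer cmd, vk::Image image )
{
  vk::ImageMemoryBarrier barrier{};
  barrier.srcAccessMask       = vk::AccessFlagBits::eTransferWrite;
  barrier.dstAccessMask       = vk::AccessFlagBits::eShaderRead;
  barrier.oldLayout           = vk::ImageLayout::eTransferDstOptimal;
  barrier.newLayout           = vk::ImageLayout::eShaderReadOnlyOptimal;
  barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  barrier.image               = image;
  barrier.subresourceRange    = vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 );

  cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
                       vk::PipelineStageFlagBits::eFragmentShader,
                       vk::DependencyFlags{},
                       nullptr, nullptr, barrier );
}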
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier.html + template + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginQuery, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginQuery.html + template + void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdEndQuery, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndQuery.html + template + void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdResetQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetQueryPool.html + template + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdWriteTimestamp, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteTimestamp.html + template + void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyQueryPoolResults, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyQueryPoolResults.html + template + void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdPushConstants, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants.html + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void * pValues, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushConstants, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants.html + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + VULKAN_HPP_NAMESPACE::ArrayProxy const & values, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // 
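// Usage sketch: GPU timestamps around a workload plus a push-constant update, using the
// query and pushConstants wrappers above. The two-entry query pool, the result buffer
// and the float4 push-constant block are assumptions about the application.
#include <array>
#include <vulkan/vulkan.hpp>

void timedSection( vk::CommandBuffer cmd, vk::QueryPool pool, vk::Buffer results,
                   vk::PipelineLayout layout )
{
  cmd.resetQueryPool( pool, 0, 2 );
  cmd.writeTimestamp( vk::PipelineStageFlagBits::eTopOfPipe, pool, 0 );

  std::array<float, 4> tint = { 1.0f, 0.5f, 0.25f, 1.0f };
  cmd.pushConstants<float>( layout, vk::ShaderStageFlagBits::eFragment, 0, tint );

  // ... draws / dispatches being measured ...

  cmd.writeTimestamp( vk::PipelineStageFlagBits::eBottomOfPipe, pool, 1 );
  cmd.copyQueryPoolResults( pool, 0, 2, results, 0, sizeof( uint64_t ),
                            vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );
}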
wrapper function for command vkCmdBeginRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass.html + template + void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass.html + template + void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdNextSubpass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass.html + template + void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdEndRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass.html + template + void endRenderPass( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdExecuteCommands, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteCommands.html + template + void executeCommands( uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdExecuteCommands, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteCommands.html + template + void executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy const & commandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_1 === + + // wrapper function for command vkCmdSetDeviceMask, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDeviceMask.html + template + void setDeviceMask( uint32_t deviceMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDispatchBase, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchBase.html + template + void dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_2 === + + // wrapper function for command vkCmdDrawIndirectCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectCount.html + template + void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawIndexedIndirectCount, see + // 
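// Usage sketch: a single-subpass render pass recorded with the enhanced-mode
// beginRenderPass overload above. Render pass, framebuffer and extent are assumed to
// come from the application.
#include <array>
#include <vulkan/vulkan.hpp>

void renderFrame( vk::CommandBuffer cmd, vk::RenderPass renderPass,
                  vk::Framebuffer framebuffer, vk::Extent2D extent )
{
  vk::ClearValue clearColor( vk::ClearColorValue( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } ) );

  vk::RenderPassBeginInfo beginInfo{};
  beginInfo.renderPass      = renderPass;
  beginInfo.framebuffer     = framebuffer;
  beginInfo.renderArea      = vk::Rect2D( vk::Offset2D( 0, 0 ), extent );
  beginInfo.clearValueCount = 1;
  beginInfo.pClearValues    = &clearColor;

  cmd.beginRenderPass( beginInfo, vk::SubpassContents::eInline );
  // ... bindPipeline / draw calls ...
  cmd.endRenderPass();
}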
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirectCount.html + template + void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBeginRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass2.html + template + void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass2.html + template + void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdNextSubpass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass2.html + template + void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdNextSubpass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass2.html + template + void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass2.html + template + void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEndRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass2.html + template + void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_3 === + + // wrapper function for command vkCmdSetEvent2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent2.html + template + void setEvent2( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetEvent2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent2.html + template + void 
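// Usage sketch (Vulkan 1.2): GPU-driven drawing where the draw count itself lives in a
// buffer, via drawIndexedIndirectCount above. Both buffers are filled elsewhere.
#include <vulkan/vulkan.hpp>

void gpuDrivenDraws( vk::CommandBuffer cmd, vk::Buffer drawCommands, vk::Buffer drawCount,
                     uint32_t maxDraws )
{
  cmd.drawIndexedIndirectCount( drawCommands, 0 /*offset*/,
                                drawCount, 0 /*countBufferOffset*/,
                                maxDraws, sizeof( VkDrawIndexedIndirectCommand ) );
}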
setEvent2( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdResetEvent2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetEvent2.html + template + void resetEvent2( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdWaitEvents2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents2.html + template + void waitEvents2( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdWaitEvents2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents2.html + template + void waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPipelineBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier2.html + template + void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPipelineBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier2.html + template + void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdWriteTimestamp2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteTimestamp2.html + template + void writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer2.html + template + void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer2.html + template + void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage2.html + template + void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * 
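// Usage sketch (Vulkan 1.3 / synchronization2): the earlier layout transition expressed
// with pipelineBarrier2 and the *2 barrier structures; the stage/access choices are the
// same transfer-write -> sampled-read assumption as before.
#include <vulkan/vulkan.hpp>

void transitionToShaderRead2( vk::CommandBuffer cmd, vk::Image image )
{
  vk::ImageMemoryBarrier2 barrier{};
  barrier.srcStageMask        = vk::PipelineStageFlagBits2::eCopy;
  barrier.srcAccessMask       = vk::AccessFlagBits2::eTransferWrite;
  barrier.dstStageMask        = vk::PipelineStageFlagBits2::eFragmentShader;
  barrier.dstAccessMask       = vk::AccessFlagBits2::eShaderSampledRead;
  barrier.oldLayout           = vk::ImageLayout::eTransferDstOptimal;
  barrier.newLayout           = vk::ImageLayout::eShaderReadOnlyOptimal;
  barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  barrier.image               = image;
  barrier.subresourceRange    = vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 );

  vk::DependencyInfo depInfo{};
  depInfo.imageMemoryBarrierCount = 1;
  depInfo.pImageMemoryBarriers    = &barrier;
  cmd.pipelineBarrier2( depInfo );
}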
pCopyImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage2.html + template + void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyBufferToImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage2.html + template + void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyBufferToImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage2.html + template + void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyImageToBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer2.html + template + void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyImageToBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer2.html + template + void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBlitImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage2.html + template + void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBlitImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage2.html + template + void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdResolveImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage2.html + template + void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdResolveImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage2.html + template + void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginRendering, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRendering.html + template + void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginRendering, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRendering.html + template + void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndRendering, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRendering.html + template + void endRendering( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetCullMode, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCullMode.html + template + void setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetFrontFace, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFrontFace.html + template + void setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetPrimitiveTopology, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveTopology.html + template + void setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetViewportWithCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWithCount.html + template + void setViewportWithCount( uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetViewportWithCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWithCount.html + template + void setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetScissorWithCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissorWithCount.html + template + void setScissorWithCount( uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetScissorWithCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissorWithCount.html + template + void setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindVertexBuffers2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers2.html + template + void bindVertexBuffers2( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindVertexBuffers2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers2.html + template + void bindVertexBuffers2( + uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::ArrayProxy const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDepthTestEnable, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthTestEnable.html + template + void setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthWriteEnable, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthWriteEnable.html + template + void setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthCompareOp, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthCompareOp.html + template + void setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthBoundsTestEnable, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBoundsTestEnable.html + template + void setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetStencilTestEnable, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilTestEnable.html + template + void setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetStencilOp, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilOp.html + template + void setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetRasterizerDiscardEnable, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizerDiscardEnable.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
+                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    // wrapper function for command vkCmdSetDepthBiasEnable, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBiasEnable.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,
+                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    // wrapper function for command vkCmdSetPrimitiveRestartEnable, see
+    // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveRestartEnable.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
+                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_VERSION_1_4 ===
+
+    // wrapper function for command vkCmdSetLineStipple, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStipple.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLineStipple( uint32_t lineStippleFactor,
+                         uint16_t lineStipplePattern,
+                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    // wrapper function for command vkCmdBindIndexBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindIndexBuffer2.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindIndexBuffer2( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                           VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                           VULKAN_HPP_NAMESPACE::DeviceSize size,
+                           VULKAN_HPP_NAMESPACE::IndexType indexType,
+                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    // wrapper function for command vkCmdPushDescriptorSet, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSet( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+                            VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+                            uint32_t set,
+                            uint32_t descriptorWriteCount,
+                            const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    // wrapper function for command vkCmdPushDescriptorSet, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSet( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+                            VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+                            uint32_t set,
+                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    // wrapper function for command vkCmdPushDescriptorSetWithTemplate, see
+    // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+                                        VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+                                        uint32_t set,
+                                        const void * pData,
+                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    // wrapper function for command vkCmdPushDescriptorSetWithTemplate, see
+    // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate.html
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void
pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetRenderingAttachmentLocations, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingAttachmentLocations.html + template + void setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo * pLocationInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetRenderingAttachmentLocations, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingAttachmentLocations.html + template + void setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetRenderingInputAttachmentIndices, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingInputAttachmentIndices.html + template + void setRenderingInputAttachmentIndices( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetRenderingInputAttachmentIndices, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingInputAttachmentIndices.html + template + void setRenderingInputAttachmentIndices( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindDescriptorSets2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets2.html + template + void bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo * pBindDescriptorSetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindDescriptorSets2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets2.html + template + void bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushConstants2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants2.html + template + void pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo * pPushConstantsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushConstants2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants2.html + template + void pushConstants2( const 
VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDescriptorSet2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet2.html + template + void pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo * pPushDescriptorSetInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSet2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet2.html + template + void pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDescriptorSetWithTemplate2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate2.html + template + void pushDescriptorSetWithTemplate2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSetWithTemplate2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate2.html + template + void pushDescriptorSetWithTemplate2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_debug_marker === + + // wrapper function for command vkCmdDebugMarkerBeginEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerBeginEXT.html + template + void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDebugMarkerBeginEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerBeginEXT.html + template + void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDebugMarkerEndEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerEndEXT.html + template + void debugMarkerEndEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDebugMarkerInsertEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerInsertEXT.html + template + void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDebugMarkerInsertEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerInsertEXT.html + template + void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_video_queue === + + // wrapper function for command vkCmdBeginVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginVideoCodingKHR.html + template + void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginVideoCodingKHR.html + template + void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndVideoCodingKHR.html + template + void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEndVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndVideoCodingKHR.html + template + void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdControlVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdControlVideoCodingKHR.html + template + void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdControlVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdControlVideoCodingKHR.html + template + void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_video_decode_queue === + + // wrapper function for command vkCmdDecodeVideoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecodeVideoKHR.html + template + void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDecodeVideoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecodeVideoKHR.html + template + void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== 
VK_EXT_transform_feedback === + + // wrapper function for command vkCmdBindTransformFeedbackBuffersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindTransformFeedbackBuffersEXT.html + template + void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindTransformFeedbackBuffersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindTransformFeedbackBuffersEXT.html + template + void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginTransformFeedbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginTransformFeedbackEXT.html + template + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginTransformFeedbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginTransformFeedbackEXT.html + template + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBufferOffsets + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndTransformFeedbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndTransformFeedbackEXT.html + template + void endTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEndTransformFeedbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndTransformFeedbackEXT.html + template + void endTransformFeedbackEXT( uint32_t firstCounterBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBufferOffsets + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginQueryIndexedEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginQueryIndexedEXT.html + template + void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + uint32_t index, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdEndQueryIndexedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndQueryIndexedEXT.html + template + void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + uint32_t index, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawIndirectByteCountEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectByteCountEXT.html + template + void drawIndirectByteCountEXT( uint32_t instanceCount, + uint32_t firstInstance, + VULKAN_HPP_NAMESPACE::Buffer counterBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NVX_binary_import === + + // wrapper function for command vkCmdCuLaunchKernelNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCuLaunchKernelNVX.html + template + void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCuLaunchKernelNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCuLaunchKernelNVX.html + template + void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_AMD_draw_indirect_count === + + // wrapper function for command vkCmdDrawIndirectCountAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectCountAMD.html + template + void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawIndexedIndirectCountAMD, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirectCountAMD.html + template + void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_dynamic_rendering === + + // wrapper function for command vkCmdBeginRenderingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderingKHR.html + template + void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command 
vkCmdBeginRenderingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderingKHR.html + template + void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndRenderingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderingKHR.html + template + void endRenderingKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_device_group === + + // wrapper function for command vkCmdSetDeviceMaskKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDeviceMaskKHR.html + template + void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDispatchBaseKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchBaseKHR.html + template + void dispatchBaseKHR( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_push_descriptor === + + // wrapper function for command vkCmdPushDescriptorSetKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetKHR.html + template + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSetKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetKHR.html + template + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDescriptorSetWithTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplateKHR.html + template + void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSetWithTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplateKHR.html + template + void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== 
VK_EXT_conditional_rendering === + + // wrapper function for command vkCmdBeginConditionalRenderingEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginConditionalRenderingEXT.html + template + void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginConditionalRenderingEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginConditionalRenderingEXT.html + template + void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndConditionalRenderingEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndConditionalRenderingEXT.html + template + void endConditionalRenderingEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_clip_space_w_scaling === + + // wrapper function for command vkCmdSetViewportWScalingNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWScalingNV.html + template + void setViewportWScalingNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetViewportWScalingNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWScalingNV.html + template + void setViewportWScalingNV( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportWScalings, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_discard_rectangles === + + // wrapper function for command vkCmdSetDiscardRectangleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDiscardRectangleEXT.html + template + void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetDiscardRectangleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDiscardRectangleEXT.html + template + void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + VULKAN_HPP_NAMESPACE::ArrayProxy const & discardRectangles, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDiscardRectangleEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDiscardRectangleEnableEXT.html + template + void setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDiscardRectangleModeEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDiscardRectangleModeEXT.html + template + void setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_create_renderpass2 === + + // wrapper function for command vkCmdBeginRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass2KHR.html + template + void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass2KHR.html + template + void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdNextSubpass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass2KHR.html + template + void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdNextSubpass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass2KHR.html + template + void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass2KHR.html + template + void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEndRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass2KHR.html + template + void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_debug_utils === + + // wrapper function for command vkCmdBeginDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginDebugUtilsLabelEXT.html + template + void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginDebugUtilsLabelEXT.html + template + void 
beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndDebugUtilsLabelEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndDebugUtilsLabelEXT.html + template + void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdInsertDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdInsertDebugUtilsLabelEXT.html + template + void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdInsertDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdInsertDebugUtilsLabelEXT.html + template + void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + // wrapper function for command vkCmdInitializeGraphScratchMemoryAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdInitializeGraphScratchMemoryAMDX.html + template + void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDispatchGraphAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphAMDX.html + template + void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDispatchGraphAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphAMDX.html + template + void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDispatchGraphIndirectAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphIndirectAMDX.html + template + void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDispatchGraphIndirectAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphIndirectAMDX.html + template + void 
dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDispatchGraphIndirectCountAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphIndirectCountAMDX.html + template + void dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + VULKAN_HPP_NAMESPACE::DeviceAddress countInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + + // wrapper function for command vkCmdSetSampleLocationsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleLocationsEXT.html + template + void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetSampleLocationsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleLocationsEXT.html + template + void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_acceleration_structure === + + // wrapper function for command vkCmdBuildAccelerationStructuresKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructuresKHR.html + template + void buildAccelerationStructuresKHR( uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBuildAccelerationStructuresKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructuresKHR.html + template + void buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBuildAccelerationStructuresIndirectKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructuresIndirectKHR.html + template + void buildAccelerationStructuresIndirectKHR( uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses, + const uint32_t * pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBuildAccelerationStructuresIndirectKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructuresIndirectKHR.html + template + void buildAccelerationStructuresIndirectKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectDeviceAddresses, + VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectStrides, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pMaxPrimitiveCounts, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureKHR.html + template + void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureKHR.html + template + void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyAccelerationStructureToMemoryKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureToMemoryKHR.html + template + void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyAccelerationStructureToMemoryKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureToMemoryKHR.html + template + void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyMemoryToAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToAccelerationStructureKHR.html + template + void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyMemoryToAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToAccelerationStructureKHR.html + template + void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteAccelerationStructuresPropertiesKHR.html + template + void writeAccelerationStructuresPropertiesKHR( uint32_t 
accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteAccelerationStructuresPropertiesKHR.html + template + void writeAccelerationStructuresPropertiesKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_ray_tracing_pipeline === + + // wrapper function for command vkCmdTraceRaysKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysKHR.html + template + void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdTraceRaysKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysKHR.html + template + void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdTraceRaysIndirectKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysIndirectKHR.html + template + void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdTraceRaysIndirectKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysIndirectKHR.html + template + void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const 
VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetRayTracingPipelineStackSizeKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRayTracingPipelineStackSizeKHR.html + template + void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_shading_rate_image === + + // wrapper function for command vkCmdBindShadingRateImageNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindShadingRateImageNV.html + template + void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetViewportShadingRatePaletteNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportShadingRatePaletteNV.html + template + void setViewportShadingRatePaletteNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetViewportShadingRatePaletteNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportShadingRatePaletteNV.html + template + void setViewportShadingRatePaletteNV( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shadingRatePalettes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetCoarseSampleOrderNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoarseSampleOrderNV.html + template + void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetCoarseSampleOrderNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoarseSampleOrderNV.html + template + void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + VULKAN_HPP_NAMESPACE::ArrayProxy const & customSampleOrders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_ray_tracing === + + // wrapper function for command vkCmdBuildAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructureNV.html + template + void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + 
VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBuildAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructureNV.html + template + void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureNV.html + template + void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdTraceRaysNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysNV.html + template + void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdWriteAccelerationStructuresPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteAccelerationStructuresPropertiesNV.html + template + void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdWriteAccelerationStructuresPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteAccelerationStructuresPropertiesNV.html + template + void writeAccelerationStructuresPropertiesNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + 
VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_draw_indirect_count === + + // wrapper function for command vkCmdDrawIndirectCountKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectCountKHR.html + template + void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawIndexedIndirectCountKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirectCountKHR.html + template + void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_AMD_buffer_marker === + + // wrapper function for command vkCmdWriteBufferMarkerAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteBufferMarkerAMD.html + template + void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdWriteBufferMarker2AMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteBufferMarker2AMD.html + template + void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_mesh_shader === + + // wrapper function for command vkCmdDrawMeshTasksNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksNV.html + template + void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawMeshTasksIndirectNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectNV.html + template + void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawMeshTasksIndirectCountNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectCountNV.html + template + void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== 
VK_NV_scissor_exclusive === + + // wrapper function for command vkCmdSetExclusiveScissorEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExclusiveScissorEnableNV.html + template + void setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VULKAN_HPP_NAMESPACE::Bool32 * pExclusiveScissorEnables, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetExclusiveScissorEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExclusiveScissorEnableNV.html + template + void setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, + VULKAN_HPP_NAMESPACE::ArrayProxy const & exclusiveScissorEnables, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetExclusiveScissorNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExclusiveScissorNV.html + template + void setExclusiveScissorNV( uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetExclusiveScissorNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExclusiveScissorNV.html + template + void setExclusiveScissorNV( uint32_t firstExclusiveScissor, + VULKAN_HPP_NAMESPACE::ArrayProxy const & exclusiveScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_device_diagnostic_checkpoints === + + // wrapper function for command vkCmdSetCheckpointNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCheckpointNV.html + template + void setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetCheckpointNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCheckpointNV.html + template + void setCheckpointNV( CheckpointMarkerType const & checkpointMarker, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_INTEL_performance_query === + + // wrapper function for command vkCmdSetPerformanceMarkerINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceMarkerINTEL.html + template + VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetPerformanceMarkerINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceMarkerINTEL.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetPerformanceStreamMarkerINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceStreamMarkerINTEL.html + template + VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetPerformanceStreamMarkerINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceStreamMarkerINTEL.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetPerformanceOverrideINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceOverrideINTEL.html + template + VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetPerformanceOverrideINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceOverrideINTEL.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_fragment_shading_rate === + + // wrapper function for command vkCmdSetFragmentShadingRateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFragmentShadingRateKHR.html + template + void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetFragmentShadingRateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFragmentShadingRateKHR.html + template + void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_dynamic_rendering_local_read === + + // wrapper function for command vkCmdSetRenderingAttachmentLocationsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingAttachmentLocationsKHR.html + template + void setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo * pLocationInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetRenderingAttachmentLocationsKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingAttachmentLocationsKHR.html + template + void setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetRenderingInputAttachmentIndicesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingInputAttachmentIndicesKHR.html + template + void setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetRenderingInputAttachmentIndicesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingInputAttachmentIndicesKHR.html + template + void setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_line_rasterization === + + // wrapper function for command vkCmdSetLineStippleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStippleEXT.html + template + void setLineStippleEXT( uint32_t lineStippleFactor, + uint16_t lineStipplePattern, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_extended_dynamic_state === + + // wrapper function for command vkCmdSetCullModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCullModeEXT.html + template + void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetFrontFaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFrontFaceEXT.html + template + void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetPrimitiveTopologyEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveTopologyEXT.html + template + void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetViewportWithCountEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWithCountEXT.html + template + void setViewportWithCountEXT( uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetViewportWithCountEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWithCountEXT.html + template + void setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + 
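For reference, a minimal usage sketch of the enhanced-mode VK_EXT_extended_dynamic_state overloads declared above. This is not part of the patch; it assumes vulkan.hpp with the default dispatcher already initialized (or static linking against the loader), a device created with VK_EXT_extended_dynamic_state enabled, and a pipeline created with the corresponding dynamic states. The function and helper names are taken from the declarations in this hunk; everything else is illustrative.

    #include <vulkan/vulkan.hpp>

    // Record dynamic viewport/scissor state with the *WithCountEXT commands.
    // The enhanced-mode overloads take an ArrayProxy, so a single object (or an
    // initializer list) can be passed directly; the count is derived from it.
    void recordDynamicViewportState( vk::CommandBuffer cmd, vk::Extent2D extent )
    {
      vk::Viewport viewport{ 0.0f, 0.0f,
                             static_cast<float>( extent.width ),
                             static_cast<float>( extent.height ),
                             0.0f, 1.0f };
      vk::Rect2D   scissor{ { 0, 0 }, extent };

      cmd.setViewportWithCountEXT( viewport );  // viewport count taken from the proxy
      cmd.setScissorWithCountEXT( scissor );    // scissor count taken from the proxy
    }
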
// wrapper function for command vkCmdSetScissorWithCountEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissorWithCountEXT.html + template + void setScissorWithCountEXT( uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetScissorWithCountEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissorWithCountEXT.html + template + void setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindVertexBuffers2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers2EXT.html + template + void bindVertexBuffers2EXT( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindVertexBuffers2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers2EXT.html + template + void bindVertexBuffers2EXT( + uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::ArrayProxy const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDepthTestEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthTestEnableEXT.html + template + void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthWriteEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthWriteEnableEXT.html + template + void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthCompareOpEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthCompareOpEXT.html + template + void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthBoundsTestEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBoundsTestEnableEXT.html + template + void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetStencilTestEnableEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilTestEnableEXT.html + template + void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetStencilOpEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilOpEXT.html + template + void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_device_generated_commands === + + // wrapper function for command vkCmdPreprocessGeneratedCommandsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPreprocessGeneratedCommandsNV.html + template + void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPreprocessGeneratedCommandsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPreprocessGeneratedCommandsNV.html + template + void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdExecuteGeneratedCommandsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteGeneratedCommandsNV.html + template + void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdExecuteGeneratedCommandsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteGeneratedCommandsNV.html + template + void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindPipelineShaderGroupNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindPipelineShaderGroupNV.html + template + void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t groupIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_depth_bias_control === + + // wrapper function for command vkCmdSetDepthBias2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBias2EXT.html + template + void setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT * pDepthBiasInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command 
vkCmdSetDepthBias2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBias2EXT.html + template + void setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_video_encode_queue === + + // wrapper function for command vkCmdEncodeVideoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEncodeVideoKHR.html + template + void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEncodeVideoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEncodeVideoKHR.html + template + void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + // wrapper function for command vkCmdCudaLaunchKernelNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCudaLaunchKernelNV.html + template + void cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV * pLaunchInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCudaLaunchKernelNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCudaLaunchKernelNV.html + template + void cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_QCOM_tile_shading === + + // wrapper function for command vkCmdDispatchTileQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchTileQCOM.html + template + void dispatchTileQCOM( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBeginPerTileExecutionQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginPerTileExecutionQCOM.html + template + void beginPerTileExecutionQCOM( const VULKAN_HPP_NAMESPACE::PerTileBeginInfoQCOM * pPerTileBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginPerTileExecutionQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginPerTileExecutionQCOM.html + template + void beginPerTileExecutionQCOM( const VULKAN_HPP_NAMESPACE::PerTileBeginInfoQCOM & perTileBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndPerTileExecutionQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndPerTileExecutionQCOM.html + template + void endPerTileExecutionQCOM( const VULKAN_HPP_NAMESPACE::PerTileEndInfoQCOM * pPerTileEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEndPerTileExecutionQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndPerTileExecutionQCOM.html + template + void endPerTileExecutionQCOM( const VULKAN_HPP_NAMESPACE::PerTileEndInfoQCOM & perTileEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_synchronization2 === + + // wrapper function for command vkCmdSetEvent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent2KHR.html + template + void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetEvent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent2KHR.html + template + void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdResetEvent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetEvent2KHR.html + template + void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdWaitEvents2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents2KHR.html + template + void waitEvents2KHR( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdWaitEvents2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents2KHR.html + template + void waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPipelineBarrier2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier2KHR.html + template + void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPipelineBarrier2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier2KHR.html + template + void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdWriteTimestamp2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteTimestamp2KHR.html + template + void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, 
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_descriptor_buffer === + + // wrapper function for command vkCmdBindDescriptorBuffersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBuffersEXT.html + template + void bindDescriptorBuffersEXT( uint32_t bufferCount, + const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT * pBindingInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindDescriptorBuffersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBuffersEXT.html + template + void bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindingInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDescriptorBufferOffsetsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDescriptorBufferOffsetsEXT.html + template + void setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + uint32_t setCount, + const uint32_t * pBufferIndices, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetDescriptorBufferOffsetsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDescriptorBufferOffsetsEXT.html + template + void setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferIndices, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindDescriptorBufferEmbeddedSamplersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBufferEmbeddedSamplersEXT.html + template + void bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_fragment_shading_rate_enums === + + // wrapper function for command vkCmdSetFragmentShadingRateEnumNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFragmentShadingRateEnumNV.html + template + void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_mesh_shader === + + // wrapper function for command vkCmdDrawMeshTasksEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksEXT.html + template + void drawMeshTasksEXT( uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawMeshTasksIndirectEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectEXT.html + template + void drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawMeshTasksIndirectCountEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectCountEXT.html + template + void drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_copy_commands2 === + + // wrapper function for command vkCmdCopyBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer2KHR.html + template + void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer2KHR.html + template + void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage2KHR.html + template + void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage2KHR.html + template + void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyBufferToImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage2KHR.html + template + void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyBufferToImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage2KHR.html + template + void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyImageToBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer2KHR.html + template + 
void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyImageToBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer2KHR.html + template + void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBlitImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage2KHR.html + template + void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBlitImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage2KHR.html + template + void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdResolveImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage2KHR.html + template + void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdResolveImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage2KHR.html + template + void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_vertex_input_dynamic_state === + + // wrapper function for command vkCmdSetVertexInputEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetVertexInputEXT.html + template + void setVertexInputEXT( uint32_t vertexBindingDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetVertexInputEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetVertexInputEXT.html + template + void + setVertexInputEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexBindingDescriptions, + VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexAttributeDescriptions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_HUAWEI_subpass_shading === + + // wrapper function for command vkCmdSubpassShadingHUAWEI, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSubpassShadingHUAWEI.html + template + void subpassShadingHUAWEI( Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_HUAWEI_invocation_mask === + + // wrapper function for command vkCmdBindInvocationMaskHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindInvocationMaskHUAWEI.html + template + void bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_extended_dynamic_state2 === + + // wrapper function for command vkCmdSetPatchControlPointsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPatchControlPointsEXT.html + template + void setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetRasterizerDiscardEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizerDiscardEnableEXT.html + template + void setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthBiasEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBiasEnableEXT.html + template + void setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetLogicOpEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLogicOpEXT.html + template + void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetPrimitiveRestartEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveRestartEnableEXT.html + template + void setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_color_write_enable === + + // wrapper function for command vkCmdSetColorWriteEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorWriteEnableEXT.html + template + void setColorWriteEnableEXT( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetColorWriteEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorWriteEnableEXT.html + template + void setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteEnables, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_ray_tracing_maintenance1 === + + // wrapper function for command vkCmdTraceRaysIndirect2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysIndirect2KHR.html + template + void traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_multi_draw === + + // 
wrapper function for command vkCmdDrawMultiEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMultiEXT.html + template + void drawMultiEXT( uint32_t drawCount, + const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDrawMultiEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMultiEXT.html + template + void drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & vertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDrawMultiIndexedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMultiIndexedEXT.html + template + void drawMultiIndexedEXT( uint32_t drawCount, + const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + const int32_t * pVertexOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDrawMultiIndexedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMultiIndexedEXT.html + template + void drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & indexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + Optional vertexOffset VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_opacity_micromap === + + // wrapper function for command vkCmdBuildMicromapsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildMicromapsEXT.html + template + void buildMicromapsEXT( uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBuildMicromapsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildMicromapsEXT.html + template + void buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMicromapEXT.html + template + void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMicromapEXT.html + template + void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyMicromapToMemoryEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMicromapToMemoryEXT.html + template + void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyMicromapToMemoryEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMicromapToMemoryEXT.html + template + void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyMemoryToMicromapEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToMicromapEXT.html + template + void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyMemoryToMicromapEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToMicromapEXT.html + template + void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdWriteMicromapsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteMicromapsPropertiesEXT.html + template + void writeMicromapsPropertiesEXT( uint32_t micromapCount, + const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdWriteMicromapsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteMicromapsPropertiesEXT.html + template + void writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_HUAWEI_cluster_culling_shader === + + // wrapper function for command vkCmdDrawClusterHUAWEI, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawClusterHUAWEI.html + template + void drawClusterHUAWEI( uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawClusterIndirectHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawClusterIndirectHUAWEI.html + template + void drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_copy_memory_indirect === + + // wrapper function for command vkCmdCopyMemoryIndirectNV, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryIndirectNV.html + template + void copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyMemoryToImageIndirectNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToImageIndirectNV.html + template + void copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyMemoryToImageIndirectNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToImageIndirectNV.html + template + void copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageSubresources, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_memory_decompression === + + // wrapper function for command vkCmdDecompressMemoryNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecompressMemoryNV.html + template + void decompressMemoryNV( uint32_t decompressRegionCount, + const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDecompressMemoryNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecompressMemoryNV.html + template + void decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & decompressMemoryRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDecompressMemoryIndirectCountNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecompressMemoryIndirectCountNV.html + template + void decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_device_generated_commands_compute === + + // wrapper function for command vkCmdUpdatePipelineIndirectBufferNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdUpdatePipelineIndirectBufferNV.html + template + void updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_extended_dynamic_state3 === + + // wrapper function for command vkCmdSetDepthClampEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClampEnableEXT.html + 
template + void setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetPolygonModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPolygonModeEXT.html + template + void setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetRasterizationSamplesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizationSamplesEXT.html + template + void setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetSampleMaskEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleMaskEXT.html + template + void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetSampleMaskEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleMaskEXT.html + template + void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sampleMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetAlphaToCoverageEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetAlphaToCoverageEnableEXT.html + template + void setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetAlphaToOneEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetAlphaToOneEnableEXT.html + template + void setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetLogicOpEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLogicOpEnableEXT.html + template + void setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetColorBlendEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendEnableEXT.html + template + void setColorBlendEnableEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetColorBlendEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendEnableEXT.html + template + void setColorBlendEnableEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEnables, + Dispatch const & d 
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    // wrapper function for command vkCmdSetColorBlendEquationEXT, see
+    // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendEquationEXT.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setColorBlendEquationEXT( uint32_t firstAttachment,
+                                   uint32_t attachmentCount,
+                                   const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations,
+                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    // wrapper function for command vkCmdSetColorBlendEquationEXT, see
+    // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendEquationEXT.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setColorBlendEquationEXT( uint32_t firstAttachment,
+                                   VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations,
+                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    // wrapper function for command vkCmdSetColorWriteMaskEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorWriteMaskEXT.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setColorWriteMaskEXT( uint32_t firstAttachment,
+                               uint32_t attachmentCount,
+                               const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    // wrapper function for command vkCmdSetColorWriteMaskEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorWriteMaskEXT.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setColorWriteMaskEXT( uint32_t firstAttachment,
+                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    // wrapper function for command vkCmdSetTessellationDomainOriginEXT, see
+    // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetTessellationDomainOriginEXT.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin,
+                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    // wrapper function for command vkCmdSetRasterizationStreamEXT, see
+    // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizationStreamEXT.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    // wrapper function for command vkCmdSetConservativeRasterizationModeEXT, see
+    // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetConservativeRasterizationModeEXT.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode,
+                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    // wrapper function for command vkCmdSetExtraPrimitiveOverestimationSizeEXT, see
+    // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExtraPrimitiveOverestimationSizeEXT.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize,
+                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    // wrapper function for command vkCmdSetDepthClipEnableEXT, see
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClipEnableEXT.html + template + void setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetSampleLocationsEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleLocationsEnableEXT.html + template + void setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetColorBlendAdvancedEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendAdvancedEXT.html + template + void setColorBlendAdvancedEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetColorBlendAdvancedEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendAdvancedEXT.html + template + void setColorBlendAdvancedEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendAdvanced, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetProvokingVertexModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetProvokingVertexModeEXT.html + template + void setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetLineRasterizationModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineRasterizationModeEXT.html + template + void setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetLineStippleEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStippleEnableEXT.html + template + void setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthClipNegativeOneToOneEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClipNegativeOneToOneEXT.html + template + void setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetViewportWScalingEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWScalingEnableNV.html + template + void setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetViewportSwizzleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportSwizzleNV.html + 
template + void setViewportSwizzleNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetViewportSwizzleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportSwizzleNV.html + template + void setViewportSwizzleNV( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportSwizzles, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetCoverageToColorEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageToColorEnableNV.html + template + void setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetCoverageToColorLocationNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageToColorLocationNV.html + template + void setCoverageToColorLocationNV( uint32_t coverageToColorLocation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetCoverageModulationModeNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageModulationModeNV.html + template + void setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetCoverageModulationTableEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageModulationTableEnableNV.html + template + void setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetCoverageModulationTableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageModulationTableNV.html + template + void setCoverageModulationTableNV( uint32_t coverageModulationTableCount, + const float * pCoverageModulationTable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetCoverageModulationTableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageModulationTableNV.html + template + void setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & coverageModulationTable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetShadingRateImageEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetShadingRateImageEnableNV.html + template + void setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetRepresentativeFragmentTestEnableNV, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRepresentativeFragmentTestEnableNV.html + template + void setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetCoverageReductionModeNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageReductionModeNV.html + template + void setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_optical_flow === + + // wrapper function for command vkCmdOpticalFlowExecuteNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdOpticalFlowExecuteNV.html + template + void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdOpticalFlowExecuteNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdOpticalFlowExecuteNV.html + template + void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_maintenance5 === + + // wrapper function for command vkCmdBindIndexBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindIndexBuffer2KHR.html + template + void bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_shader_object === + + // wrapper function for command vkCmdBindShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindShadersEXT.html + template + void bindShadersEXT( uint32_t stageCount, + const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits * pStages, + const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindShadersEXT.html + template + void bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & stages, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shaders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDepthClampRangeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClampRangeEXT.html + template + void setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, + const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetDepthClampRangeEXT, 
see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClampRangeEXT.html + template + void setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, + Optional depthClampRange VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_cooperative_vector === + + // wrapper function for command vkCmdConvertCooperativeVectorMatrixNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdConvertCooperativeVectorMatrixNV.html + template + void convertCooperativeVectorMatrixNV( uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV * pInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdConvertCooperativeVectorMatrixNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdConvertCooperativeVectorMatrixNV.html + template + void convertCooperativeVectorMatrixNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + + // wrapper function for command vkCmdSetAttachmentFeedbackLoopEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetAttachmentFeedbackLoopEnableEXT.html + template + void setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_line_rasterization === + + // wrapper function for command vkCmdSetLineStippleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStippleKHR.html + template + void setLineStippleKHR( uint32_t lineStippleFactor, + uint16_t lineStipplePattern, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance6 === + + // wrapper function for command vkCmdBindDescriptorSets2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets2KHR.html + template + void bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo * pBindDescriptorSetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindDescriptorSets2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets2KHR.html + template + void bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushConstants2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants2KHR.html + template + void pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo * pPushConstantsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushConstants2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants2KHR.html + 
template + void pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDescriptorSet2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet2KHR.html + template + void pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo * pPushDescriptorSetInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSet2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet2KHR.html + template + void pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDescriptorSetWithTemplate2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate2KHR.html + template + void pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSetWithTemplate2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate2KHR.html + template + void pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDescriptorBufferOffsets2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDescriptorBufferOffsets2EXT.html + template + void setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetDescriptorBufferOffsets2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDescriptorBufferOffsets2EXT.html + template + void setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindDescriptorBufferEmbeddedSamplers2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBufferEmbeddedSamplers2EXT.html + template + void bindDescriptorBufferEmbeddedSamplers2EXT( + const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindDescriptorBufferEmbeddedSamplers2EXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBufferEmbeddedSamplers2EXT.html + template + void bindDescriptorBufferEmbeddedSamplers2EXT( + const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_QCOM_tile_memory_heap === + + // wrapper function for command vkCmdBindTileMemoryQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindTileMemoryQCOM.html + template + void bindTileMemoryQCOM( const VULKAN_HPP_NAMESPACE::TileMemoryBindInfoQCOM * pTileMemoryBindInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindTileMemoryQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindTileMemoryQCOM.html + template + void bindTileMemoryQCOM( Optional tileMemoryBindInfo VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_cluster_acceleration_structure === + + // wrapper function for command vkCmdBuildClusterAccelerationStructureIndirectNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildClusterAccelerationStructureIndirectNV.html + template + void buildClusterAccelerationStructureIndirectNV( const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureCommandsInfoNV * pCommandInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBuildClusterAccelerationStructureIndirectNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildClusterAccelerationStructureIndirectNV.html + template + void buildClusterAccelerationStructureIndirectNV( const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureCommandsInfoNV & commandInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_partitioned_acceleration_structure === + + // wrapper function for command vkCmdBuildPartitionedAccelerationStructuresNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildPartitionedAccelerationStructuresNV.html + template + void buildPartitionedAccelerationStructuresNV( const VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureInfoNV * pBuildInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBuildPartitionedAccelerationStructuresNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildPartitionedAccelerationStructuresNV.html + template + void buildPartitionedAccelerationStructuresNV( const VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureInfoNV & buildInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_device_generated_commands === + + // wrapper function for command vkCmdPreprocessGeneratedCommandsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPreprocessGeneratedCommandsEXT.html + template 
+ void preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT * pGeneratedCommandsInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPreprocessGeneratedCommandsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPreprocessGeneratedCommandsEXT.html + template + void preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdExecuteGeneratedCommandsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteGeneratedCommandsEXT.html + template + void executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT * pGeneratedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdExecuteGeneratedCommandsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteGeneratedCommandsEXT.html + template + void executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_fragment_density_map_offset === + + // wrapper function for command vkCmdEndRendering2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRendering2EXT.html + template + void endRendering2EXT( const VULKAN_HPP_NAMESPACE::RenderingEndInfoEXT * pRenderingEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEndRendering2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRendering2EXT.html + template + void endRendering2EXT( Optional renderingEndInfo VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer == VK_NULL_HANDLE; + } + + private: + VkCommandBuffer m_commandBuffer = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandBuffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandBuffer; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandBuffer; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkDeviceMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceMemory.html + class DeviceMemory + 
{ + public: + using CType = VkDeviceMemory; + using NativeType = VkDeviceMemory; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory; + + public: + DeviceMemory() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + DeviceMemory( DeviceMemory const & rhs ) = default; + DeviceMemory & operator=( DeviceMemory const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DeviceMemory( DeviceMemory && rhs ) = default; + DeviceMemory & operator=( DeviceMemory && rhs ) = default; +#else + DeviceMemory( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT : m_deviceMemory( VULKAN_HPP_NAMESPACE::exchange( rhs.m_deviceMemory, {} ) ) {} + + DeviceMemory & operator=( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT + { + m_deviceMemory = VULKAN_HPP_NAMESPACE::exchange( rhs.m_deviceMemory, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT : m_deviceMemory( deviceMemory ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + DeviceMemory & operator=( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT + { + m_deviceMemory = deviceMemory; + return *this; + } +#endif + + DeviceMemory & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_deviceMemory = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory == VK_NULL_HANDLE; + } + + private: + VkDeviceMemory m_deviceMemory = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DeviceMemory; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DeviceMemory; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DeviceMemory; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionKHR.html + class VideoSessionKHR + { + public: + using CType = VkVideoSessionKHR; + using NativeType = VkVideoSessionKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VideoSessionKHR() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + VideoSessionKHR( VideoSessionKHR const & rhs ) = default; + VideoSessionKHR & operator=( VideoSessionKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + VideoSessionKHR( VideoSessionKHR && rhs ) = default; + VideoSessionKHR & operator=( VideoSessionKHR && rhs ) = default; +#else + VideoSessionKHR( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT : m_videoSessionKHR( VULKAN_HPP_NAMESPACE::exchange( 
rhs.m_videoSessionKHR, {} ) ) {} + + VideoSessionKHR & operator=( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_videoSessionKHR, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR VideoSessionKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT VideoSessionKHR( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT : m_videoSessionKHR( videoSessionKHR ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + VideoSessionKHR & operator=( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionKHR = videoSessionKHR; + return *this; + } +#endif + + VideoSessionKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionKHR = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionKHR() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR == VK_NULL_HANDLE; + } + + private: + VkVideoSessionKHR m_videoSessionKHR = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::VideoSessionKHR; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::VideoSessionKHR; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkDeferredOperationKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeferredOperationKHR.html + class DeferredOperationKHR + { + public: + using CType = VkDeferredOperationKHR; + using NativeType = VkDeferredOperationKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + DeferredOperationKHR() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + DeferredOperationKHR( DeferredOperationKHR const & rhs ) = default; + DeferredOperationKHR & operator=( DeferredOperationKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DeferredOperationKHR( DeferredOperationKHR && rhs ) = default; + DeferredOperationKHR & operator=( DeferredOperationKHR && rhs ) = default; +#else + DeferredOperationKHR( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_deferredOperationKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_deferredOperationKHR, {} ) ) + { + } + + DeferredOperationKHR & operator=( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_deferredOperationKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_deferredOperationKHR, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR DeferredOperationKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DeferredOperationKHR( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT + : m_deferredOperationKHR( deferredOperationKHR ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + DeferredOperationKHR & operator=( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT + { + m_deferredOperationKHR = deferredOperationKHR; + return *this; + } +#endif + + DeferredOperationKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + 
m_deferredOperationKHR = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeferredOperationKHR() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR == VK_NULL_HANDLE; + } + + private: + VkDeferredOperationKHR m_deferredOperationKHR = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkBufferCollectionFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionFUCHSIA.html +#if defined( VK_USE_PLATFORM_FUCHSIA ) + class BufferCollectionFUCHSIA + { + public: + using CType = VkBufferCollectionFUCHSIA; + using NativeType = VkBufferCollectionFUCHSIA; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA; + + public: + BufferCollectionFUCHSIA() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + BufferCollectionFUCHSIA( BufferCollectionFUCHSIA const & rhs ) = default; + BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA const & rhs ) = default; + +# if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + BufferCollectionFUCHSIA( BufferCollectionFUCHSIA && rhs ) = default; + BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA && rhs ) = default; +# else + BufferCollectionFUCHSIA( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT + : m_bufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::exchange( rhs.m_bufferCollectionFUCHSIA, {} ) ) + { + } + + BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT + { + m_bufferCollectionFUCHSIA = VULKAN_HPP_NAMESPACE::exchange( rhs.m_bufferCollectionFUCHSIA, {} ); + return *this; + } +# endif + + VULKAN_HPP_CONSTEXPR BufferCollectionFUCHSIA( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT BufferCollectionFUCHSIA( VkBufferCollectionFUCHSIA bufferCollectionFUCHSIA ) VULKAN_HPP_NOEXCEPT + : m_bufferCollectionFUCHSIA( bufferCollectionFUCHSIA ) + { + } + +# if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + BufferCollectionFUCHSIA & operator=( VkBufferCollectionFUCHSIA bufferCollectionFUCHSIA ) VULKAN_HPP_NOEXCEPT + { + m_bufferCollectionFUCHSIA = bufferCollectionFUCHSIA; + return *this; + } +# endif + + BufferCollectionFUCHSIA & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_bufferCollectionFUCHSIA = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferCollectionFUCHSIA() const VULKAN_HPP_NOEXCEPT + { + return m_bufferCollectionFUCHSIA; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_bufferCollectionFUCHSIA != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_bufferCollectionFUCHSIA == VK_NULL_HANDLE; + } + + private: + VkBufferCollectionFUCHSIA m_bufferCollectionFUCHSIA = {}; + }; + + template <> + struct CppType + 
{ + using Type = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; + }; + +# if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; + }; +# endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + // wrapper class for handle VkBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferView.html + class BufferView + { + public: + using CType = VkBufferView; + using NativeType = VkBufferView; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView; + + public: + BufferView() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + BufferView( BufferView const & rhs ) = default; + BufferView & operator=( BufferView const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + BufferView( BufferView && rhs ) = default; + BufferView & operator=( BufferView && rhs ) = default; +#else + BufferView( BufferView && rhs ) VULKAN_HPP_NOEXCEPT : m_bufferView( VULKAN_HPP_NAMESPACE::exchange( rhs.m_bufferView, {} ) ) {} + + BufferView & operator=( BufferView && rhs ) VULKAN_HPP_NOEXCEPT + { + m_bufferView = VULKAN_HPP_NAMESPACE::exchange( rhs.m_bufferView, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT : m_bufferView( bufferView ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + BufferView & operator=( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT + { + m_bufferView = bufferView; + return *this; + } +#endif + + BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_bufferView = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView == VK_NULL_HANDLE; + } + + private: + VkBufferView m_bufferView = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferView; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferView; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferView; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandPool.html + class CommandPool + { + public: + using CType = VkCommandPool; + using NativeType = VkCommandPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool; + + public: + CommandPool() 
VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + CommandPool( CommandPool const & rhs ) = default; + CommandPool & operator=( CommandPool const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + CommandPool( CommandPool && rhs ) = default; + CommandPool & operator=( CommandPool && rhs ) = default; +#else + CommandPool( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT : m_commandPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandPool, {} ) ) {} + + CommandPool & operator=( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT + { + m_commandPool = VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandPool, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT : m_commandPool( commandPool ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + CommandPool & operator=( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT + { + m_commandPool = commandPool; + return *this; + } +#endif + + CommandPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_commandPool = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool == VK_NULL_HANDLE; + } + + private: + VkCommandPool m_commandPool = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandPool; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandPool; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkPipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCache.html + class PipelineCache + { + public: + using CType = VkPipelineCache; + using NativeType = VkPipelineCache; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache; + + public: + PipelineCache() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + PipelineCache( PipelineCache const & rhs ) = default; + PipelineCache & operator=( PipelineCache const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + PipelineCache( PipelineCache && rhs ) = default; + PipelineCache & operator=( PipelineCache && rhs ) = default; +#else + PipelineCache( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT : m_pipelineCache( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineCache, {} ) ) {} + + PipelineCache & operator=( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT + { + m_pipelineCache = VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineCache, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT : m_pipelineCache( pipelineCache ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + PipelineCache & operator=( 
VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT + { + m_pipelineCache = pipelineCache; + return *this; + } +#endif + + PipelineCache & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipelineCache = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache == VK_NULL_HANDLE; + } + + private: + VkPipelineCache m_pipelineCache = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineCache; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineCache; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineCache; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuFunctionNVX.html + class CuFunctionNVX + { + public: + using CType = VkCuFunctionNVX; + using NativeType = VkCuFunctionNVX; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX; + + public: + CuFunctionNVX() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + CuFunctionNVX( CuFunctionNVX const & rhs ) = default; + CuFunctionNVX & operator=( CuFunctionNVX const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + CuFunctionNVX( CuFunctionNVX && rhs ) = default; + CuFunctionNVX & operator=( CuFunctionNVX && rhs ) = default; +#else + CuFunctionNVX( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT : m_cuFunctionNVX( VULKAN_HPP_NAMESPACE::exchange( rhs.m_cuFunctionNVX, {} ) ) {} + + CuFunctionNVX & operator=( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT + { + m_cuFunctionNVX = VULKAN_HPP_NAMESPACE::exchange( rhs.m_cuFunctionNVX, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR CuFunctionNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CuFunctionNVX( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT : m_cuFunctionNVX( cuFunctionNVX ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + CuFunctionNVX & operator=( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT + { + m_cuFunctionNVX = cuFunctionNVX; + return *this; + } +#endif + + CuFunctionNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_cuFunctionNVX = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuFunctionNVX() const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX == VK_NULL_HANDLE; + } + + private: + VkCuFunctionNVX m_cuFunctionNVX = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = 
VULKAN_HPP_NAMESPACE::CuFunctionNVX; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuModuleNVX.html + class CuModuleNVX + { + public: + using CType = VkCuModuleNVX; + using NativeType = VkCuModuleNVX; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX; + + public: + CuModuleNVX() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + CuModuleNVX( CuModuleNVX const & rhs ) = default; + CuModuleNVX & operator=( CuModuleNVX const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + CuModuleNVX( CuModuleNVX && rhs ) = default; + CuModuleNVX & operator=( CuModuleNVX && rhs ) = default; +#else + CuModuleNVX( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT : m_cuModuleNVX( VULKAN_HPP_NAMESPACE::exchange( rhs.m_cuModuleNVX, {} ) ) {} + + CuModuleNVX & operator=( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT + { + m_cuModuleNVX = VULKAN_HPP_NAMESPACE::exchange( rhs.m_cuModuleNVX, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR CuModuleNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CuModuleNVX( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT : m_cuModuleNVX( cuModuleNVX ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + CuModuleNVX & operator=( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT + { + m_cuModuleNVX = cuModuleNVX; + return *this; + } +#endif + + CuModuleNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_cuModuleNVX = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuModuleNVX() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX == VK_NULL_HANDLE; + } + + private: + VkCuModuleNVX m_cuModuleNVX = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCudaFunctionNV.html +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + class CudaFunctionNV + { + public: + using CType = VkCudaFunctionNV; + using NativeType = VkCudaFunctionNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaFunctionNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaFunctionNV; + + public: + CudaFunctionNV() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + CudaFunctionNV( CudaFunctionNV const & rhs ) = default; + CudaFunctionNV & operator=( CudaFunctionNV const & rhs ) = default; + +# if 
!defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + CudaFunctionNV( CudaFunctionNV && rhs ) = default; + CudaFunctionNV & operator=( CudaFunctionNV && rhs ) = default; +# else + CudaFunctionNV( CudaFunctionNV && rhs ) VULKAN_HPP_NOEXCEPT : m_cudaFunctionNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_cudaFunctionNV, {} ) ) {} + + CudaFunctionNV & operator=( CudaFunctionNV && rhs ) VULKAN_HPP_NOEXCEPT + { + m_cudaFunctionNV = VULKAN_HPP_NAMESPACE::exchange( rhs.m_cudaFunctionNV, {} ); + return *this; + } +# endif + + VULKAN_HPP_CONSTEXPR CudaFunctionNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CudaFunctionNV( VkCudaFunctionNV cudaFunctionNV ) VULKAN_HPP_NOEXCEPT : m_cudaFunctionNV( cudaFunctionNV ) {} + +# if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + CudaFunctionNV & operator=( VkCudaFunctionNV cudaFunctionNV ) VULKAN_HPP_NOEXCEPT + { + m_cudaFunctionNV = cudaFunctionNV; + return *this; + } +# endif + + CudaFunctionNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_cudaFunctionNV = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCudaFunctionNV() const VULKAN_HPP_NOEXCEPT + { + return m_cudaFunctionNV; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_cudaFunctionNV != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_cudaFunctionNV == VK_NULL_HANDLE; + } + + private: + VkCudaFunctionNV m_cudaFunctionNV = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CudaFunctionNV; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CudaFunctionNV; + }; + +# if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CudaFunctionNV; + }; +# endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + // wrapper class for handle VkCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCudaModuleNV.html +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + class CudaModuleNV + { + public: + using CType = VkCudaModuleNV; + using NativeType = VkCudaModuleNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaModuleNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaModuleNV; + + public: + CudaModuleNV() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + CudaModuleNV( CudaModuleNV const & rhs ) = default; + CudaModuleNV & operator=( CudaModuleNV const & rhs ) = default; + +# if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + CudaModuleNV( CudaModuleNV && rhs ) = default; + CudaModuleNV & operator=( CudaModuleNV && rhs ) = default; +# else + CudaModuleNV( CudaModuleNV && rhs ) VULKAN_HPP_NOEXCEPT : m_cudaModuleNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_cudaModuleNV, {} ) ) {} + + CudaModuleNV & operator=( CudaModuleNV && rhs ) VULKAN_HPP_NOEXCEPT + { + m_cudaModuleNV = VULKAN_HPP_NAMESPACE::exchange( rhs.m_cudaModuleNV, {} ); + return *this; + } +# endif + + VULKAN_HPP_CONSTEXPR CudaModuleNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CudaModuleNV( VkCudaModuleNV cudaModuleNV ) VULKAN_HPP_NOEXCEPT : m_cudaModuleNV( cudaModuleNV ) {} + +# if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + CudaModuleNV & operator=( VkCudaModuleNV cudaModuleNV ) 
VULKAN_HPP_NOEXCEPT + { + m_cudaModuleNV = cudaModuleNV; + return *this; + } +# endif + + CudaModuleNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_cudaModuleNV = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCudaModuleNV() const VULKAN_HPP_NOEXCEPT + { + return m_cudaModuleNV; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_cudaModuleNV != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_cudaModuleNV == VK_NULL_HANDLE; + } + + private: + VkCudaModuleNV m_cudaModuleNV = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CudaModuleNV; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CudaModuleNV; + }; + +# if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CudaModuleNV; + }; +# endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + // wrapper class for handle VkDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorPool.html + class DescriptorPool + { + public: + using CType = VkDescriptorPool; + using NativeType = VkDescriptorPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool; + + public: + DescriptorPool() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + DescriptorPool( DescriptorPool const & rhs ) = default; + DescriptorPool & operator=( DescriptorPool const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DescriptorPool( DescriptorPool && rhs ) = default; + DescriptorPool & operator=( DescriptorPool && rhs ) = default; +#else + DescriptorPool( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT : m_descriptorPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) ) {} + + DescriptorPool & operator=( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT + { + m_descriptorPool = VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorPool, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT : m_descriptorPool( descriptorPool ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + DescriptorPool & operator=( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT + { + m_descriptorPool = descriptorPool; + return *this; + } +#endif + + DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorPool = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool == VK_NULL_HANDLE; + } + + private: + VkDescriptorPool m_descriptorPool = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct 
CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkDescriptorSetLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayout.html + class DescriptorSetLayout + { + public: + using CType = VkDescriptorSetLayout; + using NativeType = VkDescriptorSetLayout; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout; + + public: + DescriptorSetLayout() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + DescriptorSetLayout( DescriptorSetLayout const & rhs ) = default; + DescriptorSetLayout & operator=( DescriptorSetLayout const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DescriptorSetLayout( DescriptorSetLayout && rhs ) = default; + DescriptorSetLayout & operator=( DescriptorSetLayout && rhs ) = default; +#else + DescriptorSetLayout( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT + : m_descriptorSetLayout( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} ) ) + { + } + + DescriptorSetLayout & operator=( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSetLayout = VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT + : m_descriptorSetLayout( descriptorSetLayout ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + DescriptorSetLayout & operator=( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSetLayout = descriptorSetLayout; + return *this; + } +#endif + + DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSetLayout = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout == VK_NULL_HANDLE; + } + + private: + VkDescriptorSetLayout m_descriptorSetLayout = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkExternalComputeQueueNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalComputeQueueNV.html + class ExternalComputeQueueNV + { + public: + using CType = VkExternalComputeQueueNV; + using NativeType = VkExternalComputeQueueNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eExternalComputeQueueNV; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + ExternalComputeQueueNV() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + ExternalComputeQueueNV( ExternalComputeQueueNV const & rhs ) = default; + ExternalComputeQueueNV & operator=( ExternalComputeQueueNV const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + ExternalComputeQueueNV( ExternalComputeQueueNV && rhs ) = default; + ExternalComputeQueueNV & operator=( ExternalComputeQueueNV && rhs ) = default; +#else + ExternalComputeQueueNV( ExternalComputeQueueNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_externalComputeQueueNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_externalComputeQueueNV, {} ) ) + { + } + + ExternalComputeQueueNV & operator=( ExternalComputeQueueNV && rhs ) VULKAN_HPP_NOEXCEPT + { + m_externalComputeQueueNV = VULKAN_HPP_NAMESPACE::exchange( rhs.m_externalComputeQueueNV, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR ExternalComputeQueueNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + ExternalComputeQueueNV( VkExternalComputeQueueNV externalComputeQueueNV ) VULKAN_HPP_NOEXCEPT : m_externalComputeQueueNV( externalComputeQueueNV ) {} + + ExternalComputeQueueNV & operator=( VkExternalComputeQueueNV externalComputeQueueNV ) VULKAN_HPP_NOEXCEPT + { + m_externalComputeQueueNV = externalComputeQueueNV; + return *this; + } + + ExternalComputeQueueNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_externalComputeQueueNV = {}; + return *this; + } + + //=== VK_NV_external_compute_queue === + + // wrapper function for command vkGetExternalComputeQueueDataNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExternalComputeQueueDataNV.html + template + void getData( VULKAN_HPP_NAMESPACE::ExternalComputeQueueDataParamsNV * params, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetExternalComputeQueueDataNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExternalComputeQueueDataNV.html + template + VULKAN_HPP_NODISCARD std::pair + getData( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + operator VkExternalComputeQueueNV() const VULKAN_HPP_NOEXCEPT + { + return m_externalComputeQueueNV; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_externalComputeQueueNV != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_externalComputeQueueNV == VK_NULL_HANDLE; + } + + private: + VkExternalComputeQueueNV m_externalComputeQueueNV = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebuffer.html + class Framebuffer + { + public: + using CType = VkFramebuffer; + using NativeType = VkFramebuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = 
VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer; + + public: + Framebuffer() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + Framebuffer( Framebuffer const & rhs ) = default; + Framebuffer & operator=( Framebuffer const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Framebuffer( Framebuffer && rhs ) = default; + Framebuffer & operator=( Framebuffer && rhs ) = default; +#else + Framebuffer( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT : m_framebuffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_framebuffer, {} ) ) {} + + Framebuffer & operator=( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT + { + m_framebuffer = VULKAN_HPP_NAMESPACE::exchange( rhs.m_framebuffer, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT : m_framebuffer( framebuffer ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + Framebuffer & operator=( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT + { + m_framebuffer = framebuffer; + return *this; + } +#endif + + Framebuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_framebuffer = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer == VK_NULL_HANDLE; + } + + private: + VkFramebuffer m_framebuffer = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkIndirectCommandsLayoutEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutEXT.html + class IndirectCommandsLayoutEXT + { + public: + using CType = VkIndirectCommandsLayoutEXT; + using NativeType = VkIndirectCommandsLayoutEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + IndirectCommandsLayoutEXT() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + IndirectCommandsLayoutEXT( IndirectCommandsLayoutEXT const & rhs ) = default; + IndirectCommandsLayoutEXT & operator=( IndirectCommandsLayoutEXT const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + IndirectCommandsLayoutEXT( IndirectCommandsLayoutEXT && rhs ) = default; + IndirectCommandsLayoutEXT & operator=( IndirectCommandsLayoutEXT && rhs ) = default; +#else + IndirectCommandsLayoutEXT( IndirectCommandsLayoutEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayoutEXT, {} ) ) + { + } 
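  // [Editorial note, not generated code] The handle wrappers above (DescriptorPool,
  // DescriptorSetLayout, ExternalComputeQueueNV, Framebuffer, ...) all follow the same
  // non-owning value-type pattern. A minimal usage sketch, assuming a valid vk::Device
  // `device` and a filled-in vk::FramebufferCreateInfo `createInfo` (both hypothetical here):
  //
  //   vk::Framebuffer fb;                                       // default-constructed == VK_NULL_HANDLE
  //   assert( !fb );                                            // operator! tests for the null handle
  //   fb = device.createFramebuffer( createInfo );              // enhanced-mode creation, throws on error
  //   if ( fb )                                                 // explicit operator bool
  //   {
  //     VkFramebuffer raw = static_cast<VkFramebuffer>( fb );   // VULKAN_HPP_TYPESAFE_EXPLICIT conversion to the C handle
  //     device.destroyFramebuffer( fb );
  //     fb = nullptr;                                           // operator=( std::nullptr_t ) resets the wrapper to null
  //   }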
+ + IndirectCommandsLayoutEXT & operator=( IndirectCommandsLayoutEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayoutEXT, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutEXT( VkIndirectCommandsLayoutEXT indirectCommandsLayoutEXT ) VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutEXT( indirectCommandsLayoutEXT ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + IndirectCommandsLayoutEXT & operator=( VkIndirectCommandsLayoutEXT indirectCommandsLayoutEXT ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutEXT = indirectCommandsLayoutEXT; + return *this; + } +#endif + + IndirectCommandsLayoutEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutEXT = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutEXT() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutEXT == VK_NULL_HANDLE; + } + + private: + VkIndirectCommandsLayoutEXT m_indirectCommandsLayoutEXT = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkIndirectCommandsLayoutNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutNV.html + class IndirectCommandsLayoutNV + { + public: + using CType = VkIndirectCommandsLayoutNV; + using NativeType = VkIndirectCommandsLayoutNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + IndirectCommandsLayoutNV() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + IndirectCommandsLayoutNV( IndirectCommandsLayoutNV const & rhs ) = default; + IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + IndirectCommandsLayoutNV( IndirectCommandsLayoutNV && rhs ) = default; + IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV && rhs ) = default; +#else + IndirectCommandsLayoutNV( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayoutNV, {} ) ) + { + } + + IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutNV = VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayoutNV, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutNV( 
indirectCommandsLayoutNV ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + IndirectCommandsLayoutNV & operator=( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutNV = indirectCommandsLayoutNV; + return *this; + } +#endif + + IndirectCommandsLayoutNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutNV = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV == VK_NULL_HANDLE; + } + + private: + VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkIndirectExecutionSetEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetEXT.html + class IndirectExecutionSetEXT + { + public: + using CType = VkIndirectExecutionSetEXT; + using NativeType = VkIndirectExecutionSetEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectExecutionSetEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + IndirectExecutionSetEXT() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + IndirectExecutionSetEXT( IndirectExecutionSetEXT const & rhs ) = default; + IndirectExecutionSetEXT & operator=( IndirectExecutionSetEXT const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + IndirectExecutionSetEXT( IndirectExecutionSetEXT && rhs ) = default; + IndirectExecutionSetEXT & operator=( IndirectExecutionSetEXT && rhs ) = default; +#else + IndirectExecutionSetEXT( IndirectExecutionSetEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_indirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectExecutionSetEXT, {} ) ) + { + } + + IndirectExecutionSetEXT & operator=( IndirectExecutionSetEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + m_indirectExecutionSetEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectExecutionSetEXT, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR IndirectExecutionSetEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT IndirectExecutionSetEXT( VkIndirectExecutionSetEXT indirectExecutionSetEXT ) VULKAN_HPP_NOEXCEPT + : m_indirectExecutionSetEXT( indirectExecutionSetEXT ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + IndirectExecutionSetEXT & operator=( VkIndirectExecutionSetEXT indirectExecutionSetEXT ) VULKAN_HPP_NOEXCEPT + { + m_indirectExecutionSetEXT = indirectExecutionSetEXT; + return *this; + } +#endif + + IndirectExecutionSetEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_indirectExecutionSetEXT = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectExecutionSetEXT() const VULKAN_HPP_NOEXCEPT + { + return 
m_indirectExecutionSetEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_indirectExecutionSetEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_indirectExecutionSetEXT == VK_NULL_HANDLE; + } + + private: + VkIndirectExecutionSetEXT m_indirectExecutionSetEXT = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkPrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPrivateDataSlot.html + class PrivateDataSlot + { + public: + using CType = VkPrivateDataSlot; + using NativeType = VkPrivateDataSlot; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + PrivateDataSlot() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + PrivateDataSlot( PrivateDataSlot const & rhs ) = default; + PrivateDataSlot & operator=( PrivateDataSlot const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + PrivateDataSlot( PrivateDataSlot && rhs ) = default; + PrivateDataSlot & operator=( PrivateDataSlot && rhs ) = default; +#else + PrivateDataSlot( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT : m_privateDataSlot( VULKAN_HPP_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ) ) {} + + PrivateDataSlot & operator=( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT + { + m_privateDataSlot = VULKAN_HPP_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR PrivateDataSlot( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PrivateDataSlot( VkPrivateDataSlot privateDataSlot ) VULKAN_HPP_NOEXCEPT : m_privateDataSlot( privateDataSlot ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + PrivateDataSlot & operator=( VkPrivateDataSlot privateDataSlot ) VULKAN_HPP_NOEXCEPT + { + m_privateDataSlot = privateDataSlot; + return *this; + } +#endif + + PrivateDataSlot & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_privateDataSlot = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPrivateDataSlot() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlot; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlot != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlot == VK_NULL_HANDLE; + } + + private: + VkPrivateDataSlot m_privateDataSlot = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PrivateDataSlot; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PrivateDataSlot; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + using PrivateDataSlotEXT = PrivateDataSlot; + + // wrapper class for handle VkRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPass.html + class RenderPass + { + public: + using CType = 
VkRenderPass; + using NativeType = VkRenderPass; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass; + + public: + RenderPass() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + RenderPass( RenderPass const & rhs ) = default; + RenderPass & operator=( RenderPass const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + RenderPass( RenderPass && rhs ) = default; + RenderPass & operator=( RenderPass && rhs ) = default; +#else + RenderPass( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT : m_renderPass( VULKAN_HPP_NAMESPACE::exchange( rhs.m_renderPass, {} ) ) {} + + RenderPass & operator=( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT + { + m_renderPass = VULKAN_HPP_NAMESPACE::exchange( rhs.m_renderPass, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT : m_renderPass( renderPass ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + RenderPass & operator=( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT + { + m_renderPass = renderPass; + return *this; + } +#endif + + RenderPass & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_renderPass = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass == VK_NULL_HANDLE; + } + + private: + VkRenderPass m_renderPass = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::RenderPass; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::RenderPass; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::RenderPass; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkSampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSampler.html + class Sampler + { + public: + using CType = VkSampler; + using NativeType = VkSampler; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler; + + public: + Sampler() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + Sampler( Sampler const & rhs ) = default; + Sampler & operator=( Sampler const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Sampler( Sampler && rhs ) = default; + Sampler & operator=( Sampler && rhs ) = default; +#else + Sampler( Sampler && rhs ) VULKAN_HPP_NOEXCEPT : m_sampler( VULKAN_HPP_NAMESPACE::exchange( rhs.m_sampler, {} ) ) {} + + Sampler & operator=( Sampler && rhs ) VULKAN_HPP_NOEXCEPT + { + m_sampler = VULKAN_HPP_NAMESPACE::exchange( rhs.m_sampler, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + 
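  // [Editorial note, not generated code] The CppType / isVulkanHandleType specializations
  // accompanying each handle are compile-time traits. A hedged sketch of how they are used
  // (the template argument lists were lost in this dump; in upstream vulkan.hpp the primary
  // template is CppType<EnumType, EnumType value>):
  //
  //   using T = vk::CppType<vk::ObjectType, vk::ObjectType::eRenderPass>::Type;           // -> vk::RenderPass
  //   static_assert( std::is_same<T, vk::RenderPass>::value, "trait maps the enum back to the wrapper" );
  //   static_assert( vk::isVulkanHandleType<vk::Sampler>::value, "handles are tagged for generic code" );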
VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT : m_sampler( sampler ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + Sampler & operator=( VkSampler sampler ) VULKAN_HPP_NOEXCEPT + { + m_sampler = sampler; + return *this; + } +#endif + + Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_sampler = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT + { + return m_sampler; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_sampler != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_sampler == VK_NULL_HANDLE; + } + + private: + VkSampler m_sampler = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Sampler; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Sampler; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Sampler; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkSamplerYcbcrConversion, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerYcbcrConversion.html + class SamplerYcbcrConversion + { + public: + using CType = VkSamplerYcbcrConversion; + using NativeType = VkSamplerYcbcrConversion; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion; + + public: + SamplerYcbcrConversion() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + SamplerYcbcrConversion( SamplerYcbcrConversion const & rhs ) = default; + SamplerYcbcrConversion & operator=( SamplerYcbcrConversion const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + SamplerYcbcrConversion( SamplerYcbcrConversion && rhs ) = default; + SamplerYcbcrConversion & operator=( SamplerYcbcrConversion && rhs ) = default; +#else + SamplerYcbcrConversion( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT + : m_samplerYcbcrConversion( VULKAN_HPP_NAMESPACE::exchange( rhs.m_samplerYcbcrConversion, {} ) ) + { + } + + SamplerYcbcrConversion & operator=( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT + { + m_samplerYcbcrConversion = VULKAN_HPP_NAMESPACE::exchange( rhs.m_samplerYcbcrConversion, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT + : m_samplerYcbcrConversion( samplerYcbcrConversion ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + SamplerYcbcrConversion & operator=( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT + { + m_samplerYcbcrConversion = samplerYcbcrConversion; + return *this; + } +#endif + + SamplerYcbcrConversion & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_samplerYcbcrConversion = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion != VK_NULL_HANDLE; + 
} + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion == VK_NULL_HANDLE; + } + + private: + VkSamplerYcbcrConversion m_samplerYcbcrConversion = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + using SamplerYcbcrConversionKHR = SamplerYcbcrConversion; + + // wrapper class for handle VkShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderModule.html + class ShaderModule + { + public: + using CType = VkShaderModule; + using NativeType = VkShaderModule; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule; + + public: + ShaderModule() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + ShaderModule( ShaderModule const & rhs ) = default; + ShaderModule & operator=( ShaderModule const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + ShaderModule( ShaderModule && rhs ) = default; + ShaderModule & operator=( ShaderModule && rhs ) = default; +#else + ShaderModule( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT : m_shaderModule( VULKAN_HPP_NAMESPACE::exchange( rhs.m_shaderModule, {} ) ) {} + + ShaderModule & operator=( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT + { + m_shaderModule = VULKAN_HPP_NAMESPACE::exchange( rhs.m_shaderModule, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT : m_shaderModule( shaderModule ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + ShaderModule & operator=( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT + { + m_shaderModule = shaderModule; + return *this; + } +#endif + + ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_shaderModule = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule == VK_NULL_HANDLE; + } + + private: + VkShaderModule m_shaderModule = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderModule; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderModule; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderModule; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkValidationCacheEXT.html + class ValidationCacheEXT + { + public: + using CType = VkValidationCacheEXT; + using NativeType = 
VkValidationCacheEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT; + + public: + ValidationCacheEXT() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + ValidationCacheEXT( ValidationCacheEXT const & rhs ) = default; + ValidationCacheEXT & operator=( ValidationCacheEXT const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + ValidationCacheEXT( ValidationCacheEXT && rhs ) = default; + ValidationCacheEXT & operator=( ValidationCacheEXT && rhs ) = default; +#else + ValidationCacheEXT( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT : m_validationCacheEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_validationCacheEXT, {} ) ) + { + } + + ValidationCacheEXT & operator=( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + m_validationCacheEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_validationCacheEXT, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT : m_validationCacheEXT( validationCacheEXT ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + ValidationCacheEXT & operator=( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT + { + m_validationCacheEXT = validationCacheEXT; + return *this; + } +#endif + + ValidationCacheEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_validationCacheEXT = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT == VK_NULL_HANDLE; + } + + private: + VkValidationCacheEXT m_validationCacheEXT = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkVideoSessionParametersKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionParametersKHR.html + class VideoSessionParametersKHR + { + public: + using CType = VkVideoSessionParametersKHR; + using NativeType = VkVideoSessionParametersKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VideoSessionParametersKHR() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + VideoSessionParametersKHR( VideoSessionParametersKHR const & rhs ) = default; + VideoSessionParametersKHR & operator=( VideoSessionParametersKHR const & rhs ) = default; + +#if 
!defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + VideoSessionParametersKHR( VideoSessionParametersKHR && rhs ) = default; + VideoSessionParametersKHR & operator=( VideoSessionParametersKHR && rhs ) = default; +#else + VideoSessionParametersKHR( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_videoSessionParametersKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_videoSessionParametersKHR, {} ) ) + { + } + + VideoSessionParametersKHR & operator=( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionParametersKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_videoSessionParametersKHR, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT VideoSessionParametersKHR( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT + : m_videoSessionParametersKHR( videoSessionParametersKHR ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + VideoSessionParametersKHR & operator=( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionParametersKHR = videoSessionParametersKHR; + return *this; + } +#endif + + VideoSessionParametersKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionParametersKHR = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionParametersKHR() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR == VK_NULL_HANDLE; + } + + private: + VkVideoSessionParametersKHR m_videoSessionParametersKHR = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkPipelineBinaryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryKHR.html + class PipelineBinaryKHR + { + public: + using CType = VkPipelineBinaryKHR; + using NativeType = VkPipelineBinaryKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineBinaryKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + PipelineBinaryKHR() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + PipelineBinaryKHR( PipelineBinaryKHR const & rhs ) = default; + PipelineBinaryKHR & operator=( PipelineBinaryKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + PipelineBinaryKHR( PipelineBinaryKHR && rhs ) = default; + PipelineBinaryKHR & operator=( PipelineBinaryKHR && rhs ) = default; +#else + PipelineBinaryKHR( PipelineBinaryKHR && rhs ) VULKAN_HPP_NOEXCEPT : m_pipelineBinaryKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineBinaryKHR, {} ) ) {} + + PipelineBinaryKHR & operator=( PipelineBinaryKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_pipelineBinaryKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineBinaryKHR, {} ); + return *this; + } +#endif + + 
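  // [Editorial note, not generated code] Every wrapper above carries the same move logic:
  // with VULKAN_HPP_HANDLES_MOVE_EXCHANGE defined, the move constructor/assignment use
  // VULKAN_HPP_NAMESPACE::exchange so the moved-from object is left null; otherwise the
  // defaulted move simply copies the raw handle. Hedged sketch:
  //
  //   vk::PipelineBinaryKHR a = ...;                // some previously created handle (creation elided)
  //   vk::PipelineBinaryKHR b = std::move( a );
  //   // VULKAN_HPP_HANDLES_MOVE_EXCHANGE defined:   b holds the handle, !a == true
  //   // VULKAN_HPP_HANDLES_MOVE_EXCHANGE undefined: a and b both refer to the same handle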
VULKAN_HPP_CONSTEXPR PipelineBinaryKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineBinaryKHR( VkPipelineBinaryKHR pipelineBinaryKHR ) VULKAN_HPP_NOEXCEPT : m_pipelineBinaryKHR( pipelineBinaryKHR ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + PipelineBinaryKHR & operator=( VkPipelineBinaryKHR pipelineBinaryKHR ) VULKAN_HPP_NOEXCEPT + { + m_pipelineBinaryKHR = pipelineBinaryKHR; + return *this; + } +#endif + + PipelineBinaryKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipelineBinaryKHR = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineBinaryKHR() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineBinaryKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineBinaryKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineBinaryKHR == VK_NULL_HANDLE; + } + + private: + VkPipelineBinaryKHR m_pipelineBinaryKHR = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineBinaryKHR; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineBinaryKHR; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkQueue, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueue.html + class Queue + { + public: + using CType = VkQueue; + using NativeType = VkQueue; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueue; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue; + + public: + Queue() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + Queue( Queue const & rhs ) = default; + Queue & operator=( Queue const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Queue( Queue && rhs ) = default; + Queue & operator=( Queue && rhs ) = default; +#else + Queue( Queue && rhs ) VULKAN_HPP_NOEXCEPT : m_queue( VULKAN_HPP_NAMESPACE::exchange( rhs.m_queue, {} ) ) {} + + Queue & operator=( Queue && rhs ) VULKAN_HPP_NOEXCEPT + { + m_queue = VULKAN_HPP_NAMESPACE::exchange( rhs.m_queue, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT : m_queue( queue ) {} + + Queue & operator=( VkQueue queue ) VULKAN_HPP_NOEXCEPT + { + m_queue = queue; + return *this; + } + + Queue & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_queue = {}; + return *this; + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkQueueSubmit, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit.html + template + VULKAN_HPP_NODISCARD Result submit( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueSubmit, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + submit( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence 
VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueWaitIdle, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueWaitIdle.html + template + VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkQueueWaitIdle, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueWaitIdle.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkQueueBindSparse, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueBindSparse.html + template + VULKAN_HPP_NODISCARD Result bindSparse( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueBindSparse, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueBindSparse.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfo, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_3 === + + // wrapper function for command vkQueueSubmit2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit2.html + template + VULKAN_HPP_NODISCARD Result submit2( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueSubmit2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit2.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + submit2( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_swapchain === + + // wrapper function for command vkQueuePresentKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueuePresentKHR.html + template + VULKAN_HPP_NODISCARD Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueuePresentKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueuePresentKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_debug_utils === + + // wrapper function for command 
vkQueueBeginDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueBeginDebugUtilsLabelEXT.html + template + void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueBeginDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueBeginDebugUtilsLabelEXT.html + template + void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkQueueEndDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueEndDebugUtilsLabelEXT.html + template + void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkQueueInsertDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueInsertDebugUtilsLabelEXT.html + template + void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueInsertDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueInsertDebugUtilsLabelEXT.html + template + void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_device_diagnostic_checkpoints === + + // wrapper function for command vkGetQueueCheckpointDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointDataNV.html + template + void getCheckpointDataNV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetQueueCheckpointDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointDataNV.html + template < + typename CheckpointDataNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getCheckpointDataNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetQueueCheckpointDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointDataNV.html + template < + typename CheckpointDataNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetQueueCheckpointData2NV, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointData2NV.html + template + void getCheckpointData2NV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetQueueCheckpointData2NV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointData2NV.html + template < + typename CheckpointData2NVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getCheckpointData2NV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetQueueCheckpointData2NV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointData2NV.html + template < + typename CheckpointData2NVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_INTEL_performance_query === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueSetPerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSetPerformanceConfigurationINTEL.html + template + VULKAN_HPP_NODISCARD Result setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkQueueSetPerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSetPerformanceConfigurationINTEL.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_synchronization2 === + + // wrapper function for command vkQueueSubmit2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit2KHR.html + template + VULKAN_HPP_NODISCARD Result submit2KHR( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueSubmit2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit2KHR.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_low_latency2 === + + // wrapper function for command vkQueueNotifyOutOfBandNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueNotifyOutOfBandNV.html + template + void 
notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV * pQueueTypeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueNotifyOutOfBandNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueNotifyOutOfBandNV.html + template + void notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + operator VkQueue() const VULKAN_HPP_NOEXCEPT + { + return m_queue; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_queue != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_queue == VK_NULL_HANDLE; + } + + private: + VkQueue m_queue = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Queue; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Queue; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Queue; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDevice.html + class Device + { + public: + using CType = VkDevice; + using NativeType = VkDevice; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDevice; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice; + + public: + Device() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + Device( Device const & rhs ) = default; + Device & operator=( Device const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Device( Device && rhs ) = default; + Device & operator=( Device && rhs ) = default; +#else + Device( Device && rhs ) VULKAN_HPP_NOEXCEPT : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) {} + + Device & operator=( Device && rhs ) VULKAN_HPP_NOEXCEPT + { + m_device = VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + Device( VkDevice device ) VULKAN_HPP_NOEXCEPT : m_device( device ) {} + + Device & operator=( VkDevice device ) VULKAN_HPP_NOEXCEPT + { + m_device = device; + return *this; + } + + Device & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_device = {}; + return *this; + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkGetDeviceProcAddr, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceProcAddr.html + template + PFN_vkVoidFunction getProcAddr( const char * pName, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceProcAddr, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceProcAddr.html + template + VULKAN_HPP_NAMESPACE::PFN_VoidFunction getProcAddr( const std::string & name, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 
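  // [Editorial note, not generated code] The Queue wrapper above exposes both the raw
  // pointer-style commands and, unless VULKAN_HPP_DISABLE_ENHANCED_MODE is defined,
  // ArrayProxy/reference overloads that throw on failure. A minimal sketch, assuming a
  // valid vk::Device `device`, a queue family index `queueFamilyIndex`, and a recorded
  // vk::CommandBuffer `cmd` (all hypothetical here):
  //
  //   vk::Queue queue = device.getQueue( queueFamilyIndex, 0 );
  //   vk::SubmitInfo submitInfo{};
  //   submitInfo.setCommandBuffers( cmd );          // structure-wrapper setter, single-element ArrayProxy
  //   queue.submit( submitInfo );                   // enhanced mode: fence defaults to a null handle
  //   queue.waitIdle();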
+ + // wrapper function for command vkDestroyDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDevice.html + template + void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDevice.html + template + void destroy( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceQueue, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceQueue.html + template + void getQueue( uint32_t queueFamilyIndex, + uint32_t queueIndex, + VULKAN_HPP_NAMESPACE::Queue * pQueue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceQueue, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceQueue.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue + getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDeviceWaitIdle, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDeviceWaitIdle.html + template + VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkDeviceWaitIdle, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDeviceWaitIdle.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkAllocateMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateMemory.html + template + VULKAN_HPP_NODISCARD Result allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAllocateMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateMemory.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkAllocateMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateMemory.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkFreeMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeMemory.html + template + void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFreeMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeMemory.html + template + void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkFreeMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeMemory.html + template + void( free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFreeMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeMemory.html + template + void( free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkMapMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory.html + template + VULKAN_HPP_NODISCARD Result mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, + void ** ppData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkMapMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUnmapMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory.html + template + void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkFlushMappedMemoryRanges, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFlushMappedMemoryRanges.html + template + VULKAN_HPP_NODISCARD Result flushMappedMemoryRanges( uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFlushMappedMemoryRanges, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFlushMappedMemoryRanges.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkInvalidateMappedMemoryRanges, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkInvalidateMappedMemoryRanges.html + template + VULKAN_HPP_NODISCARD Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkInvalidateMappedMemoryRanges, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkInvalidateMappedMemoryRanges.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceMemoryCommitment, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryCommitment.html + template + void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceMemoryCommitment, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryCommitment.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize + getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindBufferMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory.html + template + VULKAN_HPP_NODISCARD Result bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkBindBufferMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindImageMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory.html + template + VULKAN_HPP_NODISCARD Result bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + 
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkBindImageMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkGetBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements.html + template + void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements + getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements.html + template + void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements + getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements.html + template + void getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetImageSparseMemoryRequirements, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFence.html + template + VULKAN_HPP_NODISCARD Result createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFence.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFence.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createFenceUnique( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFence.html + template + void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFence.html + template + void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFence.html + template + void destroy( VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFence.html + template + void destroy( VULKAN_HPP_NAMESPACE::Fence fence, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkResetFences, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetFences.html + template + VULKAN_HPP_NODISCARD Result resetFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkResetFences, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetFences.html + template + typename ResultValueType::type resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetFenceStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceStatus.html + template + VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkGetFenceStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceStatus.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkWaitForFences, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForFences.html + template + VULKAN_HPP_NODISCARD Result waitForFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWaitForFences, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForFences.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSemaphore.html + template + VULKAN_HPP_NODISCARD Result createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSemaphore.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSemaphore.html + template + 
VULKAN_HPP_NODISCARD typename ResultValueType>::type + createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySemaphore.html + template + void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySemaphore.html + template + void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySemaphore.html + template + void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySemaphore.html + template + void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateEvent.html + template + VULKAN_HPP_NODISCARD Result createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Event * pEvent, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateEvent.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateEvent.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createEventUnique( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command 
vkDestroyEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyEvent.html + template + void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyEvent.html + template + void destroyEvent( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyEvent.html + template + void destroy( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyEvent.html + template + void destroy( VULKAN_HPP_NAMESPACE::Event event, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetEventStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEventStatus.html + template + VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkGetEventStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEventStatus.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetEvent.html + template + VULKAN_HPP_NODISCARD Result setEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkSetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetEvent.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkResetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetEvent.html + template + VULKAN_HPP_NODISCARD Result resetEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkResetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetEvent.html + template + 
typename ResultValueType::type resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkCreateQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateQueryPool.html + template + VULKAN_HPP_NODISCARD Result createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateQueryPool.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateQueryPool.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyQueryPool.html + template + void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyQueryPool.html + template + void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyQueryPool.html + template + void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyQueryPool.html + template + void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetQueryPoolResults, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueryPoolResults.html + template + 
VULKAN_HPP_NODISCARD Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetQueryPoolResults, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueryPoolResults.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetQueryPoolResults, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueryPoolResults.html + template + VULKAN_HPP_NODISCARD ResultValue getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBuffer.html + template + VULKAN_HPP_NODISCARD Result createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Buffer * pBuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBuffer.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBuffer.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createBufferUnique( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBuffer.html + template + void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyBuffer, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBuffer.html + template + void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBuffer.html + template + void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBuffer.html + template + void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferView.html + template + VULKAN_HPP_NODISCARD Result createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::BufferView * pView, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferView.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferView.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferView.html + template + void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferView.html + template + void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferView.html + template + void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferView.html + template + void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImage.html + template + VULKAN_HPP_NODISCARD Result createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Image * pImage, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImage.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImage.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createImageUnique( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImage.html + template + void destroyImage( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImage.html + template + void destroyImage( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImage.html + template + void destroy( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper 
function for command vkDestroyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImage.html + template + void destroy( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSubresourceLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout.html + template + void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSubresourceLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout + getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImageView.html + template + VULKAN_HPP_NODISCARD Result createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ImageView * pView, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImageView.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImageView.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImageView.html + template + void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImageView.html + template + void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator 
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImageView.html + template + void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImageView.html + template + void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShaderModule.html + template + VULKAN_HPP_NODISCARD Result createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShaderModule.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShaderModule.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderModule.html + template + void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderModule.html + template + void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyShaderModule, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderModule.html + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderModule.html + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreatePipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineCache.html + template + VULKAN_HPP_NODISCARD Result createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreatePipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineCache.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreatePipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineCache.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineCache.html + template + void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineCache.html + template + void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineCache.html + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineCache.html + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPipelineCacheData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineCacheData.html + template + VULKAN_HPP_NODISCARD Result getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + size_t * pDataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineCacheData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineCacheData.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPipelineCacheData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineCacheData.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkMergePipelineCaches, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMergePipelineCaches.html + template + VULKAN_HPP_NODISCARD Result mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkMergePipelineCaches, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMergePipelineCaches.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template + VULKAN_HPP_NODISCARD Result createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template + VULKAN_HPP_NODISCARD ResultValue + createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template + VULKAN_HPP_NODISCARD ResultValue> + createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateComputePipelines, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template + VULKAN_HPP_NODISCARD Result createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template + VULKAN_HPP_NODISCARD ResultValue + createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template + VULKAN_HPP_NODISCARD ResultValue> + 
createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipeline, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipeline.html + template + void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipeline, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipeline.html + template + void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipeline, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipeline.html + template + void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipeline, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipeline.html + template + void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreatePipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineLayout.html + template + VULKAN_HPP_NODISCARD Result createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreatePipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineLayout.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreatePipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineLayout.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* 
VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineLayout.html + template + void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineLayout.html + template + void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineLayout.html + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineLayout.html + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateSampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSampler.html + template + VULKAN_HPP_NODISCARD Result createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Sampler * pSampler, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateSampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSampler.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateSampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSampler.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createSamplerUnique( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySampler.html + template + void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySampler.html + template + void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySampler.html + template + void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySampler.html + template + void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateDescriptorSetLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorSetLayout.html + template + VULKAN_HPP_NODISCARD Result createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDescriptorSetLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorSetLayout.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDescriptorSetLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorSetLayout.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorSetLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorSetLayout.html + template + void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // 
wrapper function for command vkDestroyDescriptorSetLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorSetLayout.html + template + void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorSetLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorSetLayout.html + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDescriptorSetLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorSetLayout.html + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorPool.html + template + VULKAN_HPP_NODISCARD Result createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorPool.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorPool.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorPool.html + template + void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDescriptorPool, see 
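    // Illustrative usage sketch (editorial example, assuming a valid vk::Device `device` and a
    // std::vector<vk::DescriptorSetLayoutBinding> `bindings`): in the exception-based enhanced mode the
    // create*Unique overloads above return owning handles directly:
    //
    //   vk::UniqueDescriptorSetLayout layout =
    //     device.createDescriptorSetLayoutUnique( vk::DescriptorSetLayoutCreateInfo{ {}, bindings } );
    //   vk::DescriptorPoolSize   poolSize{ vk::DescriptorType::eUniformBuffer, 16 };
    //   vk::UniqueDescriptorPool pool     = device.createDescriptorPoolUnique(
    //     vk::DescriptorPoolCreateInfo{ vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 16, poolSize } );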
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorPool.html + template + void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorPool.html + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorPool.html + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkResetDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetDescriptorPool.html + template + Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkResetDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetDescriptorPool.html + template + void resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkAllocateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateDescriptorSets.html + template + VULKAN_HPP_NODISCARD Result allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAllocateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateDescriptorSets.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkAllocateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateDescriptorSets.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + allocateDescriptorSets( const 
VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, + DescriptorSetAllocator & descriptorSetAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkAllocateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateDescriptorSets.html + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename DescriptorSetAllocator = std::allocator>, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, DescriptorSetAllocator>>::type + allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkAllocateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateDescriptorSets.html + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename DescriptorSetAllocator = std::allocator>, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, DescriptorSetAllocator>>::type + allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, + DescriptorSetAllocator & descriptorSetAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkFreeDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeDescriptorSets.html + template + Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFreeDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeDescriptorSets.html + template + void freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkFreeDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeDescriptorSets.html + template + Result( free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFreeDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeDescriptorSets.html + template + void( free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUpdateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSets.html + template + void updateDescriptorSets( uint32_t descriptorWriteCount, + const 
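    // Illustrative usage sketch for the allocate/update entry points nearby (assuming `device`, a
    // vk::DescriptorPool `pool` created with eFreeDescriptorSet, a vk::DescriptorSetLayout `layout` and a
    // vk::DescriptorBufferInfo `bufferInfo`); not part of the generated header:
    //
    //   std::vector<vk::UniqueDescriptorSet> sets =
    //     device.allocateDescriptorSetsUnique( vk::DescriptorSetAllocateInfo{ pool, layout } );
    //   vk::WriteDescriptorSet write{ *sets[0], 0, 0, vk::DescriptorType::eUniformBuffer, {}, bufferInfo };
    //   device.updateDescriptorSets( write, {} );  // one write, no copies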
VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUpdateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSets.html + template + void updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorCopies, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFramebuffer.html + template + VULKAN_HPP_NODISCARD Result createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFramebuffer.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFramebuffer.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFramebuffer.html + template + void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFramebuffer.html + template + void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFramebuffer.html + template + void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFramebuffer.html + template + void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass.html + template + VULKAN_HPP_NODISCARD Result createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyRenderPass.html + template + void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyRenderPass.html + template + void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyRenderPass.html + template + void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyRenderPass.html + template + void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + 
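    // Illustrative usage sketch for the framebuffer and render pass creation wrappers above (assuming `device`,
    // a vk::AttachmentDescription `attachment`, a vk::SubpassDescription `subpass`, a vk::ImageView `view` and
    // an extent `width` x `height`); not part of the generated header:
    //
    //   vk::UniqueRenderPass renderPass =
    //     device.createRenderPassUnique( vk::RenderPassCreateInfo{ {}, attachment, subpass } );
    //   vk::UniqueFramebuffer framebuffer =
    //     device.createFramebufferUnique( vk::FramebufferCreateInfo{ {}, *renderPass, view, width, height, 1 } );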
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetRenderAreaGranularity, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderAreaGranularity.html + template + void getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRenderAreaGranularity, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderAreaGranularity.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D + getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCommandPool.html + template + VULKAN_HPP_NODISCARD Result createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCommandPool.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCommandPool.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCommandPool.html + template + void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCommandPool.html + template + void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCommandPool, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCommandPool.html + template + void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCommandPool.html + template + void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkResetCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetCommandPool.html + template + VULKAN_HPP_NODISCARD Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkResetCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetCommandPool.html + template + typename ResultValueType::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkAllocateCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateCommandBuffers.html + template + VULKAN_HPP_NODISCARD Result allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAllocateCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateCommandBuffers.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkAllocateCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateCommandBuffers.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, + CommandBufferAllocator & commandBufferAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkAllocateCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateCommandBuffers.html + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename CommandBufferAllocator = std::allocator>, + typename std::enable_if>::value, + 
int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, CommandBufferAllocator>>::type + allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkAllocateCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateCommandBuffers.html + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename CommandBufferAllocator = std::allocator>, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, CommandBufferAllocator>>::type + allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, + CommandBufferAllocator & commandBufferAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkFreeCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeCommandBuffers.html + template + void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFreeCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeCommandBuffers.html + template + void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::ArrayProxy const & commandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkFreeCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeCommandBuffers.html + template + void( free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFreeCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeCommandBuffers.html + template + void( free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::ArrayProxy const & commandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_1 === + + // wrapper function for command vkBindBufferMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory2.html + template + VULKAN_HPP_NODISCARD Result bindBufferMemory2( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindBufferMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory2.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
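    // Illustrative usage sketch for command buffer allocation (assuming `device` and a vk::CommandPool
    // `commandPool` created for the target queue family); not part of the generated header:
    //
    //   std::vector<vk::UniqueCommandBuffer> commandBuffers = device.allocateCommandBuffersUnique(
    //     vk::CommandBufferAllocateInfo{ commandPool, vk::CommandBufferLevel::ePrimary, 2 } );
    //
    // The unique handles return themselves to `commandPool` on destruction; with the raw overloads the buffers
    // must instead be released explicitly through device.freeCommandBuffers( commandPool, ... ).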
const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBindImageMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory2.html + template + VULKAN_HPP_NODISCARD Result bindImageMemory2( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindImageMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory2.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceGroupPeerMemoryFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPeerMemoryFeatures.html + template + void getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceGroupPeerMemoryFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPeerMemoryFeatures.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2.html + template + void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetImageMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetBufferMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2.html + template + void getBufferMemoryRequirements2( const 
VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetBufferMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSparseMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2.html + template + void getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSparseMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetImageSparseMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkTrimCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTrimCommandPool.html + template + void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeviceQueue2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceQueue2.html + template + void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo, + 
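    // Illustrative usage sketch (assuming `device` and a vk::Buffer `buffer`; not part of the generated
    // header): the *MemoryRequirements2 queries can return either the plain structure or a StructureChain that
    // additionally fills extension structures:
    //
    //   auto chain = device.getBufferMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
    //     vk::BufferMemoryRequirementsInfo2{ buffer } );
    //   vk::MemoryRequirements memReqs   = chain.get<vk::MemoryRequirements2>().memoryRequirements;
    //   bool                   dedicated = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;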
VULKAN_HPP_NAMESPACE::Queue * pQueue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceQueue2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceQueue2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateSamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversion.html + template + VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateSamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversion.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateSamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversion.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversion.html + template + void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversion.html + template + void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversion.html + template + void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversion.html + template + void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplate.html + template + VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplate.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplate.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplate.html + template + void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplate.html + template + void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorUpdateTemplate, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplate.html + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplate.html + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUpdateDescriptorSetWithTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSetWithTemplate.html + template + void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUpdateDescriptorSetWithTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSetWithTemplate.html + template + void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDescriptorSetLayoutSupport, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupport.html + template + void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDescriptorSetLayoutSupport, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupport.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetDescriptorSetLayoutSupport, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupport.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_2 === + + // wrapper function for command vkCreateRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2.html + template + VULKAN_HPP_NODISCARD Result 
createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkResetQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetQueryPool.html + template + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetSemaphoreCounterValue, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreCounterValue.html + template + VULKAN_HPP_NODISCARD Result getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + uint64_t * pValue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSemaphoreCounterValue, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreCounterValue.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkWaitSemaphores, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitSemaphores.html + template + VULKAN_HPP_NODISCARD Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWaitSemaphores, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitSemaphores.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSignalSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSignalSemaphore.html + template + VULKAN_HPP_NODISCARD Result signalSemaphore( const 
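    // Illustrative usage sketch for the Vulkan 1.2 host-side timeline semaphore entry points above (assuming
    // `device` and a vk::Semaphore `timelineSemaphore` created with vk::SemaphoreType::eTimeline); not part of
    // the generated header:
    //
    //   uint64_t waitValue = device.getSemaphoreCounterValue( timelineSemaphore ) + 1;
    //   vk::SemaphoreWaitInfo waitInfo{ {}, timelineSemaphore, waitValue };
    //   vk::Result waitResult = device.waitSemaphores( waitInfo, 1000000 );  // 1 ms timeout; may return eTimeout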
+    // wrapper function for command vkSignalSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSignalSemaphore.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
+                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    // wrapper function for command vkSignalSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSignalSemaphore.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+      signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    // wrapper function for command vkGetBufferDeviceAddress, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddress.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
+                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    // wrapper function for command vkGetBufferDeviceAddress, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddress.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
+                                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    // wrapper function for command vkGetBufferOpaqueCaptureAddress, see
+    // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureAddress.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
+                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    // wrapper function for command vkGetBufferOpaqueCaptureAddress, see
+    // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureAddress.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
+                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    // wrapper function for command vkGetDeviceMemoryOpaqueCaptureAddress, see
+    // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryOpaqueCaptureAddress.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
+                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    // wrapper function for command vkGetDeviceMemoryOpaqueCaptureAddress, see
+    // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryOpaqueCaptureAddress.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,
+                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_VERSION_1_3 ===
+
+    // wrapper function for command vkCreatePrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlot.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,
+                                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+
VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreatePrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlot.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreatePrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlot.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlot.html + template + void destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlot.html + template + void destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlot.html + template + void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlot.html + template + void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetPrivateData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetPrivateData.html + template + VULKAN_HPP_NODISCARD Result setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command 
vkSetPrivateData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetPrivateData.html + template + typename ResultValueType::type setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkGetPrivateData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPrivateData.html + template + void getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPrivateData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPrivateData.html + template + VULKAN_HPP_NODISCARD uint64_t getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirements.html + template + void getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirements.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetDeviceBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirements.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceImageMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirements.html + template + void getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceImageMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirements.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getImageMemoryRequirements( const 
VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetDeviceImageMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirements.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirements.html + template + void getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirements.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetDeviceImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirements.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_4 === + + // wrapper function for command vkMapMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory2.html + template + VULKAN_HPP_NODISCARD Result mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo, + void ** ppData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkMapMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory2.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUnmapMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory2.html + template + VULKAN_HPP_NODISCARD Result unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUnmapMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory2.html + template + typename ResultValueType::type unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetRenderingAreaGranularity, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderingAreaGranularity.html + template + void getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRenderingAreaGranularity, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderingAreaGranularity.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D + getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceImageSubresourceLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayout.html + template + void getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceImageSubresourceLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayout.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetDeviceImageSubresourceLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayout.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSubresourceLayout2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2.html + template + void getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSubresourceLayout2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image image, 
+ const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetImageSubresourceLayout2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyMemoryToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToImage.html + template + VULKAN_HPP_NODISCARD Result copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyMemoryToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToImage.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyImageToMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToMemory.html + template + VULKAN_HPP_NODISCARD Result copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyImageToMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToMemory.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyImageToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToImage.html + template + VULKAN_HPP_NODISCARD Result copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyImageToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToImage.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkTransitionImageLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTransitionImageLayout.html + template + VULKAN_HPP_NODISCARD Result transitionImageLayout( uint32_t transitionCount, + const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkTransitionImageLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTransitionImageLayout.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + transitionImageLayout( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_swapchain === + + // wrapper function for command vkCreateSwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSwapchainKHR.html + template + VULKAN_HPP_NODISCARD Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateSwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSwapchainKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateSwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSwapchainKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySwapchainKHR.html + template + void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySwapchainKHR.html + template + void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySwapchainKHR.html + template + void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySwapchainKHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySwapchainKHR.html + template + void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetSwapchainImagesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainImagesKHR.html + template + VULKAN_HPP_NODISCARD Result getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSwapchainImagesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainImagesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetSwapchainImagesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainImagesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSwapchainImagesKHR( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkAcquireNextImageKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireNextImageKHR.html + template + VULKAN_HPP_NODISCARD Result acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore, + VULKAN_HPP_NAMESPACE::Fence fence, + uint32_t * pImageIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireNextImageKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireNextImageKHR.html + template + VULKAN_HPP_NODISCARD ResultValue acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceGroupPresentCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPresentCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD Result getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceGroupPresentCapabilitiesKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPresentCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getGroupPresentCapabilitiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceGroupSurfacePresentModesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupSurfacePresentModesKHR.html + template + VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceGroupSurfacePresentModesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupSurfacePresentModesKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkAcquireNextImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireNextImage2KHR.html + template + VULKAN_HPP_NODISCARD Result acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, + uint32_t * pImageIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireNextImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireNextImage2KHR.html + template + VULKAN_HPP_NODISCARD ResultValue acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_display_swapchain === + + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template + VULKAN_HPP_NODISCARD Result createSharedSwapchainsKHR( uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + 
VULKAN_HPP_NODISCARD typename ResultValueType>::type + createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename SwapchainKHRAllocator = std::allocator>, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, SwapchainKHRAllocator>>::type + createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename SwapchainKHRAllocator = std::allocator>, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, SwapchainKHRAllocator>>::type + createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_debug_marker === + + // wrapper function for command vkDebugMarkerSetObjectTagEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugMarkerSetObjectTagEXT.html + template + VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDebugMarkerSetObjectTagEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugMarkerSetObjectTagEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // 
wrapper function for command vkDebugMarkerSetObjectNameEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugMarkerSetObjectNameEXT.html + template + VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDebugMarkerSetObjectNameEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugMarkerSetObjectNameEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_video_queue === + + // wrapper function for command vkCreateVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionKHR.html + template + VULKAN_HPP_NODISCARD Result createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionKHR.html + template + void destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionKHR.html + template + void destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyVideoSessionKHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionKHR.html + template + void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionKHR.html + template + void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetVideoSessionMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetVideoSessionMemoryRequirementsKHR.html + template + VULKAN_HPP_NODISCARD Result getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t * pMemoryRequirementsCount, + VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetVideoSessionMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetVideoSessionMemoryRequirementsKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetVideoSessionMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetVideoSessionMemoryRequirementsKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBindVideoSessionMemoryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindVideoSessionMemoryKHR.html + template + VULKAN_HPP_NODISCARD Result bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t bindSessionMemoryInfoCount, + const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindVideoSessionMemoryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindVideoSessionMemoryKHR.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bindSessionMemoryInfos, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionParametersKHR.html + template + VULKAN_HPP_NODISCARD Result createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionParametersKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionParametersKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUpdateVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateVideoSessionParametersKHR.html + template + VULKAN_HPP_NODISCARD Result updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUpdateVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateVideoSessionParametersKHR.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionParametersKHR.html + template + void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyVideoSessionParametersKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionParametersKHR.html + template + void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionParametersKHR.html + template + void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionParametersKHR.html + template + void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NVX_binary_import === + + // wrapper function for command vkCreateCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuModuleNVX.html + template + VULKAN_HPP_NODISCARD Result createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuModuleNVX.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuModuleNVX.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuFunctionNVX.html + template + VULKAN_HPP_NODISCARD Result createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateCuFunctionNVX, 
see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuFunctionNVX.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuFunctionNVX.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuModuleNVX.html + template + void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuModuleNVX.html + template + void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuModuleNVX.html + template + void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuModuleNVX.html + template + void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuFunctionNVX.html + template + void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuFunctionNVX.html + template + void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command 
vkDestroyCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuFunctionNVX.html + template + void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuFunctionNVX.html + template + void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NVX_image_view_handle === + + // wrapper function for command vkGetImageViewHandleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewHandleNVX.html + template + uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageViewHandleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewHandleNVX.html + template + uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageViewHandle64NVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewHandle64NVX.html + template + uint64_t getImageViewHandle64NVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageViewHandle64NVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewHandle64NVX.html + template + uint64_t getImageViewHandle64NVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageViewAddressNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewAddressNVX.html + template + VULKAN_HPP_NODISCARD Result getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageViewAddressNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewAddressNVX.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_AMD_shader_info === + + // wrapper function for command vkGetShaderInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderInfoAMD.html + template + VULKAN_HPP_NODISCARD Result getShaderInfoAMD( 
VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetShaderInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderInfoAMD.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetShaderInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderInfoAMD.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + + // wrapper function for command vkGetMemoryWin32HandleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandleNV.html + template + VULKAN_HPP_NODISCARD Result getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryWin32HandleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandleNV.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_device_group === + + // wrapper function for command vkGetDeviceGroupPeerMemoryFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPeerMemoryFeaturesKHR.html + template + void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceGroupPeerMemoryFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPeerMemoryFeaturesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_maintenance1 === + + // wrapper function for command vkTrimCommandPoolKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTrimCommandPoolKHR.html + template + void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + + // wrapper function for command vkGetMemoryWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD Result getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetMemoryWin32HandlePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandlePropertiesKHR.html + template + VULKAN_HPP_NODISCARD Result getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryWin32HandlePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandlePropertiesKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getMemoryWin32HandlePropertiesKHR( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + + // wrapper function for command vkGetMemoryFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryFdKHR.html + template + VULKAN_HPP_NODISCARD Result getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryFdKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetMemoryFdPropertiesKHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryFdPropertiesKHR.html + template + VULKAN_HPP_NODISCARD Result getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + int fd, + VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryFdPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryFdPropertiesKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getMemoryFdPropertiesKHR( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + + // wrapper function for command vkImportSemaphoreWin32HandleKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkImportSemaphoreWin32HandleKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetSemaphoreWin32HandleKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD Result getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSemaphoreWin32HandleKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + + // wrapper function for command vkImportSemaphoreFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreFdKHR.html + template + VULKAN_HPP_NODISCARD Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkImportSemaphoreFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreFdKHR.html + template + 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetSemaphoreFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreFdKHR.html + template + VULKAN_HPP_NODISCARD Result getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSemaphoreFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreFdKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_descriptor_update_template === + + // wrapper function for command vkCreateDescriptorUpdateTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplateKHR.html + template + VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDescriptorUpdateTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplateKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDescriptorUpdateTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplateKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorUpdateTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplateKHR.html + template + void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDescriptorUpdateTemplateKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplateKHR.html + template + void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUpdateDescriptorSetWithTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSetWithTemplateKHR.html + template + void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUpdateDescriptorSetWithTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSetWithTemplateKHR.html + template + void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_display_control === + + // wrapper function for command vkDisplayPowerControlEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDisplayPowerControlEXT.html + template + VULKAN_HPP_NODISCARD Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDisplayPowerControlEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDisplayPowerControlEXT.html + template + typename ResultValueType::type displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkRegisterDeviceEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDeviceEventEXT.html + template + VULKAN_HPP_NODISCARD Result registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkRegisterDeviceEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDeviceEventEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkRegisterDeviceEventEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDeviceEventEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkRegisterDisplayEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDisplayEventEXT.html + template + VULKAN_HPP_NODISCARD Result registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkRegisterDisplayEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDisplayEventEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkRegisterDisplayEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDisplayEventEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetSwapchainCounterEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainCounterEXT.html + template + VULKAN_HPP_NODISCARD Result getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSwapchainCounterEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainCounterEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_GOOGLE_display_timing === + + // wrapper function for command vkGetRefreshCycleDurationGOOGLE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRefreshCycleDurationGOOGLE.html + template + VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRefreshCycleDurationGOOGLE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRefreshCycleDurationGOOGLE.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPastPresentationTimingGOOGLE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPastPresentationTimingGOOGLE.html + template + VULKAN_HPP_NODISCARD Result getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPastPresentationTimingGOOGLE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPastPresentationTimingGOOGLE.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPastPresentationTimingGOOGLE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPastPresentationTimingGOOGLE.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_hdr_metadata === + + // wrapper function for command vkSetHdrMetadataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetHdrMetadataEXT.html + template + void setHdrMetadataEXT( uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetHdrMetadataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetHdrMetadataEXT.html + template + void setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & swapchains, + VULKAN_HPP_NAMESPACE::ArrayProxy const & metadata, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_create_renderpass2 === + + // wrapper function for command vkCreateRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2KHR.html + template + VULKAN_HPP_NODISCARD Result createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2KHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2KHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_shared_presentable_image === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSwapchainStatusKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainStatusKHR.html + template + VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkGetSwapchainStatusKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainStatusKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + + // wrapper function for command vkImportFenceWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportFenceWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkImportFenceWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportFenceWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetFenceWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD Result getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetFenceWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceWin32HandleKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + + // wrapper function for command vkImportFenceFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportFenceFdKHR.html + template + VULKAN_HPP_NODISCARD Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkImportFenceFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportFenceFdKHR.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetFenceFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceFdKHR.html + template + VULKAN_HPP_NODISCARD Result getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetFenceFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceFdKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_performance_query === + + // wrapper function for command vkAcquireProfilingLockKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireProfilingLockKHR.html + template + VULKAN_HPP_NODISCARD Result acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireProfilingLockKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireProfilingLockKHR.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkReleaseProfilingLockKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseProfilingLockKHR.html + template + void releaseProfilingLockKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_debug_utils === + + // wrapper function for command vkSetDebugUtilsObjectNameEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDebugUtilsObjectNameEXT.html + template + VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetDebugUtilsObjectNameEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDebugUtilsObjectNameEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSetDebugUtilsObjectTagEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDebugUtilsObjectTagEXT.html + template + VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetDebugUtilsObjectTagEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDebugUtilsObjectTagEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + + // wrapper function for command vkGetAndroidHardwareBufferPropertiesANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAndroidHardwareBufferPropertiesANDROID.html + template + VULKAN_HPP_NODISCARD Result + getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer, + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetAndroidHardwareBufferPropertiesANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAndroidHardwareBufferPropertiesANDROID.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetAndroidHardwareBufferPropertiesANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAndroidHardwareBufferPropertiesANDROID.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetMemoryAndroidHardwareBufferANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryAndroidHardwareBufferANDROID.html + template + VULKAN_HPP_NODISCARD Result getMemoryAndroidHardwareBufferANDROID( const 
VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryAndroidHardwareBufferANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryAndroidHardwareBufferANDROID.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template + VULKAN_HPP_NODISCARD Result createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template + VULKAN_HPP_NODISCARD ResultValue + createExecutionGraphPipelineAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template >, + typename std::enable_if>::value, + 
int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createExecutionGraphPipelinesAMDXUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createExecutionGraphPipelinesAMDXUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template + VULKAN_HPP_NODISCARD ResultValue> createExecutionGraphPipelineAMDXUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetExecutionGraphPipelineScratchSizeAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExecutionGraphPipelineScratchSizeAMDX.html + template + VULKAN_HPP_NODISCARD Result + getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetExecutionGraphPipelineScratchSizeAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExecutionGraphPipelineScratchSizeAMDX.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetExecutionGraphPipelineNodeIndexAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExecutionGraphPipelineNodeIndexAMDX.html + template + VULKAN_HPP_NODISCARD Result getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, + uint32_t * pNodeIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetExecutionGraphPipelineNodeIndexAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExecutionGraphPipelineNodeIndexAMDX.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + const 
VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_get_memory_requirements2 === + + // wrapper function for command vkGetImageMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2KHR.html + template + void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetImageMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetBufferMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2KHR.html + template + void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetBufferMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSparseMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2KHR.html + template + void getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSparseMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2KHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetImageSparseMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2KHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_acceleration_structure === + + // wrapper function for command vkCreateAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureKHR.html + template + VULKAN_HPP_NODISCARD Result createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createAccelerationStructureKHRUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureKHR.html + template + void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; 
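    // --- Illustrative usage note (editorial sketch, not part of this patch) --------------------
    // The enhanced-mode overloads above replace output pointers with return values: the
    // vkGetBufferMemoryRequirements2KHR / vkGetImageMemoryRequirements2KHR wrappers return a
    // MemoryRequirements2 (or a StructureChain when extension structs are queried alongside it),
    // and the acceleration-structure creation wrapper returns the handle directly, throwing
    // vk::SystemError on failure unless VULKAN_HPP_NO_EXCEPTIONS is defined. A minimal sketch,
    // assuming the default `vk` namespace alias and hypothetical `device`, `asBuffer`, and
    // `requiredSize` objects (none of these names come from this patch):
    //
    //   vk::MemoryRequirements2 reqs =
    //       device.getBufferMemoryRequirements2KHR( vk::BufferMemoryRequirementsInfo2{ asBuffer } );
    //
    //   // StructureChain variant: also query dedicated-allocation preference in one call.
    //   auto chain = device.getBufferMemoryRequirements2KHR<vk::MemoryRequirements2,
    //                                                       vk::MemoryDedicatedRequirements>(
    //       vk::BufferMemoryRequirementsInfo2{ asBuffer } );
    //   bool preferDedicated = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;
    //
    //   vk::AccelerationStructureKHR blas = device.createAccelerationStructureKHR(
    //       vk::AccelerationStructureCreateInfoKHR{}
    //           .setBuffer( asBuffer )
    //           .setSize( requiredSize )  // hypothetical; from getAccelerationStructureBuildSizesKHR below
    //           .setType( vk::AccelerationStructureTypeKHR::eBottomLevel ) );
    //   ...
    //   device.destroyAccelerationStructureKHR( blas );
    // --------------------------------------------------------------------------------------------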
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureKHR.html + template + void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureKHR.html + template + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureKHR.html + template + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBuildAccelerationStructuresKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBuildAccelerationStructuresKHR.html + template + VULKAN_HPP_NODISCARD Result buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBuildAccelerationStructuresKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBuildAccelerationStructuresKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyAccelerationStructureKHR.html + template + VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyAccelerationStructureKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR 
deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyAccelerationStructureToMemoryKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyAccelerationStructureToMemoryKHR.html + template + VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyAccelerationStructureToMemoryKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyAccelerationStructureToMemoryKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyMemoryToAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToAccelerationStructureKHR.html + template + VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyMemoryToAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToAccelerationStructureKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteAccelerationStructuresPropertiesKHR.html + template + VULKAN_HPP_NODISCARD Result + writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + void * pData, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteAccelerationStructuresPropertiesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type writeAccelerationStructuresPropertiesKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, 
+ size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteAccelerationStructuresPropertiesKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type writeAccelerationStructuresPropertyKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetAccelerationStructureDeviceAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureDeviceAddressKHR.html + template + DeviceAddress getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetAccelerationStructureDeviceAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureDeviceAddressKHR.html + template + VULKAN_HPP_NAMESPACE::DeviceAddress + getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceAccelerationStructureCompatibilityKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceAccelerationStructureCompatibilityKHR.html + template + void getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceAccelerationStructureCompatibilityKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceAccelerationStructureCompatibilityKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetAccelerationStructureBuildSizesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureBuildSizesKHR.html + template + void getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, + const uint32_t * pMaxPrimitiveCounts, + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetAccelerationStructureBuildSizesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureBuildSizesKHR.html 
+ template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, + VULKAN_HPP_NAMESPACE::ArrayProxy const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_ray_tracing_pipeline === + + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template + VULKAN_HPP_NODISCARD Result createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template + VULKAN_HPP_NODISCARD ResultValue + createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + 
createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetRayTracingShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesKHR.html + template + VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRayTracingShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetRayTracingShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getRayTracingShaderGroupHandleKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetRayTracingCaptureReplayShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingCaptureReplayShaderGroupHandlesKHR.html + template + VULKAN_HPP_NODISCARD Result + getRayTracingCaptureReplayShaderGroupHandlesKHR( 
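  // --- Editorial usage sketch (hypothetical helper, not part of the patched header) ---
  // Creates a single ray tracing pipeline through the enhanced-mode createRayTracingPipelineKHR
  // wrapper declared above (default `vk::` namespace assumed). Because the underlying command
  // may legitimately report codes such as eOperationDeferredKHR, the wrapper hands back a
  // ResultValue that the caller inspects instead of throwing on every non-success code.
  inline vk::Pipeline createRtPipelineOrNull( vk::Device device, vk::RayTracingPipelineCreateInfoKHR const & createInfo )
  {
    vk::ResultValue<vk::Pipeline> rv =
      device.createRayTracingPipelineKHR( nullptr /* deferredOperation */, nullptr /* pipelineCache */, createInfo );
    return ( rv.result == vk::Result::eSuccess ) ? rv.value : vk::Pipeline{};
  }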
VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRayTracingCaptureReplayShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingCaptureReplayShaderGroupHandlesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetRayTracingCaptureReplayShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingCaptureReplayShaderGroupHandlesKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getRayTracingCaptureReplayShaderGroupHandleKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetRayTracingShaderGroupStackSizeKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupStackSizeKHR.html + template + DeviceSize getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t group, + VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_sampler_ycbcr_conversion === + + // wrapper function for command vkCreateSamplerYcbcrConversionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversionKHR.html + template + VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateSamplerYcbcrConversionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversionKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateSamplerYcbcrConversionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversionKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif 
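  // --- Editorial usage sketch (hypothetical helper, not part of the patched header) ---
  // Fetches the opaque shader group handles of a ray tracing pipeline, e.g. to copy them into
  // a shader binding table, via the throwing enhanced-mode overload declared above.
  // `handleSize` is assumed to come from
  // vk::PhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupHandleSize.
  inline std::vector<uint8_t> fetchShaderGroupHandles( vk::Device device, vk::Pipeline rtPipeline, uint32_t groupCount, uint32_t handleSize )
  {
    // Returns the tightly packed handle bytes for groups [0, groupCount).
    return device.getRayTracingShaderGroupHandlesKHR<uint8_t>( rtPipeline, 0 /* firstGroup */, groupCount, size_t( groupCount ) * handleSize );
  }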
/* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySamplerYcbcrConversionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversionKHR.html + template + void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySamplerYcbcrConversionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversionKHR.html + template + void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_bind_memory2 === + + // wrapper function for command vkBindBufferMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory2KHR.html + template + VULKAN_HPP_NODISCARD Result bindBufferMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindBufferMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory2KHR.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBindImageMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory2KHR.html + template + VULKAN_HPP_NODISCARD Result bindImageMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindImageMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory2KHR.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_image_drm_format_modifier === + + // wrapper function for command vkGetImageDrmFormatModifierPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageDrmFormatModifierPropertiesEXT.html + template + VULKAN_HPP_NODISCARD Result getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageDrmFormatModifierPropertiesEXT, see + // 
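  // --- Editorial usage sketch (hypothetical helper, not part of the patched header) ---
  // Binds two buffers to their memory in a single call through the enhanced-mode
  // vkBindBufferMemory2KHR wrapper declared above (default `vk::` namespace assumed). The
  // ArrayProxy parameter accepts an initializer list, a std::vector, or a plain array of
  // vk::BindBufferMemoryInfo; in the throwing configuration a failure raises vk::SystemError.
  inline void bindTwoBuffers( vk::Device device,
                              vk::Buffer bufferA, vk::DeviceMemory memoryA,
                              vk::Buffer bufferB, vk::DeviceMemory memoryB )
  {
    device.bindBufferMemory2KHR( { vk::BindBufferMemoryInfo( bufferA, memoryA, 0 ),
                                   vk::BindBufferMemoryInfo( bufferB, memoryB, 0 ) } );
  }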
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageDrmFormatModifierPropertiesEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_validation_cache === + + // wrapper function for command vkCreateValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateValidationCacheEXT.html + template + VULKAN_HPP_NODISCARD Result createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateValidationCacheEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateValidationCacheEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyValidationCacheEXT.html + template + void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyValidationCacheEXT.html + template + void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyValidationCacheEXT.html + template + void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyValidationCacheEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyValidationCacheEXT.html + template + void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkMergeValidationCachesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMergeValidationCachesEXT.html + template + VULKAN_HPP_NODISCARD Result mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkMergeValidationCachesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMergeValidationCachesEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetValidationCacheDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetValidationCacheDataEXT.html + template + VULKAN_HPP_NODISCARD Result getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + size_t * pDataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetValidationCacheDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetValidationCacheDataEXT.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetValidationCacheDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetValidationCacheDataEXT.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_ray_tracing === + + // wrapper function for command vkCreateAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureNV.html + template + VULKAN_HPP_NODISCARD Result createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for 
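  // --- Editorial usage sketch (hypothetical helper, not part of the patched header) ---
  // Round-trips a VK_EXT_validation_cache: the cache is created (optionally seeded with data
  // saved by an earlier run), its serialized contents are read back, and the handle is
  // destroyed again. Uses the enhanced-mode overloads declared above; `previousData` may be
  // empty on the first run.
  inline std::vector<uint8_t> roundTripValidationCache( vk::Device device, std::vector<uint8_t> const & previousData )
  {
    vk::ValidationCacheCreateInfoEXT createInfo{};
    createInfo.initialDataSize = previousData.size();
    createInfo.pInitialData    = previousData.data();

    vk::ValidationCacheEXT cache = device.createValidationCacheEXT( createInfo );
    std::vector<uint8_t> data    = device.getValidationCacheDataEXT( cache );
    device.destroyValidationCacheEXT( cache );
    return data;
  }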
command vkCreateAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureNV.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureNV.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureNV.html + template + void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureNV.html + template + void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureNV.html + template + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureNV.html + template + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetAccelerationStructureMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureMemoryRequirementsNV.html + template + void getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetAccelerationStructureMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureMemoryRequirementsNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR + getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetAccelerationStructureMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureMemoryRequirementsNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBindAccelerationStructureMemoryNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindAccelerationStructureMemoryNV.html + template + VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindAccelerationStructureMemoryNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindAccelerationStructureMemoryNV.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindAccelerationStructureMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template + VULKAN_HPP_NODISCARD Result createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + 
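  // --- Editorial usage sketch (hypothetical helper, not part of the patched header) ---
  // Queries how much memory an NV acceleration structure object itself needs via the
  // enhanced-mode vkGetAccelerationStructureMemoryRequirementsNV wrapper declared above;
  // passing eBuildScratch or eUpdateScratch instead selects the scratch-buffer requirements.
  inline vk::MemoryRequirements queryNvAccelerationStructureObjectRequirements( vk::Device device,
                                                                                 vk::AccelerationStructureNV accelerationStructure )
  {
    vk::AccelerationStructureMemoryRequirementsInfoNV info{};
    info.type                  = vk::AccelerationStructureMemoryRequirementsTypeNV::eObject;
    info.accelerationStructure = accelerationStructure;

    vk::MemoryRequirements2KHR requirements = device.getAccelerationStructureMemoryRequirementsNV( info );
    return requirements.memoryRequirements;
  }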
typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template + VULKAN_HPP_NODISCARD ResultValue + createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetRayTracingShaderGroupHandlesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesNV.html + template + VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRayTracingShaderGroupHandlesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesNV.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getRayTracingShaderGroupHandlesNV( 
VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetRayTracingShaderGroupHandlesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesNV.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getRayTracingShaderGroupHandleNV( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetAccelerationStructureHandleNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureHandleNV.html + template + VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetAccelerationStructureHandleNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureHandleNV.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getAccelerationStructureHandleNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetAccelerationStructureHandleNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureHandleNV.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCompileDeferredNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCompileDeferredNV.html + template + VULKAN_HPP_NODISCARD Result compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t shader, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkCompileDeferredNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCompileDeferredNV.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_maintenance3 === + + // wrapper function for command vkGetDescriptorSetLayoutSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupportKHR.html + template + void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command 
vkGetDescriptorSetLayoutSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupportKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetDescriptorSetLayoutSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupportKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_external_memory_host === + + // wrapper function for command vkGetMemoryHostPointerPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryHostPointerPropertiesEXT.html + template + VULKAN_HPP_NODISCARD Result getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryHostPointerPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryHostPointerPropertiesEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_calibrated_timestamps === + + // wrapper function for command vkGetCalibratedTimestampsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsEXT.html + template + VULKAN_HPP_NODISCARD Result getCalibratedTimestampsEXT( uint32_t timestampCount, + const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetCalibratedTimestampsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsEXT.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, uint64_t>>::type + getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetCalibratedTimestampsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsEXT.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, uint64_t>>::type + getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, + 
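  // --- Editorial usage sketch (hypothetical helper, not part of the patched header) ---
  // Asks the implementation whether a (possibly very large) descriptor set layout could be
  // created, using the enhanced-mode VK_KHR_maintenance3 wrapper declared above; the bindings
  // are supplied by the caller.
  inline bool isDescriptorSetLayoutSupported( vk::Device device, std::vector<vk::DescriptorSetLayoutBinding> const & bindings )
  {
    vk::DescriptorSetLayoutCreateInfo createInfo{};
    createInfo.bindingCount = static_cast<uint32_t>( bindings.size() );
    createInfo.pBindings    = bindings.data();

    vk::DescriptorSetLayoutSupport support = device.getDescriptorSetLayoutSupportKHR( createInfo );
    return support.supported == VK_TRUE;
  }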
Uint64_tAllocator & uint64_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetCalibratedTimestampsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_timeline_semaphore === + + // wrapper function for command vkGetSemaphoreCounterValueKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreCounterValueKHR.html + template + VULKAN_HPP_NODISCARD Result getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + uint64_t * pValue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSemaphoreCounterValueKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreCounterValueKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkWaitSemaphoresKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitSemaphoresKHR.html + template + VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWaitSemaphoresKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitSemaphoresKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSignalSemaphoreKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSignalSemaphoreKHR.html + template + VULKAN_HPP_NODISCARD Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSignalSemaphoreKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSignalSemaphoreKHR.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_INTEL_performance_query === + + // wrapper function for command vkInitializePerformanceApiINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkInitializePerformanceApiINTEL.html + template + VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT 
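  // --- Editorial usage sketch (hypothetical helper, not part of the patched header) ---
  // Waits for a timeline semaphore to reach `waitValue` and, on success, signals it to
  // `signalValue`, using the VK_KHR_timeline_semaphore wrappers declared above. Since
  // waitSemaphoresKHR may legitimately return eTimeout, its result is passed back to the
  // caller instead of being thrown.
  inline vk::Result waitThenSignal( vk::Device device, vk::Semaphore timeline, uint64_t waitValue, uint64_t signalValue, uint64_t timeoutNs )
  {
    vk::SemaphoreWaitInfo waitInfo{};
    waitInfo.semaphoreCount = 1;
    waitInfo.pSemaphores    = &timeline;
    waitInfo.pValues        = &waitValue;

    vk::Result result = device.waitSemaphoresKHR( waitInfo, timeoutNs );
    if ( result == vk::Result::eSuccess )
    {
      vk::SemaphoreSignalInfo signalInfo{};
      signalInfo.semaphore = timeline;
      signalInfo.value     = signalValue;
      device.signalSemaphoreKHR( signalInfo );
    }
    return result;
  }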
) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkInitializePerformanceApiINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkInitializePerformanceApiINTEL.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUninitializePerformanceApiINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUninitializePerformanceApiINTEL.html + template + void uninitializePerformanceApiINTEL( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkAcquirePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquirePerformanceConfigurationINTEL.html + template + VULKAN_HPP_NODISCARD Result acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquirePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquirePerformanceConfigurationINTEL.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkAcquirePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquirePerformanceConfigurationINTEL.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + acquirePerformanceConfigurationINTELUnique( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkReleasePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleasePerformanceConfigurationINTEL.html + template + VULKAN_HPP_NODISCARD Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkReleasePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleasePerformanceConfigurationINTEL.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command 
vkReleasePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleasePerformanceConfigurationINTEL.html + template + VULKAN_HPP_NODISCARD Result release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkReleasePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleasePerformanceConfigurationINTEL.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkGetPerformanceParameterINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPerformanceParameterINTEL.html + template + VULKAN_HPP_NODISCARD Result getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPerformanceParameterINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPerformanceParameterINTEL.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_AMD_display_native_hdr === + + // wrapper function for command vkSetLocalDimmingAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLocalDimmingAMD.html + template + void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, + VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_buffer_device_address === + + // wrapper function for command vkGetBufferDeviceAddressEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddressEXT.html + template + DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferDeviceAddressEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddressEXT.html + template + VULKAN_HPP_NAMESPACE::DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_present_wait === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWaitForPresentKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForPresentKHR.html + template + VULKAN_HPP_NODISCARD Result waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t presentId, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command 
vkWaitForPresentKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForPresentKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t presentId, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireFullScreenExclusiveModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireFullScreenExclusiveModeEXT.html + template + VULKAN_HPP_NODISCARD Result acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# else + // wrapper function for command vkAcquireFullScreenExclusiveModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireFullScreenExclusiveModeEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkReleaseFullScreenExclusiveModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseFullScreenExclusiveModeEXT.html + template + VULKAN_HPP_NODISCARD Result releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# else + // wrapper function for command vkReleaseFullScreenExclusiveModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseFullScreenExclusiveModeEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkGetDeviceGroupSurfacePresentModes2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupSurfacePresentModes2EXT.html + template + VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceGroupSurfacePresentModes2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupSurfacePresentModes2EXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_buffer_device_address === + + // wrapper function for command vkGetBufferDeviceAddressKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddressKHR.html + 
template + DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferDeviceAddressKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddressKHR.html + template + VULKAN_HPP_NAMESPACE::DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetBufferOpaqueCaptureAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureAddressKHR.html + template + uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferOpaqueCaptureAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureAddressKHR.html + template + uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceMemoryOpaqueCaptureAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryOpaqueCaptureAddressKHR.html + template + uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceMemoryOpaqueCaptureAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryOpaqueCaptureAddressKHR.html + template + uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_host_query_reset === + + // wrapper function for command vkResetQueryPoolEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetQueryPoolEXT.html + template + void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_deferred_host_operations === + + // wrapper function for command vkCreateDeferredOperationKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDeferredOperationKHR.html + template + VULKAN_HPP_NODISCARD Result createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDeferredOperationKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDeferredOperationKHR.html + template 
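  // --- Editorial usage sketch (hypothetical helper, not part of the patched header) ---
  // Retrieves the GPU virtual address of a buffer that was created with the
  // eShaderDeviceAddress usage flag, via the enhanced-mode VK_KHR_buffer_device_address
  // wrapper declared above (default `vk::` namespace assumed).
  inline vk::DeviceAddress queryBufferAddress( vk::Device device, vk::Buffer buffer )
  {
    vk::BufferDeviceAddressInfo info{};
    info.buffer = buffer;
    return device.getBufferAddressKHR( info );
  }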
+ VULKAN_HPP_NODISCARD typename ResultValueType::type + createDeferredOperationKHR( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDeferredOperationKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDeferredOperationKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDeferredOperationKHRUnique( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDeferredOperationKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDeferredOperationKHR.html + template + void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDeferredOperationKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDeferredOperationKHR.html + template + void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDeferredOperationKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDeferredOperationKHR.html + template + void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDeferredOperationKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDeferredOperationKHR.html + template + void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeferredOperationMaxConcurrencyKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeferredOperationMaxConcurrencyKHR.html + template + uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeferredOperationResultKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeferredOperationResultKHR.html + template + VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkGetDeferredOperationResultKHR, see + // 
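  // --- Editorial usage sketch (hypothetical helper, not part of the patched header) ---
  // Creates a deferred operation handle and asks how many threads could usefully join it,
  // using the VK_KHR_deferred_host_operations wrappers declared above. The handle would then
  // be passed to a deferrable command such as vkBuildAccelerationStructuresKHR.
  inline uint32_t createDeferredOpAndQueryConcurrency( vk::Device device, vk::DeferredOperationKHR & outOperation )
  {
    outOperation = device.createDeferredOperationKHR();
    return device.getDeferredOperationMaxConcurrencyKHR( outOperation );
  }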
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeferredOperationResultKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDeferredOperationJoinKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDeferredOperationJoinKHR.html + template + VULKAN_HPP_NODISCARD Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkDeferredOperationJoinKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDeferredOperationJoinKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_pipeline_executable_properties === + + // wrapper function for command vkGetPipelineExecutablePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutablePropertiesKHR.html + template + VULKAN_HPP_NODISCARD Result getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineExecutablePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutablePropertiesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPipelineExecutablePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutablePropertiesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, + PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPipelineExecutableStatisticsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableStatisticsKHR.html + template + VULKAN_HPP_NODISCARD Result getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
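  // --- Editorial usage sketch (hypothetical helper, not part of the patched header) ---
  // Lists the executables (typically one per shader stage) that the driver generated for a
  // pipeline, via the enhanced-mode VK_KHR_pipeline_executable_properties wrapper declared
  // above; assumes the pipelineExecutableInfo device feature is enabled.
  inline std::vector<vk::PipelineExecutablePropertiesKHR> listPipelineExecutables( vk::Device device, vk::Pipeline pipeline )
  {
    vk::PipelineInfoKHR pipelineInfo{};
    pipelineInfo.pipeline = pipeline;
    return device.getPipelineExecutablePropertiesKHR( pipelineInfo );
  }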
VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineExecutableStatisticsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableStatisticsKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPipelineExecutableStatisticsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableStatisticsKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, + PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPipelineExecutableInternalRepresentationsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableInternalRepresentationsKHR.html + template + VULKAN_HPP_NODISCARD Result + getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineExecutableInternalRepresentationsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableInternalRepresentationsKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPipelineExecutableInternalRepresentationsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableInternalRepresentationsKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getPipelineExecutableInternalRepresentationsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, + PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_host_image_copy === + + // wrapper function for command vkCopyMemoryToImageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToImageEXT.html + template + VULKAN_HPP_NODISCARD Result copyMemoryToImageEXT( 
const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyMemoryToImageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToImageEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyImageToMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToMemoryEXT.html + template + VULKAN_HPP_NODISCARD Result copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyImageToMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToMemoryEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyImageToImageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToImageEXT.html + template + VULKAN_HPP_NODISCARD Result copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyImageToImageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToImageEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkTransitionImageLayoutEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTransitionImageLayoutEXT.html + template + VULKAN_HPP_NODISCARD Result transitionImageLayoutEXT( uint32_t transitionCount, + const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkTransitionImageLayoutEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTransitionImageLayoutEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSubresourceLayout2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2EXT.html + template + void getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, + const 
VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSubresourceLayout2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2EXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetImageSubresourceLayout2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2EXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_map_memory2 === + + // wrapper function for command vkMapMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory2KHR.html + template + VULKAN_HPP_NODISCARD Result mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo, + void ** ppData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkMapMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory2KHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUnmapMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory2KHR.html + template + VULKAN_HPP_NODISCARD Result unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUnmapMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory2KHR.html + template + typename ResultValueType::type unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_swapchain_maintenance1 === + + // wrapper function for command vkReleaseSwapchainImagesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseSwapchainImagesEXT.html + template + VULKAN_HPP_NODISCARD Result releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT * pReleaseInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkReleaseSwapchainImagesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseSwapchainImagesEXT.html + template + typename ResultValueType::type releaseSwapchainImagesEXT( const 
VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_device_generated_commands === + + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsNV.html + template + void getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutNV.html + template + VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutNV.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutNV.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyIndirectCommandsLayoutNV, 
see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutNV.html + template + void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutNV.html + template + void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutNV.html + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutNV.html + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_private_data === + + // wrapper function for command vkCreatePrivateDataSlotEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlotEXT.html + template + VULKAN_HPP_NODISCARD Result createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreatePrivateDataSlotEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlotEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreatePrivateDataSlotEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlotEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + 
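The VK_EXT_private_data wrappers around this point follow the usual pattern of a raw pointer overload plus an enhanced overload (and a Unique variant for the slot). A minimal usage sketch, assuming enhanced mode, smart handles and exceptions are enabled, that `device` and `image` are valid handles, and that the device was created with the privateData feature (or VK_EXT_private_data) enabled; the payload value and function name are illustrative only:

    #include <vulkan/vulkan.hpp>

    // Illustrative sketch only: tag an existing image with a 64-bit payload via
    // VK_EXT_private_data and read it back.
    uint64_t tagImageWithPrivateData( vk::Device device, vk::Image image )
    {
      // Enhanced-mode overload: throws on failure, returns a unique handle.
      auto slot = device.createPrivateDataSlotEXTUnique( vk::PrivateDataSlotCreateInfo{} );

      // Non-dispatchable handles are passed to set/getPrivateDataEXT as raw 64-bit values.
      const uint64_t handle = reinterpret_cast<uint64_t>( static_cast<VkImage>( image ) );
      device.setPrivateDataEXT( vk::ObjectType::eImage, handle, *slot, 0xC0FFEEu );  // arbitrary payload
      return device.getPrivateDataEXT( vk::ObjectType::eImage, handle, *slot );      // reads back 0xC0FFEE
    }
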
+
+    // wrapper function for command vkDestroyPrivateDataSlotEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlotEXT.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    // wrapper function for command vkDestroyPrivateDataSlotEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlotEXT.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    // wrapper function for command vkSetPrivateDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetPrivateDataEXT.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+                                                   uint64_t objectHandle,
+                                                   VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+                                                   uint64_t data,
+                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    // wrapper function for command vkSetPrivateDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetPrivateDataEXT.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+                                                            uint64_t objectHandle,
+                                                            VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+                                                            uint64_t data,
+                                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    // wrapper function for command vkGetPrivateDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPrivateDataEXT.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+                            uint64_t objectHandle,
+                            VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+                            uint64_t * pData,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    // wrapper function for command vkGetPrivateDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPrivateDataEXT.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD uint64_t getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
+                                                     uint64_t objectHandle,
+                                                     VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+                                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_KHR_video_encode_queue ===
+
+    // wrapper function for command vkGetEncodedVideoSessionParametersKHR, see
+    // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEncodedVideoSessionParametersKHR.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result
+      getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo,
+                                           VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo,
+                                           size_t * pDataSize,
+                                           void * pData,
+                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    // wrapper function for command
vkGetEncodedVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEncodedVideoSessionParametersKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>>::type + getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetEncodedVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEncodedVideoSessionParametersKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>>::type + getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetEncodedVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEncodedVideoSessionParametersKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, std::vector>>::type + getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetEncodedVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEncodedVideoSessionParametersKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, std::vector>>::type + getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + // wrapper function for command vkCreateCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaModuleNV.html + template + VULKAN_HPP_NODISCARD Result createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CudaModuleNV * pModule, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaModuleNV.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateCudaModuleNV, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaModuleNV.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createCudaModuleNVUnique( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetCudaModuleCacheNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCudaModuleCacheNV.html + template + VULKAN_HPP_NODISCARD Result getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + size_t * pCacheSize, + void * pCacheData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetCudaModuleCacheNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCudaModuleCacheNV.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetCudaModuleCacheNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCudaModuleCacheNV.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getCudaModuleCacheNV( + VULKAN_HPP_NAMESPACE::CudaModuleNV module, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaFunctionNV.html + template + VULKAN_HPP_NODISCARD Result createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CudaFunctionNV * pFunction, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaFunctionNV.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaFunctionNV.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createCudaFunctionNVUnique( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCudaModuleNV, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaModuleNV.html + template + void destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaModuleNV.html + template + void destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaModuleNV.html + template + void destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaModuleNV.html + template + void destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaFunctionNV.html + template + void destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaFunctionNV.html + template + void destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaFunctionNV.html + template + void destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaFunctionNV.html + template + void destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + + // wrapper function for command 
vkExportMetalObjectsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkExportMetalObjectsEXT.html + template + void exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkExportMetalObjectsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkExportMetalObjectsEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT + exportMetalObjectsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkExportMetalObjectsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkExportMetalObjectsEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + exportMetalObjectsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_descriptor_buffer === + + // wrapper function for command vkGetDescriptorSetLayoutSizeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSizeEXT.html + template + void getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, + VULKAN_HPP_NAMESPACE::DeviceSize * pLayoutSizeInBytes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDescriptorSetLayoutSizeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSizeEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize + getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDescriptorSetLayoutBindingOffsetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutBindingOffsetEXT.html + template + void getDescriptorSetLayoutBindingOffsetEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, + uint32_t binding, + VULKAN_HPP_NAMESPACE::DeviceSize * pOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDescriptorSetLayoutBindingOffsetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutBindingOffsetEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize + getDescriptorSetLayoutBindingOffsetEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, + uint32_t binding, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDescriptorEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorEXT.html + template + void getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT * pDescriptorInfo, + size_t dataSize, + void * pDescriptor, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command 
vkGetDescriptorEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorEXT.html + template + void getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, + size_t dataSize, + void * pDescriptor, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetDescriptorEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorEXT.html + template + VULKAN_HPP_NODISCARD DescriptorType getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetBufferOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD Result + getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD Result getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageViewOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD Result + getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageViewOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getImageViewOpaqueCaptureDescriptorDataEXT( const 
VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetSamplerOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSamplerOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD Result + getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSamplerOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSamplerOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD Result + getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_device_fault === + + // wrapper function for command vkGetDeviceFaultInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceFaultInfoEXT.html + template + VULKAN_HPP_NODISCARD Result getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts, + VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + + // wrapper function for command vkGetMemoryZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryZirconHandleFUCHSIA.html + template + VULKAN_HPP_NODISCARD Result getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryZirconHandleFUCHSIA.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + 
getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetMemoryZirconHandlePropertiesFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryZirconHandlePropertiesFUCHSIA.html + template + VULKAN_HPP_NODISCARD Result + getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryZirconHandlePropertiesFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryZirconHandlePropertiesFUCHSIA.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + + // wrapper function for command vkImportSemaphoreZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreZirconHandleFUCHSIA.html + template + VULKAN_HPP_NODISCARD Result + importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkImportSemaphoreZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreZirconHandleFUCHSIA.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetSemaphoreZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreZirconHandleFUCHSIA.html + template + VULKAN_HPP_NODISCARD Result getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSemaphoreZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreZirconHandleFUCHSIA.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== 
VK_FUCHSIA_buffer_collection === + + // wrapper function for command vkCreateBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferCollectionFUCHSIA.html + template + VULKAN_HPP_NODISCARD Result createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferCollectionFUCHSIA.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferCollectionFUCHSIA.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSetBufferCollectionImageConstraintsFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetBufferCollectionImageConstraintsFUCHSIA.html + template + VULKAN_HPP_NODISCARD Result + setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetBufferCollectionImageConstraintsFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetBufferCollectionImageConstraintsFUCHSIA.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSetBufferCollectionBufferConstraintsFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetBufferCollectionBufferConstraintsFUCHSIA.html + template + VULKAN_HPP_NODISCARD Result + setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetBufferCollectionBufferConstraintsFUCHSIA, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetBufferCollectionBufferConstraintsFUCHSIA.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferCollectionFUCHSIA.html + template + void destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferCollectionFUCHSIA.html + template + void destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferCollectionFUCHSIA.html + template + void destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferCollectionFUCHSIA.html + template + void destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetBufferCollectionPropertiesFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferCollectionPropertiesFUCHSIA.html + template + VULKAN_HPP_NODISCARD Result getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferCollectionPropertiesFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferCollectionPropertiesFUCHSIA.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + + // wrapper function for command vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI, 
see
+    // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result
+      getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass,
+                                               VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize,
+                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    // wrapper function for command vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI, see
+    // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Extent2D>::type
+      getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_NV_external_memory_rdma ===
+
+    // wrapper function for command vkGetMemoryRemoteAddressNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryRemoteAddressNV.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo,
+                                                          VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress,
+                                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    // wrapper function for command vkGetMemoryRemoteAddressNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryRemoteAddressNV.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::RemoteAddressNV>::type
+      getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_EXT_pipeline_properties ===
+
+    // wrapper function for command vkGetPipelinePropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelinePropertiesEXT.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo,
+                                                          VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties,
+                                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    // wrapper function for command vkGetPipelinePropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelinePropertiesEXT.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::BaseOutStructure>::type
+      getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_EXT_opacity_micromap ===
+
+    // wrapper function for command vkCreateMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMicromapEXT.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo,
+                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                                   VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap,
+                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    // wrapper function for command vkCreateMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMicromapEXT.html
+
template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMicromapEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createMicromapEXTUnique( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyMicromapEXT.html + template + void destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyMicromapEXT.html + template + void destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyMicromapEXT.html + template + void destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyMicromapEXT.html + template + void destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBuildMicromapsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBuildMicromapsEXT.html + template + VULKAN_HPP_NODISCARD Result buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBuildMicromapsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBuildMicromapsEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for 
command vkCopyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMicromapEXT.html + template + VULKAN_HPP_NODISCARD Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMicromapEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyMicromapToMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMicromapToMemoryEXT.html + template + VULKAN_HPP_NODISCARD Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyMicromapToMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMicromapToMemoryEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyMemoryToMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToMicromapEXT.html + template + VULKAN_HPP_NODISCARD Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyMemoryToMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToMicromapEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkWriteMicromapsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteMicromapsPropertiesEXT.html + template + VULKAN_HPP_NODISCARD Result writeMicromapsPropertiesEXT( uint32_t micromapCount, + const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + void * pData, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWriteMicromapsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteMicromapsPropertiesEXT.html + template , + 
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkWriteMicromapsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteMicromapsPropertiesEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceMicromapCompatibilityEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMicromapCompatibilityEXT.html + template + void getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT * pVersionInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceMicromapCompatibilityEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMicromapCompatibilityEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetMicromapBuildSizesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMicromapBuildSizesEXT.html + template + void getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo, + VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMicromapBuildSizesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMicromapBuildSizesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT + getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_pageable_device_local_memory === + + // wrapper function for command vkSetDeviceMemoryPriorityEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDeviceMemoryPriorityEXT.html + template + void setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + float priority, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance4 === + + // wrapper function for command vkGetDeviceBufferMemoryRequirementsKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirementsKHR.html + template + void getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceBufferMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirementsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetDeviceBufferMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirementsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceImageMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirementsKHR.html + template + void getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceImageMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirementsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetDeviceImageMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirementsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceImageSparseMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirementsKHR.html + template + void getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceImageSparseMemoryRequirementsKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirementsKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetDeviceImageSparseMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirementsKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VALVE_descriptor_set_host_mapping === + + // wrapper function for command vkGetDescriptorSetLayoutHostMappingInfoVALVE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutHostMappingInfoVALVE.html + template + void getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDescriptorSetLayoutHostMappingInfoVALVE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutHostMappingInfoVALVE.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE + getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDescriptorSetHostMappingVALVE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetHostMappingVALVE.html + template + void getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + void ** ppData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDescriptorSetHostMappingVALVE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetHostMappingVALVE.html + template + VULKAN_HPP_NODISCARD void * getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_device_generated_commands_compute === + + // wrapper function for command vkGetPipelineIndirectMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectMemoryRequirementsNV.html + template + void getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo, + 
VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineIndirectMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectMemoryRequirementsNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetPipelineIndirectMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectMemoryRequirementsNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPipelineIndirectDeviceAddressNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectDeviceAddressNV.html + template + DeviceAddress getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineIndirectDeviceAddressNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectDeviceAddressNV.html + template + VULKAN_HPP_NAMESPACE::DeviceAddress getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_shader_module_identifier === + + // wrapper function for command vkGetShaderModuleIdentifierEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderModuleIdentifierEXT.html + template + void getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetShaderModuleIdentifierEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderModuleIdentifierEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT + getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetShaderModuleCreateInfoIdentifierEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderModuleCreateInfoIdentifierEXT.html + template + void getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; 
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetShaderModuleCreateInfoIdentifierEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderModuleCreateInfoIdentifierEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT + getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_optical_flow === + + // wrapper function for command vkCreateOpticalFlowSessionNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateOpticalFlowSessionNV.html + template + VULKAN_HPP_NODISCARD Result createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateOpticalFlowSessionNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateOpticalFlowSessionNV.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateOpticalFlowSessionNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateOpticalFlowSessionNV.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createOpticalFlowSessionNVUnique( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyOpticalFlowSessionNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyOpticalFlowSessionNV.html + template + void destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyOpticalFlowSessionNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyOpticalFlowSessionNV.html + template + void destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyOpticalFlowSessionNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyOpticalFlowSessionNV.html + template + void destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyOpticalFlowSessionNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyOpticalFlowSessionNV.html + template + void destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindOpticalFlowSessionImageNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindOpticalFlowSessionImageNV.html + template + VULKAN_HPP_NODISCARD Result bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, + VULKAN_HPP_NAMESPACE::ImageView view, + VULKAN_HPP_NAMESPACE::ImageLayout layout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkBindOpticalFlowSessionImageNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindOpticalFlowSessionImageNV.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, + VULKAN_HPP_NAMESPACE::ImageView view, + VULKAN_HPP_NAMESPACE::ImageLayout layout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_maintenance5 === + + // wrapper function for command vkGetRenderingAreaGranularityKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderingAreaGranularityKHR.html + template + void getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRenderingAreaGranularityKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderingAreaGranularityKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D + getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceImageSubresourceLayoutKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayoutKHR.html + template + void getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceImageSubresourceLayoutKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayoutKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, + Dispatch const 
& d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetDeviceImageSubresourceLayoutKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayoutKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSubresourceLayout2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2KHR.html + template + void getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSubresourceLayout2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetImageSubresourceLayout2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_AMD_anti_lag === + + // wrapper function for command vkAntiLagUpdateAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAntiLagUpdateAMD.html + template + void antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAntiLagUpdateAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAntiLagUpdateAMD.html + template + void antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_shader_object === + + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template + VULKAN_HPP_NODISCARD Result createShadersEXT( uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template , + typename Dispatch = 
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template + VULKAN_HPP_NODISCARD ResultValue + createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, ShaderEXTAllocator>> + createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, ShaderEXTAllocator>> + createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template + VULKAN_HPP_NODISCARD ResultValue> + createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyShaderEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderEXT.html + template + void destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyShaderEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderEXT.html + template + void destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT 
) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyShaderEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderEXT.html + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyShaderEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderEXT.html + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetShaderBinaryDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderBinaryDataEXT.html + template + VULKAN_HPP_NODISCARD Result getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + size_t * pDataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetShaderBinaryDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderBinaryDataEXT.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetShaderBinaryDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderBinaryDataEXT.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getShaderBinaryDataEXT( + VULKAN_HPP_NAMESPACE::ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_pipeline_binary === + + // wrapper function for command vkCreatePipelineBinariesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineBinariesKHR.html + template + VULKAN_HPP_NODISCARD Result createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR * pBinaries, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreatePipelineBinariesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineBinariesKHR.html + template < + typename PipelineBinaryKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // 
wrapper function for command vkCreatePipelineBinariesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineBinariesKHR.html + template < + typename PipelineBinaryKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator, + PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreatePipelineBinariesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineBinariesKHR.html + template >, + typename std::enable_if< + std::is_same>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineBinaryKHRAllocator>> + createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkCreatePipelineBinariesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineBinariesKHR.html + template >, + typename std::enable_if< + std::is_same>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineBinaryKHRAllocator>> + createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator, + PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipelineBinaryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineBinaryKHR.html + template + void destroyPipelineBinaryKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipelineBinaryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineBinaryKHR.html + template + void destroyPipelineBinaryKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipelineBinaryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineBinaryKHR.html + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipelineBinaryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineBinaryKHR.html + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, + Optional allocator 
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPipelineKeyKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineKeyKHR.html + template + VULKAN_HPP_NODISCARD Result getPipelineKeyKHR( const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo, + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineKey, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineKeyKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineKeyKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getPipelineKeyKHR( Optional pipelineCreateInfo VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPipelineBinaryDataKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineBinaryDataKHR.html + template + VULKAN_HPP_NODISCARD Result getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR * pInfo, + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKey, + size_t * pPipelineBinaryDataSize, + void * pPipelineBinaryData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineBinaryDataKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineBinaryDataKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>>::type + getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPipelineBinaryDataKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineBinaryDataKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>>::type + getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkReleaseCapturedPipelineDataKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseCapturedPipelineDataKHR.html + template + Result releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR * pInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkReleaseCapturedPipelineDataKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseCapturedPipelineDataKHR.html + template + void releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR & info, + Optional allocator 
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_QCOM_tile_properties === + + // wrapper function for command vkGetFramebufferTilePropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFramebufferTilePropertiesQCOM.html + template + VULKAN_HPP_NODISCARD Result getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + uint32_t * pPropertiesCount, + VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetFramebufferTilePropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFramebufferTilePropertiesQCOM.html + template < + typename TilePropertiesQCOMAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetFramebufferTilePropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFramebufferTilePropertiesQCOM.html + template < + typename TilePropertiesQCOMAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDynamicRenderingTilePropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDynamicRenderingTilePropertiesQCOM.html + template + Result getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, + VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDynamicRenderingTilePropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDynamicRenderingTilePropertiesQCOM.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::TilePropertiesQCOM + getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_cooperative_vector === + + // wrapper function for command vkConvertCooperativeVectorMatrixNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkConvertCooperativeVectorMatrixNV.html + template + VULKAN_HPP_NODISCARD Result convertCooperativeVectorMatrixNV( const VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkConvertCooperativeVectorMatrixNV, see 
+ // https://registry.khronos.org/vulkan/specs/latest/man/html/vkConvertCooperativeVectorMatrixNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result convertCooperativeVectorMatrixNV( const VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_low_latency2 === + + // wrapper function for command vkSetLatencySleepModeNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLatencySleepModeNV.html + template + VULKAN_HPP_NODISCARD Result setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV * pSleepModeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetLatencySleepModeNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLatencySleepModeNV.html + template + typename ResultValueType::type setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkLatencySleepNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkLatencySleepNV.html + template + Result latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV * pSleepInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkLatencySleepNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkLatencySleepNV.html + template + void latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSetLatencyMarkerNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLatencyMarkerNV.html + template + void setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV * pLatencyMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetLatencyMarkerNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLatencyMarkerNV.html + template + void setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetLatencyTimingsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetLatencyTimingsNV.html + template + void getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV * pLatencyMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetLatencyTimingsNV, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetLatencyTimingsNV.html + template < + typename LatencyTimingsFrameReportNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetLatencyTimingsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetLatencyTimingsNV.html + template < + typename LatencyTimingsFrameReportNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + LatencyTimingsFrameReportNVAllocator & latencyTimingsFrameReportNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + + // wrapper function for command vkGetScreenBufferPropertiesQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetScreenBufferPropertiesQNX.html + template + VULKAN_HPP_NODISCARD Result getScreenBufferPropertiesQNX( const struct _screen_buffer * buffer, + VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetScreenBufferPropertiesQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetScreenBufferPropertiesQNX.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetScreenBufferPropertiesQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetScreenBufferPropertiesQNX.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_calibrated_timestamps === + + // wrapper function for command vkGetCalibratedTimestampsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsKHR.html + template + VULKAN_HPP_NODISCARD Result getCalibratedTimestampsKHR( uint32_t timestampCount, + const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetCalibratedTimestampsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, uint64_t>>::type + getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetCalibratedTimestampsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, uint64_t>>::type + getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, + Uint64_tAllocator & uint64_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetCalibratedTimestampsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_external_compute_queue === + + // wrapper function for command vkCreateExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExternalComputeQueueNV.html + template + VULKAN_HPP_NODISCARD Result createExternalComputeQueueNV( const VULKAN_HPP_NAMESPACE::ExternalComputeQueueCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV * pExternalQueue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExternalComputeQueueNV.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createExternalComputeQueueNV( const VULKAN_HPP_NAMESPACE::ExternalComputeQueueCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExternalComputeQueueNV.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createExternalComputeQueueNVUnique( const VULKAN_HPP_NAMESPACE::ExternalComputeQueueCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyExternalComputeQueueNV.html + template + void destroyExternalComputeQueueNV( VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV externalQueue, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyExternalComputeQueueNV.html + template + void destroyExternalComputeQueueNV( VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV externalQueue, + Optional allocator 
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyExternalComputeQueueNV.html + template + void destroy( VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV externalQueue, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyExternalComputeQueueNV.html + template + void destroy( VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV externalQueue, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_cluster_acceleration_structure === + + // wrapper function for command vkGetClusterAccelerationStructureBuildSizesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetClusterAccelerationStructureBuildSizesNV.html + template + void getClusterAccelerationStructureBuildSizesNV( const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetClusterAccelerationStructureBuildSizesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetClusterAccelerationStructureBuildSizesNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + getClusterAccelerationStructureBuildSizesNV( const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_partitioned_acceleration_structure === + + // wrapper function for command vkGetPartitionedAccelerationStructuresBuildSizesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPartitionedAccelerationStructuresBuildSizesNV.html + template + void getPartitionedAccelerationStructuresBuildSizesNV( const VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV * pInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPartitionedAccelerationStructuresBuildSizesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPartitionedAccelerationStructuresBuildSizesNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + getPartitionedAccelerationStructuresBuildSizesNV( const VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_device_generated_commands === + + // wrapper function for command 
vkGetGeneratedCommandsMemoryRequirementsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsEXT.html + template + void getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutEXT.html + template + VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT * pIndirectCommandsLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createIndirectCommandsLayoutEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createIndirectCommandsLayoutEXTUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutEXT.html + template + void destroyIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutEXT.html + template + void destroyIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutEXT.html + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutEXT.html + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectExecutionSetEXT.html + template + VULKAN_HPP_NODISCARD Result createIndirectExecutionSetEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT * pIndirectExecutionSet, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectExecutionSetEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createIndirectExecutionSetEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectExecutionSetEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createIndirectExecutionSetEXTUnique( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyIndirectExecutionSetEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectExecutionSetEXT.html + template + void destroyIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectExecutionSetEXT.html + template + void destroyIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectExecutionSetEXT.html + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectExecutionSetEXT.html + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUpdateIndirectExecutionSetPipelineEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateIndirectExecutionSetPipelineEXT.html + template + void updateIndirectExecutionSetPipelineEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT * pExecutionSetWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUpdateIndirectExecutionSetPipelineEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateIndirectExecutionSetPipelineEXT.html + template + void updateIndirectExecutionSetPipelineEXT( + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUpdateIndirectExecutionSetShaderEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateIndirectExecutionSetShaderEXT.html + template + void updateIndirectExecutionSetShaderEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT * pExecutionSetWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper 
function for command vkUpdateIndirectExecutionSetShaderEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateIndirectExecutionSetShaderEXT.html + template + void updateIndirectExecutionSetShaderEXT( + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + + // wrapper function for command vkGetMemoryMetalHandleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryMetalHandleEXT.html + template + VULKAN_HPP_NODISCARD Result getMemoryMetalHandleEXT( const VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT * pGetMetalHandleInfo, + void ** pHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryMetalHandleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryMetalHandleEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getMemoryMetalHandleEXT( const VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT & getMetalHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetMemoryMetalHandlePropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryMetalHandlePropertiesEXT.html + template + VULKAN_HPP_NODISCARD Result getMemoryMetalHandlePropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHandle, + VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT * pMemoryMetalHandleProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryMetalHandlePropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryMetalHandlePropertiesEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getMemoryMetalHandlePropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HandleType const & handle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + operator VkDevice() const VULKAN_HPP_NOEXCEPT + { + return m_device; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_device != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_device == VK_NULL_HANDLE; + } + + private: + VkDevice m_device = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Device; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Device; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Device; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkDisplayModeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModeKHR.html + class DisplayModeKHR + { + public: + using CType = VkDisplayModeKHR; + using NativeType = 
VkDisplayModeKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR; + + public: + DisplayModeKHR() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + DisplayModeKHR( DisplayModeKHR const & rhs ) = default; + DisplayModeKHR & operator=( DisplayModeKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DisplayModeKHR( DisplayModeKHR && rhs ) = default; + DisplayModeKHR & operator=( DisplayModeKHR && rhs ) = default; +#else + DisplayModeKHR( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT : m_displayModeKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ) ) {} + + DisplayModeKHR & operator=( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_displayModeKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT : m_displayModeKHR( displayModeKHR ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + DisplayModeKHR & operator=( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT + { + m_displayModeKHR = displayModeKHR; + return *this; + } +#endif + + DisplayModeKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_displayModeKHR = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR == VK_NULL_HANDLE; + } + + private: + VkDisplayModeKHR m_displayModeKHR = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkPhysicalDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevice.html + class PhysicalDevice + { + public: + using CType = VkPhysicalDevice; + using NativeType = VkPhysicalDevice; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice; + + public: + PhysicalDevice() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue + + PhysicalDevice( PhysicalDevice const & rhs ) = default; + PhysicalDevice & operator=( PhysicalDevice const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + PhysicalDevice( PhysicalDevice && rhs ) = default; + PhysicalDevice & operator=( PhysicalDevice && rhs ) = default; +#else + PhysicalDevice( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT : m_physicalDevice( VULKAN_HPP_NAMESPACE::exchange( 
rhs.m_physicalDevice, {} ) ) {} + + PhysicalDevice & operator=( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT + { + m_physicalDevice = VULKAN_HPP_NAMESPACE::exchange( rhs.m_physicalDevice, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT : m_physicalDevice( physicalDevice ) {} + + PhysicalDevice & operator=( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT + { + m_physicalDevice = physicalDevice; + return *this; + } + + PhysicalDevice & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_physicalDevice = {}; + return *this; + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkGetPhysicalDeviceFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures.html + template + void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures + getFeatures( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties.html + template + void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties + getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties.html + template + VULKAN_HPP_NODISCARD Result getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags 
flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties.html + template + void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties + getProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties.html + template + void getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties.html + template + void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceMemoryProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties + getMemoryProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDevice.html + template + VULKAN_HPP_NODISCARD Result createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Device * pDevice, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDevice.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDevice.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkEnumerateDeviceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceExtensionProperties.html + template + VULKAN_HPP_NODISCARD Result enumerateDeviceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumerateDeviceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceExtensionProperties.html + template < + typename ExtensionPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceExtensionProperties( Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkEnumerateDeviceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceExtensionProperties.html + template < + typename ExtensionPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceExtensionProperties( Optional layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkEnumerateDeviceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceLayerProperties.html + template + VULKAN_HPP_NODISCARD Result enumerateDeviceLayerProperties( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumerateDeviceLayerProperties, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceLayerProperties.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkEnumerateDeviceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceLayerProperties.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties.html + template + void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties.html + template < + typename SparseImageFormatPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties.html + template < + typename SparseImageFormatPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_1 === + + // wrapper function for command vkGetPhysicalDeviceFeatures2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2.html + template + void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceFeatures2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetPhysicalDeviceFeatures2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2.html + template + void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetPhysicalDeviceProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2.html + template + void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 + getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetPhysicalDeviceFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2.html + template + 
VULKAN_HPP_NODISCARD Result getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html + template + void getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2( StructureChainAllocator & 
structureChainAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2.html + template + void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2.html + template + void getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceExternalBufferProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalBufferProperties.html + template + void getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + 
VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalBufferProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalBufferProperties.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties + getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceExternalFenceProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalFenceProperties.html + template + void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalFenceProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalFenceProperties.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties + getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceExternalSemaphoreProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalSemaphoreProperties.html + template + void getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalSemaphoreProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalSemaphoreProperties.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_3 === + + // wrapper function for command vkGetPhysicalDeviceToolProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolProperties.html + template + VULKAN_HPP_NODISCARD Result getToolProperties( uint32_t * pToolCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceToolProperties, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolProperties.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getToolProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceToolProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolProperties.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_surface === + + // wrapper function for command vkGetPhysicalDeviceSurfaceSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceSupportKHR.html + template + VULKAN_HPP_NODISCARD Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::Bool32 * pSupported, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfaceSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceSupportKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getSurfaceSupportKHR( + uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD Result getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSurfaceFormatsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormatsKHR.html + template + VULKAN_HPP_NODISCARD Result getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfaceFormatsKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormatsKHR.html + template < + typename SurfaceFormatKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceSurfaceFormatsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormatsKHR.html + template < + typename SurfaceFormatKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModesKHR.html + template + VULKAN_HPP_NODISCARD Result getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_swapchain === + + // wrapper function for command vkGetPhysicalDevicePresentRectanglesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDevicePresentRectanglesKHR.html + template + VULKAN_HPP_NODISCARD Result getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pRectCount, + VULKAN_HPP_NAMESPACE::Rect2D * pRects, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDevicePresentRectanglesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDevicePresentRectanglesKHR.html 
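+    // Illustrative usage sketch (not part of the generated header; `physicalDevice` and `surface`
+    // are the application's own vk::PhysicalDevice / vk::SurfaceKHR): with enhanced mode enabled
+    // and the default "vk" namespace, the surface queries declared above are typically combined
+    // when preparing a swapchain, e.g.
+    //   vk::SurfaceCapabilitiesKHR        caps    = physicalDevice.getSurfaceCapabilitiesKHR( surface );
+    //   std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
+    //   std::vector<vk::PresentModeKHR>   modes   = physicalDevice.getSurfacePresentModesKHR( surface );
+    // Error reporting follows the usual vulkan.hpp convention: exceptions by default, or result
+    // values when VULKAN_HPP_NO_EXCEPTIONS is defined.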
+ template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDevicePresentRectanglesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDevicePresentRectanglesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getPresentRectanglesKHR( + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_display === + + // wrapper function for command vkGetPhysicalDeviceDisplayPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPropertiesKHR.html + template + VULKAN_HPP_NODISCARD Result getDisplayPropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceDisplayPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPropertiesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceDisplayPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPropertiesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceDisplayPlanePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlanePropertiesKHR.html + template + VULKAN_HPP_NODISCARD Result getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceDisplayPlanePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlanePropertiesKHR.html + template < + typename DisplayPlanePropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPlanePropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceDisplayPlanePropertiesKHR, see 
+ // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlanePropertiesKHR.html + template < + typename DisplayPlanePropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDisplayPlaneSupportedDisplaysKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneSupportedDisplaysKHR.html + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + uint32_t * pDisplayCount, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDisplayPlaneSupportedDisplaysKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneSupportedDisplaysKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetDisplayPlaneSupportedDisplaysKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneSupportedDisplaysKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( + uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDisplayModePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModePropertiesKHR.html + template + VULKAN_HPP_NODISCARD Result getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDisplayModePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModePropertiesKHR.html + template < + typename DisplayModePropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetDisplayModePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModePropertiesKHR.html + template < + typename DisplayModePropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + 
VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateDisplayModeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayModeKHR.html + template + VULKAN_HPP_NODISCARD Result createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDisplayModeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayModeKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDisplayModeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayModeKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDisplayPlaneCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, + uint32_t planeIndex, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDisplayPlaneCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getDisplayPlaneCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + // wrapper function for command vkGetPhysicalDeviceXlibPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceXlibPresentationSupportKHR.html + template + Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, + Display * dpy, + VisualID visualID, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command 
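+  // Illustrative sketch (comment only): a direct-to-display path might create a display mode
+  // and query plane capabilities with the enhanced-mode overloads above. `display`,
+  // `planeIndex`, and the 1920x1080 @ 60 Hz parameters are placeholder assumptions:
+  //
+  //   vk::DisplayModeCreateInfoKHR modeInfo( {}, vk::DisplayModeParametersKHR( { 1920, 1080 }, 60000 ) );
+  //   vk::DisplayModeKHR mode = physicalDevice.createDisplayModeKHR( display, modeInfo );
+  //   vk::DisplayPlaneCapabilitiesKHR planeCaps = physicalDevice.getDisplayPlaneCapabilitiesKHR( mode, planeIndex );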
vkGetPhysicalDeviceXlibPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceXlibPresentationSupportKHR.html + template + VULKAN_HPP_NAMESPACE::Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, + Display & dpy, + VisualID visualID, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + // wrapper function for command vkGetPhysicalDeviceXcbPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceXcbPresentationSupportKHR.html + template + Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceXcbPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceXcbPresentationSupportKHR.html + template + VULKAN_HPP_NAMESPACE::Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t & connection, + xcb_visualid_t visual_id, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + // wrapper function for command vkGetPhysicalDeviceWaylandPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceWaylandPresentationSupportKHR.html + template + Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + struct wl_display * display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceWaylandPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceWaylandPresentationSupportKHR.html + template + VULKAN_HPP_NAMESPACE::Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + struct wl_display & display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + // wrapper function for command vkGetPhysicalDeviceWin32PresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceWin32PresentationSupportKHR.html + template + Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_video_queue === + + // wrapper function for command vkGetPhysicalDeviceVideoCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD Result getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile, + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities, + Dispatch const & d 
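+  // Illustrative sketch (comment only): the per-platform presentation-support queries above are
+  // typically consulted when picking a queue family; `queueFamilyIndex` is a placeholder:
+  //
+  // #if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //   if ( physicalDevice.getWin32PresentationSupportKHR( queueFamilyIndex ) ) { /* this family can present */ }
+  // #endif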
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceVideoCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceVideoCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceVideoFormatPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html + template + VULKAN_HPP_NODISCARD Result getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, + uint32_t * pVideoFormatPropertyCount, + VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceVideoFormatPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html + template < + typename VideoFormatPropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceVideoFormatPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html + template < + typename VideoFormatPropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceVideoFormatPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceVideoFormatPropertiesKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_external_memory_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalImageFormatPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalImageFormatPropertiesNV.html + template + VULKAN_HPP_NODISCARD Result getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalImageFormatPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalImageFormatPropertiesNV.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_get_physical_device_properties2 === + + // wrapper function for command vkGetPhysicalDeviceFeatures2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2KHR.html + template + void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceFeatures2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetPhysicalDeviceFeatures2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceProperties2KHR, see + // 
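+  // Illustrative sketch (comment only): the StructureChain overload of getFeatures2KHR lets a
+  // caller pull extended feature structs in a single query; chaining PhysicalDeviceVulkan12Features
+  // here is just an example assumption:
+  //
+  //   auto chain = physicalDevice.getFeatures2KHR<vk::PhysicalDeviceFeatures2,
+  //                                               vk::PhysicalDeviceVulkan12Features>();
+  //   bool timeline = chain.get<vk::PhysicalDeviceVulkan12Features>().timelineSemaphore;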
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2KHR.html + template + void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetPhysicalDeviceProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2KHR.html + template + void getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetPhysicalDeviceFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2KHR.html + template + VULKAN_HPP_NODISCARD Result getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2KHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2KHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2KHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html + template + void getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2KHR.html + template + void getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2KHR.html + template + VULKAN_HPP_NODISCARD 
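+  // Illustrative sketch (comment only): the vector-returning overload above is the usual way to
+  // enumerate queue families through the KHR alias; `physicalDevice` is assumed to be valid:
+  //
+  //   auto queueFamilies = physicalDevice.getQueueFamilyProperties2KHR();
+  //   for ( auto const & qf : queueFamilies )
+  //   {
+  //     // qf.queueFamilyProperties.queueFlags and .queueCount describe the family
+  //   }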
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2KHR.html + template + void getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2KHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2KHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_external_memory_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalBufferPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalBufferPropertiesKHR.html + template + void getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalBufferPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalBufferPropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties + getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_external_semaphore_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalSemaphorePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalSemaphorePropertiesKHR.html + template + void getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalSemaphorePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalSemaphorePropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_direct_mode_display === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkReleaseDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseDisplayEXT.html + template + Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkReleaseDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseDisplayEXT.html + template + void releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + // wrapper function for command vkAcquireXlibDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireXlibDisplayEXT.html + template + VULKAN_HPP_NODISCARD Result acquireXlibDisplayEXT( Display * dpy, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireXlibDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireXlibDisplayEXT.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetRandROutputDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRandROutputDisplayEXT.html + template + VULKAN_HPP_NODISCARD Result getRandROutputDisplayEXT( Display * dpy, + RROutput rrOutput, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRandROutputDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRandROutputDisplayEXT.html + template + VULKAN_HPP_NODISCARD typename 
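+  // Illustrative sketch (comment only): querying export support for an opaque-fd semaphore via
+  // the enhanced overload above; the chosen handle type is a placeholder assumption:
+  //
+  //   vk::PhysicalDeviceExternalSemaphoreInfo semInfo( vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
+  //   vk::ExternalSemaphoreProperties semProps = physicalDevice.getExternalSemaphorePropertiesKHR( semInfo );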
ResultValueType::type + getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkGetRandROutputDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRandROutputDisplayEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2EXT.html + template + VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2EXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_external_fence_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalFencePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalFencePropertiesKHR.html + template + void getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalFencePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalFencePropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties + getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_performance_query === + + // wrapper function for command vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR.html + template + VULKAN_HPP_NODISCARD Result + enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command 
vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR.html + template , + typename PerformanceCounterDescriptionKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value && + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType, + std::vector>>::type + enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR.html + template , + typename PerformanceCounterDescriptionKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value && + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType, + std::vector>>::type + enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, + PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, + PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR.html + template + void getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR.html + template + VULKAN_HPP_NODISCARD uint32_t + getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_get_surface_capabilities2 === + + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2KHR.html + template + VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2KHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & 
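+  // Illustrative sketch (comment only): the enhanced overload above returns the performance
+  // counters and their descriptions as a pair of vectors; `queueFamilyIndex` is a placeholder:
+  //
+  //   auto [counters, descriptions] =
+  //     physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex );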
surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2KHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSurfaceFormats2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html + template + VULKAN_HPP_NODISCARD Result getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfaceFormats2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html + template < + typename SurfaceFormat2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceSurfaceFormats2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html + template < + typename SurfaceFormat2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceSurfaceFormats2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceSurfaceFormats2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_get_display_properties2 === + + // 
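+  // Illustrative sketch (comment only): the VK_KHR_get_surface_capabilities2 queries above take a
+  // PhysicalDeviceSurfaceInfo2KHR wrapper; `surface` is assumed to be a valid vk::SurfaceKHR:
+  //
+  //   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
+  //   vk::SurfaceCapabilities2KHR caps2 = physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo );
+  //   std::vector<vk::SurfaceFormat2KHR> formats2 = physicalDevice.getSurfaceFormats2KHR( surfaceInfo );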
wrapper function for command vkGetPhysicalDeviceDisplayProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayProperties2KHR.html + template + VULKAN_HPP_NODISCARD Result getDisplayProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceDisplayProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayProperties2KHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceDisplayProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayProperties2KHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceDisplayPlaneProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlaneProperties2KHR.html + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceDisplayPlaneProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlaneProperties2KHR.html + template < + typename DisplayPlaneProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPlaneProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceDisplayPlaneProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlaneProperties2KHR.html + template < + typename DisplayPlaneProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDisplayModeProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModeProperties2KHR.html + template + VULKAN_HPP_NODISCARD Result getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + 
VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDisplayModeProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModeProperties2KHR.html + template < + typename DisplayModeProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetDisplayModeProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModeProperties2KHR.html + template < + typename DisplayModeProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetDisplayModeProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModeProperties2KHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetDisplayModeProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModeProperties2KHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDisplayPlaneCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneCapabilities2KHR.html + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDisplayPlaneCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneCapabilities2KHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_sample_locations === + + // wrapper function for command 
vkGetPhysicalDeviceMultisamplePropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMultisamplePropertiesEXT.html + template + void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceMultisamplePropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMultisamplePropertiesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT + getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_calibrated_timestamps === + + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsEXT.html + template + VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, + VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsEXT.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibrateableTimeDomainsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsEXT.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibrateableTimeDomainsEXT( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_fragment_shading_rate === + + // wrapper function for command vkGetPhysicalDeviceFragmentShadingRatesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFragmentShadingRatesKHR.html + template + VULKAN_HPP_NODISCARD Result getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceFragmentShadingRatesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFragmentShadingRatesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getFragmentShadingRatesKHR( Dispatch const & d 
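+  // Illustrative sketch (comment only): two of the simpler queries above, assuming a valid
+  // `physicalDevice` and the default dispatcher:
+  //
+  //   vk::MultisamplePropertiesEXT msProps = physicalDevice.getMultisamplePropertiesEXT( vk::SampleCountFlagBits::e4 );
+  //   std::vector<vk::TimeDomainKHR> timeDomains = physicalDevice.getCalibrateableTimeDomainsEXT();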
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceFragmentShadingRatesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFragmentShadingRatesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_tooling_info === + + // wrapper function for command vkGetPhysicalDeviceToolPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolPropertiesEXT.html + template + VULKAN_HPP_NODISCARD Result getToolPropertiesEXT( uint32_t * pToolCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceToolPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolPropertiesEXT.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getToolPropertiesEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceToolPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolPropertiesEXT.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_cooperative_matrix === + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesNV.html + template + VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesNV( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesNV.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeMatrixPropertiesNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesNV.html + template , + typename Dispatch = 
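+  // Illustrative sketch (comment only): enumerating supported fragment shading rates and attached
+  // developer tools via the vector-returning overloads above:
+  //
+  //   auto shadingRates = physicalDevice.getFragmentShadingRatesKHR();
+  //   auto tools        = physicalDevice.getToolPropertiesEXT();
+  //   for ( auto const & tool : tools ) { /* tool.name, tool.version, tool.purposes */ }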
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_coverage_reduction_mode === + + // wrapper function for command vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV.html + template + VULKAN_HPP_NODISCARD Result + getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t * pCombinationCount, + VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getSupportedFramebufferMixedSamplesCombinationsNV( FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModes2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModes2EXT.html + template + VULKAN_HPP_NODISCARD Result getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModes2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModes2EXT.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModes2EXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModes2EXT.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_acquire_drm_display === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireDrmDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireDrmDisplayEXT.html + template + VULKAN_HPP_NODISCARD Result acquireDrmDisplayEXT( int32_t drmFd, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + // wrapper function for command vkAcquireDrmDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireDrmDisplayEXT.html + template + typename ResultValueType::type + acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkGetDrmDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDrmDisplayEXT.html + template + VULKAN_HPP_NODISCARD Result getDrmDisplayEXT( int32_t drmFd, + uint32_t connectorId, + VULKAN_HPP_NAMESPACE::DisplayKHR * display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDrmDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDrmDisplayEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkGetDrmDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDrmDisplayEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_video_encode_queue === + + // wrapper function for command vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR.html + template + VULKAN_HPP_NODISCARD Result + getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo, + VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR.html + template + 
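+  // Illustrative sketch (comment only): a headless/DRM path might look up and acquire a display
+  // with the overloads above; `drmFd` and `connectorId` are placeholder values obtained from libdrm:
+  //
+  //   vk::DisplayKHR drmDisplay = physicalDevice.getDrmDisplayEXT( drmFd, connectorId );
+  //   physicalDevice.acquireDrmDisplayEXT( drmFd, drmDisplay );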
VULKAN_HPP_NODISCARD typename ResultValueType::type + getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireWinrtDisplayNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireWinrtDisplayNV.html + template + VULKAN_HPP_NODISCARD Result acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# else + // wrapper function for command vkAcquireWinrtDisplayNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireWinrtDisplayNV.html + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkGetWinrtDisplayNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetWinrtDisplayNV.html + template + VULKAN_HPP_NODISCARD Result getWinrtDisplayNV( uint32_t deviceRelativeId, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetWinrtDisplayNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetWinrtDisplayNV.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkGetWinrtDisplayNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetWinrtDisplayNV.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + // wrapper function for command vkGetPhysicalDeviceDirectFBPresentationSupportEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDirectFBPresentationSupportEXT.html + template + Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, + IDirectFB * dfb, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceDirectFBPresentationSupportEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDirectFBPresentationSupportEXT.html + template + VULKAN_HPP_NAMESPACE::Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, + IDirectFB & dfb, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + // wrapper function for command vkGetPhysicalDeviceScreenPresentationSupportQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceScreenPresentationSupportQNX.html + template + Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, + struct _screen_window * window, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceScreenPresentationSupportQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceScreenPresentationSupportQNX.html + template + VULKAN_HPP_NAMESPACE::Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, + struct _screen_window & window, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_NV_optical_flow === + + // wrapper function for command vkGetPhysicalDeviceOpticalFlowImageFormatsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceOpticalFlowImageFormatsNV.html + template + VULKAN_HPP_NODISCARD Result getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, + uint32_t * pFormatCount, + VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceOpticalFlowImageFormatsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceOpticalFlowImageFormatsNV.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceOpticalFlowImageFormatsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceOpticalFlowImageFormatsNV.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, + OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_cooperative_vector === + + // wrapper function for command vkGetPhysicalDeviceCooperativeVectorPropertiesNV, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeVectorPropertiesNV.html + template + VULKAN_HPP_NODISCARD Result getCooperativeVectorPropertiesNV( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeVectorPropertiesNV * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceCooperativeVectorPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeVectorPropertiesNV.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeVectorPropertiesNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceCooperativeVectorPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeVectorPropertiesNV.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeVectorPropertiesNV( CooperativeVectorPropertiesNVAllocator & cooperativeVectorPropertiesNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_cooperative_matrix === + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR.html + template + VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeMatrixPropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeMatrixPropertiesKHR( CooperativeMatrixPropertiesKHRAllocator & cooperativeMatrixPropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_calibrated_timestamps === + + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsKHR.html + template + VULKAN_HPP_NODISCARD Result 
getCalibrateableTimeDomainsKHR( uint32_t * pTimeDomainCount, + VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibrateableTimeDomainsKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibrateableTimeDomainsKHR( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_cooperative_matrix2 === + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV.html + template + VULKAN_HPP_NODISCARD Result + getCooperativeMatrixFlexibleDimensionsPropertiesNV( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV.html + template < + typename CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getCooperativeMatrixFlexibleDimensionsPropertiesNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV.html + template < + typename CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getCooperativeMatrixFlexibleDimensionsPropertiesNV( + CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator & cooperativeMatrixFlexibleDimensionsPropertiesNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return 
m_physicalDevice != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPhysicalDevice m_physicalDevice = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice;
+  };
+
+#if ( VK_USE_64_BIT_PTR_DEFINES == 1 )
+  template <>
+  struct CppType<VkPhysicalDevice, VK_NULL_HANDLE>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice;
+  };
+#endif
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PhysicalDevice>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  // wrapper class for handle VkInstance, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkInstance.html
+  class Instance
+  {
+  public:
+    using CType      = VkInstance;
+    using NativeType = VkInstance;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eInstance;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance;
+
+  public:
+    Instance() VULKAN_HPP_NOEXCEPT {}  // = default; - try to workaround a compiler issue
+
+    Instance( Instance const & rhs )             = default;
+    Instance & operator=( Instance const & rhs ) = default;
+
+#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE )
+    Instance( Instance && rhs )             = default;
+    Instance & operator=( Instance && rhs ) = default;
+#else
+    Instance( Instance && rhs ) VULKAN_HPP_NOEXCEPT : m_instance( VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ) ) {}
+
+    Instance & operator=( Instance && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      m_instance = VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} );
+      return *this;
+    }
+#endif
+
+    VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+
+    Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT : m_instance( instance ) {}
+
+    Instance & operator=( VkInstance instance ) VULKAN_HPP_NOEXCEPT
+    {
+      m_instance = instance;
+      return *this;
+    }
+
+    Instance & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_instance = {};
+      return *this;
+    }
+
+    //=== VK_VERSION_1_0 ===
+
+    // wrapper function for command vkDestroyInstance, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyInstance.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    // wrapper function for command vkDestroyInstance, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyInstance.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    // wrapper function for command vkEnumeratePhysicalDevices, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDevices.html
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result enumeratePhysicalDevices( uint32_t *                             pPhysicalDeviceCount,
+                                                          VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,
+                                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    // wrapper function for command vkEnumeratePhysicalDevices, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDevices.html
+ template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumeratePhysicalDevices( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkEnumeratePhysicalDevices, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDevices.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetInstanceProcAddr, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetInstanceProcAddr.html + template + PFN_vkVoidFunction getProcAddr( const char * pName, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetInstanceProcAddr, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetInstanceProcAddr.html + template + VULKAN_HPP_NAMESPACE::PFN_VoidFunction getProcAddr( const std::string & name, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_1 === + + // wrapper function for command vkEnumeratePhysicalDeviceGroups, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroups.html + template + VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroups( uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumeratePhysicalDeviceGroups, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroups.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroups( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkEnumeratePhysicalDeviceGroups, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroups.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_surface === + + // wrapper function for command vkDestroySurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySurfaceKHR.html + template + void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySurfaceKHR.html + template + void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySurfaceKHR.html + template + void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySurfaceKHR.html + template + void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_display === + + // wrapper function for command vkCreateDisplayPlaneSurfaceKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayPlaneSurfaceKHR.html + template + VULKAN_HPP_NODISCARD Result createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDisplayPlaneSurfaceKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayPlaneSurfaceKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDisplayPlaneSurfaceKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayPlaneSurfaceKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + // wrapper function for command vkCreateXlibSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXlibSurfaceKHR.html + template + VULKAN_HPP_NODISCARD Result createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateXlibSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXlibSurfaceKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateXlibSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXlibSurfaceKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + // wrapper function for command vkCreateXcbSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXcbSurfaceKHR.html + template + VULKAN_HPP_NODISCARD Result createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateXcbSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXcbSurfaceKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateXcbSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXcbSurfaceKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + // wrapper function for command vkCreateWaylandSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWaylandSurfaceKHR.html + template + VULKAN_HPP_NODISCARD Result createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateWaylandSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWaylandSurfaceKHR.html + template + 
VULKAN_HPP_NODISCARD typename ResultValueType::type + createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateWaylandSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWaylandSurfaceKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + // wrapper function for command vkCreateAndroidSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAndroidSurfaceKHR.html + template + VULKAN_HPP_NODISCARD Result createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateAndroidSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAndroidSurfaceKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateAndroidSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAndroidSurfaceKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + // wrapper function for command vkCreateWin32SurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWin32SurfaceKHR.html + template + VULKAN_HPP_NODISCARD Result createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateWin32SurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWin32SurfaceKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, + Optional 
allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateWin32SurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWin32SurfaceKHR.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + // wrapper function for command vkCreateDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugReportCallbackEXT.html + template + VULKAN_HPP_NODISCARD Result createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugReportCallbackEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugReportCallbackEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugReportCallbackEXT.html + template + void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugReportCallbackEXT.html + template + void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDebugReportCallbackEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugReportCallbackEXT.html + template + void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugReportCallbackEXT.html + template + void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDebugReportMessageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugReportMessageEXT.html + template + void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDebugReportMessageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugReportMessageEXT.html + template + void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const std::string & layerPrefix, + const std::string & message, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + // wrapper function for command vkCreateStreamDescriptorSurfaceGGP, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateStreamDescriptorSurfaceGGP.html + template + VULKAN_HPP_NODISCARD Result createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateStreamDescriptorSurfaceGGP, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateStreamDescriptorSurfaceGGP.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateStreamDescriptorSurfaceGGP, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateStreamDescriptorSurfaceGGP.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type createStreamDescriptorSurfaceGGPUnique( + const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator 
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_GGP*/ + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + // wrapper function for command vkCreateViSurfaceNN, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateViSurfaceNN.html + template + VULKAN_HPP_NODISCARD Result createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateViSurfaceNN, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateViSurfaceNN.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateViSurfaceNN, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateViSurfaceNN.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_device_group_creation === + + // wrapper function for command vkEnumeratePhysicalDeviceGroupsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroupsKHR.html + template + VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroupsKHR( uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumeratePhysicalDeviceGroupsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroupsKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroupsKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + // wrapper function for command vkEnumeratePhysicalDeviceGroupsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroupsKHR.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== 
VK_MVK_ios_surface === + + // wrapper function for command vkCreateIOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIOSSurfaceMVK.html + template + VULKAN_HPP_NODISCARD Result createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateIOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIOSSurfaceMVK.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateIOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIOSSurfaceMVK.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + // wrapper function for command vkCreateMacOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMacOSSurfaceMVK.html + template + VULKAN_HPP_NODISCARD Result createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateMacOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMacOSSurfaceMVK.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateMacOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMacOSSurfaceMVK.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + // wrapper function for command vkCreateDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugUtilsMessengerEXT.html + template + VULKAN_HPP_NODISCARD Result createDebugUtilsMessengerEXT( const 
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugUtilsMessengerEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugUtilsMessengerEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugUtilsMessengerEXT.html + template + void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugUtilsMessengerEXT.html + template + void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugUtilsMessengerEXT.html + template + void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugUtilsMessengerEXT.html + template + void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSubmitDebugUtilsMessageEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSubmitDebugUtilsMessageEXT.html + template + void submitDebugUtilsMessageEXT( 
VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSubmitDebugUtilsMessageEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSubmitDebugUtilsMessageEXT.html + template + void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + // wrapper function for command vkCreateImagePipeSurfaceFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImagePipeSurfaceFUCHSIA.html + template + VULKAN_HPP_NODISCARD Result createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateImagePipeSurfaceFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImagePipeSurfaceFUCHSIA.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateImagePipeSurfaceFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImagePipeSurfaceFUCHSIA.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + // wrapper function for command vkCreateMetalSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMetalSurfaceEXT.html + template + VULKAN_HPP_NODISCARD Result createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateMetalSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMetalSurfaceEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + 
createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateMetalSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMetalSurfaceEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_headless_surface === + + // wrapper function for command vkCreateHeadlessSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateHeadlessSurfaceEXT.html + template + VULKAN_HPP_NODISCARD Result createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateHeadlessSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateHeadlessSurfaceEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateHeadlessSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateHeadlessSurfaceEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + // wrapper function for command vkCreateDirectFBSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDirectFBSurfaceEXT.html + template + VULKAN_HPP_NODISCARD Result createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDirectFBSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDirectFBSurfaceEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDirectFBSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDirectFBSurfaceEXT.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + // wrapper function for command vkCreateScreenSurfaceQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateScreenSurfaceQNX.html + template + VULKAN_HPP_NODISCARD Result createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateScreenSurfaceQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateScreenSurfaceQNX.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateScreenSurfaceQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateScreenSurfaceQNX.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + operator VkInstance() const VULKAN_HPP_NOEXCEPT + { + return m_instance; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_instance != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_instance == VK_NULL_HANDLE; + } + + private: + VkInstance m_instance = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Instance; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Instance; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Instance; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkCreateInstance, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateInstance.html + template + VULKAN_HPP_NODISCARD Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Instance * pInstance, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateInstance, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateInstance.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateInstance, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateInstance.html + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createInstanceUnique( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkEnumerateInstanceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceExtensionProperties.html + template + VULKAN_HPP_NODISCARD Result enumerateInstanceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumerateInstanceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceExtensionProperties.html + template < + typename ExtensionPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); + // wrapper function for command vkEnumerateInstanceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceExtensionProperties.html + template < + typename ExtensionPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkEnumerateInstanceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceLayerProperties.html + template + VULKAN_HPP_NODISCARD Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumerateInstanceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceLayerProperties.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename 
std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateInstanceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); + // wrapper function for command vkEnumerateInstanceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceLayerProperties.html + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_1 === + + // wrapper function for command vkEnumerateInstanceVersion, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceVersion.html + template + VULKAN_HPP_NODISCARD Result enumerateInstanceVersion( uint32_t * pApiVersion, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumerateInstanceVersion, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceVersion.html + template + VULKAN_HPP_NODISCARD typename ResultValueType::type enumerateInstanceVersion( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // operators to compare VULKAN_HPP_NAMESPACE::-handles +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + template ::value, int>::type = 0> + auto operator<=>( T const & lhs, T const & rhs ) + { + return static_cast( lhs ) <=> static_cast( rhs ); + } +#else + template ::value, int>::type = 0> + bool operator==( T const & lhs, T const & rhs ) + { + return static_cast( lhs ) == static_cast( rhs ); + } + + template ::value, int>::type = 0> + bool operator!=( T const & lhs, T const & rhs ) + { + return static_cast( lhs ) != static_cast( rhs ); + } + + template ::value, int>::type = 0> + bool operator<( T const & lhs, T const & rhs ) + { + return static_cast( lhs ) < static_cast( rhs ); + } +#endif + + template ::value, int>::type = 0> + bool operator==( T const & v, std::nullptr_t ) + { + return !v; + } + + template ::value, int>::type = 0> + bool operator==( std::nullptr_t, T const & v ) + { + return !v; + } + + template ::value, int>::type = 0> + bool operator!=( T const & v, std::nullptr_t ) + { + return !!v; + } + + template ::value, int>::type = 0> + bool operator!=( std::nullptr_t, T const & v ) + { + return !!v; + } +} // namespace VULKAN_HPP_NAMESPACE +#endif \ No newline at end of file diff --git a/include/vulkan/vulkan_hash.hpp b/include/vulkan/vulkan_hash.hpp new file mode 100644 index 00000000..ff239026 --- /dev/null +++ b/include/vulkan/vulkan_hash.hpp @@ -0,0 +1,19020 @@ +// Copyright 2015-2025 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
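
The hunk above completes the instance-level API in vulkan.hpp (the createInstance wrappers, the enumerateInstance* queries and the generic handle comparison operators), and the new include/vulkan/vulkan_hash.hpp added below supplies std::hash specializations for every handle and structure type. As a rough sketch of how the two parts are meant to be used together — assuming enhanced mode and the smart-handle wrappers are enabled, and with purely illustrative application/engine names — a stand-alone program could create an instance and key an unordered container on Vulkan handles like this:

// Sketch only (not part of the generated headers): exercises the enhanced-mode
// wrappers declared in the hunk above and the std::hash specializations that
// vulkan_hash.hpp provides.
#include <vulkan/vulkan.hpp>
#include <vulkan/vulkan_hash.hpp>
#include <iostream>
#include <unordered_set>

int main()
{
  // Enhanced-mode enumerateInstanceVersion() returns the packed uint32_t version
  // directly; failures are reported via exceptions.
  uint32_t apiVersion = vk::enumerateInstanceVersion();
  std::cout << "Loader reports Vulkan " << VK_API_VERSION_MAJOR( apiVersion ) << '.'
            << VK_API_VERSION_MINOR( apiVersion ) << '\n';

  // "demo-app" / "demo-engine" are placeholder names.
  vk::ApplicationInfo    appInfo( "demo-app", 1, "demo-engine", 1, VK_API_VERSION_1_1 );
  vk::InstanceCreateInfo createInfo( {}, &appInfo );

  // createInstanceUnique() wraps vkCreateInstance in a vk::UniqueInstance that
  // destroys the handle when it goes out of scope.
  vk::UniqueInstance instance = vk::createInstanceUnique( createInfo );

  // std::hash<vk::PhysicalDevice> (added by vulkan_hash.hpp) lets handles act as
  // keys of unordered containers without any extra glue.
  std::unordered_set<vk::PhysicalDevice> uniqueDevices;
  for ( vk::PhysicalDevice pd : instance->enumeratePhysicalDevices() )
  {
    uniqueDevices.insert( pd );
  }
  std::cout << uniqueDevices.size() << " unique physical device(s)\n";
  return 0;
}

Because the handle hashes simply forward to std::hash over the underlying VkInstance/VkPhysicalDevice/etc. values, they are cheap and remain stable for the lifetime of the handle.
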
+ +#ifndef VULKAN_HASH_HPP +#define VULKAN_HASH_HPP + +#include + +namespace std +{ + //======================================= + //=== HASH structures for Flags types === + //======================================= + + template + struct hash> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Flags const & flags ) const VULKAN_HPP_NOEXCEPT + { + return std::hash::type>{}( static_cast::type>( flags ) ); + } + }; + + //=================================== + //=== HASH structures for handles === + //=================================== + + //=== VK_VERSION_1_0 === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Instance const & instance ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( instance ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice const & physicalDevice ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( physicalDevice ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Device const & device ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( device ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Queue const & queue ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( queue ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceMemory const & deviceMemory ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( deviceMemory ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Fence const & fence ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( fence ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Semaphore const & semaphore ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( semaphore ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Event const & event ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( event ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPool const & queryPool ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( queryPool ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Buffer const & buffer ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( buffer ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferView const & bufferView ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( bufferView ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Image const & image ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( image ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageView const & imageView ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( imageView ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModule const & shaderModule ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( shaderModule ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCache const & pipelineCache ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( pipelineCache ) ); + 
} + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Pipeline const & pipeline ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( pipeline ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLayout const & pipelineLayout ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( pipelineLayout ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Sampler const & sampler ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( sampler ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorPool const & descriptorPool ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( descriptorPool ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSet const & descriptorSet ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( descriptorSet ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayout const & descriptorSetLayout ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( descriptorSetLayout ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Framebuffer const & framebuffer ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( framebuffer ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPass const & renderPass ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( renderPass ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandPool const & commandPool ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( commandPool ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBuffer const & commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( commandBuffer ) ); + } + }; + + //=== VK_VERSION_1_1 === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const & samplerYcbcrConversion ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( samplerYcbcrConversion ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const & descriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( descriptorUpdateTemplate ) ); + } + }; + + //=== VK_VERSION_1_3 === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PrivateDataSlot const & privateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( privateDataSlot ) ); + } + }; + + //=== VK_KHR_surface === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceKHR const & surfaceKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( surfaceKHR ) ); + } + }; + + //=== VK_KHR_swapchain === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainKHR const & swapchainKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( swapchainKHR ) ); + } + }; + + //=== VK_KHR_display === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayKHR const & displayKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( 
displayKHR ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeKHR const & displayModeKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( displayModeKHR ) ); + } + }; + + //=== VK_EXT_debug_report === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const & debugReportCallbackEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( debugReportCallbackEXT ) ); + } + }; + + //=== VK_KHR_video_queue === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionKHR const & videoSessionKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( videoSessionKHR ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const & videoSessionParametersKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( videoSessionParametersKHR ) ); + } + }; + + //=== VK_NVX_binary_import === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CuModuleNVX const & cuModuleNVX ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( cuModuleNVX ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CuFunctionNVX const & cuFunctionNVX ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( cuFunctionNVX ) ); + } + }; + + //=== VK_EXT_debug_utils === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT const & debugUtilsMessengerEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( debugUtilsMessengerEXT ) ); + } + }; + + //=== VK_KHR_acceleration_structure === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const & accelerationStructureKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( accelerationStructureKHR ) ); + } + }; + + //=== VK_EXT_validation_cache === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationCacheEXT const & validationCacheEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( validationCacheEXT ) ); + } + }; + + //=== VK_NV_ray_tracing === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureNV const & accelerationStructureNV ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( accelerationStructureNV ) ); + } + }; + + //=== VK_INTEL_performance_query === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const & performanceConfigurationINTEL ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( performanceConfigurationINTEL ) ); + } + }; + + //=== VK_KHR_deferred_host_operations === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeferredOperationKHR const & deferredOperationKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( deferredOperationKHR ) ); + } + }; + + //=== VK_NV_device_generated_commands === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const & indirectCommandsLayoutNV ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( indirectCommandsLayoutNV ) ); + } + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + template <> + struct 
hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaModuleNV const & cudaModuleNV ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( cudaModuleNV ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaFunctionNV const & cudaFunctionNV ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( cudaFunctionNV ) ); + } + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const & bufferCollectionFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( bufferCollectionFUCHSIA ) ); + } + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapEXT const & micromapEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( micromapEXT ) ); + } + }; + + //=== VK_NV_optical_flow === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV const & opticalFlowSessionNV ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( opticalFlowSessionNV ) ); + } + }; + + //=== VK_EXT_shader_object === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderEXT const & shaderEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( shaderEXT ) ); + } + }; + + //=== VK_KHR_pipeline_binary === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR const & pipelineBinaryKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( pipelineBinaryKHR ) ); + } + }; + + //=== VK_NV_external_compute_queue === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV const & externalComputeQueueNV ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( externalComputeQueueNV ) ); + } + }; + + //=== VK_EXT_device_generated_commands === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT const & indirectCommandsLayoutEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( indirectCommandsLayoutEXT ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT const & indirectExecutionSetEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( indirectExecutionSetEXT ) ); + } + }; + +#if 14 <= VULKAN_HPP_CPP_VERSION + //====================================== + //=== HASH structures for structures === + //====================================== + +# if !defined( VULKAN_HPP_HASH_COMBINE ) +# define VULKAN_HPP_HASH_COMBINE( seed, value ) \ + seed ^= std::hash::type>{}( value ) + 0x9e3779b9 + ( seed << 6 ) + ( seed >> 2 ) +# endif + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AabbPositionsKHR const & aabbPositionsKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.minX ); + VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.minY ); + VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.minZ ); + VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.maxX ); + VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.maxY ); + VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.maxZ ); + return seed; + } + }; + + template <> + 
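  // Editorial note (sketch, not generated code): the VULKAN_HPP_HASH_COMBINE macro
  // defined above is the usual boost-style hash_combine fold. For every member m of a
  // structure, the seed is updated as
  //
  //   seed ^= std::hash<std::decay<decltype( m )>::type>{}( m )
  //           + 0x9e3779b9 + ( seed << 6 ) + ( seed >> 2 );
  //
  // where 0x9e3779b9 is the 32-bit golden-ratio constant. The structure hashes below
  // (starting with AabbPositionsKHR above) are nothing more than this fold applied to
  // each member in declaration order, so two structures with identical member values
  // hash to the same seed.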
struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR const & accelerationStructureBuildRangeInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildRangeInfoKHR.primitiveCount ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildRangeInfoKHR.primitiveOffset ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildRangeInfoKHR.firstVertex ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildRangeInfoKHR.transformOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR const & accelerationStructureBuildSizesInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.accelerationStructureSize ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.updateScratchSize ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.buildScratchSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT const & accelerationStructureCaptureDescriptorDataInfoEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCaptureDescriptorDataInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCaptureDescriptorDataInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCaptureDescriptorDataInfoEXT.accelerationStructure ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCaptureDescriptorDataInfoEXT.accelerationStructureNV ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & accelerationStructureCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.createFlags ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.offset ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.size ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.type ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.deviceAddress ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & geometryTrianglesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexData ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexOffset ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexCount ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexStride ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexFormat ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.indexData ); + 
VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.indexOffset ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.indexCount ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.indexType ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.transformData ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.transformOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & geometryAABBNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.aabbData ); + VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.numAABBs ); + VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.stride ); + VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.offset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeometryDataNV const & geometryDataNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, geometryDataNV.triangles ); + VULKAN_HPP_HASH_COMBINE( seed, geometryDataNV.aabbs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeometryNV const & geometryNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, geometryNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, geometryNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, geometryNV.geometryType ); + VULKAN_HPP_HASH_COMBINE( seed, geometryNV.geometry ); + VULKAN_HPP_HASH_COMBINE( seed, geometryNV.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & accelerationStructureInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.type ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.instanceCount ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.geometryCount ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.pGeometries ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & accelerationStructureCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoNV.compactedSize ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoNV.info ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR const & accelerationStructureDeviceAddressInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureDeviceAddressInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureDeviceAddressInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureDeviceAddressInfoKHR.accelerationStructure ); + return seed; + } + }; + + template <> + struct hash + { + 
std::size_t operator()( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformMatrixKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + for ( size_t i = 0; i < 3; ++i ) + { + for ( size_t j = 0; j < 4; ++j ) + { + VULKAN_HPP_HASH_COMBINE( seed, transformMatrixKHR.matrix[i][j] ); + } + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const & accelerationStructureInstanceKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.transform ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.instanceCustomIndex ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.mask ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.instanceShaderBindingTableRecordOffset ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.accelerationStructureReference ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const & accelerationStructureMatrixMotionInstanceNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.transformT0 ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.transformT1 ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.instanceCustomIndex ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.mask ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.instanceShaderBindingTableRecordOffset ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.accelerationStructureReference ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const & accelerationStructureMemoryRequirementsInfoNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMemoryRequirementsInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMemoryRequirementsInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMemoryRequirementsInfoNV.type ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMemoryRequirementsInfoNV.accelerationStructure ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV const & accelerationStructureMotionInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMotionInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMotionInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMotionInfoNV.maxInstances ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMotionInfoNV.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SRTDataNV const & sRTDataNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.sx ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.a ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.b ); + VULKAN_HPP_HASH_COMBINE( 
seed, sRTDataNV.pvx ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.sy ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.c ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.pvy ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.sz ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.pvz ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.qx ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.qy ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.qz ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.qw ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.tx ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.ty ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.tz ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const & accelerationStructureSRTMotionInstanceNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.transformT0 ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.transformT1 ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.instanceCustomIndex ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.mask ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.instanceShaderBindingTableRecordOffset ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.accelerationStructureReference ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapUsageEXT const & micromapUsageEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, micromapUsageEXT.count ); + VULKAN_HPP_HASH_COMBINE( seed, micromapUsageEXT.subdivisionLevel ); + VULKAN_HPP_HASH_COMBINE( seed, micromapUsageEXT.format ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR const & accelerationStructureVersionInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureVersionInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureVersionInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureVersionInfoKHR.pVersionData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const & acquireNextImageInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.swapchain ); + VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.timeout ); + VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.fence ); + VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.deviceMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const & acquireProfilingLockInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, acquireProfilingLockInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, acquireProfilingLockInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, acquireProfilingLockInfoKHR.flags ); + 
VULKAN_HPP_HASH_COMBINE( seed, acquireProfilingLockInfoKHR.timeout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AllocationCallbacks const & allocationCallbacks ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pUserData ); + VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnAllocation ); + VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnReallocation ); + VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnFree ); + VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnInternalAllocation ); + VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnInternalFree ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC const & amigoProfilingSubmitInfoSEC ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, amigoProfilingSubmitInfoSEC.sType ); + VULKAN_HPP_HASH_COMBINE( seed, amigoProfilingSubmitInfoSEC.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, amigoProfilingSubmitInfoSEC.firstDrawTimestamp ); + VULKAN_HPP_HASH_COMBINE( seed, amigoProfilingSubmitInfoSEC.swapBufferTimestamp ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ComponentMapping const & componentMapping ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, componentMapping.r ); + VULKAN_HPP_HASH_COMBINE( seed, componentMapping.g ); + VULKAN_HPP_HASH_COMBINE( seed, componentMapping.b ); + VULKAN_HPP_HASH_COMBINE( seed, componentMapping.a ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID const & androidHardwareBufferFormatProperties2ANDROID ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.format ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.externalFormat ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.formatFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.samplerYcbcrConversionComponents ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.suggestedYcbcrModel ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.suggestedYcbcrRange ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.suggestedXChromaOffset ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.suggestedYChromaOffset ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const & androidHardwareBufferFormatPropertiesANDROID ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
androidHardwareBufferFormatPropertiesANDROID.format ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.externalFormat ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.formatFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.samplerYcbcrConversionComponents ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.suggestedYcbcrModel ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.suggestedYcbcrRange ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.suggestedXChromaOffset ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.suggestedYChromaOffset ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatResolvePropertiesANDROID const & androidHardwareBufferFormatResolvePropertiesANDROID ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatResolvePropertiesANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatResolvePropertiesANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatResolvePropertiesANDROID.colorAttachmentFormat ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const & androidHardwareBufferPropertiesANDROID ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferPropertiesANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferPropertiesANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferPropertiesANDROID.allocationSize ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferPropertiesANDROID.memoryTypeBits ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const & androidHardwareBufferUsageANDROID ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferUsageANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferUsageANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferUsageANDROID.androidHardwareBufferUsage ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & androidSurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, androidSurfaceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, androidSurfaceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, androidSurfaceCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, androidSurfaceCreateInfoKHR.window ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD const & antiLagPresentationInfoAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + 
VULKAN_HPP_HASH_COMBINE( seed, antiLagPresentationInfoAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagPresentationInfoAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagPresentationInfoAMD.stage ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagPresentationInfoAMD.frameIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AntiLagDataAMD const & antiLagDataAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, antiLagDataAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagDataAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagDataAMD.mode ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagDataAMD.maxFPS ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagDataAMD.pPresentationInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ApplicationInfo const & applicationInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.pNext ); + for ( const char * p = applicationInfo.pApplicationName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.applicationVersion ); + for ( const char * p = applicationInfo.pEngineName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.engineVersion ); + VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.apiVersion ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentDescription const & attachmentDescription ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.flags ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.format ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.samples ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.loadOp ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.storeOp ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.stencilLoadOp ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.stencilStoreOp ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.initialLayout ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.finalLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentDescription2 const & attachmentDescription2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.flags ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.format ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.samples ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.loadOp ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.storeOp ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.stencilLoadOp ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.stencilStoreOp ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.initialLayout ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.finalLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const & attachmentDescriptionStencilLayout ) const VULKAN_HPP_NOEXCEPT + { + 
std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescriptionStencilLayout.sType ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescriptionStencilLayout.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescriptionStencilLayout.stencilInitialLayout ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescriptionStencilLayout.stencilFinalLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentReference const & attachmentReference ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, attachmentReference.attachment ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentReference.layout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentReference2 const & attachmentReference2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.attachment ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.layout ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.aspectMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const & attachmentReferenceStencilLayout ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, attachmentReferenceStencilLayout.sType ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentReferenceStencilLayout.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentReferenceStencilLayout.stencilLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD const & attachmentSampleCountInfoAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.pColorAttachmentSamples ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.depthStencilAttachmentSamples ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Extent2D const & extent2D ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, extent2D.width ); + VULKAN_HPP_HASH_COMBINE( seed, extent2D.height ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SampleLocationEXT const & sampleLocationEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sampleLocationEXT.x ); + VULKAN_HPP_HASH_COMBINE( seed, sampleLocationEXT.y ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.sampleLocationsPerPixel ); + VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.sampleLocationGridSize ); + VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.sampleLocationsCount ); + 
VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.pSampleLocations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT const & attachmentSampleLocationsEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleLocationsEXT.attachmentIndex ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleLocationsEXT.sampleLocationsInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BaseInStructure const & baseInStructure ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, baseInStructure.sType ); + VULKAN_HPP_HASH_COMBINE( seed, baseInStructure.pNext ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BaseOutStructure const & baseOutStructure ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, baseOutStructure.sType ); + VULKAN_HPP_HASH_COMBINE( seed, baseOutStructure.pNext ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const & bindAccelerationStructureMemoryInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.accelerationStructure ); + VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.memory ); + VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.memoryOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.deviceIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.pDeviceIndices ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const & bindBufferMemoryDeviceGroupInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryDeviceGroupInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryDeviceGroupInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryDeviceGroupInfo.deviceIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryDeviceGroupInfo.pDeviceIndices ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const & bindBufferMemoryInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.memory ); + VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.memoryOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT const & bindDescriptorBufferEmbeddedSamplersInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorBufferEmbeddedSamplersInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorBufferEmbeddedSamplersInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
bindDescriptorBufferEmbeddedSamplersInfoEXT.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorBufferEmbeddedSamplersInfoEXT.layout ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorBufferEmbeddedSamplersInfoEXT.set ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo const & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.layout ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.firstSet ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.descriptorSetCount ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.pDescriptorSets ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.dynamicOffsetCount ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.pDynamicOffsets ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Offset2D const & offset2D ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, offset2D.x ); + VULKAN_HPP_HASH_COMBINE( seed, offset2D.y ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Rect2D const & rect2D ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, rect2D.offset ); + VULKAN_HPP_HASH_COMBINE( seed, rect2D.extent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const & bindImageMemoryDeviceGroupInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.deviceIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.pDeviceIndices ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.splitInstanceBindRegionCount ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.pSplitInstanceBindRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const & bindImageMemoryInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.image ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.memory ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.memoryOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const & bindImageMemorySwapchainInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemorySwapchainInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemorySwapchainInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemorySwapchainInfoKHR.swapchain ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemorySwapchainInfoKHR.imageIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const & bindImagePlaneMemoryInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindImagePlaneMemoryInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindImagePlaneMemoryInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindImagePlaneMemoryInfo.planeAspect ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandEXT const & bindIndexBufferIndirectCommandEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandEXT.bufferAddress ); + VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandEXT.size ); + VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandEXT.indexType ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV const & bindIndexBufferIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandNV.bufferAddress ); + VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandNV.size ); + VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandNV.indexType ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindMemoryStatus const & bindMemoryStatus ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindMemoryStatus.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindMemoryStatus.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindMemoryStatus.pResult ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV const & bindPipelineIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindPipelineIndirectCommandNV.pipelineAddress ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const & bindShaderGroupIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindShaderGroupIndirectCommandNV.groupIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseMemoryBind const & sparseMemoryBind ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.resourceOffset ); + VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.size ); + VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.memory ); + VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.memoryOffset ); + VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo const & sparseBufferMemoryBindInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sparseBufferMemoryBindInfo.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, sparseBufferMemoryBindInfo.bindCount ); + VULKAN_HPP_HASH_COMBINE( seed, sparseBufferMemoryBindInfo.pBinds ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo const & sparseImageOpaqueMemoryBindInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
sparseImageOpaqueMemoryBindInfo.image ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageOpaqueMemoryBindInfo.bindCount ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageOpaqueMemoryBindInfo.pBinds ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresource const & imageSubresource ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageSubresource.aspectMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresource.mipLevel ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresource.arrayLayer ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Offset3D const & offset3D ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, offset3D.x ); + VULKAN_HPP_HASH_COMBINE( seed, offset3D.y ); + VULKAN_HPP_HASH_COMBINE( seed, offset3D.z ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Extent3D const & extent3D ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, extent3D.width ); + VULKAN_HPP_HASH_COMBINE( seed, extent3D.height ); + VULKAN_HPP_HASH_COMBINE( seed, extent3D.depth ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryBind const & sparseImageMemoryBind ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.subresource ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.offset ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.extent ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.memory ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.memoryOffset ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo const & sparseImageMemoryBindInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBindInfo.image ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBindInfo.bindCount ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBindInfo.pBinds ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindSparseInfo const & bindSparseInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.waitSemaphoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pWaitSemaphores ); + VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.bufferBindCount ); + VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pBufferBinds ); + VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.imageOpaqueBindCount ); + VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pImageOpaqueBinds ); + VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.imageBindCount ); + VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pImageBinds ); + VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.signalSemaphoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pSignalSemaphores ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandEXT const & bindVertexBufferIndirectCommandEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + 
VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandEXT.bufferAddress ); + VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandEXT.size ); + VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandEXT.stride ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV const & bindVertexBufferIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandNV.bufferAddress ); + VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandNV.size ); + VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandNV.stride ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR const & bindVideoSessionMemoryInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.memoryBindIndex ); + VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.memory ); + VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.memoryOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.memorySize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BlitImageCubicWeightsInfoQCOM const & blitImageCubicWeightsInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, blitImageCubicWeightsInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, blitImageCubicWeightsInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, blitImageCubicWeightsInfoQCOM.cubicWeights ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresourceLayers ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceLayers.aspectMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceLayers.mipLevel ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceLayers.baseArrayLayer ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceLayers.layerCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageBlit2 const & imageBlit2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.srcSubresource ); + for ( size_t i = 0; i < 2; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.srcOffsets[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.dstSubresource ); + for ( size_t i = 0; i < 2; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.dstOffsets[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BlitImageInfo2 const & blitImageInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.srcImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.dstImageLayout ); + 
VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.pRegions ); + VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.filter ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT const & bufferCaptureDescriptorDataInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferCaptureDescriptorDataInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCaptureDescriptorDataInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCaptureDescriptorDataInfoEXT.buffer ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA const & bufferCollectionBufferCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionBufferCreateInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionBufferCreateInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionBufferCreateInfoFUCHSIA.collection ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionBufferCreateInfoFUCHSIA.index ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraintsInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.minBufferCount ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.maxBufferCount ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.minBufferCountForCamping ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.minBufferCountForDedicatedSlack ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.minBufferCountForSharedSlack ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & bufferCollectionCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionCreateInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionCreateInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionCreateInfoFUCHSIA.collectionToken ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA const & bufferCollectionImageCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionImageCreateInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionImageCreateInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionImageCreateInfoFUCHSIA.collection ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionImageCreateInfoFUCHSIA.index ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( 
VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const & sysmemColorSpaceFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sysmemColorSpaceFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, sysmemColorSpaceFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, sysmemColorSpaceFUCHSIA.colorSpace ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA const & bufferCollectionPropertiesFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.memoryTypeBits ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.bufferCount ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.createInfoIndex ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.sysmemPixelFormat ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.formatFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.sysmemColorSpaceIndex ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.samplerYcbcrConversionComponents ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.suggestedYcbcrModel ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.suggestedYcbcrRange ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.suggestedXChromaOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.suggestedYChromaOffset ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & bufferCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.size ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.usage ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.sharingMode ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.queueFamilyIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.pQueueFamilyIndices ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA const & bufferConstraintsInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.createInfo ); + VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.requiredFormatFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.bufferCollectionConstraints ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCopy const & bufferCopy ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
bufferCopy.srcOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCopy.dstOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCopy.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCopy2 const & bufferCopy2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.srcOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.dstOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const & bufferDeviceAddressCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressCreateInfoEXT.deviceAddress ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const & bufferDeviceAddressInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressInfo.buffer ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferImageCopy const & bufferImageCopy ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.bufferOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.bufferRowLength ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.bufferImageHeight ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.imageSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.imageOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.imageExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferImageCopy2 const & bufferImageCopy2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.bufferOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.bufferRowLength ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.bufferImageHeight ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.imageSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.imageOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.imageExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const & bufferMemoryBarrier ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.dstAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.srcQueueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.dstQueueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, 
bufferMemoryBarrier.offset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 const & bufferMemoryBarrier2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.srcStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.dstStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.dstAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.srcQueueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.dstQueueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.offset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const & bufferMemoryRequirementsInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryRequirementsInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryRequirementsInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryRequirementsInfo2.buffer ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const & bufferOpaqueCaptureAddressCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferOpaqueCaptureAddressCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferOpaqueCaptureAddressCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferOpaqueCaptureAddressCreateInfo.opaqueCaptureAddress ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfo const & bufferUsageFlags2CreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfo.usage ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & bufferViewCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.format ); + VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.offset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.range ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV const & stridedDeviceAddressNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressNV.startAddress ); + VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressNV.strideInBytes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureIndirectCommandNV const & + buildPartitionedAccelerationStructureIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureIndirectCommandNV.opType ); + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureIndirectCommandNV.argCount ); + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureIndirectCommandNV.argData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV const & partitionedAccelerationStructureInstancesInputNV ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureInstancesInputNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureInstancesInputNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureInstancesInputNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureInstancesInputNV.instanceCount ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureInstancesInputNV.maxInstancePerPartitionCount ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureInstancesInputNV.partitionCount ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureInstancesInputNV.maxInstanceInGlobalPartitionCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureInfoNV const & buildPartitionedAccelerationStructureInfoNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureInfoNV.input ); + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureInfoNV.srcAccelerationStructureData ); + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureInfoNV.dstAccelerationStructureData ); + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureInfoNV.scratchData ); + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureInfoNV.srcInfos ); + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureInfoNV.srcInfosCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR const & calibratedTimestampInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, calibratedTimestampInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, calibratedTimestampInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, calibratedTimestampInfoKHR.timeDomain ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CheckpointData2NV const & checkpointData2NV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, checkpointData2NV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, checkpointData2NV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, checkpointData2NV.stage ); + VULKAN_HPP_HASH_COMBINE( seed, checkpointData2NV.pCheckpointMarker ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CheckpointDataNV const & checkpointDataNV ) const VULKAN_HPP_NOEXCEPT + { 
+ std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, checkpointDataNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, checkpointDataNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, checkpointDataNV.stage ); + VULKAN_HPP_HASH_COMBINE( seed, checkpointDataNV.pCheckpointMarker ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & clearDepthStencilValue ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, clearDepthStencilValue.depth ); + VULKAN_HPP_HASH_COMBINE( seed, clearDepthStencilValue.stencil ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClearRect const & clearRect ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, clearRect.rect ); + VULKAN_HPP_HASH_COMBINE( seed, clearRect.baseArrayLayer ); + VULKAN_HPP_HASH_COMBINE( seed, clearRect.layerCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & + clusterAccelerationStructureBuildClustersBottomLevelInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildClustersBottomLevelInfoNV.clusterReferencesCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildClustersBottomLevelInfoNV.clusterReferencesStride ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildClustersBottomLevelInfoNV.clusterReferences ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & + clusterAccelerationStructureGeometryIndexAndGeometryFlagsNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureGeometryIndexAndGeometryFlagsNV.geometryIndex ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureGeometryIndexAndGeometryFlagsNV.reserved ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureGeometryIndexAndGeometryFlagsNV.geometryFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildTriangleClusterInfoNV const & + clusterAccelerationStructureBuildTriangleClusterInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.clusterID ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.clusterFlags ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.triangleCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.positionTruncateBitCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.indexType ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexType ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.baseGeometryIndexAndGeometryFlags ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.indexBufferStride ); + VULKAN_HPP_HASH_COMBINE( seed, 
clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexBufferStride ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.geometryIndexAndFlagsBufferStride ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexBufferStride ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.indexBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.geometryIndexAndFlagsBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapArray ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexBuffer ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & + clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.clusterID ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.clusterFlags ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.triangleCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.vertexCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.positionTruncateBitCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.indexType ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.opacityMicromapIndexType ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.baseGeometryIndexAndGeometryFlags ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.indexBufferStride ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.vertexBufferStride ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.geometryIndexAndFlagsBufferStride ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.opacityMicromapIndexBufferStride ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.indexBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.vertexBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.geometryIndexAndFlagsBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.opacityMicromapArray ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.opacityMicromapIndexBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.instantiationBoundingBoxLimit ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClustersBottomLevelInputNV const & + 
clusterAccelerationStructureClustersBottomLevelInputNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureClustersBottomLevelInputNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureClustersBottomLevelInputNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureClustersBottomLevelInputNV.maxTotalClusterCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureClustersBottomLevelInputNV.maxClusterCountPerAccelerationStructure ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTriangleClusterInputNV const & clusterAccelerationStructureTriangleClusterInputNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.vertexFormat ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.maxGeometryIndexValue ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.maxClusterUniqueGeometryCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.maxClusterTriangleCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.maxClusterVertexCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.maxTotalTriangleCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.maxTotalVertexCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.minPositionTruncateBitCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInputNV const & clusterAccelerationStructureMoveObjectsInputNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureMoveObjectsInputNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureMoveObjectsInputNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureMoveObjectsInputNV.type ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureMoveObjectsInputNV.noMoveOverlap ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureMoveObjectsInputNV.maxMovedBytes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const & stridedDeviceAddressRegionKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressRegionKHR.deviceAddress ); + VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressRegionKHR.stride ); + VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressRegionKHR.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInstantiateClusterInfoNV const & + clusterAccelerationStructureInstantiateClusterInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureInstantiateClusterInfoNV.clusterIdOffset ); + VULKAN_HPP_HASH_COMBINE( seed, 
clusterAccelerationStructureInstantiateClusterInfoNV.geometryIndexOffset ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureInstantiateClusterInfoNV.reserved ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureInstantiateClusterInfoNV.clusterTemplateAddress ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureInstantiateClusterInfoNV.vertexBuffer ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInfoNV const & clusterAccelerationStructureMoveObjectsInfoNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureMoveObjectsInfoNV.srcAccelerationStructure ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const & coarseSampleLocationNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleLocationNV.pixelX ); + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleLocationNV.pixelY ); + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleLocationNV.sample ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const & coarseSampleOrderCustomNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.shadingRate ); + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.sampleCount ); + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.sampleLocationCount ); + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.pSampleLocations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT const & colorBlendAdvancedEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.advancedBlendOp ); + VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.srcPremultiplied ); + VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.dstPremultiplied ); + VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.blendOverlap ); + VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.clampResults ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT const & colorBlendEquationEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.srcColorBlendFactor ); + VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.dstColorBlendFactor ); + VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.colorBlendOp ); + VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.srcAlphaBlendFactor ); + VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.dstAlphaBlendFactor ); + VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.alphaBlendOp ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & commandBufferAllocateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.commandPool ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.level ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.commandBufferCount ); + return seed; + } + 
}; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const & commandBufferInheritanceInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.renderPass ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.subpass ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.framebuffer ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.occlusionQueryEnable ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.queryFlags ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.pipelineStatistics ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const & commandBufferBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.pInheritanceInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const & commandBufferInheritanceConditionalRenderingInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceConditionalRenderingInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceConditionalRenderingInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceConditionalRenderingInfoEXT.conditionalRenderingEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM const & commandBufferInheritanceRenderPassTransformInfoQCOM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.transform ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.renderArea ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo const & commandBufferInheritanceRenderingInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.viewMask ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.pColorAttachmentFormats ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.depthAttachmentFormat ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.stencilAttachmentFormat ); + VULKAN_HPP_HASH_COMBINE( seed, 
commandBufferInheritanceRenderingInfo.rasterizationSamples ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Viewport const & viewport ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, viewport.x ); + VULKAN_HPP_HASH_COMBINE( seed, viewport.y ); + VULKAN_HPP_HASH_COMBINE( seed, viewport.width ); + VULKAN_HPP_HASH_COMBINE( seed, viewport.height ); + VULKAN_HPP_HASH_COMBINE( seed, viewport.minDepth ); + VULKAN_HPP_HASH_COMBINE( seed, viewport.maxDepth ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV const & commandBufferInheritanceViewportScissorInfoNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.viewportScissor2D ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.viewportDepthCount ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.pViewportDepths ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo const & commandBufferSubmitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferSubmitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferSubmitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferSubmitInfo.commandBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferSubmitInfo.deviceMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & commandPoolCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandPoolCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandPoolCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandPoolCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, commandPoolCreateInfo.queueFamilyIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SpecializationMapEntry const & specializationMapEntry ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, specializationMapEntry.constantID ); + VULKAN_HPP_HASH_COMBINE( seed, specializationMapEntry.offset ); + VULKAN_HPP_HASH_COMBINE( seed, specializationMapEntry.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SpecializationInfo const & specializationInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, specializationInfo.mapEntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, specializationInfo.pMapEntries ); + VULKAN_HPP_HASH_COMBINE( seed, specializationInfo.dataSize ); + VULKAN_HPP_HASH_COMBINE( seed, specializationInfo.pData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & pipelineShaderStageCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.pNext ); + 
VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.stage ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.module ); + for ( const char * p = pipelineShaderStageCreateInfo.pName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.pSpecializationInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & computePipelineCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.stage ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.layout ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.basePipelineHandle ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.basePipelineIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV const & computePipelineIndirectBufferInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.deviceAddress ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.size ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.pipelineDeviceAddressCaptureReplay ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const & conditionalRenderingBeginInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.offset ); + VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ConformanceVersion const & conformanceVersion ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, conformanceVersion.major ); + VULKAN_HPP_HASH_COMBINE( seed, conformanceVersion.minor ); + VULKAN_HPP_HASH_COMBINE( seed, conformanceVersion.subminor ); + VULKAN_HPP_HASH_COMBINE( seed, conformanceVersion.patch ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV const & cooperativeMatrixFlexibleDimensionsPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.MGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.NGranularity ); + 
VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.KGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.AType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.BType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.CType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.ResultType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.saturatingAccumulation ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.scope ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.workgroupInvocations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR const & cooperativeMatrixPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.MSize ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.NSize ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.KSize ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.AType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.BType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.CType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.ResultType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.saturatingAccumulation ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.scope ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const & cooperativeMatrixPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.MSize ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.NSize ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.KSize ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.AType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.BType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.CType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.DType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.scope ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CooperativeVectorPropertiesNV const & cooperativeVectorPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cooperativeVectorPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeVectorPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeVectorPropertiesNV.inputType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeVectorPropertiesNV.inputInterpretation ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeVectorPropertiesNV.matrixInterpretation ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeVectorPropertiesNV.biasInterpretation ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeVectorPropertiesNV.resultType ); 
+ VULKAN_HPP_HASH_COMBINE( seed, cooperativeVectorPropertiesNV.transpose ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR const & copyAccelerationStructureInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.src ); + VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.dst ); + VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.mode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyBufferInfo2 const & copyBufferInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.srcBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.dstBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 const & copyBufferToImageInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.srcBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.dstImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM const & copyCommandTransformInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyCommandTransformInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyCommandTransformInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyCommandTransformInfoQCOM.transform ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyDescriptorSet const & copyDescriptorSet ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.srcSet ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.srcBinding ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.srcArrayElement ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.dstSet ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.dstBinding ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.dstArrayElement ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.descriptorCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageCopy2 const & imageCopy2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.srcSubresource ); + 
VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.srcOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.dstSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.dstOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.extent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageInfo2 const & copyImageInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.srcImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.dstImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 const & copyImageToBufferInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.srcImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.dstBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageToImageInfo const & copyImageToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.srcImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.dstImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageToMemoryCopy const & imageToMemoryCopy ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.pHostPointer ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.memoryRowLength ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.memoryImageHeight ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.imageSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.imageOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.imageExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo const & copyImageToMemoryInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
copyImageToMemoryInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.srcImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV const & copyMemoryIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryIndirectCommandNV.srcAddress ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryIndirectCommandNV.dstAddress ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryIndirectCommandNV.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV const & copyMemoryToImageIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.srcAddress ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.bufferRowLength ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.bufferImageHeight ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.imageSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.imageOffset ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.imageExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryToImageCopy const & memoryToImageCopy ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.pHostPointer ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.memoryRowLength ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.memoryImageHeight ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.imageSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.imageOffset ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.imageExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo const & copyMemoryToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.dstImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT const & copyMicromapInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.src ); + VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.dst ); + VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.mode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & cuFunctionCreateInfoNVX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cuFunctionCreateInfoNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cuFunctionCreateInfoNVX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cuFunctionCreateInfoNVX.module ); + for ( const char * p = cuFunctionCreateInfoNVX.pName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX const & cuLaunchInfoNVX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.function ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.gridDimX ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.gridDimY ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.gridDimZ ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.blockDimX ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.blockDimY ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.blockDimZ ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.sharedMemBytes ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.paramCount ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.pParams ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.extraCount ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.pExtras ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & cuModuleCreateInfoNVX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cuModuleCreateInfoNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cuModuleCreateInfoNVX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cuModuleCreateInfoNVX.dataSize ); + VULKAN_HPP_HASH_COMBINE( seed, cuModuleCreateInfoNVX.pData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CuModuleTexturingModeCreateInfoNVX const & cuModuleTexturingModeCreateInfoNVX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cuModuleTexturingModeCreateInfoNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cuModuleTexturingModeCreateInfoNVX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cuModuleTexturingModeCreateInfoNVX.use64bitTexturing ); + return seed; + } + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & cudaFunctionCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cudaFunctionCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cudaFunctionCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cudaFunctionCreateInfoNV.module ); + for ( const char * p = cudaFunctionCreateInfoNV.pName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV const & cudaLaunchInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.function ); + VULKAN_HPP_HASH_COMBINE( 
seed, cudaLaunchInfoNV.gridDimX ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.gridDimY ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.gridDimZ ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.blockDimX ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.blockDimY ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.blockDimZ ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.sharedMemBytes ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.paramCount ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.pParams ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.extraCount ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.pExtras ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & cudaModuleCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.dataSize ); + VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.pData ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const & d3D12FenceSubmitInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.waitSemaphoreValuesCount ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.pWaitSemaphoreValues ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.signalSemaphoreValuesCount ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.pSignalSemaphoreValues ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const & debugMarkerMarkerInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerMarkerInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerMarkerInfoEXT.pNext ); + for ( const char * p = debugMarkerMarkerInfoEXT.pMarkerName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + for ( size_t i = 0; i < 4; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerMarkerInfoEXT.color[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const & debugMarkerObjectNameInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectNameInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectNameInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectNameInfoEXT.objectType ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectNameInfoEXT.object ); + for ( const char * p = debugMarkerObjectNameInfoEXT.pObjectName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const & debugMarkerObjectTagInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
debugMarkerObjectTagInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.objectType ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.object ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.tagName ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.tagSize ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.pTag ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & debugReportCallbackCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.pfnCallback ); + VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.pUserData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const & debugUtilsLabelEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsLabelEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsLabelEXT.pNext ); + for ( const char * p = debugUtilsLabelEXT.pLabelName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + for ( size_t i = 0; i < 4; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsLabelEXT.color[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const & debugUtilsObjectNameInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectNameInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectNameInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectNameInfoEXT.objectType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectNameInfoEXT.objectHandle ); + for ( const char * p = debugUtilsObjectNameInfoEXT.pObjectName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const & debugUtilsMessengerCallbackDataEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.flags ); + for ( const char * p = debugUtilsMessengerCallbackDataEXT.pMessageIdName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.messageIdNumber ); + for ( const char * p = debugUtilsMessengerCallbackDataEXT.pMessage; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.queueLabelCount ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.pQueueLabels ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.cmdBufLabelCount ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.pCmdBufLabels ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.objectCount ); 
+ VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.pObjects ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & debugUtilsMessengerCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.messageSeverity ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.messageType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.pfnUserCallback ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.pUserData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const & debugUtilsObjectTagInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.objectType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.objectHandle ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.tagName ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.tagSize ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.pTag ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV const & decompressMemoryRegionNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.srcAddress ); + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.dstAddress ); + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.compressedSize ); + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.decompressedSize ); + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.decompressionMethod ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const & dedicatedAllocationBufferCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationBufferCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationBufferCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationBufferCreateInfoNV.dedicatedAllocation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const & dedicatedAllocationImageCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationImageCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationImageCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationImageCreateInfoNV.dedicatedAllocation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const & dedicatedAllocationMemoryAllocateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationMemoryAllocateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( 
seed, dedicatedAllocationMemoryAllocateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationMemoryAllocateInfoNV.image ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationMemoryAllocateInfoNV.buffer ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryBarrier2 const & memoryBarrier2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.srcStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.dstStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.dstAccessMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & imageSubresourceRange ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.aspectMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.baseMipLevel ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.levelCount ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.baseArrayLayer ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.layerCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 const & imageMemoryBarrier2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.srcStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.dstStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.dstAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.oldLayout ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.newLayout ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.srcQueueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.dstQueueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.image ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.subresourceRange ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DependencyInfo const & dependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.dependencyFlags ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.memoryBarrierCount ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.pMemoryBarriers ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.bufferMemoryBarrierCount ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.pBufferMemoryBarriers ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.imageMemoryBarrierCount ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.pImageMemoryBarriers ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT const & depthBiasInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, depthBiasInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, depthBiasInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( 
seed, depthBiasInfoEXT.depthBiasConstantFactor ); + VULKAN_HPP_HASH_COMBINE( seed, depthBiasInfoEXT.depthBiasClamp ); + VULKAN_HPP_HASH_COMBINE( seed, depthBiasInfoEXT.depthBiasSlopeFactor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DepthBiasRepresentationInfoEXT const & depthBiasRepresentationInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, depthBiasRepresentationInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, depthBiasRepresentationInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, depthBiasRepresentationInfoEXT.depthBiasRepresentation ); + VULKAN_HPP_HASH_COMBINE( seed, depthBiasRepresentationInfoEXT.depthBiasExact ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DepthClampRangeEXT const & depthClampRangeEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, depthClampRangeEXT.minDepthClamp ); + VULKAN_HPP_HASH_COMBINE( seed, depthClampRangeEXT.maxDepthClamp ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT const & descriptorAddressInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.address ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.range ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.format ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT const & descriptorBufferBindingInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingInfoEXT.address ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingInfoEXT.usage ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT const & + descriptorBufferBindingPushDescriptorBufferHandleEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingPushDescriptorBufferHandleEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingPushDescriptorBufferHandleEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingPushDescriptorBufferHandleEXT.buffer ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const & descriptorBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferInfo.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferInfo.offset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferInfo.range ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorImageInfo const & descriptorImageInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorImageInfo.sampler ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorImageInfo.imageView ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorImageInfo.imageLayout ); + 
return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorPoolSize const & descriptorPoolSize ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolSize.type ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolSize.descriptorCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & descriptorPoolCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.maxSets ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.poolSizeCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.pPoolSizes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo const & descriptorPoolInlineUniformBlockCreateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolInlineUniformBlockCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolInlineUniformBlockCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolInlineUniformBlockCreateInfo.maxInlineUniformBlockBindings ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & descriptorSetAllocateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.descriptorPool ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.descriptorSetCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.pSetLayouts ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE const & descriptorSetBindingReferenceVALVE ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetBindingReferenceVALVE.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetBindingReferenceVALVE.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetBindingReferenceVALVE.descriptorSetLayout ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetBindingReferenceVALVE.binding ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const & descriptorSetLayoutBinding ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.binding ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.descriptorType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.descriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.pImmutableSamplers ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const & descriptorSetLayoutBindingFlagsCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + 
VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBindingFlagsCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBindingFlagsCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBindingFlagsCreateInfo.bindingCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBindingFlagsCreateInfo.pBindingFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & descriptorSetLayoutCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.bindingCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.pBindings ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE const & descriptorSetLayoutHostMappingInfoVALVE ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutHostMappingInfoVALVE.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutHostMappingInfoVALVE.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutHostMappingInfoVALVE.descriptorOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutHostMappingInfoVALVE.descriptorSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const & descriptorSetLayoutSupport ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutSupport.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutSupport.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutSupport.supported ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const & descriptorSetVariableDescriptorCountAllocateInfo ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountAllocateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountAllocateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountAllocateInfo.descriptorSetCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountAllocateInfo.pDescriptorCounts ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const & descriptorSetVariableDescriptorCountLayoutSupport ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountLayoutSupport.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountLayoutSupport.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountLayoutSupport.maxVariableDescriptorCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry const & descriptorUpdateTemplateEntry ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.dstBinding ); + VULKAN_HPP_HASH_COMBINE( seed, 
descriptorUpdateTemplateEntry.dstArrayElement ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.descriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.descriptorType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.offset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.stride ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & descriptorUpdateTemplateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.descriptorUpdateEntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.pDescriptorUpdateEntries ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.templateType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.descriptorSetLayout ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.pipelineBindPoint ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.pipelineLayout ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.set ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT const & deviceAddressBindingCallbackDataEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.baseAddress ); + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.size ); + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.bindingType ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements const & deviceBufferMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceBufferMemoryRequirements.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceBufferMemoryRequirements.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceBufferMemoryRequirements.pCreateInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const & deviceQueueCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.queueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.queueCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.pQueuePriorities ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & physicalDeviceFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.robustBufferAccess ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.fullDrawIndexUint32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.imageCubeArray ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.independentBlend ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.geometryShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.tessellationShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sampleRateShading ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.dualSrcBlend ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.logicOp ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.multiDrawIndirect ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.drawIndirectFirstInstance ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.depthClamp ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.depthBiasClamp ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.fillModeNonSolid ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.depthBounds ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.wideLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.largePoints ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.alphaToOne ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.multiViewport ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.samplerAnisotropy ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.textureCompressionETC2 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.textureCompressionASTC_LDR ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.textureCompressionBC ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.occlusionQueryPrecise ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.pipelineStatisticsQuery ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.vertexPipelineStoresAndAtomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.fragmentStoresAndAtomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderTessellationAndGeometryPointSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderImageGatherExtended ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageExtendedFormats ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageMultisample ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageReadWithoutFormat ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageWriteWithoutFormat ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderUniformBufferArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderSampledImageArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageBufferArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderClipDistance ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderCullDistance ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderInt64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderInt16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderResourceResidency ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderResourceMinLod ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseBinding ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidencyBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidencyImage2D ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidencyImage3D ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidency2Samples ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidency4Samples ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidency8Samples ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidency16Samples ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidencyAliased ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.variableMultisampleRate ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.inheritedQueries ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & deviceCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.queueCreateInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.pQueueCreateInfos ); + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.enabledLayerCount ); + for ( size_t i = 0; i < deviceCreateInfo.enabledLayerCount; ++i ) + { + for ( const char * p = deviceCreateInfo.ppEnabledLayerNames[i]; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + } + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.enabledExtensionCount ); + for ( size_t i = 0; i < deviceCreateInfo.enabledExtensionCount; ++i ) + { + for ( const char * p = deviceCreateInfo.ppEnabledExtensionNames[i]; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + } + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.pEnabledFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT const & deviceDeviceMemoryReportCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.pfnUserCallback ); + VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.pUserData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV const & deviceDiagnosticsConfigCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceDiagnosticsConfigCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceDiagnosticsConfigCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceDiagnosticsConfigCreateInfoNV.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceEventInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceEventInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceEventInfoEXT.deviceEvent ); + return seed; + } + }; + + template 
<> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT const & deviceFaultAddressInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultAddressInfoEXT.addressType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultAddressInfoEXT.reportedAddress ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultAddressInfoEXT.addressPrecision ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT const & deviceFaultCountsEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.addressInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.vendorInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.vendorBinarySize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT const & deviceFaultVendorInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorInfoEXT.description[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorInfoEXT.vendorFaultCode ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorInfoEXT.vendorFaultData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT const & deviceFaultInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pNext ); + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.description[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pAddressInfos ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pVendorInfos ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pVendorBinaryData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT const & deviceFaultVendorBinaryHeaderVersionOneEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.headerSize ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.headerVersion ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.vendorID ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.deviceID ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.driverVersion ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.pipelineCacheUUID[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.applicationNameOffset ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.applicationVersion ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.engineNameOffset ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.engineVersion ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.apiVersion ); + return seed; + } + }; + + template <> + 
struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const & deviceGroupBindSparseInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupBindSparseInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupBindSparseInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupBindSparseInfo.resourceDeviceIndex ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupBindSparseInfo.memoryDeviceIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const & deviceGroupCommandBufferBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupCommandBufferBeginInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupCommandBufferBeginInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupCommandBufferBeginInfo.deviceMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const & deviceGroupDeviceCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupDeviceCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupDeviceCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupDeviceCreateInfo.physicalDeviceCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupDeviceCreateInfo.pPhysicalDevices ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const & deviceGroupPresentCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentCapabilitiesKHR.pNext ); + for ( size_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentCapabilitiesKHR.presentMask[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentCapabilitiesKHR.modes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const & deviceGroupPresentInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.swapchainCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.pDeviceMasks ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.mode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const & deviceGroupRenderPassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.deviceMask ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.deviceRenderAreaCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.pDeviceRenderAreas ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const & deviceGroupSubmitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
deviceGroupSubmitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.waitSemaphoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.pWaitSemaphoreDeviceIndices ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.commandBufferCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.pCommandBufferDeviceMasks ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.signalSemaphoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.pSignalSemaphoreDeviceIndices ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const & deviceGroupSwapchainCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSwapchainCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSwapchainCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSwapchainCreateInfoKHR.modes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & imageCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.imageType ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.format ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.extent ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.mipLevels ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.arrayLayers ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.samples ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.tiling ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.usage ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.sharingMode ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.queueFamilyIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.pQueueFamilyIndices ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.initialLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements const & deviceImageMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceImageMemoryRequirements.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageMemoryRequirements.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageMemoryRequirements.pCreateInfo ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageMemoryRequirements.planeAspect ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresource2 const & imageSubresource2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2.imageSubresource ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo const & deviceImageSubresourceInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfo.pCreateInfo ); + 
VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfo.pSubresource ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const & deviceMemoryOpaqueCaptureAddressInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOpaqueCaptureAddressInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOpaqueCaptureAddressInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOpaqueCaptureAddressInfo.memory ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const & deviceMemoryOverallocationCreateInfoAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOverallocationCreateInfoAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOverallocationCreateInfoAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOverallocationCreateInfoAMD.overallocationBehavior ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT const & deviceMemoryReportCallbackDataEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.type ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.memoryObjectId ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.size ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.objectType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.objectHandle ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.heapIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DevicePipelineBinaryInternalCacheControlKHR const & devicePipelineBinaryInternalCacheControlKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, devicePipelineBinaryInternalCacheControlKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, devicePipelineBinaryInternalCacheControlKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, devicePipelineBinaryInternalCacheControlKHR.disableInternalCache ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo const & devicePrivateDataCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, devicePrivateDataCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, devicePrivateDataCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, devicePrivateDataCreateInfo.privateDataSlotRequestCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfo const & deviceQueueGlobalPriorityCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfo.globalPriority ); + return seed; + } + }; + + template <> + struct hash + { + 
std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & deviceQueueInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.flags ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.queueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.queueIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM const & deviceQueueShaderCoreControlCreateInfoARM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueShaderCoreControlCreateInfoARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueShaderCoreControlCreateInfoARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueShaderCoreControlCreateInfoARM.shaderCoreCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG const & directDriverLoadingInfoLUNARG ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingInfoLUNARG.sType ); + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingInfoLUNARG.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingInfoLUNARG.flags ); + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingInfoLUNARG.pfnGetInstanceProcAddr ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG const & directDriverLoadingListLUNARG ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingListLUNARG.sType ); + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingListLUNARG.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingListLUNARG.mode ); + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingListLUNARG.driverCount ); + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingListLUNARG.pDrivers ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & directFBSurfaceCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.dfb ); + VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.surface ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const & dispatchIndirectCommand ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, dispatchIndirectCommand.x ); + VULKAN_HPP_HASH_COMBINE( seed, dispatchIndirectCommand.y ); + VULKAN_HPP_HASH_COMBINE( seed, dispatchIndirectCommand.z ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DispatchTileInfoQCOM const & dispatchTileInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, dispatchTileInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, dispatchTileInfoQCOM.pNext ); + return seed; + } + }; + + 
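+  // -------------------------------------------------------------------------
+  // Editorial note (illustrative sketch, not part of the generated header):
+  // the specializations in this file exist so that Vulkan-Hpp structs can be
+  // used directly as keys of unordered containers, provided the struct also
+  // provides operator== (which Vulkan-Hpp structs normally do unless the
+  // comparison operators are disabled). A minimal usage sketch, assuming the
+  // default VULKAN_HPP_NAMESPACE of `vk` and that struct setters are enabled;
+  // the variable names below are invented for illustration only:
+  //
+  //   #include <unordered_set>
+  //   #include <vulkan/vulkan.hpp>
+  //   #include <vulkan/vulkan_hash.hpp>
+  //
+  //   // Deduplicate descriptor-set layout bindings gathered from several
+  //   // shader stages; std::hash<vk::DescriptorSetLayoutBinding> comes from
+  //   // this header, operator== from vulkan.hpp.
+  //   std::unordered_set<vk::DescriptorSetLayoutBinding> uniqueBindings;
+  //   uniqueBindings.insert( vk::DescriptorSetLayoutBinding{}
+  //                            .setBinding( 0 )
+  //                            .setDescriptorType( vk::DescriptorType::eUniformBuffer )
+  //                            .setDescriptorCount( 1 )
+  //                            .setStageFlags( vk::ShaderStageFlagBits::eVertex ) );
+  //
+  // Note that members holding pointers (pNext, pRegions, pImmutableSamplers,
+  // ...) are hashed by pointer value, and C-string members are hashed
+  // character by character, as the loops in this file show, so two structs
+  // compare/hash equal only when such pointers are identical or the pointed-to
+  // strings match.
+  // -------------------------------------------------------------------------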
template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayEventInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayEventInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayEventInfoEXT.displayEvent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & displayModeParametersKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayModeParametersKHR.visibleRegion ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeParametersKHR.refreshRate ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & displayModeCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayModeCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeCreateInfoKHR.parameters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const & displayModePropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayModePropertiesKHR.displayMode ); + VULKAN_HPP_HASH_COMBINE( seed, displayModePropertiesKHR.parameters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const & displayModeProperties2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayModeProperties2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeProperties2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeProperties2KHR.displayModeProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeStereoPropertiesNV const & displayModeStereoPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayModeStereoPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeStereoPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeStereoPropertiesNV.hdmi3DSupported ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const & displayNativeHdrSurfaceCapabilitiesAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayNativeHdrSurfaceCapabilitiesAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayNativeHdrSurfaceCapabilitiesAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayNativeHdrSurfaceCapabilitiesAMD.localDimmingSupport ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const & displayPlaneCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.supportedAlpha ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.minSrcPosition ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.maxSrcPosition ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.minSrcExtent ); + 
VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.maxSrcExtent ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.minDstPosition ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.maxDstPosition ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.minDstExtent ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.maxDstExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const & displayPlaneCapabilities2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilities2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilities2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilities2KHR.capabilities ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const & displayPlaneInfo2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneInfo2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneInfo2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneInfo2KHR.mode ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneInfo2KHR.planeIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const & displayPlanePropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPlanePropertiesKHR.currentDisplay ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlanePropertiesKHR.currentStackIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const & displayPlaneProperties2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneProperties2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneProperties2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneProperties2KHR.displayPlaneProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const & displayPowerInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPowerInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayPowerInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayPowerInfoEXT.powerState ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const & displayPresentInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.srcRect ); + VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.dstRect ); + VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.persistent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const & displayPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.display ); + for ( const char * p = displayPropertiesKHR.displayName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.physicalDimensions ); + 
VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.physicalResolution ); + VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.supportedTransforms ); + VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.planeReorderPossible ); + VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.persistentContent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const & displayProperties2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayProperties2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayProperties2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayProperties2KHR.displayProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & displaySurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.displayMode ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.planeIndex ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.planeStackIndex ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.transform ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.globalAlpha ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.alphaMode ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.imageExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoCreateInfoNV const & displaySurfaceStereoCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceStereoCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceStereoCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceStereoCreateInfoNV.stereoType ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const & drawIndexedIndirectCommand ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.indexCount ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.instanceCount ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.firstIndex ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.vertexOffset ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.firstInstance ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrawIndirectCommand const & drawIndirectCommand ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCommand.vertexCount ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCommand.instanceCount ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCommand.firstVertex ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCommand.firstInstance ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrawIndirectCountIndirectCommandEXT const & drawIndirectCountIndirectCommandEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
drawIndirectCountIndirectCommandEXT.bufferAddress ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCountIndirectCommandEXT.stride ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCountIndirectCommandEXT.commandCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT const & drawMeshTasksIndirectCommandEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandEXT.groupCountX ); + VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandEXT.groupCountY ); + VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandEXT.groupCountZ ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const & drawMeshTasksIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandNV.taskCount ); + VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandNV.firstTask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT const & drmFormatModifierProperties2EXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierProperties2EXT.drmFormatModifier ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierProperties2EXT.drmFormatModifierPlaneCount ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierProperties2EXT.drmFormatModifierTilingFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const & drmFormatModifierPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesEXT.drmFormatModifier ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesEXT.drmFormatModifierPlaneCount ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesEXT.drmFormatModifierTilingFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT const & drmFormatModifierPropertiesList2EXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesList2EXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesList2EXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesList2EXT.drmFormatModifierCount ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesList2EXT.pDrmFormatModifierProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const & drmFormatModifierPropertiesListEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesListEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesListEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesListEXT.drmFormatModifierCount ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesListEXT.pDrmFormatModifierProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::EventCreateInfo const & eventCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, eventCreateInfo.sType ); + 
VULKAN_HPP_HASH_COMBINE( seed, eventCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, eventCreateInfo.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & pipelineLibraryCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.libraryCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pLibraries ); + return seed; + } + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & executionGraphPipelineCreateInfoAMDX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.flags ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.stageCount ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.pStages ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.pLibraryInfo ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.layout ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.basePipelineHandle ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.basePipelineIndex ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX const & executionGraphPipelineScratchSizeAMDX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.minSize ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.maxSize ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.sizeGranularity ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const & exportFenceCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportFenceCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceCreateInfo.handleTypes ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const & exportFenceWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.pAttributes ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.dwAccess ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.name ); + return seed; + } + }; +# endif 
/*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const & exportMemoryAllocateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfo.handleTypes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const & exportMemoryAllocateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfoNV.handleTypes ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const & exportMemoryWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.pAttributes ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.dwAccess ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.name ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const & exportMemoryWin32HandleInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoNV.pAttributes ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoNV.dwAccess ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT const & exportMetalBufferInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMetalBufferInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalBufferInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalBufferInfoEXT.memory ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalBufferInfoEXT.mtlBuffer ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT const & exportMetalCommandQueueInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMetalCommandQueueInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalCommandQueueInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalCommandQueueInfoEXT.queue ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalCommandQueueInfoEXT.mtlCommandQueue ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT const & 
exportMetalDeviceInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMetalDeviceInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalDeviceInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalDeviceInfoEXT.mtlDevice ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT const & exportMetalIOSurfaceInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMetalIOSurfaceInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalIOSurfaceInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalIOSurfaceInfoEXT.image ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalIOSurfaceInfoEXT.ioSurface ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT const & exportMetalObjectCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectCreateInfoEXT.exportObjectType ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT const & exportMetalObjectsInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectsInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectsInfoEXT.pNext ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT const & exportMetalSharedEventInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.event ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.mtlSharedEvent ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT const & exportMetalTextureInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.image ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.imageView ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.bufferView ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.plane ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.mtlTexture ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const & exportSemaphoreCreateInfo ) const 
VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreCreateInfo.handleTypes ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const & exportSemaphoreWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.pAttributes ); + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.dwAccess ); + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.name ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExtensionProperties const & extensionProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, extensionProperties.extensionName[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, extensionProperties.specVersion ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const & externalMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryProperties.externalMemoryFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryProperties.exportFromImportedHandleTypes ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryProperties.compatibleHandleTypes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalBufferProperties const & externalBufferProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalBufferProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalBufferProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalBufferProperties.externalMemoryProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalComputeQueueCreateInfoNV const & externalComputeQueueCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalComputeQueueCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalComputeQueueCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalComputeQueueCreateInfoNV.preferredQueue ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalComputeQueueDataParamsNV const & externalComputeQueueDataParamsNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalComputeQueueDataParamsNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalComputeQueueDataParamsNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalComputeQueueDataParamsNV.deviceIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ExternalComputeQueueDeviceCreateInfoNV const & externalComputeQueueDeviceCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
externalComputeQueueDeviceCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalComputeQueueDeviceCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalComputeQueueDeviceCreateInfoNV.reservedExternalQueues ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalFenceProperties const & externalFenceProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.exportFromImportedHandleTypes ); + VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.compatibleHandleTypes ); + VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.externalFenceFeatures ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const & externalFormatANDROID ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalFormatANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalFormatANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalFormatANDROID.externalFormat ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalFormatQNX const & externalFormatQNX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalFormatQNX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalFormatQNX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalFormatQNX.externalFormat ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const & externalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatProperties.externalMemoryProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageFormatProperties const & imageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.maxExtent ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.maxMipLevels ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.maxArrayLayers ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.sampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.maxResourceSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV const & externalImageFormatPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatPropertiesNV.imageFormatProperties ); + VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatPropertiesNV.externalMemoryFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatPropertiesNV.exportFromImportedHandleTypes ); + VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatPropertiesNV.compatibleHandleTypes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::ExternalMemoryAcquireUnmodifiedEXT const & externalMemoryAcquireUnmodifiedEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryAcquireUnmodifiedEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryAcquireUnmodifiedEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryAcquireUnmodifiedEXT.acquireUnmodifiedMemory ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const & externalMemoryBufferCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryBufferCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryBufferCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryBufferCreateInfo.handleTypes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const & externalMemoryImageCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfo.handleTypes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const & externalMemoryImageCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfoNV.handleTypes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const & externalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.exportFromImportedHandleTypes ); + VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.compatibleHandleTypes ); + VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.externalSemaphoreFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & fenceCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, fenceCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, fenceCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, fenceCreateInfo.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const & fenceGetFdInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, fenceGetFdInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, fenceGetFdInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, fenceGetFdInfoKHR.fence ); + VULKAN_HPP_HASH_COMBINE( seed, fenceGetFdInfoKHR.handleType ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const & fenceGetWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
fenceGetWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, fenceGetWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, fenceGetWin32HandleInfoKHR.fence ); + VULKAN_HPP_HASH_COMBINE( seed, fenceGetWin32HandleInfoKHR.handleType ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const & filterCubicImageViewImageFormatPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, filterCubicImageViewImageFormatPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, filterCubicImageViewImageFormatPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, filterCubicImageViewImageFormatPropertiesEXT.filterCubic ); + VULKAN_HPP_HASH_COMBINE( seed, filterCubicImageViewImageFormatPropertiesEXT.filterCubicMinmax ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FormatProperties const & formatProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, formatProperties.linearTilingFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties.optimalTilingFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties.bufferFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FormatProperties2 const & formatProperties2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, formatProperties2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties2.formatProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FormatProperties3 const & formatProperties3 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.sType ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.linearTilingFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.optimalTilingFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.bufferFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR const & fragmentShadingRateAttachmentInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, fragmentShadingRateAttachmentInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, fragmentShadingRateAttachmentInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, fragmentShadingRateAttachmentInfoKHR.pFragmentShadingRateAttachment ); + VULKAN_HPP_HASH_COMBINE( seed, fragmentShadingRateAttachmentInfoKHR.shadingRateAttachmentTexelSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FrameBoundaryEXT const & frameBoundaryEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.frameID ); + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.imageCount ); + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.pImages ); + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.bufferCount ); + VULKAN_HPP_HASH_COMBINE( seed, 
frameBoundaryEXT.pBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.tagName ); + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.tagSize ); + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.pTag ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const & framebufferAttachmentImageInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.usage ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.width ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.height ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.layerCount ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.viewFormatCount ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.pViewFormats ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const & framebufferAttachmentsCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentsCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentsCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentsCreateInfo.attachmentImageInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentsCreateInfo.pAttachmentImageInfos ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & framebufferCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.renderPass ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.attachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.pAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.width ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.height ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.layers ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const & framebufferMixedSamplesCombinationNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.coverageReductionMode ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.rasterizationSamples ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.depthStencilSamples ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.colorSamples ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT const & generatedCommandsInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
generatedCommandsInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.shaderStages ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.indirectExecutionSet ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.indirectCommandsLayout ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.indirectAddress ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.indirectAddressSize ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.preprocessAddress ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.preprocessSize ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.maxSequenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.sequenceCountAddress ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.maxDrawCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const & indirectCommandsStreamNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsStreamNV.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsStreamNV.offset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV const & generatedCommandsInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.pipelineBindPoint ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.pipeline ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.indirectCommandsLayout ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.streamCount ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.pStreams ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesCount ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.preprocessBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.preprocessOffset ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.preprocessSize ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesCountBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesCountOffset ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesIndexBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesIndexOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT const & generatedCommandsMemoryRequirementsInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoEXT.indirectExecutionSet ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoEXT.indirectCommandsLayout ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoEXT.maxSequenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoEXT.maxDrawCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( 
VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV const & generatedCommandsMemoryRequirementsInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.pipelineBindPoint ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.pipeline ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.indirectCommandsLayout ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.maxSequencesCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsPipelineInfoEXT const & generatedCommandsPipelineInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsPipelineInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsPipelineInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsPipelineInfoEXT.pipeline ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsShaderInfoEXT const & generatedCommandsShaderInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsShaderInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsShaderInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsShaderInfoEXT.shaderCount ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsShaderInfoEXT.pShaders ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV const & latencyTimingsFrameReportNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.presentID ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.inputSampleTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.simStartTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.simEndTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.renderSubmitStartTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.renderSubmitEndTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.presentStartTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.presentEndTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.driverStartTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.driverEndTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.osRenderQueueStartTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.osRenderQueueEndTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.gpuRenderStartTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.gpuRenderEndTimeUs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV const & getLatencyMarkerInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, getLatencyMarkerInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
getLatencyMarkerInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, getLatencyMarkerInfoNV.timingCount ); + VULKAN_HPP_HASH_COMBINE( seed, getLatencyMarkerInfoNV.pTimings ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription const & vertexInputBindingDescription ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription.binding ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription.stride ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription.inputRate ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription const & vertexInputAttributeDescription ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription.location ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription.binding ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription.format ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription.offset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const & pipelineVertexInputStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.vertexBindingDescriptionCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.pVertexBindingDescriptions ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.vertexAttributeDescriptionCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.pVertexAttributeDescriptions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const & pipelineInputAssemblyStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.topology ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.primitiveRestartEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const & pipelineTessellationStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationStateCreateInfo.patchControlPoints ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const & pipelineViewportStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
pipelineViewportStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.viewportCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.pViewports ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.scissorCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.pScissors ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const & pipelineRasterizationStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthClampEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.rasterizerDiscardEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.polygonMode ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.cullMode ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.frontFace ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthBiasEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthBiasConstantFactor ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthBiasClamp ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthBiasSlopeFactor ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.lineWidth ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const & pipelineMultisampleStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.rasterizationSamples ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.sampleShadingEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.minSampleShading ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.pSampleMask ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.alphaToCoverageEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.alphaToOneEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::StencilOpState const & stencilOpState ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.failOp ); + VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.passOp ); + VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.depthFailOp ); + VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.compareOp ); + VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.compareMask ); + VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.writeMask ); + VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.reference ); + return seed; + } + }; + + template <> + 
struct hash<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const & pipelineDepthStencilStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.depthTestEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.depthWriteEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.depthCompareOp );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.depthBoundsTestEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.stencilTestEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.front );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.back );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.minDepthBounds );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.maxDepthBounds );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const & pipelineColorBlendAttachmentState ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.blendEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.srcColorBlendFactor );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.dstColorBlendFactor );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.colorBlendOp );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.srcAlphaBlendFactor );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.dstAlphaBlendFactor );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.alphaBlendOp );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.colorWriteMask );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const & pipelineColorBlendStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.logicOpEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.logicOp );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.attachmentCount );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.pAttachments );
+      for ( size_t i = 0; i < 4; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.blendConstants[i] );
+      }
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const & pipelineDynamicStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.dynamicStateCount );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.pDynamicStates );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & graphicsPipelineCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.stageCount );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pStages );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pVertexInputState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pInputAssemblyState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pTessellationState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pViewportState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pRasterizationState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pMultisampleState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pDepthStencilState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pColorBlendState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pDynamicState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.layout );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.renderPass );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.subpass );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.basePipelineHandle );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.basePipelineIndex );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT const & graphicsPipelineLibraryCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineLibraryCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineLibraryCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineLibraryCreateInfoEXT.flags );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV const & graphicsShaderGroupCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.stageCount );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.pStages );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.pVertexInputState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.pTessellationState );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV const & graphicsPipelineShaderGroupsCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.groupCount );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.pGroups );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.pipelineCount );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.pPipelines );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::XYColorEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::XYColorEXT const & xYColorEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, xYColorEXT.x );
+      VULKAN_HPP_HASH_COMBINE( seed, xYColorEXT.y );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::HdrMetadataEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::HdrMetadataEXT const & hdrMetadataEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.displayPrimaryRed );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.displayPrimaryGreen );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.displayPrimaryBlue );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.whitePoint );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.maxLuminance );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.minLuminance );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.maxContentLightLevel );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.maxFrameAverageLightLevel );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::HdrVividDynamicMetadataHUAWEI>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::HdrVividDynamicMetadataHUAWEI const & hdrVividDynamicMetadataHUAWEI ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, hdrVividDynamicMetadataHUAWEI.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrVividDynamicMetadataHUAWEI.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrVividDynamicMetadataHUAWEI.dynamicMetadataSize );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrVividDynamicMetadataHUAWEI.pDynamicMetadata );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & headlessSurfaceCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, headlessSurfaceCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, headlessSurfaceCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, headlessSurfaceCreateInfoEXT.flags );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQuery>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQuery const & hostImageCopyDevicePerformanceQuery ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQuery.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQuery.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQuery.optimalDeviceAccess );
+      VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQuery.identicalMemoryLayout );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo const & hostImageLayoutTransitionInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfo.image );
+      VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfo.oldLayout );
+      VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfo.newLayout );
VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfo.subresourceRange ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & iOSSurfaceCreateInfoMVK ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, iOSSurfaceCreateInfoMVK.sType ); + VULKAN_HPP_HASH_COMBINE( seed, iOSSurfaceCreateInfoMVK.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, iOSSurfaceCreateInfoMVK.flags ); + VULKAN_HPP_HASH_COMBINE( seed, iOSSurfaceCreateInfoMVK.pView ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageAlignmentControlCreateInfoMESA const & imageAlignmentControlCreateInfoMESA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageAlignmentControlCreateInfoMESA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageAlignmentControlCreateInfoMESA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageAlignmentControlCreateInfoMESA.maximumRequestedAlignment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageBlit const & imageBlit ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageBlit.srcSubresource ); + for ( size_t i = 0; i < 2; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, imageBlit.srcOffsets[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, imageBlit.dstSubresource ); + for ( size_t i = 0; i < 2; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, imageBlit.dstOffsets[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT const & imageCaptureDescriptorDataInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageCaptureDescriptorDataInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageCaptureDescriptorDataInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageCaptureDescriptorDataInfoEXT.image ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT const & imageCompressionControlEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.compressionControlPlaneCount ); + VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.pFixedRateFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT const & imageCompressionPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageCompressionPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageCompressionPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageCompressionPropertiesEXT.imageCompressionFlags ); + VULKAN_HPP_HASH_COMBINE( seed, imageCompressionPropertiesEXT.imageCompressionFixedRateFlags ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA const & imageFormatConstraintsInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t 
seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.imageCreateInfo ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.requiredFormatFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.flags ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.sysmemPixelFormat ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.colorSpaceCount ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.pColorSpaces ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA const & imageConstraintsInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.formatConstraintsCount ); + VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.pFormatConstraints ); + VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.bufferCollectionConstraints ); + VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.flags ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageCopy const & imageCopy ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageCopy.srcSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy.srcOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy.dstSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy.dstOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy.extent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceLayout const & subresourceLayout ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.offset ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.size ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.rowPitch ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.arrayPitch ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.depthPitch ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const & imageDrmFormatModifierExplicitCreateInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.drmFormatModifier ); + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.drmFormatModifierPlaneCount ); + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.pPlaneLayouts ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const & imageDrmFormatModifierListCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierListCreateInfoEXT.sType ); + 
VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierListCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierListCreateInfoEXT.drmFormatModifierCount ); + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierListCreateInfoEXT.pDrmFormatModifiers ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const & imageDrmFormatModifierPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierPropertiesEXT.drmFormatModifier ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const & imageFormatListCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageFormatListCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatListCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatListCreateInfo.viewFormatCount ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatListCreateInfo.pViewFormats ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const & imageFormatProperties2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties2.imageFormatProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const & imageMemoryBarrier ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.dstAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.oldLayout ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.newLayout ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.srcQueueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.dstQueueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.image ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.subresourceRange ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const & imageMemoryRequirementsInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryRequirementsInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryRequirementsInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryRequirementsInfo2.image ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & imagePipeSurfaceCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imagePipeSurfaceCreateInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imagePipeSurfaceCreateInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imagePipeSurfaceCreateInfoFUCHSIA.flags ); + 
VULKAN_HPP_HASH_COMBINE( seed, imagePipeSurfaceCreateInfoFUCHSIA.imagePipeHandle ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const & imagePlaneMemoryRequirementsInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imagePlaneMemoryRequirementsInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imagePlaneMemoryRequirementsInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imagePlaneMemoryRequirementsInfo.planeAspect ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageResolve const & imageResolve ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageResolve.srcSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve.srcOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve.dstSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve.dstOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve.extent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageResolve2 const & imageResolve2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.srcSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.srcOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.dstSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.dstOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.extent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const & imageSparseMemoryRequirementsInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageSparseMemoryRequirementsInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageSparseMemoryRequirementsInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageSparseMemoryRequirementsInfo2.image ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const & imageStencilUsageCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageStencilUsageCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageStencilUsageCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageStencilUsageCreateInfo.stencilUsage ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const & imageSwapchainCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageSwapchainCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageSwapchainCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageSwapchainCreateInfoKHR.swapchain ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const & imageViewASTCDecodeModeEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageViewASTCDecodeModeEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewASTCDecodeModeEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewASTCDecodeModeEXT.decodeMode ); + return seed; + } + }; + + template <> + struct hash + { + 
std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX const & imageViewAddressPropertiesNVX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageViewAddressPropertiesNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewAddressPropertiesNVX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewAddressPropertiesNVX.deviceAddress ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewAddressPropertiesNVX.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT const & imageViewCaptureDescriptorDataInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageViewCaptureDescriptorDataInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCaptureDescriptorDataInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCaptureDescriptorDataInfoEXT.imageView ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & imageViewCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.image ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.viewType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.format ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.components ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.subresourceRange ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const & imageViewHandleInfoNVX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.imageView ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.descriptorType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.sampler ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT const & imageViewMinLodCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageViewMinLodCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewMinLodCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewMinLodCreateInfoEXT.minLod ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM const & imageViewSampleWeightCreateInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.filterCenter ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.filterSize ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.numPhases ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT const & imageViewSlicedCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 
0; + VULKAN_HPP_HASH_COMBINE( seed, imageViewSlicedCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewSlicedCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewSlicedCreateInfoEXT.sliceOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewSlicedCreateInfoEXT.sliceCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const & imageViewUsageCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageViewUsageCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewUsageCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewUsageCreateInfo.usage ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const & importAndroidHardwareBufferInfoANDROID ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importAndroidHardwareBufferInfoANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importAndroidHardwareBufferInfoANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importAndroidHardwareBufferInfoANDROID.buffer ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const & importFenceFdInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.fence ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.fd ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const & importFenceWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.fence ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.handle ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.name ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA const & importMemoryBufferCollectionFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importMemoryBufferCollectionFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryBufferCollectionFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryBufferCollectionFUCHSIA.collection ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryBufferCollectionFUCHSIA.index ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const & importMemoryFdInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t 
seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importMemoryFdInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryFdInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryFdInfoKHR.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryFdInfoKHR.fd ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const & importMemoryHostPointerInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importMemoryHostPointerInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryHostPointerInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryHostPointerInfoEXT.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryHostPointerInfoEXT.pHostPointer ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryMetalHandleInfoEXT const & importMemoryMetalHandleInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importMemoryMetalHandleInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryMetalHandleInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryMetalHandleInfoEXT.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryMetalHandleInfoEXT.handle ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const & importMemoryWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.handle ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.name ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const & importMemoryWin32HandleInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoNV.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoNV.handle ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA const & importMemoryZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importMemoryZirconHandleInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryZirconHandleInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryZirconHandleInfoFUCHSIA.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryZirconHandleInfoFUCHSIA.handle ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT const & importMetalBufferInfoEXT ) 
const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importMetalBufferInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importMetalBufferInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importMetalBufferInfoEXT.mtlBuffer ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT const & importMetalIOSurfaceInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importMetalIOSurfaceInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importMetalIOSurfaceInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importMetalIOSurfaceInfoEXT.ioSurface ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT const & importMetalSharedEventInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importMetalSharedEventInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importMetalSharedEventInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importMetalSharedEventInfoEXT.mtlSharedEvent ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT const & importMetalTextureInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importMetalTextureInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importMetalTextureInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importMetalTextureInfoEXT.plane ); + VULKAN_HPP_HASH_COMBINE( seed, importMetalTextureInfoEXT.mtlTexture ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportScreenBufferInfoQNX const & importScreenBufferInfoQNX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importScreenBufferInfoQNX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importScreenBufferInfoQNX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importScreenBufferInfoQNX.buffer ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const & importSemaphoreFdInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.fd ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const & importSemaphoreWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
importSemaphoreWin32HandleInfoKHR.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.handle ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.name ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA const & importSemaphoreZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.flags ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.zirconHandle ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT const & indirectCommandsExecutionSetTokenEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsExecutionSetTokenEXT.type ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsExecutionSetTokenEXT.shaderStages ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT const & indirectCommandsIndexBufferTokenEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsIndexBufferTokenEXT.mode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PushConstantRange const & pushConstantRange ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.offset ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT const & indirectCommandsPushConstantTokenEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsPushConstantTokenEXT.updateRange ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT const & indirectCommandsVertexBufferTokenEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsVertexBufferTokenEXT.vertexBindingUnit ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT const & indirectCommandsLayoutCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, 
indirectCommandsLayoutCreateInfoEXT.shaderStages ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.indirectStride ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.pipelineLayout ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.tokenCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.pTokens ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV const & indirectCommandsLayoutTokenNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.tokenType ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.stream ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.offset ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.vertexBindingUnit ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.vertexDynamicStride ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pushconstantPipelineLayout ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pushconstantShaderStageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pushconstantOffset ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pushconstantSize ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.indirectStateFlags ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.indexTypeCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pIndexTypes ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pIndexTypeValues ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & indirectCommandsLayoutCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.pipelineBindPoint ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.tokenCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.pTokens ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.streamCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.pStreamStrides ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT const & indirectExecutionSetPipelineInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetPipelineInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetPipelineInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetPipelineInfoEXT.initialPipeline ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetPipelineInfoEXT.maxPipelineCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT const & indirectExecutionSetShaderLayoutInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; 
+ VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderLayoutInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderLayoutInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderLayoutInfoEXT.setLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderLayoutInfoEXT.pSetLayouts ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT const & indirectExecutionSetShaderInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.shaderCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.pInitialShaders ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.pSetLayoutInfos ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.maxShaderCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.pushConstantRangeCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.pPushConstantRanges ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const & initializePerformanceApiInfoINTEL ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, initializePerformanceApiInfoINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, initializePerformanceApiInfoINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, initializePerformanceApiInfoINTEL.pUserData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const & inputAttachmentAspectReference ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, inputAttachmentAspectReference.subpass ); + VULKAN_HPP_HASH_COMBINE( seed, inputAttachmentAspectReference.inputAttachmentIndex ); + VULKAN_HPP_HASH_COMBINE( seed, inputAttachmentAspectReference.aspectMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & instanceCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.pApplicationInfo ); + VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.enabledLayerCount ); + for ( size_t i = 0; i < instanceCreateInfo.enabledLayerCount; ++i ) + { + for ( const char * p = instanceCreateInfo.ppEnabledLayerNames[i]; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + } + VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.enabledExtensionCount ); + for ( size_t i = 0; i < instanceCreateInfo.enabledExtensionCount; ++i ) + { + for ( const char * p = instanceCreateInfo.ppEnabledExtensionNames[i]; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::LatencySleepInfoNV const & latencySleepInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, latencySleepInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
latencySleepInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, latencySleepInfoNV.signalSemaphore ); + VULKAN_HPP_HASH_COMBINE( seed, latencySleepInfoNV.value ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV const & latencySleepModeInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, latencySleepModeInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, latencySleepModeInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, latencySleepModeInfoNV.lowLatencyMode ); + VULKAN_HPP_HASH_COMBINE( seed, latencySleepModeInfoNV.lowLatencyBoost ); + VULKAN_HPP_HASH_COMBINE( seed, latencySleepModeInfoNV.minimumIntervalUs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::LatencySubmissionPresentIdNV const & latencySubmissionPresentIdNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, latencySubmissionPresentIdNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, latencySubmissionPresentIdNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, latencySubmissionPresentIdNV.presentID ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::LatencySurfaceCapabilitiesNV const & latencySurfaceCapabilitiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, latencySurfaceCapabilitiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, latencySurfaceCapabilitiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, latencySurfaceCapabilitiesNV.presentModeCount ); + VULKAN_HPP_HASH_COMBINE( seed, latencySurfaceCapabilitiesNV.pPresentModes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::LayerProperties const & layerProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, layerProperties.layerName[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, layerProperties.specVersion ); + VULKAN_HPP_HASH_COMBINE( seed, layerProperties.implementationVersion ); + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, layerProperties.description[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::LayerSettingEXT const & layerSettingEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + for ( const char * p = layerSettingEXT.pLayerName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + for ( const char * p = layerSettingEXT.pSettingName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, layerSettingEXT.type ); + VULKAN_HPP_HASH_COMBINE( seed, layerSettingEXT.valueCount ); + VULKAN_HPP_HASH_COMBINE( seed, layerSettingEXT.pValues ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::LayerSettingsCreateInfoEXT const & layerSettingsCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, layerSettingsCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, layerSettingsCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, layerSettingsCreateInfoEXT.settingCount ); + VULKAN_HPP_HASH_COMBINE( seed, layerSettingsCreateInfoEXT.pSettings ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + template <> + struct hash + { + 
std::size_t operator()( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & macOSSurfaceCreateInfoMVK ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, macOSSurfaceCreateInfoMVK.sType ); + VULKAN_HPP_HASH_COMBINE( seed, macOSSurfaceCreateInfoMVK.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, macOSSurfaceCreateInfoMVK.flags ); + VULKAN_HPP_HASH_COMBINE( seed, macOSSurfaceCreateInfoMVK.pView ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MappedMemoryRange const & mappedMemoryRange ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.sType ); + VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.memory ); + VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.offset ); + VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const & memoryAllocateFlagsInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateFlagsInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateFlagsInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateFlagsInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateFlagsInfo.deviceMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & memoryAllocateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateInfo.allocationSize ); + VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateInfo.memoryTypeIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryBarrier const & memoryBarrier ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier.dstAccessMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryBarrierAccessFlags3KHR const & memoryBarrierAccessFlags3KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrierAccessFlags3KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrierAccessFlags3KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrierAccessFlags3KHR.srcAccessMask3 ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrierAccessFlags3KHR.dstAccessMask3 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const & memoryDedicatedAllocateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedAllocateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedAllocateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedAllocateInfo.image ); + VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedAllocateInfo.buffer ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const & 
memoryDedicatedRequirements ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedRequirements.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedRequirements.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedRequirements.prefersDedicatedAllocation ); + VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedRequirements.requiresDedicatedAllocation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const & memoryFdPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryFdPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryFdPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryFdPropertiesKHR.memoryTypeBits ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const & memoryGetAndroidHardwareBufferInfoANDROID ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryGetAndroidHardwareBufferInfoANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetAndroidHardwareBufferInfoANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetAndroidHardwareBufferInfoANDROID.memory ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const & memoryGetFdInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryGetFdInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetFdInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetFdInfoKHR.memory ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetFdInfoKHR.handleType ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT const & memoryGetMetalHandleInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryGetMetalHandleInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetMetalHandleInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetMetalHandleInfoEXT.memory ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetMetalHandleInfoEXT.handleType ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV const & memoryGetRemoteAddressInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryGetRemoteAddressInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetRemoteAddressInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetRemoteAddressInfoNV.memory ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetRemoteAddressInfoNV.handleType ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const & memoryGetWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryGetWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetWin32HandleInfoKHR.memory ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetWin32HandleInfoKHR.handleType ); + return 
seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA const & memoryGetZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryGetZirconHandleInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetZirconHandleInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetZirconHandleInfoFUCHSIA.memory ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetZirconHandleInfoFUCHSIA.handleType ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryHeap const & memoryHeap ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryHeap.size ); + VULKAN_HPP_HASH_COMBINE( seed, memoryHeap.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const & memoryHostPointerPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryHostPointerPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryHostPointerPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryHostPointerPropertiesEXT.memoryTypeBits ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryMapInfo const & memoryMapInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfo.memory ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfo.offset ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfo.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryMapPlacedInfoEXT const & memoryMapPlacedInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryMapPlacedInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapPlacedInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapPlacedInfoEXT.pPlacedAddress ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT const & memoryMetalHandlePropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryMetalHandlePropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMetalHandlePropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMetalHandlePropertiesEXT.memoryTypeBits ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const & memoryOpaqueCaptureAddressAllocateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryOpaqueCaptureAddressAllocateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryOpaqueCaptureAddressAllocateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryOpaqueCaptureAddressAllocateInfo.opaqueCaptureAddress ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const 
& memoryPriorityAllocateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryPriorityAllocateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryPriorityAllocateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryPriorityAllocateInfoEXT.priority ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryRequirements const & memoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements.size ); + VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements.alignment ); + VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements.memoryTypeBits ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryRequirements2 const & memoryRequirements2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements2.memoryRequirements ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryType const & memoryType ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryType.propertyFlags ); + VULKAN_HPP_HASH_COMBINE( seed, memoryType.heapIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryUnmapInfo const & memoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfo.memory ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const & memoryWin32HandlePropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryWin32HandlePropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryWin32HandlePropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryWin32HandlePropertiesKHR.memoryTypeBits ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA const & memoryZirconHandlePropertiesFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryZirconHandlePropertiesFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryZirconHandlePropertiesFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryZirconHandlePropertiesFUCHSIA.memoryTypeBits ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & metalSurfaceCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, metalSurfaceCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, metalSurfaceCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, metalSurfaceCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, metalSurfaceCreateInfoEXT.pLayer ); + return seed; + } + }; +# endif 
/*VK_USE_PLATFORM_METAL_EXT*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT const & micromapBuildSizesInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.micromapSize ); + VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.buildScratchSize ); + VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.discardable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & micromapCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.createFlags ); + VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.offset ); + VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.size ); + VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.type ); + VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.deviceAddress ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapTriangleEXT const & micromapTriangleEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, micromapTriangleEXT.dataOffset ); + VULKAN_HPP_HASH_COMBINE( seed, micromapTriangleEXT.subdivisionLevel ); + VULKAN_HPP_HASH_COMBINE( seed, micromapTriangleEXT.format ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT const & micromapVersionInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, micromapVersionInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, micromapVersionInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, micromapVersionInfoEXT.pVersionData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT const & multiDrawIndexedInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, multiDrawIndexedInfoEXT.firstIndex ); + VULKAN_HPP_HASH_COMBINE( seed, multiDrawIndexedInfoEXT.indexCount ); + VULKAN_HPP_HASH_COMBINE( seed, multiDrawIndexedInfoEXT.vertexOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT const & multiDrawInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, multiDrawInfoEXT.firstVertex ); + VULKAN_HPP_HASH_COMBINE( seed, multiDrawInfoEXT.vertexCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const & multisamplePropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, multisamplePropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, multisamplePropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, multisamplePropertiesEXT.maxSampleLocationGridSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT const & 
multisampledRenderToSingleSampledInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, multisampledRenderToSingleSampledInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, multisampledRenderToSingleSampledInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, multisampledRenderToSingleSampledInfoEXT.multisampledRenderToSingleSampledEnable ); + VULKAN_HPP_HASH_COMBINE( seed, multisampledRenderToSingleSampledInfoEXT.rasterizationSamples ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX const & multiviewPerViewAttributesInfoNVX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewAttributesInfoNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewAttributesInfoNVX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewAttributesInfoNVX.perViewAttributes ); + VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewAttributesInfoNVX.perViewAttributesPositionXOnly ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & multiviewPerViewRenderAreasRenderPassBeginInfoQCOM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewRenderAreasRenderPassBeginInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewRenderAreasRenderPassBeginInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewRenderAreasRenderPassBeginInfoQCOM.perViewRenderAreaCount ); + VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewRenderAreasRenderPassBeginInfoQCOM.pPerViewRenderAreas ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT const & mutableDescriptorTypeListEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeListEXT.descriptorTypeCount ); + VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeListEXT.pDescriptorTypes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT const & mutableDescriptorTypeCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoEXT.mutableDescriptorTypeListCount ); + VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoEXT.pMutableDescriptorTypeLists ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT const & opaqueCaptureDescriptorDataCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, opaqueCaptureDescriptorDataCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, opaqueCaptureDescriptorDataCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, opaqueCaptureDescriptorDataCreateInfoEXT.opaqueCaptureDescriptorData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV const & opticalFlowExecuteInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.sType ); + 
VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV const & opticalFlowImageFormatInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatInfoNV.usage ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV const & opticalFlowImageFormatPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatPropertiesNV.format ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & opticalFlowSessionCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.width ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.height ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.imageFormat ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.flowVectorFormat ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.costFormat ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.outputGridSize ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.hintGridSize ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.performanceLevel ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV const & opticalFlowSessionCreatePrivateDataInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.id ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.size ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.pPrivateData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV const & outOfBandQueueTypeInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, outOfBandQueueTypeInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, outOfBandQueueTypeInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, outOfBandQueueTypeInfoNV.queueType ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureFlagsNV const & 
partitionedAccelerationStructureFlagsNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureFlagsNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureFlagsNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureFlagsNV.enablePartitionTranslation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureUpdateInstanceDataNV const & + partitionedAccelerationStructureUpdateInstanceDataNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureUpdateInstanceDataNV.instanceIndex ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureUpdateInstanceDataNV.instanceContributionToHitGroupIndex ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureUpdateInstanceDataNV.accelerationStructure ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureWriteInstanceDataNV const & partitionedAccelerationStructureWriteInstanceDataNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWriteInstanceDataNV.transform ); + for ( size_t i = 0; i < 6; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWriteInstanceDataNV.explicitAABB[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWriteInstanceDataNV.instanceID ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWriteInstanceDataNV.instanceMask ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWriteInstanceDataNV.instanceContributionToHitGroupIndex ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWriteInstanceDataNV.instanceFlags ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWriteInstanceDataNV.instanceIndex ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWriteInstanceDataNV.partitionIndex ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWriteInstanceDataNV.accelerationStructure ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureWritePartitionTranslationDataNV const & + partitionedAccelerationStructureWritePartitionTranslationDataNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWritePartitionTranslationDataNV.partitionIndex ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWritePartitionTranslationDataNV.partitionTranslation[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const & pastPresentationTimingGOOGLE ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.presentID ); + VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.desiredPresentTime ); + VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.actualPresentTime ); + VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.earliestPresentTime ); + VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.presentMargin ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::PerTileBeginInfoQCOM const & perTileBeginInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, perTileBeginInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, perTileBeginInfoQCOM.pNext ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerTileEndInfoQCOM const & perTileEndInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, perTileEndInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, perTileEndInfoQCOM.pNext ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & performanceConfigurationAcquireInfoINTEL ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceConfigurationAcquireInfoINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceConfigurationAcquireInfoINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceConfigurationAcquireInfoINTEL.type ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const & performanceCounterDescriptionKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.flags ); + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.name[i] ); + } + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.category[i] ); + } + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.description[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const & performanceCounterKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.unit ); + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.scope ); + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.storage ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.uuid[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const & performanceMarkerInfoINTEL ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceMarkerInfoINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceMarkerInfoINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceMarkerInfoINTEL.marker ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const & performanceOverrideInfoINTEL ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.type ); + VULKAN_HPP_HASH_COMBINE( seed, 
performanceOverrideInfoINTEL.enable ); + VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.parameter ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const & performanceQuerySubmitInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceQuerySubmitInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceQuerySubmitInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceQuerySubmitInfoKHR.counterPassIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const & performanceStreamMarkerInfoINTEL ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceStreamMarkerInfoINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceStreamMarkerInfoINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceStreamMarkerInfoINTEL.marker ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const & physicalDevice16BitStorageFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.storageBuffer16BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.uniformAndStorageBuffer16BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.storagePushConstant16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.storageInputOutput16 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT const & physicalDevice4444FormatsFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice4444FormatsFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice4444FormatsFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice4444FormatsFeaturesEXT.formatA4R4G4B4 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice4444FormatsFeaturesEXT.formatA4B4G4R4 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const & physicalDevice8BitStorageFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.storageBuffer8BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.uniformAndStorageBuffer8BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.storagePushConstant8 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const & physicalDeviceASTCDecodeFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceASTCDecodeFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceASTCDecodeFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceASTCDecodeFeaturesEXT.decodeModeSharedExponent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR const & physicalDeviceAccelerationStructureFeaturesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructure ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructureCaptureReplay ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructureIndirectBuild ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructureHostCommands ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.descriptorBindingAccelerationStructureUpdateAfterBind ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR const & physicalDeviceAccelerationStructurePropertiesKHR ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxGeometryCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxInstanceCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxPrimitiveCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxPerStageDescriptorAccelerationStructures ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxPerStageDescriptorUpdateAfterBindAccelerationStructures ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxDescriptorSetAccelerationStructures ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxDescriptorSetUpdateAfterBindAccelerationStructures ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.minAccelerationStructureScratchOffsetAlignment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT const & physicalDeviceAddressBindingReportFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAddressBindingReportFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAddressBindingReportFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAddressBindingReportFeaturesEXT.reportAddressBinding ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC const & physicalDeviceAmigoProfilingFeaturesSEC ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAmigoProfilingFeaturesSEC.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAmigoProfilingFeaturesSEC.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceAmigoProfilingFeaturesSEC.amigoProfiling ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAntiLagFeaturesAMD const & physicalDeviceAntiLagFeaturesAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAntiLagFeaturesAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAntiLagFeaturesAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAntiLagFeaturesAMD.antiLag ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & + physicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT.attachmentFeedbackLoopDynamicState ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & + physicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT.attachmentFeedbackLoopLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & physicalDeviceBlendOperationAdvancedFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedFeaturesEXT.advancedBlendCoherentOperations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & physicalDeviceBlendOperationAdvancedPropertiesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendMaxColorAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendIndependentBlend ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendNonPremultipliedSrcColor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendNonPremultipliedDstColor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendCorrelatedOverlap ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendAllOperations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t 
operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT const & physicalDeviceBorderColorSwizzleFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBorderColorSwizzleFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBorderColorSwizzleFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBorderColorSwizzleFeaturesEXT.borderColorSwizzle ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBorderColorSwizzleFeaturesEXT.borderColorSwizzleFromImage ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const & physicalDeviceBufferDeviceAddressFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.bufferDeviceAddress ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.bufferDeviceAddressCaptureReplay ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.bufferDeviceAddressMultiDevice ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const & physicalDeviceBufferDeviceAddressFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.bufferDeviceAddress ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.bufferDeviceAddressCaptureReplay ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.bufferDeviceAddressMultiDevice ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterAccelerationStructureFeaturesNV const & + physicalDeviceClusterAccelerationStructureFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructureFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructureFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructureFeaturesNV.clusterAccelerationStructure ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterAccelerationStructurePropertiesNV const & + physicalDeviceClusterAccelerationStructurePropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructurePropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructurePropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructurePropertiesNV.maxVerticesPerCluster ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructurePropertiesNV.maxTrianglesPerCluster ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructurePropertiesNV.clusterScratchByteAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceClusterAccelerationStructurePropertiesNV.clusterByteAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructurePropertiesNV.clusterTemplateByteAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructurePropertiesNV.clusterBottomLevelByteAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructurePropertiesNV.clusterTemplateBoundsByteAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructurePropertiesNV.maxClusterGeometryIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & physicalDeviceClusterCullingShaderFeaturesHUAWEI ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderFeaturesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderFeaturesHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderFeaturesHUAWEI.clustercullingShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderFeaturesHUAWEI.multiviewClusterCullingShader ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & physicalDeviceClusterCullingShaderPropertiesHUAWEI ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.pNext ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.maxWorkGroupCount[i] ); + } + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.maxWorkGroupSize[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.maxOutputClusterCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.indirectBufferOffsetAlignment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & physicalDeviceClusterCullingShaderVrsFeaturesHUAWEI ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderVrsFeaturesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderVrsFeaturesHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderVrsFeaturesHUAWEI.clusterShadingRate ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const & physicalDeviceCoherentMemoryFeaturesAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoherentMemoryFeaturesAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoherentMemoryFeaturesAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoherentMemoryFeaturesAMD.deviceCoherentMemory ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT const & physicalDeviceColorWriteEnableFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + 
std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceColorWriteEnableFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceColorWriteEnableFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceColorWriteEnableFeaturesEXT.colorWriteEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCommandBufferInheritanceFeaturesNV const & physicalDeviceCommandBufferInheritanceFeaturesNV ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCommandBufferInheritanceFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCommandBufferInheritanceFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCommandBufferInheritanceFeaturesNV.commandBufferInheritance ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & physicalDeviceComputeShaderDerivativesFeaturesKHR ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesKHR.computeDerivativeGroupQuads ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesKHR.computeDerivativeGroupLinear ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & physicalDeviceComputeShaderDerivativesPropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesPropertiesKHR.meshAndTaskShaderDerivatives ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const & physicalDeviceConditionalRenderingFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.conditionalRendering ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.inheritedConditionalRendering ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const & + physicalDeviceConservativeRasterizationPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.primitiveOverestimationSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.maxExtraPrimitiveOverestimationSize ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceConservativeRasterizationPropertiesEXT.extraPrimitiveOverestimationSizeGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.primitiveUnderestimation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.conservativePointAndLineRasterization ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.degenerateTrianglesRasterized ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.degenerateLinesRasterized ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.fullyCoveredFragmentShaderInputVariable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.conservativeRasterizationPostDepthCoverage ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2FeaturesNV const & physicalDeviceCooperativeMatrix2FeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixWorkgroupScope ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixFlexibleDimensions ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixReductions ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixConversions ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixPerElementOperations ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixTensorAddressing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixBlockLoads ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2PropertiesNV const & physicalDeviceCooperativeMatrix2PropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.cooperativeMatrixWorkgroupScopeMaxWorkgroupSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.cooperativeMatrixFlexibleDimensionsMaxDimension ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.cooperativeMatrixWorkgroupScopeReservedSharedMemory ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesKHR const & physicalDeviceCooperativeMatrixFeaturesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesKHR.cooperativeMatrix ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesKHR.cooperativeMatrixRobustBufferAccess ); + return seed; + } + }; + + template <> + 
struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const & physicalDeviceCooperativeMatrixFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.cooperativeMatrix ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.cooperativeMatrixRobustBufferAccess ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesKHR const & physicalDeviceCooperativeMatrixPropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesKHR.cooperativeMatrixSupportedStages ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const & physicalDeviceCooperativeMatrixPropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesNV.cooperativeMatrixSupportedStages ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeVectorFeaturesNV const & physicalDeviceCooperativeVectorFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeVectorFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeVectorFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeVectorFeaturesNV.cooperativeVector ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeVectorFeaturesNV.cooperativeVectorTraining ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeVectorPropertiesNV const & physicalDeviceCooperativeVectorPropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeVectorPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeVectorPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeVectorPropertiesNV.cooperativeVectorSupportedStages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeVectorPropertiesNV.cooperativeVectorTrainingFloat16Accumulation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeVectorPropertiesNV.cooperativeVectorTrainingFloat32Accumulation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeVectorPropertiesNV.maxCooperativeVectorComponents ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV const & physicalDeviceCopyMemoryIndirectFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceCopyMemoryIndirectFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectFeaturesNV.indirectCopy ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV const & physicalDeviceCopyMemoryIndirectPropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectPropertiesNV.supportedQueues ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const & physicalDeviceCornerSampledImageFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCornerSampledImageFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCornerSampledImageFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCornerSampledImageFeaturesNV.cornerSampledImage ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const & physicalDeviceCoverageReductionModeFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoverageReductionModeFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoverageReductionModeFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoverageReductionModeFeaturesNV.coverageReductionMode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicClampFeaturesQCOM const & physicalDeviceCubicClampFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCubicClampFeaturesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCubicClampFeaturesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCubicClampFeaturesQCOM.cubicRangeClamp ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicWeightsFeaturesQCOM const & physicalDeviceCubicWeightsFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCubicWeightsFeaturesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCubicWeightsFeaturesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCubicWeightsFeaturesQCOM.selectableCubicWeights ); + return seed; + } + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV const & physicalDeviceCudaKernelLaunchFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchFeaturesNV.cudaKernelLaunchFeatures ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t 
operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV const & physicalDeviceCudaKernelLaunchPropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.computeCapabilityMinor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.computeCapabilityMajor ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT const & physicalDeviceCustomBorderColorFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorFeaturesEXT.customBorderColors ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorFeaturesEXT.customBorderColorWithoutFormat ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT const & physicalDeviceCustomBorderColorPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorPropertiesEXT.maxCustomBorderColorSamplers ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & + physicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDedicatedAllocationImageAliasingFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDedicatedAllocationImageAliasingFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDedicatedAllocationImageAliasingFeaturesNV.dedicatedAllocationImageAliasing ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthBiasControlFeaturesEXT const & physicalDeviceDepthBiasControlFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthBiasControlFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthBiasControlFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthBiasControlFeaturesEXT.depthBiasControl ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthBiasControlFeaturesEXT.leastRepresentableValueForceUnormRepresentation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthBiasControlFeaturesEXT.floatRepresentation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthBiasControlFeaturesEXT.depthBiasExact ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampControlFeaturesEXT const & physicalDeviceDepthClampControlFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceDepthClampControlFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampControlFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampControlFeaturesEXT.depthClampControl ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesKHR const & physicalDeviceDepthClampZeroOneFeaturesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampZeroOneFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampZeroOneFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampZeroOneFeaturesKHR.depthClampZeroOne ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT const & physicalDeviceDepthClipControlFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipControlFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipControlFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipControlFeaturesEXT.depthClipControl ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const & physicalDeviceDepthClipEnableFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipEnableFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipEnableFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipEnableFeaturesEXT.depthClipEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const & physicalDeviceDepthStencilResolveProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.supportedDepthResolveModes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.supportedStencilResolveModes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.independentResolveNone ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.independentResolve ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & + physicalDeviceDescriptorBufferDensityMapPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferDensityMapPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferDensityMapPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferDensityMapPropertiesEXT.combinedImageSamplerDensityMapDescriptorSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT const & physicalDeviceDescriptorBufferFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceDescriptorBufferFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.descriptorBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.descriptorBufferCaptureReplay ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.descriptorBufferImageLayoutIgnored ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.descriptorBufferPushDescriptors ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT const & physicalDeviceDescriptorBufferPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.combinedImageSamplerDescriptorSingleArray ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.bufferlessPushDescriptors ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.allowSamplerImageViewPostSubmitCreation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.descriptorBufferOffsetAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxDescriptorBufferBindings ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxResourceDescriptorBufferBindings ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxSamplerDescriptorBufferBindings ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxEmbeddedImmutableSamplerBindings ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxEmbeddedImmutableSamplers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.bufferCaptureReplayDescriptorDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.imageCaptureReplayDescriptorDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.imageViewCaptureReplayDescriptorDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.samplerCaptureReplayDescriptorDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.accelerationStructureCaptureReplayDescriptorDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.samplerDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.combinedImageSamplerDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.sampledImageDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.storageImageDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.uniformTexelBufferDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.robustUniformTexelBufferDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.storageTexelBufferDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.robustStorageTexelBufferDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceDescriptorBufferPropertiesEXT.uniformBufferDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.robustUniformBufferDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.storageBufferDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.robustStorageBufferDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.inputAttachmentDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.accelerationStructureDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxSamplerDescriptorBufferRange ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxResourceDescriptorBufferRange ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.samplerDescriptorBufferAddressSpaceSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.resourceDescriptorBufferAddressSpaceSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.descriptorBufferAddressSpaceSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const & physicalDeviceDescriptorIndexingFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderInputAttachmentArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderUniformTexelBufferArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderStorageTexelBufferArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderUniformBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderSampledImageArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderStorageBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderStorageImageArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderInputAttachmentArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderUniformTexelBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderStorageTexelBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingUniformBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingSampledImageUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingStorageImageUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingStorageBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingUniformTexelBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceDescriptorIndexingFeatures.descriptorBindingStorageTexelBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingUpdateUnusedWhilePending ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingPartiallyBound ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingVariableDescriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.runtimeDescriptorArray ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const & physicalDeviceDescriptorIndexingProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxUpdateAfterBindDescriptorsInAllPools ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderUniformBufferArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderSampledImageArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderStorageBufferArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderStorageImageArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderInputAttachmentArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.robustBufferAccessUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.quadDivergentImplicitLod ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindSamplers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindStorageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindSampledImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindStorageImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindInputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageUpdateAfterBindResources ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindSamplers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindSampledImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindInputAttachments ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & + physicalDeviceDescriptorPoolOverallocationFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorPoolOverallocationFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorPoolOverallocationFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorPoolOverallocationFeaturesNV.descriptorPoolOverallocation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & physicalDeviceDescriptorSetHostMappingFeaturesVALVE ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorSetHostMappingFeaturesVALVE.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorSetHostMappingFeaturesVALVE.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorSetHostMappingFeaturesVALVE.descriptorSetHostMapping ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & + physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.deviceGeneratedCompute ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.deviceGeneratedComputePipelines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.deviceGeneratedComputeCaptureReplay ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & physicalDeviceDeviceGeneratedCommandsFeaturesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesEXT.deviceGeneratedCommands ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesEXT.dynamicGeneratedPipelineLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & physicalDeviceDeviceGeneratedCommandsFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceDeviceGeneratedCommandsFeaturesNV.deviceGeneratedCommands ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & physicalDeviceDeviceGeneratedCommandsPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.maxIndirectPipelineCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.maxIndirectShaderObjectCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.maxIndirectSequenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.maxIndirectCommandsTokenCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.maxIndirectCommandsTokenOffset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.maxIndirectCommandsIndirectStride ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.supportedIndirectCommandsInputModes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.supportedIndirectCommandsShaderStages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.supportedIndirectCommandsShaderStagesPipelineBinding ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.supportedIndirectCommandsShaderStagesShaderBinding ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.deviceGeneratedCommandsTransformFeedback ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.deviceGeneratedCommandsMultiDrawIndirectCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & physicalDeviceDeviceGeneratedCommandsPropertiesNV ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxGraphicsShaderGroupCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectSequenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsTokenCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsStreamCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsTokenOffset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsStreamStride ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.minSequencesCountBufferOffsetAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.minSequencesIndexBufferOffsetAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.minIndirectCommandsBufferOffsetAlignment ); + return 
seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT const & physicalDeviceDeviceMemoryReportFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceMemoryReportFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceMemoryReportFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceMemoryReportFeaturesEXT.deviceMemoryReport ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV const & physicalDeviceDiagnosticsConfigFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiagnosticsConfigFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiagnosticsConfigFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiagnosticsConfigFeaturesNV.diagnosticsConfig ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const & physicalDeviceDiscardRectanglePropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiscardRectanglePropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiscardRectanglePropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiscardRectanglePropertiesEXT.maxDiscardRectangles ); + return seed; + } + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV const & physicalDeviceDisplacementMicromapFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDisplacementMicromapFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDisplacementMicromapFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDisplacementMicromapFeaturesNV.displacementMicromap ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV const & physicalDeviceDisplacementMicromapPropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDisplacementMicromapPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDisplacementMicromapPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDisplacementMicromapPropertiesNV.maxDisplacementMicromapSubdivisionLevel ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const & physicalDeviceDriverProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.driverID ); + for ( size_t i = 0; i < VK_MAX_DRIVER_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.driverName[i] ); + } + for ( size_t i = 0; i < VK_MAX_DRIVER_INFO_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceDriverProperties.driverInfo[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.conformanceVersion ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT const & physicalDeviceDrmPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.hasPrimary ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.hasRender ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.primaryMajor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.primaryMinor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.renderMajor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.renderMinor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures const & physicalDeviceDynamicRenderingFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingFeatures.dynamicRendering ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingLocalReadFeatures const & physicalDeviceDynamicRenderingLocalReadFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingLocalReadFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingLocalReadFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingLocalReadFeatures.dynamicRenderingLocalRead ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & + physicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT.dynamicRenderingUnusedAttachments ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const & physicalDeviceExclusiveScissorFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExclusiveScissorFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExclusiveScissorFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExclusiveScissorFeaturesNV.exclusiveScissor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT const & physicalDeviceExtendedDynamicState2FeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.sType ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.extendedDynamicState2 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.extendedDynamicState2LogicOp ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.extendedDynamicState2PatchControlPoints ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT const & physicalDeviceExtendedDynamicState3FeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3TessellationDomainOrigin ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3DepthClampEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3PolygonMode ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3RasterizationSamples ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3SampleMask ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3AlphaToCoverageEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3AlphaToOneEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3LogicOpEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorBlendEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorBlendEquation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorWriteMask ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3RasterizationStream ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ConservativeRasterizationMode ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ExtraPrimitiveOverestimationSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3DepthClipEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3SampleLocationsEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorBlendAdvanced ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ProvokingVertexMode ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3LineRasterizationMode ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3LineStippleEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3DepthClipNegativeOneToOne ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ViewportWScalingEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ViewportSwizzle ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageToColorEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageToColorLocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageModulationMode ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageModulationTableEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageModulationTable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageReductionMode ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3RepresentativeFragmentTestEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ShadingRateImageEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT const & physicalDeviceExtendedDynamicState3PropertiesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3PropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3PropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3PropertiesEXT.dynamicPrimitiveTopologyUnrestricted ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT const & physicalDeviceExtendedDynamicStateFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicStateFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicStateFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicStateFeaturesEXT.extendedDynamicState ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & physicalDeviceExtendedSparseAddressSpaceFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpaceFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpaceFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpaceFeaturesNV.extendedSparseAddressSpace ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & + physicalDeviceExtendedSparseAddressSpacePropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpacePropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpacePropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpacePropertiesNV.extendedSparseAddressSpaceSize ); + VULKAN_HPP_HASH_COMBINE( 
seed, physicalDeviceExtendedSparseAddressSpacePropertiesNV.extendedSparseImageUsageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpacePropertiesNV.extendedSparseBufferUsageFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const & physicalDeviceExternalBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.usage ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.handleType ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalComputeQueuePropertiesNV const & physicalDeviceExternalComputeQueuePropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalComputeQueuePropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalComputeQueuePropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalComputeQueuePropertiesNV.externalDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalComputeQueuePropertiesNV.maxExternalQueues ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const & physicalDeviceExternalFenceInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFenceInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFenceInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFenceInfo.handleType ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolveFeaturesANDROID const & physicalDeviceExternalFormatResolveFeaturesANDROID ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolveFeaturesANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolveFeaturesANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolveFeaturesANDROID.externalFormatResolve ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolvePropertiesANDROID const & + physicalDeviceExternalFormatResolvePropertiesANDROID ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolvePropertiesANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolvePropertiesANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolvePropertiesANDROID.nullColorAttachmentWithExternalFormatResolve ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolvePropertiesANDROID.externalFormatResolveChromaOffsetX ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolvePropertiesANDROID.externalFormatResolveChromaOffsetY ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + 
template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const & physicalDeviceExternalImageFormatInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalImageFormatInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalImageFormatInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalImageFormatInfo.handleType ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const & physicalDeviceExternalMemoryHostPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryHostPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryHostPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryHostPropertiesEXT.minImportedHostPointerAlignment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV const & physicalDeviceExternalMemoryRDMAFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryRDMAFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryRDMAFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryRDMAFeaturesNV.externalMemoryRDMA ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & physicalDeviceExternalMemoryScreenBufferFeaturesQNX ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryScreenBufferFeaturesQNX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryScreenBufferFeaturesQNX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryScreenBufferFeaturesQNX.screenBufferImport ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const & physicalDeviceExternalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalSemaphoreInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalSemaphoreInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalSemaphoreInfo.handleType ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT const & physicalDeviceFaultFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.deviceFault ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.deviceFaultVendorBinary ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const & physicalDeviceFeatures2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceFeatures2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures2.features ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const & physicalDeviceFloatControlsProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.denormBehaviorIndependence ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.roundingModeIndependence ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderSignedZeroInfNanPreserveFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderSignedZeroInfNanPreserveFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderSignedZeroInfNanPreserveFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormPreserveFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormPreserveFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormPreserveFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormFlushToZeroFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormFlushToZeroFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormFlushToZeroFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTEFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTEFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTEFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTZFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTZFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTZFloat64 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT const & physicalDeviceFragmentDensityMap2FeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2FeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2FeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2FeaturesEXT.fragmentDensityMapDeferred ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT const & physicalDeviceFragmentDensityMap2PropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.subsampledLoads ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.subsampledCoarseReconstructionEarlyAccess ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceFragmentDensityMap2PropertiesEXT.maxSubsampledArrayLayers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.maxDescriptorSetSubsampledSamplers ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const & physicalDeviceFragmentDensityMapFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.fragmentDensityMap ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.fragmentDensityMapDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.fragmentDensityMapNonSubsampledImages ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & physicalDeviceFragmentDensityMapOffsetFeaturesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetFeaturesEXT.fragmentDensityMapOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & physicalDeviceFragmentDensityMapOffsetPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetPropertiesEXT.fragmentDensityOffsetGranularity ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const & physicalDeviceFragmentDensityMapPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.minFragmentDensityTexelSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.maxFragmentDensityTexelSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.fragmentDensityInvocations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & physicalDeviceFragmentShaderBarycentricFeaturesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricFeaturesKHR.fragmentShaderBarycentric ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & + physicalDeviceFragmentShaderBarycentricPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricPropertiesKHR.triStripVertexOrderIndependentOfProvokingVertex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & physicalDeviceFragmentShaderInterlockFeaturesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.fragmentShaderSampleInterlock ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.fragmentShaderPixelInterlock ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.fragmentShaderShadingRateInterlock ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & physicalDeviceFragmentShadingRateEnumsFeaturesNV ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.fragmentShadingRateEnums ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.supersampleFragmentShadingRates ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.noInvocationFragmentShadingRates ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & physicalDeviceFragmentShadingRateEnumsPropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsPropertiesNV.maxFragmentShadingRateInvocationCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR const & physicalDeviceFragmentShadingRateFeaturesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.pipelineFragmentShadingRate ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.primitiveFragmentShadingRate ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.attachmentFragmentShadingRate ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t 
operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR const & physicalDeviceFragmentShadingRateKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateKHR.sampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateKHR.fragmentSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR const & physicalDeviceFragmentShadingRatePropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.minFragmentShadingRateAttachmentTexelSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateAttachmentTexelSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateAttachmentTexelSizeAspectRatio ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.primitiveFragmentShadingRateWithMultipleViewports ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.layeredShadingRateAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateNonTrivialCombinerOps ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentSizeAspectRatio ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateCoverageSamples ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateRasterizationSamples ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithShaderDepthStencilWrites ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithSampleMask ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithShaderSampleMask ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithConservativeRasterization ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithFragmentShaderInterlock ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithCustomSampleLocations ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateStrictMultiplyCombiner ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFrameBoundaryFeaturesEXT const & physicalDeviceFrameBoundaryFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFrameBoundaryFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFrameBoundaryFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFrameBoundaryFeaturesEXT.frameBoundary ); + return 
seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeatures const & physicalDeviceGlobalPriorityQueryFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeatures.globalPriorityQuery ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & physicalDeviceGraphicsPipelineLibraryFeaturesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryFeaturesEXT.graphicsPipelineLibrary ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & physicalDeviceGraphicsPipelineLibraryPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryPropertiesEXT.graphicsPipelineLibraryFastLinking ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryPropertiesEXT.graphicsPipelineLibraryIndependentInterpolationDecoration ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const & physicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.physicalDeviceCount ); + for ( size_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.physicalDevices[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.subsetAllocation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHdrVividFeaturesHUAWEI const & physicalDeviceHdrVividFeaturesHUAWEI ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHdrVividFeaturesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHdrVividFeaturesHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHdrVividFeaturesHUAWEI.hdrVivid ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeatures const & physicalDeviceHostImageCopyFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeatures.hostImageCopy ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyProperties const & physicalDeviceHostImageCopyProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.copySrcLayoutCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.pCopySrcLayouts );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.copyDstLayoutCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.pCopyDstLayouts );
+      for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.optimalTilingLayoutUUID[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.identicalMemoryTypeRequirements );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const & physicalDeviceHostQueryResetFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostQueryResetFeatures.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostQueryResetFeatures.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostQueryResetFeatures.hostQueryReset );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const & physicalDeviceIDProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.pNext );
+      for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.deviceUUID[i] );
+      }
+      for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.driverUUID[i] );
+      }
+      for ( size_t i = 0; i < VK_LUID_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.deviceLUID[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.deviceNodeMask );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.deviceLUIDValid );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT const & physicalDeviceImage2DViewOf3DFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImage2DViewOf3DFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImage2DViewOf3DFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImage2DViewOf3DFeaturesEXT.image2DViewOf3D );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImage2DViewOf3DFeaturesEXT.sampler2DViewOf3D );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlFeaturesMESA>
+  {
+    std::size_t operator()(
+      VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlFeaturesMESA const & physicalDeviceImageAlignmentControlFeaturesMESA ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageAlignmentControlFeaturesMESA.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageAlignmentControlFeaturesMESA.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageAlignmentControlFeaturesMESA.imageAlignmentControl );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlPropertiesMESA>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlPropertiesMESA const & physicalDeviceImageAlignmentControlPropertiesMESA )
+      const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageAlignmentControlPropertiesMESA.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageAlignmentControlPropertiesMESA.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageAlignmentControlPropertiesMESA.supportedImageAlignmentMask );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT const & physicalDeviceImageCompressionControlFeaturesEXT )
+      const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlFeaturesEXT.imageCompressionControl );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const &
+                              physicalDeviceImageCompressionControlSwapchainFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlSwapchainFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlSwapchainFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlSwapchainFeaturesEXT.imageCompressionControlSwapchain );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const & physicalDeviceImageDrmFormatModifierInfoEXT ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.drmFormatModifier );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.sharingMode );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.queueFamilyIndexCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.pQueueFamilyIndices );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const & physicalDeviceImageFormatInfo2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.format );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.type );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.tiling );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.usage );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.flags );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2FeaturesQCOM>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2FeaturesQCOM const & physicalDeviceImageProcessing2FeaturesQCOM ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessing2FeaturesQCOM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessing2FeaturesQCOM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessing2FeaturesQCOM.textureBlockMatch2 );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2PropertiesQCOM>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2PropertiesQCOM const & physicalDeviceImageProcessing2PropertiesQCOM ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessing2PropertiesQCOM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessing2PropertiesQCOM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessing2PropertiesQCOM.maxBlockMatchWindow );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM const & physicalDeviceImageProcessingFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.textureSampleWeighted );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.textureBoxFilter );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.textureBlockMatch );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM const & physicalDeviceImageProcessingPropertiesQCOM ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.maxWeightFilterPhases );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.maxWeightFilterDimension );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.maxBlockMatchRegion );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.maxBoxFilterBlockSize );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures const & physicalDeviceImageRobustnessFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageRobustnessFeatures.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageRobustnessFeatures.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageRobustnessFeatures.robustImageAccess );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & physicalDeviceImageSlicedViewOf3DFeaturesEXT ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageSlicedViewOf3DFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageSlicedViewOf3DFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageSlicedViewOf3DFeaturesEXT.imageSlicedViewOf3D );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const &
physicalDeviceImageViewImageFormatInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewImageFormatInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewImageFormatInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewImageFormatInfoEXT.imageViewType ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT const & physicalDeviceImageViewMinLodFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewMinLodFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewMinLodFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewMinLodFeaturesEXT.minLod ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const & physicalDeviceImagelessFramebufferFeatures ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImagelessFramebufferFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImagelessFramebufferFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImagelessFramebufferFeatures.imagelessFramebuffer ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8Features const & physicalDeviceIndexTypeUint8Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8Features.indexTypeUint8 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV const & physicalDeviceInheritedViewportScissorFeaturesNV ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInheritedViewportScissorFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInheritedViewportScissorFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInheritedViewportScissorFeaturesNV.inheritedViewportScissor2D ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures const & physicalDeviceInlineUniformBlockFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockFeatures.inlineUniformBlock ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockFeatures.descriptorBindingInlineUniformBlockUpdateAfterBind ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties const & physicalDeviceInlineUniformBlockProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceInlineUniformBlockProperties.maxInlineUniformBlockSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxPerStageDescriptorInlineUniformBlocks ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxDescriptorSetInlineUniformBlocks ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI const & physicalDeviceInvocationMaskFeaturesHUAWEI ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInvocationMaskFeaturesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInvocationMaskFeaturesHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInvocationMaskFeaturesHUAWEI.invocationMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR const & physicalDeviceLayeredApiPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesKHR.vendorID ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesKHR.deviceID ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesKHR.layeredAPI ); + for ( size_t i = 0; i < VK_MAX_PHYSICAL_DEVICE_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesKHR.deviceName[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesListKHR const & physicalDeviceLayeredApiPropertiesListKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesListKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesListKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesListKHR.layeredApiCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesListKHR.pLayeredApis ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const & physicalDeviceLimits ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageDimension1D ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageDimension2D ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageDimension3D ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageDimensionCube ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageArrayLayers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTexelBufferElements ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxUniformBufferRange ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxStorageBufferRange ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPushConstantsSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxMemoryAllocationCount ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceLimits.maxSamplerAllocationCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.bufferImageGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sparseAddressSpaceSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxBoundDescriptorSets ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorSamplers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorStorageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorSampledImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorStorageImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorInputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageResources ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetSamplers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetUniformBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetStorageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetStorageBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetSampledImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetStorageImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetInputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexInputAttributes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexInputBindings ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexInputAttributeOffset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexInputBindingStride ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexOutputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationGenerationLevel ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationPatchSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationControlPerVertexInputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationControlPerVertexOutputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationControlPerPatchOutputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationControlTotalOutputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationEvaluationInputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationEvaluationOutputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryShaderInvocations ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryInputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryOutputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryOutputVertices ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryTotalOutputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFragmentInputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFragmentOutputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFragmentDualSrcAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceLimits.maxFragmentCombinedOutputResources ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxComputeSharedMemorySize ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxComputeWorkGroupCount[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxComputeWorkGroupInvocations ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxComputeWorkGroupSize[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.subPixelPrecisionBits ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.subTexelPrecisionBits ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.mipmapPrecisionBits ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDrawIndexedIndexValue ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDrawIndirectCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxSamplerLodBias ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxSamplerAnisotropy ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxViewports ); + for ( size_t i = 0; i < 2; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxViewportDimensions[i] ); + } + for ( size_t i = 0; i < 2; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.viewportBoundsRange[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.viewportSubPixelBits ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minMemoryMapAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minTexelBufferOffsetAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minUniformBufferOffsetAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minStorageBufferOffsetAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minTexelOffset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTexelOffset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minTexelGatherOffset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTexelGatherOffset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minInterpolationOffset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxInterpolationOffset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.subPixelInterpolationOffsetBits ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFramebufferWidth ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFramebufferHeight ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFramebufferLayers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.framebufferColorSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.framebufferDepthSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.framebufferStencilSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.framebufferNoAttachmentsSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxColorAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sampledImageColorSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sampledImageIntegerSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sampledImageDepthSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sampledImageStencilSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.storageImageSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxSampleMaskWords ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceLimits.timestampComputeAndGraphics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.timestampPeriod ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxClipDistances ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxCullDistances ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxCombinedClipAndCullDistances ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.discreteQueuePriorities ); + for ( size_t i = 0; i < 2; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.pointSizeRange[i] ); + } + for ( size_t i = 0; i < 2; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.lineWidthRange[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.pointSizeGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.lineWidthGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.strictLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.standardSampleLocations ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.optimalBufferCopyOffsetAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.optimalBufferCopyRowPitchAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.nonCoherentAtomSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const & physicalDeviceSparseProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard2DBlockShape ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard2DMultisampleBlockShape ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard3DBlockShape ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyAlignedMipSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyNonResidentStrict ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const & physicalDeviceProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.apiVersion ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.driverVersion ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.vendorID ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceID ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceType ); + for ( size_t i = 0; i < VK_MAX_PHYSICAL_DEVICE_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceName[i] ); + } + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.pipelineCacheUUID[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.limits ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.sparseProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const & physicalDeviceProperties2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.properties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiVulkanPropertiesKHR 
const & physicalDeviceLayeredApiVulkanPropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiVulkanPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiVulkanPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiVulkanPropertiesKHR.properties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredDriverPropertiesMSFT const & physicalDeviceLayeredDriverPropertiesMSFT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredDriverPropertiesMSFT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredDriverPropertiesMSFT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredDriverPropertiesMSFT.underlyingAPI ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT const & physicalDeviceLegacyDitheringFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.legacyDithering ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & physicalDeviceLegacyVertexAttributesFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyVertexAttributesFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyVertexAttributesFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyVertexAttributesFeaturesEXT.legacyVertexAttributes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & physicalDeviceLegacyVertexAttributesPropertiesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyVertexAttributesPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyVertexAttributesPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyVertexAttributesPropertiesEXT.nativeUnalignedPerformance ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeatures const & physicalDeviceLineRasterizationFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.rectangularLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.bresenhamLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.smoothLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.stippledRectangularLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.stippledBresenhamLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.stippledSmoothLines ); + return seed; + } + }; + + template <> + 
struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationProperties const & physicalDeviceLineRasterizationProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationProperties.lineSubPixelPrecisionBits ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV const & physicalDeviceLinearColorAttachmentFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLinearColorAttachmentFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLinearColorAttachmentFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLinearColorAttachmentFeaturesNV.linearColorAttachment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const & physicalDeviceMaintenance3Properties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance3Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance3Properties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance3Properties.maxPerSetDescriptors ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance3Properties.maxMemoryAllocationSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features const & physicalDeviceMaintenance4Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Features.maintenance4 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties const & physicalDeviceMaintenance4Properties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Properties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Properties.maxBufferSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Features const & physicalDeviceMaintenance5Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Features.maintenance5 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Properties const & physicalDeviceMaintenance5Properties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceMaintenance5Properties.earlyFragmentMultisampleCoverageAfterSampleCounting ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.earlyFragmentSampleMaskTestBeforeSampleCounting ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.depthStencilSwizzleOneSupport ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.polygonModePointSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.nonStrictSinglePixelWideLinesUseParallelogram ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.nonStrictWideLinesUseParallelogram ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Features const & physicalDeviceMaintenance6Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Features.maintenance6 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Properties const & physicalDeviceMaintenance6Properties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Properties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Properties.blockTexelViewCompatibleMultipleLayers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Properties.maxCombinedImageSamplerDescriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Properties.fragmentShadingRateClampCombinerInputs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance7FeaturesKHR const & physicalDeviceMaintenance7FeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7FeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7FeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7FeaturesKHR.maintenance7 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance7PropertiesKHR const & physicalDeviceMaintenance7PropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.robustFragmentShadingRateAttachmentAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.separateDepthStencilAttachmentAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.maxDescriptorSetTotalUniformBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.maxDescriptorSetTotalStorageBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.maxDescriptorSetTotalBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceMaintenance7PropertiesKHR.maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.maxDescriptorSetUpdateAfterBindTotalBuffersDynamic ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance8FeaturesKHR const & physicalDeviceMaintenance8FeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance8FeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance8FeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance8FeaturesKHR.maintenance8 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT const & physicalDeviceMapMemoryPlacedFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.memoryMapPlaced ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.memoryMapRangePlaced ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.memoryUnmapReserve ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedPropertiesEXT const & physicalDeviceMapMemoryPlacedPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedPropertiesEXT.minPlacedMemoryMapAlignment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const & physicalDeviceMemoryBudgetPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryBudgetPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryBudgetPropertiesEXT.pNext ); + for ( size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryBudgetPropertiesEXT.heapBudget[i] ); + } + for ( size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryBudgetPropertiesEXT.heapUsage[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV const & physicalDeviceMemoryDecompressionFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionFeaturesNV.memoryDecompression ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV const & physicalDeviceMemoryDecompressionPropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceMemoryDecompressionPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionPropertiesNV.decompressionMethods ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionPropertiesNV.maxDecompressionIndirectCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const & physicalDeviceMemoryPriorityFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryPriorityFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryPriorityFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryPriorityFeaturesEXT.memoryPriority ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const & physicalDeviceMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties.memoryTypeCount ); + for ( size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties.memoryTypes[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties.memoryHeapCount ); + for ( size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties.memoryHeaps[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const & physicalDeviceMemoryProperties2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties2.memoryProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT const & physicalDeviceMeshShaderFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.taskShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.meshShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.multiviewMeshShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.primitiveFragmentShadingRateMeshShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.meshShaderQueries ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const & physicalDeviceMeshShaderFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesNV.taskShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesNV.meshShader ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT const & physicalDeviceMeshShaderPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskWorkGroupTotalCount ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskWorkGroupCount[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskWorkGroupInvocations ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskWorkGroupSize[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskPayloadSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskSharedMemorySize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskPayloadAndSharedMemorySize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshWorkGroupTotalCount ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshWorkGroupCount[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshWorkGroupInvocations ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshWorkGroupSize[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshSharedMemorySize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshPayloadAndSharedMemorySize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputMemorySize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshPayloadAndOutputMemorySize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputVertices ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputPrimitives ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputLayers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshMultiviewViewCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.meshOutputPerVertexGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.meshOutputPerPrimitiveGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxPreferredTaskWorkGroupInvocations ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxPreferredMeshWorkGroupInvocations ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.prefersLocalInvocationVertexOutput ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.prefersLocalInvocationPrimitiveOutput ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.prefersCompactVertexOutput ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.prefersCompactPrimitiveOutput ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const & physicalDeviceMeshShaderPropertiesNV ) const 
VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxDrawMeshTasksCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxTaskWorkGroupInvocations ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxTaskWorkGroupSize[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxTaskTotalMemorySize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxTaskOutputCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshWorkGroupInvocations ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshWorkGroupSize[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshTotalMemorySize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshOutputVertices ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshOutputPrimitives ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshMultiviewViewCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.meshOutputPerVertexGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.meshOutputPerPrimitiveGranularity ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT const & physicalDeviceMultiDrawFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawFeaturesEXT.multiDraw ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT const & physicalDeviceMultiDrawPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawPropertiesEXT.maxMultiDrawCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & + physicalDeviceMultisampledRenderToSingleSampledFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultisampledRenderToSingleSampledFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultisampledRenderToSingleSampledFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultisampledRenderToSingleSampledFeaturesEXT.multisampledRenderToSingleSampled ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const & physicalDeviceMultiviewFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.pNext ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.multiview ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.multiviewGeometryShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.multiviewTessellationShader ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & + physicalDeviceMultiviewPerViewAttributesPropertiesNVX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewAttributesPropertiesNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewAttributesPropertiesNVX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewAttributesPropertiesNVX.perViewPositionAllComponents ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & + physicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM.multiviewPerViewRenderAreas ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & physicalDeviceMultiviewPerViewViewportsFeaturesQCOM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewViewportsFeaturesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewViewportsFeaturesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewViewportsFeaturesQCOM.multiviewPerViewViewports ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const & physicalDeviceMultiviewProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewProperties.maxMultiviewViewCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewProperties.maxMultiviewInstanceIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & physicalDeviceMutableDescriptorTypeFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMutableDescriptorTypeFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMutableDescriptorTypeFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMutableDescriptorTypeFeaturesEXT.mutableDescriptorType ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferFeaturesEXT const & physicalDeviceNestedCommandBufferFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceNestedCommandBufferFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferFeaturesEXT.nestedCommandBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferFeaturesEXT.nestedCommandBufferRendering ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferFeaturesEXT.nestedCommandBufferSimultaneousUse ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferPropertiesEXT const & physicalDeviceNestedCommandBufferPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferPropertiesEXT.maxCommandBufferNestingLevel ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & physicalDeviceNonSeamlessCubeMapFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNonSeamlessCubeMapFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNonSeamlessCubeMapFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNonSeamlessCubeMapFeaturesEXT.nonSeamlessCubeMap ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT const & physicalDeviceOpacityMicromapFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.micromap ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.micromapCaptureReplay ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.micromapHostCommands ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT const & physicalDeviceOpacityMicromapPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.maxOpacity2StateSubdivisionLevel ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.maxOpacity4StateSubdivisionLevel ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV const & physicalDeviceOpticalFlowFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowFeaturesNV.opticalFlow ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV const & physicalDeviceOpticalFlowPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.supportedOutputGridSizes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.supportedHintGridSizes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.hintSupported ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.costSupported ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.bidirectionalFlowSupported ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.globalFlowSupported ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.minWidth ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.minHeight ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.maxWidth ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.maxHeight ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.maxNumRegionsOfInterest ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const & physicalDevicePCIBusInfoPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciDomain ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciBus ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciDevice ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciFunction ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & physicalDevicePageableDeviceLocalMemoryFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePageableDeviceLocalMemoryFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePageableDeviceLocalMemoryFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePageableDeviceLocalMemoryFeaturesEXT.pageableDeviceLocalMemory ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & + physicalDevicePartitionedAccelerationStructureFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePartitionedAccelerationStructureFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePartitionedAccelerationStructureFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePartitionedAccelerationStructureFeaturesNV.partitionedAccelerationStructure ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & + physicalDevicePartitionedAccelerationStructurePropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePartitionedAccelerationStructurePropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDevicePartitionedAccelerationStructurePropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePartitionedAccelerationStructurePropertiesNV.maxPartitionCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePerStageDescriptorSetFeaturesNV const & physicalDevicePerStageDescriptorSetFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerStageDescriptorSetFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerStageDescriptorSetFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerStageDescriptorSetFeaturesNV.perStageDescriptorSet ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerStageDescriptorSetFeaturesNV.dynamicPipelineLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const & physicalDevicePerformanceQueryFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.performanceCounterQueryPools ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.performanceCounterMultipleQueryPools ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const & physicalDevicePerformanceQueryPropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryPropertiesKHR.allowCommandBufferQueryCopies ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryFeaturesKHR const & physicalDevicePipelineBinaryFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryFeaturesKHR.pipelineBinaries ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryPropertiesKHR const & physicalDevicePipelineBinaryPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.pipelineBinaryInternalCache ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.pipelineBinaryInternalCacheControl ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.pipelineBinaryPrefersInternalCache ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.pipelineBinaryPrecompiledInternalCache ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.pipelineBinaryCompressedData ); + return seed; + } + }; + + template <> + struct 
hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures const & physicalDevicePipelineCreationCacheControlFeatures ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineCreationCacheControlFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineCreationCacheControlFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineCreationCacheControlFeatures.pipelineCreationCacheControl ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & + physicalDevicePipelineExecutablePropertiesFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineExecutablePropertiesFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineExecutablePropertiesFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineExecutablePropertiesFeaturesKHR.pipelineExecutableInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & + physicalDevicePipelineLibraryGroupHandlesFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineLibraryGroupHandlesFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineLibraryGroupHandlesFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineLibraryGroupHandlesFeaturesEXT.pipelineLibraryGroupHandles ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineOpacityMicromapFeaturesARM const & physicalDevicePipelineOpacityMicromapFeaturesARM ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineOpacityMicromapFeaturesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineOpacityMicromapFeaturesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineOpacityMicromapFeaturesARM.pipelineOpacityMicromap ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT const & physicalDevicePipelinePropertiesFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelinePropertiesFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelinePropertiesFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelinePropertiesFeaturesEXT.pipelinePropertiesIdentifier ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeatures const & physicalDevicePipelineProtectedAccessFeatures ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeatures.pipelineProtectedAccess ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeatures const & physicalDevicePipelineRobustnessFeatures ) const 
VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeatures.pipelineRobustness ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessProperties const & physicalDevicePipelineRobustnessProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessProperties.defaultRobustnessStorageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessProperties.defaultRobustnessUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessProperties.defaultRobustnessVertexInputs ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessProperties.defaultRobustnessImages ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const & physicalDevicePointClippingProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePointClippingProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePointClippingProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePointClippingProperties.pointClippingBehavior ); + return seed; + } + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR const & physicalDevicePortabilitySubsetFeaturesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.constantAlphaColorBlendFactors ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.events ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.imageViewFormatReinterpretation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.imageViewFormatSwizzle ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.imageView2DOn3DImage ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.multisampleArrayImage ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.mutableComparisonSamplers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.pointPolygons ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.samplerMipLodBias ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.separateStencilMaskRef ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.shaderSampleRateInterpolationFunctions ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.tessellationIsolines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.tessellationPointMode ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDevicePortabilitySubsetFeaturesKHR.triangleFans ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.vertexAttributeAccessBeyondStride ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR const & physicalDevicePortabilitySubsetPropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetPropertiesKHR.minVertexInputBindingStrideAlignment ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV const & physicalDevicePresentBarrierFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentBarrierFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentBarrierFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentBarrierFeaturesNV.presentBarrier ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR const & physicalDevicePresentIdFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentIdFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentIdFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentIdFeaturesKHR.presentId ); + return seed; + } + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentMeteringFeaturesNV const & physicalDevicePresentMeteringFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentMeteringFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentMeteringFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentMeteringFeaturesNV.presentMetering ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & physicalDevicePresentModeFifoLatestReadyFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentModeFifoLatestReadyFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentModeFifoLatestReadyFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentModeFifoLatestReadyFeaturesEXT.presentModeFifoLatestReady ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR const & physicalDevicePresentWaitFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentWaitFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentWaitFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentWaitFeaturesKHR.presentWait ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t 
operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & + physicalDevicePrimitiveTopologyListRestartFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.primitiveTopologyListRestart ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.primitiveTopologyPatchListRestart ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & physicalDevicePrimitivesGeneratedQueryFeaturesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.primitivesGeneratedQuery ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.primitivesGeneratedQueryWithRasterizerDiscard ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.primitivesGeneratedQueryWithNonZeroStreams ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures const & physicalDevicePrivateDataFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrivateDataFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrivateDataFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrivateDataFeatures.privateData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const & physicalDeviceProtectedMemoryFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryFeatures.protectedMemory ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const & physicalDeviceProtectedMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryProperties.protectedNoFault ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT const & physicalDeviceProvokingVertexFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexFeaturesEXT.provokingVertexLast ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceProvokingVertexFeaturesEXT.transformFeedbackPreservesProvokingVertex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT const & physicalDeviceProvokingVertexPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexPropertiesEXT.provokingVertexModePerPipeline ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexPropertiesEXT.transformFeedbackPreservesTriangleFanProvokingVertex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorProperties const & physicalDevicePushDescriptorProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorProperties.maxPushDescriptors ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & physicalDeviceRGBA10X6FormatsFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRGBA10X6FormatsFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRGBA10X6FormatsFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRGBA10X6FormatsFeaturesEXT.formatRgba10x6WithoutYCbCrSampler ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & + physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.rasterizationOrderColorAttachmentAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.rasterizationOrderDepthAttachmentAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.rasterizationOrderStencilAttachmentAccess ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRawAccessChainsFeaturesNV const & physicalDeviceRawAccessChainsFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRawAccessChainsFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRawAccessChainsFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRawAccessChainsFeaturesNV.shaderRawAccessChains ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR const & physicalDeviceRayQueryFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayQueryFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( 
seed, physicalDeviceRayQueryFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayQueryFeaturesKHR.rayQuery ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & physicalDeviceRayTracingInvocationReorderFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderFeaturesNV.rayTracingInvocationReorder ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & + physicalDeviceRayTracingInvocationReorderPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderPropertiesNV.rayTracingInvocationReorderReorderingHint ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & + physicalDeviceRayTracingLinearSweptSpheresFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingLinearSweptSpheresFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingLinearSweptSpheresFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingLinearSweptSpheresFeaturesNV.spheres ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingLinearSweptSpheresFeaturesNV.linearSweptSpheres ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & physicalDeviceRayTracingMaintenance1FeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMaintenance1FeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMaintenance1FeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMaintenance1FeaturesKHR.rayTracingMaintenance1 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMaintenance1FeaturesKHR.rayTracingPipelineTraceRaysIndirect2 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV const & physicalDeviceRayTracingMotionBlurFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMotionBlurFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMotionBlurFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMotionBlurFeaturesNV.rayTracingMotionBlur ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMotionBlurFeaturesNV.rayTracingMotionBlurPipelineTraceRaysIndirect ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR const & physicalDeviceRayTracingPipelineFeaturesKHR ) const + 
VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipeline ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipelineShaderGroupHandleCaptureReplay ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipelineShaderGroupHandleCaptureReplayMixed ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipelineTraceRaysIndirect ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTraversalPrimitiveCulling ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR const & physicalDeviceRayTracingPipelinePropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.shaderGroupHandleSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.maxRayRecursionDepth ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.maxShaderGroupStride ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.shaderGroupBaseAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.shaderGroupHandleCaptureReplaySize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.maxRayDispatchInvocationCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.shaderGroupHandleAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.maxRayHitAttributeSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & physicalDeviceRayTracingPositionFetchFeaturesKHR ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPositionFetchFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPositionFetchFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPositionFetchFeaturesKHR.rayTracingPositionFetch ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const & physicalDeviceRayTracingPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.shaderGroupHandleSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxRecursionDepth ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxShaderGroupStride ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.shaderGroupBaseAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxGeometryCount ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxInstanceCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxTriangleCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxDescriptorSetAccelerationStructures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingValidationFeaturesNV const & physicalDeviceRayTracingValidationFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingValidationFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingValidationFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingValidationFeaturesNV.rayTracingValidation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & physicalDeviceRelaxedLineRasterizationFeaturesIMG ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRelaxedLineRasterizationFeaturesIMG.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRelaxedLineRasterizationFeaturesIMG.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRelaxedLineRasterizationFeaturesIMG.relaxedLineRasterization ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedFeaturesARM const & physicalDeviceRenderPassStripedFeaturesARM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedFeaturesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedFeaturesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedFeaturesARM.renderPassStriped ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedPropertiesARM const & physicalDeviceRenderPassStripedPropertiesARM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedPropertiesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedPropertiesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedPropertiesARM.renderPassStripeGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedPropertiesARM.maxRenderPassStripes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & physicalDeviceRepresentativeFragmentTestFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRepresentativeFragmentTestFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRepresentativeFragmentTestFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRepresentativeFragmentTestFeaturesNV.representativeFragmentTest ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT const & physicalDeviceRobustness2FeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.pNext ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.robustBufferAccess2 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.robustImageAccess2 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.nullDescriptor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT const & physicalDeviceRobustness2PropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2PropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2PropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2PropertiesEXT.robustStorageBufferAccessSizeAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2PropertiesEXT.robustUniformBufferAccessSizeAlignment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const & physicalDeviceSampleLocationsPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.sampleLocationSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.maxSampleLocationGridSize ); + for ( size_t i = 0; i < 2; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.sampleLocationCoordinateRange[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.sampleLocationSubPixelBits ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.variableSampleLocations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const & physicalDeviceSamplerFilterMinmaxProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerFilterMinmaxProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerFilterMinmaxProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerFilterMinmaxProperties.filterMinmaxSingleComponentFormats ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerFilterMinmaxProperties.filterMinmaxImageComponentMapping ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const & physicalDeviceSamplerYcbcrConversionFeatures ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerYcbcrConversionFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerYcbcrConversionFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerYcbcrConversionFeatures.samplerYcbcrConversion ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const & physicalDeviceScalarBlockLayoutFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceScalarBlockLayoutFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceScalarBlockLayoutFeatures.pNext ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceScalarBlockLayoutFeatures.scalarBlockLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM const & physicalDeviceSchedulingControlsFeaturesARM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsFeaturesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsFeaturesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsFeaturesARM.schedulingControls ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM const & physicalDeviceSchedulingControlsPropertiesARM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsPropertiesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsPropertiesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsPropertiesARM.schedulingControlsFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & physicalDeviceSeparateDepthStencilLayoutsFeatures ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSeparateDepthStencilLayoutsFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSeparateDepthStencilLayoutsFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSeparateDepthStencilLayoutsFeatures.separateDepthStencilLayouts ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & physicalDeviceShaderAtomicFloat16VectorFeaturesNV ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat16VectorFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat16VectorFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat16VectorFeaturesNV.shaderFloat16VectorAtomics ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & physicalDeviceShaderAtomicFloat2FeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat16Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat16AtomicAdd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat16AtomicMinMax ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat32AtomicMinMax ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat64AtomicMinMax ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat16Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat16AtomicAdd ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat16AtomicMinMax ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat32AtomicMinMax ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat64AtomicMinMax ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderImageFloat32AtomicMinMax ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.sparseImageFloat32AtomicMinMax ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT const & physicalDeviceShaderAtomicFloatFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderBufferFloat32Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderBufferFloat32AtomicAdd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderBufferFloat64Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderBufferFloat64AtomicAdd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderSharedFloat32Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderSharedFloat32AtomicAdd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderSharedFloat64Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderSharedFloat64AtomicAdd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderImageFloat32Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderImageFloat32AtomicAdd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.sparseImageFloat32Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.sparseImageFloat32AtomicAdd ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const & physicalDeviceShaderAtomicInt64Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicInt64Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicInt64Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicInt64Features.shaderBufferInt64Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicInt64Features.shaderSharedInt64Atomics ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderBfloat16FeaturesKHR const & physicalDeviceShaderBfloat16FeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderBfloat16FeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderBfloat16FeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderBfloat16FeaturesKHR.shaderBFloat16Type ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderBfloat16FeaturesKHR.shaderBFloat16DotProduct ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceShaderBfloat16FeaturesKHR.shaderBFloat16CooperativeMatrix ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const & physicalDeviceShaderClockFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderClockFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderClockFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderClockFeaturesKHR.shaderSubgroupClock ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderClockFeaturesKHR.shaderDeviceClock ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & physicalDeviceShaderCoreBuiltinsFeaturesARM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsFeaturesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsFeaturesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsFeaturesARM.shaderCoreBuiltins ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & physicalDeviceShaderCoreBuiltinsPropertiesARM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.shaderCoreMask ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.shaderCoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.shaderWarpsPerCore ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const & physicalDeviceShaderCoreProperties2AMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreProperties2AMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreProperties2AMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreProperties2AMD.shaderCoreFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreProperties2AMD.activeComputeUnitCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const & physicalDeviceShaderCorePropertiesAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.shaderEngineCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.shaderArraysPerEngineCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.computeUnitsPerShaderArray ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.simdPerComputeUnit ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.wavefrontsPerSimd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.wavefrontSize ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceShaderCorePropertiesAMD.sgprsPerSimd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.minSgprAllocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.maxSgprAllocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.sgprAllocationGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.vgprsPerSimd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.minVgprAllocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.maxVgprAllocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.vgprAllocationGranularity ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM const & physicalDeviceShaderCorePropertiesARM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesARM.pixelRate ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesARM.texelRate ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesARM.fmaRate ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & + physicalDeviceShaderDemoteToHelperInvocationFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDemoteToHelperInvocationFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDemoteToHelperInvocationFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDemoteToHelperInvocationFeatures.shaderDemoteToHelperInvocation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const & physicalDeviceShaderDrawParametersFeatures ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDrawParametersFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDrawParametersFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDrawParametersFeatures.shaderDrawParameters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & + physicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD.shaderEarlyAndLateFragmentTests ); + return seed; + } + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX const & physicalDeviceShaderEnqueueFeaturesAMDX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.pNext ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.shaderEnqueue ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.shaderMeshEnqueue ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX const & physicalDeviceShaderEnqueuePropertiesAMDX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphDepth ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderOutputNodes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderPayloadSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderPayloadCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.executionGraphDispatchAddressAlignment ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphWorkgroupCount[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphWorkgroups ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderExpectAssumeFeatures const & physicalDeviceShaderExpectAssumeFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderExpectAssumeFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderExpectAssumeFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderExpectAssumeFeatures.shaderExpectAssume ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const & physicalDeviceShaderFloat16Int8Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloat16Int8Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloat16Int8Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloat16Int8Features.shaderFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloat16Int8Features.shaderInt8 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloatControls2Features const & physicalDeviceShaderFloatControls2Features ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloatControls2Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloatControls2Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloatControls2Features.shaderFloatControls2 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & physicalDeviceShaderImageAtomicInt64FeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageAtomicInt64FeaturesEXT.sType ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageAtomicInt64FeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageAtomicInt64FeaturesEXT.shaderImageInt64Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageAtomicInt64FeaturesEXT.sparseImageInt64Atomics ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const & physicalDeviceShaderImageFootprintFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageFootprintFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageFootprintFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageFootprintFeaturesNV.imageFootprint ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures const & physicalDeviceShaderIntegerDotProductFeatures ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductFeatures.shaderIntegerDotProduct ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties const & physicalDeviceShaderIntegerDotProductProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct8BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct8BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct8BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct4x8BitPackedUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct4x8BitPackedSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct4x8BitPackedMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct16BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct16BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct16BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct32BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct32BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct32BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct64BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceShaderIntegerDotProductProperties.integerDotProduct64BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct64BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating8BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating16BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating32BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating64BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & physicalDeviceShaderIntegerFunctions2FeaturesINTEL ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerFunctions2FeaturesINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerFunctions2FeaturesINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerFunctions2FeaturesINTEL.shaderIntegerFunctions2 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & physicalDeviceShaderMaximalReconvergenceFeaturesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderMaximalReconvergenceFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderMaximalReconvergenceFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( 
seed, physicalDeviceShaderMaximalReconvergenceFeaturesKHR.shaderMaximalReconvergence ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & physicalDeviceShaderModuleIdentifierFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierFeaturesEXT.shaderModuleIdentifier ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & physicalDeviceShaderModuleIdentifierPropertiesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierPropertiesEXT.pNext ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierPropertiesEXT.shaderModuleIdentifierAlgorithmUUID[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT const & physicalDeviceShaderObjectFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectFeaturesEXT.shaderObject ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT const & physicalDeviceShaderObjectPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.pNext ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.shaderBinaryUUID[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.shaderBinaryVersion ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderQuadControlFeaturesKHR const & physicalDeviceShaderQuadControlFeaturesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderQuadControlFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderQuadControlFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderQuadControlFeaturesKHR.shaderQuadControl ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & + physicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR.shaderRelaxedExtendedInstruction ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & physicalDeviceShaderReplicatedCompositesFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderReplicatedCompositesFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderReplicatedCompositesFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderReplicatedCompositesFeaturesEXT.shaderReplicatedComposites ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const & physicalDeviceShaderSMBuiltinsFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsFeaturesNV.shaderSMBuiltins ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const & physicalDeviceShaderSMBuiltinsPropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsPropertiesNV.shaderSMCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsPropertiesNV.shaderWarpsPerSM ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & physicalDeviceShaderSubgroupExtendedTypesFeatures ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupExtendedTypesFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupExtendedTypesFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupExtendedTypesFeatures.shaderSubgroupExtendedTypes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupRotateFeatures const & physicalDeviceShaderSubgroupRotateFeatures ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupRotateFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupRotateFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupRotateFeatures.shaderSubgroupRotate ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupRotateFeatures.shaderSubgroupRotateClustered ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & + physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.shaderSubgroupUniformControlFlow ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures const & physicalDeviceShaderTerminateInvocationFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTerminateInvocationFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTerminateInvocationFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTerminateInvocationFeatures.shaderTerminateInvocation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT const & physicalDeviceShaderTileImageFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.shaderTileImageColorReadAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.shaderTileImageDepthReadAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.shaderTileImageStencilReadAccess ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT const & physicalDeviceShaderTileImagePropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.shaderTileImageCoherentReadAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.shaderTileImageReadSampleFromPixelRateInvocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.shaderTileImageReadFromHelperInvocation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const & physicalDeviceShadingRateImageFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImageFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImageFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImageFeaturesNV.shadingRateImage ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImageFeaturesNV.shadingRateCoarseSampleOrder ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const & physicalDeviceShadingRateImagePropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.shadingRateTexelSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.shadingRatePaletteSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.shadingRateMaxCoarseSamples ); + 
return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const & physicalDeviceSparseImageFormatInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.format ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.type ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.samples ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.usage ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.tiling ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const & physicalDeviceSubgroupProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.subgroupSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.supportedStages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.supportedOperations ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.quadOperationsInAllStages ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures const & physicalDeviceSubgroupSizeControlFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlFeatures.subgroupSizeControl ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlFeatures.computeFullSubgroups ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties const & physicalDeviceSubgroupSizeControlProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.minSubgroupSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.maxSubgroupSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.maxComputeWorkgroupSubgroups ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.requiredSubgroupSizeStages ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & physicalDeviceSubpassMergeFeedbackFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassMergeFeedbackFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassMergeFeedbackFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassMergeFeedbackFeaturesEXT.subpassMergeFeedback ); + return seed; 
+ } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI const & physicalDeviceSubpassShadingFeaturesHUAWEI ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingFeaturesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingFeaturesHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingFeaturesHUAWEI.subpassShading ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI const & physicalDeviceSubpassShadingPropertiesHUAWEI ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingPropertiesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingPropertiesHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingPropertiesHUAWEI.maxSubpassShadingWorkgroupSizeAspectRatio ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const & physicalDeviceSurfaceInfo2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSurfaceInfo2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSurfaceInfo2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSurfaceInfo2KHR.surface ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & physicalDeviceSwapchainMaintenance1FeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSwapchainMaintenance1FeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSwapchainMaintenance1FeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSwapchainMaintenance1FeaturesEXT.swapchainMaintenance1 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features const & physicalDeviceSynchronization2Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSynchronization2Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSynchronization2Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSynchronization2Features.synchronization2 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & physicalDeviceTexelBufferAlignmentFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentFeaturesEXT.texelBufferAlignment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties const & physicalDeviceTexelBufferAlignmentProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.pNext ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.storageTexelBufferOffsetAlignmentBytes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.storageTexelBufferOffsetSingleTexelAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.uniformTexelBufferOffsetAlignmentBytes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.uniformTexelBufferOffsetSingleTexelAlignment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures const & physicalDeviceTextureCompressionASTCHDRFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTextureCompressionASTCHDRFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTextureCompressionASTCHDRFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTextureCompressionASTCHDRFeatures.textureCompressionASTC_HDR ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTileMemoryHeapFeaturesQCOM const & physicalDeviceTileMemoryHeapFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileMemoryHeapFeaturesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileMemoryHeapFeaturesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileMemoryHeapFeaturesQCOM.tileMemoryHeap ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTileMemoryHeapPropertiesQCOM const & physicalDeviceTileMemoryHeapPropertiesQCOM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileMemoryHeapPropertiesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileMemoryHeapPropertiesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileMemoryHeapPropertiesQCOM.queueSubmitBoundary ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileMemoryHeapPropertiesQCOM.tileBufferTransfers ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM const & physicalDeviceTilePropertiesFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTilePropertiesFeaturesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTilePropertiesFeaturesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTilePropertiesFeaturesQCOM.tileProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTileShadingFeaturesQCOM const & physicalDeviceTileShadingFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingFeaturesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingFeaturesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingFeaturesQCOM.tileShading ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingFeaturesQCOM.tileShadingFragmentStage ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingFeaturesQCOM.tileShadingColorAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingFeaturesQCOM.tileShadingDepthAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceTileShadingFeaturesQCOM.tileShadingStencilAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingFeaturesQCOM.tileShadingInputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingFeaturesQCOM.tileShadingSampledAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingFeaturesQCOM.tileShadingPerTileDraw ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingFeaturesQCOM.tileShadingPerTileDispatch ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingFeaturesQCOM.tileShadingDispatchTile ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingFeaturesQCOM.tileShadingApron ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingFeaturesQCOM.tileShadingAnisotropicApron ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingFeaturesQCOM.tileShadingAtomicOps ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingFeaturesQCOM.tileShadingImageProcessing ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTileShadingPropertiesQCOM const & physicalDeviceTileShadingPropertiesQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingPropertiesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingPropertiesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingPropertiesQCOM.maxApronSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingPropertiesQCOM.preferNonCoherent ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingPropertiesQCOM.tileGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTileShadingPropertiesQCOM.maxTileShadingRate ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const & physicalDeviceTimelineSemaphoreFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreFeatures.timelineSemaphore ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const & physicalDeviceTimelineSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreProperties.maxTimelineSemaphoreValueDifference ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties const & physicalDeviceToolProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.pNext ); + for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.name[i] ); + } + for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.version[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.purposes ); + for ( size_t i = 0; i < 
VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.description[i] ); + } + for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.layer[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const & physicalDeviceTransformFeedbackFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackFeaturesEXT.transformFeedback ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackFeaturesEXT.geometryStreams ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const & physicalDeviceTransformFeedbackPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackStreams ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBufferSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackStreamDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBufferDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBufferDataStride ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackQueries ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackStreamsLinesTriangles ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackRasterizationStreamSelect ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackDraw ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const & physicalDeviceUniformBufferStandardLayoutFeatures ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceUniformBufferStandardLayoutFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceUniformBufferStandardLayoutFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceUniformBufferStandardLayoutFeatures.uniformBufferStandardLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const & physicalDeviceVariablePointersFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVariablePointersFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVariablePointersFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVariablePointersFeatures.variablePointersStorageBuffer ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVariablePointersFeatures.variablePointers ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeatures const & physicalDeviceVertexAttributeDivisorFeatures ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeatures.vertexAttributeInstanceRateDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeatures.vertexAttributeInstanceRateZeroDivisor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorProperties const & physicalDeviceVertexAttributeDivisorProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorProperties.maxVertexAttribDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorProperties.supportsNonZeroFirstInstance ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & physicalDeviceVertexAttributeDivisorPropertiesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesEXT.maxVertexAttribDivisor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & physicalDeviceVertexAttributeRobustnessFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeRobustnessFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeRobustnessFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeRobustnessFeaturesEXT.vertexAttributeRobustness ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & physicalDeviceVertexInputDynamicStateFeaturesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexInputDynamicStateFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexInputDynamicStateFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexInputDynamicStateFeaturesEXT.vertexInputDynamicState ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeAV1FeaturesKHR const & physicalDeviceVideoEncodeAV1FeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeAV1FeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeAV1FeaturesKHR.pNext ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeAV1FeaturesKHR.videoEncodeAV1 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR const & videoProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.videoCodecOperation ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.chromaSubsampling ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.lumaBitDepth ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.chromaBitDepth ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & physicalDeviceVideoEncodeQualityLevelInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQualityLevelInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQualityLevelInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQualityLevelInfoKHR.pVideoProfile ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQualityLevelInfoKHR.qualityLevel ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & physicalDeviceVideoEncodeQuantizationMapFeaturesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQuantizationMapFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQuantizationMapFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQuantizationMapFeaturesKHR.videoEncodeQuantizationMap ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR const & physicalDeviceVideoFormatInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoFormatInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoFormatInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoFormatInfoKHR.imageUsage ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance1FeaturesKHR const & physicalDeviceVideoMaintenance1FeaturesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoMaintenance1FeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoMaintenance1FeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoMaintenance1FeaturesKHR.videoMaintenance1 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance2FeaturesKHR const & physicalDeviceVideoMaintenance2FeaturesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoMaintenance2FeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoMaintenance2FeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoMaintenance2FeaturesKHR.videoMaintenance2 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const & physicalDeviceVulkan11Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.storageBuffer16BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.uniformAndStorageBuffer16BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.storagePushConstant16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.storageInputOutput16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.multiview ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.multiviewGeometryShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.multiviewTessellationShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.variablePointersStorageBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.variablePointers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.protectedMemory ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.samplerYcbcrConversion ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.shaderDrawParameters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const & physicalDeviceVulkan11Properties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.pNext ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.deviceUUID[i] ); + } + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.driverUUID[i] ); + } + for ( size_t i = 0; i < VK_LUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.deviceLUID[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.deviceNodeMask ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.deviceLUIDValid ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.subgroupSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.subgroupSupportedStages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.subgroupSupportedOperations ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.subgroupQuadOperationsInAllStages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.pointClippingBehavior ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.maxMultiviewViewCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.maxMultiviewInstanceIndex ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.protectedNoFault ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.maxPerSetDescriptors ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.maxMemoryAllocationSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const & physicalDeviceVulkan12Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceVulkan12Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.samplerMirrorClampToEdge ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.drawIndirectCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.storageBuffer8BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.uniformAndStorageBuffer8BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.storagePushConstant8 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderBufferInt64Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderSharedInt64Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderInt8 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderInputAttachmentArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderUniformTexelBufferArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderStorageTexelBufferArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderUniformBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderSampledImageArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderStorageBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderStorageImageArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderInputAttachmentArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderUniformTexelBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderStorageTexelBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingUniformBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingSampledImageUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingStorageImageUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingStorageBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingUniformTexelBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingStorageTexelBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingUpdateUnusedWhilePending ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingPartiallyBound ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingVariableDescriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.runtimeDescriptorArray ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.samplerFilterMinmax ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.scalarBlockLayout ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.imagelessFramebuffer ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceVulkan12Features.uniformBufferStandardLayout ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderSubgroupExtendedTypes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.separateDepthStencilLayouts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.hostQueryReset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.timelineSemaphore ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.bufferDeviceAddress ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.bufferDeviceAddressCaptureReplay ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.bufferDeviceAddressMultiDevice ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.vulkanMemoryModel ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.vulkanMemoryModelDeviceScope ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.vulkanMemoryModelAvailabilityVisibilityChains ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderOutputViewportIndex ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderOutputLayer ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.subgroupBroadcastDynamicId ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const & physicalDeviceVulkan12Properties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.driverID ); + for ( size_t i = 0; i < VK_MAX_DRIVER_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.driverName[i] ); + } + for ( size_t i = 0; i < VK_MAX_DRIVER_INFO_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.driverInfo[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.conformanceVersion ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.denormBehaviorIndependence ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.roundingModeIndependence ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderSignedZeroInfNanPreserveFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderSignedZeroInfNanPreserveFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderSignedZeroInfNanPreserveFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormPreserveFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormPreserveFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormPreserveFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormFlushToZeroFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormFlushToZeroFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormFlushToZeroFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTEFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTEFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTEFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceVulkan12Properties.shaderRoundingModeRTZFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTZFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTZFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxUpdateAfterBindDescriptorsInAllPools ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderUniformBufferArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderSampledImageArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderStorageBufferArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderStorageImageArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderInputAttachmentArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.robustBufferAccessUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.quadDivergentImplicitLod ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindSamplers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindStorageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindSampledImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindStorageImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindInputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageUpdateAfterBindResources ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindSamplers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindStorageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindSampledImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindStorageImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindInputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.supportedDepthResolveModes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.supportedStencilResolveModes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.independentResolveNone ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.independentResolve ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.filterMinmaxSingleComponentFormats ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.filterMinmaxImageComponentMapping ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceVulkan12Properties.maxTimelineSemaphoreValueDifference ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.framebufferIntegerColorSampleCounts ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features const & physicalDeviceVulkan13Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.robustImageAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.inlineUniformBlock ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.descriptorBindingInlineUniformBlockUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.pipelineCreationCacheControl ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.privateData ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.shaderDemoteToHelperInvocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.shaderTerminateInvocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.subgroupSizeControl ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.computeFullSubgroups ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.synchronization2 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.textureCompressionASTC_HDR ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.shaderZeroInitializeWorkgroupMemory ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.dynamicRendering ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.shaderIntegerDotProduct ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.maintenance4 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties const & physicalDeviceVulkan13Properties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.minSubgroupSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxSubgroupSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxComputeWorkgroupSubgroups ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.requiredSubgroupSizeStages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxInlineUniformBlockSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxPerStageDescriptorInlineUniformBlocks ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxDescriptorSetInlineUniformBlocks ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxInlineUniformTotalSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct8BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct8BitSignedAccelerated ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct8BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct4x8BitPackedUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct4x8BitPackedSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct4x8BitPackedMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct16BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct16BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct16BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct32BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct32BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct32BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct64BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct64BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct64BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating8BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating16BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating32BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating64BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( 
seed, physicalDeviceVulkan13Properties.storageTexelBufferOffsetAlignmentBytes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.storageTexelBufferOffsetSingleTexelAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.uniformTexelBufferOffsetAlignmentBytes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.uniformTexelBufferOffsetSingleTexelAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxBufferSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Features const & physicalDeviceVulkan14Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.globalPriorityQuery ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.shaderSubgroupRotate ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.shaderSubgroupRotateClustered ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.shaderFloatControls2 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.shaderExpectAssume ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.rectangularLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.bresenhamLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.smoothLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.stippledRectangularLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.stippledBresenhamLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.stippledSmoothLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.vertexAttributeInstanceRateDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.vertexAttributeInstanceRateZeroDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.indexTypeUint8 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.dynamicRenderingLocalRead ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.maintenance5 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.maintenance6 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.pipelineProtectedAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.pipelineRobustness ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.hostImageCopy ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.pushDescriptor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Properties const & physicalDeviceVulkan14Properties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.lineSubPixelPrecisionBits ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.maxVertexAttribDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.supportsNonZeroFirstInstance ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.maxPushDescriptors ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceVulkan14Properties.dynamicRenderingLocalReadDepthStencilAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.dynamicRenderingLocalReadMultisampledAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.earlyFragmentMultisampleCoverageAfterSampleCounting ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.earlyFragmentSampleMaskTestBeforeSampleCounting ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.depthStencilSwizzleOneSupport ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.polygonModePointSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.nonStrictSinglePixelWideLinesUseParallelogram ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.nonStrictWideLinesUseParallelogram ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.blockTexelViewCompatibleMultipleLayers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.maxCombinedImageSamplerDescriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.fragmentShadingRateClampCombinerInputs ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.defaultRobustnessStorageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.defaultRobustnessUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.defaultRobustnessVertexInputs ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.defaultRobustnessImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.copySrcLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.pCopySrcLayouts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.copyDstLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.pCopyDstLayouts ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.optimalTilingLayoutUUID[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.identicalMemoryTypeRequirements ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const & physicalDeviceVulkanMemoryModelFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.vulkanMemoryModel ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.vulkanMemoryModelDeviceScope ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.vulkanMemoryModelAvailabilityVisibilityChains ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & + physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayout ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayoutScalarBlockLayout ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayout8BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayout16BitAccess ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & physicalDeviceYcbcr2Plane444FormatsFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcr2Plane444FormatsFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcr2Plane444FormatsFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcr2Plane444FormatsFeaturesEXT.ycbcr2plane444Formats ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrDegammaFeaturesQCOM const & physicalDeviceYcbcrDegammaFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrDegammaFeaturesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrDegammaFeaturesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrDegammaFeaturesQCOM.ycbcrDegamma ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const & physicalDeviceYcbcrImageArraysFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrImageArraysFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrImageArraysFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrImageArraysFeaturesEXT.ycbcrImageArrays ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & physicalDeviceZeroInitializeWorkgroupMemoryFeatures ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceZeroInitializeWorkgroupMemoryFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceZeroInitializeWorkgroupMemoryFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceZeroInitializeWorkgroupMemoryFeatures.shaderZeroInitializeWorkgroupMemory ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR const & pipelineBinaryKeyKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeyKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeyKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeyKHR.keySize ); + for ( size_t i = 0; i < VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeyKHR.key[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR const & pipelineBinaryDataKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryDataKHR.dataSize ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryDataKHR.pData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR 
const & pipelineBinaryKeysAndDataKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeysAndDataKHR.binaryCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeysAndDataKHR.pPipelineBinaryKeys ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeysAndDataKHR.pPipelineBinaryData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR const & pipelineCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateInfoKHR.pNext ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const & pipelineBinaryCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryCreateInfoKHR.pKeysAndDataInfo ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryCreateInfoKHR.pipeline ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryCreateInfoKHR.pPipelineCreateInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR const & pipelineBinaryDataInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryDataInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryDataInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryDataInfoKHR.pipelineBinary ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR const & pipelineBinaryHandlesInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryHandlesInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryHandlesInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryHandlesInfoKHR.pipelineBinaryCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryHandlesInfoKHR.pPipelineBinaries ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryInfoKHR const & pipelineBinaryInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryInfoKHR.binaryCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryInfoKHR.pPipelineBinaries ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & pipelineCacheCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.initialDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.pInitialData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne const & pipelineCacheHeaderVersionOne ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + 
VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.headerSize ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.headerVersion ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.vendorID ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.deviceID ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.pipelineCacheUUID[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const & pipelineColorBlendAdvancedStateCreateInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.srcPremultiplied ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.dstPremultiplied ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.blendOverlap ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT const & pipelineColorWriteCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorWriteCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorWriteCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorWriteCreateInfoEXT.attachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorWriteCreateInfoEXT.pColorWriteEnables ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const & pipelineCompilerControlCreateInfoAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCompilerControlCreateInfoAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCompilerControlCreateInfoAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCompilerControlCreateInfoAMD.compilerControlFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const & pipelineCoverageModulationStateCreateInfoNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.coverageModulationMode ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.coverageModulationTableEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.coverageModulationTableCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.pCoverageModulationTable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const & pipelineCoverageReductionStateCreateInfoNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageReductionStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
pipelineCoverageReductionStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageReductionStateCreateInfoNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageReductionStateCreateInfoNV.coverageReductionMode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const & pipelineCoverageToColorStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.coverageToColorEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.coverageToColorLocation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfo const & pipelineCreateFlags2CreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfo.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback const & pipelineCreationFeedback ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedback.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedback.duration ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo const & pipelineCreationFeedbackCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.pPipelineCreationFeedback ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.pipelineStageCreationFeedbackCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.pPipelineStageCreationFeedbacks ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const & pipelineDiscardRectangleStateCreateInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.discardRectangleMode ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.discardRectangleCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.pDiscardRectangles ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const & pipelineExecutableInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInfoKHR.sType ); + 
VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInfoKHR.pipeline ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInfoKHR.executableIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const & pipelineExecutableInternalRepresentationKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.pNext ); + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.name[i] ); + } + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.description[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.isText ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.dataSize ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.pData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const & pipelineExecutablePropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.stages ); + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.name[i] ); + } + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.description[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.subgroupSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV const & pipelineFragmentShadingRateEnumStateCreateInfoNV ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.shadingRateType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.shadingRate ); + for ( size_t i = 0; i < 2; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.combinerOps[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR const & pipelineFragmentShadingRateStateCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateStateCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateStateCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateStateCreateInfoKHR.fragmentSize ); + for ( size_t i = 0; i < 2; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateStateCreateInfoKHR.combinerOps[i] ); + } + return seed; + } + }; + + template 
<> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV const & pipelineIndirectDeviceAddressInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.pipelineBindPoint ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.pipeline ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineInfoKHR const & pipelineInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineInfoKHR.pipeline ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & pipelineLayoutCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.setLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.pSetLayouts ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.pushConstantRangeCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.pPushConstantRanges ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT const & pipelinePropertiesIdentifierEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelinePropertiesIdentifierEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelinePropertiesIdentifierEXT.pNext ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, pipelinePropertiesIdentifierEXT.pipelineIdentifier[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const & pipelineRasterizationConservativeStateCreateInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.conservativeRasterizationMode ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.extraPrimitiveOverestimationSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const & pipelineRasterizationDepthClipStateCreateInfoEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationDepthClipStateCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationDepthClipStateCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationDepthClipStateCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, 
pipelineRasterizationDepthClipStateCreateInfoEXT.depthClipEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfo const & pipelineRasterizationLineStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfo.lineRasterizationMode ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfo.stippledLineEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfo.lineStippleFactor ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfo.lineStipplePattern ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT const & + pipelineRasterizationProvokingVertexStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationProvokingVertexStateCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationProvokingVertexStateCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationProvokingVertexStateCreateInfoEXT.provokingVertexMode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const & pipelineRasterizationStateRasterizationOrderAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateRasterizationOrderAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateRasterizationOrderAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateRasterizationOrderAMD.rasterizationOrder ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const & pipelineRasterizationStateStreamCreateInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateStreamCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateStreamCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateStreamCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateStreamCreateInfoEXT.rasterizationStream ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo const & pipelineRenderingCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.viewMask ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.pColorAttachmentFormats ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.depthAttachmentFormat ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.stencilAttachmentFormat ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( 
VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const & pipelineRepresentativeFragmentTestStateCreateInfoNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRepresentativeFragmentTestStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRepresentativeFragmentTestStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRepresentativeFragmentTestStateCreateInfoNV.representativeFragmentTestEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfo const & pipelineRobustnessCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfo.storageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfo.uniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfo.vertexInputs ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfo.images ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const & pipelineSampleLocationsStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineSampleLocationsStateCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineSampleLocationsStateCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineSampleLocationsStateCreateInfoEXT.sampleLocationsEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineSampleLocationsStateCreateInfoEXT.sampleLocationsInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT const & pipelineShaderStageModuleIdentifierCreateInfoEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageModuleIdentifierCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageModuleIdentifierCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageModuleIdentifierCreateInfoEXT.identifierSize ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageModuleIdentifierCreateInfoEXT.pIdentifier ); + return seed; + } + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX const & pipelineShaderStageNodeCreateInfoAMDX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageNodeCreateInfoAMDX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageNodeCreateInfoAMDX.pNext ); + for ( const char * p = pipelineShaderStageNodeCreateInfoAMDX.pName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageNodeCreateInfoAMDX.index ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo const & pipelineShaderStageRequiredSubgroupSizeCreateInfo ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageRequiredSubgroupSizeCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
pipelineShaderStageRequiredSubgroupSizeCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageRequiredSubgroupSizeCreateInfo.requiredSubgroupSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const & pipelineTessellationDomainOriginStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationDomainOriginStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationDomainOriginStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationDomainOriginStateCreateInfo.domainOrigin ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription const & vertexInputBindingDivisorDescription ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDivisorDescription.binding ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDivisorDescription.divisor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfo const & pipelineVertexInputDivisorStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfo.vertexBindingDivisorCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfo.pVertexBindingDivisors ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const & pipelineViewportCoarseSampleOrderStateCreateInfoNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.sampleOrderType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.customSampleOrderCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.pCustomSampleOrders ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportDepthClampControlCreateInfoEXT const & pipelineViewportDepthClampControlCreateInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClampControlCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClampControlCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClampControlCreateInfoEXT.depthClampMode ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClampControlCreateInfoEXT.pDepthClampRange ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT const & pipelineViewportDepthClipControlCreateInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClipControlCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
pipelineViewportDepthClipControlCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClipControlCreateInfoEXT.negativeOneToOne ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const & pipelineViewportExclusiveScissorStateCreateInfoNV ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportExclusiveScissorStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportExclusiveScissorStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportExclusiveScissorStateCreateInfoNV.exclusiveScissorCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportExclusiveScissorStateCreateInfoNV.pExclusiveScissors ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const & shadingRatePaletteNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shadingRatePaletteNV.shadingRatePaletteEntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, shadingRatePaletteNV.pShadingRatePaletteEntries ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const & pipelineViewportShadingRateImageStateCreateInfoNV ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.shadingRateImageEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.viewportCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.pShadingRatePalettes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const & viewportSwizzleNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, viewportSwizzleNV.x ); + VULKAN_HPP_HASH_COMBINE( seed, viewportSwizzleNV.y ); + VULKAN_HPP_HASH_COMBINE( seed, viewportSwizzleNV.z ); + VULKAN_HPP_HASH_COMBINE( seed, viewportSwizzleNV.w ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const & pipelineViewportSwizzleStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.viewportCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.pViewportSwizzles ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ViewportWScalingNV const & viewportWScalingNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, viewportWScalingNV.xcoeff ); + VULKAN_HPP_HASH_COMBINE( seed, viewportWScalingNV.ycoeff ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( 
VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const & pipelineViewportWScalingStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.viewportWScalingEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.viewportCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.pViewportWScalings ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_GGP ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const & presentFrameTokenGGP ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, presentFrameTokenGGP.sType ); + VULKAN_HPP_HASH_COMBINE( seed, presentFrameTokenGGP.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, presentFrameTokenGGP.frameToken ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_GGP*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentIdKHR const & presentIdKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, presentIdKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, presentIdKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, presentIdKHR.swapchainCount ); + VULKAN_HPP_HASH_COMBINE( seed, presentIdKHR.pPresentIds ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentInfoKHR const & presentInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.waitSemaphoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.pWaitSemaphores ); + VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.swapchainCount ); + VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.pSwapchains ); + VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.pImageIndices ); + VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.pResults ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RectLayerKHR const & rectLayerKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, rectLayerKHR.offset ); + VULKAN_HPP_HASH_COMBINE( seed, rectLayerKHR.extent ); + VULKAN_HPP_HASH_COMBINE( seed, rectLayerKHR.layer ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentRegionKHR const & presentRegionKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, presentRegionKHR.rectangleCount ); + VULKAN_HPP_HASH_COMBINE( seed, presentRegionKHR.pRectangles ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentRegionsKHR const & presentRegionsKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, presentRegionsKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, presentRegionsKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, presentRegionsKHR.swapchainCount ); + VULKAN_HPP_HASH_COMBINE( seed, presentRegionsKHR.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const & presentTimeGOOGLE ) 
const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, presentTimeGOOGLE.presentID ); + VULKAN_HPP_HASH_COMBINE( seed, presentTimeGOOGLE.desiredPresentTime ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const & presentTimesInfoGOOGLE ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, presentTimesInfoGOOGLE.sType ); + VULKAN_HPP_HASH_COMBINE( seed, presentTimesInfoGOOGLE.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, presentTimesInfoGOOGLE.swapchainCount ); + VULKAN_HPP_HASH_COMBINE( seed, presentTimesInfoGOOGLE.pTimes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & privateDataSlotCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, privateDataSlotCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, privateDataSlotCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, privateDataSlotCreateInfo.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const & protectedSubmitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, protectedSubmitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, protectedSubmitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, protectedSubmitInfo.protectedSubmit ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PushConstantsInfo const & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.layout ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.offset ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.size ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.pValues ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSet const & writeDescriptorSet ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.sType ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.dstSet ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.dstBinding ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.dstArrayElement ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.descriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.descriptorType ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pImageInfo ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pBufferInfo ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pTexelBufferView ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo const & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.layout ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.set 
); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.descriptorWriteCount ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.pDescriptorWrites ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo const & pushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfo.descriptorUpdateTemplate ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfo.layout ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfo.set ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfo.pData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV const & queryLowLatencySupportNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queryLowLatencySupportNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queryLowLatencySupportNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queryLowLatencySupportNV.pQueriedLowLatencyData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & queryPoolCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.queryType ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.queryCount ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.pipelineStatistics ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const & queryPoolPerformanceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.queueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.counterIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.pCounterIndices ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL const & queryPoolPerformanceQueryCreateInfoINTEL ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceQueryCreateInfoINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceQueryCreateInfoINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceQueryCreateInfoINTEL.performanceCountersSampling ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::QueryPoolVideoEncodeFeedbackCreateInfoKHR const & queryPoolVideoEncodeFeedbackCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queryPoolVideoEncodeFeedbackCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolVideoEncodeFeedbackCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
queryPoolVideoEncodeFeedbackCreateInfoKHR.encodeFeedbackFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV const & queueFamilyCheckpointProperties2NV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointProperties2NV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointProperties2NV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointProperties2NV.checkpointExecutionStageMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const & queueFamilyCheckpointPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointPropertiesNV.checkpointExecutionStageMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityProperties const & queueFamilyGlobalPriorityProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityProperties.priorityCount ); + for ( size_t i = 0; i < VK_MAX_GLOBAL_PRIORITY_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityProperties.priorities[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyProperties const & queueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties.queueFlags ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties.queueCount ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties.timestampValidBits ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties.minImageTransferGranularity ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const & queueFamilyProperties2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties2.queueFamilyProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR const & queueFamilyQueryResultStatusPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyQueryResultStatusPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyQueryResultStatusPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyQueryResultStatusPropertiesKHR.queryResultStatusSupport ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR const & queueFamilyVideoPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyVideoPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyVideoPropertiesKHR.pNext ); + 
VULKAN_HPP_HASH_COMBINE( seed, queueFamilyVideoPropertiesKHR.videoCodecOperations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & + rayTracingPipelineClusterAccelerationStructureCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineClusterAccelerationStructureCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineClusterAccelerationStructureCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineClusterAccelerationStructureCreateInfoNV.allowClusterAccelerationStructure ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR const & rayTracingShaderGroupCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.type ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.generalShader ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.closestHitShader ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.anyHitShader ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.intersectionShader ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.pShaderGroupCaptureReplayHandle ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR const & rayTracingPipelineInterfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineInterfaceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineInterfaceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineInterfaceCreateInfoKHR.maxPipelineRayPayloadSize ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineInterfaceCreateInfoKHR.maxPipelineRayHitAttributeSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & rayTracingPipelineCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.stageCount ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pStages ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.groupCount ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pGroups ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.maxPipelineRayRecursionDepth ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pLibraryInfo ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pLibraryInterface ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pDynamicState ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.layout ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.basePipelineHandle ); + 
VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.basePipelineIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const & rayTracingShaderGroupCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.type ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.generalShader ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.closestHitShader ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.anyHitShader ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.intersectionShader ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & rayTracingPipelineCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.stageCount ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.pStages ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.groupCount ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.pGroups ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.maxRecursionDepth ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.layout ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.basePipelineHandle ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.basePipelineIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const & refreshCycleDurationGOOGLE ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, refreshCycleDurationGOOGLE.refreshDuration ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR const & releaseCapturedPipelineDataInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, releaseCapturedPipelineDataInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, releaseCapturedPipelineDataInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, releaseCapturedPipelineDataInfoKHR.pipeline ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT const & releaseSwapchainImagesInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.swapchain ); + VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.imageIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.pImageIndices ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const & renderPassAttachmentBeginInfo ) const 
VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassAttachmentBeginInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassAttachmentBeginInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassAttachmentBeginInfo.attachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassAttachmentBeginInfo.pAttachments ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const & renderPassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.renderPass ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.framebuffer ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.renderArea ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.clearValueCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.pClearValues ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassDescription const & subpassDescription ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.flags ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pipelineBindPoint ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.inputAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pInputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pColorAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pResolveAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pDepthStencilAttachment ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.preserveAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pPreserveAttachments ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassDependency const & subpassDependency ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.srcSubpass ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.dstSubpass ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.srcStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.dstStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.dstAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.dependencyFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & renderPassCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.attachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.pAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.subpassCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.pSubpasses ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.dependencyCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.pDependencies ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t 
operator()( VULKAN_HPP_NAMESPACE::SubpassDescription2 const & subpassDescription2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.flags ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pipelineBindPoint ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.viewMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.inputAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pInputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pColorAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pResolveAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pDepthStencilAttachment ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.preserveAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pPreserveAttachments ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassDependency2 const & subpassDependency2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.srcSubpass ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.dstSubpass ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.srcStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.dstStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.dstAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.dependencyFlags ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.viewOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & renderPassCreateInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.flags ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.attachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.subpassCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pSubpasses ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.dependencyCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pDependencies ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.correlatedViewMaskCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pCorrelatedViewMasks ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT const & renderPassCreationControlEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationControlEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationControlEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationControlEXT.disallowMerging ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT const & 
renderPassCreationFeedbackInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationFeedbackInfoEXT.postMergeSubpassCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT const & renderPassCreationFeedbackCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationFeedbackCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationFeedbackCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationFeedbackCreateInfoEXT.pRenderPassFeedback ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const & renderPassFragmentDensityMapCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassFragmentDensityMapCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassFragmentDensityMapCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassFragmentDensityMapCreateInfoEXT.fragmentDensityMapAttachment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapOffsetEndInfoEXT const & renderPassFragmentDensityMapOffsetEndInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassFragmentDensityMapOffsetEndInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassFragmentDensityMapOffsetEndInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassFragmentDensityMapOffsetEndInfoEXT.fragmentDensityOffsetCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassFragmentDensityMapOffsetEndInfoEXT.pFragmentDensityOffsets ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const & renderPassInputAttachmentAspectCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassInputAttachmentAspectCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassInputAttachmentAspectCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassInputAttachmentAspectCreateInfo.aspectReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassInputAttachmentAspectCreateInfo.pAspectReferences ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const & renderPassMultiviewCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.subpassCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.pViewMasks ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.dependencyCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.pViewOffsets ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.correlationMaskCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.pCorrelationMasks ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const & subpassSampleLocationsEXT ) const VULKAN_HPP_NOEXCEPT + { + 
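+        // Illustrative note (not part of the generated header): every specialization in this file
+        // follows the same pattern -- start from a zero seed and fold each member in with
+        // VULKAN_HPP_HASH_COMBINE, a boost-style hash_combine that mixes std::hash of the member
+        // into the running seed. A rough sketch of the idea (the exact mixing constant used by the
+        // macro is an assumption here):
+        //
+        //   template <typename T>
+        //   void hash_combine( std::size_t & seed, T const & v )
+        //   {
+        //     seed ^= std::hash<T>{}( v ) + 0x9e3779b9 + ( seed << 6 ) + ( seed >> 2 );
+        //   }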
std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassSampleLocationsEXT.subpassIndex ); + VULKAN_HPP_HASH_COMBINE( seed, subpassSampleLocationsEXT.sampleLocationsInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const & renderPassSampleLocationsBeginInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.attachmentInitialSampleLocationsCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.pAttachmentInitialSampleLocations ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.postSubpassSampleLocationsCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.pPostSubpassSampleLocations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM const & renderPassStripeInfoARM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeInfoARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeInfoARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeInfoARM.stripeArea ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassStripeBeginInfoARM const & renderPassStripeBeginInfoARM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeBeginInfoARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeBeginInfoARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeBeginInfoARM.stripeInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeBeginInfoARM.pStripeInfos ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo const & semaphoreSubmitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.value ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.stageMask ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.deviceIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassStripeSubmitInfoARM const & renderPassStripeSubmitInfoARM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeSubmitInfoARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeSubmitInfoARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeSubmitInfoARM.stripeSemaphoreInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeSubmitInfoARM.pStripeSemaphoreInfos ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT const & renderPassSubpassFeedbackInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackInfoEXT.subpassMergeStatus ); + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, 
renderPassSubpassFeedbackInfoEXT.description[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackInfoEXT.postMergeIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT const & renderPassSubpassFeedbackCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackCreateInfoEXT.pSubpassFeedback ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassTileShadingCreateInfoQCOM const & renderPassTileShadingCreateInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassTileShadingCreateInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassTileShadingCreateInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassTileShadingCreateInfoQCOM.flags ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassTileShadingCreateInfoQCOM.tileApronSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM const & renderPassTransformBeginInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassTransformBeginInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassTransformBeginInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassTransformBeginInfoQCOM.transform ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingAreaInfo const & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.viewMask ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.pColorAttachmentFormats ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.depthAttachmentFormat ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.stencilAttachmentFormat ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo const & renderingAttachmentLocationInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderingAttachmentLocationInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAttachmentLocationInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAttachmentLocationInfo.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAttachmentLocationInfo.pColorAttachmentLocations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingEndInfoEXT const & renderingEndInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderingEndInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingEndInfoEXT.pNext ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT const & renderingFragmentDensityMapAttachmentInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( 
seed, renderingFragmentDensityMapAttachmentInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentDensityMapAttachmentInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentDensityMapAttachmentInfoEXT.imageView ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentDensityMapAttachmentInfoEXT.imageLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR const & renderingFragmentShadingRateAttachmentInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.imageView ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.imageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.shadingRateAttachmentTexelSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingInfo const & renderingInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.renderArea ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.layerCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.viewMask ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.pColorAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.pDepthAttachment ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.pStencilAttachment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo const & renderingInputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfo.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfo.pColorAttachmentInputIndices ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfo.pDepthInputAttachmentIndex ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfo.pStencilInputAttachmentIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ResolveImageInfo2 const & resolveImageInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.srcImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.dstImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SamplerBlockMatchWindowCreateInfoQCOM const & 
samplerBlockMatchWindowCreateInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerBlockMatchWindowCreateInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerBlockMatchWindowCreateInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerBlockMatchWindowCreateInfoQCOM.windowExtent ); + VULKAN_HPP_HASH_COMBINE( seed, samplerBlockMatchWindowCreateInfoQCOM.windowCompareMode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT const & samplerBorderColorComponentMappingCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerBorderColorComponentMappingCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerBorderColorComponentMappingCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerBorderColorComponentMappingCreateInfoEXT.components ); + VULKAN_HPP_HASH_COMBINE( seed, samplerBorderColorComponentMappingCreateInfoEXT.srgb ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT const & samplerCaptureDescriptorDataInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerCaptureDescriptorDataInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCaptureDescriptorDataInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCaptureDescriptorDataInfoEXT.sampler ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & samplerCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.magFilter ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.minFilter ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.mipmapMode ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.addressModeU ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.addressModeV ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.addressModeW ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.mipLodBias ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.anisotropyEnable ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.maxAnisotropy ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.compareEnable ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.compareOp ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.minLod ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.maxLod ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.borderColor ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.unnormalizedCoordinates ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerCubicWeightsCreateInfoQCOM const & samplerCubicWeightsCreateInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerCubicWeightsCreateInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCubicWeightsCreateInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCubicWeightsCreateInfoQCOM.cubicWeights ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const & 
samplerReductionModeCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerReductionModeCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerReductionModeCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerReductionModeCreateInfo.reductionMode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & samplerYcbcrConversionCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.format ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.ycbcrModel ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.ycbcrRange ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.components ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.xChromaOffset ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.yChromaOffset ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.chromaFilter ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.forceExplicitReconstruction ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const & samplerYcbcrConversionImageFormatProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionImageFormatProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionImageFormatProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionImageFormatProperties.combinedImageSamplerDescriptorCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const & samplerYcbcrConversionInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionInfo.conversion ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & samplerYcbcrConversionYcbcrDegammaCreateInfoQCOM ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionYcbcrDegammaCreateInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionYcbcrDegammaCreateInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionYcbcrDegammaCreateInfoQCOM.enableYDegamma ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionYcbcrDegammaCreateInfoQCOM.enableCbCrDegamma ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ScreenBufferFormatPropertiesQNX const & screenBufferFormatPropertiesQNX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.format ); + VULKAN_HPP_HASH_COMBINE( seed, 
screenBufferFormatPropertiesQNX.externalFormat ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.screenUsage ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.formatFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.samplerYcbcrConversionComponents ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.suggestedYcbcrModel ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.suggestedYcbcrRange ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.suggestedXChromaOffset ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.suggestedYChromaOffset ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX const & screenBufferPropertiesQNX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, screenBufferPropertiesQNX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferPropertiesQNX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferPropertiesQNX.allocationSize ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferPropertiesQNX.memoryTypeBits ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & screenSurfaceCreateInfoQNX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.flags ); + VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.context ); + VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.window ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & semaphoreCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreCreateInfo.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const & semaphoreGetFdInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetFdInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetFdInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetFdInfoKHR.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetFdInfoKHR.handleType ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const & semaphoreGetWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetWin32HandleInfoKHR.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetWin32HandleInfoKHR.handleType ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + 
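+    // Illustrative note (not part of the generated header): hash specializations for
+    // platform-specific structures such as SemaphoreGetZirconHandleInfoFUCHSIA are only compiled
+    // when the matching VK_USE_PLATFORM_* macro is defined before this header is included,
+    // typically by the build system, e.g. (assumed compiler invocation):
+    //
+    //   clang++ -DVK_USE_PLATFORM_FUCHSIA -c uses_vulkan_hash.cpp
+    //
+    // Without the define, the surrounding "#if defined( VK_USE_PLATFORM_FUCHSIA )" block is
+    // skipped and std::hash is simply left unspecialized for that type.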
std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA const & semaphoreGetZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetZirconHandleInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetZirconHandleInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetZirconHandleInfoFUCHSIA.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetZirconHandleInfoFUCHSIA.handleType ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const & semaphoreSignalInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSignalInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSignalInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSignalInfo.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSignalInfo.value ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const & semaphoreTypeCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreTypeCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreTypeCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreTypeCreateInfo.semaphoreType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreTypeCreateInfo.initialValue ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const & semaphoreWaitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.semaphoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.pSemaphores ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.pValues ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT const & setDescriptorBufferOffsetsInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.layout ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.firstSet ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.setCount ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.pBufferIndices ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.pOffsets ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV const & setLatencyMarkerInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, setLatencyMarkerInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, setLatencyMarkerInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, setLatencyMarkerInfoNV.presentID ); + VULKAN_HPP_HASH_COMBINE( seed, setLatencyMarkerInfoNV.marker ); + return seed; + } + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t 
operator()( VULKAN_HPP_NAMESPACE::SetPresentConfigNV const & setPresentConfigNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, setPresentConfigNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, setPresentConfigNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, setPresentConfigNV.numFramesPerBatch ); + VULKAN_HPP_HASH_COMBINE( seed, setPresentConfigNV.presentConfigFeedback ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const & setStateFlagsIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, setStateFlagsIndirectCommandNV.data ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & shaderCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.stage ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.nextStage ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.codeType ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.codeSize ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pCode ); + for ( const char * p = shaderCreateInfoEXT.pName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.setLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pSetLayouts ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pushConstantRangeCount ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pPushConstantRanges ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pSpecializationInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & shaderModuleCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.codeSize ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.pCode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT const & shaderModuleIdentifierEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.identifierSize ); + for ( size_t i = 0; i < VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.identifier[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const & shaderModuleValidationCacheCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleValidationCacheCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleValidationCacheCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
shaderModuleValidationCacheCreateInfoEXT.validationCache ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const & shaderResourceUsageAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.numUsedVgprs ); + VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.numUsedSgprs ); + VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.ldsSizePerLocalWorkGroup ); + VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.ldsUsageSizeInBytes ); + VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.scratchMemUsageInBytes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD const & shaderStatisticsInfoAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.shaderStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.resourceUsage ); + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.numPhysicalVgprs ); + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.numPhysicalSgprs ); + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.numAvailableVgprs ); + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.numAvailableSgprs ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.computeWorkGroupSize[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const & sharedPresentSurfaceCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sharedPresentSurfaceCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, sharedPresentSurfaceCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, sharedPresentSurfaceCapabilitiesKHR.sharedPresentSupportedUsageFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const & sparseImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties.aspectMask ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties.imageGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const & sparseImageFormatProperties2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties2.properties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const & sparseImageMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.formatProperties ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.imageMipTailFirstLod ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.imageMipTailSize ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.imageMipTailOffset ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.imageMipTailStride ); + return seed; + } + }; + + 
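+  // Illustrative note (not part of the generated header): each specialization above makes a
+  // Vulkan-Hpp structure usable as a key in std::unordered_map / std::unordered_set. A minimal
+  // usage sketch, assuming a hypothetical sampler-deduplication cache keyed on the full
+  // vk::SamplerCreateInfo (all names below are example names, not library API):
+  //
+  //   #include <unordered_map>
+  //   #include <vulkan/vulkan.hpp>
+  //   #include <vulkan/vulkan_hash.hpp>
+  //
+  //   std::unordered_map<vk::SamplerCreateInfo, vk::UniqueSampler> samplerCache;
+  //
+  //   vk::Sampler getOrCreateSampler( vk::Device device, vk::SamplerCreateInfo const & ci )
+  //   {
+  //     auto it = samplerCache.find( ci );  // lookup uses std::hash<vk::SamplerCreateInfo>
+  //     if ( it == samplerCache.end() )
+  //     {
+  //       it = samplerCache.emplace( ci, device.createSamplerUnique( ci ) ).first;
+  //     }
+  //     return it->second.get();
+  //   }
+  //
+  // std::unordered_map additionally needs equality for the key; Vulkan-Hpp structures provide
+  // member-wise operator==, so the default std::equal_to works here.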
template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const & sparseImageMemoryRequirements2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements2.memoryRequirements ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_GGP ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & streamDescriptorSurfaceCreateInfoGGP ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, streamDescriptorSurfaceCreateInfoGGP.sType ); + VULKAN_HPP_HASH_COMBINE( seed, streamDescriptorSurfaceCreateInfoGGP.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, streamDescriptorSurfaceCreateInfoGGP.flags ); + VULKAN_HPP_HASH_COMBINE( seed, streamDescriptorSurfaceCreateInfoGGP.streamDescriptor ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_GGP*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubmitInfo const & submitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.waitSemaphoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pWaitSemaphores ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pWaitDstStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.commandBufferCount ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pCommandBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.signalSemaphoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pSignalSemaphores ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubmitInfo2 const & submitInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.flags ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.waitSemaphoreInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.pWaitSemaphoreInfos ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.commandBufferInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.pCommandBufferInfos ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.signalSemaphoreInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.pSignalSemaphoreInfos ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassBeginInfo const & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassBeginInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassBeginInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subpassBeginInfo.contents ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const & subpassDescriptionDepthStencilResolve ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassDescriptionDepthStencilResolve.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescriptionDepthStencilResolve.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescriptionDepthStencilResolve.depthResolveMode ); + VULKAN_HPP_HASH_COMBINE( seed, 
subpassDescriptionDepthStencilResolve.stencilResolveMode ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescriptionDepthStencilResolve.pDepthStencilResolveAttachment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassEndInfo const & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassEndInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassEndInfo.pNext ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT const & subpassResolvePerformanceQueryEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassResolvePerformanceQueryEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassResolvePerformanceQueryEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subpassResolvePerformanceQueryEXT.optimal ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI const & subpassShadingPipelineCreateInfoHUAWEI ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassShadingPipelineCreateInfoHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassShadingPipelineCreateInfoHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subpassShadingPipelineCreateInfoHUAWEI.renderPass ); + VULKAN_HPP_HASH_COMBINE( seed, subpassShadingPipelineCreateInfoHUAWEI.subpass ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySize const & subresourceHostMemcpySize ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySize.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySize.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySize.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceLayout2 const & subresourceLayout2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2.subresourceLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const & surfaceCapabilities2EXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.minImageCount ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.maxImageCount ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.currentExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.minImageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.maxImageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.maxImageArrayLayers ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.supportedTransforms ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.currentTransform ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.supportedCompositeAlpha ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.supportedUsageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, 
surfaceCapabilities2EXT.supportedSurfaceCounters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const & surfaceCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.minImageCount ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.maxImageCount ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.currentExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.minImageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.maxImageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.maxImageArrayLayers ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.supportedTransforms ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.currentTransform ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.supportedCompositeAlpha ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.supportedUsageFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const & surfaceCapabilities2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2KHR.surfaceCapabilities ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const & surfaceCapabilitiesFullScreenExclusiveEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesFullScreenExclusiveEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesFullScreenExclusiveEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesFullScreenExclusiveEXT.fullScreenExclusiveSupported ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV const & surfaceCapabilitiesPresentBarrierNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesPresentBarrierNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesPresentBarrierNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesPresentBarrierNV.presentBarrierSupported ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const & surfaceFormatKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormatKHR.format ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormatKHR.colorSpace ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const & surfaceFormat2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormat2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormat2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormat2KHR.surfaceFormat ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const & surfaceFullScreenExclusiveInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + 
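+        // Illustrative note (not part of the generated header): pointer members such as pNext
+        // (and, in other specializations above, array pointers like pAttachments or
+        // pQueueFamilyIndices) are folded in by their pointer value, not by the data they point
+        // to. Two logically identical structures that chain different pNext allocations therefore
+        // hash differently; only inline members are hashed by content.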
std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveInfoEXT.fullScreenExclusive ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const & surfaceFullScreenExclusiveWin32InfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveWin32InfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveWin32InfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveWin32InfoEXT.hmonitor ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT const & surfacePresentModeCompatibilityEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeCompatibilityEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeCompatibilityEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeCompatibilityEXT.presentModeCount ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeCompatibilityEXT.pPresentModes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT const & surfacePresentModeEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeEXT.presentMode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT const & surfacePresentScalingCapabilitiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.supportedPresentScaling ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.supportedPresentGravityX ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.supportedPresentGravityY ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.minScaledImageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.maxScaledImageExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const & surfaceProtectedCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceProtectedCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceProtectedCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceProtectedCapabilitiesKHR.supportsProtected ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const & swapchainCounterCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainCounterCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
swapchainCounterCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCounterCreateInfoEXT.surfaceCounters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & swapchainCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.surface ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.minImageCount ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageFormat ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageColorSpace ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageArrayLayers ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageUsage ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageSharingMode ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.queueFamilyIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.pQueueFamilyIndices ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.preTransform ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.compositeAlpha ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.presentMode ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.clipped ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.oldSwapchain ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const & swapchainDisplayNativeHdrCreateInfoAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainDisplayNativeHdrCreateInfoAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainDisplayNativeHdrCreateInfoAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainDisplayNativeHdrCreateInfoAMD.localDimmingEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainLatencyCreateInfoNV const & swapchainLatencyCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainLatencyCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainLatencyCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainLatencyCreateInfoNV.latencyModeEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV const & swapchainPresentBarrierCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentBarrierCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentBarrierCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentBarrierCreateInfoNV.presentBarrierEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT const & swapchainPresentFenceInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentFenceInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentFenceInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentFenceInfoEXT.swapchainCount ); + VULKAN_HPP_HASH_COMBINE( 
seed, swapchainPresentFenceInfoEXT.pFences ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT const & swapchainPresentModeInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModeInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModeInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModeInfoEXT.swapchainCount ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModeInfoEXT.pPresentModes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT const & swapchainPresentModesCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModesCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModesCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModesCreateInfoEXT.presentModeCount ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModesCreateInfoEXT.pPresentModes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT const & swapchainPresentScalingCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.scalingBehavior ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.presentGravityX ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.presentGravityY ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const & textureLODGatherFormatPropertiesAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, textureLODGatherFormatPropertiesAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, textureLODGatherFormatPropertiesAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, textureLODGatherFormatPropertiesAMD.supportsTextureGatherLODBiasAMD ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TileMemoryBindInfoQCOM const & tileMemoryBindInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, tileMemoryBindInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, tileMemoryBindInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, tileMemoryBindInfoQCOM.memory ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TileMemoryRequirementsQCOM const & tileMemoryRequirementsQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, tileMemoryRequirementsQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, tileMemoryRequirementsQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, tileMemoryRequirementsQCOM.size ); + VULKAN_HPP_HASH_COMBINE( seed, tileMemoryRequirementsQCOM.alignment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TileMemorySizeInfoQCOM const & tileMemorySizeInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, tileMemorySizeInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
tileMemorySizeInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, tileMemorySizeInfoQCOM.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TilePropertiesQCOM const & tilePropertiesQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.tileSize ); + VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.apronSize ); + VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.origin ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const & timelineSemaphoreSubmitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.waitSemaphoreValueCount ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.pWaitSemaphoreValues ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.signalSemaphoreValueCount ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.pSignalSemaphoreValues ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR const & traceRaysIndirectCommand2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.raygenShaderRecordAddress ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.raygenShaderRecordSize ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.missShaderBindingTableAddress ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.missShaderBindingTableSize ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.missShaderBindingTableStride ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.hitShaderBindingTableAddress ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.hitShaderBindingTableSize ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.hitShaderBindingTableStride ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.callableShaderBindingTableAddress ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.callableShaderBindingTableSize ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.callableShaderBindingTableStride ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.width ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.height ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.depth ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR const & traceRaysIndirectCommandKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommandKHR.width ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommandKHR.height ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommandKHR.depth ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & validationCacheCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.sType ); + 
VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.initialDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.pInitialData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const & validationFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.enabledValidationFeatureCount ); + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.pEnabledValidationFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.disabledValidationFeatureCount ); + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.pDisabledValidationFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const & validationFlagsEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.disabledValidationCheckCount ); + VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.pDisabledValidationChecks ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT const & vertexInputAttributeDescription2EXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.location ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.binding ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.format ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.offset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT const & vertexInputBindingDescription2EXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.binding ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.stride ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.inputRate ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.divisor ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_VI_NN ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & viSurfaceCreateInfoNN ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.sType ); + VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.flags ); + VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.window ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_VI_NN*/ + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & videoPictureResourceInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.codedOffset ); + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.codedExtent ); + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.baseArrayLayer ); + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.imageViewBinding ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR const & videoReferenceSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.slotIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.pPictureResource ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR const & videoBeginCodingInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.videoSession ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.videoSessionParameters ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.referenceSlotCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.pReferenceSlots ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR const & videoCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minBitstreamBufferOffsetAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minBitstreamBufferSizeAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.pictureAccessGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minCodedExtent ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxCodedExtent ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxDpbSlots ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxActiveReferencePictures ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.stdHeaderVersion ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR const & videoCodingControlInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoCodingControlInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoCodingControlInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoCodingControlInfoKHR.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeAV1CapabilitiesKHR const & videoDecodeAV1CapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
videoDecodeAV1CapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1CapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1CapabilitiesKHR.maxLevel ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeAV1DpbSlotInfoKHR const & videoDecodeAV1DpbSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1DpbSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1DpbSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1DpbSlotInfoKHR.pStdReferenceInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeAV1InlineSessionParametersInfoKHR const & videoDecodeAV1InlineSessionParametersInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1InlineSessionParametersInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1InlineSessionParametersInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1InlineSessionParametersInfoKHR.pStdSequenceHeader ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeAV1PictureInfoKHR const & videoDecodeAV1PictureInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1PictureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1PictureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1PictureInfoKHR.pStdPictureInfo ); + for ( size_t i = 0; i < VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1PictureInfoKHR.referenceNameSlotIndices[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1PictureInfoKHR.frameHeaderOffset ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1PictureInfoKHR.tileCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1PictureInfoKHR.pTileOffsets ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1PictureInfoKHR.pTileSizes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeAV1ProfileInfoKHR const & videoDecodeAV1ProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1ProfileInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1ProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1ProfileInfoKHR.stdProfile ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1ProfileInfoKHR.filmGrainSupport ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeAV1SessionParametersCreateInfoKHR const & videoDecodeAV1SessionParametersCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1SessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1SessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1SessionParametersCreateInfoKHR.pStdSequenceHeader ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR const & videoDecodeCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeCapabilitiesKHR.pNext ); + 
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeCapabilitiesKHR.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR const & videoDecodeH264CapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesKHR.maxLevelIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesKHR.fieldOffsetGranularity ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR const & videoDecodeH264DpbSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoKHR.pStdReferenceInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264InlineSessionParametersInfoKHR const & videoDecodeH264InlineSessionParametersInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264InlineSessionParametersInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264InlineSessionParametersInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264InlineSessionParametersInfoKHR.pStdSPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264InlineSessionParametersInfoKHR.pStdPPS ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR const & videoDecodeH264PictureInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.pStdPictureInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.sliceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.pSliceOffsets ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR const & videoDecodeH264ProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoKHR.stdProfileIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoKHR.pictureLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR const & videoDecodeH264SessionParametersAddInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.stdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.pStdSPSs ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.stdPPSCount ); + 
VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.pStdPPSs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR const & videoDecodeH264SessionParametersCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.maxStdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.maxStdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.pParametersAddInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR const & videoDecodeH265CapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesKHR.maxLevelIdc ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR const & videoDecodeH265DpbSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265DpbSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265DpbSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265DpbSlotInfoKHR.pStdReferenceInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265InlineSessionParametersInfoKHR const & videoDecodeH265InlineSessionParametersInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265InlineSessionParametersInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265InlineSessionParametersInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265InlineSessionParametersInfoKHR.pStdVPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265InlineSessionParametersInfoKHR.pStdSPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265InlineSessionParametersInfoKHR.pStdPPS ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR const & videoDecodeH265PictureInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.pStdPictureInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.sliceSegmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.pSliceSegmentOffsets ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR const & videoDecodeH265ProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileInfoKHR.stdProfileIdc ); + return seed; + } + }; + + 
template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR const & videoDecodeH265SessionParametersAddInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.stdVPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.pStdVPSs ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.stdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.pStdSPSs ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.stdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.pStdPPSs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR const & videoDecodeH265SessionParametersCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.maxStdVPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.maxStdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.maxStdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.pParametersAddInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR const & videoDecodeInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBufferOffset ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBufferRange ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.dstPictureResource ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.pSetupReferenceSlot ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.referenceSlotCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.pReferenceSlots ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR const & videoDecodeUsageInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeUsageInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeUsageInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeUsageInfoKHR.videoUsageHints ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilitiesKHR const & videoEncodeAV1CapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.flags ); + 
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxLevel ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.codedPictureAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxTiles ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.minTileSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxTileSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.superblockSizes ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxSingleReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.singleReferenceNameMask ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxUnidirectionalCompoundReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxUnidirectionalCompoundGroup1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.unidirectionalCompoundReferenceNameMask ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxBidirectionalCompoundReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxBidirectionalCompoundGroup1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxBidirectionalCompoundGroup2ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.bidirectionalCompoundReferenceNameMask ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxTemporalLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxSpatialLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxOperatingPoints ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.minQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.prefersGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.requiresGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.stdSyntaxFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1DpbSlotInfoKHR const & videoEncodeAV1DpbSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1DpbSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1DpbSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1DpbSlotInfoKHR.pStdReferenceInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR const & videoEncodeAV1FrameSizeKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1FrameSizeKHR.intraFrameSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1FrameSizeKHR.predictiveFrameSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1FrameSizeKHR.bipredictiveFrameSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1GopRemainingFrameInfoKHR const & videoEncodeAV1GopRemainingFrameInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1GopRemainingFrameInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1GopRemainingFrameInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1GopRemainingFrameInfoKHR.useGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeAV1GopRemainingFrameInfoKHR.gopRemainingIntra ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1GopRemainingFrameInfoKHR.gopRemainingPredictive ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1GopRemainingFrameInfoKHR.gopRemainingBipredictive ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1PictureInfoKHR const & videoEncodeAV1PictureInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.predictionMode ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.rateControlGroup ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.constantQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.pStdPictureInfo ); + for ( size_t i = 0; i < VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.referenceNameSlotIndices[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.primaryReferenceCdfOnly ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.generateObuExtensionHeader ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1ProfileInfoKHR const & videoEncodeAV1ProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1ProfileInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1ProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1ProfileInfoKHR.stdProfile ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR const & videoEncodeAV1QIndexKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QIndexKHR.intraQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QIndexKHR.predictiveQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QIndexKHR.bipredictiveQIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QualityLevelPropertiesKHR const & videoEncodeAV1QualityLevelPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredRateControlFlags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredGopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredKeyFramePeriod ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredConsecutiveBipredictiveFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredTemporalLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredConstantQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredMaxSingleReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredSingleReferenceNameMask ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeAV1QualityLevelPropertiesKHR.preferredMaxUnidirectionalCompoundReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredMaxUnidirectionalCompoundGroup1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredUnidirectionalCompoundReferenceNameMask ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredMaxBidirectionalCompoundReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredMaxBidirectionalCompoundGroup1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredMaxBidirectionalCompoundGroup2ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredBidirectionalCompoundReferenceNameMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QuantizationMapCapabilitiesKHR const & videoEncodeAV1QuantizationMapCapabilitiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QuantizationMapCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QuantizationMapCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QuantizationMapCapabilitiesKHR.minQIndexDelta ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QuantizationMapCapabilitiesKHR.maxQIndexDelta ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlInfoKHR const & videoEncodeAV1RateControlInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.gopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.keyFramePeriod ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.consecutiveBipredictiveFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.temporalLayerCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlLayerInfoKHR const & videoEncodeAV1RateControlLayerInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.useMinQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.minQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.useMaxQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.maxQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.useMaxFrameSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.maxFrameSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionCreateInfoKHR const & videoEncodeAV1SessionCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionCreateInfoKHR.sType ); + 
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionCreateInfoKHR.useMaxLevel ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionCreateInfoKHR.maxLevel ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionParametersCreateInfoKHR const & videoEncodeAV1SessionParametersCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionParametersCreateInfoKHR.pStdSequenceHeader ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionParametersCreateInfoKHR.pStdDecoderModelInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionParametersCreateInfoKHR.stdOperatingPointCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionParametersCreateInfoKHR.pStdOperatingPoints ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR const & videoEncodeCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.rateControlModes ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.maxRateControlLayers ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.maxBitrate ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.maxQualityLevels ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.encodeInputPictureGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.supportedEncodeFeedbackFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesKHR const & videoEncodeH264CapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxLevelIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxSliceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxPPictureL0ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxBPictureL0ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxL1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxTemporalLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.expectDyadicTemporalLayerPattern ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.minQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.prefersGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.requiresGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.stdSyntaxFlags ); + return seed; + } + }; + + template <> + 
struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoKHR const & videoEncodeH264DpbSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoKHR.pStdReferenceInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR const & videoEncodeH264FrameSizeKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264FrameSizeKHR.frameISize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264FrameSizeKHR.framePSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264FrameSizeKHR.frameBSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264GopRemainingFrameInfoKHR const & videoEncodeH264GopRemainingFrameInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264GopRemainingFrameInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264GopRemainingFrameInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264GopRemainingFrameInfoKHR.useGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264GopRemainingFrameInfoKHR.gopRemainingI ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264GopRemainingFrameInfoKHR.gopRemainingP ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264GopRemainingFrameInfoKHR.gopRemainingB ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR const & videoEncodeH264NaluSliceInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoKHR.constantQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoKHR.pStdSliceHeader ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264PictureInfoKHR const & videoEncodeH264PictureInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264PictureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264PictureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264PictureInfoKHR.naluSliceEntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264PictureInfoKHR.pNaluSliceEntries ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264PictureInfoKHR.pStdPictureInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264PictureInfoKHR.generatePrefixNalu ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoKHR const & videoEncodeH264ProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileInfoKHR.stdProfileIdc ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const & videoEncodeH264QpKHR ) const VULKAN_HPP_NOEXCEPT + { + 
std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QpKHR.qpI ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QpKHR.qpP ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QpKHR.qpB ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264QualityLevelPropertiesKHR const & videoEncodeH264QualityLevelPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredRateControlFlags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredGopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredIdrPeriod ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredConsecutiveBFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredTemporalLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredConstantQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredMaxL0ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredMaxL1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredStdEntropyCodingModeFlag ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264QuantizationMapCapabilitiesKHR const & videoEncodeH264QuantizationMapCapabilitiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QuantizationMapCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QuantizationMapCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QuantizationMapCapabilitiesKHR.minQpDelta ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QuantizationMapCapabilitiesKHR.maxQpDelta ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoKHR const & videoEncodeH264RateControlInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.gopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.idrPeriod ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.consecutiveBFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.temporalLayerCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoKHR const & videoEncodeH264RateControlLayerInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.useMinQp ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeH264RateControlLayerInfoKHR.minQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.useMaxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.maxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.useMaxFrameSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.maxFrameSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoKHR const & videoEncodeH264SessionCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoKHR.useMaxLevelIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoKHR.maxLevelIdc ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR const & videoEncodeH264SessionParametersAddInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoKHR.stdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoKHR.pStdSPSs ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoKHR.stdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoKHR.pStdPPSs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoKHR const & videoEncodeH264SessionParametersCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoKHR.maxStdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoKHR.maxStdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoKHR.pParametersAddInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersFeedbackInfoKHR const & videoEncodeH264SessionParametersFeedbackInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersFeedbackInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersFeedbackInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersFeedbackInfoKHR.hasStdSPSOverrides ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersFeedbackInfoKHR.hasStdPPSOverrides ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersGetInfoKHR const & videoEncodeH264SessionParametersGetInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersGetInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeH264SessionParametersGetInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersGetInfoKHR.writeStdSPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersGetInfoKHR.writeStdPPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersGetInfoKHR.stdSPSId ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersGetInfoKHR.stdPPSId ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesKHR const & videoEncodeH265CapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxLevelIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxSliceSegmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxTiles ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.ctbSizes ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.transformBlockSizes ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxPPictureL0ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxBPictureL0ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxL1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxSubLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.expectDyadicTemporalSubLayerPattern ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.minQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.prefersGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.requiresGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.stdSyntaxFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoKHR const & videoEncodeH265DpbSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoKHR.pStdReferenceInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR const & videoEncodeH265FrameSizeKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265FrameSizeKHR.frameISize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265FrameSizeKHR.framePSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265FrameSizeKHR.frameBSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265GopRemainingFrameInfoKHR const & videoEncodeH265GopRemainingFrameInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265GopRemainingFrameInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265GopRemainingFrameInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeH265GopRemainingFrameInfoKHR.useGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265GopRemainingFrameInfoKHR.gopRemainingI ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265GopRemainingFrameInfoKHR.gopRemainingP ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265GopRemainingFrameInfoKHR.gopRemainingB ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR const & videoEncodeH265NaluSliceSegmentInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoKHR.constantQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoKHR.pStdSliceSegmentHeader ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265PictureInfoKHR const & videoEncodeH265PictureInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265PictureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265PictureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265PictureInfoKHR.naluSliceSegmentEntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265PictureInfoKHR.pNaluSliceSegmentEntries ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265PictureInfoKHR.pStdPictureInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoKHR const & videoEncodeH265ProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileInfoKHR.stdProfileIdc ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR const & videoEncodeH265QpKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QpKHR.qpI ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QpKHR.qpP ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QpKHR.qpB ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265QualityLevelPropertiesKHR const & videoEncodeH265QualityLevelPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredRateControlFlags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredGopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredIdrPeriod ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredConsecutiveBFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredSubLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredConstantQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredMaxL0ReferenceCount ); + 
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredMaxL1ReferenceCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265QuantizationMapCapabilitiesKHR const & videoEncodeH265QuantizationMapCapabilitiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QuantizationMapCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QuantizationMapCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QuantizationMapCapabilitiesKHR.minQpDelta ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QuantizationMapCapabilitiesKHR.maxQpDelta ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoKHR const & videoEncodeH265RateControlInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoKHR.gopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoKHR.idrPeriod ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoKHR.consecutiveBFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoKHR.subLayerCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoKHR const & videoEncodeH265RateControlLayerInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.useMinQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.minQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.useMaxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.maxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.useMaxFrameSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.maxFrameSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoKHR const & videoEncodeH265SessionCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoKHR.useMaxLevelIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoKHR.maxLevelIdc ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR const & videoEncodeH265SessionParametersAddInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.stdVPSCount ); + 
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.pStdVPSs ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.stdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.pStdSPSs ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.stdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.pStdPPSs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoKHR const & videoEncodeH265SessionParametersCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoKHR.maxStdVPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoKHR.maxStdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoKHR.maxStdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoKHR.pParametersAddInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersFeedbackInfoKHR const & videoEncodeH265SessionParametersFeedbackInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersFeedbackInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersFeedbackInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersFeedbackInfoKHR.hasStdVPSOverrides ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersFeedbackInfoKHR.hasStdSPSOverrides ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersFeedbackInfoKHR.hasStdPPSOverrides ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersGetInfoKHR const & videoEncodeH265SessionParametersGetInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.writeStdVPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.writeStdSPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.writeStdPPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.stdVPSId ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.stdSPSId ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.stdPPSId ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR const & videoEncodeInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBufferOffset ); + VULKAN_HPP_HASH_COMBINE( 
seed, videoEncodeInfoKHR.dstBufferRange ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.srcPictureResource ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.pSetupReferenceSlot ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.referenceSlotCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.pReferenceSlots ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.precedingExternallyEncodedBytes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelInfoKHR const & videoEncodeQualityLevelInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelInfoKHR.qualityLevel ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR const & videoEncodeQualityLevelPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelPropertiesKHR.preferredRateControlMode ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelPropertiesKHR.preferredRateControlLayerCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapCapabilitiesKHR const & videoEncodeQuantizationMapCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapCapabilitiesKHR.maxQuantizationMapExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapInfoKHR const & videoEncodeQuantizationMapInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapInfoKHR.quantizationMap ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapInfoKHR.quantizationMapExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & + videoEncodeQuantizationMapSessionParametersCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapSessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapSessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapSessionParametersCreateInfoKHR.quantizationMapTexelSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR const & videoEncodeRateControlLayerInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeRateControlLayerInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.averageBitrate ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.maxBitrate ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.frameRateNumerator ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.frameRateDenominator ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR const & videoEncodeRateControlInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.rateControlMode ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.layerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.pLayers ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.virtualBufferSizeInMs ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.initialVirtualBufferSizeInMs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR const & videoEncodeSessionParametersFeedbackInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeSessionParametersFeedbackInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeSessionParametersFeedbackInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeSessionParametersFeedbackInfoKHR.hasOverrides ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR const & videoEncodeSessionParametersGetInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeSessionParametersGetInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeSessionParametersGetInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeSessionParametersGetInfoKHR.videoSessionParameters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR const & videoEncodeUsageInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeUsageInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeUsageInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeUsageInfoKHR.videoUsageHints ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeUsageInfoKHR.videoContentHints ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeUsageInfoKHR.tuningMode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR const & videoEndCodingInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEndCodingInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEndCodingInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEndCodingInfoKHR.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoFormatAV1QuantizationMapPropertiesKHR const & videoFormatAV1QuantizationMapPropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + 
VULKAN_HPP_HASH_COMBINE( seed, videoFormatAV1QuantizationMapPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatAV1QuantizationMapPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatAV1QuantizationMapPropertiesKHR.compatibleSuperblockSizes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoFormatH265QuantizationMapPropertiesKHR const & videoFormatH265QuantizationMapPropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoFormatH265QuantizationMapPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatH265QuantizationMapPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatH265QuantizationMapPropertiesKHR.compatibleCtbSizes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR const & videoFormatPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.format ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.componentMapping ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.imageCreateFlags ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.imageType ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.imageTiling ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.imageUsageFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoFormatQuantizationMapPropertiesKHR const & videoFormatQuantizationMapPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoFormatQuantizationMapPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatQuantizationMapPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatQuantizationMapPropertiesKHR.quantizationMapTexelSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoInlineQueryInfoKHR const & videoInlineQueryInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoInlineQueryInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoInlineQueryInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoInlineQueryInfoKHR.queryPool ); + VULKAN_HPP_HASH_COMBINE( seed, videoInlineQueryInfoKHR.firstQuery ); + VULKAN_HPP_HASH_COMBINE( seed, videoInlineQueryInfoKHR.queryCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR const & videoProfileListInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoProfileListInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileListInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileListInfoKHR.profileCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileListInfoKHR.pProfiles ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & videoSessionCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pNext ); + 
VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.queueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pVideoProfile ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pictureFormat ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxCodedExtent ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.referencePictureFormat ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxDpbSlots ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxActiveReferencePictures ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pStdHeaderVersion ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR const & videoSessionMemoryRequirementsKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoSessionMemoryRequirementsKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionMemoryRequirementsKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionMemoryRequirementsKHR.memoryBindIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionMemoryRequirementsKHR.memoryRequirements ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & videoSessionParametersCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.videoSessionParametersTemplate ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.videoSession ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR const & videoSessionParametersUpdateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersUpdateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersUpdateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersUpdateInfoKHR.updateSequenceCount ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & waylandSurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.display ); + VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.surface ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const & win32KeyedMutexAcquireReleaseInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pNext ); + 
VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.acquireCount ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pAcquireSyncs ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pAcquireKeys ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pAcquireTimeouts ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.releaseCount ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pReleaseSyncs ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pReleaseKeys ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const & win32KeyedMutexAcquireReleaseInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.acquireCount ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pAcquireSyncs ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pAcquireKeys ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pAcquireTimeoutMilliseconds ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.releaseCount ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pReleaseSyncs ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pReleaseKeys ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & win32SurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.hinstance ); + VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.hwnd ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR const & writeDescriptorSetAccelerationStructureKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureKHR.accelerationStructureCount ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureKHR.pAccelerationStructures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const & writeDescriptorSetAccelerationStructureNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureNV.accelerationStructureCount ); + 
VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureNV.pAccelerationStructures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock const & writeDescriptorSetInlineUniformBlock ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetInlineUniformBlock.sType ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetInlineUniformBlock.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetInlineUniformBlock.dataSize ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetInlineUniformBlock.pData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSetPartitionedAccelerationStructureNV const & + writeDescriptorSetPartitionedAccelerationStructureNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetPartitionedAccelerationStructureNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetPartitionedAccelerationStructureNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetPartitionedAccelerationStructureNV.accelerationStructureCount ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetPartitionedAccelerationStructureNV.pAccelerationStructures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT const & writeIndirectExecutionSetPipelineEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetPipelineEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetPipelineEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetPipelineEXT.index ); + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetPipelineEXT.pipeline ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT const & writeIndirectExecutionSetShaderEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetShaderEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetShaderEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetShaderEXT.index ); + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetShaderEXT.shader ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & xcbSurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.connection ); + VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.window ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & xlibSurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
xlibSurfaceCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.dpy ); + VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.window ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#endif // 14 <= VULKAN_HPP_CPP_VERSION + +} // namespace std +#endif diff --git a/include/vulkan/vulkan_hpp_macros.hpp b/include/vulkan/vulkan_hpp_macros.hpp new file mode 100644 index 00000000..eb7869b9 --- /dev/null +++ b/include/vulkan/vulkan_hpp_macros.hpp @@ -0,0 +1,326 @@ +// Copyright 2015-2025 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#ifndef VULKAN_HPP_MACROS_HPP +#define VULKAN_HPP_MACROS_HPP + +#if defined( _MSVC_LANG ) +# define VULKAN_HPP_CPLUSPLUS _MSVC_LANG +#else +# define VULKAN_HPP_CPLUSPLUS __cplusplus +#endif + +#if 202002L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 23 +#elif 201703L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 20 +#elif 201402L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 17 +#elif 201103L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 14 +#elif 199711L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 11 +#else +# error "vulkan.hpp needs at least c++ standard version 11" +#endif + +// include headers holding feature-test macros +#if 20 <= VULKAN_HPP_CPP_VERSION +# include +#else +# include +#endif + +#if defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) +# if !defined( VULKAN_HPP_NO_SMART_HANDLE ) +# define VULKAN_HPP_NO_SMART_HANDLE +# endif +#endif + +#if defined( VULKAN_HPP_NO_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# define VULKAN_HPP_NO_STRUCT_CONSTRUCTORS +# endif +# if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) +# define VULKAN_HPP_NO_UNION_CONSTRUCTORS +# endif +#endif + +#if defined( VULKAN_HPP_NO_SETTERS ) +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +# define VULKAN_HPP_NO_STRUCT_SETTERS +# endif +# if !defined( VULKAN_HPP_NO_UNION_SETTERS ) +# define VULKAN_HPP_NO_UNION_SETTERS +# endif +#endif + +#if !defined( VULKAN_HPP_ASSERT ) +# define VULKAN_HPP_ASSERT assert +#endif + +#if !defined( VULKAN_HPP_ASSERT_ON_RESULT ) +# define VULKAN_HPP_ASSERT_ON_RESULT VULKAN_HPP_ASSERT +#endif + +#if !defined( VULKAN_HPP_STATIC_ASSERT ) +# define VULKAN_HPP_STATIC_ASSERT static_assert +#endif + +#if !defined( VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL ) +# define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1 +#endif + +#if !defined( __has_include ) +# define __has_include( x ) false +#endif + +#if defined( __cpp_lib_three_way_comparison ) && ( 201907 <= __cpp_lib_three_way_comparison ) && __has_include( ) && !defined( VULKAN_HPP_NO_SPACESHIP_OPERATOR ) +# define VULKAN_HPP_HAS_SPACESHIP_OPERATOR +#endif + +#if defined( __cpp_lib_span ) && ( 201803 <= __cpp_lib_span ) +# define VULKAN_HPP_SUPPORT_SPAN +#endif + +#if defined( __cpp_lib_modules ) && !defined( VULKAN_HPP_STD_MODULE ) && defined( VULKAN_HPP_ENABLE_STD_MODULE ) +# define VULKAN_HPP_STD_MODULE std.compat +#endif + +#ifndef VK_USE_64_BIT_PTR_DEFINES +# if defined( __LP64__ ) || defined( _WIN64 ) || ( defined( __x86_64__ ) && !defined( __ILP32__ ) ) || defined( _M_X64 ) || defined( __ia64 ) || \ + defined( _M_IA64 ) || defined( __aarch64__ ) || defined( __powerpc64__ ) || ( defined( __riscv ) && __riscv_xlen == 64 ) +# define VK_USE_64_BIT_PTR_DEFINES 1 +# else +# define VK_USE_64_BIT_PTR_DEFINES 0 +# endif +#endif + +// 32-bit vulkan is not typesafe for non-dispatchable handles, so don't 
allow copy constructors on this platform by default. +// To enable this feature on 32-bit platforms please #define VULKAN_HPP_TYPESAFE_CONVERSION 1 +// To disable this feature on 64-bit platforms please #define VULKAN_HPP_TYPESAFE_CONVERSION 0 +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) +# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION ) +# define VULKAN_HPP_TYPESAFE_CONVERSION 1 +# endif +#endif + +#if defined( __GNUC__ ) +# define GCC_VERSION ( __GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__ ) +#endif + +#if !defined( VULKAN_HPP_HAS_UNRESTRICTED_UNIONS ) +# if defined( __clang__ ) +# if __has_feature( cxx_unrestricted_unions ) +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined( __GNUC__ ) +# if 40600 <= GCC_VERSION +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined( _MSC_VER ) +# if 1900 <= _MSC_VER +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# endif +#endif + +#if !defined( VULKAN_HPP_INLINE ) +# if defined( __clang__ ) +# if __has_attribute( always_inline ) +# define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__ +# else +# define VULKAN_HPP_INLINE inline +# endif +# elif defined( __GNUC__ ) +# define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__ +# elif defined( _MSC_VER ) +# define VULKAN_HPP_INLINE inline +# else +# define VULKAN_HPP_INLINE inline +# endif +#endif + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) +# define VULKAN_HPP_TYPESAFE_EXPLICIT +#else +# define VULKAN_HPP_TYPESAFE_EXPLICIT explicit +#endif + +#if defined( __cpp_constexpr ) +# define VULKAN_HPP_CONSTEXPR constexpr +# if 201304 <= __cpp_constexpr +# define VULKAN_HPP_CONSTEXPR_14 constexpr +# else +# define VULKAN_HPP_CONSTEXPR_14 +# endif +# if ( 201907 <= __cpp_constexpr ) && ( !defined( __GNUC__ ) || ( 110400 < GCC_VERSION ) ) +# define VULKAN_HPP_CONSTEXPR_20 constexpr +# else +# define VULKAN_HPP_CONSTEXPR_20 +# endif +# define VULKAN_HPP_CONST_OR_CONSTEXPR constexpr +#else +# define VULKAN_HPP_CONSTEXPR +# define VULKAN_HPP_CONSTEXPR_14 +# define VULKAN_HPP_CONST_OR_CONSTEXPR const +#endif + +#if !defined( VULKAN_HPP_CONSTEXPR_INLINE ) +# if 201606L <= __cpp_inline_variables +# define VULKAN_HPP_CONSTEXPR_INLINE VULKAN_HPP_CONSTEXPR inline +# else +# define VULKAN_HPP_CONSTEXPR_INLINE VULKAN_HPP_CONSTEXPR +# endif +#endif + +#if !defined( VULKAN_HPP_NOEXCEPT ) +# if defined( _MSC_VER ) && ( _MSC_VER <= 1800 ) +# define VULKAN_HPP_NOEXCEPT +# else +# define VULKAN_HPP_NOEXCEPT noexcept +# define VULKAN_HPP_HAS_NOEXCEPT 1 +# if defined( VULKAN_HPP_NO_EXCEPTIONS ) +# define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS noexcept +# else +# define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS +# endif +# endif +#endif + +#if 14 <= VULKAN_HPP_CPP_VERSION +# define VULKAN_HPP_DEPRECATED( msg ) [[deprecated( msg )]] +#else +# define VULKAN_HPP_DEPRECATED( msg ) +#endif + +#if 17 <= VULKAN_HPP_CPP_VERSION +# define VULKAN_HPP_DEPRECATED_17( msg ) [[deprecated( msg )]] +#else +# define VULKAN_HPP_DEPRECATED_17( msg ) +#endif + +#if ( 17 <= VULKAN_HPP_CPP_VERSION ) && !defined( VULKAN_HPP_NO_NODISCARD_WARNINGS ) +# define VULKAN_HPP_NODISCARD [[nodiscard]] +# if defined( VULKAN_HPP_NO_EXCEPTIONS ) +# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS [[nodiscard]] +# else +# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS +# endif +#else +# define VULKAN_HPP_NODISCARD +# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS +#endif + +#if !defined( VULKAN_HPP_NAMESPACE ) +# define VULKAN_HPP_NAMESPACE vk +#endif + +#define VULKAN_HPP_STRINGIFY2( text ) 
#text +#define VULKAN_HPP_STRINGIFY( text ) VULKAN_HPP_STRINGIFY2( text ) +#define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY( VULKAN_HPP_NAMESPACE ) + +#if !defined( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC ) +# if defined( VK_NO_PROTOTYPES ) +# define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1 +# else +# define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 0 +# endif +#endif + +#if !defined( VULKAN_HPP_STORAGE_API ) +# if defined( VULKAN_HPP_STORAGE_SHARED ) +# if defined( _MSC_VER ) +# if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT ) +# define VULKAN_HPP_STORAGE_API __declspec( dllexport ) +# else +# define VULKAN_HPP_STORAGE_API __declspec( dllimport ) +# endif +# elif defined( __clang__ ) || defined( __GNUC__ ) +# if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT ) +# define VULKAN_HPP_STORAGE_API __attribute__( ( visibility( "default" ) ) ) +# else +# define VULKAN_HPP_STORAGE_API +# endif +# else +# define VULKAN_HPP_STORAGE_API +# pragma warning Unknown import / export semantics +# endif +# else +# define VULKAN_HPP_STORAGE_API +# endif +#endif + +namespace VULKAN_HPP_NAMESPACE +{ + namespace detail + { + class DispatchLoaderDynamic; + +#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER ) +# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 + extern VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; +# endif +#endif + } // namespace detail +} // namespace VULKAN_HPP_NAMESPACE + +#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER ) +# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 +# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::detail::defaultDispatchLoaderDynamic +# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE \ + namespace VULKAN_HPP_NAMESPACE \ + { \ + namespace detail \ + { \ + VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; \ + } \ + } +# else +# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::detail::getDispatchLoaderStatic() +# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE +# endif +#endif + +#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ) +# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 +# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderDynamic +# else +# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderStatic +# endif +#endif + +#if defined( VULKAN_HPP_NO_DEFAULT_DISPATCHER ) +# define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT +# define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT +# define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT +#else +# define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT = {} +# define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT = nullptr +# define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT = VULKAN_HPP_DEFAULT_DISPATCHER +#endif + +#if !defined( VULKAN_HPP_EXPECTED ) && ( 23 <= VULKAN_HPP_CPP_VERSION ) && defined( __cpp_lib_expected ) +# if !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) +# include +# endif +# define VULKAN_HPP_EXPECTED std::expected +# define VULKAN_HPP_UNEXPECTED std::unexpected +#endif + +#if !defined( VULKAN_HPP_RAII_NAMESPACE ) +# define VULKAN_HPP_RAII_NAMESPACE raii +#endif + +#if defined( VULKAN_HPP_NO_EXCEPTIONS ) && defined( VULKAN_HPP_EXPECTED ) +# define VULKAN_HPP_RAII_NO_EXCEPTIONS +# define VULKAN_HPP_RAII_CREATE_NOEXCEPT noexcept +#else +# define VULKAN_HPP_RAII_CREATE_NOEXCEPT +#endif + +#endif \ No newline at end of file diff --git a/include/vulkan/vulkan_ios.h b/include/vulkan/vulkan_ios.h new file mode 100644 index 00000000..a705dc60 --- /dev/null +++ 
b/include/vulkan/vulkan_ios.h @@ -0,0 +1,48 @@ +#ifndef VULKAN_IOS_H_ +#define VULKAN_IOS_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_MVK_ios_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_MVK_ios_surface 1 +#define VK_MVK_IOS_SURFACE_SPEC_VERSION 3 +#define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface" +typedef VkFlags VkIOSSurfaceCreateFlagsMVK; +typedef struct VkIOSSurfaceCreateInfoMVK { + VkStructureType sType; + const void* pNext; + VkIOSSurfaceCreateFlagsMVK flags; + const void* pView; +} VkIOSSurfaceCreateInfoMVK; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateIOSSurfaceMVK)(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK( + VkInstance instance, + const VkIOSSurfaceCreateInfoMVK* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_macos.h b/include/vulkan/vulkan_macos.h new file mode 100644 index 00000000..8d5dd05a --- /dev/null +++ b/include/vulkan/vulkan_macos.h @@ -0,0 +1,48 @@ +#ifndef VULKAN_MACOS_H_ +#define VULKAN_MACOS_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_MVK_macos_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_MVK_macos_surface 1 +#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 3 +#define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface" +typedef VkFlags VkMacOSSurfaceCreateFlagsMVK; +typedef struct VkMacOSSurfaceCreateInfoMVK { + VkStructureType sType; + const void* pNext; + VkMacOSSurfaceCreateFlagsMVK flags; + const void* pView; +} VkMacOSSurfaceCreateInfoMVK; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateMacOSSurfaceMVK)(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK( + VkInstance instance, + const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_metal.h b/include/vulkan/vulkan_metal.h new file mode 100644 index 00000000..7e44f54c --- /dev/null +++ b/include/vulkan/vulkan_metal.h @@ -0,0 +1,236 @@ +#ifndef VULKAN_METAL_H_ +#define VULKAN_METAL_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_EXT_metal_surface is a preprocessor guard. Do not pass it to API calls. 
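// A minimal usage sketch, assuming the application already owns a valid VkInstance `instance`
// and a CAMetalLayer `metalLayer` backing its view; it shows how the surface-creation entry
// point declared just below is typically called (all names other than the Vulkan API are
// illustrative assumptions, not part of the generated header):
//
//   VkMetalSurfaceCreateInfoEXT surfaceInfo = {};
//   surfaceInfo.sType  = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT;
//   surfaceInfo.pLayer = metalLayer;                       // layer to present into
//   VkSurfaceKHR surface = VK_NULL_HANDLE;
//   VkResult     result  = vkCreateMetalSurfaceEXT( instance, &surfaceInfo, nullptr, &surface );
//   // `surface` can then be used with VK_KHR_swapchain as on any other platform.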
+#define VK_EXT_metal_surface 1 +#ifdef __OBJC__ +@class CAMetalLayer; +#else +typedef void CAMetalLayer; +#endif + +#define VK_EXT_METAL_SURFACE_SPEC_VERSION 1 +#define VK_EXT_METAL_SURFACE_EXTENSION_NAME "VK_EXT_metal_surface" +typedef VkFlags VkMetalSurfaceCreateFlagsEXT; +typedef struct VkMetalSurfaceCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkMetalSurfaceCreateFlagsEXT flags; + const CAMetalLayer* pLayer; +} VkMetalSurfaceCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateMetalSurfaceEXT)(VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateMetalSurfaceEXT( + VkInstance instance, + const VkMetalSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +// VK_EXT_metal_objects is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_metal_objects 1 +#ifdef __OBJC__ +@protocol MTLDevice; +typedef __unsafe_unretained id MTLDevice_id; +#else +typedef void* MTLDevice_id; +#endif + +#ifdef __OBJC__ +@protocol MTLCommandQueue; +typedef __unsafe_unretained id MTLCommandQueue_id; +#else +typedef void* MTLCommandQueue_id; +#endif + +#ifdef __OBJC__ +@protocol MTLBuffer; +typedef __unsafe_unretained id MTLBuffer_id; +#else +typedef void* MTLBuffer_id; +#endif + +#ifdef __OBJC__ +@protocol MTLTexture; +typedef __unsafe_unretained id MTLTexture_id; +#else +typedef void* MTLTexture_id; +#endif + +typedef struct __IOSurface* IOSurfaceRef; +#ifdef __OBJC__ +@protocol MTLSharedEvent; +typedef __unsafe_unretained id MTLSharedEvent_id; +#else +typedef void* MTLSharedEvent_id; +#endif + +#define VK_EXT_METAL_OBJECTS_SPEC_VERSION 2 +#define VK_EXT_METAL_OBJECTS_EXTENSION_NAME "VK_EXT_metal_objects" + +typedef enum VkExportMetalObjectTypeFlagBitsEXT { + VK_EXPORT_METAL_OBJECT_TYPE_METAL_DEVICE_BIT_EXT = 0x00000001, + VK_EXPORT_METAL_OBJECT_TYPE_METAL_COMMAND_QUEUE_BIT_EXT = 0x00000002, + VK_EXPORT_METAL_OBJECT_TYPE_METAL_BUFFER_BIT_EXT = 0x00000004, + VK_EXPORT_METAL_OBJECT_TYPE_METAL_TEXTURE_BIT_EXT = 0x00000008, + VK_EXPORT_METAL_OBJECT_TYPE_METAL_IOSURFACE_BIT_EXT = 0x00000010, + VK_EXPORT_METAL_OBJECT_TYPE_METAL_SHARED_EVENT_BIT_EXT = 0x00000020, + VK_EXPORT_METAL_OBJECT_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkExportMetalObjectTypeFlagBitsEXT; +typedef VkFlags VkExportMetalObjectTypeFlagsEXT; +typedef struct VkExportMetalObjectCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkExportMetalObjectTypeFlagBitsEXT exportObjectType; +} VkExportMetalObjectCreateInfoEXT; + +typedef struct VkExportMetalObjectsInfoEXT { + VkStructureType sType; + const void* pNext; +} VkExportMetalObjectsInfoEXT; + +typedef struct VkExportMetalDeviceInfoEXT { + VkStructureType sType; + const void* pNext; + MTLDevice_id mtlDevice; +} VkExportMetalDeviceInfoEXT; + +typedef struct VkExportMetalCommandQueueInfoEXT { + VkStructureType sType; + const void* pNext; + VkQueue queue; + MTLCommandQueue_id mtlCommandQueue; +} VkExportMetalCommandQueueInfoEXT; + +typedef struct VkExportMetalBufferInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + MTLBuffer_id mtlBuffer; +} VkExportMetalBufferInfoEXT; + +typedef struct VkImportMetalBufferInfoEXT { + VkStructureType sType; + const void* pNext; + MTLBuffer_id mtlBuffer; +} VkImportMetalBufferInfoEXT; + +typedef struct VkExportMetalTextureInfoEXT { + VkStructureType sType; + const void* pNext; + 
VkImage image; + VkImageView imageView; + VkBufferView bufferView; + VkImageAspectFlagBits plane; + MTLTexture_id mtlTexture; +} VkExportMetalTextureInfoEXT; + +typedef struct VkImportMetalTextureInfoEXT { + VkStructureType sType; + const void* pNext; + VkImageAspectFlagBits plane; + MTLTexture_id mtlTexture; +} VkImportMetalTextureInfoEXT; + +typedef struct VkExportMetalIOSurfaceInfoEXT { + VkStructureType sType; + const void* pNext; + VkImage image; + IOSurfaceRef ioSurface; +} VkExportMetalIOSurfaceInfoEXT; + +typedef struct VkImportMetalIOSurfaceInfoEXT { + VkStructureType sType; + const void* pNext; + IOSurfaceRef ioSurface; +} VkImportMetalIOSurfaceInfoEXT; + +typedef struct VkExportMetalSharedEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkEvent event; + MTLSharedEvent_id mtlSharedEvent; +} VkExportMetalSharedEventInfoEXT; + +typedef struct VkImportMetalSharedEventInfoEXT { + VkStructureType sType; + const void* pNext; + MTLSharedEvent_id mtlSharedEvent; +} VkImportMetalSharedEventInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkExportMetalObjectsEXT)(VkDevice device, VkExportMetalObjectsInfoEXT* pMetalObjectsInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkExportMetalObjectsEXT( + VkDevice device, + VkExportMetalObjectsInfoEXT* pMetalObjectsInfo); +#endif + + +// VK_EXT_external_memory_metal is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_external_memory_metal 1 +#define VK_EXT_EXTERNAL_MEMORY_METAL_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_METAL_EXTENSION_NAME "VK_EXT_external_memory_metal" +typedef struct VkImportMemoryMetalHandleInfoEXT { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + void* handle; +} VkImportMemoryMetalHandleInfoEXT; + +typedef struct VkMemoryMetalHandlePropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryMetalHandlePropertiesEXT; + +typedef struct VkMemoryGetMetalHandleInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetMetalHandleInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryMetalHandleEXT)(VkDevice device, const VkMemoryGetMetalHandleInfoEXT* pGetMetalHandleInfo, void** pHandle); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryMetalHandlePropertiesEXT)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHandle, VkMemoryMetalHandlePropertiesEXT* pMemoryMetalHandleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryMetalHandleEXT( + VkDevice device, + const VkMemoryGetMetalHandleInfoEXT* pGetMetalHandleInfo, + void** pHandle); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryMetalHandlePropertiesEXT( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void* pHandle, + VkMemoryMetalHandlePropertiesEXT* pMemoryMetalHandleProperties); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_raii.hpp b/include/vulkan/vulkan_raii.hpp new file mode 100644 index 00000000..8fdaa853 --- /dev/null +++ b/include/vulkan/vulkan_raii.hpp @@ -0,0 +1,27567 @@ +// Copyright 2015-2025 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
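// A minimal usage sketch of the RAII wrappers this header provides, assuming the Vulkan
// loader library can be found at runtime: vk::raii::Context bootstraps the global entry
// points (via the ContextDispatcher defined below), and handle wrappers such as
// vk::raii::Instance destroy their underlying Vulkan objects automatically.
//
//   vk::raii::Context      context;                                   // loads vkCreateInstance etc.
//   vk::ApplicationInfo    appInfo( "app", 1, "engine", 1, VK_API_VERSION_1_1 );
//   vk::InstanceCreateInfo instanceCreateInfo( {}, &appInfo );
//   vk::raii::Instance     instance( context, instanceCreateInfo );   // destroyed when it goes out of scope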
+ +#ifndef VULKAN_RAII_HPP +#define VULKAN_RAII_HPP + +#include +#if !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) +# include // std::unique_ptr +# include // std::forward +#endif + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) +namespace VULKAN_HPP_NAMESPACE +{ + namespace VULKAN_HPP_RAII_NAMESPACE + { + namespace detail + { + template + class CreateReturnType + { + public: +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + using Type = VULKAN_HPP_EXPECTED; +# else + using Type = T; +# endif + }; + + using PFN_dummy = void ( * )(); + + class ContextDispatcher : public ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderBase + { + public: + ContextDispatcher( PFN_vkGetInstanceProcAddr getProcAddr ) + : vkGetInstanceProcAddr( getProcAddr ) + //=== VK_VERSION_1_0 === + , vkCreateInstance( PFN_vkCreateInstance( getProcAddr( NULL, "vkCreateInstance" ) ) ) + , vkEnumerateInstanceExtensionProperties( + PFN_vkEnumerateInstanceExtensionProperties( getProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ) ) + , vkEnumerateInstanceLayerProperties( PFN_vkEnumerateInstanceLayerProperties( getProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ) ) + //=== VK_VERSION_1_1 === + , vkEnumerateInstanceVersion( PFN_vkEnumerateInstanceVersion( getProcAddr( NULL, "vkEnumerateInstanceVersion" ) ) ) + { + } + + public: + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; + + //=== VK_VERSION_1_0 === + PFN_vkCreateInstance vkCreateInstance = 0; + PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; + PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; + + //=== VK_VERSION_1_1 === + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; + }; + + class InstanceDispatcher : public ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderBase + { + public: + InstanceDispatcher( PFN_vkGetInstanceProcAddr getProcAddr, VkInstance instance ) : vkGetInstanceProcAddr( getProcAddr ) + { + //=== VK_VERSION_1_0 === + vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); + vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); + vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); + vkGetPhysicalDeviceFormatProperties = + PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); + vkGetPhysicalDeviceImageFormatProperties = + PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); + vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); + vkGetPhysicalDeviceQueueFamilyProperties = + PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) ); + vkGetPhysicalDeviceMemoryProperties = + PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); + vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); + vkEnumerateDeviceExtensionProperties = + PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); + vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, 
"vkEnumerateDeviceLayerProperties" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); + + //=== VK_VERSION_1_1 === + vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); + vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); + vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); + vkGetPhysicalDeviceFormatProperties2 = + PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); + vkGetPhysicalDeviceImageFormatProperties2 = + PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); + vkGetPhysicalDeviceQueueFamilyProperties2 = + PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); + vkGetPhysicalDeviceMemoryProperties2 = + PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties2 = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); + vkGetPhysicalDeviceExternalBufferProperties = + PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); + vkGetPhysicalDeviceExternalFenceProperties = + PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); + vkGetPhysicalDeviceExternalSemaphoreProperties = + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); + + //=== VK_VERSION_1_3 === + vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) ); + + //=== VK_KHR_surface === + vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); + vkGetPhysicalDeviceSurfaceSupportKHR = + PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); + vkGetPhysicalDeviceSurfaceFormatsKHR = + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); + vkGetPhysicalDeviceSurfacePresentModesKHR = + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); + + //=== VK_KHR_swapchain === + vkGetPhysicalDevicePresentRectanglesKHR = + PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); + + //=== VK_KHR_display === + vkGetPhysicalDeviceDisplayPropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); + 
vkGetPhysicalDeviceDisplayPlanePropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); + vkGetDisplayPlaneSupportedDisplaysKHR = + PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); + vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); + vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); + vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); + vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); + vkGetPhysicalDeviceXlibPresentationSupportKHR = + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); + vkGetPhysicalDeviceXcbPresentationSupportKHR = + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); + vkGetPhysicalDeviceWaylandPresentationSupportKHR = + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); + vkGetPhysicalDeviceWin32PresentationSupportKHR = + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); + vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); + vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + + //=== VK_KHR_video_queue === + vkGetPhysicalDeviceVideoCapabilitiesKHR = + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); + vkGetPhysicalDeviceVideoFormatPropertiesKHR = + 
PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); + +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + vkCreateStreamDescriptorSurfaceGGP = + PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); +# endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + vkGetPhysicalDeviceExternalImageFormatPropertiesNV = + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + + //=== VK_KHR_get_physical_device_properties2 === + vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); + if ( !vkGetPhysicalDeviceFeatures2 ) + vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; + vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceProperties2 ) + vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; + vkGetPhysicalDeviceFormatProperties2KHR = + PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceFormatProperties2 ) + vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; + vkGetPhysicalDeviceImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceImageFormatProperties2 ) + vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; + vkGetPhysicalDeviceQueueFamilyProperties2KHR = + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) + vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; + vkGetPhysicalDeviceMemoryProperties2KHR = + PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceMemoryProperties2 ) + vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; + vkGetPhysicalDeviceSparseImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) + vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); +# endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_device_group_creation === + vkEnumeratePhysicalDeviceGroupsKHR = + PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); + if ( !vkEnumeratePhysicalDeviceGroups ) + vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; + + //=== VK_KHR_external_memory_capabilities === + vkGetPhysicalDeviceExternalBufferPropertiesKHR = + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( 
instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalBufferProperties ) + vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; + + //=== VK_KHR_external_semaphore_capabilities === + vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) + vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + + //=== VK_EXT_direct_mode_display === + vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); + +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); + vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + vkGetPhysicalDeviceSurfaceCapabilities2EXT = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + + //=== VK_KHR_external_fence_capabilities === + vkGetPhysicalDeviceExternalFencePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalFenceProperties ) + vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; + + //=== VK_KHR_performance_query === + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); + + //=== VK_KHR_get_surface_capabilities2 === + vkGetPhysicalDeviceSurfaceCapabilities2KHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); + vkGetPhysicalDeviceSurfaceFormats2KHR = + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + + //=== VK_KHR_get_display_properties2 === + vkGetPhysicalDeviceDisplayProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); + vkGetPhysicalDeviceDisplayPlaneProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); + vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); + vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) 
); +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); + vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); + vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); + + //=== VK_EXT_sample_locations === + vkGetPhysicalDeviceMultisamplePropertiesEXT = + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); + + //=== VK_EXT_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); + if ( !vkGetPhysicalDeviceCalibrateableTimeDomainsKHR ) + vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = vkGetPhysicalDeviceCalibrateableTimeDomainsEXT; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + vkGetPhysicalDeviceFragmentShadingRatesKHR = + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); + + //=== VK_EXT_tooling_info === + vkGetPhysicalDeviceToolPropertiesEXT = + PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); + if ( !vkGetPhysicalDeviceToolProperties ) + vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT; + + //=== VK_NV_cooperative_matrix === + vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); + + //=== VK_NV_coverage_reduction_mode === + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + vkGetPhysicalDeviceSurfacePresentModes2EXT = + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); + + //=== VK_EXT_acquire_drm_display === + vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) ); + 
vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) ); + + //=== VK_KHR_video_encode_queue === + vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); + vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); + vkGetPhysicalDeviceDirectFBPresentationSupportEXT = + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); + vkGetPhysicalDeviceScreenPresentationSupportQNX = + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_NV_optical_flow === + vkGetPhysicalDeviceOpticalFlowImageFormatsNV = + PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) ); + + //=== VK_NV_cooperative_vector === + vkGetPhysicalDeviceCooperativeVectorPropertiesNV = + PFN_vkGetPhysicalDeviceCooperativeVectorPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeVectorPropertiesNV" ) ); + + //=== VK_KHR_cooperative_matrix === + vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR" ) ); + + //=== VK_KHR_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsKHR" ) ); + + //=== VK_NV_cooperative_matrix2 === + vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV" ) ); + + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) ); + } + + public: + //=== VK_VERSION_1_0 === + PFN_vkDestroyInstance vkDestroyInstance = 0; + PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; + PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; + PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; + PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; + PFN_vkGetPhysicalDeviceMemoryProperties 
vkGetPhysicalDeviceMemoryProperties = 0; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; + PFN_vkCreateDevice vkCreateDevice = 0; + PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; + PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; + + //=== VK_VERSION_1_1 === + PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; + PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; + PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; + PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; + PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; + + //=== VK_VERSION_1_3 === + PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0; + + //=== VK_KHR_surface === + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; + + //=== VK_KHR_swapchain === + PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; + + //=== VK_KHR_display === + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; + PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; + PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; + PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; + PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; + PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; +# else + PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; +# else + PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== 
VK_KHR_wayland_surface === + PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; +# else + PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; +# else + PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; +# else + PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; + PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; + PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; + + //=== VK_KHR_video_queue === + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; + +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; +# else + PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0; +# endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; + + //=== VK_KHR_get_physical_device_properties2 === + PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; + PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; + PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; + +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; +# else + PFN_dummy vkCreateViSurfaceNN_placeholder = 0; +# endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_device_group_creation === + PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; + + //=== VK_KHR_external_memory_capabilities === + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; + + //=== VK_KHR_external_semaphore_capabilities === + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; + + //=== VK_EXT_direct_mode_display === + PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; + +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; + 
PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; +# else + PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0; + PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0; +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; + + //=== VK_KHR_external_fence_capabilities === + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; + + //=== VK_KHR_performance_query === + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; + + //=== VK_KHR_get_surface_capabilities2 === + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; + + //=== VK_KHR_get_display_properties2 === + PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; + PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; + PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; +# else + PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0; +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; +# else + PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0; +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; + PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; + PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; + + //=== VK_EXT_sample_locations === + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; + + //=== VK_EXT_calibrated_timestamps === + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; +# else + PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; +# else + PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; + + //=== VK_EXT_tooling_info === + PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; + + //=== VK_NV_cooperative_matrix === + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; + + //=== VK_NV_coverage_reduction_mode === + PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; + +# if defined( 
VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; +# else + PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; + + //=== VK_EXT_acquire_drm_display === + PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0; + PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0; + + //=== VK_KHR_video_encode_queue === + PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = 0; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; + PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; +# else + PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0; + PFN_dummy vkGetWinrtDisplayNV_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; +# else + PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0; +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0; + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; +# else + PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_NV_optical_flow === + PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0; + + //=== VK_NV_cooperative_vector === + PFN_vkGetPhysicalDeviceCooperativeVectorPropertiesNV vkGetPhysicalDeviceCooperativeVectorPropertiesNV = 0; + + //=== VK_KHR_cooperative_matrix === + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = 0; + + //=== VK_KHR_calibrated_timestamps === + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = 0; + + //=== VK_NV_cooperative_matrix2 === + PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = 0; + + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; + }; + + class DeviceDispatcher : public ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderBase + { + public: + DeviceDispatcher( PFN_vkGetDeviceProcAddr getProcAddr, VkDevice device ) : vkGetDeviceProcAddr( getProcAddr ) + { + //=== VK_VERSION_1_0 === + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) ); + vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) ); + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); + 
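+          // Note (added comment, sketching the rationale): pointers fetched through vkGetDeviceProcAddr are
+          // device-specific and bypass the loader's per-instance dispatch, which is why this dispatcher
+          // re-resolves every device-level entry point against the VkDevice instead of reusing the
+          // instance-level pointers loaded above.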
vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); + vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); + vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); + vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); + vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); + vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); + vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); + vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); + vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); + vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); + vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); + 
vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); + vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); + vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); + vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); + vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); + vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); + vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); + vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); + vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); + vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) 
); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); + vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, 
"vkCmdResolveImage" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); + vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); + + //=== VK_VERSION_1_1 === + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); + vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); + vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); + vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); + vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); + + //=== VK_VERSION_1_2 === + 
vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); + vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); + vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); + vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = + PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + + //=== VK_VERSION_1_3 === + vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) ); + vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) ); + vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) ); + vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) ); + vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) ); + vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) ); + vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) ); + vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) ); + vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) ); + vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) ); + vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) ); + vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) ); + vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) ); + vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) ); + vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) ); + vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) ); + vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) ); + vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) ); + vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) ); + vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, 
"vkCmdSetFrontFace" ) ); + vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) ); + vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) ); + vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) ); + vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) ); + vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) ); + vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) ); + vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) ); + vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) ); + vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) ); + vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) ); + vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) ); + vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) ); + vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); + vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); + vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); + vkGetDeviceImageSparseMemoryRequirements = + PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); + + //=== VK_VERSION_1_4 === + vkCmdSetLineStipple = PFN_vkCmdSetLineStipple( vkGetDeviceProcAddr( device, "vkCmdSetLineStipple" ) ); + vkMapMemory2 = PFN_vkMapMemory2( vkGetDeviceProcAddr( device, "vkMapMemory2" ) ); + vkUnmapMemory2 = PFN_vkUnmapMemory2( vkGetDeviceProcAddr( device, "vkUnmapMemory2" ) ); + vkCmdBindIndexBuffer2 = PFN_vkCmdBindIndexBuffer2( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2" ) ); + vkGetRenderingAreaGranularity = PFN_vkGetRenderingAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularity" ) ); + vkGetDeviceImageSubresourceLayout = PFN_vkGetDeviceImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayout" ) ); + vkGetImageSubresourceLayout2 = PFN_vkGetImageSubresourceLayout2( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2" ) ); + vkCmdPushDescriptorSet = PFN_vkCmdPushDescriptorSet( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet" ) ); + vkCmdPushDescriptorSetWithTemplate = PFN_vkCmdPushDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate" ) ); + vkCmdSetRenderingAttachmentLocations = + PFN_vkCmdSetRenderingAttachmentLocations( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocations" ) ); + vkCmdSetRenderingInputAttachmentIndices = + PFN_vkCmdSetRenderingInputAttachmentIndices( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndices" ) ); + vkCmdBindDescriptorSets2 = 
PFN_vkCmdBindDescriptorSets2( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2" ) ); + vkCmdPushConstants2 = PFN_vkCmdPushConstants2( vkGetDeviceProcAddr( device, "vkCmdPushConstants2" ) ); + vkCmdPushDescriptorSet2 = PFN_vkCmdPushDescriptorSet2( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2" ) ); + vkCmdPushDescriptorSetWithTemplate2 = PFN_vkCmdPushDescriptorSetWithTemplate2( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2" ) ); + vkCopyMemoryToImage = PFN_vkCopyMemoryToImage( vkGetDeviceProcAddr( device, "vkCopyMemoryToImage" ) ); + vkCopyImageToMemory = PFN_vkCopyImageToMemory( vkGetDeviceProcAddr( device, "vkCopyImageToMemory" ) ); + vkCopyImageToImage = PFN_vkCopyImageToImage( vkGetDeviceProcAddr( device, "vkCopyImageToImage" ) ); + vkTransitionImageLayout = PFN_vkTransitionImageLayout( vkGetDeviceProcAddr( device, "vkTransitionImageLayout" ) ); + + //=== VK_KHR_swapchain === + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); + vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) ); + vkGetDeviceGroupPresentCapabilitiesKHR = + PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); + vkGetDeviceGroupSurfacePresentModesKHR = + PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); + + //=== VK_KHR_display_swapchain === + vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); + + //=== VK_EXT_debug_marker === + vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); + vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); + vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); + + //=== VK_KHR_video_queue === + vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) ); + vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); + vkGetVideoSessionMemoryRequirementsKHR = + PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) ); + vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) ); + vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) ); + vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( 
device, "vkUpdateVideoSessionParametersKHR" ) ); + vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) ); + vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); + vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); + vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); + + //=== VK_KHR_video_decode_queue === + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); + + //=== VK_EXT_transform_feedback === + vkCmdBindTransformFeedbackBuffersEXT = + PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); + + //=== VK_NVX_binary_import === + vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) ); + vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) ); + vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) ); + vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) ); + vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); + + //=== VK_NVX_image_view_handle === + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewHandle64NVX = PFN_vkGetImageViewHandle64NVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandle64NVX" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + + //=== VK_AMD_draw_indirect_count === + vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + + //=== VK_AMD_shader_info === + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); + + //=== VK_KHR_dynamic_rendering === + vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) ); + if ( !vkCmdBeginRendering ) + vkCmdBeginRendering = vkCmdBeginRenderingKHR; + vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) ); + if ( !vkCmdEndRendering ) + 
vkCmdEndRendering = vkCmdEndRenderingKHR; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_device_group === + vkGetDeviceGroupPeerMemoryFeaturesKHR = + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + + //=== VK_KHR_maintenance1 === + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); + + //=== VK_KHR_push_descriptor === + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + if ( !vkCmdPushDescriptorSet ) + vkCmdPushDescriptorSet = vkCmdPushDescriptorSetKHR; + vkCmdPushDescriptorSetWithTemplateKHR = + PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + if ( !vkCmdPushDescriptorSetWithTemplate ) + vkCmdPushDescriptorSetWithTemplate = vkCmdPushDescriptorSetWithTemplateKHR; + + //=== VK_EXT_conditional_rendering === + vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + + //=== VK_KHR_descriptor_update_template === + vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, 
"vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; + vkDestroyDescriptorUpdateTemplateKHR = + PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + vkUpdateDescriptorSetWithTemplateKHR = + PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + + //=== VK_NV_clip_space_w_scaling === + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + + //=== VK_EXT_display_control === + vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); + vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); + + //=== VK_GOOGLE_display_timing === + vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); + + //=== VK_EXT_discard_rectangles === + vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEnableEXT" ) ); + vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleModeEXT" ) ); + + //=== VK_EXT_hdr_metadata === + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + + //=== VK_KHR_create_renderpass2 === + vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); + if ( !vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + + //=== VK_KHR_shared_presentable_image === + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = 
PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); + + //=== VK_KHR_performance_query === + vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); + vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); + + //=== VK_EXT_debug_utils === + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + vkGetAndroidHardwareBufferPropertiesANDROID = + PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = + PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) ); + vkGetExecutionGraphPipelineScratchSizeAMDX = + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); + vkGetExecutionGraphPipelineNodeIndexAMDX = + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); + vkCmdInitializeGraphScratchMemoryAMDX = + PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); + vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) ); + vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) ); + vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( 
device, "vkCmdSetSampleLocationsEXT" ) ); + + //=== VK_KHR_get_memory_requirements2 === + vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); + if ( !vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements2KHR = + PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + + //=== VK_KHR_acceleration_structure === + vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = + PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); + vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = + PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = + PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkWriteAccelerationStructuresPropertiesKHR = + PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = + PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = + PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = + PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetAccelerationStructureBuildSizesKHR = + PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) 
); + + //=== VK_KHR_ray_tracing_pipeline === + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); + vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = + PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); + vkGetRayTracingShaderGroupStackSizeKHR = + PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = + PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + + //=== VK_KHR_sampler_ycbcr_conversion === + vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + + //=== VK_KHR_bind_memory2 === + vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; + + //=== VK_EXT_image_drm_format_modifier === + vkGetImageDrmFormatModifierPropertiesEXT = + PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + + //=== VK_EXT_validation_cache === + vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); + vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); + vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + + //=== VK_NV_shading_rate_image === + vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); + vkCmdSetViewportShadingRatePaletteNV = + PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + + //=== VK_NV_ray_tracing === + vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); + vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( 
vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = + PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); + vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); + vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); + vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = + PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); + + //=== VK_KHR_maintenance3 === + vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + + //=== VK_KHR_draw_indirect_count === + vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + + //=== VK_EXT_external_memory_host === + vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); + + //=== VK_AMD_buffer_marker === + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); + + //=== VK_EXT_calibrated_timestamps === + vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); + if ( !vkGetCalibratedTimestampsKHR ) + vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; + + //=== VK_NV_mesh_shader === + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); + vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( 
vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + + //=== VK_NV_scissor_exclusive === + vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorEnableNV" ) ); + vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + + //=== VK_NV_device_diagnostic_checkpoints === + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); + + //=== VK_KHR_timeline_semaphore === + vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + + //=== VK_INTEL_performance_query === + vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); + vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = + PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkAcquirePerformanceConfigurationINTEL = + PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkReleasePerformanceConfigurationINTEL = + PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); + vkQueueSetPerformanceConfigurationINTEL = + PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + + //=== VK_AMD_display_native_hdr === + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + + //=== VK_KHR_fragment_shading_rate === + vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); + + //=== VK_KHR_dynamic_rendering_local_read === + vkCmdSetRenderingAttachmentLocationsKHR = + PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); + if ( !vkCmdSetRenderingAttachmentLocations ) + vkCmdSetRenderingAttachmentLocations = vkCmdSetRenderingAttachmentLocationsKHR; + vkCmdSetRenderingInputAttachmentIndicesKHR = + 
PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); + if ( !vkCmdSetRenderingInputAttachmentIndices ) + vkCmdSetRenderingInputAttachmentIndices = vkCmdSetRenderingInputAttachmentIndicesKHR; + + //=== VK_EXT_buffer_device_address === + vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + + //=== VK_KHR_present_wait === + vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); + vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = + PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_buffer_device_address === + vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetDeviceMemoryOpaqueCaptureAddressKHR = + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + + //=== VK_EXT_line_rasterization === + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); + if ( !vkCmdSetLineStipple ) + vkCmdSetLineStipple = vkCmdSetLineStippleEXT; + + //=== VK_EXT_host_query_reset === + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; + + //=== VK_EXT_extended_dynamic_state === + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); + if ( !vkCmdSetCullMode ) + vkCmdSetCullMode = vkCmdSetCullModeEXT; + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); + if ( !vkCmdSetFrontFace ) + vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; + vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); + if ( !vkCmdSetPrimitiveTopology ) + vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; + vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); + if ( !vkCmdSetViewportWithCount ) + vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; + vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); + if ( 
!vkCmdSetScissorWithCount ) + vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; + vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); + if ( !vkCmdBindVertexBuffers2 ) + vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; + vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); + if ( !vkCmdSetDepthTestEnable ) + vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; + vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); + if ( !vkCmdSetDepthWriteEnable ) + vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; + vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); + if ( !vkCmdSetDepthCompareOp ) + vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; + vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + if ( !vkCmdSetDepthBoundsTestEnable ) + vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; + vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); + if ( !vkCmdSetStencilTestEnable ) + vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); + if ( !vkCmdSetStencilOp ) + vkCmdSetStencilOp = vkCmdSetStencilOpEXT; + + //=== VK_KHR_deferred_host_operations === + vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); + vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = + PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); + vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); + + //=== VK_KHR_pipeline_executable_properties === + vkGetPipelineExecutablePropertiesKHR = + PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = + PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = + PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + + //=== VK_EXT_host_image_copy === + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); + if ( !vkCopyMemoryToImage ) + vkCopyMemoryToImage = vkCopyMemoryToImageEXT; + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); + if ( !vkCopyImageToMemory ) + vkCopyImageToMemory = vkCopyImageToMemoryEXT; + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); + if ( !vkCopyImageToImage ) + vkCopyImageToImage = vkCopyImageToImageEXT; + 
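// The host-image-copy loads below follow the same pattern as the copy functions above:
+ // each EXT pointer is also installed as the non-suffixed core pointer when the core
+ // entry point was not resolved, so the unsuffixed pointers work whenever either variant is available.
+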
vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); + if ( !vkTransitionImageLayout ) + vkTransitionImageLayout = vkTransitionImageLayoutEXT; + vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); + if ( !vkGetImageSubresourceLayout2 ) + vkGetImageSubresourceLayout2 = vkGetImageSubresourceLayout2EXT; + + //=== VK_KHR_map_memory2 === + vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); + if ( !vkMapMemory2 ) + vkMapMemory2 = vkMapMemory2KHR; + vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) ); + if ( !vkUnmapMemory2 ) + vkUnmapMemory2 = vkUnmapMemory2KHR; + + //=== VK_EXT_swapchain_maintenance1 === + vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) ); + + //=== VK_NV_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsNV = + PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); + vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); + + //=== VK_EXT_depth_bias_control === + vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias2EXT" ) ); + + //=== VK_EXT_private_data === + vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); + if ( !vkCreatePrivateDataSlot ) + vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; + vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); + if ( !vkDestroyPrivateDataSlot ) + vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); + if ( !vkSetPrivateData ) + vkSetPrivateData = vkSetPrivateDataEXT; + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); + if ( !vkGetPrivateData ) + vkGetPrivateData = vkGetPrivateDataEXT; + + //=== VK_KHR_video_encode_queue === + vkGetEncodedVideoSessionParametersKHR = + PFN_vkGetEncodedVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkGetEncodedVideoSessionParametersKHR" ) ); + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) ); + vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, "vkGetCudaModuleCacheNV" ) ); + vkCreateCudaFunctionNV = 
PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) ); + vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) ); + vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) ); + vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) ); +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_QCOM_tile_shading === + vkCmdDispatchTileQCOM = PFN_vkCmdDispatchTileQCOM( vkGetDeviceProcAddr( device, "vkCmdDispatchTileQCOM" ) ); + vkCmdBeginPerTileExecutionQCOM = PFN_vkCmdBeginPerTileExecutionQCOM( vkGetDeviceProcAddr( device, "vkCmdBeginPerTileExecutionQCOM" ) ); + vkCmdEndPerTileExecutionQCOM = PFN_vkCmdEndPerTileExecutionQCOM( vkGetDeviceProcAddr( device, "vkCmdEndPerTileExecutionQCOM" ) ); + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) ); +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); + if ( !vkCmdSetEvent2 ) + vkCmdSetEvent2 = vkCmdSetEvent2KHR; + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); + if ( !vkCmdResetEvent2 ) + vkCmdResetEvent2 = vkCmdResetEvent2KHR; + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); + if ( !vkCmdWaitEvents2 ) + vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; + vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); + if ( !vkCmdPipelineBarrier2 ) + vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; + vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); + if ( !vkCmdWriteTimestamp2 ) + vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); + if ( !vkQueueSubmit2 ) + vkQueueSubmit2 = vkQueueSubmit2KHR; + + //=== VK_EXT_descriptor_buffer === + vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) ); + vkGetDescriptorSetLayoutBindingOffsetEXT = + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); + vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) ); + vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) ); + vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplersEXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); + vkGetBufferOpaqueCaptureDescriptorDataEXT = + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageViewOpaqueCaptureDescriptorDataEXT = + 
PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); + vkGetSamplerOpaqueCaptureDescriptorDataEXT = + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); + vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); + + //=== VK_NV_fragment_shading_rate_enums === + vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); + + //=== VK_EXT_mesh_shader === + vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) ); + vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) ); + vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); + + //=== VK_KHR_copy_commands2 === + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); + if ( !vkCmdCopyBuffer2 ) + vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); + if ( !vkCmdCopyImage2 ) + vkCmdCopyImage2 = vkCmdCopyImage2KHR; + vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); + if ( !vkCmdCopyBufferToImage2 ) + vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; + vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); + if ( !vkCmdCopyImageToBuffer2 ) + vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); + if ( !vkCmdBlitImage2 ) + vkCmdBlitImage2 = vkCmdBlitImage2KHR; + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); + if ( !vkCmdResolveImage2 ) + vkCmdResolveImage2 = vkCmdResolveImage2KHR; + + //=== VK_EXT_device_fault === + vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) ); + + //=== VK_EXT_vertex_input_dynamic_state === + vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); + vkGetMemoryZirconHandlePropertiesFUCHSIA = + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + vkImportSemaphoreZirconHandleFUCHSIA = + PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); + vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== 
VK_FUCHSIA_buffer_collection === + vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) ); + vkSetBufferCollectionImageConstraintsFUCHSIA = + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); + vkSetBufferCollectionBufferConstraintsFUCHSIA = + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); + vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) ); + vkGetBufferCollectionPropertiesFUCHSIA = + PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); + vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) ); + + //=== VK_HUAWEI_invocation_mask === + vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) ); + + //=== VK_NV_external_memory_rdma === + vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) ); + + //=== VK_EXT_pipeline_properties === + vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) ); + + //=== VK_EXT_extended_dynamic_state2 === + vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + if ( !vkCmdSetRasterizerDiscardEnable ) + vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; + vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); + if ( !vkCmdSetDepthBiasEnable ) + vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) ); + vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + if ( !vkCmdSetPrimitiveRestartEnable ) + vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; + + //=== VK_EXT_color_write_enable === + vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); + + //=== VK_KHR_ray_tracing_maintenance1 === + vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) ); + + //=== VK_EXT_multi_draw === + vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) ); + vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) ); + + //=== VK_EXT_opacity_micromap === + vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) ); + vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( 
vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) ); + vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) ); + vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) ); + vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) ); + vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) ); + vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) ); + vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) ); + vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) ); + vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) ); + vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) ); + vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) ); + vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) ); + vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) ); + + //=== VK_HUAWEI_cluster_culling_shader === + vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) ); + vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) ); + + //=== VK_EXT_pageable_device_local_memory === + vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) ); + + //=== VK_KHR_maintenance4 === + vkGetDeviceBufferMemoryRequirementsKHR = + PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceBufferMemoryRequirements ) + vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; + vkGetDeviceImageMemoryRequirementsKHR = + PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageMemoryRequirements ) + vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; + vkGetDeviceImageSparseMemoryRequirementsKHR = + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageSparseMemoryRequirements ) + vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; + + //=== VK_VALVE_descriptor_set_host_mapping === + vkGetDescriptorSetLayoutHostMappingInfoVALVE = + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); + vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) ); + + //=== VK_NV_copy_memory_indirect === + vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) ); + vkCmdCopyMemoryToImageIndirectNV = 
PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) ); + + //=== VK_NV_memory_decompression === + vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) ); + vkCmdDecompressMemoryIndirectCountNV = + PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) ); + + //=== VK_NV_device_generated_commands_compute === + vkGetPipelineIndirectMemoryRequirementsNV = + PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); + vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) ); + vkGetPipelineIndirectDeviceAddressNV = + PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) ); + + //=== VK_EXT_extended_dynamic_state3 === + vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) ); + vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) ); + vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) ); + vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) ); + vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) ); + vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) ); + vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) ); + vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) ); + vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) ); + vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) ); + vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) ); + vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) ); + vkCmdSetConservativeRasterizationModeEXT = + PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) ); + vkCmdSetExtraPrimitiveOverestimationSizeEXT = + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) ); + vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) ); + vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) ); + vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) ); + vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) ); + vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( 
vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) ); + vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) ); + vkCmdSetDepthClipNegativeOneToOneEXT = + PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) ); + vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) ); + vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) ); + vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) ); + vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) ); + vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) ); + vkCmdSetCoverageModulationTableEnableNV = + PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) ); + vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) ); + vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) ); + vkCmdSetRepresentativeFragmentTestEnableNV = + PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); + vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) ); + + //=== VK_EXT_shader_module_identifier === + vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) ); + vkGetShaderModuleCreateInfoIdentifierEXT = + PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); + + //=== VK_NV_optical_flow === + vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) ); + vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) ); + vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) ); + vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); + + //=== VK_KHR_maintenance5 === + vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) ); + if ( !vkCmdBindIndexBuffer2 ) + vkCmdBindIndexBuffer2 = vkCmdBindIndexBuffer2KHR; + vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) ); + if ( !vkGetRenderingAreaGranularity ) + vkGetRenderingAreaGranularity = vkGetRenderingAreaGranularityKHR; + vkGetDeviceImageSubresourceLayoutKHR = + PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); + if ( !vkGetDeviceImageSubresourceLayout ) + vkGetDeviceImageSubresourceLayout = vkGetDeviceImageSubresourceLayoutKHR; + vkGetImageSubresourceLayout2KHR = 
PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); + if ( !vkGetImageSubresourceLayout2 ) + vkGetImageSubresourceLayout2 = vkGetImageSubresourceLayout2KHR; + + //=== VK_AMD_anti_lag === + vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) ); + + //=== VK_EXT_shader_object === + vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); + vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); + vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) ); + vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) ); + vkCmdSetDepthClampRangeEXT = PFN_vkCmdSetDepthClampRangeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampRangeEXT" ) ); + + //=== VK_KHR_pipeline_binary === + vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetDeviceProcAddr( device, "vkCreatePipelineBinariesKHR" ) ); + vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetDeviceProcAddr( device, "vkDestroyPipelineBinaryKHR" ) ); + vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetDeviceProcAddr( device, "vkGetPipelineKeyKHR" ) ); + vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetDeviceProcAddr( device, "vkGetPipelineBinaryDataKHR" ) ); + vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetDeviceProcAddr( device, "vkReleaseCapturedPipelineDataKHR" ) ); + + //=== VK_QCOM_tile_properties === + vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); + vkGetDynamicRenderingTilePropertiesQCOM = + PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); + + //=== VK_NV_cooperative_vector === + vkConvertCooperativeVectorMatrixNV = PFN_vkConvertCooperativeVectorMatrixNV( vkGetDeviceProcAddr( device, "vkConvertCooperativeVectorMatrixNV" ) ); + vkCmdConvertCooperativeVectorMatrixNV = + PFN_vkCmdConvertCooperativeVectorMatrixNV( vkGetDeviceProcAddr( device, "vkCmdConvertCooperativeVectorMatrixNV" ) ); + + //=== VK_NV_low_latency2 === + vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetDeviceProcAddr( device, "vkSetLatencySleepModeNV" ) ); + vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetDeviceProcAddr( device, "vkLatencySleepNV" ) ); + vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetDeviceProcAddr( device, "vkSetLatencyMarkerNV" ) ); + vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetDeviceProcAddr( device, "vkGetLatencyTimingsNV" ) ); + vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetDeviceProcAddr( device, "vkQueueNotifyOutOfBandNV" ) ); + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + vkCmdSetAttachmentFeedbackLoopEnableEXT = + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetDeviceProcAddr( device, "vkGetScreenBufferPropertiesQNX" ) ); +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_line_rasterization === + vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleKHR" ) ); + if ( 
!vkCmdSetLineStipple ) + vkCmdSetLineStipple = vkCmdSetLineStippleKHR; + + //=== VK_KHR_calibrated_timestamps === + vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsKHR" ) ); + + //=== VK_KHR_maintenance6 === + vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2KHR" ) ); + if ( !vkCmdBindDescriptorSets2 ) + vkCmdBindDescriptorSets2 = vkCmdBindDescriptorSets2KHR; + vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) ); + if ( !vkCmdPushConstants2 ) + vkCmdPushConstants2 = vkCmdPushConstants2KHR; + vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) ); + if ( !vkCmdPushDescriptorSet2 ) + vkCmdPushDescriptorSet2 = vkCmdPushDescriptorSet2KHR; + vkCmdPushDescriptorSetWithTemplate2KHR = + PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); + if ( !vkCmdPushDescriptorSetWithTemplate2 ) + vkCmdPushDescriptorSetWithTemplate2 = vkCmdPushDescriptorSetWithTemplate2KHR; + vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) ); + + //=== VK_QCOM_tile_memory_heap === + vkCmdBindTileMemoryQCOM = PFN_vkCmdBindTileMemoryQCOM( vkGetDeviceProcAddr( device, "vkCmdBindTileMemoryQCOM" ) ); + + //=== VK_NV_external_compute_queue === + vkCreateExternalComputeQueueNV = PFN_vkCreateExternalComputeQueueNV( vkGetDeviceProcAddr( device, "vkCreateExternalComputeQueueNV" ) ); + vkDestroyExternalComputeQueueNV = PFN_vkDestroyExternalComputeQueueNV( vkGetDeviceProcAddr( device, "vkDestroyExternalComputeQueueNV" ) ); + vkGetExternalComputeQueueDataNV = PFN_vkGetExternalComputeQueueDataNV( vkGetDeviceProcAddr( device, "vkGetExternalComputeQueueDataNV" ) ); + + //=== VK_NV_cluster_acceleration_structure === + vkGetClusterAccelerationStructureBuildSizesNV = + PFN_vkGetClusterAccelerationStructureBuildSizesNV( vkGetDeviceProcAddr( device, "vkGetClusterAccelerationStructureBuildSizesNV" ) ); + vkCmdBuildClusterAccelerationStructureIndirectNV = + PFN_vkCmdBuildClusterAccelerationStructureIndirectNV( vkGetDeviceProcAddr( device, "vkCmdBuildClusterAccelerationStructureIndirectNV" ) ); + + //=== VK_NV_partitioned_acceleration_structure === + vkGetPartitionedAccelerationStructuresBuildSizesNV = + PFN_vkGetPartitionedAccelerationStructuresBuildSizesNV( vkGetDeviceProcAddr( device, "vkGetPartitionedAccelerationStructuresBuildSizesNV" ) ); + vkCmdBuildPartitionedAccelerationStructuresNV = + PFN_vkCmdBuildPartitionedAccelerationStructuresNV( vkGetDeviceProcAddr( device, "vkCmdBuildPartitionedAccelerationStructuresNV" ) ); + + //=== VK_EXT_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsEXT = + PFN_vkGetGeneratedCommandsMemoryRequirementsEXT( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsEXT" ) ); + vkCmdPreprocessGeneratedCommandsEXT = PFN_vkCmdPreprocessGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsEXT" ) ); + vkCmdExecuteGeneratedCommandsEXT = PFN_vkCmdExecuteGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsEXT" ) ); + 
vkCreateIndirectCommandsLayoutEXT = PFN_vkCreateIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutEXT" ) ); + vkDestroyIndirectCommandsLayoutEXT = PFN_vkDestroyIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutEXT" ) ); + vkCreateIndirectExecutionSetEXT = PFN_vkCreateIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectExecutionSetEXT" ) ); + vkDestroyIndirectExecutionSetEXT = PFN_vkDestroyIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectExecutionSetEXT" ) ); + vkUpdateIndirectExecutionSetPipelineEXT = + PFN_vkUpdateIndirectExecutionSetPipelineEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetPipelineEXT" ) ); + vkUpdateIndirectExecutionSetShaderEXT = + PFN_vkUpdateIndirectExecutionSetShaderEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetShaderEXT" ) ); + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + vkGetMemoryMetalHandleEXT = PFN_vkGetMemoryMetalHandleEXT( vkGetDeviceProcAddr( device, "vkGetMemoryMetalHandleEXT" ) ); + vkGetMemoryMetalHandlePropertiesEXT = PFN_vkGetMemoryMetalHandlePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryMetalHandlePropertiesEXT" ) ); +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_fragment_density_map_offset === + vkCmdEndRendering2EXT = PFN_vkCmdEndRendering2EXT( vkGetDeviceProcAddr( device, "vkCmdEndRendering2EXT" ) ); + } + + public: + //=== VK_VERSION_1_0 === + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; + PFN_vkDestroyDevice vkDestroyDevice = 0; + PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; + PFN_vkQueueSubmit vkQueueSubmit = 0; + PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; + PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; + PFN_vkAllocateMemory vkAllocateMemory = 0; + PFN_vkFreeMemory vkFreeMemory = 0; + PFN_vkMapMemory vkMapMemory = 0; + PFN_vkUnmapMemory vkUnmapMemory = 0; + PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; + PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; + PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; + PFN_vkBindBufferMemory vkBindBufferMemory = 0; + PFN_vkBindImageMemory vkBindImageMemory = 0; + PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; + PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; + PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; + PFN_vkQueueBindSparse vkQueueBindSparse = 0; + PFN_vkCreateFence vkCreateFence = 0; + PFN_vkDestroyFence vkDestroyFence = 0; + PFN_vkResetFences vkResetFences = 0; + PFN_vkGetFenceStatus vkGetFenceStatus = 0; + PFN_vkWaitForFences vkWaitForFences = 0; + PFN_vkCreateSemaphore vkCreateSemaphore = 0; + PFN_vkDestroySemaphore vkDestroySemaphore = 0; + PFN_vkCreateEvent vkCreateEvent = 0; + PFN_vkDestroyEvent vkDestroyEvent = 0; + PFN_vkGetEventStatus vkGetEventStatus = 0; + PFN_vkSetEvent vkSetEvent = 0; + PFN_vkResetEvent vkResetEvent = 0; + PFN_vkCreateQueryPool vkCreateQueryPool = 0; + PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; + PFN_vkCreateBuffer vkCreateBuffer = 0; + PFN_vkDestroyBuffer vkDestroyBuffer = 0; + PFN_vkCreateBufferView vkCreateBufferView = 0; + PFN_vkDestroyBufferView vkDestroyBufferView = 0; + PFN_vkCreateImage vkCreateImage = 0; + PFN_vkDestroyImage vkDestroyImage = 0; + PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; + PFN_vkCreateImageView vkCreateImageView = 0; + PFN_vkDestroyImageView 
vkDestroyImageView = 0; + PFN_vkCreateShaderModule vkCreateShaderModule = 0; + PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; + PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; + PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; + PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; + PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; + PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; + PFN_vkDestroyPipeline vkDestroyPipeline = 0; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; + PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; + PFN_vkCreateSampler vkCreateSampler = 0; + PFN_vkDestroySampler vkDestroySampler = 0; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; + PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; + PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; + PFN_vkCreateRenderPass vkCreateRenderPass = 0; + PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; + PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; + PFN_vkCreateCommandPool vkCreateCommandPool = 0; + PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; + PFN_vkResetCommandPool vkResetCommandPool = 0; + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; + PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; + PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; + PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; + PFN_vkCmdSetViewport vkCmdSetViewport = 0; + PFN_vkCmdSetScissor vkCmdSetScissor = 0; + PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; + PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; + PFN_vkCmdDraw vkCmdDraw = 0; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; + PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; + PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; + PFN_vkCmdDispatch vkCmdDispatch = 0; + PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; + PFN_vkCmdCopyImage vkCmdCopyImage = 0; + PFN_vkCmdBlitImage vkCmdBlitImage = 0; + PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; + PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; + PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; + PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; + PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; + PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; + PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; + PFN_vkCmdResolveImage vkCmdResolveImage = 0; + PFN_vkCmdSetEvent vkCmdSetEvent = 0; + PFN_vkCmdResetEvent vkCmdResetEvent = 0; + PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; + 
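+
+    // All function-pointer members of this table are value-initialized to 0 and only end up non-null for entry points
+    // that vkGetDeviceProcAddr actually returns, so a null member simply means "not available on this device".
+    // A minimal caller-side sketch, assuming the dispatcher was initialized from a valid VkDevice by the loading code
+    // above (variable names are illustrative, not part of this header):
+    //   if ( dispatcher.vkCmdBeginRendering )                                 // non-null on devices exposing Vulkan 1.3 dynamic rendering
+    //     dispatcher.vkCmdBeginRendering( commandBuffer, &renderingInfo );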
PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; + PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; + PFN_vkCmdEndQuery vkCmdEndQuery = 0; + PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; + PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; + PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; + PFN_vkCmdPushConstants vkCmdPushConstants = 0; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; + PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; + PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; + + //=== VK_VERSION_1_1 === + PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; + PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; + PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; + PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; + PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; + PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; + PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; + PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; + PFN_vkTrimCommandPool vkTrimCommandPool = 0; + PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; + PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; + PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; + PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; + PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; + PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; + PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; + + //=== VK_VERSION_1_2 === + PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; + PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; + PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; + PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; + PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; + PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; + PFN_vkResetQueryPool vkResetQueryPool = 0; + PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; + PFN_vkWaitSemaphores vkWaitSemaphores = 0; + PFN_vkSignalSemaphore vkSignalSemaphore = 0; + PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; + PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; + + //=== VK_VERSION_1_3 === + PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0; + PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0; + PFN_vkSetPrivateData vkSetPrivateData = 0; + PFN_vkGetPrivateData vkGetPrivateData = 0; + PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0; + PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0; + PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0; + PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0; + PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0; + PFN_vkQueueSubmit2 vkQueueSubmit2 = 0; + PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0; + PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0; + PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0; + PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0; + PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0; + PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0; + PFN_vkCmdBeginRendering vkCmdBeginRendering = 0; + PFN_vkCmdEndRendering vkCmdEndRendering = 0; + PFN_vkCmdSetCullMode vkCmdSetCullMode = 0; + PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0; + PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0; + PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0; + 
PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0; + PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0; + PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0; + PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0; + PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0; + PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0; + PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0; + PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0; + PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0; + PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0; + PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0; + PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0; + PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0; + PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0; + + //=== VK_VERSION_1_4 === + PFN_vkCmdSetLineStipple vkCmdSetLineStipple = 0; + PFN_vkMapMemory2 vkMapMemory2 = 0; + PFN_vkUnmapMemory2 vkUnmapMemory2 = 0; + PFN_vkCmdBindIndexBuffer2 vkCmdBindIndexBuffer2 = 0; + PFN_vkGetRenderingAreaGranularity vkGetRenderingAreaGranularity = 0; + PFN_vkGetDeviceImageSubresourceLayout vkGetDeviceImageSubresourceLayout = 0; + PFN_vkGetImageSubresourceLayout2 vkGetImageSubresourceLayout2 = 0; + PFN_vkCmdPushDescriptorSet vkCmdPushDescriptorSet = 0; + PFN_vkCmdPushDescriptorSetWithTemplate vkCmdPushDescriptorSetWithTemplate = 0; + PFN_vkCmdSetRenderingAttachmentLocations vkCmdSetRenderingAttachmentLocations = 0; + PFN_vkCmdSetRenderingInputAttachmentIndices vkCmdSetRenderingInputAttachmentIndices = 0; + PFN_vkCmdBindDescriptorSets2 vkCmdBindDescriptorSets2 = 0; + PFN_vkCmdPushConstants2 vkCmdPushConstants2 = 0; + PFN_vkCmdPushDescriptorSet2 vkCmdPushDescriptorSet2 = 0; + PFN_vkCmdPushDescriptorSetWithTemplate2 vkCmdPushDescriptorSetWithTemplate2 = 0; + PFN_vkCopyMemoryToImage vkCopyMemoryToImage = 0; + PFN_vkCopyImageToMemory vkCopyImageToMemory = 0; + PFN_vkCopyImageToImage vkCopyImageToImage = 0; + PFN_vkTransitionImageLayout vkTransitionImageLayout = 0; + + //=== VK_KHR_swapchain === + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; + PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; + PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; + PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; + PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; + PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; + + //=== VK_KHR_display_swapchain === + PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; + + //=== VK_EXT_debug_marker === + PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; + PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; + PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; + PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; + PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; + + //=== VK_KHR_video_queue === + PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; + PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; + PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; + PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; + PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0; + 
PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; + PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; + PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; + PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; + PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; + + //=== VK_KHR_video_decode_queue === + PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; + + //=== VK_EXT_transform_feedback === + PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; + PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; + PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; + PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; + PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; + PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; + + //=== VK_NVX_binary_import === + PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; + PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; + PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; + PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; + PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; + + //=== VK_NVX_image_view_handle === + PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetImageViewHandle64NVX vkGetImageViewHandle64NVX = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; + + //=== VK_AMD_draw_indirect_count === + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; + + //=== VK_AMD_shader_info === + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; + + //=== VK_KHR_dynamic_rendering === + PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; + PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; +# else + PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_device_group === + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; + PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; + PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; + + //=== VK_KHR_maintenance1 === + PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; + PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; +# else + PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; + PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; + PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; +# else + PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; + PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; + + //=== VK_KHR_push_descriptor === + 
PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; + + //=== VK_EXT_conditional_rendering === + PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; + PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; + + //=== VK_KHR_descriptor_update_template === + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; + + //=== VK_NV_clip_space_w_scaling === + PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; + + //=== VK_EXT_display_control === + PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; + PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; + PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; + PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; + + //=== VK_GOOGLE_display_timing === + PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; + PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; + + //=== VK_EXT_discard_rectangles === + PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; + PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT = 0; + PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT = 0; + + //=== VK_EXT_hdr_metadata === + PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; + + //=== VK_KHR_create_renderpass2 === + PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; + PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; + PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; + PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; + + //=== VK_KHR_shared_presentable_image === + PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; + PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; +# else + PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; + PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; + + //=== VK_KHR_performance_query === + PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; + PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; + + //=== VK_EXT_debug_utils === + PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; + PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; + PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; + PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; + PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; + PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; + PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; + PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; +# else + PFN_dummy 
vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; + PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0; + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0; + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0; + PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0; + PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0; + PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0; + PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0; +# else + PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0; + PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; + + //=== VK_KHR_get_memory_requirements2 === + PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; + PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; + PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; + + //=== VK_KHR_acceleration_structure === + PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; + PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; + PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; + PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; + PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; + PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; + PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; + PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; + PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; + + //=== VK_KHR_ray_tracing_pipeline === + PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; + PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; + PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; + 
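+
+    // The PFN_dummy placeholders emitted in the #else branches above keep this struct's member list (and therefore its
+    // size and layout) identical whether or not platform defines such as VK_USE_PLATFORM_ANDROID_KHR or
+    // VK_ENABLE_BETA_EXTENSIONS are set. A hedged usage sketch for one of the guarded members (illustrative only;
+    // "dispatcher", "device" and "hardwareBuffer" are assumed to exist in the caller's code):
+    //   #if defined( VK_USE_PLATFORM_ANDROID_KHR )
+    //     VkAndroidHardwareBufferPropertiesANDROID properties{ VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID };
+    //     if ( dispatcher.vkGetAndroidHardwareBufferPropertiesANDROID )
+    //       dispatcher.vkGetAndroidHardwareBufferPropertiesANDROID( device, hardwareBuffer, &properties );
+    //   #endif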
PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; + PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; + PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; + + //=== VK_KHR_sampler_ycbcr_conversion === + PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; + PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; + + //=== VK_KHR_bind_memory2 === + PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; + PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; + + //=== VK_EXT_image_drm_format_modifier === + PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; + + //=== VK_EXT_validation_cache === + PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; + PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; + PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; + PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; + + //=== VK_NV_shading_rate_image === + PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; + PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; + PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; + + //=== VK_NV_ray_tracing === + PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; + PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; + PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; + PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; + PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; + PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; + PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; + PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; + PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; + PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; + PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; + + //=== VK_KHR_maintenance3 === + PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; + + //=== VK_KHR_draw_indirect_count === + PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; + PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; + + //=== VK_EXT_external_memory_host === + PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; + + //=== VK_AMD_buffer_marker === + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; + PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; + + //=== VK_EXT_calibrated_timestamps === + PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; + + //=== VK_NV_mesh_shader === + PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; + PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; + PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; + + //=== VK_NV_scissor_exclusive === + PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV = 0; + PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; + + //=== VK_NV_device_diagnostic_checkpoints === + PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; + PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; + 
PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; + + //=== VK_KHR_timeline_semaphore === + PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; + PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; + PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; + + //=== VK_INTEL_performance_query === + PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; + PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; + PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; + PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; + PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; + PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; + PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; + PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; + PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; + + //=== VK_AMD_display_native_hdr === + PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; + + //=== VK_KHR_fragment_shading_rate === + PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; + + //=== VK_KHR_dynamic_rendering_local_read === + PFN_vkCmdSetRenderingAttachmentLocationsKHR vkCmdSetRenderingAttachmentLocationsKHR = 0; + PFN_vkCmdSetRenderingInputAttachmentIndicesKHR vkCmdSetRenderingInputAttachmentIndicesKHR = 0; + + //=== VK_EXT_buffer_device_address === + PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; + + //=== VK_KHR_present_wait === + PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; + PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; + PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; +# else + PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0; + PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0; + PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_buffer_device_address === + PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; + PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; + + //=== VK_EXT_line_rasterization === + PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; + + //=== VK_EXT_host_query_reset === + PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; + + //=== VK_EXT_extended_dynamic_state === + PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; + PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; + PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0; + PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0; + PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0; + PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0; + PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0; + PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0; + PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0; + PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0; + PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; + PFN_vkCmdSetStencilOpEXT 
vkCmdSetStencilOpEXT = 0; + + //=== VK_KHR_deferred_host_operations === + PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; + PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; + PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; + PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; + PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; + + //=== VK_KHR_pipeline_executable_properties === + PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; + PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; + PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; + + //=== VK_EXT_host_image_copy === + PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0; + PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0; + PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0; + PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0; + PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; + + //=== VK_KHR_map_memory2 === + PFN_vkMapMemory2KHR vkMapMemory2KHR = 0; + PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0; + + //=== VK_EXT_swapchain_maintenance1 === + PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0; + + //=== VK_NV_device_generated_commands === + PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; + PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; + PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; + PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; + PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; + PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; + + //=== VK_EXT_depth_bias_control === + PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT = 0; + + //=== VK_EXT_private_data === + PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; + PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; + PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; + PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; + + //=== VK_KHR_video_encode_queue === + PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR = 0; + PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0; + PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0; + PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0; + PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0; + PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0; + PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0; +# else + PFN_dummy vkCreateCudaModuleNV_placeholder = 0; + PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0; + PFN_dummy vkCreateCudaFunctionNV_placeholder = 0; + PFN_dummy vkDestroyCudaModuleNV_placeholder = 0; + PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0; + PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_QCOM_tile_shading === + PFN_vkCmdDispatchTileQCOM vkCmdDispatchTileQCOM = 0; + PFN_vkCmdBeginPerTileExecutionQCOM vkCmdBeginPerTileExecutionQCOM = 0; + PFN_vkCmdEndPerTileExecutionQCOM vkCmdEndPerTileExecutionQCOM = 0; + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + PFN_vkExportMetalObjectsEXT 
vkExportMetalObjectsEXT = 0; +# else + PFN_dummy vkExportMetalObjectsEXT_placeholder = 0; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; + PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; + PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; + PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; + PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; + PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; + + //=== VK_EXT_descriptor_buffer === + PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0; + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0; + PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0; + PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0; + PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0; + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0; + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0; + + //=== VK_NV_fragment_shading_rate_enums === + PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; + + //=== VK_EXT_mesh_shader === + PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0; + PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0; + PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0; + + //=== VK_KHR_copy_commands2 === + PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; + PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; + PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; + PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; + PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; + PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; + + //=== VK_EXT_device_fault === + PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0; + + //=== VK_EXT_vertex_input_dynamic_state === + PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; +# else + PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; + PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; +# else + PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0; + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0; + 
PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0; + PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0; + PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0; +# else + PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0; + PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0; + + //=== VK_HUAWEI_invocation_mask === + PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0; + + //=== VK_NV_external_memory_rdma === + PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0; + + //=== VK_EXT_pipeline_properties === + PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0; + + //=== VK_EXT_extended_dynamic_state2 === + PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0; + PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0; + PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; + PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; + PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0; + + //=== VK_EXT_color_write_enable === + PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; + + //=== VK_KHR_ray_tracing_maintenance1 === + PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0; + + //=== VK_EXT_multi_draw === + PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0; + PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0; + + //=== VK_EXT_opacity_micromap === + PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0; + PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0; + PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0; + PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0; + PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0; + PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0; + PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0; + PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0; + PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0; + PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0; + PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0; + PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0; + PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0; + PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0; + + //=== VK_HUAWEI_cluster_culling_shader === + PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0; + PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0; + + //=== VK_EXT_pageable_device_local_memory === + PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0; + + //=== VK_KHR_maintenance4 === + PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0; + + //=== VK_VALVE_descriptor_set_host_mapping === + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE 
vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0; + PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0; + + //=== VK_NV_copy_memory_indirect === + PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0; + PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0; + + //=== VK_NV_memory_decompression === + PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0; + PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0; + + //=== VK_NV_device_generated_commands_compute === + PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0; + PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV = 0; + PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0; + + //=== VK_EXT_extended_dynamic_state3 === + PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0; + PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0; + PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0; + PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0; + PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0; + PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0; + PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0; + PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0; + PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0; + PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0; + PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0; + PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0; + PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0; + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0; + PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0; + PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0; + PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0; + PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0; + PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0; + PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0; + PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0; + PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0; + PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0; + PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0; + PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0; + PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0; + PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0; + PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0; + PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0; + PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0; + PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0; + + //=== VK_EXT_shader_module_identifier === + PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0; + PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0; + + //=== VK_NV_optical_flow === + PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0; + PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV 
= 0; + PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0; + PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0; + + //=== VK_KHR_maintenance5 === + PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0; + PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0; + PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0; + PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0; + + //=== VK_AMD_anti_lag === + PFN_vkAntiLagUpdateAMD vkAntiLagUpdateAMD = 0; + + //=== VK_EXT_shader_object === + PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; + PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; + PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0; + PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0; + PFN_vkCmdSetDepthClampRangeEXT vkCmdSetDepthClampRangeEXT = 0; + + //=== VK_KHR_pipeline_binary === + PFN_vkCreatePipelineBinariesKHR vkCreatePipelineBinariesKHR = 0; + PFN_vkDestroyPipelineBinaryKHR vkDestroyPipelineBinaryKHR = 0; + PFN_vkGetPipelineKeyKHR vkGetPipelineKeyKHR = 0; + PFN_vkGetPipelineBinaryDataKHR vkGetPipelineBinaryDataKHR = 0; + PFN_vkReleaseCapturedPipelineDataKHR vkReleaseCapturedPipelineDataKHR = 0; + + //=== VK_QCOM_tile_properties === + PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; + PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; + + //=== VK_NV_cooperative_vector === + PFN_vkConvertCooperativeVectorMatrixNV vkConvertCooperativeVectorMatrixNV = 0; + PFN_vkCmdConvertCooperativeVectorMatrixNV vkCmdConvertCooperativeVectorMatrixNV = 0; + + //=== VK_NV_low_latency2 === + PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV = 0; + PFN_vkLatencySleepNV vkLatencySleepNV = 0; + PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV = 0; + PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV = 0; + PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV = 0; + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT = 0; + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX = 0; +# else + PFN_dummy vkGetScreenBufferPropertiesQNX_placeholder = 0; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_line_rasterization === + PFN_vkCmdSetLineStippleKHR vkCmdSetLineStippleKHR = 0; + + //=== VK_KHR_calibrated_timestamps === + PFN_vkGetCalibratedTimestampsKHR vkGetCalibratedTimestampsKHR = 0; + + //=== VK_KHR_maintenance6 === + PFN_vkCmdBindDescriptorSets2KHR vkCmdBindDescriptorSets2KHR = 0; + PFN_vkCmdPushConstants2KHR vkCmdPushConstants2KHR = 0; + PFN_vkCmdPushDescriptorSet2KHR vkCmdPushDescriptorSet2KHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplate2KHR vkCmdPushDescriptorSetWithTemplate2KHR = 0; + PFN_vkCmdSetDescriptorBufferOffsets2EXT vkCmdSetDescriptorBufferOffsets2EXT = 0; + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = 0; + + //=== VK_QCOM_tile_memory_heap === + PFN_vkCmdBindTileMemoryQCOM vkCmdBindTileMemoryQCOM = 0; + + //=== VK_NV_external_compute_queue === + PFN_vkCreateExternalComputeQueueNV vkCreateExternalComputeQueueNV = 0; + PFN_vkDestroyExternalComputeQueueNV vkDestroyExternalComputeQueueNV = 0; + PFN_vkGetExternalComputeQueueDataNV vkGetExternalComputeQueueDataNV = 0; + + //=== VK_NV_cluster_acceleration_structure === + PFN_vkGetClusterAccelerationStructureBuildSizesNV 
vkGetClusterAccelerationStructureBuildSizesNV = 0; + PFN_vkCmdBuildClusterAccelerationStructureIndirectNV vkCmdBuildClusterAccelerationStructureIndirectNV = 0; + + //=== VK_NV_partitioned_acceleration_structure === + PFN_vkGetPartitionedAccelerationStructuresBuildSizesNV vkGetPartitionedAccelerationStructuresBuildSizesNV = 0; + PFN_vkCmdBuildPartitionedAccelerationStructuresNV vkCmdBuildPartitionedAccelerationStructuresNV = 0; + + //=== VK_EXT_device_generated_commands === + PFN_vkGetGeneratedCommandsMemoryRequirementsEXT vkGetGeneratedCommandsMemoryRequirementsEXT = 0; + PFN_vkCmdPreprocessGeneratedCommandsEXT vkCmdPreprocessGeneratedCommandsEXT = 0; + PFN_vkCmdExecuteGeneratedCommandsEXT vkCmdExecuteGeneratedCommandsEXT = 0; + PFN_vkCreateIndirectCommandsLayoutEXT vkCreateIndirectCommandsLayoutEXT = 0; + PFN_vkDestroyIndirectCommandsLayoutEXT vkDestroyIndirectCommandsLayoutEXT = 0; + PFN_vkCreateIndirectExecutionSetEXT vkCreateIndirectExecutionSetEXT = 0; + PFN_vkDestroyIndirectExecutionSetEXT vkDestroyIndirectExecutionSetEXT = 0; + PFN_vkUpdateIndirectExecutionSetPipelineEXT vkUpdateIndirectExecutionSetPipelineEXT = 0; + PFN_vkUpdateIndirectExecutionSetShaderEXT vkUpdateIndirectExecutionSetShaderEXT = 0; + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + PFN_vkGetMemoryMetalHandleEXT vkGetMemoryMetalHandleEXT = 0; + PFN_vkGetMemoryMetalHandlePropertiesEXT vkGetMemoryMetalHandlePropertiesEXT = 0; +# else + PFN_dummy vkGetMemoryMetalHandleEXT_placeholder = 0; + PFN_dummy vkGetMemoryMetalHandlePropertiesEXT_placeholder = 0; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_fragment_density_map_offset === + PFN_vkCmdEndRendering2EXT vkCmdEndRendering2EXT = 0; + }; + + } // namespace detail + + //======================================== + //=== RAII HANDLE forward declarations === + //======================================== + + //=== VK_VERSION_1_0 === + class Instance; + class PhysicalDevice; + class Device; + class Queue; + class DeviceMemory; + class Fence; + class Semaphore; + class Event; + class QueryPool; + class Buffer; + class BufferView; + class Image; + class ImageView; + class ShaderModule; + class PipelineCache; + class Pipeline; + class PipelineLayout; + class Sampler; + class DescriptorPool; + class DescriptorSet; + class DescriptorSetLayout; + class Framebuffer; + class RenderPass; + class CommandPool; + class CommandBuffer; + + //=== VK_VERSION_1_1 === + class SamplerYcbcrConversion; + class DescriptorUpdateTemplate; + + //=== VK_VERSION_1_3 === + class PrivateDataSlot; + + //=== VK_KHR_surface === + class SurfaceKHR; + + //=== VK_KHR_swapchain === + class SwapchainKHR; + + //=== VK_KHR_display === + class DisplayKHR; + class DisplayModeKHR; + + //=== VK_EXT_debug_report === + class DebugReportCallbackEXT; + + //=== VK_KHR_video_queue === + class VideoSessionKHR; + class VideoSessionParametersKHR; + + //=== VK_NVX_binary_import === + class CuModuleNVX; + class CuFunctionNVX; + + //=== VK_EXT_debug_utils === + class DebugUtilsMessengerEXT; + + //=== VK_KHR_acceleration_structure === + class AccelerationStructureKHR; + + //=== VK_EXT_validation_cache === + class ValidationCacheEXT; + + //=== VK_NV_ray_tracing === + class AccelerationStructureNV; + + //=== VK_INTEL_performance_query === + class PerformanceConfigurationINTEL; + + //=== VK_KHR_deferred_host_operations === + class DeferredOperationKHR; + + //=== VK_NV_device_generated_commands === + class IndirectCommandsLayoutNV; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== 
VK_NV_cuda_kernel_launch === + class CudaModuleNV; + class CudaFunctionNV; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + class BufferCollectionFUCHSIA; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + class MicromapEXT; + + //=== VK_NV_optical_flow === + class OpticalFlowSessionNV; + + //=== VK_EXT_shader_object === + class ShaderEXT; + + //=== VK_KHR_pipeline_binary === + class PipelineBinaryKHR; + + //=== VK_NV_external_compute_queue === + class ExternalComputeQueueNV; + + //=== VK_EXT_device_generated_commands === + class IndirectCommandsLayoutEXT; + class IndirectExecutionSetEXT; + + //==================== + //=== RAII HANDLES === + //==================== + + template + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = false; + }; + + class Context + { + public: +# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL + Context() + : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher( + m_dynamicLoader.getProcAddress( "vkGetInstanceProcAddr" ) ) ) +# else + Context( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) + : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher( getInstanceProcAddr ) ) +# endif + { + } + + ~Context() = default; + + Context( Context const & ) = delete; + Context( Context && rhs ) VULKAN_HPP_NOEXCEPT +# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL + : m_dynamicLoader( std::move( rhs.m_dynamicLoader ) ) + , m_dispatcher( rhs.m_dispatcher.release() ) +# else + : m_dispatcher( rhs.m_dispatcher.release() ) +# endif + { + } + Context & operator=( Context const & ) = delete; + + Context & operator=( Context && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { +# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL + m_dynamicLoader = std::move( rhs.m_dynamicLoader ); +# endif + m_dispatcher.reset( rhs.m_dispatcher.release() ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return &*m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context & rhs ) + { +# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL + std::swap( m_dynamicLoader, rhs.m_dynamicLoader ); +# endif + m_dispatcher.swap( rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkCreateInstance, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateInstance.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkEnumerateInstanceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceExtensionProperties.html + VULKAN_HPP_NODISCARD std::vector + enumerateInstanceExtensionProperties( Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const; + + // wrapper function for command vkEnumerateInstanceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceLayerProperties.html + VULKAN_HPP_NODISCARD std::vector enumerateInstanceLayerProperties() const; + + //=== VK_VERSION_1_1 === + + // wrapper function 
for command vkEnumerateInstanceVersion, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceVersion.html + VULKAN_HPP_NODISCARD uint32_t enumerateInstanceVersion() const; + + private: +# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL + VULKAN_HPP_NAMESPACE::detail::DynamicLoader m_dynamicLoader; +# endif + std::unique_ptr m_dispatcher; + }; + + // wrapper class for handle VkInstance, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkInstance.html + class Instance + { + public: + using CType = VkInstance; + using CppType = VULKAN_HPP_NAMESPACE::Instance; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eInstance; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context const & context, + VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = context.createInstance( createInfo, allocator ); + } +# endif + + Instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context const & context, + VkInstance instance, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( instance ), m_allocator( static_cast( allocator ) ) + { + m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher( context.getDispatcher()->vkGetInstanceProcAddr, + static_cast( m_instance ) ) ); + } + + Instance( std::nullptr_t ) {} + + ~Instance() + { + clear(); + } + + Instance() = delete; + Instance( Instance const & ) = delete; + + Instance( Instance && rhs ) VULKAN_HPP_NOEXCEPT + : m_instance( VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( rhs.m_dispatcher.release() ) + { + } + + Instance & operator=( Instance const & ) = delete; + + Instance & operator=( Instance && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_instance, rhs.m_instance ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Instance const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_instance; + } + + operator VULKAN_HPP_NAMESPACE::Instance() const VULKAN_HPP_NOEXCEPT + { + return m_instance; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_instance ) + { + getDispatcher()->vkDestroyInstance( static_cast( m_instance ), reinterpret_cast( m_allocator ) ); + } + m_instance = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::Instance release() + { + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_instance, nullptr ); + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return &*m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_instance, rhs.m_instance ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkEnumeratePhysicalDevices, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDevices.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + enumeratePhysicalDevices() const; + + // wrapper function for command vkGetInstanceProcAddr, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetInstanceProcAddr.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PFN_VoidFunction getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_1 === + + // wrapper function for command vkEnumeratePhysicalDeviceGroups, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroups.html + VULKAN_HPP_NODISCARD std::vector enumeratePhysicalDeviceGroups() const; + + //=== VK_KHR_display === + + // wrapper function for command vkCreateDisplayPlaneSurfaceKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayPlaneSurfaceKHR.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + // wrapper function for command vkCreateXlibSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXlibSurfaceKHR.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + // wrapper function for command vkCreateXcbSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXcbSurfaceKHR.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + // wrapper function for command vkCreateWaylandSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWaylandSurfaceKHR.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + // wrapper function for command vkCreateAndroidSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAndroidSurfaceKHR.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif 
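+    // Illustrative usage sketch (comment only, not part of the generated interface; assumes the
+    // default vk / vk::raii namespace macros, exceptions enabled, and hypothetical
+    // application-side variable names):
+    //
+    //   vk::raii::Context      context;
+    //   vk::ApplicationInfo    appInfo( "Sample", 1, "Engine", 1, VK_API_VERSION_1_1 );
+    //   vk::InstanceCreateInfo instanceCreateInfo( {}, &appInfo );
+    //   vk::raii::Instance     instance( context, instanceCreateInfo );
+    //
+    // Surface creation then goes through the platform-specific members declared in this class,
+    // e.g. instance.createWin32SurfaceKHR( win32SurfaceCreateInfo ) when VK_USE_PLATFORM_WIN32_KHR is defined.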
/*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + // wrapper function for command vkCreateWin32SurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWin32SurfaceKHR.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + // wrapper function for command vkCreateDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugReportCallbackEXT.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkDebugReportMessageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugReportMessageEXT.html + void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const std::string & layerPrefix, + const std::string & message ) const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + // wrapper function for command vkCreateStreamDescriptorSurfaceGGP, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateStreamDescriptorSurfaceGGP.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif /*VK_USE_PLATFORM_GGP*/ + +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + // wrapper function for command vkCreateViSurfaceNN, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateViSurfaceNN.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_device_group_creation === + + // wrapper function for command vkEnumeratePhysicalDeviceGroupsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroupsKHR.html + VULKAN_HPP_NODISCARD std::vector enumeratePhysicalDeviceGroupsKHR() const; + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + // wrapper function for command vkCreateIOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIOSSurfaceMVK.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# 
if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + // wrapper function for command vkCreateMacOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMacOSSurfaceMVK.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + // wrapper function for command vkCreateDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugUtilsMessengerEXT.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkSubmitDebugUtilsMessageEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSubmitDebugUtilsMessageEXT.html + void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData ) const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + // wrapper function for command vkCreateImagePipeSurfaceFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImagePipeSurfaceFUCHSIA.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + // wrapper function for command vkCreateMetalSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMetalSurfaceEXT.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_headless_surface === + + // wrapper function for command vkCreateHeadlessSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateHeadlessSurfaceEXT.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + // wrapper function for command vkCreateDirectFBSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDirectFBSurfaceEXT.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + 
createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + // wrapper function for command vkCreateScreenSurfaceQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateScreenSurfaceQNX.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + private: + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + std::unique_ptr m_dispatcher; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkPhysicalDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevice.html + class PhysicalDevice + { + public: + using CType = VkPhysicalDevice; + using CppType = VULKAN_HPP_NAMESPACE::PhysicalDevice; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice; + + public: + PhysicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VkPhysicalDevice physicalDevice ) + : m_physicalDevice( physicalDevice ), m_dispatcher( instance.getDispatcher() ) + { + } + + PhysicalDevice( std::nullptr_t ) {} + + ~PhysicalDevice() + { + clear(); + } + + PhysicalDevice() = delete; + + PhysicalDevice( PhysicalDevice const & rhs ) : m_physicalDevice( rhs.m_physicalDevice ), m_dispatcher( rhs.m_dispatcher ) {} + + PhysicalDevice( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT + : m_physicalDevice( VULKAN_HPP_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + PhysicalDevice & operator=( PhysicalDevice const & rhs ) + { + m_physicalDevice = rhs.m_physicalDevice; + m_dispatcher = rhs.m_dispatcher; + return *this; + } + + PhysicalDevice & operator=( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_physicalDevice, rhs.m_physicalDevice ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::PhysicalDevice const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice; + } + + operator VULKAN_HPP_NAMESPACE::PhysicalDevice() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + m_physicalDevice = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::PhysicalDevice release() + { + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_physicalDevice, nullptr ); + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_physicalDevice, rhs.m_physicalDevice ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkGetPhysicalDeviceFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties + getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + // wrapper function for command vkGetPhysicalDeviceProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties.html + VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties() const; + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCreateDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDevice.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkEnumerateDeviceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceExtensionProperties.html + VULKAN_HPP_NODISCARD std::vector + enumerateDeviceExtensionProperties( Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const; + + // wrapper function for command vkEnumerateDeviceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceLayerProperties.html + VULKAN_HPP_NODISCARD std::vector enumerateDeviceLayerProperties() const; + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties.html + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + 
VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling ) const; + + //=== VK_VERSION_1_1 === + + // wrapper function for command vkGetPhysicalDeviceFeatures2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceFeatures2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getFeatures2() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getProperties2() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties2 + getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const; + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const; + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html + VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2() const; + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html + template + VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2() const; + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2.html + VULKAN_HPP_NODISCARD 
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getMemoryProperties2() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2.html + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const; + + // wrapper function for command vkGetPhysicalDeviceExternalBufferProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalBufferProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties + getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceExternalFenceProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalFenceProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties + getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceExternalSemaphoreProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalSemaphoreProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_3 === + + // wrapper function for command vkGetPhysicalDeviceToolProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolProperties.html + VULKAN_HPP_NODISCARD std::vector getToolProperties() const; + + //=== VK_KHR_surface === + + // wrapper function for command vkGetPhysicalDeviceSurfaceSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceSupportKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const; + + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilitiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const; + + // wrapper function for command vkGetPhysicalDeviceSurfaceFormatsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormatsKHR.html + VULKAN_HPP_NODISCARD std::vector + getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModesKHR.html + VULKAN_HPP_NODISCARD std::vector + 
getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_KHR_swapchain === + + // wrapper function for command vkGetPhysicalDevicePresentRectanglesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDevicePresentRectanglesKHR.html + VULKAN_HPP_NODISCARD std::vector getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const; + + //=== VK_KHR_display === + + // wrapper function for command vkGetPhysicalDeviceDisplayPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPropertiesKHR.html + VULKAN_HPP_NODISCARD std::vector getDisplayPropertiesKHR() const; + + // wrapper function for command vkGetPhysicalDeviceDisplayPlanePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlanePropertiesKHR.html + VULKAN_HPP_NODISCARD std::vector getDisplayPlanePropertiesKHR() const; + + // wrapper function for command vkGetDisplayPlaneSupportedDisplaysKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneSupportedDisplaysKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const; + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + // wrapper function for command vkGetPhysicalDeviceXlibPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceXlibPresentationSupportKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 + getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + // wrapper function for command vkGetPhysicalDeviceXcbPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceXcbPresentationSupportKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 + getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + // wrapper function for command vkGetPhysicalDeviceWaylandPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceWaylandPresentationSupportKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + struct wl_display & display ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + // wrapper function for command vkGetPhysicalDeviceWin32PresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceWin32PresentationSupportKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_video_queue === + + // wrapper function for command vkGetPhysicalDeviceVideoCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoCapabilitiesKHR.html 
+ VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR + getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const; + + // wrapper function for command vkGetPhysicalDeviceVideoCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const; + + // wrapper function for command vkGetPhysicalDeviceVideoFormatPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html + VULKAN_HPP_NODISCARD std::vector + getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const; + + // wrapper function for command vkGetPhysicalDeviceVideoFormatPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html + template + VULKAN_HPP_NODISCARD std::vector + getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const; + + //=== VK_NV_external_memory_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalImageFormatPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalImageFormatPropertiesNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV getExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_KHR_get_physical_device_properties2 === + + // wrapper function for command vkGetPhysicalDeviceFeatures2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceFeatures2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getFeatures2KHR() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getProperties2KHR() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; + + 
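+    // Illustrative usage sketch (comment only; assumes the default vk / vk::raii namespace macros,
+    // an existing vk::raii::Instance named 'instance', and exceptions enabled so enumeration
+    // returns a std::vector directly):
+    //
+    //   vk::raii::PhysicalDevice physicalDevice = instance.enumeratePhysicalDevices().front();
+    //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features>();
+    //   bool hasBufferDeviceAddress = chain.get<vk::PhysicalDeviceVulkan12Features>().bufferDeviceAddress;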
// wrapper function for command vkGetPhysicalDeviceFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties2 + getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const; + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const; + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html + VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2KHR() const; + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html + template + VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2KHR() const; + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2KHR.html + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const; + + //=== VK_KHR_external_memory_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalBufferPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalBufferPropertiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties + getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_external_semaphore_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalSemaphorePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalSemaphorePropertiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + getExternalSemaphorePropertiesKHR( const 
VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + // wrapper function for command vkAcquireXlibDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireXlibDisplayEXT.html + void acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const; + + // wrapper function for command vkGetRandROutputDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRandROutputDisplayEXT.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2EXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const; + + //=== VK_KHR_external_fence_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalFencePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalFencePropertiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties + getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_performance_query === + + // wrapper function for command vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR.html + VULKAN_HPP_NODISCARD + std::pair, std::vector> + enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex ) const; + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR.html + VULKAN_HPP_NODISCARD uint32_t getQueueFamilyPerformanceQueryPassesKHR( + const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_get_surface_capabilities2 === + + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR + getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const; + + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const; + + // wrapper function for command vkGetPhysicalDeviceSurfaceFormats2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html + VULKAN_HPP_NODISCARD std::vector + 
getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const; + + // wrapper function for command vkGetPhysicalDeviceSurfaceFormats2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html + template + VULKAN_HPP_NODISCARD std::vector getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const; + + //=== VK_KHR_get_display_properties2 === + + // wrapper function for command vkGetPhysicalDeviceDisplayProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayProperties2KHR.html + VULKAN_HPP_NODISCARD std::vector getDisplayProperties2KHR() const; + + // wrapper function for command vkGetPhysicalDeviceDisplayPlaneProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlaneProperties2KHR.html + VULKAN_HPP_NODISCARD std::vector getDisplayPlaneProperties2KHR() const; + + // wrapper function for command vkGetDisplayPlaneCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneCapabilities2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR + getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo ) const; + + //=== VK_EXT_sample_locations === + + // wrapper function for command vkGetPhysicalDeviceMultisamplePropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMultisamplePropertiesEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT + getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_calibrated_timestamps === + + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsEXT.html + VULKAN_HPP_NODISCARD std::vector getCalibrateableTimeDomainsEXT() const; + + //=== VK_KHR_fragment_shading_rate === + + // wrapper function for command vkGetPhysicalDeviceFragmentShadingRatesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFragmentShadingRatesKHR.html + VULKAN_HPP_NODISCARD std::vector getFragmentShadingRatesKHR() const; + + //=== VK_EXT_tooling_info === + + // wrapper function for command vkGetPhysicalDeviceToolPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolPropertiesEXT.html + VULKAN_HPP_NODISCARD std::vector getToolPropertiesEXT() const; + + //=== VK_NV_cooperative_matrix === + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesNV.html + VULKAN_HPP_NODISCARD std::vector getCooperativeMatrixPropertiesNV() const; + + //=== VK_NV_coverage_reduction_mode === + + // wrapper function for command vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV.html + VULKAN_HPP_NODISCARD std::vector getSupportedFramebufferMixedSamplesCombinationsNV() const; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + // wrapper function for command 
vkGetPhysicalDeviceSurfacePresentModes2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModes2EXT.html + VULKAN_HPP_NODISCARD std::vector + getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_acquire_drm_display === + + // wrapper function for command vkAcquireDrmDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireDrmDisplayEXT.html + void acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const; + + // wrapper function for command vkGetDrmDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDrmDisplayEXT.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + //=== VK_KHR_video_encode_queue === + + // wrapper function for command vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR + getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const; + + // wrapper function for command vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + + // wrapper function for command vkGetWinrtDisplayNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetWinrtDisplayNV.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + getWinrtDisplayNV( uint32_t deviceRelativeId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + // wrapper function for command vkGetPhysicalDeviceDirectFBPresentationSupportEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDirectFBPresentationSupportEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, + IDirectFB & dfb ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + // wrapper function for command vkGetPhysicalDeviceScreenPresentationSupportQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceScreenPresentationSupportQNX.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, + struct _screen_window & window ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_NV_optical_flow === + + // wrapper function for command vkGetPhysicalDeviceOpticalFlowImageFormatsNV, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceOpticalFlowImageFormatsNV.html + VULKAN_HPP_NODISCARD std::vector + getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo ) const; + + //=== VK_NV_cooperative_vector === + + // wrapper function for command vkGetPhysicalDeviceCooperativeVectorPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeVectorPropertiesNV.html + VULKAN_HPP_NODISCARD std::vector getCooperativeVectorPropertiesNV() const; + + //=== VK_KHR_cooperative_matrix === + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR.html + VULKAN_HPP_NODISCARD std::vector getCooperativeMatrixPropertiesKHR() const; + + //=== VK_KHR_calibrated_timestamps === + + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsKHR.html + VULKAN_HPP_NODISCARD std::vector getCalibrateableTimeDomainsKHR() const; + + //=== VK_NV_cooperative_matrix2 === + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV.html + VULKAN_HPP_NODISCARD std::vector + getCooperativeMatrixFlexibleDimensionsPropertiesNV() const; + + private: + VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class PhysicalDevices : public std::vector + { + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + PhysicalDevices( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance ) + { + *this = instance.enumeratePhysicalDevices(); + } +# endif + + PhysicalDevices( std::nullptr_t ) {} + + PhysicalDevices() = delete; + PhysicalDevices( PhysicalDevices const & ) = delete; + PhysicalDevices( PhysicalDevices && rhs ) = default; + PhysicalDevices & operator=( PhysicalDevices const & ) = delete; + PhysicalDevices & operator=( PhysicalDevices && rhs ) = default; + + private: + PhysicalDevices( std::vector && rhs ) + { + std::swap( *this, rhs ); + } + }; + + // wrapper class for handle VkDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDevice.html + class Device + { + public: + using CType = VkDevice; + using CppType = VULKAN_HPP_NAMESPACE::Device; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDevice; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, + VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = physicalDevice.createDevice( createInfo, allocator ); + } +# endif + + Device( 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, + VkDevice device, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ), m_allocator( static_cast( allocator ) ) + { + m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher( physicalDevice.getDispatcher()->vkGetDeviceProcAddr, + static_cast( m_device ) ) ); + } + + Device( std::nullptr_t ) {} + + ~Device() + { + clear(); + } + + Device() = delete; + Device( Device const & ) = delete; + + Device( Device && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( rhs.m_dispatcher.release() ) + { + } + + Device & operator=( Device const & ) = delete; + + Device & operator=( Device && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Device const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_device; + } + + operator VULKAN_HPP_NAMESPACE::Device() const VULKAN_HPP_NOEXCEPT + { + return m_device; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_device ) + { + getDispatcher()->vkDestroyDevice( static_cast( m_device ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::Device release() + { + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_device, nullptr ); + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return &*m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkGetDeviceProcAddr, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceProcAddr.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PFN_VoidFunction getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeviceQueue, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceQueue.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkDeviceWaitIdle, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDeviceWaitIdle.html + void waitIdle() const; + + // wrapper function for command vkAllocateMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateMemory.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkFlushMappedMemoryRanges, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFlushMappedMemoryRanges.html + void 
flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges ) const; + + // wrapper function for command vkInvalidateMappedMemoryRanges, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkInvalidateMappedMemoryRanges.html + void invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges ) const; + + // wrapper function for command vkCreateFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFence.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkResetFences, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetFences.html + void resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences ) const; + + // wrapper function for command vkWaitForFences, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForFences.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout ) const; + + // wrapper function for command vkCreateSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSemaphore.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreateEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateEvent.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreateQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateQueryPool.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreateBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBuffer.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreateBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferView.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreateImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImage.html + VULKAN_HPP_NODISCARD + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreateImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImageView.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreateShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShaderModule.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreatePipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineCache.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + createGraphicsPipelines( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createGraphicsPipeline( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + createComputePipelines( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createComputePipeline( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreatePipelineLayout, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineLayout.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreateSampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSampler.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreateDescriptorSetLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorSetLayout.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreateDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorPool.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkAllocateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateDescriptorSets.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const; + + // wrapper function for command vkUpdateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSets.html + void updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorCopies ) const + VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCreateFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFramebuffer.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreateRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreateCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCommandPool.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createCommandPool( 
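// Sketch of the descriptor path declared above (createDescriptorSetLayout, createDescriptorPool,
// allocateDescriptorSets, updateDescriptorSets), assuming `device`, an existing vk::Buffer
// `buffer` and its size; binding and pool sizes are illustrative.
#include <vulkan/vulkan_raii.hpp>
#include <vector>

inline void bindUniformBuffer( vk::raii::Device const & device, vk::Buffer buffer, vk::DeviceSize size )
{
  vk::DescriptorSetLayoutBinding    binding( 0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eCompute );
  vk::DescriptorSetLayoutCreateInfo layoutInfo( {}, binding );
  vk::raii::DescriptorSetLayout     setLayout = device.createDescriptorSetLayout( layoutInfo );

  vk::DescriptorPoolSize       poolSize( vk::DescriptorType::eUniformBuffer, 1 );
  vk::DescriptorPoolCreateInfo poolInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 1, poolSize );
  vk::raii::DescriptorPool     pool = device.createDescriptorPool( poolInfo );

  vk::DescriptorSetAllocateInfo        allocInfo( *pool, *setLayout );
  std::vector<vk::raii::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );

  vk::DescriptorBufferInfo bufferInfo( buffer, 0, size );
  vk::WriteDescriptorSet   write( *sets.front(), 0, 0, vk::DescriptorType::eUniformBuffer, {}, bufferInfo );
  device.updateDescriptorSets( write, nullptr );
}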
VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkAllocateCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateCommandBuffers.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const; + + //=== VK_VERSION_1_1 === + + // wrapper function for command vkBindBufferMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory2.html + void bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const; + + // wrapper function for command vkBindImageMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory2.html + void bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const; + + // wrapper function for command vkGetDeviceGroupPeerMemoryFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPeerMemoryFeatures.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetImageMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetImageMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetBufferMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetBufferMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetImageSparseMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2.html + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const; + + // wrapper function for command vkGetDeviceQueue2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceQueue2.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // 
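// Sketch for the VK_VERSION_1_1 queries declared above: getBufferMemoryRequirements2 both as a
// plain struct and as a structure chain that additionally returns MemoryDedicatedRequirements.
// `device` and `buffer` are assumed to exist.
#include <vulkan/vulkan_raii.hpp>

inline bool prefersDedicatedAllocation( vk::raii::Device const & device, vk::Buffer buffer )
{
  vk::BufferMemoryRequirementsInfo2 info( buffer );

  // Plain query: just the MemoryRequirements2 struct.
  vk::MemoryRequirements2 reqs = device.getBufferMemoryRequirements2( info );
  (void)reqs.memoryRequirements.size;

  // Chained query: the template overload fills every struct in the chain.
  auto chain = device.getBufferMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>( info );
  return chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation == VK_TRUE;
}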
wrapper function for command vkCreateSamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversion.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreateDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplate.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate>::Type + createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkGetDescriptorSetLayoutSupport, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupport.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDescriptorSetLayoutSupport, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupport.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_2 === + + // wrapper function for command vkCreateRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkWaitSemaphores, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitSemaphores.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const; + + // wrapper function for command vkSignalSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSignalSemaphore.html + void signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const; + + // wrapper function for command vkGetBufferDeviceAddress, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddress.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress + getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetBufferOpaqueCaptureAddress, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureAddress.html + VULKAN_HPP_NODISCARD uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeviceMemoryOpaqueCaptureAddress, see + // 
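// Sketch for the core 1.2 timeline-semaphore entry points declared above (waitSemaphores,
// signalSemaphore). `device` and a timeline vk::Semaphore `timeline` are assumed to exist;
// the counter values are illustrative.
#include <vulkan/vulkan_raii.hpp>
#include <cstdint>

inline void hostTimelineSync( vk::raii::Device const & device, vk::Semaphore timeline )
{
  uint64_t              waitValue = 1;
  vk::SemaphoreWaitInfo waitInfo( {}, timeline, waitValue );
  if ( device.waitSemaphores( waitInfo, UINT64_MAX ) == vk::Result::eTimeout )
  {
    return;  // nothing signalled the requested value in time
  }

  // Advance the timeline from the host side.
  device.signalSemaphore( vk::SemaphoreSignalInfo( timeline, waitValue + 1 ) );
}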
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryOpaqueCaptureAddress.html + VULKAN_HPP_NODISCARD uint64_t + getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_3 === + + // wrapper function for command vkCreatePrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlot.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkSetPrivateData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetPrivateData.html + void setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data ) const; + + // wrapper function for command vkGetPrivateData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPrivateData.html + VULKAN_HPP_NODISCARD uint64_t getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeviceBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirements.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeviceBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirements.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeviceImageMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirements.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeviceImageMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirements.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeviceImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirements.html + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const; + + //=== VK_VERSION_1_4 === + + // wrapper function for command vkMapMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory2.html + VULKAN_HPP_NODISCARD void * mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo ) const; + + // wrapper function for command vkUnmapMemory2, see 
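// Sketch for the core 1.3 "requirements without an object" query declared above
// (vkGetDeviceBufferMemoryRequirements): the buffer is described up front, so no VkBuffer has
// to be created just to size an allocation. `device` is assumed; size and usage are illustrative.
#include <vulkan/vulkan_raii.hpp>

inline vk::MemoryRequirements plannedBufferRequirements( vk::raii::Device const & device )
{
  vk::BufferCreateInfo               bufferInfo( {}, 64 * 1024, vk::BufferUsageFlagBits::eStorageBuffer );
  vk::DeviceBufferMemoryRequirements info( &bufferInfo );
  return device.getBufferMemoryRequirements( info ).memoryRequirements;
}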
https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory2.html + void unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo ) const; + + // wrapper function for command vkGetRenderingAreaGranularity, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderingAreaGranularity.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D + getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeviceImageSubresourceLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayout.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeviceImageSubresourceLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayout.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCopyMemoryToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToImage.html + void copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo ) const; + + // wrapper function for command vkCopyImageToMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToMemory.html + void copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo ) const; + + // wrapper function for command vkCopyImageToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToImage.html + void copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo ) const; + + // wrapper function for command vkTransitionImageLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTransitionImageLayout.html + void transitionImageLayout( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions ) const; + + //=== VK_KHR_swapchain === + + // wrapper function for command vkCreateSwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSwapchainKHR.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkGetDeviceGroupPresentCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPresentCapabilitiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR getGroupPresentCapabilitiesKHR() const; + + // wrapper function for command vkGetDeviceGroupSurfacePresentModesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupSurfacePresentModesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR + getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const; + + // wrapper function for command vkAcquireNextImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireNextImage2KHR.html + VULKAN_HPP_NODISCARD std::pair + 
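// Sketch for the VK_KHR_swapchain wrappers declared above: createSwapchainKHR returns a RAII
// swapchain whose images can then be acquired and presented. `device`, a `vk::SurfaceKHR surface`
// and a surface format / extent chosen elsewhere are assumed; the image count is illustrative.
#include <vulkan/vulkan_raii.hpp>

inline vk::raii::SwapchainKHR makeSwapchain( vk::raii::Device const & device,
                                             vk::SurfaceKHR           surface,
                                             vk::SurfaceFormatKHR     format,
                                             vk::Extent2D             extent )
{
  vk::SwapchainCreateInfoKHR createInfo( {},
                                         surface,
                                         3,  // minImageCount
                                         format.format,
                                         format.colorSpace,
                                         extent,
                                         1,  // imageArrayLayers
                                         vk::ImageUsageFlagBits::eColorAttachment );
  return device.createSwapchainKHR( createInfo );
}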
acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo ) const; + + //=== VK_KHR_display_swapchain === + + // wrapper function for command vkCreateSharedSwapchainsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + // wrapper function for command vkCreateSharedSwapchainsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + //=== VK_EXT_debug_marker === + + // wrapper function for command vkDebugMarkerSetObjectTagEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugMarkerSetObjectTagEXT.html + void debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo ) const; + + // wrapper function for command vkDebugMarkerSetObjectNameEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugMarkerSetObjectNameEXT.html + void debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo ) const; + + //=== VK_KHR_video_queue === + + // wrapper function for command vkCreateVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionKHR.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreateVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionParametersKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR>::Type + createVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + //=== VK_NVX_binary_import === + + // wrapper function for command vkCreateCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuModuleNVX.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreateCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuFunctionNVX.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + 
VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + //=== VK_NVX_image_view_handle === + + // wrapper function for command vkGetImageViewHandleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewHandleNVX.html + VULKAN_HPP_NODISCARD uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetImageViewHandle64NVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewHandle64NVX.html + VULKAN_HPP_NODISCARD uint64_t getImageViewHandle64NVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_device_group === + + // wrapper function for command vkGetDeviceGroupPeerMemoryFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPeerMemoryFeaturesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + + // wrapper function for command vkGetMemoryWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandleKHR.html + VULKAN_HPP_NODISCARD HANDLE getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const; + + // wrapper function for command vkGetMemoryWin32HandlePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandlePropertiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR + getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + + // wrapper function for command vkGetMemoryFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryFdKHR.html + VULKAN_HPP_NODISCARD int getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo ) const; + + // wrapper function for command vkGetMemoryFdPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryFdPropertiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR + getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd ) const; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + + // wrapper function for command vkImportSemaphoreWin32HandleKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreWin32HandleKHR.html + void importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const; + + // wrapper function for command vkGetSemaphoreWin32HandleKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreWin32HandleKHR.html + VULKAN_HPP_NODISCARD HANDLE getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + + // wrapper function for command vkImportSemaphoreFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreFdKHR.html + void importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & 
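// Sketch for the VK_KHR_external_memory_fd wrappers declared above: exporting a POSIX fd from an
// exportable vk::DeviceMemory allocation. `device` and `memory` (allocated with an
// ExportMemoryAllocateInfo chained into its allocation info) are assumed to exist.
#include <vulkan/vulkan_raii.hpp>

inline int exportMemoryFd( vk::raii::Device const & device, vk::DeviceMemory memory )
{
  vk::MemoryGetFdInfoKHR getFdInfo( memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  // The caller owns the returned fd and must eventually close() it or import it elsewhere.
  return device.getMemoryFdKHR( getFdInfo );
}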
importSemaphoreFdInfo ) const; + + // wrapper function for command vkGetSemaphoreFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreFdKHR.html + VULKAN_HPP_NODISCARD int getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo ) const; + + //=== VK_KHR_descriptor_update_template === + + // wrapper function for command vkCreateDescriptorUpdateTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplateKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate>::Type + createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkDestroyDescriptorUpdateTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplateKHR.html + void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_display_control === + + // wrapper function for command vkDisplayPowerControlEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDisplayPowerControlEXT.html + void displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const; + + // wrapper function for command vkRegisterDeviceEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDeviceEventEXT.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkRegisterDisplayEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDisplayEventEXT.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, + VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + //=== VK_EXT_hdr_metadata === + + // wrapper function for command vkSetHdrMetadataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetHdrMetadataEXT.html + void setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & swapchains, + VULKAN_HPP_NAMESPACE::ArrayProxy const & metadata ) const; + + //=== VK_KHR_create_renderpass2 === + + // wrapper function for command vkCreateRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2KHR.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + + // 
wrapper function for command vkImportFenceWin32HandleKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportFenceWin32HandleKHR.html + void importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo ) const; + + // wrapper function for command vkGetFenceWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceWin32HandleKHR.html + VULKAN_HPP_NODISCARD HANDLE getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + + // wrapper function for command vkImportFenceFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportFenceFdKHR.html + void importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo ) const; + + // wrapper function for command vkGetFenceFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceFdKHR.html + VULKAN_HPP_NODISCARD int getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo ) const; + + //=== VK_KHR_performance_query === + + // wrapper function for command vkAcquireProfilingLockKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireProfilingLockKHR.html + void acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info ) const; + + // wrapper function for command vkReleaseProfilingLockKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseProfilingLockKHR.html + void releaseProfilingLockKHR() const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_debug_utils === + + // wrapper function for command vkSetDebugUtilsObjectNameEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDebugUtilsObjectNameEXT.html + void setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo ) const; + + // wrapper function for command vkSetDebugUtilsObjectTagEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDebugUtilsObjectTagEXT.html + void setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo ) const; + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + + // wrapper function for command vkGetAndroidHardwareBufferPropertiesANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAndroidHardwareBufferPropertiesANDROID.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID + getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const; + + // wrapper function for command vkGetAndroidHardwareBufferPropertiesANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAndroidHardwareBufferPropertiesANDROID.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const; + + // wrapper function for command vkGetMemoryAndroidHardwareBufferANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryAndroidHardwareBufferANDROID.html + VULKAN_HPP_NODISCARD struct AHardwareBuffer * + getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== 
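// Sketch for the VK_EXT_debug_utils helpers declared above: giving a buffer a human-readable
// name that shows up in validation messages and capture tools. `device` and `buffer` are
// assumed; the label is arbitrary.
#include <vulkan/vulkan_raii.hpp>
#include <cstdint>

inline void nameBuffer( vk::raii::Device const & device, vk::Buffer buffer, const char * label )
{
  vk::DebugUtilsObjectNameInfoEXT nameInfo( vk::ObjectType::eBuffer,
                                            uint64_t( static_cast<VkBuffer>( buffer ) ),
                                            label );
  device.setDebugUtilsObjectNameEXT( nameInfo );
}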
VK_AMDX_shader_enqueue === + + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + createExecutionGraphPipelinesAMDX( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createExecutionGraphPipelineAMDX( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_get_memory_requirements2 === + + // wrapper function for command vkGetImageMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetImageMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetBufferMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetBufferMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetImageSparseMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2KHR.html + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const; + + //=== VK_KHR_acceleration_structure === + + // wrapper function for command vkCreateAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR>::Type + createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = 
nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkBuildAccelerationStructuresKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBuildAccelerationStructuresKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos ) const; + + // wrapper function for command vkCopyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyAccelerationStructureKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const; + + // wrapper function for command vkCopyAccelerationStructureToMemoryKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyAccelerationStructureToMemoryKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const; + + // wrapper function for command vkCopyMemoryToAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToAccelerationStructureKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const; + + // wrapper function for command vkWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteAccelerationStructuresPropertiesKHR.html + template + VULKAN_HPP_NODISCARD std::vector writeAccelerationStructuresPropertiesKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride ) const; + + // wrapper function for command vkWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteAccelerationStructuresPropertiesKHR.html + template + VULKAN_HPP_NODISCARD DataType writeAccelerationStructuresPropertyKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride ) const; + + // wrapper function for command vkGetAccelerationStructureDeviceAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureDeviceAddressKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress + getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeviceAccelerationStructureCompatibilityKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceAccelerationStructureCompatibilityKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetAccelerationStructureBuildSizesKHR, see + // 
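// Sketch for the VK_KHR_acceleration_structure queries declared above: fetching the device
// address of a built acceleration structure so it can be referenced from instance data or a
// shader binding table. `device` and `accel` are assumed to exist.
#include <vulkan/vulkan_raii.hpp>

inline vk::DeviceAddress accelerationStructureAddress( vk::raii::Device const &     device,
                                                       vk::AccelerationStructureKHR accel )
{
  vk::AccelerationStructureDeviceAddressInfoKHR info( accel );
  return device.getAccelerationStructureAddressKHR( info );
}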
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureBuildSizesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, + VULKAN_HPP_NAMESPACE::ArrayProxy const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const; + + //=== VK_KHR_ray_tracing_pipeline === + + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createRayTracingPipelineKHR( + VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + //=== VK_KHR_sampler_ycbcr_conversion === + + // wrapper function for command vkCreateSamplerYcbcrConversionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversionKHR.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkDestroySamplerYcbcrConversionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversionKHR.html + void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_bind_memory2 === + + // wrapper function for command vkBindBufferMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory2KHR.html + void bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const; + + // wrapper function for command vkBindImageMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory2KHR.html + void bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const; + + //=== VK_EXT_validation_cache === + + // wrapper function for command vkCreateValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateValidationCacheEXT.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, + 
VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + //=== VK_NV_ray_tracing === + + // wrapper function for command vkCreateAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV>::Type + createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkGetAccelerationStructureMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureMemoryRequirementsNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetAccelerationStructureMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureMemoryRequirementsNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkBindAccelerationStructureMemoryNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindAccelerationStructureMemoryNV.html + void bindAccelerationStructureMemoryNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const; + + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + //=== VK_KHR_maintenance3 === + + // wrapper function for command vkGetDescriptorSetLayoutSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupportKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDescriptorSetLayoutSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupportKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getDescriptorSetLayoutSupportKHR( const 
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_external_memory_host === + + // wrapper function for command vkGetMemoryHostPointerPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryHostPointerPropertiesEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT + getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer ) const; + + //=== VK_EXT_calibrated_timestamps === + + // wrapper function for command vkGetCalibratedTimestampsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsEXT.html + VULKAN_HPP_NODISCARD std::pair, uint64_t> + getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos ) const; + + // wrapper function for command vkGetCalibratedTimestampsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsEXT.html + VULKAN_HPP_NODISCARD std::pair + getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo ) const; + + //=== VK_KHR_timeline_semaphore === + + // wrapper function for command vkWaitSemaphoresKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitSemaphoresKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const; + + // wrapper function for command vkSignalSemaphoreKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSignalSemaphoreKHR.html + void signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const; + + //=== VK_INTEL_performance_query === + + // wrapper function for command vkInitializePerformanceApiINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkInitializePerformanceApiINTEL.html + void initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo ) const; + + // wrapper function for command vkUninitializePerformanceApiINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUninitializePerformanceApiINTEL.html + void uninitializePerformanceApiINTEL() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkAcquirePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquirePerformanceConfigurationINTEL.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL>::Type + acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkGetPerformanceParameterINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPerformanceParameterINTEL.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PerformanceValueINTEL + getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter ) const; + + //=== VK_EXT_buffer_device_address === + + // wrapper function for command vkGetBufferDeviceAddressEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddressEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress + getBufferAddressEXT( const 
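// Sketch for the VK_EXT_calibrated_timestamps wrappers declared above: sampling the device and a
// host clock together; the second element of the returned pair is the maximum deviation in
// nanoseconds. `device` is assumed to exist with the extension enabled, and the chosen host time
// domain is illustrative (it must be one the physical device actually reports).
#include <vulkan/vulkan_raii.hpp>
#include <array>
#include <cstdint>

inline void sampleClocks( vk::raii::Device const & device )
{
  std::array<vk::CalibratedTimestampInfoKHR, 2> infos = {
    vk::CalibratedTimestampInfoKHR( vk::TimeDomainKHR::eDevice ),
    vk::CalibratedTimestampInfoKHR( vk::TimeDomainKHR::eClockMonotonic )
  };
  auto [timestamps, maxDeviation] = device.getCalibratedTimestampsEXT( infos );
  (void)timestamps;  // one entry per requested time domain
  (void)maxDeviation;
}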
VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + // wrapper function for command vkGetDeviceGroupSurfacePresentModes2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupSurfacePresentModes2EXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR + getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_buffer_device_address === + + // wrapper function for command vkGetBufferDeviceAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddressKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress + getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetBufferOpaqueCaptureAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureAddressKHR.html + VULKAN_HPP_NODISCARD uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeviceMemoryOpaqueCaptureAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryOpaqueCaptureAddressKHR.html + VULKAN_HPP_NODISCARD uint64_t + getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_deferred_host_operations === + + // wrapper function for command vkCreateDeferredOperationKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDeferredOperationKHR.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + //=== VK_KHR_pipeline_executable_properties === + + // wrapper function for command vkGetPipelineExecutablePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutablePropertiesKHR.html + VULKAN_HPP_NODISCARD std::vector + getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo ) const; + + // wrapper function for command vkGetPipelineExecutableStatisticsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableStatisticsKHR.html + VULKAN_HPP_NODISCARD std::vector + getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const; + + // wrapper function for command vkGetPipelineExecutableInternalRepresentationsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableInternalRepresentationsKHR.html + VULKAN_HPP_NODISCARD std::vector + getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const; + + //=== VK_EXT_host_image_copy === + + // wrapper function for command vkCopyMemoryToImageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToImageEXT.html + void copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo ) const; + + // wrapper function for command vkCopyImageToMemoryEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToMemoryEXT.html + void copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo ) const; + + // wrapper function for command vkCopyImageToImageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToImageEXT.html + void copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo ) const; + + // wrapper function for command vkTransitionImageLayoutEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTransitionImageLayoutEXT.html + void transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions ) const; + + //=== VK_KHR_map_memory2 === + + // wrapper function for command vkMapMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory2KHR.html + VULKAN_HPP_NODISCARD void * mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo ) const; + + // wrapper function for command vkUnmapMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory2KHR.html + void unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo ) const; + + //=== VK_EXT_swapchain_maintenance1 === + + // wrapper function for command vkReleaseSwapchainImagesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseSwapchainImagesEXT.html + void releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo ) const; + + //=== VK_NV_device_generated_commands === + + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCreateIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV>::Type + createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + //=== VK_EXT_private_data === + + // wrapper function for command vkCreatePrivateDataSlotEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlotEXT.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkDestroyPrivateDataSlotEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlotEXT.html + void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkSetPrivateDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetPrivateDataEXT.html + void setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data ) const; + + // wrapper function for command vkGetPrivateDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPrivateDataEXT.html + VULKAN_HPP_NODISCARD uint64_t getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_video_encode_queue === + + // wrapper function for command vkGetEncodedVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEncodedVideoSessionParametersKHR.html + VULKAN_HPP_NODISCARD std::pair> + getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const; + + // wrapper function for command vkGetEncodedVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEncodedVideoSessionParametersKHR.html + template + VULKAN_HPP_NODISCARD std::pair, std::vector> + getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + // wrapper function for command vkCreateCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaModuleNV.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreateCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaFunctionNV.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + + // wrapper function for command vkExportMetalObjectsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkExportMetalObjectsEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkExportMetalObjectsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkExportMetalObjectsEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_descriptor_buffer === + + // wrapper function for command vkGetDescriptorEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorEXT.html + void getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, size_t dataSize, void * pDescriptor ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDescriptorEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorEXT.html + template + VULKAN_HPP_NODISCARD DescriptorType getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetBufferOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD DataType getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info ) const; + + // wrapper function for command vkGetImageOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD DataType getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info ) const; + + // wrapper function for command vkGetImageViewOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD DataType + getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info ) const; + + // wrapper function for command vkGetSamplerOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSamplerOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD DataType getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info ) const; + + // wrapper function for command vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD DataType + getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info ) const; + + //=== VK_EXT_device_fault === + // wrapper function for command vkGetDeviceFaultInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceFaultInfoEXT.html + template + VULKAN_HPP_NODISCARD Result getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts, + VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + + // wrapper function for command vkGetMemoryZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryZirconHandleFUCHSIA.html + VULKAN_HPP_NODISCARD zx_handle_t getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const; + + // wrapper function for command vkGetMemoryZirconHandlePropertiesFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryZirconHandlePropertiesFUCHSIA.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA + 
getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle ) const; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + + // wrapper function for command vkImportSemaphoreZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreZirconHandleFUCHSIA.html + void importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo ) const; + + // wrapper function for command vkGetSemaphoreZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreZirconHandleFUCHSIA.html + VULKAN_HPP_NODISCARD zx_handle_t + getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + // wrapper function for command vkCreateBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferCollectionFUCHSIA.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA>::Type + createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_NV_external_memory_rdma === + + // wrapper function for command vkGetMemoryRemoteAddressNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryRemoteAddressNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::RemoteAddressNV + getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo ) const; + + //=== VK_EXT_pipeline_properties === + + // wrapper function for command vkGetPipelinePropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelinePropertiesEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::BaseOutStructure getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo ) const; + + //=== VK_EXT_opacity_micromap === + + // wrapper function for command vkCreateMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMicromapEXT.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkBuildMicromapsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBuildMicromapsEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos ) const; + + // wrapper function for command vkCopyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMicromapEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const; + + // wrapper 
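The VK_EXT_opacity_micromap entry points follow the usual create/build/copy split. A sketch of creating the micromap object itself, assuming the default vk namespace, a vk::raii::Device named `device` with the extension enabled, and a vk::raii::Buffer named `micromapBuffer` created with micromap-storage usage; the size here is a placeholder for a value normally obtained from getMicromapBuildSizesEXT:

    vk::MicromapCreateInfoEXT micromapInfo{};
    micromapInfo.setBuffer( *micromapBuffer )
                .setOffset( 0 )
                .setSize( 4096 )   // placeholder; use MicromapBuildSizesInfoEXT::micromapSize
                .setType( vk::MicromapTypeEXT::eOpacityMicromap );
    auto micromap = device.createMicromapEXT( micromapInfo );
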
function for command vkCopyMicromapToMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMicromapToMemoryEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const; + + // wrapper function for command vkCopyMemoryToMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToMicromapEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const; + + // wrapper function for command vkWriteMicromapsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteMicromapsPropertiesEXT.html + template + VULKAN_HPP_NODISCARD std::vector + writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride ) const; + + // wrapper function for command vkWriteMicromapsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteMicromapsPropertiesEXT.html + template + VULKAN_HPP_NODISCARD DataType writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride ) const; + + // wrapper function for command vkGetDeviceMicromapCompatibilityEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMicromapCompatibilityEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetMicromapBuildSizesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMicromapBuildSizesEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT + getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance4 === + + // wrapper function for command vkGetDeviceBufferMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirementsKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeviceBufferMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirementsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeviceImageMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirementsKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeviceImageMemoryRequirementsKHR, see + // 
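VK_KHR_maintenance4 lets memory requirements be queried from a create-info structure before the buffer or image exists. A sketch, assuming the default vk namespace and a vk::raii::Device named `device` created with the extension (or Vulkan 1.3) enabled:

    vk::BufferCreateInfo bufferInfo{ {}, 65536, vk::BufferUsageFlagBits::eStorageBuffer };
    vk::MemoryRequirements2 requirements =
        device.getBufferMemoryRequirementsKHR( vk::DeviceBufferMemoryRequirements{ &bufferInfo } );
    // requirements.memoryRequirements.size / .alignment / .memoryTypeBits are now valid
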
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirementsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeviceImageSparseMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirementsKHR.html + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const; + + //=== VK_VALVE_descriptor_set_host_mapping === + + // wrapper function for command vkGetDescriptorSetLayoutHostMappingInfoVALVE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutHostMappingInfoVALVE.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE getDescriptorSetLayoutHostMappingInfoVALVE( + const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_device_generated_commands_compute === + + // wrapper function for command vkGetPipelineIndirectMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectMemoryRequirementsNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPipelineIndirectMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectMemoryRequirementsNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetPipelineIndirectDeviceAddressNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectDeviceAddressNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress + getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_shader_module_identifier === + + // wrapper function for command vkGetShaderModuleCreateInfoIdentifierEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderModuleCreateInfoIdentifierEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT + getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_optical_flow === + + // wrapper function for command vkCreateOpticalFlowSessionNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateOpticalFlowSessionNV.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + //=== VK_KHR_maintenance5 === + + // wrapper function for command vkGetRenderingAreaGranularityKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderingAreaGranularityKHR.html + VULKAN_HPP_NODISCARD 
VULKAN_HPP_NAMESPACE::Extent2D + getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeviceImageSubresourceLayoutKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayoutKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeviceImageSubresourceLayoutKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayoutKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_AMD_anti_lag === + + // wrapper function for command vkAntiLagUpdateAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAntiLagUpdateAMD.html + void antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_shader_object === + + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + //=== VK_KHR_pipeline_binary === + + // wrapper function for command vkCreatePipelineBinariesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineBinariesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + createPipelineBinariesKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + // wrapper function for command vkGetPipelineKeyKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineKeyKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR getPipelineKeyKHR( + Optional pipelineCreateInfo VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const; + + // wrapper function for command vkGetPipelineBinaryDataKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineBinaryDataKHR.html + VULKAN_HPP_NODISCARD std::pair> + getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info ) const; + + // wrapper function for command vkReleaseCapturedPipelineDataKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseCapturedPipelineDataKHR.html + void releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR & info, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_QCOM_tile_properties === + + // wrapper 
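createShadersEXT / createShaderEXT return RAII shader objects directly. A sketch of building a single compute shader object, assuming the default vk namespace, a vk::raii::Device named `device` with VK_EXT_shader_object enabled, and a std::vector<uint32_t> named `spirv` holding compiled SPIR-V:

    vk::ShaderCreateInfoEXT shaderInfo{};
    shaderInfo.setStage( vk::ShaderStageFlagBits::eCompute )
              .setCodeType( vk::ShaderCodeTypeEXT::eSpirv )
              .setCodeSize( spirv.size() * sizeof( uint32_t ) )
              .setPCode( spirv.data() )
              .setPName( "main" );
    auto shader = device.createShaderEXT( shaderInfo );
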
function for command vkGetDynamicRenderingTilePropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDynamicRenderingTilePropertiesQCOM.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::TilePropertiesQCOM + getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_cooperative_vector === + + // wrapper function for command vkConvertCooperativeVectorMatrixNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkConvertCooperativeVectorMatrixNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + convertCooperativeVectorMatrixNV( const VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV & info ) const; + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + + // wrapper function for command vkGetScreenBufferPropertiesQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetScreenBufferPropertiesQNX.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const; + + // wrapper function for command vkGetScreenBufferPropertiesQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetScreenBufferPropertiesQNX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_calibrated_timestamps === + + // wrapper function for command vkGetCalibratedTimestampsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsKHR.html + VULKAN_HPP_NODISCARD std::pair, uint64_t> + getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos ) const; + + // wrapper function for command vkGetCalibratedTimestampsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsKHR.html + VULKAN_HPP_NODISCARD std::pair + getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo ) const; + + //=== VK_NV_external_compute_queue === + + // wrapper function for command vkCreateExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExternalComputeQueueNV.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createExternalComputeQueueNV( VULKAN_HPP_NAMESPACE::ExternalComputeQueueCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + //=== VK_NV_cluster_acceleration_structure === + + // wrapper function for command vkGetClusterAccelerationStructureBuildSizesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetClusterAccelerationStructureBuildSizesNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + getClusterAccelerationStructureBuildSizesNV( const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV & info ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_partitioned_acceleration_structure === + + // wrapper function for command vkGetPartitionedAccelerationStructuresBuildSizesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPartitionedAccelerationStructuresBuildSizesNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR 
getPartitionedAccelerationStructuresBuildSizesNV( + const VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV & info ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_device_generated_commands === + + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCreateIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT>::Type + createIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + // wrapper function for command vkCreateIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectExecutionSetEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectExecutionSetEXT>::Type + createIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + + // wrapper function for command vkGetMemoryMetalHandleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryMetalHandleEXT.html + VULKAN_HPP_NODISCARD void * getMemoryMetalHandleEXT( const VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT & getMetalHandleInfo ) const; + + // wrapper function for command vkGetMemoryMetalHandlePropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryMetalHandlePropertiesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT + getMemoryMetalHandlePropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HandleType const & handle ) const; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + std::unique_ptr m_dispatcher; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkAccelerationStructureKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureKHR.html + class AccelerationStructureKHR + { + public: + using CType = VkAccelerationStructureKHR; + using CppType = 
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + AccelerationStructureKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createAccelerationStructureKHR( createInfo, allocator ); + } +# endif + + AccelerationStructureKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkAccelerationStructureKHR accelerationStructure, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_accelerationStructureKHR( accelerationStructure ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + AccelerationStructureKHR( std::nullptr_t ) {} + + ~AccelerationStructureKHR() + { + clear(); + } + + AccelerationStructureKHR() = delete; + AccelerationStructureKHR( AccelerationStructureKHR const & ) = delete; + + AccelerationStructureKHR( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_accelerationStructureKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructureKHR, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + AccelerationStructureKHR & operator=( AccelerationStructureKHR const & ) = delete; + + AccelerationStructureKHR & operator=( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_accelerationStructureKHR, rhs.m_accelerationStructureKHR ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR; + } + + operator VULKAN_HPP_NAMESPACE::AccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_accelerationStructureKHR ) + { + getDispatcher()->vkDestroyAccelerationStructureKHR( static_cast( m_device ), + static_cast( m_accelerationStructureKHR ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_accelerationStructureKHR = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_accelerationStructureKHR, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( 
m_accelerationStructureKHR, rhs.m_accelerationStructureKHR ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR m_accelerationStructureKHR = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkAccelerationStructureNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureNV.html + class AccelerationStructureNV + { + public: + using CType = VkAccelerationStructureNV; + using CppType = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + AccelerationStructureNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createAccelerationStructureNV( createInfo, allocator ); + } +# endif + + AccelerationStructureNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkAccelerationStructureNV accelerationStructure, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_accelerationStructureNV( accelerationStructure ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + AccelerationStructureNV( std::nullptr_t ) {} + + ~AccelerationStructureNV() + { + clear(); + } + + AccelerationStructureNV() = delete; + AccelerationStructureNV( AccelerationStructureNV const & ) = delete; + + AccelerationStructureNV( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_accelerationStructureNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructureNV, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + AccelerationStructureNV & operator=( AccelerationStructureNV const & ) = delete; + + AccelerationStructureNV & operator=( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_accelerationStructureNV, rhs.m_accelerationStructureNV ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::AccelerationStructureNV const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV; + } + + operator VULKAN_HPP_NAMESPACE::AccelerationStructureNV() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_accelerationStructureNV ) + { + getDispatcher()->vkDestroyAccelerationStructureNV( static_cast( m_device ), + static_cast( m_accelerationStructureNV ), + reinterpret_cast( 
m_allocator ) ); + } + m_device = nullptr; + m_accelerationStructureNV = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::AccelerationStructureNV release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_accelerationStructureNV, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_accelerationStructureNV, rhs.m_accelerationStructureNV ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_NV_ray_tracing === + + // wrapper function for command vkGetAccelerationStructureHandleNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureHandleNV.html + template + VULKAN_HPP_NODISCARD std::vector getHandle( size_t dataSize ) const; + + // wrapper function for command vkGetAccelerationStructureHandleNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureHandleNV.html + template + VULKAN_HPP_NODISCARD DataType getHandle() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureNV m_accelerationStructureNV = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBuffer.html + class Buffer + { + public: + using CType = VkBuffer; + using CppType = VULKAN_HPP_NAMESPACE::Buffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createBuffer( createInfo, allocator ); + } +# endif + + Buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkBuffer buffer, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_buffer( buffer ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + Buffer( std::nullptr_t ) {} + + ~Buffer() + { + clear(); + } + + Buffer() = delete; + Buffer( Buffer const & ) = delete; + + Buffer( Buffer && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_buffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_buffer, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, 
nullptr ) ) + { + } + + Buffer & operator=( Buffer const & ) = delete; + + Buffer & operator=( Buffer && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_buffer, rhs.m_buffer ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Buffer const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_buffer; + } + + operator VULKAN_HPP_NAMESPACE::Buffer() const VULKAN_HPP_NOEXCEPT + { + return m_buffer; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_buffer ) + { + getDispatcher()->vkDestroyBuffer( + static_cast( m_device ), static_cast( m_buffer ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_buffer = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::Buffer release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_buffer, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Buffer & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_buffer, rhs.m_buffer ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkBindBufferMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory.html + void bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const; + + // wrapper function for command vkGetBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getMemoryRequirements() const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Buffer m_buffer = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkBufferCollectionFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionFUCHSIA.html +# if defined( VK_USE_PLATFORM_FUCHSIA ) + class BufferCollectionFUCHSIA + { + public: + using CType = VkBufferCollectionFUCHSIA; + using CppType = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + BufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) 
+ { + *this = device.createBufferCollectionFUCHSIA( createInfo, allocator ); + } +# endif + + BufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkBufferCollectionFUCHSIA collection, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_bufferCollectionFUCHSIA( collection ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + BufferCollectionFUCHSIA( std::nullptr_t ) {} + + ~BufferCollectionFUCHSIA() + { + clear(); + } + + BufferCollectionFUCHSIA() = delete; + BufferCollectionFUCHSIA( BufferCollectionFUCHSIA const & ) = delete; + + BufferCollectionFUCHSIA( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_bufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::exchange( rhs.m_bufferCollectionFUCHSIA, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA const & ) = delete; + + BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_bufferCollectionFUCHSIA, rhs.m_bufferCollectionFUCHSIA ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_bufferCollectionFUCHSIA; + } + + operator VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA() const VULKAN_HPP_NOEXCEPT + { + return m_bufferCollectionFUCHSIA; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_bufferCollectionFUCHSIA ) + { + getDispatcher()->vkDestroyBufferCollectionFUCHSIA( static_cast( m_device ), + static_cast( m_bufferCollectionFUCHSIA ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_bufferCollectionFUCHSIA = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_bufferCollectionFUCHSIA, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_bufferCollectionFUCHSIA, rhs.m_bufferCollectionFUCHSIA ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_FUCHSIA_buffer_collection === + + // wrapper function for command vkSetBufferCollectionImageConstraintsFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetBufferCollectionImageConstraintsFUCHSIA.html + void setImageConstraints( const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo ) const; + + // wrapper function for command vkSetBufferCollectionBufferConstraintsFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetBufferCollectionBufferConstraintsFUCHSIA.html + void 
setBufferConstraints( const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo ) const; + + // wrapper function for command vkGetBufferCollectionPropertiesFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferCollectionPropertiesFUCHSIA.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA getProperties() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA m_bufferCollectionFUCHSIA = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + // wrapper class for handle VkBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferView.html + class BufferView + { + public: + using CType = VkBufferView; + using CppType = VULKAN_HPP_NAMESPACE::BufferView; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + BufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createBufferView( createInfo, allocator ); + } +# endif + + BufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkBufferView bufferView, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_bufferView( bufferView ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + BufferView( std::nullptr_t ) {} + + ~BufferView() + { + clear(); + } + + BufferView() = delete; + BufferView( BufferView const & ) = delete; + + BufferView( BufferView && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_bufferView( VULKAN_HPP_NAMESPACE::exchange( rhs.m_bufferView, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + BufferView & operator=( BufferView const & ) = delete; + + BufferView & operator=( BufferView && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_bufferView, rhs.m_bufferView ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::BufferView const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView; + } + + operator VULKAN_HPP_NAMESPACE::BufferView() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_bufferView ) + { + getDispatcher()->vkDestroyBufferView( + static_cast( m_device ), static_cast( m_bufferView ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_bufferView = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::BufferView release() + { + m_device = nullptr; + 
m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_bufferView, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferView & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_bufferView, rhs.m_bufferView ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::BufferView m_bufferView = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandPool.html + class CommandPool + { + public: + using CType = VkCommandPool; + using CppType = VULKAN_HPP_NAMESPACE::CommandPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + CommandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createCommandPool( createInfo, allocator ); + } +# endif + + CommandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkCommandPool commandPool, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_commandPool( commandPool ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + CommandPool( std::nullptr_t ) {} + + ~CommandPool() + { + clear(); + } + + CommandPool() = delete; + CommandPool( CommandPool const & ) = delete; + + CommandPool( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_commandPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandPool, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + CommandPool & operator=( CommandPool const & ) = delete; + + CommandPool & operator=( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_commandPool, rhs.m_commandPool ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::CommandPool const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool; + } + + operator VULKAN_HPP_NAMESPACE::CommandPool() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_commandPool ) + { + getDispatcher()->vkDestroyCommandPool( + 
static_cast( m_device ), static_cast( m_commandPool ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_commandPool = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::CommandPool release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_commandPool, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandPool & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_commandPool, rhs.m_commandPool ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkResetCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetCommandPool.html + void reset( VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_VERSION_1_1 === + + // wrapper function for command vkTrimCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTrimCommandPool.html + void trim( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance1 === + + // wrapper function for command vkTrimCommandPoolKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTrimCommandPoolKHR.html + void trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBuffer.html + class CommandBuffer + { + public: + using CType = VkCommandBuffer; + using CppType = VULKAN_HPP_NAMESPACE::CommandBuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer; + + public: + CommandBuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkCommandBuffer commandBuffer, VkCommandPool commandPool ) + : m_device( device ), m_commandPool( commandPool ), m_commandBuffer( commandBuffer ), m_dispatcher( device.getDispatcher() ) + { + } + + CommandBuffer( std::nullptr_t ) {} + + ~CommandBuffer() + { + clear(); + } + + CommandBuffer() = delete; + CommandBuffer( CommandBuffer const & ) = delete; + + CommandBuffer( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_commandPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandPool, {} ) ) + , m_commandBuffer( 
VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandBuffer, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + CommandBuffer & operator=( CommandBuffer const & ) = delete; + + CommandBuffer & operator=( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_commandPool, rhs.m_commandPool ); + std::swap( m_commandBuffer, rhs.m_commandBuffer ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::CommandBuffer const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer; + } + + operator VULKAN_HPP_NAMESPACE::CommandBuffer() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_commandBuffer ) + { + getDispatcher()->vkFreeCommandBuffers( + static_cast( m_device ), static_cast( m_commandPool ), 1, reinterpret_cast( &m_commandBuffer ) ); + } + m_device = nullptr; + m_commandPool = nullptr; + m_commandBuffer = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::CommandBuffer release() + { + m_device = nullptr; + m_commandPool = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_commandBuffer, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandBuffer & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_commandPool, rhs.m_commandPool ); + std::swap( m_commandBuffer, rhs.m_commandBuffer ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkBeginCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBeginCommandBuffer.html + void begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo ) const; + + // wrapper function for command vkEndCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEndCommandBuffer.html + void end() const; + + // wrapper function for command vkResetCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetCommandBuffer.html + void reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + // wrapper function for command vkCmdBindPipeline, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindPipeline.html + void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetViewport, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewport.html + void setViewport( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetScissor, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissor.html + void setScissor( uint32_t firstScissor, VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetLineWidth, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineWidth.html + void setLineWidth( float lineWidth 
) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthBias, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBias.html + void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetBlendConstants, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetBlendConstants.html + void setBlendConstants( const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthBounds, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBounds.html + void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetStencilCompareMask, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilCompareMask.html + void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetStencilWriteMask, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilWriteMask.html + void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetStencilReference, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilReference.html + void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBindDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets.html + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorSets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dynamicOffsets ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBindIndexBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindIndexBuffer.html + void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBindVertexBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers.html + void bindVertexBuffers( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets ) const; + + // wrapper function for command vkCmdDraw, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDraw.html + void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawIndexed, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexed.html + void + drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawIndirect, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirect.html + void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const 
VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawIndexedIndirect, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirect.html + void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDispatch, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatch.html + void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDispatchIndirect, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchIndirect.html + void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer.html + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage.html + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBlitImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage.html + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + VULKAN_HPP_NAMESPACE::Filter filter ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyBufferToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage.html + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyImageToBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer.html + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdUpdateBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdUpdateBuffer.html + template + void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::ArrayProxy const & data ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdFillBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdFillBuffer.html + void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + uint32_t data ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdClearColorImage, 
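A minimal recording sketch for the begin/copyBuffer/end wrappers declared above, assuming the conventional vk:: and vk::raii:: namespace aliases and handles created elsewhere; the function and variable names (recordCopy, cmd, src, dst) are illustrative only, not part of the patch.

#include <vulkan/vulkan_raii.hpp>

// Record a one-shot buffer-to-buffer copy into an already-allocated command buffer.
void recordCopy( vk::raii::CommandBuffer const & cmd, vk::Buffer src, vk::Buffer dst, vk::DeviceSize size )
{
  cmd.begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) );
  vk::BufferCopy region( 0, 0, size );  // srcOffset, dstOffset, size
  cmd.copyBuffer( src, dst, region );   // ArrayProxy accepts a single element as well as containers
  cmd.end();
}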
see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearColorImage.html + void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue & color, + VULKAN_HPP_NAMESPACE::ArrayProxy const & ranges ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdClearDepthStencilImage, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearDepthStencilImage.html + void + clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, + VULKAN_HPP_NAMESPACE::ArrayProxy const & ranges ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdClearAttachments, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearAttachments.html + void clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy const & attachments, + VULKAN_HPP_NAMESPACE::ArrayProxy const & rects ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdResolveImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage.html + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent.html + void setEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdResetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetEvent.html + void resetEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdWaitEvents, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents.html + void waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdPipelineBarrier, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier.html + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers ) const + VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBeginQuery, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginQuery.html + void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for 
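The pipelineBarrier wrapper above takes ArrayProxy spans of memory, buffer and image barriers. A hedged sketch of the classic undefined-to-transfer-destination image layout transition, with the same include and alias assumptions as the earlier sketch; names are illustrative.

// Transition an image so it can be used as a transfer destination.
void transitionToTransferDst( vk::raii::CommandBuffer const & cmd, vk::Image image )
{
  vk::ImageMemoryBarrier barrier( {},                                  // srcAccessMask
                                  vk::AccessFlagBits::eTransferWrite,  // dstAccessMask
                                  vk::ImageLayout::eUndefined,
                                  vk::ImageLayout::eTransferDstOptimal,
                                  VK_QUEUE_FAMILY_IGNORED,
                                  VK_QUEUE_FAMILY_IGNORED,
                                  image,
                                  vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
  cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe,
                       vk::PipelineStageFlagBits::eTransfer,
                       {},        // no dependency flags
                       nullptr,   // no global memory barriers
                       nullptr,   // no buffer memory barriers
                       barrier ); // one image memory barrier
}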
command vkCmdEndQuery, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndQuery.html + void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdResetQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetQueryPool.html + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdWriteTimestamp, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteTimestamp.html + void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyQueryPoolResults, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyQueryPoolResults.html + void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdPushConstants, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants.html + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + VULKAN_HPP_NAMESPACE::ArrayProxy const & values ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBeginRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass.html + void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdNextSubpass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass.html + void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdEndRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass.html + void endRenderPass() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdExecuteCommands, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteCommands.html + void executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy const & commandBuffers ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_1 === + + // wrapper function for command vkCmdSetDeviceMask, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDeviceMask.html + void setDeviceMask( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDispatchBase, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchBase.html + void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_2 === + + // wrapper function for command vkCmdDrawIndirectCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectCount.html + void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + 
VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawIndexedIndirectCount, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirectCount.html + void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBeginRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass2.html + void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdNextSubpass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass2.html + void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdEndRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass2.html + void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_3 === + + // wrapper function for command vkCmdSetEvent2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent2.html + void setEvent2( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdResetEvent2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetEvent2.html + void resetEvent2( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdWaitEvents2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents2.html + void waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos ) const; + + // wrapper function for command vkCmdPipelineBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier2.html + void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdWriteTimestamp2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteTimestamp2.html + void + writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer2.html + void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage2.html + void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyBufferToImage2, see 
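pipelineBarrier2 above moves the stage and access masks into the barrier structures themselves, wrapped in a DependencyInfo. A sketch of a color-attachment-to-present transition using the synchronization2 types, under the same assumptions as the earlier sketches:

// Make a rendered swapchain image presentable.
void transitionForPresent( vk::raii::CommandBuffer const & cmd, vk::Image swapchainImage )
{
  vk::ImageMemoryBarrier2 barrier;
  barrier.setSrcStageMask( vk::PipelineStageFlagBits2::eColorAttachmentOutput )
         .setSrcAccessMask( vk::AccessFlagBits2::eColorAttachmentWrite )
         .setDstStageMask( vk::PipelineStageFlagBits2::eBottomOfPipe )
         .setOldLayout( vk::ImageLayout::eColorAttachmentOptimal )
         .setNewLayout( vk::ImageLayout::ePresentSrcKHR )
         .setImage( swapchainImage )
         .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );

  vk::DependencyInfo dependencyInfo;
  dependencyInfo.setImageMemoryBarriers( barrier );
  cmd.pipelineBarrier2( dependencyInfo );
}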
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage2.html + void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyImageToBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer2.html + void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBlitImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage2.html + void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdResolveImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage2.html + void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBeginRendering, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRendering.html + void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdEndRendering, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRendering.html + void endRendering() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetCullMode, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCullMode.html + void setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetFrontFace, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFrontFace.html + void setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetPrimitiveTopology, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveTopology.html + void setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetViewportWithCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWithCount.html + void setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetScissorWithCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissorWithCount.html + void setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBindVertexBuffers2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers2.html + void bindVertexBuffers2( + uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::ArrayProxy const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const; + + // wrapper function for command vkCmdSetDepthTestEnable, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthTestEnable.html + void setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command 
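beginRendering/endRendering above drive dynamic rendering without a render pass or framebuffer object. A sketch that clears one color attachment and issues a single draw; same assumptions as before, and the fullscreen-triangle pipeline is hypothetical.

void recordDynamicRenderingDraw( vk::raii::CommandBuffer const & cmd,
                                 vk::ImageView                   colorView,
                                 vk::Extent2D                    extent,
                                 vk::Pipeline                    pipeline )
{
  vk::RenderingAttachmentInfo colorAttachment;
  colorAttachment.setImageView( colorView )
                 .setImageLayout( vk::ImageLayout::eColorAttachmentOptimal )
                 .setLoadOp( vk::AttachmentLoadOp::eClear )
                 .setStoreOp( vk::AttachmentStoreOp::eStore )
                 .setClearValue( vk::ClearColorValue( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } ) );

  vk::RenderingInfo renderingInfo;
  renderingInfo.setRenderArea( vk::Rect2D( { 0, 0 }, extent ) ).setLayerCount( 1 ).setColorAttachments( colorAttachment );

  cmd.beginRendering( renderingInfo );
  cmd.bindPipeline( vk::PipelineBindPoint::eGraphics, pipeline );
  cmd.draw( 3, 1, 0, 0 );  // fullscreen triangle generated in the vertex shader
  cmd.endRendering();
}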
vkCmdSetDepthWriteEnable, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthWriteEnable.html + void setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthCompareOp, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthCompareOp.html + void setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthBoundsTestEnable, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBoundsTestEnable.html + void setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetStencilTestEnable, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilTestEnable.html + void setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetStencilOp, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilOp.html + void setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetRasterizerDiscardEnable, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizerDiscardEnable.html + void setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthBiasEnable, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBiasEnable.html + void setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetPrimitiveRestartEnable, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveRestartEnable.html + void setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_4 === + + // wrapper function for command vkCmdSetLineStipple, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStipple.html + void setLineStipple( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBindIndexBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindIndexBuffer2.html + void bindIndexBuffer2( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdPushDescriptorSet, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet.html + void pushDescriptorSet( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites ) const + VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdPushDescriptorSetWithTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate.html + template + void pushDescriptorSetWithTemplate( 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetRenderingAttachmentLocations, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingAttachmentLocations.html + void setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetRenderingInputAttachmentIndices, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingInputAttachmentIndices.html + void setRenderingInputAttachmentIndices( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo ) const + VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBindDescriptorSets2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets2.html + void bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdPushConstants2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants2.html + void pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdPushDescriptorSet2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet2.html + void pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdPushDescriptorSetWithTemplate2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate2.html + void pushDescriptorSetWithTemplate2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_debug_marker === + + // wrapper function for command vkCmdDebugMarkerBeginEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerBeginEXT.html + void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDebugMarkerEndEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerEndEXT.html + void debugMarkerEndEXT() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDebugMarkerInsertEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerInsertEXT.html + void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_video_queue === + + // wrapper function for command vkCmdBeginVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginVideoCodingKHR.html + void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdEndVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndVideoCodingKHR.html + void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdControlVideoCodingKHR, see 
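pushDescriptorSet above is the Vulkan 1.4 promotion of VK_KHR_push_descriptor: descriptors are recorded directly into the command buffer instead of being allocated from a pool. A sketch that pushes a single uniform buffer, assuming the pipeline layout's set 0 was created with the push-descriptor flag; names are illustrative.

void pushUniformBuffer( vk::raii::CommandBuffer const & cmd, vk::PipelineLayout layout, vk::Buffer uniformBuffer )
{
  vk::DescriptorBufferInfo bufferInfo( uniformBuffer, 0, VK_WHOLE_SIZE );

  vk::WriteDescriptorSet write;
  write.setDstBinding( 0 )
       .setDescriptorType( vk::DescriptorType::eUniformBuffer )
       .setBufferInfo( bufferInfo );  // dstSet is ignored for push descriptors

  cmd.pushDescriptorSet( vk::PipelineBindPoint::eGraphics, layout, 0 /* set */, write );
}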
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdControlVideoCodingKHR.html + void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_video_decode_queue === + + // wrapper function for command vkCmdDecodeVideoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecodeVideoKHR.html + void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_transform_feedback === + + // wrapper function for command vkCmdBindTransformFeedbackBuffersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindTransformFeedbackBuffersEXT.html + void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const; + + // wrapper function for command vkCmdBeginTransformFeedbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginTransformFeedbackEXT.html + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBufferOffsets + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const; + + // wrapper function for command vkCmdEndTransformFeedbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndTransformFeedbackEXT.html + void endTransformFeedbackEXT( uint32_t firstCounterBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBufferOffsets + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const; + + // wrapper function for command vkCmdBeginQueryIndexedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginQueryIndexedEXT.html + void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + uint32_t index ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdEndQueryIndexedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndQueryIndexedEXT.html + void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawIndirectByteCountEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectByteCountEXT.html + void drawIndirectByteCountEXT( uint32_t instanceCount, + uint32_t firstInstance, + VULKAN_HPP_NAMESPACE::Buffer counterBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NVX_binary_import === + + // wrapper function for command vkCmdCuLaunchKernelNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCuLaunchKernelNVX.html + void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_AMD_draw_indirect_count === + + // wrapper function for command vkCmdDrawIndirectCountAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectCountAMD.html + void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + 
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawIndexedIndirectCountAMD, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirectCountAMD.html + void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_dynamic_rendering === + + // wrapper function for command vkCmdBeginRenderingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderingKHR.html + void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdEndRenderingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderingKHR.html + void endRenderingKHR() const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_device_group === + + // wrapper function for command vkCmdSetDeviceMaskKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDeviceMaskKHR.html + void setDeviceMaskKHR( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDispatchBaseKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchBaseKHR.html + void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) + const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_push_descriptor === + + // wrapper function for command vkCmdPushDescriptorSetKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetKHR.html + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites ) const + VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdPushDescriptorSetWithTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplateKHR.html + template + void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_conditional_rendering === + + // wrapper function for command vkCmdBeginConditionalRenderingEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginConditionalRenderingEXT.html + void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdEndConditionalRenderingEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndConditionalRenderingEXT.html + void endConditionalRenderingEXT() const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_clip_space_w_scaling === + + // wrapper function for command vkCmdSetViewportWScalingNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWScalingNV.html + void setViewportWScalingNV( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportWScalings ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_discard_rectangles === + + // wrapper function for 
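beginConditionalRenderingEXT/endConditionalRenderingEXT above bracket commands whose execution depends on a 32-bit predicate read from a buffer (VK_EXT_conditional_rendering). A sketch with illustrative names:

void drawIfVisible( vk::raii::CommandBuffer const & cmd, vk::Buffer predicateBuffer, uint32_t indexCount )
{
  // The 32-bit value at offset 0 of predicateBuffer decides whether the draw below executes.
  vk::ConditionalRenderingBeginInfoEXT beginInfo( predicateBuffer, 0 );
  cmd.beginConditionalRenderingEXT( beginInfo );
  cmd.drawIndexed( indexCount, 1, 0, 0, 0 );
  cmd.endConditionalRenderingEXT();
}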
command vkCmdSetDiscardRectangleEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDiscardRectangleEXT.html + void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + VULKAN_HPP_NAMESPACE::ArrayProxy const & discardRectangles ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDiscardRectangleEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDiscardRectangleEnableEXT.html + void setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDiscardRectangleModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDiscardRectangleModeEXT.html + void setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_create_renderpass2 === + + // wrapper function for command vkCmdBeginRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass2KHR.html + void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdNextSubpass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass2KHR.html + void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdEndRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass2KHR.html + void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_debug_utils === + + // wrapper function for command vkCmdBeginDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginDebugUtilsLabelEXT.html + void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdEndDebugUtilsLabelEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndDebugUtilsLabelEXT.html + void endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdInsertDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdInsertDebugUtilsLabelEXT.html + void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + // wrapper function for command vkCmdInitializeGraphScratchMemoryAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdInitializeGraphScratchMemoryAMDX.html + void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDispatchGraphAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphAMDX.html + void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper 
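The VK_EXT_debug_utils label wrappers above annotate ranges of a command buffer for tools such as RenderDoc and for validation-layer output. A sketch, assuming the extension is enabled on the instance; the label strings and function name are illustrative.

void recordLabelledPass( vk::raii::CommandBuffer const & cmd )
{
  cmd.beginDebugUtilsLabelEXT( vk::DebugUtilsLabelEXT( "shadow pass", { { 0.2f, 0.2f, 0.8f, 1.0f } } ) );
  // ... record the commands that make up the pass ...
  cmd.insertDebugUtilsLabelEXT( vk::DebugUtilsLabelEXT( "shadow pass done" ) );
  cmd.endDebugUtilsLabelEXT();
}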
function for command vkCmdDispatchGraphIndirectAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphIndirectAMDX.html + void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDispatchGraphIndirectCountAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphIndirectCountAMDX.html + void dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + + // wrapper function for command vkCmdSetSampleLocationsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleLocationsEXT.html + void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_acceleration_structure === + + // wrapper function for command vkCmdBuildAccelerationStructuresKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructuresKHR.html + void buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos ) const; + + // wrapper function for command vkCmdBuildAccelerationStructuresIndirectKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructuresIndirectKHR.html + void buildAccelerationStructuresIndirectKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectDeviceAddresses, + VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectStrides, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pMaxPrimitiveCounts ) const; + + // wrapper function for command vkCmdCopyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureKHR.html + void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyAccelerationStructureToMemoryKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureToMemoryKHR.html + void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyMemoryToAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToAccelerationStructureKHR.html + void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteAccelerationStructuresPropertiesKHR.html + void writeAccelerationStructuresPropertiesKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_ray_tracing_pipeline === + + // wrapper function for command 
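writeAccelerationStructuresPropertiesKHR above is typically used to read back the compacted size of a freshly built acceleration structure. A sketch, assuming queryPool was created with queryType eAccelerationStructureCompactedSizeKHR and that the build has already been made visible with a suitable barrier; names are illustrative.

void queryCompactedSize( vk::raii::CommandBuffer const & cmd, vk::AccelerationStructureKHR blas, vk::QueryPool queryPool )
{
  cmd.resetQueryPool( queryPool, 0, 1 );
  cmd.writeAccelerationStructuresPropertiesKHR( blas, vk::QueryType::eAccelerationStructureCompactedSizeKHR, queryPool, 0 );
}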
vkCmdTraceRaysKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysKHR.html + void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdTraceRaysIndirectKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysIndirectKHR.html + void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetRayTracingPipelineStackSizeKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRayTracingPipelineStackSizeKHR.html + void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_shading_rate_image === + + // wrapper function for command vkCmdBindShadingRateImageNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindShadingRateImageNV.html + void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetViewportShadingRatePaletteNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportShadingRatePaletteNV.html + void setViewportShadingRatePaletteNV( + uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shadingRatePalettes ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetCoarseSampleOrderNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoarseSampleOrderNV.html + void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + VULKAN_HPP_NAMESPACE::ArrayProxy const & customSampleOrders ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_ray_tracing === + + // wrapper function for command vkCmdBuildAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructureNV.html + void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureNV.html + void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode ) 
const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdTraceRaysNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysNV.html + void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdWriteAccelerationStructuresPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteAccelerationStructuresPropertiesNV.html + void writeAccelerationStructuresPropertiesNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_draw_indirect_count === + + // wrapper function for command vkCmdDrawIndirectCountKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectCountKHR.html + void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawIndexedIndirectCountKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirectCountKHR.html + void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_AMD_buffer_marker === + + // wrapper function for command vkCmdWriteBufferMarkerAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteBufferMarkerAMD.html + void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdWriteBufferMarker2AMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteBufferMarker2AMD.html + void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_mesh_shader === + + // wrapper function for command vkCmdDrawMeshTasksNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksNV.html + void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawMeshTasksIndirectNV, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectNV.html + void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawMeshTasksIndirectCountNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectCountNV.html + void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_scissor_exclusive === + + // wrapper function for command vkCmdSetExclusiveScissorEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExclusiveScissorEnableNV.html + void setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, + VULKAN_HPP_NAMESPACE::ArrayProxy const & exclusiveScissorEnables ) const + VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetExclusiveScissorNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExclusiveScissorNV.html + void setExclusiveScissorNV( uint32_t firstExclusiveScissor, + VULKAN_HPP_NAMESPACE::ArrayProxy const & exclusiveScissors ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_device_diagnostic_checkpoints === + + // wrapper function for command vkCmdSetCheckpointNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCheckpointNV.html + template + void setCheckpointNV( CheckpointMarkerType const & checkpointMarker ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_INTEL_performance_query === + + // wrapper function for command vkCmdSetPerformanceMarkerINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceMarkerINTEL.html + void setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo ) const; + + // wrapper function for command vkCmdSetPerformanceStreamMarkerINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceStreamMarkerINTEL.html + void setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo ) const; + + // wrapper function for command vkCmdSetPerformanceOverrideINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceOverrideINTEL.html + void setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo ) const; + + //=== VK_KHR_fragment_shading_rate === + + // wrapper function for command vkCmdSetFragmentShadingRateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFragmentShadingRateKHR.html + void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_dynamic_rendering_local_read === + + // wrapper function for command vkCmdSetRenderingAttachmentLocationsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingAttachmentLocationsKHR.html + void setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetRenderingInputAttachmentIndicesKHR, see + // 
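setFragmentShadingRateKHR above sets the per-draw shading rate; the two combiner ops control how the pipeline and primitive rates are merged with it. A sketch requesting 2x2 coarse shading, assuming VK_KHR_fragment_shading_rate is enabled:

void useCoarseShading( vk::raii::CommandBuffer const & cmd )
{
  vk::FragmentShadingRateCombinerOpKHR combinerOps[2] = { vk::FragmentShadingRateCombinerOpKHR::eKeep,
                                                          vk::FragmentShadingRateCombinerOpKHR::eKeep };
  cmd.setFragmentShadingRateKHR( vk::Extent2D( 2, 2 ), combinerOps );
}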
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingInputAttachmentIndicesKHR.html + void setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_line_rasterization === + + // wrapper function for command vkCmdSetLineStippleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStippleEXT.html + void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_extended_dynamic_state === + + // wrapper function for command vkCmdSetCullModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCullModeEXT.html + void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetFrontFaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFrontFaceEXT.html + void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetPrimitiveTopologyEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveTopologyEXT.html + void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetViewportWithCountEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWithCountEXT.html + void setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetScissorWithCountEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissorWithCountEXT.html + void setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBindVertexBuffers2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers2EXT.html + void bindVertexBuffers2EXT( + uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::ArrayProxy const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const; + + // wrapper function for command vkCmdSetDepthTestEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthTestEnableEXT.html + void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthWriteEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthWriteEnableEXT.html + void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthCompareOpEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthCompareOpEXT.html + void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthBoundsTestEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBoundsTestEnableEXT.html + void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable 
) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetStencilTestEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilTestEnableEXT.html + void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetStencilOpEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilOpEXT.html + void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_device_generated_commands === + + // wrapper function for command vkCmdPreprocessGeneratedCommandsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPreprocessGeneratedCommandsNV.html + void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdExecuteGeneratedCommandsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteGeneratedCommandsNV.html + void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBindPipelineShaderGroupNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindPipelineShaderGroupNV.html + void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_depth_bias_control === + + // wrapper function for command vkCmdSetDepthBias2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBias2EXT.html + void setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_video_encode_queue === + + // wrapper function for command vkCmdEncodeVideoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEncodeVideoKHR.html + void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo ) const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + // wrapper function for command vkCmdCudaLaunchKernelNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCudaLaunchKernelNV.html + void cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_QCOM_tile_shading === + + // wrapper function for command vkCmdDispatchTileQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchTileQCOM.html + void dispatchTileQCOM() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBeginPerTileExecutionQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginPerTileExecutionQCOM.html + void beginPerTileExecutionQCOM( const VULKAN_HPP_NAMESPACE::PerTileBeginInfoQCOM & perTileBeginInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdEndPerTileExecutionQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndPerTileExecutionQCOM.html + void endPerTileExecutionQCOM( const 
VULKAN_HPP_NAMESPACE::PerTileEndInfoQCOM & perTileEndInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_synchronization2 === + + // wrapper function for command vkCmdSetEvent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent2KHR.html + void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdResetEvent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetEvent2KHR.html + void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdWaitEvents2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents2KHR.html + void waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos ) const; + + // wrapper function for command vkCmdPipelineBarrier2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier2KHR.html + void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdWriteTimestamp2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteTimestamp2KHR.html + void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_descriptor_buffer === + + // wrapper function for command vkCmdBindDescriptorBuffersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBuffersEXT.html + void bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindingInfos ) const + VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDescriptorBufferOffsetsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDescriptorBufferOffsetsEXT.html + void setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferIndices, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets ) const; + + // wrapper function for command vkCmdBindDescriptorBufferEmbeddedSamplersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBufferEmbeddedSamplersEXT.html + void bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_fragment_shading_rate_enums === + + // wrapper function for command vkCmdSetFragmentShadingRateEnumNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFragmentShadingRateEnumNV.html + void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_mesh_shader === + + // wrapper function for command vkCmdDrawMeshTasksEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksEXT.html + void drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command 
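drawMeshTasksEXT above launches mesh-shader workgroups directly, with no vertex or index buffers involved. A sketch assuming a graphics pipeline built with task/mesh stages (VK_EXT_mesh_shader); names are illustrative.

void drawMeshlets( vk::raii::CommandBuffer const & cmd, vk::Pipeline meshPipeline, uint32_t meshletCount )
{
  cmd.bindPipeline( vk::PipelineBindPoint::eGraphics, meshPipeline );
  cmd.drawMeshTasksEXT( meshletCount, 1, 1 );  // one workgroup per meshlet
}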
vkCmdDrawMeshTasksIndirectEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectEXT.html + void drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawMeshTasksIndirectCountEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectCountEXT.html + void drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_copy_commands2 === + + // wrapper function for command vkCmdCopyBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer2KHR.html + void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage2KHR.html + void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyBufferToImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage2KHR.html + void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyImageToBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer2KHR.html + void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBlitImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage2KHR.html + void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdResolveImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage2KHR.html + void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_vertex_input_dynamic_state === + + // wrapper function for command vkCmdSetVertexInputEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetVertexInputEXT.html + void setVertexInputEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexBindingDescriptions, + VULKAN_HPP_NAMESPACE::ArrayProxy const & + vertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_HUAWEI_subpass_shading === + + // wrapper function for command vkCmdSubpassShadingHUAWEI, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSubpassShadingHUAWEI.html + void subpassShadingHUAWEI() const VULKAN_HPP_NOEXCEPT; + + //=== VK_HUAWEI_invocation_mask === + + // wrapper function for command vkCmdBindInvocationMaskHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindInvocationMaskHUAWEI.html + void bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_extended_dynamic_state2 === + + // wrapper function for command vkCmdSetPatchControlPointsEXT, see + 
// https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPatchControlPointsEXT.html + void setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetRasterizerDiscardEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizerDiscardEnableEXT.html + void setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthBiasEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBiasEnableEXT.html + void setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetLogicOpEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLogicOpEXT.html + void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetPrimitiveRestartEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveRestartEnableEXT.html + void setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_color_write_enable === + + // wrapper function for command vkCmdSetColorWriteEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorWriteEnableEXT.html + void setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteEnables ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_ray_tracing_maintenance1 === + + // wrapper function for command vkCmdTraceRaysIndirect2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysIndirect2KHR.html + void traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_multi_draw === + + // wrapper function for command vkCmdDrawMultiEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMultiEXT.html + void drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & vertexInfo, + uint32_t instanceCount, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawMultiIndexedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMultiIndexedEXT.html + void drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & indexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + Optional vertexOffset VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_opacity_micromap === + + // wrapper function for command vkCmdBuildMicromapsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildMicromapsEXT.html + void buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & infos ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMicromapEXT.html + void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyMicromapToMemoryEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMicromapToMemoryEXT.html + void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyMemoryToMicromapEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToMicromapEXT.html + void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdWriteMicromapsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteMicromapsPropertiesEXT.html + void writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_HUAWEI_cluster_culling_shader === + + // wrapper function for command vkCmdDrawClusterHUAWEI, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawClusterHUAWEI.html + void drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDrawClusterIndirectHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawClusterIndirectHUAWEI.html + void drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_copy_memory_indirect === + + // wrapper function for command vkCmdCopyMemoryIndirectNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryIndirectNV.html + void copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdCopyMemoryToImageIndirectNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToImageIndirectNV.html + void copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageSubresources ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_memory_decompression === + + // wrapper function for command vkCmdDecompressMemoryNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecompressMemoryNV.html + void decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & decompressMemoryRegions ) const + VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdDecompressMemoryIndirectCountNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecompressMemoryIndirectCountNV.html + void decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_device_generated_commands_compute === + + // wrapper function for command vkCmdUpdatePipelineIndirectBufferNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdUpdatePipelineIndirectBufferNV.html + void updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_extended_dynamic_state3 === + + // wrapper function for command vkCmdSetDepthClampEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClampEnableEXT.html + void setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command 
vkCmdSetPolygonModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPolygonModeEXT.html + void setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetRasterizationSamplesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizationSamplesEXT.html + void setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetSampleMaskEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleMaskEXT.html + void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sampleMask ) const; + + // wrapper function for command vkCmdSetAlphaToCoverageEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetAlphaToCoverageEnableEXT.html + void setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetAlphaToOneEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetAlphaToOneEnableEXT.html + void setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetLogicOpEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLogicOpEnableEXT.html + void setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetColorBlendEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendEnableEXT.html + void setColorBlendEnableEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEnables ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetColorBlendEquationEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendEquationEXT.html + void setColorBlendEquationEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEquations ) const + VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetColorWriteMaskEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorWriteMaskEXT.html + void setColorWriteMaskEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteMasks ) const + VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetTessellationDomainOriginEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetTessellationDomainOriginEXT.html + void setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetRasterizationStreamEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizationStreamEXT.html + void setRasterizationStreamEXT( uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetConservativeRasterizationModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetConservativeRasterizationModeEXT.html + void + setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command 
vkCmdSetExtraPrimitiveOverestimationSizeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExtraPrimitiveOverestimationSizeEXT.html + void setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthClipEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClipEnableEXT.html + void setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetSampleLocationsEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleLocationsEnableEXT.html + void setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetColorBlendAdvancedEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendAdvancedEXT.html + void setColorBlendAdvancedEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendAdvanced ) const + VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetProvokingVertexModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetProvokingVertexModeEXT.html + void setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetLineRasterizationModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineRasterizationModeEXT.html + void setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetLineStippleEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStippleEnableEXT.html + void setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDepthClipNegativeOneToOneEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClipNegativeOneToOneEXT.html + void setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetViewportWScalingEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWScalingEnableNV.html + void setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetViewportSwizzleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportSwizzleNV.html + void setViewportSwizzleNV( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportSwizzles ) const + VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetCoverageToColorEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageToColorEnableNV.html + void setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetCoverageToColorLocationNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageToColorLocationNV.html + void setCoverageToColorLocationNV( uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command 
vkCmdSetCoverageModulationModeNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageModulationModeNV.html + void setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetCoverageModulationTableEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageModulationTableEnableNV.html + void setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetCoverageModulationTableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageModulationTableNV.html + void setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & coverageModulationTable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetShadingRateImageEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetShadingRateImageEnableNV.html + void setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetRepresentativeFragmentTestEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRepresentativeFragmentTestEnableNV.html + void setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetCoverageReductionModeNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageReductionModeNV.html + void setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_optical_flow === + + // wrapper function for command vkCmdOpticalFlowExecuteNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdOpticalFlowExecuteNV.html + void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance5 === + + // wrapper function for command vkCmdBindIndexBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindIndexBuffer2KHR.html + void bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_shader_object === + + // wrapper function for command vkCmdBindShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindShadersEXT.html + void bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & stages, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shaders ) const; + + // wrapper function for command vkCmdSetDepthClampRangeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClampRangeEXT.html + void setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, + Optional depthClampRange + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_cooperative_vector === + + // wrapper function for command vkCmdConvertCooperativeVectorMatrixNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdConvertCooperativeVectorMatrixNV.html + void convertCooperativeVectorMatrixNV( + 
VULKAN_HPP_NAMESPACE::ArrayProxy const & infos ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + + // wrapper function for command vkCmdSetAttachmentFeedbackLoopEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetAttachmentFeedbackLoopEnableEXT.html + void setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_line_rasterization === + + // wrapper function for command vkCmdSetLineStippleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStippleKHR.html + void setLineStippleKHR( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance6 === + + // wrapper function for command vkCmdBindDescriptorSets2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets2KHR.html + void bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdPushConstants2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants2KHR.html + void pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdPushDescriptorSet2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet2KHR.html + void pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdPushDescriptorSetWithTemplate2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate2KHR.html + void pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo ) const + VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdSetDescriptorBufferOffsets2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDescriptorBufferOffsets2EXT.html + void setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo ) const + VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdBindDescriptorBufferEmbeddedSamplers2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBufferEmbeddedSamplers2EXT.html + void bindDescriptorBufferEmbeddedSamplers2EXT( + const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_QCOM_tile_memory_heap === + + // wrapper function for command vkCmdBindTileMemoryQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindTileMemoryQCOM.html + void bindTileMemoryQCOM( Optional tileMemoryBindInfo + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_cluster_acceleration_structure === + + // wrapper function for command vkCmdBuildClusterAccelerationStructureIndirectNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildClusterAccelerationStructureIndirectNV.html + void buildClusterAccelerationStructureIndirectNV( const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureCommandsInfoNV & commandInfos ) const + VULKAN_HPP_NOEXCEPT; + + //=== 
VK_NV_partitioned_acceleration_structure === + + // wrapper function for command vkCmdBuildPartitionedAccelerationStructuresNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildPartitionedAccelerationStructuresNV.html + void buildPartitionedAccelerationStructuresNV( const VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureInfoNV & buildInfo ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_device_generated_commands === + + // wrapper function for command vkCmdPreprocessGeneratedCommandsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPreprocessGeneratedCommandsEXT.html + void preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkCmdExecuteGeneratedCommandsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteGeneratedCommandsEXT.html + void executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_fragment_density_map_offset === + + // wrapper function for command vkCmdEndRendering2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRendering2EXT.html + void endRendering2EXT( Optional renderingEndInfo VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {}; + VULKAN_HPP_NAMESPACE::CommandBuffer m_commandBuffer = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class CommandBuffers : public std::vector + { + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + CommandBuffers( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) + { + *this = device.allocateCommandBuffers( allocateInfo ); + } +# endif + + CommandBuffers( std::nullptr_t ) {} + + CommandBuffers() = delete; + CommandBuffers( CommandBuffers const & ) = delete; + CommandBuffers( CommandBuffers && rhs ) = default; + CommandBuffers & operator=( CommandBuffers const & ) = delete; + CommandBuffers & operator=( CommandBuffers && rhs ) = default; + + private: + CommandBuffers( std::vector && rhs ) + { + std::swap( *this, rhs ); + } + }; + + // wrapper class for handle VkCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuFunctionNVX.html + class CuFunctionNVX + { + public: + using CType = VkCuFunctionNVX; + using CppType = VULKAN_HPP_NAMESPACE::CuFunctionNVX; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + CuFunctionNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = 
device.createCuFunctionNVX( createInfo, allocator ); + } +# endif + + CuFunctionNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkCuFunctionNVX function, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_cuFunctionNVX( function ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + CuFunctionNVX( std::nullptr_t ) {} + + ~CuFunctionNVX() + { + clear(); + } + + CuFunctionNVX() = delete; + CuFunctionNVX( CuFunctionNVX const & ) = delete; + + CuFunctionNVX( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_cuFunctionNVX( VULKAN_HPP_NAMESPACE::exchange( rhs.m_cuFunctionNVX, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + CuFunctionNVX & operator=( CuFunctionNVX const & ) = delete; + + CuFunctionNVX & operator=( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_cuFunctionNVX, rhs.m_cuFunctionNVX ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::CuFunctionNVX const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX; + } + + operator VULKAN_HPP_NAMESPACE::CuFunctionNVX() const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_cuFunctionNVX ) + { + getDispatcher()->vkDestroyCuFunctionNVX( static_cast( m_device ), + static_cast( m_cuFunctionNVX ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_cuFunctionNVX = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::CuFunctionNVX release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_cuFunctionNVX, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_cuFunctionNVX, rhs.m_cuFunctionNVX ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CuFunctionNVX m_cuFunctionNVX = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuModuleNVX.html + class CuModuleNVX + { + public: + using CType = VkCuModuleNVX; + using CppType = VULKAN_HPP_NAMESPACE::CuModuleNVX; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + 
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + CuModuleNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createCuModuleNVX( createInfo, allocator ); + } +# endif + + CuModuleNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkCuModuleNVX module, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_cuModuleNVX( module ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + CuModuleNVX( std::nullptr_t ) {} + + ~CuModuleNVX() + { + clear(); + } + + CuModuleNVX() = delete; + CuModuleNVX( CuModuleNVX const & ) = delete; + + CuModuleNVX( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_cuModuleNVX( VULKAN_HPP_NAMESPACE::exchange( rhs.m_cuModuleNVX, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + CuModuleNVX & operator=( CuModuleNVX const & ) = delete; + + CuModuleNVX & operator=( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_cuModuleNVX, rhs.m_cuModuleNVX ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::CuModuleNVX const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX; + } + + operator VULKAN_HPP_NAMESPACE::CuModuleNVX() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_cuModuleNVX ) + { + getDispatcher()->vkDestroyCuModuleNVX( + static_cast( m_device ), static_cast( m_cuModuleNVX ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_cuModuleNVX = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::CuModuleNVX release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_cuModuleNVX, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_cuModuleNVX, rhs.m_cuModuleNVX ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CuModuleNVX m_cuModuleNVX = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCudaFunctionNV.html +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + class CudaFunctionNV + { + public: + using 
CType = VkCudaFunctionNV; + using CppType = VULKAN_HPP_NAMESPACE::CudaFunctionNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaFunctionNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaFunctionNV; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + CudaFunctionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createCudaFunctionNV( createInfo, allocator ); + } +# endif + + CudaFunctionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkCudaFunctionNV function, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_cudaFunctionNV( function ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + CudaFunctionNV( std::nullptr_t ) {} + + ~CudaFunctionNV() + { + clear(); + } + + CudaFunctionNV() = delete; + CudaFunctionNV( CudaFunctionNV const & ) = delete; + + CudaFunctionNV( CudaFunctionNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_cudaFunctionNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_cudaFunctionNV, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + CudaFunctionNV & operator=( CudaFunctionNV const & ) = delete; + + CudaFunctionNV & operator=( CudaFunctionNV && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_cudaFunctionNV, rhs.m_cudaFunctionNV ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::CudaFunctionNV const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_cudaFunctionNV; + } + + operator VULKAN_HPP_NAMESPACE::CudaFunctionNV() const VULKAN_HPP_NOEXCEPT + { + return m_cudaFunctionNV; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_cudaFunctionNV ) + { + getDispatcher()->vkDestroyCudaFunctionNV( static_cast( m_device ), + static_cast( m_cudaFunctionNV ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_cudaFunctionNV = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::CudaFunctionNV release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_cudaFunctionNV, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_cudaFunctionNV, rhs.m_cudaFunctionNV ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CudaFunctionNV m_cudaFunctionNV = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + // wrapper class for handle VkCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCudaModuleNV.html +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + class CudaModuleNV + { + public: + using CType = VkCudaModuleNV; + using CppType = VULKAN_HPP_NAMESPACE::CudaModuleNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaModuleNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaModuleNV; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + CudaModuleNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createCudaModuleNV( createInfo, allocator ); + } +# endif + + CudaModuleNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkCudaModuleNV module, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_cudaModuleNV( module ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + CudaModuleNV( std::nullptr_t ) {} + + ~CudaModuleNV() + { + clear(); + } + + CudaModuleNV() = delete; + CudaModuleNV( CudaModuleNV const & ) = delete; + + CudaModuleNV( CudaModuleNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_cudaModuleNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_cudaModuleNV, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + CudaModuleNV & operator=( CudaModuleNV const & ) = delete; + + CudaModuleNV & operator=( CudaModuleNV && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_cudaModuleNV, rhs.m_cudaModuleNV ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::CudaModuleNV const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_cudaModuleNV; + } + + operator VULKAN_HPP_NAMESPACE::CudaModuleNV() const VULKAN_HPP_NOEXCEPT + { + return m_cudaModuleNV; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_cudaModuleNV ) + { + getDispatcher()->vkDestroyCudaModuleNV( + static_cast( m_device ), static_cast( m_cudaModuleNV ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_cudaModuleNV = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::CudaModuleNV release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_cudaModuleNV, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV & rhs ) 
VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_cudaModuleNV, rhs.m_cudaModuleNV ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_NV_cuda_kernel_launch === + + // wrapper function for command vkGetCudaModuleCacheNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCudaModuleCacheNV.html + VULKAN_HPP_NODISCARD std::vector getCache() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CudaModuleNV m_cudaModuleNV = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + // wrapper class for handle VkDebugReportCallbackEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugReportCallbackEXT.html + class DebugReportCallbackEXT + { + public: + using CType = VkDebugReportCallbackEXT; + using CppType = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + DebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createDebugReportCallbackEXT( createInfo, allocator ); + } +# endif + + DebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VkDebugReportCallbackEXT callback, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( instance ) + , m_debugReportCallbackEXT( callback ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + } + + DebugReportCallbackEXT( std::nullptr_t ) {} + + ~DebugReportCallbackEXT() + { + clear(); + } + + DebugReportCallbackEXT() = delete; + DebugReportCallbackEXT( DebugReportCallbackEXT const & ) = delete; + + DebugReportCallbackEXT( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_instance( VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ) ) + , m_debugReportCallbackEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_debugReportCallbackEXT, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DebugReportCallbackEXT & operator=( DebugReportCallbackEXT const & ) = delete; + + DebugReportCallbackEXT & operator=( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_instance, rhs.m_instance ); + std::swap( m_debugReportCallbackEXT, rhs.m_debugReportCallbackEXT ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT; + } + + operator VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT() const 
VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_debugReportCallbackEXT ) + { + getDispatcher()->vkDestroyDebugReportCallbackEXT( static_cast( m_instance ), + static_cast( m_debugReportCallbackEXT ), + reinterpret_cast( m_allocator ) ); + } + m_instance = nullptr; + m_debugReportCallbackEXT = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT release() + { + m_instance = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_debugReportCallbackEXT, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Instance getInstance() const + { + return m_instance; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_instance, rhs.m_instance ); + std::swap( m_debugReportCallbackEXT, rhs.m_debugReportCallbackEXT ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT m_debugReportCallbackEXT = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkDebugUtilsMessengerEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsMessengerEXT.html + class DebugUtilsMessengerEXT + { + public: + using CType = VkDebugUtilsMessengerEXT; + using CppType = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + DebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createDebugUtilsMessengerEXT( createInfo, allocator ); + } +# endif + + DebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VkDebugUtilsMessengerEXT messenger, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( instance ) + , m_debugUtilsMessengerEXT( messenger ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + } + + DebugUtilsMessengerEXT( std::nullptr_t ) {} + + ~DebugUtilsMessengerEXT() + { + clear(); + } + + DebugUtilsMessengerEXT() = delete; + DebugUtilsMessengerEXT( DebugUtilsMessengerEXT const & ) = delete; + + DebugUtilsMessengerEXT( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_instance( VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ) ) + , m_debugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_debugUtilsMessengerEXT, {} ) ) + , m_allocator( 
VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT const & ) = delete; + + DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_instance, rhs.m_instance ); + std::swap( m_debugUtilsMessengerEXT, rhs.m_debugUtilsMessengerEXT ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT; + } + + operator VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_debugUtilsMessengerEXT ) + { + getDispatcher()->vkDestroyDebugUtilsMessengerEXT( static_cast( m_instance ), + static_cast( m_debugUtilsMessengerEXT ), + reinterpret_cast( m_allocator ) ); + } + m_instance = nullptr; + m_debugUtilsMessengerEXT = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT release() + { + m_instance = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_debugUtilsMessengerEXT, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Instance getInstance() const + { + return m_instance; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_instance, rhs.m_instance ); + std::swap( m_debugUtilsMessengerEXT, rhs.m_debugUtilsMessengerEXT ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT m_debugUtilsMessengerEXT = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkDeferredOperationKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeferredOperationKHR.html + class DeferredOperationKHR + { + public: + using CType = VkDeferredOperationKHR; + using CppType = VULKAN_HPP_NAMESPACE::DeferredOperationKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + DeferredOperationKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createDeferredOperationKHR( allocator ); + } +# endif + + DeferredOperationKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkDeferredOperationKHR operation, + 
VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_deferredOperationKHR( operation ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + DeferredOperationKHR( std::nullptr_t ) {} + + ~DeferredOperationKHR() + { + clear(); + } + + DeferredOperationKHR() = delete; + DeferredOperationKHR( DeferredOperationKHR const & ) = delete; + + DeferredOperationKHR( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_deferredOperationKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_deferredOperationKHR, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DeferredOperationKHR & operator=( DeferredOperationKHR const & ) = delete; + + DeferredOperationKHR & operator=( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_deferredOperationKHR, rhs.m_deferredOperationKHR ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DeferredOperationKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR; + } + + operator VULKAN_HPP_NAMESPACE::DeferredOperationKHR() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_deferredOperationKHR ) + { + getDispatcher()->vkDestroyDeferredOperationKHR( static_cast( m_device ), + static_cast( m_deferredOperationKHR ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_deferredOperationKHR = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::DeferredOperationKHR release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_deferredOperationKHR, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_deferredOperationKHR, rhs.m_deferredOperationKHR ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_KHR_deferred_host_operations === + + // wrapper function for command vkGetDeferredOperationMaxConcurrencyKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeferredOperationMaxConcurrencyKHR.html + VULKAN_HPP_NODISCARD uint32_t getMaxConcurrency() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeferredOperationResultKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeferredOperationResultKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getResult() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkDeferredOperationJoinKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDeferredOperationJoinKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result join() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + 
VULKAN_HPP_NAMESPACE::DeferredOperationKHR m_deferredOperationKHR = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorPool.html + class DescriptorPool + { + public: + using CType = VkDescriptorPool; + using CppType = VULKAN_HPP_NAMESPACE::DescriptorPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + DescriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createDescriptorPool( createInfo, allocator ); + } +# endif + + DescriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkDescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_descriptorPool( descriptorPool ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + DescriptorPool( std::nullptr_t ) {} + + ~DescriptorPool() + { + clear(); + } + + DescriptorPool() = delete; + DescriptorPool( DescriptorPool const & ) = delete; + + DescriptorPool( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DescriptorPool & operator=( DescriptorPool const & ) = delete; + + DescriptorPool & operator=( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorPool, rhs.m_descriptorPool ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DescriptorPool const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool; + } + + operator VULKAN_HPP_NAMESPACE::DescriptorPool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_descriptorPool ) + { + getDispatcher()->vkDestroyDescriptorPool( static_cast( m_device ), + static_cast( m_descriptorPool ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_descriptorPool = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::DescriptorPool release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_descriptorPool, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( 
m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorPool & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorPool, rhs.m_descriptorPool ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkResetDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetDescriptorPool.html + void reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkDescriptorSet, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSet.html + class DescriptorSet + { + public: + using CType = VkDescriptorSet; + using CppType = VULKAN_HPP_NAMESPACE::DescriptorSet; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet; + + public: + DescriptorSet( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDescriptorSet descriptorSet, VkDescriptorPool descriptorPool ) + : m_device( device ), m_descriptorPool( descriptorPool ), m_descriptorSet( descriptorSet ), m_dispatcher( device.getDispatcher() ) + { + } + + DescriptorSet( std::nullptr_t ) {} + + ~DescriptorSet() + { + clear(); + } + + DescriptorSet() = delete; + DescriptorSet( DescriptorSet const & ) = delete; + + DescriptorSet( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) ) + , m_descriptorSet( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSet, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DescriptorSet & operator=( DescriptorSet const & ) = delete; + + DescriptorSet & operator=( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorPool, rhs.m_descriptorPool ); + std::swap( m_descriptorSet, rhs.m_descriptorSet ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DescriptorSet const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet; + } + + operator VULKAN_HPP_NAMESPACE::DescriptorSet() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_descriptorSet ) + { + getDispatcher()->vkFreeDescriptorSets( static_cast( m_device ), + static_cast( m_descriptorPool ), + 1, + reinterpret_cast( &m_descriptorSet ) ); + } + m_device = nullptr; + m_descriptorPool = nullptr; + m_descriptorSet = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::DescriptorSet release() + { + m_device = 
nullptr; + m_descriptorPool = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_descriptorSet, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSet & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorPool, rhs.m_descriptorPool ); + std::swap( m_descriptorSet, rhs.m_descriptorSet ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_1 === + + // wrapper function for command vkUpdateDescriptorSetWithTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSetWithTemplate.html + template + void updateWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_descriptor_update_template === + + // wrapper function for command vkUpdateDescriptorSetWithTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSetWithTemplateKHR.html + template + void updateWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VALVE_descriptor_set_host_mapping === + + // wrapper function for command vkGetDescriptorSetHostMappingVALVE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetHostMappingVALVE.html + VULKAN_HPP_NODISCARD void * getHostMappingVALVE() const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet m_descriptorSet = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DescriptorSets : public std::vector + { + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + DescriptorSets( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) + { + *this = device.allocateDescriptorSets( allocateInfo ); + } +# endif + + DescriptorSets( std::nullptr_t ) {} + + DescriptorSets() = delete; + DescriptorSets( DescriptorSets const & ) = delete; + DescriptorSets( DescriptorSets && rhs ) = default; + DescriptorSets & operator=( DescriptorSets const & ) = delete; + DescriptorSets & operator=( DescriptorSets && rhs ) = default; + + private: + DescriptorSets( std::vector && rhs ) + { + std::swap( *this, rhs ); + } + }; + + // wrapper class for handle VkDescriptorSetLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayout.html + class DescriptorSetLayout + { + public: + using CType = VkDescriptorSetLayout; + using CppType = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + 
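+    // Editorial note: an illustrative allocation sketch for the DescriptorPool / DescriptorSets
+    // wrappers above, not part of the generated header. It assumes the default vk / vk::raii
+    // namespaces, a vk::raii::Device `device`, a vk::raii::DescriptorSetLayout `layout`, and a
+    // pre-filled vk::DescriptorPoolCreateInfo `poolCI`.
+    //
+    //   vk::raii::DescriptorPool      pool = device.createDescriptorPool( poolCI );
+    //   vk::DescriptorSetAllocateInfo allocInfo( *pool, *layout );
+    //   vk::raii::DescriptorSets      sets( device, allocInfo );   // vkAllocateDescriptorSets
+    //   // each element frees itself back to the pool (vkFreeDescriptorSets) on destruction, so
+    //   // `poolCI` is assumed to use vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet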
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + DescriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createDescriptorSetLayout( createInfo, allocator ); + } +# endif + + DescriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkDescriptorSetLayout descriptorSetLayout, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_descriptorSetLayout( descriptorSetLayout ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + DescriptorSetLayout( std::nullptr_t ) {} + + ~DescriptorSetLayout() + { + clear(); + } + + DescriptorSetLayout() = delete; + DescriptorSetLayout( DescriptorSetLayout const & ) = delete; + + DescriptorSetLayout( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorSetLayout( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DescriptorSetLayout & operator=( DescriptorSetLayout const & ) = delete; + + DescriptorSetLayout & operator=( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorSetLayout, rhs.m_descriptorSetLayout ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DescriptorSetLayout const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout; + } + + operator VULKAN_HPP_NAMESPACE::DescriptorSetLayout() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_descriptorSetLayout ) + { + getDispatcher()->vkDestroyDescriptorSetLayout( static_cast( m_device ), + static_cast( m_descriptorSetLayout ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_descriptorSetLayout = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::DescriptorSetLayout release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_descriptorSetLayout, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorSetLayout, rhs.m_descriptorSetLayout ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_EXT_descriptor_buffer === + + // wrapper function for command vkGetDescriptorSetLayoutSizeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSizeEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getSizeEXT() const VULKAN_HPP_NOEXCEPT; + + // wrapper 
function for command vkGetDescriptorSetLayoutBindingOffsetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutBindingOffsetEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getBindingOffsetEXT( uint32_t binding ) const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorSetLayout m_descriptorSetLayout = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkDescriptorUpdateTemplate, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorUpdateTemplate.html + class DescriptorUpdateTemplate + { + public: + using CType = VkDescriptorUpdateTemplate; + using CppType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + DescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createDescriptorUpdateTemplate( createInfo, allocator ); + } +# endif + + DescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_descriptorUpdateTemplate( descriptorUpdateTemplate ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + DescriptorUpdateTemplate( std::nullptr_t ) {} + + ~DescriptorUpdateTemplate() + { + clear(); + } + + DescriptorUpdateTemplate() = delete; + DescriptorUpdateTemplate( DescriptorUpdateTemplate const & ) = delete; + + DescriptorUpdateTemplate( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate const & ) = delete; + + DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorUpdateTemplate, rhs.m_descriptorUpdateTemplate ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate; + } + + operator VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( 
m_descriptorUpdateTemplate ) + { + getDispatcher()->vkDestroyDescriptorUpdateTemplate( static_cast( m_device ), + static_cast( m_descriptorUpdateTemplate ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_descriptorUpdateTemplate = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_descriptorUpdateTemplate, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorUpdateTemplate, rhs.m_descriptorUpdateTemplate ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate m_descriptorUpdateTemplate = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkDeviceMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceMemory.html + class DeviceMemory + { + public: + using CType = VkDeviceMemory; + using CppType = VULKAN_HPP_NAMESPACE::DeviceMemory; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + DeviceMemory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.allocateMemory( allocateInfo, allocator ); + } +# endif + + DeviceMemory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkDeviceMemory memory, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_deviceMemory( memory ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + DeviceMemory( std::nullptr_t ) {} + + ~DeviceMemory() + { + clear(); + } + + DeviceMemory() = delete; + DeviceMemory( DeviceMemory const & ) = delete; + + DeviceMemory( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_deviceMemory( VULKAN_HPP_NAMESPACE::exchange( rhs.m_deviceMemory, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DeviceMemory & operator=( DeviceMemory const & ) = delete; + + DeviceMemory & operator=( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); 
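+    // Editorial note: an illustrative sketch tying the descriptor-set-layout and update-template
+    // wrappers above together, not part of the generated header. It assumes the default vk / vk::raii
+    // namespaces, a vk::raii::Device `device`, an allocated vk::raii::DescriptorSet `set`, and
+    // pre-filled create infos `layoutCI` / `templateCI`; `bufferInfo` is a placeholder
+    // vk::DescriptorBufferInfo matching the single entry described by `templateCI`.
+    //
+    //   vk::raii::DescriptorSetLayout      layout         = device.createDescriptorSetLayout( layoutCI );
+    //   vk::raii::DescriptorUpdateTemplate updateTemplate = device.createDescriptorUpdateTemplate( templateCI );
+    //   set.updateWithTemplate( *updateTemplate, bufferInfo );   // vkUpdateDescriptorSetWithTemplate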
+ std::swap( m_deviceMemory, rhs.m_deviceMemory ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DeviceMemory const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory; + } + + operator VULKAN_HPP_NAMESPACE::DeviceMemory() const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_deviceMemory ) + { + getDispatcher()->vkFreeMemory( + static_cast( m_device ), static_cast( m_deviceMemory ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_deviceMemory = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::DeviceMemory release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_deviceMemory, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceMemory & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_deviceMemory, rhs.m_deviceMemory ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkMapMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory.html + VULKAN_HPP_NODISCARD void * mapMemory( VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + // wrapper function for command vkUnmapMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory.html + void unmapMemory() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetDeviceMemoryCommitment, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryCommitment.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getCommitment() const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + + // wrapper function for command vkGetMemoryWin32HandleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandleNV.html + VULKAN_HPP_NODISCARD HANDLE getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_pageable_device_local_memory === + + // wrapper function for command vkSetDeviceMemoryPriorityEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDeviceMemoryPriorityEXT.html + void setPriorityEXT( float priority ) const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory m_deviceMemory = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkDisplayKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayKHR.html + class DisplayKHR 
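+    // Editorial note: an illustrative host-visible mapping sketch for the DeviceMemory wrapper above,
+    // not part of the generated header. It assumes the default vk / vk::raii namespaces, a
+    // vk::raii::Device `device`, and a vk::MemoryAllocateInfo `allocInfo` that selects a host-visible,
+    // host-coherent memory type; `srcData` is a placeholder pointer.
+    //
+    //   vk::raii::DeviceMemory memory = device.allocateMemory( allocInfo );
+    //   void * mapped = memory.mapMemory( 0, allocInfo.allocationSize );     // vkMapMemory
+    //   std::memcpy( mapped, srcData, static_cast<size_t>( allocInfo.allocationSize ) );
+    //   memory.unmapMemory();                                                // vkUnmapMemory
+    //   // vkFreeMemory runs automatically when `memory` goes out of scope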
+ { + public: + using CType = VkDisplayKHR; + using CppType = VULKAN_HPP_NAMESPACE::DisplayKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, int32_t drmFd, uint32_t connectorId ) + { + *this = physicalDevice.getDrmDisplayEXT( drmFd, connectorId ); + } +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, Display & dpy, RROutput rrOutput ) + { + *this = physicalDevice.getRandROutputDisplayEXT( dpy, rrOutput ); + } +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, uint32_t deviceRelativeId ) + { + *this = physicalDevice.getWinrtDisplayNV( deviceRelativeId ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif + + DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, VkDisplayKHR display ) + : m_physicalDevice( physicalDevice ), m_displayKHR( display ), m_dispatcher( physicalDevice.getDispatcher() ) + { + } + + DisplayKHR( std::nullptr_t ) {} + + ~DisplayKHR() + { + clear(); + } + + DisplayKHR() = delete; + DisplayKHR( DisplayKHR const & ) = delete; + + DisplayKHR( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_physicalDevice( VULKAN_HPP_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) + , m_displayKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_displayKHR, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DisplayKHR & operator=( DisplayKHR const & ) = delete; + + DisplayKHR & operator=( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_physicalDevice, rhs.m_physicalDevice ); + std::swap( m_displayKHR, rhs.m_displayKHR ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DisplayKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR; + } + + operator VULKAN_HPP_NAMESPACE::DisplayKHR() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_displayKHR ) + { + getDispatcher()->vkReleaseDisplayEXT( static_cast( m_physicalDevice ), static_cast( m_displayKHR ) ); + } + m_physicalDevice = nullptr; + m_displayKHR = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::DisplayKHR release() + { + m_physicalDevice = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_displayKHR, nullptr ); + } + + VULKAN_HPP_NAMESPACE::PhysicalDevice getPhysicalDevice() const + { + return m_physicalDevice; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_physicalDevice, 
rhs.m_physicalDevice ); + std::swap( m_displayKHR, rhs.m_displayKHR ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_KHR_display === + + // wrapper function for command vkGetDisplayModePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModePropertiesKHR.html + VULKAN_HPP_NODISCARD std::vector getModeProperties() const; + + // wrapper function for command vkCreateDisplayModeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayModeKHR.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + //=== VK_KHR_get_display_properties2 === + + // wrapper function for command vkGetDisplayModeProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModeProperties2KHR.html + VULKAN_HPP_NODISCARD std::vector getModeProperties2() const; + + // wrapper function for command vkGetDisplayModeProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModeProperties2KHR.html + template + VULKAN_HPP_NODISCARD std::vector getModeProperties2() const; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + + // wrapper function for command vkAcquireWinrtDisplayNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireWinrtDisplayNV.html + void acquireWinrtNV() const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + private: + VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; + VULKAN_HPP_NAMESPACE::DisplayKHR m_displayKHR = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DisplayKHRs : public std::vector + { + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + DisplayKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, uint32_t planeIndex ) + { + *this = physicalDevice.getDisplayPlaneSupportedDisplaysKHR( planeIndex ); + } +# endif + + DisplayKHRs( std::nullptr_t ) {} + + DisplayKHRs() = delete; + DisplayKHRs( DisplayKHRs const & ) = delete; + DisplayKHRs( DisplayKHRs && rhs ) = default; + DisplayKHRs & operator=( DisplayKHRs const & ) = delete; + DisplayKHRs & operator=( DisplayKHRs && rhs ) = default; + + private: + DisplayKHRs( std::vector && rhs ) + { + std::swap( *this, rhs ); + } + }; + + // wrapper class for handle VkDisplayModeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModeKHR.html + class DisplayModeKHR + { + public: + using CType = VkDisplayModeKHR; + using CppType = VULKAN_HPP_NAMESPACE::DisplayModeKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + DisplayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, + VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + 
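+    // Editorial note: an illustrative sketch for the display wrappers above, not part of the
+    // generated header. It assumes the default vk / vk::raii namespaces and an existing
+    // vk::raii::PhysicalDevice `physicalDevice`; `planeIndex` is a placeholder value.
+    //
+    //   vk::raii::DisplayKHRs displays( physicalDevice, planeIndex );   // vkGetDisplayPlaneSupportedDisplaysKHR
+    //   for ( auto const & modeProps : displays.front().getModeProperties() )
+    //   {
+    //     // inspect modeProps.parameters (visible region, refresh rate) and pick a mode ...
+    //   }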
*this = display.createMode( createInfo, allocator ); + } +# endif + + DisplayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, VkDisplayModeKHR displayModeKHR ) + : m_physicalDevice( display.getPhysicalDevice() ), m_displayModeKHR( displayModeKHR ), m_dispatcher( display.getDispatcher() ) + { + } + + DisplayModeKHR( std::nullptr_t ) {} + + ~DisplayModeKHR() + { + clear(); + } + + DisplayModeKHR() = delete; + + DisplayModeKHR( DisplayModeKHR const & rhs ) : m_displayModeKHR( rhs.m_displayModeKHR ), m_dispatcher( rhs.m_dispatcher ) {} + + DisplayModeKHR( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_physicalDevice( VULKAN_HPP_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) + , m_displayModeKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DisplayModeKHR & operator=( DisplayModeKHR const & rhs ) + { + m_displayModeKHR = rhs.m_displayModeKHR; + m_dispatcher = rhs.m_dispatcher; + return *this; + } + + DisplayModeKHR & operator=( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_physicalDevice, rhs.m_physicalDevice ); + std::swap( m_displayModeKHR, rhs.m_displayModeKHR ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DisplayModeKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR; + } + + operator VULKAN_HPP_NAMESPACE::DisplayModeKHR() const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + m_physicalDevice = nullptr; + m_displayModeKHR = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::DisplayModeKHR release() + { + m_physicalDevice = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_displayModeKHR, nullptr ); + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_physicalDevice, rhs.m_physicalDevice ); + std::swap( m_displayModeKHR, rhs.m_displayModeKHR ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_KHR_display === + + // wrapper function for command vkGetDisplayPlaneCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneCapabilitiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR getDisplayPlaneCapabilities( uint32_t planeIndex ) const; + + private: + VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR m_displayModeKHR = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkEvent.html + class Event + { + public: + using CType = VkEvent; + using CppType = VULKAN_HPP_NAMESPACE::Event; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + 
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createEvent( createInfo, allocator ); + } +# endif + + Event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkEvent event, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_event( event ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + Event( std::nullptr_t ) {} + + ~Event() + { + clear(); + } + + Event() = delete; + Event( Event const & ) = delete; + + Event( Event && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_event( VULKAN_HPP_NAMESPACE::exchange( rhs.m_event, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + Event & operator=( Event const & ) = delete; + + Event & operator=( Event && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_event, rhs.m_event ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Event const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_event; + } + + operator VULKAN_HPP_NAMESPACE::Event() const VULKAN_HPP_NOEXCEPT + { + return m_event; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_event ) + { + getDispatcher()->vkDestroyEvent( + static_cast( m_device ), static_cast( m_event ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_event = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::Event release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_event, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Event & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_event, rhs.m_event ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkGetEventStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEventStatus.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const; + + // wrapper function for command vkSetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetEvent.html + void set() const; + + // wrapper function for command vkResetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetEvent.html + void reset() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Event m_event = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct 
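+    // Editorial note: an illustrative sketch for the Event wrapper above, not part of the generated
+    // header. It assumes the default vk / vk::raii namespaces and an existing vk::raii::Device `device`.
+    //
+    //   vk::raii::Event event = device.createEvent( vk::EventCreateInfo{} );
+    //   event.set();                                                       // vkSetEvent
+    //   if ( event.getStatus() == vk::Result::eEventSet ) { /* ... */ }    // vkGetEventStatus
+    //   event.reset();                                                     // vkResetEvent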
isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkExternalComputeQueueNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalComputeQueueNV.html + class ExternalComputeQueueNV + { + public: + using CType = VkExternalComputeQueueNV; + using CppType = VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eExternalComputeQueueNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + ExternalComputeQueueNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ExternalComputeQueueCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createExternalComputeQueueNV( createInfo, allocator ); + } +# endif + + ExternalComputeQueueNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkExternalComputeQueueNV externalQueue, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_externalComputeQueueNV( externalQueue ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + ExternalComputeQueueNV( std::nullptr_t ) {} + + ~ExternalComputeQueueNV() + { + clear(); + } + + ExternalComputeQueueNV() = delete; + ExternalComputeQueueNV( ExternalComputeQueueNV const & ) = delete; + + ExternalComputeQueueNV( ExternalComputeQueueNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_externalComputeQueueNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_externalComputeQueueNV, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + ExternalComputeQueueNV & operator=( ExternalComputeQueueNV const & ) = delete; + + ExternalComputeQueueNV & operator=( ExternalComputeQueueNV && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_externalComputeQueueNV, rhs.m_externalComputeQueueNV ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_externalComputeQueueNV; + } + + operator VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV() const VULKAN_HPP_NOEXCEPT + { + return m_externalComputeQueueNV; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_externalComputeQueueNV ) + { + getDispatcher()->vkDestroyExternalComputeQueueNV( static_cast( m_device ), + static_cast( m_externalComputeQueueNV ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_externalComputeQueueNV = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_externalComputeQueueNV, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( 
m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ExternalComputeQueueNV & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_externalComputeQueueNV, rhs.m_externalComputeQueueNV ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_NV_external_compute_queue === + + // wrapper function for command vkGetExternalComputeQueueDataNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExternalComputeQueueDataNV.html + template + VULKAN_HPP_NODISCARD std::pair getData() const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV m_externalComputeQueueNV = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFence.html + class Fence + { + public: + using CType = VkFence; + using CppType = VULKAN_HPP_NAMESPACE::Fence; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFence; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createFence( createInfo, allocator ); + } +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.registerEventEXT( deviceEventInfo, allocator ); + } +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, + VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.registerDisplayEventEXT( display, displayEventInfo, allocator ); + } +# endif + + Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkFence fence, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_fence( fence ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + Fence( std::nullptr_t ) {} + + ~Fence() + { + clear(); + } + + Fence() = delete; + Fence( Fence const & ) = delete; + + Fence( Fence && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_fence( VULKAN_HPP_NAMESPACE::exchange( rhs.m_fence, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + Fence & operator=( Fence const & ) = 
delete; + + Fence & operator=( Fence && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_fence, rhs.m_fence ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Fence const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_fence; + } + + operator VULKAN_HPP_NAMESPACE::Fence() const VULKAN_HPP_NOEXCEPT + { + return m_fence; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_fence ) + { + getDispatcher()->vkDestroyFence( + static_cast( m_device ), static_cast( m_fence ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_fence = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::Fence release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_fence, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_fence, rhs.m_fence ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkGetFenceStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceStatus.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Fence m_fence = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebuffer.html + class Framebuffer + { + public: + using CType = VkFramebuffer; + using CppType = VULKAN_HPP_NAMESPACE::Framebuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createFramebuffer( createInfo, allocator ); + } +# endif + + Framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkFramebuffer framebuffer, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_framebuffer( framebuffer ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + Framebuffer( std::nullptr_t ) {} + + ~Framebuffer() + { + clear(); + } + + Framebuffer() = delete; + Framebuffer( Framebuffer const & ) = delete; + + 
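+    // Editorial note: an illustrative sketch for the Fence wrapper above, not part of the generated
+    // header. It assumes the default vk / vk::raii namespaces, a vk::raii::Device `device`, a
+    // vk::raii::Queue `queue`, and a filled vk::SubmitInfo `submitInfo`.
+    //
+    //   vk::raii::Fence fence = device.createFence( vk::FenceCreateInfo{} );
+    //   queue.submit( submitInfo, *fence );                // the fence is signaled on completion
+    //   while ( fence.getStatus() == vk::Result::eNotReady )
+    //     ;                                                // busy-poll vkGetFenceStatus, for illustration only;
+    //                                                      // a blocking device.waitForFences(...) is usually preferable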
Framebuffer( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_framebuffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_framebuffer, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + Framebuffer & operator=( Framebuffer const & ) = delete; + + Framebuffer & operator=( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_framebuffer, rhs.m_framebuffer ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Framebuffer const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer; + } + + operator VULKAN_HPP_NAMESPACE::Framebuffer() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_framebuffer ) + { + getDispatcher()->vkDestroyFramebuffer( + static_cast( m_device ), static_cast( m_framebuffer ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_framebuffer = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::Framebuffer release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_framebuffer, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Framebuffer & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_framebuffer, rhs.m_framebuffer ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_QCOM_tile_properties === + + // wrapper function for command vkGetFramebufferTilePropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFramebufferTilePropertiesQCOM.html + VULKAN_HPP_NODISCARD std::vector getTilePropertiesQCOM() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Framebuffer m_framebuffer = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImage.html + class Image + { + public: + using CType = VkImage; + using CppType = VULKAN_HPP_NAMESPACE::Image; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImage; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createImage( createInfo, 
allocator ); + } +# endif + + Image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkImage image, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_image( image ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + Image( std::nullptr_t ) {} + + ~Image() + { + clear(); + } + + Image() = delete; + Image( Image const & ) = delete; + + Image( Image && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_image( VULKAN_HPP_NAMESPACE::exchange( rhs.m_image, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + Image & operator=( Image const & ) = delete; + + Image & operator=( Image && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_image, rhs.m_image ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Image const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_image; + } + + operator VULKAN_HPP_NAMESPACE::Image() const VULKAN_HPP_NOEXCEPT + { + return m_image; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_image ) + { + getDispatcher()->vkDestroyImage( + static_cast( m_device ), static_cast( m_image ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_image = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::Image release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_image, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Image & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_image, rhs.m_image ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkBindImageMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory.html + void bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const; + + // wrapper function for command vkGetImageMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getMemoryRequirements() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements.html + VULKAN_HPP_NODISCARD std::vector getSparseMemoryRequirements() const; + + // wrapper function for command vkGetImageSubresourceLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout + getSubresourceLayout( const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_4 === + + // wrapper 
function for command vkGetImageSubresourceLayout2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getSubresourceLayout2( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetImageSubresourceLayout2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getSubresourceLayout2( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_image_drm_format_modifier === + + // wrapper function for command vkGetImageDrmFormatModifierPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageDrmFormatModifierPropertiesEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT getDrmFormatModifierPropertiesEXT() const; + + //=== VK_EXT_host_image_copy === + + // wrapper function for command vkGetImageSubresourceLayout2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2EXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetImageSubresourceLayout2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2EXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance5 === + + // wrapper function for command vkGetImageSubresourceLayout2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetImageSubresourceLayout2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Image m_image = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageView.html + class ImageView + { + public: + using CType = VkImageView; + using CppType = VULKAN_HPP_NAMESPACE::ImageView; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + 
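+    // Editorial note: an illustrative sketch for the Image wrapper above, not part of the generated
+    // header. It assumes the default vk / vk::raii namespaces, a vk::raii::Device `device`, a filled
+    // vk::ImageCreateInfo `imageCI`, and a hypothetical helper `findMemoryType(...)` that picks a
+    // suitable memory type index from the returned requirements.
+    //
+    //   vk::raii::Image        image = device.createImage( imageCI );
+    //   vk::MemoryRequirements req   = image.getMemoryRequirements();
+    //   vk::MemoryAllocateInfo allocInfo( req.size, findMemoryType( req.memoryTypeBits ) );
+    //   vk::raii::DeviceMemory memory = device.allocateMemory( allocInfo );
+    //   image.bindMemory( *memory, 0 );                    // vkBindImageMemory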
ImageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createImageView( createInfo, allocator ); + } +# endif + + ImageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkImageView imageView, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_imageView( imageView ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + ImageView( std::nullptr_t ) {} + + ~ImageView() + { + clear(); + } + + ImageView() = delete; + ImageView( ImageView const & ) = delete; + + ImageView( ImageView && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_imageView( VULKAN_HPP_NAMESPACE::exchange( rhs.m_imageView, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + ImageView & operator=( ImageView const & ) = delete; + + ImageView & operator=( ImageView && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_imageView, rhs.m_imageView ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::ImageView const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_imageView; + } + + operator VULKAN_HPP_NAMESPACE::ImageView() const VULKAN_HPP_NOEXCEPT + { + return m_imageView; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_imageView ) + { + getDispatcher()->vkDestroyImageView( + static_cast( m_device ), static_cast( m_imageView ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_imageView = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::ImageView release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_imageView, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ImageView & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_imageView, rhs.m_imageView ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_NVX_image_view_handle === + + // wrapper function for command vkGetImageViewAddressNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewAddressNVX.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX getAddressNVX() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ImageView m_imageView = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkIndirectCommandsLayoutEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutEXT.html + class IndirectCommandsLayoutEXT + { + public: + using CType = VkIndirectCommandsLayoutEXT; + using CppType = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + IndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createIndirectCommandsLayoutEXT( createInfo, allocator ); + } +# endif + + IndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkIndirectCommandsLayoutEXT indirectCommandsLayout, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_indirectCommandsLayoutEXT( indirectCommandsLayout ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + IndirectCommandsLayoutEXT( std::nullptr_t ) {} + + ~IndirectCommandsLayoutEXT() + { + clear(); + } + + IndirectCommandsLayoutEXT() = delete; + IndirectCommandsLayoutEXT( IndirectCommandsLayoutEXT const & ) = delete; + + IndirectCommandsLayoutEXT( IndirectCommandsLayoutEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_indirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayoutEXT, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + IndirectCommandsLayoutEXT & operator=( IndirectCommandsLayoutEXT const & ) = delete; + + IndirectCommandsLayoutEXT & operator=( IndirectCommandsLayoutEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_indirectCommandsLayoutEXT, rhs.m_indirectCommandsLayoutEXT ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutEXT; + } + + operator VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutEXT; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_indirectCommandsLayoutEXT ) + { + getDispatcher()->vkDestroyIndirectCommandsLayoutEXT( static_cast( m_device ), + static_cast( m_indirectCommandsLayoutEXT ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_indirectCommandsLayoutEXT = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_indirectCommandsLayoutEXT, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == 
VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_indirectCommandsLayoutEXT, rhs.m_indirectCommandsLayoutEXT ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT m_indirectCommandsLayoutEXT = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkIndirectCommandsLayoutNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutNV.html + class IndirectCommandsLayoutNV + { + public: + using CType = VkIndirectCommandsLayoutNV; + using CppType = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + IndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createIndirectCommandsLayoutNV( createInfo, allocator ); + } +# endif + + IndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_indirectCommandsLayoutNV( indirectCommandsLayout ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + IndirectCommandsLayoutNV( std::nullptr_t ) {} + + ~IndirectCommandsLayoutNV() + { + clear(); + } + + IndirectCommandsLayoutNV() = delete; + IndirectCommandsLayoutNV( IndirectCommandsLayoutNV const & ) = delete; + + IndirectCommandsLayoutNV( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_indirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayoutNV, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV const & ) = delete; + + IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_indirectCommandsLayoutNV, rhs.m_indirectCommandsLayoutNV ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV; + } + + operator VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT + { + return 
m_indirectCommandsLayoutNV; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_indirectCommandsLayoutNV ) + { + getDispatcher()->vkDestroyIndirectCommandsLayoutNV( static_cast( m_device ), + static_cast( m_indirectCommandsLayoutNV ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_indirectCommandsLayoutNV = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_indirectCommandsLayoutNV, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_indirectCommandsLayoutNV, rhs.m_indirectCommandsLayoutNV ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV m_indirectCommandsLayoutNV = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkIndirectExecutionSetEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetEXT.html + class IndirectExecutionSetEXT + { + public: + using CType = VkIndirectExecutionSetEXT; + using CppType = VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectExecutionSetEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + IndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createIndirectExecutionSetEXT( createInfo, allocator ); + } +# endif + + IndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkIndirectExecutionSetEXT indirectExecutionSet, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_indirectExecutionSetEXT( indirectExecutionSet ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + IndirectExecutionSetEXT( std::nullptr_t ) {} + + ~IndirectExecutionSetEXT() + { + clear(); + } + + IndirectExecutionSetEXT() = delete; + IndirectExecutionSetEXT( IndirectExecutionSetEXT const & ) = delete; + + IndirectExecutionSetEXT( IndirectExecutionSetEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_indirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectExecutionSetEXT, {} ) ) + , m_allocator( 
VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + IndirectExecutionSetEXT & operator=( IndirectExecutionSetEXT const & ) = delete; + + IndirectExecutionSetEXT & operator=( IndirectExecutionSetEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_indirectExecutionSetEXT, rhs.m_indirectExecutionSetEXT ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_indirectExecutionSetEXT; + } + + operator VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT() const VULKAN_HPP_NOEXCEPT + { + return m_indirectExecutionSetEXT; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_indirectExecutionSetEXT ) + { + getDispatcher()->vkDestroyIndirectExecutionSetEXT( static_cast( m_device ), + static_cast( m_indirectExecutionSetEXT ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_indirectExecutionSetEXT = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_indirectExecutionSetEXT, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectExecutionSetEXT & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_indirectExecutionSetEXT, rhs.m_indirectExecutionSetEXT ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_EXT_device_generated_commands === + + // wrapper function for command vkUpdateIndirectExecutionSetPipelineEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateIndirectExecutionSetPipelineEXT.html + void updatePipeline( VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites ) const + VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkUpdateIndirectExecutionSetShaderEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateIndirectExecutionSetShaderEXT.html + void updateShader( VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites ) const + VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT m_indirectExecutionSetEXT = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapEXT.html + class MicromapEXT + { + public: + using CType = VkMicromapEXT; + using CppType = VULKAN_HPP_NAMESPACE::MicromapEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + MicromapEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createMicromapEXT( createInfo, allocator ); + } +# endif + + MicromapEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkMicromapEXT micromap, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_micromapEXT( micromap ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + MicromapEXT( std::nullptr_t ) {} + + ~MicromapEXT() + { + clear(); + } + + MicromapEXT() = delete; + MicromapEXT( MicromapEXT const & ) = delete; + + MicromapEXT( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_micromapEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_micromapEXT, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + MicromapEXT & operator=( MicromapEXT const & ) = delete; + + MicromapEXT & operator=( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_micromapEXT, rhs.m_micromapEXT ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::MicromapEXT const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_micromapEXT; + } + + operator VULKAN_HPP_NAMESPACE::MicromapEXT() const VULKAN_HPP_NOEXCEPT + { + return m_micromapEXT; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_micromapEXT ) + { + getDispatcher()->vkDestroyMicromapEXT( + static_cast( m_device ), static_cast( m_micromapEXT ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_micromapEXT = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::MicromapEXT release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_micromapEXT, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::MicromapEXT & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_micromapEXT, rhs.m_micromapEXT ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::MicromapEXT m_micromapEXT = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkOpticalFlowSessionNV, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowSessionNV.html + class OpticalFlowSessionNV + { + public: + using CType = VkOpticalFlowSessionNV; + using CppType = VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + OpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createOpticalFlowSessionNV( createInfo, allocator ); + } +# endif + + OpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkOpticalFlowSessionNV session, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_opticalFlowSessionNV( session ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + OpticalFlowSessionNV( std::nullptr_t ) {} + + ~OpticalFlowSessionNV() + { + clear(); + } + + OpticalFlowSessionNV() = delete; + OpticalFlowSessionNV( OpticalFlowSessionNV const & ) = delete; + + OpticalFlowSessionNV( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_opticalFlowSessionNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_opticalFlowSessionNV, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + OpticalFlowSessionNV & operator=( OpticalFlowSessionNV const & ) = delete; + + OpticalFlowSessionNV & operator=( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_opticalFlowSessionNV, rhs.m_opticalFlowSessionNV ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_opticalFlowSessionNV; + } + + operator VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV() const VULKAN_HPP_NOEXCEPT + { + return m_opticalFlowSessionNV; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_opticalFlowSessionNV ) + { + getDispatcher()->vkDestroyOpticalFlowSessionNV( static_cast( m_device ), + static_cast( m_opticalFlowSessionNV ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_opticalFlowSessionNV = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_opticalFlowSessionNV, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + 
std::swap( m_opticalFlowSessionNV, rhs.m_opticalFlowSessionNV ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_NV_optical_flow === + + // wrapper function for command vkBindOpticalFlowSessionImageNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindOpticalFlowSessionImageNV.html + void bindImage( VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, + VULKAN_HPP_NAMESPACE::ImageView view, + VULKAN_HPP_NAMESPACE::ImageLayout layout ) const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV m_opticalFlowSessionNV = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkPerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceConfigurationINTEL.html + class PerformanceConfigurationINTEL + { + public: + using CType = VkPerformanceConfigurationINTEL; + using CppType = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + PerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) + { + *this = device.acquirePerformanceConfigurationINTEL( acquireInfo ); + } +# endif + + PerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkPerformanceConfigurationINTEL configuration ) + : m_device( device ), m_performanceConfigurationINTEL( configuration ), m_dispatcher( device.getDispatcher() ) + { + } + + PerformanceConfigurationINTEL( std::nullptr_t ) {} + + ~PerformanceConfigurationINTEL() + { + clear(); + } + + PerformanceConfigurationINTEL() = delete; + PerformanceConfigurationINTEL( PerformanceConfigurationINTEL const & ) = delete; + + PerformanceConfigurationINTEL( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_performanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::exchange( rhs.m_performanceConfigurationINTEL, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL const & ) = delete; + + PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_performanceConfigurationINTEL, rhs.m_performanceConfigurationINTEL ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL; + } + + operator VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT + { + return 
m_performanceConfigurationINTEL; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_performanceConfigurationINTEL ) + { + getDispatcher()->vkReleasePerformanceConfigurationINTEL( static_cast( m_device ), + static_cast( m_performanceConfigurationINTEL ) ); + } + m_device = nullptr; + m_performanceConfigurationINTEL = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL release() + { + m_device = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_performanceConfigurationINTEL, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_performanceConfigurationINTEL, rhs.m_performanceConfigurationINTEL ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL m_performanceConfigurationINTEL = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkPipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCache.html + class PipelineCache + { + public: + using CType = VkPipelineCache; + using CppType = VULKAN_HPP_NAMESPACE::PipelineCache; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + PipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createPipelineCache( createInfo, allocator ); + } +# endif + + PipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkPipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_pipelineCache( pipelineCache ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + PipelineCache( std::nullptr_t ) {} + + ~PipelineCache() + { + clear(); + } + + PipelineCache() = delete; + PipelineCache( PipelineCache const & ) = delete; + + PipelineCache( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_pipelineCache( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineCache, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + PipelineCache & operator=( PipelineCache const & ) = delete; + + PipelineCache & operator=( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( 
m_pipelineCache, rhs.m_pipelineCache ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::PipelineCache const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache; + } + + operator VULKAN_HPP_NAMESPACE::PipelineCache() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_pipelineCache ) + { + getDispatcher()->vkDestroyPipelineCache( static_cast( m_device ), + static_cast( m_pipelineCache ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_pipelineCache = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::PipelineCache release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_pipelineCache, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_pipelineCache, rhs.m_pipelineCache ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkGetPipelineCacheData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineCacheData.html + VULKAN_HPP_NODISCARD std::vector getData() const; + + // wrapper function for command vkMergePipelineCaches, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMergePipelineCaches.html + void merge( VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches ) const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PipelineCache m_pipelineCache = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkPipeline, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipeline.html + class Pipeline + { + public: + using CType = VkPipeline; + using CppType = VULKAN_HPP_NAMESPACE::Pipeline; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createComputePipeline( pipelineCache, createInfo, allocator ); + } +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & 
pipelineCache, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createExecutionGraphPipelineAMDX( pipelineCache, createInfo, allocator ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createGraphicsPipeline( pipelineCache, createInfo, allocator ); + } +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createRayTracingPipelineKHR( deferredOperation, pipelineCache, createInfo, allocator ); + } +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createRayTracingPipelineNV( pipelineCache, createInfo, allocator ); + } +# endif + + Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkPipeline pipeline, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr, + VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess ) + : m_device( device ) + , m_pipeline( pipeline ) + , m_allocator( static_cast( allocator ) ) + , m_constructorSuccessCode( successCode ) + , m_dispatcher( device.getDispatcher() ) + { + } + + Pipeline( std::nullptr_t ) {} + + ~Pipeline() + { + clear(); + } + + Pipeline() = delete; + Pipeline( Pipeline const & ) = delete; + + Pipeline( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_pipeline( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipeline, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + Pipeline & operator=( Pipeline const & ) = delete; + + Pipeline & operator=( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_pipeline, rhs.m_pipeline ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Pipeline const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline; + } + + operator VULKAN_HPP_NAMESPACE::Pipeline() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_pipeline ) + { + getDispatcher()->vkDestroyPipeline( + static_cast( m_device ), static_cast( m_pipeline ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_pipeline = nullptr; + m_allocator = 
nullptr; + m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::Pipeline release() + { + m_device = nullptr; + m_allocator = nullptr; + m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_pipeline, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const + { + return m_constructorSuccessCode; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_pipeline, rhs.m_pipeline ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_AMD_shader_info === + + // wrapper function for command vkGetShaderInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderInfoAMD.html + VULKAN_HPP_NODISCARD std::vector getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + // wrapper function for command vkGetExecutionGraphPipelineScratchSizeAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExecutionGraphPipelineScratchSizeAMDX.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX getExecutionGraphScratchSizeAMDX() const; + + // wrapper function for command vkGetExecutionGraphPipelineNodeIndexAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExecutionGraphPipelineNodeIndexAMDX.html + VULKAN_HPP_NODISCARD uint32_t getExecutionGraphNodeIndexAMDX( const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo ) const; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_ray_tracing_pipeline === + + // wrapper function for command vkGetRayTracingShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesKHR.html + template + VULKAN_HPP_NODISCARD std::vector getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const; + + // wrapper function for command vkGetRayTracingShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesKHR.html + template + VULKAN_HPP_NODISCARD DataType getRayTracingShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const; + + // wrapper function for command vkGetRayTracingCaptureReplayShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingCaptureReplayShaderGroupHandlesKHR.html + template + VULKAN_HPP_NODISCARD std::vector + getRayTracingCaptureReplayShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const; + + // wrapper function for command vkGetRayTracingCaptureReplayShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingCaptureReplayShaderGroupHandlesKHR.html + template + 
VULKAN_HPP_NODISCARD DataType getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const; + + // wrapper function for command vkGetRayTracingShaderGroupStackSizeKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupStackSizeKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize + getRayTracingShaderGroupStackSizeKHR( uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_ray_tracing === + + // wrapper function for command vkGetRayTracingShaderGroupHandlesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesNV.html + template + VULKAN_HPP_NODISCARD std::vector getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const; + + // wrapper function for command vkGetRayTracingShaderGroupHandlesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesNV.html + template + VULKAN_HPP_NODISCARD DataType getRayTracingShaderGroupHandleNV( uint32_t firstGroup, uint32_t groupCount ) const; + + // wrapper function for command vkCompileDeferredNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCompileDeferredNV.html + void compileDeferredNV( uint32_t shader ) const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Pipeline m_pipeline = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Pipelines : public std::vector + { + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createComputePipelines( pipelineCache, createInfos, allocator ); + } +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createExecutionGraphPipelinesAMDX( pipelineCache, createInfos, allocator ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createGraphicsPipelines( pipelineCache, createInfos, allocator ); + } +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + 
VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createRayTracingPipelinesKHR( deferredOperation, pipelineCache, createInfos, allocator ); + } +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createRayTracingPipelinesNV( pipelineCache, createInfos, allocator ); + } +# endif + + Pipelines( std::nullptr_t ) {} + + Pipelines() = delete; + Pipelines( Pipelines const & ) = delete; + Pipelines( Pipelines && rhs ) = default; + Pipelines & operator=( Pipelines const & ) = delete; + Pipelines & operator=( Pipelines && rhs ) = default; + + private: + Pipelines( std::vector && rhs ) + { + std::swap( *this, rhs ); + } + }; + + // wrapper class for handle VkPipelineBinaryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryKHR.html + class PipelineBinaryKHR + { + public: + using CType = VkPipelineBinaryKHR; + using CppType = VULKAN_HPP_NAMESPACE::PipelineBinaryKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineBinaryKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + PipelineBinaryKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkPipelineBinaryKHR pipelineBinary, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr, + VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess ) + : m_device( device ) + , m_pipelineBinaryKHR( pipelineBinary ) + , m_allocator( static_cast( allocator ) ) + , m_constructorSuccessCode( successCode ) + , m_dispatcher( device.getDispatcher() ) + { + } + + PipelineBinaryKHR( std::nullptr_t ) {} + + ~PipelineBinaryKHR() + { + clear(); + } + + PipelineBinaryKHR() = delete; + PipelineBinaryKHR( PipelineBinaryKHR const & ) = delete; + + PipelineBinaryKHR( PipelineBinaryKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_pipelineBinaryKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineBinaryKHR, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + PipelineBinaryKHR & operator=( PipelineBinaryKHR const & ) = delete; + + PipelineBinaryKHR & operator=( PipelineBinaryKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_pipelineBinaryKHR, rhs.m_pipelineBinaryKHR ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineBinaryKHR; + } + + operator VULKAN_HPP_NAMESPACE::PipelineBinaryKHR() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineBinaryKHR; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_pipelineBinaryKHR ) + { + getDispatcher()->vkDestroyPipelineBinaryKHR( static_cast( m_device ), + static_cast( 
m_pipelineBinaryKHR ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_pipelineBinaryKHR = nullptr; + m_allocator = nullptr; + m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR release() + { + m_device = nullptr; + m_allocator = nullptr; + m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_pipelineBinaryKHR, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const + { + return m_constructorSuccessCode; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineBinaryKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_pipelineBinaryKHR, rhs.m_pipelineBinaryKHR ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR m_pipelineBinaryKHR = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class PipelineBinaryKHRs : public std::vector + { + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + PipelineBinaryKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createPipelineBinariesKHR( createInfo, allocator ); + } +# endif + + PipelineBinaryKHRs( std::nullptr_t ) {} + + PipelineBinaryKHRs() = delete; + PipelineBinaryKHRs( PipelineBinaryKHRs const & ) = delete; + PipelineBinaryKHRs( PipelineBinaryKHRs && rhs ) = default; + PipelineBinaryKHRs & operator=( PipelineBinaryKHRs const & ) = delete; + PipelineBinaryKHRs & operator=( PipelineBinaryKHRs && rhs ) = default; + + private: + PipelineBinaryKHRs( std::vector && rhs ) + { + std::swap( *this, rhs ); + } + }; + + // wrapper class for handle VkPipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineLayout.html + class PipelineLayout + { + public: + using CType = VkPipelineLayout; + using CppType = VULKAN_HPP_NAMESPACE::PipelineLayout; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + PipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = 
nullptr ) + { + *this = device.createPipelineLayout( createInfo, allocator ); + } +# endif + + PipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkPipelineLayout pipelineLayout, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_pipelineLayout( pipelineLayout ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + PipelineLayout( std::nullptr_t ) {} + + ~PipelineLayout() + { + clear(); + } + + PipelineLayout() = delete; + PipelineLayout( PipelineLayout const & ) = delete; + + PipelineLayout( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_pipelineLayout( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineLayout, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + PipelineLayout & operator=( PipelineLayout const & ) = delete; + + PipelineLayout & operator=( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_pipelineLayout, rhs.m_pipelineLayout ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::PipelineLayout const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout; + } + + operator VULKAN_HPP_NAMESPACE::PipelineLayout() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_pipelineLayout ) + { + getDispatcher()->vkDestroyPipelineLayout( static_cast( m_device ), + static_cast( m_pipelineLayout ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_pipelineLayout = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::PipelineLayout release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_pipelineLayout, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineLayout & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_pipelineLayout, rhs.m_pipelineLayout ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout m_pipelineLayout = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkPrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPrivateDataSlot.html + class PrivateDataSlot + { + public: + using CType = VkPrivateDataSlot; + using CppType = VULKAN_HPP_NAMESPACE::PrivateDataSlot; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + PrivateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createPrivateDataSlot( createInfo, allocator ); + } +# endif + + PrivateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkPrivateDataSlot privateDataSlot, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_privateDataSlot( privateDataSlot ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + PrivateDataSlot( std::nullptr_t ) {} + + ~PrivateDataSlot() + { + clear(); + } + + PrivateDataSlot() = delete; + PrivateDataSlot( PrivateDataSlot const & ) = delete; + + PrivateDataSlot( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_privateDataSlot( VULKAN_HPP_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + PrivateDataSlot & operator=( PrivateDataSlot const & ) = delete; + + PrivateDataSlot & operator=( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_privateDataSlot, rhs.m_privateDataSlot ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::PrivateDataSlot const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlot; + } + + operator VULKAN_HPP_NAMESPACE::PrivateDataSlot() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlot; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_privateDataSlot ) + { + getDispatcher()->vkDestroyPrivateDataSlot( static_cast( m_device ), + static_cast( m_privateDataSlot ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_privateDataSlot = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::PrivateDataSlot release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_privateDataSlot, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_privateDataSlot, rhs.m_privateDataSlot ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PrivateDataSlot m_privateDataSlot = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static 
VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPool.html + class QueryPool + { + public: + using CType = VkQueryPool; + using CppType = VULKAN_HPP_NAMESPACE::QueryPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + QueryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createQueryPool( createInfo, allocator ); + } +# endif + + QueryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkQueryPool queryPool, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_queryPool( queryPool ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + QueryPool( std::nullptr_t ) {} + + ~QueryPool() + { + clear(); + } + + QueryPool() = delete; + QueryPool( QueryPool const & ) = delete; + + QueryPool( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_queryPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_queryPool, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + QueryPool & operator=( QueryPool const & ) = delete; + + QueryPool & operator=( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_queryPool, rhs.m_queryPool ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::QueryPool const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool; + } + + operator VULKAN_HPP_NAMESPACE::QueryPool() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_queryPool ) + { + getDispatcher()->vkDestroyQueryPool( + static_cast( m_device ), static_cast( m_queryPool ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_queryPool = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::QueryPool release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_queryPool, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::QueryPool & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_queryPool, rhs.m_queryPool ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkGetQueryPoolResults, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueryPoolResults.html + template + VULKAN_HPP_NODISCARD std::pair> + getResults( uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + // wrapper function for command vkGetQueryPoolResults, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueryPoolResults.html + template + VULKAN_HPP_NODISCARD std::pair + getResult( uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_VERSION_1_2 === + + // wrapper function for command vkResetQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetQueryPool.html + void reset( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_host_query_reset === + + // wrapper function for command vkResetQueryPoolEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetQueryPoolEXT.html + void resetEXT( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::QueryPool m_queryPool = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkQueue, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueue.html + class Queue + { + public: + using CType = VkQueue; + using CppType = VULKAN_HPP_NAMESPACE::Queue; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueue; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, uint32_t queueFamilyIndex, uint32_t queueIndex ) + { + *this = device.getQueue( queueFamilyIndex, queueIndex ); + } +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) + { + *this = device.getQueue2( queueInfo ); + } +# endif + + Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkQueue queue ) : m_queue( queue ), m_dispatcher( device.getDispatcher() ) + { + } + + Queue( std::nullptr_t ) {} + + ~Queue() + { + clear(); + } + + Queue() = delete; + + Queue( Queue const & rhs ) : m_queue( rhs.m_queue ), m_dispatcher( rhs.m_dispatcher ) {} + + Queue( Queue && rhs ) VULKAN_HPP_NOEXCEPT + : m_queue( VULKAN_HPP_NAMESPACE::exchange( rhs.m_queue, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + Queue & operator=( Queue const & rhs ) + { + m_queue = rhs.m_queue; + m_dispatcher = rhs.m_dispatcher; + return *this; + } + + Queue & operator=( Queue && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_queue, rhs.m_queue ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } 
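      // Editor's note (not part of the generated header or of this diff): a minimal usage sketch of the
      // RAII Queue wrapper declared here, assuming the default `vk`/`vk::raii` namespaces, an already
      // created vk::raii::Device named `device`, a recorded vk::raii::CommandBuffer named `commandBuffer`,
      // and a hypothetical `graphicsQueueFamilyIndex`. The submit()/waitIdle() members declared further
      // below wrap vkQueueSubmit/vkQueueWaitIdle and throw a vk::SystemError on failure in the default,
      // exception-enabled configuration.
      //
      //   vk::raii::Queue queue = device.getQueue( graphicsQueueFamilyIndex, 0 );
      //   vk::SubmitInfo submitInfo{};
      //   submitInfo.setCommandBuffers( *commandBuffer );
      //   queue.submit( submitInfo );   // the fence argument defaults to a null handle
      //   queue.waitIdle();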
+ + VULKAN_HPP_NAMESPACE::Queue const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_queue; + } + + operator VULKAN_HPP_NAMESPACE::Queue() const VULKAN_HPP_NOEXCEPT + { + return m_queue; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + m_queue = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::Queue release() + { + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_queue, nullptr ); + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Queue & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_queue, rhs.m_queue ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkQueueSubmit, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit.html + void submit( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + // wrapper function for command vkQueueWaitIdle, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueWaitIdle.html + void waitIdle() const; + + // wrapper function for command vkQueueBindSparse, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueBindSparse.html + void bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfo, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_VERSION_1_3 === + + // wrapper function for command vkQueueSubmit2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit2.html + void submit2( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_KHR_swapchain === + + // wrapper function for command vkQueuePresentKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueuePresentKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const; + + //=== VK_EXT_debug_utils === + + // wrapper function for command vkQueueBeginDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueBeginDebugUtilsLabelEXT.html + void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkQueueEndDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueEndDebugUtilsLabelEXT.html + void endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkQueueInsertDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueInsertDebugUtilsLabelEXT.html + void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_device_diagnostic_checkpoints === + + // wrapper function for command vkGetQueueCheckpointDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointDataNV.html + VULKAN_HPP_NODISCARD std::vector getCheckpointDataNV() const; + + // wrapper function for command vkGetQueueCheckpointData2NV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointData2NV.html + VULKAN_HPP_NODISCARD std::vector getCheckpointData2NV() 
const; + + //=== VK_INTEL_performance_query === + + // wrapper function for command vkQueueSetPerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSetPerformanceConfigurationINTEL.html + void setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const; + + //=== VK_KHR_synchronization2 === + + // wrapper function for command vkQueueSubmit2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit2KHR.html + void submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_NV_low_latency2 === + + // wrapper function for command vkQueueNotifyOutOfBandNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueNotifyOutOfBandNV.html + void notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo ) const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Queue m_queue = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPass.html + class RenderPass + { + public: + using CType = VkRenderPass; + using CppType = VULKAN_HPP_NAMESPACE::RenderPass; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createRenderPass( createInfo, allocator ); + } +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createRenderPass2( createInfo, allocator ); + } +# endif + + RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkRenderPass renderPass, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_renderPass( renderPass ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + RenderPass( std::nullptr_t ) {} + + ~RenderPass() + { + clear(); + } + + RenderPass() = delete; + RenderPass( RenderPass const & ) = delete; + + RenderPass( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_renderPass( VULKAN_HPP_NAMESPACE::exchange( rhs.m_renderPass, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + RenderPass & operator=( RenderPass const & ) = delete; + + RenderPass & operator=( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_renderPass, 
rhs.m_renderPass ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::RenderPass const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass; + } + + operator VULKAN_HPP_NAMESPACE::RenderPass() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_renderPass ) + { + getDispatcher()->vkDestroyRenderPass( + static_cast( m_device ), static_cast( m_renderPass ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_renderPass = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::RenderPass release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_renderPass, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_renderPass, rhs.m_renderPass ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkGetRenderAreaGranularity, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderAreaGranularity.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity() const VULKAN_HPP_NOEXCEPT; + + //=== VK_HUAWEI_subpass_shading === + + // wrapper function for command vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getSubpassShadingMaxWorkgroupSizeHUAWEI() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::RenderPass m_renderPass = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkSampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSampler.html + class Sampler + { + public: + using CType = VkSampler; + using CppType = VULKAN_HPP_NAMESPACE::Sampler; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createSampler( createInfo, allocator ); + } +# endif + + Sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkSampler sampler, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_sampler( 
sampler ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + Sampler( std::nullptr_t ) {} + + ~Sampler() + { + clear(); + } + + Sampler() = delete; + Sampler( Sampler const & ) = delete; + + Sampler( Sampler && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_sampler( VULKAN_HPP_NAMESPACE::exchange( rhs.m_sampler, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + Sampler & operator=( Sampler const & ) = delete; + + Sampler & operator=( Sampler && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_sampler, rhs.m_sampler ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Sampler const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_sampler; + } + + operator VULKAN_HPP_NAMESPACE::Sampler() const VULKAN_HPP_NOEXCEPT + { + return m_sampler; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_sampler ) + { + getDispatcher()->vkDestroySampler( + static_cast( m_device ), static_cast( m_sampler ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_sampler = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::Sampler release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_sampler, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Sampler & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_sampler, rhs.m_sampler ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Sampler m_sampler = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkSamplerYcbcrConversion, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerYcbcrConversion.html + class SamplerYcbcrConversion + { + public: + using CType = VkSamplerYcbcrConversion; + using CppType = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + SamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = 
device.createSamplerYcbcrConversion( createInfo, allocator ); + } +# endif + + SamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkSamplerYcbcrConversion ycbcrConversion, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_samplerYcbcrConversion( ycbcrConversion ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + SamplerYcbcrConversion( std::nullptr_t ) {} + + ~SamplerYcbcrConversion() + { + clear(); + } + + SamplerYcbcrConversion() = delete; + SamplerYcbcrConversion( SamplerYcbcrConversion const & ) = delete; + + SamplerYcbcrConversion( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_samplerYcbcrConversion( VULKAN_HPP_NAMESPACE::exchange( rhs.m_samplerYcbcrConversion, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + SamplerYcbcrConversion & operator=( SamplerYcbcrConversion const & ) = delete; + + SamplerYcbcrConversion & operator=( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_samplerYcbcrConversion, rhs.m_samplerYcbcrConversion ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion; + } + + operator VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_samplerYcbcrConversion ) + { + getDispatcher()->vkDestroySamplerYcbcrConversion( static_cast( m_device ), + static_cast( m_samplerYcbcrConversion ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_samplerYcbcrConversion = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_samplerYcbcrConversion, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_samplerYcbcrConversion, rhs.m_samplerYcbcrConversion ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion m_samplerYcbcrConversion = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphore.html + class Semaphore + { + public: + using CType = 
VkSemaphore; + using CppType = VULKAN_HPP_NAMESPACE::Semaphore; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createSemaphore( createInfo, allocator ); + } +# endif + + Semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkSemaphore semaphore, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_semaphore( semaphore ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + Semaphore( std::nullptr_t ) {} + + ~Semaphore() + { + clear(); + } + + Semaphore() = delete; + Semaphore( Semaphore const & ) = delete; + + Semaphore( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_semaphore( VULKAN_HPP_NAMESPACE::exchange( rhs.m_semaphore, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + Semaphore & operator=( Semaphore const & ) = delete; + + Semaphore & operator=( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_semaphore, rhs.m_semaphore ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Semaphore const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore; + } + + operator VULKAN_HPP_NAMESPACE::Semaphore() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_semaphore ) + { + getDispatcher()->vkDestroySemaphore( + static_cast( m_device ), static_cast( m_semaphore ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_semaphore = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::Semaphore release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_semaphore, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Semaphore & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_semaphore, rhs.m_semaphore ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_2 === + + // wrapper function for command vkGetSemaphoreCounterValue, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreCounterValue.html + VULKAN_HPP_NODISCARD uint64_t getCounterValue() const; + + //=== VK_KHR_timeline_semaphore === + + // wrapper function for command vkGetSemaphoreCounterValueKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreCounterValueKHR.html + VULKAN_HPP_NODISCARD uint64_t getCounterValueKHR() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Semaphore m_semaphore = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkShaderEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderEXT.html + class ShaderEXT + { + public: + using CType = VkShaderEXT; + using CppType = VULKAN_HPP_NAMESPACE::ShaderEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + ShaderEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createShaderEXT( createInfo, allocator ); + } +# endif + + ShaderEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkShaderEXT shader, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr, + VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess ) + : m_device( device ) + , m_shaderEXT( shader ) + , m_allocator( static_cast( allocator ) ) + , m_constructorSuccessCode( successCode ) + , m_dispatcher( device.getDispatcher() ) + { + } + + ShaderEXT( std::nullptr_t ) {} + + ~ShaderEXT() + { + clear(); + } + + ShaderEXT() = delete; + ShaderEXT( ShaderEXT const & ) = delete; + + ShaderEXT( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_shaderEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_shaderEXT, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + ShaderEXT & operator=( ShaderEXT const & ) = delete; + + ShaderEXT & operator=( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_shaderEXT, rhs.m_shaderEXT ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::ShaderEXT const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT; + } + + operator VULKAN_HPP_NAMESPACE::ShaderEXT() const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_shaderEXT ) + { + getDispatcher()->vkDestroyShaderEXT( + static_cast( m_device ), static_cast( m_shaderEXT ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_shaderEXT = nullptr; + m_allocator = nullptr; + m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::ShaderEXT release() + { + m_device = 
nullptr; + m_allocator = nullptr; + m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_shaderEXT, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const + { + return m_constructorSuccessCode; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_shaderEXT, rhs.m_shaderEXT ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_EXT_shader_object === + + // wrapper function for command vkGetShaderBinaryDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderBinaryDataEXT.html + VULKAN_HPP_NODISCARD std::vector getBinaryData() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ShaderEXT m_shaderEXT = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class ShaderEXTs : public std::vector + { + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + ShaderEXTs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createShadersEXT( createInfos, allocator ); + } +# endif + + ShaderEXTs( std::nullptr_t ) {} + + ShaderEXTs() = delete; + ShaderEXTs( ShaderEXTs const & ) = delete; + ShaderEXTs( ShaderEXTs && rhs ) = default; + ShaderEXTs & operator=( ShaderEXTs const & ) = delete; + ShaderEXTs & operator=( ShaderEXTs && rhs ) = default; + + private: + ShaderEXTs( std::vector && rhs ) + { + std::swap( *this, rhs ); + } + }; + + // wrapper class for handle VkShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderModule.html + class ShaderModule + { + public: + using CType = VkShaderModule; + using CppType = VULKAN_HPP_NAMESPACE::ShaderModule; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + ShaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createShaderModule( createInfo, allocator ); + } +# endif + + ShaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkShaderModule shaderModule, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : 
m_device( device ) + , m_shaderModule( shaderModule ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + ShaderModule( std::nullptr_t ) {} + + ~ShaderModule() + { + clear(); + } + + ShaderModule() = delete; + ShaderModule( ShaderModule const & ) = delete; + + ShaderModule( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_shaderModule( VULKAN_HPP_NAMESPACE::exchange( rhs.m_shaderModule, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + ShaderModule & operator=( ShaderModule const & ) = delete; + + ShaderModule & operator=( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_shaderModule, rhs.m_shaderModule ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::ShaderModule const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule; + } + + operator VULKAN_HPP_NAMESPACE::ShaderModule() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_shaderModule ) + { + getDispatcher()->vkDestroyShaderModule( + static_cast( m_device ), static_cast( m_shaderModule ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_shaderModule = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::ShaderModule release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_shaderModule, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderModule & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_shaderModule, rhs.m_shaderModule ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_EXT_shader_module_identifier === + + // wrapper function for command vkGetShaderModuleIdentifierEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderModuleIdentifierEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getIdentifierEXT() const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ShaderModule m_shaderModule = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceKHR.html + class SurfaceKHR + { + public: + using CType = VkSurfaceKHR; + using CppType = VULKAN_HPP_NAMESPACE::SurfaceKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createAndroidSurfaceKHR( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createDirectFBSurfaceEXT( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createDisplayPlaneSurfaceKHR( createInfo, allocator ); + } +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createHeadlessSurfaceEXT( createInfo, allocator ); + } +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_IOS_MVK ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createIOSSurfaceMVK( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_IOS_MVK*/ +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_FUCHSIA ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createImagePipeSurfaceFUCHSIA( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createMacOSSurfaceMVK( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_METAL_EXT ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createMetalSurfaceEXT( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( 
VK_USE_PLATFORM_SCREEN_QNX ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createScreenSurfaceQNX( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_GGP ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createStreamDescriptorSurfaceGGP( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_GGP*/ +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_VI_NN ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createViSurfaceNN( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_VI_NN*/ +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createWaylandSurfaceKHR( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createWin32SurfaceKHR( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_XCB_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createXcbSurfaceKHR( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_XCB_KHR*/ +# endif + +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createXlibSurfaceKHR( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ +# endif + + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VkSurfaceKHR surface, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( instance ) + , m_surfaceKHR( surface ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + } + + SurfaceKHR( std::nullptr_t ) {} + + ~SurfaceKHR() + { + clear(); + } + + SurfaceKHR() = delete; + SurfaceKHR( SurfaceKHR const & ) = delete; + + SurfaceKHR( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_instance( VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ) ) + , 
m_surfaceKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_surfaceKHR, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + SurfaceKHR & operator=( SurfaceKHR const & ) = delete; + + SurfaceKHR & operator=( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_instance, rhs.m_instance ); + std::swap( m_surfaceKHR, rhs.m_surfaceKHR ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::SurfaceKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR; + } + + operator VULKAN_HPP_NAMESPACE::SurfaceKHR() const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_surfaceKHR ) + { + getDispatcher()->vkDestroySurfaceKHR( + static_cast( m_instance ), static_cast( m_surfaceKHR ), reinterpret_cast( m_allocator ) ); + } + m_instance = nullptr; + m_surfaceKHR = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::SurfaceKHR release() + { + m_instance = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_surfaceKHR, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Instance getInstance() const + { + return m_instance; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_instance, rhs.m_instance ); + std::swap( m_surfaceKHR, rhs.m_surfaceKHR ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR m_surfaceKHR = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkSwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainKHR.html + class SwapchainKHR + { + public: + using CType = VkSwapchainKHR; + using CppType = VULKAN_HPP_NAMESPACE::SwapchainKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + SwapchainKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createSwapchainKHR( createInfo, allocator ); + } +# endif + + SwapchainKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkSwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_swapchainKHR( swapchain ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( 
device.getDispatcher() ) + { + } + + SwapchainKHR( std::nullptr_t ) {} + + ~SwapchainKHR() + { + clear(); + } + + SwapchainKHR() = delete; + SwapchainKHR( SwapchainKHR const & ) = delete; + + SwapchainKHR( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_swapchainKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_swapchainKHR, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + SwapchainKHR & operator=( SwapchainKHR const & ) = delete; + + SwapchainKHR & operator=( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_swapchainKHR, rhs.m_swapchainKHR ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::SwapchainKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR; + } + + operator VULKAN_HPP_NAMESPACE::SwapchainKHR() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_swapchainKHR ) + { + getDispatcher()->vkDestroySwapchainKHR( + static_cast( m_device ), static_cast( m_swapchainKHR ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_swapchainKHR = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::SwapchainKHR release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_swapchainKHR, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_swapchainKHR, rhs.m_swapchainKHR ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_KHR_swapchain === + + // wrapper function for command vkGetSwapchainImagesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainImagesKHR.html + VULKAN_HPP_NODISCARD std::vector getImages() const; + + // wrapper function for command vkAcquireNextImageKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireNextImageKHR.html + VULKAN_HPP_NODISCARD std::pair + acquireNextImage( uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_EXT_display_control === + + // wrapper function for command vkGetSwapchainCounterEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainCounterEXT.html + VULKAN_HPP_NODISCARD uint64_t getCounterEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter ) const; + + //=== VK_GOOGLE_display_timing === + + // wrapper function for command vkGetRefreshCycleDurationGOOGLE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRefreshCycleDurationGOOGLE.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE getRefreshCycleDurationGOOGLE() const; + + // wrapper function for command 
vkGetPastPresentationTimingGOOGLE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPastPresentationTimingGOOGLE.html + VULKAN_HPP_NODISCARD std::vector getPastPresentationTimingGOOGLE() const; + + //=== VK_KHR_shared_presentable_image === + + // wrapper function for command vkGetSwapchainStatusKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainStatusKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const; + + //=== VK_AMD_display_native_hdr === + + // wrapper function for command vkSetLocalDimmingAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLocalDimmingAMD.html + void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_present_wait === + + // wrapper function for command vkWaitForPresentKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForPresentKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForPresent( uint64_t presentId, uint64_t timeout ) const; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + // wrapper function for command vkAcquireFullScreenExclusiveModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireFullScreenExclusiveModeEXT.html + void acquireFullScreenExclusiveModeEXT() const; + + // wrapper function for command vkReleaseFullScreenExclusiveModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseFullScreenExclusiveModeEXT.html + void releaseFullScreenExclusiveModeEXT() const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_NV_low_latency2 === + + // wrapper function for command vkSetLatencySleepModeNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLatencySleepModeNV.html + void setLatencySleepModeNV( const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo ) const; + + // wrapper function for command vkLatencySleepNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkLatencySleepNV.html + void latencySleepNV( const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkSetLatencyMarkerNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLatencyMarkerNV.html + void setLatencyMarkerNV( const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT; + + // wrapper function for command vkGetLatencyTimingsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetLatencyTimingsNV.html + VULKAN_HPP_NODISCARD std::vector getLatencyTimingsNV() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR m_swapchainKHR = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class SwapchainKHRs : public std::vector + { + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + SwapchainKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createSharedSwapchainsKHR( createInfos, allocator ); + } +# endif + + SwapchainKHRs( std::nullptr_t ) {} + + SwapchainKHRs() = 
delete; + SwapchainKHRs( SwapchainKHRs const & ) = delete; + SwapchainKHRs( SwapchainKHRs && rhs ) = default; + SwapchainKHRs & operator=( SwapchainKHRs const & ) = delete; + SwapchainKHRs & operator=( SwapchainKHRs && rhs ) = default; + + private: + SwapchainKHRs( std::vector && rhs ) + { + std::swap( *this, rhs ); + } + }; + + // wrapper class for handle VkValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkValidationCacheEXT.html + class ValidationCacheEXT + { + public: + using CType = VkValidationCacheEXT; + using CppType = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + ValidationCacheEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createValidationCacheEXT( createInfo, allocator ); + } +# endif + + ValidationCacheEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkValidationCacheEXT validationCache, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_validationCacheEXT( validationCache ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + ValidationCacheEXT( std::nullptr_t ) {} + + ~ValidationCacheEXT() + { + clear(); + } + + ValidationCacheEXT() = delete; + ValidationCacheEXT( ValidationCacheEXT const & ) = delete; + + ValidationCacheEXT( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_validationCacheEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_validationCacheEXT, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + ValidationCacheEXT & operator=( ValidationCacheEXT const & ) = delete; + + ValidationCacheEXT & operator=( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_validationCacheEXT, rhs.m_validationCacheEXT ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::ValidationCacheEXT const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT; + } + + operator VULKAN_HPP_NAMESPACE::ValidationCacheEXT() const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_validationCacheEXT ) + { + getDispatcher()->vkDestroyValidationCacheEXT( static_cast( m_device ), + static_cast( m_validationCacheEXT ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_validationCacheEXT = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::ValidationCacheEXT release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_validationCacheEXT, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + 
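      // Editor's note (not part of the generated header or of this diff): a minimal usage sketch of the
      // RAII ValidationCacheEXT wrapper declared here, assuming the default `vk`/`vk::raii` namespaces and
      // a vk::raii::Device named `device` created with the VK_EXT_validation_cache extension enabled;
      // `srcCache` is a hypothetical second cache. getData()/merge(), declared further below, wrap
      // vkGetValidationCacheDataEXT/vkMergeValidationCachesEXT.
      //
      //   vk::ValidationCacheCreateInfoEXT cacheCreateInfo{};                 // empty cache, no seed data
      //   vk::raii::ValidationCacheEXT     cache( device, cacheCreateInfo );
      //   // ... create shader modules with the cache attached via their pNext chains ...
      //   std::vector<uint8_t> blob = cache.getData();                        // serialize for later reuse
      //   cache.merge( { *srcCache } );                                       // fold another cache into this one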
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_validationCacheEXT, rhs.m_validationCacheEXT ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_EXT_validation_cache === + + // wrapper function for command vkMergeValidationCachesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMergeValidationCachesEXT.html + void merge( VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches ) const; + + // wrapper function for command vkGetValidationCacheDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetValidationCacheDataEXT.html + VULKAN_HPP_NODISCARD std::vector getData() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ValidationCacheEXT m_validationCacheEXT = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionKHR.html + class VideoSessionKHR + { + public: + using CType = VkVideoSessionKHR; + using CppType = VULKAN_HPP_NAMESPACE::VideoSessionKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + VideoSessionKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createVideoSessionKHR( createInfo, allocator ); + } +# endif + + VideoSessionKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkVideoSessionKHR videoSession, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_videoSessionKHR( videoSession ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + VideoSessionKHR( std::nullptr_t ) {} + + ~VideoSessionKHR() + { + clear(); + } + + VideoSessionKHR() = delete; + VideoSessionKHR( VideoSessionKHR const & ) = delete; + + VideoSessionKHR( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_videoSessionKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_videoSessionKHR, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + VideoSessionKHR & operator=( VideoSessionKHR const & ) = delete; + + VideoSessionKHR & operator=( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_videoSessionKHR, rhs.m_videoSessionKHR ); + std::swap( 
m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::VideoSessionKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR; + } + + operator VULKAN_HPP_NAMESPACE::VideoSessionKHR() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_videoSessionKHR ) + { + getDispatcher()->vkDestroyVideoSessionKHR( static_cast( m_device ), + static_cast( m_videoSessionKHR ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_videoSessionKHR = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::VideoSessionKHR release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_videoSessionKHR, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_videoSessionKHR, rhs.m_videoSessionKHR ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_KHR_video_queue === + + // wrapper function for command vkGetVideoSessionMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetVideoSessionMemoryRequirementsKHR.html + VULKAN_HPP_NODISCARD std::vector getMemoryRequirements() const; + + // wrapper function for command vkBindVideoSessionMemoryKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindVideoSessionMemoryKHR.html + void bindMemory( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindSessionMemoryInfos ) const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::VideoSessionKHR m_videoSessionKHR = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // wrapper class for handle VkVideoSessionParametersKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionParametersKHR.html + class VideoSessionParametersKHR + { + public: + using CType = VkVideoSessionParametersKHR; + using CppType = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + VideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createVideoSessionParametersKHR( createInfo, allocator ); + } +# endif + + VideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const 
& device, + VkVideoSessionParametersKHR videoSessionParameters, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_videoSessionParametersKHR( videoSessionParameters ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + VideoSessionParametersKHR( std::nullptr_t ) {} + + ~VideoSessionParametersKHR() + { + clear(); + } + + VideoSessionParametersKHR() = delete; + VideoSessionParametersKHR( VideoSessionParametersKHR const & ) = delete; + + VideoSessionParametersKHR( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_videoSessionParametersKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_videoSessionParametersKHR, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + VideoSessionParametersKHR & operator=( VideoSessionParametersKHR const & ) = delete; + + VideoSessionParametersKHR & operator=( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_videoSessionParametersKHR, rhs.m_videoSessionParametersKHR ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR; + } + + operator VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_videoSessionParametersKHR ) + { + getDispatcher()->vkDestroyVideoSessionParametersKHR( static_cast( m_device ), + static_cast( m_videoSessionParametersKHR ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_videoSessionParametersKHR = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_videoSessionParametersKHR, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_videoSessionParametersKHR, rhs.m_videoSessionParametersKHR ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_KHR_video_queue === + + // wrapper function for command vkUpdateVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateVideoSessionParametersKHR.html + void update( const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR m_videoSessionParametersKHR = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct 
isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // operators to compare VULKAN_HPP_NAMESPACE::raii-handles +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + template ::value, bool>::type = 0> + auto operator<=>( T const & a, T const & b ) VULKAN_HPP_NOEXCEPT + { + return *a <=> *b; + } +# else + template ::value, bool>::type = 0> + bool operator==( T const & a, T const & b ) VULKAN_HPP_NOEXCEPT + { + return *a == *b; + } + + template ::value, bool>::type = 0> + bool operator!=( T const & a, T const & b ) VULKAN_HPP_NOEXCEPT + { + return *a != *b; + } + + template ::value, bool>::type = 0> + bool operator<( T const & a, T const & b ) VULKAN_HPP_NOEXCEPT + { + return *a < *b; + } +# endif + + template ::value, bool>::type = 0> + bool operator==( const T & v, std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + return !*v; + } + + template ::value, bool>::type = 0> + bool operator==( std::nullptr_t, const T & v ) VULKAN_HPP_NOEXCEPT + { + return !*v; + } + + template ::value, bool>::type = 0> + bool operator!=( const T & v, std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + return *v; + } + + template ::value, bool>::type = 0> + bool operator!=( std::nullptr_t, const T & v ) VULKAN_HPP_NOEXCEPT + { + return *v; + } + + //=========================== + //=== COMMAND Definitions === + //=========================== + + //=== VK_VERSION_1_0 === + + // wrapper function for command vkCreateInstance, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateInstance.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Context::createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Instance instance; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateInstance( + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &instance ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Context::createInstance" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance( *this, *reinterpret_cast( &instance ), allocator ); + } + + // wrapper function for command vkEnumeratePhysicalDevices, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDevices.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + Instance::enumeratePhysicalDevices() const + { + std::vector physicalDevices; + uint32_t physicalDeviceCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkEnumeratePhysicalDevices( static_cast( m_instance ), &physicalDeviceCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount ) + { + physicalDevices.resize( physicalDeviceCount ); + result = static_cast( getDispatcher()->vkEnumeratePhysicalDevices( + static_cast( m_instance ), &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncomplete ) ) + 
{ +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::enumeratePhysicalDevices" ); +# endif + } + + std::vector physicalDevicesRAII; + physicalDevicesRAII.reserve( physicalDevices.size() ); + for ( auto & physicalDevice : physicalDevices ) + { + physicalDevicesRAII.emplace_back( *this, *reinterpret_cast( &physicalDevice ) ); + } + return physicalDevicesRAII; + } + + // wrapper function for command vkGetPhysicalDeviceFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures PhysicalDevice::getFeatures() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures && "Function requires " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features; + getDispatcher()->vkGetPhysicalDeviceFeatures( static_cast( m_physicalDevice ), + reinterpret_cast( &features ) ); + + return features; + } + + // wrapper function for command vkGetPhysicalDeviceFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties + PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties && "Function requires " ); + + VULKAN_HPP_NAMESPACE::FormatProperties formatProperties; + getDispatcher()->vkGetPhysicalDeviceFormatProperties( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return formatProperties; + } + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties + PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceImageFormatProperties( static_cast( m_physicalDevice ), + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + reinterpret_cast( &imageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); + + return imageFormatProperties; + } + + // wrapper function for command vkGetPhysicalDeviceProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties PhysicalDevice::getProperties() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties && "Function requires " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties; + getDispatcher()->vkGetPhysicalDeviceProperties( static_cast( 
m_physicalDevice ), + reinterpret_cast( &properties ) ); + + return properties; + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties && + "Function requires " ); + + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties( static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties( static_cast( m_physicalDevice ), + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties && "Function requires " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties; + getDispatcher()->vkGetPhysicalDeviceMemoryProperties( static_cast( m_physicalDevice ), + reinterpret_cast( &memoryProperties ) ); + + return memoryProperties; + } + + // wrapper function for command vkGetInstanceProcAddr, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetInstanceProcAddr.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PFN_VoidFunction Instance::getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetInstanceProcAddr && "Function requires " ); + + PFN_vkVoidFunction result = getDispatcher()->vkGetInstanceProcAddr( static_cast( m_instance ), name.c_str() ); + + return result; + } + + // wrapper function for command vkGetDeviceProcAddr, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceProcAddr.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PFN_VoidFunction Device::getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceProcAddr && "Function requires " ); + + PFN_vkVoidFunction result = getDispatcher()->vkGetDeviceProcAddr( static_cast( m_device ), name.c_str() ); + + return result; + } + + // wrapper function for command vkCreateDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDevice.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + PhysicalDevice::createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Device device; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateDevice( + static_cast( m_physicalDevice ), + reinterpret_cast( &createInfo 
), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &device ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "PhysicalDevice::createDevice" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device( *this, *reinterpret_cast( &device ), allocator ); + } + + // wrapper function for command vkEnumerateInstanceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceExtensionProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Context::enumerateInstanceExtensionProperties( Optional layerName ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateInstanceExtensionProperties && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkEnumerateInstanceExtensionProperties( + layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceExtensionProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + // wrapper function for command vkEnumerateDeviceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceExtensionProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateDeviceExtensionProperties && "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkEnumerateDeviceExtensionProperties( + static_cast( m_physicalDevice ), layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + getDispatcher()->vkEnumerateDeviceExtensionProperties( static_cast( m_physicalDevice ), + layerName ? 
layerName->c_str() : nullptr, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + // wrapper function for command vkEnumerateInstanceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceLayerProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Context::enumerateInstanceLayerProperties() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateInstanceLayerProperties && "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + getDispatcher()->vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceLayerProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + // wrapper function for command vkEnumerateDeviceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceLayerProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::enumerateDeviceLayerProperties() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateDeviceLayerProperties && "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkEnumerateDeviceLayerProperties( static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkEnumerateDeviceLayerProperties( + static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + // wrapper function for command vkGetDeviceQueue, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceQueue.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Queue queue; + getDispatcher()->vkGetDeviceQueue( static_cast( m_device ), queueFamilyIndex, queueIndex, reinterpret_cast( &queue ) ); + + return 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Queue( *this, *reinterpret_cast( &queue ) ); + } + + // wrapper function for command vkQueueSubmit, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit.html + VULKAN_HPP_INLINE void Queue::submit( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkQueueSubmit( + static_cast( m_queue ), submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); + } + + // wrapper function for command vkQueueWaitIdle, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueWaitIdle.html + VULKAN_HPP_INLINE void Queue::waitIdle() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueWaitIdle && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkQueueWaitIdle( static_cast( m_queue ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); + } + + // wrapper function for command vkDeviceWaitIdle, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDeviceWaitIdle.html + VULKAN_HPP_INLINE void Device::waitIdle() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkDeviceWaitIdle && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkDeviceWaitIdle( static_cast( m_device ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); + } + + // wrapper function for command vkAllocateMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateMemory.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DeviceMemory memory; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkAllocateMemory( + static_cast( m_device ), + reinterpret_cast( &allocateInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &memory ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::allocateMemory" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceMemory( *this, *reinterpret_cast( &memory ), allocator ); + } + + // wrapper function for command vkMapMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * DeviceMemory::mapMemory( VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkMapMemory && "Function requires " ); + + void * pData; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkMapMemory( static_cast( m_device ), + static_cast( m_deviceMemory ), + static_cast( offset ), + static_cast( size ), + static_cast( flags ), + &pData ) ); + 
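// [Editorial note] Illustrative sketch of the map/copy/unmap pattern these wrappers support,
// not part of the generated header; `device`, `allocInfo` and `bytes` are hypothetical placeholders.
//
//   vk::raii::DeviceMemory memory = device.allocateMemory( allocInfo );
//   void * data = memory.mapMemory( 0, allocInfo.allocationSize );
//   std::memcpy( data, bytes.data(), bytes.size() );
//   memory.unmapMemory();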
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::mapMemory" ); + + return pData; + } + + // wrapper function for command vkUnmapMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory.html + VULKAN_HPP_INLINE void DeviceMemory::unmapMemory() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUnmapMemory && "Function requires " ); + + getDispatcher()->vkUnmapMemory( static_cast( m_device ), static_cast( m_deviceMemory ) ); + } + + // wrapper function for command vkFlushMappedMemoryRanges, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFlushMappedMemoryRanges.html + VULKAN_HPP_INLINE void + Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkFlushMappedMemoryRanges && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkFlushMappedMemoryRanges( + static_cast( m_device ), memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); + } + + // wrapper function for command vkInvalidateMappedMemoryRanges, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkInvalidateMappedMemoryRanges.html + VULKAN_HPP_INLINE void + Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkInvalidateMappedMemoryRanges && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkInvalidateMappedMemoryRanges( + static_cast( m_device ), memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); + } + + // wrapper function for command vkGetDeviceMemoryCommitment, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryCommitment.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize DeviceMemory::getCommitment() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMemoryCommitment && "Function requires " ); + + VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes; + getDispatcher()->vkGetDeviceMemoryCommitment( + static_cast( m_device ), static_cast( m_deviceMemory ), reinterpret_cast( &committedMemoryInBytes ) ); + + return committedMemoryInBytes; + } + + // wrapper function for command vkBindBufferMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory.html + VULKAN_HPP_INLINE void Buffer::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkBindBufferMemory( static_cast( m_device ), + static_cast( m_buffer ), + static_cast( memory ), + static_cast( memoryOffset ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Buffer::bindMemory" ); + } + + // wrapper function for command vkBindImageMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory.html + VULKAN_HPP_INLINE void Image::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const + { + 
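// [Editorial note] Illustrative allocate-and-bind flow, not part of the generated header;
// `device`, `image` and `memoryTypeIndex` are hypothetical placeholders.
//
//   vk::MemoryRequirements reqs   = image.getMemoryRequirements();
//   vk::raii::DeviceMemory memory = device.allocateMemory( vk::MemoryAllocateInfo( reqs.size, memoryTypeIndex ) );
//   image.bindMemory( *memory, 0 );   // memory must be bound before the image is used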
VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkBindImageMemory( static_cast( m_device ), + static_cast( m_image ), + static_cast( memory ), + static_cast( memoryOffset ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Image::bindMemory" ); + } + + // wrapper function for command vkGetBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Buffer::getMemoryRequirements() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements && "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; + getDispatcher()->vkGetBufferMemoryRequirements( + static_cast( m_device ), static_cast( m_buffer ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetImageMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Image::getMemoryRequirements() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements && "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; + getDispatcher()->vkGetImageMemoryRequirements( + static_cast( m_device ), static_cast( m_image ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Image::getSparseMemoryRequirements() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSparseMemoryRequirements && "Function requires " ); + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + getDispatcher()->vkGetImageSparseMemoryRequirements( + static_cast( m_device ), static_cast( m_image ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + getDispatcher()->vkGetImageSparseMemoryRequirements( static_cast( m_device ), + static_cast( m_image ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + 
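// [Editorial note] Like the other array-returning wrappers above, this follows Vulkan's
// two-call idiom: query the element count first, then size the vector and fetch the data
// (commands that can return VK_INCOMPLETE additionally loop until the sizes agree).
// A minimal sketch of the same idiom against the C API:
//
//   uint32_t count = 0;
//   vkEnumerateInstanceLayerProperties( &count, nullptr );        // first call: count only
//   std::vector<VkLayerProperties> props( count );
//   vkEnumerateInstanceLayerProperties( &count, props.data() );   // second call: fill data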
getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties( static_cast( m_physicalDevice ), + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties( static_cast( m_physicalDevice ), + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + // wrapper function for command vkQueueBindSparse, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueBindSparse.html + VULKAN_HPP_INLINE void Queue::bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfo, + VULKAN_HPP_NAMESPACE::Fence fence ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueBindSparse && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkQueueBindSparse( + static_cast( m_queue ), bindInfo.size(), reinterpret_cast( bindInfo.data() ), static_cast( fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); + } + + // wrapper function for command vkCreateFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFence.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Fence fence; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateFence( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createFence" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence( *this, *reinterpret_cast( &fence ), allocator ); + } + + // wrapper function for command vkResetFences, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetFences.html + VULKAN_HPP_INLINE void Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkResetFences && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkResetFences( static_cast( m_device ), fences.size(), reinterpret_cast( fences.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); + } + + // wrapper function for command vkGetFenceStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceStatus.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Fence::getStatus() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceStatus && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetFenceStatus( static_cast( m_device ), static_cast( m_fence ) ) ); + 
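// [Editorial note] Illustrative fence polling/waiting sketch, not part of the generated header;
// `device` and `fence` are hypothetical placeholders. getStatus() treats both eSuccess and
// eNotReady as non-error results, so it can be polled in a loop.
//
//   if ( fence.getStatus() == vk::Result::eNotReady )
//   {
//     (void)device.waitForFences( { *fence }, VK_TRUE, UINT64_MAX );   // eSuccess or eTimeout
//   }
//   device.resetFences( { *fence } );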
VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Fence::getStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + + return static_cast( result ); + } + + // wrapper function for command vkWaitForFences, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForFences.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitForFences( + VULKAN_HPP_NAMESPACE::ArrayProxy const & fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkWaitForFences && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkWaitForFences( + static_cast( m_device ), fences.size(), reinterpret_cast( fences.data() ), static_cast( waitAll ), timeout ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + + return static_cast( result ); + } + + // wrapper function for command vkCreateSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSemaphore.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Semaphore semaphore; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateSemaphore( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &semaphore ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSemaphore" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Semaphore( *this, *reinterpret_cast( &semaphore ), allocator ); + } + + // wrapper function for command vkCreateEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateEvent.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Event event; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateEvent( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &event ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createEvent" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Event( *this, *reinterpret_cast( &event ), allocator ); + } + + // wrapper function for command vkGetEventStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEventStatus.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Event::getStatus() const + { + VULKAN_HPP_ASSERT( 
getDispatcher()->vkGetEventStatus && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetEventStatus( static_cast( m_device ), static_cast( m_event ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Event::getStatus", { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } ); + + return static_cast( result ); + } + + // wrapper function for command vkSetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetEvent.html + VULKAN_HPP_INLINE void Event::set() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetEvent && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkSetEvent( static_cast( m_device ), static_cast( m_event ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Event::set" ); + } + + // wrapper function for command vkResetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetEvent.html + VULKAN_HPP_INLINE void Event::reset() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkResetEvent && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkResetEvent( static_cast( m_device ), static_cast( m_event ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Event::reset" ); + } + + // wrapper function for command vkCreateQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateQueryPool.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::QueryPool queryPool; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateQueryPool( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &queryPool ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createQueryPool" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::QueryPool( *this, *reinterpret_cast( &queryPool ), allocator ); + } + + // wrapper function for command vkGetQueryPoolResults, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueryPoolResults.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair> QueryPool::getResults( + uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueryPoolResults && "Function requires " ); + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetQueryPoolResults( static_cast( m_device ), + static_cast( m_queryPool ), + firstQuery, + queryCount, + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ), + static_cast( stride ), + static_cast( flags ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResults", { 
VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + + return std::make_pair( result, std::move( data ) ); + } + + // wrapper function for command vkGetQueryPoolResults, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueryPoolResults.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair QueryPool::getResult( + uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueryPoolResults && "Function requires " ); + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetQueryPoolResults( static_cast( m_device ), + static_cast( m_queryPool ), + firstQuery, + queryCount, + sizeof( DataType ), + reinterpret_cast( &data ), + static_cast( stride ), + static_cast( flags ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + + return std::make_pair( result, std::move( data ) ); + } + + // wrapper function for command vkCreateBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBuffer.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Buffer buffer; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateBuffer( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &buffer ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createBuffer" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Buffer( *this, *reinterpret_cast( &buffer ), allocator ); + } + + // wrapper function for command vkCreateBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferView.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::BufferView view; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateBufferView( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createBufferView" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferView( *this, *reinterpret_cast( &view ), allocator ); + } + + // wrapper function for command vkCreateImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImage.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Image image; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateImage( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &image ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createImage" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Image( *this, *reinterpret_cast( &image ), allocator ); + } + + // wrapper function for command vkGetImageSubresourceLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout + Image::getSubresourceLayout( const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout && "Function requires " ); + + VULKAN_HPP_NAMESPACE::SubresourceLayout layout; + getDispatcher()->vkGetImageSubresourceLayout( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return layout; + } + + // wrapper function for command vkCreateImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImageView.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ImageView view; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateImageView( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createImageView" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ImageView( *this, *reinterpret_cast( &view ), allocator ); + } + + // wrapper function for command vkCreateShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShaderModule.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateShaderModule( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &shaderModule ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return 
VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createShaderModule" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderModule( *this, *reinterpret_cast( &shaderModule ), allocator ); + } + + // wrapper function for command vkCreatePipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineCache.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreatePipelineCache( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipelineCache ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createPipelineCache" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache( *this, *reinterpret_cast( &pipelineCache ), allocator ); + } + + // wrapper function for command vkGetPipelineCacheData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineCacheData.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PipelineCache::getData() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineCacheData && "Function requires " ); + + std::vector data; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPipelineCacheData( static_cast( m_device ), static_cast( m_pipelineCache ), &dataSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( getDispatcher()->vkGetPipelineCacheData( + static_cast( m_device ), static_cast( m_pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::getData" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return data; + } + + // wrapper function for command vkMergePipelineCaches, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMergePipelineCaches.html + VULKAN_HPP_INLINE void PipelineCache::merge( VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkMergePipelineCaches && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkMergePipelineCaches( static_cast( m_device ), + static_cast( m_pipelineCache ), + srcCaches.size(), + reinterpret_cast( srcCaches.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::merge" ); + } + + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type 
+ Device::createGraphicsPipelines( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateGraphicsPipelines( + static_cast( m_device ), + pipelineCache ? static_cast( **pipelineCache ) : 0, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createGraphicsPipelines" ); +# endif + } + + std::vector pipelinesRAII; + pipelinesRAII.reserve( pipelines.size() ); + for ( auto & pipeline : pipelines ) + { + pipelinesRAII.emplace_back( *this, *reinterpret_cast( &pipeline ), allocator, result ); + } + return pipelinesRAII; + } + + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createGraphicsPipeline( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateGraphicsPipelines( + static_cast( m_device ), + pipelineCache ? static_cast( **pipelineCache ) : 0, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createGraphicsPipeline" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator, result ); + } + + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + Device::createComputePipelines( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateComputePipelines( + static_cast( m_device ), + pipelineCache ? 
static_cast( **pipelineCache ) : 0, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createComputePipelines" ); +# endif + } + + std::vector pipelinesRAII; + pipelinesRAII.reserve( pipelines.size() ); + for ( auto & pipeline : pipelines ) + { + pipelinesRAII.emplace_back( *this, *reinterpret_cast( &pipeline ), allocator, result ); + } + return pipelinesRAII; + } + + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createComputePipeline( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateComputePipelines( + static_cast( m_device ), + pipelineCache ? static_cast( **pipelineCache ) : 0, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createComputePipeline" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator, result ); + } + + // wrapper function for command vkCreatePipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineLayout.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreatePipelineLayout( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipelineLayout ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createPipelineLayout" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineLayout( *this, *reinterpret_cast( &pipelineLayout ), allocator ); + } + + // wrapper function for command vkCreateSampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSampler.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Sampler>::Type
+    Device::createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const &                                createInfo,
+                           VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::Sampler sampler;
+    VULKAN_HPP_NAMESPACE::Result  result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateSampler(
+      static_cast<VkDevice>( m_device ),
+      reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkSampler *>( &sampler ) ) );
+    if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
+      return VULKAN_HPP_UNEXPECTED( result );
+# else
+      VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSampler" );
+# endif
+    }
+
+    return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Sampler( *this, *reinterpret_cast<VkSampler *>( &sampler ), allocator );
+  }
+
+  // wrapper function for command vkCreateDescriptorSetLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorSetLayout.html
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout>::Type
+    Device::createDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const &                      createInfo,
+                                       VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    VULKAN_HPP_RAII_CREATE_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
+    VULKAN_HPP_NAMESPACE::Result              result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateDescriptorSetLayout(
+      static_cast<VkDevice>( m_device ),
+      reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) );
+    if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
+      return VULKAN_HPP_UNEXPECTED( result );
+# else
+      VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createDescriptorSetLayout" );
+# endif
+    }
+
+    return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout( *this, *reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ), allocator );
+  }
+
+  // wrapper function for command vkCreateDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorPool.html
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorPool>::Type
+    Device::createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const &                          createInfo,
+                                  VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    VULKAN_HPP_RAII_CREATE_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
+    VULKAN_HPP_NAMESPACE::Result         result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateDescriptorPool(
+      static_cast<VkDevice>( m_device ),
+      reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) );
+    if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
+      return VULKAN_HPP_UNEXPECTED( result );
+# else
+      VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createDescriptorPool" );
+# endif
+    }
+
+    return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorPool( *this, *reinterpret_cast<VkDescriptorPool *>( &descriptorPool ), allocator );
+  }
+
+  // wrapper function for command vkResetDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetDescriptorPool.html
+  VULKAN_HPP_INLINE void DescriptorPool::reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+  {
+ VULKAN_HPP_ASSERT( getDispatcher()->vkResetDescriptorPool && "Function requires " ); + + getDispatcher()->vkResetDescriptorPool( + static_cast( m_device ), static_cast( m_descriptorPool ), static_cast( flags ) ); + } + + // wrapper function for command vkAllocateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateDescriptorSets.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + Device::allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const + { + std::vector descriptorSets( allocateInfo.descriptorSetCount ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkAllocateDescriptorSets( static_cast( m_device ), + reinterpret_cast( &allocateInfo ), + reinterpret_cast( descriptorSets.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::allocateDescriptorSets" ); +# endif + } + + std::vector descriptorSetsRAII; + descriptorSetsRAII.reserve( descriptorSets.size() ); + for ( auto & descriptorSet : descriptorSets ) + { + descriptorSetsRAII.emplace_back( + *this, *reinterpret_cast( &descriptorSet ), static_cast( allocateInfo.descriptorPool ) ); + } + return descriptorSetsRAII; + } + + // wrapper function for command vkUpdateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSets.html + VULKAN_HPP_INLINE void Device::updateDescriptorSets( + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorCopies ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateDescriptorSets && "Function requires " ); + + getDispatcher()->vkUpdateDescriptorSets( static_cast( m_device ), + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ), + descriptorCopies.size(), + reinterpret_cast( descriptorCopies.data() ) ); + } + + // wrapper function for command vkCreateFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFramebuffer.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateFramebuffer( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &framebuffer ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createFramebuffer" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Framebuffer( *this, *reinterpret_cast( &framebuffer ), allocator ); + } + + // wrapper function for command vkCreateRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + 
Device::createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateRenderPass( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRenderPass" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, *reinterpret_cast( &renderPass ), allocator ); + } + + // wrapper function for command vkGetRenderAreaGranularity, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderAreaGranularity.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D RenderPass::getRenderAreaGranularity() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRenderAreaGranularity && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Extent2D granularity; + getDispatcher()->vkGetRenderAreaGranularity( + static_cast( m_device ), static_cast( m_renderPass ), reinterpret_cast( &granularity ) ); + + return granularity; + } + + // wrapper function for command vkCreateCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCommandPool.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::CommandPool commandPool; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateCommandPool( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &commandPool ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createCommandPool" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandPool( *this, *reinterpret_cast( &commandPool ), allocator ); + } + + // wrapper function for command vkResetCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetCommandPool.html + VULKAN_HPP_INLINE void CommandPool::reset( VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkResetCommandPool && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkResetCommandPool( + static_cast( m_device ), static_cast( m_commandPool ), static_cast( flags ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandPool::reset" ); + } + + // wrapper function for command vkAllocateCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateCommandBuffers.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + Device::allocateCommandBuffers( 
VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const + { + std::vector commandBuffers( allocateInfo.commandBufferCount ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkAllocateCommandBuffers( static_cast( m_device ), + reinterpret_cast( &allocateInfo ), + reinterpret_cast( commandBuffers.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::allocateCommandBuffers" ); +# endif + } + + std::vector commandBuffersRAII; + commandBuffersRAII.reserve( commandBuffers.size() ); + for ( auto & commandBuffer : commandBuffers ) + { + commandBuffersRAII.emplace_back( + *this, *reinterpret_cast( &commandBuffer ), static_cast( allocateInfo.commandPool ) ); + } + return commandBuffersRAII; + } + + // wrapper function for command vkBeginCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBeginCommandBuffer.html + VULKAN_HPP_INLINE void CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBeginCommandBuffer && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkBeginCommandBuffer( + static_cast( m_commandBuffer ), reinterpret_cast( &beginInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); + } + + // wrapper function for command vkEndCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEndCommandBuffer.html + VULKAN_HPP_INLINE void CommandBuffer::end() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEndCommandBuffer && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkEndCommandBuffer( static_cast( m_commandBuffer ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); + } + + // wrapper function for command vkResetCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetCommandBuffer.html + VULKAN_HPP_INLINE void CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkResetCommandBuffer && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkResetCommandBuffer( static_cast( m_commandBuffer ), static_cast( flags ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); + } + + // wrapper function for command vkCmdBindPipeline, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindPipeline.html + VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindPipeline && "Function requires " ); + + getDispatcher()->vkCmdBindPipeline( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + } + + // wrapper function for command vkCmdSetViewport, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewport.html + VULKAN_HPP_INLINE void + CommandBuffer::setViewport( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT + { + 
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewport && "Function requires " ); + + getDispatcher()->vkCmdSetViewport( + static_cast( m_commandBuffer ), firstViewport, viewports.size(), reinterpret_cast( viewports.data() ) ); + } + + // wrapper function for command vkCmdSetScissor, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissor.html + VULKAN_HPP_INLINE void + CommandBuffer::setScissor( uint32_t firstScissor, + VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissor && "Function requires " ); + + getDispatcher()->vkCmdSetScissor( + static_cast( m_commandBuffer ), firstScissor, scissors.size(), reinterpret_cast( scissors.data() ) ); + } + + // wrapper function for command vkCmdSetLineWidth, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineWidth.html + VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineWidth && "Function requires " ); + + getDispatcher()->vkCmdSetLineWidth( static_cast( m_commandBuffer ), lineWidth ); + } + + // wrapper function for command vkCmdSetDepthBias, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBias.html + VULKAN_HPP_INLINE void + CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBias && "Function requires " ); + + getDispatcher()->vkCmdSetDepthBias( static_cast( m_commandBuffer ), depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + } + + // wrapper function for command vkCmdSetBlendConstants, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetBlendConstants.html + VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetBlendConstants && "Function requires " ); + + getDispatcher()->vkCmdSetBlendConstants( static_cast( m_commandBuffer ), blendConstants ); + } + + // wrapper function for command vkCmdSetDepthBounds, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBounds.html + VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBounds && "Function requires " ); + + getDispatcher()->vkCmdSetDepthBounds( static_cast( m_commandBuffer ), minDepthBounds, maxDepthBounds ); + } + + // wrapper function for command vkCmdSetStencilCompareMask, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilCompareMask.html + VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilCompareMask && "Function requires " ); + + getDispatcher()->vkCmdSetStencilCompareMask( static_cast( m_commandBuffer ), static_cast( faceMask ), compareMask ); + } + + // wrapper function for command vkCmdSetStencilWriteMask, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilWriteMask.html + VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilWriteMask && 
"Function requires " ); + + getDispatcher()->vkCmdSetStencilWriteMask( static_cast( m_commandBuffer ), static_cast( faceMask ), writeMask ); + } + + // wrapper function for command vkCmdSetStencilReference, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilReference.html + VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilReference && "Function requires " ); + + getDispatcher()->vkCmdSetStencilReference( static_cast( m_commandBuffer ), static_cast( faceMask ), reference ); + } + + // wrapper function for command vkCmdBindDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets.html + VULKAN_HPP_INLINE void + CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorSets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dynamicOffsets ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorSets && "Function requires " ); + + getDispatcher()->vkCmdBindDescriptorSets( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + descriptorSets.size(), + reinterpret_cast( descriptorSets.data() ), + dynamicOffsets.size(), + dynamicOffsets.data() ); + } + + // wrapper function for command vkCmdBindIndexBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindIndexBuffer.html + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindIndexBuffer && "Function requires " ); + + getDispatcher()->vkCmdBindIndexBuffer( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( indexType ) ); + } + + // wrapper function for command vkCmdBindVertexBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers.html + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers && "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBindVertexBuffers( static_cast( m_commandBuffer ), + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ) ); + } + + // wrapper function for command vkCmdDraw, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDraw.html + VULKAN_HPP_INLINE void + CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDraw && "Function requires " ); + + getDispatcher()->vkCmdDraw( static_cast( m_commandBuffer ), vertexCount, instanceCount, firstVertex, 
firstInstance ); + } + + // wrapper function for command vkCmdDrawIndexed, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexed.html + VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( + uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexed && "Function requires " ); + + getDispatcher()->vkCmdDrawIndexed( static_cast( m_commandBuffer ), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } + + // wrapper function for command vkCmdDrawIndirect, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirect.html + VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirect && "Function requires " ); + + getDispatcher()->vkCmdDrawIndirect( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } + + // wrapper function for command vkCmdDrawIndexedIndirect, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirect.html + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexedIndirect && "Function requires " ); + + getDispatcher()->vkCmdDrawIndexedIndirect( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } + + // wrapper function for command vkCmdDispatch, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatch.html + VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatch && "Function requires " ); + + getDispatcher()->vkCmdDispatch( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); + } + + // wrapper function for command vkCmdDispatchIndirect, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchIndirect.html + VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchIndirect && "Function requires " ); + + getDispatcher()->vkCmdDispatchIndirect( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ) ); + } + + // wrapper function for command vkCmdCopyBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer.html + VULKAN_HPP_INLINE void + CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer && "Function requires " ); + + getDispatcher()->vkCmdCopyBuffer( static_cast( m_commandBuffer ), + static_cast( srcBuffer ), + static_cast( dstBuffer ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } + + // wrapper function for command vkCmdCopyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage.html + VULKAN_HPP_INLINE void 
+ CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage && "Function requires " ); + + getDispatcher()->vkCmdCopyImage( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } + + // wrapper function for command vkCmdBlitImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage.html + VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + VULKAN_HPP_NAMESPACE::Filter filter ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage && "Function requires " ); + + getDispatcher()->vkCmdBlitImage( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ), + static_cast( filter ) ); + } + + // wrapper function for command vkCmdCopyBufferToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage.html + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( + VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBufferToImage && "Function requires " ); + + getDispatcher()->vkCmdCopyBufferToImage( static_cast( m_commandBuffer ), + static_cast( srcBuffer ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } + + // wrapper function for command vkCmdCopyImageToBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer.html + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( + VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImageToBuffer && "Function requires " ); + + getDispatcher()->vkCmdCopyImageToBuffer( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstBuffer ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } + + // wrapper function for command vkCmdUpdateBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdUpdateBuffer.html + template + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::ArrayProxy const & data ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdUpdateBuffer && "Function requires " ); + + getDispatcher()->vkCmdUpdateBuffer( static_cast( m_commandBuffer ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + data.size() * 
sizeof( DataType ), + reinterpret_cast( data.data() ) ); + } + + // wrapper function for command vkCmdFillBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdFillBuffer.html + VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + uint32_t data ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdFillBuffer && "Function requires " ); + + getDispatcher()->vkCmdFillBuffer( static_cast( m_commandBuffer ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( size ), + data ); + } + + // wrapper function for command vkCmdClearColorImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearColorImage.html + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( + VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue & color, + VULKAN_HPP_NAMESPACE::ArrayProxy const & ranges ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdClearColorImage && "Function requires " ); + + getDispatcher()->vkCmdClearColorImage( static_cast( m_commandBuffer ), + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( &color ), + ranges.size(), + reinterpret_cast( ranges.data() ) ); + } + + // wrapper function for command vkCmdClearDepthStencilImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearDepthStencilImage.html + VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( + VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, + VULKAN_HPP_NAMESPACE::ArrayProxy const & ranges ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdClearDepthStencilImage && "Function requires " ); + + getDispatcher()->vkCmdClearDepthStencilImage( static_cast( m_commandBuffer ), + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( &depthStencil ), + ranges.size(), + reinterpret_cast( ranges.data() ) ); + } + + // wrapper function for command vkCmdClearAttachments, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearAttachments.html + VULKAN_HPP_INLINE void + CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy const & attachments, + VULKAN_HPP_NAMESPACE::ArrayProxy const & rects ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdClearAttachments && "Function requires " ); + + getDispatcher()->vkCmdClearAttachments( static_cast( m_commandBuffer ), + attachments.size(), + reinterpret_cast( attachments.data() ), + rects.size(), + reinterpret_cast( rects.data() ) ); + } + + // wrapper function for command vkCmdResolveImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage.html + VULKAN_HPP_INLINE void + CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage && "Function requires " ); + + getDispatcher()->vkCmdResolveImage( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + 
regions.size(), + reinterpret_cast( regions.data() ) ); + } + + // wrapper function for command vkCmdSetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent.html + VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent && "Function requires " ); + + getDispatcher()->vkCmdSetEvent( + static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); + } + + // wrapper function for command vkCmdResetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetEvent.html + VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetEvent && "Function requires " ); + + getDispatcher()->vkCmdResetEvent( + static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); + } + + // wrapper function for command vkCmdWaitEvents, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents.html + VULKAN_HPP_INLINE void CommandBuffer::waitEvents( + VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents && "Function requires " ); + + getDispatcher()->vkCmdWaitEvents( static_cast( m_commandBuffer ), + events.size(), + reinterpret_cast( events.data() ), + static_cast( srcStageMask ), + static_cast( dstStageMask ), + memoryBarriers.size(), + reinterpret_cast( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast( bufferMemoryBarriers.data() ), + imageMemoryBarriers.size(), + reinterpret_cast( imageMemoryBarriers.data() ) ); + } + + // wrapper function for command vkCmdPipelineBarrier, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier.html + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPipelineBarrier && "Function requires " ); + + getDispatcher()->vkCmdPipelineBarrier( static_cast( m_commandBuffer ), + static_cast( srcStageMask ), + static_cast( dstStageMask ), + static_cast( dependencyFlags ), + memoryBarriers.size(), + reinterpret_cast( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast( bufferMemoryBarriers.data() ), + imageMemoryBarriers.size(), + reinterpret_cast( imageMemoryBarriers.data() ) ); + } + + // wrapper function for command vkCmdBeginQuery, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginQuery.html + VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginQuery && "Function requires " ); + + getDispatcher()->vkCmdBeginQuery( + static_cast( m_commandBuffer ), static_cast( queryPool ), query, static_cast( flags ) ); + } + + // wrapper function for command vkCmdEndQuery, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndQuery.html + VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndQuery && "Function requires " ); + + getDispatcher()->vkCmdEndQuery( static_cast( m_commandBuffer ), static_cast( queryPool ), query ); + } + + // wrapper function for command vkCmdResetQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetQueryPool.html + VULKAN_HPP_INLINE void + CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetQueryPool && "Function requires " ); + + getDispatcher()->vkCmdResetQueryPool( static_cast( m_commandBuffer ), static_cast( queryPool ), firstQuery, queryCount ); + } + + // wrapper function for command vkCmdWriteTimestamp, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteTimestamp.html + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp && "Function requires " ); + + getDispatcher()->vkCmdWriteTimestamp( + static_cast( m_commandBuffer ), static_cast( pipelineStage ), static_cast( queryPool ), query ); + } + + // wrapper function for command vkCmdCopyQueryPoolResults, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyQueryPoolResults.html + VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyQueryPoolResults && "Function requires " ); + + getDispatcher()->vkCmdCopyQueryPoolResults( static_cast( m_commandBuffer ), + static_cast( queryPool ), + firstQuery, + queryCount, + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( stride ), + static_cast( flags ) ); + } + + // wrapper function for command vkCmdPushConstants, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants.html + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + VULKAN_HPP_NAMESPACE::ArrayProxy const & values ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushConstants && "Function requires " ); + + getDispatcher()->vkCmdPushConstants( static_cast( m_commandBuffer ), + static_cast( layout ), + static_cast( stageFlags ), + offset, + values.size() * sizeof( ValuesType ), + reinterpret_cast( values.data() ) ); + } + + // wrapper function for command vkCmdBeginRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass.html + VULKAN_HPP_INLINE void 
CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderPass && "Function requires " ); + + getDispatcher()->vkCmdBeginRenderPass( static_cast( m_commandBuffer ), + reinterpret_cast( &renderPassBegin ), + static_cast( contents ) ); + } + + // wrapper function for command vkCmdNextSubpass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass.html + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass && "Function requires " ); + + getDispatcher()->vkCmdNextSubpass( static_cast( m_commandBuffer ), static_cast( contents ) ); + } + + // wrapper function for command vkCmdEndRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass.html + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass && "Function requires " ); + + getDispatcher()->vkCmdEndRenderPass( static_cast( m_commandBuffer ) ); + } + + // wrapper function for command vkCmdExecuteCommands, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteCommands.html + VULKAN_HPP_INLINE void CommandBuffer::executeCommands( + VULKAN_HPP_NAMESPACE::ArrayProxy const & commandBuffers ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdExecuteCommands && "Function requires " ); + + getDispatcher()->vkCmdExecuteCommands( + static_cast( m_commandBuffer ), commandBuffers.size(), reinterpret_cast( commandBuffers.data() ) ); + } + + //=== VK_VERSION_1_1 === + + // wrapper function for command vkEnumerateInstanceVersion, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceVersion.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t Context::enumerateInstanceVersion() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateInstanceVersion && "Function requires " ); + + uint32_t apiVersion; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkEnumerateInstanceVersion( &apiVersion ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceVersion" ); + + return apiVersion; + } + + // wrapper function for command vkBindBufferMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory2.html + VULKAN_HPP_INLINE void + Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory2 && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkBindBufferMemory2( + static_cast( m_device ), bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); + } + + // wrapper function for command vkBindImageMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory2.html + VULKAN_HPP_INLINE void Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory2 && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkBindImageMemory2( + static_cast( 
m_device ), bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); + } + + // wrapper function for command vkGetDeviceGroupPeerMemoryFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPeerMemoryFeatures.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPeerMemoryFeatures && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; + getDispatcher()->vkGetDeviceGroupPeerMemoryFeatures( static_cast( m_device ), + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( &peerMemoryFeatures ) ); + + return peerMemoryFeatures; + } + + // wrapper function for command vkCmdSetDeviceMask, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDeviceMask.html + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDeviceMask && "Function requires or " ); + + getDispatcher()->vkCmdSetDeviceMask( static_cast( m_commandBuffer ), deviceMask ); + } + + // wrapper function for command vkCmdDispatchBase, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchBase.html + VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchBase && "Function requires or " ); + + getDispatcher()->vkCmdDispatchBase( + static_cast( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + + // wrapper function for command vkEnumeratePhysicalDeviceGroups, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroups.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Instance::enumeratePhysicalDeviceGroups() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceGroups && + "Function requires or " ); + + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkEnumeratePhysicalDeviceGroups( static_cast( m_instance ), &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( + getDispatcher()->vkEnumeratePhysicalDeviceGroups( static_cast( m_instance ), + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return physicalDeviceGroupProperties; + } + + // wrapper function for command 
vkGetImageMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetImageMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetImageMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetImageMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + // wrapper function for command vkGetBufferMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetBufferMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetBufferMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetBufferMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + // wrapper function for command vkGetImageSparseMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSparseMemoryRequirements2 && + 
"Function requires or " ); + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + getDispatcher()->vkGetImageSparseMemoryRequirements2( + static_cast( m_device ), reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + getDispatcher()->vkGetImageSparseMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + // wrapper function for command vkGetPhysicalDeviceFeatures2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; + getDispatcher()->vkGetPhysicalDeviceFeatures2( static_cast( m_physicalDevice ), + reinterpret_cast( &features ) ); + + return features; + } + + // wrapper function for command vkGetPhysicalDeviceFeatures2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain PhysicalDevice::getFeatures2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceFeatures2( static_cast( m_physicalDevice ), + reinterpret_cast( &features ) ); + + return structureChain; + } + + // wrapper function for command vkGetPhysicalDeviceProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; + getDispatcher()->vkGetPhysicalDeviceProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &properties ) ); + + return properties; + } + + // wrapper function for command vkGetPhysicalDeviceProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain PhysicalDevice::getProperties2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &properties ) ); + + return structureChain; + } + + // wrapper function for command 
vkGetPhysicalDeviceFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; + getDispatcher()->vkGetPhysicalDeviceFormatProperties2( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return formatProperties; + } + + // wrapper function for command vkGetPhysicalDeviceFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceFormatProperties2( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return structureChain; + } + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties2 + PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + + return imageFormatProperties; + } + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2 && + "Function requires or " ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" 
); + + return structureChain; + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2 && + "Function requires or " ); + + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast( m_physicalDevice ), + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2 && + "Function requires or " ); + + std::vector structureChains; + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); + structureChains.resize( queueFamilyPropertyCount ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = structureChains[i].template get().pNext; + } + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast( m_physicalDevice ), + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + structureChains.resize( queueFamilyPropertyCount ); + } + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + structureChains[i].template get() = queueFamilyProperties[i]; + } + return structureChains; + } + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + PhysicalDevice::getMemoryProperties2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; + getDispatcher()->vkGetPhysicalDeviceMemoryProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &memoryProperties ) ); + + return memoryProperties; + } + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
VULKAN_HPP_NAMESPACE::StructureChain PhysicalDevice::getMemoryProperties2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = + structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceMemoryProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &memoryProperties ) ); + + return structureChain; + } + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2 && + "Function requires or " ); + + std::vector properties; + uint32_t propertyCount; + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + // wrapper function for command vkTrimCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTrimCommandPool.html + VULKAN_HPP_INLINE void CommandPool::trim( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkTrimCommandPool && "Function requires or " ); + + getDispatcher()->vkTrimCommandPool( + static_cast( m_device ), static_cast( m_commandPool ), static_cast( flags ) ); + } + + // wrapper function for command vkGetDeviceQueue2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceQueue2.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Queue queue; + getDispatcher()->vkGetDeviceQueue2( + static_cast( m_device ), reinterpret_cast( &queueInfo ), reinterpret_cast( &queue ) ); + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Queue( *this, *reinterpret_cast( &queue ) ); + } + + // wrapper function for command vkCreateSamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversion.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateSamplerYcbcrConversion( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + 
reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSamplerYcbcrConversion" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion( + *this, *reinterpret_cast( &ycbcrConversion ), allocator ); + } + + // wrapper function for command vkCreateDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplate.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateDescriptorUpdateTemplate( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createDescriptorUpdateTemplate" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate( + *this, *reinterpret_cast( &descriptorUpdateTemplate ), allocator ); + } + + // wrapper function for command vkUpdateDescriptorSetWithTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSetWithTemplate.html + template + VULKAN_HPP_INLINE void DescriptorSet::updateWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateDescriptorSetWithTemplate && + "Function requires or " ); + + getDispatcher()->vkUpdateDescriptorSetWithTemplate( static_cast( m_device ), + static_cast( m_descriptorSet ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( &data ) ); + } + + // wrapper function for command vkGetPhysicalDeviceExternalBufferProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalBufferProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties + PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalBufferProperties && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; + getDispatcher()->vkGetPhysicalDeviceExternalBufferProperties( static_cast( m_physicalDevice ), + reinterpret_cast( &externalBufferInfo ), + reinterpret_cast( &externalBufferProperties ) ); + + return externalBufferProperties; + } + + // wrapper function for command vkGetPhysicalDeviceExternalFenceProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalFenceProperties.html + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties + PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalFenceProperties && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; + getDispatcher()->vkGetPhysicalDeviceExternalFenceProperties( static_cast( m_physicalDevice ), + reinterpret_cast( &externalFenceInfo ), + reinterpret_cast( &externalFenceProperties ) ); + + return externalFenceProperties; + } + + // wrapper function for command vkGetPhysicalDeviceExternalSemaphoreProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalSemaphoreProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalSemaphoreProperties && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; + getDispatcher()->vkGetPhysicalDeviceExternalSemaphoreProperties( + static_cast( m_physicalDevice ), + reinterpret_cast( &externalSemaphoreInfo ), + reinterpret_cast( &externalSemaphoreProperties ) ); + + return externalSemaphoreProperties; + } + + // wrapper function for command vkGetDescriptorSetLayoutSupport, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupport.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupport && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; + getDispatcher()->vkGetDescriptorSetLayoutSupport( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + + return support; + } + + // wrapper function for command vkGetDescriptorSetLayoutSupport, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupport.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupport && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get(); + getDispatcher()->vkGetDescriptorSetLayoutSupport( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + + return structureChain; + } + + //=== VK_VERSION_1_2 === + + // wrapper function for command vkCmdDrawIndirectCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectCount.html + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize 
countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCount && + "Function requires or or " ); + + getDispatcher()->vkCmdDrawIndirectCount( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + // wrapper function for command vkCmdDrawIndexedIndirectCount, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirectCount.html + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexedIndirectCount && + "Function requires or or " ); + + getDispatcher()->vkCmdDrawIndexedIndirectCount( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + // wrapper function for command vkCreateRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateRenderPass2( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRenderPass2" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, *reinterpret_cast( &renderPass ), allocator ); + } + + // wrapper function for command vkCmdBeginRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass2.html + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderPass2 && + "Function requires or " ); + + getDispatcher()->vkCmdBeginRenderPass2( static_cast( m_commandBuffer ), + reinterpret_cast( &renderPassBegin ), + reinterpret_cast( &subpassBeginInfo ) ); + } + + // wrapper function for command vkCmdNextSubpass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass2.html + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass2 && "Function requires or " ); + + getDispatcher()->vkCmdNextSubpass2( static_cast( m_commandBuffer ), + reinterpret_cast( &subpassBeginInfo ), + 
reinterpret_cast( &subpassEndInfo ) ); + } + + // wrapper function for command vkCmdEndRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass2.html + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass2 && "Function requires or " ); + + getDispatcher()->vkCmdEndRenderPass2( static_cast( m_commandBuffer ), reinterpret_cast( &subpassEndInfo ) ); + } + + // wrapper function for command vkResetQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetQueryPool.html + VULKAN_HPP_INLINE void QueryPool::reset( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkResetQueryPool && "Function requires or " ); + + getDispatcher()->vkResetQueryPool( static_cast( m_device ), static_cast( m_queryPool ), firstQuery, queryCount ); + } + + // wrapper function for command vkGetSemaphoreCounterValue, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreCounterValue.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValue() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreCounterValue && + "Function requires or " ); + + uint64_t value; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetSemaphoreCounterValue( static_cast( m_device ), static_cast( m_semaphore ), &value ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValue" ); + + return value; + } + + // wrapper function for command vkWaitSemaphores, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitSemaphores.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, + uint64_t timeout ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkWaitSemaphores && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkWaitSemaphores( static_cast( m_device ), reinterpret_cast( &waitInfo ), timeout ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + + return static_cast( result ); + } + + // wrapper function for command vkSignalSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSignalSemaphore.html + VULKAN_HPP_INLINE void Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSignalSemaphore && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkSignalSemaphore( static_cast( m_device ), reinterpret_cast( &signalInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); + } + + // wrapper function for command vkGetBufferDeviceAddress, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddress.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferDeviceAddress && + "Function requires or or " ); + + 
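+    // Illustrative usage sketch (not part of the generated wrapper): with the RAII classes, a
+    // device address is typically queried right after creating a buffer with device-address
+    // usage. The 'device' and 'buffer' objects below are hypothetical examples.
+    //
+    //   vk::BufferDeviceAddressInfo addressInfo( *buffer );
+    //   vk::DeviceAddress          address = device.getBufferAddress( addressInfo );
+    //
+    // The buffer is assumed to have been created with BufferUsageFlagBits::eShaderDeviceAddress
+    // and the bufferDeviceAddress feature enabled; otherwise the returned address is not usable.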
VkDeviceAddress result = + getDispatcher()->vkGetBufferDeviceAddress( static_cast( m_device ), reinterpret_cast( &info ) ); + + return static_cast( result ); + } + + // wrapper function for command vkGetBufferOpaqueCaptureAddress, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureAddress.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferOpaqueCaptureAddress && + "Function requires or " ); + + uint64_t result = + getDispatcher()->vkGetBufferOpaqueCaptureAddress( static_cast( m_device ), reinterpret_cast( &info ) ); + + return result; + } + + // wrapper function for command vkGetDeviceMemoryOpaqueCaptureAddress, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryOpaqueCaptureAddress.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddress && + "Function requires or " ); + + uint64_t result = getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddress( static_cast( m_device ), + reinterpret_cast( &info ) ); + + return result; + } + + //=== VK_VERSION_1_3 === + + // wrapper function for command vkGetPhysicalDeviceToolProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolProperties.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getToolProperties() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceToolProperties && + "Function requires or " ); + + std::vector toolProperties; + uint32_t toolCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceToolProperties( static_cast( m_physicalDevice ), &toolCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceToolProperties( + static_cast( m_physicalDevice ), &toolCount, reinterpret_cast( toolProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + if ( toolCount < toolProperties.size() ) + { + toolProperties.resize( toolCount ); + } + return toolProperties; + } + + // wrapper function for command vkCreatePrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlot.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreatePrivateDataSlot( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &privateDataSlot ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# 
if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createPrivateDataSlot" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot( *this, *reinterpret_cast( &privateDataSlot ), allocator ); + } + + // wrapper function for command vkSetPrivateData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetPrivateData.html + VULKAN_HPP_INLINE void Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetPrivateData && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkSetPrivateData( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); + } + + // wrapper function for command vkGetPrivateData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPrivateData.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPrivateData && "Function requires or " ); + + uint64_t data; + getDispatcher()->vkGetPrivateData( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), &data ); + + return data; + } + + // wrapper function for command vkCmdSetEvent2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent2.html + VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent2 && "Function requires or " ); + + getDispatcher()->vkCmdSetEvent2( + static_cast( m_commandBuffer ), static_cast( event ), reinterpret_cast( &dependencyInfo ) ); + } + + // wrapper function for command vkCmdResetEvent2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetEvent2.html + VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetEvent2 && "Function requires or " ); + + getDispatcher()->vkCmdResetEvent2( + static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); + } + + // wrapper function for command vkCmdWaitEvents2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents2.html + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents2 && "Function requires or " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); +# else + if ( events.size() != dependencyInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdWaitEvents2( 
static_cast( m_commandBuffer ), + events.size(), + reinterpret_cast( events.data() ), + reinterpret_cast( dependencyInfos.data() ) ); + } + + // wrapper function for command vkCmdPipelineBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier2.html + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPipelineBarrier2 && "Function requires or " ); + + getDispatcher()->vkCmdPipelineBarrier2( static_cast( m_commandBuffer ), reinterpret_cast( &dependencyInfo ) ); + } + + // wrapper function for command vkCmdWriteTimestamp2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteTimestamp2.html + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp2 && "Function requires or " ); + + getDispatcher()->vkCmdWriteTimestamp2( + static_cast( m_commandBuffer ), static_cast( stage ), static_cast( queryPool ), query ); + } + + // wrapper function for command vkQueueSubmit2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit2.html + VULKAN_HPP_INLINE void Queue::submit2( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit2 && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkQueueSubmit2( + static_cast( m_queue ), submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" ); + } + + // wrapper function for command vkCmdCopyBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer2.html + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer2 && "Function requires or " ); + + getDispatcher()->vkCmdCopyBuffer2( static_cast( m_commandBuffer ), reinterpret_cast( ©BufferInfo ) ); + } + + // wrapper function for command vkCmdCopyImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage2.html + VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage2 && "Function requires or " ); + + getDispatcher()->vkCmdCopyImage2( static_cast( m_commandBuffer ), reinterpret_cast( ©ImageInfo ) ); + } + + // wrapper function for command vkCmdCopyBufferToImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage2.html + VULKAN_HPP_INLINE void + CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBufferToImage2 && + "Function requires or " ); + + getDispatcher()->vkCmdCopyBufferToImage2( static_cast( m_commandBuffer ), + reinterpret_cast( ©BufferToImageInfo ) ); + } + + // wrapper function for command vkCmdCopyImageToBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer2.html + VULKAN_HPP_INLINE void + 
CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImageToBuffer2 &&
+                       "Function requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdCopyImageToBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                              reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
+  }
+
+  // wrapper function for command vkCmdBlitImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage2.html
+  VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage2 && "Function requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdBlitImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
+  }
+
+  // wrapper function for command vkCmdResolveImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage2.html
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage2 && "Function requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdResolveImage2( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                         reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
+  }
+
+  // wrapper function for command vkCmdBeginRendering, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRendering.html
+  VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRendering && "Function requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdBeginRendering( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
+  }
+
+  // wrapper function for command vkCmdEndRendering, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRendering.html
+  VULKAN_HPP_INLINE void CommandBuffer::endRendering() const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRendering && "Function requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdEndRendering( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+  }
+
+  // wrapper function for command vkCmdSetCullMode, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCullMode.html
+  VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullMode &&
+                       "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdSetCullMode( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
+  }
+
+  // wrapper function for command vkCmdSetFrontFace, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFrontFace.html
+  VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFace &&
+                       "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdSetFrontFace( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
+  }
+
+  // wrapper function for command vkCmdSetPrimitiveTopology, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveTopology.html
+  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopology &&
+                       "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
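+    // Illustrative usage sketch (not part of the generated wrapper): these dynamic-state setters
+    // are recorded on a command buffer whose bound pipeline enables the matching VK_DYNAMIC_STATE_*
+    // values. 'commandBuffer' below is a hypothetical vk::raii::CommandBuffer.
+    //
+    //   commandBuffer.setCullMode( vk::CullModeFlagBits::eBack );
+    //   commandBuffer.setFrontFace( vk::FrontFace::eCounterClockwise );
+    //   commandBuffer.setPrimitiveTopology( vk::PrimitiveTopology::eTriangleList );
+    //
+    // The calls are assumed to happen during recording, before the draw that consumes the state.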
getDispatcher()->vkCmdSetPrimitiveTopology( static_cast( m_commandBuffer ), static_cast( primitiveTopology ) ); + } + + // wrapper function for command vkCmdSetViewportWithCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWithCount.html + VULKAN_HPP_INLINE void + CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCount && + "Function requires or or " ); + + getDispatcher()->vkCmdSetViewportWithCount( + static_cast( m_commandBuffer ), viewports.size(), reinterpret_cast( viewports.data() ) ); + } + + // wrapper function for command vkCmdSetScissorWithCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissorWithCount.html + VULKAN_HPP_INLINE void + CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCount && + "Function requires or or " ); + + getDispatcher()->vkCmdSetScissorWithCount( + static_cast( m_commandBuffer ), scissors.size(), reinterpret_cast( scissors.data() ) ); + } + + // wrapper function for command vkCmdBindVertexBuffers2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers2.html + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes, + VULKAN_HPP_NAMESPACE::ArrayProxy const & strides ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2 && + "Function requires or or " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); + VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" ); + } + if ( !strides.empty() && buffers.size() != strides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBindVertexBuffers2( static_cast( m_commandBuffer ), + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ), + reinterpret_cast( strides.data() ) ); + } + + // wrapper function for command vkCmdSetDepthTestEnable, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthTestEnable.html + VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnable && + "Function requires or or " ); + + getDispatcher()->vkCmdSetDepthTestEnable( static_cast( m_commandBuffer ), static_cast( depthTestEnable ) ); + } + + // wrapper function for command vkCmdSetDepthWriteEnable, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthWriteEnable.html + VULKAN_HPP_INLINE void 
CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnable &&
+                       "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdSetDepthWriteEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
+  }
+
+  // wrapper function for command vkCmdSetDepthCompareOp, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthCompareOp.html
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOp &&
+                       "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdSetDepthCompareOp( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
+  }
+
+  // wrapper function for command vkCmdSetDepthBoundsTestEnable, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBoundsTestEnable.html
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnable &&
+                       "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdSetDepthBoundsTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
+  }
+
+  // wrapper function for command vkCmdSetStencilTestEnable, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilTestEnable.html
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnable &&
+                       "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdSetStencilTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
+  }
+
+  // wrapper function for command vkCmdSetStencilOp, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilOp.html
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
+                                                      VULKAN_HPP_NAMESPACE::StencilOp        failOp,
+                                                      VULKAN_HPP_NAMESPACE::StencilOp        passOp,
+                                                      VULKAN_HPP_NAMESPACE::StencilOp        depthFailOp,
+                                                      VULKAN_HPP_NAMESPACE::CompareOp        compareOp ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOp &&
+                       "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdSetStencilOp( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                        static_cast<VkStencilFaceFlags>( faceMask ),
+                                        static_cast<VkStencilOp>( failOp ),
+                                        static_cast<VkStencilOp>( passOp ),
+                                        static_cast<VkStencilOp>( depthFailOp ),
+                                        static_cast<VkCompareOp>( compareOp ) );
+  }
+
+  // wrapper function for command vkCmdSetRasterizerDiscardEnable, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizerDiscardEnable.html
+  VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizerDiscardEnable &&
+                       "Function requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdSetRasterizerDiscardEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) );
+  }
+
+  // wrapper function for command vkCmdSetDepthBiasEnable, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBiasEnable.html
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT(
getDispatcher()->vkCmdSetDepthBiasEnable && + "Function requires or or " ); + + getDispatcher()->vkCmdSetDepthBiasEnable( static_cast( m_commandBuffer ), static_cast( depthBiasEnable ) ); + } + + // wrapper function for command vkCmdSetPrimitiveRestartEnable, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveRestartEnable.html + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveRestartEnable && + "Function requires or or " ); + + getDispatcher()->vkCmdSetPrimitiveRestartEnable( static_cast( m_commandBuffer ), static_cast( primitiveRestartEnable ) ); + } + + // wrapper function for command vkGetDeviceBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirements.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirements && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetDeviceBufferMemoryRequirements( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetDeviceBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirements.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirements && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetDeviceBufferMemoryRequirements( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + // wrapper function for command vkGetDeviceImageMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirements.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirements && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetDeviceImageMemoryRequirements( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetDeviceImageMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirements.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
getDispatcher()->vkGetDeviceImageMemoryRequirements && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetDeviceImageMemoryRequirements( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + // wrapper function for command vkGetDeviceImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirements.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSparseMemoryRequirements && + "Function requires or " ); + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + getDispatcher()->vkGetDeviceImageSparseMemoryRequirements( + static_cast( m_device ), reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + getDispatcher()->vkGetDeviceImageSparseMemoryRequirements( static_cast( m_device ), + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + //=== VK_VERSION_1_4 === + + // wrapper function for command vkCmdSetLineStipple, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStipple.html + VULKAN_HPP_INLINE void CommandBuffer::setLineStipple( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStipple && + "Function requires or or " ); + + getDispatcher()->vkCmdSetLineStipple( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); + } + + // wrapper function for command vkMapMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkMapMemory2 && "Function requires or " ); + + void * pData; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkMapMemory2( static_cast( m_device ), reinterpret_cast( &memoryMapInfo ), &pData ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2" ); + + return pData; + } + + // wrapper function for command vkUnmapMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory2.html + VULKAN_HPP_INLINE void Device::unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUnmapMemory2 && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkUnmapMemory2( static_cast( m_device ), reinterpret_cast( &memoryUnmapInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2" ); + } + + // wrapper function for command vkCmdBindIndexBuffer2, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindIndexBuffer2.html + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindIndexBuffer2 && "Function requires or " ); + + getDispatcher()->vkCmdBindIndexBuffer2( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( size ), + static_cast( indexType ) ); + } + + // wrapper function for command vkGetRenderingAreaGranularity, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderingAreaGranularity.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D + Device::getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRenderingAreaGranularity && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Extent2D granularity; + getDispatcher()->vkGetRenderingAreaGranularity( static_cast( m_device ), + reinterpret_cast( &renderingAreaInfo ), + reinterpret_cast( &granularity ) ); + + return granularity; + } + + // wrapper function for command vkGetDeviceImageSubresourceLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayout.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayout && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + getDispatcher()->vkGetDeviceImageSubresourceLayout( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &layout ) ); + + return layout; + } + + // wrapper function for command vkGetDeviceImageSubresourceLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayout.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayout && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + getDispatcher()->vkGetDeviceImageSubresourceLayout( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &layout ) ); + + return structureChain; + } + + // wrapper function for command vkGetImageSubresourceLayout2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Image::getSubresourceLayout2( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetImageSubresourceLayout2 && + "Function requires or or or " ); + + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + getDispatcher()->vkGetImageSubresourceLayout2( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + 
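+    // Illustrative usage sketch (not part of the generated wrapper): querying the layout of a
+    // subresource of a linear or host-copyable image. 'image' below is a hypothetical vk::raii::Image.
+    //
+    //   vk::ImageSubresource2   subresource( { vk::ImageAspectFlagBits::eColor, 0, 0 } );
+    //   vk::SubresourceLayout2  layout2  = image.getSubresourceLayout2( subresource );
+    //   vk::DeviceSize          rowPitch = layout2.subresourceLayout.rowPitch;
+    //
+    // The row pitch can then be used to address the image data when it is mapped or host-copied.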
return layout; + } + + // wrapper function for command vkGetImageSubresourceLayout2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Image::getSubresourceLayout2( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetImageSubresourceLayout2 && + "Function requires or or or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + getDispatcher()->vkGetImageSubresourceLayout2( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return structureChain; + } + + // wrapper function for command vkCmdPushDescriptorSet, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet.html + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSet && "Function requires or " ); + + getDispatcher()->vkCmdPushDescriptorSet( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ) ); + } + + // wrapper function for command vkCmdPushDescriptorSetWithTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate.html + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdPushDescriptorSetWithTemplate && + "Function requires or or " ); + + getDispatcher()->vkCmdPushDescriptorSetWithTemplate( static_cast( m_commandBuffer ), + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + reinterpret_cast( &data ) ); + } + + // wrapper function for command vkCmdSetRenderingAttachmentLocations, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingAttachmentLocations.html + VULKAN_HPP_INLINE void + CommandBuffer::setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRenderingAttachmentLocations && + "Function requires or " ); + + getDispatcher()->vkCmdSetRenderingAttachmentLocations( static_cast( m_commandBuffer ), + reinterpret_cast( &locationInfo ) ); + } + + // wrapper function for command vkCmdSetRenderingInputAttachmentIndices, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingInputAttachmentIndices.html + VULKAN_HPP_INLINE void CommandBuffer::setRenderingInputAttachmentIndices( + const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRenderingInputAttachmentIndices && + "Function requires or " ); + + getDispatcher()->vkCmdSetRenderingInputAttachmentIndices( static_cast( m_commandBuffer ), + 
reinterpret_cast( &inputAttachmentIndexInfo ) ); + } + + // wrapper function for command vkCmdBindDescriptorSets2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets2.html + VULKAN_HPP_INLINE void + CommandBuffer::bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorSets2 && + "Function requires or " ); + + getDispatcher()->vkCmdBindDescriptorSets2( static_cast( m_commandBuffer ), + reinterpret_cast( &bindDescriptorSetsInfo ) ); + } + + // wrapper function for command vkCmdPushConstants2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants2.html + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushConstants2 && "Function requires or " ); + + getDispatcher()->vkCmdPushConstants2( static_cast( m_commandBuffer ), + reinterpret_cast( &pushConstantsInfo ) ); + } + + // wrapper function for command vkCmdPushDescriptorSet2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet2.html + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSet2 && "Function requires or " ); + + getDispatcher()->vkCmdPushDescriptorSet2( static_cast( m_commandBuffer ), + reinterpret_cast( &pushDescriptorSetInfo ) ); + } + + // wrapper function for command vkCmdPushDescriptorSetWithTemplate2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate2.html + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate2( + const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetWithTemplate2 && + "Function requires or " ); + + getDispatcher()->vkCmdPushDescriptorSetWithTemplate2( + static_cast( m_commandBuffer ), reinterpret_cast( &pushDescriptorSetWithTemplateInfo ) ); + } + + // wrapper function for command vkCopyMemoryToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToImage.html + VULKAN_HPP_INLINE void Device::copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToImage && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCopyMemoryToImage( + static_cast( m_device ), reinterpret_cast( ©MemoryToImageInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImage" ); + } + + // wrapper function for command vkCopyImageToMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToMemory.html + VULKAN_HPP_INLINE void Device::copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToMemory && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCopyImageToMemory( + static_cast( m_device ), reinterpret_cast( ©ImageToMemoryInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemory" ); + } + + // wrapper function for command vkCopyImageToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToImage.html + VULKAN_HPP_INLINE void Device::copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToImage && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkCopyImageToImage( static_cast( m_device ), reinterpret_cast( ©ImageToImageInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImage" ); + } + + // wrapper function for command vkTransitionImageLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTransitionImageLayout.html + VULKAN_HPP_INLINE void + Device::transitionImageLayout( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkTransitionImageLayout && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkTransitionImageLayout( + static_cast( m_device ), transitions.size(), reinterpret_cast( transitions.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayout" ); + } + + //=== VK_KHR_surface === + + // wrapper function for command vkGetPhysicalDeviceSurfaceSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceSupportKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceSupportKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Bool32 supported; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetPhysicalDeviceSurfaceSupportKHR( static_cast( m_physicalDevice ), + queueFamilyIndex, + static_cast( surface ), + reinterpret_cast( &supported ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); + + return supported; + } + + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilitiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR + PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + reinterpret_cast( &surfaceCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); + + return surfaceCapabilities; + } + + // wrapper function for command vkGetPhysicalDeviceSurfaceFormatsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormatsKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSurfaceFormatsKHR( 
VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR && "Function requires " ); + + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR( + static_cast( m_physicalDevice ), static_cast( surface ), &surfaceFormatCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return surfaceFormats; + } + + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR && + "Function requires " ); + + std::vector presentModes; + uint32_t presentModeCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR( + static_cast( m_physicalDevice ), static_cast( surface ), &presentModeCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + return presentModes; + } + + //=== VK_KHR_swapchain === + + // wrapper function for command vkCreateSwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSwapchainKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateSwapchainKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return 
VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSwapchainKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR( *this, *reinterpret_cast( &swapchain ), allocator ); + } + + // wrapper function for command vkGetSwapchainImagesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainImagesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector SwapchainKHR::getImages() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainImagesKHR && "Function requires " ); + + std::vector swapchainImages; + uint32_t swapchainImageCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetSwapchainImagesKHR( + static_cast( m_device ), static_cast( m_swapchainKHR ), &swapchainImageCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && swapchainImageCount ) + { + swapchainImages.resize( swapchainImageCount ); + result = + static_cast( getDispatcher()->vkGetSwapchainImagesKHR( static_cast( m_device ), + static_cast( m_swapchainKHR ), + &swapchainImageCount, + reinterpret_cast( swapchainImages.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getImages" ); + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + if ( swapchainImageCount < swapchainImages.size() ) + { + swapchainImages.resize( swapchainImageCount ); + } + return swapchainImages; + } + + // wrapper function for command vkAcquireNextImageKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireNextImageKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair + SwapchainKHR::acquireNextImage( uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImageKHR && "Function requires " ); + + uint32_t imageIndex; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkAcquireNextImageKHR( static_cast( m_device ), + static_cast( m_swapchainKHR ), + timeout, + static_cast( semaphore ), + static_cast( fence ), + &imageIndex ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireNextImage", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eTimeout, + VULKAN_HPP_NAMESPACE::Result::eNotReady, + VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + + return std::make_pair( result, std::move( imageIndex ) ); + } + + // wrapper function for command vkQueuePresentKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueuePresentKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueuePresentKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkQueuePresentKHR( static_cast( m_queue ), reinterpret_cast( &presentInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + + return static_cast( result ); + } + + // wrapper function for command vkGetDeviceGroupPresentCapabilitiesKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPresentCapabilitiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR Device::getGroupPresentCapabilitiesKHR() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR( + static_cast( m_device ), reinterpret_cast( &deviceGroupPresentCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); + + return deviceGroupPresentCapabilities; + } + + // wrapper function for command vkGetDeviceGroupSurfacePresentModesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupSurfacePresentModesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR + Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR( + static_cast( m_device ), static_cast( surface ), reinterpret_cast( &modes ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); + + return modes; + } + + // wrapper function for command vkGetPhysicalDevicePresentRectanglesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDevicePresentRectanglesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR && + "Function requires or " ); + + std::vector rects; + uint32_t rectCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR( + static_cast( m_physicalDevice ), static_cast( surface ), &rectCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && rectCount ) + { + rects.resize( rectCount ); + result = static_cast( + getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + &rectCount, + reinterpret_cast( rects.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); + VULKAN_HPP_ASSERT( rectCount <= rects.size() ); + if ( rectCount < rects.size() ) + { + rects.resize( rectCount ); + } + return rects; + } + + // wrapper function for command vkAcquireNextImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireNextImage2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair + Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImage2KHR && "Function requires or " ); + + uint32_t imageIndex; + VULKAN_HPP_NAMESPACE::Result result = static_cast( 
getDispatcher()->vkAcquireNextImage2KHR( + static_cast( m_device ), reinterpret_cast( &acquireInfo ), &imageIndex ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eTimeout, + VULKAN_HPP_NAMESPACE::Result::eNotReady, + VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + + return std::make_pair( result, std::move( imageIndex ) ); + } + + //=== VK_KHR_display === + + // wrapper function for command vkGetPhysicalDeviceDisplayPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPropertiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getDisplayPropertiesKHR() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR( static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR( + static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + // wrapper function for command vkGetPhysicalDeviceDisplayPlanePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlanePropertiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getDisplayPlanePropertiesKHR() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR( static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + // wrapper function for command vkGetDisplayPlaneSupportedDisplaysKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneSupportedDisplaysKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( 
uint32_t planeIndex ) const + { + std::vector displays; + uint32_t displayCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetDisplayPlaneSupportedDisplaysKHR( static_cast( m_physicalDevice ), planeIndex, &displayCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount ) + { + displays.resize( displayCount ); + result = static_cast( getDispatcher()->vkGetDisplayPlaneSupportedDisplaysKHR( + static_cast( m_physicalDevice ), planeIndex, &displayCount, reinterpret_cast( displays.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncomplete ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); +# endif + } + + std::vector displaysRAII; + displaysRAII.reserve( displays.size() ); + for ( auto & display : displays ) + { + displaysRAII.emplace_back( *this, *reinterpret_cast( &display ) ); + } + return displaysRAII; + } + + // wrapper function for command vkGetDisplayModePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModePropertiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector DisplayKHR::getModeProperties() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModePropertiesKHR && "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetDisplayModePropertiesKHR( + static_cast( m_physicalDevice ), static_cast( m_displayKHR ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + getDispatcher()->vkGetDisplayModePropertiesKHR( static_cast( m_physicalDevice ), + static_cast( m_displayKHR ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + // wrapper function for command vkCreateDisplayModeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayModeKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + DisplayKHR::createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateDisplayModeKHR( + static_cast( m_physicalDevice ), + static_cast( m_displayKHR ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &mode ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "DisplayKHR::createMode" ); 
+# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR( *this, *reinterpret_cast( &mode ) ); + } + + // wrapper function for command vkGetDisplayPlaneCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneCapabilitiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR + DisplayModeKHR::getDisplayPlaneCapabilities( uint32_t planeIndex ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayPlaneCapabilitiesKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetDisplayPlaneCapabilitiesKHR( static_cast( m_physicalDevice ), + static_cast( m_displayModeKHR ), + planeIndex, + reinterpret_cast( &capabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayModeKHR::getDisplayPlaneCapabilities" ); + + return capabilities; + } + + // wrapper function for command vkCreateDisplayPlaneSurfaceKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayPlaneSurfaceKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateDisplayPlaneSurfaceKHR( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createDisplayPlaneSurfaceKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); + } + + //=== VK_KHR_display_swapchain === + + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + std::vector swapchains( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateSharedSwapchainsKHR( + static_cast( m_device ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSharedSwapchainsKHR" ); +# endif + } + + std::vector swapchainsRAII; + swapchainsRAII.reserve( swapchains.size() ); + for ( auto & swapchain : swapchains ) + { + swapchainsRAII.emplace_back( *this, *reinterpret_cast( &swapchain ), allocator ); + } + return swapchainsRAII; + } + + 
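+    // NOTE (illustrative only, not part of the generated wrappers): a minimal usage sketch of the
+    // shared-swapchain helper above, assuming the default vk / vk::raii namespace macros, an
+    // already-created vk::raii::Device named `device`, and a filled-in
+    // vk::SwapchainCreateInfoKHR named `createInfo` (hypothetical names). Each returned RAII
+    // object owns its VkSwapchainKHR and destroys it when it goes out of scope; with
+    // VULKAN_HPP_RAII_NO_EXCEPTIONS defined, the call returns an expected-style value carrying
+    // the vk::Result instead of throwing.
+    //
+    //   std::array<vk::SwapchainCreateInfoKHR, 2> createInfos = { createInfo, createInfo };
+    //   std::vector<vk::raii::SwapchainKHR>       swapchains  = device.createSharedSwapchainsKHR( createInfos );
+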
    // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR>::Type
+      Device::createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const &                            createInfo,
+                                        VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+      VULKAN_HPP_RAII_CREATE_NOEXCEPT
+    {
+      VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
+      VULKAN_HPP_NAMESPACE::Result       result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateSharedSwapchainsKHR(
+        static_cast<VkDevice>( m_device ),
+        1,
+        reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
+        return VULKAN_HPP_UNEXPECTED( result );
+# else
+        VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSharedSwapchainKHR" );
+# endif
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR( *this, *reinterpret_cast<VkSwapchainKHR *>( &swapchain ), allocator );
+    }
+
+# if defined( VK_USE_PLATFORM_XLIB_KHR )
+    //=== VK_KHR_xlib_surface ===
+
+    // wrapper function for command vkCreateXlibSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXlibSurfaceKHR.html
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
+      Instance::createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const &                          createInfo,
+                                      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+      VULKAN_HPP_RAII_CREATE_NOEXCEPT
+    {
+      VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+      VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateXlibSurfaceKHR(
+        static_cast<VkInstance>( m_instance ),
+        reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
+        return VULKAN_HPP_UNEXPECTED( result );
+# else
+        VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createXlibSurfaceKHR" );
+# endif
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast<VkSurfaceKHR *>( &surface ), allocator );
+    }
+
+    // wrapper function for command vkGetPhysicalDeviceXlibPresentationSupportKHR, see
+    // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceXlibPresentationSupportKHR.html
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
+      PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR &&
+                         "Function requires <VK_KHR_xlib_surface>" );
+
+      VkBool32 result =
+        getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &dpy, visualID );
+
+      return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
+    }
+# endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+# if defined( VK_USE_PLATFORM_XCB_KHR )
+    //=== VK_KHR_xcb_surface ===
+
+    // wrapper function for command vkCreateXcbSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXcbSurfaceKHR.html
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
+      Instance::createXcbSurfaceKHR(
VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateXcbSurfaceKHR( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createXcbSurfaceKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); + } + + // wrapper function for command vkGetPhysicalDeviceXcbPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceXcbPresentationSupportKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( + uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR && + "Function requires " ); + + VkBool32 result = getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR( + static_cast( m_physicalDevice ), queueFamilyIndex, &connection, visual_id ); + + return static_cast( result ); + } +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + // wrapper function for command vkCreateWaylandSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWaylandSurfaceKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateWaylandSurfaceKHR( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createWaylandSurfaceKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); + } + + // wrapper function for command vkGetPhysicalDeviceWaylandPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceWaylandPresentationSupportKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR && + "Function requires " ); + + VkBool32 result = + getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex, &display ); + + 
return static_cast( result ); + } +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + // wrapper function for command vkCreateAndroidSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAndroidSurfaceKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateAndroidSurfaceKHR( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createAndroidSurfaceKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); + } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + // wrapper function for command vkCreateWin32SurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWin32SurfaceKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateWin32SurfaceKHR( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createWin32SurfaceKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); + } + + // wrapper function for command vkGetPhysicalDeviceWin32PresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceWin32PresentationSupportKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR && + "Function requires " ); + + VkBool32 result = getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex ); + + return static_cast( result ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + // wrapper function for command vkCreateDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugReportCallbackEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateDebugReportCallbackEXT( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &callback ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createDebugReportCallbackEXT" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT( + *this, *reinterpret_cast( &callback ), allocator ); + } + + // wrapper function for command vkDebugReportMessageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugReportMessageEXT.html + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, + uint64_t object, + size_t location, + int32_t messageCode, + const std::string & layerPrefix, + const std::string & message ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkDebugReportMessageEXT && "Function requires " ); + + getDispatcher()->vkDebugReportMessageEXT( static_cast( m_instance ), + static_cast( flags ), + static_cast( objectType_ ), + object, + location, + messageCode, + layerPrefix.c_str(), + message.c_str() ); + } + + //=== VK_EXT_debug_marker === + + // wrapper function for command vkDebugMarkerSetObjectTagEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugMarkerSetObjectTagEXT.html + VULKAN_HPP_INLINE void Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkDebugMarkerSetObjectTagEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkDebugMarkerSetObjectTagEXT( + static_cast( m_device ), reinterpret_cast( &tagInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); + } + + // wrapper function for command vkDebugMarkerSetObjectNameEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugMarkerSetObjectNameEXT.html + VULKAN_HPP_INLINE void Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkDebugMarkerSetObjectNameEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkDebugMarkerSetObjectNameEXT( + static_cast( m_device ), reinterpret_cast( &nameInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); + } + + // wrapper function for command vkCmdDebugMarkerBeginEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerBeginEXT.html + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
getDispatcher()->vkCmdDebugMarkerBeginEXT && "Function requires " ); + + getDispatcher()->vkCmdDebugMarkerBeginEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &markerInfo ) ); + } + + // wrapper function for command vkCmdDebugMarkerEndEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerEndEXT.html + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerEndEXT && "Function requires " ); + + getDispatcher()->vkCmdDebugMarkerEndEXT( static_cast( m_commandBuffer ) ); + } + + // wrapper function for command vkCmdDebugMarkerInsertEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerInsertEXT.html + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerInsertEXT && "Function requires " ); + + getDispatcher()->vkCmdDebugMarkerInsertEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &markerInfo ) ); + } + + //=== VK_KHR_video_queue === + + // wrapper function for command vkGetPhysicalDeviceVideoCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoCapabilitiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR + PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( &videoProfile ), + reinterpret_cast( &capabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + + return capabilities; + } + + // wrapper function for command vkGetPhysicalDeviceVideoCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoCapabilitiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR && + "Function requires " ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( &videoProfile ), + reinterpret_cast( &capabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + + return structureChain; + } + + // wrapper function for command vkGetPhysicalDeviceVideoFormatPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const + { + VULKAN_HPP_ASSERT( 
getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function requires " ); + + std::vector videoFormatProperties; + uint32_t videoFormatPropertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + } + return videoFormatProperties; + } + + // wrapper function for command vkGetPhysicalDeviceVideoFormatPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function requires " ); + + std::vector structureChains; + std::vector videoFormatProperties; + uint32_t videoFormatPropertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) + { + structureChains.resize( videoFormatPropertyCount ); + videoFormatProperties.resize( videoFormatPropertyCount ); + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + videoFormatProperties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + structureChains.resize( videoFormatPropertyCount ); + } + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + structureChains[i].template get() = videoFormatProperties[i]; + } + return structureChains; + } + + // wrapper function for command vkCreateVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateVideoSessionKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &videoSession ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createVideoSessionKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR( *this, *reinterpret_cast( &videoSession ), allocator ); + } + + // wrapper function for command vkGetVideoSessionMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetVideoSessionMemoryRequirementsKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector VideoSessionKHR::getMemoryRequirements() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR && + "Function requires " ); + + std::vector memoryRequirements; + uint32_t memoryRequirementsCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR( + static_cast( m_device ), static_cast( m_videoSessionKHR ), &memoryRequirementsCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && memoryRequirementsCount ) + { + memoryRequirements.resize( memoryRequirementsCount ); + result = static_cast( + getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR( static_cast( m_device ), + static_cast( m_videoSessionKHR ), + &memoryRequirementsCount, + reinterpret_cast( memoryRequirements.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + + VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); + if ( memoryRequirementsCount < memoryRequirements.size() ) + { + memoryRequirements.resize( memoryRequirementsCount ); + } + return memoryRequirements; + } + + // wrapper function for command vkBindVideoSessionMemoryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindVideoSessionMemoryKHR.html + VULKAN_HPP_INLINE void VideoSessionKHR::bindMemory( + VULKAN_HPP_NAMESPACE::ArrayProxy const & bindSessionMemoryInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindVideoSessionMemoryKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkBindVideoSessionMemoryKHR( static_cast( m_device ), + static_cast( m_videoSessionKHR ), + bindSessionMemoryInfos.size(), + reinterpret_cast( bindSessionMemoryInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::VideoSessionKHR::bindMemory" ); + } + + // wrapper function for command vkCreateVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionParametersKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR>::Type + Device::createVideoSessionParametersKHR( 
VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateVideoSessionParametersKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &videoSessionParameters ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createVideoSessionParametersKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR( + *this, *reinterpret_cast( &videoSessionParameters ), allocator ); + } + + // wrapper function for command vkUpdateVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateVideoSessionParametersKHR.html + VULKAN_HPP_INLINE void VideoSessionParametersKHR::update( const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateVideoSessionParametersKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkUpdateVideoSessionParametersKHR( static_cast( m_device ), + static_cast( m_videoSessionParametersKHR ), + reinterpret_cast( &updateInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::VideoSessionParametersKHR::update" ); + } + + // wrapper function for command vkCmdBeginVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginVideoCodingKHR.html + VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginVideoCodingKHR && "Function requires " ); + + getDispatcher()->vkCmdBeginVideoCodingKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &beginInfo ) ); + } + + // wrapper function for command vkCmdEndVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndVideoCodingKHR.html + VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndVideoCodingKHR && "Function requires " ); + + getDispatcher()->vkCmdEndVideoCodingKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &endCodingInfo ) ); + } + + // wrapper function for command vkCmdControlVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdControlVideoCodingKHR.html + VULKAN_HPP_INLINE void + CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdControlVideoCodingKHR && "Function requires " ); + + getDispatcher()->vkCmdControlVideoCodingKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &codingControlInfo ) ); + } + + //=== VK_KHR_video_decode_queue === + + // wrapper function for command vkCmdDecodeVideoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecodeVideoKHR.html + VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const 
VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecodeVideoKHR && "Function requires " ); + + getDispatcher()->vkCmdDecodeVideoKHR( static_cast( m_commandBuffer ), reinterpret_cast( &decodeInfo ) ); + } + + //=== VK_EXT_transform_feedback === + + // wrapper function for command vkCmdBindTransformFeedbackBuffersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindTransformFeedbackBuffersEXT.html + VULKAN_HPP_INLINE void + CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindTransformFeedbackBuffersEXT && + "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBindTransformFeedbackBuffersEXT( static_cast( m_commandBuffer ), + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ) ); + } + + // wrapper function for command vkCmdBeginTransformFeedbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginTransformFeedbackEXT.html + VULKAN_HPP_INLINE void + CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBufferOffsets ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginTransformFeedbackEXT && "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); +# else + if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBeginTransformFeedbackEXT( static_cast( m_commandBuffer ), + firstCounterBuffer, + counterBuffers.size(), + reinterpret_cast( counterBuffers.data() ), + reinterpret_cast( counterBufferOffsets.data() ) ); + } + + // wrapper function for command vkCmdEndTransformFeedbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndTransformFeedbackEXT.html + VULKAN_HPP_INLINE void + CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBufferOffsets ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndTransformFeedbackEXT && "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); +# else + if ( 
!counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdEndTransformFeedbackEXT( static_cast( m_commandBuffer ), + firstCounterBuffer, + counterBuffers.size(), + reinterpret_cast( counterBuffers.data() ), + reinterpret_cast( counterBufferOffsets.data() ) ); + } + + // wrapper function for command vkCmdBeginQueryIndexedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginQueryIndexedEXT.html + VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + uint32_t index ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginQueryIndexedEXT && "Function requires " ); + + getDispatcher()->vkCmdBeginQueryIndexedEXT( + static_cast( m_commandBuffer ), static_cast( queryPool ), query, static_cast( flags ), index ); + } + + // wrapper function for command vkCmdEndQueryIndexedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndQueryIndexedEXT.html + VULKAN_HPP_INLINE void + CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndQueryIndexedEXT && "Function requires " ); + + getDispatcher()->vkCmdEndQueryIndexedEXT( static_cast( m_commandBuffer ), static_cast( queryPool ), query, index ); + } + + // wrapper function for command vkCmdDrawIndirectByteCountEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectByteCountEXT.html + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, + uint32_t firstInstance, + VULKAN_HPP_NAMESPACE::Buffer counterBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectByteCountEXT && "Function requires " ); + + getDispatcher()->vkCmdDrawIndirectByteCountEXT( static_cast( m_commandBuffer ), + instanceCount, + firstInstance, + static_cast( counterBuffer ), + static_cast( counterBufferOffset ), + counterOffset, + vertexStride ); + } + + //=== VK_NVX_binary_import === + + // wrapper function for command vkCreateCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuModuleNVX.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::CuModuleNVX module; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateCuModuleNVX( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &module ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createCuModuleNVX" ); +# endif + } + + return 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX( *this, *reinterpret_cast( &module ), allocator ); + } + + // wrapper function for command vkCreateCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuFunctionNVX.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::CuFunctionNVX function; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateCuFunctionNVX( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &function ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createCuFunctionNVX" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX( *this, *reinterpret_cast( &function ), allocator ); + } + + // wrapper function for command vkCmdCuLaunchKernelNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCuLaunchKernelNVX.html + VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCuLaunchKernelNVX && "Function requires " ); + + getDispatcher()->vkCmdCuLaunchKernelNVX( static_cast( m_commandBuffer ), reinterpret_cast( &launchInfo ) ); + } + + //=== VK_NVX_image_view_handle === + + // wrapper function for command vkGetImageViewHandleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewHandleNVX.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t + Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewHandleNVX && "Function requires " ); + + uint32_t result = + getDispatcher()->vkGetImageViewHandleNVX( static_cast( m_device ), reinterpret_cast( &info ) ); + + return result; + } + + // wrapper function for command vkGetImageViewHandle64NVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewHandle64NVX.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getImageViewHandle64NVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewHandle64NVX && "Function requires " ); + + uint64_t result = + getDispatcher()->vkGetImageViewHandle64NVX( static_cast( m_device ), reinterpret_cast( &info ) ); + + return result; + } + + // wrapper function for command vkGetImageViewAddressNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewAddressNVX.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX ImageView::getAddressNVX() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewAddressNVX && "Function requires " ); + + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetImageViewAddressNVX( + static_cast( m_device ), static_cast( m_imageView ), reinterpret_cast( &properties ) ) ); 
+ VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::ImageView::getAddressNVX" ); + + return properties; + } + + //=== VK_AMD_draw_indirect_count === + + // wrapper function for command vkCmdDrawIndirectCountAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectCountAMD.html + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCountAMD && + "Function requires or or " ); + + getDispatcher()->vkCmdDrawIndirectCountAMD( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + // wrapper function for command vkCmdDrawIndexedIndirectCountAMD, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirectCountAMD.html + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdDrawIndexedIndirectCountAMD && + "Function requires or or " ); + + getDispatcher()->vkCmdDrawIndexedIndirectCountAMD( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + //=== VK_AMD_shader_info === + + // wrapper function for command vkGetShaderInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderInfoAMD.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Pipeline::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderInfoAMD && "Function requires " ); + + std::vector info; + size_t infoSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetShaderInfoAMD( static_cast( m_device ), + static_cast( m_pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result = static_cast( getDispatcher()->vkGetShaderInfoAMD( static_cast( m_device ), + static_cast( m_pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + reinterpret_cast( info.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getShaderInfoAMD" ); + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + if ( infoSize < info.size() ) + { + info.resize( infoSize ); + } + return info; + } + + //=== VK_KHR_dynamic_rendering === + + // wrapper function for command vkCmdBeginRenderingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderingKHR.html + VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT + { + 
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderingKHR && + "Function requires or " ); + + getDispatcher()->vkCmdBeginRenderingKHR( static_cast( m_commandBuffer ), reinterpret_cast( &renderingInfo ) ); + } + + // wrapper function for command vkCmdEndRenderingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderingKHR.html + VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderingKHR && "Function requires or " ); + + getDispatcher()->vkCmdEndRenderingKHR( static_cast( m_commandBuffer ) ); + } + +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + // wrapper function for command vkCreateStreamDescriptorSurfaceGGP, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateStreamDescriptorSurfaceGGP.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateStreamDescriptorSurfaceGGP( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createStreamDescriptorSurfaceGGP" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); + } +# endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalImageFormatPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalImageFormatPropertiesNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV + PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + static_cast( m_physicalDevice ), + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + static_cast( externalHandleType ), + reinterpret_cast( &externalImageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); + + return externalImageFormatProperties; + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + + // wrapper function for command 
vkGetMemoryWin32HandleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandleNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE DeviceMemory::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandleNV && "Function requires " ); + + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetMemoryWin32HandleNV( static_cast( m_device ), + static_cast( m_deviceMemory ), + static_cast( handleType ), + &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::getMemoryWin32HandleNV" ); + + return handle; + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + + // wrapper function for command vkGetPhysicalDeviceFeatures2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; + getDispatcher()->vkGetPhysicalDeviceFeatures2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &features ) ); + + return features; + } + + // wrapper function for command vkGetPhysicalDeviceFeatures2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain PhysicalDevice::getFeatures2KHR() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceFeatures2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &features ) ); + + return structureChain; + } + + // wrapper function for command vkGetPhysicalDeviceProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; + getDispatcher()->vkGetPhysicalDeviceProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &properties ) ); + + return properties; + } + + // wrapper function for command vkGetPhysicalDeviceProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain PhysicalDevice::getProperties2KHR() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &properties ) ); + 
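+      // NOTE: illustrative usage sketch, not part of the generated wrappers. Assuming a
+      // vk::raii::PhysicalDevice named `physicalDevice`, obtained from an instance that enabled
+      // VK_KHR_get_physical_device_properties2 (or targets Vulkan 1.1+), the structure-chain
+      // overload above is typically used like this:
+      //
+      //   auto chain   = physicalDevice.getProperties2KHR<vk::PhysicalDeviceProperties2,
+      //                                                   vk::PhysicalDeviceIDProperties>();
+      //   auto & props = chain.get<vk::PhysicalDeviceProperties2>().properties;
+      //   auto & uuid  = chain.get<vk::PhysicalDeviceIDProperties>().deviceUUID;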
+ return structureChain; + } + + // wrapper function for command vkGetPhysicalDeviceFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; + getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return formatProperties; + } + + // wrapper function for command vkGetPhysicalDeviceFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return structureChain; + } + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties2 + PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + + return imageFormatProperties; + } + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR && + "Function requires or " ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); 
+ VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + + return structureChain; + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR && + "Function requires or " ); + + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast( m_physicalDevice ), + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR && + "Function requires or " ); + + std::vector structureChains; + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); + structureChains.resize( queueFamilyPropertyCount ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = structureChains[i].template get().pNext; + } + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast( m_physicalDevice ), + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + structureChains.resize( queueFamilyPropertyCount ); + } + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + structureChains[i].template get() = queueFamilyProperties[i]; + } + return structureChains; + } + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + PhysicalDevice::getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; + getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &memoryProperties ) ); + + return memoryProperties; + } + + // wrapper function for command 
vkGetPhysicalDeviceMemoryProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain PhysicalDevice::getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = + structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &memoryProperties ) ); + + return structureChain; + } + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR && + "Function requires or " ); + + std::vector properties; + uint32_t propertyCount; + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + //=== VK_KHR_device_group === + + // wrapper function for command vkGetDeviceGroupPeerMemoryFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPeerMemoryFeaturesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; + getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR( static_cast( m_device ), + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( &peerMemoryFeatures ) ); + + return peerMemoryFeatures; + } + + // wrapper function for command vkCmdSetDeviceMaskKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDeviceMaskKHR.html + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDeviceMaskKHR && "Function requires or " ); + + getDispatcher()->vkCmdSetDeviceMaskKHR( static_cast( m_commandBuffer ), deviceMask ); + } + + // wrapper function for command vkCmdDispatchBaseKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchBaseKHR.html + VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ 
) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchBaseKHR && "Function requires or " ); + + getDispatcher()->vkCmdDispatchBaseKHR( + static_cast( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + // wrapper function for command vkCreateViSurfaceNN, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateViSurfaceNN.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateViSurfaceNN( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createViSurfaceNN" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); + } +# endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_maintenance1 === + + // wrapper function for command vkTrimCommandPoolKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTrimCommandPoolKHR.html + VULKAN_HPP_INLINE void CommandPool::trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkTrimCommandPoolKHR && "Function requires or " ); + + getDispatcher()->vkTrimCommandPoolKHR( + static_cast( m_device ), static_cast( m_commandPool ), static_cast( flags ) ); + } + + //=== VK_KHR_device_group_creation === + + // wrapper function for command vkEnumeratePhysicalDeviceGroupsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroupsKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Instance::enumeratePhysicalDeviceGroupsKHR() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR && + "Function requires or " ); + + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR( static_cast( m_instance ), &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR( + static_cast( m_instance ), + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + 
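+      // NOTE: illustrative usage sketch, not part of the generated wrappers. Assuming a
+      // vk::raii::Instance named `instance` created with VK_KHR_device_group_creation enabled
+      // (or Vulkan 1.1+), the wrapper above hides the VK_INCOMPLETE retry loop seen here:
+      //
+      //   std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroupsKHR();
+      //   for ( auto const & group : groups )
+      //   {
+      //     // group.physicalDeviceCount devices in group.physicalDevices[] can back one logical device
+      //   }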
return physicalDeviceGroupProperties; + } + + //=== VK_KHR_external_memory_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalBufferPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalBufferPropertiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; + getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( &externalBufferInfo ), + reinterpret_cast( &externalBufferProperties ) ); + + return externalBufferProperties; + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + + // wrapper function for command vkGetMemoryWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandleKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE + Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandleKHR && "Function requires " ); + + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetMemoryWin32HandleKHR( + static_cast( m_device ), reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); + + return handle; + } + + // wrapper function for command vkGetMemoryWin32HandlePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandlePropertiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR + Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR( static_cast( m_device ), + static_cast( handleType ), + handle, + reinterpret_cast( &memoryWin32HandleProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); + + return memoryWin32HandleProperties; + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + + // wrapper function for command vkGetMemoryFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryFdKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryFdKHR && "Function requires " ); + + int fd; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetMemoryFdKHR( static_cast( m_device ), reinterpret_cast( &getFdInfo ), &fd ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); + + return 
fd; + } + + // wrapper function for command vkGetMemoryFdPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryFdPropertiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR + Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryFdPropertiesKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetMemoryFdPropertiesKHR( static_cast( m_device ), + static_cast( handleType ), + fd, + reinterpret_cast( &memoryFdProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); + + return memoryFdProperties; + } + + //=== VK_KHR_external_semaphore_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalSemaphorePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalSemaphorePropertiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; + getDispatcher()->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &externalSemaphoreInfo ), + reinterpret_cast( &externalSemaphoreProperties ) ); + + return externalSemaphoreProperties; + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + + // wrapper function for command vkImportSemaphoreWin32HandleKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreWin32HandleKHR.html + VULKAN_HPP_INLINE void + Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreWin32HandleKHR && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkImportSemaphoreWin32HandleKHR( + static_cast( m_device ), reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); + } + + // wrapper function for command vkGetSemaphoreWin32HandleKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreWin32HandleKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE + Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreWin32HandleKHR && + "Function requires " ); + + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetSemaphoreWin32HandleKHR( + static_cast( m_device ), reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); + + return handle; + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + 
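+
+    // NOTE: illustrative usage sketch, not part of the generated wrappers. The external-semaphore
+    // helpers wrapped above (Win32 handles) and below (POSIX fds) are typically paired with an
+    // exportable semaphore; the handle type and the `device` / `semaphore` names are assumptions:
+    //
+    //   vk::StructureChain<vk::SemaphoreCreateInfo, vk::ExportSemaphoreCreateInfo> sci(
+    //     {}, { vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd } );
+    //   vk::raii::Semaphore semaphore( device, sci.get<vk::SemaphoreCreateInfo>() );
+    //   int fd = device.getSemaphoreFdKHR( { *semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd } );
+    //   // `fd` can be handed to another process or API and re-imported via importSemaphoreFdKHR.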
+ //=== VK_KHR_external_semaphore_fd === + + // wrapper function for command vkImportSemaphoreFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreFdKHR.html + VULKAN_HPP_INLINE void Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreFdKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkImportSemaphoreFdKHR( + static_cast( m_device ), reinterpret_cast( &importSemaphoreFdInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); + } + + // wrapper function for command vkGetSemaphoreFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreFdKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreFdKHR && "Function requires " ); + + int fd; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetSemaphoreFdKHR( static_cast( m_device ), reinterpret_cast( &getFdInfo ), &fd ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); + + return fd; + } + + //=== VK_KHR_push_descriptor === + + // wrapper function for command vkCmdPushDescriptorSetKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetKHR.html + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetKHR && + "Function requires or " ); + + getDispatcher()->vkCmdPushDescriptorSetKHR( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ) ); + } + + // wrapper function for command vkCmdPushDescriptorSetWithTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplateKHR.html + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR && + "Function requires or or " ); + + getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR( static_cast( m_commandBuffer ), + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + reinterpret_cast( &data ) ); + } + + //=== VK_EXT_conditional_rendering === + + // wrapper function for command vkCmdBeginConditionalRenderingEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginConditionalRenderingEXT.html + VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( + const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginConditionalRenderingEXT && + "Function requires " ); + + getDispatcher()->vkCmdBeginConditionalRenderingEXT( static_cast( 
m_commandBuffer ), + reinterpret_cast( &conditionalRenderingBegin ) ); + } + + // wrapper function for command vkCmdEndConditionalRenderingEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndConditionalRenderingEXT.html + VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndConditionalRenderingEXT && + "Function requires " ); + + getDispatcher()->vkCmdEndConditionalRenderingEXT( static_cast( m_commandBuffer ) ); + } + + //=== VK_KHR_descriptor_update_template === + + // wrapper function for command vkCreateDescriptorUpdateTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplateKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateDescriptorUpdateTemplateKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createDescriptorUpdateTemplateKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate( + *this, *reinterpret_cast( &descriptorUpdateTemplate ), allocator ); + } + + // wrapper function for command vkDestroyDescriptorUpdateTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplateKHR.html + VULKAN_HPP_INLINE void + Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkDestroyDescriptorUpdateTemplateKHR && + "Function requires or " ); + + getDispatcher()->vkDestroyDescriptorUpdateTemplateKHR( + static_cast( m_device ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( static_cast( allocator ) ) ); + } + + // wrapper function for command vkUpdateDescriptorSetWithTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSetWithTemplateKHR.html + template + VULKAN_HPP_INLINE void DescriptorSet::updateWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR && + "Function requires or " ); + + getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR( static_cast( m_device ), + static_cast( m_descriptorSet ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( &data ) ); + } + + //=== VK_NV_clip_space_w_scaling === + + // wrapper function for command vkCmdSetViewportWScalingNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWScalingNV.html + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( + uint32_t firstViewport, + 
VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportWScalings ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWScalingNV && "Function requires " ); + + getDispatcher()->vkCmdSetViewportWScalingNV( static_cast( m_commandBuffer ), + firstViewport, + viewportWScalings.size(), + reinterpret_cast( viewportWScalings.data() ) ); + } + +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + // wrapper function for command vkAcquireXlibDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireXlibDisplayEXT.html + VULKAN_HPP_INLINE void PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireXlibDisplayEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkAcquireXlibDisplayEXT( static_cast( m_physicalDevice ), &dpy, static_cast( display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); + } + + // wrapper function for command vkGetRandROutputDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRandROutputDisplayEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DisplayKHR display; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetRandROutputDisplayEXT( + static_cast( m_physicalDevice ), &dpy, rrOutput, reinterpret_cast( &display ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "PhysicalDevice::getRandROutputDisplayEXT" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, *reinterpret_cast( &display ) ); + } +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2EXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT + PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT( static_cast( m_physicalDevice ), + static_cast( surface ), + reinterpret_cast( &surfaceCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); + + return surfaceCapabilities; + } + + //=== VK_EXT_display_control === + + // wrapper function for command vkDisplayPowerControlEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDisplayPowerControlEXT.html + VULKAN_HPP_INLINE void Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) 
const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkDisplayPowerControlEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkDisplayPowerControlEXT( + static_cast( m_device ), static_cast( display ), reinterpret_cast( &displayPowerInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); + } + + // wrapper function for command vkRegisterDeviceEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDeviceEventEXT.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Fence fence; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkRegisterDeviceEventEXT( + static_cast( m_device ), + reinterpret_cast( &deviceEventInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::registerEventEXT" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence( *this, *reinterpret_cast( &fence ), allocator ); + } + + // wrapper function for command vkRegisterDisplayEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDisplayEventEXT.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, + VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Fence fence; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkRegisterDisplayEventEXT( + static_cast( m_device ), + static_cast( *display ), + reinterpret_cast( &displayEventInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::registerDisplayEventEXT" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence( *this, *reinterpret_cast( &fence ), allocator ); + } + + // wrapper function for command vkGetSwapchainCounterEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainCounterEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t SwapchainKHR::getCounterEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainCounterEXT && "Function requires " ); + + uint64_t counterValue; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetSwapchainCounterEXT( static_cast( m_device ), + static_cast( m_swapchainKHR ), + static_cast( counter ), + &counterValue ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getCounterEXT" ); + + return counterValue; 
+ } + + //=== VK_GOOGLE_display_timing === + + // wrapper function for command vkGetRefreshCycleDurationGOOGLE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRefreshCycleDurationGOOGLE.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE SwapchainKHR::getRefreshCycleDurationGOOGLE() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRefreshCycleDurationGOOGLE && "Function requires " ); + + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetRefreshCycleDurationGOOGLE( static_cast( m_device ), + static_cast( m_swapchainKHR ), + reinterpret_cast( &displayTimingProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getRefreshCycleDurationGOOGLE" ); + + return displayTimingProperties; + } + + // wrapper function for command vkGetPastPresentationTimingGOOGLE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPastPresentationTimingGOOGLE.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector SwapchainKHR::getPastPresentationTimingGOOGLE() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPastPresentationTimingGOOGLE && + "Function requires " ); + + std::vector presentationTimings; + uint32_t presentationTimingCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPastPresentationTimingGOOGLE( + static_cast( m_device ), static_cast( m_swapchainKHR ), &presentationTimingCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentationTimingCount ) + { + presentationTimings.resize( presentationTimingCount ); + result = static_cast( + getDispatcher()->vkGetPastPresentationTimingGOOGLE( static_cast( m_device ), + static_cast( m_swapchainKHR ), + &presentationTimingCount, + reinterpret_cast( presentationTimings.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getPastPresentationTimingGOOGLE" ); + VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); + if ( presentationTimingCount < presentationTimings.size() ) + { + presentationTimings.resize( presentationTimingCount ); + } + return presentationTimings; + } + + //=== VK_EXT_discard_rectangles === + + // wrapper function for command vkCmdSetDiscardRectangleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDiscardRectangleEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( + uint32_t firstDiscardRectangle, VULKAN_HPP_NAMESPACE::ArrayProxy const & discardRectangles ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDiscardRectangleEXT && "Function requires " ); + + getDispatcher()->vkCmdSetDiscardRectangleEXT( static_cast( m_commandBuffer ), + firstDiscardRectangle, + discardRectangles.size(), + reinterpret_cast( discardRectangles.data() ) ); + } + + // wrapper function for command vkCmdSetDiscardRectangleEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDiscardRectangleEnableEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDiscardRectangleEnableEXT && + "Function requires " ); + + 
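+      // NOTE: illustrative usage sketch, not part of the generated wrappers. Assuming a
+      // vk::raii::CommandBuffer named `cmd` recording with a pipeline that declared the
+      // corresponding discard-rectangle dynamic states, the commands in this section are
+      // used like this:
+      //
+      //   std::array<vk::Rect2D, 1> rects = { vk::Rect2D{ { 0, 0 }, { 256, 256 } } };
+      //   cmd.setDiscardRectangleEXT( 0, rects );
+      //   cmd.setDiscardRectangleEnableEXT( VK_TRUE );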
getDispatcher()->vkCmdSetDiscardRectangleEnableEXT( static_cast( m_commandBuffer ), static_cast( discardRectangleEnable ) ); + } + + // wrapper function for command vkCmdSetDiscardRectangleModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDiscardRectangleModeEXT.html + VULKAN_HPP_INLINE void + CommandBuffer::setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDiscardRectangleModeEXT && + "Function requires " ); + + getDispatcher()->vkCmdSetDiscardRectangleModeEXT( static_cast( m_commandBuffer ), + static_cast( discardRectangleMode ) ); + } + + //=== VK_EXT_hdr_metadata === + + // wrapper function for command vkSetHdrMetadataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetHdrMetadataEXT.html + VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & swapchains, + VULKAN_HPP_NAMESPACE::ArrayProxy const & metadata ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetHdrMetadataEXT && "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); +# else + if ( swapchains.size() != metadata.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkSetHdrMetadataEXT( static_cast( m_device ), + swapchains.size(), + reinterpret_cast( swapchains.data() ), + reinterpret_cast( metadata.data() ) ); + } + + //=== VK_KHR_create_renderpass2 === + + // wrapper function for command vkCreateRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateRenderPass2KHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRenderPass2KHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, *reinterpret_cast( &renderPass ), allocator ); + } + + // wrapper function for command vkCmdBeginRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass2KHR.html + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderPass2KHR && + "Function requires or " ); + + getDispatcher()->vkCmdBeginRenderPass2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( &renderPassBegin ), + reinterpret_cast( &subpassBeginInfo ) ); + } + + // wrapper function for command vkCmdNextSubpass2KHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass2KHR.html + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass2KHR && "Function requires or " ); + + getDispatcher()->vkCmdNextSubpass2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( &subpassBeginInfo ), + reinterpret_cast( &subpassEndInfo ) ); + } + + // wrapper function for command vkCmdEndRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass2KHR.html + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass2KHR && + "Function requires or " ); + + getDispatcher()->vkCmdEndRenderPass2KHR( static_cast( m_commandBuffer ), reinterpret_cast( &subpassEndInfo ) ); + } + + //=== VK_KHR_shared_presentable_image === + + // wrapper function for command vkGetSwapchainStatusKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainStatusKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result SwapchainKHR::getStatus() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainStatusKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetSwapchainStatusKHR( static_cast( m_device ), static_cast( m_swapchainKHR ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getStatus", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + + return static_cast( result ); + } + + //=== VK_KHR_external_fence_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalFencePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalFencePropertiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties + PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; + getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( &externalFenceInfo ), + reinterpret_cast( &externalFenceProperties ) ); + + return externalFenceProperties; + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + + // wrapper function for command vkImportFenceWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportFenceWin32HandleKHR.html + VULKAN_HPP_INLINE void Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkImportFenceWin32HandleKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkImportFenceWin32HandleKHR( + static_cast( m_device ), reinterpret_cast( &importFenceWin32HandleInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::importFenceWin32HandleKHR" ); + } + + // wrapper function for command vkGetFenceWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceWin32HandleKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE + Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceWin32HandleKHR && "Function requires " ); + + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetFenceWin32HandleKHR( + static_cast( m_device ), reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); + + return handle; + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + + // wrapper function for command vkImportFenceFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportFenceFdKHR.html + VULKAN_HPP_INLINE void Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkImportFenceFdKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkImportFenceFdKHR( static_cast( m_device ), reinterpret_cast( &importFenceFdInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); + } + + // wrapper function for command vkGetFenceFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceFdKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceFdKHR && "Function requires " ); + + int fd; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetFenceFdKHR( static_cast( m_device ), reinterpret_cast( &getFdInfo ), &fd ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" ); + + return fd; + } + + //=== VK_KHR_performance_query === + + // wrapper function for command vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR.html + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::pair, std::vector> + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR && + "Function requires " ); + + std::pair, std::vector> data_; + std::vector & counters = data_.first; + std::vector & counterDescriptions = data_.second; + uint32_t counterCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + static_cast( m_physicalDevice ), queueFamilyIndex, &counterCount, nullptr, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && counterCount ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + result = static_cast( getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + static_cast( m_physicalDevice ), + queueFamilyIndex, + &counterCount, + reinterpret_cast( counters.data() ), + reinterpret_cast( 
counterDescriptions.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + VULKAN_HPP_ASSERT( counterCount <= counters.size() ); + if ( counterCount < counters.size() ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + } + return data_; + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( + const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR && + "Function requires " ); + + uint32_t numPasses; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &performanceQueryCreateInfo ), + &numPasses ); + + return numPasses; + } + + // wrapper function for command vkAcquireProfilingLockKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireProfilingLockKHR.html + VULKAN_HPP_INLINE void Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireProfilingLockKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkAcquireProfilingLockKHR( static_cast( m_device ), reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); + } + + // wrapper function for command vkReleaseProfilingLockKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseProfilingLockKHR.html + VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseProfilingLockKHR && "Function requires " ); + + getDispatcher()->vkReleaseProfilingLockKHR( static_cast( m_device ) ); + } + + //=== VK_KHR_get_surface_capabilities2 === + + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR + PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &surfaceCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + + return surfaceCapabilities; + } + + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2KHR, see + // 
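+    //=== Illustrative usage sketch (not generated code) ===
+    // A minimal sketch of the VK_KHR_performance_query wrappers above: enumerate the counters of a
+    // queue family, query how many passes a pool configuration needs, and take/release the
+    // profiling lock around the profiled submissions. The helper name and the hard-coded queue
+    // family index are assumptions made for illustration only.
+    inline uint32_t examplePerformanceQuerySetupKHR( PhysicalDevice const & physicalDevice, Device const & device )
+    {
+      uint32_t const queueFamilyIndex = 0;  // assumed queue family
+
+      // vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR
+      auto countersAndDescriptions = physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex );
+
+      // Select every reported counter for this sketch; a real application would pick specific indices.
+      std::vector<uint32_t> counterIndices( countersAndDescriptions.first.size() );
+      for ( size_t i = 0; i < counterIndices.size(); ++i )
+      {
+        counterIndices[i] = static_cast<uint32_t>( i );
+      }
+
+      // vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR
+      VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR performanceCreateInfo( queueFamilyIndex, counterIndices );
+      uint32_t numPasses = physicalDevice.getQueueFamilyPerformanceQueryPassesKHR( performanceCreateInfo );
+
+      // vkAcquireProfilingLockKHR / vkReleaseProfilingLockKHR; the profiled work goes in between.
+      device.acquireProfilingLockKHR( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR{} );
+      device.releaseProfilingLockKHR();
+
+      return numPasses;
+    }
+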
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR && + "Function requires " ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &surfaceCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + + return structureChain; + } + + // wrapper function for command vkGetPhysicalDeviceSurfaceFormats2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR && + "Function requires " ); + + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return surfaceFormats; + } + + // wrapper function for command vkGetPhysicalDeviceSurfaceFormats2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR && + "Function requires " ); + + std::vector structureChains; + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) + { + structureChains.resize( surfaceFormatCount ); + surfaceFormats.resize( surfaceFormatCount ); + for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + { + 
surfaceFormats[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + structureChains.resize( surfaceFormatCount ); + } + for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + { + structureChains[i].template get() = surfaceFormats[i]; + } + return structureChains; + } + + //=== VK_KHR_get_display_properties2 === + + // wrapper function for command vkGetPhysicalDeviceDisplayProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayProperties2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getDisplayProperties2KHR() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR( static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR( + static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + // wrapper function for command vkGetPhysicalDeviceDisplayPlaneProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlaneProperties2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getDisplayPlaneProperties2KHR() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR( static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + // 
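+    //=== Illustrative usage sketch (not generated code) ===
+    // A minimal sketch of the VK_KHR_get_surface_capabilities2 wrappers above: both queries go
+    // through a PhysicalDeviceSurfaceInfo2KHR. The helper name, and the assumption that `surface`
+    // stays alive for the duration of the calls, are for illustration only.
+    inline VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR
+      exampleQuerySurfaceInfo2KHR( PhysicalDevice const & physicalDevice, SurfaceKHR const & surface )
+    {
+      VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( *surface );
+
+      // vkGetPhysicalDeviceSurfaceFormats2KHR: the two-call enumeration is handled by the wrapper above.
+      std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> formats = physicalDevice.getSurfaceFormats2KHR( surfaceInfo );
+      ( void )formats;
+
+      // vkGetPhysicalDeviceSurfaceCapabilities2KHR
+      return physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo );
+    }
+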
wrapper function for command vkGetDisplayModeProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModeProperties2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector DisplayKHR::getModeProperties2() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModeProperties2KHR && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetDisplayModeProperties2KHR( + static_cast( m_physicalDevice ), static_cast( m_displayKHR ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + getDispatcher()->vkGetDisplayModeProperties2KHR( static_cast( m_physicalDevice ), + static_cast( m_displayKHR ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties2" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + // wrapper function for command vkGetDisplayModeProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModeProperties2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector DisplayKHR::getModeProperties2() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModeProperties2KHR && + "Function requires " ); + + std::vector structureChains; + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetDisplayModeProperties2KHR( + static_cast( m_physicalDevice ), static_cast( m_displayKHR ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + structureChains.resize( propertyCount ); + properties.resize( propertyCount ); + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + properties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + getDispatcher()->vkGetDisplayModeProperties2KHR( static_cast( m_physicalDevice ), + static_cast( m_displayKHR ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties2" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + structureChains.resize( propertyCount ); + } + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + structureChains[i].template get() = properties[i]; + } + return structureChains; + } + + // wrapper function for command vkGetDisplayPlaneCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneCapabilities2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR + PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayPlaneCapabilities2KHR && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities; + 
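+      // [Illustrative note, not generated code] Typical use of the VK_KHR_get_display_properties2
+      // wrappers above, assuming a `physicalDevice` and one of its `display` objects:
+      //   auto displayProps = physicalDevice.getDisplayProperties2KHR();       // vkGetPhysicalDeviceDisplayProperties2KHR
+      //   auto planeProps   = physicalDevice.getDisplayPlaneProperties2KHR();  // vkGetPhysicalDeviceDisplayPlaneProperties2KHR
+      //   auto modeProps    = display.getModeProperties2();                    // vkGetDisplayModeProperties2KHR
+      // followed by this function (vkGetDisplayPlaneCapabilities2KHR) for a chosen mode/plane pair.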
VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetDisplayPlaneCapabilities2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &displayPlaneInfo ), + reinterpret_cast( &capabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); + + return capabilities; + } + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + // wrapper function for command vkCreateIOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIOSSurfaceMVK.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateIOSSurfaceMVK( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createIOSSurfaceMVK" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); + } +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + // wrapper function for command vkCreateMacOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMacOSSurfaceMVK.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateMacOSSurfaceMVK( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createMacOSSurfaceMVK" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); + } +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + // wrapper function for command vkSetDebugUtilsObjectNameEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDebugUtilsObjectNameEXT.html + VULKAN_HPP_INLINE void Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetDebugUtilsObjectNameEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkSetDebugUtilsObjectNameEXT( + static_cast( m_device ), reinterpret_cast( &nameInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( 
result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" ); + } + + // wrapper function for command vkSetDebugUtilsObjectTagEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDebugUtilsObjectTagEXT.html + VULKAN_HPP_INLINE void Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetDebugUtilsObjectTagEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkSetDebugUtilsObjectTagEXT( static_cast( m_device ), reinterpret_cast( &tagInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" ); + } + + // wrapper function for command vkQueueBeginDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueBeginDebugUtilsLabelEXT.html + VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueBeginDebugUtilsLabelEXT && "Function requires " ); + + getDispatcher()->vkQueueBeginDebugUtilsLabelEXT( static_cast( m_queue ), reinterpret_cast( &labelInfo ) ); + } + + // wrapper function for command vkQueueEndDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueEndDebugUtilsLabelEXT.html + VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueEndDebugUtilsLabelEXT && "Function requires " ); + + getDispatcher()->vkQueueEndDebugUtilsLabelEXT( static_cast( m_queue ) ); + } + + // wrapper function for command vkQueueInsertDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueInsertDebugUtilsLabelEXT.html + VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueInsertDebugUtilsLabelEXT && "Function requires " ); + + getDispatcher()->vkQueueInsertDebugUtilsLabelEXT( static_cast( m_queue ), reinterpret_cast( &labelInfo ) ); + } + + // wrapper function for command vkCmdBeginDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginDebugUtilsLabelEXT.html + VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginDebugUtilsLabelEXT && "Function requires " ); + + getDispatcher()->vkCmdBeginDebugUtilsLabelEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &labelInfo ) ); + } + + // wrapper function for command vkCmdEndDebugUtilsLabelEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndDebugUtilsLabelEXT.html + VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndDebugUtilsLabelEXT && "Function requires " ); + + getDispatcher()->vkCmdEndDebugUtilsLabelEXT( static_cast( m_commandBuffer ) ); + } + + // wrapper function for command vkCmdInsertDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdInsertDebugUtilsLabelEXT.html + VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT + { + 
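+      // [Illustrative note, not generated code] The VK_EXT_debug_utils label wrappers above are
+      // typically used to bracket work for debuggers and capture tools, assuming a `queue`, a
+      // `commandBuffer` and a DebugUtilsLabelEXT `label` whose pLabelName has been set:
+      //   queue.beginDebugUtilsLabelEXT( label );          // vkQueueBeginDebugUtilsLabelEXT
+      //   queue.endDebugUtilsLabelEXT();                   // vkQueueEndDebugUtilsLabelEXT
+      //   commandBuffer.beginDebugUtilsLabelEXT( label );  // vkCmdBeginDebugUtilsLabelEXT
+      //   commandBuffer.endDebugUtilsLabelEXT();           // vkCmdEndDebugUtilsLabelEXT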
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdInsertDebugUtilsLabelEXT && "Function requires " ); + + getDispatcher()->vkCmdInsertDebugUtilsLabelEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &labelInfo ) ); + } + + // wrapper function for command vkCreateDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugUtilsMessengerEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateDebugUtilsMessengerEXT( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &messenger ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createDebugUtilsMessengerEXT" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT( + *this, *reinterpret_cast( &messenger ), allocator ); + } + + // wrapper function for command vkSubmitDebugUtilsMessageEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSubmitDebugUtilsMessageEXT.html + VULKAN_HPP_INLINE void + Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSubmitDebugUtilsMessageEXT && "Function requires " ); + + getDispatcher()->vkSubmitDebugUtilsMessageEXT( static_cast( m_instance ), + static_cast( messageSeverity ), + static_cast( messageTypes ), + reinterpret_cast( &callbackData ) ); + } + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + + // wrapper function for command vkGetAndroidHardwareBufferPropertiesANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAndroidHardwareBufferPropertiesANDROID.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID( + static_cast( m_device ), &buffer, reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + + return properties; + } + + // wrapper function for command vkGetAndroidHardwareBufferPropertiesANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAndroidHardwareBufferPropertiesANDROID.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain 
+ Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID && + "Function requires " ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties = + structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID( + static_cast( m_device ), &buffer, reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + + return structureChain; + } + + // wrapper function for command vkGetMemoryAndroidHardwareBufferANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryAndroidHardwareBufferANDROID.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE struct AHardwareBuffer * + Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID && + "Function requires " ); + + struct AHardwareBuffer * buffer; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID( + static_cast( m_device ), reinterpret_cast( &info ), &buffer ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); + + return buffer; + } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + Device::createExecutionGraphPipelinesAMDX( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateExecutionGraphPipelinesAMDX( + static_cast( m_device ), + pipelineCache ? 
static_cast( **pipelineCache ) : 0, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createExecutionGraphPipelinesAMDX" ); +# endif + } + + std::vector pipelinesRAII; + pipelinesRAII.reserve( pipelines.size() ); + for ( auto & pipeline : pipelines ) + { + pipelinesRAII.emplace_back( *this, *reinterpret_cast( &pipeline ), allocator, result ); + } + return pipelinesRAII; + } + + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createExecutionGraphPipelineAMDX( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateExecutionGraphPipelinesAMDX( + static_cast( m_device ), + pipelineCache ? static_cast( **pipelineCache ) : 0, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createExecutionGraphPipelineAMDX" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator, result ); + } + + // wrapper function for command vkGetExecutionGraphPipelineScratchSizeAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExecutionGraphPipelineScratchSizeAMDX.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX Pipeline::getExecutionGraphScratchSizeAMDX() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetExecutionGraphPipelineScratchSizeAMDX && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetExecutionGraphPipelineScratchSizeAMDX( + static_cast( m_device ), static_cast( m_pipeline ), reinterpret_cast( &sizeInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphScratchSizeAMDX" ); + + return sizeInfo; + } + + // wrapper function for command vkGetExecutionGraphPipelineNodeIndexAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExecutionGraphPipelineNodeIndexAMDX.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t + Pipeline::getExecutionGraphNodeIndexAMDX( const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo ) const + { + VULKAN_HPP_ASSERT( 
getDispatcher()->vkGetExecutionGraphPipelineNodeIndexAMDX && + "Function requires " ); + + uint32_t nodeIndex; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetExecutionGraphPipelineNodeIndexAMDX( static_cast( m_device ), + static_cast( m_pipeline ), + reinterpret_cast( &nodeInfo ), + &nodeIndex ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphNodeIndexAMDX" ); + + return nodeIndex; + } + + // wrapper function for command vkCmdInitializeGraphScratchMemoryAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdInitializeGraphScratchMemoryAMDX.html + VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX && + "Function requires " ); + + getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX( static_cast( m_commandBuffer ), + static_cast( executionGraph ), + static_cast( scratch ), + static_cast( scratchSize ) ); + } + + // wrapper function for command vkCmdDispatchGraphAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphAMDX.html + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphAMDX && "Function requires " ); + + getDispatcher()->vkCmdDispatchGraphAMDX( static_cast( m_commandBuffer ), + static_cast( scratch ), + static_cast( scratchSize ), + reinterpret_cast( &countInfo ) ); + } + + // wrapper function for command vkCmdDispatchGraphIndirectAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphIndirectAMDX.html + VULKAN_HPP_INLINE void + CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectAMDX && "Function requires " ); + + getDispatcher()->vkCmdDispatchGraphIndirectAMDX( static_cast( m_commandBuffer ), + static_cast( scratch ), + static_cast( scratchSize ), + reinterpret_cast( &countInfo ) ); + } + + // wrapper function for command vkCmdDispatchGraphIndirectCountAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphIndirectCountAMDX.html + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX && + "Function requires " ); + + getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX( static_cast( m_commandBuffer ), + static_cast( scratch ), + static_cast( scratchSize ), + static_cast( countInfo ) ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + + // wrapper function for command vkCmdSetSampleLocationsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleLocationsEXT.html + VULKAN_HPP_INLINE void + 
CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEXT && "Function requires " ); + + getDispatcher()->vkCmdSetSampleLocationsEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &sampleLocationsInfo ) ); + } + + // wrapper function for command vkGetPhysicalDeviceMultisamplePropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMultisamplePropertiesEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT + PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties; + getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT( static_cast( m_physicalDevice ), + static_cast( samples ), + reinterpret_cast( &multisampleProperties ) ); + + return multisampleProperties; + } + + //=== VK_KHR_get_memory_requirements2 === + + // wrapper function for command vkGetImageMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetImageMemoryRequirements2KHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetImageMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetImageMemoryRequirements2KHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + // wrapper function for command vkGetBufferMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetBufferMemoryRequirements2KHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return 
memoryRequirements; + } + + // wrapper function for command vkGetBufferMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetBufferMemoryRequirements2KHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + // wrapper function for command vkGetImageSparseMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSparseMemoryRequirements2KHR && + "Function requires or " ); + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + getDispatcher()->vkGetImageSparseMemoryRequirements2KHR( + static_cast( m_device ), reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + getDispatcher()->vkGetImageSparseMemoryRequirements2KHR( static_cast( m_device ), + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + //=== VK_KHR_acceleration_structure === + + // wrapper function for command vkCreateAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateAccelerationStructureKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createAccelerationStructureKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR( + *this, *reinterpret_cast( &accelerationStructure ), allocator ); + } + + // wrapper function for command vkCmdBuildAccelerationStructuresKHR, see + // 
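+    //=== Illustrative usage sketch (not generated code) ===
+    // A minimal sketch of the VK_KHR_get_memory_requirements2 wrappers above for a buffer created
+    // elsewhere; the helper name is an assumption made for illustration only.
+    inline VULKAN_HPP_NAMESPACE::MemoryRequirements2 exampleGetBufferMemoryRequirements2KHR( Device const & device, Buffer const & buffer )
+    {
+      // vkGetBufferMemoryRequirements2KHR; extended outputs (e.g. MemoryDedicatedRequirements) can
+      // be queried through the StructureChain overload defined above instead.
+      VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 info( *buffer );
+      return device.getBufferMemoryRequirements2KHR( info );
+    }
+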
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructuresKHR.html + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructuresKHR && + "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); +# else + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBuildAccelerationStructuresKHR( + static_cast( m_commandBuffer ), + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( pBuildRangeInfos.data() ) ); + } + + // wrapper function for command vkCmdBuildAccelerationStructuresIndirectKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructuresIndirectKHR.html + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectDeviceAddresses, + VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectStrides, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pMaxPrimitiveCounts ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR && + "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() ); + VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() ); + VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() ); +# else + if ( infos.size() != indirectDeviceAddresses.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" ); + } + if ( infos.size() != indirectStrides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" ); + } + if ( infos.size() != pMaxPrimitiveCounts.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR( static_cast( m_commandBuffer ), + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( indirectDeviceAddresses.data() ), + indirectStrides.data(), + pMaxPrimitiveCounts.data() ); + } + + // wrapper function for command vkBuildAccelerationStructuresKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBuildAccelerationStructuresKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBuildAccelerationStructuresKHR && + "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); +# else + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING 
"::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkBuildAccelerationStructuresKHR( + static_cast( m_device ), + static_cast( deferredOperation ), + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( pBuildRangeInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } + + // wrapper function for command vkCopyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyAccelerationStructureKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyAccelerationStructureKHR && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkCopyAccelerationStructureKHR( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } + + // wrapper function for command vkCopyAccelerationStructureToMemoryKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyAccelerationStructureToMemoryKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyAccelerationStructureToMemoryKHR && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkCopyAccelerationStructureToMemoryKHR( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } + + // wrapper function for command vkCopyMemoryToAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToAccelerationStructureKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToAccelerationStructureKHR && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkCopyMemoryToAccelerationStructureKHR( 
static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } + + // wrapper function for command vkWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteAccelerationStructuresPropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::writeAccelerationStructuresPropertiesKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR && + "Function requires " ); + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR( static_cast( m_device ), + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ), + stride ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); + + return data; + } + + // wrapper function for command vkWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteAccelerationStructuresPropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Device::writeAccelerationStructuresPropertyKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR && + "Function requires " ); + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR( static_cast( m_device ), + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + sizeof( DataType ), + reinterpret_cast( &data ), + stride ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" ); + + return data; + } + + // wrapper function for command vkCmdCopyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureKHR.html + VULKAN_HPP_INLINE void + CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureKHR && + "Function requires " ); + + getDispatcher()->vkCmdCopyAccelerationStructureKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &info ) ); + } + + // wrapper function for command vkCmdCopyAccelerationStructureToMemoryKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureToMemoryKHR.html + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( + const 
VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR && + "Function requires " ); + + getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &info ) ); + } + + // wrapper function for command vkCmdCopyMemoryToAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToAccelerationStructureKHR.html + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR && + "Function requires " ); + + getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &info ) ); + } + + // wrapper function for command vkGetAccelerationStructureDeviceAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureDeviceAddressKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR && + "Function requires " ); + + VkDeviceAddress result = getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR( + static_cast( m_device ), reinterpret_cast( &info ) ); + + return static_cast( result ); + } + + // wrapper function for command vkCmdWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteAccelerationStructuresPropertiesKHR.html + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR && + "Function requires " ); + + getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR( static_cast( m_commandBuffer ), + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } + + // wrapper function for command vkGetDeviceAccelerationStructureCompatibilityKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceAccelerationStructureCompatibilityKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR Device::getAccelerationStructureCompatibilityKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; + getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR( static_cast( m_device ), + reinterpret_cast( &versionInfo ), + reinterpret_cast( &compatibility ) ); + + return compatibility; + } + + // wrapper function for command vkGetAccelerationStructureBuildSizesKHR, see + // 
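+    //=== Illustrative usage sketch (not generated code) ===
+    // A minimal sketch of two VK_KHR_acceleration_structure queries above: the device address of an
+    // existing acceleration structure, and a compatibility check for serialized acceleration
+    // structure data. The helper name is an assumption made for illustration only.
+    inline VULKAN_HPP_NAMESPACE::DeviceAddress exampleAccelerationStructureQueriesKHR( Device const &                   device,
+                                                                                       AccelerationStructureKHR const & accelerationStructure,
+                                                                                       uint8_t const *                  versionData )
+    {
+      // vkGetDeviceAccelerationStructureCompatibilityKHR: versionData must point at the
+      // 2 * VK_UUID_SIZE bytes of version information taken from a serialized acceleration structure.
+      VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR versionInfo( versionData );
+      VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility =
+        device.getAccelerationStructureCompatibilityKHR( versionInfo );
+      ( void )compatibility;
+
+      // vkGetAccelerationStructureDeviceAddressKHR
+      VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR addressInfo( *accelerationStructure );
+      return device.getAccelerationStructureAddressKHR( addressInfo );
+    }
+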
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureBuildSizesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, + VULKAN_HPP_NAMESPACE::ArrayProxy const & maxPrimitiveCounts ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureBuildSizesKHR && + "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount ); +# else + if ( maxPrimitiveCounts.size() != buildInfo.geometryCount ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; + getDispatcher()->vkGetAccelerationStructureBuildSizesKHR( static_cast( m_device ), + static_cast( buildType ), + reinterpret_cast( &buildInfo ), + maxPrimitiveCounts.data(), + reinterpret_cast( &sizeInfo ) ); + + return sizeInfo; + } + + //=== VK_KHR_ray_tracing_pipeline === + + // wrapper function for command vkCmdTraceRaysKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysKHR.html + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysKHR && "Function requires " ); + + getDispatcher()->vkCmdTraceRaysKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &raygenShaderBindingTable ), + reinterpret_cast( &missShaderBindingTable ), + reinterpret_cast( &hitShaderBindingTable ), + reinterpret_cast( &callableShaderBindingTable ), + width, + height, + depth ); + } + + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + Device::createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateRayTracingPipelinesKHR( + static_cast( m_device ), + deferredOperation ? static_cast( **deferredOperation ) : 0, + pipelineCache ? 
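// --- Illustrative usage sketch (editorial addition, not part of the generated header or the diff above) ---
// Shows the Device::getAccelerationStructureBuildSizesKHR wrapper above being used to size a device-built
// acceleration structure.  Assumptions: `buildInfo` already references its geometries, `primitiveCounts`
// holds one entry per geometry (the wrapper asserts or throws when the count differs from
// buildInfo.geometryCount), and the helper name queryBuildSizes is made up for the example.
#include <vulkan/vulkan_raii.hpp>
#include <cstdint>
#include <vector>

vk::AccelerationStructureBuildSizesInfoKHR queryBuildSizes( vk::raii::Device const &                               device,
                                                            vk::AccelerationStructureBuildGeometryInfoKHR const & buildInfo,
                                                            std::vector<uint32_t> const &                         primitiveCounts )
{
  // eDevice selects the sizes needed for a build executed on the device.
  return device.getAccelerationStructureBuildSizesKHR( vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo, primitiveCounts );
}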
static_cast<VkPipelineCache>( **pipelineCache ) : 0,
+        createInfos.size(),
+        reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
+      if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) &&
+           ( result != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
+      {
+# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
+        return VULKAN_HPP_UNEXPECTED( result );
+# else
+        VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRayTracingPipelinesKHR" );
+# endif
+      }
+
+      std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline> pipelinesRAII;
+      pipelinesRAII.reserve( pipelines.size() );
+      for ( auto & pipeline : pipelines )
+      {
+        pipelinesRAII.emplace_back( *this, *reinterpret_cast<VkPipeline *>( &pipeline ), allocator, result );
+      }
+      return pipelinesRAII;
+    }
+
+    // wrapper function for command vkCreateRayTracingPipelinesKHR, see
+    // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>::Type
+      Device::createRayTracingPipelineKHR(
+        VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
+        VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+        VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo,
+        VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
+    {
+      VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateRayTracingPipelinesKHR(
+        static_cast<VkDevice>( m_device ),
+        deferredOperation ? static_cast<VkDeferredOperationKHR>( **deferredOperation ) : 0,
+        pipelineCache ?
static_cast( **pipelineCache ) : 0, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRayTracingPipelineKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator, result ); + } + + // wrapper function for command vkGetRayTracingShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Pipeline::getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR && + "Function requires or " ); + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast( m_device ), + static_cast( m_pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesKHR" ); + + return data; + } + + // wrapper function for command vkGetRayTracingShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR && + "Function requires or " ); + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast( m_device ), + static_cast( m_pipeline ), + firstGroup, + groupCount, + sizeof( DataType ), + reinterpret_cast( &data ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleKHR" ); + + return data; + } + + // wrapper function for command vkGetRayTracingCaptureReplayShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingCaptureReplayShaderGroupHandlesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR && + "Function requires " ); + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast( m_device ), + static_cast( m_pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( DataType ), + 
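// --- Illustrative usage sketch (editorial addition, not part of the generated header or the diff above) ---
// Shows the Pipeline::getRayTracingShaderGroupHandlesKHR wrapper above fetching all shader group handles as
// raw bytes, for example to copy them into a shader binding table.  Assumptions: `pipeline` was created with
// createRayTracingPipelineKHR, `groupCount` and `handleSize` come from
// vk::PhysicalDeviceRayTracingPipelinePropertiesKHR, and the helper name fetchGroupHandles is made up.
#include <vulkan/vulkan_raii.hpp>
#include <cstdint>
#include <vector>

std::vector<uint8_t> fetchGroupHandles( vk::raii::Pipeline const & pipeline, uint32_t groupCount, uint32_t handleSize )
{
  // dataSize must be groupCount * shaderGroupHandleSize; the wrapper divides it by sizeof( DataType ).
  return pipeline.getRayTracingShaderGroupHandlesKHR<uint8_t>( 0, groupCount, size_t( groupCount ) * handleSize );
}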
reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); + + return data; + } + + // wrapper function for command vkGetRayTracingCaptureReplayShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingCaptureReplayShaderGroupHandlesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR && + "Function requires " ); + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast( m_device ), + static_cast( m_pipeline ), + firstGroup, + groupCount, + sizeof( DataType ), + reinterpret_cast( &data ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR" ); + + return data; + } + + // wrapper function for command vkCmdTraceRaysIndirectKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysIndirectKHR.html + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysIndirectKHR && "Function requires " ); + + getDispatcher()->vkCmdTraceRaysIndirectKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &raygenShaderBindingTable ), + reinterpret_cast( &missShaderBindingTable ), + reinterpret_cast( &hitShaderBindingTable ), + reinterpret_cast( &callableShaderBindingTable ), + static_cast( indirectDeviceAddress ) ); + } + + // wrapper function for command vkGetRayTracingShaderGroupStackSizeKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupStackSizeKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize + Pipeline::getRayTracingShaderGroupStackSizeKHR( uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR && + "Function requires " ); + + VkDeviceSize result = getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR( + static_cast( m_device ), static_cast( m_pipeline ), group, static_cast( groupShader ) ); + + return static_cast( result ); + } + + // wrapper function for command vkCmdSetRayTracingPipelineStackSizeKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRayTracingPipelineStackSizeKHR.html + VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR && + "Function requires " ); + + getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR( static_cast( m_commandBuffer ), pipelineStackSize ); + } + + //=== VK_KHR_sampler_ycbcr_conversion === + + // wrapper 
function for command vkCreateSamplerYcbcrConversionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversionKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateSamplerYcbcrConversionKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSamplerYcbcrConversionKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion( + *this, *reinterpret_cast( &ycbcrConversion ), allocator ); + } + + // wrapper function for command vkDestroySamplerYcbcrConversionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversionKHR.html + VULKAN_HPP_INLINE void + Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkDestroySamplerYcbcrConversionKHR && + "Function requires or " ); + + getDispatcher()->vkDestroySamplerYcbcrConversionKHR( + static_cast( m_device ), + static_cast( ycbcrConversion ), + reinterpret_cast( static_cast( allocator ) ) ); + } + + //=== VK_KHR_bind_memory2 === + + // wrapper function for command vkBindBufferMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory2KHR.html + VULKAN_HPP_INLINE void + Device::bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory2KHR && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkBindBufferMemory2KHR( + static_cast( m_device ), bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" ); + } + + // wrapper function for command vkBindImageMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory2KHR.html + VULKAN_HPP_INLINE void + Device::bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory2KHR && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkBindImageMemory2KHR( + static_cast( m_device ), bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" ); + } + + //=== VK_EXT_image_drm_format_modifier === + + // wrapper function for command vkGetImageDrmFormatModifierPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageDrmFormatModifierPropertiesEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
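// --- Illustrative usage sketch (editorial addition, not part of the generated header or the diff above) ---
// Shows the Device::bindBufferMemory2KHR wrapper above binding two buffers to one memory allocation in a
// single call.  Assumptions: `offsetB` already satisfies the second buffer's VkMemoryRequirements size and
// alignment, and the helper name bindTwoBuffers is made up for the example.
#include <vulkan/vulkan_raii.hpp>
#include <array>

void bindTwoBuffers( vk::raii::Device const & device,
                     vk::Buffer               bufferA,
                     vk::Buffer               bufferB,
                     vk::DeviceMemory         memory,
                     vk::DeviceSize           offsetB )
{
  std::array<vk::BindBufferMemoryInfo, 2> bindInfos = { vk::BindBufferMemoryInfo{ bufferA, memory, 0 },
                                                        vk::BindBufferMemoryInfo{ bufferB, memory, offsetB } };
  device.bindBufferMemory2KHR( bindInfos );  // failure is reported through resultCheck in the wrapper
}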
VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT Image::getDrmFormatModifierPropertiesEXT() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT( + static_cast( m_device ), static_cast( m_image ), reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Image::getDrmFormatModifierPropertiesEXT" ); + + return properties; + } + + //=== VK_EXT_validation_cache === + + // wrapper function for command vkCreateValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateValidationCacheEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateValidationCacheEXT( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &validationCache ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createValidationCacheEXT" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT( + *this, *reinterpret_cast( &validationCache ), allocator ); + } + + // wrapper function for command vkMergeValidationCachesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMergeValidationCachesEXT.html + VULKAN_HPP_INLINE void ValidationCacheEXT::merge( VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkMergeValidationCachesEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkMergeValidationCachesEXT( static_cast( m_device ), + static_cast( m_validationCacheEXT ), + srcCaches.size(), + reinterpret_cast( srcCaches.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::merge" ); + } + + // wrapper function for command vkGetValidationCacheDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetValidationCacheDataEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector ValidationCacheEXT::getData() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetValidationCacheDataEXT && "Function requires " ); + + std::vector data; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetValidationCacheDataEXT( + static_cast( m_device ), static_cast( m_validationCacheEXT ), &dataSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = + static_cast( getDispatcher()->vkGetValidationCacheDataEXT( static_cast( m_device ), + static_cast( m_validationCacheEXT ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + } + } while ( result == 
VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::getData" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return data; + } + + //=== VK_NV_shading_rate_image === + + // wrapper function for command vkCmdBindShadingRateImageNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindShadingRateImageNV.html + VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindShadingRateImageNV && "Function requires " ); + + getDispatcher()->vkCmdBindShadingRateImageNV( + static_cast( m_commandBuffer ), static_cast( imageView ), static_cast( imageLayout ) ); + } + + // wrapper function for command vkCmdSetViewportShadingRatePaletteNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportShadingRatePaletteNV.html + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( + uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shadingRatePalettes ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportShadingRatePaletteNV && + "Function requires " ); + + getDispatcher()->vkCmdSetViewportShadingRatePaletteNV( static_cast( m_commandBuffer ), + firstViewport, + shadingRatePalettes.size(), + reinterpret_cast( shadingRatePalettes.data() ) ); + } + + // wrapper function for command vkCmdSetCoarseSampleOrderNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoarseSampleOrderNV.html + VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + VULKAN_HPP_NAMESPACE::ArrayProxy const & customSampleOrders ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoarseSampleOrderNV && "Function requires " ); + + getDispatcher()->vkCmdSetCoarseSampleOrderNV( static_cast( m_commandBuffer ), + static_cast( sampleOrderType ), + customSampleOrders.size(), + reinterpret_cast( customSampleOrders.data() ) ); + } + + //=== VK_NV_ray_tracing === + + // wrapper function for command vkCreateAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateAccelerationStructureNV( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createAccelerationStructureNV" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV( + *this, *reinterpret_cast( &accelerationStructure ), 
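// --- Illustrative usage sketch (editorial addition, not part of the generated header or the diff above) ---
// Shows the ValidationCacheEXT::getData wrapper above persisting the cache so it can be fed back through
// pInitialData on the next run.  Assumptions: the file name validation_cache.bin is arbitrary, and the
// helper name saveValidationCache is made up for the example.
#include <vulkan/vulkan_raii.hpp>
#include <cstdint>
#include <fstream>
#include <vector>

void saveValidationCache( vk::raii::ValidationCacheEXT const & cache )
{
  std::vector<uint8_t> blob = cache.getData();  // the size-query / fetch loop is handled inside the wrapper
  std::ofstream out( "validation_cache.bin", std::ios::binary );
  out.write( reinterpret_cast<char const *>( blob.data() ), static_cast<std::streamsize>( blob.size() ) );
}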
allocator ); + } + + // wrapper function for command vkGetAccelerationStructureMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureMemoryRequirementsNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR Device::getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements; + getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetAccelerationStructureMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureMemoryRequirementsNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + // wrapper function for command vkBindAccelerationStructureMemoryNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindAccelerationStructureMemoryNV.html + VULKAN_HPP_INLINE void Device::bindAccelerationStructureMemoryNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindAccelerationStructureMemoryNV && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkBindAccelerationStructureMemoryNV( + static_cast( m_device ), bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" ); + } + + // wrapper function for command vkCmdBuildAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructureNV.html + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructureNV && "Function requires " ); + + getDispatcher()->vkCmdBuildAccelerationStructureNV( static_cast( m_commandBuffer ), + reinterpret_cast( &info ), + static_cast( instanceData ), + static_cast( instanceOffset ), + static_cast( update ), + static_cast( dst ), + static_cast( src 
), + static_cast( scratch ), + static_cast( scratchOffset ) ); + } + + // wrapper function for command vkCmdCopyAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureNV.html + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureNV && "Function requires " ); + + getDispatcher()->vkCmdCopyAccelerationStructureNV( static_cast( m_commandBuffer ), + static_cast( dst ), + static_cast( src ), + static_cast( mode ) ); + } + + // wrapper function for command vkCmdTraceRaysNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysNV.html + VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysNV && "Function requires " ); + + getDispatcher()->vkCmdTraceRaysNV( static_cast( m_commandBuffer ), + static_cast( raygenShaderBindingTableBuffer ), + static_cast( raygenShaderBindingOffset ), + static_cast( missShaderBindingTableBuffer ), + static_cast( missShaderBindingOffset ), + static_cast( missShaderBindingStride ), + static_cast( hitShaderBindingTableBuffer ), + static_cast( hitShaderBindingOffset ), + static_cast( hitShaderBindingStride ), + static_cast( callableShaderBindingTableBuffer ), + static_cast( callableShaderBindingOffset ), + static_cast( callableShaderBindingStride ), + width, + height, + depth ); + } + + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + Device::createRayTracingPipelinesNV( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateRayTracingPipelinesNV( + static_cast( m_device ), + pipelineCache ? 
static_cast( **pipelineCache ) : 0, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRayTracingPipelinesNV" ); +# endif + } + + std::vector pipelinesRAII; + pipelinesRAII.reserve( pipelines.size() ); + for ( auto & pipeline : pipelines ) + { + pipelinesRAII.emplace_back( *this, *reinterpret_cast( &pipeline ), allocator, result ); + } + return pipelinesRAII; + } + + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createRayTracingPipelineNV( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateRayTracingPipelinesNV( + static_cast( m_device ), + pipelineCache ? static_cast( **pipelineCache ) : 0, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRayTracingPipelineNV" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator, result ); + } + + // wrapper function for command vkGetRayTracingShaderGroupHandlesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Pipeline::getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV && + "Function requires or " ); + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast( m_device ), + static_cast( m_pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesNV" ); + + return data; + } + + // wrapper function for command vkGetRayTracingShaderGroupHandlesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingShaderGroupHandleNV( uint32_t firstGroup, uint32_t groupCount ) const + { + VULKAN_HPP_ASSERT( 
getDispatcher()->vkGetRayTracingShaderGroupHandlesNV && + "Function requires or " ); + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast( m_device ), + static_cast( m_pipeline ), + firstGroup, + groupCount, + sizeof( DataType ), + reinterpret_cast( &data ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleNV" ); + + return data; + } + + // wrapper function for command vkGetAccelerationStructureHandleNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureHandleNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector AccelerationStructureNV::getHandle( size_t dataSize ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureHandleNV && "Function requires " ); + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetAccelerationStructureHandleNV( static_cast( m_device ), + static_cast( m_accelerationStructureNV ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" ); + + return data; + } + + // wrapper function for command vkGetAccelerationStructureHandleNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureHandleNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType AccelerationStructureNV::getHandle() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureHandleNV && "Function requires " ); + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetAccelerationStructureHandleNV( static_cast( m_device ), + static_cast( m_accelerationStructureNV ), + sizeof( DataType ), + reinterpret_cast( &data ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" ); + + return data; + } + + // wrapper function for command vkCmdWriteAccelerationStructuresPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteAccelerationStructuresPropertiesNV.html + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV && + "Function requires " ); + + getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV( static_cast( m_commandBuffer ), + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } + + // wrapper function for command vkCompileDeferredNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCompileDeferredNV.html + VULKAN_HPP_INLINE void Pipeline::compileDeferredNV( uint32_t shader ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCompileDeferredNV && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkCompileDeferredNV( static_cast( m_device ), static_cast( m_pipeline ), shader ) ); + 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::compileDeferredNV" ); + } + + //=== VK_KHR_maintenance3 === + + // wrapper function for command vkGetDescriptorSetLayoutSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupportKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupportKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; + getDispatcher()->vkGetDescriptorSetLayoutSupportKHR( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + + return support; + } + + // wrapper function for command vkGetDescriptorSetLayoutSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupportKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupportKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get(); + getDispatcher()->vkGetDescriptorSetLayoutSupportKHR( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + + return structureChain; + } + + //=== VK_KHR_draw_indirect_count === + + // wrapper function for command vkCmdDrawIndirectCountKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectCountKHR.html + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCountKHR && + "Function requires or or " ); + + getDispatcher()->vkCmdDrawIndirectCountKHR( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + // wrapper function for command vkCmdDrawIndexedIndirectCountKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirectCountKHR.html + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdDrawIndexedIndirectCountKHR && + "Function requires or or " ); + + getDispatcher()->vkCmdDrawIndexedIndirectCountKHR( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + //=== VK_EXT_external_memory_host === + + // wrapper function for command vkGetMemoryHostPointerPropertiesEXT, 
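// --- Illustrative usage sketch (editorial addition, not part of the generated header or the diff above) ---
// Shows the Device::getDescriptorSetLayoutSupportKHR wrapper above being used to test whether a descriptor
// set layout (for example one with a very large descriptor count) can actually be created before attempting
// to create it.  The helper name layoutSupported is made up for the example.
#include <vulkan/vulkan_raii.hpp>

bool layoutSupported( vk::raii::Device const & device, vk::DescriptorSetLayoutCreateInfo const & createInfo )
{
  return device.getDescriptorSetLayoutSupportKHR( createInfo ).supported == VK_TRUE;
}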
see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryHostPointerPropertiesEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT + Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryHostPointerPropertiesEXT && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetMemoryHostPointerPropertiesEXT( static_cast( m_device ), + static_cast( handleType ), + pHostPointer, + reinterpret_cast( &memoryHostPointerProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" ); + + return memoryHostPointerProperties; + } + + //=== VK_AMD_buffer_marker === + + // wrapper function for command vkCmdWriteBufferMarkerAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteBufferMarkerAMD.html + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarkerAMD && "Function requires " ); + + getDispatcher()->vkCmdWriteBufferMarkerAMD( static_cast( m_commandBuffer ), + static_cast( pipelineStage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); + } + + // wrapper function for command vkCmdWriteBufferMarker2AMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteBufferMarker2AMD.html + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarker2AMD && "Function requires " ); + + getDispatcher()->vkCmdWriteBufferMarker2AMD( static_cast( m_commandBuffer ), + static_cast( stage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); + } + + //=== VK_EXT_calibrated_timestamps === + + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getCalibrateableTimeDomainsEXT() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT && + "Function requires or " ); + + std::vector timeDomains; + uint32_t timeDomainCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( static_cast( m_physicalDevice ), &timeDomainCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + static_cast( m_physicalDevice ), &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + return timeDomains; + } + + // wrapper function for command vkGetCalibratedTimestampsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair, uint64_t> Device::getCalibratedTimestampsEXT( + VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsEXT && + "Function requires or " ); + + std::pair, uint64_t> data_( std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); + std::vector & timestamps = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetCalibratedTimestampsEXT( static_cast( m_device ), + timestampInfos.size(), + reinterpret_cast( timestampInfos.data() ), + timestamps.data(), + &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); + + return data_; + } + + // wrapper function for command vkGetCalibratedTimestampsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair + Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsEXT && + "Function requires or " ); + + std::pair data_; + uint64_t & timestamp = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetCalibratedTimestampsEXT( + static_cast( m_device ), 1, reinterpret_cast( ×tampInfo ), ×tamp, &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" ); + + return data_; + } + + //=== VK_NV_mesh_shader === + + // wrapper function for command vkCmdDrawMeshTasksNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksNV.html + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksNV && "Function requires " ); + + getDispatcher()->vkCmdDrawMeshTasksNV( static_cast( m_commandBuffer ), taskCount, firstTask ); + } + + // wrapper function for command vkCmdDrawMeshTasksIndirectNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectNV.html + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectNV && "Function requires " ); + + getDispatcher()->vkCmdDrawMeshTasksIndirectNV( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } + + // wrapper function for command vkCmdDrawMeshTasksIndirectCountNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectCountNV.html + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer 
buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectCountNV && "Function requires " ); + + getDispatcher()->vkCmdDrawMeshTasksIndirectCountNV( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + //=== VK_NV_scissor_exclusive === + + // wrapper function for command vkCmdSetExclusiveScissorEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExclusiveScissorEnableNV.html + VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorEnableNV( + uint32_t firstExclusiveScissor, + VULKAN_HPP_NAMESPACE::ArrayProxy const & exclusiveScissorEnables ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExclusiveScissorEnableNV && + "Function requires " ); + + getDispatcher()->vkCmdSetExclusiveScissorEnableNV( static_cast( m_commandBuffer ), + firstExclusiveScissor, + exclusiveScissorEnables.size(), + reinterpret_cast( exclusiveScissorEnables.data() ) ); + } + + // wrapper function for command vkCmdSetExclusiveScissorNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExclusiveScissorNV.html + VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( + uint32_t firstExclusiveScissor, VULKAN_HPP_NAMESPACE::ArrayProxy const & exclusiveScissors ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExclusiveScissorNV && "Function requires " ); + + getDispatcher()->vkCmdSetExclusiveScissorNV( static_cast( m_commandBuffer ), + firstExclusiveScissor, + exclusiveScissors.size(), + reinterpret_cast( exclusiveScissors.data() ) ); + } + + //=== VK_NV_device_diagnostic_checkpoints === + + // wrapper function for command vkCmdSetCheckpointNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCheckpointNV.html + template + VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCheckpointNV && "Function requires " ); + + getDispatcher()->vkCmdSetCheckpointNV( static_cast( m_commandBuffer ), reinterpret_cast( &checkpointMarker ) ); + } + + // wrapper function for command vkGetQueueCheckpointDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointDataNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Queue::getCheckpointDataNV() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointDataNV && + "Function requires " ); + + std::vector checkpointData; + uint32_t checkpointDataCount; + getDispatcher()->vkGetQueueCheckpointDataNV( static_cast( m_queue ), &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + getDispatcher()->vkGetQueueCheckpointDataNV( + static_cast( m_queue ), &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; + } + + // wrapper function for command vkGetQueueCheckpointData2NV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointData2NV.html + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE std::vector Queue::getCheckpointData2NV() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointData2NV && + "Function requires " ); + + std::vector checkpointData; + uint32_t checkpointDataCount; + getDispatcher()->vkGetQueueCheckpointData2NV( static_cast( m_queue ), &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + getDispatcher()->vkGetQueueCheckpointData2NV( + static_cast( m_queue ), &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; + } + + //=== VK_KHR_timeline_semaphore === + + // wrapper function for command vkGetSemaphoreCounterValueKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreCounterValueKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValueKHR() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreCounterValueKHR && + "Function requires or " ); + + uint64_t value; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetSemaphoreCounterValueKHR( static_cast( m_device ), static_cast( m_semaphore ), &value ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValueKHR" ); + + return value; + } + + // wrapper function for command vkWaitSemaphoresKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitSemaphoresKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, + uint64_t timeout ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkWaitSemaphoresKHR && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkWaitSemaphoresKHR( static_cast( m_device ), reinterpret_cast( &waitInfo ), timeout ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + + return static_cast( result ); + } + + // wrapper function for command vkSignalSemaphoreKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSignalSemaphoreKHR.html + VULKAN_HPP_INLINE void Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSignalSemaphoreKHR && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkSignalSemaphoreKHR( static_cast( m_device ), reinterpret_cast( &signalInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" ); + } + + //=== VK_INTEL_performance_query === + + // wrapper function for command vkInitializePerformanceApiINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkInitializePerformanceApiINTEL.html + VULKAN_HPP_INLINE void Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkInitializePerformanceApiINTEL && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkInitializePerformanceApiINTEL( + static_cast( m_device ), reinterpret_cast( &initializeInfo ) 
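// --- Illustrative usage sketch (editorial addition, not part of the generated header or the diff above) ---
// Shows the Device::waitSemaphoresKHR wrapper above waiting for a timeline semaphore to reach a given value
// with a one-second timeout; the wrapper returns eSuccess or eTimeout and reports real errors through
// resultCheck.  Assumptions: `semaphore` was created as a timeline semaphore, and the helper name
// waitForTimelineValue is made up for the example.
#include <vulkan/vulkan_raii.hpp>
#include <cstdint>

bool waitForTimelineValue( vk::raii::Device const & device, vk::Semaphore semaphore, uint64_t value )
{
  vk::SemaphoreWaitInfo waitInfo;
  waitInfo.semaphoreCount = 1;
  waitInfo.pSemaphores    = &semaphore;
  waitInfo.pValues        = &value;
  return device.waitSemaphoresKHR( waitInfo, 1'000'000'000 ) == vk::Result::eSuccess;  // timeout in nanoseconds
}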
) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" ); + } + + // wrapper function for command vkUninitializePerformanceApiINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUninitializePerformanceApiINTEL.html + VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUninitializePerformanceApiINTEL && + "Function requires " ); + + getDispatcher()->vkUninitializePerformanceApiINTEL( static_cast( m_device ) ); + } + + // wrapper function for command vkCmdSetPerformanceMarkerINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceMarkerINTEL.html + VULKAN_HPP_INLINE void CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceMarkerINTEL && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCmdSetPerformanceMarkerINTEL( + static_cast( m_commandBuffer ), reinterpret_cast( &markerInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" ); + } + + // wrapper function for command vkCmdSetPerformanceStreamMarkerINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceStreamMarkerINTEL.html + VULKAN_HPP_INLINE void CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL( + static_cast( m_commandBuffer ), reinterpret_cast( &markerInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" ); + } + + // wrapper function for command vkCmdSetPerformanceOverrideINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceOverrideINTEL.html + VULKAN_HPP_INLINE void CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceOverrideINTEL && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCmdSetPerformanceOverrideINTEL( + static_cast( m_commandBuffer ), reinterpret_cast( &overrideInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" ); + } + + // wrapper function for command vkAcquirePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquirePerformanceConfigurationINTEL.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL>::Type + Device::acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + 
getDispatcher()->vkAcquirePerformanceConfigurationINTEL( static_cast( m_device ), + reinterpret_cast( &acquireInfo ), + reinterpret_cast( &configuration ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::acquirePerformanceConfigurationINTEL" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL( + *this, *reinterpret_cast( &configuration ) ); + } + + // wrapper function for command vkQueueSetPerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSetPerformanceConfigurationINTEL.html + VULKAN_HPP_INLINE void Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSetPerformanceConfigurationINTEL && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkQueueSetPerformanceConfigurationINTEL( + static_cast( m_queue ), static_cast( configuration ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); + } + + // wrapper function for command vkGetPerformanceParameterINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPerformanceParameterINTEL.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PerformanceValueINTEL + Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPerformanceParameterINTEL && "Function requires " ); + + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetPerformanceParameterINTEL( + static_cast( m_device ), static_cast( parameter ), reinterpret_cast( &value ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" ); + + return value; + } + + //=== VK_AMD_display_native_hdr === + + // wrapper function for command vkSetLocalDimmingAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLocalDimmingAMD.html + VULKAN_HPP_INLINE void SwapchainKHR::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetLocalDimmingAMD && "Function requires " ); + + getDispatcher()->vkSetLocalDimmingAMD( + static_cast( m_device ), static_cast( m_swapchainKHR ), static_cast( localDimmingEnable ) ); + } + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + // wrapper function for command vkCreateImagePipeSurfaceFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImagePipeSurfaceFUCHSIA.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateImagePipeSurfaceFUCHSIA( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + 
reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createImagePipeSurfaceFUCHSIA" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + // wrapper function for command vkCreateMetalSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMetalSurfaceEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateMetalSurfaceEXT( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createMetalSurfaceEXT" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + + // wrapper function for command vkGetPhysicalDeviceFragmentShadingRatesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFragmentShadingRatesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getFragmentShadingRatesKHR() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR && + "Function requires " ); + + std::vector fragmentShadingRates; + uint32_t fragmentShadingRateCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR( + static_cast( m_physicalDevice ), &fragmentShadingRateCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && fragmentShadingRateCount ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR( + static_cast( m_physicalDevice ), + &fragmentShadingRateCount, + reinterpret_cast( fragmentShadingRates.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); + VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); + if ( fragmentShadingRateCount < fragmentShadingRates.size() ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + } + return fragmentShadingRates; + } + + // wrapper function for command vkCmdSetFragmentShadingRateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFragmentShadingRateKHR.html + VULKAN_HPP_INLINE void + 
CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFragmentShadingRateKHR && + "Function requires " ); + + getDispatcher()->vkCmdSetFragmentShadingRateKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &fragmentSize ), + reinterpret_cast( combinerOps ) ); + } + + //=== VK_KHR_dynamic_rendering_local_read === + + // wrapper function for command vkCmdSetRenderingAttachmentLocationsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingAttachmentLocationsKHR.html + VULKAN_HPP_INLINE void + CommandBuffer::setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRenderingAttachmentLocationsKHR && + "Function requires or " ); + + getDispatcher()->vkCmdSetRenderingAttachmentLocationsKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &locationInfo ) ); + } + + // wrapper function for command vkCmdSetRenderingInputAttachmentIndicesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingInputAttachmentIndicesKHR.html + VULKAN_HPP_INLINE void CommandBuffer::setRenderingInputAttachmentIndicesKHR( + const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRenderingInputAttachmentIndicesKHR && + "Function requires or " ); + + getDispatcher()->vkCmdSetRenderingInputAttachmentIndicesKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &inputAttachmentIndexInfo ) ); + } + + //=== VK_EXT_buffer_device_address === + + // wrapper function for command vkGetBufferDeviceAddressEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddressEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetBufferDeviceAddressEXT && + "Function requires or or " ); + + VkDeviceAddress result = + getDispatcher()->vkGetBufferDeviceAddressEXT( static_cast( m_device ), reinterpret_cast( &info ) ); + + return static_cast( result ); + } + + //=== VK_EXT_tooling_info === + + // wrapper function for command vkGetPhysicalDeviceToolPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolPropertiesEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getToolPropertiesEXT() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT && + "Function requires or " ); + + std::vector toolProperties; + uint32_t toolCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT( static_cast( m_physicalDevice ), &toolCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT( + static_cast( m_physicalDevice ), &toolCount, reinterpret_cast( toolProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + if ( toolCount < toolProperties.size() ) + { + toolProperties.resize( toolCount ); + } + return toolProperties; + } + + //=== VK_KHR_present_wait === + + // wrapper function for command vkWaitForPresentKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForPresentKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result SwapchainKHR::waitForPresent( uint64_t presentId, uint64_t timeout ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkWaitForPresentKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkWaitForPresentKHR( static_cast( m_device ), static_cast( m_swapchainKHR ), presentId, timeout ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::waitForPresent", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + + return static_cast( result ); + } + + //=== VK_NV_cooperative_matrix === + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getCooperativeMatrixPropertiesNV() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + //=== VK_NV_coverage_reduction_mode === + + // wrapper function for command vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV && + "Function requires " ); + + std::vector combinations; + uint32_t combinationCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + static_cast( m_physicalDevice ), &combinationCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && combinationCount ) + { + combinations.resize( combinationCount ); + result = static_cast( 
getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + static_cast( m_physicalDevice ), + &combinationCount, + reinterpret_cast( combinations.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); + VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); + if ( combinationCount < combinations.size() ) + { + combinations.resize( combinationCount ); + } + return combinations; + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModes2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModes2EXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT && + "Function requires " ); + + std::vector presentModes; + uint32_t presentModeCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + return presentModes; + } + + // wrapper function for command vkAcquireFullScreenExclusiveModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireFullScreenExclusiveModeEXT.html + VULKAN_HPP_INLINE void SwapchainKHR::acquireFullScreenExclusiveModeEXT() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireFullScreenExclusiveModeEXT && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkAcquireFullScreenExclusiveModeEXT( static_cast( m_device ), static_cast( m_swapchainKHR ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireFullScreenExclusiveModeEXT" ); + } + + // wrapper function for command vkReleaseFullScreenExclusiveModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseFullScreenExclusiveModeEXT.html + VULKAN_HPP_INLINE void SwapchainKHR::releaseFullScreenExclusiveModeEXT() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseFullScreenExclusiveModeEXT && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkReleaseFullScreenExclusiveModeEXT( static_cast( m_device ), static_cast( m_swapchainKHR ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::SwapchainKHR::releaseFullScreenExclusiveModeEXT" ); + } + + // wrapper function for command vkGetDeviceGroupSurfacePresentModes2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupSurfacePresentModes2EXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR + Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT( static_cast( m_device ), + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &modes ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); + + return modes; + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + + // wrapper function for command vkCreateHeadlessSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateHeadlessSurfaceEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateHeadlessSurfaceEXT( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createHeadlessSurfaceEXT" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); + } + + //=== VK_KHR_buffer_device_address === + + // wrapper function for command vkGetBufferDeviceAddressKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddressKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetBufferDeviceAddressKHR && + "Function requires or or " ); + + VkDeviceAddress result = + getDispatcher()->vkGetBufferDeviceAddressKHR( static_cast( m_device ), reinterpret_cast( &info ) ); + + return static_cast( result ); + } + + // wrapper function for command vkGetBufferOpaqueCaptureAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureAddressKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR && + "Function requires or " ); + + uint64_t result = + getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR( static_cast( m_device ), reinterpret_cast( &info ) ); + + return result; + } + + // 
wrapper function for command vkGetDeviceMemoryOpaqueCaptureAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryOpaqueCaptureAddressKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR && + "Function requires or " ); + + uint64_t result = getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR( static_cast( m_device ), + reinterpret_cast( &info ) ); + + return result; + } + + //=== VK_EXT_line_rasterization === + + // wrapper function for command vkCmdSetLineStippleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStippleEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetLineStippleEXT( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); + } + + //=== VK_EXT_host_query_reset === + + // wrapper function for command vkResetQueryPoolEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetQueryPoolEXT.html + VULKAN_HPP_INLINE void QueryPool::resetEXT( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkResetQueryPoolEXT && "Function requires or " ); + + getDispatcher()->vkResetQueryPoolEXT( static_cast( m_device ), static_cast( m_queryPool ), firstQuery, queryCount ); + } + + //=== VK_EXT_extended_dynamic_state === + + // wrapper function for command vkCmdSetCullModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCullModeEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullModeEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetCullModeEXT( static_cast( m_commandBuffer ), static_cast( cullMode ) ); + } + + // wrapper function for command vkCmdSetFrontFaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFrontFaceEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFaceEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetFrontFaceEXT( static_cast( m_commandBuffer ), static_cast( frontFace ) ); + } + + // wrapper function for command vkCmdSetPrimitiveTopologyEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveTopologyEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopologyEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetPrimitiveTopologyEXT( static_cast( m_commandBuffer ), static_cast( primitiveTopology ) ); + } + + // wrapper function for command vkCmdSetViewportWithCountEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWithCountEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports ) 
const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCountEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetViewportWithCountEXT( + static_cast( m_commandBuffer ), viewports.size(), reinterpret_cast( viewports.data() ) ); + } + + // wrapper function for command vkCmdSetScissorWithCountEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissorWithCountEXT.html + VULKAN_HPP_INLINE void + CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCountEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetScissorWithCountEXT( + static_cast( m_commandBuffer ), scissors.size(), reinterpret_cast( scissors.data() ) ); + } + + // wrapper function for command vkCmdBindVertexBuffers2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers2EXT.html + VULKAN_HPP_INLINE void + CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes, + VULKAN_HPP_NAMESPACE::ArrayProxy const & strides ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2EXT && + "Function requires or or " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); + VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" ); + } + if ( !strides.empty() && buffers.size() != strides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBindVertexBuffers2EXT( static_cast( m_commandBuffer ), + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ), + reinterpret_cast( strides.data() ) ); + } + + // wrapper function for command vkCmdSetDepthTestEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthTestEnableEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnableEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetDepthTestEnableEXT( static_cast( m_commandBuffer ), static_cast( depthTestEnable ) ); + } + + // wrapper function for command vkCmdSetDepthWriteEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthWriteEnableEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnableEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetDepthWriteEnableEXT( static_cast( m_commandBuffer ), static_cast( depthWriteEnable ) ); + } 
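A minimal usage sketch for the VK_EXT_extended_dynamic_state wrappers shown in this section, assuming `cmd` is a vk::raii::CommandBuffer already in the recording state, the device was created with VK_EXT_extended_dynamic_state (or Vulkan 1.3) enabled, and `vk` is the default VULKAN_HPP_NAMESPACE:

#include <vulkan/vulkan_raii.hpp>

// Sketch only: records the dynamic state covered by the wrappers above.
void recordDynamicState( vk::raii::CommandBuffer const & cmd, vk::Extent2D extent )
{
  cmd.setCullModeEXT( vk::CullModeFlagBits::eBack );
  cmd.setFrontFaceEXT( vk::FrontFace::eCounterClockwise );
  cmd.setPrimitiveTopologyEXT( vk::PrimitiveTopology::eTriangleList );

  // setViewportWithCountEXT / setScissorWithCountEXT take an ArrayProxy, so a single
  // element can be passed directly; the count is taken from the proxy size.
  vk::Viewport viewport( 0.0f, 0.0f, static_cast<float>( extent.width ), static_cast<float>( extent.height ), 0.0f, 1.0f );
  vk::Rect2D   scissor( vk::Offset2D( 0, 0 ), extent );
  cmd.setViewportWithCountEXT( viewport );
  cmd.setScissorWithCountEXT( scissor );

  cmd.setDepthTestEnableEXT( VK_TRUE );
  cmd.setDepthWriteEnableEXT( VK_TRUE );
  cmd.setDepthCompareOpEXT( vk::CompareOp::eLessOrEqual );
}
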
+ + // wrapper function for command vkCmdSetDepthCompareOpEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthCompareOpEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOpEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetDepthCompareOpEXT( static_cast( m_commandBuffer ), static_cast( depthCompareOp ) ); + } + + // wrapper function for command vkCmdSetDepthBoundsTestEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBoundsTestEnableEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT( static_cast( m_commandBuffer ), static_cast( depthBoundsTestEnable ) ); + } + + // wrapper function for command vkCmdSetStencilTestEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilTestEnableEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnableEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetStencilTestEnableEXT( static_cast( m_commandBuffer ), static_cast( stencilTestEnable ) ); + } + + // wrapper function for command vkCmdSetStencilOpEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilOpEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOpEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetStencilOpEXT( static_cast( m_commandBuffer ), + static_cast( faceMask ), + static_cast( failOp ), + static_cast( passOp ), + static_cast( depthFailOp ), + static_cast( compareOp ) ); + } + + //=== VK_KHR_deferred_host_operations === + + // wrapper function for command vkCreateDeferredOperationKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDeferredOperationKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateDeferredOperationKHR( + static_cast( m_device ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &deferredOperation ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createDeferredOperationKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR( + *this, *reinterpret_cast( &deferredOperation ), allocator ); + } + + 
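A brief usage sketch for the VK_KHR_deferred_host_operations wrappers, assuming `device` is a vk::raii::Device created with that extension enabled and that exceptions are enabled (the default), so createDeferredOperationKHR returns the RAII handle directly; the deferrable command the operation would be handed to (for example an acceleration-structure build) is omitted:

#include <vulkan/vulkan_raii.hpp>

// Sketch only: the command that actually defers its work to `op` is left out.
vk::Result runDeferred( vk::raii::Device const & device )
{
  vk::raii::DeferredOperationKHR op = device.createDeferredOperationKHR();

  // ... record/submit a deferrable command here, passing *op as its deferred operation ...

  // Drive the operation to completion on this thread; a real application could spawn
  // up to op.getMaxConcurrency() threads, each calling join() until it stops
  // returning eThreadIdleKHR.
  vk::Result joinResult;
  do
  {
    joinResult = op.join();
  } while ( joinResult == vk::Result::eThreadIdleKHR );

  // join() ended with eSuccess or eThreadDoneKHR; getResult() reports the outcome of
  // the deferred command itself.
  return op.getResult();
}
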
// wrapper function for command vkGetDeferredOperationMaxConcurrencyKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeferredOperationMaxConcurrencyKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t DeferredOperationKHR::getMaxConcurrency() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR && + "Function requires " ); + + uint32_t result = getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR( static_cast( m_device ), + static_cast( m_deferredOperationKHR ) ); + + return result; + } + + // wrapper function for command vkGetDeferredOperationResultKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeferredOperationResultKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result DeferredOperationKHR::getResult() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeferredOperationResultKHR && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetDeferredOperationResultKHR( static_cast( m_device ), static_cast( m_deferredOperationKHR ) ) ); + + return static_cast( result ); + } + + // wrapper function for command vkDeferredOperationJoinKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDeferredOperationJoinKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result DeferredOperationKHR::join() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkDeferredOperationJoinKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkDeferredOperationJoinKHR( static_cast( m_device ), static_cast( m_deferredOperationKHR ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::DeferredOperationKHR::join", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } ); + + return static_cast( result ); + } + + //=== VK_KHR_pipeline_executable_properties === + + // wrapper function for command vkGetPipelineExecutablePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutablePropertiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutablePropertiesKHR && + "Function requires " ); + + std::vector properties; + uint32_t executableCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPipelineExecutablePropertiesKHR( + static_cast( m_device ), reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && executableCount ) + { + properties.resize( executableCount ); + result = static_cast( + getDispatcher()->vkGetPipelineExecutablePropertiesKHR( static_cast( m_device ), + reinterpret_cast( &pipelineInfo ), + &executableCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); + VULKAN_HPP_ASSERT( executableCount <= properties.size() ); + if ( executableCount < properties.size() ) + { + properties.resize( executableCount ); + } + return properties; + } + + // wrapper function for 
command vkGetPipelineExecutableStatisticsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableStatisticsKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutableStatisticsKHR && + "Function requires " ); + + std::vector statistics; + uint32_t statisticCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPipelineExecutableStatisticsKHR( + static_cast( m_device ), reinterpret_cast( &executableInfo ), &statisticCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && statisticCount ) + { + statistics.resize( statisticCount ); + result = static_cast( + getDispatcher()->vkGetPipelineExecutableStatisticsKHR( static_cast( m_device ), + reinterpret_cast( &executableInfo ), + &statisticCount, + reinterpret_cast( statistics.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); + VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); + if ( statisticCount < statistics.size() ) + { + statistics.resize( statisticCount ); + } + return statistics; + } + + // wrapper function for command vkGetPipelineExecutableInternalRepresentationsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableInternalRepresentationsKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR && + "Function requires " ); + + std::vector internalRepresentations; + uint32_t internalRepresentationCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR( static_cast( m_device ), + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && internalRepresentationCount ) + { + internalRepresentations.resize( internalRepresentationCount ); + result = static_cast( getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR( + static_cast( m_device ), + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + reinterpret_cast( internalRepresentations.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); + VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); + if ( internalRepresentationCount < internalRepresentations.size() ) + { + internalRepresentations.resize( internalRepresentationCount ); + } + return internalRepresentations; + } + + //=== VK_EXT_host_image_copy === + + // wrapper function for command vkCopyMemoryToImageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToImageEXT.html + VULKAN_HPP_INLINE void Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo ) const + { + VULKAN_HPP_ASSERT( 
getDispatcher()->vkCopyMemoryToImageEXT && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCopyMemoryToImageEXT( + static_cast( m_device ), reinterpret_cast( ©MemoryToImageInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" ); + } + + // wrapper function for command vkCopyImageToMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToMemoryEXT.html + VULKAN_HPP_INLINE void Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToMemoryEXT && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCopyImageToMemoryEXT( + static_cast( m_device ), reinterpret_cast( ©ImageToMemoryInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" ); + } + + // wrapper function for command vkCopyImageToImageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToImageEXT.html + VULKAN_HPP_INLINE void Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToImageEXT && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCopyImageToImageEXT( + static_cast( m_device ), reinterpret_cast( ©ImageToImageInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" ); + } + + // wrapper function for command vkTransitionImageLayoutEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTransitionImageLayoutEXT.html + VULKAN_HPP_INLINE void + Device::transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkTransitionImageLayoutEXT && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkTransitionImageLayoutEXT( + static_cast( m_device ), transitions.size(), reinterpret_cast( transitions.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" ); + } + + // wrapper function for command vkGetImageSubresourceLayout2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2EXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetImageSubresourceLayout2EXT && + "Function requires or or or " ); + + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return layout; + } + + // wrapper function for command vkGetImageSubresourceLayout2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2EXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + 
getDispatcher()->vkGetImageSubresourceLayout2EXT && + "Function requires or or or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return structureChain; + } + + //=== VK_KHR_map_memory2 === + + // wrapper function for command vkMapMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkMapMemory2KHR && "Function requires or " ); + + void * pData; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkMapMemory2KHR( static_cast( m_device ), reinterpret_cast( &memoryMapInfo ), &pData ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" ); + + return pData; + } + + // wrapper function for command vkUnmapMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory2KHR.html + VULKAN_HPP_INLINE void Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUnmapMemory2KHR && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkUnmapMemory2KHR( static_cast( m_device ), reinterpret_cast( &memoryUnmapInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" ); + } + + //=== VK_EXT_swapchain_maintenance1 === + + // wrapper function for command vkReleaseSwapchainImagesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseSwapchainImagesEXT.html + VULKAN_HPP_INLINE void Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseSwapchainImagesEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkReleaseSwapchainImagesEXT( + static_cast( m_device ), reinterpret_cast( &releaseInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" ); + } + + //=== VK_NV_device_generated_commands === + + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsNV.html + 
template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + // wrapper function for command vkCmdPreprocessGeneratedCommandsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPreprocessGeneratedCommandsNV.html + VULKAN_HPP_INLINE void + CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPreprocessGeneratedCommandsNV && + "Function requires " ); + + getDispatcher()->vkCmdPreprocessGeneratedCommandsNV( static_cast( m_commandBuffer ), + reinterpret_cast( &generatedCommandsInfo ) ); + } + + // wrapper function for command vkCmdExecuteGeneratedCommandsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteGeneratedCommandsNV.html + VULKAN_HPP_INLINE void + CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdExecuteGeneratedCommandsNV && + "Function requires " ); + + getDispatcher()->vkCmdExecuteGeneratedCommandsNV( static_cast( m_commandBuffer ), + static_cast( isPreprocessed ), + reinterpret_cast( &generatedCommandsInfo ) ); + } + + // wrapper function for command vkCmdBindPipelineShaderGroupNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindPipelineShaderGroupNV.html + VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindPipelineShaderGroupNV && + "Function requires " ); + + getDispatcher()->vkCmdBindPipelineShaderGroupNV( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( pipeline ), + groupIndex ); + } + + // wrapper function for command vkCreateIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateIndirectCommandsLayoutNV( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if 
defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createIndirectCommandsLayoutNV" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV( + *this, *reinterpret_cast( &indirectCommandsLayout ), allocator ); + } + + //=== VK_EXT_depth_bias_control === + + // wrapper function for command vkCmdSetDepthBias2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBias2EXT.html + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBias2EXT && "Function requires " ); + + getDispatcher()->vkCmdSetDepthBias2EXT( static_cast( m_commandBuffer ), reinterpret_cast( &depthBiasInfo ) ); + } + + //=== VK_EXT_acquire_drm_display === + + // wrapper function for command vkAcquireDrmDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireDrmDisplayEXT.html + VULKAN_HPP_INLINE void PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireDrmDisplayEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkAcquireDrmDisplayEXT( static_cast( m_physicalDevice ), drmFd, static_cast( display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); + } + + // wrapper function for command vkGetDrmDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDrmDisplayEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DisplayKHR display; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetDrmDisplayEXT( + static_cast( m_physicalDevice ), drmFd, connectorId, reinterpret_cast( &display ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "PhysicalDevice::getDrmDisplayEXT" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, *reinterpret_cast( &display ) ); + } + + //=== VK_EXT_private_data === + + // wrapper function for command vkCreatePrivateDataSlotEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlotEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreatePrivateDataSlotEXT( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &privateDataSlot ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return 
VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createPrivateDataSlotEXT" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot( *this, *reinterpret_cast( &privateDataSlot ), allocator ); + } + + // wrapper function for command vkDestroyPrivateDataSlotEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlotEXT.html + VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Optional allocator ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkDestroyPrivateDataSlotEXT && + "Function requires or " ); + + getDispatcher()->vkDestroyPrivateDataSlotEXT( + static_cast( m_device ), + static_cast( privateDataSlot ), + reinterpret_cast( static_cast( allocator ) ) ); + } + + // wrapper function for command vkSetPrivateDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetPrivateDataEXT.html + VULKAN_HPP_INLINE void Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetPrivateDataEXT && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkSetPrivateDataEXT( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); + } + + // wrapper function for command vkGetPrivateDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPrivateDataEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPrivateDataEXT && "Function requires or " ); + + uint64_t data; + getDispatcher()->vkGetPrivateDataEXT( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), &data ); + + return data; + } + + //=== VK_KHR_video_encode_queue === + + // wrapper function for command vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR qualityLevelProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &qualityLevelInfo ), + reinterpret_cast( &qualityLevelProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); + + return qualityLevelProperties; + } + + // wrapper function for command 
vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR && + "Function requires " ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR & qualityLevelProperties = + structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &qualityLevelInfo ), + reinterpret_cast( &qualityLevelProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); + + return structureChain; + } + + // wrapper function for command vkGetEncodedVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEncodedVideoSessionParametersKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair> + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetEncodedVideoSessionParametersKHR && + "Function requires " ); + + std::pair> data_; + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first; + std::vector & data = data_.second; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetEncodedVideoSessionParametersKHR( + static_cast( m_device ), + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( getDispatcher()->vkGetEncodedVideoSessionParametersKHR( + static_cast( m_device ), + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + + return data_; + } + + // wrapper function for command vkGetEncodedVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEncodedVideoSessionParametersKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair, std::vector> + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetEncodedVideoSessionParametersKHR && + "Function requires " ); + + std::pair, std::vector> data_; + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = + data_.first.template get(); + std::vector & data = data_.second; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetEncodedVideoSessionParametersKHR( + static_cast( m_device ), + 
reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( getDispatcher()->vkGetEncodedVideoSessionParametersKHR( + static_cast( m_device ), + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + + return data_; + } + + // wrapper function for command vkCmdEncodeVideoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEncodeVideoKHR.html + VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEncodeVideoKHR && "Function requires " ); + + getDispatcher()->vkCmdEncodeVideoKHR( static_cast( m_commandBuffer ), reinterpret_cast( &encodeInfo ) ); + } + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + // wrapper function for command vkCreateCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaModuleNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::CudaModuleNV module; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateCudaModuleNV( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &module ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createCudaModuleNV" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV( *this, *reinterpret_cast( &module ), allocator ); + } + + // wrapper function for command vkGetCudaModuleCacheNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCudaModuleCacheNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector CudaModuleNV::getCache() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetCudaModuleCacheNV && "Function requires " ); + + std::vector cacheData; + size_t cacheSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetCudaModuleCacheNV( static_cast( m_device ), static_cast( m_cudaModuleNV ), &cacheSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && cacheSize ) + { + cacheData.resize( cacheSize ); + result = static_cast( getDispatcher()->vkGetCudaModuleCacheNV( + static_cast( m_device ), static_cast( m_cudaModuleNV ), &cacheSize, reinterpret_cast( cacheData.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CudaModuleNV::getCache" ); + VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() ); + if ( cacheSize < cacheData.size() ) + { + cacheData.resize( 
cacheSize ); + } + return cacheData; + } + + // wrapper function for command vkCreateCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaFunctionNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::CudaFunctionNV function; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateCudaFunctionNV( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &function ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createCudaFunctionNV" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV( *this, *reinterpret_cast( &function ), allocator ); + } + + // wrapper function for command vkCmdCudaLaunchKernelNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCudaLaunchKernelNV.html + VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCudaLaunchKernelNV && "Function requires " ); + + getDispatcher()->vkCmdCudaLaunchKernelNV( static_cast( m_commandBuffer ), reinterpret_cast( &launchInfo ) ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_QCOM_tile_shading === + + // wrapper function for command vkCmdDispatchTileQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchTileQCOM.html + VULKAN_HPP_INLINE void CommandBuffer::dispatchTileQCOM() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchTileQCOM && "Function requires " ); + + getDispatcher()->vkCmdDispatchTileQCOM( static_cast( m_commandBuffer ) ); + } + + // wrapper function for command vkCmdBeginPerTileExecutionQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginPerTileExecutionQCOM.html + VULKAN_HPP_INLINE void + CommandBuffer::beginPerTileExecutionQCOM( const VULKAN_HPP_NAMESPACE::PerTileBeginInfoQCOM & perTileBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginPerTileExecutionQCOM && "Function requires " ); + + getDispatcher()->vkCmdBeginPerTileExecutionQCOM( static_cast( m_commandBuffer ), + reinterpret_cast( &perTileBeginInfo ) ); + } + + // wrapper function for command vkCmdEndPerTileExecutionQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndPerTileExecutionQCOM.html + VULKAN_HPP_INLINE void CommandBuffer::endPerTileExecutionQCOM( const VULKAN_HPP_NAMESPACE::PerTileEndInfoQCOM & perTileEndInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndPerTileExecutionQCOM && "Function requires " ); + + getDispatcher()->vkCmdEndPerTileExecutionQCOM( static_cast( m_commandBuffer ), + reinterpret_cast( &perTileEndInfo ) ); + } + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + + // wrapper function for command vkExportMetalObjectsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkExportMetalObjectsEXT.html + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT Device::exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkExportMetalObjectsEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo; + getDispatcher()->vkExportMetalObjectsEXT( static_cast( m_device ), reinterpret_cast( &metalObjectsInfo ) ); + + return metalObjectsInfo; + } + + // wrapper function for command vkExportMetalObjectsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkExportMetalObjectsEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkExportMetalObjectsEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get(); + getDispatcher()->vkExportMetalObjectsEXT( static_cast( m_device ), reinterpret_cast( &metalObjectsInfo ) ); + + return structureChain; + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + + // wrapper function for command vkCmdSetEvent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent2KHR.html + VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent2KHR && "Function requires or " ); + + getDispatcher()->vkCmdSetEvent2KHR( + static_cast( m_commandBuffer ), static_cast( event ), reinterpret_cast( &dependencyInfo ) ); + } + + // wrapper function for command vkCmdResetEvent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetEvent2KHR.html + VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetEvent2KHR && "Function requires or " ); + + getDispatcher()->vkCmdResetEvent2KHR( + static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); + } + + // wrapper function for command vkCmdWaitEvents2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents2KHR.html + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents2KHR && "Function requires or " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); +# else + if ( events.size() != dependencyInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdWaitEvents2KHR( static_cast( m_commandBuffer ), + events.size(), + reinterpret_cast( events.data() ), + reinterpret_cast( dependencyInfos.data() ) ); + } + + // wrapper function for command vkCmdPipelineBarrier2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier2KHR.html + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
getDispatcher()->vkCmdPipelineBarrier2KHR && + "Function requires or " ); + + getDispatcher()->vkCmdPipelineBarrier2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( &dependencyInfo ) ); + } + + // wrapper function for command vkCmdWriteTimestamp2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteTimestamp2KHR.html + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp2KHR && + "Function requires or " ); + + getDispatcher()->vkCmdWriteTimestamp2KHR( + static_cast( m_commandBuffer ), static_cast( stage ), static_cast( queryPool ), query ); + } + + // wrapper function for command vkQueueSubmit2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit2KHR.html + VULKAN_HPP_INLINE void Queue::submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit2KHR && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkQueueSubmit2KHR( + static_cast( m_queue ), submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); + } + + //=== VK_EXT_descriptor_buffer === + + // wrapper function for command vkGetDescriptorSetLayoutSizeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSizeEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize DescriptorSetLayout::getSizeEXT() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSizeEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::DeviceSize layoutSizeInBytes; + getDispatcher()->vkGetDescriptorSetLayoutSizeEXT( static_cast( m_device ), + static_cast( m_descriptorSetLayout ), + reinterpret_cast( &layoutSizeInBytes ) ); + + return layoutSizeInBytes; + } + + // wrapper function for command vkGetDescriptorSetLayoutBindingOffsetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutBindingOffsetEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize + DescriptorSetLayout::getBindingOffsetEXT( uint32_t binding ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutBindingOffsetEXT && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::DeviceSize offset; + getDispatcher()->vkGetDescriptorSetLayoutBindingOffsetEXT( + static_cast( m_device ), static_cast( m_descriptorSetLayout ), binding, reinterpret_cast( &offset ) ); + + return offset; + } + + // wrapper function for command vkGetDescriptorEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorEXT.html + VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, + size_t dataSize, + void * pDescriptor ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorEXT && "Function requires " ); + + getDispatcher()->vkGetDescriptorEXT( + static_cast( m_device ), reinterpret_cast( &descriptorInfo ), dataSize, pDescriptor ); + } + + // wrapper function for command vkGetDescriptorEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorEXT.html + 
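    // --- Editor's illustrative aside (not part of the generated header) -------------------------------
    // A minimal, hedged sketch of how the VK_KHR_synchronization2 RAII wrappers above
    // (pipelineBarrier2KHR / submit2KHR) are typically driven. The names `commandBuffer`, `queue`,
    // `image` and `fence` are assumptions: previously created vk::raii objects (and a non-owning
    // vk::Image handle) owned by the application, not names introduced by this header.
    //
    //   vk::ImageMemoryBarrier2 barrier{ vk::PipelineStageFlagBits2::eTopOfPipe, vk::AccessFlagBits2::eNone,
    //                                    vk::PipelineStageFlagBits2::eTransfer,  vk::AccessFlagBits2::eTransferWrite,
    //                                    vk::ImageLayout::eUndefined,            vk::ImageLayout::eTransferDstOptimal,
    //                                    VK_QUEUE_FAMILY_IGNORED,                VK_QUEUE_FAMILY_IGNORED,
    //                                    image, { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } };
    //   vk::DependencyInfo dependencyInfo{};
    //   dependencyInfo.setImageMemoryBarriers( barrier );
    //   commandBuffer.pipelineBarrier2KHR( dependencyInfo );
    //
    //   vk::CommandBufferSubmitInfo cbInfo{ *commandBuffer };
    //   vk::SubmitInfo2             submitInfo{};
    //   submitInfo.setCommandBufferInfos( cbInfo );
    //   queue.submit2KHR( submitInfo, *fence );
    // ---------------------------------------------------------------------------------------------------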
template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DescriptorType + Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorEXT && "Function requires " ); + + DescriptorType descriptor; + getDispatcher()->vkGetDescriptorEXT( static_cast( m_device ), + reinterpret_cast( &descriptorInfo ), + sizeof( DescriptorType ), + reinterpret_cast( &descriptor ) ); + + return descriptor; + } + + // wrapper function for command vkCmdBindDescriptorBuffersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBuffersEXT.html + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBuffersEXT( + VULKAN_HPP_NAMESPACE::ArrayProxy const & bindingInfos ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorBuffersEXT && "Function requires " ); + + getDispatcher()->vkCmdBindDescriptorBuffersEXT( static_cast( m_commandBuffer ), + bindingInfos.size(), + reinterpret_cast( bindingInfos.data() ) ); + } + + // wrapper function for command vkCmdSetDescriptorBufferOffsetsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDescriptorBufferOffsetsEXT.html + VULKAN_HPP_INLINE void + CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferIndices, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDescriptorBufferOffsetsEXT && + "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( bufferIndices.size() == offsets.size() ); +# else + if ( bufferIndices.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setDescriptorBufferOffsetsEXT: bufferIndices.size() != offsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdSetDescriptorBufferOffsetsEXT( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + bufferIndices.size(), + bufferIndices.data(), + reinterpret_cast( offsets.data() ) ); + } + + // wrapper function for command vkCmdBindDescriptorBufferEmbeddedSamplersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBufferEmbeddedSamplersEXT.html + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplersEXT && + "Function requires " ); + + getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplersEXT( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( layout ), set ); + } + + // wrapper function for command vkGetBufferOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType + Device::getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferOpaqueCaptureDescriptorDataEXT && + "Function requires " ); + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( 
getDispatcher()->vkGetBufferOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" ); + + return data; + } + + // wrapper function for command vkGetImageOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType + Device::getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageOpaqueCaptureDescriptorDataEXT && + "Function requires " ); + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetImageOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" ); + + return data; + } + + // wrapper function for command vkGetImageViewOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType + Device::getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewOpaqueCaptureDescriptorDataEXT && + "Function requires " ); + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetImageViewOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" ); + + return data; + } + + // wrapper function for command vkGetSamplerOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSamplerOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType + Device::getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSamplerOpaqueCaptureDescriptorDataEXT && + "Function requires " ); + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetSamplerOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" ); + + return data; + } + + // wrapper function for command vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( + const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT && + "Function requires " ); + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( 
getDispatcher()->vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" ); + + return data; + } + + //=== VK_NV_fragment_shading_rate_enums === + + // wrapper function for command vkCmdSetFragmentShadingRateEnumNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFragmentShadingRateEnumNV.html + VULKAN_HPP_INLINE void + CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFragmentShadingRateEnumNV && + "Function requires " ); + + getDispatcher()->vkCmdSetFragmentShadingRateEnumNV( static_cast( m_commandBuffer ), + static_cast( shadingRate ), + reinterpret_cast( combinerOps ) ); + } + + //=== VK_EXT_mesh_shader === + + // wrapper function for command vkCmdDrawMeshTasksEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksEXT.html + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksEXT && "Function requires " ); + + getDispatcher()->vkCmdDrawMeshTasksEXT( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); + } + + // wrapper function for command vkCmdDrawMeshTasksIndirectEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectEXT.html + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectEXT && "Function requires " ); + + getDispatcher()->vkCmdDrawMeshTasksIndirectEXT( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } + + // wrapper function for command vkCmdDrawMeshTasksIndirectCountEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectCountEXT.html + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectCountEXT && "Function requires " ); + + getDispatcher()->vkCmdDrawMeshTasksIndirectCountEXT( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + //=== VK_KHR_copy_commands2 === + + // wrapper function for command vkCmdCopyBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer2KHR.html + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer2KHR && "Function requires or " ); + + getDispatcher()->vkCmdCopyBuffer2KHR( static_cast( m_commandBuffer ), 
reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
+    }
+
+    // wrapper function for command vkCmdCopyImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage2KHR.html
+    VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage2KHR && "Function requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdCopyImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
+    }
+
+    // wrapper function for command vkCmdCopyBufferToImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage2KHR.html
+    VULKAN_HPP_INLINE void
+      CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBufferToImage2KHR &&
+                         "Function requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdCopyBufferToImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                   reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
+    }
+
+    // wrapper function for command vkCmdCopyImageToBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer2KHR.html
+    VULKAN_HPP_INLINE void
+      CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImageToBuffer2KHR &&
+                         "Function requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdCopyImageToBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                   reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
+    }
+
+    // wrapper function for command vkCmdBlitImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage2KHR.html
+    VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage2KHR && "Function requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdBlitImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
+    }
+
+    // wrapper function for command vkCmdResolveImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage2KHR.html
+    VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage2KHR && "Function requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdResolveImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                              reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
+    }
+
+    //=== VK_EXT_device_fault ===
+    // wrapper function for command vkGetDeviceFaultInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceFaultInfoEXT.html
+    template <typename Dispatch>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts,
+                                                                           VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT *   pFaultInfo,
+                                                                           Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+      return static_cast<Result>( d.vkGetDeviceFaultInfoEXT( static_cast<VkDevice>( m_device ),
+                                                             reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ),
+                                                             reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) );
+    }
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_NV_acquire_winrt_display ===
+
+    // wrapper function for command vkAcquireWinrtDisplayNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireWinrtDisplayNV.html
+    VULKAN_HPP_INLINE void DisplayKHR::acquireWinrtNV() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireWinrtDisplayNV && "Function requires <VK_NV_acquire_winrt_display>" );
+
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+        getDispatcher()->vkAcquireWinrtDisplayNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_displayKHR ) ) );
+      VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::acquireWinrtNV" );
+    }
+
+    // wrapper function for command vkGetWinrtDisplayNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetWinrtDisplayNV.html
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>::Type
+      PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
+    {
+      VULKAN_HPP_NAMESPACE::DisplayKHR display;
+      VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetWinrtDisplayNV(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ), deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
+        return VULKAN_HPP_UNEXPECTED( result );
+# else
+        VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "PhysicalDevice::getWinrtDisplayNV" );
+# endif
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, *reinterpret_cast<VkDisplayKHR *>( &display ) );
+    }
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+# if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+    //=== VK_EXT_directfb_surface ===
+
+    // wrapper function for command vkCreateDirectFBSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDirectFBSurfaceEXT.html
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
+      Instance::createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo,
+                                          VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+      VULKAN_HPP_RAII_CREATE_NOEXCEPT
+    {
+      VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+      VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateDirectFBSurfaceEXT(
+        static_cast<VkInstance>( m_instance ),
+        reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
+        return VULKAN_HPP_UNEXPECTED( result );
+# else
+        VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createDirectFBSurfaceEXT" );
+# endif
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast<VkSurfaceKHR *>( &surface ), allocator );
+    }
+
+    // wrapper function for command vkGetPhysicalDeviceDirectFBPresentationSupportEXT, see
+    // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDirectFBPresentationSupportEXT.html
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
+      PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT &&
+                         "Function requires <VK_EXT_directfb_surface>" );
+
+      VkBool32 result =
+        getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &dfb );
+
+      return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
+    }
+# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+    //=== VK_EXT_vertex_input_dynamic_state ===
+
+    // wrapper function for command 
vkCmdSetVertexInputEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetVertexInputEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( + VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexBindingDescriptions, + VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexAttributeDescriptions ) const + VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetVertexInputEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetVertexInputEXT( static_cast( m_commandBuffer ), + vertexBindingDescriptions.size(), + reinterpret_cast( vertexBindingDescriptions.data() ), + vertexAttributeDescriptions.size(), + reinterpret_cast( vertexAttributeDescriptions.data() ) ); + } + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + + // wrapper function for command vkGetMemoryZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryZirconHandleFUCHSIA.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE zx_handle_t + Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryZirconHandleFUCHSIA && "Function requires " ); + + zx_handle_t zirconHandle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetMemoryZirconHandleFUCHSIA( + static_cast( m_device ), reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" ); + + return zirconHandle; + } + + // wrapper function for command vkGetMemoryZirconHandlePropertiesFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryZirconHandlePropertiesFUCHSIA.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA + Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA( + static_cast( m_device ), + static_cast( handleType ), + zirconHandle, + reinterpret_cast( &memoryZirconHandleProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" ); + + return memoryZirconHandleProperties; + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + + // wrapper function for command vkImportSemaphoreZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreZirconHandleFUCHSIA.html + VULKAN_HPP_INLINE void + Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA( + static_cast( m_device ), reinterpret_cast( &importSemaphoreZirconHandleInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" ); + } + + // wrapper function for command vkGetSemaphoreZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreZirconHandleFUCHSIA.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE zx_handle_t + Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA && + "Function requires " ); + + zx_handle_t zirconHandle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA( + static_cast( m_device ), reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" ); + + return zirconHandle; + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + // wrapper function for command vkCreateBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferCollectionFUCHSIA.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateBufferCollectionFUCHSIA( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &collection ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createBufferCollectionFUCHSIA" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA( + *this, *reinterpret_cast( &collection ), allocator ); + } + + // wrapper function for command vkSetBufferCollectionImageConstraintsFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetBufferCollectionImageConstraintsFUCHSIA.html + VULKAN_HPP_INLINE void BufferCollectionFUCHSIA::setImageConstraints( const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA( static_cast( m_device ), + static_cast( m_bufferCollectionFUCHSIA ), + reinterpret_cast( &imageConstraintsInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setImageConstraints" ); + } + + // wrapper function for command vkSetBufferCollectionBufferConstraintsFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetBufferCollectionBufferConstraintsFUCHSIA.html + VULKAN_HPP_INLINE void + BufferCollectionFUCHSIA::setBufferConstraints( const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo ) const + { + VULKAN_HPP_ASSERT( 
getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA( static_cast( m_device ), + static_cast( m_bufferCollectionFUCHSIA ), + reinterpret_cast( &bufferConstraintsInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setBufferConstraints" ); + } + + // wrapper function for command vkGetBufferCollectionPropertiesFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferCollectionPropertiesFUCHSIA.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA BufferCollectionFUCHSIA::getProperties() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA( static_cast( m_device ), + static_cast( m_bufferCollectionFUCHSIA ), + reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::getProperties" ); + + return properties; + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + + // wrapper function for command vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( + static_cast( m_device ), static_cast( m_renderPass ), reinterpret_cast( &maxWorkgroupSize ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI" ); + + return maxWorkgroupSize; + } + + // wrapper function for command vkCmdSubpassShadingHUAWEI, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSubpassShadingHUAWEI.html + VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSubpassShadingHUAWEI && "Function requires " ); + + getDispatcher()->vkCmdSubpassShadingHUAWEI( static_cast( m_commandBuffer ) ); + } + + //=== VK_HUAWEI_invocation_mask === + + // wrapper function for command vkCmdBindInvocationMaskHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindInvocationMaskHUAWEI.html + VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindInvocationMaskHUAWEI && "Function requires " ); + + getDispatcher()->vkCmdBindInvocationMaskHUAWEI( + static_cast( m_commandBuffer ), static_cast( imageView ), static_cast( imageLayout ) ); + } + + //=== VK_NV_external_memory_rdma === + + // wrapper function for command vkGetMemoryRemoteAddressNV, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryRemoteAddressNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RemoteAddressNV + Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryRemoteAddressNV && "Function requires " ); + + VULKAN_HPP_NAMESPACE::RemoteAddressNV address; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetMemoryRemoteAddressNV( static_cast( m_device ), + reinterpret_cast( &memoryGetRemoteAddressInfo ), + reinterpret_cast( &address ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" ); + + return address; + } + + //=== VK_EXT_pipeline_properties === + + // wrapper function for command vkGetPipelinePropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelinePropertiesEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::BaseOutStructure + Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelinePropertiesEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPipelinePropertiesEXT( static_cast( m_device ), + reinterpret_cast( &pipelineInfo ), + reinterpret_cast( &pipelineProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" ); + + return pipelineProperties; + } + + //=== VK_EXT_extended_dynamic_state2 === + + // wrapper function for command vkCmdSetPatchControlPointsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPatchControlPointsEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPatchControlPointsEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetPatchControlPointsEXT( static_cast( m_commandBuffer ), patchControlPoints ); + } + + // wrapper function for command vkCmdSetRasterizerDiscardEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizerDiscardEnableEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT( static_cast( m_commandBuffer ), static_cast( rasterizerDiscardEnable ) ); + } + + // wrapper function for command vkCmdSetDepthBiasEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBiasEnableEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnableEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetDepthBiasEnableEXT( static_cast( m_commandBuffer ), static_cast( depthBiasEnable ) ); + } + + // wrapper function for command vkCmdSetLogicOpEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLogicOpEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( 
VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetLogicOpEXT( static_cast( m_commandBuffer ), static_cast( logicOp ) ); + } + + // wrapper function for command vkCmdSetPrimitiveRestartEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveRestartEnableEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT( static_cast( m_commandBuffer ), static_cast( primitiveRestartEnable ) ); + } + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + // wrapper function for command vkCreateScreenSurfaceQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateScreenSurfaceQNX.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateScreenSurfaceQNX( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createScreenSurfaceQNX" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); + } + + // wrapper function for command vkGetPhysicalDeviceScreenPresentationSupportQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceScreenPresentationSupportQNX.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX && + "Function requires " ); + + VkBool32 result = + getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX( static_cast( m_physicalDevice ), queueFamilyIndex, &window ); + + return static_cast( result ); + } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + + // wrapper function for command vkCmdSetColorWriteEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorWriteEnableEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteEnables ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteEnableEXT && "Function requires " ); + + getDispatcher()->vkCmdSetColorWriteEnableEXT( + static_cast( m_commandBuffer ), colorWriteEnables.size(), reinterpret_cast( colorWriteEnables.data() ) ); + } + + //=== VK_KHR_ray_tracing_maintenance1 === + + // wrapper function for command 
vkCmdTraceRaysIndirect2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysIndirect2KHR.html + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysIndirect2KHR && "Function requires " ); + + getDispatcher()->vkCmdTraceRaysIndirect2KHR( static_cast( m_commandBuffer ), static_cast( indirectDeviceAddress ) ); + } + + //=== VK_EXT_multi_draw === + + // wrapper function for command vkCmdDrawMultiEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMultiEXT.html + VULKAN_HPP_INLINE void + CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & vertexInfo, + uint32_t instanceCount, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiEXT && "Function requires " ); + + getDispatcher()->vkCmdDrawMultiEXT( static_cast( m_commandBuffer ), + vertexInfo.size(), + reinterpret_cast( vertexInfo.data() ), + instanceCount, + firstInstance, + vertexInfo.stride() ); + } + + // wrapper function for command vkCmdDrawMultiIndexedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMultiIndexedEXT.html + VULKAN_HPP_INLINE void + CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & indexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + Optional vertexOffset ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiIndexedEXT && "Function requires " ); + + getDispatcher()->vkCmdDrawMultiIndexedEXT( static_cast( m_commandBuffer ), + indexInfo.size(), + reinterpret_cast( indexInfo.data() ), + instanceCount, + firstInstance, + indexInfo.stride(), + static_cast( vertexOffset ) ); + } + + //=== VK_EXT_opacity_micromap === + + // wrapper function for command vkCreateMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMicromapEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MicromapEXT micromap; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateMicromapEXT( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( µmap ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createMicromapEXT" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::MicromapEXT( *this, *reinterpret_cast( µmap ), allocator ); + } + + // wrapper function for command vkCmdBuildMicromapsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildMicromapsEXT.html + VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildMicromapsEXT && "Function requires " ); + + getDispatcher()->vkCmdBuildMicromapsEXT( + static_cast( m_commandBuffer ), infos.size(), reinterpret_cast( infos.data() ) ); + } + + // wrapper function for command 
vkBuildMicromapsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBuildMicromapsEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBuildMicromapsEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkBuildMicromapsEXT( static_cast( m_device ), + static_cast( deferredOperation ), + infos.size(), + reinterpret_cast( infos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } + + // wrapper function for command vkCopyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMicromapEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMicromapEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkCopyMicromapEXT( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } + + // wrapper function for command vkCopyMicromapToMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMicromapToMemoryEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMicromapToMemoryEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkCopyMicromapToMemoryEXT( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } + + // wrapper function for command vkCopyMemoryToMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToMicromapEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToMicromapEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkCopyMemoryToMicromapEXT( static_cast( m_device ), + static_cast( deferredOperation ), + 
reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } + + // wrapper function for command vkWriteMicromapsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteMicromapsPropertiesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkWriteMicromapsPropertiesEXT && "Function requires " ); + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkWriteMicromapsPropertiesEXT( static_cast( m_device ), + micromaps.size(), + reinterpret_cast( micromaps.data() ), + static_cast( queryType ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ), + stride ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" ); + + return data; + } + + // wrapper function for command vkWriteMicromapsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteMicromapsPropertiesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType + Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkWriteMicromapsPropertiesEXT && "Function requires " ); + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkWriteMicromapsPropertiesEXT( static_cast( m_device ), + micromaps.size(), + reinterpret_cast( micromaps.data() ), + static_cast( queryType ), + sizeof( DataType ), + reinterpret_cast( &data ), + stride ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" ); + + return data; + } + + // wrapper function for command vkCmdCopyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMicromapEXT.html + VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMicromapEXT && "Function requires " ); + + getDispatcher()->vkCmdCopyMicromapEXT( static_cast( m_commandBuffer ), reinterpret_cast( &info ) ); + } + + // wrapper function for command vkCmdCopyMicromapToMemoryEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMicromapToMemoryEXT.html + VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMicromapToMemoryEXT && "Function requires " ); + + getDispatcher()->vkCmdCopyMicromapToMemoryEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &info ) ); + } + + // wrapper function for command vkCmdCopyMemoryToMicromapEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToMicromapEXT.html + 
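    // --- Editor's illustrative aside (not part of the generated header) -------------------------------
    // A hedged sketch of how the VK_EXT_opacity_micromap wrappers in this section fit together:
    // query build sizes for a build description, back the micromap with a buffer, then create the
    // RAII object. `device`, `backingBuffer` and `usageCounts` are assumptions (an existing
    // vk::raii::Device, a vk::Buffer created with the micromap-storage usage flag, and an array of
    // vk::MicromapUsageEXT describing the input data); exceptions are assumed to be enabled.
    //
    //   vk::MicromapBuildInfoEXT buildInfo{};
    //   buildInfo.type = vk::MicromapTypeEXT::eOpacityMicromap;
    //   buildInfo.mode = vk::BuildMicromapModeEXT::eBuild;
    //   buildInfo.setUsageCounts( usageCounts );
    //   vk::MicromapBuildSizesInfoEXT sizes =
    //     device.getMicromapBuildSizesEXT( vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo );
    //
    //   vk::MicromapCreateInfoEXT createInfo{};
    //   createInfo.buffer = backingBuffer;     // must provide at least sizes.micromapSize bytes
    //   createInfo.size   = sizes.micromapSize;
    //   createInfo.type   = vk::MicromapTypeEXT::eOpacityMicromap;
    //   vk::raii::MicromapEXT micromap = device.createMicromapEXT( createInfo );
    // ---------------------------------------------------------------------------------------------------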
VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToMicromapEXT && "Function requires " ); + + getDispatcher()->vkCmdCopyMemoryToMicromapEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &info ) ); + } + + // wrapper function for command vkCmdWriteMicromapsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteMicromapsPropertiesEXT.html + VULKAN_HPP_INLINE void + CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteMicromapsPropertiesEXT && + "Function requires " ); + + getDispatcher()->vkCmdWriteMicromapsPropertiesEXT( static_cast( m_commandBuffer ), + micromaps.size(), + reinterpret_cast( micromaps.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } + + // wrapper function for command vkGetDeviceMicromapCompatibilityEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMicromapCompatibilityEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMicromapCompatibilityEXT && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; + getDispatcher()->vkGetDeviceMicromapCompatibilityEXT( static_cast( m_device ), + reinterpret_cast( &versionInfo ), + reinterpret_cast( &compatibility ) ); + + return compatibility; + } + + // wrapper function for command vkGetMicromapBuildSizesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMicromapBuildSizesEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT + Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMicromapBuildSizesEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo; + getDispatcher()->vkGetMicromapBuildSizesEXT( static_cast( m_device ), + static_cast( buildType ), + reinterpret_cast( &buildInfo ), + reinterpret_cast( &sizeInfo ) ); + + return sizeInfo; + } + + //=== VK_HUAWEI_cluster_culling_shader === + + // wrapper function for command vkCmdDrawClusterHUAWEI, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawClusterHUAWEI.html + VULKAN_HPP_INLINE void CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawClusterHUAWEI && "Function requires " ); + + getDispatcher()->vkCmdDrawClusterHUAWEI( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); + } + + // wrapper function for command vkCmdDrawClusterIndirectHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawClusterIndirectHUAWEI.html + VULKAN_HPP_INLINE void CommandBuffer::drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer, + 
VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawClusterIndirectHUAWEI && + "Function requires " ); + + getDispatcher()->vkCmdDrawClusterIndirectHUAWEI( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ) ); + } + + //=== VK_EXT_pageable_device_local_memory === + + // wrapper function for command vkSetDeviceMemoryPriorityEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDeviceMemoryPriorityEXT.html + VULKAN_HPP_INLINE void DeviceMemory::setPriorityEXT( float priority ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetDeviceMemoryPriorityEXT && + "Function requires " ); + + getDispatcher()->vkSetDeviceMemoryPriorityEXT( static_cast( m_device ), static_cast( m_deviceMemory ), priority ); + } + + //=== VK_KHR_maintenance4 === + + // wrapper function for command vkGetDeviceBufferMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirementsKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetDeviceBufferMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirementsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + // wrapper function for command vkGetDeviceImageMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirementsKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetDeviceImageMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirementsKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + // wrapper function for command vkGetDeviceImageSparseMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirementsKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR && + "Function requires or " ); + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR( + static_cast( m_device ), reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + //=== VK_VALVE_descriptor_set_host_mapping === + + // wrapper function for command vkGetDescriptorSetLayoutHostMappingInfoVALVE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutHostMappingInfoVALVE.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE Device::getDescriptorSetLayoutHostMappingInfoVALVE( + const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutHostMappingInfoVALVE && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping; + getDispatcher()->vkGetDescriptorSetLayoutHostMappingInfoVALVE( static_cast( m_device ), + reinterpret_cast( &bindingReference ), + reinterpret_cast( &hostMapping ) ); + + return hostMapping; + } + + // wrapper function for command vkGetDescriptorSetHostMappingVALVE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetHostMappingVALVE.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * DescriptorSet::getHostMappingVALVE() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetHostMappingVALVE && + "Function requires " ); + + void * pData; + getDispatcher()->vkGetDescriptorSetHostMappingVALVE( static_cast( m_device ), static_cast( m_descriptorSet ), &pData ); + + return pData; + } + + //=== VK_NV_copy_memory_indirect === + + // wrapper function for command vkCmdCopyMemoryIndirectNV, see 
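    // A small sketch for the VK_KHR_maintenance4 wrappers above: querying the memory
    // requirements of a buffer layout without creating the buffer first (assumes the default
    // vk:: namespace and a constructed vk::raii::Device `device`; the helper name and the
    // chosen size/usage are illustrative):
    inline vk::MemoryRequirements queryBufferRequirementsExample( vk::raii::Device const & device )
    {
      vk::BufferCreateInfo bufferCreateInfo{ {}, 64 * 1024, vk::BufferUsageFlagBits::eStorageBuffer };
      vk::DeviceBufferMemoryRequirements info{};
      info.pCreateInfo = &bufferCreateInfo;
      // No VkBuffer handle is needed; the requirements are derived from the create info alone.
      return device.getBufferMemoryRequirementsKHR( info ).memoryRequirements;
    }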
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryIndirectNV.html + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryIndirectNV && "Function requires " ); + + getDispatcher()->vkCmdCopyMemoryIndirectNV( + static_cast( m_commandBuffer ), static_cast( copyBufferAddress ), copyCount, stride ); + } + + // wrapper function for command vkCmdCopyMemoryToImageIndirectNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToImageIndirectNV.html + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV( + VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageSubresources ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToImageIndirectNV && + "Function requires " ); + + getDispatcher()->vkCmdCopyMemoryToImageIndirectNV( static_cast( m_commandBuffer ), + static_cast( copyBufferAddress ), + imageSubresources.size(), + stride, + static_cast( dstImage ), + static_cast( dstImageLayout ), + reinterpret_cast( imageSubresources.data() ) ); + } + + //=== VK_NV_memory_decompression === + + // wrapper function for command vkCmdDecompressMemoryNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecompressMemoryNV.html + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & decompressMemoryRegions ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecompressMemoryNV && "Function requires " ); + + getDispatcher()->vkCmdDecompressMemoryNV( static_cast( m_commandBuffer ), + decompressMemoryRegions.size(), + reinterpret_cast( decompressMemoryRegions.data() ) ); + } + + // wrapper function for command vkCmdDecompressMemoryIndirectCountNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecompressMemoryIndirectCountNV.html + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecompressMemoryIndirectCountNV && + "Function requires " ); + + getDispatcher()->vkCmdDecompressMemoryIndirectCountNV( static_cast( m_commandBuffer ), + static_cast( indirectCommandsAddress ), + static_cast( indirectCommandsCountAddress ), + stride ); + } + + //=== VK_NV_device_generated_commands_compute === + + // wrapper function for command vkGetPipelineIndirectMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectMemoryRequirementsNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &memoryRequirements 
) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetPipelineIndirectMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectMemoryRequirementsNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + // wrapper function for command vkCmdUpdatePipelineIndirectBufferNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdUpdatePipelineIndirectBufferNV.html + VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdUpdatePipelineIndirectBufferNV && + "Function requires " ); + + getDispatcher()->vkCmdUpdatePipelineIndirectBufferNV( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + } + + // wrapper function for command vkGetPipelineIndirectDeviceAddressNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectDeviceAddressNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectDeviceAddressNV && + "Function requires " ); + + VkDeviceAddress result = getDispatcher()->vkGetPipelineIndirectDeviceAddressNV( + static_cast( m_device ), reinterpret_cast( &info ) ); + + return static_cast( result ); + } + + //=== VK_EXT_extended_dynamic_state3 === + + // wrapper function for command vkCmdSetDepthClampEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClampEnableEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClampEnableEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetDepthClampEnableEXT( static_cast( m_commandBuffer ), static_cast( depthClampEnable ) ); + } + + // wrapper function for command vkCmdSetPolygonModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPolygonModeEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPolygonModeEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetPolygonModeEXT( static_cast( m_commandBuffer ), static_cast( polygonMode ) ); + } + + // wrapper function for command vkCmdSetRasterizationSamplesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizationSamplesEXT.html + VULKAN_HPP_INLINE void 
CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationSamplesEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetRasterizationSamplesEXT( static_cast( m_commandBuffer ), + static_cast( rasterizationSamples ) ); + } + + // wrapper function for command vkCmdSetSampleMaskEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleMaskEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sampleMask ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleMaskEXT && + "Function requires or " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( sampleMask.size() == ( static_cast( samples ) + 31 ) / 32 ); +# else + if ( sampleMask.size() != ( static_cast( samples ) + 31 ) / 32 ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::setSampleMaskEXT: sampleMask.size() != ( static_cast( samples ) + 31 ) / 32" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdSetSampleMaskEXT( static_cast( m_commandBuffer ), + static_cast( samples ), + reinterpret_cast( sampleMask.data() ) ); + } + + // wrapper function for command vkCmdSetAlphaToCoverageEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetAlphaToCoverageEnableEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT( static_cast( m_commandBuffer ), static_cast( alphaToCoverageEnable ) ); + } + + // wrapper function for command vkCmdSetAlphaToOneEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetAlphaToOneEnableEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToOneEnableEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetAlphaToOneEnableEXT( static_cast( m_commandBuffer ), static_cast( alphaToOneEnable ) ); + } + + // wrapper function for command vkCmdSetLogicOpEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLogicOpEnableEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEnableEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetLogicOpEnableEXT( static_cast( m_commandBuffer ), static_cast( logicOpEnable ) ); + } + + // wrapper function for command vkCmdSetColorBlendEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendEnableEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( + uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEnables ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEnableEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetColorBlendEnableEXT( static_cast( m_commandBuffer ), + firstAttachment, + colorBlendEnables.size(), + reinterpret_cast( colorBlendEnables.data() ) ); + } + + // wrapper function for 
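    // A short sketch for the VK_EXT_extended_dynamic_state3 sample-mask wrapper above: the
    // proxy must carry exactly ( samples + 31 ) / 32 mask words, so 8 samples need a single
    // 32-bit word (assumes a vk::raii::CommandBuffer `cmd` in the recording state):
    inline void setEightSampleMaskExample( vk::raii::CommandBuffer const & cmd )
    {
      cmd.setRasterizationSamplesEXT( vk::SampleCountFlagBits::e8 );
      vk::SampleMask mask = 0xFFu;  // one word covers all 8 samples
      cmd.setSampleMaskEXT( vk::SampleCountFlagBits::e8, mask );
    }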
command vkCmdSetColorBlendEquationEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendEquationEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT( + uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEquations ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEquationEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetColorBlendEquationEXT( static_cast( m_commandBuffer ), + firstAttachment, + colorBlendEquations.size(), + reinterpret_cast( colorBlendEquations.data() ) ); + } + + // wrapper function for command vkCmdSetColorWriteMaskEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorWriteMaskEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT( + uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteMasks ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteMaskEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetColorWriteMaskEXT( static_cast( m_commandBuffer ), + firstAttachment, + colorWriteMasks.size(), + reinterpret_cast( colorWriteMasks.data() ) ); + } + + // wrapper function for command vkCmdSetTessellationDomainOriginEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetTessellationDomainOriginEXT.html + VULKAN_HPP_INLINE void + CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetTessellationDomainOriginEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetTessellationDomainOriginEXT( static_cast( m_commandBuffer ), + static_cast( domainOrigin ) ); + } + + // wrapper function for command vkCmdSetRasterizationStreamEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizationStreamEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationStreamEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetRasterizationStreamEXT( static_cast( m_commandBuffer ), rasterizationStream ); + } + + // wrapper function for command vkCmdSetConservativeRasterizationModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetConservativeRasterizationModeEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setConservativeRasterizationModeEXT( + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetConservativeRasterizationModeEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetConservativeRasterizationModeEXT( static_cast( m_commandBuffer ), + static_cast( conservativeRasterizationMode ) ); + } + + // wrapper function for command vkCmdSetExtraPrimitiveOverestimationSizeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExtraPrimitiveOverestimationSizeEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast( m_commandBuffer ), 
extraPrimitiveOverestimationSize ); + } + + // wrapper function for command vkCmdSetDepthClipEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClipEnableEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipEnableEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetDepthClipEnableEXT( static_cast( m_commandBuffer ), static_cast( depthClipEnable ) ); + } + + // wrapper function for command vkCmdSetSampleLocationsEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleLocationsEnableEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEnableEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetSampleLocationsEnableEXT( static_cast( m_commandBuffer ), static_cast( sampleLocationsEnable ) ); + } + + // wrapper function for command vkCmdSetColorBlendAdvancedEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendAdvancedEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT( + uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendAdvancedEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetColorBlendAdvancedEXT( static_cast( m_commandBuffer ), + firstAttachment, + colorBlendAdvanced.size(), + reinterpret_cast( colorBlendAdvanced.data() ) ); + } + + // wrapper function for command vkCmdSetProvokingVertexModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetProvokingVertexModeEXT.html + VULKAN_HPP_INLINE void + CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetProvokingVertexModeEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetProvokingVertexModeEXT( static_cast( m_commandBuffer ), + static_cast( provokingVertexMode ) ); + } + + // wrapper function for command vkCmdSetLineRasterizationModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineRasterizationModeEXT.html + VULKAN_HPP_INLINE void + CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineRasterizationModeEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetLineRasterizationModeEXT( static_cast( m_commandBuffer ), + static_cast( lineRasterizationMode ) ); + } + + // wrapper function for command vkCmdSetLineStippleEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStippleEnableEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEnableEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetLineStippleEnableEXT( static_cast( m_commandBuffer ), static_cast( stippledLineEnable ) ); + } + + // wrapper function for command vkCmdSetDepthClipNegativeOneToOneEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClipNegativeOneToOneEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT( static_cast( m_commandBuffer ), static_cast( negativeOneToOne ) ); + } + + // wrapper function for command vkCmdSetViewportWScalingEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWScalingEnableNV.html + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWScalingEnableNV && + "Function requires or " ); + + getDispatcher()->vkCmdSetViewportWScalingEnableNV( static_cast( m_commandBuffer ), static_cast( viewportWScalingEnable ) ); + } + + // wrapper function for command vkCmdSetViewportSwizzleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportSwizzleNV.html + VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV( + uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportSwizzles ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportSwizzleNV && + "Function requires or " ); + + getDispatcher()->vkCmdSetViewportSwizzleNV( static_cast( m_commandBuffer ), + firstViewport, + viewportSwizzles.size(), + reinterpret_cast( viewportSwizzles.data() ) ); + } + + // wrapper function for command vkCmdSetCoverageToColorEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageToColorEnableNV.html + VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorEnableNV && + "Function requires or " ); + + getDispatcher()->vkCmdSetCoverageToColorEnableNV( static_cast( m_commandBuffer ), static_cast( coverageToColorEnable ) ); + } + + // wrapper function for command vkCmdSetCoverageToColorLocationNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageToColorLocationNV.html + VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorLocationNV && + "Function requires or " ); + + getDispatcher()->vkCmdSetCoverageToColorLocationNV( static_cast( m_commandBuffer ), coverageToColorLocation ); + } + + // wrapper function for command vkCmdSetCoverageModulationModeNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageModulationModeNV.html + VULKAN_HPP_INLINE void + CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationModeNV && + "Function requires or " ); + + getDispatcher()->vkCmdSetCoverageModulationModeNV( static_cast( m_commandBuffer ), + static_cast( coverageModulationMode ) ); + } + + // wrapper function for command vkCmdSetCoverageModulationTableEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageModulationTableEnableNV.html + VULKAN_HPP_INLINE void + 
CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableEnableNV && + "Function requires or " ); + + getDispatcher()->vkCmdSetCoverageModulationTableEnableNV( static_cast( m_commandBuffer ), + static_cast( coverageModulationTableEnable ) ); + } + + // wrapper function for command vkCmdSetCoverageModulationTableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageModulationTableNV.html + VULKAN_HPP_INLINE void + CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & coverageModulationTable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableNV && + "Function requires or " ); + + getDispatcher()->vkCmdSetCoverageModulationTableNV( + static_cast( m_commandBuffer ), coverageModulationTable.size(), coverageModulationTable.data() ); + } + + // wrapper function for command vkCmdSetShadingRateImageEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetShadingRateImageEnableNV.html + VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetShadingRateImageEnableNV && + "Function requires or " ); + + getDispatcher()->vkCmdSetShadingRateImageEnableNV( static_cast( m_commandBuffer ), static_cast( shadingRateImageEnable ) ); + } + + // wrapper function for command vkCmdSetRepresentativeFragmentTestEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRepresentativeFragmentTestEnableNV.html + VULKAN_HPP_INLINE void + CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV && + "Function requires or " ); + + getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV( static_cast( m_commandBuffer ), + static_cast( representativeFragmentTestEnable ) ); + } + + // wrapper function for command vkCmdSetCoverageReductionModeNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageReductionModeNV.html + VULKAN_HPP_INLINE void + CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageReductionModeNV && + "Function requires or " ); + + getDispatcher()->vkCmdSetCoverageReductionModeNV( static_cast( m_commandBuffer ), + static_cast( coverageReductionMode ) ); + } + + //=== VK_EXT_shader_module_identifier === + + // wrapper function for command vkGetShaderModuleIdentifierEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderModuleIdentifierEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT ShaderModule::getIdentifierEXT() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderModuleIdentifierEXT && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; + getDispatcher()->vkGetShaderModuleIdentifierEXT( + static_cast( m_device ), static_cast( m_shaderModule ), reinterpret_cast( &identifier ) ); + + return identifier; + } + + // wrapper function for command 
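    // A brief sketch for the VK_EXT_shader_module_identifier wrapper above: capture the
    // identifier of an existing shader module so that a later run can build pipelines from
    // the identifier instead of shipping SPIR-V (assumes the default vk:: namespace and a
    // constructed vk::raii::ShaderModule `shaderModule`; the helper name is illustrative):
    inline std::vector<uint8_t> captureShaderIdentifierExample( vk::raii::ShaderModule const & shaderModule )
    {
      vk::ShaderModuleIdentifierEXT id = shaderModule.getIdentifierEXT();
      // only the first identifierSize bytes of the fixed-size identifier array are meaningful
      return std::vector<uint8_t>( id.identifier.data(), id.identifier.data() + id.identifierSize );
    }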
vkGetShaderModuleCreateInfoIdentifierEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderModuleCreateInfoIdentifierEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT + Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderModuleCreateInfoIdentifierEXT && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; + getDispatcher()->vkGetShaderModuleCreateInfoIdentifierEXT( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &identifier ) ); + + return identifier; + } + + //=== VK_NV_optical_flow === + + // wrapper function for command vkGetPhysicalDeviceOpticalFlowImageFormatsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceOpticalFlowImageFormatsNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV && + "Function requires " ); + + std::vector imageFormatProperties; + uint32_t formatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV( + static_cast( m_physicalDevice ), + reinterpret_cast( &opticalFlowImageFormatInfo ), + &formatCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && formatCount ) + { + imageFormatProperties.resize( formatCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV( + static_cast( m_physicalDevice ), + reinterpret_cast( &opticalFlowImageFormatInfo ), + &formatCount, + reinterpret_cast( imageFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); + VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); + if ( formatCount < imageFormatProperties.size() ) + { + imageFormatProperties.resize( formatCount ); + } + return imageFormatProperties; + } + + // wrapper function for command vkCreateOpticalFlowSessionNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateOpticalFlowSessionNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateOpticalFlowSessionNV( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &session ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createOpticalFlowSessionNV" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV( *this, 
*reinterpret_cast( &session ), allocator ); + } + + // wrapper function for command vkBindOpticalFlowSessionImageNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindOpticalFlowSessionImageNV.html + VULKAN_HPP_INLINE void OpticalFlowSessionNV::bindImage( VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, + VULKAN_HPP_NAMESPACE::ImageView view, + VULKAN_HPP_NAMESPACE::ImageLayout layout ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindOpticalFlowSessionImageNV && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkBindOpticalFlowSessionImageNV( static_cast( m_device ), + static_cast( m_opticalFlowSessionNV ), + static_cast( bindingPoint ), + static_cast( view ), + static_cast( layout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::OpticalFlowSessionNV::bindImage" ); + } + + // wrapper function for command vkCmdOpticalFlowExecuteNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdOpticalFlowExecuteNV.html + VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdOpticalFlowExecuteNV && "Function requires " ); + + getDispatcher()->vkCmdOpticalFlowExecuteNV( static_cast( m_commandBuffer ), + static_cast( session ), + reinterpret_cast( &executeInfo ) ); + } + + //=== VK_KHR_maintenance5 === + + // wrapper function for command vkCmdBindIndexBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindIndexBuffer2KHR.html + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindIndexBuffer2KHR && + "Function requires or " ); + + getDispatcher()->vkCmdBindIndexBuffer2KHR( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( size ), + static_cast( indexType ) ); + } + + // wrapper function for command vkGetRenderingAreaGranularityKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderingAreaGranularityKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D + Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRenderingAreaGranularityKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Extent2D granularity; + getDispatcher()->vkGetRenderingAreaGranularityKHR( static_cast( m_device ), + reinterpret_cast( &renderingAreaInfo ), + reinterpret_cast( &granularity ) ); + + return granularity; + } + + // wrapper function for command vkGetDeviceImageSubresourceLayoutKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayoutKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + 
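    // A minimal sketch for the VK_KHR_maintenance5 wrapper above: binding only a sub-range of
    // an index buffer by passing an explicit size (vk::WholeSize would bind to the end of the
    // buffer instead). Assumes a recording vk::raii::CommandBuffer `cmd` and a vk::Buffer
    // `indexBuffer`; the helper name and range are illustrative:
    inline void bindIndexRangeExample( vk::raii::CommandBuffer const & cmd, vk::Buffer indexBuffer )
    {
      constexpr vk::DeviceSize offset = 0;
      constexpr vk::DeviceSize size   = 1024 * sizeof( uint16_t );  // bind the first 1024 indices only
      cmd.bindIndexBuffer2KHR( indexBuffer, offset, size, vk::IndexType::eUint16 );
    }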
getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &layout ) ); + + return layout; + } + + // wrapper function for command vkGetDeviceImageSubresourceLayoutKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayoutKHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &layout ) ); + + return structureChain; + } + + // wrapper function for command vkGetImageSubresourceLayout2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2KHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetImageSubresourceLayout2KHR && + "Function requires or or or " ); + + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + getDispatcher()->vkGetImageSubresourceLayout2KHR( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return layout; + } + + // wrapper function for command vkGetImageSubresourceLayout2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2KHR.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetImageSubresourceLayout2KHR && + "Function requires or or or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + getDispatcher()->vkGetImageSubresourceLayout2KHR( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return structureChain; + } + + //=== VK_AMD_anti_lag === + + // wrapper function for command vkAntiLagUpdateAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAntiLagUpdateAMD.html + VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkAntiLagUpdateAMD && "Function requires " ); + + getDispatcher()->vkAntiLagUpdateAMD( static_cast( m_device ), reinterpret_cast( &data ) ); + } + + //=== VK_EXT_shader_object === + + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + std::vector shaders( createInfos.size() ); + 
VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateShadersEXT( + static_cast( m_device ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( shaders.data() ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createShadersEXT" ); +# endif + } + + std::vector shadersRAII; + shadersRAII.reserve( shaders.size() ); + for ( auto & shader : shaders ) + { + shadersRAII.emplace_back( *this, *reinterpret_cast( &shader ), allocator, result ); + } + return shadersRAII; + } + + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ShaderEXT shader; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateShadersEXT( + static_cast( m_device ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &shader ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createShaderEXT" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT( *this, *reinterpret_cast( &shader ), allocator, result ); + } + + // wrapper function for command vkGetShaderBinaryDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderBinaryDataEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector ShaderEXT::getBinaryData() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderBinaryDataEXT && "Function requires " ); + + std::vector data; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetShaderBinaryDataEXT( static_cast( m_device ), static_cast( m_shaderEXT ), &dataSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( getDispatcher()->vkGetShaderBinaryDataEXT( + static_cast( m_device ), static_cast( m_shaderEXT ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::ShaderEXT::getBinaryData" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return data; + } + + // wrapper function for command vkCmdBindShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindShadersEXT.html + VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & stages, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shaders ) const + { + VULKAN_HPP_ASSERT( 
getDispatcher()->vkCmdBindShadersEXT && "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( stages.size() == shaders.size() ); +# else + if ( stages.size() != shaders.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBindShadersEXT( static_cast( m_commandBuffer ), + stages.size(), + reinterpret_cast( stages.data() ), + reinterpret_cast( shaders.data() ) ); + } + + // wrapper function for command vkCmdSetDepthClampRangeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClampRangeEXT.html + VULKAN_HPP_INLINE void + CommandBuffer::setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, + Optional depthClampRange ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClampRangeEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetDepthClampRangeEXT( + static_cast( m_commandBuffer ), + static_cast( depthClampMode ), + reinterpret_cast( static_cast( depthClampRange ) ) ); + } + + //=== VK_KHR_pipeline_binary === + + // wrapper function for command vkCreatePipelineBinariesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineBinariesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + Device::createPipelineBinariesKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + std::vector pipelineBinaries; + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; + VULKAN_HPP_NAMESPACE::Result result; + if ( createInfo.pKeysAndDataInfo ) + { + VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); + pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); + binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( getDispatcher()->vkCreatePipelineBinariesKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + else + { + VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); + result = static_cast( getDispatcher()->vkCreatePipelineBinariesKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaries.resize( binaries.pipelineBinaryCount ); + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( getDispatcher()->vkCreatePipelineBinariesKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + } + + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncomplete ) && + ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createPipelineBinariesKHR" ); +# endif + } + + std::vector pipelineBinariesRAII; + pipelineBinariesRAII.reserve( pipelineBinaries.size() ); + for ( 
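    // A condensed sketch for the VK_EXT_shader_object wrappers above: create a vertex and a
    // fragment shader object from SPIR-V and bind them without building a pipeline (assumes
    // the default vk:: namespace, a vk::raii::Device `device`, a recording command buffer,
    // and valid SPIR-V blobs; descriptor-set-layout and push-constant setup is omitted):
    inline void createAndBindShadersExample( vk::raii::Device const &        device,
                                             vk::raii::CommandBuffer const & cmd,
                                             std::vector<uint32_t> const &   vertexSpirv,
                                             std::vector<uint32_t> const &   fragmentSpirv )
    {
      std::array<vk::ShaderCreateInfoEXT, 2> createInfos;
      createInfos[0]
        .setStage( vk::ShaderStageFlagBits::eVertex )
        .setNextStage( vk::ShaderStageFlagBits::eFragment )
        .setCodeType( vk::ShaderCodeTypeEXT::eSpirv )
        .setCodeSize( vertexSpirv.size() * sizeof( uint32_t ) )
        .setPCode( vertexSpirv.data() )
        .setPName( "main" );
      createInfos[1]
        .setStage( vk::ShaderStageFlagBits::eFragment )
        .setCodeType( vk::ShaderCodeTypeEXT::eSpirv )
        .setCodeSize( fragmentSpirv.size() * sizeof( uint32_t ) )
        .setPCode( fragmentSpirv.data() )
        .setPName( "main" );
      std::vector<vk::raii::ShaderEXT> shaders = device.createShadersEXT( createInfos );
      // stages and shaders must have matching sizes, as checked by the wrapper above
      cmd.bindShadersEXT( { vk::ShaderStageFlagBits::eVertex, vk::ShaderStageFlagBits::eFragment },
                          { *shaders[0], *shaders[1] } );
    }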
auto & pipelineBinary : pipelineBinaries ) + { + pipelineBinariesRAII.emplace_back( *this, *reinterpret_cast( &pipelineBinary ), allocator, result ); + } + return pipelineBinariesRAII; + } + + // wrapper function for command vkGetPipelineKeyKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineKeyKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR + Device::getPipelineKeyKHR( Optional pipelineCreateInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineKeyKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR pipelineKey; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetPipelineKeyKHR( + static_cast( m_device ), + reinterpret_cast( static_cast( pipelineCreateInfo ) ), + reinterpret_cast( &pipelineKey ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineKeyKHR" ); + + return pipelineKey; + } + + // wrapper function for command vkGetPipelineBinaryDataKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineBinaryDataKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair> + Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineBinaryDataKHR && "Function requires " ); + + std::pair> data_; + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR & pipelineBinaryKey = data_.first; + std::vector & pipelineBinaryData = data_.second; + size_t pipelineBinaryDataSize; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPipelineBinaryDataKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + nullptr ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaryData.resize( pipelineBinaryDataSize ); + result = static_cast( + getDispatcher()->vkGetPipelineBinaryDataKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + reinterpret_cast( pipelineBinaryData.data() ) ) ); + } + + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineBinaryDataKHR" ); + + return data_; + } + + // wrapper function for command vkReleaseCapturedPipelineDataKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseCapturedPipelineDataKHR.html + VULKAN_HPP_INLINE void + Device::releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR & info, + Optional allocator ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseCapturedPipelineDataKHR && "Function requires " ); + + getDispatcher()->vkReleaseCapturedPipelineDataKHR( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( static_cast( allocator ) ) ); + } + + //=== VK_QCOM_tile_properties === + + // wrapper function for command vkGetFramebufferTilePropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFramebufferTilePropertiesQCOM.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Framebuffer::getTilePropertiesQCOM() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetFramebufferTilePropertiesQCOM && + "Function requires " ); + + std::vector properties; + uint32_t propertiesCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( 
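    // A compact sketch for the VK_KHR_pipeline_binary wrappers above: capture the binaries of
    // a pipeline that was created with the pipeline-binary capture flag, then read back each
    // key/data pair for an application-side cache (assumes a vk::raii::Device `device` and a
    // vk::Pipeline `pipeline`; the helper name is illustrative):
    inline void capturePipelineBinariesExample( vk::raii::Device const & device, vk::Pipeline pipeline )
    {
      vk::PipelineBinaryCreateInfoKHR createInfo{};
      createInfo.pipeline = pipeline;
      std::vector<vk::raii::PipelineBinaryKHR> binaries = device.createPipelineBinariesKHR( createInfo );
      for ( auto const & binary : binaries )
      {
        vk::PipelineBinaryDataInfoKHR dataInfo{};
        dataInfo.pipelineBinary = *binary;
        auto [key, data] = device.getPipelineBinaryDataKHR( dataInfo );
        // `key` identifies `data` when storing it in, or fetching it from, a cache
      }
    }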
getDispatcher()->vkGetFramebufferTilePropertiesQCOM( + static_cast( m_device ), static_cast( m_framebuffer ), &propertiesCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertiesCount ) + { + properties.resize( propertiesCount ); + result = static_cast( + getDispatcher()->vkGetFramebufferTilePropertiesQCOM( static_cast( m_device ), + static_cast( m_framebuffer ), + &propertiesCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + + VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); + if ( propertiesCount < properties.size() ) + { + properties.resize( propertiesCount ); + } + return properties; + } + + // wrapper function for command vkGetDynamicRenderingTilePropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDynamicRenderingTilePropertiesQCOM.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM + Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDynamicRenderingTilePropertiesQCOM && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties; + getDispatcher()->vkGetDynamicRenderingTilePropertiesQCOM( static_cast( m_device ), + reinterpret_cast( &renderingInfo ), + reinterpret_cast( &properties ) ); + + return properties; + } + + //=== VK_NV_cooperative_vector === + + // wrapper function for command vkGetPhysicalDeviceCooperativeVectorPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeVectorPropertiesNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getCooperativeVectorPropertiesNV() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeVectorPropertiesNV && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceCooperativeVectorPropertiesNV( static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceCooperativeVectorPropertiesNV( + static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeVectorPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + // wrapper function for command vkConvertCooperativeVectorMatrixNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkConvertCooperativeVectorMatrixNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::convertCooperativeVectorMatrixNV( const VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkConvertCooperativeVectorMatrixNV && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkConvertCooperativeVectorMatrixNV( + static_cast( m_device ), reinterpret_cast( &info ) ) ); + 
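    // A one-call sketch for the VK_NV_cooperative_vector wrapper above: the RAII enumeration
    // already drives the count/fill two-call protocol internally, so the caller just consumes
    // the resulting vector (assumes a vk::raii::PhysicalDevice `gpu`):
    inline void listCooperativeVectorPropertiesExample( vk::raii::PhysicalDevice const & gpu )
    {
      std::vector<vk::CooperativeVectorPropertiesNV> props = gpu.getCooperativeVectorPropertiesNV();
      for ( auto const & p : props )
      {
        // inspect the supported input/result component-type combinations reported in p
        (void)p;
      }
    }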
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::convertCooperativeVectorMatrixNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } ); + + return static_cast( result ); + } + + // wrapper function for command vkCmdConvertCooperativeVectorMatrixNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdConvertCooperativeVectorMatrixNV.html + VULKAN_HPP_INLINE void CommandBuffer::convertCooperativeVectorMatrixNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdConvertCooperativeVectorMatrixNV && + "Function requires " ); + + getDispatcher()->vkCmdConvertCooperativeVectorMatrixNV( + static_cast( m_commandBuffer ), infos.size(), reinterpret_cast( infos.data() ) ); + } + + //=== VK_NV_low_latency2 === + + // wrapper function for command vkSetLatencySleepModeNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLatencySleepModeNV.html + VULKAN_HPP_INLINE void SwapchainKHR::setLatencySleepModeNV( const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetLatencySleepModeNV && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkSetLatencySleepModeNV( static_cast( m_device ), + static_cast( m_swapchainKHR ), + reinterpret_cast( &sleepModeInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::setLatencySleepModeNV" ); + } + + // wrapper function for command vkLatencySleepNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkLatencySleepNV.html + VULKAN_HPP_INLINE void SwapchainKHR::latencySleepNV( const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkLatencySleepNV && "Function requires " ); + + getDispatcher()->vkLatencySleepNV( + static_cast( m_device ), static_cast( m_swapchainKHR ), reinterpret_cast( &sleepInfo ) ); + } + + // wrapper function for command vkSetLatencyMarkerNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLatencyMarkerNV.html + VULKAN_HPP_INLINE void SwapchainKHR::setLatencyMarkerNV( const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetLatencyMarkerNV && "Function requires " ); + + getDispatcher()->vkSetLatencyMarkerNV( static_cast( m_device ), + static_cast( m_swapchainKHR ), + reinterpret_cast( &latencyMarkerInfo ) ); + } + + // wrapper function for command vkGetLatencyTimingsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetLatencyTimingsNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector SwapchainKHR::getLatencyTimingsNV() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetLatencyTimingsNV && "Function requires " ); + + std::vector timings; + VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; + getDispatcher()->vkGetLatencyTimingsNV( + static_cast( m_device ), static_cast( m_swapchainKHR ), reinterpret_cast( &latencyMarkerInfo ) ); + timings.resize( latencyMarkerInfo.timingCount ); + latencyMarkerInfo.pTimings = timings.data(); + getDispatcher()->vkGetLatencyTimingsNV( + static_cast( m_device ), static_cast( m_swapchainKHR ), reinterpret_cast( &latencyMarkerInfo ) ); + + return timings; + } + + // wrapper function for command vkQueueNotifyOutOfBandNV, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueNotifyOutOfBandNV.html + VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueNotifyOutOfBandNV && "Function requires " ); + + getDispatcher()->vkQueueNotifyOutOfBandNV( static_cast( m_queue ), reinterpret_cast( &queueTypeInfo ) ); + } + + //=== VK_KHR_cooperative_matrix === + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getCooperativeMatrixPropertiesKHR() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( + static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + + // wrapper function for command vkCmdSetAttachmentFeedbackLoopEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetAttachmentFeedbackLoopEnableEXT.html + VULKAN_HPP_INLINE void CommandBuffer::setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAttachmentFeedbackLoopEnableEXT && + "Function requires " ); + + getDispatcher()->vkCmdSetAttachmentFeedbackLoopEnableEXT( static_cast( m_commandBuffer ), + static_cast( aspectMask ) ); + } + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + + // wrapper function for command vkGetScreenBufferPropertiesQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetScreenBufferPropertiesQNX.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX + Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetScreenBufferPropertiesQNX && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX properties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetScreenBufferPropertiesQNX( + static_cast( m_device ), &buffer, reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); + + return properties; + } + + // wrapper function for command vkGetScreenBufferPropertiesQNX, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetScreenBufferPropertiesQNX.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetScreenBufferPropertiesQNX && + "Function requires " ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX & properties = structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetScreenBufferPropertiesQNX( + static_cast( m_device ), &buffer, reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); + + return structureChain; + } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_line_rasterization === + + // wrapper function for command vkCmdSetLineStippleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStippleKHR.html + VULKAN_HPP_INLINE void CommandBuffer::setLineStippleKHR( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleKHR && + "Function requires or or " ); + + getDispatcher()->vkCmdSetLineStippleKHR( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); + } + + //=== VK_KHR_calibrated_timestamps === + + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getCalibrateableTimeDomainsKHR() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsKHR && + "Function requires or " ); + + std::vector timeDomains; + uint32_t timeDomainCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( static_cast( m_physicalDevice ), &timeDomainCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( + static_cast( m_physicalDevice ), &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + return timeDomains; + } + + // wrapper function for command vkGetCalibratedTimestampsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair, uint64_t> Device::getCalibratedTimestampsKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsKHR && + "Function requires or " ); + + std::pair, uint64_t> data_( std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); + std::vector & timestamps = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result 
result = static_cast( + getDispatcher()->vkGetCalibratedTimestampsKHR( static_cast( m_device ), + timestampInfos.size(), + reinterpret_cast( timestampInfos.data() ), + timestamps.data(), + &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); + + return data_; + } + + // wrapper function for command vkGetCalibratedTimestampsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsKHR.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair + Device::getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsKHR && + "Function requires or " ); + + std::pair data_; + uint64_t & timestamp = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetCalibratedTimestampsKHR( + static_cast( m_device ), 1, reinterpret_cast( ×tampInfo ), ×tamp, &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampKHR" ); + + return data_; + } + + //=== VK_KHR_maintenance6 === + + // wrapper function for command vkCmdBindDescriptorSets2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets2KHR.html + VULKAN_HPP_INLINE void + CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorSets2KHR && + "Function requires or " ); + + getDispatcher()->vkCmdBindDescriptorSets2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( &bindDescriptorSetsInfo ) ); + } + + // wrapper function for command vkCmdPushConstants2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants2KHR.html + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushConstants2KHR && "Function requires or " ); + + getDispatcher()->vkCmdPushConstants2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( &pushConstantsInfo ) ); + } + + // wrapper function for command vkCmdPushDescriptorSet2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet2KHR.html + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSet2KHR && + "Function requires or " ); + + getDispatcher()->vkCmdPushDescriptorSet2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( &pushDescriptorSetInfo ) ); + } + + // wrapper function for command vkCmdPushDescriptorSetWithTemplate2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate2KHR.html + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate2KHR( + const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetWithTemplate2KHR && + "Function requires or " ); + + getDispatcher()->vkCmdPushDescriptorSetWithTemplate2KHR( + static_cast( m_commandBuffer ), reinterpret_cast( 
&pushDescriptorSetWithTemplateInfo ) ); + } + + // wrapper function for command vkCmdSetDescriptorBufferOffsets2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDescriptorBufferOffsets2EXT.html + VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsets2EXT( + const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDescriptorBufferOffsets2EXT && + "Function requires " ); + + getDispatcher()->vkCmdSetDescriptorBufferOffsets2EXT( static_cast( m_commandBuffer ), + reinterpret_cast( &setDescriptorBufferOffsetsInfo ) ); + } + + // wrapper function for command vkCmdBindDescriptorBufferEmbeddedSamplers2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBufferEmbeddedSamplers2EXT.html + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT( + const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplers2EXT && + "Function requires " ); + + getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( + static_cast( m_commandBuffer ), + reinterpret_cast( &bindDescriptorBufferEmbeddedSamplersInfo ) ); + } + + //=== VK_QCOM_tile_memory_heap === + + // wrapper function for command vkCmdBindTileMemoryQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindTileMemoryQCOM.html + VULKAN_HPP_INLINE void + CommandBuffer::bindTileMemoryQCOM( Optional tileMemoryBindInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindTileMemoryQCOM && "Function requires " ); + + getDispatcher()->vkCmdBindTileMemoryQCOM( + static_cast( m_commandBuffer ), + reinterpret_cast( static_cast( tileMemoryBindInfo ) ) ); + } + + //=== VK_NV_external_compute_queue === + + // wrapper function for command vkCreateExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExternalComputeQueueNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createExternalComputeQueueNV( VULKAN_HPP_NAMESPACE::ExternalComputeQueueCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV externalQueue; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateExternalComputeQueueNV( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &externalQueue ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createExternalComputeQueueNV" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ExternalComputeQueueNV( + *this, *reinterpret_cast( &externalQueue ), allocator ); + } + + // wrapper function for command vkGetExternalComputeQueueDataNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExternalComputeQueueDataNV.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair + ExternalComputeQueueNV::getData() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
getDispatcher()->vkGetExternalComputeQueueDataNV && + "Function requires " ); + + std::pair data_; + VULKAN_HPP_NAMESPACE::ExternalComputeQueueDataParamsNV & arams = data_.first; + DataType & data = data_.second; + getDispatcher()->vkGetExternalComputeQueueDataNV( + static_cast( m_externalComputeQueueNV ), reinterpret_cast( &arams ), &data ); + + return data_; + } + + //=== VK_NV_cluster_acceleration_structure === + + // wrapper function for command vkGetClusterAccelerationStructureBuildSizesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetClusterAccelerationStructureBuildSizesNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR Device::getClusterAccelerationStructureBuildSizesNV( + const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetClusterAccelerationStructureBuildSizesNV && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; + getDispatcher()->vkGetClusterAccelerationStructureBuildSizesNV( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &sizeInfo ) ); + + return sizeInfo; + } + + // wrapper function for command vkCmdBuildClusterAccelerationStructureIndirectNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildClusterAccelerationStructureIndirectNV.html + VULKAN_HPP_INLINE void CommandBuffer::buildClusterAccelerationStructureIndirectNV( + const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureCommandsInfoNV & commandInfos ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildClusterAccelerationStructureIndirectNV && + "Function requires " ); + + getDispatcher()->vkCmdBuildClusterAccelerationStructureIndirectNV( + static_cast( m_commandBuffer ), reinterpret_cast( &commandInfos ) ); + } + + //=== VK_NV_partitioned_acceleration_structure === + + // wrapper function for command vkGetPartitionedAccelerationStructuresBuildSizesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPartitionedAccelerationStructuresBuildSizesNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + Device::getPartitionedAccelerationStructuresBuildSizesNV( const VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV & info ) const + VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPartitionedAccelerationStructuresBuildSizesNV && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; + getDispatcher()->vkGetPartitionedAccelerationStructuresBuildSizesNV( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &sizeInfo ) ); + + return sizeInfo; + } + + // wrapper function for command vkCmdBuildPartitionedAccelerationStructuresNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildPartitionedAccelerationStructuresNV.html + VULKAN_HPP_INLINE void CommandBuffer::buildPartitionedAccelerationStructuresNV( + const VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureInfoNV & buildInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildPartitionedAccelerationStructuresNV && + "Function requires " ); + + getDispatcher()->vkCmdBuildPartitionedAccelerationStructuresNV( static_cast( m_commandBuffer ), + reinterpret_cast( &buildInfo ) ); + } + + //=== VK_EXT_device_generated_commands === + 
+ // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getGeneratedCommandsMemoryRequirementsEXT( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsEXT && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsEXT( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getGeneratedCommandsMemoryRequirementsEXT( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsEXT && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsEXT( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + // wrapper function for command vkCmdPreprocessGeneratedCommandsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPreprocessGeneratedCommandsEXT.html + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPreprocessGeneratedCommandsEXT && + "Function requires " ); + + getDispatcher()->vkCmdPreprocessGeneratedCommandsEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &generatedCommandsInfo ), + static_cast( stateCommandBuffer ) ); + } + + // wrapper function for command vkCmdExecuteGeneratedCommandsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteGeneratedCommandsEXT.html + VULKAN_HPP_INLINE void + CommandBuffer::executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdExecuteGeneratedCommandsEXT && + "Function requires " ); + + getDispatcher()->vkCmdExecuteGeneratedCommandsEXT( static_cast( m_commandBuffer ), + static_cast( isPreprocessed ), + reinterpret_cast( &generatedCommandsInfo ) ); + } + + // wrapper function for command vkCreateIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT>::Type + Device::createIndirectCommandsLayoutEXT( 
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateIndirectCommandsLayoutEXT( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createIndirectCommandsLayoutEXT" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT( + *this, *reinterpret_cast( &indirectCommandsLayout ), allocator ); + } + + // wrapper function for command vkCreateIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectExecutionSetEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateIndirectExecutionSetEXT( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectExecutionSet ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createIndirectExecutionSetEXT" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectExecutionSetEXT( + *this, *reinterpret_cast( &indirectExecutionSet ), allocator ); + } + + // wrapper function for command vkUpdateIndirectExecutionSetPipelineEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateIndirectExecutionSetPipelineEXT.html + VULKAN_HPP_INLINE void IndirectExecutionSetEXT::updatePipeline( + VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateIndirectExecutionSetPipelineEXT && + "Function requires " ); + + getDispatcher()->vkUpdateIndirectExecutionSetPipelineEXT( static_cast( m_device ), + static_cast( m_indirectExecutionSetEXT ), + executionSetWrites.size(), + reinterpret_cast( executionSetWrites.data() ) ); + } + + // wrapper function for command vkUpdateIndirectExecutionSetShaderEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateIndirectExecutionSetShaderEXT.html + VULKAN_HPP_INLINE void IndirectExecutionSetEXT::updateShader( + VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateIndirectExecutionSetShaderEXT && + "Function requires " ); + + getDispatcher()->vkUpdateIndirectExecutionSetShaderEXT( static_cast( m_device ), + static_cast( m_indirectExecutionSetEXT ), + executionSetWrites.size(), + reinterpret_cast( executionSetWrites.data() ) ); + } + + //=== 
VK_NV_cooperative_matrix2 === + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + static_cast( m_physicalDevice ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + + // wrapper function for command vkGetMemoryMetalHandleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryMetalHandleEXT.html + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * + Device::getMemoryMetalHandleEXT( const VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT & getMetalHandleInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryMetalHandleEXT && "Function requires " ); + + void * handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetMemoryMetalHandleEXT( + static_cast( m_device ), reinterpret_cast( &getMetalHandleInfo ), &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryMetalHandleEXT" ); + + return handle; + } + + // wrapper function for command vkGetMemoryMetalHandlePropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryMetalHandlePropertiesEXT.html + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT + Device::getMemoryMetalHandlePropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HandleType const & handle ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryMetalHandlePropertiesEXT && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT memoryMetalHandleProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetMemoryMetalHandlePropertiesEXT( static_cast( m_device ), + static_cast( handleType ), + reinterpret_cast( &handle ), + reinterpret_cast( &memoryMetalHandleProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryMetalHandlePropertiesEXT" ); + + return memoryMetalHandleProperties; + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_fragment_density_map_offset === + + // wrapper function for 
command vkCmdEndRendering2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRendering2EXT.html + VULKAN_HPP_INLINE void + CommandBuffer::endRendering2EXT( Optional renderingEndInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRendering2EXT && "Function requires " ); + + getDispatcher()->vkCmdEndRendering2EXT( + static_cast( m_commandBuffer ), + reinterpret_cast( static_cast( renderingEndInfo ) ) ); + } + + //==================== + //=== RAII Helpers === + //==================== + + template + std::vector filterCppTypes( std::vector const & raiiTypes ) + { + std::vector cppTypes( raiiTypes.size() ); + std::transform( raiiTypes.begin(), raiiTypes.end(), cppTypes.begin(), []( RAIIType const & d ) { return *d; } ); + return cppTypes; + } + + template + std::vector filterCppTypes( std::vector const & raiiTypes, UnaryPredicate p ) + { + std::vector cppTypes; + for ( auto const & t : raiiTypes ) + { + if ( p( t ) ) + { + cppTypes.push_back( *t ); + } + } + return cppTypes; + } + + } // namespace VULKAN_HPP_RAII_NAMESPACE +} // namespace VULKAN_HPP_NAMESPACE +#endif +#endif \ No newline at end of file diff --git a/include/vulkan/vulkan_screen.h b/include/vulkan/vulkan_screen.h new file mode 100644 index 00000000..6c8bfd25 --- /dev/null +++ b/include/vulkan/vulkan_screen.h @@ -0,0 +1,108 @@ +#ifndef VULKAN_SCREEN_H_ +#define VULKAN_SCREEN_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_QNX_screen_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_QNX_screen_surface 1 +#define VK_QNX_SCREEN_SURFACE_SPEC_VERSION 1 +#define VK_QNX_SCREEN_SURFACE_EXTENSION_NAME "VK_QNX_screen_surface" +typedef VkFlags VkScreenSurfaceCreateFlagsQNX; +typedef struct VkScreenSurfaceCreateInfoQNX { + VkStructureType sType; + const void* pNext; + VkScreenSurfaceCreateFlagsQNX flags; + struct _screen_context* context; + struct _screen_window* window; +} VkScreenSurfaceCreateInfoQNX; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateScreenSurfaceQNX)(VkInstance instance, const VkScreenSurfaceCreateInfoQNX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct _screen_window* window); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateScreenSurfaceQNX( + VkInstance instance, + const VkScreenSurfaceCreateInfoQNX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceScreenPresentationSupportQNX( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct _screen_window* window); +#endif + + +// VK_QNX_external_memory_screen_buffer is a preprocessor guard. Do not pass it to API calls. 
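Editor's aside: a minimal sketch of how the VK_QNX_screen_surface entry points declared above might be used from application code. It assumes a QNX build with VK_USE_PLATFORM_SCREEN_QNX defined, prototypes available (VK_NO_PROTOTYPES not defined), and a screen context/window already obtained from the QNX Screen API; the function name createScreenSurfaceOrNull is illustrative and not part of this header.

// Illustrative sketch (editor's note), not part of vulkan_screen.h.
// Assumes VK_USE_PLATFORM_SCREEN_QNX is defined and that `context` and
// `window` were created via the QNX Screen API beforehand.
#include <vulkan/vulkan.h>

VkSurfaceKHR createScreenSurfaceOrNull( VkInstance instance,
                                        struct _screen_context * context,
                                        struct _screen_window *  window )
{
  VkScreenSurfaceCreateInfoQNX createInfo = {};
  createInfo.sType   = VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX;
  createInfo.context = context;
  createInfo.window  = window;

  VkSurfaceKHR surface = VK_NULL_HANDLE;
  if ( vkCreateScreenSurfaceQNX( instance, &createInfo, nullptr, &surface ) != VK_SUCCESS )
  {
    return VK_NULL_HANDLE;  // creation failed; the caller decides how to report it
  }
  return surface;
}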
+#define VK_QNX_external_memory_screen_buffer 1 +#define VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_SPEC_VERSION 1 +#define VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_EXTENSION_NAME "VK_QNX_external_memory_screen_buffer" +typedef struct VkScreenBufferPropertiesQNX { + VkStructureType sType; + void* pNext; + VkDeviceSize allocationSize; + uint32_t memoryTypeBits; +} VkScreenBufferPropertiesQNX; + +typedef struct VkScreenBufferFormatPropertiesQNX { + VkStructureType sType; + void* pNext; + VkFormat format; + uint64_t externalFormat; + uint64_t screenUsage; + VkFormatFeatureFlags formatFeatures; + VkComponentMapping samplerYcbcrConversionComponents; + VkSamplerYcbcrModelConversion suggestedYcbcrModel; + VkSamplerYcbcrRange suggestedYcbcrRange; + VkChromaLocation suggestedXChromaOffset; + VkChromaLocation suggestedYChromaOffset; +} VkScreenBufferFormatPropertiesQNX; + +typedef struct VkImportScreenBufferInfoQNX { + VkStructureType sType; + const void* pNext; + struct _screen_buffer* buffer; +} VkImportScreenBufferInfoQNX; + +typedef struct VkExternalFormatQNX { + VkStructureType sType; + void* pNext; + uint64_t externalFormat; +} VkExternalFormatQNX; + +typedef struct VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX { + VkStructureType sType; + void* pNext; + VkBool32 screenBufferImport; +} VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; + +typedef VkResult (VKAPI_PTR *PFN_vkGetScreenBufferPropertiesQNX)(VkDevice device, const struct _screen_buffer* buffer, VkScreenBufferPropertiesQNX* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetScreenBufferPropertiesQNX( + VkDevice device, + const struct _screen_buffer* buffer, + VkScreenBufferPropertiesQNX* pProperties); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_shared.hpp b/include/vulkan/vulkan_shared.hpp new file mode 100644 index 00000000..6cdd136d --- /dev/null +++ b/include/vulkan/vulkan_shared.hpp @@ -0,0 +1,1175 @@ +// Copyright 2015-2025 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
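Editor's aside: the shared-handle types declared in this header are reference counted, and a child handle keeps its parent alive through the stored SharedHeader. A minimal usage sketch, assuming the default vk namespace, an initialized default dispatcher, and `device`/`buffer` handles created elsewhere (all of which are assumptions, not part of the header itself):

// Illustrative sketch only (editor's note), not part of the generated header.
#include <vulkan/vulkan_shared.hpp>

void shareHandles( vk::Device device, vk::Buffer buffer )
{
  // Device's traits use NoDestructor, so it is wrapped without a parent; the
  // SharedDevice now owns the device and destroys it when the last copy drops.
  vk::SharedDevice sharedDevice( device );

  // Buffer's DestructorType is Device, so the shared buffer records the shared
  // device as its parent and keeps it alive while the buffer is referenced.
  vk::SharedBuffer sharedBuffer( buffer, sharedDevice );

  vk::SharedBuffer alias = sharedBuffer;  // bumps the atomic reference count
  // When the last SharedBuffer goes away the buffer is destroyed first, and the
  // parent SharedDevice it held may then release the device as well.
}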
+ +#ifndef VULKAN_SHARED_HPP +#define VULKAN_SHARED_HPP + +#include + +#if !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) +# include // std::atomic_size_t +#endif + +namespace VULKAN_HPP_NAMESPACE +{ +#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) + + template + class SharedHandleTraits; + + class NoDestructor + { + }; + + template + struct HasDestructorType : std::false_type + { + }; + + template + struct HasDestructorType::DestructorType() )> : std::true_type + { + }; + + template + struct GetDestructorType + { + using type = NoDestructor; + }; + + template + struct GetDestructorType::value>::type> + { + using type = typename SharedHandleTraits::DestructorType; + }; + + template + using DestructorTypeOf = typename GetDestructorType::type; + + template + struct HasDestructor : std::integral_constant, NoDestructor>::value> + { + }; + + template + struct HasPoolType : std::false_type + { + }; + + template + struct HasPoolType::deleter::PoolTypeExport() )> : std::true_type + { + }; + + template + struct GetPoolType + { + using type = NoDestructor; + }; + + template + struct GetPoolType::value>::type> + { + using type = typename SharedHandleTraits::deleter::PoolTypeExport; + }; + + //===================================================================================================================== + + template + class SharedHandle; + + template + struct SharedHeader + { + SharedHeader( SharedHandle parent, Deleter deleter = Deleter() ) VULKAN_HPP_NOEXCEPT + : parent( std::move( parent ) ) + , deleter( std::move( deleter ) ) + { + } + + SharedHandle parent; + Deleter deleter; + }; + + template + struct SharedHeader + { + SharedHeader( Deleter deleter = Deleter() ) VULKAN_HPP_NOEXCEPT : deleter( std::move( deleter ) ) {} + + Deleter deleter; + }; + + //===================================================================================================================== + + template + class ReferenceCounter + { + public: + template + ReferenceCounter( Args &&... control_args ) : m_header( std::forward( control_args )... ) + { + } + + ReferenceCounter( const ReferenceCounter & ) = delete; + ReferenceCounter & operator=( const ReferenceCounter & ) = delete; + + public: + size_t addRef() VULKAN_HPP_NOEXCEPT + { + // Relaxed memory order is sufficient since this does not impose any ordering on other operations + return m_ref_cnt.fetch_add( 1, std::memory_order_relaxed ); + } + + size_t release() VULKAN_HPP_NOEXCEPT + { + // A release memory order to ensure that all releases are ordered + return m_ref_cnt.fetch_sub( 1, std::memory_order_release ); + } + + public: + std::atomic_size_t m_ref_cnt{ 1 }; + HeaderType m_header{}; + }; + + //===================================================================================================================== + + template > + class SharedHandleBase + { + public: + SharedHandleBase() = default; + + template + SharedHandleBase( HandleType handle, Args &&... control_args ) + : m_control( new ReferenceCounter( std::forward( control_args )... 
) ), m_handle( handle ) + { + } + + SharedHandleBase( const SharedHandleBase & o ) VULKAN_HPP_NOEXCEPT + { + o.addRef(); + m_handle = o.m_handle; + m_control = o.m_control; + } + + SharedHandleBase( SharedHandleBase && o ) VULKAN_HPP_NOEXCEPT + : m_control( o.m_control ) + , m_handle( o.m_handle ) + { + o.m_handle = nullptr; + o.m_control = nullptr; + } + + SharedHandleBase & operator=( const SharedHandleBase & o ) VULKAN_HPP_NOEXCEPT + { + SharedHandleBase( o ).swap( *this ); + return *this; + } + + SharedHandleBase & operator=( SharedHandleBase && o ) VULKAN_HPP_NOEXCEPT + { + SharedHandleBase( std::move( o ) ).swap( *this ); + return *this; + } + + ~SharedHandleBase() + { + // only this function owns the last reference to the control block + // the same principle is used in the default deleter of std::shared_ptr + if ( m_control && ( m_control->release() == 1 ) ) + { + // noop in x86, but does thread synchronization in ARM + // it is required to ensure that last thread is getting to destroy the control block + // by ordering all atomic operations before this fence + std::atomic_thread_fence( std::memory_order_acquire ); + ForwardType::internalDestroy( getHeader(), m_handle ); + delete m_control; + } + } + + public: + HandleType get() const VULKAN_HPP_NOEXCEPT + { + return m_handle; + } + + HandleType operator*() const VULKAN_HPP_NOEXCEPT + { + return m_handle; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return bool( m_handle ); + } + +# if defined( VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST ) + operator HandleType() const VULKAN_HPP_NOEXCEPT + { + return m_handle; + } +# endif + + const HandleType * operator->() const VULKAN_HPP_NOEXCEPT + { + return &m_handle; + } + + HandleType * operator->() VULKAN_HPP_NOEXCEPT + { + return &m_handle; + } + + void reset() VULKAN_HPP_NOEXCEPT + { + SharedHandleBase().swap( *this ); + } + + void swap( SharedHandleBase & o ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_handle, o.m_handle ); + std::swap( m_control, o.m_control ); + } + + template + typename std::enable_if::value, const SharedHandle> &>::type getDestructorType() const VULKAN_HPP_NOEXCEPT + { + return getHeader().parent; + } + + protected: + template + static typename std::enable_if::value, void>::type internalDestroy( const HeaderType & control, HandleType handle ) VULKAN_HPP_NOEXCEPT + { + control.deleter.destroy( handle ); + } + + template + static typename std::enable_if::value, void>::type internalDestroy( const HeaderType & control, HandleType handle ) VULKAN_HPP_NOEXCEPT + { + control.deleter.destroy( control.parent.get(), handle ); + } + + const HeaderType & getHeader() const VULKAN_HPP_NOEXCEPT + { + return m_control->m_header; + } + + private: + void addRef() const VULKAN_HPP_NOEXCEPT + { + if ( m_control ) + m_control->addRef(); + } + + protected: + ReferenceCounter * m_control = nullptr; + HandleType m_handle{}; + }; + + template + class SharedHandle : public SharedHandleBase, typename SharedHandleTraits::deleter>> + { + private: + using BaseType = SharedHandleBase, typename SharedHandleTraits::deleter>>; + using DeleterType = typename SharedHandleTraits::deleter; + friend BaseType; + + public: + SharedHandle() = default; + + template ::value && !HasPoolType::value>::type> + explicit SharedHandle( HandleType handle, SharedHandle> parent, DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT + : BaseType( handle, std::move( parent ), std::move( deleter ) ) + { + } + + template ::value && HasPoolType::value>::type> + explicit SharedHandle( HandleType handle, + 
SharedHandle> parent, + SharedHandle::type> pool, + const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT + : BaseType( handle, std::move( parent ), DeleterType{ std::move( pool ), dispatch } ) + { + } + + template ::value>::type> + explicit SharedHandle( HandleType handle, DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT : BaseType( handle, std::move( deleter ) ) + { + } + + protected: + using BaseType::internalDestroy; + }; + + namespace detail + { + +// Silence the function cast warnings. +# if defined( __GNUC__ ) && !defined( __clang__ ) && !defined( __INTEL_COMPILER ) +# pragma GCC diagnostic push +# pragma GCC diagnostic ignored "-Wcast-function-type" +# elif defined( __clang__ ) && ( __clang_major__ >= 13 ) && !defined( __INTEL_COMPILER ) +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wcast-function-type" +# endif + + template + class ObjectDestroyShared + { + public: + using DestructorType = typename SharedHandleTraits::DestructorType; + + template + using DestroyFunctionPointerType = + typename std::conditional::value, + void ( DestructorType::* )( HandleType, const AllocationCallbacks *, const Dispatcher & ) const, + void ( HandleType::* )( const AllocationCallbacks *, const Dispatcher & ) const>::type; + + using SelectorType = typename std::conditional::value, DestructorType, HandleType>::type; + + template + ObjectDestroyShared( Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &SelectorType::destroy ) ) ) + , m_dispatch( &dispatch ) + , m_allocationCallbacks( allocationCallbacks ) + { + } + + public: + template + typename std::enable_if::value, void>::type destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( parent.*m_destroy )( handle, m_allocationCallbacks, *m_dispatch ); + } + + template + typename std::enable_if::value, void>::type destroy( HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( handle.*m_destroy )( m_allocationCallbacks, *m_dispatch ); + } + + private: + DestroyFunctionPointerType m_destroy = nullptr; + const detail::DispatchLoaderBase * m_dispatch = nullptr; + Optional m_allocationCallbacks = nullptr; + }; + + template + class ObjectFreeShared + { + public: + using DestructorType = typename SharedHandleTraits::DestructorType; + + template + using DestroyFunctionPointerType = void ( DestructorType::* )( HandleType, const AllocationCallbacks *, const Dispatcher & ) const; + + template + ObjectFreeShared( Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &DestructorType::free ) ) ) + , m_dispatch( &dispatch ) + , m_allocationCallbacks( allocationCallbacks ) + { + } + + public: + void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( parent.*m_destroy )( handle, m_allocationCallbacks, *m_dispatch ); + } + + private: + DestroyFunctionPointerType m_destroy = nullptr; + const detail::DispatchLoaderBase * m_dispatch = nullptr; + Optional m_allocationCallbacks = nullptr; + }; + + template + class ObjectReleaseShared + { + public: + using DestructorType = typename SharedHandleTraits::DestructorType; + 
+ template + using DestroyFunctionPointerType = void ( DestructorType::* )( HandleType, const Dispatcher & ) const; + + template + ObjectReleaseShared( const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &DestructorType::release ) ) ) + , m_dispatch( &dispatch ) + { + } + + public: + void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( parent.*m_destroy )( handle, *m_dispatch ); + } + + private: + DestroyFunctionPointerType m_destroy = nullptr; + const detail::DispatchLoaderBase * m_dispatch = nullptr; + }; + + template + class PoolFreeShared + { + public: + using DestructorType = typename SharedHandleTraits::DestructorType; + + using PoolTypeExport = PoolType; + + template + using ReturnType = decltype( std::declval().free( PoolType(), 0u, nullptr, Dispatcher() ) ); + + template + using DestroyFunctionPointerType = ReturnType ( DestructorType::* )( PoolType, uint32_t, const HandleType *, const Dispatcher & ) const; + + PoolFreeShared() = default; + + template + PoolFreeShared( SharedHandle pool, const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &DestructorType::free ) ) ) + , m_dispatch( &dispatch ) + , m_pool( std::move( pool ) ) + { + } + + public: + void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch && m_pool ); + ( parent.*m_destroy )( m_pool.get(), 1u, &handle, *m_dispatch ); + } + + private: + DestroyFunctionPointerType m_destroy = nullptr; + const detail::DispatchLoaderBase * m_dispatch = nullptr; + SharedHandle m_pool{}; + }; + +# if defined( __GNUC__ ) && !defined( __clang__ ) && !defined( __INTEL_COMPILER ) +# pragma GCC diagnostic pop +# elif defined( __clang__ ) && ( __clang_major__ >= 13 ) && !defined( __INTEL_COMPILER ) +# pragma clang diagnostic pop +# endif + + } // namespace detail + + //====================== + //=== SHARED HANDLEs === + //====================== + + //=== VK_VERSION_1_0 === + template <> + class SharedHandleTraits + { + public: + using DestructorType = NoDestructor; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedInstance = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = NoDestructor; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedDevice = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectFreeShared; + }; + + using SharedDeviceMemory = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedFence = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedSemaphore = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedEvent = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedQueryPool = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = 
detail::ObjectDestroyShared; + }; + + using SharedBuffer = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedBufferView = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedImage = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedImageView = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedShaderModule = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedPipelineCache = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedPipeline = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedPipelineLayout = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedSampler = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedDescriptorPool = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::PoolFreeShared; + }; + + using SharedDescriptorSet = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedDescriptorSetLayout = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedFramebuffer = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedRenderPass = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedCommandPool = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::PoolFreeShared; + }; + + using SharedCommandBuffer = SharedHandle; + + //=== VK_VERSION_1_1 === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedSamplerYcbcrConversion = SharedHandle; + using SharedSamplerYcbcrConversionKHR = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedDescriptorUpdateTemplate = SharedHandle; + using SharedDescriptorUpdateTemplateKHR = SharedHandle; + + //=== VK_VERSION_1_3 === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = 
detail::ObjectDestroyShared; + }; + + using SharedPrivateDataSlot = SharedHandle; + using SharedPrivateDataSlotEXT = SharedHandle; + + //=== VK_KHR_surface === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Instance; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedSurfaceKHR = SharedHandle; + + //=== VK_KHR_swapchain === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedSwapchainKHR = SharedHandle; + + //=== VK_KHR_display === + template <> + class SharedHandleTraits + { + public: + using DestructorType = PhysicalDevice; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedDisplayKHR = SharedHandle; + + //=== VK_EXT_debug_report === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Instance; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedDebugReportCallbackEXT = SharedHandle; + + //=== VK_KHR_video_queue === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedVideoSessionKHR = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedVideoSessionParametersKHR = SharedHandle; + + //=== VK_NVX_binary_import === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedCuModuleNVX = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedCuFunctionNVX = SharedHandle; + + //=== VK_EXT_debug_utils === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Instance; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedDebugUtilsMessengerEXT = SharedHandle; + + //=== VK_KHR_acceleration_structure === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedAccelerationStructureKHR = SharedHandle; + + //=== VK_EXT_validation_cache === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedValidationCacheEXT = SharedHandle; + + //=== VK_NV_ray_tracing === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedAccelerationStructureNV = SharedHandle; + + //=== VK_INTEL_performance_query === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedPerformanceConfigurationINTEL = SharedHandle; + + //=== VK_KHR_deferred_host_operations === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedDeferredOperationKHR = SharedHandle; + + //=== VK_NV_device_generated_commands === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedIndirectCommandsLayoutNV = SharedHandle; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== 
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_NV_cuda_kernel_launch ===
+  template <>
+  class SharedHandleTraits<CudaModuleNV>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter = detail::ObjectDestroyShared<CudaModuleNV>;
+  };
+
+  using SharedCudaModuleNV = SharedHandle<CudaModuleNV>;
+
+  template <>
+  class SharedHandleTraits<CudaFunctionNV>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter = detail::ObjectDestroyShared<CudaFunctionNV>;
+  };
+
+  using SharedCudaFunctionNV = SharedHandle<CudaFunctionNV>;
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+  template <>
+  class SharedHandleTraits<BufferCollectionFUCHSIA>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter = detail::ObjectDestroyShared<BufferCollectionFUCHSIA>;
+  };
+
+  using SharedBufferCollectionFUCHSIA = SharedHandle<BufferCollectionFUCHSIA>;
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_EXT_opacity_micromap ===
+  template <>
+  class SharedHandleTraits<MicromapEXT>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter = detail::ObjectDestroyShared<MicromapEXT>;
+  };
+
+  using SharedMicromapEXT = SharedHandle<MicromapEXT>;
+
+  //=== VK_NV_optical_flow ===
+  template <>
+  class SharedHandleTraits<OpticalFlowSessionNV>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter = detail::ObjectDestroyShared<OpticalFlowSessionNV>;
+  };
+
+  using SharedOpticalFlowSessionNV = SharedHandle<OpticalFlowSessionNV>;
+
+  //=== VK_EXT_shader_object ===
+  template <>
+  class SharedHandleTraits<ShaderEXT>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter = detail::ObjectDestroyShared<ShaderEXT>;
+  };
+
+  using SharedShaderEXT = SharedHandle<ShaderEXT>;
+
+  //=== VK_KHR_pipeline_binary ===
+  template <>
+  class SharedHandleTraits<PipelineBinaryKHR>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter = detail::ObjectDestroyShared<PipelineBinaryKHR>;
+  };
+
+  using SharedPipelineBinaryKHR = SharedHandle<PipelineBinaryKHR>;
+
+  //=== VK_NV_external_compute_queue ===
+  template <>
+  class SharedHandleTraits<ExternalComputeQueueNV>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter = detail::ObjectDestroyShared<ExternalComputeQueueNV>;
+  };
+
+  using SharedExternalComputeQueueNV = SharedHandle<ExternalComputeQueueNV>;
+
+  //=== VK_EXT_device_generated_commands ===
+  template <>
+  class SharedHandleTraits<IndirectCommandsLayoutEXT>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter = detail::ObjectDestroyShared<IndirectCommandsLayoutEXT>;
+  };
+
+  using SharedIndirectCommandsLayoutEXT = SharedHandle<IndirectCommandsLayoutEXT>;
+
+  template <>
+  class SharedHandleTraits<IndirectExecutionSetEXT>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter = detail::ObjectDestroyShared<IndirectExecutionSetEXT>;
+  };
+
+  using SharedIndirectExecutionSetEXT = SharedHandle<IndirectExecutionSetEXT>;
+
+  enum class SwapchainOwns
+  {
+    no,
+    yes,
+  };
+
+  struct ImageHeader : SharedHeader<DestructorTypeOf<Image>, typename SharedHandleTraits<Image>::deleter>
+  {
+    ImageHeader(
+      SharedHandle<DestructorTypeOf<Image>> parent,
+      typename SharedHandleTraits<Image>::deleter deleter = typename SharedHandleTraits<Image>::deleter(),
+      SwapchainOwns swapchainOwned = SwapchainOwns::no ) VULKAN_HPP_NOEXCEPT
+      : SharedHeader<DestructorTypeOf<Image>, typename SharedHandleTraits<Image>::deleter>( std::move( parent ),
+                                                                                            std::move( deleter ) )
+      , swapchainOwned( swapchainOwned )
+    {
+    }
+
+    SwapchainOwns swapchainOwned = SwapchainOwns::no;
+  };
+
+  template <>
+  class SharedHandle<Image> : public SharedHandleBase<Image, ImageHeader>
+  {
+    using BaseType = SharedHandleBase<Image, ImageHeader>;
+    using DeleterType = typename SharedHandleTraits<Image>::deleter;
+    friend BaseType;
+
+  public:
+    SharedHandle() = default;
+
+    explicit SharedHandle( VULKAN_HPP_NAMESPACE::Image handle,
+                           SharedHandle<DestructorTypeOf<Image>> parent,
+                           SwapchainOwns swapchain_owned = SwapchainOwns::no,
+                           DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT
+      : BaseType( handle, std::move( parent ), std::move( deleter ), swapchain_owned )
+    {
+    }
+
+  protected:
+    static void internalDestroy( const ImageHeader & control, VULKAN_HPP_NAMESPACE::Image handle ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( control.swapchainOwned == SwapchainOwns::no )
+      {
+        control.deleter.destroy( control.parent.get(), handle );
+      }
+    }
+  };
+
+  struct SwapchainHeader
+  {
+    SwapchainHeader( SharedHandle<SurfaceKHR> surface,
+                     SharedHandle<DestructorTypeOf<SwapchainKHR>> parent,
+                     typename SharedHandleTraits<SwapchainKHR>::deleter deleter =
+                       typename SharedHandleTraits<SwapchainKHR>::deleter() ) VULKAN_HPP_NOEXCEPT
+      : surface( std::move( surface ) )
+      , parent( std::move( parent ) )
+      , deleter( std::move( deleter ) )
+    {
+    }
+
+    SharedHandle<SurfaceKHR> surface{};
+    SharedHandle<DestructorTypeOf<SwapchainKHR>> parent{};
+    typename SharedHandleTraits<SwapchainKHR>::deleter deleter{};
+  };
+
+  template <>
+  class SharedHandle<SwapchainKHR> : public SharedHandleBase<SwapchainKHR, SwapchainHeader>
+  {
+    using BaseType = SharedHandleBase<SwapchainKHR, SwapchainHeader>;
+    using DeleterType = typename SharedHandleTraits<SwapchainKHR>::deleter;
+    friend BaseType;
+
+  public:
+    SharedHandle() = default;
+
+    explicit SharedHandle( VULKAN_HPP_NAMESPACE::SwapchainKHR handle,
+                           SharedHandle<DestructorTypeOf<SwapchainKHR>> parent,
+                           SharedHandle<SurfaceKHR> surface,
+                           DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT
+      : BaseType( handle, std::move( surface ), std::move( parent ), std::move( deleter ) )
+    {
+    }
+
+  public:
+    const SharedHandle<SurfaceKHR> & getSurface() const VULKAN_HPP_NOEXCEPT
+    {
+      return getHeader().surface;
+    }
+
+  protected:
+    using BaseType::internalDestroy;
+  };
+
+  template <typename HandleType, typename DestructorType>
+  class SharedHandleBaseNoDestroy : public SharedHandleBase<HandleType, DestructorType>
+  {
+  public:
+    using SharedHandleBase<HandleType, DestructorType>::SharedHandleBase;
+
+    const DestructorType & getDestructorType() const VULKAN_HPP_NOEXCEPT
+    {
+      return SharedHandleBase<HandleType, DestructorType>::getHeader();
+    }
+
+  protected:
+    static void internalDestroy( const DestructorType &, HandleType ) VULKAN_HPP_NOEXCEPT {}
+  };
+
+  //=== VK_VERSION_1_0 ===
+
+  template <>
+  class SharedHandle<PhysicalDevice> : public SharedHandleBaseNoDestroy<PhysicalDevice, SharedInstance>
+  {
+    friend SharedHandleBase<PhysicalDevice, SharedInstance>;
+
+  public:
+    SharedHandle() = default;
+
+    explicit SharedHandle( PhysicalDevice handle, SharedInstance parent ) noexcept
+      : SharedHandleBaseNoDestroy<PhysicalDevice, SharedInstance>( handle, std::move( parent ) )
+    {
+    }
+  };
+
+  using SharedPhysicalDevice = SharedHandle<PhysicalDevice>;
+
+  template <>
+  class SharedHandle<Queue> : public SharedHandleBaseNoDestroy<Queue, SharedDevice>
+  {
+    friend SharedHandleBase<Queue, SharedDevice>;
+
+  public:
+    SharedHandle() = default;
+
+    explicit SharedHandle( Queue handle, SharedDevice parent ) noexcept : SharedHandleBaseNoDestroy<Queue, SharedDevice>( handle, std::move( parent ) ) {}
+  };
+
+  using SharedQueue = SharedHandle<Queue>;
+
+  //=== VK_KHR_display ===
+
+  template <>
+  class SharedHandle<DisplayModeKHR> : public SharedHandleBaseNoDestroy<DisplayModeKHR, SharedDisplayKHR>
+  {
+    friend SharedHandleBase<DisplayModeKHR, SharedDisplayKHR>;
+
+  public:
+    SharedHandle() = default;
+
+    explicit SharedHandle( DisplayModeKHR handle, SharedDisplayKHR parent ) noexcept
+      : SharedHandleBaseNoDestroy<DisplayModeKHR, SharedDisplayKHR>( handle, std::move( parent ) )
+    {
+    }
+  };
+
+  using SharedDisplayModeKHR = SharedHandle<DisplayModeKHR>;
+#endif // !VULKAN_HPP_NO_SMART_HANDLE
+} // namespace VULKAN_HPP_NAMESPACE
+#endif // VULKAN_SHARED_HPP
diff --git a/include/vulkan/vulkan_static_assertions.hpp b/include/vulkan/vulkan_static_assertions.hpp
new file mode 100644
index 00000000..ebfc9bfc
--- /dev/null
+++ b/include/vulkan/vulkan_static_assertions.hpp
@@ -0,0 +1,8673 @@
+// Copyright 2015-2025 The Khronos Group Inc.
+//
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_STATIC_ASSERTIONS_HPP
+#define VULKAN_STATIC_ASSERTIONS_HPP
+
+#include <vulkan/vulkan.hpp>
+
+//=========================
+//=== static_assertions ===
+//=========================
+
+//=== VK_VERSION_1_0 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!"
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Extent2D is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Extent3D is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Offset2D is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Offset3D is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Rect2D is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BaseInStructure is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BaseOutStructure ) == sizeof( VkBaseOutStructure ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BaseOutStructure is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferMemoryBarrier is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DispatchIndirectCommand is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DrawIndexedIndirectCommand is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DrawIndirectCommand is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageMemoryBarrier is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "MemoryBarrier is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne ) == sizeof( VkPipelineCacheHeaderVersionOne ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineCacheHeaderVersionOne is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AllocationCallbacks is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ApplicationInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "FormatProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties ) == sizeof( VkImageFormatProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageFormatProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "Instance is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Instance is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "InstanceCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "MemoryHeap is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "MemoryType is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "PhysicalDevice is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevice is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLimits is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMemoryProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceProperties is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceSparseProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueueFamilyProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "Device is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Device is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceQueueCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExtensionProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LayerProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "Queue is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Queue is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "SubmitInfo is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MappedMemoryRange is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryAllocateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "DeviceMemory is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DeviceMemory is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryRequirements is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindSparseInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageSubresource is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SparseBufferMemoryBindInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SparseImageFormatProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SparseImageMemoryBind is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SparseImageMemoryBindInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SparseImageMemoryRequirements is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SparseImageOpaqueMemoryBindInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SparseMemoryBind is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "Fence is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Fence is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "FenceCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "Semaphore is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Semaphore is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SemaphoreCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "Event is not copy_constructible!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Event is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "EventCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "QueryPool is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "QueryPool is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueryPoolCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "Buffer is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Buffer is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "BufferView is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "BufferView is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferViewCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Image ) == sizeof( VkImage ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "Image is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Image is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageCreateInfo is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubresourceLayout is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ComponentMapping is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageSubresourceRange is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "ImageView is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ImageView is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageViewCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "ShaderModule is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ShaderModule is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ShaderModuleCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "PipelineCache is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "PipelineCache is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineCacheCreateInfo is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ComputePipelineCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GraphicsPipelineCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "Pipeline is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Pipeline is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineColorBlendAttachmentState is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineColorBlendStateCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineDepthStencilStateCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineDynamicStateCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineInputAssemblyStateCreateInfo is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineMultisampleStateCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineRasterizationStateCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineShaderStageCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineTessellationStateCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineVertexInputStateCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineViewportStateCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SpecializationInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SpecializationMapEntry is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "StencilOpState is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VertexInputAttributeDescription is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VertexInputBindingDescription is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Viewport is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "PipelineLayout is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineLayout is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineLayoutCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PushConstantRange is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "Sampler is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Sampler is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyDescriptorSet is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorBufferInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorImageInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "DescriptorPool is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorPool is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorPoolCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorPoolSize is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "DescriptorSet is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DescriptorSet is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorSetAllocateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "DescriptorSetLayout is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorSetLayout is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorSetLayoutBinding is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorSetLayoutCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "WriteDescriptorSet is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription ) == sizeof( VkAttachmentDescription ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AttachmentDescription is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AttachmentReference is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "Framebuffer is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Framebuffer is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "FramebufferCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "RenderPass is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "RenderPass is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubpassDependency is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubpassDescription is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "CommandPool is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CommandPool is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CommandPoolCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "CommandBuffer is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CommandBuffer is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CommandBufferAllocateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CommandBufferBeginInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CommandBufferInheritanceInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "BufferCopy is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferImageCopy is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClearAttachment is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) == sizeof( VkClearColorValue ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClearColorValue is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClearDepthStencilValue is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ClearRect is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) == sizeof( VkClearValue ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ClearValue is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ImageBlit is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ImageCopy is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageResolve>::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageResolve>::value, "ImageResolve is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers>::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers>::value, + "ImageSubresourceLayers is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo>::value, + "RenderPassBeginInfo is not nothrow_move_constructible!" ); + +//=== VK_VERSION_1_1 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties>::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties>::value, + "PhysicalDeviceSubgroupProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo>::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo>::value, + "BindBufferMemoryInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo>::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo>::value, + "BindImageMemoryInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>::value, + "PhysicalDevice16BitStorageFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>::value, + "MemoryDedicatedRequirements is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), + "struct and wrapper have different size!"
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryDedicatedAllocateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryAllocateFlagsInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceGroupRenderPassBeginInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceGroupCommandBufferBeginInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceGroupSubmitInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceGroupBindSparseInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindBufferMemoryDeviceGroupInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindImageMemoryDeviceGroupInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceGroupProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceGroupDeviceCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferMemoryRequirementsInfo2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageMemoryRequirementsInfo2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageSparseMemoryRequirementsInfo2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryRequirements2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SparseImageMemoryRequirements2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFeatures2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceProperties2 is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "FormatProperties2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageFormatProperties2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageFormatInfo2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueueFamilyProperties2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMemoryProperties2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SparseImageFormatProperties2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceSparseImageFormatInfo2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePointClippingProperties is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassInputAttachmentAspectCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "InputAttachmentAspectReference is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageViewUsageCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo ) == + sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineTessellationDomainOriginStateCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassMultiviewCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMultiviewFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMultiviewProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVariablePointersFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceProtectedMemoryFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceProtectedMemoryProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceQueueInfo2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ProtectedSubmitInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerYcbcrConversionCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerYcbcrConversionInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindImagePlaneMemoryInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImagePlaneMemoryRequirementsInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures ) == + sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceSamplerYcbcrConversionFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties ) == + sizeof( VkSamplerYcbcrConversionImageFormatProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerYcbcrConversionImageFormatProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), + "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "SamplerYcbcrConversion is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerYcbcrConversion is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), + "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "DescriptorUpdateTemplate is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorUpdateTemplate is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorUpdateTemplateEntry is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorUpdateTemplateCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalMemoryProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalImageFormatInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalImageFormatProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalBufferInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalBufferProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceIDProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalMemoryImageCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalMemoryBufferCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportMemoryAllocateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalFenceInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalFenceProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportFenceCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportSemaphoreCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalSemaphoreInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalSemaphoreProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance3Properties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorSetLayoutSupport is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderDrawParametersFeatures is not nothrow_move_constructible!" ); + +//=== VK_VERSION_1_2 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features ) == sizeof( VkPhysicalDeviceVulkan11Features ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVulkan11Features is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties ) == sizeof( VkPhysicalDeviceVulkan11Properties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVulkan11Properties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features ) == sizeof( VkPhysicalDeviceVulkan12Features ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVulkan12Features is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties ) == sizeof( VkPhysicalDeviceVulkan12Properties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVulkan12Properties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo ) == sizeof( VkImageFormatListCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageFormatListCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassCreateInfo2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription2 ) == sizeof( VkAttachmentDescription2 ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AttachmentDescription2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference2 ) == sizeof( VkAttachmentReference2 ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AttachmentReference2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription2 ) == sizeof( VkSubpassDescription2 ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubpassDescription2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency2 ) == sizeof( VkSubpassDependency2 ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubpassDependency2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubpassBeginInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubpassEndInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures ) == sizeof( VkPhysicalDevice8BitStorageFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevice8BitStorageFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConformanceVersion ) == sizeof( VkConformanceVersion ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ConformanceVersion is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties ) == sizeof( VkPhysicalDeviceDriverProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDriverProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features ) == sizeof( VkPhysicalDeviceShaderAtomicInt64Features ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderAtomicInt64Features is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features ) == sizeof( VkPhysicalDeviceShaderFloat16Int8Features ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderFloat16Int8Features is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties ) == sizeof( VkPhysicalDeviceFloatControlsProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFloatControlsProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorSetLayoutBindingFlagsCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDescriptorIndexingFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties ) == sizeof( VkPhysicalDeviceDescriptorIndexingProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDescriptorIndexingProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo ) == + sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorSetVariableDescriptorCountAllocateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport ) == + sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorSetVariableDescriptorCountLayoutSupport is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve ) == sizeof( VkSubpassDescriptionDepthStencilResolve ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubpassDescriptionDepthStencilResolve is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties ) == + sizeof( VkPhysicalDeviceDepthStencilResolveProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDepthStencilResolveProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures ) == sizeof( VkPhysicalDeviceScalarBlockLayoutFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceScalarBlockLayoutFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo ) == sizeof( VkImageStencilUsageCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageStencilUsageCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo ) == sizeof( VkSamplerReductionModeCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerReductionModeCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties ) == + sizeof( VkPhysicalDeviceSamplerFilterMinmaxProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceSamplerFilterMinmaxProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVulkanMemoryModelFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImagelessFramebufferFeatures is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo ) == sizeof( VkFramebufferAttachmentsCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "FramebufferAttachmentsCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo ) == sizeof( VkFramebufferAttachmentImageInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "FramebufferAttachmentImageInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo ) == sizeof( VkRenderPassAttachmentBeginInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassAttachmentBeginInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures ) == + sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceUniformBufferStandardLayoutFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) == + sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderSubgroupExtendedTypesFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) == + sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceSeparateDepthStencilLayoutsFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout ) == sizeof( VkAttachmentReferenceStencilLayout ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AttachmentReferenceStencilLayout is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout ) == sizeof( VkAttachmentDescriptionStencilLayout ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AttachmentDescriptionStencilLayout is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures ) == sizeof( VkPhysicalDeviceHostQueryResetFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceHostQueryResetFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures ) == sizeof( VkPhysicalDeviceTimelineSemaphoreFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceTimelineSemaphoreFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties ) == sizeof( VkPhysicalDeviceTimelineSemaphoreProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceTimelineSemaphoreProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo ) == sizeof( VkSemaphoreTypeCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SemaphoreTypeCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo ) == sizeof( VkTimelineSemaphoreSubmitInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "TimelineSemaphoreSubmitInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SemaphoreWaitInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SemaphoreSignalInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceBufferDeviceAddressFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo ) == sizeof( VkBufferDeviceAddressInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferDeviceAddressInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo ) == sizeof( VkBufferOpaqueCaptureAddressCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferOpaqueCaptureAddressCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo ) == sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryOpaqueCaptureAddressAllocateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo ) == sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceMemoryOpaqueCaptureAddressInfo is not nothrow_move_constructible!" ); + +//=== VK_VERSION_1_3 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features ) == sizeof( VkPhysicalDeviceVulkan13Features ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVulkan13Features is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties ) == sizeof( VkPhysicalDeviceVulkan13Properties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVulkan13Properties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo ) == sizeof( VkPipelineCreationFeedbackCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineCreationFeedbackCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback ) == sizeof( VkPipelineCreationFeedback ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineCreationFeedback is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures ) == + sizeof( VkPhysicalDeviceShaderTerminateInvocationFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderTerminateInvocationFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties ) == sizeof( VkPhysicalDeviceToolProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceToolProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures ) == + sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderDemoteToHelperInvocationFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures ) == sizeof( VkPhysicalDevicePrivateDataFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePrivateDataFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo ) == sizeof( VkDevicePrivateDataCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DevicePrivateDataCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo ) == sizeof( VkPrivateDataSlotCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PrivateDataSlotCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlot ) == sizeof( VkPrivateDataSlot ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "PrivateDataSlot is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PrivateDataSlot is not nothrow_move_constructible!" 
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures ) == sizeof( VkPhysicalDevicePipelineCreationCacheControlFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures>::value, "PhysicalDevicePipelineCreationCacheControlFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier2 ) == sizeof( VkMemoryBarrier2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryBarrier2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryBarrier2>::value, "MemoryBarrier2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 ) == sizeof( VkBufferMemoryBarrier2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2>::value, "BufferMemoryBarrier2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 ) == sizeof( VkImageMemoryBarrier2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2>::value, "ImageMemoryBarrier2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DependencyInfo ) == sizeof( VkDependencyInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DependencyInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DependencyInfo>::value, "DependencyInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo2 ) == sizeof( VkSubmitInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubmitInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubmitInfo2>::value, "SubmitInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo ) == sizeof( VkSemaphoreSubmitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo>::value, "SemaphoreSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo ) == sizeof( VkCommandBufferSubmitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo>::value, "CommandBufferSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features ) == sizeof( VkPhysicalDeviceSynchronization2Features ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features>::value, "PhysicalDeviceSynchronization2Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures ) == sizeof( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>::value, "PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures ) == sizeof( VkPhysicalDeviceImageRobustnessFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures>::value, "PhysicalDeviceImageRobustnessFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyBufferInfo2 ) == sizeof( VkCopyBufferInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyBufferInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyBufferInfo2>::value, "CopyBufferInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageInfo2 ) == sizeof( VkCopyImageInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageInfo2>::value, "CopyImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 ) == sizeof( VkCopyBufferToImageInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2>::value, "CopyBufferToImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 ) == sizeof( VkCopyImageToBufferInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2>::value, "CopyImageToBufferInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BlitImageInfo2 ) == sizeof( VkBlitImageInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BlitImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BlitImageInfo2>::value, "BlitImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ResolveImageInfo2 ) == sizeof( VkResolveImageInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ResolveImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ResolveImageInfo2>::value, "ResolveImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCopy2 ) == sizeof( VkBufferCopy2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCopy2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCopy2>::value, "BufferCopy2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCopy2 ) == sizeof( VkImageCopy2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCopy2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCopy2>::value, "ImageCopy2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageBlit2 ) == sizeof( VkImageBlit2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageBlit2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageBlit2>::value, "ImageBlit2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferImageCopy2 ) == sizeof( VkBufferImageCopy2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferImageCopy2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferImageCopy2>::value, "BufferImageCopy2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageResolve2 ) == sizeof( VkImageResolve2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageResolve2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageResolve2>::value, "ImageResolve2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures ) == sizeof( VkPhysicalDeviceSubgroupSizeControlFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures>::value, "PhysicalDeviceSubgroupSizeControlFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties ) == sizeof( VkPhysicalDeviceSubgroupSizeControlProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties>::value, "PhysicalDeviceSubgroupSizeControlProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo ) == sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>::value, "PipelineShaderStageRequiredSubgroupSizeCreateInfo is not nothrow_move_constructible!"
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceInlineUniformBlockFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties ) == sizeof( VkPhysicalDeviceInlineUniformBlockProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceInlineUniformBlockProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock ) == sizeof( VkWriteDescriptorSetInlineUniformBlock ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "WriteDescriptorSetInlineUniformBlock is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorPoolInlineUniformBlockCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures ) == + sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceTextureCompressionASTCHDRFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingInfo ) == sizeof( VkRenderingInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "RenderingInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo ) == sizeof( VkRenderingAttachmentInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingAttachmentInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo ) == sizeof( VkPipelineRenderingCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineRenderingCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures ) == sizeof( VkPhysicalDeviceDynamicRenderingFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDynamicRenderingFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo ) == sizeof( VkCommandBufferInheritanceRenderingInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CommandBufferInheritanceRenderingInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures ) == + sizeof( VkPhysicalDeviceShaderIntegerDotProductFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderIntegerDotProductFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties ) == + sizeof( VkPhysicalDeviceShaderIntegerDotProductProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderIntegerDotProductProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties ) == + sizeof( VkPhysicalDeviceTexelBufferAlignmentProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceTexelBufferAlignmentProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties3 ) == sizeof( VkFormatProperties3 ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "FormatProperties3 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features ) == sizeof( VkPhysicalDeviceMaintenance4Features ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance4Features is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties ) == sizeof( VkPhysicalDeviceMaintenance4Properties ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance4Properties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements ) == sizeof( VkDeviceBufferMemoryRequirements ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceBufferMemoryRequirements is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements ) == sizeof( VkDeviceImageMemoryRequirements ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceImageMemoryRequirements is not nothrow_move_constructible!" ); + +//=== VK_VERSION_1_4 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Features ) == sizeof( VkPhysicalDeviceVulkan14Features ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVulkan14Features is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Properties ) == sizeof( VkPhysicalDeviceVulkan14Properties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVulkan14Properties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfo ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceQueueGlobalPriorityCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeatures ) == sizeof( VkPhysicalDeviceGlobalPriorityQueryFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceGlobalPriorityQueryFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityProperties ) == sizeof( VkQueueFamilyGlobalPriorityProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueueFamilyGlobalPriorityProperties is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupRotateFeatures ) == sizeof( VkPhysicalDeviceShaderSubgroupRotateFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderSubgroupRotateFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloatControls2Features ) == sizeof( VkPhysicalDeviceShaderFloatControls2Features ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderFloatControls2Features is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderExpectAssumeFeatures ) == sizeof( VkPhysicalDeviceShaderExpectAssumeFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderExpectAssumeFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeatures ) == sizeof( VkPhysicalDeviceLineRasterizationFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLineRasterizationFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationProperties ) == sizeof( VkPhysicalDeviceLineRasterizationProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLineRasterizationProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfo ) == sizeof( VkPipelineRasterizationLineStateCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineRasterizationLineStateCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorProperties ) == + sizeof( VkPhysicalDeviceVertexAttributeDivisorProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVertexAttributeDivisorProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription ) == sizeof( VkVertexInputBindingDivisorDescription ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VertexInputBindingDivisorDescription is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfo ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineVertexInputDivisorStateCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeatures ) == + sizeof( VkPhysicalDeviceVertexAttributeDivisorFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVertexAttributeDivisorFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8Features ) == sizeof( VkPhysicalDeviceIndexTypeUint8Features ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceIndexTypeUint8Features is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryMapInfo ) == sizeof( VkMemoryMapInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "MemoryMapInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryUnmapInfo ) == sizeof( VkMemoryUnmapInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryUnmapInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Features ) == sizeof( VkPhysicalDeviceMaintenance5Features ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance5Features is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Properties ) == sizeof( VkPhysicalDeviceMaintenance5Properties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance5Properties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAreaInfo ) == sizeof( VkRenderingAreaInfo ), "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingAreaInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo ) == sizeof( VkDeviceImageSubresourceInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceImageSubresourceInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource2 ) == sizeof( VkImageSubresource2 ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageSubresource2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout2 ) == sizeof( VkSubresourceLayout2 ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubresourceLayout2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfo ) == sizeof( VkPipelineCreateFlags2CreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineCreateFlags2CreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfo ) == sizeof( VkBufferUsageFlags2CreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferUsageFlags2CreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorProperties ) == sizeof( VkPhysicalDevicePushDescriptorProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePushDescriptorProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingLocalReadFeatures ) == + sizeof( VkPhysicalDeviceDynamicRenderingLocalReadFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDynamicRenderingLocalReadFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo ) == sizeof( VkRenderingAttachmentLocationInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingAttachmentLocationInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo ) == sizeof( VkRenderingInputAttachmentIndexInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingInputAttachmentIndexInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Features ) == sizeof( VkPhysicalDeviceMaintenance6Features ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance6Features is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Properties ) == sizeof( VkPhysicalDeviceMaintenance6Properties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance6Properties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindMemoryStatus ) == sizeof( VkBindMemoryStatus ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindMemoryStatus is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo ) == sizeof( VkBindDescriptorSetsInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindDescriptorSetsInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushConstantsInfo ) == sizeof( VkPushConstantsInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PushConstantsInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo ) == sizeof( VkPushDescriptorSetInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PushDescriptorSetInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo ) == sizeof( VkPushDescriptorSetWithTemplateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PushDescriptorSetWithTemplateInfo is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeatures ) == + sizeof( VkPhysicalDevicePipelineProtectedAccessFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineProtectedAccessFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeatures ) == sizeof( VkPhysicalDevicePipelineRobustnessFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineRobustnessFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessProperties ) == sizeof( VkPhysicalDevicePipelineRobustnessProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineRobustnessProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfo ) == sizeof( VkPipelineRobustnessCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineRobustnessCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeatures ) == sizeof( VkPhysicalDeviceHostImageCopyFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceHostImageCopyFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyProperties ) == sizeof( VkPhysicalDeviceHostImageCopyProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceHostImageCopyProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryToImageCopy ) == sizeof( VkMemoryToImageCopy ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryToImageCopy is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageToMemoryCopy ) == sizeof( VkImageToMemoryCopy ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageToMemoryCopy is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo ) == sizeof( VkCopyMemoryToImageInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyMemoryToImageInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo ) == sizeof( VkCopyImageToMemoryInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyImageToMemoryInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToImageInfo ) == sizeof( VkCopyImageToImageInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyImageToImageInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo ) == sizeof( VkHostImageLayoutTransitionInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "HostImageLayoutTransitionInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySize ) == sizeof( VkSubresourceHostMemcpySize ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubresourceHostMemcpySize is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQuery ) == sizeof( VkHostImageCopyDevicePerformanceQuery ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "HostImageCopyDevicePerformanceQuery is not nothrow_move_constructible!" ); + +//=== VK_KHR_surface === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "SurfaceKHR is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "SurfaceKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SurfaceCapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SurfaceFormatKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_swapchain === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SwapchainCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "SwapchainKHR is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "SwapchainKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PresentInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageSwapchainCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindImageMemorySwapchainInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AcquireNextImageInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceGroupPresentCapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceGroupPresentInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceGroupSwapchainCreateInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_display === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "DisplayKHR is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DisplayKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayModeCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "DisplayModeKHR is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayModeKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayModeParametersKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayModePropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayPlaneCapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayPlanePropertiesKHR is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplaySurfaceCreateInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_display_swapchain === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayPresentInfoKHR is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) +//=== VK_KHR_xlib_surface === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "XlibSurfaceCreateInfoKHR is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) +//=== VK_KHR_xcb_surface === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "XcbSurfaceCreateInfoKHR is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) +//=== VK_KHR_wayland_surface === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "WaylandSurfaceCreateInfoKHR is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) +//=== VK_KHR_android_surface === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AndroidSurfaceCreateInfoKHR is not nothrow_move_constructible!" 
); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) +//=== VK_KHR_win32_surface === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "Win32SurfaceCreateInfoKHR is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +//=== VK_EXT_debug_report === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), + "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "DebugReportCallbackEXT is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DebugReportCallbackEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DebugReportCallbackCreateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_AMD_rasterization_order === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD ) == + sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineRasterizationStateRasterizationOrderAMD is not nothrow_move_constructible!" ); + +//=== VK_EXT_debug_marker === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DebugMarkerObjectNameInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DebugMarkerObjectTagInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DebugMarkerMarkerInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_KHR_video_queue === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionKHR ) == sizeof( VkVideoSessionKHR ), "handle and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "VideoSessionKHR is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoSessionKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR ) == sizeof( VkVideoSessionParametersKHR ), + "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "VideoSessionParametersKHR is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoSessionParametersKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR ) == sizeof( VkQueueFamilyQueryResultStatusPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueueFamilyQueryResultStatusPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR ) == sizeof( VkQueueFamilyVideoPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueueFamilyVideoPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR ) == sizeof( VkVideoProfileInfoKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoProfileInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR ) == sizeof( VkVideoProfileListInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoProfileListInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR ) == sizeof( VkVideoCapabilitiesKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoCapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR ) == sizeof( VkPhysicalDeviceVideoFormatInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVideoFormatInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR ) == sizeof( VkVideoFormatPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value, "VideoFormatPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR ) == sizeof( VkVideoPictureResourceInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR>::value, "VideoPictureResourceInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR ) == sizeof( VkVideoReferenceSlotInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR>::value, "VideoReferenceSlotInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR ) == sizeof( VkVideoSessionMemoryRequirementsKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>::value, "VideoSessionMemoryRequirementsKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR ) == sizeof( VkBindVideoSessionMemoryInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR>::value, "BindVideoSessionMemoryInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR ) == sizeof( VkVideoSessionCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR>::value, "VideoSessionCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR ) == sizeof( VkVideoSessionParametersCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR>::value, "VideoSessionParametersCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR ) == sizeof( VkVideoSessionParametersUpdateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR>::value, "VideoSessionParametersUpdateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR ) == sizeof( VkVideoBeginCodingInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR>::value, "VideoBeginCodingInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR ) == sizeof( VkVideoEndCodingInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>::value, "VideoEndCodingInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR ) == sizeof( VkVideoCodingControlInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>::value, "VideoCodingControlInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_video_decode_queue ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR ) == sizeof( VkVideoDecodeCapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR>::value, "VideoDecodeCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR ) == sizeof( VkVideoDecodeUsageInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR>::value, "VideoDecodeUsageInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR ) == sizeof( VkVideoDecodeInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR>::value, "VideoDecodeInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_dedicated_allocation ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV>::value, "DedicatedAllocationImageCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV>::value, "DedicatedAllocationBufferCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV>::value, "DedicatedAllocationMemoryAllocateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_transform_feedback ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT>::value, "PhysicalDeviceTransformFeedbackFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT>::value, "PhysicalDeviceTransformFeedbackPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT ) == sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT>::value, "PipelineRasterizationStateStreamCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_NVX_binary_import ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleNVX ) == sizeof( VkCuModuleNVX ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible<VULKAN_HPP_NAMESPACE::CuModuleNVX>::value, "CuModuleNVX is not copy_constructible!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuModuleNVX>::value, "CuModuleNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionNVX ) == sizeof( VkCuFunctionNVX ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::value, "CuFunctionNVX is not copy_constructible!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::value, "CuFunctionNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX ) == sizeof( VkCuModuleCreateInfoNVX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX>::value, "CuModuleCreateInfoNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleTexturingModeCreateInfoNVX ) == sizeof( VkCuModuleTexturingModeCreateInfoNVX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuModuleTexturingModeCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuModuleTexturingModeCreateInfoNVX>::value, "CuModuleTexturingModeCreateInfoNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX ) == sizeof( VkCuFunctionCreateInfoNVX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX>::value, "struct wrapper is not a standard layout!"
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CuFunctionCreateInfoNVX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX ) == sizeof( VkCuLaunchInfoNVX ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CuLaunchInfoNVX is not nothrow_move_constructible!" ); + +//=== VK_NVX_image_view_handle === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageViewHandleInfoNVX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX ) == sizeof( VkImageViewAddressPropertiesNVX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageViewAddressPropertiesNVX is not nothrow_move_constructible!" ); + +//=== VK_KHR_video_encode_h264 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesKHR ) == sizeof( VkVideoEncodeH264CapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264CapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264QualityLevelPropertiesKHR ) == sizeof( VkVideoEncodeH264QualityLevelPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264QualityLevelPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoKHR ) == sizeof( VkVideoEncodeH264SessionCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264SessionCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoKHR ) == + sizeof( VkVideoEncodeH264SessionParametersCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264SessionParametersCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR ) == sizeof( VkVideoEncodeH264SessionParametersAddInfoKHR ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264SessionParametersAddInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersGetInfoKHR ) == sizeof( VkVideoEncodeH264SessionParametersGetInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264SessionParametersGetInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersFeedbackInfoKHR ) == + sizeof( VkVideoEncodeH264SessionParametersFeedbackInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264SessionParametersFeedbackInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264PictureInfoKHR ) == sizeof( VkVideoEncodeH264PictureInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264PictureInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoKHR ) == sizeof( VkVideoEncodeH264DpbSlotInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264DpbSlotInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR ) == sizeof( VkVideoEncodeH264NaluSliceInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264NaluSliceInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoKHR ) == sizeof( VkVideoEncodeH264ProfileInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264ProfileInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoKHR ) == sizeof( VkVideoEncodeH264RateControlInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264RateControlInfoKHR is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoKHR ) == sizeof( VkVideoEncodeH264RateControlLayerInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264RateControlLayerInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR ) == sizeof( VkVideoEncodeH264QpKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264QpKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR ) == sizeof( VkVideoEncodeH264FrameSizeKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264FrameSizeKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264GopRemainingFrameInfoKHR ) == sizeof( VkVideoEncodeH264GopRemainingFrameInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264GopRemainingFrameInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_video_encode_h265 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesKHR ) == sizeof( VkVideoEncodeH265CapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265CapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoKHR ) == sizeof( VkVideoEncodeH265SessionCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265SessionCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265QualityLevelPropertiesKHR ) == sizeof( VkVideoEncodeH265QualityLevelPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265QualityLevelPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoKHR ) == + sizeof( VkVideoEncodeH265SessionParametersCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265SessionParametersCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR ) == sizeof( VkVideoEncodeH265SessionParametersAddInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265SessionParametersAddInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersGetInfoKHR ) == sizeof( VkVideoEncodeH265SessionParametersGetInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265SessionParametersGetInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersFeedbackInfoKHR ) == + sizeof( VkVideoEncodeH265SessionParametersFeedbackInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265SessionParametersFeedbackInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265PictureInfoKHR ) == sizeof( VkVideoEncodeH265PictureInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265PictureInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoKHR ) == sizeof( VkVideoEncodeH265DpbSlotInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265DpbSlotInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR ) == sizeof( VkVideoEncodeH265NaluSliceSegmentInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265NaluSliceSegmentInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoKHR ) == sizeof( VkVideoEncodeH265ProfileInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265ProfileInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoKHR ) == sizeof( VkVideoEncodeH265RateControlInfoKHR ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265RateControlInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoKHR ) == sizeof( VkVideoEncodeH265RateControlLayerInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265RateControlLayerInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR ) == sizeof( VkVideoEncodeH265QpKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265QpKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR ) == sizeof( VkVideoEncodeH265FrameSizeKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265FrameSizeKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265GopRemainingFrameInfoKHR ) == sizeof( VkVideoEncodeH265GopRemainingFrameInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265GopRemainingFrameInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_video_decode_h264 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR ) == sizeof( VkVideoDecodeH264ProfileInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH264ProfileInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR ) == sizeof( VkVideoDecodeH264CapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH264CapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR ) == + sizeof( VkVideoDecodeH264SessionParametersCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH264SessionParametersCreateInfoKHR is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR ) == sizeof( VkVideoDecodeH264SessionParametersAddInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH264SessionParametersAddInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR ) == sizeof( VkVideoDecodeH264PictureInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH264PictureInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR ) == sizeof( VkVideoDecodeH264DpbSlotInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH264DpbSlotInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_AMD_texture_gather_bias_lod === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "TextureLODGatherFormatPropertiesAMD is not nothrow_move_constructible!" ); + +//=== VK_AMD_shader_info === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ShaderResourceUsageAMD is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ShaderStatisticsInfoAMD is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_GGP ) +//=== VK_GGP_stream_descriptor_surface === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "StreamDescriptorSurfaceCreateInfoGGP is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_GGP*/ + +//=== VK_NV_corner_sampled_image === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCornerSampledImageFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_NV_external_memory_capabilities === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalImageFormatPropertiesNV is not nothrow_move_constructible!" ); + +//=== VK_NV_external_memory === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalMemoryImageCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportMemoryAllocateInfoNV is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) +//=== VK_NV_external_memory_win32 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportMemoryWin32HandleInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportMemoryWin32HandleInfoNV is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) +//=== VK_NV_win32_keyed_mutex === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "Win32KeyedMutexAcquireReleaseInfoNV is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +//=== VK_EXT_validation_flags === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ValidationFlagsEXT is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_VI_NN ) +//=== VK_NN_vi_surface === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ViSurfaceCreateInfoNN is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_VI_NN*/ + +//=== VK_EXT_astc_decode_mode === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageViewASTCDecodeModeEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceASTCDecodeFeaturesEXT is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) +//=== VK_KHR_external_memory_win32 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportMemoryWin32HandleInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportMemoryWin32HandleInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryWin32HandlePropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryGetWin32HandleInfoKHR is not nothrow_move_constructible!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +//=== VK_KHR_external_memory_fd === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportMemoryFdInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryFdPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryGetFdInfoKHR is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) +//=== VK_KHR_win32_keyed_mutex === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "Win32KeyedMutexAcquireReleaseInfoKHR is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) +//=== VK_KHR_external_semaphore_win32 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportSemaphoreWin32HandleInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportSemaphoreWin32HandleInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "D3D12FenceSubmitInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SemaphoreGetWin32HandleInfoKHR is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +//=== VK_KHR_external_semaphore_fd === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportSemaphoreFdInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SemaphoreGetFdInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_EXT_conditional_rendering === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ConditionalRenderingBeginInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT ) == + sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceConditionalRenderingFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT ) == + sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CommandBufferInheritanceConditionalRenderingInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_KHR_incremental_present === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PresentRegionsKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PresentRegionKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "RectLayerKHR is not nothrow_move_constructible!" ); + +//=== VK_NV_clip_space_w_scaling === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ViewportWScalingNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineViewportWScalingStateCreateInfoNV is not nothrow_move_constructible!" ); + +//=== VK_EXT_display_surface_counter === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SurfaceCapabilities2EXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_display_control === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayPowerInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceEventInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayEventInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SwapchainCounterCreateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_GOOGLE_display_timing === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RefreshCycleDurationGOOGLE is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PastPresentationTimingGOOGLE is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PresentTimesInfoGOOGLE is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PresentTimeGOOGLE is not nothrow_move_constructible!" ); + +//=== VK_NVX_multiview_per_view_attributes === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == + sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX ) == sizeof( VkMultiviewPerViewAttributesInfoNVX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MultiviewPerViewAttributesInfoNVX is not nothrow_move_constructible!" ); + +//=== VK_NV_viewport_swizzle === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ViewportSwizzleNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineViewportSwizzleStateCreateInfoNV is not nothrow_move_constructible!" 
); + +//=== VK_EXT_discard_rectangles === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT ) == + sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDiscardRectanglePropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineDiscardRectangleStateCreateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_conservative_rasterization === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT ) == + sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceConservativeRasterizationPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT ) == + sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineRasterizationConservativeStateCreateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_depth_clip_enable === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDepthClipEnableFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT ) == + sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineRasterizationDepthClipStateCreateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_hdr_metadata === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "HdrMetadataEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "XYColorEXT is not nothrow_move_constructible!" ); + +//=== VK_IMG_relaxed_line_rasterization === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRelaxedLineRasterizationFeaturesIMG ) == + sizeof( VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRelaxedLineRasterizationFeaturesIMG is not nothrow_move_constructible!" ); + +//=== VK_KHR_shared_presentable_image === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SharedPresentSurfaceCapabilitiesKHR is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) +//=== VK_KHR_external_fence_win32 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportFenceWin32HandleInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportFenceWin32HandleInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "FenceGetWin32HandleInfoKHR is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +//=== VK_KHR_external_fence_fd === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportFenceFdInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
+
+//=== VK_KHR_performance_query ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR>::value, "PhysicalDevicePerformanceQueryFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR>::value, "PhysicalDevicePerformanceQueryPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value, "PerformanceCounterKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value, "PerformanceCounterDescriptionKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR>::value, "QueryPoolPerformanceCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) == sizeof( VkPerformanceCounterResultKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR>::value, "PerformanceCounterResultKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR ) == sizeof( VkAcquireProfilingLockInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR>::value, "AcquireProfilingLockInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR ) == sizeof( VkPerformanceQuerySubmitInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR>::value, "PerformanceQuerySubmitInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_get_surface_capabilities2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR>::value, "PhysicalDeviceSurfaceInfo2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::value, "SurfaceCapabilities2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value, "SurfaceFormat2KHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_get_display_properties2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value, "DisplayProperties2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value, "DisplayPlaneProperties2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value, "DisplayModeProperties2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR>::value, "DisplayPlaneInfo2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::value, "DisplayPlaneCapabilities2KHR is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+//=== VK_MVK_ios_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK>::value, "IOSSurfaceCreateInfoMVK is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+//=== VK_MVK_macos_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK>::value, "MacOSSurfaceCreateInfoMVK is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+//=== VK_EXT_debug_utils ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT>::value, "DebugUtilsLabelEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT>::value, "DebugUtilsMessengerCallbackDataEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT>::value, "DebugUtilsMessengerCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::value, "DebugUtilsMessengerEXT is not copy_constructible!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::value, "DebugUtilsMessengerEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT>::value, "DebugUtilsObjectNameInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT>::value, "DebugUtilsObjectTagInfoEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+//=== VK_ANDROID_external_memory_android_hardware_buffer ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID>::value, "AndroidHardwareBufferUsageANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::value, "AndroidHardwareBufferPropertiesANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>::value, "AndroidHardwareBufferFormatPropertiesANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID>::value, "ImportAndroidHardwareBufferInfoANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID>::value, "MemoryGetAndroidHardwareBufferInfoANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID>::value, "ExternalFormatANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID ) == sizeof( VkAndroidHardwareBufferFormatProperties2ANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID>::value, "AndroidHardwareBufferFormatProperties2ANDROID is not nothrow_move_constructible!"
); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) +//=== VK_AMDX_shader_enqueue === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX ) == sizeof( VkPhysicalDeviceShaderEnqueueFeaturesAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderEnqueueFeaturesAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX ) == sizeof( VkPhysicalDeviceShaderEnqueuePropertiesAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderEnqueuePropertiesAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX ) == sizeof( VkExecutionGraphPipelineScratchSizeAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExecutionGraphPipelineScratchSizeAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX ) == sizeof( VkExecutionGraphPipelineCreateInfoAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExecutionGraphPipelineCreateInfoAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX ) == sizeof( VkDispatchGraphInfoAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DispatchGraphInfoAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX ) == sizeof( VkDispatchGraphCountInfoAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DispatchGraphCountInfoAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX ) == sizeof( VkPipelineShaderStageNodeCreateInfoAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineShaderStageNodeCreateInfoAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX ) == sizeof( VkDeviceOrHostAddressConstAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceOrHostAddressConstAMDX is not nothrow_move_constructible!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +//=== VK_AMD_mixed_attachment_samples === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD ) == sizeof( VkAttachmentSampleCountInfoAMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AttachmentSampleCountInfoAMD is not nothrow_move_constructible!" ); + +//=== VK_KHR_shader_bfloat16 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderBfloat16FeaturesKHR ) == sizeof( VkPhysicalDeviceShaderBfloat16FeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderBfloat16FeaturesKHR is not nothrow_move_constructible!" ); + +//=== VK_EXT_sample_locations === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SampleLocationEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SampleLocationsInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AttachmentSampleLocationsEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubpassSampleLocationsEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassSampleLocationsBeginInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineSampleLocationsStateCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceSampleLocationsPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MultisamplePropertiesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_blend_operation_advanced === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == + sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceBlendOperationAdvancedFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == + sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceBlendOperationAdvancedPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT ) == + sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineColorBlendAdvancedStateCreateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_NV_fragment_coverage_to_color === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineCoverageToColorStateCreateInfoNV is not nothrow_move_constructible!" ); + +//=== VK_KHR_acceleration_structure === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) == sizeof( VkDeviceOrHostAddressKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceOrHostAddressKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) == sizeof( VkDeviceOrHostAddressConstKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceOrHostAddressConstKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR ) == sizeof( VkAccelerationStructureBuildRangeInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureBuildRangeInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AabbPositionsKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR ) == + sizeof( VkAccelerationStructureGeometryTrianglesDataKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureGeometryTrianglesDataKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TransformMatrixKHR ) == sizeof( VkTransformMatrixKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "TransformMatrixKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR ) == sizeof( VkAccelerationStructureBuildGeometryInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureBuildGeometryInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR ) == sizeof( VkAccelerationStructureGeometryAabbsDataKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureGeometryAabbsDataKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR ) == sizeof( VkAccelerationStructureInstanceKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureInstanceKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR ) == + sizeof( VkAccelerationStructureGeometryInstancesDataKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureGeometryInstancesDataKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) == sizeof( VkAccelerationStructureGeometryDataKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureGeometryDataKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR ) == sizeof( VkAccelerationStructureGeometryKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureGeometryKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR ) == sizeof( VkAccelerationStructureCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR ) == sizeof( VkAccelerationStructureKHR ), + "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "AccelerationStructureKHR is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR ) == sizeof( VkWriteDescriptorSetAccelerationStructureKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "WriteDescriptorSetAccelerationStructureKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR ) == + sizeof( VkPhysicalDeviceAccelerationStructureFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceAccelerationStructureFeaturesKHR is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR ) == + sizeof( VkPhysicalDeviceAccelerationStructurePropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceAccelerationStructurePropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR ) == sizeof( VkAccelerationStructureDeviceAddressInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureDeviceAddressInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR ) == sizeof( VkAccelerationStructureVersionInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureVersionInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR ) == sizeof( VkCopyAccelerationStructureToMemoryInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyAccelerationStructureToMemoryInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR ) == sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyMemoryToAccelerationStructureInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR ) == sizeof( VkCopyAccelerationStructureInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyAccelerationStructureInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR ) == sizeof( VkAccelerationStructureBuildSizesInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureBuildSizesInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_ray_tracing_pipeline === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RayTracingShaderGroupCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RayTracingPipelineCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR ) == + sizeof( VkPhysicalDeviceRayTracingPipelineFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingPipelineFeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR ) == + sizeof( VkPhysicalDeviceRayTracingPipelinePropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingPipelinePropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR ) == sizeof( VkStridedDeviceAddressRegionKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "StridedDeviceAddressRegionKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "TraceRaysIndirectCommandKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR ) == sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RayTracingPipelineInterfaceCreateInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_ray_query === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR ) == sizeof( VkPhysicalDeviceRayQueryFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayQueryFeaturesKHR is not nothrow_move_constructible!" 
); + +//=== VK_NV_framebuffer_mixed_samples === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV ) == + sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineCoverageModulationStateCreateInfoNV is not nothrow_move_constructible!" ); + +//=== VK_NV_shader_sm_builtins === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderSMBuiltinsPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderSMBuiltinsFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_EXT_image_drm_format_modifier === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DrmFormatModifierPropertiesListEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DrmFormatModifierPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT ) == + sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageDrmFormatModifierInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageDrmFormatModifierListCreateInfoEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT ) == + sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageDrmFormatModifierExplicitCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageDrmFormatModifierPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT ) == sizeof( VkDrmFormatModifierPropertiesList2EXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DrmFormatModifierPropertiesList2EXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT ) == sizeof( VkDrmFormatModifierProperties2EXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DrmFormatModifierProperties2EXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_validation_cache === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "ValidationCacheEXT is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ValidationCacheEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ValidationCacheCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ShaderModuleValidationCacheCreateInfoEXT is not nothrow_move_constructible!" ); + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) +//=== VK_KHR_portability_subset === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR ) == sizeof( VkPhysicalDevicePortabilitySubsetFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePortabilitySubsetFeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR ) == + sizeof( VkPhysicalDevicePortabilitySubsetPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePortabilitySubsetPropertiesKHR is not nothrow_move_constructible!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +//=== VK_NV_shading_rate_image === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ShadingRatePaletteNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV ) == + sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineViewportShadingRateImageStateCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV ) == sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShadingRateImageFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV ) == sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShadingRateImagePropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CoarseSampleLocationNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CoarseSampleOrderCustomNV is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV ) == + sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineViewportCoarseSampleOrderStateCreateInfoNV is not nothrow_move_constructible!" ); + +//=== VK_NV_ray_tracing === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RayTracingShaderGroupCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RayTracingPipelineCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeometryTrianglesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeometryAABBNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeometryDataNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "GeometryNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureInfoNV is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureNV ) == sizeof( VkAccelerationStructureNV ), + "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "AccelerationStructureNV is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindAccelerationStructureMemoryInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV ) == sizeof( VkWriteDescriptorSetAccelerationStructureNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "WriteDescriptorSetAccelerationStructureNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV ) == + sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureMemoryRequirementsInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingPropertiesNV is not nothrow_move_constructible!" ); + +//=== VK_NV_representative_fragment_test === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == + sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRepresentativeFragmentTestFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV ) == + sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineRepresentativeFragmentTestStateCreateInfoNV is not nothrow_move_constructible!" ); + +//=== VK_EXT_filter_cubic === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT ) == sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageViewImageFormatInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT ) == + sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "FilterCubicImageViewImageFormatPropertiesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_external_memory_host === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportMemoryHostPointerInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryHostPointerPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT ) == + sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalMemoryHostPropertiesEXT is not nothrow_move_constructible!" ); + +//=== VK_KHR_shader_clock === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderClockFeaturesKHR is not nothrow_move_constructible!" ); + +//=== VK_AMD_pipeline_compiler_control === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD>::value,
+                          "PipelineCompilerControlCreateInfoAMD is not nothrow_move_constructible!" );
+
+//=== VK_AMD_shader_core_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD>::value,
+                          "PhysicalDeviceShaderCorePropertiesAMD is not nothrow_move_constructible!" );
+
+//=== VK_KHR_video_decode_h265 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR ) == sizeof( VkVideoDecodeH265ProfileInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR>::value,
+                          "VideoDecodeH265ProfileInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR ) == sizeof( VkVideoDecodeH265CapabilitiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR>::value,
+                          "VideoDecodeH265CapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR ) ==
+                          sizeof( VkVideoDecodeH265SessionParametersCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR>::value,
+                          "VideoDecodeH265SessionParametersCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR ) == sizeof( VkVideoDecodeH265SessionParametersAddInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR>::value,
+                          "VideoDecodeH265SessionParametersAddInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR ) == sizeof( VkVideoDecodeH265PictureInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR>::value,
+                          "VideoDecodeH265PictureInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR ) == sizeof( VkVideoDecodeH265DpbSlotInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR>::value,
+                          "VideoDecodeH265DpbSlotInfoKHR is not nothrow_move_constructible!"
);
+
+//=== VK_AMD_memory_overallocation_behavior ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD>::value,
+                          "DeviceMemoryOverallocationCreateInfoAMD is not nothrow_move_constructible!" );
+
+//=== VK_EXT_vertex_attribute_divisor ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) ==
+                          sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value,
+                          "PhysicalDeviceVertexAttributeDivisorPropertiesEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_GGP )
+//=== VK_GGP_frame_token ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>::value,
+                          "PresentFrameTokenGGP is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_GGP*/
+
+//=== VK_NV_mesh_shader ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV>::value,
+                          "PhysicalDeviceMeshShaderFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV>::value,
+                          "PhysicalDeviceMeshShaderPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV>::value,
+                          "DrawMeshTasksIndirectCommandNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_shader_image_footprint ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV ) ==
+                          sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV>::value,
+                          "PhysicalDeviceShaderImageFootprintFeaturesNV is not nothrow_move_constructible!"
); + +//=== VK_NV_scissor_exclusive === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV ) == + sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineViewportExclusiveScissorStateCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExclusiveScissorFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_NV_device_diagnostic_checkpoints === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueueFamilyCheckpointPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CheckpointDataNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV ) == sizeof( VkQueueFamilyCheckpointProperties2NV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueueFamilyCheckpointProperties2NV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointData2NV ) == sizeof( VkCheckpointData2NV ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CheckpointData2NV is not nothrow_move_constructible!" ); + +//=== VK_INTEL_shader_integer_functions2 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == + sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL is not nothrow_move_constructible!" ); + +//=== VK_INTEL_performance_query === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) == sizeof( VkPerformanceValueDataINTEL ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PerformanceValueDataINTEL is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PerformanceValueINTEL is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "InitializePerformanceApiInfoINTEL is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL ) == sizeof( VkQueryPoolPerformanceQueryCreateInfoINTEL ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueryPoolPerformanceQueryCreateInfoINTEL is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PerformanceMarkerInfoINTEL is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PerformanceStreamMarkerInfoINTEL is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PerformanceOverrideInfoINTEL is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PerformanceConfigurationAcquireInfoINTEL is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL ) == sizeof( VkPerformanceConfigurationINTEL ), + "handle and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "PerformanceConfigurationINTEL is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PerformanceConfigurationINTEL is not nothrow_move_constructible!" ); + +//=== VK_EXT_pci_bus_info === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePCIBusInfoPropertiesEXT is not nothrow_move_constructible!" ); + +//=== VK_AMD_display_native_hdr === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayNativeHdrSurfaceCapabilitiesAMD is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SwapchainDisplayNativeHdrCreateInfoAMD is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_FUCHSIA ) +//=== VK_FUCHSIA_imagepipe_surface === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImagePipeSurfaceCreateInfoFUCHSIA is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) +//=== VK_EXT_metal_surface === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MetalSurfaceCreateInfoEXT is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +//=== VK_EXT_fragment_density_map === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentDensityMapFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentDensityMapPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT ) == sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassFragmentDensityMapCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT ) == + sizeof( VkRenderingFragmentDensityMapAttachmentInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingFragmentDensityMapAttachmentInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_KHR_fragment_shading_rate === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR ) == sizeof( VkFragmentShadingRateAttachmentInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "FragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR ) == + sizeof( VkPipelineFragmentShadingRateStateCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineFragmentShadingRateStateCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR ) == + sizeof( VkPhysicalDeviceFragmentShadingRateFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShadingRateFeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR ) == + sizeof( VkPhysicalDeviceFragmentShadingRatePropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShadingRatePropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRateKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShadingRateKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR ) == + sizeof( VkRenderingFragmentShadingRateAttachmentInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingFragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_AMD_shader_core_properties2 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderCoreProperties2AMD is not nothrow_move_constructible!" ); + +//=== VK_AMD_device_coherent_memory === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD ) == sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCoherentMemoryFeaturesAMD is not nothrow_move_constructible!" ); + +//=== VK_EXT_shader_image_atomic_int64 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT ) == + sizeof( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_KHR_shader_quad_control === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderQuadControlFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderQuadControlFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderQuadControlFeaturesKHR is not nothrow_move_constructible!" ); + +//=== VK_EXT_memory_budget === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMemoryBudgetPropertiesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_memory_priority === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT ) == sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMemoryPriorityFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryPriorityAllocateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_KHR_surface_protected_capabilities === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SurfaceProtectedCapabilitiesKHR is not nothrow_move_constructible!" ); + +//=== VK_NV_dedicated_allocation_image_aliasing === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == + sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_EXT_buffer_device_address === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == + sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceBufferDeviceAddressFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferDeviceAddressCreateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_validation_features === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ValidationFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_KHR_present_wait === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR ) == sizeof( VkPhysicalDevicePresentWaitFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePresentWaitFeaturesKHR is not nothrow_move_constructible!" 
); + +//=== VK_NV_cooperative_matrix === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CooperativeMatrixPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrixFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV ) == + sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrixPropertiesNV is not nothrow_move_constructible!" ); + +//=== VK_NV_coverage_reduction_mode === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV ) == + sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCoverageReductionModeFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV ) == sizeof( VkPipelineCoverageReductionStateCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineCoverageReductionStateCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "FramebufferMixedSamplesCombinationNV is not nothrow_move_constructible!" ); + +//=== VK_EXT_fragment_shader_interlock === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == + sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShaderInterlockFeaturesEXT is not nothrow_move_constructible!" 
); + +//=== VK_EXT_ycbcr_image_arrays === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceYcbcrImageArraysFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_provoking_vertex === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT ) == sizeof( VkPhysicalDeviceProvokingVertexFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceProvokingVertexFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT ) == sizeof( VkPhysicalDeviceProvokingVertexPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceProvokingVertexPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT ) == + sizeof( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineRasterizationProvokingVertexStateCreateInfoEXT is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) +//=== VK_EXT_full_screen_exclusive === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SurfaceFullScreenExclusiveInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT ) == sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SurfaceCapabilitiesFullScreenExclusiveEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SurfaceFullScreenExclusiveWin32InfoEXT is not nothrow_move_constructible!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +//=== VK_EXT_headless_surface === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "HeadlessSurfaceCreateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_shader_atomic_float === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderAtomicFloatFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_extended_dynamic_state === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT ) == + sizeof( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExtendedDynamicStateFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_KHR_deferred_host_operations === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeferredOperationKHR ) == sizeof( VkDeferredOperationKHR ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "DeferredOperationKHR is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeferredOperationKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_pipeline_executable_properties === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == + sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineExecutablePropertiesKHR is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineExecutableInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) == sizeof( VkPipelineExecutableStatisticValueKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineExecutableStatisticValueKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineExecutableStatisticKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR ) == + sizeof( VkPipelineExecutableInternalRepresentationKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineExecutableInternalRepresentationKHR is not nothrow_move_constructible!" ); + +//=== VK_EXT_map_memory_placed === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT ) == sizeof( VkPhysicalDeviceMapMemoryPlacedFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMapMemoryPlacedFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedPropertiesEXT ) == sizeof( VkPhysicalDeviceMapMemoryPlacedPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMapMemoryPlacedPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryMapPlacedInfoEXT ) == sizeof( VkMemoryMapPlacedInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryMapPlacedInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_shader_atomic_float2 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT ) == + sizeof( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderAtomicFloat2FeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_surface_maintenance1 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT ) == sizeof( VkSurfacePresentModeEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SurfacePresentModeEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT ) == sizeof( VkSurfacePresentScalingCapabilitiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SurfacePresentScalingCapabilitiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT ) == sizeof( VkSurfacePresentModeCompatibilityEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SurfacePresentModeCompatibilityEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_swapchain_maintenance1 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT ) == + sizeof( VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceSwapchainMaintenance1FeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT ) == sizeof( VkSwapchainPresentFenceInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SwapchainPresentFenceInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT ) == sizeof( VkSwapchainPresentModesCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SwapchainPresentModesCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT ) == sizeof( VkSwapchainPresentModeInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SwapchainPresentModeInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT ) == sizeof( VkSwapchainPresentScalingCreateInfoEXT ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SwapchainPresentScalingCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT ) == sizeof( VkReleaseSwapchainImagesInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ReleaseSwapchainImagesInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_NV_device_generated_commands === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) == + sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) == + sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV ) == sizeof( VkGraphicsShaderGroupCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GraphicsShaderGroupCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV ) == sizeof( VkGraphicsPipelineShaderGroupsCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GraphicsPipelineShaderGroupsCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV ) == sizeof( VkBindShaderGroupIndirectCommandNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindShaderGroupIndirectCommandNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV ) == sizeof( VkBindIndexBufferIndirectCommandNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindIndexBufferIndirectCommandNV is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV ) == sizeof( VkBindVertexBufferIndirectCommandNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindVertexBufferIndirectCommandNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV ) == sizeof( VkSetStateFlagsIndirectCommandNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SetStateFlagsIndirectCommandNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ), + "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "IndirectCommandsLayoutNV is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsLayoutNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsStreamNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV ) == sizeof( VkIndirectCommandsLayoutTokenNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsLayoutTokenNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV ) == sizeof( VkIndirectCommandsLayoutCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsLayoutCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeneratedCommandsInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV ) == sizeof( VkGeneratedCommandsMemoryRequirementsInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeneratedCommandsMemoryRequirementsInfoNV is not nothrow_move_constructible!" 
); + +//=== VK_NV_inherited_viewport_scissor === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV ) == + sizeof( VkPhysicalDeviceInheritedViewportScissorFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceInheritedViewportScissorFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV ) == + sizeof( VkCommandBufferInheritanceViewportScissorInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CommandBufferInheritanceViewportScissorInfoNV is not nothrow_move_constructible!" ); + +//=== VK_EXT_texel_buffer_alignment === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) == + sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceTexelBufferAlignmentFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_QCOM_render_pass_transform === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM ) == sizeof( VkRenderPassTransformBeginInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassTransformBeginInfoQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM ) == + sizeof( VkCommandBufferInheritanceRenderPassTransformInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CommandBufferInheritanceRenderPassTransformInfoQCOM is not nothrow_move_constructible!" ); + +//=== VK_EXT_depth_bias_control === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthBiasControlFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthBiasControlFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDepthBiasControlFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT ) == sizeof( VkDepthBiasInfoEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DepthBiasInfoEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DepthBiasRepresentationInfoEXT ) == sizeof( VkDepthBiasRepresentationInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DepthBiasRepresentationInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_device_memory_report === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT ) == + sizeof( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDeviceMemoryReportFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT ) == sizeof( VkDeviceDeviceMemoryReportCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceDeviceMemoryReportCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT ) == sizeof( VkDeviceMemoryReportCallbackDataEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceMemoryReportCallbackDataEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_robustness2 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT ) == sizeof( VkPhysicalDeviceRobustness2FeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRobustness2FeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT ) == sizeof( VkPhysicalDeviceRobustness2PropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRobustness2PropertiesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_custom_border_color === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT ) == sizeof( VkSamplerCustomBorderColorCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerCustomBorderColorCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT ) == + sizeof( VkPhysicalDeviceCustomBorderColorPropertiesEXT ), + "struct and wrapper have different size!" 
);
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT>::value, "PhysicalDeviceCustomBorderColorPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT ) == sizeof( VkPhysicalDeviceCustomBorderColorFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>::value, "PhysicalDeviceCustomBorderColorFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_pipeline_library ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR ) == sizeof( VkPipelineLibraryCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>::value, "PipelineLibraryCreateInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_present_barrier ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV ) == sizeof( VkPhysicalDevicePresentBarrierFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV>::value, "PhysicalDevicePresentBarrierFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV ) == sizeof( VkSurfaceCapabilitiesPresentBarrierNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV>::value, "SurfaceCapabilitiesPresentBarrierNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV ) == sizeof( VkSwapchainPresentBarrierCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV>::value, "SwapchainPresentBarrierCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_KHR_present_id ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentIdKHR ) == sizeof( VkPresentIdKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentIdKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentIdKHR>::value, "PresentIdKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR ) == sizeof( VkPhysicalDevicePresentIdFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR>::value, "PhysicalDevicePresentIdFeaturesKHR is not nothrow_move_constructible!"
); + +//=== VK_KHR_video_encode_queue === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR ) == sizeof( VkVideoEncodeInfoKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR ) == sizeof( VkVideoEncodeCapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeCapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolVideoEncodeFeedbackCreateInfoKHR ) == sizeof( VkQueryPoolVideoEncodeFeedbackCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueryPoolVideoEncodeFeedbackCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR ) == sizeof( VkVideoEncodeUsageInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeUsageInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR ) == sizeof( VkVideoEncodeRateControlInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeRateControlInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR ) == sizeof( VkVideoEncodeRateControlLayerInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeRateControlLayerInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR ) == + sizeof( VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVideoEncodeQualityLevelInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR ) == sizeof( VkVideoEncodeQualityLevelPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeQualityLevelPropertiesKHR is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelInfoKHR ) == sizeof( VkVideoEncodeQualityLevelInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeQualityLevelInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR ) == sizeof( VkVideoEncodeSessionParametersGetInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeSessionParametersGetInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR ) == + sizeof( VkVideoEncodeSessionParametersFeedbackInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeSessionParametersFeedbackInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_NV_device_diagnostics_config === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV ) == sizeof( VkPhysicalDeviceDiagnosticsConfigFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDiagnosticsConfigFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV ) == sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceDiagnosticsConfigCreateInfoNV is not nothrow_move_constructible!" ); + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) +//=== VK_NV_cuda_kernel_launch === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaModuleNV ) == sizeof( VkCudaModuleNV ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "CudaModuleNV is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CudaModuleNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaFunctionNV ) == sizeof( VkCudaFunctionNV ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "CudaFunctionNV is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CudaFunctionNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV ) == sizeof( VkCudaModuleCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CudaModuleCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV ) == sizeof( VkCudaFunctionCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CudaFunctionCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV ) == sizeof( VkCudaLaunchInfoNV ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CudaLaunchInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV ) == sizeof( VkPhysicalDeviceCudaKernelLaunchFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCudaKernelLaunchFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV ) == sizeof( VkPhysicalDeviceCudaKernelLaunchPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCudaKernelLaunchPropertiesNV is not nothrow_move_constructible!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +//=== VK_QCOM_tile_shading === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTileShadingFeaturesQCOM ) == sizeof( VkPhysicalDeviceTileShadingFeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceTileShadingFeaturesQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTileShadingPropertiesQCOM ) == sizeof( VkPhysicalDeviceTileShadingPropertiesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceTileShadingPropertiesQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassTileShadingCreateInfoQCOM ) == sizeof( VkRenderPassTileShadingCreateInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassTileShadingCreateInfoQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerTileBeginInfoQCOM ) == sizeof( VkPerTileBeginInfoQCOM ), "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PerTileBeginInfoQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerTileEndInfoQCOM ) == sizeof( VkPerTileEndInfoQCOM ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PerTileEndInfoQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchTileInfoQCOM ) == sizeof( VkDispatchTileInfoQCOM ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DispatchTileInfoQCOM is not nothrow_move_constructible!" ); + +//=== VK_NV_low_latency === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV ) == sizeof( VkQueryLowLatencySupportNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueryLowLatencySupportNV is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_METAL_EXT ) +//=== VK_EXT_metal_objects === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT ) == sizeof( VkExportMetalObjectCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportMetalObjectCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT ) == sizeof( VkExportMetalObjectsInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportMetalObjectsInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT ) == sizeof( VkExportMetalDeviceInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportMetalDeviceInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT ) == sizeof( VkExportMetalCommandQueueInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportMetalCommandQueueInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT ) == sizeof( VkExportMetalBufferInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportMetalBufferInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT ) == sizeof( VkImportMetalBufferInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportMetalBufferInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT ) == sizeof( VkExportMetalTextureInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportMetalTextureInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT ) == sizeof( VkImportMetalTextureInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportMetalTextureInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT ) == sizeof( VkExportMetalIOSurfaceInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportMetalIOSurfaceInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT ) == sizeof( VkImportMetalIOSurfaceInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportMetalIOSurfaceInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT ) == sizeof( VkExportMetalSharedEventInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportMetalSharedEventInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT ) == sizeof( VkImportMetalSharedEventInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportMetalSharedEventInfoEXT is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +//=== VK_EXT_descriptor_buffer === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT ) == + sizeof( VkPhysicalDeviceDescriptorBufferPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDescriptorBufferPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT ) == + sizeof( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT ) == sizeof( VkPhysicalDeviceDescriptorBufferFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDescriptorBufferFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT ) == sizeof( VkDescriptorAddressInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorAddressInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT ) == sizeof( VkDescriptorBufferBindingInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorBufferBindingInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT ) == + sizeof( VkDescriptorBufferBindingPushDescriptorBufferHandleEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorBufferBindingPushDescriptorBufferHandleEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorDataEXT ) == sizeof( VkDescriptorDataEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorDataEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT ) == sizeof( VkDescriptorGetInfoEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorGetInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT ) == sizeof( VkBufferCaptureDescriptorDataInfoEXT ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT ) == sizeof( VkImageCaptureDescriptorDataInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT ) == sizeof( VkImageViewCaptureDescriptorDataInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageViewCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT ) == sizeof( VkSamplerCaptureDescriptorDataInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT ) == sizeof( VkOpaqueCaptureDescriptorDataCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpaqueCaptureDescriptorDataCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT ) == + sizeof( VkAccelerationStructureCaptureDescriptorDataInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_graphics_pipeline_library === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT ) == + sizeof( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT ) == + sizeof( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT ) == sizeof( VkGraphicsPipelineLibraryCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GraphicsPipelineLibraryCreateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_AMD_shader_early_and_late_fragment_tests === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD ) == + sizeof( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD is not nothrow_move_constructible!" ); + +//=== VK_KHR_fragment_shader_barycentric === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR ) == + sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShaderBarycentricFeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR ) == + sizeof( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShaderBarycentricPropertiesKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_shader_subgroup_uniform_control_flow === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ) == + sizeof( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR is not nothrow_move_constructible!" ); + +//=== VK_NV_fragment_shading_rate_enums === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV ) == + sizeof( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV ) == + sizeof( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV ) == + sizeof( VkPipelineFragmentShadingRateEnumStateCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineFragmentShadingRateEnumStateCreateInfoNV is not nothrow_move_constructible!" ); + +//=== VK_NV_ray_tracing_motion_blur === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV ) == + sizeof( VkAccelerationStructureGeometryMotionTrianglesDataNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureGeometryMotionTrianglesDataNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV ) == sizeof( VkAccelerationStructureMotionInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureMotionInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV ) == sizeof( VkAccelerationStructureMotionInstanceNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureMotionInstanceNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV ) == sizeof( VkAccelerationStructureMotionInstanceDataNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureMotionInstanceDataNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV ) == + sizeof( VkAccelerationStructureMatrixMotionInstanceNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureMatrixMotionInstanceNV is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV ) == sizeof( VkAccelerationStructureSRTMotionInstanceNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureSRTMotionInstanceNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SRTDataNV ) == sizeof( VkSRTDataNV ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "SRTDataNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV ) == + sizeof( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingMotionBlurFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_EXT_mesh_shader === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMeshShaderFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMeshShaderPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT ) == sizeof( VkDrawMeshTasksIndirectCommandEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DrawMeshTasksIndirectCommandEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_ycbcr_2plane_444_formats === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ) == + sizeof( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_fragment_density_map2 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentDensityMap2FeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentDensityMap2PropertiesEXT is not nothrow_move_constructible!" ); + +//=== VK_QCOM_rotated_copy_commands === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM ) == sizeof( VkCopyCommandTransformInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyCommandTransformInfoQCOM is not nothrow_move_constructible!" ); + +//=== VK_KHR_workgroup_memory_explicit_layout === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ) == + sizeof( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR is not nothrow_move_constructible!" ); + +//=== VK_EXT_image_compression_control === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT ) == + sizeof( VkPhysicalDeviceImageCompressionControlFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageCompressionControlFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT ) == sizeof( VkImageCompressionControlEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageCompressionControlEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT ) == sizeof( VkImageCompressionPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageCompressionPropertiesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_attachment_feedback_loop_layout === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT ) == + sizeof( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_4444_formats === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT ) == sizeof( VkPhysicalDevice4444FormatsFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevice4444FormatsFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_device_fault === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT ) == sizeof( VkPhysicalDeviceFaultFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFaultFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT ) == sizeof( VkDeviceFaultCountsEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceFaultCountsEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT ) == sizeof( VkDeviceFaultInfoEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceFaultInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT ) == sizeof( VkDeviceFaultAddressInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceFaultAddressInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT ) == sizeof( VkDeviceFaultVendorInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceFaultVendorInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT ) == sizeof( VkDeviceFaultVendorBinaryHeaderVersionOneEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceFaultVendorBinaryHeaderVersionOneEXT is not nothrow_move_constructible!" 
); + +//=== VK_EXT_rgba10x6_formats === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT ) == sizeof( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRGBA10X6FormatsFeaturesEXT is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) +//=== VK_EXT_directfb_surface === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT ) == sizeof( VkDirectFBSurfaceCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DirectFBSurfaceCreateInfoEXT is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +//=== VK_EXT_vertex_input_dynamic_state === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT ) == + sizeof( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVertexInputDynamicStateFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT ) == sizeof( VkVertexInputBindingDescription2EXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VertexInputBindingDescription2EXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT ) == sizeof( VkVertexInputAttributeDescription2EXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VertexInputAttributeDescription2EXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_physical_device_drm === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT ) == sizeof( VkPhysicalDeviceDrmPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDrmPropertiesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_device_address_binding_report === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT ) == + sizeof( VkPhysicalDeviceAddressBindingReportFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceAddressBindingReportFeaturesEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT ) == sizeof( VkDeviceAddressBindingCallbackDataEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceAddressBindingCallbackDataEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_depth_clip_control === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipControlFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDepthClipControlFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT ) == + sizeof( VkPipelineViewportDepthClipControlCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineViewportDepthClipControlCreateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_primitive_topology_list_restart === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT ) == + sizeof( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_present_mode_fifo_latest_ready === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT ) == + sizeof( VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_FUCHSIA ) +//=== VK_FUCHSIA_external_memory === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA ) == sizeof( VkImportMemoryZirconHandleInfoFUCHSIA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportMemoryZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA ) == sizeof( VkMemoryZirconHandlePropertiesFUCHSIA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryZirconHandlePropertiesFUCHSIA is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA ) == sizeof( VkMemoryGetZirconHandleInfoFUCHSIA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryGetZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) +//=== VK_FUCHSIA_external_semaphore === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA ) == sizeof( VkImportSemaphoreZirconHandleInfoFUCHSIA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportSemaphoreZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA ) == sizeof( VkSemaphoreGetZirconHandleInfoFUCHSIA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SemaphoreGetZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) +//=== VK_FUCHSIA_buffer_collection === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA ) == sizeof( VkBufferCollectionFUCHSIA ), + "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "BufferCollectionFUCHSIA is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferCollectionFUCHSIA is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA ) == sizeof( VkBufferCollectionCreateInfoFUCHSIA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferCollectionCreateInfoFUCHSIA is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA ) == sizeof( VkImportMemoryBufferCollectionFUCHSIA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportMemoryBufferCollectionFUCHSIA is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA ) == sizeof( VkBufferCollectionImageCreateInfoFUCHSIA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferCollectionImageCreateInfoFUCHSIA is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA ) == sizeof( VkBufferConstraintsInfoFUCHSIA ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferConstraintsInfoFUCHSIA is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA ) == sizeof( VkBufferCollectionBufferCreateInfoFUCHSIA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferCollectionBufferCreateInfoFUCHSIA is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA ) == sizeof( VkBufferCollectionPropertiesFUCHSIA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferCollectionPropertiesFUCHSIA is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA ) == sizeof( VkSysmemColorSpaceFUCHSIA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SysmemColorSpaceFUCHSIA is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA ) == sizeof( VkImageConstraintsInfoFUCHSIA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageConstraintsInfoFUCHSIA is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA ) == sizeof( VkImageFormatConstraintsInfoFUCHSIA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageFormatConstraintsInfoFUCHSIA is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA ) == sizeof( VkBufferCollectionConstraintsInfoFUCHSIA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferCollectionConstraintsInfoFUCHSIA is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +//=== VK_HUAWEI_subpass_shading === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI ) == sizeof( VkSubpassShadingPipelineCreateInfoHUAWEI ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubpassShadingPipelineCreateInfoHUAWEI is not nothrow_move_constructible!" 
);
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI ) == sizeof( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI>::value, "PhysicalDeviceSubpassShadingFeaturesHUAWEI is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI ) == sizeof( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI>::value, "PhysicalDeviceSubpassShadingPropertiesHUAWEI is not nothrow_move_constructible!" );
+
+//=== VK_HUAWEI_invocation_mask ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI ) == sizeof( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI>::value, "PhysicalDeviceInvocationMaskFeaturesHUAWEI is not nothrow_move_constructible!" );
+
+//=== VK_NV_external_memory_rdma ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV ) == sizeof( VkMemoryGetRemoteAddressInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV>::value, "MemoryGetRemoteAddressInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV ) == sizeof( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV>::value, "PhysicalDeviceExternalMemoryRDMAFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_pipeline_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT ) == sizeof( VkPipelinePropertiesIdentifierEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT>::value, "PipelinePropertiesIdentifierEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT ) == sizeof( VkPhysicalDevicePipelinePropertiesFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT>::value, "PhysicalDevicePipelinePropertiesFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_frame_boundary ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFrameBoundaryFeaturesEXT ) == sizeof( VkPhysicalDeviceFrameBoundaryFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFrameBoundaryFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFrameBoundaryFeaturesEXT>::value, "PhysicalDeviceFrameBoundaryFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FrameBoundaryEXT ) == sizeof( VkFrameBoundaryEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FrameBoundaryEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FrameBoundaryEXT>::value, "FrameBoundaryEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_multisampled_render_to_single_sampled ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT ) == sizeof( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT>::value, "PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT ) == sizeof( VkSubpassResolvePerformanceQueryEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT>::value, "SubpassResolvePerformanceQueryEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT ) == sizeof( VkMultisampledRenderToSingleSampledInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT>::value, "MultisampledRenderToSingleSampledInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_extended_dynamic_state2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT ) == sizeof( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT>::value, "PhysicalDeviceExtendedDynamicState2FeaturesEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+//=== VK_QNX_screen_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX ) == sizeof( VkScreenSurfaceCreateInfoQNX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX>::value, "ScreenSurfaceCreateInfoQNX is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+//=== VK_EXT_color_write_enable ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceColorWriteEnableFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT>::value, "PhysicalDeviceColorWriteEnableFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT ) == sizeof( VkPipelineColorWriteCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT>::value, "PipelineColorWriteCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_primitives_generated_query ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT ) == sizeof( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT>::value, "PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_ray_tracing_maintenance1 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR ) == sizeof( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR>::value, "PhysicalDeviceRayTracingMaintenance1FeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR ) == sizeof( VkTraceRaysIndirectCommand2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR>::value, "TraceRaysIndirectCommand2KHR is not nothrow_move_constructible!" );
+
+//=== VK_EXT_image_view_min_lod ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT ) == sizeof( VkPhysicalDeviceImageViewMinLodFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT>::value, "PhysicalDeviceImageViewMinLodFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT ) == sizeof( VkImageViewMinLodCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT>::value, "ImageViewMinLodCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_multi_draw ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT ) == sizeof( VkPhysicalDeviceMultiDrawFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT>::value, "PhysicalDeviceMultiDrawFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT ) == sizeof( VkPhysicalDeviceMultiDrawPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT>::value, "PhysicalDeviceMultiDrawPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT ) == sizeof( VkMultiDrawInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT>::value, "MultiDrawInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT ) == sizeof( VkMultiDrawIndexedInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT>::value, "MultiDrawIndexedInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_image_2d_view_of_3d ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT ) == sizeof( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT>::value, "PhysicalDeviceImage2DViewOf3DFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_shader_tile_image ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderTileImageFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT>::value, "PhysicalDeviceShaderTileImageFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT ) == sizeof( VkPhysicalDeviceShaderTileImagePropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT>::value, "PhysicalDeviceShaderTileImagePropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_opacity_micromap ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT ) == sizeof( VkMicromapBuildInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT>::value, "struct wrapper is not a standard layout!"
);
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT>::value, "MicromapBuildInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapUsageEXT ) == sizeof( VkMicromapUsageEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapUsageEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapUsageEXT>::value, "MicromapUsageEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT ) == sizeof( VkMicromapCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT>::value, "MicromapCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapEXT ) == sizeof( VkMicromapEXT ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible<VULKAN_HPP_NAMESPACE::MicromapEXT>::value, "MicromapEXT is not copy_constructible!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapEXT>::value, "MicromapEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT ) == sizeof( VkPhysicalDeviceOpacityMicromapFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT>::value, "PhysicalDeviceOpacityMicromapFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT ) == sizeof( VkPhysicalDeviceOpacityMicromapPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT>::value, "PhysicalDeviceOpacityMicromapPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT ) == sizeof( VkMicromapVersionInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT>::value, "MicromapVersionInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT ) == sizeof( VkCopyMicromapToMemoryInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT>::value, "CopyMicromapToMemoryInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT ) == sizeof( VkCopyMemoryToMicromapInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT>::value, "CopyMemoryToMicromapInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT ) == sizeof( VkCopyMicromapInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT>::value, "CopyMicromapInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT ) == sizeof( VkMicromapBuildSizesInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT>::value, "MicromapBuildSizesInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT ) == sizeof( VkAccelerationStructureTrianglesOpacityMicromapEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT>::value, "AccelerationStructureTrianglesOpacityMicromapEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapTriangleEXT ) == sizeof( VkMicromapTriangleEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT>::value, "MicromapTriangleEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_NV_displacement_micromap ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV ) == sizeof( VkPhysicalDeviceDisplacementMicromapFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV>::value, "PhysicalDeviceDisplacementMicromapFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV ) == sizeof( VkPhysicalDeviceDisplacementMicromapPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV>::value, "PhysicalDeviceDisplacementMicromapPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesDisplacementMicromapNV ) == sizeof( VkAccelerationStructureTrianglesDisplacementMicromapNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesDisplacementMicromapNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesDisplacementMicromapNV>::value, "AccelerationStructureTrianglesDisplacementMicromapNV is not nothrow_move_constructible!"
);
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+//=== VK_HUAWEI_cluster_culling_shader ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI ) == sizeof( VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI>::value, "PhysicalDeviceClusterCullingShaderFeaturesHUAWEI is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI ) == sizeof( VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI>::value, "PhysicalDeviceClusterCullingShaderPropertiesHUAWEI is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI ) == sizeof( VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI>::value, "PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI is not nothrow_move_constructible!" );
+
+//=== VK_EXT_border_color_swizzle ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT ) == sizeof( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT>::value, "PhysicalDeviceBorderColorSwizzleFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT ) == sizeof( VkSamplerBorderColorComponentMappingCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT>::value, "SamplerBorderColorComponentMappingCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_pageable_device_local_memory ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT ) == sizeof( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>::value, "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_ARM_shader_core_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM ) == sizeof( VkPhysicalDeviceShaderCorePropertiesARM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM>::value, "PhysicalDeviceShaderCorePropertiesARM is not nothrow_move_constructible!" );
+
+//=== VK_ARM_scheduling_controls ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM ) == sizeof( VkDeviceQueueShaderCoreControlCreateInfoARM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM>::value, "DeviceQueueShaderCoreControlCreateInfoARM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM ) == sizeof( VkPhysicalDeviceSchedulingControlsFeaturesARM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM>::value, "PhysicalDeviceSchedulingControlsFeaturesARM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM ) == sizeof( VkPhysicalDeviceSchedulingControlsPropertiesARM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM>::value, "PhysicalDeviceSchedulingControlsPropertiesARM is not nothrow_move_constructible!" );
+
+//=== VK_EXT_image_sliced_view_of_3d ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT ) == sizeof( VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT>::value, "PhysicalDeviceImageSlicedViewOf3DFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT ) == sizeof( VkImageViewSlicedCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT>::value, "ImageViewSlicedCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_VALVE_descriptor_set_host_mapping ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE ) == sizeof( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>::value, "PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE ) == sizeof( VkDescriptorSetBindingReferenceVALVE ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE>::value, "DescriptorSetBindingReferenceVALVE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE ) == sizeof( VkDescriptorSetLayoutHostMappingInfoVALVE ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE>::value, "DescriptorSetLayoutHostMappingInfoVALVE is not nothrow_move_constructible!" );
+
+//=== VK_EXT_non_seamless_cube_map ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT ) == sizeof( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT>::value, "PhysicalDeviceNonSeamlessCubeMapFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_ARM_render_pass_striped ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedFeaturesARM ) == sizeof( VkPhysicalDeviceRenderPassStripedFeaturesARM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedFeaturesARM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedFeaturesARM>::value, "PhysicalDeviceRenderPassStripedFeaturesARM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedPropertiesARM ) == sizeof( VkPhysicalDeviceRenderPassStripedPropertiesARM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedPropertiesARM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedPropertiesARM>::value, "PhysicalDeviceRenderPassStripedPropertiesARM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassStripeBeginInfoARM ) == sizeof( VkRenderPassStripeBeginInfoARM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassStripeBeginInfoARM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassStripeBeginInfoARM>::value, "RenderPassStripeBeginInfoARM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM ) == sizeof( VkRenderPassStripeInfoARM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM>::value, "RenderPassStripeInfoARM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassStripeSubmitInfoARM ) == sizeof( VkRenderPassStripeSubmitInfoARM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassStripeSubmitInfoARM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassStripeSubmitInfoARM>::value, "RenderPassStripeSubmitInfoARM is not nothrow_move_constructible!"
); + +//=== VK_NV_copy_memory_indirect === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV ) == sizeof( VkCopyMemoryIndirectCommandNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyMemoryIndirectCommandNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV ) == sizeof( VkCopyMemoryToImageIndirectCommandNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyMemoryToImageIndirectCommandNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV ) == sizeof( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCopyMemoryIndirectFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV ) == + sizeof( VkPhysicalDeviceCopyMemoryIndirectPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCopyMemoryIndirectPropertiesNV is not nothrow_move_constructible!" ); + +//=== VK_NV_memory_decompression === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV ) == sizeof( VkDecompressMemoryRegionNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DecompressMemoryRegionNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV ) == + sizeof( VkPhysicalDeviceMemoryDecompressionFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMemoryDecompressionFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV ) == + sizeof( VkPhysicalDeviceMemoryDecompressionPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMemoryDecompressionPropertiesNV is not nothrow_move_constructible!" 
); + +//=== VK_NV_device_generated_commands_compute === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV ) == + sizeof( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV ) == sizeof( VkComputePipelineIndirectBufferInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ComputePipelineIndirectBufferInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV ) == sizeof( VkPipelineIndirectDeviceAddressInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineIndirectDeviceAddressInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV ) == sizeof( VkBindPipelineIndirectCommandNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindPipelineIndirectCommandNV is not nothrow_move_constructible!" ); + +//=== VK_NV_ray_tracing_linear_swept_spheres === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV ) == + sizeof( VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryLinearSweptSpheresDataNV ) == + sizeof( VkAccelerationStructureGeometryLinearSweptSpheresDataNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureGeometryLinearSweptSpheresDataNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometrySpheresDataNV ) == sizeof( VkAccelerationStructureGeometrySpheresDataNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureGeometrySpheresDataNV is not nothrow_move_constructible!" 
); + +//=== VK_NV_linear_color_attachment === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV ) == + sizeof( VkPhysicalDeviceLinearColorAttachmentFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLinearColorAttachmentFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_KHR_shader_maximal_reconvergence === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR ) == + sizeof( VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR is not nothrow_move_constructible!" ); + +//=== VK_EXT_image_compression_control_swapchain === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT ) == + sizeof( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_QCOM_image_processing === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM ) == sizeof( VkImageViewSampleWeightCreateInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageViewSampleWeightCreateInfoQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM ) == sizeof( VkPhysicalDeviceImageProcessingFeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageProcessingFeaturesQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM ) == + sizeof( VkPhysicalDeviceImageProcessingPropertiesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageProcessingPropertiesQCOM is not nothrow_move_constructible!" ); + +//=== VK_EXT_nested_command_buffer === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferFeaturesEXT ) == + sizeof( VkPhysicalDeviceNestedCommandBufferFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceNestedCommandBufferFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferPropertiesEXT ) == + sizeof( VkPhysicalDeviceNestedCommandBufferPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceNestedCommandBufferPropertiesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_external_memory_acquire_unmodified === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryAcquireUnmodifiedEXT ) == sizeof( VkExternalMemoryAcquireUnmodifiedEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalMemoryAcquireUnmodifiedEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_extended_dynamic_state3 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT ) == + sizeof( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExtendedDynamicState3FeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT ) == + sizeof( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExtendedDynamicState3PropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT ) == sizeof( VkColorBlendEquationEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ColorBlendEquationEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT ) == sizeof( VkColorBlendAdvancedEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ColorBlendAdvancedEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_subpass_merge_feedback === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT ) == + sizeof( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceSubpassMergeFeedbackFeaturesEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT ) == sizeof( VkRenderPassCreationControlEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassCreationControlEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT ) == sizeof( VkRenderPassCreationFeedbackInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassCreationFeedbackInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT ) == sizeof( VkRenderPassCreationFeedbackCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassCreationFeedbackCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT ) == sizeof( VkRenderPassSubpassFeedbackInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassSubpassFeedbackInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT ) == sizeof( VkRenderPassSubpassFeedbackCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassSubpassFeedbackCreateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_LUNARG_direct_driver_loading === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG ) == sizeof( VkDirectDriverLoadingInfoLUNARG ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DirectDriverLoadingInfoLUNARG is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG ) == sizeof( VkDirectDriverLoadingListLUNARG ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DirectDriverLoadingListLUNARG is not nothrow_move_constructible!" ); + +//=== VK_EXT_shader_module_identifier === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT ) == + sizeof( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderModuleIdentifierFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT ) == + sizeof( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderModuleIdentifierPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT ) == + sizeof( VkPipelineShaderStageModuleIdentifierCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineShaderStageModuleIdentifierCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT ) == sizeof( VkShaderModuleIdentifierEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ShaderModuleIdentifierEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_rasterization_order_attachment_access === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT ) == + sizeof( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_NV_optical_flow === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV ) == sizeof( VkPhysicalDeviceOpticalFlowFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceOpticalFlowFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV ) == sizeof( VkPhysicalDeviceOpticalFlowPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceOpticalFlowPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV ) == sizeof( VkOpticalFlowImageFormatInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpticalFlowImageFormatInfoNV is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV ) == sizeof( VkOpticalFlowImageFormatPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpticalFlowImageFormatPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV ) == sizeof( VkOpticalFlowSessionNV ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "OpticalFlowSessionNV is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpticalFlowSessionNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV ) == sizeof( VkOpticalFlowSessionCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpticalFlowSessionCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV ) == sizeof( VkOpticalFlowSessionCreatePrivateDataInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpticalFlowSessionCreatePrivateDataInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV ) == sizeof( VkOpticalFlowExecuteInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpticalFlowExecuteInfoNV is not nothrow_move_constructible!" ); + +//=== VK_EXT_legacy_dithering === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT ) == sizeof( VkPhysicalDeviceLegacyDitheringFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLegacyDitheringFeaturesEXT is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) +//=== VK_ANDROID_external_format_resolve === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolveFeaturesANDROID ) == + sizeof( VkPhysicalDeviceExternalFormatResolveFeaturesANDROID ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalFormatResolveFeaturesANDROID is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolvePropertiesANDROID ) == + sizeof( VkPhysicalDeviceExternalFormatResolvePropertiesANDROID ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalFormatResolvePropertiesANDROID is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatResolvePropertiesANDROID ) == + sizeof( VkAndroidHardwareBufferFormatResolvePropertiesANDROID ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AndroidHardwareBufferFormatResolvePropertiesANDROID is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +//=== VK_AMD_anti_lag === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAntiLagFeaturesAMD ) == sizeof( VkPhysicalDeviceAntiLagFeaturesAMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceAntiLagFeaturesAMD is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AntiLagDataAMD ) == sizeof( VkAntiLagDataAMD ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AntiLagDataAMD is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD ) == sizeof( VkAntiLagPresentationInfoAMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AntiLagPresentationInfoAMD is not nothrow_move_constructible!" ); + +//=== VK_KHR_ray_tracing_position_fetch === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR ) == + sizeof( VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingPositionFetchFeaturesKHR is not nothrow_move_constructible!" ); + +//=== VK_EXT_shader_object === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderEXT ) == sizeof( VkShaderEXT ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "ShaderEXT is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ShaderEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderObjectFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderObjectFeaturesEXT is not nothrow_move_constructible!" 
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT ) == sizeof( VkPhysicalDeviceShaderObjectPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT>::value,
+                          "PhysicalDeviceShaderObjectPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT ) == sizeof( VkShaderCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT>::value,
+                          "ShaderCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_pipeline_binary ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryFeaturesKHR ) == sizeof( VkPhysicalDevicePipelineBinaryFeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryFeaturesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryFeaturesKHR>::value,
+                          "PhysicalDevicePipelineBinaryFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryPropertiesKHR ) == sizeof( VkPhysicalDevicePipelineBinaryPropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryPropertiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryPropertiesKHR>::value,
+                          "PhysicalDevicePipelineBinaryPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DevicePipelineBinaryInternalCacheControlKHR ) ==
+                            sizeof( VkDevicePipelineBinaryInternalCacheControlKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DevicePipelineBinaryInternalCacheControlKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DevicePipelineBinaryInternalCacheControlKHR>::value,
+                          "DevicePipelineBinaryInternalCacheControlKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR ) == sizeof( VkPipelineBinaryKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR>::value, "PipelineBinaryKHR is not copy_constructible!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineBinaryKHR>::value,
+                          "PipelineBinaryKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR ) == sizeof( VkPipelineBinaryKeyKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR>::value,
+                          "PipelineBinaryKeyKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR ) == sizeof( VkPipelineBinaryDataKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR>::value,
+                          "PipelineBinaryDataKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR ) == sizeof( VkPipelineBinaryKeysAndDataKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR>::value,
+                          "PipelineBinaryKeysAndDataKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR ) == sizeof( VkPipelineBinaryCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR>::value,
+                          "PipelineBinaryCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryInfoKHR ) == sizeof( VkPipelineBinaryInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineBinaryInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineBinaryInfoKHR>::value,
+                          "PipelineBinaryInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR ) == sizeof( VkReleaseCapturedPipelineDataInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR>::value,
+                          "ReleaseCapturedPipelineDataInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR ) == sizeof( VkPipelineBinaryDataInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR>::value,
+                          "PipelineBinaryDataInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR ) == sizeof( VkPipelineCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR>::value,
+                          "PipelineCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR ) == sizeof( VkPipelineBinaryHandlesInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR>::value,
+                          "PipelineBinaryHandlesInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_tile_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM ) == sizeof( VkPhysicalDeviceTilePropertiesFeaturesQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM>::value,
+                          "PhysicalDeviceTilePropertiesFeaturesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TilePropertiesQCOM ) == sizeof( VkTilePropertiesQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>::value,
+                          "TilePropertiesQCOM is not nothrow_move_constructible!" );
+
+//=== VK_SEC_amigo_profiling ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC ) == sizeof( VkPhysicalDeviceAmigoProfilingFeaturesSEC ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC>::value,
+                          "PhysicalDeviceAmigoProfilingFeaturesSEC is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC ) == sizeof( VkAmigoProfilingSubmitInfoSEC ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC>::value,
+                          "AmigoProfilingSubmitInfoSEC is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_multiview_per_view_viewports ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM ) ==
+                            sizeof( VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM>::value,
+                          "PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM is not nothrow_move_constructible!" );
+
+//=== VK_NV_ray_tracing_invocation_reorder ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV ) ==
+                            sizeof( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV>::value,
+                          "PhysicalDeviceRayTracingInvocationReorderPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV>::value,
+                          "PhysicalDeviceRayTracingInvocationReorderFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_cooperative_vector ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeVectorPropertiesNV ) ==
+                            sizeof( VkPhysicalDeviceCooperativeVectorPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeVectorPropertiesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeVectorPropertiesNV>::value,
+                          "PhysicalDeviceCooperativeVectorPropertiesNV is not nothrow_move_constructible!"
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeVectorFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeVectorFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeVectorFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeVectorPropertiesNV ) == sizeof( VkCooperativeVectorPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CooperativeVectorPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV ) == sizeof( VkConvertCooperativeVectorMatrixInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ConvertCooperativeVectorMatrixInfoNV is not nothrow_move_constructible!" ); + +//=== VK_NV_extended_sparse_address_space === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV ) == + sizeof( VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpacePropertiesNV ) == + sizeof( VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExtendedSparseAddressSpacePropertiesNV is not nothrow_move_constructible!" ); + +//=== VK_EXT_mutable_descriptor_type === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT ) == + sizeof( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMutableDescriptorTypeFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT ) == sizeof( VkMutableDescriptorTypeListEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MutableDescriptorTypeListEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT ) == sizeof( VkMutableDescriptorTypeCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MutableDescriptorTypeCreateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_legacy_vertex_attributes === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesFeaturesEXT ) == + sizeof( VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLegacyVertexAttributesFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesPropertiesEXT ) == + sizeof( VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLegacyVertexAttributesPropertiesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_layer_settings === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LayerSettingsCreateInfoEXT ) == sizeof( VkLayerSettingsCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LayerSettingsCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LayerSettingEXT ) == sizeof( VkLayerSettingEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LayerSettingEXT is not nothrow_move_constructible!" ); + +//=== VK_ARM_shader_core_builtins === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM ) == + sizeof( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderCoreBuiltinsFeaturesARM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM ) == + sizeof( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderCoreBuiltinsPropertiesARM is not nothrow_move_constructible!" 
); + +//=== VK_EXT_pipeline_library_group_handles === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT ) == + sizeof( VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_dynamic_rendering_unused_attachments === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT ) == + sizeof( VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_NV_low_latency2 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV ) == sizeof( VkLatencySleepModeInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LatencySleepModeInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencySleepInfoNV ) == sizeof( VkLatencySleepInfoNV ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LatencySleepInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV ) == sizeof( VkSetLatencyMarkerInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SetLatencyMarkerInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV ) == sizeof( VkGetLatencyMarkerInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GetLatencyMarkerInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV ) == sizeof( VkLatencyTimingsFrameReportNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LatencyTimingsFrameReportNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencySubmissionPresentIdNV ) == sizeof( VkLatencySubmissionPresentIdNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LatencySubmissionPresentIdNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainLatencyCreateInfoNV ) == sizeof( VkSwapchainLatencyCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SwapchainLatencyCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV ) == sizeof( VkOutOfBandQueueTypeInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OutOfBandQueueTypeInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencySurfaceCapabilitiesNV ) == sizeof( VkLatencySurfaceCapabilitiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LatencySurfaceCapabilitiesNV is not nothrow_move_constructible!" ); + +//=== VK_KHR_cooperative_matrix === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR ) == sizeof( VkCooperativeMatrixPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CooperativeMatrixPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesKHR ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrixFeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesKHR ) == + sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrixPropertiesKHR is not nothrow_move_constructible!" ); + +//=== VK_QCOM_multiview_per_view_render_areas === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM ) == + sizeof( VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM ) == + sizeof( VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM is not nothrow_move_constructible!" ); + +//=== VK_KHR_compute_shader_derivatives === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesKHR ) == + sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceComputeShaderDerivativesFeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesPropertiesKHR ) == + sizeof( VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceComputeShaderDerivativesPropertiesKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_video_decode_av1 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeAV1ProfileInfoKHR ) == sizeof( VkVideoDecodeAV1ProfileInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeAV1ProfileInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeAV1CapabilitiesKHR ) == sizeof( VkVideoDecodeAV1CapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeAV1CapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeAV1SessionParametersCreateInfoKHR ) == + sizeof( VkVideoDecodeAV1SessionParametersCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeAV1SessionParametersCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeAV1PictureInfoKHR ) == sizeof( VkVideoDecodeAV1PictureInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeAV1PictureInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeAV1DpbSlotInfoKHR ) == sizeof( VkVideoDecodeAV1DpbSlotInfoKHR ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeAV1DpbSlotInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_video_encode_av1 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeAV1FeaturesKHR ) == sizeof( VkPhysicalDeviceVideoEncodeAV1FeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVideoEncodeAV1FeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilitiesKHR ) == sizeof( VkVideoEncodeAV1CapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1CapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QualityLevelPropertiesKHR ) == sizeof( VkVideoEncodeAV1QualityLevelPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1QualityLevelPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionCreateInfoKHR ) == sizeof( VkVideoEncodeAV1SessionCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1SessionCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionParametersCreateInfoKHR ) == + sizeof( VkVideoEncodeAV1SessionParametersCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1SessionParametersCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1PictureInfoKHR ) == sizeof( VkVideoEncodeAV1PictureInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1PictureInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1DpbSlotInfoKHR ) == sizeof( VkVideoEncodeAV1DpbSlotInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1DpbSlotInfoKHR is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1ProfileInfoKHR ) == sizeof( VkVideoEncodeAV1ProfileInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1ProfileInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR ) == sizeof( VkVideoEncodeAV1QIndexKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1QIndexKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR ) == sizeof( VkVideoEncodeAV1FrameSizeKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1FrameSizeKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1GopRemainingFrameInfoKHR ) == sizeof( VkVideoEncodeAV1GopRemainingFrameInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1GopRemainingFrameInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlInfoKHR ) == sizeof( VkVideoEncodeAV1RateControlInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1RateControlInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlLayerInfoKHR ) == sizeof( VkVideoEncodeAV1RateControlLayerInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1RateControlLayerInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_video_maintenance1 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance1FeaturesKHR ) == sizeof( VkPhysicalDeviceVideoMaintenance1FeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVideoMaintenance1FeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoInlineQueryInfoKHR ) == sizeof( VkVideoInlineQueryInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoInlineQueryInfoKHR is not nothrow_move_constructible!" 
); + +//=== VK_NV_per_stage_descriptor_set === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerStageDescriptorSetFeaturesNV ) == + sizeof( VkPhysicalDevicePerStageDescriptorSetFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePerStageDescriptorSetFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_QCOM_image_processing2 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2FeaturesQCOM ) == sizeof( VkPhysicalDeviceImageProcessing2FeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageProcessing2FeaturesQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2PropertiesQCOM ) == + sizeof( VkPhysicalDeviceImageProcessing2PropertiesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageProcessing2PropertiesQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerBlockMatchWindowCreateInfoQCOM ) == sizeof( VkSamplerBlockMatchWindowCreateInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerBlockMatchWindowCreateInfoQCOM is not nothrow_move_constructible!" ); + +//=== VK_QCOM_filter_cubic_weights === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicWeightsFeaturesQCOM ) == sizeof( VkPhysicalDeviceCubicWeightsFeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCubicWeightsFeaturesQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCubicWeightsCreateInfoQCOM ) == sizeof( VkSamplerCubicWeightsCreateInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerCubicWeightsCreateInfoQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BlitImageCubicWeightsInfoQCOM ) == sizeof( VkBlitImageCubicWeightsInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BlitImageCubicWeightsInfoQCOM is not nothrow_move_constructible!" 
); + +//=== VK_QCOM_ycbcr_degamma === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrDegammaFeaturesQCOM ) == sizeof( VkPhysicalDeviceYcbcrDegammaFeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceYcbcrDegammaFeaturesQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM ) == + sizeof( VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM is not nothrow_move_constructible!" ); + +//=== VK_QCOM_filter_cubic_clamp === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicClampFeaturesQCOM ) == sizeof( VkPhysicalDeviceCubicClampFeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCubicClampFeaturesQCOM is not nothrow_move_constructible!" ); + +//=== VK_EXT_attachment_feedback_loop_dynamic_state === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT ) == + sizeof( VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) +//=== VK_QNX_external_memory_screen_buffer === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX ) == sizeof( VkScreenBufferPropertiesQNX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ScreenBufferPropertiesQNX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ScreenBufferFormatPropertiesQNX ) == sizeof( VkScreenBufferFormatPropertiesQNX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ScreenBufferFormatPropertiesQNX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportScreenBufferInfoQNX ) == sizeof( VkImportScreenBufferInfoQNX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportScreenBufferInfoQNX is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFormatQNX ) == sizeof( VkExternalFormatQNX ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalFormatQNX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX ) == + sizeof( VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + +//=== VK_MSFT_layered_driver === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredDriverPropertiesMSFT ) == sizeof( VkPhysicalDeviceLayeredDriverPropertiesMSFT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLayeredDriverPropertiesMSFT is not nothrow_move_constructible!" ); + +//=== VK_KHR_calibrated_timestamps === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR ) == sizeof( VkCalibratedTimestampInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CalibratedTimestampInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_maintenance6 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT ) == sizeof( VkSetDescriptorBufferOffsetsInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SetDescriptorBufferOffsetsInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT ) == + sizeof( VkBindDescriptorBufferEmbeddedSamplersInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindDescriptorBufferEmbeddedSamplersInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_NV_descriptor_pool_overallocation === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV ) == + sizeof( VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDescriptorPoolOverallocationFeaturesNV is not nothrow_move_constructible!" 
); + +//=== VK_QCOM_tile_memory_heap === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTileMemoryHeapFeaturesQCOM ) == sizeof( VkPhysicalDeviceTileMemoryHeapFeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceTileMemoryHeapFeaturesQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTileMemoryHeapPropertiesQCOM ) == sizeof( VkPhysicalDeviceTileMemoryHeapPropertiesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceTileMemoryHeapPropertiesQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TileMemoryRequirementsQCOM ) == sizeof( VkTileMemoryRequirementsQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "TileMemoryRequirementsQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TileMemoryBindInfoQCOM ) == sizeof( VkTileMemoryBindInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "TileMemoryBindInfoQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TileMemorySizeInfoQCOM ) == sizeof( VkTileMemorySizeInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "TileMemorySizeInfoQCOM is not nothrow_move_constructible!" ); + +//=== VK_NV_display_stereo === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoCreateInfoNV ) == sizeof( VkDisplaySurfaceStereoCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplaySurfaceStereoCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeStereoPropertiesNV ) == sizeof( VkDisplayModeStereoPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayModeStereoPropertiesNV is not nothrow_move_constructible!" ); + +//=== VK_KHR_video_encode_quantization_map === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapCapabilitiesKHR ) == sizeof( VkVideoEncodeQuantizationMapCapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeQuantizationMapCapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoFormatQuantizationMapPropertiesKHR ) == sizeof( VkVideoFormatQuantizationMapPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoFormatQuantizationMapPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapInfoKHR ) == sizeof( VkVideoEncodeQuantizationMapInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeQuantizationMapInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapSessionParametersCreateInfoKHR ) == + sizeof( VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeQuantizationMapSessionParametersCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR ) == + sizeof( VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264QuantizationMapCapabilitiesKHR ) == + sizeof( VkVideoEncodeH264QuantizationMapCapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264QuantizationMapCapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265QuantizationMapCapabilitiesKHR ) == + sizeof( VkVideoEncodeH265QuantizationMapCapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265QuantizationMapCapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoFormatH265QuantizationMapPropertiesKHR ) == + sizeof( VkVideoFormatH265QuantizationMapPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoFormatH265QuantizationMapPropertiesKHR is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QuantizationMapCapabilitiesKHR ) == + sizeof( VkVideoEncodeAV1QuantizationMapCapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1QuantizationMapCapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoFormatAV1QuantizationMapPropertiesKHR ) == sizeof( VkVideoFormatAV1QuantizationMapPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoFormatAV1QuantizationMapPropertiesKHR is not nothrow_move_constructible!" ); + +//=== VK_NV_raw_access_chains === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRawAccessChainsFeaturesNV ) == sizeof( VkPhysicalDeviceRawAccessChainsFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRawAccessChainsFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_NV_external_compute_queue === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalComputeQueueNV ) == sizeof( VkExternalComputeQueueNV ), + "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "ExternalComputeQueueNV is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalComputeQueueNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalComputeQueueDeviceCreateInfoNV ) == sizeof( VkExternalComputeQueueDeviceCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalComputeQueueDeviceCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalComputeQueueCreateInfoNV ) == sizeof( VkExternalComputeQueueCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalComputeQueueCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalComputeQueueDataParamsNV ) == sizeof( VkExternalComputeQueueDataParamsNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalComputeQueueDataParamsNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalComputeQueuePropertiesNV ) == + sizeof( VkPhysicalDeviceExternalComputeQueuePropertiesNV ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalComputeQueuePropertiesNV is not nothrow_move_constructible!" ); + +//=== VK_KHR_shader_relaxed_extended_instruction === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR ) == + sizeof( VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR is not nothrow_move_constructible!" ); + +//=== VK_NV_command_buffer_inheritance === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCommandBufferInheritanceFeaturesNV ) == + sizeof( VkPhysicalDeviceCommandBufferInheritanceFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCommandBufferInheritanceFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_KHR_maintenance7 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance7FeaturesKHR ) == sizeof( VkPhysicalDeviceMaintenance7FeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance7FeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance7PropertiesKHR ) == sizeof( VkPhysicalDeviceMaintenance7PropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance7PropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesListKHR ) == sizeof( VkPhysicalDeviceLayeredApiPropertiesListKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLayeredApiPropertiesListKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR ) == sizeof( VkPhysicalDeviceLayeredApiPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLayeredApiPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiVulkanPropertiesKHR ) == + sizeof( VkPhysicalDeviceLayeredApiVulkanPropertiesKHR ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLayeredApiVulkanPropertiesKHR is not nothrow_move_constructible!" ); + +//=== VK_NV_shader_atomic_float16_vector === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV ) == + sizeof( VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_EXT_shader_replicated_composites === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderReplicatedCompositesFeaturesEXT ) == + sizeof( VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderReplicatedCompositesFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_NV_ray_tracing_validation === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingValidationFeaturesNV ) == + sizeof( VkPhysicalDeviceRayTracingValidationFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingValidationFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_NV_cluster_acceleration_structure === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterAccelerationStructureFeaturesNV ) == + sizeof( VkPhysicalDeviceClusterAccelerationStructureFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceClusterAccelerationStructureFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterAccelerationStructurePropertiesNV ) == + sizeof( VkPhysicalDeviceClusterAccelerationStructurePropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceClusterAccelerationStructurePropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClustersBottomLevelInputNV ) == + sizeof( VkClusterAccelerationStructureClustersBottomLevelInputNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureClustersBottomLevelInputNV is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTriangleClusterInputNV ) == + sizeof( VkClusterAccelerationStructureTriangleClusterInputNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureTriangleClusterInputNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInputNV ) == + sizeof( VkClusterAccelerationStructureMoveObjectsInputNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureMoveObjectsInputNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpInputNV ) == sizeof( VkClusterAccelerationStructureOpInputNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureOpInputNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV ) == sizeof( VkClusterAccelerationStructureInputInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureInputInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureCommandsInfoNV ) == sizeof( VkClusterAccelerationStructureCommandsInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureCommandsInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV ) == sizeof( VkStridedDeviceAddressNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "StridedDeviceAddressNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV ) == + sizeof( VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInfoNV ) == + sizeof( VkClusterAccelerationStructureMoveObjectsInfoNV ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureMoveObjectsInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildClustersBottomLevelInfoNV ) == + sizeof( VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureBuildClustersBottomLevelInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildTriangleClusterInfoNV ) == + sizeof( VkClusterAccelerationStructureBuildTriangleClusterInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureBuildTriangleClusterInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV ) == + sizeof( VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInstantiateClusterInfoNV ) == + sizeof( VkClusterAccelerationStructureInstantiateClusterInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureInstantiateClusterInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineClusterAccelerationStructureCreateInfoNV ) == + sizeof( VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RayTracingPipelineClusterAccelerationStructureCreateInfoNV is not nothrow_move_constructible!" ); + +//=== VK_NV_partitioned_acceleration_structure === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePartitionedAccelerationStructureFeaturesNV ) == + sizeof( VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePartitionedAccelerationStructureFeaturesNV is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePartitionedAccelerationStructurePropertiesNV ) == + sizeof( VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePartitionedAccelerationStructurePropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureFlagsNV ) == sizeof( VkPartitionedAccelerationStructureFlagsNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PartitionedAccelerationStructureFlagsNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureIndirectCommandNV ) == + sizeof( VkBuildPartitionedAccelerationStructureIndirectCommandNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BuildPartitionedAccelerationStructureIndirectCommandNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureWriteInstanceDataNV ) == + sizeof( VkPartitionedAccelerationStructureWriteInstanceDataNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PartitionedAccelerationStructureWriteInstanceDataNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureUpdateInstanceDataNV ) == + sizeof( VkPartitionedAccelerationStructureUpdateInstanceDataNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PartitionedAccelerationStructureUpdateInstanceDataNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureWritePartitionTranslationDataNV ) == + sizeof( VkPartitionedAccelerationStructureWritePartitionTranslationDataNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PartitionedAccelerationStructureWritePartitionTranslationDataNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetPartitionedAccelerationStructureNV ) == + sizeof( VkWriteDescriptorSetPartitionedAccelerationStructureNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "WriteDescriptorSetPartitionedAccelerationStructureNV is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV ) == + sizeof( VkPartitionedAccelerationStructureInstancesInputNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PartitionedAccelerationStructureInstancesInputNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureInfoNV ) == + sizeof( VkBuildPartitionedAccelerationStructureInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BuildPartitionedAccelerationStructureInfoNV is not nothrow_move_constructible!" ); + +//=== VK_EXT_device_generated_commands === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT ) == + sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT ) == + sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT ) == sizeof( VkGeneratedCommandsMemoryRequirementsInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeneratedCommandsMemoryRequirementsInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT ) == sizeof( VkIndirectExecutionSetCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectExecutionSetCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT ) == sizeof( VkIndirectExecutionSetInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectExecutionSetInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT ) == sizeof( VkIndirectExecutionSetPipelineInfoEXT ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectExecutionSetPipelineInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT ) == sizeof( VkIndirectExecutionSetShaderInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectExecutionSetShaderInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT ) == sizeof( VkGeneratedCommandsInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeneratedCommandsInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT ) == sizeof( VkWriteIndirectExecutionSetPipelineEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "WriteIndirectExecutionSetPipelineEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT ) == sizeof( VkIndirectCommandsLayoutCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsLayoutCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT ) == sizeof( VkIndirectCommandsLayoutTokenEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsLayoutTokenEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawIndirectCountIndirectCommandEXT ) == sizeof( VkDrawIndirectCountIndirectCommandEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DrawIndirectCountIndirectCommandEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT ) == sizeof( VkIndirectCommandsVertexBufferTokenEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsVertexBufferTokenEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandEXT ) == sizeof( VkBindVertexBufferIndirectCommandEXT ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindVertexBufferIndirectCommandEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT ) == sizeof( VkIndirectCommandsIndexBufferTokenEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsIndexBufferTokenEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandEXT ) == sizeof( VkBindIndexBufferIndirectCommandEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindIndexBufferIndirectCommandEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT ) == sizeof( VkIndirectCommandsPushConstantTokenEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsPushConstantTokenEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT ) == sizeof( VkIndirectCommandsExecutionSetTokenEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsExecutionSetTokenEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenDataEXT ) == sizeof( VkIndirectCommandsTokenDataEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsTokenDataEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT ) == sizeof( VkIndirectCommandsLayoutEXT ), + "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "IndirectCommandsLayoutEXT is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsLayoutEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT ) == sizeof( VkIndirectExecutionSetEXT ), + "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "IndirectExecutionSetEXT is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectExecutionSetEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT ) == sizeof( VkIndirectExecutionSetShaderLayoutInfoEXT ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectExecutionSetShaderLayoutInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsPipelineInfoEXT ) == sizeof( VkGeneratedCommandsPipelineInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeneratedCommandsPipelineInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsShaderInfoEXT ) == sizeof( VkGeneratedCommandsShaderInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeneratedCommandsShaderInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT ) == sizeof( VkWriteIndirectExecutionSetShaderEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "WriteIndirectExecutionSetShaderEXT is not nothrow_move_constructible!" ); + +//=== VK_KHR_maintenance8 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance8FeaturesKHR ) == sizeof( VkPhysicalDeviceMaintenance8FeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance8FeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrierAccessFlags3KHR ) == sizeof( VkMemoryBarrierAccessFlags3KHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryBarrierAccessFlags3KHR is not nothrow_move_constructible!" ); + +//=== VK_MESA_image_alignment_control === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlFeaturesMESA ) == + sizeof( VkPhysicalDeviceImageAlignmentControlFeaturesMESA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageAlignmentControlFeaturesMESA is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlPropertiesMESA ) == + sizeof( VkPhysicalDeviceImageAlignmentControlPropertiesMESA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageAlignmentControlPropertiesMESA is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageAlignmentControlCreateInfoMESA ) == sizeof( VkImageAlignmentControlCreateInfoMESA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageAlignmentControlCreateInfoMESA is not nothrow_move_constructible!" ); + +//=== VK_EXT_depth_clamp_control === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampControlFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClampControlFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDepthClampControlFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportDepthClampControlCreateInfoEXT ) == + sizeof( VkPipelineViewportDepthClampControlCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineViewportDepthClampControlCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DepthClampRangeEXT ) == sizeof( VkDepthClampRangeEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DepthClampRangeEXT is not nothrow_move_constructible!" ); + +//=== VK_KHR_video_maintenance2 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance2FeaturesKHR ) == sizeof( VkPhysicalDeviceVideoMaintenance2FeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVideoMaintenance2FeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264InlineSessionParametersInfoKHR ) == + sizeof( VkVideoDecodeH264InlineSessionParametersInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH264InlineSessionParametersInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265InlineSessionParametersInfoKHR ) == + sizeof( VkVideoDecodeH265InlineSessionParametersInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH265InlineSessionParametersInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeAV1InlineSessionParametersInfoKHR ) == + sizeof( VkVideoDecodeAV1InlineSessionParametersInfoKHR ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeAV1InlineSessionParametersInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_HUAWEI_hdr_vivid === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHdrVividFeaturesHUAWEI ) == sizeof( VkPhysicalDeviceHdrVividFeaturesHUAWEI ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceHdrVividFeaturesHUAWEI is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HdrVividDynamicMetadataHUAWEI ) == sizeof( VkHdrVividDynamicMetadataHUAWEI ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "HdrVividDynamicMetadataHUAWEI is not nothrow_move_constructible!" ); + +//=== VK_NV_cooperative_matrix2 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV ) == + sizeof( VkCooperativeMatrixFlexibleDimensionsPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CooperativeMatrixFlexibleDimensionsPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2FeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrix2FeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrix2FeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2PropertiesNV ) == + sizeof( VkPhysicalDeviceCooperativeMatrix2PropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrix2PropertiesNV is not nothrow_move_constructible!" ); + +//=== VK_ARM_pipeline_opacity_micromap === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineOpacityMicromapFeaturesARM ) == + sizeof( VkPhysicalDevicePipelineOpacityMicromapFeaturesARM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineOpacityMicromapFeaturesARM is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_METAL_EXT ) +//=== VK_EXT_external_memory_metal === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryMetalHandleInfoEXT ) == sizeof( VkImportMemoryMetalHandleInfoEXT ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportMemoryMetalHandleInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT ) == sizeof( VkMemoryMetalHandlePropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryMetalHandlePropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT ) == sizeof( VkMemoryGetMetalHandleInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryGetMetalHandleInfoEXT is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +//=== VK_KHR_depth_clamp_zero_one === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesKHR ) == sizeof( VkPhysicalDeviceDepthClampZeroOneFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDepthClampZeroOneFeaturesKHR is not nothrow_move_constructible!" ); + +//=== VK_EXT_vertex_attribute_robustness === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeRobustnessFeaturesEXT ) == + sizeof( VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVertexAttributeRobustnessFeaturesEXT is not nothrow_move_constructible!" ); + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) +//=== VK_NV_present_metering === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SetPresentConfigNV ) == sizeof( VkSetPresentConfigNV ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SetPresentConfigNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentMeteringFeaturesNV ) == sizeof( VkPhysicalDevicePresentMeteringFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePresentMeteringFeaturesNV is not nothrow_move_constructible!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +//=== VK_EXT_fragment_density_map_offset === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingEndInfoEXT ) == sizeof( VkRenderingEndInfoEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingEndInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapOffsetEndInfoEXT ) == + sizeof( VkRenderPassFragmentDensityMapOffsetEndInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassFragmentDensityMapOffsetEndInfoEXT is not nothrow_move_constructible!" ); + +#endif diff --git a/include/vulkan/vulkan_structs.hpp b/include/vulkan/vulkan_structs.hpp new file mode 100644 index 00000000..6181aad2 --- /dev/null +++ b/include/vulkan/vulkan_structs.hpp @@ -0,0 +1,157533 @@ +// Copyright 2015-2025 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
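Every assertion in the block that just closed follows the same three-part pattern: the C++ wrapper must have exactly the size of the corresponding Vulkan C struct, must be standard-layout, and must be nothrow-move-constructible, which is what makes the reinterpret_cast-based conversions used throughout these headers well defined. A minimal sketch of that check for a single, made-up wrapper type (MyWrapper and VkMyStruct are placeholders for illustration, not types from this diff):

#include <cstdint>
#include <type_traits>

// Hypothetical C struct and its C++ wrapper -- placeholders only.
struct VkMyStruct { uint32_t value; };
struct MyWrapper  { uint32_t value = {}; };

// The three properties asserted above for every generated wrapper:
static_assert( sizeof( MyWrapper ) == sizeof( VkMyStruct ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MyWrapper>::value, "struct wrapper is not a standard layout!" );
static_assert( std::is_nothrow_move_constructible<MyWrapper>::value, "MyWrapper is not nothrow_move_constructible!" );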
+
+#ifndef VULKAN_STRUCTS_HPP
+#define VULKAN_STRUCTS_HPP
+
+// include-what-you-use: make sure, vulkan.hpp is used by code-completers
+// IWYU pragma: private; include "vulkan.hpp"
+
+#include <cstring>  // strcmp
+
+namespace VULKAN_HPP_NAMESPACE
+{
+
+  //===============
+  //=== STRUCTS ===
+  //===============
+
+  // wrapper struct for struct VkAabbPositionsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAabbPositionsKHR.html
+  struct AabbPositionsKHR
+  {
+    using NativeType = VkAabbPositionsKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      AabbPositionsKHR( float minX_ = {}, float minY_ = {}, float minZ_ = {}, float maxX_ = {}, float maxY_ = {}, float maxZ_ = {} ) VULKAN_HPP_NOEXCEPT
+      : minX{ minX_ }
+      , minY{ minY_ }
+      , minZ{ minZ_ }
+      , maxX{ maxX_ }
+      , maxY{ maxY_ }
+      , maxZ{ maxZ_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AabbPositionsKHR( *reinterpret_cast<AabbPositionsKHR const *>( &rhs ) ) {}
+
+    AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    AabbPositionsKHR & operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AabbPositionsKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinX( float minX_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minX = minX_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinY( float minY_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minY = minY_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinZ( float minZ_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minZ = minZ_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxX( float maxX_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxX = maxX_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxY( float maxY_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxY = maxY_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxZ( float maxZ_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxZ = maxZ_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkAabbPositionsKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAabbPositionsKHR *>( this );
+    }
+
+    operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAabbPositionsKHR *>( this );
+    }
+
+    operator VkAabbPositionsKHR const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkAabbPositionsKHR *>( this );
+    }
+
+    operator VkAabbPositionsKHR *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkAabbPositionsKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<float const &, float const &, float const &, float const &, float const &, float const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( minX, minY, minZ, maxX, maxY, maxZ );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( AabbPositionsKHR const & ) const = default;
+#else
+    bool operator==( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( minX == rhs.minX ) && ( minY == rhs.minY ) && ( minZ == rhs.minZ ) && ( maxX == rhs.maxX ) && ( maxY == rhs.maxY ) && ( maxZ == rhs.maxZ );
+# endif
+    }
+
+    bool operator!=( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    float
minX = {}; + float minY = {}; + float minZ = {}; + float maxX = {}; + float maxY = {}; + float maxZ = {}; + }; + + using AabbPositionsNV = AabbPositionsKHR; + + union DeviceOrHostAddressConstKHR + { + using NativeType = VkDeviceOrHostAddressConstKHR; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {} + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( const void * hostAddress_ ) : hostAddress( hostAddress_ ) {} +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT + { + hostAddress = hostAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceOrHostAddressConstKHR const &() const + { + return *reinterpret_cast( this ); + } + + operator VkDeviceOrHostAddressConstKHR &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress; + const void * hostAddress; +#else + VkDeviceAddress deviceAddress; + const void * hostAddress; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + // wrapper struct for struct VkAccelerationStructureGeometryTrianglesDataKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryTrianglesDataKHR.html + struct AccelerationStructureGeometryTrianglesDataKHR + { + using NativeType = VkAccelerationStructureGeometryTrianglesDataKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureGeometryTrianglesDataKHR( VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, + uint32_t maxVertex_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vertexFormat{ vertexFormat_ } + , vertexData{ vertexData_ } + , vertexStride{ vertexStride_ } + , maxVertex{ maxVertex_ } + , indexType{ indexType_ } + , indexData{ indexData_ } + , transformData{ transformData_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureGeometryTrianglesDataKHR( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryTrianglesDataKHR( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureGeometryTrianglesDataKHR & operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureGeometryTrianglesDataKHR & operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + { + vertexFormat = vertexFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & + setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT + { + vertexData = vertexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & + setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexStride = vertexStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setMaxVertex( uint32_t maxVertex_ ) VULKAN_HPP_NOEXCEPT + { + maxVertex = maxVertex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & + setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT + { + indexData = indexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & + setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT + { + transformData = transformData_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureGeometryTrianglesDataKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryTrianglesDataKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryTrianglesDataKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vertexFormat, vertexData, vertexStride, maxVertex, indexType, indexData, transformData ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; + uint32_t maxVertex = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData = {}; + }; + + 
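The wrapper defined above is normally filled through its chained set*() members, with sType and pNext left at their defaults; DeviceOrHostAddressConstKHR selects the device-address side of the underlying union. A short usage sketch, assuming the default vk namespace and buffer device addresses obtained elsewhere (the helper name and constants are illustrative, not part of the header):

#include <vulkan/vulkan.hpp>

// Hypothetical helper: the addresses would normally come from vkGetBufferDeviceAddress
// on the vertex and index buffers; everything here is illustrative only.
vk::AccelerationStructureGeometryTrianglesDataKHR
  makeTriangleData( vk::DeviceAddress vertexAddress, vk::DeviceAddress indexAddress, uint32_t vertexCount )
{
  // Chained setters fill the wrapper in place and return *this, so the calls compose.
  return vk::AccelerationStructureGeometryTrianglesDataKHR{}
    .setVertexFormat( vk::Format::eR32G32B32Sfloat )
    .setVertexData( vk::DeviceOrHostAddressConstKHR{}.setDeviceAddress( vertexAddress ) )
    .setVertexStride( 3 * sizeof( float ) )
    .setMaxVertex( vertexCount - 1 )
    .setIndexType( vk::IndexType::eUint32 )
    .setIndexData( vk::DeviceOrHostAddressConstKHR{}.setDeviceAddress( indexAddress ) );
}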
template <> + struct CppType + { + using Type = AccelerationStructureGeometryTrianglesDataKHR; + }; + + // wrapper struct for struct VkAccelerationStructureGeometryAabbsDataKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryAabbsDataKHR.html + struct AccelerationStructureGeometryAabbsDataKHR + { + using NativeType = VkAccelerationStructureGeometryAabbsDataKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryAabbsDataKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , data{ data_ } + , stride{ stride_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryAabbsDataKHR( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureGeometryAabbsDataKHR & operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureGeometryAabbsDataKHR & operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & + setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureGeometryAabbsDataKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryAabbsDataKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryAabbsDataKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, data, stride ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; + VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryAabbsDataKHR; + }; + + // wrapper struct for struct 
VkAccelerationStructureGeometryInstancesDataKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryInstancesDataKHR.html + struct AccelerationStructureGeometryInstancesDataKHR + { + using NativeType = VkAccelerationStructureGeometryInstancesDataKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryInstancesDataKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , arrayOfPointers{ arrayOfPointers_ } + , data{ data_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureGeometryInstancesDataKHR( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryInstancesDataKHR( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureGeometryInstancesDataKHR & operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureGeometryInstancesDataKHR & operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & + setArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT + { + arrayOfPointers = arrayOfPointers_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & + setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureGeometryInstancesDataKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryInstancesDataKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryInstancesDataKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, arrayOfPointers, data ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryInstancesDataKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryInstancesDataKHR; + }; + + 
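Besides the setters, each wrapper carries conversion operators to the corresponding C type, so a filled wrapper can be copied into, or passed where, the plain C struct is expected without an explicit cast. A small sketch under the same assumptions as above (fillNative is a made-up helper, not part of the header):

#include <vulkan/vulkan.hpp>

// Copy a C++ wrapper into the C struct a C-style API expects; the assignment goes
// through operator VkAccelerationStructureGeometryAabbsDataKHR const &().
void fillNative( VkAccelerationStructureGeometryAabbsDataKHR *         out,
                 vk::AccelerationStructureGeometryAabbsDataKHR const & wrapped )
{
  *out = wrapped;
}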
union AccelerationStructureGeometryDataKHR + { + using NativeType = VkAccelerationStructureGeometryDataKHR; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} ) + : triangles( triangles_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs_ ) : aabbs( aabbs_ ) {} + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ ) + : instances( instances_ ) + { + } +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & + setTriangles( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT + { + triangles = triangles_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & + setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT + { + aabbs = aabbs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & + setInstances( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT + { + instances = instances_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureGeometryDataKHR const &() const + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryDataKHR &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances; +#else + VkAccelerationStructureGeometryTrianglesDataKHR triangles; + VkAccelerationStructureGeometryAabbsDataKHR aabbs; + VkAccelerationStructureGeometryInstancesDataKHR instances; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + // wrapper struct for struct VkAccelerationStructureGeometryKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryKHR.html + struct AccelerationStructureGeometryKHR + { + using NativeType = VkAccelerationStructureGeometryKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureGeometryKHR( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {}, + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , geometryType{ geometryType_ } + , geometry{ geometry_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryKHR( VkAccelerationStructureGeometryKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryKHR( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureGeometryKHR & operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureGeometryKHR & operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT + { + geometryType = geometryType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & + setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT + { + geometry = geometry_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureGeometryKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, geometryType, geometry, flags ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry = {}; + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryKHR; + }; + + union DeviceOrHostAddressKHR + { + using NativeType = VkDeviceOrHostAddressKHR; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {} + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( void * hostAddress_ ) : hostAddress( hostAddress_ ) {} +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & setHostAddress( void * hostAddress_ ) VULKAN_HPP_NOEXCEPT + { + hostAddress = hostAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceOrHostAddressKHR const &() const + { + return 
*reinterpret_cast( this ); + } + + operator VkDeviceOrHostAddressKHR &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress; + void * hostAddress; +#else + VkDeviceAddress deviceAddress; + void * hostAddress; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + // wrapper struct for struct VkAccelerationStructureBuildGeometryInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureBuildGeometryInfoKHR.html + struct AccelerationStructureBuildGeometryInfoKHR + { + using NativeType = VkAccelerationStructureBuildGeometryInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildGeometryInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ = {}, + uint32_t geometryCount_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , flags{ flags_ } + , mode{ mode_ } + , srcAccelerationStructure{ srcAccelerationStructure_ } + , dstAccelerationStructure{ dstAccelerationStructure_ } + , geometryCount{ geometryCount_ } + , pGeometries{ pGeometries_ } + , ppGeometries{ ppGeometries_ } + , scratchData{ scratchData_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureBuildGeometryInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureBuildGeometryInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & geometries_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pGeometries_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , type( type_ ) + , flags( flags_ ) + , mode( mode_ ) + , srcAccelerationStructure( srcAccelerationStructure_ ) + , dstAccelerationStructure( dstAccelerationStructure_ ) + , geometryCount( static_cast( !geometries_.empty() ? 
geometries_.size() : pGeometries_.size() ) ) + , pGeometries( geometries_.data() ) + , ppGeometries( pGeometries_.data() ) + , scratchData( scratchData_ ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( ( !geometries_.empty() + !pGeometries_.empty() ) <= 1 ); +# else + if ( 1 < ( !geometries_.empty() + !pGeometries_.empty() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::AccelerationStructureBuildGeometryInfoKHR::AccelerationStructureBuildGeometryInfoKHR: 1 < ( !geometries_.empty() + !pGeometries_.empty() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + AccelerationStructureBuildGeometryInfoKHR & operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureBuildGeometryInfoKHR & operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setSrcAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + srcAccelerationStructure = srcAccelerationStructure_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setDstAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + dstAccelerationStructure = dstAccelerationStructure_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT + { + geometryCount = geometryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setPGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ ) VULKAN_HPP_NOEXCEPT + { + pGeometries = pGeometries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureBuildGeometryInfoKHR & setGeometries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & geometries_ ) VULKAN_HPP_NOEXCEPT + { + geometryCount = static_cast( geometries_.size() ); + pGeometries = geometries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setPpGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ ) VULKAN_HPP_NOEXCEPT + { + ppGeometries = ppGeometries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + 
AccelerationStructureBuildGeometryInfoKHR & + setPGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pGeometries_ ) + VULKAN_HPP_NOEXCEPT + { + geometryCount = static_cast( pGeometries_.size() ); + ppGeometries = pGeometries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT + { + scratchData = scratchData_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureBuildGeometryInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureBuildGeometryInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureBuildGeometryInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, type, flags, mode, srcAccelerationStructure, dstAccelerationStructure, geometryCount, pGeometries, ppGeometries, scratchData ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure = {}; + uint32_t geometryCount = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureBuildGeometryInfoKHR; + }; + + // wrapper struct for struct VkAccelerationStructureBuildRangeInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureBuildRangeInfoKHR.html + struct AccelerationStructureBuildRangeInfoKHR + { + using NativeType = VkAccelerationStructureBuildRangeInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( uint32_t primitiveCount_ = {}, + uint32_t primitiveOffset_ = {}, + uint32_t firstVertex_ = {}, + uint32_t transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : primitiveCount{ primitiveCount_ } + , primitiveOffset{ primitiveOffset_ } + , firstVertex{ firstVertex_ } + , transformOffset{ transformOffset_ } + { + } + + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildRangeInfoKHR( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureBuildRangeInfoKHR( *reinterpret_cast( &rhs ) ) + { + } 
+ + AccelerationStructureBuildRangeInfoKHR & operator=( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureBuildRangeInfoKHR & operator=( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setPrimitiveCount( uint32_t primitiveCount_ ) VULKAN_HPP_NOEXCEPT + { + primitiveCount = primitiveCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setPrimitiveOffset( uint32_t primitiveOffset_ ) VULKAN_HPP_NOEXCEPT + { + primitiveOffset = primitiveOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT + { + firstVertex = firstVertex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setTransformOffset( uint32_t transformOffset_ ) VULKAN_HPP_NOEXCEPT + { + transformOffset = transformOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureBuildRangeInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureBuildRangeInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureBuildRangeInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureBuildRangeInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( primitiveCount, primitiveOffset, firstVertex, transformOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureBuildRangeInfoKHR const & ) const = default; +#else + bool operator==( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( primitiveCount == rhs.primitiveCount ) && ( primitiveOffset == rhs.primitiveOffset ) && ( firstVertex == rhs.firstVertex ) && + ( transformOffset == rhs.transformOffset ); +# endif + } + + bool operator!=( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t primitiveCount = {}; + uint32_t primitiveOffset = {}; + uint32_t firstVertex = {}; + uint32_t transformOffset = {}; + }; + + // wrapper struct for struct VkAccelerationStructureBuildSizesInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureBuildSizesInfoKHR.html + struct AccelerationStructureBuildSizesInfoKHR + { + using NativeType = VkAccelerationStructureBuildSizesInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildSizesInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ = {}, + 
VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , accelerationStructureSize{ accelerationStructureSize_ } + , updateScratchSize{ updateScratchSize_ } + , buildScratchSize{ buildScratchSize_ } + { + } + + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildSizesInfoKHR( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureBuildSizesInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureBuildSizesInfoKHR & operator=( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureBuildSizesInfoKHR & operator=( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkAccelerationStructureBuildSizesInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureBuildSizesInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureBuildSizesInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureBuildSizesInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, accelerationStructureSize, updateScratchSize, buildScratchSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureBuildSizesInfoKHR const & ) const = default; +#else + bool operator==( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructureSize == rhs.accelerationStructureSize ) && + ( updateScratchSize == rhs.updateScratchSize ) && ( buildScratchSize == rhs.buildScratchSize ); +# endif + } + + bool operator!=( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildSizesInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureBuildSizesInfoKHR; + }; + + // wrapper struct for struct VkAccelerationStructureCaptureDescriptorDataInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureCaptureDescriptorDataInfoEXT.html + struct AccelerationStructureCaptureDescriptorDataInfoEXT + { + using NativeType = VkAccelerationStructureCaptureDescriptorDataInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT; + +#if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructureNV_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , accelerationStructure{ accelerationStructure_ } + , accelerationStructureNV{ accelerationStructureNV_ } + { + } + + VULKAN_HPP_CONSTEXPR + AccelerationStructureCaptureDescriptorDataInfoEXT( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureCaptureDescriptorDataInfoEXT( VkAccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureCaptureDescriptorDataInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureCaptureDescriptorDataInfoEXT & + operator=( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureCaptureDescriptorDataInfoEXT & operator=( VkAccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT & + setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT & + setAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructureNV_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureNV = accelerationStructureNV_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureCaptureDescriptorDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureCaptureDescriptorDataInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, accelerationStructure, accelerationStructureNV ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureCaptureDescriptorDataInfoEXT const & ) const = default; +#else + bool operator==( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ) && + ( accelerationStructureNV == rhs.accelerationStructureNV ); +# endif + } + + bool operator!=( 
AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructureNV = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureCaptureDescriptorDataInfoEXT; + }; + + // wrapper struct for struct VkAccelerationStructureCreateInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureCreateInfoKHR.html + struct AccelerationStructureCreateInfoKHR + { + using NativeType = VkAccelerationStructureCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , createFlags{ createFlags_ } + , buffer{ buffer_ } + , offset{ offset_ } + , size{ size_ } + , type{ type_ } + , deviceAddress{ deviceAddress_ } + { + } + + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureCreateInfoKHR( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureCreateInfoKHR & operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureCreateInfoKHR & operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & + setCreateFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ ) VULKAN_HPP_NOEXCEPT + { + createFlags = createFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & 
setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, createFlags, buffer, offset, size, type, deviceAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureCreateInfoKHR const & ) const = default; +#else + bool operator==( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createFlags == rhs.createFlags ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && + ( size == rhs.size ) && ( type == rhs.type ) && ( deviceAddress == rhs.deviceAddress ); +# endif + } + + bool operator!=( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureCreateInfoKHR; + }; + + // wrapper struct for struct VkGeometryTrianglesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryTrianglesNV.html + struct GeometryTrianglesNV + { + using NativeType = VkGeometryTrianglesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryTrianglesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {}, + uint32_t vertexCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, + VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Buffer indexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {}, + uint32_t indexCount_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + VULKAN_HPP_NAMESPACE::Buffer transformData_ = {}, + 
VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vertexData{ vertexData_ } + , vertexOffset{ vertexOffset_ } + , vertexCount{ vertexCount_ } + , vertexStride{ vertexStride_ } + , vertexFormat{ vertexFormat_ } + , indexData{ indexData_ } + , indexOffset{ indexOffset_ } + , indexCount{ indexCount_ } + , indexType{ indexType_ } + , transformData{ transformData_ } + , transformOffset{ transformOffset_ } + { + } + + VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryTrianglesNV( *reinterpret_cast( &rhs ) ) + { + } + + GeometryTrianglesNV & operator=( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + GeometryTrianglesNV & operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexData( VULKAN_HPP_NAMESPACE::Buffer vertexData_ ) VULKAN_HPP_NOEXCEPT + { + vertexData = vertexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexOffset( VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ ) VULKAN_HPP_NOEXCEPT + { + vertexOffset = vertexOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexCount = vertexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexStride = vertexStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + { + vertexFormat = vertexFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexData( VULKAN_HPP_NAMESPACE::Buffer indexData_ ) VULKAN_HPP_NOEXCEPT + { + indexData = indexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ ) VULKAN_HPP_NOEXCEPT + { + indexOffset = indexOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT + { + indexCount = indexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setTransformData( VULKAN_HPP_NAMESPACE::Buffer transformData_ ) VULKAN_HPP_NOEXCEPT + { + transformData = transformData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setTransformOffset( VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ ) VULKAN_HPP_NOEXCEPT + { + transformOffset = transformOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGeometryTrianglesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkGeometryTrianglesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkGeometryTrianglesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + vertexData, + vertexOffset, + vertexCount, + vertexStride, + vertexFormat, + indexData, + indexOffset, + indexCount, + indexType, + transformData, + transformOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeometryTrianglesNV const & ) const = default; +#else + bool operator==( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexData == rhs.vertexData ) && ( vertexOffset == rhs.vertexOffset ) && + ( vertexCount == rhs.vertexCount ) && ( vertexStride == rhs.vertexStride ) && ( vertexFormat == rhs.vertexFormat ) && + ( indexData == rhs.indexData ) && ( indexOffset == rhs.indexOffset ) && ( indexCount == rhs.indexCount ) && ( indexType == rhs.indexType ) && + ( transformData == rhs.transformData ) && ( transformOffset == rhs.transformOffset ); +# endif + } + + bool operator!=( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryTrianglesNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer vertexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset = {}; + uint32_t vertexCount = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Buffer indexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize indexOffset = {}; + uint32_t indexCount = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::Buffer transformData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize transformOffset = {}; + }; + + template <> + struct CppType + { + using Type = GeometryTrianglesNV; + }; + + // wrapper struct for struct VkGeometryAABBNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryAABBNV.html + struct GeometryAABBNV + { + using NativeType = VkGeometryAABBNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryAabbNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeometryAABBNV( VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {}, + uint32_t numAABBs_ = {}, + uint32_t stride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , aabbData{ aabbData_ } + , numAABBs{ numAABBs_ } + , stride{ stride_ } + , offset{ offset_ } + { + } + + VULKAN_HPP_CONSTEXPR GeometryAABBNV( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryAABBNV( *reinterpret_cast( &rhs ) ) {} + + GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + GeometryAABBNV & operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + 
*this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setAabbData( VULKAN_HPP_NAMESPACE::Buffer aabbData_ ) VULKAN_HPP_NOEXCEPT + { + aabbData = aabbData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setNumAABBs( uint32_t numAABBs_ ) VULKAN_HPP_NOEXCEPT + { + numAABBs = numAABBs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGeometryAABBNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeometryAABBNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkGeometryAABBNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, aabbData, numAABBs, stride, offset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeometryAABBNV const & ) const = default; +#else + bool operator==( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aabbData == rhs.aabbData ) && ( numAABBs == rhs.numAABBs ) && ( stride == rhs.stride ) && + ( offset == rhs.offset ); +# endif + } + + bool operator!=( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryAabbNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer aabbData = {}; + uint32_t numAABBs = {}; + uint32_t stride = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + }; + + template <> + struct CppType + { + using Type = GeometryAABBNV; + }; + + // wrapper struct for struct VkGeometryDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryDataNV.html + struct GeometryDataNV + { + using NativeType = VkGeometryDataNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeometryDataNV( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {}, + VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {} ) VULKAN_HPP_NOEXCEPT + : triangles{ triangles_ } + , aabbs{ aabbs_ } + { + } + + VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryDataNV( *reinterpret_cast( &rhs ) ) {} + + GeometryDataNV & operator=( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + GeometryDataNV & operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if 
!defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & triangles_ ) VULKAN_HPP_NOEXCEPT + { + triangles = triangles_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & setAabbs( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & aabbs_ ) VULKAN_HPP_NOEXCEPT + { + aabbs = aabbs_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGeometryDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeometryDataNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkGeometryDataNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( triangles, aabbs ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeometryDataNV const & ) const = default; +#else + bool operator==( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( triangles == rhs.triangles ) && ( aabbs == rhs.aabbs ); +# endif + } + + bool operator!=( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles = {}; + VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs = {}; + }; + + // wrapper struct for struct VkGeometryNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryNV.html + struct GeometryNV + { + using NativeType = VkGeometryNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeometryNV( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, + VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {}, + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , geometryType{ geometryType_ } + , geometry{ geometry_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR GeometryNV( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryNV( *reinterpret_cast( &rhs ) ) {} + + GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + GeometryNV & operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeometryNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryNV & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT + { + geometryType = geometryType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryNV & setGeometry( VULKAN_HPP_NAMESPACE::GeometryDataNV const & geometry_ ) VULKAN_HPP_NOEXCEPT + { + 
geometry = geometry_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGeometryNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeometryNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkGeometryNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, geometryType, geometry, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeometryNV const & ) const = default; +#else + bool operator==( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( geometryType == rhs.geometryType ) && ( geometry == rhs.geometry ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; + VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {}; + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; + }; + + template <> + struct CppType + { + using Type = GeometryNV; + }; + + // wrapper struct for struct VkAccelerationStructureInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureInfoNV.html + struct AccelerationStructureInfoNV + { + using NativeType = VkAccelerationStructureInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = {}, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {}, + uint32_t instanceCount_ = {}, + uint32_t geometryCount_ = {}, + const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , flags{ flags_ } + , instanceCount{ instanceCount_ } + , geometryCount{ geometryCount_ } + , pGeometries{ pGeometries_ } + { + } + + VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_, + uint32_t instanceCount_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & geometries_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , type( type_ ) + , 
flags( flags_ ) + , instanceCount( instanceCount_ ) + , geometryCount( static_cast( geometries_.size() ) ) + , pGeometries( geometries_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + AccelerationStructureInfoNV & operator=( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureInfoNV & operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT + { + geometryCount = geometryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ ) VULKAN_HPP_NOEXCEPT + { + pGeometries = pGeometries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureInfoNV & + setGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & geometries_ ) VULKAN_HPP_NOEXCEPT + { + geometryCount = static_cast( geometries_.size() ); + pGeometries = geometries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, flags, instanceCount, geometryCount, pGeometries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureInfoNV const & ) const = default; +#else + bool operator==( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( flags == rhs.flags ) && ( instanceCount == rhs.instanceCount ) && + ( geometryCount == rhs.geometryCount ) && ( pGeometries == rhs.pGeometries ); +# endif + } + + bool operator!=( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = {}; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags = {}; + uint32_t instanceCount = {}; + uint32_t geometryCount = {}; + const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureInfoNV; + }; + + // wrapper struct for struct VkAccelerationStructureCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureCreateInfoNV.html + struct AccelerationStructureCreateInfoNV + { + using NativeType = VkAccelerationStructureCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , compactedSize{ compactedSize_ } + , info{ info_ } + { + } + + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureCreateInfoNV & operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureCreateInfoNV & operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT + { + compactedSize = compactedSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & info_ ) VULKAN_HPP_NOEXCEPT + { + info = info_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, compactedSize, info ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureCreateInfoNV const & ) const = default; +#else + bool operator==( 
AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compactedSize == rhs.compactedSize ) && ( info == rhs.info ); +# endif + } + + bool operator!=( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureCreateInfoNV; + }; + + // wrapper struct for struct VkAccelerationStructureDeviceAddressInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureDeviceAddressInfoKHR.html + struct AccelerationStructureDeviceAddressInfoKHR + { + using NativeType = VkAccelerationStructureDeviceAddressInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureDeviceAddressInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , accelerationStructure{ accelerationStructure_ } + { + } + + VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureDeviceAddressInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureDeviceAddressInfoKHR & operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureDeviceAddressInfoKHR & operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & + setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureDeviceAddressInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureDeviceAddressInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureDeviceAddressInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + 
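    // A usage sketch for AccelerationStructureDeviceAddressInfoKHR, assuming "device" has
    // VK_KHR_acceleration_structure enabled and "blas" is a built vk::AccelerationStructureKHR
    // (names are illustrative):
    //   auto addressInfo = vk::AccelerationStructureDeviceAddressInfoKHR{}
    //                        .setAccelerationStructure( blas );
    //   vk::DeviceAddress blasAddress = device.getAccelerationStructureAddressKHR( addressInfo );
    // The returned address is what typically goes into
    // AccelerationStructureInstanceKHR::accelerationStructureReference when building a top level.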
auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, accelerationStructure ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const & ) const = default; +#else + bool operator==( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ); +# endif + } + + bool operator!=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureDeviceAddressInfoKHR; + }; + + // wrapper struct for struct VkAccelerationStructureGeometryLinearSweptSpheresDataNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryLinearSweptSpheresDataNV.html + struct AccelerationStructureGeometryLinearSweptSpheresDataNV + { + using NativeType = VkAccelerationStructureGeometryLinearSweptSpheresDataNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryLinearSweptSpheresDataNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV( + VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, + VULKAN_HPP_NAMESPACE::Format radiusFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR radiusData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize radiusStride_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ = {}, + VULKAN_HPP_NAMESPACE::RayTracingLssIndexingModeNV indexingMode_ = VULKAN_HPP_NAMESPACE::RayTracingLssIndexingModeNV::eList, + VULKAN_HPP_NAMESPACE::RayTracingLssPrimitiveEndCapsModeNV endCapsMode_ = VULKAN_HPP_NAMESPACE::RayTracingLssPrimitiveEndCapsModeNV::eNone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vertexFormat{ vertexFormat_ } + , vertexData{ vertexData_ } + , vertexStride{ vertexStride_ } + , radiusFormat{ radiusFormat_ } + , radiusData{ radiusData_ } + , radiusStride{ radiusStride_ } + , indexType{ indexType_ } + , indexData{ indexData_ } + , indexStride{ indexStride_ } + , indexingMode{ indexingMode_ } + , endCapsMode{ endCapsMode_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureGeometryLinearSweptSpheresDataNV( AccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryLinearSweptSpheresDataNV( VkAccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryLinearSweptSpheresDataNV( 
*reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureGeometryLinearSweptSpheresDataNV & + operator=( AccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureGeometryLinearSweptSpheresDataNV & operator=( VkAccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + { + vertexFormat = vertexFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT + { + vertexData = vertexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexStride = vertexStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setRadiusFormat( VULKAN_HPP_NAMESPACE::Format radiusFormat_ ) VULKAN_HPP_NOEXCEPT + { + radiusFormat = radiusFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setRadiusData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & radiusData_ ) VULKAN_HPP_NOEXCEPT + { + radiusData = radiusData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setRadiusStride( VULKAN_HPP_NAMESPACE::DeviceSize radiusStride_ ) VULKAN_HPP_NOEXCEPT + { + radiusStride = radiusStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT + { + indexData = indexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setIndexStride( VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT + { + indexStride = indexStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setIndexingMode( VULKAN_HPP_NAMESPACE::RayTracingLssIndexingModeNV indexingMode_ ) VULKAN_HPP_NOEXCEPT + { + indexingMode = indexingMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setEndCapsMode( VULKAN_HPP_NAMESPACE::RayTracingLssPrimitiveEndCapsModeNV endCapsMode_ ) VULKAN_HPP_NOEXCEPT + { + endCapsMode = endCapsMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureGeometryLinearSweptSpheresDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryLinearSweptSpheresDataNV &() 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryLinearSweptSpheresDataNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryLinearSweptSpheresDataNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + vertexFormat, + vertexData, + vertexStride, + radiusFormat, + radiusData, + radiusStride, + indexType, + indexData, + indexStride, + indexingMode, + endCapsMode ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryLinearSweptSpheresDataNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; + VULKAN_HPP_NAMESPACE::Format radiusFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR radiusData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize radiusStride = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize indexStride = {}; + VULKAN_HPP_NAMESPACE::RayTracingLssIndexingModeNV indexingMode = VULKAN_HPP_NAMESPACE::RayTracingLssIndexingModeNV::eList; + VULKAN_HPP_NAMESPACE::RayTracingLssPrimitiveEndCapsModeNV endCapsMode = VULKAN_HPP_NAMESPACE::RayTracingLssPrimitiveEndCapsModeNV::eNone; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryLinearSweptSpheresDataNV; + }; + + // wrapper struct for struct VkAccelerationStructureGeometryMotionTrianglesDataNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryMotionTrianglesDataNV.html + struct AccelerationStructureGeometryMotionTrianglesDataNV + { + using NativeType = VkAccelerationStructureGeometryMotionTrianglesDataNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vertexData{ vertexData_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureGeometryMotionTrianglesDataNV( AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryMotionTrianglesDataNV( VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryMotionTrianglesDataNV( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureGeometryMotionTrianglesDataNV & + operator=( AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureGeometryMotionTrianglesDataNV & operator=( VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + 
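      // Usage note for the surrounding struct (VK_NV_ray_tracing_motion_blur): it is chained into
      // the pNext of vk::AccelerationStructureGeometryTrianglesDataKHR, and its vertexData gives
      // the vertex positions at time 1.0 while the base struct holds time 0.0. A sketch, assuming
      // device addresses "vertsT0" and "vertsT1" (illustrative names):
      //   auto motion    = vk::AccelerationStructureGeometryMotionTrianglesDataNV{}.setVertexData( vertsT1 );
      //   auto triangles = vk::AccelerationStructureGeometryTrianglesDataKHR{}
      //                      .setVertexData( vertsT0 )
      //                      .setPNext( &motion );  // vertex format, stride and counts omitted here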
*this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV & + setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT + { + vertexData = vertexData_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureGeometryMotionTrianglesDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryMotionTrianglesDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryMotionTrianglesDataNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryMotionTrianglesDataNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vertexData ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryMotionTrianglesDataNV; + }; + + // wrapper struct for struct VkAccelerationStructureGeometrySpheresDataNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometrySpheresDataNV.html + struct AccelerationStructureGeometrySpheresDataNV + { + using NativeType = VkAccelerationStructureGeometrySpheresDataNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometrySpheresDataNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV( VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, + VULKAN_HPP_NAMESPACE::Format radiusFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR radiusData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize radiusStride_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vertexFormat{ vertexFormat_ } + , vertexData{ vertexData_ } + , vertexStride{ vertexStride_ } + , radiusFormat{ radiusFormat_ } + , radiusData{ radiusData_ } + , radiusStride{ radiusStride_ } + , indexType{ indexType_ } + , indexData{ indexData_ } + , indexStride{ indexStride_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV( AccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
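    // Usage note for the surrounding struct (VK_NV_ray_tracing_linear_swept_spheres): sphere
    // primitives are described by a buffer of centres (vertexData/vertexFormat/vertexStride), a
    // buffer of radii (radiusData/radiusFormat/radiusStride) and an optional index buffer. A
    // sketch, assuming "centres" and "radii" are vk::DeviceOrHostAddressConstKHR values
    // (illustrative names):
    //   auto spheres = vk::AccelerationStructureGeometrySpheresDataNV{}
    //                    .setVertexFormat( vk::Format::eR32G32B32Sfloat )
    //                    .setVertexData( centres )
    //                    .setVertexStride( sizeof( float ) * 3 )
    //                    .setRadiusFormat( vk::Format::eR32Sfloat )
    //                    .setRadiusData( radii )
    //                    .setRadiusStride( sizeof( float ) );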
AccelerationStructureGeometrySpheresDataNV( VkAccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometrySpheresDataNV( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureGeometrySpheresDataNV & operator=( AccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureGeometrySpheresDataNV & operator=( VkAccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + { + vertexFormat = vertexFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & + setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT + { + vertexData = vertexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexStride = vertexStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setRadiusFormat( VULKAN_HPP_NAMESPACE::Format radiusFormat_ ) VULKAN_HPP_NOEXCEPT + { + radiusFormat = radiusFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & + setRadiusData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & radiusData_ ) VULKAN_HPP_NOEXCEPT + { + radiusData = radiusData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setRadiusStride( VULKAN_HPP_NAMESPACE::DeviceSize radiusStride_ ) VULKAN_HPP_NOEXCEPT + { + radiusStride = radiusStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & + setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT + { + indexData = indexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setIndexStride( VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT + { + indexStride = indexStride_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureGeometrySpheresDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometrySpheresDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometrySpheresDataNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometrySpheresDataNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, 
vertexFormat, vertexData, vertexStride, radiusFormat, radiusData, radiusStride, indexType, indexData, indexStride ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometrySpheresDataNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; + VULKAN_HPP_NAMESPACE::Format radiusFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR radiusData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize radiusStride = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize indexStride = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureGeometrySpheresDataNV; + }; + + // wrapper struct for struct VkTransformMatrixKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTransformMatrixKHR.html + struct TransformMatrixKHR + { + using NativeType = VkTransformMatrixKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( std::array, 3> const & matrix_ = {} ) VULKAN_HPP_NOEXCEPT : matrix{ matrix_ } {} + + VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT : TransformMatrixKHR( *reinterpret_cast( &rhs ) ) {} + + TransformMatrixKHR & operator=( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + TransformMatrixKHR & operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR & setMatrix( std::array, 3> matrix_ ) VULKAN_HPP_NOEXCEPT + { + matrix = matrix_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkTransformMatrixKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTransformMatrixKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkTransformMatrixKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( matrix ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TransformMatrixKHR const & ) const = default; +#else + bool operator==( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( matrix == rhs.matrix ); +# endif + } + + bool operator!=( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper2D matrix = {}; + }; + + using TransformMatrixNV = TransformMatrixKHR; + + // wrapper struct for struct VkAccelerationStructureInstanceKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureInstanceKHR.html + struct AccelerationStructureInstanceKHR + { + using NativeType = VkAccelerationStructureInstanceKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {}, + uint32_t instanceCustomIndex_ = {}, + uint32_t mask_ = {}, + uint32_t instanceShaderBindingTableRecordOffset_ = {}, + VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, + uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT + : transform{ transform_ } + , instanceCustomIndex{ instanceCustomIndex_ } + , mask{ mask_ } + , instanceShaderBindingTableRecordOffset{ instanceShaderBindingTableRecordOffset_ } + , flags{ flags_ } + , accelerationStructureReference{ accelerationStructureReference_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureInstanceKHR( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureInstanceKHR( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureInstanceKHR & operator=( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureInstanceKHR & operator=( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceCustomIndex = instanceCustomIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT + { + mask = mask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & + setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT + { + instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; + return *this; + } + + AccelerationStructureInstanceKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = *reinterpret_cast( &flags_ ); + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureReference = accelerationStructureReference_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureInstanceKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureInstanceKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureInstanceKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# 
if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( transform, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureInstanceKHR const & ) const = default; +#else + bool operator==( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( transform == rhs.transform ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) && ( mask == rhs.mask ) && + ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && ( flags == rhs.flags ) && + ( accelerationStructureReference == rhs.accelerationStructureReference ); +# endif + } + + bool operator!=( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {}; + uint32_t instanceCustomIndex : 24; + uint32_t mask : 8; + uint32_t instanceShaderBindingTableRecordOffset : 24; + VkGeometryInstanceFlagsKHR flags : 8; + uint64_t accelerationStructureReference = {}; + }; + + using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR; + + // wrapper struct for struct VkAccelerationStructureMatrixMotionInstanceNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMatrixMotionInstanceNV.html + struct AccelerationStructureMatrixMotionInstanceNV + { + using NativeType = VkAccelerationStructureMatrixMotionInstanceNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT0_ = {}, + VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT1_ = {}, + uint32_t instanceCustomIndex_ = {}, + uint32_t mask_ = {}, + uint32_t instanceShaderBindingTableRecordOffset_ = {}, + VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, + uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT + : transformT0{ transformT0_ } + , transformT1{ transformT1_ } + , instanceCustomIndex{ instanceCustomIndex_ } + , mask{ mask_ } + , instanceShaderBindingTableRecordOffset{ instanceShaderBindingTableRecordOffset_ } + , flags{ flags_ } + , accelerationStructureReference{ accelerationStructureReference_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureMatrixMotionInstanceNV( AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMatrixMotionInstanceNV( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureMatrixMotionInstanceNV( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureMatrixMotionInstanceNV & operator=( AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureMatrixMotionInstanceNV & operator=( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & + setTransformT0( 
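    // A usage sketch for the TransformMatrixKHR / AccelerationStructureInstanceKHR pair defined
    // above, assuming "blasAddress" is the vk::DeviceAddress queried earlier (illustrative name).
    // The transform is a row-major 3x4 matrix; identity keeps the referenced BLAS in place:
    //   std::array<std::array<float, 4>, 3> rows = { { { 1.0f, 0.0f, 0.0f, 0.0f },
    //                                                  { 0.0f, 1.0f, 0.0f, 0.0f },
    //                                                  { 0.0f, 0.0f, 1.0f, 0.0f } } };
    //   vk::TransformMatrixKHR identity{ rows };
    //   auto instance = vk::AccelerationStructureInstanceKHR{}
    //                     .setTransform( identity )
    //                     .setMask( 0xFF )
    //                     .setAccelerationStructureReference( blasAddress );
    // instanceCustomIndex, mask and the SBT record offset are bit-fields (24/8/24 bits wide), so
    // values written through the setters are truncated to those widths.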
VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT0_ ) VULKAN_HPP_NOEXCEPT + { + transformT0 = transformT0_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & + setTransformT1( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT1_ ) VULKAN_HPP_NOEXCEPT + { + transformT1 = transformT1_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceCustomIndex = instanceCustomIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT + { + mask = mask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & + setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT + { + instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; + return *this; + } + + AccelerationStructureMatrixMotionInstanceNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = *reinterpret_cast( &flags_ ); + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & + setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureReference = accelerationStructureReference_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureMatrixMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureMatrixMotionInstanceNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureMatrixMotionInstanceNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureMatrixMotionInstanceNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( transformT0, transformT1, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureMatrixMotionInstanceNV const & ) const = default; +#else + bool operator==( AccelerationStructureMatrixMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( transformT0 == rhs.transformT0 ) && ( transformT1 == rhs.transformT1 ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) && + ( mask == rhs.mask ) && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && ( flags == rhs.flags ) && + ( accelerationStructureReference == rhs.accelerationStructureReference ); +# endif + } + + bool operator!=( AccelerationStructureMatrixMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT0 = {}; + VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT1 = {}; + uint32_t instanceCustomIndex : 24; + uint32_t mask : 8; + uint32_t instanceShaderBindingTableRecordOffset : 24; + VkGeometryInstanceFlagsKHR flags : 8; 
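      // Usage note for the surrounding struct (VK_NV_ray_tracing_motion_blur): transformT0 and
      // transformT1 hold the instance transform at times 0.0 and 1.0; the remaining fields match
      // AccelerationStructureInstanceKHR. A sketch, reusing "identity" and "blasAddress" from the
      // earlier sketches plus a second, hypothetical matrix "moved":
      //   auto motionInstance = vk::AccelerationStructureMatrixMotionInstanceNV{}
      //                           .setTransformT0( identity )
      //                           .setTransformT1( moved )
      //                           .setMask( 0xFF )
      //                           .setAccelerationStructureReference( blasAddress );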
+ uint64_t accelerationStructureReference = {}; + }; + + // wrapper struct for struct VkAccelerationStructureMemoryRequirementsInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMemoryRequirementsInfoNV.html + struct AccelerationStructureMemoryRequirementsInfoNV + { + using NativeType = VkAccelerationStructureMemoryRequirementsInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , accelerationStructure{ accelerationStructure_ } + { + } + + VULKAN_HPP_CONSTEXPR + AccelerationStructureMemoryRequirementsInfoNV( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureMemoryRequirementsInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureMemoryRequirementsInfoNV & operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureMemoryRequirementsInfoNV & operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & + setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & + setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureMemoryRequirementsInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureMemoryRequirementsInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, accelerationStructure ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
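    // A usage sketch for AccelerationStructureMemoryRequirementsInfoNV, reusing "device" and the
    // vk::AccelerationStructureNV "as" from the earlier sketches:
    //   auto memReqInfo = vk::AccelerationStructureMemoryRequirementsInfoNV{}
    //                       .setType( vk::AccelerationStructureMemoryRequirementsTypeNV::eObject )
    //                       .setAccelerationStructure( as );
    //   vk::MemoryRequirements2KHR reqs = device.getAccelerationStructureMemoryRequirementsNV( memReqInfo );
    // eBuildScratch and eUpdateScratch query the scratch-buffer sizes needed to build and update it.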
AccelerationStructureMemoryRequirementsInfoNV const & ) const = default; +#else + bool operator==( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( accelerationStructure == rhs.accelerationStructure ); +# endif + } + + bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject; + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureMemoryRequirementsInfoNV; + }; + + // wrapper struct for struct VkAccelerationStructureMotionInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMotionInfoNV.html + struct AccelerationStructureMotionInfoNV + { + using NativeType = VkAccelerationStructureMotionInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMotionInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV( uint32_t maxInstances_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxInstances{ maxInstances_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMotionInfoNV( VkAccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureMotionInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureMotionInfoNV & operator=( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureMotionInfoNV & operator=( VkAccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setMaxInstances( uint32_t maxInstances_ ) VULKAN_HPP_NOEXCEPT + { + maxInstances = maxInstances_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureMotionInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureMotionInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureMotionInfoNV 
const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureMotionInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxInstances, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureMotionInfoNV const & ) const = default; +#else + bool operator==( AccelerationStructureMotionInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInstances == rhs.maxInstances ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( AccelerationStructureMotionInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMotionInfoNV; + const void * pNext = {}; + uint32_t maxInstances = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureMotionInfoNV; + }; + + // wrapper struct for struct VkSRTDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSRTDataNV.html + struct SRTDataNV + { + using NativeType = VkSRTDataNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SRTDataNV( float sx_ = {}, + float a_ = {}, + float b_ = {}, + float pvx_ = {}, + float sy_ = {}, + float c_ = {}, + float pvy_ = {}, + float sz_ = {}, + float pvz_ = {}, + float qx_ = {}, + float qy_ = {}, + float qz_ = {}, + float qw_ = {}, + float tx_ = {}, + float ty_ = {}, + float tz_ = {} ) VULKAN_HPP_NOEXCEPT + : sx{ sx_ } + , a{ a_ } + , b{ b_ } + , pvx{ pvx_ } + , sy{ sy_ } + , c{ c_ } + , pvy{ pvy_ } + , sz{ sz_ } + , pvz{ pvz_ } + , qx{ qx_ } + , qy{ qy_ } + , qz{ qz_ } + , qw{ qw_ } + , tx{ tx_ } + , ty{ ty_ } + , tz{ tz_ } + { + } + + VULKAN_HPP_CONSTEXPR SRTDataNV( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SRTDataNV( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : SRTDataNV( *reinterpret_cast( &rhs ) ) {} + + SRTDataNV & operator=( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SRTDataNV & operator=( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSx( float sx_ ) VULKAN_HPP_NOEXCEPT + { + sx = sx_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setA( float a_ ) VULKAN_HPP_NOEXCEPT + { + a = a_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setB( float b_ ) VULKAN_HPP_NOEXCEPT + { + b = b_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvx( float pvx_ ) VULKAN_HPP_NOEXCEPT + { + pvx = pvx_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSy( float sy_ ) VULKAN_HPP_NOEXCEPT + { + sy = sy_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setC( float c_ ) VULKAN_HPP_NOEXCEPT + { + c = c_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvy( float pvy_ ) VULKAN_HPP_NOEXCEPT + { + pvy = pvy_; + return *this; + } + + 
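    // Usage note for SRTDataNV: the fields are a decomposed transform used for motion
    // interpolation - sx/sy/sz scale, a/b/c shear, pvx/pvy/pvz the pivot point, qx/qy/qz/qw a
    // rotation quaternion and tx/ty/tz translation. An identity SRT, as a sketch:
    //   auto srt = vk::SRTDataNV{}.setSx( 1.0f ).setSy( 1.0f ).setSz( 1.0f ).setQw( 1.0f );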
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSz( float sz_ ) VULKAN_HPP_NOEXCEPT + { + sz = sz_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvz( float pvz_ ) VULKAN_HPP_NOEXCEPT + { + pvz = pvz_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQx( float qx_ ) VULKAN_HPP_NOEXCEPT + { + qx = qx_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQy( float qy_ ) VULKAN_HPP_NOEXCEPT + { + qy = qy_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQz( float qz_ ) VULKAN_HPP_NOEXCEPT + { + qz = qz_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQw( float qw_ ) VULKAN_HPP_NOEXCEPT + { + qw = qw_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTx( float tx_ ) VULKAN_HPP_NOEXCEPT + { + tx = tx_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTy( float ty_ ) VULKAN_HPP_NOEXCEPT + { + ty = ty_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTz( float tz_ ) VULKAN_HPP_NOEXCEPT + { + tz = tz_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSRTDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSRTDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSRTDataNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSRTDataNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sx, a, b, pvx, sy, c, pvy, sz, pvz, qx, qy, qz, qw, tx, ty, tz ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SRTDataNV const & ) const = default; +#else + bool operator==( SRTDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sx == rhs.sx ) && ( a == rhs.a ) && ( b == rhs.b ) && ( pvx == rhs.pvx ) && ( sy == rhs.sy ) && ( c == rhs.c ) && ( pvy == rhs.pvy ) && + ( sz == rhs.sz ) && ( pvz == rhs.pvz ) && ( qx == rhs.qx ) && ( qy == rhs.qy ) && ( qz == rhs.qz ) && ( qw == rhs.qw ) && ( tx == rhs.tx ) && + ( ty == rhs.ty ) && ( tz == rhs.tz ); +# endif + } + + bool operator!=( SRTDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float sx = {}; + float a = {}; + float b = {}; + float pvx = {}; + float sy = {}; + float c = {}; + float pvy = {}; + float sz = {}; + float pvz = {}; + float qx = {}; + float qy = {}; + float qz = {}; + float qw = {}; + float tx = {}; + float ty = {}; + float tz = {}; + }; + + // wrapper struct for struct VkAccelerationStructureSRTMotionInstanceNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureSRTMotionInstanceNV.html + struct AccelerationStructureSRTMotionInstanceNV + { + using NativeType = VkAccelerationStructureSRTMotionInstanceNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureSRTMotionInstanceNV( VULKAN_HPP_NAMESPACE::SRTDataNV transformT0_ = {}, + VULKAN_HPP_NAMESPACE::SRTDataNV transformT1_ = {}, + uint32_t instanceCustomIndex_ = {}, + uint32_t mask_ = {}, + uint32_t instanceShaderBindingTableRecordOffset_ = {}, + VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, + uint64_t accelerationStructureReference_ = {} ) 
VULKAN_HPP_NOEXCEPT + : transformT0{ transformT0_ } + , transformT1{ transformT1_ } + , instanceCustomIndex{ instanceCustomIndex_ } + , mask{ mask_ } + , instanceShaderBindingTableRecordOffset{ instanceShaderBindingTableRecordOffset_ } + , flags{ flags_ } + , accelerationStructureReference{ accelerationStructureReference_ } + { + } + + VULKAN_HPP_CONSTEXPR AccelerationStructureSRTMotionInstanceNV( AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureSRTMotionInstanceNV( VkAccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureSRTMotionInstanceNV( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureSRTMotionInstanceNV & operator=( AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureSRTMotionInstanceNV & operator=( VkAccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & + setTransformT0( VULKAN_HPP_NAMESPACE::SRTDataNV const & transformT0_ ) VULKAN_HPP_NOEXCEPT + { + transformT0 = transformT0_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & + setTransformT1( VULKAN_HPP_NAMESPACE::SRTDataNV const & transformT1_ ) VULKAN_HPP_NOEXCEPT + { + transformT1 = transformT1_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceCustomIndex = instanceCustomIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT + { + mask = mask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & + setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT + { + instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; + return *this; + } + + AccelerationStructureSRTMotionInstanceNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = *reinterpret_cast( &flags_ ); + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & + setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureReference = accelerationStructureReference_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureSRTMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureSRTMotionInstanceNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureSRTMotionInstanceNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureSRTMotionInstanceNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( transformT0, transformT1, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, 
accelerationStructureReference ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureSRTMotionInstanceNV const & ) const = default; +#else + bool operator==( AccelerationStructureSRTMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( transformT0 == rhs.transformT0 ) && ( transformT1 == rhs.transformT1 ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) && + ( mask == rhs.mask ) && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && ( flags == rhs.flags ) && + ( accelerationStructureReference == rhs.accelerationStructureReference ); +# endif + } + + bool operator!=( AccelerationStructureSRTMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::SRTDataNV transformT0 = {}; + VULKAN_HPP_NAMESPACE::SRTDataNV transformT1 = {}; + uint32_t instanceCustomIndex : 24; + uint32_t mask : 8; + uint32_t instanceShaderBindingTableRecordOffset : 24; + VkGeometryInstanceFlagsKHR flags : 8; + uint64_t accelerationStructureReference = {}; + }; + + union AccelerationStructureMotionInstanceDataNV + { + using NativeType = VkAccelerationStructureMotionInstanceDataNV; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR staticInstance_ = {} ) + : staticInstance( staticInstance_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance_ ) + : matrixMotionInstance( matrixMotionInstance_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV srtMotionInstance_ ) + : srtMotionInstance( srtMotionInstance_ ) + { + } +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & + setStaticInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const & staticInstance_ ) VULKAN_HPP_NOEXCEPT + { + staticInstance = staticInstance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & + setMatrixMotionInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const & matrixMotionInstance_ ) VULKAN_HPP_NOEXCEPT + { + matrixMotionInstance = matrixMotionInstance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & + setSrtMotionInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const & srtMotionInstance_ ) VULKAN_HPP_NOEXCEPT + { + srtMotionInstance = srtMotionInstance_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureMotionInstanceDataNV const &() const + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureMotionInstanceDataNV &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR staticInstance; + VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance; + VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV srtMotionInstance; 
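    // Usage note for this union: exactly one member is active at a time, selected by the "type"
    // field of the enclosing AccelerationStructureMotionInstanceNV (eStatic reads staticInstance,
    // eMatrixMotion reads matrixMotionInstance, eSrtMotion reads srtMotionInstance).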
+#else + VkAccelerationStructureInstanceKHR staticInstance; + VkAccelerationStructureMatrixMotionInstanceNV matrixMotionInstance; + VkAccelerationStructureSRTMotionInstanceNV srtMotionInstance; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + // wrapper struct for struct VkAccelerationStructureMotionInstanceNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMotionInstanceNV.html + struct AccelerationStructureMotionInstanceNV + { + using NativeType = VkAccelerationStructureMotionInstanceNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic, + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data_ = {} ) VULKAN_HPP_NOEXCEPT + : type{ type_ } + , flags{ flags_ } + , data{ data_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV( AccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMotionInstanceNV( VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureMotionInstanceNV( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureMotionInstanceNV & operator=( AccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureMotionInstanceNV & operator=( VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & + setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & + setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & + setData( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureMotionInstanceNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureMotionInstanceNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureMotionInstanceNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( type, flags, data ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic; + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags = {}; + 
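    // A combined sketch, reusing the hypothetical "motionInstance" built in the matrix-motion
    // example above:
    //   auto instanceNV = vk::AccelerationStructureMotionInstanceNV{}
    //                       .setType( vk::AccelerationStructureMotionInstanceTypeNV::eMatrixMotion )
    //                       .setData( vk::AccelerationStructureMotionInstanceDataNV{}
    //                                   .setMatrixMotionInstance( motionInstance ) );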
VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data = {}; + }; + + // wrapper struct for struct VkMicromapUsageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapUsageEXT.html + struct MicromapUsageEXT + { + using NativeType = VkMicromapUsageEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MicromapUsageEXT( uint32_t count_ = {}, uint32_t subdivisionLevel_ = {}, uint32_t format_ = {} ) VULKAN_HPP_NOEXCEPT + : count{ count_ } + , subdivisionLevel{ subdivisionLevel_ } + , format{ format_ } + { + } + + VULKAN_HPP_CONSTEXPR MicromapUsageEXT( MicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MicromapUsageEXT( VkMicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MicromapUsageEXT( *reinterpret_cast( &rhs ) ) {} + + MicromapUsageEXT & operator=( MicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MicromapUsageEXT & operator=( VkMicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setCount( uint32_t count_ ) VULKAN_HPP_NOEXCEPT + { + count = count_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setSubdivisionLevel( uint32_t subdivisionLevel_ ) VULKAN_HPP_NOEXCEPT + { + subdivisionLevel = subdivisionLevel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setFormat( uint32_t format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMicromapUsageEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMicromapUsageEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMicromapUsageEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMicromapUsageEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( count, subdivisionLevel, format ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MicromapUsageEXT const & ) const = default; +#else + bool operator==( MicromapUsageEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( count == rhs.count ) && ( subdivisionLevel == rhs.subdivisionLevel ) && ( format == rhs.format ); +# endif + } + + bool operator!=( MicromapUsageEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t count = {}; + uint32_t subdivisionLevel = {}; + uint32_t format = {}; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + // wrapper struct for struct VkAccelerationStructureTrianglesDisplacementMicromapNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureTrianglesDisplacementMicromapNV.html + struct AccelerationStructureTrianglesDisplacementMicromapNV + { + using NativeType = VkAccelerationStructureTrianglesDisplacementMicromapNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eAccelerationStructureTrianglesDisplacementMicromapNV; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV( + VULKAN_HPP_NAMESPACE::Format displacementBiasAndScaleFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format displacementVectorFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize displacementBiasAndScaleStride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementVectorBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize displacementVectorStride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize displacedMicromapPrimitiveFlagsStride_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ = {}, + uint32_t baseTriangle_ = {}, + uint32_t usageCountsCount_ = {}, + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ = {}, + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {}, + VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , displacementBiasAndScaleFormat{ displacementBiasAndScaleFormat_ } + , displacementVectorFormat{ displacementVectorFormat_ } + , displacementBiasAndScaleBuffer{ displacementBiasAndScaleBuffer_ } + , displacementBiasAndScaleStride{ displacementBiasAndScaleStride_ } + , displacementVectorBuffer{ displacementVectorBuffer_ } + , displacementVectorStride{ displacementVectorStride_ } + , displacedMicromapPrimitiveFlags{ displacedMicromapPrimitiveFlags_ } + , displacedMicromapPrimitiveFlagsStride{ displacedMicromapPrimitiveFlagsStride_ } + , indexType{ indexType_ } + , indexBuffer{ indexBuffer_ } + , indexStride{ indexStride_ } + , baseTriangle{ baseTriangle_ } + , usageCountsCount{ usageCountsCount_ } + , pUsageCounts{ pUsageCounts_ } + , ppUsageCounts{ ppUsageCounts_ } + , micromap{ micromap_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureTrianglesDisplacementMicromapNV( AccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureTrianglesDisplacementMicromapNV( VkAccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureTrianglesDisplacementMicromapNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureTrianglesDisplacementMicromapNV( + VULKAN_HPP_NAMESPACE::Format displacementBiasAndScaleFormat_, + VULKAN_HPP_NAMESPACE::Format displacementVectorFormat_, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize displacementBiasAndScaleStride_, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementVectorBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize displacementVectorStride_, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags_, + VULKAN_HPP_NAMESPACE::DeviceSize displacedMicromapPrimitiveFlagsStride_, + VULKAN_HPP_NAMESPACE::IndexType indexType_, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize 
indexStride_, + uint32_t baseTriangle_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & usageCounts_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pUsageCounts_ = {}, + VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ) + , displacementBiasAndScaleFormat( displacementBiasAndScaleFormat_ ) + , displacementVectorFormat( displacementVectorFormat_ ) + , displacementBiasAndScaleBuffer( displacementBiasAndScaleBuffer_ ) + , displacementBiasAndScaleStride( displacementBiasAndScaleStride_ ) + , displacementVectorBuffer( displacementVectorBuffer_ ) + , displacementVectorStride( displacementVectorStride_ ) + , displacedMicromapPrimitiveFlags( displacedMicromapPrimitiveFlags_ ) + , displacedMicromapPrimitiveFlagsStride( displacedMicromapPrimitiveFlagsStride_ ) + , indexType( indexType_ ) + , indexBuffer( indexBuffer_ ) + , indexStride( indexStride_ ) + , baseTriangle( baseTriangle_ ) + , usageCountsCount( static_cast( usageCounts_.size() ) ) + , pUsageCounts( usageCounts_.data() ) + , ppUsageCounts( pUsageCounts_.data() ) + , micromap( micromap_ ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( usageCounts_.empty() || pUsageCounts_.empty() || ( usageCounts_.size() == pUsageCounts_.size() ) ); +# else + if ( !usageCounts_.empty() && !pUsageCounts_.empty() && ( usageCounts_.size() != pUsageCounts_.size() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::AccelerationStructureTrianglesDisplacementMicromapNV::AccelerationStructureTrianglesDisplacementMicromapNV: !usageCounts_.empty() && !pUsageCounts_.empty() && ( usageCounts_.size() != pUsageCounts_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + AccelerationStructureTrianglesDisplacementMicromapNV & + operator=( AccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureTrianglesDisplacementMicromapNV & operator=( VkAccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setDisplacementBiasAndScaleFormat( VULKAN_HPP_NAMESPACE::Format displacementBiasAndScaleFormat_ ) VULKAN_HPP_NOEXCEPT + { + displacementBiasAndScaleFormat = displacementBiasAndScaleFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setDisplacementVectorFormat( VULKAN_HPP_NAMESPACE::Format displacementVectorFormat_ ) VULKAN_HPP_NOEXCEPT + { + displacementVectorFormat = displacementVectorFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setDisplacementBiasAndScaleBuffer( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & displacementBiasAndScaleBuffer_ ) VULKAN_HPP_NOEXCEPT + { + displacementBiasAndScaleBuffer = displacementBiasAndScaleBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setDisplacementBiasAndScaleStride( VULKAN_HPP_NAMESPACE::DeviceSize displacementBiasAndScaleStride_ ) VULKAN_HPP_NOEXCEPT + { + 
displacementBiasAndScaleStride = displacementBiasAndScaleStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setDisplacementVectorBuffer( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & displacementVectorBuffer_ ) VULKAN_HPP_NOEXCEPT + { + displacementVectorBuffer = displacementVectorBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setDisplacementVectorStride( VULKAN_HPP_NAMESPACE::DeviceSize displacementVectorStride_ ) VULKAN_HPP_NOEXCEPT + { + displacementVectorStride = displacementVectorStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setDisplacedMicromapPrimitiveFlags( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & displacedMicromapPrimitiveFlags_ ) VULKAN_HPP_NOEXCEPT + { + displacedMicromapPrimitiveFlags = displacedMicromapPrimitiveFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setDisplacedMicromapPrimitiveFlagsStride( VULKAN_HPP_NAMESPACE::DeviceSize displacedMicromapPrimitiveFlagsStride_ ) VULKAN_HPP_NOEXCEPT + { + displacedMicromapPrimitiveFlagsStride = displacedMicromapPrimitiveFlagsStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + indexBuffer = indexBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setIndexStride( VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT + { + indexStride = indexStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setBaseTriangle( uint32_t baseTriangle_ ) VULKAN_HPP_NOEXCEPT + { + baseTriangle = baseTriangle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = usageCountsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setPUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + pUsageCounts = pUsageCounts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureTrianglesDisplacementMicromapNV & + setUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & usageCounts_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = static_cast( usageCounts_.size() ); + pUsageCounts = usageCounts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setPpUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + ppUsageCounts = ppUsageCounts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureTrianglesDisplacementMicromapNV & setPUsageCounts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = static_cast( 
pUsageCounts_.size() ); + ppUsageCounts = pUsageCounts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setMicromap( VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ ) VULKAN_HPP_NOEXCEPT + { + micromap = micromap_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureTrianglesDisplacementMicromapNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureTrianglesDisplacementMicromapNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureTrianglesDisplacementMicromapNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureTrianglesDisplacementMicromapNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + displacementBiasAndScaleFormat, + displacementVectorFormat, + displacementBiasAndScaleBuffer, + displacementBiasAndScaleStride, + displacementVectorBuffer, + displacementVectorStride, + displacedMicromapPrimitiveFlags, + displacedMicromapPrimitiveFlagsStride, + indexType, + indexBuffer, + indexStride, + baseTriangle, + usageCountsCount, + pUsageCounts, + ppUsageCounts, + micromap ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureTrianglesDisplacementMicromapNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format displacementBiasAndScaleFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Format displacementVectorFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize displacementBiasAndScaleStride = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementVectorBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize displacementVectorStride = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags = {}; + VULKAN_HPP_NAMESPACE::DeviceSize displacedMicromapPrimitiveFlagsStride = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize indexStride = {}; + uint32_t baseTriangle = {}; + uint32_t usageCountsCount = {}; + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts = {}; + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts = {}; + VULKAN_HPP_NAMESPACE::MicromapEXT micromap = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureTrianglesDisplacementMicromapNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + // wrapper struct for struct VkAccelerationStructureTrianglesOpacityMicromapEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureTrianglesOpacityMicromapEXT.html + struct AccelerationStructureTrianglesOpacityMicromapEXT + { + using NativeType = VkAccelerationStructureTrianglesOpacityMicromapEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT; + +#if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureTrianglesOpacityMicromapEXT( VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ = {}, + uint32_t baseTriangle_ = {}, + uint32_t usageCountsCount_ = {}, + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ = {}, + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {}, + VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , indexType{ indexType_ } + , indexBuffer{ indexBuffer_ } + , indexStride{ indexStride_ } + , baseTriangle{ baseTriangle_ } + , usageCountsCount{ usageCountsCount_ } + , pUsageCounts{ pUsageCounts_ } + , ppUsageCounts{ ppUsageCounts_ } + , micromap{ micromap_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureTrianglesOpacityMicromapEXT( AccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureTrianglesOpacityMicromapEXT( VkAccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureTrianglesOpacityMicromapEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureTrianglesOpacityMicromapEXT( + VULKAN_HPP_NAMESPACE::IndexType indexType_, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize indexStride_, + uint32_t baseTriangle_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & usageCounts_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pUsageCounts_ = {}, + VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ) + , indexType( indexType_ ) + , indexBuffer( indexBuffer_ ) + , indexStride( indexStride_ ) + , baseTriangle( baseTriangle_ ) + , usageCountsCount( static_cast( !usageCounts_.empty() ? 
usageCounts_.size() : pUsageCounts_.size() ) ) + , pUsageCounts( usageCounts_.data() ) + , ppUsageCounts( pUsageCounts_.data() ) + , micromap( micromap_ ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( ( !usageCounts_.empty() + !pUsageCounts_.empty() ) <= 1 ); +# else + if ( 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::AccelerationStructureTrianglesOpacityMicromapEXT::AccelerationStructureTrianglesOpacityMicromapEXT: 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + AccelerationStructureTrianglesOpacityMicromapEXT & operator=( AccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureTrianglesOpacityMicromapEXT & operator=( VkAccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & + setIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + indexBuffer = indexBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & + setIndexStride( VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT + { + indexStride = indexStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setBaseTriangle( uint32_t baseTriangle_ ) VULKAN_HPP_NOEXCEPT + { + baseTriangle = baseTriangle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = usageCountsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & + setPUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + pUsageCounts = pUsageCounts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureTrianglesOpacityMicromapEXT & + setUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & usageCounts_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = static_cast( usageCounts_.size() ); + pUsageCounts = usageCounts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & + setPpUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + ppUsageCounts = ppUsageCounts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureTrianglesOpacityMicromapEXT & setPUsageCounts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = static_cast( pUsageCounts_.size() ); + ppUsageCounts 
= pUsageCounts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setMicromap( VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ ) VULKAN_HPP_NOEXCEPT + { + micromap = micromap_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureTrianglesOpacityMicromapEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureTrianglesOpacityMicromapEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureTrianglesOpacityMicromapEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAccelerationStructureTrianglesOpacityMicromapEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, indexType, indexBuffer, indexStride, baseTriangle, usageCountsCount, pUsageCounts, ppUsageCounts, micromap ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize indexStride = {}; + uint32_t baseTriangle = {}; + uint32_t usageCountsCount = {}; + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts = {}; + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts = {}; + VULKAN_HPP_NAMESPACE::MicromapEXT micromap = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureTrianglesOpacityMicromapEXT; + }; + + // wrapper struct for struct VkAccelerationStructureVersionInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureVersionInfoKHR.html + struct AccelerationStructureVersionInfoKHR + { + using NativeType = VkAccelerationStructureVersionInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureVersionInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pVersionData{ pVersionData_ } + { + } + + VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureVersionInfoKHR( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureVersionInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureVersionInfoKHR & operator=( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureVersionInfoKHR & operator=( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & setPNext( const void * pNext_ ) 
VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVersionData = pVersionData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkAccelerationStructureVersionInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( this );
+    }
+
+    operator VkAccelerationStructureVersionInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureVersionInfoKHR *>( this );
+    }
+
+    operator VkAccelerationStructureVersionInfoKHR const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( this );
+    }
+
+    operator VkAccelerationStructureVersionInfoKHR *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkAccelerationStructureVersionInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const uint8_t * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pVersionData );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( AccelerationStructureVersionInfoKHR const & ) const = default;
+#else
+    bool operator==( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVersionData == rhs.pVersionData );
+# endif
+    }
+
+    bool operator!=( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType        = StructureType::eAccelerationStructureVersionInfoKHR;
+    const void *                        pNext        = {};
+    const uint8_t *                     pVersionData = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureVersionInfoKHR>
+  {
+    using Type = AccelerationStructureVersionInfoKHR;
+  };
+
+  // wrapper struct for struct VkAcquireNextImageInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAcquireNextImageInfoKHR.html
+  struct AcquireNextImageInfoKHR
+  {
+    using NativeType = VkAcquireNextImageInfoKHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireNextImageInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_  = {},
+                                                  uint64_t                           timeout_    = {},
+                                                  VULKAN_HPP_NAMESPACE::Semaphore    semaphore_  = {},
+                                                  VULKAN_HPP_NAMESPACE::Fence        fence_      = {},
+                                                  uint32_t                           deviceMask_ = {},
+                                                  const void *                       pNext_      = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , swapchain{ swapchain_ }
+      , timeout{ timeout_ }
+      , semaphore{ semaphore_ }
+      , fence{ fence_ }
+      , deviceMask{ deviceMask_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AcquireNextImageInfoKHR( *reinterpret_cast<AcquireNextImageInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    AcquireNextImageInfoKHR & operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    AcquireNextImageInfoKHR & operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setPNext( const void *
pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT + { + swapchain = swapchain_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT + { + timeout = timeout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAcquireNextImageInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAcquireNextImageInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAcquireNextImageInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchain, timeout, semaphore, fence, deviceMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AcquireNextImageInfoKHR const & ) const = default; +#else + bool operator==( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && ( timeout == rhs.timeout ) && + ( semaphore == rhs.semaphore ) && ( fence == rhs.fence ) && ( deviceMask == rhs.deviceMask ); +# endif + } + + bool operator!=( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + uint64_t timeout = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + uint32_t deviceMask = {}; + }; + + template <> + struct CppType + { + using Type = AcquireNextImageInfoKHR; + }; + + // wrapper struct for struct VkAcquireProfilingLockInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAcquireProfilingLockInfoKHR.html + struct AcquireProfilingLockInfoKHR + { + using NativeType = VkAcquireProfilingLockInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireProfilingLockInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {}, + uint64_t timeout_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , timeout{ timeout_ } + { + } + + VULKAN_HPP_CONSTEXPR 
AcquireProfilingLockInfoKHR( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AcquireProfilingLockInfoKHR( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AcquireProfilingLockInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + AcquireProfilingLockInfoKHR & operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AcquireProfilingLockInfoKHR & operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT + { + timeout = timeout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAcquireProfilingLockInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAcquireProfilingLockInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAcquireProfilingLockInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, timeout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AcquireProfilingLockInfoKHR const & ) const = default; +#else + bool operator==( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( timeout == rhs.timeout ); +# endif + } + + bool operator!=( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {}; + uint64_t timeout = {}; + }; + + template <> + struct CppType + { + using Type = AcquireProfilingLockInfoKHR; + }; + + typedef void *( VKAPI_PTR * PFN_AllocationFunction )( void * pUserData, + size_t size, + size_t alignment, + VULKAN_HPP_NAMESPACE::SystemAllocationScope allocationScope ); + + typedef void *( VKAPI_PTR * PFN_ReallocationFunction )( + void * pUserData, void * pOriginal, size_t size, size_t alignment, VULKAN_HPP_NAMESPACE::SystemAllocationScope allocationScope ); + + typedef void( VKAPI_PTR * PFN_FreeFunction )( void * pUserData, void * pMemory ); + + typedef void( VKAPI_PTR * PFN_InternalAllocationNotification )( void * pUserData, + size_t size, + VULKAN_HPP_NAMESPACE::InternalAllocationType allocationType, + VULKAN_HPP_NAMESPACE::SystemAllocationScope allocationScope ); + + typedef void( VKAPI_PTR * PFN_InternalFreeNotification )( void * pUserData, + size_t 
size, + VULKAN_HPP_NAMESPACE::InternalAllocationType allocationType, + VULKAN_HPP_NAMESPACE::SystemAllocationScope allocationScope ); + + // wrapper struct for struct VkAllocationCallbacks, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAllocationCallbacks.html + struct AllocationCallbacks + { + using NativeType = VkAllocationCallbacks; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AllocationCallbacks( void * pUserData_ = {}, + VULKAN_HPP_NAMESPACE::PFN_AllocationFunction pfnAllocation_ = {}, + VULKAN_HPP_NAMESPACE::PFN_ReallocationFunction pfnReallocation_ = {}, + VULKAN_HPP_NAMESPACE::PFN_FreeFunction pfnFree_ = {}, + VULKAN_HPP_NAMESPACE::PFN_InternalAllocationNotification pfnInternalAllocation_ = {}, + VULKAN_HPP_NAMESPACE::PFN_InternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT + : pUserData{ pUserData_ } + , pfnAllocation{ pfnAllocation_ } + , pfnReallocation{ pfnReallocation_ } + , pfnFree{ pfnFree_ } + , pfnInternalAllocation{ pfnInternalAllocation_ } + , pfnInternalFree{ pfnInternalFree_ } + { + } + + VULKAN_HPP_CONSTEXPR AllocationCallbacks( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT : AllocationCallbacks( *reinterpret_cast( &rhs ) ) + { + } + +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." ) + + AllocationCallbacks( void * pUserData_, + PFN_vkAllocationFunction pfnAllocation_, + PFN_vkReallocationFunction pfnReallocation_ = {}, + PFN_vkFreeFunction pfnFree_ = {}, + PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, + PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT + : AllocationCallbacks( pUserData_, + reinterpret_cast( pfnAllocation_ ), + reinterpret_cast( pfnReallocation_ ), + reinterpret_cast( pfnFree_ ), + reinterpret_cast( pfnInternalAllocation_ ), + reinterpret_cast( pfnInternalFree_ ) ) + { + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif + + AllocationCallbacks & operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AllocationCallbacks & operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT + { + pUserData = pUserData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnAllocation( VULKAN_HPP_NAMESPACE::PFN_AllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT + { + pfnAllocation = pfnAllocation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnReallocation( VULKAN_HPP_NAMESPACE::PFN_ReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT + { + pfnReallocation = pfnReallocation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnFree( VULKAN_HPP_NAMESPACE::PFN_FreeFunction pfnFree_ ) VULKAN_HPP_NOEXCEPT + { + pfnFree = pfnFree_; + return *this; + } + + 
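    // A minimal usage sketch for vk::AllocationCallbacks (illustrative only; myAllocate and myFree
    // are assumed user callbacks matching the PFN_AllocationFunction / PFN_FreeFunction signatures
    // declared above, and myTrackerState is an assumed user bookkeeping object):
    //
    //   vk::AllocationCallbacks callbacks = vk::AllocationCallbacks{}
    //                                         .setPUserData( &myTrackerState )
    //                                         .setPfnAllocation( &myAllocate )
    //                                         .setPfnFree( &myFree );
    //
    // The chaining works because every setter returns the struct by reference.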
VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & + setPfnInternalAllocation( VULKAN_HPP_NAMESPACE::PFN_InternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT + { + pfnInternalAllocation = pfnInternalAllocation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalFree( VULKAN_HPP_NAMESPACE::PFN_InternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT + { + pfnInternalFree = pfnInternalFree_; + return *this; + } + +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." ) + + AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT + { + return setPfnAllocation( reinterpret_cast( pfnAllocation_ ) ); + } + + VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." ) + + AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT + { + return setPfnReallocation( reinterpret_cast( pfnReallocation_ ) ); + } + + VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." ) + + AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT + { + return setPfnInternalAllocation( reinterpret_cast( pfnInternalAllocation_ ) ); + } + + VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." 
) + + AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT + { + return setPfnInternalFree( reinterpret_cast( pfnInternalFree_ ) ); + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAllocationCallbacks const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAllocationCallbacks const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAllocationCallbacks *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( pUserData, pfnAllocation, pfnReallocation, pfnFree, pfnInternalAllocation, pfnInternalFree ); + } +#endif + + bool operator==( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT + { +#if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +#else + return ( pUserData == rhs.pUserData ) && ( pfnAllocation == rhs.pfnAllocation ) && ( pfnReallocation == rhs.pfnReallocation ) && + ( pfnFree == rhs.pfnFree ) && ( pfnInternalAllocation == rhs.pfnInternalAllocation ) && ( pfnInternalFree == rhs.pfnInternalFree ); +#endif + } + + bool operator!=( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + void * pUserData = {}; + VULKAN_HPP_NAMESPACE::PFN_AllocationFunction pfnAllocation = {}; + VULKAN_HPP_NAMESPACE::PFN_ReallocationFunction pfnReallocation = {}; + VULKAN_HPP_NAMESPACE::PFN_FreeFunction pfnFree = {}; + VULKAN_HPP_NAMESPACE::PFN_InternalAllocationNotification pfnInternalAllocation = {}; + VULKAN_HPP_NAMESPACE::PFN_InternalFreeNotification pfnInternalFree = {}; + }; + + // wrapper struct for struct VkAmigoProfilingSubmitInfoSEC, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAmigoProfilingSubmitInfoSEC.html + struct AmigoProfilingSubmitInfoSEC + { + using NativeType = VkAmigoProfilingSubmitInfoSEC; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAmigoProfilingSubmitInfoSEC; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AmigoProfilingSubmitInfoSEC( uint64_t firstDrawTimestamp_ = {}, uint64_t swapBufferTimestamp_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , firstDrawTimestamp{ firstDrawTimestamp_ } + , swapBufferTimestamp{ swapBufferTimestamp_ } + { + } + + VULKAN_HPP_CONSTEXPR AmigoProfilingSubmitInfoSEC( AmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AmigoProfilingSubmitInfoSEC( VkAmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT + : AmigoProfilingSubmitInfoSEC( *reinterpret_cast( &rhs ) ) + { + } + + AmigoProfilingSubmitInfoSEC & operator=( AmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AmigoProfilingSubmitInfoSEC & operator=( VkAmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
AmigoProfilingSubmitInfoSEC & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setFirstDrawTimestamp( uint64_t firstDrawTimestamp_ ) VULKAN_HPP_NOEXCEPT + { + firstDrawTimestamp = firstDrawTimestamp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setSwapBufferTimestamp( uint64_t swapBufferTimestamp_ ) VULKAN_HPP_NOEXCEPT + { + swapBufferTimestamp = swapBufferTimestamp_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAmigoProfilingSubmitInfoSEC const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAmigoProfilingSubmitInfoSEC &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAmigoProfilingSubmitInfoSEC const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAmigoProfilingSubmitInfoSEC *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, firstDrawTimestamp, swapBufferTimestamp ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AmigoProfilingSubmitInfoSEC const & ) const = default; +#else + bool operator==( AmigoProfilingSubmitInfoSEC const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( firstDrawTimestamp == rhs.firstDrawTimestamp ) && + ( swapBufferTimestamp == rhs.swapBufferTimestamp ); +# endif + } + + bool operator!=( AmigoProfilingSubmitInfoSEC const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAmigoProfilingSubmitInfoSEC; + const void * pNext = {}; + uint64_t firstDrawTimestamp = {}; + uint64_t swapBufferTimestamp = {}; + }; + + template <> + struct CppType + { + using Type = AmigoProfilingSubmitInfoSEC; + }; + + // wrapper struct for struct VkComponentMapping, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkComponentMapping.html + struct ComponentMapping + { + using NativeType = VkComponentMapping; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ComponentMapping( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, + VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, + VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, + VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity ) VULKAN_HPP_NOEXCEPT + : r{ r_ } + , g{ g_ } + , b{ b_ } + , a{ a_ } + { + } + + VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT : ComponentMapping( *reinterpret_cast( &rhs ) ) {} + + ComponentMapping & operator=( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ComponentMapping & operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setR( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ ) VULKAN_HPP_NOEXCEPT + { + r = r_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setG( VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ ) VULKAN_HPP_NOEXCEPT + { + g = g_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setB( VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ ) VULKAN_HPP_NOEXCEPT + { + b = b_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setA( VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ ) VULKAN_HPP_NOEXCEPT + { + a = a_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkComponentMapping const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkComponentMapping const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkComponentMapping *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( r, g, b, a ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ComponentMapping const & ) const = default; +#else + bool operator==( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( r == rhs.r ) && ( g == rhs.g ) && ( b == rhs.b ) && ( a == rhs.a ); +# endif + } + + bool operator!=( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ComponentSwizzle r = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; + VULKAN_HPP_NAMESPACE::ComponentSwizzle g = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; + VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; + VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; + }; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + // wrapper struct for struct VkAndroidHardwareBufferFormatProperties2ANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferFormatProperties2ANDROID.html + struct AndroidHardwareBufferFormatProperties2ANDROID + { + using NativeType = VkAndroidHardwareBufferFormatProperties2ANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatProperties2ANDROID( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint64_t externalFormat_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = 
VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , format{ format_ } + , externalFormat{ externalFormat_ } + , formatFeatures{ formatFeatures_ } + , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ } + , suggestedYcbcrModel{ suggestedYcbcrModel_ } + , suggestedYcbcrRange{ suggestedYcbcrRange_ } + , suggestedXChromaOffset{ suggestedXChromaOffset_ } + , suggestedYChromaOffset{ suggestedYChromaOffset_ } + { + } + + VULKAN_HPP_CONSTEXPR + AndroidHardwareBufferFormatProperties2ANDROID( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferFormatProperties2ANDROID( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferFormatProperties2ANDROID( *reinterpret_cast( &rhs ) ) + { + } + + AndroidHardwareBufferFormatProperties2ANDROID & operator=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AndroidHardwareBufferFormatProperties2ANDROID & operator=( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkAndroidHardwareBufferFormatProperties2ANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferFormatProperties2ANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferFormatProperties2ANDROID const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferFormatProperties2ANDROID *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + format, + externalFormat, + formatFeatures, + samplerYcbcrConversionComponents, + suggestedYcbcrModel, + suggestedYcbcrRange, + suggestedXChromaOffset, + suggestedYChromaOffset ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferFormatProperties2ANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( externalFormat == rhs.externalFormat ) && + ( formatFeatures == rhs.formatFeatures ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) && + ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) && + ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); +# endif + } + + bool operator!=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = 
VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint64_t externalFormat = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures = {}; + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + }; + + template <> + struct CppType + { + using Type = AndroidHardwareBufferFormatProperties2ANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + // wrapper struct for struct VkAndroidHardwareBufferFormatPropertiesANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferFormatPropertiesANDROID.html + struct AndroidHardwareBufferFormatPropertiesANDROID + { + using NativeType = VkAndroidHardwareBufferFormatPropertiesANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint64_t externalFormat_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , format{ format_ } + , externalFormat{ externalFormat_ } + , formatFeatures{ formatFeatures_ } + , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ } + , suggestedYcbcrModel{ suggestedYcbcrModel_ } + , suggestedYcbcrRange{ suggestedYcbcrRange_ } + , suggestedXChromaOffset{ suggestedXChromaOffset_ } + , suggestedYChromaOffset{ suggestedYChromaOffset_ } + { + } + + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferFormatPropertiesANDROID( *reinterpret_cast( &rhs ) ) + { + } + + AndroidHardwareBufferFormatPropertiesANDROID & operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AndroidHardwareBufferFormatPropertiesANDROID & operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + 
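+    // Note: the conversion operators below let this wrapper be handed directly to C Vulkan
+    // entry points that expect a VkAndroidHardwareBufferFormatPropertiesANDROID; the wrapper
+    // is intended to be layout-identical to the C struct, which is what makes the
+    // reinterpret_cast round-trip safe.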
operator VkAndroidHardwareBufferFormatPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferFormatPropertiesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferFormatPropertiesANDROID const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferFormatPropertiesANDROID *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + format, + externalFormat, + formatFeatures, + samplerYcbcrConversionComponents, + suggestedYcbcrModel, + suggestedYcbcrRange, + suggestedXChromaOffset, + suggestedYChromaOffset ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( externalFormat == rhs.externalFormat ) && + ( formatFeatures == rhs.formatFeatures ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) && + ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) && + ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); +# endif + } + + bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint64_t externalFormat = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {}; + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + }; + + template <> + struct CppType + { + using Type = AndroidHardwareBufferFormatPropertiesANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + // wrapper struct for struct VkAndroidHardwareBufferFormatResolvePropertiesANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferFormatResolvePropertiesANDROID.html + struct AndroidHardwareBufferFormatResolvePropertiesANDROID + { + using NativeType = VkAndroidHardwareBufferFormatResolvePropertiesANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID; + +# if 
!defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AndroidHardwareBufferFormatResolvePropertiesANDROID( VULKAN_HPP_NAMESPACE::Format colorAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , colorAttachmentFormat{ colorAttachmentFormat_ } + { + } + + VULKAN_HPP_CONSTEXPR + AndroidHardwareBufferFormatResolvePropertiesANDROID( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferFormatResolvePropertiesANDROID( VkAndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferFormatResolvePropertiesANDROID( *reinterpret_cast( &rhs ) ) + { + } + + AndroidHardwareBufferFormatResolvePropertiesANDROID & + operator=( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AndroidHardwareBufferFormatResolvePropertiesANDROID & operator=( VkAndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, colorAttachmentFormat ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferFormatResolvePropertiesANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorAttachmentFormat == rhs.colorAttachmentFormat ); +# endif + } + + bool operator!=( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format colorAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + + template <> + struct CppType + { + using Type = AndroidHardwareBufferFormatResolvePropertiesANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + // wrapper struct for struct VkAndroidHardwareBufferPropertiesANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferPropertiesANDROID.html + struct AndroidHardwareBufferPropertiesANDROID + { + using NativeType = VkAndroidHardwareBufferPropertiesANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eAndroidHardwareBufferPropertiesANDROID; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, + uint32_t memoryTypeBits_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , allocationSize{ allocationSize_ } + , memoryTypeBits{ memoryTypeBits_ } + { + } + + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferPropertiesANDROID( *reinterpret_cast( &rhs ) ) + { + } + + AndroidHardwareBufferPropertiesANDROID & operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AndroidHardwareBufferPropertiesANDROID & operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkAndroidHardwareBufferPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferPropertiesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferPropertiesANDROID const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferPropertiesANDROID *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, allocationSize, memoryTypeBits ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferPropertiesANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; + uint32_t memoryTypeBits = {}; + }; + + template <> + struct CppType + { + using Type = AndroidHardwareBufferPropertiesANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + // wrapper struct for struct VkAndroidHardwareBufferUsageANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferUsageANDROID.html + struct AndroidHardwareBufferUsageANDROID + { + using NativeType = VkAndroidHardwareBufferUsageANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferUsageANDROID; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , androidHardwareBufferUsage{ androidHardwareBufferUsage_ } + { + } + + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferUsageANDROID( *reinterpret_cast( &rhs ) ) + { + } + + AndroidHardwareBufferUsageANDROID & operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AndroidHardwareBufferUsageANDROID & operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkAndroidHardwareBufferUsageANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferUsageANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferUsageANDROID const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferUsageANDROID *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, androidHardwareBufferUsage ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferUsageANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage ); +# endif + } + + bool operator!=( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID; + void * pNext = {}; + uint64_t androidHardwareBufferUsage = {}; + }; + + template <> + struct CppType + { + using Type = AndroidHardwareBufferUsageANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + // wrapper struct for struct VkAndroidSurfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidSurfaceCreateInfoKHR.html + struct AndroidSurfaceCreateInfoKHR + { + using NativeType = VkAndroidSurfaceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidSurfaceCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {}, + struct ANativeWindow * window_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , window{ window_ } + { + } + + VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidSurfaceCreateInfoKHR( 
VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidSurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + AndroidSurfaceCreateInfoKHR & operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AndroidSurfaceCreateInfoKHR & operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow * window_ ) VULKAN_HPP_NOEXCEPT + { + window = window_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAndroidSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidSurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAndroidSurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, window ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidSurfaceCreateInfoKHR const & ) const = default; +# else + bool operator==( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window ); +# endif + } + + bool operator!=( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {}; + struct ANativeWindow * window = {}; + }; + + template <> + struct CppType + { + using Type = AndroidSurfaceCreateInfoKHR; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + // wrapper struct for struct VkAntiLagPresentationInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAntiLagPresentationInfoAMD.html + struct AntiLagPresentationInfoAMD + { + using NativeType = VkAntiLagPresentationInfoAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAntiLagPresentationInfoAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AntiLagPresentationInfoAMD( VULKAN_HPP_NAMESPACE::AntiLagStageAMD stage_ = VULKAN_HPP_NAMESPACE::AntiLagStageAMD::eInput, + uint64_t frameIndex_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stage{ stage_ } + , frameIndex{ frameIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR 
AntiLagPresentationInfoAMD( AntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AntiLagPresentationInfoAMD( VkAntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : AntiLagPresentationInfoAMD( *reinterpret_cast( &rhs ) ) + { + } + + AntiLagPresentationInfoAMD & operator=( AntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AntiLagPresentationInfoAMD & operator=( VkAntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD & setStage( VULKAN_HPP_NAMESPACE::AntiLagStageAMD stage_ ) VULKAN_HPP_NOEXCEPT + { + stage = stage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD & setFrameIndex( uint64_t frameIndex_ ) VULKAN_HPP_NOEXCEPT + { + frameIndex = frameIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAntiLagPresentationInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAntiLagPresentationInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAntiLagPresentationInfoAMD const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAntiLagPresentationInfoAMD *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stage, frameIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AntiLagPresentationInfoAMD const & ) const = default; +#else + bool operator==( AntiLagPresentationInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( frameIndex == rhs.frameIndex ); +# endif + } + + bool operator!=( AntiLagPresentationInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAntiLagPresentationInfoAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::AntiLagStageAMD stage = VULKAN_HPP_NAMESPACE::AntiLagStageAMD::eInput; + uint64_t frameIndex = {}; + }; + + template <> + struct CppType + { + using Type = AntiLagPresentationInfoAMD; + }; + + // wrapper struct for struct VkAntiLagDataAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAntiLagDataAMD.html + struct AntiLagDataAMD + { + using NativeType = VkAntiLagDataAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAntiLagDataAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AntiLagDataAMD( VULKAN_HPP_NAMESPACE::AntiLagModeAMD mode_ = VULKAN_HPP_NAMESPACE::AntiLagModeAMD::eDriverControl, + uint32_t maxFPS_ = {}, + const VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD * pPresentationInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ 
} + , mode{ mode_ } + , maxFPS{ maxFPS_ } + , pPresentationInfo{ pPresentationInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR AntiLagDataAMD( AntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AntiLagDataAMD( VkAntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT : AntiLagDataAMD( *reinterpret_cast( &rhs ) ) {} + + AntiLagDataAMD & operator=( AntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AntiLagDataAMD & operator=( VkAntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setMode( VULKAN_HPP_NAMESPACE::AntiLagModeAMD mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setMaxFPS( uint32_t maxFPS_ ) VULKAN_HPP_NOEXCEPT + { + maxFPS = maxFPS_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & + setPPresentationInfo( const VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD * pPresentationInfo_ ) VULKAN_HPP_NOEXCEPT + { + pPresentationInfo = pPresentationInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAntiLagDataAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAntiLagDataAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAntiLagDataAMD const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAntiLagDataAMD *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mode, maxFPS, pPresentationInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AntiLagDataAMD const & ) const = default; +#else + bool operator==( AntiLagDataAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && ( maxFPS == rhs.maxFPS ) && + ( pPresentationInfo == rhs.pPresentationInfo ); +# endif + } + + bool operator!=( AntiLagDataAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAntiLagDataAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AntiLagModeAMD mode = VULKAN_HPP_NAMESPACE::AntiLagModeAMD::eDriverControl; + uint32_t maxFPS = {}; + const VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD * pPresentationInfo = {}; + }; + + template <> + struct CppType + { + using Type = AntiLagDataAMD; + }; + + // wrapper struct for struct VkApplicationInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkApplicationInfo.html + struct ApplicationInfo + { + using NativeType = VkApplicationInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eApplicationInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ApplicationInfo( const char * pApplicationName_ = {}, + uint32_t 
applicationVersion_ = {}, + const char * pEngineName_ = {}, + uint32_t engineVersion_ = {}, + uint32_t apiVersion_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pApplicationName{ pApplicationName_ } + , applicationVersion{ applicationVersion_ } + , pEngineName{ pEngineName_ } + , engineVersion{ engineVersion_ } + , apiVersion{ apiVersion_ } + { + } + + VULKAN_HPP_CONSTEXPR ApplicationInfo( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ApplicationInfo( *reinterpret_cast( &rhs ) ) {} + + ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ApplicationInfo & operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPApplicationName( const char * pApplicationName_ ) VULKAN_HPP_NOEXCEPT + { + pApplicationName = pApplicationName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT + { + applicationVersion = applicationVersion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPEngineName( const char * pEngineName_ ) VULKAN_HPP_NOEXCEPT + { + pEngineName = pEngineName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setEngineVersion( uint32_t engineVersion_ ) VULKAN_HPP_NOEXCEPT + { + engineVersion = engineVersion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setApiVersion( uint32_t apiVersion_ ) VULKAN_HPP_NOEXCEPT + { + apiVersion = apiVersion_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkApplicationInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkApplicationInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkApplicationInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pApplicationName, applicationVersion, pEngineName, engineVersion, apiVersion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( pApplicationName != rhs.pApplicationName ) + if ( auto cmp = strcmp( pApplicationName, rhs.pApplicationName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = applicationVersion <=> rhs.applicationVersion; cmp != 0 ) + return cmp; + if ( pEngineName != rhs.pEngineName ) + if ( auto cmp = strcmp( pEngineName, rhs.pEngineName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = engineVersion <=> rhs.engineVersion; cmp != 0 ) + return cmp; + if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( ( pApplicationName == rhs.pApplicationName ) || ( strcmp( pApplicationName, rhs.pApplicationName ) == 0 ) ) && + ( applicationVersion == rhs.applicationVersion ) && ( ( pEngineName == rhs.pEngineName ) || ( strcmp( pEngineName, rhs.pEngineName ) == 0 ) ) && + ( engineVersion == rhs.engineVersion ) && ( apiVersion == rhs.apiVersion ); + } + + bool operator!=( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo; + const void * pNext = {}; + const char * pApplicationName = {}; + uint32_t applicationVersion = {}; + const char * pEngineName = {}; + uint32_t engineVersion = {}; + uint32_t apiVersion = {}; + }; + + template <> + struct CppType + { + using Type = ApplicationInfo; + }; + + // wrapper struct for struct VkAttachmentDescription, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentDescription.html + struct AttachmentDescription + { + using NativeType = VkAttachmentDescription; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AttachmentDescription( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT + : flags{ flags_ } + , format{ format_ } + , samples{ samples_ } + , loadOp{ loadOp_ } + , storeOp{ storeOp_ } + , stencilLoadOp{ stencilLoadOp_ } + , stencilStoreOp{ stencilStoreOp_ } + , initialLayout{ initialLayout_ } + , finalLayout{ finalLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentDescription( *reinterpret_cast( &rhs ) ) + { + } + + AttachmentDescription & operator=( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AttachmentDescription & operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = 
flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT + { + loadOp = loadOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT + { + storeOp = storeOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilLoadOp = stencilLoadOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilStoreOp = stencilStoreOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT + { + initialLayout = initialLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT + { + finalLayout = finalLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAttachmentDescription const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentDescription const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAttachmentDescription *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentDescription const & ) const = default; +#else + bool operator==( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) && ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && + ( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) && ( initialLayout == rhs.initialLayout ) && + ( finalLayout == rhs.finalLayout ); +# endif + } + + bool operator!=( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + 
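+    // Note: the stencil load/store ops below only affect the stencil aspect of a
+    // depth/stencil attachment; for formats without a stencil component they are ignored.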
VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + + // wrapper struct for struct VkAttachmentDescription2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentDescription2.html + struct AttachmentDescription2 + { + using NativeType = VkAttachmentDescription2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescription2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentDescription2( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , format{ format_ } + , samples{ samples_ } + , loadOp{ loadOp_ } + , storeOp{ storeOp_ } + , stencilLoadOp{ stencilLoadOp_ } + , stencilStoreOp{ stencilStoreOp_ } + , initialLayout{ initialLayout_ } + , finalLayout{ finalLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR AttachmentDescription2( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescription2( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentDescription2( *reinterpret_cast( &rhs ) ) + { + } + + AttachmentDescription2 & operator=( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AttachmentDescription2 & operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setLoadOp( 
VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT + { + loadOp = loadOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT + { + storeOp = storeOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilLoadOp = stencilLoadOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilStoreOp = stencilStoreOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT + { + initialLayout = initialLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT + { + finalLayout = finalLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAttachmentDescription2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentDescription2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentDescription2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAttachmentDescription2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentDescription2 const & ) const = default; +#else + bool operator==( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) && + ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && ( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) && + ( initialLayout == rhs.initialLayout ) && ( finalLayout == rhs.finalLayout ); +# endif + } + + bool operator!=( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = 
VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + + template <> + struct CppType + { + using Type = AttachmentDescription2; + }; + + using AttachmentDescription2KHR = AttachmentDescription2; + + // wrapper struct for struct VkAttachmentDescriptionStencilLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentDescriptionStencilLayout.html + struct AttachmentDescriptionStencilLayout + { + using NativeType = VkAttachmentDescriptionStencilLayout; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescriptionStencilLayout; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AttachmentDescriptionStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stencilInitialLayout{ stencilInitialLayout_ } + , stencilFinalLayout{ stencilFinalLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescriptionStencilLayout( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentDescriptionStencilLayout( *reinterpret_cast( &rhs ) ) + { + } + + AttachmentDescriptionStencilLayout & operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AttachmentDescriptionStencilLayout & operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & + setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT + { + stencilInitialLayout = stencilInitialLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & + setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT + { + stencilFinalLayout = stencilFinalLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAttachmentDescriptionStencilLayout const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentDescriptionStencilLayout &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentDescriptionStencilLayout const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAttachmentDescriptionStencilLayout *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stencilInitialLayout, stencilFinalLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
AttachmentDescriptionStencilLayout const & ) const = default; +#else + bool operator==( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilInitialLayout == rhs.stencilInitialLayout ) && + ( stencilFinalLayout == rhs.stencilFinalLayout ); +# endif + } + + bool operator!=( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescriptionStencilLayout; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + + template <> + struct CppType + { + using Type = AttachmentDescriptionStencilLayout; + }; + + using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout; + + // wrapper struct for struct VkAttachmentReference, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentReference.html + struct AttachmentReference + { + using NativeType = VkAttachmentReference; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentReference( uint32_t attachment_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT + : attachment{ attachment_ } + , layout{ layout_ } + { + } + + VULKAN_HPP_CONSTEXPR AttachmentReference( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT : AttachmentReference( *reinterpret_cast( &rhs ) ) + { + } + + AttachmentReference & operator=( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AttachmentReference & operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentReference & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT + { + attachment = attachment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentReference & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAttachmentReference const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentReference const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAttachmentReference *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( attachment, layout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentReference const & ) const = default; +#else + bool operator==( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + 
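+      // With reflection enabled, equality compares the std::tie'd member tuple element by
+      // element, which is equivalent to the explicit member-wise comparison in the #else branch.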
return this->reflect() == rhs.reflect(); +# else + return ( attachment == rhs.attachment ) && ( layout == rhs.layout ); +# endif + } + + bool operator!=( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t attachment = {}; + VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + + // wrapper struct for struct VkAttachmentReference2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentReference2.html + struct AttachmentReference2 + { + using NativeType = VkAttachmentReference2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReference2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentReference2( uint32_t attachment_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , attachment{ attachment_ } + , layout{ layout_ } + , aspectMask{ aspectMask_ } + { + } + + VULKAN_HPP_CONSTEXPR AttachmentReference2( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentReference2( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentReference2( *reinterpret_cast( &rhs ) ) + { + } + + AttachmentReference2 & operator=( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AttachmentReference2 & operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT + { + attachment = attachment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAttachmentReference2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentReference2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentReference2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAttachmentReference2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, attachment, layout, aspectMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentReference2 const & ) const = default; +#else + bool operator==( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == 
rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachment == rhs.attachment ) && ( layout == rhs.layout ) && + ( aspectMask == rhs.aspectMask ); +# endif + } + + bool operator!=( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2; + const void * pNext = {}; + uint32_t attachment = {}; + VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + }; + + template <> + struct CppType + { + using Type = AttachmentReference2; + }; + + using AttachmentReference2KHR = AttachmentReference2; + + // wrapper struct for struct VkAttachmentReferenceStencilLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentReferenceStencilLayout.html + struct AttachmentReferenceStencilLayout + { + using NativeType = VkAttachmentReferenceStencilLayout; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReferenceStencilLayout; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stencilLayout{ stencilLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentReferenceStencilLayout( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentReferenceStencilLayout( *reinterpret_cast( &rhs ) ) + { + } + + AttachmentReferenceStencilLayout & operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AttachmentReferenceStencilLayout & operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & setStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT + { + stencilLayout = stencilLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAttachmentReferenceStencilLayout const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentReferenceStencilLayout &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentReferenceStencilLayout const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAttachmentReferenceStencilLayout *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stencilLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentReferenceStencilLayout const & ) const = default; 
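+    // Note: a defaulted operator<=> (C++20) also yields an implicitly defaulted operator==,
+    // so the hand-written operator== / operator!= in the #else branch below are only needed
+    // when the spaceship operator is unavailable.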
+#else + bool operator==( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilLayout == rhs.stencilLayout ); +# endif + } + + bool operator!=( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReferenceStencilLayout; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + + template <> + struct CppType + { + using Type = AttachmentReferenceStencilLayout; + }; + + using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout; + + // wrapper struct for struct VkAttachmentSampleCountInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentSampleCountInfoAMD.html + struct AttachmentSampleCountInfoAMD + { + using NativeType = VkAttachmentSampleCountInfoAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentSampleCountInfoAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AttachmentSampleCountInfoAMD( uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachmentSamples{ pColorAttachmentSamples_ } + , depthStencilAttachmentSamples{ depthStencilAttachmentSamples_ } + { + } + + VULKAN_HPP_CONSTEXPR AttachmentSampleCountInfoAMD( AttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentSampleCountInfoAMD( VkAttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentSampleCountInfoAMD( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AttachmentSampleCountInfoAMD( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentSamples_, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , colorAttachmentCount( static_cast( colorAttachmentSamples_.size() ) ) + , pColorAttachmentSamples( colorAttachmentSamples_.data() ) + , depthStencilAttachmentSamples( depthStencilAttachmentSamples_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + AttachmentSampleCountInfoAMD & operator=( AttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AttachmentSampleCountInfoAMD & operator=( VkAttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = 
colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & + setPColorAttachmentSamples( const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachmentSamples = pColorAttachmentSamples_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AttachmentSampleCountInfoAMD & setColorAttachmentSamples( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachmentSamples_.size() ); + pColorAttachmentSamples = colorAttachmentSamples_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & + setDepthStencilAttachmentSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT + { + depthStencilAttachmentSamples = depthStencilAttachmentSamples_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAttachmentSampleCountInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentSampleCountInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentSampleCountInfoAMD const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAttachmentSampleCountInfoAMD *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, colorAttachmentCount, pColorAttachmentSamples, depthStencilAttachmentSamples ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentSampleCountInfoAMD const & ) const = default; +#else + bool operator==( AttachmentSampleCountInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachmentSamples == rhs.pColorAttachmentSamples ) && ( depthStencilAttachmentSamples == rhs.depthStencilAttachmentSamples ); +# endif + } + + bool operator!=( AttachmentSampleCountInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentSampleCountInfoAMD; + const void * pNext = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + }; + + template <> + struct CppType + { + using Type = AttachmentSampleCountInfoAMD; + }; + + using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD; + + // wrapper struct for struct VkExtent2D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExtent2D.html + struct Extent2D + { + using NativeType = VkExtent2D; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = {}, uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT + : width{ width_ } + , height{ height_ } + { + } + + VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
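+    // Usage sketch (added comment, not part of the generated code): Extent2D can be built with the fluent
+    // setters below and converts implicitly to the native VkExtent2D, e.g. (hypothetical values):
+    //   VULKAN_HPP_NAMESPACE::Extent2D extent = VULKAN_HPP_NAMESPACE::Extent2D().setWidth( 1280 ).setHeight( 720 );
+    //   VkExtent2D native = extent;   // via operator VkExtent2D const &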
+ + Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent2D( *reinterpret_cast<Extent2D const *>( &rhs ) ) {} + + Extent2D & operator=( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + Extent2D & operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent2D const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 Extent2D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Extent2D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExtent2D const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkExtent2D *>( this ); + } + + operator VkExtent2D &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkExtent2D *>( this ); + } + + operator VkExtent2D const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<const VkExtent2D *>( this ); + } + + operator VkExtent2D *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<VkExtent2D *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<uint32_t const &, uint32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( width, height ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Extent2D const & ) const = default; +#else + bool operator==( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( width == rhs.width ) && ( height == rhs.height ); +# endif + } + + bool operator!=( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t width = {}; + uint32_t height = {}; + }; + + // wrapper struct for struct VkSampleLocationEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSampleLocationEXT.html + struct SampleLocationEXT + { + using NativeType = VkSampleLocationEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT + : x{ x_ } + , y{ y_ } + { + } + + VULKAN_HPP_CONSTEXPR SampleLocationEXT( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT : SampleLocationEXT( *reinterpret_cast<SampleLocationEXT const *>( &rhs ) ) {} + + SampleLocationEXT & operator=( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SampleLocationEXT & operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationEXT const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT + { + y = y_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSampleLocationEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkSampleLocationEXT *>( this ); + } + + operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkSampleLocationEXT *>( this ); + } + + operator VkSampleLocationEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<const VkSampleLocationEXT *>( this ); + } + + operator VkSampleLocationEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<VkSampleLocationEXT *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<float const &, float const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( x, y ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SampleLocationEXT const & ) const = default; +#else + bool operator==( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( x == rhs.x ) && ( y == rhs.y ); +# endif + } + + bool operator!=( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float x = {}; + float y = {}; + }; + + // wrapper struct for struct VkSampleLocationsInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSampleLocationsInfoEXT.html + struct SampleLocationsInfoEXT + { + using NativeType = VkSampleLocationsInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSampleLocationsInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {}, + uint32_t sampleLocationsCount_ = {}, + const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , sampleLocationsPerPixel{ sampleLocationsPerPixel_ } + , sampleLocationGridSize{ sampleLocationGridSize_ } + , sampleLocationsCount{ sampleLocationsCount_ } + , pSampleLocations{ pSampleLocations_ } + { + } + + VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SampleLocationsInfoEXT( *reinterpret_cast<SampleLocationsInfoEXT const *>( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_, + VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const & sampleLocations_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , sampleLocationsPerPixel( sampleLocationsPerPixel_ ) + , sampleLocationGridSize( sampleLocationGridSize_ ) + , sampleLocationsCount( static_cast<uint32_t>( sampleLocations_.size() ) ) + , pSampleLocations( sampleLocations_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SampleLocationsInfoEXT & operator=( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SampleLocationsInfoEXT & operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & + setSampleLocationsPerPixel( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsPerPixel = sampleLocationsPerPixel_; + return *this; + } + +
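+    // Usage sketch (added comment, not part of the generated code): when VULKAN_HPP_DISABLE_ENHANCED_MODE is not
+    // defined, the ArrayProxyNoTemporaries constructor above derives sampleLocationsCount / pSampleLocations from a
+    // container, e.g. (hypothetical values):
+    //   std::array<VULKAN_HPP_NAMESPACE::SampleLocationEXT, 2> locations = { { { 0.25f, 0.25f }, { 0.75f, 0.75f } } };
+    //   VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT info( VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e2, { 1, 1 }, locations );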
VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & + setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationGridSize = sampleLocationGridSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationsCount( uint32_t sampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsCount = sampleLocationsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & + setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + pSampleLocations = pSampleLocations_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SampleLocationsInfoEXT & setSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsCount = static_cast( sampleLocations_.size() ); + pSampleLocations = sampleLocations_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSampleLocationsInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSampleLocationsInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSampleLocationsInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, sampleLocationsPerPixel, sampleLocationGridSize, sampleLocationsCount, pSampleLocations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SampleLocationsInfoEXT const & ) const = default; +#else + bool operator==( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel ) && + ( sampleLocationGridSize == rhs.sampleLocationGridSize ) && ( sampleLocationsCount == rhs.sampleLocationsCount ) && + ( pSampleLocations == rhs.pSampleLocations ); +# endif + } + + bool operator!=( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {}; + uint32_t sampleLocationsCount = {}; + const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations = {}; + }; + + template <> + struct CppType + { + using Type = SampleLocationsInfoEXT; + }; + + // wrapper struct for struct VkAttachmentSampleLocationsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentSampleLocationsEXT.html + struct AttachmentSampleLocationsEXT + { + using NativeType = VkAttachmentSampleLocationsEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = {}, + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT 
sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : attachmentIndex{ attachmentIndex_ } + , sampleLocationsInfo{ sampleLocationsInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentSampleLocationsEXT( *reinterpret_cast( &rhs ) ) + { + } + + AttachmentSampleLocationsEXT & operator=( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AttachmentSampleLocationsEXT & operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & setAttachmentIndex( uint32_t attachmentIndex_ ) VULKAN_HPP_NOEXCEPT + { + attachmentIndex = attachmentIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & + setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAttachmentSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentSampleLocationsEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkAttachmentSampleLocationsEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( attachmentIndex, sampleLocationsInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentSampleLocationsEXT const & ) const = default; +#else + bool operator==( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( attachmentIndex == rhs.attachmentIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); +# endif + } + + bool operator!=( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t attachmentIndex = {}; + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; + }; + + // wrapper struct for struct VkBaseInStructure, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBaseInStructure.html + struct BaseInStructure + { + using NativeType = VkBaseInStructure; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + BaseInStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo, + const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : sType{ sType_ } + , pNext{ pNext_ } + { + } + + BaseInStructure( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT : BaseInStructure( *reinterpret_cast( &rhs ) ) {} + + BaseInStructure & operator=( BaseInStructure const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BaseInStructure & operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BaseInStructure & setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBaseInStructure const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBaseInStructure const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBaseInStructure *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BaseInStructure const & ) const = default; +#else + bool operator==( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); +# endif + } + + bool operator!=( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo; + const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext = {}; + }; + + // wrapper struct for struct VkBaseOutStructure, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBaseOutStructure.html + struct BaseOutStructure + { + using NativeType = VkBaseOutStructure; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + BaseOutStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo, + struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : sType{ sType_ } + , pNext{ pNext_ } + { + } + + BaseOutStructure( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT : BaseOutStructure( *reinterpret_cast( &rhs ) ) {} + + BaseOutStructure & operator=( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BaseOutStructure & operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BaseOutStructure & setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBaseOutStructure const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBaseOutStructure const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBaseOutStructure *() VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BaseOutStructure const & ) const = default; +#else + bool operator==( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); +# endif + } + + bool operator!=( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo; + struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext = {}; + }; + + // wrapper struct for struct VkBindAccelerationStructureMemoryInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindAccelerationStructureMemoryInfoNV.html + struct BindAccelerationStructureMemoryInfoNV + { + using NativeType = VkBindAccelerationStructureMemoryInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindAccelerationStructureMemoryInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + uint32_t deviceIndexCount_ = {}, + const uint32_t * pDeviceIndices_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , accelerationStructure{ accelerationStructure_ } + , memory{ memory_ } + , memoryOffset{ memoryOffset_ } + , deviceIndexCount{ deviceIndexCount_ } + , pDeviceIndices{ pDeviceIndices_ } + { + } + + VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BindAccelerationStructureMemoryInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , accelerationStructure( accelerationStructure_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , deviceIndexCount( static_cast( deviceIndices_.size() ) ) + , pDeviceIndices( deviceIndices_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BindAccelerationStructureMemoryInfoNV & operator=( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindAccelerationStructureMemoryInfoNV & operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
BindAccelerationStructureMemoryInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & + setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = deviceIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceIndices = pDeviceIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindAccelerationStructureMemoryInfoNV & + setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = static_cast( deviceIndices_.size() ); + pDeviceIndices = deviceIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindAccelerationStructureMemoryInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindAccelerationStructureMemoryInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBindAccelerationStructureMemoryInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, accelerationStructure, memory, memoryOffset, deviceIndexCount, pDeviceIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindAccelerationStructureMemoryInfoNV const & ) const = default; +#else + bool operator==( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices ); +# endif + } + + bool operator!=( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + uint32_t deviceIndexCount = {}; + const uint32_t * pDeviceIndices 
= {}; + }; + + template <> + struct CppType + { + using Type = BindAccelerationStructureMemoryInfoNV; + }; + + // wrapper struct for struct VkBindBufferMemoryDeviceGroupInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindBufferMemoryDeviceGroupInfo.html + struct BindBufferMemoryDeviceGroupInfo + { + using NativeType = VkBindBufferMemoryDeviceGroupInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryDeviceGroupInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {}, + const uint32_t * pDeviceIndices_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceIndexCount{ deviceIndexCount_ } + , pDeviceIndices{ pDeviceIndices_ } + { + } + + VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindBufferMemoryDeviceGroupInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindBufferMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), deviceIndexCount( static_cast( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BindBufferMemoryDeviceGroupInfo & operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindBufferMemoryDeviceGroupInfo & operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = deviceIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceIndices = pDeviceIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindBufferMemoryDeviceGroupInfo & + setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = static_cast( deviceIndices_.size() ); + pDeviceIndices = deviceIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindBufferMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindBufferMemoryDeviceGroupInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBindBufferMemoryDeviceGroupInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto 
+# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceIndexCount, pDeviceIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindBufferMemoryDeviceGroupInfo const & ) const = default; +#else + bool operator==( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices ); +# endif + } + + bool operator!=( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo; + const void * pNext = {}; + uint32_t deviceIndexCount = {}; + const uint32_t * pDeviceIndices = {}; + }; + + template <> + struct CppType + { + using Type = BindBufferMemoryDeviceGroupInfo; + }; + + using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo; + + // wrapper struct for struct VkBindBufferMemoryInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindBufferMemoryInfo.html + struct BindBufferMemoryInfo + { + using NativeType = VkBindBufferMemoryInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , buffer{ buffer_ } + , memory{ memory_ } + , memoryOffset{ memoryOffset_ } + { + } + + VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindBufferMemoryInfo( *reinterpret_cast( &rhs ) ) + { + } + + BindBufferMemoryInfo & operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindBufferMemoryInfo & operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindBufferMemoryInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); 
+ } + + operator VkBindBufferMemoryInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBindBufferMemoryInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, buffer, memory, memoryOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindBufferMemoryInfo const & ) const = default; +#else + bool operator==( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ); +# endif + } + + bool operator!=( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + }; + + template <> + struct CppType + { + using Type = BindBufferMemoryInfo; + }; + + using BindBufferMemoryInfoKHR = BindBufferMemoryInfo; + + // wrapper struct for struct VkBindDescriptorBufferEmbeddedSamplersInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindDescriptorBufferEmbeddedSamplersInfoEXT.html + struct BindDescriptorBufferEmbeddedSamplersInfoEXT + { + using NativeType = VkBindDescriptorBufferEmbeddedSamplersInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindDescriptorBufferEmbeddedSamplersInfoEXT( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + uint32_t set_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stageFlags{ stageFlags_ } + , layout{ layout_ } + , set{ set_ } + { + } + + VULKAN_HPP_CONSTEXPR BindDescriptorBufferEmbeddedSamplersInfoEXT( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindDescriptorBufferEmbeddedSamplersInfoEXT( VkBindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : BindDescriptorBufferEmbeddedSamplersInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + BindDescriptorBufferEmbeddedSamplersInfoEXT & operator=( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindDescriptorBufferEmbeddedSamplersInfoEXT & operator=( VkBindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & + setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags 
stageFlags_ ) VULKAN_HPP_NOEXCEPT + { + stageFlags = stageFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT + { + set = set_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stageFlags, layout, set ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindDescriptorBufferEmbeddedSamplersInfoEXT const & ) const = default; +#else + bool operator==( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageFlags == rhs.stageFlags ) && ( layout == rhs.layout ) && ( set == rhs.set ); +# endif + } + + bool operator!=( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + uint32_t set = {}; + }; + + template <> + struct CppType + { + using Type = BindDescriptorBufferEmbeddedSamplersInfoEXT; + }; + + // wrapper struct for struct VkBindDescriptorSetsInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindDescriptorSetsInfo.html + struct BindDescriptorSetsInfo + { + using NativeType = VkBindDescriptorSetsInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindDescriptorSetsInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindDescriptorSetsInfo( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + uint32_t firstSet_ = {}, + uint32_t descriptorSetCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets_ = {}, + uint32_t dynamicOffsetCount_ = {}, + const uint32_t * pDynamicOffsets_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stageFlags{ stageFlags_ } + , layout{ layout_ } + , firstSet{ firstSet_ } + , descriptorSetCount{ descriptorSetCount_ } + , pDescriptorSets{ pDescriptorSets_ } + , dynamicOffsetCount{ dynamicOffsetCount_ } + , pDynamicOffsets{ pDynamicOffsets_ } + { + } + + VULKAN_HPP_CONSTEXPR BindDescriptorSetsInfo( BindDescriptorSetsInfo const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + + BindDescriptorSetsInfo( VkBindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindDescriptorSetsInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindDescriptorSetsInfo( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_, + uint32_t firstSet_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorSets_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicOffsets_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stageFlags( stageFlags_ ) + , layout( layout_ ) + , firstSet( firstSet_ ) + , descriptorSetCount( static_cast( descriptorSets_.size() ) ) + , pDescriptorSets( descriptorSets_.data() ) + , dynamicOffsetCount( static_cast( dynamicOffsets_.size() ) ) + , pDynamicOffsets( dynamicOffsets_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BindDescriptorSetsInfo & operator=( BindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindDescriptorSetsInfo & operator=( VkBindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + { + stageFlags = stageFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setFirstSet( uint32_t firstSet_ ) VULKAN_HPP_NOEXCEPT + { + firstSet = firstSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = descriptorSetCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setPDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets_ ) VULKAN_HPP_NOEXCEPT + { + pDescriptorSets = pDescriptorSets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindDescriptorSetsInfo & + setDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorSets_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = static_cast( descriptorSets_.size() ); + pDescriptorSets = descriptorSets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setDynamicOffsetCount( uint32_t dynamicOffsetCount_ ) VULKAN_HPP_NOEXCEPT + { + dynamicOffsetCount = dynamicOffsetCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setPDynamicOffsets( const uint32_t * pDynamicOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pDynamicOffsets = pDynamicOffsets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindDescriptorSetsInfo & setDynamicOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicOffsets_ ) VULKAN_HPP_NOEXCEPT + { + dynamicOffsetCount = static_cast( dynamicOffsets_.size() ); + pDynamicOffsets = dynamicOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif 
/*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindDescriptorSetsInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindDescriptorSetsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindDescriptorSetsInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBindDescriptorSetsInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stageFlags, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindDescriptorSetsInfo const & ) const = default; +#else + bool operator==( BindDescriptorSetsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageFlags == rhs.stageFlags ) && ( layout == rhs.layout ) && ( firstSet == rhs.firstSet ) && + ( descriptorSetCount == rhs.descriptorSetCount ) && ( pDescriptorSets == rhs.pDescriptorSets ) && + ( dynamicOffsetCount == rhs.dynamicOffsetCount ) && ( pDynamicOffsets == rhs.pDynamicOffsets ); +# endif + } + + bool operator!=( BindDescriptorSetsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindDescriptorSetsInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + uint32_t firstSet = {}; + uint32_t descriptorSetCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets = {}; + uint32_t dynamicOffsetCount = {}; + const uint32_t * pDynamicOffsets = {}; + }; + + template <> + struct CppType + { + using Type = BindDescriptorSetsInfo; + }; + + using BindDescriptorSetsInfoKHR = BindDescriptorSetsInfo; + + // wrapper struct for struct VkOffset2D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOffset2D.html + struct Offset2D + { + using NativeType = VkOffset2D; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {}, int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT + : x{ x_ } + , y{ y_ } + { + } + + VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset2D( *reinterpret_cast( &rhs ) ) {} + + Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + Offset2D & operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 Offset2D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Offset2D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT + { + y = y_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkOffset2D const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOffset2D &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + 
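+    // Usage sketch (added comment, not part of the generated code): Offset2D pairs with Extent2D inside Rect2D
+    // (defined below) to describe scissor rectangles and render areas, e.g. (hypothetical values):
+    //   VULKAN_HPP_NAMESPACE::Rect2D scissor( VULKAN_HPP_NAMESPACE::Offset2D( 0, 0 ), VULKAN_HPP_NAMESPACE::Extent2D( 1280, 720 ) );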
operator VkOffset2D const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<const VkOffset2D *>( this ); + } + + operator VkOffset2D *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<VkOffset2D *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<int32_t const &, int32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( x, y ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Offset2D const & ) const = default; +#else + bool operator==( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( x == rhs.x ) && ( y == rhs.y ); +# endif + } + + bool operator!=( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + int32_t x = {}; + int32_t y = {}; + }; + + // wrapper struct for struct VkRect2D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRect2D.html + struct Rect2D + { + using NativeType = VkRect2D; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Rect2D( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : offset{ offset_ } + , extent{ extent_ } + { + } + + VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT : Rect2D( *reinterpret_cast<Rect2D const *>( &rhs ) ) {} + + Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + Rect2D & operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Rect2D const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Rect2D & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRect2D const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkRect2D *>( this ); + } + + operator VkRect2D &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkRect2D *>( this ); + } + + operator VkRect2D const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<const VkRect2D *>( this ); + } + + operator VkRect2D *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<VkRect2D *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( offset, extent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Rect2D const & ) const = default; +#else + bool operator==( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( offset == rhs.offset ) && ( extent == rhs.extent ); +# endif + } + + bool operator!=( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Offset2D offset = {}; + VULKAN_HPP_NAMESPACE::Extent2D extent = {}; + }; + + // wrapper struct for struct VkBindImageMemoryDeviceGroupInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindImageMemoryDeviceGroupInfo.html + struct 
BindImageMemoryDeviceGroupInfo + { + using NativeType = VkBindImageMemoryDeviceGroupInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryDeviceGroupInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {}, + const uint32_t * pDeviceIndices_ = {}, + uint32_t splitInstanceBindRegionCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceIndexCount{ deviceIndexCount_ } + , pDeviceIndices{ pDeviceIndices_ } + , splitInstanceBindRegionCount{ splitInstanceBindRegionCount_ } + , pSplitInstanceBindRegions{ pSplitInstanceBindRegions_ } + { + } + + VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindImageMemoryDeviceGroupInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindImageMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & splitInstanceBindRegions_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , deviceIndexCount( static_cast( deviceIndices_.size() ) ) + , pDeviceIndices( deviceIndices_.data() ) + , splitInstanceBindRegionCount( static_cast( splitInstanceBindRegions_.size() ) ) + , pSplitInstanceBindRegions( splitInstanceBindRegions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BindImageMemoryDeviceGroupInfo & operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindImageMemoryDeviceGroupInfo & operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = deviceIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceIndices = pDeviceIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindImageMemoryDeviceGroupInfo & + setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = static_cast( deviceIndices_.size() ); + pDeviceIndices = deviceIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT + { + splitInstanceBindRegionCount = splitInstanceBindRegionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & + setPSplitInstanceBindRegions( const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ ) 
VULKAN_HPP_NOEXCEPT + { + pSplitInstanceBindRegions = pSplitInstanceBindRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & splitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT + { + splitInstanceBindRegionCount = static_cast( splitInstanceBindRegions_.size() ); + pSplitInstanceBindRegions = splitInstanceBindRegions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindImageMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImageMemoryDeviceGroupInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBindImageMemoryDeviceGroupInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceIndexCount, pDeviceIndices, splitInstanceBindRegionCount, pSplitInstanceBindRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImageMemoryDeviceGroupInfo const & ) const = default; +#else + bool operator==( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices ) && + ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount ) && ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions ); +# endif + } + + bool operator!=( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo; + const void * pNext = {}; + uint32_t deviceIndexCount = {}; + const uint32_t * pDeviceIndices = {}; + uint32_t splitInstanceBindRegionCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions = {}; + }; + + template <> + struct CppType + { + using Type = BindImageMemoryDeviceGroupInfo; + }; + + using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo; + + // wrapper struct for struct VkBindImageMemoryInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindImageMemoryInfo.html + struct BindImageMemoryInfo + { + using NativeType = VkBindImageMemoryInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , image{ image_ } + , memory{ memory_ } + , memoryOffset{ memoryOffset_ } + { + } + + VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
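/* Editorial note: a hedged usage sketch, not part of the generated header. BindImageMemoryInfo
   is the vulkan.hpp counterpart of VkBindImageMemoryInfo and is consumed by
   vk::Device::bindImageMemory2 (core since Vulkan 1.1). The handles below (device, image,
   memory) are assumed to exist already; binding at offset 0 could look like:

       vk::BindImageMemoryInfo bindInfo = vk::BindImageMemoryInfo{}
                                              .setImage( image )
                                              .setMemory( memory )
                                              .setMemoryOffset( 0 );
       device.bindImageMemory2( bindInfo );   // ArrayProxy parameter: a single struct or a
                                              // std::vector of them both work
*/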
BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BindImageMemoryInfo( *reinterpret_cast( &rhs ) ) + { + } + + BindImageMemoryInfo & operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindImageMemoryInfo & operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindImageMemoryInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImageMemoryInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBindImageMemoryInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image, memory, memoryOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImageMemoryInfo const & ) const = default; +#else + bool operator==( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ); +# endif + } + + bool operator!=( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + }; + + template <> + struct CppType + { + using Type = BindImageMemoryInfo; + }; + + using BindImageMemoryInfoKHR = BindImageMemoryInfo; + + // wrapper struct for struct VkBindImageMemorySwapchainInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindImageMemorySwapchainInfoKHR.html + struct BindImageMemorySwapchainInfoKHR + { + using NativeType = VkBindImageMemorySwapchainInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemorySwapchainInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, + uint32_t imageIndex_ = {}, + 
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , swapchain{ swapchain_ } + , imageIndex{ imageIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : BindImageMemorySwapchainInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + BindImageMemorySwapchainInfoKHR & operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindImageMemorySwapchainInfoKHR & operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT + { + swapchain = swapchain_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setImageIndex( uint32_t imageIndex_ ) VULKAN_HPP_NOEXCEPT + { + imageIndex = imageIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindImageMemorySwapchainInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImageMemorySwapchainInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBindImageMemorySwapchainInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchain, imageIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImageMemorySwapchainInfoKHR const & ) const = default; +#else + bool operator==( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && ( imageIndex == rhs.imageIndex ); +# endif + } + + bool operator!=( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + uint32_t imageIndex = {}; + }; + + template <> + struct CppType + { + using Type = BindImageMemorySwapchainInfoKHR; + }; + + // wrapper struct for struct VkBindImagePlaneMemoryInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindImagePlaneMemoryInfo.html + struct BindImagePlaneMemoryInfo + { + using NativeType = VkBindImagePlaneMemoryInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImagePlaneMemoryInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
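/* Editorial note: a hedged usage sketch, not part of the generated header.
   BindImagePlaneMemoryInfo extends BindImageMemoryInfo through pNext when an image was created
   with vk::ImageCreateFlagBits::eDisjoint, letting each plane of a multi-planar format be bound
   to its own memory. Assuming image and planeMemory (memory for plane 0) already exist:

       vk::BindImagePlaneMemoryInfo planeInfo{ vk::ImageAspectFlagBits::ePlane0 };
       vk::BindImageMemoryInfo      bindInfo{ image, planeMemory, 0, &planeInfo };
       device.bindImageMemory2( bindInfo );
*/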
VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , planeAspect{ planeAspect_ } + { + } + + VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindImagePlaneMemoryInfo( *reinterpret_cast( &rhs ) ) + { + } + + BindImagePlaneMemoryInfo & operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindImagePlaneMemoryInfo & operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT + { + planeAspect = planeAspect_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindImagePlaneMemoryInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImagePlaneMemoryInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBindImagePlaneMemoryInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, planeAspect ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImagePlaneMemoryInfo const & ) const = default; +#else + bool operator==( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect ); +# endif + } + + bool operator!=( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + }; + + template <> + struct CppType + { + using Type = BindImagePlaneMemoryInfo; + }; + + using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo; + + // wrapper struct for struct VkBindIndexBufferIndirectCommandEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindIndexBufferIndirectCommandEXT.html + struct BindIndexBufferIndirectCommandEXT + { + using NativeType = VkBindIndexBufferIndirectCommandEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BindIndexBufferIndirectCommandEXT( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t size_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) 
VULKAN_HPP_NOEXCEPT + : bufferAddress{ bufferAddress_ } + , size{ size_ } + , indexType{ indexType_ } + { + } + + VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandEXT( BindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindIndexBufferIndirectCommandEXT( VkBindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : BindIndexBufferIndirectCommandEXT( *reinterpret_cast( &rhs ) ) + { + } + + BindIndexBufferIndirectCommandEXT & operator=( BindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindIndexBufferIndirectCommandEXT & operator=( VkBindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferAddress = bufferAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindIndexBufferIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindIndexBufferIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindIndexBufferIndirectCommandEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBindIndexBufferIndirectCommandEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( bufferAddress, size, indexType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindIndexBufferIndirectCommandEXT const & ) const = default; +#else + bool operator==( BindIndexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( indexType == rhs.indexType ); +# endif + } + + bool operator!=( BindIndexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t size = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + }; + + // wrapper struct for struct VkBindIndexBufferIndirectCommandNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindIndexBufferIndirectCommandNV.html + struct BindIndexBufferIndirectCommandNV + { + using NativeType = VkBindIndexBufferIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BindIndexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t size_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) 
VULKAN_HPP_NOEXCEPT + : bufferAddress{ bufferAddress_ } + , size{ size_ } + , indexType{ indexType_ } + { + } + + VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindIndexBufferIndirectCommandNV( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BindIndexBufferIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + BindIndexBufferIndirectCommandNV & operator=( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindIndexBufferIndirectCommandNV & operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferAddress = bufferAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindIndexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindIndexBufferIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBindIndexBufferIndirectCommandNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( bufferAddress, size, indexType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindIndexBufferIndirectCommandNV const & ) const = default; +#else + bool operator==( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( indexType == rhs.indexType ); +# endif + } + + bool operator!=( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t size = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + }; + + // wrapper struct for struct VkBindMemoryStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindMemoryStatus.html + struct BindMemoryStatus + { + using NativeType = VkBindMemoryStatus; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindMemoryStatus; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindMemoryStatus( VULKAN_HPP_NAMESPACE::Result * pResult_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pResult{ 
pResult_ } + { + } + + VULKAN_HPP_CONSTEXPR BindMemoryStatus( BindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindMemoryStatus( VkBindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT : BindMemoryStatus( *reinterpret_cast( &rhs ) ) {} + + BindMemoryStatus & operator=( BindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindMemoryStatus & operator=( VkBindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindMemoryStatus & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindMemoryStatus & setPResult( VULKAN_HPP_NAMESPACE::Result * pResult_ ) VULKAN_HPP_NOEXCEPT + { + pResult = pResult_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindMemoryStatus const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindMemoryStatus &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindMemoryStatus const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBindMemoryStatus *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pResult ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindMemoryStatus const & ) const = default; +#else + bool operator==( BindMemoryStatus const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pResult == rhs.pResult ); +# endif + } + + bool operator!=( BindMemoryStatus const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindMemoryStatus; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Result * pResult = {}; + }; + + template <> + struct CppType + { + using Type = BindMemoryStatus; + }; + + using BindMemoryStatusKHR = BindMemoryStatus; + + // wrapper struct for struct VkBindPipelineIndirectCommandNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindPipelineIndirectCommandNV.html + struct BindPipelineIndirectCommandNV + { + using NativeType = VkBindPipelineIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindPipelineIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress pipelineAddress_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineAddress{ pipelineAddress_ } + { + } + + VULKAN_HPP_CONSTEXPR BindPipelineIndirectCommandNV( BindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindPipelineIndirectCommandNV( VkBindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BindPipelineIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + BindPipelineIndirectCommandNV & operator=( BindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindPipelineIndirectCommandNV & operator=( VkBindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = 
*reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindPipelineIndirectCommandNV & setPipelineAddress( VULKAN_HPP_NAMESPACE::DeviceAddress pipelineAddress_ ) VULKAN_HPP_NOEXCEPT + { + pipelineAddress = pipelineAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindPipelineIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindPipelineIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindPipelineIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBindPipelineIndirectCommandNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( pipelineAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindPipelineIndirectCommandNV const & ) const = default; +#else + bool operator==( BindPipelineIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( pipelineAddress == rhs.pipelineAddress ); +# endif + } + + bool operator!=( BindPipelineIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress pipelineAddress = {}; + }; + + // wrapper struct for struct VkBindShaderGroupIndirectCommandNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindShaderGroupIndirectCommandNV.html + struct BindShaderGroupIndirectCommandNV + { + using NativeType = VkBindShaderGroupIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( uint32_t groupIndex_ = {} ) VULKAN_HPP_NOEXCEPT : groupIndex{ groupIndex_ } {} + + VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindShaderGroupIndirectCommandNV( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BindShaderGroupIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + BindShaderGroupIndirectCommandNV & operator=( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindShaderGroupIndirectCommandNV & operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindShaderGroupIndirectCommandNV & setGroupIndex( uint32_t groupIndex_ ) VULKAN_HPP_NOEXCEPT + { + groupIndex = groupIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindShaderGroupIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindShaderGroupIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBindShaderGroupIndirectCommandNV *() 
VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( groupIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindShaderGroupIndirectCommandNV const & ) const = default; +#else + bool operator==( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( groupIndex == rhs.groupIndex ); +# endif + } + + bool operator!=( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t groupIndex = {}; + }; + + // wrapper struct for struct VkSparseMemoryBind, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseMemoryBind.html + struct SparseMemoryBind + { + using NativeType = VkSparseMemoryBind; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseMemoryBind( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : resourceOffset{ resourceOffset_ } + , size{ size_ } + , memory{ memory_ } + , memoryOffset{ memoryOffset_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR SparseMemoryBind( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT : SparseMemoryBind( *reinterpret_cast( &rhs ) ) {} + + SparseMemoryBind & operator=( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SparseMemoryBind & operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setResourceOffset( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ ) VULKAN_HPP_NOEXCEPT + { + resourceOffset = resourceOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSparseMemoryBind const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseMemoryBind const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSparseMemoryBind *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( resourceOffset, size, memory, memoryOffset, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseMemoryBind const & ) const = default; +#else + bool operator==( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( resourceOffset == rhs.resourceOffset ) && ( size == rhs.size ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) && + ( flags == rhs.flags ); +# endif + } + + bool operator!=( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; + }; + + // wrapper struct for struct VkSparseBufferMemoryBindInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseBufferMemoryBindInfo.html + struct SparseBufferMemoryBindInfo + { + using NativeType = VkSparseBufferMemoryBindInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer{ buffer_ } + , bindCount{ bindCount_ } + , pBinds{ pBinds_ } + { + } + + VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseBufferMemoryBindInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + : buffer( buffer_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SparseBufferMemoryBindInfo & operator=( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SparseBufferMemoryBindInfo & operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = bindCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBinds = pBinds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseBufferMemoryBindInfo & + setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = static_cast( binds_.size() ); + pBinds = binds_.data(); + return *this; + 
} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSparseBufferMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseBufferMemoryBindInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSparseBufferMemoryBindInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( buffer, bindCount, pBinds ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseBufferMemoryBindInfo const & ) const = default; +#else + bool operator==( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( buffer == rhs.buffer ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); +# endif + } + + bool operator!=( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {}; + }; + + // wrapper struct for struct VkSparseImageOpaqueMemoryBindInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageOpaqueMemoryBindInfo.html + struct SparseImageOpaqueMemoryBindInfo + { + using NativeType = VkSparseImageOpaqueMemoryBindInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT + : image{ image_ } + , bindCount{ bindCount_ } + , pBinds{ pBinds_ } + { + } + + VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageOpaqueMemoryBindInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + : image( image_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SparseImageOpaqueMemoryBindInfo & operator=( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SparseImageOpaqueMemoryBindInfo & operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = bindCount_; + return *this; + } + + 
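/* Editorial note: a hedged usage sketch, not part of the generated header. SparseMemoryBind
   describes a single range of a sparsely bound resource; SparseBufferMemoryBindInfo and
   SparseImageOpaqueMemoryBindInfo group such ranges per buffer or per image. Assuming buffer
   and memory already exist, one 64 KiB range could be set up like this:

       vk::SparseMemoryBind range = vk::SparseMemoryBind{}
                                        .setResourceOffset( 0 )
                                        .setSize( 65536 )
                                        .setMemory( memory )
                                        .setMemoryOffset( 0 );
       vk::SparseBufferMemoryBindInfo bufferBind{ buffer, range };   // ArrayProxy constructor
*/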
VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBinds = pBinds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseImageOpaqueMemoryBindInfo & + setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = static_cast( binds_.size() ); + pBinds = binds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSparseImageOpaqueMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageOpaqueMemoryBindInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSparseImageOpaqueMemoryBindInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( image, bindCount, pBinds ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageOpaqueMemoryBindInfo const & ) const = default; +#else + bool operator==( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); +# endif + } + + bool operator!=( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Image image = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {}; + }; + + // wrapper struct for struct VkImageSubresource, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSubresource.html + struct ImageSubresource + { + using NativeType = VkImageSubresource; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask{ aspectMask_ } + , mipLevel{ mipLevel_ } + , arrayLayer{ arrayLayer_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT : ImageSubresource( *reinterpret_cast( &rhs ) ) {} + + ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageSubresource & operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT + { + mipLevel = mipLevel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setArrayLayer( uint32_t arrayLayer_ ) 
VULKAN_HPP_NOEXCEPT + { + arrayLayer = arrayLayer_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageSubresource const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresource const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageSubresource *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( aspectMask, mipLevel, arrayLayer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSubresource const & ) const = default; +#else + bool operator==( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && ( arrayLayer == rhs.arrayLayer ); +# endif + } + + bool operator!=( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t mipLevel = {}; + uint32_t arrayLayer = {}; + }; + + // wrapper struct for struct VkOffset3D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOffset3D.html + struct Offset3D + { + using NativeType = VkOffset3D; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT + : x{ x_ } + , y{ y_ } + , z{ z_ } + { + } + + VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset3D( *reinterpret_cast( &rhs ) ) {} + + explicit Offset3D( Offset2D const & offset2D, int32_t z_ = {} ) : x( offset2D.x ), y( offset2D.y ), z( z_ ) {} + + Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + Offset3D & operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 Offset3D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Offset3D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT + { + y = y_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Offset3D & setZ( int32_t z_ ) VULKAN_HPP_NOEXCEPT + { + z = z_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkOffset3D const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOffset3D &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOffset3D const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkOffset3D *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( x, y, z ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Offset3D const & ) const = default; +#else 
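/* Editorial note: a hedged usage sketch, not part of the generated header. Offset3D (and,
   further below, Extent3D) adds an explicit constructor that widens its 2D counterpart, which
   is handy when a swapchain extent has to be turned into a 3D region:

       vk::Extent2D surface{ 1920, 1080 };
       vk::Extent3D copyExtent{ surface, 1 };            // depth = 1
       vk::Offset3D origin{ vk::Offset2D{ 0, 0 }, 0 };   // z = 0
*/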
+ bool operator==( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z ); +# endif + } + + bool operator!=( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + int32_t x = {}; + int32_t y = {}; + int32_t z = {}; + }; + + // wrapper struct for struct VkExtent3D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExtent3D.html + struct Extent3D + { + using NativeType = VkExtent3D; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Extent3D( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT + : width{ width_ } + , height{ height_ } + , depth{ depth_ } + { + } + + VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent3D( *reinterpret_cast( &rhs ) ) {} + + explicit Extent3D( Extent2D const & extent2D, uint32_t depth_ = {} ) : width( extent2D.width ), height( extent2D.height ), depth( depth_ ) {} + + Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + Extent3D & operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 Extent3D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Extent3D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Extent3D & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT + { + depth = depth_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExtent3D const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExtent3D &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExtent3D const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExtent3D *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( width, height, depth ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Extent3D const & ) const = default; +#else + bool operator==( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth ); +# endif + } + + bool operator!=( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t width = {}; + uint32_t height = {}; + uint32_t depth = {}; + }; + + // wrapper struct for struct VkSparseImageMemoryBind, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageMemoryBind.html + struct SparseImageMemoryBind + { + using NativeType = VkSparseImageMemoryBind; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( 
VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D offset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : subresource{ subresource_ } + , offset{ offset_ } + , extent{ extent_ } + , memory{ memory_ } + , memoryOffset{ memoryOffset_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageMemoryBind( *reinterpret_cast( &rhs ) ) + { + } + + SparseImageMemoryBind & operator=( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SparseImageMemoryBind & operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT + { + subresource = subresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setOffset( VULKAN_HPP_NAMESPACE::Offset3D const & offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSparseImageMemoryBind const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageMemoryBind const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSparseImageMemoryBind *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( subresource, offset, extent, memory, memoryOffset, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryBind const & ) const = default; +#else + bool operator==( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( subresource == rhs.subresource ) && ( offset == rhs.offset ) && ( extent == rhs.extent ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( SparseImageMemoryBind const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D offset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; + }; + + // wrapper struct for struct VkSparseImageMemoryBindInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageMemoryBindInfo.html + struct SparseImageMemoryBindInfo + { + using NativeType = VkSparseImageMemoryBindInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT + : image{ image_ } + , bindCount{ bindCount_ } + , pBinds{ pBinds_ } + { + } + + VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageMemoryBindInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + : image( image_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SparseImageMemoryBindInfo & operator=( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SparseImageMemoryBindInfo & operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = bindCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBinds = pBinds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseImageMemoryBindInfo & + setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = static_cast( binds_.size() ); + pBinds = binds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSparseImageMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageMemoryBindInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSparseImageMemoryBindInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( image, bindCount, 
pBinds ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryBindInfo const & ) const = default; +#else + bool operator==( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); +# endif + } + + bool operator!=( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Image image = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds = {}; + }; + + // wrapper struct for struct VkBindSparseInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindSparseInfo.html + struct BindSparseInfo + { + using NativeType = VkBindSparseInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindSparseInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindSparseInfo( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, + uint32_t bufferBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ = {}, + uint32_t imageOpaqueBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ = {}, + uint32_t imageBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , waitSemaphoreCount{ waitSemaphoreCount_ } + , pWaitSemaphores{ pWaitSemaphores_ } + , bufferBindCount{ bufferBindCount_ } + , pBufferBinds{ pBufferBinds_ } + , imageOpaqueBindCount{ imageOpaqueBindCount_ } + , pImageOpaqueBinds{ pImageOpaqueBinds_ } + , imageBindCount{ imageBindCount_ } + , pImageBinds{ pImageBinds_ } + , signalSemaphoreCount{ signalSemaphoreCount_ } + , pSignalSemaphores{ pSignalSemaphores_ } + { + } + + VULKAN_HPP_CONSTEXPR BindSparseInfo( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BindSparseInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferBinds_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageOpaqueBinds_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageBinds_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ) + , pWaitSemaphores( waitSemaphores_.data() ) + , bufferBindCount( static_cast( bufferBinds_.size() ) ) + , pBufferBinds( bufferBinds_.data() ) + , imageOpaqueBindCount( static_cast( imageOpaqueBinds_.size() ) ) + , pImageOpaqueBinds( imageOpaqueBinds_.data() ) + , imageBindCount( static_cast( imageBinds_.size() ) ) + , pImageBinds( imageBinds_.data() ) + , signalSemaphoreCount( static_cast( signalSemaphores_.size() ) ) + , pSignalSemaphores( signalSemaphores_.data() ) + { + } +# endif 
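/* Editorial note: a hedged usage sketch, not part of the generated header. BindSparseInfo
   bundles the buffer, opaque-image and image bind arrays together with wait/signal semaphores,
   and is submitted through vk::Queue::bindSparse rather than a command buffer. Assuming
   bufferBind (see the SparseBufferMemoryBindInfo sketch above), renderSemaphore, fence and a
   queue with sparse-binding support already exist:

       vk::BindSparseInfo bindSparse = vk::BindSparseInfo{}
                                           .setBufferBinds( bufferBind )
                                           .setSignalSemaphores( renderSemaphore );
       sparseQueue.bindSparse( bindSparse, fence );
*/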
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindSparseInfo & operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & + setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setBufferBindCount( uint32_t bufferBindCount_ ) VULKAN_HPP_NOEXCEPT + { + bufferBindCount = bufferBindCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBufferBinds = pBufferBinds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setBufferBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferBinds_ ) VULKAN_HPP_NOEXCEPT + { + bufferBindCount = static_cast( bufferBinds_.size() ); + pBufferBinds = bufferBinds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) VULKAN_HPP_NOEXCEPT + { + imageOpaqueBindCount = imageOpaqueBindCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & + setPImageOpaqueBinds( const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT + { + pImageOpaqueBinds = pImageOpaqueBinds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setImageOpaqueBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT + { + imageOpaqueBindCount = static_cast( imageOpaqueBinds_.size() ); + pImageOpaqueBinds = imageOpaqueBinds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setImageBindCount( uint32_t imageBindCount_ ) VULKAN_HPP_NOEXCEPT + { + imageBindCount = imageBindCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ ) VULKAN_HPP_NOEXCEPT + { + pImageBinds = pImageBinds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setImageBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageBinds_ ) + VULKAN_HPP_NOEXCEPT + { + imageBindCount = static_cast( imageBinds_.size() ); + pImageBinds = imageBinds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + 
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphores = pSignalSemaphores_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & + setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = static_cast( signalSemaphores_.size() ); + pSignalSemaphores = signalSemaphores_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindSparseInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBindSparseInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + waitSemaphoreCount, + pWaitSemaphores, + bufferBindCount, + pBufferBinds, + imageOpaqueBindCount, + pImageOpaqueBinds, + imageBindCount, + pImageBinds, + signalSemaphoreCount, + pSignalSemaphores ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindSparseInfo const & ) const = default; +#else + bool operator==( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( bufferBindCount == rhs.bufferBindCount ) && ( pBufferBinds == rhs.pBufferBinds ) && + ( imageOpaqueBindCount == rhs.imageOpaqueBindCount ) && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds ) && + ( imageBindCount == rhs.imageBindCount ) && ( pImageBinds == rhs.pImageBinds ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && + ( pSignalSemaphores == rhs.pSignalSemaphores ); +# endif + } + + bool operator!=( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + uint32_t bufferBindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds = {}; + uint32_t imageOpaqueBindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds = {}; + uint32_t imageBindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds = {}; + uint32_t signalSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {}; + }; + + template <> + struct CppType + { + using Type = BindSparseInfo; + }; + + // wrapper struct for struct VkBindVertexBufferIndirectCommandEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindVertexBufferIndirectCommandEXT.html + struct 
BindVertexBufferIndirectCommandEXT + { + using NativeType = VkBindVertexBufferIndirectCommandEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandEXT( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t size_ = {}, + uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferAddress{ bufferAddress_ } + , size{ size_ } + , stride{ stride_ } + { + } + + VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandEXT( BindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindVertexBufferIndirectCommandEXT( VkBindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : BindVertexBufferIndirectCommandEXT( *reinterpret_cast( &rhs ) ) + { + } + + BindVertexBufferIndirectCommandEXT & operator=( BindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindVertexBufferIndirectCommandEXT & operator=( VkBindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferAddress = bufferAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindVertexBufferIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindVertexBufferIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindVertexBufferIndirectCommandEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBindVertexBufferIndirectCommandEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( bufferAddress, size, stride ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindVertexBufferIndirectCommandEXT const & ) const = default; +#else + bool operator==( BindVertexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( stride == rhs.stride ); +# endif + } + + bool operator!=( BindVertexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t size = {}; + uint32_t stride = {}; + }; + + // wrapper struct for struct VkBindVertexBufferIndirectCommandNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindVertexBufferIndirectCommandNV.html + struct BindVertexBufferIndirectCommandNV + { + using NativeType = VkBindVertexBufferIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t size_ = {}, + uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferAddress{ bufferAddress_ } + , size{ size_ } + , stride{ stride_ } + { + } + + VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindVertexBufferIndirectCommandNV( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BindVertexBufferIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + BindVertexBufferIndirectCommandNV & operator=( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindVertexBufferIndirectCommandNV & operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferAddress = bufferAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindVertexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindVertexBufferIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBindVertexBufferIndirectCommandNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( bufferAddress, size, stride ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindVertexBufferIndirectCommandNV const & ) const = default; +#else + bool operator==( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( stride == rhs.stride ); +# endif + } + + bool operator!=( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t size = {}; + uint32_t stride = {}; + }; + + // wrapper struct for struct VkBindVideoSessionMemoryInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindVideoSessionMemoryInfoKHR.html + struct BindVideoSessionMemoryInfoKHR + { + using NativeType = VkBindVideoSessionMemoryInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindVideoSessionMemoryInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
VULKAN_HPP_CONSTEXPR BindVideoSessionMemoryInfoKHR( uint32_t memoryBindIndex_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryBindIndex{ memoryBindIndex_ } + , memory{ memory_ } + , memoryOffset{ memoryOffset_ } + , memorySize{ memorySize_ } + { + } + + VULKAN_HPP_CONSTEXPR BindVideoSessionMemoryInfoKHR( BindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindVideoSessionMemoryInfoKHR( VkBindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : BindVideoSessionMemoryInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + BindVideoSessionMemoryInfoKHR & operator=( BindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindVideoSessionMemoryInfoKHR & operator=( VkBindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemoryBindIndex( uint32_t memoryBindIndex_ ) VULKAN_HPP_NOEXCEPT + { + memoryBindIndex = memoryBindIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemorySize( VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ ) VULKAN_HPP_NOEXCEPT + { + memorySize = memorySize_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindVideoSessionMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindVideoSessionMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindVideoSessionMemoryInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBindVideoSessionMemoryInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryBindIndex, memory, memoryOffset, memorySize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindVideoSessionMemoryInfoKHR const & ) const = default; +#else + bool operator==( BindVideoSessionMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ) && ( memorySize == rhs.memorySize ); +# endif + } + + bool operator!=( BindVideoSessionMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindVideoSessionMemoryInfoKHR; + const void * pNext = {}; + uint32_t memoryBindIndex = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memorySize = {}; + }; + + template <> + struct CppType + { + using Type = BindVideoSessionMemoryInfoKHR; + }; + + // wrapper struct for struct VkBlitImageCubicWeightsInfoQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBlitImageCubicWeightsInfoQCOM.html + struct BlitImageCubicWeightsInfoQCOM + { + using NativeType = VkBlitImageCubicWeightsInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageCubicWeightsInfoQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BlitImageCubicWeightsInfoQCOM( VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights_ = VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM::eCatmullRom, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cubicWeights{ cubicWeights_ } + { + } + + VULKAN_HPP_CONSTEXPR BlitImageCubicWeightsInfoQCOM( BlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BlitImageCubicWeightsInfoQCOM( VkBlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : BlitImageCubicWeightsInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + + BlitImageCubicWeightsInfoQCOM & operator=( BlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BlitImageCubicWeightsInfoQCOM & operator=( VkBlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BlitImageCubicWeightsInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageCubicWeightsInfoQCOM & setCubicWeights( VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights_ ) VULKAN_HPP_NOEXCEPT + { + cubicWeights = cubicWeights_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBlitImageCubicWeightsInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBlitImageCubicWeightsInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBlitImageCubicWeightsInfoQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBlitImageCubicWeightsInfoQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cubicWeights ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BlitImageCubicWeightsInfoQCOM const & ) const = default; +#else + bool operator==( BlitImageCubicWeightsInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cubicWeights == rhs.cubicWeights ); +# endif + } + + bool operator!=( BlitImageCubicWeightsInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + 
{ + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBlitImageCubicWeightsInfoQCOM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights = VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM::eCatmullRom; + }; + + template <> + struct CppType + { + using Type = BlitImageCubicWeightsInfoQCOM; + }; + + // wrapper struct for struct VkImageSubresourceLayers, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSubresourceLayers.html + struct ImageSubresourceLayers + { + using NativeType = VkImageSubresourceLayers; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + uint32_t mipLevel_ = {}, + uint32_t baseArrayLayer_ = {}, + uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask{ aspectMask_ } + , mipLevel{ mipLevel_ } + , baseArrayLayer{ baseArrayLayer_ } + , layerCount{ layerCount_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSubresourceLayers( *reinterpret_cast( &rhs ) ) + { + } + + ImageSubresourceLayers & operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageSubresourceLayers & operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT + { + mipLevel = mipLevel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageSubresourceLayers const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresourceLayers const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageSubresourceLayers *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( aspectMask, mipLevel, baseArrayLayer, layerCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSubresourceLayers const & ) const = default; +#else + bool operator==( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && ( baseArrayLayer == rhs.baseArrayLayer ) 
&& ( layerCount == rhs.layerCount ); +# endif + } + + bool operator!=( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t mipLevel = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; + }; + + // wrapper struct for struct VkImageBlit2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageBlit2.html + struct ImageBlit2 + { + using NativeType = VkImageBlit2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageBlit2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ImageBlit2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + std::array const & srcOffsets_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + std::array const & dstOffsets_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcSubresource{ srcSubresource_ } + , srcOffsets{ srcOffsets_ } + , dstSubresource{ dstSubresource_ } + , dstOffsets{ dstOffsets_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageBlit2( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit2( *reinterpret_cast( &rhs ) ) {} + + ImageBlit2 & operator=( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageBlit2 & operator=( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setSrcOffsets( std::array const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT + { + srcOffsets = srcOffsets_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setDstOffsets( std::array const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT + { + dstOffsets = dstOffsets_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageBlit2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageBlit2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageBlit2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageBlit2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcSubresource, srcOffsets, dstSubresource, dstOffsets ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageBlit2 const & ) 
const = default; +#else + bool operator==( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffsets == rhs.dstOffsets ); +# endif + } + + bool operator!=( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageBlit2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D srcOffsets = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D dstOffsets = {}; + }; + + template <> + struct CppType + { + using Type = ImageBlit2; + }; + + using ImageBlit2KHR = ImageBlit2; + + // wrapper struct for struct VkBlitImageInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBlitImageInfo2.html + struct BlitImageInfo2 + { + using NativeType = VkBlitImageInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageInfo2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ = {}, + VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcImage{ srcImage_ } + , srcImageLayout{ srcImageLayout_ } + , dstImage{ dstImage_ } + , dstImageLayout{ dstImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } + , filter{ filter_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BlitImageInfo2( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : BlitImageInfo2( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BlitImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + , filter( filter_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BlitImageInfo2 & operator=( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BlitImageInfo2 & operator=( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BlitImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setFilter( VULKAN_HPP_NAMESPACE::Filter filter_ ) VULKAN_HPP_NOEXCEPT + { + filter = filter_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBlitImageInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBlitImageInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBlitImageInfo2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBlitImageInfo2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BlitImageInfo2 const & ) const = default; +#else + bool operator==( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) && + ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ) && + ( filter == rhs.filter ); +# endif + } + + bool operator!=( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBlitImageInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = 
VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions = {}; + VULKAN_HPP_NAMESPACE::Filter filter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + }; + + template <> + struct CppType + { + using Type = BlitImageInfo2; + }; + + using BlitImageInfo2KHR = BlitImageInfo2; + + // wrapper struct for struct VkBufferCaptureDescriptorDataInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCaptureDescriptorDataInfoEXT.html + struct BufferCaptureDescriptorDataInfoEXT + { + using NativeType = VkBufferCaptureDescriptorDataInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCaptureDescriptorDataInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , buffer{ buffer_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferCaptureDescriptorDataInfoEXT( BufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCaptureDescriptorDataInfoEXT( VkBufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCaptureDescriptorDataInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + BufferCaptureDescriptorDataInfoEXT & operator=( BufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferCaptureDescriptorDataInfoEXT & operator=( VkBufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCaptureDescriptorDataInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCaptureDescriptorDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBufferCaptureDescriptorDataInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, buffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCaptureDescriptorDataInfoEXT const & ) const = default; +#else + bool operator==( BufferCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); +# endif + } + + bool operator!=( BufferCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType 
sType = StructureType::eBufferCaptureDescriptorDataInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + + template <> + struct CppType + { + using Type = BufferCaptureDescriptorDataInfoEXT; + }; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + // wrapper struct for struct VkBufferCollectionBufferCreateInfoFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionBufferCreateInfoFUCHSIA.html + struct BufferCollectionBufferCreateInfoFUCHSIA + { + using NativeType = VkBufferCollectionBufferCreateInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, + uint32_t index_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , collection{ collection_ } + , index{ index_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionBufferCreateInfoFUCHSIA( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCollectionBufferCreateInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + BufferCollectionBufferCreateInfoFUCHSIA & operator=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferCollectionBufferCreateInfoFUCHSIA & operator=( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & + setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT + { + collection = collection_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferCollectionBufferCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCollectionBufferCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCollectionBufferCreateInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBufferCollectionBufferCreateInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, collection, index ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCollectionBufferCreateInfoFUCHSIA const & ) const = default; +# else + bool operator==( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + 
return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && ( index == rhs.index ); +# endif + } + + bool operator!=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {}; + uint32_t index = {}; + }; + + template <> + struct CppType + { + using Type = BufferCollectionBufferCreateInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + // wrapper struct for struct VkBufferCollectionConstraintsInfoFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionConstraintsInfoFUCHSIA.html + struct BufferCollectionConstraintsInfoFUCHSIA + { + using NativeType = VkBufferCollectionConstraintsInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA( uint32_t minBufferCount_ = {}, + uint32_t maxBufferCount_ = {}, + uint32_t minBufferCountForCamping_ = {}, + uint32_t minBufferCountForDedicatedSlack_ = {}, + uint32_t minBufferCountForSharedSlack_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minBufferCount{ minBufferCount_ } + , maxBufferCount{ maxBufferCount_ } + , minBufferCountForCamping{ minBufferCountForCamping_ } + , minBufferCountForDedicatedSlack{ minBufferCountForDedicatedSlack_ } + , minBufferCountForSharedSlack{ minBufferCountForSharedSlack_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionConstraintsInfoFUCHSIA( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCollectionConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + BufferCollectionConstraintsInfoFUCHSIA & operator=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferCollectionConstraintsInfoFUCHSIA & operator=( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCount( uint32_t minBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + minBufferCount = minBufferCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMaxBufferCount( uint32_t maxBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + maxBufferCount = maxBufferCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCountForCamping( uint32_t minBufferCountForCamping_ ) VULKAN_HPP_NOEXCEPT + { + minBufferCountForCamping = minBufferCountForCamping_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
BufferCollectionConstraintsInfoFUCHSIA & + setMinBufferCountForDedicatedSlack( uint32_t minBufferCountForDedicatedSlack_ ) VULKAN_HPP_NOEXCEPT + { + minBufferCountForDedicatedSlack = minBufferCountForDedicatedSlack_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & + setMinBufferCountForSharedSlack( uint32_t minBufferCountForSharedSlack_ ) VULKAN_HPP_NOEXCEPT + { + minBufferCountForSharedSlack = minBufferCountForSharedSlack_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferCollectionConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCollectionConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCollectionConstraintsInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBufferCollectionConstraintsInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minBufferCount, maxBufferCount, minBufferCountForCamping, minBufferCountForDedicatedSlack, minBufferCountForSharedSlack ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCollectionConstraintsInfoFUCHSIA const & ) const = default; +# else + bool operator==( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minBufferCount == rhs.minBufferCount ) && ( maxBufferCount == rhs.maxBufferCount ) && + ( minBufferCountForCamping == rhs.minBufferCountForCamping ) && ( minBufferCountForDedicatedSlack == rhs.minBufferCountForDedicatedSlack ) && + ( minBufferCountForSharedSlack == rhs.minBufferCountForSharedSlack ); +# endif + } + + bool operator!=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA; + const void * pNext = {}; + uint32_t minBufferCount = {}; + uint32_t maxBufferCount = {}; + uint32_t minBufferCountForCamping = {}; + uint32_t minBufferCountForDedicatedSlack = {}; + uint32_t minBufferCountForSharedSlack = {}; + }; + + template <> + struct CppType + { + using Type = BufferCollectionConstraintsInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + // wrapper struct for struct VkBufferCollectionCreateInfoFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionCreateInfoFUCHSIA.html + struct BufferCollectionCreateInfoFUCHSIA + { + using NativeType = VkBufferCollectionCreateInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionCreateInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA( zx_handle_t collectionToken_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , collectionToken{ collectionToken_ } + { + } + + VULKAN_HPP_CONSTEXPR 
BufferCollectionCreateInfoFUCHSIA( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionCreateInfoFUCHSIA( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCollectionCreateInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + BufferCollectionCreateInfoFUCHSIA & operator=( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferCollectionCreateInfoFUCHSIA & operator=( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & setCollectionToken( zx_handle_t collectionToken_ ) VULKAN_HPP_NOEXCEPT + { + collectionToken = collectionToken_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferCollectionCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCollectionCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCollectionCreateInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBufferCollectionCreateInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, collectionToken ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &collectionToken, &rhs.collectionToken, sizeof( zx_handle_t ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &collectionToken, &rhs.collectionToken, sizeof( zx_handle_t ) ) == 0 ); + } + + bool operator!=( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionCreateInfoFUCHSIA; + const void * pNext = {}; + zx_handle_t collectionToken = {}; + }; + + template <> + struct CppType + { + using Type = BufferCollectionCreateInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + // wrapper struct for struct VkBufferCollectionImageCreateInfoFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionImageCreateInfoFUCHSIA.html + struct BufferCollectionImageCreateInfoFUCHSIA + { + using NativeType = VkBufferCollectionImageCreateInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionImageCreateInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, + uint32_t index_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , collection{ collection_ } + , index{ index_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionImageCreateInfoFUCHSIA( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCollectionImageCreateInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + BufferCollectionImageCreateInfoFUCHSIA & operator=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferCollectionImageCreateInfoFUCHSIA & operator=( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & + setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT + { + collection = collection_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferCollectionImageCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCollectionImageCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCollectionImageCreateInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBufferCollectionImageCreateInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, collection, index ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCollectionImageCreateInfoFUCHSIA const & ) const = default; +# else + bool operator==( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && ( index == rhs.index ); +# endif + } + + bool operator!=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionImageCreateInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {}; + uint32_t index = {}; + }; + + template <> + struct CppType + { + using Type = BufferCollectionImageCreateInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + // wrapper struct for struct VkSysmemColorSpaceFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSysmemColorSpaceFUCHSIA.html + struct SysmemColorSpaceFUCHSIA + { + using NativeType = VkSysmemColorSpaceFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSysmemColorSpaceFUCHSIA; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( uint32_t colorSpace_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , colorSpace{ colorSpace_ } + { + } + + VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SysmemColorSpaceFUCHSIA( VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : SysmemColorSpaceFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + SysmemColorSpaceFUCHSIA & operator=( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SysmemColorSpaceFUCHSIA & operator=( VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA & setColorSpace( uint32_t colorSpace_ ) VULKAN_HPP_NOEXCEPT + { + colorSpace = colorSpace_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSysmemColorSpaceFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSysmemColorSpaceFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSysmemColorSpaceFUCHSIA const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSysmemColorSpaceFUCHSIA *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const 
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, colorSpace ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SysmemColorSpaceFUCHSIA const & ) const = default; +# else + bool operator==( SysmemColorSpaceFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorSpace == rhs.colorSpace ); +# endif + } + + bool operator!=( SysmemColorSpaceFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSysmemColorSpaceFUCHSIA; + const void * pNext = {}; + uint32_t colorSpace = {}; + }; + + template <> + struct CppType + { + using Type = SysmemColorSpaceFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + // wrapper struct for struct VkBufferCollectionPropertiesFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionPropertiesFUCHSIA.html + struct BufferCollectionPropertiesFUCHSIA + { + using NativeType = VkBufferCollectionPropertiesFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionPropertiesFUCHSIA; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA( + uint32_t memoryTypeBits_ = {}, + uint32_t bufferCount_ = {}, + uint32_t createInfoIndex_ = {}, + uint64_t sysmemPixelFormat_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex_ = {}, + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryTypeBits{ memoryTypeBits_ } + , bufferCount{ bufferCount_ } + , createInfoIndex{ createInfoIndex_ } + , sysmemPixelFormat{ sysmemPixelFormat_ } + , formatFeatures{ formatFeatures_ } + , sysmemColorSpaceIndex{ sysmemColorSpaceIndex_ } + , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ } + , suggestedYcbcrModel{ suggestedYcbcrModel_ } + , suggestedYcbcrRange{ suggestedYcbcrRange_ } + , suggestedXChromaOffset{ suggestedXChromaOffset_ } + , suggestedYChromaOffset{ suggestedYChromaOffset_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionPropertiesFUCHSIA( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCollectionPropertiesFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + BufferCollectionPropertiesFUCHSIA & operator=( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferCollectionPropertiesFUCHSIA & operator=( 
VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkBufferCollectionPropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCollectionPropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCollectionPropertiesFUCHSIA const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBufferCollectionPropertiesFUCHSIA *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + memoryTypeBits, + bufferCount, + createInfoIndex, + sysmemPixelFormat, + formatFeatures, + sysmemColorSpaceIndex, + samplerYcbcrConversionComponents, + suggestedYcbcrModel, + suggestedYcbcrRange, + suggestedXChromaOffset, + suggestedYChromaOffset ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCollectionPropertiesFUCHSIA const & ) const = default; +# else + bool operator==( BufferCollectionPropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ) && ( bufferCount == rhs.bufferCount ) && + ( createInfoIndex == rhs.createInfoIndex ) && ( sysmemPixelFormat == rhs.sysmemPixelFormat ) && ( formatFeatures == rhs.formatFeatures ) && + ( sysmemColorSpaceIndex == rhs.sysmemColorSpaceIndex ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) && + ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) && + ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); +# endif + } + + bool operator!=( BufferCollectionPropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionPropertiesFUCHSIA; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + uint32_t bufferCount = {}; + uint32_t createInfoIndex = {}; + uint64_t sysmemPixelFormat = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {}; + VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex = {}; + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + }; + + template <> + struct CppType + { + using Type = BufferCollectionPropertiesFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + // wrapper struct for struct VkBufferCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCreateInfo.html + struct BufferCreateInfo + { + using NativeType = VkBufferCreateInfo; + + static const bool 
allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , size{ size_ } + , usage{ usage_ } + , sharingMode{ sharingMode_ } + , queueFamilyIndexCount{ queueFamilyIndexCount_ } + , pQueueFamilyIndices{ pQueueFamilyIndices_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCreateInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_, + VULKAN_HPP_NAMESPACE::DeviceSize size_, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , size( size_ ) + , usage( usage_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferCreateInfo & operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT + { + sharingMode = sharingMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BufferCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + 
queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBufferCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, size, usage, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCreateInfo const & ) const = default; +#else + bool operator==( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( size == rhs.size ) && ( usage == rhs.usage ) && + ( sharingMode == rhs.sharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); +# endif + } + + bool operator!=( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + }; + + template <> + struct CppType + { + using Type = BufferCreateInfo; + }; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + // wrapper struct for struct VkBufferConstraintsInfoFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferConstraintsInfoFUCHSIA.html + struct BufferConstraintsInfoFUCHSIA + { + using NativeType = VkBufferConstraintsInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferConstraintsInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , createInfo{ createInfo_ } + , requiredFormatFeatures{ requiredFormatFeatures_ } + , bufferCollectionConstraints{ bufferCollectionConstraints_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferConstraintsInfoFUCHSIA( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + 
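+    // Illustrative usage sketch (comment only; names such as 'device', 'collection' and
+    // 'bufferCreateInfo' are assumed to exist in the caller's scope, and the exact Vulkan-Hpp
+    // entry point name is an assumption). The struct is typically filled via its setters and
+    // handed to the VK_FUCHSIA_buffer_collection constraint call, e.g.:
+    //   auto constraints = vk::BufferConstraintsInfoFUCHSIA{}
+    //                        .setCreateInfo( bufferCreateInfo )
+    //                        .setRequiredFormatFeatures( vk::FormatFeatureFlagBits::eTransferSrc );
+    //   device.setBufferCollectionBufferConstraintsFUCHSIA( collection, constraints );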
BufferConstraintsInfoFUCHSIA & operator=( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferConstraintsInfoFUCHSIA & operator=( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo_ ) VULKAN_HPP_NOEXCEPT + { + createInfo = createInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & + setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT + { + requiredFormatFeatures = requiredFormatFeatures_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & + setBufferCollectionConstraints( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) VULKAN_HPP_NOEXCEPT + { + bufferCollectionConstraints = bufferCollectionConstraints_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferConstraintsInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBufferConstraintsInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, createInfo, requiredFormatFeatures, bufferCollectionConstraints ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferConstraintsInfoFUCHSIA const & ) const = default; +# else + bool operator==( BufferConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createInfo == rhs.createInfo ) && ( requiredFormatFeatures == rhs.requiredFormatFeatures ) && + ( bufferCollectionConstraints == rhs.bufferCollectionConstraints ); +# endif + } + + bool operator!=( BufferConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferConstraintsInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {}; + }; + + template <> + struct CppType + { + using Type = BufferConstraintsInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + // wrapper struct for struct VkBufferCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCopy.html + struct BufferCopy + { + using NativeType = VkBufferCopy; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCopy( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : srcOffset{ srcOffset_ } + , dstOffset{ dstOffset_ } + , size{ size_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCopy( *reinterpret_cast( &rhs ) ) {} + + BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferCopy & operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCopy & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCopy & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferCopy const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCopy const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBufferCopy *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcOffset, dstOffset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCopy const & ) const = default; +#else + bool operator==( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size ); +# endif + } + + bool operator!=( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + // wrapper struct for struct VkBufferCopy2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCopy2.html + struct BufferCopy2 + { + using NativeType = VkBufferCopy2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCopy2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCopy2( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcOffset{ srcOffset_ } + , dstOffset{ dstOffset_ } + , size{ size_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferCopy2( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
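+    // Illustrative usage sketch (comment only; 'srcBuffer', 'dstBuffer' and 'commandBuffer' are
+    // assumed to exist in the caller's scope). Unlike BufferCopy, BufferCopy2 carries sType/pNext
+    // and is consumed through CopyBufferInfo2 / CommandBuffer::copyBuffer2, e.g.:
+    //   vk::BufferCopy2     region{ /*srcOffset*/ 0, /*dstOffset*/ 0, /*size*/ 256 };
+    //   vk::CopyBufferInfo2 copyInfo{ srcBuffer, dstBuffer, region };
+    //   commandBuffer.copyBuffer2( copyInfo );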
BufferCopy2( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCopy2( *reinterpret_cast( &rhs ) ) {} + + BufferCopy2 & operator=( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferCopy2 & operator=( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferCopy2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCopy2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCopy2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBufferCopy2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcOffset, dstOffset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCopy2 const & ) const = default; +#else + bool operator==( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size ); +# endif + } + + bool operator!=( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCopy2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + template <> + struct CppType + { + using Type = BufferCopy2; + }; + + using BufferCopy2KHR = BufferCopy2; + + // wrapper struct for struct VkBufferDeviceAddressCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferDeviceAddressCreateInfoEXT.html + struct BufferDeviceAddressCreateInfoEXT + { + using NativeType = VkBufferDeviceAddressCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceAddress{ deviceAddress_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( 
BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferDeviceAddressCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + BufferDeviceAddressCreateInfoEXT & operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferDeviceAddressCreateInfoEXT & operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferDeviceAddressCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferDeviceAddressCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBufferDeviceAddressCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferDeviceAddressCreateInfoEXT const & ) const = default; +#else + bool operator==( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ); +# endif + } + + bool operator!=( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + }; + + template <> + struct CppType + { + using Type = BufferDeviceAddressCreateInfoEXT; + }; + + // wrapper struct for struct VkBufferDeviceAddressInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferDeviceAddressInfo.html + struct BufferDeviceAddressInfo + { + using NativeType = VkBufferDeviceAddressInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , buffer{ buffer_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferDeviceAddressInfo( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + 
: BufferDeviceAddressInfo( *reinterpret_cast( &rhs ) ) + { + } + + BufferDeviceAddressInfo & operator=( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferDeviceAddressInfo & operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferDeviceAddressInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferDeviceAddressInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferDeviceAddressInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBufferDeviceAddressInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, buffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferDeviceAddressInfo const & ) const = default; +#else + bool operator==( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); +# endif + } + + bool operator!=( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + + template <> + struct CppType + { + using Type = BufferDeviceAddressInfo; + }; + + using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo; + using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo; + + // wrapper struct for struct VkBufferImageCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferImageCopy.html + struct BufferImageCopy + { + using NativeType = VkBufferImageCopy; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferImageCopy( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, + uint32_t bufferRowLength_ = {}, + uint32_t bufferImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferOffset{ bufferOffset_ } + , bufferRowLength{ bufferRowLength_ } + , bufferImageHeight{ bufferImageHeight_ } + , imageSubresource{ imageSubresource_ } + , imageOffset{ imageOffset_ } + , imageExtent{ imageExtent_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : BufferImageCopy( *reinterpret_cast( &rhs ) ) {} + + BufferImageCopy & operator=( BufferImageCopy 
const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferImageCopy & operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + bufferOffset = bufferOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT + { + bufferRowLength = bufferRowLength_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT + { + bufferImageHeight = bufferImageHeight_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + { + imageOffset = imageOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferImageCopy const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferImageCopy const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBufferImageCopy *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferImageCopy const & ) const = default; +#else + bool operator==( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( bufferOffset == rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) && + ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent ); +# endif + } + + bool operator!=( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {}; + uint32_t bufferRowLength = {}; + uint32_t bufferImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + }; + + // wrapper struct for struct VkBufferImageCopy2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferImageCopy2.html + struct BufferImageCopy2 + { + using NativeType = VkBufferImageCopy2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eBufferImageCopy2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferImageCopy2( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, + uint32_t bufferRowLength_ = {}, + uint32_t bufferImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , bufferOffset{ bufferOffset_ } + , bufferRowLength{ bufferRowLength_ } + , bufferImageHeight{ bufferImageHeight_ } + , imageSubresource{ imageSubresource_ } + , imageOffset{ imageOffset_ } + , imageExtent{ imageExtent_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferImageCopy2( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferImageCopy2( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : BufferImageCopy2( *reinterpret_cast( &rhs ) ) {} + + BufferImageCopy2 & operator=( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferImageCopy2 & operator=( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + bufferOffset = bufferOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT + { + bufferRowLength = bufferRowLength_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT + { + bufferImageHeight = bufferImageHeight_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + { + imageOffset = imageOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferImageCopy2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferImageCopy2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferImageCopy2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBufferImageCopy2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferImageCopy2 const & ) const = default; +#else + bool 
operator==( BufferImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferOffset == rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) && + ( bufferImageHeight == rhs.bufferImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && + ( imageExtent == rhs.imageExtent ); +# endif + } + + bool operator!=( BufferImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferImageCopy2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {}; + uint32_t bufferRowLength = {}; + uint32_t bufferImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + }; + + template <> + struct CppType + { + using Type = BufferImageCopy2; + }; + + using BufferImageCopy2KHR = BufferImageCopy2; + + // wrapper struct for struct VkBufferMemoryBarrier, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferMemoryBarrier.html + struct BufferMemoryBarrier + { + using NativeType = VkBufferMemoryBarrier; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcAccessMask{ srcAccessMask_ } + , dstAccessMask{ dstAccessMask_ } + , srcQueueFamilyIndex{ srcQueueFamilyIndex_ } + , dstQueueFamilyIndex{ dstQueueFamilyIndex_ } + , buffer{ buffer_ } + , offset{ offset_ } + , size{ size_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : BufferMemoryBarrier( *reinterpret_cast( &rhs ) ) + { + } + + BufferMemoryBarrier & operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferMemoryBarrier & operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & 
setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferMemoryBarrier const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBufferMemoryBarrier *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcAccessMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferMemoryBarrier const & ) const = default; +#else + bool operator==( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) && + ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) && + ( offset == rhs.offset ) && ( size == rhs.size ); +# endif + } + + bool operator!=( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + template <> + struct CppType + { + using Type = BufferMemoryBarrier; + }; + + // wrapper struct for struct VkBufferMemoryBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferMemoryBarrier2.html + struct BufferMemoryBarrier2 + { + using NativeType = VkBufferMemoryBarrier2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, + 
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcStageMask{ srcStageMask_ } + , srcAccessMask{ srcAccessMask_ } + , dstStageMask{ dstStageMask_ } + , dstAccessMask{ dstAccessMask_ } + , srcQueueFamilyIndex{ srcQueueFamilyIndex_ } + , dstQueueFamilyIndex{ dstQueueFamilyIndex_ } + , buffer{ buffer_ } + , offset{ offset_ } + , size{ size_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryBarrier2( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferMemoryBarrier2( *reinterpret_cast( &rhs ) ) + { + } + + BufferMemoryBarrier2 & operator=( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferMemoryBarrier2 & operator=( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferMemoryBarrier2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this 
); + } + + operator VkBufferMemoryBarrier2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferMemoryBarrier2 const & ) const = default; +#else + bool operator==( BufferMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && + ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( size == rhs.size ); +# endif + } + + bool operator!=( BufferMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {}; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + template <> + struct CppType + { + using Type = BufferMemoryBarrier2; + }; + + using BufferMemoryBarrier2KHR = BufferMemoryBarrier2; + + // wrapper struct for struct VkBufferMemoryRequirementsInfo2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferMemoryRequirementsInfo2.html + struct BufferMemoryRequirementsInfo2 + { + using NativeType = VkBufferMemoryRequirementsInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryRequirementsInfo2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , buffer{ buffer_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) ) + { + } + + BufferMemoryRequirementsInfo2 & operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferMemoryRequirementsInfo2 & operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) 
VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferMemoryRequirementsInfo2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBufferMemoryRequirementsInfo2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, buffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferMemoryRequirementsInfo2 const & ) const = default; +#else + bool operator==( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); +# endif + } + + bool operator!=( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + + template <> + struct CppType + { + using Type = BufferMemoryRequirementsInfo2; + }; + + using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2; + + // wrapper struct for struct VkBufferOpaqueCaptureAddressCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferOpaqueCaptureAddressCreateInfo.html + struct BufferOpaqueCaptureAddressCreateInfo + { + using NativeType = VkBufferOpaqueCaptureAddressCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferOpaqueCaptureAddressCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , opaqueCaptureAddress{ opaqueCaptureAddress_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferOpaqueCaptureAddressCreateInfo( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferOpaqueCaptureAddressCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + BufferOpaqueCaptureAddressCreateInfo & operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferOpaqueCaptureAddressCreateInfo & operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + 
pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT + { + opaqueCaptureAddress = opaqueCaptureAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferOpaqueCaptureAddressCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferOpaqueCaptureAddressCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferOpaqueCaptureAddressCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBufferOpaqueCaptureAddressCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, opaqueCaptureAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const & ) const = default; +#else + bool operator==( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress ); +# endif + } + + bool operator!=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferOpaqueCaptureAddressCreateInfo; + const void * pNext = {}; + uint64_t opaqueCaptureAddress = {}; + }; + + template <> + struct CppType + { + using Type = BufferOpaqueCaptureAddressCreateInfo; + }; + + using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo; + + // wrapper struct for struct VkBufferUsageFlags2CreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferUsageFlags2CreateInfo.html + struct BufferUsageFlags2CreateInfo + { + using NativeType = VkBufferUsageFlags2CreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferUsageFlags2CreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfo( VULKAN_HPP_NAMESPACE::BufferUsageFlags2 usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , usage{ usage_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfo( BufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferUsageFlags2CreateInfo( VkBufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferUsageFlags2CreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + BufferUsageFlags2CreateInfo & operator=( BufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferUsageFlags2CreateInfo & operator=( VkBufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } 
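+    // Illustrative usage sketch (comment only; 'size' is assumed to exist in the caller's scope).
+    // BufferUsageFlags2CreateInfo is chained into BufferCreateInfo::pNext to provide 64-bit usage
+    // flags; the 32-bit BufferCreateInfo::usage field is then ignored, e.g.:
+    //   vk::BufferUsageFlags2CreateInfo usage2{ vk::BufferUsageFlagBits2::eTransferSrc };
+    //   vk::BufferCreateInfo            createInfo{ {}, size, {}, vk::SharingMode::eExclusive, {}, {}, &usage2 };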
+ + VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags2 usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferUsageFlags2CreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferUsageFlags2CreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferUsageFlags2CreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBufferUsageFlags2CreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, usage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferUsageFlags2CreateInfo const & ) const = default; +#else + bool operator==( BufferUsageFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage ); +# endif + } + + bool operator!=( BufferUsageFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferUsageFlags2CreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags2 usage = {}; + }; + + template <> + struct CppType + { + using Type = BufferUsageFlags2CreateInfo; + }; + + using BufferUsageFlags2CreateInfoKHR = BufferUsageFlags2CreateInfo; + + // wrapper struct for struct VkBufferViewCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferViewCreateInfo.html + struct BufferViewCreateInfo + { + using NativeType = VkBufferViewCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferViewCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , buffer{ buffer_ } + , format{ format_ } + , offset{ offset_ } + , range{ range_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferViewCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + BufferViewCreateInfo & operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferViewCreateInfo & operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + 
} + + VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT + { + range = range_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferViewCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBufferViewCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, buffer, format, offset, range ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferViewCreateInfo const & ) const = default; +#else + bool operator==( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( buffer == rhs.buffer ) && ( format == rhs.format ) && + ( offset == rhs.offset ) && ( range == rhs.range ); +# endif + } + + bool operator!=( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize range = {}; + }; + + template <> + struct CppType + { + using Type = BufferViewCreateInfo; + }; + + // wrapper struct for struct VkStridedDeviceAddressNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkStridedDeviceAddressNV.html + struct StridedDeviceAddressNV + { + using NativeType = VkStridedDeviceAddressNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR StridedDeviceAddressNV( VULKAN_HPP_NAMESPACE::DeviceAddress startAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize strideInBytes_ = {} ) VULKAN_HPP_NOEXCEPT + : startAddress{ startAddress_ } + , strideInBytes{ strideInBytes_ } + { + } + + VULKAN_HPP_CONSTEXPR StridedDeviceAddressNV( StridedDeviceAddressNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + StridedDeviceAddressNV( VkStridedDeviceAddressNV const & rhs ) VULKAN_HPP_NOEXCEPT + : StridedDeviceAddressNV( *reinterpret_cast( &rhs ) ) + { + } + + 
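+    // Illustrative usage, not part of the generated header (assumes `namespace vk = VULKAN_HPP_NAMESPACE;`
+    // and `argsAddress` being a valid VkDeviceAddress, e.g. obtained via vkGetBufferDeviceAddress;
+    // `ArgRecord` is a hypothetical per-element record type):
+    //   vk::StridedDeviceAddressNV argData{ argsAddress, sizeof( ArgRecord ) };
+    // startAddress identifies the first element and strideInBytes the byte distance between
+    // consecutive elements of the strided array.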
StridedDeviceAddressNV & operator=( StridedDeviceAddressNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + StridedDeviceAddressNV & operator=( VkStridedDeviceAddressNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressNV & setStartAddress( VULKAN_HPP_NAMESPACE::DeviceAddress startAddress_ ) VULKAN_HPP_NOEXCEPT + { + startAddress = startAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressNV & setStrideInBytes( VULKAN_HPP_NAMESPACE::DeviceSize strideInBytes_ ) VULKAN_HPP_NOEXCEPT + { + strideInBytes = strideInBytes_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkStridedDeviceAddressNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkStridedDeviceAddressNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkStridedDeviceAddressNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkStridedDeviceAddressNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( startAddress, strideInBytes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( StridedDeviceAddressNV const & ) const = default; +#else + bool operator==( StridedDeviceAddressNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( startAddress == rhs.startAddress ) && ( strideInBytes == rhs.strideInBytes ); +# endif + } + + bool operator!=( StridedDeviceAddressNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress startAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize strideInBytes = {}; + }; + + // wrapper struct for struct VkBuildPartitionedAccelerationStructureIndirectCommandNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBuildPartitionedAccelerationStructureIndirectCommandNV.html + struct BuildPartitionedAccelerationStructureIndirectCommandNV + { + using NativeType = VkBuildPartitionedAccelerationStructureIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BuildPartitionedAccelerationStructureIndirectCommandNV( + VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureOpTypeNV opType_ = VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureOpTypeNV::eWriteInstance, + uint32_t argCount_ = {}, + VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV argData_ = {} ) VULKAN_HPP_NOEXCEPT + : opType{ opType_ } + , argCount{ argCount_ } + , argData{ argData_ } + { + } + + VULKAN_HPP_CONSTEXPR BuildPartitionedAccelerationStructureIndirectCommandNV( BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + BuildPartitionedAccelerationStructureIndirectCommandNV( VkBuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BuildPartitionedAccelerationStructureIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + BuildPartitionedAccelerationStructureIndirectCommandNV & + operator=( 
BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BuildPartitionedAccelerationStructureIndirectCommandNV & + operator=( VkBuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureIndirectCommandNV & + setOpType( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureOpTypeNV opType_ ) VULKAN_HPP_NOEXCEPT + { + opType = opType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureIndirectCommandNV & setArgCount( uint32_t argCount_ ) VULKAN_HPP_NOEXCEPT + { + argCount = argCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureIndirectCommandNV & + setArgData( VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV const & argData_ ) VULKAN_HPP_NOEXCEPT + { + argData = argData_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBuildPartitionedAccelerationStructureIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBuildPartitionedAccelerationStructureIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBuildPartitionedAccelerationStructureIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBuildPartitionedAccelerationStructureIndirectCommandNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( opType, argCount, argData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BuildPartitionedAccelerationStructureIndirectCommandNV const & ) const = default; +#else + bool operator==( BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( opType == rhs.opType ) && ( argCount == rhs.argCount ) && ( argData == rhs.argData ); +# endif + } + + bool operator!=( BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureOpTypeNV opType = VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureOpTypeNV::eWriteInstance; + uint32_t argCount = {}; + VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV argData = {}; + }; + + // wrapper struct for struct VkPartitionedAccelerationStructureInstancesInputNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureInstancesInputNV.html + struct PartitionedAccelerationStructureInstancesInputNV + { + using NativeType = VkPartitionedAccelerationStructureInstancesInputNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePartitionedAccelerationStructureInstancesInputNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureInstancesInputNV( 
VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, + uint32_t instanceCount_ = {}, + uint32_t maxInstancePerPartitionCount_ = {}, + uint32_t partitionCount_ = {}, + uint32_t maxInstanceInGlobalPartitionCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , instanceCount{ instanceCount_ } + , maxInstancePerPartitionCount{ maxInstancePerPartitionCount_ } + , partitionCount{ partitionCount_ } + , maxInstanceInGlobalPartitionCount{ maxInstanceInGlobalPartitionCount_ } + { + } + + VULKAN_HPP_CONSTEXPR + PartitionedAccelerationStructureInstancesInputNV( PartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PartitionedAccelerationStructureInstancesInputNV( VkPartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PartitionedAccelerationStructureInstancesInputNV( *reinterpret_cast( &rhs ) ) + { + } + + PartitionedAccelerationStructureInstancesInputNV & operator=( PartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PartitionedAccelerationStructureInstancesInputNV & operator=( VkPartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & + setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & + setMaxInstancePerPartitionCount( uint32_t maxInstancePerPartitionCount_ ) VULKAN_HPP_NOEXCEPT + { + maxInstancePerPartitionCount = maxInstancePerPartitionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setPartitionCount( uint32_t partitionCount_ ) VULKAN_HPP_NOEXCEPT + { + partitionCount = partitionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & + setMaxInstanceInGlobalPartitionCount( uint32_t maxInstanceInGlobalPartitionCount_ ) VULKAN_HPP_NOEXCEPT + { + maxInstanceInGlobalPartitionCount = maxInstanceInGlobalPartitionCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPartitionedAccelerationStructureInstancesInputNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureInstancesInputNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureInstancesInputNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureInstancesInputNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, 
flags, instanceCount, maxInstancePerPartitionCount, partitionCount, maxInstanceInGlobalPartitionCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PartitionedAccelerationStructureInstancesInputNV const & ) const = default; +#else + bool operator==( PartitionedAccelerationStructureInstancesInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( instanceCount == rhs.instanceCount ) && + ( maxInstancePerPartitionCount == rhs.maxInstancePerPartitionCount ) && ( partitionCount == rhs.partitionCount ) && + ( maxInstanceInGlobalPartitionCount == rhs.maxInstanceInGlobalPartitionCount ); +# endif + } + + bool operator!=( PartitionedAccelerationStructureInstancesInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePartitionedAccelerationStructureInstancesInputNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {}; + uint32_t instanceCount = {}; + uint32_t maxInstancePerPartitionCount = {}; + uint32_t partitionCount = {}; + uint32_t maxInstanceInGlobalPartitionCount = {}; + }; + + template <> + struct CppType + { + using Type = PartitionedAccelerationStructureInstancesInputNV; + }; + + // wrapper struct for struct VkBuildPartitionedAccelerationStructureInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBuildPartitionedAccelerationStructureInfoNV.html + struct BuildPartitionedAccelerationStructureInfoNV + { + using NativeType = VkBuildPartitionedAccelerationStructureInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBuildPartitionedAccelerationStructureInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BuildPartitionedAccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV input_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress srcAccelerationStructureData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress dstAccelerationStructureData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress scratchData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress srcInfos_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress srcInfosCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , input{ input_ } + , srcAccelerationStructureData{ srcAccelerationStructureData_ } + , dstAccelerationStructureData{ dstAccelerationStructureData_ } + , scratchData{ scratchData_ } + , srcInfos{ srcInfos_ } + , srcInfosCount{ srcInfosCount_ } + { + } + + VULKAN_HPP_CONSTEXPR BuildPartitionedAccelerationStructureInfoNV( BuildPartitionedAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BuildPartitionedAccelerationStructureInfoNV( VkBuildPartitionedAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BuildPartitionedAccelerationStructureInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + BuildPartitionedAccelerationStructureInfoNV & operator=( BuildPartitionedAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BuildPartitionedAccelerationStructureInfoNV & operator=( VkBuildPartitionedAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + 
*this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & + setInput( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV const & input_ ) VULKAN_HPP_NOEXCEPT + { + input = input_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & + setSrcAccelerationStructureData( VULKAN_HPP_NAMESPACE::DeviceAddress srcAccelerationStructureData_ ) VULKAN_HPP_NOEXCEPT + { + srcAccelerationStructureData = srcAccelerationStructureData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & + setDstAccelerationStructureData( VULKAN_HPP_NAMESPACE::DeviceAddress dstAccelerationStructureData_ ) VULKAN_HPP_NOEXCEPT + { + dstAccelerationStructureData = dstAccelerationStructureData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setScratchData( VULKAN_HPP_NAMESPACE::DeviceAddress scratchData_ ) VULKAN_HPP_NOEXCEPT + { + scratchData = scratchData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setSrcInfos( VULKAN_HPP_NAMESPACE::DeviceAddress srcInfos_ ) VULKAN_HPP_NOEXCEPT + { + srcInfos = srcInfos_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & + setSrcInfosCount( VULKAN_HPP_NAMESPACE::DeviceAddress srcInfosCount_ ) VULKAN_HPP_NOEXCEPT + { + srcInfosCount = srcInfosCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBuildPartitionedAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBuildPartitionedAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBuildPartitionedAccelerationStructureInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkBuildPartitionedAccelerationStructureInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, input, srcAccelerationStructureData, dstAccelerationStructureData, scratchData, srcInfos, srcInfosCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BuildPartitionedAccelerationStructureInfoNV const & ) const = default; +#else + bool operator==( BuildPartitionedAccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( input == rhs.input ) && + ( srcAccelerationStructureData == rhs.srcAccelerationStructureData ) && ( dstAccelerationStructureData == rhs.dstAccelerationStructureData ) && + ( scratchData == rhs.scratchData ) && ( srcInfos == rhs.srcInfos ) && ( srcInfosCount == rhs.srcInfosCount ); +# endif + } + + bool operator!=( BuildPartitionedAccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eBuildPartitionedAccelerationStructureInfoNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV input = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress srcAccelerationStructureData = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress dstAccelerationStructureData = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress scratchData = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress srcInfos = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress srcInfosCount = {}; + }; + + template <> + struct CppType + { + using Type = BuildPartitionedAccelerationStructureInfoNV; + }; + + // wrapper struct for struct VkCalibratedTimestampInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCalibratedTimestampInfoKHR.html + struct CalibratedTimestampInfoKHR + { + using NativeType = VkCalibratedTimestampInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCalibratedTimestampInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoKHR( VULKAN_HPP_NAMESPACE::TimeDomainKHR timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainKHR::eDevice, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , timeDomain{ timeDomain_ } + { + } + + VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoKHR( CalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CalibratedTimestampInfoKHR( VkCalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CalibratedTimestampInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + CalibratedTimestampInfoKHR & operator=( CalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CalibratedTimestampInfoKHR & operator=( VkCalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoKHR & setTimeDomain( VULKAN_HPP_NAMESPACE::TimeDomainKHR timeDomain_ ) VULKAN_HPP_NOEXCEPT + { + timeDomain = timeDomain_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCalibratedTimestampInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCalibratedTimestampInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCalibratedTimestampInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCalibratedTimestampInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, timeDomain ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CalibratedTimestampInfoKHR const & ) const = default; +#else + bool operator==( CalibratedTimestampInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timeDomain == rhs.timeDomain ); +# endif + } + + bool operator!=( 
CalibratedTimestampInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::TimeDomainKHR timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainKHR::eDevice; + }; + + template <> + struct CppType + { + using Type = CalibratedTimestampInfoKHR; + }; + + using CalibratedTimestampInfoEXT = CalibratedTimestampInfoKHR; + + // wrapper struct for struct VkCheckpointData2NV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCheckpointData2NV.html + struct CheckpointData2NV + { + using NativeType = VkCheckpointData2NV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointData2NV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CheckpointData2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage_ = {}, void * pCheckpointMarker_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stage{ stage_ } + , pCheckpointMarker{ pCheckpointMarker_ } + { + } + + VULKAN_HPP_CONSTEXPR CheckpointData2NV( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CheckpointData2NV( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT : CheckpointData2NV( *reinterpret_cast( &rhs ) ) {} + + CheckpointData2NV & operator=( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CheckpointData2NV & operator=( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkCheckpointData2NV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCheckpointData2NV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCheckpointData2NV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCheckpointData2NV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stage, pCheckpointMarker ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CheckpointData2NV const & ) const = default; +#else + bool operator==( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( pCheckpointMarker == rhs.pCheckpointMarker ); +# endif + } + + bool operator!=( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointData2NV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage = {}; + void * pCheckpointMarker = {}; + }; + + template <> + struct CppType + { + using Type = CheckpointData2NV; + }; + + // wrapper struct for struct VkCheckpointDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCheckpointDataNV.html + struct CheckpointDataNV + { + using NativeType = VkCheckpointDataNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
StructureType structureType = StructureType::eCheckpointDataNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CheckpointDataNV( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, + void * pCheckpointMarker_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stage{ stage_ } + , pCheckpointMarker{ pCheckpointMarker_ } + { + } + + VULKAN_HPP_CONSTEXPR CheckpointDataNV( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : CheckpointDataNV( *reinterpret_cast( &rhs ) ) {} + + CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CheckpointDataNV & operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkCheckpointDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCheckpointDataNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCheckpointDataNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stage, pCheckpointMarker ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CheckpointDataNV const & ) const = default; +#else + bool operator==( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( pCheckpointMarker == rhs.pCheckpointMarker ); +# endif + } + + bool operator!=( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe; + void * pCheckpointMarker = {}; + }; + + template <> + struct CppType + { + using Type = CheckpointDataNV; + }; + + union ClearColorValue + { + using NativeType = VkClearColorValue; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array & float32_ = {} ) : float32( float32_ ) {} + + VULKAN_HPP_CONSTEXPR ClearColorValue( float float32_0, float float32_1, float float32_2, float float32_3 ) + : float32{ { { float32_0, float32_1, float32_2, float32_3 } } } + { + } + + VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array & int32_ ) : int32( int32_ ) {} + + VULKAN_HPP_CONSTEXPR ClearColorValue( int32_t int32_0, int32_t int32_1, int32_t int32_2, int32_t int32_3 ) + : int32{ { { int32_0, int32_1, int32_2, int32_3 } } } + { + } + + VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array & uint32_ ) : uint32( uint32_ ) {} + + VULKAN_HPP_CONSTEXPR ClearColorValue( uint32_t uint32_0, uint32_t uint32_1, uint32_t uint32_2, uint32_t uint32_3 ) + : uint32{ { { uint32_0, uint32_1, uint32_2, uint32_3 } } } + { + 
} +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setFloat32( std::array float32_ ) VULKAN_HPP_NOEXCEPT + { + float32 = float32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setInt32( std::array int32_ ) VULKAN_HPP_NOEXCEPT + { + int32 = int32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setUint32( std::array uint32_ ) VULKAN_HPP_NOEXCEPT + { + uint32 = uint32_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkClearColorValue const &() const + { + return *reinterpret_cast( this ); + } + + operator VkClearColorValue &() + { + return *reinterpret_cast( this ); + } + + VULKAN_HPP_NAMESPACE::ArrayWrapper1D float32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D int32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D uint32; + }; + + // wrapper struct for struct VkClearDepthStencilValue, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClearDepthStencilValue.html + struct ClearDepthStencilValue + { + using NativeType = VkClearDepthStencilValue; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {}, uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT + : depth{ depth_ } + , stencil{ stencil_ } + { + } + + VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT + : ClearDepthStencilValue( *reinterpret_cast( &rhs ) ) + { + } + + ClearDepthStencilValue & operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ClearDepthStencilValue & operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setDepth( float depth_ ) VULKAN_HPP_NOEXCEPT + { + depth = depth_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setStencil( uint32_t stencil_ ) VULKAN_HPP_NOEXCEPT + { + stencil = stencil_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkClearDepthStencilValue const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClearDepthStencilValue const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkClearDepthStencilValue *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( depth, stencil ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClearDepthStencilValue const & ) const = default; +#else + bool operator==( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( depth == rhs.depth ) && ( stencil == rhs.stencil ); +# endif + } + + bool operator!=( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
+    float depth = {};
+    uint32_t stencil = {};
+  };
+
+  union ClearValue
+  {
+    using NativeType = VkClearValue;
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} ) : color( color_ ) {}
+
+    VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ ) : depthStencil( depthStencil_ ) {}
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT
+    {
+      color = color_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ClearValue & setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthStencil = depthStencil_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkClearValue const &() const
+    {
+      return *reinterpret_cast<const VkClearValue *>( this );
+    }
+
+    operator VkClearValue &()
+    {
+      return *reinterpret_cast<VkClearValue *>( this );
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    VULKAN_HPP_NAMESPACE::ClearColorValue color;
+    VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil;
+#else
+    VkClearColorValue color;
+    VkClearDepthStencilValue depthStencil;
+#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+  };
+
+  // wrapper struct for struct VkClearAttachment, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClearAttachment.html
+  struct ClearAttachment
+  {
+    using NativeType = VkClearAttachment;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 ClearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
+                                             uint32_t colorAttachment_ = {},
+                                             VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT
+      : aspectMask{ aspectMask_ }
+      , colorAttachment{ colorAttachment_ }
+      , clearValue{ clearValue_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ClearAttachment( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT : ClearAttachment( *reinterpret_cast<ClearAttachment const *>( &rhs ) ) {}
+
+    ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    ClearAttachment & operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearAttachment const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setColorAttachment( uint32_t colorAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachment = colorAttachment_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clearValue = clearValue_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkClearAttachment const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkClearAttachment *>( this );
+    }
+
+    operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkClearAttachment *>( this );
+    }
+
+    operator VkClearAttachment const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkClearAttachment *>( this );
+    }
+
+    operator
VkClearAttachment *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( aspectMask, colorAttachment, clearValue ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t colorAttachment = {}; + VULKAN_HPP_NAMESPACE::ClearValue clearValue = {}; + }; + + // wrapper struct for struct VkClearRect, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClearRect.html + struct ClearRect + { + using NativeType = VkClearRect; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT + : rect{ rect_ } + , baseArrayLayer{ baseArrayLayer_ } + , layerCount{ layerCount_ } + { + } + + VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT : ClearRect( *reinterpret_cast( &rhs ) ) {} + + ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ClearRect & operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D const & rect_ ) VULKAN_HPP_NOEXCEPT + { + rect = rect_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearRect & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearRect & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkClearRect const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClearRect &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClearRect const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkClearRect *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( rect, baseArrayLayer, layerCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClearRect const & ) const = default; +#else + bool operator==( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( rect == rhs.rect ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount ); +# endif + } + + bool operator!=( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Rect2D rect = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; + }; + + // wrapper struct for struct VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV.html + struct 
ClusterAccelerationStructureBuildClustersBottomLevelInfoNV + { + using NativeType = VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ClusterAccelerationStructureBuildClustersBottomLevelInfoNV( uint32_t clusterReferencesCount_ = {}, + uint32_t clusterReferencesStride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress clusterReferences_ = {} ) VULKAN_HPP_NOEXCEPT + : clusterReferencesCount{ clusterReferencesCount_ } + , clusterReferencesStride{ clusterReferencesStride_ } + , clusterReferences{ clusterReferences_ } + { + } + + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildClustersBottomLevelInfoNV( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + ClusterAccelerationStructureBuildClustersBottomLevelInfoNV( VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureBuildClustersBottomLevelInfoNV( + *reinterpret_cast( &rhs ) ) + { + } + + ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & + operator=( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & + operator=( VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & + setClusterReferencesCount( uint32_t clusterReferencesCount_ ) VULKAN_HPP_NOEXCEPT + { + clusterReferencesCount = clusterReferencesCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & + setClusterReferencesStride( uint32_t clusterReferencesStride_ ) VULKAN_HPP_NOEXCEPT + { + clusterReferencesStride = clusterReferencesStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & + setClusterReferences( VULKAN_HPP_NAMESPACE::DeviceAddress clusterReferences_ ) VULKAN_HPP_NOEXCEPT + { + clusterReferences = clusterReferences_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( clusterReferencesCount, clusterReferencesStride, clusterReferences ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & ) const = default; +#else + bool operator==( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( clusterReferencesCount == rhs.clusterReferencesCount ) && ( clusterReferencesStride == rhs.clusterReferencesStride ) && + ( clusterReferences == rhs.clusterReferences ); +# endif + } + + bool operator!=( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t clusterReferencesCount = {}; + uint32_t clusterReferencesStride = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress clusterReferences = {}; + }; + + // wrapper struct for struct VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV.html + struct ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV + { + using NativeType = VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV( uint32_t geometryIndex_ = {}, + uint32_t reserved_ = {}, + uint32_t geometryFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : geometryIndex{ geometryIndex_ } + , reserved{ reserved_ } + , geometryFlags{ geometryFlags_ } + { + } + + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV( VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV( + *reinterpret_cast( &rhs ) ) + { + } + + ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & + operator=( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & + operator=( VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & setGeometryIndex( uint32_t geometryIndex_ ) VULKAN_HPP_NOEXCEPT + { + geometryIndex = geometryIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & setReserved( uint32_t reserved_ ) VULKAN_HPP_NOEXCEPT + { + reserved = reserved_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & setGeometryFlags( uint32_t geometryFlags_ ) VULKAN_HPP_NOEXCEPT + { + geometryFlags = geometryFlags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator 
VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( geometryIndex, reserved, geometryFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & ) const = default; +#else + bool operator==( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( geometryIndex == rhs.geometryIndex ) && ( reserved == rhs.reserved ) && ( geometryFlags == rhs.geometryFlags ); +# endif + } + + bool operator!=( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t geometryIndex : 24; + uint32_t reserved : 5; + uint32_t geometryFlags : 3; + }; + + // wrapper struct for struct VkClusterAccelerationStructureBuildTriangleClusterInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureBuildTriangleClusterInfoNV.html + struct ClusterAccelerationStructureBuildTriangleClusterInfoNV + { + using NativeType = VkClusterAccelerationStructureBuildTriangleClusterInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterInfoNV( + uint32_t clusterID_ = {}, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV clusterFlags_ = {}, + uint32_t triangleCount_ = {}, + uint32_t vertexCount_ = {}, + uint32_t positionTruncateBitCount_ = {}, + uint32_t indexType_ = {}, + uint32_t opacityMicromapIndexType_ = {}, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags_ = {}, + uint16_t indexBufferStride_ = {}, + uint16_t vertexBufferStride_ = {}, + uint16_t geometryIndexAndFlagsBufferStride_ = {}, + uint16_t opacityMicromapIndexBufferStride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress indexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress vertexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress geometryIndexAndFlagsBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapArray_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapIndexBuffer_ = {} ) VULKAN_HPP_NOEXCEPT + : clusterID{ clusterID_ } + , clusterFlags{ clusterFlags_ } + , triangleCount{ triangleCount_ } + , vertexCount{ vertexCount_ } + , positionTruncateBitCount{ positionTruncateBitCount_ } + , indexType{ indexType_ } + , opacityMicromapIndexType{ opacityMicromapIndexType_ } + , baseGeometryIndexAndGeometryFlags{ baseGeometryIndexAndGeometryFlags_ } + , indexBufferStride{ indexBufferStride_ } + , vertexBufferStride{ vertexBufferStride_ } + , geometryIndexAndFlagsBufferStride{ geometryIndexAndFlagsBufferStride_ } + , opacityMicromapIndexBufferStride{ opacityMicromapIndexBufferStride_ } + , indexBuffer{ indexBuffer_ } + , vertexBuffer{ vertexBuffer_ } + , geometryIndexAndFlagsBuffer{ geometryIndexAndFlagsBuffer_ } + , opacityMicromapArray{ opacityMicromapArray_ } + , opacityMicromapIndexBuffer{ opacityMicromapIndexBuffer_ } + { + } + + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterInfoNV( 
ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + ClusterAccelerationStructureBuildTriangleClusterInfoNV( VkClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureBuildTriangleClusterInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + ClusterAccelerationStructureBuildTriangleClusterInfoNV & + operator=( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ClusterAccelerationStructureBuildTriangleClusterInfoNV & + operator=( VkClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setClusterID( uint32_t clusterID_ ) VULKAN_HPP_NOEXCEPT + { + clusterID = clusterID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + setClusterFlags( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV clusterFlags_ ) VULKAN_HPP_NOEXCEPT + { + clusterFlags = clusterFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setTriangleCount( uint32_t triangleCount_ ) VULKAN_HPP_NOEXCEPT + { + triangleCount = triangleCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexCount = vertexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + setPositionTruncateBitCount( uint32_t positionTruncateBitCount_ ) VULKAN_HPP_NOEXCEPT + { + positionTruncateBitCount = positionTruncateBitCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setIndexType( uint32_t indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + setOpacityMicromapIndexType( uint32_t opacityMicromapIndexType_ ) VULKAN_HPP_NOEXCEPT + { + opacityMicromapIndexType = opacityMicromapIndexType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setBaseGeometryIndexAndGeometryFlags( + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & baseGeometryIndexAndGeometryFlags_ ) VULKAN_HPP_NOEXCEPT + { + baseGeometryIndexAndGeometryFlags = baseGeometryIndexAndGeometryFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setIndexBufferStride( uint16_t indexBufferStride_ ) VULKAN_HPP_NOEXCEPT + { + indexBufferStride = indexBufferStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setVertexBufferStride( uint16_t vertexBufferStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexBufferStride = vertexBufferStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + setGeometryIndexAndFlagsBufferStride( uint16_t geometryIndexAndFlagsBufferStride_ ) VULKAN_HPP_NOEXCEPT + { + geometryIndexAndFlagsBufferStride = geometryIndexAndFlagsBufferStride_; + return *this; + } + + 
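+    // Illustrative usage, not part of the generated header (assumes `namespace vk = VULKAN_HPP_NAMESPACE;`
+    // and that indexAddress / vertexAddress are valid device addresses of the cluster's index and
+    // vertex buffers): the setters of this struct chain, e.g.
+    //   auto clusterInfo = vk::ClusterAccelerationStructureBuildTriangleClusterInfoNV{}
+    //                        .setTriangleCount( 128 )
+    //                        .setVertexCount( 96 )
+    //                        .setIndexBuffer( indexAddress )
+    //                        .setVertexBuffer( vertexAddress );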
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + setOpacityMicromapIndexBufferStride( uint16_t opacityMicromapIndexBufferStride_ ) VULKAN_HPP_NOEXCEPT + { + opacityMicromapIndexBufferStride = opacityMicromapIndexBufferStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + setIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress indexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + indexBuffer = indexBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + setVertexBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress vertexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + vertexBuffer = vertexBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + setGeometryIndexAndFlagsBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress geometryIndexAndFlagsBuffer_ ) VULKAN_HPP_NOEXCEPT + { + geometryIndexAndFlagsBuffer = geometryIndexAndFlagsBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + setOpacityMicromapArray( VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapArray_ ) VULKAN_HPP_NOEXCEPT + { + opacityMicromapArray = opacityMicromapArray_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + setOpacityMicromapIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapIndexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + opacityMicromapIndexBuffer = opacityMicromapIndexBuffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkClusterAccelerationStructureBuildTriangleClusterInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureBuildTriangleClusterInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureBuildTriangleClusterInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureBuildTriangleClusterInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( clusterID, + clusterFlags, + triangleCount, + vertexCount, + positionTruncateBitCount, + indexType, + opacityMicromapIndexType, + baseGeometryIndexAndGeometryFlags, + indexBufferStride, + vertexBufferStride, + geometryIndexAndFlagsBufferStride, + opacityMicromapIndexBufferStride, + indexBuffer, + vertexBuffer, + geometryIndexAndFlagsBuffer, + opacityMicromapArray, + opacityMicromapIndexBuffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & ) const = default; +#else + bool operator==( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( clusterID == rhs.clusterID ) && ( clusterFlags == rhs.clusterFlags ) && ( triangleCount == rhs.triangleCount ) && + ( vertexCount == rhs.vertexCount ) && ( positionTruncateBitCount == rhs.positionTruncateBitCount ) && ( indexType == rhs.indexType ) && + ( opacityMicromapIndexType == rhs.opacityMicromapIndexType ) && ( baseGeometryIndexAndGeometryFlags == rhs.baseGeometryIndexAndGeometryFlags 
) && + ( indexBufferStride == rhs.indexBufferStride ) && ( vertexBufferStride == rhs.vertexBufferStride ) && + ( geometryIndexAndFlagsBufferStride == rhs.geometryIndexAndFlagsBufferStride ) && + ( opacityMicromapIndexBufferStride == rhs.opacityMicromapIndexBufferStride ) && ( indexBuffer == rhs.indexBuffer ) && + ( vertexBuffer == rhs.vertexBuffer ) && ( geometryIndexAndFlagsBuffer == rhs.geometryIndexAndFlagsBuffer ) && + ( opacityMicromapArray == rhs.opacityMicromapArray ) && ( opacityMicromapIndexBuffer == rhs.opacityMicromapIndexBuffer ); +# endif + } + + bool operator!=( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t clusterID = {}; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV clusterFlags = {}; + uint32_t triangleCount : 9; + uint32_t vertexCount : 9; + uint32_t positionTruncateBitCount : 6; + uint32_t indexType : 4; + uint32_t opacityMicromapIndexType : 4; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags = {}; + uint16_t indexBufferStride = {}; + uint16_t vertexBufferStride = {}; + uint16_t geometryIndexAndFlagsBufferStride = {}; + uint16_t opacityMicromapIndexBufferStride = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress indexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress vertexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress geometryIndexAndFlagsBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapArray = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapIndexBuffer = {}; + }; + + // wrapper struct for struct VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.html + struct ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV + { + using NativeType = VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( + uint32_t clusterID_ = {}, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV clusterFlags_ = {}, + uint32_t triangleCount_ = {}, + uint32_t vertexCount_ = {}, + uint32_t positionTruncateBitCount_ = {}, + uint32_t indexType_ = {}, + uint32_t opacityMicromapIndexType_ = {}, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags_ = {}, + uint16_t indexBufferStride_ = {}, + uint16_t vertexBufferStride_ = {}, + uint16_t geometryIndexAndFlagsBufferStride_ = {}, + uint16_t opacityMicromapIndexBufferStride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress indexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress vertexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress geometryIndexAndFlagsBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapArray_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapIndexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress instantiationBoundingBoxLimit_ = {} ) VULKAN_HPP_NOEXCEPT + : clusterID{ clusterID_ } + , clusterFlags{ clusterFlags_ } + , triangleCount{ triangleCount_ } + , vertexCount{ vertexCount_ } + , positionTruncateBitCount{ positionTruncateBitCount_ } + , indexType{ indexType_ } + , opacityMicromapIndexType{ opacityMicromapIndexType_ } + , baseGeometryIndexAndGeometryFlags{ 
baseGeometryIndexAndGeometryFlags_ } + , indexBufferStride{ indexBufferStride_ } + , vertexBufferStride{ vertexBufferStride_ } + , geometryIndexAndFlagsBufferStride{ geometryIndexAndFlagsBufferStride_ } + , opacityMicromapIndexBufferStride{ opacityMicromapIndexBufferStride_ } + , indexBuffer{ indexBuffer_ } + , vertexBuffer{ vertexBuffer_ } + , geometryIndexAndFlagsBuffer{ geometryIndexAndFlagsBuffer_ } + , opacityMicromapArray{ opacityMicromapArray_ } + , opacityMicromapIndexBuffer{ opacityMicromapIndexBuffer_ } + , instantiationBoundingBoxLimit{ instantiationBoundingBoxLimit_ } + { + } + + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( + ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( + *reinterpret_cast( &rhs ) ) + { + } + + explicit ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( + ClusterAccelerationStructureBuildTriangleClusterInfoNV const & clusterAccelerationStructureBuildTriangleClusterInfoNV, + VULKAN_HPP_NAMESPACE::DeviceAddress instantiationBoundingBoxLimit_ = {} ) + : clusterID( clusterAccelerationStructureBuildTriangleClusterInfoNV.clusterID ) + , clusterFlags( clusterAccelerationStructureBuildTriangleClusterInfoNV.clusterFlags ) + , triangleCount( clusterAccelerationStructureBuildTriangleClusterInfoNV.triangleCount ) + , vertexCount( clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexCount ) + , positionTruncateBitCount( clusterAccelerationStructureBuildTriangleClusterInfoNV.positionTruncateBitCount ) + , indexType( clusterAccelerationStructureBuildTriangleClusterInfoNV.indexType ) + , opacityMicromapIndexType( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexType ) + , baseGeometryIndexAndGeometryFlags( clusterAccelerationStructureBuildTriangleClusterInfoNV.baseGeometryIndexAndGeometryFlags ) + , indexBufferStride( clusterAccelerationStructureBuildTriangleClusterInfoNV.indexBufferStride ) + , vertexBufferStride( clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexBufferStride ) + , geometryIndexAndFlagsBufferStride( clusterAccelerationStructureBuildTriangleClusterInfoNV.geometryIndexAndFlagsBufferStride ) + , opacityMicromapIndexBufferStride( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexBufferStride ) + , indexBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.indexBuffer ) + , vertexBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexBuffer ) + , geometryIndexAndFlagsBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.geometryIndexAndFlagsBuffer ) + , opacityMicromapArray( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapArray ) + , opacityMicromapIndexBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexBuffer ) + , instantiationBoundingBoxLimit( instantiationBoundingBoxLimit_ ) + { + } + + ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + operator=( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + operator=( VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & 
rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setClusterID( uint32_t clusterID_ ) VULKAN_HPP_NOEXCEPT + { + clusterID = clusterID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setClusterFlags( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV clusterFlags_ ) VULKAN_HPP_NOEXCEPT + { + clusterFlags = clusterFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setTriangleCount( uint32_t triangleCount_ ) VULKAN_HPP_NOEXCEPT + { + triangleCount = triangleCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexCount = vertexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setPositionTruncateBitCount( uint32_t positionTruncateBitCount_ ) VULKAN_HPP_NOEXCEPT + { + positionTruncateBitCount = positionTruncateBitCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setIndexType( uint32_t indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setOpacityMicromapIndexType( uint32_t opacityMicromapIndexType_ ) VULKAN_HPP_NOEXCEPT + { + opacityMicromapIndexType = opacityMicromapIndexType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setBaseGeometryIndexAndGeometryFlags( + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & baseGeometryIndexAndGeometryFlags_ ) VULKAN_HPP_NOEXCEPT + { + baseGeometryIndexAndGeometryFlags = baseGeometryIndexAndGeometryFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setIndexBufferStride( uint16_t indexBufferStride_ ) VULKAN_HPP_NOEXCEPT + { + indexBufferStride = indexBufferStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setVertexBufferStride( uint16_t vertexBufferStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexBufferStride = vertexBufferStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setGeometryIndexAndFlagsBufferStride( uint16_t geometryIndexAndFlagsBufferStride_ ) VULKAN_HPP_NOEXCEPT + { + geometryIndexAndFlagsBufferStride = geometryIndexAndFlagsBufferStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setOpacityMicromapIndexBufferStride( uint16_t opacityMicromapIndexBufferStride_ ) VULKAN_HPP_NOEXCEPT + { + opacityMicromapIndexBufferStride = opacityMicromapIndexBufferStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress indexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + indexBuffer = indexBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + 
setVertexBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress vertexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + vertexBuffer = vertexBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setGeometryIndexAndFlagsBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress geometryIndexAndFlagsBuffer_ ) VULKAN_HPP_NOEXCEPT + { + geometryIndexAndFlagsBuffer = geometryIndexAndFlagsBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setOpacityMicromapArray( VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapArray_ ) VULKAN_HPP_NOEXCEPT + { + opacityMicromapArray = opacityMicromapArray_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setOpacityMicromapIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapIndexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + opacityMicromapIndexBuffer = opacityMicromapIndexBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setInstantiationBoundingBoxLimit( VULKAN_HPP_NAMESPACE::DeviceAddress instantiationBoundingBoxLimit_ ) VULKAN_HPP_NOEXCEPT + { + instantiationBoundingBoxLimit = instantiationBoundingBoxLimit_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( clusterID, + clusterFlags, + triangleCount, + vertexCount, + positionTruncateBitCount, + indexType, + opacityMicromapIndexType, + baseGeometryIndexAndGeometryFlags, + indexBufferStride, + vertexBufferStride, + geometryIndexAndFlagsBufferStride, + opacityMicromapIndexBufferStride, + indexBuffer, + vertexBuffer, + geometryIndexAndFlagsBuffer, + opacityMicromapArray, + opacityMicromapIndexBuffer, + instantiationBoundingBoxLimit ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & ) const = default; +#else + bool operator==( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( clusterID == rhs.clusterID ) && ( clusterFlags == rhs.clusterFlags ) && ( triangleCount == rhs.triangleCount ) && + ( vertexCount == rhs.vertexCount ) && ( positionTruncateBitCount == rhs.positionTruncateBitCount ) && ( indexType == rhs.indexType ) && + ( opacityMicromapIndexType == rhs.opacityMicromapIndexType ) && ( baseGeometryIndexAndGeometryFlags == rhs.baseGeometryIndexAndGeometryFlags ) && + ( indexBufferStride == rhs.indexBufferStride ) && ( vertexBufferStride == rhs.vertexBufferStride ) && + ( geometryIndexAndFlagsBufferStride == 
rhs.geometryIndexAndFlagsBufferStride ) && + ( opacityMicromapIndexBufferStride == rhs.opacityMicromapIndexBufferStride ) && ( indexBuffer == rhs.indexBuffer ) && + ( vertexBuffer == rhs.vertexBuffer ) && ( geometryIndexAndFlagsBuffer == rhs.geometryIndexAndFlagsBuffer ) && + ( opacityMicromapArray == rhs.opacityMicromapArray ) && ( opacityMicromapIndexBuffer == rhs.opacityMicromapIndexBuffer ) && + ( instantiationBoundingBoxLimit == rhs.instantiationBoundingBoxLimit ); +# endif + } + + bool operator!=( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t clusterID = {}; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV clusterFlags = {}; + uint32_t triangleCount : 9; + uint32_t vertexCount : 9; + uint32_t positionTruncateBitCount : 6; + uint32_t indexType : 4; + uint32_t opacityMicromapIndexType : 4; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags = {}; + uint16_t indexBufferStride = {}; + uint16_t vertexBufferStride = {}; + uint16_t geometryIndexAndFlagsBufferStride = {}; + uint16_t opacityMicromapIndexBufferStride = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress indexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress vertexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress geometryIndexAndFlagsBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapArray = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapIndexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress instantiationBoundingBoxLimit = {}; + }; + + // wrapper struct for struct VkClusterAccelerationStructureClustersBottomLevelInputNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureClustersBottomLevelInputNV.html + struct ClusterAccelerationStructureClustersBottomLevelInputNV + { + using NativeType = VkClusterAccelerationStructureClustersBottomLevelInputNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureClustersBottomLevelInputNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureClustersBottomLevelInputNV( uint32_t maxTotalClusterCount_ = {}, + uint32_t maxClusterCountPerAccelerationStructure_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxTotalClusterCount{ maxTotalClusterCount_ } + , maxClusterCountPerAccelerationStructure{ maxClusterCountPerAccelerationStructure_ } + { + } + + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureClustersBottomLevelInputNV( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + ClusterAccelerationStructureClustersBottomLevelInputNV( VkClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureClustersBottomLevelInputNV( *reinterpret_cast( &rhs ) ) + { + } + + ClusterAccelerationStructureClustersBottomLevelInputNV & + operator=( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ClusterAccelerationStructureClustersBottomLevelInputNV & + operator=( VkClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + 
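+    // Note (illustrative usage sketch, not generated code): this input struct describes worst-case
+    // cluster counts for a clusters-bottom-level operation and is typically referenced through the
+    // pClustersBottomLevel member of ClusterAccelerationStructureOpInputNV, defined further below.
+    // The counts used here are hypothetical.
+    //
+    //   VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClustersBottomLevelInputNV bottomLevelInput{};
+    //   bottomLevelInput.setMaxTotalClusterCount( 4096 ).setMaxClusterCountPerAccelerationStructure( 256 );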
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureClustersBottomLevelInputNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureClustersBottomLevelInputNV & + setMaxTotalClusterCount( uint32_t maxTotalClusterCount_ ) VULKAN_HPP_NOEXCEPT + { + maxTotalClusterCount = maxTotalClusterCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureClustersBottomLevelInputNV & + setMaxClusterCountPerAccelerationStructure( uint32_t maxClusterCountPerAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + maxClusterCountPerAccelerationStructure = maxClusterCountPerAccelerationStructure_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkClusterAccelerationStructureClustersBottomLevelInputNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureClustersBottomLevelInputNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureClustersBottomLevelInputNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureClustersBottomLevelInputNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxTotalClusterCount, maxClusterCountPerAccelerationStructure ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClusterAccelerationStructureClustersBottomLevelInputNV const & ) const = default; +#else + bool operator==( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTotalClusterCount == rhs.maxTotalClusterCount ) && + ( maxClusterCountPerAccelerationStructure == rhs.maxClusterCountPerAccelerationStructure ); +# endif + } + + bool operator!=( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eClusterAccelerationStructureClustersBottomLevelInputNV; + void * pNext = {}; + uint32_t maxTotalClusterCount = {}; + uint32_t maxClusterCountPerAccelerationStructure = {}; + }; + + template <> + struct CppType + { + using Type = ClusterAccelerationStructureClustersBottomLevelInputNV; + }; + + // wrapper struct for struct VkClusterAccelerationStructureTriangleClusterInputNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureTriangleClusterInputNV.html + struct ClusterAccelerationStructureTriangleClusterInputNV + { + using NativeType = VkClusterAccelerationStructureTriangleClusterInputNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureTriangleClusterInputNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ClusterAccelerationStructureTriangleClusterInputNV( VULKAN_HPP_NAMESPACE::Format vertexFormat_ = 
VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint32_t maxGeometryIndexValue_ = {}, + uint32_t maxClusterUniqueGeometryCount_ = {}, + uint32_t maxClusterTriangleCount_ = {}, + uint32_t maxClusterVertexCount_ = {}, + uint32_t maxTotalTriangleCount_ = {}, + uint32_t maxTotalVertexCount_ = {}, + uint32_t minPositionTruncateBitCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vertexFormat{ vertexFormat_ } + , maxGeometryIndexValue{ maxGeometryIndexValue_ } + , maxClusterUniqueGeometryCount{ maxClusterUniqueGeometryCount_ } + , maxClusterTriangleCount{ maxClusterTriangleCount_ } + , maxClusterVertexCount{ maxClusterVertexCount_ } + , maxTotalTriangleCount{ maxTotalTriangleCount_ } + , maxTotalVertexCount{ maxTotalVertexCount_ } + , minPositionTruncateBitCount{ minPositionTruncateBitCount_ } + { + } + + VULKAN_HPP_CONSTEXPR + ClusterAccelerationStructureTriangleClusterInputNV( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClusterAccelerationStructureTriangleClusterInputNV( VkClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureTriangleClusterInputNV( *reinterpret_cast( &rhs ) ) + { + } + + ClusterAccelerationStructureTriangleClusterInputNV & + operator=( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ClusterAccelerationStructureTriangleClusterInputNV & operator=( VkClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & + setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + { + vertexFormat = vertexFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxGeometryIndexValue( uint32_t maxGeometryIndexValue_ ) VULKAN_HPP_NOEXCEPT + { + maxGeometryIndexValue = maxGeometryIndexValue_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & + setMaxClusterUniqueGeometryCount( uint32_t maxClusterUniqueGeometryCount_ ) VULKAN_HPP_NOEXCEPT + { + maxClusterUniqueGeometryCount = maxClusterUniqueGeometryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & + setMaxClusterTriangleCount( uint32_t maxClusterTriangleCount_ ) VULKAN_HPP_NOEXCEPT + { + maxClusterTriangleCount = maxClusterTriangleCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxClusterVertexCount( uint32_t maxClusterVertexCount_ ) VULKAN_HPP_NOEXCEPT + { + maxClusterVertexCount = maxClusterVertexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxTotalTriangleCount( uint32_t maxTotalTriangleCount_ ) VULKAN_HPP_NOEXCEPT + { + maxTotalTriangleCount = maxTotalTriangleCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxTotalVertexCount( uint32_t maxTotalVertexCount_ ) VULKAN_HPP_NOEXCEPT + { + maxTotalVertexCount = 
maxTotalVertexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & + setMinPositionTruncateBitCount( uint32_t minPositionTruncateBitCount_ ) VULKAN_HPP_NOEXCEPT + { + minPositionTruncateBitCount = minPositionTruncateBitCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkClusterAccelerationStructureTriangleClusterInputNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureTriangleClusterInputNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureTriangleClusterInputNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureTriangleClusterInputNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + vertexFormat, + maxGeometryIndexValue, + maxClusterUniqueGeometryCount, + maxClusterTriangleCount, + maxClusterVertexCount, + maxTotalTriangleCount, + maxTotalVertexCount, + minPositionTruncateBitCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClusterAccelerationStructureTriangleClusterInputNV const & ) const = default; +#else + bool operator==( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexFormat == rhs.vertexFormat ) && + ( maxGeometryIndexValue == rhs.maxGeometryIndexValue ) && ( maxClusterUniqueGeometryCount == rhs.maxClusterUniqueGeometryCount ) && + ( maxClusterTriangleCount == rhs.maxClusterTriangleCount ) && ( maxClusterVertexCount == rhs.maxClusterVertexCount ) && + ( maxTotalTriangleCount == rhs.maxTotalTriangleCount ) && ( maxTotalVertexCount == rhs.maxTotalVertexCount ) && + ( minPositionTruncateBitCount == rhs.minPositionTruncateBitCount ); +# endif + } + + bool operator!=( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eClusterAccelerationStructureTriangleClusterInputNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t maxGeometryIndexValue = {}; + uint32_t maxClusterUniqueGeometryCount = {}; + uint32_t maxClusterTriangleCount = {}; + uint32_t maxClusterVertexCount = {}; + uint32_t maxTotalTriangleCount = {}; + uint32_t maxTotalVertexCount = {}; + uint32_t minPositionTruncateBitCount = {}; + }; + + template <> + struct CppType + { + using Type = ClusterAccelerationStructureTriangleClusterInputNV; + }; + + // wrapper struct for struct VkClusterAccelerationStructureMoveObjectsInputNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureMoveObjectsInputNV.html + struct ClusterAccelerationStructureMoveObjectsInputNV + { + using NativeType = VkClusterAccelerationStructureMoveObjectsInputNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureMoveObjectsInputNV; + +#if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureMoveObjectsInputNV( + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTypeNV type_ = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTypeNV::eClustersBottomLevel, + VULKAN_HPP_NAMESPACE::Bool32 noMoveOverlap_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxMovedBytes_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , noMoveOverlap{ noMoveOverlap_ } + , maxMovedBytes{ maxMovedBytes_ } + { + } + + VULKAN_HPP_CONSTEXPR + ClusterAccelerationStructureMoveObjectsInputNV( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClusterAccelerationStructureMoveObjectsInputNV( VkClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureMoveObjectsInputNV( *reinterpret_cast( &rhs ) ) + { + } + + ClusterAccelerationStructureMoveObjectsInputNV & operator=( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ClusterAccelerationStructureMoveObjectsInputNV & operator=( VkClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & + setType( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & setNoMoveOverlap( VULKAN_HPP_NAMESPACE::Bool32 noMoveOverlap_ ) VULKAN_HPP_NOEXCEPT + { + noMoveOverlap = noMoveOverlap_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & + setMaxMovedBytes( VULKAN_HPP_NAMESPACE::DeviceSize maxMovedBytes_ ) VULKAN_HPP_NOEXCEPT + { + maxMovedBytes = maxMovedBytes_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkClusterAccelerationStructureMoveObjectsInputNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureMoveObjectsInputNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureMoveObjectsInputNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureMoveObjectsInputNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, noMoveOverlap, maxMovedBytes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClusterAccelerationStructureMoveObjectsInputNV const & ) const = default; +#else + bool operator==( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( noMoveOverlap == 
rhs.noMoveOverlap ) && + ( maxMovedBytes == rhs.maxMovedBytes ); +# endif + } + + bool operator!=( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eClusterAccelerationStructureMoveObjectsInputNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTypeNV type = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTypeNV::eClustersBottomLevel; + VULKAN_HPP_NAMESPACE::Bool32 noMoveOverlap = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxMovedBytes = {}; + }; + + template <> + struct CppType + { + using Type = ClusterAccelerationStructureMoveObjectsInputNV; + }; + + union ClusterAccelerationStructureOpInputNV + { + using NativeType = VkClusterAccelerationStructureOpInputNV; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 + ClusterAccelerationStructureOpInputNV( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel_ = {} ) + : pClustersBottomLevel( pClustersBottomLevel_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 + ClusterAccelerationStructureOpInputNV( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTriangleClusterInputNV * pTriangleClusters_ ) + : pTriangleClusters( pTriangleClusters_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInputNV * pMoveObjects_ ) + : pMoveObjects( pMoveObjects_ ) + { + } +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV & + setPClustersBottomLevel( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel_ ) VULKAN_HPP_NOEXCEPT + { + pClustersBottomLevel = pClustersBottomLevel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV & + setPTriangleClusters( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTriangleClusterInputNV * pTriangleClusters_ ) VULKAN_HPP_NOEXCEPT + { + pTriangleClusters = pTriangleClusters_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV & + setPMoveObjects( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInputNV * pMoveObjects_ ) VULKAN_HPP_NOEXCEPT + { + pMoveObjects = pMoveObjects_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkClusterAccelerationStructureOpInputNV const &() const + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureOpInputNV &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTriangleClusterInputNV * pTriangleClusters; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInputNV * pMoveObjects; +#else + VkClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel; + VkClusterAccelerationStructureTriangleClusterInputNV * pTriangleClusters; + VkClusterAccelerationStructureMoveObjectsInputNV * pMoveObjects; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + // wrapper struct for struct VkClusterAccelerationStructureInputInfoNV, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureInputInfoNV.html + struct ClusterAccelerationStructureInputInfoNV + { + using NativeType = VkClusterAccelerationStructureInputInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureInputInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV( + uint32_t maxAccelerationStructureCount_ = {}, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpTypeNV opType_ = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpTypeNV::eMoveObjects, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpModeNV opMode_ = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpModeNV::eImplicitDestinations, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpInputNV opInput_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxAccelerationStructureCount{ maxAccelerationStructureCount_ } + , flags{ flags_ } + , opType{ opType_ } + , opMode{ opMode_ } + , opInput{ opInput_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV( ClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClusterAccelerationStructureInputInfoNV( VkClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureInputInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + ClusterAccelerationStructureInputInfoNV & operator=( ClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ClusterAccelerationStructureInputInfoNV & operator=( VkClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & + setMaxAccelerationStructureCount( uint32_t maxAccelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT + { + maxAccelerationStructureCount = maxAccelerationStructureCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & + setOpType( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpTypeNV opType_ ) VULKAN_HPP_NOEXCEPT + { + opType = opType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & + setOpMode( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpModeNV opMode_ ) VULKAN_HPP_NOEXCEPT + { + opMode = opMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & + setOpInput( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpInputNV const & opInput_ ) VULKAN_HPP_NOEXCEPT + { + opInput = opInput_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkClusterAccelerationStructureInputInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } 
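+    // Note (illustrative usage sketch, not generated code): opType and opMode select the cluster
+    // operation, and the matching pointer member of the opInput union must reference the corresponding
+    // *InputNV description. moveInput below is a hypothetical, separately filled-in local variable.
+    //
+    //   VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInputNV moveInput{};
+    //   VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV inputInfo{};
+    //   inputInfo.setMaxAccelerationStructureCount( 1 )
+    //            .setOpType( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpTypeNV::eMoveObjects )
+    //            .setOpMode( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpModeNV::eImplicitDestinations )
+    //            .setOpInput( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpInputNV{ &moveInput } );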
+ + operator VkClusterAccelerationStructureInputInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureInputInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureInputInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxAccelerationStructureCount, flags, opType, opMode, opInput ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eClusterAccelerationStructureInputInfoNV; + void * pNext = {}; + uint32_t maxAccelerationStructureCount = {}; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpTypeNV opType = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpTypeNV::eMoveObjects; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpModeNV opMode = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpModeNV::eImplicitDestinations; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpInputNV opInput = {}; + }; + + template <> + struct CppType + { + using Type = ClusterAccelerationStructureInputInfoNV; + }; + + // wrapper struct for struct VkStridedDeviceAddressRegionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkStridedDeviceAddressRegionKHR.html + struct StridedDeviceAddressRegionKHR + { + using NativeType = VkStridedDeviceAddressRegionKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceAddress{ deviceAddress_ } + , stride{ stride_ } + , size{ size_ } + { + } + + VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + StridedDeviceAddressRegionKHR( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : StridedDeviceAddressRegionKHR( *reinterpret_cast( &rhs ) ) + { + } + + StridedDeviceAddressRegionKHR & operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + StridedDeviceAddressRegionKHR & operator=( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkStridedDeviceAddressRegionKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT 
+ { + return *reinterpret_cast( this ); + } + + operator VkStridedDeviceAddressRegionKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkStridedDeviceAddressRegionKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( deviceAddress, stride, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( StridedDeviceAddressRegionKHR const & ) const = default; +#else + bool operator==( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( deviceAddress == rhs.deviceAddress ) && ( stride == rhs.stride ) && ( size == rhs.size ); +# endif + } + + bool operator!=( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + // wrapper struct for struct VkClusterAccelerationStructureCommandsInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureCommandsInfoNV.html + struct ClusterAccelerationStructureCommandsInfoNV + { + using NativeType = VkClusterAccelerationStructureCommandsInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureCommandsInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + ClusterAccelerationStructureCommandsInfoNV( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV input_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress dstImplicitData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress scratchData_ = {}, + VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR dstAddressesArray_ = {}, + VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR dstSizesArray_ = {}, + VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR srcInfosArray_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress srcInfosCount_ = {}, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureAddressResolutionFlagsNV addressResolutionFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , input{ input_ } + , dstImplicitData{ dstImplicitData_ } + , scratchData{ scratchData_ } + , dstAddressesArray{ dstAddressesArray_ } + , dstSizesArray{ dstSizesArray_ } + , srcInfosArray{ srcInfosArray_ } + , srcInfosCount{ srcInfosCount_ } + , addressResolutionFlags{ addressResolutionFlags_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV( ClusterAccelerationStructureCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClusterAccelerationStructureCommandsInfoNV( VkClusterAccelerationStructureCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureCommandsInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + ClusterAccelerationStructureCommandsInfoNV & operator=( ClusterAccelerationStructureCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ClusterAccelerationStructureCommandsInfoNV & operator=( VkClusterAccelerationStructureCommandsInfoNV const & rhs ) 
VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & + setInput( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV const & input_ ) VULKAN_HPP_NOEXCEPT + { + input = input_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & + setDstImplicitData( VULKAN_HPP_NAMESPACE::DeviceAddress dstImplicitData_ ) VULKAN_HPP_NOEXCEPT + { + dstImplicitData = dstImplicitData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setScratchData( VULKAN_HPP_NAMESPACE::DeviceAddress scratchData_ ) VULKAN_HPP_NOEXCEPT + { + scratchData = scratchData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & + setDstAddressesArray( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const & dstAddressesArray_ ) VULKAN_HPP_NOEXCEPT + { + dstAddressesArray = dstAddressesArray_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & + setDstSizesArray( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const & dstSizesArray_ ) VULKAN_HPP_NOEXCEPT + { + dstSizesArray = dstSizesArray_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & + setSrcInfosArray( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const & srcInfosArray_ ) VULKAN_HPP_NOEXCEPT + { + srcInfosArray = srcInfosArray_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & + setSrcInfosCount( VULKAN_HPP_NAMESPACE::DeviceAddress srcInfosCount_ ) VULKAN_HPP_NOEXCEPT + { + srcInfosCount = srcInfosCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & + setAddressResolutionFlags( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureAddressResolutionFlagsNV addressResolutionFlags_ ) VULKAN_HPP_NOEXCEPT + { + addressResolutionFlags = addressResolutionFlags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkClusterAccelerationStructureCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureCommandsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureCommandsInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureCommandsInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, input, dstImplicitData, scratchData, dstAddressesArray, dstSizesArray, srcInfosArray, srcInfosCount, addressResolutionFlags ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eClusterAccelerationStructureCommandsInfoNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV input = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress dstImplicitData = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress scratchData = {}; + VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR 
dstAddressesArray = {}; + VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR dstSizesArray = {}; + VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR srcInfosArray = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress srcInfosCount = {}; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureAddressResolutionFlagsNV addressResolutionFlags = {}; + }; + + template <> + struct CppType + { + using Type = ClusterAccelerationStructureCommandsInfoNV; + }; + + // wrapper struct for struct VkClusterAccelerationStructureInstantiateClusterInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureInstantiateClusterInfoNV.html + struct ClusterAccelerationStructureInstantiateClusterInfoNV + { + using NativeType = VkClusterAccelerationStructureInstantiateClusterInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ClusterAccelerationStructureInstantiateClusterInfoNV( uint32_t clusterIdOffset_ = {}, + uint32_t geometryIndexOffset_ = {}, + uint32_t reserved_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress clusterTemplateAddress_ = {}, + VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV vertexBuffer_ = {} ) VULKAN_HPP_NOEXCEPT + : clusterIdOffset{ clusterIdOffset_ } + , geometryIndexOffset{ geometryIndexOffset_ } + , reserved{ reserved_ } + , clusterTemplateAddress{ clusterTemplateAddress_ } + , vertexBuffer{ vertexBuffer_ } + { + } + + VULKAN_HPP_CONSTEXPR + ClusterAccelerationStructureInstantiateClusterInfoNV( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClusterAccelerationStructureInstantiateClusterInfoNV( VkClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureInstantiateClusterInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + ClusterAccelerationStructureInstantiateClusterInfoNV & + operator=( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ClusterAccelerationStructureInstantiateClusterInfoNV & operator=( VkClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & setClusterIdOffset( uint32_t clusterIdOffset_ ) VULKAN_HPP_NOEXCEPT + { + clusterIdOffset = clusterIdOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & setGeometryIndexOffset( uint32_t geometryIndexOffset_ ) VULKAN_HPP_NOEXCEPT + { + geometryIndexOffset = geometryIndexOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & setReserved( uint32_t reserved_ ) VULKAN_HPP_NOEXCEPT + { + reserved = reserved_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & + setClusterTemplateAddress( VULKAN_HPP_NAMESPACE::DeviceAddress clusterTemplateAddress_ ) VULKAN_HPP_NOEXCEPT + { + clusterTemplateAddress = clusterTemplateAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & + setVertexBuffer( VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV const & vertexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + vertexBuffer = vertexBuffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + 
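+    // Note (illustrative usage sketch, not generated code): one such record describes the instantiation
+    // of a previously built cluster template. clusterTemplateAddress, vertexBufferAddress and
+    // vertexStrideInBytes below are hypothetical application-side values.
+    //
+    //   VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInstantiateClusterInfoNV instantiateInfo{};
+    //   instantiateInfo.setClusterIdOffset( 0 )
+    //                  .setClusterTemplateAddress( clusterTemplateAddress )
+    //                  .setVertexBuffer( VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV{ vertexBufferAddress, vertexStrideInBytes } );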
operator VkClusterAccelerationStructureInstantiateClusterInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureInstantiateClusterInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureInstantiateClusterInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureInstantiateClusterInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( clusterIdOffset, geometryIndexOffset, reserved, clusterTemplateAddress, vertexBuffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClusterAccelerationStructureInstantiateClusterInfoNV const & ) const = default; +#else + bool operator==( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( clusterIdOffset == rhs.clusterIdOffset ) && ( geometryIndexOffset == rhs.geometryIndexOffset ) && ( reserved == rhs.reserved ) && + ( clusterTemplateAddress == rhs.clusterTemplateAddress ) && ( vertexBuffer == rhs.vertexBuffer ); +# endif + } + + bool operator!=( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t clusterIdOffset = {}; + uint32_t geometryIndexOffset : 24; + uint32_t reserved : 8; + VULKAN_HPP_NAMESPACE::DeviceAddress clusterTemplateAddress = {}; + VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV vertexBuffer = {}; + }; + + // wrapper struct for struct VkClusterAccelerationStructureMoveObjectsInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureMoveObjectsInfoNV.html + struct ClusterAccelerationStructureMoveObjectsInfoNV + { + using NativeType = VkClusterAccelerationStructureMoveObjectsInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureMoveObjectsInfoNV( VULKAN_HPP_NAMESPACE::DeviceAddress srcAccelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAccelerationStructure{ srcAccelerationStructure_ } + { + } + + VULKAN_HPP_CONSTEXPR + ClusterAccelerationStructureMoveObjectsInfoNV( ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClusterAccelerationStructureMoveObjectsInfoNV( VkClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureMoveObjectsInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + ClusterAccelerationStructureMoveObjectsInfoNV & operator=( ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ClusterAccelerationStructureMoveObjectsInfoNV & operator=( VkClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInfoNV & + setSrcAccelerationStructure( VULKAN_HPP_NAMESPACE::DeviceAddress srcAccelerationStructure_ ) 
VULKAN_HPP_NOEXCEPT + { + srcAccelerationStructure = srcAccelerationStructure_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkClusterAccelerationStructureMoveObjectsInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureMoveObjectsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureMoveObjectsInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureMoveObjectsInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcAccelerationStructure ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClusterAccelerationStructureMoveObjectsInfoNV const & ) const = default; +#else + bool operator==( ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcAccelerationStructure == rhs.srcAccelerationStructure ); +# endif + } + + bool operator!=( ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress srcAccelerationStructure = {}; + }; + + // wrapper struct for struct VkCoarseSampleLocationNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCoarseSampleLocationNV.html + struct CoarseSampleLocationNV + { + using NativeType = VkCoarseSampleLocationNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT + : pixelX{ pixelX_ } + , pixelY{ pixelY_ } + , sample{ sample_ } + { + } + + VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CoarseSampleLocationNV( *reinterpret_cast( &rhs ) ) + { + } + + CoarseSampleLocationNV & operator=( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CoarseSampleLocationNV & operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelX( uint32_t pixelX_ ) VULKAN_HPP_NOEXCEPT + { + pixelX = pixelX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelY( uint32_t pixelY_ ) VULKAN_HPP_NOEXCEPT + { + pixelY = pixelY_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setSample( uint32_t sample_ ) VULKAN_HPP_NOEXCEPT + { + sample = sample_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCoarseSampleLocationNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCoarseSampleLocationNV const *() const VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + + operator VkCoarseSampleLocationNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( pixelX, pixelY, sample ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CoarseSampleLocationNV const & ) const = default; +#else + bool operator==( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( pixelX == rhs.pixelX ) && ( pixelY == rhs.pixelY ) && ( sample == rhs.sample ); +# endif + } + + bool operator!=( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t pixelX = {}; + uint32_t pixelY = {}; + uint32_t sample = {}; + }; + + // wrapper struct for struct VkCoarseSampleOrderCustomNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCoarseSampleOrderCustomNV.html + struct CoarseSampleOrderCustomNV + { + using NativeType = VkCoarseSampleOrderCustomNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations, + uint32_t sampleCount_ = {}, + uint32_t sampleLocationCount_ = {}, + const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT + : shadingRate{ shadingRate_ } + , sampleCount{ sampleCount_ } + , sampleLocationCount{ sampleLocationCount_ } + , pSampleLocations{ pSampleLocations_ } + { + } + + VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CoarseSampleOrderCustomNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_, + uint32_t sampleCount_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sampleLocations_ ) + : shadingRate( shadingRate_ ) + , sampleCount( sampleCount_ ) + , sampleLocationCount( static_cast( sampleLocations_.size() ) ) + , pSampleLocations( sampleLocations_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CoarseSampleOrderCustomNV & operator=( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CoarseSampleOrderCustomNV & operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT + { + shadingRate = shadingRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleCount( uint32_t sampleCount_ ) VULKAN_HPP_NOEXCEPT + { + sampleCount = sampleCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleLocationCount( uint32_t sampleLocationCount_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationCount = sampleLocationCount_; + 
return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & + setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + pSampleLocations = pSampleLocations_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CoarseSampleOrderCustomNV & setSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationCount = static_cast( sampleLocations_.size() ); + pSampleLocations = sampleLocations_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCoarseSampleOrderCustomNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCoarseSampleOrderCustomNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCoarseSampleOrderCustomNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( shadingRate, sampleCount, sampleLocationCount, pSampleLocations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CoarseSampleOrderCustomNV const & ) const = default; +#else + bool operator==( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( shadingRate == rhs.shadingRate ) && ( sampleCount == rhs.sampleCount ) && ( sampleLocationCount == rhs.sampleLocationCount ) && + ( pSampleLocations == rhs.pSampleLocations ); +# endif + } + + bool operator!=( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations; + uint32_t sampleCount = {}; + uint32_t sampleLocationCount = {}; + const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations = {}; + }; + + // wrapper struct for struct VkColorBlendAdvancedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkColorBlendAdvancedEXT.html + struct ColorBlendAdvancedEXT + { + using NativeType = VkColorBlendAdvancedEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ColorBlendAdvancedEXT( VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, + VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated, + VULKAN_HPP_NAMESPACE::Bool32 clampResults_ = {} ) VULKAN_HPP_NOEXCEPT + : advancedBlendOp{ advancedBlendOp_ } + , srcPremultiplied{ srcPremultiplied_ } + , dstPremultiplied{ dstPremultiplied_ } + , blendOverlap{ blendOverlap_ } + , clampResults{ clampResults_ } + { + } + + VULKAN_HPP_CONSTEXPR ColorBlendAdvancedEXT( ColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ColorBlendAdvancedEXT( VkColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ColorBlendAdvancedEXT( *reinterpret_cast( &rhs ) ) + { + } + + ColorBlendAdvancedEXT & 
operator=( ColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ColorBlendAdvancedEXT & operator=( VkColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setAdvancedBlendOp( VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp_ ) VULKAN_HPP_NOEXCEPT + { + advancedBlendOp = advancedBlendOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT + { + srcPremultiplied = srcPremultiplied_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT + { + dstPremultiplied = dstPremultiplied_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT + { + blendOverlap = blendOverlap_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setClampResults( VULKAN_HPP_NAMESPACE::Bool32 clampResults_ ) VULKAN_HPP_NOEXCEPT + { + clampResults = clampResults_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkColorBlendAdvancedEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkColorBlendAdvancedEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkColorBlendAdvancedEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkColorBlendAdvancedEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( advancedBlendOp, srcPremultiplied, dstPremultiplied, blendOverlap, clampResults ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ColorBlendAdvancedEXT const & ) const = default; +#else + bool operator==( ColorBlendAdvancedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( advancedBlendOp == rhs.advancedBlendOp ) && ( srcPremultiplied == rhs.srcPremultiplied ) && ( dstPremultiplied == rhs.dstPremultiplied ) && + ( blendOverlap == rhs.blendOverlap ) && ( clampResults == rhs.clampResults ); +# endif + } + + bool operator!=( ColorBlendAdvancedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; + VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {}; + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {}; + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated; + VULKAN_HPP_NAMESPACE::Bool32 clampResults = {}; + }; + + // wrapper struct for struct VkColorBlendEquationEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkColorBlendEquationEXT.html + struct ColorBlendEquationEXT + { + using NativeType = VkColorBlendEquationEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ColorBlendEquationEXT( 
VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, + VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd ) VULKAN_HPP_NOEXCEPT + : srcColorBlendFactor{ srcColorBlendFactor_ } + , dstColorBlendFactor{ dstColorBlendFactor_ } + , colorBlendOp{ colorBlendOp_ } + , srcAlphaBlendFactor{ srcAlphaBlendFactor_ } + , dstAlphaBlendFactor{ dstAlphaBlendFactor_ } + , alphaBlendOp{ alphaBlendOp_ } + { + } + + VULKAN_HPP_CONSTEXPR ColorBlendEquationEXT( ColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ColorBlendEquationEXT( VkColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ColorBlendEquationEXT( *reinterpret_cast( &rhs ) ) + { + } + + ColorBlendEquationEXT & operator=( ColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ColorBlendEquationEXT & operator=( VkColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + srcColorBlendFactor = srcColorBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + dstColorBlendFactor = dstColorBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT + { + colorBlendOp = colorBlendOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + srcAlphaBlendFactor = srcAlphaBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + dstAlphaBlendFactor = dstAlphaBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT + { + alphaBlendOp = alphaBlendOp_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkColorBlendEquationEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkColorBlendEquationEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkColorBlendEquationEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkColorBlendEquationEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp ); + } 
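+    // Illustrative usage sketch (added comment, not generated code): assuming the
+    // VK_EXT_extended_dynamic_state3 extension is enabled and `cmd` is a recording CommandBuffer,
+    // a standard alpha-blend equation could be set dynamically roughly like this:
+    //   VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT eq{ VULKAN_HPP_NAMESPACE::BlendFactor::eSrcAlpha,
+    //                                                   VULKAN_HPP_NAMESPACE::BlendFactor::eOneMinusSrcAlpha,
+    //                                                   VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
+    //                                                   VULKAN_HPP_NAMESPACE::BlendFactor::eOne,
+    //                                                   VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+    //                                                   VULKAN_HPP_NAMESPACE::BlendOp::eAdd };
+    //   cmd.setColorBlendEquationEXT( 0 /*firstAttachment*/, eq );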
+#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ColorBlendEquationEXT const & ) const = default; +#else + bool operator==( ColorBlendEquationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcColorBlendFactor == rhs.srcColorBlendFactor ) && ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && ( colorBlendOp == rhs.colorBlendOp ) && + ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) && ( alphaBlendOp == rhs.alphaBlendOp ); +# endif + } + + bool operator!=( ColorBlendEquationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; + VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; + }; + + // wrapper struct for struct VkCommandBufferAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferAllocateInfo.html + struct CommandBufferAllocateInfo + { + using NativeType = VkCommandBufferAllocateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferAllocateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {}, + VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, + uint32_t commandBufferCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , commandPool{ commandPool_ } + , level{ level_ } + , commandBufferCount{ commandBufferCount_ } + { + } + + VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferAllocateInfo( *reinterpret_cast( &rhs ) ) + { + } + + CommandBufferAllocateInfo & operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CommandBufferAllocateInfo & operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT + { + commandPool = commandPool_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT + { + level = level_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandBufferCount( 
uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = commandBufferCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCommandBufferAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferAllocateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCommandBufferAllocateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, commandPool, level, commandBufferCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferAllocateInfo const & ) const = default; +#else + bool operator==( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandPool == rhs.commandPool ) && ( level == rhs.level ) && + ( commandBufferCount == rhs.commandBufferCount ); +# endif + } + + bool operator!=( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandPool commandPool = {}; + VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary; + uint32_t commandBufferCount = {}; + }; + + template <> + struct CppType + { + using Type = CommandBufferAllocateInfo; + }; + + // wrapper struct for struct VkCommandBufferInheritanceInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceInfo.html + struct CommandBufferInheritanceInfo + { + using NativeType = VkCommandBufferInheritanceInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {}, + VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , renderPass{ renderPass_ } + , subpass{ subpass_ } + , framebuffer{ framebuffer_ } + , occlusionQueryEnable{ occlusionQueryEnable_ } + , queryFlags{ queryFlags_ } + , pipelineStatistics{ pipelineStatistics_ } + { + } + + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceInfo( *reinterpret_cast( &rhs ) ) + { + } + + CommandBufferInheritanceInfo & operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + 
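+    // Illustrative usage sketch (added comment, not generated code): inheritance info is passed to a
+    // secondary command buffer through CommandBufferBeginInfo::pInheritanceInfo. `renderPass`,
+    // `framebuffer` and a secondary-level CommandBuffer `secondary` are assumed to exist elsewhere:
+    //   VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo inheritance{ renderPass, 0 /*subpass*/, framebuffer };
+    //   VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo beginInfo{ VULKAN_HPP_NAMESPACE::CommandBufferUsageFlagBits::eRenderPassContinue, &inheritance };
+    //   secondary.begin( beginInfo );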
CommandBufferInheritanceInfo & operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT + { + renderPass = renderPass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT + { + subpass = subpass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT + { + framebuffer = framebuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT + { + occlusionQueryEnable = occlusionQueryEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT + { + queryFlags = queryFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & + setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStatistics = pipelineStatistics_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCommandBufferInheritanceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, renderPass, subpass, framebuffer, occlusionQueryEnable, queryFlags, pipelineStatistics ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceInfo const & ) const = default; +#else + bool operator==( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ) && + ( framebuffer == rhs.framebuffer ) && ( occlusionQueryEnable == rhs.occlusionQueryEnable ) && ( queryFlags == rhs.queryFlags ) && + ( pipelineStatistics == rhs.pipelineStatistics ); +# endif + } + + bool operator!=( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t subpass = {}; + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {}; + 
VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {}; + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; + }; + + template <> + struct CppType + { + using Type = CommandBufferInheritanceInfo; + }; + + // wrapper struct for struct VkCommandBufferBeginInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferBeginInfo.html + struct CommandBufferBeginInfo + { + using NativeType = VkCommandBufferBeginInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferBeginInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pInheritanceInfo{ pInheritanceInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferBeginInfo( *reinterpret_cast( &rhs ) ) + { + } + + CommandBufferBeginInfo & operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CommandBufferBeginInfo & operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & + setPInheritanceInfo( const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT + { + pInheritanceInfo = pInheritanceInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferBeginInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCommandBufferBeginInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pInheritanceInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferBeginInfo const & ) const = default; +#else + bool operator==( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pInheritanceInfo == rhs.pInheritanceInfo ); +# endif + } + + bool operator!=( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } 
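+    // Illustrative usage sketch (added comment, not generated code): a common one-shot recording
+    // pattern, assuming `cmd` is a primary CommandBuffer allocated elsewhere:
+    //   cmd.begin( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo{ VULKAN_HPP_NAMESPACE::CommandBufferUsageFlagBits::eOneTimeSubmit } );
+    //   // ... record commands ...
+    //   cmd.end();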
+#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {}; + const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo = {}; + }; + + template <> + struct CppType + { + using Type = CommandBufferBeginInfo; + }; + + // wrapper struct for struct VkCommandBufferInheritanceConditionalRenderingInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceConditionalRenderingInfoEXT.html + struct CommandBufferInheritanceConditionalRenderingInfoEXT + { + using NativeType = VkCommandBufferInheritanceConditionalRenderingInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , conditionalRenderingEnable{ conditionalRenderingEnable_ } + { + } + + VULKAN_HPP_CONSTEXPR + CommandBufferInheritanceConditionalRenderingInfoEXT( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceConditionalRenderingInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + CommandBufferInheritanceConditionalRenderingInfoEXT & + operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & + setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT + { + conditionalRenderingEnable = conditionalRenderingEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceConditionalRenderingInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, conditionalRenderingEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
CommandBufferInheritanceConditionalRenderingInfoEXT const & ) const = default; +#else + bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRenderingEnable == rhs.conditionalRenderingEnable ); +# endif + } + + bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable = {}; + }; + + template <> + struct CppType + { + using Type = CommandBufferInheritanceConditionalRenderingInfoEXT; + }; + + // wrapper struct for struct VkCommandBufferInheritanceRenderPassTransformInfoQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceRenderPassTransformInfoQCOM.html + struct CommandBufferInheritanceRenderPassTransformInfoQCOM + { + using NativeType = VkCommandBufferInheritanceRenderPassTransformInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , transform{ transform_ } + , renderArea{ renderArea_ } + { + } + + VULKAN_HPP_CONSTEXPR + CommandBufferInheritanceRenderPassTransformInfoQCOM( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceRenderPassTransformInfoQCOM( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceRenderPassTransformInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + + CommandBufferInheritanceRenderPassTransformInfoQCOM & + operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & + setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & + setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT + { + renderArea = renderArea_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator 
VkCommandBufferInheritanceRenderPassTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, transform, renderArea ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const & ) const = default; +#else + bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ) && ( renderArea == rhs.renderArea ); +# endif + } + + bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; + }; + + template <> + struct CppType + { + using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM; + }; + + // wrapper struct for struct VkCommandBufferInheritanceRenderingInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceRenderingInfo.html + struct CommandBufferInheritanceRenderingInfo + { + using NativeType = VkCommandBufferInheritanceRenderingInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderingInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CommandBufferInheritanceRenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {}, + uint32_t viewMask_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , viewMask{ viewMask_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachmentFormats{ pColorAttachmentFormats_ } + , depthAttachmentFormat{ depthAttachmentFormat_ } + , stencilAttachmentFormat{ stencilAttachmentFormat_ } + , rasterizationSamples{ rasterizationSamples_ } + { + } + + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
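+    // Illustrative usage sketch (added comment, not generated code): with dynamic rendering
+    // (Vulkan 1.3 / VK_KHR_dynamic_rendering), a secondary command buffer describes its attachment
+    // formats here and chains the struct into CommandBufferInheritanceInfo::pNext. A color format
+    // is assumed to have been chosen elsewhere:
+    //   VULKAN_HPP_NAMESPACE::Format colorFormat = VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm;
+    //   VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo renderingInheritance{ {}, 0 /*viewMask*/, 1, &colorFormat };
+    //   VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo inheritance{};
+    //   inheritance.pNext = &renderingInheritance;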
CommandBufferInheritanceRenderingInfo( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceRenderingInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CommandBufferInheritanceRenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_, + uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( static_cast( colorAttachmentFormats_.size() ) ) + , pColorAttachmentFormats( colorAttachmentFormats_.data() ) + , depthAttachmentFormat( depthAttachmentFormat_ ) + , stencilAttachmentFormat( stencilAttachmentFormat_ ) + , rasterizationSamples( rasterizationSamples_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CommandBufferInheritanceRenderingInfo & operator=( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CommandBufferInheritanceRenderingInfo & operator=( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + { + viewMask = viewMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & + setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachmentFormats = pColorAttachmentFormats_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CommandBufferInheritanceRenderingInfo & setColorAttachmentFormats( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachmentFormats_.size() ); + pColorAttachmentFormats = colorAttachmentFormats_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & + setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + depthAttachmentFormat = depthAttachmentFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & + setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + stencilAttachmentFormat = stencilAttachmentFormat_; + return 
*this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & + setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationSamples = rasterizationSamples_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCommandBufferInheritanceRenderingInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceRenderingInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceRenderingInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceRenderingInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, flags, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat, rasterizationSamples ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceRenderingInfo const & ) const = default; +#else + bool operator==( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewMask == rhs.viewMask ) && + ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && + ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ) && + ( rasterizationSamples == rhs.rasterizationSamples ); +# endif + } + + bool operator!=( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderingInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderingFlags flags = {}; + uint32_t viewMask = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {}; + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + }; + + template <> + struct CppType + { + using Type = CommandBufferInheritanceRenderingInfo; + }; + + using CommandBufferInheritanceRenderingInfoKHR = CommandBufferInheritanceRenderingInfo; + + // wrapper struct for struct VkViewport, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkViewport.html + struct Viewport + { + using NativeType = VkViewport; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + Viewport( float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT + : x{ x_ } + , y{ y_ } + , width{ width_ } + , height{ height_ } + , minDepth{ minDepth_ } + , maxDepth{ maxDepth_ } + { + } + + VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Viewport( 
VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT : Viewport( *reinterpret_cast( &rhs ) ) {} + + Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT + { + y = y_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT + { + minDepth = minDepth_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxDepth = maxDepth_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkViewport const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkViewport &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkViewport const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkViewport *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( x, y, width, height, minDepth, maxDepth ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Viewport const & ) const = default; +#else + bool operator==( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( x == rhs.x ) && ( y == rhs.y ) && ( width == rhs.width ) && ( height == rhs.height ) && ( minDepth == rhs.minDepth ) && + ( maxDepth == rhs.maxDepth ); +# endif + } + + bool operator!=( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float x = {}; + float y = {}; + float width = {}; + float height = {}; + float minDepth = {}; + float maxDepth = {}; + }; + + // wrapper struct for struct VkCommandBufferInheritanceViewportScissorInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceViewportScissorInfoNV.html + struct CommandBufferInheritanceViewportScissorInfoNV + { + using NativeType = VkCommandBufferInheritanceViewportScissorInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ = {}, + uint32_t viewportDepthCount_ = {}, + const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , viewportScissor2D{ viewportScissor2D_ } + , viewportDepthCount{ viewportDepthCount_ } + , pViewportDepths{ 
pViewportDepths_ } + { + } + + VULKAN_HPP_CONSTEXPR + CommandBufferInheritanceViewportScissorInfoNV( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceViewportScissorInfoNV( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceViewportScissorInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + CommandBufferInheritanceViewportScissorInfoNV & operator=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CommandBufferInheritanceViewportScissorInfoNV & operator=( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & + setViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ ) VULKAN_HPP_NOEXCEPT + { + viewportScissor2D = viewportScissor2D_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setViewportDepthCount( uint32_t viewportDepthCount_ ) VULKAN_HPP_NOEXCEPT + { + viewportDepthCount = viewportDepthCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & + setPViewportDepths( const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ ) VULKAN_HPP_NOEXCEPT + { + pViewportDepths = pViewportDepths_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCommandBufferInheritanceViewportScissorInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceViewportScissorInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceViewportScissorInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceViewportScissorInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, viewportScissor2D, viewportDepthCount, pViewportDepths ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceViewportScissorInfoNV const & ) const = default; +#else + bool operator==( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewportScissor2D == rhs.viewportScissor2D ) && + ( viewportDepthCount == rhs.viewportDepthCount ) && ( pViewportDepths == rhs.pViewportDepths ); +# endif + } + + bool operator!=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D = {}; + uint32_t viewportDepthCount = {}; + 
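+    // Illustrative usage sketch (added comment, not generated code): with VK_NV_inherited_viewport_scissor,
+    // a secondary command buffer can inherit viewport/scissor state; pViewportDepths below supplies the
+    // expected depth ranges. One viewport with a [0, 1] depth range is assumed:
+    //   VULKAN_HPP_NAMESPACE::Viewport depthRange{ 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f };
+    //   VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV vpInheritance{ VK_TRUE, 1, &depthRange };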
const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths = {}; + }; + + template <> + struct CppType + { + using Type = CommandBufferInheritanceViewportScissorInfoNV; + }; + + // wrapper struct for struct VkCommandBufferSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferSubmitInfo.html + struct CommandBufferSubmitInfo + { + using NativeType = VkCommandBufferSubmitInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferSubmitInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ = {}, + uint32_t deviceMask_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , commandBuffer{ commandBuffer_ } + , deviceMask{ deviceMask_ } + { + } + + VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferSubmitInfo( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferSubmitInfo( *reinterpret_cast( &rhs ) ) + { + } + + CommandBufferSubmitInfo & operator=( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CommandBufferSubmitInfo & operator=( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ ) VULKAN_HPP_NOEXCEPT + { + commandBuffer = commandBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCommandBufferSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferSubmitInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCommandBufferSubmitInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, commandBuffer, deviceMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferSubmitInfo const & ) const = default; +#else + bool operator==( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBuffer == rhs.commandBuffer ) && ( deviceMask == rhs.deviceMask ); +# endif + } + + bool operator!=( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferSubmitInfo; + const void * pNext = {}; + 
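+    // Illustrative usage sketch (added comment, not generated code): with synchronization2
+    // (Vulkan 1.3 / VK_KHR_synchronization2), recorded command buffers are submitted through
+    // SubmitInfo2. `cmd` is assumed to be a recorded CommandBuffer and `queue` a Queue obtained elsewhere:
+    //   VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo cmdInfo{ cmd, 0 /*deviceMask*/ };
+    //   VULKAN_HPP_NAMESPACE::SubmitInfo2 submit{ {}, {}, cmdInfo };
+    //   queue.submit2( submit );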
VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer = {}; + uint32_t deviceMask = {}; + }; + + template <> + struct CppType + { + using Type = CommandBufferSubmitInfo; + }; + + using CommandBufferSubmitInfoKHR = CommandBufferSubmitInfo; + + // wrapper struct for struct VkCommandPoolCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandPoolCreateInfo.html + struct CommandPoolCreateInfo + { + using NativeType = VkCommandPoolCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, + uint32_t queueFamilyIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , queueFamilyIndex{ queueFamilyIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandPoolCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCommandPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandPoolCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCommandPoolCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, queueFamilyIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandPoolCreateInfo const & ) const = default; +#else + bool operator==( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ); +# endif + } + + bool operator!=( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo; + const void * pNext 
= {}; + VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {}; + uint32_t queueFamilyIndex = {}; + }; + + template <> + struct CppType + { + using Type = CommandPoolCreateInfo; + }; + + // wrapper struct for struct VkSpecializationMapEntry, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSpecializationMapEntry.html + struct SpecializationMapEntry + { + using NativeType = VkSpecializationMapEntry; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SpecializationMapEntry( uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT + : constantID{ constantID_ } + , offset{ offset_ } + , size{ size_ } + { + } + + VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT + : SpecializationMapEntry( *reinterpret_cast( &rhs ) ) + { + } + + SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT + { + constantID = constantID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSpecializationMapEntry const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSpecializationMapEntry const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSpecializationMapEntry *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( constantID, offset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SpecializationMapEntry const & ) const = default; +#else + bool operator==( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( constantID == rhs.constantID ) && ( offset == rhs.offset ) && ( size == rhs.size ); +# endif + } + + bool operator!=( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t constantID = {}; + uint32_t offset = {}; + size_t size = {}; + }; + + // wrapper struct for struct VkSpecializationInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSpecializationInfo.html + struct SpecializationInfo + { + using NativeType = VkSpecializationInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = {}, + const 
VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ = {}, + size_t dataSize_ = {}, + const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT + : mapEntryCount{ mapEntryCount_ } + , pMapEntries{ pMapEntries_ } + , dataSize{ dataSize_ } + , pData{ pData_ } + { + } + + VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SpecializationInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & mapEntries_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ = {} ) + : mapEntryCount( static_cast( mapEntries_.size() ) ) + , pMapEntries( mapEntries_.data() ) + , dataSize( data_.size() * sizeof( T ) ) + , pData( data_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + mapEntryCount = mapEntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ ) VULKAN_HPP_NOEXCEPT + { + pMapEntries = pMapEntries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SpecializationInfo & + setMapEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & mapEntries_ ) VULKAN_HPP_NOEXCEPT + { + mapEntryCount = static_cast( mapEntries_.size() ); + pMapEntries = mapEntries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = dataSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT + { + pData = pData_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + SpecializationInfo & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = data_.size() * sizeof( T ); + pData = data_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSpecializationInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSpecializationInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSpecializationInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( mapEntryCount, pMapEntries, dataSize, pData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SpecializationInfo const & ) const = default; +#else + bool operator==( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if 
defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( mapEntryCount == rhs.mapEntryCount ) && ( pMapEntries == rhs.pMapEntries ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); +# endif + } + + bool operator!=( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t mapEntryCount = {}; + const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries = {}; + size_t dataSize = {}; + const void * pData = {}; + }; + + // wrapper struct for struct VkPipelineShaderStageCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineShaderStageCreateInfo.html + struct PipelineShaderStageCreateInfo + { + using NativeType = VkPipelineShaderStageCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, + VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, + const char * pName_ = {}, + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , stage{ stage_ } + , module{ module_ } + , pName{ pName_ } + , pSpecializationInfo{ pSpecializationInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineShaderStageCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT + { + stage = stage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT + { + module = module_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT + { + pName = pName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & + setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT + { + pSpecializationInfo = pSpecializationInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator 
VkPipelineShaderStageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineShaderStageCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineShaderStageCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, stage, module, pName, pSpecializationInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = stage <=> rhs.stage; cmp != 0 ) + return cmp; + if ( auto cmp = module <=> rhs.module; cmp != 0 ) + return cmp; + if ( pName != rhs.pName ) + if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( module == rhs.module ) && + ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( pSpecializationInfo == rhs.pSpecializationInfo ); + } + + bool operator!=( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex; + VULKAN_HPP_NAMESPACE::ShaderModule module = {}; + const char * pName = {}; + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {}; + }; + + template <> + struct CppType + { + using Type = PipelineShaderStageCreateInfo; + }; + + // wrapper struct for struct VkComputePipelineCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkComputePipelineCreateInfo.html + struct ComputePipelineCreateInfo + { + using NativeType = VkComputePipelineCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , stage{ stage_ } + , layout{ layout_ } + , basePipelineHandle{ basePipelineHandle_ } 
+ , basePipelineIndex{ basePipelineIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ComputePipelineCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT + { + stage = stage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkComputePipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkComputePipelineCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkComputePipelineCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, stage, layout, basePipelineHandle, basePipelineIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ComputePipelineCreateInfo const & ) const = default; +#else + bool operator==( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( layout == rhs.layout ) && + ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); +# endif + } + + bool operator!=( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = 
{}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + }; + + template <> + struct CppType + { + using Type = ComputePipelineCreateInfo; + }; + + // wrapper struct for struct VkComputePipelineIndirectBufferInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkComputePipelineIndirectBufferInfoNV.html + struct ComputePipelineIndirectBufferInfoNV + { + using NativeType = VkComputePipelineIndirectBufferInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineIndirectBufferInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ComputePipelineIndirectBufferInfoNV( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress pipelineDeviceAddressCaptureReplay_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceAddress{ deviceAddress_ } + , size{ size_ } + , pipelineDeviceAddressCaptureReplay{ pipelineDeviceAddressCaptureReplay_ } + { + } + + VULKAN_HPP_CONSTEXPR ComputePipelineIndirectBufferInfoNV( ComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ComputePipelineIndirectBufferInfoNV( VkComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ComputePipelineIndirectBufferInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + ComputePipelineIndirectBufferInfoNV & operator=( ComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ComputePipelineIndirectBufferInfoNV & operator=( VkComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & + setPipelineDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::DeviceAddress pipelineDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + pipelineDeviceAddressCaptureReplay = pipelineDeviceAddressCaptureReplay_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkComputePipelineIndirectBufferInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkComputePipelineIndirectBufferInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkComputePipelineIndirectBufferInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkComputePipelineIndirectBufferInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const 
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceAddress, size, pipelineDeviceAddressCaptureReplay ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ComputePipelineIndirectBufferInfoNV const & ) const = default; +#else + bool operator==( ComputePipelineIndirectBufferInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) && ( size == rhs.size ) && + ( pipelineDeviceAddressCaptureReplay == rhs.pipelineDeviceAddressCaptureReplay ); +# endif + } + + bool operator!=( ComputePipelineIndirectBufferInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineIndirectBufferInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress pipelineDeviceAddressCaptureReplay = {}; + }; + + template <> + struct CppType + { + using Type = ComputePipelineIndirectBufferInfoNV; + }; + + // wrapper struct for struct VkConditionalRenderingBeginInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkConditionalRenderingBeginInfoEXT.html + struct ConditionalRenderingBeginInfoEXT + { + using NativeType = VkConditionalRenderingBeginInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , buffer{ buffer_ } + , offset{ offset_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ConditionalRenderingBeginInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) 
VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkConditionalRenderingBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkConditionalRenderingBeginInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkConditionalRenderingBeginInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, buffer, offset, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ConditionalRenderingBeginInfoEXT const & ) const = default; +#else + bool operator==( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {}; + }; + + template <> + struct CppType + { + using Type = ConditionalRenderingBeginInfoEXT; + }; + + // wrapper struct for struct VkConformanceVersion, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkConformanceVersion.html + struct ConformanceVersion + { + using NativeType = VkConformanceVersion; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT + : major{ major_ } + , minor{ minor_ } + , subminor{ subminor_ } + , patch{ patch_ } + { + } + + VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT : ConformanceVersion( *reinterpret_cast( &rhs ) ) {} + + ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT + { + major = major_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT + { + minor = minor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT + { + subminor = subminor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT + { + patch = patch_; + return *this; + } 
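    // --- editorial aside, not part of the generated header -------------------
    // Hedged sketch of using ConditionalRenderingBeginInfoEXT (defined above);
    // `cmd` and `predicateBuffer` are hypothetical handles, and the device must
    // have VK_EXT_conditional_rendering enabled:
    //
    //   vk::ConditionalRenderingBeginInfoEXT condInfo( predicateBuffer, /* offset */ 0 );
    //   cmd.beginConditionalRenderingEXT( condInfo );
    //   // ... draws executed only if the 32-bit predicate at `offset` is non-zero ...
    //   cmd.endConditionalRenderingEXT();
    // --------------------------------------------------------------------------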
+#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkConformanceVersion const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkConformanceVersion const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkConformanceVersion *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( major, minor, subminor, patch ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ConformanceVersion const & ) const = default; +#else + bool operator==( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( major == rhs.major ) && ( minor == rhs.minor ) && ( subminor == rhs.subminor ) && ( patch == rhs.patch ); +# endif + } + + bool operator!=( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint8_t major = {}; + uint8_t minor = {}; + uint8_t subminor = {}; + uint8_t patch = {}; + }; + + using ConformanceVersionKHR = ConformanceVersion; + + // wrapper struct for struct VkConvertCooperativeVectorMatrixInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkConvertCooperativeVectorMatrixInfoNV.html + struct ConvertCooperativeVectorMatrixInfoNV + { + using NativeType = VkConvertCooperativeVectorMatrixInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConvertCooperativeVectorMatrixInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV( + size_t srcSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR srcData_ = {}, + size_t * pDstSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dstData_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR srcComponentType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR dstComponentType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + uint32_t numRows_ = {}, + uint32_t numColumns_ = {}, + VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV srcLayout_ = VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV::eRowMajor, + size_t srcStride_ = {}, + VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV dstLayout_ = VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV::eRowMajor, + size_t dstStride_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcSize{ srcSize_ } + , srcData{ srcData_ } + , pDstSize{ pDstSize_ } + , dstData{ dstData_ } + , srcComponentType{ srcComponentType_ } + , dstComponentType{ dstComponentType_ } + , numRows{ numRows_ } + , numColumns{ numColumns_ } + , srcLayout{ srcLayout_ } + , srcStride{ srcStride_ } + , dstLayout{ dstLayout_ } + , dstStride{ dstStride_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV( ConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ConvertCooperativeVectorMatrixInfoNV( VkConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ConvertCooperativeVectorMatrixInfoNV( *reinterpret_cast( 
&rhs ) ) + { + } + + ConvertCooperativeVectorMatrixInfoNV & operator=( ConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ConvertCooperativeVectorMatrixInfoNV & operator=( VkConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setSrcSize( size_t srcSize_ ) VULKAN_HPP_NOEXCEPT + { + srcSize = srcSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & + setSrcData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & srcData_ ) VULKAN_HPP_NOEXCEPT + { + srcData = srcData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setPDstSize( size_t * pDstSize_ ) VULKAN_HPP_NOEXCEPT + { + pDstSize = pDstSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & + setDstData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dstData_ ) VULKAN_HPP_NOEXCEPT + { + dstData = dstData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & + setSrcComponentType( VULKAN_HPP_NAMESPACE::ComponentTypeKHR srcComponentType_ ) VULKAN_HPP_NOEXCEPT + { + srcComponentType = srcComponentType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & + setDstComponentType( VULKAN_HPP_NAMESPACE::ComponentTypeKHR dstComponentType_ ) VULKAN_HPP_NOEXCEPT + { + dstComponentType = dstComponentType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setNumRows( uint32_t numRows_ ) VULKAN_HPP_NOEXCEPT + { + numRows = numRows_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setNumColumns( uint32_t numColumns_ ) VULKAN_HPP_NOEXCEPT + { + numColumns = numColumns_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & + setSrcLayout( VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV srcLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcLayout = srcLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setSrcStride( size_t srcStride_ ) VULKAN_HPP_NOEXCEPT + { + srcStride = srcStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & + setDstLayout( VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV dstLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstLayout = dstLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setDstStride( size_t dstStride_ ) VULKAN_HPP_NOEXCEPT + { + dstStride = dstStride_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkConvertCooperativeVectorMatrixInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkConvertCooperativeVectorMatrixInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkConvertCooperativeVectorMatrixInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkConvertCooperativeVectorMatrixInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# 
if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + srcSize, + srcData, + pDstSize, + dstData, + srcComponentType, + dstComponentType, + numRows, + numColumns, + srcLayout, + srcStride, + dstLayout, + dstStride ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConvertCooperativeVectorMatrixInfoNV; + const void * pNext = {}; + size_t srcSize = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR srcData = {}; + size_t * pDstSize = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dstData = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR srcComponentType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR dstComponentType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + uint32_t numRows = {}; + uint32_t numColumns = {}; + VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV srcLayout = VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV::eRowMajor; + size_t srcStride = {}; + VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV dstLayout = VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV::eRowMajor; + size_t dstStride = {}; + }; + + template <> + struct CppType + { + using Type = ConvertCooperativeVectorMatrixInfoNV; + }; + + // wrapper struct for struct VkCooperativeMatrixFlexibleDimensionsPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCooperativeMatrixFlexibleDimensionsPropertiesNV.html + struct CooperativeMatrixFlexibleDimensionsPropertiesNV + { + using NativeType = VkCooperativeMatrixFlexibleDimensionsPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CooperativeMatrixFlexibleDimensionsPropertiesNV( uint32_t MGranularity_ = {}, + uint32_t NGranularity_ = {}, + uint32_t KGranularity_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR ResultType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation_ = {}, + VULKAN_HPP_NAMESPACE::ScopeKHR scope_ = VULKAN_HPP_NAMESPACE::ScopeKHR::eDevice, + uint32_t workgroupInvocations_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , MGranularity{ MGranularity_ } + , NGranularity{ NGranularity_ } + , KGranularity{ KGranularity_ } + , AType{ AType_ } + , BType{ BType_ } + , CType{ CType_ } + , ResultType{ ResultType_ } + , saturatingAccumulation{ saturatingAccumulation_ } + , scope{ scope_ } + , workgroupInvocations{ workgroupInvocations_ } + { + } + + VULKAN_HPP_CONSTEXPR + CooperativeMatrixFlexibleDimensionsPropertiesNV( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CooperativeMatrixFlexibleDimensionsPropertiesNV( VkCooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CooperativeMatrixFlexibleDimensionsPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + CooperativeMatrixFlexibleDimensionsPropertiesNV & operator=( 
CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CooperativeMatrixFlexibleDimensionsPropertiesNV & operator=( VkCooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, MGranularity, NGranularity, KGranularity, AType, BType, CType, ResultType, saturatingAccumulation, scope, workgroupInvocations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CooperativeMatrixFlexibleDimensionsPropertiesNV const & ) const = default; +#else + bool operator==( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MGranularity == rhs.MGranularity ) && ( NGranularity == rhs.NGranularity ) && + ( KGranularity == rhs.KGranularity ) && ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && + ( ResultType == rhs.ResultType ) && ( saturatingAccumulation == rhs.saturatingAccumulation ) && ( scope == rhs.scope ) && + ( workgroupInvocations == rhs.workgroupInvocations ); +# endif + } + + bool operator!=( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV; + void * pNext = {}; + uint32_t MGranularity = {}; + uint32_t NGranularity = {}; + uint32_t KGranularity = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR AType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR BType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR CType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR ResultType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation = {}; + VULKAN_HPP_NAMESPACE::ScopeKHR scope = VULKAN_HPP_NAMESPACE::ScopeKHR::eDevice; + uint32_t workgroupInvocations = {}; + }; + + template <> + struct CppType + { + using Type = CooperativeMatrixFlexibleDimensionsPropertiesNV; + }; + + // wrapper struct for struct VkCooperativeMatrixPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCooperativeMatrixPropertiesKHR.html + struct CooperativeMatrixPropertiesKHR + { + using NativeType = VkCooperativeMatrixPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesKHR; + +#if 
!defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesKHR( uint32_t MSize_ = {}, + uint32_t NSize_ = {}, + uint32_t KSize_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR ResultType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation_ = {}, + VULKAN_HPP_NAMESPACE::ScopeKHR scope_ = VULKAN_HPP_NAMESPACE::ScopeKHR::eDevice, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , MSize{ MSize_ } + , NSize{ NSize_ } + , KSize{ KSize_ } + , AType{ AType_ } + , BType{ BType_ } + , CType{ CType_ } + , ResultType{ ResultType_ } + , saturatingAccumulation{ saturatingAccumulation_ } + , scope{ scope_ } + { + } + + VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesKHR( CooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CooperativeMatrixPropertiesKHR( VkCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CooperativeMatrixPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + CooperativeMatrixPropertiesKHR & operator=( CooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CooperativeMatrixPropertiesKHR & operator=( VkCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkCooperativeMatrixPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCooperativeMatrixPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCooperativeMatrixPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCooperativeMatrixPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, MSize, NSize, KSize, AType, BType, CType, ResultType, saturatingAccumulation, scope ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CooperativeMatrixPropertiesKHR const & ) const = default; +#else + bool operator==( CooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) && ( KSize == rhs.KSize ) && + ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && ( ResultType == rhs.ResultType ) && + ( saturatingAccumulation == rhs.saturatingAccumulation ) && ( scope == rhs.scope ); +# endif + } + + bool operator!=( CooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesKHR; + void * pNext = {}; + uint32_t MSize = {}; + uint32_t NSize = {}; + uint32_t KSize = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR AType = 
VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR BType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR CType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR ResultType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation = {}; + VULKAN_HPP_NAMESPACE::ScopeKHR scope = VULKAN_HPP_NAMESPACE::ScopeKHR::eDevice; + }; + + template <> + struct CppType + { + using Type = CooperativeMatrixPropertiesKHR; + }; + + // wrapper struct for struct VkCooperativeMatrixPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCooperativeMatrixPropertiesNV.html + struct CooperativeMatrixPropertiesNV + { + using NativeType = VkCooperativeMatrixPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( uint32_t MSize_ = {}, + uint32_t NSize_ = {}, + uint32_t KSize_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = {}, + VULKAN_HPP_NAMESPACE::ScopeNV scope_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , MSize{ MSize_ } + , NSize{ NSize_ } + , KSize{ KSize_ } + , AType{ AType_ } + , BType{ BType_ } + , CType{ CType_ } + , DType{ DType_ } + , scope{ scope_ } + { + } + + VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CooperativeMatrixPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCooperativeMatrixPropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCooperativeMatrixPropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, MSize, NSize, KSize, AType, BType, CType, DType, scope ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CooperativeMatrixPropertiesNV const & ) const = default; +#else + bool operator==( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) && ( KSize == rhs.KSize ) 
&& + ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && ( DType == rhs.DType ) && ( scope == rhs.scope ); +# endif + } + + bool operator!=( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV; + void * pNext = {}; + uint32_t MSize = {}; + uint32_t NSize = {}; + uint32_t KSize = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = {}; + VULKAN_HPP_NAMESPACE::ScopeNV scope = {}; + }; + + template <> + struct CppType + { + using Type = CooperativeMatrixPropertiesNV; + }; + + // wrapper struct for struct VkCooperativeVectorPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCooperativeVectorPropertiesNV.html + struct CooperativeVectorPropertiesNV + { + using NativeType = VkCooperativeVectorPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeVectorPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CooperativeVectorPropertiesNV( VULKAN_HPP_NAMESPACE::ComponentTypeKHR inputType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR inputInterpretation_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR matrixInterpretation_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR biasInterpretation_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR resultType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::Bool32 transpose_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , inputType{ inputType_ } + , inputInterpretation{ inputInterpretation_ } + , matrixInterpretation{ matrixInterpretation_ } + , biasInterpretation{ biasInterpretation_ } + , resultType{ resultType_ } + , transpose{ transpose_ } + { + } + + VULKAN_HPP_CONSTEXPR CooperativeVectorPropertiesNV( CooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CooperativeVectorPropertiesNV( VkCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CooperativeVectorPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + CooperativeVectorPropertiesNV & operator=( CooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CooperativeVectorPropertiesNV & operator=( VkCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setInputType( VULKAN_HPP_NAMESPACE::ComponentTypeKHR inputType_ ) VULKAN_HPP_NOEXCEPT + { + inputType = inputType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & + setInputInterpretation( VULKAN_HPP_NAMESPACE::ComponentTypeKHR inputInterpretation_ ) VULKAN_HPP_NOEXCEPT + { + 
inputInterpretation = inputInterpretation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & + setMatrixInterpretation( VULKAN_HPP_NAMESPACE::ComponentTypeKHR matrixInterpretation_ ) VULKAN_HPP_NOEXCEPT + { + matrixInterpretation = matrixInterpretation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & + setBiasInterpretation( VULKAN_HPP_NAMESPACE::ComponentTypeKHR biasInterpretation_ ) VULKAN_HPP_NOEXCEPT + { + biasInterpretation = biasInterpretation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setResultType( VULKAN_HPP_NAMESPACE::ComponentTypeKHR resultType_ ) VULKAN_HPP_NOEXCEPT + { + resultType = resultType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setTranspose( VULKAN_HPP_NAMESPACE::Bool32 transpose_ ) VULKAN_HPP_NOEXCEPT + { + transpose = transpose_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCooperativeVectorPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCooperativeVectorPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCooperativeVectorPropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCooperativeVectorPropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, inputType, inputInterpretation, matrixInterpretation, biasInterpretation, resultType, transpose ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CooperativeVectorPropertiesNV const & ) const = default; +#else + bool operator==( CooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inputType == rhs.inputType ) && ( inputInterpretation == rhs.inputInterpretation ) && + ( matrixInterpretation == rhs.matrixInterpretation ) && ( biasInterpretation == rhs.biasInterpretation ) && ( resultType == rhs.resultType ) && + ( transpose == rhs.transpose ); +# endif + } + + bool operator!=( CooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeVectorPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR inputType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR inputInterpretation = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR matrixInterpretation = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR biasInterpretation = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR resultType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::Bool32 transpose = {}; + }; + + template <> + struct CppType + { + using Type = CooperativeVectorPropertiesNV; + }; + + // wrapper struct for struct VkCopyAccelerationStructureInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyAccelerationStructureInfoKHR.html + struct 
CopyAccelerationStructureInfoKHR + { + using NativeType = VkCopyAccelerationStructureInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , src{ src_ } + , dst{ dst_ } + , mode{ mode_ } + { + } + + VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyAccelerationStructureInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + CopyAccelerationStructureInfoKHR & operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyAccelerationStructureInfoKHR & operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyAccelerationStructureInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCopyAccelerationStructureInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, src, dst, mode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyAccelerationStructureInfoKHR const & ) const = default; +#else + bool operator==( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && ( mode == rhs.mode ); +# endif + } + + bool operator!=( CopyAccelerationStructureInfoKHR const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + }; + + template <> + struct CppType + { + using Type = CopyAccelerationStructureInfoKHR; + }; + + // wrapper struct for struct VkCopyAccelerationStructureToMemoryInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyAccelerationStructureToMemoryInfoKHR.html + struct CopyAccelerationStructureToMemoryInfoKHR + { + using NativeType = VkCopyAccelerationStructureToMemoryInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , src{ src_ } + , dst{ dst_ } + , mode{ mode_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyAccelerationStructureToMemoryInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + CopyAccelerationStructureToMemoryInfoKHR & operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyAccelerationStructureToMemoryInfoKHR & operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyAccelerationStructureToMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyAccelerationStructureToMemoryInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + + operator VkCopyAccelerationStructureToMemoryInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, src, dst, mode ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + }; + + template <> + struct CppType + { + using Type = CopyAccelerationStructureToMemoryInfoKHR; + }; + + // wrapper struct for struct VkCopyBufferInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyBufferInfo2.html + struct CopyBufferInfo2 + { + using NativeType = VkCopyBufferInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferInfo2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyBufferInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcBuffer{ srcBuffer_ } + , dstBuffer{ dstBuffer_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } + { + } + + VULKAN_HPP_CONSTEXPR CopyBufferInfo2( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyBufferInfo2( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyBufferInfo2( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CopyBufferInfo2 & operator=( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyBufferInfo2 & operator=( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT + { + srcBuffer = srcBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT + { + dstBuffer = dstBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + 
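+    // Note: pRegions is expected to point at regionCount BufferCopy2 entries; the
+    // ArrayProxy-based setRegions() overload further below keeps both fields in sync.
+    // A minimal usage sketch, assuming the default `vk` namespace and hypothetical
+    // `cmd`, `src`, `dst`, `srcOffset`, `dstOffset` and `size` handles/values:
+    //   vk::BufferCopy2 region{ srcOffset, dstOffset, size };
+    //   cmd.copyBuffer2( vk::CopyBufferInfo2{}.setSrcBuffer( src ).setDstBuffer( dst ).setRegions( region ) );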
{ + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyBufferInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyBufferInfo2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCopyBufferInfo2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcBuffer, dstBuffer, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyBufferInfo2 const & ) const = default; +#else + bool operator==( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && ( dstBuffer == rhs.dstBuffer ) && + ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; + VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {}; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = CopyBufferInfo2; + }; + + using CopyBufferInfo2KHR = CopyBufferInfo2; + + // wrapper struct for struct VkCopyBufferToImageInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyBufferToImageInfo2.html + struct CopyBufferToImageInfo2 + { + using NativeType = VkCopyBufferToImageInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferToImageInfo2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcBuffer{ srcBuffer_ } + , dstImage{ dstImage_ } + , dstImageLayout{ dstImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } + { + } + + VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyBufferToImageInfo2( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyBufferToImageInfo2( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferToImageInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, 
+ VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , srcBuffer( srcBuffer_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CopyBufferToImageInfo2 & operator=( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyBufferToImageInfo2 & operator=( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT + { + srcBuffer = srcBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferToImageInfo2 & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyBufferToImageInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyBufferToImageInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyBufferToImageInfo2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCopyBufferToImageInfo2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyBufferToImageInfo2 const & ) const = default; +#else + bool operator==( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && ( dstImage == rhs.dstImage ) && + ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( 
CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferToImageInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = CopyBufferToImageInfo2; + }; + + using CopyBufferToImageInfo2KHR = CopyBufferToImageInfo2; + + // wrapper struct for struct VkCopyCommandTransformInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyCommandTransformInfoQCOM.html + struct CopyCommandTransformInfoQCOM + { + using NativeType = VkCopyCommandTransformInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyCommandTransformInfoQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CopyCommandTransformInfoQCOM( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , transform{ transform_ } + { + } + + VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyCommandTransformInfoQCOM( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyCommandTransformInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + + CopyCommandTransformInfoQCOM & operator=( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyCommandTransformInfoQCOM & operator=( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyCommandTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyCommandTransformInfoQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCopyCommandTransformInfoQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, transform ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyCommandTransformInfoQCOM const & ) const = default; +#else + bool operator==( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == 
rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ); +# endif + } + + bool operator!=( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyCommandTransformInfoQCOM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + }; + + template <> + struct CppType + { + using Type = CopyCommandTransformInfoQCOM; + }; + + // wrapper struct for struct VkCopyDescriptorSet, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyDescriptorSet.html + struct CopyDescriptorSet + { + using NativeType = VkCopyDescriptorSet; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, + uint32_t srcBinding_ = {}, + uint32_t srcArrayElement_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, + uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, + uint32_t descriptorCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcSet{ srcSet_ } + , srcBinding{ srcBinding_ } + , srcArrayElement{ srcArrayElement_ } + , dstSet{ dstSet_ } + , dstBinding{ dstBinding_ } + , dstArrayElement{ dstArrayElement_ } + , descriptorCount{ descriptorCount_ } + { + } + + VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT : CopyDescriptorSet( *reinterpret_cast( &rhs ) ) {} + + CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT + { + srcSet = srcSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT + { + srcBinding = srcBinding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT + { + srcArrayElement = srcArrayElement_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT + { + dstSet = dstSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT + { + dstBinding = dstBinding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT + { + dstArrayElement = dstArrayElement_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) 
VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyDescriptorSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyDescriptorSet const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCopyDescriptorSet *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcSet, srcBinding, srcArrayElement, dstSet, dstBinding, dstArrayElement, descriptorCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyDescriptorSet const & ) const = default; +#else + bool operator==( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSet == rhs.srcSet ) && ( srcBinding == rhs.srcBinding ) && + ( srcArrayElement == rhs.srcArrayElement ) && ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) && + ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ); +# endif + } + + bool operator!=( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {}; + uint32_t srcBinding = {}; + uint32_t srcArrayElement = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; + }; + + template <> + struct CppType + { + using Type = CopyDescriptorSet; + }; + + // wrapper struct for struct VkImageCopy2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCopy2.html + struct ImageCopy2 + { + using NativeType = VkImageCopy2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCopy2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcSubresource{ srcSubresource_ } + , srcOffset{ srcOffset_ } + , dstSubresource{ dstSubresource_ } + , dstOffset{ dstOffset_ } + , extent{ extent_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageCopy2( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCopy2( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy2( *reinterpret_cast( &rhs ) ) {} + + ImageCopy2 & operator=( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageCopy2 & operator=( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageCopy2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCopy2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCopy2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageCopy2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCopy2 const & ) const = default; +#else + bool operator==( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); +# endif + } + + bool operator!=( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCopy2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + }; + + template <> + struct CppType + { + using Type = ImageCopy2; + }; + + using ImageCopy2KHR = ImageCopy2; + + // wrapper struct for struct VkCopyImageInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyImageInfo2.html + struct CopyImageInfo2 + { + using NativeType = VkCopyImageInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = 
VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcImage{ srcImage_ } + , srcImageLayout{ srcImageLayout_ } + , dstImage{ dstImage_ } + , dstImageLayout{ dstImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } + { + } + + VULKAN_HPP_CONSTEXPR CopyImageInfo2( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageInfo2( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyImageInfo2( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CopyImageInfo2 & operator=( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyImageInfo2 & operator=( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyImageInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkCopyImageInfo2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCopyImageInfo2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyImageInfo2 const & ) const = default; +#else + bool operator==( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) && + ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = CopyImageInfo2; + }; + + using CopyImageInfo2KHR = CopyImageInfo2; + + // wrapper struct for struct VkCopyImageToBufferInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyImageToBufferInfo2.html + struct CopyImageToBufferInfo2 + { + using NativeType = VkCopyImageToBufferInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcImage{ srcImage_ } + , srcImageLayout{ srcImageLayout_ } + , dstBuffer{ dstBuffer_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } + { + } + + VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageToBufferInfo2( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyImageToBufferInfo2( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToBufferInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstBuffer( dstBuffer_ ) + , 
regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CopyImageToBufferInfo2 & operator=( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyImageToBufferInfo2 & operator=( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT + { + dstBuffer = dstBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToBufferInfo2 & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyImageToBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageToBufferInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageToBufferInfo2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCopyImageToBufferInfo2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyImageToBufferInfo2 const & ) const = default; +#else + bool operator==( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) && + ( dstBuffer == rhs.dstBuffer ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToBufferInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout 
srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {}; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = CopyImageToBufferInfo2; + }; + + using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2; + + // wrapper struct for struct VkCopyImageToImageInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyImageToImageInfo.html + struct CopyImageToImageInfo + { + using NativeType = VkCopyImageToImageInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToImageInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageToImageInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , srcImage{ srcImage_ } + , srcImageLayout{ srcImageLayout_ } + , dstImage{ dstImage_ } + , dstImageLayout{ dstImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } + { + } + + VULKAN_HPP_CONSTEXPR CopyImageToImageInfo( CopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageToImageInfo( VkCopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyImageToImageInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToImageInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_, + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CopyImageToImageInfo & operator=( CopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyImageToImageInfo & operator=( VkCopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout 
srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToImageInfo & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyImageToImageInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageToImageInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageToImageInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCopyImageToImageInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyImageToImageInfo const & ) const = default; +#else + bool operator==( CopyImageToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( srcImage == rhs.srcImage ) && + ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && + ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( CopyImageToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToImageInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = CopyImageToImageInfo; + }; + + using CopyImageToImageInfoEXT = CopyImageToImageInfo; + + // wrapper struct for struct VkImageToMemoryCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageToMemoryCopy.html + struct ImageToMemoryCopy + { + using NativeType = 
VkImageToMemoryCopy; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageToMemoryCopy; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageToMemoryCopy( void * pHostPointer_ = {}, + uint32_t memoryRowLength_ = {}, + uint32_t memoryImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pHostPointer{ pHostPointer_ } + , memoryRowLength{ memoryRowLength_ } + , memoryImageHeight{ memoryImageHeight_ } + , imageSubresource{ imageSubresource_ } + , imageOffset{ imageOffset_ } + , imageExtent{ imageExtent_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageToMemoryCopy( ImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageToMemoryCopy( VkImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT : ImageToMemoryCopy( *reinterpret_cast( &rhs ) ) {} + + ImageToMemoryCopy & operator=( ImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageToMemoryCopy & operator=( VkImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT + { + pHostPointer = pHostPointer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setMemoryRowLength( uint32_t memoryRowLength_ ) VULKAN_HPP_NOEXCEPT + { + memoryRowLength = memoryRowLength_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setMemoryImageHeight( uint32_t memoryImageHeight_ ) VULKAN_HPP_NOEXCEPT + { + memoryImageHeight = memoryImageHeight_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & + setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + { + imageOffset = imageOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageToMemoryCopy const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageToMemoryCopy &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageToMemoryCopy const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageToMemoryCopy *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pHostPointer, memoryRowLength, memoryImageHeight, imageSubresource, imageOffset, imageExtent ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageToMemoryCopy const & ) const = default; +#else + bool operator==( ImageToMemoryCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pHostPointer == rhs.pHostPointer ) && ( memoryRowLength == rhs.memoryRowLength ) && + ( memoryImageHeight == rhs.memoryImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && + ( imageExtent == rhs.imageExtent ); +# endif + } + + bool operator!=( ImageToMemoryCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageToMemoryCopy; + const void * pNext = {}; + void * pHostPointer = {}; + uint32_t memoryRowLength = {}; + uint32_t memoryImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + }; + + template <> + struct CppType + { + using Type = ImageToMemoryCopy; + }; + + using ImageToMemoryCopyEXT = ImageToMemoryCopy; + + // wrapper struct for struct VkCopyImageToMemoryInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyImageToMemoryInfo.html + struct CopyImageToMemoryInfo + { + using NativeType = VkCopyImageToMemoryInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToMemoryInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageToMemoryCopy * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , srcImage{ srcImage_ } + , srcImageLayout{ srcImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } + { + } + + VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfo( CopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageToMemoryInfo( VkCopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyImageToMemoryInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToMemoryInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_, + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CopyImageToMemoryInfo & operator=( CopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyImageToMemoryInfo & operator=( VkCopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
CopyImageToMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setPRegions( const VULKAN_HPP_NAMESPACE::ImageToMemoryCopy * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToMemoryInfo & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyImageToMemoryInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageToMemoryInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageToMemoryInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCopyImageToMemoryInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, srcImage, srcImageLayout, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyImageToMemoryInfo const & ) const = default; +#else + bool operator==( CopyImageToMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( srcImage == rhs.srcImage ) && + ( srcImageLayout == rhs.srcImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( CopyImageToMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToMemoryInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageToMemoryCopy * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = CopyImageToMemoryInfo; + }; + + using CopyImageToMemoryInfoEXT = CopyImageToMemoryInfo; + + // wrapper struct for struct VkCopyMemoryIndirectCommandNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryIndirectCommandNV.html + struct CopyMemoryIndirectCommandNV + { + using NativeType = 
VkCopyMemoryIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyMemoryIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAddress{ srcAddress_ } + , dstAddress{ dstAddress_ } + , size{ size_ } + { + } + + VULKAN_HPP_CONSTEXPR CopyMemoryIndirectCommandNV( CopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMemoryIndirectCommandNV( VkCopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMemoryIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + CopyMemoryIndirectCommandNV & operator=( CopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyMemoryIndirectCommandNV & operator=( VkCopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandNV & setSrcAddress( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT + { + srcAddress = srcAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandNV & setDstAddress( VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ ) VULKAN_HPP_NOEXCEPT + { + dstAddress = dstAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandNV & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyMemoryIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCopyMemoryIndirectCommandNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcAddress, dstAddress, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyMemoryIndirectCommandNV const & ) const = default; +#else + bool operator==( CopyMemoryIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcAddress == rhs.srcAddress ) && ( dstAddress == rhs.dstAddress ) && ( size == rhs.size ); +# endif + } + + bool operator!=( CopyMemoryIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + // wrapper struct for struct VkCopyMemoryToAccelerationStructureInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryToAccelerationStructureInfoKHR.html + struct CopyMemoryToAccelerationStructureInfoKHR + { + using NativeType = VkCopyMemoryToAccelerationStructureInfoKHR; + + static const bool allowDuplicate = false; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR( + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , src{ src_ } + , dst{ dst_ } + , mode{ mode_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMemoryToAccelerationStructureInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + CopyMemoryToAccelerationStructureInfoKHR & operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyMemoryToAccelerationStructureInfoKHR & operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & + setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyMemoryToAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryToAccelerationStructureInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCopyMemoryToAccelerationStructureInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, src, dst, mode ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + }; + + template <> + struct CppType + { + using Type = CopyMemoryToAccelerationStructureInfoKHR; + }; + + // wrapper struct for struct 
VkCopyMemoryToImageIndirectCommandNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryToImageIndirectCommandNV.html + struct CopyMemoryToImageIndirectCommandNV + { + using NativeType = VkCopyMemoryToImageIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ = {}, + uint32_t bufferRowLength_ = {}, + uint32_t bufferImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAddress{ srcAddress_ } + , bufferRowLength{ bufferRowLength_ } + , bufferImageHeight{ bufferImageHeight_ } + , imageSubresource{ imageSubresource_ } + , imageOffset{ imageOffset_ } + , imageExtent{ imageExtent_ } + { + } + + VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectCommandNV( CopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMemoryToImageIndirectCommandNV( VkCopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMemoryToImageIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + CopyMemoryToImageIndirectCommandNV & operator=( CopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyMemoryToImageIndirectCommandNV & operator=( VkCopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setSrcAddress( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT + { + srcAddress = srcAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT + { + bufferRowLength = bufferRowLength_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT + { + bufferImageHeight = bufferImageHeight_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & + setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + { + imageOffset = imageOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyMemoryToImageIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryToImageIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryToImageIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCopyMemoryToImageIndirectCommandNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) 
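+    // Illustrative usage sketch (not part of the generated header): CopyMemoryToImageIndirectCommandNV
+    // records are typically written by the application into a buffer whose device address is later
+    // handed to CommandBuffer::copyMemoryToImageIndirectNV. "srcDeviceAddress", "mappedCommands",
+    // "width" and "height" below are placeholders assumed to exist elsewhere.
+    //
+    //   vk::CopyMemoryToImageIndirectCommandNV command{};
+    //   command.setSrcAddress( srcDeviceAddress )
+    //          .setImageSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
+    //          .setImageExtent( { width, height, 1 } );
+    //   mappedCommands[0] = command;   // mappedCommands points into a host-visible staging buffer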
+# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcAddress, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyMemoryToImageIndirectCommandNV const & ) const = default; +#else + bool operator==( CopyMemoryToImageIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcAddress == rhs.srcAddress ) && ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) && + ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent ); +# endif + } + + bool operator!=( CopyMemoryToImageIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress = {}; + uint32_t bufferRowLength = {}; + uint32_t bufferImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + }; + + // wrapper struct for struct VkMemoryToImageCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryToImageCopy.html + struct MemoryToImageCopy + { + using NativeType = VkMemoryToImageCopy; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryToImageCopy; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryToImageCopy( const void * pHostPointer_ = {}, + uint32_t memoryRowLength_ = {}, + uint32_t memoryImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pHostPointer{ pHostPointer_ } + , memoryRowLength{ memoryRowLength_ } + , memoryImageHeight{ memoryImageHeight_ } + , imageSubresource{ imageSubresource_ } + , imageOffset{ imageOffset_ } + , imageExtent{ imageExtent_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryToImageCopy( MemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryToImageCopy( VkMemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryToImageCopy( *reinterpret_cast( &rhs ) ) {} + + MemoryToImageCopy & operator=( MemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryToImageCopy & operator=( VkMemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setPHostPointer( const void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT + { + pHostPointer = pHostPointer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setMemoryRowLength( uint32_t memoryRowLength_ ) VULKAN_HPP_NOEXCEPT + { + memoryRowLength = memoryRowLength_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setMemoryImageHeight( 
uint32_t memoryImageHeight_ ) VULKAN_HPP_NOEXCEPT + { + memoryImageHeight = memoryImageHeight_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & + setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + { + imageOffset = imageOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryToImageCopy const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryToImageCopy &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryToImageCopy const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryToImageCopy *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pHostPointer, memoryRowLength, memoryImageHeight, imageSubresource, imageOffset, imageExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryToImageCopy const & ) const = default; +#else + bool operator==( MemoryToImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pHostPointer == rhs.pHostPointer ) && ( memoryRowLength == rhs.memoryRowLength ) && + ( memoryImageHeight == rhs.memoryImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && + ( imageExtent == rhs.imageExtent ); +# endif + } + + bool operator!=( MemoryToImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryToImageCopy; + const void * pNext = {}; + const void * pHostPointer = {}; + uint32_t memoryRowLength = {}; + uint32_t memoryImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + }; + + template <> + struct CppType + { + using Type = MemoryToImageCopy; + }; + + using MemoryToImageCopyEXT = MemoryToImageCopy; + + // wrapper struct for struct VkCopyMemoryToImageInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryToImageInfo.html + struct CopyMemoryToImageInfo + { + using NativeType = VkCopyMemoryToImageInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToImageInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const 
VULKAN_HPP_NAMESPACE::MemoryToImageCopy * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , dstImage{ dstImage_ } + , dstImageLayout{ dstImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } + { + } + + VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfo( CopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMemoryToImageInfo( VkCopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMemoryToImageInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyMemoryToImageInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CopyMemoryToImageInfo & operator=( CopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyMemoryToImageInfo & operator=( VkCopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setPRegions( const VULKAN_HPP_NAMESPACE::MemoryToImageCopy * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyMemoryToImageInfo & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyMemoryToImageInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryToImageInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryToImageInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCopyMemoryToImageInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, dstImage, 
dstImageLayout, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyMemoryToImageInfo const & ) const = default; +#else + bool operator==( CopyMemoryToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dstImage == rhs.dstImage ) && + ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( CopyMemoryToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToImageInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags = {}; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::MemoryToImageCopy * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = CopyMemoryToImageInfo; + }; + + using CopyMemoryToImageInfoEXT = CopyMemoryToImageInfo; + + // wrapper struct for struct VkCopyMemoryToMicromapInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryToMicromapInfoEXT.html + struct CopyMemoryToMicromapInfoEXT + { + using NativeType = VkCopyMemoryToMicromapInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToMicromapInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, + VULKAN_HPP_NAMESPACE::MicromapEXT dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , src{ src_ } + , dst{ dst_ } + , mode{ mode_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT( CopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMemoryToMicromapInfoEXT( VkCopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMemoryToMicromapInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + CopyMemoryToMicromapInfoEXT & operator=( CopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyMemoryToMicromapInfoEXT & operator=( VkCopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setDst( VULKAN_HPP_NAMESPACE::MicromapEXT dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT + { 
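+      // Illustrative usage sketch (not part of the generated header), for the CopyMemoryToImageInfo
+      // structure defined above: a host-side upload of pixel data into an image via
+      // Device::copyMemoryToImage (host image copy). "device", "dstImage", "pixels", "width" and
+      // "height" are placeholders assumed to exist elsewhere; the destination layout is an example.
+      //
+      //   vk::MemoryToImageCopy region{};
+      //   region.setPHostPointer( pixels.data() )
+      //         .setImageSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
+      //         .setImageExtent( { width, height, 1 } );
+      //
+      //   vk::CopyMemoryToImageInfo copyInfo{};
+      //   copyInfo.setDstImage( dstImage )
+      //           .setDstImageLayout( vk::ImageLayout::eGeneral )
+      //           .setRegions( region );
+      //
+      //   device.copyMemoryToImage( copyInfo );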
+ mode = mode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyMemoryToMicromapInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryToMicromapInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryToMicromapInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCopyMemoryToMicromapInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, src, dst, mode ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToMicromapInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {}; + VULKAN_HPP_NAMESPACE::MicromapEXT dst = {}; + VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone; + }; + + template <> + struct CppType + { + using Type = CopyMemoryToMicromapInfoEXT; + }; + + // wrapper struct for struct VkCopyMicromapInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMicromapInfoEXT.html + struct CopyMicromapInfoEXT + { + using NativeType = VkCopyMicromapInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMicromapInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyMicromapInfoEXT( VULKAN_HPP_NAMESPACE::MicromapEXT src_ = {}, + VULKAN_HPP_NAMESPACE::MicromapEXT dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , src{ src_ } + , dst{ dst_ } + , mode{ mode_ } + { + } + + VULKAN_HPP_CONSTEXPR CopyMicromapInfoEXT( CopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMicromapInfoEXT( VkCopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : CopyMicromapInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + CopyMicromapInfoEXT & operator=( CopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyMicromapInfoEXT & operator=( VkCopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::MicromapEXT src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setDst( VULKAN_HPP_NAMESPACE::MicromapEXT dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyMicromapInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMicromapInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkCopyMicromapInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCopyMicromapInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, src, dst, mode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyMicromapInfoEXT const & ) const = default; +#else + bool operator==( CopyMicromapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && ( mode == rhs.mode ); +# endif + } + + bool operator!=( CopyMicromapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMicromapInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MicromapEXT src = {}; + VULKAN_HPP_NAMESPACE::MicromapEXT dst = {}; + VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone; + }; + + template <> + struct CppType + { + using Type = CopyMicromapInfoEXT; + }; + + // wrapper struct for struct VkCopyMicromapToMemoryInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMicromapToMemoryInfoEXT.html + struct CopyMicromapToMemoryInfoEXT + { + using NativeType = VkCopyMicromapToMemoryInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMicromapToMemoryInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT( VULKAN_HPP_NAMESPACE::MicromapEXT src_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , src{ src_ } + , dst{ dst_ } + , mode{ mode_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT( CopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMicromapToMemoryInfoEXT( VkCopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMicromapToMemoryInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + CopyMicromapToMemoryInfoEXT & operator=( CopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyMicromapToMemoryInfoEXT & operator=( VkCopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::MicromapEXT src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & 
setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyMicromapToMemoryInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMicromapToMemoryInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMicromapToMemoryInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCopyMicromapToMemoryInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, src, dst, mode ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMicromapToMemoryInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MicromapEXT src = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone; + }; + + template <> + struct CppType + { + using Type = CopyMicromapToMemoryInfoEXT; + }; + + // wrapper struct for struct VkCuFunctionCreateInfoNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuFunctionCreateInfoNVX.html + struct CuFunctionCreateInfoNVX + { + using NativeType = VkCuFunctionCreateInfoNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuFunctionCreateInfoNVX; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CuFunctionCreateInfoNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ = {}, const char * pName_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , module{ module_ } + , pName{ pName_ } + { + } + + VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuFunctionCreateInfoNVX( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : CuFunctionCreateInfoNVX( *reinterpret_cast( &rhs ) ) + { + } + + CuFunctionCreateInfoNVX & operator=( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CuFunctionCreateInfoNVX & operator=( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setModule( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ ) VULKAN_HPP_NOEXCEPT + { + module = module_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT + { + pName = pName_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCuFunctionCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCuFunctionCreateInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCuFunctionCreateInfoNVX const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCuFunctionCreateInfoNVX *() 
VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, module, pName ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = module <=> rhs.module; cmp != 0 ) + return cmp; + if ( pName != rhs.pName ) + if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ); + } + + bool operator!=( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuFunctionCreateInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CuModuleNVX module = {}; + const char * pName = {}; + }; + + template <> + struct CppType + { + using Type = CuFunctionCreateInfoNVX; + }; + + // wrapper struct for struct VkCuLaunchInfoNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuLaunchInfoNVX.html + struct CuLaunchInfoNVX + { + using NativeType = VkCuLaunchInfoNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuLaunchInfoNVX; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ = {}, + uint32_t gridDimX_ = {}, + uint32_t gridDimY_ = {}, + uint32_t gridDimZ_ = {}, + uint32_t blockDimX_ = {}, + uint32_t blockDimY_ = {}, + uint32_t blockDimZ_ = {}, + uint32_t sharedMemBytes_ = {}, + size_t paramCount_ = {}, + const void * const * pParams_ = {}, + size_t extraCount_ = {}, + const void * const * pExtras_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , function{ function_ } + , gridDimX{ gridDimX_ } + , gridDimY{ gridDimY_ } + , gridDimZ{ gridDimZ_ } + , blockDimX{ blockDimX_ } + , blockDimY{ blockDimY_ } + , blockDimZ{ blockDimZ_ } + , sharedMemBytes{ sharedMemBytes_ } + , paramCount{ paramCount_ } + , pParams{ pParams_ } + , extraCount{ extraCount_ } + , pExtras{ pExtras_ } + { + } + + VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuLaunchInfoNVX( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT : CuLaunchInfoNVX( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CuLaunchInfoNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_, + uint32_t gridDimX_, + uint32_t gridDimY_, + uint32_t gridDimZ_, + uint32_t blockDimX_, + uint32_t blockDimY_, + uint32_t blockDimZ_, + uint32_t sharedMemBytes_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & params_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & extras_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , function( function_ ) + , gridDimX( gridDimX_ ) + , 
gridDimY( gridDimY_ ) + , gridDimZ( gridDimZ_ ) + , blockDimX( blockDimX_ ) + , blockDimY( blockDimY_ ) + , blockDimZ( blockDimZ_ ) + , sharedMemBytes( sharedMemBytes_ ) + , paramCount( params_.size() ) + , pParams( params_.data() ) + , extraCount( extras_.size() ) + , pExtras( extras_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CuLaunchInfoNVX & operator=( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CuLaunchInfoNVX & operator=( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setFunction( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ ) VULKAN_HPP_NOEXCEPT + { + function = function_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimX( uint32_t gridDimX_ ) VULKAN_HPP_NOEXCEPT + { + gridDimX = gridDimX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimY( uint32_t gridDimY_ ) VULKAN_HPP_NOEXCEPT + { + gridDimY = gridDimY_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimZ( uint32_t gridDimZ_ ) VULKAN_HPP_NOEXCEPT + { + gridDimZ = gridDimZ_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimX( uint32_t blockDimX_ ) VULKAN_HPP_NOEXCEPT + { + blockDimX = blockDimX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimY( uint32_t blockDimY_ ) VULKAN_HPP_NOEXCEPT + { + blockDimY = blockDimY_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimZ( uint32_t blockDimZ_ ) VULKAN_HPP_NOEXCEPT + { + blockDimZ = blockDimZ_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setSharedMemBytes( uint32_t sharedMemBytes_ ) VULKAN_HPP_NOEXCEPT + { + sharedMemBytes = sharedMemBytes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setParamCount( size_t paramCount_ ) VULKAN_HPP_NOEXCEPT + { + paramCount = paramCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPParams( const void * const * pParams_ ) VULKAN_HPP_NOEXCEPT + { + pParams = pParams_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CuLaunchInfoNVX & setParams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & params_ ) VULKAN_HPP_NOEXCEPT + { + paramCount = params_.size(); + pParams = params_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setExtraCount( size_t extraCount_ ) VULKAN_HPP_NOEXCEPT + { + extraCount = extraCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPExtras( const void * const * pExtras_ ) VULKAN_HPP_NOEXCEPT + { + pExtras = pExtras_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CuLaunchInfoNVX & setExtras( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & extras_ ) VULKAN_HPP_NOEXCEPT + { + extraCount = extras_.size(); + pExtras = extras_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCuLaunchInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCuLaunchInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + 
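+    // Illustrative usage sketch (not part of the generated header): dispatching a previously
+    // imported CuFunctionNVX from a command buffer. "cmdBuffer", "cuFunction", "arg0" and "arg1"
+    // are placeholders; grid/block dimensions are arbitrary example values.
+    //
+    //   std::vector<const void *> kernelParams = { &arg0, &arg1 };
+    //   vk::CuLaunchInfoNVX launchInfo( cuFunction,
+    //                                   64, 1, 1,    // gridDimX/Y/Z
+    //                                   256, 1, 1,   // blockDimX/Y/Z
+    //                                   0,           // sharedMemBytes
+    //                                   kernelParams );
+    //   cmdBuffer.cuLaunchKernelNVX( launchInfo );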
} + + operator VkCuLaunchInfoNVX const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCuLaunchInfoNVX *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, function, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, paramCount, pParams, extraCount, pExtras ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CuLaunchInfoNVX const & ) const = default; +#else + bool operator==( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( function == rhs.function ) && ( gridDimX == rhs.gridDimX ) && ( gridDimY == rhs.gridDimY ) && + ( gridDimZ == rhs.gridDimZ ) && ( blockDimX == rhs.blockDimX ) && ( blockDimY == rhs.blockDimY ) && ( blockDimZ == rhs.blockDimZ ) && + ( sharedMemBytes == rhs.sharedMemBytes ) && ( paramCount == rhs.paramCount ) && ( pParams == rhs.pParams ) && ( extraCount == rhs.extraCount ) && + ( pExtras == rhs.pExtras ); +# endif + } + + bool operator!=( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuLaunchInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CuFunctionNVX function = {}; + uint32_t gridDimX = {}; + uint32_t gridDimY = {}; + uint32_t gridDimZ = {}; + uint32_t blockDimX = {}; + uint32_t blockDimY = {}; + uint32_t blockDimZ = {}; + uint32_t sharedMemBytes = {}; + size_t paramCount = {}; + const void * const * pParams = {}; + size_t extraCount = {}; + const void * const * pExtras = {}; + }; + + template <> + struct CppType + { + using Type = CuLaunchInfoNVX; + }; + + // wrapper struct for struct VkCuModuleCreateInfoNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuModuleCreateInfoNVX.html + struct CuModuleCreateInfoNVX + { + using NativeType = VkCuModuleCreateInfoNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuModuleCreateInfoNVX; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( size_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dataSize{ dataSize_ } + , pData{ pData_ } + { + } + + VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuModuleCreateInfoNVX( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : CuModuleCreateInfoNVX( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + CuModuleCreateInfoNVX( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CuModuleCreateInfoNVX & operator=( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CuModuleCreateInfoNVX & operator=( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = 
*reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = dataSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT + { + pData = pData_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + CuModuleCreateInfoNVX & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = data_.size() * sizeof( T ); + pData = data_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCuModuleCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCuModuleCreateInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCuModuleCreateInfoNVX const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCuModuleCreateInfoNVX *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dataSize, pData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CuModuleCreateInfoNVX const & ) const = default; +#else + bool operator==( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); +# endif + } + + bool operator!=( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuModuleCreateInfoNVX; + const void * pNext = {}; + size_t dataSize = {}; + const void * pData = {}; + }; + + template <> + struct CppType + { + using Type = CuModuleCreateInfoNVX; + }; + + // wrapper struct for struct VkCuModuleTexturingModeCreateInfoNVX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuModuleTexturingModeCreateInfoNVX.html + struct CuModuleTexturingModeCreateInfoNVX + { + using NativeType = VkCuModuleTexturingModeCreateInfoNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuModuleTexturingModeCreateInfoNVX; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CuModuleTexturingModeCreateInfoNVX( VULKAN_HPP_NAMESPACE::Bool32 use64bitTexturing_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , use64bitTexturing{ use64bitTexturing_ } + { + } + + VULKAN_HPP_CONSTEXPR CuModuleTexturingModeCreateInfoNVX( CuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuModuleTexturingModeCreateInfoNVX( VkCuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : CuModuleTexturingModeCreateInfoNVX( *reinterpret_cast( &rhs ) ) + { + } + + 
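+    // Illustrative usage sketch (not part of the generated header): this structure can be chained
+    // into CuModuleCreateInfoNVX via pNext when importing a CUDA binary with VK_NVX_binary_import.
+    // "device" and "cubinData" below are placeholders assumed to exist elsewhere.
+    //
+    //   vk::CuModuleTexturingModeCreateInfoNVX texturingMode{};
+    //   texturingMode.setUse64bitTexturing( VK_TRUE );
+    //
+    //   vk::CuModuleCreateInfoNVX moduleCreateInfo{};
+    //   moduleCreateInfo.setDataSize( cubinData.size() )
+    //                   .setPData( cubinData.data() )
+    //                   .setPNext( &texturingMode );
+    //
+    //   vk::CuModuleNVX cuModule = device.createCuModuleNVX( moduleCreateInfo );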
CuModuleTexturingModeCreateInfoNVX & operator=( CuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CuModuleTexturingModeCreateInfoNVX & operator=( VkCuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CuModuleTexturingModeCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuModuleTexturingModeCreateInfoNVX & setUse64bitTexturing( VULKAN_HPP_NAMESPACE::Bool32 use64bitTexturing_ ) VULKAN_HPP_NOEXCEPT + { + use64bitTexturing = use64bitTexturing_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCuModuleTexturingModeCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCuModuleTexturingModeCreateInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCuModuleTexturingModeCreateInfoNVX const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCuModuleTexturingModeCreateInfoNVX *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, use64bitTexturing ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CuModuleTexturingModeCreateInfoNVX const & ) const = default; +#else + bool operator==( CuModuleTexturingModeCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( use64bitTexturing == rhs.use64bitTexturing ); +# endif + } + + bool operator!=( CuModuleTexturingModeCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuModuleTexturingModeCreateInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 use64bitTexturing = {}; + }; + + template <> + struct CppType + { + using Type = CuModuleTexturingModeCreateInfoNVX; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + // wrapper struct for struct VkCudaFunctionCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCudaFunctionCreateInfoNV.html + struct CudaFunctionCreateInfoNV + { + using NativeType = VkCudaFunctionCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCudaFunctionCreateInfoNV; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CudaFunctionCreateInfoNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module_ = {}, const char * pName_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , module{ module_ } + , pName{ pName_ } + { + } + + VULKAN_HPP_CONSTEXPR CudaFunctionCreateInfoNV( CudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CudaFunctionCreateInfoNV( VkCudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CudaFunctionCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + CudaFunctionCreateInfoNV & operator=( 
CudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CudaFunctionCreateInfoNV & operator=( VkCudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setModule( VULKAN_HPP_NAMESPACE::CudaModuleNV module_ ) VULKAN_HPP_NOEXCEPT + { + module = module_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT + { + pName = pName_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCudaFunctionCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCudaFunctionCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCudaFunctionCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCudaFunctionCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, module, pName ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = module <=> rhs.module; cmp != 0 ) + return cmp; + if ( pName != rhs.pName ) + if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ); + } + + bool operator!=( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCudaFunctionCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CudaModuleNV module = {}; + const char * pName = {}; + }; + + template <> + struct CppType + { + using Type = CudaFunctionCreateInfoNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + // wrapper struct for struct VkCudaLaunchInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCudaLaunchInfoNV.html + struct CudaLaunchInfoNV + { + using NativeType = VkCudaLaunchInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCudaLaunchInfoNV; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CudaLaunchInfoNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function_ = {}, + uint32_t gridDimX_ = {}, + uint32_t gridDimY_ = {}, + uint32_t gridDimZ_ = {}, + uint32_t blockDimX_ = {}, + uint32_t blockDimY_ = {}, + uint32_t blockDimZ_ = {}, + uint32_t sharedMemBytes_ = {}, + size_t paramCount_ = {}, + const void * const * pParams_ = {}, + size_t extraCount_ = {}, + const void * const * pExtras_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , function{ function_ } + , gridDimX{ gridDimX_ } + , gridDimY{ gridDimY_ } + , gridDimZ{ gridDimZ_ } + , blockDimX{ blockDimX_ } + , blockDimY{ blockDimY_ } + , blockDimZ{ blockDimZ_ } + , sharedMemBytes{ sharedMemBytes_ } + , paramCount{ paramCount_ } + , pParams{ pParams_ } + , extraCount{ extraCount_ } + , pExtras{ pExtras_ } + { + } + + VULKAN_HPP_CONSTEXPR CudaLaunchInfoNV( CudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CudaLaunchInfoNV( VkCudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : CudaLaunchInfoNV( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CudaLaunchInfoNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function_, + uint32_t gridDimX_, + uint32_t gridDimY_, + uint32_t gridDimZ_, + uint32_t blockDimX_, + uint32_t blockDimY_, + uint32_t blockDimZ_, + uint32_t sharedMemBytes_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & params_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & extras_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , function( function_ ) + , gridDimX( gridDimX_ ) + , gridDimY( gridDimY_ ) + , gridDimZ( gridDimZ_ ) + , blockDimX( blockDimX_ ) + , blockDimY( blockDimY_ ) + , blockDimZ( blockDimZ_ ) + , sharedMemBytes( sharedMemBytes_ ) + , paramCount( params_.size() ) + , pParams( params_.data() ) + , extraCount( extras_.size() ) + , pExtras( extras_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CudaLaunchInfoNV & operator=( CudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CudaLaunchInfoNV & operator=( VkCudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# 
if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setFunction( VULKAN_HPP_NAMESPACE::CudaFunctionNV function_ ) VULKAN_HPP_NOEXCEPT + { + function = function_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimX( uint32_t gridDimX_ ) VULKAN_HPP_NOEXCEPT + { + gridDimX = gridDimX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimY( uint32_t gridDimY_ ) VULKAN_HPP_NOEXCEPT + { + gridDimY = gridDimY_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimZ( uint32_t gridDimZ_ ) VULKAN_HPP_NOEXCEPT + { + gridDimZ = gridDimZ_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimX( uint32_t blockDimX_ ) VULKAN_HPP_NOEXCEPT + { + blockDimX = blockDimX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimY( uint32_t blockDimY_ ) VULKAN_HPP_NOEXCEPT + { + blockDimY = blockDimY_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimZ( uint32_t blockDimZ_ ) VULKAN_HPP_NOEXCEPT + { + blockDimZ = blockDimZ_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setSharedMemBytes( uint32_t sharedMemBytes_ ) VULKAN_HPP_NOEXCEPT + { + sharedMemBytes = sharedMemBytes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setParamCount( size_t paramCount_ ) VULKAN_HPP_NOEXCEPT + { + paramCount = paramCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPParams( const void * const * pParams_ ) VULKAN_HPP_NOEXCEPT + { + pParams = pParams_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CudaLaunchInfoNV & setParams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & params_ ) VULKAN_HPP_NOEXCEPT + { + paramCount = params_.size(); + pParams = params_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setExtraCount( size_t extraCount_ ) VULKAN_HPP_NOEXCEPT + { + extraCount = extraCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPExtras( const void * const * pExtras_ ) VULKAN_HPP_NOEXCEPT + { + pExtras = pExtras_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CudaLaunchInfoNV & setExtras( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & extras_ ) VULKAN_HPP_NOEXCEPT + { + extraCount = extras_.size(); + pExtras = extras_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCudaLaunchInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCudaLaunchInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCudaLaunchInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCudaLaunchInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, function, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, paramCount, pParams, extraCount, pExtras ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto 
operator<=>( CudaLaunchInfoNV const & ) const = default; +# else + bool operator==( CudaLaunchInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( function == rhs.function ) && ( gridDimX == rhs.gridDimX ) && ( gridDimY == rhs.gridDimY ) && + ( gridDimZ == rhs.gridDimZ ) && ( blockDimX == rhs.blockDimX ) && ( blockDimY == rhs.blockDimY ) && ( blockDimZ == rhs.blockDimZ ) && + ( sharedMemBytes == rhs.sharedMemBytes ) && ( paramCount == rhs.paramCount ) && ( pParams == rhs.pParams ) && ( extraCount == rhs.extraCount ) && + ( pExtras == rhs.pExtras ); +# endif + } + + bool operator!=( CudaLaunchInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCudaLaunchInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CudaFunctionNV function = {}; + uint32_t gridDimX = {}; + uint32_t gridDimY = {}; + uint32_t gridDimZ = {}; + uint32_t blockDimX = {}; + uint32_t blockDimY = {}; + uint32_t blockDimZ = {}; + uint32_t sharedMemBytes = {}; + size_t paramCount = {}; + const void * const * pParams = {}; + size_t extraCount = {}; + const void * const * pExtras = {}; + }; + + template <> + struct CppType + { + using Type = CudaLaunchInfoNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + // wrapper struct for struct VkCudaModuleCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCudaModuleCreateInfoNV.html + struct CudaModuleCreateInfoNV + { + using NativeType = VkCudaModuleCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCudaModuleCreateInfoNV; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CudaModuleCreateInfoNV( size_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dataSize{ dataSize_ } + , pData{ pData_ } + { + } + + VULKAN_HPP_CONSTEXPR CudaModuleCreateInfoNV( CudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CudaModuleCreateInfoNV( VkCudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CudaModuleCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + CudaModuleCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CudaModuleCreateInfoNV & operator=( CudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CudaModuleCreateInfoNV & operator=( VkCudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = dataSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setPData( const void 
* pData_ ) VULKAN_HPP_NOEXCEPT + { + pData = pData_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + CudaModuleCreateInfoNV & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = data_.size() * sizeof( T ); + pData = data_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCudaModuleCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCudaModuleCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCudaModuleCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkCudaModuleCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dataSize, pData ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CudaModuleCreateInfoNV const & ) const = default; +# else + bool operator==( CudaModuleCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); +# endif + } + + bool operator!=( CudaModuleCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCudaModuleCreateInfoNV; + const void * pNext = {}; + size_t dataSize = {}; + const void * pData = {}; + }; + + template <> + struct CppType + { + using Type = CudaModuleCreateInfoNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + // wrapper struct for struct VkD3D12FenceSubmitInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkD3D12FenceSubmitInfoKHR.html + struct D3D12FenceSubmitInfoKHR + { + using NativeType = VkD3D12FenceSubmitInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = {}, + const uint64_t * pWaitSemaphoreValues_ = {}, + uint32_t signalSemaphoreValuesCount_ = {}, + const uint64_t * pSignalSemaphoreValues_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , waitSemaphoreValuesCount{ waitSemaphoreValuesCount_ } + , pWaitSemaphoreValues{ pWaitSemaphoreValues_ } + , signalSemaphoreValuesCount{ signalSemaphoreValuesCount_ } + , pSignalSemaphoreValues{ pSignalSemaphoreValues_ } + { + } + + VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : D3D12FenceSubmitInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + D3D12FenceSubmitInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ = {}, + const void 
* pNext_ = nullptr ) + : pNext( pNext_ ) + , waitSemaphoreValuesCount( static_cast( waitSemaphoreValues_.size() ) ) + , pWaitSemaphoreValues( waitSemaphoreValues_.data() ) + , signalSemaphoreValuesCount( static_cast( signalSemaphoreValues_.size() ) ) + , pSignalSemaphoreValues( signalSemaphoreValues_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreValuesCount = waitSemaphoreValuesCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphoreValues = pWaitSemaphoreValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + D3D12FenceSubmitInfoKHR & + setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreValuesCount = static_cast( waitSemaphoreValues_.size() ); + pWaitSemaphoreValues = waitSemaphoreValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreValuesCount = signalSemaphoreValuesCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphoreValues = pSignalSemaphoreValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + D3D12FenceSubmitInfoKHR & + setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreValuesCount = static_cast( signalSemaphoreValues_.size() ); + pSignalSemaphoreValues = signalSemaphoreValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkD3D12FenceSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkD3D12FenceSubmitInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkD3D12FenceSubmitInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, waitSemaphoreValuesCount, pWaitSemaphoreValues, signalSemaphoreValuesCount, pSignalSemaphoreValues ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( D3D12FenceSubmitInfoKHR const & ) const = default; +# else + bool operator==( 
D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount ) && + ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount ) && + ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); +# endif + } + + bool operator!=( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR; + const void * pNext = {}; + uint32_t waitSemaphoreValuesCount = {}; + const uint64_t * pWaitSemaphoreValues = {}; + uint32_t signalSemaphoreValuesCount = {}; + const uint64_t * pSignalSemaphoreValues = {}; + }; + + template <> + struct CppType + { + using Type = D3D12FenceSubmitInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + // wrapper struct for struct VkDebugMarkerMarkerInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugMarkerMarkerInfoEXT.html + struct DebugMarkerMarkerInfoEXT + { + using NativeType = VkDebugMarkerMarkerInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + DebugMarkerMarkerInfoEXT( const char * pMarkerName_ = {}, std::array const & color_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pMarkerName{ pMarkerName_ } + , color{ color_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugMarkerMarkerInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + DebugMarkerMarkerInfoEXT & operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPMarkerName( const char * pMarkerName_ ) VULKAN_HPP_NOEXCEPT + { + pMarkerName = pMarkerName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setColor( std::array color_ ) VULKAN_HPP_NOEXCEPT + { + color = color_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDebugMarkerMarkerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugMarkerMarkerInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDebugMarkerMarkerInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + 
reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pMarkerName, color ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::partial_ordering operator<=>( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( pMarkerName != rhs.pMarkerName ) + if ( auto cmp = strcmp( pMarkerName, rhs.pMarkerName ); cmp != 0 ) + return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater; + if ( auto cmp = color <=> rhs.color; cmp != 0 ) + return cmp; + + return std::partial_ordering::equivalent; + } +#endif + + bool operator==( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pMarkerName == rhs.pMarkerName ) || ( strcmp( pMarkerName, rhs.pMarkerName ) == 0 ) ) && + ( color == rhs.color ); + } + + bool operator!=( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT; + const void * pNext = {}; + const char * pMarkerName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; + }; + + template <> + struct CppType + { + using Type = DebugMarkerMarkerInfoEXT; + }; + + // wrapper struct for struct VkDebugMarkerObjectNameInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugMarkerObjectNameInfoEXT.html + struct DebugMarkerObjectNameInfoEXT + { + using NativeType = VkDebugMarkerObjectNameInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DebugMarkerObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, + uint64_t object_ = {}, + const char * pObjectName_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , objectType{ objectType_ } + , object{ object_ } + , pObjectName{ pObjectName_ } + { + } + + VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugMarkerObjectNameInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT + { + objectType = objectType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT + { + object = object_; + return *this; + } + + 
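+    // Illustrative usage sketch, not part of the generated header: the setters above and below chain,
+    // so a name info can be built fluently and passed to vk::Device::debugMarkerSetObjectNameEXT
+    // (requires the VK_EXT_debug_marker device extension; 'device' and 'bufferHandle' are hypothetical):
+    //   auto nameInfo = vk::DebugMarkerObjectNameInfoEXT{}
+    //                     .setObjectType( vk::DebugReportObjectTypeEXT::eBuffer )
+    //                     .setObject( bufferHandle )        // uint64_t handle of the object being named
+    //                     .setPObjectName( "vertex buffer" );
+    //   device.debugMarkerSetObjectNameEXT( nameInfo );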
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT + { + pObjectName = pObjectName_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDebugMarkerObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugMarkerObjectNameInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDebugMarkerObjectNameInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, objectType, object, pObjectName ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 ) + return cmp; + if ( auto cmp = object <=> rhs.object; cmp != 0 ) + return cmp; + if ( pObjectName != rhs.pObjectName ) + if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( object == rhs.object ) && + ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) ); + } + + bool operator!=( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + uint64_t object = {}; + const char * pObjectName = {}; + }; + + template <> + struct CppType + { + using Type = DebugMarkerObjectNameInfoEXT; + }; + + // wrapper struct for struct VkDebugMarkerObjectTagInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugMarkerObjectTagInfoEXT.html + struct DebugMarkerObjectTagInfoEXT + { + using NativeType = VkDebugMarkerObjectTagInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, + uint64_t object_ = {}, + uint64_t tagName_ = {}, + size_t tagSize_ = {}, + const void * pTag_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , objectType{ objectType_ } + , object{ object_ } + , tagName{ tagName_ } + , tagSize{ tagSize_ } + , pTag{ pTag_ } + { + } + + VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
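+    // Illustrative usage sketch, not part of the generated header: tagSize/pTag describe an opaque
+    // binary payload (the ArrayProxy constructor below fills them from a typed range), and the finished
+    // struct is normally passed to vk::Device::debugMarkerSetObjectTagEXT (requires VK_EXT_debug_marker;
+    // 'device', 'imageHandle' and the payload are hypothetical):
+    //   std::array<uint32_t, 2> tagData = { 42u, 7u };
+    //   vk::DebugMarkerObjectTagInfoEXT tagInfo( vk::DebugReportObjectTypeEXT::eImage, imageHandle, 1u,
+    //                                            sizeof( tagData ), tagData.data() );
+    //   device.debugMarkerSetObjectTagEXT( tagInfo );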
DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugMarkerObjectTagInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, + uint64_t object_, + uint64_t tagName_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof( T ) ), pTag( tag_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DebugMarkerObjectTagInfoEXT & operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT + { + objectType = objectType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT + { + object = object_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT + { + tagName = tagName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tagSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT + { + pTag = pTag_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + DebugMarkerObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tag_.size() * sizeof( T ); + pTag = tag_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDebugMarkerObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugMarkerObjectTagInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDebugMarkerObjectTagInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, objectType, object, tagName, tagSize, pTag ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugMarkerObjectTagInfoEXT const & ) const = default; +#else + bool operator==( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( object == rhs.object ) && ( tagName == 
rhs.tagName ) && + ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag ); +# endif + } + + bool operator!=( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + uint64_t object = {}; + uint64_t tagName = {}; + size_t tagSize = {}; + const void * pTag = {}; + }; + + template <> + struct CppType + { + using Type = DebugMarkerObjectTagInfoEXT; + }; + + typedef VULKAN_HPP_NAMESPACE::Bool32( VKAPI_PTR * PFN_DebugReportCallbackEXT )( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage, + void * pUserData ); + + // wrapper struct for struct VkDebugReportCallbackCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugReportCallbackCreateInfoEXT.html + struct DebugReportCallbackCreateInfoEXT + { + using NativeType = VkDebugReportCallbackCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::PFN_DebugReportCallbackEXT pfnCallback_ = {}, + void * pUserData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pfnCallback{ pfnCallback_ } + , pUserData{ pUserData_ } + { + } + + VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugReportCallbackCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." 
)
+
+    DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_,
+                                      PFN_vkDebugReportCallbackEXT              pfnCallback_,
+                                      void *                                    pUserData_ = {},
+                                      const void *                              pNext_     = nullptr ) VULKAN_HPP_NOEXCEPT
+      : DebugReportCallbackCreateInfoEXT( flags_, reinterpret_cast<VULKAN_HPP_NAMESPACE::PFN_DebugReportCallbackEXT>( pfnCallback_ ), pUserData_, pNext_ )
+    {
+    }
+#  if defined( __clang__ ) || defined( __GNUC__ )
+#    pragma GCC diagnostic pop
+#  endif
+
+    DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT &
+      setPfnCallback( VULKAN_HPP_NAMESPACE::PFN_DebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnCallback = pfnCallback_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+
+#  if defined( __clang__ ) || defined( __GNUC__ )
+#    pragma GCC diagnostic push
+#    if defined( __clang__ )
+#      pragma clang diagnostic ignored "-Wunknown-warning-option"
+#    endif
+#    pragma GCC diagnostic ignored "-Wcast-function-type"
+#  endif
+    VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead."
) + + DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT + { + return setPfnCallback( reinterpret_cast( pfnCallback_ ) ); + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDebugReportCallbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugReportCallbackCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDebugReportCallbackCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pfnCallback, pUserData ); + } +#endif + + bool operator==( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +#if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +#else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnCallback == rhs.pfnCallback ) && ( pUserData == rhs.pUserData ); +#endif + } + + bool operator!=( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::PFN_DebugReportCallbackEXT pfnCallback = {}; + void * pUserData = {}; + }; + + template <> + struct CppType + { + using Type = DebugReportCallbackCreateInfoEXT; + }; + + // wrapper struct for struct VkDebugUtilsLabelEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsLabelEXT.html + struct DebugUtilsLabelEXT + { + using NativeType = VkDebugUtilsLabelEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + DebugUtilsLabelEXT( const char * pLabelName_ = {}, std::array const & color_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pLabelName{ pLabelName_ } + , color{ color_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DebugUtilsLabelEXT( *reinterpret_cast( &rhs ) ) {} + + DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPLabelName( const char * pLabelName_ ) VULKAN_HPP_NOEXCEPT + { + pLabelName = pLabelName_; + return *this; + } + + 
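+    // Illustrative usage sketch, not part of the generated header: a label built with these setters is
+    // typically handed to vk::CommandBuffer::beginDebugUtilsLabelEXT / endDebugUtilsLabelEXT (or the
+    // vk::Queue equivalents), assuming the VK_EXT_debug_utils extension is enabled and 'commandBuffer'
+    // is a hypothetical vk::CommandBuffer:
+    //   auto label = vk::DebugUtilsLabelEXT{}.setPLabelName( "shadow pass" ).setColor( { { 0.2f, 0.2f, 0.8f, 1.0f } } );
+    //   commandBuffer.beginDebugUtilsLabelEXT( label );
+    //   // ... record the pass ...
+    //   commandBuffer.endDebugUtilsLabelEXT();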
VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setColor( std::array color_ ) VULKAN_HPP_NOEXCEPT + { + color = color_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDebugUtilsLabelEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugUtilsLabelEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDebugUtilsLabelEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pLabelName, color ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::partial_ordering operator<=>( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( pLabelName != rhs.pLabelName ) + if ( auto cmp = strcmp( pLabelName, rhs.pLabelName ); cmp != 0 ) + return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater; + if ( auto cmp = color <=> rhs.color; cmp != 0 ) + return cmp; + + return std::partial_ordering::equivalent; + } +#endif + + bool operator==( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pLabelName == rhs.pLabelName ) || ( strcmp( pLabelName, rhs.pLabelName ) == 0 ) ) && + ( color == rhs.color ); + } + + bool operator!=( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT; + const void * pNext = {}; + const char * pLabelName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; + }; + + template <> + struct CppType + { + using Type = DebugUtilsLabelEXT; + }; + + // wrapper struct for struct VkDebugUtilsObjectNameInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsObjectNameInfoEXT.html + struct DebugUtilsObjectNameInfoEXT + { + using NativeType = VkDebugUtilsObjectNameInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, + uint64_t objectHandle_ = {}, + const char * pObjectName_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , objectType{ objectType_ } + , objectHandle{ objectHandle_ } + , pObjectName{ pObjectName_ } + { + } + + VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugUtilsObjectNameInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + 
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectHandle = objectHandle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pObjectName = pObjectName_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkDebugUtilsObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( this );
+    }
+
+    operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT *>( this );
+    }
+
+    operator VkDebugUtilsObjectNameInfoEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( this );
+    }
+
+    operator VkDebugUtilsObjectNameInfoEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkDebugUtilsObjectNameInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::
+      tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ObjectType const &, uint64_t const &, const char * const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, objectType, objectHandle, pObjectName );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    std::strong_ordering operator<=>( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
+        return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
+        return cmp;
+      if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 )
+        return cmp;
+      if ( auto cmp = objectHandle <=> rhs.objectHandle; cmp != 0 )
+        return cmp;
+      if ( pObjectName != rhs.pObjectName )
+        if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 )
+          return ( cmp < 0 ) ?
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) && + ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) ); + } + + bool operator!=( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; + uint64_t objectHandle = {}; + const char * pObjectName = {}; + }; + + template <> + struct CppType + { + using Type = DebugUtilsObjectNameInfoEXT; + }; + + // wrapper struct for struct VkDebugUtilsMessengerCallbackDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsMessengerCallbackDataEXT.html + struct DebugUtilsMessengerCallbackDataEXT + { + using NativeType = VkDebugUtilsMessengerCallbackDataEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCallbackDataEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, + const char * pMessageIdName_ = {}, + int32_t messageIdNumber_ = {}, + const char * pMessage_ = {}, + uint32_t queueLabelCount_ = {}, + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ = {}, + uint32_t cmdBufLabelCount_ = {}, + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ = {}, + uint32_t objectCount_ = {}, + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pMessageIdName{ pMessageIdName_ } + , messageIdNumber{ messageIdNumber_ } + , pMessage{ pMessage_ } + , queueLabelCount{ queueLabelCount_ } + , pQueueLabels{ pQueueLabels_ } + , cmdBufLabelCount{ cmdBufLabelCount_ } + , pCmdBufLabels{ pCmdBufLabels_ } + , objectCount{ objectCount_ } + , pObjects{ pObjects_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugUtilsMessengerCallbackDataEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DebugUtilsMessengerCallbackDataEXT( + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_, + const char * pMessageIdName_, + int32_t messageIdNumber_, + const char * pMessage_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueLabels_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & cmdBufLabels_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & objects_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , pMessageIdName( pMessageIdName_ ) + , messageIdNumber( messageIdNumber_ ) + , pMessage( pMessage_ ) + , queueLabelCount( static_cast( queueLabels_.size() ) ) + , pQueueLabels( queueLabels_.data() ) + , 
cmdBufLabelCount( static_cast( cmdBufLabels_.size() ) ) + , pCmdBufLabels( cmdBufLabels_.data() ) + , objectCount( static_cast( objects_.size() ) ) + , pObjects( objects_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DebugUtilsMessengerCallbackDataEXT & operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char * pMessageIdName_ ) VULKAN_HPP_NOEXCEPT + { + pMessageIdName = pMessageIdName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT + { + messageIdNumber = messageIdNumber_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessage( const char * pMessage_ ) VULKAN_HPP_NOEXCEPT + { + pMessage = pMessage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT + { + queueLabelCount = queueLabelCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ ) VULKAN_HPP_NOEXCEPT + { + pQueueLabels = pQueueLabels_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DebugUtilsMessengerCallbackDataEXT & + setQueueLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueLabels_ ) VULKAN_HPP_NOEXCEPT + { + queueLabelCount = static_cast( queueLabels_.size() ); + pQueueLabels = queueLabels_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT + { + cmdBufLabelCount = cmdBufLabelCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT + { + pCmdBufLabels = pCmdBufLabels_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DebugUtilsMessengerCallbackDataEXT & + setCmdBufLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT + { + cmdBufLabelCount = static_cast( cmdBufLabels_.size() ); + pCmdBufLabels = cmdBufLabels_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT + { + objectCount = objectCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ ) VULKAN_HPP_NOEXCEPT + { + pObjects = 
pObjects_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DebugUtilsMessengerCallbackDataEXT & + setObjects( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & objects_ ) VULKAN_HPP_NOEXCEPT + { + objectCount = static_cast( objects_.size() ); + pObjects = objects_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDebugUtilsMessengerCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugUtilsMessengerCallbackDataEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDebugUtilsMessengerCallbackDataEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, flags, pMessageIdName, messageIdNumber, pMessage, queueLabelCount, pQueueLabels, cmdBufLabelCount, pCmdBufLabels, objectCount, pObjects ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( pMessageIdName != rhs.pMessageIdName ) + if ( auto cmp = strcmp( pMessageIdName, rhs.pMessageIdName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = messageIdNumber <=> rhs.messageIdNumber; cmp != 0 ) + return cmp; + if ( pMessage != rhs.pMessage ) + if ( auto cmp = strcmp( pMessage, rhs.pMessage ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = queueLabelCount <=> rhs.queueLabelCount; cmp != 0 ) + return cmp; + if ( auto cmp = pQueueLabels <=> rhs.pQueueLabels; cmp != 0 ) + return cmp; + if ( auto cmp = cmdBufLabelCount <=> rhs.cmdBufLabelCount; cmp != 0 ) + return cmp; + if ( auto cmp = pCmdBufLabels <=> rhs.pCmdBufLabels; cmp != 0 ) + return cmp; + if ( auto cmp = objectCount <=> rhs.objectCount; cmp != 0 ) + return cmp; + if ( auto cmp = pObjects <=> rhs.pObjects; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( ( pMessageIdName == rhs.pMessageIdName ) || ( strcmp( pMessageIdName, rhs.pMessageIdName ) == 0 ) ) && + ( messageIdNumber == rhs.messageIdNumber ) && ( ( pMessage == rhs.pMessage ) || ( strcmp( pMessage, rhs.pMessage ) == 0 ) ) && + ( queueLabelCount == rhs.queueLabelCount ) && ( pQueueLabels == rhs.pQueueLabels ) && ( cmdBufLabelCount == rhs.cmdBufLabelCount ) && + ( pCmdBufLabels == rhs.pCmdBufLabels ) && ( objectCount == rhs.objectCount ) && ( pObjects == rhs.pObjects ); + } + + bool operator!=( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {}; + const char * pMessageIdName = {}; + int32_t messageIdNumber = {}; + const char * pMessage = {}; + uint32_t queueLabelCount = {}; + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels = {}; + uint32_t cmdBufLabelCount = {}; + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels = {}; + uint32_t objectCount = {}; + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects = {}; + }; + + template <> + struct CppType + { + using Type = DebugUtilsMessengerCallbackDataEXT; + }; + + typedef VULKAN_HPP_NAMESPACE::Bool32( VKAPI_PTR * PFN_DebugUtilsMessengerCallbackEXT )( + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, + void * pUserData ); + + // wrapper struct for struct VkDebugUtilsMessengerCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsMessengerCreateInfoEXT.html + struct DebugUtilsMessengerCreateInfoEXT + { + using NativeType = VkDebugUtilsMessengerCreateInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, + VULKAN_HPP_NAMESPACE::PFN_DebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, + void * pUserData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , messageSeverity{ messageSeverity_ } + , messageType{ messageType_ } 
+ , pfnUserCallback{ pfnUserCallback_ } + , pUserData{ pUserData_ } + { + } + + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugUtilsMessengerCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." ) + + DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_, + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_, + void * pUserData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : DebugUtilsMessengerCreateInfoEXT( flags_, + messageSeverity_, + messageType_, + reinterpret_cast( pfnUserCallback_ ), + pUserData_, + pNext_ ) + { + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif + + DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & + setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT + { + messageSeverity = messageSeverity_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & + setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT + { + messageType = messageType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & + setPfnUserCallback( VULKAN_HPP_NAMESPACE::PFN_DebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT + { + pfnUserCallback = pfnUserCallback_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT + { + pUserData = pUserData_; + return *this; + } + +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." 
) + + DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT + { + return setPfnUserCallback( reinterpret_cast( pfnUserCallback_ ) ); + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDebugUtilsMessengerCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugUtilsMessengerCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDebugUtilsMessengerCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, messageSeverity, messageType, pfnUserCallback, pUserData ); + } +#endif + + bool operator==( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +#if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +#else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( messageSeverity == rhs.messageSeverity ) && + ( messageType == rhs.messageType ) && ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData ); +#endif + } + + bool operator!=( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {}; + VULKAN_HPP_NAMESPACE::PFN_DebugUtilsMessengerCallbackEXT pfnUserCallback = {}; + void * pUserData = {}; + }; + + template <> + struct CppType + { + using Type = DebugUtilsMessengerCreateInfoEXT; + }; + + // wrapper struct for struct VkDebugUtilsObjectTagInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsObjectTagInfoEXT.html + struct DebugUtilsObjectTagInfoEXT + { + using NativeType = VkDebugUtilsObjectTagInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, + uint64_t objectHandle_ = {}, + uint64_t tagName_ = {}, + size_t tagSize_ = {}, + const void * pTag_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , objectType{ objectType_ } + , objectHandle{ objectHandle_ } + , tagName{ tagName_ } + , tagSize{ tagSize_ } + , pTag{ pTag_ } + { + } + + VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugUtilsObjectTagInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + 
DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle_, + uint64_t tagName_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , objectType( objectType_ ) + , objectHandle( objectHandle_ ) + , tagName( tagName_ ) + , tagSize( tag_.size() * sizeof( T ) ) + , pTag( tag_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT + { + objectType = objectType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT + { + objectHandle = objectHandle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT + { + tagName = tagName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tagSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT + { + pTag = pTag_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + DebugUtilsObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tag_.size() * sizeof( T ); + pTag = tag_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDebugUtilsObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugUtilsObjectTagInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDebugUtilsObjectTagInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, objectType, objectHandle, tagName, tagSize, pTag ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsObjectTagInfoEXT const & ) const = default; +#else + bool operator==( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) && + ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag ); +# endif + } + + bool operator!=( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); 
+ } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; + uint64_t objectHandle = {}; + uint64_t tagName = {}; + size_t tagSize = {}; + const void * pTag = {}; + }; + + template <> + struct CppType + { + using Type = DebugUtilsObjectTagInfoEXT; + }; + + // wrapper struct for struct VkDecompressMemoryRegionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDecompressMemoryRegionNV.html + struct DecompressMemoryRegionNV + { + using NativeType = VkDecompressMemoryRegionNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DecompressMemoryRegionNV( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize compressedSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize_ = {}, + VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethod_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAddress{ srcAddress_ } + , dstAddress{ dstAddress_ } + , compressedSize{ compressedSize_ } + , decompressedSize{ decompressedSize_ } + , decompressionMethod{ decompressionMethod_ } + { + } + + VULKAN_HPP_CONSTEXPR DecompressMemoryRegionNV( DecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DecompressMemoryRegionNV( VkDecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DecompressMemoryRegionNV( *reinterpret_cast( &rhs ) ) + { + } + + DecompressMemoryRegionNV & operator=( DecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DecompressMemoryRegionNV & operator=( VkDecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setSrcAddress( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT + { + srcAddress = srcAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDstAddress( VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ ) VULKAN_HPP_NOEXCEPT + { + dstAddress = dstAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setCompressedSize( VULKAN_HPP_NAMESPACE::DeviceSize compressedSize_ ) VULKAN_HPP_NOEXCEPT + { + compressedSize = compressedSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDecompressedSize( VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize_ ) VULKAN_HPP_NOEXCEPT + { + decompressedSize = decompressedSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & + setDecompressionMethod( VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethod_ ) VULKAN_HPP_NOEXCEPT + { + decompressionMethod = decompressionMethod_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDecompressMemoryRegionNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDecompressMemoryRegionNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDecompressMemoryRegionNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDecompressMemoryRegionNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } 
+ +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcAddress, dstAddress, compressedSize, decompressedSize, decompressionMethod ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DecompressMemoryRegionNV const & ) const = default; +#else + bool operator==( DecompressMemoryRegionNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcAddress == rhs.srcAddress ) && ( dstAddress == rhs.dstAddress ) && ( compressedSize == rhs.compressedSize ) && + ( decompressedSize == rhs.decompressedSize ) && ( decompressionMethod == rhs.decompressionMethod ); +# endif + } + + bool operator!=( DecompressMemoryRegionNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize compressedSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize = {}; + VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethod = {}; + }; + + // wrapper struct for struct VkDedicatedAllocationBufferCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDedicatedAllocationBufferCreateInfoNV.html + struct DedicatedAllocationBufferCreateInfoNV + { + using NativeType = VkDedicatedAllocationBufferCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationBufferCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dedicatedAllocation{ dedicatedAllocation_ } + { + } + + VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DedicatedAllocationBufferCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + DedicatedAllocationBufferCreateInfoNV & operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DedicatedAllocationBufferCreateInfoNV & operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & + setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT + { + dedicatedAllocation = dedicatedAllocation_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDedicatedAllocationBufferCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( 
this ); + } + + operator VkDedicatedAllocationBufferCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDedicatedAllocationBufferCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dedicatedAllocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DedicatedAllocationBufferCreateInfoNV const & ) const = default; +#else + bool operator==( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation ); +# endif + } + + bool operator!=( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; + }; + + template <> + struct CppType + { + using Type = DedicatedAllocationBufferCreateInfoNV; + }; + + // wrapper struct for struct VkDedicatedAllocationImageCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDedicatedAllocationImageCreateInfoNV.html + struct DedicatedAllocationImageCreateInfoNV + { + using NativeType = VkDedicatedAllocationImageCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationImageCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dedicatedAllocation{ dedicatedAllocation_ } + { + } + + VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DedicatedAllocationImageCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DedicatedAllocationImageCreateInfoNV & operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & + setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT + { + dedicatedAllocation = dedicatedAllocation_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDedicatedAllocationImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + 
operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDedicatedAllocationImageCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDedicatedAllocationImageCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dedicatedAllocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DedicatedAllocationImageCreateInfoNV const & ) const = default; +#else + bool operator==( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation ); +# endif + } + + bool operator!=( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; + }; + + template <> + struct CppType + { + using Type = DedicatedAllocationImageCreateInfoNV; + }; + + // wrapper struct for struct VkDedicatedAllocationMemoryAllocateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDedicatedAllocationMemoryAllocateInfoNV.html + struct DedicatedAllocationMemoryAllocateInfoNV + { + using NativeType = VkDedicatedAllocationMemoryAllocateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , image{ image_ } + , buffer{ buffer_ } + { + } + + VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DedicatedAllocationMemoryAllocateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DedicatedAllocationMemoryAllocateInfoNV & operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
DedicatedAllocationMemoryAllocateInfoNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDedicatedAllocationMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDedicatedAllocationMemoryAllocateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDedicatedAllocationMemoryAllocateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image, buffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const & ) const = default; +#else + bool operator==( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer ); +# endif + } + + bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + + template <> + struct CppType + { + using Type = DedicatedAllocationMemoryAllocateInfoNV; + }; + + // wrapper struct for struct VkMemoryBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryBarrier2.html + struct MemoryBarrier2 + { + using NativeType = VkMemoryBarrier2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcStageMask{ srcStageMask_ } + , srcAccessMask{ srcAccessMask_ } + , dstStageMask{ dstStageMask_ } + , dstAccessMask{ dstAccessMask_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryBarrier2( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryBarrier2( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryBarrier2( *reinterpret_cast( &rhs ) ) {} + + MemoryBarrier2 & operator=( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryBarrier2 & operator=( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + 
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryBarrier2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryBarrier2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryBarrier2 const & ) const = default; +#else + bool operator==( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ); +# endif + } + + bool operator!=( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {}; + }; + + template <> + struct CppType + { + using Type = MemoryBarrier2; + }; + + using MemoryBarrier2KHR = MemoryBarrier2; + + // wrapper struct for struct VkImageSubresourceRange, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSubresourceRange.html + struct ImageSubresourceRange + { + using NativeType = VkImageSubresourceRange; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSubresourceRange( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + uint32_t baseMipLevel_ = {}, + uint32_t levelCount_ = {}, + uint32_t baseArrayLayer_ = {}, + uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask{ aspectMask_ } + , baseMipLevel{ baseMipLevel_ } + , levelCount{ levelCount_ } + , baseArrayLayer{ baseArrayLayer_ } + , layerCount{ layerCount_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT + 
: ImageSubresourceRange( *reinterpret_cast( &rhs ) ) + { + } + + ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT + { + baseMipLevel = baseMipLevel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT + { + levelCount = levelCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageSubresourceRange const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresourceRange const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageSubresourceRange *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( aspectMask, baseMipLevel, levelCount, baseArrayLayer, layerCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSubresourceRange const & ) const = default; +#else + bool operator==( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( aspectMask == rhs.aspectMask ) && ( baseMipLevel == rhs.baseMipLevel ) && ( levelCount == rhs.levelCount ) && + ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount ); +# endif + } + + bool operator!=( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t baseMipLevel = {}; + uint32_t levelCount = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; + }; + + // wrapper struct for struct VkImageMemoryBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageMemoryBarrier2.html + struct ImageMemoryBarrier2 + { + using NativeType = VkImageMemoryBarrier2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, + 
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcStageMask{ srcStageMask_ } + , srcAccessMask{ srcAccessMask_ } + , dstStageMask{ dstStageMask_ } + , dstAccessMask{ dstAccessMask_ } + , oldLayout{ oldLayout_ } + , newLayout{ newLayout_ } + , srcQueueFamilyIndex{ srcQueueFamilyIndex_ } + , dstQueueFamilyIndex{ dstQueueFamilyIndex_ } + , image{ image_ } + , subresourceRange{ subresourceRange_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryBarrier2( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageMemoryBarrier2( *reinterpret_cast( &rhs ) ) + { + } + + ImageMemoryBarrier2 & operator=( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageMemoryBarrier2 & operator=( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT + { + oldLayout = oldLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT + { + newLayout = newLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & 
subresourceRange_ ) VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageMemoryBarrier2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageMemoryBarrier2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + srcStageMask, + srcAccessMask, + dstStageMask, + dstAccessMask, + oldLayout, + newLayout, + srcQueueFamilyIndex, + dstQueueFamilyIndex, + image, + subresourceRange ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageMemoryBarrier2 const & ) const = default; +#else + bool operator==( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) && + ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && + ( image == rhs.image ) && ( subresourceRange == rhs.subresourceRange ); +# endif + } + + bool operator!=( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + }; + + template <> + struct CppType + { + using Type = ImageMemoryBarrier2; + }; + + using ImageMemoryBarrier2KHR = ImageMemoryBarrier2; + + // wrapper struct for struct VkDependencyInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDependencyInfo.html + struct DependencyInfo + { + using NativeType = VkDependencyInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDependencyInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DependencyInfo( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, + uint32_t memoryBarrierCount_ = {}, + const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ = {}, + uint32_t bufferMemoryBarrierCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ = {}, + uint32_t imageMemoryBarrierCount_ = {}, 
+ const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dependencyFlags{ dependencyFlags_ } + , memoryBarrierCount{ memoryBarrierCount_ } + , pMemoryBarriers{ pMemoryBarriers_ } + , bufferMemoryBarrierCount{ bufferMemoryBarrierCount_ } + , pBufferMemoryBarriers{ pBufferMemoryBarriers_ } + , imageMemoryBarrierCount{ imageMemoryBarrierCount_ } + , pImageMemoryBarriers{ pImageMemoryBarriers_ } + { + } + + VULKAN_HPP_CONSTEXPR DependencyInfo( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DependencyInfo( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DependencyInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DependencyInfo( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & memoryBarriers_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferMemoryBarriers_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageMemoryBarriers_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , dependencyFlags( dependencyFlags_ ) + , memoryBarrierCount( static_cast( memoryBarriers_.size() ) ) + , pMemoryBarriers( memoryBarriers_.data() ) + , bufferMemoryBarrierCount( static_cast( bufferMemoryBarriers_.size() ) ) + , pBufferMemoryBarriers( bufferMemoryBarriers_.data() ) + , imageMemoryBarrierCount( static_cast( imageMemoryBarriers_.size() ) ) + , pImageMemoryBarriers( imageMemoryBarriers_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DependencyInfo & operator=( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DependencyInfo & operator=( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT + { + dependencyFlags = dependencyFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setMemoryBarrierCount( uint32_t memoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT + { + memoryBarrierCount = memoryBarrierCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPMemoryBarriers( const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + pMemoryBarriers = pMemoryBarriers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DependencyInfo & + setMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & memoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + memoryBarrierCount = static_cast( memoryBarriers_.size() ); + pMemoryBarriers = memoryBarriers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setBufferMemoryBarrierCount( uint32_t bufferMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT + { + bufferMemoryBarrierCount = bufferMemoryBarrierCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & + setPBufferMemoryBarriers( const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + pBufferMemoryBarriers = pBufferMemoryBarriers_; + return *this; + } + +# if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DependencyInfo & setBufferMemoryBarriers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + bufferMemoryBarrierCount = static_cast( bufferMemoryBarriers_.size() ); + pBufferMemoryBarriers = bufferMemoryBarriers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setImageMemoryBarrierCount( uint32_t imageMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT + { + imageMemoryBarrierCount = imageMemoryBarrierCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & + setPImageMemoryBarriers( const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + pImageMemoryBarriers = pImageMemoryBarriers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DependencyInfo & setImageMemoryBarriers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + imageMemoryBarrierCount = static_cast( imageMemoryBarriers_.size() ); + pImageMemoryBarriers = imageMemoryBarriers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDependencyInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDependencyInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDependencyInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDependencyInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + dependencyFlags, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DependencyInfo const & ) const = default; +#else + bool operator==( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dependencyFlags == rhs.dependencyFlags ) && + ( memoryBarrierCount == rhs.memoryBarrierCount ) && ( pMemoryBarriers == rhs.pMemoryBarriers ) && + ( bufferMemoryBarrierCount == rhs.bufferMemoryBarrierCount ) && ( pBufferMemoryBarriers == rhs.pBufferMemoryBarriers ) && + ( imageMemoryBarrierCount == rhs.imageMemoryBarrierCount ) && ( pImageMemoryBarriers == rhs.pImageMemoryBarriers ); +# endif + } + + bool operator!=( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDependencyInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + uint32_t memoryBarrierCount = {}; + const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers = {}; + uint32_t bufferMemoryBarrierCount = {}; + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers = {}; + uint32_t imageMemoryBarrierCount = {}; + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers = {}; + }; + + template <> + struct CppType + { + using Type = DependencyInfo; + }; + + using DependencyInfoKHR = DependencyInfo; 
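 The synchronization2 wrappers above (MemoryBarrier2, ImageSubresourceRange, ImageMemoryBarrier2 and DependencyInfo) are meant to be combined when recording a pipeline barrier. The sketch below is illustrative only and is not part of the generated header or of this diff; it assumes the default configuration (struct constructors, setters and enhanced mode enabled), a device with Vulkan 1.3 / synchronization2 support, an initialized dispatcher, and hypothetical `cmd` / `image` handles supplied by the caller.

 #include <vulkan/vulkan.hpp>

 // Hypothetical helper for illustration: transitions `image` from eUndefined to
 // eColorAttachmentOptimal using the ImageMemoryBarrier2 / DependencyInfo wrappers.
 void recordLayoutTransition( vk::CommandBuffer cmd, vk::Image image )
 {
   vk::ImageMemoryBarrier2 toColorAttachment =
     vk::ImageMemoryBarrier2{}
       .setSrcStageMask( vk::PipelineStageFlagBits2::eTopOfPipe )
       .setDstStageMask( vk::PipelineStageFlagBits2::eColorAttachmentOutput )
       .setDstAccessMask( vk::AccessFlagBits2::eColorAttachmentWrite )
       .setOldLayout( vk::ImageLayout::eUndefined )
       .setNewLayout( vk::ImageLayout::eColorAttachmentOptimal )
       .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
       .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
       .setImage( image )
       .setSubresourceRange( vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );

   // setImageMemoryBarriers() fills imageMemoryBarrierCount and pImageMemoryBarriers
   // in one call via the ArrayProxyNoTemporaries overload of DependencyInfo.
   vk::DependencyInfo dependencyInfo;
   dependencyInfo.setImageMemoryBarriers( toColorAttachment );

   cmd.pipelineBarrier2( dependencyInfo );
 }

 The count/pointer pairs in DependencyInfo can also be filled member by member (setImageMemoryBarrierCount plus setPImageMemoryBarriers) when the array-proxy setters are compiled out with VULKAN_HPP_DISABLE_ENHANCED_MODE.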
+ + // wrapper struct for struct VkDepthBiasInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDepthBiasInfoEXT.html + struct DepthBiasInfoEXT + { + using NativeType = VkDepthBiasInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDepthBiasInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DepthBiasInfoEXT( float depthBiasConstantFactor_ = {}, + float depthBiasClamp_ = {}, + float depthBiasSlopeFactor_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , depthBiasConstantFactor{ depthBiasConstantFactor_ } + , depthBiasClamp{ depthBiasClamp_ } + , depthBiasSlopeFactor{ depthBiasSlopeFactor_ } + { + } + + VULKAN_HPP_CONSTEXPR DepthBiasInfoEXT( DepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DepthBiasInfoEXT( VkDepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DepthBiasInfoEXT( *reinterpret_cast( &rhs ) ) {} + + DepthBiasInfoEXT & operator=( DepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DepthBiasInfoEXT & operator=( VkDepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasConstantFactor = depthBiasConstantFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasClamp = depthBiasClamp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasSlopeFactor = depthBiasSlopeFactor_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDepthBiasInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDepthBiasInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDepthBiasInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDepthBiasInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DepthBiasInfoEXT const & ) const = default; +#else + bool operator==( DepthBiasInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) && + ( depthBiasClamp == rhs.depthBiasClamp ) && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ); +# endif + } + + bool operator!=( DepthBiasInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDepthBiasInfoEXT; + const void * pNext = {}; + float depthBiasConstantFactor = {}; + float depthBiasClamp = {}; + float depthBiasSlopeFactor = {}; + }; + + template <> + struct CppType + { + using Type = DepthBiasInfoEXT; + }; + + // wrapper struct for struct VkDepthBiasRepresentationInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDepthBiasRepresentationInfoEXT.html + struct DepthBiasRepresentationInfoEXT + { + using NativeType = VkDepthBiasRepresentationInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDepthBiasRepresentationInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DepthBiasRepresentationInfoEXT( VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT depthBiasRepresentation_ = + VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT::eLeastRepresentableValueFormat, + VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , depthBiasRepresentation{ depthBiasRepresentation_ } + , depthBiasExact{ depthBiasExact_ } + { + } + + VULKAN_HPP_CONSTEXPR DepthBiasRepresentationInfoEXT( DepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DepthBiasRepresentationInfoEXT( VkDepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DepthBiasRepresentationInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + DepthBiasRepresentationInfoEXT & operator=( DepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DepthBiasRepresentationInfoEXT & operator=( VkDepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT & + setDepthBiasRepresentation( VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT depthBiasRepresentation_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasRepresentation = depthBiasRepresentation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT & setDepthBiasExact( VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasExact = depthBiasExact_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDepthBiasRepresentationInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDepthBiasRepresentationInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDepthBiasRepresentationInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDepthBiasRepresentationInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthBiasRepresentation, depthBiasExact ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DepthBiasRepresentationInfoEXT const & ) const = default; +#else + bool operator==( 
DepthBiasRepresentationInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthBiasRepresentation == rhs.depthBiasRepresentation ) && + ( depthBiasExact == rhs.depthBiasExact ); +# endif + } + + bool operator!=( DepthBiasRepresentationInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDepthBiasRepresentationInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT depthBiasRepresentation = VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT::eLeastRepresentableValueFormat; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact = {}; + }; + + template <> + struct CppType + { + using Type = DepthBiasRepresentationInfoEXT; + }; + + // wrapper struct for struct VkDepthClampRangeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDepthClampRangeEXT.html + struct DepthClampRangeEXT + { + using NativeType = VkDepthClampRangeEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DepthClampRangeEXT( float minDepthClamp_ = {}, float maxDepthClamp_ = {} ) VULKAN_HPP_NOEXCEPT + : minDepthClamp{ minDepthClamp_ } + , maxDepthClamp{ maxDepthClamp_ } + { + } + + VULKAN_HPP_CONSTEXPR DepthClampRangeEXT( DepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DepthClampRangeEXT( VkDepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DepthClampRangeEXT( *reinterpret_cast( &rhs ) ) {} + + DepthClampRangeEXT & operator=( DepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DepthClampRangeEXT & operator=( VkDepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DepthClampRangeEXT & setMinDepthClamp( float minDepthClamp_ ) VULKAN_HPP_NOEXCEPT + { + minDepthClamp = minDepthClamp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DepthClampRangeEXT & setMaxDepthClamp( float maxDepthClamp_ ) VULKAN_HPP_NOEXCEPT + { + maxDepthClamp = maxDepthClamp_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDepthClampRangeEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDepthClampRangeEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDepthClampRangeEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDepthClampRangeEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( minDepthClamp, maxDepthClamp ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DepthClampRangeEXT const & ) const = default; +#else + bool operator==( DepthClampRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( minDepthClamp == rhs.minDepthClamp ) && ( maxDepthClamp == rhs.maxDepthClamp ); +# endif + } + + bool operator!=( DepthClampRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + 
return !operator==( rhs ); + } +#endif + + public: + float minDepthClamp = {}; + float maxDepthClamp = {}; + }; + + // wrapper struct for struct VkDescriptorAddressInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorAddressInfoEXT.html + struct DescriptorAddressInfoEXT + { + using NativeType = VkDescriptorAddressInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorAddressInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorAddressInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress address_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , address{ address_ } + , range{ range_ } + , format{ format_ } + { + } + + VULKAN_HPP_CONSTEXPR DescriptorAddressInfoEXT( DescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorAddressInfoEXT( VkDescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorAddressInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + DescriptorAddressInfoEXT & operator=( DescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorAddressInfoEXT & operator=( VkDescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setAddress( VULKAN_HPP_NAMESPACE::DeviceAddress address_ ) VULKAN_HPP_NOEXCEPT + { + address = address_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT + { + range = range_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorAddressInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorAddressInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorAddressInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorAddressInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, address, range, format ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorAddressInfoEXT const & ) const = default; +#else + bool operator==( DescriptorAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( address == rhs.address ) && ( range == rhs.range ) && ( format == rhs.format ); +# endif + } + + bool operator!=( DescriptorAddressInfoEXT const & rhs 
) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorAddressInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress address = {}; + VULKAN_HPP_NAMESPACE::DeviceSize range = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + + template <> + struct CppType + { + using Type = DescriptorAddressInfoEXT; + }; + + // wrapper struct for struct VkDescriptorBufferBindingInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorBufferBindingInfoEXT.html + struct DescriptorBufferBindingInfoEXT + { + using NativeType = VkDescriptorBufferBindingInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorBufferBindingInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorBufferBindingInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress address_ = {}, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , address{ address_ } + , usage{ usage_ } + { + } + + VULKAN_HPP_CONSTEXPR DescriptorBufferBindingInfoEXT( DescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorBufferBindingInfoEXT( VkDescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorBufferBindingInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + DescriptorBufferBindingInfoEXT & operator=( DescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorBufferBindingInfoEXT & operator=( VkDescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setAddress( VULKAN_HPP_NAMESPACE::DeviceAddress address_ ) VULKAN_HPP_NOEXCEPT + { + address = address_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorBufferBindingInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorBufferBindingInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorBufferBindingInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorBufferBindingInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, address, usage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorBufferBindingInfoEXT const & ) const = default; +#else + bool operator==( DescriptorBufferBindingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return 
this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( address == rhs.address ) && ( usage == rhs.usage ); +# endif + } + + bool operator!=( DescriptorBufferBindingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorBufferBindingInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress address = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorBufferBindingInfoEXT; + }; + + // wrapper struct for struct VkDescriptorBufferBindingPushDescriptorBufferHandleEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorBufferBindingPushDescriptorBufferHandleEXT.html + struct DescriptorBufferBindingPushDescriptorBufferHandleEXT + { + using NativeType = VkDescriptorBufferBindingPushDescriptorBufferHandleEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorBufferBindingPushDescriptorBufferHandleEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , buffer{ buffer_ } + { + } + + VULKAN_HPP_CONSTEXPR + DescriptorBufferBindingPushDescriptorBufferHandleEXT( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorBufferBindingPushDescriptorBufferHandleEXT( VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorBufferBindingPushDescriptorBufferHandleEXT( *reinterpret_cast( &rhs ) ) + { + } + + DescriptorBufferBindingPushDescriptorBufferHandleEXT & + operator=( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorBufferBindingPushDescriptorBufferHandleEXT & operator=( VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingPushDescriptorBufferHandleEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingPushDescriptorBufferHandleEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const 
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, buffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & ) const = default; +#else + bool operator==( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); +# endif + } + + bool operator!=( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorBufferBindingPushDescriptorBufferHandleEXT; + }; + + // wrapper struct for struct VkDescriptorBufferInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorBufferInfo.html + struct DescriptorBufferInfo + { + using NativeType = VkDescriptorBufferInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer{ buffer_ } + , offset{ offset_ } + , range{ range_ } + { + } + + VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorBufferInfo( *reinterpret_cast( &rhs ) ) + { + } + + DescriptorBufferInfo & operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT + { + range = range_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorBufferInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorBufferInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorBufferInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( buffer, offset, range ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorBufferInfo const & ) const = 
default; +#else + bool operator==( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( range == rhs.range ); +# endif + } + + bool operator!=( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize range = {}; + }; + + // wrapper struct for struct VkDescriptorImageInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorImageInfo.html + struct DescriptorImageInfo + { + using NativeType = VkDescriptorImageInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, + VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT + : sampler{ sampler_ } + , imageView{ imageView_ } + , imageLayout{ imageLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorImageInfo( *reinterpret_cast( &rhs ) ) + { + } + + DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT + { + sampler = sampler_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + { + imageView = imageView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT + { + imageLayout = imageLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorImageInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorImageInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorImageInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sampler, imageView, imageLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorImageInfo const & ) const = default; +#else + bool operator==( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sampler == rhs.sampler ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout ); +# endif + } + + bool operator!=( 
DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Sampler sampler = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + + union DescriptorDataEXT + { + using NativeType = VkDescriptorDataEXT; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const VULKAN_HPP_NAMESPACE::Sampler * pSampler_ = {} ) : pSampler( pSampler_ ) {} + + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pDescriptorImageInfo_ ) + : pCombinedImageSampler( pDescriptorImageInfo_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pDescriptorAddressInfoEXT_ ) + : pUniformTexelBuffer( pDescriptorAddressInfoEXT_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ ) : accelerationStructure( accelerationStructure_ ) {} +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPSampler( const VULKAN_HPP_NAMESPACE::Sampler * pSampler_ ) VULKAN_HPP_NOEXCEPT + { + pSampler = pSampler_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & + setPCombinedImageSampler( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pCombinedImageSampler_ ) VULKAN_HPP_NOEXCEPT + { + pCombinedImageSampler = pCombinedImageSampler_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & + setPInputAttachmentImage( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pInputAttachmentImage_ ) VULKAN_HPP_NOEXCEPT + { + pInputAttachmentImage = pInputAttachmentImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPSampledImage( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pSampledImage_ ) VULKAN_HPP_NOEXCEPT + { + pSampledImage = pSampledImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPStorageImage( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pStorageImage_ ) VULKAN_HPP_NOEXCEPT + { + pStorageImage = pStorageImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & + setPUniformTexelBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformTexelBuffer_ ) VULKAN_HPP_NOEXCEPT + { + pUniformTexelBuffer = pUniformTexelBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & + setPStorageTexelBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageTexelBuffer_ ) VULKAN_HPP_NOEXCEPT + { + pStorageTexelBuffer = pStorageTexelBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPUniformBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformBuffer_ ) VULKAN_HPP_NOEXCEPT + { + pUniformBuffer = pUniformBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPStorageBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageBuffer_ ) VULKAN_HPP_NOEXCEPT + { + pStorageBuffer = pStorageBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setAccelerationStructure( VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + 
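+    // Note: DescriptorDataEXT is a union (VK_EXT_descriptor_buffer); at most one member may be
+    // set, and it must match the DescriptorType passed alongside it in DescriptorGetInfoEXT.
+    // A minimal, hypothetical sketch of fetching a uniform-buffer descriptor -- identifiers such
+    // as "device", "bufferAddress", "bufferSize" and "uniformBufferDescriptorSize" are
+    // illustrative assumptions, not part of this header:
+    //
+    //   vk::DescriptorAddressInfoEXT addressInfo( bufferAddress, bufferSize );
+    //   vk::DescriptorGetInfoEXT     getInfo( vk::DescriptorType::eUniformBuffer,
+    //                                         vk::DescriptorDataEXT().setPUniformBuffer( &addressInfo ) );
+    //   std::vector<std::byte>       descriptor( uniformBufferDescriptorSize );
+    //   device.getDescriptorEXT( &getInfo, descriptor.size(), descriptor.data() );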
operator VkDescriptorDataEXT const &() const + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorDataEXT &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + const VULKAN_HPP_NAMESPACE::Sampler * pSampler; + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pCombinedImageSampler; + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pInputAttachmentImage; + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pSampledImage; + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pStorageImage; + const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformTexelBuffer; + const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageTexelBuffer; + const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformBuffer; + const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageBuffer; + VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure; +#else + const VkSampler * pSampler; + const VkDescriptorImageInfo * pCombinedImageSampler; + const VkDescriptorImageInfo * pInputAttachmentImage; + const VkDescriptorImageInfo * pSampledImage; + const VkDescriptorImageInfo * pStorageImage; + const VkDescriptorAddressInfoEXT * pUniformTexelBuffer; + const VkDescriptorAddressInfoEXT * pStorageTexelBuffer; + const VkDescriptorAddressInfoEXT * pUniformBuffer; + const VkDescriptorAddressInfoEXT * pStorageBuffer; + VkDeviceAddress accelerationStructure; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + // wrapper struct for struct VkDescriptorGetInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorGetInfoEXT.html + struct DescriptorGetInfoEXT + { + using NativeType = VkDescriptorGetInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorGetInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + VULKAN_HPP_NAMESPACE::DescriptorDataEXT data_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , data{ data_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT( DescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorGetInfoEXT( VkDescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorGetInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + DescriptorGetInfoEXT & operator=( DescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorGetInfoEXT & operator=( VkDescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setData( VULKAN_HPP_NAMESPACE::DescriptorDataEXT const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorGetInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkDescriptorGetInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorGetInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorGetInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, data ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorGetInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + VULKAN_HPP_NAMESPACE::DescriptorDataEXT data = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorGetInfoEXT; + }; + + // wrapper struct for struct VkDescriptorPoolSize, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorPoolSize.html + struct DescriptorPoolSize + { + using NativeType = VkDescriptorPoolSize; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT + : type{ type_ } + , descriptorCount{ descriptorCount_ } + { + } + + VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorPoolSize( *reinterpret_cast( &rhs ) ) {} + + DescriptorPoolSize & operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorPoolSize const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorPoolSize const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorPoolSize *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( type, descriptorCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorPoolSize const & ) const = default; +#else + bool operator==( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( type == rhs.type ) && ( descriptorCount == rhs.descriptorCount ); +# endif + } + + bool operator!=( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT + { + 
return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + uint32_t descriptorCount = {}; + }; + + // wrapper struct for struct VkDescriptorPoolCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorPoolCreateInfo.html + struct DescriptorPoolCreateInfo + { + using NativeType = VkDescriptorPoolCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {}, + uint32_t maxSets_ = {}, + uint32_t poolSizeCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , maxSets{ maxSets_ } + , poolSizeCount{ poolSizeCount_ } + , pPoolSizes{ pPoolSizes_ } + { + } + + VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorPoolCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_, + uint32_t maxSets_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & poolSizes_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( static_cast( poolSizes_.size() ) ), pPoolSizes( poolSizes_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DescriptorPoolCreateInfo & operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) VULKAN_HPP_NOEXCEPT + { + maxSets = maxSets_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) VULKAN_HPP_NOEXCEPT + { + poolSizeCount = poolSizeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ ) VULKAN_HPP_NOEXCEPT + { + pPoolSizes = pPoolSizes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorPoolCreateInfo & + setPoolSizes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & poolSizes_ ) VULKAN_HPP_NOEXCEPT + { + poolSizeCount = static_cast( poolSizes_.size() ); + pPoolSizes = poolSizes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorPoolCreateInfo const &() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorPoolCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorPoolCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, maxSets, poolSizeCount, pPoolSizes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorPoolCreateInfo const & ) const = default; +#else + bool operator==( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( maxSets == rhs.maxSets ) && + ( poolSizeCount == rhs.poolSizeCount ) && ( pPoolSizes == rhs.pPoolSizes ); +# endif + } + + bool operator!=( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {}; + uint32_t maxSets = {}; + uint32_t poolSizeCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorPoolCreateInfo; + }; + + // wrapper struct for struct VkDescriptorPoolInlineUniformBlockCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorPoolInlineUniformBlockCreateInfo.html + struct DescriptorPoolInlineUniformBlockCreateInfo + { + using NativeType = VkDescriptorPoolInlineUniformBlockCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( uint32_t maxInlineUniformBlockBindings_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxInlineUniformBlockBindings{ maxInlineUniformBlockBindings_ } + { + } + + VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorPoolInlineUniformBlockCreateInfo( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorPoolInlineUniformBlockCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + DescriptorPoolInlineUniformBlockCreateInfo & operator=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorPoolInlineUniformBlockCreateInfo & operator=( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + 
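+    // Note: this structure extends DescriptorPoolCreateInfo through its pNext chain when the pool
+    // has to serve inline uniform block descriptors (for which DescriptorPoolSize::descriptorCount
+    // is expressed in bytes). A minimal, hypothetical sketch -- the counts and the "device" handle
+    // are illustrative assumptions:
+    //
+    //   vk::DescriptorPoolInlineUniformBlockCreateInfo inlineInfo( 4 /* maxInlineUniformBlockBindings */ );
+    //   vk::DescriptorPoolSize       poolSize( vk::DescriptorType::eInlineUniformBlock, 256 /* bytes */ );
+    //   vk::DescriptorPoolCreateInfo poolInfo( {}, 16 /* maxSets */, 1, &poolSize, &inlineInfo );
+    //   vk::DescriptorPool           pool = device.createDescriptorPool( poolInfo );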
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & + setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT + { + maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorPoolInlineUniformBlockCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorPoolInlineUniformBlockCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorPoolInlineUniformBlockCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorPoolInlineUniformBlockCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxInlineUniformBlockBindings ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfo const & ) const = default; +#else + bool operator==( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings ); +# endif + } + + bool operator!=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo; + const void * pNext = {}; + uint32_t maxInlineUniformBlockBindings = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorPoolInlineUniformBlockCreateInfo; + }; + + using DescriptorPoolInlineUniformBlockCreateInfoEXT = DescriptorPoolInlineUniformBlockCreateInfo; + + // wrapper struct for struct VkDescriptorSetAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetAllocateInfo.html + struct DescriptorSetAllocateInfo + { + using NativeType = VkDescriptorSetAllocateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, + uint32_t descriptorSetCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , descriptorPool{ descriptorPool_ } + , descriptorSetCount{ descriptorSetCount_ } + , pSetLayouts{ pSetLayouts_ } + { + } + + VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetAllocateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), 
descriptorPool( descriptorPool_ ), descriptorSetCount( static_cast( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DescriptorSetAllocateInfo & operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT + { + descriptorPool = descriptorPool_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = descriptorSetCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pSetLayouts = pSetLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetAllocateInfo & + setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = static_cast( setLayouts_.size() ); + pSetLayouts = setLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorSetAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetAllocateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorSetAllocateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorPool, descriptorSetCount, pSetLayouts ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetAllocateInfo const & ) const = default; +#else + bool operator==( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorPool == rhs.descriptorPool ) && ( descriptorSetCount == rhs.descriptorSetCount ) && + ( pSetLayouts == rhs.pSetLayouts ); +# endif + } + + bool operator!=( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {}; + uint32_t descriptorSetCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorSetAllocateInfo; + }; + + // wrapper struct for struct VkDescriptorSetBindingReferenceVALVE, 
see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetBindingReferenceVALVE.html + struct DescriptorSetBindingReferenceVALVE + { + using NativeType = VkDescriptorSetBindingReferenceVALVE; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetBindingReferenceVALVE; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, + uint32_t binding_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , descriptorSetLayout{ descriptorSetLayout_ } + , binding{ binding_ } + { + } + + VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetBindingReferenceVALVE( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetBindingReferenceVALVE( *reinterpret_cast( &rhs ) ) + { + } + + DescriptorSetBindingReferenceVALVE & operator=( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorSetBindingReferenceVALVE & operator=( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & + setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetLayout = descriptorSetLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorSetBindingReferenceVALVE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetBindingReferenceVALVE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetBindingReferenceVALVE const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorSetBindingReferenceVALVE *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorSetLayout, binding ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetBindingReferenceVALVE const & ) const = default; +#else + bool operator==( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( binding == rhs.binding ); +# endif + } + + bool operator!=( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
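+    // Note (VK_VALVE_descriptor_set_host_mapping): this structure names one binding of a
+    // descriptor set layout; the driver reports where that binding lives relative to the set's
+    // host mapping via DescriptorSetLayoutHostMappingInfoVALVE (defined further below).
+    // Hypothetical sketch -- "device", "setLayout" and "descriptorSet" are illustrative assumptions:
+    //
+    //   vk::DescriptorSetBindingReferenceVALVE bindingRef( setLayout, 0 /* binding */ );
+    //   vk::DescriptorSetLayoutHostMappingInfoVALVE mapping =
+    //       device.getDescriptorSetLayoutHostMappingInfoVALVE( bindingRef );
+    //   // mapping.descriptorOffset / mapping.descriptorSize then locate the binding within the
+    //   // pointer returned by device.getDescriptorSetHostMappingVALVE( descriptorSet ).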
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetBindingReferenceVALVE; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {}; + uint32_t binding = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorSetBindingReferenceVALVE; + }; + + // wrapper struct for struct VkDescriptorSetLayoutBinding, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutBinding.html + struct DescriptorSetLayoutBinding + { + using NativeType = VkDescriptorSetLayoutBinding; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( uint32_t binding_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT + : binding{ binding_ } + , descriptorType{ descriptorType_ } + , descriptorCount{ descriptorCount_ } + , stageFlags{ stageFlags_ } + , pImmutableSamplers{ pImmutableSamplers_ } + { + } + + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutBinding( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBinding( uint32_t binding_, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & immutableSamplers_ ) + : binding( binding_ ) + , descriptorType( descriptorType_ ) + , descriptorCount( static_cast( immutableSamplers_.size() ) ) + , stageFlags( stageFlags_ ) + , pImmutableSamplers( immutableSamplers_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DescriptorSetLayoutBinding & operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + { + stageFlags = stageFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT + { + pImmutableSamplers = pImmutableSamplers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBinding & + setImmutableSamplers( 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & immutableSamplers_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( immutableSamplers_.size() ); + pImmutableSamplers = immutableSamplers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorSetLayoutBinding const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutBinding const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutBinding *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( binding, descriptorType, descriptorCount, stageFlags, pImmutableSamplers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutBinding const & ) const = default; +#else + bool operator==( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( binding == rhs.binding ) && ( descriptorType == rhs.descriptorType ) && ( descriptorCount == rhs.descriptorCount ) && + ( stageFlags == rhs.stageFlags ) && ( pImmutableSamplers == rhs.pImmutableSamplers ); +# endif + } + + bool operator!=( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers = {}; + }; + + // wrapper struct for struct VkDescriptorSetLayoutBindingFlagsCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutBindingFlagsCreateInfo.html + struct DescriptorSetLayoutBindingFlagsCreateInfo + { + using NativeType = VkDescriptorSetLayoutBindingFlagsCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( uint32_t bindingCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , bindingCount{ bindingCount_ } + , pBindingFlags{ pBindingFlags_ } + { + } + + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutBindingFlagsCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBindingFlagsCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindingFlags_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), bindingCount( static_cast( bindingFlags_.size() 
) ), pBindingFlags( bindingFlags_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DescriptorSetLayoutBindingFlagsCreateInfo & operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT + { + bindingCount = bindingCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & + setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ ) VULKAN_HPP_NOEXCEPT + { + pBindingFlags = pBindingFlags_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindingFlags_ ) VULKAN_HPP_NOEXCEPT + { + bindingCount = static_cast( bindingFlags_.size() ); + pBindingFlags = bindingFlags_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorSetLayoutBindingFlagsCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutBindingFlagsCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutBindingFlagsCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, bindingCount, pBindingFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const & ) const = default; +#else + bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bindingCount == rhs.bindingCount ) && ( pBindingFlags == rhs.pBindingFlags ); +# endif + } + + bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; + const void * pNext = {}; + uint32_t bindingCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorSetLayoutBindingFlagsCreateInfo; + }; + + using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo; + + // wrapper struct for struct VkDescriptorSetLayoutCreateInfo, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutCreateInfo.html + struct DescriptorSetLayoutCreateInfo + { + using NativeType = VkDescriptorSetLayoutCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, + uint32_t bindingCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , bindingCount{ bindingCount_ } + , pBindings{ pBindings_ } + { + } + + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindings_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ), bindingCount( static_cast( bindings_.size() ) ), pBindings( bindings_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DescriptorSetLayoutCreateInfo & operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT + { + bindingCount = bindingCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & + setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ ) VULKAN_HPP_NOEXCEPT + { + pBindings = pBindings_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutCreateInfo & + setBindings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindings_ ) VULKAN_HPP_NOEXCEPT + { + bindingCount = static_cast( bindings_.size() ); + pBindings = bindings_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorSetLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, bindingCount, pBindings ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutCreateInfo const & ) const = default; +#else + bool operator==( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( bindingCount == rhs.bindingCount ) && + ( pBindings == rhs.pBindings ); +# endif + } + + bool operator!=( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {}; + uint32_t bindingCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorSetLayoutCreateInfo; + }; + + // wrapper struct for struct VkDescriptorSetLayoutHostMappingInfoVALVE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutHostMappingInfoVALVE.html + struct DescriptorSetLayoutHostMappingInfoVALVE + { + using NativeType = VkDescriptorSetLayoutHostMappingInfoVALVE; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DescriptorSetLayoutHostMappingInfoVALVE( size_t descriptorOffset_ = {}, uint32_t descriptorSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , descriptorOffset{ descriptorOffset_ } + , descriptorSize{ descriptorSize_ } + { + } + + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutHostMappingInfoVALVE( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutHostMappingInfoVALVE( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutHostMappingInfoVALVE( *reinterpret_cast( &rhs ) ) + { + } + + DescriptorSetLayoutHostMappingInfoVALVE & operator=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorSetLayoutHostMappingInfoVALVE & operator=( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setDescriptorOffset( size_t descriptorOffset_ ) VULKAN_HPP_NOEXCEPT + { + descriptorOffset = descriptorOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setDescriptorSize( uint32_t descriptorSize_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSize = descriptorSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + 
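+    // For reference, the layout/allocation structures defined above are typically combined as
+    // follows (hypothetical sketch; "device" and "pool" are illustrative assumptions, not part
+    // of this header):
+    //
+    //   vk::DescriptorSetLayoutBinding    binding( 0, vk::DescriptorType::eUniformBuffer, 1,
+    //                                              vk::ShaderStageFlagBits::eVertex );
+    //   vk::DescriptorSetLayoutCreateInfo layoutInfo( {}, 1, &binding );
+    //   vk::DescriptorSetLayout           layout = device.createDescriptorSetLayout( layoutInfo );
+    //   vk::DescriptorSetAllocateInfo     allocInfo( pool, 1, &layout );
+    //   std::vector<vk::DescriptorSet>    sets = device.allocateDescriptorSets( allocInfo );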
operator VkDescriptorSetLayoutHostMappingInfoVALVE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutHostMappingInfoVALVE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutHostMappingInfoVALVE const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutHostMappingInfoVALVE *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorOffset, descriptorSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutHostMappingInfoVALVE const & ) const = default; +#else + bool operator==( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorOffset == rhs.descriptorOffset ) && ( descriptorSize == rhs.descriptorSize ); +# endif + } + + bool operator!=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE; + void * pNext = {}; + size_t descriptorOffset = {}; + uint32_t descriptorSize = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorSetLayoutHostMappingInfoVALVE; + }; + + // wrapper struct for struct VkDescriptorSetLayoutSupport, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutSupport.html + struct DescriptorSetLayoutSupport + { + using NativeType = VkDescriptorSetLayoutSupport; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( VULKAN_HPP_NAMESPACE::Bool32 supported_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , supported{ supported_ } + { + } + + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutSupport( *reinterpret_cast( &rhs ) ) + { + } + + DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDescriptorSetLayoutSupport const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutSupport const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutSupport *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + 
auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, supported ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutSupport const & ) const = default; +#else + bool operator==( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supported == rhs.supported ); +# endif + } + + bool operator!=( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supported = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorSetLayoutSupport; + }; + + using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport; + + // wrapper struct for struct VkDescriptorSetVariableDescriptorCountAllocateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetVariableDescriptorCountAllocateInfo.html + struct DescriptorSetVariableDescriptorCountAllocateInfo + { + using NativeType = VkDescriptorSetVariableDescriptorCountAllocateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t descriptorSetCount_ = {}, + const uint32_t * pDescriptorCounts_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , descriptorSetCount{ descriptorSetCount_ } + , pDescriptorCounts{ pDescriptorCounts_ } + { + } + + VULKAN_HPP_CONSTEXPR + DescriptorSetVariableDescriptorCountAllocateInfo( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetVariableDescriptorCountAllocateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetVariableDescriptorCountAllocateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorCounts_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), descriptorSetCount( static_cast( descriptorCounts_.size() ) ), pDescriptorCounts( descriptorCounts_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DescriptorSetVariableDescriptorCountAllocateInfo & operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ 
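DescriptorSetLayoutSupport, completed above, is an output structure: it lets the application ask whether a proposed layout is actually creatable before creating it. A sketch, reusing the hypothetical `device` and `layoutInfo` from the earlier example:

    vk::DescriptorSetLayoutSupport support = device.getDescriptorSetLayoutSupport( layoutInfo );
    if ( !support.supported )
    {
      // the layout exceeds implementation limits; shrink it or split it across descriptor sets
    }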
) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = descriptorSetCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPDescriptorCounts( const uint32_t * pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT + { + pDescriptorCounts = pDescriptorCounts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetVariableDescriptorCountAllocateInfo & + setDescriptorCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = static_cast( descriptorCounts_.size() ); + pDescriptorCounts = descriptorCounts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorSetVariableDescriptorCountAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetVariableDescriptorCountAllocateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorSetVariableDescriptorCountAllocateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorSetCount, pDescriptorCounts ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const & ) const = default; +#else + bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetCount == rhs.descriptorSetCount ) && + ( pDescriptorCounts == rhs.pDescriptorCounts ); +# endif + } + + bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; + const void * pNext = {}; + uint32_t descriptorSetCount = {}; + const uint32_t * pDescriptorCounts = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorSetVariableDescriptorCountAllocateInfo; + }; + + using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo; + + // wrapper struct for struct VkDescriptorSetVariableDescriptorCountLayoutSupport, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetVariableDescriptorCountLayoutSupport.html + struct DescriptorSetVariableDescriptorCountLayoutSupport + { + using NativeType = VkDescriptorSetVariableDescriptorCountLayoutSupport; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxVariableDescriptorCount{ 
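DescriptorSetVariableDescriptorCountAllocateInfo is consumed through the pNext chain of the regular descriptor set allocation structure (VkDescriptorSetAllocateInfo). A sketch, assuming a `vk::DescriptorPool pool` and a `vk::DescriptorSetLayout setLayout` whose last binding was declared with a variable descriptor count:

    uint32_t variableCount = 64;  // descriptors actually used by the variable-sized binding (illustrative)
    vk::DescriptorSetVariableDescriptorCountAllocateInfo variableInfo{};
    variableInfo.setDescriptorCounts( variableCount );  // one entry per set being allocated

    vk::DescriptorSetAllocateInfo allocInfo{};
    allocInfo.setDescriptorPool( pool ).setSetLayouts( setLayout ).setPNext( &variableInfo );

    std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );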
maxVariableDescriptorCount_ } + { + } + + VULKAN_HPP_CONSTEXPR + DescriptorSetVariableDescriptorCountLayoutSupport( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetVariableDescriptorCountLayoutSupport( *reinterpret_cast( &rhs ) ) + { + } + + DescriptorSetVariableDescriptorCountLayoutSupport & + operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorSetVariableDescriptorCountLayoutSupport & operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDescriptorSetVariableDescriptorCountLayoutSupport const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetVariableDescriptorCountLayoutSupport const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorSetVariableDescriptorCountLayoutSupport *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxVariableDescriptorCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const & ) const = default; +#else + bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount ); +# endif + } + + bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; + void * pNext = {}; + uint32_t maxVariableDescriptorCount = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorSetVariableDescriptorCountLayoutSupport; + }; + + using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport; + + // wrapper struct for struct VkDescriptorUpdateTemplateEntry, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorUpdateTemplateEntry.html + struct DescriptorUpdateTemplateEntry + { + using NativeType = VkDescriptorUpdateTemplateEntry; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + size_t offset_ = {}, + size_t stride_ = {} ) VULKAN_HPP_NOEXCEPT + : dstBinding{ dstBinding_ } + , dstArrayElement{ dstArrayElement_ } + , descriptorCount{ descriptorCount_ } + , descriptorType{ descriptorType_ } + , 
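DescriptorSetVariableDescriptorCountLayoutSupport, completed above, is likewise an output structure; chained behind DescriptorSetLayoutSupport it reports the largest variable descriptor count the layout could be created with. A sketch using the structure-chain form of the query (same hypothetical `layoutInfo` as before):

    auto supportChain = device.getDescriptorSetLayoutSupport<vk::DescriptorSetLayoutSupport,
                                                             vk::DescriptorSetVariableDescriptorCountLayoutSupport>( layoutInfo );
    uint32_t maxVariable = supportChain.get<vk::DescriptorSetVariableDescriptorCountLayoutSupport>().maxVariableDescriptorCount;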
offset{ offset_ } + , stride{ stride_ } + { + } + + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorUpdateTemplateEntry( *reinterpret_cast( &rhs ) ) + { + } + + DescriptorUpdateTemplateEntry & operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT + { + dstBinding = dstBinding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT + { + dstArrayElement = dstArrayElement_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setOffset( size_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setStride( size_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorUpdateTemplateEntry const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorUpdateTemplateEntry const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorUpdateTemplateEntry *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( dstBinding, dstArrayElement, descriptorCount, descriptorType, offset, stride ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorUpdateTemplateEntry const & ) const = default; +#else + bool operator==( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ) && + ( descriptorType == rhs.descriptorType ) && ( offset == rhs.offset ) && ( stride == rhs.stride ); +# endif + } + + bool operator!=( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + size_t offset = {}; + 
size_t stride = {}; + }; + + using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; + + // wrapper struct for struct VkDescriptorUpdateTemplateCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorUpdateTemplateCreateInfo.html + struct DescriptorUpdateTemplateCreateInfo + { + using NativeType = VkDescriptorUpdateTemplateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorUpdateTemplateCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, + uint32_t descriptorUpdateEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + uint32_t set_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , descriptorUpdateEntryCount{ descriptorUpdateEntryCount_ } + , pDescriptorUpdateEntries{ pDescriptorUpdateEntries_ } + , templateType{ templateType_ } + , descriptorSetLayout{ descriptorSetLayout_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , pipelineLayout{ pipelineLayout_ } + , set{ set_ } + { + } + + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorUpdateTemplateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorUpdateTemplateCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorUpdateEntries_, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + uint32_t set_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , descriptorUpdateEntryCount( static_cast( descriptorUpdateEntries_.size() ) ) + , pDescriptorUpdateEntries( descriptorUpdateEntries_.data() ) + , templateType( templateType_ ) + , descriptorSetLayout( descriptorSetLayout_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , pipelineLayout( pipelineLayout_ ) + , set( set_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DescriptorUpdateTemplateCreateInfo & operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorUpdateTemplateCreateInfo & operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) 
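Each DescriptorUpdateTemplateEntry maps one descriptor binding onto a byte range inside an application-owned struct that is later handed to the update-with-template call; offset and stride are plain byte offsets into that struct. A sketch (illustrative names):

    struct HostDescriptorData
    {
      vk::DescriptorBufferInfo uniformBuffer;  // feeds binding 0
    };

    vk::DescriptorUpdateTemplateEntry entry( /*dstBinding*/ 0,
                                             /*dstArrayElement*/ 0,
                                             /*descriptorCount*/ 1,
                                             vk::DescriptorType::eUniformBuffer,
                                             offsetof( HostDescriptorData, uniformBuffer ),
                                             sizeof( vk::DescriptorBufferInfo ) );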
&& !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorUpdateEntryCount = descriptorUpdateEntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setPDescriptorUpdateEntries( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT + { + pDescriptorUpdateEntries = pDescriptorUpdateEntries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorUpdateEntries_ ) + VULKAN_HPP_NOEXCEPT + { + descriptorUpdateEntryCount = static_cast( descriptorUpdateEntries_.size() ); + pDescriptorUpdateEntries = descriptorUpdateEntries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT + { + templateType = templateType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetLayout = descriptorSetLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + pipelineLayout = pipelineLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT + { + set = set_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorUpdateTemplateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorUpdateTemplateCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDescriptorUpdateTemplateCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, flags, descriptorUpdateEntryCount, pDescriptorUpdateEntries, templateType, descriptorSetLayout, pipelineBindPoint, pipelineLayout, set ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorUpdateTemplateCreateInfo const & ) const = default; +#else + bool operator==( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if 
defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount ) && + ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries ) && ( templateType == rhs.templateType ) && + ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipelineLayout == rhs.pipelineLayout ) && + ( set == rhs.set ); +# endif + } + + bool operator!=( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {}; + uint32_t descriptorUpdateEntryCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet; + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; + uint32_t set = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorUpdateTemplateCreateInfo; + }; + + using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo; + + // wrapper struct for struct VkDeviceAddressBindingCallbackDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceAddressBindingCallbackDataEXT.html + struct DeviceAddressBindingCallbackDataEXT + { + using NativeType = VkDeviceAddressBindingCallbackDataEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceAddressBindingCallbackDataEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceAddressBindingCallbackDataEXT( + VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType_ = VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT::eBind, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , baseAddress{ baseAddress_ } + , size{ size_ } + , bindingType{ bindingType_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceAddressBindingCallbackDataEXT( DeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceAddressBindingCallbackDataEXT( VkDeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceAddressBindingCallbackDataEXT( *reinterpret_cast( &rhs ) ) + { + } + + DeviceAddressBindingCallbackDataEXT & operator=( DeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceAddressBindingCallbackDataEXT & operator=( VkDeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setPNext( void * pNext_ 
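Putting the pieces together: the DescriptorUpdateTemplateCreateInfo completed above takes the entries (here via its ArrayProxy constructor) plus the layout the template updates. A sketch, reusing the hypothetical `entry`, `setLayout`, and `HostDescriptorData` from the previous examples and assuming a `vk::DescriptorSet descriptorSet` and `vk::Buffer uniformBuffer` already exist:

    vk::DescriptorUpdateTemplateCreateInfo templateInfo( {}, entry,
                                                         vk::DescriptorUpdateTemplateType::eDescriptorSet,
                                                         setLayout );
    vk::DescriptorUpdateTemplate updateTemplate = device.createDescriptorUpdateTemplate( templateInfo );

    HostDescriptorData data{ vk::DescriptorBufferInfo( uniformBuffer, 0, VK_WHOLE_SIZE ) };
    device.updateDescriptorSetWithTemplate( descriptorSet, updateTemplate, data );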
) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setFlags( VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setBaseAddress( VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress_ ) VULKAN_HPP_NOEXCEPT + { + baseAddress = baseAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & + setBindingType( VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType_ ) VULKAN_HPP_NOEXCEPT + { + bindingType = bindingType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceAddressBindingCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceAddressBindingCallbackDataEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceAddressBindingCallbackDataEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceAddressBindingCallbackDataEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, baseAddress, size, bindingType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceAddressBindingCallbackDataEXT const & ) const = default; +#else + bool operator==( DeviceAddressBindingCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( baseAddress == rhs.baseAddress ) && ( size == rhs.size ) && + ( bindingType == rhs.bindingType ); +# endif + } + + bool operator!=( DeviceAddressBindingCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceAddressBindingCallbackDataEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType = VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT::eBind; + }; + + template <> + struct CppType + { + using Type = DeviceAddressBindingCallbackDataEXT; + }; + + // wrapper struct for struct VkDeviceBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceBufferMemoryRequirements.html + struct DeviceBufferMemoryRequirements + { + using NativeType = VkDeviceBufferMemoryRequirements; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceBufferMemoryRequirements; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ 
pNext_ } + , pCreateInfo{ pCreateInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceBufferMemoryRequirements( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceBufferMemoryRequirements( *reinterpret_cast( &rhs ) ) + { + } + + DeviceBufferMemoryRequirements & operator=( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceBufferMemoryRequirements & operator=( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPCreateInfo( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT + { + pCreateInfo = pCreateInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceBufferMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceBufferMemoryRequirements &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceBufferMemoryRequirements const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceBufferMemoryRequirements *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pCreateInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceBufferMemoryRequirements const & ) const = default; +#else + bool operator==( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ); +# endif + } + + bool operator!=( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceBufferMemoryRequirements; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo = {}; + }; + + template <> + struct CppType + { + using Type = DeviceBufferMemoryRequirements; + }; + + using DeviceBufferMemoryRequirementsKHR = DeviceBufferMemoryRequirements; + + // wrapper struct for struct VkDeviceQueueCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueCreateInfo.html + struct DeviceQueueCreateInfo + { + using NativeType = VkDeviceQueueCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, + uint32_t queueFamilyIndex_ = {}, + uint32_t queueCount_ = {}, + const float * pQueuePriorities_ = {}, + const void * pNext_ = nullptr ) 
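DeviceBufferMemoryRequirements, completed above, lets the application query a buffer's memory requirements from nothing but its create-info, without creating the buffer first (the maintenance4 / Vulkan 1.3 path). A sketch:

    vk::BufferCreateInfo bufferInfo( {}, 65536, vk::BufferUsageFlagBits::eStorageBuffer );

    vk::DeviceBufferMemoryRequirements query{};
    query.setPCreateInfo( &bufferInfo );

    vk::MemoryRequirements2 reqs = device.getBufferMemoryRequirements( query );  // wraps vkGetDeviceBufferMemoryRequirements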
VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , queueFamilyIndex{ queueFamilyIndex_ } + , queueCount{ queueCount_ } + , pQueuePriorities{ pQueuePriorities_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceQueueCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_, + uint32_t queueFamilyIndex_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queuePriorities_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , queueCount( static_cast( queuePriorities_.size() ) ) + , pQueuePriorities( queuePriorities_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) VULKAN_HPP_NOEXCEPT + { + queueCount = queueCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPQueuePriorities( const float * pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT + { + pQueuePriorities = pQueuePriorities_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceQueueCreateInfo & setQueuePriorities( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT + { + queueCount = static_cast( queuePriorities_.size() ); + pQueuePriorities = queuePriorities_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceQueueCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceQueueCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceQueueCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, queueFamilyIndex, queueCount, pQueuePriorities ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceQueueCreateInfo const & ) const = default; +#else + bool operator==( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + 
return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && + ( queueCount == rhs.queueCount ) && ( pQueuePriorities == rhs.pQueuePriorities ); +# endif + } + + bool operator!=( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; + uint32_t queueFamilyIndex = {}; + uint32_t queueCount = {}; + const float * pQueuePriorities = {}; + }; + + template <> + struct CppType + { + using Type = DeviceQueueCreateInfo; + }; + + // wrapper struct for struct VkPhysicalDeviceFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFeatures.html + struct PhysicalDeviceFeatures + { + using NativeType = VkPhysicalDeviceFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 
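DeviceQueueCreateInfo, completed above, is what requests queues at logical-device creation time; its ArrayProxy constructor derives queueCount from the priority list. A sketch, assuming `graphicsQueueFamilyIndex` was picked from the physical device's queue family properties:

    float priority = 1.0f;
    vk::DeviceQueueCreateInfo queueInfo( {}, graphicsQueueFamilyIndex, priority );  // one queue, priority 1.0

    // queueInfo is then passed to vk::DeviceCreateInfo (setQueueCreateInfos) when creating the vk::Device.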
shaderFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT + : robustBufferAccess{ robustBufferAccess_ } + , fullDrawIndexUint32{ fullDrawIndexUint32_ } + , imageCubeArray{ imageCubeArray_ } + , independentBlend{ independentBlend_ } + , geometryShader{ geometryShader_ } + , tessellationShader{ tessellationShader_ } + , sampleRateShading{ sampleRateShading_ } + , dualSrcBlend{ dualSrcBlend_ } + , logicOp{ logicOp_ } + , multiDrawIndirect{ multiDrawIndirect_ } + , drawIndirectFirstInstance{ drawIndirectFirstInstance_ } + , depthClamp{ depthClamp_ } + , depthBiasClamp{ depthBiasClamp_ } + , fillModeNonSolid{ fillModeNonSolid_ } + , depthBounds{ depthBounds_ } + , wideLines{ wideLines_ } + , largePoints{ largePoints_ } + , alphaToOne{ alphaToOne_ } + , multiViewport{ multiViewport_ } + , samplerAnisotropy{ samplerAnisotropy_ } + , textureCompressionETC2{ textureCompressionETC2_ } + , textureCompressionASTC_LDR{ textureCompressionASTC_LDR_ } + , textureCompressionBC{ textureCompressionBC_ } + , occlusionQueryPrecise{ occlusionQueryPrecise_ } + , pipelineStatisticsQuery{ pipelineStatisticsQuery_ } + , vertexPipelineStoresAndAtomics{ vertexPipelineStoresAndAtomics_ } + , fragmentStoresAndAtomics{ fragmentStoresAndAtomics_ } + , shaderTessellationAndGeometryPointSize{ shaderTessellationAndGeometryPointSize_ } + , shaderImageGatherExtended{ shaderImageGatherExtended_ } + , shaderStorageImageExtendedFormats{ shaderStorageImageExtendedFormats_ } + , shaderStorageImageMultisample{ shaderStorageImageMultisample_ } + , shaderStorageImageReadWithoutFormat{ shaderStorageImageReadWithoutFormat_ } + , shaderStorageImageWriteWithoutFormat{ shaderStorageImageWriteWithoutFormat_ } + , shaderUniformBufferArrayDynamicIndexing{ shaderUniformBufferArrayDynamicIndexing_ } + , shaderSampledImageArrayDynamicIndexing{ shaderSampledImageArrayDynamicIndexing_ } + , shaderStorageBufferArrayDynamicIndexing{ shaderStorageBufferArrayDynamicIndexing_ } + , shaderStorageImageArrayDynamicIndexing{ shaderStorageImageArrayDynamicIndexing_ } + , shaderClipDistance{ shaderClipDistance_ } + , shaderCullDistance{ shaderCullDistance_ } + , shaderFloat64{ shaderFloat64_ } + , shaderInt64{ shaderInt64_ } + , shaderInt16{ shaderInt16_ } + , shaderResourceResidency{ shaderResourceResidency_ } + , shaderResourceMinLod{ shaderResourceMinLod_ } + , sparseBinding{ sparseBinding_ } + , sparseResidencyBuffer{ sparseResidencyBuffer_ } + , sparseResidencyImage2D{ sparseResidencyImage2D_ } + , sparseResidencyImage3D{ sparseResidencyImage3D_ } + , sparseResidency2Samples{ sparseResidency2Samples_ } + , sparseResidency4Samples{ sparseResidency4Samples_ } + , sparseResidency8Samples{ 
sparseResidency8Samples_ } + , sparseResidency16Samples{ sparseResidency16Samples_ } + , sparseResidencyAliased{ sparseResidencyAliased_ } + , variableMultisampleRate{ variableMultisampleRate_ } + , inheritedQueries{ inheritedQueries_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT + { + robustBufferAccess = robustBufferAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT + { + fullDrawIndexUint32 = fullDrawIndexUint32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT + { + imageCubeArray = imageCubeArray_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT + { + independentBlend = independentBlend_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT + { + geometryShader = geometryShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT + { + tessellationShader = tessellationShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT + { + sampleRateShading = sampleRateShading_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT + { + dualSrcBlend = dualSrcBlend_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT + { + logicOp = logicOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT + { + multiDrawIndirect = multiDrawIndirect_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT + { + drawIndirectFirstInstance = drawIndirectFirstInstance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT + { + depthClamp = depthClamp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasClamp = depthBiasClamp_; + return 
*this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT + { + fillModeNonSolid = fillModeNonSolid_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT + { + depthBounds = depthBounds_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT + { + wideLines = wideLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT + { + largePoints = largePoints_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT + { + alphaToOne = alphaToOne_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT + { + multiViewport = multiViewport_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT + { + samplerAnisotropy = samplerAnisotropy_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionETC2 = textureCompressionETC2_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionASTC_LDR = textureCompressionASTC_LDR_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionBC = textureCompressionBC_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT + { + occlusionQueryPrecise = occlusionQueryPrecise_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStatisticsQuery = pipelineStatisticsQuery_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setVertexPipelineStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT + { + vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT + { + fragmentStoresAndAtomics = fragmentStoresAndAtomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderTessellationAndGeometryPointSize( VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT + { + shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageGatherExtended = shaderImageGatherExtended_; + 
return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderStorageImageExtendedFormats( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderStorageImageMultisample( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageMultisample = shaderStorageImageMultisample_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderStorageImageReadWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderStorageImageWriteWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderUniformBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderSampledImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderStorageBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderStorageImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT + { + shaderClipDistance = shaderClipDistance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT + { + shaderCullDistance = shaderCullDistance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloat64 = shaderFloat64_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT + { + shaderInt64 = shaderInt64_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT + { + shaderInt16 = shaderInt16_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT + { + shaderResourceResidency = shaderResourceResidency_; + return 
*this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT + { + shaderResourceMinLod = shaderResourceMinLod_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT + { + sparseBinding = sparseBinding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidencyBuffer = sparseResidencyBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidencyImage2D = sparseResidencyImage2D_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidencyImage3D = sparseResidencyImage3D_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidency2Samples = sparseResidency2Samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidency4Samples = sparseResidency4Samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidency8Samples = sparseResidency8Samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidency16Samples = sparseResidency16Samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidencyAliased = sparseResidencyAliased_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT + { + variableMultisampleRate = variableMultisampleRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT + { + inheritedQueries = inheritedQueries_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( robustBufferAccess, + fullDrawIndexUint32, + imageCubeArray, + independentBlend, + geometryShader, + tessellationShader, + sampleRateShading, + dualSrcBlend, + 
logicOp, + multiDrawIndirect, + drawIndirectFirstInstance, + depthClamp, + depthBiasClamp, + fillModeNonSolid, + depthBounds, + wideLines, + largePoints, + alphaToOne, + multiViewport, + samplerAnisotropy, + textureCompressionETC2, + textureCompressionASTC_LDR, + textureCompressionBC, + occlusionQueryPrecise, + pipelineStatisticsQuery, + vertexPipelineStoresAndAtomics, + fragmentStoresAndAtomics, + shaderTessellationAndGeometryPointSize, + shaderImageGatherExtended, + shaderStorageImageExtendedFormats, + shaderStorageImageMultisample, + shaderStorageImageReadWithoutFormat, + shaderStorageImageWriteWithoutFormat, + shaderUniformBufferArrayDynamicIndexing, + shaderSampledImageArrayDynamicIndexing, + shaderStorageBufferArrayDynamicIndexing, + shaderStorageImageArrayDynamicIndexing, + shaderClipDistance, + shaderCullDistance, + shaderFloat64, + shaderInt64, + shaderInt16, + shaderResourceResidency, + shaderResourceMinLod, + sparseBinding, + sparseResidencyBuffer, + sparseResidencyImage2D, + sparseResidencyImage3D, + sparseResidency2Samples, + sparseResidency4Samples, + sparseResidency8Samples, + sparseResidency16Samples, + sparseResidencyAliased, + variableMultisampleRate, + inheritedQueries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( robustBufferAccess == rhs.robustBufferAccess ) && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 ) && + ( imageCubeArray == rhs.imageCubeArray ) && ( independentBlend == rhs.independentBlend ) && ( geometryShader == rhs.geometryShader ) && + ( tessellationShader == rhs.tessellationShader ) && ( sampleRateShading == rhs.sampleRateShading ) && ( dualSrcBlend == rhs.dualSrcBlend ) && + ( logicOp == rhs.logicOp ) && ( multiDrawIndirect == rhs.multiDrawIndirect ) && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance ) && + ( depthClamp == rhs.depthClamp ) && ( depthBiasClamp == rhs.depthBiasClamp ) && ( fillModeNonSolid == rhs.fillModeNonSolid ) && + ( depthBounds == rhs.depthBounds ) && ( wideLines == rhs.wideLines ) && ( largePoints == rhs.largePoints ) && ( alphaToOne == rhs.alphaToOne ) && + ( multiViewport == rhs.multiViewport ) && ( samplerAnisotropy == rhs.samplerAnisotropy ) && + ( textureCompressionETC2 == rhs.textureCompressionETC2 ) && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR ) && + ( textureCompressionBC == rhs.textureCompressionBC ) && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise ) && + ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery ) && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics ) && + ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics ) && + ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize ) && + ( shaderImageGatherExtended == rhs.shaderImageGatherExtended ) && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats ) && + ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample ) && + ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat ) && + ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat ) && + ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing ) && + ( shaderSampledImageArrayDynamicIndexing == 
rhs.shaderSampledImageArrayDynamicIndexing ) && + ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing ) && + ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) && ( shaderClipDistance == rhs.shaderClipDistance ) && + ( shaderCullDistance == rhs.shaderCullDistance ) && ( shaderFloat64 == rhs.shaderFloat64 ) && ( shaderInt64 == rhs.shaderInt64 ) && + ( shaderInt16 == rhs.shaderInt16 ) && ( shaderResourceResidency == rhs.shaderResourceResidency ) && + ( shaderResourceMinLod == rhs.shaderResourceMinLod ) && ( sparseBinding == rhs.sparseBinding ) && + ( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) && + ( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) && ( sparseResidency2Samples == rhs.sparseResidency2Samples ) && + ( sparseResidency4Samples == rhs.sparseResidency4Samples ) && ( sparseResidency8Samples == rhs.sparseResidency8Samples ) && + ( sparseResidency16Samples == rhs.sparseResidency16Samples ) && ( sparseResidencyAliased == rhs.sparseResidencyAliased ) && + ( variableMultisampleRate == rhs.variableMultisampleRate ) && ( inheritedQueries == rhs.inheritedQueries ); +# endif + } + + bool operator!=( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {}; + VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 logicOp = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {}; + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {}; + VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {}; + VULKAN_HPP_NAMESPACE::Bool32 wideLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 largePoints = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {}; + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 
shaderStorageImageArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {}; + VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {}; + VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {}; + }; + + // wrapper struct for struct VkDeviceCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceCreateInfo.html + struct DeviceCreateInfo + { + using NativeType = VkDeviceCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, + uint32_t queueCreateInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ = {}, + uint32_t enabledLayerCount_ = {}, + const char * const * ppEnabledLayerNames_ = {}, + uint32_t enabledExtensionCount_ = {}, + const char * const * ppEnabledExtensionNames_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , queueCreateInfoCount{ queueCreateInfoCount_ } + , pQueueCreateInfos{ pQueueCreateInfos_ } + , enabledLayerCount{ enabledLayerCount_ } + , ppEnabledLayerNames{ ppEnabledLayerNames_ } + , enabledExtensionCount{ enabledExtensionCount_ } + , ppEnabledExtensionNames{ ppEnabledExtensionNames_ } + , pEnabledFeatures{ pEnabledFeatures_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceCreateInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueCreateInfos_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , queueCreateInfoCount( static_cast( queueCreateInfos_.size() ) ) + , pQueueCreateInfos( queueCreateInfos_.data() ) + , enabledLayerCount( static_cast( pEnabledLayerNames_.size() ) ) + , ppEnabledLayerNames( pEnabledLayerNames_.data() ) + , enabledExtensionCount( static_cast( pEnabledExtensionNames_.size() ) ) + , ppEnabledExtensionNames( 
pEnabledExtensionNames_.data() ) + , pEnabledFeatures( pEnabledFeatures_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + queueCreateInfoCount = queueCreateInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & + setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT + { + pQueueCreateInfos = pQueueCreateInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo & setQueueCreateInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT + { + queueCreateInfoCount = static_cast( queueCreateInfos_.size() ); + pQueueCreateInfos = queueCreateInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT + { + enabledLayerCount = enabledLayerCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + { + ppEnabledLayerNames = ppEnabledLayerNames_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo & + setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + { + enabledLayerCount = static_cast( pEnabledLayerNames_.size() ); + ppEnabledLayerNames = pEnabledLayerNames_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT + { + enabledExtensionCount = enabledExtensionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + { + ppEnabledExtensionNames = ppEnabledExtensionNames_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo & + setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + { + enabledExtensionCount = static_cast( pEnabledExtensionNames_.size() ); + ppEnabledExtensionNames = pEnabledExtensionNames_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT + { + pEnabledFeatures = pEnabledFeatures_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + queueCreateInfoCount, + pQueueCreateInfos, + enabledLayerCount, + ppEnabledLayerNames, + enabledExtensionCount, + ppEnabledExtensionNames, + pEnabledFeatures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = queueCreateInfoCount <=> rhs.queueCreateInfoCount; cmp != 0 ) + return cmp; + if ( auto cmp = pQueueCreateInfos <=> rhs.pQueueCreateInfos; cmp != 0 ) + return cmp; + if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < enabledLayerCount; ++i ) + { + if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] ) + if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 ) + return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater; + } + if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < enabledExtensionCount; ++i ) + { + if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] ) + if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 ) + return cmp < 0 ? 
std::strong_ordering::less : std::strong_ordering::greater; + } + if ( auto cmp = pEnabledFeatures <=> rhs.pEnabledFeatures; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueCreateInfoCount == rhs.queueCreateInfoCount ) && + ( pQueueCreateInfos == rhs.pQueueCreateInfos ) && ( enabledLayerCount == rhs.enabledLayerCount ) && + std::equal( ppEnabledLayerNames, + ppEnabledLayerNames + enabledLayerCount, + rhs.ppEnabledLayerNames, + []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ) && + ( enabledExtensionCount == rhs.enabledExtensionCount ) && + std::equal( ppEnabledExtensionNames, + ppEnabledExtensionNames + enabledExtensionCount, + rhs.ppEnabledExtensionNames, + []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ) && + ( pEnabledFeatures == rhs.pEnabledFeatures ); + } + + bool operator!=( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {}; + uint32_t queueCreateInfoCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos = {}; + uint32_t enabledLayerCount = {}; + const char * const * ppEnabledLayerNames = {}; + uint32_t enabledExtensionCount = {}; + const char * const * ppEnabledExtensionNames = {}; + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures = {}; + }; + + template <> + struct CppType + { + using Type = DeviceCreateInfo; + }; + + // wrapper struct for struct VkDeviceMemoryReportCallbackDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceMemoryReportCallbackDataEXT.html + struct DeviceMemoryReportCallbackDataEXT + { + using NativeType = VkDeviceMemoryReportCallbackDataEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryReportCallbackDataEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type_ = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate, + uint64_t memoryObjectId_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, + uint64_t objectHandle_ = {}, + uint32_t heapIndex_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , type{ type_ } + , memoryObjectId{ memoryObjectId_ } + , size{ size_ } + , objectType{ objectType_ } + , objectHandle{ objectHandle_ } + , heapIndex{ heapIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryReportCallbackDataEXT( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceMemoryReportCallbackDataEXT( *reinterpret_cast( &rhs ) ) + { + } + + DeviceMemoryReportCallbackDataEXT & operator=( DeviceMemoryReportCallbackDataEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceMemoryReportCallbackDataEXT & operator=( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDeviceMemoryReportCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceMemoryReportCallbackDataEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceMemoryReportCallbackDataEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, type, memoryObjectId, size, objectType, objectHandle, heapIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemoryReportCallbackDataEXT const & ) const = default; +#else + bool operator==( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( type == rhs.type ) && ( memoryObjectId == rhs.memoryObjectId ) && + ( size == rhs.size ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) && ( heapIndex == rhs.heapIndex ); +# endif + } + + bool operator!=( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryReportCallbackDataEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate; + uint64_t memoryObjectId = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; + uint64_t objectHandle = {}; + uint32_t heapIndex = {}; + }; + + template <> + struct CppType + { + using Type = DeviceMemoryReportCallbackDataEXT; + }; + + typedef void( VKAPI_PTR * PFN_DeviceMemoryReportCallbackEXT )( const VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT * pCallbackData, + void * pUserData ); + + // wrapper struct for struct VkDeviceDeviceMemoryReportCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceDeviceMemoryReportCreateInfoEXT.html + struct DeviceDeviceMemoryReportCreateInfoEXT + { + using NativeType = VkDeviceDeviceMemoryReportCreateInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::PFN_DeviceMemoryReportCallbackEXT pfnUserCallback_ = {}, + void * pUserData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pfnUserCallback{ pfnUserCallback_ } + , pUserData{ pUserData_ } 
+ { + } + + VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceDeviceMemoryReportCreateInfoEXT( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceDeviceMemoryReportCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." ) + + DeviceDeviceMemoryReportCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_, + PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_, + void * pUserData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : DeviceDeviceMemoryReportCreateInfoEXT( flags_, + reinterpret_cast( pfnUserCallback_ ), + pUserData_, + pNext_ ) + { + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif + + DeviceDeviceMemoryReportCreateInfoEXT & operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceDeviceMemoryReportCreateInfoEXT & operator=( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & + setPfnUserCallback( VULKAN_HPP_NAMESPACE::PFN_DeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT + { + pfnUserCallback = pfnUserCallback_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT + { + pUserData = pUserData_; + return *this; + } + +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." 
) + + DeviceDeviceMemoryReportCreateInfoEXT & setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT + { + return setPfnUserCallback( reinterpret_cast( pfnUserCallback_ ) ); + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceDeviceMemoryReportCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceDeviceMemoryReportCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceDeviceMemoryReportCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pfnUserCallback, pUserData ); + } +#endif + + bool operator==( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +#if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +#else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnUserCallback == rhs.pfnUserCallback ) && + ( pUserData == rhs.pUserData ); +#endif + } + + bool operator!=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::PFN_DeviceMemoryReportCallbackEXT pfnUserCallback = {}; + void * pUserData = {}; + }; + + template <> + struct CppType + { + using Type = DeviceDeviceMemoryReportCreateInfoEXT; + }; + + // wrapper struct for struct VkDeviceDiagnosticsConfigCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceDiagnosticsConfigCreateInfoNV.html + struct DeviceDiagnosticsConfigCreateInfoNV + { + using NativeType = VkDeviceDiagnosticsConfigCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceDiagnosticsConfigCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + DeviceDiagnosticsConfigCreateInfoNV & operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceDiagnosticsConfigCreateInfoNV & operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceDiagnosticsConfigCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceDiagnosticsConfigCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceDiagnosticsConfigCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const & ) const = default; +#else + bool operator==( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {}; + }; + + template <> + struct CppType + { + using Type = DeviceDiagnosticsConfigCreateInfoNV; + }; + + // wrapper struct for struct VkDeviceEventInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceEventInfoEXT.html + struct DeviceEventInfoEXT + { + using NativeType = VkDeviceEventInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceEvent{ deviceEvent_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceEventInfoEXT( *reinterpret_cast( &rhs ) ) {} + + DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & 
setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceEvent = deviceEvent_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+ operator VkDeviceEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceEventInfoEXT *>( this );
+ }
+
+ operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceEventInfoEXT *>( this );
+ }
+
+ operator VkDeviceEventInfoEXT const *() const VULKAN_HPP_NOEXCEPT
+ {
+ return reinterpret_cast<const VkDeviceEventInfoEXT *>( this );
+ }
+
+ operator VkDeviceEventInfoEXT *() VULKAN_HPP_NOEXCEPT
+ {
+ return reinterpret_cast<VkDeviceEventInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, deviceEvent );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( DeviceEventInfoEXT const & ) const = default;
+#else
+ bool operator==( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceEvent == rhs.deviceEvent );
+# endif
+ }
+
+ bool operator!=( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT;
+ const void * pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug;
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::eDeviceEventInfoEXT>
+ {
+ using Type = DeviceEventInfoEXT;
+ };
+
+ // wrapper struct for struct VkDeviceFaultAddressInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultAddressInfoEXT.html
+ struct DeviceFaultAddressInfoEXT
+ {
+ using NativeType = VkDeviceFaultAddressInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR
+ DeviceFaultAddressInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType_ = VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT::eNone,
+ VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision_ = {} ) VULKAN_HPP_NOEXCEPT
+ : addressType{ addressType_ }
+ , reportedAddress{ reportedAddress_ }
+ , addressPrecision{ addressPrecision_ }
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR DeviceFaultAddressInfoEXT( DeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultAddressInfoEXT( VkDeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DeviceFaultAddressInfoEXT( *reinterpret_cast<DeviceFaultAddressInfoEXT const *>( &rhs ) )
+ {
+ }
+
+ DeviceFaultAddressInfoEXT & operator=( DeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+ DeviceFaultAddressInfoEXT & operator=( VkDeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setAddressType( VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType_ ) VULKAN_HPP_NOEXCEPT
+ {
+ addressType = addressType_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setReportedAddress( VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ reportedAddress = reportedAddress_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setAddressPrecision( VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision_ ) VULKAN_HPP_NOEXCEPT
+ {
+ addressPrecision = addressPrecision_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+ operator VkDeviceFaultAddressInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceFaultAddressInfoEXT *>( this );
+ }
+
+ operator VkDeviceFaultAddressInfoEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceFaultAddressInfoEXT *>( this );
+ }
+
+ operator VkDeviceFaultAddressInfoEXT const *() const VULKAN_HPP_NOEXCEPT
+ {
+ return reinterpret_cast<const VkDeviceFaultAddressInfoEXT *>( this );
+ }
+
+ operator VkDeviceFaultAddressInfoEXT *() VULKAN_HPP_NOEXCEPT
+ {
+ return reinterpret_cast<VkDeviceFaultAddressInfoEXT *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( addressType, reportedAddress, addressPrecision );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( DeviceFaultAddressInfoEXT const & ) const = default;
+#else
+ bool operator==( DeviceFaultAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( addressType == rhs.addressType ) && ( reportedAddress == rhs.reportedAddress ) && ( addressPrecision == rhs.addressPrecision );
+# endif
+ }
+
+ bool operator!=( DeviceFaultAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType = VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT::eNone;
+ VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision = {};
+ };
+
+ // wrapper struct for struct VkDeviceFaultCountsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultCountsEXT.html
+ struct DeviceFaultCountsEXT
+ {
+ using NativeType = VkDeviceFaultCountsEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceFaultCountsEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR DeviceFaultCountsEXT( uint32_t addressInfoCount_ = {},
+ uint32_t vendorInfoCount_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext{ pNext_ }
+ , addressInfoCount{ addressInfoCount_ }
+ , vendorInfoCount{ vendorInfoCount_ }
+ , vendorBinarySize{ vendorBinarySize_ }
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR DeviceFaultCountsEXT( DeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ DeviceFaultCountsEXT( VkDeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ : DeviceFaultCountsEXT( *reinterpret_cast<DeviceFaultCountsEXT const *>( &rhs ) )
+ {
+ }
+
+ DeviceFaultCountsEXT & operator=( DeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+ DeviceFaultCountsEXT & operator=( VkDeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setAddressInfoCount( uint32_t addressInfoCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+
addressInfoCount = addressInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setVendorInfoCount( uint32_t vendorInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + vendorInfoCount = vendorInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setVendorBinarySize( VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize_ ) VULKAN_HPP_NOEXCEPT + { + vendorBinarySize = vendorBinarySize_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceFaultCountsEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceFaultCountsEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceFaultCountsEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceFaultCountsEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, addressInfoCount, vendorInfoCount, vendorBinarySize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceFaultCountsEXT const & ) const = default; +#else + bool operator==( DeviceFaultCountsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( addressInfoCount == rhs.addressInfoCount ) && ( vendorInfoCount == rhs.vendorInfoCount ) && + ( vendorBinarySize == rhs.vendorBinarySize ); +# endif + } + + bool operator!=( DeviceFaultCountsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceFaultCountsEXT; + void * pNext = {}; + uint32_t addressInfoCount = {}; + uint32_t vendorInfoCount = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize = {}; + }; + + template <> + struct CppType + { + using Type = DeviceFaultCountsEXT; + }; + + // wrapper struct for struct VkDeviceFaultVendorInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultVendorInfoEXT.html + struct DeviceFaultVendorInfoEXT + { + using NativeType = VkDeviceFaultVendorInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT( std::array const & description_ = {}, + uint64_t vendorFaultCode_ = {}, + uint64_t vendorFaultData_ = {} ) VULKAN_HPP_NOEXCEPT + : description{ description_ } + , vendorFaultCode{ vendorFaultCode_ } + , vendorFaultData{ vendorFaultData_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT( DeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceFaultVendorInfoEXT( VkDeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceFaultVendorInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceFaultVendorInfoEXT( std::string const & description_, uint64_t vendorFaultCode_ = {}, uint64_t vendorFaultData_ = {} ) + : vendorFaultCode( vendorFaultCode_ ), vendorFaultData( vendorFaultData_ ) + { + VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( _WIN32 ) + strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); +# else + strncpy( description, 
description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); +# endif + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DeviceFaultVendorInfoEXT & operator=( DeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceFaultVendorInfoEXT & operator=( VkDeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setDescription( std::array description_ ) VULKAN_HPP_NOEXCEPT + { + description = description_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceFaultVendorInfoEXT & setDescription( std::string const & description_ ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( _WIN32 ) + strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); +# else + strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); +# endif + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setVendorFaultCode( uint64_t vendorFaultCode_ ) VULKAN_HPP_NOEXCEPT + { + vendorFaultCode = vendorFaultCode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setVendorFaultData( uint64_t vendorFaultData_ ) VULKAN_HPP_NOEXCEPT + { + vendorFaultData = vendorFaultData_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceFaultVendorInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceFaultVendorInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceFaultVendorInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceFaultVendorInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, uint64_t const &, uint64_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( description, vendorFaultCode, vendorFaultData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = vendorFaultCode <=> rhs.vendorFaultCode; cmp != 0 ) + return cmp; + if ( auto cmp = vendorFaultData <=> rhs.vendorFaultData; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( strcmp( description, rhs.description ) == 0 ) && ( vendorFaultCode == rhs.vendorFaultCode ) && ( vendorFaultData == rhs.vendorFaultData ); + } + + bool operator!=( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + uint64_t vendorFaultCode = {}; + uint64_t vendorFaultData = {}; + }; + + // wrapper struct for struct VkDeviceFaultInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultInfoEXT.html + struct DeviceFaultInfoEXT + { + using NativeType = VkDeviceFaultInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceFaultInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT( std::array const & description_ = {}, + VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * pAddressInfos_ = {}, + VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * pVendorInfos_ = {}, + void * pVendorBinaryData_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , description{ description_ } + , pAddressInfos{ pAddressInfos_ } + , pVendorInfos{ pVendorInfos_ } + , pVendorBinaryData{ pVendorBinaryData_ } + { + } + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT( DeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceFaultInfoEXT( VkDeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceFaultInfoEXT( *reinterpret_cast( &rhs ) ) {} + + DeviceFaultInfoEXT & operator=( DeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# else + DeviceFaultInfoEXT( DeviceFaultInfoEXT const & ) = delete; + DeviceFaultInfoEXT & operator=( DeviceFaultInfoEXT const & ) = delete; + + DeviceFaultInfoEXT( DeviceFaultInfoEXT && rhs ) VULKAN_HPP_NOEXCEPT + : pNext{ rhs.pNext } + , pAddressInfos{ rhs.pAddressInfos } + , pVendorInfos{ rhs.pVendorInfos } + , pVendorBinaryData{ rhs.pVendorBinaryData } + { + memcpy( description, rhs.description, VK_MAX_DESCRIPTION_SIZE ); + + rhs.pNext = nullptr; + memset( rhs.description, 0, VK_MAX_DESCRIPTION_SIZE ); + rhs.pAddressInfos = nullptr; + rhs.pVendorInfos = nullptr; + rhs.pVendorBinaryData = nullptr; + } + + DeviceFaultInfoEXT & operator=( DeviceFaultInfoEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + free( pAddressInfos ); + free( pVendorInfos ); + free( pVendorBinaryData ); + + pNext = rhs.pNext; + memcpy( description, rhs.description, VK_MAX_DESCRIPTION_SIZE ); + pAddressInfos = rhs.pAddressInfos; + pVendorInfos = rhs.pVendorInfos; + pVendorBinaryData = rhs.pVendorBinaryData; + + rhs.pNext = nullptr; + memset( rhs.description, 0, VK_MAX_DESCRIPTION_SIZE ); + rhs.pAddressInfos = nullptr; + rhs.pVendorInfos = nullptr; + rhs.pVendorBinaryData = nullptr; + + return *this; + } + + ~DeviceFaultInfoEXT() VULKAN_HPP_NOEXCEPT + { + free( pAddressInfos ); + free( pVendorInfos ); + free( pVendorBinaryData ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + operator 
VkDeviceFaultInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceFaultInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceFaultInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceFaultInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * const &, + VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * const &, + void * const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, description, pAddressInfos, pVendorInfos, pVendorBinaryData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = pAddressInfos <=> rhs.pAddressInfos; cmp != 0 ) + return cmp; + if ( auto cmp = pVendorInfos <=> rhs.pVendorInfos; cmp != 0 ) + return cmp; + if ( auto cmp = pVendorBinaryData <=> rhs.pVendorBinaryData; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( strcmp( description, rhs.description ) == 0 ) && ( pAddressInfos == rhs.pAddressInfos ) && + ( pVendorInfos == rhs.pVendorInfos ) && ( pVendorBinaryData == rhs.pVendorBinaryData ); + } + + bool operator!=( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceFaultInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * pAddressInfos = {}; + VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * pVendorInfos = {}; + void * pVendorBinaryData = {}; + }; + + template <> + struct CppType + { + using Type = DeviceFaultInfoEXT; + }; + + // wrapper struct for struct VkDeviceFaultVendorBinaryHeaderVersionOneEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultVendorBinaryHeaderVersionOneEXT.html + struct DeviceFaultVendorBinaryHeaderVersionOneEXT + { + using NativeType = VkDeviceFaultVendorBinaryHeaderVersionOneEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT( + uint32_t headerSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion_ = VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT::eOne, + uint32_t vendorID_ = {}, + uint32_t deviceID_ = {}, + uint32_t driverVersion_ = {}, + std::array const & pipelineCacheUUID_ = {}, + uint32_t applicationNameOffset_ = {}, + uint32_t applicationVersion_ = {}, + uint32_t engineNameOffset_ = {}, + uint32_t engineVersion_ = {}, + uint32_t apiVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : headerSize{ headerSize_ } + , headerVersion{ headerVersion_ } + , vendorID{ vendorID_ } + , deviceID{ deviceID_ } + , 
driverVersion{ driverVersion_ } + , pipelineCacheUUID{ pipelineCacheUUID_ } + , applicationNameOffset{ applicationNameOffset_ } + , applicationVersion{ applicationVersion_ } + , engineNameOffset{ engineNameOffset_ } + , engineVersion{ engineVersion_ } + , apiVersion{ apiVersion_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceFaultVendorBinaryHeaderVersionOneEXT( VkDeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceFaultVendorBinaryHeaderVersionOneEXT( *reinterpret_cast( &rhs ) ) + { + } + + DeviceFaultVendorBinaryHeaderVersionOneEXT & operator=( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceFaultVendorBinaryHeaderVersionOneEXT & operator=( VkDeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setHeaderSize( uint32_t headerSize_ ) VULKAN_HPP_NOEXCEPT + { + headerSize = headerSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & + setHeaderVersion( VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion_ ) VULKAN_HPP_NOEXCEPT + { + headerVersion = headerVersion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setVendorID( uint32_t vendorID_ ) VULKAN_HPP_NOEXCEPT + { + vendorID = vendorID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setDeviceID( uint32_t deviceID_ ) VULKAN_HPP_NOEXCEPT + { + deviceID = deviceID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setDriverVersion( uint32_t driverVersion_ ) VULKAN_HPP_NOEXCEPT + { + driverVersion = driverVersion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & + setPipelineCacheUUID( std::array pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCacheUUID = pipelineCacheUUID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApplicationNameOffset( uint32_t applicationNameOffset_ ) VULKAN_HPP_NOEXCEPT + { + applicationNameOffset = applicationNameOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT + { + applicationVersion = applicationVersion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setEngineNameOffset( uint32_t engineNameOffset_ ) VULKAN_HPP_NOEXCEPT + { + engineNameOffset = engineNameOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setEngineVersion( uint32_t engineVersion_ ) VULKAN_HPP_NOEXCEPT + { + engineVersion = engineVersion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApiVersion( uint32_t apiVersion_ ) VULKAN_HPP_NOEXCEPT + { + apiVersion = apiVersion_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkDeviceFaultVendorBinaryHeaderVersionOneEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceFaultVendorBinaryHeaderVersionOneEXT *>( this );
+    }
+
+    operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkDeviceFaultVendorBinaryHeaderVersionOneEXT *>( this );
+    }
+
+    operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkDeviceFaultVendorBinaryHeaderVersionOneEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<uint32_t const &,
+               VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( headerSize,
+                       headerVersion,
+                       vendorID,
+                       deviceID,
+                       driverVersion,
+                       pipelineCacheUUID,
+                       applicationNameOffset,
+                       applicationVersion,
+                       engineNameOffset,
+                       engineVersion,
+                       apiVersion );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DeviceFaultVendorBinaryHeaderVersionOneEXT const & ) const = default;
+#else
+    bool operator==( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( headerSize == rhs.headerSize ) && ( headerVersion == rhs.headerVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) &&
+             ( driverVersion == rhs.driverVersion ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) &&
+             ( applicationNameOffset == rhs.applicationNameOffset ) && ( applicationVersion == rhs.applicationVersion ) &&
+             ( engineNameOffset == rhs.engineNameOffset ) && ( engineVersion == rhs.engineVersion ) && ( apiVersion == rhs.apiVersion );
+# endif
+    }
+
+    bool operator!=( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t                                                       headerSize            = {};
+    VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT  headerVersion         = VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT::eOne;
+    uint32_t                                                       vendorID              = {};
+    uint32_t                                                       deviceID              = {};
+    uint32_t                                                       driverVersion         = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE>    pipelineCacheUUID     = {};
+    uint32_t                                                       applicationNameOffset = {};
+    uint32_t                                                       applicationVersion    = {};
+    uint32_t                                                       engineNameOffset      = {};
+    uint32_t                                                       engineVersion         = {};
+    uint32_t                                                       apiVersion            = {};
+  };
+
+  // wrapper struct for struct VkDeviceGroupBindSparseInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupBindSparseInfo.html
+  struct DeviceGroupBindSparseInfo
+  {
+    using NativeType = VkDeviceGroupBindSparseInfo;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {}, uint32_t memoryDeviceIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , resourceDeviceIndex{ resourceDeviceIndex_ }
+      , memoryDeviceIndex{ memoryDeviceIndex_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupBindSparseInfo( *reinterpret_cast<DeviceGroupBindSparseInfo const *>( &rhs ) )
+    {
+    }
+
+    DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
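// ----------------------------------------------------------------------------------------------
// Illustrative usage sketch for the VK_EXT_device_fault wrappers above (DeviceFaultInfoEXT and
// friends); a standalone snippet, not part of the generated header or the patch. The helper name
// and the already-loaded PFN_vkGetDeviceFaultInfoEXT parameter are assumptions made for the
// sketch; extension enablement, error handling and the optional vendor binary dump are omitted.
#include <string>
#include <vector>
#include <vulkan/vulkan.hpp>

std::string describeDeviceFault( vk::Device device, PFN_vkGetDeviceFaultInfoEXT getFaultInfo )
{
  // First call: ask how many address / vendor records the implementation captured.
  vk::DeviceFaultCountsEXT counts;
  getFaultInfo( static_cast<VkDevice>( device ), reinterpret_cast<VkDeviceFaultCountsEXT *>( &counts ), nullptr );

  std::vector<vk::DeviceFaultAddressInfoEXT> addressInfos( counts.addressInfoCount );
  std::vector<vk::DeviceFaultVendorInfoEXT>  vendorInfos( counts.vendorInfoCount );

  // Second call: the public members of DeviceFaultInfoEXT shown above are filled in directly.
  vk::DeviceFaultInfoEXT info;
  info.pAddressInfos = addressInfos.data();
  info.pVendorInfos  = vendorInfos.data();
  getFaultInfo( static_cast<VkDevice>( device ),
                reinterpret_cast<VkDeviceFaultCountsEXT *>( &counts ),
                reinterpret_cast<VkDeviceFaultInfoEXT *>( &info ) );

  // description is an ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE>; data() yields the C string.
  return std::string( info.description.data() );
}
// ----------------------------------------------------------------------------------------------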
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT + { + resourceDeviceIndex = resourceDeviceIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT + { + memoryDeviceIndex = memoryDeviceIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceGroupBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupBindSparseInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceGroupBindSparseInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, resourceDeviceIndex, memoryDeviceIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupBindSparseInfo const & ) const = default; +#else + bool operator==( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( resourceDeviceIndex == rhs.resourceDeviceIndex ) && + ( memoryDeviceIndex == rhs.memoryDeviceIndex ); +# endif + } + + bool operator!=( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo; + const void * pNext = {}; + uint32_t resourceDeviceIndex = {}; + uint32_t memoryDeviceIndex = {}; + }; + + template <> + struct CppType + { + using Type = DeviceGroupBindSparseInfo; + }; + + using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo; + + // wrapper struct for struct VkDeviceGroupCommandBufferBeginInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupCommandBufferBeginInfo.html + struct DeviceGroupCommandBufferBeginInfo + { + using NativeType = VkDeviceGroupCommandBufferBeginInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupCommandBufferBeginInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceMask{ deviceMask_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : 
DeviceGroupCommandBufferBeginInfo( *reinterpret_cast( &rhs ) ) + { + } + + DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceGroupCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupCommandBufferBeginInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceGroupCommandBufferBeginInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupCommandBufferBeginInfo const & ) const = default; +#else + bool operator==( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ); +# endif + } + + bool operator!=( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo; + const void * pNext = {}; + uint32_t deviceMask = {}; + }; + + template <> + struct CppType + { + using Type = DeviceGroupCommandBufferBeginInfo; + }; + + using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo; + + // wrapper struct for struct VkDeviceGroupDeviceCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupDeviceCreateInfo.html + struct DeviceGroupDeviceCreateInfo + { + using NativeType = VkDeviceGroupDeviceCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , physicalDeviceCount{ physicalDeviceCount_ } + , pPhysicalDevices{ pPhysicalDevices_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) 
VULKAN_HPP_NOEXCEPT + : DeviceGroupDeviceCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupDeviceCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & physicalDevices_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), physicalDeviceCount( static_cast( physicalDevices_.size() ) ), pPhysicalDevices( physicalDevices_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DeviceGroupDeviceCreateInfo & operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceGroupDeviceCreateInfo & operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT + { + physicalDeviceCount = physicalDeviceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & + setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT + { + pPhysicalDevices = pPhysicalDevices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupDeviceCreateInfo & setPhysicalDevices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & physicalDevices_ ) VULKAN_HPP_NOEXCEPT + { + physicalDeviceCount = static_cast( physicalDevices_.size() ); + pPhysicalDevices = physicalDevices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceGroupDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupDeviceCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceGroupDeviceCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, physicalDeviceCount, pPhysicalDevices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupDeviceCreateInfo const & ) const = default; +#else + bool operator==( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) && + ( pPhysicalDevices == rhs.pPhysicalDevices ); +# endif + } + + bool operator!=( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo; + const void * pNext = {}; + uint32_t physicalDeviceCount = {}; + const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices = {}; + }; + + template <> + struct CppType + { + using Type = DeviceGroupDeviceCreateInfo; + }; + + using 
DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo; + + // wrapper struct for struct VkDeviceGroupPresentCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupPresentCapabilitiesKHR.html + struct DeviceGroupPresentCapabilitiesKHR + { + using NativeType = VkDeviceGroupPresentCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( std::array const & presentMask_ = {}, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentMask{ presentMask_ } + , modes{ modes_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupPresentCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + DeviceGroupPresentCapabilitiesKHR & operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDeviceGroupPresentCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupPresentCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceGroupPresentCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentMask, modes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupPresentCapabilitiesKHR const & ) const = default; +#else + bool operator==( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMask == rhs.presentMask ) && ( modes == rhs.modes ); +# endif + } + + bool operator!=( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D presentMask = {}; + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; + }; + + template <> + struct CppType + { + using Type = DeviceGroupPresentCapabilitiesKHR; + }; + + // wrapper struct for struct VkDeviceGroupPresentInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupPresentInfoKHR.html + struct DeviceGroupPresentInfoKHR + { + using NativeType = 
VkDeviceGroupPresentInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( + uint32_t swapchainCount_ = {}, + const uint32_t * pDeviceMasks_ = {}, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , swapchainCount{ swapchainCount_ } + , pDeviceMasks{ pDeviceMasks_ } + , mode{ mode_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupPresentInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupPresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceMasks_, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchainCount( static_cast( deviceMasks_.size() ) ), pDeviceMasks( deviceMasks_.data() ), mode( mode_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DeviceGroupPresentInfoKHR & operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceGroupPresentInfoKHR & operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t * pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceMasks = pDeviceMasks_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupPresentInfoKHR & setDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( deviceMasks_.size() ); + pDeviceMasks = deviceMasks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceGroupPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupPresentInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceGroupPresentInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + 
reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchainCount, pDeviceMasks, mode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupPresentInfoKHR const & ) const = default; +#else + bool operator==( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pDeviceMasks == rhs.pDeviceMasks ) && + ( mode == rhs.mode ); +# endif + } + + bool operator!=( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const uint32_t * pDeviceMasks = {}; + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal; + }; + + template <> + struct CppType + { + using Type = DeviceGroupPresentInfoKHR; + }; + + // wrapper struct for struct VkDeviceGroupRenderPassBeginInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupRenderPassBeginInfo.html + struct DeviceGroupRenderPassBeginInfo + { + using NativeType = VkDeviceGroupRenderPassBeginInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = {}, + uint32_t deviceRenderAreaCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceMask{ deviceMask_ } + , deviceRenderAreaCount{ deviceRenderAreaCount_ } + , pDeviceRenderAreas{ pDeviceRenderAreas_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupRenderPassBeginInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceRenderAreas_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , deviceMask( deviceMask_ ) + , deviceRenderAreaCount( static_cast( deviceRenderAreas_.size() ) ) + , pDeviceRenderAreas( deviceRenderAreas_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DeviceGroupRenderPassBeginInfo & operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + 
deviceMask = deviceMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceRenderAreaCount = deviceRenderAreaCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & + setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceRenderAreas = pDeviceRenderAreas_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupRenderPassBeginInfo & + setDeviceRenderAreas( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT + { + deviceRenderAreaCount = static_cast( deviceRenderAreas_.size() ); + pDeviceRenderAreas = deviceRenderAreas_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceGroupRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupRenderPassBeginInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceGroupRenderPassBeginInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceMask, deviceRenderAreaCount, pDeviceRenderAreas ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupRenderPassBeginInfo const & ) const = default; +#else + bool operator==( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ) && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) && + ( pDeviceRenderAreas == rhs.pDeviceRenderAreas ); +# endif + } + + bool operator!=( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo; + const void * pNext = {}; + uint32_t deviceMask = {}; + uint32_t deviceRenderAreaCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas = {}; + }; + + template <> + struct CppType + { + using Type = DeviceGroupRenderPassBeginInfo; + }; + + using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo; + + // wrapper struct for struct VkDeviceGroupSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupSubmitInfo.html + struct DeviceGroupSubmitInfo + { + using NativeType = VkDeviceGroupSubmitInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = {}, + const uint32_t * pWaitSemaphoreDeviceIndices_ = {}, + uint32_t commandBufferCount_ = {}, + const uint32_t * pCommandBufferDeviceMasks_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const uint32_t * 
pSignalSemaphoreDeviceIndices_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , waitSemaphoreCount{ waitSemaphoreCount_ } + , pWaitSemaphoreDeviceIndices{ pWaitSemaphoreDeviceIndices_ } + , commandBufferCount{ commandBufferCount_ } + , pCommandBufferDeviceMasks{ pCommandBufferDeviceMasks_ } + , signalSemaphoreCount{ signalSemaphoreCount_ } + , pSignalSemaphoreDeviceIndices{ pSignalSemaphoreDeviceIndices_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupSubmitInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreDeviceIndices_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferDeviceMasks_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreDeviceIndices_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , waitSemaphoreCount( static_cast( waitSemaphoreDeviceIndices_.size() ) ) + , pWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices_.data() ) + , commandBufferCount( static_cast( commandBufferDeviceMasks_.size() ) ) + , pCommandBufferDeviceMasks( commandBufferDeviceMasks_.data() ) + , signalSemaphoreCount( static_cast( signalSemaphoreDeviceIndices_.size() ) ) + , pSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DeviceGroupSubmitInfo & operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceGroupSubmitInfo & operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t * pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo & + setWaitSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphoreDeviceIndices_.size() ); + pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = commandBufferCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t * pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT + { + pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo & + setCommandBufferDeviceMasks( 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = static_cast( commandBufferDeviceMasks_.size() ); + pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t * pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo & + setSignalSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = static_cast( signalSemaphoreDeviceIndices_.size() ); + pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceGroupSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupSubmitInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceGroupSubmitInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + waitSemaphoreCount, + pWaitSemaphoreDeviceIndices, + commandBufferCount, + pCommandBufferDeviceMasks, + signalSemaphoreCount, + pSignalSemaphoreDeviceIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupSubmitInfo const & ) const = default; +#else + bool operator==( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) && ( commandBufferCount == rhs.commandBufferCount ) && + ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && + ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices ); +# endif + } + + bool operator!=( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const uint32_t * pWaitSemaphoreDeviceIndices = {}; + uint32_t commandBufferCount = {}; + const uint32_t * pCommandBufferDeviceMasks = {}; + uint32_t signalSemaphoreCount = {}; + const uint32_t * pSignalSemaphoreDeviceIndices = {}; + }; + + template <> + struct CppType + { + using Type = DeviceGroupSubmitInfo; + }; + + using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo; + + // wrapper struct for struct VkDeviceGroupSwapchainCreateInfoKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupSwapchainCreateInfoKHR.html + struct DeviceGroupSwapchainCreateInfoKHR + { + using NativeType = VkDeviceGroupSwapchainCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , modes{ modes_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupSwapchainCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + DeviceGroupSwapchainCreateInfoKHR & operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT + { + modes = modes_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceGroupSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupSwapchainCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceGroupSwapchainCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, modes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const & ) const = default; +#else + bool operator==( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( modes == rhs.modes ); +# endif + } + + bool operator!=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; + }; + + template <> + struct CppType + { + using Type = DeviceGroupSwapchainCreateInfoKHR; + }; + + // wrapper struct for struct VkImageCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCreateInfo.html + struct 
ImageCreateInfo + { + using NativeType = VkImageCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, + uint32_t mipLevels_ = {}, + uint32_t arrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , imageType{ imageType_ } + , format{ format_ } + , extent{ extent_ } + , mipLevels{ mipLevels_ } + , arrayLayers{ arrayLayers_ } + , samples{ samples_ } + , tiling{ tiling_ } + , usage{ usage_ } + , sharingMode{ sharingMode_ } + , queueFamilyIndexCount{ queueFamilyIndexCount_ } + , pQueueFamilyIndices{ pQueueFamilyIndices_ } + , initialLayout{ initialLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCreateInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ImageType imageType_, + VULKAN_HPP_NAMESPACE::Format format_, + VULKAN_HPP_NAMESPACE::Extent3D extent_, + uint32_t mipLevels_, + uint32_t arrayLayers_, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , imageType( imageType_ ) + , format( format_ ) + , extent( extent_ ) + , mipLevels( mipLevels_ ) + , arrayLayers( arrayLayers_ ) + , samples( samples_ ) + , tiling( tiling_ ) + , usage( usage_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + , initialLayout( initialLayout_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageCreateInfo & operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = 
pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT + { + imageType = imageType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) VULKAN_HPP_NOEXCEPT + { + mipLevels = mipLevels_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) VULKAN_HPP_NOEXCEPT + { + arrayLayers = arrayLayers_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + { + tiling = tiling_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT + { + sharingMode = sharingMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT + { + initialLayout = initialLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + imageType, + format, + extent, + mipLevels, + arrayLayers, + samples, + tiling, + usage, + sharingMode, + queueFamilyIndexCount, + pQueueFamilyIndices, + initialLayout ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCreateInfo const & ) const = default; +#else + bool operator==( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( imageType == rhs.imageType ) && ( format == rhs.format ) && + ( extent == rhs.extent ) && ( mipLevels == rhs.mipLevels ) && ( arrayLayers == rhs.arrayLayers ) && ( samples == rhs.samples ) && + ( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) && + ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && + ( initialLayout == rhs.initialLayout ); +# endif + } + + bool operator!=( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + uint32_t mipLevels = {}; + uint32_t arrayLayers = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + + template <> + struct CppType + { + using Type = ImageCreateInfo; + }; + + // wrapper struct for struct VkDeviceImageMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceImageMemoryRequirements.html + struct DeviceImageMemoryRequirements + { + using NativeType = VkDeviceImageMemoryRequirements; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceImageMemoryRequirements; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ = {}, + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pCreateInfo{ pCreateInfo_ } + , planeAspect{ planeAspect_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceImageMemoryRequirements( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceImageMemoryRequirements( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceImageMemoryRequirements( *reinterpret_cast( &rhs ) ) + { + } + + DeviceImageMemoryRequirements & operator=( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceImageMemoryRequirements & operator=( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPCreateInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT + { + pCreateInfo = pCreateInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT + { + planeAspect = planeAspect_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceImageMemoryRequirements const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceImageMemoryRequirements *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pCreateInfo, planeAspect ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceImageMemoryRequirements const & ) const = default; +#else + bool operator==( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ) && ( planeAspect == rhs.planeAspect ); +# endif + } + + bool operator!=( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceImageMemoryRequirements; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + }; + + template <> + struct CppType + { + using Type = DeviceImageMemoryRequirements; + }; + + using DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements; + + // wrapper struct for struct VkImageSubresource2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSubresource2.html + struct ImageSubresource2 + { + using NativeType = VkImageSubresource2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSubresource2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSubresource2( VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageSubresource{ imageSubresource_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageSubresource2( ImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresource2( VkImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageSubresource2( *reinterpret_cast( &rhs ) ) {} + + ImageSubresource2 & operator=( ImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageSubresource2 & operator=( 
VkImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageSubresource2 & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresource2 & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageSubresource2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresource2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresource2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageSubresource2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageSubresource ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSubresource2 const & ) const = default; +#else + bool operator==( ImageSubresource2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageSubresource == rhs.imageSubresource ); +# endif + } + + bool operator!=( ImageSubresource2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSubresource2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource = {}; + }; + + template <> + struct CppType + { + using Type = ImageSubresource2; + }; + + using ImageSubresource2EXT = ImageSubresource2; + using ImageSubresource2KHR = ImageSubresource2; + + // wrapper struct for struct VkDeviceImageSubresourceInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceImageSubresourceInfo.html + struct DeviceImageSubresourceInfo + { + using NativeType = VkDeviceImageSubresourceInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceImageSubresourceInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ = {}, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pCreateInfo{ pCreateInfo_ } + , pSubresource{ pSubresource_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfo( DeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceImageSubresourceInfo( VkDeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceImageSubresourceInfo( *reinterpret_cast( &rhs ) ) + { + } + + DeviceImageSubresourceInfo & operator=( DeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceImageSubresourceInfo & operator=( VkDeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = 
*reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo & setPCreateInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT + { + pCreateInfo = pCreateInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo & setPSubresource( const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource_ ) VULKAN_HPP_NOEXCEPT + { + pSubresource = pSubresource_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceImageSubresourceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceImageSubresourceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceImageSubresourceInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceImageSubresourceInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pCreateInfo, pSubresource ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceImageSubresourceInfo const & ) const = default; +#else + bool operator==( DeviceImageSubresourceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ) && ( pSubresource == rhs.pSubresource ); +# endif + } + + bool operator!=( DeviceImageSubresourceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceImageSubresourceInfo; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo = {}; + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource = {}; + }; + + template <> + struct CppType + { + using Type = DeviceImageSubresourceInfo; + }; + + using DeviceImageSubresourceInfoKHR = DeviceImageSubresourceInfo; + + // wrapper struct for struct VkDeviceMemoryOpaqueCaptureAddressInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceMemoryOpaqueCaptureAddressInfo.html + struct DeviceMemoryOpaqueCaptureAddressInfo + { + using NativeType = VkDeviceMemoryOpaqueCaptureAddressInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memory{ memory_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceMemoryOpaqueCaptureAddressInfo( *reinterpret_cast( &rhs ) ) + 
{ + } + + DeviceMemoryOpaqueCaptureAddressInfo & operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceMemoryOpaqueCaptureAddressInfo & operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceMemoryOpaqueCaptureAddressInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceMemoryOpaqueCaptureAddressInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceMemoryOpaqueCaptureAddressInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const & ) const = default; +#else + bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ); +# endif + } + + bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + }; + + template <> + struct CppType + { + using Type = DeviceMemoryOpaqueCaptureAddressInfo; + }; + + using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo; + + // wrapper struct for struct VkDeviceMemoryOverallocationCreateInfoAMD, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceMemoryOverallocationCreateInfoAMD.html + struct DeviceMemoryOverallocationCreateInfoAMD + { + using NativeType = VkDeviceMemoryOverallocationCreateInfoAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , overallocationBehavior{ overallocationBehavior_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceMemoryOverallocationCreateInfoAMD( *reinterpret_cast( &rhs ) ) + { + } + + DeviceMemoryOverallocationCreateInfoAMD & operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceMemoryOverallocationCreateInfoAMD & operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & + setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT + { + overallocationBehavior = overallocationBehavior_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceMemoryOverallocationCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceMemoryOverallocationCreateInfoAMD const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceMemoryOverallocationCreateInfoAMD *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, overallocationBehavior ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const & ) const = default; +#else + bool operator==( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( overallocationBehavior == rhs.overallocationBehavior ); +# endif + } + + bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault; + }; + + template <> + struct CppType + { + using Type = DeviceMemoryOverallocationCreateInfoAMD; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + union DeviceOrHostAddressConstAMDX + { + using NativeType = VkDeviceOrHostAddressConstAMDX; +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {} + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX( const void * hostAddress_ ) : hostAddress( hostAddress_ ) {} +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + 
VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT + { + hostAddress = hostAddress_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceOrHostAddressConstAMDX const &() const + { + return *reinterpret_cast( this ); + } + + operator VkDeviceOrHostAddressConstAMDX &() + { + return *reinterpret_cast( this ); + } + +# ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress; + const void * hostAddress; +# else + VkDeviceAddress deviceAddress; + const void * hostAddress; +# endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + // wrapper struct for struct VkDevicePipelineBinaryInternalCacheControlKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDevicePipelineBinaryInternalCacheControlKHR.html + struct DevicePipelineBinaryInternalCacheControlKHR + { + using NativeType = VkDevicePipelineBinaryInternalCacheControlKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePipelineBinaryInternalCacheControlKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DevicePipelineBinaryInternalCacheControlKHR( VULKAN_HPP_NAMESPACE::Bool32 disableInternalCache_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , disableInternalCache{ disableInternalCache_ } + { + } + + VULKAN_HPP_CONSTEXPR DevicePipelineBinaryInternalCacheControlKHR( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DevicePipelineBinaryInternalCacheControlKHR( VkDevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DevicePipelineBinaryInternalCacheControlKHR( *reinterpret_cast( &rhs ) ) + { + } + + DevicePipelineBinaryInternalCacheControlKHR & operator=( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DevicePipelineBinaryInternalCacheControlKHR & operator=( VkDevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DevicePipelineBinaryInternalCacheControlKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DevicePipelineBinaryInternalCacheControlKHR & + setDisableInternalCache( VULKAN_HPP_NAMESPACE::Bool32 disableInternalCache_ ) VULKAN_HPP_NOEXCEPT + { + disableInternalCache = disableInternalCache_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDevicePipelineBinaryInternalCacheControlKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDevicePipelineBinaryInternalCacheControlKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDevicePipelineBinaryInternalCacheControlKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDevicePipelineBinaryInternalCacheControlKHR *() VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, disableInternalCache ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DevicePipelineBinaryInternalCacheControlKHR const & ) const = default; +#else + bool operator==( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( disableInternalCache == rhs.disableInternalCache ); +# endif + } + + bool operator!=( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePipelineBinaryInternalCacheControlKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 disableInternalCache = {}; + }; + + template <> + struct CppType + { + using Type = DevicePipelineBinaryInternalCacheControlKHR; + }; + + // wrapper struct for struct VkDevicePrivateDataCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDevicePrivateDataCreateInfo.html + struct DevicePrivateDataCreateInfo + { + using NativeType = VkDevicePrivateDataCreateInfo; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( uint32_t privateDataSlotRequestCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , privateDataSlotRequestCount{ privateDataSlotRequestCount_ } + { + } + + VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DevicePrivateDataCreateInfo( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DevicePrivateDataCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + DevicePrivateDataCreateInfo & operator=( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DevicePrivateDataCreateInfo & operator=( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT + { + privateDataSlotRequestCount = privateDataSlotRequestCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDevicePrivateDataCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDevicePrivateDataCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDevicePrivateDataCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDevicePrivateDataCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, privateDataSlotRequestCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DevicePrivateDataCreateInfo const & ) const = default; +#else + bool operator==( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount ); +# endif + } + + bool operator!=( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePrivateDataCreateInfo; + const void * pNext = {}; + uint32_t privateDataSlotRequestCount = {}; + }; + + template <> + struct CppType + { + using Type = DevicePrivateDataCreateInfo; + }; + + using DevicePrivateDataCreateInfoEXT = DevicePrivateDataCreateInfo; + + // wrapper struct for struct VkDeviceQueueGlobalPriorityCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueGlobalPriorityCreateInfo.html + struct DeviceQueueGlobalPriorityCreateInfo + { + using NativeType = VkDeviceQueueGlobalPriorityCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueGlobalPriorityCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceQueueGlobalPriorityCreateInfo( VULKAN_HPP_NAMESPACE::QueueGlobalPriority globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , globalPriority{ globalPriority_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfo( DeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueGlobalPriorityCreateInfo( VkDeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceQueueGlobalPriorityCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + DeviceQueueGlobalPriorityCreateInfo & operator=( DeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceQueueGlobalPriorityCreateInfo & operator=( VkDeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfo & + setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriority globalPriority_ ) VULKAN_HPP_NOEXCEPT + { + globalPriority = globalPriority_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceQueueGlobalPriorityCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceQueueGlobalPriorityCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceQueueGlobalPriorityCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( 
this ); + } + + operator VkDeviceQueueGlobalPriorityCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, globalPriority ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceQueueGlobalPriorityCreateInfo const & ) const = default; +#else + bool operator==( DeviceQueueGlobalPriorityCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriority == rhs.globalPriority ); +# endif + } + + bool operator!=( DeviceQueueGlobalPriorityCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueueGlobalPriority globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow; + }; + + template <> + struct CppType + { + using Type = DeviceQueueGlobalPriorityCreateInfo; + }; + + using DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfo; + using DeviceQueueGlobalPriorityCreateInfoKHR = DeviceQueueGlobalPriorityCreateInfo; + + // wrapper struct for struct VkDeviceQueueInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueInfo2.html + struct DeviceQueueInfo2 + { + using NativeType = VkDeviceQueueInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueInfo2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, + uint32_t queueFamilyIndex_ = {}, + uint32_t queueIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , queueFamilyIndex{ queueFamilyIndex_ } + , queueIndex{ queueIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceQueueInfo2( *reinterpret_cast( &rhs ) ) {} + + DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceQueueInfo2 & operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueIndex = queueIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator 
VkDeviceQueueInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceQueueInfo2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceQueueInfo2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, queueFamilyIndex, queueIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceQueueInfo2 const & ) const = default; +#else + bool operator==( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && + ( queueIndex == rhs.queueIndex ); +# endif + } + + bool operator!=( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; + uint32_t queueFamilyIndex = {}; + uint32_t queueIndex = {}; + }; + + template <> + struct CppType + { + using Type = DeviceQueueInfo2; + }; + + // wrapper struct for struct VkDeviceQueueShaderCoreControlCreateInfoARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueShaderCoreControlCreateInfoARM.html + struct DeviceQueueShaderCoreControlCreateInfoARM + { + using NativeType = VkDeviceQueueShaderCoreControlCreateInfoARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueShaderCoreControlCreateInfoARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceQueueShaderCoreControlCreateInfoARM( uint32_t shaderCoreCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderCoreCount{ shaderCoreCount_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceQueueShaderCoreControlCreateInfoARM( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueShaderCoreControlCreateInfoARM( VkDeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceQueueShaderCoreControlCreateInfoARM( *reinterpret_cast( &rhs ) ) + { + } + + DeviceQueueShaderCoreControlCreateInfoARM & operator=( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceQueueShaderCoreControlCreateInfoARM & operator=( VkDeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceQueueShaderCoreControlCreateInfoARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueShaderCoreControlCreateInfoARM & setShaderCoreCount( uint32_t shaderCoreCount_ ) VULKAN_HPP_NOEXCEPT + { + shaderCoreCount = shaderCoreCount_; + return *this; + 
} +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceQueueShaderCoreControlCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceQueueShaderCoreControlCreateInfoARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceQueueShaderCoreControlCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDeviceQueueShaderCoreControlCreateInfoARM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderCoreCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceQueueShaderCoreControlCreateInfoARM const & ) const = default; +#else + bool operator==( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreCount == rhs.shaderCoreCount ); +# endif + } + + bool operator!=( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueShaderCoreControlCreateInfoARM; + void * pNext = {}; + uint32_t shaderCoreCount = {}; + }; + + template <> + struct CppType + { + using Type = DeviceQueueShaderCoreControlCreateInfoARM; + }; + + typedef PFN_vkVoidFunction( VKAPI_PTR * PFN_GetInstanceProcAddrLUNARG )( VULKAN_HPP_NAMESPACE::Instance instance, const char * pName ); + + // wrapper struct for struct VkDirectDriverLoadingInfoLUNARG, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDirectDriverLoadingInfoLUNARG.html + struct DirectDriverLoadingInfoLUNARG + { + using NativeType = VkDirectDriverLoadingInfoLUNARG; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectDriverLoadingInfoLUNARG; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DirectDriverLoadingInfoLUNARG( VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG flags_ = {}, + VULKAN_HPP_NAMESPACE::PFN_GetInstanceProcAddrLUNARG pfnGetInstanceProcAddr_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pfnGetInstanceProcAddr{ pfnGetInstanceProcAddr_ } + { + } + + VULKAN_HPP_CONSTEXPR DirectDriverLoadingInfoLUNARG( DirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DirectDriverLoadingInfoLUNARG( VkDirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT + : DirectDriverLoadingInfoLUNARG( *reinterpret_cast( &rhs ) ) + { + } + + DirectDriverLoadingInfoLUNARG & operator=( DirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DirectDriverLoadingInfoLUNARG & operator=( VkDirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + 
VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & setFlags( VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & + setPfnGetInstanceProcAddr( VULKAN_HPP_NAMESPACE::PFN_GetInstanceProcAddrLUNARG pfnGetInstanceProcAddr_ ) VULKAN_HPP_NOEXCEPT + { + pfnGetInstanceProcAddr = pfnGetInstanceProcAddr_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDirectDriverLoadingInfoLUNARG const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDirectDriverLoadingInfoLUNARG &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDirectDriverLoadingInfoLUNARG const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDirectDriverLoadingInfoLUNARG *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pfnGetInstanceProcAddr ); + } +#endif + + bool operator==( DirectDriverLoadingInfoLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT + { +#if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +#else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnGetInstanceProcAddr == rhs.pfnGetInstanceProcAddr ); +#endif + } + + bool operator!=( DirectDriverLoadingInfoLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectDriverLoadingInfoLUNARG; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG flags = {}; + VULKAN_HPP_NAMESPACE::PFN_GetInstanceProcAddrLUNARG pfnGetInstanceProcAddr = {}; + }; + + template <> + struct CppType + { + using Type = DirectDriverLoadingInfoLUNARG; + }; + + // wrapper struct for struct VkDirectDriverLoadingListLUNARG, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDirectDriverLoadingListLUNARG.html + struct DirectDriverLoadingListLUNARG + { + using NativeType = VkDirectDriverLoadingListLUNARG; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectDriverLoadingListLUNARG; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DirectDriverLoadingListLUNARG( + VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode_ = VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG::eExclusive, + uint32_t driverCount_ = {}, + const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG * pDrivers_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , mode{ mode_ } + , driverCount{ driverCount_ } + , pDrivers{ pDrivers_ } + { + } + + VULKAN_HPP_CONSTEXPR DirectDriverLoadingListLUNARG( DirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DirectDriverLoadingListLUNARG( VkDirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT + : DirectDriverLoadingListLUNARG( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DirectDriverLoadingListLUNARG( VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drivers_, + const void * pNext_ = nullptr ) + : 
pNext( pNext_ ), mode( mode_ ), driverCount( static_cast( drivers_.size() ) ), pDrivers( drivers_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DirectDriverLoadingListLUNARG & operator=( DirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DirectDriverLoadingListLUNARG & operator=( VkDirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setMode( VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setDriverCount( uint32_t driverCount_ ) VULKAN_HPP_NOEXCEPT + { + driverCount = driverCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & + setPDrivers( const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG * pDrivers_ ) VULKAN_HPP_NOEXCEPT + { + pDrivers = pDrivers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DirectDriverLoadingListLUNARG & setDrivers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drivers_ ) VULKAN_HPP_NOEXCEPT + { + driverCount = static_cast( drivers_.size() ); + pDrivers = drivers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDirectDriverLoadingListLUNARG const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDirectDriverLoadingListLUNARG &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDirectDriverLoadingListLUNARG const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDirectDriverLoadingListLUNARG *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mode, driverCount, pDrivers ); + } +#endif + + bool operator==( DirectDriverLoadingListLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT + { +#if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +#else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && ( driverCount == rhs.driverCount ) && ( pDrivers == rhs.pDrivers ); +#endif + } + + bool operator!=( DirectDriverLoadingListLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectDriverLoadingListLUNARG; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode = VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG::eExclusive; + uint32_t driverCount = {}; + const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG * pDrivers = {}; + }; + + template <> + struct CppType + { + using Type = DirectDriverLoadingListLUNARG; + }; + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + // wrapper struct for struct VkDirectFBSurfaceCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDirectFBSurfaceCreateInfoEXT.html + struct 
DirectFBSurfaceCreateInfoEXT + { + using NativeType = VkDirectFBSurfaceCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectfbSurfaceCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ = {}, + IDirectFB * dfb_ = {}, + IDirectFBSurface * surface_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , dfb{ dfb_ } + , surface{ surface_ } + { + } + + VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DirectFBSurfaceCreateInfoEXT( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DirectFBSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + DirectFBSurfaceCreateInfoEXT & operator=( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DirectFBSurfaceCreateInfoEXT & operator=( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setDfb( IDirectFB * dfb_ ) VULKAN_HPP_NOEXCEPT + { + dfb = dfb_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setSurface( IDirectFBSurface * surface_ ) VULKAN_HPP_NOEXCEPT + { + surface = surface_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDirectFBSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDirectFBSurfaceCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDirectFBSurfaceCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, dfb, surface ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DirectFBSurfaceCreateInfoEXT const & ) const = default; +# else + bool operator==( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dfb == rhs.dfb ) && ( surface == rhs.surface ); +# endif + } + + bool operator!=( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectfbSurfaceCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT 
flags = {}; + IDirectFB * dfb = {}; + IDirectFBSurface * surface = {}; + }; + + template <> + struct CppType + { + using Type = DirectFBSurfaceCreateInfoEXT; + }; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + // wrapper struct for struct VkDispatchGraphCountInfoAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDispatchGraphCountInfoAMDX.html + struct DispatchGraphCountInfoAMDX + { + using NativeType = VkDispatchGraphCountInfoAMDX; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX( uint32_t count_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX infos_ = {}, + uint64_t stride_ = {} ) VULKAN_HPP_NOEXCEPT + : count{ count_ } + , infos{ infos_ } + , stride{ stride_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX( DispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DispatchGraphCountInfoAMDX( VkDispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : DispatchGraphCountInfoAMDX( *reinterpret_cast( &rhs ) ) + { + } + + DispatchGraphCountInfoAMDX & operator=( DispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DispatchGraphCountInfoAMDX & operator=( VkDispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setCount( uint32_t count_ ) VULKAN_HPP_NOEXCEPT + { + count = count_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setInfos( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX const & infos_ ) VULKAN_HPP_NOEXCEPT + { + infos = infos_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setStride( uint64_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDispatchGraphCountInfoAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDispatchGraphCountInfoAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDispatchGraphCountInfoAMDX const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDispatchGraphCountInfoAMDX *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( count, infos, stride ); + } +# endif + + public: + uint32_t count = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX infos = {}; + uint64_t stride = {}; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + // wrapper struct for struct VkDispatchGraphInfoAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDispatchGraphInfoAMDX.html + struct DispatchGraphInfoAMDX + { + using NativeType = VkDispatchGraphInfoAMDX; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX( uint32_t nodeIndex_ = {}, + uint32_t payloadCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX payloads_ = {}, + uint64_t payloadStride_ = {} ) VULKAN_HPP_NOEXCEPT + : nodeIndex{ nodeIndex_ } + , 
payloadCount{ payloadCount_ } + , payloads{ payloads_ } + , payloadStride{ payloadStride_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX( DispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DispatchGraphInfoAMDX( VkDispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : DispatchGraphInfoAMDX( *reinterpret_cast( &rhs ) ) + { + } + + DispatchGraphInfoAMDX & operator=( DispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DispatchGraphInfoAMDX & operator=( VkDispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setNodeIndex( uint32_t nodeIndex_ ) VULKAN_HPP_NOEXCEPT + { + nodeIndex = nodeIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloadCount( uint32_t payloadCount_ ) VULKAN_HPP_NOEXCEPT + { + payloadCount = payloadCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloads( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX const & payloads_ ) VULKAN_HPP_NOEXCEPT + { + payloads = payloads_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloadStride( uint64_t payloadStride_ ) VULKAN_HPP_NOEXCEPT + { + payloadStride = payloadStride_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDispatchGraphInfoAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDispatchGraphInfoAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDispatchGraphInfoAMDX const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDispatchGraphInfoAMDX *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( nodeIndex, payloadCount, payloads, payloadStride ); + } +# endif + + public: + uint32_t nodeIndex = {}; + uint32_t payloadCount = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX payloads = {}; + uint64_t payloadStride = {}; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + // wrapper struct for struct VkDispatchIndirectCommand, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDispatchIndirectCommand.html + struct DispatchIndirectCommand + { + using NativeType = VkDispatchIndirectCommand; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT + : x{ x_ } + , y{ y_ } + , z{ z_ } + { + } + + VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + : DispatchIndirectCommand( *reinterpret_cast( &rhs ) ) + { + } + + DispatchIndirectCommand & operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DispatchIndirectCommand & operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS 
)
+    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT
+    {
+      z = z_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkDispatchIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDispatchIndirectCommand *>( this );
+    }
+
+    operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDispatchIndirectCommand *>( this );
+    }
+
+    operator VkDispatchIndirectCommand const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkDispatchIndirectCommand *>( this );
+    }
+
+    operator VkDispatchIndirectCommand *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkDispatchIndirectCommand *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( x, y, z );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DispatchIndirectCommand const & ) const = default;
+#else
+    bool operator==( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z );
+# endif
+    }
+
+    bool operator!=( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t x = {};
+    uint32_t y = {};
+    uint32_t z = {};
+  };
+
+  // wrapper struct for struct VkDispatchTileInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDispatchTileInfoQCOM.html
+  struct DispatchTileInfoQCOM
+  {
+    using NativeType = VkDispatchTileInfoQCOM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDispatchTileInfoQCOM;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DispatchTileInfoQCOM( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {}
+
+    VULKAN_HPP_CONSTEXPR DispatchTileInfoQCOM( DispatchTileInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DispatchTileInfoQCOM( VkDispatchTileInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DispatchTileInfoQCOM( *reinterpret_cast<DispatchTileInfoQCOM const *>( &rhs ) )
+    {
+    }
+
+    DispatchTileInfoQCOM & operator=( DispatchTileInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    DispatchTileInfoQCOM & operator=( VkDispatchTileInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchTileInfoQCOM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DispatchTileInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkDispatchTileInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDispatchTileInfoQCOM *>( this );
+    }
+
+    operator VkDispatchTileInfoQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDispatchTileInfoQCOM *>( this );
+    }
+
+    operator VkDispatchTileInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkDispatchTileInfoQCOM *>( this );
+    }
+
+    operator VkDispatchTileInfoQCOM *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkDispatchTileInfoQCOM *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <=
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DispatchTileInfoQCOM const & ) const = default; +#else + bool operator==( DispatchTileInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); +# endif + } + + bool operator!=( DispatchTileInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDispatchTileInfoQCOM; + const void * pNext = {}; + }; + + template <> + struct CppType + { + using Type = DispatchTileInfoQCOM; + }; + + // wrapper struct for struct VkDisplayEventInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayEventInfoEXT.html + struct DisplayEventInfoEXT + { + using NativeType = VkDisplayEventInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayEventInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , displayEvent{ displayEvent_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayEventInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + DisplayEventInfoEXT & operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayEventInfoEXT & operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT + { + displayEvent = displayEvent_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDisplayEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayEventInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDisplayEventInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, displayEvent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayEventInfoEXT const & ) const = default; +#else + bool operator==( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); 
+# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayEvent == rhs.displayEvent ); +# endif + } + + bool operator!=( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut; + }; + + template <> + struct CppType + { + using Type = DisplayEventInfoEXT; + }; + + // wrapper struct for struct VkDisplayModeParametersKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModeParametersKHR.html + struct DisplayModeParametersKHR + { + using NativeType = VkDisplayModeParametersKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {}, uint32_t refreshRate_ = {} ) VULKAN_HPP_NOEXCEPT + : visibleRegion{ visibleRegion_ } + , refreshRate{ refreshRate_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayModeParametersKHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayModeParametersKHR & operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayModeParametersKHR & operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT + { + visibleRegion = visibleRegion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT + { + refreshRate = refreshRate_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDisplayModeParametersKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModeParametersKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDisplayModeParametersKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( visibleRegion, refreshRate ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModeParametersKHR const & ) const = default; +#else + bool operator==( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( visibleRegion == rhs.visibleRegion ) && ( refreshRate == rhs.refreshRate ); +# endif + } + + bool operator!=( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {}; + uint32_t refreshRate = {}; + }; + + // wrapper struct for 
struct VkDisplayModeCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModeCreateInfoKHR.html + struct DisplayModeCreateInfoKHR + { + using NativeType = VkDisplayModeCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , parameters{ parameters_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayModeCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayModeCreateInfoKHR & operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayModeCreateInfoKHR & operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT + { + parameters = parameters_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDisplayModeCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModeCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDisplayModeCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, parameters ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModeCreateInfoKHR const & ) const = default; +#else + bool operator==( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( parameters == rhs.parameters ); +# endif + } + + bool operator!=( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; + }; + + template 
<> + struct CppType + { + using Type = DisplayModeCreateInfoKHR; + }; + + // wrapper struct for struct VkDisplayModePropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModePropertiesKHR.html + struct DisplayModePropertiesKHR + { + using NativeType = VkDisplayModePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT + : displayMode{ displayMode_ } + , parameters{ parameters_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayModePropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayModePropertiesKHR & operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayModePropertiesKHR & operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayModePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDisplayModePropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( displayMode, parameters ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModePropertiesKHR const & ) const = default; +#else + bool operator==( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( displayMode == rhs.displayMode ) && ( parameters == rhs.parameters ); +# endif + } + + bool operator!=( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; + }; + + // wrapper struct for struct VkDisplayModeProperties2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModeProperties2KHR.html + struct DisplayModeProperties2KHR + { + using NativeType = VkDisplayModeProperties2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , displayModeProperties{ displayModeProperties_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) 
VULKAN_HPP_NOEXCEPT + : DisplayModeProperties2KHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayModeProperties2KHR & operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayModeProperties2KHR & operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayModeProperties2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModeProperties2KHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDisplayModeProperties2KHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, displayModeProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModeProperties2KHR const & ) const = default; +#else + bool operator==( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayModeProperties == rhs.displayModeProperties ); +# endif + } + + bool operator!=( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeProperties2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {}; + }; + + template <> + struct CppType + { + using Type = DisplayModeProperties2KHR; + }; + + // wrapper struct for struct VkDisplayModeStereoPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModeStereoPropertiesNV.html + struct DisplayModeStereoPropertiesNV + { + using NativeType = VkDisplayModeStereoPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeStereoPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayModeStereoPropertiesNV( VULKAN_HPP_NAMESPACE::Bool32 hdmi3DSupported_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , hdmi3DSupported{ hdmi3DSupported_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplayModeStereoPropertiesNV( DisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModeStereoPropertiesNV( VkDisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayModeStereoPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + DisplayModeStereoPropertiesNV & operator=( DisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayModeStereoPropertiesNV & operator=( VkDisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayModeStereoPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModeStereoPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkDisplayModeStereoPropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDisplayModeStereoPropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hdmi3DSupported ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModeStereoPropertiesNV const & ) const = default; +#else + bool operator==( DisplayModeStereoPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hdmi3DSupported == rhs.hdmi3DSupported ); +# endif + } + + bool operator!=( DisplayModeStereoPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeStereoPropertiesNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hdmi3DSupported = {}; + }; + + template <> + struct CppType + { + using Type = DisplayModeStereoPropertiesNV; + }; + + // wrapper struct for struct VkDisplayNativeHdrSurfaceCapabilitiesAMD, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayNativeHdrSurfaceCapabilitiesAMD.html + struct DisplayNativeHdrSurfaceCapabilitiesAMD + { + using NativeType = VkDisplayNativeHdrSurfaceCapabilitiesAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , localDimmingSupport{ localDimmingSupport_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayNativeHdrSurfaceCapabilitiesAMD( *reinterpret_cast( &rhs ) ) + { + } + + DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDisplayNativeHdrSurfaceCapabilitiesAMD *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( 
sType, pNext, localDimmingSupport ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const & ) const = default; +#else + bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingSupport == rhs.localDimmingSupport ); +# endif + } + + bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {}; + }; + + template <> + struct CppType + { + using Type = DisplayNativeHdrSurfaceCapabilitiesAMD; + }; + + // wrapper struct for struct VkDisplayPlaneCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlaneCapabilitiesKHR.html + struct DisplayPlaneCapabilitiesKHR + { + using NativeType = VkDisplayPlaneCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : supportedAlpha{ supportedAlpha_ } + , minSrcPosition{ minSrcPosition_ } + , maxSrcPosition{ maxSrcPosition_ } + , minSrcExtent{ minSrcExtent_ } + , maxSrcExtent{ maxSrcExtent_ } + , minDstPosition{ minDstPosition_ } + , maxDstPosition{ maxDstPosition_ } + , minDstExtent{ minDstExtent_ } + , maxDstExtent{ maxDstExtent_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayPlaneCapabilitiesKHR & operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayPlaneCapabilitiesKHR & operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayPlaneCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDisplayPlaneCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( supportedAlpha, minSrcPosition, maxSrcPosition, minSrcExtent, maxSrcExtent, 
minDstPosition, maxDstPosition, minDstExtent, maxDstExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneCapabilitiesKHR const & ) const = default; +#else + bool operator==( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( supportedAlpha == rhs.supportedAlpha ) && ( minSrcPosition == rhs.minSrcPosition ) && ( maxSrcPosition == rhs.maxSrcPosition ) && + ( minSrcExtent == rhs.minSrcExtent ) && ( maxSrcExtent == rhs.maxSrcExtent ) && ( minDstPosition == rhs.minDstPosition ) && + ( maxDstPosition == rhs.maxDstPosition ) && ( minDstExtent == rhs.minDstExtent ) && ( maxDstExtent == rhs.maxDstExtent ); +# endif + } + + bool operator!=( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {}; + VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {}; + VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {}; + VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {}; + VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {}; + VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {}; + VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {}; + }; + + // wrapper struct for struct VkDisplayPlaneCapabilities2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlaneCapabilities2KHR.html + struct DisplayPlaneCapabilities2KHR + { + using NativeType = VkDisplayPlaneCapabilities2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , capabilities{ capabilities_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneCapabilities2KHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayPlaneCapabilities2KHR & operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayPlaneCapabilities2KHR & operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayPlaneCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneCapabilities2KHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDisplayPlaneCapabilities2KHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, capabilities ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) 
+ auto operator<=>( DisplayPlaneCapabilities2KHR const & ) const = default; +#else + bool operator==( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( capabilities == rhs.capabilities ); +# endif + } + + bool operator!=( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {}; + }; + + template <> + struct CppType + { + using Type = DisplayPlaneCapabilities2KHR; + }; + + // wrapper struct for struct VkDisplayPlaneInfo2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlaneInfo2KHR.html + struct DisplayPlaneInfo2KHR + { + using NativeType = VkDisplayPlaneInfo2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayPlaneInfo2KHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, uint32_t planeIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , mode{ mode_ } + , planeIndex{ planeIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneInfo2KHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayPlaneInfo2KHR & operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayPlaneInfo2KHR & operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT + { + planeIndex = planeIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDisplayPlaneInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneInfo2KHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDisplayPlaneInfo2KHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mode, planeIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneInfo2KHR const & ) const = default; +#else + bool operator==( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( 
VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && ( planeIndex == rhs.planeIndex ); +# endif + } + + bool operator!=( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {}; + uint32_t planeIndex = {}; + }; + + template <> + struct CppType + { + using Type = DisplayPlaneInfo2KHR; + }; + + // wrapper struct for struct VkDisplayPlanePropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlanePropertiesKHR.html + struct DisplayPlanePropertiesKHR + { + using NativeType = VkDisplayPlanePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {}, + uint32_t currentStackIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : currentDisplay{ currentDisplay_ } + , currentStackIndex{ currentStackIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlanePropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayPlanePropertiesKHR & operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayPlanePropertiesKHR & operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayPlanePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlanePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDisplayPlanePropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( currentDisplay, currentStackIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlanePropertiesKHR const & ) const = default; +#else + bool operator==( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( currentDisplay == rhs.currentDisplay ) && ( currentStackIndex == rhs.currentStackIndex ); +# endif + } + + bool operator!=( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {}; + uint32_t currentStackIndex = {}; + }; + + // wrapper struct for struct VkDisplayPlaneProperties2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlaneProperties2KHR.html + struct DisplayPlaneProperties2KHR + { + using NativeType = VkDisplayPlaneProperties2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eDisplayPlaneProperties2KHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , displayPlaneProperties{ displayPlaneProperties_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneProperties2KHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayPlaneProperties2KHR & operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayPlaneProperties2KHR & operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayPlaneProperties2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneProperties2KHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDisplayPlaneProperties2KHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, displayPlaneProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneProperties2KHR const & ) const = default; +#else + bool operator==( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPlaneProperties == rhs.displayPlaneProperties ); +# endif + } + + bool operator!=( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {}; + }; + + template <> + struct CppType + { + using Type = DisplayPlaneProperties2KHR; + }; + + // wrapper struct for struct VkDisplayPowerInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPowerInfoEXT.html + struct DisplayPowerInfoEXT + { + using NativeType = VkDisplayPowerInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , powerState{ powerState_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayPowerInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + 
DisplayPowerInfoEXT & operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayPowerInfoEXT & operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT + { + powerState = powerState_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDisplayPowerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPowerInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDisplayPowerInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, powerState ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPowerInfoEXT const & ) const = default; +#else + bool operator==( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( powerState == rhs.powerState ); +# endif + } + + bool operator!=( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff; + }; + + template <> + struct CppType + { + using Type = DisplayPowerInfoEXT; + }; + + // wrapper struct for struct VkDisplayPresentInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPresentInfoKHR.html + struct DisplayPresentInfoKHR + { + using NativeType = VkDisplayPresentInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPresentInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {}, + VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcRect{ srcRect_ } + , dstRect{ dstRect_ } + , persistent{ persistent_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPresentInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayPresentInfoKHR & operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayPresentInfoKHR & operator=( VkDisplayPresentInfoKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT + { + srcRect = srcRect_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT + { + dstRect = dstRect_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT + { + persistent = persistent_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDisplayPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPresentInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDisplayPresentInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcRect, dstRect, persistent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPresentInfoKHR const & ) const = default; +#else + bool operator==( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcRect == rhs.srcRect ) && ( dstRect == rhs.dstRect ) && ( persistent == rhs.persistent ); +# endif + } + + bool operator!=( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Rect2D srcRect = {}; + VULKAN_HPP_NAMESPACE::Rect2D dstRect = {}; + VULKAN_HPP_NAMESPACE::Bool32 persistent = {}; + }; + + template <> + struct CppType + { + using Type = DisplayPresentInfoKHR; + }; + + // wrapper struct for struct VkDisplayPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPropertiesKHR.html + struct DisplayPropertiesKHR + { + using NativeType = VkDisplayPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {}, + const char * displayName_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT + : display{ display_ } + , displayName{ displayName_ } + , physicalDimensions{ physicalDimensions_ } + , physicalResolution{ physicalResolution_ } + , supportedTransforms{ supportedTransforms_ } + , planeReorderPossible{ planeReorderPossible_ } + , persistentContent{ 
persistentContent_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayPropertiesKHR & operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayPropertiesKHR & operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDisplayPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( display, displayName, physicalDimensions, physicalResolution, supportedTransforms, planeReorderPossible, persistentContent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = display <=> rhs.display; cmp != 0 ) + return cmp; + if ( displayName != rhs.displayName ) + if ( auto cmp = strcmp( displayName, rhs.displayName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = physicalDimensions <=> rhs.physicalDimensions; cmp != 0 ) + return cmp; + if ( auto cmp = physicalResolution <=> rhs.physicalResolution; cmp != 0 ) + return cmp; + if ( auto cmp = supportedTransforms <=> rhs.supportedTransforms; cmp != 0 ) + return cmp; + if ( auto cmp = planeReorderPossible <=> rhs.planeReorderPossible; cmp != 0 ) + return cmp; + if ( auto cmp = persistentContent <=> rhs.persistentContent; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( display == rhs.display ) && ( ( displayName == rhs.displayName ) || ( strcmp( displayName, rhs.displayName ) == 0 ) ) && + ( physicalDimensions == rhs.physicalDimensions ) && ( physicalResolution == rhs.physicalResolution ) && + ( supportedTransforms == rhs.supportedTransforms ) && ( planeReorderPossible == rhs.planeReorderPossible ) && + ( persistentContent == rhs.persistentContent ); + } + + bool operator!=( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::DisplayKHR display = {}; + const char * displayName = {}; + VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {}; + VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {}; + VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {}; + }; + + // wrapper struct for struct VkDisplayProperties2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayProperties2KHR.html + struct DisplayProperties2KHR + { + using NativeType = VkDisplayProperties2KHR; + + static const bool allowDuplicate = false; + 
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , displayProperties{ displayProperties_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayProperties2KHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayProperties2KHR & operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayProperties2KHR & operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayProperties2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayProperties2KHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDisplayProperties2KHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, displayProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayProperties2KHR const & ) const = default; +#else + bool operator==( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayProperties == rhs.displayProperties ); +# endif + } + + bool operator!=( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {}; + }; + + template <> + struct CppType + { + using Type = DisplayProperties2KHR; + }; + + // wrapper struct for struct VkDisplaySurfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplaySurfaceCreateInfoKHR.html + struct DisplaySurfaceCreateInfoKHR + { + using NativeType = VkDisplaySurfaceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplaySurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, + uint32_t planeIndex_ = {}, + uint32_t planeStackIndex_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + float globalAlpha_ = {}, + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, + VULKAN_HPP_NAMESPACE::Extent2D 
imageExtent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , displayMode{ displayMode_ } + , planeIndex{ planeIndex_ } + , planeStackIndex{ planeStackIndex_ } + , transform{ transform_ } + , globalAlpha{ globalAlpha_ } + , alphaMode{ alphaMode_ } + , imageExtent{ imageExtent_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplaySurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplaySurfaceCreateInfoKHR & operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplaySurfaceCreateInfoKHR & operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT + { + displayMode = displayMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT + { + planeIndex = planeIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT + { + planeStackIndex = planeStackIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT + { + globalAlpha = globalAlpha_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT + { + alphaMode = alphaMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDisplaySurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplaySurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDisplaySurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, displayMode, planeIndex, planeStackIndex, transform, globalAlpha, alphaMode, imageExtent ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplaySurfaceCreateInfoKHR const & ) const = default; +#else + bool operator==( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( displayMode == rhs.displayMode ) && + ( planeIndex == rhs.planeIndex ) && ( planeStackIndex == rhs.planeStackIndex ) && ( transform == rhs.transform ) && + ( globalAlpha == rhs.globalAlpha ) && ( alphaMode == rhs.alphaMode ) && ( imageExtent == rhs.imageExtent ); +# endif + } + + bool operator!=( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; + uint32_t planeIndex = {}; + uint32_t planeStackIndex = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + float globalAlpha = {}; + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque; + VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; + }; + + template <> + struct CppType + { + using Type = DisplaySurfaceCreateInfoKHR; + }; + + // wrapper struct for struct VkDisplaySurfaceStereoCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplaySurfaceStereoCreateInfoNV.html + struct DisplaySurfaceStereoCreateInfoNV + { + using NativeType = VkDisplaySurfaceStereoCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceStereoCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplaySurfaceStereoCreateInfoNV( VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV stereoType_ = VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV::eNone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stereoType{ stereoType_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplaySurfaceStereoCreateInfoNV( DisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplaySurfaceStereoCreateInfoNV( VkDisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplaySurfaceStereoCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + DisplaySurfaceStereoCreateInfoNV & operator=( DisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplaySurfaceStereoCreateInfoNV & operator=( VkDisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceStereoCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceStereoCreateInfoNV & setStereoType( VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV stereoType_ ) VULKAN_HPP_NOEXCEPT + { + stereoType = stereoType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator 
VkDisplaySurfaceStereoCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplaySurfaceStereoCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplaySurfaceStereoCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDisplaySurfaceStereoCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stereoType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplaySurfaceStereoCreateInfoNV const & ) const = default; +#else + bool operator==( DisplaySurfaceStereoCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stereoType == rhs.stereoType ); +# endif + } + + bool operator!=( DisplaySurfaceStereoCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceStereoCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV stereoType = VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV::eNone; + }; + + template <> + struct CppType + { + using Type = DisplaySurfaceStereoCreateInfoNV; + }; + + // wrapper struct for struct VkDrawIndexedIndirectCommand, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawIndexedIndirectCommand.html + struct DrawIndexedIndirectCommand + { + using NativeType = VkDrawIndexedIndirectCommand; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_ = {}, + uint32_t instanceCount_ = {}, + uint32_t firstIndex_ = {}, + int32_t vertexOffset_ = {}, + uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT + : indexCount{ indexCount_ } + , instanceCount{ instanceCount_ } + , firstIndex{ firstIndex_ } + , vertexOffset{ vertexOffset_ } + , firstInstance{ firstInstance_ } + { + } + + VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawIndexedIndirectCommand( *reinterpret_cast( &rhs ) ) + { + } + + DrawIndexedIndirectCommand & operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DrawIndexedIndirectCommand & operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT + { + indexCount = indexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT + { + firstIndex = firstIndex_; + return 
*this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT + { + vertexOffset = vertexOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT + { + firstInstance = firstInstance_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDrawIndexedIndirectCommand const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawIndexedIndirectCommand const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDrawIndexedIndirectCommand *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawIndexedIndirectCommand const & ) const = default; +#else + bool operator==( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( indexCount == rhs.indexCount ) && ( instanceCount == rhs.instanceCount ) && ( firstIndex == rhs.firstIndex ) && + ( vertexOffset == rhs.vertexOffset ) && ( firstInstance == rhs.firstInstance ); +# endif + } + + bool operator!=( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t indexCount = {}; + uint32_t instanceCount = {}; + uint32_t firstIndex = {}; + int32_t vertexOffset = {}; + uint32_t firstInstance = {}; + }; + + // wrapper struct for struct VkDrawIndirectCommand, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawIndirectCommand.html + struct DrawIndirectCommand + { + using NativeType = VkDrawIndirectCommand; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_ = {}, + uint32_t instanceCount_ = {}, + uint32_t firstVertex_ = {}, + uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT + : vertexCount{ vertexCount_ } + , instanceCount{ instanceCount_ } + , firstVertex{ firstVertex_ } + , firstInstance{ firstInstance_ } + { + } + + VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT : DrawIndirectCommand( *reinterpret_cast( &rhs ) ) + { + } + + DrawIndirectCommand & operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DrawIndirectCommand & operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexCount = vertexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; 
+ } + + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT + { + firstVertex = firstVertex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT + { + firstInstance = firstInstance_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDrawIndirectCommand const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawIndirectCommand const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDrawIndirectCommand *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( vertexCount, instanceCount, firstVertex, firstInstance ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawIndirectCommand const & ) const = default; +#else + bool operator==( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( vertexCount == rhs.vertexCount ) && ( instanceCount == rhs.instanceCount ) && ( firstVertex == rhs.firstVertex ) && + ( firstInstance == rhs.firstInstance ); +# endif + } + + bool operator!=( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t vertexCount = {}; + uint32_t instanceCount = {}; + uint32_t firstVertex = {}; + uint32_t firstInstance = {}; + }; + + // wrapper struct for struct VkDrawIndirectCountIndirectCommandEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawIndirectCountIndirectCommandEXT.html + struct DrawIndirectCountIndirectCommandEXT + { + using NativeType = VkDrawIndirectCountIndirectCommandEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawIndirectCountIndirectCommandEXT( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t stride_ = {}, + uint32_t commandCount_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferAddress{ bufferAddress_ } + , stride{ stride_ } + , commandCount{ commandCount_ } + { + } + + VULKAN_HPP_CONSTEXPR DrawIndirectCountIndirectCommandEXT( DrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawIndirectCountIndirectCommandEXT( VkDrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawIndirectCountIndirectCommandEXT( *reinterpret_cast( &rhs ) ) + { + } + + DrawIndirectCountIndirectCommandEXT & operator=( DrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DrawIndirectCountIndirectCommandEXT & operator=( VkDrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferAddress = bufferAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT & setStride( uint32_t 
stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT & setCommandCount( uint32_t commandCount_ ) VULKAN_HPP_NOEXCEPT + { + commandCount = commandCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDrawIndirectCountIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawIndirectCountIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawIndirectCountIndirectCommandEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDrawIndirectCountIndirectCommandEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( bufferAddress, stride, commandCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawIndirectCountIndirectCommandEXT const & ) const = default; +#else + bool operator==( DrawIndirectCountIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( bufferAddress == rhs.bufferAddress ) && ( stride == rhs.stride ) && ( commandCount == rhs.commandCount ); +# endif + } + + bool operator!=( DrawIndirectCountIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t stride = {}; + uint32_t commandCount = {}; + }; + + // wrapper struct for struct VkDrawMeshTasksIndirectCommandEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawMeshTasksIndirectCommandEXT.html + struct DrawMeshTasksIndirectCommandEXT + { + using NativeType = VkDrawMeshTasksIndirectCommandEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DrawMeshTasksIndirectCommandEXT( uint32_t groupCountX_ = {}, uint32_t groupCountY_ = {}, uint32_t groupCountZ_ = {} ) VULKAN_HPP_NOEXCEPT + : groupCountX{ groupCountX_ } + , groupCountY{ groupCountY_ } + , groupCountZ{ groupCountZ_ } + { + } + + VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandEXT( DrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawMeshTasksIndirectCommandEXT( VkDrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawMeshTasksIndirectCommandEXT( *reinterpret_cast( &rhs ) ) + { + } + + DrawMeshTasksIndirectCommandEXT & operator=( DrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DrawMeshTasksIndirectCommandEXT & operator=( VkDrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountX( uint32_t groupCountX_ ) VULKAN_HPP_NOEXCEPT + { + groupCountX = groupCountX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountY( uint32_t groupCountY_ ) VULKAN_HPP_NOEXCEPT + { + groupCountY = groupCountY_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountZ( 
uint32_t groupCountZ_ ) VULKAN_HPP_NOEXCEPT + { + groupCountZ = groupCountZ_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDrawMeshTasksIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawMeshTasksIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawMeshTasksIndirectCommandEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDrawMeshTasksIndirectCommandEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( groupCountX, groupCountY, groupCountZ ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawMeshTasksIndirectCommandEXT const & ) const = default; +#else + bool operator==( DrawMeshTasksIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( groupCountX == rhs.groupCountX ) && ( groupCountY == rhs.groupCountY ) && ( groupCountZ == rhs.groupCountZ ); +# endif + } + + bool operator!=( DrawMeshTasksIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t groupCountX = {}; + uint32_t groupCountY = {}; + uint32_t groupCountZ = {}; + }; + + // wrapper struct for struct VkDrawMeshTasksIndirectCommandNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawMeshTasksIndirectCommandNV.html + struct DrawMeshTasksIndirectCommandNV + { + using NativeType = VkDrawMeshTasksIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = {}, uint32_t firstTask_ = {} ) VULKAN_HPP_NOEXCEPT + : taskCount{ taskCount_ } + , firstTask{ firstTask_ } + { + } + + VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawMeshTasksIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + DrawMeshTasksIndirectCommandNV & operator=( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DrawMeshTasksIndirectCommandNV & operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT + { + taskCount = taskCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT + { + firstTask = firstTask_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDrawMeshTasksIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawMeshTasksIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this 
); + } + + operator VkDrawMeshTasksIndirectCommandNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( taskCount, firstTask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawMeshTasksIndirectCommandNV const & ) const = default; +#else + bool operator==( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( taskCount == rhs.taskCount ) && ( firstTask == rhs.firstTask ); +# endif + } + + bool operator!=( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t taskCount = {}; + uint32_t firstTask = {}; + }; + + // wrapper struct for struct VkDrmFormatModifierProperties2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrmFormatModifierProperties2EXT.html + struct DrmFormatModifierProperties2EXT + { + using NativeType = VkDrmFormatModifierProperties2EXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( uint64_t drmFormatModifier_ = {}, + uint32_t drmFormatModifierPlaneCount_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifier{ drmFormatModifier_ } + , drmFormatModifierPlaneCount{ drmFormatModifierPlaneCount_ } + , drmFormatModifierTilingFeatures{ drmFormatModifierTilingFeatures_ } + { + } + + VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierProperties2EXT( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrmFormatModifierProperties2EXT( *reinterpret_cast( &rhs ) ) + { + } + + DrmFormatModifierProperties2EXT & operator=( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DrmFormatModifierProperties2EXT & operator=( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDrmFormatModifierProperties2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrmFormatModifierProperties2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrmFormatModifierProperties2EXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDrmFormatModifierProperties2EXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrmFormatModifierProperties2EXT const & ) const = default; +#else + bool operator==( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( drmFormatModifier == rhs.drmFormatModifier ) && ( 
drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && + ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures ); +# endif + } + + bool operator!=( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint64_t drmFormatModifier = {}; + uint32_t drmFormatModifierPlaneCount = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures = {}; + }; + + // wrapper struct for struct VkDrmFormatModifierPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrmFormatModifierPropertiesEXT.html + struct DrmFormatModifierPropertiesEXT + { + using NativeType = VkDrmFormatModifierPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {}, + uint32_t drmFormatModifierPlaneCount_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifier{ drmFormatModifier_ } + , drmFormatModifierPlaneCount{ drmFormatModifierPlaneCount_ } + , drmFormatModifierTilingFeatures{ drmFormatModifierTilingFeatures_ } + { + } + + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrmFormatModifierPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + DrmFormatModifierPropertiesEXT & operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrmFormatModifierPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDrmFormatModifierPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrmFormatModifierPropertiesEXT const & ) const = default; +#else + bool operator==( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( drmFormatModifier == rhs.drmFormatModifier ) && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && + ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures ); +# endif + } + + bool operator!=( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint64_t drmFormatModifier = {}; + uint32_t drmFormatModifierPlaneCount = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {}; + }; + + // wrapper struct for struct 
VkDrmFormatModifierPropertiesList2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrmFormatModifierPropertiesList2EXT.html + struct DrmFormatModifierPropertiesList2EXT + { + using NativeType = VkDrmFormatModifierPropertiesList2EXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesList2EXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( uint32_t drmFormatModifierCount_ = {}, + VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , drmFormatModifierCount{ drmFormatModifierCount_ } + , pDrmFormatModifierProperties{ pDrmFormatModifierProperties_ } + { + } + + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierPropertiesList2EXT( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrmFormatModifierPropertiesList2EXT( *reinterpret_cast( &rhs ) ) + { + } + + DrmFormatModifierPropertiesList2EXT & operator=( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DrmFormatModifierPropertiesList2EXT & operator=( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDrmFormatModifierPropertiesList2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrmFormatModifierPropertiesList2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrmFormatModifierPropertiesList2EXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDrmFormatModifierPropertiesList2EXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrmFormatModifierPropertiesList2EXT const & ) const = default; +#else + bool operator==( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && + ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties ); +# endif + } + + bool operator!=( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesList2EXT; + void * pNext = {}; + uint32_t drmFormatModifierCount = {}; + VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties = {}; + }; + + template <> + struct CppType + { + using Type = DrmFormatModifierPropertiesList2EXT; + }; + + // wrapper struct for struct VkDrmFormatModifierPropertiesListEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrmFormatModifierPropertiesListEXT.html + struct DrmFormatModifierPropertiesListEXT + { + using NativeType = VkDrmFormatModifierPropertiesListEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesListEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( uint32_t drmFormatModifierCount_ = {}, + VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , drmFormatModifierCount{ drmFormatModifierCount_ } + , pDrmFormatModifierProperties{ pDrmFormatModifierProperties_ } + { + } + + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrmFormatModifierPropertiesListEXT( *reinterpret_cast( &rhs ) ) + { + } + + DrmFormatModifierPropertiesListEXT & operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DrmFormatModifierPropertiesListEXT & operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDrmFormatModifierPropertiesListEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrmFormatModifierPropertiesListEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkDrmFormatModifierPropertiesListEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrmFormatModifierPropertiesListEXT const & ) const = default; +#else + bool operator==( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && + ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties ); +# endif + } + + bool operator!=( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT; + void * pNext = {}; + uint32_t drmFormatModifierCount = {}; + VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties = {}; + }; + + template <> + struct CppType + { + using Type = DrmFormatModifierPropertiesListEXT; + }; + + // wrapper struct for struct VkEventCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkEventCreateInfo.html + struct EventCreateInfo + { + using NativeType = VkEventCreateInfo; + + static const bool 
allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eEventCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR EventCreateInfo( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : EventCreateInfo( *reinterpret_cast( &rhs ) ) {} + + EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + EventCreateInfo & operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkEventCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkEventCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkEventCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( EventCreateInfo const & ) const = default; +#else + bool operator==( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {}; + }; + + template <> + struct CppType + { + using Type = EventCreateInfo; + }; + + // wrapper struct for struct VkPipelineLibraryCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineLibraryCreateInfoKHR.html + struct PipelineLibraryCreateInfoKHR + { + using NativeType = VkPipelineLibraryCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {}, + const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , libraryCount{ libraryCount_ } + , pLibraries{ pLibraries_ } + { + } + + 
VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineLibraryCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLibraryCreateInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), libraryCount( static_cast( libraries_.size() ) ), pLibraries( libraries_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT + { + libraryCount = libraryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ ) VULKAN_HPP_NOEXCEPT + { + pLibraries = pLibraries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLibraryCreateInfoKHR & + setLibraries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_ ) VULKAN_HPP_NOEXCEPT + { + libraryCount = static_cast( libraries_.size() ); + pLibraries = libraries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineLibraryCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineLibraryCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, libraryCount, pLibraries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineLibraryCreateInfoKHR const & ) const = default; +#else + bool operator==( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( libraryCount == rhs.libraryCount ) && ( pLibraries == rhs.pLibraries ); +# endif + } + + bool operator!=( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR; + const void * pNext = {}; + uint32_t libraryCount = {}; + const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries = {}; + }; + + template <> + struct CppType + { + using Type = 
PipelineLibraryCreateInfoKHR; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + // wrapper struct for struct VkExecutionGraphPipelineCreateInfoAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExecutionGraphPipelineCreateInfoAMDX.html + struct ExecutionGraphPipelineCreateInfoAMDX + { + using NativeType = VkExecutionGraphPipelineCreateInfoAMDX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExecutionGraphPipelineCreateInfoAMDX; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineCreateInfoAMDX( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , stageCount{ stageCount_ } + , pStages{ pStages_ } + , pLibraryInfo{ pLibraryInfo_ } + , layout{ layout_ } + , basePipelineHandle{ basePipelineHandle_ } + , basePipelineIndex{ basePipelineIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineCreateInfoAMDX( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExecutionGraphPipelineCreateInfoAMDX( VkExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : ExecutionGraphPipelineCreateInfoAMDX( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ExecutionGraphPipelineCreateInfoAMDX( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , pLibraryInfo( pLibraryInfo_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ExecutionGraphPipelineCreateInfoAMDX & operator=( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExecutionGraphPipelineCreateInfoAMDX & operator=( VkExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
ExecutionGraphPipelineCreateInfoAMDX & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ExecutionGraphPipelineCreateInfoAMDX & + setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & + setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT + { + pLibraryInfo = pLibraryInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & + setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExecutionGraphPipelineCreateInfoAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExecutionGraphPipelineCreateInfoAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExecutionGraphPipelineCreateInfoAMDX const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExecutionGraphPipelineCreateInfoAMDX *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, stageCount, pStages, pLibraryInfo, layout, basePipelineHandle, basePipelineIndex ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExecutionGraphPipelineCreateInfoAMDX const & ) const = default; +# else + bool operator==( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && + ( pLibraryInfo == rhs.pLibraryInfo ) && ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) && + ( basePipelineIndex == rhs.basePipelineIndex ); +# endif + } + + bool operator!=( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExecutionGraphPipelineCreateInfoAMDX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = 
{}; + }; + + template <> + struct CppType + { + using Type = ExecutionGraphPipelineCreateInfoAMDX; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + // wrapper struct for struct VkExecutionGraphPipelineScratchSizeAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExecutionGraphPipelineScratchSizeAMDX.html + struct ExecutionGraphPipelineScratchSizeAMDX + { + using NativeType = VkExecutionGraphPipelineScratchSizeAMDX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExecutionGraphPipelineScratchSizeAMDX; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::DeviceSize minSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sizeGranularity_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minSize{ minSize_ } + , maxSize{ maxSize_ } + , sizeGranularity{ sizeGranularity_ } + { + } + + VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExecutionGraphPipelineScratchSizeAMDX( VkExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : ExecutionGraphPipelineScratchSizeAMDX( *reinterpret_cast( &rhs ) ) + { + } + + ExecutionGraphPipelineScratchSizeAMDX & operator=( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExecutionGraphPipelineScratchSizeAMDX & operator=( VkExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setMinSize( VULKAN_HPP_NAMESPACE::DeviceSize minSize_ ) VULKAN_HPP_NOEXCEPT + { + minSize = minSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setMaxSize( VULKAN_HPP_NAMESPACE::DeviceSize maxSize_ ) VULKAN_HPP_NOEXCEPT + { + maxSize = maxSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setSizeGranularity( VULKAN_HPP_NAMESPACE::DeviceSize sizeGranularity_ ) VULKAN_HPP_NOEXCEPT + { + sizeGranularity = sizeGranularity_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExecutionGraphPipelineScratchSizeAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExecutionGraphPipelineScratchSizeAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExecutionGraphPipelineScratchSizeAMDX const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExecutionGraphPipelineScratchSizeAMDX *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minSize, maxSize, sizeGranularity ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
ExecutionGraphPipelineScratchSizeAMDX const & ) const = default; +# else + bool operator==( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSize == rhs.minSize ) && ( maxSize == rhs.maxSize ) && + ( sizeGranularity == rhs.sizeGranularity ); +# endif + } + + bool operator!=( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExecutionGraphPipelineScratchSizeAMDX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sizeGranularity = {}; + }; + + template <> + struct CppType + { + using Type = ExecutionGraphPipelineScratchSizeAMDX; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + // wrapper struct for struct VkExportFenceCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportFenceCreateInfo.html + struct ExportFenceCreateInfo + { + using NativeType = VkExportFenceCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } + { + } + + VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportFenceCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + ExportFenceCreateInfo & operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExportFenceCreateInfo & operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExportFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportFenceCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExportFenceCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportFenceCreateInfo const & ) const = default; +#else + 
bool operator==( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); +# endif + } + + bool operator!=( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {}; + }; + + template <> + struct CppType + { + using Type = ExportFenceCreateInfo; + }; + + using ExportFenceCreateInfoKHR = ExportFenceCreateInfo; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + // wrapper struct for struct VkExportFenceWin32HandleInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportFenceWin32HandleInfoKHR.html + struct ExportFenceWin32HandleInfoKHR + { + using NativeType = VkExportFenceWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, + DWORD dwAccess_ = {}, + LPCWSTR name_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pAttributes{ pAttributes_ } + , dwAccess{ dwAccess_ } + , name{ name_ } + { + } + + VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportFenceWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + ExportFenceWin32HandleInfoKHR & operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExportFenceWin32HandleInfoKHR & operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportFenceWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExportFenceWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 
14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pAttributes, dwAccess, name ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportFenceWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); +# endif + } + + bool operator!=( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; + }; + + template <> + struct CppType + { + using Type = ExportFenceWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + // wrapper struct for struct VkExportMemoryAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMemoryAllocateInfo.html + struct ExportMemoryAllocateInfo + { + using NativeType = VkExportMemoryAllocateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } + { + } + + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMemoryAllocateInfo( *reinterpret_cast( &rhs ) ) + { + } + + ExportMemoryAllocateInfo & operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExportMemoryAllocateInfo & operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExportMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMemoryAllocateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExportMemoryAllocateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const 
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryAllocateInfo const & ) const = default; +#else + bool operator==( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); +# endif + } + + bool operator!=( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; + }; + + template <> + struct CppType + { + using Type = ExportMemoryAllocateInfo; + }; + + using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo; + + // wrapper struct for struct VkExportMemoryAllocateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMemoryAllocateInfoNV.html + struct ExportMemoryAllocateInfoNV + { + using NativeType = VkExportMemoryAllocateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } + { + } + + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMemoryAllocateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + ExportMemoryAllocateInfoNV & operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExportMemoryAllocateInfoNV & operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExportMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMemoryAllocateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExportMemoryAllocateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR 
) + auto operator<=>( ExportMemoryAllocateInfoNV const & ) const = default; +#else + bool operator==( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); +# endif + } + + bool operator!=( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; + }; + + template <> + struct CppType + { + using Type = ExportMemoryAllocateInfoNV; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + // wrapper struct for struct VkExportMemoryWin32HandleInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMemoryWin32HandleInfoKHR.html + struct ExportMemoryWin32HandleInfoKHR + { + using NativeType = VkExportMemoryWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, + DWORD dwAccess_ = {}, + LPCWSTR name_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pAttributes{ pAttributes_ } + , dwAccess{ dwAccess_ } + , name{ name_ } + { + } + + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMemoryWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + ExportMemoryWin32HandleInfoKHR & operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExportMemoryWin32HandleInfoKHR & operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMemoryWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExportMemoryWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pAttributes, dwAccess, name ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); +# endif + } + + bool operator!=( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; + }; + + template <> + struct CppType + { + using Type = ExportMemoryWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + // wrapper struct for struct VkExportMemoryWin32HandleInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMemoryWin32HandleInfoNV.html + struct ExportMemoryWin32HandleInfoNV + { + using NativeType = VkExportMemoryWin32HandleInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoNV; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pAttributes{ pAttributes_ } + , dwAccess{ dwAccess_ } + { + } + + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMemoryWin32HandleInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + ExportMemoryWin32HandleInfoNV & operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExportMemoryWin32HandleInfoNV & operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkExportMemoryWin32HandleInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExportMemoryWin32HandleInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pAttributes, dwAccess ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryWin32HandleInfoNV const & ) const = default; +# else + bool operator==( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ); +# endif + } + + bool operator!=( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + }; + + template <> + struct CppType + { + using Type = ExportMemoryWin32HandleInfoNV; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + // wrapper struct for struct VkExportMetalBufferInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalBufferInfoEXT.html + struct ExportMetalBufferInfoEXT + { + using NativeType = VkExportMetalBufferInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalBufferInfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalBufferInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + MTLBuffer_id mtlBuffer_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memory{ memory_ } + , mtlBuffer{ mtlBuffer_ } + { + } + + VULKAN_HPP_CONSTEXPR ExportMetalBufferInfoEXT( ExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMetalBufferInfoEXT( VkExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalBufferInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ExportMetalBufferInfoEXT & operator=( ExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExportMetalBufferInfoEXT & operator=( VkExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setMtlBuffer( MTLBuffer_id mtlBuffer_ ) VULKAN_HPP_NOEXCEPT + { + mtlBuffer = mtlBuffer_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExportMetalBufferInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + 
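+    // Illustrative usage sketch (not authoritative): wrappers like this one convert implicitly to their
+    // native Vk structs via the operators above, so they can be chained behind ExportMetalObjectsInfoEXT
+    // (defined further below) and handed straight to the VK_EXT_metal_objects entry point. Here `device`
+    // and `memory` are hypothetical vk::Device / vk::DeviceMemory handles, `vk` is assumed to be the
+    // default alias of VULKAN_HPP_NAMESPACE, and vkExportMetalObjectsEXT is assumed to have been loaded:
+    //
+    //   vk::ExportMetalBufferInfoEXT bufferInfo{};
+    //   bufferInfo.setMemory( memory );                  // memory must have been allocated as exportable
+    //   vk::ExportMetalObjectsInfoEXT exportInfo{};
+    //   exportInfo.setPNext( &bufferInfo );
+    //   vkExportMetalObjectsEXT( device, exportInfo );   // driver fills in bufferInfo.mtlBuffer
+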
operator VkExportMetalBufferInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalBufferInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExportMetalBufferInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, mtlBuffer ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalBufferInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( mtlBuffer == rhs.mtlBuffer ); +# endif + } + + bool operator!=( ExportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalBufferInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + MTLBuffer_id mtlBuffer = {}; + }; + + template <> + struct CppType + { + using Type = ExportMetalBufferInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + // wrapper struct for struct VkExportMetalCommandQueueInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalCommandQueueInfoEXT.html + struct ExportMetalCommandQueueInfoEXT + { + using NativeType = VkExportMetalCommandQueueInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalCommandQueueInfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalCommandQueueInfoEXT( VULKAN_HPP_NAMESPACE::Queue queue_ = {}, + MTLCommandQueue_id mtlCommandQueue_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , queue{ queue_ } + , mtlCommandQueue{ mtlCommandQueue_ } + { + } + + VULKAN_HPP_CONSTEXPR ExportMetalCommandQueueInfoEXT( ExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMetalCommandQueueInfoEXT( VkExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalCommandQueueInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ExportMetalCommandQueueInfoEXT & operator=( ExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExportMetalCommandQueueInfoEXT & operator=( VkExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setQueue( VULKAN_HPP_NAMESPACE::Queue queue_ ) VULKAN_HPP_NOEXCEPT + { + queue = queue_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setMtlCommandQueue( MTLCommandQueue_id mtlCommandQueue_ ) VULKAN_HPP_NOEXCEPT + { + mtlCommandQueue = 
mtlCommandQueue_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExportMetalCommandQueueInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalCommandQueueInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalCommandQueueInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExportMetalCommandQueueInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, queue, mtlCommandQueue ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalCommandQueueInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalCommandQueueInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queue == rhs.queue ) && ( mtlCommandQueue == rhs.mtlCommandQueue ); +# endif + } + + bool operator!=( ExportMetalCommandQueueInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalCommandQueueInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Queue queue = {}; + MTLCommandQueue_id mtlCommandQueue = {}; + }; + + template <> + struct CppType + { + using Type = ExportMetalCommandQueueInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + // wrapper struct for struct VkExportMetalDeviceInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalDeviceInfoEXT.html + struct ExportMetalDeviceInfoEXT + { + using NativeType = VkExportMetalDeviceInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalDeviceInfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalDeviceInfoEXT( MTLDevice_id mtlDevice_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , mtlDevice{ mtlDevice_ } + { + } + + VULKAN_HPP_CONSTEXPR ExportMetalDeviceInfoEXT( ExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMetalDeviceInfoEXT( VkExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalDeviceInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ExportMetalDeviceInfoEXT & operator=( ExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExportMetalDeviceInfoEXT & operator=( VkExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT & setMtlDevice( MTLDevice_id mtlDevice_ ) VULKAN_HPP_NOEXCEPT + { + mtlDevice = mtlDevice_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExportMetalDeviceInfoEXT const &() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalDeviceInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalDeviceInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExportMetalDeviceInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mtlDevice ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalDeviceInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalDeviceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlDevice == rhs.mtlDevice ); +# endif + } + + bool operator!=( ExportMetalDeviceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalDeviceInfoEXT; + const void * pNext = {}; + MTLDevice_id mtlDevice = {}; + }; + + template <> + struct CppType + { + using Type = ExportMetalDeviceInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + // wrapper struct for struct VkExportMetalIOSurfaceInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalIOSurfaceInfoEXT.html + struct ExportMetalIOSurfaceInfoEXT + { + using NativeType = VkExportMetalIOSurfaceInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalIoSurfaceInfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ExportMetalIOSurfaceInfoEXT( VULKAN_HPP_NAMESPACE::Image image_ = {}, IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , image{ image_ } + , ioSurface{ ioSurface_ } + { + } + + VULKAN_HPP_CONSTEXPR ExportMetalIOSurfaceInfoEXT( ExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMetalIOSurfaceInfoEXT( VkExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalIOSurfaceInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ExportMetalIOSurfaceInfoEXT & operator=( ExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExportMetalIOSurfaceInfoEXT & operator=( VkExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setIoSurface( IOSurfaceRef ioSurface_ ) VULKAN_HPP_NOEXCEPT + { + ioSurface = ioSurface_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExportMetalIOSurfaceInfoEXT const &() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalIOSurfaceInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalIOSurfaceInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExportMetalIOSurfaceInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image, ioSurface ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalIOSurfaceInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( ioSurface == rhs.ioSurface ); +# endif + } + + bool operator!=( ExportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalIoSurfaceInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + IOSurfaceRef ioSurface = {}; + }; + + template <> + struct CppType + { + using Type = ExportMetalIOSurfaceInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + // wrapper struct for struct VkExportMetalObjectCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalObjectCreateInfoEXT.html + struct ExportMetalObjectCreateInfoEXT + { + using NativeType = VkExportMetalObjectCreateInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalObjectCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalObjectCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType_ = VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT::eMetalDevice, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , exportObjectType{ exportObjectType_ } + { + } + + VULKAN_HPP_CONSTEXPR ExportMetalObjectCreateInfoEXT( ExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMetalObjectCreateInfoEXT( VkExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalObjectCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ExportMetalObjectCreateInfoEXT & operator=( ExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExportMetalObjectCreateInfoEXT & operator=( VkExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT & + setExportObjectType( VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType_ ) VULKAN_HPP_NOEXCEPT + { + exportObjectType = exportObjectType_; + 
return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExportMetalObjectCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalObjectCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalObjectCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExportMetalObjectCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, exportObjectType ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalObjectCreateInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalObjectCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportObjectType == rhs.exportObjectType ); +# endif + } + + bool operator!=( ExportMetalObjectCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalObjectCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType = VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT::eMetalDevice; + }; + + template <> + struct CppType + { + using Type = ExportMetalObjectCreateInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + // wrapper struct for struct VkExportMetalObjectsInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalObjectsInfoEXT.html + struct ExportMetalObjectsInfoEXT + { + using NativeType = VkExportMetalObjectsInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalObjectsInfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {} + + VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT( ExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMetalObjectsInfoEXT( VkExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalObjectsInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ExportMetalObjectsInfoEXT & operator=( ExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExportMetalObjectsInfoEXT & operator=( VkExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExportMetalObjectsInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalObjectsInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalObjectsInfoEXT 
const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExportMetalObjectsInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalObjectsInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalObjectsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); +# endif + } + + bool operator!=( ExportMetalObjectsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalObjectsInfoEXT; + const void * pNext = {}; + }; + + template <> + struct CppType + { + using Type = ExportMetalObjectsInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + // wrapper struct for struct VkExportMetalSharedEventInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalSharedEventInfoEXT.html + struct ExportMetalSharedEventInfoEXT + { + using NativeType = VkExportMetalSharedEventInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalSharedEventInfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalSharedEventInfoEXT( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::Event event_ = {}, + MTLSharedEvent_id mtlSharedEvent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , event{ event_ } + , mtlSharedEvent{ mtlSharedEvent_ } + { + } + + VULKAN_HPP_CONSTEXPR ExportMetalSharedEventInfoEXT( ExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMetalSharedEventInfoEXT( VkExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalSharedEventInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ExportMetalSharedEventInfoEXT & operator=( ExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExportMetalSharedEventInfoEXT & operator=( VkExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setEvent( VULKAN_HPP_NAMESPACE::Event event_ ) VULKAN_HPP_NOEXCEPT + { + event = event_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) VULKAN_HPP_NOEXCEPT + { + mtlSharedEvent = mtlSharedEvent_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + 
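+    // The setters above return *this, so a struct can be populated fluently before being chained for
+    // export. Minimal sketch (hypothetical vk::Semaphore `semaphore` created with an
+    // ExportMetalObjectCreateInfoEXT requesting eMetalSharedEvent; `vk` assumed to alias VULKAN_HPP_NAMESPACE):
+    //
+    //   auto sharedEventInfo = vk::ExportMetalSharedEventInfoEXT{}.setSemaphore( semaphore );
+    //   // sharedEventInfo.mtlSharedEvent is filled in once the struct is chained into an
+    //   // ExportMetalObjectsInfoEXT and passed to vkExportMetalObjectsEXT.
+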
+ operator VkExportMetalSharedEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalSharedEventInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalSharedEventInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExportMetalSharedEventInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, event, mtlSharedEvent ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalSharedEventInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( event == rhs.event ) && + ( mtlSharedEvent == rhs.mtlSharedEvent ); +# endif + } + + bool operator!=( ExportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalSharedEventInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::Event event = {}; + MTLSharedEvent_id mtlSharedEvent = {}; + }; + + template <> + struct CppType + { + using Type = ExportMetalSharedEventInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + // wrapper struct for struct VkExportMetalTextureInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalTextureInfoEXT.html + struct ExportMetalTextureInfoEXT + { + using NativeType = VkExportMetalTextureInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalTextureInfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalTextureInfoEXT( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::BufferView bufferView_ = {}, + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, + MTLTexture_id mtlTexture_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , image{ image_ } + , imageView{ imageView_ } + , bufferView{ bufferView_ } + , plane{ plane_ } + , mtlTexture{ mtlTexture_ } + { + } + + VULKAN_HPP_CONSTEXPR ExportMetalTextureInfoEXT( ExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMetalTextureInfoEXT( VkExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalTextureInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ExportMetalTextureInfoEXT & operator=( ExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExportMetalTextureInfoEXT & operator=( VkExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
ExportMetalTextureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + { + imageView = imageView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView_ ) VULKAN_HPP_NOEXCEPT + { + bufferView = bufferView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setPlane( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ ) VULKAN_HPP_NOEXCEPT + { + plane = plane_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setMtlTexture( MTLTexture_id mtlTexture_ ) VULKAN_HPP_NOEXCEPT + { + mtlTexture = mtlTexture_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExportMetalTextureInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalTextureInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalTextureInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExportMetalTextureInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image, imageView, bufferView, plane, mtlTexture ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalTextureInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( imageView == rhs.imageView ) && ( bufferView == rhs.bufferView ) && + ( plane == rhs.plane ) && ( mtlTexture == rhs.mtlTexture ); +# endif + } + + bool operator!=( ExportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalTextureInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::BufferView bufferView = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + MTLTexture_id mtlTexture = {}; + }; + + template <> + struct CppType + { + using Type = ExportMetalTextureInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + // wrapper struct for struct VkExportSemaphoreCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportSemaphoreCreateInfo.html + struct ExportSemaphoreCreateInfo + { + using NativeType = VkExportSemaphoreCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( 
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } + { + } + + VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportSemaphoreCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + ExportSemaphoreCreateInfo & operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExportSemaphoreCreateInfo & operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExportSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportSemaphoreCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExportSemaphoreCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportSemaphoreCreateInfo const & ) const = default; +#else + bool operator==( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); +# endif + } + + bool operator!=( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {}; + }; + + template <> + struct CppType + { + using Type = ExportSemaphoreCreateInfo; + }; + + using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + // wrapper struct for struct VkExportSemaphoreWin32HandleInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportSemaphoreWin32HandleInfoKHR.html + struct ExportSemaphoreWin32HandleInfoKHR + { + using NativeType = VkExportSemaphoreWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, 
+ DWORD dwAccess_ = {}, + LPCWSTR name_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pAttributes{ pAttributes_ } + , dwAccess{ dwAccess_ } + , name{ name_ } + { + } + + VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportSemaphoreWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + ExportSemaphoreWin32HandleInfoKHR & operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExportSemaphoreWin32HandleInfoKHR & operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportSemaphoreWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExportSemaphoreWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pAttributes, dwAccess, name ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); +# endif + } + + bool operator!=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; + }; + + template <> + struct CppType + { + using Type = ExportSemaphoreWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + // wrapper struct for struct VkExtensionProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExtensionProperties.html + struct 
ExtensionProperties + { + using NativeType = VkExtensionProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( std::array const & extensionName_ = {}, + uint32_t specVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : extensionName{ extensionName_ } + , specVersion{ specVersion_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT : ExtensionProperties( *reinterpret_cast( &rhs ) ) + { + } + + ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExtensionProperties & operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkExtensionProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExtensionProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExtensionProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, uint32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( extensionName, specVersion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = strcmp( extensionName, rhs.extensionName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = specVersion <=> rhs.specVersion; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( strcmp( extensionName, rhs.extensionName ) == 0 ) && ( specVersion == rhs.specVersion ); + } + + bool operator!=( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D extensionName = {}; + uint32_t specVersion = {}; + }; + + // wrapper struct for struct VkExternalMemoryProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryProperties.html + struct ExternalMemoryProperties + { + using NativeType = VkExternalMemoryProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : externalMemoryFeatures{ externalMemoryFeatures_ } + , exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ } + , compatibleHandleTypes{ compatibleHandleTypes_ } + { + } + + VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalMemoryProperties( *reinterpret_cast( &rhs ) ) + { + } + + ExternalMemoryProperties & operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExternalMemoryProperties & operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkExternalMemoryProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalMemoryProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExternalMemoryProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryProperties const & ) const = default; +#else + bool operator==( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( externalMemoryFeatures == rhs.externalMemoryFeatures ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ); +# endif + } + + bool operator!=( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {}; + 
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {}; + }; + + using ExternalMemoryPropertiesKHR = ExternalMemoryProperties; + + // wrapper struct for struct VkExternalBufferProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalBufferProperties.html + struct ExternalBufferProperties + { + using NativeType = VkExternalBufferProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalBufferProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , externalMemoryProperties{ externalMemoryProperties_ } + { + } + + VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalBufferProperties( *reinterpret_cast( &rhs ) ) + { + } + + ExternalBufferProperties & operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExternalBufferProperties & operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkExternalBufferProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalBufferProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExternalBufferProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, externalMemoryProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalBufferProperties const & ) const = default; +#else + bool operator==( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryProperties == rhs.externalMemoryProperties ); +# endif + } + + bool operator!=( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; + }; + + template <> + struct CppType + { + using Type = ExternalBufferProperties; + }; + + using ExternalBufferPropertiesKHR = ExternalBufferProperties; + + // wrapper struct for struct VkExternalComputeQueueCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalComputeQueueCreateInfoNV.html + struct ExternalComputeQueueCreateInfoNV + { + using NativeType = VkExternalComputeQueueCreateInfoNV; + + static const bool allowDuplicate = false; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalComputeQueueCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalComputeQueueCreateInfoNV( VULKAN_HPP_NAMESPACE::Queue preferredQueue_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , preferredQueue{ preferredQueue_ } + { + } + + VULKAN_HPP_CONSTEXPR ExternalComputeQueueCreateInfoNV( ExternalComputeQueueCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalComputeQueueCreateInfoNV( VkExternalComputeQueueCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalComputeQueueCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + ExternalComputeQueueCreateInfoNV & operator=( ExternalComputeQueueCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExternalComputeQueueCreateInfoNV & operator=( VkExternalComputeQueueCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueCreateInfoNV & setPreferredQueue( VULKAN_HPP_NAMESPACE::Queue preferredQueue_ ) VULKAN_HPP_NOEXCEPT + { + preferredQueue = preferredQueue_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExternalComputeQueueCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalComputeQueueCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalComputeQueueCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExternalComputeQueueCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, preferredQueue ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalComputeQueueCreateInfoNV const & ) const = default; +#else + bool operator==( ExternalComputeQueueCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( preferredQueue == rhs.preferredQueue ); +# endif + } + + bool operator!=( ExternalComputeQueueCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalComputeQueueCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Queue preferredQueue = {}; + }; + + template <> + struct CppType + { + using Type = ExternalComputeQueueCreateInfoNV; + }; + + // wrapper struct for struct VkExternalComputeQueueDataParamsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalComputeQueueDataParamsNV.html + struct ExternalComputeQueueDataParamsNV + { + using NativeType = VkExternalComputeQueueDataParamsNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eExternalComputeQueueDataParamsNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalComputeQueueDataParamsNV( uint32_t deviceIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceIndex{ deviceIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR ExternalComputeQueueDataParamsNV( ExternalComputeQueueDataParamsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalComputeQueueDataParamsNV( VkExternalComputeQueueDataParamsNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalComputeQueueDataParamsNV( *reinterpret_cast( &rhs ) ) + { + } + + ExternalComputeQueueDataParamsNV & operator=( ExternalComputeQueueDataParamsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExternalComputeQueueDataParamsNV & operator=( VkExternalComputeQueueDataParamsNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDataParamsNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDataParamsNV & setDeviceIndex( uint32_t deviceIndex_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndex = deviceIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExternalComputeQueueDataParamsNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalComputeQueueDataParamsNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalComputeQueueDataParamsNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExternalComputeQueueDataParamsNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalComputeQueueDataParamsNV const & ) const = default; +#else + bool operator==( ExternalComputeQueueDataParamsNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndex == rhs.deviceIndex ); +# endif + } + + bool operator!=( ExternalComputeQueueDataParamsNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalComputeQueueDataParamsNV; + const void * pNext = {}; + uint32_t deviceIndex = {}; + }; + + template <> + struct CppType + { + using Type = ExternalComputeQueueDataParamsNV; + }; + + // wrapper struct for struct VkExternalComputeQueueDeviceCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalComputeQueueDeviceCreateInfoNV.html + struct ExternalComputeQueueDeviceCreateInfoNV + { + using NativeType = VkExternalComputeQueueDeviceCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalComputeQueueDeviceCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalComputeQueueDeviceCreateInfoNV( uint32_t reservedExternalQueues_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , reservedExternalQueues{ reservedExternalQueues_ } + { + } + + VULKAN_HPP_CONSTEXPR ExternalComputeQueueDeviceCreateInfoNV( ExternalComputeQueueDeviceCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalComputeQueueDeviceCreateInfoNV( VkExternalComputeQueueDeviceCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalComputeQueueDeviceCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + ExternalComputeQueueDeviceCreateInfoNV & operator=( ExternalComputeQueueDeviceCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExternalComputeQueueDeviceCreateInfoNV & operator=( VkExternalComputeQueueDeviceCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDeviceCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDeviceCreateInfoNV & setReservedExternalQueues( uint32_t reservedExternalQueues_ ) VULKAN_HPP_NOEXCEPT + { + reservedExternalQueues = reservedExternalQueues_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExternalComputeQueueDeviceCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalComputeQueueDeviceCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalComputeQueueDeviceCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExternalComputeQueueDeviceCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, reservedExternalQueues ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalComputeQueueDeviceCreateInfoNV const & ) const = default; +#else + bool operator==( ExternalComputeQueueDeviceCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reservedExternalQueues == rhs.reservedExternalQueues ); +# endif + } + + bool operator!=( ExternalComputeQueueDeviceCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalComputeQueueDeviceCreateInfoNV; + const void * pNext = {}; + uint32_t reservedExternalQueues = {}; + }; + + template <> + struct CppType + { + using Type = ExternalComputeQueueDeviceCreateInfoNV; + }; + + // wrapper struct for struct VkExternalFenceProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalFenceProperties.html + struct ExternalFenceProperties + { + using NativeType = VkExternalFenceProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties; + +#if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalFenceProperties( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ } + , compatibleHandleTypes{ compatibleHandleTypes_ } + , externalFenceFeatures{ externalFenceFeatures_ } + { + } + + VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalFenceProperties( *reinterpret_cast( &rhs ) ) + { + } + + ExternalFenceProperties & operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExternalFenceProperties & operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkExternalFenceProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalFenceProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExternalFenceProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalFenceFeatures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalFenceProperties const & ) const = default; +#else + bool operator==( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && ( externalFenceFeatures == rhs.externalFenceFeatures ); +# endif + } + + bool operator!=( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {}; + }; + + template <> + struct CppType + { + using Type = ExternalFenceProperties; + }; + + using ExternalFencePropertiesKHR = ExternalFenceProperties; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + // wrapper struct for struct VkExternalFormatANDROID, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalFormatANDROID.html + struct ExternalFormatANDROID + { + using NativeType = VkExternalFormatANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
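ExternalFenceProperties is an output structure that the implementation fills in rather than the application; a minimal sketch of the Vulkan 1.1 query, assuming physicalDevice is a valid vk::PhysicalDevice:

    vk::PhysicalDeviceExternalFenceInfo fenceInfo{};
    fenceInfo.handleType = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd;

    // The driver reports which handle types are compatible and whether the requested
    // type can be exported and/or imported.
    vk::ExternalFenceProperties props = physicalDevice.getExternalFenceProperties( fenceInfo );
    const bool exportable =
      static_cast<bool>( props.externalFenceFeatures & vk::ExternalFenceFeatureFlagBits::eExportable );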
StructureType::eExternalFormatANDROID; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , externalFormat{ externalFormat_ } + { + } + + VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalFormatANDROID( *reinterpret_cast( &rhs ) ) + { + } + + ExternalFormatANDROID & operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExternalFormatANDROID & operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT + { + externalFormat = externalFormat_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExternalFormatANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalFormatANDROID const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExternalFormatANDROID *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, externalFormat ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalFormatANDROID const & ) const = default; +# else + bool operator==( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == rhs.externalFormat ); +# endif + } + + bool operator!=( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID; + void * pNext = {}; + uint64_t externalFormat = {}; + }; + + template <> + struct CppType + { + using Type = ExternalFormatANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + // wrapper struct for struct VkExternalFormatQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalFormatQNX.html + struct ExternalFormatQNX + { + using NativeType = VkExternalFormatQNX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatQNX; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalFormatQNX( uint64_t externalFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , externalFormat{ externalFormat_ } + { + } + + VULKAN_HPP_CONSTEXPR ExternalFormatQNX( 
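ExternalFormatANDROID is normally chained into the image (and matching Ycbcr conversion) create info when importing an AHardwareBuffer whose format has no Vulkan equivalent. A hedged sketch; bufferFormatProps stands for a previously queried vk::AndroidHardwareBufferFormatPropertiesANDROID and is hypothetical here:

    #if defined( VK_USE_PLATFORM_ANDROID_KHR )
    vk::ExternalFormatANDROID externalFormat{};
    externalFormat.externalFormat = bufferFormatProps.externalFormat;

    vk::ImageCreateInfo imageInfo{};
    imageInfo.format = vk::Format::eUndefined;  // required when a non-zero externalFormat is used
    imageInfo.pNext  = &externalFormat;         // the AHB import also needs ExternalMemoryImageCreateInfo in this chain
    #endif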
ExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalFormatQNX( VkExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalFormatQNX( *reinterpret_cast( &rhs ) ) {} + + ExternalFormatQNX & operator=( ExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExternalFormatQNX & operator=( VkExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExternalFormatQNX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExternalFormatQNX & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT + { + externalFormat = externalFormat_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExternalFormatQNX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalFormatQNX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalFormatQNX const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExternalFormatQNX *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, externalFormat ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalFormatQNX const & ) const = default; +# else + bool operator==( ExternalFormatQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == rhs.externalFormat ); +# endif + } + + bool operator!=( ExternalFormatQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatQNX; + void * pNext = {}; + uint64_t externalFormat = {}; + }; + + template <> + struct CppType + { + using Type = ExternalFormatQNX; + }; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + // wrapper struct for struct VkExternalImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalImageFormatProperties.html + struct ExternalImageFormatProperties + { + using NativeType = VkExternalImageFormatProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , externalMemoryProperties{ externalMemoryProperties_ } + { + } + + VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalImageFormatProperties( *reinterpret_cast( &rhs ) ) + { + } + + ExternalImageFormatProperties & operator=( ExternalImageFormatProperties const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkExternalImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalImageFormatProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExternalImageFormatProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, externalMemoryProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalImageFormatProperties const & ) const = default; +#else + bool operator==( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryProperties == rhs.externalMemoryProperties ); +# endif + } + + bool operator!=( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; + }; + + template <> + struct CppType + { + using Type = ExternalImageFormatProperties; + }; + + using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties; + + // wrapper struct for struct VkImageFormatProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageFormatProperties.html + struct ImageFormatProperties + { + using NativeType = VkImageFormatProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageFormatProperties( VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {}, + uint32_t maxMipLevels_ = {}, + uint32_t maxArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxExtent{ maxExtent_ } + , maxMipLevels{ maxMipLevels_ } + , maxArrayLayers{ maxArrayLayers_ } + , sampleCounts{ sampleCounts_ } + , maxResourceSize{ maxResourceSize_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageFormatProperties( *reinterpret_cast( &rhs ) ) + { + } + + ImageFormatProperties & operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageFormatProperties & operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageFormatProperties const *() 
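ExternalImageFormatProperties is returned through the vkGetPhysicalDeviceImageFormatProperties2 pNext chain; a minimal sketch using the vulkan.hpp StructureChain overload (assumes enhanced mode and exceptions are enabled, and that physicalDevice is a valid vk::PhysicalDevice):

    vk::PhysicalDeviceExternalImageFormatInfo externalInfo{};
    externalInfo.handleType = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;

    vk::PhysicalDeviceImageFormatInfo2 formatInfo{};
    formatInfo.format = vk::Format::eR8G8B8A8Unorm;
    formatInfo.type   = vk::ImageType::e2D;
    formatInfo.tiling = vk::ImageTiling::eOptimal;
    formatInfo.usage  = vk::ImageUsageFlagBits::eSampled;
    formatInfo.pNext  = &externalInfo;

    // One call fills both the core image format limits and the external-memory capabilities.
    auto chain = physicalDevice.getImageFormatProperties2<vk::ImageFormatProperties2,
                                                          vk::ExternalImageFormatProperties>( formatInfo );
    vk::ExternalImageFormatProperties extProps = chain.get<vk::ExternalImageFormatProperties>();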
const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageFormatProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( maxExtent, maxMipLevels, maxArrayLayers, sampleCounts, maxResourceSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatProperties const & ) const = default; +#else + bool operator==( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( maxExtent == rhs.maxExtent ) && ( maxMipLevels == rhs.maxMipLevels ) && ( maxArrayLayers == rhs.maxArrayLayers ) && + ( sampleCounts == rhs.sampleCounts ) && ( maxResourceSize == rhs.maxResourceSize ); +# endif + } + + bool operator!=( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {}; + uint32_t maxMipLevels = {}; + uint32_t maxArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {}; + }; + + // wrapper struct for struct VkExternalImageFormatPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalImageFormatPropertiesNV.html + struct ExternalImageFormatPropertiesNV + { + using NativeType = VkExternalImageFormatPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : imageFormatProperties{ imageFormatProperties_ } + , externalMemoryFeatures{ externalMemoryFeatures_ } + , exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ } + , compatibleHandleTypes{ compatibleHandleTypes_ } + { + } + + VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalImageFormatPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + ExternalImageFormatPropertiesNV & operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExternalImageFormatPropertiesNV & operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkExternalImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalImageFormatPropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExternalImageFormatPropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( imageFormatProperties, externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalImageFormatPropertiesNV const & ) const = default; +#else + bool operator==( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( imageFormatProperties == rhs.imageFormatProperties ) && ( externalMemoryFeatures == rhs.externalMemoryFeatures ) && + ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && ( compatibleHandleTypes == rhs.compatibleHandleTypes ); +# endif + } + + bool operator!=( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {}; + }; + + // wrapper struct for struct VkExternalMemoryAcquireUnmodifiedEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryAcquireUnmodifiedEXT.html + struct ExternalMemoryAcquireUnmodifiedEXT + { + using NativeType = VkExternalMemoryAcquireUnmodifiedEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryAcquireUnmodifiedEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryAcquireUnmodifiedEXT( VULKAN_HPP_NAMESPACE::Bool32 acquireUnmodifiedMemory_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , acquireUnmodifiedMemory{ acquireUnmodifiedMemory_ } + { + } + + VULKAN_HPP_CONSTEXPR ExternalMemoryAcquireUnmodifiedEXT( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryAcquireUnmodifiedEXT( VkExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalMemoryAcquireUnmodifiedEXT( *reinterpret_cast( &rhs ) ) + { + } + + ExternalMemoryAcquireUnmodifiedEXT & operator=( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExternalMemoryAcquireUnmodifiedEXT & operator=( VkExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryAcquireUnmodifiedEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryAcquireUnmodifiedEXT & + setAcquireUnmodifiedMemory( VULKAN_HPP_NAMESPACE::Bool32 acquireUnmodifiedMemory_ ) VULKAN_HPP_NOEXCEPT + { + acquireUnmodifiedMemory = acquireUnmodifiedMemory_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExternalMemoryAcquireUnmodifiedEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalMemoryAcquireUnmodifiedEXT &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkExternalMemoryAcquireUnmodifiedEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExternalMemoryAcquireUnmodifiedEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, acquireUnmodifiedMemory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryAcquireUnmodifiedEXT const & ) const = default; +#else + bool operator==( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireUnmodifiedMemory == rhs.acquireUnmodifiedMemory ); +# endif + } + + bool operator!=( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryAcquireUnmodifiedEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 acquireUnmodifiedMemory = {}; + }; + + template <> + struct CppType + { + using Type = ExternalMemoryAcquireUnmodifiedEXT; + }; + + // wrapper struct for struct VkExternalMemoryBufferCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryBufferCreateInfo.html + struct ExternalMemoryBufferCreateInfo + { + using NativeType = VkExternalMemoryBufferCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } + { + } + + VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalMemoryBufferCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + ExternalMemoryBufferCreateInfo & operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExternalMemoryBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + 
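ExternalMemoryAcquireUnmodifiedEXT (VK_EXT_external_memory_acquire_unmodified) is chained into the memory barrier that performs an acquire from an external queue family; a hedged sketch of that pattern, with the destination queue family index chosen arbitrarily:

    // Promise that the external memory has not been written since the matching release,
    // which allows the driver to skip some cache invalidation on the acquire.
    vk::ExternalMemoryAcquireUnmodifiedEXT acquireUnmodified{};
    acquireUnmodified.acquireUnmodifiedMemory = VK_TRUE;

    vk::ImageMemoryBarrier2 acquireBarrier{};               // image, layouts and stage/access masks omitted
    acquireBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_EXTERNAL;
    acquireBarrier.dstQueueFamilyIndex = 0;                 // hypothetical destination queue family
    acquireBarrier.pNext               = &acquireUnmodified;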
+ operator VkExternalMemoryBufferCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExternalMemoryBufferCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryBufferCreateInfo const & ) const = default; +#else + bool operator==( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); +# endif + } + + bool operator!=( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; + }; + + template <> + struct CppType + { + using Type = ExternalMemoryBufferCreateInfo; + }; + + using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo; + + // wrapper struct for struct VkExternalMemoryImageCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryImageCreateInfo.html + struct ExternalMemoryImageCreateInfo + { + using NativeType = VkExternalMemoryImageCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } + { + } + + VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalMemoryImageCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + ExternalMemoryImageCreateInfo & operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExternalMemoryImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkExternalMemoryImageCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExternalMemoryImageCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryImageCreateInfo const & ) const = default; +#else + bool operator==( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); +# endif + } + + bool operator!=( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; + }; + + template <> + struct CppType + { + using Type = ExternalMemoryImageCreateInfo; + }; + + using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo; + + // wrapper struct for struct VkExternalMemoryImageCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryImageCreateInfoNV.html + struct ExternalMemoryImageCreateInfoNV + { + using NativeType = VkExternalMemoryImageCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } + { + } + + VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalMemoryImageCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + ExternalMemoryImageCreateInfoNV & operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExternalMemoryImageCreateInfoNV & operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExternalMemoryImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + 
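ExternalMemoryBufferCreateInfo and ExternalMemoryImageCreateInfo must be chained into the corresponding create info whenever the resource's memory will be exported or imported; a minimal sketch for a buffer, assuming device is a valid vk::Device:

    vk::ExternalMemoryBufferCreateInfo externalBufferInfo{};
    externalBufferInfo.handleTypes = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;

    vk::BufferCreateInfo bufferInfo{};
    bufferInfo.size  = 65536;
    bufferInfo.usage = vk::BufferUsageFlagBits::eStorageBuffer;
    bufferInfo.pNext = &externalBufferInfo;

    // The buffer is created as usual; the memory later bound to it needs a matching
    // ExportMemoryAllocateInfo (or import structure) with the same handle type.
    vk::Buffer buffer = device.createBuffer( bufferInfo );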
operator VkExternalMemoryImageCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExternalMemoryImageCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryImageCreateInfoNV const & ) const = default; +#else + bool operator==( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); +# endif + } + + bool operator!=( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; + }; + + template <> + struct CppType + { + using Type = ExternalMemoryImageCreateInfoNV; + }; + + // wrapper struct for struct VkExternalSemaphoreProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalSemaphoreProperties.html + struct ExternalSemaphoreProperties + { + using NativeType = VkExternalSemaphoreProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ } + , compatibleHandleTypes{ compatibleHandleTypes_ } + , externalSemaphoreFeatures{ externalSemaphoreFeatures_ } + { + } + + VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalSemaphoreProperties( *reinterpret_cast( &rhs ) ) + { + } + + ExternalSemaphoreProperties & operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExternalSemaphoreProperties & operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkExternalSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalSemaphoreProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkExternalSemaphoreProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalSemaphoreFeatures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalSemaphoreProperties const & ) const = default; +#else + bool operator==( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures ); +# endif + } + + bool operator!=( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {}; + }; + + template <> + struct CppType + { + using Type = ExternalSemaphoreProperties; + }; + + using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties; + + // wrapper struct for struct VkFenceCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFenceCreateInfo.html + struct FenceCreateInfo + { + using NativeType = VkFenceCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FenceCreateInfo( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : FenceCreateInfo( *reinterpret_cast( &rhs ) ) {} + + FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + FenceCreateInfo & operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFenceCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkFenceCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FenceCreateInfo const & ) const = default; +#else + bool operator==( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {}; + }; + + template <> + struct CppType + { + using Type = FenceCreateInfo; + }; + + // wrapper struct for struct VkFenceGetFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFenceGetFdInfoKHR.html + struct FenceGetFdInfoKHR + { + using NativeType = VkFenceGetFdInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fence{ fence_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : FenceGetFdInfoKHR( *reinterpret_cast( &rhs ) ) {} + + FenceGetFdInfoKHR & operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + FenceGetFdInfoKHR & operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkFenceGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFenceGetFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkFenceGetFdInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fence, handleType ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FenceGetFdInfoKHR const & ) const = default; +#else + bool operator==( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = FenceGetFdInfoKHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + // wrapper struct for struct VkFenceGetWin32HandleInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFenceGetWin32HandleInfoKHR.html + struct FenceGetWin32HandleInfoKHR + { + using NativeType = VkFenceGetWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fence{ fence_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : FenceGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + FenceGetWin32HandleInfoKHR & operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + FenceGetWin32HandleInfoKHR & operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkFenceGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFenceGetWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkFenceGetWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if 
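FenceCreateInfo and FenceGetFdInfoKHR together cover the POSIX export path; a hedged sketch that requires VK_KHR_external_fence_fd to be enabled and the dispatcher to have vkGetFenceFdKHR loaded, with device assumed to be a valid vk::Device:

    // Mark the fence as exportable at creation time.
    vk::ExportFenceCreateInfo exportInfo{};
    exportInfo.handleTypes = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd;

    vk::FenceCreateInfo fenceInfo{};
    fenceInfo.pNext = &exportInfo;
    vk::Fence fence = device.createFence( fenceInfo );

    // Export the fence payload as an opaque file descriptor that another process
    // can import with vkImportFenceFdKHR.
    vk::FenceGetFdInfoKHR getFdInfo{};
    getFdInfo.fence      = fence;
    getFdInfo.handleType = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
    int fd = device.getFenceFdKHR( getFdInfo );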
defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fence, handleType ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FenceGetWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = FenceGetWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + // wrapper struct for struct VkFilterCubicImageViewImageFormatPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkFilterCubicImageViewImageFormatPropertiesEXT.html + struct FilterCubicImageViewImageFormatPropertiesEXT + { + using NativeType = VkFilterCubicImageViewImageFormatPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , filterCubic{ filterCubic_ } + , filterCubicMinmax{ filterCubicMinmax_ } + { + } + + VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : FilterCubicImageViewImageFormatPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + FilterCubicImageViewImageFormatPropertiesEXT & operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + FilterCubicImageViewImageFormatPropertiesEXT & operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkFilterCubicImageViewImageFormatPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFilterCubicImageViewImageFormatPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkFilterCubicImageViewImageFormatPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else 
+ std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, filterCubic, filterCubicMinmax ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const & ) const = default; +#else + bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCubic == rhs.filterCubic ) && ( filterCubicMinmax == rhs.filterCubicMinmax ); +# endif + } + + bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {}; + }; + + template <> + struct CppType + { + using Type = FilterCubicImageViewImageFormatPropertiesEXT; + }; + + // wrapper struct for struct VkFormatProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFormatProperties.html + struct FormatProperties + { + using NativeType = VkFormatProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FormatProperties( VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : linearTilingFeatures{ linearTilingFeatures_ } + , optimalTilingFeatures{ optimalTilingFeatures_ } + , bufferFeatures{ bufferFeatures_ } + { + } + + VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties( *reinterpret_cast( &rhs ) ) {} + + FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + FormatProperties & operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkFormatProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFormatProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkFormatProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( linearTilingFeatures, optimalTilingFeatures, bufferFeatures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FormatProperties const & ) const = default; +#else + bool operator==( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( linearTilingFeatures == rhs.linearTilingFeatures ) && ( optimalTilingFeatures == rhs.optimalTilingFeatures ) && + ( bufferFeatures == rhs.bufferFeatures ); +# endif + } + + bool 
operator!=( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {}; + }; + + // wrapper struct for struct VkFormatProperties2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFormatProperties2.html + struct FormatProperties2 + { + using NativeType = VkFormatProperties2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , formatProperties{ formatProperties_ } + { + } + + VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties2( *reinterpret_cast( &rhs ) ) {} + + FormatProperties2 & operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + FormatProperties2 & operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFormatProperties2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkFormatProperties2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, formatProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FormatProperties2 const & ) const = default; +#else + bool operator==( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatProperties == rhs.formatProperties ); +# endif + } + + bool operator!=( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {}; + }; + + template <> + struct CppType + { + using Type = FormatProperties2; + }; + + using FormatProperties2KHR = FormatProperties2; + + // wrapper struct for struct VkFormatProperties3, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFormatProperties3.html + struct FormatProperties3 + { + using NativeType = VkFormatProperties3; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties3; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FormatProperties3( 
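FormatProperties2 is the output of vkGetPhysicalDeviceFormatProperties2 and can carry additional property structs in its pNext chain; a minimal sketch of the plain query, assuming physicalDevice is a valid vk::PhysicalDevice:

    vk::FormatProperties2 formatProps = physicalDevice.getFormatProperties2( vk::Format::eR8G8B8A8Unorm );

    const bool canSampleOptimal =
      static_cast<bool>( formatProps.formatProperties.optimalTilingFeatures &
                         vk::FormatFeatureFlagBits::eSampledImage );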
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , linearTilingFeatures{ linearTilingFeatures_ } + , optimalTilingFeatures{ optimalTilingFeatures_ } + , bufferFeatures{ bufferFeatures_ } + { + } + + VULKAN_HPP_CONSTEXPR FormatProperties3( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FormatProperties3( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties3( *reinterpret_cast( &rhs ) ) {} + + FormatProperties3 & operator=( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + FormatProperties3 & operator=( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkFormatProperties3 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFormatProperties3 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFormatProperties3 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkFormatProperties3 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, linearTilingFeatures, optimalTilingFeatures, bufferFeatures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FormatProperties3 const & ) const = default; +#else + bool operator==( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( linearTilingFeatures == rhs.linearTilingFeatures ) && + ( optimalTilingFeatures == rhs.optimalTilingFeatures ) && ( bufferFeatures == rhs.bufferFeatures ); +# endif + } + + bool operator!=( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties3; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures = {}; + }; + + template <> + struct CppType + { + using Type = FormatProperties3; + }; + + using FormatProperties3KHR = FormatProperties3; + + // wrapper struct for struct VkFragmentShadingRateAttachmentInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkFragmentShadingRateAttachmentInfoKHR.html + struct FragmentShadingRateAttachmentInfoKHR + { + using NativeType = VkFragmentShadingRateAttachmentInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFragmentShadingRateAttachmentInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {}, + const void * pNext_ = 
nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pFragmentShadingRateAttachment{ pFragmentShadingRateAttachment_ } + , shadingRateAttachmentTexelSize{ shadingRateAttachmentTexelSize_ } + { + } + + VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FragmentShadingRateAttachmentInfoKHR( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : FragmentShadingRateAttachmentInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + FragmentShadingRateAttachmentInfoKHR & operator=( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + FragmentShadingRateAttachmentInfoKHR & operator=( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & + setPFragmentShadingRateAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pFragmentShadingRateAttachment = pFragmentShadingRateAttachment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & + setShadingRateAttachmentTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFragmentShadingRateAttachmentInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkFragmentShadingRateAttachmentInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pFragmentShadingRateAttachment, shadingRateAttachmentTexelSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FragmentShadingRateAttachmentInfoKHR const & ) const = default; +#else + bool operator==( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pFragmentShadingRateAttachment == rhs.pFragmentShadingRateAttachment ) && + ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize ); +# endif + } + + bool operator!=( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFragmentShadingRateAttachmentInfoKHR; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment = {}; + VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {}; + }; + + template 
<> + struct CppType + { + using Type = FragmentShadingRateAttachmentInfoKHR; + }; + + // wrapper struct for struct VkFrameBoundaryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFrameBoundaryEXT.html + struct FrameBoundaryEXT + { + using NativeType = VkFrameBoundaryEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFrameBoundaryEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FrameBoundaryEXT( VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT flags_ = {}, + uint64_t frameID_ = {}, + uint32_t imageCount_ = {}, + const VULKAN_HPP_NAMESPACE::Image * pImages_ = {}, + uint32_t bufferCount_ = {}, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers_ = {}, + uint64_t tagName_ = {}, + size_t tagSize_ = {}, + const void * pTag_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , frameID{ frameID_ } + , imageCount{ imageCount_ } + , pImages{ pImages_ } + , bufferCount{ bufferCount_ } + , pBuffers{ pBuffers_ } + , tagName{ tagName_ } + , tagSize{ tagSize_ } + , pTag{ pTag_ } + { + } + + VULKAN_HPP_CONSTEXPR FrameBoundaryEXT( FrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FrameBoundaryEXT( VkFrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT : FrameBoundaryEXT( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + FrameBoundaryEXT( VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT flags_, + uint64_t frameID_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & images_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & buffers_ = {}, + uint64_t tagName_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , frameID( frameID_ ) + , imageCount( static_cast( images_.size() ) ) + , pImages( images_.data() ) + , bufferCount( static_cast( buffers_.size() ) ) + , pBuffers( buffers_.data() ) + , tagName( tagName_ ) + , tagSize( tag_.size() * sizeof( T ) ) + , pTag( tag_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + FrameBoundaryEXT & operator=( FrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + FrameBoundaryEXT & operator=( VkFrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setFlags( VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setFrameID( uint64_t frameID_ ) VULKAN_HPP_NOEXCEPT + { + frameID = frameID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setImageCount( uint32_t imageCount_ ) VULKAN_HPP_NOEXCEPT + { + imageCount = imageCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPImages( const VULKAN_HPP_NAMESPACE::Image * pImages_ ) VULKAN_HPP_NOEXCEPT + { + pImages = pImages_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FrameBoundaryEXT & setImages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & images_ ) VULKAN_HPP_NOEXCEPT + { + 
imageCount = static_cast( images_.size() ); + pImages = images_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setBufferCount( uint32_t bufferCount_ ) VULKAN_HPP_NOEXCEPT + { + bufferCount = bufferCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPBuffers( const VULKAN_HPP_NAMESPACE::Buffer * pBuffers_ ) VULKAN_HPP_NOEXCEPT + { + pBuffers = pBuffers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FrameBoundaryEXT & setBuffers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & buffers_ ) VULKAN_HPP_NOEXCEPT + { + bufferCount = static_cast( buffers_.size() ); + pBuffers = buffers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT + { + tagName = tagName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tagSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT + { + pTag = pTag_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + FrameBoundaryEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tag_.size() * sizeof( T ); + pTag = tag_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkFrameBoundaryEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFrameBoundaryEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFrameBoundaryEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkFrameBoundaryEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, frameID, imageCount, pImages, bufferCount, pBuffers, tagName, tagSize, pTag ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FrameBoundaryEXT const & ) const = default; +#else + bool operator==( FrameBoundaryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( frameID == rhs.frameID ) && ( imageCount == rhs.imageCount ) && + ( pImages == rhs.pImages ) && ( bufferCount == rhs.bufferCount ) && ( pBuffers == rhs.pBuffers ) && ( tagName == rhs.tagName ) && + ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag ); +# endif + } + + bool operator!=( FrameBoundaryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFrameBoundaryEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT flags = {}; + uint64_t frameID = {}; + uint32_t imageCount = {}; + const VULKAN_HPP_NAMESPACE::Image * pImages = {}; + uint32_t bufferCount = {}; + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers = {}; + uint64_t tagName = {}; + size_t tagSize = {}; + const void * pTag = {}; + }; + + template <> + struct CppType + { + using Type = FrameBoundaryEXT; + }; 
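+
+  // Illustrative usage sketch (not part of the generated header; added for documentation only).
+  // It assumes VK_EXT_frame_boundary is enabled on the device and that "swapchainImage", "frameIndex",
+  // "submitInfo", "queue" and "fence" are placeholder names defined by the application:
+  //
+  //   VULKAN_HPP_NAMESPACE::FrameBoundaryEXT frameBoundary{};
+  //   frameBoundary.setFlags( VULKAN_HPP_NAMESPACE::FrameBoundaryFlagBitsEXT::eFrameEnd )
+  //                .setFrameID( frameIndex )
+  //                .setImageCount( 1 )
+  //                .setPImages( &swapchainImage );
+  //
+  //   submitInfo.setPNext( &frameBoundary );   // chained into the frame's last queue submission
+  //   queue.submit( submitInfo, fence );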
+ + // wrapper struct for struct VkFramebufferAttachmentImageInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebufferAttachmentImageInfo.html + struct FramebufferAttachmentImageInfo + { + using NativeType = VkFramebufferAttachmentImageInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t layerCount_ = {}, + uint32_t viewFormatCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , usage{ usage_ } + , width{ width_ } + , height{ height_ } + , layerCount{ layerCount_ } + , viewFormatCount{ viewFormatCount_ } + , pViewFormats{ pViewFormats_ } + { + } + + VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferAttachmentImageInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, + uint32_t width_, + uint32_t height_, + uint32_t layerCount_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , usage( usage_ ) + , width( width_ ) + , height( height_ ) + , layerCount( layerCount_ ) + , viewFormatCount( static_cast( viewFormats_.size() ) ) + , pViewFormats( viewFormats_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + FramebufferAttachmentImageInfo & operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & 
setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT + { + viewFormatCount = viewFormatCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT + { + pViewFormats = pViewFormats_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferAttachmentImageInfo & + setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) VULKAN_HPP_NOEXCEPT + { + viewFormatCount = static_cast( viewFormats_.size() ); + pViewFormats = viewFormats_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkFramebufferAttachmentImageInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferAttachmentImageInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkFramebufferAttachmentImageInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, usage, width, height, layerCount, viewFormatCount, pViewFormats ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferAttachmentImageInfo const & ) const = default; +#else + bool operator==( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && ( width == rhs.width ) && + ( height == rhs.height ) && ( layerCount == rhs.layerCount ) && ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats ); +# endif + } + + bool operator!=( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + uint32_t width = {}; + uint32_t height = {}; + uint32_t layerCount = {}; + uint32_t viewFormatCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {}; + }; + + template <> + struct CppType + { + using Type = FramebufferAttachmentImageInfo; + }; + + using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo; + + // wrapper struct for struct VkFramebufferAttachmentsCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebufferAttachmentsCreateInfo.html + struct FramebufferAttachmentsCreateInfo + { + using NativeType = VkFramebufferAttachmentsCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( uint32_t attachmentImageInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ = {}, + 
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , attachmentImageInfoCount{ attachmentImageInfoCount_ } + , pAttachmentImageInfos{ pAttachmentImageInfos_ } + { + } + + VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferAttachmentsCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferAttachmentsCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentImageInfos_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , attachmentImageInfoCount( static_cast( attachmentImageInfos_.size() ) ) + , pAttachmentImageInfos( attachmentImageInfos_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + FramebufferAttachmentsCreateInfo & operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentImageInfoCount = attachmentImageInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & + setPAttachmentImageInfos( const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT + { + pAttachmentImageInfos = pAttachmentImageInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferAttachmentsCreateInfo & setAttachmentImageInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentImageInfos_ ) + VULKAN_HPP_NOEXCEPT + { + attachmentImageInfoCount = static_cast( attachmentImageInfos_.size() ); + pAttachmentImageInfos = attachmentImageInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkFramebufferAttachmentsCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferAttachmentsCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkFramebufferAttachmentsCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, attachmentImageInfoCount, pAttachmentImageInfos ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferAttachmentsCreateInfo const & ) const = default; +#else + bool operator==( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && 
( pNext == rhs.pNext ) && ( attachmentImageInfoCount == rhs.attachmentImageInfoCount ) && + ( pAttachmentImageInfos == rhs.pAttachmentImageInfos ); +# endif + } + + bool operator!=( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo; + const void * pNext = {}; + uint32_t attachmentImageInfoCount = {}; + const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos = {}; + }; + + template <> + struct CppType + { + using Type = FramebufferAttachmentsCreateInfo; + }; + + using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo; + + // wrapper struct for struct VkFramebufferCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebufferCreateInfo.html + struct FramebufferCreateInfo + { + using NativeType = VkFramebufferCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {}, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t layers_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , renderPass{ renderPass_ } + , attachmentCount{ attachmentCount_ } + , pAttachments{ pAttachments_ } + , width{ width_ } + , height{ height_ } + , layers{ layers_ } + { + } + + VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t layers_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , renderPass( renderPass_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , width( width_ ) + , height( height_ ) + , layers( layers_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + FramebufferCreateInfo & operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + FramebufferCreateInfo & operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) 
VULKAN_HPP_NOEXCEPT + { + renderPass = renderPass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferCreateInfo & + setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT + { + layers = layers_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkFramebufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkFramebufferCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, renderPass, attachmentCount, pAttachments, width, height, layers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferCreateInfo const & ) const = default; +#else + bool operator==( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( renderPass == rhs.renderPass ) && + ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) && ( width == rhs.width ) && ( height == rhs.height ) && + ( layers == rhs.layers ); +# endif + } + + bool operator!=( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {}; + uint32_t width = {}; + uint32_t height = {}; + uint32_t layers = {}; + }; + + template <> + struct CppType + { + using Type = FramebufferCreateInfo; + }; + + // wrapper struct for struct VkFramebufferMixedSamplesCombinationNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebufferMixedSamplesCombinationNV.html + struct FramebufferMixedSamplesCombinationNV + { + using NativeType = VkFramebufferMixedSamplesCombinationNV; + + static const bool allowDuplicate = false; 
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferMixedSamplesCombinationNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , coverageReductionMode{ coverageReductionMode_ } + , rasterizationSamples{ rasterizationSamples_ } + , depthStencilSamples{ depthStencilSamples_ } + , colorSamples{ colorSamples_ } + { + } + + VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferMixedSamplesCombinationNV( *reinterpret_cast( &rhs ) ) + { + } + + FramebufferMixedSamplesCombinationNV & operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + FramebufferMixedSamplesCombinationNV & operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkFramebufferMixedSamplesCombinationNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferMixedSamplesCombinationNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkFramebufferMixedSamplesCombinationNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, coverageReductionMode, rasterizationSamples, depthStencilSamples, colorSamples ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferMixedSamplesCombinationNV const & ) const = default; +#else + bool operator==( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode ) && + ( rasterizationSamples == rhs.rasterizationSamples ) && ( depthStencilSamples == rhs.depthStencilSamples ) && ( colorSamples == rhs.colorSamples ); +# endif + } + + bool operator!=( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + 
VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {}; + }; + + template <> + struct CppType + { + using Type = FramebufferMixedSamplesCombinationNV; + }; + + // wrapper struct for struct VkGeneratedCommandsInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsInfoEXT.html + struct GeneratedCommandsInfoEXT + { + using NativeType = VkGeneratedCommandsInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoEXT( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ = {}, + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize indirectAddressSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress preprocessAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, + uint32_t maxSequenceCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress sequenceCountAddress_ = {}, + uint32_t maxDrawCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderStages{ shaderStages_ } + , indirectExecutionSet{ indirectExecutionSet_ } + , indirectCommandsLayout{ indirectCommandsLayout_ } + , indirectAddress{ indirectAddress_ } + , indirectAddressSize{ indirectAddressSize_ } + , preprocessAddress{ preprocessAddress_ } + , preprocessSize{ preprocessSize_ } + , maxSequenceCount{ maxSequenceCount_ } + , sequenceCountAddress{ sequenceCountAddress_ } + , maxDrawCount{ maxDrawCount_ } + { + } + + VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoEXT( GeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsInfoEXT( VkGeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : GeneratedCommandsInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + GeneratedCommandsInfoEXT & operator=( GeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + GeneratedCommandsInfoEXT & operator=( VkGeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setShaderStages( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ ) VULKAN_HPP_NOEXCEPT + { + shaderStages = shaderStages_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & + setIndirectExecutionSet( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet_ ) VULKAN_HPP_NOEXCEPT + { + indirectExecutionSet = indirectExecutionSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & + setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setIndirectAddress( VULKAN_HPP_NAMESPACE::DeviceAddress indirectAddress_ ) VULKAN_HPP_NOEXCEPT + { 
+ indirectAddress = indirectAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setIndirectAddressSize( VULKAN_HPP_NAMESPACE::DeviceSize indirectAddressSize_ ) VULKAN_HPP_NOEXCEPT + { + indirectAddressSize = indirectAddressSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setPreprocessAddress( VULKAN_HPP_NAMESPACE::DeviceAddress preprocessAddress_ ) VULKAN_HPP_NOEXCEPT + { + preprocessAddress = preprocessAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT + { + preprocessSize = preprocessSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setMaxSequenceCount( uint32_t maxSequenceCount_ ) VULKAN_HPP_NOEXCEPT + { + maxSequenceCount = maxSequenceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setSequenceCountAddress( VULKAN_HPP_NAMESPACE::DeviceAddress sequenceCountAddress_ ) VULKAN_HPP_NOEXCEPT + { + sequenceCountAddress = sequenceCountAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setMaxDrawCount( uint32_t maxDrawCount_ ) VULKAN_HPP_NOEXCEPT + { + maxDrawCount = maxDrawCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGeneratedCommandsInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkGeneratedCommandsInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderStages, + indirectExecutionSet, + indirectCommandsLayout, + indirectAddress, + indirectAddressSize, + preprocessAddress, + preprocessSize, + maxSequenceCount, + sequenceCountAddress, + maxDrawCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeneratedCommandsInfoEXT const & ) const = default; +#else + bool operator==( GeneratedCommandsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderStages == rhs.shaderStages ) && ( indirectExecutionSet == rhs.indirectExecutionSet ) && + ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( indirectAddress == rhs.indirectAddress ) && + ( indirectAddressSize == rhs.indirectAddressSize ) && ( preprocessAddress == rhs.preprocessAddress ) && ( preprocessSize == rhs.preprocessSize ) && + ( maxSequenceCount == rhs.maxSequenceCount ) && ( sequenceCountAddress == rhs.sequenceCountAddress ) && ( maxDrawCount == rhs.maxDrawCount ); +# endif + } + + bool operator!=( GeneratedCommandsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages = {}; + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout = {}; 
+    VULKAN_HPP_NAMESPACE::DeviceAddress indirectAddress      = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize    indirectAddressSize  = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress preprocessAddress    = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize    preprocessSize       = {};
+    uint32_t                            maxSequenceCount     = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress sequenceCountAddress = {};
+    uint32_t                            maxDrawCount         = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eGeneratedCommandsInfoEXT>
+  {
+    using Type = GeneratedCommandsInfoEXT;
+  };
+
+  // wrapper struct for struct VkIndirectCommandsStreamNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsStreamNV.html
+  struct IndirectCommandsStreamNV
+  {
+    using NativeType = VkIndirectCommandsStreamNV;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( VULKAN_HPP_NAMESPACE::Buffer     buffer_ = {},
+                                                   VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT
+      : buffer{ buffer_ }
+      , offset{ offset_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : IndirectCommandsStreamNV( *reinterpret_cast<IndirectCommandsStreamNV const *>( &rhs ) )
+    {
+    }
+
+    IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkIndirectCommandsStreamNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIndirectCommandsStreamNV *>( this );
+    }
+
+    operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIndirectCommandsStreamNV *>( this );
+    }
+
+    operator VkIndirectCommandsStreamNV const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkIndirectCommandsStreamNV *>( this );
+    }
+
+    operator VkIndirectCommandsStreamNV *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkIndirectCommandsStreamNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( buffer, offset );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( IndirectCommandsStreamNV const & ) const = default;
+#else
+    bool operator==( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( buffer == rhs.buffer ) && ( offset == rhs.offset );
+#  endif
+    }
+
+    bool operator!=( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Buffer     buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+  };
+
+  // wrapper struct for struct VkGeneratedCommandsInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsInfoNV.html
+  struct GeneratedCommandsInfoNV
+  {
+    using NativeType =
VkGeneratedCommandsInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, + uint32_t streamCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ = {}, + uint32_t sequencesCount_ = {}, + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , pipeline{ pipeline_ } + , indirectCommandsLayout{ indirectCommandsLayout_ } + , streamCount{ streamCount_ } + , pStreams{ pStreams_ } + , sequencesCount{ sequencesCount_ } + , preprocessBuffer{ preprocessBuffer_ } + , preprocessOffset{ preprocessOffset_ } + , preprocessSize{ preprocessSize_ } + , sequencesCountBuffer{ sequencesCountBuffer_ } + , sequencesCountOffset{ sequencesCountOffset_ } + , sequencesIndexBuffer{ sequencesIndexBuffer_ } + , sequencesIndexOffset{ sequencesIndexOffset_ } + { + } + + VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GeneratedCommandsInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streams_, + uint32_t sequencesCount_ = {}, + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , pipeline( pipeline_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , streamCount( static_cast( streams_.size() ) ) + , pStreams( streams_.data() ) + , sequencesCount( sequencesCount_ ) + , preprocessBuffer( preprocessBuffer_ ) + , preprocessOffset( preprocessOffset_ ) + , preprocessSize( preprocessSize_ ) + , sequencesCountBuffer( sequencesCountBuffer_ ) + , sequencesCountOffset( sequencesCountOffset_ ) + , sequencesIndexBuffer( sequencesIndexBuffer_ ) + , sequencesIndexOffset( sequencesIndexOffset_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & + setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = streamCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ ) VULKAN_HPP_NOEXCEPT + { + pStreams = pStreams_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GeneratedCommandsInfoNV & + setStreams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streams_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = static_cast( streams_.size() ); + pStreams = streams_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCount = sequencesCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT + { + preprocessBuffer = preprocessBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT + { + preprocessOffset = preprocessOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT + { + preprocessSize = preprocessSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCountBuffer = sequencesCountBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCountOffset = sequencesCountOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + sequencesIndexBuffer = sequencesIndexBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT + { + sequencesIndexOffset = sequencesIndexOffset_; + return *this; + } +#endif 
/*VULKAN_HPP_NO_SETTERS*/ + + operator VkGeneratedCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkGeneratedCommandsInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + pipelineBindPoint, + pipeline, + indirectCommandsLayout, + streamCount, + pStreams, + sequencesCount, + preprocessBuffer, + preprocessOffset, + preprocessSize, + sequencesCountBuffer, + sequencesCountOffset, + sequencesIndexBuffer, + sequencesIndexOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeneratedCommandsInfoNV const & ) const = default; +#else + bool operator==( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipeline == rhs.pipeline ) && + ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( streamCount == rhs.streamCount ) && ( pStreams == rhs.pStreams ) && + ( sequencesCount == rhs.sequencesCount ) && ( preprocessBuffer == rhs.preprocessBuffer ) && ( preprocessOffset == rhs.preprocessOffset ) && + ( preprocessSize == rhs.preprocessSize ) && ( sequencesCountBuffer == rhs.sequencesCountBuffer ) && + ( sequencesCountOffset == rhs.sequencesCountOffset ) && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) && + ( sequencesIndexOffset == rhs.sequencesIndexOffset ); +# endif + } + + bool operator!=( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; + uint32_t streamCount = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams = {}; + uint32_t sequencesCount = {}; + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {}; + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {}; + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {}; + }; + + template <> + struct CppType + { + using Type = GeneratedCommandsInfoNV; + }; + + // wrapper struct for struct VkGeneratedCommandsMemoryRequirementsInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsMemoryRequirementsInfoEXT.html + struct GeneratedCommandsMemoryRequirementsInfoEXT + { + using NativeType = VkGeneratedCommandsMemoryRequirementsInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eGeneratedCommandsMemoryRequirementsInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout_ = {}, + uint32_t maxSequenceCount_ = {}, + uint32_t maxDrawCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , indirectExecutionSet{ indirectExecutionSet_ } + , indirectCommandsLayout{ indirectCommandsLayout_ } + , maxSequenceCount{ maxSequenceCount_ } + , maxDrawCount{ maxDrawCount_ } + { + } + + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoEXT( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsMemoryRequirementsInfoEXT( VkGeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : GeneratedCommandsMemoryRequirementsInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + GeneratedCommandsMemoryRequirementsInfoEXT & operator=( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + GeneratedCommandsMemoryRequirementsInfoEXT & operator=( VkGeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & + setIndirectExecutionSet( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet_ ) VULKAN_HPP_NOEXCEPT + { + indirectExecutionSet = indirectExecutionSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & + setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setMaxSequenceCount( uint32_t maxSequenceCount_ ) VULKAN_HPP_NOEXCEPT + { + maxSequenceCount = maxSequenceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setMaxDrawCount( uint32_t maxDrawCount_ ) VULKAN_HPP_NOEXCEPT + { + maxDrawCount = maxDrawCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGeneratedCommandsMemoryRequirementsInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsMemoryRequirementsInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsMemoryRequirementsInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkGeneratedCommandsMemoryRequirementsInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, indirectExecutionSet, indirectCommandsLayout, maxSequenceCount, maxDrawCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
GeneratedCommandsMemoryRequirementsInfoEXT const & ) const = default; +#else + bool operator==( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indirectExecutionSet == rhs.indirectExecutionSet ) && + ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( maxSequenceCount == rhs.maxSequenceCount ) && ( maxDrawCount == rhs.maxDrawCount ); +# endif + } + + bool operator!=( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout = {}; + uint32_t maxSequenceCount = {}; + uint32_t maxDrawCount = {}; + }; + + template <> + struct CppType + { + using Type = GeneratedCommandsMemoryRequirementsInfoEXT; + }; + + // wrapper struct for struct VkGeneratedCommandsMemoryRequirementsInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsMemoryRequirementsInfoNV.html + struct GeneratedCommandsMemoryRequirementsInfoNV + { + using NativeType = VkGeneratedCommandsMemoryRequirementsInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, + uint32_t maxSequencesCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , pipeline{ pipeline_ } + , indirectCommandsLayout{ indirectCommandsLayout_ } + , maxSequencesCount{ maxSequencesCount_ } + { + } + + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GeneratedCommandsMemoryRequirementsInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + GeneratedCommandsMemoryRequirementsInfoNV & operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + GeneratedCommandsMemoryRequirementsInfoNV & operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = 
pipelineBindPoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & + setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT + { + maxSequencesCount = maxSequencesCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGeneratedCommandsMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsMemoryRequirementsInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkGeneratedCommandsMemoryRequirementsInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineBindPoint, pipeline, indirectCommandsLayout, maxSequencesCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const & ) const = default; +#else + bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipeline == rhs.pipeline ) && + ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( maxSequencesCount == rhs.maxSequencesCount ); +# endif + } + + bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; + uint32_t maxSequencesCount = {}; + }; + + template <> + struct CppType + { + using Type = GeneratedCommandsMemoryRequirementsInfoNV; + }; + + // wrapper struct for struct VkGeneratedCommandsPipelineInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsPipelineInfoEXT.html + struct GeneratedCommandsPipelineInfoEXT + { + using NativeType = VkGeneratedCommandsPipelineInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsPipelineInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsPipelineInfoEXT( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipeline{ 
pipeline_ } + { + } + + VULKAN_HPP_CONSTEXPR GeneratedCommandsPipelineInfoEXT( GeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsPipelineInfoEXT( VkGeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : GeneratedCommandsPipelineInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + GeneratedCommandsPipelineInfoEXT & operator=( GeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + GeneratedCommandsPipelineInfoEXT & operator=( VkGeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsPipelineInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsPipelineInfoEXT & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGeneratedCommandsPipelineInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsPipelineInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsPipelineInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkGeneratedCommandsPipelineInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipeline ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeneratedCommandsPipelineInfoEXT const & ) const = default; +#else + bool operator==( GeneratedCommandsPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ); +# endif + } + + bool operator!=( GeneratedCommandsPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsPipelineInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + }; + + template <> + struct CppType + { + using Type = GeneratedCommandsPipelineInfoEXT; + }; + + // wrapper struct for struct VkGeneratedCommandsShaderInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsShaderInfoEXT.html + struct GeneratedCommandsShaderInfoEXT + { + using NativeType = VkGeneratedCommandsShaderInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsShaderInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsShaderInfoEXT( uint32_t shaderCount_ = {}, + const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderCount{ shaderCount_ } + , pShaders{ pShaders_ } + { + } + + VULKAN_HPP_CONSTEXPR GeneratedCommandsShaderInfoEXT( 
GeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsShaderInfoEXT( VkGeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : GeneratedCommandsShaderInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GeneratedCommandsShaderInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shaders_, + void * pNext_ = nullptr ) + : pNext( pNext_ ), shaderCount( static_cast( shaders_.size() ) ), pShaders( shaders_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + GeneratedCommandsShaderInfoEXT & operator=( GeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + GeneratedCommandsShaderInfoEXT & operator=( VkGeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT & setShaderCount( uint32_t shaderCount_ ) VULKAN_HPP_NOEXCEPT + { + shaderCount = shaderCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT & setPShaders( const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders_ ) VULKAN_HPP_NOEXCEPT + { + pShaders = pShaders_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GeneratedCommandsShaderInfoEXT & + setShaders( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shaders_ ) VULKAN_HPP_NOEXCEPT + { + shaderCount = static_cast( shaders_.size() ); + pShaders = shaders_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGeneratedCommandsShaderInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsShaderInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsShaderInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkGeneratedCommandsShaderInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderCount, pShaders ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeneratedCommandsShaderInfoEXT const & ) const = default; +#else + bool operator==( GeneratedCommandsShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCount == rhs.shaderCount ) && ( pShaders == rhs.pShaders ); +# endif + } + + bool operator!=( GeneratedCommandsShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsShaderInfoEXT; + void * pNext = {}; + uint32_t shaderCount = {}; + const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders = {}; + }; + + template <> + struct CppType + { + using Type = GeneratedCommandsShaderInfoEXT; + }; + + // wrapper struct for struct 
VkLatencyTimingsFrameReportNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencyTimingsFrameReportNV.html + struct LatencyTimingsFrameReportNV + { + using NativeType = VkLatencyTimingsFrameReportNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencyTimingsFrameReportNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR LatencyTimingsFrameReportNV( uint64_t presentID_ = {}, + uint64_t inputSampleTimeUs_ = {}, + uint64_t simStartTimeUs_ = {}, + uint64_t simEndTimeUs_ = {}, + uint64_t renderSubmitStartTimeUs_ = {}, + uint64_t renderSubmitEndTimeUs_ = {}, + uint64_t presentStartTimeUs_ = {}, + uint64_t presentEndTimeUs_ = {}, + uint64_t driverStartTimeUs_ = {}, + uint64_t driverEndTimeUs_ = {}, + uint64_t osRenderQueueStartTimeUs_ = {}, + uint64_t osRenderQueueEndTimeUs_ = {}, + uint64_t gpuRenderStartTimeUs_ = {}, + uint64_t gpuRenderEndTimeUs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentID{ presentID_ } + , inputSampleTimeUs{ inputSampleTimeUs_ } + , simStartTimeUs{ simStartTimeUs_ } + , simEndTimeUs{ simEndTimeUs_ } + , renderSubmitStartTimeUs{ renderSubmitStartTimeUs_ } + , renderSubmitEndTimeUs{ renderSubmitEndTimeUs_ } + , presentStartTimeUs{ presentStartTimeUs_ } + , presentEndTimeUs{ presentEndTimeUs_ } + , driverStartTimeUs{ driverStartTimeUs_ } + , driverEndTimeUs{ driverEndTimeUs_ } + , osRenderQueueStartTimeUs{ osRenderQueueStartTimeUs_ } + , osRenderQueueEndTimeUs{ osRenderQueueEndTimeUs_ } + , gpuRenderStartTimeUs{ gpuRenderStartTimeUs_ } + , gpuRenderEndTimeUs{ gpuRenderEndTimeUs_ } + { + } + + VULKAN_HPP_CONSTEXPR LatencyTimingsFrameReportNV( LatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LatencyTimingsFrameReportNV( VkLatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT + : LatencyTimingsFrameReportNV( *reinterpret_cast( &rhs ) ) + { + } + + LatencyTimingsFrameReportNV & operator=( LatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + LatencyTimingsFrameReportNV & operator=( VkLatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkLatencyTimingsFrameReportNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLatencyTimingsFrameReportNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLatencyTimingsFrameReportNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkLatencyTimingsFrameReportNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + presentID, + inputSampleTimeUs, + simStartTimeUs, + simEndTimeUs, + renderSubmitStartTimeUs, + renderSubmitEndTimeUs, + presentStartTimeUs, + presentEndTimeUs, + driverStartTimeUs, + driverEndTimeUs, + osRenderQueueStartTimeUs, + osRenderQueueEndTimeUs, + gpuRenderStartTimeUs, + gpuRenderEndTimeUs ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( LatencyTimingsFrameReportNV const & ) const = default; +#else + bool operator==( LatencyTimingsFrameReportNV const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentID == rhs.presentID ) && ( inputSampleTimeUs == rhs.inputSampleTimeUs ) && + ( simStartTimeUs == rhs.simStartTimeUs ) && ( simEndTimeUs == rhs.simEndTimeUs ) && ( renderSubmitStartTimeUs == rhs.renderSubmitStartTimeUs ) && + ( renderSubmitEndTimeUs == rhs.renderSubmitEndTimeUs ) && ( presentStartTimeUs == rhs.presentStartTimeUs ) && + ( presentEndTimeUs == rhs.presentEndTimeUs ) && ( driverStartTimeUs == rhs.driverStartTimeUs ) && ( driverEndTimeUs == rhs.driverEndTimeUs ) && + ( osRenderQueueStartTimeUs == rhs.osRenderQueueStartTimeUs ) && ( osRenderQueueEndTimeUs == rhs.osRenderQueueEndTimeUs ) && + ( gpuRenderStartTimeUs == rhs.gpuRenderStartTimeUs ) && ( gpuRenderEndTimeUs == rhs.gpuRenderEndTimeUs ); +# endif + } + + bool operator!=( LatencyTimingsFrameReportNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencyTimingsFrameReportNV; + const void * pNext = {}; + uint64_t presentID = {}; + uint64_t inputSampleTimeUs = {}; + uint64_t simStartTimeUs = {}; + uint64_t simEndTimeUs = {}; + uint64_t renderSubmitStartTimeUs = {}; + uint64_t renderSubmitEndTimeUs = {}; + uint64_t presentStartTimeUs = {}; + uint64_t presentEndTimeUs = {}; + uint64_t driverStartTimeUs = {}; + uint64_t driverEndTimeUs = {}; + uint64_t osRenderQueueStartTimeUs = {}; + uint64_t osRenderQueueEndTimeUs = {}; + uint64_t gpuRenderStartTimeUs = {}; + uint64_t gpuRenderEndTimeUs = {}; + }; + + template <> + struct CppType + { + using Type = LatencyTimingsFrameReportNV; + }; + + // wrapper struct for struct VkGetLatencyMarkerInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGetLatencyMarkerInfoNV.html + struct GetLatencyMarkerInfoNV + { + using NativeType = VkGetLatencyMarkerInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGetLatencyMarkerInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GetLatencyMarkerInfoNV( uint32_t timingCount_ = {}, + VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV * pTimings_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , timingCount{ timingCount_ } + , pTimings{ pTimings_ } + { + } + + VULKAN_HPP_CONSTEXPR GetLatencyMarkerInfoNV( GetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GetLatencyMarkerInfoNV( VkGetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GetLatencyMarkerInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GetLatencyMarkerInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & timings_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), timingCount( static_cast( timings_.size() ) ), pTimings( timings_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + GetLatencyMarkerInfoNV & operator=( GetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + GetLatencyMarkerInfoNV & operator=( VkGetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
GetLatencyMarkerInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV & setTimingCount( uint32_t timingCount_ ) VULKAN_HPP_NOEXCEPT + { + timingCount = timingCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV & setPTimings( VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV * pTimings_ ) VULKAN_HPP_NOEXCEPT + { + pTimings = pTimings_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GetLatencyMarkerInfoNV & + setTimings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & timings_ ) VULKAN_HPP_NOEXCEPT + { + timingCount = static_cast( timings_.size() ); + pTimings = timings_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGetLatencyMarkerInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGetLatencyMarkerInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGetLatencyMarkerInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkGetLatencyMarkerInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, timingCount, pTimings ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GetLatencyMarkerInfoNV const & ) const = default; +#else + bool operator==( GetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timingCount == rhs.timingCount ) && ( pTimings == rhs.pTimings ); +# endif + } + + bool operator!=( GetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGetLatencyMarkerInfoNV; + const void * pNext = {}; + uint32_t timingCount = {}; + VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV * pTimings = {}; + }; + + template <> + struct CppType + { + using Type = GetLatencyMarkerInfoNV; + }; + + // wrapper struct for struct VkVertexInputBindingDescription, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVertexInputBindingDescription.html + struct VertexInputBindingDescription + { + using NativeType = VkVertexInputBindingDescription; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VertexInputBindingDescription( uint32_t binding_ = {}, + uint32_t stride_ = {}, + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT + : binding{ binding_ } + , stride{ stride_ } + , inputRate{ inputRate_ } + { + } + + VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputBindingDescription( *reinterpret_cast( &rhs ) ) + { + } + + VertexInputBindingDescription & operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + 
VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputRate = inputRate_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkVertexInputBindingDescription const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVertexInputBindingDescription *>( this );
+    }
+
+    operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVertexInputBindingDescription *>( this );
+    }
+
+    operator VkVertexInputBindingDescription const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkVertexInputBindingDescription *>( this );
+    }
+
+    operator VkVertexInputBindingDescription *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkVertexInputBindingDescription *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::VertexInputRate const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( binding, stride, inputRate );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VertexInputBindingDescription const & ) const = default;
+#else
+    bool operator==( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate );
+#  endif
+    }
+
+    bool operator!=( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t                              binding   = {};
+    uint32_t                              stride    = {};
+    VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
+  };
+
+  // wrapper struct for struct VkVertexInputAttributeDescription, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVertexInputAttributeDescription.html
+  struct VertexInputAttributeDescription
+  {
+    using NativeType = VkVertexInputAttributeDescription;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( uint32_t                     location_ = {},
+                                                          uint32_t                     binding_  = {},
+                                                          VULKAN_HPP_NAMESPACE::Format format_   = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                                          uint32_t                     offset_   = {} ) VULKAN_HPP_NOEXCEPT
+      : location{ location_ }
+      , binding{ binding_ }
+      , format{ format_ }
+      , offset{ offset_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VertexInputAttributeDescription( *reinterpret_cast<VertexInputAttributeDescription const *>( &rhs ) )
+    {
+    }
+
+    VertexInputAttributeDescription & operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription const *>(
&rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT + { + location = location_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVertexInputAttributeDescription const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputAttributeDescription const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVertexInputAttributeDescription *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( location, binding, format, offset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputAttributeDescription const & ) const = default; +#else + bool operator==( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) && ( offset == rhs.offset ); +# endif + } + + bool operator!=( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t location = {}; + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t offset = {}; + }; + + // wrapper struct for struct VkPipelineVertexInputStateCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineVertexInputStateCreateInfo.html + struct PipelineVertexInputStateCreateInfo + { + using NativeType = VkPipelineVertexInputStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {}, + uint32_t vertexBindingDescriptionCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ = {}, + uint32_t vertexAttributeDescriptionCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , vertexBindingDescriptionCount{ vertexBindingDescriptionCount_ } + , pVertexBindingDescriptions{ pVertexBindingDescriptions_ } + , vertexAttributeDescriptionCount{ vertexAttributeDescriptionCount_ } + , 
pVertexAttributeDescriptions{ pVertexAttributeDescriptions_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineVertexInputStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDescriptions_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexAttributeDescriptions_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , vertexBindingDescriptionCount( static_cast( vertexBindingDescriptions_.size() ) ) + , pVertexBindingDescriptions( vertexBindingDescriptions_.data() ) + , vertexAttributeDescriptionCount( static_cast( vertexAttributeDescriptions_.size() ) ) + , pVertexAttributeDescriptions( vertexAttributeDescriptions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineVertexInputStateCreateInfo & operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineVertexInputStateCreateInfo & operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingDescriptionCount = vertexBindingDescriptionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & + setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT + { + pVertexBindingDescriptions = pVertexBindingDescriptions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputStateCreateInfo & setVertexBindingDescriptions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDescriptions_ ) + VULKAN_HPP_NOEXCEPT + { + vertexBindingDescriptionCount = static_cast( vertexBindingDescriptions_.size() ); + pVertexBindingDescriptions = vertexBindingDescriptions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & + setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & + setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT + { + pVertexAttributeDescriptions = pVertexAttributeDescriptions_; + return *this; + } + +# if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexAttributeDescriptions_ ) + VULKAN_HPP_NOEXCEPT + { + vertexAttributeDescriptionCount = static_cast( vertexAttributeDescriptions_.size() ); + pVertexAttributeDescriptions = vertexAttributeDescriptions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineVertexInputStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineVertexInputStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineVertexInputStateCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, flags, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineVertexInputStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) && + ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions ); +# endif + } + + bool operator!=( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {}; + uint32_t vertexBindingDescriptionCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions = {}; + uint32_t vertexAttributeDescriptionCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions = {}; + }; + + template <> + struct CppType + { + using Type = PipelineVertexInputStateCreateInfo; + }; + + // wrapper struct for struct VkPipelineInputAssemblyStateCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineInputAssemblyStateCreateInfo.html + struct PipelineInputAssemblyStateCreateInfo + { + using NativeType = VkPipelineInputAssemblyStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInputAssemblyStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineInputAssemblyStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = 
VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, + VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , topology{ topology_ } + , primitiveRestartEnable{ primitiveRestartEnable_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineInputAssemblyStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PipelineInputAssemblyStateCreateInfo & operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineInputAssemblyStateCreateInfo & operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT + { + topology = topology_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & + setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT + { + primitiveRestartEnable = primitiveRestartEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineInputAssemblyStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineInputAssemblyStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineInputAssemblyStateCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, topology, primitiveRestartEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineInputAssemblyStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( topology == rhs.topology ) && + ( primitiveRestartEnable == rhs.primitiveRestartEnable ); +# endif + } + + bool operator!=( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = 
{}; + VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList; + VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {}; + }; + + template <> + struct CppType + { + using Type = PipelineInputAssemblyStateCreateInfo; + }; + + // wrapper struct for struct VkPipelineTessellationStateCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineTessellationStateCreateInfo.html + struct PipelineTessellationStateCreateInfo + { + using NativeType = VkPipelineTessellationStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, + uint32_t patchControlPoints_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , patchControlPoints{ patchControlPoints_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineTessellationStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PipelineTessellationStateCreateInfo & operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineTessellationStateCreateInfo & operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT + { + patchControlPoints = patchControlPoints_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineTessellationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineTessellationStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineTessellationStateCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, patchControlPoints ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineTessellationStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return 
this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( patchControlPoints == rhs.patchControlPoints ); +# endif + } + + bool operator!=( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {}; + uint32_t patchControlPoints = {}; + }; + + template <> + struct CppType + { + using Type = PipelineTessellationStateCreateInfo; + }; + + // wrapper struct for struct VkPipelineViewportStateCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportStateCreateInfo.html + struct PipelineViewportStateCreateInfo + { + using NativeType = VkPipelineViewportStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ = {}, + uint32_t scissorCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , viewportCount{ viewportCount_ } + , pViewports{ pViewports_ } + , scissorCount{ scissorCount_ } + , pScissors{ pScissors_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewports_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & scissors_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , viewportCount( static_cast( viewports_.size() ) ) + , pViewports( viewports_.data() ) + , scissorCount( static_cast( scissors_.size() ) ) + , pScissors( scissors_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineViewportStateCreateInfo & operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setViewportCount( uint32_t viewportCount_ ) 
VULKAN_HPP_NOEXCEPT + { + viewportCount = viewportCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPViewports( const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ ) VULKAN_HPP_NOEXCEPT + { + pViewports = pViewports_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportStateCreateInfo & + setViewports( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewports_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( viewports_.size() ); + pViewports = viewports_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT + { + scissorCount = scissorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ ) VULKAN_HPP_NOEXCEPT + { + pScissors = pScissors_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportStateCreateInfo & + setScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & scissors_ ) VULKAN_HPP_NOEXCEPT + { + scissorCount = static_cast( scissors_.size() ); + pScissors = scissors_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineViewportStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineViewportStateCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, viewportCount, pViewports, scissorCount, pScissors ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewportCount == rhs.viewportCount ) && + ( pViewports == rhs.pViewports ) && ( scissorCount == rhs.scissorCount ) && ( pScissors == rhs.pScissors ); +# endif + } + + bool operator!=( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::Viewport * pViewports = {}; + uint32_t scissorCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors = {}; + }; + + template <> + struct CppType + { + using Type = PipelineViewportStateCreateInfo; + }; + + // wrapper struct for struct VkPipelineRasterizationStateCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationStateCreateInfo.html + struct PipelineRasterizationStateCreateInfo + { + using 
NativeType = VkPipelineRasterizationStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {}, + VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill, + VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {}, + VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise, + VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {}, + float depthBiasConstantFactor_ = {}, + float depthBiasClamp_ = {}, + float depthBiasSlopeFactor_ = {}, + float lineWidth_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , depthClampEnable{ depthClampEnable_ } + , rasterizerDiscardEnable{ rasterizerDiscardEnable_ } + , polygonMode{ polygonMode_ } + , cullMode{ cullMode_ } + , frontFace{ frontFace_ } + , depthBiasEnable{ depthBiasEnable_ } + , depthBiasConstantFactor{ depthBiasConstantFactor_ } + , depthBiasClamp{ depthBiasClamp_ } + , depthBiasSlopeFactor{ depthBiasSlopeFactor_ } + , lineWidth{ lineWidth_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PipelineRasterizationStateCreateInfo & operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineRasterizationStateCreateInfo & operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthClampEnable = depthClampEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & + setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT + { + rasterizerDiscardEnable = rasterizerDiscardEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT + { + polygonMode = polygonMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT + { + cullMode = cullMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PipelineRasterizationStateCreateInfo & setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT + { + frontFace = frontFace_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasEnable = depthBiasEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasConstantFactor = depthBiasConstantFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasClamp = depthBiasClamp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasSlopeFactor = depthBiasSlopeFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setLineWidth( float lineWidth_ ) VULKAN_HPP_NOEXCEPT + { + lineWidth = lineWidth_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineRasterizationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineRasterizationStateCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + depthClampEnable, + rasterizerDiscardEnable, + polygonMode, + cullMode, + frontFace, + depthBiasEnable, + depthBiasConstantFactor, + depthBiasClamp, + depthBiasSlopeFactor, + lineWidth ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthClampEnable == rhs.depthClampEnable ) && + ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable ) && ( polygonMode == rhs.polygonMode ) && ( cullMode == rhs.cullMode ) && + ( frontFace == rhs.frontFace ) && ( depthBiasEnable == rhs.depthBiasEnable ) && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) && + ( depthBiasClamp == rhs.depthBiasClamp ) && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ) && ( lineWidth == rhs.lineWidth ); +# endif + } + + bool operator!=( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {}; + VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = 
VULKAN_HPP_NAMESPACE::PolygonMode::eFill; + VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {}; + VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {}; + float depthBiasConstantFactor = {}; + float depthBiasClamp = {}; + float depthBiasSlopeFactor = {}; + float lineWidth = {}; + }; + + template <> + struct CppType + { + using Type = PipelineRasterizationStateCreateInfo; + }; + + // wrapper struct for struct VkPipelineMultisampleStateCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineMultisampleStateCreateInfo.html + struct PipelineMultisampleStateCreateInfo + { + using NativeType = VkPipelineMultisampleStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineMultisampleStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineMultisampleStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {}, + float minSampleShading_ = {}, + const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , rasterizationSamples{ rasterizationSamples_ } + , sampleShadingEnable{ sampleShadingEnable_ } + , minSampleShading{ minSampleShading_ } + , pSampleMask{ pSampleMask_ } + , alphaToCoverageEnable{ alphaToCoverageEnable_ } + , alphaToOneEnable{ alphaToOneEnable_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineMultisampleStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PipelineMultisampleStateCreateInfo & operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineMultisampleStateCreateInfo & operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & + setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationSamples = rasterizationSamples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT + { + sampleShadingEnable = sampleShadingEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PipelineMultisampleStateCreateInfo & setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT + { + minSampleShading = minSampleShading_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ ) VULKAN_HPP_NOEXCEPT + { + pSampleMask = pSampleMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & + setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT + { + alphaToCoverageEnable = alphaToCoverageEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT + { + alphaToOneEnable = alphaToOneEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineMultisampleStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineMultisampleStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineMultisampleStateCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, rasterizationSamples, sampleShadingEnable, minSampleShading, pSampleMask, alphaToCoverageEnable, alphaToOneEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineMultisampleStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rasterizationSamples == rhs.rasterizationSamples ) && + ( sampleShadingEnable == rhs.sampleShadingEnable ) && ( minSampleShading == rhs.minSampleShading ) && ( pSampleMask == rhs.pSampleMask ) && + ( alphaToCoverageEnable == rhs.alphaToCoverageEnable ) && ( alphaToOneEnable == rhs.alphaToOneEnable ); +# endif + } + + bool operator!=( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {}; + float minSampleShading = {}; + const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {}; + }; + + template <> + struct CppType + { + using Type = PipelineMultisampleStateCreateInfo; + }; + + // wrapper struct for struct VkStencilOpState, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkStencilOpState.html + struct StencilOpState + { + using NativeType = VkStencilOpState; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
VULKAN_HPP_CONSTEXPR StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, + uint32_t compareMask_ = {}, + uint32_t writeMask_ = {}, + uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT + : failOp{ failOp_ } + , passOp{ passOp_ } + , depthFailOp{ depthFailOp_ } + , compareOp{ compareOp_ } + , compareMask{ compareMask_ } + , writeMask{ writeMask_ } + , reference{ reference_ } + { + } + + VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT : StencilOpState( *reinterpret_cast( &rhs ) ) {} + + StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + StencilOpState & operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setFailOp( VULKAN_HPP_NAMESPACE::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT + { + failOp = failOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setPassOp( VULKAN_HPP_NAMESPACE::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT + { + passOp = passOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT + { + depthFailOp = depthFailOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT + { + compareOp = compareOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareMask( uint32_t compareMask_ ) VULKAN_HPP_NOEXCEPT + { + compareMask = compareMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setWriteMask( uint32_t writeMask_ ) VULKAN_HPP_NOEXCEPT + { + writeMask = writeMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setReference( uint32_t reference_ ) VULKAN_HPP_NOEXCEPT + { + reference = reference_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkStencilOpState const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkStencilOpState const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkStencilOpState *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( failOp, passOp, depthFailOp, compareOp, compareMask, writeMask, reference ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( StencilOpState const & ) const = default; +#else + bool operator==( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( failOp == rhs.failOp ) && ( passOp == rhs.passOp ) && ( depthFailOp == rhs.depthFailOp ) && ( compareOp == rhs.compareOp ) && + ( 
compareMask == rhs.compareMask ) && ( writeMask == rhs.writeMask ) && ( reference == rhs.reference ); +# endif + } + + bool operator!=( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; + VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; + VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; + uint32_t compareMask = {}; + uint32_t writeMask = {}; + uint32_t reference = {}; + }; + + // wrapper struct for struct VkPipelineDepthStencilStateCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineDepthStencilStateCreateInfo.html + struct PipelineDepthStencilStateCreateInfo + { + using NativeType = VkPipelineDepthStencilStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDepthStencilStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {}, + VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, + VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {}, + VULKAN_HPP_NAMESPACE::StencilOpState front_ = {}, + VULKAN_HPP_NAMESPACE::StencilOpState back_ = {}, + float minDepthBounds_ = {}, + float maxDepthBounds_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , depthTestEnable{ depthTestEnable_ } + , depthWriteEnable{ depthWriteEnable_ } + , depthCompareOp{ depthCompareOp_ } + , depthBoundsTestEnable{ depthBoundsTestEnable_ } + , stencilTestEnable{ stencilTestEnable_ } + , front{ front_ } + , back{ back_ } + , minDepthBounds{ minDepthBounds_ } + , maxDepthBounds{ maxDepthBounds_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineDepthStencilStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PipelineDepthStencilStateCreateInfo & operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineDepthStencilStateCreateInfo & operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) 
VULKAN_HPP_NOEXCEPT + { + depthTestEnable = depthTestEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthWriteEnable = depthWriteEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT + { + depthCompareOp = depthCompareOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & + setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthBoundsTestEnable = depthBoundsTestEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT + { + stencilTestEnable = stencilTestEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT + { + front = front_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT + { + back = back_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT + { + minDepthBounds = minDepthBounds_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT + { + maxDepthBounds = maxDepthBounds_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineDepthStencilStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineDepthStencilStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineDepthStencilStateCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + depthTestEnable, + depthWriteEnable, + depthCompareOp, + depthBoundsTestEnable, + stencilTestEnable, + front, + back, + minDepthBounds, + maxDepthBounds ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineDepthStencilStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthTestEnable == rhs.depthTestEnable ) && + ( depthWriteEnable == rhs.depthWriteEnable ) && ( depthCompareOp == rhs.depthCompareOp ) && + ( depthBoundsTestEnable == rhs.depthBoundsTestEnable ) && ( stencilTestEnable == rhs.stencilTestEnable ) && ( front == rhs.front ) && + ( back == rhs.back ) && ( minDepthBounds == rhs.minDepthBounds ) && ( maxDepthBounds == rhs.maxDepthBounds ); +# endif + } + + bool operator!=( PipelineDepthStencilStateCreateInfo const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {}; + VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; + VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {}; + VULKAN_HPP_NAMESPACE::StencilOpState front = {}; + VULKAN_HPP_NAMESPACE::StencilOpState back = {}; + float minDepthBounds = {}; + float maxDepthBounds = {}; + }; + + template <> + struct CppType + { + using Type = PipelineDepthStencilStateCreateInfo; + }; + + // wrapper struct for struct VkPipelineColorBlendAttachmentState, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineColorBlendAttachmentState.html + struct PipelineColorBlendAttachmentState + { + using NativeType = VkPipelineColorBlendAttachmentState; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {}, + VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, + VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, + VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {} ) VULKAN_HPP_NOEXCEPT + : blendEnable{ blendEnable_ } + , srcColorBlendFactor{ srcColorBlendFactor_ } + , dstColorBlendFactor{ dstColorBlendFactor_ } + , colorBlendOp{ colorBlendOp_ } + , srcAlphaBlendFactor{ srcAlphaBlendFactor_ } + , dstAlphaBlendFactor{ dstAlphaBlendFactor_ } + , alphaBlendOp{ alphaBlendOp_ } + , colorWriteMask{ colorWriteMask_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineColorBlendAttachmentState( *reinterpret_cast( &rhs ) ) + { + } + + PipelineColorBlendAttachmentState & operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT + { + blendEnable = blendEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + srcColorBlendFactor = srcColorBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PipelineColorBlendAttachmentState & + setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + dstColorBlendFactor = dstColorBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT + { + colorBlendOp = colorBlendOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + srcAlphaBlendFactor = srcAlphaBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + dstAlphaBlendFactor = dstAlphaBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT + { + alphaBlendOp = alphaBlendOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT + { + colorWriteMask = colorWriteMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineColorBlendAttachmentState const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineColorBlendAttachmentState const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineColorBlendAttachmentState *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + blendEnable, srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp, colorWriteMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineColorBlendAttachmentState const & ) const = default; +#else + bool operator==( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( blendEnable == rhs.blendEnable ) && ( srcColorBlendFactor == rhs.srcColorBlendFactor ) && ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && + ( colorBlendOp == rhs.colorBlendOp ) && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) && + ( alphaBlendOp == rhs.alphaBlendOp ) && ( colorWriteMask == rhs.colorWriteMask ); +# endif + } + + bool operator!=( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {}; + VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; + VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = 
VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; + VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {}; + }; + + // wrapper struct for struct VkPipelineColorBlendStateCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineColorBlendStateCreateInfo.html + struct PipelineColorBlendStateCreateInfo + { + using NativeType = VkPipelineColorBlendStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {}, + VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ = {}, + std::array const & blendConstants_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , logicOpEnable{ logicOpEnable_ } + , logicOp{ logicOp_ } + , attachmentCount{ attachmentCount_ } + , pAttachments{ pAttachments_ } + , blendConstants{ blendConstants_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineColorBlendStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineColorBlendStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_, + VULKAN_HPP_NAMESPACE::LogicOp logicOp_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, + std::array const & blendConstants_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , logicOpEnable( logicOpEnable_ ) + , logicOp( logicOp_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , blendConstants( blendConstants_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineColorBlendStateCreateInfo & operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT + { + logicOpEnable = logicOpEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOp( 
VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT + { + logicOp = logicOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & + setPAttachments( const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineColorBlendStateCreateInfo & setAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setBlendConstants( std::array blendConstants_ ) VULKAN_HPP_NOEXCEPT + { + blendConstants = blendConstants_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineColorBlendStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineColorBlendStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineColorBlendStateCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, logicOpEnable, logicOp, attachmentCount, pAttachments, blendConstants ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineColorBlendStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( logicOpEnable == rhs.logicOpEnable ) && + ( logicOp == rhs.logicOp ) && ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) && + ( blendConstants == rhs.blendConstants ); +# endif + } + + bool operator!=( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {}; + VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D blendConstants = {}; + }; + + template <> + struct CppType + { + using Type = PipelineColorBlendStateCreateInfo; + }; + + // wrapper struct for struct VkPipelineDynamicStateCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineDynamicStateCreateInfo.html + struct PipelineDynamicStateCreateInfo + { + using NativeType = 
VkPipelineDynamicStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDynamicStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {}, + uint32_t dynamicStateCount_ = {}, + const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , dynamicStateCount{ dynamicStateCount_ } + , pDynamicStates{ pDynamicStates_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineDynamicStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicStates_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ), dynamicStateCount( static_cast( dynamicStates_.size() ) ), pDynamicStates( dynamicStates_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineDynamicStateCreateInfo & operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT + { + dynamicStateCount = dynamicStateCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ ) VULKAN_HPP_NOEXCEPT + { + pDynamicStates = pDynamicStates_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineDynamicStateCreateInfo & + setDynamicStates( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicStates_ ) VULKAN_HPP_NOEXCEPT + { + dynamicStateCount = static_cast( dynamicStates_.size() ); + pDynamicStates = dynamicStates_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineDynamicStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineDynamicStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineDynamicStateCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, dynamicStateCount, pDynamicStates ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineDynamicStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dynamicStateCount == rhs.dynamicStateCount ) && + ( pDynamicStates == rhs.pDynamicStates ); +# endif + } + + bool operator!=( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {}; + uint32_t dynamicStateCount = {}; + const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates = {}; + }; + + template <> + struct CppType + { + using Type = PipelineDynamicStateCreateInfo; + }; + + // wrapper struct for struct VkGraphicsPipelineCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGraphicsPipelineCreateInfo.html + struct GraphicsPipelineCreateInfo + { + using NativeType = VkGraphicsPipelineCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , stageCount{ stageCount_ } + , pStages{ pStages_ } + , pVertexInputState{ pVertexInputState_ } + , pInputAssemblyState{ pInputAssemblyState_ } + , pTessellationState{ pTessellationState_ } + , pViewportState{ pViewportState_ } + , pRasterizationState{ pRasterizationState_ } + , pMultisampleState{ pMultisampleState_ } + , pDepthStencilState{ pDepthStencilState_ } + , pColorBlendState{ pColorBlendState_ } + , 
pDynamicState{ pDynamicState_ } + , layout{ layout_ } + , renderPass{ renderPass_ } + , subpass{ subpass_ } + , basePipelineHandle{ basePipelineHandle_ } + , basePipelineIndex{ basePipelineIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : GraphicsPipelineCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , pVertexInputState( pVertexInputState_ ) + , pInputAssemblyState( pInputAssemblyState_ ) + , pTessellationState( pTessellationState_ ) + , pViewportState( pViewportState_ ) + , pRasterizationState( pRasterizationState_ ) + , pMultisampleState( pMultisampleState_ ) + , pDepthStencilState( pDepthStencilState_ ) + , pColorBlendState( pColorBlendState_ ) + , pDynamicState( pDynamicState_ ) + , layout( layout_ ) + , renderPass( renderPass_ ) + , subpass( subpass_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + GraphicsPipelineCreateInfo & operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + GraphicsPipelineCreateInfo & operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) 
VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineCreateInfo & + setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT + { + pVertexInputState = pVertexInputState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setPInputAssemblyState( const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT + { + pInputAssemblyState = pInputAssemblyState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT + { + pTessellationState = pTessellationState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setPViewportState( const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ ) VULKAN_HPP_NOEXCEPT + { + pViewportState = pViewportState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setPRasterizationState( const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ ) VULKAN_HPP_NOEXCEPT + { + pRasterizationState = pRasterizationState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setPMultisampleState( const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ ) VULKAN_HPP_NOEXCEPT + { + pMultisampleState = pMultisampleState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setPDepthStencilState( const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilState = pDepthStencilState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setPColorBlendState( const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ ) VULKAN_HPP_NOEXCEPT + { + pColorBlendState = pColorBlendState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + pDynamicState = pDynamicState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT + { + renderPass = renderPass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT + { + subpass = subpass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + 
basePipelineIndex = basePipelineIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGraphicsPipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsPipelineCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkGraphicsPipelineCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + stageCount, + pStages, + pVertexInputState, + pInputAssemblyState, + pTessellationState, + pViewportState, + pRasterizationState, + pMultisampleState, + pDepthStencilState, + pColorBlendState, + pDynamicState, + layout, + renderPass, + subpass, + basePipelineHandle, + basePipelineIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GraphicsPipelineCreateInfo const & ) const = default; +#else + bool operator==( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && + ( pVertexInputState == rhs.pVertexInputState ) && ( pInputAssemblyState == rhs.pInputAssemblyState ) && + ( pTessellationState == rhs.pTessellationState ) && ( pViewportState == rhs.pViewportState ) && + ( pRasterizationState == rhs.pRasterizationState ) && ( pMultisampleState == rhs.pMultisampleState ) && + ( pDepthStencilState == rhs.pDepthStencilState ) && ( pColorBlendState == rhs.pColorBlendState ) && ( pDynamicState == rhs.pDynamicState ) && + ( layout == rhs.layout ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ) && ( basePipelineHandle == rhs.basePipelineHandle ) && + ( basePipelineIndex == rhs.basePipelineIndex ); +# endif + } + + bool operator!=( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {}; + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState = {}; + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {}; + const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState = {}; + const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState = {}; + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState = {}; + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState = {}; + const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState = {}; + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t subpass = 
{}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + }; + + template <> + struct CppType + { + using Type = GraphicsPipelineCreateInfo; + }; + + // wrapper struct for struct VkGraphicsPipelineLibraryCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGraphicsPipelineLibraryCreateInfoEXT.html + struct GraphicsPipelineLibraryCreateInfoEXT + { + using NativeType = VkGraphicsPipelineLibraryCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsPipelineLibraryCreateInfoEXT( VkGraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : GraphicsPipelineLibraryCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + GraphicsPipelineLibraryCreateInfoEXT & operator=( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + GraphicsPipelineLibraryCreateInfoEXT & operator=( VkGraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGraphicsPipelineLibraryCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsPipelineLibraryCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsPipelineLibraryCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkGraphicsPipelineLibraryCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GraphicsPipelineLibraryCreateInfoEXT const & ) const = default; +#else + bool operator==( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT; 
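// ---------------------------------------------------------------------------
// Editorial sketch only -- not part of the generated header or of this patch.
// The pipeline-state wrappers defined above expose fluent setters and
// ArrayProxy overloads; as a rough illustration of how they compose into a
// vk::GraphicsPipelineCreateInfo, a minimal sketch follows. It assumes the
// default "vk" namespace alias, that constructors/setters/enhanced mode are
// not disabled, and that pipelineLayout, renderPass and shaderStages already
// exist (all hypothetical names). The include path may differ for a vendored
// copy of the headers, and pipeline creation itself is omitted.
// ---------------------------------------------------------------------------
#include <array>
#include <vector>
#include <vulkan/vulkan.hpp>

// Hypothetical helper: assembles the create-info; the caller would pass it on
// to vkCreateGraphicsPipelines / vk::Device::createGraphicsPipeline.
void describeGraphicsPipeline( vk::PipelineLayout                                      pipelineLayout,
                               vk::RenderPass                                          renderPass,
                               std::vector<vk::PipelineShaderStageCreateInfo> const & shaderStages )
{
  vk::PipelineVertexInputStateCreateInfo   vertexInput;    // no vertex attributes in this sketch
  vk::PipelineInputAssemblyStateCreateInfo inputAssembly;
  inputAssembly.setTopology( vk::PrimitiveTopology::eTriangleList );

  vk::PipelineViewportStateCreateInfo viewportState;
  viewportState.setViewportCount( 1 ).setScissorCount( 1 ); // actual rects supplied via dynamic state

  vk::PipelineRasterizationStateCreateInfo rasterState;
  rasterState.setPolygonMode( vk::PolygonMode::eFill )
             .setCullMode( vk::CullModeFlagBits::eBack )
             .setFrontFace( vk::FrontFace::eCounterClockwise )
             .setLineWidth( 1.0f );

  vk::PipelineMultisampleStateCreateInfo msState;           // defaults: 1 sample, no sample shading

  vk::PipelineDepthStencilStateCreateInfo dsState;
  dsState.setDepthTestEnable( VK_TRUE ).setDepthWriteEnable( VK_TRUE ).setDepthCompareOp( vk::CompareOp::eLess );

  vk::PipelineColorBlendAttachmentState blendAttachment;    // blending disabled, write all channels
  blendAttachment.setColorWriteMask( vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG |
                                     vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA );
  vk::PipelineColorBlendStateCreateInfo blendState;
  blendState.setAttachments( blendAttachment );             // ArrayProxy overload fills attachmentCount + pAttachments

  std::array<vk::DynamicState, 2> dynamicStates = { vk::DynamicState::eViewport, vk::DynamicState::eScissor };
  vk::PipelineDynamicStateCreateInfo dynamicState;
  dynamicState.setDynamicStates( dynamicStates );           // fills dynamicStateCount + pDynamicStates

  vk::GraphicsPipelineCreateInfo pipelineInfo;
  pipelineInfo.setStages( shaderStages )                    // fills stageCount + pStages
              .setPVertexInputState( &vertexInput )
              .setPInputAssemblyState( &inputAssembly )
              .setPViewportState( &viewportState )
              .setPRasterizationState( &rasterState )
              .setPMultisampleState( &msState )
              .setPDepthStencilState( &dsState )
              .setPColorBlendState( &blendState )
              .setPDynamicState( &dynamicState )
              .setLayout( pipelineLayout )
              .setRenderPass( renderPass )
              .setSubpass( 0 );
  // Pipeline creation (and the lifetime of the pointed-to state blocks) is
  // left to the caller; omitted here to keep the sketch short.
}
// ---------------------------------------------------------------------------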
+ const void * pNext = {}; + VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags = {}; + }; + + template <> + struct CppType + { + using Type = GraphicsPipelineLibraryCreateInfoEXT; + }; + + // wrapper struct for struct VkGraphicsShaderGroupCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGraphicsShaderGroupCreateInfoNV.html + struct GraphicsShaderGroupCreateInfoNV + { + using NativeType = VkGraphicsShaderGroupCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsShaderGroupCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stageCount{ stageCount_ } + , pStages{ pStages_ } + , pVertexInputState{ pVertexInputState_ } + , pTessellationState{ pTessellationState_ } + { + } + + VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GraphicsShaderGroupCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , pVertexInputState( pVertexInputState_ ) + , pTessellationState( pTessellationState_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + GraphicsShaderGroupCreateInfoNV & operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + GraphicsShaderGroupCreateInfoNV & operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsShaderGroupCreateInfoNV & + setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 
GraphicsShaderGroupCreateInfoNV & + setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT + { + pVertexInputState = pVertexInputState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & + setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT + { + pTessellationState = pTessellationState_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGraphicsShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsShaderGroupCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkGraphicsShaderGroupCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stageCount, pStages, pVertexInputState, pTessellationState ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GraphicsShaderGroupCreateInfoNV const & ) const = default; +#else + bool operator==( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && + ( pVertexInputState == rhs.pVertexInputState ) && ( pTessellationState == rhs.pTessellationState ); +# endif + } + + bool operator!=( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV; + const void * pNext = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {}; + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {}; + }; + + template <> + struct CppType + { + using Type = GraphicsShaderGroupCreateInfoNV; + }; + + // wrapper struct for struct VkGraphicsPipelineShaderGroupsCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGraphicsPipelineShaderGroupsCreateInfoNV.html + struct GraphicsPipelineShaderGroupsCreateInfoNV + { + using NativeType = VkGraphicsPipelineShaderGroupsCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ = {}, + uint32_t pipelineCount_ = {}, + const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , groupCount{ groupCount_ } + , pGroups{ pGroups_ } + , pipelineCount{ pipelineCount_ } + , pPipelines{ pPipelines_ } + { + } + + 
VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GraphicsPipelineShaderGroupsCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineShaderGroupsCreateInfoNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelines_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , groupCount( static_cast( groups_.size() ) ) + , pGroups( groups_.data() ) + , pipelineCount( static_cast( pipelines_.size() ) ) + , pPipelines( pipelines_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + GraphicsPipelineShaderGroupsCreateInfoNV & operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + GraphicsPipelineShaderGroupsCreateInfoNV & operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & + setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineShaderGroupsCreateInfoNV & setGroups( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCount = pipelineCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ ) VULKAN_HPP_NOEXCEPT + { + pPipelines = pPipelines_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineShaderGroupsCreateInfoNV & + setPipelines( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelines_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCount = static_cast( pipelines_.size() ); + pPipelines = pipelines_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGraphicsPipelineShaderGroupsCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsPipelineShaderGroupsCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkGraphicsPipelineShaderGroupsCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + 
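// ---------------------------------------------------------------------------
// Editorial sketch only -- not part of the generated header or of this patch.
// The two NV wrappers in this part of the header belong to
// VK_NV_device_generated_commands: shader groups are described with
// GraphicsShaderGroupCreateInfoNV, and the whole set is chained into the
// graphics pipeline create-info via pNext. The sketch below is hypothetical;
// baseStages and alternativeStages are placeholder names, and it assumes the
// extension is enabled and that enhanced mode / setters are not disabled.
// ---------------------------------------------------------------------------
#include <array>
#include <vector>
#include <vulkan/vulkan.hpp>

void describeShaderGroupPipeline( std::vector<vk::PipelineShaderStageCreateInfo> const & baseStages,
                                  std::vector<vk::PipelineShaderStageCreateInfo> const & alternativeStages )
{
  vk::PipelineVertexInputStateCreateInfo vertexInput;

  // Group 0 conventionally mirrors the stages of the pipeline itself; further groups are alternatives.
  std::array<vk::GraphicsShaderGroupCreateInfoNV, 2> shaderGroups = {
    vk::GraphicsShaderGroupCreateInfoNV{ baseStages, &vertexInput },
    vk::GraphicsShaderGroupCreateInfoNV{ alternativeStages, &vertexInput }
  };

  vk::GraphicsPipelineShaderGroupsCreateInfoNV groupsInfo;
  groupsInfo.setGroups( shaderGroups );      // ArrayProxy overload fills groupCount + pGroups

  vk::GraphicsPipelineCreateInfo pipelineInfo;
  pipelineInfo.setStages( baseStages )
              .setPNext( &groupsInfo );      // chain the NV struct into the pipeline create-info
  // Remaining fixed-function state and pipeline creation are set up as in the previous sketch.
}
// ---------------------------------------------------------------------------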
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::Pipeline * const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, groupCount, pGroups, pipelineCount, pPipelines );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( GraphicsPipelineShaderGroupsCreateInfoNV const & ) const = default;
+#else
+    bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( groupCount == rhs.groupCount ) && ( pGroups == rhs.pGroups ) &&
+             ( pipelineCount == rhs.pipelineCount ) && ( pPipelines == rhs.pPipelines );
+#  endif
+    }
+
+    bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
+    const void * pNext = {};
+    uint32_t groupCount = {};
+    const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups = {};
+    uint32_t pipelineCount = {};
+    const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV>
+  {
+    using Type = GraphicsPipelineShaderGroupsCreateInfoNV;
+  };
+
+  // wrapper struct for struct VkXYColorEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkXYColorEXT.html
+  struct XYColorEXT
+  {
+    using NativeType = VkXYColorEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT
+      : x{ x_ }
+      , y{ y_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT : XYColorEXT( *reinterpret_cast<XYColorEXT const *>( &rhs ) ) {}
+
+    XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    XYColorEXT & operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XYColorEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkXYColorEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkXYColorEXT *>( this );
+    }
+
+    operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkXYColorEXT *>( this );
+    }
+
+    operator VkXYColorEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkXYColorEXT *>( this );
+    }
+
+    operator VkXYColorEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkXYColorEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<float const &, float const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( x, y );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( XYColorEXT const & ) const = default;
+#else
+    bool operator==( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( x == rhs.x ) && ( y == rhs.y );
+#  endif
+    }
+
+    bool operator!=( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    float x = {};
+    float y = {};
+  };
+
+  // wrapper struct for struct VkHdrMetadataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkHdrMetadataEXT.html
+  struct HdrMetadataEXT
+  {
+    using NativeType = VkHdrMetadataEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR HdrMetadataEXT( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {},
+                                         VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {},
+                                         VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {},
+                                         VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {},
+                                         float maxLuminance_ = {},
+                                         float minLuminance_ = {},
+                                         float maxContentLightLevel_ = {},
+                                         float maxFrameAverageLightLevel_ = {},
+                                         const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , displayPrimaryRed{ displayPrimaryRed_ }
+      , displayPrimaryGreen{ displayPrimaryGreen_ }
+      , displayPrimaryBlue{ displayPrimaryBlue_ }
+      , whitePoint{ whitePoint_ }
+      , maxLuminance{ maxLuminance_ }
+      , minLuminance{ minLuminance_ }
+      , maxContentLightLevel{ maxContentLightLevel_ }
+      , maxFrameAverageLightLevel{ maxFrameAverageLightLevel_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT : HdrMetadataEXT( *reinterpret_cast<HdrMetadataEXT const *>( &rhs ) ) {}
+
+    HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    HdrMetadataEXT & operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HdrMetadataEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayPrimaryRed = displayPrimaryRed_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayPrimaryGreen = displayPrimaryGreen_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayPrimaryBlue = displayPrimaryBlue_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      whitePoint = whitePoint_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxLuminance = maxLuminance_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minLuminance = minLuminance_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxContentLightLevel = maxContentLightLevel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT &
setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT + { + maxFrameAverageLightLevel = maxFrameAverageLightLevel_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkHdrMetadataEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHdrMetadataEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkHdrMetadataEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + displayPrimaryRed, + displayPrimaryGreen, + displayPrimaryBlue, + whitePoint, + maxLuminance, + minLuminance, + maxContentLightLevel, + maxFrameAverageLightLevel ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HdrMetadataEXT const & ) const = default; +#else + bool operator==( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPrimaryRed == rhs.displayPrimaryRed ) && + ( displayPrimaryGreen == rhs.displayPrimaryGreen ) && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) && ( whitePoint == rhs.whitePoint ) && + ( maxLuminance == rhs.maxLuminance ) && ( minLuminance == rhs.minLuminance ) && ( maxContentLightLevel == rhs.maxContentLightLevel ) && + ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel ); +# endif + } + + bool operator!=( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {}; + float maxLuminance = {}; + float minLuminance = {}; + float maxContentLightLevel = {}; + float maxFrameAverageLightLevel = {}; + }; + + template <> + struct CppType + { + using Type = HdrMetadataEXT; + }; + + // wrapper struct for struct VkHdrVividDynamicMetadataHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkHdrVividDynamicMetadataHUAWEI.html + struct HdrVividDynamicMetadataHUAWEI + { + using NativeType = VkHdrVividDynamicMetadataHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrVividDynamicMetadataHUAWEI; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + HdrVividDynamicMetadataHUAWEI( size_t dynamicMetadataSize_ = {}, const void * pDynamicMetadata_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dynamicMetadataSize{ dynamicMetadataSize_ } + , pDynamicMetadata{ pDynamicMetadata_ } + { + } + + VULKAN_HPP_CONSTEXPR HdrVividDynamicMetadataHUAWEI( HdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HdrVividDynamicMetadataHUAWEI( VkHdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : HdrVividDynamicMetadataHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + +# if 
!defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + HdrVividDynamicMetadataHUAWEI( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicMetadata_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), dynamicMetadataSize( dynamicMetadata_.size() * sizeof( T ) ), pDynamicMetadata( dynamicMetadata_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + HdrVividDynamicMetadataHUAWEI & operator=( HdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + HdrVividDynamicMetadataHUAWEI & operator=( VkHdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setDynamicMetadataSize( size_t dynamicMetadataSize_ ) VULKAN_HPP_NOEXCEPT + { + dynamicMetadataSize = dynamicMetadataSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setPDynamicMetadata( const void * pDynamicMetadata_ ) VULKAN_HPP_NOEXCEPT + { + pDynamicMetadata = pDynamicMetadata_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + HdrVividDynamicMetadataHUAWEI & setDynamicMetadata( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicMetadata_ ) VULKAN_HPP_NOEXCEPT + { + dynamicMetadataSize = dynamicMetadata_.size() * sizeof( T ); + pDynamicMetadata = dynamicMetadata_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkHdrVividDynamicMetadataHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHdrVividDynamicMetadataHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHdrVividDynamicMetadataHUAWEI const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkHdrVividDynamicMetadataHUAWEI *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dynamicMetadataSize, pDynamicMetadata ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HdrVividDynamicMetadataHUAWEI const & ) const = default; +#else + bool operator==( HdrVividDynamicMetadataHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicMetadataSize == rhs.dynamicMetadataSize ) && + ( pDynamicMetadata == rhs.pDynamicMetadata ); +# endif + } + + bool operator!=( HdrVividDynamicMetadataHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrVividDynamicMetadataHUAWEI; + const void * pNext = {}; + size_t dynamicMetadataSize = {}; + const void * pDynamicMetadata = {}; + }; + + template <> + struct CppType + { + using Type = HdrVividDynamicMetadataHUAWEI; + }; + + // wrapper struct for struct VkHeadlessSurfaceCreateInfoEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkHeadlessSurfaceCreateInfoEXT.html + struct HeadlessSurfaceCreateInfoEXT + { + using NativeType = VkHeadlessSurfaceCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : HeadlessSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + HeadlessSurfaceCreateInfoEXT & operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + HeadlessSurfaceCreateInfoEXT & operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkHeadlessSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHeadlessSurfaceCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkHeadlessSurfaceCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HeadlessSurfaceCreateInfoEXT const & ) const = default; +#else + bool operator==( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {}; + }; + + template <> + struct CppType + { + using Type = HeadlessSurfaceCreateInfoEXT; + }; + + // wrapper struct for struct VkHostImageCopyDevicePerformanceQuery, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkHostImageCopyDevicePerformanceQuery.html + struct HostImageCopyDevicePerformanceQuery + { + using NativeType = 
VkHostImageCopyDevicePerformanceQuery; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHostImageCopyDevicePerformanceQuery; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQuery( VULKAN_HPP_NAMESPACE::Bool32 optimalDeviceAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryLayout_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , optimalDeviceAccess{ optimalDeviceAccess_ } + , identicalMemoryLayout{ identicalMemoryLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQuery( HostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HostImageCopyDevicePerformanceQuery( VkHostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT + : HostImageCopyDevicePerformanceQuery( *reinterpret_cast( &rhs ) ) + { + } + + HostImageCopyDevicePerformanceQuery & operator=( HostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + HostImageCopyDevicePerformanceQuery & operator=( VkHostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkHostImageCopyDevicePerformanceQuery const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHostImageCopyDevicePerformanceQuery &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHostImageCopyDevicePerformanceQuery const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkHostImageCopyDevicePerformanceQuery *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, optimalDeviceAccess, identicalMemoryLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HostImageCopyDevicePerformanceQuery const & ) const = default; +#else + bool operator==( HostImageCopyDevicePerformanceQuery const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( optimalDeviceAccess == rhs.optimalDeviceAccess ) && + ( identicalMemoryLayout == rhs.identicalMemoryLayout ); +# endif + } + + bool operator!=( HostImageCopyDevicePerformanceQuery const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHostImageCopyDevicePerformanceQuery; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 optimalDeviceAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryLayout = {}; + }; + + template <> + struct CppType + { + using Type = HostImageCopyDevicePerformanceQuery; + }; + + using HostImageCopyDevicePerformanceQueryEXT = HostImageCopyDevicePerformanceQuery; + + // wrapper struct for struct VkHostImageLayoutTransitionInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkHostImageLayoutTransitionInfo.html + struct HostImageLayoutTransitionInfo + { + using NativeType = VkHostImageLayoutTransitionInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType 
structureType = StructureType::eHostImageLayoutTransitionInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , image{ image_ } + , oldLayout{ oldLayout_ } + , newLayout{ newLayout_ } + , subresourceRange{ subresourceRange_ } + { + } + + VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfo( HostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HostImageLayoutTransitionInfo( VkHostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : HostImageLayoutTransitionInfo( *reinterpret_cast( &rhs ) ) + { + } + + HostImageLayoutTransitionInfo & operator=( HostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + HostImageLayoutTransitionInfo & operator=( VkHostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT + { + oldLayout = oldLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT + { + newLayout = newLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkHostImageLayoutTransitionInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHostImageLayoutTransitionInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHostImageLayoutTransitionInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkHostImageLayoutTransitionInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image, oldLayout, newLayout, subresourceRange ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HostImageLayoutTransitionInfo const & ) const = default; +#else + bool operator==( HostImageLayoutTransitionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( 
oldLayout == rhs.oldLayout ) && ( newLayout == rhs.newLayout ) && + ( subresourceRange == rhs.subresourceRange ); +# endif + } + + bool operator!=( HostImageLayoutTransitionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHostImageLayoutTransitionInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + }; + + template <> + struct CppType + { + using Type = HostImageLayoutTransitionInfo; + }; + + using HostImageLayoutTransitionInfoEXT = HostImageLayoutTransitionInfo; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + // wrapper struct for struct VkIOSSurfaceCreateInfoMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIOSSurfaceCreateInfoMVK.html + struct IOSSurfaceCreateInfoMVK + { + using NativeType = VkIOSSurfaceCreateInfoMVK; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIosSurfaceCreateInfoMVK; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {}, + const void * pView_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pView{ pView_ } + { + } + + VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + : IOSSurfaceCreateInfoMVK( *reinterpret_cast( &rhs ) ) + { + } + + IOSSurfaceCreateInfoMVK & operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + IOSSurfaceCreateInfoMVK & operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT + { + pView = pView_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkIOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIOSSurfaceCreateInfoMVK const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkIOSSurfaceCreateInfoMVK *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pView ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR 
) + auto operator<=>( IOSSurfaceCreateInfoMVK const & ) const = default; +# else + bool operator==( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView ); +# endif + } + + bool operator!=( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {}; + const void * pView = {}; + }; + + template <> + struct CppType + { + using Type = IOSSurfaceCreateInfoMVK; + }; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + + // wrapper struct for struct VkImageAlignmentControlCreateInfoMESA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageAlignmentControlCreateInfoMESA.html + struct ImageAlignmentControlCreateInfoMESA + { + using NativeType = VkImageAlignmentControlCreateInfoMESA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageAlignmentControlCreateInfoMESA; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageAlignmentControlCreateInfoMESA( uint32_t maximumRequestedAlignment_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maximumRequestedAlignment{ maximumRequestedAlignment_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageAlignmentControlCreateInfoMESA( ImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageAlignmentControlCreateInfoMESA( VkImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageAlignmentControlCreateInfoMESA( *reinterpret_cast( &rhs ) ) + { + } + + ImageAlignmentControlCreateInfoMESA & operator=( ImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageAlignmentControlCreateInfoMESA & operator=( VkImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageAlignmentControlCreateInfoMESA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageAlignmentControlCreateInfoMESA & setMaximumRequestedAlignment( uint32_t maximumRequestedAlignment_ ) VULKAN_HPP_NOEXCEPT + { + maximumRequestedAlignment = maximumRequestedAlignment_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageAlignmentControlCreateInfoMESA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageAlignmentControlCreateInfoMESA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageAlignmentControlCreateInfoMESA const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageAlignmentControlCreateInfoMESA *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, 
maximumRequestedAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageAlignmentControlCreateInfoMESA const & ) const = default; +#else + bool operator==( ImageAlignmentControlCreateInfoMESA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maximumRequestedAlignment == rhs.maximumRequestedAlignment ); +# endif + } + + bool operator!=( ImageAlignmentControlCreateInfoMESA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageAlignmentControlCreateInfoMESA; + const void * pNext = {}; + uint32_t maximumRequestedAlignment = {}; + }; + + template <> + struct CppType + { + using Type = ImageAlignmentControlCreateInfoMESA; + }; + + // wrapper struct for struct VkImageBlit, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageBlit.html + struct ImageBlit + { + using NativeType = VkImageBlit; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ImageBlit( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + std::array const & srcOffsets_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + std::array const & dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource{ srcSubresource_ } + , srcOffsets{ srcOffsets_ } + , dstSubresource{ dstSubresource_ } + , dstOffsets{ dstOffsets_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit( *reinterpret_cast( &rhs ) ) {} + + ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcOffsets( std::array const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT + { + srcOffsets = srcOffsets_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstOffsets( std::array const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT + { + dstOffsets = dstOffsets_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageBlit const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageBlit &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageBlit const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageBlit *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# 
endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcSubresource, srcOffsets, dstSubresource, dstOffsets ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageBlit const & ) const = default; +#else + bool operator==( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) && ( dstSubresource == rhs.dstSubresource ) && + ( dstOffsets == rhs.dstOffsets ); +# endif + } + + bool operator!=( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D srcOffsets = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D dstOffsets = {}; + }; + + // wrapper struct for struct VkImageCaptureDescriptorDataInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCaptureDescriptorDataInfoEXT.html + struct ImageCaptureDescriptorDataInfoEXT + { + using NativeType = VkImageCaptureDescriptorDataInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCaptureDescriptorDataInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , image{ image_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageCaptureDescriptorDataInfoEXT( ImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCaptureDescriptorDataInfoEXT( VkImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageCaptureDescriptorDataInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImageCaptureDescriptorDataInfoEXT & operator=( ImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageCaptureDescriptorDataInfoEXT & operator=( VkImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCaptureDescriptorDataInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCaptureDescriptorDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageCaptureDescriptorDataInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image ); + } +#endif + +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCaptureDescriptorDataInfoEXT const & ) const = default; +#else + bool operator==( ImageCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ); +# endif + } + + bool operator!=( ImageCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCaptureDescriptorDataInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + }; + + template <> + struct CppType + { + using Type = ImageCaptureDescriptorDataInfoEXT; + }; + + // wrapper struct for struct VkImageCompressionControlEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCompressionControlEXT.html + struct ImageCompressionControlEXT + { + using NativeType = VkImageCompressionControlEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCompressionControlEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCompressionControlEXT( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_ = {}, + uint32_t compressionControlPlaneCount_ = {}, + VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , compressionControlPlaneCount{ compressionControlPlaneCount_ } + , pFixedRateFlags{ pFixedRateFlags_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageCompressionControlEXT( ImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCompressionControlEXT( VkImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageCompressionControlEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageCompressionControlEXT( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fixedRateFlags_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , compressionControlPlaneCount( static_cast( fixedRateFlags_.size() ) ) + , pFixedRateFlags( fixedRateFlags_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ImageCompressionControlEXT & operator=( ImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageCompressionControlEXT & operator=( VkImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setFlags( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setCompressionControlPlaneCount( uint32_t compressionControlPlaneCount_ ) VULKAN_HPP_NOEXCEPT + { + compressionControlPlaneCount = compressionControlPlaneCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
ImageCompressionControlEXT & + setPFixedRateFlags( VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_ ) VULKAN_HPP_NOEXCEPT + { + pFixedRateFlags = pFixedRateFlags_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageCompressionControlEXT & setFixedRateFlags( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fixedRateFlags_ ) VULKAN_HPP_NOEXCEPT + { + compressionControlPlaneCount = static_cast( fixedRateFlags_.size() ); + pFixedRateFlags = fixedRateFlags_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageCompressionControlEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCompressionControlEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCompressionControlEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageCompressionControlEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, compressionControlPlaneCount, pFixedRateFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCompressionControlEXT const & ) const = default; +#else + bool operator==( ImageCompressionControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( compressionControlPlaneCount == rhs.compressionControlPlaneCount ) && ( pFixedRateFlags == rhs.pFixedRateFlags ); +# endif + } + + bool operator!=( ImageCompressionControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCompressionControlEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags = {}; + uint32_t compressionControlPlaneCount = {}; + VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags = {}; + }; + + template <> + struct CppType + { + using Type = ImageCompressionControlEXT; + }; + + // wrapper struct for struct VkImageCompressionPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCompressionPropertiesEXT.html + struct ImageCompressionPropertiesEXT + { + using NativeType = VkImageCompressionPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCompressionPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCompressionPropertiesEXT( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT imageCompressionFlags_ = {}, + VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageCompressionFlags{ imageCompressionFlags_ } + , imageCompressionFixedRateFlags{ imageCompressionFixedRateFlags_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageCompressionPropertiesEXT( ImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCompressionPropertiesEXT( 
VkImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageCompressionPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImageCompressionPropertiesEXT & operator=( ImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageCompressionPropertiesEXT & operator=( VkImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkImageCompressionPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCompressionPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCompressionPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageCompressionPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageCompressionFlags, imageCompressionFixedRateFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCompressionPropertiesEXT const & ) const = default; +#else + bool operator==( ImageCompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionFlags == rhs.imageCompressionFlags ) && + ( imageCompressionFixedRateFlags == rhs.imageCompressionFixedRateFlags ); +# endif + } + + bool operator!=( ImageCompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCompressionPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT imageCompressionFlags = {}; + VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags = {}; + }; + + template <> + struct CppType + { + using Type = ImageCompressionPropertiesEXT; + }; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + // wrapper struct for struct VkImageFormatConstraintsInfoFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageFormatConstraintsInfoFUCHSIA.html + struct ImageFormatConstraintsInfoFUCHSIA + { + using NativeType = VkImageFormatConstraintsInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatConstraintsInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ = {}, + uint64_t sysmemPixelFormat_ = {}, + uint32_t colorSpaceCount_ = {}, + const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageCreateInfo{ imageCreateInfo_ } + , requiredFormatFeatures{ requiredFormatFeatures_ } + , flags{ flags_ } + , sysmemPixelFormat{ sysmemPixelFormat_ } + , colorSpaceCount{ colorSpaceCount_ } + , pColorSpaces{ 
pColorSpaces_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatConstraintsInfoFUCHSIA( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageFormatConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageFormatConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_, + VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_, + uint64_t sysmemPixelFormat_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorSpaces_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , imageCreateInfo( imageCreateInfo_ ) + , requiredFormatFeatures( requiredFormatFeatures_ ) + , flags( flags_ ) + , sysmemPixelFormat( sysmemPixelFormat_ ) + , colorSpaceCount( static_cast( colorSpaces_.size() ) ) + , pColorSpaces( colorSpaces_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ImageFormatConstraintsInfoFUCHSIA & operator=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageFormatConstraintsInfoFUCHSIA & operator=( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & + setImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & imageCreateInfo_ ) VULKAN_HPP_NOEXCEPT + { + imageCreateInfo = imageCreateInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & + setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT + { + requiredFormatFeatures = requiredFormatFeatures_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) VULKAN_HPP_NOEXCEPT + { + sysmemPixelFormat = sysmemPixelFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setColorSpaceCount( uint32_t colorSpaceCount_ ) VULKAN_HPP_NOEXCEPT + { + colorSpaceCount = colorSpaceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & + setPColorSpaces( const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ ) VULKAN_HPP_NOEXCEPT + { + pColorSpaces = pColorSpaces_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageFormatConstraintsInfoFUCHSIA & setColorSpaces( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorSpaces_ ) VULKAN_HPP_NOEXCEPT + { + colorSpaceCount = static_cast( colorSpaces_.size() ); + pColorSpaces = colorSpaces_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageFormatConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkImageFormatConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageFormatConstraintsInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageFormatConstraintsInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageCreateInfo, requiredFormatFeatures, flags, sysmemPixelFormat, colorSpaceCount, pColorSpaces ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatConstraintsInfoFUCHSIA const & ) const = default; +# else + bool operator==( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCreateInfo == rhs.imageCreateInfo ) && + ( requiredFormatFeatures == rhs.requiredFormatFeatures ) && ( flags == rhs.flags ) && ( sysmemPixelFormat == rhs.sysmemPixelFormat ) && + ( colorSpaceCount == rhs.colorSpaceCount ) && ( pColorSpaces == rhs.pColorSpaces ); +# endif + } + + bool operator!=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatConstraintsInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures = {}; + VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags = {}; + uint64_t sysmemPixelFormat = {}; + uint32_t colorSpaceCount = {}; + const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces = {}; + }; + + template <> + struct CppType + { + using Type = ImageFormatConstraintsInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + // wrapper struct for struct VkImageConstraintsInfoFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageConstraintsInfoFUCHSIA.html + struct ImageConstraintsInfoFUCHSIA + { + using NativeType = VkImageConstraintsInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageConstraintsInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA( uint32_t formatConstraintsCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ = {}, + VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, + VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , formatConstraintsCount{ formatConstraintsCount_ } + , pFormatConstraints{ pFormatConstraints_ } + , bufferCollectionConstraints{ bufferCollectionConstraints_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageConstraintsInfoFUCHSIA( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + +# if 
!defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageConstraintsInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & formatConstraints_, + VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, + VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , formatConstraintsCount( static_cast( formatConstraints_.size() ) ) + , pFormatConstraints( formatConstraints_.data() ) + , bufferCollectionConstraints( bufferCollectionConstraints_ ) + , flags( flags_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ImageConstraintsInfoFUCHSIA & operator=( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageConstraintsInfoFUCHSIA & operator=( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFormatConstraintsCount( uint32_t formatConstraintsCount_ ) VULKAN_HPP_NOEXCEPT + { + formatConstraintsCount = formatConstraintsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & + setPFormatConstraints( const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ ) VULKAN_HPP_NOEXCEPT + { + pFormatConstraints = pFormatConstraints_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageConstraintsInfoFUCHSIA & setFormatConstraints( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & formatConstraints_ ) + VULKAN_HPP_NOEXCEPT + { + formatConstraintsCount = static_cast( formatConstraints_.size() ); + pFormatConstraints = formatConstraints_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & + setBufferCollectionConstraints( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) VULKAN_HPP_NOEXCEPT + { + bufferCollectionConstraints = bufferCollectionConstraints_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageConstraintsInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageConstraintsInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, formatConstraintsCount, pFormatConstraints, bufferCollectionConstraints, flags ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageConstraintsInfoFUCHSIA const & ) const = default; +# else + bool operator==( ImageConstraintsInfoFUCHSIA const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatConstraintsCount == rhs.formatConstraintsCount ) && + ( pFormatConstraints == rhs.pFormatConstraints ) && ( bufferCollectionConstraints == rhs.bufferCollectionConstraints ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageConstraintsInfoFUCHSIA; + const void * pNext = {}; + uint32_t formatConstraintsCount = {}; + const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {}; + VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags = {}; + }; + + template <> + struct CppType + { + using Type = ImageConstraintsInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + // wrapper struct for struct VkImageCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCopy.html + struct ImageCopy + { + using NativeType = VkImageCopy; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCopy( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource{ srcSubresource_ } + , srcOffset{ srcOffset_ } + , dstSubresource{ dstSubresource_ } + , dstOffset{ dstOffset_ } + , extent{ extent_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy( *reinterpret_cast( &rhs ) ) {} + + ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageCopy const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCopy &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( 
this ); + } + + operator VkImageCopy const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageCopy *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCopy const & ) const = default; +#else + bool operator==( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) && + ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); +# endif + } + + bool operator!=( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + }; + + // wrapper struct for struct VkSubresourceLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubresourceLayout.html + struct SubresourceLayout + { + using NativeType = VkSubresourceLayout; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubresourceLayout( VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {} ) VULKAN_HPP_NOEXCEPT + : offset{ offset_ } + , size{ size_ } + , rowPitch{ rowPitch_ } + , arrayPitch{ arrayPitch_ } + , depthPitch{ depthPitch_ } + { + } + + VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT : SubresourceLayout( *reinterpret_cast( &rhs ) ) {} + + SubresourceLayout & operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubresourceLayout & operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setRowPitch( VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ ) VULKAN_HPP_NOEXCEPT + { + rowPitch = rowPitch_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setArrayPitch( VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ ) VULKAN_HPP_NOEXCEPT + { + arrayPitch = arrayPitch_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setDepthPitch( VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ ) VULKAN_HPP_NOEXCEPT + { + depthPitch = 
depthPitch_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSubresourceLayout const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubresourceLayout const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSubresourceLayout *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( offset, size, rowPitch, arrayPitch, depthPitch ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubresourceLayout const & ) const = default; +#else + bool operator==( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( offset == rhs.offset ) && ( size == rhs.size ) && ( rowPitch == rhs.rowPitch ) && ( arrayPitch == rhs.arrayPitch ) && + ( depthPitch == rhs.depthPitch ); +# endif + } + + bool operator!=( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {}; + VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {}; + VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {}; + }; + + // wrapper struct for struct VkImageDrmFormatModifierExplicitCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageDrmFormatModifierExplicitCreateInfoEXT.html + struct ImageDrmFormatModifierExplicitCreateInfoEXT + { + using NativeType = VkImageDrmFormatModifierExplicitCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = {}, + uint32_t drmFormatModifierPlaneCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , drmFormatModifier{ drmFormatModifier_ } + , drmFormatModifierPlaneCount{ drmFormatModifierPlaneCount_ } + , pPlaneLayouts{ pPlaneLayouts_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageDrmFormatModifierExplicitCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageDrmFormatModifierExplicitCreateInfoEXT( + uint64_t drmFormatModifier_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & planeLayouts_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , drmFormatModifier( drmFormatModifier_ ) + , drmFormatModifierPlaneCount( static_cast( planeLayouts_.size() ) ) + , pPlaneLayouts( planeLayouts_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( 
ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifier = drmFormatModifier_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & + setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & + setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pPlaneLayouts = pPlaneLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageDrmFormatModifierExplicitCreateInfoEXT & + setPlaneLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & planeLayouts_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierPlaneCount = static_cast( planeLayouts_.size() ); + pPlaneLayouts = planeLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageDrmFormatModifierExplicitCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageDrmFormatModifierExplicitCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageDrmFormatModifierExplicitCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, drmFormatModifier, drmFormatModifierPlaneCount, pPlaneLayouts ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const & ) const = default; +#else + bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && + ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && ( pPlaneLayouts == rhs.pPlaneLayouts ); +# endif + } + + bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; + const void * pNext = {}; + uint64_t drmFormatModifier = {}; + uint32_t drmFormatModifierPlaneCount = {}; + const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts = {}; + }; + + template <> + 
struct CppType + { + using Type = ImageDrmFormatModifierExplicitCreateInfoEXT; + }; + + // wrapper struct for struct VkImageDrmFormatModifierListCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageDrmFormatModifierListCreateInfoEXT.html + struct ImageDrmFormatModifierListCreateInfoEXT + { + using NativeType = VkImageDrmFormatModifierListCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = {}, + const uint64_t * pDrmFormatModifiers_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , drmFormatModifierCount{ drmFormatModifierCount_ } + , pDrmFormatModifiers{ pDrmFormatModifiers_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageDrmFormatModifierListCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageDrmFormatModifierListCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifiers_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), drmFormatModifierCount( static_cast( drmFormatModifiers_.size() ) ), pDrmFormatModifiers( drmFormatModifiers_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ImageDrmFormatModifierListCreateInfoEXT & operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageDrmFormatModifierListCreateInfoEXT & operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierCount = drmFormatModifierCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t * pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT + { + pDrmFormatModifiers = pDrmFormatModifiers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageDrmFormatModifierListCreateInfoEXT & + setDrmFormatModifiers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierCount = static_cast( drmFormatModifiers_.size() ); + pDrmFormatModifiers = drmFormatModifiers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageDrmFormatModifierListCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkImageDrmFormatModifierListCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageDrmFormatModifierListCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifiers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const & ) const = default; +#else + bool operator==( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && + ( pDrmFormatModifiers == rhs.pDrmFormatModifiers ); +# endif + } + + bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; + const void * pNext = {}; + uint32_t drmFormatModifierCount = {}; + const uint64_t * pDrmFormatModifiers = {}; + }; + + template <> + struct CppType + { + using Type = ImageDrmFormatModifierListCreateInfoEXT; + }; + + // wrapper struct for struct VkImageDrmFormatModifierPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageDrmFormatModifierPropertiesEXT.html + struct ImageDrmFormatModifierPropertiesEXT + { + using NativeType = VkImageDrmFormatModifierPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , drmFormatModifier{ drmFormatModifier_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageDrmFormatModifierPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImageDrmFormatModifierPropertiesEXT & operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageDrmFormatModifierPropertiesEXT & operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkImageDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageDrmFormatModifierPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageDrmFormatModifierPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple 
+# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, drmFormatModifier ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageDrmFormatModifierPropertiesEXT const & ) const = default; +#else + bool operator==( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ); +# endif + } + + bool operator!=( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT; + void * pNext = {}; + uint64_t drmFormatModifier = {}; + }; + + template <> + struct CppType + { + using Type = ImageDrmFormatModifierPropertiesEXT; + }; + + // wrapper struct for struct VkImageFormatListCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageFormatListCreateInfo.html + struct ImageFormatListCreateInfo + { + using NativeType = VkImageFormatListCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( uint32_t viewFormatCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , viewFormatCount{ viewFormatCount_ } + , pViewFormats{ pViewFormats_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageFormatListCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageFormatListCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), viewFormatCount( static_cast( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ImageFormatListCreateInfo & operator=( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageFormatListCreateInfo & operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT + { + viewFormatCount = viewFormatCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT + { + pViewFormats = pViewFormats_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageFormatListCreateInfo & + setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) VULKAN_HPP_NOEXCEPT + { + 
viewFormatCount = static_cast( viewFormats_.size() ); + pViewFormats = viewFormats_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageFormatListCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageFormatListCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageFormatListCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, viewFormatCount, pViewFormats ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatListCreateInfo const & ) const = default; +#else + bool operator==( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats ); +# endif + } + + bool operator!=( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo; + const void * pNext = {}; + uint32_t viewFormatCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {}; + }; + + template <> + struct CppType + { + using Type = ImageFormatListCreateInfo; + }; + + using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo; + + // wrapper struct for struct VkImageFormatProperties2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageFormatProperties2.html + struct ImageFormatProperties2 + { + using NativeType = VkImageFormatProperties2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageFormatProperties2( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageFormatProperties{ imageFormatProperties_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageFormatProperties2( *reinterpret_cast( &rhs ) ) + { + } + + ImageFormatProperties2 & operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageFormatProperties2 & operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageFormatProperties2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageFormatProperties2 *() 
VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageFormatProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatProperties2 const & ) const = default; +#else + bool operator==( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFormatProperties == rhs.imageFormatProperties ); +# endif + } + + bool operator!=( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; + }; + + template <> + struct CppType + { + using Type = ImageFormatProperties2; + }; + + using ImageFormatProperties2KHR = ImageFormatProperties2; + + // wrapper struct for struct VkImageMemoryBarrier, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageMemoryBarrier.html + struct ImageMemoryBarrier + { + using NativeType = VkImageMemoryBarrier; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcAccessMask{ srcAccessMask_ } + , dstAccessMask{ dstAccessMask_ } + , oldLayout{ oldLayout_ } + , newLayout{ newLayout_ } + , srcQueueFamilyIndex{ srcQueueFamilyIndex_ } + , dstQueueFamilyIndex{ dstQueueFamilyIndex_ } + , image{ image_ } + , subresourceRange{ subresourceRange_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : ImageMemoryBarrier( *reinterpret_cast( &rhs ) ) {} + + ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT + { + oldLayout = oldLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT + { + newLayout = newLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageMemoryBarrier const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageMemoryBarrier *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcAccessMask, dstAccessMask, oldLayout, newLayout, srcQueueFamilyIndex, dstQueueFamilyIndex, image, subresourceRange ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageMemoryBarrier const & ) const = default; +#else + bool operator==( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) && + ( oldLayout == rhs.oldLayout ) && ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && + ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == rhs.image ) && ( subresourceRange == rhs.subresourceRange ); +# endif + } + + bool operator!=( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + }; + + template <> + 
struct CppType + { + using Type = ImageMemoryBarrier; + }; + + // wrapper struct for struct VkImageMemoryRequirementsInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageMemoryRequirementsInfo2.html + struct ImageMemoryRequirementsInfo2 + { + using NativeType = VkImageMemoryRequirementsInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , image{ image_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) ) + { + } + + ImageMemoryRequirementsInfo2 & operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageMemoryRequirementsInfo2 & operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageMemoryRequirementsInfo2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageMemoryRequirementsInfo2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageMemoryRequirementsInfo2 const & ) const = default; +#else + bool operator==( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ); +# endif + } + + bool operator!=( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + }; + + template <> + struct CppType + { + using Type = ImageMemoryRequirementsInfo2; + }; + + using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + // wrapper struct for struct VkImagePipeSurfaceCreateInfoFUCHSIA, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkImagePipeSurfaceCreateInfoFUCHSIA.html + struct ImagePipeSurfaceCreateInfoFUCHSIA + { + using NativeType = VkImagePipeSurfaceCreateInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, + zx_handle_t imagePipeHandle_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , imagePipeHandle{ imagePipeHandle_ } + { + } + + VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImagePipeSurfaceCreateInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + ImagePipeSurfaceCreateInfoFUCHSIA & operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT + { + imagePipeHandle = imagePipeHandle_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImagePipeSurfaceCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImagePipeSurfaceCreateInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImagePipeSurfaceCreateInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, imagePipeHandle ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ) == 0 ); + } + + bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {}; + zx_handle_t imagePipeHandle = {}; + }; + + template <> + struct CppType + { + using Type = ImagePipeSurfaceCreateInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + // wrapper struct for struct VkImagePlaneMemoryRequirementsInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImagePlaneMemoryRequirementsInfo.html + struct ImagePlaneMemoryRequirementsInfo + { + using NativeType = VkImagePlaneMemoryRequirementsInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImagePlaneMemoryRequirementsInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , planeAspect{ planeAspect_ } + { + } + + VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImagePlaneMemoryRequirementsInfo( *reinterpret_cast( &rhs ) ) + { + } + + ImagePlaneMemoryRequirementsInfo & operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT + { + planeAspect = planeAspect_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImagePlaneMemoryRequirementsInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImagePlaneMemoryRequirementsInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImagePlaneMemoryRequirementsInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, planeAspect 
);
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImagePlaneMemoryRequirementsInfo const & ) const = default;
+#else
+    bool operator==( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect );
+# endif
+    }
+
+    bool operator!=( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType       sType       = StructureType::eImagePlaneMemoryRequirementsInfo;
+    const void *                              pNext       = {};
+    VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eImagePlaneMemoryRequirementsInfo>
+  {
+    using Type = ImagePlaneMemoryRequirementsInfo;
+  };
+
+  using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo;
+
+  // wrapper struct for struct VkImageResolve, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageResolve.html
+  struct ImageResolve
+  {
+    using NativeType = VkImageResolve;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageResolve( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
+                                       VULKAN_HPP_NAMESPACE::Offset3D               srcOffset_      = {},
+                                       VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
+                                       VULKAN_HPP_NAMESPACE::Offset3D               dstOffset_      = {},
+                                       VULKAN_HPP_NAMESPACE::Extent3D               extent_         = {} ) VULKAN_HPP_NOEXCEPT
+      : srcSubresource{ srcSubresource_ }
+      , srcOffset{ srcOffset_ }
+      , dstSubresource{ dstSubresource_ }
+      , dstOffset{ dstOffset_ }
+      , extent{ extent_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT : ImageResolve( *reinterpret_cast<ImageResolve const *>( &rhs ) ) {}
+
+    ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkImageResolve const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageResolve *>( this );
+    }
+
+    operator VkImageResolve &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageResolve *>( this );
+    }
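+
+    // A minimal usage sketch for this wrapper, kept as a comment so it cannot affect the
+    // header itself. It assumes a recording vk::CommandBuffer `cmd`, a multisampled color
+    // image `msaaImage` already in eTransferSrcOptimal layout, a single-sample image
+    // `resolveImage` in eTransferDstOptimal layout, and `width`/`height` values; all of
+    // these names are illustrative, not part of this header.
+    //
+    //   vk::ImageResolve region{};
+    //   region.setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
+    //         .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
+    //         .setExtent( { width, height, 1 } );
+    //   cmd.resolveImage( msaaImage, vk::ImageLayout::eTransferSrcOptimal,
+    //                     resolveImage, vk::ImageLayout::eTransferDstOptimal, region );
+    //
+    // The setters return *this, so they can be chained as shown; the public fields below
+    // can also be assigned directly instead.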
+ + operator VkImageResolve const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageResolve *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageResolve const & ) const = default; +#else + bool operator==( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) && + ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); +# endif + } + + bool operator!=( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + }; + + // wrapper struct for struct VkImageResolve2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageResolve2.html + struct ImageResolve2 + { + using NativeType = VkImageResolve2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageResolve2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcSubresource{ srcSubresource_ } + , srcOffset{ srcOffset_ } + , dstSubresource{ dstSubresource_ } + , dstOffset{ dstOffset_ } + , extent{ extent_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageResolve2( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageResolve2( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageResolve2( *reinterpret_cast( &rhs ) ) {} + + ImageResolve2 & operator=( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageResolve2 & operator=( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstSubresource( 
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageResolve2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageResolve2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageResolve2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageResolve2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageResolve2 const & ) const = default; +#else + bool operator==( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); +# endif + } + + bool operator!=( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + }; + + template <> + struct CppType + { + using Type = ImageResolve2; + }; + + using ImageResolve2KHR = ImageResolve2; + + // wrapper struct for struct VkImageSparseMemoryRequirementsInfo2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSparseMemoryRequirementsInfo2.html + struct ImageSparseMemoryRequirementsInfo2 + { + using NativeType = VkImageSparseMemoryRequirementsInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSparseMemoryRequirementsInfo2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , image{ image_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSparseMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) ) + { + } + + ImageSparseMemoryRequirementsInfo2 & operator=( ImageSparseMemoryRequirementsInfo2 
const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageSparseMemoryRequirementsInfo2 & operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageSparseMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSparseMemoryRequirementsInfo2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageSparseMemoryRequirementsInfo2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSparseMemoryRequirementsInfo2 const & ) const = default; +#else + bool operator==( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ); +# endif + } + + bool operator!=( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + }; + + template <> + struct CppType + { + using Type = ImageSparseMemoryRequirementsInfo2; + }; + + using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2; + + // wrapper struct for struct VkImageStencilUsageCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageStencilUsageCreateInfo.html + struct ImageStencilUsageCreateInfo + { + using NativeType = VkImageStencilUsageCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stencilUsage{ stencilUsage_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageStencilUsageCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + ImageStencilUsageCreateInfo & operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + 
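+
+    // A minimal usage sketch (comment only), assuming a vk::ImageCreateInfo named
+    // `imageCreateInfo` for a depth/stencil image is being filled in elsewhere; the
+    // variable names are illustrative, not part of this header:
+    //
+    //   vk::ImageStencilUsageCreateInfo stencilUsageInfo{};
+    //   stencilUsageInfo.setStencilUsage( vk::ImageUsageFlagBits::eDepthStencilAttachment );
+    //   imageCreateInfo.setPNext( &stencilUsageInfo );
+    //
+    // The same pNext chaining can alternatively be expressed with
+    // vk::StructureChain<vk::ImageCreateInfo, vk::ImageStencilUsageCreateInfo> when the
+    // whole chain is known up front.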
+    ImageStencilUsageCreateInfo & operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilUsage = stencilUsage_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkImageStencilUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageStencilUsageCreateInfo *>( this );
+    }
+
+    operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageStencilUsageCreateInfo *>( this );
+    }
+
+    operator VkImageStencilUsageCreateInfo const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkImageStencilUsageCreateInfo *>( this );
+    }
+
+    operator VkImageStencilUsageCreateInfo *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkImageStencilUsageCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stencilUsage );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageStencilUsageCreateInfo const & ) const = default;
+#else
+    bool operator==( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilUsage == rhs.stencilUsage );
+# endif
+    }
+
+    bool operator!=( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType   sType        = StructureType::eImageStencilUsageCreateInfo;
+    const void *                          pNext        = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageStencilUsageCreateInfo>
+  {
+    using Type = ImageStencilUsageCreateInfo;
+  };
+
+  using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo;
+
+  // wrapper struct for struct VkImageSwapchainCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSwapchainCreateInfoKHR.html
+  struct ImageSwapchainCreateInfoKHR
+  {
+    using NativeType = VkImageSwapchainCreateInfoKHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , swapchain{ swapchain_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageSwapchainCreateInfoKHR( *reinterpret_cast<ImageSwapchainCreateInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    ImageSwapchainCreateInfoKHR & operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    ImageSwapchainCreateInfoKHR & operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const *>( &rhs
); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT + { + swapchain = swapchain_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSwapchainCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageSwapchainCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchain ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSwapchainCreateInfoKHR const & ) const = default; +#else + bool operator==( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ); +# endif + } + + bool operator!=( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + }; + + template <> + struct CppType + { + using Type = ImageSwapchainCreateInfoKHR; + }; + + // wrapper struct for struct VkImageViewASTCDecodeModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewASTCDecodeModeEXT.html + struct ImageViewASTCDecodeModeEXT + { + using NativeType = VkImageViewASTCDecodeModeEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , decodeMode{ decodeMode_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewASTCDecodeModeEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImageViewASTCDecodeModeEXT & operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageViewASTCDecodeModeEXT & operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + 
pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT + { + decodeMode = decodeMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageViewASTCDecodeModeEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewASTCDecodeModeEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageViewASTCDecodeModeEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, decodeMode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewASTCDecodeModeEXT const & ) const = default; +#else + bool operator==( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeMode == rhs.decodeMode ); +# endif + } + + bool operator!=( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + + template <> + struct CppType + { + using Type = ImageViewASTCDecodeModeEXT; + }; + + // wrapper struct for struct VkImageViewAddressPropertiesNVX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewAddressPropertiesNVX.html + struct ImageViewAddressPropertiesNVX + { + using NativeType = VkImageViewAddressPropertiesNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAddressPropertiesNVX; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceAddress{ deviceAddress_ } + , size{ size_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewAddressPropertiesNVX( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewAddressPropertiesNVX( *reinterpret_cast( &rhs ) ) + { + } + + ImageViewAddressPropertiesNVX & operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageViewAddressPropertiesNVX & operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkImageViewAddressPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewAddressPropertiesNVX const *() const 
VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageViewAddressPropertiesNVX *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceAddress, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewAddressPropertiesNVX const & ) const = default; +#else + bool operator==( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) && ( size == rhs.size ); +# endif + } + + bool operator!=( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAddressPropertiesNVX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + template <> + struct CppType + { + using Type = ImageViewAddressPropertiesNVX; + }; + + // wrapper struct for struct VkImageViewCaptureDescriptorDataInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewCaptureDescriptorDataInfoEXT.html + struct ImageViewCaptureDescriptorDataInfoEXT + { + using NativeType = VkImageViewCaptureDescriptorDataInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCaptureDescriptorDataInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageView{ imageView_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageViewCaptureDescriptorDataInfoEXT( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewCaptureDescriptorDataInfoEXT( VkImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewCaptureDescriptorDataInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImageViewCaptureDescriptorDataInfoEXT & operator=( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageViewCaptureDescriptorDataInfoEXT & operator=( VkImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCaptureDescriptorDataInfoEXT & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + { + imageView = imageView_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageViewCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + 
operator VkImageViewCaptureDescriptorDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageViewCaptureDescriptorDataInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageView ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewCaptureDescriptorDataInfoEXT const & ) const = default; +#else + bool operator==( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ); +# endif + } + + bool operator!=( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCaptureDescriptorDataInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + }; + + template <> + struct CppType + { + using Type = ImageViewCaptureDescriptorDataInfoEXT; + }; + + // wrapper struct for struct VkImageViewCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewCreateInfo.html + struct ImageViewCreateInfo + { + using NativeType = VkImageViewCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , image{ image_ } + , viewType{ viewType_ } + , format{ format_ } + , components{ components_ } + , subresourceRange{ subresourceRange_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageViewCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) 
VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT + { + viewType = viewType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT + { + components = components_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageViewCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, image, viewType, format, components, subresourceRange ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewCreateInfo const & ) const = default; +#else + bool operator==( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( image == rhs.image ) && ( viewType == rhs.viewType ) && + ( format == rhs.format ) && ( components == rhs.components ) && ( subresourceRange == rhs.subresourceRange ); +# endif + } + + bool operator!=( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + }; + + template <> + struct CppType + { + using Type = ImageViewCreateInfo; + }; + + // wrapper struct for struct VkImageViewHandleInfoNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewHandleInfoNVX.html + struct ImageViewHandleInfoNVX + { + using NativeType = VkImageViewHandleInfoNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewHandleInfoNVX; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + 
VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageView{ imageView_ } + , descriptorType{ descriptorType_ } + , sampler{ sampler_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewHandleInfoNVX( *reinterpret_cast( &rhs ) ) + { + } + + ImageViewHandleInfoNVX & operator=( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageViewHandleInfoNVX & operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + { + imageView = imageView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT + { + sampler = sampler_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageViewHandleInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewHandleInfoNVX const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageViewHandleInfoNVX *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageView, descriptorType, sampler ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewHandleInfoNVX const & ) const = default; +#else + bool operator==( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && ( descriptorType == rhs.descriptorType ) && + ( sampler == rhs.sampler ); +# endif + } + + bool operator!=( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + VULKAN_HPP_NAMESPACE::Sampler sampler = {}; + }; + + template <> + struct CppType + { + using Type = ImageViewHandleInfoNVX; + }; + + // wrapper struct for struct VkImageViewMinLodCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewMinLodCreateInfoEXT.html + struct ImageViewMinLodCreateInfoEXT + { + using NativeType = 
VkImageViewMinLodCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewMinLodCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( float minLod_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minLod{ minLod_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewMinLodCreateInfoEXT( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewMinLodCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImageViewMinLodCreateInfoEXT & operator=( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageViewMinLodCreateInfoEXT & operator=( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT + { + minLod = minLod_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageViewMinLodCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewMinLodCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewMinLodCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageViewMinLodCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minLod ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewMinLodCreateInfoEXT const & ) const = default; +#else + bool operator==( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minLod == rhs.minLod ); +# endif + } + + bool operator!=( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewMinLodCreateInfoEXT; + const void * pNext = {}; + float minLod = {}; + }; + + template <> + struct CppType + { + using Type = ImageViewMinLodCreateInfoEXT; + }; + + // wrapper struct for struct VkImageViewSampleWeightCreateInfoQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewSampleWeightCreateInfoQCOM.html + struct ImageViewSampleWeightCreateInfoQCOM + { + using NativeType = VkImageViewSampleWeightCreateInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewSampleWeightCreateInfoQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
VULKAN_HPP_CONSTEXPR ImageViewSampleWeightCreateInfoQCOM( VULKAN_HPP_NAMESPACE::Offset2D filterCenter_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D filterSize_ = {}, + uint32_t numPhases_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , filterCenter{ filterCenter_ } + , filterSize{ filterSize_ } + , numPhases{ numPhases_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageViewSampleWeightCreateInfoQCOM( ImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewSampleWeightCreateInfoQCOM( VkImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewSampleWeightCreateInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + + ImageViewSampleWeightCreateInfoQCOM & operator=( ImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageViewSampleWeightCreateInfoQCOM & operator=( VkImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setFilterCenter( VULKAN_HPP_NAMESPACE::Offset2D const & filterCenter_ ) VULKAN_HPP_NOEXCEPT + { + filterCenter = filterCenter_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setFilterSize( VULKAN_HPP_NAMESPACE::Extent2D const & filterSize_ ) VULKAN_HPP_NOEXCEPT + { + filterSize = filterSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setNumPhases( uint32_t numPhases_ ) VULKAN_HPP_NOEXCEPT + { + numPhases = numPhases_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageViewSampleWeightCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewSampleWeightCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewSampleWeightCreateInfoQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageViewSampleWeightCreateInfoQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, filterCenter, filterSize, numPhases ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewSampleWeightCreateInfoQCOM const & ) const = default; +#else + bool operator==( ImageViewSampleWeightCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCenter == rhs.filterCenter ) && ( filterSize == rhs.filterSize ) && + ( numPhases == rhs.numPhases ); +# endif + } + + bool operator!=( ImageViewSampleWeightCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewSampleWeightCreateInfoQCOM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Offset2D filterCenter = {}; + VULKAN_HPP_NAMESPACE::Extent2D filterSize = 
{}; + uint32_t numPhases = {}; + }; + + template <> + struct CppType + { + using Type = ImageViewSampleWeightCreateInfoQCOM; + }; + + // wrapper struct for struct VkImageViewSlicedCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewSlicedCreateInfoEXT.html + struct ImageViewSlicedCreateInfoEXT + { + using NativeType = VkImageViewSlicedCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewSlicedCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageViewSlicedCreateInfoEXT( uint32_t sliceOffset_ = {}, uint32_t sliceCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , sliceOffset{ sliceOffset_ } + , sliceCount{ sliceCount_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageViewSlicedCreateInfoEXT( ImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewSlicedCreateInfoEXT( VkImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewSlicedCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImageViewSlicedCreateInfoEXT & operator=( ImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageViewSlicedCreateInfoEXT & operator=( VkImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT & setSliceOffset( uint32_t sliceOffset_ ) VULKAN_HPP_NOEXCEPT + { + sliceOffset = sliceOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT & setSliceCount( uint32_t sliceCount_ ) VULKAN_HPP_NOEXCEPT + { + sliceCount = sliceCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageViewSlicedCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewSlicedCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewSlicedCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageViewSlicedCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, sliceOffset, sliceCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewSlicedCreateInfoEXT const & ) const = default; +#else + bool operator==( ImageViewSlicedCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sliceOffset == rhs.sliceOffset ) && ( sliceCount == rhs.sliceCount ); +# endif + } + + bool operator!=( ImageViewSlicedCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewSlicedCreateInfoEXT; + const void * 
pNext = {}; + uint32_t sliceOffset = {}; + uint32_t sliceCount = {}; + }; + + template <> + struct CppType + { + using Type = ImageViewSlicedCreateInfoEXT; + }; + + // wrapper struct for struct VkImageViewUsageCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewUsageCreateInfo.html + struct ImageViewUsageCreateInfo + { + using NativeType = VkImageViewUsageCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , usage{ usage_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewUsageCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + ImageViewUsageCreateInfo & operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageViewUsageCreateInfo & operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageViewUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewUsageCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImageViewUsageCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, usage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewUsageCreateInfo const & ) const = default; +#else + bool operator==( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage ); +# endif + } + + bool operator!=( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + }; + + template <> + struct CppType + { + using Type = ImageViewUsageCreateInfo; + }; + + using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + // wrapper struct for struct VkImportAndroidHardwareBufferInfoANDROID, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportAndroidHardwareBufferInfoANDROID.html + struct ImportAndroidHardwareBufferInfoANDROID + { + using NativeType = VkImportAndroidHardwareBufferInfoANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportAndroidHardwareBufferInfoANDROID; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer * buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , buffer{ buffer_ } + { + } + + VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportAndroidHardwareBufferInfoANDROID( *reinterpret_cast( &rhs ) ) + { + } + + ImportAndroidHardwareBufferInfoANDROID & operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImportAndroidHardwareBufferInfoANDROID & operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer * buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportAndroidHardwareBufferInfoANDROID const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImportAndroidHardwareBufferInfoANDROID *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, buffer ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const & ) const = default; +# else + bool operator==( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); +# endif + } + + bool operator!=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID; + const void * pNext = {}; + struct AHardwareBuffer * buffer = {}; + }; + + template <> + struct CppType + { + using Type = ImportAndroidHardwareBufferInfoANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + // wrapper struct for struct 
VkImportFenceFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportFenceFdInfoKHR.html + struct ImportFenceFdInfoKHR + { + using NativeType = VkImportFenceFdInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( + VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + int fd_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fence{ fence_ } + , flags{ flags_ } + , handleType{ handleType_ } + , fd{ fd_ } + { + } + + VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportFenceFdInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + ImportFenceFdInfoKHR & operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImportFenceFdInfoKHR & operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT + { + fd = fd_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportFenceFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportFenceFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImportFenceFdInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fence, flags, handleType, fd ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportFenceFdInfoKHR const & ) const = default; +#else + bool operator==( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && + ( fd == rhs.fd ); +# endif + } + + bool 
operator!=( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + int fd = {}; + }; + + template <> + struct CppType + { + using Type = ImportFenceFdInfoKHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + // wrapper struct for struct VkImportFenceWin32HandleInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportFenceWin32HandleInfoKHR.html + struct ImportFenceWin32HandleInfoKHR + { + using NativeType = VkImportFenceWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fence{ fence_ } + , flags{ flags_ } + , handleType{ handleType_ } + , handle{ handle_ } + , name{ name_ } + { + } + + VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportFenceWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + ImportFenceWin32HandleInfoKHR & operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImportFenceWin32HandleInfoKHR & operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this 
); + } + + operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportFenceWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImportFenceWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fence, flags, handleType, handle, name ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportFenceWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && + ( handle == rhs.handle ) && ( name == rhs.name ); +# endif + } + + bool operator!=( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; + }; + + template <> + struct CppType + { + using Type = ImportFenceWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + // wrapper struct for struct VkImportMemoryBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryBufferCollectionFUCHSIA.html + struct ImportMemoryBufferCollectionFUCHSIA + { + using NativeType = VkImportMemoryBufferCollectionFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryBufferCollectionFUCHSIA; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, + uint32_t index_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , collection{ collection_ } + , index{ index_ } + { + } + + VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryBufferCollectionFUCHSIA( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryBufferCollectionFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + ImportMemoryBufferCollectionFUCHSIA & operator=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImportMemoryBufferCollectionFUCHSIA & operator=( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setPNext( const void * 
pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT + { + collection = collection_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportMemoryBufferCollectionFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryBufferCollectionFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryBufferCollectionFUCHSIA const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImportMemoryBufferCollectionFUCHSIA *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, collection, index ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryBufferCollectionFUCHSIA const & ) const = default; +# else + bool operator==( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && ( index == rhs.index ); +# endif + } + + bool operator!=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryBufferCollectionFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {}; + uint32_t index = {}; + }; + + template <> + struct CppType + { + using Type = ImportMemoryBufferCollectionFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + // wrapper struct for struct VkImportMemoryFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryFdInfoKHR.html + struct ImportMemoryFdInfoKHR + { + using NativeType = VkImportMemoryFdInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + int fd_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleType{ handleType_ } + , fd{ fd_ } + { + } + + VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryFdInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + ImportMemoryFdInfoKHR & operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImportMemoryFdInfoKHR & operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if 
!defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT + { + fd = fd_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportMemoryFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImportMemoryFdInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType, fd ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryFdInfoKHR const & ) const = default; +#else + bool operator==( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd ); +# endif + } + + bool operator!=( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + int fd = {}; + }; + + template <> + struct CppType + { + using Type = ImportMemoryFdInfoKHR; + }; + + // wrapper struct for struct VkImportMemoryHostPointerInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryHostPointerInfoEXT.html + struct ImportMemoryHostPointerInfoEXT + { + using NativeType = VkImportMemoryHostPointerInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + void * pHostPointer_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleType{ handleType_ } + , pHostPointer{ pHostPointer_ } + { + } + + VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryHostPointerInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImportMemoryHostPointerInfoEXT & operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + 
ImportMemoryHostPointerInfoEXT & operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT + { + pHostPointer = pHostPointer_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportMemoryHostPointerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryHostPointerInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImportMemoryHostPointerInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType, pHostPointer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryHostPointerInfoEXT const & ) const = default; +#else + bool operator==( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( pHostPointer == rhs.pHostPointer ); +# endif + } + + bool operator!=( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + void * pHostPointer = {}; + }; + + template <> + struct CppType + { + using Type = ImportMemoryHostPointerInfoEXT; + }; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + // wrapper struct for struct VkImportMemoryMetalHandleInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryMetalHandleInfoEXT.html + struct ImportMemoryMetalHandleInfoEXT + { + using NativeType = VkImportMemoryMetalHandleInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryMetalHandleInfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryMetalHandleInfoEXT( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + void * handle_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleType{ handleType_ } + , handle{ handle_ } + { + } + + VULKAN_HPP_CONSTEXPR ImportMemoryMetalHandleInfoEXT( 
ImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryMetalHandleInfoEXT( VkImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryMetalHandleInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImportMemoryMetalHandleInfoEXT & operator=( ImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImportMemoryMetalHandleInfoEXT & operator=( VkImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryMetalHandleInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryMetalHandleInfoEXT & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryMetalHandleInfoEXT & setHandle( void * handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportMemoryMetalHandleInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryMetalHandleInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryMetalHandleInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImportMemoryMetalHandleInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType, handle ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryMetalHandleInfoEXT const & ) const = default; +# else + bool operator==( ImportMemoryMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ); +# endif + } + + bool operator!=( ImportMemoryMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryMetalHandleInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + void * handle = {}; + }; + + template <> + struct CppType + { + using Type = ImportMemoryMetalHandleInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + // wrapper struct for struct VkImportMemoryWin32HandleInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryWin32HandleInfoKHR.html + struct ImportMemoryWin32HandleInfoKHR + { + using NativeType = VkImportMemoryWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
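Like the fd variant, the ImportMemoryHostPointerInfoEXT wrapper defined earlier is attached to a MemoryAllocateInfo to turn an existing host allocation into device memory (VK_EXT_external_memory_host). A minimal sketch, assuming the default `vk` namespace, exception-based error handling, and that `hostPtr` and `size` already respect the device's minImportedHostPointerAlignment limit:

    // Wrap an existing, suitably aligned host allocation as VkDeviceMemory.
    vk::ImportMemoryHostPointerInfoEXT importInfo{
        vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, hostPtr };

    vk::MemoryAllocateInfo allocInfo{ size, memoryTypeIndex, &importInfo };
    vk::DeviceMemory memory = device.allocateMemory( allocInfo );

A compatible memoryTypeIndex can be narrowed down beforehand with vk::Device::getMemoryHostPointerPropertiesEXT, which reports the memory types usable with the given pointer.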
VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleType{ handleType_ } + , handle{ handle_ } + , name{ name_ } + { + } + + VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + ImportMemoryWin32HandleInfoKHR & operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImportMemoryWin32HandleInfoKHR & operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImportMemoryWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType, handle, name ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ) && ( name == rhs.name ); +# endif + } + + bool operator!=( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; + }; + + template <> + struct CppType + { + using Type = 
ImportMemoryWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + // wrapper struct for struct VkImportMemoryWin32HandleInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryWin32HandleInfoNV.html + struct ImportMemoryWin32HandleInfoNV + { + using NativeType = VkImportMemoryWin32HandleInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoNV; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, + HANDLE handle_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleType{ handleType_ } + , handle{ handle_ } + { + } + + VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryWin32HandleInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + ImportMemoryWin32HandleInfoNV & operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImportMemoryWin32HandleInfoNV & operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryWin32HandleInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImportMemoryWin32HandleInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType, handle ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryWin32HandleInfoNV const & ) const = default; +# else + bool operator==( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ); +# endif + } + + bool operator!=( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType 
= StructureType::eImportMemoryWin32HandleInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {}; + HANDLE handle = {}; + }; + + template <> + struct CppType + { + using Type = ImportMemoryWin32HandleInfoNV; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + // wrapper struct for struct VkImportMemoryZirconHandleInfoFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryZirconHandleInfoFUCHSIA.html + struct ImportMemoryZirconHandleInfoFUCHSIA + { + using NativeType = VkImportMemoryZirconHandleInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + zx_handle_t handle_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleType{ handleType_ } + , handle{ handle_ } + { + } + + VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryZirconHandleInfoFUCHSIA( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + ImportMemoryZirconHandleInfoFUCHSIA & operator=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImportMemoryZirconHandleInfoFUCHSIA & operator=( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setHandle( zx_handle_t handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportMemoryZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryZirconHandleInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImportMemoryZirconHandleInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType, handle ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 
) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = handleType <=> rhs.handleType; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &handle, &rhs.handle, sizeof( zx_handle_t ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && + ( memcmp( &handle, &rhs.handle, sizeof( zx_handle_t ) ) == 0 ); + } + + bool operator!=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + zx_handle_t handle = {}; + }; + + template <> + struct CppType + { + using Type = ImportMemoryZirconHandleInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + // wrapper struct for struct VkImportMetalBufferInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMetalBufferInfoEXT.html + struct ImportMetalBufferInfoEXT + { + using NativeType = VkImportMetalBufferInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalBufferInfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMetalBufferInfoEXT( MTLBuffer_id mtlBuffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , mtlBuffer{ mtlBuffer_ } + { + } + + VULKAN_HPP_CONSTEXPR ImportMetalBufferInfoEXT( ImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMetalBufferInfoEXT( VkImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMetalBufferInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImportMetalBufferInfoEXT & operator=( ImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImportMetalBufferInfoEXT & operator=( VkImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT & setMtlBuffer( MTLBuffer_id mtlBuffer_ ) VULKAN_HPP_NOEXCEPT + { + mtlBuffer = mtlBuffer_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportMetalBufferInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMetalBufferInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMetalBufferInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImportMetalBufferInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const 
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mtlBuffer ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMetalBufferInfoEXT const & ) const = default; +# else + bool operator==( ImportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlBuffer == rhs.mtlBuffer ); +# endif + } + + bool operator!=( ImportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalBufferInfoEXT; + const void * pNext = {}; + MTLBuffer_id mtlBuffer = {}; + }; + + template <> + struct CppType + { + using Type = ImportMetalBufferInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + // wrapper struct for struct VkImportMetalIOSurfaceInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMetalIOSurfaceInfoEXT.html + struct ImportMetalIOSurfaceInfoEXT + { + using NativeType = VkImportMetalIOSurfaceInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalIoSurfaceInfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMetalIOSurfaceInfoEXT( IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , ioSurface{ ioSurface_ } + { + } + + VULKAN_HPP_CONSTEXPR ImportMetalIOSurfaceInfoEXT( ImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMetalIOSurfaceInfoEXT( VkImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMetalIOSurfaceInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImportMetalIOSurfaceInfoEXT & operator=( ImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImportMetalIOSurfaceInfoEXT & operator=( VkImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT & setIoSurface( IOSurfaceRef ioSurface_ ) VULKAN_HPP_NOEXCEPT + { + ioSurface = ioSurface_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportMetalIOSurfaceInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMetalIOSurfaceInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMetalIOSurfaceInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImportMetalIOSurfaceInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, ioSurface ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMetalIOSurfaceInfoEXT const & ) const = default; +# else + bool operator==( 
ImportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ioSurface == rhs.ioSurface ); +# endif + } + + bool operator!=( ImportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalIoSurfaceInfoEXT; + const void * pNext = {}; + IOSurfaceRef ioSurface = {}; + }; + + template <> + struct CppType + { + using Type = ImportMetalIOSurfaceInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + // wrapper struct for struct VkImportMetalSharedEventInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMetalSharedEventInfoEXT.html + struct ImportMetalSharedEventInfoEXT + { + using NativeType = VkImportMetalSharedEventInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalSharedEventInfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMetalSharedEventInfoEXT( MTLSharedEvent_id mtlSharedEvent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , mtlSharedEvent{ mtlSharedEvent_ } + { + } + + VULKAN_HPP_CONSTEXPR ImportMetalSharedEventInfoEXT( ImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMetalSharedEventInfoEXT( VkImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMetalSharedEventInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImportMetalSharedEventInfoEXT & operator=( ImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImportMetalSharedEventInfoEXT & operator=( VkImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT & setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) VULKAN_HPP_NOEXCEPT + { + mtlSharedEvent = mtlSharedEvent_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportMetalSharedEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMetalSharedEventInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMetalSharedEventInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImportMetalSharedEventInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mtlSharedEvent ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMetalSharedEventInfoEXT const & ) const = default; +# else + bool operator==( ImportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return 
this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlSharedEvent == rhs.mtlSharedEvent ); +# endif + } + + bool operator!=( ImportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalSharedEventInfoEXT; + const void * pNext = {}; + MTLSharedEvent_id mtlSharedEvent = {}; + }; + + template <> + struct CppType + { + using Type = ImportMetalSharedEventInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + // wrapper struct for struct VkImportMetalTextureInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMetalTextureInfoEXT.html + struct ImportMetalTextureInfoEXT + { + using NativeType = VkImportMetalTextureInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalTextureInfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMetalTextureInfoEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, + MTLTexture_id mtlTexture_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , plane{ plane_ } + , mtlTexture{ mtlTexture_ } + { + } + + VULKAN_HPP_CONSTEXPR ImportMetalTextureInfoEXT( ImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMetalTextureInfoEXT( VkImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMetalTextureInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImportMetalTextureInfoEXT & operator=( ImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImportMetalTextureInfoEXT & operator=( VkImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setPlane( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ ) VULKAN_HPP_NOEXCEPT + { + plane = plane_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setMtlTexture( MTLTexture_id mtlTexture_ ) VULKAN_HPP_NOEXCEPT + { + mtlTexture = mtlTexture_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportMetalTextureInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMetalTextureInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMetalTextureInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImportMetalTextureInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, plane, mtlTexture ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMetalTextureInfoEXT const & ) const = default; +# else + bool operator==( ImportMetalTextureInfoEXT const & rhs ) 
const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( plane == rhs.plane ) && ( mtlTexture == rhs.mtlTexture ); +# endif + } + + bool operator!=( ImportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalTextureInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + MTLTexture_id mtlTexture = {}; + }; + + template <> + struct CppType + { + using Type = ImportMetalTextureInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + // wrapper struct for struct VkImportScreenBufferInfoQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportScreenBufferInfoQNX.html + struct ImportScreenBufferInfoQNX + { + using NativeType = VkImportScreenBufferInfoQNX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportScreenBufferInfoQNX; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportScreenBufferInfoQNX( struct _screen_buffer * buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , buffer{ buffer_ } + { + } + + VULKAN_HPP_CONSTEXPR ImportScreenBufferInfoQNX( ImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportScreenBufferInfoQNX( VkImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportScreenBufferInfoQNX( *reinterpret_cast( &rhs ) ) + { + } + + ImportScreenBufferInfoQNX & operator=( ImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImportScreenBufferInfoQNX & operator=( VkImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportScreenBufferInfoQNX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportScreenBufferInfoQNX & setBuffer( struct _screen_buffer * buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportScreenBufferInfoQNX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportScreenBufferInfoQNX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportScreenBufferInfoQNX const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImportScreenBufferInfoQNX *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, buffer ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportScreenBufferInfoQNX const & ) const = default; +# else + bool operator==( ImportScreenBufferInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == 
rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); +# endif + } + + bool operator!=( ImportScreenBufferInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportScreenBufferInfoQNX; + const void * pNext = {}; + struct _screen_buffer * buffer = {}; + }; + + template <> + struct CppType + { + using Type = ImportScreenBufferInfoQNX; + }; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + // wrapper struct for struct VkImportSemaphoreFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportSemaphoreFdInfoKHR.html + struct ImportSemaphoreFdInfoKHR + { + using NativeType = VkImportSemaphoreFdInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + int fd_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , flags{ flags_ } + , handleType{ handleType_ } + , fd{ fd_ } + { + } + + VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportSemaphoreFdInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + ImportSemaphoreFdInfoKHR & operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImportSemaphoreFdInfoKHR & operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT + { + fd = fd_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportSemaphoreFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportSemaphoreFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImportSemaphoreFdInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) 
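Unlike the memory-import structures, ImportSemaphoreFdInfoKHR is not placed on a pNext chain; it is passed directly to vkImportSemaphoreFdKHR to replace the payload of an existing semaphore. A minimal sketch, assuming VK_KHR_external_semaphore_fd is enabled, the default `vk` namespace and exception-based error handling, and a `semaphore` and sync-file `fd` obtained elsewhere:

    vk::ImportSemaphoreFdInfoKHR importInfo{};
    importInfo.setSemaphore( semaphore )
              .setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits::eSyncFd )
              .setFlags( vk::SemaphoreImportFlagBits::eTemporary )  // sync-fd imports must be temporary
              .setFd( fd );

    device.importSemaphoreFdKHR( importInfo );  // on success the fd belongs to the implementation

With eTemporary the imported payload applies only to the next wait on the semaphore, after which the semaphore reverts to its prior permanent state.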
+# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, flags, handleType, fd ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportSemaphoreFdInfoKHR const & ) const = default; +#else + bool operator==( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && + ( fd == rhs.fd ); +# endif + } + + bool operator!=( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + int fd = {}; + }; + + template <> + struct CppType + { + using Type = ImportSemaphoreFdInfoKHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + // wrapper struct for struct VkImportSemaphoreWin32HandleInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportSemaphoreWin32HandleInfoKHR.html + struct ImportSemaphoreWin32HandleInfoKHR + { + using NativeType = VkImportSemaphoreWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , flags{ flags_ } + , handleType{ handleType_ } + , handle{ handle_ } + , name{ name_ } + { + } + + VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportSemaphoreWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + ImportSemaphoreWin32HandleInfoKHR & operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImportSemaphoreWin32HandleInfoKHR & operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; 
+ } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportSemaphoreWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImportSemaphoreWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, flags, handleType, handle, name ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && + ( handle == rhs.handle ) && ( name == rhs.name ); +# endif + } + + bool operator!=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; + }; + + template <> + struct CppType + { + using Type = ImportSemaphoreWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + // wrapper struct for struct VkImportSemaphoreZirconHandleInfoFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportSemaphoreZirconHandleInfoFUCHSIA.html + struct ImportSemaphoreZirconHandleInfoFUCHSIA + { + using NativeType = VkImportSemaphoreZirconHandleInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + 
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + zx_handle_t zirconHandle_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , flags{ flags_ } + , handleType{ handleType_ } + , zirconHandle{ zirconHandle_ } + { + } + + VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportSemaphoreZirconHandleInfoFUCHSIA( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportSemaphoreZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setZirconHandle( zx_handle_t zirconHandle_ ) VULKAN_HPP_NOEXCEPT + { + zirconHandle = zirconHandle_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportSemaphoreZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportSemaphoreZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportSemaphoreZirconHandleInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkImportSemaphoreZirconHandleInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, flags, handleType, zirconHandle ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = semaphore <=> rhs.semaphore; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = handleType <=> rhs.handleType; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ); cmp != 0 ) + 
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && + ( memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ) == 0 ); + } + + bool operator!=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + zx_handle_t zirconHandle = {}; + }; + + template <> + struct CppType + { + using Type = ImportSemaphoreZirconHandleInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + // wrapper struct for struct VkIndirectCommandsExecutionSetTokenEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsExecutionSetTokenEXT.html + struct IndirectCommandsExecutionSetTokenEXT + { + using NativeType = VkIndirectCommandsExecutionSetTokenEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsExecutionSetTokenEXT( + VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines, + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ = {} ) VULKAN_HPP_NOEXCEPT + : type{ type_ } + , shaderStages{ shaderStages_ } + { + } + + VULKAN_HPP_CONSTEXPR IndirectCommandsExecutionSetTokenEXT( IndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsExecutionSetTokenEXT( VkIndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsExecutionSetTokenEXT( *reinterpret_cast( &rhs ) ) + { + } + + IndirectCommandsExecutionSetTokenEXT & operator=( IndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + IndirectCommandsExecutionSetTokenEXT & operator=( VkIndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsExecutionSetTokenEXT & setType( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsExecutionSetTokenEXT & setShaderStages( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ ) VULKAN_HPP_NOEXCEPT + { + shaderStages = shaderStages_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkIndirectCommandsExecutionSetTokenEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsExecutionSetTokenEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsExecutionSetTokenEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator 
VkIndirectCommandsExecutionSetTokenEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( type, shaderStages ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsExecutionSetTokenEXT const & ) const = default; +#else + bool operator==( IndirectCommandsExecutionSetTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( type == rhs.type ) && ( shaderStages == rhs.shaderStages ); +# endif + } + + bool operator!=( IndirectCommandsExecutionSetTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines; + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages = {}; + }; + + // wrapper struct for struct VkIndirectCommandsIndexBufferTokenEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsIndexBufferTokenEXT.html + struct IndirectCommandsIndexBufferTokenEXT + { + using NativeType = VkIndirectCommandsIndexBufferTokenEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + IndirectCommandsIndexBufferTokenEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT mode_ = + VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer ) VULKAN_HPP_NOEXCEPT + : mode{ mode_ } + { + } + + VULKAN_HPP_CONSTEXPR IndirectCommandsIndexBufferTokenEXT( IndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsIndexBufferTokenEXT( VkIndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsIndexBufferTokenEXT( *reinterpret_cast( &rhs ) ) + { + } + + IndirectCommandsIndexBufferTokenEXT & operator=( IndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + IndirectCommandsIndexBufferTokenEXT & operator=( VkIndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsIndexBufferTokenEXT & + setMode( VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkIndirectCommandsIndexBufferTokenEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsIndexBufferTokenEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsIndexBufferTokenEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkIndirectCommandsIndexBufferTokenEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( mode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsIndexBufferTokenEXT const & ) const 
= default; +#else + bool operator==( IndirectCommandsIndexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( mode == rhs.mode ); +# endif + } + + bool operator!=( IndirectCommandsIndexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT mode = VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer; + }; + + // wrapper struct for struct VkPushConstantRange, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPushConstantRange.html + struct PushConstantRange + { + using NativeType = VkPushConstantRange; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT + : stageFlags{ stageFlags_ } + , offset{ offset_ } + , size{ size_ } + { + } + + VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT : PushConstantRange( *reinterpret_cast( &rhs ) ) {} + + PushConstantRange & operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + { + stageFlags = stageFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPushConstantRange const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPushConstantRange *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( stageFlags, offset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PushConstantRange const & ) const = default; +#else + bool operator==( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( stageFlags == rhs.stageFlags ) && ( offset == rhs.offset ) && ( size == rhs.size ); +# endif + } + + bool operator!=( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + uint32_t offset = {}; + uint32_t size = {}; + }; + + // wrapper struct for struct 
VkIndirectCommandsPushConstantTokenEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsPushConstantTokenEXT.html + struct IndirectCommandsPushConstantTokenEXT + { + using NativeType = VkIndirectCommandsPushConstantTokenEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsPushConstantTokenEXT( VULKAN_HPP_NAMESPACE::PushConstantRange updateRange_ = {} ) VULKAN_HPP_NOEXCEPT + : updateRange{ updateRange_ } + { + } + + VULKAN_HPP_CONSTEXPR IndirectCommandsPushConstantTokenEXT( IndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsPushConstantTokenEXT( VkIndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsPushConstantTokenEXT( *reinterpret_cast( &rhs ) ) + { + } + + IndirectCommandsPushConstantTokenEXT & operator=( IndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + IndirectCommandsPushConstantTokenEXT & operator=( VkIndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsPushConstantTokenEXT & + setUpdateRange( VULKAN_HPP_NAMESPACE::PushConstantRange const & updateRange_ ) VULKAN_HPP_NOEXCEPT + { + updateRange = updateRange_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkIndirectCommandsPushConstantTokenEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsPushConstantTokenEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsPushConstantTokenEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkIndirectCommandsPushConstantTokenEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( updateRange ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsPushConstantTokenEXT const & ) const = default; +#else + bool operator==( IndirectCommandsPushConstantTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( updateRange == rhs.updateRange ); +# endif + } + + bool operator!=( IndirectCommandsPushConstantTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::PushConstantRange updateRange = {}; + }; + + // wrapper struct for struct VkIndirectCommandsVertexBufferTokenEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsVertexBufferTokenEXT.html + struct IndirectCommandsVertexBufferTokenEXT + { + using NativeType = VkIndirectCommandsVertexBufferTokenEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsVertexBufferTokenEXT( uint32_t vertexBindingUnit_ = {} ) VULKAN_HPP_NOEXCEPT : vertexBindingUnit{ vertexBindingUnit_ } + { + } + + VULKAN_HPP_CONSTEXPR IndirectCommandsVertexBufferTokenEXT( IndirectCommandsVertexBufferTokenEXT const 
& rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsVertexBufferTokenEXT( VkIndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsVertexBufferTokenEXT( *reinterpret_cast( &rhs ) ) + { + } + + IndirectCommandsVertexBufferTokenEXT & operator=( IndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + IndirectCommandsVertexBufferTokenEXT & operator=( VkIndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsVertexBufferTokenEXT & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingUnit = vertexBindingUnit_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkIndirectCommandsVertexBufferTokenEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsVertexBufferTokenEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsVertexBufferTokenEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkIndirectCommandsVertexBufferTokenEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( vertexBindingUnit ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsVertexBufferTokenEXT const & ) const = default; +#else + bool operator==( IndirectCommandsVertexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( vertexBindingUnit == rhs.vertexBindingUnit ); +# endif + } + + bool operator!=( IndirectCommandsVertexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t vertexBindingUnit = {}; + }; + + union IndirectCommandsTokenDataEXT + { + using NativeType = VkIndirectCommandsTokenDataEXT; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT * pPushConstant_ = {} ) + : pPushConstant( pPushConstant_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT * pVertexBuffer_ ) + : pVertexBuffer( pVertexBuffer_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT * pIndexBuffer_ ) + : pIndexBuffer( pIndexBuffer_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT * pExecutionSet_ ) + : pExecutionSet( pExecutionSet_ ) + { + } +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & + setPPushConstant( const VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT * pPushConstant_ ) VULKAN_HPP_NOEXCEPT + { + pPushConstant = pPushConstant_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
IndirectCommandsTokenDataEXT & + setPVertexBuffer( const VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT * pVertexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + pVertexBuffer = pVertexBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & + setPIndexBuffer( const VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT * pIndexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + pIndexBuffer = pIndexBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & + setPExecutionSet( const VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT * pExecutionSet_ ) VULKAN_HPP_NOEXCEPT + { + pExecutionSet = pExecutionSet_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkIndirectCommandsTokenDataEXT const &() const + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsTokenDataEXT &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + const VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT * pPushConstant; + const VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT * pVertexBuffer; + const VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT * pIndexBuffer; + const VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT * pExecutionSet; +#else + const VkIndirectCommandsPushConstantTokenEXT * pPushConstant; + const VkIndirectCommandsVertexBufferTokenEXT * pVertexBuffer; + const VkIndirectCommandsIndexBufferTokenEXT * pIndexBuffer; + const VkIndirectCommandsExecutionSetTokenEXT * pExecutionSet; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + // wrapper struct for struct VkIndirectCommandsLayoutTokenEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutTokenEXT.html + struct IndirectCommandsLayoutTokenEXT + { + using NativeType = VkIndirectCommandsLayoutTokenEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT( + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT type_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT::eExecutionSet, + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenDataEXT data_ = {}, + uint32_t offset_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , data{ data_ } + , offset{ offset_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT( IndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutTokenEXT( VkIndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsLayoutTokenEXT( *reinterpret_cast( &rhs ) ) + { + } + + IndirectCommandsLayoutTokenEXT & operator=( IndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + IndirectCommandsLayoutTokenEXT & operator=( VkIndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setType( 
VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setData( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenDataEXT const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkIndirectCommandsLayoutTokenEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutTokenEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutTokenEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutTokenEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, data, offset ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT type = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT::eExecutionSet; + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenDataEXT data = {}; + uint32_t offset = {}; + }; + + template <> + struct CppType + { + using Type = IndirectCommandsLayoutTokenEXT; + }; + + // wrapper struct for struct VkIndirectCommandsLayoutCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutCreateInfoEXT.html + struct IndirectCommandsLayoutCreateInfoEXT + { + using NativeType = VkIndirectCommandsLayoutCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ = {}, + uint32_t indirectStride_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + uint32_t tokenCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT * pTokens_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , shaderStages{ shaderStages_ } + , indirectStride{ indirectStride_ } + , pipelineLayout{ pipelineLayout_ } + , tokenCount{ tokenCount_ } + , pTokens{ pTokens_ } + { + } + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoEXT( IndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutCreateInfoEXT( VkIndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsLayoutCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoEXT( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_, + uint32_t indirectStride_, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & 
tokens_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , shaderStages( shaderStages_ ) + , indirectStride( indirectStride_ ) + , pipelineLayout( pipelineLayout_ ) + , tokenCount( static_cast( tokens_.size() ) ) + , pTokens( tokens_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + IndirectCommandsLayoutCreateInfoEXT & operator=( IndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + IndirectCommandsLayoutCreateInfoEXT & operator=( VkIndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setShaderStages( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ ) VULKAN_HPP_NOEXCEPT + { + shaderStages = shaderStages_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setIndirectStride( uint32_t indirectStride_ ) VULKAN_HPP_NOEXCEPT + { + indirectStride = indirectStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + pipelineLayout = pipelineLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT + { + tokenCount = tokenCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & + setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT * pTokens_ ) VULKAN_HPP_NOEXCEPT + { + pTokens = pTokens_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoEXT & + setTokens( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tokens_ ) VULKAN_HPP_NOEXCEPT + { + tokenCount = static_cast( tokens_.size() ); + pTokens = tokens_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkIndirectCommandsLayoutCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, shaderStages, indirectStride, pipelineLayout, tokenCount, pTokens ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsLayoutCreateInfoEXT const & ) const = default; +#else + bool operator==( IndirectCommandsLayoutCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( 
VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( shaderStages == rhs.shaderStages ) && + ( indirectStride == rhs.indirectStride ) && ( pipelineLayout == rhs.pipelineLayout ) && ( tokenCount == rhs.tokenCount ) && + ( pTokens == rhs.pTokens ); +# endif + } + + bool operator!=( IndirectCommandsLayoutCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages = {}; + uint32_t indirectStride = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; + uint32_t tokenCount = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT * pTokens = {}; + }; + + template <> + struct CppType + { + using Type = IndirectCommandsLayoutCreateInfoEXT; + }; + + // wrapper struct for struct VkIndirectCommandsLayoutTokenNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutTokenNV.html + struct IndirectCommandsLayoutTokenNV + { + using NativeType = VkIndirectCommandsLayoutTokenNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup, + uint32_t stream_ = {}, + uint32_t offset_ = {}, + uint32_t vertexBindingUnit_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {}, + uint32_t pushconstantOffset_ = {}, + uint32_t pushconstantSize_ = {}, + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {}, + uint32_t indexTypeCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ = {}, + const uint32_t * pIndexTypeValues_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , tokenType{ tokenType_ } + , stream{ stream_ } + , offset{ offset_ } + , vertexBindingUnit{ vertexBindingUnit_ } + , vertexDynamicStride{ vertexDynamicStride_ } + , pushconstantPipelineLayout{ pushconstantPipelineLayout_ } + , pushconstantShaderStageFlags{ pushconstantShaderStageFlags_ } + , pushconstantOffset{ pushconstantOffset_ } + , pushconstantSize{ pushconstantSize_ } + , indirectStateFlags{ indirectStateFlags_ } + , indexTypeCount{ indexTypeCount_ } + , pIndexTypes{ pIndexTypes_ } + , pIndexTypeValues{ pIndexTypeValues_ } + { + } + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutTokenNV( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsLayoutTokenNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_, + uint32_t stream_, + uint32_t offset_, + uint32_t vertexBindingUnit_, + VULKAN_HPP_NAMESPACE::Bool32 
vertexDynamicStride_, + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_, + uint32_t pushconstantOffset_, + uint32_t pushconstantSize_, + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypes_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypeValues_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , tokenType( tokenType_ ) + , stream( stream_ ) + , offset( offset_ ) + , vertexBindingUnit( vertexBindingUnit_ ) + , vertexDynamicStride( vertexDynamicStride_ ) + , pushconstantPipelineLayout( pushconstantPipelineLayout_ ) + , pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ) + , pushconstantOffset( pushconstantOffset_ ) + , pushconstantSize( pushconstantSize_ ) + , indirectStateFlags( indirectStateFlags_ ) + , indexTypeCount( static_cast( indexTypes_.size() ) ) + , pIndexTypes( indexTypes_.data() ) + , pIndexTypeValues( indexTypeValues_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( indexTypes_.size() == indexTypeValues_.size() ); +# else + if ( indexTypes_.size() != indexTypeValues_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + IndirectCommandsLayoutTokenNV & operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + IndirectCommandsLayoutTokenNV & operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT + { + tokenType = tokenType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setStream( uint32_t stream_ ) VULKAN_HPP_NOEXCEPT + { + stream = stream_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingUnit = vertexBindingUnit_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexDynamicStride = vertexDynamicStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + setPushconstantPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantPipelineLayout = pushconstantPipelineLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + setPushconstantShaderStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantShaderStageFlags = pushconstantShaderStageFlags_; + return *this; + } + + 
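+    // A brief usage sketch (illustrative only): the fluent setters above are typically chained to
+    // describe one token of an NV device-generated-commands layout. `pipelineLayout` below is an
+    // assumed, caller-provided vk::PipelineLayout handle.
+    //
+    //   vk::IndirectCommandsLayoutTokenNV pushConstantToken{};
+    //   pushConstantToken.setTokenType( vk::IndirectCommandsTokenTypeNV::ePushConstant )
+    //                    .setStream( 0 )
+    //                    .setOffset( 0 )
+    //                    .setPushconstantPipelineLayout( pipelineLayout )
+    //                    .setPushconstantShaderStageFlags( vk::ShaderStageFlagBits::eVertex )
+    //                    .setPushconstantOffset( 0 )
+    //                    .setPushconstantSize( 4 * sizeof( float ) );
+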
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantOffset( uint32_t pushconstantOffset_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantOffset = pushconstantOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantSize( uint32_t pushconstantSize_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantSize = pushconstantSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT + { + indirectStateFlags = indirectStateFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setIndexTypeCount( uint32_t indexTypeCount_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeCount = indexTypeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ ) VULKAN_HPP_NOEXCEPT + { + pIndexTypes = pIndexTypes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutTokenNV & + setIndexTypes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypes_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeCount = static_cast( indexTypes_.size() ); + pIndexTypes = indexTypes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t * pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT + { + pIndexTypeValues = pIndexTypeValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutTokenNV & + setIndexTypeValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypeValues_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeCount = static_cast( indexTypeValues_.size() ); + pIndexTypeValues = indexTypeValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkIndirectCommandsLayoutTokenNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutTokenNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutTokenNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + tokenType, + stream, + offset, + vertexBindingUnit, + vertexDynamicStride, + pushconstantPipelineLayout, + pushconstantShaderStageFlags, + pushconstantOffset, + pushconstantSize, + indirectStateFlags, + indexTypeCount, + pIndexTypes, + pIndexTypeValues ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsLayoutTokenNV const & ) const = default; +#else + bool operator==( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tokenType == rhs.tokenType ) && ( stream == rhs.stream ) && ( offset == rhs.offset ) && + ( vertexBindingUnit == rhs.vertexBindingUnit ) && ( vertexDynamicStride == rhs.vertexDynamicStride ) && + ( pushconstantPipelineLayout == 
rhs.pushconstantPipelineLayout ) && ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags ) && + ( pushconstantOffset == rhs.pushconstantOffset ) && ( pushconstantSize == rhs.pushconstantSize ) && + ( indirectStateFlags == rhs.indirectStateFlags ) && ( indexTypeCount == rhs.indexTypeCount ) && ( pIndexTypes == rhs.pIndexTypes ) && + ( pIndexTypeValues == rhs.pIndexTypeValues ); +# endif + } + + bool operator!=( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup; + uint32_t stream = {}; + uint32_t offset = {}; + uint32_t vertexBindingUnit = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {}; + uint32_t pushconstantOffset = {}; + uint32_t pushconstantSize = {}; + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {}; + uint32_t indexTypeCount = {}; + const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes = {}; + const uint32_t * pIndexTypeValues = {}; + }; + + template <> + struct CppType + { + using Type = IndirectCommandsLayoutTokenNV; + }; + + // wrapper struct for struct VkIndirectCommandsLayoutCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutCreateInfoNV.html + struct IndirectCommandsLayoutCreateInfoNV + { + using NativeType = VkIndirectCommandsLayoutCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + IndirectCommandsLayoutCreateInfoNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t tokenCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ = {}, + uint32_t streamCount_ = {}, + const uint32_t * pStreamStrides_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , tokenCount{ tokenCount_ } + , pTokens{ pTokens_ } + , streamCount{ streamCount_ } + , pStreamStrides{ pStreamStrides_ } + { + } + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsLayoutCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tokens_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streamStrides_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , tokenCount( static_cast( tokens_.size() ) 
) + , pTokens( tokens_.data() ) + , streamCount( static_cast( streamStrides_.size() ) ) + , pStreamStrides( streamStrides_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + IndirectCommandsLayoutCreateInfoNV & operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + IndirectCommandsLayoutCreateInfoNV & operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT + { + tokenCount = tokenCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & + setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ ) VULKAN_HPP_NOEXCEPT + { + pTokens = pTokens_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoNV & + setTokens( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tokens_ ) VULKAN_HPP_NOEXCEPT + { + tokenCount = static_cast( tokens_.size() ); + pTokens = tokens_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = streamCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t * pStreamStrides_ ) VULKAN_HPP_NOEXCEPT + { + pStreamStrides = pStreamStrides_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoNV & + setStreamStrides( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streamStrides_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = static_cast( streamStrides_.size() ); + pStreamStrides = streamStrides_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkIndirectCommandsLayoutCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pipelineBindPoint, tokenCount, pTokens, streamCount, pStreamStrides ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + 
auto operator<=>( IndirectCommandsLayoutCreateInfoNV const & ) const = default; +#else + bool operator==( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && + ( tokenCount == rhs.tokenCount ) && ( pTokens == rhs.pTokens ) && ( streamCount == rhs.streamCount ) && ( pStreamStrides == rhs.pStreamStrides ); +# endif + } + + bool operator!=( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t tokenCount = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens = {}; + uint32_t streamCount = {}; + const uint32_t * pStreamStrides = {}; + }; + + template <> + struct CppType + { + using Type = IndirectCommandsLayoutCreateInfoNV; + }; + + // wrapper struct for struct VkIndirectExecutionSetPipelineInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetPipelineInfoEXT.html + struct IndirectExecutionSetPipelineInfoEXT + { + using NativeType = VkIndirectExecutionSetPipelineInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetPipelineInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectExecutionSetPipelineInfoEXT( VULKAN_HPP_NAMESPACE::Pipeline initialPipeline_ = {}, + uint32_t maxPipelineCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , initialPipeline{ initialPipeline_ } + , maxPipelineCount{ maxPipelineCount_ } + { + } + + VULKAN_HPP_CONSTEXPR IndirectExecutionSetPipelineInfoEXT( IndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectExecutionSetPipelineInfoEXT( VkIndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectExecutionSetPipelineInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + IndirectExecutionSetPipelineInfoEXT & operator=( IndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + IndirectExecutionSetPipelineInfoEXT & operator=( VkIndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setInitialPipeline( VULKAN_HPP_NAMESPACE::Pipeline initialPipeline_ ) VULKAN_HPP_NOEXCEPT + { + initialPipeline = initialPipeline_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setMaxPipelineCount( uint32_t maxPipelineCount_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineCount = maxPipelineCount_; + return *this; + } +#endif 
/*VULKAN_HPP_NO_SETTERS*/ + + operator VkIndirectExecutionSetPipelineInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectExecutionSetPipelineInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectExecutionSetPipelineInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkIndirectExecutionSetPipelineInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, initialPipeline, maxPipelineCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectExecutionSetPipelineInfoEXT const & ) const = default; +#else + bool operator==( IndirectExecutionSetPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( initialPipeline == rhs.initialPipeline ) && ( maxPipelineCount == rhs.maxPipelineCount ); +# endif + } + + bool operator!=( IndirectExecutionSetPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetPipelineInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Pipeline initialPipeline = {}; + uint32_t maxPipelineCount = {}; + }; + + template <> + struct CppType + { + using Type = IndirectExecutionSetPipelineInfoEXT; + }; + + // wrapper struct for struct VkIndirectExecutionSetShaderLayoutInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetShaderLayoutInfoEXT.html + struct IndirectExecutionSetShaderLayoutInfoEXT + { + using NativeType = VkIndirectExecutionSetShaderLayoutInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetShaderLayoutInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderLayoutInfoEXT( uint32_t setLayoutCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , setLayoutCount{ setLayoutCount_ } + , pSetLayouts{ pSetLayouts_ } + { + } + + VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderLayoutInfoEXT( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectExecutionSetShaderLayoutInfoEXT( VkIndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectExecutionSetShaderLayoutInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectExecutionSetShaderLayoutInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), setLayoutCount( static_cast( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + IndirectExecutionSetShaderLayoutInfoEXT & operator=( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + 
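+    // A brief usage sketch (illustrative only): the ArrayProxy constructor above derives
+    // setLayoutCount and pSetLayouts from a caller-owned container. `setLayouts` below is an assumed
+    // std::vector<vk::DescriptorSetLayout> that must outlive this struct, since only a pointer is stored.
+    //
+    //   vk::IndirectExecutionSetShaderLayoutInfoEXT layoutInfo( setLayouts );
+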
IndirectExecutionSetShaderLayoutInfoEXT & operator=( VkIndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT + { + setLayoutCount = setLayoutCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & + setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pSetLayouts = pSetLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectExecutionSetShaderLayoutInfoEXT & + setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ ) VULKAN_HPP_NOEXCEPT + { + setLayoutCount = static_cast( setLayouts_.size() ); + pSetLayouts = setLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkIndirectExecutionSetShaderLayoutInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectExecutionSetShaderLayoutInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectExecutionSetShaderLayoutInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkIndirectExecutionSetShaderLayoutInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, setLayoutCount, pSetLayouts ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectExecutionSetShaderLayoutInfoEXT const & ) const = default; +#else + bool operator==( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( setLayoutCount == rhs.setLayoutCount ) && ( pSetLayouts == rhs.pSetLayouts ); +# endif + } + + bool operator!=( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetShaderLayoutInfoEXT; + const void * pNext = {}; + uint32_t setLayoutCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; + }; + + template <> + struct CppType + { + using Type = IndirectExecutionSetShaderLayoutInfoEXT; + }; + + // wrapper struct for struct VkIndirectExecutionSetShaderInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetShaderInfoEXT.html + struct IndirectExecutionSetShaderInfoEXT + { + using NativeType = VkIndirectExecutionSetShaderInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetShaderInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
IndirectExecutionSetShaderInfoEXT( uint32_t shaderCount_ = {}, + const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos_ = {}, + uint32_t maxShaderCount_ = {}, + uint32_t pushConstantRangeCount_ = {}, + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderCount{ shaderCount_ } + , pInitialShaders{ pInitialShaders_ } + , pSetLayoutInfos{ pSetLayoutInfos_ } + , maxShaderCount{ maxShaderCount_ } + , pushConstantRangeCount{ pushConstantRangeCount_ } + , pPushConstantRanges{ pPushConstantRanges_ } + { + } + + VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderInfoEXT( IndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectExecutionSetShaderInfoEXT( VkIndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectExecutionSetShaderInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectExecutionSetShaderInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialShaders_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayoutInfos_ = {}, + uint32_t maxShaderCount_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , shaderCount( static_cast( initialShaders_.size() ) ) + , pInitialShaders( initialShaders_.data() ) + , pSetLayoutInfos( setLayoutInfos_.data() ) + , maxShaderCount( maxShaderCount_ ) + , pushConstantRangeCount( static_cast( pushConstantRanges_.size() ) ) + , pPushConstantRanges( pushConstantRanges_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( setLayoutInfos_.empty() || ( initialShaders_.size() == setLayoutInfos_.size() ) ); +# else + if ( !setLayoutInfos_.empty() && ( initialShaders_.size() != setLayoutInfos_.size() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::IndirectExecutionSetShaderInfoEXT::IndirectExecutionSetShaderInfoEXT: !setLayoutInfos_.empty() && ( initialShaders_.size() != setLayoutInfos_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + IndirectExecutionSetShaderInfoEXT & operator=( IndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + IndirectExecutionSetShaderInfoEXT & operator=( VkIndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setShaderCount( uint32_t shaderCount_ ) VULKAN_HPP_NOEXCEPT + { + shaderCount = shaderCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & + setPInitialShaders( const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders_ ) VULKAN_HPP_NOEXCEPT + { + pInitialShaders = pInitialShaders_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectExecutionSetShaderInfoEXT & + setInitialShaders( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialShaders_ ) VULKAN_HPP_NOEXCEPT + { + shaderCount = static_cast( 
initialShaders_.size() ); + pInitialShaders = initialShaders_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & + setPSetLayoutInfos( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos_ ) VULKAN_HPP_NOEXCEPT + { + pSetLayoutInfos = pSetLayoutInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectExecutionSetShaderInfoEXT & setSetLayoutInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayoutInfos_ ) + VULKAN_HPP_NOEXCEPT + { + shaderCount = static_cast( setLayoutInfos_.size() ); + pSetLayoutInfos = setLayoutInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setMaxShaderCount( uint32_t maxShaderCount_ ) VULKAN_HPP_NOEXCEPT + { + maxShaderCount = maxShaderCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = pushConstantRangeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & + setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + pPushConstantRanges = pPushConstantRanges_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectExecutionSetShaderInfoEXT & setPushConstantRanges( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = static_cast( pushConstantRanges_.size() ); + pPushConstantRanges = pushConstantRanges_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkIndirectExecutionSetShaderInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectExecutionSetShaderInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectExecutionSetShaderInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkIndirectExecutionSetShaderInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderCount, pInitialShaders, pSetLayoutInfos, maxShaderCount, pushConstantRangeCount, pPushConstantRanges ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectExecutionSetShaderInfoEXT const & ) const = default; +#else + bool operator==( IndirectExecutionSetShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCount == rhs.shaderCount ) && ( pInitialShaders == rhs.pInitialShaders ) && + ( pSetLayoutInfos == rhs.pSetLayoutInfos ) && ( maxShaderCount == rhs.maxShaderCount ) && + ( pushConstantRangeCount == rhs.pushConstantRangeCount ) && ( pPushConstantRanges == rhs.pPushConstantRanges ); +# endif + } + + bool operator!=( IndirectExecutionSetShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType 
sType = StructureType::eIndirectExecutionSetShaderInfoEXT; + const void * pNext = {}; + uint32_t shaderCount = {}; + const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders = {}; + const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos = {}; + uint32_t maxShaderCount = {}; + uint32_t pushConstantRangeCount = {}; + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {}; + }; + + template <> + struct CppType + { + using Type = IndirectExecutionSetShaderInfoEXT; + }; + + union IndirectExecutionSetInfoEXT + { + using NativeType = VkIndirectExecutionSetInfoEXT; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo_ = {} ) + : pPipelineInfo( pPipelineInfo_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo_ ) + : pShaderInfo( pShaderInfo_ ) + { + } +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT & + setPPipelineInfo( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineInfo = pPipelineInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT & + setPShaderInfo( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo_ ) VULKAN_HPP_NOEXCEPT + { + pShaderInfo = pShaderInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkIndirectExecutionSetInfoEXT const &() const + { + return *reinterpret_cast( this ); + } + + operator VkIndirectExecutionSetInfoEXT &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo; + const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo; +#else + const VkIndirectExecutionSetPipelineInfoEXT * pPipelineInfo; + const VkIndirectExecutionSetShaderInfoEXT * pShaderInfo; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + // wrapper struct for struct VkIndirectExecutionSetCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetCreateInfoEXT.html + struct IndirectExecutionSetCreateInfoEXT + { + using NativeType = VkIndirectExecutionSetCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT( + VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines, + VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT info_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , info{ info_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT( IndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectExecutionSetCreateInfoEXT( VkIndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectExecutionSetCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + 
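+    // A brief usage sketch (illustrative only): the union member `info` stores a pointer, so the
+    // pointed-to pipeline info must stay alive until the execution set has been created.
+    // `pipelineInfo` below is an assumed, caller-owned vk::IndirectExecutionSetPipelineInfoEXT.
+    //
+    //   vk::IndirectExecutionSetCreateInfoEXT createInfo( vk::IndirectExecutionSetInfoTypeEXT::ePipelines,
+    //                                                     vk::IndirectExecutionSetInfoEXT( &pipelineInfo ) );
+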
IndirectExecutionSetCreateInfoEXT & operator=( IndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    IndirectExecutionSetCreateInfoEXT & operator=( VkIndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setType( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setInfo( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT const & info_ ) VULKAN_HPP_NOEXCEPT
+    {
+      info = info_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkIndirectExecutionSetCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIndirectExecutionSetCreateInfoEXT *>( this );
+    }
+
+    operator VkIndirectExecutionSetCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIndirectExecutionSetCreateInfoEXT *>( this );
+    }
+
+    operator VkIndirectExecutionSetCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkIndirectExecutionSetCreateInfoEXT *>( this );
+    }
+
+    operator VkIndirectExecutionSetCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkIndirectExecutionSetCreateInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT const &,
+               VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, type, info );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines;
+    VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT info = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eIndirectExecutionSetCreateInfoEXT>
+  {
+    using Type = IndirectExecutionSetCreateInfoEXT;
+  };
+
+  // wrapper struct for struct VkInitializePerformanceApiInfoINTEL, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkInitializePerformanceApiInfoINTEL.html
+  struct InitializePerformanceApiInfoINTEL
+  {
+    using NativeType = VkInitializePerformanceApiInfoINTEL;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInitializePerformanceApiInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void * pUserData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , pUserData{ pUserData_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : InitializePerformanceApiInfoINTEL( *reinterpret_cast<InitializePerformanceApiInfoINTEL const *>( &rhs ) )
+    {
+    }
+
+    InitializePerformanceApiInfoINTEL & operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    InitializePerformanceApiInfoINTEL & operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const *>( &rhs );
+      return *this;
+    }
+ +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT + { + pUserData = pUserData_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkInitializePerformanceApiInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkInitializePerformanceApiInfoINTEL const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkInitializePerformanceApiInfoINTEL *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pUserData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( InitializePerformanceApiInfoINTEL const & ) const = default; +#else + bool operator==( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pUserData == rhs.pUserData ); +# endif + } + + bool operator!=( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL; + const void * pNext = {}; + void * pUserData = {}; + }; + + template <> + struct CppType + { + using Type = InitializePerformanceApiInfoINTEL; + }; + + // wrapper struct for struct VkInputAttachmentAspectReference, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkInputAttachmentAspectReference.html + struct InputAttachmentAspectReference + { + using NativeType = VkInputAttachmentAspectReference; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( uint32_t subpass_ = {}, + uint32_t inputAttachmentIndex_ = {}, + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT + : subpass{ subpass_ } + , inputAttachmentIndex{ inputAttachmentIndex_ } + , aspectMask{ aspectMask_ } + { + } + + VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT + : InputAttachmentAspectReference( *reinterpret_cast( &rhs ) ) + { + } + + InputAttachmentAspectReference & operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT + { + subpass = subpass_; + return *this; + } + + 
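      // Editorial usage sketch (not part of the generated header): InputAttachmentAspectReference is
      // a plain data struct (no sType/pNext). It is normally consumed through
      // vk::RenderPassInputAttachmentAspectCreateInfo chained into vk::RenderPassCreateInfo::pNext,
      // e.g. (assuming an existing `renderPassCreateInfo`):
      //
      //   vk::InputAttachmentAspectReference ref( /*subpass*/ 0,
      //                                           /*inputAttachmentIndex*/ 0,
      //                                           vk::ImageAspectFlagBits::eDepth );
      //   vk::RenderPassInputAttachmentAspectCreateInfo aspectInfo{};
      //   aspectInfo.setAspectReferences( ref );               // enhanced-mode ArrayProxy setter
      //   renderPassCreateInfo.setPNext( &aspectInfo );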
VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentIndex = inputAttachmentIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkInputAttachmentAspectReference const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkInputAttachmentAspectReference const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkInputAttachmentAspectReference *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( subpass, inputAttachmentIndex, aspectMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( InputAttachmentAspectReference const & ) const = default; +#else + bool operator==( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( subpass == rhs.subpass ) && ( inputAttachmentIndex == rhs.inputAttachmentIndex ) && ( aspectMask == rhs.aspectMask ); +# endif + } + + bool operator!=( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t subpass = {}; + uint32_t inputAttachmentIndex = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + }; + + using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference; + + // wrapper struct for struct VkInstanceCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkInstanceCreateInfo.html + struct InstanceCreateInfo + { + using NativeType = VkInstanceCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {}, + const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ = {}, + uint32_t enabledLayerCount_ = {}, + const char * const * ppEnabledLayerNames_ = {}, + uint32_t enabledExtensionCount_ = {}, + const char * const * ppEnabledExtensionNames_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pApplicationInfo{ pApplicationInfo_ } + , enabledLayerCount{ enabledLayerCount_ } + , ppEnabledLayerNames{ ppEnabledLayerNames_ } + , enabledExtensionCount{ enabledExtensionCount_ } + , ppEnabledExtensionNames{ ppEnabledExtensionNames_ } + { + } + + VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : InstanceCreateInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_, + const VULKAN_HPP_NAMESPACE::ApplicationInfo * 
pApplicationInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , pApplicationInfo( pApplicationInfo_ ) + , enabledLayerCount( static_cast( pEnabledLayerNames_.size() ) ) + , ppEnabledLayerNames( pEnabledLayerNames_.data() ) + , enabledExtensionCount( static_cast( pEnabledExtensionNames_.size() ) ) + , ppEnabledExtensionNames( pEnabledExtensionNames_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + InstanceCreateInfo & operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + InstanceCreateInfo & operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT + { + pApplicationInfo = pApplicationInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT + { + enabledLayerCount = enabledLayerCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + { + ppEnabledLayerNames = ppEnabledLayerNames_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + InstanceCreateInfo & + setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + { + enabledLayerCount = static_cast( pEnabledLayerNames_.size() ); + ppEnabledLayerNames = pEnabledLayerNames_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT + { + enabledExtensionCount = enabledExtensionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + { + ppEnabledExtensionNames = ppEnabledExtensionNames_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + InstanceCreateInfo & + setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + { + enabledExtensionCount = static_cast( pEnabledExtensionNames_.size() ); + ppEnabledExtensionNames = pEnabledExtensionNames_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkInstanceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkInstanceCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkInstanceCreateInfo *() VULKAN_HPP_NOEXCEPT + 
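      // Editorial usage sketch (not part of the generated header): the enhanced-mode array setters
      // above let the caller skip the explicit count members. Layer/extension names here are
      // placeholders and the arrays must outlive the createInstance call:
      //
      //   std::array<char const *, 1> layers{ "VK_LAYER_KHRONOS_validation" };
      //   std::array<char const *, 1> extensions{ VK_KHR_SURFACE_EXTENSION_NAME };
      //   vk::ApplicationInfo appInfo( "demo", 1, "demoEngine", 1, VK_API_VERSION_1_1 );
      //   vk::InstanceCreateInfo createInfo{};
      //   createInfo.setPApplicationInfo( &appInfo )
      //             .setPEnabledLayerNames( layers )
      //             .setPEnabledExtensionNames( extensions );
      //   vk::Instance instance = vk::createInstance( createInfo );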
{ + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pApplicationInfo, enabledLayerCount, ppEnabledLayerNames, enabledExtensionCount, ppEnabledExtensionNames ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = pApplicationInfo <=> rhs.pApplicationInfo; cmp != 0 ) + return cmp; + if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < enabledLayerCount; ++i ) + { + if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] ) + if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 ) + return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater; + } + if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < enabledExtensionCount; ++i ) + { + if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] ) + if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 ) + return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater; + } + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pApplicationInfo == rhs.pApplicationInfo ) && + ( enabledLayerCount == rhs.enabledLayerCount ) && + std::equal( ppEnabledLayerNames, + ppEnabledLayerNames + enabledLayerCount, + rhs.ppEnabledLayerNames, + []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ) && + ( enabledExtensionCount == rhs.enabledExtensionCount ) && + std::equal( ppEnabledExtensionNames, + ppEnabledExtensionNames + enabledExtensionCount, + rhs.ppEnabledExtensionNames, + []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ); + } + + bool operator!=( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {}; + const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo = {}; + uint32_t enabledLayerCount = {}; + const char * const * ppEnabledLayerNames = {}; + uint32_t enabledExtensionCount = {}; + const char * const * ppEnabledExtensionNames = {}; + }; + + template <> + struct CppType + { + using Type = InstanceCreateInfo; + }; + + // wrapper struct for struct VkLatencySleepInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencySleepInfoNV.html + struct LatencySleepInfoNV + { + using NativeType = VkLatencySleepInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySleepInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + LatencySleepInfoNV( 
VULKAN_HPP_NAMESPACE::Semaphore signalSemaphore_ = {}, uint64_t value_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , signalSemaphore{ signalSemaphore_ } + , value{ value_ } + { + } + + VULKAN_HPP_CONSTEXPR LatencySleepInfoNV( LatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LatencySleepInfoNV( VkLatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : LatencySleepInfoNV( *reinterpret_cast( &rhs ) ) {} + + LatencySleepInfoNV & operator=( LatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + LatencySleepInfoNV & operator=( VkLatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setSignalSemaphore( VULKAN_HPP_NAMESPACE::Semaphore signalSemaphore_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphore = signalSemaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT + { + value = value_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkLatencySleepInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLatencySleepInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLatencySleepInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkLatencySleepInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, signalSemaphore, value ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( LatencySleepInfoNV const & ) const = default; +#else + bool operator==( LatencySleepInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( signalSemaphore == rhs.signalSemaphore ) && ( value == rhs.value ); +# endif + } + + bool operator!=( LatencySleepInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySleepInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore signalSemaphore = {}; + uint64_t value = {}; + }; + + template <> + struct CppType + { + using Type = LatencySleepInfoNV; + }; + + // wrapper struct for struct VkLatencySleepModeInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencySleepModeInfoNV.html + struct LatencySleepModeInfoNV + { + using NativeType = VkLatencySleepModeInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySleepModeInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR LatencySleepModeInfoNV( VULKAN_HPP_NAMESPACE::Bool32 lowLatencyMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 lowLatencyBoost_ = {}, + uint32_t minimumIntervalUs_ = {}, + const void * pNext_ = 
nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , lowLatencyMode{ lowLatencyMode_ } + , lowLatencyBoost{ lowLatencyBoost_ } + , minimumIntervalUs{ minimumIntervalUs_ } + { + } + + VULKAN_HPP_CONSTEXPR LatencySleepModeInfoNV( LatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LatencySleepModeInfoNV( VkLatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : LatencySleepModeInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + LatencySleepModeInfoNV & operator=( LatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + LatencySleepModeInfoNV & operator=( VkLatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setLowLatencyMode( VULKAN_HPP_NAMESPACE::Bool32 lowLatencyMode_ ) VULKAN_HPP_NOEXCEPT + { + lowLatencyMode = lowLatencyMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setLowLatencyBoost( VULKAN_HPP_NAMESPACE::Bool32 lowLatencyBoost_ ) VULKAN_HPP_NOEXCEPT + { + lowLatencyBoost = lowLatencyBoost_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setMinimumIntervalUs( uint32_t minimumIntervalUs_ ) VULKAN_HPP_NOEXCEPT + { + minimumIntervalUs = minimumIntervalUs_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkLatencySleepModeInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLatencySleepModeInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLatencySleepModeInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkLatencySleepModeInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, lowLatencyMode, lowLatencyBoost, minimumIntervalUs ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( LatencySleepModeInfoNV const & ) const = default; +#else + bool operator==( LatencySleepModeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lowLatencyMode == rhs.lowLatencyMode ) && ( lowLatencyBoost == rhs.lowLatencyBoost ) && + ( minimumIntervalUs == rhs.minimumIntervalUs ); +# endif + } + + bool operator!=( LatencySleepModeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySleepModeInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 lowLatencyMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 lowLatencyBoost = {}; + uint32_t minimumIntervalUs = {}; + }; + + template <> + struct CppType + { + using Type = LatencySleepModeInfoNV; + }; + + // wrapper struct for struct VkLatencySubmissionPresentIdNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencySubmissionPresentIdNV.html + struct LatencySubmissionPresentIdNV + { + using NativeType = 
VkLatencySubmissionPresentIdNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySubmissionPresentIdNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR LatencySubmissionPresentIdNV( uint64_t presentID_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentID{ presentID_ } + { + } + + VULKAN_HPP_CONSTEXPR LatencySubmissionPresentIdNV( LatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LatencySubmissionPresentIdNV( VkLatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT + : LatencySubmissionPresentIdNV( *reinterpret_cast( &rhs ) ) + { + } + + LatencySubmissionPresentIdNV & operator=( LatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + LatencySubmissionPresentIdNV & operator=( VkLatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 LatencySubmissionPresentIdNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencySubmissionPresentIdNV & setPresentID( uint64_t presentID_ ) VULKAN_HPP_NOEXCEPT + { + presentID = presentID_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkLatencySubmissionPresentIdNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLatencySubmissionPresentIdNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLatencySubmissionPresentIdNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkLatencySubmissionPresentIdNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentID ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( LatencySubmissionPresentIdNV const & ) const = default; +#else + bool operator==( LatencySubmissionPresentIdNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentID == rhs.presentID ); +# endif + } + + bool operator!=( LatencySubmissionPresentIdNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySubmissionPresentIdNV; + const void * pNext = {}; + uint64_t presentID = {}; + }; + + template <> + struct CppType + { + using Type = LatencySubmissionPresentIdNV; + }; + + // wrapper struct for struct VkLatencySurfaceCapabilitiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencySurfaceCapabilitiesNV.html + struct LatencySurfaceCapabilitiesNV + { + using NativeType = VkLatencySurfaceCapabilitiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySurfaceCapabilitiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
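      // Editorial usage sketch (not part of the generated header): LatencySurfaceCapabilitiesNV is an
      // output structure of VK_NV_low_latency2; a common pattern is to chain it behind
      // vk::SurfaceCapabilities2KHR so the driver reports which present modes are compatible with
      // low-latency mode. Assuming `physicalDevice` and `surface` exist:
      //
      //   std::array<vk::PresentModeKHR, 8> modes{};
      //   vk::LatencySurfaceCapabilitiesNV latencyCaps{};
      //   latencyCaps.setPresentModes( modes );                // wires presentModeCount / pPresentModes
      //   vk::SurfaceCapabilities2KHR caps{};
      //   caps.pNext = &latencyCaps;
      //   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
      //   vk::Result result = physicalDevice.getSurfaceCapabilities2KHR( &surfaceInfo, &caps );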
VULKAN_HPP_CONSTEXPR LatencySurfaceCapabilitiesNV( uint32_t presentModeCount_ = {}, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentModeCount{ presentModeCount_ } + , pPresentModes{ pPresentModes_ } + { + } + + VULKAN_HPP_CONSTEXPR LatencySurfaceCapabilitiesNV( LatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LatencySurfaceCapabilitiesNV( VkLatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : LatencySurfaceCapabilitiesNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + LatencySurfaceCapabilitiesNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), presentModeCount( static_cast( presentModes_.size() ) ), pPresentModes( presentModes_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + LatencySurfaceCapabilitiesNV & operator=( LatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + LatencySurfaceCapabilitiesNV & operator=( VkLatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPresentModeCount( uint32_t presentModeCount_ ) VULKAN_HPP_NOEXCEPT + { + presentModeCount = presentModeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPPresentModes( VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT + { + pPresentModes = pPresentModes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + LatencySurfaceCapabilitiesNV & + setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_ ) VULKAN_HPP_NOEXCEPT + { + presentModeCount = static_cast( presentModes_.size() ); + pPresentModes = presentModes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkLatencySurfaceCapabilitiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLatencySurfaceCapabilitiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLatencySurfaceCapabilitiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkLatencySurfaceCapabilitiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentModeCount, pPresentModes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( LatencySurfaceCapabilitiesNV const & ) const = default; +#else + bool operator==( LatencySurfaceCapabilitiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeCount == rhs.presentModeCount ) && ( pPresentModes == rhs.pPresentModes ); +# endif + } + + bool operator!=( 
LatencySurfaceCapabilitiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySurfaceCapabilitiesNV; + const void * pNext = {}; + uint32_t presentModeCount = {}; + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {}; + }; + + template <> + struct CppType + { + using Type = LatencySurfaceCapabilitiesNV; + }; + + // wrapper struct for struct VkLayerProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLayerProperties.html + struct LayerProperties + { + using NativeType = VkLayerProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 LayerProperties( std::array const & layerName_ = {}, + uint32_t specVersion_ = {}, + uint32_t implementationVersion_ = {}, + std::array const & description_ = {} ) VULKAN_HPP_NOEXCEPT + : layerName{ layerName_ } + , specVersion{ specVersion_ } + , implementationVersion{ implementationVersion_ } + , description{ description_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT : LayerProperties( *reinterpret_cast( &rhs ) ) {} + + LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + LayerProperties & operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkLayerProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLayerProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkLayerProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( layerName, specVersion, implementationVersion, description ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = strcmp( layerName, rhs.layerName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = specVersion <=> rhs.specVersion; cmp != 0 ) + return cmp; + if ( auto cmp = implementationVersion <=> rhs.implementationVersion; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( strcmp( layerName, rhs.layerName ) == 0 ) && ( specVersion == rhs.specVersion ) && ( implementationVersion == rhs.implementationVersion ) && + ( strcmp( description, rhs.description ) == 0 ); + } + + bool operator!=( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D layerName = {}; + uint32_t specVersion = {}; + uint32_t implementationVersion = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + }; + + // wrapper struct for struct VkLayerSettingEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLayerSettingEXT.html + struct LayerSettingEXT + { + using NativeType = VkLayerSettingEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR LayerSettingEXT( const char * pLayerName_ = {}, + const char * pSettingName_ = {}, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_ = VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT::eBool32, + uint32_t valueCount_ = {}, + const void * pValues_ = {} ) VULKAN_HPP_NOEXCEPT + : pLayerName{ pLayerName_ } + , pSettingName{ pSettingName_ } + , type{ type_ } + , valueCount{ valueCount_ } + , pValues{ pValues_ } + { + } + + VULKAN_HPP_CONSTEXPR LayerSettingEXT( LayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LayerSettingEXT( VkLayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT : LayerSettingEXT( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + // NOTE: you need to provide the type because VULKAN_HPP_NAMESPACE::Bool32 and uint32_t are indistinguishable! 
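      // Editorial usage sketch (not part of the generated header): as the note above says, the
      // LayerSettingTypeEXT tag cannot be deduced from the values, so it is passed explicitly.
      // A hypothetical boolean setting, using the plain count/pointer constructor:
      //
      //   vk::Bool32 enableSync = VK_TRUE;
      //   vk::LayerSettingEXT setting( "VK_LAYER_KHRONOS_validation", "validate_sync",
      //                                vk::LayerSettingTypeEXT::eBool32, 1, &enableSync );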
+ LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) + { + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); + } + + LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) + { + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); + } + + LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) + { + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); + } + + LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) + { + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); + } + + LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) + { + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); + } + + LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) + { + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); + } + + LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) + { + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + LayerSettingEXT & operator=( LayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + LayerSettingEXT & operator=( VkLayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setPLayerName( const char * pLayerName_ ) VULKAN_HPP_NOEXCEPT + { + pLayerName = pLayerName_; + return *this; + } 
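      // Editorial note (not part of the generated header): these constructors and the setValues()
      // overloads below only store a pointer in pValues, so the value storage must outlive the
      // vkCreateInstance call that consumes the setting. Settings reach the loader and layers by
      // chaining vk::LayerSettingsCreateInfoEXT (defined further below) into instance creation:
      //
      //   vk::LayerSettingsCreateInfoEXT layerSettings( setting );   // `setting` as sketched above
      //   instanceCreateInfo.setPNext( &layerSettings );
      //   vk::Instance instance = vk::createInstance( instanceCreateInfo );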
+ + VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setPSettingName( const char * pSettingName_ ) VULKAN_HPP_NOEXCEPT + { + pSettingName = pSettingName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setType( VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setValueCount( uint32_t valueCount_ ) VULKAN_HPP_NOEXCEPT + { + valueCount = valueCount_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + valueCount = static_cast( values_.size() ); + pValues = values_.data(); + return *this; + } + + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + valueCount = static_cast( values_.size() ); + pValues = values_.data(); + return *this; + } + + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + valueCount = static_cast( values_.size() ); + pValues = values_.data(); + return *this; + } + + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + valueCount = static_cast( values_.size() ); + pValues = values_.data(); + return *this; + } + + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + valueCount = static_cast( values_.size() ); + pValues = values_.data(); + return *this; + } + + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + valueCount = static_cast( values_.size() ); + pValues = values_.data(); + return *this; + } + + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + valueCount = static_cast( values_.size() ); + pValues = values_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkLayerSettingEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLayerSettingEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLayerSettingEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkLayerSettingEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( pLayerName, pSettingName, type, valueCount, pValues ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( pLayerName != rhs.pLayerName ) + if ( auto cmp = strcmp( pLayerName, rhs.pLayerName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( pSettingName != rhs.pSettingName ) + if ( auto cmp = strcmp( pSettingName, rhs.pSettingName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = type <=> rhs.type; cmp != 0 ) + return cmp; + if ( auto cmp = valueCount <=> rhs.valueCount; cmp != 0 ) + return cmp; + if ( auto cmp = pValues <=> rhs.pValues; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( ( pLayerName == rhs.pLayerName ) || ( strcmp( pLayerName, rhs.pLayerName ) == 0 ) ) && + ( ( pSettingName == rhs.pSettingName ) || ( strcmp( pSettingName, rhs.pSettingName ) == 0 ) ) && ( type == rhs.type ) && + ( valueCount == rhs.valueCount ) && ( pValues == rhs.pValues ); + } + + bool operator!=( LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + const char * pLayerName = {}; + const char * pSettingName = {}; + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type = VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT::eBool32; + uint32_t valueCount = {}; + const void * pValues = {}; + }; + + // wrapper struct for struct VkLayerSettingsCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLayerSettingsCreateInfoEXT.html + struct LayerSettingsCreateInfoEXT + { + using NativeType = VkLayerSettingsCreateInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLayerSettingsCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR LayerSettingsCreateInfoEXT( uint32_t settingCount_ = {}, + const VULKAN_HPP_NAMESPACE::LayerSettingEXT * pSettings_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , settingCount{ settingCount_ } + , pSettings{ pSettings_ } + { + } + + VULKAN_HPP_CONSTEXPR LayerSettingsCreateInfoEXT( LayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LayerSettingsCreateInfoEXT( VkLayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : LayerSettingsCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + LayerSettingsCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & settings_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), settingCount( static_cast( settings_.size() ) ), pSettings( settings_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + LayerSettingsCreateInfoEXT & operator=( LayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + LayerSettingsCreateInfoEXT & operator=( VkLayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT & setSettingCount( uint32_t settingCount_ ) VULKAN_HPP_NOEXCEPT + { + settingCount = settingCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT & setPSettings( const VULKAN_HPP_NAMESPACE::LayerSettingEXT * pSettings_ ) VULKAN_HPP_NOEXCEPT + { + pSettings = pSettings_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + LayerSettingsCreateInfoEXT & + setSettings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & 
settings_ ) VULKAN_HPP_NOEXCEPT + { + settingCount = static_cast( settings_.size() ); + pSettings = settings_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkLayerSettingsCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLayerSettingsCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLayerSettingsCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkLayerSettingsCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, settingCount, pSettings ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( LayerSettingsCreateInfoEXT const & ) const = default; +#else + bool operator==( LayerSettingsCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( settingCount == rhs.settingCount ) && ( pSettings == rhs.pSettings ); +# endif + } + + bool operator!=( LayerSettingsCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLayerSettingsCreateInfoEXT; + const void * pNext = {}; + uint32_t settingCount = {}; + const VULKAN_HPP_NAMESPACE::LayerSettingEXT * pSettings = {}; + }; + + template <> + struct CppType + { + using Type = LayerSettingsCreateInfoEXT; + }; + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + // wrapper struct for struct VkMacOSSurfaceCreateInfoMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMacOSSurfaceCreateInfoMVK.html + struct MacOSSurfaceCreateInfoMVK + { + using NativeType = VkMacOSSurfaceCreateInfoMVK; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMacosSurfaceCreateInfoMVK; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {}, + const void * pView_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pView{ pView_ } + { + } + + VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + : MacOSSurfaceCreateInfoMVK( *reinterpret_cast( &rhs ) ) + { + } + + MacOSSurfaceCreateInfoMVK & operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MacOSSurfaceCreateInfoMVK & operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setFlags( 
VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT + { + pView = pView_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMacOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMacOSSurfaceCreateInfoMVK const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMacOSSurfaceCreateInfoMVK *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pView ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MacOSSurfaceCreateInfoMVK const & ) const = default; +# else + bool operator==( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView ); +# endif + } + + bool operator!=( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {}; + const void * pView = {}; + }; + + template <> + struct CppType + { + using Type = MacOSSurfaceCreateInfoMVK; + }; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + // wrapper struct for struct VkMappedMemoryRange, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMappedMemoryRange.html + struct MappedMemoryRange + { + using NativeType = VkMappedMemoryRange; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMappedMemoryRange; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MappedMemoryRange( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memory{ memory_ } + , offset{ offset_ } + , size{ size_ } + { + } + + VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT : MappedMemoryRange( *reinterpret_cast( &rhs ) ) {} + + MappedMemoryRange & operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MappedMemoryRange & operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) 
VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMappedMemoryRange const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMappedMemoryRange const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMappedMemoryRange *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, offset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MappedMemoryRange const & ) const = default; +#else + bool operator==( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( offset == rhs.offset ) && ( size == rhs.size ); +# endif + } + + bool operator!=( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + template <> + struct CppType + { + using Type = MappedMemoryRange; + }; + + // wrapper struct for struct VkMemoryAllocateFlagsInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryAllocateFlagsInfo.html + struct MemoryAllocateFlagsInfo + { + using NativeType = VkMemoryAllocateFlagsInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateFlagsInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {}, + uint32_t deviceMask_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , deviceMask{ deviceMask_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryAllocateFlagsInfo( *reinterpret_cast( &rhs ) ) + { + } + + MemoryAllocateFlagsInfo & operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryAllocateFlagsInfo & operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + 
} + + VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryAllocateFlagsInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryAllocateFlagsInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryAllocateFlagsInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, deviceMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryAllocateFlagsInfo const & ) const = default; +#else + bool operator==( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( deviceMask == rhs.deviceMask ); +# endif + } + + bool operator!=( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {}; + uint32_t deviceMask = {}; + }; + + template <> + struct CppType + { + using Type = MemoryAllocateFlagsInfo; + }; + + using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo; + + // wrapper struct for struct VkMemoryAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryAllocateInfo.html + struct MemoryAllocateInfo + { + using NativeType = VkMemoryAllocateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, + uint32_t memoryTypeIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , allocationSize{ allocationSize_ } + , memoryTypeIndex{ memoryTypeIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryAllocateInfo( *reinterpret_cast( &rhs ) ) {} + + MemoryAllocateInfo & operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryAllocateInfo & operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & 
setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT + { + allocationSize = allocationSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT + { + memoryTypeIndex = memoryTypeIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryAllocateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryAllocateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, allocationSize, memoryTypeIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryAllocateInfo const & ) const = default; +#else + bool operator==( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeIndex == rhs.memoryTypeIndex ); +# endif + } + + bool operator!=( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; + uint32_t memoryTypeIndex = {}; + }; + + template <> + struct CppType + { + using Type = MemoryAllocateInfo; + }; + + // wrapper struct for struct VkMemoryBarrier, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryBarrier.html + struct MemoryBarrier + { + using NativeType = VkMemoryBarrier; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcAccessMask{ srcAccessMask_ } + , dstAccessMask{ dstAccessMask_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryBarrier( *reinterpret_cast( &rhs ) ) {} + + MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + 
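    // Illustrative usage of the MemoryAllocateInfo wrapper above (a minimal sketch, not part
    // of the generated header): assumes `device` is a vk::Device, and that `memReqs` and
    // `typeIndex` were obtained from a memory-requirements query and the physical-device
    // memory properties, respectively.
    //
    //   vk::MemoryAllocateInfo allocInfo{};
    //   allocInfo.setAllocationSize( memReqs.size )
    //            .setMemoryTypeIndex( typeIndex );
    //   vk::DeviceMemory deviceMemory = device.allocateMemory( allocInfo );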
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryBarrier const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryBarrier *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcAccessMask, dstAccessMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryBarrier const & ) const = default; +#else + bool operator==( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ); +# endif + } + + bool operator!=( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + }; + + template <> + struct CppType + { + using Type = MemoryBarrier; + }; + + // wrapper struct for struct VkMemoryBarrierAccessFlags3KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryBarrierAccessFlags3KHR.html + struct MemoryBarrierAccessFlags3KHR + { + using NativeType = VkMemoryBarrierAccessFlags3KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrierAccessFlags3KHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryBarrierAccessFlags3KHR( VULKAN_HPP_NAMESPACE::AccessFlags3KHR srcAccessMask3_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags3KHR dstAccessMask3_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcAccessMask3{ srcAccessMask3_ } + , dstAccessMask3{ dstAccessMask3_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryBarrierAccessFlags3KHR( MemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryBarrierAccessFlags3KHR( VkMemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryBarrierAccessFlags3KHR( *reinterpret_cast( &rhs ) ) + { + } + + MemoryBarrierAccessFlags3KHR & operator=( MemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryBarrierAccessFlags3KHR & operator=( VkMemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryBarrierAccessFlags3KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryBarrierAccessFlags3KHR & setSrcAccessMask3( 
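    // Illustrative usage of the MemoryBarrier wrapper above (a sketch only): assumes `cmd`
    // is a vk::CommandBuffer in the recording state. A global memory barrier between a
    // transfer write and a fragment-shader read could be recorded as:
    //
    //   vk::MemoryBarrier barrier = vk::MemoryBarrier{}
    //                                 .setSrcAccessMask( vk::AccessFlagBits::eTransferWrite )
    //                                 .setDstAccessMask( vk::AccessFlagBits::eShaderRead );
    //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
    //                        vk::PipelineStageFlagBits::eFragmentShader,
    //                        {}, barrier, {}, {} );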
VULKAN_HPP_NAMESPACE::AccessFlags3KHR srcAccessMask3_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask3 = srcAccessMask3_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryBarrierAccessFlags3KHR & setDstAccessMask3( VULKAN_HPP_NAMESPACE::AccessFlags3KHR dstAccessMask3_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask3 = dstAccessMask3_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryBarrierAccessFlags3KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryBarrierAccessFlags3KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryBarrierAccessFlags3KHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryBarrierAccessFlags3KHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcAccessMask3, dstAccessMask3 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryBarrierAccessFlags3KHR const & ) const = default; +#else + bool operator==( MemoryBarrierAccessFlags3KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask3 == rhs.srcAccessMask3 ) && ( dstAccessMask3 == rhs.dstAccessMask3 ); +# endif + } + + bool operator!=( MemoryBarrierAccessFlags3KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrierAccessFlags3KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags3KHR srcAccessMask3 = {}; + VULKAN_HPP_NAMESPACE::AccessFlags3KHR dstAccessMask3 = {}; + }; + + template <> + struct CppType + { + using Type = MemoryBarrierAccessFlags3KHR; + }; + + // wrapper struct for struct VkMemoryDedicatedAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryDedicatedAllocateInfo.html + struct MemoryDedicatedAllocateInfo + { + using NativeType = VkMemoryDedicatedAllocateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , image{ image_ } + , buffer{ buffer_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryDedicatedAllocateInfo( *reinterpret_cast( &rhs ) ) + { + } + + MemoryDedicatedAllocateInfo & operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryDedicatedAllocateInfo & operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
MemoryDedicatedAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryDedicatedAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryDedicatedAllocateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryDedicatedAllocateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image, buffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryDedicatedAllocateInfo const & ) const = default; +#else + bool operator==( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer ); +# endif + } + + bool operator!=( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + + template <> + struct CppType + { + using Type = MemoryDedicatedAllocateInfo; + }; + + using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo; + + // wrapper struct for struct VkMemoryDedicatedRequirements, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryDedicatedRequirements.html + struct MemoryDedicatedRequirements + { + using NativeType = VkMemoryDedicatedRequirements; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , prefersDedicatedAllocation{ prefersDedicatedAllocation_ } + , requiresDedicatedAllocation{ requiresDedicatedAllocation_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryDedicatedRequirements( *reinterpret_cast( &rhs ) ) + { + } + + MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements 
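    // Illustrative usage of the MemoryDedicatedAllocateInfo wrapper above (a sketch only):
    // a dedicated allocation for an image can be requested by chaining this struct behind
    // MemoryAllocateInfo, e.g. via vk::StructureChain. `device`, `image`, `memReqs` and
    // `typeIndex` are assumed to exist.
    //
    //   vk::StructureChain<vk::MemoryAllocateInfo, vk::MemoryDedicatedAllocateInfo> chain{
    //     vk::MemoryAllocateInfo{ memReqs.size, typeIndex },
    //     vk::MemoryDedicatedAllocateInfo{ image, {} }
    //   };
    //   vk::DeviceMemory deviceMemory = device.allocateMemory( chain.get<vk::MemoryAllocateInfo>() );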
const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryDedicatedRequirements const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryDedicatedRequirements const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryDedicatedRequirements *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, prefersDedicatedAllocation, requiresDedicatedAllocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryDedicatedRequirements const & ) const = default; +#else + bool operator==( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) && + ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation ); +# endif + } + + bool operator!=( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {}; + VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {}; + }; + + template <> + struct CppType + { + using Type = MemoryDedicatedRequirements; + }; + + using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements; + + // wrapper struct for struct VkMemoryFdPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryFdPropertiesKHR.html + struct MemoryFdPropertiesKHR + { + using NativeType = VkMemoryFdPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryTypeBits{ memoryTypeBits_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryFdPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + MemoryFdPropertiesKHR & operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryFdPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryFdPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryFdPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryTypeBits ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryFdPropertiesKHR const & ) const = default; +#else + bool operator==( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + }; + + template <> + struct CppType + { + using Type = MemoryFdPropertiesKHR; + }; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + // wrapper struct for struct VkMemoryGetAndroidHardwareBufferInfoANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetAndroidHardwareBufferInfoANDROID.html + struct MemoryGetAndroidHardwareBufferInfoANDROID + { + using NativeType = VkMemoryGetAndroidHardwareBufferInfoANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memory{ memory_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetAndroidHardwareBufferInfoANDROID( *reinterpret_cast( &rhs ) ) + { + } + + MemoryGetAndroidHardwareBufferInfoANDROID & operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryGetAndroidHardwareBufferInfoANDROID & operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryGetAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetAndroidHardwareBufferInfoANDROID const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator 
VkMemoryGetAndroidHardwareBufferInfoANDROID *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const & ) const = default; +# else + bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ); +# endif + } + + bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + }; + + template <> + struct CppType + { + using Type = MemoryGetAndroidHardwareBufferInfoANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + // wrapper struct for struct VkMemoryGetFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetFdInfoKHR.html + struct MemoryGetFdInfoKHR + { + using NativeType = VkMemoryGetFdInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memory{ memory_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryGetFdInfoKHR( *reinterpret_cast( &rhs ) ) {} + + MemoryGetFdInfoKHR & operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryGetFdInfoKHR & operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + + operator VkMemoryGetFdInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetFdInfoKHR const & ) const = default; +#else + bool operator==( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = MemoryGetFdInfoKHR; + }; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + // wrapper struct for struct VkMemoryGetMetalHandleInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetMetalHandleInfoEXT.html + struct MemoryGetMetalHandleInfoEXT + { + using NativeType = VkMemoryGetMetalHandleInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetMetalHandleInfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetMetalHandleInfoEXT( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memory{ memory_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryGetMetalHandleInfoEXT( MemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetMetalHandleInfoEXT( VkMemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetMetalHandleInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MemoryGetMetalHandleInfoEXT & operator=( MemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryGetMetalHandleInfoEXT & operator=( VkMemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetMetalHandleInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetMetalHandleInfoEXT & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetMetalHandleInfoEXT & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator 
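    // Illustrative usage of the MemoryGetFdInfoKHR wrapper above (a sketch only): assumes the
    // device was created with VK_KHR_external_memory_fd enabled, `memory` was allocated as
    // exportable, and the dispatcher has the extension entry points loaded.
    //
    //   vk::MemoryGetFdInfoKHR getFdInfo{ memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd };
    //   int fd = device.getMemoryFdKHR( getFdInfo );   // the caller owns the returned fd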
VkMemoryGetMetalHandleInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetMetalHandleInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetMetalHandleInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryGetMetalHandleInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, handleType ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetMetalHandleInfoEXT const & ) const = default; +# else + bool operator==( MemoryGetMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( MemoryGetMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetMetalHandleInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = MemoryGetMetalHandleInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + // wrapper struct for struct VkMemoryGetRemoteAddressInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetRemoteAddressInfoNV.html + struct MemoryGetRemoteAddressInfoNV + { + using NativeType = VkMemoryGetRemoteAddressInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetRemoteAddressInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memory{ memory_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetRemoteAddressInfoNV( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetRemoteAddressInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + MemoryGetRemoteAddressInfoNV & operator=( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryGetRemoteAddressInfoNV & operator=( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setMemory( 
VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryGetRemoteAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetRemoteAddressInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetRemoteAddressInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryGetRemoteAddressInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetRemoteAddressInfoNV const & ) const = default; +#else + bool operator==( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetRemoteAddressInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = MemoryGetRemoteAddressInfoNV; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + // wrapper struct for struct VkMemoryGetWin32HandleInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetWin32HandleInfoKHR.html + struct MemoryGetWin32HandleInfoKHR + { + using NativeType = VkMemoryGetWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memory{ memory_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + MemoryGetWin32HandleInfoKHR & operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryGetWin32HandleInfoKHR & operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = 
*reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryGetWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, handleType ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = MemoryGetWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + // wrapper struct for struct VkMemoryGetZirconHandleInfoFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetZirconHandleInfoFUCHSIA.html + struct MemoryGetZirconHandleInfoFUCHSIA + { + using NativeType = VkMemoryGetZirconHandleInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memory{ memory_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
MemoryGetZirconHandleInfoFUCHSIA( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + MemoryGetZirconHandleInfoFUCHSIA & operator=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryGetZirconHandleInfoFUCHSIA & operator=( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetZirconHandleInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryGetZirconHandleInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, handleType ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetZirconHandleInfoFUCHSIA const & ) const = default; +# else + bool operator==( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = MemoryGetZirconHandleInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + // wrapper struct for struct VkMemoryHeap, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryHeap.html + struct MemoryHeap + { + using NativeType = VkMemoryHeap; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : size{ size_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryHeap( *reinterpret_cast( &rhs ) ) {} + + MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryHeap & operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryHeap const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryHeap const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryHeap *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( size, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryHeap const & ) const = default; +#else + bool operator==( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( size == rhs.size ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {}; + }; + + // wrapper struct for struct VkMemoryHostPointerPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryHostPointerPropertiesEXT.html + struct MemoryHostPointerPropertiesEXT + { + using NativeType = VkMemoryHostPointerPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryTypeBits{ memoryTypeBits_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryHostPointerPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + MemoryHostPointerPropertiesEXT & operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryHostPointerPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryHostPointerPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryHostPointerPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + 
return std::tie( sType, pNext, memoryTypeBits ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryHostPointerPropertiesEXT const & ) const = default; +#else + bool operator==( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + }; + + template <> + struct CppType + { + using Type = MemoryHostPointerPropertiesEXT; + }; + + // wrapper struct for struct VkMemoryMapInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryMapInfo.html + struct MemoryMapInfo + { + using NativeType = VkMemoryMapInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryMapInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryMapInfo( VULKAN_HPP_NAMESPACE::MemoryMapFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , memory{ memory_ } + , offset{ offset_ } + , size{ size_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryMapInfo( MemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryMapInfo( VkMemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryMapInfo( *reinterpret_cast( &rhs ) ) {} + + MemoryMapInfo & operator=( MemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryMapInfo & operator=( VkMemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryMapFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryMapInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryMapInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryMapInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryMapInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, memory, offset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryMapInfo const & ) const = default; +#else + bool operator==( MemoryMapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( memory == rhs.memory ) && ( offset == rhs.offset ) && + ( size == rhs.size ); +# endif + } + + bool operator!=( MemoryMapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryMapInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + template <> + struct CppType + { + using Type = MemoryMapInfo; + }; + + using MemoryMapInfoKHR = MemoryMapInfo; + + // wrapper struct for struct VkMemoryMapPlacedInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryMapPlacedInfoEXT.html + struct MemoryMapPlacedInfoEXT + { + using NativeType = VkMemoryMapPlacedInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryMapPlacedInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryMapPlacedInfoEXT( void * pPlacedAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pPlacedAddress{ pPlacedAddress_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryMapPlacedInfoEXT( MemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryMapPlacedInfoEXT( VkMemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryMapPlacedInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MemoryMapPlacedInfoEXT & operator=( MemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryMapPlacedInfoEXT & operator=( VkMemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryMapPlacedInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryMapPlacedInfoEXT & setPPlacedAddress( void * pPlacedAddress_ ) VULKAN_HPP_NOEXCEPT + { + pPlacedAddress = pPlacedAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryMapPlacedInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryMapPlacedInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryMapPlacedInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryMapPlacedInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( 
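    // Illustrative usage of the MemoryMapInfo wrapper above (a sketch only, assuming a
    // Vulkan 1.4 / VK_KHR_map_memory2-capable device and loader):
    //
    //   vk::MemoryMapInfo mapInfo{ {}, memory, 0, VK_WHOLE_SIZE };
    //   void * data = device.mapMemory2( mapInfo );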
sType, pNext, pPlacedAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryMapPlacedInfoEXT const & ) const = default; +#else + bool operator==( MemoryMapPlacedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pPlacedAddress == rhs.pPlacedAddress ); +# endif + } + + bool operator!=( MemoryMapPlacedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryMapPlacedInfoEXT; + const void * pNext = {}; + void * pPlacedAddress = {}; + }; + + template <> + struct CppType + { + using Type = MemoryMapPlacedInfoEXT; + }; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + // wrapper struct for struct VkMemoryMetalHandlePropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryMetalHandlePropertiesEXT.html + struct MemoryMetalHandlePropertiesEXT + { + using NativeType = VkMemoryMetalHandlePropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryMetalHandlePropertiesEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryMetalHandlePropertiesEXT( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryTypeBits{ memoryTypeBits_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryMetalHandlePropertiesEXT( MemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryMetalHandlePropertiesEXT( VkMemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryMetalHandlePropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + MemoryMetalHandlePropertiesEXT & operator=( MemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryMetalHandlePropertiesEXT & operator=( VkMemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryMetalHandlePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryMetalHandlePropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryMetalHandlePropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryMetalHandlePropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryTypeBits ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryMetalHandlePropertiesEXT const & ) const = default; +# else + bool operator==( MemoryMetalHandlePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( MemoryMetalHandlePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryMetalHandlePropertiesEXT; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + }; + + template <> + struct CppType + { + using Type = MemoryMetalHandlePropertiesEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + // wrapper struct for struct VkMemoryOpaqueCaptureAddressAllocateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryOpaqueCaptureAddressAllocateInfo.html + struct MemoryOpaqueCaptureAddressAllocateInfo + { + using NativeType = VkMemoryOpaqueCaptureAddressAllocateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , opaqueCaptureAddress{ opaqueCaptureAddress_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryOpaqueCaptureAddressAllocateInfo( *reinterpret_cast( &rhs ) ) + { + } + + MemoryOpaqueCaptureAddressAllocateInfo & operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryOpaqueCaptureAddressAllocateInfo & operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT + { + opaqueCaptureAddress = opaqueCaptureAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryOpaqueCaptureAddressAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryOpaqueCaptureAddressAllocateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryOpaqueCaptureAddressAllocateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, opaqueCaptureAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const & ) const = default; +#else + bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress ); +# endif + } + + bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo 
const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo; + const void * pNext = {}; + uint64_t opaqueCaptureAddress = {}; + }; + + template <> + struct CppType + { + using Type = MemoryOpaqueCaptureAddressAllocateInfo; + }; + + using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo; + + // wrapper struct for struct VkMemoryPriorityAllocateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryPriorityAllocateInfoEXT.html + struct MemoryPriorityAllocateInfoEXT + { + using NativeType = VkMemoryPriorityAllocateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryPriorityAllocateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , priority{ priority_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryPriorityAllocateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MemoryPriorityAllocateInfoEXT & operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryPriorityAllocateInfoEXT & operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) VULKAN_HPP_NOEXCEPT + { + priority = priority_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryPriorityAllocateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryPriorityAllocateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryPriorityAllocateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, priority ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryPriorityAllocateInfoEXT const & ) const = default; +#else + bool operator==( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priority == rhs.priority ); +# endif + } + + bool operator!=( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eMemoryPriorityAllocateInfoEXT; + const void * pNext = {}; + float priority = {}; + }; + + template <> + struct CppType + { + using Type = MemoryPriorityAllocateInfoEXT; + }; + + // wrapper struct for struct VkMemoryRequirements, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryRequirements.html + struct MemoryRequirements + { + using NativeType = VkMemoryRequirements; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryRequirements( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {}, + uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT + : size{ size_ } + , alignment{ alignment_ } + , memoryTypeBits{ memoryTypeBits_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryRequirements( *reinterpret_cast( &rhs ) ) {} + + MemoryRequirements & operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryRequirements & operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryRequirements const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryRequirements *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( size, alignment, memoryTypeBits ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryRequirements const & ) const = default; +#else + bool operator==( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( size == rhs.size ) && ( alignment == rhs.alignment ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceSize alignment = {}; + uint32_t memoryTypeBits = {}; + }; + + // wrapper struct for struct VkMemoryRequirements2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryRequirements2.html + struct MemoryRequirements2 + { + using NativeType = VkMemoryRequirements2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryRequirements{ memoryRequirements_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT 
: MemoryRequirements2( *reinterpret_cast( &rhs ) ) + { + } + + MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryRequirements2 & operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryRequirements2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryRequirements2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryRequirements ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryRequirements2 const & ) const = default; +#else + bool operator==( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements ); +# endif + } + + bool operator!=( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {}; + }; + + template <> + struct CppType + { + using Type = MemoryRequirements2; + }; + + using MemoryRequirements2KHR = MemoryRequirements2; + + // wrapper struct for struct VkMemoryType, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryType.html + struct MemoryType + { + using NativeType = VkMemoryType; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : propertyFlags{ propertyFlags_ } + , heapIndex{ heapIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryType( *reinterpret_cast( &rhs ) ) {} + + MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryType & operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryType const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryType &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryType const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryType *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( propertyFlags, heapIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryType const & ) const = 
default; +#else + bool operator==( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( propertyFlags == rhs.propertyFlags ) && ( heapIndex == rhs.heapIndex ); +# endif + } + + bool operator!=( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {}; + uint32_t heapIndex = {}; + }; + + // wrapper struct for struct VkMemoryUnmapInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryUnmapInfo.html + struct MemoryUnmapInfo + { + using NativeType = VkMemoryUnmapInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryUnmapInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryUnmapInfo( VULKAN_HPP_NAMESPACE::MemoryUnmapFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , memory{ memory_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryUnmapInfo( MemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryUnmapInfo( VkMemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryUnmapInfo( *reinterpret_cast( &rhs ) ) {} + + MemoryUnmapInfo & operator=( MemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryUnmapInfo & operator=( VkMemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryUnmapFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryUnmapInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryUnmapInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryUnmapInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryUnmapInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, memory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryUnmapInfo const & ) const = default; +#else + bool operator==( MemoryUnmapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( memory == rhs.memory ); +# endif + } + + bool operator!=( MemoryUnmapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eMemoryUnmapInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryUnmapFlags flags = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + }; + + template <> + struct CppType + { + using Type = MemoryUnmapInfo; + }; + + using MemoryUnmapInfoKHR = MemoryUnmapInfo; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + // wrapper struct for struct VkMemoryWin32HandlePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryWin32HandlePropertiesKHR.html + struct MemoryWin32HandlePropertiesKHR + { + using NativeType = VkMemoryWin32HandlePropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryWin32HandlePropertiesKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryTypeBits{ memoryTypeBits_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryWin32HandlePropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + MemoryWin32HandlePropertiesKHR & operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryWin32HandlePropertiesKHR & operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryWin32HandlePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryWin32HandlePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryWin32HandlePropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryTypeBits ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryWin32HandlePropertiesKHR const & ) const = default; +# else + bool operator==( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + }; + + template <> + struct CppType + { + using Type = MemoryWin32HandlePropertiesKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + // wrapper struct for struct VkMemoryZirconHandlePropertiesFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryZirconHandlePropertiesFUCHSIA.html + struct MemoryZirconHandlePropertiesFUCHSIA + { + using 
NativeType = VkMemoryZirconHandlePropertiesFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryTypeBits{ memoryTypeBits_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryZirconHandlePropertiesFUCHSIA( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryZirconHandlePropertiesFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + MemoryZirconHandlePropertiesFUCHSIA & operator=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryZirconHandlePropertiesFUCHSIA & operator=( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryZirconHandlePropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryZirconHandlePropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryZirconHandlePropertiesFUCHSIA const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMemoryZirconHandlePropertiesFUCHSIA *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryTypeBits ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryZirconHandlePropertiesFUCHSIA const & ) const = default; +# else + bool operator==( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + }; + + template <> + struct CppType + { + using Type = MemoryZirconHandlePropertiesFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + // wrapper struct for struct VkMetalSurfaceCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMetalSurfaceCreateInfoEXT.html + struct MetalSurfaceCreateInfoEXT + { + using NativeType = VkMetalSurfaceCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMetalSurfaceCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {}, + const CAMetalLayer * pLayer_ = {}, + const void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pLayer{ pLayer_ } + { + } + + VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MetalSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MetalSurfaceCreateInfoEXT & operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MetalSurfaceCreateInfoEXT & operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer * pLayer_ ) VULKAN_HPP_NOEXCEPT + { + pLayer = pLayer_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMetalSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMetalSurfaceCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMetalSurfaceCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pLayer ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MetalSurfaceCreateInfoEXT const & ) const = default; +# else + bool operator==( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pLayer == rhs.pLayer ); +# endif + } + + bool operator!=( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {}; + const CAMetalLayer * pLayer = {}; + }; + + template <> + struct CppType + { + using Type = MetalSurfaceCreateInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + // wrapper struct for struct VkMicromapBuildInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapBuildInfoEXT.html + struct MicromapBuildInfoEXT + { + using NativeType = VkMicromapBuildInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapBuildInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap, + 
VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT::eBuild, + VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_ = {}, + uint32_t usageCountsCount_ = {}, + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ = {}, + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , flags{ flags_ } + , mode{ mode_ } + , dstMicromap{ dstMicromap_ } + , usageCountsCount{ usageCountsCount_ } + , pUsageCounts{ pUsageCounts_ } + , ppUsageCounts{ ppUsageCounts_ } + , data{ data_ } + , scratchData{ scratchData_ } + , triangleArray{ triangleArray_ } + , triangleArrayStride{ triangleArrayStride_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT( MicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MicromapBuildInfoEXT( VkMicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MicromapBuildInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MicromapBuildInfoEXT( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_, + VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_, + VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & usageCounts_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pUsageCounts_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , type( type_ ) + , flags( flags_ ) + , mode( mode_ ) + , dstMicromap( dstMicromap_ ) + , usageCountsCount( static_cast( !usageCounts_.empty() ? 
usageCounts_.size() : pUsageCounts_.size() ) ) + , pUsageCounts( usageCounts_.data() ) + , ppUsageCounts( pUsageCounts_.data() ) + , data( data_ ) + , scratchData( scratchData_ ) + , triangleArray( triangleArray_ ) + , triangleArrayStride( triangleArrayStride_ ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( ( !usageCounts_.empty() + !pUsageCounts_.empty() ) <= 1 ); +# else + if ( 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() ) ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::MicromapBuildInfoEXT::MicromapBuildInfoEXT: 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + MicromapBuildInfoEXT & operator=( MicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MicromapBuildInfoEXT & operator=( VkMicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setType( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setMode( VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setDstMicromap( VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_ ) VULKAN_HPP_NOEXCEPT + { + dstMicromap = dstMicromap_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = usageCountsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + pUsageCounts = pUsageCounts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MicromapBuildInfoEXT & + setUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & usageCounts_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = static_cast( usageCounts_.size() ); + pUsageCounts = usageCounts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPpUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + ppUsageCounts = ppUsageCounts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MicromapBuildInfoEXT & setPUsageCounts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = static_cast( pUsageCounts_.size() ); + ppUsageCounts = pUsageCounts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & 
scratchData_ ) VULKAN_HPP_NOEXCEPT + { + scratchData = scratchData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & + setTriangleArray( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & triangleArray_ ) VULKAN_HPP_NOEXCEPT + { + triangleArray = triangleArray_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setTriangleArrayStride( VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ ) VULKAN_HPP_NOEXCEPT + { + triangleArrayStride = triangleArrayStride_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMicromapBuildInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMicromapBuildInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMicromapBuildInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMicromapBuildInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, type, flags, mode, dstMicromap, usageCountsCount, pUsageCounts, ppUsageCounts, data, scratchData, triangleArray, triangleArrayStride ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapBuildInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MicromapTypeEXT type = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap; + VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT::eBuild; + VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap = {}; + uint32_t usageCountsCount = {}; + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts = {}; + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray = {}; + VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride = {}; + }; + + template <> + struct CppType + { + using Type = MicromapBuildInfoEXT; + }; + + // wrapper struct for struct VkMicromapBuildSizesInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapBuildSizesInfoEXT.html + struct MicromapBuildSizesInfoEXT + { + using NativeType = VkMicromapBuildSizesInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapBuildSizesInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MicromapBuildSizesInfoEXT( VULKAN_HPP_NAMESPACE::DeviceSize micromapSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 discardable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , micromapSize{ micromapSize_ } + , buildScratchSize{ buildScratchSize_ } + , discardable{ discardable_ } + { + } + + VULKAN_HPP_CONSTEXPR MicromapBuildSizesInfoEXT( MicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MicromapBuildSizesInfoEXT( VkMicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MicromapBuildSizesInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MicromapBuildSizesInfoEXT & operator=( 
MicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MicromapBuildSizesInfoEXT & operator=( VkMicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setMicromapSize( VULKAN_HPP_NAMESPACE::DeviceSize micromapSize_ ) VULKAN_HPP_NOEXCEPT + { + micromapSize = micromapSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setBuildScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ ) VULKAN_HPP_NOEXCEPT + { + buildScratchSize = buildScratchSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setDiscardable( VULKAN_HPP_NAMESPACE::Bool32 discardable_ ) VULKAN_HPP_NOEXCEPT + { + discardable = discardable_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMicromapBuildSizesInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMicromapBuildSizesInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMicromapBuildSizesInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMicromapBuildSizesInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, micromapSize, buildScratchSize, discardable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MicromapBuildSizesInfoEXT const & ) const = default; +#else + bool operator==( MicromapBuildSizesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( micromapSize == rhs.micromapSize ) && ( buildScratchSize == rhs.buildScratchSize ) && + ( discardable == rhs.discardable ); +# endif + } + + bool operator!=( MicromapBuildSizesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapBuildSizesInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize micromapSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 discardable = {}; + }; + + template <> + struct CppType + { + using Type = MicromapBuildSizesInfoEXT; + }; + + // wrapper struct for struct VkMicromapCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapCreateInfoEXT.html + struct MicromapCreateInfoEXT + { + using NativeType = VkMicromapCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MicromapCreateInfoEXT( VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, 
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap, + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , createFlags{ createFlags_ } + , buffer{ buffer_ } + , offset{ offset_ } + , size{ size_ } + , type{ type_ } + , deviceAddress{ deviceAddress_ } + { + } + + VULKAN_HPP_CONSTEXPR MicromapCreateInfoEXT( MicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MicromapCreateInfoEXT( VkMicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MicromapCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MicromapCreateInfoEXT & operator=( MicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MicromapCreateInfoEXT & operator=( VkMicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setCreateFlags( VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags_ ) VULKAN_HPP_NOEXCEPT + { + createFlags = createFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setType( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMicromapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMicromapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMicromapCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMicromapCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, createFlags, buffer, offset, size, type, deviceAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MicromapCreateInfoEXT const & ) const = default; +#else + bool operator==( MicromapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createFlags == rhs.createFlags ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && + ( size == rhs.size ) && ( type == rhs.type ) && ( 
deviceAddress == rhs.deviceAddress ); +# endif + } + + bool operator!=( MicromapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::MicromapTypeEXT type = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + }; + + template <> + struct CppType + { + using Type = MicromapCreateInfoEXT; + }; + + // wrapper struct for struct VkMicromapTriangleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapTriangleEXT.html + struct MicromapTriangleEXT + { + using NativeType = VkMicromapTriangleEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MicromapTriangleEXT( uint32_t dataOffset_ = {}, uint16_t subdivisionLevel_ = {}, uint16_t format_ = {} ) VULKAN_HPP_NOEXCEPT + : dataOffset{ dataOffset_ } + , subdivisionLevel{ subdivisionLevel_ } + , format{ format_ } + { + } + + VULKAN_HPP_CONSTEXPR MicromapTriangleEXT( MicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MicromapTriangleEXT( VkMicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MicromapTriangleEXT( *reinterpret_cast( &rhs ) ) + { + } + + MicromapTriangleEXT & operator=( MicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MicromapTriangleEXT & operator=( VkMicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setDataOffset( uint32_t dataOffset_ ) VULKAN_HPP_NOEXCEPT + { + dataOffset = dataOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setSubdivisionLevel( uint16_t subdivisionLevel_ ) VULKAN_HPP_NOEXCEPT + { + subdivisionLevel = subdivisionLevel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setFormat( uint16_t format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMicromapTriangleEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMicromapTriangleEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMicromapTriangleEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMicromapTriangleEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( dataOffset, subdivisionLevel, format ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MicromapTriangleEXT const & ) const = default; +#else + bool operator==( MicromapTriangleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( dataOffset == rhs.dataOffset ) && ( subdivisionLevel == rhs.subdivisionLevel ) && ( format == rhs.format ); +# 
endif + } + + bool operator!=( MicromapTriangleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t dataOffset = {}; + uint16_t subdivisionLevel = {}; + uint16_t format = {}; + }; + + // wrapper struct for struct VkMicromapVersionInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapVersionInfoEXT.html + struct MicromapVersionInfoEXT + { + using NativeType = VkMicromapVersionInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapVersionInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MicromapVersionInfoEXT( const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pVersionData{ pVersionData_ } + { + } + + VULKAN_HPP_CONSTEXPR MicromapVersionInfoEXT( MicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MicromapVersionInfoEXT( VkMicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MicromapVersionInfoEXT( *reinterpret_cast<MicromapVersionInfoEXT const *>( &rhs ) ) + { + } + + MicromapVersionInfoEXT & operator=( MicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MicromapVersionInfoEXT & operator=( VkMicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT + { + pVersionData = pVersionData_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMicromapVersionInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkMicromapVersionInfoEXT *>( this ); + } + + operator VkMicromapVersionInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkMicromapVersionInfoEXT *>( this ); + } + + operator VkMicromapVersionInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<const VkMicromapVersionInfoEXT *>( this ); + } + + operator VkMicromapVersionInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<VkMicromapVersionInfoEXT *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const uint8_t * const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pVersionData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MicromapVersionInfoEXT const & ) const = default; +#else + bool operator==( MicromapVersionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVersionData == rhs.pVersionData ); +# endif + } + + bool operator!=( MicromapVersionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapVersionInfoEXT; + const void * pNext = {}; + const uint8_t * pVersionData = {}; + }; + + template <> + struct CppType<StructureType, StructureType::eMicromapVersionInfoEXT> + { + using Type = MicromapVersionInfoEXT; + }; + + // wrapper struct for struct VkMultiDrawIndexedInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultiDrawIndexedInfoEXT.html + 
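// NOTE (editorial, illustrative only; not part of the generated header): with the VK_EXT_multi_draw extension enabled, an array of these structs is consumed by vkCmdDrawMultiIndexedEXT in a single call. A minimal sketch, assuming `cmd` is a hypothetical VkCommandBuffer in the recording state: + //   VkMultiDrawIndexedInfoEXT draws[2] = { { 0, 6, 0 }, { 6, 6, 100 } };            // { firstIndex, indexCount, vertexOffset } per draw + //   vkCmdDrawMultiIndexedEXT( cmd, 2, draws, 1, 0, sizeof( draws[0] ), nullptr );   // instanceCount = 1, firstInstance = 0, per-draw vertexOffset used + 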
struct MultiDrawIndexedInfoEXT + { + using NativeType = VkMultiDrawIndexedInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( uint32_t firstIndex_ = {}, uint32_t indexCount_ = {}, int32_t vertexOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : firstIndex{ firstIndex_ } + , indexCount{ indexCount_ } + , vertexOffset{ vertexOffset_ } + { + } + + VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultiDrawIndexedInfoEXT( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MultiDrawIndexedInfoEXT( *reinterpret_cast<MultiDrawIndexedInfoEXT const *>( &rhs ) ) + { + } + + MultiDrawIndexedInfoEXT & operator=( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MultiDrawIndexedInfoEXT & operator=( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT + { + firstIndex = firstIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT + { + indexCount = indexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT + { + vertexOffset = vertexOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMultiDrawIndexedInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( this ); + } + + operator VkMultiDrawIndexedInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkMultiDrawIndexedInfoEXT *>( this ); + } + + operator VkMultiDrawIndexedInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( this ); + } + + operator VkMultiDrawIndexedInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<VkMultiDrawIndexedInfoEXT *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<uint32_t const &, uint32_t const &, int32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( firstIndex, indexCount, vertexOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultiDrawIndexedInfoEXT const & ) const = default; +#else + bool operator==( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( firstIndex == rhs.firstIndex ) && ( indexCount == rhs.indexCount ) && ( vertexOffset == rhs.vertexOffset ); +# endif + } + + bool operator!=( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t firstIndex = {}; + uint32_t indexCount = {}; + int32_t vertexOffset = {}; + }; + + // wrapper struct for struct VkMultiDrawInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultiDrawInfoEXT.html + struct MultiDrawInfoEXT + { + using NativeType = VkMultiDrawInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( uint32_t firstVertex_ = {}, uint32_t vertexCount_ = {} ) VULKAN_HPP_NOEXCEPT + : firstVertex{ firstVertex_ } + , vertexCount{ vertexCount_ } + { + } + + VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( MultiDrawInfoEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + + MultiDrawInfoEXT( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MultiDrawInfoEXT( *reinterpret_cast( &rhs ) ) {} + + MultiDrawInfoEXT & operator=( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MultiDrawInfoEXT & operator=( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT + { + firstVertex = firstVertex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexCount = vertexCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMultiDrawInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultiDrawInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultiDrawInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMultiDrawInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( firstVertex, vertexCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultiDrawInfoEXT const & ) const = default; +#else + bool operator==( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( firstVertex == rhs.firstVertex ) && ( vertexCount == rhs.vertexCount ); +# endif + } + + bool operator!=( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t firstVertex = {}; + uint32_t vertexCount = {}; + }; + + // wrapper struct for struct VkMultisamplePropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultisamplePropertiesEXT.html + struct MultisamplePropertiesEXT + { + using NativeType = VkMultisamplePropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxSampleLocationGridSize{ maxSampleLocationGridSize_ } + { + } + + VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MultisamplePropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MultisamplePropertiesEXT & operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMultisamplePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + 
} + + operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultisamplePropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMultisamplePropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxSampleLocationGridSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultisamplePropertiesEXT const & ) const = default; +#else + bool operator==( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ); +# endif + } + + bool operator!=( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; + }; + + template <> + struct CppType + { + using Type = MultisamplePropertiesEXT; + }; + + // wrapper struct for struct VkMultisampledRenderToSingleSampledInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultisampledRenderToSingleSampledInfoEXT.html + struct MultisampledRenderToSingleSampledInfoEXT + { + using NativeType = VkMultisampledRenderToSingleSampledInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisampledRenderToSingleSampledInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + MultisampledRenderToSingleSampledInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , multisampledRenderToSingleSampledEnable{ multisampledRenderToSingleSampledEnable_ } + , rasterizationSamples{ rasterizationSamples_ } + { + } + + VULKAN_HPP_CONSTEXPR MultisampledRenderToSingleSampledInfoEXT( MultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultisampledRenderToSingleSampledInfoEXT( VkMultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MultisampledRenderToSingleSampledInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MultisampledRenderToSingleSampledInfoEXT & operator=( MultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MultisampledRenderToSingleSampledInfoEXT & operator=( VkMultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & + 
setMultisampledRenderToSingleSampledEnable( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable_ ) VULKAN_HPP_NOEXCEPT + { + multisampledRenderToSingleSampledEnable = multisampledRenderToSingleSampledEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & + setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationSamples = rasterizationSamples_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMultisampledRenderToSingleSampledInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultisampledRenderToSingleSampledInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultisampledRenderToSingleSampledInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMultisampledRenderToSingleSampledInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multisampledRenderToSingleSampledEnable, rasterizationSamples ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultisampledRenderToSingleSampledInfoEXT const & ) const = default; +#else + bool operator==( MultisampledRenderToSingleSampledInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multisampledRenderToSingleSampledEnable == rhs.multisampledRenderToSingleSampledEnable ) && + ( rasterizationSamples == rhs.rasterizationSamples ); +# endif + } + + bool operator!=( MultisampledRenderToSingleSampledInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisampledRenderToSingleSampledInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + }; + + template <> + struct CppType + { + using Type = MultisampledRenderToSingleSampledInfoEXT; + }; + + // wrapper struct for struct VkMultiviewPerViewAttributesInfoNVX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultiviewPerViewAttributesInfoNVX.html + struct MultiviewPerViewAttributesInfoNVX + { + using NativeType = VkMultiviewPerViewAttributesInfoNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultiviewPerViewAttributesInfoNVX; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , perViewAttributes{ perViewAttributes_ } + , perViewAttributesPositionXOnly{ perViewAttributesPositionXOnly_ } + { + } + + VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
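+    // Usage sketch (illustrative only, not part of the generated header): MultisampledRenderToSingleSampledInfoEXT
+    // comes from VK_EXT_multisampled_render_to_single_sampled and is chained through the pNext pointer of a
+    // structure it extends (see the registry link above for the exact list). A minimal, hedged example:
+    //
+    //   vk::MultisampledRenderToSingleSampledInfoEXT msrtss{};
+    //   msrtss.setMultisampledRenderToSingleSampledEnable( VK_TRUE )
+    //         .setRasterizationSamples( vk::SampleCountFlagBits::e4 );
+    //   extendedStruct.pNext = &msrtss;   // `extendedStruct` stands in for whichever extensible structure is used
+    //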
MultiviewPerViewAttributesInfoNVX( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : MultiviewPerViewAttributesInfoNVX( *reinterpret_cast( &rhs ) ) + { + } + + MultiviewPerViewAttributesInfoNVX & operator=( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MultiviewPerViewAttributesInfoNVX & operator=( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPerViewAttributes( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ ) VULKAN_HPP_NOEXCEPT + { + perViewAttributes = perViewAttributes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & + setPerViewAttributesPositionXOnly( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ ) VULKAN_HPP_NOEXCEPT + { + perViewAttributesPositionXOnly = perViewAttributesPositionXOnly_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMultiviewPerViewAttributesInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultiviewPerViewAttributesInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultiviewPerViewAttributesInfoNVX const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMultiviewPerViewAttributesInfoNVX *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, perViewAttributes, perViewAttributesPositionXOnly ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultiviewPerViewAttributesInfoNVX const & ) const = default; +#else + bool operator==( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewAttributes == rhs.perViewAttributes ) && + ( perViewAttributesPositionXOnly == rhs.perViewAttributesPositionXOnly ); +# endif + } + + bool operator!=( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultiviewPerViewAttributesInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes = {}; + VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly = {}; + }; + + template <> + struct CppType + { + using Type = MultiviewPerViewAttributesInfoNVX; + }; + + // wrapper struct for struct VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM.html + struct MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM + { + using NativeType = VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( uint32_t perViewRenderAreaCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pPerViewRenderAreas_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , perViewRenderAreaCount{ perViewRenderAreaCount_ } + , pPerViewRenderAreas{ pPerViewRenderAreas_ } + { + } + + VULKAN_HPP_CONSTEXPR + MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & perViewRenderAreas_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), perViewRenderAreaCount( static_cast( perViewRenderAreas_.size() ) ), pPerViewRenderAreas( perViewRenderAreas_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & + operator=( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & operator=( VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & + setPerViewRenderAreaCount( uint32_t perViewRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT + { + perViewRenderAreaCount = perViewRenderAreaCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & + setPPerViewRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D * pPerViewRenderAreas_ ) VULKAN_HPP_NOEXCEPT + { + pPerViewRenderAreas = pPerViewRenderAreas_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & + setPerViewRenderAreas( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & perViewRenderAreas_ ) VULKAN_HPP_NOEXCEPT + { + perViewRenderAreaCount = static_cast( perViewRenderAreas_.size() ); + pPerViewRenderAreas = perViewRenderAreas_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, perViewRenderAreaCount, pPerViewRenderAreas ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & ) const = default; +#else + bool operator==( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewRenderAreaCount == rhs.perViewRenderAreaCount ) && + ( pPerViewRenderAreas == rhs.pPerViewRenderAreas ); +# endif + } + + bool operator!=( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + const void * pNext = {}; + uint32_t perViewRenderAreaCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pPerViewRenderAreas = {}; + }; + + template <> + struct CppType + { + using Type = MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + }; + + // wrapper struct for struct VkMutableDescriptorTypeListEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMutableDescriptorTypeListEXT.html + struct MutableDescriptorTypeListEXT + { + using NativeType = VkMutableDescriptorTypeListEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListEXT( uint32_t descriptorTypeCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : descriptorTypeCount{ descriptorTypeCount_ } + , pDescriptorTypes{ pDescriptorTypes_ } + { + } + + VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListEXT( MutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MutableDescriptorTypeListEXT( VkMutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MutableDescriptorTypeListEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeListEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorTypes_ ) + : descriptorTypeCount( static_cast( descriptorTypes_.size() ) ), pDescriptorTypes( descriptorTypes_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + MutableDescriptorTypeListEXT & operator=( MutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MutableDescriptorTypeListEXT & operator=( VkMutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT & setDescriptorTypeCount( uint32_t descriptorTypeCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorTypeCount = descriptorTypeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT & + setPDescriptorTypes( const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ ) VULKAN_HPP_NOEXCEPT + { + pDescriptorTypes = pDescriptorTypes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeListEXT & setDescriptorTypes( + 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorTypes_ ) VULKAN_HPP_NOEXCEPT + { + descriptorTypeCount = static_cast( descriptorTypes_.size() ); + pDescriptorTypes = descriptorTypes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMutableDescriptorTypeListEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMutableDescriptorTypeListEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMutableDescriptorTypeListEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMutableDescriptorTypeListEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( descriptorTypeCount, pDescriptorTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MutableDescriptorTypeListEXT const & ) const = default; +#else + bool operator==( MutableDescriptorTypeListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( descriptorTypeCount == rhs.descriptorTypeCount ) && ( pDescriptorTypes == rhs.pDescriptorTypes ); +# endif + } + + bool operator!=( MutableDescriptorTypeListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t descriptorTypeCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes = {}; + }; + + using MutableDescriptorTypeListVALVE = MutableDescriptorTypeListEXT; + + // wrapper struct for struct VkMutableDescriptorTypeCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMutableDescriptorTypeCreateInfoEXT.html + struct MutableDescriptorTypeCreateInfoEXT + { + using NativeType = VkMutableDescriptorTypeCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMutableDescriptorTypeCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoEXT( uint32_t mutableDescriptorTypeListCount_ = {}, + const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , mutableDescriptorTypeListCount{ mutableDescriptorTypeListCount_ } + , pMutableDescriptorTypeLists{ pMutableDescriptorTypeLists_ } + { + } + + VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoEXT( MutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MutableDescriptorTypeCreateInfoEXT( VkMutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MutableDescriptorTypeCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & mutableDescriptorTypeLists_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , mutableDescriptorTypeListCount( static_cast( mutableDescriptorTypeLists_.size() ) ) + , pMutableDescriptorTypeLists( mutableDescriptorTypeLists_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + MutableDescriptorTypeCreateInfoEXT & 
operator=( MutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MutableDescriptorTypeCreateInfoEXT & operator=( VkMutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & + setMutableDescriptorTypeListCount( uint32_t mutableDescriptorTypeListCount_ ) VULKAN_HPP_NOEXCEPT + { + mutableDescriptorTypeListCount = mutableDescriptorTypeListCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & + setPMutableDescriptorTypeLists( const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT + { + pMutableDescriptorTypeLists = pMutableDescriptorTypeLists_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeCreateInfoEXT & setMutableDescriptorTypeLists( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & mutableDescriptorTypeLists_ ) + VULKAN_HPP_NOEXCEPT + { + mutableDescriptorTypeListCount = static_cast( mutableDescriptorTypeLists_.size() ); + pMutableDescriptorTypeLists = mutableDescriptorTypeLists_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMutableDescriptorTypeCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMutableDescriptorTypeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMutableDescriptorTypeCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkMutableDescriptorTypeCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mutableDescriptorTypeListCount, pMutableDescriptorTypeLists ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MutableDescriptorTypeCreateInfoEXT const & ) const = default; +#else + bool operator==( MutableDescriptorTypeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mutableDescriptorTypeListCount == rhs.mutableDescriptorTypeListCount ) && + ( pMutableDescriptorTypeLists == rhs.pMutableDescriptorTypeLists ); +# endif + } + + bool operator!=( MutableDescriptorTypeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMutableDescriptorTypeCreateInfoEXT; + const void * pNext = {}; + uint32_t mutableDescriptorTypeListCount = {}; + const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists = {}; + }; + + template <> + struct CppType + { + using Type = MutableDescriptorTypeCreateInfoEXT; + }; + + using MutableDescriptorTypeCreateInfoVALVE = MutableDescriptorTypeCreateInfoEXT; + + // wrapper struct for struct 
VkOpaqueCaptureDescriptorDataCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpaqueCaptureDescriptorDataCreateInfoEXT.html + struct OpaqueCaptureDescriptorDataCreateInfoEXT + { + using NativeType = VkOpaqueCaptureDescriptorDataCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OpaqueCaptureDescriptorDataCreateInfoEXT( const void * opaqueCaptureDescriptorData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , opaqueCaptureDescriptorData{ opaqueCaptureDescriptorData_ } + { + } + + VULKAN_HPP_CONSTEXPR OpaqueCaptureDescriptorDataCreateInfoEXT( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OpaqueCaptureDescriptorDataCreateInfoEXT( VkOpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : OpaqueCaptureDescriptorDataCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + OpaqueCaptureDescriptorDataCreateInfoEXT & operator=( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + OpaqueCaptureDescriptorDataCreateInfoEXT & operator=( VkOpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDescriptorDataCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDescriptorDataCreateInfoEXT & + setOpaqueCaptureDescriptorData( const void * opaqueCaptureDescriptorData_ ) VULKAN_HPP_NOEXCEPT + { + opaqueCaptureDescriptorData = opaqueCaptureDescriptorData_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkOpaqueCaptureDescriptorDataCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpaqueCaptureDescriptorDataCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpaqueCaptureDescriptorDataCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkOpaqueCaptureDescriptorDataCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, opaqueCaptureDescriptorData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OpaqueCaptureDescriptorDataCreateInfoEXT const & ) const = default; +#else + bool operator==( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureDescriptorData == rhs.opaqueCaptureDescriptorData ); +# endif + } + + bool operator!=( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT; + const void * pNext = {}; + const void * opaqueCaptureDescriptorData = {}; + }; + + template <> + struct CppType + { + using Type = OpaqueCaptureDescriptorDataCreateInfoEXT; + }; + + // wrapper struct for struct VkOpticalFlowExecuteInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowExecuteInfoNV.html + struct OpticalFlowExecuteInfoNV + { + using NativeType = VkOpticalFlowExecuteInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowExecuteInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OpticalFlowExecuteInfoNV( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_ = {}, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pRegions_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } + { + } + + VULKAN_HPP_CONSTEXPR OpticalFlowExecuteInfoNV( OpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OpticalFlowExecuteInfoNV( VkOpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : OpticalFlowExecuteInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + OpticalFlowExecuteInfoNV( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ), regionCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + OpticalFlowExecuteInfoNV & operator=( OpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + OpticalFlowExecuteInfoNV & operator=( VkOpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setFlags( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setPRegions( const VULKAN_HPP_NAMESPACE::Rect2D * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + OpticalFlowExecuteInfoNV & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkOpticalFlowExecuteInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpticalFlowExecuteInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpticalFlowExecuteInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + 
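+    // Usage sketch (illustrative only, not part of the generated header): filling an OpticalFlowExecuteInfoNV from
+    // VK_NV_optical_flow. setRegions below is the enhanced-mode ArrayProxy setter declared above, which fills
+    // regionCount/pRegions from the proxy; `cmd` and `session` are placeholder handles assumed to be valid:
+    //
+    //   vk::Rect2D region{ { 0, 0 }, { 1920, 1080 } };
+    //   vk::OpticalFlowExecuteInfoNV executeInfo{};
+    //   executeInfo.setFlags( {} ).setRegions( region );
+    //   vkCmdOpticalFlowExecuteNV( cmd, session, executeInfo );   // converts via the const-pointer operator above
+    //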
operator VkOpticalFlowExecuteInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OpticalFlowExecuteInfoNV const & ) const = default; +#else + bool operator==( OpticalFlowExecuteInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( OpticalFlowExecuteInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowExecuteInfoNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags = {}; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = OpticalFlowExecuteInfoNV; + }; + + // wrapper struct for struct VkOpticalFlowImageFormatInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowImageFormatInfoNV.html + struct OpticalFlowImageFormatInfoNV + { + using NativeType = VkOpticalFlowImageFormatInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowImageFormatInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatInfoNV( VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , usage{ usage_ } + { + } + + VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatInfoNV( OpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OpticalFlowImageFormatInfoNV( VkOpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : OpticalFlowImageFormatInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + OpticalFlowImageFormatInfoNV & operator=( OpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + OpticalFlowImageFormatInfoNV & operator=( VkOpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV & setUsage( VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkOpticalFlowImageFormatInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpticalFlowImageFormatInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpticalFlowImageFormatInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkOpticalFlowImageFormatInfoNV *() VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, usage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OpticalFlowImageFormatInfoNV const & ) const = default; +#else + bool operator==( OpticalFlowImageFormatInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage ); +# endif + } + + bool operator!=( OpticalFlowImageFormatInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowImageFormatInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage = {}; + }; + + template <> + struct CppType + { + using Type = OpticalFlowImageFormatInfoNV; + }; + + // wrapper struct for struct VkOpticalFlowImageFormatPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowImageFormatPropertiesNV.html + struct OpticalFlowImageFormatPropertiesNV + { + using NativeType = VkOpticalFlowImageFormatPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowImageFormatPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , format{ format_ } + { + } + + VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatPropertiesNV( OpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OpticalFlowImageFormatPropertiesNV( VkOpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : OpticalFlowImageFormatPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + OpticalFlowImageFormatPropertiesNV & operator=( OpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + OpticalFlowImageFormatPropertiesNV & operator=( VkOpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkOpticalFlowImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpticalFlowImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpticalFlowImageFormatPropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkOpticalFlowImageFormatPropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, format ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OpticalFlowImageFormatPropertiesNV const & ) const = default; +#else + bool operator==( OpticalFlowImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return 
this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ); +# endif + } + + bool operator!=( OpticalFlowImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowImageFormatPropertiesNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + + template <> + struct CppType + { + using Type = OpticalFlowImageFormatPropertiesNV; + }; + + // wrapper struct for struct VkOpticalFlowSessionCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowSessionCreateInfoNV.html + struct OpticalFlowSessionCreateInfoNV + { + using NativeType = VkOpticalFlowSessionCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowSessionCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV( + uint32_t width_ = {}, + uint32_t height_ = {}, + VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format flowVectorFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format costFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize_ = {}, + VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize_ = {}, + VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel_ = VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV::eUnknown, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , width{ width_ } + , height{ height_ } + , imageFormat{ imageFormat_ } + , flowVectorFormat{ flowVectorFormat_ } + , costFormat{ costFormat_ } + , outputGridSize{ outputGridSize_ } + , hintGridSize{ hintGridSize_ } + , performanceLevel{ performanceLevel_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OpticalFlowSessionCreateInfoNV( VkOpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : OpticalFlowSessionCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + OpticalFlowSessionCreateInfoNV & operator=( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + OpticalFlowSessionCreateInfoNV & operator=( VkOpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT + { + imageFormat = 
imageFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setFlowVectorFormat( VULKAN_HPP_NAMESPACE::Format flowVectorFormat_ ) VULKAN_HPP_NOEXCEPT + { + flowVectorFormat = flowVectorFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setCostFormat( VULKAN_HPP_NAMESPACE::Format costFormat_ ) VULKAN_HPP_NOEXCEPT + { + costFormat = costFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & + setOutputGridSize( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize_ ) VULKAN_HPP_NOEXCEPT + { + outputGridSize = outputGridSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & + setHintGridSize( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize_ ) VULKAN_HPP_NOEXCEPT + { + hintGridSize = hintGridSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & + setPerformanceLevel( VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel_ ) VULKAN_HPP_NOEXCEPT + { + performanceLevel = performanceLevel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkOpticalFlowSessionCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpticalFlowSessionCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpticalFlowSessionCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkOpticalFlowSessionCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, width, height, imageFormat, flowVectorFormat, costFormat, outputGridSize, hintGridSize, performanceLevel, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OpticalFlowSessionCreateInfoNV const & ) const = default; +#else + bool operator==( OpticalFlowSessionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( width == rhs.width ) && ( height == rhs.height ) && ( imageFormat == rhs.imageFormat ) && + ( flowVectorFormat == rhs.flowVectorFormat ) && ( costFormat == rhs.costFormat ) && ( outputGridSize == rhs.outputGridSize ) && + ( hintGridSize == rhs.hintGridSize ) && ( performanceLevel == rhs.performanceLevel ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( OpticalFlowSessionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowSessionCreateInfoNV; + void * pNext = {}; + uint32_t width = {}; + uint32_t height = {}; + VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Format flowVectorFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Format costFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize = {}; + 
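+    // Usage sketch (illustrative only, not part of the generated header): a minimal OpticalFlowSessionCreateInfoNV,
+    // leaving the grid-size and flag fields at their defaults. The image format below is a placeholder; the formats
+    // actually supported must be queried as described on the VK_NV_optical_flow registry pages linked above:
+    //
+    //   vk::OpticalFlowSessionCreateInfoNV sessionCreateInfo{};
+    //   sessionCreateInfo.setWidth( 1920 )
+    //                    .setHeight( 1080 )
+    //                    .setImageFormat( vk::Format::eB8G8R8A8Unorm )
+    //                    .setPerformanceLevel( vk::OpticalFlowPerformanceLevelNV::eFast );
+    //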
VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize = {}; + VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel = VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV::eUnknown; + VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags = {}; + }; + + template <> + struct CppType + { + using Type = OpticalFlowSessionCreateInfoNV; + }; + + // wrapper struct for struct VkOpticalFlowSessionCreatePrivateDataInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowSessionCreatePrivateDataInfoNV.html + struct OpticalFlowSessionCreatePrivateDataInfoNV + { + using NativeType = VkOpticalFlowSessionCreatePrivateDataInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreatePrivateDataInfoNV( uint32_t id_ = {}, + uint32_t size_ = {}, + const void * pPrivateData_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , id{ id_ } + , size{ size_ } + , pPrivateData{ pPrivateData_ } + { + } + + VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreatePrivateDataInfoNV( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OpticalFlowSessionCreatePrivateDataInfoNV( VkOpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : OpticalFlowSessionCreatePrivateDataInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + OpticalFlowSessionCreatePrivateDataInfoNV & operator=( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + OpticalFlowSessionCreatePrivateDataInfoNV & operator=( VkOpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setId( uint32_t id_ ) VULKAN_HPP_NOEXCEPT + { + id = id_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setPPrivateData( const void * pPrivateData_ ) VULKAN_HPP_NOEXCEPT + { + pPrivateData = pPrivateData_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkOpticalFlowSessionCreatePrivateDataInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpticalFlowSessionCreatePrivateDataInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpticalFlowSessionCreatePrivateDataInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkOpticalFlowSessionCreatePrivateDataInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, id, size, pPrivateData ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OpticalFlowSessionCreatePrivateDataInfoNV const & ) const = default; +#else + bool operator==( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( id == rhs.id ) && ( size == rhs.size ) && ( pPrivateData == rhs.pPrivateData ); +# endif + } + + bool operator!=( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV; + void * pNext = {}; + uint32_t id = {}; + uint32_t size = {}; + const void * pPrivateData = {}; + }; + + template <> + struct CppType + { + using Type = OpticalFlowSessionCreatePrivateDataInfoNV; + }; + + // wrapper struct for struct VkOutOfBandQueueTypeInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOutOfBandQueueTypeInfoNV.html + struct OutOfBandQueueTypeInfoNV + { + using NativeType = VkOutOfBandQueueTypeInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOutOfBandQueueTypeInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OutOfBandQueueTypeInfoNV( VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV queueType_ = VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV::eRender, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , queueType{ queueType_ } + { + } + + VULKAN_HPP_CONSTEXPR OutOfBandQueueTypeInfoNV( OutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OutOfBandQueueTypeInfoNV( VkOutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : OutOfBandQueueTypeInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + OutOfBandQueueTypeInfoNV & operator=( OutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + OutOfBandQueueTypeInfoNV & operator=( VkOutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 OutOfBandQueueTypeInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OutOfBandQueueTypeInfoNV & setQueueType( VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV queueType_ ) VULKAN_HPP_NOEXCEPT + { + queueType = queueType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkOutOfBandQueueTypeInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOutOfBandQueueTypeInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOutOfBandQueueTypeInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkOutOfBandQueueTypeInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, queueType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OutOfBandQueueTypeInfoNV const & ) const = 
default; +#else + bool operator==( OutOfBandQueueTypeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueType == rhs.queueType ); +# endif + } + + bool operator!=( OutOfBandQueueTypeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOutOfBandQueueTypeInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV queueType = VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV::eRender; + }; + + template <> + struct CppType + { + using Type = OutOfBandQueueTypeInfoNV; + }; + + // wrapper struct for struct VkPartitionedAccelerationStructureFlagsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureFlagsNV.html + struct PartitionedAccelerationStructureFlagsNV + { + using NativeType = VkPartitionedAccelerationStructureFlagsNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePartitionedAccelerationStructureFlagsNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureFlagsNV( VULKAN_HPP_NAMESPACE::Bool32 enablePartitionTranslation_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , enablePartitionTranslation{ enablePartitionTranslation_ } + { + } + + VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureFlagsNV( PartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PartitionedAccelerationStructureFlagsNV( VkPartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PartitionedAccelerationStructureFlagsNV( *reinterpret_cast( &rhs ) ) + { + } + + PartitionedAccelerationStructureFlagsNV & operator=( PartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PartitionedAccelerationStructureFlagsNV & operator=( VkPartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureFlagsNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureFlagsNV & + setEnablePartitionTranslation( VULKAN_HPP_NAMESPACE::Bool32 enablePartitionTranslation_ ) VULKAN_HPP_NOEXCEPT + { + enablePartitionTranslation = enablePartitionTranslation_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPartitionedAccelerationStructureFlagsNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureFlagsNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureFlagsNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureFlagsNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( 
sType, pNext, enablePartitionTranslation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PartitionedAccelerationStructureFlagsNV const & ) const = default; +#else + bool operator==( PartitionedAccelerationStructureFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( enablePartitionTranslation == rhs.enablePartitionTranslation ); +# endif + } + + bool operator!=( PartitionedAccelerationStructureFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePartitionedAccelerationStructureFlagsNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 enablePartitionTranslation = {}; + }; + + template <> + struct CppType + { + using Type = PartitionedAccelerationStructureFlagsNV; + }; + + // wrapper struct for struct VkPartitionedAccelerationStructureUpdateInstanceDataNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureUpdateInstanceDataNV.html + struct PartitionedAccelerationStructureUpdateInstanceDataNV + { + using NativeType = VkPartitionedAccelerationStructureUpdateInstanceDataNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PartitionedAccelerationStructureUpdateInstanceDataNV( uint32_t instanceIndex_ = {}, + uint32_t instanceContributionToHitGroupIndex_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT + : instanceIndex{ instanceIndex_ } + , instanceContributionToHitGroupIndex{ instanceContributionToHitGroupIndex_ } + , accelerationStructure{ accelerationStructure_ } + { + } + + VULKAN_HPP_CONSTEXPR + PartitionedAccelerationStructureUpdateInstanceDataNV( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PartitionedAccelerationStructureUpdateInstanceDataNV( VkPartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PartitionedAccelerationStructureUpdateInstanceDataNV( *reinterpret_cast( &rhs ) ) + { + } + + PartitionedAccelerationStructureUpdateInstanceDataNV & + operator=( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PartitionedAccelerationStructureUpdateInstanceDataNV & operator=( VkPartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureUpdateInstanceDataNV & setInstanceIndex( uint32_t instanceIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceIndex = instanceIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureUpdateInstanceDataNV & + setInstanceContributionToHitGroupIndex( uint32_t instanceContributionToHitGroupIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceContributionToHitGroupIndex = instanceContributionToHitGroupIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureUpdateInstanceDataNV & + setAccelerationStructure( VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return 
*this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPartitionedAccelerationStructureUpdateInstanceDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureUpdateInstanceDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureUpdateInstanceDataNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureUpdateInstanceDataNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( instanceIndex, instanceContributionToHitGroupIndex, accelerationStructure ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PartitionedAccelerationStructureUpdateInstanceDataNV const & ) const = default; +#else + bool operator==( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( instanceIndex == rhs.instanceIndex ) && ( instanceContributionToHitGroupIndex == rhs.instanceContributionToHitGroupIndex ) && + ( accelerationStructure == rhs.accelerationStructure ); +# endif + } + + bool operator!=( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t instanceIndex = {}; + uint32_t instanceContributionToHitGroupIndex = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure = {}; + }; + + // wrapper struct for struct VkPartitionedAccelerationStructureWriteInstanceDataNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureWriteInstanceDataNV.html + struct PartitionedAccelerationStructureWriteInstanceDataNV + { + using NativeType = VkPartitionedAccelerationStructureWriteInstanceDataNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PartitionedAccelerationStructureWriteInstanceDataNV( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {}, + std::array const & explicitAABB_ = {}, + uint32_t instanceID_ = {}, + uint32_t instanceMask_ = {}, + uint32_t instanceContributionToHitGroupIndex_ = {}, + VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstanceFlagsNV instanceFlags_ = {}, + uint32_t instanceIndex_ = {}, + uint32_t partitionIndex_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT + : transform{ transform_ } + , explicitAABB{ explicitAABB_ } + , instanceID{ instanceID_ } + , instanceMask{ instanceMask_ } + , instanceContributionToHitGroupIndex{ instanceContributionToHitGroupIndex_ } + , instanceFlags{ instanceFlags_ } + , instanceIndex{ instanceIndex_ } + , partitionIndex{ partitionIndex_ } + , accelerationStructure{ accelerationStructure_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + PartitionedAccelerationStructureWriteInstanceDataNV( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PartitionedAccelerationStructureWriteInstanceDataNV( VkPartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PartitionedAccelerationStructureWriteInstanceDataNV( *reinterpret_cast( &rhs ) ) + { 
+ } + + PartitionedAccelerationStructureWriteInstanceDataNV & + operator=( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PartitionedAccelerationStructureWriteInstanceDataNV & operator=( VkPartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & + setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setExplicitAABB( std::array explicitAABB_ ) VULKAN_HPP_NOEXCEPT + { + explicitAABB = explicitAABB_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setInstanceID( uint32_t instanceID_ ) VULKAN_HPP_NOEXCEPT + { + instanceID = instanceID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setInstanceMask( uint32_t instanceMask_ ) VULKAN_HPP_NOEXCEPT + { + instanceMask = instanceMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & + setInstanceContributionToHitGroupIndex( uint32_t instanceContributionToHitGroupIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceContributionToHitGroupIndex = instanceContributionToHitGroupIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & + setInstanceFlags( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstanceFlagsNV instanceFlags_ ) VULKAN_HPP_NOEXCEPT + { + instanceFlags = instanceFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setInstanceIndex( uint32_t instanceIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceIndex = instanceIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setPartitionIndex( uint32_t partitionIndex_ ) VULKAN_HPP_NOEXCEPT + { + partitionIndex = partitionIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & + setAccelerationStructure( VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPartitionedAccelerationStructureWriteInstanceDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureWriteInstanceDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureWriteInstanceDataNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureWriteInstanceDataNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstanceFlagsNV const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::DeviceAddress const &> +# endif + reflect() const 
VULKAN_HPP_NOEXCEPT + { + return std::tie( transform, + explicitAABB, + instanceID, + instanceMask, + instanceContributionToHitGroupIndex, + instanceFlags, + instanceIndex, + partitionIndex, + accelerationStructure ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PartitionedAccelerationStructureWriteInstanceDataNV const & ) const = default; +#else + bool operator==( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( transform == rhs.transform ) && ( explicitAABB == rhs.explicitAABB ) && ( instanceID == rhs.instanceID ) && + ( instanceMask == rhs.instanceMask ) && ( instanceContributionToHitGroupIndex == rhs.instanceContributionToHitGroupIndex ) && + ( instanceFlags == rhs.instanceFlags ) && ( instanceIndex == rhs.instanceIndex ) && ( partitionIndex == rhs.partitionIndex ) && + ( accelerationStructure == rhs.accelerationStructure ); +# endif + } + + bool operator!=( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D explicitAABB = {}; + uint32_t instanceID = {}; + uint32_t instanceMask = {}; + uint32_t instanceContributionToHitGroupIndex = {}; + VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstanceFlagsNV instanceFlags = {}; + uint32_t instanceIndex = {}; + uint32_t partitionIndex = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure = {}; + }; + + // wrapper struct for struct VkPartitionedAccelerationStructureWritePartitionTranslationDataNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureWritePartitionTranslationDataNV.html + struct PartitionedAccelerationStructureWritePartitionTranslationDataNV + { + using NativeType = VkPartitionedAccelerationStructureWritePartitionTranslationDataNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PartitionedAccelerationStructureWritePartitionTranslationDataNV( uint32_t partitionIndex_ = {}, + std::array const & partitionTranslation_ = {} ) VULKAN_HPP_NOEXCEPT + : partitionIndex{ partitionIndex_ } + , partitionTranslation{ partitionTranslation_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWritePartitionTranslationDataNV( + PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PartitionedAccelerationStructureWritePartitionTranslationDataNV( VkPartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PartitionedAccelerationStructureWritePartitionTranslationDataNV( + *reinterpret_cast( &rhs ) ) + { + } + + PartitionedAccelerationStructureWritePartitionTranslationDataNV & + operator=( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PartitionedAccelerationStructureWritePartitionTranslationDataNV & + operator=( VkPartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
PartitionedAccelerationStructureWritePartitionTranslationDataNV & setPartitionIndex( uint32_t partitionIndex_ ) VULKAN_HPP_NOEXCEPT + { + partitionIndex = partitionIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWritePartitionTranslationDataNV & + setPartitionTranslation( std::array partitionTranslation_ ) VULKAN_HPP_NOEXCEPT + { + partitionTranslation = partitionTranslation_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPartitionedAccelerationStructureWritePartitionTranslationDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureWritePartitionTranslationDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureWritePartitionTranslationDataNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureWritePartitionTranslationDataNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( partitionIndex, partitionTranslation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & ) const = default; +#else + bool operator==( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( partitionIndex == rhs.partitionIndex ) && ( partitionTranslation == rhs.partitionTranslation ); +# endif + } + + bool operator!=( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t partitionIndex = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D partitionTranslation = {}; + }; + + // wrapper struct for struct VkPastPresentationTimingGOOGLE, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPastPresentationTimingGOOGLE.html + struct PastPresentationTimingGOOGLE + { + using NativeType = VkPastPresentationTimingGOOGLE; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( uint32_t presentID_ = {}, + uint64_t desiredPresentTime_ = {}, + uint64_t actualPresentTime_ = {}, + uint64_t earliestPresentTime_ = {}, + uint64_t presentMargin_ = {} ) VULKAN_HPP_NOEXCEPT + : presentID{ presentID_ } + , desiredPresentTime{ desiredPresentTime_ } + , actualPresentTime{ actualPresentTime_ } + , earliestPresentTime{ earliestPresentTime_ } + , presentMargin{ presentMargin_ } + { + } + + VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + : PastPresentationTimingGOOGLE( *reinterpret_cast( &rhs ) ) + { + } + + PastPresentationTimingGOOGLE & operator=( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PastPresentationTimingGOOGLE & operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + 
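+    // Illustrative usage sketch (comment only, not part of the generated interface): with the
+    // VK_GOOGLE_display_timing extension enabled, past timings are typically obtained through
+    // vk::Device::getPastPresentationTimingGOOGLE and read back through this wrapper. The
+    // device/swapchain handles and process() below are placeholders, not names from this header.
+    //
+    //   std::vector<vk::PastPresentationTimingGOOGLE> timings =
+    //     device.getPastPresentationTimingGOOGLE( swapchain );
+    //   for ( auto const & timing : timings )
+    //   {
+    //     // actual vs. desired present time shows how late the image reached the display;
+    //     // presentMargin shows how much earlier the image could have been presented
+    //     process( timing.presentID, timing.actualPresentTime - timing.desiredPresentTime, timing.presentMargin );
+    //   }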
operator VkPastPresentationTimingGOOGLE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPastPresentationTimingGOOGLE const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPastPresentationTimingGOOGLE *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( presentID, desiredPresentTime, actualPresentTime, earliestPresentTime, presentMargin ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PastPresentationTimingGOOGLE const & ) const = default; +#else + bool operator==( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime ) && ( actualPresentTime == rhs.actualPresentTime ) && + ( earliestPresentTime == rhs.earliestPresentTime ) && ( presentMargin == rhs.presentMargin ); +# endif + } + + bool operator!=( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t presentID = {}; + uint64_t desiredPresentTime = {}; + uint64_t actualPresentTime = {}; + uint64_t earliestPresentTime = {}; + uint64_t presentMargin = {}; + }; + + // wrapper struct for struct VkPerTileBeginInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerTileBeginInfoQCOM.html + struct PerTileBeginInfoQCOM + { + using NativeType = VkPerTileBeginInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerTileBeginInfoQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerTileBeginInfoQCOM( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {} + + VULKAN_HPP_CONSTEXPR PerTileBeginInfoQCOM( PerTileBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerTileBeginInfoQCOM( VkPerTileBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PerTileBeginInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PerTileBeginInfoQCOM & operator=( PerTileBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PerTileBeginInfoQCOM & operator=( VkPerTileBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerTileBeginInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPerTileBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerTileBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerTileBeginInfoQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPerTileBeginInfoQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION 
+      auto
+# else
+      std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PerTileBeginInfoQCOM const & ) const = default;
+#else
+    bool operator==( PerTileBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
+# endif
+    }
+
+    bool operator!=( PerTileBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerTileBeginInfoQCOM;
+    const void *                        pNext = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePerTileBeginInfoQCOM>
+  {
+    using Type = PerTileBeginInfoQCOM;
+  };
+
+  // wrapper struct for struct VkPerTileEndInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerTileEndInfoQCOM.html
+  struct PerTileEndInfoQCOM
+  {
+    using NativeType = VkPerTileEndInfoQCOM;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerTileEndInfoQCOM;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PerTileEndInfoQCOM( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {}
+
+    VULKAN_HPP_CONSTEXPR PerTileEndInfoQCOM( PerTileEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerTileEndInfoQCOM( VkPerTileEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : PerTileEndInfoQCOM( *reinterpret_cast<PerTileEndInfoQCOM const *>( &rhs ) ) {}
+
+    PerTileEndInfoQCOM & operator=( PerTileEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PerTileEndInfoQCOM & operator=( VkPerTileEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerTileEndInfoQCOM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PerTileEndInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPerTileEndInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerTileEndInfoQCOM *>( this );
+    }
+
+    operator VkPerTileEndInfoQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerTileEndInfoQCOM *>( this );
+    }
+
+    operator VkPerTileEndInfoQCOM const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPerTileEndInfoQCOM *>( this );
+    }
+
+    operator VkPerTileEndInfoQCOM *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPerTileEndInfoQCOM *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PerTileEndInfoQCOM const & ) const = default;
+#else
+    bool operator==( PerTileEndInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
+# endif
+    }
+
+    bool operator!=( PerTileEndInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerTileEndInfoQCOM;
+    const void *                        pNext = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePerTileEndInfoQCOM>
+  {
+    using Type = PerTileEndInfoQCOM;
+  };
+
+  // wrapper
struct for struct VkPerformanceConfigurationAcquireInfoINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceConfigurationAcquireInfoINTEL.html + struct PerformanceConfigurationAcquireInfoINTEL + { + using NativeType = VkPerformanceConfigurationAcquireInfoINTEL; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceConfigurationAcquireInfoINTEL; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PerformanceConfigurationAcquireInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + { + } + + VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceConfigurationAcquireInfoINTEL( *reinterpret_cast( &rhs ) ) + { + } + + PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PerformanceConfigurationAcquireInfoINTEL & operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & + setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPerformanceConfigurationAcquireInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceConfigurationAcquireInfoINTEL const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPerformanceConfigurationAcquireInfoINTEL *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const & ) const = default; +#else + bool operator==( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ); +# endif + } + + bool operator!=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL; + const 
void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated; + }; + + template <> + struct CppType + { + using Type = PerformanceConfigurationAcquireInfoINTEL; + }; + + // wrapper struct for struct VkPerformanceCounterDescriptionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceCounterDescriptionKHR.html + struct PerformanceCounterDescriptionKHR + { + using NativeType = VkPerformanceCounterDescriptionKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {}, + std::array const & name_ = {}, + std::array const & category_ = {}, + std::array const & description_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , name{ name_ } + , category{ category_ } + , description{ description_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceCounterDescriptionKHR( *reinterpret_cast( &rhs ) ) + { + } + + PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPerformanceCounterDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceCounterDescriptionKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPerformanceCounterDescriptionKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, name, category, description ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( category, rhs.category ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( strcmp( name, rhs.name ) == 0 ) && + ( strcmp( category, rhs.category ) == 0 ) && ( strcmp( description, rhs.description ) == 0 ); + } + + bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D category = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + }; + + template <> + struct CppType + { + using Type = PerformanceCounterDescriptionKHR; + }; + + // wrapper struct for struct VkPerformanceCounterKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceCounterKHR.html + struct PerformanceCounterKHR + { + using NativeType = VkPerformanceCounterKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PerformanceCounterKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric, + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer, + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32, + std::array const & uuid_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , unit{ unit_ } + , scope{ scope_ } + , storage{ storage_ } + , uuid{ uuid_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceCounterKHR( *reinterpret_cast( &rhs ) ) + { + } + + PerformanceCounterKHR & operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PerformanceCounterKHR & operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPerformanceCounterKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceCounterKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPerformanceCounterKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, unit, scope, storage, uuid ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceCounterKHR const & ) const = default; +#else + bool operator==( PerformanceCounterKHR const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( unit == rhs.unit ) && ( scope == rhs.scope ) && ( storage == rhs.storage ) && + ( uuid == rhs.uuid ); +# endif + } + + bool operator!=( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric; + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer; + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D uuid = {}; + }; + + template <> + struct CppType + { + using Type = PerformanceCounterKHR; + }; + + union PerformanceCounterResultKHR + { + using NativeType = VkPerformanceCounterResultKHR; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int32_t int32_ = {} ) : int32( int32_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int64_t int64_ ) : int64( int64_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint32_t uint32_ ) : uint32( uint32_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint64_t uint64_ ) : uint64( uint64_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( float float32_ ) : float32( float32_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( double float64_ ) : float64( float64_ ) {} +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT + { + int32 = int32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT + { + int64 = int64_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT + { + uint32 = uint32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT + { + uint64 = uint64_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT + { + float32 = float32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT + { + float64 = float64_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPerformanceCounterResultKHR const &() const + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceCounterResultKHR &() + { + return *reinterpret_cast( this ); + } + + int32_t int32; + int64_t int64; + uint32_t uint32; + uint64_t uint64; + float float32; + double float64; + }; + + // wrapper struct for struct VkPerformanceMarkerInfoINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceMarkerInfoINTEL.html + struct PerformanceMarkerInfoINTEL + { + using NativeType = VkPerformanceMarkerInfoINTEL; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePerformanceMarkerInfoINTEL; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , marker{ marker_ } + { + } + + VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceMarkerInfoINTEL( *reinterpret_cast( &rhs ) ) + { + } + + PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT + { + marker = marker_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPerformanceMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceMarkerInfoINTEL const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPerformanceMarkerInfoINTEL *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, marker ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceMarkerInfoINTEL const & ) const = default; +#else + bool operator==( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker ); +# endif + } + + bool operator!=( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL; + const void * pNext = {}; + uint64_t marker = {}; + }; + + template <> + struct CppType + { + using Type = PerformanceMarkerInfoINTEL; + }; + + // wrapper struct for struct VkPerformanceOverrideInfoINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceOverrideInfoINTEL.html + struct PerformanceOverrideInfoINTEL + { + using NativeType = VkPerformanceOverrideInfoINTEL; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( + VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, + VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, + 
uint64_t parameter_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , enable{ enable_ } + , parameter{ parameter_ } + { + } + + VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceOverrideInfoINTEL( *reinterpret_cast( &rhs ) ) + { + } + + PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT + { + enable = enable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT + { + parameter = parameter_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPerformanceOverrideInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceOverrideInfoINTEL const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPerformanceOverrideInfoINTEL *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, enable, parameter ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceOverrideInfoINTEL const & ) const = default; +#else + bool operator==( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( enable == rhs.enable ) && ( parameter == rhs.parameter ); +# endif + } + + bool operator!=( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware; + VULKAN_HPP_NAMESPACE::Bool32 enable = {}; + uint64_t parameter = {}; + }; + + template <> + struct CppType + { + using Type = PerformanceOverrideInfoINTEL; + }; + + // wrapper struct for struct VkPerformanceQuerySubmitInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceQuerySubmitInfoKHR.html + struct PerformanceQuerySubmitInfoKHR + { + using 
NativeType = VkPerformanceQuerySubmitInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( uint32_t counterPassIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , counterPassIndex{ counterPassIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceQuerySubmitInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + PerformanceQuerySubmitInfoKHR & operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT + { + counterPassIndex = counterPassIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceQuerySubmitInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPerformanceQuerySubmitInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, counterPassIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceQuerySubmitInfoKHR const & ) const = default; +#else + bool operator==( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( counterPassIndex == rhs.counterPassIndex ); +# endif + } + + bool operator!=( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR; + const void * pNext = {}; + uint32_t counterPassIndex = {}; + }; + + template <> + struct CppType + { + using Type = PerformanceQuerySubmitInfoKHR; + }; + + // wrapper struct for struct VkPerformanceStreamMarkerInfoINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceStreamMarkerInfoINTEL.html + struct PerformanceStreamMarkerInfoINTEL + { + using NativeType = VkPerformanceStreamMarkerInfoINTEL; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePerformanceStreamMarkerInfoINTEL; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , marker{ marker_ } + { + } + + VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceStreamMarkerInfoINTEL( *reinterpret_cast( &rhs ) ) + { + } + + PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT + { + marker = marker_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPerformanceStreamMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceStreamMarkerInfoINTEL const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPerformanceStreamMarkerInfoINTEL *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, marker ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceStreamMarkerInfoINTEL const & ) const = default; +#else + bool operator==( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker ); +# endif + } + + bool operator!=( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL; + const void * pNext = {}; + uint32_t marker = {}; + }; + + template <> + struct CppType + { + using Type = PerformanceStreamMarkerInfoINTEL; + }; + + union PerformanceValueDataINTEL + { + using NativeType = VkPerformanceValueDataINTEL; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint32_t value32_ = {} ) : value32( value32_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint64_t value64_ ) : value64( value64_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( float valueFloat_ ) : valueFloat( valueFloat_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( const char * valueString_ ) : valueString( valueString_ ) {} 
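+    // Illustrative usage sketch (comment only, not generated code): the active member of this
+    // union is selected by the accompanying PerformanceValueINTEL::type field, so a value
+    // returned by e.g. vk::Device::getPerformanceParameterINTEL would be decoded roughly as
+    // below; handleValue() and the parameter variable are placeholders.
+    //
+    //   vk::PerformanceValueINTEL value = device.getPerformanceParameterINTEL( parameter );
+    //   switch ( value.type )
+    //   {
+    //     case vk::PerformanceValueTypeINTEL::eUint32: handleValue( value.data.value32 ); break;
+    //     case vk::PerformanceValueTypeINTEL::eFloat:  handleValue( value.data.valueFloat ); break;
+    //     case vk::PerformanceValueTypeINTEL::eString: handleValue( value.data.valueString ); break;
+    //     default: break;
+    //   }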
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT + { + value32 = value32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) VULKAN_HPP_NOEXCEPT + { + value64 = value64_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) VULKAN_HPP_NOEXCEPT + { + valueFloat = valueFloat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT + { + valueBool = valueBool_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueString( const char * valueString_ ) VULKAN_HPP_NOEXCEPT + { + valueString = valueString_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPerformanceValueDataINTEL const &() const + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceValueDataINTEL &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + uint32_t value32; + uint64_t value64; + float valueFloat; + VULKAN_HPP_NAMESPACE::Bool32 valueBool; + const char * valueString; +#else + uint32_t value32; + uint64_t value64; + float valueFloat; + VkBool32 valueBool; + const char * valueString; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + // wrapper struct for struct VkPerformanceValueINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceValueINTEL.html + struct PerformanceValueINTEL + { + using NativeType = VkPerformanceValueINTEL; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PerformanceValueINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32, + VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT + : type{ type_ } + , data{ data_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceValueINTEL( *reinterpret_cast( &rhs ) ) + { + } + + PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPerformanceValueINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceValueINTEL const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPerformanceValueINTEL *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( type, data ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32; + VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {}; + }; + + // wrapper 
struct for struct VkPhysicalDevice16BitStorageFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevice16BitStorageFeatures.html + struct PhysicalDevice16BitStorageFeatures + { + using NativeType = VkPhysicalDevice16BitStorageFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice16BitStorageFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , storageBuffer16BitAccess{ storageBuffer16BitAccess_ } + , uniformAndStorageBuffer16BitAccess{ uniformAndStorageBuffer16BitAccess_ } + , storagePushConstant16{ storagePushConstant16_ } + , storageInputOutput16{ storageInputOutput16_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevice16BitStorageFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevice16BitStorageFeatures & operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevice16BitStorageFeatures & operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & + setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + storageBuffer16BitAccess = storageBuffer16BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & + setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & + setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT + { + storagePushConstant16 = storagePushConstant16_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & + setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT + { + storageInputOutput16 = storageInputOutput16_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevice16BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevice16BitStorageFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator 
VkPhysicalDevice16BitStorageFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, storageBuffer16BitAccess, uniformAndStorageBuffer16BitAccess, storagePushConstant16, storageInputOutput16 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevice16BitStorageFeatures const & ) const = default; +#else + bool operator==( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) && + ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && ( storagePushConstant16 == rhs.storagePushConstant16 ) && + ( storageInputOutput16 == rhs.storageInputOutput16 ); +# endif + } + + bool operator!=( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevice16BitStorageFeatures; + }; + + using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures; + + // wrapper struct for struct VkPhysicalDevice4444FormatsFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevice4444FormatsFeaturesEXT.html + struct PhysicalDevice4444FormatsFeaturesEXT + { + using NativeType = VkPhysicalDevice4444FormatsFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , formatA4R4G4B4{ formatA4R4G4B4_ } + , formatA4B4G4R4{ formatA4B4G4R4_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevice4444FormatsFeaturesEXT( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevice4444FormatsFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevice4444FormatsFeaturesEXT & operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevice4444FormatsFeaturesEXT & operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setPNext( void * pNext_ ) 
VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4R4G4B4( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ ) VULKAN_HPP_NOEXCEPT + { + formatA4R4G4B4 = formatA4R4G4B4_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4B4G4R4( VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ ) VULKAN_HPP_NOEXCEPT + { + formatA4B4G4R4 = formatA4B4G4R4_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevice4444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevice4444FormatsFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevice4444FormatsFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, formatA4R4G4B4, formatA4B4G4R4 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevice4444FormatsFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatA4R4G4B4 == rhs.formatA4R4G4B4 ) && ( formatA4B4G4R4 == rhs.formatA4B4G4R4 ); +# endif + } + + bool operator!=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4 = {}; + VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevice4444FormatsFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDevice8BitStorageFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevice8BitStorageFeatures.html + struct PhysicalDevice8BitStorageFeatures + { + using NativeType = VkPhysicalDevice8BitStorageFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice8BitStorageFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , storageBuffer8BitAccess{ storageBuffer8BitAccess_ } + , uniformAndStorageBuffer8BitAccess{ uniformAndStorageBuffer8BitAccess_ } + , storagePushConstant8{ storagePushConstant8_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : 
PhysicalDevice8BitStorageFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevice8BitStorageFeatures & operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & + setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + storageBuffer8BitAccess = storageBuffer8BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & + setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & + setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT + { + storagePushConstant8 = storagePushConstant8_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevice8BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevice8BitStorageFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevice8BitStorageFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, storageBuffer8BitAccess, uniformAndStorageBuffer8BitAccess, storagePushConstant8 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevice8BitStorageFeatures const & ) const = default; +#else + bool operator==( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) && + ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && ( storagePushConstant8 == rhs.storagePushConstant8 ); +# endif + } + + bool operator!=( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevice8BitStorageFeatures; + }; + + using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures; + + // wrapper struct for struct VkPhysicalDeviceASTCDecodeFeaturesEXT, 
see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceASTCDecodeFeaturesEXT.html
+  struct PhysicalDeviceASTCDecodeFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceASTCDecodeFeaturesEXT;
+
+    static const bool                                  allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {},
+                                                              void *                       pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , decodeModeSharedExponent{ decodeModeSharedExponent_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceASTCDecodeFeaturesEXT( *reinterpret_cast<PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceASTCDecodeFeaturesEXT & operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceASTCDecodeFeaturesEXT & operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT &
+      setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      decodeModeSharedExponent = decodeModeSharedExponent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPhysicalDeviceASTCDecodeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceASTCDecodeFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceASTCDecodeFeaturesEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, decodeModeSharedExponent );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType                    = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
+    void *                              pNext                    = {};
+    VULKAN_HPP_NAMESPACE::Bool32        decodeModeSharedExponent = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT>
+  {
+    using Type =
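+    // Usage sketch (assumes physicalDevice; only meaningful when VK_EXT_astc_decode_mode is
+    // available): support for the shared-exponent decode mode is queried like any feature struct:
+    //
+    //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
+    //                                            vk::PhysicalDeviceASTCDecodeFeaturesEXT>();
+    //   bool rgb9e5Decode =
+    //     chain.get<vk::PhysicalDeviceASTCDecodeFeaturesEXT>().decodeModeSharedExponent == VK_TRUE;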
PhysicalDeviceASTCDecodeFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceAccelerationStructureFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAccelerationStructureFeaturesKHR.html + struct PhysicalDeviceAccelerationStructureFeaturesKHR + { + using NativeType = VkPhysicalDeviceAccelerationStructureFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceAccelerationStructureFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , accelerationStructure{ accelerationStructure_ } + , accelerationStructureCaptureReplay{ accelerationStructureCaptureReplay_ } + , accelerationStructureIndirectBuild{ accelerationStructureIndirectBuild_ } + , accelerationStructureHostCommands{ accelerationStructureHostCommands_ } + , descriptorBindingAccelerationStructureUpdateAfterBind{ descriptorBindingAccelerationStructureUpdateAfterBind_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceAccelerationStructureFeaturesKHR( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAccelerationStructureFeaturesKHR( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAccelerationStructureFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & + setAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & + setAccelerationStructureCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCaptureReplay = accelerationStructureCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & + setAccelerationStructureIndirectBuild( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureIndirectBuild = accelerationStructureIndirectBuild_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & + 
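+    // Usage sketch (assumes deviceCreateInfo as above; requires VK_KHR_acceleration_structure):
+    // enabling ray-tracing acceleration structures typically means passing this struct, with the
+    // desired members set, in the device-creation pNext chain:
+    //
+    //   vk::PhysicalDeviceAccelerationStructureFeaturesKHR asFeatures{};
+    //   asFeatures.setAccelerationStructure( VK_TRUE );
+    //   vk::StructureChain<vk::DeviceCreateInfo, vk::PhysicalDeviceAccelerationStructureFeaturesKHR>
+    //     createChain{ deviceCreateInfo, asFeatures };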
setAccelerationStructureHostCommands( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureHostCommands = accelerationStructureHostCommands_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setDescriptorBindingAccelerationStructureUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingAccelerationStructureUpdateAfterBind = descriptorBindingAccelerationStructureUpdateAfterBind_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAccelerationStructureFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAccelerationStructureFeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + accelerationStructure, + accelerationStructureCaptureReplay, + accelerationStructureIndirectBuild, + accelerationStructureHostCommands, + descriptorBindingAccelerationStructureUpdateAfterBind ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceAccelerationStructureFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ) && + ( accelerationStructureCaptureReplay == rhs.accelerationStructureCaptureReplay ) && + ( accelerationStructureIndirectBuild == rhs.accelerationStructureIndirectBuild ) && + ( accelerationStructureHostCommands == rhs.accelerationStructureHostCommands ) && + ( descriptorBindingAccelerationStructureUpdateAfterBind == rhs.descriptorBindingAccelerationStructureUpdateAfterBind ); +# endif + } + + bool operator!=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure = {}; + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild = {}; + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceAccelerationStructureFeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceAccelerationStructurePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAccelerationStructurePropertiesKHR.html + struct PhysicalDeviceAccelerationStructurePropertiesKHR + { + using NativeType = 
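+    // Usage sketch (assumes physicalDevice): properties structs like this one are read-only and
+    // are retrieved through a properties2 chain rather than filled in by the application:
+    //
+    //   auto props = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
+    //                                              vk::PhysicalDeviceAccelerationStructurePropertiesKHR>();
+    //   uint64_t maxPrims =
+    //     props.get<vk::PhysicalDeviceAccelerationStructurePropertiesKHR>().maxPrimitiveCount;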
VkPhysicalDeviceAccelerationStructurePropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( uint64_t maxGeometryCount_ = {}, + uint64_t maxInstanceCount_ = {}, + uint64_t maxPrimitiveCount_ = {}, + uint32_t maxPerStageDescriptorAccelerationStructures_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ = {}, + uint32_t maxDescriptorSetAccelerationStructures_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures_ = {}, + uint32_t minAccelerationStructureScratchOffsetAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxGeometryCount{ maxGeometryCount_ } + , maxInstanceCount{ maxInstanceCount_ } + , maxPrimitiveCount{ maxPrimitiveCount_ } + , maxPerStageDescriptorAccelerationStructures{ maxPerStageDescriptorAccelerationStructures_ } + , maxPerStageDescriptorUpdateAfterBindAccelerationStructures{ maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ } + , maxDescriptorSetAccelerationStructures{ maxDescriptorSetAccelerationStructures_ } + , maxDescriptorSetUpdateAfterBindAccelerationStructures{ maxDescriptorSetUpdateAfterBindAccelerationStructures_ } + , minAccelerationStructureScratchOffsetAlignment{ minAccelerationStructureScratchOffsetAlignment_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceAccelerationStructurePropertiesKHR( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAccelerationStructurePropertiesKHR( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAccelerationStructurePropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAccelerationStructurePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAccelerationStructurePropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxGeometryCount, + maxInstanceCount, + maxPrimitiveCount, + maxPerStageDescriptorAccelerationStructures, + maxPerStageDescriptorUpdateAfterBindAccelerationStructures, + maxDescriptorSetAccelerationStructures, + maxDescriptorSetUpdateAfterBindAccelerationStructures, + minAccelerationStructureScratchOffsetAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceAccelerationStructurePropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGeometryCount == rhs.maxGeometryCount ) && ( maxInstanceCount == rhs.maxInstanceCount ) && + ( maxPrimitiveCount == rhs.maxPrimitiveCount ) && + ( maxPerStageDescriptorAccelerationStructures == rhs.maxPerStageDescriptorAccelerationStructures ) && + ( maxPerStageDescriptorUpdateAfterBindAccelerationStructures == rhs.maxPerStageDescriptorUpdateAfterBindAccelerationStructures ) && + ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ) && + ( maxDescriptorSetUpdateAfterBindAccelerationStructures == rhs.maxDescriptorSetUpdateAfterBindAccelerationStructures ) && + ( minAccelerationStructureScratchOffsetAlignment == rhs.minAccelerationStructureScratchOffsetAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR; + void * pNext = {}; + uint64_t maxGeometryCount = {}; + uint64_t maxInstanceCount = {}; + uint64_t maxPrimitiveCount = {}; + uint32_t maxPerStageDescriptorAccelerationStructures = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures = {}; + uint32_t maxDescriptorSetAccelerationStructures = {}; + uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures = {}; + uint32_t minAccelerationStructureScratchOffsetAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceAccelerationStructurePropertiesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceAddressBindingReportFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAddressBindingReportFeaturesEXT.html + struct PhysicalDeviceAddressBindingReportFeaturesEXT + { + using NativeType = VkPhysicalDeviceAddressBindingReportFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceAddressBindingReportFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , reportAddressBinding{ reportAddressBinding_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceAddressBindingReportFeaturesEXT( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAddressBindingReportFeaturesEXT( VkPhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAddressBindingReportFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceAddressBindingReportFeaturesEXT & operator=( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceAddressBindingReportFeaturesEXT & operator=( VkPhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + 
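+    // Note: when VULKAN_HPP_USE_REFLECT is defined, operator== compares the member tuple returned
+    // by reflect(); otherwise it falls back to the explicit member-by-member comparison. Both are
+    // equivalent, e.g.
+    //
+    //   vk::PhysicalDeviceAddressBindingReportFeaturesEXT a{}, b{};
+    //   assert( a == b );                    // default-constructed structs compare equal
+    //   b.reportAddressBinding = VK_TRUE;
+    //   assert( a != b );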
} + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT & + setReportAddressBinding( VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding_ ) VULKAN_HPP_NOEXCEPT + { + reportAddressBinding = reportAddressBinding_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceAddressBindingReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAddressBindingReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAddressBindingReportFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAddressBindingReportFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, reportAddressBinding ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceAddressBindingReportFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reportAddressBinding == rhs.reportAddressBinding ); +# endif + } + + bool operator!=( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceAddressBindingReportFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceAmigoProfilingFeaturesSEC, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAmigoProfilingFeaturesSEC.html + struct PhysicalDeviceAmigoProfilingFeaturesSEC + { + using NativeType = VkPhysicalDeviceAmigoProfilingFeaturesSEC; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceAmigoProfilingFeaturesSEC( VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , amigoProfiling{ amigoProfiling_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceAmigoProfilingFeaturesSEC( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAmigoProfilingFeaturesSEC( VkPhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAmigoProfilingFeaturesSEC( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceAmigoProfilingFeaturesSEC & operator=( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceAmigoProfilingFeaturesSEC & operator=( VkPhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC & setAmigoProfiling( VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling_ ) VULKAN_HPP_NOEXCEPT + { + amigoProfiling = amigoProfiling_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceAmigoProfilingFeaturesSEC const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAmigoProfilingFeaturesSEC &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAmigoProfilingFeaturesSEC const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAmigoProfilingFeaturesSEC *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, amigoProfiling ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceAmigoProfilingFeaturesSEC const & ) const = default; +#else + bool operator==( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( amigoProfiling == rhs.amigoProfiling ); +# endif + } + + bool operator!=( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceAmigoProfilingFeaturesSEC; + }; + + // wrapper struct for struct VkPhysicalDeviceAntiLagFeaturesAMD, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAntiLagFeaturesAMD.html + struct PhysicalDeviceAntiLagFeaturesAMD + { + using NativeType = VkPhysicalDeviceAntiLagFeaturesAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAntiLagFeaturesAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceAntiLagFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 antiLag_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , antiLag{ antiLag_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceAntiLagFeaturesAMD( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAntiLagFeaturesAMD( VkPhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAntiLagFeaturesAMD( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceAntiLagFeaturesAMD & operator=( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif 
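+    // Usage sketch (assumes physicalDevice; only meaningful when VK_AMD_anti_lag is available):
+    //
+    //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
+    //                                            vk::PhysicalDeviceAntiLagFeaturesAMD>();
+    //   bool antiLagSupported =
+    //     chain.get<vk::PhysicalDeviceAntiLagFeaturesAMD>().antiLag == VK_TRUE;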
/*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceAntiLagFeaturesAMD & operator=( VkPhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAntiLagFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAntiLagFeaturesAMD & setAntiLag( VULKAN_HPP_NAMESPACE::Bool32 antiLag_ ) VULKAN_HPP_NOEXCEPT + { + antiLag = antiLag_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceAntiLagFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAntiLagFeaturesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAntiLagFeaturesAMD const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAntiLagFeaturesAMD *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, antiLag ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceAntiLagFeaturesAMD const & ) const = default; +#else + bool operator==( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( antiLag == rhs.antiLag ); +# endif + } + + bool operator!=( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAntiLagFeaturesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 antiLag = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceAntiLagFeaturesAMD; + }; + + // wrapper struct for struct VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT.html + struct PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT + { + using NativeType = VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopDynamicState_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , attachmentFeedbackLoopDynamicState{ attachmentFeedbackLoopDynamicState_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : 
PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( + *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & + operator=( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & + operator=( VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & + setAttachmentFeedbackLoopDynamicState( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + attachmentFeedbackLoopDynamicState = attachmentFeedbackLoopDynamicState_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, attachmentFeedbackLoopDynamicState ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentFeedbackLoopDynamicState == rhs.attachmentFeedbackLoopDynamicState ); +# endif + } + + bool operator!=( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopDynamicState = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT.html + struct PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT + { + using NativeType = VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
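+    // Note: the conversion operators make each wrapper bit-compatible with its C counterpart, so
+    // it can be handed directly to code expecting the Vk struct, e.g.
+    //
+    //   vk::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT feat{};
+    //   VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT   cFeat = feat;  // by value/reference
+    //   VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT * pFeat = feat;  // by pointer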
StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , attachmentFeedbackLoopLayout{ attachmentFeedbackLoopLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & + operator=( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & operator=( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & + setAttachmentFeedbackLoopLayout( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout_ ) VULKAN_HPP_NOEXCEPT + { + attachmentFeedbackLoopLayout = attachmentFeedbackLoopLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, attachmentFeedbackLoopLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentFeedbackLoopLayout == rhs.attachmentFeedbackLoopLayout ); +# endif + } + + bool operator!=( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 
attachmentFeedbackLoopLayout = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT.html + struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT + { + using NativeType = VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , advancedBlendCoherentOperations{ advancedBlendCoherentOperations_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceBlendOperationAdvancedFeaturesEXT( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBlendOperationAdvancedFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & + setAdvancedBlendCoherentOperations( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT + { + advancedBlendCoherentOperations = advancedBlendCoherentOperations_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, advancedBlendCoherentOperations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == 
rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations ); +# endif + } + + bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT.html + struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT + { + using NativeType = VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( uint32_t advancedBlendMaxColorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , advancedBlendMaxColorAttachments{ advancedBlendMaxColorAttachments_ } + , advancedBlendIndependentBlend{ advancedBlendIndependentBlend_ } + , advancedBlendNonPremultipliedSrcColor{ advancedBlendNonPremultipliedSrcColor_ } + , advancedBlendNonPremultipliedDstColor{ advancedBlendNonPremultipliedDstColor_ } + , advancedBlendCorrelatedOverlap{ advancedBlendCorrelatedOverlap_ } + , advancedBlendAllOperations{ advancedBlendAllOperations_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceBlendOperationAdvancedPropertiesEXT( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBlendOperationAdvancedPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceBlendOperationAdvancedPropertiesEXT & + operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator 
VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + advancedBlendMaxColorAttachments, + advancedBlendIndependentBlend, + advancedBlendNonPremultipliedSrcColor, + advancedBlendNonPremultipliedDstColor, + advancedBlendCorrelatedOverlap, + advancedBlendAllOperations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments ) && + ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend ) && + ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor ) && + ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor ) && + ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap ) && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations ); +# endif + } + + bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; + void * pNext = {}; + uint32_t advancedBlendMaxColorAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceBorderColorSwizzleFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBorderColorSwizzleFeaturesEXT.html + struct PhysicalDeviceBorderColorSwizzleFeaturesEXT + { + using NativeType = VkPhysicalDeviceBorderColorSwizzleFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , borderColorSwizzle{ borderColorSwizzle_ } + , borderColorSwizzleFromImage{ borderColorSwizzleFromImage_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBorderColorSwizzleFeaturesEXT( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : 
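+    // Usage sketch (assumes physicalDevice; requires VK_EXT_blend_operation_advanced): the
+    // advanced-blend limits are reported through the properties2 chain, e.g.
+    //
+    //   auto props = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
+    //                                              vk::PhysicalDeviceBlendOperationAdvancedPropertiesEXT>();
+    //   uint32_t maxAttachments =
+    //     props.get<vk::PhysicalDeviceBlendOperationAdvancedPropertiesEXT>().advancedBlendMaxColorAttachments;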
PhysicalDeviceBorderColorSwizzleFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & + setBorderColorSwizzle( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ ) VULKAN_HPP_NOEXCEPT + { + borderColorSwizzle = borderColorSwizzle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & + setBorderColorSwizzleFromImage( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ ) VULKAN_HPP_NOEXCEPT + { + borderColorSwizzleFromImage = borderColorSwizzleFromImage_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, borderColorSwizzle, borderColorSwizzleFromImage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( borderColorSwizzle == rhs.borderColorSwizzle ) && + ( borderColorSwizzleFromImage == rhs.borderColorSwizzleFromImage ); +# endif + } + + bool operator!=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle = {}; + VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceBorderColorSwizzleFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceBufferDeviceAddressFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBufferDeviceAddressFeatures.html + struct PhysicalDeviceBufferDeviceAddressFeatures + { + using NativeType = VkPhysicalDeviceBufferDeviceAddressFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceBufferDeviceAddressFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , bufferDeviceAddress{ bufferDeviceAddress_ } + , bufferDeviceAddressCaptureReplay{ bufferDeviceAddressCaptureReplay_ } + , bufferDeviceAddressMultiDevice{ bufferDeviceAddressMultiDevice_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBufferDeviceAddressFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceBufferDeviceAddressFeatures & operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceBufferDeviceAddressFeatures & operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & + setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddress = bufferDeviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & + setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & + setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceBufferDeviceAddressFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBufferDeviceAddressFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBufferDeviceAddressFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const & rhs 
) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && + ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && + ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ); +# endif + } + + bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceBufferDeviceAddressFeatures; + }; + + using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures; + + // wrapper struct for struct VkPhysicalDeviceBufferDeviceAddressFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBufferDeviceAddressFeaturesEXT.html + struct PhysicalDeviceBufferDeviceAddressFeaturesEXT + { + using NativeType = VkPhysicalDeviceBufferDeviceAddressFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , bufferDeviceAddress{ bufferDeviceAddress_ } + , bufferDeviceAddressCaptureReplay{ bufferDeviceAddressCaptureReplay_ } + , bufferDeviceAddressMultiDevice{ bufferDeviceAddressMultiDevice_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBufferDeviceAddressFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & + setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddress = bufferDeviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
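+    // Usage sketch (assumes a vk::Device named device that was created with bufferDeviceAddress
+    // enabled via this struct): once enabled, device addresses of suitably created buffers can
+    // be retrieved, e.g.
+    //
+    //   vk::BufferCreateInfo info{ {}, 256,
+    //                              vk::BufferUsageFlagBits::eStorageBuffer |
+    //                                vk::BufferUsageFlagBits::eShaderDeviceAddress };
+    //   vk::Buffer        buffer = device.createBuffer( info );
+    //   vk::DeviceAddress addr   = device.getBufferAddress( vk::BufferDeviceAddressInfo{ buffer } );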
PhysicalDeviceBufferDeviceAddressFeaturesEXT & + setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & + setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *>( this ); + } + + operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *>( this ); + } + + operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *>( this ); + } + + operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && + ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && + ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ); +# endif + } + + bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT> + { + using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT; + }; + + using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT; + + // wrapper struct for struct VkPhysicalDeviceClusterAccelerationStructureFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterAccelerationStructureFeaturesNV.html + struct PhysicalDeviceClusterAccelerationStructureFeaturesNV + { + using NativeType = VkPhysicalDeviceClusterAccelerationStructureFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterAccelerationStructureFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterAccelerationStructureFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 clusterAccelerationStructure_ = {}, + void * pNext_ = nullptr )
VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , clusterAccelerationStructure{ clusterAccelerationStructure_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceClusterAccelerationStructureFeaturesNV( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceClusterAccelerationStructureFeaturesNV( VkPhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceClusterAccelerationStructureFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceClusterAccelerationStructureFeaturesNV & + operator=( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceClusterAccelerationStructureFeaturesNV & operator=( VkPhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterAccelerationStructureFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterAccelerationStructureFeaturesNV & + setClusterAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 clusterAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + clusterAccelerationStructure = clusterAccelerationStructure_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceClusterAccelerationStructureFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterAccelerationStructureFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterAccelerationStructureFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterAccelerationStructureFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, clusterAccelerationStructure ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( clusterAccelerationStructure == rhs.clusterAccelerationStructure ); +# endif + } + + bool operator!=( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterAccelerationStructureFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 clusterAccelerationStructure = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceClusterAccelerationStructureFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceClusterAccelerationStructurePropertiesNV, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterAccelerationStructurePropertiesNV.html + struct PhysicalDeviceClusterAccelerationStructurePropertiesNV + { + using NativeType = VkPhysicalDeviceClusterAccelerationStructurePropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterAccelerationStructurePropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterAccelerationStructurePropertiesNV( uint32_t maxVerticesPerCluster_ = {}, + uint32_t maxTrianglesPerCluster_ = {}, + uint32_t clusterScratchByteAlignment_ = {}, + uint32_t clusterByteAlignment_ = {}, + uint32_t clusterTemplateByteAlignment_ = {}, + uint32_t clusterBottomLevelByteAlignment_ = {}, + uint32_t clusterTemplateBoundsByteAlignment_ = {}, + uint32_t maxClusterGeometryIndex_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxVerticesPerCluster{ maxVerticesPerCluster_ } + , maxTrianglesPerCluster{ maxTrianglesPerCluster_ } + , clusterScratchByteAlignment{ clusterScratchByteAlignment_ } + , clusterByteAlignment{ clusterByteAlignment_ } + , clusterTemplateByteAlignment{ clusterTemplateByteAlignment_ } + , clusterBottomLevelByteAlignment{ clusterBottomLevelByteAlignment_ } + , clusterTemplateBoundsByteAlignment{ clusterTemplateBoundsByteAlignment_ } + , maxClusterGeometryIndex{ maxClusterGeometryIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterAccelerationStructurePropertiesNV( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceClusterAccelerationStructurePropertiesNV( VkPhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceClusterAccelerationStructurePropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceClusterAccelerationStructurePropertiesNV & + operator=( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceClusterAccelerationStructurePropertiesNV & + operator=( VkPhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceClusterAccelerationStructurePropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterAccelerationStructurePropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterAccelerationStructurePropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterAccelerationStructurePropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxVerticesPerCluster, + maxTrianglesPerCluster, + clusterScratchByteAlignment, + clusterByteAlignment, + clusterTemplateByteAlignment, + clusterBottomLevelByteAlignment, + clusterTemplateBoundsByteAlignment, + maxClusterGeometryIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceClusterAccelerationStructurePropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVerticesPerCluster == rhs.maxVerticesPerCluster ) && + ( maxTrianglesPerCluster == rhs.maxTrianglesPerCluster ) && ( clusterScratchByteAlignment == rhs.clusterScratchByteAlignment ) && + ( clusterByteAlignment == rhs.clusterByteAlignment ) && ( clusterTemplateByteAlignment == rhs.clusterTemplateByteAlignment ) && + ( clusterBottomLevelByteAlignment == rhs.clusterBottomLevelByteAlignment ) && + ( clusterTemplateBoundsByteAlignment == rhs.clusterTemplateBoundsByteAlignment ) && ( maxClusterGeometryIndex == rhs.maxClusterGeometryIndex ); +# endif + } + + bool operator!=( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterAccelerationStructurePropertiesNV; + void * pNext = {}; + uint32_t maxVerticesPerCluster = {}; + uint32_t maxTrianglesPerCluster = {}; + uint32_t clusterScratchByteAlignment = {}; + uint32_t clusterByteAlignment = {}; + uint32_t clusterTemplateByteAlignment = {}; + uint32_t clusterBottomLevelByteAlignment = {}; + uint32_t clusterTemplateBoundsByteAlignment = {}; + uint32_t maxClusterGeometryIndex = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceClusterAccelerationStructurePropertiesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI.html + struct PhysicalDeviceClusterCullingShaderFeaturesHUAWEI + { + using NativeType = VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 clustercullingShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewClusterCullingShader_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , clustercullingShader{ clustercullingShader_ } + , multiviewClusterCullingShader{ multiviewClusterCullingShader_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & operator=( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & + setClustercullingShader( VULKAN_HPP_NAMESPACE::Bool32 clustercullingShader_ ) VULKAN_HPP_NOEXCEPT + { + clustercullingShader = clustercullingShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & + setMultiviewClusterCullingShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewClusterCullingShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewClusterCullingShader = multiviewClusterCullingShader_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, clustercullingShader, multiviewClusterCullingShader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( clustercullingShader == rhs.clustercullingShader ) && + ( multiviewClusterCullingShader == rhs.multiviewClusterCullingShader ); +# endif + } + + bool operator!=( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 clustercullingShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewClusterCullingShader = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceClusterCullingShaderFeaturesHUAWEI; + }; + + // wrapper struct for struct VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI.html + struct PhysicalDeviceClusterCullingShaderPropertiesHUAWEI + { + using NativeType = VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( std::array const & maxWorkGroupCount_ = {}, + std::array const & maxWorkGroupSize_ = {}, + uint32_t maxOutputClusterCount_ = {}, + 
VULKAN_HPP_NAMESPACE::DeviceSize indirectBufferOffsetAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxWorkGroupCount{ maxWorkGroupCount_ } + , maxWorkGroupSize{ maxWorkGroupSize_ } + , maxOutputClusterCount{ maxOutputClusterCount_ } + , indirectBufferOffsetAlignment{ indirectBufferOffsetAlignment_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceClusterCullingShaderPropertiesHUAWEI & + operator=( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceClusterCullingShaderPropertiesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxWorkGroupCount, maxWorkGroupSize, maxOutputClusterCount, indirectBufferOffsetAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxWorkGroupCount == rhs.maxWorkGroupCount ) && + ( maxWorkGroupSize == rhs.maxWorkGroupSize ) && ( maxOutputClusterCount == rhs.maxOutputClusterCount ) && + ( indirectBufferOffsetAlignment == rhs.indirectBufferOffsetAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxWorkGroupCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxWorkGroupSize = {}; + uint32_t maxOutputClusterCount = {}; + VULKAN_HPP_NAMESPACE::DeviceSize indirectBufferOffsetAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceClusterCullingShaderPropertiesHUAWEI; + }; + + // wrapper struct for struct 
VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI.html + struct PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI + { + using NativeType = VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 clusterShadingRate_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , clusterShadingRate{ clusterShadingRate_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & + operator=( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & + setClusterShadingRate( VULKAN_HPP_NAMESPACE::Bool32 clusterShadingRate_ ) VULKAN_HPP_NOEXCEPT + { + clusterShadingRate = clusterShadingRate_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, clusterShadingRate ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( clusterShadingRate == rhs.clusterShadingRate ); +# endif + } + + bool operator!=( 
PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 clusterShadingRate = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; + }; + + // wrapper struct for struct VkPhysicalDeviceCoherentMemoryFeaturesAMD, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCoherentMemoryFeaturesAMD.html + struct PhysicalDeviceCoherentMemoryFeaturesAMD + { + using NativeType = VkPhysicalDeviceCoherentMemoryFeaturesAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceCoherentMemory{ deviceCoherentMemory_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCoherentMemoryFeaturesAMD( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & + setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT + { + deviceCoherentMemory = deviceCoherentMemory_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCoherentMemoryFeaturesAMD *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceCoherentMemory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const & ) const = default; +#else + bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + 
return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceCoherentMemory == rhs.deviceCoherentMemory ); +# endif + } + + bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCoherentMemoryFeaturesAMD; + }; + + // wrapper struct for struct VkPhysicalDeviceColorWriteEnableFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceColorWriteEnableFeaturesEXT.html + struct PhysicalDeviceColorWriteEnableFeaturesEXT + { + using NativeType = VkPhysicalDeviceColorWriteEnableFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , colorWriteEnable{ colorWriteEnable_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceColorWriteEnableFeaturesEXT( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceColorWriteEnableFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & + setColorWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ ) VULKAN_HPP_NOEXCEPT + { + colorWriteEnable = colorWriteEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceColorWriteEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceColorWriteEnableFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, colorWriteEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceColorWriteEnableFeaturesEXT const 
& ) const = default; +#else + bool operator==( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorWriteEnable == rhs.colorWriteEnable ); +# endif + } + + bool operator!=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceColorWriteEnableFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceCommandBufferInheritanceFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCommandBufferInheritanceFeaturesNV.html + struct PhysicalDeviceCommandBufferInheritanceFeaturesNV + { + using NativeType = VkPhysicalDeviceCommandBufferInheritanceFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCommandBufferInheritanceFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCommandBufferInheritanceFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 commandBufferInheritance_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , commandBufferInheritance{ commandBufferInheritance_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceCommandBufferInheritanceFeaturesNV( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCommandBufferInheritanceFeaturesNV( VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCommandBufferInheritanceFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCommandBufferInheritanceFeaturesNV & operator=( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCommandBufferInheritanceFeaturesNV & operator=( VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCommandBufferInheritanceFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCommandBufferInheritanceFeaturesNV & + setCommandBufferInheritance( VULKAN_HPP_NAMESPACE::Bool32 commandBufferInheritance_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferInheritance = commandBufferInheritance_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + 
return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, commandBufferInheritance ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBufferInheritance == rhs.commandBufferInheritance ); +# endif + } + + bool operator!=( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCommandBufferInheritanceFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 commandBufferInheritance = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCommandBufferInheritanceFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR.html + struct PhysicalDeviceComputeShaderDerivativesFeaturesKHR + { + using NativeType = VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , computeDerivativeGroupQuads{ computeDerivativeGroupQuads_ } + , computeDerivativeGroupLinear{ computeDerivativeGroupLinear_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceComputeShaderDerivativesFeaturesKHR( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceComputeShaderDerivativesFeaturesKHR( VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceComputeShaderDerivativesFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceComputeShaderDerivativesFeaturesKHR & + operator=( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceComputeShaderDerivativesFeaturesKHR & operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR & + setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT + { + 
computeDerivativeGroupQuads = computeDerivativeGroupQuads_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR & + setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT + { + computeDerivativeGroupLinear = computeDerivativeGroupLinear_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, computeDerivativeGroupQuads, computeDerivativeGroupLinear ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads ) && + ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear ); +# endif + } + + bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceComputeShaderDerivativesFeaturesKHR; + }; + + using PhysicalDeviceComputeShaderDerivativesFeaturesNV = PhysicalDeviceComputeShaderDerivativesFeaturesKHR; + + // wrapper struct for struct VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR.html + struct PhysicalDeviceComputeShaderDerivativesPropertiesKHR + { + using NativeType = VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 meshAndTaskShaderDerivatives_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , meshAndTaskShaderDerivatives{ meshAndTaskShaderDerivatives_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceComputeShaderDerivativesPropertiesKHR( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
PhysicalDeviceComputeShaderDerivativesPropertiesKHR( VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceComputeShaderDerivativesPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceComputeShaderDerivativesPropertiesKHR & + operator=( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceComputeShaderDerivativesPropertiesKHR & operator=( VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, meshAndTaskShaderDerivatives ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( meshAndTaskShaderDerivatives == rhs.meshAndTaskShaderDerivatives ); +# endif + } + + bool operator!=( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 meshAndTaskShaderDerivatives = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceComputeShaderDerivativesPropertiesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceConditionalRenderingFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceConditionalRenderingFeaturesEXT.html + struct PhysicalDeviceConditionalRenderingFeaturesEXT + { + using NativeType = VkPhysicalDeviceConditionalRenderingFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , conditionalRendering{ conditionalRendering_ } + , inheritedConditionalRendering{ inheritedConditionalRendering_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceConditionalRenderingFeaturesEXT( 
PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceConditionalRenderingFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & + setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT + { + conditionalRendering = conditionalRendering_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & + setInheritedConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT + { + inheritedConditionalRendering = inheritedConditionalRendering_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceConditionalRenderingFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, conditionalRendering, inheritedConditionalRendering ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRendering == rhs.conditionalRendering ) && + ( inheritedConditionalRendering == rhs.inheritedConditionalRendering ); +# endif + } + + bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {}; + VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceConditionalRenderingFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceConservativeRasterizationPropertiesEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceConservativeRasterizationPropertiesEXT.html + struct PhysicalDeviceConservativeRasterizationPropertiesEXT + { + using NativeType = VkPhysicalDeviceConservativeRasterizationPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = {}, + float maxExtraPrimitiveOverestimationSize_ = {}, + float extraPrimitiveOverestimationSizeGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , primitiveOverestimationSize{ primitiveOverestimationSize_ } + , maxExtraPrimitiveOverestimationSize{ maxExtraPrimitiveOverestimationSize_ } + , extraPrimitiveOverestimationSizeGranularity{ extraPrimitiveOverestimationSizeGranularity_ } + , primitiveUnderestimation{ primitiveUnderestimation_ } + , conservativePointAndLineRasterization{ conservativePointAndLineRasterization_ } + , degenerateTrianglesRasterized{ degenerateTrianglesRasterized_ } + , degenerateLinesRasterized{ degenerateLinesRasterized_ } + , fullyCoveredFragmentShaderInputVariable{ fullyCoveredFragmentShaderInputVariable_ } + , conservativeRasterizationPostDepthCoverage{ conservativeRasterizationPostDepthCoverage_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceConservativeRasterizationPropertiesEXT( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceConservativeRasterizationPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT & + operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return 
std::tie( sType, + pNext, + primitiveOverestimationSize, + maxExtraPrimitiveOverestimationSize, + extraPrimitiveOverestimationSizeGranularity, + primitiveUnderestimation, + conservativePointAndLineRasterization, + degenerateTrianglesRasterized, + degenerateLinesRasterized, + fullyCoveredFragmentShaderInputVariable, + conservativeRasterizationPostDepthCoverage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize ) && + ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize ) && + ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity ) && + ( primitiveUnderestimation == rhs.primitiveUnderestimation ) && + ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization ) && + ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized ) && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized ) && + ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable ) && + ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage ); +# endif + } + + bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; + void * pNext = {}; + float primitiveOverestimationSize = {}; + float maxExtraPrimitiveOverestimationSize = {}; + float extraPrimitiveOverestimationSizeGranularity = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation = {}; + VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {}; + VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {}; + VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {}; + VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {}; + VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceCooperativeMatrix2FeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrix2FeaturesNV.html + struct PhysicalDeviceCooperativeMatrix2FeaturesNV + { + using NativeType = VkPhysicalDeviceCooperativeMatrix2FeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2FeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixWorkgroupScope_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixFlexibleDimensions_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixReductions_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixConversions_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 
cooperativeMatrixPerElementOperations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixTensorAddressing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixBlockLoads_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeMatrixWorkgroupScope{ cooperativeMatrixWorkgroupScope_ } + , cooperativeMatrixFlexibleDimensions{ cooperativeMatrixFlexibleDimensions_ } + , cooperativeMatrixReductions{ cooperativeMatrixReductions_ } + , cooperativeMatrixConversions{ cooperativeMatrixConversions_ } + , cooperativeMatrixPerElementOperations{ cooperativeMatrixPerElementOperations_ } + , cooperativeMatrixTensorAddressing{ cooperativeMatrixTensorAddressing_ } + , cooperativeMatrixBlockLoads{ cooperativeMatrixBlockLoads_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2FeaturesNV( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrix2FeaturesNV( VkPhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrix2FeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrix2FeaturesNV & operator=( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrix2FeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixWorkgroupScope( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixWorkgroupScope_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixWorkgroupScope = cooperativeMatrixWorkgroupScope_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixFlexibleDimensions( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixFlexibleDimensions_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixFlexibleDimensions = cooperativeMatrixFlexibleDimensions_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixReductions( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixReductions_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixReductions = cooperativeMatrixReductions_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixConversions( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixConversions_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixConversions = cooperativeMatrixConversions_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixPerElementOperations( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixPerElementOperations_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixPerElementOperations = cooperativeMatrixPerElementOperations_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixTensorAddressing( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixTensorAddressing_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixTensorAddressing = cooperativeMatrixTensorAddressing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixBlockLoads( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixBlockLoads_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixBlockLoads = cooperativeMatrixBlockLoads_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + cooperativeMatrixWorkgroupScope, + cooperativeMatrixFlexibleDimensions, + cooperativeMatrixReductions, + cooperativeMatrixConversions, + cooperativeMatrixPerElementOperations, + cooperativeMatrixTensorAddressing, + cooperativeMatrixBlockLoads ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrix2FeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrixWorkgroupScope == rhs.cooperativeMatrixWorkgroupScope ) && + ( cooperativeMatrixFlexibleDimensions == rhs.cooperativeMatrixFlexibleDimensions ) && + ( cooperativeMatrixReductions == rhs.cooperativeMatrixReductions ) && ( cooperativeMatrixConversions == rhs.cooperativeMatrixConversions ) && + ( cooperativeMatrixPerElementOperations == rhs.cooperativeMatrixPerElementOperations ) && + ( cooperativeMatrixTensorAddressing == rhs.cooperativeMatrixTensorAddressing ) && + ( cooperativeMatrixBlockLoads == rhs.cooperativeMatrixBlockLoads ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixWorkgroupScope = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixFlexibleDimensions = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixReductions = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixConversions = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixPerElementOperations = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixTensorAddressing = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixBlockLoads = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrix2FeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceCooperativeMatrix2PropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrix2PropertiesNV.html + struct PhysicalDeviceCooperativeMatrix2PropertiesNV + { + using NativeType = VkPhysicalDeviceCooperativeMatrix2PropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2PropertiesNV( uint32_t cooperativeMatrixWorkgroupScopeMaxWorkgroupSize_ = {},
+                                                                       uint32_t cooperativeMatrixFlexibleDimensionsMaxDimension_ = {},
+                                                                       uint32_t cooperativeMatrixWorkgroupScopeReservedSharedMemory_ = {},
+                                                                       void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , cooperativeMatrixWorkgroupScopeMaxWorkgroupSize{ cooperativeMatrixWorkgroupScopeMaxWorkgroupSize_ }
+      , cooperativeMatrixFlexibleDimensionsMaxDimension{ cooperativeMatrixFlexibleDimensionsMaxDimension_ }
+      , cooperativeMatrixWorkgroupScopeReservedSharedMemory{ cooperativeMatrixWorkgroupScopeReservedSharedMemory_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2PropertiesNV( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceCooperativeMatrix2PropertiesNV( VkPhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceCooperativeMatrix2PropertiesNV( *reinterpret_cast<PhysicalDeviceCooperativeMatrix2PropertiesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceCooperativeMatrix2PropertiesNV & operator=( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceCooperativeMatrix2PropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2PropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrix2PropertiesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrix2PropertiesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceCooperativeMatrix2PropertiesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceCooperativeMatrix2PropertiesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       cooperativeMatrixWorkgroupScopeMaxWorkgroupSize,
+                       cooperativeMatrixFlexibleDimensionsMaxDimension,
+                       cooperativeMatrixWorkgroupScopeReservedSharedMemory );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceCooperativeMatrix2PropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( cooperativeMatrixWorkgroupScopeMaxWorkgroupSize == rhs.cooperativeMatrixWorkgroupScopeMaxWorkgroupSize ) &&
+             ( cooperativeMatrixFlexibleDimensionsMaxDimension == rhs.cooperativeMatrixFlexibleDimensionsMaxDimension ) &&
+             ( cooperativeMatrixWorkgroupScopeReservedSharedMemory == rhs.cooperativeMatrixWorkgroupScopeReservedSharedMemory );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV;
+    void * pNext = {};
+
uint32_t cooperativeMatrixWorkgroupScopeMaxWorkgroupSize = {}; + uint32_t cooperativeMatrixFlexibleDimensionsMaxDimension = {}; + uint32_t cooperativeMatrixWorkgroupScopeReservedSharedMemory = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrix2PropertiesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceCooperativeMatrixFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrixFeaturesKHR.html + struct PhysicalDeviceCooperativeMatrixFeaturesKHR + { + using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeMatrix{ cooperativeMatrix_ } + , cooperativeMatrixRobustBufferAccess{ cooperativeMatrixRobustBufferAccess_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesKHR( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixFeaturesKHR( VkPhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrixFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrixFeaturesKHR & operator=( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrixFeaturesKHR & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR & + setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrix = cooperativeMatrix_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR & + setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return 
std::tie( sType, pNext, cooperativeMatrix, cooperativeMatrixRobustBufferAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrix == rhs.cooperativeMatrix ) && + ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixFeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceCooperativeMatrixFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrixFeaturesNV.html + struct PhysicalDeviceCooperativeMatrixFeaturesNV + { + using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeMatrix{ cooperativeMatrix_ } + , cooperativeMatrixRobustBufferAccess{ cooperativeMatrixRobustBufferAccess_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrixFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & + setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrix = cooperativeMatrix_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & + setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) 
VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cooperativeMatrix, cooperativeMatrixRobustBufferAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrix == rhs.cooperativeMatrix ) && + ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceCooperativeMatrixPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrixPropertiesKHR.html + struct PhysicalDeviceCooperativeMatrixPropertiesKHR + { + using NativeType = VkPhysicalDeviceCooperativeMatrixPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeMatrixSupportedStages{ cooperativeMatrixSupportedStages_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesKHR( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixPropertiesKHR( VkPhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrixPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrixPropertiesKHR & operator=( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrixPropertiesKHR & operator=( 
VkPhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cooperativeMatrixSupportedStages ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixPropertiesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceCooperativeMatrixPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrixPropertiesNV.html + struct PhysicalDeviceCooperativeMatrixPropertiesNV + { + using NativeType = VkPhysicalDeviceCooperativeMatrixPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeMatrixSupportedStages{ cooperativeMatrixSupportedStages_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrixPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + 
operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cooperativeMatrixSupportedStages ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixPropertiesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceCooperativeVectorFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeVectorFeaturesNV.html + struct PhysicalDeviceCooperativeVectorFeaturesNV + { + using NativeType = VkPhysicalDeviceCooperativeVectorFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeVectorFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeVector_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTraining_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeVector{ cooperativeVector_ } + , cooperativeVectorTraining{ cooperativeVectorTraining_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorFeaturesNV( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeVectorFeaturesNV( VkPhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeVectorFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeVectorFeaturesNV & operator=( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeVectorFeaturesNV & operator=( VkPhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceCooperativeVectorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeVectorFeaturesNV & + setCooperativeVector( VULKAN_HPP_NAMESPACE::Bool32 cooperativeVector_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeVector = cooperativeVector_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeVectorFeaturesNV & + setCooperativeVectorTraining( VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTraining_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeVectorTraining = cooperativeVectorTraining_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCooperativeVectorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeVectorFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeVectorFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeVectorFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cooperativeVector, cooperativeVectorTraining ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeVectorFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeVector == rhs.cooperativeVector ) && + ( cooperativeVectorTraining == rhs.cooperativeVectorTraining ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeVectorFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeVector = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTraining = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeVectorFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceCooperativeVectorPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeVectorPropertiesNV.html + struct PhysicalDeviceCooperativeVectorPropertiesNV + { + using NativeType = VkPhysicalDeviceCooperativeVectorPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeVectorPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorPropertiesNV( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeVectorSupportedStages_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTrainingFloat16Accumulation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTrainingFloat32Accumulation_ = {}, + uint32_t maxCooperativeVectorComponents_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeVectorSupportedStages{ 
cooperativeVectorSupportedStages_ } + , cooperativeVectorTrainingFloat16Accumulation{ cooperativeVectorTrainingFloat16Accumulation_ } + , cooperativeVectorTrainingFloat32Accumulation{ cooperativeVectorTrainingFloat32Accumulation_ } + , maxCooperativeVectorComponents{ maxCooperativeVectorComponents_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorPropertiesNV( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeVectorPropertiesNV( VkPhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeVectorPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeVectorPropertiesNV & operator=( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeVectorPropertiesNV & operator=( VkPhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCooperativeVectorPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeVectorPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeVectorPropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeVectorPropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + cooperativeVectorSupportedStages, + cooperativeVectorTrainingFloat16Accumulation, + cooperativeVectorTrainingFloat32Accumulation, + maxCooperativeVectorComponents ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeVectorPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeVectorSupportedStages == rhs.cooperativeVectorSupportedStages ) && + ( cooperativeVectorTrainingFloat16Accumulation == rhs.cooperativeVectorTrainingFloat16Accumulation ) && + ( cooperativeVectorTrainingFloat32Accumulation == rhs.cooperativeVectorTrainingFloat32Accumulation ) && + ( maxCooperativeVectorComponents == rhs.maxCooperativeVectorComponents ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeVectorPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeVectorSupportedStages = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTrainingFloat16Accumulation = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTrainingFloat32Accumulation = {}; + uint32_t maxCooperativeVectorComponents = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeVectorPropertiesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceCopyMemoryIndirectFeaturesNV, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCopyMemoryIndirectFeaturesNV.html + struct PhysicalDeviceCopyMemoryIndirectFeaturesNV + { + using NativeType = VkPhysicalDeviceCopyMemoryIndirectFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 indirectCopy_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , indirectCopy{ indirectCopy_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesNV( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCopyMemoryIndirectFeaturesNV( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCopyMemoryIndirectFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCopyMemoryIndirectFeaturesNV & operator=( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCopyMemoryIndirectFeaturesNV & operator=( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesNV & setIndirectCopy( VULKAN_HPP_NAMESPACE::Bool32 indirectCopy_ ) VULKAN_HPP_NOEXCEPT + { + indirectCopy = indirectCopy_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, indirectCopy ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indirectCopy == rhs.indirectCopy ); +# endif + } + + bool operator!=( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 indirectCopy = {}; + }; + + template <> + struct CppType + { + 
using Type = PhysicalDeviceCopyMemoryIndirectFeaturesNV;
+  };
+
+  // wrapper struct for struct VkPhysicalDeviceCopyMemoryIndirectPropertiesNV, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCopyMemoryIndirectPropertiesNV.html
+  struct PhysicalDeviceCopyMemoryIndirectPropertiesNV
+  {
+    using NativeType = VkPhysicalDeviceCopyMemoryIndirectPropertiesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectPropertiesNV( VULKAN_HPP_NAMESPACE::QueueFlags supportedQueues_ = {},
+                                                                       void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , supportedQueues{ supportedQueues_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectPropertiesNV( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceCopyMemoryIndirectPropertiesNV( VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceCopyMemoryIndirectPropertiesNV( *reinterpret_cast<PhysicalDeviceCopyMemoryIndirectPropertiesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceCopyMemoryIndirectPropertiesNV & operator=( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceCopyMemoryIndirectPropertiesNV & operator=( VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCopyMemoryIndirectPropertiesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceCopyMemoryIndirectPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCopyMemoryIndirectPropertiesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceCopyMemoryIndirectPropertiesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceCopyMemoryIndirectPropertiesNV *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceCopyMemoryIndirectPropertiesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::QueueFlags const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, supportedQueues );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedQueues == rhs.supportedQueues );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::QueueFlags supportedQueues = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV>
+  {
+    using Type = PhysicalDeviceCopyMemoryIndirectPropertiesNV;
+  };
+
+  // wrapper struct for struct VkPhysicalDeviceCornerSampledImageFeaturesNV, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCornerSampledImageFeaturesNV.html
+
struct PhysicalDeviceCornerSampledImageFeaturesNV + { + using NativeType = VkPhysicalDeviceCornerSampledImageFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cornerSampledImage{ cornerSampledImage_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCornerSampledImageFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCornerSampledImageFeaturesNV & operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCornerSampledImageFeaturesNV & operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & + setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT + { + cornerSampledImage = cornerSampledImage_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCornerSampledImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCornerSampledImageFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCornerSampledImageFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cornerSampledImage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cornerSampledImage == rhs.cornerSampledImage ); +# endif + } + + bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {}; + }; + + template <> + struct CppType + { + using Type = 
PhysicalDeviceCornerSampledImageFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceCoverageReductionModeFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCoverageReductionModeFeaturesNV.html + struct PhysicalDeviceCoverageReductionModeFeaturesNV + { + using NativeType = VkPhysicalDeviceCoverageReductionModeFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , coverageReductionMode{ coverageReductionMode_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceCoverageReductionModeFeaturesNV( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCoverageReductionModeFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & + setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT + { + coverageReductionMode = coverageReductionMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCoverageReductionModeFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, coverageReductionMode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode ); +# endif + } + + bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) 
const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCoverageReductionModeFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceCubicClampFeaturesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCubicClampFeaturesQCOM.html + struct PhysicalDeviceCubicClampFeaturesQCOM + { + using NativeType = VkPhysicalDeviceCubicClampFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCubicClampFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicClampFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 cubicRangeClamp_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cubicRangeClamp{ cubicRangeClamp_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicClampFeaturesQCOM( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCubicClampFeaturesQCOM( VkPhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCubicClampFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCubicClampFeaturesQCOM & operator=( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCubicClampFeaturesQCOM & operator=( VkPhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicClampFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicClampFeaturesQCOM & setCubicRangeClamp( VULKAN_HPP_NAMESPACE::Bool32 cubicRangeClamp_ ) VULKAN_HPP_NOEXCEPT + { + cubicRangeClamp = cubicRangeClamp_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCubicClampFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCubicClampFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCubicClampFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCubicClampFeaturesQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cubicRangeClamp ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCubicClampFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cubicRangeClamp == rhs.cubicRangeClamp ); +# endif + } + + bool operator!=( 
PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCubicClampFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cubicRangeClamp = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCubicClampFeaturesQCOM; + }; + + // wrapper struct for struct VkPhysicalDeviceCubicWeightsFeaturesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCubicWeightsFeaturesQCOM.html + struct PhysicalDeviceCubicWeightsFeaturesQCOM + { + using NativeType = VkPhysicalDeviceCubicWeightsFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCubicWeightsFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicWeightsFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 selectableCubicWeights_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , selectableCubicWeights{ selectableCubicWeights_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicWeightsFeaturesQCOM( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCubicWeightsFeaturesQCOM( VkPhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCubicWeightsFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCubicWeightsFeaturesQCOM & operator=( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCubicWeightsFeaturesQCOM & operator=( VkPhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicWeightsFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicWeightsFeaturesQCOM & + setSelectableCubicWeights( VULKAN_HPP_NAMESPACE::Bool32 selectableCubicWeights_ ) VULKAN_HPP_NOEXCEPT + { + selectableCubicWeights = selectableCubicWeights_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCubicWeightsFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCubicWeightsFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCubicWeightsFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCubicWeightsFeaturesQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, selectableCubicWeights ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCubicWeightsFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return 
( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( selectableCubicWeights == rhs.selectableCubicWeights ); +# endif + } + + bool operator!=( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCubicWeightsFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 selectableCubicWeights = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCubicWeightsFeaturesQCOM; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + // wrapper struct for struct VkPhysicalDeviceCudaKernelLaunchFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCudaKernelLaunchFeaturesNV.html + struct PhysicalDeviceCudaKernelLaunchFeaturesNV + { + using NativeType = VkPhysicalDeviceCudaKernelLaunchFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cudaKernelLaunchFeatures_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cudaKernelLaunchFeatures{ cudaKernelLaunchFeatures_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchFeaturesNV( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCudaKernelLaunchFeaturesNV( VkPhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCudaKernelLaunchFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCudaKernelLaunchFeaturesNV & operator=( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCudaKernelLaunchFeaturesNV & operator=( VkPhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCudaKernelLaunchFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCudaKernelLaunchFeaturesNV & + setCudaKernelLaunchFeatures( VULKAN_HPP_NAMESPACE::Bool32 cudaKernelLaunchFeatures_ ) VULKAN_HPP_NOEXCEPT + { + cudaKernelLaunchFeatures = cudaKernelLaunchFeatures_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cudaKernelLaunchFeatures ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceCudaKernelLaunchFeaturesNV const & ) const = default; +# else + bool operator==( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cudaKernelLaunchFeatures == rhs.cudaKernelLaunchFeatures ); +# endif + } + + bool operator!=( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cudaKernelLaunchFeatures = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCudaKernelLaunchFeaturesNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + // wrapper struct for struct VkPhysicalDeviceCudaKernelLaunchPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCudaKernelLaunchPropertiesNV.html + struct PhysicalDeviceCudaKernelLaunchPropertiesNV + { + using NativeType = VkPhysicalDeviceCudaKernelLaunchPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchPropertiesNV( uint32_t computeCapabilityMinor_ = {}, + uint32_t computeCapabilityMajor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , computeCapabilityMinor{ computeCapabilityMinor_ } + , computeCapabilityMajor{ computeCapabilityMajor_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchPropertiesNV( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCudaKernelLaunchPropertiesNV( VkPhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCudaKernelLaunchPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCudaKernelLaunchPropertiesNV & operator=( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCudaKernelLaunchPropertiesNV & operator=( VkPhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, computeCapabilityMinor, computeCapabilityMajor ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCudaKernelLaunchPropertiesNV const & ) const = default; +# else + bool 
operator==( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( computeCapabilityMinor == rhs.computeCapabilityMinor ) && + ( computeCapabilityMajor == rhs.computeCapabilityMajor ); +# endif + } + + bool operator!=( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV; + void * pNext = {}; + uint32_t computeCapabilityMinor = {}; + uint32_t computeCapabilityMajor = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCudaKernelLaunchPropertiesNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + // wrapper struct for struct VkPhysicalDeviceCustomBorderColorFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCustomBorderColorFeaturesEXT.html + struct PhysicalDeviceCustomBorderColorFeaturesEXT + { + using NativeType = VkPhysicalDeviceCustomBorderColorFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , customBorderColors{ customBorderColors_ } + , customBorderColorWithoutFormat{ customBorderColorWithoutFormat_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCustomBorderColorFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & + setCustomBorderColors( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT + { + customBorderColors = customBorderColors_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & + setCustomBorderColorWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + { + customBorderColorWithoutFormat = customBorderColorWithoutFormat_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this 
); + } + + operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCustomBorderColorFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, customBorderColors, customBorderColorWithoutFormat ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCustomBorderColorFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( customBorderColors == rhs.customBorderColors ) && + ( customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat ); +# endif + } + + bool operator!=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 customBorderColors = {}; + VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCustomBorderColorFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceCustomBorderColorPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCustomBorderColorPropertiesEXT.html + struct PhysicalDeviceCustomBorderColorPropertiesEXT + { + using NativeType = VkPhysicalDeviceCustomBorderColorPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( uint32_t maxCustomBorderColorSamplers_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxCustomBorderColorSamplers{ maxCustomBorderColorSamplers_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCustomBorderColorPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCustomBorderColorPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxCustomBorderColorSamplers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers ); +# endif + } + + bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; + void * pNext = {}; + uint32_t maxCustomBorderColorSamplers = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCustomBorderColorPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV.html + struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV + { + using NativeType = VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dedicatedAllocationImageAliasing{ dedicatedAllocationImageAliasing_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & + operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & + operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void * pNext_ ) 
VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & + setDedicatedAllocationImageAliasing( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT + { + dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dedicatedAllocationImageAliasing ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing ); +# endif + } + + bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceDepthBiasControlFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthBiasControlFeaturesEXT.html + struct PhysicalDeviceDepthBiasControlFeaturesEXT + { + using NativeType = VkPhysicalDeviceDepthBiasControlFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthBiasControlFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthBiasControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasControl_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 leastRepresentableValueForceUnormRepresentation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 floatRepresentation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , depthBiasControl{ depthBiasControl_ } + , leastRepresentableValueForceUnormRepresentation{ leastRepresentableValueForceUnormRepresentation_ } + , floatRepresentation{ floatRepresentation_ } + , depthBiasExact{ depthBiasExact_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthBiasControlFeaturesEXT( 
PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthBiasControlFeaturesEXT( VkPhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthBiasControlFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDepthBiasControlFeaturesEXT & operator=( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDepthBiasControlFeaturesEXT & operator=( VkPhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & + setDepthBiasControl( VULKAN_HPP_NAMESPACE::Bool32 depthBiasControl_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasControl = depthBiasControl_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & + setLeastRepresentableValueForceUnormRepresentation( VULKAN_HPP_NAMESPACE::Bool32 leastRepresentableValueForceUnormRepresentation_ ) VULKAN_HPP_NOEXCEPT + { + leastRepresentableValueForceUnormRepresentation = leastRepresentableValueForceUnormRepresentation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & + setFloatRepresentation( VULKAN_HPP_NAMESPACE::Bool32 floatRepresentation_ ) VULKAN_HPP_NOEXCEPT + { + floatRepresentation = floatRepresentation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setDepthBiasExact( VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasExact = depthBiasExact_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDepthBiasControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthBiasControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthBiasControlFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthBiasControlFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthBiasControl, leastRepresentableValueForceUnormRepresentation, floatRepresentation, depthBiasExact ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthBiasControlFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthBiasControl == rhs.depthBiasControl ) && + ( leastRepresentableValueForceUnormRepresentation == rhs.leastRepresentableValueForceUnormRepresentation ) && + ( floatRepresentation == rhs.floatRepresentation ) && ( depthBiasExact == rhs.depthBiasExact ); +# endif + } + + bool operator!=( 
PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthBiasControlFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasControl = {}; + VULKAN_HPP_NAMESPACE::Bool32 leastRepresentableValueForceUnormRepresentation = {}; + VULKAN_HPP_NAMESPACE::Bool32 floatRepresentation = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthBiasControlFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceDepthClampControlFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthClampControlFeaturesEXT.html + struct PhysicalDeviceDepthClampControlFeaturesEXT + { + using NativeType = VkPhysicalDeviceDepthClampControlFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampControl_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , depthClampControl{ depthClampControl_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampControlFeaturesEXT( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthClampControlFeaturesEXT( VkPhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthClampControlFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDepthClampControlFeaturesEXT & operator=( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDepthClampControlFeaturesEXT & operator=( VkPhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampControlFeaturesEXT & + setDepthClampControl( VULKAN_HPP_NAMESPACE::Bool32 depthClampControl_ ) VULKAN_HPP_NOEXCEPT + { + depthClampControl = depthClampControl_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDepthClampControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthClampControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthClampControlFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthClampControlFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthClampControl ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceDepthClampControlFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClampControl == rhs.depthClampControl );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthClampControl = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT>
+  {
+    using Type = PhysicalDeviceDepthClampControlFeaturesEXT;
+  };
+
+  // wrapper struct for struct VkPhysicalDeviceDepthClampZeroOneFeaturesKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthClampZeroOneFeaturesKHR.html
+  struct PhysicalDeviceDepthClampZeroOneFeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceDepthClampZeroOneFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne_ = {},
+                                                                     void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , depthClampZeroOne{ depthClampZeroOne_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesKHR( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDepthClampZeroOneFeaturesKHR( VkPhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDepthClampZeroOneFeaturesKHR( *reinterpret_cast<PhysicalDeviceDepthClampZeroOneFeaturesKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceDepthClampZeroOneFeaturesKHR & operator=( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceDepthClampZeroOneFeaturesKHR & operator=( VkPhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<PhysicalDeviceDepthClampZeroOneFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesKHR &
+      setDepthClampZeroOne( VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthClampZeroOne = depthClampZeroOne_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPhysicalDeviceDepthClampZeroOneFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDepthClampZeroOneFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceDepthClampZeroOneFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDepthClampZeroOneFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceDepthClampZeroOneFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceDepthClampZeroOneFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceDepthClampZeroOneFeaturesKHR *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceDepthClampZeroOneFeaturesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# 
else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthClampZeroOne ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClampZeroOne == rhs.depthClampZeroOne ); +# endif + } + + bool operator!=( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthClampZeroOneFeaturesKHR; + }; + + using PhysicalDeviceDepthClampZeroOneFeaturesEXT = PhysicalDeviceDepthClampZeroOneFeaturesKHR; + + // wrapper struct for struct VkPhysicalDeviceDepthClipControlFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthClipControlFeaturesEXT.html + struct PhysicalDeviceDepthClipControlFeaturesEXT + { + using NativeType = VkPhysicalDeviceDepthClipControlFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , depthClipControl{ depthClipControl_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthClipControlFeaturesEXT( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthClipControlFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDepthClipControlFeaturesEXT & operator=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDepthClipControlFeaturesEXT & operator=( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & + setDepthClipControl( VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ ) VULKAN_HPP_NOEXCEPT + { + depthClipControl = depthClipControl_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDepthClipControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthClipControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthClipControlFeaturesEXT const *() 
const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthClipControlFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthClipControl ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthClipControlFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipControl == rhs.depthClipControl ); +# endif + } + + bool operator!=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClipControl = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthClipControlFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceDepthClipEnableFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthClipEnableFeaturesEXT.html + struct PhysicalDeviceDepthClipEnableFeaturesEXT + { + using NativeType = VkPhysicalDeviceDepthClipEnableFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , depthClipEnable{ depthClipEnable_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthClipEnableFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthClipEnable = depthClipEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthClipEnableFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthClipEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipEnable == rhs.depthClipEnable ); +# endif + } + + bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthClipEnableFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceDepthStencilResolveProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthStencilResolveProperties.html + struct PhysicalDeviceDepthStencilResolveProperties + { + using NativeType = VkPhysicalDeviceDepthStencilResolveProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthStencilResolveProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , supportedDepthResolveModes{ supportedDepthResolveModes_ } + , supportedStencilResolveModes{ supportedStencilResolveModes_ } + , independentResolveNone{ independentResolveNone_ } + , independentResolve{ independentResolve_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthStencilResolveProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDepthStencilResolveProperties & operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDepthStencilResolveProperties & operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDepthStencilResolveProperties const 
&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthStencilResolveProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthStencilResolveProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, supportedDepthResolveModes, supportedStencilResolveModes, independentResolveNone, independentResolve ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && + ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && ( independentResolveNone == rhs.independentResolveNone ) && + ( independentResolve == rhs.independentResolve ); +# endif + } + + bool operator!=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthStencilResolveProperties; + }; + + using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties; + + // wrapper struct for struct VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT.html + struct PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT + { + using NativeType = VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( size_t combinedImageSamplerDensityMapDescriptorSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , combinedImageSamplerDensityMapDescriptorSize{ combinedImageSamplerDensityMapDescriptorSize_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : 
PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT & + operator=( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT & operator=( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, combinedImageSamplerDensityMapDescriptorSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( combinedImageSamplerDensityMapDescriptorSize == rhs.combinedImageSamplerDensityMapDescriptorSize ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + void * pNext = {}; + size_t combinedImageSamplerDensityMapDescriptorSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceDescriptorBufferFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorBufferFeaturesEXT.html + struct PhysicalDeviceDescriptorBufferFeaturesEXT + { + using NativeType = VkPhysicalDeviceDescriptorBufferFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 descriptorBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferImageLayoutIgnored_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferPushDescriptors_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , descriptorBuffer{ descriptorBuffer_ } + , descriptorBufferCaptureReplay{ descriptorBufferCaptureReplay_ } + , descriptorBufferImageLayoutIgnored{ descriptorBufferImageLayoutIgnored_ } + , 
descriptorBufferPushDescriptors{ descriptorBufferPushDescriptors_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferFeaturesEXT( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorBufferFeaturesEXT( VkPhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorBufferFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorBufferFeaturesEXT & operator=( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorBufferFeaturesEXT & operator=( VkPhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & + setDescriptorBuffer( VULKAN_HPP_NAMESPACE::Bool32 descriptorBuffer_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBuffer = descriptorBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & + setDescriptorBufferCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBufferCaptureReplay = descriptorBufferCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & + setDescriptorBufferImageLayoutIgnored( VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferImageLayoutIgnored_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBufferImageLayoutIgnored = descriptorBufferImageLayoutIgnored_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & + setDescriptorBufferPushDescriptors( VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferPushDescriptors_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBufferPushDescriptors = descriptorBufferPushDescriptors_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDescriptorBufferFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorBufferFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorBufferFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorBufferFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorBuffer, descriptorBufferCaptureReplay, descriptorBufferImageLayoutIgnored, descriptorBufferPushDescriptors ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorBufferFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorBuffer == rhs.descriptorBuffer ) && + ( descriptorBufferCaptureReplay == 
rhs.descriptorBufferCaptureReplay ) && + ( descriptorBufferImageLayoutIgnored == rhs.descriptorBufferImageLayoutIgnored ) && + ( descriptorBufferPushDescriptors == rhs.descriptorBufferPushDescriptors ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferImageLayoutIgnored = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferPushDescriptors = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorBufferFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceDescriptorBufferPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorBufferPropertiesEXT.html + struct PhysicalDeviceDescriptorBufferPropertiesEXT + { + using NativeType = VkPhysicalDeviceDescriptorBufferPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 combinedImageSamplerDescriptorSingleArray_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferlessPushDescriptors_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 allowSamplerImageViewPostSubmitCreation_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferOffsetAlignment_ = {}, + uint32_t maxDescriptorBufferBindings_ = {}, + uint32_t maxResourceDescriptorBufferBindings_ = {}, + uint32_t maxSamplerDescriptorBufferBindings_ = {}, + uint32_t maxEmbeddedImmutableSamplerBindings_ = {}, + uint32_t maxEmbeddedImmutableSamplers_ = {}, + size_t bufferCaptureReplayDescriptorDataSize_ = {}, + size_t imageCaptureReplayDescriptorDataSize_ = {}, + size_t imageViewCaptureReplayDescriptorDataSize_ = {}, + size_t samplerCaptureReplayDescriptorDataSize_ = {}, + size_t accelerationStructureCaptureReplayDescriptorDataSize_ = {}, + size_t samplerDescriptorSize_ = {}, + size_t combinedImageSamplerDescriptorSize_ = {}, + size_t sampledImageDescriptorSize_ = {}, + size_t storageImageDescriptorSize_ = {}, + size_t uniformTexelBufferDescriptorSize_ = {}, + size_t robustUniformTexelBufferDescriptorSize_ = {}, + size_t storageTexelBufferDescriptorSize_ = {}, + size_t robustStorageTexelBufferDescriptorSize_ = {}, + size_t uniformBufferDescriptorSize_ = {}, + size_t robustUniformBufferDescriptorSize_ = {}, + size_t storageBufferDescriptorSize_ = {}, + size_t robustStorageBufferDescriptorSize_ = {}, + size_t inputAttachmentDescriptorSize_ = {}, + size_t accelerationStructureDescriptorSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxSamplerDescriptorBufferRange_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxResourceDescriptorBufferRange_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize samplerDescriptorBufferAddressSpaceSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize resourceDescriptorBufferAddressSpaceSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferAddressSpaceSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , combinedImageSamplerDescriptorSingleArray{ 
combinedImageSamplerDescriptorSingleArray_ } + , bufferlessPushDescriptors{ bufferlessPushDescriptors_ } + , allowSamplerImageViewPostSubmitCreation{ allowSamplerImageViewPostSubmitCreation_ } + , descriptorBufferOffsetAlignment{ descriptorBufferOffsetAlignment_ } + , maxDescriptorBufferBindings{ maxDescriptorBufferBindings_ } + , maxResourceDescriptorBufferBindings{ maxResourceDescriptorBufferBindings_ } + , maxSamplerDescriptorBufferBindings{ maxSamplerDescriptorBufferBindings_ } + , maxEmbeddedImmutableSamplerBindings{ maxEmbeddedImmutableSamplerBindings_ } + , maxEmbeddedImmutableSamplers{ maxEmbeddedImmutableSamplers_ } + , bufferCaptureReplayDescriptorDataSize{ bufferCaptureReplayDescriptorDataSize_ } + , imageCaptureReplayDescriptorDataSize{ imageCaptureReplayDescriptorDataSize_ } + , imageViewCaptureReplayDescriptorDataSize{ imageViewCaptureReplayDescriptorDataSize_ } + , samplerCaptureReplayDescriptorDataSize{ samplerCaptureReplayDescriptorDataSize_ } + , accelerationStructureCaptureReplayDescriptorDataSize{ accelerationStructureCaptureReplayDescriptorDataSize_ } + , samplerDescriptorSize{ samplerDescriptorSize_ } + , combinedImageSamplerDescriptorSize{ combinedImageSamplerDescriptorSize_ } + , sampledImageDescriptorSize{ sampledImageDescriptorSize_ } + , storageImageDescriptorSize{ storageImageDescriptorSize_ } + , uniformTexelBufferDescriptorSize{ uniformTexelBufferDescriptorSize_ } + , robustUniformTexelBufferDescriptorSize{ robustUniformTexelBufferDescriptorSize_ } + , storageTexelBufferDescriptorSize{ storageTexelBufferDescriptorSize_ } + , robustStorageTexelBufferDescriptorSize{ robustStorageTexelBufferDescriptorSize_ } + , uniformBufferDescriptorSize{ uniformBufferDescriptorSize_ } + , robustUniformBufferDescriptorSize{ robustUniformBufferDescriptorSize_ } + , storageBufferDescriptorSize{ storageBufferDescriptorSize_ } + , robustStorageBufferDescriptorSize{ robustStorageBufferDescriptorSize_ } + , inputAttachmentDescriptorSize{ inputAttachmentDescriptorSize_ } + , accelerationStructureDescriptorSize{ accelerationStructureDescriptorSize_ } + , maxSamplerDescriptorBufferRange{ maxSamplerDescriptorBufferRange_ } + , maxResourceDescriptorBufferRange{ maxResourceDescriptorBufferRange_ } + , samplerDescriptorBufferAddressSpaceSize{ samplerDescriptorBufferAddressSpaceSize_ } + , resourceDescriptorBufferAddressSpaceSize{ resourceDescriptorBufferAddressSpaceSize_ } + , descriptorBufferAddressSpaceSize{ descriptorBufferAddressSpaceSize_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferPropertiesEXT( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorBufferPropertiesEXT( VkPhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorBufferPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorBufferPropertiesEXT & operator=( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorBufferPropertiesEXT & operator=( VkPhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDescriptorBufferPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorBufferPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkPhysicalDeviceDescriptorBufferPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorBufferPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + combinedImageSamplerDescriptorSingleArray, + bufferlessPushDescriptors, + allowSamplerImageViewPostSubmitCreation, + descriptorBufferOffsetAlignment, + maxDescriptorBufferBindings, + maxResourceDescriptorBufferBindings, + maxSamplerDescriptorBufferBindings, + maxEmbeddedImmutableSamplerBindings, + maxEmbeddedImmutableSamplers, + bufferCaptureReplayDescriptorDataSize, + imageCaptureReplayDescriptorDataSize, + imageViewCaptureReplayDescriptorDataSize, + samplerCaptureReplayDescriptorDataSize, + accelerationStructureCaptureReplayDescriptorDataSize, + samplerDescriptorSize, + combinedImageSamplerDescriptorSize, + sampledImageDescriptorSize, + storageImageDescriptorSize, + uniformTexelBufferDescriptorSize, + robustUniformTexelBufferDescriptorSize, + storageTexelBufferDescriptorSize, + robustStorageTexelBufferDescriptorSize, + uniformBufferDescriptorSize, + robustUniformBufferDescriptorSize, + storageBufferDescriptorSize, + robustStorageBufferDescriptorSize, + inputAttachmentDescriptorSize, + accelerationStructureDescriptorSize, + maxSamplerDescriptorBufferRange, + maxResourceDescriptorBufferRange, + samplerDescriptorBufferAddressSpaceSize, + resourceDescriptorBufferAddressSpaceSize, + descriptorBufferAddressSpaceSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorBufferPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( combinedImageSamplerDescriptorSingleArray == rhs.combinedImageSamplerDescriptorSingleArray ) && + ( bufferlessPushDescriptors == rhs.bufferlessPushDescriptors ) && + ( allowSamplerImageViewPostSubmitCreation == rhs.allowSamplerImageViewPostSubmitCreation ) && + ( descriptorBufferOffsetAlignment == rhs.descriptorBufferOffsetAlignment ) && ( maxDescriptorBufferBindings == rhs.maxDescriptorBufferBindings ) && + ( maxResourceDescriptorBufferBindings == rhs.maxResourceDescriptorBufferBindings ) && + ( maxSamplerDescriptorBufferBindings == rhs.maxSamplerDescriptorBufferBindings ) && + ( maxEmbeddedImmutableSamplerBindings == rhs.maxEmbeddedImmutableSamplerBindings ) && + ( maxEmbeddedImmutableSamplers == rhs.maxEmbeddedImmutableSamplers ) && + ( bufferCaptureReplayDescriptorDataSize == rhs.bufferCaptureReplayDescriptorDataSize ) && + ( imageCaptureReplayDescriptorDataSize == rhs.imageCaptureReplayDescriptorDataSize ) && + ( imageViewCaptureReplayDescriptorDataSize == rhs.imageViewCaptureReplayDescriptorDataSize ) && + ( samplerCaptureReplayDescriptorDataSize == rhs.samplerCaptureReplayDescriptorDataSize ) && + ( accelerationStructureCaptureReplayDescriptorDataSize == rhs.accelerationStructureCaptureReplayDescriptorDataSize ) && + ( samplerDescriptorSize == rhs.samplerDescriptorSize ) && ( combinedImageSamplerDescriptorSize == rhs.combinedImageSamplerDescriptorSize ) && + ( sampledImageDescriptorSize == rhs.sampledImageDescriptorSize 
) && ( storageImageDescriptorSize == rhs.storageImageDescriptorSize ) && + ( uniformTexelBufferDescriptorSize == rhs.uniformTexelBufferDescriptorSize ) && + ( robustUniformTexelBufferDescriptorSize == rhs.robustUniformTexelBufferDescriptorSize ) && + ( storageTexelBufferDescriptorSize == rhs.storageTexelBufferDescriptorSize ) && + ( robustStorageTexelBufferDescriptorSize == rhs.robustStorageTexelBufferDescriptorSize ) && + ( uniformBufferDescriptorSize == rhs.uniformBufferDescriptorSize ) && + ( robustUniformBufferDescriptorSize == rhs.robustUniformBufferDescriptorSize ) && + ( storageBufferDescriptorSize == rhs.storageBufferDescriptorSize ) && + ( robustStorageBufferDescriptorSize == rhs.robustStorageBufferDescriptorSize ) && + ( inputAttachmentDescriptorSize == rhs.inputAttachmentDescriptorSize ) && + ( accelerationStructureDescriptorSize == rhs.accelerationStructureDescriptorSize ) && + ( maxSamplerDescriptorBufferRange == rhs.maxSamplerDescriptorBufferRange ) && + ( maxResourceDescriptorBufferRange == rhs.maxResourceDescriptorBufferRange ) && + ( samplerDescriptorBufferAddressSpaceSize == rhs.samplerDescriptorBufferAddressSpaceSize ) && + ( resourceDescriptorBufferAddressSpaceSize == rhs.resourceDescriptorBufferAddressSpaceSize ) && + ( descriptorBufferAddressSpaceSize == rhs.descriptorBufferAddressSpaceSize ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 combinedImageSamplerDescriptorSingleArray = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferlessPushDescriptors = {}; + VULKAN_HPP_NAMESPACE::Bool32 allowSamplerImageViewPostSubmitCreation = {}; + VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferOffsetAlignment = {}; + uint32_t maxDescriptorBufferBindings = {}; + uint32_t maxResourceDescriptorBufferBindings = {}; + uint32_t maxSamplerDescriptorBufferBindings = {}; + uint32_t maxEmbeddedImmutableSamplerBindings = {}; + uint32_t maxEmbeddedImmutableSamplers = {}; + size_t bufferCaptureReplayDescriptorDataSize = {}; + size_t imageCaptureReplayDescriptorDataSize = {}; + size_t imageViewCaptureReplayDescriptorDataSize = {}; + size_t samplerCaptureReplayDescriptorDataSize = {}; + size_t accelerationStructureCaptureReplayDescriptorDataSize = {}; + size_t samplerDescriptorSize = {}; + size_t combinedImageSamplerDescriptorSize = {}; + size_t sampledImageDescriptorSize = {}; + size_t storageImageDescriptorSize = {}; + size_t uniformTexelBufferDescriptorSize = {}; + size_t robustUniformTexelBufferDescriptorSize = {}; + size_t storageTexelBufferDescriptorSize = {}; + size_t robustStorageTexelBufferDescriptorSize = {}; + size_t uniformBufferDescriptorSize = {}; + size_t robustUniformBufferDescriptorSize = {}; + size_t storageBufferDescriptorSize = {}; + size_t robustStorageBufferDescriptorSize = {}; + size_t inputAttachmentDescriptorSize = {}; + size_t accelerationStructureDescriptorSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxSamplerDescriptorBufferRange = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxResourceDescriptorBufferRange = {}; + VULKAN_HPP_NAMESPACE::DeviceSize samplerDescriptorBufferAddressSpaceSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize resourceDescriptorBufferAddressSpaceSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferAddressSpaceSize = {}; + }; + + template <> + struct CppType + { + using 
Type = PhysicalDeviceDescriptorBufferPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceDescriptorIndexingFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorIndexingFeatures.html + struct PhysicalDeviceDescriptorIndexingFeatures + { + using NativeType = VkPhysicalDeviceDescriptorIndexingFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderInputAttachmentArrayDynamicIndexing{ shaderInputAttachmentArrayDynamicIndexing_ } + , shaderUniformTexelBufferArrayDynamicIndexing{ shaderUniformTexelBufferArrayDynamicIndexing_ } + , shaderStorageTexelBufferArrayDynamicIndexing{ shaderStorageTexelBufferArrayDynamicIndexing_ } + , shaderUniformBufferArrayNonUniformIndexing{ shaderUniformBufferArrayNonUniformIndexing_ } + , shaderSampledImageArrayNonUniformIndexing{ shaderSampledImageArrayNonUniformIndexing_ } + , shaderStorageBufferArrayNonUniformIndexing{ shaderStorageBufferArrayNonUniformIndexing_ } + , shaderStorageImageArrayNonUniformIndexing{ shaderStorageImageArrayNonUniformIndexing_ } + , shaderInputAttachmentArrayNonUniformIndexing{ shaderInputAttachmentArrayNonUniformIndexing_ } + , shaderUniformTexelBufferArrayNonUniformIndexing{ shaderUniformTexelBufferArrayNonUniformIndexing_ } + , shaderStorageTexelBufferArrayNonUniformIndexing{ shaderStorageTexelBufferArrayNonUniformIndexing_ } + , descriptorBindingUniformBufferUpdateAfterBind{ descriptorBindingUniformBufferUpdateAfterBind_ } + , descriptorBindingSampledImageUpdateAfterBind{ descriptorBindingSampledImageUpdateAfterBind_ 
} + , descriptorBindingStorageImageUpdateAfterBind{ descriptorBindingStorageImageUpdateAfterBind_ } + , descriptorBindingStorageBufferUpdateAfterBind{ descriptorBindingStorageBufferUpdateAfterBind_ } + , descriptorBindingUniformTexelBufferUpdateAfterBind{ descriptorBindingUniformTexelBufferUpdateAfterBind_ } + , descriptorBindingStorageTexelBufferUpdateAfterBind{ descriptorBindingStorageTexelBufferUpdateAfterBind_ } + , descriptorBindingUpdateUnusedWhilePending{ descriptorBindingUpdateUnusedWhilePending_ } + , descriptorBindingPartiallyBound{ descriptorBindingPartiallyBound_ } + , descriptorBindingVariableDescriptorCount{ descriptorBindingVariableDescriptorCount_ } + , runtimeDescriptorArray{ runtimeDescriptorArray_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorIndexingFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorIndexingFeatures & operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorIndexingFeatures & operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 
shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + 
descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT + { + runtimeDescriptorArray = runtimeDescriptorArray_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDescriptorIndexingFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorIndexingFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorIndexingFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderInputAttachmentArrayDynamicIndexing, + shaderUniformTexelBufferArrayDynamicIndexing, + shaderStorageTexelBufferArrayDynamicIndexing, + shaderUniformBufferArrayNonUniformIndexing, + shaderSampledImageArrayNonUniformIndexing, + shaderStorageBufferArrayNonUniformIndexing, + shaderStorageImageArrayNonUniformIndexing, + shaderInputAttachmentArrayNonUniformIndexing, + shaderUniformTexelBufferArrayNonUniformIndexing, + shaderStorageTexelBufferArrayNonUniformIndexing, + descriptorBindingUniformBufferUpdateAfterBind, + descriptorBindingSampledImageUpdateAfterBind, + descriptorBindingStorageImageUpdateAfterBind, + descriptorBindingStorageBufferUpdateAfterBind, + descriptorBindingUniformTexelBufferUpdateAfterBind, + descriptorBindingStorageTexelBufferUpdateAfterBind, + descriptorBindingUpdateUnusedWhilePending, + descriptorBindingPartiallyBound, + descriptorBindingVariableDescriptorCount, + runtimeDescriptorArray ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) && + ( shaderUniformTexelBufferArrayDynamicIndexing == 
rhs.shaderUniformTexelBufferArrayDynamicIndexing ) && + ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) && + ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) && + ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) && + ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) && + ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) && + ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) && + ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) && + ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) && + ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) && + ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) && + ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) && + ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) && + ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) && + ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) && + ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) && + ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) && + ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) && + ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 
descriptorBindingUpdateUnusedWhilePending = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorIndexingFeatures; + }; + + using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures; + + // wrapper struct for struct VkPhysicalDeviceDescriptorIndexingProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorIndexingProperties.html + struct PhysicalDeviceDescriptorIndexingProperties + { + using NativeType = VkPhysicalDeviceDescriptorIndexingProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, + uint32_t maxPerStageUpdateAfterBindResources_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxUpdateAfterBindDescriptorsInAllPools{ maxUpdateAfterBindDescriptorsInAllPools_ } + , shaderUniformBufferArrayNonUniformIndexingNative{ shaderUniformBufferArrayNonUniformIndexingNative_ } + , shaderSampledImageArrayNonUniformIndexingNative{ shaderSampledImageArrayNonUniformIndexingNative_ } + , shaderStorageBufferArrayNonUniformIndexingNative{ shaderStorageBufferArrayNonUniformIndexingNative_ } + , shaderStorageImageArrayNonUniformIndexingNative{ shaderStorageImageArrayNonUniformIndexingNative_ } + , shaderInputAttachmentArrayNonUniformIndexingNative{ shaderInputAttachmentArrayNonUniformIndexingNative_ } + , robustBufferAccessUpdateAfterBind{ robustBufferAccessUpdateAfterBind_ } + , quadDivergentImplicitLod{ quadDivergentImplicitLod_ } + , 
maxPerStageDescriptorUpdateAfterBindSamplers{ maxPerStageDescriptorUpdateAfterBindSamplers_ } + , maxPerStageDescriptorUpdateAfterBindUniformBuffers{ maxPerStageDescriptorUpdateAfterBindUniformBuffers_ } + , maxPerStageDescriptorUpdateAfterBindStorageBuffers{ maxPerStageDescriptorUpdateAfterBindStorageBuffers_ } + , maxPerStageDescriptorUpdateAfterBindSampledImages{ maxPerStageDescriptorUpdateAfterBindSampledImages_ } + , maxPerStageDescriptorUpdateAfterBindStorageImages{ maxPerStageDescriptorUpdateAfterBindStorageImages_ } + , maxPerStageDescriptorUpdateAfterBindInputAttachments{ maxPerStageDescriptorUpdateAfterBindInputAttachments_ } + , maxPerStageUpdateAfterBindResources{ maxPerStageUpdateAfterBindResources_ } + , maxDescriptorSetUpdateAfterBindSamplers{ maxDescriptorSetUpdateAfterBindSamplers_ } + , maxDescriptorSetUpdateAfterBindUniformBuffers{ maxDescriptorSetUpdateAfterBindUniformBuffers_ } + , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic{ maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindStorageBuffers{ maxDescriptorSetUpdateAfterBindStorageBuffers_ } + , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic{ maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindSampledImages{ maxDescriptorSetUpdateAfterBindSampledImages_ } + , maxDescriptorSetUpdateAfterBindStorageImages{ maxDescriptorSetUpdateAfterBindStorageImages_ } + , maxDescriptorSetUpdateAfterBindInputAttachments{ maxDescriptorSetUpdateAfterBindInputAttachments_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorIndexingProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorIndexingProperties & operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorIndexingProperties & operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDescriptorIndexingProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorIndexingProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorIndexingProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxUpdateAfterBindDescriptorsInAllPools, + shaderUniformBufferArrayNonUniformIndexingNative, + shaderSampledImageArrayNonUniformIndexingNative, + shaderStorageBufferArrayNonUniformIndexingNative, + shaderStorageImageArrayNonUniformIndexingNative, + shaderInputAttachmentArrayNonUniformIndexingNative, + robustBufferAccessUpdateAfterBind, + quadDivergentImplicitLod, + maxPerStageDescriptorUpdateAfterBindSamplers, + maxPerStageDescriptorUpdateAfterBindUniformBuffers, + maxPerStageDescriptorUpdateAfterBindStorageBuffers, + 
maxPerStageDescriptorUpdateAfterBindSampledImages, + maxPerStageDescriptorUpdateAfterBindStorageImages, + maxPerStageDescriptorUpdateAfterBindInputAttachments, + maxPerStageUpdateAfterBindResources, + maxDescriptorSetUpdateAfterBindSamplers, + maxDescriptorSetUpdateAfterBindUniformBuffers, + maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, + maxDescriptorSetUpdateAfterBindStorageBuffers, + maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, + maxDescriptorSetUpdateAfterBindSampledImages, + maxDescriptorSetUpdateAfterBindStorageImages, + maxDescriptorSetUpdateAfterBindInputAttachments ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) && + ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) && + ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) && + ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) && + ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) && + ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) && + ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) && + ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) && + ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) && + ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) && + ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) && + ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) && + ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) && + ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) && + ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ); +# endif + } + + bool operator!=( 
PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties; + void * pNext = {}; + uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; + uint32_t maxPerStageUpdateAfterBindResources = {}; + uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorIndexingProperties; + }; + + using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties; + + // wrapper struct for struct VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV.html + struct PhysicalDeviceDescriptorPoolOverallocationFeaturesNV + { + using NativeType = VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 descriptorPoolOverallocation_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , descriptorPoolOverallocation{ descriptorPoolOverallocation_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & + operator=( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) 
VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+ PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & operator=( VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorPoolOverallocationFeaturesNV &
+ setDescriptorPoolOverallocation( VULKAN_HPP_NAMESPACE::Bool32 descriptorPoolOverallocation_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorPoolOverallocation = descriptorPoolOverallocation_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+ operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV *>( this );
+ }
+
+ operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV *>( this );
+ }
+
+ operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
+ {
+ return reinterpret_cast<const VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV *>( this );
+ }
+
+ operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV *() VULKAN_HPP_NOEXCEPT
+ {
+ return reinterpret_cast<VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, descriptorPoolOverallocation );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorPoolOverallocation == rhs.descriptorPoolOverallocation );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorPoolOverallocation = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV>
+ {
+ using Type = PhysicalDeviceDescriptorPoolOverallocationFeaturesNV;
+ };
+
+ // wrapper struct for struct VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE, see
+ // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE.html
+ struct PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE
+ {
+ using NativeType = VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping_ = {},
+ void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext{ pNext_ }
+ , descriptorSetHostMapping{ descriptorSetHostMapping_ }
+ {
+ }
+
+ VULKAN_HPP_CONSTEXPR
+ PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( *reinterpret_cast<PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const *>( &rhs ) )
+ {
+ }
+
+ PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE &
+ operator=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+ PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & operator=( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE &
+ setDescriptorSetHostMapping( VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorSetHostMapping = descriptorSetHostMapping_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+ operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE *>( this );
+ }
+
+ operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE *>( this );
+ }
+
+ operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const *() const VULKAN_HPP_NOEXCEPT
+ {
+ return reinterpret_cast<const VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE *>( this );
+ }
+
+ operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE *() VULKAN_HPP_NOEXCEPT
+ {
+ return reinterpret_cast<VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, descriptorSetHostMapping );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetHostMapping == rhs.descriptorSetHostMapping );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>
+ {
+ using Type = PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+ };
+
+ // wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV, see
+ // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.html
+ struct PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV
+ {
+ using NativeType =
VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCompute_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputePipelines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputeCaptureReplay_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceGeneratedCompute{ deviceGeneratedCompute_ } + , deviceGeneratedComputePipelines{ deviceGeneratedComputePipelines_ } + , deviceGeneratedComputeCaptureReplay{ deviceGeneratedComputeCaptureReplay_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + operator=( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + operator=( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + setDeviceGeneratedCompute( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCompute_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedCompute = deviceGeneratedCompute_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + setDeviceGeneratedComputePipelines( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputePipelines_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedComputePipelines = deviceGeneratedComputePipelines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + setDeviceGeneratedComputeCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputeCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedComputeCaptureReplay = deviceGeneratedComputeCaptureReplay_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceGeneratedCompute, deviceGeneratedComputePipelines, deviceGeneratedComputeCaptureReplay ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceGeneratedCompute == rhs.deviceGeneratedCompute ) && + ( deviceGeneratedComputePipelines == rhs.deviceGeneratedComputePipelines ) && + ( deviceGeneratedComputeCaptureReplay == rhs.deviceGeneratedComputeCaptureReplay ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCompute = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputePipelines = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputeCaptureReplay = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT.html + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT + { + using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dynamicGeneratedPipelineLayout_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceGeneratedCommands{ deviceGeneratedCommands_ } + , dynamicGeneratedPipelineLayout{ dynamicGeneratedPipelineLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & + setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedCommands = deviceGeneratedCommands_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & + setDynamicGeneratedPipelineLayout( VULKAN_HPP_NAMESPACE::Bool32 dynamicGeneratedPipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + dynamicGeneratedPipelineLayout = dynamicGeneratedPipelineLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceGeneratedCommands, dynamicGeneratedPipelineLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands ) && + ( dynamicGeneratedPipelineLayout == rhs.dynamicGeneratedPipelineLayout ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicGeneratedPipelineLayout = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV.html + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV + { + using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceGeneratedCommands{ deviceGeneratedCommands_ } + { + } + + VULKAN_HPP_CONSTEXPR + 
PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+ PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *>( &rhs ) )
+ {
+ }
+
+ PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+ PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *>( &rhs );
+ return *this;
+ }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV &
+ setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceGeneratedCommands = deviceGeneratedCommands_;
+ return *this;
+ }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+ operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV *>( this );
+ }
+
+ operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV *>( this );
+ }
+
+ operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
+ {
+ return reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV *>( this );
+ }
+
+ operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV *() VULKAN_HPP_NOEXCEPT
+ {
+ return reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV *>( this );
+ }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+ auto
+# else
+ std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+ reflect() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::tie( sType, pNext, deviceGeneratedCommands );
+ }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+ auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & ) const = default;
+#else
+ bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+# if defined( VULKAN_HPP_USE_REFLECT )
+ return this->reflect() == rhs.reflect();
+# else
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands );
+# endif
+ }
+
+ bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+#endif
+
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+ void * pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {};
+ };
+
+ template <>
+ struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV>
+ {
+ using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+ };
+
+ // wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT, see
+ // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT.html
+ struct PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT
+ {
+ using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT;
+
+ static const bool allowDuplicate = false;
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( uint32_t maxIndirectPipelineCount_ = {}, + uint32_t maxIndirectShaderObjectCount_ = {}, + uint32_t maxIndirectSequenceCount_ = {}, + uint32_t maxIndirectCommandsTokenCount_ = {}, + uint32_t maxIndirectCommandsTokenOffset_ = {}, + uint32_t maxIndirectCommandsIndirectStride_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagsEXT supportedIndirectCommandsInputModes_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStages_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStagesPipelineBinding_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStagesShaderBinding_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommandsTransformFeedback_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommandsMultiDrawIndirectCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxIndirectPipelineCount{ maxIndirectPipelineCount_ } + , maxIndirectShaderObjectCount{ maxIndirectShaderObjectCount_ } + , maxIndirectSequenceCount{ maxIndirectSequenceCount_ } + , maxIndirectCommandsTokenCount{ maxIndirectCommandsTokenCount_ } + , maxIndirectCommandsTokenOffset{ maxIndirectCommandsTokenOffset_ } + , maxIndirectCommandsIndirectStride{ maxIndirectCommandsIndirectStride_ } + , supportedIndirectCommandsInputModes{ supportedIndirectCommandsInputModes_ } + , supportedIndirectCommandsShaderStages{ supportedIndirectCommandsShaderStages_ } + , supportedIndirectCommandsShaderStagesPipelineBinding{ supportedIndirectCommandsShaderStagesPipelineBinding_ } + , supportedIndirectCommandsShaderStagesShaderBinding{ supportedIndirectCommandsShaderStagesShaderBinding_ } + , deviceGeneratedCommandsTransformFeedback{ deviceGeneratedCommandsTransformFeedback_ } + , deviceGeneratedCommandsMultiDrawIndirectCount{ deviceGeneratedCommandsMultiDrawIndirectCount_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT & + operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# 
if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxIndirectPipelineCount, + maxIndirectShaderObjectCount, + maxIndirectSequenceCount, + maxIndirectCommandsTokenCount, + maxIndirectCommandsTokenOffset, + maxIndirectCommandsIndirectStride, + supportedIndirectCommandsInputModes, + supportedIndirectCommandsShaderStages, + supportedIndirectCommandsShaderStagesPipelineBinding, + supportedIndirectCommandsShaderStagesShaderBinding, + deviceGeneratedCommandsTransformFeedback, + deviceGeneratedCommandsMultiDrawIndirectCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxIndirectPipelineCount == rhs.maxIndirectPipelineCount ) && + ( maxIndirectShaderObjectCount == rhs.maxIndirectShaderObjectCount ) && ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) && + ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) && + ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) && + ( maxIndirectCommandsIndirectStride == rhs.maxIndirectCommandsIndirectStride ) && + ( supportedIndirectCommandsInputModes == rhs.supportedIndirectCommandsInputModes ) && + ( supportedIndirectCommandsShaderStages == rhs.supportedIndirectCommandsShaderStages ) && + ( supportedIndirectCommandsShaderStagesPipelineBinding == rhs.supportedIndirectCommandsShaderStagesPipelineBinding ) && + ( supportedIndirectCommandsShaderStagesShaderBinding == rhs.supportedIndirectCommandsShaderStagesShaderBinding ) && + ( deviceGeneratedCommandsTransformFeedback == rhs.deviceGeneratedCommandsTransformFeedback ) && + ( deviceGeneratedCommandsMultiDrawIndirectCount == rhs.deviceGeneratedCommandsMultiDrawIndirectCount ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + void * pNext = {}; + uint32_t maxIndirectPipelineCount = {}; + uint32_t maxIndirectShaderObjectCount = {}; + uint32_t maxIndirectSequenceCount = {}; + uint32_t maxIndirectCommandsTokenCount = {}; + uint32_t maxIndirectCommandsTokenOffset = {}; + uint32_t maxIndirectCommandsIndirectStride = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagsEXT supportedIndirectCommandsInputModes = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStages = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStagesPipelineBinding = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStagesShaderBinding = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommandsTransformFeedback = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommandsMultiDrawIndirectCount = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV.html + struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV + { + using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( uint32_t maxGraphicsShaderGroupCount_ = {}, + uint32_t maxIndirectSequenceCount_ = {}, + uint32_t maxIndirectCommandsTokenCount_ = {}, + uint32_t maxIndirectCommandsStreamCount_ = {}, + uint32_t maxIndirectCommandsTokenOffset_ = {}, + uint32_t maxIndirectCommandsStreamStride_ = {}, + uint32_t minSequencesCountBufferOffsetAlignment_ = {}, + uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, + uint32_t minIndirectCommandsBufferOffsetAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxGraphicsShaderGroupCount{ maxGraphicsShaderGroupCount_ } + , maxIndirectSequenceCount{ maxIndirectSequenceCount_ } + , maxIndirectCommandsTokenCount{ maxIndirectCommandsTokenCount_ } + , maxIndirectCommandsStreamCount{ maxIndirectCommandsStreamCount_ } + , maxIndirectCommandsTokenOffset{ maxIndirectCommandsTokenOffset_ } + , maxIndirectCommandsStreamStride{ maxIndirectCommandsStreamStride_ } + , minSequencesCountBufferOffsetAlignment{ minSequencesCountBufferOffsetAlignment_ } + , minSequencesIndexBufferOffsetAlignment{ minSequencesIndexBufferOffsetAlignment_ } + , minIndirectCommandsBufferOffsetAlignment{ minIndirectCommandsBufferOffsetAlignment_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & + operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxGraphicsShaderGroupCount, + maxIndirectSequenceCount, + maxIndirectCommandsTokenCount, + maxIndirectCommandsStreamCount, + maxIndirectCommandsTokenOffset, + 
maxIndirectCommandsStreamStride, + minSequencesCountBufferOffsetAlignment, + minSequencesIndexBufferOffsetAlignment, + minIndirectCommandsBufferOffsetAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount ) && + ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) && ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) && + ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount ) && + ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) && + ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride ) && + ( minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment ) && + ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment ) && + ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + void * pNext = {}; + uint32_t maxGraphicsShaderGroupCount = {}; + uint32_t maxIndirectSequenceCount = {}; + uint32_t maxIndirectCommandsTokenCount = {}; + uint32_t maxIndirectCommandsStreamCount = {}; + uint32_t maxIndirectCommandsTokenOffset = {}; + uint32_t maxIndirectCommandsStreamStride = {}; + uint32_t minSequencesCountBufferOffsetAlignment = {}; + uint32_t minSequencesIndexBufferOffsetAlignment = {}; + uint32_t minIndirectCommandsBufferOffsetAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceDeviceMemoryReportFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceMemoryReportFeaturesEXT.html + struct PhysicalDeviceDeviceMemoryReportFeaturesEXT + { + using NativeType = VkPhysicalDeviceDeviceMemoryReportFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceMemoryReport{ deviceMemoryReport_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceMemoryReportFeaturesEXT( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceMemoryReportFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT 
= default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & + setDeviceMemoryReport( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ ) VULKAN_HPP_NOEXCEPT + { + deviceMemoryReport = deviceMemoryReport_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceMemoryReport ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMemoryReport == rhs.deviceMemoryReport ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceMemoryReportFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceDiagnosticsConfigFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDiagnosticsConfigFeaturesNV.html + struct PhysicalDeviceDiagnosticsConfigFeaturesNV + { + using NativeType = VkPhysicalDeviceDiagnosticsConfigFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , diagnosticsConfig{ diagnosticsConfig_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : 
PhysicalDeviceDiagnosticsConfigFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & + setDiagnosticsConfig( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT + { + diagnosticsConfig = diagnosticsConfig_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, diagnosticsConfig ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( diagnosticsConfig == rhs.diagnosticsConfig ); +# endif + } + + bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceDiscardRectanglePropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDiscardRectanglePropertiesEXT.html + struct PhysicalDeviceDiscardRectanglePropertiesEXT + { + using NativeType = VkPhysicalDeviceDiscardRectanglePropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxDiscardRectangles{ maxDiscardRectangles_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( 
PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDiscardRectanglePropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDiscardRectanglePropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxDiscardRectangles ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDiscardRectangles == rhs.maxDiscardRectangles ); +# endif + } + + bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; + void * pNext = {}; + uint32_t maxDiscardRectangles = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDiscardRectanglePropertiesEXT; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + // wrapper struct for struct VkPhysicalDeviceDisplacementMicromapFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDisplacementMicromapFeaturesNV.html + struct PhysicalDeviceDisplacementMicromapFeaturesNV + { + using NativeType = VkPhysicalDeviceDisplacementMicromapFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDisplacementMicromapFeaturesNV; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 displacementMicromap_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , displacementMicromap{ displacementMicromap_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapFeaturesNV( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDisplacementMicromapFeaturesNV( VkPhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : 
PhysicalDeviceDisplacementMicromapFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDisplacementMicromapFeaturesNV & operator=( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDisplacementMicromapFeaturesNV & operator=( VkPhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDisplacementMicromapFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDisplacementMicromapFeaturesNV & + setDisplacementMicromap( VULKAN_HPP_NAMESPACE::Bool32 displacementMicromap_ ) VULKAN_HPP_NOEXCEPT + { + displacementMicromap = displacementMicromap_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDisplacementMicromapFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDisplacementMicromapFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDisplacementMicromapFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDisplacementMicromapFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, displacementMicromap ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDisplacementMicromapFeaturesNV const & ) const = default; +# else + bool operator==( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displacementMicromap == rhs.displacementMicromap ); +# endif + } + + bool operator!=( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDisplacementMicromapFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 displacementMicromap = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDisplacementMicromapFeaturesNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + // wrapper struct for struct VkPhysicalDeviceDisplacementMicromapPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDisplacementMicromapPropertiesNV.html + struct PhysicalDeviceDisplacementMicromapPropertiesNV + { + using NativeType = VkPhysicalDeviceDisplacementMicromapPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDisplacementMicromapPropertiesNV; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapPropertiesNV( uint32_t maxDisplacementMicromapSubdivisionLevel_ = {}, + void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxDisplacementMicromapSubdivisionLevel{ maxDisplacementMicromapSubdivisionLevel_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDisplacementMicromapPropertiesNV( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDisplacementMicromapPropertiesNV( VkPhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDisplacementMicromapPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDisplacementMicromapPropertiesNV & operator=( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDisplacementMicromapPropertiesNV & operator=( VkPhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDisplacementMicromapPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDisplacementMicromapPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDisplacementMicromapPropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDisplacementMicromapPropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxDisplacementMicromapSubdivisionLevel ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDisplacementMicromapPropertiesNV const & ) const = default; +# else + bool operator==( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDisplacementMicromapSubdivisionLevel == rhs.maxDisplacementMicromapSubdivisionLevel ); +# endif + } + + bool operator!=( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDisplacementMicromapPropertiesNV; + void * pNext = {}; + uint32_t maxDisplacementMicromapSubdivisionLevel = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDisplacementMicromapPropertiesNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + // wrapper struct for struct VkPhysicalDeviceDriverProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDriverProperties.html + struct PhysicalDeviceDriverProperties + { + using NativeType = VkPhysicalDeviceDriverProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, + std::array const & driverName_ = {}, + std::array const & driverInfo_ = {}, + VULKAN_HPP_NAMESPACE::ConformanceVersion 
conformanceVersion_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , driverID{ driverID_ } + , driverName{ driverName_ } + , driverInfo{ driverInfo_ } + , conformanceVersion{ conformanceVersion_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDriverProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDriverProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDriverProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDriverProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ConformanceVersion const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, driverID, driverName, driverInfo, conformanceVersion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = driverID <=> rhs.driverID; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( driverName, rhs.driverName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( driverInfo, rhs.driverInfo ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = conformanceVersion <=> rhs.conformanceVersion; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( strcmp( driverName, rhs.driverName ) == 0 ) && + ( strcmp( driverInfo, rhs.driverInfo ) == 0 ) && ( conformanceVersion == rhs.conformanceVersion ); + } + + bool operator!=( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverInfo = {}; + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDriverProperties; + }; + + using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties; + + // wrapper struct for struct VkPhysicalDeviceDrmPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDrmPropertiesEXT.html + struct PhysicalDeviceDrmPropertiesEXT + { + using NativeType = VkPhysicalDeviceDrmPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDrmPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 hasPrimary_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hasRender_ = {}, + int64_t primaryMajor_ = {}, + int64_t primaryMinor_ = {}, + int64_t renderMajor_ = {}, + int64_t renderMinor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , hasPrimary{ hasPrimary_ } + , hasRender{ hasRender_ } + , primaryMajor{ primaryMajor_ } + , primaryMinor{ primaryMinor_ } + , renderMajor{ renderMajor_ } + , renderMinor{ renderMinor_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDrmPropertiesEXT( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDrmPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDrmPropertiesEXT & operator=( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDrmPropertiesEXT & operator=( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDrmPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDrmPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDrmPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDrmPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + 
reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hasPrimary, hasRender, primaryMajor, primaryMinor, renderMajor, renderMinor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDrmPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasPrimary == rhs.hasPrimary ) && ( hasRender == rhs.hasRender ) && + ( primaryMajor == rhs.primaryMajor ) && ( primaryMinor == rhs.primaryMinor ) && ( renderMajor == rhs.renderMajor ) && + ( renderMinor == rhs.renderMinor ); +# endif + } + + bool operator!=( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDrmPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasPrimary = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasRender = {}; + int64_t primaryMajor = {}; + int64_t primaryMinor = {}; + int64_t renderMajor = {}; + int64_t renderMinor = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDrmPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceDynamicRenderingFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDynamicRenderingFeatures.html + struct PhysicalDeviceDynamicRenderingFeatures + { + using NativeType = VkPhysicalDeviceDynamicRenderingFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dynamicRendering{ dynamicRendering_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDynamicRenderingFeatures( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDynamicRenderingFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDynamicRenderingFeatures & operator=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDynamicRenderingFeatures & operator=( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT + { + dynamicRendering = dynamicRendering_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDynamicRenderingFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkPhysicalDeviceDynamicRenderingFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDynamicRenderingFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDynamicRenderingFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dynamicRendering ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDynamicRenderingFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicRendering == rhs.dynamicRendering ); +# endif + } + + bool operator!=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDynamicRenderingFeatures; + }; + + using PhysicalDeviceDynamicRenderingFeaturesKHR = PhysicalDeviceDynamicRenderingFeatures; + + // wrapper struct for struct VkPhysicalDeviceDynamicRenderingLocalReadFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDynamicRenderingLocalReadFeatures.html + struct PhysicalDeviceDynamicRenderingLocalReadFeatures + { + using NativeType = VkPhysicalDeviceDynamicRenderingLocalReadFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingLocalReadFeatures( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dynamicRenderingLocalRead{ dynamicRenderingLocalRead_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDynamicRenderingLocalReadFeatures( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDynamicRenderingLocalReadFeatures( VkPhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDynamicRenderingLocalReadFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDynamicRenderingLocalReadFeatures & operator=( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDynamicRenderingLocalReadFeatures & operator=( VkPhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingLocalReadFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceDynamicRenderingLocalReadFeatures & + setDynamicRenderingLocalRead( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead_ ) VULKAN_HPP_NOEXCEPT + { + dynamicRenderingLocalRead = dynamicRenderingLocalRead_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dynamicRenderingLocalRead ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDynamicRenderingLocalReadFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicRenderingLocalRead == rhs.dynamicRenderingLocalRead ); +# endif + } + + bool operator!=( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDynamicRenderingLocalReadFeatures; + }; + + using PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR = PhysicalDeviceDynamicRenderingLocalReadFeatures; + + // wrapper struct for struct VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT.html + struct PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT + { + using NativeType = VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingUnusedAttachments_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dynamicRenderingUnusedAttachments{ dynamicRenderingUnusedAttachments_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( + *reinterpret_cast( &rhs ) ) + { + } 
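// ---------------------------------------------------------------------------------------------
// Illustrative usage sketch (editorial addition, not part of the vendored vulkan.hpp sources in
// this diff): the PhysicalDevice*Features wrapper structs defined in this header are normally
// queried by linking them into a VkPhysicalDeviceFeatures2 pNext chain. This is a minimal
// sketch only; the handle name `gpu` is an assumption, and PhysicalDeviceDynamicRenderingFeatures
// (declared a little earlier in this header) is used purely as an example of a chained struct.
#include <vulkan/vulkan.hpp>

static bool supportsDynamicRendering( vk::PhysicalDevice gpu )
{
  // getFeatures2 returns a StructureChain whose members are wired together through pNext,
  // so the driver fills in both PhysicalDeviceFeatures2 and the chained feature struct.
  auto chain = gpu.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceDynamicRenderingFeatures>();
  return chain.get<vk::PhysicalDeviceDynamicRenderingFeatures>().dynamicRendering == VK_TRUE;
}
// The same struct, with dynamicRendering set to VK_TRUE, can then be appended to
// vk::DeviceCreateInfo::pNext to enable the feature at device creation time.
// ---------------------------------------------------------------------------------------------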
+ + PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & + operator=( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & + operator=( VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & + setDynamicRenderingUnusedAttachments( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingUnusedAttachments_ ) VULKAN_HPP_NOEXCEPT + { + dynamicRenderingUnusedAttachments = dynamicRenderingUnusedAttachments_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dynamicRenderingUnusedAttachments ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicRenderingUnusedAttachments == rhs.dynamicRenderingUnusedAttachments ); +# endif + } + + bool operator!=( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingUnusedAttachments = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceExclusiveScissorFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExclusiveScissorFeaturesNV.html + struct PhysicalDeviceExclusiveScissorFeaturesNV + { + using NativeType = VkPhysicalDeviceExclusiveScissorFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , exclusiveScissor{ exclusiveScissor_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExclusiveScissorFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExclusiveScissorFeaturesNV & operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExclusiveScissorFeaturesNV & operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT + { + exclusiveScissor = exclusiveScissor_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExclusiveScissorFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExclusiveScissorFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, exclusiveScissor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exclusiveScissor == rhs.exclusiveScissor ); +# endif + } + + bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExclusiveScissorFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceExtendedDynamicState2FeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedDynamicState2FeaturesEXT.html + struct PhysicalDeviceExtendedDynamicState2FeaturesEXT + { + using NativeType = VkPhysicalDeviceExtendedDynamicState2FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , extendedDynamicState2{ extendedDynamicState2_ } + , extendedDynamicState2LogicOp{ extendedDynamicState2LogicOp_ } + , extendedDynamicState2PatchControlPoints{ extendedDynamicState2PatchControlPoints_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExtendedDynamicState2FeaturesEXT( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedDynamicState2FeaturesEXT( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExtendedDynamicState2FeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & + setExtendedDynamicState2( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState2 = extendedDynamicState2_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & + setExtendedDynamicState2LogicOp( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState2LogicOp = extendedDynamicState2LogicOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & + setExtendedDynamicState2PatchControlPoints( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState2PatchControlPoints = extendedDynamicState2PatchControlPoints_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, extendedDynamicState2, extendedDynamicState2LogicOp, extendedDynamicState2PatchControlPoints ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto 
operator<=>( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState2 == rhs.extendedDynamicState2 ) && + ( extendedDynamicState2LogicOp == rhs.extendedDynamicState2LogicOp ) && + ( extendedDynamicState2PatchControlPoints == rhs.extendedDynamicState2PatchControlPoints ); +# endif + } + + bool operator!=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExtendedDynamicState2FeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceExtendedDynamicState3FeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedDynamicState3FeaturesEXT.html + struct PhysicalDeviceExtendedDynamicState3FeaturesEXT + { + using NativeType = VkPhysicalDeviceExtendedDynamicState3FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExtendedDynamicState3FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne_ = {}, + 
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , extendedDynamicState3TessellationDomainOrigin{ extendedDynamicState3TessellationDomainOrigin_ } + , extendedDynamicState3DepthClampEnable{ extendedDynamicState3DepthClampEnable_ } + , extendedDynamicState3PolygonMode{ extendedDynamicState3PolygonMode_ } + , extendedDynamicState3RasterizationSamples{ extendedDynamicState3RasterizationSamples_ } + , extendedDynamicState3SampleMask{ extendedDynamicState3SampleMask_ } + , extendedDynamicState3AlphaToCoverageEnable{ extendedDynamicState3AlphaToCoverageEnable_ } + , extendedDynamicState3AlphaToOneEnable{ extendedDynamicState3AlphaToOneEnable_ } + , extendedDynamicState3LogicOpEnable{ extendedDynamicState3LogicOpEnable_ } + , extendedDynamicState3ColorBlendEnable{ extendedDynamicState3ColorBlendEnable_ } + , extendedDynamicState3ColorBlendEquation{ extendedDynamicState3ColorBlendEquation_ } + , extendedDynamicState3ColorWriteMask{ extendedDynamicState3ColorWriteMask_ } + , extendedDynamicState3RasterizationStream{ extendedDynamicState3RasterizationStream_ } + , extendedDynamicState3ConservativeRasterizationMode{ extendedDynamicState3ConservativeRasterizationMode_ } + , extendedDynamicState3ExtraPrimitiveOverestimationSize{ extendedDynamicState3ExtraPrimitiveOverestimationSize_ } + , extendedDynamicState3DepthClipEnable{ extendedDynamicState3DepthClipEnable_ } + , extendedDynamicState3SampleLocationsEnable{ extendedDynamicState3SampleLocationsEnable_ } + , extendedDynamicState3ColorBlendAdvanced{ extendedDynamicState3ColorBlendAdvanced_ } + , extendedDynamicState3ProvokingVertexMode{ extendedDynamicState3ProvokingVertexMode_ } + , extendedDynamicState3LineRasterizationMode{ extendedDynamicState3LineRasterizationMode_ } + , extendedDynamicState3LineStippleEnable{ extendedDynamicState3LineStippleEnable_ } + , extendedDynamicState3DepthClipNegativeOneToOne{ extendedDynamicState3DepthClipNegativeOneToOne_ } + , extendedDynamicState3ViewportWScalingEnable{ extendedDynamicState3ViewportWScalingEnable_ } + , extendedDynamicState3ViewportSwizzle{ extendedDynamicState3ViewportSwizzle_ } + , extendedDynamicState3CoverageToColorEnable{ extendedDynamicState3CoverageToColorEnable_ } + , extendedDynamicState3CoverageToColorLocation{ extendedDynamicState3CoverageToColorLocation_ } + , extendedDynamicState3CoverageModulationMode{ extendedDynamicState3CoverageModulationMode_ } + , extendedDynamicState3CoverageModulationTableEnable{ extendedDynamicState3CoverageModulationTableEnable_ } + , extendedDynamicState3CoverageModulationTable{ extendedDynamicState3CoverageModulationTable_ } + , extendedDynamicState3CoverageReductionMode{ extendedDynamicState3CoverageReductionMode_ } + , 
extendedDynamicState3RepresentativeFragmentTestEnable{ extendedDynamicState3RepresentativeFragmentTestEnable_ } + , extendedDynamicState3ShadingRateImageEnable{ extendedDynamicState3ShadingRateImageEnable_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExtendedDynamicState3FeaturesEXT( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedDynamicState3FeaturesEXT( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExtendedDynamicState3FeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3TessellationDomainOrigin( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3TessellationDomainOrigin = extendedDynamicState3TessellationDomainOrigin_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3DepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3DepthClampEnable = extendedDynamicState3DepthClampEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3PolygonMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3PolygonMode = extendedDynamicState3PolygonMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3RasterizationSamples( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3RasterizationSamples = extendedDynamicState3RasterizationSamples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3SampleMask( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3SampleMask = extendedDynamicState3SampleMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3AlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3AlphaToCoverageEnable = extendedDynamicState3AlphaToCoverageEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3AlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3AlphaToOneEnable = extendedDynamicState3AlphaToOneEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3LogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3LogicOpEnable = extendedDynamicState3LogicOpEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3ColorBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ColorBlendEnable = extendedDynamicState3ColorBlendEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3ColorBlendEquation( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ColorBlendEquation = extendedDynamicState3ColorBlendEquation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3ColorWriteMask( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ColorWriteMask = extendedDynamicState3ColorWriteMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3RasterizationStream( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3RasterizationStream = extendedDynamicState3RasterizationStream_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ConservativeRasterizationMode( + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ConservativeRasterizationMode = extendedDynamicState3ConservativeRasterizationMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ExtraPrimitiveOverestimationSize( + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ExtraPrimitiveOverestimationSize = extendedDynamicState3ExtraPrimitiveOverestimationSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3DepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3DepthClipEnable = extendedDynamicState3DepthClipEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3SampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3SampleLocationsEnable = extendedDynamicState3SampleLocationsEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3ColorBlendAdvanced( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ColorBlendAdvanced = extendedDynamicState3ColorBlendAdvanced_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3ProvokingVertexMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ProvokingVertexMode = 
extendedDynamicState3ProvokingVertexMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3LineRasterizationMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3LineRasterizationMode = extendedDynamicState3LineRasterizationMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3LineStippleEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3LineStippleEnable = extendedDynamicState3LineStippleEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3DepthClipNegativeOneToOne( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3DepthClipNegativeOneToOne = extendedDynamicState3DepthClipNegativeOneToOne_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3ViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ViewportWScalingEnable = extendedDynamicState3ViewportWScalingEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3ViewportSwizzle( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ViewportSwizzle = extendedDynamicState3ViewportSwizzle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3CoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3CoverageToColorEnable = extendedDynamicState3CoverageToColorEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3CoverageToColorLocation( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3CoverageToColorLocation = extendedDynamicState3CoverageToColorLocation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3CoverageModulationMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3CoverageModulationMode = extendedDynamicState3CoverageModulationMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageModulationTableEnable( + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3CoverageModulationTableEnable = extendedDynamicState3CoverageModulationTableEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3CoverageModulationTable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3CoverageModulationTable = extendedDynamicState3CoverageModulationTable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + 
setExtendedDynamicState3CoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3CoverageReductionMode = extendedDynamicState3CoverageReductionMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3RepresentativeFragmentTestEnable( + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3RepresentativeFragmentTestEnable = extendedDynamicState3RepresentativeFragmentTestEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3ShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ShadingRateImageEnable = extendedDynamicState3ShadingRateImageEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + extendedDynamicState3TessellationDomainOrigin, + extendedDynamicState3DepthClampEnable, + extendedDynamicState3PolygonMode, + extendedDynamicState3RasterizationSamples, + extendedDynamicState3SampleMask, + extendedDynamicState3AlphaToCoverageEnable, + extendedDynamicState3AlphaToOneEnable, + extendedDynamicState3LogicOpEnable, + extendedDynamicState3ColorBlendEnable, + extendedDynamicState3ColorBlendEquation, + extendedDynamicState3ColorWriteMask, + extendedDynamicState3RasterizationStream, + extendedDynamicState3ConservativeRasterizationMode, + extendedDynamicState3ExtraPrimitiveOverestimationSize, + extendedDynamicState3DepthClipEnable, + extendedDynamicState3SampleLocationsEnable, + extendedDynamicState3ColorBlendAdvanced, + extendedDynamicState3ProvokingVertexMode, + extendedDynamicState3LineRasterizationMode, + extendedDynamicState3LineStippleEnable, + extendedDynamicState3DepthClipNegativeOneToOne, + extendedDynamicState3ViewportWScalingEnable, + extendedDynamicState3ViewportSwizzle, + extendedDynamicState3CoverageToColorEnable, + extendedDynamicState3CoverageToColorLocation, + extendedDynamicState3CoverageModulationMode, + extendedDynamicState3CoverageModulationTableEnable, + extendedDynamicState3CoverageModulationTable, + extendedDynamicState3CoverageReductionMode, + extendedDynamicState3RepresentativeFragmentTestEnable, + extendedDynamicState3ShadingRateImageEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( 
pNext == rhs.pNext ) && + ( extendedDynamicState3TessellationDomainOrigin == rhs.extendedDynamicState3TessellationDomainOrigin ) && + ( extendedDynamicState3DepthClampEnable == rhs.extendedDynamicState3DepthClampEnable ) && + ( extendedDynamicState3PolygonMode == rhs.extendedDynamicState3PolygonMode ) && + ( extendedDynamicState3RasterizationSamples == rhs.extendedDynamicState3RasterizationSamples ) && + ( extendedDynamicState3SampleMask == rhs.extendedDynamicState3SampleMask ) && + ( extendedDynamicState3AlphaToCoverageEnable == rhs.extendedDynamicState3AlphaToCoverageEnable ) && + ( extendedDynamicState3AlphaToOneEnable == rhs.extendedDynamicState3AlphaToOneEnable ) && + ( extendedDynamicState3LogicOpEnable == rhs.extendedDynamicState3LogicOpEnable ) && + ( extendedDynamicState3ColorBlendEnable == rhs.extendedDynamicState3ColorBlendEnable ) && + ( extendedDynamicState3ColorBlendEquation == rhs.extendedDynamicState3ColorBlendEquation ) && + ( extendedDynamicState3ColorWriteMask == rhs.extendedDynamicState3ColorWriteMask ) && + ( extendedDynamicState3RasterizationStream == rhs.extendedDynamicState3RasterizationStream ) && + ( extendedDynamicState3ConservativeRasterizationMode == rhs.extendedDynamicState3ConservativeRasterizationMode ) && + ( extendedDynamicState3ExtraPrimitiveOverestimationSize == rhs.extendedDynamicState3ExtraPrimitiveOverestimationSize ) && + ( extendedDynamicState3DepthClipEnable == rhs.extendedDynamicState3DepthClipEnable ) && + ( extendedDynamicState3SampleLocationsEnable == rhs.extendedDynamicState3SampleLocationsEnable ) && + ( extendedDynamicState3ColorBlendAdvanced == rhs.extendedDynamicState3ColorBlendAdvanced ) && + ( extendedDynamicState3ProvokingVertexMode == rhs.extendedDynamicState3ProvokingVertexMode ) && + ( extendedDynamicState3LineRasterizationMode == rhs.extendedDynamicState3LineRasterizationMode ) && + ( extendedDynamicState3LineStippleEnable == rhs.extendedDynamicState3LineStippleEnable ) && + ( extendedDynamicState3DepthClipNegativeOneToOne == rhs.extendedDynamicState3DepthClipNegativeOneToOne ) && + ( extendedDynamicState3ViewportWScalingEnable == rhs.extendedDynamicState3ViewportWScalingEnable ) && + ( extendedDynamicState3ViewportSwizzle == rhs.extendedDynamicState3ViewportSwizzle ) && + ( extendedDynamicState3CoverageToColorEnable == rhs.extendedDynamicState3CoverageToColorEnable ) && + ( extendedDynamicState3CoverageToColorLocation == rhs.extendedDynamicState3CoverageToColorLocation ) && + ( extendedDynamicState3CoverageModulationMode == rhs.extendedDynamicState3CoverageModulationMode ) && + ( extendedDynamicState3CoverageModulationTableEnable == rhs.extendedDynamicState3CoverageModulationTableEnable ) && + ( extendedDynamicState3CoverageModulationTable == rhs.extendedDynamicState3CoverageModulationTable ) && + ( extendedDynamicState3CoverageReductionMode == rhs.extendedDynamicState3CoverageReductionMode ) && + ( extendedDynamicState3RepresentativeFragmentTestEnable == rhs.extendedDynamicState3RepresentativeFragmentTestEnable ) && + ( extendedDynamicState3ShadingRateImageEnable == rhs.extendedDynamicState3ShadingRateImageEnable ); +# endif + } + + bool operator!=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin = {}; + VULKAN_HPP_NAMESPACE::Bool32 
extendedDynamicState3DepthClampEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExtendedDynamicState3FeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceExtendedDynamicState3PropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedDynamicState3PropertiesEXT.html + struct PhysicalDeviceExtendedDynamicState3PropertiesEXT + { + using NativeType = VkPhysicalDeviceExtendedDynamicState3PropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3PropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dynamicPrimitiveTopologyUnrestricted{ dynamicPrimitiveTopologyUnrestricted_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExtendedDynamicState3PropertiesEXT( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
PhysicalDeviceExtendedDynamicState3PropertiesEXT( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExtendedDynamicState3PropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExtendedDynamicState3PropertiesEXT & operator=( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExtendedDynamicState3PropertiesEXT & operator=( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dynamicPrimitiveTopologyUnrestricted ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicPrimitiveTopologyUnrestricted == rhs.dynamicPrimitiveTopologyUnrestricted ); +# endif + } + + bool operator!=( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExtendedDynamicState3PropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceExtendedDynamicStateFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedDynamicStateFeaturesEXT.html + struct PhysicalDeviceExtendedDynamicStateFeaturesEXT + { + using NativeType = VkPhysicalDeviceExtendedDynamicStateFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , extendedDynamicState{ extendedDynamicState_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExtendedDynamicStateFeaturesEXT( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT 
const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExtendedDynamicStateFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & + setExtendedDynamicState( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState = extendedDynamicState_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, extendedDynamicState ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState == rhs.extendedDynamicState ); +# endif + } + + bool operator!=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV.html + struct PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV + { + using NativeType = VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 extendedSparseAddressSpace_ = {}, + void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , extendedSparseAddressSpace{ extendedSparseAddressSpace_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( *reinterpret_cast<PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV &
+      operator=( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & operator=( VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV &
+      setExtendedSparseAddressSpace( VULKAN_HPP_NAMESPACE::Bool32 extendedSparseAddressSpace_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extendedSparseAddressSpace = extendedSparseAddressSpace_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, extendedSparseAddressSpace );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedSparseAddressSpace == rhs.extendedSparseAddressSpace );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType                      = StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV;
+    void *                              pNext                      = {};
+    VULKAN_HPP_NAMESPACE::Bool32        extendedSparseAddressSpace = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV>
+  {
+    using Type = PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV;
+  };
+
+  // wrapper struct for struct VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV.html
+  struct PhysicalDeviceExtendedSparseAddressSpacePropertiesNV
+  {
+    using NativeType = VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV;
+
+    static const bool                                  allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( VULKAN_HPP_NAMESPACE::DeviceSize       extendedSparseAddressSpaceSize_ = {},
+                                                                               VULKAN_HPP_NAMESPACE::ImageUsageFlags  extendedSparseImageUsageFlags_  = {},
+                                                                               VULKAN_HPP_NAMESPACE::BufferUsageFlags extendedSparseBufferUsageFlags_ = {},
+                                                                               void *                                 pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , extendedSparseAddressSpaceSize{ extendedSparseAddressSpaceSize_ }
+      , extendedSparseImageUsageFlags{ extendedSparseImageUsageFlags_ }
+      , extendedSparseBufferUsageFlags{ extendedSparseBufferUsageFlags_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( *reinterpret_cast<PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceExtendedSparseAddressSpacePropertiesNV &
+      operator=( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceExtendedSparseAddressSpacePropertiesNV & operator=( VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::DeviceSize const &,
+               VULKAN_HPP_NAMESPACE::ImageUsageFlags const &,
+               VULKAN_HPP_NAMESPACE::BufferUsageFlags const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, extendedSparseAddressSpaceSize, extendedSparseImageUsageFlags, extendedSparseBufferUsageFlags );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedSparseAddressSpaceSize == rhs.extendedSparseAddressSpaceSize ) &&
+             ( extendedSparseImageUsageFlags == rhs.extendedSparseImageUsageFlags ) && ( extendedSparseBufferUsageFlags == rhs.extendedSparseBufferUsageFlags );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType =
StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize extendedSparseAddressSpaceSize = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags extendedSparseImageUsageFlags = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags extendedSparseBufferUsageFlags = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExtendedSparseAddressSpacePropertiesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceExternalBufferInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalBufferInfo.html + struct PhysicalDeviceExternalBufferInfo + { + using NativeType = VkPhysicalDeviceExternalBufferInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( + VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , usage{ usage_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalBufferInfo( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalBufferInfo & operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExternalBufferInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalBufferInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalBufferInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif 
+ reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, usage, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalBufferInfo const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalBufferInfo; + }; + + using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo; + + // wrapper struct for struct VkPhysicalDeviceExternalComputeQueuePropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalComputeQueuePropertiesNV.html + struct PhysicalDeviceExternalComputeQueuePropertiesNV + { + using NativeType = VkPhysicalDeviceExternalComputeQueuePropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalComputeQueuePropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalComputeQueuePropertiesNV( uint32_t externalDataSize_ = {}, + uint32_t maxExternalQueues_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , externalDataSize{ externalDataSize_ } + , maxExternalQueues{ maxExternalQueues_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalComputeQueuePropertiesNV( PhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalComputeQueuePropertiesNV( VkPhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalComputeQueuePropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalComputeQueuePropertiesNV & operator=( PhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExternalComputeQueuePropertiesNV & operator=( VkPhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceExternalComputeQueuePropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalComputeQueuePropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalComputeQueuePropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalComputeQueuePropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if 
defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, externalDataSize, maxExternalQueues ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalComputeQueuePropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalDataSize == rhs.externalDataSize ) && ( maxExternalQueues == rhs.maxExternalQueues ); +# endif + } + + bool operator!=( PhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalComputeQueuePropertiesNV; + void * pNext = {}; + uint32_t externalDataSize = {}; + uint32_t maxExternalQueues = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalComputeQueuePropertiesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceExternalFenceInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalFenceInfo.html + struct PhysicalDeviceExternalFenceInfo + { + using NativeType = VkPhysicalDeviceExternalFenceInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFenceInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalFenceInfo( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalFenceInfo & operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExternalFenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalFenceInfo const *() const VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalFenceInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalFenceInfo const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalFenceInfo; + }; + + using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + // wrapper struct for struct VkPhysicalDeviceExternalFormatResolveFeaturesANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalFormatResolveFeaturesANDROID.html + struct PhysicalDeviceExternalFormatResolveFeaturesANDROID + { + using NativeType = VkPhysicalDeviceExternalFormatResolveFeaturesANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFormatResolveFeaturesANDROID( VULKAN_HPP_NAMESPACE::Bool32 externalFormatResolve_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , externalFormatResolve{ externalFormatResolve_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalFormatResolveFeaturesANDROID( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalFormatResolveFeaturesANDROID( VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalFormatResolveFeaturesANDROID( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalFormatResolveFeaturesANDROID & + operator=( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExternalFormatResolveFeaturesANDROID & operator=( VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFormatResolveFeaturesANDROID & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFormatResolveFeaturesANDROID & + setExternalFormatResolve( VULKAN_HPP_NAMESPACE::Bool32 externalFormatResolve_ ) 
VULKAN_HPP_NOEXCEPT + { + externalFormatResolve = externalFormatResolve_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, externalFormatResolve ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & ) const = default; +# else + bool operator==( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormatResolve == rhs.externalFormatResolve ); +# endif + } + + bool operator!=( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 externalFormatResolve = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalFormatResolveFeaturesANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + // wrapper struct for struct VkPhysicalDeviceExternalFormatResolvePropertiesANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalFormatResolvePropertiesANDROID.html + struct PhysicalDeviceExternalFormatResolvePropertiesANDROID + { + using NativeType = VkPhysicalDeviceExternalFormatResolvePropertiesANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFormatResolvePropertiesANDROID( + VULKAN_HPP_NAMESPACE::Bool32 nullColorAttachmentWithExternalFormatResolve_ = {}, + VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetX_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetY_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , nullColorAttachmentWithExternalFormatResolve{ nullColorAttachmentWithExternalFormatResolve_ } + , externalFormatResolveChromaOffsetX{ externalFormatResolveChromaOffsetX_ } + , externalFormatResolveChromaOffsetY{ externalFormatResolveChromaOffsetY_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalFormatResolvePropertiesANDROID( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
PhysicalDeviceExternalFormatResolvePropertiesANDROID( VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalFormatResolvePropertiesANDROID( *reinterpret_cast<PhysicalDeviceExternalFormatResolvePropertiesANDROID const *>( &rhs ) ) + { + } + + PhysicalDeviceExternalFormatResolvePropertiesANDROID & + operator=( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExternalFormatResolvePropertiesANDROID & operator=( VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolvePropertiesANDROID const *>( &rhs ); + return *this; + } + + operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceExternalFormatResolvePropertiesANDROID *>( this ); + } + + operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceExternalFormatResolvePropertiesANDROID *>( this ); + } + + operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<const VkPhysicalDeviceExternalFormatResolvePropertiesANDROID *>( this ); + } + + operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<VkPhysicalDeviceExternalFormatResolvePropertiesANDROID *>( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, nullColorAttachmentWithExternalFormatResolve, externalFormatResolveChromaOffsetX, externalFormatResolveChromaOffsetY ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & ) const = default; +# else + bool operator==( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( nullColorAttachmentWithExternalFormatResolve == rhs.nullColorAttachmentWithExternalFormatResolve ) && + ( externalFormatResolveChromaOffsetX == rhs.externalFormatResolveChromaOffsetX ) && + ( externalFormatResolveChromaOffsetY == rhs.externalFormatResolveChromaOffsetY ); +# endif + } + + bool operator!=( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 nullColorAttachmentWithExternalFormatResolve = {}; + VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetX = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetY = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID> + { + using Type = PhysicalDeviceExternalFormatResolvePropertiesANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + // wrapper struct for struct VkPhysicalDeviceExternalImageFormatInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalImageFormatInfo.html + struct PhysicalDeviceExternalImageFormatInfo + { + using NativeType = VkPhysicalDeviceExternalImageFormatInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalImageFormatInfo; + +#if
!defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalImageFormatInfo( *reinterpret_cast<PhysicalDeviceExternalImageFormatInfo const *>( &rhs ) ) + { + } + + PhysicalDeviceExternalImageFormatInfo & operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExternalImageFormatInfo & operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExternalImageFormatInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo *>( this ); + } + + operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo *>( this ); + } + + operator VkPhysicalDeviceExternalImageFormatInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo *>( this ); + } + + operator VkPhysicalDeviceExternalImageFormatInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalImageFormatInfo const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceExternalImageFormatInfo> + { + using Type = PhysicalDeviceExternalImageFormatInfo; + }; + + using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo; + + // wrapper struct for struct VkPhysicalDeviceExternalMemoryHostPropertiesEXT, see + //
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalMemoryHostPropertiesEXT.html + struct PhysicalDeviceExternalMemoryHostPropertiesEXT + { + using NativeType = VkPhysicalDeviceExternalMemoryHostPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minImportedHostPointerAlignment{ minImportedHostPointerAlignment_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalMemoryHostPropertiesEXT( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalMemoryHostPropertiesEXT( *reinterpret_cast<PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs ) ) + { + } + + PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs ); + return *this; + } + + operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>( this ); + } + + operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>( this ); + } + + operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>( this ); + } + + operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minImportedHostPointerAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT> + { + using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceExternalMemoryRDMAFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalMemoryRDMAFeaturesNV.html + struct
PhysicalDeviceExternalMemoryRDMAFeaturesNV + { + using NativeType = VkPhysicalDeviceExternalMemoryRDMAFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , externalMemoryRDMA{ externalMemoryRDMA_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalMemoryRDMAFeaturesNV( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalMemoryRDMAFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & + setExternalMemoryRDMA( VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ ) VULKAN_HPP_NOEXCEPT + { + externalMemoryRDMA = externalMemoryRDMA_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, externalMemoryRDMA ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryRDMA == rhs.externalMemoryRDMA ); +# endif + } + + bool operator!=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA = {}; + }; + + template <> + struct CppType + { + using Type = 
PhysicalDeviceExternalMemoryRDMAFeaturesNV; + }; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + // wrapper struct for struct VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX.html + struct PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX + { + using NativeType = VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( VULKAN_HPP_NAMESPACE::Bool32 screenBufferImport_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , screenBufferImport{ screenBufferImport_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & + operator=( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & operator=( VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & + setScreenBufferImport( VULKAN_HPP_NAMESPACE::Bool32 screenBufferImport_ ) VULKAN_HPP_NOEXCEPT + { + screenBufferImport = screenBufferImport_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, screenBufferImport ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & ) const = default; +# else + bool operator==( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( screenBufferImport == rhs.screenBufferImport ); +# endif + } + + bool operator!=( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 screenBufferImport = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; + }; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + // wrapper struct for struct VkPhysicalDeviceExternalSemaphoreInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalSemaphoreInfo.html + struct PhysicalDeviceExternalSemaphoreInfo + { + using NativeType = VkPhysicalDeviceExternalSemaphoreInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalSemaphoreInfo( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalSemaphoreInfo & operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExternalSemaphoreInfo & operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExternalSemaphoreInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalSemaphoreInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalSemaphoreInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const & ) const = default; +#else + bool operator==( 
PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalSemaphoreInfo; + }; + + using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo; + + // wrapper struct for struct VkPhysicalDeviceFaultFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFaultFeaturesEXT.html + struct PhysicalDeviceFaultFeaturesEXT + { + using NativeType = VkPhysicalDeviceFaultFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFaultFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFaultFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 deviceFault_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceFault{ deviceFault_ } + , deviceFaultVendorBinary{ deviceFaultVendorBinary_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFaultFeaturesEXT( PhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFaultFeaturesEXT( VkPhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFaultFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFaultFeaturesEXT & operator=( PhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFaultFeaturesEXT & operator=( VkPhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setDeviceFault( VULKAN_HPP_NAMESPACE::Bool32 deviceFault_ ) VULKAN_HPP_NOEXCEPT + { + deviceFault = deviceFault_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & + setDeviceFaultVendorBinary( VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary_ ) VULKAN_HPP_NOEXCEPT + { + deviceFaultVendorBinary = deviceFaultVendorBinary_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFaultFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFaultFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFaultFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFaultFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceFault, deviceFaultVendorBinary ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFaultFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFaultFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceFault == rhs.deviceFault ) && + ( deviceFaultVendorBinary == rhs.deviceFaultVendorBinary ); +# endif + } + + bool operator!=( PhysicalDeviceFaultFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFaultFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceFault = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFaultFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceFeatures2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFeatures2.html + struct PhysicalDeviceFeatures2 + { + using NativeType = VkPhysicalDeviceFeatures2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , features{ features_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFeatures2( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFeatures2 & operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFeatures2 & operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT + { + features = features_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFeatures2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFeatures2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFeatures2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() 
const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, features ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFeatures2 const & ) const = default; +#else + bool operator==( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( features == rhs.features ); +# endif + } + + bool operator!=( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFeatures2; + }; + + using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2; + + // wrapper struct for struct VkPhysicalDeviceFloatControlsProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFloatControlsProperties.html + struct PhysicalDeviceFloatControlsProperties + { + using NativeType = VkPhysicalDeviceFloatControlsProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFloatControlsProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , denormBehaviorIndependence{ denormBehaviorIndependence_ } + , roundingModeIndependence{ roundingModeIndependence_ } + , shaderSignedZeroInfNanPreserveFloat16{ shaderSignedZeroInfNanPreserveFloat16_ } + , shaderSignedZeroInfNanPreserveFloat32{ shaderSignedZeroInfNanPreserveFloat32_ } + , shaderSignedZeroInfNanPreserveFloat64{ shaderSignedZeroInfNanPreserveFloat64_ } + , shaderDenormPreserveFloat16{ shaderDenormPreserveFloat16_ } + , shaderDenormPreserveFloat32{ shaderDenormPreserveFloat32_ } + , shaderDenormPreserveFloat64{ 
shaderDenormPreserveFloat64_ } + , shaderDenormFlushToZeroFloat16{ shaderDenormFlushToZeroFloat16_ } + , shaderDenormFlushToZeroFloat32{ shaderDenormFlushToZeroFloat32_ } + , shaderDenormFlushToZeroFloat64{ shaderDenormFlushToZeroFloat64_ } + , shaderRoundingModeRTEFloat16{ shaderRoundingModeRTEFloat16_ } + , shaderRoundingModeRTEFloat32{ shaderRoundingModeRTEFloat32_ } + , shaderRoundingModeRTEFloat64{ shaderRoundingModeRTEFloat64_ } + , shaderRoundingModeRTZFloat16{ shaderRoundingModeRTZFloat16_ } + , shaderRoundingModeRTZFloat32{ shaderRoundingModeRTZFloat32_ } + , shaderRoundingModeRTZFloat64{ shaderRoundingModeRTZFloat64_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFloatControlsProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFloatControlsProperties & operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFloatControlsProperties & operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFloatControlsProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFloatControlsProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFloatControlsProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + denormBehaviorIndependence, + roundingModeIndependence, + shaderSignedZeroInfNanPreserveFloat16, + shaderSignedZeroInfNanPreserveFloat32, + shaderSignedZeroInfNanPreserveFloat64, + shaderDenormPreserveFloat16, + shaderDenormPreserveFloat32, + shaderDenormPreserveFloat64, + shaderDenormFlushToZeroFloat16, + shaderDenormFlushToZeroFloat32, + shaderDenormFlushToZeroFloat64, + shaderRoundingModeRTEFloat16, + shaderRoundingModeRTEFloat32, + shaderRoundingModeRTEFloat64, + shaderRoundingModeRTZFloat16, + shaderRoundingModeRTZFloat32, + shaderRoundingModeRTZFloat64 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFloatControlsProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && + ( roundingModeIndependence == rhs.roundingModeIndependence ) && + ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) && + ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) && + ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) && + ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && ( shaderDenormPreserveFloat32 == 
rhs.shaderDenormPreserveFloat32 ) && + ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) && + ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) && + ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) && + ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) && + ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) && + ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ); +# endif + } + + bool operator!=( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFloatControlsProperties; + }; + + using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties; + + // wrapper struct for struct VkPhysicalDeviceFragmentDensityMap2FeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMap2FeaturesEXT.html + struct PhysicalDeviceFragmentDensityMap2FeaturesEXT + { + using NativeType = VkPhysicalDeviceFragmentDensityMap2FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentDensityMapDeferred{ fragmentDensityMapDeferred_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( 
PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMap2FeaturesEXT( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMap2FeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & + setFragmentDensityMapDeferred( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapDeferred = fragmentDensityMapDeferred_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityMapDeferred ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapDeferred == rhs.fragmentDensityMapDeferred ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceFragmentDensityMap2PropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMap2PropertiesEXT.html + struct PhysicalDeviceFragmentDensityMap2PropertiesEXT + { + using NativeType = VkPhysicalDeviceFragmentDensityMap2PropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess_ = {}, + uint32_t maxSubsampledArrayLayers_ = {}, + uint32_t maxDescriptorSetSubsampledSamplers_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , subsampledLoads{ subsampledLoads_ } + , subsampledCoarseReconstructionEarlyAccess{ subsampledCoarseReconstructionEarlyAccess_ } + , maxSubsampledArrayLayers{ maxSubsampledArrayLayers_ } + , maxDescriptorSetSubsampledSamplers{ maxDescriptorSetSubsampledSamplers_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentDensityMap2PropertiesEXT( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMap2PropertiesEXT( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMap2PropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, subsampledLoads, subsampledCoarseReconstructionEarlyAccess, maxSubsampledArrayLayers, maxDescriptorSetSubsampledSamplers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subsampledLoads == rhs.subsampledLoads ) && + ( subsampledCoarseReconstructionEarlyAccess == rhs.subsampledCoarseReconstructionEarlyAccess ) && + ( maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers ) && ( maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads = {}; + VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess = {}; + uint32_t maxSubsampledArrayLayers = {}; + uint32_t 
maxDescriptorSetSubsampledSamplers = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMap2PropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceFragmentDensityMapFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapFeaturesEXT.html + struct PhysicalDeviceFragmentDensityMapFeaturesEXT + { + using NativeType = VkPhysicalDeviceFragmentDensityMapFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentDensityMap{ fragmentDensityMap_ } + , fragmentDensityMapDynamic{ fragmentDensityMapDynamic_ } + , fragmentDensityMapNonSubsampledImages{ fragmentDensityMapNonSubsampledImages_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & + setFragmentDensityMap( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMap = fragmentDensityMap_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & + setFragmentDensityMapDynamic( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapDynamic = fragmentDensityMapDynamic_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & + setFragmentDensityMapNonSubsampledImages( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator 
VkPhysicalDeviceFragmentDensityMapFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityMap, fragmentDensityMapDynamic, fragmentDensityMapNonSubsampledImages ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMap == rhs.fragmentDensityMap ) && + ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic ) && + ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT.html + struct PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT + { + using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentDensityMapOffset{ fragmentDensityMapOffset_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT & + operator=( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + 
pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT & + setFragmentDensityMapOffset( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapOffset = fragmentDensityMapOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityMapOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapOffset == rhs.fragmentDensityMapOffset ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT; + }; + + using PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM = PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT; + + // wrapper struct for struct VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT.html + struct PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT + { + using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentDensityOffsetGranularity{ fragmentDensityOffsetGranularity_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + 
+ PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT & + operator=( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityOffsetGranularity ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityOffsetGranularity == rhs.fragmentDensityOffsetGranularity ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT; + }; + + using PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM = PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT; + + // wrapper struct for struct VkPhysicalDeviceFragmentDensityMapPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapPropertiesEXT.html + struct PhysicalDeviceFragmentDensityMapPropertiesEXT + { + using NativeType = VkPhysicalDeviceFragmentDensityMapPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minFragmentDensityTexelSize{ minFragmentDensityTexelSize_ } + , maxFragmentDensityTexelSize{ maxFragmentDensityTexelSize_ } + , fragmentDensityInvocations{ fragmentDensityInvocations_ } + { + } + + VULKAN_HPP_CONSTEXPR + 
PhysicalDeviceFragmentDensityMapPropertiesEXT( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minFragmentDensityTexelSize, maxFragmentDensityTexelSize, fragmentDensityInvocations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize ) && + ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize ) && ( fragmentDensityInvocations == rhs.fragmentDensityInvocations ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR.html + struct PhysicalDeviceFragmentShaderBarycentricFeaturesKHR + { + using NativeType = VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 
fragmentShaderBarycentric_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentShaderBarycentric{ fragmentShaderBarycentric_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & + operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & + setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderBarycentric = fragmentShaderBarycentric_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentShaderBarycentric ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + }; + + using PhysicalDeviceFragmentShaderBarycentricFeaturesNV = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + + // wrapper struct for struct VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR.html + struct PhysicalDeviceFragmentShaderBarycentricPropertiesKHR + { + using NativeType = VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 triStripVertexOrderIndependentOfProvokingVertex_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , triStripVertexOrderIndependentOfProvokingVertex{ triStripVertexOrderIndependentOfProvokingVertex_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShaderBarycentricPropertiesKHR & + operator=( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShaderBarycentricPropertiesKHR & operator=( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, triStripVertexOrderIndependentOfProvokingVertex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( triStripVertexOrderIndependentOfProvokingVertex == rhs.triStripVertexOrderIndependentOfProvokingVertex ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 triStripVertexOrderIndependentOfProvokingVertex = {}; + }; + + template <> + struct CppType + { + using Type = 
PhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT.html + struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT + { + using NativeType = VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentShaderSampleInterlock{ fragmentShaderSampleInterlock_ } + , fragmentShaderPixelInterlock{ fragmentShaderPixelInterlock_ } + , fragmentShaderShadingRateInterlock{ fragmentShaderShadingRateInterlock_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShaderInterlockFeaturesEXT( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShaderInterlockFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + setFragmentShaderSampleInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + setFragmentShaderShadingRateInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentShaderSampleInterlock, fragmentShaderPixelInterlock, fragmentShaderShadingRateInterlock ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock ) && + ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock ) && + ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV.html + struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV + { + using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentShadingRateEnums{ fragmentShadingRateEnums_ } + , supersampleFragmentShadingRates{ supersampleFragmentShadingRates_ } + , noInvocationFragmentShadingRates{ noInvocationFragmentShadingRates_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) 
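// Illustrative sketch, not part of the diff above: requesting fragment shader interlock at device
// creation by chaining the feature struct defined here into DeviceCreateInfo::pNext. 'physicalDevice'
// is an assumed, already selected vk::PhysicalDevice; queue setup and extension enabling are omitted.
#include <vulkan/vulkan.hpp>

vk::Device createDeviceWithPixelInterlock( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceFragmentShaderInterlockFeaturesEXT interlockFeatures{};
  interlockFeatures.setFragmentShaderPixelInterlock( VK_TRUE );  // setter generated above

  vk::DeviceCreateInfo deviceCreateInfo{};
  deviceCreateInfo.setPNext( &interlockFeatures );               // a real device also needs pQueueCreateInfos
  return physicalDevice.createDevice( deviceCreateInfo );        // requires VK_EXT_fragment_shader_interlock
}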
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & + setFragmentShadingRateEnums( VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShadingRateEnums = fragmentShadingRateEnums_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & + setSupersampleFragmentShadingRates( VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT + { + supersampleFragmentShadingRates = supersampleFragmentShadingRates_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & + setNoInvocationFragmentShadingRates( VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT + { + noInvocationFragmentShadingRates = noInvocationFragmentShadingRates_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentShadingRateEnums, supersampleFragmentShadingRates, noInvocationFragmentShadingRates ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShadingRateEnums == rhs.fragmentShadingRateEnums ) && + ( supersampleFragmentShadingRates == rhs.supersampleFragmentShadingRates ) && + ( noInvocationFragmentShadingRates == rhs.noInvocationFragmentShadingRates ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums = {}; + VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates = {}; + VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + }; + + // 
wrapper struct for struct VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV.html + struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV + { + using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxFragmentShadingRateInvocationCount{ maxFragmentShadingRateInvocationCount_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & + operator=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxFragmentShadingRateInvocationCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxFragmentShadingRateInvocationCount == rhs.maxFragmentShadingRateInvocationCount ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount = 
VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceFragmentShadingRateFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRateFeaturesKHR.html + struct PhysicalDeviceFragmentShadingRateFeaturesKHR + { + using NativeType = VkPhysicalDeviceFragmentShadingRateFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineFragmentShadingRate{ pipelineFragmentShadingRate_ } + , primitiveFragmentShadingRate{ primitiveFragmentShadingRate_ } + , attachmentFragmentShadingRate{ attachmentFragmentShadingRate_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateFeaturesKHR( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & + setPipelineFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT + { + pipelineFragmentShadingRate = pipelineFragmentShadingRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & + setPrimitiveFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT + { + primitiveFragmentShadingRate = primitiveFragmentShadingRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & + setAttachmentFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT + { + attachmentFragmentShadingRate = attachmentFragmentShadingRate_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const *() const 
VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineFragmentShadingRate, primitiveFragmentShadingRate, attachmentFragmentShadingRate ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineFragmentShadingRate == rhs.pipelineFragmentShadingRate ) && + ( primitiveFragmentShadingRate == rhs.primitiveFragmentShadingRate ) && ( attachmentFragmentShadingRate == rhs.attachmentFragmentShadingRate ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate = {}; + VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateFeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceFragmentShadingRateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRateKHR.html + struct PhysicalDeviceFragmentShadingRateKHR + { + using NativeType = VkPhysicalDeviceFragmentShadingRateKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , sampleCounts{ sampleCounts_ } + , fragmentSize{ fragmentSize_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateKHR( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShadingRateKHR & operator=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRateKHR & operator=( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentShadingRateKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + 
operator VkPhysicalDeviceFragmentShadingRateKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, sampleCounts, fragmentSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleCounts == rhs.sampleCounts ) && ( fragmentSize == rhs.fragmentSize ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceFragmentShadingRatePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRatePropertiesKHR.html + struct PhysicalDeviceFragmentShadingRatePropertiesKHR + { + using NativeType = VkPhysicalDeviceFragmentShadingRatePropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR( + VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize_ = {}, + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize_ = {}, + uint32_t maxFragmentSizeAspectRatio_ = {}, + uint32_t maxFragmentShadingRateCoverageSamples_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , 
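// Illustrative sketch, not part of the diff above: each entry returned by
// PhysicalDevice::getFragmentShadingRatesKHR is a PhysicalDeviceFragmentShadingRateKHR as defined
// here. 'gpu' is an assumed vk::PhysicalDevice; a dispatcher with the VK_KHR_fragment_shading_rate
// entry points loaded is required.
#include <vulkan/vulkan.hpp>
#include <vector>

void listFragmentShadingRates( vk::PhysicalDevice gpu )
{
  std::vector<vk::PhysicalDeviceFragmentShadingRateKHR> rates = gpu.getFragmentShadingRatesKHR();
  for ( vk::PhysicalDeviceFragmentShadingRateKHR const & rate : rates )
  {
    // rate.fragmentSize is the shading-rate extent, rate.sampleCounts the sample counts it supports
    (void)rate;
  }
}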
minFragmentShadingRateAttachmentTexelSize{ minFragmentShadingRateAttachmentTexelSize_ } + , maxFragmentShadingRateAttachmentTexelSize{ maxFragmentShadingRateAttachmentTexelSize_ } + , maxFragmentShadingRateAttachmentTexelSizeAspectRatio{ maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ } + , primitiveFragmentShadingRateWithMultipleViewports{ primitiveFragmentShadingRateWithMultipleViewports_ } + , layeredShadingRateAttachments{ layeredShadingRateAttachments_ } + , fragmentShadingRateNonTrivialCombinerOps{ fragmentShadingRateNonTrivialCombinerOps_ } + , maxFragmentSize{ maxFragmentSize_ } + , maxFragmentSizeAspectRatio{ maxFragmentSizeAspectRatio_ } + , maxFragmentShadingRateCoverageSamples{ maxFragmentShadingRateCoverageSamples_ } + , maxFragmentShadingRateRasterizationSamples{ maxFragmentShadingRateRasterizationSamples_ } + , fragmentShadingRateWithShaderDepthStencilWrites{ fragmentShadingRateWithShaderDepthStencilWrites_ } + , fragmentShadingRateWithSampleMask{ fragmentShadingRateWithSampleMask_ } + , fragmentShadingRateWithShaderSampleMask{ fragmentShadingRateWithShaderSampleMask_ } + , fragmentShadingRateWithConservativeRasterization{ fragmentShadingRateWithConservativeRasterization_ } + , fragmentShadingRateWithFragmentShaderInterlock{ fragmentShadingRateWithFragmentShaderInterlock_ } + , fragmentShadingRateWithCustomSampleLocations{ fragmentShadingRateWithCustomSampleLocations_ } + , fragmentShadingRateStrictMultiplyCombiner{ fragmentShadingRateStrictMultiplyCombiner_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShadingRatePropertiesKHR( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRatePropertiesKHR( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRatePropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + minFragmentShadingRateAttachmentTexelSize, + maxFragmentShadingRateAttachmentTexelSize, + maxFragmentShadingRateAttachmentTexelSizeAspectRatio, + primitiveFragmentShadingRateWithMultipleViewports, + layeredShadingRateAttachments, + fragmentShadingRateNonTrivialCombinerOps, + maxFragmentSize, + maxFragmentSizeAspectRatio, + maxFragmentShadingRateCoverageSamples, + maxFragmentShadingRateRasterizationSamples, + fragmentShadingRateWithShaderDepthStencilWrites, + fragmentShadingRateWithSampleMask, + 
fragmentShadingRateWithShaderSampleMask, + fragmentShadingRateWithConservativeRasterization, + fragmentShadingRateWithFragmentShaderInterlock, + fragmentShadingRateWithCustomSampleLocations, + fragmentShadingRateStrictMultiplyCombiner ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRatePropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( minFragmentShadingRateAttachmentTexelSize == rhs.minFragmentShadingRateAttachmentTexelSize ) && + ( maxFragmentShadingRateAttachmentTexelSize == rhs.maxFragmentShadingRateAttachmentTexelSize ) && + ( maxFragmentShadingRateAttachmentTexelSizeAspectRatio == rhs.maxFragmentShadingRateAttachmentTexelSizeAspectRatio ) && + ( primitiveFragmentShadingRateWithMultipleViewports == rhs.primitiveFragmentShadingRateWithMultipleViewports ) && + ( layeredShadingRateAttachments == rhs.layeredShadingRateAttachments ) && + ( fragmentShadingRateNonTrivialCombinerOps == rhs.fragmentShadingRateNonTrivialCombinerOps ) && ( maxFragmentSize == rhs.maxFragmentSize ) && + ( maxFragmentSizeAspectRatio == rhs.maxFragmentSizeAspectRatio ) && + ( maxFragmentShadingRateCoverageSamples == rhs.maxFragmentShadingRateCoverageSamples ) && + ( maxFragmentShadingRateRasterizationSamples == rhs.maxFragmentShadingRateRasterizationSamples ) && + ( fragmentShadingRateWithShaderDepthStencilWrites == rhs.fragmentShadingRateWithShaderDepthStencilWrites ) && + ( fragmentShadingRateWithSampleMask == rhs.fragmentShadingRateWithSampleMask ) && + ( fragmentShadingRateWithShaderSampleMask == rhs.fragmentShadingRateWithShaderSampleMask ) && + ( fragmentShadingRateWithConservativeRasterization == rhs.fragmentShadingRateWithConservativeRasterization ) && + ( fragmentShadingRateWithFragmentShaderInterlock == rhs.fragmentShadingRateWithFragmentShaderInterlock ) && + ( fragmentShadingRateWithCustomSampleLocations == rhs.fragmentShadingRateWithCustomSampleLocations ) && + ( fragmentShadingRateStrictMultiplyCombiner == rhs.fragmentShadingRateStrictMultiplyCombiner ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize = {}; + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports = {}; + VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize = {}; + uint32_t maxFragmentSizeAspectRatio = {}; + uint32_t maxFragmentShadingRateCoverageSamples = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceFrameBoundaryFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFrameBoundaryFeaturesEXT.html + struct PhysicalDeviceFrameBoundaryFeaturesEXT + { + using NativeType = VkPhysicalDeviceFrameBoundaryFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFrameBoundaryFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFrameBoundaryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 frameBoundary_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , frameBoundary{ frameBoundary_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFrameBoundaryFeaturesEXT( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFrameBoundaryFeaturesEXT( VkPhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFrameBoundaryFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFrameBoundaryFeaturesEXT & operator=( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFrameBoundaryFeaturesEXT & operator=( VkPhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFrameBoundaryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFrameBoundaryFeaturesEXT & setFrameBoundary( VULKAN_HPP_NAMESPACE::Bool32 frameBoundary_ ) VULKAN_HPP_NOEXCEPT + { + frameBoundary = frameBoundary_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFrameBoundaryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFrameBoundaryFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFrameBoundaryFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFrameBoundaryFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, frameBoundary ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFrameBoundaryFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == 
rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( frameBoundary == rhs.frameBoundary ); +# endif + } + + bool operator!=( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFrameBoundaryFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 frameBoundary = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFrameBoundaryFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceGlobalPriorityQueryFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceGlobalPriorityQueryFeatures.html + struct PhysicalDeviceGlobalPriorityQueryFeatures + { + using NativeType = VkPhysicalDeviceGlobalPriorityQueryFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeatures( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , globalPriorityQuery{ globalPriorityQuery_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeatures( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGlobalPriorityQueryFeatures( VkPhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceGlobalPriorityQueryFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceGlobalPriorityQueryFeatures & operator=( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceGlobalPriorityQueryFeatures & operator=( VkPhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeatures & + setGlobalPriorityQuery( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT + { + globalPriorityQuery = globalPriorityQuery_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceGlobalPriorityQueryFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGlobalPriorityQueryFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGlobalPriorityQueryFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGlobalPriorityQueryFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, globalPriorityQuery ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceGlobalPriorityQueryFeatures const & ) const = default; 
+#else + bool operator==( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriorityQuery == rhs.globalPriorityQuery ); +# endif + } + + bool operator!=( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceGlobalPriorityQueryFeatures; + }; + + using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeatures; + using PhysicalDeviceGlobalPriorityQueryFeaturesKHR = PhysicalDeviceGlobalPriorityQueryFeatures; + + // wrapper struct for struct VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT.html + struct PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT + { + using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , graphicsPipelineLibrary{ graphicsPipelineLibrary_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & operator=( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & operator=( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & + setGraphicsPipelineLibrary( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary_ ) VULKAN_HPP_NOEXCEPT + { + graphicsPipelineLibrary = graphicsPipelineLibrary_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, graphicsPipelineLibrary ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( graphicsPipelineLibrary == rhs.graphicsPipelineLibrary ); +# endif + } + + bool operator!=( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT.html + struct PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT + { + using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , graphicsPipelineLibraryFastLinking{ graphicsPipelineLibraryFastLinking_ } + , graphicsPipelineLibraryIndependentInterpolationDecoration{ graphicsPipelineLibraryIndependentInterpolationDecoration_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & + operator=( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & operator=( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, graphicsPipelineLibraryFastLinking, graphicsPipelineLibraryIndependentInterpolationDecoration ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( graphicsPipelineLibraryFastLinking == rhs.graphicsPipelineLibraryFastLinking ) && + ( graphicsPipelineLibraryIndependentInterpolationDecoration == rhs.graphicsPipelineLibraryIndependentInterpolationDecoration ); +# endif + } + + bool operator!=( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking = {}; + VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceGroupProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceGroupProperties.html + struct PhysicalDeviceGroupProperties + { + using NativeType = VkPhysicalDeviceGroupProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceGroupProperties( uint32_t physicalDeviceCount_ = {}, + std::array const & physicalDevices_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , physicalDeviceCount{ physicalDeviceCount_ } + , physicalDevices{ physicalDevices_ } + , subsetAllocation{ subsetAllocation_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceGroupProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator 
VkPhysicalDeviceGroupProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGroupProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGroupProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, physicalDeviceCount, physicalDevices, subsetAllocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = physicalDeviceCount <=> rhs.physicalDeviceCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < physicalDeviceCount; ++i ) + { + if ( auto cmp = physicalDevices[i] <=> rhs.physicalDevices[i]; cmp != 0 ) + return cmp; + } + if ( auto cmp = subsetAllocation <=> rhs.subsetAllocation; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) && + ( memcmp( physicalDevices, rhs.physicalDevices, physicalDeviceCount * sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) ) == 0 ) && + ( subsetAllocation == rhs.subsetAllocation ); + } + + bool operator!=( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties; + void * pNext = {}; + uint32_t physicalDeviceCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D physicalDevices = {}; + VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceGroupProperties; + }; + + using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; + + // wrapper struct for struct VkPhysicalDeviceHdrVividFeaturesHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceHdrVividFeaturesHUAWEI.html + struct PhysicalDeviceHdrVividFeaturesHUAWEI + { + using NativeType = VkPhysicalDeviceHdrVividFeaturesHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceHdrVividFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 hdrVivid_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , hdrVivid{ hdrVivid_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceHdrVividFeaturesHUAWEI( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceHdrVividFeaturesHUAWEI( VkPhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceHdrVividFeaturesHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + + 
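// Illustrative sketch, not part of the diff above: PhysicalDeviceGroupProperties is the element type
// returned by Instance::enumeratePhysicalDeviceGroups (Vulkan 1.1); only the first
// physicalDeviceCount entries of the fixed-size physicalDevices array are valid. 'instance' is an
// assumed vk::Instance.
#include <vulkan/vulkan.hpp>
#include <vector>

void listDeviceGroups( vk::Instance instance )
{
  std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroups();
  for ( vk::PhysicalDeviceGroupProperties const & group : groups )
  {
    for ( uint32_t i = 0; i < group.physicalDeviceCount; ++i )
    {
      vk::PhysicalDevice gpu = group.physicalDevices[i];  // GPUs that may back one logical device
      (void)gpu;
    }
  }
}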
PhysicalDeviceHdrVividFeaturesHUAWEI & operator=( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceHdrVividFeaturesHUAWEI & operator=( VkPhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHdrVividFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHdrVividFeaturesHUAWEI & setHdrVivid( VULKAN_HPP_NAMESPACE::Bool32 hdrVivid_ ) VULKAN_HPP_NOEXCEPT + { + hdrVivid = hdrVivid_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceHdrVividFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHdrVividFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHdrVividFeaturesHUAWEI const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHdrVividFeaturesHUAWEI *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hdrVivid ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceHdrVividFeaturesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hdrVivid == rhs.hdrVivid ); +# endif + } + + bool operator!=( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hdrVivid = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceHdrVividFeaturesHUAWEI; + }; + + // wrapper struct for struct VkPhysicalDeviceHostImageCopyFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceHostImageCopyFeatures.html + struct PhysicalDeviceHostImageCopyFeatures + { + using NativeType = VkPhysicalDeviceHostImageCopyFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostImageCopyFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , hostImageCopy{ hostImageCopy_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeatures( PhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceHostImageCopyFeatures( VkPhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceHostImageCopyFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceHostImageCopyFeatures & operator=( 
PhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceHostImageCopyFeatures & operator=( VkPhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeatures & setHostImageCopy( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ ) VULKAN_HPP_NOEXCEPT + { + hostImageCopy = hostImageCopy_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceHostImageCopyFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHostImageCopyFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHostImageCopyFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHostImageCopyFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hostImageCopy ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceHostImageCopyFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceHostImageCopyFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostImageCopy == rhs.hostImageCopy ); +# endif + } + + bool operator!=( PhysicalDeviceHostImageCopyFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostImageCopyFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceHostImageCopyFeatures; + }; + + using PhysicalDeviceHostImageCopyFeaturesEXT = PhysicalDeviceHostImageCopyFeatures; + + // wrapper struct for struct VkPhysicalDeviceHostImageCopyProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceHostImageCopyProperties.html + struct PhysicalDeviceHostImageCopyProperties + { + using NativeType = VkPhysicalDeviceHostImageCopyProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostImageCopyProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties( uint32_t copySrcLayoutCount_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts_ = {}, + uint32_t copyDstLayoutCount_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts_ = {}, + std::array const & optimalTilingLayoutUUID_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , copySrcLayoutCount{ copySrcLayoutCount_ } + , pCopySrcLayouts{ pCopySrcLayouts_ 
} + , copyDstLayoutCount{ copyDstLayoutCount_ } + , pCopyDstLayouts{ pCopyDstLayouts_ } + , optimalTilingLayoutUUID{ optimalTilingLayoutUUID_ } + , identicalMemoryTypeRequirements{ identicalMemoryTypeRequirements_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties( PhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceHostImageCopyProperties( VkPhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceHostImageCopyProperties( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceHostImageCopyProperties( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copySrcLayouts_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copyDstLayouts_ = {}, + std::array const & optimalTilingLayoutUUID_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ) + , copySrcLayoutCount( static_cast( copySrcLayouts_.size() ) ) + , pCopySrcLayouts( copySrcLayouts_.data() ) + , copyDstLayoutCount( static_cast( copyDstLayouts_.size() ) ) + , pCopyDstLayouts( copyDstLayouts_.data() ) + , optimalTilingLayoutUUID( optimalTilingLayoutUUID_ ) + , identicalMemoryTypeRequirements( identicalMemoryTypeRequirements_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PhysicalDeviceHostImageCopyProperties & operator=( PhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceHostImageCopyProperties & operator=( VkPhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setCopySrcLayoutCount( uint32_t copySrcLayoutCount_ ) VULKAN_HPP_NOEXCEPT + { + copySrcLayoutCount = copySrcLayoutCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & + setPCopySrcLayouts( VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pCopySrcLayouts = pCopySrcLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceHostImageCopyProperties & + setCopySrcLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copySrcLayouts_ ) VULKAN_HPP_NOEXCEPT + { + copySrcLayoutCount = static_cast( copySrcLayouts_.size() ); + pCopySrcLayouts = copySrcLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setCopyDstLayoutCount( uint32_t copyDstLayoutCount_ ) VULKAN_HPP_NOEXCEPT + { + copyDstLayoutCount = copyDstLayoutCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & + setPCopyDstLayouts( VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pCopyDstLayouts = pCopyDstLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceHostImageCopyProperties & + setCopyDstLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copyDstLayouts_ ) VULKAN_HPP_NOEXCEPT + { + copyDstLayoutCount = static_cast( copyDstLayouts_.size() ); + pCopyDstLayouts = 
copyDstLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & + setOptimalTilingLayoutUUID( std::array optimalTilingLayoutUUID_ ) VULKAN_HPP_NOEXCEPT + { + optimalTilingLayoutUUID = optimalTilingLayoutUUID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & + setIdenticalMemoryTypeRequirements( VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ ) VULKAN_HPP_NOEXCEPT + { + identicalMemoryTypeRequirements = identicalMemoryTypeRequirements_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceHostImageCopyProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHostImageCopyProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHostImageCopyProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHostImageCopyProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, copySrcLayoutCount, pCopySrcLayouts, copyDstLayoutCount, pCopyDstLayouts, optimalTilingLayoutUUID, identicalMemoryTypeRequirements ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceHostImageCopyProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceHostImageCopyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( copySrcLayoutCount == rhs.copySrcLayoutCount ) && + ( pCopySrcLayouts == rhs.pCopySrcLayouts ) && ( copyDstLayoutCount == rhs.copyDstLayoutCount ) && ( pCopyDstLayouts == rhs.pCopyDstLayouts ) && + ( optimalTilingLayoutUUID == rhs.optimalTilingLayoutUUID ) && ( identicalMemoryTypeRequirements == rhs.identicalMemoryTypeRequirements ); +# endif + } + + bool operator!=( PhysicalDeviceHostImageCopyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostImageCopyProperties; + void * pNext = {}; + uint32_t copySrcLayoutCount = {}; + VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts = {}; + uint32_t copyDstLayoutCount = {}; + VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D optimalTilingLayoutUUID = {}; + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceHostImageCopyProperties; + }; + + using PhysicalDeviceHostImageCopyPropertiesEXT = PhysicalDeviceHostImageCopyProperties; + + // wrapper struct for struct VkPhysicalDeviceHostQueryResetFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceHostQueryResetFeatures.html + struct PhysicalDeviceHostQueryResetFeatures + { + using NativeType = VkPhysicalDeviceHostQueryResetFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostQueryResetFeatures; + +#if 
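/*
  Illustrative usage sketch (editor's addition, not part of the generated header).
  It shows how the two host-image-copy structs above are typically chained when
  probing support. `gpu` is assumed to be an existing vk::PhysicalDevice on a
  driver exposing VK_EXT_host_image_copy (or Vulkan 1.4); the layout arrays are
  filled in a second query because the properties struct only carries pointers.

      vk::PhysicalDeviceFeatures2              features2;
      vk::PhysicalDeviceHostImageCopyFeatures  hostCopyFeatures;
      features2.pNext = &hostCopyFeatures;
      gpu.getFeatures2( &features2 );

      vk::PhysicalDeviceProperties2              props2;
      vk::PhysicalDeviceHostImageCopyProperties  hostCopyProps;   // pointers are null, so only the counts are written
      props2.pNext = &hostCopyProps;
      gpu.getProperties2( &props2 );

      std::vector<vk::ImageLayout> srcLayouts( hostCopyProps.copySrcLayoutCount );
      std::vector<vk::ImageLayout> dstLayouts( hostCopyProps.copyDstLayoutCount );
      hostCopyProps.setPCopySrcLayouts( srcLayouts.data() ).setPCopyDstLayouts( dstLayouts.data() );
      gpu.getProperties2( &props2 );   // second call fills the layout arrays
*/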
!defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , hostQueryReset{ hostQueryReset_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceHostQueryResetFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceHostQueryResetFeatures & operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceHostQueryResetFeatures & operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT + { + hostQueryReset = hostQueryReset_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceHostQueryResetFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHostQueryResetFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHostQueryResetFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hostQueryReset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceHostQueryResetFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostQueryReset == rhs.hostQueryReset ); +# endif + } + + bool operator!=( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceHostQueryResetFeatures; + }; + + using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures; + + // wrapper struct for struct VkPhysicalDeviceIDProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceIDProperties.html + struct PhysicalDeviceIDProperties + { + using NativeType = VkPhysicalDeviceIDProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
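/*
  Illustrative usage sketch (editor's addition, not part of the generated header).
  The host-query-reset feature defined above is enabled at device creation and then
  exercised by resetting a query pool without a command buffer. `gpu`, `queueCi`
  and `queryPool` are assumed to exist already.

      vk::PhysicalDeviceHostQueryResetFeatures hostQueryReset{ VK_TRUE };

      vk::DeviceCreateInfo deviceCi{};
      deviceCi.setQueueCreateInfos( queueCi ).setPNext( &hostQueryReset );
      vk::Device device = gpu.createDevice( deviceCi );

      device.resetQueryPool( queryPool, 0, 8 );   // firstQuery = 0, queryCount = 8; host-side reset, no recording needed
*/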
StructureType::ePhysicalDeviceIdProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( std::array const & deviceUUID_ = {}, + std::array const & driverUUID_ = {}, + std::array const & deviceLUID_ = {}, + uint32_t deviceNodeMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceUUID{ deviceUUID_ } + , driverUUID{ driverUUID_ } + , deviceLUID{ deviceLUID_ } + , deviceNodeMask{ deviceNodeMask_ } + , deviceLUIDValid{ deviceLUIDValid_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceIDProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceIDProperties & operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceIDProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceIDProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceIDProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceUUID, driverUUID, deviceLUID, deviceNodeMask, deviceLUIDValid ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceIDProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && ( driverUUID == rhs.driverUUID ) && + ( deviceLUID == rhs.deviceLUID ) && ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ); +# endif + } + + bool operator!=( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceLUID = {}; + uint32_t deviceNodeMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceIDProperties; + }; + + using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties; + + // wrapper struct for struct VkPhysicalDeviceImage2DViewOf3DFeaturesEXT, see + // 
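/*
  Illustrative usage sketch (editor's addition, not part of the generated header).
  Reading the device/driver identifiers from the PhysicalDeviceIDProperties struct
  defined above via a StructureChain. `gpu` is assumed to be a vk::PhysicalDevice
  created against Vulkan 1.1 or newer, where this struct is core.

      auto chain = gpu.getProperties2<vk::PhysicalDeviceProperties2,
                                      vk::PhysicalDeviceIDProperties>();
      auto const & idProps = chain.get<vk::PhysicalDeviceIDProperties>();

      std::array<uint8_t, VK_UUID_SIZE> uuid;
      std::copy( idProps.deviceUUID.begin(), idProps.deviceUUID.end(), uuid.begin() );

      if ( idProps.deviceLUIDValid )
      {
        // deviceLUID can be matched against a DXGI adapter LUID for D3D interop.
      }
*/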
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImage2DViewOf3DFeaturesEXT.html + struct PhysicalDeviceImage2DViewOf3DFeaturesEXT + { + using NativeType = VkPhysicalDeviceImage2DViewOf3DFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , image2DViewOf3D{ image2DViewOf3D_ } + , sampler2DViewOf3D{ sampler2DViewOf3D_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImage2DViewOf3DFeaturesEXT( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImage2DViewOf3DFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImage2DViewOf3DFeaturesEXT & operator=( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImage2DViewOf3DFeaturesEXT & operator=( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setImage2DViewOf3D( VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D_ ) VULKAN_HPP_NOEXCEPT + { + image2DViewOf3D = image2DViewOf3D_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & + setSampler2DViewOf3D( VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D_ ) VULKAN_HPP_NOEXCEPT + { + sampler2DViewOf3D = sampler2DViewOf3D_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image2DViewOf3D, sampler2DViewOf3D ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image2DViewOf3D == rhs.image2DViewOf3D ) && ( sampler2DViewOf3D == rhs.sampler2DViewOf3D ); +# endif + 
} + + bool operator!=( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D = {}; + VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImage2DViewOf3DFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceImageAlignmentControlFeaturesMESA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageAlignmentControlFeaturesMESA.html + struct PhysicalDeviceImageAlignmentControlFeaturesMESA + { + using NativeType = VkPhysicalDeviceImageAlignmentControlFeaturesMESA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageAlignmentControlFeaturesMESA; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageAlignmentControlFeaturesMESA( VULKAN_HPP_NAMESPACE::Bool32 imageAlignmentControl_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageAlignmentControl{ imageAlignmentControl_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageAlignmentControlFeaturesMESA( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageAlignmentControlFeaturesMESA( VkPhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageAlignmentControlFeaturesMESA( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageAlignmentControlFeaturesMESA & operator=( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageAlignmentControlFeaturesMESA & operator=( VkPhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageAlignmentControlFeaturesMESA & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageAlignmentControlFeaturesMESA & + setImageAlignmentControl( VULKAN_HPP_NAMESPACE::Bool32 imageAlignmentControl_ ) VULKAN_HPP_NOEXCEPT + { + imageAlignmentControl = imageAlignmentControl_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageAlignmentControl ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
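/*
  Illustrative usage sketch (editor's addition, not part of the generated header).
  The 2D-view-of-3D feature struct above is queried and then passed straight back
  into device creation; `gpu` and `queueCi` are assumed to exist and
  VK_EXT_image_2d_view_of_3d is assumed to be in the enabled extension list.

      vk::PhysicalDeviceImage2DViewOf3DFeaturesEXT viewOf3D;
      vk::PhysicalDeviceFeatures2                  features2;
      features2.setPNext( &viewOf3D );
      gpu.getFeatures2( &features2 );

      viewOf3D.setSampler2DViewOf3D( VK_FALSE );                 // keep only what the app actually needs
      vk::DeviceCreateInfo ci{};
      ci.setQueueCreateInfos( queueCi ).setPNext( &features2 );  // features2 keeps the whole chain attached
      vk::Device device = gpu.createDevice( ci );
*/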
PhysicalDeviceImageAlignmentControlFeaturesMESA const & ) const = default; +#else + bool operator==( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageAlignmentControl == rhs.imageAlignmentControl ); +# endif + } + + bool operator!=( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageAlignmentControlFeaturesMESA; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageAlignmentControl = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageAlignmentControlFeaturesMESA; + }; + + // wrapper struct for struct VkPhysicalDeviceImageAlignmentControlPropertiesMESA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageAlignmentControlPropertiesMESA.html + struct PhysicalDeviceImageAlignmentControlPropertiesMESA + { + using NativeType = VkPhysicalDeviceImageAlignmentControlPropertiesMESA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageAlignmentControlPropertiesMESA; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageAlignmentControlPropertiesMESA( uint32_t supportedImageAlignmentMask_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , supportedImageAlignmentMask{ supportedImageAlignmentMask_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageAlignmentControlPropertiesMESA( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageAlignmentControlPropertiesMESA( VkPhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageAlignmentControlPropertiesMESA( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageAlignmentControlPropertiesMESA & + operator=( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageAlignmentControlPropertiesMESA & operator=( VkPhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, supportedImageAlignmentMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageAlignmentControlPropertiesMESA const & ) const = default; +#else + bool operator==( 
PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedImageAlignmentMask == rhs.supportedImageAlignmentMask ); +# endif + } + + bool operator!=( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageAlignmentControlPropertiesMESA; + void * pNext = {}; + uint32_t supportedImageAlignmentMask = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageAlignmentControlPropertiesMESA; + }; + + // wrapper struct for struct VkPhysicalDeviceImageCompressionControlFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageCompressionControlFeaturesEXT.html + struct PhysicalDeviceImageCompressionControlFeaturesEXT + { + using NativeType = VkPhysicalDeviceImageCompressionControlFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageCompressionControl{ imageCompressionControl_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageCompressionControlFeaturesEXT( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageCompressionControlFeaturesEXT( VkPhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageCompressionControlFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageCompressionControlFeaturesEXT & operator=( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageCompressionControlFeaturesEXT & operator=( VkPhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT & + setImageCompressionControl( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl_ ) VULKAN_HPP_NOEXCEPT + { + imageCompressionControl = imageCompressionControl_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageCompressionControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageCompressionControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageCompressionControlFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageCompressionControlFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageCompressionControl ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageCompressionControlFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionControl == rhs.imageCompressionControl ); +# endif + } + + bool operator!=( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageCompressionControlFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT.html + struct PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT + { + using NativeType = VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageCompressionControlSwapchain{ imageCompressionControlSwapchain_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( + *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & + operator=( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & + operator=( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & + setImageCompressionControlSwapchain( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain_ ) 
VULKAN_HPP_NOEXCEPT + { + imageCompressionControlSwapchain = imageCompressionControlSwapchain_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageCompressionControlSwapchain ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionControlSwapchain == rhs.imageCompressionControlSwapchain ); +# endif + } + + bool operator!=( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageDrmFormatModifierInfoEXT.html + struct PhysicalDeviceImageDrmFormatModifierInfoEXT + { + using NativeType = VkPhysicalDeviceImageDrmFormatModifierInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , drmFormatModifier{ drmFormatModifier_ } + , sharingMode{ sharingMode_ } + , queueFamilyIndexCount{ queueFamilyIndexCount_ } + , pQueueFamilyIndices{ pQueueFamilyIndices_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageDrmFormatModifierInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( 
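/*
  Illustrative usage sketch (editor's addition, not part of the generated header).
  Both image-compression-control feature structs defined above can be checked with
  a single getFeatures2 call through a StructureChain. `gpu` is assumed to be a
  vk::PhysicalDevice with VK_EXT_image_compression_control(_swapchain) available.

      auto chain = gpu.getFeatures2<vk::PhysicalDeviceFeatures2,
                                    vk::PhysicalDeviceImageCompressionControlFeaturesEXT,
                                    vk::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT>();

      bool perImage     = chain.get<vk::PhysicalDeviceImageCompressionControlFeaturesEXT>().imageCompressionControl == VK_TRUE;
      bool perSwapchain = chain.get<vk::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT>().imageCompressionControlSwapchain == VK_TRUE;
*/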
VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , drmFormatModifier( drmFormatModifier_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifier = drmFormatModifier_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT + { + sharingMode = sharingMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceImageDrmFormatModifierInfoEXT & + setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, drmFormatModifier, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) 
const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && ( sharingMode == rhs.sharingMode ) && + ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); +# endif + } + + bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; + const void * pNext = {}; + uint64_t drmFormatModifier = {}; + VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceImageFormatInfo2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageFormatInfo2.html + struct PhysicalDeviceImageFormatInfo2 + { + using NativeType = VkPhysicalDeviceImageFormatInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageFormatInfo2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , format{ format_ } + , type{ type_ } + , tiling{ tiling_ } + , usage{ usage_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageFormatInfo2( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + { + tiling = tiling_; + return *this; + } + + 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageFormatInfo2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageFormatInfo2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, format, type, tiling, usage, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageFormatInfo2 const & ) const = default; +#else + bool operator==( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && ( tiling == rhs.tiling ) && + ( usage == rhs.usage ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageFormatInfo2; + }; + + using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2; + + // wrapper struct for struct VkPhysicalDeviceImageProcessing2FeaturesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageProcessing2FeaturesQCOM.html + struct PhysicalDeviceImageProcessing2FeaturesQCOM + { + using NativeType = VkPhysicalDeviceImageProcessing2FeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessing2FeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2FeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch2_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , textureBlockMatch2{ textureBlockMatch2_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2FeaturesQCOM( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageProcessing2FeaturesQCOM( VkPhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : 
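/*
  Illustrative usage sketch (editor's addition, not part of the generated header).
  PhysicalDeviceImageFormatInfo2 is the input side of getImageFormatProperties2;
  here it is chained with the DRM-format-modifier info struct defined earlier.
  `gpu` and `modifier` are assumed to exist; with default error handling the call
  throws (or returns eErrorFormatNotSupported) when the combination is unsupported.

      vk::PhysicalDeviceImageDrmFormatModifierInfoEXT modifierInfo{};
      modifierInfo.setDrmFormatModifier( modifier ).setSharingMode( vk::SharingMode::eExclusive );

      vk::PhysicalDeviceImageFormatInfo2 formatInfo{};
      formatInfo.setFormat( vk::Format::eR8G8B8A8Unorm )
                .setType( vk::ImageType::e2D )
                .setTiling( vk::ImageTiling::eDrmFormatModifierEXT )
                .setUsage( vk::ImageUsageFlagBits::eSampled )
                .setPNext( &modifierInfo );

      vk::ImageFormatProperties2 formatProps = gpu.getImageFormatProperties2( formatInfo );
*/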
PhysicalDeviceImageProcessing2FeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageProcessing2FeaturesQCOM & operator=( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageProcessing2FeaturesQCOM & operator=( VkPhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessing2FeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessing2FeaturesQCOM & + setTextureBlockMatch2( VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch2_ ) VULKAN_HPP_NOEXCEPT + { + textureBlockMatch2 = textureBlockMatch2_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageProcessing2FeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageProcessing2FeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageProcessing2FeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageProcessing2FeaturesQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, textureBlockMatch2 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageProcessing2FeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( textureBlockMatch2 == rhs.textureBlockMatch2 ); +# endif + } + + bool operator!=( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessing2FeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch2 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageProcessing2FeaturesQCOM; + }; + + // wrapper struct for struct VkPhysicalDeviceImageProcessing2PropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageProcessing2PropertiesQCOM.html + struct PhysicalDeviceImageProcessing2PropertiesQCOM + { + using NativeType = VkPhysicalDeviceImageProcessing2PropertiesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessing2PropertiesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2PropertiesQCOM( VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchWindow_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxBlockMatchWindow{ maxBlockMatchWindow_ } + { + } + + VULKAN_HPP_CONSTEXPR 
PhysicalDeviceImageProcessing2PropertiesQCOM( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageProcessing2PropertiesQCOM( VkPhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageProcessing2PropertiesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageProcessing2PropertiesQCOM & operator=( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageProcessing2PropertiesQCOM & operator=( VkPhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceImageProcessing2PropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageProcessing2PropertiesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageProcessing2PropertiesQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageProcessing2PropertiesQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxBlockMatchWindow ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageProcessing2PropertiesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxBlockMatchWindow == rhs.maxBlockMatchWindow ); +# endif + } + + bool operator!=( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessing2PropertiesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchWindow = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageProcessing2PropertiesQCOM; + }; + + // wrapper struct for struct VkPhysicalDeviceImageProcessingFeaturesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageProcessingFeaturesQCOM.html + struct PhysicalDeviceImageProcessingFeaturesQCOM + { + using NativeType = VkPhysicalDeviceImageProcessingFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , textureSampleWeighted{ textureSampleWeighted_ } + , textureBoxFilter{ textureBoxFilter_ } + , textureBlockMatch{ textureBlockMatch_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingFeaturesQCOM( 
PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageProcessingFeaturesQCOM( VkPhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageProcessingFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageProcessingFeaturesQCOM & operator=( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageProcessingFeaturesQCOM & operator=( VkPhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & + setTextureSampleWeighted( VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted_ ) VULKAN_HPP_NOEXCEPT + { + textureSampleWeighted = textureSampleWeighted_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & + setTextureBoxFilter( VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter_ ) VULKAN_HPP_NOEXCEPT + { + textureBoxFilter = textureBoxFilter_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & + setTextureBlockMatch( VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch_ ) VULKAN_HPP_NOEXCEPT + { + textureBlockMatch = textureBlockMatch_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageProcessingFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageProcessingFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageProcessingFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageProcessingFeaturesQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, textureSampleWeighted, textureBoxFilter, textureBlockMatch ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageProcessingFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( textureSampleWeighted == rhs.textureSampleWeighted ) && + ( textureBoxFilter == rhs.textureBoxFilter ) && ( textureBlockMatch == rhs.textureBlockMatch ); +# endif + } + + bool operator!=( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch = {}; + }; + + template <> + struct CppType + { + using Type = 
PhysicalDeviceImageProcessingFeaturesQCOM; + }; + + // wrapper struct for struct VkPhysicalDeviceImageProcessingPropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageProcessingPropertiesQCOM.html + struct PhysicalDeviceImageProcessingPropertiesQCOM + { + using NativeType = VkPhysicalDeviceImageProcessingPropertiesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingPropertiesQCOM( uint32_t maxWeightFilterPhases_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxWeightFilterDimension_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchRegion_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxBoxFilterBlockSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxWeightFilterPhases{ maxWeightFilterPhases_ } + , maxWeightFilterDimension{ maxWeightFilterDimension_ } + , maxBlockMatchRegion{ maxBlockMatchRegion_ } + , maxBoxFilterBlockSize{ maxBoxFilterBlockSize_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingPropertiesQCOM( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageProcessingPropertiesQCOM( VkPhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageProcessingPropertiesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageProcessingPropertiesQCOM & operator=( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageProcessingPropertiesQCOM & operator=( VkPhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceImageProcessingPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageProcessingPropertiesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageProcessingPropertiesQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageProcessingPropertiesQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxWeightFilterPhases, maxWeightFilterDimension, maxBlockMatchRegion, maxBoxFilterBlockSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageProcessingPropertiesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxWeightFilterPhases == rhs.maxWeightFilterPhases ) && + ( maxWeightFilterDimension == rhs.maxWeightFilterDimension ) && ( maxBlockMatchRegion == rhs.maxBlockMatchRegion ) && + ( maxBoxFilterBlockSize == rhs.maxBoxFilterBlockSize ); +# endif + } + + bool operator!=( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM; + void * pNext = {}; + uint32_t maxWeightFilterPhases = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxWeightFilterDimension = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchRegion = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxBoxFilterBlockSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageProcessingPropertiesQCOM; + }; + + // wrapper struct for struct VkPhysicalDeviceImageRobustnessFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageRobustnessFeatures.html + struct PhysicalDeviceImageRobustnessFeatures + { + using NativeType = VkPhysicalDeviceImageRobustnessFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageRobustnessFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , robustImageAccess{ robustImageAccess_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageRobustnessFeatures( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageRobustnessFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageRobustnessFeatures & operator=( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageRobustnessFeatures & operator=( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT + { + robustImageAccess = robustImageAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageRobustnessFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageRobustnessFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageRobustnessFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, robustImageAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageRobustnessFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT 
) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess ); +# endif + } + + bool operator!=( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageRobustnessFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageRobustnessFeatures; + }; + + using PhysicalDeviceImageRobustnessFeaturesEXT = PhysicalDeviceImageRobustnessFeatures; + + // wrapper struct for struct VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT.html + struct PhysicalDeviceImageSlicedViewOf3DFeaturesEXT + { + using NativeType = VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 imageSlicedViewOf3D_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageSlicedViewOf3D{ imageSlicedViewOf3D_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & operator=( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & operator=( VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & + setImageSlicedViewOf3D( VULKAN_HPP_NAMESPACE::Bool32 imageSlicedViewOf3D_ ) VULKAN_HPP_NOEXCEPT + { + imageSlicedViewOf3D = imageSlicedViewOf3D_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( 
sType, pNext, imageSlicedViewOf3D ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageSlicedViewOf3D == rhs.imageSlicedViewOf3D ); +# endif + } + + bool operator!=( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageSlicedViewOf3D = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceImageViewImageFormatInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageViewImageFormatInfoEXT.html + struct PhysicalDeviceImageViewImageFormatInfoEXT + { + using NativeType = VkPhysicalDeviceImageViewImageFormatInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageViewType{ imageViewType_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageViewImageFormatInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageViewImageFormatInfoEXT & operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageViewImageFormatInfoEXT & operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & + setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT + { + imageViewType = imageViewType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageViewImageFormatInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageViewImageFormatInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageViewImageFormatInfoEXT *() 
VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageViewType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageViewType == rhs.imageViewType ); +# endif + } + + bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageViewImageFormatInfoEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceImageViewMinLodFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageViewMinLodFeaturesEXT.html + struct PhysicalDeviceImageViewMinLodFeaturesEXT + { + using NativeType = VkPhysicalDeviceImageViewMinLodFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 minLod_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minLod{ minLod_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageViewMinLodFeaturesEXT( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageViewMinLodFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & setMinLod( VULKAN_HPP_NAMESPACE::Bool32 minLod_ ) VULKAN_HPP_NOEXCEPT + { + minLod = minLod_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageViewMinLodFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageViewMinLodFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageViewMinLodFeaturesEXT const *() const 
VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageViewMinLodFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minLod ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageViewMinLodFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minLod == rhs.minLod ); +# endif + } + + bool operator!=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 minLod = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageViewMinLodFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceImagelessFramebufferFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImagelessFramebufferFeatures.html + struct PhysicalDeviceImagelessFramebufferFeatures + { + using NativeType = VkPhysicalDeviceImagelessFramebufferFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imagelessFramebuffer{ imagelessFramebuffer_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImagelessFramebufferFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImagelessFramebufferFeatures & operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImagelessFramebufferFeatures & operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & + setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT + { + imagelessFramebuffer = imagelessFramebuffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImagelessFramebufferFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImagelessFramebufferFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImagelessFramebufferFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imagelessFramebuffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer ); +# endif + } + + bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImagelessFramebufferFeatures; + }; + + using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures; + + // wrapper struct for struct VkPhysicalDeviceIndexTypeUint8Features, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceIndexTypeUint8Features.html + struct PhysicalDeviceIndexTypeUint8Features + { + using NativeType = VkPhysicalDeviceIndexTypeUint8Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIndexTypeUint8Features; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8Features( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , indexTypeUint8{ indexTypeUint8_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8Features( PhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceIndexTypeUint8Features( VkPhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceIndexTypeUint8Features( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceIndexTypeUint8Features & operator=( PhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceIndexTypeUint8Features & operator=( VkPhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8Features & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeUint8 = indexTypeUint8_; + return 
*this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceIndexTypeUint8Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceIndexTypeUint8Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceIndexTypeUint8Features const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceIndexTypeUint8Features *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, indexTypeUint8 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceIndexTypeUint8Features const & ) const = default; +#else + bool operator==( PhysicalDeviceIndexTypeUint8Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indexTypeUint8 == rhs.indexTypeUint8 ); +# endif + } + + bool operator!=( PhysicalDeviceIndexTypeUint8Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceIndexTypeUint8Features; + }; + + using PhysicalDeviceIndexTypeUint8FeaturesEXT = PhysicalDeviceIndexTypeUint8Features; + using PhysicalDeviceIndexTypeUint8FeaturesKHR = PhysicalDeviceIndexTypeUint8Features; + + // wrapper struct for struct VkPhysicalDeviceInheritedViewportScissorFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceInheritedViewportScissorFeaturesNV.html + struct PhysicalDeviceInheritedViewportScissorFeaturesNV + { + using NativeType = VkPhysicalDeviceInheritedViewportScissorFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , inheritedViewportScissor2D{ inheritedViewportScissor2D_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceInheritedViewportScissorFeaturesNV( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInheritedViewportScissorFeaturesNV( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceInheritedViewportScissorFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV & + setInheritedViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ ) VULKAN_HPP_NOEXCEPT + { + inheritedViewportScissor2D = inheritedViewportScissor2D_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, inheritedViewportScissor2D ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInheritedViewportScissorFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inheritedViewportScissor2D == rhs.inheritedViewportScissor2D ); +# endif + } + + bool operator!=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceInheritedViewportScissorFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceInlineUniformBlockFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceInlineUniformBlockFeatures.html + struct PhysicalDeviceInlineUniformBlockFeatures + { + using NativeType = VkPhysicalDeviceInlineUniformBlockFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , inlineUniformBlock{ inlineUniformBlock_ } + , descriptorBindingInlineUniformBlockUpdateAfterBind{ descriptorBindingInlineUniformBlockUpdateAfterBind_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInlineUniformBlockFeatures( 
VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceInlineUniformBlockFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceInlineUniformBlockFeatures & operator=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceInlineUniformBlockFeatures & operator=( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & + setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT + { + inlineUniformBlock = inlineUniformBlock_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setDescriptorBindingInlineUniformBlockUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceInlineUniformBlockFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInlineUniformBlockFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInlineUniformBlockFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInlineUniformBlockFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, inlineUniformBlock, descriptorBindingInlineUniformBlockUpdateAfterBind ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInlineUniformBlockFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inlineUniformBlock == rhs.inlineUniformBlock ) && + ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ); +# endif + } + + bool operator!=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceInlineUniformBlockFeatures; + }; + + using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures; + + // wrapper struct for struct VkPhysicalDeviceInlineUniformBlockProperties, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceInlineUniformBlockProperties.html + struct PhysicalDeviceInlineUniformBlockProperties + { + using NativeType = VkPhysicalDeviceInlineUniformBlockProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( uint32_t maxInlineUniformBlockSize_ = {}, + uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, + uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxInlineUniformBlockSize{ maxInlineUniformBlockSize_ } + , maxPerStageDescriptorInlineUniformBlocks{ maxPerStageDescriptorInlineUniformBlocks_ } + , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks{ maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ } + , maxDescriptorSetInlineUniformBlocks{ maxDescriptorSetInlineUniformBlocks_ } + , maxDescriptorSetUpdateAfterBindInlineUniformBlocks{ maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInlineUniformBlockProperties( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceInlineUniformBlockProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceInlineUniformBlockProperties & operator=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceInlineUniformBlockProperties & operator=( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceInlineUniformBlockProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInlineUniformBlockProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInlineUniformBlockProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInlineUniformBlockProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxInlineUniformBlockSize, + maxPerStageDescriptorInlineUniformBlocks, + maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, + maxDescriptorSetInlineUniformBlocks, + maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInlineUniformBlockProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInlineUniformBlockSize == 
rhs.maxInlineUniformBlockSize ) && + ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) && + ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) && + ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) && + ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); +# endif + } + + bool operator!=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockProperties; + void * pNext = {}; + uint32_t maxInlineUniformBlockSize = {}; + uint32_t maxPerStageDescriptorInlineUniformBlocks = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {}; + uint32_t maxDescriptorSetInlineUniformBlocks = {}; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceInlineUniformBlockProperties; + }; + + using PhysicalDeviceInlineUniformBlockPropertiesEXT = PhysicalDeviceInlineUniformBlockProperties; + + // wrapper struct for struct VkPhysicalDeviceInvocationMaskFeaturesHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceInvocationMaskFeaturesHUAWEI.html + struct PhysicalDeviceInvocationMaskFeaturesHUAWEI + { + using NativeType = VkPhysicalDeviceInvocationMaskFeaturesHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , invocationMask{ invocationMask_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInvocationMaskFeaturesHUAWEI( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceInvocationMaskFeaturesHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setInvocationMask( VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ ) VULKAN_HPP_NOEXCEPT + { + invocationMask = invocationMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI &() 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, invocationMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( invocationMask == rhs.invocationMask ); +# endif + } + + bool operator!=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 invocationMask = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceInvocationMaskFeaturesHUAWEI; + }; + + // wrapper struct for struct VkPhysicalDeviceLayeredApiPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLayeredApiPropertiesKHR.html + struct PhysicalDeviceLayeredApiPropertiesKHR + { + using NativeType = VkPhysicalDeviceLayeredApiPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredApiPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesKHR( + uint32_t vendorID_ = {}, + uint32_t deviceID_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR layeredAPI_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR::eVulkan, + std::array const & deviceName_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vendorID{ vendorID_ } + , deviceID{ deviceID_ } + , layeredAPI{ layeredAPI_ } + , deviceName{ deviceName_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesKHR( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLayeredApiPropertiesKHR( VkPhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLayeredApiPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLayeredApiPropertiesKHR & operator=( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLayeredApiPropertiesKHR & operator=( VkPhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceLayeredApiPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredApiPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredApiPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + 
return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredApiPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vendorID, deviceID, layeredAPI, deviceName ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLayeredApiPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && + ( layeredAPI == rhs.layeredAPI ) && ( deviceName == rhs.deviceName ); +# endif + } + + bool operator!=( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLayeredApiPropertiesKHR; + void * pNext = {}; + uint32_t vendorID = {}; + uint32_t deviceID = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR layeredAPI = VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR::eVulkan; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceName = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLayeredApiPropertiesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceLayeredApiPropertiesListKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLayeredApiPropertiesListKHR.html + struct PhysicalDeviceLayeredApiPropertiesListKHR + { + using NativeType = VkPhysicalDeviceLayeredApiPropertiesListKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredApiPropertiesListKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR( uint32_t layeredApiCount_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR * pLayeredApis_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , layeredApiCount{ layeredApiCount_ } + , pLayeredApis{ pLayeredApis_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLayeredApiPropertiesListKHR( VkPhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLayeredApiPropertiesListKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceLayeredApiPropertiesListKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & layeredApis_, void * pNext_ = nullptr ) + : pNext( pNext_ ), layeredApiCount( static_cast( layeredApis_.size() ) ), pLayeredApis( layeredApis_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PhysicalDeviceLayeredApiPropertiesListKHR & operator=( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLayeredApiPropertiesListKHR & operator=( VkPhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = 
*reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR & setLayeredApiCount( uint32_t layeredApiCount_ ) VULKAN_HPP_NOEXCEPT + { + layeredApiCount = layeredApiCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR & + setPLayeredApis( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR * pLayeredApis_ ) VULKAN_HPP_NOEXCEPT + { + pLayeredApis = pLayeredApis_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceLayeredApiPropertiesListKHR & setLayeredApis( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & layeredApis_ ) VULKAN_HPP_NOEXCEPT + { + layeredApiCount = static_cast( layeredApis_.size() ); + pLayeredApis = layeredApis_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceLayeredApiPropertiesListKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredApiPropertiesListKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredApiPropertiesListKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredApiPropertiesListKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, layeredApiCount, pLayeredApis ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLayeredApiPropertiesListKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( layeredApiCount == rhs.layeredApiCount ) && ( pLayeredApis == rhs.pLayeredApis ); +# endif + } + + bool operator!=( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLayeredApiPropertiesListKHR; + void * pNext = {}; + uint32_t layeredApiCount = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR * pLayeredApis = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLayeredApiPropertiesListKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceLimits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLimits.html + struct PhysicalDeviceLimits + { + using NativeType = VkPhysicalDeviceLimits; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = {}, + uint32_t maxImageDimension2D_ = {}, + uint32_t maxImageDimension3D_ = {}, + uint32_t maxImageDimensionCube_ = {}, + uint32_t maxImageArrayLayers_ = {}, + uint32_t maxTexelBufferElements_ = {}, + uint32_t maxUniformBufferRange_ = {}, + 
uint32_t maxStorageBufferRange_ = {}, + uint32_t maxPushConstantsSize_ = {}, + uint32_t maxMemoryAllocationCount_ = {}, + uint32_t maxSamplerAllocationCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {}, + uint32_t maxBoundDescriptorSets_ = {}, + uint32_t maxPerStageDescriptorSamplers_ = {}, + uint32_t maxPerStageDescriptorUniformBuffers_ = {}, + uint32_t maxPerStageDescriptorStorageBuffers_ = {}, + uint32_t maxPerStageDescriptorSampledImages_ = {}, + uint32_t maxPerStageDescriptorStorageImages_ = {}, + uint32_t maxPerStageDescriptorInputAttachments_ = {}, + uint32_t maxPerStageResources_ = {}, + uint32_t maxDescriptorSetSamplers_ = {}, + uint32_t maxDescriptorSetUniformBuffers_ = {}, + uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetStorageBuffers_ = {}, + uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetSampledImages_ = {}, + uint32_t maxDescriptorSetStorageImages_ = {}, + uint32_t maxDescriptorSetInputAttachments_ = {}, + uint32_t maxVertexInputAttributes_ = {}, + uint32_t maxVertexInputBindings_ = {}, + uint32_t maxVertexInputAttributeOffset_ = {}, + uint32_t maxVertexInputBindingStride_ = {}, + uint32_t maxVertexOutputComponents_ = {}, + uint32_t maxTessellationGenerationLevel_ = {}, + uint32_t maxTessellationPatchSize_ = {}, + uint32_t maxTessellationControlPerVertexInputComponents_ = {}, + uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, + uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, + uint32_t maxTessellationControlTotalOutputComponents_ = {}, + uint32_t maxTessellationEvaluationInputComponents_ = {}, + uint32_t maxTessellationEvaluationOutputComponents_ = {}, + uint32_t maxGeometryShaderInvocations_ = {}, + uint32_t maxGeometryInputComponents_ = {}, + uint32_t maxGeometryOutputComponents_ = {}, + uint32_t maxGeometryOutputVertices_ = {}, + uint32_t maxGeometryTotalOutputComponents_ = {}, + uint32_t maxFragmentInputComponents_ = {}, + uint32_t maxFragmentOutputAttachments_ = {}, + uint32_t maxFragmentDualSrcAttachments_ = {}, + uint32_t maxFragmentCombinedOutputResources_ = {}, + uint32_t maxComputeSharedMemorySize_ = {}, + std::array const & maxComputeWorkGroupCount_ = {}, + uint32_t maxComputeWorkGroupInvocations_ = {}, + std::array const & maxComputeWorkGroupSize_ = {}, + uint32_t subPixelPrecisionBits_ = {}, + uint32_t subTexelPrecisionBits_ = {}, + uint32_t mipmapPrecisionBits_ = {}, + uint32_t maxDrawIndexedIndexValue_ = {}, + uint32_t maxDrawIndirectCount_ = {}, + float maxSamplerLodBias_ = {}, + float maxSamplerAnisotropy_ = {}, + uint32_t maxViewports_ = {}, + std::array const & maxViewportDimensions_ = {}, + std::array const & viewportBoundsRange_ = {}, + uint32_t viewportSubPixelBits_ = {}, + size_t minMemoryMapAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {}, + int32_t minTexelOffset_ = {}, + uint32_t maxTexelOffset_ = {}, + int32_t minTexelGatherOffset_ = {}, + uint32_t maxTexelGatherOffset_ = {}, + float minInterpolationOffset_ = {}, + float maxInterpolationOffset_ = {}, + uint32_t subPixelInterpolationOffsetBits_ = {}, + uint32_t maxFramebufferWidth_ = {}, + uint32_t maxFramebufferHeight_ = {}, + uint32_t maxFramebufferLayers_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags 
framebufferColorSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {}, + uint32_t maxColorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {}, + uint32_t maxSampleMaskWords_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {}, + float timestampPeriod_ = {}, + uint32_t maxClipDistances_ = {}, + uint32_t maxCullDistances_ = {}, + uint32_t maxCombinedClipAndCullDistances_ = {}, + uint32_t discreteQueuePriorities_ = {}, + std::array const & pointSizeRange_ = {}, + std::array const & lineWidthRange_ = {}, + float pointSizeGranularity_ = {}, + float lineWidthGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxImageDimension1D{ maxImageDimension1D_ } + , maxImageDimension2D{ maxImageDimension2D_ } + , maxImageDimension3D{ maxImageDimension3D_ } + , maxImageDimensionCube{ maxImageDimensionCube_ } + , maxImageArrayLayers{ maxImageArrayLayers_ } + , maxTexelBufferElements{ maxTexelBufferElements_ } + , maxUniformBufferRange{ maxUniformBufferRange_ } + , maxStorageBufferRange{ maxStorageBufferRange_ } + , maxPushConstantsSize{ maxPushConstantsSize_ } + , maxMemoryAllocationCount{ maxMemoryAllocationCount_ } + , maxSamplerAllocationCount{ maxSamplerAllocationCount_ } + , bufferImageGranularity{ bufferImageGranularity_ } + , sparseAddressSpaceSize{ sparseAddressSpaceSize_ } + , maxBoundDescriptorSets{ maxBoundDescriptorSets_ } + , maxPerStageDescriptorSamplers{ maxPerStageDescriptorSamplers_ } + , maxPerStageDescriptorUniformBuffers{ maxPerStageDescriptorUniformBuffers_ } + , maxPerStageDescriptorStorageBuffers{ maxPerStageDescriptorStorageBuffers_ } + , maxPerStageDescriptorSampledImages{ maxPerStageDescriptorSampledImages_ } + , maxPerStageDescriptorStorageImages{ maxPerStageDescriptorStorageImages_ } + , maxPerStageDescriptorInputAttachments{ maxPerStageDescriptorInputAttachments_ } + , maxPerStageResources{ maxPerStageResources_ } + , maxDescriptorSetSamplers{ maxDescriptorSetSamplers_ } + , maxDescriptorSetUniformBuffers{ maxDescriptorSetUniformBuffers_ } + , maxDescriptorSetUniformBuffersDynamic{ maxDescriptorSetUniformBuffersDynamic_ } + , maxDescriptorSetStorageBuffers{ maxDescriptorSetStorageBuffers_ } + , maxDescriptorSetStorageBuffersDynamic{ maxDescriptorSetStorageBuffersDynamic_ } + , maxDescriptorSetSampledImages{ maxDescriptorSetSampledImages_ } + , maxDescriptorSetStorageImages{ maxDescriptorSetStorageImages_ } + , maxDescriptorSetInputAttachments{ maxDescriptorSetInputAttachments_ } + , maxVertexInputAttributes{ maxVertexInputAttributes_ } + , maxVertexInputBindings{ maxVertexInputBindings_ } + , maxVertexInputAttributeOffset{ maxVertexInputAttributeOffset_ } + , maxVertexInputBindingStride{ 
maxVertexInputBindingStride_ } + , maxVertexOutputComponents{ maxVertexOutputComponents_ } + , maxTessellationGenerationLevel{ maxTessellationGenerationLevel_ } + , maxTessellationPatchSize{ maxTessellationPatchSize_ } + , maxTessellationControlPerVertexInputComponents{ maxTessellationControlPerVertexInputComponents_ } + , maxTessellationControlPerVertexOutputComponents{ maxTessellationControlPerVertexOutputComponents_ } + , maxTessellationControlPerPatchOutputComponents{ maxTessellationControlPerPatchOutputComponents_ } + , maxTessellationControlTotalOutputComponents{ maxTessellationControlTotalOutputComponents_ } + , maxTessellationEvaluationInputComponents{ maxTessellationEvaluationInputComponents_ } + , maxTessellationEvaluationOutputComponents{ maxTessellationEvaluationOutputComponents_ } + , maxGeometryShaderInvocations{ maxGeometryShaderInvocations_ } + , maxGeometryInputComponents{ maxGeometryInputComponents_ } + , maxGeometryOutputComponents{ maxGeometryOutputComponents_ } + , maxGeometryOutputVertices{ maxGeometryOutputVertices_ } + , maxGeometryTotalOutputComponents{ maxGeometryTotalOutputComponents_ } + , maxFragmentInputComponents{ maxFragmentInputComponents_ } + , maxFragmentOutputAttachments{ maxFragmentOutputAttachments_ } + , maxFragmentDualSrcAttachments{ maxFragmentDualSrcAttachments_ } + , maxFragmentCombinedOutputResources{ maxFragmentCombinedOutputResources_ } + , maxComputeSharedMemorySize{ maxComputeSharedMemorySize_ } + , maxComputeWorkGroupCount{ maxComputeWorkGroupCount_ } + , maxComputeWorkGroupInvocations{ maxComputeWorkGroupInvocations_ } + , maxComputeWorkGroupSize{ maxComputeWorkGroupSize_ } + , subPixelPrecisionBits{ subPixelPrecisionBits_ } + , subTexelPrecisionBits{ subTexelPrecisionBits_ } + , mipmapPrecisionBits{ mipmapPrecisionBits_ } + , maxDrawIndexedIndexValue{ maxDrawIndexedIndexValue_ } + , maxDrawIndirectCount{ maxDrawIndirectCount_ } + , maxSamplerLodBias{ maxSamplerLodBias_ } + , maxSamplerAnisotropy{ maxSamplerAnisotropy_ } + , maxViewports{ maxViewports_ } + , maxViewportDimensions{ maxViewportDimensions_ } + , viewportBoundsRange{ viewportBoundsRange_ } + , viewportSubPixelBits{ viewportSubPixelBits_ } + , minMemoryMapAlignment{ minMemoryMapAlignment_ } + , minTexelBufferOffsetAlignment{ minTexelBufferOffsetAlignment_ } + , minUniformBufferOffsetAlignment{ minUniformBufferOffsetAlignment_ } + , minStorageBufferOffsetAlignment{ minStorageBufferOffsetAlignment_ } + , minTexelOffset{ minTexelOffset_ } + , maxTexelOffset{ maxTexelOffset_ } + , minTexelGatherOffset{ minTexelGatherOffset_ } + , maxTexelGatherOffset{ maxTexelGatherOffset_ } + , minInterpolationOffset{ minInterpolationOffset_ } + , maxInterpolationOffset{ maxInterpolationOffset_ } + , subPixelInterpolationOffsetBits{ subPixelInterpolationOffsetBits_ } + , maxFramebufferWidth{ maxFramebufferWidth_ } + , maxFramebufferHeight{ maxFramebufferHeight_ } + , maxFramebufferLayers{ maxFramebufferLayers_ } + , framebufferColorSampleCounts{ framebufferColorSampleCounts_ } + , framebufferDepthSampleCounts{ framebufferDepthSampleCounts_ } + , framebufferStencilSampleCounts{ framebufferStencilSampleCounts_ } + , framebufferNoAttachmentsSampleCounts{ framebufferNoAttachmentsSampleCounts_ } + , maxColorAttachments{ maxColorAttachments_ } + , sampledImageColorSampleCounts{ sampledImageColorSampleCounts_ } + , sampledImageIntegerSampleCounts{ sampledImageIntegerSampleCounts_ } + , sampledImageDepthSampleCounts{ sampledImageDepthSampleCounts_ } + , sampledImageStencilSampleCounts{ 
sampledImageStencilSampleCounts_ } + , storageImageSampleCounts{ storageImageSampleCounts_ } + , maxSampleMaskWords{ maxSampleMaskWords_ } + , timestampComputeAndGraphics{ timestampComputeAndGraphics_ } + , timestampPeriod{ timestampPeriod_ } + , maxClipDistances{ maxClipDistances_ } + , maxCullDistances{ maxCullDistances_ } + , maxCombinedClipAndCullDistances{ maxCombinedClipAndCullDistances_ } + , discreteQueuePriorities{ discreteQueuePriorities_ } + , pointSizeRange{ pointSizeRange_ } + , lineWidthRange{ lineWidthRange_ } + , pointSizeGranularity{ pointSizeGranularity_ } + , lineWidthGranularity{ lineWidthGranularity_ } + , strictLines{ strictLines_ } + , standardSampleLocations{ standardSampleLocations_ } + , optimalBufferCopyOffsetAlignment{ optimalBufferCopyOffsetAlignment_ } + , optimalBufferCopyRowPitchAlignment{ optimalBufferCopyRowPitchAlignment_ } + , nonCoherentAtomSize{ nonCoherentAtomSize_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLimits( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLimits & operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLimits & operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceLimits const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLimits const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLimits *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + float const &, + float const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + size_t const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + int32_t const &, + uint32_t const &, + int32_t const &, + uint32_t const &, + float const &, + float const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + float const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + float const &, + float const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, 
+ VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( maxImageDimension1D, + maxImageDimension2D, + maxImageDimension3D, + maxImageDimensionCube, + maxImageArrayLayers, + maxTexelBufferElements, + maxUniformBufferRange, + maxStorageBufferRange, + maxPushConstantsSize, + maxMemoryAllocationCount, + maxSamplerAllocationCount, + bufferImageGranularity, + sparseAddressSpaceSize, + maxBoundDescriptorSets, + maxPerStageDescriptorSamplers, + maxPerStageDescriptorUniformBuffers, + maxPerStageDescriptorStorageBuffers, + maxPerStageDescriptorSampledImages, + maxPerStageDescriptorStorageImages, + maxPerStageDescriptorInputAttachments, + maxPerStageResources, + maxDescriptorSetSamplers, + maxDescriptorSetUniformBuffers, + maxDescriptorSetUniformBuffersDynamic, + maxDescriptorSetStorageBuffers, + maxDescriptorSetStorageBuffersDynamic, + maxDescriptorSetSampledImages, + maxDescriptorSetStorageImages, + maxDescriptorSetInputAttachments, + maxVertexInputAttributes, + maxVertexInputBindings, + maxVertexInputAttributeOffset, + maxVertexInputBindingStride, + maxVertexOutputComponents, + maxTessellationGenerationLevel, + maxTessellationPatchSize, + maxTessellationControlPerVertexInputComponents, + maxTessellationControlPerVertexOutputComponents, + maxTessellationControlPerPatchOutputComponents, + maxTessellationControlTotalOutputComponents, + maxTessellationEvaluationInputComponents, + maxTessellationEvaluationOutputComponents, + maxGeometryShaderInvocations, + maxGeometryInputComponents, + maxGeometryOutputComponents, + maxGeometryOutputVertices, + maxGeometryTotalOutputComponents, + maxFragmentInputComponents, + maxFragmentOutputAttachments, + maxFragmentDualSrcAttachments, + maxFragmentCombinedOutputResources, + maxComputeSharedMemorySize, + maxComputeWorkGroupCount, + maxComputeWorkGroupInvocations, + maxComputeWorkGroupSize, + subPixelPrecisionBits, + subTexelPrecisionBits, + mipmapPrecisionBits, + maxDrawIndexedIndexValue, + maxDrawIndirectCount, + maxSamplerLodBias, + maxSamplerAnisotropy, + maxViewports, + maxViewportDimensions, + viewportBoundsRange, + viewportSubPixelBits, + minMemoryMapAlignment, + minTexelBufferOffsetAlignment, + minUniformBufferOffsetAlignment, + minStorageBufferOffsetAlignment, + minTexelOffset, + maxTexelOffset, + minTexelGatherOffset, + maxTexelGatherOffset, + minInterpolationOffset, + maxInterpolationOffset, + subPixelInterpolationOffsetBits, + maxFramebufferWidth, + maxFramebufferHeight, + maxFramebufferLayers, + framebufferColorSampleCounts, + framebufferDepthSampleCounts, + framebufferStencilSampleCounts, + framebufferNoAttachmentsSampleCounts, + maxColorAttachments, + sampledImageColorSampleCounts, + sampledImageIntegerSampleCounts, + sampledImageDepthSampleCounts, + sampledImageStencilSampleCounts, + storageImageSampleCounts, + maxSampleMaskWords, + timestampComputeAndGraphics, + timestampPeriod, + maxClipDistances, + maxCullDistances, + maxCombinedClipAndCullDistances, + discreteQueuePriorities, + pointSizeRange, + lineWidthRange, + pointSizeGranularity, + lineWidthGranularity, + strictLines, + standardSampleLocations, + optimalBufferCopyOffsetAlignment, + optimalBufferCopyRowPitchAlignment, + nonCoherentAtomSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLimits const & ) const = default; +#else + bool operator==( PhysicalDeviceLimits const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( maxImageDimension1D == rhs.maxImageDimension1D ) && ( maxImageDimension2D == rhs.maxImageDimension2D ) && + ( maxImageDimension3D == rhs.maxImageDimension3D ) && ( maxImageDimensionCube == rhs.maxImageDimensionCube ) && + ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( maxTexelBufferElements == rhs.maxTexelBufferElements ) && + ( maxUniformBufferRange == rhs.maxUniformBufferRange ) && ( maxStorageBufferRange == rhs.maxStorageBufferRange ) && + ( maxPushConstantsSize == rhs.maxPushConstantsSize ) && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) && + ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) && ( bufferImageGranularity == rhs.bufferImageGranularity ) && + ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) && + ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) && + ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) && + ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) && + ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) && + ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) && + ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) && ( maxPerStageResources == rhs.maxPerStageResources ) && + ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers ) && + ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic ) && + ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers ) && + ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic ) && + ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages ) && + ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) && + ( maxVertexInputBindings == rhs.maxVertexInputBindings ) && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset ) && + ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride ) && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents ) && + ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize ) && + ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents ) && + ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents ) && + ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents ) && + ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents ) && + ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents ) && + ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents ) && + ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents ) && + ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) && 
+ ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) && + ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) && + ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources ) && + ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) && ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount ) && + ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) && ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize ) && + ( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits ) && + ( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) && + ( maxDrawIndirectCount == rhs.maxDrawIndirectCount ) && ( maxSamplerLodBias == rhs.maxSamplerLodBias ) && + ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy ) && ( maxViewports == rhs.maxViewports ) && + ( maxViewportDimensions == rhs.maxViewportDimensions ) && ( viewportBoundsRange == rhs.viewportBoundsRange ) && + ( viewportSubPixelBits == rhs.viewportSubPixelBits ) && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) && + ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment ) && + ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment ) && + ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) && ( minTexelOffset == rhs.minTexelOffset ) && + ( maxTexelOffset == rhs.maxTexelOffset ) && ( minTexelGatherOffset == rhs.minTexelGatherOffset ) && + ( maxTexelGatherOffset == rhs.maxTexelGatherOffset ) && ( minInterpolationOffset == rhs.minInterpolationOffset ) && + ( maxInterpolationOffset == rhs.maxInterpolationOffset ) && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) && + ( maxFramebufferWidth == rhs.maxFramebufferWidth ) && ( maxFramebufferHeight == rhs.maxFramebufferHeight ) && + ( maxFramebufferLayers == rhs.maxFramebufferLayers ) && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) && + ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts ) && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) && + ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) && ( maxColorAttachments == rhs.maxColorAttachments ) && + ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts ) && + ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts ) && + ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts ) && + ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts ) && ( storageImageSampleCounts == rhs.storageImageSampleCounts ) && + ( maxSampleMaskWords == rhs.maxSampleMaskWords ) && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) && + ( timestampPeriod == rhs.timestampPeriod ) && ( maxClipDistances == rhs.maxClipDistances ) && ( maxCullDistances == rhs.maxCullDistances ) && + ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) && ( discreteQueuePriorities == rhs.discreteQueuePriorities ) && + ( pointSizeRange == rhs.pointSizeRange ) && ( lineWidthRange == rhs.lineWidthRange ) && ( pointSizeGranularity == rhs.pointSizeGranularity ) && + ( lineWidthGranularity == rhs.lineWidthGranularity ) && ( strictLines == rhs.strictLines ) && + ( 
standardSampleLocations == rhs.standardSampleLocations ) && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) && + ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize ); +# endif + } + + bool operator!=( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t maxImageDimension1D = {}; + uint32_t maxImageDimension2D = {}; + uint32_t maxImageDimension3D = {}; + uint32_t maxImageDimensionCube = {}; + uint32_t maxImageArrayLayers = {}; + uint32_t maxTexelBufferElements = {}; + uint32_t maxUniformBufferRange = {}; + uint32_t maxStorageBufferRange = {}; + uint32_t maxPushConstantsSize = {}; + uint32_t maxMemoryAllocationCount = {}; + uint32_t maxSamplerAllocationCount = {}; + VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {}; + uint32_t maxBoundDescriptorSets = {}; + uint32_t maxPerStageDescriptorSamplers = {}; + uint32_t maxPerStageDescriptorUniformBuffers = {}; + uint32_t maxPerStageDescriptorStorageBuffers = {}; + uint32_t maxPerStageDescriptorSampledImages = {}; + uint32_t maxPerStageDescriptorStorageImages = {}; + uint32_t maxPerStageDescriptorInputAttachments = {}; + uint32_t maxPerStageResources = {}; + uint32_t maxDescriptorSetSamplers = {}; + uint32_t maxDescriptorSetUniformBuffers = {}; + uint32_t maxDescriptorSetUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetStorageBuffers = {}; + uint32_t maxDescriptorSetStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetSampledImages = {}; + uint32_t maxDescriptorSetStorageImages = {}; + uint32_t maxDescriptorSetInputAttachments = {}; + uint32_t maxVertexInputAttributes = {}; + uint32_t maxVertexInputBindings = {}; + uint32_t maxVertexInputAttributeOffset = {}; + uint32_t maxVertexInputBindingStride = {}; + uint32_t maxVertexOutputComponents = {}; + uint32_t maxTessellationGenerationLevel = {}; + uint32_t maxTessellationPatchSize = {}; + uint32_t maxTessellationControlPerVertexInputComponents = {}; + uint32_t maxTessellationControlPerVertexOutputComponents = {}; + uint32_t maxTessellationControlPerPatchOutputComponents = {}; + uint32_t maxTessellationControlTotalOutputComponents = {}; + uint32_t maxTessellationEvaluationInputComponents = {}; + uint32_t maxTessellationEvaluationOutputComponents = {}; + uint32_t maxGeometryShaderInvocations = {}; + uint32_t maxGeometryInputComponents = {}; + uint32_t maxGeometryOutputComponents = {}; + uint32_t maxGeometryOutputVertices = {}; + uint32_t maxGeometryTotalOutputComponents = {}; + uint32_t maxFragmentInputComponents = {}; + uint32_t maxFragmentOutputAttachments = {}; + uint32_t maxFragmentDualSrcAttachments = {}; + uint32_t maxFragmentCombinedOutputResources = {}; + uint32_t maxComputeSharedMemorySize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxComputeWorkGroupCount = {}; + uint32_t maxComputeWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxComputeWorkGroupSize = {}; + uint32_t subPixelPrecisionBits = {}; + uint32_t subTexelPrecisionBits = {}; + uint32_t mipmapPrecisionBits = {}; + uint32_t maxDrawIndexedIndexValue = {}; + uint32_t maxDrawIndirectCount = {}; + float maxSamplerLodBias = {}; + float maxSamplerAnisotropy = {}; + uint32_t maxViewports = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxViewportDimensions = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D viewportBoundsRange = {}; + uint32_t viewportSubPixelBits = 
{}; + size_t minMemoryMapAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {}; + int32_t minTexelOffset = {}; + uint32_t maxTexelOffset = {}; + int32_t minTexelGatherOffset = {}; + uint32_t maxTexelGatherOffset = {}; + float minInterpolationOffset = {}; + float maxInterpolationOffset = {}; + uint32_t subPixelInterpolationOffsetBits = {}; + uint32_t maxFramebufferWidth = {}; + uint32_t maxFramebufferHeight = {}; + uint32_t maxFramebufferLayers = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {}; + uint32_t maxColorAttachments = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {}; + uint32_t maxSampleMaskWords = {}; + VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {}; + float timestampPeriod = {}; + uint32_t maxClipDistances = {}; + uint32_t maxCullDistances = {}; + uint32_t maxCombinedClipAndCullDistances = {}; + uint32_t discreteQueuePriorities = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D pointSizeRange = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D lineWidthRange = {}; + float pointSizeGranularity = {}; + float lineWidthGranularity = {}; + VULKAN_HPP_NAMESPACE::Bool32 strictLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {}; + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {}; + }; + + // wrapper struct for struct VkPhysicalDeviceSparseProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSparseProperties.html + struct PhysicalDeviceSparseProperties + { + using NativeType = VkPhysicalDeviceSparseProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT + : residencyStandard2DBlockShape{ residencyStandard2DBlockShape_ } + , residencyStandard2DMultisampleBlockShape{ residencyStandard2DMultisampleBlockShape_ } + , residencyStandard3DBlockShape{ residencyStandard3DBlockShape_ } + , residencyAlignedMipSize{ residencyAlignedMipSize_ } + , residencyNonResidentStrict{ residencyNonResidentStrict_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : 
PhysicalDeviceSparseProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSparseProperties & operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSparseProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSparseProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( residencyStandard2DBlockShape, + residencyStandard2DMultisampleBlockShape, + residencyStandard3DBlockShape, + residencyAlignedMipSize, + residencyNonResidentStrict ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSparseProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape ) && + ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape ) && + ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape ) && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize ) && + ( residencyNonResidentStrict == rhs.residencyNonResidentStrict ); +# endif + } + + bool operator!=( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {}; + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {}; + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {}; + VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {}; + }; + + // wrapper struct for struct VkPhysicalDeviceProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProperties.html + struct PhysicalDeviceProperties + { + using NativeType = VkPhysicalDeviceProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( uint32_t apiVersion_ = {}, + uint32_t driverVersion_ = {}, + uint32_t vendorID_ = {}, + uint32_t deviceID_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther, + std::array const & deviceName_ = {}, + std::array const & pipelineCacheUUID_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : apiVersion{ apiVersion_ } + , driverVersion{ driverVersion_ } + , vendorID{ vendorID_ } + , deviceID{ deviceID_ } + , deviceType{ deviceType_ } + , deviceName{ deviceName_ } + , pipelineCacheUUID{ pipelineCacheUUID_ } + , limits{ limits_ } + , 
sparseProperties{ sparseProperties_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const &, + VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( apiVersion, driverVersion, vendorID, deviceID, deviceType, deviceName, pipelineCacheUUID, limits, sparseProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::partial_ordering operator<=>( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 ) + return cmp; + if ( auto cmp = driverVersion <=> rhs.driverVersion; cmp != 0 ) + return cmp; + if ( auto cmp = vendorID <=> rhs.vendorID; cmp != 0 ) + return cmp; + if ( auto cmp = deviceID <=> rhs.deviceID; cmp != 0 ) + return cmp; + if ( auto cmp = deviceType <=> rhs.deviceType; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( deviceName, rhs.deviceName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::partial_ordering::less : std::partial_ordering::greater; + if ( auto cmp = pipelineCacheUUID <=> rhs.pipelineCacheUUID; cmp != 0 ) + return cmp; + if ( auto cmp = limits <=> rhs.limits; cmp != 0 ) + return cmp; + if ( auto cmp = sparseProperties <=> rhs.sparseProperties; cmp != 0 ) + return cmp; + + return std::partial_ordering::equivalent; + } +#endif + + bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( apiVersion == rhs.apiVersion ) && ( driverVersion == rhs.driverVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && + ( deviceType == rhs.deviceType ) && ( strcmp( deviceName, rhs.deviceName ) == 0 ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) && + ( limits == rhs.limits ) && ( sparseProperties == rhs.sparseProperties ); + } + + bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t apiVersion = {}; + uint32_t driverVersion = {}; + uint32_t vendorID = {}; + uint32_t deviceID = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineCacheUUID = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {}; + }; + + // wrapper struct for struct VkPhysicalDeviceProperties2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProperties2.html + struct PhysicalDeviceProperties2 + { + using NativeType = VkPhysicalDeviceProperties2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , properties{ properties_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProperties2( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProperties2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProperties2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, properties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProperties2 const & ) const = default; 
+#else + bool operator==( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties ); +# endif + } + + bool operator!=( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceProperties2; + }; + + using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2; + + // wrapper struct for struct VkPhysicalDeviceLayeredApiVulkanPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLayeredApiVulkanPropertiesKHR.html + struct PhysicalDeviceLayeredApiVulkanPropertiesKHR + { + using NativeType = VkPhysicalDeviceLayeredApiVulkanPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredApiVulkanPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiVulkanPropertiesKHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , properties{ properties_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceLayeredApiVulkanPropertiesKHR( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLayeredApiVulkanPropertiesKHR( VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLayeredApiVulkanPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLayeredApiVulkanPropertiesKHR & operator=( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLayeredApiVulkanPropertiesKHR & operator=( VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, properties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == 
rhs.properties ); +# endif + } + + bool operator!=( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLayeredApiVulkanPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLayeredApiVulkanPropertiesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceLayeredDriverPropertiesMSFT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLayeredDriverPropertiesMSFT.html + struct PhysicalDeviceLayeredDriverPropertiesMSFT + { + using NativeType = VkPhysicalDeviceLayeredDriverPropertiesMSFT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLayeredDriverPropertiesMSFT( + VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT underlyingAPI_ = VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT::eNone, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , underlyingAPI{ underlyingAPI_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceLayeredDriverPropertiesMSFT( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLayeredDriverPropertiesMSFT( VkPhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLayeredDriverPropertiesMSFT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLayeredDriverPropertiesMSFT & operator=( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLayeredDriverPropertiesMSFT & operator=( VkPhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceLayeredDriverPropertiesMSFT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredDriverPropertiesMSFT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredDriverPropertiesMSFT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredDriverPropertiesMSFT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, underlyingAPI ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLayeredDriverPropertiesMSFT const & ) const = default; +#else + bool operator==( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( underlyingAPI == rhs.underlyingAPI ); +# endif + } + + bool operator!=( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT underlyingAPI = VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT::eNone; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLayeredDriverPropertiesMSFT; + }; + + // wrapper struct for struct VkPhysicalDeviceLegacyDitheringFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLegacyDitheringFeaturesEXT.html + struct PhysicalDeviceLegacyDitheringFeaturesEXT + { + using NativeType = VkPhysicalDeviceLegacyDitheringFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 legacyDithering_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , legacyDithering{ legacyDithering_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLegacyDitheringFeaturesEXT( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLegacyDitheringFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setLegacyDithering( VULKAN_HPP_NAMESPACE::Bool32 legacyDithering_ ) VULKAN_HPP_NOEXCEPT + { + legacyDithering = legacyDithering_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceLegacyDitheringFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLegacyDitheringFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLegacyDitheringFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLegacyDitheringFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, legacyDithering ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLegacyDitheringFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( legacyDithering == rhs.legacyDithering ); +# 
endif + } + + bool operator!=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 legacyDithering = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLegacyDitheringFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT.html + struct PhysicalDeviceLegacyVertexAttributesFeaturesEXT + { + using NativeType = VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLegacyVertexAttributesFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyVertexAttributesFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 legacyVertexAttributes_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , legacyVertexAttributes{ legacyVertexAttributes_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceLegacyVertexAttributesFeaturesEXT( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLegacyVertexAttributesFeaturesEXT( VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLegacyVertexAttributesFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLegacyVertexAttributesFeaturesEXT & operator=( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLegacyVertexAttributesFeaturesEXT & operator=( VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyVertexAttributesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyVertexAttributesFeaturesEXT & + setLegacyVertexAttributes( VULKAN_HPP_NAMESPACE::Bool32 legacyVertexAttributes_ ) VULKAN_HPP_NOEXCEPT + { + legacyVertexAttributes = legacyVertexAttributes_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, legacyVertexAttributes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( legacyVertexAttributes == rhs.legacyVertexAttributes ); +# endif + } + + bool operator!=( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLegacyVertexAttributesFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 legacyVertexAttributes = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLegacyVertexAttributesFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT.html + struct PhysicalDeviceLegacyVertexAttributesPropertiesEXT + { + using NativeType = VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLegacyVertexAttributesPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyVertexAttributesPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 nativeUnalignedPerformance_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , nativeUnalignedPerformance{ nativeUnalignedPerformance_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceLegacyVertexAttributesPropertiesEXT( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLegacyVertexAttributesPropertiesEXT( VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLegacyVertexAttributesPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLegacyVertexAttributesPropertiesEXT & + operator=( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLegacyVertexAttributesPropertiesEXT & operator=( VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, nativeUnalignedPerformance ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & ) const = default; +#else + bool 
operator==( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( nativeUnalignedPerformance == rhs.nativeUnalignedPerformance ); +# endif + } + + bool operator!=( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLegacyVertexAttributesPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 nativeUnalignedPerformance = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLegacyVertexAttributesPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceLineRasterizationFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLineRasterizationFeatures.html + struct PhysicalDeviceLineRasterizationFeatures + { + using NativeType = VkPhysicalDeviceLineRasterizationFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeatures( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rectangularLines{ rectangularLines_ } + , bresenhamLines{ bresenhamLines_ } + , smoothLines{ smoothLines_ } + , stippledRectangularLines{ stippledRectangularLines_ } + , stippledBresenhamLines{ stippledBresenhamLines_ } + , stippledSmoothLines{ stippledSmoothLines_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeatures( PhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLineRasterizationFeatures( VkPhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLineRasterizationFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLineRasterizationFeatures & operator=( PhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLineRasterizationFeatures & operator=( VkPhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT + { + rectangularLines = rectangularLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT + { + bresenhamLines = bresenhamLines_; + return *this; + } + + 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT + { + smoothLines = smoothLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & + setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledRectangularLines = stippledRectangularLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & + setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledBresenhamLines = stippledBresenhamLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & + setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledSmoothLines = stippledSmoothLines_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceLineRasterizationFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLineRasterizationFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLineRasterizationFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLineRasterizationFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rectangularLines, bresenhamLines, smoothLines, stippledRectangularLines, stippledBresenhamLines, stippledSmoothLines ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLineRasterizationFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceLineRasterizationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rectangularLines == rhs.rectangularLines ) && ( bresenhamLines == rhs.bresenhamLines ) && + ( smoothLines == rhs.smoothLines ) && ( stippledRectangularLines == rhs.stippledRectangularLines ) && + ( stippledBresenhamLines == rhs.stippledBresenhamLines ) && ( stippledSmoothLines == rhs.stippledSmoothLines ); +# endif + } + + bool operator!=( PhysicalDeviceLineRasterizationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLineRasterizationFeatures; + }; + + using PhysicalDeviceLineRasterizationFeaturesEXT = PhysicalDeviceLineRasterizationFeatures; + using PhysicalDeviceLineRasterizationFeaturesKHR = PhysicalDeviceLineRasterizationFeatures; + + // wrapper struct for struct VkPhysicalDeviceLineRasterizationProperties, see + // 
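
The wrapper structs above are layout-compatible with their Vk counterparts, so they can be chained through pNext and handed straight to the C entry points via their conversion operators. The sketch below is illustrative only and is not part of this patch: it assumes a valid VkPhysicalDevice, a Vulkan 1.1+ instance (so vkGetPhysicalDeviceProperties2 is available), that the relevant extensions/features are supported, and the helper name queryDeviceCaps is made up for the example.

    // Illustrative sketch, not part of the patch. In user code: #include <vulkan/vulkan.hpp>
    void queryDeviceCaps( VkPhysicalDevice gpu )
    {
      // Chain an extension property struct into the core properties query via pNext.
      VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesPropertiesEXT legacyVertexProps;  // sType preset by default ctor
      VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2                         props2;
      props2.pNext = &legacyVertexProps;

      // operator VkPhysicalDeviceProperties2 *() lets the wrapper be passed to the C API directly.
      vkGetPhysicalDeviceProperties2( gpu, props2 );

      VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const & limits = props2.properties.limits;
      uint32_t     maxPushConstants = limits.maxPushConstantsSize;
      VkDeviceSize minUboAlignment  = limits.minUniformBufferOffsetAlignment;
      VkBool32     unalignedIsFast  = legacyVertexProps.nativeUnalignedPerformance;
      (void) maxPushConstants; (void) minUboAlignment; (void) unalignedIsFast;

      // Feature structs are enabled by chaining them into VkDeviceCreateInfo::pNext
      // (after confirming support via vkGetPhysicalDeviceFeatures2).
      VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT lineFeatures;
      lineFeatures.setBresenhamLines( VK_TRUE ).setStippledBresenhamLines( VK_TRUE );

      VkDeviceCreateInfo deviceCreateInfo = {};
      deviceCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
      deviceCreateInfo.pNext = &lineFeatures;  // wrapper layout matches VkPhysicalDeviceLineRasterizationFeatures
      // ... fill in queue create infos, then call vkCreateDevice( gpu, &deviceCreateInfo, nullptr, &device ).
    }
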
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLineRasterizationProperties.html + struct PhysicalDeviceLineRasterizationProperties + { + using NativeType = VkPhysicalDeviceLineRasterizationProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationProperties( uint32_t lineSubPixelPrecisionBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , lineSubPixelPrecisionBits{ lineSubPixelPrecisionBits_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationProperties( PhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLineRasterizationProperties( VkPhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLineRasterizationProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLineRasterizationProperties & operator=( PhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLineRasterizationProperties & operator=( VkPhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceLineRasterizationProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLineRasterizationProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLineRasterizationProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLineRasterizationProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, lineSubPixelPrecisionBits ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLineRasterizationProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceLineRasterizationProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits ); +# endif + } + + bool operator!=( PhysicalDeviceLineRasterizationProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationProperties; + void * pNext = {}; + uint32_t lineSubPixelPrecisionBits = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLineRasterizationProperties; + }; + + using PhysicalDeviceLineRasterizationPropertiesEXT = PhysicalDeviceLineRasterizationProperties; + using PhysicalDeviceLineRasterizationPropertiesKHR = PhysicalDeviceLineRasterizationProperties; + + // wrapper struct for struct VkPhysicalDeviceLinearColorAttachmentFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLinearColorAttachmentFeaturesNV.html + 
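+  // Illustrative query sketch (assumes the default "vk" namespace, the enhanced-mode StructureChain overloads,
+  // and a vk::PhysicalDevice named "physDev"; names are placeholders): support is usually read by chaining this
+  // struct behind vk::PhysicalDeviceFeatures2, e.g.
+  //   auto chain        = physDev.getFeatures2<vk::PhysicalDeviceFeatures2,
+  //                                            vk::PhysicalDeviceLinearColorAttachmentFeaturesNV>();
+  //   auto & nvFeatures = chain.get<vk::PhysicalDeviceLinearColorAttachmentFeaturesNV>();
+  //   bool supported    = ( nvFeatures.linearColorAttachment == VK_TRUE );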
struct PhysicalDeviceLinearColorAttachmentFeaturesNV + { + using NativeType = VkPhysicalDeviceLinearColorAttachmentFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , linearColorAttachment{ linearColorAttachment_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceLinearColorAttachmentFeaturesNV( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLinearColorAttachmentFeaturesNV( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLinearColorAttachmentFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & + setLinearColorAttachment( VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ ) VULKAN_HPP_NOEXCEPT + { + linearColorAttachment = linearColorAttachment_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, linearColorAttachment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLinearColorAttachmentFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( linearColorAttachment == rhs.linearColorAttachment ); +# endif + } + + bool operator!=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 
linearColorAttachment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLinearColorAttachmentFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceMaintenance3Properties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance3Properties.html + struct PhysicalDeviceMaintenance3Properties + { + using NativeType = VkPhysicalDeviceMaintenance3Properties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance3Properties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( uint32_t maxPerSetDescriptors_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxPerSetDescriptors{ maxPerSetDescriptors_ } + , maxMemoryAllocationSize{ maxMemoryAllocationSize_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance3Properties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance3Properties & operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance3Properties & operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMaintenance3Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance3Properties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance3Properties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxPerSetDescriptors, maxMemoryAllocationSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance3Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) && + ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties; + void * pNext = {}; + uint32_t maxPerSetDescriptors = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance3Properties; + }; + + using PhysicalDeviceMaintenance3PropertiesKHR = 
PhysicalDeviceMaintenance3Properties; + + // wrapper struct for struct VkPhysicalDeviceMaintenance4Features, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance4Features.html + struct PhysicalDeviceMaintenance4Features + { + using NativeType = VkPhysicalDeviceMaintenance4Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Features; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maintenance4{ maintenance4_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance4Features( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance4Features( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance4Features & operator=( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance4Features & operator=( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT + { + maintenance4 = maintenance4_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMaintenance4Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance4Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance4Features const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance4Features *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maintenance4 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance4Features const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance4 == rhs.maintenance4 ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance4Features; + }; + + using 
PhysicalDeviceMaintenance4FeaturesKHR = PhysicalDeviceMaintenance4Features; + + // wrapper struct for struct VkPhysicalDeviceMaintenance4Properties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance4Properties.html + struct PhysicalDeviceMaintenance4Properties + { + using NativeType = VkPhysicalDeviceMaintenance4Properties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Properties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxBufferSize{ maxBufferSize_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance4Properties( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance4Properties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance4Properties & operator=( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance4Properties & operator=( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMaintenance4Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance4Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance4Properties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance4Properties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxBufferSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance4Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxBufferSize == rhs.maxBufferSize ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Properties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance4Properties; + }; + + using PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties; + + // wrapper struct for struct VkPhysicalDeviceMaintenance5Features, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance5Features.html + struct PhysicalDeviceMaintenance5Features + { + using NativeType = VkPhysicalDeviceMaintenance5Features; + + static const 
bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance5Features; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Features( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maintenance5{ maintenance5_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Features( PhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance5Features( VkPhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance5Features( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance5Features & operator=( PhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance5Features & operator=( VkPhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5Features & setMaintenance5( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ ) VULKAN_HPP_NOEXCEPT + { + maintenance5 = maintenance5_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMaintenance5Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance5Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance5Features const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance5Features *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maintenance5 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance5Features const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance5Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance5 == rhs.maintenance5 ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance5Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance5Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance5 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance5Features; + }; + + using PhysicalDeviceMaintenance5FeaturesKHR = PhysicalDeviceMaintenance5Features; + + // wrapper struct for struct VkPhysicalDeviceMaintenance5Properties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance5Properties.html + struct PhysicalDeviceMaintenance5Properties + { + using NativeType = 
VkPhysicalDeviceMaintenance5Properties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance5Properties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Properties( VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , earlyFragmentMultisampleCoverageAfterSampleCounting{ earlyFragmentMultisampleCoverageAfterSampleCounting_ } + , earlyFragmentSampleMaskTestBeforeSampleCounting{ earlyFragmentSampleMaskTestBeforeSampleCounting_ } + , depthStencilSwizzleOneSupport{ depthStencilSwizzleOneSupport_ } + , polygonModePointSize{ polygonModePointSize_ } + , nonStrictSinglePixelWideLinesUseParallelogram{ nonStrictSinglePixelWideLinesUseParallelogram_ } + , nonStrictWideLinesUseParallelogram{ nonStrictWideLinesUseParallelogram_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Properties( PhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance5Properties( VkPhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance5Properties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance5Properties & operator=( PhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance5Properties & operator=( VkPhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMaintenance5Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance5Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance5Properties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance5Properties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + earlyFragmentMultisampleCoverageAfterSampleCounting, + earlyFragmentSampleMaskTestBeforeSampleCounting, + depthStencilSwizzleOneSupport, + polygonModePointSize, + nonStrictSinglePixelWideLinesUseParallelogram, + nonStrictWideLinesUseParallelogram ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance5Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance5Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( earlyFragmentMultisampleCoverageAfterSampleCounting == 
rhs.earlyFragmentMultisampleCoverageAfterSampleCounting ) && + ( earlyFragmentSampleMaskTestBeforeSampleCounting == rhs.earlyFragmentSampleMaskTestBeforeSampleCounting ) && + ( depthStencilSwizzleOneSupport == rhs.depthStencilSwizzleOneSupport ) && ( polygonModePointSize == rhs.polygonModePointSize ) && + ( nonStrictSinglePixelWideLinesUseParallelogram == rhs.nonStrictSinglePixelWideLinesUseParallelogram ) && + ( nonStrictWideLinesUseParallelogram == rhs.nonStrictWideLinesUseParallelogram ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance5Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance5Properties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting = {}; + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport = {}; + VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram = {}; + VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance5Properties; + }; + + using PhysicalDeviceMaintenance5PropertiesKHR = PhysicalDeviceMaintenance5Properties; + + // wrapper struct for struct VkPhysicalDeviceMaintenance6Features, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance6Features.html + struct PhysicalDeviceMaintenance6Features + { + using NativeType = VkPhysicalDeviceMaintenance6Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance6Features; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Features( VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maintenance6{ maintenance6_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Features( PhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance6Features( VkPhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance6Features( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance6Features & operator=( PhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance6Features & operator=( VkPhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance6Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance6Features & setMaintenance6( VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ ) VULKAN_HPP_NOEXCEPT + { + maintenance6 = maintenance6_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMaintenance6Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance6Features &() VULKAN_HPP_NOEXCEPT 
+ { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance6Features const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance6Features *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maintenance6 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance6Features const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance6Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance6 == rhs.maintenance6 ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance6Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance6Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance6 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance6Features; + }; + + using PhysicalDeviceMaintenance6FeaturesKHR = PhysicalDeviceMaintenance6Features; + + // wrapper struct for struct VkPhysicalDeviceMaintenance6Properties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance6Properties.html + struct PhysicalDeviceMaintenance6Properties + { + using NativeType = VkPhysicalDeviceMaintenance6Properties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance6Properties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Properties( VULKAN_HPP_NAMESPACE::Bool32 blockTexelViewCompatibleMultipleLayers_ = {}, + uint32_t maxCombinedImageSamplerDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateClampCombinerInputs_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , blockTexelViewCompatibleMultipleLayers{ blockTexelViewCompatibleMultipleLayers_ } + , maxCombinedImageSamplerDescriptorCount{ maxCombinedImageSamplerDescriptorCount_ } + , fragmentShadingRateClampCombinerInputs{ fragmentShadingRateClampCombinerInputs_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Properties( PhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance6Properties( VkPhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance6Properties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance6Properties & operator=( PhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance6Properties & operator=( VkPhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMaintenance6Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance6Properties &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance6Properties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance6Properties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, blockTexelViewCompatibleMultipleLayers, maxCombinedImageSamplerDescriptorCount, fragmentShadingRateClampCombinerInputs ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance6Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance6Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( blockTexelViewCompatibleMultipleLayers == rhs.blockTexelViewCompatibleMultipleLayers ) && + ( maxCombinedImageSamplerDescriptorCount == rhs.maxCombinedImageSamplerDescriptorCount ) && + ( fragmentShadingRateClampCombinerInputs == rhs.fragmentShadingRateClampCombinerInputs ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance6Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance6Properties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 blockTexelViewCompatibleMultipleLayers = {}; + uint32_t maxCombinedImageSamplerDescriptorCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateClampCombinerInputs = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance6Properties; + }; + + using PhysicalDeviceMaintenance6PropertiesKHR = PhysicalDeviceMaintenance6Properties; + + // wrapper struct for struct VkPhysicalDeviceMaintenance7FeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance7FeaturesKHR.html + struct PhysicalDeviceMaintenance7FeaturesKHR + { + using NativeType = VkPhysicalDeviceMaintenance7FeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance7FeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 maintenance7_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maintenance7{ maintenance7_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7FeaturesKHR( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance7FeaturesKHR( VkPhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance7FeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance7FeaturesKHR & operator=( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance7FeaturesKHR & operator=( VkPhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance7FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance7FeaturesKHR & setMaintenance7( VULKAN_HPP_NAMESPACE::Bool32 maintenance7_ ) VULKAN_HPP_NOEXCEPT + { + maintenance7 = maintenance7_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMaintenance7FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance7FeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance7FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance7FeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maintenance7 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance7FeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance7 == rhs.maintenance7 ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance7FeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance7 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance7FeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceMaintenance7PropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance7PropertiesKHR.html + struct PhysicalDeviceMaintenance7PropertiesKHR + { + using NativeType = VkPhysicalDeviceMaintenance7PropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance7PropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7PropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 robustFragmentShadingRateAttachmentAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilAttachmentAccess_ = {}, + uint32_t maxDescriptorSetTotalUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetTotalStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetTotalBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindTotalBuffersDynamic_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , robustFragmentShadingRateAttachmentAccess{ robustFragmentShadingRateAttachmentAccess_ } + , separateDepthStencilAttachmentAccess{ separateDepthStencilAttachmentAccess_ } + , maxDescriptorSetTotalUniformBuffersDynamic{ maxDescriptorSetTotalUniformBuffersDynamic_ } + , 
maxDescriptorSetTotalStorageBuffersDynamic{ maxDescriptorSetTotalStorageBuffersDynamic_ } + , maxDescriptorSetTotalBuffersDynamic{ maxDescriptorSetTotalBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic{ maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic{ maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindTotalBuffersDynamic{ maxDescriptorSetUpdateAfterBindTotalBuffersDynamic_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7PropertiesKHR( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance7PropertiesKHR( VkPhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance7PropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance7PropertiesKHR & operator=( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance7PropertiesKHR & operator=( VkPhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMaintenance7PropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance7PropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance7PropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance7PropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + robustFragmentShadingRateAttachmentAccess, + separateDepthStencilAttachmentAccess, + maxDescriptorSetTotalUniformBuffersDynamic, + maxDescriptorSetTotalStorageBuffersDynamic, + maxDescriptorSetTotalBuffersDynamic, + maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic, + maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic, + maxDescriptorSetUpdateAfterBindTotalBuffersDynamic ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance7PropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( robustFragmentShadingRateAttachmentAccess == rhs.robustFragmentShadingRateAttachmentAccess ) && + ( separateDepthStencilAttachmentAccess == rhs.separateDepthStencilAttachmentAccess ) && + ( maxDescriptorSetTotalUniformBuffersDynamic == rhs.maxDescriptorSetTotalUniformBuffersDynamic ) && + ( maxDescriptorSetTotalStorageBuffersDynamic == rhs.maxDescriptorSetTotalStorageBuffersDynamic ) && + ( maxDescriptorSetTotalBuffersDynamic == rhs.maxDescriptorSetTotalBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic ) && + ( 
maxDescriptorSetUpdateAfterBindTotalBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindTotalBuffersDynamic ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance7PropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustFragmentShadingRateAttachmentAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilAttachmentAccess = {}; + uint32_t maxDescriptorSetTotalUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetTotalStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetTotalBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindTotalBuffersDynamic = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance7PropertiesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceMaintenance8FeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance8FeaturesKHR.html + struct PhysicalDeviceMaintenance8FeaturesKHR + { + using NativeType = VkPhysicalDeviceMaintenance8FeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance8FeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance8FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 maintenance8_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maintenance8{ maintenance8_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance8FeaturesKHR( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance8FeaturesKHR( VkPhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance8FeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance8FeaturesKHR & operator=( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance8FeaturesKHR & operator=( VkPhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance8FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance8FeaturesKHR & setMaintenance8( VULKAN_HPP_NAMESPACE::Bool32 maintenance8_ ) VULKAN_HPP_NOEXCEPT + { + maintenance8 = maintenance8_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMaintenance8FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance8FeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance8FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance8FeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maintenance8 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance8FeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance8 == rhs.maintenance8 ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance8FeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance8 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance8FeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceMapMemoryPlacedFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMapMemoryPlacedFeaturesEXT.html + struct PhysicalDeviceMapMemoryPlacedFeaturesEXT + { + using NativeType = VkPhysicalDeviceMapMemoryPlacedFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryMapPlaced_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 memoryMapRangePlaced_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 memoryUnmapReserve_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryMapPlaced{ memoryMapPlaced_ } + , memoryMapRangePlaced{ memoryMapRangePlaced_ } + , memoryUnmapReserve{ memoryUnmapReserve_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedFeaturesEXT( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMapMemoryPlacedFeaturesEXT( VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMapMemoryPlacedFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMapMemoryPlacedFeaturesEXT & operator=( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMapMemoryPlacedFeaturesEXT & operator=( VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & setMemoryMapPlaced( VULKAN_HPP_NAMESPACE::Bool32 memoryMapPlaced_ ) VULKAN_HPP_NOEXCEPT + { + memoryMapPlaced = memoryMapPlaced_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & + setMemoryMapRangePlaced( VULKAN_HPP_NAMESPACE::Bool32 memoryMapRangePlaced_ ) VULKAN_HPP_NOEXCEPT + { + memoryMapRangePlaced = memoryMapRangePlaced_; + return *this; + } + + 
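+    // Illustrative usage sketch (assumes the default "vk" namespace; "deviceCreateInfo" is a placeholder
+    // vk::DeviceCreateInfo): with VK_EXT_map_memory_placed enabled, these setters are typically chained into
+    // the device-creation pNext chain, e.g.
+    //   vk::PhysicalDeviceMapMemoryPlacedFeaturesEXT placedFeatures;
+    //   placedFeatures.setMemoryMapPlaced( VK_TRUE ).setMemoryUnmapReserve( VK_TRUE );
+    //   deviceCreateInfo.setPNext( &placedFeatures );
+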
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & + setMemoryUnmapReserve( VULKAN_HPP_NAMESPACE::Bool32 memoryUnmapReserve_ ) VULKAN_HPP_NOEXCEPT + { + memoryUnmapReserve = memoryUnmapReserve_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryMapPlaced, memoryMapRangePlaced, memoryUnmapReserve ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryMapPlaced == rhs.memoryMapPlaced ) && + ( memoryMapRangePlaced == rhs.memoryMapRangePlaced ) && ( memoryUnmapReserve == rhs.memoryUnmapReserve ); +# endif + } + + bool operator!=( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 memoryMapPlaced = {}; + VULKAN_HPP_NAMESPACE::Bool32 memoryMapRangePlaced = {}; + VULKAN_HPP_NAMESPACE::Bool32 memoryUnmapReserve = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMapMemoryPlacedFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceMapMemoryPlacedPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMapMemoryPlacedPropertiesEXT.html + struct PhysicalDeviceMapMemoryPlacedPropertiesEXT + { + using NativeType = VkPhysicalDeviceMapMemoryPlacedPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMapMemoryPlacedPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize minPlacedMemoryMapAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minPlacedMemoryMapAlignment{ minPlacedMemoryMapAlignment_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedPropertiesEXT( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMapMemoryPlacedPropertiesEXT( VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMapMemoryPlacedPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMapMemoryPlacedPropertiesEXT & operator=( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
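+    // Illustrative query sketch (assumes the default "vk" namespace and a vk::PhysicalDevice named "physDev"):
+    //   auto propChain = physDev.getProperties2<vk::PhysicalDeviceProperties2,
+    //                                           vk::PhysicalDeviceMapMemoryPlacedPropertiesEXT>();
+    //   vk::DeviceSize alignment = propChain.get<vk::PhysicalDeviceMapMemoryPlacedPropertiesEXT>().minPlacedMemoryMapAlignment;
+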
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMapMemoryPlacedPropertiesEXT & operator=( VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minPlacedMemoryMapAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minPlacedMemoryMapAlignment == rhs.minPlacedMemoryMapAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMapMemoryPlacedPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minPlacedMemoryMapAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMapMemoryPlacedPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceMemoryBudgetPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryBudgetPropertiesEXT.html + struct PhysicalDeviceMemoryBudgetPropertiesEXT + { + using NativeType = VkPhysicalDeviceMemoryBudgetPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( std::array const & heapBudget_ = {}, + std::array const & heapUsage_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , heapBudget{ heapBudget_ } + , heapUsage{ heapUsage_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryBudgetPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator 
VkPhysicalDeviceMemoryBudgetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryBudgetPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, heapBudget, heapUsage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( heapBudget == rhs.heapBudget ) && ( heapUsage == rhs.heapUsage ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D heapBudget = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D heapUsage = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryBudgetPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceMemoryDecompressionFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryDecompressionFeaturesNV.html + struct PhysicalDeviceMemoryDecompressionFeaturesNV + { + using NativeType = VkPhysicalDeviceMemoryDecompressionFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 memoryDecompression_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryDecompression{ memoryDecompression_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionFeaturesNV( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryDecompressionFeaturesNV( VkPhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryDecompressionFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMemoryDecompressionFeaturesNV & operator=( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMemoryDecompressionFeaturesNV & operator=( VkPhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryDecompressionFeaturesNV & setPNext( void * pNext_ ) 
VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryDecompressionFeaturesNV & + setMemoryDecompression( VULKAN_HPP_NAMESPACE::Bool32 memoryDecompression_ ) VULKAN_HPP_NOEXCEPT + { + memoryDecompression = memoryDecompression_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMemoryDecompressionFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryDecompressionFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryDecompressionFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryDecompressionFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryDecompression ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryDecompressionFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryDecompression == rhs.memoryDecompression ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 memoryDecompression = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryDecompressionFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceMemoryDecompressionPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryDecompressionPropertiesNV.html + struct PhysicalDeviceMemoryDecompressionPropertiesNV + { + using NativeType = VkPhysicalDeviceMemoryDecompressionPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionPropertiesNV( VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethods_ = {}, + uint64_t maxDecompressionIndirectCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , decompressionMethods{ decompressionMethods_ } + , maxDecompressionIndirectCount{ maxDecompressionIndirectCount_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMemoryDecompressionPropertiesNV( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryDecompressionPropertiesNV( VkPhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryDecompressionPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMemoryDecompressionPropertiesNV & operator=( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMemoryDecompressionPropertiesNV & operator=( VkPhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMemoryDecompressionPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryDecompressionPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryDecompressionPropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryDecompressionPropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, decompressionMethods, maxDecompressionIndirectCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryDecompressionPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decompressionMethods == rhs.decompressionMethods ) && + ( maxDecompressionIndirectCount == rhs.maxDecompressionIndirectCount ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethods = {}; + uint64_t maxDecompressionIndirectCount = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryDecompressionPropertiesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceMemoryPriorityFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryPriorityFeaturesEXT.html + struct PhysicalDeviceMemoryPriorityFeaturesEXT + { + using NativeType = VkPhysicalDeviceMemoryPriorityFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryPriority{ memoryPriority_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryPriorityFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( 
VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT + { + memoryPriority = memoryPriority_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryPriorityFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryPriority ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryPriority == rhs.memoryPriority ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryPriorityFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceMemoryProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryProperties.html + struct PhysicalDeviceMemoryProperties + { + using NativeType = VkPhysicalDeviceMemoryProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceMemoryProperties( uint32_t memoryTypeCount_ = {}, + std::array const & memoryTypes_ = {}, + uint32_t memoryHeapCount_ = {}, + std::array const & memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryTypeCount{ memoryTypeCount_ } + , memoryTypes{ memoryTypes_ } + , memoryHeapCount{ memoryHeapCount_ } + , memoryHeaps{ memoryHeaps_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties 
const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMemoryProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( memoryTypeCount, memoryTypes, memoryHeapCount, memoryHeaps ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = memoryTypeCount <=> rhs.memoryTypeCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < memoryTypeCount; ++i ) + { + if ( auto cmp = memoryTypes[i] <=> rhs.memoryTypes[i]; cmp != 0 ) + return cmp; + } + if ( auto cmp = memoryHeapCount <=> rhs.memoryHeapCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < memoryHeapCount; ++i ) + { + if ( auto cmp = memoryHeaps[i] <=> rhs.memoryHeaps[i]; cmp != 0 ) + return cmp; + } + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( memoryTypeCount == rhs.memoryTypeCount ) && + ( memcmp( memoryTypes, rhs.memoryTypes, memoryTypeCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) ) == 0 ) && + ( memoryHeapCount == rhs.memoryHeapCount ) && + ( memcmp( memoryHeaps, rhs.memoryHeaps, memoryHeapCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) ) == 0 ); + } + + bool operator!=( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t memoryTypeCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D memoryTypes = {}; + uint32_t memoryHeapCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D memoryHeaps = {}; + }; + + // wrapper struct for struct VkPhysicalDeviceMemoryProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryProperties2.html + struct PhysicalDeviceMemoryProperties2 + { + using NativeType = VkPhysicalDeviceMemoryProperties2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryProperties{ memoryProperties_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryProperties2( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMemoryProperties2 & operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
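+
+    // Editorial note (not generated code): PhysicalDeviceMemoryProperties2 is the extensible
+    // variant of PhysicalDeviceMemoryProperties and is typically filled via
+    // vk::PhysicalDevice::getMemoryProperties2, with extension structs such as
+    // PhysicalDeviceMemoryBudgetPropertiesEXT (defined earlier in this file) chained through
+    // pNext. A minimal, hedged usage sketch; `physicalDevice` is assumed to be a valid
+    // vk::PhysicalDevice and VK_EXT_memory_budget is assumed to be supported:
+    //
+    //   vk::PhysicalDeviceMemoryBudgetPropertiesEXT budget{};
+    //   vk::PhysicalDeviceMemoryProperties2         props2{};
+    //   props2.pNext = &budget;                           // chain the EXT struct
+    //   physicalDevice.getMemoryProperties2( &props2 );   // fills both structs
+    //   // budget.heapBudget[i] / budget.heapUsage[i] now describe memory heap i.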
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMemoryProperties2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryProperties2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryProperties2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryProperties2 const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryProperties == rhs.memoryProperties ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryProperties2; + }; + + using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2; + + // wrapper struct for struct VkPhysicalDeviceMeshShaderFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMeshShaderFeaturesEXT.html + struct PhysicalDeviceMeshShaderFeaturesEXT + { + using NativeType = VkPhysicalDeviceMeshShaderFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , taskShader{ taskShader_ } + , meshShader{ meshShader_ } + , multiviewMeshShader{ multiviewMeshShader_ } + , primitiveFragmentShadingRateMeshShader{ primitiveFragmentShadingRateMeshShader_ } + , meshShaderQueries{ meshShaderQueries_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesEXT( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMeshShaderFeaturesEXT( VkPhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMeshShaderFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMeshShaderFeaturesEXT & operator=( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMeshShaderFeaturesEXT & operator=( VkPhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT + { + taskShader = taskShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT + { + meshShader = meshShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & + setMultiviewMeshShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewMeshShader = multiviewMeshShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & + setPrimitiveFragmentShadingRateMeshShader( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader_ ) VULKAN_HPP_NOEXCEPT + { + primitiveFragmentShadingRateMeshShader = primitiveFragmentShadingRateMeshShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMeshShaderQueries( VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries_ ) VULKAN_HPP_NOEXCEPT + { + meshShaderQueries = meshShaderQueries_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMeshShaderFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, taskShader, meshShader, multiviewMeshShader, primitiveFragmentShadingRateMeshShader, meshShaderQueries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMeshShaderFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( taskShader == rhs.taskShader ) && ( meshShader == rhs.meshShader ) && + ( multiviewMeshShader == rhs.multiviewMeshShader ) && ( primitiveFragmentShadingRateMeshShader == rhs.primitiveFragmentShadingRateMeshShader ) && + ( meshShaderQueries == rhs.meshShaderQueries ); +# endif + } + + bool operator!=( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 taskShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 meshShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 
multiviewMeshShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMeshShaderFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceMeshShaderFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMeshShaderFeaturesNV.html + struct PhysicalDeviceMeshShaderFeaturesNV + { + using NativeType = VkPhysicalDeviceMeshShaderFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , taskShader{ taskShader_ } + , meshShader{ meshShader_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMeshShaderFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMeshShaderFeaturesNV & operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMeshShaderFeaturesNV & operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT + { + taskShader = taskShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT + { + meshShader = meshShader_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMeshShaderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, taskShader, meshShader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( 
taskShader == rhs.taskShader ) && ( meshShader == rhs.meshShader ); +# endif + } + + bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 taskShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 meshShader = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMeshShaderFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceMeshShaderPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMeshShaderPropertiesEXT.html + struct PhysicalDeviceMeshShaderPropertiesEXT + { + using NativeType = VkPhysicalDeviceMeshShaderPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesEXT( uint32_t maxTaskWorkGroupTotalCount_ = {}, + std::array const & maxTaskWorkGroupCount_ = {}, + uint32_t maxTaskWorkGroupInvocations_ = {}, + std::array const & maxTaskWorkGroupSize_ = {}, + uint32_t maxTaskPayloadSize_ = {}, + uint32_t maxTaskSharedMemorySize_ = {}, + uint32_t maxTaskPayloadAndSharedMemorySize_ = {}, + uint32_t maxMeshWorkGroupTotalCount_ = {}, + std::array const & maxMeshWorkGroupCount_ = {}, + uint32_t maxMeshWorkGroupInvocations_ = {}, + std::array const & maxMeshWorkGroupSize_ = {}, + uint32_t maxMeshSharedMemorySize_ = {}, + uint32_t maxMeshPayloadAndSharedMemorySize_ = {}, + uint32_t maxMeshOutputMemorySize_ = {}, + uint32_t maxMeshPayloadAndOutputMemorySize_ = {}, + uint32_t maxMeshOutputComponents_ = {}, + uint32_t maxMeshOutputVertices_ = {}, + uint32_t maxMeshOutputPrimitives_ = {}, + uint32_t maxMeshOutputLayers_ = {}, + uint32_t maxMeshMultiviewViewCount_ = {}, + uint32_t meshOutputPerVertexGranularity_ = {}, + uint32_t meshOutputPerPrimitiveGranularity_ = {}, + uint32_t maxPreferredTaskWorkGroupInvocations_ = {}, + uint32_t maxPreferredMeshWorkGroupInvocations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationVertexOutput_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationPrimitiveOutput_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersCompactVertexOutput_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersCompactPrimitiveOutput_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxTaskWorkGroupTotalCount{ maxTaskWorkGroupTotalCount_ } + , maxTaskWorkGroupCount{ maxTaskWorkGroupCount_ } + , maxTaskWorkGroupInvocations{ maxTaskWorkGroupInvocations_ } + , maxTaskWorkGroupSize{ maxTaskWorkGroupSize_ } + , maxTaskPayloadSize{ maxTaskPayloadSize_ } + , maxTaskSharedMemorySize{ maxTaskSharedMemorySize_ } + , maxTaskPayloadAndSharedMemorySize{ maxTaskPayloadAndSharedMemorySize_ } + , maxMeshWorkGroupTotalCount{ maxMeshWorkGroupTotalCount_ } + , maxMeshWorkGroupCount{ maxMeshWorkGroupCount_ } + , maxMeshWorkGroupInvocations{ maxMeshWorkGroupInvocations_ } + , maxMeshWorkGroupSize{ maxMeshWorkGroupSize_ } + , maxMeshSharedMemorySize{ maxMeshSharedMemorySize_ } + , maxMeshPayloadAndSharedMemorySize{ maxMeshPayloadAndSharedMemorySize_ } + , maxMeshOutputMemorySize{ maxMeshOutputMemorySize_ } + , maxMeshPayloadAndOutputMemorySize{ maxMeshPayloadAndOutputMemorySize_ } + , 
maxMeshOutputComponents{ maxMeshOutputComponents_ } + , maxMeshOutputVertices{ maxMeshOutputVertices_ } + , maxMeshOutputPrimitives{ maxMeshOutputPrimitives_ } + , maxMeshOutputLayers{ maxMeshOutputLayers_ } + , maxMeshMultiviewViewCount{ maxMeshMultiviewViewCount_ } + , meshOutputPerVertexGranularity{ meshOutputPerVertexGranularity_ } + , meshOutputPerPrimitiveGranularity{ meshOutputPerPrimitiveGranularity_ } + , maxPreferredTaskWorkGroupInvocations{ maxPreferredTaskWorkGroupInvocations_ } + , maxPreferredMeshWorkGroupInvocations{ maxPreferredMeshWorkGroupInvocations_ } + , prefersLocalInvocationVertexOutput{ prefersLocalInvocationVertexOutput_ } + , prefersLocalInvocationPrimitiveOutput{ prefersLocalInvocationPrimitiveOutput_ } + , prefersCompactVertexOutput{ prefersCompactVertexOutput_ } + , prefersCompactPrimitiveOutput{ prefersCompactPrimitiveOutput_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesEXT( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMeshShaderPropertiesEXT( VkPhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMeshShaderPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMeshShaderPropertiesEXT & operator=( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMeshShaderPropertiesEXT & operator=( VkPhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMeshShaderPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxTaskWorkGroupTotalCount, + maxTaskWorkGroupCount, + maxTaskWorkGroupInvocations, + maxTaskWorkGroupSize, + maxTaskPayloadSize, + maxTaskSharedMemorySize, + maxTaskPayloadAndSharedMemorySize, + maxMeshWorkGroupTotalCount, + maxMeshWorkGroupCount, + maxMeshWorkGroupInvocations, + maxMeshWorkGroupSize, + maxMeshSharedMemorySize, + maxMeshPayloadAndSharedMemorySize, + maxMeshOutputMemorySize, + maxMeshPayloadAndOutputMemorySize, + maxMeshOutputComponents, + maxMeshOutputVertices, + maxMeshOutputPrimitives, + maxMeshOutputLayers, + maxMeshMultiviewViewCount, + 
meshOutputPerVertexGranularity, + meshOutputPerPrimitiveGranularity, + maxPreferredTaskWorkGroupInvocations, + maxPreferredMeshWorkGroupInvocations, + prefersLocalInvocationVertexOutput, + prefersLocalInvocationPrimitiveOutput, + prefersCompactVertexOutput, + prefersCompactPrimitiveOutput ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMeshShaderPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTaskWorkGroupTotalCount == rhs.maxTaskWorkGroupTotalCount ) && + ( maxTaskWorkGroupCount == rhs.maxTaskWorkGroupCount ) && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) && + ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) && ( maxTaskPayloadSize == rhs.maxTaskPayloadSize ) && + ( maxTaskSharedMemorySize == rhs.maxTaskSharedMemorySize ) && ( maxTaskPayloadAndSharedMemorySize == rhs.maxTaskPayloadAndSharedMemorySize ) && + ( maxMeshWorkGroupTotalCount == rhs.maxMeshWorkGroupTotalCount ) && ( maxMeshWorkGroupCount == rhs.maxMeshWorkGroupCount ) && + ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) && + ( maxMeshSharedMemorySize == rhs.maxMeshSharedMemorySize ) && ( maxMeshPayloadAndSharedMemorySize == rhs.maxMeshPayloadAndSharedMemorySize ) && + ( maxMeshOutputMemorySize == rhs.maxMeshOutputMemorySize ) && ( maxMeshPayloadAndOutputMemorySize == rhs.maxMeshPayloadAndOutputMemorySize ) && + ( maxMeshOutputComponents == rhs.maxMeshOutputComponents ) && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) && + ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) && ( maxMeshOutputLayers == rhs.maxMeshOutputLayers ) && + ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) && ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) && + ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity ) && + ( maxPreferredTaskWorkGroupInvocations == rhs.maxPreferredTaskWorkGroupInvocations ) && + ( maxPreferredMeshWorkGroupInvocations == rhs.maxPreferredMeshWorkGroupInvocations ) && + ( prefersLocalInvocationVertexOutput == rhs.prefersLocalInvocationVertexOutput ) && + ( prefersLocalInvocationPrimitiveOutput == rhs.prefersLocalInvocationPrimitiveOutput ) && + ( prefersCompactVertexOutput == rhs.prefersCompactVertexOutput ) && ( prefersCompactPrimitiveOutput == rhs.prefersCompactPrimitiveOutput ); +# endif + } + + bool operator!=( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesEXT; + void * pNext = {}; + uint32_t maxTaskWorkGroupTotalCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxTaskWorkGroupCount = {}; + uint32_t maxTaskWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxTaskWorkGroupSize = {}; + uint32_t maxTaskPayloadSize = {}; + uint32_t maxTaskSharedMemorySize = {}; + uint32_t maxTaskPayloadAndSharedMemorySize = {}; + uint32_t maxMeshWorkGroupTotalCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxMeshWorkGroupCount = {}; + uint32_t maxMeshWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxMeshWorkGroupSize = {}; + uint32_t 
maxMeshSharedMemorySize = {}; + uint32_t maxMeshPayloadAndSharedMemorySize = {}; + uint32_t maxMeshOutputMemorySize = {}; + uint32_t maxMeshPayloadAndOutputMemorySize = {}; + uint32_t maxMeshOutputComponents = {}; + uint32_t maxMeshOutputVertices = {}; + uint32_t maxMeshOutputPrimitives = {}; + uint32_t maxMeshOutputLayers = {}; + uint32_t maxMeshMultiviewViewCount = {}; + uint32_t meshOutputPerVertexGranularity = {}; + uint32_t meshOutputPerPrimitiveGranularity = {}; + uint32_t maxPreferredTaskWorkGroupInvocations = {}; + uint32_t maxPreferredMeshWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationVertexOutput = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationPrimitiveOutput = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersCompactVertexOutput = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersCompactPrimitiveOutput = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMeshShaderPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceMeshShaderPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMeshShaderPropertiesNV.html + struct PhysicalDeviceMeshShaderPropertiesNV + { + using NativeType = VkPhysicalDeviceMeshShaderPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = {}, + uint32_t maxTaskWorkGroupInvocations_ = {}, + std::array const & maxTaskWorkGroupSize_ = {}, + uint32_t maxTaskTotalMemorySize_ = {}, + uint32_t maxTaskOutputCount_ = {}, + uint32_t maxMeshWorkGroupInvocations_ = {}, + std::array const & maxMeshWorkGroupSize_ = {}, + uint32_t maxMeshTotalMemorySize_ = {}, + uint32_t maxMeshOutputVertices_ = {}, + uint32_t maxMeshOutputPrimitives_ = {}, + uint32_t maxMeshMultiviewViewCount_ = {}, + uint32_t meshOutputPerVertexGranularity_ = {}, + uint32_t meshOutputPerPrimitiveGranularity_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxDrawMeshTasksCount{ maxDrawMeshTasksCount_ } + , maxTaskWorkGroupInvocations{ maxTaskWorkGroupInvocations_ } + , maxTaskWorkGroupSize{ maxTaskWorkGroupSize_ } + , maxTaskTotalMemorySize{ maxTaskTotalMemorySize_ } + , maxTaskOutputCount{ maxTaskOutputCount_ } + , maxMeshWorkGroupInvocations{ maxMeshWorkGroupInvocations_ } + , maxMeshWorkGroupSize{ maxMeshWorkGroupSize_ } + , maxMeshTotalMemorySize{ maxMeshTotalMemorySize_ } + , maxMeshOutputVertices{ maxMeshOutputVertices_ } + , maxMeshOutputPrimitives{ maxMeshOutputPrimitives_ } + , maxMeshMultiviewViewCount{ maxMeshMultiviewViewCount_ } + , meshOutputPerVertexGranularity{ meshOutputPerVertexGranularity_ } + , meshOutputPerPrimitiveGranularity{ meshOutputPerPrimitiveGranularity_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMeshShaderPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMeshShaderPropertiesNV & operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMeshShaderPropertiesNV & operator=( 
VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMeshShaderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderPropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderPropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxDrawMeshTasksCount, + maxTaskWorkGroupInvocations, + maxTaskWorkGroupSize, + maxTaskTotalMemorySize, + maxTaskOutputCount, + maxMeshWorkGroupInvocations, + maxMeshWorkGroupSize, + maxMeshTotalMemorySize, + maxMeshOutputVertices, + maxMeshOutputPrimitives, + maxMeshMultiviewViewCount, + meshOutputPerVertexGranularity, + meshOutputPerPrimitiveGranularity ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount ) && + ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) && ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) && + ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize ) && ( maxTaskOutputCount == rhs.maxTaskOutputCount ) && + ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) && + ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize ) && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) && + ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) && ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) && + ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) && + ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity ); +# endif + } + + bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; + void * pNext = {}; + uint32_t maxDrawMeshTasksCount = {}; + uint32_t maxTaskWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxTaskWorkGroupSize = {}; + uint32_t maxTaskTotalMemorySize = {}; + uint32_t maxTaskOutputCount = {}; + uint32_t maxMeshWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxMeshWorkGroupSize = {}; + uint32_t maxMeshTotalMemorySize = {}; + uint32_t maxMeshOutputVertices = {}; + uint32_t maxMeshOutputPrimitives = {}; + uint32_t maxMeshMultiviewViewCount = {}; + uint32_t meshOutputPerVertexGranularity = {}; + uint32_t meshOutputPerPrimitiveGranularity = 
{}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMeshShaderPropertiesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceMultiDrawFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiDrawFeaturesEXT.html + struct PhysicalDeviceMultiDrawFeaturesEXT + { + using NativeType = VkPhysicalDeviceMultiDrawFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , multiDraw{ multiDraw_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiDrawFeaturesEXT( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiDrawFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultiDrawFeaturesEXT & operator=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMultiDrawFeaturesEXT & operator=( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setMultiDraw( VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ ) VULKAN_HPP_NOEXCEPT + { + multiDraw = multiDraw_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMultiDrawFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiDrawFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiDrawFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiDrawFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multiDraw ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiDrawFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiDraw == rhs.multiDraw ); +# endif + } + + bool operator!=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiDraw = {}; + }; + + template <> + struct CppType + { + using Type = 
PhysicalDeviceMultiDrawFeaturesEXT;
+  };
+
+  // wrapper struct for struct VkPhysicalDeviceMultiDrawPropertiesEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiDrawPropertiesEXT.html
+  struct PhysicalDeviceMultiDrawPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceMultiDrawPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( uint32_t maxMultiDrawCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , maxMultiDrawCount{ maxMultiDrawCount_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMultiDrawPropertiesEXT( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMultiDrawPropertiesEXT( *reinterpret_cast<PhysicalDeviceMultiDrawPropertiesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceMultiDrawPropertiesEXT & operator=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceMultiDrawPropertiesEXT & operator=( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMultiDrawPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiDrawPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceMultiDrawPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMultiDrawPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceMultiDrawPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceMultiDrawPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceMultiDrawPropertiesEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceMultiDrawPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxMultiDrawCount );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceMultiDrawPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxMultiDrawCount == rhs.maxMultiDrawCount );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT;
+    void * pNext = {};
+    uint32_t maxMultiDrawCount = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiDrawPropertiesEXT>
+  {
+    using Type = PhysicalDeviceMultiDrawPropertiesEXT;
+  };
+
+  // wrapper struct for struct VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT.html
+  struct PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , multisampledRenderToSingleSampled{ multisampledRenderToSingleSampled_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( + *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & + operator=( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & + operator=( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & + setMultisampledRenderToSingleSampled( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled_ ) VULKAN_HPP_NOEXCEPT + { + multisampledRenderToSingleSampled = multisampledRenderToSingleSampled_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multisampledRenderToSingleSampled ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multisampledRenderToSingleSampled == rhs.multisampledRenderToSingleSampled ); +# endif + } + + bool operator!=( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( 
rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceMultiviewFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewFeatures.html + struct PhysicalDeviceMultiviewFeatures + { + using NativeType = VkPhysicalDeviceMultiviewFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , multiview{ multiview_ } + , multiviewGeometryShader{ multiviewGeometryShader_ } + , multiviewTessellationShader{ multiviewTessellationShader_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultiviewFeatures & operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT + { + multiview = multiview_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & + setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewGeometryShader = multiviewGeometryShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & + setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewTessellationShader = multiviewTessellationShader_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + 
std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multiview, multiviewGeometryShader, multiviewTessellationShader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiview == rhs.multiview ) && ( multiviewGeometryShader == rhs.multiviewGeometryShader ) && + ( multiviewTessellationShader == rhs.multiviewTessellationShader ); +# endif + } + + bool operator!=( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiview = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewFeatures; + }; + + using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures; + + // wrapper struct for struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX.html + struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX + { + using NativeType = VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , perViewPositionAllComponents{ perViewPositionAllComponents_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & + operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *() const VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, perViewPositionAllComponents ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents ); +# endif + } + + bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + }; + + // wrapper struct for struct VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM.html + struct PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM + { + using NativeType = VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewRenderAreas_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , multiviewPerViewRenderAreas{ multiviewPerViewRenderAreas_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & + operator=( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & operator=( VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & + 
setMultiviewPerViewRenderAreas( VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewRenderAreas_ ) VULKAN_HPP_NOEXCEPT + { + multiviewPerViewRenderAreas = multiviewPerViewRenderAreas_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multiviewPerViewRenderAreas ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiviewPerViewRenderAreas == rhs.multiviewPerViewRenderAreas ); +# endif + } + + bool operator!=( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewRenderAreas = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + }; + + // wrapper struct for struct VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM.html + struct PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM + { + using NativeType = VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewViewports_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , multiviewPerViewViewports{ multiviewPerViewViewports_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & + operator=( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif 
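+    // Enable sketch, illustrative only: a feature struct like this one is switched on by
+    // chaining it into the pNext chain of the vk::DeviceCreateInfo passed to createDevice.
+    // createInfo is an assumed local, not part of this header:
+    //
+    //   vk::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM perViewViewports{};
+    //   perViewViewports.setMultiviewPerViewViewports( VK_TRUE );
+    //   createInfo.setPNext( &perViewViewports );   // then physicalDevice.createDevice( createInfo )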
/*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & operator=( VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & + setMultiviewPerViewViewports( VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewViewports_ ) VULKAN_HPP_NOEXCEPT + { + multiviewPerViewViewports = multiviewPerViewViewports_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multiviewPerViewViewports ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiviewPerViewViewports == rhs.multiviewPerViewViewports ); +# endif + } + + bool operator!=( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewViewports = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + }; + + // wrapper struct for struct VkPhysicalDeviceMultiviewProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewProperties.html + struct PhysicalDeviceMultiviewProperties + { + using NativeType = VkPhysicalDeviceMultiviewProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = {}, + uint32_t maxMultiviewInstanceIndex_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxMultiviewViewCount{ maxMultiviewViewCount_ } + , maxMultiviewInstanceIndex{ maxMultiviewInstanceIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties 
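+    // Properties structs such as this one are read back rather than set. A minimal query
+    // sketch, assuming a vk::PhysicalDevice named physicalDevice:
+    //
+    //   vk::PhysicalDeviceMultiviewProperties multiviewProps{};
+    //   vk::PhysicalDeviceProperties2         props2{};
+    //   props2.pNext = &multiviewProps;
+    //   physicalDevice.getProperties2( &props2 );   // fills maxMultiviewViewCount and maxMultiviewInstanceIndex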
const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultiviewProperties & operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMultiviewProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxMultiviewViewCount, maxMultiviewInstanceIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) && + ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ); +# endif + } + + bool operator!=( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties; + void * pNext = {}; + uint32_t maxMultiviewViewCount = {}; + uint32_t maxMultiviewInstanceIndex = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewProperties; + }; + + using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties; + + // wrapper struct for struct VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT.html + struct PhysicalDeviceMutableDescriptorTypeFeaturesEXT + { + using NativeType = VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , mutableDescriptorType{ mutableDescriptorType_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMutableDescriptorTypeFeaturesEXT( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMutableDescriptorTypeFeaturesEXT( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : 
PhysicalDeviceMutableDescriptorTypeFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMutableDescriptorTypeFeaturesEXT & operator=( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMutableDescriptorTypeFeaturesEXT & operator=( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesEXT & + setMutableDescriptorType( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ ) VULKAN_HPP_NOEXCEPT + { + mutableDescriptorType = mutableDescriptorType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mutableDescriptorType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mutableDescriptorType == rhs.mutableDescriptorType ); +# endif + } + + bool operator!=( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMutableDescriptorTypeFeaturesEXT; + }; + + using PhysicalDeviceMutableDescriptorTypeFeaturesVALVE = PhysicalDeviceMutableDescriptorTypeFeaturesEXT; + + // wrapper struct for struct VkPhysicalDeviceNestedCommandBufferFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceNestedCommandBufferFeaturesEXT.html + struct PhysicalDeviceNestedCommandBufferFeaturesEXT + { + using NativeType = VkPhysicalDeviceNestedCommandBufferFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceNestedCommandBufferFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 
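+    // Setter-chaining sketch for this struct (the VK_TRUE / VK_FALSE values are placeholders):
+    //
+    //   auto nestedFeatures = vk::PhysicalDeviceNestedCommandBufferFeaturesEXT{}
+    //                           .setNestedCommandBuffer( VK_TRUE )
+    //                           .setNestedCommandBufferRendering( VK_TRUE )
+    //                           .setNestedCommandBufferSimultaneousUse( VK_FALSE );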
nestedCommandBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferRendering_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferSimultaneousUse_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , nestedCommandBuffer{ nestedCommandBuffer_ } + , nestedCommandBufferRendering{ nestedCommandBufferRendering_ } + , nestedCommandBufferSimultaneousUse{ nestedCommandBufferSimultaneousUse_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferFeaturesEXT( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceNestedCommandBufferFeaturesEXT( VkPhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceNestedCommandBufferFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceNestedCommandBufferFeaturesEXT & operator=( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceNestedCommandBufferFeaturesEXT & operator=( VkPhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & + setNestedCommandBuffer( VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBuffer_ ) VULKAN_HPP_NOEXCEPT + { + nestedCommandBuffer = nestedCommandBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & + setNestedCommandBufferRendering( VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferRendering_ ) VULKAN_HPP_NOEXCEPT + { + nestedCommandBufferRendering = nestedCommandBufferRendering_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & + setNestedCommandBufferSimultaneousUse( VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferSimultaneousUse_ ) VULKAN_HPP_NOEXCEPT + { + nestedCommandBufferSimultaneousUse = nestedCommandBufferSimultaneousUse_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceNestedCommandBufferFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceNestedCommandBufferFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceNestedCommandBufferFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceNestedCommandBufferFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, nestedCommandBuffer, nestedCommandBufferRendering, nestedCommandBufferSimultaneousUse ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceNestedCommandBufferFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( 
nestedCommandBuffer == rhs.nestedCommandBuffer ) && + ( nestedCommandBufferRendering == rhs.nestedCommandBufferRendering ) && + ( nestedCommandBufferSimultaneousUse == rhs.nestedCommandBufferSimultaneousUse ); +# endif + } + + bool operator!=( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceNestedCommandBufferFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferRendering = {}; + VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferSimultaneousUse = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceNestedCommandBufferFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceNestedCommandBufferPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceNestedCommandBufferPropertiesEXT.html + struct PhysicalDeviceNestedCommandBufferPropertiesEXT + { + using NativeType = VkPhysicalDeviceNestedCommandBufferPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceNestedCommandBufferPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferPropertiesEXT( uint32_t maxCommandBufferNestingLevel_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxCommandBufferNestingLevel{ maxCommandBufferNestingLevel_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceNestedCommandBufferPropertiesEXT( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceNestedCommandBufferPropertiesEXT( VkPhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceNestedCommandBufferPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceNestedCommandBufferPropertiesEXT & operator=( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceNestedCommandBufferPropertiesEXT & operator=( VkPhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxCommandBufferNestingLevel ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceNestedCommandBufferPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) 
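+      // Query sketch using a structure chain, assuming Vulkan-Hpp's enhanced mode and a
+      // vk::PhysicalDevice named physicalDevice that advertises VK_EXT_nested_command_buffer:
+      //
+      //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
+      //                                              vk::PhysicalDeviceNestedCommandBufferPropertiesEXT>();
+      //   uint32_t maxNesting =
+      //     chain.get<vk::PhysicalDeviceNestedCommandBufferPropertiesEXT>().maxCommandBufferNestingLevel;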
+ return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxCommandBufferNestingLevel == rhs.maxCommandBufferNestingLevel ); +# endif + } + + bool operator!=( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceNestedCommandBufferPropertiesEXT; + void * pNext = {}; + uint32_t maxCommandBufferNestingLevel = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceNestedCommandBufferPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT.html + struct PhysicalDeviceNonSeamlessCubeMapFeaturesEXT + { + using NativeType = VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , nonSeamlessCubeMap{ nonSeamlessCubeMap_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & operator=( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & operator=( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & + setNonSeamlessCubeMap( VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap_ ) VULKAN_HPP_NOEXCEPT + { + nonSeamlessCubeMap = nonSeamlessCubeMap_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, nonSeamlessCubeMap ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( nonSeamlessCubeMap == rhs.nonSeamlessCubeMap ); +# endif + } + + bool operator!=( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceOpacityMicromapFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceOpacityMicromapFeaturesEXT.html + struct PhysicalDeviceOpacityMicromapFeaturesEXT + { + using NativeType = VkPhysicalDeviceOpacityMicromapFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 micromap_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , micromap{ micromap_ } + , micromapCaptureReplay{ micromapCaptureReplay_ } + , micromapHostCommands{ micromapHostCommands_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapFeaturesEXT( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceOpacityMicromapFeaturesEXT( VkPhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceOpacityMicromapFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceOpacityMicromapFeaturesEXT & operator=( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceOpacityMicromapFeaturesEXT & operator=( VkPhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setMicromap( VULKAN_HPP_NAMESPACE::Bool32 micromap_ ) VULKAN_HPP_NOEXCEPT + { + micromap = micromap_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & + setMicromapCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + micromapCaptureReplay = micromapCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & + setMicromapHostCommands( VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands_ ) VULKAN_HPP_NOEXCEPT + { + micromapHostCommands = 
micromapHostCommands_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceOpacityMicromapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceOpacityMicromapFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceOpacityMicromapFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceOpacityMicromapFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, micromap, micromapCaptureReplay, micromapHostCommands ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceOpacityMicromapFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( micromap == rhs.micromap ) && ( micromapCaptureReplay == rhs.micromapCaptureReplay ) && + ( micromapHostCommands == rhs.micromapHostCommands ); +# endif + } + + bool operator!=( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 micromap = {}; + VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceOpacityMicromapFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceOpacityMicromapPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceOpacityMicromapPropertiesEXT.html + struct PhysicalDeviceOpacityMicromapPropertiesEXT + { + using NativeType = VkPhysicalDeviceOpacityMicromapPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapPropertiesEXT( uint32_t maxOpacity2StateSubdivisionLevel_ = {}, + uint32_t maxOpacity4StateSubdivisionLevel_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxOpacity2StateSubdivisionLevel{ maxOpacity2StateSubdivisionLevel_ } + , maxOpacity4StateSubdivisionLevel{ maxOpacity4StateSubdivisionLevel_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapPropertiesEXT( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceOpacityMicromapPropertiesEXT( VkPhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceOpacityMicromapPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceOpacityMicromapPropertiesEXT & operator=( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceOpacityMicromapPropertiesEXT & 
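+    // Feature-query sketch: VK_EXT_opacity_micromap is layered on VK_KHR_acceleration_structure,
+    // so both feature structs are typically chained behind vk::PhysicalDeviceFeatures2.
+    // physicalDevice is an assumed handle, not part of this header:
+    //
+    //   vk::PhysicalDeviceAccelerationStructureFeaturesKHR accelFeatures{};
+    //   vk::PhysicalDeviceOpacityMicromapFeaturesEXT       micromapFeatures{};
+    //   accelFeatures.pNext = &micromapFeatures;
+    //   vk::PhysicalDeviceFeatures2 features2{};
+    //   features2.pNext = &accelFeatures;
+    //   physicalDevice.getFeatures2( &features2 );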
operator=( VkPhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceOpacityMicromapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceOpacityMicromapPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceOpacityMicromapPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceOpacityMicromapPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxOpacity2StateSubdivisionLevel, maxOpacity4StateSubdivisionLevel ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceOpacityMicromapPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxOpacity2StateSubdivisionLevel == rhs.maxOpacity2StateSubdivisionLevel ) && + ( maxOpacity4StateSubdivisionLevel == rhs.maxOpacity4StateSubdivisionLevel ); +# endif + } + + bool operator!=( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT; + void * pNext = {}; + uint32_t maxOpacity2StateSubdivisionLevel = {}; + uint32_t maxOpacity4StateSubdivisionLevel = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceOpacityMicromapPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceOpticalFlowFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceOpticalFlowFeaturesNV.html + struct PhysicalDeviceOpticalFlowFeaturesNV + { + using NativeType = VkPhysicalDeviceOpticalFlowFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpticalFlowFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 opticalFlow_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , opticalFlow{ opticalFlow_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceOpticalFlowFeaturesNV( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceOpticalFlowFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceOpticalFlowFeaturesNV & operator=( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceOpticalFlowFeaturesNV & operator=( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setOpticalFlow( VULKAN_HPP_NAMESPACE::Bool32 opticalFlow_ ) VULKAN_HPP_NOEXCEPT + { + opticalFlow = opticalFlow_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceOpticalFlowFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceOpticalFlowFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceOpticalFlowFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceOpticalFlowFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, opticalFlow ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceOpticalFlowFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opticalFlow == rhs.opticalFlow ); +# endif + } + + bool operator!=( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpticalFlowFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 opticalFlow = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceOpticalFlowFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceOpticalFlowPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceOpticalFlowPropertiesNV.html + struct PhysicalDeviceOpticalFlowPropertiesNV + { + using NativeType = VkPhysicalDeviceOpticalFlowPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpticalFlowPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowPropertiesNV( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedOutputGridSizes_ = {}, + VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedHintGridSizes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hintSupported_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 costSupported_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bidirectionalFlowSupported_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 globalFlowSupported_ = {}, + uint32_t minWidth_ = {}, + uint32_t minHeight_ = {}, + uint32_t maxWidth_ = {}, + uint32_t maxHeight_ = {}, + uint32_t maxNumRegionsOfInterest_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , supportedOutputGridSizes{ supportedOutputGridSizes_ } + , supportedHintGridSizes{ supportedHintGridSizes_ } + , hintSupported{ hintSupported_ } + , costSupported{ costSupported_ } + , bidirectionalFlowSupported{ bidirectionalFlowSupported_ } + , globalFlowSupported{ globalFlowSupported_ } + , minWidth{ minWidth_ } + , 
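+      // Range-check sketch before creating an optical-flow session; width, height and props
+      // are assumed locals:
+      //
+      //   bool dimensionsSupported = ( width  >= props.minWidth  ) && ( width  <= props.maxWidth  ) &&
+      //                              ( height >= props.minHeight ) && ( height <= props.maxHeight );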
minHeight{ minHeight_ } + , maxWidth{ maxWidth_ } + , maxHeight{ maxHeight_ } + , maxNumRegionsOfInterest{ maxNumRegionsOfInterest_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowPropertiesNV( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceOpticalFlowPropertiesNV( VkPhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceOpticalFlowPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceOpticalFlowPropertiesNV & operator=( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceOpticalFlowPropertiesNV & operator=( VkPhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceOpticalFlowPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceOpticalFlowPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceOpticalFlowPropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceOpticalFlowPropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + supportedOutputGridSizes, + supportedHintGridSizes, + hintSupported, + costSupported, + bidirectionalFlowSupported, + globalFlowSupported, + minWidth, + minHeight, + maxWidth, + maxHeight, + maxNumRegionsOfInterest ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceOpticalFlowPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedOutputGridSizes == rhs.supportedOutputGridSizes ) && + ( supportedHintGridSizes == rhs.supportedHintGridSizes ) && ( hintSupported == rhs.hintSupported ) && ( costSupported == rhs.costSupported ) && + ( bidirectionalFlowSupported == rhs.bidirectionalFlowSupported ) && ( globalFlowSupported == rhs.globalFlowSupported ) && + ( minWidth == rhs.minWidth ) && ( minHeight == rhs.minHeight ) && ( maxWidth == rhs.maxWidth ) && ( maxHeight == rhs.maxHeight ) && + ( maxNumRegionsOfInterest == rhs.maxNumRegionsOfInterest ); +# endif + } + + bool operator!=( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpticalFlowPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedOutputGridSizes = {}; + VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedHintGridSizes = {}; + VULKAN_HPP_NAMESPACE::Bool32 hintSupported = {}; + VULKAN_HPP_NAMESPACE::Bool32 costSupported = {}; + VULKAN_HPP_NAMESPACE::Bool32 bidirectionalFlowSupported = {}; + VULKAN_HPP_NAMESPACE::Bool32 globalFlowSupported = {}; + uint32_t minWidth = {}; + uint32_t minHeight = {}; + uint32_t maxWidth = {}; + uint32_t maxHeight = {}; + uint32_t maxNumRegionsOfInterest = {}; + }; + + template <> + 
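+  // CppType specializations like the one here map a StructureType enumerator back to its C++
+  // wrapper type. A compile-time sketch, assuming the usual vk = VULKAN_HPP_NAMESPACE alias:
+  //
+  //   static_assert( std::is_same<vk::CppType<vk::StructureType,
+  //                                           vk::StructureType::ePhysicalDeviceOpticalFlowPropertiesNV>::Type,
+  //                               vk::PhysicalDeviceOpticalFlowPropertiesNV>::value,
+  //                  "CppType resolves the enumerator to the wrapper struct" );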
struct CppType + { + using Type = PhysicalDeviceOpticalFlowPropertiesNV; + }; + + // wrapper struct for struct VkPhysicalDevicePCIBusInfoPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePCIBusInfoPropertiesEXT.html + struct PhysicalDevicePCIBusInfoPropertiesEXT + { + using NativeType = VkPhysicalDevicePCIBusInfoPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( + uint32_t pciDomain_ = {}, uint32_t pciBus_ = {}, uint32_t pciDevice_ = {}, uint32_t pciFunction_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pciDomain{ pciDomain_ } + , pciBus{ pciBus_ } + , pciDevice{ pciDevice_ } + , pciFunction{ pciFunction_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePCIBusInfoPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePCIBusInfoPropertiesEXT & operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePCIBusInfoPropertiesEXT & operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDevicePCIBusInfoPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePCIBusInfoPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePCIBusInfoPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pciDomain, pciBus, pciDevice, pciFunction ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pciDomain == rhs.pciDomain ) && ( pciBus == rhs.pciBus ) && ( pciDevice == rhs.pciDevice ) && + ( pciFunction == rhs.pciFunction ); +# endif + } + + bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; + void * pNext = {}; + uint32_t pciDomain = {}; + uint32_t pciBus = {}; + uint32_t pciDevice = {}; + uint32_t pciFunction = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePCIBusInfoPropertiesEXT; + }; + + // wrapper struct for struct 
VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT.html + struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT + { + using NativeType = VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pageableDeviceLocalMemory{ pageableDeviceLocalMemory_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & + operator=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & operator=( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & + setPageableDeviceLocalMemory( VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ ) VULKAN_HPP_NOEXCEPT + { + pageableDeviceLocalMemory = pageableDeviceLocalMemory_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pageableDeviceLocalMemory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pageableDeviceLocalMemory == rhs.pageableDeviceLocalMemory ); +# endif + } + + bool 
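+    // Enable sketch, illustrative only: deviceCreateInfo is an assumed vk::DeviceCreateInfo, and
+    // VK_EXT_pageable_device_local_memory (plus its VK_EXT_memory_priority dependency) is assumed
+    // to be in the enabled-extension list:
+    //
+    //   vk::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT pageable{};
+    //   pageable.setPageableDeviceLocalMemory( VK_TRUE );
+    //   deviceCreateInfo.setPNext( &pageable );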
operator!=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV.html + struct PhysicalDevicePartitionedAccelerationStructureFeaturesNV + { + using NativeType = VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePartitionedAccelerationStructureFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructureFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 partitionedAccelerationStructure_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , partitionedAccelerationStructure{ partitionedAccelerationStructure_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructureFeaturesNV( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePartitionedAccelerationStructureFeaturesNV( VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePartitionedAccelerationStructureFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePartitionedAccelerationStructureFeaturesNV & + operator=( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePartitionedAccelerationStructureFeaturesNV & + operator=( VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePartitionedAccelerationStructureFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePartitionedAccelerationStructureFeaturesNV & + setPartitionedAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 partitionedAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + partitionedAccelerationStructure = partitionedAccelerationStructure_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, partitionedAccelerationStructure ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( partitionedAccelerationStructure == rhs.partitionedAccelerationStructure ); +# endif + } + + bool operator!=( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePartitionedAccelerationStructureFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 partitionedAccelerationStructure = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePartitionedAccelerationStructureFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV.html + struct PhysicalDevicePartitionedAccelerationStructurePropertiesNV + { + using NativeType = VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePartitionedAccelerationStructurePropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructurePropertiesNV( uint32_t maxPartitionCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxPartitionCount{ maxPartitionCount_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructurePropertiesNV( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePartitionedAccelerationStructurePropertiesNV( VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePartitionedAccelerationStructurePropertiesNV( + *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePartitionedAccelerationStructurePropertiesNV & + operator=( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePartitionedAccelerationStructurePropertiesNV & + operator=( VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV *() 
VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxPartitionCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPartitionCount == rhs.maxPartitionCount ); +# endif + } + + bool operator!=( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePartitionedAccelerationStructurePropertiesNV; + void * pNext = {}; + uint32_t maxPartitionCount = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePartitionedAccelerationStructurePropertiesNV; + }; + + // wrapper struct for struct VkPhysicalDevicePerStageDescriptorSetFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePerStageDescriptorSetFeaturesNV.html + struct PhysicalDevicePerStageDescriptorSetFeaturesNV + { + using NativeType = VkPhysicalDevicePerStageDescriptorSetFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePerStageDescriptorSetFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 perStageDescriptorSet_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dynamicPipelineLayout_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , perStageDescriptorSet{ perStageDescriptorSet_ } + , dynamicPipelineLayout{ dynamicPipelineLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePerStageDescriptorSetFeaturesNV( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePerStageDescriptorSetFeaturesNV( VkPhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePerStageDescriptorSetFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePerStageDescriptorSetFeaturesNV & operator=( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePerStageDescriptorSetFeaturesNV & operator=( VkPhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV & + setPerStageDescriptorSet( VULKAN_HPP_NAMESPACE::Bool32 perStageDescriptorSet_ ) VULKAN_HPP_NOEXCEPT + { + perStageDescriptorSet = perStageDescriptorSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDevicePerStageDescriptorSetFeaturesNV & + setDynamicPipelineLayout( VULKAN_HPP_NAMESPACE::Bool32 dynamicPipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + dynamicPipelineLayout = dynamicPipelineLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, perStageDescriptorSet, dynamicPipelineLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePerStageDescriptorSetFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perStageDescriptorSet == rhs.perStageDescriptorSet ) && + ( dynamicPipelineLayout == rhs.dynamicPipelineLayout ); +# endif + } + + bool operator!=( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 perStageDescriptorSet = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicPipelineLayout = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePerStageDescriptorSetFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDevicePerformanceQueryFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePerformanceQueryFeaturesKHR.html + struct PhysicalDevicePerformanceQueryFeaturesKHR + { + using NativeType = VkPhysicalDevicePerformanceQueryFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , performanceCounterQueryPools{ performanceCounterQueryPools_ } + , performanceCounterMultipleQueryPools{ performanceCounterMultipleQueryPools_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePerformanceQueryFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePerformanceQueryFeaturesKHR & operator=( 
PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePerformanceQueryFeaturesKHR & operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & + setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT + { + performanceCounterQueryPools = performanceCounterQueryPools_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & + setPerformanceCounterMultipleQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT + { + performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePerformanceQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePerformanceQueryFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePerformanceQueryFeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, performanceCounterQueryPools, performanceCounterMultipleQueryPools ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( performanceCounterQueryPools == rhs.performanceCounterQueryPools ) && + ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools ); +# endif + } + + bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {}; + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePerformanceQueryFeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDevicePerformanceQueryPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePerformanceQueryPropertiesKHR.html + struct PhysicalDevicePerformanceQueryPropertiesKHR + { + using NativeType = VkPhysicalDevicePerformanceQueryPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , allowCommandBufferQueryCopies{ allowCommandBufferQueryCopies_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePerformanceQueryPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePerformanceQueryPropertiesKHR & operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePerformanceQueryPropertiesKHR & operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDevicePerformanceQueryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePerformanceQueryPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePerformanceQueryPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, allowCommandBufferQueryCopies ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies ); +# endif + } + + bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePerformanceQueryPropertiesKHR; + }; + + // wrapper struct for struct VkPhysicalDevicePipelineBinaryFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineBinaryFeaturesKHR.html + struct PhysicalDevicePipelineBinaryFeaturesKHR + { + using NativeType = VkPhysicalDevicePipelineBinaryFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineBinaryFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
PhysicalDevicePipelineBinaryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaries_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineBinaries{ pipelineBinaries_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryFeaturesKHR( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineBinaryFeaturesKHR( VkPhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineBinaryFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineBinaryFeaturesKHR & operator=( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePipelineBinaryFeaturesKHR & operator=( VkPhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryFeaturesKHR & setPipelineBinaries( VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaries_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBinaries = pipelineBinaries_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePipelineBinaryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineBinaryFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineBinaryFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineBinaryFeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineBinaries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineBinaryFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBinaries == rhs.pipelineBinaries ); +# endif + } + + bool operator!=( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineBinaryFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaries = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineBinaryFeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDevicePipelineBinaryPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineBinaryPropertiesKHR.html + struct PhysicalDevicePipelineBinaryPropertiesKHR + { + using NativeType = VkPhysicalDevicePipelineBinaryPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDevicePipelineBinaryPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryInternalCache_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryInternalCacheControl_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrefersInternalCache_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrecompiledInternalCache_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryCompressedData_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineBinaryInternalCache{ pipelineBinaryInternalCache_ } + , pipelineBinaryInternalCacheControl{ pipelineBinaryInternalCacheControl_ } + , pipelineBinaryPrefersInternalCache{ pipelineBinaryPrefersInternalCache_ } + , pipelineBinaryPrecompiledInternalCache{ pipelineBinaryPrecompiledInternalCache_ } + , pipelineBinaryCompressedData{ pipelineBinaryCompressedData_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryPropertiesKHR( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineBinaryPropertiesKHR( VkPhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineBinaryPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineBinaryPropertiesKHR & operator=( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePipelineBinaryPropertiesKHR & operator=( VkPhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDevicePipelineBinaryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineBinaryPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineBinaryPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineBinaryPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + pipelineBinaryInternalCache, + pipelineBinaryInternalCacheControl, + pipelineBinaryPrefersInternalCache, + pipelineBinaryPrecompiledInternalCache, + pipelineBinaryCompressedData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineBinaryPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBinaryInternalCache == rhs.pipelineBinaryInternalCache ) && + ( pipelineBinaryInternalCacheControl == rhs.pipelineBinaryInternalCacheControl ) && + ( pipelineBinaryPrefersInternalCache == rhs.pipelineBinaryPrefersInternalCache ) && + ( pipelineBinaryPrecompiledInternalCache == rhs.pipelineBinaryPrecompiledInternalCache ) && + ( pipelineBinaryCompressedData == rhs.pipelineBinaryCompressedData ); +# endif + } + + bool operator!=( 
PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineBinaryPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryInternalCache = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryInternalCacheControl = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrefersInternalCache = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrecompiledInternalCache = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryCompressedData = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineBinaryPropertiesKHR; + }; + + // wrapper struct for struct VkPhysicalDevicePipelineCreationCacheControlFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineCreationCacheControlFeatures.html + struct PhysicalDevicePipelineCreationCacheControlFeatures + { + using NativeType = VkPhysicalDevicePipelineCreationCacheControlFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineCreationCacheControl{ pipelineCreationCacheControl_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePipelineCreationCacheControlFeatures( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineCreationCacheControlFeatures( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineCreationCacheControlFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineCreationCacheControlFeatures & + operator=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePipelineCreationCacheControlFeatures & operator=( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & + setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCreationCacheControl = pipelineCreationCacheControl_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePipelineCreationCacheControlFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineCreationCacheControlFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineCreationCacheControlFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineCreationCacheControlFeatures *() VULKAN_HPP_NOEXCEPT + { + 
return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineCreationCacheControl ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeatures const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ); +# endif + } + + bool operator!=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineCreationCacheControlFeatures; + }; + + using PhysicalDevicePipelineCreationCacheControlFeaturesEXT = PhysicalDevicePipelineCreationCacheControlFeatures; + + // wrapper struct for struct VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR.html + struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR + { + using NativeType = VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineExecutableInfo{ pipelineExecutableInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & + operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & + setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 
pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT + { + pipelineExecutableInfo = pipelineExecutableInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineExecutableInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineExecutableInfo == rhs.pipelineExecutableInfo ); +# endif + } + + bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT.html + struct PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT + { + using NativeType = VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelineLibraryGroupHandles_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineLibraryGroupHandles{ pipelineLibraryGroupHandles_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & + operator=( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & 
operator=( VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & + setPipelineLibraryGroupHandles( VULKAN_HPP_NAMESPACE::Bool32 pipelineLibraryGroupHandles_ ) VULKAN_HPP_NOEXCEPT + { + pipelineLibraryGroupHandles = pipelineLibraryGroupHandles_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineLibraryGroupHandles ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineLibraryGroupHandles == rhs.pipelineLibraryGroupHandles ); +# endif + } + + bool operator!=( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineLibraryGroupHandles = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDevicePipelineOpacityMicromapFeaturesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineOpacityMicromapFeaturesARM.html + struct PhysicalDevicePipelineOpacityMicromapFeaturesARM + { + using NativeType = VkPhysicalDevicePipelineOpacityMicromapFeaturesARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineOpacityMicromapFeaturesARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineOpacityMicromapFeaturesARM( VULKAN_HPP_NAMESPACE::Bool32 pipelineOpacityMicromap_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineOpacityMicromap{ pipelineOpacityMicromap_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePipelineOpacityMicromapFeaturesARM( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineOpacityMicromapFeaturesARM( VkPhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineOpacityMicromapFeaturesARM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineOpacityMicromapFeaturesARM & operator=( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePipelineOpacityMicromapFeaturesARM & operator=( VkPhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineOpacityMicromapFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineOpacityMicromapFeaturesARM & + setPipelineOpacityMicromap( VULKAN_HPP_NAMESPACE::Bool32 pipelineOpacityMicromap_ ) VULKAN_HPP_NOEXCEPT + { + pipelineOpacityMicromap = pipelineOpacityMicromap_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePipelineOpacityMicromapFeaturesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineOpacityMicromapFeaturesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineOpacityMicromapFeaturesARM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineOpacityMicromapFeaturesARM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineOpacityMicromap ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineOpacityMicromap == rhs.pipelineOpacityMicromap ); +# endif + } + + bool operator!=( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineOpacityMicromapFeaturesARM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineOpacityMicromap = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineOpacityMicromapFeaturesARM; + }; + + // wrapper struct for struct VkPhysicalDevicePipelinePropertiesFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelinePropertiesFeaturesEXT.html + struct PhysicalDevicePipelinePropertiesFeaturesEXT + { + using NativeType = VkPhysicalDevicePipelinePropertiesFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) 
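    // Usage sketch: feature query structs such as PhysicalDevicePipelinePropertiesFeaturesEXT are
    // filled in by chaining them onto a vk::PhysicalDeviceFeatures2 via pNext. Minimal example,
    // assuming a valid vk::PhysicalDevice named `physicalDevice` and a driver exposing
    // VK_EXT_pipeline_properties (names are illustrative, not part of this header):
    //
    //   vk::PhysicalDevicePipelinePropertiesFeaturesEXT pipelinePropertiesFeatures;
    //   vk::PhysicalDeviceFeatures2                     features2;
    //   features2.pNext = &pipelinePropertiesFeatures;   // chain the extension struct
    //   physicalDevice.getFeatures2( &features2 );       // fills both structs
    //   bool supported = ( pipelinePropertiesFeatures.pipelinePropertiesIdentifier == VK_TRUE );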
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePipelinePropertiesFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelinePropertiesIdentifier{ pipelinePropertiesIdentifier_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelinePropertiesFeaturesEXT( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelinePropertiesFeaturesEXT( VkPhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelinePropertiesFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelinePropertiesFeaturesEXT & operator=( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePipelinePropertiesFeaturesEXT & operator=( VkPhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT & + setPipelinePropertiesIdentifier( VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier_ ) VULKAN_HPP_NOEXCEPT + { + pipelinePropertiesIdentifier = pipelinePropertiesIdentifier_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePipelinePropertiesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelinePropertiesFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelinePropertiesFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelinePropertiesFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelinePropertiesIdentifier ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelinePropertiesFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelinePropertiesIdentifier == rhs.pipelinePropertiesIdentifier ); +# endif + } + + bool operator!=( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelinePropertiesFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDevicePipelineProtectedAccessFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineProtectedAccessFeatures.html + struct 
PhysicalDevicePipelineProtectedAccessFeatures + { + using NativeType = VkPhysicalDevicePipelineProtectedAccessFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineProtectedAccessFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineProtectedAccessFeatures( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineProtectedAccess{ pipelineProtectedAccess_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePipelineProtectedAccessFeatures( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineProtectedAccessFeatures( VkPhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineProtectedAccessFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineProtectedAccessFeatures & operator=( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePipelineProtectedAccessFeatures & operator=( VkPhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeatures & + setPipelineProtectedAccess( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ ) VULKAN_HPP_NOEXCEPT + { + pipelineProtectedAccess = pipelineProtectedAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePipelineProtectedAccessFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineProtectedAccessFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineProtectedAccessFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineProtectedAccessFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineProtectedAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineProtectedAccessFeatures const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineProtectedAccess == rhs.pipelineProtectedAccess ); +# endif + } + + bool operator!=( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineProtectedAccessFeatures; + void * pNext = {}; + 
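    // Usage sketch: to turn a reported feature on, the same struct is chained into
    // vk::DeviceCreateInfo::pNext at device creation. Minimal example, assuming `physicalDevice`
    // and a filled `queueCreateInfo` already exist and the feature was reported as supported
    // (both names are illustrative, not part of this header):
    //
    //   vk::PhysicalDevicePipelineProtectedAccessFeatures protectedAccessFeatures{ VK_TRUE };
    //   vk::DeviceCreateInfo                              deviceCreateInfo{};
    //   deviceCreateInfo.setQueueCreateInfos( queueCreateInfo ).setPNext( &protectedAccessFeatures );
    //   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );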
VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineProtectedAccessFeatures>
+  {
+    using Type = PhysicalDevicePipelineProtectedAccessFeatures;
+  };
+
+  using PhysicalDevicePipelineProtectedAccessFeaturesEXT = PhysicalDevicePipelineProtectedAccessFeatures;
+
+  // wrapper struct for struct VkPhysicalDevicePipelineRobustnessFeatures, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineRobustnessFeatures.html
+  struct PhysicalDevicePipelineRobustnessFeatures
+  {
+    using NativeType = VkPhysicalDevicePipelineRobustnessFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessFeatures;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeatures( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ = {},
+                                                                   void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , pipelineRobustness{ pipelineRobustness_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeatures( PhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePipelineRobustnessFeatures( VkPhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePipelineRobustnessFeatures( *reinterpret_cast<PhysicalDevicePipelineRobustnessFeatures const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDevicePipelineRobustnessFeatures & operator=( PhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDevicePipelineRobustnessFeatures & operator=( VkPhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeatures &
+      setPipelineRobustness( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineRobustness = pipelineRobustness_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPhysicalDevicePipelineRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePipelineRobustnessFeatures *>( this );
+    }
+
+    operator VkPhysicalDevicePipelineRobustnessFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePipelineRobustnessFeatures *>( this );
+    }
+
+    operator VkPhysicalDevicePipelineRobustnessFeatures const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDevicePipelineRobustnessFeatures *>( this );
+    }
+
+    operator VkPhysicalDevicePipelineRobustnessFeatures *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDevicePipelineRobustnessFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pipelineRobustness );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePipelineRobustnessFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePipelineRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineRobustness == rhs.pipelineRobustness );
+#  endif
+    }
+
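    // Usage sketch: the structure-chain helpers avoid manual pNext wiring for the same kind of
    // query. Minimal example, assuming a valid vk::PhysicalDevice named `physicalDevice`
    // (the name is illustrative, not part of this header):
    //
    //   auto chain = physicalDevice.getFeatures2< vk::PhysicalDeviceFeatures2,
    //                                             vk::PhysicalDevicePipelineRobustnessFeatures >();
    //   auto const & robustnessFeatures = chain.get< vk::PhysicalDevicePipelineRobustnessFeatures >();
    //   bool supported = ( robustnessFeatures.pipelineRobustness == VK_TRUE );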
bool operator!=( PhysicalDevicePipelineRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineRobustnessFeatures; + }; + + using PhysicalDevicePipelineRobustnessFeaturesEXT = PhysicalDevicePipelineRobustnessFeatures; + + // wrapper struct for struct VkPhysicalDevicePipelineRobustnessProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineRobustnessProperties.html + struct PhysicalDevicePipelineRobustnessProperties + { + using NativeType = VkPhysicalDevicePipelineRobustnessProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessProperties( + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior defaultRobustnessImages_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , defaultRobustnessStorageBuffers{ defaultRobustnessStorageBuffers_ } + , defaultRobustnessUniformBuffers{ defaultRobustnessUniformBuffers_ } + , defaultRobustnessVertexInputs{ defaultRobustnessVertexInputs_ } + , defaultRobustnessImages{ defaultRobustnessImages_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessProperties( PhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineRobustnessProperties( VkPhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineRobustnessProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineRobustnessProperties & operator=( PhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePipelineRobustnessProperties & operator=( VkPhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDevicePipelineRobustnessProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineRobustnessProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineRobustnessProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineRobustnessProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# 
else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, defaultRobustnessStorageBuffers, defaultRobustnessUniformBuffers, defaultRobustnessVertexInputs, defaultRobustnessImages ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineRobustnessProperties const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineRobustnessProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( defaultRobustnessStorageBuffers == rhs.defaultRobustnessStorageBuffers ) && + ( defaultRobustnessUniformBuffers == rhs.defaultRobustnessUniformBuffers ) && + ( defaultRobustnessVertexInputs == rhs.defaultRobustnessVertexInputs ) && ( defaultRobustnessImages == rhs.defaultRobustnessImages ); +# endif + } + + bool operator!=( PhysicalDevicePipelineRobustnessProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior defaultRobustnessImages = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineRobustnessProperties; + }; + + using PhysicalDevicePipelineRobustnessPropertiesEXT = PhysicalDevicePipelineRobustnessProperties; + + // wrapper struct for struct VkPhysicalDevicePointClippingProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePointClippingProperties.html + struct PhysicalDevicePointClippingProperties + { + using NativeType = VkPhysicalDevicePointClippingProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePointClippingProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pointClippingBehavior{ pointClippingBehavior_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePointClippingProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePointClippingProperties & operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePointClippingProperties & operator=( 
VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDevicePointClippingProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePointClippingProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePointClippingProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pointClippingBehavior ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePointClippingProperties const & ) const = default; +#else + bool operator==( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pointClippingBehavior == rhs.pointClippingBehavior ); +# endif + } + + bool operator!=( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePointClippingProperties; + }; + + using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + // wrapper struct for struct VkPhysicalDevicePortabilitySubsetFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePortabilitySubsetFeaturesKHR.html + struct PhysicalDevicePortabilitySubsetFeaturesKHR + { + using NativeType = VkPhysicalDevicePortabilitySubsetFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 events_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ = {}, + void 
* pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , constantAlphaColorBlendFactors{ constantAlphaColorBlendFactors_ } + , events{ events_ } + , imageViewFormatReinterpretation{ imageViewFormatReinterpretation_ } + , imageViewFormatSwizzle{ imageViewFormatSwizzle_ } + , imageView2DOn3DImage{ imageView2DOn3DImage_ } + , multisampleArrayImage{ multisampleArrayImage_ } + , mutableComparisonSamplers{ mutableComparisonSamplers_ } + , pointPolygons{ pointPolygons_ } + , samplerMipLodBias{ samplerMipLodBias_ } + , separateStencilMaskRef{ separateStencilMaskRef_ } + , shaderSampleRateInterpolationFunctions{ shaderSampleRateInterpolationFunctions_ } + , tessellationIsolines{ tessellationIsolines_ } + , tessellationPointMode{ tessellationPointMode_ } + , triangleFans{ triangleFans_ } + , vertexAttributeAccessBeyondStride{ vertexAttributeAccessBeyondStride_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePortabilitySubsetFeaturesKHR( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePortabilitySubsetFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setConstantAlphaColorBlendFactors( VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ ) VULKAN_HPP_NOEXCEPT + { + constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setEvents( VULKAN_HPP_NAMESPACE::Bool32 events_ ) VULKAN_HPP_NOEXCEPT + { + events = events_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setImageViewFormatReinterpretation( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ ) VULKAN_HPP_NOEXCEPT + { + imageViewFormatReinterpretation = imageViewFormatReinterpretation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setImageViewFormatSwizzle( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ ) VULKAN_HPP_NOEXCEPT + { + imageViewFormatSwizzle = imageViewFormatSwizzle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setImageView2DOn3DImage( VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ ) VULKAN_HPP_NOEXCEPT + { + imageView2DOn3DImage = imageView2DOn3DImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setMultisampleArrayImage( VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ ) VULKAN_HPP_NOEXCEPT + { + multisampleArrayImage = multisampleArrayImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setMutableComparisonSamplers( VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ ) 
VULKAN_HPP_NOEXCEPT + { + mutableComparisonSamplers = mutableComparisonSamplers_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPointPolygons( VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ ) VULKAN_HPP_NOEXCEPT + { + pointPolygons = pointPolygons_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setSamplerMipLodBias( VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ ) VULKAN_HPP_NOEXCEPT + { + samplerMipLodBias = samplerMipLodBias_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setSeparateStencilMaskRef( VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ ) VULKAN_HPP_NOEXCEPT + { + separateStencilMaskRef = separateStencilMaskRef_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setShaderSampleRateInterpolationFunctions( VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ ) VULKAN_HPP_NOEXCEPT + { + shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setTessellationIsolines( VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ ) VULKAN_HPP_NOEXCEPT + { + tessellationIsolines = tessellationIsolines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setTessellationPointMode( VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ ) VULKAN_HPP_NOEXCEPT + { + tessellationPointMode = tessellationPointMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTriangleFans( VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ ) VULKAN_HPP_NOEXCEPT + { + triangleFans = triangleFans_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setVertexAttributeAccessBeyondStride( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePortabilitySubsetFeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + constantAlphaColorBlendFactors, + events, + imageViewFormatReinterpretation, + imageViewFormatSwizzle, + imageView2DOn3DImage, + multisampleArrayImage, + mutableComparisonSamplers, + pointPolygons, + samplerMipLodBias, + separateStencilMaskRef, + shaderSampleRateInterpolationFunctions, + tessellationIsolines, + tessellationPointMode, + triangleFans, + vertexAttributeAccessBeyondStride ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePortabilitySubsetFeaturesKHR const & ) const = default; +# else + bool operator==( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( 
VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors ) && + ( events == rhs.events ) && ( imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation ) && + ( imageViewFormatSwizzle == rhs.imageViewFormatSwizzle ) && ( imageView2DOn3DImage == rhs.imageView2DOn3DImage ) && + ( multisampleArrayImage == rhs.multisampleArrayImage ) && ( mutableComparisonSamplers == rhs.mutableComparisonSamplers ) && + ( pointPolygons == rhs.pointPolygons ) && ( samplerMipLodBias == rhs.samplerMipLodBias ) && + ( separateStencilMaskRef == rhs.separateStencilMaskRef ) && + ( shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions ) && ( tessellationIsolines == rhs.tessellationIsolines ) && + ( tessellationPointMode == rhs.tessellationPointMode ) && ( triangleFans == rhs.triangleFans ) && + ( vertexAttributeAccessBeyondStride == rhs.vertexAttributeAccessBeyondStride ); +# endif + } + + bool operator!=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors = {}; + VULKAN_HPP_NAMESPACE::Bool32 events = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage = {}; + VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage = {}; + VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers = {}; + VULKAN_HPP_NAMESPACE::Bool32 pointPolygons = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias = {}; + VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions = {}; + VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines = {}; + VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 triangleFans = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePortabilitySubsetFeaturesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + // wrapper struct for struct VkPhysicalDevicePortabilitySubsetPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePortabilitySubsetPropertiesKHR.html + struct PhysicalDevicePortabilitySubsetPropertiesKHR + { + using NativeType = VkPhysicalDevicePortabilitySubsetPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( uint32_t minVertexInputBindingStrideAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minVertexInputBindingStrideAlignment{ minVertexInputBindingStrideAlignment_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePortabilitySubsetPropertiesKHR( 
VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePortabilitySubsetPropertiesKHR( *reinterpret_cast<PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs ) ) + { + } + + PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs ); + return *this; + } + + operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetPropertiesKHR *>( this ); + } + + operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetPropertiesKHR *>( this ); + } + + operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<const VkPhysicalDevicePortabilitySubsetPropertiesKHR *>( this ); + } + + operator VkPhysicalDevicePortabilitySubsetPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<VkPhysicalDevicePortabilitySubsetPropertiesKHR *>( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minVertexInputBindingStrideAlignment ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePortabilitySubsetPropertiesKHR const & ) const = default; +# else + bool operator==( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment ); +# endif + } + + bool operator!=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR; + void * pNext = {}; + uint32_t minVertexInputBindingStrideAlignment = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR> + { + using Type = PhysicalDevicePortabilitySubsetPropertiesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + // wrapper struct for struct VkPhysicalDevicePresentBarrierFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentBarrierFeaturesNV.html + struct PhysicalDevicePresentBarrierFeaturesNV + { + using NativeType = VkPhysicalDevicePresentBarrierFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentBarrierFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 presentBarrier_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentBarrier{ presentBarrier_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePresentBarrierFeaturesNV( VkPhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePresentBarrierFeaturesNV( *reinterpret_cast<PhysicalDevicePresentBarrierFeaturesNV const *>( &rhs ) ) + { + } + + PhysicalDevicePresentBarrierFeaturesNV & operator=( 
PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePresentBarrierFeaturesNV & operator=( VkPhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV & setPresentBarrier( VULKAN_HPP_NAMESPACE::Bool32 presentBarrier_ ) VULKAN_HPP_NOEXCEPT + { + presentBarrier = presentBarrier_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePresentBarrierFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentBarrierFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentBarrierFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentBarrierFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentBarrier ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePresentBarrierFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentBarrier == rhs.presentBarrier ); +# endif + } + + bool operator!=( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentBarrierFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 presentBarrier = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePresentBarrierFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDevicePresentIdFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentIdFeaturesKHR.html + struct PhysicalDevicePresentIdFeaturesKHR + { + using NativeType = VkPhysicalDevicePresentIdFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentIdFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 presentId_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentId{ presentId_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePresentIdFeaturesKHR( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePresentIdFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePresentIdFeaturesKHR & 
operator=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePresentIdFeaturesKHR & operator=( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPresentId( VULKAN_HPP_NAMESPACE::Bool32 presentId_ ) VULKAN_HPP_NOEXCEPT + { + presentId = presentId_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePresentIdFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentIdFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentIdFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentIdFeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentId ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePresentIdFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentId == rhs.presentId ); +# endif + } + + bool operator!=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentIdFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 presentId = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePresentIdFeaturesKHR; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + // wrapper struct for struct VkPhysicalDevicePresentMeteringFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentMeteringFeaturesNV.html + struct PhysicalDevicePresentMeteringFeaturesNV + { + using NativeType = VkPhysicalDevicePresentMeteringFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentMeteringFeaturesNV; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePresentMeteringFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 presentMetering_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentMetering{ presentMetering_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePresentMeteringFeaturesNV( PhysicalDevicePresentMeteringFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePresentMeteringFeaturesNV( VkPhysicalDevicePresentMeteringFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePresentMeteringFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + 
PhysicalDevicePresentMeteringFeaturesNV & operator=( PhysicalDevicePresentMeteringFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePresentMeteringFeaturesNV & operator=( VkPhysicalDevicePresentMeteringFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentMeteringFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentMeteringFeaturesNV & setPresentMetering( VULKAN_HPP_NAMESPACE::Bool32 presentMetering_ ) VULKAN_HPP_NOEXCEPT + { + presentMetering = presentMetering_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePresentMeteringFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentMeteringFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentMeteringFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentMeteringFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentMetering ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePresentMeteringFeaturesNV const & ) const = default; +# else + bool operator==( PhysicalDevicePresentMeteringFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMetering == rhs.presentMetering ); +# endif + } + + bool operator!=( PhysicalDevicePresentMeteringFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentMeteringFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 presentMetering = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePresentMeteringFeaturesNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + // wrapper struct for struct VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT.html + struct PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT + { + using NativeType = VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 presentModeFifoLatestReady_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentModeFifoLatestReady{ presentModeFifoLatestReady_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT( 
PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT( VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT & + operator=( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT & operator=( VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT & + setPresentModeFifoLatestReady( VULKAN_HPP_NAMESPACE::Bool32 presentModeFifoLatestReady_ ) VULKAN_HPP_NOEXCEPT + { + presentModeFifoLatestReady = presentModeFifoLatestReady_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentModeFifoLatestReady ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeFifoLatestReady == rhs.presentModeFifoLatestReady ); +# endif + } + + bool operator!=( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 presentModeFifoLatestReady = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDevicePresentWaitFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentWaitFeaturesKHR.html + struct PhysicalDevicePresentWaitFeaturesKHR + { + using NativeType = VkPhysicalDevicePresentWaitFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDevicePresentWaitFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 presentWait_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentWait{ presentWait_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePresentWaitFeaturesKHR( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePresentWaitFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePresentWaitFeaturesKHR & operator=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePresentWaitFeaturesKHR & operator=( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & setPresentWait( VULKAN_HPP_NAMESPACE::Bool32 presentWait_ ) VULKAN_HPP_NOEXCEPT + { + presentWait = presentWait_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePresentWaitFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentWaitFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentWaitFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentWaitFeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentWait ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePresentWaitFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentWait == rhs.presentWait ); +# endif + } + + bool operator!=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentWaitFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 presentWait = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePresentWaitFeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT.html + struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT + { + using NativeType = VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + + static const bool allowDuplicate = false; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , primitiveTopologyListRestart{ primitiveTopologyListRestart_ } + , primitiveTopologyPatchListRestart{ primitiveTopologyPatchListRestart_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & + operator=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & operator=( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & + setPrimitiveTopologyListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ ) VULKAN_HPP_NOEXCEPT + { + primitiveTopologyListRestart = primitiveTopologyListRestart_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & + setPrimitiveTopologyPatchListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ ) VULKAN_HPP_NOEXCEPT + { + primitiveTopologyPatchListRestart = primitiveTopologyPatchListRestart_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, primitiveTopologyListRestart, primitiveTopologyPatchListRestart ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT 
) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitiveTopologyListRestart == rhs.primitiveTopologyListRestart ) && + ( primitiveTopologyPatchListRestart == rhs.primitiveTopologyPatchListRestart ); +# endif + } + + bool operator!=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT.html + struct PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT + { + using NativeType = VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , primitivesGeneratedQuery{ primitivesGeneratedQuery_ } + , primitivesGeneratedQueryWithRasterizerDiscard{ primitivesGeneratedQueryWithRasterizerDiscard_ } + , primitivesGeneratedQueryWithNonZeroStreams{ primitivesGeneratedQueryWithNonZeroStreams_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & + operator=( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & operator=( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & + setPrimitivesGeneratedQuery( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery_ ) VULKAN_HPP_NOEXCEPT + { + primitivesGeneratedQuery = primitivesGeneratedQuery_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & + 
setPrimitivesGeneratedQueryWithRasterizerDiscard( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ ) VULKAN_HPP_NOEXCEPT + { + primitivesGeneratedQueryWithRasterizerDiscard = primitivesGeneratedQueryWithRasterizerDiscard_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & + setPrimitivesGeneratedQueryWithNonZeroStreams( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams_ ) VULKAN_HPP_NOEXCEPT + { + primitivesGeneratedQueryWithNonZeroStreams = primitivesGeneratedQueryWithNonZeroStreams_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, primitivesGeneratedQuery, primitivesGeneratedQueryWithRasterizerDiscard, primitivesGeneratedQueryWithNonZeroStreams ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitivesGeneratedQuery == rhs.primitivesGeneratedQuery ) && + ( primitivesGeneratedQueryWithRasterizerDiscard == rhs.primitivesGeneratedQueryWithRasterizerDiscard ) && + ( primitivesGeneratedQueryWithNonZeroStreams == rhs.primitivesGeneratedQueryWithNonZeroStreams ); +# endif + } + + bool operator!=( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDevicePrivateDataFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePrivateDataFeatures.html + struct PhysicalDevicePrivateDataFeatures + { + using NativeType = VkPhysicalDevicePrivateDataFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrivateDataFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}, void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , privateData{ privateData_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePrivateDataFeatures( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePrivateDataFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePrivateDataFeatures & operator=( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePrivateDataFeatures & operator=( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT + { + privateData = privateData_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePrivateDataFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePrivateDataFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePrivateDataFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePrivateDataFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, privateData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePrivateDataFeatures const & ) const = default; +#else + bool operator==( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( privateData == rhs.privateData ); +# endif + } + + bool operator!=( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrivateDataFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 privateData = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePrivateDataFeatures; + }; + + using PhysicalDevicePrivateDataFeaturesEXT = PhysicalDevicePrivateDataFeatures; + + // wrapper struct for struct VkPhysicalDeviceProtectedMemoryFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProtectedMemoryFeatures.html + struct PhysicalDeviceProtectedMemoryFeatures + { + using NativeType = VkPhysicalDeviceProtectedMemoryFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, + void * 
pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , protectedMemory{ protectedMemory_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProtectedMemoryFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceProtectedMemoryFeatures & operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceProtectedMemoryFeatures & operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT + { + protectedMemory = protectedMemory_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceProtectedMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProtectedMemoryFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProtectedMemoryFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, protectedMemory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedMemory == rhs.protectedMemory ); +# endif + } + + bool operator!=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceProtectedMemoryFeatures; + }; + + // wrapper struct for struct VkPhysicalDeviceProtectedMemoryProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProtectedMemoryProperties.html + struct PhysicalDeviceProtectedMemoryProperties + { + using NativeType = VkPhysicalDeviceProtectedMemoryProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , protectedNoFault{ protectedNoFault_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProtectedMemoryProperties( *reinterpret_cast<PhysicalDeviceProtectedMemoryProperties const *>( &rhs ) ) + { + } + + PhysicalDeviceProtectedMemoryProperties & operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceProtectedMemoryProperties & operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>( &rhs ); + return *this; + } + + operator VkPhysicalDeviceProtectedMemoryProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties *>( this ); + } + + operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties *>( this ); + } + + operator VkPhysicalDeviceProtectedMemoryProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties *>( this ); + } + + operator VkPhysicalDeviceProtectedMemoryProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, protectedNoFault ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProtectedMemoryProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedNoFault == rhs.protectedNoFault ); +# endif + } + + bool operator!=( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryProperties> + { + using Type = PhysicalDeviceProtectedMemoryProperties; + }; + + // wrapper struct for struct VkPhysicalDeviceProvokingVertexFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProvokingVertexFeaturesEXT.html + struct PhysicalDeviceProvokingVertexFeaturesEXT + { + using NativeType = VkPhysicalDeviceProvokingVertexFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , provokingVertexLast{ provokingVertexLast_ } + , transformFeedbackPreservesProvokingVertex{ 
transformFeedbackPreservesProvokingVertex_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProvokingVertexFeaturesEXT( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProvokingVertexFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceProvokingVertexFeaturesEXT & operator=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceProvokingVertexFeaturesEXT & operator=( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & + setProvokingVertexLast( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ ) VULKAN_HPP_NOEXCEPT + { + provokingVertexLast = provokingVertexLast_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & + setTransformFeedbackPreservesProvokingVertex( VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ ) VULKAN_HPP_NOEXCEPT + { + transformFeedbackPreservesProvokingVertex = transformFeedbackPreservesProvokingVertex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceProvokingVertexFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProvokingVertexFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProvokingVertexFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProvokingVertexFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, provokingVertexLast, transformFeedbackPreservesProvokingVertex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProvokingVertexFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexLast == rhs.provokingVertexLast ) && + ( transformFeedbackPreservesProvokingVertex == rhs.transformFeedbackPreservesProvokingVertex ); +# endif + } + + bool operator!=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceProvokingVertexFeaturesEXT; + }; + + // wrapper struct for struct 
VkPhysicalDeviceProvokingVertexPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProvokingVertexPropertiesEXT.html + struct PhysicalDeviceProvokingVertexPropertiesEXT + { + using NativeType = VkPhysicalDeviceProvokingVertexPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , provokingVertexModePerPipeline{ provokingVertexModePerPipeline_ } + , transformFeedbackPreservesTriangleFanProvokingVertex{ transformFeedbackPreservesTriangleFanProvokingVertex_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProvokingVertexPropertiesEXT( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProvokingVertexPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceProvokingVertexPropertiesEXT & operator=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceProvokingVertexPropertiesEXT & operator=( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceProvokingVertexPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProvokingVertexPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProvokingVertexPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProvokingVertexPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, provokingVertexModePerPipeline, transformFeedbackPreservesTriangleFanProvokingVertex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProvokingVertexPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexModePerPipeline == rhs.provokingVertexModePerPipeline ) && + ( transformFeedbackPreservesTriangleFanProvokingVertex == rhs.transformFeedbackPreservesTriangleFanProvokingVertex ); +# endif + } + + bool operator!=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 
provokingVertexModePerPipeline = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceProvokingVertexPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDevicePushDescriptorProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePushDescriptorProperties.html + struct PhysicalDevicePushDescriptorProperties + { + using NativeType = VkPhysicalDevicePushDescriptorProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushDescriptorProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorProperties( uint32_t maxPushDescriptors_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxPushDescriptors{ maxPushDescriptors_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorProperties( PhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePushDescriptorProperties( VkPhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePushDescriptorProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePushDescriptorProperties & operator=( PhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePushDescriptorProperties & operator=( VkPhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDevicePushDescriptorProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePushDescriptorProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePushDescriptorProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDevicePushDescriptorProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxPushDescriptors ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePushDescriptorProperties const & ) const = default; +#else + bool operator==( PhysicalDevicePushDescriptorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPushDescriptors == rhs.maxPushDescriptors ); +# endif + } + + bool operator!=( PhysicalDevicePushDescriptorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorProperties; + void * pNext = {}; + uint32_t maxPushDescriptors = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePushDescriptorProperties; + }; + + using PhysicalDevicePushDescriptorPropertiesKHR = PhysicalDevicePushDescriptorProperties; + + // wrapper struct for struct VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT.html + struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT + { + using NativeType = VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , formatRgba10x6WithoutYCbCrSampler{ formatRgba10x6WithoutYCbCrSampler_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRGBA10X6FormatsFeaturesEXT( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRGBA10X6FormatsFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & + setFormatRgba10x6WithoutYCbCrSampler( VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ ) VULKAN_HPP_NOEXCEPT + { + formatRgba10x6WithoutYCbCrSampler = formatRgba10x6WithoutYCbCrSampler_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, formatRgba10x6WithoutYCbCrSampler ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatRgba10x6WithoutYCbCrSampler == rhs.formatRgba10x6WithoutYCbCrSampler ); +# endif + } + + bool operator!=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRGBA10X6FormatsFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.html + struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT + { + using NativeType = VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rasterizationOrderColorAttachmentAccess{ rasterizationOrderColorAttachmentAccess_ } + , rasterizationOrderDepthAttachmentAccess{ rasterizationOrderDepthAttachmentAccess_ } + , rasterizationOrderStencilAttachmentAccess{ rasterizationOrderStencilAttachmentAccess_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( + *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & + operator=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & + operator=( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & + setRasterizationOrderColorAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationOrderColorAttachmentAccess = rasterizationOrderColorAttachmentAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & + setRasterizationOrderDepthAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationOrderDepthAttachmentAccess = rasterizationOrderDepthAttachmentAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & + 
setRasterizationOrderStencilAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationOrderStencilAttachmentAccess = rasterizationOrderStencilAttachmentAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, rasterizationOrderColorAttachmentAccess, rasterizationOrderDepthAttachmentAccess, rasterizationOrderStencilAttachmentAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rasterizationOrderColorAttachmentAccess == rhs.rasterizationOrderColorAttachmentAccess ) && + ( rasterizationOrderDepthAttachmentAccess == rhs.rasterizationOrderDepthAttachmentAccess ) && + ( rasterizationOrderStencilAttachmentAccess == rhs.rasterizationOrderStencilAttachmentAccess ); +# endif + } + + bool operator!=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + }; + + using PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + + // wrapper struct for struct VkPhysicalDeviceRawAccessChainsFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRawAccessChainsFeaturesNV.html + struct PhysicalDeviceRawAccessChainsFeaturesNV + { + using NativeType = VkPhysicalDeviceRawAccessChainsFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRawAccessChainsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRawAccessChainsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderRawAccessChains_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderRawAccessChains{ 
shaderRawAccessChains_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRawAccessChainsFeaturesNV( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRawAccessChainsFeaturesNV( VkPhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRawAccessChainsFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRawAccessChainsFeaturesNV & operator=( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRawAccessChainsFeaturesNV & operator=( VkPhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRawAccessChainsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRawAccessChainsFeaturesNV & + setShaderRawAccessChains( VULKAN_HPP_NAMESPACE::Bool32 shaderRawAccessChains_ ) VULKAN_HPP_NOEXCEPT + { + shaderRawAccessChains = shaderRawAccessChains_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRawAccessChainsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRawAccessChainsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRawAccessChainsFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRawAccessChainsFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderRawAccessChains ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRawAccessChainsFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderRawAccessChains == rhs.shaderRawAccessChains ); +# endif + } + + bool operator!=( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRawAccessChainsFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRawAccessChains = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRawAccessChainsFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceRayQueryFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayQueryFeaturesKHR.html + struct PhysicalDeviceRayQueryFeaturesKHR + { + using NativeType = VkPhysicalDeviceRayQueryFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR( 
VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rayQuery{ rayQuery_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayQueryFeaturesKHR( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayQueryFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayQueryFeaturesKHR & operator=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayQueryFeaturesKHR & operator=( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setRayQuery( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT + { + rayQuery = rayQuery_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRayQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayQueryFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayQueryFeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayQuery ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayQueryFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayQuery == rhs.rayQuery ); +# endif + } + + bool operator!=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayQuery = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayQueryFeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV.html + struct PhysicalDeviceRayTracingInvocationReorderFeaturesNV + { + using NativeType = VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderFeaturesNV( 
VULKAN_HPP_NAMESPACE::Bool32 rayTracingInvocationReorder_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rayTracingInvocationReorder{ rayTracingInvocationReorder_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingInvocationReorderFeaturesNV( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingInvocationReorderFeaturesNV( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingInvocationReorderFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingInvocationReorderFeaturesNV & + operator=( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingInvocationReorderFeaturesNV & operator=( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesNV & + setRayTracingInvocationReorder( VULKAN_HPP_NAMESPACE::Bool32 rayTracingInvocationReorder_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingInvocationReorder = rayTracingInvocationReorder_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayTracingInvocationReorder ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingInvocationReorder == rhs.rayTracingInvocationReorder ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingInvocationReorder = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingInvocationReorderFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV.html + struct PhysicalDeviceRayTracingInvocationReorderPropertiesNV + { + using NativeType = VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderPropertiesNV( + VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV rayTracingInvocationReorderReorderingHint_ = + VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV::eNone, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rayTracingInvocationReorderReorderingHint{ rayTracingInvocationReorderReorderingHint_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingInvocationReorderPropertiesNV( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingInvocationReorderPropertiesNV( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingInvocationReorderPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingInvocationReorderPropertiesNV & + operator=( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingInvocationReorderPropertiesNV & operator=( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayTracingInvocationReorderReorderingHint ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingInvocationReorderReorderingHint == rhs.rayTracingInvocationReorderReorderingHint ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV 
rayTracingInvocationReorderReorderingHint = + VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV::eNone; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingInvocationReorderPropertiesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV.html + struct PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV + { + using NativeType = VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 spheres_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 linearSweptSpheres_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , spheres{ spheres_ } + , linearSweptSpheres{ linearSweptSpheres_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV( VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & + operator=( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & operator=( VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & setSpheres( VULKAN_HPP_NAMESPACE::Bool32 spheres_ ) VULKAN_HPP_NOEXCEPT + { + spheres = spheres_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & + setLinearSweptSpheres( VULKAN_HPP_NAMESPACE::Bool32 linearSweptSpheres_ ) VULKAN_HPP_NOEXCEPT + { + linearSweptSpheres = linearSweptSpheres_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, spheres, 
linearSweptSpheres ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spheres == rhs.spheres ) && ( linearSweptSpheres == rhs.linearSweptSpheres ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 spheres = {}; + VULKAN_HPP_NAMESPACE::Bool32 linearSweptSpheres = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR.html + struct PhysicalDeviceRayTracingMaintenance1FeaturesKHR + { + using NativeType = VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMaintenance1FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rayTracingMaintenance1{ rayTracingMaintenance1_ } + , rayTracingPipelineTraceRaysIndirect2{ rayTracingPipelineTraceRaysIndirect2_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingMaintenance1FeaturesKHR( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingMaintenance1FeaturesKHR( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingMaintenance1FeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingMaintenance1FeaturesKHR & operator=( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingMaintenance1FeaturesKHR & operator=( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & + setRayTracingMaintenance1( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingMaintenance1 = rayTracingMaintenance1_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & + setRayTracingPipelineTraceRaysIndirect2( 
VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipelineTraceRaysIndirect2 = rayTracingPipelineTraceRaysIndirect2_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayTracingMaintenance1, rayTracingPipelineTraceRaysIndirect2 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingMaintenance1 == rhs.rayTracingMaintenance1 ) && + ( rayTracingPipelineTraceRaysIndirect2 == rhs.rayTracingPipelineTraceRaysIndirect2 ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1 = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingMaintenance1FeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceRayTracingMotionBlurFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingMotionBlurFeaturesNV.html + struct PhysicalDeviceRayTracingMotionBlurFeaturesNV + { + using NativeType = VkPhysicalDeviceRayTracingMotionBlurFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rayTracingMotionBlur{ rayTracingMotionBlur_ } + , rayTracingMotionBlurPipelineTraceRaysIndirect{ rayTracingMotionBlurPipelineTraceRaysIndirect_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingMotionBlurFeaturesNV( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingMotionBlurFeaturesNV( *reinterpret_cast( 
&rhs ) ) + { + } + + PhysicalDeviceRayTracingMotionBlurFeaturesNV & operator=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingMotionBlurFeaturesNV & operator=( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & + setRayTracingMotionBlur( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingMotionBlur = rayTracingMotionBlur_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & + setRayTracingMotionBlurPipelineTraceRaysIndirect( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingMotionBlurPipelineTraceRaysIndirect = rayTracingMotionBlurPipelineTraceRaysIndirect_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayTracingMotionBlur, rayTracingMotionBlurPipelineTraceRaysIndirect ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingMotionBlur == rhs.rayTracingMotionBlur ) && + ( rayTracingMotionBlurPipelineTraceRaysIndirect == rhs.rayTracingMotionBlurPipelineTraceRaysIndirect ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingMotionBlurFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceRayTracingPipelineFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingPipelineFeaturesKHR.html + struct PhysicalDeviceRayTracingPipelineFeaturesKHR + { + using NativeType = VkPhysicalDeviceRayTracingPipelineFeaturesKHR; + + static 
const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rayTracingPipeline{ rayTracingPipeline_ } + , rayTracingPipelineShaderGroupHandleCaptureReplay{ rayTracingPipelineShaderGroupHandleCaptureReplay_ } + , rayTracingPipelineShaderGroupHandleCaptureReplayMixed{ rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ } + , rayTracingPipelineTraceRaysIndirect{ rayTracingPipelineTraceRaysIndirect_ } + , rayTraversalPrimitiveCulling{ rayTraversalPrimitiveCulling_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPipelineFeaturesKHR( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingPipelineFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & + setRayTracingPipeline( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipeline = rayTracingPipeline_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & + setRayTracingPipelineShaderGroupHandleCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipelineShaderGroupHandleCaptureReplay = rayTracingPipelineShaderGroupHandleCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineShaderGroupHandleCaptureReplayMixed( + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipelineShaderGroupHandleCaptureReplayMixed = rayTracingPipelineShaderGroupHandleCaptureReplayMixed_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & + setRayTracingPipelineTraceRaysIndirect( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipelineTraceRaysIndirect = rayTracingPipelineTraceRaysIndirect_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR 
& + setRayTraversalPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT + { + rayTraversalPrimitiveCulling = rayTraversalPrimitiveCulling_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + rayTracingPipeline, + rayTracingPipelineShaderGroupHandleCaptureReplay, + rayTracingPipelineShaderGroupHandleCaptureReplayMixed, + rayTracingPipelineTraceRaysIndirect, + rayTraversalPrimitiveCulling ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingPipelineFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingPipeline == rhs.rayTracingPipeline ) && + ( rayTracingPipelineShaderGroupHandleCaptureReplay == rhs.rayTracingPipelineShaderGroupHandleCaptureReplay ) && + ( rayTracingPipelineShaderGroupHandleCaptureReplayMixed == rhs.rayTracingPipelineShaderGroupHandleCaptureReplayMixed ) && + ( rayTracingPipelineTraceRaysIndirect == rhs.rayTracingPipelineTraceRaysIndirect ) && + ( rayTraversalPrimitiveCulling == rhs.rayTraversalPrimitiveCulling ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingPipelineFeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceRayTracingPipelinePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingPipelinePropertiesKHR.html + struct PhysicalDeviceRayTracingPipelinePropertiesKHR + { + using NativeType = VkPhysicalDeviceRayTracingPipelinePropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR( uint32_t shaderGroupHandleSize_ = {}, + uint32_t 
maxRayRecursionDepth_ = {}, + uint32_t maxShaderGroupStride_ = {}, + uint32_t shaderGroupBaseAlignment_ = {}, + uint32_t shaderGroupHandleCaptureReplaySize_ = {}, + uint32_t maxRayDispatchInvocationCount_ = {}, + uint32_t shaderGroupHandleAlignment_ = {}, + uint32_t maxRayHitAttributeSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderGroupHandleSize{ shaderGroupHandleSize_ } + , maxRayRecursionDepth{ maxRayRecursionDepth_ } + , maxShaderGroupStride{ maxShaderGroupStride_ } + , shaderGroupBaseAlignment{ shaderGroupBaseAlignment_ } + , shaderGroupHandleCaptureReplaySize{ shaderGroupHandleCaptureReplaySize_ } + , maxRayDispatchInvocationCount{ maxRayDispatchInvocationCount_ } + , shaderGroupHandleAlignment{ shaderGroupHandleAlignment_ } + , maxRayHitAttributeSize{ maxRayHitAttributeSize_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingPipelinePropertiesKHR( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPipelinePropertiesKHR( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingPipelinePropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderGroupHandleSize, + maxRayRecursionDepth, + maxShaderGroupStride, + shaderGroupBaseAlignment, + shaderGroupHandleCaptureReplaySize, + maxRayDispatchInvocationCount, + shaderGroupHandleAlignment, + maxRayHitAttributeSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingPipelinePropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) && + ( maxRayRecursionDepth == rhs.maxRayRecursionDepth ) && ( maxShaderGroupStride == rhs.maxShaderGroupStride ) && + ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) && ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize ) && + ( maxRayDispatchInvocationCount == rhs.maxRayDispatchInvocationCount ) && ( shaderGroupHandleAlignment == rhs.shaderGroupHandleAlignment ) && + ( maxRayHitAttributeSize == rhs.maxRayHitAttributeSize ); +# endif + } + + 
bool operator!=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR; + void * pNext = {}; + uint32_t shaderGroupHandleSize = {}; + uint32_t maxRayRecursionDepth = {}; + uint32_t maxShaderGroupStride = {}; + uint32_t shaderGroupBaseAlignment = {}; + uint32_t shaderGroupHandleCaptureReplaySize = {}; + uint32_t maxRayDispatchInvocationCount = {}; + uint32_t shaderGroupHandleAlignment = {}; + uint32_t maxRayHitAttributeSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingPipelinePropertiesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR.html + struct PhysicalDeviceRayTracingPositionFetchFeaturesKHR + { + using NativeType = VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPositionFetchFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPositionFetch_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rayTracingPositionFetch{ rayTracingPositionFetch_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingPositionFetchFeaturesKHR( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPositionFetchFeaturesKHR( VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingPositionFetchFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingPositionFetchFeaturesKHR & operator=( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingPositionFetchFeaturesKHR & operator=( VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPositionFetchFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPositionFetchFeaturesKHR & + setRayTracingPositionFetch( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPositionFetch_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPositionFetch = rayTracingPositionFetch_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayTracingPositionFetch ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingPositionFetch == rhs.rayTracingPositionFetch ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPositionFetch = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingPositionFetchFeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceRayTracingPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingPropertiesNV.html + struct PhysicalDeviceRayTracingPropertiesNV + { + using NativeType = VkPhysicalDeviceRayTracingPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = {}, + uint32_t maxRecursionDepth_ = {}, + uint32_t maxShaderGroupStride_ = {}, + uint32_t shaderGroupBaseAlignment_ = {}, + uint64_t maxGeometryCount_ = {}, + uint64_t maxInstanceCount_ = {}, + uint64_t maxTriangleCount_ = {}, + uint32_t maxDescriptorSetAccelerationStructures_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderGroupHandleSize{ shaderGroupHandleSize_ } + , maxRecursionDepth{ maxRecursionDepth_ } + , maxShaderGroupStride{ maxShaderGroupStride_ } + , shaderGroupBaseAlignment{ shaderGroupBaseAlignment_ } + , maxGeometryCount{ maxGeometryCount_ } + , maxInstanceCount{ maxInstanceCount_ } + , maxTriangleCount{ maxTriangleCount_ } + , maxDescriptorSetAccelerationStructures{ maxDescriptorSetAccelerationStructures_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingPropertiesNV & operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingPropertiesNV & operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceRayTracingPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); 
+    }
+
+    operator VkPhysicalDeviceRayTracingPropertiesNV const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingPropertiesNV *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint64_t const &,
+               uint64_t const &,
+               uint64_t const &,
+               uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       shaderGroupHandleSize,
+                       maxRecursionDepth,
+                       maxShaderGroupStride,
+                       shaderGroupBaseAlignment,
+                       maxGeometryCount,
+                       maxInstanceCount,
+                       maxTriangleCount,
+                       maxDescriptorSetAccelerationStructures );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) &&
+             ( maxRecursionDepth == rhs.maxRecursionDepth ) && ( maxShaderGroupStride == rhs.maxShaderGroupStride ) &&
+             ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) && ( maxGeometryCount == rhs.maxGeometryCount ) &&
+             ( maxInstanceCount == rhs.maxInstanceCount ) && ( maxTriangleCount == rhs.maxTriangleCount ) &&
+             ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType                                  = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
+    void *                              pNext                                  = {};
+    uint32_t                            shaderGroupHandleSize                  = {};
+    uint32_t                            maxRecursionDepth                      = {};
+    uint32_t                            maxShaderGroupStride                   = {};
+    uint32_t                            shaderGroupBaseAlignment               = {};
+    uint64_t                            maxGeometryCount                       = {};
+    uint64_t                            maxInstanceCount                       = {};
+    uint64_t                            maxTriangleCount                       = {};
+    uint32_t                            maxDescriptorSetAccelerationStructures = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPropertiesNV>
+  {
+    using Type = PhysicalDeviceRayTracingPropertiesNV;
+  };
+
+  // wrapper struct for struct VkPhysicalDeviceRayTracingValidationFeaturesNV, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingValidationFeaturesNV.html
+  struct PhysicalDeviceRayTracingValidationFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceRayTracingValidationFeaturesNV;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingValidationFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingValidationFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 rayTracingValidation_ = {},
+                                                                       void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , rayTracingValidation{ rayTracingValidation_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingValidationFeaturesNV( PhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRayTracingValidationFeaturesNV( VkPhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRayTracingValidationFeaturesNV( *reinterpret_cast<PhysicalDeviceRayTracingValidationFeaturesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceRayTracingValidationFeaturesNV & operator=( PhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceRayTracingValidationFeaturesNV & operator=( VkPhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingValidationFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingValidationFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingValidationFeaturesNV &
+      setRayTracingValidation( VULKAN_HPP_NAMESPACE::Bool32 rayTracingValidation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rayTracingValidation = rayTracingValidation_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPhysicalDeviceRayTracingValidationFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayTracingValidationFeaturesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingValidationFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayTracingValidationFeaturesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingValidationFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceRayTracingValidationFeaturesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingValidationFeaturesNV *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceRayTracingValidationFeaturesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, rayTracingValidation );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceRayTracingValidationFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingValidation == rhs.rayTracingValidation );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType                = StructureType::ePhysicalDeviceRayTracingValidationFeaturesNV;
+    void *                              pNext                = {};
+    VULKAN_HPP_NAMESPACE::Bool32        rayTracingValidation = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingValidationFeaturesNV>
+  {
+    using Type = PhysicalDeviceRayTracingValidationFeaturesNV;
+  };
+
+  // wrapper struct for struct VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG.html
+  struct PhysicalDeviceRelaxedLineRasterizationFeaturesIMG
+  {
+    using NativeType = VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRelaxedLineRasterizationFeaturesIMG;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( VULKAN_HPP_NAMESPACE::Bool32 relaxedLineRasterization_ = {},
+                                                                            void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , relaxedLineRasterization{ relaxedLineRasterization_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+    PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( *reinterpret_cast<PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceRelaxedLineRasterizationFeaturesIMG &
+      operator=( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceRelaxedLineRasterizationFeaturesIMG & operator=( VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRelaxedLineRasterizationFeaturesIMG & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRelaxedLineRasterizationFeaturesIMG &
+      setRelaxedLineRasterization( VULKAN_HPP_NAMESPACE::Bool32 relaxedLineRasterization_ ) VULKAN_HPP_NOEXCEPT
+    {
+      relaxedLineRasterization = relaxedLineRasterization_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG *>( this );
+    }
+
+    operator VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG *>( this );
+    }
+
+    operator VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG *>( this );
+    }
+
+    operator VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, relaxedLineRasterization );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( relaxedLineRasterization == rhs.relaxedLineRasterization );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType                    = StructureType::ePhysicalDeviceRelaxedLineRasterizationFeaturesIMG;
+    void *                              pNext                    = {};
+    VULKAN_HPP_NAMESPACE::Bool32        relaxedLineRasterization = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRelaxedLineRasterizationFeaturesIMG>
+  {
+    using Type = PhysicalDeviceRelaxedLineRasterizationFeaturesIMG;
+  };
+
+  // wrapper struct for struct VkPhysicalDeviceRenderPassStripedFeaturesARM, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRenderPassStripedFeaturesARM.html
+  struct PhysicalDeviceRenderPassStripedFeaturesARM
+  {
+    using NativeType = VkPhysicalDeviceRenderPassStripedFeaturesARM;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRenderPassStripedFeaturesARM;
+
+#if
!defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedFeaturesARM( VULKAN_HPP_NAMESPACE::Bool32 renderPassStriped_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , renderPassStriped{ renderPassStriped_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedFeaturesARM( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRenderPassStripedFeaturesARM( VkPhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRenderPassStripedFeaturesARM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRenderPassStripedFeaturesARM & operator=( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRenderPassStripedFeaturesARM & operator=( VkPhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRenderPassStripedFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRenderPassStripedFeaturesARM & + setRenderPassStriped( VULKAN_HPP_NAMESPACE::Bool32 renderPassStriped_ ) VULKAN_HPP_NOEXCEPT + { + renderPassStriped = renderPassStriped_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRenderPassStripedFeaturesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRenderPassStripedFeaturesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRenderPassStripedFeaturesARM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRenderPassStripedFeaturesARM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, renderPassStriped ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRenderPassStripedFeaturesARM const & ) const = default; +#else + bool operator==( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPassStriped == rhs.renderPassStriped ); +# endif + } + + bool operator!=( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRenderPassStripedFeaturesARM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 renderPassStriped = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRenderPassStripedFeaturesARM; + }; + + // wrapper struct for struct VkPhysicalDeviceRenderPassStripedPropertiesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRenderPassStripedPropertiesARM.html + struct PhysicalDeviceRenderPassStripedPropertiesARM + { + using NativeType = 
VkPhysicalDeviceRenderPassStripedPropertiesARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRenderPassStripedPropertiesARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedPropertiesARM( VULKAN_HPP_NAMESPACE::Extent2D renderPassStripeGranularity_ = {}, + uint32_t maxRenderPassStripes_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , renderPassStripeGranularity{ renderPassStripeGranularity_ } + , maxRenderPassStripes{ maxRenderPassStripes_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedPropertiesARM( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRenderPassStripedPropertiesARM( VkPhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRenderPassStripedPropertiesARM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRenderPassStripedPropertiesARM & operator=( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRenderPassStripedPropertiesARM & operator=( VkPhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceRenderPassStripedPropertiesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRenderPassStripedPropertiesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRenderPassStripedPropertiesARM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRenderPassStripedPropertiesARM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, renderPassStripeGranularity, maxRenderPassStripes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRenderPassStripedPropertiesARM const & ) const = default; +#else + bool operator==( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPassStripeGranularity == rhs.renderPassStripeGranularity ) && + ( maxRenderPassStripes == rhs.maxRenderPassStripes ); +# endif + } + + bool operator!=( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRenderPassStripedPropertiesARM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D renderPassStripeGranularity = {}; + uint32_t maxRenderPassStripes = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRenderPassStripedPropertiesARM; + }; + + // wrapper struct for struct VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV.html + struct 
PhysicalDeviceRepresentativeFragmentTestFeaturesNV + { + using NativeType = VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , representativeFragmentTest{ representativeFragmentTest_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRepresentativeFragmentTestFeaturesNV( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRepresentativeFragmentTestFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV & + operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & + setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT + { + representativeFragmentTest = representativeFragmentTest_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, representativeFragmentTest ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( representativeFragmentTest == rhs.representativeFragmentTest ); +# endif + } + + bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRepresentativeFragmentTestFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceRobustness2FeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRobustness2FeaturesEXT.html + struct PhysicalDeviceRobustness2FeaturesEXT + { + using NativeType = VkPhysicalDeviceRobustness2FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , robustBufferAccess2{ robustBufferAccess2_ } + , robustImageAccess2{ robustImageAccess2_ } + , nullDescriptor{ nullDescriptor_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRobustness2FeaturesEXT( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRobustness2FeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRobustness2FeaturesEXT & operator=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRobustness2FeaturesEXT & operator=( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & + setRobustBufferAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT + { + robustBufferAccess2 = robustBufferAccess2_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setRobustImageAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT + { + robustImageAccess2 = robustImageAccess2_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setNullDescriptor( VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT + { + nullDescriptor = nullDescriptor_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRobustness2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRobustness2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRobustness2FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRobustness2FeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + 
std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, robustBufferAccess2, robustImageAccess2, nullDescriptor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRobustness2FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustBufferAccess2 == rhs.robustBufferAccess2 ) && + ( robustImageAccess2 == rhs.robustImageAccess2 ) && ( nullDescriptor == rhs.nullDescriptor ); +# endif + } + + bool operator!=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRobustness2FeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceRobustness2PropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRobustness2PropertiesEXT.html + struct PhysicalDeviceRobustness2PropertiesEXT + { + using NativeType = VkPhysicalDeviceRobustness2PropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , robustStorageBufferAccessSizeAlignment{ robustStorageBufferAccessSizeAlignment_ } + , robustUniformBufferAccessSizeAlignment{ robustUniformBufferAccessSizeAlignment_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRobustness2PropertiesEXT( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRobustness2PropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRobustness2PropertiesEXT & operator=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRobustness2PropertiesEXT & operator=( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceRobustness2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRobustness2PropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRobustness2PropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRobustness2PropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, robustStorageBufferAccessSizeAlignment, robustUniformBufferAccessSizeAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRobustness2PropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment ) && + ( robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRobustness2PropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceSampleLocationsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSampleLocationsPropertiesEXT.html + struct PhysicalDeviceSampleLocationsPropertiesEXT + { + using NativeType = VkPhysicalDeviceSampleLocationsPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, + std::array const & sampleLocationCoordinateRange_ = {}, + uint32_t sampleLocationSubPixelBits_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , sampleLocationSampleCounts{ sampleLocationSampleCounts_ } + , maxSampleLocationGridSize{ maxSampleLocationGridSize_ } + , sampleLocationCoordinateRange{ sampleLocationCoordinateRange_ } + , sampleLocationSubPixelBits{ sampleLocationSubPixelBits_ } + , variableSampleLocations{ variableSampleLocations_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSampleLocationsPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSampleLocationsPropertiesEXT & operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSampleLocationsPropertiesEXT & operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator 
VkPhysicalDeviceSampleLocationsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSampleLocationsPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSampleLocationsPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + sampleLocationSampleCounts, + maxSampleLocationGridSize, + sampleLocationCoordinateRange, + sampleLocationSubPixelBits, + variableSampleLocations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts ) && + ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ) && ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange ) && + ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits ) && ( variableSampleLocations == rhs.variableSampleLocations ); +# endif + } + + bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D sampleLocationCoordinateRange = {}; + uint32_t sampleLocationSubPixelBits = {}; + VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSampleLocationsPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceSamplerFilterMinmaxProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSamplerFilterMinmaxProperties.html + struct PhysicalDeviceSamplerFilterMinmaxProperties + { + using NativeType = VkPhysicalDeviceSamplerFilterMinmaxProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , filterMinmaxSingleComponentFormats{ filterMinmaxSingleComponentFormats_ } + , filterMinmaxImageComponentMapping{ filterMinmaxImageComponentMapping_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( 
PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSamplerFilterMinmaxProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSamplerFilterMinmaxProperties & operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSamplerFilterMinmaxProperties & operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceSamplerFilterMinmaxProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSamplerFilterMinmaxProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSamplerFilterMinmaxProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, filterMinmaxSingleComponentFormats, filterMinmaxImageComponentMapping ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) && + ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ); +# endif + } + + bool operator!=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSamplerFilterMinmaxProperties; + }; + + using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties; + + // wrapper struct for struct VkPhysicalDeviceSamplerYcbcrConversionFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSamplerYcbcrConversionFeatures.html + struct PhysicalDeviceSamplerYcbcrConversionFeatures + { + using NativeType = VkPhysicalDeviceSamplerYcbcrConversionFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , samplerYcbcrConversion{ 
samplerYcbcrConversion_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSamplerYcbcrConversionFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & + setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT + { + samplerYcbcrConversion = samplerYcbcrConversion_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSamplerYcbcrConversionFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, samplerYcbcrConversion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ); +# endif + } + + bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSamplerYcbcrConversionFeatures; + }; + + using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures; + + // wrapper struct for struct VkPhysicalDeviceScalarBlockLayoutFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceScalarBlockLayoutFeatures.html + struct PhysicalDeviceScalarBlockLayoutFeatures + { + using NativeType = VkPhysicalDeviceScalarBlockLayoutFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
StructureType structureType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , scalarBlockLayout{ scalarBlockLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceScalarBlockLayoutFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceScalarBlockLayoutFeatures & operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceScalarBlockLayoutFeatures & operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & + setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT + { + scalarBlockLayout = scalarBlockLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceScalarBlockLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceScalarBlockLayoutFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceScalarBlockLayoutFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, scalarBlockLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( scalarBlockLayout == rhs.scalarBlockLayout ); +# endif + } + + bool operator!=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceScalarBlockLayoutFeatures; + }; + + using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures; + + // wrapper struct for struct VkPhysicalDeviceSchedulingControlsFeaturesARM, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSchedulingControlsFeaturesARM.html + struct PhysicalDeviceSchedulingControlsFeaturesARM + { + using NativeType = VkPhysicalDeviceSchedulingControlsFeaturesARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSchedulingControlsFeaturesARM( VULKAN_HPP_NAMESPACE::Bool32 schedulingControls_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , schedulingControls{ schedulingControls_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSchedulingControlsFeaturesARM( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSchedulingControlsFeaturesARM( VkPhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSchedulingControlsFeaturesARM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSchedulingControlsFeaturesARM & operator=( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSchedulingControlsFeaturesARM & operator=( VkPhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSchedulingControlsFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSchedulingControlsFeaturesARM & + setSchedulingControls( VULKAN_HPP_NAMESPACE::Bool32 schedulingControls_ ) VULKAN_HPP_NOEXCEPT + { + schedulingControls = schedulingControls_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceSchedulingControlsFeaturesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSchedulingControlsFeaturesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSchedulingControlsFeaturesARM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSchedulingControlsFeaturesARM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, schedulingControls ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSchedulingControlsFeaturesARM const & ) const = default; +#else + bool operator==( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( schedulingControls == rhs.schedulingControls ); +# endif + } + + bool operator!=( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM; + void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 schedulingControls = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSchedulingControlsFeaturesARM; + }; + + // wrapper struct for struct VkPhysicalDeviceSchedulingControlsPropertiesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSchedulingControlsPropertiesARM.html + struct PhysicalDeviceSchedulingControlsPropertiesARM + { + using NativeType = VkPhysicalDeviceSchedulingControlsPropertiesARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSchedulingControlsPropertiesARM( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , schedulingControlsFlags{ schedulingControlsFlags_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSchedulingControlsPropertiesARM( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSchedulingControlsPropertiesARM( VkPhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSchedulingControlsPropertiesARM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSchedulingControlsPropertiesARM & operator=( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSchedulingControlsPropertiesARM & operator=( VkPhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceSchedulingControlsPropertiesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSchedulingControlsPropertiesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSchedulingControlsPropertiesARM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSchedulingControlsPropertiesARM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, schedulingControlsFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSchedulingControlsPropertiesARM const & ) const = default; +#else + bool operator==( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( schedulingControlsFlags == rhs.schedulingControlsFlags ); +# endif + } + + bool operator!=( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags = {}; + }; + + template <> + struct CppType + { + using Type = 
PhysicalDeviceSchedulingControlsPropertiesARM; + }; + + // wrapper struct for struct VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures.html + struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures + { + using NativeType = VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , separateDepthStencilLayouts{ separateDepthStencilLayouts_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSeparateDepthStencilLayoutsFeatures( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSeparateDepthStencilLayoutsFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSeparateDepthStencilLayoutsFeatures & + operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & + setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT + { + separateDepthStencilLayouts = separateDepthStencilLayouts_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, separateDepthStencilLayouts ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( 
separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ); +# endif + } + + bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + }; + + using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + + // wrapper struct for struct VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV.html + struct PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV + { + using NativeType = VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16VectorAtomics_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderFloat16VectorAtomics{ shaderFloat16VectorAtomics_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV & + operator=( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV & operator=( VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV & + setShaderFloat16VectorAtomics( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16VectorAtomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloat16VectorAtomics = shaderFloat16VectorAtomics_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# 
if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderFloat16VectorAtomics ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloat16VectorAtomics == rhs.shaderFloat16VectorAtomics ); +# endif + } + + bool operator!=( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16VectorAtomics = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT.html + struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderBufferFloat16Atomics{ shaderBufferFloat16Atomics_ } + , shaderBufferFloat16AtomicAdd{ shaderBufferFloat16AtomicAdd_ } + , shaderBufferFloat16AtomicMinMax{ shaderBufferFloat16AtomicMinMax_ } + , shaderBufferFloat32AtomicMinMax{ shaderBufferFloat32AtomicMinMax_ } + , shaderBufferFloat64AtomicMinMax{ shaderBufferFloat64AtomicMinMax_ } + , shaderSharedFloat16Atomics{ shaderSharedFloat16Atomics_ } + , shaderSharedFloat16AtomicAdd{ shaderSharedFloat16AtomicAdd_ } + , shaderSharedFloat16AtomicMinMax{ shaderSharedFloat16AtomicMinMax_ } + , shaderSharedFloat32AtomicMinMax{ shaderSharedFloat32AtomicMinMax_ } + , shaderSharedFloat64AtomicMinMax{ shaderSharedFloat64AtomicMinMax_ } + , shaderImageFloat32AtomicMinMax{ shaderImageFloat32AtomicMinMax_ } + , sparseImageFloat32AtomicMinMax{ sparseImageFloat32AtomicMinMax_ } + { + } + + 
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicFloat2FeaturesEXT( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderAtomicFloat2FeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderAtomicFloat2FeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderAtomicFloat2FeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderBufferFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat16Atomics = shaderBufferFloat16Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderBufferFloat16AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat16AtomicAdd = shaderBufferFloat16AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderBufferFloat16AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat16AtomicMinMax = shaderBufferFloat16AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderBufferFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat32AtomicMinMax = shaderBufferFloat32AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderBufferFloat64AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat64AtomicMinMax = shaderBufferFloat64AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderSharedFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat16Atomics = shaderSharedFloat16Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderSharedFloat16AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat16AtomicAdd = shaderSharedFloat16AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderSharedFloat16AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat16AtomicMinMax = shaderSharedFloat16AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderSharedFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat32AtomicMinMax = shaderSharedFloat32AtomicMinMax_; + return *this; + } + + 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderSharedFloat64AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat64AtomicMinMax = shaderSharedFloat64AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderImageFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageFloat32AtomicMinMax = shaderImageFloat32AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setSparseImageFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageFloat32AtomicMinMax = sparseImageFloat32AtomicMinMax_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderBufferFloat16Atomics, + shaderBufferFloat16AtomicAdd, + shaderBufferFloat16AtomicMinMax, + shaderBufferFloat32AtomicMinMax, + shaderBufferFloat64AtomicMinMax, + shaderSharedFloat16Atomics, + shaderSharedFloat16AtomicAdd, + shaderSharedFloat16AtomicMinMax, + shaderSharedFloat32AtomicMinMax, + shaderSharedFloat64AtomicMinMax, + shaderImageFloat32AtomicMinMax, + sparseImageFloat32AtomicMinMax ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferFloat16Atomics == rhs.shaderBufferFloat16Atomics ) && + ( shaderBufferFloat16AtomicAdd == rhs.shaderBufferFloat16AtomicAdd ) && + ( shaderBufferFloat16AtomicMinMax == rhs.shaderBufferFloat16AtomicMinMax ) && + ( shaderBufferFloat32AtomicMinMax == rhs.shaderBufferFloat32AtomicMinMax ) && + ( shaderBufferFloat64AtomicMinMax == rhs.shaderBufferFloat64AtomicMinMax ) && ( shaderSharedFloat16Atomics == rhs.shaderSharedFloat16Atomics ) && + ( shaderSharedFloat16AtomicAdd == rhs.shaderSharedFloat16AtomicAdd ) && + ( shaderSharedFloat16AtomicMinMax == rhs.shaderSharedFloat16AtomicMinMax ) && + ( shaderSharedFloat32AtomicMinMax == rhs.shaderSharedFloat32AtomicMinMax ) && + ( shaderSharedFloat64AtomicMinMax == rhs.shaderSharedFloat64AtomicMinMax ) && + ( shaderImageFloat32AtomicMinMax == rhs.shaderImageFloat32AtomicMinMax ) && + ( sparseImageFloat32AtomicMinMax == rhs.sparseImageFloat32AtomicMinMax ); +# endif + } + + bool operator!=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderAtomicFloat2FeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceShaderAtomicFloatFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderAtomicFloatFeaturesEXT.html + struct PhysicalDeviceShaderAtomicFloatFeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderAtomicFloatFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderBufferFloat32Atomics{ shaderBufferFloat32Atomics_ } + , shaderBufferFloat32AtomicAdd{ shaderBufferFloat32AtomicAdd_ } + , shaderBufferFloat64Atomics{ shaderBufferFloat64Atomics_ } + , shaderBufferFloat64AtomicAdd{ shaderBufferFloat64AtomicAdd_ } + , shaderSharedFloat32Atomics{ shaderSharedFloat32Atomics_ } + , shaderSharedFloat32AtomicAdd{ shaderSharedFloat32AtomicAdd_ } + , shaderSharedFloat64Atomics{ shaderSharedFloat64Atomics_ } + , shaderSharedFloat64AtomicAdd{ shaderSharedFloat64AtomicAdd_ } + , shaderImageFloat32Atomics{ shaderImageFloat32Atomics_ } + , shaderImageFloat32AtomicAdd{ shaderImageFloat32AtomicAdd_ } + , sparseImageFloat32Atomics{ sparseImageFloat32Atomics_ } + , sparseImageFloat32AtomicAdd{ sparseImageFloat32AtomicAdd_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicFloatFeaturesEXT( 
VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderAtomicFloatFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat32Atomics = shaderBufferFloat32Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat32AtomicAdd = shaderBufferFloat32AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat64Atomics = shaderBufferFloat64Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat32Atomics = shaderSharedFloat32Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageFloat32Atomics = shaderImageFloat32Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageFloat32AtomicAdd = shaderImageFloat32AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setSparseImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageFloat32Atomics = sparseImageFloat32Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setSparseImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageFloat32AtomicAdd = sparseImageFloat32AtomicAdd_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderBufferFloat32Atomics, + shaderBufferFloat32AtomicAdd, + shaderBufferFloat64Atomics, + shaderBufferFloat64AtomicAdd, + shaderSharedFloat32Atomics, + shaderSharedFloat32AtomicAdd, + shaderSharedFloat64Atomics, + shaderSharedFloat64AtomicAdd, + shaderImageFloat32Atomics, + shaderImageFloat32AtomicAdd, + sparseImageFloat32Atomics, + sparseImageFloat32AtomicAdd ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics ) && + ( shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd ) && ( shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics ) && + ( shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd ) && ( shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics ) && + ( shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd ) && ( shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics ) && + ( shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd ) && ( shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics ) && + ( shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd ) && ( sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics ) && + ( sparseImageFloat32AtomicAdd == rhs.sparseImageFloat32AtomicAdd ); +# endif + } + + bool operator!=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 
shaderSharedFloat32AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderAtomicFloatFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceShaderAtomicInt64Features, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderAtomicInt64Features.html + struct PhysicalDeviceShaderAtomicInt64Features + { + using NativeType = VkPhysicalDeviceShaderAtomicInt64Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicInt64Features; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderBufferInt64Atomics{ shaderBufferInt64Atomics_ } + , shaderSharedInt64Atomics{ shaderSharedInt64Atomics_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderAtomicInt64Features( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderAtomicInt64Features & operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderAtomicInt64Features & operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & + setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferInt64Atomics = shaderBufferInt64Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & + setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedInt64Atomics = shaderSharedInt64Atomics_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderAtomicInt64Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicInt64Features const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicInt64Features *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) 
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderBufferInt64Atomics, shaderSharedInt64Atomics );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) &&
+             ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicInt64Features>
+  {
+    using Type = PhysicalDeviceShaderAtomicInt64Features;
+  };
+
+  using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features;
+
+  // wrapper struct for struct VkPhysicalDeviceShaderBfloat16FeaturesKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderBfloat16FeaturesKHR.html
+  struct PhysicalDeviceShaderBfloat16FeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceShaderBfloat16FeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderBfloat16FeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderBfloat16FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderBFloat16Type_ = {},
+                                                                  VULKAN_HPP_NAMESPACE::Bool32 shaderBFloat16DotProduct_ = {},
+                                                                  VULKAN_HPP_NAMESPACE::Bool32 shaderBFloat16CooperativeMatrix_ = {},
+                                                                  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , shaderBFloat16Type{ shaderBFloat16Type_ }
+      , shaderBFloat16DotProduct{ shaderBFloat16DotProduct_ }
+      , shaderBFloat16CooperativeMatrix{ shaderBFloat16CooperativeMatrix_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderBfloat16FeaturesKHR( PhysicalDeviceShaderBfloat16FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderBfloat16FeaturesKHR( VkPhysicalDeviceShaderBfloat16FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderBfloat16FeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderBfloat16FeaturesKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderBfloat16FeaturesKHR & operator=( PhysicalDeviceShaderBfloat16FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderBfloat16FeaturesKHR & operator=( VkPhysicalDeviceShaderBfloat16FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderBfloat16FeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderBfloat16FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderBfloat16FeaturesKHR &
+                            setShaderBFloat16Type( VULKAN_HPP_NAMESPACE::Bool32 shaderBFloat16Type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBFloat16Type = shaderBFloat16Type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderBfloat16FeaturesKHR &
+                            setShaderBFloat16DotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderBFloat16DotProduct_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBFloat16DotProduct = shaderBFloat16DotProduct_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderBfloat16FeaturesKHR &
+                            setShaderBFloat16CooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 shaderBFloat16CooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBFloat16CooperativeMatrix = shaderBFloat16CooperativeMatrix_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPhysicalDeviceShaderBfloat16FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderBfloat16FeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderBfloat16FeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderBfloat16FeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderBfloat16FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceShaderBfloat16FeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderBfloat16FeaturesKHR *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceShaderBfloat16FeaturesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderBFloat16Type, shaderBFloat16DotProduct, shaderBFloat16CooperativeMatrix );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderBfloat16FeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderBfloat16FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBFloat16Type == rhs.shaderBFloat16Type ) &&
+             ( shaderBFloat16DotProduct == rhs.shaderBFloat16DotProduct ) && ( shaderBFloat16CooperativeMatrix == rhs.shaderBFloat16CooperativeMatrix );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderBfloat16FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderBfloat16FeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBFloat16Type = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBFloat16DotProduct = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBFloat16CooperativeMatrix = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderBfloat16FeaturesKHR>
+  {
+    using Type = PhysicalDeviceShaderBfloat16FeaturesKHR;
+  };
+
+  // wrapper struct for struct VkPhysicalDeviceShaderClockFeaturesKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderClockFeaturesKHR.html
+  struct PhysicalDeviceShaderClockFeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceShaderClockFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {},
+                                                               VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {},
+                                                               void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , shaderSubgroupClock{ shaderSubgroupClock_ }
+      , shaderDeviceClock{ shaderDeviceClock_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderClockFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderClockFeaturesKHR & operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderClockFeaturesKHR & operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR &
+                            setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSubgroupClock = shaderSubgroupClock_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderDeviceClock = shaderDeviceClock_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPhysicalDeviceShaderClockFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderClockFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderClockFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceShaderClockFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderClockFeaturesKHR *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderSubgroupClock, shaderDeviceClock );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderClockFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupClock == rhs.shaderSubgroupClock ) &&
+             ( shaderDeviceClock == rhs.shaderDeviceClock );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderClockFeaturesKHR>
+  {
+    using Type = PhysicalDeviceShaderClockFeaturesKHR;
+  };
+
+  // wrapper struct for struct VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM.html
+  struct PhysicalDeviceShaderCoreBuiltinsFeaturesARM
+  {
+    using NativeType = VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+
+    static const bool allowDuplicate = false;
+    static
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsFeaturesARM( VULKAN_HPP_NAMESPACE::Bool32 shaderCoreBuiltins_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderCoreBuiltins{ shaderCoreBuiltins_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsFeaturesARM( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCoreBuiltinsFeaturesARM( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderCoreBuiltinsFeaturesARM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderCoreBuiltinsFeaturesARM & operator=( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderCoreBuiltinsFeaturesARM & operator=( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCoreBuiltinsFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCoreBuiltinsFeaturesARM & + setShaderCoreBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderCoreBuiltins_ ) VULKAN_HPP_NOEXCEPT + { + shaderCoreBuiltins = shaderCoreBuiltins_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderCoreBuiltins ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreBuiltins == rhs.shaderCoreBuiltins ); +# endif + } + + bool operator!=( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderCoreBuiltins = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderCoreBuiltinsFeaturesARM; + }; + + // wrapper struct for struct VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM.html + struct PhysicalDeviceShaderCoreBuiltinsPropertiesARM + { + using NativeType = VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsPropertiesARM( uint64_t shaderCoreMask_ = {}, + uint32_t shaderCoreCount_ = {}, + uint32_t shaderWarpsPerCore_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderCoreMask{ shaderCoreMask_ } + , shaderCoreCount{ shaderCoreCount_ } + , shaderWarpsPerCore{ shaderWarpsPerCore_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderCoreBuiltinsPropertiesARM( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCoreBuiltinsPropertiesARM( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderCoreBuiltinsPropertiesARM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderCoreBuiltinsPropertiesARM & operator=( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderCoreBuiltinsPropertiesARM & operator=( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderCoreMask, shaderCoreCount, shaderWarpsPerCore ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreMask == rhs.shaderCoreMask ) && ( shaderCoreCount == rhs.shaderCoreCount ) && + ( shaderWarpsPerCore == rhs.shaderWarpsPerCore ); +# endif + } + + bool operator!=( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM; + void * pNext = {}; + uint64_t shaderCoreMask = {}; + uint32_t shaderCoreCount = {}; + uint32_t shaderWarpsPerCore = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderCoreBuiltinsPropertiesARM; + }; + + // wrapper 
struct for struct VkPhysicalDeviceShaderCoreProperties2AMD, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderCoreProperties2AMD.html
+  struct PhysicalDeviceShaderCoreProperties2AMD
+  {
+    using NativeType = VkPhysicalDeviceShaderCoreProperties2AMD;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {},
+                                                                 uint32_t activeComputeUnitCount_ = {},
+                                                                 void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , shaderCoreFeatures{ shaderCoreFeatures_ }
+      , activeComputeUnitCount{ activeComputeUnitCount_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderCoreProperties2AMD( *reinterpret_cast<PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderCoreProperties2AMD & operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderCoreProperties2AMD & operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderCoreProperties2AMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderCoreProperties2AMD *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderCoreProperties2AMD const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceShaderCoreProperties2AMD *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderCoreProperties2AMD *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderCoreFeatures, activeComputeUnitCount );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreFeatures == rhs.shaderCoreFeatures ) &&
+             ( activeComputeUnitCount == rhs.activeComputeUnitCount );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {};
+    uint32_t activeComputeUnitCount = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreProperties2AMD>
+  {
+    using Type = PhysicalDeviceShaderCoreProperties2AMD;
+  };
+
+  // wrapper struct for struct VkPhysicalDeviceShaderCorePropertiesAMD, see
+  //
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderCorePropertiesAMD.html + struct PhysicalDeviceShaderCorePropertiesAMD + { + using NativeType = VkPhysicalDeviceShaderCorePropertiesAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_ = {}, + uint32_t shaderArraysPerEngineCount_ = {}, + uint32_t computeUnitsPerShaderArray_ = {}, + uint32_t simdPerComputeUnit_ = {}, + uint32_t wavefrontsPerSimd_ = {}, + uint32_t wavefrontSize_ = {}, + uint32_t sgprsPerSimd_ = {}, + uint32_t minSgprAllocation_ = {}, + uint32_t maxSgprAllocation_ = {}, + uint32_t sgprAllocationGranularity_ = {}, + uint32_t vgprsPerSimd_ = {}, + uint32_t minVgprAllocation_ = {}, + uint32_t maxVgprAllocation_ = {}, + uint32_t vgprAllocationGranularity_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderEngineCount{ shaderEngineCount_ } + , shaderArraysPerEngineCount{ shaderArraysPerEngineCount_ } + , computeUnitsPerShaderArray{ computeUnitsPerShaderArray_ } + , simdPerComputeUnit{ simdPerComputeUnit_ } + , wavefrontsPerSimd{ wavefrontsPerSimd_ } + , wavefrontSize{ wavefrontSize_ } + , sgprsPerSimd{ sgprsPerSimd_ } + , minSgprAllocation{ minSgprAllocation_ } + , maxSgprAllocation{ maxSgprAllocation_ } + , sgprAllocationGranularity{ sgprAllocationGranularity_ } + , vgprsPerSimd{ vgprsPerSimd_ } + , minVgprAllocation{ minVgprAllocation_ } + , maxVgprAllocation{ maxVgprAllocation_ } + , vgprAllocationGranularity{ vgprAllocationGranularity_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderCorePropertiesAMD( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderCorePropertiesAMD & operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderCorePropertiesAMD & operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderCorePropertiesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCorePropertiesAMD const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCorePropertiesAMD *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderEngineCount, + shaderArraysPerEngineCount, + computeUnitsPerShaderArray, + simdPerComputeUnit, + wavefrontsPerSimd, + wavefrontSize, + sgprsPerSimd, + minSgprAllocation, + maxSgprAllocation, + sgprAllocationGranularity, + vgprsPerSimd, + minVgprAllocation, + maxVgprAllocation, + vgprAllocationGranularity ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEngineCount == rhs.shaderEngineCount ) && + ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount ) && ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray ) && + ( simdPerComputeUnit == rhs.simdPerComputeUnit ) && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd ) && ( wavefrontSize == rhs.wavefrontSize ) && + ( sgprsPerSimd == rhs.sgprsPerSimd ) && ( minSgprAllocation == rhs.minSgprAllocation ) && ( maxSgprAllocation == rhs.maxSgprAllocation ) && + ( sgprAllocationGranularity == rhs.sgprAllocationGranularity ) && ( vgprsPerSimd == rhs.vgprsPerSimd ) && + ( minVgprAllocation == rhs.minVgprAllocation ) && ( maxVgprAllocation == rhs.maxVgprAllocation ) && + ( vgprAllocationGranularity == rhs.vgprAllocationGranularity ); +# endif + } + + bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD; + void * pNext = {}; + uint32_t shaderEngineCount = {}; + uint32_t shaderArraysPerEngineCount = {}; + uint32_t computeUnitsPerShaderArray = {}; + uint32_t simdPerComputeUnit = {}; + uint32_t wavefrontsPerSimd = {}; + uint32_t wavefrontSize = {}; + uint32_t sgprsPerSimd = {}; + uint32_t minSgprAllocation = {}; + uint32_t maxSgprAllocation = {}; + uint32_t sgprAllocationGranularity = {}; + uint32_t vgprsPerSimd = {}; + uint32_t minVgprAllocation = {}; + uint32_t maxVgprAllocation = {}; + uint32_t vgprAllocationGranularity = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderCorePropertiesAMD; + }; + + // wrapper struct for struct VkPhysicalDeviceShaderCorePropertiesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderCorePropertiesARM.html + struct PhysicalDeviceShaderCorePropertiesARM + { + using NativeType = VkPhysicalDeviceShaderCorePropertiesARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCorePropertiesARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesARM( uint32_t pixelRate_ = {}, + uint32_t texelRate_ = {}, + uint32_t fmaRate_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pixelRate{ pixelRate_ } + , texelRate{ texelRate_ } + , fmaRate{ fmaRate_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesARM( PhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCorePropertiesARM( VkPhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderCorePropertiesARM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderCorePropertiesARM & operator=( PhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderCorePropertiesARM & operator=( VkPhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + 
*this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderCorePropertiesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCorePropertiesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCorePropertiesARM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCorePropertiesARM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pixelRate, texelRate, fmaRate ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderCorePropertiesARM const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderCorePropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pixelRate == rhs.pixelRate ) && ( texelRate == rhs.texelRate ) && ( fmaRate == rhs.fmaRate ); +# endif + } + + bool operator!=( PhysicalDeviceShaderCorePropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesARM; + void * pNext = {}; + uint32_t pixelRate = {}; + uint32_t texelRate = {}; + uint32_t fmaRate = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderCorePropertiesARM; + }; + + // wrapper struct for struct VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.html + struct PhysicalDeviceShaderDemoteToHelperInvocationFeatures + { + using NativeType = VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderDemoteToHelperInvocation{ shaderDemoteToHelperInvocation_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderDemoteToHelperInvocationFeatures( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderDemoteToHelperInvocationFeatures( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderDemoteToHelperInvocationFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderDemoteToHelperInvocationFeatures & + operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderDemoteToHelperInvocationFeatures & operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & + setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT + { + shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderDemoteToHelperInvocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation ); +# endif + } + + bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeatures; + }; + + using PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = PhysicalDeviceShaderDemoteToHelperInvocationFeatures; + + // wrapper struct for struct VkPhysicalDeviceShaderDrawParametersFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderDrawParametersFeatures.html + struct PhysicalDeviceShaderDrawParametersFeatures + { + using NativeType = VkPhysicalDeviceShaderDrawParametersFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderDrawParameters{ shaderDrawParameters_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderDrawParametersFeatures( 
VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderDrawParametersFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderDrawParametersFeatures & operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderDrawParametersFeatures & operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & + setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT + { + shaderDrawParameters = shaderDrawParameters_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderDrawParametersFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderDrawParametersFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderDrawParametersFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderDrawParameters ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderDrawParameters == rhs.shaderDrawParameters ); +# endif + } + + bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderDrawParametersFeatures; + }; + + using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures; + + // wrapper struct for struct VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD.html + struct PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD + { + using NativeType = VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderEarlyAndLateFragmentTests{ shaderEarlyAndLateFragmentTests_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & + operator=( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & + operator=( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & + setShaderEarlyAndLateFragmentTests( VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests_ ) VULKAN_HPP_NOEXCEPT + { + shaderEarlyAndLateFragmentTests = shaderEarlyAndLateFragmentTests_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderEarlyAndLateFragmentTests ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEarlyAndLateFragmentTests == rhs.shaderEarlyAndLateFragmentTests ); +# endif + } + + bool operator!=( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests = {}; + }; + + template <> + struct CppType + { + using Type = 
PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + // wrapper struct for struct VkPhysicalDeviceShaderEnqueueFeaturesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderEnqueueFeaturesAMDX.html + struct PhysicalDeviceShaderEnqueueFeaturesAMDX + { + using NativeType = VkPhysicalDeviceShaderEnqueueFeaturesAMDX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueueFeaturesAMDX( VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderMeshEnqueue_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderEnqueue{ shaderEnqueue_ } + , shaderMeshEnqueue{ shaderMeshEnqueue_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueueFeaturesAMDX( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderEnqueueFeaturesAMDX( VkPhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderEnqueueFeaturesAMDX( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderEnqueueFeaturesAMDX & operator=( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderEnqueueFeaturesAMDX & operator=( VkPhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & setShaderEnqueue( VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue_ ) VULKAN_HPP_NOEXCEPT + { + shaderEnqueue = shaderEnqueue_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & + setShaderMeshEnqueue( VULKAN_HPP_NAMESPACE::Bool32 shaderMeshEnqueue_ ) VULKAN_HPP_NOEXCEPT + { + shaderMeshEnqueue = shaderMeshEnqueue_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderEnqueue, shaderMeshEnqueue ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderEnqueueFeaturesAMDX const & ) const = default; +# else + bool operator==( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + 
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEnqueue == rhs.shaderEnqueue ) && ( shaderMeshEnqueue == rhs.shaderMeshEnqueue ); +# endif + } + + bool operator!=( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderMeshEnqueue = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderEnqueueFeaturesAMDX; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + // wrapper struct for struct VkPhysicalDeviceShaderEnqueuePropertiesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderEnqueuePropertiesAMDX.html + struct PhysicalDeviceShaderEnqueuePropertiesAMDX + { + using NativeType = VkPhysicalDeviceShaderEnqueuePropertiesAMDX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX( uint32_t maxExecutionGraphDepth_ = {}, + uint32_t maxExecutionGraphShaderOutputNodes_ = {}, + uint32_t maxExecutionGraphShaderPayloadSize_ = {}, + uint32_t maxExecutionGraphShaderPayloadCount_ = {}, + uint32_t executionGraphDispatchAddressAlignment_ = {}, + std::array const & maxExecutionGraphWorkgroupCount_ = {}, + uint32_t maxExecutionGraphWorkgroups_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxExecutionGraphDepth{ maxExecutionGraphDepth_ } + , maxExecutionGraphShaderOutputNodes{ maxExecutionGraphShaderOutputNodes_ } + , maxExecutionGraphShaderPayloadSize{ maxExecutionGraphShaderPayloadSize_ } + , maxExecutionGraphShaderPayloadCount{ maxExecutionGraphShaderPayloadCount_ } + , executionGraphDispatchAddressAlignment{ executionGraphDispatchAddressAlignment_ } + , maxExecutionGraphWorkgroupCount{ maxExecutionGraphWorkgroupCount_ } + , maxExecutionGraphWorkgroups{ maxExecutionGraphWorkgroups_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderEnqueuePropertiesAMDX( VkPhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderEnqueuePropertiesAMDX( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderEnqueuePropertiesAMDX & operator=( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderEnqueuePropertiesAMDX & operator=( VkPhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX *() 
VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxExecutionGraphDepth, + maxExecutionGraphShaderOutputNodes, + maxExecutionGraphShaderPayloadSize, + maxExecutionGraphShaderPayloadCount, + executionGraphDispatchAddressAlignment, + maxExecutionGraphWorkgroupCount, + maxExecutionGraphWorkgroups ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderEnqueuePropertiesAMDX const & ) const = default; +# else + bool operator==( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxExecutionGraphDepth == rhs.maxExecutionGraphDepth ) && + ( maxExecutionGraphShaderOutputNodes == rhs.maxExecutionGraphShaderOutputNodes ) && + ( maxExecutionGraphShaderPayloadSize == rhs.maxExecutionGraphShaderPayloadSize ) && + ( maxExecutionGraphShaderPayloadCount == rhs.maxExecutionGraphShaderPayloadCount ) && + ( executionGraphDispatchAddressAlignment == rhs.executionGraphDispatchAddressAlignment ) && + ( maxExecutionGraphWorkgroupCount == rhs.maxExecutionGraphWorkgroupCount ) && ( maxExecutionGraphWorkgroups == rhs.maxExecutionGraphWorkgroups ); +# endif + } + + bool operator!=( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX; + void * pNext = {}; + uint32_t maxExecutionGraphDepth = {}; + uint32_t maxExecutionGraphShaderOutputNodes = {}; + uint32_t maxExecutionGraphShaderPayloadSize = {}; + uint32_t maxExecutionGraphShaderPayloadCount = {}; + uint32_t executionGraphDispatchAddressAlignment = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxExecutionGraphWorkgroupCount = {}; + uint32_t maxExecutionGraphWorkgroups = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderEnqueuePropertiesAMDX; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + // wrapper struct for struct VkPhysicalDeviceShaderExpectAssumeFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderExpectAssumeFeatures.html + struct PhysicalDeviceShaderExpectAssumeFeatures + { + using NativeType = VkPhysicalDeviceShaderExpectAssumeFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderExpectAssumeFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderExpectAssumeFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderExpectAssume{ shaderExpectAssume_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderExpectAssumeFeatures( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderExpectAssumeFeatures( VkPhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderExpectAssumeFeatures( *reinterpret_cast( &rhs ) ) + { + } + + 
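+    // [Editorial note - illustrative sketch only, not part of the generated header.] Feature structs such as
+    // PhysicalDeviceShaderExpectAssumeFeatures are normally hooked into a PhysicalDeviceFeatures2 query via their
+    // pNext member; the snippet below assumes a VULKAN_HPP_NAMESPACE::PhysicalDevice named physicalDevice:
+    //
+    //   VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderExpectAssumeFeatures expectAssumeFeatures;
+    //   VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2                  features2;
+    //   features2.pNext = &expectAssumeFeatures;    // chain the extension struct into the query
+    //   physicalDevice.getFeatures2( &features2 );  // driver fills in both structs
+    //   if ( expectAssumeFeatures.shaderExpectAssume )
+    //   {
+    //     // the device supports the SPIR-V expect/assume instructions
+    //   }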
+    PhysicalDeviceShaderExpectAssumeFeatures & operator=( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderExpectAssumeFeatures & operator=( VkPhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderExpectAssumeFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderExpectAssumeFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderExpectAssumeFeatures &
+      setShaderExpectAssume( VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderExpectAssume = shaderExpectAssume_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPhysicalDeviceShaderExpectAssumeFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderExpectAssumeFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderExpectAssumeFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderExpectAssumeFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderExpectAssumeFeatures const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceShaderExpectAssumeFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderExpectAssumeFeatures *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceShaderExpectAssumeFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderExpectAssume );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderExpectAssumeFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderExpectAssume == rhs.shaderExpectAssume );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType              = StructureType::ePhysicalDeviceShaderExpectAssumeFeatures;
+    void *                              pNext              = {};
+    VULKAN_HPP_NAMESPACE::Bool32        shaderExpectAssume = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderExpectAssumeFeatures>
+  {
+    using Type = PhysicalDeviceShaderExpectAssumeFeatures;
+  };
+
+  using PhysicalDeviceShaderExpectAssumeFeaturesKHR = PhysicalDeviceShaderExpectAssumeFeatures;
+
+  // wrapper struct for struct VkPhysicalDeviceShaderFloat16Int8Features, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderFloat16Int8Features.html
+  struct PhysicalDeviceShaderFloat16Int8Features
+  {
+    using NativeType = VkPhysicalDeviceShaderFloat16Int8Features;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {},
+                                                                  VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_    = {},
+                                                                  void *                       pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , shaderFloat16{ shaderFloat16_ }
+      , shaderInt8{ shaderInt8_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderFloat16Int8Features( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderFloat16Int8Features & operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderFloat16Int8Features & operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloat16 = shaderFloat16_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT + { + shaderInt8 = shaderInt8_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderFloat16Int8Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderFloat16Int8Features const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderFloat16Int8Features *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderFloat16, shaderInt8 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloat16 == rhs.shaderFloat16 ) && ( shaderInt8 == rhs.shaderInt8 ); +# endif + } + + bool operator!=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderFloat16Int8Features; + }; + + using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + + // wrapper struct for struct VkPhysicalDeviceShaderFloatControls2Features, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderFloatControls2Features.html + struct PhysicalDeviceShaderFloatControls2Features + { + using 
NativeType = VkPhysicalDeviceShaderFloatControls2Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloatControls2Features; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloatControls2Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderFloatControls2{ shaderFloatControls2_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloatControls2Features( PhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderFloatControls2Features( VkPhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderFloatControls2Features( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderFloatControls2Features & operator=( PhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderFloatControls2Features & operator=( VkPhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloatControls2Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloatControls2Features & + setShaderFloatControls2( VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloatControls2 = shaderFloatControls2_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderFloatControls2Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderFloatControls2Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderFloatControls2Features const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderFloatControls2Features *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderFloatControls2 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderFloatControls2Features const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderFloatControls2Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloatControls2 == rhs.shaderFloatControls2 ); +# endif + } + + bool operator!=( PhysicalDeviceShaderFloatControls2Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloatControls2Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderFloatControls2Features; + }; + + using 
PhysicalDeviceShaderFloatControls2FeaturesKHR = PhysicalDeviceShaderFloatControls2Features; + + // wrapper struct for struct VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT.html + struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderImageInt64Atomics{ shaderImageInt64Atomics_ } + , sparseImageInt64Atomics{ sparseImageInt64Atomics_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & + setShaderImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageInt64Atomics = shaderImageInt64Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & + setSparseImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageInt64Atomics = sparseImageInt64Atomics_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderImageInt64Atomics, sparseImageInt64Atomics ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto 
operator<=>( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderImageInt64Atomics == rhs.shaderImageInt64Atomics ) && + ( sparseImageInt64Atomics == rhs.sparseImageInt64Atomics ); +# endif + } + + bool operator!=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceShaderImageFootprintFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderImageFootprintFeaturesNV.html + struct PhysicalDeviceShaderImageFootprintFeaturesNV + { + using NativeType = VkPhysicalDeviceShaderImageFootprintFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageFootprint{ imageFootprint_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderImageFootprintFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT + { + imageFootprint = imageFootprint_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator 
VkPhysicalDeviceShaderImageFootprintFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageFootprint ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFootprint == rhs.imageFootprint ); +# endif + } + + bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderImageFootprintFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceShaderIntegerDotProductFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderIntegerDotProductFeatures.html + struct PhysicalDeviceShaderIntegerDotProductFeatures + { + using NativeType = VkPhysicalDeviceShaderIntegerDotProductFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderIntegerDotProduct{ shaderIntegerDotProduct_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderIntegerDotProductFeatures( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderIntegerDotProductFeatures( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderIntegerDotProductFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderIntegerDotProductFeatures & operator=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderIntegerDotProductFeatures & operator=( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & + setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT + { + shaderIntegerDotProduct = shaderIntegerDotProduct_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderIntegerDotProductFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderIntegerDotProductFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderIntegerDotProductFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderIntegerDotProductFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderIntegerDotProduct ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderIntegerDotProductFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct ); +# endif + } + + bool operator!=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderIntegerDotProductFeatures; + }; + + using PhysicalDeviceShaderIntegerDotProductFeaturesKHR = PhysicalDeviceShaderIntegerDotProductFeatures; + + // wrapper struct for struct VkPhysicalDeviceShaderIntegerDotProductProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderIntegerDotProductProperties.html + struct PhysicalDeviceShaderIntegerDotProductProperties + { + using NativeType = VkPhysicalDeviceShaderIntegerDotProductProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductProperties( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {}, 
+ VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , integerDotProduct8BitUnsignedAccelerated{ integerDotProduct8BitUnsignedAccelerated_ } + , integerDotProduct8BitSignedAccelerated{ integerDotProduct8BitSignedAccelerated_ } + , integerDotProduct8BitMixedSignednessAccelerated{ integerDotProduct8BitMixedSignednessAccelerated_ } + , integerDotProduct4x8BitPackedUnsignedAccelerated{ integerDotProduct4x8BitPackedUnsignedAccelerated_ } + , integerDotProduct4x8BitPackedSignedAccelerated{ integerDotProduct4x8BitPackedSignedAccelerated_ } + , integerDotProduct4x8BitPackedMixedSignednessAccelerated{ integerDotProduct4x8BitPackedMixedSignednessAccelerated_ } + , integerDotProduct16BitUnsignedAccelerated{ integerDotProduct16BitUnsignedAccelerated_ } + , integerDotProduct16BitSignedAccelerated{ integerDotProduct16BitSignedAccelerated_ } + , integerDotProduct16BitMixedSignednessAccelerated{ integerDotProduct16BitMixedSignednessAccelerated_ } + , integerDotProduct32BitUnsignedAccelerated{ integerDotProduct32BitUnsignedAccelerated_ } + , integerDotProduct32BitSignedAccelerated{ integerDotProduct32BitSignedAccelerated_ } + , integerDotProduct32BitMixedSignednessAccelerated{ integerDotProduct32BitMixedSignednessAccelerated_ } + , integerDotProduct64BitUnsignedAccelerated{ integerDotProduct64BitUnsignedAccelerated_ } + , integerDotProduct64BitSignedAccelerated{ integerDotProduct64BitSignedAccelerated_ } + , integerDotProduct64BitMixedSignednessAccelerated{ integerDotProduct64BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating8BitSignedAccelerated{ integerDotProductAccumulatingSaturating8BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated{ 
integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ } + , integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating16BitSignedAccelerated{ integerDotProductAccumulatingSaturating16BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating32BitSignedAccelerated{ integerDotProductAccumulatingSaturating32BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating64BitSignedAccelerated{ integerDotProductAccumulatingSaturating64BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderIntegerDotProductProperties( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderIntegerDotProductProperties( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderIntegerDotProductProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderIntegerDotProductProperties & operator=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderIntegerDotProductProperties & operator=( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderIntegerDotProductProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderIntegerDotProductProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderIntegerDotProductProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderIntegerDotProductProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + integerDotProduct8BitUnsignedAccelerated, + integerDotProduct8BitSignedAccelerated, + integerDotProduct8BitMixedSignednessAccelerated, + integerDotProduct4x8BitPackedUnsignedAccelerated, + 
integerDotProduct4x8BitPackedSignedAccelerated, + integerDotProduct4x8BitPackedMixedSignednessAccelerated, + integerDotProduct16BitUnsignedAccelerated, + integerDotProduct16BitSignedAccelerated, + integerDotProduct16BitMixedSignednessAccelerated, + integerDotProduct32BitUnsignedAccelerated, + integerDotProduct32BitSignedAccelerated, + integerDotProduct32BitMixedSignednessAccelerated, + integerDotProduct64BitUnsignedAccelerated, + integerDotProduct64BitSignedAccelerated, + integerDotProduct64BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating8BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating8BitSignedAccelerated, + integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating16BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating16BitSignedAccelerated, + integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating32BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating32BitSignedAccelerated, + integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating64BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating64BitSignedAccelerated, + integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderIntegerDotProductProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated ) && + ( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated ) && + ( integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated ) && + ( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated ) && + ( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated ) && + ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated ) && + ( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated ) && + ( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated ) && + ( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated ) && + ( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated ) && + ( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated ) && + ( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated ) && + ( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated ) && + ( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated ) && + ( integerDotProduct64BitMixedSignednessAccelerated == 
rhs.integerDotProduct64BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ); +# endif + } + + bool operator!=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderIntegerDotProductProperties; + }; + + using PhysicalDeviceShaderIntegerDotProductPropertiesKHR = PhysicalDeviceShaderIntegerDotProductProperties; + + // wrapper struct for struct VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL.html + struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL + { + using NativeType = VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderIntegerFunctions2{ shaderIntegerFunctions2_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & + operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & + setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT + { + shaderIntegerFunctions2 = shaderIntegerFunctions2_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderIntegerFunctions2 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 ); +# endif + } + + bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + }; + + // wrapper struct for struct VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR.html + struct PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR + { + using NativeType = VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderMaximalReconvergenceFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderMaximalReconvergence_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderMaximalReconvergence{ shaderMaximalReconvergence_ } + { + } + + VULKAN_HPP_CONSTEXPR + 
PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR( VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR & + operator=( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR & operator=( VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR & + setShaderMaximalReconvergence( VULKAN_HPP_NAMESPACE::Bool32 shaderMaximalReconvergence_ ) VULKAN_HPP_NOEXCEPT + { + shaderMaximalReconvergence = shaderMaximalReconvergence_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderMaximalReconvergence ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderMaximalReconvergence == rhs.shaderMaximalReconvergence ); +# endif + } + + bool operator!=( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderMaximalReconvergenceFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderMaximalReconvergence = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT.html + struct PhysicalDeviceShaderModuleIdentifierFeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT; + + static const bool allowDuplicate = false; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderModuleIdentifierFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderModuleIdentifier{ shaderModuleIdentifier_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderModuleIdentifierFeaturesEXT( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderModuleIdentifierFeaturesEXT( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderModuleIdentifierFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderModuleIdentifierFeaturesEXT & operator=( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderModuleIdentifierFeaturesEXT & operator=( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierFeaturesEXT & + setShaderModuleIdentifier( VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier_ ) VULKAN_HPP_NOEXCEPT + { + shaderModuleIdentifier = shaderModuleIdentifier_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderModuleIdentifier ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderModuleIdentifier == rhs.shaderModuleIdentifier ); +# endif + } + + bool operator!=( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderModuleIdentifierFeaturesEXT; + }; + 
+  // wrapper struct for struct VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT.html
+  struct PhysicalDeviceShaderModuleIdentifierPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14
+      PhysicalDeviceShaderModuleIdentifierPropertiesEXT( std::array<uint8_t, VK_UUID_SIZE> const & shaderModuleIdentifierAlgorithmUUID_ = {},
+                                                         void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , shaderModuleIdentifierAlgorithmUUID{ shaderModuleIdentifierAlgorithmUUID_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14
+      PhysicalDeviceShaderModuleIdentifierPropertiesEXT( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderModuleIdentifierPropertiesEXT( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderModuleIdentifierPropertiesEXT( *reinterpret_cast<PhysicalDeviceShaderModuleIdentifierPropertiesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderModuleIdentifierPropertiesEXT &
+      operator=( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderModuleIdentifierPropertiesEXT & operator=( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderModuleIdentifierAlgorithmUUID );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderModuleIdentifierAlgorithmUUID == rhs.shaderModuleIdentifierAlgorithmUUID );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType                         sType                               = StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+    void *                                                      pNext                               = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> shaderModuleIdentifierAlgorithmUUID = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT>
+  {
+    using Type = PhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+ }; + + // wrapper struct for struct VkPhysicalDeviceShaderObjectFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderObjectFeaturesEXT.html + struct PhysicalDeviceShaderObjectFeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderObjectFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderObjectFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderObjectFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderObject_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderObject{ shaderObject_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderObjectFeaturesEXT( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderObjectFeaturesEXT( VkPhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderObjectFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderObjectFeaturesEXT & operator=( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderObjectFeaturesEXT & operator=( VkPhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectFeaturesEXT & setShaderObject( VULKAN_HPP_NAMESPACE::Bool32 shaderObject_ ) VULKAN_HPP_NOEXCEPT + { + shaderObject = shaderObject_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderObjectFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderObjectFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderObjectFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderObjectFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderObject ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderObjectFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderObject == rhs.shaderObject ); +# endif + } + + bool operator!=( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderObjectFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderObject = {}; + }; + + template <> + struct CppType + { + using Type = 
PhysicalDeviceShaderObjectFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceShaderObjectPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderObjectPropertiesEXT.html + struct PhysicalDeviceShaderObjectPropertiesEXT + { + using NativeType = VkPhysicalDeviceShaderObjectPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderObjectPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectPropertiesEXT( std::array const & shaderBinaryUUID_ = {}, + uint32_t shaderBinaryVersion_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderBinaryUUID{ shaderBinaryUUID_ } + , shaderBinaryVersion{ shaderBinaryVersion_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectPropertiesEXT( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderObjectPropertiesEXT( VkPhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderObjectPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderObjectPropertiesEXT & operator=( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderObjectPropertiesEXT & operator=( VkPhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderObjectPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderObjectPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderObjectPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderObjectPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple const &, uint32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderBinaryUUID, shaderBinaryVersion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderObjectPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBinaryUUID == rhs.shaderBinaryUUID ) && + ( shaderBinaryVersion == rhs.shaderBinaryVersion ); +# endif + } + + bool operator!=( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderObjectPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D shaderBinaryUUID = {}; + uint32_t shaderBinaryVersion = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderObjectPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceShaderQuadControlFeaturesKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderQuadControlFeaturesKHR.html
+  struct PhysicalDeviceShaderQuadControlFeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceShaderQuadControlFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderQuadControlFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderQuadControlFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderQuadControl_ = {},
+                                                                     void *                       pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , shaderQuadControl{ shaderQuadControl_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderQuadControlFeaturesKHR( PhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderQuadControlFeaturesKHR( VkPhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderQuadControlFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderQuadControlFeaturesKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderQuadControlFeaturesKHR & operator=( PhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderQuadControlFeaturesKHR & operator=( VkPhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderQuadControlFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderQuadControlFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderQuadControlFeaturesKHR &
+      setShaderQuadControl( VULKAN_HPP_NAMESPACE::Bool32 shaderQuadControl_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderQuadControl = shaderQuadControl_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPhysicalDeviceShaderQuadControlFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderQuadControlFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderQuadControlFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderQuadControlFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderQuadControlFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceShaderQuadControlFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderQuadControlFeaturesKHR *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceShaderQuadControlFeaturesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderQuadControl );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderQuadControlFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderQuadControl == rhs.shaderQuadControl );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderQuadControlFeaturesKHR;
+    void *                              pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32
shaderQuadControl = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderQuadControlFeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR.html + struct PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR + { + using NativeType = VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderRelaxedExtendedInstruction_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderRelaxedExtendedInstruction{ shaderRelaxedExtendedInstruction_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR( VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR( + *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR & + operator=( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR & + operator=( VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR & + setShaderRelaxedExtendedInstruction( VULKAN_HPP_NAMESPACE::Bool32 shaderRelaxedExtendedInstruction_ ) VULKAN_HPP_NOEXCEPT + { + shaderRelaxedExtendedInstruction = shaderRelaxedExtendedInstruction_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderRelaxedExtendedInstruction ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & 
) const = default; +#else + bool operator==( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderRelaxedExtendedInstruction == rhs.shaderRelaxedExtendedInstruction ); +# endif + } + + bool operator!=( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRelaxedExtendedInstruction = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT.html + struct PhysicalDeviceShaderReplicatedCompositesFeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderReplicatedCompositesFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderReplicatedComposites_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderReplicatedComposites{ shaderReplicatedComposites_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderReplicatedCompositesFeaturesEXT( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderReplicatedCompositesFeaturesEXT( VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderReplicatedCompositesFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderReplicatedCompositesFeaturesEXT & + operator=( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderReplicatedCompositesFeaturesEXT & operator=( VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderReplicatedCompositesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderReplicatedCompositesFeaturesEXT & + setShaderReplicatedComposites( VULKAN_HPP_NAMESPACE::Bool32 shaderReplicatedComposites_ ) VULKAN_HPP_NOEXCEPT + { + shaderReplicatedComposites = shaderReplicatedComposites_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderReplicatedComposites ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderReplicatedComposites == rhs.shaderReplicatedComposites ); +# endif + } + + bool operator!=( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderReplicatedComposites = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceShaderSMBuiltinsFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderSMBuiltinsFeaturesNV.html + struct PhysicalDeviceShaderSMBuiltinsFeaturesNV + { + using NativeType = VkPhysicalDeviceShaderSMBuiltinsFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderSMBuiltins{ shaderSMBuiltins_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderSMBuiltinsFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT + { + shaderSMBuiltins = shaderSMBuiltins_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator 
VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderSMBuiltins ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMBuiltins == rhs.shaderSMBuiltins ); +# endif + } + + bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSMBuiltinsFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceShaderSMBuiltinsPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderSMBuiltinsPropertiesNV.html + struct PhysicalDeviceShaderSMBuiltinsPropertiesNV + { + using NativeType = VkPhysicalDeviceShaderSMBuiltinsPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_ = {}, uint32_t shaderWarpsPerSM_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderSMCount{ shaderSMCount_ } + , shaderWarpsPerSM{ shaderWarpsPerSM_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderSMBuiltinsPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderSMCount, shaderWarpsPerSM ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMCount == rhs.shaderSMCount ) && ( shaderWarpsPerSM == rhs.shaderWarpsPerSM ); +# endif + } + + bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV; + void * pNext = {}; + uint32_t shaderSMCount = {}; + uint32_t shaderWarpsPerSM = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSMBuiltinsPropertiesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures.html + struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures + { + using NativeType = VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderSubgroupExtendedTypes{ shaderSubgroupExtendedTypes_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderSubgroupExtendedTypesFeatures( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderSubgroupExtendedTypesFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderSubgroupExtendedTypesFeatures & + operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & + setShaderSubgroupExtendedTypes( 
VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderSubgroupExtendedTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ); +# endif + } + + bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; + }; + + using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; + + // wrapper struct for struct VkPhysicalDeviceShaderSubgroupRotateFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderSubgroupRotateFeatures.html + struct PhysicalDeviceShaderSubgroupRotateFeatures + { + using NativeType = VkPhysicalDeviceShaderSubgroupRotateFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupRotateFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupRotateFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderSubgroupRotate{ shaderSubgroupRotate_ } + , shaderSubgroupRotateClustered{ shaderSubgroupRotateClustered_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupRotateFeatures( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSubgroupRotateFeatures( VkPhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderSubgroupRotateFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderSubgroupRotateFeatures & operator=( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderSubgroupRotateFeatures & operator=( VkPhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupRotateFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupRotateFeatures & + setShaderSubgroupRotate( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupRotate = shaderSubgroupRotate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupRotateFeatures & + setShaderSubgroupRotateClustered( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupRotateClustered = shaderSubgroupRotateClustered_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderSubgroupRotateFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSubgroupRotateFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSubgroupRotateFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSubgroupRotateFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderSubgroupRotate, shaderSubgroupRotateClustered ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSubgroupRotateFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupRotate == rhs.shaderSubgroupRotate ) && + ( shaderSubgroupRotateClustered == rhs.shaderSubgroupRotateClustered ); +# endif + } + + bool operator!=( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupRotateFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSubgroupRotateFeatures; + }; + + using PhysicalDeviceShaderSubgroupRotateFeaturesKHR = PhysicalDeviceShaderSubgroupRotateFeatures; + + // wrapper struct for struct VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.html + struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR + { + using NativeType = VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderSubgroupUniformControlFlow{ shaderSubgroupUniformControlFlow_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( + *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & + operator=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & + operator=( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & + setShaderSubgroupUniformControlFlow( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupUniformControlFlow = shaderSubgroupUniformControlFlow_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderSubgroupUniformControlFlow ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupUniformControlFlow == rhs.shaderSubgroupUniformControlFlow ); +# endif + } + + bool operator!=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceShaderTerminateInvocationFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderTerminateInvocationFeatures.html + struct PhysicalDeviceShaderTerminateInvocationFeatures + { + using NativeType = VkPhysicalDeviceShaderTerminateInvocationFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderTerminateInvocation{ shaderTerminateInvocation_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderTerminateInvocationFeatures( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderTerminateInvocationFeatures( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderTerminateInvocationFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderTerminateInvocationFeatures & operator=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderTerminateInvocationFeatures & operator=( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures & + setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT + { + shaderTerminateInvocation = shaderTerminateInvocation_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderTerminateInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderTerminateInvocationFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderTerminateInvocationFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderTerminateInvocationFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderTerminateInvocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderTerminateInvocationFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTerminateInvocation == rhs.shaderTerminateInvocation ); +# endif + } + + bool operator!=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderTerminateInvocationFeatures; + }; + + using PhysicalDeviceShaderTerminateInvocationFeaturesKHR = PhysicalDeviceShaderTerminateInvocationFeatures; + + // wrapper struct for struct VkPhysicalDeviceShaderTileImageFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderTileImageFeaturesEXT.html + struct PhysicalDeviceShaderTileImageFeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderTileImageFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImageFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderTileImageColorReadAccess{ shaderTileImageColorReadAccess_ } + , shaderTileImageDepthReadAccess{ shaderTileImageDepthReadAccess_ } + , shaderTileImageStencilReadAccess{ shaderTileImageStencilReadAccess_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImageFeaturesEXT( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderTileImageFeaturesEXT( VkPhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderTileImageFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderTileImageFeaturesEXT & operator=( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderTileImageFeaturesEXT & operator=( VkPhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT & + setShaderTileImageColorReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess_ ) VULKAN_HPP_NOEXCEPT + { + shaderTileImageColorReadAccess = shaderTileImageColorReadAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT & + setShaderTileImageDepthReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess_ ) VULKAN_HPP_NOEXCEPT + { + shaderTileImageDepthReadAccess = shaderTileImageDepthReadAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceShaderTileImageFeaturesEXT & + setShaderTileImageStencilReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess_ ) VULKAN_HPP_NOEXCEPT + { + shaderTileImageStencilReadAccess = shaderTileImageStencilReadAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderTileImageFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderTileImageFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderTileImageFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderTileImageFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderTileImageColorReadAccess, shaderTileImageDepthReadAccess, shaderTileImageStencilReadAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderTileImageFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTileImageColorReadAccess == rhs.shaderTileImageColorReadAccess ) && + ( shaderTileImageDepthReadAccess == rhs.shaderTileImageDepthReadAccess ) && + ( shaderTileImageStencilReadAccess == rhs.shaderTileImageStencilReadAccess ); +# endif + } + + bool operator!=( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderTileImageFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceShaderTileImagePropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShaderTileImagePropertiesEXT.html + struct PhysicalDeviceShaderTileImagePropertiesEXT + { + using NativeType = VkPhysicalDeviceShaderTileImagePropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImagePropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageCoherentReadAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadSampleFromPixelRateInvocation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadFromHelperInvocation_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderTileImageCoherentReadAccelerated{ shaderTileImageCoherentReadAccelerated_ } + , shaderTileImageReadSampleFromPixelRateInvocation{ shaderTileImageReadSampleFromPixelRateInvocation_ } + , 
shaderTileImageReadFromHelperInvocation{ shaderTileImageReadFromHelperInvocation_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImagePropertiesEXT( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderTileImagePropertiesEXT( VkPhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderTileImagePropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderTileImagePropertiesEXT & operator=( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderTileImagePropertiesEXT & operator=( VkPhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderTileImagePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderTileImagePropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderTileImagePropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderTileImagePropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, shaderTileImageCoherentReadAccelerated, shaderTileImageReadSampleFromPixelRateInvocation, shaderTileImageReadFromHelperInvocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderTileImagePropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTileImageCoherentReadAccelerated == rhs.shaderTileImageCoherentReadAccelerated ) && + ( shaderTileImageReadSampleFromPixelRateInvocation == rhs.shaderTileImageReadSampleFromPixelRateInvocation ) && + ( shaderTileImageReadFromHelperInvocation == rhs.shaderTileImageReadFromHelperInvocation ); +# endif + } + + bool operator!=( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageCoherentReadAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadSampleFromPixelRateInvocation = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadFromHelperInvocation = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderTileImagePropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceShadingRateImageFeaturesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShadingRateImageFeaturesNV.html + struct PhysicalDeviceShadingRateImageFeaturesNV + { + using NativeType = VkPhysicalDeviceShadingRateImageFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV; + +#if 
!defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shadingRateImage{ shadingRateImage_ } + , shadingRateCoarseSampleOrder{ shadingRateCoarseSampleOrder_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShadingRateImageFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShadingRateImageFeaturesNV & operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShadingRateImageFeaturesNV & operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateImage = shadingRateImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & + setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShadingRateImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShadingRateImageFeaturesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShadingRateImageFeaturesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shadingRateImage, shadingRateCoarseSampleOrder ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateImage == rhs.shadingRateImage ) && + ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder ); +# endif + } + + bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV; + void * pNext = 
{}; + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {}; + VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShadingRateImageFeaturesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceShadingRateImagePropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceShadingRateImagePropertiesNV.html + struct PhysicalDeviceShadingRateImagePropertiesNV + { + using NativeType = VkPhysicalDeviceShadingRateImagePropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {}, + uint32_t shadingRatePaletteSize_ = {}, + uint32_t shadingRateMaxCoarseSamples_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shadingRateTexelSize{ shadingRateTexelSize_ } + , shadingRatePaletteSize{ shadingRatePaletteSize_ } + , shadingRateMaxCoarseSamples{ shadingRateMaxCoarseSamples_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShadingRateImagePropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShadingRateImagePropertiesNV & operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShadingRateImagePropertiesNV & operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShadingRateImagePropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShadingRateImagePropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShadingRateImagePropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shadingRateTexelSize, shadingRatePaletteSize, shadingRateMaxCoarseSamples ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateTexelSize == rhs.shadingRateTexelSize ) && + ( shadingRatePaletteSize == rhs.shadingRatePaletteSize ) && ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples ); +# endif + } + + bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return 
!operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {}; + uint32_t shadingRatePaletteSize = {}; + uint32_t shadingRateMaxCoarseSamples = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShadingRateImagePropertiesNV; + }; + + // wrapper struct for struct VkPhysicalDeviceSparseImageFormatInfo2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSparseImageFormatInfo2.html + struct PhysicalDeviceSparseImageFormatInfo2 + { + using NativeType = VkPhysicalDeviceSparseImageFormatInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSparseImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , format{ format_ } + , type{ type_ } + , samples{ samples_ } + , usage{ usage_ } + , tiling{ tiling_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSparseImageFormatInfo2( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSparseImageFormatInfo2 & operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSparseImageFormatInfo2 & operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + { + tiling = tiling_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + 
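+    // A minimal usage sketch for the setters above, assuming the standard vulkan.hpp entry point
+    // vk::PhysicalDevice::getSparseImageFormatProperties2; the physicalDevice handle and the
+    // format/usage values shown are illustrative assumptions, not part of this generated header:
+    //
+    //   vk::PhysicalDeviceSparseImageFormatInfo2 info;
+    //   info.setFormat( vk::Format::eR8G8B8A8Unorm )
+    //       .setType( vk::ImageType::e2D )
+    //       .setSamples( vk::SampleCountFlagBits::e1 )
+    //       .setUsage( vk::ImageUsageFlagBits::eSampled )
+    //       .setTiling( vk::ImageTiling::eOptimal );
+    //   auto sparseProps = physicalDevice.getSparseImageFormatProperties2( info );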
operator VkPhysicalDeviceSparseImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSparseImageFormatInfo2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSparseImageFormatInfo2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, format, type, samples, usage, tiling ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const & ) const = default; +#else + bool operator==( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && ( samples == rhs.samples ) && + ( usage == rhs.usage ) && ( tiling == rhs.tiling ); +# endif + } + + bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSparseImageFormatInfo2; + }; + + using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2; + + // wrapper struct for struct VkPhysicalDeviceSubgroupProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSubgroupProperties.html + struct PhysicalDeviceSubgroupProperties + { + using NativeType = VkPhysicalDeviceSubgroupProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( uint32_t subgroupSize_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {}, + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , subgroupSize{ subgroupSize_ } + , supportedStages{ supportedStages_ } + , supportedOperations{ supportedOperations_ } + , quadOperationsInAllStages{ quadOperationsInAllStages_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubgroupProperties( *reinterpret_cast( &rhs ) ) + { 
+ } + + PhysicalDeviceSubgroupProperties & operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSubgroupProperties & operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceSubgroupProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubgroupProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubgroupProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, subgroupSize, supportedStages, supportedOperations, quadOperationsInAllStages ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubgroupProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSize == rhs.subgroupSize ) && ( supportedStages == rhs.supportedStages ) && + ( supportedOperations == rhs.supportedOperations ) && ( quadOperationsInAllStages == rhs.quadOperationsInAllStages ); +# endif + } + + bool operator!=( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties; + void * pNext = {}; + uint32_t subgroupSize = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {}; + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {}; + VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSubgroupProperties; + }; + + // wrapper struct for struct VkPhysicalDeviceSubgroupSizeControlFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSubgroupSizeControlFeatures.html + struct PhysicalDeviceSubgroupSizeControlFeatures + { + using NativeType = VkPhysicalDeviceSubgroupSizeControlFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , subgroupSizeControl{ subgroupSizeControl_ } + , computeFullSubgroups{ computeFullSubgroups_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubgroupSizeControlFeatures( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : 
PhysicalDeviceSubgroupSizeControlFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSubgroupSizeControlFeatures & operator=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSubgroupSizeControlFeatures & operator=( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & + setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT + { + subgroupSizeControl = subgroupSizeControl_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & + setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT + { + computeFullSubgroups = computeFullSubgroups_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceSubgroupSizeControlFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubgroupSizeControlFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubgroupSizeControlFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubgroupSizeControlFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, subgroupSizeControl, computeFullSubgroups ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubgroupSizeControlFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSizeControl == rhs.subgroupSizeControl ) && + ( computeFullSubgroups == rhs.computeFullSubgroups ); +# endif + } + + bool operator!=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSubgroupSizeControlFeatures; + }; + + using PhysicalDeviceSubgroupSizeControlFeaturesEXT = PhysicalDeviceSubgroupSizeControlFeatures; + + // wrapper struct for struct VkPhysicalDeviceSubgroupSizeControlProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSubgroupSizeControlProperties.html + struct PhysicalDeviceSubgroupSizeControlProperties + { + using NativeType = VkPhysicalDeviceSubgroupSizeControlProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType 
structureType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties( uint32_t minSubgroupSize_ = {}, + uint32_t maxSubgroupSize_ = {}, + uint32_t maxComputeWorkgroupSubgroups_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minSubgroupSize{ minSubgroupSize_ } + , maxSubgroupSize{ maxSubgroupSize_ } + , maxComputeWorkgroupSubgroups{ maxComputeWorkgroupSubgroups_ } + , requiredSubgroupSizeStages{ requiredSubgroupSizeStages_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubgroupSizeControlProperties( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubgroupSizeControlProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSubgroupSizeControlProperties & operator=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSubgroupSizeControlProperties & operator=( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceSubgroupSizeControlProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubgroupSizeControlProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubgroupSizeControlProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubgroupSizeControlProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minSubgroupSize, maxSubgroupSize, maxComputeWorkgroupSubgroups, requiredSubgroupSizeStages ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubgroupSizeControlProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) && ( maxSubgroupSize == rhs.maxSubgroupSize ) && + ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages ); +# endif + } + + bool operator!=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties; + void * pNext = {}; + uint32_t minSubgroupSize = {}; + uint32_t maxSubgroupSize = {}; + uint32_t maxComputeWorkgroupSubgroups = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSubgroupSizeControlProperties; + }; + + using 
PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties; + + // wrapper struct for struct VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT.html + struct PhysicalDeviceSubpassMergeFeedbackFeaturesEXT + { + using NativeType = VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , subpassMergeFeedback{ subpassMergeFeedback_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & operator=( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & operator=( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & + setSubpassMergeFeedback( VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback_ ) VULKAN_HPP_NOEXCEPT + { + subpassMergeFeedback = subpassMergeFeedback_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, subpassMergeFeedback ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassMergeFeedback == rhs.subpassMergeFeedback ); +# endif + } + + bool operator!=( 
PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceSubpassShadingFeaturesHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSubpassShadingFeaturesHUAWEI.html + struct PhysicalDeviceSubpassShadingFeaturesHUAWEI + { + using NativeType = VkPhysicalDeviceSubpassShadingFeaturesHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , subpassShading{ subpassShading_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubpassShadingFeaturesHUAWEI( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubpassShadingFeaturesHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSubpassShadingFeaturesHUAWEI & operator=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSubpassShadingFeaturesHUAWEI & operator=( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI & setSubpassShading( VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ ) VULKAN_HPP_NOEXCEPT + { + subpassShading = subpassShading_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, subpassShading ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + 
return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassShading == rhs.subpassShading ); +# endif + } + + bool operator!=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subpassShading = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSubpassShadingFeaturesHUAWEI; + }; + + // wrapper struct for struct VkPhysicalDeviceSubpassShadingPropertiesHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSubpassShadingPropertiesHUAWEI.html + struct PhysicalDeviceSubpassShadingPropertiesHUAWEI + { + using NativeType = VkPhysicalDeviceSubpassShadingPropertiesHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI( uint32_t maxSubpassShadingWorkgroupSizeAspectRatio_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxSubpassShadingWorkgroupSizeAspectRatio{ maxSubpassShadingWorkgroupSizeAspectRatio_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubpassShadingPropertiesHUAWEI( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubpassShadingPropertiesHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSubpassShadingPropertiesHUAWEI & operator=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSubpassShadingPropertiesHUAWEI & operator=( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxSubpassShadingWorkgroupSizeAspectRatio ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSubpassShadingWorkgroupSizeAspectRatio == 
rhs.maxSubpassShadingWorkgroupSizeAspectRatio ); +# endif + } + + bool operator!=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI; + void * pNext = {}; + uint32_t maxSubpassShadingWorkgroupSizeAspectRatio = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSubpassShadingPropertiesHUAWEI; + }; + + // wrapper struct for struct VkPhysicalDeviceSurfaceInfo2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSurfaceInfo2KHR.html + struct PhysicalDeviceSurfaceInfo2KHR + { + using NativeType = VkPhysicalDeviceSurfaceInfo2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , surface{ surface_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSurfaceInfo2KHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSurfaceInfo2KHR & operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSurfaceInfo2KHR & operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT + { + surface = surface_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceSurfaceInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSurfaceInfo2KHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSurfaceInfo2KHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, surface ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const & ) const = default; +#else + bool operator==( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surface == rhs.surface ); +# endif + } + + bool operator!=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return 
!operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSurfaceInfo2KHR; + }; + + // wrapper struct for struct VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT.html + struct PhysicalDeviceSwapchainMaintenance1FeaturesEXT + { + using NativeType = VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSwapchainMaintenance1FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 swapchainMaintenance1_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , swapchainMaintenance1{ swapchainMaintenance1_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSwapchainMaintenance1FeaturesEXT( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSwapchainMaintenance1FeaturesEXT( VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSwapchainMaintenance1FeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSwapchainMaintenance1FeaturesEXT & operator=( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSwapchainMaintenance1FeaturesEXT & operator=( VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSwapchainMaintenance1FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSwapchainMaintenance1FeaturesEXT & + setSwapchainMaintenance1( VULKAN_HPP_NAMESPACE::Bool32 swapchainMaintenance1_ ) VULKAN_HPP_NOEXCEPT + { + swapchainMaintenance1 = swapchainMaintenance1_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchainMaintenance1 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( 
VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainMaintenance1 == rhs.swapchainMaintenance1 ); +# endif + } + + bool operator!=( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 swapchainMaintenance1 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSwapchainMaintenance1FeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceSynchronization2Features, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSynchronization2Features.html + struct PhysicalDeviceSynchronization2Features + { + using NativeType = VkPhysicalDeviceSynchronization2Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSynchronization2Features; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , synchronization2{ synchronization2_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSynchronization2Features( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSynchronization2Features( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSynchronization2Features & operator=( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSynchronization2Features & operator=( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT + { + synchronization2 = synchronization2_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceSynchronization2Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSynchronization2Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSynchronization2Features const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSynchronization2Features *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, synchronization2 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSynchronization2Features const & ) const = 
default; +#else + bool operator==( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( synchronization2 == rhs.synchronization2 ); +# endif + } + + bool operator!=( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSynchronization2Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSynchronization2Features; + }; + + using PhysicalDeviceSynchronization2FeaturesKHR = PhysicalDeviceSynchronization2Features; + + // wrapper struct for struct VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT.html + struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT + { + using NativeType = VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , texelBufferAlignment{ texelBufferAlignment_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceTexelBufferAlignmentFeaturesEXT( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTexelBufferAlignmentFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & + setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT + { + texelBufferAlignment = texelBufferAlignment_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( 
this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, texelBufferAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( texelBufferAlignment == rhs.texelBufferAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceTexelBufferAlignmentFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceTexelBufferAlignmentProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTexelBufferAlignmentProperties.html + struct PhysicalDeviceTexelBufferAlignmentProperties + { + using NativeType = VkPhysicalDeviceTexelBufferAlignmentProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties( VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , storageTexelBufferOffsetAlignmentBytes{ storageTexelBufferOffsetAlignmentBytes_ } + , storageTexelBufferOffsetSingleTexelAlignment{ storageTexelBufferOffsetSingleTexelAlignment_ } + , uniformTexelBufferOffsetAlignmentBytes{ uniformTexelBufferOffsetAlignmentBytes_ } + , uniformTexelBufferOffsetSingleTexelAlignment{ uniformTexelBufferOffsetSingleTexelAlignment_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTexelBufferAlignmentProperties( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTexelBufferAlignmentProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceTexelBufferAlignmentProperties & operator=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceTexelBufferAlignmentProperties & operator=( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceTexelBufferAlignmentProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkPhysicalDeviceTexelBufferAlignmentProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTexelBufferAlignmentProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTexelBufferAlignmentProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + storageTexelBufferOffsetAlignmentBytes, + storageTexelBufferOffsetSingleTexelAlignment, + uniformTexelBufferOffsetAlignmentBytes, + uniformTexelBufferOffsetSingleTexelAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTexelBufferAlignmentProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) && + ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) && + ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) && + ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceTexelBufferAlignmentProperties; + }; + + using PhysicalDeviceTexelBufferAlignmentPropertiesEXT = PhysicalDeviceTexelBufferAlignmentProperties; + + // wrapper struct for struct VkPhysicalDeviceTextureCompressionASTCHDRFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTextureCompressionASTCHDRFeatures.html + struct PhysicalDeviceTextureCompressionASTCHDRFeatures + { + using NativeType = VkPhysicalDeviceTextureCompressionASTCHDRFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , textureCompressionASTC_HDR{ textureCompressionASTC_HDR_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceTextureCompressionASTCHDRFeatures( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTextureCompressionASTCHDRFeatures( 
VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTextureCompressionASTCHDRFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & + setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionASTC_HDR = textureCompressionASTC_HDR_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, textureCompressionASTC_HDR ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR ); +# endif + } + + bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceTextureCompressionASTCHDRFeatures; + }; + + using PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT = PhysicalDeviceTextureCompressionASTCHDRFeatures; + + // wrapper struct for struct VkPhysicalDeviceTileMemoryHeapFeaturesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTileMemoryHeapFeaturesQCOM.html + struct PhysicalDeviceTileMemoryHeapFeaturesQCOM + { + using NativeType = VkPhysicalDeviceTileMemoryHeapFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTileMemoryHeapFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTileMemoryHeapFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 tileMemoryHeap_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , tileMemoryHeap{ tileMemoryHeap_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTileMemoryHeapFeaturesQCOM( PhysicalDeviceTileMemoryHeapFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTileMemoryHeapFeaturesQCOM( VkPhysicalDeviceTileMemoryHeapFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTileMemoryHeapFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceTileMemoryHeapFeaturesQCOM & operator=( PhysicalDeviceTileMemoryHeapFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceTileMemoryHeapFeaturesQCOM & operator=( VkPhysicalDeviceTileMemoryHeapFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileMemoryHeapFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileMemoryHeapFeaturesQCOM & setTileMemoryHeap( VULKAN_HPP_NAMESPACE::Bool32 tileMemoryHeap_ ) VULKAN_HPP_NOEXCEPT + { + tileMemoryHeap = tileMemoryHeap_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceTileMemoryHeapFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTileMemoryHeapFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTileMemoryHeapFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTileMemoryHeapFeaturesQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, tileMemoryHeap ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTileMemoryHeapFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceTileMemoryHeapFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tileMemoryHeap == rhs.tileMemoryHeap ); +# endif + } + + bool operator!=( PhysicalDeviceTileMemoryHeapFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTileMemoryHeapFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 tileMemoryHeap = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceTileMemoryHeapFeaturesQCOM; + }; + + // wrapper struct for struct VkPhysicalDeviceTileMemoryHeapPropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTileMemoryHeapPropertiesQCOM.html + struct PhysicalDeviceTileMemoryHeapPropertiesQCOM + { + using NativeType = VkPhysicalDeviceTileMemoryHeapPropertiesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType 
structureType = StructureType::ePhysicalDeviceTileMemoryHeapPropertiesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTileMemoryHeapPropertiesQCOM( VULKAN_HPP_NAMESPACE::Bool32 queueSubmitBoundary_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tileBufferTransfers_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , queueSubmitBoundary{ queueSubmitBoundary_ } + , tileBufferTransfers{ tileBufferTransfers_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTileMemoryHeapPropertiesQCOM( PhysicalDeviceTileMemoryHeapPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTileMemoryHeapPropertiesQCOM( VkPhysicalDeviceTileMemoryHeapPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTileMemoryHeapPropertiesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceTileMemoryHeapPropertiesQCOM & operator=( PhysicalDeviceTileMemoryHeapPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceTileMemoryHeapPropertiesQCOM & operator=( VkPhysicalDeviceTileMemoryHeapPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileMemoryHeapPropertiesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileMemoryHeapPropertiesQCOM & + setQueueSubmitBoundary( VULKAN_HPP_NAMESPACE::Bool32 queueSubmitBoundary_ ) VULKAN_HPP_NOEXCEPT + { + queueSubmitBoundary = queueSubmitBoundary_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileMemoryHeapPropertiesQCOM & + setTileBufferTransfers( VULKAN_HPP_NAMESPACE::Bool32 tileBufferTransfers_ ) VULKAN_HPP_NOEXCEPT + { + tileBufferTransfers = tileBufferTransfers_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceTileMemoryHeapPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTileMemoryHeapPropertiesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTileMemoryHeapPropertiesQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTileMemoryHeapPropertiesQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, queueSubmitBoundary, tileBufferTransfers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTileMemoryHeapPropertiesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceTileMemoryHeapPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueSubmitBoundary == rhs.queueSubmitBoundary ) && + ( tileBufferTransfers == rhs.tileBufferTransfers ); +# endif + } + + bool operator!=( PhysicalDeviceTileMemoryHeapPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceTileMemoryHeapPropertiesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 queueSubmitBoundary = {}; + VULKAN_HPP_NAMESPACE::Bool32 tileBufferTransfers = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceTileMemoryHeapPropertiesQCOM; + }; + + // wrapper struct for struct VkPhysicalDeviceTilePropertiesFeaturesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTilePropertiesFeaturesQCOM.html + struct PhysicalDeviceTilePropertiesFeaturesQCOM + { + using NativeType = VkPhysicalDeviceTilePropertiesFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTilePropertiesFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 tileProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , tileProperties{ tileProperties_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTilePropertiesFeaturesQCOM( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTilePropertiesFeaturesQCOM( VkPhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTilePropertiesFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceTilePropertiesFeaturesQCOM & operator=( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceTilePropertiesFeaturesQCOM & operator=( VkPhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTilePropertiesFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTilePropertiesFeaturesQCOM & setTileProperties( VULKAN_HPP_NAMESPACE::Bool32 tileProperties_ ) VULKAN_HPP_NOEXCEPT + { + tileProperties = tileProperties_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceTilePropertiesFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTilePropertiesFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTilePropertiesFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTilePropertiesFeaturesQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, tileProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTilePropertiesFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tileProperties == rhs.tileProperties ); +# endif + } + + bool operator!=( 
PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 tileProperties = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceTilePropertiesFeaturesQCOM; + }; + + // wrapper struct for struct VkPhysicalDeviceTileShadingFeaturesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTileShadingFeaturesQCOM.html + struct PhysicalDeviceTileShadingFeaturesQCOM + { + using NativeType = VkPhysicalDeviceTileShadingFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTileShadingFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTileShadingFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 tileShading_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tileShadingFragmentStage_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tileShadingColorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tileShadingDepthAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tileShadingStencilAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tileShadingInputAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tileShadingSampledAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tileShadingPerTileDraw_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tileShadingPerTileDispatch_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tileShadingDispatchTile_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tileShadingApron_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tileShadingAnisotropicApron_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tileShadingAtomicOps_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tileShadingImageProcessing_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , tileShading{ tileShading_ } + , tileShadingFragmentStage{ tileShadingFragmentStage_ } + , tileShadingColorAttachments{ tileShadingColorAttachments_ } + , tileShadingDepthAttachments{ tileShadingDepthAttachments_ } + , tileShadingStencilAttachments{ tileShadingStencilAttachments_ } + , tileShadingInputAttachments{ tileShadingInputAttachments_ } + , tileShadingSampledAttachments{ tileShadingSampledAttachments_ } + , tileShadingPerTileDraw{ tileShadingPerTileDraw_ } + , tileShadingPerTileDispatch{ tileShadingPerTileDispatch_ } + , tileShadingDispatchTile{ tileShadingDispatchTile_ } + , tileShadingApron{ tileShadingApron_ } + , tileShadingAnisotropicApron{ tileShadingAnisotropicApron_ } + , tileShadingAtomicOps{ tileShadingAtomicOps_ } + , tileShadingImageProcessing{ tileShadingImageProcessing_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTileShadingFeaturesQCOM( PhysicalDeviceTileShadingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTileShadingFeaturesQCOM( VkPhysicalDeviceTileShadingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTileShadingFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceTileShadingFeaturesQCOM & operator=( PhysicalDeviceTileShadingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceTileShadingFeaturesQCOM & operator=( VkPhysicalDeviceTileShadingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShading( VULKAN_HPP_NAMESPACE::Bool32 tileShading_ ) VULKAN_HPP_NOEXCEPT + { + tileShading = tileShading_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & + setTileShadingFragmentStage( VULKAN_HPP_NAMESPACE::Bool32 tileShadingFragmentStage_ ) VULKAN_HPP_NOEXCEPT + { + tileShadingFragmentStage = tileShadingFragmentStage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & + setTileShadingColorAttachments( VULKAN_HPP_NAMESPACE::Bool32 tileShadingColorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + tileShadingColorAttachments = tileShadingColorAttachments_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & + setTileShadingDepthAttachments( VULKAN_HPP_NAMESPACE::Bool32 tileShadingDepthAttachments_ ) VULKAN_HPP_NOEXCEPT + { + tileShadingDepthAttachments = tileShadingDepthAttachments_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & + setTileShadingStencilAttachments( VULKAN_HPP_NAMESPACE::Bool32 tileShadingStencilAttachments_ ) VULKAN_HPP_NOEXCEPT + { + tileShadingStencilAttachments = tileShadingStencilAttachments_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & + setTileShadingInputAttachments( VULKAN_HPP_NAMESPACE::Bool32 tileShadingInputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + tileShadingInputAttachments = tileShadingInputAttachments_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & + setTileShadingSampledAttachments( VULKAN_HPP_NAMESPACE::Bool32 tileShadingSampledAttachments_ ) VULKAN_HPP_NOEXCEPT + { + tileShadingSampledAttachments = tileShadingSampledAttachments_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & + setTileShadingPerTileDraw( VULKAN_HPP_NAMESPACE::Bool32 tileShadingPerTileDraw_ ) VULKAN_HPP_NOEXCEPT + { + tileShadingPerTileDraw = tileShadingPerTileDraw_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & + setTileShadingPerTileDispatch( VULKAN_HPP_NAMESPACE::Bool32 tileShadingPerTileDispatch_ ) VULKAN_HPP_NOEXCEPT + { + tileShadingPerTileDispatch = tileShadingPerTileDispatch_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & + setTileShadingDispatchTile( VULKAN_HPP_NAMESPACE::Bool32 tileShadingDispatchTile_ ) VULKAN_HPP_NOEXCEPT + { + tileShadingDispatchTile = tileShadingDispatchTile_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & setTileShadingApron( VULKAN_HPP_NAMESPACE::Bool32 tileShadingApron_ ) VULKAN_HPP_NOEXCEPT + { + tileShadingApron = tileShadingApron_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & + setTileShadingAnisotropicApron( VULKAN_HPP_NAMESPACE::Bool32 tileShadingAnisotropicApron_ ) VULKAN_HPP_NOEXCEPT + { + tileShadingAnisotropicApron = tileShadingAnisotropicApron_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & + setTileShadingAtomicOps( VULKAN_HPP_NAMESPACE::Bool32 tileShadingAtomicOps_ ) VULKAN_HPP_NOEXCEPT + { + tileShadingAtomicOps = tileShadingAtomicOps_; + return *this; + } + + 
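+      // Illustrative usage sketch (added comment, not generator output): each setter in this
+      // struct returns *this, so calls can be chained when enabling features at device creation
+      // time. "deviceCreateInfo" is an assumed vk::DeviceCreateInfo, and availability of the
+      // corresponding QCOM device extension is assumed:
+      //
+      //   vk::PhysicalDeviceTileShadingFeaturesQCOM tileShadingFeatures{};
+      //   tileShadingFeatures.setTileShading( VK_TRUE ).setTileShadingColorAttachments( VK_TRUE );
+      //   deviceCreateInfo.setPNext( &tileShadingFeatures );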
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTileShadingFeaturesQCOM & + setTileShadingImageProcessing( VULKAN_HPP_NAMESPACE::Bool32 tileShadingImageProcessing_ ) VULKAN_HPP_NOEXCEPT + { + tileShadingImageProcessing = tileShadingImageProcessing_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceTileShadingFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTileShadingFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTileShadingFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTileShadingFeaturesQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + tileShading, + tileShadingFragmentStage, + tileShadingColorAttachments, + tileShadingDepthAttachments, + tileShadingStencilAttachments, + tileShadingInputAttachments, + tileShadingSampledAttachments, + tileShadingPerTileDraw, + tileShadingPerTileDispatch, + tileShadingDispatchTile, + tileShadingApron, + tileShadingAnisotropicApron, + tileShadingAtomicOps, + tileShadingImageProcessing ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTileShadingFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceTileShadingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tileShading == rhs.tileShading ) && + ( tileShadingFragmentStage == rhs.tileShadingFragmentStage ) && ( tileShadingColorAttachments == rhs.tileShadingColorAttachments ) && + ( tileShadingDepthAttachments == rhs.tileShadingDepthAttachments ) && ( tileShadingStencilAttachments == rhs.tileShadingStencilAttachments ) && + ( tileShadingInputAttachments == rhs.tileShadingInputAttachments ) && ( tileShadingSampledAttachments == rhs.tileShadingSampledAttachments ) && + ( tileShadingPerTileDraw == rhs.tileShadingPerTileDraw ) && ( tileShadingPerTileDispatch == rhs.tileShadingPerTileDispatch ) && + ( tileShadingDispatchTile == rhs.tileShadingDispatchTile ) && ( tileShadingApron == rhs.tileShadingApron ) && + ( tileShadingAnisotropicApron == rhs.tileShadingAnisotropicApron ) && ( tileShadingAtomicOps == rhs.tileShadingAtomicOps ) && + ( tileShadingImageProcessing == rhs.tileShadingImageProcessing ); +# endif + } + + bool operator!=( PhysicalDeviceTileShadingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTileShadingFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 tileShading = {}; + VULKAN_HPP_NAMESPACE::Bool32 tileShadingFragmentStage = {}; + VULKAN_HPP_NAMESPACE::Bool32 tileShadingColorAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 tileShadingDepthAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 tileShadingStencilAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 tileShadingInputAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 tileShadingSampledAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 tileShadingPerTileDraw = {}; + VULKAN_HPP_NAMESPACE::Bool32 tileShadingPerTileDispatch = {}; + 
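+    // Note (added comment, not generator output): the public data members of this wrapper mirror
+    // the layout of the C struct VkPhysicalDeviceTileShadingFeaturesQCOM one-to-one, which is
+    // what the reinterpret_cast based conversions to and from the native type rely on.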
VULKAN_HPP_NAMESPACE::Bool32 tileShadingDispatchTile = {}; + VULKAN_HPP_NAMESPACE::Bool32 tileShadingApron = {}; + VULKAN_HPP_NAMESPACE::Bool32 tileShadingAnisotropicApron = {}; + VULKAN_HPP_NAMESPACE::Bool32 tileShadingAtomicOps = {}; + VULKAN_HPP_NAMESPACE::Bool32 tileShadingImageProcessing = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceTileShadingFeaturesQCOM; + }; + + // wrapper struct for struct VkPhysicalDeviceTileShadingPropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTileShadingPropertiesQCOM.html + struct PhysicalDeviceTileShadingPropertiesQCOM + { + using NativeType = VkPhysicalDeviceTileShadingPropertiesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTileShadingPropertiesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTileShadingPropertiesQCOM( uint32_t maxApronSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 preferNonCoherent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D tileGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxTileShadingRate_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxApronSize{ maxApronSize_ } + , preferNonCoherent{ preferNonCoherent_ } + , tileGranularity{ tileGranularity_ } + , maxTileShadingRate{ maxTileShadingRate_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTileShadingPropertiesQCOM( PhysicalDeviceTileShadingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTileShadingPropertiesQCOM( VkPhysicalDeviceTileShadingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTileShadingPropertiesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceTileShadingPropertiesQCOM & operator=( PhysicalDeviceTileShadingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceTileShadingPropertiesQCOM & operator=( VkPhysicalDeviceTileShadingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceTileShadingPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTileShadingPropertiesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTileShadingPropertiesQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTileShadingPropertiesQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxApronSize, preferNonCoherent, tileGranularity, maxTileShadingRate ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTileShadingPropertiesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceTileShadingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxApronSize == rhs.maxApronSize ) && ( preferNonCoherent == rhs.preferNonCoherent ) && + ( tileGranularity == rhs.tileGranularity ) && ( maxTileShadingRate 
== rhs.maxTileShadingRate ); +# endif + } + + bool operator!=( PhysicalDeviceTileShadingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTileShadingPropertiesQCOM; + void * pNext = {}; + uint32_t maxApronSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 preferNonCoherent = {}; + VULKAN_HPP_NAMESPACE::Extent2D tileGranularity = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxTileShadingRate = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceTileShadingPropertiesQCOM; + }; + + // wrapper struct for struct VkPhysicalDeviceTimelineSemaphoreFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTimelineSemaphoreFeatures.html + struct PhysicalDeviceTimelineSemaphoreFeatures + { + using NativeType = VkPhysicalDeviceTimelineSemaphoreFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , timelineSemaphore{ timelineSemaphore_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTimelineSemaphoreFeatures( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTimelineSemaphoreFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceTimelineSemaphoreFeatures & operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceTimelineSemaphoreFeatures & operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & + setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT + { + timelineSemaphore = timelineSemaphore_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceTimelineSemaphoreFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTimelineSemaphoreFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTimelineSemaphoreFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, timelineSemaphore ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const & ) const = default; +#else + 
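+    // Note (added comment, not generator output): with VULKAN_HPP_USE_REFLECT defined, the
+    // equality check below compares the std::tie'd member tuple returned by reflect(); otherwise
+    // it falls back to an explicit member-by-member comparison. In both cases pNext is compared
+    // as a raw pointer value, not by the contents of any chained structures.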
bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timelineSemaphore == rhs.timelineSemaphore ); +# endif + } + + bool operator!=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceTimelineSemaphoreFeatures; + }; + + using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures; + + // wrapper struct for struct VkPhysicalDeviceTimelineSemaphoreProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTimelineSemaphoreProperties.html + struct PhysicalDeviceTimelineSemaphoreProperties + { + using NativeType = VkPhysicalDeviceTimelineSemaphoreProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxTimelineSemaphoreValueDifference{ maxTimelineSemaphoreValueDifference_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTimelineSemaphoreProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceTimelineSemaphoreProperties & operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceTimelineSemaphoreProperties & operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceTimelineSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTimelineSemaphoreProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTimelineSemaphoreProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxTimelineSemaphoreValueDifference ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# 
else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ); +# endif + } + + bool operator!=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties; + void * pNext = {}; + uint64_t maxTimelineSemaphoreValueDifference = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceTimelineSemaphoreProperties; + }; + + using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties; + + // wrapper struct for struct VkPhysicalDeviceToolProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceToolProperties.html + struct PhysicalDeviceToolProperties + { + using NativeType = VkPhysicalDeviceToolProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceToolProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties( std::array const & name_ = {}, + std::array const & version_ = {}, + VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes_ = {}, + std::array const & description_ = {}, + std::array const & layer_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , name{ name_ } + , version{ version_ } + , purposes{ purposes_ } + , description{ description_ } + , layer{ layer_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceToolProperties( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceToolProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceToolProperties & operator=( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceToolProperties & operator=( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceToolProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceToolProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceToolProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceToolProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ToolPurposeFlags const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, name, version, purposes, description, layer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( name, rhs.name 
); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( version, rhs.version ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = purposes <=> rhs.purposes; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( layer, rhs.layer ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( strcmp( name, rhs.name ) == 0 ) && ( strcmp( version, rhs.version ) == 0 ) && + ( purposes == rhs.purposes ) && ( strcmp( description, rhs.description ) == 0 ) && ( strcmp( layer, rhs.layer ) == 0 ); + } + + bool operator!=( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D version = {}; + VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D layer = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceToolProperties; + }; + + using PhysicalDeviceToolPropertiesEXT = PhysicalDeviceToolProperties; + + // wrapper struct for struct VkPhysicalDeviceTransformFeedbackFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTransformFeedbackFeaturesEXT.html + struct PhysicalDeviceTransformFeedbackFeaturesEXT + { + using NativeType = VkPhysicalDeviceTransformFeedbackFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , transformFeedback{ transformFeedback_ } + , geometryStreams{ geometryStreams_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTransformFeedbackFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( 
void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & + setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT + { + transformFeedback = transformFeedback_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT + { + geometryStreams = geometryStreams_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTransformFeedbackFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, transformFeedback, geometryStreams ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transformFeedback == rhs.transformFeedback ) && ( geometryStreams == rhs.geometryStreams ); +# endif + } + + bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {}; + VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceTransformFeedbackFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceTransformFeedbackPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceTransformFeedbackPropertiesEXT.html + struct PhysicalDeviceTransformFeedbackPropertiesEXT + { + using NativeType = VkPhysicalDeviceTransformFeedbackPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( uint32_t maxTransformFeedbackStreams_ = {}, + uint32_t maxTransformFeedbackBuffers_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, + uint32_t maxTransformFeedbackStreamDataSize_ = {}, + uint32_t maxTransformFeedbackBufferDataSize_ = {}, + uint32_t maxTransformFeedbackBufferDataStride_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ 
= {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxTransformFeedbackStreams{ maxTransformFeedbackStreams_ } + , maxTransformFeedbackBuffers{ maxTransformFeedbackBuffers_ } + , maxTransformFeedbackBufferSize{ maxTransformFeedbackBufferSize_ } + , maxTransformFeedbackStreamDataSize{ maxTransformFeedbackStreamDataSize_ } + , maxTransformFeedbackBufferDataSize{ maxTransformFeedbackBufferDataSize_ } + , maxTransformFeedbackBufferDataStride{ maxTransformFeedbackBufferDataStride_ } + , transformFeedbackQueries{ transformFeedbackQueries_ } + , transformFeedbackStreamsLinesTriangles{ transformFeedbackStreamsLinesTriangles_ } + , transformFeedbackRasterizationStreamSelect{ transformFeedbackRasterizationStreamSelect_ } + , transformFeedbackDraw{ transformFeedbackDraw_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTransformFeedbackPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTransformFeedbackPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxTransformFeedbackStreams, + maxTransformFeedbackBuffers, + maxTransformFeedbackBufferSize, + maxTransformFeedbackStreamDataSize, + maxTransformFeedbackBufferDataSize, + maxTransformFeedbackBufferDataStride, + transformFeedbackQueries, + transformFeedbackStreamsLinesTriangles, + transformFeedbackRasterizationStreamSelect, + transformFeedbackDraw ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams ) && + ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers ) && ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize ) && + ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize ) && + ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize ) && + ( 
maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride ) && + ( transformFeedbackQueries == rhs.transformFeedbackQueries ) && + ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles ) && + ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect ) && + ( transformFeedbackDraw == rhs.transformFeedbackDraw ); +# endif + } + + bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; + void * pNext = {}; + uint32_t maxTransformFeedbackStreams = {}; + uint32_t maxTransformFeedbackBuffers = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {}; + uint32_t maxTransformFeedbackStreamDataSize = {}; + uint32_t maxTransformFeedbackBufferDataSize = {}; + uint32_t maxTransformFeedbackBufferDataStride = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceTransformFeedbackPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceUniformBufferStandardLayoutFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceUniformBufferStandardLayoutFeatures.html + struct PhysicalDeviceUniformBufferStandardLayoutFeatures + { + using NativeType = VkPhysicalDeviceUniformBufferStandardLayoutFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , uniformBufferStandardLayout{ uniformBufferStandardLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceUniformBufferStandardLayoutFeatures( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceUniformBufferStandardLayoutFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceUniformBufferStandardLayoutFeatures & + operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & + setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 
uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT + { + uniformBufferStandardLayout = uniformBufferStandardLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, uniformBufferStandardLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ); +# endif + } + + bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceUniformBufferStandardLayoutFeatures; + }; + + using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures; + + // wrapper struct for struct VkPhysicalDeviceVariablePointersFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVariablePointersFeatures.html + struct PhysicalDeviceVariablePointersFeatures + { + using NativeType = VkPhysicalDeviceVariablePointersFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVariablePointersFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , variablePointersStorageBuffer{ variablePointersStorageBuffer_ } + , variablePointers{ variablePointers_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVariablePointersFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVariablePointersFeatures & operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + 
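+  // Illustrative usage sketch (added comment, not generator output): like the other
+  // PhysicalDevice*Features wrappers, this struct is normally filled in by the implementation
+  // through the VkPhysicalDeviceFeatures2 pNext chain; "physicalDevice" is an assumed
+  // vk::PhysicalDevice handle:
+  //
+  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
+  //                                            vk::PhysicalDeviceVariablePointersFeatures>();
+  //   vk::Bool32 hasVariablePointers =
+  //       chain.get<vk::PhysicalDeviceVariablePointersFeatures>().variablePointers;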
PhysicalDeviceVariablePointersFeatures & operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & + setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT + { + variablePointersStorageBuffer = variablePointersStorageBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT + { + variablePointers = variablePointers_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceVariablePointersFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVariablePointersFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVariablePointersFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, variablePointersStorageBuffer, variablePointers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVariablePointersFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) && + ( variablePointers == rhs.variablePointers ); +# endif + } + + bool operator!=( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVariablePointersFeatures; + }; + + using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures; + + // wrapper struct for struct VkPhysicalDeviceVertexAttributeDivisorFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVertexAttributeDivisorFeatures.html + struct PhysicalDeviceVertexAttributeDivisorFeatures + { + using NativeType = VkPhysicalDeviceVertexAttributeDivisorFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeatures; + +#if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeatures( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vertexAttributeInstanceRateDivisor{ vertexAttributeInstanceRateDivisor_ } + , vertexAttributeInstanceRateZeroDivisor{ vertexAttributeInstanceRateZeroDivisor_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeatures( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVertexAttributeDivisorFeatures( VkPhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVertexAttributeDivisorFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVertexAttributeDivisorFeatures & operator=( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceVertexAttributeDivisorFeatures & operator=( VkPhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeatures & + setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeatures & + setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceVertexAttributeDivisorFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexAttributeDivisorFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexAttributeDivisorFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexAttributeDivisorFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vertexAttributeInstanceRateDivisor, vertexAttributeInstanceRateZeroDivisor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) && + ( 
vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor ); +# endif + } + + bool operator!=( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVertexAttributeDivisorFeatures; + }; + + using PhysicalDeviceVertexAttributeDivisorFeaturesEXT = PhysicalDeviceVertexAttributeDivisorFeatures; + using PhysicalDeviceVertexAttributeDivisorFeaturesKHR = PhysicalDeviceVertexAttributeDivisorFeatures; + + // wrapper struct for struct VkPhysicalDeviceVertexAttributeDivisorProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVertexAttributeDivisorProperties.html + struct PhysicalDeviceVertexAttributeDivisorProperties + { + using NativeType = VkPhysicalDeviceVertexAttributeDivisorProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorProperties( uint32_t maxVertexAttribDivisor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxVertexAttribDivisor{ maxVertexAttribDivisor_ } + , supportsNonZeroFirstInstance{ supportsNonZeroFirstInstance_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVertexAttributeDivisorProperties( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVertexAttributeDivisorProperties( VkPhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVertexAttributeDivisorProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVertexAttributeDivisorProperties & operator=( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceVertexAttributeDivisorProperties & operator=( VkPhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceVertexAttributeDivisorProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexAttributeDivisorProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexAttributeDivisorProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexAttributeDivisorProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxVertexAttribDivisor, supportsNonZeroFirstInstance ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVertexAttributeDivisorProperties 
const & ) const = default; +#else + bool operator==( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ) && + ( supportsNonZeroFirstInstance == rhs.supportsNonZeroFirstInstance ); +# endif + } + + bool operator!=( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorProperties; + void * pNext = {}; + uint32_t maxVertexAttribDivisor = {}; + VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVertexAttributeDivisorProperties; + }; + + using PhysicalDeviceVertexAttributeDivisorPropertiesKHR = PhysicalDeviceVertexAttributeDivisorProperties; + + // wrapper struct for struct VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT.html + struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT + { + using NativeType = VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxVertexAttribDivisor{ maxVertexAttribDivisor_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVertexAttributeDivisorPropertiesEXT( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVertexAttributeDivisorPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVertexAttributeDivisorPropertiesEXT & + operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxVertexAttribDivisor ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ); +# endif + } + + bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; + void * pNext = {}; + uint32_t maxVertexAttribDivisor = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVertexAttributeDivisorPropertiesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT.html + struct PhysicalDeviceVertexAttributeRobustnessFeaturesEXT + { + using NativeType = VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeRobustnessFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeRobustness_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vertexAttributeRobustness{ vertexAttributeRobustness_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVertexAttributeRobustnessFeaturesEXT( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVertexAttributeRobustnessFeaturesEXT( VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVertexAttributeRobustnessFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVertexAttributeRobustnessFeaturesEXT & + operator=( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceVertexAttributeRobustnessFeaturesEXT & operator=( VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeRobustnessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeRobustnessFeaturesEXT & + setVertexAttributeRobustness( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeRobustness_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeRobustness = vertexAttributeRobustness_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vertexAttributeRobustness ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexAttributeRobustness == rhs.vertexAttributeRobustness ); +# endif + } + + bool operator!=( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeRobustness = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVertexAttributeRobustnessFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT.html + struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT + { + using NativeType = VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vertexInputDynamicState{ vertexInputDynamicState_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVertexInputDynamicStateFeaturesEXT( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVertexInputDynamicStateFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & + setVertexInputDynamicState( 
VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + vertexInputDynamicState = vertexInputDynamicState_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vertexInputDynamicState ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexInputDynamicState == rhs.vertexInputDynamicState ); +# endif + } + + bool operator!=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVertexInputDynamicStateFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceVideoEncodeAV1FeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoEncodeAV1FeaturesKHR.html + struct PhysicalDeviceVideoEncodeAV1FeaturesKHR + { + using NativeType = VkPhysicalDeviceVideoEncodeAV1FeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoEncodeAv1FeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeAV1FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 videoEncodeAV1_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , videoEncodeAV1{ videoEncodeAV1_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeAV1FeaturesKHR( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVideoEncodeAV1FeaturesKHR( VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVideoEncodeAV1FeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVideoEncodeAV1FeaturesKHR & operator=( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceVideoEncodeAV1FeaturesKHR & operator=( VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
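+    // Usage sketch (illustrative only): to enable AV1 encode at device creation time, this struct
+    // is typically filled via the setters below and chained into the pNext of a DeviceCreateInfo
+    // (assumed to be defined elsewhere in this header):
+    //
+    //   VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeAV1FeaturesKHR av1Features;
+    //   av1Features.setVideoEncodeAV1( VK_TRUE );
+    //   deviceCreateInfo.setPNext( &av1Features );  // deviceCreateInfo: the DeviceCreateInfo being assembled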
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeAV1FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeAV1FeaturesKHR & setVideoEncodeAV1( VULKAN_HPP_NAMESPACE::Bool32 videoEncodeAV1_ ) VULKAN_HPP_NOEXCEPT + { + videoEncodeAV1 = videoEncodeAV1_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoEncodeAV1FeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoEncodeAV1FeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, videoEncodeAV1 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoEncodeAV1 == rhs.videoEncodeAV1 ); +# endif + } + + bool operator!=( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoEncodeAv1FeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 videoEncodeAV1 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVideoEncodeAV1FeaturesKHR; + }; + + // wrapper struct for struct VkVideoProfileInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoProfileInfoKHR.html + struct VideoProfileInfoKHR + { + using NativeType = VkVideoProfileInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoProfileInfoKHR( + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ = VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eNone, + VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ = {}, + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ = {}, + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , videoCodecOperation{ videoCodecOperation_ } + , chromaSubsampling{ chromaSubsampling_ } + , lumaBitDepth{ lumaBitDepth_ } + , chromaBitDepth{ chromaBitDepth_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoProfileInfoKHR( VideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoProfileInfoKHR( VkVideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : VideoProfileInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoProfileInfoKHR & operator=( VideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + 
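+    // Usage sketch (illustrative only): the fluent setters below can be chained to describe a
+    // video profile, which is then referenced e.g. by PhysicalDeviceVideoEncodeQualityLevelInfoKHR
+    // further down in this header. The flag values are placeholders chosen by the caller:
+    //
+    //   VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR profile;
+    //   profile.setVideoCodecOperation( codecOperation )  // e.g. an encode or decode operation bit
+    //          .setChromaSubsampling( chromaSubsampling )
+    //          .setLumaBitDepth( lumaBitDepth )
+    //          .setChromaBitDepth( chromaBitDepth );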
VideoProfileInfoKHR & operator=( VkVideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & + setVideoCodecOperation( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ ) VULKAN_HPP_NOEXCEPT + { + videoCodecOperation = videoCodecOperation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & + setChromaSubsampling( VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ ) VULKAN_HPP_NOEXCEPT + { + chromaSubsampling = chromaSubsampling_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setLumaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ ) VULKAN_HPP_NOEXCEPT + { + lumaBitDepth = lumaBitDepth_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setChromaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ ) VULKAN_HPP_NOEXCEPT + { + chromaBitDepth = chromaBitDepth_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoProfileInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoProfileInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, videoCodecOperation, chromaSubsampling, lumaBitDepth, chromaBitDepth ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoProfileInfoKHR const & ) const = default; +#else + bool operator==( VideoProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperation == rhs.videoCodecOperation ) && + ( chromaSubsampling == rhs.chromaSubsampling ) && ( lumaBitDepth == rhs.lumaBitDepth ) && ( chromaBitDepth == rhs.chromaBitDepth ); +# endif + } + + bool operator!=( VideoProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfileInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation = VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eNone; + VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling = {}; + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth = {}; + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth = {}; + }; + + template <> + struct CppType + { + using Type = VideoProfileInfoKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR.html + struct PhysicalDeviceVideoEncodeQualityLevelInfoKHR + { + using 
NativeType = VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoEncodeQualityLevelInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeQualityLevelInfoKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ = {}, + uint32_t qualityLevel_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pVideoProfile{ pVideoProfile_ } + , qualityLevel{ qualityLevel_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeQualityLevelInfoKHR( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVideoEncodeQualityLevelInfoKHR( VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVideoEncodeQualityLevelInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVideoEncodeQualityLevelInfoKHR & operator=( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceVideoEncodeQualityLevelInfoKHR & operator=( VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQualityLevelInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQualityLevelInfoKHR & + setPVideoProfile( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ ) VULKAN_HPP_NOEXCEPT + { + pVideoProfile = pVideoProfile_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQualityLevelInfoKHR & setQualityLevel( uint32_t qualityLevel_ ) VULKAN_HPP_NOEXCEPT + { + qualityLevel = qualityLevel_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pVideoProfile, qualityLevel ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVideoProfile == rhs.pVideoProfile ) && ( qualityLevel == rhs.qualityLevel ); +# endif + } + + bool operator!=( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs 
); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoEncodeQualityLevelInfoKHR; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile = {}; + uint32_t qualityLevel = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVideoEncodeQualityLevelInfoKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR.html + struct PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR + { + using NativeType = VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 videoEncodeQuantizationMap_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , videoEncodeQuantizationMap{ videoEncodeQuantizationMap_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR( VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR & + operator=( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR & operator=( VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR & + setVideoEncodeQuantizationMap( VULKAN_HPP_NAMESPACE::Bool32 videoEncodeQuantizationMap_ ) VULKAN_HPP_NOEXCEPT + { + videoEncodeQuantizationMap = videoEncodeQuantizationMap_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, videoEncodeQuantizationMap ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoEncodeQuantizationMap == rhs.videoEncodeQuantizationMap ); +# endif + } + + bool operator!=( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 videoEncodeQuantizationMap = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceVideoFormatInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoFormatInfoKHR.html + struct PhysicalDeviceVideoFormatInfoKHR + { + using NativeType = VkPhysicalDeviceVideoFormatInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoFormatInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageUsage{ imageUsage_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVideoFormatInfoKHR( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVideoFormatInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVideoFormatInfoKHR & operator=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceVideoFormatInfoKHR & operator=( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoFormatInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoFormatInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT + { + imageUsage = imageUsage_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceVideoFormatInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoFormatInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoFormatInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoFormatInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageUsage ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVideoFormatInfoKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageUsage == rhs.imageUsage ); +# endif + } + + bool operator!=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoFormatInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVideoFormatInfoKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceVideoMaintenance1FeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoMaintenance1FeaturesKHR.html + struct PhysicalDeviceVideoMaintenance1FeaturesKHR + { + using NativeType = VkPhysicalDeviceVideoMaintenance1FeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoMaintenance1FeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoMaintenance1FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance1_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , videoMaintenance1{ videoMaintenance1_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoMaintenance1FeaturesKHR( PhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVideoMaintenance1FeaturesKHR( VkPhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVideoMaintenance1FeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVideoMaintenance1FeaturesKHR & operator=( PhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceVideoMaintenance1FeaturesKHR & operator=( VkPhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoMaintenance1FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoMaintenance1FeaturesKHR & + setVideoMaintenance1( VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance1_ ) VULKAN_HPP_NOEXCEPT + { + videoMaintenance1 = videoMaintenance1_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceVideoMaintenance1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoMaintenance1FeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoMaintenance1FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoMaintenance1FeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# 
else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, videoMaintenance1 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVideoMaintenance1FeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoMaintenance1 == rhs.videoMaintenance1 ); +# endif + } + + bool operator!=( PhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoMaintenance1FeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance1 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVideoMaintenance1FeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceVideoMaintenance2FeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVideoMaintenance2FeaturesKHR.html + struct PhysicalDeviceVideoMaintenance2FeaturesKHR + { + using NativeType = VkPhysicalDeviceVideoMaintenance2FeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoMaintenance2FeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoMaintenance2FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance2_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , videoMaintenance2{ videoMaintenance2_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoMaintenance2FeaturesKHR( PhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVideoMaintenance2FeaturesKHR( VkPhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVideoMaintenance2FeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVideoMaintenance2FeaturesKHR & operator=( PhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceVideoMaintenance2FeaturesKHR & operator=( VkPhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoMaintenance2FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoMaintenance2FeaturesKHR & + setVideoMaintenance2( VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance2_ ) VULKAN_HPP_NOEXCEPT + { + videoMaintenance2 = videoMaintenance2_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceVideoMaintenance2FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoMaintenance2FeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoMaintenance2FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + 
operator VkPhysicalDeviceVideoMaintenance2FeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, videoMaintenance2 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVideoMaintenance2FeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoMaintenance2 == rhs.videoMaintenance2 ); +# endif + } + + bool operator!=( PhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoMaintenance2FeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance2 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVideoMaintenance2FeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceVulkan11Features, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan11Features.html + struct PhysicalDeviceVulkan11Features + { + using NativeType = VkPhysicalDeviceVulkan11Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Features; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , storageBuffer16BitAccess{ storageBuffer16BitAccess_ } + , uniformAndStorageBuffer16BitAccess{ uniformAndStorageBuffer16BitAccess_ } + , storagePushConstant16{ storagePushConstant16_ } + , storageInputOutput16{ storageInputOutput16_ } + , multiview{ multiview_ } + , multiviewGeometryShader{ multiviewGeometryShader_ } + , multiviewTessellationShader{ multiviewTessellationShader_ } + , variablePointersStorageBuffer{ variablePointersStorageBuffer_ } + , variablePointers{ variablePointers_ } + , protectedMemory{ protectedMemory_ } + , samplerYcbcrConversion{ samplerYcbcrConversion_ } + , shaderDrawParameters{ shaderDrawParameters_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan11Features( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT + : 
PhysicalDeviceVulkan11Features( *reinterpret_cast<PhysicalDeviceVulkan11Features const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceVulkan11Features & operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceVulkan11Features & operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
+      setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageBuffer16BitAccess = storageBuffer16BitAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
+      setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storagePushConstant16 = storagePushConstant16_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageInputOutput16 = storageInputOutput16_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiview = multiview_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
+      setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewGeometryShader = multiviewGeometryShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
+      setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewTessellationShader = multiviewTessellationShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
+      setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      variablePointersStorageBuffer = variablePointersStorageBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      variablePointers = variablePointers_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      protectedMemory = protectedMemory_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
+      setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerYcbcrConversion = samplerYcbcrConversion_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderDrawParameters = shaderDrawParameters_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPhysicalDeviceVulkan11Features const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan11Features *>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan11Features *>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan11Features const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceVulkan11Features *>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan11Features *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceVulkan11Features *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+    reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       storageBuffer16BitAccess,
+                       uniformAndStorageBuffer16BitAccess,
+                       storagePushConstant16,
+                       storageInputOutput16,
+                       multiview,
+                       multiviewGeometryShader,
+                       multiviewTessellationShader,
+                       variablePointersStorageBuffer,
+                       variablePointers,
+                       protectedMemory,
+                       samplerYcbcrConversion,
+                       shaderDrawParameters );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceVulkan11Features const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) &&
+             ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && ( storagePushConstant16 == rhs.storagePushConstant16 ) &&
+             ( storageInputOutput16 == rhs.storageInputOutput16 ) && ( multiview == rhs.multiview ) &&
+             ( multiviewGeometryShader == rhs.multiviewGeometryShader ) && ( multiviewTessellationShader == rhs.multiviewTessellationShader ) &&
+             ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) && ( variablePointers == rhs.variablePointers ) &&
+             ( protectedMemory == rhs.protectedMemory ) && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ) &&
+             ( shaderDrawParameters == rhs.shaderDrawParameters );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Features>
+  {
+    using Type = PhysicalDeviceVulkan11Features;
+  };
+
+  // wrapper struct for struct VkPhysicalDeviceVulkan11Properties, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan11Properties.html
+  struct PhysicalDeviceVulkan11Properties
+  {
+    using NativeType = VkPhysicalDeviceVulkan11Properties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Properties;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties(
+      std::array<uint8_t, VK_UUID_SIZE> const & deviceUUID_ = {},
+      std::array<uint8_t, VK_UUID_SIZE> const & driverUUID_ = {},
+      std::array<uint8_t, VK_LUID_SIZE> const & deviceLUID_ = {},
+      uint32_t deviceNodeMask_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {},
+      uint32_t subgroupSize_ = {},
+      VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {},
+      VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {},
+      VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes,
+      uint32_t maxMultiviewViewCount_ = {},
+      uint32_t maxMultiviewInstanceIndex_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {},
+      uint32_t maxPerSetDescriptors_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {},
+      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , deviceUUID{ deviceUUID_ }
+      , driverUUID{ driverUUID_ }
+      , deviceLUID{ deviceLUID_ }
+      , deviceNodeMask{ deviceNodeMask_ }
+      , deviceLUIDValid{ deviceLUIDValid_ }
+      , subgroupSize{ subgroupSize_ }
+      , subgroupSupportedStages{ subgroupSupportedStages_ }
+      , subgroupSupportedOperations{ subgroupSupportedOperations_ }
+      , subgroupQuadOperationsInAllStages{ subgroupQuadOperationsInAllStages_ }
+      , pointClippingBehavior{ pointClippingBehavior_ }
+      , maxMultiviewViewCount{ maxMultiviewViewCount_ }
+      , maxMultiviewInstanceIndex{ maxMultiviewInstanceIndex_ }
+      , protectedNoFault{ protectedNoFault_ }
+      , maxPerSetDescriptors{ maxPerSetDescriptors_ }
+      , maxMemoryAllocationSize{ maxMemoryAllocationSize_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVulkan11Properties( *reinterpret_cast<PhysicalDeviceVulkan11Properties const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceVulkan11Properties & operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceVulkan11Properties & operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceVulkan11Properties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan11Properties *>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan11Properties *>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan11Properties const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceVulkan11Properties *>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan11Properties *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceVulkan11Properties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::ShaderStageFlags const &,
+               VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::PointClippingBehavior const &,
+               uint32_t const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::DeviceSize const &>
+# endif
+    reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       deviceUUID,
+                       driverUUID,
+                       deviceLUID,
+                       deviceNodeMask,
+                       deviceLUIDValid,
+                       subgroupSize,
+                       subgroupSupportedStages,
+                       subgroupSupportedOperations,
+                       subgroupQuadOperationsInAllStages,
+                       pointClippingBehavior,
+                       maxMultiviewViewCount,
+                       maxMultiviewInstanceIndex,
+                       protectedNoFault,
+                       maxPerSetDescriptors,
+                       maxMemoryAllocationSize );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceVulkan11Properties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && ( driverUUID == rhs.driverUUID ) &&
+             ( deviceLUID == rhs.deviceLUID ) && ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ) &&
+             ( subgroupSize == rhs.subgroupSize ) && ( subgroupSupportedStages == rhs.subgroupSupportedStages ) &&
+             ( subgroupSupportedOperations == rhs.subgroupSupportedOperations ) &&
+             ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages ) && ( pointClippingBehavior == rhs.pointClippingBehavior ) &&
+             ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ) &&
+             ( protectedNoFault == rhs.protectedNoFault ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) &&
+             ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {};
+    uint32_t deviceNodeMask = {};
+    VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
+    uint32_t subgroupSize = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages = {};
+    VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages = {};
+    VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
+    uint32_t maxMultiviewViewCount = {};
+    uint32_t maxMultiviewInstanceIndex = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
+    uint32_t maxPerSetDescriptors = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Properties>
+  {
+    using Type = PhysicalDeviceVulkan11Properties;
+  };
+
+  // wrapper struct for struct VkPhysicalDeviceVulkan12Features, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan12Features.html
+  struct PhysicalDeviceVulkan12Features
+  {
+    using NativeType = VkPhysicalDeviceVulkan12Features;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Features;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features(
VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , samplerMirrorClampToEdge{ samplerMirrorClampToEdge_ } + , drawIndirectCount{ drawIndirectCount_ } + , storageBuffer8BitAccess{ storageBuffer8BitAccess_ } + , uniformAndStorageBuffer8BitAccess{ 
uniformAndStorageBuffer8BitAccess_ } + , storagePushConstant8{ storagePushConstant8_ } + , shaderBufferInt64Atomics{ shaderBufferInt64Atomics_ } + , shaderSharedInt64Atomics{ shaderSharedInt64Atomics_ } + , shaderFloat16{ shaderFloat16_ } + , shaderInt8{ shaderInt8_ } + , descriptorIndexing{ descriptorIndexing_ } + , shaderInputAttachmentArrayDynamicIndexing{ shaderInputAttachmentArrayDynamicIndexing_ } + , shaderUniformTexelBufferArrayDynamicIndexing{ shaderUniformTexelBufferArrayDynamicIndexing_ } + , shaderStorageTexelBufferArrayDynamicIndexing{ shaderStorageTexelBufferArrayDynamicIndexing_ } + , shaderUniformBufferArrayNonUniformIndexing{ shaderUniformBufferArrayNonUniformIndexing_ } + , shaderSampledImageArrayNonUniformIndexing{ shaderSampledImageArrayNonUniformIndexing_ } + , shaderStorageBufferArrayNonUniformIndexing{ shaderStorageBufferArrayNonUniformIndexing_ } + , shaderStorageImageArrayNonUniformIndexing{ shaderStorageImageArrayNonUniformIndexing_ } + , shaderInputAttachmentArrayNonUniformIndexing{ shaderInputAttachmentArrayNonUniformIndexing_ } + , shaderUniformTexelBufferArrayNonUniformIndexing{ shaderUniformTexelBufferArrayNonUniformIndexing_ } + , shaderStorageTexelBufferArrayNonUniformIndexing{ shaderStorageTexelBufferArrayNonUniformIndexing_ } + , descriptorBindingUniformBufferUpdateAfterBind{ descriptorBindingUniformBufferUpdateAfterBind_ } + , descriptorBindingSampledImageUpdateAfterBind{ descriptorBindingSampledImageUpdateAfterBind_ } + , descriptorBindingStorageImageUpdateAfterBind{ descriptorBindingStorageImageUpdateAfterBind_ } + , descriptorBindingStorageBufferUpdateAfterBind{ descriptorBindingStorageBufferUpdateAfterBind_ } + , descriptorBindingUniformTexelBufferUpdateAfterBind{ descriptorBindingUniformTexelBufferUpdateAfterBind_ } + , descriptorBindingStorageTexelBufferUpdateAfterBind{ descriptorBindingStorageTexelBufferUpdateAfterBind_ } + , descriptorBindingUpdateUnusedWhilePending{ descriptorBindingUpdateUnusedWhilePending_ } + , descriptorBindingPartiallyBound{ descriptorBindingPartiallyBound_ } + , descriptorBindingVariableDescriptorCount{ descriptorBindingVariableDescriptorCount_ } + , runtimeDescriptorArray{ runtimeDescriptorArray_ } + , samplerFilterMinmax{ samplerFilterMinmax_ } + , scalarBlockLayout{ scalarBlockLayout_ } + , imagelessFramebuffer{ imagelessFramebuffer_ } + , uniformBufferStandardLayout{ uniformBufferStandardLayout_ } + , shaderSubgroupExtendedTypes{ shaderSubgroupExtendedTypes_ } + , separateDepthStencilLayouts{ separateDepthStencilLayouts_ } + , hostQueryReset{ hostQueryReset_ } + , timelineSemaphore{ timelineSemaphore_ } + , bufferDeviceAddress{ bufferDeviceAddress_ } + , bufferDeviceAddressCaptureReplay{ bufferDeviceAddressCaptureReplay_ } + , bufferDeviceAddressMultiDevice{ bufferDeviceAddressMultiDevice_ } + , vulkanMemoryModel{ vulkanMemoryModel_ } + , vulkanMemoryModelDeviceScope{ vulkanMemoryModelDeviceScope_ } + , vulkanMemoryModelAvailabilityVisibilityChains{ vulkanMemoryModelAvailabilityVisibilityChains_ } + , shaderOutputViewportIndex{ shaderOutputViewportIndex_ } + , shaderOutputLayer{ shaderOutputLayer_ } + , subgroupBroadcastDynamicId{ subgroupBroadcastDynamicId_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan12Features( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVulkan12Features & operator=( 
PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceVulkan12Features & operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerMirrorClampToEdge = samplerMirrorClampToEdge_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drawIndirectCount = drawIndirectCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageBuffer8BitAccess = storageBuffer8BitAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storagePushConstant8 = storagePushConstant8_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderFloat16 = shaderFloat16_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInt8 = shaderInt8_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorIndexing = descriptorIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14
PhysicalDeviceVulkan12Features & + setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageImageUpdateAfterBind = 
descriptorBindingStorageImageUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUniformTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT + { + runtimeDescriptorArray = runtimeDescriptorArray_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT + { + samplerFilterMinmax = samplerFilterMinmax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT + { + scalarBlockLayout = scalarBlockLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT + { + imagelessFramebuffer = imagelessFramebuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT + { + uniformBufferStandardLayout = uniformBufferStandardLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setSeparateDepthStencilLayouts( 
VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      separateDepthStencilLayouts = separateDepthStencilLayouts_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hostQueryReset = hostQueryReset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      timelineSemaphore = timelineSemaphore_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddress = bufferDeviceAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModel = vulkanMemoryModel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderOutputViewportIndex = shaderOutputViewportIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderOutputLayer = shaderOutputLayer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPhysicalDeviceVulkan12Features const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan12Features *>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan12Features *>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan12Features const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceVulkan12Features *>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan12Features *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceVulkan12Features *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+
reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + samplerMirrorClampToEdge, + drawIndirectCount, + storageBuffer8BitAccess, + uniformAndStorageBuffer8BitAccess, + storagePushConstant8, + shaderBufferInt64Atomics, + shaderSharedInt64Atomics, + shaderFloat16, + shaderInt8, + descriptorIndexing, + shaderInputAttachmentArrayDynamicIndexing, + shaderUniformTexelBufferArrayDynamicIndexing, + shaderStorageTexelBufferArrayDynamicIndexing, + shaderUniformBufferArrayNonUniformIndexing, + shaderSampledImageArrayNonUniformIndexing, + shaderStorageBufferArrayNonUniformIndexing, + shaderStorageImageArrayNonUniformIndexing, + shaderInputAttachmentArrayNonUniformIndexing, + shaderUniformTexelBufferArrayNonUniformIndexing, + shaderStorageTexelBufferArrayNonUniformIndexing, + descriptorBindingUniformBufferUpdateAfterBind, + descriptorBindingSampledImageUpdateAfterBind, + descriptorBindingStorageImageUpdateAfterBind, + descriptorBindingStorageBufferUpdateAfterBind, + descriptorBindingUniformTexelBufferUpdateAfterBind, + descriptorBindingStorageTexelBufferUpdateAfterBind, + descriptorBindingUpdateUnusedWhilePending, + descriptorBindingPartiallyBound, + descriptorBindingVariableDescriptorCount, + runtimeDescriptorArray, + samplerFilterMinmax, + scalarBlockLayout, + imagelessFramebuffer, + uniformBufferStandardLayout, + shaderSubgroupExtendedTypes, + separateDepthStencilLayouts, + hostQueryReset, + timelineSemaphore, + bufferDeviceAddress, + bufferDeviceAddressCaptureReplay, + bufferDeviceAddressMultiDevice, + vulkanMemoryModel, + vulkanMemoryModelDeviceScope, + vulkanMemoryModelAvailabilityVisibilityChains, + shaderOutputViewportIndex, + shaderOutputLayer, + subgroupBroadcastDynamicId ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan12Features const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge ) && + ( drawIndirectCount == rhs.drawIndirectCount ) && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) && + ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && ( storagePushConstant8 == rhs.storagePushConstant8 ) && + ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ) && + ( shaderFloat16 == rhs.shaderFloat16 ) && ( shaderInt8 == rhs.shaderInt8 ) && ( descriptorIndexing == rhs.descriptorIndexing ) && + ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) && + ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) && + ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) && + ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) && + ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) && + ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) && + ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) && + ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) && + ( 
shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) && + ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) && + ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) && + ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) && + ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) && + ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) && + ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) && + ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) && + ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) && + ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) && + ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) && + ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ) && ( samplerFilterMinmax == rhs.samplerFilterMinmax ) && + ( scalarBlockLayout == rhs.scalarBlockLayout ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer ) && + ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ) && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ) && + ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ) && ( hostQueryReset == rhs.hostQueryReset ) && + ( timelineSemaphore == rhs.timelineSemaphore ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && + ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && + ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ) && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) && + ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) && + ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ) && + ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex ) && ( shaderOutputLayer == rhs.shaderOutputLayer ) && + ( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId ); +# endif + } + + bool operator!=( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {}; + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 
shaderSampledImageArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
+    VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax = {};
+    VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};
+    VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {};
+    VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Features>
+  {
+    using Type = PhysicalDeviceVulkan12Features;
+  };
+
+  // wrapper struct for struct VkPhysicalDeviceVulkan12Properties, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan12Properties.html
+  struct PhysicalDeviceVulkan12Properties
+  {
+    using NativeType = VkPhysicalDeviceVulkan12Properties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Properties;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties(
+      VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary,
+      std::array<char, VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {},
+      std::array<char, VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {},
+      VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {},
+      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
+      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
+
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, + uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, + uint32_t maxPerStageUpdateAfterBindResources_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, + uint64_t maxTimelineSemaphoreValueDifference_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , driverID{ driverID_ } + , driverName{ driverName_ } + , driverInfo{ driverInfo_ } + , conformanceVersion{ conformanceVersion_ } + , denormBehaviorIndependence{ denormBehaviorIndependence_ } + , roundingModeIndependence{ roundingModeIndependence_ } + , 
shaderSignedZeroInfNanPreserveFloat16{ shaderSignedZeroInfNanPreserveFloat16_ } + , shaderSignedZeroInfNanPreserveFloat32{ shaderSignedZeroInfNanPreserveFloat32_ } + , shaderSignedZeroInfNanPreserveFloat64{ shaderSignedZeroInfNanPreserveFloat64_ } + , shaderDenormPreserveFloat16{ shaderDenormPreserveFloat16_ } + , shaderDenormPreserveFloat32{ shaderDenormPreserveFloat32_ } + , shaderDenormPreserveFloat64{ shaderDenormPreserveFloat64_ } + , shaderDenormFlushToZeroFloat16{ shaderDenormFlushToZeroFloat16_ } + , shaderDenormFlushToZeroFloat32{ shaderDenormFlushToZeroFloat32_ } + , shaderDenormFlushToZeroFloat64{ shaderDenormFlushToZeroFloat64_ } + , shaderRoundingModeRTEFloat16{ shaderRoundingModeRTEFloat16_ } + , shaderRoundingModeRTEFloat32{ shaderRoundingModeRTEFloat32_ } + , shaderRoundingModeRTEFloat64{ shaderRoundingModeRTEFloat64_ } + , shaderRoundingModeRTZFloat16{ shaderRoundingModeRTZFloat16_ } + , shaderRoundingModeRTZFloat32{ shaderRoundingModeRTZFloat32_ } + , shaderRoundingModeRTZFloat64{ shaderRoundingModeRTZFloat64_ } + , maxUpdateAfterBindDescriptorsInAllPools{ maxUpdateAfterBindDescriptorsInAllPools_ } + , shaderUniformBufferArrayNonUniformIndexingNative{ shaderUniformBufferArrayNonUniformIndexingNative_ } + , shaderSampledImageArrayNonUniformIndexingNative{ shaderSampledImageArrayNonUniformIndexingNative_ } + , shaderStorageBufferArrayNonUniformIndexingNative{ shaderStorageBufferArrayNonUniformIndexingNative_ } + , shaderStorageImageArrayNonUniformIndexingNative{ shaderStorageImageArrayNonUniformIndexingNative_ } + , shaderInputAttachmentArrayNonUniformIndexingNative{ shaderInputAttachmentArrayNonUniformIndexingNative_ } + , robustBufferAccessUpdateAfterBind{ robustBufferAccessUpdateAfterBind_ } + , quadDivergentImplicitLod{ quadDivergentImplicitLod_ } + , maxPerStageDescriptorUpdateAfterBindSamplers{ maxPerStageDescriptorUpdateAfterBindSamplers_ } + , maxPerStageDescriptorUpdateAfterBindUniformBuffers{ maxPerStageDescriptorUpdateAfterBindUniformBuffers_ } + , maxPerStageDescriptorUpdateAfterBindStorageBuffers{ maxPerStageDescriptorUpdateAfterBindStorageBuffers_ } + , maxPerStageDescriptorUpdateAfterBindSampledImages{ maxPerStageDescriptorUpdateAfterBindSampledImages_ } + , maxPerStageDescriptorUpdateAfterBindStorageImages{ maxPerStageDescriptorUpdateAfterBindStorageImages_ } + , maxPerStageDescriptorUpdateAfterBindInputAttachments{ maxPerStageDescriptorUpdateAfterBindInputAttachments_ } + , maxPerStageUpdateAfterBindResources{ maxPerStageUpdateAfterBindResources_ } + , maxDescriptorSetUpdateAfterBindSamplers{ maxDescriptorSetUpdateAfterBindSamplers_ } + , maxDescriptorSetUpdateAfterBindUniformBuffers{ maxDescriptorSetUpdateAfterBindUniformBuffers_ } + , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic{ maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindStorageBuffers{ maxDescriptorSetUpdateAfterBindStorageBuffers_ } + , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic{ maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindSampledImages{ maxDescriptorSetUpdateAfterBindSampledImages_ } + , maxDescriptorSetUpdateAfterBindStorageImages{ maxDescriptorSetUpdateAfterBindStorageImages_ } + , maxDescriptorSetUpdateAfterBindInputAttachments{ maxDescriptorSetUpdateAfterBindInputAttachments_ } + , supportedDepthResolveModes{ supportedDepthResolveModes_ } + , supportedStencilResolveModes{ supportedStencilResolveModes_ } + , independentResolveNone{ independentResolveNone_ } + , independentResolve{ 
independentResolve_ }
+      , filterMinmaxSingleComponentFormats{ filterMinmaxSingleComponentFormats_ }
+      , filterMinmaxImageComponentMapping{ filterMinmaxImageComponentMapping_ }
+      , maxTimelineSemaphoreValueDifference{ maxTimelineSemaphoreValueDifference_ }
+      , framebufferIntegerColorSampleCounts{ framebufferIntegerColorSampleCounts_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVulkan12Properties( *reinterpret_cast<PhysicalDeviceVulkan12Properties const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceVulkan12Properties & operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceVulkan12Properties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan12Properties *>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan12Properties *>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan12Properties const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPhysicalDeviceVulkan12Properties *>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan12Properties *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPhysicalDeviceVulkan12Properties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DriverId const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::ConformanceVersion const &,
+               VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &,
+               uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &,
+               uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &,
+               VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, VULKAN_HPP_NAMESPACE::ResolveModeFlags const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               uint64_t const &,
+               VULKAN_HPP_NAMESPACE::SampleCountFlags const &>
+# endif
+    reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       driverID,
+                       driverName,
+                       driverInfo,
+                       conformanceVersion,
+                       denormBehaviorIndependence,
+
roundingModeIndependence, + shaderSignedZeroInfNanPreserveFloat16, + shaderSignedZeroInfNanPreserveFloat32, + shaderSignedZeroInfNanPreserveFloat64, + shaderDenormPreserveFloat16, + shaderDenormPreserveFloat32, + shaderDenormPreserveFloat64, + shaderDenormFlushToZeroFloat16, + shaderDenormFlushToZeroFloat32, + shaderDenormFlushToZeroFloat64, + shaderRoundingModeRTEFloat16, + shaderRoundingModeRTEFloat32, + shaderRoundingModeRTEFloat64, + shaderRoundingModeRTZFloat16, + shaderRoundingModeRTZFloat32, + shaderRoundingModeRTZFloat64, + maxUpdateAfterBindDescriptorsInAllPools, + shaderUniformBufferArrayNonUniformIndexingNative, + shaderSampledImageArrayNonUniformIndexingNative, + shaderStorageBufferArrayNonUniformIndexingNative, + shaderStorageImageArrayNonUniformIndexingNative, + shaderInputAttachmentArrayNonUniformIndexingNative, + robustBufferAccessUpdateAfterBind, + quadDivergentImplicitLod, + maxPerStageDescriptorUpdateAfterBindSamplers, + maxPerStageDescriptorUpdateAfterBindUniformBuffers, + maxPerStageDescriptorUpdateAfterBindStorageBuffers, + maxPerStageDescriptorUpdateAfterBindSampledImages, + maxPerStageDescriptorUpdateAfterBindStorageImages, + maxPerStageDescriptorUpdateAfterBindInputAttachments, + maxPerStageUpdateAfterBindResources, + maxDescriptorSetUpdateAfterBindSamplers, + maxDescriptorSetUpdateAfterBindUniformBuffers, + maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, + maxDescriptorSetUpdateAfterBindStorageBuffers, + maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, + maxDescriptorSetUpdateAfterBindSampledImages, + maxDescriptorSetUpdateAfterBindStorageImages, + maxDescriptorSetUpdateAfterBindInputAttachments, + supportedDepthResolveModes, + supportedStencilResolveModes, + independentResolveNone, + independentResolve, + filterMinmaxSingleComponentFormats, + filterMinmaxImageComponentMapping, + maxTimelineSemaphoreValueDifference, + framebufferIntegerColorSampleCounts ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = driverID <=> rhs.driverID; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( driverName, rhs.driverName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( driverInfo, rhs.driverInfo ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = conformanceVersion <=> rhs.conformanceVersion; cmp != 0 ) + return cmp; + if ( auto cmp = denormBehaviorIndependence <=> rhs.denormBehaviorIndependence; cmp != 0 ) + return cmp; + if ( auto cmp = roundingModeIndependence <=> rhs.roundingModeIndependence; cmp != 0 ) + return cmp; + if ( auto cmp = shaderSignedZeroInfNanPreserveFloat16 <=> rhs.shaderSignedZeroInfNanPreserveFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = shaderSignedZeroInfNanPreserveFloat32 <=> rhs.shaderSignedZeroInfNanPreserveFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderSignedZeroInfNanPreserveFloat64 <=> rhs.shaderSignedZeroInfNanPreserveFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormPreserveFloat16 <=> rhs.shaderDenormPreserveFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormPreserveFloat32 <=> rhs.shaderDenormPreserveFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormPreserveFloat64 <=> rhs.shaderDenormPreserveFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormFlushToZeroFloat16 <=> rhs.shaderDenormFlushToZeroFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormFlushToZeroFloat32 <=> rhs.shaderDenormFlushToZeroFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormFlushToZeroFloat64 <=> rhs.shaderDenormFlushToZeroFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTEFloat16 <=> rhs.shaderRoundingModeRTEFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTEFloat32 <=> rhs.shaderRoundingModeRTEFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTEFloat64 <=> rhs.shaderRoundingModeRTEFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTZFloat16 <=> rhs.shaderRoundingModeRTZFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTZFloat32 <=> rhs.shaderRoundingModeRTZFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTZFloat64 <=> rhs.shaderRoundingModeRTZFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = maxUpdateAfterBindDescriptorsInAllPools <=> rhs.maxUpdateAfterBindDescriptorsInAllPools; cmp != 0 ) + return cmp; + if ( auto cmp = shaderUniformBufferArrayNonUniformIndexingNative <=> rhs.shaderUniformBufferArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = shaderSampledImageArrayNonUniformIndexingNative <=> rhs.shaderSampledImageArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = shaderStorageBufferArrayNonUniformIndexingNative <=> rhs.shaderStorageBufferArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = shaderStorageImageArrayNonUniformIndexingNative <=> rhs.shaderStorageImageArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = shaderInputAttachmentArrayNonUniformIndexingNative <=> rhs.shaderInputAttachmentArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = robustBufferAccessUpdateAfterBind <=> rhs.robustBufferAccessUpdateAfterBind; cmp != 0 ) + return cmp; + if ( auto cmp = quadDivergentImplicitLod <=> rhs.quadDivergentImplicitLod; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindSamplers <=> rhs.maxPerStageDescriptorUpdateAfterBindSamplers; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindUniformBuffers <=> rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindStorageBuffers <=> 
rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindSampledImages <=> rhs.maxPerStageDescriptorUpdateAfterBindSampledImages; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindStorageImages <=> rhs.maxPerStageDescriptorUpdateAfterBindStorageImages; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindInputAttachments <=> rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageUpdateAfterBindResources <=> rhs.maxPerStageUpdateAfterBindResources; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindSamplers <=> rhs.maxDescriptorSetUpdateAfterBindSamplers; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindUniformBuffers <=> rhs.maxDescriptorSetUpdateAfterBindUniformBuffers; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindUniformBuffersDynamic <=> rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageBuffers <=> rhs.maxDescriptorSetUpdateAfterBindStorageBuffers; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageBuffersDynamic <=> rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindSampledImages <=> rhs.maxDescriptorSetUpdateAfterBindSampledImages; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageImages <=> rhs.maxDescriptorSetUpdateAfterBindStorageImages; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindInputAttachments <=> rhs.maxDescriptorSetUpdateAfterBindInputAttachments; cmp != 0 ) + return cmp; + if ( auto cmp = supportedDepthResolveModes <=> rhs.supportedDepthResolveModes; cmp != 0 ) + return cmp; + if ( auto cmp = supportedStencilResolveModes <=> rhs.supportedStencilResolveModes; cmp != 0 ) + return cmp; + if ( auto cmp = independentResolveNone <=> rhs.independentResolveNone; cmp != 0 ) + return cmp; + if ( auto cmp = independentResolve <=> rhs.independentResolve; cmp != 0 ) + return cmp; + if ( auto cmp = filterMinmaxSingleComponentFormats <=> rhs.filterMinmaxSingleComponentFormats; cmp != 0 ) + return cmp; + if ( auto cmp = filterMinmaxImageComponentMapping <=> rhs.filterMinmaxImageComponentMapping; cmp != 0 ) + return cmp; + if ( auto cmp = maxTimelineSemaphoreValueDifference <=> rhs.maxTimelineSemaphoreValueDifference; cmp != 0 ) + return cmp; + if ( auto cmp = framebufferIntegerColorSampleCounts <=> rhs.framebufferIntegerColorSampleCounts; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( strcmp( driverName, rhs.driverName ) == 0 ) && + ( strcmp( driverInfo, rhs.driverInfo ) == 0 ) && ( conformanceVersion == rhs.conformanceVersion ) && + ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && ( roundingModeIndependence == rhs.roundingModeIndependence ) && + ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) && + ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) && + ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) && + ( 
shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) && + ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) && + ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) && + ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) && + ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) && + ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) && + ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ) && + ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) && + ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) && + ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) && + ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) && + ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) && + ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) && + ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) && + ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) && + ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) && + ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) && + ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) && + ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) && + ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) && + ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) && + ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ) && + ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && + ( independentResolveNone == 
rhs.independentResolveNone ) && ( independentResolve == rhs.independentResolve ) && + ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) && + ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ) && + ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ) && + ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts ); + } + + bool operator!=( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverInfo = {}; + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {}; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {}; + uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; + uint32_t maxPerStageUpdateAfterBindResources = {}; + uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; + 
uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; + uint64_t maxTimelineSemaphoreValueDifference = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVulkan12Properties; + }; + + // wrapper struct for struct VkPhysicalDeviceVulkan13Features, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan13Features.html + struct PhysicalDeviceVulkan13Features + { + using NativeType = VkPhysicalDeviceVulkan13Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Features; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , robustImageAccess{ robustImageAccess_ } + , inlineUniformBlock{ inlineUniformBlock_ } + , descriptorBindingInlineUniformBlockUpdateAfterBind{ descriptorBindingInlineUniformBlockUpdateAfterBind_ } + , pipelineCreationCacheControl{ pipelineCreationCacheControl_ } + , privateData{ privateData_ } + , shaderDemoteToHelperInvocation{ shaderDemoteToHelperInvocation_ } + , shaderTerminateInvocation{ shaderTerminateInvocation_ } + , subgroupSizeControl{ subgroupSizeControl_ } + , computeFullSubgroups{ computeFullSubgroups_ } + , synchronization2{ synchronization2_ } + , textureCompressionASTC_HDR{ textureCompressionASTC_HDR_ } + , shaderZeroInitializeWorkgroupMemory{ shaderZeroInitializeWorkgroupMemory_ } + , dynamicRendering{ dynamicRendering_ } + , shaderIntegerDotProduct{ shaderIntegerDotProduct_ } + , maintenance4{ maintenance4_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
PhysicalDeviceVulkan13Features( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan13Features( *reinterpret_cast<PhysicalDeviceVulkan13Features const *>( &rhs ) ) + { + } + + PhysicalDeviceVulkan13Features & operator=( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceVulkan13Features & operator=( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<PhysicalDeviceVulkan13Features const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT + { + robustImageAccess = robustImageAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT + { + inlineUniformBlock = inlineUniformBlock_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setDescriptorBindingInlineUniformBlockUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCreationCacheControl = pipelineCreationCacheControl_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT + { + privateData = privateData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT + { + shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT + { + shaderTerminateInvocation = shaderTerminateInvocation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT + { + subgroupSizeControl = subgroupSizeControl_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT + { + computeFullSubgroups = computeFullSubgroups_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT + { + synchronization2 = synchronization2_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionASTC_HDR = textureCompressionASTC_HDR_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & +
setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT + { + shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT + { + dynamicRendering = dynamicRendering_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT + { + shaderIntegerDotProduct = shaderIntegerDotProduct_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT + { + maintenance4 = maintenance4_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceVulkan13Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceVulkan13Features const *>( this ); + } + + operator VkPhysicalDeviceVulkan13Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceVulkan13Features *>( this ); + } + + operator VkPhysicalDeviceVulkan13Features const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<VkPhysicalDeviceVulkan13Features const *>( this ); + } + + operator VkPhysicalDeviceVulkan13Features *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<VkPhysicalDeviceVulkan13Features *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + robustImageAccess, + inlineUniformBlock, + descriptorBindingInlineUniformBlockUpdateAfterBind, + pipelineCreationCacheControl, + privateData, + shaderDemoteToHelperInvocation, + shaderTerminateInvocation, + subgroupSizeControl, + computeFullSubgroups, + synchronization2, + textureCompressionASTC_HDR, + shaderZeroInitializeWorkgroupMemory, + dynamicRendering, + shaderIntegerDotProduct, + maintenance4 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan13Features const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess ) && + ( inlineUniformBlock == rhs.inlineUniformBlock ) && + ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ) && + ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ) && ( privateData == rhs.privateData ) && + ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation ) && ( shaderTerminateInvocation == rhs.shaderTerminateInvocation ) && + ( subgroupSizeControl == rhs.subgroupSizeControl ) && ( computeFullSubgroups == rhs.computeFullSubgroups ) && + ( synchronization2 == rhs.synchronization2 ) && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR ) && + ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory ) && ( dynamicRendering == rhs.dynamicRendering ) && + ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct ) && ( maintenance4 == rhs.maintenance4 ); +# endif + } + + bool operator!=( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType =
StructureType::ePhysicalDeviceVulkan13Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {}; + VULKAN_HPP_NAMESPACE::Bool32 privateData = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {}; + VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {}; + VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVulkan13Features; + }; + + // wrapper struct for struct VkPhysicalDeviceVulkan13Properties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan13Properties.html + struct PhysicalDeviceVulkan13Properties + { + using NativeType = VkPhysicalDeviceVulkan13Properties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Properties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVulkan13Properties( uint32_t minSubgroupSize_ = {}, + uint32_t maxSubgroupSize_ = {}, + uint32_t maxComputeWorkgroupSubgroups_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {}, + uint32_t maxInlineUniformBlockSize_ = {}, + uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, + uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}, + uint32_t maxInlineUniformTotalSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {}, + 
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minSubgroupSize{ minSubgroupSize_ } + , maxSubgroupSize{ maxSubgroupSize_ } + , maxComputeWorkgroupSubgroups{ maxComputeWorkgroupSubgroups_ } + , requiredSubgroupSizeStages{ requiredSubgroupSizeStages_ } + , maxInlineUniformBlockSize{ maxInlineUniformBlockSize_ } + , maxPerStageDescriptorInlineUniformBlocks{ maxPerStageDescriptorInlineUniformBlocks_ } + , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks{ maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ } + , maxDescriptorSetInlineUniformBlocks{ maxDescriptorSetInlineUniformBlocks_ } + , maxDescriptorSetUpdateAfterBindInlineUniformBlocks{ maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ } + , maxInlineUniformTotalSize{ maxInlineUniformTotalSize_ } + , integerDotProduct8BitUnsignedAccelerated{ integerDotProduct8BitUnsignedAccelerated_ } + , integerDotProduct8BitSignedAccelerated{ integerDotProduct8BitSignedAccelerated_ } + , integerDotProduct8BitMixedSignednessAccelerated{ integerDotProduct8BitMixedSignednessAccelerated_ } + , integerDotProduct4x8BitPackedUnsignedAccelerated{ integerDotProduct4x8BitPackedUnsignedAccelerated_ } + , integerDotProduct4x8BitPackedSignedAccelerated{ integerDotProduct4x8BitPackedSignedAccelerated_ } + , integerDotProduct4x8BitPackedMixedSignednessAccelerated{ integerDotProduct4x8BitPackedMixedSignednessAccelerated_ } + , integerDotProduct16BitUnsignedAccelerated{ integerDotProduct16BitUnsignedAccelerated_ } + , integerDotProduct16BitSignedAccelerated{ integerDotProduct16BitSignedAccelerated_ } + , integerDotProduct16BitMixedSignednessAccelerated{ integerDotProduct16BitMixedSignednessAccelerated_ } + , 
integerDotProduct32BitUnsignedAccelerated{ integerDotProduct32BitUnsignedAccelerated_ } + , integerDotProduct32BitSignedAccelerated{ integerDotProduct32BitSignedAccelerated_ } + , integerDotProduct32BitMixedSignednessAccelerated{ integerDotProduct32BitMixedSignednessAccelerated_ } + , integerDotProduct64BitUnsignedAccelerated{ integerDotProduct64BitUnsignedAccelerated_ } + , integerDotProduct64BitSignedAccelerated{ integerDotProduct64BitSignedAccelerated_ } + , integerDotProduct64BitMixedSignednessAccelerated{ integerDotProduct64BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating8BitSignedAccelerated{ integerDotProductAccumulatingSaturating8BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ } + , integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating16BitSignedAccelerated{ integerDotProductAccumulatingSaturating16BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating32BitSignedAccelerated{ integerDotProductAccumulatingSaturating32BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating64BitSignedAccelerated{ integerDotProductAccumulatingSaturating64BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ } + , storageTexelBufferOffsetAlignmentBytes{ storageTexelBufferOffsetAlignmentBytes_ } + , storageTexelBufferOffsetSingleTexelAlignment{ storageTexelBufferOffsetSingleTexelAlignment_ } + , uniformTexelBufferOffsetAlignmentBytes{ uniformTexelBufferOffsetAlignmentBytes_ } + , uniformTexelBufferOffsetSingleTexelAlignment{ uniformTexelBufferOffsetSingleTexelAlignment_ } + , maxBufferSize{ maxBufferSize_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Properties( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan13Properties( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan13Properties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVulkan13Properties & operator=( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceVulkan13Properties & operator=( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceVulkan13Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVulkan13Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVulkan13Properties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVulkan13Properties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + minSubgroupSize, + maxSubgroupSize, + maxComputeWorkgroupSubgroups, + requiredSubgroupSizeStages, + maxInlineUniformBlockSize, + maxPerStageDescriptorInlineUniformBlocks, + maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, + maxDescriptorSetInlineUniformBlocks, + maxDescriptorSetUpdateAfterBindInlineUniformBlocks, + maxInlineUniformTotalSize, + integerDotProduct8BitUnsignedAccelerated, + integerDotProduct8BitSignedAccelerated, + integerDotProduct8BitMixedSignednessAccelerated, + integerDotProduct4x8BitPackedUnsignedAccelerated, + integerDotProduct4x8BitPackedSignedAccelerated, + integerDotProduct4x8BitPackedMixedSignednessAccelerated, + integerDotProduct16BitUnsignedAccelerated, + integerDotProduct16BitSignedAccelerated, + integerDotProduct16BitMixedSignednessAccelerated, + integerDotProduct32BitUnsignedAccelerated, + integerDotProduct32BitSignedAccelerated, + integerDotProduct32BitMixedSignednessAccelerated, + integerDotProduct64BitUnsignedAccelerated, + integerDotProduct64BitSignedAccelerated, + integerDotProduct64BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating8BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating8BitSignedAccelerated, + integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating16BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating16BitSignedAccelerated, + integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating32BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating32BitSignedAccelerated, + integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating64BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating64BitSignedAccelerated, + integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated, + storageTexelBufferOffsetAlignmentBytes, + storageTexelBufferOffsetSingleTexelAlignment, + uniformTexelBufferOffsetAlignmentBytes, + uniformTexelBufferOffsetSingleTexelAlignment, + maxBufferSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan13Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + 
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) && ( maxSubgroupSize == rhs.maxSubgroupSize ) && + ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages ) && + ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) && + ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) && + ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) && + ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) && + ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ) && + ( maxInlineUniformTotalSize == rhs.maxInlineUniformTotalSize ) && + ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated ) && + ( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated ) && + ( integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated ) && + ( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated ) && + ( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated ) && + ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated ) && + ( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated ) && + ( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated ) && + ( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated ) && + ( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated ) && + ( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated ) && + ( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated ) && + ( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated ) && + ( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated ) && + ( integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ) && + ( 
integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ) && + ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) && + ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) && + ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) && + ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment ) && ( maxBufferSize == rhs.maxBufferSize ); +# endif + } + + bool operator!=( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Properties; + void * pNext = {}; + uint32_t minSubgroupSize = {}; + uint32_t maxSubgroupSize = {}; + uint32_t maxComputeWorkgroupSubgroups = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {}; + uint32_t maxInlineUniformBlockSize = {}; + uint32_t maxPerStageDescriptorInlineUniformBlocks = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {}; + uint32_t maxDescriptorSetInlineUniformBlocks = {}; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {}; + uint32_t maxInlineUniformTotalSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 
integerDotProduct64BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVulkan13Properties; + }; + + // wrapper struct for struct VkPhysicalDeviceVulkan14Features, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan14Features.html + struct PhysicalDeviceVulkan14Features + { + using NativeType = VkPhysicalDeviceVulkan14Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan14Features; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan14Features( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {}, + 
VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pushDescriptor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , globalPriorityQuery{ globalPriorityQuery_ } + , shaderSubgroupRotate{ shaderSubgroupRotate_ } + , shaderSubgroupRotateClustered{ shaderSubgroupRotateClustered_ } + , shaderFloatControls2{ shaderFloatControls2_ } + , shaderExpectAssume{ shaderExpectAssume_ } + , rectangularLines{ rectangularLines_ } + , bresenhamLines{ bresenhamLines_ } + , smoothLines{ smoothLines_ } + , stippledRectangularLines{ stippledRectangularLines_ } + , stippledBresenhamLines{ stippledBresenhamLines_ } + , stippledSmoothLines{ stippledSmoothLines_ } + , vertexAttributeInstanceRateDivisor{ vertexAttributeInstanceRateDivisor_ } + , vertexAttributeInstanceRateZeroDivisor{ vertexAttributeInstanceRateZeroDivisor_ } + , indexTypeUint8{ indexTypeUint8_ } + , dynamicRenderingLocalRead{ dynamicRenderingLocalRead_ } + , maintenance5{ maintenance5_ } + , maintenance6{ maintenance6_ } + , pipelineProtectedAccess{ pipelineProtectedAccess_ } + , pipelineRobustness{ pipelineRobustness_ } + , hostImageCopy{ hostImageCopy_ } + , pushDescriptor{ pushDescriptor_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan14Features( PhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan14Features( VkPhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan14Features( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVulkan14Features & operator=( PhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceVulkan14Features & operator=( VkPhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setGlobalPriorityQuery( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT + { + globalPriorityQuery = globalPriorityQuery_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setShaderSubgroupRotate( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupRotate = shaderSubgroupRotate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setShaderSubgroupRotateClustered( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupRotateClustered = shaderSubgroupRotateClustered_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setShaderFloatControls2( VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloatControls2 = shaderFloatControls2_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setShaderExpectAssume( VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume_ ) VULKAN_HPP_NOEXCEPT + { + 
shaderExpectAssume = shaderExpectAssume_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT + { + rectangularLines = rectangularLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT + { + bresenhamLines = bresenhamLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT + { + smoothLines = smoothLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledRectangularLines = stippledRectangularLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledBresenhamLines = stippledBresenhamLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledSmoothLines = stippledSmoothLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeUint8 = indexTypeUint8_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setDynamicRenderingLocalRead( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead_ ) VULKAN_HPP_NOEXCEPT + { + dynamicRenderingLocalRead = dynamicRenderingLocalRead_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setMaintenance5( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ ) VULKAN_HPP_NOEXCEPT + { + maintenance5 = maintenance5_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setMaintenance6( VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ ) VULKAN_HPP_NOEXCEPT + { + maintenance6 = maintenance6_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setPipelineProtectedAccess( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ ) VULKAN_HPP_NOEXCEPT + { + pipelineProtectedAccess = pipelineProtectedAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setPipelineRobustness( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ ) VULKAN_HPP_NOEXCEPT + { + pipelineRobustness = pipelineRobustness_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setHostImageCopy( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ ) VULKAN_HPP_NOEXCEPT + { + hostImageCopy = hostImageCopy_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceVulkan14Features & setPushDescriptor( VULKAN_HPP_NAMESPACE::Bool32 pushDescriptor_ ) VULKAN_HPP_NOEXCEPT + { + pushDescriptor = pushDescriptor_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceVulkan14Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceVulkan14Features const *>( this ); + } + + operator VkPhysicalDeviceVulkan14Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceVulkan14Features *>( this ); + } + + operator VkPhysicalDeviceVulkan14Features const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<VkPhysicalDeviceVulkan14Features const *>( this ); + } + + operator VkPhysicalDeviceVulkan14Features *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<VkPhysicalDeviceVulkan14Features *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + globalPriorityQuery, + shaderSubgroupRotate, + shaderSubgroupRotateClustered, + shaderFloatControls2, + shaderExpectAssume, + rectangularLines, + bresenhamLines, + smoothLines, + stippledRectangularLines, + stippledBresenhamLines, + stippledSmoothLines, + vertexAttributeInstanceRateDivisor, + vertexAttributeInstanceRateZeroDivisor, + indexTypeUint8, + dynamicRenderingLocalRead, + maintenance5, + maintenance6, + pipelineProtectedAccess, + pipelineRobustness, + hostImageCopy, + pushDescriptor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan14Features const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan14Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriorityQuery == rhs.globalPriorityQuery ) && + ( shaderSubgroupRotate == rhs.shaderSubgroupRotate ) && ( shaderSubgroupRotateClustered == rhs.shaderSubgroupRotateClustered ) && + ( shaderFloatControls2 == rhs.shaderFloatControls2 ) && ( shaderExpectAssume == rhs.shaderExpectAssume ) && + ( rectangularLines == rhs.rectangularLines ) && ( bresenhamLines == rhs.bresenhamLines ) && ( smoothLines == rhs.smoothLines ) && + ( stippledRectangularLines == rhs.stippledRectangularLines ) && ( stippledBresenhamLines == rhs.stippledBresenhamLines ) && + ( stippledSmoothLines == rhs.stippledSmoothLines ) && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) && + ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor ) && ( indexTypeUint8 == rhs.indexTypeUint8 ) && + ( dynamicRenderingLocalRead == rhs.dynamicRenderingLocalRead ) && ( maintenance5 == rhs.maintenance5 ) && ( maintenance6 == rhs.maintenance6 ) && + ( pipelineProtectedAccess == rhs.pipelineProtectedAccess ) && ( pipelineRobustness == rhs.pipelineRobustness ) && + ( hostImageCopy == rhs.hostImageCopy ) && ( pushDescriptor == rhs.pushDescriptor ); +# endif + } + + bool operator!=( PhysicalDeviceVulkan14Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan14Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume = {}; +
VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {}; + VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance5 = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance6 = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness = {}; + VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy = {}; + VULKAN_HPP_NAMESPACE::Bool32 pushDescriptor = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVulkan14Features; + }; + + // wrapper struct for struct VkPhysicalDeviceVulkan14Properties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkan14Properties.html + struct PhysicalDeviceVulkan14Properties + { + using NativeType = VkPhysicalDeviceVulkan14Properties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan14Properties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Properties( + uint32_t lineSubPixelPrecisionBits_ = {}, + uint32_t maxVertexAttribDivisor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance_ = {}, + uint32_t maxPushDescriptors_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalReadDepthStencilAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalReadMultisampledAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 blockTexelViewCompatibleMultipleLayers_ = {}, + uint32_t maxCombinedImageSamplerDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateClampCombinerInputs_ = {}, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior defaultRobustnessImages_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault, + uint32_t copySrcLayoutCount_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts_ = {}, + uint32_t copyDstLayoutCount_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts_ = {}, + std::array const & optimalTilingLayoutUUID_ = {}, + 
VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , lineSubPixelPrecisionBits{ lineSubPixelPrecisionBits_ } + , maxVertexAttribDivisor{ maxVertexAttribDivisor_ } + , supportsNonZeroFirstInstance{ supportsNonZeroFirstInstance_ } + , maxPushDescriptors{ maxPushDescriptors_ } + , dynamicRenderingLocalReadDepthStencilAttachments{ dynamicRenderingLocalReadDepthStencilAttachments_ } + , dynamicRenderingLocalReadMultisampledAttachments{ dynamicRenderingLocalReadMultisampledAttachments_ } + , earlyFragmentMultisampleCoverageAfterSampleCounting{ earlyFragmentMultisampleCoverageAfterSampleCounting_ } + , earlyFragmentSampleMaskTestBeforeSampleCounting{ earlyFragmentSampleMaskTestBeforeSampleCounting_ } + , depthStencilSwizzleOneSupport{ depthStencilSwizzleOneSupport_ } + , polygonModePointSize{ polygonModePointSize_ } + , nonStrictSinglePixelWideLinesUseParallelogram{ nonStrictSinglePixelWideLinesUseParallelogram_ } + , nonStrictWideLinesUseParallelogram{ nonStrictWideLinesUseParallelogram_ } + , blockTexelViewCompatibleMultipleLayers{ blockTexelViewCompatibleMultipleLayers_ } + , maxCombinedImageSamplerDescriptorCount{ maxCombinedImageSamplerDescriptorCount_ } + , fragmentShadingRateClampCombinerInputs{ fragmentShadingRateClampCombinerInputs_ } + , defaultRobustnessStorageBuffers{ defaultRobustnessStorageBuffers_ } + , defaultRobustnessUniformBuffers{ defaultRobustnessUniformBuffers_ } + , defaultRobustnessVertexInputs{ defaultRobustnessVertexInputs_ } + , defaultRobustnessImages{ defaultRobustnessImages_ } + , copySrcLayoutCount{ copySrcLayoutCount_ } + , pCopySrcLayouts{ pCopySrcLayouts_ } + , copyDstLayoutCount{ copyDstLayoutCount_ } + , pCopyDstLayouts{ pCopyDstLayouts_ } + , optimalTilingLayoutUUID{ optimalTilingLayoutUUID_ } + , identicalMemoryTypeRequirements{ identicalMemoryTypeRequirements_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Properties( PhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan14Properties( VkPhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan14Properties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVulkan14Properties & operator=( PhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceVulkan14Properties & operator=( VkPhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceVulkan14Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVulkan14Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVulkan14Properties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVulkan14Properties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + lineSubPixelPrecisionBits, + maxVertexAttribDivisor, + supportsNonZeroFirstInstance, + maxPushDescriptors, + dynamicRenderingLocalReadDepthStencilAttachments, + dynamicRenderingLocalReadMultisampledAttachments, + 
earlyFragmentMultisampleCoverageAfterSampleCounting, + earlyFragmentSampleMaskTestBeforeSampleCounting, + depthStencilSwizzleOneSupport, + polygonModePointSize, + nonStrictSinglePixelWideLinesUseParallelogram, + nonStrictWideLinesUseParallelogram, + blockTexelViewCompatibleMultipleLayers, + maxCombinedImageSamplerDescriptorCount, + fragmentShadingRateClampCombinerInputs, + defaultRobustnessStorageBuffers, + defaultRobustnessUniformBuffers, + defaultRobustnessVertexInputs, + defaultRobustnessImages, + copySrcLayoutCount, + pCopySrcLayouts, + copyDstLayoutCount, + pCopyDstLayouts, + optimalTilingLayoutUUID, + identicalMemoryTypeRequirements ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan14Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan14Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits ) && + ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ) && ( supportsNonZeroFirstInstance == rhs.supportsNonZeroFirstInstance ) && + ( maxPushDescriptors == rhs.maxPushDescriptors ) && + ( dynamicRenderingLocalReadDepthStencilAttachments == rhs.dynamicRenderingLocalReadDepthStencilAttachments ) && + ( dynamicRenderingLocalReadMultisampledAttachments == rhs.dynamicRenderingLocalReadMultisampledAttachments ) && + ( earlyFragmentMultisampleCoverageAfterSampleCounting == rhs.earlyFragmentMultisampleCoverageAfterSampleCounting ) && + ( earlyFragmentSampleMaskTestBeforeSampleCounting == rhs.earlyFragmentSampleMaskTestBeforeSampleCounting ) && + ( depthStencilSwizzleOneSupport == rhs.depthStencilSwizzleOneSupport ) && ( polygonModePointSize == rhs.polygonModePointSize ) && + ( nonStrictSinglePixelWideLinesUseParallelogram == rhs.nonStrictSinglePixelWideLinesUseParallelogram ) && + ( nonStrictWideLinesUseParallelogram == rhs.nonStrictWideLinesUseParallelogram ) && + ( blockTexelViewCompatibleMultipleLayers == rhs.blockTexelViewCompatibleMultipleLayers ) && + ( maxCombinedImageSamplerDescriptorCount == rhs.maxCombinedImageSamplerDescriptorCount ) && + ( fragmentShadingRateClampCombinerInputs == rhs.fragmentShadingRateClampCombinerInputs ) && + ( defaultRobustnessStorageBuffers == rhs.defaultRobustnessStorageBuffers ) && + ( defaultRobustnessUniformBuffers == rhs.defaultRobustnessUniformBuffers ) && + ( defaultRobustnessVertexInputs == rhs.defaultRobustnessVertexInputs ) && ( defaultRobustnessImages == rhs.defaultRobustnessImages ) && + ( copySrcLayoutCount == rhs.copySrcLayoutCount ) && ( pCopySrcLayouts == rhs.pCopySrcLayouts ) && + ( copyDstLayoutCount == rhs.copyDstLayoutCount ) && ( pCopyDstLayouts == rhs.pCopyDstLayouts ) && + ( optimalTilingLayoutUUID == rhs.optimalTilingLayoutUUID ) && ( identicalMemoryTypeRequirements == rhs.identicalMemoryTypeRequirements ); +# endif + } + + bool operator!=( PhysicalDeviceVulkan14Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan14Properties; + void * pNext = {}; + uint32_t lineSubPixelPrecisionBits = {}; + uint32_t maxVertexAttribDivisor = {}; + VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance = {}; + uint32_t maxPushDescriptors = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalReadDepthStencilAttachments 
= {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalReadMultisampledAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting = {}; + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport = {}; + VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram = {}; + VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram = {}; + VULKAN_HPP_NAMESPACE::Bool32 blockTexelViewCompatibleMultipleLayers = {}; + uint32_t maxCombinedImageSamplerDescriptorCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateClampCombinerInputs = {}; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior defaultRobustnessImages = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault; + uint32_t copySrcLayoutCount = {}; + VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts = {}; + uint32_t copyDstLayoutCount = {}; + VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D optimalTilingLayoutUUID = {}; + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVulkan14Properties; + }; + + // wrapper struct for struct VkPhysicalDeviceVulkanMemoryModelFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceVulkanMemoryModelFeatures.html + struct PhysicalDeviceVulkanMemoryModelFeatures + { + using NativeType = VkPhysicalDeviceVulkanMemoryModelFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vulkanMemoryModel{ vulkanMemoryModel_ } + , vulkanMemoryModelDeviceScope{ vulkanMemoryModelDeviceScope_ } + , vulkanMemoryModelAvailabilityVisibilityChains{ vulkanMemoryModelAvailabilityVisibilityChains_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkanMemoryModelFeatures( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkanMemoryModelFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVulkanMemoryModelFeatures & operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceVulkanMemoryModelFeatures & operator=( 
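+  // Usage sketch (illustrative, not part of the generated wrappers): the Vulkan 1.4
+  // feature/property structs above are normally chained behind PhysicalDeviceFeatures2 /
+  // PhysicalDeviceProperties2 and filled by the enhanced-mode getFeatures2 / getProperties2
+  // overloads. `physicalDevice` is an assumed vk::PhysicalDevice handle.
+  //
+  //   auto featureChain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
+  //                                                   vk::PhysicalDeviceVulkan14Features>();
+  //   bool hasHostImageCopy =
+  //     featureChain.get<vk::PhysicalDeviceVulkan14Features>().hostImageCopy == VK_TRUE;
+  //
+  //   auto propertyChain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
+  //                                                      vk::PhysicalDeviceVulkan14Properties>();
+  //   uint32_t maxDivisor =
+  //     propertyChain.get<vk::PhysicalDeviceVulkan14Properties>().maxVertexAttribDivisor;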
VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & + setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModel = vulkanMemoryModel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & + setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & + setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceVulkanMemoryModelFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVulkanMemoryModelFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVulkanMemoryModelFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vulkanMemoryModel, vulkanMemoryModelDeviceScope, vulkanMemoryModelAvailabilityVisibilityChains ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkanMemoryModelFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) && + ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) && + ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ); +# endif + } + + bool operator!=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVulkanMemoryModelFeatures; + }; + + using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures; + + // wrapper struct for struct VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.html + struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR + { + using NativeType = VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , workgroupMemoryExplicitLayout{ workgroupMemoryExplicitLayout_ } + , workgroupMemoryExplicitLayoutScalarBlockLayout{ workgroupMemoryExplicitLayoutScalarBlockLayout_ } + , workgroupMemoryExplicitLayout8BitAccess{ workgroupMemoryExplicitLayout8BitAccess_ } + , workgroupMemoryExplicitLayout16BitAccess{ workgroupMemoryExplicitLayout16BitAccess_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + operator=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + operator=( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + setWorkgroupMemoryExplicitLayout( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayout = workgroupMemoryExplicitLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + setWorkgroupMemoryExplicitLayoutScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayoutScalarBlockLayout = workgroupMemoryExplicitLayoutScalarBlockLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + setWorkgroupMemoryExplicitLayout8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayout8BitAccess = workgroupMemoryExplicitLayout8BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + setWorkgroupMemoryExplicitLayout16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayout16BitAccess = workgroupMemoryExplicitLayout16BitAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + workgroupMemoryExplicitLayout, + workgroupMemoryExplicitLayoutScalarBlockLayout, + workgroupMemoryExplicitLayout8BitAccess, + workgroupMemoryExplicitLayout16BitAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( workgroupMemoryExplicitLayout == rhs.workgroupMemoryExplicitLayout ) && + ( workgroupMemoryExplicitLayoutScalarBlockLayout == rhs.workgroupMemoryExplicitLayoutScalarBlockLayout ) && + ( workgroupMemoryExplicitLayout8BitAccess == rhs.workgroupMemoryExplicitLayout8BitAccess ) && + ( workgroupMemoryExplicitLayout16BitAccess == rhs.workgroupMemoryExplicitLayout16BitAccess ); +# endif + } + + bool operator!=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + }; + + // wrapper struct for struct VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT.html + struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT + { + using NativeType = VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( 
VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , ycbcr2plane444Formats{ ycbcr2plane444Formats_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & + setYcbcr2plane444Formats( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ ) VULKAN_HPP_NOEXCEPT + { + ycbcr2plane444Formats = ycbcr2plane444Formats_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, ycbcr2plane444Formats ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcr2plane444Formats == rhs.ycbcr2plane444Formats ); +# endif + } + + bool operator!=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceYcbcrDegammaFeaturesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceYcbcrDegammaFeaturesQCOM.html + struct PhysicalDeviceYcbcrDegammaFeaturesQCOM + { + using NativeType = VkPhysicalDeviceYcbcrDegammaFeaturesQCOM; + + static const bool 
allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcrDegammaFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrDegammaFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 ycbcrDegamma_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , ycbcrDegamma{ ycbcrDegamma_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrDegammaFeaturesQCOM( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceYcbcrDegammaFeaturesQCOM( VkPhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceYcbcrDegammaFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceYcbcrDegammaFeaturesQCOM & operator=( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceYcbcrDegammaFeaturesQCOM & operator=( VkPhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrDegammaFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrDegammaFeaturesQCOM & setYcbcrDegamma( VULKAN_HPP_NAMESPACE::Bool32 ycbcrDegamma_ ) VULKAN_HPP_NOEXCEPT + { + ycbcrDegamma = ycbcrDegamma_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceYcbcrDegammaFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceYcbcrDegammaFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceYcbcrDegammaFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceYcbcrDegammaFeaturesQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, ycbcrDegamma ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcrDegamma == rhs.ycbcrDegamma ); +# endif + } + + bool operator!=( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrDegammaFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 ycbcrDegamma = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceYcbcrDegammaFeaturesQCOM; + }; + + // wrapper struct for struct VkPhysicalDeviceYcbcrImageArraysFeaturesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceYcbcrImageArraysFeaturesEXT.html + struct PhysicalDeviceYcbcrImageArraysFeaturesEXT + { + using NativeType = 
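+  // Usage sketch (illustrative, not part of the generated wrappers): these per-feature Bool32
+  // structs follow one pattern - query support through the PhysicalDeviceFeatures2 pNext chain,
+  // then chain the same struct (with the desired members set to VK_TRUE) into
+  // DeviceCreateInfo::pNext at device creation. A hedged sketch, assuming `queueInfos` holds the
+  // DeviceQueueCreateInfo entries and the matching device extension is enabled:
+  //
+  //   vk::PhysicalDeviceYcbcrDegammaFeaturesQCOM degammaFeature{ VK_TRUE };
+  //   vk::DeviceCreateInfo deviceCi{};
+  //   deviceCi.setQueueCreateInfos( queueInfos )
+  //           .setPNext( &degammaFeature );      // enable ycbcrDegamma on the device
+  //   vk::Device device = physicalDevice.createDevice( deviceCi );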
VkPhysicalDeviceYcbcrImageArraysFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , ycbcrImageArrays{ ycbcrImageArrays_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceYcbcrImageArraysFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & + setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT + { + ycbcrImageArrays = ycbcrImageArrays_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, ycbcrImageArrays ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcrImageArrays == rhs.ycbcrImageArrays ); +# endif + } + + bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceYcbcrImageArraysFeaturesEXT; + }; + + // wrapper struct for struct VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures.html + struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures + { + using NativeType = VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderZeroInitializeWorkgroupMemory{ shaderZeroInitializeWorkgroupMemory_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & + operator=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & operator=( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & + setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT + { + shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderZeroInitializeWorkgroupMemory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderZeroInitializeWorkgroupMemory == 
rhs.shaderZeroInitializeWorkgroupMemory ); +# endif + } + + bool operator!=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + }; + + using PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + + // wrapper struct for struct VkPipelineBinaryKeyKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryKeyKHR.html + struct PipelineBinaryKeyKHR + { + using NativeType = VkPipelineBinaryKeyKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryKeyKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR( uint32_t keySize_ = {}, + std::array const & key_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , keySize{ keySize_ } + , key{ key_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR( PipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineBinaryKeyKHR( VkPipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryKeyKHR( *reinterpret_cast( &rhs ) ) + { + } + + PipelineBinaryKeyKHR & operator=( PipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineBinaryKeyKHR & operator=( VkPipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR & setKeySize( uint32_t keySize_ ) VULKAN_HPP_NOEXCEPT + { + keySize = keySize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR & setKey( std::array key_ ) VULKAN_HPP_NOEXCEPT + { + key = key_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineBinaryKeyKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryKeyKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryKeyKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineBinaryKeyKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, keySize, key ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineBinaryKeyKHR const & ) const = default; +#else + bool operator==( PipelineBinaryKeyKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( keySize == rhs.keySize ) && ( key == rhs.key ); +# endif + } + 
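+    // Usage sketch (illustrative, not part of the generated wrappers): PipelineBinaryKeyKHR
+    // carries an opaque key in `key`, of which only the first `keySize` bytes are meaningful; a
+    // natural use is as the lookup key when persisting pipeline binaries to disk. The
+    // std::ofstream usage below is an illustrative assumption:
+    //
+    //   #include <fstream>
+    //
+    //   void writePipelineBinaryKey( std::ofstream & out, vk::PipelineBinaryKeyKHR const & key )
+    //   {
+    //     out.write( reinterpret_cast<char const *>( key.key.data() ), key.keySize );
+    //   }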
+ bool operator!=( PipelineBinaryKeyKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryKeyKHR; + void * pNext = {}; + uint32_t keySize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D key = {}; + }; + + template <> + struct CppType + { + using Type = PipelineBinaryKeyKHR; + }; + + // wrapper struct for struct VkPipelineBinaryDataKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryDataKHR.html + struct PipelineBinaryDataKHR + { + using NativeType = VkPipelineBinaryDataKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineBinaryDataKHR( size_t dataSize_ = {}, void * pData_ = {} ) VULKAN_HPP_NOEXCEPT + : dataSize{ dataSize_ } + , pData{ pData_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineBinaryDataKHR( PipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineBinaryDataKHR( VkPipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryDataKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PipelineBinaryDataKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) : dataSize( data_.size() * sizeof( T ) ), pData( data_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineBinaryDataKHR & operator=( PipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineBinaryDataKHR & operator=( VkPipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryDataKHR & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = dataSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryDataKHR & setPData( void * pData_ ) VULKAN_HPP_NOEXCEPT + { + pData = pData_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PipelineBinaryDataKHR & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = data_.size() * sizeof( T ); + pData = data_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineBinaryDataKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryDataKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineBinaryDataKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( dataSize, pData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineBinaryDataKHR const & ) const = default; +#else + bool operator==( PipelineBinaryDataKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); +# endif + } + + bool operator!=( PipelineBinaryDataKHR const & rhs ) const 
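+    // Usage sketch (illustrative, not part of the generated wrappers): PipelineBinaryDataKHR is a
+    // plain size/pointer pair with no sType. When feeding previously saved binary data back to the
+    // driver it can simply be pointed at an existing byte buffer via the setters above; the struct
+    // does not own the memory, so the buffer must outlive its use.
+    //
+    //   #include <vector>
+    //
+    //   vk::PipelineBinaryDataKHR wrapBinaryBlob( std::vector<uint8_t> & blob )
+    //   {
+    //     vk::PipelineBinaryDataKHR data{};
+    //     data.setDataSize( blob.size() ).setPData( blob.data() );
+    //     return data;
+    //   }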
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + size_t dataSize = {}; + void * pData = {}; + }; + + // wrapper struct for struct VkPipelineBinaryKeysAndDataKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryKeysAndDataKHR.html + struct PipelineBinaryKeysAndDataKHR + { + using NativeType = VkPipelineBinaryKeysAndDataKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR( uint32_t binaryCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKeys_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR * pPipelineBinaryData_ = {} ) VULKAN_HPP_NOEXCEPT + : binaryCount{ binaryCount_ } + , pPipelineBinaryKeys{ pPipelineBinaryKeys_ } + , pPipelineBinaryData{ pPipelineBinaryData_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR( PipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineBinaryKeysAndDataKHR( VkPipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryKeysAndDataKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryKeysAndDataKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaryKeys_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaryData_ = {} ) + : binaryCount( static_cast( pipelineBinaryKeys_.size() ) ) + , pPipelineBinaryKeys( pipelineBinaryKeys_.data() ) + , pPipelineBinaryData( pipelineBinaryData_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( pipelineBinaryKeys_.size() == pipelineBinaryData_.size() ); +# else + if ( pipelineBinaryKeys_.size() != pipelineBinaryData_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::PipelineBinaryKeysAndDataKHR::PipelineBinaryKeysAndDataKHR: pipelineBinaryKeys_.size() != pipelineBinaryData_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineBinaryKeysAndDataKHR & operator=( PipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineBinaryKeysAndDataKHR & operator=( VkPipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR & setBinaryCount( uint32_t binaryCount_ ) VULKAN_HPP_NOEXCEPT + { + binaryCount = binaryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR & + setPPipelineBinaryKeys( const VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKeys_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineBinaryKeys = pPipelineBinaryKeys_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryKeysAndDataKHR & setPipelineBinaryKeys( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaryKeys_ ) VULKAN_HPP_NOEXCEPT + { + binaryCount = static_cast( pipelineBinaryKeys_.size() ); + pPipelineBinaryKeys = pipelineBinaryKeys_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR & + setPPipelineBinaryData( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR * pPipelineBinaryData_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineBinaryData = pPipelineBinaryData_; + return *this; + } + +# if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryKeysAndDataKHR & setPipelineBinaryData( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaryData_ ) VULKAN_HPP_NOEXCEPT + { + binaryCount = static_cast( pipelineBinaryData_.size() ); + pPipelineBinaryData = pipelineBinaryData_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineBinaryKeysAndDataKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryKeysAndDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryKeysAndDataKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineBinaryKeysAndDataKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( binaryCount, pPipelineBinaryKeys, pPipelineBinaryData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineBinaryKeysAndDataKHR const & ) const = default; +#else + bool operator==( PipelineBinaryKeysAndDataKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( binaryCount == rhs.binaryCount ) && ( pPipelineBinaryKeys == rhs.pPipelineBinaryKeys ) && ( pPipelineBinaryData == rhs.pPipelineBinaryData ); +# endif + } + + bool operator!=( PipelineBinaryKeysAndDataKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t binaryCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKeys = {}; + const VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR * pPipelineBinaryData = {}; + }; + + // wrapper struct for struct VkPipelineCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCreateInfoKHR.html + struct PipelineCreateInfoKHR + { + using NativeType = VkPipelineCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCreateInfoKHR( void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {} + + VULKAN_HPP_CONSTEXPR PipelineCreateInfoKHR( PipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCreateInfoKHR( VkPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + PipelineCreateInfoKHR & operator=( PipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineCreateInfoKHR & operator=( VkPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCreateInfoKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkPipelineCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCreateInfoKHR const & ) const = default; +#else + bool operator==( PipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); +# endif + } + + bool operator!=( PipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreateInfoKHR; + void * pNext = {}; + }; + + template <> + struct CppType + { + using Type = PipelineCreateInfoKHR; + }; + + // wrapper struct for struct VkPipelineBinaryCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryCreateInfoKHR.html + struct PipelineBinaryCreateInfoKHR + { + using NativeType = VkPipelineBinaryCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR * pKeysAndDataInfo_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pKeysAndDataInfo{ pKeysAndDataInfo_ } + , pipeline{ pipeline_ } + , pPipelineCreateInfo{ pPipelineCreateInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR( PipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineBinaryCreateInfoKHR( VkPipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + PipelineBinaryCreateInfoKHR & operator=( PipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineBinaryCreateInfoKHR & operator=( VkPipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR & + setPKeysAndDataInfo( const VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR * pKeysAndDataInfo_ ) VULKAN_HPP_NOEXCEPT + { + pKeysAndDataInfo = pKeysAndDataInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR & + setPPipelineCreateInfo( const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * 
pPipelineCreateInfo_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineCreateInfo = pPipelineCreateInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineBinaryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineBinaryCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pKeysAndDataInfo, pipeline, pPipelineCreateInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineBinaryCreateInfoKHR const & ) const = default; +#else + bool operator==( PipelineBinaryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pKeysAndDataInfo == rhs.pKeysAndDataInfo ) && ( pipeline == rhs.pipeline ) && + ( pPipelineCreateInfo == rhs.pPipelineCreateInfo ); +# endif + } + + bool operator!=( PipelineBinaryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryCreateInfoKHR; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR * pKeysAndDataInfo = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo = {}; + }; + + template <> + struct CppType + { + using Type = PipelineBinaryCreateInfoKHR; + }; + + // wrapper struct for struct VkPipelineBinaryDataInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryDataInfoKHR.html + struct PipelineBinaryDataInfoKHR + { + using NativeType = VkPipelineBinaryDataInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryDataInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineBinaryDataInfoKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineBinary{ pipelineBinary_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineBinaryDataInfoKHR( PipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineBinaryDataInfoKHR( VkPipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryDataInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + PipelineBinaryDataInfoKHR & operator=( PipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineBinaryDataInfoKHR & operator=( VkPipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryDataInfoKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
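+    // Usage sketch (illustrative, not part of the generated wrappers): PipelineBinaryCreateInfoKHR
+    // names the source that pipeline binaries should be created from - an existing pipeline,
+    // captured keys-and-data, or a pipeline create-info chain - and is consumed by
+    // vkCreatePipelineBinariesKHR. A hedged sketch that extracts binaries from an already created
+    // pipeline (`pipeline` is an assumed vk::Pipeline created with the capture-data flag that
+    // VK_KHR_pipeline_binary requires):
+    //
+    //   vk::PipelineBinaryCreateInfoKHR binaryCi{};
+    //   binaryCi.setPipeline( pipeline );   // leave the other two source pointers null
+    //   // binaryCi is then passed to the CreatePipelineBinariesKHR entry point.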
PipelineBinaryDataInfoKHR & setPipelineBinary( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBinary = pipelineBinary_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineBinaryDataInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryDataInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryDataInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineBinaryDataInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineBinary ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineBinaryDataInfoKHR const & ) const = default; +#else + bool operator==( PipelineBinaryDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBinary == rhs.pipelineBinary ); +# endif + } + + bool operator!=( PipelineBinaryDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryDataInfoKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary = {}; + }; + + template <> + struct CppType + { + using Type = PipelineBinaryDataInfoKHR; + }; + + // wrapper struct for struct VkPipelineBinaryHandlesInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryHandlesInfoKHR.html + struct PipelineBinaryHandlesInfoKHR + { + using NativeType = VkPipelineBinaryHandlesInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryHandlesInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineBinaryHandlesInfoKHR( uint32_t pipelineBinaryCount_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineBinaryCount{ pipelineBinaryCount_ } + , pPipelineBinaries{ pPipelineBinaries_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineBinaryHandlesInfoKHR( PipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineBinaryHandlesInfoKHR( VkPipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryHandlesInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryHandlesInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaries_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), pipelineBinaryCount( static_cast( pipelineBinaries_.size() ) ), pPipelineBinaries( pipelineBinaries_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineBinaryHandlesInfoKHR & operator=( PipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineBinaryHandlesInfoKHR & operator=( VkPipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( 
&rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryHandlesInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryHandlesInfoKHR & setPipelineBinaryCount( uint32_t pipelineBinaryCount_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBinaryCount = pipelineBinaryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryHandlesInfoKHR & + setPPipelineBinaries( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineBinaries = pPipelineBinaries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryHandlesInfoKHR & setPipelineBinaries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaries_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBinaryCount = static_cast( pipelineBinaries_.size() ); + pPipelineBinaries = pipelineBinaries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineBinaryHandlesInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryHandlesInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryHandlesInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineBinaryHandlesInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineBinaryCount, pPipelineBinaries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineBinaryHandlesInfoKHR const & ) const = default; +#else + bool operator==( PipelineBinaryHandlesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBinaryCount == rhs.pipelineBinaryCount ) && + ( pPipelineBinaries == rhs.pPipelineBinaries ); +# endif + } + + bool operator!=( PipelineBinaryHandlesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryHandlesInfoKHR; + const void * pNext = {}; + uint32_t pipelineBinaryCount = {}; + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries = {}; + }; + + template <> + struct CppType + { + using Type = PipelineBinaryHandlesInfoKHR; + }; + + // wrapper struct for struct VkPipelineBinaryInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineBinaryInfoKHR.html + struct PipelineBinaryInfoKHR + { + using NativeType = VkPipelineBinaryInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineBinaryInfoKHR( uint32_t binaryCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , binaryCount{ binaryCount_ } + , pPipelineBinaries{ 
pPipelineBinaries_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineBinaryInfoKHR( PipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineBinaryInfoKHR( VkPipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaries_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), binaryCount( static_cast( pipelineBinaries_.size() ) ), pPipelineBinaries( pipelineBinaries_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineBinaryInfoKHR & operator=( PipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineBinaryInfoKHR & operator=( VkPipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryInfoKHR & setBinaryCount( uint32_t binaryCount_ ) VULKAN_HPP_NOEXCEPT + { + binaryCount = binaryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryInfoKHR & + setPPipelineBinaries( const VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineBinaries = pPipelineBinaries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryInfoKHR & setPipelineBinaries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaries_ ) VULKAN_HPP_NOEXCEPT + { + binaryCount = static_cast( pipelineBinaries_.size() ); + pPipelineBinaries = pipelineBinaries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineBinaryInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineBinaryInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, binaryCount, pPipelineBinaries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineBinaryInfoKHR const & ) const = default; +#else + bool operator==( PipelineBinaryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( binaryCount == rhs.binaryCount ) && ( pPipelineBinaries == rhs.pPipelineBinaries ); +# endif + } + + bool operator!=( PipelineBinaryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryInfoKHR; + const void * pNext = {}; + uint32_t binaryCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries = {}; + }; + + template <> + struct CppType + { + using Type = 
PipelineBinaryInfoKHR; + }; + + // wrapper struct for struct VkPipelineCacheCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCacheCreateInfo.html + struct PipelineCacheCreateInfo + { + using NativeType = VkPipelineCacheCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCacheCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {}, + size_t initialDataSize_ = {}, + const void * pInitialData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , initialDataSize{ initialDataSize_ } + , pInitialData{ pInitialData_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCacheCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineCacheCreateInfo & operator=( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineCacheCreateInfo & operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT + { + initialDataSize = initialDataSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT + { + pInitialData = pInitialData_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PipelineCacheCreateInfo & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_ ) VULKAN_HPP_NOEXCEPT + { + initialDataSize = initialData_.size() * sizeof( T ); + pInitialData = initialData_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineCacheCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCacheCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineCacheCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, initialDataSize, pInitialData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCacheCreateInfo const & ) const = default; +#else + bool operator==( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( initialDataSize == rhs.initialDataSize ) && + ( pInitialData == rhs.pInitialData ); +# endif + } + + bool operator!=( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {}; + size_t initialDataSize = {}; + const void * pInitialData = {}; + }; + + template <> + struct CppType + { + using Type = PipelineCacheCreateInfo; + }; + + // wrapper struct for struct VkPipelineCacheHeaderVersionOne, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCacheHeaderVersionOne.html + struct PipelineCacheHeaderVersionOne + { + using NativeType = VkPipelineCacheHeaderVersionOne; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PipelineCacheHeaderVersionOne( uint32_t headerSize_ = {}, + VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne, + uint32_t vendorID_ = {}, + uint32_t deviceID_ = {}, + std::array const & pipelineCacheUUID_ = {} ) VULKAN_HPP_NOEXCEPT + : headerSize{ headerSize_ } + , headerVersion{ headerVersion_ } + , vendorID{ vendorID_ } + , deviceID{ deviceID_ } + , pipelineCacheUUID{ pipelineCacheUUID_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCacheHeaderVersionOne( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCacheHeaderVersionOne( *reinterpret_cast( &rhs ) ) + { + } + + PipelineCacheHeaderVersionOne & operator=( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineCacheHeaderVersionOne & operator=( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setHeaderSize( uint32_t headerSize_ ) VULKAN_HPP_NOEXCEPT + { + headerSize = headerSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & + setHeaderVersion( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ ) VULKAN_HPP_NOEXCEPT + { + headerVersion = headerVersion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setVendorID( uint32_t vendorID_ ) VULKAN_HPP_NOEXCEPT + { + vendorID = vendorID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setDeviceID( uint32_t deviceID_ ) VULKAN_HPP_NOEXCEPT + { + deviceID = deviceID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setPipelineCacheUUID( std::array 
pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCacheUUID = pipelineCacheUUID_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineCacheHeaderVersionOne const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCacheHeaderVersionOne &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCacheHeaderVersionOne const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineCacheHeaderVersionOne *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( headerSize, headerVersion, vendorID, deviceID, pipelineCacheUUID ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCacheHeaderVersionOne const & ) const = default; +#else + bool operator==( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( headerSize == rhs.headerSize ) && ( headerVersion == rhs.headerVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && + ( pipelineCacheUUID == rhs.pipelineCacheUUID ); +# endif + } + + bool operator!=( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t headerSize = {}; + VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne; + uint32_t vendorID = {}; + uint32_t deviceID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineCacheUUID = {}; + }; + + // wrapper struct for struct VkPipelineColorBlendAdvancedStateCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineColorBlendAdvancedStateCreateInfoEXT.html + struct PipelineColorBlendAdvancedStateCreateInfoEXT + { + using NativeType = VkPipelineColorBlendAdvancedStateCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineColorBlendAdvancedStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcPremultiplied{ srcPremultiplied_ } + , dstPremultiplied{ dstPremultiplied_ } + , blendOverlap{ blendOverlap_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineColorBlendAdvancedStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( 
VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & + setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT + { + srcPremultiplied = srcPremultiplied_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & + setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT + { + dstPremultiplied = dstPremultiplied_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & + setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT + { + blendOverlap = blendOverlap_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineColorBlendAdvancedStateCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcPremultiplied, dstPremultiplied, blendOverlap ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcPremultiplied == rhs.srcPremultiplied ) && ( dstPremultiplied == rhs.dstPremultiplied ) && + ( blendOverlap == rhs.blendOverlap ); +# endif + } + + bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {}; + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {}; + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated; + }; + + template <> + struct CppType + { + using Type = PipelineColorBlendAdvancedStateCreateInfoEXT; + }; + + // wrapper struct for struct VkPipelineColorWriteCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineColorWriteCreateInfoEXT.html + struct PipelineColorWriteCreateInfoEXT + { + using NativeType = VkPipelineColorWriteCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorWriteCreateInfoEXT; + +#if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , attachmentCount{ attachmentCount_ } + , pColorWriteEnables{ pColorWriteEnables_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorWriteCreateInfoEXT( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineColorWriteCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineColorWriteCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorWriteEnables_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), attachmentCount( static_cast( colorWriteEnables_.size() ) ), pColorWriteEnables( colorWriteEnables_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineColorWriteCreateInfoEXT & operator=( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineColorWriteCreateInfoEXT & operator=( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & + setPColorWriteEnables( const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ ) VULKAN_HPP_NOEXCEPT + { + pColorWriteEnables = pColorWriteEnables_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineColorWriteCreateInfoEXT & + setColorWriteEnables( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorWriteEnables_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( colorWriteEnables_.size() ); + pColorWriteEnables = colorWriteEnables_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineColorWriteCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineColorWriteCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineColorWriteCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineColorWriteCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, attachmentCount, pColorWriteEnables ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineColorWriteCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == 
rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) && ( pColorWriteEnables == rhs.pColorWriteEnables ); +# endif + } + + bool operator!=( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorWriteCreateInfoEXT; + const void * pNext = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables = {}; + }; + + template <> + struct CppType + { + using Type = PipelineColorWriteCreateInfoEXT; + }; + + // wrapper struct for struct VkPipelineCompilerControlCreateInfoAMD, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCompilerControlCreateInfoAMD.html + struct PipelineCompilerControlCreateInfoAMD + { + using NativeType = VkPipelineCompilerControlCreateInfoAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCompilerControlCreateInfoAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , compilerControlFlags{ compilerControlFlags_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCompilerControlCreateInfoAMD( *reinterpret_cast( &rhs ) ) + { + } + + PipelineCompilerControlCreateInfoAMD & operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineCompilerControlCreateInfoAMD & operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & + setCompilerControlFlags( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT + { + compilerControlFlags = compilerControlFlags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineCompilerControlCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCompilerControlCreateInfoAMD const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineCompilerControlCreateInfoAMD *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, compilerControlFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PipelineCompilerControlCreateInfoAMD const & ) const = default; +#else + bool operator==( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compilerControlFlags == rhs.compilerControlFlags ); +# endif + } + + bool operator!=( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {}; + }; + + template <> + struct CppType + { + using Type = PipelineCompilerControlCreateInfoAMD; + }; + + // wrapper struct for struct VkPipelineCoverageModulationStateCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCoverageModulationStateCreateInfoNV.html + struct PipelineCoverageModulationStateCreateInfoNV + { + using NativeType = VkPipelineCoverageModulationStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageModulationStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone, + VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {}, + uint32_t coverageModulationTableCount_ = {}, + const float * pCoverageModulationTable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , coverageModulationMode{ coverageModulationMode_ } + , coverageModulationTableEnable{ coverageModulationTableEnable_ } + , coverageModulationTableCount{ coverageModulationTableCount_ } + , pCoverageModulationTable{ pCoverageModulationTable_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCoverageModulationStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_, + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_, + VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & coverageModulationTable_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , coverageModulationMode( coverageModulationMode_ ) + , coverageModulationTableEnable( coverageModulationTableEnable_ ) + , coverageModulationTableCount( static_cast( coverageModulationTable_.size() ) ) + , pCoverageModulationTable( coverageModulationTable_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineCoverageModulationStateCreateInfoNV & operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
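+    // Illustrative usage sketch (assumes the default vk namespace, enhanced mode enabled, and a
+    // hypothetical std::vector<float> named modulationTable): the ArrayProxyNoTemporaries
+    // constructor above derives coverageModulationTableCount from the proxy, so the count member
+    // never needs to be filled in by hand.
+    //   vk::PipelineCoverageModulationStateCreateInfoNV coverageModulationInfo(
+    //     {},                                     // flags
+    //     vk::CoverageModulationModeNV::eRgba,    // coverageModulationMode
+    //     VK_TRUE,                                // coverageModulationTableEnable
+    //     modulationTable );                      // count and pointer taken from the vector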
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineCoverageModulationStateCreateInfoNV & operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & + setCoverageModulationMode( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT + { + coverageModulationMode = coverageModulationMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & + setCoverageModulationTableEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT + { + coverageModulationTableEnable = coverageModulationTableEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & + setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT + { + coverageModulationTableCount = coverageModulationTableCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & + setPCoverageModulationTable( const float * pCoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT + { + pCoverageModulationTable = pCoverageModulationTable_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineCoverageModulationStateCreateInfoNV & + setCoverageModulationTable( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & coverageModulationTable_ ) VULKAN_HPP_NOEXCEPT + { + coverageModulationTableCount = static_cast( coverageModulationTable_.size() ); + pCoverageModulationTable = coverageModulationTable_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineCoverageModulationStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCoverageModulationStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCoverageModulationStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineCoverageModulationStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, coverageModulationMode, coverageModulationTableEnable, coverageModulationTableCount, pCoverageModulationTable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCoverageModulationStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( 
coverageModulationMode == rhs.coverageModulationMode ) && + ( coverageModulationTableEnable == rhs.coverageModulationTableEnable ) && ( coverageModulationTableCount == rhs.coverageModulationTableCount ) && + ( pCoverageModulationTable == rhs.pCoverageModulationTable ); +# endif + } + + bool operator!=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone; + VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {}; + uint32_t coverageModulationTableCount = {}; + const float * pCoverageModulationTable = {}; + }; + + template <> + struct CppType + { + using Type = PipelineCoverageModulationStateCreateInfoNV; + }; + + // wrapper struct for struct VkPipelineCoverageReductionStateCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCoverageReductionStateCreateInfoNV.html + struct PipelineCoverageReductionStateCreateInfoNV + { + using NativeType = VkPipelineCoverageReductionStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageReductionStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , coverageReductionMode{ coverageReductionMode_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCoverageReductionStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + PipelineCoverageReductionStateCreateInfoNV & operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineCoverageReductionStateCreateInfoNV & operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & + setCoverageReductionMode( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT + { + coverageReductionMode = coverageReductionMode_; + 
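+      // Returning *this lets the setters be chained; a minimal, purely illustrative sketch
+      // (assumes the default vk namespace):
+      //   auto coverageReductionInfo = vk::PipelineCoverageReductionStateCreateInfoNV{}
+      //                                  .setCoverageReductionMode( vk::CoverageReductionModeNV::eTruncate );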
return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineCoverageReductionStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCoverageReductionStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineCoverageReductionStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, coverageReductionMode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( coverageReductionMode == rhs.coverageReductionMode ); +# endif + } + + bool operator!=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; + }; + + template <> + struct CppType + { + using Type = PipelineCoverageReductionStateCreateInfoNV; + }; + + // wrapper struct for struct VkPipelineCoverageToColorStateCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCoverageToColorStateCreateInfoNV.html + struct PipelineCoverageToColorStateCreateInfoNV + { + using NativeType = VkPipelineCoverageToColorStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageToColorStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {}, + uint32_t coverageToColorLocation_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , coverageToColorEnable{ coverageToColorEnable_ } + , coverageToColorLocation{ coverageToColorLocation_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCoverageToColorStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + PipelineCoverageToColorStateCreateInfoNV & operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineCoverageToColorStateCreateInfoNV & operator=( 
VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & + setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT + { + coverageToColorEnable = coverageToColorEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT + { + coverageToColorLocation = coverageToColorLocation_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineCoverageToColorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCoverageToColorStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineCoverageToColorStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, coverageToColorEnable, coverageToColorLocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( coverageToColorEnable == rhs.coverageToColorEnable ) && + ( coverageToColorLocation == rhs.coverageToColorLocation ); +# endif + } + + bool operator!=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {}; + uint32_t coverageToColorLocation = {}; + }; + + template <> + struct CppType + { + using Type = PipelineCoverageToColorStateCreateInfoNV; + }; + + // wrapper struct for struct VkPipelineCreateFlags2CreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCreateFlags2CreateInfo.html + struct PipelineCreateFlags2CreateInfo + { + using NativeType = VkPipelineCreateFlags2CreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreateFlags2CreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS 
) + VULKAN_HPP_CONSTEXPR PipelineCreateFlags2CreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2 flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineCreateFlags2CreateInfo( PipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCreateFlags2CreateInfo( VkPipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCreateFlags2CreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PipelineCreateFlags2CreateInfo & operator=( PipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineCreateFlags2CreateInfo & operator=( VkPipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCreateFlags2CreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCreateFlags2CreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2 flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineCreateFlags2CreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCreateFlags2CreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCreateFlags2CreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineCreateFlags2CreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCreateFlags2CreateInfo const & ) const = default; +#else + bool operator==( PipelineCreateFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( PipelineCreateFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreateFlags2CreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags2 flags = {}; + }; + + template <> + struct CppType + { + using Type = PipelineCreateFlags2CreateInfo; + }; + + using PipelineCreateFlags2CreateInfoKHR = PipelineCreateFlags2CreateInfo; + + // wrapper struct for struct VkPipelineCreationFeedback, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCreationFeedback.html + struct PipelineCreationFeedback + { + using NativeType = VkPipelineCreationFeedback; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags_ = {}, + uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT + : flags{ flags_ } + , duration{ duration_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( PipelineCreationFeedback 
const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCreationFeedback( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCreationFeedback( *reinterpret_cast( &rhs ) ) + { + } + + PipelineCreationFeedback & operator=( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineCreationFeedback & operator=( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPipelineCreationFeedback const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCreationFeedback &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCreationFeedback const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineCreationFeedback *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( flags, duration ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCreationFeedback const & ) const = default; +#else + bool operator==( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( flags == rhs.flags ) && ( duration == rhs.duration ); +# endif + } + + bool operator!=( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags = {}; + uint64_t duration = {}; + }; + + using PipelineCreationFeedbackEXT = PipelineCreationFeedback; + + // wrapper struct for struct VkPipelineCreationFeedbackCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineCreationFeedbackCreateInfo.html + struct PipelineCreationFeedbackCreateInfo + { + using NativeType = VkPipelineCreationFeedbackCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreationFeedbackCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ = {}, + uint32_t pipelineStageCreationFeedbackCount_ = {}, + VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pPipelineCreationFeedback{ pPipelineCreationFeedback_ } + , pipelineStageCreationFeedbackCount{ pipelineStageCreationFeedbackCount_ } + , pPipelineStageCreationFeedbacks{ pPipelineStageCreationFeedbacks_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCreationFeedbackCreateInfo( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCreationFeedbackCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineCreationFeedbackCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & 
pipelineStageCreationFeedbacks_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , pPipelineCreationFeedback( pPipelineCreationFeedback_ ) + , pipelineStageCreationFeedbackCount( static_cast( pipelineStageCreationFeedbacks_.size() ) ) + , pPipelineStageCreationFeedbacks( pipelineStageCreationFeedbacks_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineCreationFeedbackCreateInfo & operator=( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineCreationFeedbackCreateInfo & operator=( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & + setPPipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineCreationFeedback = pPipelineCreationFeedback_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & + setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & + setPPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineCreationFeedbackCreateInfo & setPipelineStageCreationFeedbacks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineStageCreationFeedbacks_ ) + VULKAN_HPP_NOEXCEPT + { + pipelineStageCreationFeedbackCount = static_cast( pipelineStageCreationFeedbacks_.size() ); + pPipelineStageCreationFeedbacks = pipelineStageCreationFeedbacks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineCreationFeedbackCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCreationFeedbackCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCreationFeedbackCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineCreationFeedbackCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pPipelineCreationFeedback, pipelineStageCreationFeedbackCount, pPipelineStageCreationFeedbacks ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCreationFeedbackCreateInfo const & ) const = default; +#else + bool operator==( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pPipelineCreationFeedback == 
rhs.pPipelineCreationFeedback ) && + ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount ) && + ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks ); +# endif + } + + bool operator!=( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback = {}; + uint32_t pipelineStageCreationFeedbackCount = {}; + VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks = {}; + }; + + template <> + struct CppType + { + using Type = PipelineCreationFeedbackCreateInfo; + }; + + using PipelineCreationFeedbackCreateInfoEXT = PipelineCreationFeedbackCreateInfo; + + // wrapper struct for struct VkPipelineDiscardRectangleStateCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineDiscardRectangleStateCreateInfoEXT.html + struct PipelineDiscardRectangleStateCreateInfoEXT + { + using NativeType = VkPipelineDiscardRectangleStateCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive, + uint32_t discardRectangleCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , discardRectangleMode{ discardRectangleMode_ } + , discardRectangleCount{ discardRectangleCount_ } + , pDiscardRectangles{ pDiscardRectangles_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineDiscardRectangleStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & discardRectangles_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , discardRectangleMode( discardRectangleMode_ ) + , discardRectangleCount( static_cast( discardRectangles_.size() ) ) + , pDiscardRectangles( discardRectangles_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineDiscardRectangleStateCreateInfoEXT & operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineDiscardRectangleStateCreateInfoEXT & operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & + setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT + { + discardRectangleMode = discardRectangleMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT + { + discardRectangleCount = discardRectangleCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & + setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT + { + pDiscardRectangles = pDiscardRectangles_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineDiscardRectangleStateCreateInfoEXT & + setDiscardRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & discardRectangles_ ) VULKAN_HPP_NOEXCEPT + { + discardRectangleCount = static_cast( discardRectangles_.size() ); + pDiscardRectangles = discardRectangles_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineDiscardRectangleStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineDiscardRectangleStateCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineDiscardRectangleStateCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, discardRectangleMode, discardRectangleCount, pDiscardRectangles ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( discardRectangleMode == rhs.discardRectangleMode ) && + ( discardRectangleCount == rhs.discardRectangleCount ) && ( pDiscardRectangles == rhs.pDiscardRectangles ); +# endif + } + + bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive; + uint32_t discardRectangleCount = {}; + 
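+    // discardRectangleCount and pDiscardRectangles form a count/pointer pair; the
+    // setDiscardRectangles() overload above keeps them consistent. Illustrative sketch only,
+    // assuming a hypothetical std::vector<vk::Rect2D> named discardRects:
+    //   vk::PipelineDiscardRectangleStateCreateInfoEXT discardInfo{};
+    //   discardInfo.setDiscardRectangleMode( vk::DiscardRectangleModeEXT::eExclusive )
+    //              .setDiscardRectangles( discardRects );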
const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles = {}; + }; + + template <> + struct CppType + { + using Type = PipelineDiscardRectangleStateCreateInfoEXT; + }; + + // wrapper struct for struct VkPipelineExecutableInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineExecutableInfoKHR.html + struct PipelineExecutableInfoKHR + { + using NativeType = VkPipelineExecutableInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + uint32_t executableIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipeline{ pipeline_ } + , executableIndex{ executableIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineExecutableInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + PipelineExecutableInfoKHR & operator=( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineExecutableInfoKHR & operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT + { + executableIndex = executableIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineExecutableInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineExecutableInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineExecutableInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipeline, executableIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineExecutableInfoKHR const & ) const = default; +#else + bool operator==( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ) && ( executableIndex == rhs.executableIndex ); +# endif + } + + bool operator!=( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePipelineExecutableInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + uint32_t executableIndex = {}; + }; + + template <> + struct CppType + { + using Type = PipelineExecutableInfoKHR; + }; + + // wrapper struct for struct VkPipelineExecutableInternalRepresentationKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineExecutableInternalRepresentationKHR.html + struct PipelineExecutableInternalRepresentationKHR + { + using NativeType = VkPipelineExecutableInternalRepresentationKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInternalRepresentationKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( std::array const & name_ = {}, + std::array const & description_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 isText_ = {}, + size_t dataSize_ = {}, + void * pData_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , name{ name_ } + , description{ description_ } + , isText{ isText_ } + , dataSize{ dataSize_ } + , pData{ pData_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + PipelineExecutableInternalRepresentationKHR( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineExecutableInternalRepresentationKHR( *reinterpret_cast( &rhs ) ) + { + } + + PipelineExecutableInternalRepresentationKHR & operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineExecutableInternalRepresentationKHR & operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPipelineExecutableInternalRepresentationKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineExecutableInternalRepresentationKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineExecutableInternalRepresentationKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + size_t const &, + void * const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, name, description, isText, dataSize, pData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = isText <=> rhs.isText; cmp != 0 ) + return cmp; + if ( auto cmp = dataSize <=> rhs.dataSize; cmp != 0 ) + return cmp; + if ( auto cmp = pData <=> rhs.pData; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( strcmp( name, rhs.name ) == 0 ) && ( strcmp( description, rhs.description ) == 0 ) && + ( isText == rhs.isText ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); + } + + bool operator!=( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + VULKAN_HPP_NAMESPACE::Bool32 isText = {}; + size_t dataSize = {}; + void * pData = {}; + }; + + template <> + struct CppType + { + using Type = PipelineExecutableInternalRepresentationKHR; + }; + + // wrapper struct for struct VkPipelineExecutablePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineExecutablePropertiesKHR.html + struct PipelineExecutablePropertiesKHR + { + using NativeType = VkPipelineExecutablePropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutablePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {}, + std::array const & name_ = {}, + std::array const & description_ = {}, + uint32_t subgroupSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stages{ stages_ } + , name{ name_ } + , description{ description_ } + , subgroupSize{ subgroupSize_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineExecutablePropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PipelineExecutablePropertiesKHR & operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineExecutablePropertiesKHR & operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPipelineExecutablePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineExecutablePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineExecutablePropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { 
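// Illustrative usage sketch (editorial comment, not part of the generated header): assuming a
// vk::Device `device`, the VK_KHR_pipeline_executable_properties extension enabled, and a
// vk::Pipeline `pipeline` created with PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR,
// the executable-properties structs above are typically consumed roughly like this:
//   auto executables = device.getPipelineExecutablePropertiesKHR( vk::PipelineInfoKHR{ pipeline } );
//   for ( uint32_t i = 0; i < executables.size(); ++i )
//   {
//     auto internalReps =
//       device.getPipelineExecutableInternalRepresentationsKHR( vk::PipelineExecutableInfoKHR{ pipeline, i } );
//   }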
+ return std::tie( sType, pNext, stages, name, description, subgroupSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = stages <=> rhs.stages; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = subgroupSize <=> rhs.subgroupSize; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stages == rhs.stages ) && ( strcmp( name, rhs.name ) == 0 ) && + ( strcmp( description, rhs.description ) == 0 ) && ( subgroupSize == rhs.subgroupSize ); + } + + bool operator!=( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + uint32_t subgroupSize = {}; + }; + + template <> + struct CppType + { + using Type = PipelineExecutablePropertiesKHR; + }; + + union PipelineExecutableStatisticValueKHR + { + using NativeType = VkPipelineExecutableStatisticValueKHR; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} ) : b32( b32_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( int64_t i64_ ) : i64( i64_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( uint64_t u64_ ) : u64( u64_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( double f64_ ) : f64( f64_ ) {} +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT + { + b32 = b32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setI64( int64_t i64_ ) VULKAN_HPP_NOEXCEPT + { + i64 = i64_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setU64( uint64_t u64_ ) VULKAN_HPP_NOEXCEPT + { + u64 = u64_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setF64( double f64_ ) VULKAN_HPP_NOEXCEPT + { + f64 = f64_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineExecutableStatisticValueKHR const &() const + { + return *reinterpret_cast( this ); + } + + operator VkPipelineExecutableStatisticValueKHR &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::Bool32 b32; + int64_t i64; + uint64_t u64; + double f64; +#else + VkBool32 b32; + int64_t i64; + uint64_t u64; + double f64; +#endif 
/*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + // wrapper struct for struct VkPipelineExecutableStatisticKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineExecutableStatisticKHR.html + struct PipelineExecutableStatisticKHR + { + using NativeType = VkPipelineExecutableStatisticKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableStatisticKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR( + std::array const & name_ = {}, + std::array const & description_ = {}, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , name{ name_ } + , description{ description_ } + , format{ format_ } + , value{ value_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineExecutableStatisticKHR( *reinterpret_cast( &rhs ) ) + { + } + + PipelineExecutableStatisticKHR & operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineExecutableStatisticKHR & operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPipelineExecutableStatisticKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineExecutableStatisticKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineExecutableStatisticKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR const &, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, name, description, format, value ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32; + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {}; + }; + + template <> + struct CppType + { + using Type = PipelineExecutableStatisticKHR; + }; + + // wrapper struct for struct VkPipelineFragmentShadingRateEnumStateCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineFragmentShadingRateEnumStateCreateInfoNV.html + struct PipelineFragmentShadingRateEnumStateCreateInfoNV + { + using NativeType = VkPipelineFragmentShadingRateEnumStateCreateInfoNV; + + static const bool 
allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize, + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel, + std::array const & + combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } }, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shadingRateType{ shadingRateType_ } + , shadingRate{ shadingRate_ } + , combinerOps{ combinerOps_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + PipelineFragmentShadingRateEnumStateCreateInfoNV( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineFragmentShadingRateEnumStateCreateInfoNV( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineFragmentShadingRateEnumStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & + setShadingRateType( VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateType = shadingRateType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & + setShadingRate( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ ) VULKAN_HPP_NOEXCEPT + { + shadingRate = shadingRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & + setCombinerOps( std::array combinerOps_ ) VULKAN_HPP_NOEXCEPT + { + combinerOps = combinerOps_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shadingRateType, shadingRate, combinerOps ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineFragmentShadingRateEnumStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateType == rhs.shadingRateType ) && ( shadingRate == rhs.shadingRate ) && + ( combinerOps == rhs.combinerOps ); +# endif + } + + bool operator!=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize; + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D combinerOps = {}; + }; + + template <> + struct CppType + { + using Type = PipelineFragmentShadingRateEnumStateCreateInfoNV; + }; + + // wrapper struct for struct VkPipelineFragmentShadingRateStateCreateInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineFragmentShadingRateStateCreateInfoKHR.html + struct PipelineFragmentShadingRateStateCreateInfoKHR + { + using NativeType = VkPipelineFragmentShadingRateStateCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR( + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, + std::array const & + combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } }, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentSize{ fragmentSize_ } + , combinerOps{ combinerOps_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + PipelineFragmentShadingRateStateCreateInfoKHR( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineFragmentShadingRateStateCreateInfoKHR( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineFragmentShadingRateStateCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + PipelineFragmentShadingRateStateCreateInfoKHR & operator=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineFragmentShadingRateStateCreateInfoKHR & operator=( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & + setFragmentSize( VULKAN_HPP_NAMESPACE::Extent2D const & fragmentSize_ ) VULKAN_HPP_NOEXCEPT + { + fragmentSize = 
fragmentSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & + setCombinerOps( std::array combinerOps_ ) VULKAN_HPP_NOEXCEPT + { + combinerOps = combinerOps_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineFragmentShadingRateStateCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineFragmentShadingRateStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineFragmentShadingRateStateCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineFragmentShadingRateStateCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentSize, combinerOps ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineFragmentShadingRateStateCreateInfoKHR const & ) const = default; +#else + bool operator==( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentSize == rhs.fragmentSize ) && ( combinerOps == rhs.combinerOps ); +# endif + } + + bool operator!=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D combinerOps = {}; + }; + + template <> + struct CppType + { + using Type = PipelineFragmentShadingRateStateCreateInfoKHR; + }; + + // wrapper struct for struct VkPipelineIndirectDeviceAddressInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineIndirectDeviceAddressInfoNV.html + struct PipelineIndirectDeviceAddressInfoNV + { + using NativeType = VkPipelineIndirectDeviceAddressInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineIndirectDeviceAddressInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineIndirectDeviceAddressInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , pipeline{ pipeline_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineIndirectDeviceAddressInfoNV( PipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineIndirectDeviceAddressInfoNV( VkPipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineIndirectDeviceAddressInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + PipelineIndirectDeviceAddressInfoNV & operator=( PipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineIndirectDeviceAddressInfoNV & operator=( 
VkPipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineIndirectDeviceAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineIndirectDeviceAddressInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineIndirectDeviceAddressInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineIndirectDeviceAddressInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineBindPoint, pipeline ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineIndirectDeviceAddressInfoNV const & ) const = default; +#else + bool operator==( PipelineIndirectDeviceAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipeline == rhs.pipeline ); +# endif + } + + bool operator!=( PipelineIndirectDeviceAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineIndirectDeviceAddressInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + }; + + template <> + struct CppType + { + using Type = PipelineIndirectDeviceAddressInfoNV; + }; + + // wrapper struct for struct VkPipelineInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineInfoKHR.html + struct PipelineInfoKHR + { + using NativeType = VkPipelineInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipeline{ pipeline_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineInfoKHR( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PipelineInfoKHR( *reinterpret_cast( &rhs ) ) {} + + PipelineInfoKHR & operator=( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
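// Illustrative sketch (editorial comment, not part of the generated header): like the other
// wrapper structs here, PipelineInfoKHR converts implicitly to its NativeType, so the same
// object can be handed to either the C++ bindings or the raw C entry point. `device` and
// `pipeline` below are assumed to be a valid vk::Device and vk::Pipeline:
//   vk::PipelineInfoKHR info{ pipeline };
//   uint32_t executableCount = 0;
//   vkGetPipelineExecutablePropertiesKHR( device, info, &executableCount, nullptr );
//   // `info` is passed through operator VkPipelineInfoKHR const *() shown above.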
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineInfoKHR & operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipeline ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineInfoKHR const & ) const = default; +#else + bool operator==( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ); +# endif + } + + bool operator!=( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + }; + + template <> + struct CppType + { + using Type = PipelineInfoKHR; + }; + + using PipelineInfoEXT = PipelineInfoKHR; + + // wrapper struct for struct VkPipelineLayoutCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineLayoutCreateInfo.html + struct PipelineLayoutCreateInfo + { + using NativeType = VkPipelineLayoutCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLayoutCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {}, + uint32_t setLayoutCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, + uint32_t pushConstantRangeCount_ = {}, + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , setLayoutCount{ setLayoutCount_ } + , pSetLayouts{ pSetLayouts_ } + , pushConstantRangeCount{ pushConstantRangeCount_ } + , pPushConstantRanges{ pPushConstantRanges_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineLayoutCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLayoutCreateInfo( 
VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , setLayoutCount( static_cast( setLayouts_.size() ) ) + , pSetLayouts( setLayouts_.data() ) + , pushConstantRangeCount( static_cast( pushConstantRanges_.size() ) ) + , pPushConstantRanges( pushConstantRanges_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineLayoutCreateInfo & operator=( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineLayoutCreateInfo & operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT + { + setLayoutCount = setLayoutCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pSetLayouts = pSetLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLayoutCreateInfo & + setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ ) VULKAN_HPP_NOEXCEPT + { + setLayoutCount = static_cast( setLayouts_.size() ); + pSetLayouts = setLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = pushConstantRangeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & + setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + pPushConstantRanges = pPushConstantRanges_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLayoutCreateInfo & setPushConstantRanges( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = static_cast( pushConstantRanges_.size() ); + pPushConstantRanges = pushConstantRanges_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineLayoutCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineLayoutCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, 
flags, setLayoutCount, pSetLayouts, pushConstantRangeCount, pPushConstantRanges ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineLayoutCreateInfo const & ) const = default; +#else + bool operator==( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( setLayoutCount == rhs.setLayoutCount ) && + ( pSetLayouts == rhs.pSetLayouts ) && ( pushConstantRangeCount == rhs.pushConstantRangeCount ) && + ( pPushConstantRanges == rhs.pPushConstantRanges ); +# endif + } + + bool operator!=( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {}; + uint32_t setLayoutCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; + uint32_t pushConstantRangeCount = {}; + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {}; + }; + + template <> + struct CppType + { + using Type = PipelineLayoutCreateInfo; + }; + + // wrapper struct for struct VkPipelinePropertiesIdentifierEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelinePropertiesIdentifierEXT.html + struct PipelinePropertiesIdentifierEXT + { + using NativeType = VkPipelinePropertiesIdentifierEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelinePropertiesIdentifierEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT( std::array const & pipelineIdentifier_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineIdentifier{ pipelineIdentifier_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT( PipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelinePropertiesIdentifierEXT( VkPipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelinePropertiesIdentifierEXT( *reinterpret_cast( &rhs ) ) + { + } + + PipelinePropertiesIdentifierEXT & operator=( PipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelinePropertiesIdentifierEXT & operator=( VkPipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPipelinePropertiesIdentifierEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelinePropertiesIdentifierEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelinePropertiesIdentifierEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelinePropertiesIdentifierEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineIdentifier ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PipelinePropertiesIdentifierEXT const & ) const = default; +#else + bool operator==( PipelinePropertiesIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineIdentifier == rhs.pipelineIdentifier ); +# endif + } + + bool operator!=( PipelinePropertiesIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelinePropertiesIdentifierEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineIdentifier = {}; + }; + + template <> + struct CppType + { + using Type = PipelinePropertiesIdentifierEXT; + }; + + // wrapper struct for struct VkPipelineRasterizationConservativeStateCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationConservativeStateCreateInfoEXT.html + struct PipelineRasterizationConservativeStateCreateInfoEXT + { + using NativeType = VkPipelineRasterizationConservativeStateCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, + float extraPrimitiveOverestimationSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , conservativeRasterizationMode{ conservativeRasterizationMode_ } + , extraPrimitiveOverestimationSize{ extraPrimitiveOverestimationSize_ } + { + } + + VULKAN_HPP_CONSTEXPR + PipelineRasterizationConservativeStateCreateInfoEXT( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationConservativeStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PipelineRasterizationConservativeStateCreateInfoEXT & + operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineRasterizationConservativeStateCreateInfoEXT & operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & + setConservativeRasterizationMode( 
VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT + { + conservativeRasterizationMode = conservativeRasterizationMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & + setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT + { + extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineRasterizationConservativeStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationConservativeStateCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineRasterizationConservativeStateCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, conservativeRasterizationMode, extraPrimitiveOverestimationSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( conservativeRasterizationMode == rhs.conservativeRasterizationMode ) && + ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize ); +# endif + } + + bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled; + float extraPrimitiveOverestimationSize = {}; + }; + + template <> + struct CppType + { + using Type = PipelineRasterizationConservativeStateCreateInfoEXT; + }; + + // wrapper struct for struct VkPipelineRasterizationDepthClipStateCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationDepthClipStateCreateInfoEXT.html + struct PipelineRasterizationDepthClipStateCreateInfoEXT + { + using NativeType = VkPipelineRasterizationDepthClipStateCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}, + const void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , depthClipEnable{ depthClipEnable_ } + { + } + + VULKAN_HPP_CONSTEXPR + PipelineRasterizationDepthClipStateCreateInfoEXT( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationDepthClipStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & + setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthClipEnable = depthClipEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineRasterizationDepthClipStateCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, depthClipEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthClipEnable == rhs.depthClipEnable ); +# endif + } + + bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; + }; + + template <> + struct CppType + { + using Type = PipelineRasterizationDepthClipStateCreateInfoEXT; + }; + + // wrapper struct for struct 
VkPipelineRasterizationLineStateCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationLineStateCreateInfo.html + struct PipelineRasterizationLineStateCreateInfo + { + using NativeType = VkPipelineRasterizationLineStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationLineStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfo( + VULKAN_HPP_NAMESPACE::LineRasterizationMode lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationMode::eDefault, + VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, + uint32_t lineStippleFactor_ = {}, + uint16_t lineStipplePattern_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , lineRasterizationMode{ lineRasterizationMode_ } + , stippledLineEnable{ stippledLineEnable_ } + , lineStippleFactor{ lineStippleFactor_ } + , lineStipplePattern{ lineStipplePattern_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfo( PipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationLineStateCreateInfo( VkPipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationLineStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PipelineRasterizationLineStateCreateInfo & operator=( PipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineRasterizationLineStateCreateInfo & operator=( VkPipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & + setLineRasterizationMode( VULKAN_HPP_NAMESPACE::LineRasterizationMode lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT + { + lineRasterizationMode = lineRasterizationMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & + setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT + { + stippledLineEnable = stippledLineEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT + { + lineStippleFactor = lineStippleFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT + { + lineStipplePattern = lineStipplePattern_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineRasterizationLineStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationLineStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationLineStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineRasterizationLineStateCreateInfo *() VULKAN_HPP_NOEXCEPT + { 
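// Illustrative sketch (editorial comment, not part of the generated header): this struct is
// normally chained into a PipelineRasterizationStateCreateInfo via pNext when building a
// graphics pipeline; `rasterizationState` below is an assumed name for that struct:
//   vk::PipelineRasterizationLineStateCreateInfo lineState{};
//   lineState.setLineRasterizationMode( vk::LineRasterizationMode::eBresenham )
//            .setStippledLineEnable( VK_TRUE )
//            .setLineStippleFactor( 1 )
//            .setLineStipplePattern( 0x5555 );
//   rasterizationState.setPNext( &lineState );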
+ return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, lineRasterizationMode, stippledLineEnable, lineStippleFactor, lineStipplePattern ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationLineStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineRasterizationLineStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lineRasterizationMode == rhs.lineRasterizationMode ) && + ( stippledLineEnable == rhs.stippledLineEnable ) && ( lineStippleFactor == rhs.lineStippleFactor ) && + ( lineStipplePattern == rhs.lineStipplePattern ); +# endif + } + + bool operator!=( PipelineRasterizationLineStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::LineRasterizationMode lineRasterizationMode = VULKAN_HPP_NAMESPACE::LineRasterizationMode::eDefault; + VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {}; + uint32_t lineStippleFactor = {}; + uint16_t lineStipplePattern = {}; + }; + + template <> + struct CppType + { + using Type = PipelineRasterizationLineStateCreateInfo; + }; + + using PipelineRasterizationLineStateCreateInfoEXT = PipelineRasterizationLineStateCreateInfo; + using PipelineRasterizationLineStateCreateInfoKHR = PipelineRasterizationLineStateCreateInfo; + + // wrapper struct for struct VkPipelineRasterizationProvokingVertexStateCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationProvokingVertexStateCreateInfoEXT.html + struct PipelineRasterizationProvokingVertexStateCreateInfoEXT + { + using NativeType = VkPipelineRasterizationProvokingVertexStateCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ = VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , provokingVertexMode{ provokingVertexMode_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationProvokingVertexStateCreateInfoEXT( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationProvokingVertexStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PipelineRasterizationProvokingVertexStateCreateInfoEXT & + operator=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineRasterizationProvokingVertexStateCreateInfoEXT & + operator=( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT 
const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & + setProvokingVertexMode( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ ) VULKAN_HPP_NOEXCEPT + { + provokingVertexMode = provokingVertexMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, provokingVertexMode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexMode == rhs.provokingVertexMode ); +# endif + } + + bool operator!=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode = VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex; + }; + + template <> + struct CppType + { + using Type = PipelineRasterizationProvokingVertexStateCreateInfoEXT; + }; + + // wrapper struct for struct VkPipelineRasterizationStateRasterizationOrderAMD, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationStateRasterizationOrderAMD.html + struct PipelineRasterizationStateRasterizationOrderAMD + { + using NativeType = VkPipelineRasterizationStateRasterizationOrderAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( + VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rasterizationOrder{ rasterizationOrder_ } + { + } + + VULKAN_HPP_CONSTEXPR + PipelineRasterizationStateRasterizationOrderAMD( 
PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationStateRasterizationOrderAMD( *reinterpret_cast( &rhs ) ) + { + } + + PipelineRasterizationStateRasterizationOrderAMD & operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineRasterizationStateRasterizationOrderAMD & operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD & + setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationOrder = rasterizationOrder_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineRasterizationStateRasterizationOrderAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationStateRasterizationOrderAMD const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineRasterizationStateRasterizationOrderAMD *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rasterizationOrder ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const & ) const = default; +#else + bool operator==( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rasterizationOrder == rhs.rasterizationOrder ); +# endif + } + + bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict; + }; + + template <> + struct CppType + { + using Type = PipelineRasterizationStateRasterizationOrderAMD; + }; + + // wrapper struct for struct VkPipelineRasterizationStateStreamCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationStateStreamCreateInfoEXT.html + struct PipelineRasterizationStateStreamCreateInfoEXT + { + using NativeType = VkPipelineRasterizationStateStreamCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {}, + uint32_t rasterizationStream_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , rasterizationStream{ rasterizationStream_ } + { + } + + VULKAN_HPP_CONSTEXPR + PipelineRasterizationStateStreamCreateInfoEXT( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationStateStreamCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PipelineRasterizationStateStreamCreateInfoEXT & operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineRasterizationStateStreamCreateInfoEXT & operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationStream = rasterizationStream_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineRasterizationStateStreamCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationStateStreamCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineRasterizationStateStreamCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, rasterizationStream ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rasterizationStream == rhs.rasterizationStream ); +# endif + } + + bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {}; + uint32_t rasterizationStream = {}; + }; + + template <> + struct CppType + { + using Type = PipelineRasterizationStateStreamCreateInfoEXT; + }; + + // wrapper struct for struct VkPipelineRenderingCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRenderingCreateInfo.html + struct PipelineRenderingCreateInfo + { + using NativeType = VkPipelineRenderingCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRenderingCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo( uint32_t viewMask_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , viewMask{ viewMask_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachmentFormats{ pColorAttachmentFormats_ } + , depthAttachmentFormat{ depthAttachmentFormat_ } + , stencilAttachmentFormat{ stencilAttachmentFormat_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRenderingCreateInfo( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRenderingCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineRenderingCreateInfo( uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( static_cast( colorAttachmentFormats_.size() ) ) + , pColorAttachmentFormats( colorAttachmentFormats_.data() ) + , depthAttachmentFormat( depthAttachmentFormat_ ) + , stencilAttachmentFormat( stencilAttachmentFormat_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineRenderingCreateInfo & operator=( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineRenderingCreateInfo & operator=( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + { + viewMask = viewMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PipelineRenderingCreateInfo & + setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachmentFormats = pColorAttachmentFormats_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineRenderingCreateInfo & setColorAttachmentFormats( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachmentFormats_.size() ); + pColorAttachmentFormats = colorAttachmentFormats_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + depthAttachmentFormat = depthAttachmentFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & + setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + stencilAttachmentFormat = stencilAttachmentFormat_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineRenderingCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRenderingCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRenderingCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineRenderingCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRenderingCreateInfo const & ) const = default; +#else + bool operator==( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && + ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ); +# endif + } + + bool operator!=( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRenderingCreateInfo; + const void * pNext = {}; + uint32_t viewMask = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {}; + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + + template <> + struct CppType + { + using Type = PipelineRenderingCreateInfo; + }; + + using PipelineRenderingCreateInfoKHR = PipelineRenderingCreateInfo; + + // wrapper struct for struct VkPipelineRepresentativeFragmentTestStateCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRepresentativeFragmentTestStateCreateInfoNV.html + 
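  // Editorial usage sketch (illustrative only, not generated code): this NV create-info is chained
  // into a graphics pipeline's pNext to enable the representative fragment test. Assuming the
  // default vk namespace alias and a local vk::GraphicsPipelineCreateInfo named pipelineCreateInfo:
  //
  //   vk::PipelineRepresentativeFragmentTestStateCreateInfoNV reprFragmentTest{ VK_TRUE };
  //   pipelineCreateInfo.setPNext( &reprFragmentTest );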
struct PipelineRepresentativeFragmentTestStateCreateInfoNV + { + using NativeType = VkPipelineRepresentativeFragmentTestStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , representativeFragmentTestEnable{ representativeFragmentTestEnable_ } + { + } + + VULKAN_HPP_CONSTEXPR + PipelineRepresentativeFragmentTestStateCreateInfoNV( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRepresentativeFragmentTestStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + PipelineRepresentativeFragmentTestStateCreateInfoNV & + operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & + setRepresentativeFragmentTestEnable( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT + { + representativeFragmentTestEnable = representativeFragmentTestEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, representativeFragmentTestEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable ); +# endif + } + + bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) 
const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {}; + }; + + template <> + struct CppType + { + using Type = PipelineRepresentativeFragmentTestStateCreateInfoNV; + }; + + // wrapper struct for struct VkPipelineRobustnessCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRobustnessCreateInfo.html + struct PipelineRobustnessCreateInfo + { + using NativeType = VkPipelineRobustnessCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRobustnessCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior storageBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior uniformBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior vertexInputs_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior images_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , storageBuffers{ storageBuffers_ } + , uniformBuffers{ uniformBuffers_ } + , vertexInputs{ vertexInputs_ } + , images{ images_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfo( PipelineRobustnessCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRobustnessCreateInfo( VkPipelineRobustnessCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRobustnessCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PipelineRobustnessCreateInfo & operator=( PipelineRobustnessCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineRobustnessCreateInfo & operator=( VkPipelineRobustnessCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & + setStorageBuffers( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior storageBuffers_ ) VULKAN_HPP_NOEXCEPT + { + storageBuffers = storageBuffers_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & + setUniformBuffers( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior uniformBuffers_ ) VULKAN_HPP_NOEXCEPT + { + uniformBuffers = uniformBuffers_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & + setVertexInputs( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior vertexInputs_ ) VULKAN_HPP_NOEXCEPT + { + vertexInputs = vertexInputs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & setImages( VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior images_ ) VULKAN_HPP_NOEXCEPT + { + images = images_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ 
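    // Editorial usage sketch (illustrative only, not generated code): like the other CreateInfo
    // wrappers in this header, this struct keeps sType fixed, offers chainable set*() members, and
    // converts implicitly to the native VkPipelineRobustnessCreateInfo. Assuming the default vk
    // namespace alias and a local vk::GraphicsPipelineCreateInfo named pipelineCreateInfo:
    //
    //   auto robustnessInfo = vk::PipelineRobustnessCreateInfo{}
    //                           .setStorageBuffers( vk::PipelineRobustnessBufferBehavior::eRobustBufferAccess2 )
    //                           .setUniformBuffers( vk::PipelineRobustnessBufferBehavior::eRobustBufferAccess2 )
    //                           .setImages( vk::PipelineRobustnessImageBehavior::eRobustImageAccess );
    //   pipelineCreateInfo.setPNext( &robustnessInfo );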
+ + operator VkPipelineRobustnessCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRobustnessCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRobustnessCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineRobustnessCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, storageBuffers, uniformBuffers, vertexInputs, images ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRobustnessCreateInfo const & ) const = default; +#else + bool operator==( PipelineRobustnessCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffers == rhs.storageBuffers ) && ( uniformBuffers == rhs.uniformBuffers ) && + ( vertexInputs == rhs.vertexInputs ) && ( images == rhs.images ); +# endif + } + + bool operator!=( PipelineRobustnessCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRobustnessCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior storageBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior uniformBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior vertexInputs = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior images = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault; + }; + + template <> + struct CppType + { + using Type = PipelineRobustnessCreateInfo; + }; + + using PipelineRobustnessCreateInfoEXT = PipelineRobustnessCreateInfo; + + // wrapper struct for struct VkPipelineSampleLocationsStateCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineSampleLocationsStateCreateInfoEXT.html + struct PipelineSampleLocationsStateCreateInfoEXT + { + using NativeType = VkPipelineSampleLocationsStateCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , sampleLocationsEnable{ sampleLocationsEnable_ } + , sampleLocationsInfo{ sampleLocationsInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : 
PipelineSampleLocationsStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PipelineSampleLocationsStateCreateInfoEXT & operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineSampleLocationsStateCreateInfoEXT & operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & + setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsEnable = sampleLocationsEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & + setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineSampleLocationsStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineSampleLocationsStateCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineSampleLocationsStateCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, sampleLocationsEnable, sampleLocationsInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationsEnable == rhs.sampleLocationsEnable ) && + ( sampleLocationsInfo == rhs.sampleLocationsInfo ); +# endif + } + + bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {}; + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; + }; + + template <> + struct CppType + { + using Type = PipelineSampleLocationsStateCreateInfoEXT; + }; + + // wrapper struct for struct VkPipelineShaderStageModuleIdentifierCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineShaderStageModuleIdentifierCreateInfoEXT.html + struct PipelineShaderStageModuleIdentifierCreateInfoEXT + { + using NativeType = VkPipelineShaderStageModuleIdentifierCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineShaderStageModuleIdentifierCreateInfoEXT( uint32_t identifierSize_ = {}, + const uint8_t * pIdentifier_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , identifierSize{ identifierSize_ } + , pIdentifier{ pIdentifier_ } + { + } + + VULKAN_HPP_CONSTEXPR + PipelineShaderStageModuleIdentifierCreateInfoEXT( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageModuleIdentifierCreateInfoEXT( VkPipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineShaderStageModuleIdentifierCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineShaderStageModuleIdentifierCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & identifier_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), identifierSize( static_cast( identifier_.size() ) ), pIdentifier( identifier_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineShaderStageModuleIdentifierCreateInfoEXT & operator=( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineShaderStageModuleIdentifierCreateInfoEXT & operator=( VkPipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setIdentifierSize( uint32_t identifierSize_ ) VULKAN_HPP_NOEXCEPT + { + identifierSize = identifierSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setPIdentifier( const uint8_t * pIdentifier_ ) VULKAN_HPP_NOEXCEPT + { + pIdentifier = pIdentifier_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineShaderStageModuleIdentifierCreateInfoEXT & + setIdentifier( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & identifier_ ) VULKAN_HPP_NOEXCEPT + { + identifierSize = static_cast( identifier_.size() ); + pIdentifier = identifier_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, identifierSize, pIdentifier ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PipelineShaderStageModuleIdentifierCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( identifierSize == rhs.identifierSize ) && ( pIdentifier == rhs.pIdentifier ); +# endif + } + + bool operator!=( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT; + const void * pNext = {}; + uint32_t identifierSize = {}; + const uint8_t * pIdentifier = {}; + }; + + template <> + struct CppType + { + using Type = PipelineShaderStageModuleIdentifierCreateInfoEXT; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + // wrapper struct for struct VkPipelineShaderStageNodeCreateInfoAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineShaderStageNodeCreateInfoAMDX.html + struct PipelineShaderStageNodeCreateInfoAMDX + { + using NativeType = VkPipelineShaderStageNodeCreateInfoAMDX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageNodeCreateInfoAMDX; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineShaderStageNodeCreateInfoAMDX( const char * pName_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pName{ pName_ } + , index{ index_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineShaderStageNodeCreateInfoAMDX( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageNodeCreateInfoAMDX( VkPipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineShaderStageNodeCreateInfoAMDX( *reinterpret_cast( &rhs ) ) + { + } + + PipelineShaderStageNodeCreateInfoAMDX & operator=( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineShaderStageNodeCreateInfoAMDX & operator=( VkPipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT + { + pName = pName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineShaderStageNodeCreateInfoAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineShaderStageNodeCreateInfoAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineShaderStageNodeCreateInfoAMDX const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineShaderStageNodeCreateInfoAMDX *() VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pName, index ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( pName != rhs.pName ) + if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = index <=> rhs.index; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( index == rhs.index ); + } + + bool operator!=( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageNodeCreateInfoAMDX; + const void * pNext = {}; + const char * pName = {}; + uint32_t index = {}; + }; + + template <> + struct CppType + { + using Type = PipelineShaderStageNodeCreateInfoAMDX; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + // wrapper struct for struct VkPipelineShaderStageRequiredSubgroupSizeCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineShaderStageRequiredSubgroupSizeCreateInfo.html + struct PipelineShaderStageRequiredSubgroupSizeCreateInfo + { + using NativeType = VkPipelineShaderStageRequiredSubgroupSizeCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfo( uint32_t requiredSubgroupSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , requiredSubgroupSize{ requiredSubgroupSize_ } + { + } + + VULKAN_HPP_CONSTEXPR + PipelineShaderStageRequiredSubgroupSizeCreateInfo( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageRequiredSubgroupSizeCreateInfo( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineShaderStageRequiredSubgroupSizeCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PipelineShaderStageRequiredSubgroupSizeCreateInfo & + operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineShaderStageRequiredSubgroupSizeCreateInfo & operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, requiredSubgroupSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & ) const = default; +#else + bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( requiredSubgroupSize == rhs.requiredSubgroupSize ); +# endif + } + + bool operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo; + void * pNext = {}; + uint32_t requiredSubgroupSize = {}; + }; + + template <> + struct CppType + { + using Type = PipelineShaderStageRequiredSubgroupSizeCreateInfo; + }; + + using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo; + using ShaderRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo; + + // wrapper struct for struct VkPipelineTessellationDomainOriginStateCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineTessellationDomainOriginStateCreateInfo.html + struct PipelineTessellationDomainOriginStateCreateInfo + { + using NativeType = VkPipelineTessellationDomainOriginStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( + VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , domainOrigin{ domainOrigin_ } + { + } + + VULKAN_HPP_CONSTEXPR + PipelineTessellationDomainOriginStateCreateInfo( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineTessellationDomainOriginStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PipelineTessellationDomainOriginStateCreateInfo & operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineTessellationDomainOriginStateCreateInfo & operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & setPNext( const 
void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & + setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT + { + domainOrigin = domainOrigin_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineTessellationDomainOriginStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineTessellationDomainOriginStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineTessellationDomainOriginStateCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, domainOrigin ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( domainOrigin == rhs.domainOrigin ); +# endif + } + + bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft; + }; + + template <> + struct CppType + { + using Type = PipelineTessellationDomainOriginStateCreateInfo; + }; + + using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo; + + // wrapper struct for struct VkVertexInputBindingDivisorDescription, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVertexInputBindingDivisorDescription.html + struct VertexInputBindingDivisorDescription + { + using NativeType = VkVertexInputBindingDivisorDescription; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescription( uint32_t binding_ = {}, uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT + : binding{ binding_ } + , divisor{ divisor_ } + { + } + + VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescription( VertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDivisorDescription( VkVertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputBindingDivisorDescription( *reinterpret_cast( &rhs ) ) + { + } + + VertexInputBindingDivisorDescription & operator=( VertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VertexInputBindingDivisorDescription & operator=( VkVertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && 
!defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescription & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT + { + divisor = divisor_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVertexInputBindingDivisorDescription const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputBindingDivisorDescription &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputBindingDivisorDescription const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVertexInputBindingDivisorDescription *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( binding, divisor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputBindingDivisorDescription const & ) const = default; +#else + bool operator==( VertexInputBindingDivisorDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( binding == rhs.binding ) && ( divisor == rhs.divisor ); +# endif + } + + bool operator!=( VertexInputBindingDivisorDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t binding = {}; + uint32_t divisor = {}; + }; + + using VertexInputBindingDivisorDescriptionEXT = VertexInputBindingDivisorDescription; + using VertexInputBindingDivisorDescriptionKHR = VertexInputBindingDivisorDescription; + + // wrapper struct for struct VkPipelineVertexInputDivisorStateCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineVertexInputDivisorStateCreateInfo.html + struct PipelineVertexInputDivisorStateCreateInfo + { + using NativeType = VkPipelineVertexInputDivisorStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputDivisorStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineVertexInputDivisorStateCreateInfo( uint32_t vertexBindingDivisorCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription * pVertexBindingDivisors_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vertexBindingDivisorCount{ vertexBindingDivisorCount_ } + , pVertexBindingDivisors{ pVertexBindingDivisors_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfo( PipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineVertexInputDivisorStateCreateInfo( VkPipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineVertexInputDivisorStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputDivisorStateCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDivisors_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , vertexBindingDivisorCount( static_cast( 
vertexBindingDivisors_.size() ) ) + , pVertexBindingDivisors( vertexBindingDivisors_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineVertexInputDivisorStateCreateInfo & operator=( PipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineVertexInputDivisorStateCreateInfo & operator=( VkPipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfo & setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingDivisorCount = vertexBindingDivisorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfo & + setPVertexBindingDivisors( const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription * pVertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT + { + pVertexBindingDivisors = pVertexBindingDivisors_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputDivisorStateCreateInfo & setVertexBindingDivisors( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDivisors_ ) + VULKAN_HPP_NOEXCEPT + { + vertexBindingDivisorCount = static_cast( vertexBindingDivisors_.size() ); + pVertexBindingDivisors = vertexBindingDivisors_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineVertexInputDivisorStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineVertexInputDivisorStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineVertexInputDivisorStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineVertexInputDivisorStateCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vertexBindingDivisorCount, pVertexBindingDivisors ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineVertexInputDivisorStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineVertexInputDivisorStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount ) && + ( pVertexBindingDivisors == rhs.pVertexBindingDivisors ); +# endif + } + + bool operator!=( PipelineVertexInputDivisorStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfo; + const void * pNext = {}; + uint32_t vertexBindingDivisorCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription * pVertexBindingDivisors = {}; + }; + + template <> + 
struct CppType + { + using Type = PipelineVertexInputDivisorStateCreateInfo; + }; + + using PipelineVertexInputDivisorStateCreateInfoEXT = PipelineVertexInputDivisorStateCreateInfo; + using PipelineVertexInputDivisorStateCreateInfoKHR = PipelineVertexInputDivisorStateCreateInfo; + + // wrapper struct for struct VkPipelineViewportCoarseSampleOrderStateCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportCoarseSampleOrderStateCreateInfoNV.html + struct PipelineViewportCoarseSampleOrderStateCreateInfoNV + { + using NativeType = VkPipelineViewportCoarseSampleOrderStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault, + uint32_t customSampleOrderCount_ = {}, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , sampleOrderType{ sampleOrderType_ } + , customSampleOrderCount{ customSampleOrderCount_ } + , pCustomSampleOrders{ pCustomSampleOrders_ } + { + } + + VULKAN_HPP_CONSTEXPR + PipelineViewportCoarseSampleOrderStateCreateInfoNV( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportCoarseSampleOrderStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportCoarseSampleOrderStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & customSampleOrders_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , sampleOrderType( sampleOrderType_ ) + , customSampleOrderCount( static_cast( customSampleOrders_.size() ) ) + , pCustomSampleOrders( customSampleOrders_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineViewportCoarseSampleOrderStateCreateInfoNV & + operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & + setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT + { + sampleOrderType = sampleOrderType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & + setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) VULKAN_HPP_NOEXCEPT + { + customSampleOrderCount = customSampleOrderCount_; + return 
*this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & + setPCustomSampleOrders( const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT + { + pCustomSampleOrders = pCustomSampleOrders_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrders( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & customSampleOrders_ ) VULKAN_HPP_NOEXCEPT + { + customSampleOrderCount = static_cast( customSampleOrders_.size() ); + pCustomSampleOrders = customSampleOrders_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleOrderType == rhs.sampleOrderType ) && + ( customSampleOrderCount == rhs.customSampleOrderCount ) && ( pCustomSampleOrders == rhs.pCustomSampleOrders ); +# endif + } + + bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault; + uint32_t customSampleOrderCount = {}; + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders = {}; + }; + + template <> + struct CppType + { + using Type = PipelineViewportCoarseSampleOrderStateCreateInfoNV; + }; + + // wrapper struct for struct VkPipelineViewportDepthClampControlCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportDepthClampControlCreateInfoEXT.html + struct PipelineViewportDepthClampControlCreateInfoEXT + { + using NativeType = VkPipelineViewportDepthClampControlCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportDepthClampControlCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportDepthClampControlCreateInfoEXT( + VULKAN_HPP_NAMESPACE::DepthClampModeEXT 
depthClampMode_ = VULKAN_HPP_NAMESPACE::DepthClampModeEXT::eViewportRange, + const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , depthClampMode{ depthClampMode_ } + , pDepthClampRange{ pDepthClampRange_ } + { + } + + VULKAN_HPP_CONSTEXPR + PipelineViewportDepthClampControlCreateInfoEXT( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportDepthClampControlCreateInfoEXT( VkPipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportDepthClampControlCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PipelineViewportDepthClampControlCreateInfoEXT & operator=( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineViewportDepthClampControlCreateInfoEXT & operator=( VkPipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClampControlCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClampControlCreateInfoEXT & + setDepthClampMode( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode_ ) VULKAN_HPP_NOEXCEPT + { + depthClampMode = depthClampMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClampControlCreateInfoEXT & + setPDepthClampRange( const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange_ ) VULKAN_HPP_NOEXCEPT + { + pDepthClampRange = pDepthClampRange_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineViewportDepthClampControlCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportDepthClampControlCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportDepthClampControlCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineViewportDepthClampControlCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthClampMode, pDepthClampRange ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportDepthClampControlCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClampMode == rhs.depthClampMode ) && ( pDepthClampRange == rhs.pDepthClampRange ); +# endif + } + + bool operator!=( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportDepthClampControlCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode = 
VULKAN_HPP_NAMESPACE::DepthClampModeEXT::eViewportRange;
+    const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineViewportDepthClampControlCreateInfoEXT>
+  {
+    using Type = PipelineViewportDepthClampControlCreateInfoEXT;
+  };
+
+  // wrapper struct for struct VkPipelineViewportDepthClipControlCreateInfoEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportDepthClipControlCreateInfoEXT.html
+  struct PipelineViewportDepthClipControlCreateInfoEXT
+  {
+    using NativeType = VkPipelineViewportDepthClipControlCreateInfoEXT;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineViewportDepthClipControlCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ = {},
+                                                                        const void *                 pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , negativeOneToOne{ negativeOneToOne_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PipelineViewportDepthClipControlCreateInfoEXT( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineViewportDepthClipControlCreateInfoEXT( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineViewportDepthClipControlCreateInfoEXT( *reinterpret_cast<PipelineViewportDepthClipControlCreateInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    PipelineViewportDepthClipControlCreateInfoEXT & operator=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PipelineViewportDepthClipControlCreateInfoEXT & operator=( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT &
+      setNegativeOneToOne( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ ) VULKAN_HPP_NOEXCEPT
+    {
+      negativeOneToOne = negativeOneToOne_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPipelineViewportDepthClipControlCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportDepthClipControlCreateInfoEXT *>( this );
+    }
+
+    operator VkPipelineViewportDepthClipControlCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportDepthClipControlCreateInfoEXT *>( this );
+    }
+
+    operator VkPipelineViewportDepthClipControlCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkPipelineViewportDepthClipControlCreateInfoEXT *>( this );
+    }
+
+    operator VkPipelineViewportDepthClipControlCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkPipelineViewportDepthClipControlCreateInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, negativeOneToOne );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineViewportDepthClipControlCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext ==
rhs.pNext ) && ( negativeOneToOne == rhs.negativeOneToOne ); +# endif + } + + bool operator!=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne = {}; + }; + + template <> + struct CppType + { + using Type = PipelineViewportDepthClipControlCreateInfoEXT; + }; + + // wrapper struct for struct VkPipelineViewportExclusiveScissorStateCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportExclusiveScissorStateCreateInfoNV.html + struct PipelineViewportExclusiveScissorStateCreateInfoNV + { + using NativeType = VkPipelineViewportExclusiveScissorStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( uint32_t exclusiveScissorCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , exclusiveScissorCount{ exclusiveScissorCount_ } + , pExclusiveScissors{ pExclusiveScissors_ } + { + } + + VULKAN_HPP_CONSTEXPR + PipelineViewportExclusiveScissorStateCreateInfoNV( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportExclusiveScissorStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportExclusiveScissorStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & exclusiveScissors_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), exclusiveScissorCount( static_cast( exclusiveScissors_.size() ) ), pExclusiveScissors( exclusiveScissors_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineViewportExclusiveScissorStateCreateInfoNV & + operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT + { + exclusiveScissorCount = exclusiveScissorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & + setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT + { + pExclusiveScissors = pExclusiveScissors_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + 
PipelineViewportExclusiveScissorStateCreateInfoNV & + setExclusiveScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & exclusiveScissors_ ) VULKAN_HPP_NOEXCEPT + { + exclusiveScissorCount = static_cast( exclusiveScissors_.size() ); + pExclusiveScissors = exclusiveScissors_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineViewportExclusiveScissorStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, exclusiveScissorCount, pExclusiveScissors ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exclusiveScissorCount == rhs.exclusiveScissorCount ) && + ( pExclusiveScissors == rhs.pExclusiveScissors ); +# endif + } + + bool operator!=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; + const void * pNext = {}; + uint32_t exclusiveScissorCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors = {}; + }; + + template <> + struct CppType + { + using Type = PipelineViewportExclusiveScissorStateCreateInfoNV; + }; + + // wrapper struct for struct VkShadingRatePaletteNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShadingRatePaletteNV.html + struct ShadingRatePaletteNV + { + using NativeType = VkShadingRatePaletteNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( uint32_t shadingRatePaletteEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ = {} ) VULKAN_HPP_NOEXCEPT + : shadingRatePaletteEntryCount{ shadingRatePaletteEntryCount_ } + , pShadingRatePaletteEntries{ pShadingRatePaletteEntries_ } + { + } + + VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ShadingRatePaletteNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShadingRatePaletteNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePaletteEntries_ ) + : shadingRatePaletteEntryCount( static_cast( shadingRatePaletteEntries_.size() ) ) + , pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + 
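// Editor's note (illustrative, not part of the generated header): the enhanced-mode
// constructor above takes an ArrayProxyNoTemporaries and fills in the count/pointer
// pair automatically. A minimal usage sketch, assuming the usual vulkan.hpp machinery
// (vk:: namespace alias, enhanced mode enabled); the array name is hypothetical:
//
//   std::array<vk::ShadingRatePaletteEntryNV, 2> entries = {
//     vk::ShadingRatePaletteEntryNV::e1InvocationPerPixel,
//     vk::ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels };
//   vk::ShadingRatePaletteNV palette( entries );  // count = 2, pointer = entries.data()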
ShadingRatePaletteNV & operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & + setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT + { + pShadingRatePaletteEntries = pShadingRatePaletteEntries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShadingRatePaletteNV & setShadingRatePaletteEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePaletteEntries_ ) + VULKAN_HPP_NOEXCEPT + { + shadingRatePaletteEntryCount = static_cast( shadingRatePaletteEntries_.size() ); + pShadingRatePaletteEntries = shadingRatePaletteEntries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkShadingRatePaletteNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShadingRatePaletteNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkShadingRatePaletteNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( shadingRatePaletteEntryCount, pShadingRatePaletteEntries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShadingRatePaletteNV const & ) const = default; +#else + bool operator==( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount ) && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries ); +# endif + } + + bool operator!=( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t shadingRatePaletteEntryCount = {}; + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries = {}; + }; + + // wrapper struct for struct VkPipelineViewportShadingRateImageStateCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportShadingRateImageStateCreateInfoNV.html + struct PipelineViewportShadingRateImageStateCreateInfoNV + { + using NativeType = VkPipelineViewportShadingRateImageStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {}, + uint32_t 
viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shadingRateImageEnable{ shadingRateImageEnable_ } + , viewportCount{ viewportCount_ } + , pShadingRatePalettes{ pShadingRatePalettes_ } + { + } + + VULKAN_HPP_CONSTEXPR + PipelineViewportShadingRateImageStateCreateInfoNV( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportShadingRateImageStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportShadingRateImageStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePalettes_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , shadingRateImageEnable( shadingRateImageEnable_ ) + , viewportCount( static_cast( shadingRatePalettes_.size() ) ) + , pShadingRatePalettes( shadingRatePalettes_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineViewportShadingRateImageStateCreateInfoNV & + operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineViewportShadingRateImageStateCreateInfoNV & operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & + setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateImageEnable = shadingRateImageEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = viewportCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & + setPShadingRatePalettes( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT + { + pShadingRatePalettes = pShadingRatePalettes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRatePalettes( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( shadingRatePalettes_.size() ); + pShadingRatePalettes = shadingRatePalettes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineViewportShadingRateImageStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportShadingRateImageStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator 
VkPipelineViewportShadingRateImageStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shadingRateImageEnable, viewportCount, pShadingRatePalettes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateImageEnable == rhs.shadingRateImageEnable ) && + ( viewportCount == rhs.viewportCount ) && ( pShadingRatePalettes == rhs.pShadingRatePalettes ); +# endif + } + + bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes = {}; + }; + + template <> + struct CppType + { + using Type = PipelineViewportShadingRateImageStateCreateInfoNV; + }; + + // wrapper struct for struct VkViewportSwizzleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkViewportSwizzleNV.html + struct ViewportSwizzleNV + { + using NativeType = VkViewportSwizzleNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT + : x{ x_ } + , y{ y_ } + , z{ z_ } + , w{ w_ } + { + } + + VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT : ViewportSwizzleNV( *reinterpret_cast( &rhs ) ) {} + + ViewportSwizzleNV & operator=( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ViewportSwizzleNV & operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setX( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setY( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ ) VULKAN_HPP_NOEXCEPT + { + y = y_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setZ( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ ) VULKAN_HPP_NOEXCEPT + { + z = z_; + 
return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setW( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ ) VULKAN_HPP_NOEXCEPT + { + w = w_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkViewportSwizzleNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkViewportSwizzleNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkViewportSwizzleNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( x, y, z, w ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ViewportSwizzleNV const & ) const = default; +#else + bool operator==( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z ) && ( w == rhs.w ); +# endif + } + + bool operator!=( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; + }; + + // wrapper struct for struct VkPipelineViewportSwizzleStateCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportSwizzleStateCreateInfoNV.html + struct PipelineViewportSwizzleStateCreateInfoNV + { + using NativeType = VkPipelineViewportSwizzleStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , viewportCount{ viewportCount_ } + , pViewportSwizzles{ pViewportSwizzles_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportSwizzleStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportSwizzleStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportSwizzles_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ), viewportCount( 
static_cast( viewportSwizzles_.size() ) ), pViewportSwizzles( viewportSwizzles_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineViewportSwizzleStateCreateInfoNV & operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineViewportSwizzleStateCreateInfoNV & operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = viewportCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & + setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT + { + pViewportSwizzles = pViewportSwizzles_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportSwizzleStateCreateInfoNV & setViewportSwizzles( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportSwizzles_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( viewportSwizzles_.size() ); + pViewportSwizzles = viewportSwizzles_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineViewportSwizzleStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportSwizzleStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineViewportSwizzleStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, viewportCount, pViewportSwizzles ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewportCount == rhs.viewportCount ) && + ( pViewportSwizzles == rhs.pViewportSwizzles ); +# endif + } + + bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {}; + uint32_t viewportCount = {}; + 
const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles = {}; + }; + + template <> + struct CppType + { + using Type = PipelineViewportSwizzleStateCreateInfoNV; + }; + + // wrapper struct for struct VkViewportWScalingNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkViewportWScalingNV.html + struct ViewportWScalingNV + { + using NativeType = VkViewportWScalingNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = {}, float ycoeff_ = {} ) VULKAN_HPP_NOEXCEPT + : xcoeff{ xcoeff_ } + , ycoeff{ ycoeff_ } + { + } + + VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT : ViewportWScalingNV( *reinterpret_cast( &rhs ) ) {} + + ViewportWScalingNV & operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT + { + xcoeff = xcoeff_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT + { + ycoeff = ycoeff_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkViewportWScalingNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkViewportWScalingNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkViewportWScalingNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( xcoeff, ycoeff ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ViewportWScalingNV const & ) const = default; +#else + bool operator==( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( xcoeff == rhs.xcoeff ) && ( ycoeff == rhs.ycoeff ); +# endif + } + + bool operator!=( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float xcoeff = {}; + float ycoeff = {}; + }; + + // wrapper struct for struct VkPipelineViewportWScalingStateCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportWScalingStateCreateInfoNV.html + struct PipelineViewportWScalingStateCreateInfoNV + { + using NativeType = VkPipelineViewportWScalingStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportWScalingStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {}, + uint32_t viewportCount_ = {}, + const 
VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , viewportWScalingEnable{ viewportWScalingEnable_ } + , viewportCount{ viewportCount_ } + , pViewportWScalings{ pViewportWScalings_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportWScalingStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportWScalingStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportWScalings_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , viewportWScalingEnable( viewportWScalingEnable_ ) + , viewportCount( static_cast( viewportWScalings_.size() ) ) + , pViewportWScalings( viewportWScalings_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineViewportWScalingStateCreateInfoNV & operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineViewportWScalingStateCreateInfoNV & operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & + setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT + { + viewportWScalingEnable = viewportWScalingEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = viewportCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & + setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT + { + pViewportWScalings = pViewportWScalings_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportWScalingStateCreateInfoNV & setViewportWScalings( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportWScalings_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( viewportWScalings_.size() ); + pViewportWScalings = viewportWScalings_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineViewportWScalingStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportWScalingStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportWScalingStateCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPipelineViewportWScalingStateCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# 
endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, viewportWScalingEnable, viewportCount, pViewportWScalings ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewportWScalingEnable == rhs.viewportWScalingEnable ) && + ( viewportCount == rhs.viewportCount ) && ( pViewportWScalings == rhs.pViewportWScalings ); +# endif + } + + bool operator!=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings = {}; + }; + + template <> + struct CppType + { + using Type = PipelineViewportWScalingStateCreateInfoNV; + }; + +#if defined( VK_USE_PLATFORM_GGP ) + // wrapper struct for struct VkPresentFrameTokenGGP, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentFrameTokenGGP.html + struct PresentFrameTokenGGP + { + using NativeType = VkPresentFrameTokenGGP; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentFrameTokenGGP; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( GgpFrameToken frameToken_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , frameToken{ frameToken_ } + { + } + + VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT + : PresentFrameTokenGGP( *reinterpret_cast( &rhs ) ) + { + } + + PresentFrameTokenGGP & operator=( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PresentFrameTokenGGP & operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & setFrameToken( GgpFrameToken frameToken_ ) VULKAN_HPP_NOEXCEPT + { + frameToken = frameToken_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPresentFrameTokenGGP const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentFrameTokenGGP const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPresentFrameTokenGGP *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const 
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, frameToken ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ) == 0 ); + } + + bool operator!=( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP; + const void * pNext = {}; + GgpFrameToken frameToken = {}; + }; + + template <> + struct CppType + { + using Type = PresentFrameTokenGGP; + }; +#endif /*VK_USE_PLATFORM_GGP*/ + + // wrapper struct for struct VkPresentIdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentIdKHR.html + struct PresentIdKHR + { + using NativeType = VkPresentIdKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentIdKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentIdKHR( uint32_t swapchainCount_ = {}, const uint64_t * pPresentIds_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , swapchainCount{ swapchainCount_ } + , pPresentIds{ pPresentIds_ } + { + } + + VULKAN_HPP_CONSTEXPR PresentIdKHR( PresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentIdKHR( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentIdKHR( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentIdKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentIds_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchainCount( static_cast( presentIds_.size() ) ), pPresentIds( presentIds_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PresentIdKHR & operator=( PresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PresentIdKHR & operator=( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setPPresentIds( const uint64_t * pPresentIds_ ) VULKAN_HPP_NOEXCEPT + { + pPresentIds = pPresentIds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentIdKHR & setPresentIds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentIds_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( presentIds_.size() ); + pPresentIds = presentIds_.data(); + return *this; + } 
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPresentIdKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentIdKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentIdKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPresentIdKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchainCount, pPresentIds ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentIdKHR const & ) const = default; +#else + bool operator==( PresentIdKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pPresentIds == rhs.pPresentIds ); +# endif + } + + bool operator!=( PresentIdKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentIdKHR; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const uint64_t * pPresentIds = {}; + }; + + template <> + struct CppType + { + using Type = PresentIdKHR; + }; + + // wrapper struct for struct VkPresentInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentInfoKHR.html + struct PresentInfoKHR + { + using NativeType = VkPresentInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, + uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ = {}, + const uint32_t * pImageIndices_ = {}, + VULKAN_HPP_NAMESPACE::Result * pResults_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , waitSemaphoreCount{ waitSemaphoreCount_ } + , pWaitSemaphores{ pWaitSemaphores_ } + , swapchainCount{ swapchainCount_ } + , pSwapchains{ pSwapchains_ } + , pImageIndices{ pImageIndices_ } + , pResults{ pResults_ } + { + } + + VULKAN_HPP_CONSTEXPR PresentInfoKHR( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentInfoKHR( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & swapchains_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & results_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ) + , pWaitSemaphores( waitSemaphores_.data() ) + , swapchainCount( static_cast( swapchains_.size() ) ) + , pSwapchains( swapchains_.data() ) + , pImageIndices( imageIndices_.data() ) + , pResults( results_.data() ) + { +# ifdef 
VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( swapchains_.size() == imageIndices_.size() ); + VULKAN_HPP_ASSERT( results_.empty() || ( swapchains_.size() == results_.size() ) ); + VULKAN_HPP_ASSERT( results_.empty() || ( imageIndices_.size() == results_.size() ) ); +# else + if ( swapchains_.size() != imageIndices_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != imageIndices_.size()" ); + } + if ( !results_.empty() && ( swapchains_.size() != results_.size() ) ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )" ); + } + if ( !results_.empty() && ( imageIndices_.size() != results_.size() ) ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( imageIndices_.size() != results_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PresentInfoKHR & operator=( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PresentInfoKHR & operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & + setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ ) VULKAN_HPP_NOEXCEPT + { + pSwapchains = pSwapchains_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & + setSwapchains( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & swapchains_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( swapchains_.size() ); + pSwapchains = swapchains_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT + { + pImageIndices = pImageIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & setImageIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( imageIndices_.size() ); + pImageIndices = imageIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPResults( 
VULKAN_HPP_NAMESPACE::Result * pResults_ ) VULKAN_HPP_NOEXCEPT + { + pResults = pResults_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & setResults( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & results_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( results_.size() ); + pResults = results_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPresentInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphores, swapchainCount, pSwapchains, pImageIndices, pResults ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentInfoKHR const & ) const = default; +#else + bool operator==( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( swapchainCount == rhs.swapchainCount ) && ( pSwapchains == rhs.pSwapchains ) && + ( pImageIndices == rhs.pImageIndices ) && ( pResults == rhs.pResults ); +# endif + } + + bool operator!=( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains = {}; + const uint32_t * pImageIndices = {}; + VULKAN_HPP_NAMESPACE::Result * pResults = {}; + }; + + template <> + struct CppType + { + using Type = PresentInfoKHR; + }; + + // wrapper struct for struct VkRectLayerKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRectLayerKHR.html + struct RectLayerKHR + { + using NativeType = VkRectLayerKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RectLayerKHR( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}, uint32_t layer_ = {} ) VULKAN_HPP_NOEXCEPT + : offset{ offset_ } + , extent{ extent_ } + , layer{ layer_ } + { + } + + VULKAN_HPP_CONSTEXPR RectLayerKHR( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT : RectLayerKHR( *reinterpret_cast( &rhs ) ) {} + + explicit RectLayerKHR( Rect2D const & rect2D, uint32_t layer_ = {} ) : offset( rect2D.offset ), extent( rect2D.extent ), layer( layer_ ) {} + + RectLayerKHR & operator=( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RectLayerKHR & operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if 
!defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setLayer( uint32_t layer_ ) VULKAN_HPP_NOEXCEPT + { + layer = layer_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRectLayerKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRectLayerKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRectLayerKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( offset, extent, layer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RectLayerKHR const & ) const = default; +#else + bool operator==( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( offset == rhs.offset ) && ( extent == rhs.extent ) && ( layer == rhs.layer ); +# endif + } + + bool operator!=( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Offset2D offset = {}; + VULKAN_HPP_NAMESPACE::Extent2D extent = {}; + uint32_t layer = {}; + }; + + // wrapper struct for struct VkPresentRegionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentRegionKHR.html + struct PresentRegionKHR + { + using NativeType = VkPresentRegionKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentRegionKHR( uint32_t rectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ = {} ) VULKAN_HPP_NOEXCEPT + : rectangleCount{ rectangleCount_ } + , pRectangles{ pRectangles_ } + { + } + + VULKAN_HPP_CONSTEXPR PresentRegionKHR( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentRegionKHR( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentRegionKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & rectangles_ ) + : rectangleCount( static_cast( rectangles_.size() ) ), pRectangles( rectangles_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PresentRegionKHR & operator=( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PresentRegionKHR & operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & setRectangleCount( uint32_t rectangleCount_ ) VULKAN_HPP_NOEXCEPT + { + rectangleCount = rectangleCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ ) 
VULKAN_HPP_NOEXCEPT + { + pRectangles = pRectangles_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentRegionKHR & + setRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & rectangles_ ) VULKAN_HPP_NOEXCEPT + { + rectangleCount = static_cast( rectangles_.size() ); + pRectangles = rectangles_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPresentRegionKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentRegionKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPresentRegionKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( rectangleCount, pRectangles ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentRegionKHR const & ) const = default; +#else + bool operator==( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( rectangleCount == rhs.rectangleCount ) && ( pRectangles == rhs.pRectangles ); +# endif + } + + bool operator!=( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t rectangleCount = {}; + const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles = {}; + }; + + // wrapper struct for struct VkPresentRegionsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentRegionsKHR.html + struct PresentRegionsKHR + { + using NativeType = VkPresentRegionsKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentRegionsKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentRegionsKHR( uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , swapchainCount{ swapchainCount_ } + , pRegions{ pRegions_ } + { + } + + VULKAN_HPP_CONSTEXPR PresentRegionsKHR( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentRegionsKHR( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentRegionsKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchainCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PresentRegionsKHR & operator=( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PresentRegionsKHR & operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + 
VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentRegionsKHR & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPresentRegionsKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentRegionsKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPresentRegionsKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchainCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentRegionsKHR const & ) const = default; +#else + bool operator==( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = PresentRegionsKHR; + }; + + // wrapper struct for struct VkPresentTimeGOOGLE, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentTimeGOOGLE.html + struct PresentTimeGOOGLE + { + using NativeType = VkPresentTimeGOOGLE; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {} ) VULKAN_HPP_NOEXCEPT + : presentID{ presentID_ } + , desiredPresentTime{ desiredPresentTime_ } + { + } + + VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT : PresentTimeGOOGLE( *reinterpret_cast( &rhs ) ) {} + + PresentTimeGOOGLE & operator=( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PresentTimeGOOGLE & operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE & setPresentID( uint32_t presentID_ ) VULKAN_HPP_NOEXCEPT + { + presentID = presentID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PresentTimeGOOGLE & setDesiredPresentTime( uint64_t desiredPresentTime_ ) VULKAN_HPP_NOEXCEPT + { + desiredPresentTime = desiredPresentTime_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPresentTimeGOOGLE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentTimeGOOGLE const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPresentTimeGOOGLE *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( presentID, desiredPresentTime ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentTimeGOOGLE const & ) const = default; +#else + bool operator==( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime ); +# endif + } + + bool operator!=( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t presentID = {}; + uint64_t desiredPresentTime = {}; + }; + + // wrapper struct for struct VkPresentTimesInfoGOOGLE, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentTimesInfoGOOGLE.html + struct PresentTimesInfoGOOGLE + { + using NativeType = VkPresentTimesInfoGOOGLE; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentTimesInfoGOOGLE; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , swapchainCount{ swapchainCount_ } + , pTimes{ pTimes_ } + { + } + + VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + : PresentTimesInfoGOOGLE( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentTimesInfoGOOGLE( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & times_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchainCount( static_cast( times_.size() ) ), pTimes( times_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PresentTimesInfoGOOGLE & operator=( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PresentTimesInfoGOOGLE & operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PresentTimesInfoGOOGLE & setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ ) VULKAN_HPP_NOEXCEPT + { + pTimes = pTimes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentTimesInfoGOOGLE & + setTimes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & times_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( times_.size() ); + pTimes = times_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPresentTimesInfoGOOGLE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentTimesInfoGOOGLE const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPresentTimesInfoGOOGLE *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchainCount, pTimes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentTimesInfoGOOGLE const & ) const = default; +#else + bool operator==( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pTimes == rhs.pTimes ); +# endif + } + + bool operator!=( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes = {}; + }; + + template <> + struct CppType + { + using Type = PresentTimesInfoGOOGLE; + }; + + // wrapper struct for struct VkPrivateDataSlotCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPrivateDataSlotCreateInfo.html + struct PrivateDataSlotCreateInfo + { + using NativeType = VkPrivateDataSlotCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePrivateDataSlotCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PrivateDataSlotCreateInfo( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PrivateDataSlotCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PrivateDataSlotCreateInfo & operator=( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PrivateDataSlotCreateInfo & operator=( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setPNext( const void * pNext_ ) 
VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPrivateDataSlotCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPrivateDataSlotCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPrivateDataSlotCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPrivateDataSlotCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PrivateDataSlotCreateInfo const & ) const = default; +#else + bool operator==( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePrivateDataSlotCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags = {}; + }; + + template <> + struct CppType + { + using Type = PrivateDataSlotCreateInfo; + }; + + using PrivateDataSlotCreateInfoEXT = PrivateDataSlotCreateInfo; + + // wrapper struct for struct VkProtectedSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkProtectedSubmitInfo.html + struct ProtectedSubmitInfo + { + using NativeType = VkProtectedSubmitInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eProtectedSubmitInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , protectedSubmit{ protectedSubmit_ } + { + } + + VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ProtectedSubmitInfo( *reinterpret_cast( &rhs ) ) + { + } + + ProtectedSubmitInfo & operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ProtectedSubmitInfo & operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setProtectedSubmit( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT + { + protectedSubmit = protectedSubmit_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator 
VkProtectedSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkProtectedSubmitInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkProtectedSubmitInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, protectedSubmit ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ProtectedSubmitInfo const & ) const = default; +#else + bool operator==( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedSubmit == rhs.protectedSubmit ); +# endif + } + + bool operator!=( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {}; + }; + + template <> + struct CppType + { + using Type = ProtectedSubmitInfo; + }; + + // wrapper struct for struct VkPushConstantsInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPushConstantsInfo.html + struct PushConstantsInfo + { + using NativeType = VkPushConstantsInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushConstantsInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PushConstantsInfo( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + uint32_t offset_ = {}, + uint32_t size_ = {}, + const void * pValues_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , layout{ layout_ } + , stageFlags{ stageFlags_ } + , offset{ offset_ } + , size{ size_ } + , pValues{ pValues_ } + { + } + + VULKAN_HPP_CONSTEXPR PushConstantsInfo( PushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PushConstantsInfo( VkPushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT : PushConstantsInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PushConstantsInfo( VULKAN_HPP_NAMESPACE::PipelineLayout layout_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + uint32_t offset_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , layout( layout_ ) + , stageFlags( stageFlags_ ) + , offset( offset_ ) + , size( static_cast( values_.size() * sizeof( T ) ) ) + , pValues( values_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PushConstantsInfo & operator=( PushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PushConstantsInfo & operator=( VkPushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setPNext( const void * pNext_ ) 
VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + { + stageFlags = stageFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setPValues( const void * pValues_ ) VULKAN_HPP_NOEXCEPT + { + pValues = pValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PushConstantsInfo & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + size = static_cast( values_.size() * sizeof( T ) ); + pValues = values_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPushConstantsInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPushConstantsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPushConstantsInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPushConstantsInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, layout, stageFlags, offset, size, pValues ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PushConstantsInfo const & ) const = default; +#else + bool operator==( PushConstantsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( layout == rhs.layout ) && ( stageFlags == rhs.stageFlags ) && ( offset == rhs.offset ) && + ( size == rhs.size ) && ( pValues == rhs.pValues ); +# endif + } + + bool operator!=( PushConstantsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePushConstantsInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + uint32_t offset = {}; + uint32_t size = {}; + const void * pValues = {}; + }; + + template <> + struct CppType + { + using Type = PushConstantsInfo; + }; + + using PushConstantsInfoKHR = PushConstantsInfo; + + // wrapper struct for struct VkWriteDescriptorSet, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteDescriptorSet.html + struct WriteDescriptorSet + { + using NativeType = VkWriteDescriptorSet; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, + uint32_t dstBinding_ = {}, + 
uint32_t dstArrayElement_ = {}, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ = {}, + const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dstSet{ dstSet_ } + , dstBinding{ dstBinding_ } + , dstArrayElement{ dstArrayElement_ } + , descriptorCount{ descriptorCount_ } + , descriptorType{ descriptorType_ } + , pImageInfo{ pImageInfo_ } + , pBufferInfo{ pBufferInfo_ } + , pTexelBufferView{ pTexelBufferView_ } + { + } + + VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT : WriteDescriptorSet( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_, + uint32_t dstBinding_, + uint32_t dstArrayElement_, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferInfo_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & texelBufferView_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , dstSet( dstSet_ ) + , dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( static_cast( !imageInfo_.empty() ? imageInfo_.size() + : !bufferInfo_.empty() ? bufferInfo_.size() + : texelBufferView_.size() ) ) + , descriptorType( descriptorType_ ) + , pImageInfo( imageInfo_.data() ) + , pBufferInfo( bufferInfo_.data() ) + , pTexelBufferView( texelBufferView_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) <= 1 ); +# else + if ( 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::WriteDescriptorSet::WriteDescriptorSet: 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT + { + dstSet = dstSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT + { + dstBinding = dstBinding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT + { + dstArrayElement = dstArrayElement_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + 
descriptorCount = descriptorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ ) VULKAN_HPP_NOEXCEPT + { + pImageInfo = pImageInfo_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet & + setImageInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageInfo_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( imageInfo_.size() ); + pImageInfo = imageInfo_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ ) VULKAN_HPP_NOEXCEPT + { + pBufferInfo = pBufferInfo_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet & + setBufferInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferInfo_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( bufferInfo_.size() ); + pBufferInfo = bufferInfo_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT + { + pTexelBufferView = pTexelBufferView_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet & + setTexelBufferView( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & texelBufferView_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( texelBufferView_.size() ); + pTexelBufferView = texelBufferView_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkWriteDescriptorSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteDescriptorSet const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkWriteDescriptorSet *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dstSet, dstBinding, dstArrayElement, descriptorCount, descriptorType, pImageInfo, pBufferInfo, pTexelBufferView ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteDescriptorSet const & ) const = default; +#else + bool operator==( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) && + ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) && + ( pImageInfo == rhs.pImageInfo ) && ( pBufferInfo == rhs.pBufferInfo ) && ( pTexelBufferView == rhs.pTexelBufferView ); +# endif + } + + bool operator!=( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo = {}; + const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo = {}; + const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView = {}; + }; + + template <> + struct CppType + { + using Type = WriteDescriptorSet; + }; + + // wrapper struct for struct VkPushDescriptorSetInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPushDescriptorSetInfo.html + struct PushDescriptorSetInfo + { + using NativeType = VkPushDescriptorSetInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushDescriptorSetInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PushDescriptorSetInfo( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + uint32_t set_ = {}, + uint32_t descriptorWriteCount_ = {}, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stageFlags{ stageFlags_ } + , layout{ layout_ } + , set{ set_ } + , descriptorWriteCount{ descriptorWriteCount_ } + , pDescriptorWrites{ pDescriptorWrites_ } + { + } + + VULKAN_HPP_CONSTEXPR PushDescriptorSetInfo( PushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PushDescriptorSetInfo( VkPushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PushDescriptorSetInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PushDescriptorSetInfo( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_, + uint32_t set_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorWrites_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stageFlags( stageFlags_ ) + , layout( layout_ ) + , set( set_ ) + , descriptorWriteCount( static_cast( descriptorWrites_.size() ) ) + , pDescriptorWrites( descriptorWrites_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PushDescriptorSetInfo & operator=( PushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PushDescriptorSetInfo & operator=( VkPushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + { + stageFlags = stageFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT + { + set = set_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PushDescriptorSetInfo & setDescriptorWriteCount( uint32_t descriptorWriteCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorWriteCount = descriptorWriteCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & + setPDescriptorWrites( const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites_ ) VULKAN_HPP_NOEXCEPT + { + pDescriptorWrites = pDescriptorWrites_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PushDescriptorSetInfo & setDescriptorWrites( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorWrites_ ) VULKAN_HPP_NOEXCEPT + { + descriptorWriteCount = static_cast( descriptorWrites_.size() ); + pDescriptorWrites = descriptorWrites_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPushDescriptorSetInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPushDescriptorSetInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPushDescriptorSetInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPushDescriptorSetInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stageFlags, layout, set, descriptorWriteCount, pDescriptorWrites ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PushDescriptorSetInfo const & ) const = default; +#else + bool operator==( PushDescriptorSetInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageFlags == rhs.stageFlags ) && ( layout == rhs.layout ) && ( set == rhs.set ) && + ( descriptorWriteCount == rhs.descriptorWriteCount ) && ( pDescriptorWrites == rhs.pDescriptorWrites ); +# endif + } + + bool operator!=( PushDescriptorSetInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePushDescriptorSetInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + uint32_t set = {}; + uint32_t descriptorWriteCount = {}; + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites = {}; + }; + + template <> + struct CppType + { + using Type = PushDescriptorSetInfo; + }; + + using PushDescriptorSetInfoKHR = PushDescriptorSetInfo; + + // wrapper struct for struct VkPushDescriptorSetWithTemplateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPushDescriptorSetWithTemplateInfo.html + struct PushDescriptorSetWithTemplateInfo + { + using NativeType = VkPushDescriptorSetWithTemplateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushDescriptorSetWithTemplateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PushDescriptorSetWithTemplateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + uint32_t set_ = {}, + const void * pData_ = {}, + const void * pNext_ = 
nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , descriptorUpdateTemplate{ descriptorUpdateTemplate_ } + , layout{ layout_ } + , set{ set_ } + , pData{ pData_ } + { + } + + VULKAN_HPP_CONSTEXPR PushDescriptorSetWithTemplateInfo( PushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PushDescriptorSetWithTemplateInfo( VkPushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PushDescriptorSetWithTemplateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PushDescriptorSetWithTemplateInfo & operator=( PushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PushDescriptorSetWithTemplateInfo & operator=( VkPushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & + setDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate_ ) VULKAN_HPP_NOEXCEPT + { + descriptorUpdateTemplate = descriptorUpdateTemplate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT + { + set = set_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT + { + pData = pData_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPushDescriptorSetWithTemplateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPushDescriptorSetWithTemplateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPushDescriptorSetWithTemplateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkPushDescriptorSetWithTemplateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorUpdateTemplate, layout, set, pData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PushDescriptorSetWithTemplateInfo const & ) const = default; +#else + bool operator==( PushDescriptorSetWithTemplateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorUpdateTemplate == rhs.descriptorUpdateTemplate ) && ( layout == rhs.layout ) && + ( set == rhs.set ) && ( pData == rhs.pData ); +# endif + } + + bool operator!=( PushDescriptorSetWithTemplateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePushDescriptorSetWithTemplateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate = {}; + 
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + uint32_t set = {}; + const void * pData = {}; + }; + + template <> + struct CppType + { + using Type = PushDescriptorSetWithTemplateInfo; + }; + + using PushDescriptorSetWithTemplateInfoKHR = PushDescriptorSetWithTemplateInfo; + + // wrapper struct for struct VkQueryLowLatencySupportNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryLowLatencySupportNV.html + struct QueryLowLatencySupportNV + { + using NativeType = VkQueryLowLatencySupportNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryLowLatencySupportNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryLowLatencySupportNV( void * pQueriedLowLatencyData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pQueriedLowLatencyData{ pQueriedLowLatencyData_ } + { + } + + VULKAN_HPP_CONSTEXPR QueryLowLatencySupportNV( QueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryLowLatencySupportNV( VkQueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT + : QueryLowLatencySupportNV( *reinterpret_cast( &rhs ) ) + { + } + + QueryLowLatencySupportNV & operator=( QueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + QueryLowLatencySupportNV & operator=( VkQueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 QueryLowLatencySupportNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryLowLatencySupportNV & setPQueriedLowLatencyData( void * pQueriedLowLatencyData_ ) VULKAN_HPP_NOEXCEPT + { + pQueriedLowLatencyData = pQueriedLowLatencyData_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkQueryLowLatencySupportNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryLowLatencySupportNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryLowLatencySupportNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkQueryLowLatencySupportNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pQueriedLowLatencyData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryLowLatencySupportNV const & ) const = default; +#else + bool operator==( QueryLowLatencySupportNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pQueriedLowLatencyData == rhs.pQueriedLowLatencyData ); +# endif + } + + bool operator!=( QueryLowLatencySupportNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryLowLatencySupportNV; + const void * pNext = {}; + void * pQueriedLowLatencyData = {}; + }; + + template <> + struct CppType + { + using Type = 
QueryLowLatencySupportNV; + }; + + // wrapper struct for struct VkQueryPoolCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPoolCreateInfo.html + struct QueryPoolCreateInfo + { + using NativeType = VkQueryPoolCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion, + uint32_t queryCount_ = {}, + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , queryType{ queryType_ } + , queryCount{ queryCount_ } + , pipelineStatistics{ pipelineStatistics_ } + { + } + + VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : QueryPoolCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + QueryPoolCreateInfo & operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + QueryPoolCreateInfo & operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT + { + queryType = queryType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT + { + queryCount = queryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & + setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStatistics = pipelineStatistics_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkQueryPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryPoolCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkQueryPoolCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, queryType, queryCount, pipelineStatistics ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPoolCreateInfo const & ) const = default; +#else + bool operator==( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return 
( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queryType == rhs.queryType ) && ( queryCount == rhs.queryCount ) && + ( pipelineStatistics == rhs.pipelineStatistics ); +# endif + } + + bool operator!=( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion; + uint32_t queryCount = {}; + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; + }; + + template <> + struct CppType + { + using Type = QueryPoolCreateInfo; + }; + + // wrapper struct for struct VkQueryPoolPerformanceCreateInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPoolPerformanceCreateInfoKHR.html + struct QueryPoolPerformanceCreateInfoKHR + { + using NativeType = VkQueryPoolPerformanceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_ = {}, + uint32_t counterIndexCount_ = {}, + const uint32_t * pCounterIndices_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , queueFamilyIndex{ queueFamilyIndex_ } + , counterIndexCount{ counterIndexCount_ } + , pCounterIndices{ pCounterIndices_ } + { + } + + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : QueryPoolPerformanceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & counterIndices_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , counterIndexCount( static_cast( counterIndices_.size() ) ) + , pCounterIndices( counterIndices_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + QueryPoolPerformanceCreateInfoKHR & operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + QueryPoolPerformanceCreateInfoKHR & operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + counterIndexCount = counterIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & 
setPCounterIndices( const uint32_t * pCounterIndices_ ) VULKAN_HPP_NOEXCEPT + { + pCounterIndices = pCounterIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + QueryPoolPerformanceCreateInfoKHR & + setCounterIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & counterIndices_ ) VULKAN_HPP_NOEXCEPT + { + counterIndexCount = static_cast( counterIndices_.size() ); + pCounterIndices = counterIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkQueryPoolPerformanceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryPoolPerformanceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkQueryPoolPerformanceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, queueFamilyIndex, counterIndexCount, pCounterIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPoolPerformanceCreateInfoKHR const & ) const = default; +#else + bool operator==( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && + ( counterIndexCount == rhs.counterIndexCount ) && ( pCounterIndices == rhs.pCounterIndices ); +# endif + } + + bool operator!=( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR; + const void * pNext = {}; + uint32_t queueFamilyIndex = {}; + uint32_t counterIndexCount = {}; + const uint32_t * pCounterIndices = {}; + }; + + template <> + struct CppType + { + using Type = QueryPoolPerformanceCreateInfoKHR; + }; + + // wrapper struct for struct VkQueryPoolPerformanceQueryCreateInfoINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPoolPerformanceQueryCreateInfoINTEL.html + struct QueryPoolPerformanceQueryCreateInfoINTEL + { + using NativeType = VkQueryPoolPerformanceQueryCreateInfoINTEL; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , performanceCountersSampling{ performanceCountersSampling_ } + { + } + + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : 
QueryPoolPerformanceQueryCreateInfoINTEL( *reinterpret_cast( &rhs ) ) + { + } + + QueryPoolPerformanceQueryCreateInfoINTEL & operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + QueryPoolPerformanceQueryCreateInfoINTEL & operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & + setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT + { + performanceCountersSampling = performanceCountersSampling_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkQueryPoolPerformanceQueryCreateInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryPoolPerformanceQueryCreateInfoINTEL const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkQueryPoolPerformanceQueryCreateInfoINTEL *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, performanceCountersSampling ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const & ) const = default; +#else + bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( performanceCountersSampling == rhs.performanceCountersSampling ); +# endif + } + + bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual; + }; + + template <> + struct CppType + { + using Type = QueryPoolPerformanceQueryCreateInfoINTEL; + }; + + using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL; + + // wrapper struct for struct VkQueryPoolVideoEncodeFeedbackCreateInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPoolVideoEncodeFeedbackCreateInfoKHR.html + struct QueryPoolVideoEncodeFeedbackCreateInfoKHR + { + using NativeType = VkQueryPoolVideoEncodeFeedbackCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolVideoEncodeFeedbackCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolVideoEncodeFeedbackCreateInfoKHR( 
VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR encodeFeedbackFlags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , encodeFeedbackFlags{ encodeFeedbackFlags_ } + { + } + + VULKAN_HPP_CONSTEXPR QueryPoolVideoEncodeFeedbackCreateInfoKHR( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolVideoEncodeFeedbackCreateInfoKHR( VkQueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : QueryPoolVideoEncodeFeedbackCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + QueryPoolVideoEncodeFeedbackCreateInfoKHR & operator=( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + QueryPoolVideoEncodeFeedbackCreateInfoKHR & operator=( VkQueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 QueryPoolVideoEncodeFeedbackCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolVideoEncodeFeedbackCreateInfoKHR & + setEncodeFeedbackFlags( VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR encodeFeedbackFlags_ ) VULKAN_HPP_NOEXCEPT + { + encodeFeedbackFlags = encodeFeedbackFlags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkQueryPoolVideoEncodeFeedbackCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryPoolVideoEncodeFeedbackCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryPoolVideoEncodeFeedbackCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkQueryPoolVideoEncodeFeedbackCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, encodeFeedbackFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & ) const = default; +#else + bool operator==( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( encodeFeedbackFlags == rhs.encodeFeedbackFlags ); +# endif + } + + bool operator!=( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolVideoEncodeFeedbackCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR encodeFeedbackFlags = {}; + }; + + template <> + struct CppType + { + using Type = QueryPoolVideoEncodeFeedbackCreateInfoKHR; + }; + + // wrapper struct for struct VkQueueFamilyCheckpointProperties2NV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyCheckpointProperties2NV.html + struct QueueFamilyCheckpointProperties2NV + { + using NativeType = VkQueueFamilyCheckpointProperties2NV; + + static const bool allowDuplicate = false; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointProperties2NV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , checkpointExecutionStageMask{ checkpointExecutionStageMask_ } + { + } + + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyCheckpointProperties2NV( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyCheckpointProperties2NV( *reinterpret_cast( &rhs ) ) + { + } + + QueueFamilyCheckpointProperties2NV & operator=( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + QueueFamilyCheckpointProperties2NV & operator=( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkQueueFamilyCheckpointProperties2NV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyCheckpointProperties2NV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyCheckpointProperties2NV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkQueueFamilyCheckpointProperties2NV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, checkpointExecutionStageMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyCheckpointProperties2NV const & ) const = default; +#else + bool operator==( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask ); +# endif + } + + bool operator!=( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointProperties2NV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask = {}; + }; + + template <> + struct CppType + { + using Type = QueueFamilyCheckpointProperties2NV; + }; + + // wrapper struct for struct VkQueueFamilyCheckpointPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyCheckpointPropertiesNV.html + struct QueueFamilyCheckpointPropertiesNV + { + using NativeType = VkQueueFamilyCheckpointPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : 
pNext{ pNext_ } + , checkpointExecutionStageMask{ checkpointExecutionStageMask_ } + { + } + + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyCheckpointPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + QueueFamilyCheckpointPropertiesNV & operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + QueueFamilyCheckpointPropertiesNV & operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkQueueFamilyCheckpointPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyCheckpointPropertiesNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkQueueFamilyCheckpointPropertiesNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, checkpointExecutionStageMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyCheckpointPropertiesNV const & ) const = default; +#else + bool operator==( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask ); +# endif + } + + bool operator!=( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {}; + }; + + template <> + struct CppType + { + using Type = QueueFamilyCheckpointPropertiesNV; + }; + + // wrapper struct for struct VkQueueFamilyGlobalPriorityProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyGlobalPriorityProperties.html + struct QueueFamilyGlobalPriorityProperties + { + using NativeType = VkQueueFamilyGlobalPriorityProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyGlobalPriorityProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityProperties( uint32_t priorityCount_ = {}, + std::array const & + priorities_ = { { VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + 
VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow } }, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , priorityCount{ priorityCount_ } + , priorities{ priorities_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityProperties( QueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyGlobalPriorityProperties( VkQueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyGlobalPriorityProperties( *reinterpret_cast( &rhs ) ) + { + } + + QueueFamilyGlobalPriorityProperties & operator=( QueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + QueueFamilyGlobalPriorityProperties & operator=( VkQueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkQueueFamilyGlobalPriorityProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyGlobalPriorityProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyGlobalPriorityProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkQueueFamilyGlobalPriorityProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, priorityCount, priorities ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( QueueFamilyGlobalPriorityProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = priorityCount <=> rhs.priorityCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < priorityCount; ++i ) + { + if ( auto cmp = priorities[i] <=> rhs.priorities[i]; cmp != 0 ) + return cmp; + } + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( QueueFamilyGlobalPriorityProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priorityCount == rhs.priorityCount ) && + ( memcmp( priorities, rhs.priorities, priorityCount * sizeof( VULKAN_HPP_NAMESPACE::QueueGlobalPriority ) ) == 0 ); + } + + bool operator!=( QueueFamilyGlobalPriorityProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyGlobalPriorityProperties; + void * pNext = {}; + uint32_t priorityCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D priorities = {}; + }; + + template <> + struct CppType + { + using Type = QueueFamilyGlobalPriorityProperties; + }; + + using QueueFamilyGlobalPriorityPropertiesEXT = QueueFamilyGlobalPriorityProperties; + using QueueFamilyGlobalPriorityPropertiesKHR = QueueFamilyGlobalPriorityProperties; + + // wrapper struct for struct VkQueueFamilyProperties, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyProperties.html + struct QueueFamilyProperties + { + using NativeType = VkQueueFamilyProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyProperties( VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {}, + uint32_t queueCount_ = {}, + uint32_t timestampValidBits_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT + : queueFlags{ queueFlags_ } + , queueCount{ queueCount_ } + , timestampValidBits{ timestampValidBits_ } + , minImageTransferGranularity{ minImageTransferGranularity_ } + { + } + + VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyProperties( *reinterpret_cast( &rhs ) ) + { + } + + QueueFamilyProperties & operator=( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + QueueFamilyProperties & operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkQueueFamilyProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkQueueFamilyProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( queueFlags, queueCount, timestampValidBits, minImageTransferGranularity ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyProperties const & ) const = default; +#else + bool operator==( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( queueFlags == rhs.queueFlags ) && ( queueCount == rhs.queueCount ) && ( timestampValidBits == rhs.timestampValidBits ) && + ( minImageTransferGranularity == rhs.minImageTransferGranularity ); +# endif + } + + bool operator!=( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {}; + uint32_t queueCount = {}; + uint32_t timestampValidBits = {}; + VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {}; + }; + + // wrapper struct for struct VkQueueFamilyProperties2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyProperties2.html + struct QueueFamilyProperties2 + { + using NativeType = VkQueueFamilyProperties2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyProperties2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , queueFamilyProperties{ queueFamilyProperties_ } + { + } + + 
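+    // Usage sketch (illustrative only, not part of the generated interface): QueueFamilyProperties2 is
+    // the extensible variant of QueueFamilyProperties; per-family extension outputs such as
+    // QueueFamilyVideoPropertiesKHR are read by chaining them through pNext. Assuming the
+    // StructureChain-returning overload of PhysicalDevice::getQueueFamilyProperties2 generated
+    // elsewhere in this header, and physicalDevice being an already-retrieved
+    // VULKAN_HPP_NAMESPACE::PhysicalDevice, that could look like:
+    //   auto chains = physicalDevice.getQueueFamilyProperties2<
+    //     VULKAN_HPP_NAMESPACE::StructureChain<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2,
+    //                                          VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR>>();
+    //   auto videoOps = chains[0].get<VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR>().videoCodecOperations;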
VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyProperties2( *reinterpret_cast( &rhs ) ) + { + } + + QueueFamilyProperties2 & operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + QueueFamilyProperties2 & operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkQueueFamilyProperties2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyProperties2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkQueueFamilyProperties2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, queueFamilyProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyProperties2 const & ) const = default; +#else + bool operator==( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyProperties == rhs.queueFamilyProperties ); +# endif + } + + bool operator!=( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {}; + }; + + template <> + struct CppType + { + using Type = QueueFamilyProperties2; + }; + + using QueueFamilyProperties2KHR = QueueFamilyProperties2; + + // wrapper struct for struct VkQueueFamilyQueryResultStatusPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyQueryResultStatusPropertiesKHR.html + struct QueueFamilyQueryResultStatusPropertiesKHR + { + using NativeType = VkQueueFamilyQueryResultStatusPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyQueryResultStatusPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 queryResultStatusSupport_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , queryResultStatusSupport{ queryResultStatusSupport_ } + { + } + + VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusPropertiesKHR( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyQueryResultStatusPropertiesKHR( VkQueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyQueryResultStatusPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + QueueFamilyQueryResultStatusPropertiesKHR & operator=( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + 
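+    // Usage sketch (illustrative only): this is an output-only properties structure - note that no
+    // setters are generated for it - which the implementation fills in when it is chained into
+    // QueueFamilyProperties2::pNext before a queue-family query, e.g.:
+    //   VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR resultStatus{};
+    //   VULKAN_HPP_NAMESPACE::QueueFamilyProperties2                    props2{};
+    //   props2.pNext = &resultStatus;
+    //   // after the query, resultStatus.queryResultStatusSupport reports whether result-status
+    //   // queries are supported on that queue family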
QueueFamilyQueryResultStatusPropertiesKHR & operator=( VkQueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkQueueFamilyQueryResultStatusPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyQueryResultStatusPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyQueryResultStatusPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkQueueFamilyQueryResultStatusPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, queryResultStatusSupport ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyQueryResultStatusPropertiesKHR const & ) const = default; +#else + bool operator==( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queryResultStatusSupport == rhs.queryResultStatusSupport ); +# endif + } + + bool operator!=( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyQueryResultStatusPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 queryResultStatusSupport = {}; + }; + + template <> + struct CppType + { + using Type = QueueFamilyQueryResultStatusPropertiesKHR; + }; + + // wrapper struct for struct VkQueueFamilyVideoPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueueFamilyVideoPropertiesKHR.html + struct QueueFamilyVideoPropertiesKHR + { + using NativeType = VkQueueFamilyVideoPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyVideoPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyVideoPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , videoCodecOperations{ videoCodecOperations_ } + { + } + + VULKAN_HPP_CONSTEXPR QueueFamilyVideoPropertiesKHR( QueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyVideoPropertiesKHR( VkQueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyVideoPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + QueueFamilyVideoPropertiesKHR & operator=( QueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + QueueFamilyVideoPropertiesKHR & operator=( VkQueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkQueueFamilyVideoPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyVideoPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkQueueFamilyVideoPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkQueueFamilyVideoPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, videoCodecOperations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyVideoPropertiesKHR const & ) const = default; +#else + bool operator==( QueueFamilyVideoPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperations == rhs.videoCodecOperations ); +# endif + } + + bool operator!=( QueueFamilyVideoPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyVideoPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations = {}; + }; + + template <> + struct CppType + { + using Type = QueueFamilyVideoPropertiesKHR; + }; + + // wrapper struct for struct VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV.html + struct RayTracingPipelineClusterAccelerationStructureCreateInfoNV + { + using NativeType = VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineClusterAccelerationStructureCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingPipelineClusterAccelerationStructureCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 allowClusterAccelerationStructure_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , allowClusterAccelerationStructure{ allowClusterAccelerationStructure_ } + { + } + + VULKAN_HPP_CONSTEXPR RayTracingPipelineClusterAccelerationStructureCreateInfoNV( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineClusterAccelerationStructureCreateInfoNV( VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingPipelineClusterAccelerationStructureCreateInfoNV( + *reinterpret_cast( &rhs ) ) + { + } + + RayTracingPipelineClusterAccelerationStructureCreateInfoNV & + operator=( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RayTracingPipelineClusterAccelerationStructureCreateInfoNV & + operator=( VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineClusterAccelerationStructureCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
RayTracingPipelineClusterAccelerationStructureCreateInfoNV & + setAllowClusterAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 allowClusterAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + allowClusterAccelerationStructure = allowClusterAccelerationStructure_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, allowClusterAccelerationStructure ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & ) const = default; +#else + bool operator==( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allowClusterAccelerationStructure == rhs.allowClusterAccelerationStructure ); +# endif + } + + bool operator!=( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineClusterAccelerationStructureCreateInfoNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 allowClusterAccelerationStructure = {}; + }; + + template <> + struct CppType + { + using Type = RayTracingPipelineClusterAccelerationStructureCreateInfoNV; + }; + + // wrapper struct for struct VkRayTracingShaderGroupCreateInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingShaderGroupCreateInfoKHR.html + struct RayTracingShaderGroupCreateInfoKHR + { + using NativeType = VkRayTracingShaderGroupCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, + uint32_t generalShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR, + uint32_t closestHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR, + uint32_t anyHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR, + uint32_t intersectionShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR, + const void * pShaderGroupCaptureReplayHandle_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , generalShader{ generalShader_ } + , closestHitShader{ closestHitShader_ } + , anyHitShader{ anyHitShader_ } + , intersectionShader{ intersectionShader_ } + , pShaderGroupCaptureReplayHandle{ 
pShaderGroupCaptureReplayHandle_ } + { + } + + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingShaderGroupCreateInfoKHR( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingShaderGroupCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + RayTracingShaderGroupCreateInfoKHR & operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RayTracingShaderGroupCreateInfoKHR & operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT + { + generalShader = generalShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT + { + closestHitShader = closestHitShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT + { + anyHitShader = anyHitShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT + { + intersectionShader = intersectionShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & + setPShaderGroupCaptureReplayHandle( const void * pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT + { + pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRayTracingShaderGroupCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingShaderGroupCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRayTracingShaderGroupCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader, pShaderGroupCaptureReplayHandle ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingShaderGroupCreateInfoKHR const & ) const = default; +#else + bool operator==( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( generalShader == rhs.generalShader ) && + ( closestHitShader == rhs.closestHitShader ) && ( 
anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ) && + ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle ); +# endif + } + + bool operator!=( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; + uint32_t generalShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR; + uint32_t closestHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR; + uint32_t anyHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR; + uint32_t intersectionShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR; + const void * pShaderGroupCaptureReplayHandle = {}; + }; + + template <> + struct CppType + { + using Type = RayTracingShaderGroupCreateInfoKHR; + }; + + // wrapper struct for struct VkRayTracingPipelineInterfaceCreateInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingPipelineInterfaceCreateInfoKHR.html + struct RayTracingPipelineInterfaceCreateInfoKHR + { + using NativeType = VkRayTracingPipelineInterfaceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( uint32_t maxPipelineRayPayloadSize_ = {}, + uint32_t maxPipelineRayHitAttributeSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxPipelineRayPayloadSize{ maxPipelineRayPayloadSize_ } + , maxPipelineRayHitAttributeSize{ maxPipelineRayHitAttributeSize_ } + { + } + + VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingPipelineInterfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + RayTracingPipelineInterfaceCreateInfoKHR & operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RayTracingPipelineInterfaceCreateInfoKHR & operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setMaxPipelineRayPayloadSize( uint32_t maxPipelineRayPayloadSize_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineRayPayloadSize = maxPipelineRayPayloadSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & + setMaxPipelineRayHitAttributeSize( uint32_t maxPipelineRayHitAttributeSize_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineRayHitAttributeSize = maxPipelineRayHitAttributeSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRayTracingPipelineInterfaceCreateInfoKHR const &() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineInterfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRayTracingPipelineInterfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxPipelineRayPayloadSize, maxPipelineRayHitAttributeSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const & ) const = default; +#else + bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPipelineRayPayloadSize == rhs.maxPipelineRayPayloadSize ) && + ( maxPipelineRayHitAttributeSize == rhs.maxPipelineRayHitAttributeSize ); +# endif + } + + bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; + const void * pNext = {}; + uint32_t maxPipelineRayPayloadSize = {}; + uint32_t maxPipelineRayHitAttributeSize = {}; + }; + + template <> + struct CppType + { + using Type = RayTracingPipelineInterfaceCreateInfoKHR; + }; + + // wrapper struct for struct VkRayTracingPipelineCreateInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingPipelineCreateInfoKHR.html + struct RayTracingPipelineCreateInfoKHR + { + using NativeType = VkRayTracingPipelineCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ = {}, + uint32_t maxPipelineRayRecursionDepth_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , stageCount{ stageCount_ } + , pStages{ pStages_ } + , groupCount{ groupCount_ } + , pGroups{ pGroups_ } + , maxPipelineRayRecursionDepth{ maxPipelineRayRecursionDepth_ } + , pLibraryInfo{ pLibraryInfo_ } + , pLibraryInterface{ pLibraryInterface_ } + , pDynamicState{ pDynamicState_ } + , layout{ layout_ } + , basePipelineHandle{ basePipelineHandle_ } + , basePipelineIndex{ 
basePipelineIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineCreateInfoKHR( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingPipelineCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoKHR( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ = {}, + uint32_t maxPipelineRayRecursionDepth_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , groupCount( static_cast( groups_.size() ) ) + , pGroups( groups_.data() ) + , maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ) + , pLibraryInfo( pLibraryInfo_ ) + , pLibraryInterface( pLibraryInterface_ ) + , pDynamicState( pDynamicState_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RayTracingPipelineCreateInfoKHR & operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RayTracingPipelineCreateInfoKHR & operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoKHR & + setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + +# if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoKHR & setGroups( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setMaxPipelineRayRecursionDepth( uint32_t maxPipelineRayRecursionDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineRayRecursionDepth = maxPipelineRayRecursionDepth_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT + { + pLibraryInfo = pLibraryInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setPLibraryInterface( const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT + { + pLibraryInterface = pLibraryInterface_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + pDynamicState = pDynamicState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRayTracingPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRayTracingPipelineCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + stageCount, + pStages, + groupCount, + pGroups, + maxPipelineRayRecursionDepth, + pLibraryInfo, + pLibraryInterface, + pDynamicState, + layout, + basePipelineHandle, + basePipelineIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingPipelineCreateInfoKHR const & ) const = default; +#else + bool operator==( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && + ( groupCount == rhs.groupCount ) && ( pGroups == rhs.pGroups ) && ( maxPipelineRayRecursionDepth == rhs.maxPipelineRayRecursionDepth ) && + ( pLibraryInfo == rhs.pLibraryInfo ) && ( pLibraryInterface == rhs.pLibraryInterface ) && ( pDynamicState == 
rhs.pDynamicState ) && + ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); +# endif + } + + bool operator!=( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups = {}; + uint32_t maxPipelineRayRecursionDepth = {}; + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo = {}; + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface = {}; + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + }; + + template <> + struct CppType + { + using Type = RayTracingPipelineCreateInfoKHR; + }; + + // wrapper struct for struct VkRayTracingShaderGroupCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingShaderGroupCreateInfoNV.html + struct RayTracingShaderGroupCreateInfoNV + { + using NativeType = VkRayTracingShaderGroupCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, + uint32_t generalShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV, + uint32_t closestHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV, + uint32_t anyHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV, + uint32_t intersectionShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , generalShader{ generalShader_ } + , closestHitShader{ closestHitShader_ } + , anyHitShader{ anyHitShader_ } + , intersectionShader{ intersectionShader_ } + { + } + + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingShaderGroupCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + RayTracingShaderGroupCreateInfoNV & operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RayTracingShaderGroupCreateInfoNV & operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) 
VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT + { + generalShader = generalShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT + { + closestHitShader = closestHitShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT + { + anyHitShader = anyHitShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT + { + intersectionShader = intersectionShader_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRayTracingShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingShaderGroupCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRayTracingShaderGroupCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingShaderGroupCreateInfoNV const & ) const = default; +#else + bool operator==( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( generalShader == rhs.generalShader ) && + ( closestHitShader == rhs.closestHitShader ) && ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ); +# endif + } + + bool operator!=( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; + uint32_t generalShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV; + uint32_t closestHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV; + uint32_t anyHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV; + uint32_t intersectionShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV; + }; + + template <> + struct CppType + { + using Type = RayTracingShaderGroupCreateInfoNV; + }; + + // wrapper struct for struct VkRayTracingPipelineCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRayTracingPipelineCreateInfoNV.html + struct RayTracingPipelineCreateInfoNV + { + using NativeType = VkRayTracingPipelineCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ = {}, + uint32_t maxRecursionDepth_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , stageCount{ stageCount_ } + , pStages{ pStages_ } + , groupCount{ groupCount_ } + , pGroups{ pGroups_ } + , maxRecursionDepth{ maxRecursionDepth_ } + , layout{ layout_ } + , basePipelineHandle{ basePipelineHandle_ } + , basePipelineIndex{ basePipelineIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingPipelineCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ = {}, + uint32_t maxRecursionDepth_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , groupCount( static_cast( groups_.size() ) ) + , pGroups( groups_.data() ) + , maxRecursionDepth( maxRecursionDepth_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RayTracingPipelineCreateInfoNV & operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RayTracingPipelineCreateInfoNV & operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoNV & + setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + 
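// Illustrative sketch, not part of the generated header: shows how the
// RayTracingShaderGroupCreateInfoNV wrapper defined above is typically filled
// when describing shader groups for a VK_NV_ray_tracing pipeline. The shader
// indices refer to positions in the pStages array of
// RayTracingPipelineCreateInfoNV; the stage layout assumed here (ray-gen at
// index 0, closest-hit at index 1) is purely for illustration.
#include <array>
#include <vulkan/vulkan.hpp>   // assumes the default VULKAN_HPP_NAMESPACE, i.e. vk::

inline std::array<vk::RayTracingShaderGroupCreateInfoNV, 2> makeExampleShaderGroupsNV()
{
  // Group 0: a "general" group referencing the ray generation stage (index 0).
  // All other shader indices stay at VK_SHADER_UNUSED_NV via the struct's defaults.
  vk::RayTracingShaderGroupCreateInfoNV raygenGroup{};
  raygenGroup.setType( vk::RayTracingShaderGroupTypeKHR::eGeneral )
             .setGeneralShader( 0 );

  // Group 1: a triangles hit group referencing the closest-hit stage (index 1).
  vk::RayTracingShaderGroupCreateInfoNV hitGroup{};
  hitGroup.setType( vk::RayTracingShaderGroupTypeKHR::eTrianglesHitGroup )
          .setClosestHitShader( 1 );

  return { raygenGroup, hitGroup };
}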
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & + setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoNV & setGroups( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxRecursionDepth = maxRecursionDepth_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRayTracingPipelineCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRayTracingPipelineCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, stageCount, pStages, groupCount, pGroups, maxRecursionDepth, layout, basePipelineHandle, basePipelineIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingPipelineCreateInfoNV const & ) const = default; +#else + bool operator==( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && + ( groupCount == rhs.groupCount ) && ( pGroups == rhs.pGroups ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) && ( layout == rhs.layout ) && + ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); +# endif + } + + bool operator!=( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + uint32_t groupCount = {}; + const 
VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups = {}; + uint32_t maxRecursionDepth = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + }; + + template <> + struct CppType + { + using Type = RayTracingPipelineCreateInfoNV; + }; + + // wrapper struct for struct VkRefreshCycleDurationGOOGLE, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRefreshCycleDurationGOOGLE.html + struct RefreshCycleDurationGOOGLE + { + using NativeType = VkRefreshCycleDurationGOOGLE; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = {} ) VULKAN_HPP_NOEXCEPT : refreshDuration{ refreshDuration_ } {} + + VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + : RefreshCycleDurationGOOGLE( *reinterpret_cast( &rhs ) ) + { + } + + RefreshCycleDurationGOOGLE & operator=( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RefreshCycleDurationGOOGLE & operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkRefreshCycleDurationGOOGLE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRefreshCycleDurationGOOGLE const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRefreshCycleDurationGOOGLE *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( refreshDuration ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RefreshCycleDurationGOOGLE const & ) const = default; +#else + bool operator==( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( refreshDuration == rhs.refreshDuration ); +# endif + } + + bool operator!=( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint64_t refreshDuration = {}; + }; + + // wrapper struct for struct VkReleaseCapturedPipelineDataInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkReleaseCapturedPipelineDataInfoKHR.html + struct ReleaseCapturedPipelineDataInfoKHR + { + using NativeType = VkReleaseCapturedPipelineDataInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eReleaseCapturedPipelineDataInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ReleaseCapturedPipelineDataInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipeline{ pipeline_ } + { + } + + VULKAN_HPP_CONSTEXPR ReleaseCapturedPipelineDataInfoKHR( ReleaseCapturedPipelineDataInfoKHR const & rhs ) 
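// Illustrative sketch, not part of the generated header: assembles a
// RayTracingPipelineCreateInfoNV from previously created shader stages and the
// shader groups built in the previous sketch. The containers 'stages' and
// 'groups' and the 'pipelineLayout' handle are assumed to come from the
// caller; maxRecursionDepth = 1 is the minimum needed to trace rays from the
// ray generation shader.
#include <vector>
#include <vulkan/vulkan.hpp>

inline vk::RayTracingPipelineCreateInfoNV
  makeExampleRayTracingPipelineInfoNV( std::vector<vk::PipelineShaderStageCreateInfo> const &     stages,
                                       std::vector<vk::RayTracingShaderGroupCreateInfoNV> const & groups,
                                       vk::PipelineLayout                                         pipelineLayout )
{
  // Uses the ArrayProxyNoTemporaries convenience constructor defined above:
  // stageCount and groupCount are derived from the container sizes. The caller
  // must keep 'stages' and 'groups' alive while the create info is in use.
  return vk::RayTracingPipelineCreateInfoNV( {},               // flags
                                             stages,           // pStages / stageCount
                                             groups,           // pGroups / groupCount
                                             1,                // maxRecursionDepth
                                             pipelineLayout ); // layout
  // The result would typically be handed to vk::Device::createRayTracingPipelinesNV().
}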
VULKAN_HPP_NOEXCEPT = default; + + ReleaseCapturedPipelineDataInfoKHR( VkReleaseCapturedPipelineDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ReleaseCapturedPipelineDataInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + ReleaseCapturedPipelineDataInfoKHR & operator=( ReleaseCapturedPipelineDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ReleaseCapturedPipelineDataInfoKHR & operator=( VkReleaseCapturedPipelineDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ReleaseCapturedPipelineDataInfoKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ReleaseCapturedPipelineDataInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkReleaseCapturedPipelineDataInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkReleaseCapturedPipelineDataInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkReleaseCapturedPipelineDataInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkReleaseCapturedPipelineDataInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipeline ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ReleaseCapturedPipelineDataInfoKHR const & ) const = default; +#else + bool operator==( ReleaseCapturedPipelineDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ); +# endif + } + + bool operator!=( ReleaseCapturedPipelineDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eReleaseCapturedPipelineDataInfoKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + }; + + template <> + struct CppType + { + using Type = ReleaseCapturedPipelineDataInfoKHR; + }; + + // wrapper struct for struct VkReleaseSwapchainImagesInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkReleaseSwapchainImagesInfoEXT.html + struct ReleaseSwapchainImagesInfoEXT + { + using NativeType = VkReleaseSwapchainImagesInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eReleaseSwapchainImagesInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ReleaseSwapchainImagesInfoEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, + uint32_t imageIndexCount_ = {}, + const uint32_t * pImageIndices_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , swapchain{ swapchain_ } + , imageIndexCount{ imageIndexCount_ } + , pImageIndices{ pImageIndices_ } + { + } + + VULKAN_HPP_CONSTEXPR ReleaseSwapchainImagesInfoEXT( 
ReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ReleaseSwapchainImagesInfoEXT( VkReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ReleaseSwapchainImagesInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ReleaseSwapchainImagesInfoEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchain( swapchain_ ), imageIndexCount( static_cast( imageIndices_.size() ) ), pImageIndices( imageIndices_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ReleaseSwapchainImagesInfoEXT & operator=( ReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ReleaseSwapchainImagesInfoEXT & operator=( VkReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT + { + swapchain = swapchain_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setImageIndexCount( uint32_t imageIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + imageIndexCount = imageIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT + { + pImageIndices = pImageIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ReleaseSwapchainImagesInfoEXT & setImageIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_ ) VULKAN_HPP_NOEXCEPT + { + imageIndexCount = static_cast( imageIndices_.size() ); + pImageIndices = imageIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkReleaseSwapchainImagesInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkReleaseSwapchainImagesInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkReleaseSwapchainImagesInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkReleaseSwapchainImagesInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchain, imageIndexCount, pImageIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ReleaseSwapchainImagesInfoEXT const & ) const = default; +#else + bool operator==( ReleaseSwapchainImagesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && ( imageIndexCount == rhs.imageIndexCount ) && + ( pImageIndices == rhs.pImageIndices ); +# endif + } + + bool operator!=( ReleaseSwapchainImagesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + 
return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eReleaseSwapchainImagesInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + uint32_t imageIndexCount = {}; + const uint32_t * pImageIndices = {}; + }; + + template <> + struct CppType + { + using Type = ReleaseSwapchainImagesInfoEXT; + }; + + // wrapper struct for struct VkRenderPassAttachmentBeginInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassAttachmentBeginInfo.html + struct RenderPassAttachmentBeginInfo + { + using NativeType = VkRenderPassAttachmentBeginInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassAttachmentBeginInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , attachmentCount{ attachmentCount_ } + , pAttachments{ pAttachments_ } + { + } + + VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassAttachmentBeginInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassAttachmentBeginInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), attachmentCount( static_cast( attachments_.size() ) ), pAttachments( attachments_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassAttachmentBeginInfo & operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassAttachmentBeginInfo & operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassAttachmentBeginInfo & + setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderPassAttachmentBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassAttachmentBeginInfo const *() const 
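// Illustrative sketch, not part of the generated header: releases swapchain
// images that were acquired but will not be presented, using the
// ReleaseSwapchainImagesInfoEXT wrapper above (VK_EXT_swapchain_maintenance1).
// 'device', 'swapchain' and 'unusedImageIndices' are assumed to come from the
// caller, and the member function name follows the usual vulkan.hpp mapping of
// vkReleaseSwapchainImagesEXT.
#include <vector>
#include <vulkan/vulkan.hpp>

inline void releaseUnusedSwapchainImages( vk::Device                    device,
                                          vk::SwapchainKHR              swapchain,
                                          std::vector<uint32_t> const & unusedImageIndices )
{
  // The ArrayProxy constructor above fills imageIndexCount / pImageIndices in one go.
  vk::ReleaseSwapchainImagesInfoEXT releaseInfo( swapchain, unusedImageIndices );
  device.releaseSwapchainImagesEXT( releaseInfo );   // requires VK_EXT_swapchain_maintenance1 to be enabled
}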
VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderPassAttachmentBeginInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, attachmentCount, pAttachments ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassAttachmentBeginInfo const & ) const = default; +#else + bool operator==( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ); +# endif + } + + bool operator!=( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo; + const void * pNext = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassAttachmentBeginInfo; + }; + + using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo; + + // wrapper struct for struct VkRenderPassBeginInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassBeginInfo.html + struct RenderPassBeginInfo + { + using NativeType = VkRenderPassBeginInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, + uint32_t clearValueCount_ = {}, + const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , renderPass{ renderPass_ } + , framebuffer{ framebuffer_ } + , renderArea{ renderArea_ } + , clearValueCount{ clearValueCount_ } + , pClearValues{ pClearValues_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT : RenderPassBeginInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_, + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & clearValues_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , renderPass( renderPass_ ) + , framebuffer( framebuffer_ ) + , renderArea( renderArea_ ) + , clearValueCount( static_cast( clearValues_.size() ) ) + , pClearValues( clearValues_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return 
*this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT + { + renderPass = renderPass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT + { + framebuffer = framebuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT + { + renderArea = renderArea_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT + { + clearValueCount = clearValueCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ ) VULKAN_HPP_NOEXCEPT + { + pClearValues = pClearValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassBeginInfo & + setClearValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & clearValues_ ) VULKAN_HPP_NOEXCEPT + { + clearValueCount = static_cast( clearValues_.size() ); + pClearValues = clearValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassBeginInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderPassBeginInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, renderPass, framebuffer, renderArea, clearValueCount, pClearValues ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassBeginInfo const & ) const = default; +#else + bool operator==( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( framebuffer == rhs.framebuffer ) && + ( renderArea == rhs.renderArea ) && ( clearValueCount == rhs.clearValueCount ) && ( pClearValues == rhs.pClearValues ); +# endif + } + + bool operator!=( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; + VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; + uint32_t clearValueCount = {}; + const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassBeginInfo; + }; + + // wrapper struct for struct VkSubpassDescription, see 
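// Illustrative sketch, not part of the generated header: records a render pass
// begin using the RenderPassBeginInfo and RenderPassAttachmentBeginInfo
// wrappers above. RenderPassAttachmentBeginInfo is only needed when
// 'framebuffer' was created as an imageless framebuffer; it is chained through
// pNext to supply the actual attachment views at begin time. All handles and
// the render area extent are assumed to come from the caller.
#include <array>
#include <vulkan/vulkan.hpp>

inline void beginExampleRenderPass( vk::CommandBuffer cmd,
                                    vk::RenderPass    renderPass,
                                    vk::Framebuffer   framebuffer,   // assumed to be imageless
                                    vk::ImageView     colorView,
                                    vk::Extent2D      extent )
{
  vk::ClearValue clearColor( vk::ClearColorValue( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } ) );

  // Supplies the concrete attachment for the imageless framebuffer.
  vk::RenderPassAttachmentBeginInfo attachmentInfo( 1, &colorView );

  vk::RenderPassBeginInfo beginInfo( renderPass,
                                     framebuffer,
                                     vk::Rect2D( vk::Offset2D( 0, 0 ), extent ),
                                     1,
                                     &clearColor,
                                     &attachmentInfo );   // pNext -> imageless attachments

  cmd.beginRenderPass( beginInfo, vk::SubpassContents::eInline );
}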
https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassDescription.html + struct SubpassDescription + { + using NativeType = VkSubpassDescription; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t inputAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, + uint32_t preserveAttachmentCount_ = {}, + const uint32_t * pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT + : flags{ flags_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , inputAttachmentCount{ inputAttachmentCount_ } + , pInputAttachments{ pInputAttachments_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachments{ pColorAttachments_ } + , pResolveAttachments{ pResolveAttachments_ } + , pDepthStencilAttachment{ pDepthStencilAttachment_ } + , preserveAttachmentCount{ preserveAttachmentCount_ } + , pPreserveAttachments{ pPreserveAttachments_ } + { + } + + VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDescription( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ = {} ) + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , inputAttachmentCount( static_cast( inputAttachments_.size() ) ) + , pInputAttachments( inputAttachments_.data() ) + , colorAttachmentCount( static_cast( colorAttachments_.size() ) ) + , pColorAttachments( colorAttachments_.data() ) + , pResolveAttachments( resolveAttachments_.data() ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( static_cast( preserveAttachments_.size() ) ) + , pPreserveAttachments( preserveAttachments_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) ); +# else + if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::SubpassDescription::SubpassDescription: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubpassDescription & operator=( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubpassDescription & operator=( 
VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = inputAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pInputAttachments = pInputAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setInputAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = static_cast( inputAttachments_.size() ); + pInputAttachments = inputAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachments = pColorAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setColorAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachments_.size() ); + pColorAttachments = colorAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pResolveAttachments = pResolveAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setResolveAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( resolveAttachments_.size() ); + pResolveAttachments = resolveAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilAttachment = pDepthStencilAttachment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = preserveAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pPreserveAttachments = pPreserveAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) 
+ SubpassDescription & + setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = static_cast( preserveAttachments_.size() ); + pPreserveAttachments = preserveAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSubpassDescription const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDescription const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSubpassDescription *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( flags, + pipelineBindPoint, + inputAttachmentCount, + pInputAttachments, + colorAttachmentCount, + pColorAttachments, + pResolveAttachments, + pDepthStencilAttachment, + preserveAttachmentCount, + pPreserveAttachments ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDescription const & ) const = default; +#else + bool operator==( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( inputAttachmentCount == rhs.inputAttachmentCount ) && + ( pInputAttachments == rhs.pInputAttachments ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachments == rhs.pColorAttachments ) && ( pResolveAttachments == rhs.pResolveAttachments ) && + ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) && + ( pPreserveAttachments == rhs.pPreserveAttachments ); +# endif + } + + bool operator!=( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t inputAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment = {}; + uint32_t preserveAttachmentCount = {}; + const uint32_t * pPreserveAttachments = {}; + }; + + // wrapper struct for struct VkSubpassDependency, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassDependency.html + struct SubpassDependency + { + using NativeType = VkSubpassDependency; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDependency( uint32_t srcSubpass_ = {}, + uint32_t dstSubpass_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::DependencyFlags 
dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubpass{ srcSubpass_ } + , dstSubpass{ dstSubpass_ } + , srcStageMask{ srcStageMask_ } + , dstStageMask{ dstStageMask_ } + , srcAccessMask{ srcAccessMask_ } + , dstAccessMask{ dstAccessMask_ } + , dependencyFlags{ dependencyFlags_ } + { + } + + VULKAN_HPP_CONSTEXPR SubpassDependency( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDependency( *reinterpret_cast( &rhs ) ) {} + + SubpassDependency & operator=( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubpassDependency & operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT + { + srcSubpass = srcSubpass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT + { + dstSubpass = dstSubpass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT + { + dependencyFlags = dependencyFlags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSubpassDependency const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDependency const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSubpassDependency *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDependency const & ) const = default; +#else + bool operator==( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) && ( srcStageMask == rhs.srcStageMask ) && + ( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) && + ( dependencyFlags == rhs.dependencyFlags ); +# endif + 
} + + bool operator!=( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t srcSubpass = {}; + uint32_t dstSubpass = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + }; + + // wrapper struct for struct VkRenderPassCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassCreateInfo.html + struct RenderPassCreateInfo + { + using NativeType = VkRenderPassCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ = {}, + uint32_t subpassCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ = {}, + uint32_t dependencyCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , attachmentCount{ attachmentCount_ } + , pAttachments{ pAttachments_ } + , subpassCount{ subpassCount_ } + , pSubpasses{ pSubpasses_ } + , dependencyCount{ dependencyCount_ } + , pDependencies{ pDependencies_ } + { + } + + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , subpassCount( static_cast( subpasses_.size() ) ) + , pSubpasses( subpasses_.data() ) + , dependencyCount( static_cast( dependencies_.size() ) ) + , pDependencies( dependencies_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassCreateInfo & operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassCreateInfo & operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + 
attachmentCount = attachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo & setAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = subpassCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ ) VULKAN_HPP_NOEXCEPT + { + pSubpasses = pSubpasses_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo & + setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = static_cast( subpasses_.size() ); + pSubpasses = subpasses_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = dependencyCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ ) VULKAN_HPP_NOEXCEPT + { + pDependencies = pDependencies_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo & + setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = static_cast( dependencies_.size() ); + pDependencies = dependencies_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderPassCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderPassCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, attachmentCount, pAttachments, subpassCount, pSubpasses, dependencyCount, pDependencies ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassCreateInfo const & ) const = default; +#else + bool operator==( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( attachmentCount == rhs.attachmentCount ) && + ( pAttachments == rhs.pAttachments ) && ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) && + ( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies ); +# endif + } + + bool operator!=( RenderPassCreateInfo const & rhs ) 
const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments = {}; + uint32_t subpassCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses = {}; + uint32_t dependencyCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassCreateInfo; + }; + + // wrapper struct for struct VkSubpassDescription2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassDescription2.html + struct SubpassDescription2 + { + using NativeType = VkSubpassDescription2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescription2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t viewMask_ = {}, + uint32_t inputAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {}, + uint32_t preserveAttachmentCount_ = {}, + const uint32_t * pPreserveAttachments_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , viewMask{ viewMask_ } + , inputAttachmentCount{ inputAttachmentCount_ } + , pInputAttachments{ pInputAttachments_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachments{ pColorAttachments_ } + , pResolveAttachments{ pResolveAttachments_ } + , pDepthStencilAttachment{ pDepthStencilAttachment_ } + , preserveAttachmentCount{ preserveAttachmentCount_ } + , pPreserveAttachments{ pPreserveAttachments_ } + { + } + + VULKAN_HPP_CONSTEXPR SubpassDescription2( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDescription2( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , viewMask( viewMask_ ) + , inputAttachmentCount( static_cast( inputAttachments_.size() ) ) + , pInputAttachments( inputAttachments_.data() ) + , 
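// Illustrative sketch, not part of the generated header: builds a minimal
// single-subpass render pass from the SubpassDescription, SubpassDependency and
// RenderPassCreateInfo wrappers above. 'device' and 'colorFormat' are assumed
// to come from the caller; the EXTERNAL -> 0 dependency orders the color write
// against whatever preceded the render pass.
#include <vulkan/vulkan.hpp>

inline vk::RenderPass createExampleRenderPass( vk::Device device, vk::Format colorFormat )
{
  vk::AttachmentDescription colorAttachment( {},                                  // flags
                                             colorFormat,
                                             vk::SampleCountFlagBits::e1,
                                             vk::AttachmentLoadOp::eClear,
                                             vk::AttachmentStoreOp::eStore,
                                             vk::AttachmentLoadOp::eDontCare,     // stencil load
                                             vk::AttachmentStoreOp::eDontCare,    // stencil store
                                             vk::ImageLayout::eUndefined,
                                             vk::ImageLayout::ePresentSrcKHR );

  vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );

  vk::SubpassDescription subpass{};
  subpass.setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
         .setColorAttachmentCount( 1 )
         .setPColorAttachments( &colorRef );

  vk::SubpassDependency dependency( VK_SUBPASS_EXTERNAL,
                                    0,
                                    vk::PipelineStageFlagBits::eColorAttachmentOutput,
                                    vk::PipelineStageFlagBits::eColorAttachmentOutput,
                                    {},                                           // srcAccessMask
                                    vk::AccessFlagBits::eColorAttachmentWrite );

  vk::RenderPassCreateInfo createInfo( {}, 1, &colorAttachment, 1, &subpass, 1, &dependency );
  return device.createRenderPass( createInfo );
}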
colorAttachmentCount( static_cast( colorAttachments_.size() ) ) + , pColorAttachments( colorAttachments_.data() ) + , pResolveAttachments( resolveAttachments_.data() ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( static_cast( preserveAttachments_.size() ) ) + , pPreserveAttachments( preserveAttachments_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) ); +# else + if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::SubpassDescription2::SubpassDescription2: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubpassDescription2 & operator=( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubpassDescription2 & operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + { + viewMask = viewMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = inputAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pInputAttachments = pInputAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2 & setInputAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = static_cast( inputAttachments_.size() ); + pInputAttachments = inputAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachments = pColorAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2 & setColorAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachments_.size() ); + pColorAttachments = colorAttachments_.data(); + return *this; + } +# endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pResolveAttachments = pResolveAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2 & setResolveAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( resolveAttachments_.size() ); + pResolveAttachments = resolveAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilAttachment = pDepthStencilAttachment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = preserveAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pPreserveAttachments = pPreserveAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2 & + setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = static_cast( preserveAttachments_.size() ); + pPreserveAttachments = preserveAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSubpassDescription2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDescription2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSubpassDescription2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + pipelineBindPoint, + viewMask, + inputAttachmentCount, + pInputAttachments, + colorAttachmentCount, + pColorAttachments, + pResolveAttachments, + pDepthStencilAttachment, + preserveAttachmentCount, + pPreserveAttachments ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDescription2 const & ) const = default; +#else + bool operator==( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && + ( viewMask == rhs.viewMask ) && ( inputAttachmentCount == rhs.inputAttachmentCount ) && ( pInputAttachments == rhs.pInputAttachments ) && + ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == rhs.pColorAttachments ) && + ( pResolveAttachments == rhs.pResolveAttachments ) && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) && + ( preserveAttachmentCount == rhs.preserveAttachmentCount ) && ( pPreserveAttachments == 
rhs.pPreserveAttachments ); +# endif + } + + bool operator!=( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t viewMask = {}; + uint32_t inputAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment = {}; + uint32_t preserveAttachmentCount = {}; + const uint32_t * pPreserveAttachments = {}; + }; + + template <> + struct CppType + { + using Type = SubpassDescription2; + }; + + using SubpassDescription2KHR = SubpassDescription2; + + // wrapper struct for struct VkSubpassDependency2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassDependency2.html + struct SubpassDependency2 + { + using NativeType = VkSubpassDependency2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDependency2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDependency2( uint32_t srcSubpass_ = {}, + uint32_t dstSubpass_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, + int32_t viewOffset_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcSubpass{ srcSubpass_ } + , dstSubpass{ dstSubpass_ } + , srcStageMask{ srcStageMask_ } + , dstStageMask{ dstStageMask_ } + , srcAccessMask{ srcAccessMask_ } + , dstAccessMask{ dstAccessMask_ } + , dependencyFlags{ dependencyFlags_ } + , viewOffset{ viewOffset_ } + { + } + + VULKAN_HPP_CONSTEXPR SubpassDependency2( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDependency2( *reinterpret_cast( &rhs ) ) {} + + SubpassDependency2 & operator=( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubpassDependency2 & operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT + { + srcSubpass = srcSubpass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT + { + dstSubpass = dstSubpass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) 
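// Usage sketch (illustrative only; 'colorRefs' and 'depthRef' are assumed to be a container of
// vk::AttachmentReference2 and a single vk::AttachmentReference2 built elsewhere; vk:: is the
// default VULKAN_HPP_NAMESPACE): the enhanced-mode ArrayProxyNoTemporaries constructor of the
// SubpassDescription2 wrapper above derives colorAttachmentCount from the container and
// asserts/throws if a non-empty resolve list differs in size from the color list.
//
//   vk::SubpassDescription2 subpass( {},                                  // flags
//                                    vk::PipelineBindPoint::eGraphics,
//                                    0,                                   // viewMask
//                                    {},                                  // input attachments
//                                    colorRefs,                           // color attachments
//                                    {},                                  // resolve attachments
//                                    &depthRef );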
VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT + { + dependencyFlags = dependencyFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT + { + viewOffset = viewOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSubpassDependency2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDependency2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSubpassDependency2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags, viewOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDependency2 const & ) const = default; +#else + bool operator==( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) && + ( srcStageMask == rhs.srcStageMask ) && ( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( dependencyFlags == rhs.dependencyFlags ) && ( viewOffset == rhs.viewOffset ); +# endif + } + + bool operator!=( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2; + const void * pNext = {}; + uint32_t srcSubpass = {}; + uint32_t dstSubpass = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + int32_t viewOffset = {}; + }; + + template <> + struct CppType + { + using Type = SubpassDependency2; + }; + + using SubpassDependency2KHR = SubpassDependency2; + + // wrapper struct for struct VkRenderPassCreateInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassCreateInfo2.html + struct RenderPassCreateInfo2 + { + using NativeType = VkRenderPassCreateInfo2; + + static const bool allowDuplicate = false; + static 
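// Usage sketch (illustrative stage/access values only): the chained setters of the
// SubpassDependency2 wrapper above make the common "external -> subpass 0" dependency compact.
//
//   vk::SubpassDependency2 dependency = vk::SubpassDependency2{}
//                                         .setSrcSubpass( VK_SUBPASS_EXTERNAL )
//                                         .setDstSubpass( 0 )
//                                         .setSrcStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
//                                         .setDstStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
//                                         .setDstAccessMask( vk::AccessFlagBits::eColorAttachmentWrite );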
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ = {}, + uint32_t subpassCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ = {}, + uint32_t dependencyCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ = {}, + uint32_t correlatedViewMaskCount_ = {}, + const uint32_t * pCorrelatedViewMasks_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , attachmentCount{ attachmentCount_ } + , pAttachments{ pAttachments_ } + , subpassCount{ subpassCount_ } + , pSubpasses{ pSubpasses_ } + , dependencyCount{ dependencyCount_ } + , pDependencies{ pDependencies_ } + , correlatedViewMaskCount{ correlatedViewMaskCount_ } + , pCorrelatedViewMasks{ pCorrelatedViewMasks_ } + { + } + + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassCreateInfo2( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlatedViewMasks_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , subpassCount( static_cast( subpasses_.size() ) ) + , pSubpasses( subpasses_.data() ) + , dependencyCount( static_cast( dependencies_.size() ) ) + , pDependencies( dependencies_.data() ) + , correlatedViewMaskCount( static_cast( correlatedViewMasks_.size() ) ) + , pCorrelatedViewMasks( correlatedViewMasks_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassCreateInfo2 & operator=( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassCreateInfo2 & operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & 
setAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = subpassCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ ) VULKAN_HPP_NOEXCEPT + { + pSubpasses = pSubpasses_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & + setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = static_cast( subpasses_.size() ); + pSubpasses = subpasses_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = dependencyCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ ) VULKAN_HPP_NOEXCEPT + { + pDependencies = pDependencies_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & + setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = static_cast( dependencies_.size() ); + pDependencies = dependencies_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT + { + correlatedViewMaskCount = correlatedViewMaskCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t * pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT + { + pCorrelatedViewMasks = pCorrelatedViewMasks_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & + setCorrelatedViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlatedViewMasks_ ) VULKAN_HPP_NOEXCEPT + { + correlatedViewMaskCount = static_cast( correlatedViewMasks_.size() ); + pCorrelatedViewMasks = correlatedViewMasks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderPassCreateInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassCreateInfo2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderPassCreateInfo2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + attachmentCount, + pAttachments, + subpassCount, + pSubpasses, + dependencyCount, + pDependencies, + correlatedViewMaskCount, + pCorrelatedViewMasks ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassCreateInfo2 const & ) const = default; +#else + bool operator==( 
RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( attachmentCount == rhs.attachmentCount ) && + ( pAttachments == rhs.pAttachments ) && ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) && + ( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies ) && + ( correlatedViewMaskCount == rhs.correlatedViewMaskCount ) && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks ); +# endif + } + + bool operator!=( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments = {}; + uint32_t subpassCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses = {}; + uint32_t dependencyCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies = {}; + uint32_t correlatedViewMaskCount = {}; + const uint32_t * pCorrelatedViewMasks = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassCreateInfo2; + }; + + using RenderPassCreateInfo2KHR = RenderPassCreateInfo2; + + // wrapper struct for struct VkRenderPassCreationControlEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassCreationControlEXT.html + struct RenderPassCreationControlEXT + { + using NativeType = VkRenderPassCreationControlEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreationControlEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassCreationControlEXT( VULKAN_HPP_NAMESPACE::Bool32 disallowMerging_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , disallowMerging{ disallowMerging_ } + { + } + + VULKAN_HPP_CONSTEXPR RenderPassCreationControlEXT( RenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreationControlEXT( VkRenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassCreationControlEXT( *reinterpret_cast( &rhs ) ) + { + } + + RenderPassCreationControlEXT & operator=( RenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassCreationControlEXT & operator=( VkRenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassCreationControlEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreationControlEXT & setDisallowMerging( VULKAN_HPP_NAMESPACE::Bool32 disallowMerging_ ) VULKAN_HPP_NOEXCEPT + { + disallowMerging = disallowMerging_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderPassCreationControlEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassCreationControlEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( 
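// Usage sketch (assumes 'attachments', 'subpasses' and 'dependencies' are std::vector instances of
// the corresponding *2 wrapper types and 'device' is an existing vk::Device, using the default
// exception-enabled configuration): the enhanced-mode constructor of the RenderPassCreateInfo2
// wrapper above derives attachmentCount, subpassCount and dependencyCount from the containers.
//
//   vk::RenderPassCreateInfo2 createInfo( {}, attachments, subpasses, dependencies );
//   vk::RenderPass            renderPass = device.createRenderPass2( createInfo );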
this );
+    }
+
+    operator VkRenderPassCreationControlEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkRenderPassCreationControlEXT *>( this );
+    }
+
+    operator VkRenderPassCreationControlEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkRenderPassCreationControlEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, disallowMerging );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( RenderPassCreationControlEXT const & ) const = default;
+#else
+    bool operator==( RenderPassCreationControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( disallowMerging == rhs.disallowMerging );
+#  endif
+    }
+
+    bool operator!=( RenderPassCreationControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType           = StructureType::eRenderPassCreationControlEXT;
+    const void *                        pNext           = {};
+    VULKAN_HPP_NAMESPACE::Bool32        disallowMerging = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassCreationControlEXT>
+  {
+    using Type = RenderPassCreationControlEXT;
+  };
+
+  // wrapper struct for struct VkRenderPassCreationFeedbackInfoEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassCreationFeedbackInfoEXT.html
+  struct RenderPassCreationFeedbackInfoEXT
+  {
+    using NativeType = VkRenderPassCreationFeedbackInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackInfoEXT( uint32_t postMergeSubpassCount_ = {} ) VULKAN_HPP_NOEXCEPT
+      : postMergeSubpassCount{ postMergeSubpassCount_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackInfoEXT( RenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassCreationFeedbackInfoEXT( VkRenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassCreationFeedbackInfoEXT( *reinterpret_cast<RenderPassCreationFeedbackInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    RenderPassCreationFeedbackInfoEXT & operator=( RenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    RenderPassCreationFeedbackInfoEXT & operator=( VkRenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkRenderPassCreationFeedbackInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassCreationFeedbackInfoEXT *>( this );
+    }
+
+    operator VkRenderPassCreationFeedbackInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassCreationFeedbackInfoEXT *>( this );
+    }
+
+    operator VkRenderPassCreationFeedbackInfoEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkRenderPassCreationFeedbackInfoEXT *>( this );
+    }
+
+    operator VkRenderPassCreationFeedbackInfoEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkRenderPassCreationFeedbackInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( postMergeSubpassCount );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( RenderPassCreationFeedbackInfoEXT const & ) const = default;
+#else
+    bool operator==( RenderPassCreationFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() ==
rhs.reflect(); +# else + return ( postMergeSubpassCount == rhs.postMergeSubpassCount ); +# endif + } + + bool operator!=( RenderPassCreationFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t postMergeSubpassCount = {}; + }; + + // wrapper struct for struct VkRenderPassCreationFeedbackCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassCreationFeedbackCreateInfoEXT.html + struct RenderPassCreationFeedbackCreateInfoEXT + { + using NativeType = VkRenderPassCreationFeedbackCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreationFeedbackCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pRenderPassFeedback{ pRenderPassFeedback_ } + { + } + + VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackCreateInfoEXT( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreationFeedbackCreateInfoEXT( VkRenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassCreationFeedbackCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + RenderPassCreationFeedbackCreateInfoEXT & operator=( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassCreationFeedbackCreateInfoEXT & operator=( VkRenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassCreationFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreationFeedbackCreateInfoEXT & + setPRenderPassFeedback( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback_ ) VULKAN_HPP_NOEXCEPT + { + pRenderPassFeedback = pRenderPassFeedback_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderPassCreationFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassCreationFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassCreationFeedbackCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderPassCreationFeedbackCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pRenderPassFeedback ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassCreationFeedbackCreateInfoEXT const & ) const = default; +#else + bool operator==( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( pRenderPassFeedback == rhs.pRenderPassFeedback ); +# endif + } + + bool operator!=( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreationFeedbackCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassCreationFeedbackCreateInfoEXT; + }; + + // wrapper struct for struct VkRenderPassFragmentDensityMapCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassFragmentDensityMapCreateInfoEXT.html + struct RenderPassFragmentDensityMapCreateInfoEXT + { + using NativeType = VkRenderPassFragmentDensityMapCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentDensityMapAttachment{ fragmentDensityMapAttachment_ } + { + } + + VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassFragmentDensityMapCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + RenderPassFragmentDensityMapCreateInfoEXT & operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassFragmentDensityMapCreateInfoEXT & operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & + setFragmentDensityMapAttachment( VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapAttachment = fragmentDensityMapAttachment_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderPassFragmentDensityMapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassFragmentDensityMapCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderPassFragmentDensityMapCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityMapAttachment ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const & ) const = default; +#else + bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment ); +# endif + } + + bool operator!=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassFragmentDensityMapCreateInfoEXT; + }; + + // wrapper struct for struct VkRenderPassFragmentDensityMapOffsetEndInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassFragmentDensityMapOffsetEndInfoEXT.html + struct RenderPassFragmentDensityMapOffsetEndInfoEXT + { + using NativeType = VkRenderPassFragmentDensityMapOffsetEndInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassFragmentDensityMapOffsetEndInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapOffsetEndInfoEXT( uint32_t fragmentDensityOffsetCount_ = {}, + const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentDensityOffsetCount{ fragmentDensityOffsetCount_ } + , pFragmentDensityOffsets{ pFragmentDensityOffsets_ } + { + } + + VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapOffsetEndInfoEXT( RenderPassFragmentDensityMapOffsetEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassFragmentDensityMapOffsetEndInfoEXT( VkRenderPassFragmentDensityMapOffsetEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassFragmentDensityMapOffsetEndInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassFragmentDensityMapOffsetEndInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fragmentDensityOffsets_, const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , fragmentDensityOffsetCount( static_cast( fragmentDensityOffsets_.size() ) ) + , pFragmentDensityOffsets( fragmentDensityOffsets_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassFragmentDensityMapOffsetEndInfoEXT & operator=( RenderPassFragmentDensityMapOffsetEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassFragmentDensityMapOffsetEndInfoEXT & operator=( VkRenderPassFragmentDensityMapOffsetEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapOffsetEndInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapOffsetEndInfoEXT & + setFragmentDensityOffsetCount( uint32_t fragmentDensityOffsetCount_ ) VULKAN_HPP_NOEXCEPT + 
{ + fragmentDensityOffsetCount = fragmentDensityOffsetCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapOffsetEndInfoEXT & + setPFragmentDensityOffsets( const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pFragmentDensityOffsets = pFragmentDensityOffsets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassFragmentDensityMapOffsetEndInfoEXT & setFragmentDensityOffsets( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityOffsetCount = static_cast( fragmentDensityOffsets_.size() ); + pFragmentDensityOffsets = fragmentDensityOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderPassFragmentDensityMapOffsetEndInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassFragmentDensityMapOffsetEndInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassFragmentDensityMapOffsetEndInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderPassFragmentDensityMapOffsetEndInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityOffsetCount, pFragmentDensityOffsets ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassFragmentDensityMapOffsetEndInfoEXT const & ) const = default; +#else + bool operator==( RenderPassFragmentDensityMapOffsetEndInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityOffsetCount == rhs.fragmentDensityOffsetCount ) && + ( pFragmentDensityOffsets == rhs.pFragmentDensityOffsets ); +# endif + } + + bool operator!=( RenderPassFragmentDensityMapOffsetEndInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapOffsetEndInfoEXT; + const void * pNext = {}; + uint32_t fragmentDensityOffsetCount = {}; + const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassFragmentDensityMapOffsetEndInfoEXT; + }; + + using SubpassFragmentDensityMapOffsetEndInfoQCOM = RenderPassFragmentDensityMapOffsetEndInfoEXT; + + // wrapper struct for struct VkRenderPassInputAttachmentAspectCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassInputAttachmentAspectCreateInfo.html + struct RenderPassInputAttachmentAspectCreateInfo + { + using NativeType = VkRenderPassInputAttachmentAspectCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassInputAttachmentAspectCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = {}, + const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * 
pAspectReferences_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , aspectReferenceCount{ aspectReferenceCount_ } + , pAspectReferences{ pAspectReferences_ } + { + } + + VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassInputAttachmentAspectCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassInputAttachmentAspectCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & aspectReferences_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), aspectReferenceCount( static_cast( aspectReferences_.size() ) ), pAspectReferences( aspectReferences_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassInputAttachmentAspectCreateInfo & operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassInputAttachmentAspectCreateInfo & operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT + { + aspectReferenceCount = aspectReferenceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & + setPAspectReferences( const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ ) VULKAN_HPP_NOEXCEPT + { + pAspectReferences = pAspectReferences_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassInputAttachmentAspectCreateInfo & setAspectReferences( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & aspectReferences_ ) VULKAN_HPP_NOEXCEPT + { + aspectReferenceCount = static_cast( aspectReferences_.size() ); + pAspectReferences = aspectReferences_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderPassInputAttachmentAspectCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassInputAttachmentAspectCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderPassInputAttachmentAspectCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, aspectReferenceCount, pAspectReferences ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const & ) const = default; +#else + bool operator==( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( 
VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aspectReferenceCount == rhs.aspectReferenceCount ) && + ( pAspectReferences == rhs.pAspectReferences ); +# endif + } + + bool operator!=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo; + const void * pNext = {}; + uint32_t aspectReferenceCount = {}; + const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassInputAttachmentAspectCreateInfo; + }; + + using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo; + + // wrapper struct for struct VkRenderPassMultiviewCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassMultiviewCreateInfo.html + struct RenderPassMultiviewCreateInfo + { + using NativeType = VkRenderPassMultiviewCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassMultiviewCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = {}, + const uint32_t * pViewMasks_ = {}, + uint32_t dependencyCount_ = {}, + const int32_t * pViewOffsets_ = {}, + uint32_t correlationMaskCount_ = {}, + const uint32_t * pCorrelationMasks_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , subpassCount{ subpassCount_ } + , pViewMasks{ pViewMasks_ } + , dependencyCount{ dependencyCount_ } + , pViewOffsets{ pViewOffsets_ } + , correlationMaskCount{ correlationMaskCount_ } + , pCorrelationMasks{ pCorrelationMasks_ } + { + } + + VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassMultiviewCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassMultiviewCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewMasks_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewOffsets_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlationMasks_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , subpassCount( static_cast( viewMasks_.size() ) ) + , pViewMasks( viewMasks_.data() ) + , dependencyCount( static_cast( viewOffsets_.size() ) ) + , pViewOffsets( viewOffsets_.data() ) + , correlationMaskCount( static_cast( correlationMasks_.size() ) ) + , pCorrelationMasks( correlationMasks_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassMultiviewCreateInfo & operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassMultiviewCreateInfo & operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + 
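// Usage sketch (assumes 'renderPassCreateInfo' is a vk::RenderPassCreateInfo built elsewhere):
// the RenderPassInputAttachmentAspectCreateInfo wrapper above is chained via pNext to declare
// which aspects of an input attachment are read, here the depth aspect of input attachment 0
// in subpass 0.
//
//   vk::InputAttachmentAspectReference            aspectRef( 0, 0, vk::ImageAspectFlagBits::eDepth );
//   vk::RenderPassInputAttachmentAspectCreateInfo aspectInfo( aspectRef );
//   renderPassCreateInfo.setPNext( &aspectInfo );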
pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = subpassCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t * pViewMasks_ ) VULKAN_HPP_NOEXCEPT + { + pViewMasks = pViewMasks_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassMultiviewCreateInfo & setViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewMasks_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = static_cast( viewMasks_.size() ); + pViewMasks = viewMasks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = dependencyCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t * pViewOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pViewOffsets = pViewOffsets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassMultiviewCreateInfo & setViewOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewOffsets_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = static_cast( viewOffsets_.size() ); + pViewOffsets = viewOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT + { + correlationMaskCount = correlationMaskCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t * pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT + { + pCorrelationMasks = pCorrelationMasks_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassMultiviewCreateInfo & + setCorrelationMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlationMasks_ ) VULKAN_HPP_NOEXCEPT + { + correlationMaskCount = static_cast( correlationMasks_.size() ); + pCorrelationMasks = correlationMasks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderPassMultiviewCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassMultiviewCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderPassMultiviewCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, subpassCount, pViewMasks, dependencyCount, pViewOffsets, correlationMaskCount, pCorrelationMasks ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassMultiviewCreateInfo const & ) const = default; +#else + bool operator==( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassCount == rhs.subpassCount ) && ( pViewMasks == rhs.pViewMasks ) && + ( dependencyCount 
== rhs.dependencyCount ) && ( pViewOffsets == rhs.pViewOffsets ) && ( correlationMaskCount == rhs.correlationMaskCount ) && + ( pCorrelationMasks == rhs.pCorrelationMasks ); +# endif + } + + bool operator!=( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo; + const void * pNext = {}; + uint32_t subpassCount = {}; + const uint32_t * pViewMasks = {}; + uint32_t dependencyCount = {}; + const int32_t * pViewOffsets = {}; + uint32_t correlationMaskCount = {}; + const uint32_t * pCorrelationMasks = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassMultiviewCreateInfo; + }; + + using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo; + + // wrapper struct for struct VkSubpassSampleLocationsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassSampleLocationsEXT.html + struct SubpassSampleLocationsEXT + { + using NativeType = VkSubpassSampleLocationsEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( uint32_t subpassIndex_ = {}, + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : subpassIndex{ subpassIndex_ } + , sampleLocationsInfo{ sampleLocationsInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassSampleLocationsEXT( *reinterpret_cast( &rhs ) ) + { + } + + SubpassSampleLocationsEXT & operator=( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubpassSampleLocationsEXT & operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & setSubpassIndex( uint32_t subpassIndex_ ) VULKAN_HPP_NOEXCEPT + { + subpassIndex = subpassIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & + setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSubpassSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassSampleLocationsEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSubpassSampleLocationsEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( subpassIndex, sampleLocationsInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassSampleLocationsEXT const & ) const = default; +#else + bool operator==( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() 
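// Usage sketch (illustrative masks; 'renderPassCreateInfo' is an assumed vk::RenderPassCreateInfo,
// i.e. the non-*2 variant this structure extends): the enhanced-mode constructor of the
// RenderPassMultiviewCreateInfo wrapper above takes the per-subpass view masks as an ArrayProxy
// and derives subpassCount, here two subpasses each rendering to views 0 and 1.
//
//   const uint32_t viewMasks[] = { 0x3, 0x3 };
//   vk::RenderPassMultiviewCreateInfo multiviewInfo( viewMasks );
//   renderPassCreateInfo.setPNext( &multiviewInfo );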
== rhs.reflect(); +# else + return ( subpassIndex == rhs.subpassIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); +# endif + } + + bool operator!=( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t subpassIndex = {}; + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; + }; + + // wrapper struct for struct VkRenderPassSampleLocationsBeginInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassSampleLocationsBeginInfoEXT.html + struct RenderPassSampleLocationsBeginInfoEXT + { + using NativeType = VkRenderPassSampleLocationsBeginInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSampleLocationsBeginInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ = {}, + uint32_t postSubpassSampleLocationsCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , attachmentInitialSampleLocationsCount{ attachmentInitialSampleLocationsCount_ } + , pAttachmentInitialSampleLocations{ pAttachmentInitialSampleLocations_ } + , postSubpassSampleLocationsCount{ postSubpassSampleLocationsCount_ } + , pPostSubpassSampleLocations{ pPostSubpassSampleLocations_ } + { + } + + VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassSampleLocationsBeginInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassSampleLocationsBeginInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentInitialSampleLocations_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & postSubpassSampleLocations_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , attachmentInitialSampleLocationsCount( static_cast( attachmentInitialSampleLocations_.size() ) ) + , pAttachmentInitialSampleLocations( attachmentInitialSampleLocations_.data() ) + , postSubpassSampleLocationsCount( static_cast( postSubpassSampleLocations_.size() ) ) + , pPostSubpassSampleLocations( postSubpassSampleLocations_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassSampleLocationsBeginInfoEXT & operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassSampleLocationsBeginInfoEXT & operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & + setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) 
VULKAN_HPP_NOEXCEPT + { + attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & + setPAttachmentInitialSampleLocations( const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentInitialSampleLocations_ ) + VULKAN_HPP_NOEXCEPT + { + attachmentInitialSampleLocationsCount = static_cast( attachmentInitialSampleLocations_.size() ); + pAttachmentInitialSampleLocations = attachmentInitialSampleLocations_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & + setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT + { + postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & + setPPostSubpassSampleLocations( const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + pPostSubpassSampleLocations = pPostSubpassSampleLocations_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & postSubpassSampleLocations_ ) + VULKAN_HPP_NOEXCEPT + { + postSubpassSampleLocationsCount = static_cast( postSubpassSampleLocations_.size() ); + pPostSubpassSampleLocations = postSubpassSampleLocations_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderPassSampleLocationsBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassSampleLocationsBeginInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderPassSampleLocationsBeginInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, attachmentInitialSampleLocationsCount, pAttachmentInitialSampleLocations, postSubpassSampleLocationsCount, pPostSubpassSampleLocations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const & ) const = default; +#else + bool operator==( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount ) && + ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations ) && + ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount ) && ( pPostSubpassSampleLocations == 
rhs.pPostSubpassSampleLocations ); +# endif + } + + bool operator!=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT; + const void * pNext = {}; + uint32_t attachmentInitialSampleLocationsCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations = {}; + uint32_t postSubpassSampleLocationsCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassSampleLocationsBeginInfoEXT; + }; + + // wrapper struct for struct VkRenderPassStripeInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassStripeInfoARM.html + struct RenderPassStripeInfoARM + { + using NativeType = VkRenderPassStripeInfoARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassStripeInfoARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassStripeInfoARM( VULKAN_HPP_NAMESPACE::Rect2D stripeArea_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stripeArea{ stripeArea_ } + { + } + + VULKAN_HPP_CONSTEXPR RenderPassStripeInfoARM( RenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassStripeInfoARM( VkRenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassStripeInfoARM( *reinterpret_cast( &rhs ) ) + { + } + + RenderPassStripeInfoARM & operator=( RenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassStripeInfoARM & operator=( VkRenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeInfoARM & setStripeArea( VULKAN_HPP_NAMESPACE::Rect2D const & stripeArea_ ) VULKAN_HPP_NOEXCEPT + { + stripeArea = stripeArea_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderPassStripeInfoARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassStripeInfoARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassStripeInfoARM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderPassStripeInfoARM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stripeArea ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassStripeInfoARM const & ) const = default; +#else + bool operator==( RenderPassStripeInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stripeArea == rhs.stripeArea ); +# endif + } + + bool 
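// Usage sketch (assumes VK_EXT_sample_locations is enabled, 'attachmentLocations' and
// 'subpassLocations' are std::vector<vk::AttachmentSampleLocationsEXT> and
// std::vector<vk::SubpassSampleLocationsEXT> built elsewhere, and 'renderPassBeginInfo' is a
// vk::RenderPassBeginInfo): the RenderPassSampleLocationsBeginInfoEXT wrapper above is chained
// via pNext to override sample locations per attachment clear and per subpass.
//
//   vk::RenderPassSampleLocationsBeginInfoEXT sampleLocationsInfo( attachmentLocations, subpassLocations );
//   renderPassBeginInfo.setPNext( &sampleLocationsInfo );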
operator!=( RenderPassStripeInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassStripeInfoARM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Rect2D stripeArea = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassStripeInfoARM; + }; + + // wrapper struct for struct VkRenderPassStripeBeginInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassStripeBeginInfoARM.html + struct RenderPassStripeBeginInfoARM + { + using NativeType = VkRenderPassStripeBeginInfoARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassStripeBeginInfoARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassStripeBeginInfoARM( uint32_t stripeInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM * pStripeInfos_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stripeInfoCount{ stripeInfoCount_ } + , pStripeInfos{ pStripeInfos_ } + { + } + + VULKAN_HPP_CONSTEXPR RenderPassStripeBeginInfoARM( RenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassStripeBeginInfoARM( VkRenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassStripeBeginInfoARM( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassStripeBeginInfoARM( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stripeInfos_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), stripeInfoCount( static_cast( stripeInfos_.size() ) ), pStripeInfos( stripeInfos_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassStripeBeginInfoARM & operator=( RenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassStripeBeginInfoARM & operator=( VkRenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeBeginInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeBeginInfoARM & setStripeInfoCount( uint32_t stripeInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + stripeInfoCount = stripeInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeBeginInfoARM & + setPStripeInfos( const VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM * pStripeInfos_ ) VULKAN_HPP_NOEXCEPT + { + pStripeInfos = pStripeInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassStripeBeginInfoARM & setStripeInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stripeInfos_ ) VULKAN_HPP_NOEXCEPT + { + stripeInfoCount = static_cast( stripeInfos_.size() ); + pStripeInfos = stripeInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderPassStripeBeginInfoARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassStripeBeginInfoARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassStripeBeginInfoARM const *() const VULKAN_HPP_NOEXCEPT + { 
+ return reinterpret_cast( this ); + } + + operator VkRenderPassStripeBeginInfoARM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stripeInfoCount, pStripeInfos ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassStripeBeginInfoARM const & ) const = default; +#else + bool operator==( RenderPassStripeBeginInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stripeInfoCount == rhs.stripeInfoCount ) && ( pStripeInfos == rhs.pStripeInfos ); +# endif + } + + bool operator!=( RenderPassStripeBeginInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassStripeBeginInfoARM; + const void * pNext = {}; + uint32_t stripeInfoCount = {}; + const VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM * pStripeInfos = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassStripeBeginInfoARM; + }; + + // wrapper struct for struct VkSemaphoreSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreSubmitInfo.html + struct SemaphoreSubmitInfo + { + using NativeType = VkSemaphoreSubmitInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSubmitInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + uint64_t value_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ = {}, + uint32_t deviceIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , value{ value_ } + , stageMask{ stageMask_ } + , deviceIndex{ deviceIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreSubmitInfo( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreSubmitInfo( *reinterpret_cast( &rhs ) ) + { + } + + SemaphoreSubmitInfo & operator=( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SemaphoreSubmitInfo & operator=( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT + { + value = value_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ ) VULKAN_HPP_NOEXCEPT + { + stageMask = stageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setDeviceIndex( uint32_t 
deviceIndex_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndex = deviceIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreSubmitInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSemaphoreSubmitInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, value, stageMask, deviceIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreSubmitInfo const & ) const = default; +#else + bool operator==( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( value == rhs.value ) && ( stageMask == rhs.stageMask ) && + ( deviceIndex == rhs.deviceIndex ); +# endif + } + + bool operator!=( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSubmitInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + uint64_t value = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask = {}; + uint32_t deviceIndex = {}; + }; + + template <> + struct CppType + { + using Type = SemaphoreSubmitInfo; + }; + + using SemaphoreSubmitInfoKHR = SemaphoreSubmitInfo; + + // wrapper struct for struct VkRenderPassStripeSubmitInfoARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassStripeSubmitInfoARM.html + struct RenderPassStripeSubmitInfoARM + { + using NativeType = VkRenderPassStripeSubmitInfoARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassStripeSubmitInfoARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassStripeSubmitInfoARM( uint32_t stripeSemaphoreInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pStripeSemaphoreInfos_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stripeSemaphoreInfoCount{ stripeSemaphoreInfoCount_ } + , pStripeSemaphoreInfos{ pStripeSemaphoreInfos_ } + { + } + + VULKAN_HPP_CONSTEXPR RenderPassStripeSubmitInfoARM( RenderPassStripeSubmitInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassStripeSubmitInfoARM( VkRenderPassStripeSubmitInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassStripeSubmitInfoARM( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassStripeSubmitInfoARM( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stripeSemaphoreInfos_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stripeSemaphoreInfoCount( static_cast( stripeSemaphoreInfos_.size() ) ) + , pStripeSemaphoreInfos( stripeSemaphoreInfos_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassStripeSubmitInfoARM & operator=( RenderPassStripeSubmitInfoARM const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassStripeSubmitInfoARM & operator=( VkRenderPassStripeSubmitInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeSubmitInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeSubmitInfoARM & setStripeSemaphoreInfoCount( uint32_t stripeSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + stripeSemaphoreInfoCount = stripeSemaphoreInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeSubmitInfoARM & + setPStripeSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pStripeSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + pStripeSemaphoreInfos = pStripeSemaphoreInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassStripeSubmitInfoARM & setStripeSemaphoreInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stripeSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + stripeSemaphoreInfoCount = static_cast( stripeSemaphoreInfos_.size() ); + pStripeSemaphoreInfos = stripeSemaphoreInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderPassStripeSubmitInfoARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassStripeSubmitInfoARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassStripeSubmitInfoARM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderPassStripeSubmitInfoARM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stripeSemaphoreInfoCount, pStripeSemaphoreInfos ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassStripeSubmitInfoARM const & ) const = default; +#else + bool operator==( RenderPassStripeSubmitInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stripeSemaphoreInfoCount == rhs.stripeSemaphoreInfoCount ) && + ( pStripeSemaphoreInfos == rhs.pStripeSemaphoreInfos ); +# endif + } + + bool operator!=( RenderPassStripeSubmitInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassStripeSubmitInfoARM; + const void * pNext = {}; + uint32_t stripeSemaphoreInfoCount = {}; + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pStripeSemaphoreInfos = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassStripeSubmitInfoARM; + }; + + // wrapper struct for struct VkRenderPassSubpassFeedbackInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassSubpassFeedbackInfoEXT.html + struct RenderPassSubpassFeedbackInfoEXT + { + using NativeType = VkRenderPassSubpassFeedbackInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + 
RenderPassSubpassFeedbackInfoEXT( VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT subpassMergeStatus_ = VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT::eMerged, + std::array const & description_ = {}, + uint32_t postMergeIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : subpassMergeStatus{ subpassMergeStatus_ } + , description{ description_ } + , postMergeIndex{ postMergeIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackInfoEXT( RenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassSubpassFeedbackInfoEXT( VkRenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassSubpassFeedbackInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + RenderPassSubpassFeedbackInfoEXT & operator=( RenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassSubpassFeedbackInfoEXT & operator=( VkRenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkRenderPassSubpassFeedbackInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassSubpassFeedbackInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassSubpassFeedbackInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderPassSubpassFeedbackInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple const &, uint32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( subpassMergeStatus, description, postMergeIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = subpassMergeStatus <=> rhs.subpassMergeStatus; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater;
+      if ( auto cmp = postMergeIndex <=> rhs.postMergeIndex; cmp != 0 )
+        return cmp;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( subpassMergeStatus == rhs.subpassMergeStatus ) && ( strcmp( description, rhs.description ) == 0 ) && ( postMergeIndex == rhs.postMergeIndex );
+    }
+
+    bool operator!=( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT subpassMergeStatus = VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT::eMerged;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+    uint32_t postMergeIndex = {};
+  };
+
+  // wrapper struct for struct VkRenderPassSubpassFeedbackCreateInfoEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassSubpassFeedbackCreateInfoEXT.html
+  struct RenderPassSubpassFeedbackCreateInfoEXT
+  {
+    using NativeType = VkRenderPassSubpassFeedbackCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSubpassFeedbackCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback_ = {},
+                                                                    const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , pSubpassFeedback{ pSubpassFeedback_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassSubpassFeedbackCreateInfoEXT( VkRenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassSubpassFeedbackCreateInfoEXT( *reinterpret_cast<RenderPassSubpassFeedbackCreateInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    RenderPassSubpassFeedbackCreateInfoEXT & operator=( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    RenderPassSubpassFeedbackCreateInfoEXT & operator=( VkRenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT &
+      setPSubpassFeedback( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSubpassFeedback = pSubpassFeedback_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkRenderPassSubpassFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassSubpassFeedbackCreateInfoEXT *>( this );
+    }
+
+    operator VkRenderPassSubpassFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassSubpassFeedbackCreateInfoEXT *>( this );
+    }
+
+    operator VkRenderPassSubpassFeedbackCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkRenderPassSubpassFeedbackCreateInfoEXT *>( this );
+    }
+
+    operator VkRenderPassSubpassFeedbackCreateInfoEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkRenderPassSubpassFeedbackCreateInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * const &>
+# endif
+      reflect() const
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pSubpassFeedback ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassSubpassFeedbackCreateInfoEXT const & ) const = default; +#else + bool operator==( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pSubpassFeedback == rhs.pSubpassFeedback ); +# endif + } + + bool operator!=( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSubpassFeedbackCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassSubpassFeedbackCreateInfoEXT; + }; + + // wrapper struct for struct VkRenderPassTileShadingCreateInfoQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassTileShadingCreateInfoQCOM.html + struct RenderPassTileShadingCreateInfoQCOM + { + using NativeType = VkRenderPassTileShadingCreateInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTileShadingCreateInfoQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassTileShadingCreateInfoQCOM( VULKAN_HPP_NAMESPACE::TileShadingRenderPassFlagsQCOM flags_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D tileApronSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , tileApronSize{ tileApronSize_ } + { + } + + VULKAN_HPP_CONSTEXPR RenderPassTileShadingCreateInfoQCOM( RenderPassTileShadingCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassTileShadingCreateInfoQCOM( VkRenderPassTileShadingCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassTileShadingCreateInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + + RenderPassTileShadingCreateInfoQCOM & operator=( RenderPassTileShadingCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassTileShadingCreateInfoQCOM & operator=( VkRenderPassTileShadingCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassTileShadingCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassTileShadingCreateInfoQCOM & setFlags( VULKAN_HPP_NAMESPACE::TileShadingRenderPassFlagsQCOM flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassTileShadingCreateInfoQCOM & setTileApronSize( VULKAN_HPP_NAMESPACE::Extent2D const & tileApronSize_ ) VULKAN_HPP_NOEXCEPT + { + tileApronSize = tileApronSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderPassTileShadingCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassTileShadingCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkRenderPassTileShadingCreateInfoQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderPassTileShadingCreateInfoQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, tileApronSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassTileShadingCreateInfoQCOM const & ) const = default; +#else + bool operator==( RenderPassTileShadingCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( tileApronSize == rhs.tileApronSize ); +# endif + } + + bool operator!=( RenderPassTileShadingCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTileShadingCreateInfoQCOM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::TileShadingRenderPassFlagsQCOM flags = {}; + VULKAN_HPP_NAMESPACE::Extent2D tileApronSize = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassTileShadingCreateInfoQCOM; + }; + + // wrapper struct for struct VkRenderPassTransformBeginInfoQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderPassTransformBeginInfoQCOM.html + struct RenderPassTransformBeginInfoQCOM + { + using NativeType = VkRenderPassTransformBeginInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTransformBeginInfoQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , transform{ transform_ } + { + } + + VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassTransformBeginInfoQCOM( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassTransformBeginInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + + RenderPassTransformBeginInfoQCOM & operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassTransformBeginInfoQCOM & operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderPassTransformBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassTransformBeginInfoQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderPassTransformBeginInfoQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, transform ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassTransformBeginInfoQCOM const & ) const = default; +#else + bool operator==( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ); +# endif + } + + bool operator!=( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTransformBeginInfoQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + }; + + template <> + struct CppType + { + using Type = RenderPassTransformBeginInfoQCOM; + }; + + // wrapper struct for struct VkRenderingAreaInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingAreaInfo.html + struct RenderingAreaInfo + { + using NativeType = VkRenderingAreaInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAreaInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderingAreaInfo( uint32_t viewMask_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , viewMask{ viewMask_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachmentFormats{ pColorAttachmentFormats_ } + , depthAttachmentFormat{ depthAttachmentFormat_ } + , stencilAttachmentFormat{ stencilAttachmentFormat_ } + { + } + + VULKAN_HPP_CONSTEXPR RenderingAreaInfo( RenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderingAreaInfo( VkRenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT : RenderingAreaInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderingAreaInfo( uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( static_cast( colorAttachmentFormats_.size() ) ) + , pColorAttachmentFormats( colorAttachmentFormats_.data() ) + , depthAttachmentFormat( depthAttachmentFormat_ ) + , stencilAttachmentFormat( 
stencilAttachmentFormat_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderingAreaInfo & operator=( RenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderingAreaInfo & operator=( VkRenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + { + viewMask = viewMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachmentFormats = pColorAttachmentFormats_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderingAreaInfo & setColorAttachmentFormats( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachmentFormats_.size() ); + pColorAttachmentFormats = colorAttachmentFormats_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + depthAttachmentFormat = depthAttachmentFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + stencilAttachmentFormat = stencilAttachmentFormat_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderingAreaInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderingAreaInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderingAreaInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderingAreaInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderingAreaInfo const & ) const = default; +#else + bool operator==( RenderingAreaInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && + ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ); +# endif + } + + bool operator!=( RenderingAreaInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs 
); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAreaInfo; + const void * pNext = {}; + uint32_t viewMask = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {}; + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + + template <> + struct CppType + { + using Type = RenderingAreaInfo; + }; + + using RenderingAreaInfoKHR = RenderingAreaInfo; + + // wrapper struct for struct VkRenderingAttachmentInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingAttachmentInfo.html + struct RenderingAttachmentInfo + { + using NativeType = VkRenderingAttachmentInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAttachmentInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, + VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageView{ imageView_ } + , imageLayout{ imageLayout_ } + , resolveMode{ resolveMode_ } + , resolveImageView{ resolveImageView_ } + , resolveImageLayout{ resolveImageLayout_ } + , loadOp{ loadOp_ } + , storeOp{ storeOp_ } + , clearValue{ clearValue_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderingAttachmentInfo( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderingAttachmentInfo( *reinterpret_cast( &rhs ) ) + { + } + + RenderingAttachmentInfo & operator=( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderingAttachmentInfo & operator=( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + { + imageView = imageView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT + { + imageLayout = imageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ ) VULKAN_HPP_NOEXCEPT + { + resolveMode = resolveMode_; + return *this; + } + + 
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageView( VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ ) VULKAN_HPP_NOEXCEPT + { + resolveImageView = resolveImageView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + resolveImageLayout = resolveImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT + { + loadOp = loadOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT + { + storeOp = storeOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT + { + clearValue = clearValue_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderingAttachmentInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderingAttachmentInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderingAttachmentInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderingAttachmentInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageView, imageLayout, resolveMode, resolveImageView, resolveImageLayout, loadOp, storeOp, clearValue ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAttachmentInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; + VULKAN_HPP_NAMESPACE::ImageView resolveImageView = {}; + VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::ClearValue clearValue = {}; + }; + + template <> + struct CppType + { + using Type = RenderingAttachmentInfo; + }; + + using RenderingAttachmentInfoKHR = RenderingAttachmentInfo; + + // wrapper struct for struct VkRenderingAttachmentLocationInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingAttachmentLocationInfo.html + struct RenderingAttachmentLocationInfo + { + using NativeType = VkRenderingAttachmentLocationInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAttachmentLocationInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderingAttachmentLocationInfo( uint32_t colorAttachmentCount_ = {}, + const uint32_t * pColorAttachmentLocations_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachmentLocations{ 
pColorAttachmentLocations_ } + { + } + + VULKAN_HPP_CONSTEXPR RenderingAttachmentLocationInfo( RenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderingAttachmentLocationInfo( VkRenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderingAttachmentLocationInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderingAttachmentLocationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentLocations_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , colorAttachmentCount( static_cast( colorAttachmentLocations_.size() ) ) + , pColorAttachmentLocations( colorAttachmentLocations_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderingAttachmentLocationInfo & operator=( RenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderingAttachmentLocationInfo & operator=( VkRenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentLocationInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentLocationInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentLocationInfo & setPColorAttachmentLocations( const uint32_t * pColorAttachmentLocations_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachmentLocations = pColorAttachmentLocations_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderingAttachmentLocationInfo & + setColorAttachmentLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentLocations_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachmentLocations_.size() ); + pColorAttachmentLocations = colorAttachmentLocations_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderingAttachmentLocationInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderingAttachmentLocationInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderingAttachmentLocationInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderingAttachmentLocationInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, colorAttachmentCount, pColorAttachmentLocations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderingAttachmentLocationInfo const & ) const = default; +#else + bool operator==( RenderingAttachmentLocationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachmentLocations == rhs.pColorAttachmentLocations ); +# endif + } + + bool operator!=( 
RenderingAttachmentLocationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAttachmentLocationInfo;
+    const void * pNext = {};
+    uint32_t colorAttachmentCount = {};
+    const uint32_t * pColorAttachmentLocations = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderingAttachmentLocationInfo>
+  {
+    using Type = RenderingAttachmentLocationInfo;
+  };
+
+  using RenderingAttachmentLocationInfoKHR = RenderingAttachmentLocationInfo;
+
+  // wrapper struct for struct VkRenderingEndInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingEndInfoEXT.html
+  struct RenderingEndInfoEXT
+  {
+    using NativeType = VkRenderingEndInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingEndInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RenderingEndInfoEXT( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {}
+
+    VULKAN_HPP_CONSTEXPR RenderingEndInfoEXT( RenderingEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderingEndInfoEXT( VkRenderingEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : RenderingEndInfoEXT( *reinterpret_cast<RenderingEndInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    RenderingEndInfoEXT & operator=( RenderingEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    RenderingEndInfoEXT & operator=( VkRenderingEndInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingEndInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderingEndInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkRenderingEndInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderingEndInfoEXT *>( this );
+    }
+
+    operator VkRenderingEndInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderingEndInfoEXT *>( this );
+    }
+
+    operator VkRenderingEndInfoEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkRenderingEndInfoEXT *>( this );
+    }
+
+    operator VkRenderingEndInfoEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkRenderingEndInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( RenderingEndInfoEXT const & ) const = default;
+#else
+    bool operator==( RenderingEndInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
+# endif
+    }
+
+    bool operator!=( RenderingEndInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingEndInfoEXT;
+    const void * pNext = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderingEndInfoEXT>
+  {
+    using Type = RenderingEndInfoEXT;
+  };
+
+  // wrapper struct for struct VkRenderingFragmentDensityMapAttachmentInfoEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingFragmentDensityMapAttachmentInfoEXT.html
+  struct RenderingFragmentDensityMapAttachmentInfoEXT
+  {
+    using NativeType =
VkRenderingFragmentDensityMapAttachmentInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RenderingFragmentDensityMapAttachmentInfoEXT( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageView{ imageView_ } + , imageLayout{ imageLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR RenderingFragmentDensityMapAttachmentInfoEXT( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderingFragmentDensityMapAttachmentInfoEXT( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderingFragmentDensityMapAttachmentInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + RenderingFragmentDensityMapAttachmentInfoEXT & operator=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderingFragmentDensityMapAttachmentInfoEXT & operator=( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + { + imageView = imageView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT + { + imageLayout = imageLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderingFragmentDensityMapAttachmentInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderingFragmentDensityMapAttachmentInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderingFragmentDensityMapAttachmentInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderingFragmentDensityMapAttachmentInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageView, imageLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderingFragmentDensityMapAttachmentInfoEXT const & ) const = default; +#else + bool operator==( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout ); +# endif + } + + bool operator!=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } 
+#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + + template <> + struct CppType + { + using Type = RenderingFragmentDensityMapAttachmentInfoEXT; + }; + + // wrapper struct for struct VkRenderingFragmentShadingRateAttachmentInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingFragmentShadingRateAttachmentInfoKHR.html + struct RenderingFragmentShadingRateAttachmentInfoKHR + { + using NativeType = VkRenderingFragmentShadingRateAttachmentInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RenderingFragmentShadingRateAttachmentInfoKHR( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageView{ imageView_ } + , imageLayout{ imageLayout_ } + , shadingRateAttachmentTexelSize{ shadingRateAttachmentTexelSize_ } + { + } + + VULKAN_HPP_CONSTEXPR + RenderingFragmentShadingRateAttachmentInfoKHR( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderingFragmentShadingRateAttachmentInfoKHR( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderingFragmentShadingRateAttachmentInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + RenderingFragmentShadingRateAttachmentInfoKHR & operator=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderingFragmentShadingRateAttachmentInfoKHR & operator=( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + { + imageView = imageView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT + { + imageLayout = imageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & + setShadingRateAttachmentTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderingFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderingFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkRenderingFragmentShadingRateAttachmentInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderingFragmentShadingRateAttachmentInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageView, imageLayout, shadingRateAttachmentTexelSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderingFragmentShadingRateAttachmentInfoKHR const & ) const = default; +#else + bool operator==( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout ) && + ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize ); +# endif + } + + bool operator!=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {}; + }; + + template <> + struct CppType + { + using Type = RenderingFragmentShadingRateAttachmentInfoKHR; + }; + + // wrapper struct for struct VkRenderingInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingInfo.html + struct RenderingInfo + { + using NativeType = VkRenderingInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 RenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, + uint32_t layerCount_ = {}, + uint32_t viewMask_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {}, + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , renderArea{ renderArea_ } + , layerCount{ layerCount_ } + , viewMask{ viewMask_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachments{ pColorAttachments_ } + , pDepthAttachment{ pDepthAttachment_ } + , pStencilAttachment{ pStencilAttachment_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderingInfo( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT : RenderingInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_, + uint32_t layerCount_, + uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_, + const 
VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {}, + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , renderArea( renderArea_ ) + , layerCount( layerCount_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( static_cast( colorAttachments_.size() ) ) + , pColorAttachments( colorAttachments_.data() ) + , pDepthAttachment( pDepthAttachment_ ) + , pStencilAttachment( pStencilAttachment_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderingInfo & operator=( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderingInfo & operator=( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT + { + renderArea = renderArea_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + { + viewMask = viewMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPColorAttachments( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachments = pColorAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderingInfo & setColorAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachments_.size() ); + pColorAttachments = colorAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPDepthAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthAttachment = pDepthAttachment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & + setPStencilAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pStencilAttachment = pStencilAttachment_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderingInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderingInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderingInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderingInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT 
+ { + return std::tie( sType, pNext, flags, renderArea, layerCount, viewMask, colorAttachmentCount, pColorAttachments, pDepthAttachment, pStencilAttachment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderingInfo const & ) const = default; +#else + bool operator==( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( renderArea == rhs.renderArea ) && + ( layerCount == rhs.layerCount ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachments == rhs.pColorAttachments ) && ( pDepthAttachment == rhs.pDepthAttachment ) && ( pStencilAttachment == rhs.pStencilAttachment ); +# endif + } + + bool operator!=( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderingFlags flags = {}; + VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; + uint32_t layerCount = {}; + uint32_t viewMask = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments = {}; + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment = {}; + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment = {}; + }; + + template <> + struct CppType + { + using Type = RenderingInfo; + }; + + using RenderingInfoKHR = RenderingInfo; + + // wrapper struct for struct VkRenderingInputAttachmentIndexInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkRenderingInputAttachmentIndexInfo.html + struct RenderingInputAttachmentIndexInfo + { + using NativeType = VkRenderingInputAttachmentIndexInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingInputAttachmentIndexInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderingInputAttachmentIndexInfo( uint32_t colorAttachmentCount_ = {}, + const uint32_t * pColorAttachmentInputIndices_ = {}, + const uint32_t * pDepthInputAttachmentIndex_ = {}, + const uint32_t * pStencilInputAttachmentIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachmentInputIndices{ pColorAttachmentInputIndices_ } + , pDepthInputAttachmentIndex{ pDepthInputAttachmentIndex_ } + , pStencilInputAttachmentIndex{ pStencilInputAttachmentIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR RenderingInputAttachmentIndexInfo( RenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderingInputAttachmentIndexInfo( VkRenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderingInputAttachmentIndexInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderingInputAttachmentIndexInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentInputIndices_, + const uint32_t * pDepthInputAttachmentIndex_ = {}, + const uint32_t * pStencilInputAttachmentIndex_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , colorAttachmentCount( static_cast( colorAttachmentInputIndices_.size() ) ) + , 
pColorAttachmentInputIndices( colorAttachmentInputIndices_.data() ) + , pDepthInputAttachmentIndex( pDepthInputAttachmentIndex_ ) + , pStencilInputAttachmentIndex( pStencilInputAttachmentIndex_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderingInputAttachmentIndexInfo & operator=( RenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderingInputAttachmentIndexInfo & operator=( VkRenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & + setPColorAttachmentInputIndices( const uint32_t * pColorAttachmentInputIndices_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachmentInputIndices = pColorAttachmentInputIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderingInputAttachmentIndexInfo & + setColorAttachmentInputIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentInputIndices_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachmentInputIndices_.size() ); + pColorAttachmentInputIndices = colorAttachmentInputIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & + setPDepthInputAttachmentIndex( const uint32_t * pDepthInputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT + { + pDepthInputAttachmentIndex = pDepthInputAttachmentIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & + setPStencilInputAttachmentIndex( const uint32_t * pStencilInputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT + { + pStencilInputAttachmentIndex = pStencilInputAttachmentIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderingInputAttachmentIndexInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderingInputAttachmentIndexInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderingInputAttachmentIndexInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkRenderingInputAttachmentIndexInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, colorAttachmentCount, pColorAttachmentInputIndices, pDepthInputAttachmentIndex, pStencilInputAttachmentIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderingInputAttachmentIndexInfo const & ) const = default; +#else + bool operator==( RenderingInputAttachmentIndexInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( 
pColorAttachmentInputIndices == rhs.pColorAttachmentInputIndices ) && ( pDepthInputAttachmentIndex == rhs.pDepthInputAttachmentIndex ) && + ( pStencilInputAttachmentIndex == rhs.pStencilInputAttachmentIndex ); +# endif + } + + bool operator!=( RenderingInputAttachmentIndexInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingInputAttachmentIndexInfo; + const void * pNext = {}; + uint32_t colorAttachmentCount = {}; + const uint32_t * pColorAttachmentInputIndices = {}; + const uint32_t * pDepthInputAttachmentIndex = {}; + const uint32_t * pStencilInputAttachmentIndex = {}; + }; + + template <> + struct CppType + { + using Type = RenderingInputAttachmentIndexInfo; + }; + + using RenderingInputAttachmentIndexInfoKHR = RenderingInputAttachmentIndexInfo; + + // wrapper struct for struct VkResolveImageInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkResolveImageInfo2.html + struct ResolveImageInfo2 + { + using NativeType = VkResolveImageInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ResolveImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcImage{ srcImage_ } + , srcImageLayout{ srcImageLayout_ } + , dstImage{ dstImage_ } + , dstImageLayout{ dstImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } + { + } + + VULKAN_HPP_CONSTEXPR ResolveImageInfo2( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ResolveImageInfo2( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : ResolveImageInfo2( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ResolveImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ResolveImageInfo2 & operator=( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ResolveImageInfo2 & operator=( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + 
srcImage = srcImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ResolveImageInfo2 & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkResolveImageInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkResolveImageInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkResolveImageInfo2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkResolveImageInfo2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ResolveImageInfo2 const & ) const = default; +#else + bool operator==( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) && + ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = ResolveImageInfo2; + }; + + using ResolveImageInfo2KHR = ResolveImageInfo2; + + // wrapper struct for struct VkSamplerBlockMatchWindowCreateInfoQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerBlockMatchWindowCreateInfoQCOM.html + 
struct SamplerBlockMatchWindowCreateInfoQCOM + { + using NativeType = VkSamplerBlockMatchWindowCreateInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerBlockMatchWindowCreateInfoQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerBlockMatchWindowCreateInfoQCOM( + VULKAN_HPP_NAMESPACE::Extent2D windowExtent_ = {}, + VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM windowCompareMode_ = VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM::eMin, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , windowExtent{ windowExtent_ } + , windowCompareMode{ windowCompareMode_ } + { + } + + VULKAN_HPP_CONSTEXPR SamplerBlockMatchWindowCreateInfoQCOM( SamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerBlockMatchWindowCreateInfoQCOM( VkSamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerBlockMatchWindowCreateInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + + SamplerBlockMatchWindowCreateInfoQCOM & operator=( SamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SamplerBlockMatchWindowCreateInfoQCOM & operator=( VkSamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerBlockMatchWindowCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerBlockMatchWindowCreateInfoQCOM & setWindowExtent( VULKAN_HPP_NAMESPACE::Extent2D const & windowExtent_ ) VULKAN_HPP_NOEXCEPT + { + windowExtent = windowExtent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerBlockMatchWindowCreateInfoQCOM & + setWindowCompareMode( VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM windowCompareMode_ ) VULKAN_HPP_NOEXCEPT + { + windowCompareMode = windowCompareMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSamplerBlockMatchWindowCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerBlockMatchWindowCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerBlockMatchWindowCreateInfoQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSamplerBlockMatchWindowCreateInfoQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, windowExtent, windowCompareMode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerBlockMatchWindowCreateInfoQCOM const & ) const = default; +#else + bool operator==( SamplerBlockMatchWindowCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( windowExtent == rhs.windowExtent ) && ( windowCompareMode == rhs.windowCompareMode ); +# endif + } + + bool operator!=( SamplerBlockMatchWindowCreateInfoQCOM const & 
rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerBlockMatchWindowCreateInfoQCOM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D windowExtent = {}; + VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM windowCompareMode = VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM::eMin; + }; + + template <> + struct CppType + { + using Type = SamplerBlockMatchWindowCreateInfoQCOM; + }; + + // wrapper struct for struct VkSamplerBorderColorComponentMappingCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerBorderColorComponentMappingCreateInfoEXT.html + struct SamplerBorderColorComponentMappingCreateInfoEXT + { + using NativeType = VkSamplerBorderColorComponentMappingCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerBorderColorComponentMappingCreateInfoEXT( VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 srgb_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , components{ components_ } + , srgb{ srgb_ } + { + } + + VULKAN_HPP_CONSTEXPR + SamplerBorderColorComponentMappingCreateInfoEXT( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerBorderColorComponentMappingCreateInfoEXT( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerBorderColorComponentMappingCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + SamplerBorderColorComponentMappingCreateInfoEXT & operator=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SamplerBorderColorComponentMappingCreateInfoEXT & operator=( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & + setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT + { + components = components_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setSrgb( VULKAN_HPP_NAMESPACE::Bool32 srgb_ ) VULKAN_HPP_NOEXCEPT + { + srgb = srgb_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSamplerBorderColorComponentMappingCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerBorderColorComponentMappingCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerBorderColorComponentMappingCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSamplerBorderColorComponentMappingCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto 
+# else
+      std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, components, srgb );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SamplerBorderColorComponentMappingCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( components == rhs.components ) && ( srgb == rhs.srgb );
+# endif
+    }
+
+    bool operator!=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
+    VULKAN_HPP_NAMESPACE::Bool32 srgb = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT>
+  {
+    using Type = SamplerBorderColorComponentMappingCreateInfoEXT;
+  };
+
+  // wrapper struct for struct VkSamplerCaptureDescriptorDataInfoEXT, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerCaptureDescriptorDataInfoEXT.html
+  struct SamplerCaptureDescriptorDataInfoEXT
+  {
+    using NativeType = VkSamplerCaptureDescriptorDataInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCaptureDescriptorDataInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SamplerCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , sampler{ sampler_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SamplerCaptureDescriptorDataInfoEXT( SamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerCaptureDescriptorDataInfoEXT( VkSamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerCaptureDescriptorDataInfoEXT( *reinterpret_cast<SamplerCaptureDescriptorDataInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    SamplerCaptureDescriptorDataInfoEXT & operator=( SamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    SamplerCaptureDescriptorDataInfoEXT & operator=( VkSamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SamplerCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCaptureDescriptorDataInfoEXT & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampler = sampler_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkSamplerCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( this );
+    }
+
+    operator VkSamplerCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerCaptureDescriptorDataInfoEXT *>( this );
+    }
+
+    operator VkSamplerCaptureDescriptorDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( this );
+    }
+
+    operator VkSamplerCaptureDescriptorDataInfoEXT *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkSamplerCaptureDescriptorDataInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Sampler const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, sampler );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SamplerCaptureDescriptorDataInfoEXT const & ) const = default;
+#else
+    bool operator==( SamplerCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampler == rhs.sampler );
+# endif
+    }
+
+    bool operator!=( SamplerCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCaptureDescriptorDataInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Sampler sampler = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerCaptureDescriptorDataInfoEXT>
+  {
+    using Type = SamplerCaptureDescriptorDataInfoEXT;
+  };
+
+  // wrapper struct for struct VkSamplerCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerCreateInfo.html
+  struct SamplerCreateInfo
+  {
+    using NativeType = VkSamplerCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SamplerCreateInfo( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {},
+                                            VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+                                            VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+                                            VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest,
+                                            VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
+                                            VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
+                                            VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
+                                            float mipLodBias_ = {},
+                                            VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {},
+                                            float maxAnisotropy_ = {},
+                                            VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {},
+                                            VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
+                                            float minLod_ = {},
+                                            float maxLod_ = {},
+                                            VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack,
+                                            VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {},
+                                            const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , flags{ flags_ }
+      , magFilter{ magFilter_ }
+      , minFilter{ minFilter_ }
+      , mipmapMode{ mipmapMode_ }
+      , addressModeU{ addressModeU_ }
+      , addressModeV{ addressModeV_ }
+      , addressModeW{ addressModeW_ }
+      , mipLodBias{ mipLodBias_ }
+      , anisotropyEnable{ anisotropyEnable_ }
+      , maxAnisotropy{ maxAnisotropy_ }
+      , compareEnable{ compareEnable_ }
+      , compareOp{ compareOp_ }
+      , minLod{ minLod_ }
+      , maxLod{ maxLod_ }
+      , borderColor{ borderColor_ }
+      , unnormalizedCoordinates{ unnormalizedCoordinates_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SamplerCreateInfo( *reinterpret_cast<SamplerCreateInfo const *>( &rhs ) ) {}
+
+    SamplerCreateInfo &
operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT + { + magFilter = magFilter_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT + { + minFilter = minFilter_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT + { + mipmapMode = mipmapMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT + { + addressModeU = addressModeU_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT + { + addressModeV = addressModeV_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT + { + addressModeW = addressModeW_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT + { + mipLodBias = mipLodBias_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT + { + anisotropyEnable = anisotropyEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT + { + maxAnisotropy = maxAnisotropy_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT + { + compareEnable = compareEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT + { + compareOp = compareOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT + { + minLod = minLod_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT + { + maxLod = maxLod_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT + { + borderColor = borderColor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT + { + unnormalizedCoordinates = unnormalizedCoordinates_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerCreateInfo 
&() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSamplerCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + magFilter, + minFilter, + mipmapMode, + addressModeU, + addressModeV, + addressModeW, + mipLodBias, + anisotropyEnable, + maxAnisotropy, + compareEnable, + compareOp, + minLod, + maxLod, + borderColor, + unnormalizedCoordinates ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerCreateInfo const & ) const = default; +#else + bool operator==( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( magFilter == rhs.magFilter ) && ( minFilter == rhs.minFilter ) && + ( mipmapMode == rhs.mipmapMode ) && ( addressModeU == rhs.addressModeU ) && ( addressModeV == rhs.addressModeV ) && + ( addressModeW == rhs.addressModeW ) && ( mipLodBias == rhs.mipLodBias ) && ( anisotropyEnable == rhs.anisotropyEnable ) && + ( maxAnisotropy == rhs.maxAnisotropy ) && ( compareEnable == rhs.compareEnable ) && ( compareOp == rhs.compareOp ) && ( minLod == rhs.minLod ) && + ( maxLod == rhs.maxLod ) && ( borderColor == rhs.borderColor ) && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates ); +# endif + } + + bool operator!=( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest; + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; + float mipLodBias = {}; + VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {}; + float maxAnisotropy = {}; + VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {}; + VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; + float minLod = {}; + float maxLod = {}; + VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack; + VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {}; + }; + + template <> + struct CppType + { + using Type = SamplerCreateInfo; + }; + + // wrapper struct for struct VkSamplerCubicWeightsCreateInfoQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerCubicWeightsCreateInfoQCOM.html + struct SamplerCubicWeightsCreateInfoQCOM + { + using NativeType = VkSamplerCubicWeightsCreateInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eSamplerCubicWeightsCreateInfoQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SamplerCubicWeightsCreateInfoQCOM( VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights_ = VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM::eCatmullRom, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cubicWeights{ cubicWeights_ } + { + } + + VULKAN_HPP_CONSTEXPR SamplerCubicWeightsCreateInfoQCOM( SamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerCubicWeightsCreateInfoQCOM( VkSamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerCubicWeightsCreateInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + + SamplerCubicWeightsCreateInfoQCOM & operator=( SamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SamplerCubicWeightsCreateInfoQCOM & operator=( VkSamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerCubicWeightsCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCubicWeightsCreateInfoQCOM & + setCubicWeights( VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights_ ) VULKAN_HPP_NOEXCEPT + { + cubicWeights = cubicWeights_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSamplerCubicWeightsCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerCubicWeightsCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerCubicWeightsCreateInfoQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSamplerCubicWeightsCreateInfoQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cubicWeights ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerCubicWeightsCreateInfoQCOM const & ) const = default; +#else + bool operator==( SamplerCubicWeightsCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cubicWeights == rhs.cubicWeights ); +# endif + } + + bool operator!=( SamplerCubicWeightsCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCubicWeightsCreateInfoQCOM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights = VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM::eCatmullRom; + }; + + template <> + struct CppType + { + using Type = SamplerCubicWeightsCreateInfoQCOM; + }; + + // wrapper struct for struct VkSamplerCustomBorderColorCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerCustomBorderColorCreateInfoEXT.html + struct SamplerCustomBorderColorCreateInfoEXT + { + using NativeType = VkSamplerCustomBorderColorCreateInfoEXT; + + 
static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCustomBorderColorCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , customBorderColor{ customBorderColor_ } + , format{ format_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerCustomBorderColorCreateInfoEXT( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerCustomBorderColorCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + SamplerCustomBorderColorCreateInfoEXT & operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SamplerCustomBorderColorCreateInfoEXT & operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & + setCustomBorderColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT + { + customBorderColor = customBorderColor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSamplerCustomBorderColorCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerCustomBorderColorCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSamplerCustomBorderColorCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, customBorderColor, format ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCustomBorderColorCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + + template <> + struct CppType + { + using Type = SamplerCustomBorderColorCreateInfoEXT; + }; + + // wrapper struct for struct VkSamplerReductionModeCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerReductionModeCreateInfo.html + struct SamplerReductionModeCreateInfo + { + using NativeType = VkSamplerReductionModeCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eSamplerReductionModeCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SamplerReductionModeCreateInfo( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , reductionMode{ reductionMode_ } + { + } + + VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerReductionModeCreateInfo( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerReductionModeCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + SamplerReductionModeCreateInfo & operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SamplerReductionModeCreateInfo & operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT + { + reductionMode = reductionMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSamplerReductionModeCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerReductionModeCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSamplerReductionModeCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, reductionMode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerReductionModeCreateInfo const & ) const = default; +#else + bool operator==( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reductionMode == rhs.reductionMode ); +# endif + } + + bool operator!=( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage; + }; + + template <> + struct CppType + { + using Type = SamplerReductionModeCreateInfo; + }; + + using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo; + + // wrapper struct for struct VkSamplerYcbcrConversionCreateInfo, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerYcbcrConversionCreateInfo.html + struct SamplerYcbcrConversionCreateInfo + { + using NativeType = VkSamplerYcbcrConversionCreateInfo; + + 
static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, + VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, + VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, + VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , format{ format_ } + , ycbcrModel{ ycbcrModel_ } + , ycbcrRange{ ycbcrRange_ } + , components{ components_ } + , xChromaOffset{ xChromaOffset_ } + , yChromaOffset{ yChromaOffset_ } + , chromaFilter{ chromaFilter_ } + , forceExplicitReconstruction{ forceExplicitReconstruction_ } + { + } + + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerYcbcrConversionCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + SamplerYcbcrConversionCreateInfo & operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SamplerYcbcrConversionCreateInfo & operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & + setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT + { + ycbcrModel = ycbcrModel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT + { + ycbcrRange = ycbcrRange_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT + { + components = components_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT + { + xChromaOffset = xChromaOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT + { + yChromaOffset = yChromaOffset_; + return 
*this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT + { + chromaFilter = chromaFilter_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & + setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT + { + forceExplicitReconstruction = forceExplicitReconstruction_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSamplerYcbcrConversionCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerYcbcrConversionCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSamplerYcbcrConversionCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, format, ycbcrModel, ycbcrRange, components, xChromaOffset, yChromaOffset, chromaFilter, forceExplicitReconstruction ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerYcbcrConversionCreateInfo const & ) const = default; +#else + bool operator==( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( ycbcrModel == rhs.ycbcrModel ) && + ( ycbcrRange == rhs.ycbcrRange ) && ( components == rhs.components ) && ( xChromaOffset == rhs.xChromaOffset ) && + ( yChromaOffset == rhs.yChromaOffset ) && ( chromaFilter == rhs.chromaFilter ) && + ( forceExplicitReconstruction == rhs.forceExplicitReconstruction ); +# endif + } + + bool operator!=( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; + VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; + VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {}; + }; + + template <> + struct CppType + { + using Type = SamplerYcbcrConversionCreateInfo; + }; + + using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo; + + // wrapper struct for struct VkSamplerYcbcrConversionImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerYcbcrConversionImageFormatProperties.html + struct SamplerYcbcrConversionImageFormatProperties + { + using NativeType = 
VkSamplerYcbcrConversionImageFormatProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionImageFormatProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( uint32_t combinedImageSamplerDescriptorCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , combinedImageSamplerDescriptorCount{ combinedImageSamplerDescriptorCount_ } + { + } + + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerYcbcrConversionImageFormatProperties( *reinterpret_cast( &rhs ) ) + { + } + + SamplerYcbcrConversionImageFormatProperties & operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SamplerYcbcrConversionImageFormatProperties & operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSamplerYcbcrConversionImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerYcbcrConversionImageFormatProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSamplerYcbcrConversionImageFormatProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, combinedImageSamplerDescriptorCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerYcbcrConversionImageFormatProperties const & ) const = default; +#else + bool operator==( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount ); +# endif + } + + bool operator!=( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties; + void * pNext = {}; + uint32_t combinedImageSamplerDescriptorCount = {}; + }; + + template <> + struct CppType + { + using Type = SamplerYcbcrConversionImageFormatProperties; + }; + + using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties; + + // wrapper struct for struct VkSamplerYcbcrConversionInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerYcbcrConversionInfo.html + struct SamplerYcbcrConversionInfo + { + using NativeType = VkSamplerYcbcrConversionInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eSamplerYcbcrConversionInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , conversion{ conversion_ } + { + } + + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerYcbcrConversionInfo( *reinterpret_cast( &rhs ) ) + { + } + + SamplerYcbcrConversionInfo & operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SamplerYcbcrConversionInfo & operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT + { + conversion = conversion_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSamplerYcbcrConversionInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerYcbcrConversionInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSamplerYcbcrConversionInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, conversion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerYcbcrConversionInfo const & ) const = default; +#else + bool operator==( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conversion == rhs.conversion ); +# endif + } + + bool operator!=( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {}; + }; + + template <> + struct CppType + { + using Type = SamplerYcbcrConversionInfo; + }; + + using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo; + + // wrapper struct for struct VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM.html + struct SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM + { + using NativeType = VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM; 
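+    // Illustrative usage sketch (not generated code): how the SamplerYcbcrConversionCreateInfo and
+    // SamplerYcbcrConversionInfo wrappers above are typically combined. `device` and the chosen
+    // multi-planar format are assumptions and must be supported by the implementation:
+    //
+    //   vk::SamplerYcbcrConversionCreateInfo conversionCi{};
+    //   conversionCi.setFormat( vk::Format::eG8B8R83Plane420Unorm )
+    //               .setYcbcrModel( vk::SamplerYcbcrModelConversion::eYcbcr709 )
+    //               .setYcbcrRange( vk::SamplerYcbcrRange::eItuNarrow )
+    //               .setChromaFilter( vk::Filter::eLinear );
+    //   vk::UniqueSamplerYcbcrConversion conversion = device.createSamplerYcbcrConversionUnique( conversionCi );
+    //
+    //   // The conversion is then attached to a sampler through SamplerYcbcrConversionInfo in the pNext chain:
+    //   vk::SamplerYcbcrConversionInfo conversionInfo{ *conversion };
+    //   vk::SamplerCreateInfo samplerCi{};
+    //   samplerCi.setPNext( &conversionInfo ).setMagFilter( vk::Filter::eLinear ).setMinFilter( vk::Filter::eLinear );
+    //   vk::UniqueSampler sampler = device.createSamplerUnique( samplerCi );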
+ +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( VULKAN_HPP_NAMESPACE::Bool32 enableYDegamma_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 enableCbCrDegamma_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , enableYDegamma{ enableYDegamma_ } + , enableCbCrDegamma{ enableCbCrDegamma_ } + { + } + + VULKAN_HPP_CONSTEXPR + SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + + SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & operator=( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & operator=( VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & + setEnableYDegamma( VULKAN_HPP_NAMESPACE::Bool32 enableYDegamma_ ) VULKAN_HPP_NOEXCEPT + { + enableYDegamma = enableYDegamma_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & + setEnableCbCrDegamma( VULKAN_HPP_NAMESPACE::Bool32 enableCbCrDegamma_ ) VULKAN_HPP_NOEXCEPT + { + enableCbCrDegamma = enableCbCrDegamma_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, enableYDegamma, enableCbCrDegamma ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & ) const = default; +#else + bool operator==( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( enableYDegamma == rhs.enableYDegamma ) && ( enableCbCrDegamma == rhs.enableCbCrDegamma ); +# endif + } + + bool operator!=( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 enableYDegamma = {}; + VULKAN_HPP_NAMESPACE::Bool32 enableCbCrDegamma = {}; + }; + + template <> + struct CppType + { + using Type = SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM; + }; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + // wrapper struct for struct VkScreenBufferFormatPropertiesQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkScreenBufferFormatPropertiesQNX.html + struct ScreenBufferFormatPropertiesQNX + { + using NativeType = VkScreenBufferFormatPropertiesQNX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenBufferFormatPropertiesQNX; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ScreenBufferFormatPropertiesQNX( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint64_t externalFormat_ = {}, + uint64_t screenUsage_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , format{ format_ } + , externalFormat{ externalFormat_ } + , screenUsage{ screenUsage_ } + , formatFeatures{ formatFeatures_ } + , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ } + , suggestedYcbcrModel{ suggestedYcbcrModel_ } + , suggestedYcbcrRange{ suggestedYcbcrRange_ } + , suggestedXChromaOffset{ suggestedXChromaOffset_ } + , suggestedYChromaOffset{ suggestedYChromaOffset_ } + { + } + + VULKAN_HPP_CONSTEXPR ScreenBufferFormatPropertiesQNX( ScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ScreenBufferFormatPropertiesQNX( VkScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT + : ScreenBufferFormatPropertiesQNX( *reinterpret_cast( &rhs ) ) + { + } + + ScreenBufferFormatPropertiesQNX & operator=( ScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ScreenBufferFormatPropertiesQNX & operator=( VkScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkScreenBufferFormatPropertiesQNX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkScreenBufferFormatPropertiesQNX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkScreenBufferFormatPropertiesQNX const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkScreenBufferFormatPropertiesQNX *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + format, + 
externalFormat, + screenUsage, + formatFeatures, + samplerYcbcrConversionComponents, + suggestedYcbcrModel, + suggestedYcbcrRange, + suggestedXChromaOffset, + suggestedYChromaOffset ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ScreenBufferFormatPropertiesQNX const & ) const = default; +# else + bool operator==( ScreenBufferFormatPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( externalFormat == rhs.externalFormat ) && + ( screenUsage == rhs.screenUsage ) && ( formatFeatures == rhs.formatFeatures ) && + ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && + ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && + ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); +# endif + } + + bool operator!=( ScreenBufferFormatPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenBufferFormatPropertiesQNX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint64_t externalFormat = {}; + uint64_t screenUsage = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {}; + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + }; + + template <> + struct CppType + { + using Type = ScreenBufferFormatPropertiesQNX; + }; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + // wrapper struct for struct VkScreenBufferPropertiesQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkScreenBufferPropertiesQNX.html + struct ScreenBufferPropertiesQNX + { + using NativeType = VkScreenBufferPropertiesQNX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenBufferPropertiesQNX; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ScreenBufferPropertiesQNX( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, + uint32_t memoryTypeBits_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , allocationSize{ allocationSize_ } + , memoryTypeBits{ memoryTypeBits_ } + { + } + + VULKAN_HPP_CONSTEXPR ScreenBufferPropertiesQNX( ScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ScreenBufferPropertiesQNX( VkScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT + : ScreenBufferPropertiesQNX( *reinterpret_cast( &rhs ) ) + { + } + + ScreenBufferPropertiesQNX & operator=( ScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ScreenBufferPropertiesQNX & operator=( 
VkScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkScreenBufferPropertiesQNX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkScreenBufferPropertiesQNX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkScreenBufferPropertiesQNX const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkScreenBufferPropertiesQNX *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, allocationSize, memoryTypeBits ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ScreenBufferPropertiesQNX const & ) const = default; +# else + bool operator==( ScreenBufferPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( ScreenBufferPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenBufferPropertiesQNX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; + uint32_t memoryTypeBits = {}; + }; + + template <> + struct CppType + { + using Type = ScreenBufferPropertiesQNX; + }; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + // wrapper struct for struct VkScreenSurfaceCreateInfoQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkScreenSurfaceCreateInfoQNX.html + struct ScreenSurfaceCreateInfoQNX + { + using NativeType = VkScreenSurfaceCreateInfoQNX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenSurfaceCreateInfoQNX; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ = {}, + struct _screen_context * context_ = {}, + struct _screen_window * window_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , context{ context_ } + , window{ window_ } + { + } + + VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ScreenSurfaceCreateInfoQNX( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT + : ScreenSurfaceCreateInfoQNX( *reinterpret_cast( &rhs ) ) + { + } + + ScreenSurfaceCreateInfoQNX & operator=( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ScreenSurfaceCreateInfoQNX & operator=( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
ScreenSurfaceCreateInfoQNX & setFlags( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setContext( struct _screen_context * context_ ) VULKAN_HPP_NOEXCEPT + { + context = context_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setWindow( struct _screen_window * window_ ) VULKAN_HPP_NOEXCEPT + { + window = window_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkScreenSurfaceCreateInfoQNX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkScreenSurfaceCreateInfoQNX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkScreenSurfaceCreateInfoQNX const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkScreenSurfaceCreateInfoQNX *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, context, window ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ScreenSurfaceCreateInfoQNX const & ) const = default; +# else + bool operator==( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( context == rhs.context ) && ( window == rhs.window ); +# endif + } + + bool operator!=( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenSurfaceCreateInfoQNX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags = {}; + struct _screen_context * context = {}; + struct _screen_window * window = {}; + }; + + template <> + struct CppType + { + using Type = ScreenSurfaceCreateInfoQNX; + }; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + // wrapper struct for struct VkSemaphoreCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreCreateInfo.html + struct SemaphoreCreateInfo + { + using NativeType = VkSemaphoreCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + SemaphoreCreateInfo & operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SemaphoreCreateInfo & operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
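+    // Illustrative usage sketch (not generated code): the default-constructed wrapper already carries
+    // the correct sType, so creating a plain binary semaphore only needs the flags left empty.
+    // `device` is an assumed vk::Device handle:
+    //
+    //   vk::SemaphoreCreateInfo semaphoreCi{};
+    //   vk::UniqueSemaphore imageAvailable = device.createSemaphoreUnique( semaphoreCi );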
VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSemaphoreCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreCreateInfo const & ) const = default; +#else + bool operator==( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {}; + }; + + template <> + struct CppType + { + using Type = SemaphoreCreateInfo; + }; + + // wrapper struct for struct VkSemaphoreGetFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreGetFdInfoKHR.html + struct SemaphoreGetFdInfoKHR + { + using NativeType = VkSemaphoreGetFdInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreGetFdInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + SemaphoreGetFdInfoKHR & operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SemaphoreGetFdInfoKHR & operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) 
VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSemaphoreGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreGetFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSemaphoreGetFdInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreGetFdInfoKHR const & ) const = default; +#else + bool operator==( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = SemaphoreGetFdInfoKHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + // wrapper struct for struct VkSemaphoreGetWin32HandleInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreGetWin32HandleInfoKHR.html + struct SemaphoreGetWin32HandleInfoKHR + { + using NativeType = VkSemaphoreGetWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + SemaphoreGetWin32HandleInfoKHR & operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SemaphoreGetWin32HandleInfoKHR & operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + 
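+      // Illustrative usage sketch (not generated code) for the SemaphoreGetFdInfoKHR wrapper defined
+      // above; it assumes VK_KHR_external_semaphore_fd is enabled and that `semaphore` was created
+      // with an exportable opaque-fd handle type:
+      //
+      //   vk::SemaphoreGetFdInfoKHR getFdInfo{ *semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd };
+      //   int fd = device.getSemaphoreFdKHR( getFdInfo );  // ownership of the fd passes to the caller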
return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSemaphoreGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreGetWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSemaphoreGetWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, handleType ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreGetWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = SemaphoreGetWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + // wrapper struct for struct VkSemaphoreGetZirconHandleInfoFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreGetZirconHandleInfoFUCHSIA.html + struct SemaphoreGetZirconHandleInfoFUCHSIA + { + using NativeType = VkSemaphoreGetZirconHandleInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( 
SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreGetZirconHandleInfoFUCHSIA( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreGetZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + SemaphoreGetZirconHandleInfoFUCHSIA & operator=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SemaphoreGetZirconHandleInfoFUCHSIA & operator=( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSemaphoreGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreGetZirconHandleInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSemaphoreGetZirconHandleInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, handleType ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreGetZirconHandleInfoFUCHSIA const & ) const = default; +# else + bool operator==( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = SemaphoreGetZirconHandleInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + // wrapper struct for struct VkSemaphoreSignalInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreSignalInfo.html + struct SemaphoreSignalInfo + { + using NativeType = VkSemaphoreSignalInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSignalInfo; + +#if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SemaphoreSignalInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , value{ value_ } + { + } + + VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreSignalInfo( *reinterpret_cast( &rhs ) ) + { + } + + SemaphoreSignalInfo & operator=( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SemaphoreSignalInfo & operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT + { + value = value_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSemaphoreSignalInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreSignalInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSemaphoreSignalInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, value ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreSignalInfo const & ) const = default; +#else + bool operator==( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( value == rhs.value ); +# endif + } + + bool operator!=( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + uint64_t value = {}; + }; + + template <> + struct CppType + { + using Type = SemaphoreSignalInfo; + }; + + using SemaphoreSignalInfoKHR = SemaphoreSignalInfo; + + // wrapper struct for struct VkSemaphoreTypeCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreTypeCreateInfo.html + struct SemaphoreTypeCreateInfo + { + using NativeType = VkSemaphoreTypeCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreTypeCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( 
VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary, + uint64_t initialValue_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , semaphoreType{ semaphoreType_ } + , initialValue{ initialValue_ } + { + } + + VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreTypeCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + SemaphoreTypeCreateInfo & operator=( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SemaphoreTypeCreateInfo & operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setSemaphoreType( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ ) VULKAN_HPP_NOEXCEPT + { + semaphoreType = semaphoreType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setInitialValue( uint64_t initialValue_ ) VULKAN_HPP_NOEXCEPT + { + initialValue = initialValue_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSemaphoreTypeCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreTypeCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSemaphoreTypeCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphoreType, initialValue ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreTypeCreateInfo const & ) const = default; +#else + bool operator==( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphoreType == rhs.semaphoreType ) && ( initialValue == rhs.initialValue ); +# endif + } + + bool operator!=( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary; + uint64_t initialValue = {}; + }; + + template <> + struct CppType + { + using Type = SemaphoreTypeCreateInfo; + }; + + using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo; + + // wrapper struct for struct VkSemaphoreWaitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSemaphoreWaitInfo.html + struct SemaphoreWaitInfo + { + using NativeType = VkSemaphoreWaitInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreWaitInfo; + 
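+    // Illustrative usage sketch (not generated code): a timeline semaphore is created by chaining
+    // SemaphoreTypeCreateInfo into SemaphoreCreateInfo, then signalled and waited on the host with
+    // SemaphoreSignalInfo and SemaphoreWaitInfo (core in Vulkan 1.2, VK_KHR_timeline_semaphore otherwise).
+    // `device` is an assumed vk::Device handle:
+    //
+    //   vk::SemaphoreTypeCreateInfo typeCi{ vk::SemaphoreType::eTimeline, /*initialValue*/ 0 };
+    //   vk::SemaphoreCreateInfo     timelineCi{};
+    //   timelineCi.setPNext( &typeCi );
+    //   vk::UniqueSemaphore timeline = device.createSemaphoreUnique( timelineCi );
+    //
+    //   device.signalSemaphore( vk::SemaphoreSignalInfo{ *timeline, 1 } );        // host-side signal to value 1
+    //
+    //   vk::Semaphore waitSems[]   = { *timeline };
+    //   uint64_t      waitValues[] = { 1 };
+    //   (void)device.waitSemaphores( vk::SemaphoreWaitInfo{ {}, 1, waitSems, waitValues }, UINT64_MAX );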
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {}, + uint32_t semaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ = {}, + const uint64_t * pValues_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , semaphoreCount{ semaphoreCount_ } + , pSemaphores{ pSemaphores_ } + , pValues{ pValues_ } + { + } + + VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreWaitInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & semaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , semaphoreCount( static_cast( semaphores_.size() ) ) + , pSemaphores( semaphores_.data() ) + , pValues( values_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( semaphores_.size() == values_.size() ); +# else + if ( semaphores_.size() != values_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SemaphoreWaitInfo & operator=( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SemaphoreWaitInfo & operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + semaphoreCount = semaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pSemaphores = pSemaphores_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SemaphoreWaitInfo & + setSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & semaphores_ ) VULKAN_HPP_NOEXCEPT + { + semaphoreCount = static_cast( semaphores_.size() ); + pSemaphores = semaphores_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPValues( const uint64_t * pValues_ ) VULKAN_HPP_NOEXCEPT + { + pValues = pValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SemaphoreWaitInfo & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + semaphoreCount = static_cast( values_.size() ); + pValues = values_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSemaphoreWaitInfo const &() const VULKAN_HPP_NOEXCEPT + 
{ + return *reinterpret_cast( this ); + } + + operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreWaitInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSemaphoreWaitInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, semaphoreCount, pSemaphores, pValues ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreWaitInfo const & ) const = default; +#else + bool operator==( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( semaphoreCount == rhs.semaphoreCount ) && + ( pSemaphores == rhs.pSemaphores ) && ( pValues == rhs.pValues ); +# endif + } + + bool operator!=( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {}; + uint32_t semaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores = {}; + const uint64_t * pValues = {}; + }; + + template <> + struct CppType + { + using Type = SemaphoreWaitInfo; + }; + + using SemaphoreWaitInfoKHR = SemaphoreWaitInfo; + + // wrapper struct for struct VkSetDescriptorBufferOffsetsInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSetDescriptorBufferOffsetsInfoEXT.html + struct SetDescriptorBufferOffsetsInfoEXT + { + using NativeType = VkSetDescriptorBufferOffsetsInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSetDescriptorBufferOffsetsInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SetDescriptorBufferOffsetsInfoEXT( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + uint32_t firstSet_ = {}, + uint32_t setCount_ = {}, + const uint32_t * pBufferIndices_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stageFlags{ stageFlags_ } + , layout{ layout_ } + , firstSet{ firstSet_ } + , setCount{ setCount_ } + , pBufferIndices{ pBufferIndices_ } + , pOffsets{ pOffsets_ } + { + } + + VULKAN_HPP_CONSTEXPR SetDescriptorBufferOffsetsInfoEXT( SetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SetDescriptorBufferOffsetsInfoEXT( VkSetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SetDescriptorBufferOffsetsInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SetDescriptorBufferOffsetsInfoEXT( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_, + uint32_t firstSet_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferIndices_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & offsets_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stageFlags( stageFlags_ ) + , 
layout( layout_ ) + , firstSet( firstSet_ ) + , setCount( static_cast( bufferIndices_.size() ) ) + , pBufferIndices( bufferIndices_.data() ) + , pOffsets( offsets_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( bufferIndices_.size() == offsets_.size() ); +# else + if ( bufferIndices_.size() != offsets_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::SetDescriptorBufferOffsetsInfoEXT::SetDescriptorBufferOffsetsInfoEXT: bufferIndices_.size() != offsets_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SetDescriptorBufferOffsetsInfoEXT & operator=( SetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SetDescriptorBufferOffsetsInfoEXT & operator=( VkSetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + { + stageFlags = stageFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setFirstSet( uint32_t firstSet_ ) VULKAN_HPP_NOEXCEPT + { + firstSet = firstSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setSetCount( uint32_t setCount_ ) VULKAN_HPP_NOEXCEPT + { + setCount = setCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setPBufferIndices( const uint32_t * pBufferIndices_ ) VULKAN_HPP_NOEXCEPT + { + pBufferIndices = pBufferIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SetDescriptorBufferOffsetsInfoEXT & + setBufferIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferIndices_ ) VULKAN_HPP_NOEXCEPT + { + setCount = static_cast( bufferIndices_.size() ); + pBufferIndices = bufferIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setPOffsets( const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pOffsets = pOffsets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SetDescriptorBufferOffsetsInfoEXT & + setOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & offsets_ ) VULKAN_HPP_NOEXCEPT + { + setCount = static_cast( offsets_.size() ); + pOffsets = offsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSetDescriptorBufferOffsetsInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSetDescriptorBufferOffsetsInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSetDescriptorBufferOffsetsInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSetDescriptorBufferOffsetsInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stageFlags, layout, firstSet, setCount, pBufferIndices, pOffsets ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SetDescriptorBufferOffsetsInfoEXT const & ) const = default; +#else + bool operator==( SetDescriptorBufferOffsetsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageFlags == rhs.stageFlags ) && ( layout == rhs.layout ) && ( firstSet == rhs.firstSet ) && + ( setCount == rhs.setCount ) && ( pBufferIndices == rhs.pBufferIndices ) && ( pOffsets == rhs.pOffsets ); +# endif + } + + bool operator!=( SetDescriptorBufferOffsetsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSetDescriptorBufferOffsetsInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + uint32_t firstSet = {}; + uint32_t setCount = {}; + const uint32_t * pBufferIndices = {}; + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets = {}; + }; + + template <> + struct CppType + { + using Type = SetDescriptorBufferOffsetsInfoEXT; + }; + + // wrapper struct for struct VkSetLatencyMarkerInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSetLatencyMarkerInfoNV.html + struct SetLatencyMarkerInfoNV + { + using NativeType = VkSetLatencyMarkerInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSetLatencyMarkerInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SetLatencyMarkerInfoNV( uint64_t presentID_ = {}, + VULKAN_HPP_NAMESPACE::LatencyMarkerNV marker_ = VULKAN_HPP_NAMESPACE::LatencyMarkerNV::eSimulationStart, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentID{ presentID_ } + , marker{ marker_ } + { + } + + VULKAN_HPP_CONSTEXPR SetLatencyMarkerInfoNV( SetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SetLatencyMarkerInfoNV( VkSetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : SetLatencyMarkerInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + SetLatencyMarkerInfoNV & operator=( SetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SetLatencyMarkerInfoNV & operator=( VkSetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SetLatencyMarkerInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SetLatencyMarkerInfoNV & setPresentID( uint64_t presentID_ ) VULKAN_HPP_NOEXCEPT + { + presentID = presentID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SetLatencyMarkerInfoNV & setMarker( VULKAN_HPP_NAMESPACE::LatencyMarkerNV marker_ ) VULKAN_HPP_NOEXCEPT + { + marker = marker_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSetLatencyMarkerInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkSetLatencyMarkerInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSetLatencyMarkerInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSetLatencyMarkerInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentID, marker ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SetLatencyMarkerInfoNV const & ) const = default; +#else + bool operator==( SetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentID == rhs.presentID ) && ( marker == rhs.marker ); +# endif + } + + bool operator!=( SetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSetLatencyMarkerInfoNV; + const void * pNext = {}; + uint64_t presentID = {}; + VULKAN_HPP_NAMESPACE::LatencyMarkerNV marker = VULKAN_HPP_NAMESPACE::LatencyMarkerNV::eSimulationStart; + }; + + template <> + struct CppType + { + using Type = SetLatencyMarkerInfoNV; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + // wrapper struct for struct VkSetPresentConfigNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSetPresentConfigNV.html + struct SetPresentConfigNV + { + using NativeType = VkSetPresentConfigNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSetPresentConfigNV; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SetPresentConfigNV( uint32_t numFramesPerBatch_ = {}, uint32_t presentConfigFeedback_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , numFramesPerBatch{ numFramesPerBatch_ } + , presentConfigFeedback{ presentConfigFeedback_ } + { + } + + VULKAN_HPP_CONSTEXPR SetPresentConfigNV( SetPresentConfigNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SetPresentConfigNV( VkSetPresentConfigNV const & rhs ) VULKAN_HPP_NOEXCEPT : SetPresentConfigNV( *reinterpret_cast( &rhs ) ) {} + + SetPresentConfigNV & operator=( SetPresentConfigNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SetPresentConfigNV & operator=( VkSetPresentConfigNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SetPresentConfigNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SetPresentConfigNV & setNumFramesPerBatch( uint32_t numFramesPerBatch_ ) VULKAN_HPP_NOEXCEPT + { + numFramesPerBatch = numFramesPerBatch_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SetPresentConfigNV & setPresentConfigFeedback( uint32_t presentConfigFeedback_ ) VULKAN_HPP_NOEXCEPT + { + presentConfigFeedback = presentConfigFeedback_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSetPresentConfigNV const &() const VULKAN_HPP_NOEXCEPT + { + 
return *reinterpret_cast( this ); + } + + operator VkSetPresentConfigNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSetPresentConfigNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSetPresentConfigNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, numFramesPerBatch, presentConfigFeedback ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SetPresentConfigNV const & ) const = default; +# else + bool operator==( SetPresentConfigNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( numFramesPerBatch == rhs.numFramesPerBatch ) && + ( presentConfigFeedback == rhs.presentConfigFeedback ); +# endif + } + + bool operator!=( SetPresentConfigNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSetPresentConfigNV; + const void * pNext = {}; + uint32_t numFramesPerBatch = {}; + uint32_t presentConfigFeedback = {}; + }; + + template <> + struct CppType + { + using Type = SetPresentConfigNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + // wrapper struct for struct VkSetStateFlagsIndirectCommandNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSetStateFlagsIndirectCommandNV.html + struct SetStateFlagsIndirectCommandNV + { + using NativeType = VkSetStateFlagsIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( uint32_t data_ = {} ) VULKAN_HPP_NOEXCEPT : data{ data_ } {} + + VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SetStateFlagsIndirectCommandNV( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : SetStateFlagsIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + SetStateFlagsIndirectCommandNV & operator=( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SetStateFlagsIndirectCommandNV & operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SetStateFlagsIndirectCommandNV & setData( uint32_t data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSetStateFlagsIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSetStateFlagsIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSetStateFlagsIndirectCommandNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + 
return std::tie( data ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SetStateFlagsIndirectCommandNV const & ) const = default; +#else + bool operator==( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( data == rhs.data ); +# endif + } + + bool operator!=( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t data = {}; + }; + + // wrapper struct for struct VkShaderCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderCreateInfoEXT.html + struct ShaderCreateInfoEXT + { + using NativeType = VkShaderCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderCreateInfoEXT( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, + VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_ = {}, + VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_ = VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT::eBinary, + size_t codeSize_ = {}, + const void * pCode_ = {}, + const char * pName_ = {}, + uint32_t setLayoutCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, + uint32_t pushConstantRangeCount_ = {}, + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {}, + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , stage{ stage_ } + , nextStage{ nextStage_ } + , codeType{ codeType_ } + , codeSize{ codeSize_ } + , pCode{ pCode_ } + , pName{ pName_ } + , setLayoutCount{ setLayoutCount_ } + , pSetLayouts{ pSetLayouts_ } + , pushConstantRangeCount{ pushConstantRangeCount_ } + , pPushConstantRanges{ pPushConstantRanges_ } + , pSpecializationInfo{ pSpecializationInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR ShaderCreateInfoEXT( ShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderCreateInfoEXT( VkShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ShaderCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + ShaderCreateInfoEXT( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_, + VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_, + const char * pName_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ = {}, + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , stage( stage_ ) + , nextStage( nextStage_ ) + , codeType( codeType_ ) + , codeSize( code_.size() * sizeof( T ) ) + , pCode( code_.data() ) + , pName( pName_ ) + , setLayoutCount( static_cast( setLayouts_.size() ) ) + , pSetLayouts( setLayouts_.data() ) + , pushConstantRangeCount( static_cast( pushConstantRanges_.size() ) ) + , 
pPushConstantRanges( pushConstantRanges_.data() ) + , pSpecializationInfo( pSpecializationInfo_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ShaderCreateInfoEXT & operator=( ShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ShaderCreateInfoEXT & operator=( VkShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT + { + stage = stage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setNextStage( VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_ ) VULKAN_HPP_NOEXCEPT + { + nextStage = nextStage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setCodeType( VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_ ) VULKAN_HPP_NOEXCEPT + { + codeType = codeType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT + { + codeSize = codeSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPCode( const void * pCode_ ) VULKAN_HPP_NOEXCEPT + { + pCode = pCode_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + ShaderCreateInfoEXT & setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_ ) VULKAN_HPP_NOEXCEPT + { + codeSize = code_.size() * sizeof( T ); + pCode = code_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT + { + pName = pName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT + { + setLayoutCount = setLayoutCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pSetLayouts = pSetLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShaderCreateInfoEXT & + setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ ) VULKAN_HPP_NOEXCEPT + { + setLayoutCount = static_cast( setLayouts_.size() ); + pSetLayouts = setLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = pushConstantRangeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & + setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + pPushConstantRanges = pPushConstantRanges_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShaderCreateInfoEXT & setPushConstantRanges( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = static_cast( 
pushConstantRanges_.size() ); + pPushConstantRanges = pushConstantRanges_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & + setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT + { + pSpecializationInfo = pSpecializationInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkShaderCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkShaderCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + stage, + nextStage, + codeType, + codeSize, + pCode, + pName, + setLayoutCount, + pSetLayouts, + pushConstantRangeCount, + pPushConstantRanges, + pSpecializationInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = stage <=> rhs.stage; cmp != 0 ) + return cmp; + if ( auto cmp = nextStage <=> rhs.nextStage; cmp != 0 ) + return cmp; + if ( auto cmp = codeType <=> rhs.codeType; cmp != 0 ) + return cmp; + if ( auto cmp = codeSize <=> rhs.codeSize; cmp != 0 ) + return cmp; + if ( auto cmp = pCode <=> rhs.pCode; cmp != 0 ) + return cmp; + if ( pName != rhs.pName ) + if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = setLayoutCount <=> rhs.setLayoutCount; cmp != 0 ) + return cmp; + if ( auto cmp = pSetLayouts <=> rhs.pSetLayouts; cmp != 0 ) + return cmp; + if ( auto cmp = pushConstantRangeCount <=> rhs.pushConstantRangeCount; cmp != 0 ) + return cmp; + if ( auto cmp = pPushConstantRanges <=> rhs.pPushConstantRanges; cmp != 0 ) + return cmp; + if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( nextStage == rhs.nextStage ) && + ( codeType == rhs.codeType ) && ( codeSize == rhs.codeSize ) && ( pCode == rhs.pCode ) && + ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( setLayoutCount == rhs.setLayoutCount ) && + ( pSetLayouts == rhs.pSetLayouts ) && ( pushConstantRangeCount == rhs.pushConstantRangeCount ) && + ( pPushConstantRanges == rhs.pPushConstantRanges ) && ( pSpecializationInfo == rhs.pSpecializationInfo ); + } + + bool operator!=( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex; + VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage = {}; + VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType = VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT::eBinary; + size_t codeSize = {}; + const void * pCode = {}; + const char * pName = {}; + uint32_t setLayoutCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; + uint32_t pushConstantRangeCount = {}; + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {}; + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {}; + }; + + template <> + struct CppType + { + using Type = ShaderCreateInfoEXT; + }; + + // wrapper struct for struct VkShaderModuleCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderModuleCreateInfo.html + struct ShaderModuleCreateInfo + { + using NativeType = VkShaderModuleCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {}, + size_t codeSize_ = {}, + const uint32_t * pCode_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , codeSize{ codeSize_ } + , pCode{ pCode_ } + { + } + + VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderModuleCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ 
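    // --- Illustrative usage sketch (editorial aside, not part of the generated header) ---
    // Building a ShaderCreateInfoEXT (VK_EXT_shader_object) from a SPIR-V blob using the
    // setters defined above. 'spirv', 'setLayout' and loadSpirv() are hypothetical
    // application-side names.
    //
    //   std::vector<uint32_t> spirv = loadSpirv( "triangle.vert.spv" );
    //   VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT shaderCi;
    //   shaderCi.setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex )
    //           .setNextStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eFragment )
    //           .setCodeType( VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT::eSpirv )
    //           .setCode( spirv )          // fills pCode and codeSize (in bytes) together
    //           .setPName( "main" )
    //           .setSetLayouts( setLayout );
    //   // the filled struct is what the extension's vkCreateShadersEXT call consumes.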
), codeSize( code_.size() * 4 ), pCode( code_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ShaderModuleCreateInfo & operator=( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ShaderModuleCreateInfo & operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT + { + codeSize = codeSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setPCode( const uint32_t * pCode_ ) VULKAN_HPP_NOEXCEPT + { + pCode = pCode_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShaderModuleCreateInfo & setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_ ) VULKAN_HPP_NOEXCEPT + { + codeSize = code_.size() * 4; + pCode = code_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkShaderModuleCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderModuleCreateInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkShaderModuleCreateInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, codeSize, pCode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderModuleCreateInfo const & ) const = default; +#else + bool operator==( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( codeSize == rhs.codeSize ) && ( pCode == rhs.pCode ); +# endif + } + + bool operator!=( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {}; + size_t codeSize = {}; + const uint32_t * pCode = {}; + }; + + template <> + struct CppType + { + using Type = ShaderModuleCreateInfo; + }; + + // wrapper struct for struct VkShaderModuleIdentifierEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderModuleIdentifierEXT.html + struct ShaderModuleIdentifierEXT + { + using NativeType = VkShaderModuleIdentifierEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleIdentifierEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
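    // --- Illustrative usage sketch (editorial aside, not part of the generated header) ---
    // The ArrayProxy constructor / setCode() above accept the SPIR-V words directly and derive
    // codeSize in bytes (word count * 4), so shader-module creation reduces to:
    //
    //   std::vector<uint32_t> spirv = loadSpirv( "triangle.frag.spv" );   // hypothetical loader
    //   VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo moduleCi( {}, spirv );
    //   VULKAN_HPP_NAMESPACE::ShaderModule shaderModule = device.createShaderModule( moduleCi );
    //   // 'device' is assumed to be an already-created VULKAN_HPP_NAMESPACE::Device.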
VULKAN_HPP_CONSTEXPR_14 ShaderModuleIdentifierEXT( uint32_t identifierSize_ = {}, + std::array const & identifier_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , identifierSize{ identifierSize_ } + , identifier{ identifier_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 ShaderModuleIdentifierEXT( ShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderModuleIdentifierEXT( VkShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderModuleIdentifierEXT( *reinterpret_cast( &rhs ) ) + { + } + + ShaderModuleIdentifierEXT & operator=( ShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ShaderModuleIdentifierEXT & operator=( VkShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkShaderModuleIdentifierEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderModuleIdentifierEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderModuleIdentifierEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkShaderModuleIdentifierEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, identifierSize, identifier ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = identifierSize <=> rhs.identifierSize; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < identifierSize; ++i ) + { + if ( auto cmp = identifier[i] <=> rhs.identifier[i]; cmp != 0 ) + return cmp; + } + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( identifierSize == rhs.identifierSize ) && + ( memcmp( identifier, rhs.identifier, identifierSize * sizeof( uint8_t ) ) == 0 ); + } + + bool operator!=( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleIdentifierEXT; + void * pNext = {}; + uint32_t identifierSize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D identifier = {}; + }; + + template <> + struct CppType + { + using Type = ShaderModuleIdentifierEXT; + }; + + // wrapper struct for struct VkShaderModuleValidationCacheCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderModuleValidationCacheCreateInfoEXT.html + struct ShaderModuleValidationCacheCreateInfoEXT + { + using NativeType = VkShaderModuleValidationCacheCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {}, + const 
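    // --- Illustrative usage sketch (editorial aside, not part of the generated header) ---
    // ShaderModuleIdentifierEXT is an output structure of VK_EXT_shader_module_identifier; the
    // application queries it for an existing module, assuming 'device' and 'shaderModule' exist
    // and the extension is enabled:
    //
    //   VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT id =
    //       device.getShaderModuleIdentifierEXT( shaderModule );
    //   // only the first id.identifierSize bytes of id.identifier are meaningful, which is
    //   // why operator==() above compares exactly that many bytes with memcmp.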
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , validationCache{ validationCache_ } + { + } + + VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderModuleValidationCacheCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ShaderModuleValidationCacheCreateInfoEXT & operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ShaderModuleValidationCacheCreateInfoEXT & operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & + setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT + { + validationCache = validationCache_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkShaderModuleValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderModuleValidationCacheCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkShaderModuleValidationCacheCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, validationCache ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const & ) const = default; +#else + bool operator==( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( validationCache == rhs.validationCache ); +# endif + } + + bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {}; + }; + + template <> + struct CppType + { + using Type = ShaderModuleValidationCacheCreateInfoEXT; + }; + + // wrapper struct for struct VkShaderResourceUsageAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderResourceUsageAMD.html + struct ShaderResourceUsageAMD + { + using NativeType = VkShaderResourceUsageAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( uint32_t numUsedVgprs_ = {}, + uint32_t numUsedSgprs_ = {}, + uint32_t ldsSizePerLocalWorkGroup_ = {}, + size_t ldsUsageSizeInBytes_ = {}, + size_t 
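    // --- Illustrative usage sketch (editorial aside, not part of the generated header) ---
    // ShaderModuleValidationCacheCreateInfoEXT is chained into a ShaderModuleCreateInfo via
    // pNext. 'validationCache' is a hypothetical, previously created
    // VULKAN_HPP_NAMESPACE::ValidationCacheEXT, and 'moduleCi' is the create info from the
    // ShaderModuleCreateInfo sketch further above:
    //
    //   VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT cacheInfo;
    //   cacheInfo.setValidationCache( validationCache );
    //   moduleCi.setPNext( &cacheInfo );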
scratchMemUsageInBytes_ = {} ) VULKAN_HPP_NOEXCEPT + : numUsedVgprs{ numUsedVgprs_ } + , numUsedSgprs{ numUsedSgprs_ } + , ldsSizePerLocalWorkGroup{ ldsSizePerLocalWorkGroup_ } + , ldsUsageSizeInBytes{ ldsUsageSizeInBytes_ } + , scratchMemUsageInBytes{ scratchMemUsageInBytes_ } + { + } + + VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderResourceUsageAMD( *reinterpret_cast( &rhs ) ) + { + } + + ShaderResourceUsageAMD & operator=( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ShaderResourceUsageAMD & operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkShaderResourceUsageAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderResourceUsageAMD const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkShaderResourceUsageAMD *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( numUsedVgprs, numUsedSgprs, ldsSizePerLocalWorkGroup, ldsUsageSizeInBytes, scratchMemUsageInBytes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderResourceUsageAMD const & ) const = default; +#else + bool operator==( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( numUsedVgprs == rhs.numUsedVgprs ) && ( numUsedSgprs == rhs.numUsedSgprs ) && ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup ) && + ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes ) && ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes ); +# endif + } + + bool operator!=( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t numUsedVgprs = {}; + uint32_t numUsedSgprs = {}; + uint32_t ldsSizePerLocalWorkGroup = {}; + size_t ldsUsageSizeInBytes = {}; + size_t scratchMemUsageInBytes = {}; + }; + + // wrapper struct for struct VkShaderStatisticsInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkShaderStatisticsInfoAMD.html + struct ShaderStatisticsInfoAMD + { + using NativeType = VkShaderStatisticsInfoAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {}, + VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {}, + uint32_t numPhysicalVgprs_ = {}, + uint32_t numPhysicalSgprs_ = {}, + uint32_t numAvailableVgprs_ = {}, + uint32_t numAvailableSgprs_ = {}, + std::array const & computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderStageMask{ shaderStageMask_ } + , resourceUsage{ resourceUsage_ } + , numPhysicalVgprs{ numPhysicalVgprs_ } + , numPhysicalSgprs{ numPhysicalSgprs_ } + , numAvailableVgprs{ numAvailableVgprs_ } + , numAvailableSgprs{ numAvailableSgprs_ } + , computeWorkGroupSize{ computeWorkGroupSize_ } + { + } + + 
VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderStatisticsInfoAMD( *reinterpret_cast( &rhs ) ) + { + } + + ShaderStatisticsInfoAMD & operator=( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ShaderStatisticsInfoAMD & operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkShaderStatisticsInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderStatisticsInfoAMD const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkShaderStatisticsInfoAMD *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( shaderStageMask, resourceUsage, numPhysicalVgprs, numPhysicalSgprs, numAvailableVgprs, numAvailableSgprs, computeWorkGroupSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderStatisticsInfoAMD const & ) const = default; +#else + bool operator==( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( shaderStageMask == rhs.shaderStageMask ) && ( resourceUsage == rhs.resourceUsage ) && ( numPhysicalVgprs == rhs.numPhysicalVgprs ) && + ( numPhysicalSgprs == rhs.numPhysicalSgprs ) && ( numAvailableVgprs == rhs.numAvailableVgprs ) && ( numAvailableSgprs == rhs.numAvailableSgprs ) && + ( computeWorkGroupSize == rhs.computeWorkGroupSize ); +# endif + } + + bool operator!=( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {}; + VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage = {}; + uint32_t numPhysicalVgprs = {}; + uint32_t numPhysicalSgprs = {}; + uint32_t numAvailableVgprs = {}; + uint32_t numAvailableSgprs = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D computeWorkGroupSize = {}; + }; + + // wrapper struct for struct VkSharedPresentSurfaceCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSharedPresentSurfaceCapabilitiesKHR.html + struct SharedPresentSurfaceCapabilitiesKHR + { + using NativeType = VkSharedPresentSurfaceCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , sharedPresentSupportedUsageFlags{ sharedPresentSupportedUsageFlags_ } + { + } + + VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SharedPresentSurfaceCapabilitiesKHR( 
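    // --- Illustrative usage sketch (editorial aside, not part of the generated header) ---
    // ShaderResourceUsageAMD and ShaderStatisticsInfoAMD are pure output structures filled by
    // the VK_AMD_shader_info query (info type ShaderInfoTypeAMD::eStatistics); the application
    // only reads the members, e.g. given a 'stats' object returned for a pipeline stage:
    //
    //   uint32_t usedVgprs = stats.resourceUsage.numUsedVgprs;
    //   uint32_t usedSgprs = stats.resourceUsage.numUsedSgprs;
    //   bool     vgprBound = ( usedVgprs == stats.numAvailableVgprs );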
VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SharedPresentSurfaceCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + SharedPresentSurfaceCapabilitiesKHR & operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SharedPresentSurfaceCapabilitiesKHR & operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSharedPresentSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSharedPresentSurfaceCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSharedPresentSurfaceCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, sharedPresentSupportedUsageFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const & ) const = default; +#else + bool operator==( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags ); +# endif + } + + bool operator!=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {}; + }; + + template <> + struct CppType + { + using Type = SharedPresentSurfaceCapabilitiesKHR; + }; + + // wrapper struct for struct VkSparseImageFormatProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageFormatProperties.html + struct SparseImageFormatProperties + { + using NativeType = VkSparseImageFormatProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, + VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask{ aspectMask_ } + , imageGranularity{ imageGranularity_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageFormatProperties( *reinterpret_cast( &rhs ) ) + { + } + + SparseImageFormatProperties & operator=( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SparseImageFormatProperties & operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSparseImageFormatProperties const &() const 
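    // --- Illustrative usage sketch (editorial aside, not part of the generated header) ---
    // SharedPresentSurfaceCapabilitiesKHR is an output structure chained behind
    // SurfaceCapabilities2KHR when querying a surface that supports shared present modes
    // (VK_KHR_shared_presentable_image):
    //
    //   VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR sharedCaps;
    //   VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR             caps;
    //   caps.pNext = &sharedCaps;
    //   // after vkGetPhysicalDeviceSurfaceCapabilities2KHR fills the chain,
    //   // sharedCaps.sharedPresentSupportedUsageFlags lists the usable image usages.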
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageFormatProperties const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSparseImageFormatProperties *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( aspectMask, imageGranularity, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageFormatProperties const & ) const = default; +#else + bool operator==( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( aspectMask == rhs.aspectMask ) && ( imageGranularity == rhs.imageGranularity ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {}; + VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {}; + }; + + // wrapper struct for struct VkSparseImageFormatProperties2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageFormatProperties2.html + struct SparseImageFormatProperties2 + { + using NativeType = VkSparseImageFormatProperties2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageFormatProperties2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , properties{ properties_ } + { + } + + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageFormatProperties2( *reinterpret_cast( &rhs ) ) + { + } + + SparseImageFormatProperties2 & operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SparseImageFormatProperties2 & operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSparseImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageFormatProperties2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSparseImageFormatProperties2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, properties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
SparseImageFormatProperties2 const & ) const = default; +#else + bool operator==( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties ); +# endif + } + + bool operator!=( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {}; + }; + + template <> + struct CppType + { + using Type = SparseImageFormatProperties2; + }; + + using SparseImageFormatProperties2KHR = SparseImageFormatProperties2; + + // wrapper struct for struct VkSparseImageMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageMemoryRequirements.html + struct SparseImageMemoryRequirements + { + using NativeType = VkSparseImageMemoryRequirements; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {}, + uint32_t imageMipTailFirstLod_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {} ) VULKAN_HPP_NOEXCEPT + : formatProperties{ formatProperties_ } + , imageMipTailFirstLod{ imageMipTailFirstLod_ } + , imageMipTailSize{ imageMipTailSize_ } + , imageMipTailOffset{ imageMipTailOffset_ } + , imageMipTailStride{ imageMipTailStride_ } + { + } + + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageMemoryRequirements( *reinterpret_cast( &rhs ) ) + { + } + + SparseImageMemoryRequirements & operator=( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SparseImageMemoryRequirements & operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSparseImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageMemoryRequirements const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSparseImageMemoryRequirements *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( formatProperties, imageMipTailFirstLod, imageMipTailSize, imageMipTailOffset, imageMipTailStride ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryRequirements const & ) const = default; +#else + bool operator==( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( 
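    // --- Illustrative usage sketch (editorial aside, not part of the generated header) ---
    // SparseImageFormatProperties(2) come back from the physical-device sparse-format query;
    // the enhanced C++ overload returns one entry per supported aspect, assuming
    // 'physicalDevice' and a filled 'formatInfo' exist:
    //
    //   VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 formatInfo;  // format/type/samples/usage/tiling
    //   std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2> props =
    //       physicalDevice.getSparseImageFormatProperties2( formatInfo );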
formatProperties == rhs.formatProperties ) && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod ) && + ( imageMipTailSize == rhs.imageMipTailSize ) && ( imageMipTailOffset == rhs.imageMipTailOffset ) && + ( imageMipTailStride == rhs.imageMipTailStride ); +# endif + } + + bool operator!=( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {}; + uint32_t imageMipTailFirstLod = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {}; + }; + + // wrapper struct for struct VkSparseImageMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageMemoryRequirements2.html + struct SparseImageMemoryRequirements2 + { + using NativeType = VkSparseImageMemoryRequirements2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageMemoryRequirements2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryRequirements{ memoryRequirements_ } + { + } + + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageMemoryRequirements2( *reinterpret_cast( &rhs ) ) + { + } + + SparseImageMemoryRequirements2 & operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SparseImageMemoryRequirements2 & operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSparseImageMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageMemoryRequirements2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSparseImageMemoryRequirements2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryRequirements ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryRequirements2 const & ) const = default; +#else + bool operator==( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements ); +# endif + } + + bool operator!=( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2; + void * pNext = {}; 
+ VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {}; + }; + + template <> + struct CppType + { + using Type = SparseImageMemoryRequirements2; + }; + + using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2; + +#if defined( VK_USE_PLATFORM_GGP ) + // wrapper struct for struct VkStreamDescriptorSurfaceCreateInfoGGP, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkStreamDescriptorSurfaceCreateInfoGGP.html + struct StreamDescriptorSurfaceCreateInfoGGP + { + using NativeType = VkStreamDescriptorSurfaceCreateInfoGGP; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, + GgpStreamDescriptor streamDescriptor_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , streamDescriptor{ streamDescriptor_ } + { + } + + VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT + : StreamDescriptorSurfaceCreateInfoGGP( *reinterpret_cast( &rhs ) ) + { + } + + StreamDescriptorSurfaceCreateInfoGGP & operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + StreamDescriptorSurfaceCreateInfoGGP & operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & + setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT + { + streamDescriptor = streamDescriptor_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkStreamDescriptorSurfaceCreateInfoGGP const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkStreamDescriptorSurfaceCreateInfoGGP const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkStreamDescriptorSurfaceCreateInfoGGP *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, streamDescriptor ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto 
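    // --- Illustrative usage sketch (editorial aside, not part of the generated header) ---
    // SparseImageMemoryRequirements2 is the per-image counterpart, again returned as a vector
    // (one element per aspect) by the enhanced overload, assuming 'device' and 'sparseImage'
    // exist:
    //
    //   VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 reqInfo( sparseImage );
    //   std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> reqs =
    //       device.getImageSparseMemoryRequirements2( reqInfo );
    //   // reqs[i].memoryRequirements.imageMipTailSize etc. drive the sparse binding layout.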
cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ) == 0 ); + } + + bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {}; + GgpStreamDescriptor streamDescriptor = {}; + }; + + template <> + struct CppType + { + using Type = StreamDescriptorSurfaceCreateInfoGGP; + }; +#endif /*VK_USE_PLATFORM_GGP*/ + + // wrapper struct for struct VkSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubmitInfo.html + struct SubmitInfo + { + using NativeType = VkSubmitInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubmitInfo( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ = {}, + uint32_t commandBufferCount_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , waitSemaphoreCount{ waitSemaphoreCount_ } + , pWaitSemaphores{ pWaitSemaphores_ } + , pWaitDstStageMask{ pWaitDstStageMask_ } + , commandBufferCount{ commandBufferCount_ } + , pCommandBuffers{ pCommandBuffers_ } + , signalSemaphoreCount{ signalSemaphoreCount_ } + , pSignalSemaphores{ pSignalSemaphores_ } + { + } + + VULKAN_HPP_CONSTEXPR SubmitInfo( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubmitInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitDstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBuffers_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ) + , pWaitSemaphores( waitSemaphores_.data() ) + , pWaitDstStageMask( waitDstStageMask_.data() ) + , commandBufferCount( static_cast( commandBuffers_.size() ) ) + , pCommandBuffers( commandBuffers_.data() ) + , signalSemaphoreCount( static_cast( signalSemaphores_.size() ) ) + , pSignalSemaphores( signalSemaphores_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( waitSemaphores_.size() == waitDstStageMask_.size() ); +# 
else + if ( waitSemaphores_.size() != waitDstStageMask_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubmitInfo::SubmitInfo: waitSemaphores_.size() != waitDstStageMask_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubmitInfo & operator=( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubmitInfo & operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & + setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + pWaitDstStageMask = pWaitDstStageMask_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & setWaitDstStageMask( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitDstStageMask_ ) + VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitDstStageMask_.size() ); + pWaitDstStageMask = waitDstStageMask_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = commandBufferCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT + { + pCommandBuffers = pCommandBuffers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & + setCommandBuffers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBuffers_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = static_cast( commandBuffers_.size() ); + pCommandBuffers = commandBuffers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphores = pSignalSemaphores_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & + setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = static_cast( signalSemaphores_.size() ); + pSignalSemaphores = signalSemaphores_.data(); + return 
*this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubmitInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSubmitInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, waitSemaphoreCount, pWaitSemaphores, pWaitDstStageMask, commandBufferCount, pCommandBuffers, signalSemaphoreCount, pSignalSemaphores ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubmitInfo const & ) const = default; +#else + bool operator==( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) && ( commandBufferCount == rhs.commandBufferCount ) && + ( pCommandBuffers == rhs.pCommandBuffers ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && + ( pSignalSemaphores == rhs.pSignalSemaphores ); +# endif + } + + bool operator!=( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask = {}; + uint32_t commandBufferCount = {}; + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers = {}; + uint32_t signalSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {}; + }; + + template <> + struct CppType + { + using Type = SubmitInfo; + }; + + // wrapper struct for struct VkSubmitInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubmitInfo2.html + struct SubmitInfo2 + { + using NativeType = VkSubmitInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubmitInfo2( VULKAN_HPP_NAMESPACE::SubmitFlags flags_ = {}, + uint32_t waitSemaphoreInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ = {}, + uint32_t commandBufferInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ = {}, + uint32_t signalSemaphoreInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , waitSemaphoreInfoCount{ waitSemaphoreInfoCount_ } + , pWaitSemaphoreInfos{ pWaitSemaphoreInfos_ } + , commandBufferInfoCount{ commandBufferInfoCount_ } + , pCommandBufferInfos{ pCommandBufferInfos_ } + , signalSemaphoreInfoCount{ signalSemaphoreInfoCount_ } + , pSignalSemaphoreInfos{ pSignalSemaphoreInfos_ } + { + } + + VULKAN_HPP_CONSTEXPR 
SubmitInfo2( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubmitInfo2( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubmitInfo2( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2( VULKAN_HPP_NAMESPACE::SubmitFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreInfos_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferInfos_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreInfos_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , waitSemaphoreInfoCount( static_cast( waitSemaphoreInfos_.size() ) ) + , pWaitSemaphoreInfos( waitSemaphoreInfos_.data() ) + , commandBufferInfoCount( static_cast( commandBufferInfos_.size() ) ) + , pCommandBufferInfos( commandBufferInfos_.data() ) + , signalSemaphoreInfoCount( static_cast( signalSemaphoreInfos_.size() ) ) + , pSignalSemaphoreInfos( signalSemaphoreInfos_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubmitInfo2 & operator=( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubmitInfo2 & operator=( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setFlags( VULKAN_HPP_NAMESPACE::SubmitFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setWaitSemaphoreInfoCount( uint32_t waitSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreInfoCount = waitSemaphoreInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPWaitSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphoreInfos = pWaitSemaphoreInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2 & setWaitSemaphoreInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreInfoCount = static_cast( waitSemaphoreInfos_.size() ); + pWaitSemaphoreInfos = waitSemaphoreInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setCommandBufferInfoCount( uint32_t commandBufferInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferInfoCount = commandBufferInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & + setPCommandBufferInfos( const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ ) VULKAN_HPP_NOEXCEPT + { + pCommandBufferInfos = pCommandBufferInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2 & setCommandBufferInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferInfos_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferInfoCount = static_cast( commandBufferInfos_.size() ); + pCommandBufferInfos = commandBufferInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setSignalSemaphoreInfoCount( uint32_t signalSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreInfoCount = signalSemaphoreInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
SubmitInfo2 & + setPSignalSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphoreInfos = pSignalSemaphoreInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2 & setSignalSemaphoreInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreInfoCount = static_cast( signalSemaphoreInfos_.size() ); + pSignalSemaphoreInfos = signalSemaphoreInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSubmitInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubmitInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubmitInfo2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSubmitInfo2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + waitSemaphoreInfoCount, + pWaitSemaphoreInfos, + commandBufferInfoCount, + pCommandBufferInfos, + signalSemaphoreInfoCount, + pSignalSemaphoreInfos ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubmitInfo2 const & ) const = default; +#else + bool operator==( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( waitSemaphoreInfoCount == rhs.waitSemaphoreInfoCount ) && + ( pWaitSemaphoreInfos == rhs.pWaitSemaphoreInfos ) && ( commandBufferInfoCount == rhs.commandBufferInfoCount ) && + ( pCommandBufferInfos == rhs.pCommandBufferInfos ) && ( signalSemaphoreInfoCount == rhs.signalSemaphoreInfoCount ) && + ( pSignalSemaphoreInfos == rhs.pSignalSemaphoreInfos ); +# endif + } + + bool operator!=( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SubmitFlags flags = {}; + uint32_t waitSemaphoreInfoCount = {}; + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos = {}; + uint32_t commandBufferInfoCount = {}; + const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos = {}; + uint32_t signalSemaphoreInfoCount = {}; + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos = {}; + }; + + template <> + struct CppType + { + using Type = SubmitInfo2; + }; + + using SubmitInfo2KHR = SubmitInfo2; + + // wrapper struct for struct VkSubpassBeginInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassBeginInfo.html + struct SubpassBeginInfo + { + using NativeType = VkSubpassBeginInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : 
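// Illustrative sketch (not part of the generated header): one way the SubmitInfo2 wrapper
// above might be used. It relies on the enhanced-mode ArrayProxy constructor shown above and
// on vk::Queue::submit2; the handles ( queue, cmdBuffer, waitSem, signalSem, fence ) are
// assumed to already exist and be valid.
//
//   vk::SemaphoreSubmitInfo     waitInfo( waitSem, 0, vk::PipelineStageFlagBits2::eColorAttachmentOutput );
//   vk::SemaphoreSubmitInfo     signalInfo( signalSem, 0, vk::PipelineStageFlagBits2::eAllCommands );
//   vk::CommandBufferSubmitInfo cmdInfo( cmdBuffer );
//
//   vk::SubmitInfo2 submitInfo( {}, waitInfo, cmdInfo, signalInfo );  // flags, wait infos, command buffers, signal infos
//   queue.submit2( submitInfo, fence );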
pNext{ pNext_ } + , contents{ contents_ } + { + } + + VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassBeginInfo( *reinterpret_cast( &rhs ) ) {} + + SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT + { + contents = contents_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSubpassBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassBeginInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSubpassBeginInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, contents ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassBeginInfo const & ) const = default; +#else + bool operator==( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( contents == rhs.contents ); +# endif + } + + bool operator!=( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline; + }; + + template <> + struct CppType + { + using Type = SubpassBeginInfo; + }; + + using SubpassBeginInfoKHR = SubpassBeginInfo; + + // wrapper struct for struct VkSubpassDescriptionDepthStencilResolve, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassDescriptionDepthStencilResolve.html + struct SubpassDescriptionDepthStencilResolve + { + using NativeType = VkSubpassDescriptionDepthStencilResolve; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescriptionDepthStencilResolve; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SubpassDescriptionDepthStencilResolve( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , depthResolveMode{ 
depthResolveMode_ } + , stencilResolveMode{ stencilResolveMode_ } + , pDepthStencilResolveAttachment{ pDepthStencilResolveAttachment_ } + { + } + + VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassDescriptionDepthStencilResolve( *reinterpret_cast( &rhs ) ) + { + } + + SubpassDescriptionDepthStencilResolve & operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubpassDescriptionDepthStencilResolve & operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & + setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT + { + depthResolveMode = depthResolveMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & + setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT + { + stencilResolveMode = stencilResolveMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & + setPDepthStencilResolveAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSubpassDescriptionDepthStencilResolve const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDescriptionDepthStencilResolve const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSubpassDescriptionDepthStencilResolve *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthResolveMode, stencilResolveMode, pDepthStencilResolveAttachment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDescriptionDepthStencilResolve const & ) const = default; +#else + bool operator==( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthResolveMode == rhs.depthResolveMode ) && + ( stencilResolveMode == rhs.stencilResolveMode ) && ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment ); +# endif + } + + bool operator!=( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve; + const void * 
pNext = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment = {}; + }; + + template <> + struct CppType + { + using Type = SubpassDescriptionDepthStencilResolve; + }; + + using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve; + + // wrapper struct for struct VkSubpassEndInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassEndInfo.html + struct SubpassEndInfo + { + using NativeType = VkSubpassEndInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassEndInfo( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {} + + VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassEndInfo( *reinterpret_cast( &rhs ) ) {} + + SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassEndInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSubpassEndInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassEndInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSubpassEndInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassEndInfo const & ) const = default; +#else + bool operator==( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); +# endif + } + + bool operator!=( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo; + const void * pNext = {}; + }; + + template <> + struct CppType + { + using Type = SubpassEndInfo; + }; + + using SubpassEndInfoKHR = SubpassEndInfo; + + // wrapper struct for struct VkSubpassResolvePerformanceQueryEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassResolvePerformanceQueryEXT.html + struct SubpassResolvePerformanceQueryEXT + { + using NativeType = VkSubpassResolvePerformanceQueryEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType 
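// Illustrative sketch (not part of the generated header): the SubpassBeginInfo and
// SubpassEndInfo wrappers above are what the *2 render-pass commands consume. cmdBuffer and
// renderPassBeginInfo are assumed to exist already.
//
//   vk::SubpassBeginInfo subpassBegin( vk::SubpassContents::eInline );
//   vk::SubpassEndInfo   subpassEnd;
//
//   cmdBuffer.beginRenderPass2( renderPassBeginInfo, subpassBegin );
//   // ... record commands for subpass 0 ...
//   cmdBuffer.nextSubpass2( subpassBegin, subpassEnd );  // only if the render pass declares a second subpass
//   // ... record commands for subpass 1 ...
//   cmdBuffer.endRenderPass2( subpassEnd );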
structureType = StructureType::eSubpassResolvePerformanceQueryEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassResolvePerformanceQueryEXT( VULKAN_HPP_NAMESPACE::Bool32 optimal_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , optimal{ optimal_ } + { + } + + VULKAN_HPP_CONSTEXPR SubpassResolvePerformanceQueryEXT( SubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassResolvePerformanceQueryEXT( VkSubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassResolvePerformanceQueryEXT( *reinterpret_cast( &rhs ) ) + { + } + + SubpassResolvePerformanceQueryEXT & operator=( SubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubpassResolvePerformanceQueryEXT & operator=( VkSubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSubpassResolvePerformanceQueryEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassResolvePerformanceQueryEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassResolvePerformanceQueryEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSubpassResolvePerformanceQueryEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, optimal ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassResolvePerformanceQueryEXT const & ) const = default; +#else + bool operator==( SubpassResolvePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( optimal == rhs.optimal ); +# endif + } + + bool operator!=( SubpassResolvePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassResolvePerformanceQueryEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 optimal = {}; + }; + + template <> + struct CppType + { + using Type = SubpassResolvePerformanceQueryEXT; + }; + + // wrapper struct for struct VkSubpassShadingPipelineCreateInfoHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubpassShadingPipelineCreateInfoHUAWEI.html + struct SubpassShadingPipelineCreateInfoHUAWEI + { + using NativeType = VkSubpassShadingPipelineCreateInfoHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , renderPass{ renderPass_ } + , subpass{ subpass_ } + { + } + + VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + + SubpassShadingPipelineCreateInfoHUAWEI( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassShadingPipelineCreateInfoHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + + SubpassShadingPipelineCreateInfoHUAWEI & operator=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubpassShadingPipelineCreateInfoHUAWEI & operator=( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassShadingPipelineCreateInfoHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassShadingPipelineCreateInfoHUAWEI & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT + { + renderPass = renderPass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassShadingPipelineCreateInfoHUAWEI & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT + { + subpass = subpass_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSubpassShadingPipelineCreateInfoHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassShadingPipelineCreateInfoHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassShadingPipelineCreateInfoHUAWEI const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSubpassShadingPipelineCreateInfoHUAWEI *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, renderPass, subpass ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassShadingPipelineCreateInfoHUAWEI const & ) const = default; +#else + bool operator==( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ); +# endif + } + + bool operator!=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t subpass = {}; + }; + + template <> + struct CppType + { + using Type = SubpassShadingPipelineCreateInfoHUAWEI; + }; + + // wrapper struct for struct VkSubresourceHostMemcpySize, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubresourceHostMemcpySize.html + struct SubresourceHostMemcpySize + { + using NativeType = VkSubresourceHostMemcpySize; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceHostMemcpySize; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubresourceHostMemcpySize( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + 
: pNext{ pNext_ } + , size{ size_ } + { + } + + VULKAN_HPP_CONSTEXPR SubresourceHostMemcpySize( SubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubresourceHostMemcpySize( VkSubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT + : SubresourceHostMemcpySize( *reinterpret_cast( &rhs ) ) + { + } + + SubresourceHostMemcpySize & operator=( SubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubresourceHostMemcpySize & operator=( VkSubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSubresourceHostMemcpySize const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubresourceHostMemcpySize &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubresourceHostMemcpySize const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSubresourceHostMemcpySize *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubresourceHostMemcpySize const & ) const = default; +#else + bool operator==( SubresourceHostMemcpySize const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( size == rhs.size ); +# endif + } + + bool operator!=( SubresourceHostMemcpySize const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubresourceHostMemcpySize; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + template <> + struct CppType + { + using Type = SubresourceHostMemcpySize; + }; + + using SubresourceHostMemcpySizeEXT = SubresourceHostMemcpySize; + + // wrapper struct for struct VkSubresourceLayout2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubresourceLayout2.html + struct SubresourceLayout2 + { + using NativeType = VkSubresourceLayout2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceLayout2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubresourceLayout2( VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , subresourceLayout{ subresourceLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR SubresourceLayout2( SubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubresourceLayout2( VkSubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubresourceLayout2( *reinterpret_cast( &rhs ) ) {} + + SubresourceLayout2 & operator=( SubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubresourceLayout2 & operator=( VkSubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSubresourceLayout2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubresourceLayout2 &() 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubresourceLayout2 const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSubresourceLayout2 *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, subresourceLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubresourceLayout2 const & ) const = default; +#else + bool operator==( SubresourceLayout2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subresourceLayout == rhs.subresourceLayout ); +# endif + } + + bool operator!=( SubresourceLayout2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubresourceLayout2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout = {}; + }; + + template <> + struct CppType + { + using Type = SubresourceLayout2; + }; + + using SubresourceLayout2EXT = SubresourceLayout2; + using SubresourceLayout2KHR = SubresourceLayout2; + + // wrapper struct for struct VkSurfaceCapabilities2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilities2EXT.html + struct SurfaceCapabilities2EXT + { + using NativeType = VkSurfaceCapabilities2EXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2EXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( + uint32_t minImageCount_ = {}, + uint32_t maxImageCount_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, + uint32_t maxImageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minImageCount{ minImageCount_ } + , maxImageCount{ maxImageCount_ } + , currentExtent{ currentExtent_ } + , minImageExtent{ minImageExtent_ } + , maxImageExtent{ maxImageExtent_ } + , maxImageArrayLayers{ maxImageArrayLayers_ } + , supportedTransforms{ supportedTransforms_ } + , currentTransform{ currentTransform_ } + , supportedCompositeAlpha{ supportedCompositeAlpha_ } + , supportedUsageFlags{ supportedUsageFlags_ } + , supportedSurfaceCounters{ supportedSurfaceCounters_ } + { + } + + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilities2EXT( *reinterpret_cast( &rhs ) ) + { + } + + SurfaceCapabilities2EXT & operator=( SurfaceCapabilities2EXT const 
& rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SurfaceCapabilities2EXT & operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSurfaceCapabilities2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilities2EXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSurfaceCapabilities2EXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + minImageCount, + maxImageCount, + currentExtent, + minImageExtent, + maxImageExtent, + maxImageArrayLayers, + supportedTransforms, + currentTransform, + supportedCompositeAlpha, + supportedUsageFlags, + supportedSurfaceCounters ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilities2EXT const & ) const = default; +#else + bool operator==( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) && + ( currentExtent == rhs.currentExtent ) && ( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) && + ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( supportedTransforms == rhs.supportedTransforms ) && + ( currentTransform == rhs.currentTransform ) && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) && + ( supportedUsageFlags == rhs.supportedUsageFlags ) && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters ); +# endif + } + + bool operator!=( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT; + void * pNext = {}; + uint32_t minImageCount = {}; + uint32_t maxImageCount = {}; + VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; + uint32_t maxImageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {}; + }; + + template <> + struct CppType + { + using Type = SurfaceCapabilities2EXT; + }; + + // wrapper struct for struct VkSurfaceCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilitiesKHR.html + struct SurfaceCapabilitiesKHR + { + using NativeType = VkSurfaceCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( + uint32_t minImageCount_ = {}, + uint32_t maxImageCount_ = {}, + 
VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, + uint32_t maxImageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : minImageCount{ minImageCount_ } + , maxImageCount{ maxImageCount_ } + , currentExtent{ currentExtent_ } + , minImageExtent{ minImageExtent_ } + , maxImageExtent{ maxImageExtent_ } + , maxImageArrayLayers{ maxImageArrayLayers_ } + , supportedTransforms{ supportedTransforms_ } + , currentTransform{ currentTransform_ } + , supportedCompositeAlpha{ supportedCompositeAlpha_ } + , supportedUsageFlags{ supportedUsageFlags_ } + { + } + + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + SurfaceCapabilitiesKHR & operator=( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SurfaceCapabilitiesKHR & operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSurfaceCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( minImageCount, + maxImageCount, + currentExtent, + minImageExtent, + maxImageExtent, + maxImageArrayLayers, + supportedTransforms, + currentTransform, + supportedCompositeAlpha, + supportedUsageFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilitiesKHR const & ) const = default; +#else + bool operator==( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) && ( currentExtent == rhs.currentExtent ) && + ( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && + ( supportedTransforms == rhs.supportedTransforms ) && ( currentTransform == rhs.currentTransform ) && + ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) && ( supportedUsageFlags == rhs.supportedUsageFlags ); +# endif + } + + bool operator!=( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t minImageCount = {}; + uint32_t maxImageCount = {}; + VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; + 
VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; + uint32_t maxImageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; + }; + + // wrapper struct for struct VkSurfaceCapabilities2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilities2KHR.html + struct SurfaceCapabilities2KHR + { + using NativeType = VkSurfaceCapabilities2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2KHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , surfaceCapabilities{ surfaceCapabilities_ } + { + } + + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilities2KHR( *reinterpret_cast( &rhs ) ) + { + } + + SurfaceCapabilities2KHR & operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SurfaceCapabilities2KHR & operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSurfaceCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilities2KHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSurfaceCapabilities2KHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, surfaceCapabilities ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilities2KHR const & ) const = default; +#else + bool operator==( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCapabilities == rhs.surfaceCapabilities ); +# endif + } + + bool operator!=( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {}; + }; + + template <> + struct CppType + { + using Type = SurfaceCapabilities2KHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + // wrapper struct for struct VkSurfaceCapabilitiesFullScreenExclusiveEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilitiesFullScreenExclusiveEXT.html + struct 
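// Illustrative sketch (not part of the generated header): typical use of the
// SurfaceCapabilitiesKHR wrapper above when sizing a swapchain. physicalDevice and surface are
// assumed to be valid handles, and the default exception-enabled enhanced mode is assumed.
//
//   vk::SurfaceCapabilitiesKHR caps = physicalDevice.getSurfaceCapabilitiesKHR( surface );
//
//   uint32_t imageCount = caps.minImageCount + 1;
//   if ( ( caps.maxImageCount != 0 ) && ( imageCount > caps.maxImageCount ) )  // 0 means "no upper limit"
//   {
//     imageCount = caps.maxImageCount;
//   }
//   vk::Extent2D extent = caps.currentExtent;  // width/height of 0xFFFFFFFF means the application picks the size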
SurfaceCapabilitiesFullScreenExclusiveEXT + { + using NativeType = VkSurfaceCapabilitiesFullScreenExclusiveEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fullScreenExclusiveSupported{ fullScreenExclusiveSupported_ } + { + } + + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilitiesFullScreenExclusiveEXT( *reinterpret_cast( &rhs ) ) + { + } + + SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSurfaceCapabilitiesFullScreenExclusiveEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fullScreenExclusiveSupported ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const & ) const = default; +# else + bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported ); +# endif + } + + bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported = {}; + }; + + template <> + struct CppType + { + using Type = SurfaceCapabilitiesFullScreenExclusiveEXT; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + // wrapper struct for struct VkSurfaceCapabilitiesPresentBarrierNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceCapabilitiesPresentBarrierNV.html + struct SurfaceCapabilitiesPresentBarrierNV + { + using NativeType = VkSurfaceCapabilitiesPresentBarrierNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eSurfaceCapabilitiesPresentBarrierNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentBarrierNV( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierSupported_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentBarrierSupported{ presentBarrierSupported_ } + { + } + + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentBarrierNV( SurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilitiesPresentBarrierNV( VkSurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilitiesPresentBarrierNV( *reinterpret_cast( &rhs ) ) + { + } + + SurfaceCapabilitiesPresentBarrierNV & operator=( SurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SurfaceCapabilitiesPresentBarrierNV & operator=( VkSurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSurfaceCapabilitiesPresentBarrierNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilitiesPresentBarrierNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilitiesPresentBarrierNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSurfaceCapabilitiesPresentBarrierNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentBarrierSupported ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilitiesPresentBarrierNV const & ) const = default; +#else + bool operator==( SurfaceCapabilitiesPresentBarrierNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentBarrierSupported == rhs.presentBarrierSupported ); +# endif + } + + bool operator!=( SurfaceCapabilitiesPresentBarrierNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesPresentBarrierNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 presentBarrierSupported = {}; + }; + + template <> + struct CppType + { + using Type = SurfaceCapabilitiesPresentBarrierNV; + }; + + // wrapper struct for struct VkSurfaceFormatKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceFormatKHR.html + struct SurfaceFormatKHR + { + using NativeType = VkSurfaceFormatKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT + : format{ format_ } + , colorSpace{ colorSpace_ } + { + } + + VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT : SurfaceFormatKHR( *reinterpret_cast( 
&rhs ) ) {} + + SurfaceFormatKHR & operator=( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SurfaceFormatKHR & operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSurfaceFormatKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceFormatKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSurfaceFormatKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( format, colorSpace ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceFormatKHR const & ) const = default; +#else + bool operator==( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( format == rhs.format ) && ( colorSpace == rhs.colorSpace ); +# endif + } + + bool operator!=( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear; + }; + + // wrapper struct for struct VkSurfaceFormat2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceFormat2KHR.html + struct SurfaceFormat2KHR + { + using NativeType = VkSurfaceFormat2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFormat2KHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , surfaceFormat{ surfaceFormat_ } + { + } + + VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT : SurfaceFormat2KHR( *reinterpret_cast( &rhs ) ) {} + + SurfaceFormat2KHR & operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SurfaceFormat2KHR & operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSurfaceFormat2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceFormat2KHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSurfaceFormat2KHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, surfaceFormat ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
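// Illustrative sketch (not part of the generated header): picking a swapchain format with the
// SurfaceFormatKHR wrapper above; physicalDevice and surface are assumed valid and the default
// exception-enabled enhanced mode is assumed.
//
//   std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
//   vk::SurfaceFormatKHR              chosen  = formats.front();  // fall back to the first reported format
//   for ( auto const & f : formats )
//   {
//     if ( ( f.format == vk::Format::eB8G8R8A8Srgb ) && ( f.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear ) )
//     {
//       chosen = f;
//       break;
//     }
//   }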
SurfaceFormat2KHR const & ) const = default; +#else + bool operator==( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceFormat == rhs.surfaceFormat ); +# endif + } + + bool operator!=( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFormat2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {}; + }; + + template <> + struct CppType + { + using Type = SurfaceFormat2KHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + // wrapper struct for struct VkSurfaceFullScreenExclusiveInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceFullScreenExclusiveInfoEXT.html + struct SurfaceFullScreenExclusiveInfoEXT + { + using NativeType = VkSurfaceFullScreenExclusiveInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveInfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( + VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fullScreenExclusive{ fullScreenExclusive_ } + { + } + + VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceFullScreenExclusiveInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + SurfaceFullScreenExclusiveInfoEXT & operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SurfaceFullScreenExclusiveInfoEXT & operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & + setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT + { + fullScreenExclusive = fullScreenExclusive_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSurfaceFullScreenExclusiveInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceFullScreenExclusiveInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSurfaceFullScreenExclusiveInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fullScreenExclusive ); + } +# endif + +# if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const & ) const = default; +# else + bool operator==( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fullScreenExclusive == rhs.fullScreenExclusive ); +# endif + } + + bool operator!=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault; + }; + + template <> + struct CppType + { + using Type = SurfaceFullScreenExclusiveInfoEXT; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + // wrapper struct for struct VkSurfaceFullScreenExclusiveWin32InfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceFullScreenExclusiveWin32InfoEXT.html + struct SurfaceFullScreenExclusiveWin32InfoEXT + { + using NativeType = VkSurfaceFullScreenExclusiveWin32InfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , hmonitor{ hmonitor_ } + { + } + + VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceFullScreenExclusiveWin32InfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + SurfaceFullScreenExclusiveWin32InfoEXT & operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SurfaceFullScreenExclusiveWin32InfoEXT & operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & setHmonitor( HMONITOR hmonitor_ ) VULKAN_HPP_NOEXCEPT + { + hmonitor = hmonitor_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSurfaceFullScreenExclusiveWin32InfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceFullScreenExclusiveWin32InfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSurfaceFullScreenExclusiveWin32InfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + 
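  // [Editorial usage sketch, not part of the generated header] On Windows, SurfaceFullScreenExclusiveInfoEXT
  // (above) and SurfaceFullScreenExclusiveWin32InfoEXT (being defined here) are chained into
  // SwapchainCreateInfoKHR when VK_EXT_full_screen_exclusive is enabled; assuming an existing
  // "swapchainInfo" and an HMONITOR "hMonitor", one possible chain is:
  //
  //   vk::SurfaceFullScreenExclusiveWin32InfoEXT win32Info{ hMonitor };
  //   vk::SurfaceFullScreenExclusiveInfoEXT      fullScreenInfo{ vk::FullScreenExclusiveEXT::eApplicationControlled,
  //                                                              &win32Info };
  //   swapchainInfo.setPNext( &fullScreenInfo );               // extend the swapchain create info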
std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hmonitor ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const & ) const = default; +# else + bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hmonitor == rhs.hmonitor ); +# endif + } + + bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; + const void * pNext = {}; + HMONITOR hmonitor = {}; + }; + + template <> + struct CppType + { + using Type = SurfaceFullScreenExclusiveWin32InfoEXT; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + // wrapper struct for struct VkSurfacePresentModeCompatibilityEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfacePresentModeCompatibilityEXT.html + struct SurfacePresentModeCompatibilityEXT + { + using NativeType = VkSurfacePresentModeCompatibilityEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfacePresentModeCompatibilityEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfacePresentModeCompatibilityEXT( uint32_t presentModeCount_ = {}, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentModeCount{ presentModeCount_ } + , pPresentModes{ pPresentModes_ } + { + } + + VULKAN_HPP_CONSTEXPR SurfacePresentModeCompatibilityEXT( SurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfacePresentModeCompatibilityEXT( VkSurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfacePresentModeCompatibilityEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SurfacePresentModeCompatibilityEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_, + void * pNext_ = nullptr ) + : pNext( pNext_ ), presentModeCount( static_cast( presentModes_.size() ) ), pPresentModes( presentModes_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SurfacePresentModeCompatibilityEXT & operator=( SurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SurfacePresentModeCompatibilityEXT & operator=( VkSurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeCompatibilityEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeCompatibilityEXT & setPresentModeCount( uint32_t presentModeCount_ ) VULKAN_HPP_NOEXCEPT + { + presentModeCount = presentModeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeCompatibilityEXT & setPPresentModes( VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT + { + pPresentModes = pPresentModes_; + 
return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SurfacePresentModeCompatibilityEXT & + setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_ ) VULKAN_HPP_NOEXCEPT + { + presentModeCount = static_cast( presentModes_.size() ); + pPresentModes = presentModes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSurfacePresentModeCompatibilityEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfacePresentModeCompatibilityEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfacePresentModeCompatibilityEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSurfacePresentModeCompatibilityEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentModeCount, pPresentModes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfacePresentModeCompatibilityEXT const & ) const = default; +#else + bool operator==( SurfacePresentModeCompatibilityEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeCount == rhs.presentModeCount ) && ( pPresentModes == rhs.pPresentModes ); +# endif + } + + bool operator!=( SurfacePresentModeCompatibilityEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfacePresentModeCompatibilityEXT; + void * pNext = {}; + uint32_t presentModeCount = {}; + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {}; + }; + + template <> + struct CppType + { + using Type = SurfacePresentModeCompatibilityEXT; + }; + + // wrapper struct for struct VkSurfacePresentModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfacePresentModeEXT.html + struct SurfacePresentModeEXT + { + using NativeType = VkSurfacePresentModeEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfacePresentModeEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfacePresentModeEXT( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentMode{ presentMode_ } + { + } + + VULKAN_HPP_CONSTEXPR SurfacePresentModeEXT( SurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfacePresentModeEXT( VkSurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfacePresentModeEXT( *reinterpret_cast( &rhs ) ) + { + } + + SurfacePresentModeEXT & operator=( SurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SurfacePresentModeEXT & operator=( VkSurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeEXT & setPNext( 
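  // [Editorial usage sketch, not part of the generated header] With VK_EXT_surface_maintenance1, the
  // SurfacePresentModeCompatibilityEXT wrapper above is an output structure: it is filled by chaining it,
  // together with SurfacePresentModeEXT, into a vkGetPhysicalDeviceSurfaceCapabilities2KHR query. Roughly,
  // assuming "physicalDevice" and "surface":
  //
  //   vk::SurfacePresentModeEXT presentModeInfo{ vk::PresentModeKHR::eFifo };
  //   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo{ surface, &presentModeInfo };
  //   auto chain = physicalDevice.getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR,
  //                                                          vk::SurfacePresentModeCompatibilityEXT>( surfaceInfo );
  //   // with pPresentModes left null, only the count of compatible modes is written back
  //   uint32_t compatibleModeCount = chain.get<vk::SurfacePresentModeCompatibilityEXT>().presentModeCount;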
void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeEXT & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT + { + presentMode = presentMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSurfacePresentModeEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfacePresentModeEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfacePresentModeEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSurfacePresentModeEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentMode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfacePresentModeEXT const & ) const = default; +#else + bool operator==( SurfacePresentModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMode == rhs.presentMode ); +# endif + } + + bool operator!=( SurfacePresentModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfacePresentModeEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate; + }; + + template <> + struct CppType + { + using Type = SurfacePresentModeEXT; + }; + + // wrapper struct for struct VkSurfacePresentScalingCapabilitiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfacePresentScalingCapabilitiesEXT.html + struct SurfacePresentScalingCapabilitiesEXT + { + using NativeType = VkSurfacePresentScalingCapabilitiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfacePresentScalingCapabilitiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfacePresentScalingCapabilitiesEXT( VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT supportedPresentScaling_ = {}, + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityX_ = {}, + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityY_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minScaledImageExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxScaledImageExtent_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , supportedPresentScaling{ supportedPresentScaling_ } + , supportedPresentGravityX{ supportedPresentGravityX_ } + , supportedPresentGravityY{ supportedPresentGravityY_ } + , minScaledImageExtent{ minScaledImageExtent_ } + , maxScaledImageExtent{ maxScaledImageExtent_ } + { + } + + VULKAN_HPP_CONSTEXPR SurfacePresentScalingCapabilitiesEXT( SurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfacePresentScalingCapabilitiesEXT( VkSurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfacePresentScalingCapabilitiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + SurfacePresentScalingCapabilitiesEXT & operator=( 
SurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SurfacePresentScalingCapabilitiesEXT & operator=( VkSurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSurfacePresentScalingCapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfacePresentScalingCapabilitiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfacePresentScalingCapabilitiesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSurfacePresentScalingCapabilitiesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, supportedPresentScaling, supportedPresentGravityX, supportedPresentGravityY, minScaledImageExtent, maxScaledImageExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfacePresentScalingCapabilitiesEXT const & ) const = default; +#else + bool operator==( SurfacePresentScalingCapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedPresentScaling == rhs.supportedPresentScaling ) && + ( supportedPresentGravityX == rhs.supportedPresentGravityX ) && ( supportedPresentGravityY == rhs.supportedPresentGravityY ) && + ( minScaledImageExtent == rhs.minScaledImageExtent ) && ( maxScaledImageExtent == rhs.maxScaledImageExtent ); +# endif + } + + bool operator!=( SurfacePresentScalingCapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfacePresentScalingCapabilitiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT supportedPresentScaling = {}; + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityX = {}; + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityY = {}; + VULKAN_HPP_NAMESPACE::Extent2D minScaledImageExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxScaledImageExtent = {}; + }; + + template <> + struct CppType + { + using Type = SurfacePresentScalingCapabilitiesEXT; + }; + + // wrapper struct for struct VkSurfaceProtectedCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSurfaceProtectedCapabilitiesKHR.html + struct SurfaceProtectedCapabilitiesKHR + { + using NativeType = VkSurfaceProtectedCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceProtectedCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , supportsProtected{ supportsProtected_ } + { + } + + VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR 
const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceProtectedCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + SurfaceProtectedCapabilitiesKHR & operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SurfaceProtectedCapabilitiesKHR & operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSurfaceProtectedCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceProtectedCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSurfaceProtectedCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, supportsProtected ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceProtectedCapabilitiesKHR const & ) const = default; +#else + bool operator==( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportsProtected == rhs.supportsProtected ); +# endif + } + + bool operator!=( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {}; + }; + + template <> + struct CppType + { + using Type = SurfaceProtectedCapabilitiesKHR; + }; + + // wrapper struct for struct VkSwapchainCounterCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainCounterCreateInfoEXT.html + struct SwapchainCounterCreateInfoEXT + { + using NativeType = VkSwapchainCounterCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCounterCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , surfaceCounters{ surfaceCounters_ } + { + } + + VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainCounterCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + SwapchainCounterCreateInfoEXT & operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SwapchainCounterCreateInfoEXT & operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
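  // [Editorial usage sketch, not part of the generated header] SurfaceProtectedCapabilitiesKHR (above) is
  // likewise an output structure chained behind SurfaceCapabilities2KHR; a query could look like this,
  // assuming "physicalDevice" and "surface":
  //
  //   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo{ surface };
  //   auto chain = physicalDevice.getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR,
  //                                                          vk::SurfaceProtectedCapabilitiesKHR>( surfaceInfo );
  //   bool protectedSwapchainSupported = chain.get<vk::SurfaceProtectedCapabilitiesKHR>().supportsProtected;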
SwapchainCounterCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & + setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT + { + surfaceCounters = surfaceCounters_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSwapchainCounterCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainCounterCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSwapchainCounterCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, surfaceCounters ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainCounterCreateInfoEXT const & ) const = default; +#else + bool operator==( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCounters == rhs.surfaceCounters ); +# endif + } + + bool operator!=( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {}; + }; + + template <> + struct CppType + { + using Type = SwapchainCounterCreateInfoEXT; + }; + + // wrapper struct for struct VkSwapchainCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainCreateInfoKHR.html + struct SwapchainCreateInfoKHR + { + using NativeType = VkSwapchainCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, + uint32_t minImageCount_ = {}, + VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear, + VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, + uint32_t imageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, + VULKAN_HPP_NAMESPACE::Bool32 clipped_ 
= {}, + VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , surface{ surface_ } + , minImageCount{ minImageCount_ } + , imageFormat{ imageFormat_ } + , imageColorSpace{ imageColorSpace_ } + , imageExtent{ imageExtent_ } + , imageArrayLayers{ imageArrayLayers_ } + , imageUsage{ imageUsage_ } + , imageSharingMode{ imageSharingMode_ } + , queueFamilyIndexCount{ queueFamilyIndexCount_ } + , pQueueFamilyIndices{ pQueueFamilyIndices_ } + , preTransform{ preTransform_ } + , compositeAlpha{ compositeAlpha_ } + , presentMode{ presentMode_ } + , clipped{ clipped_ } + , oldSwapchain{ oldSwapchain_ } + { + } + + VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface_, + uint32_t minImageCount_, + VULKAN_HPP_NAMESPACE::Format imageFormat_, + VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_, + VULKAN_HPP_NAMESPACE::Extent2D imageExtent_, + uint32_t imageArrayLayers_, + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_, + VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, + VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, + VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , surface( surface_ ) + , minImageCount( minImageCount_ ) + , imageFormat( imageFormat_ ) + , imageColorSpace( imageColorSpace_ ) + , imageExtent( imageExtent_ ) + , imageArrayLayers( imageArrayLayers_ ) + , imageUsage( imageUsage_ ) + , imageSharingMode( imageSharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + , preTransform( preTransform_ ) + , compositeAlpha( compositeAlpha_ ) + , presentMode( presentMode_ ) + , clipped( clipped_ ) + , oldSwapchain( oldSwapchain_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SwapchainCreateInfoKHR & operator=( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SwapchainCreateInfoKHR & operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT + { + 
surface = surface_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT + { + minImageCount = minImageCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT + { + imageFormat = imageFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT + { + imageColorSpace = imageColorSpace_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT + { + imageArrayLayers = imageArrayLayers_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT + { + imageUsage = imageUsage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT + { + imageSharingMode = imageSharingMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainCreateInfoKHR & + setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT + { + preTransform = preTransform_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT + { + compositeAlpha = compositeAlpha_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT + { + presentMode = presentMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT + { + clipped = clipped_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT + { + oldSwapchain = oldSwapchain_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + + operator VkSwapchainCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + surface, + minImageCount, + imageFormat, + imageColorSpace, + imageExtent, + imageArrayLayers, + imageUsage, + imageSharingMode, + queueFamilyIndexCount, + pQueueFamilyIndices, + preTransform, + compositeAlpha, + presentMode, + clipped, + oldSwapchain ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainCreateInfoKHR const & ) const = default; +#else + bool operator==( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( surface == rhs.surface ) && + ( minImageCount == rhs.minImageCount ) && ( imageFormat == rhs.imageFormat ) && ( imageColorSpace == rhs.imageColorSpace ) && + ( imageExtent == rhs.imageExtent ) && ( imageArrayLayers == rhs.imageArrayLayers ) && ( imageUsage == rhs.imageUsage ) && + ( imageSharingMode == rhs.imageSharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && + ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( preTransform == rhs.preTransform ) && ( compositeAlpha == rhs.compositeAlpha ) && + ( presentMode == rhs.presentMode ) && ( clipped == rhs.clipped ) && ( oldSwapchain == rhs.oldSwapchain ); +# endif + } + + bool operator!=( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; + uint32_t minImageCount = {}; + VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear; + VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; + uint32_t imageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {}; + VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque; + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate; + VULKAN_HPP_NAMESPACE::Bool32 clipped = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {}; + }; + + template <> + struct CppType + { + using Type = SwapchainCreateInfoKHR; + }; + + // wrapper struct for struct VkSwapchainDisplayNativeHdrCreateInfoAMD, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainDisplayNativeHdrCreateInfoAMD.html + struct SwapchainDisplayNativeHdrCreateInfoAMD + { + using NativeType = VkSwapchainDisplayNativeHdrCreateInfoAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
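  // [Editorial usage sketch, not part of the generated header] A typical way to fill the SwapchainCreateInfoKHR
  // wrapper defined above and create a swapchain from it, assuming existing "device", "surface",
  // "surfaceFormat", "extent" and "presentMode" values chosen from the surface queries:
  //
  //   vk::SwapchainCreateInfoKHR swapchainInfo{};
  //   swapchainInfo.setSurface( surface )
  //                .setMinImageCount( 3 )
  //                .setImageFormat( surfaceFormat.format )
  //                .setImageColorSpace( surfaceFormat.colorSpace )
  //                .setImageExtent( extent )
  //                .setImageArrayLayers( 1 )
  //                .setImageUsage( vk::ImageUsageFlagBits::eColorAttachment )
  //                .setImageSharingMode( vk::SharingMode::eExclusive )
  //                .setPreTransform( vk::SurfaceTransformFlagBitsKHR::eIdentity )
  //                .setCompositeAlpha( vk::CompositeAlphaFlagBitsKHR::eOpaque )
  //                .setPresentMode( presentMode )
  //                .setClipped( VK_TRUE );
  //   vk::SwapchainKHR swapchain = device.createSwapchainKHR( swapchainInfo );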
StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , localDimmingEnable{ localDimmingEnable_ } + { + } + + VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainDisplayNativeHdrCreateInfoAMD( *reinterpret_cast( &rhs ) ) + { + } + + SwapchainDisplayNativeHdrCreateInfoAMD & operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SwapchainDisplayNativeHdrCreateInfoAMD & operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & + setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT + { + localDimmingEnable = localDimmingEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSwapchainDisplayNativeHdrCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainDisplayNativeHdrCreateInfoAMD const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSwapchainDisplayNativeHdrCreateInfoAMD *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, localDimmingEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const & ) const = default; +#else + bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingEnable == rhs.localDimmingEnable ); +# endif + } + + bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {}; + }; + + template <> + struct CppType + { + using Type = SwapchainDisplayNativeHdrCreateInfoAMD; + }; + + // wrapper struct for struct VkSwapchainLatencyCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainLatencyCreateInfoNV.html + struct SwapchainLatencyCreateInfoNV + { + using NativeType = VkSwapchainLatencyCreateInfoNV; + + static const bool 
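  // [Editorial usage sketch, not part of the generated header] SwapchainDisplayNativeHdrCreateInfoAMD (above)
  // selects the initial local-dimming state at swapchain creation; the state can later be toggled with
  // vkSetLocalDimmingAMD. Assuming "swapchainInfo", "device" and a created "swapchain":
  //
  //   vk::SwapchainDisplayNativeHdrCreateInfoAMD hdrInfo{ VK_TRUE };
  //   swapchainInfo.setPNext( &hdrInfo );                      // request local dimming at creation
  //   // ... later, after the swapchain exists:
  //   device.setLocalDimmingAMD( swapchain, VK_FALSE );        // turn it off again at runtime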
allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainLatencyCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainLatencyCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 latencyModeEnable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , latencyModeEnable{ latencyModeEnable_ } + { + } + + VULKAN_HPP_CONSTEXPR SwapchainLatencyCreateInfoNV( SwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainLatencyCreateInfoNV( VkSwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainLatencyCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + SwapchainLatencyCreateInfoNV & operator=( SwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SwapchainLatencyCreateInfoNV & operator=( VkSwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainLatencyCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainLatencyCreateInfoNV & setLatencyModeEnable( VULKAN_HPP_NAMESPACE::Bool32 latencyModeEnable_ ) VULKAN_HPP_NOEXCEPT + { + latencyModeEnable = latencyModeEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSwapchainLatencyCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainLatencyCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainLatencyCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSwapchainLatencyCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, latencyModeEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainLatencyCreateInfoNV const & ) const = default; +#else + bool operator==( SwapchainLatencyCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( latencyModeEnable == rhs.latencyModeEnable ); +# endif + } + + bool operator!=( SwapchainLatencyCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainLatencyCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 latencyModeEnable = {}; + }; + + template <> + struct CppType + { + using Type = SwapchainLatencyCreateInfoNV; + }; + + // wrapper struct for struct VkSwapchainPresentBarrierCreateInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainPresentBarrierCreateInfoNV.html + struct SwapchainPresentBarrierCreateInfoNV + { + using NativeType = VkSwapchainPresentBarrierCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
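  // [Editorial usage sketch, not part of the generated header] SwapchainLatencyCreateInfoNV (above, from
  // VK_NV_low_latency2) opts a swapchain into latency tracking by extending SwapchainCreateInfoKHR,
  // e.g. assuming an existing "swapchainInfo":
  //
  //   vk::SwapchainLatencyCreateInfoNV latencyInfo{ VK_TRUE };
  //   swapchainInfo.setPNext( &latencyInfo );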
StructureType::eSwapchainPresentBarrierCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainPresentBarrierCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentBarrierEnable{ presentBarrierEnable_ } + { + } + + VULKAN_HPP_CONSTEXPR SwapchainPresentBarrierCreateInfoNV( SwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainPresentBarrierCreateInfoNV( VkSwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainPresentBarrierCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + SwapchainPresentBarrierCreateInfoNV & operator=( SwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SwapchainPresentBarrierCreateInfoNV & operator=( VkSwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentBarrierCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentBarrierCreateInfoNV & + setPresentBarrierEnable( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable_ ) VULKAN_HPP_NOEXCEPT + { + presentBarrierEnable = presentBarrierEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSwapchainPresentBarrierCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainPresentBarrierCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainPresentBarrierCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSwapchainPresentBarrierCreateInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentBarrierEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainPresentBarrierCreateInfoNV const & ) const = default; +#else + bool operator==( SwapchainPresentBarrierCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentBarrierEnable == rhs.presentBarrierEnable ); +# endif + } + + bool operator!=( SwapchainPresentBarrierCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentBarrierCreateInfoNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable = {}; + }; + + template <> + struct CppType + { + using Type = SwapchainPresentBarrierCreateInfoNV; + }; + + // wrapper struct for struct VkSwapchainPresentFenceInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainPresentFenceInfoEXT.html + struct SwapchainPresentFenceInfoEXT + { + using NativeType = VkSwapchainPresentFenceInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
StructureType structureType = StructureType::eSwapchainPresentFenceInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainPresentFenceInfoEXT( uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::Fence * pFences_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , swapchainCount{ swapchainCount_ } + , pFences{ pFences_ } + { + } + + VULKAN_HPP_CONSTEXPR SwapchainPresentFenceInfoEXT( SwapchainPresentFenceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainPresentFenceInfoEXT( VkSwapchainPresentFenceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainPresentFenceInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainPresentFenceInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fences_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchainCount( static_cast( fences_.size() ) ), pFences( fences_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SwapchainPresentFenceInfoEXT & operator=( SwapchainPresentFenceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SwapchainPresentFenceInfoEXT & operator=( VkSwapchainPresentFenceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentFenceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentFenceInfoEXT & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentFenceInfoEXT & setPFences( const VULKAN_HPP_NAMESPACE::Fence * pFences_ ) VULKAN_HPP_NOEXCEPT + { + pFences = pFences_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainPresentFenceInfoEXT & + setFences( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fences_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( fences_.size() ); + pFences = fences_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSwapchainPresentFenceInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainPresentFenceInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainPresentFenceInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSwapchainPresentFenceInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchainCount, pFences ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainPresentFenceInfoEXT const & ) const = default; +#else + bool operator==( SwapchainPresentFenceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pFences == rhs.pFences ); +# endif + } + + bool 
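  // [Editorial usage sketch, not part of the generated header] With VK_EXT_swapchain_maintenance1, this
  // SwapchainPresentFenceInfoEXT structure is chained into PresentInfoKHR so that a fence is signalled once
  // the present operation has released its swapchain resources. Assuming a "presentFence" and a filled
  // "presentInfo":
  //
  //   vk::SwapchainPresentFenceInfoEXT presentFenceInfo{};
  //   presentFenceInfo.setFences( presentFence );              // one fence per swapchain in the present
  //   presentInfo.setPNext( &presentFenceInfo );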
operator!=( SwapchainPresentFenceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentFenceInfoEXT; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::Fence * pFences = {}; + }; + + template <> + struct CppType + { + using Type = SwapchainPresentFenceInfoEXT; + }; + + // wrapper struct for struct VkSwapchainPresentModeInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainPresentModeInfoEXT.html + struct SwapchainPresentModeInfoEXT + { + using NativeType = VkSwapchainPresentModeInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentModeInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainPresentModeInfoEXT( uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , swapchainCount{ swapchainCount_ } + , pPresentModes{ pPresentModes_ } + { + } + + VULKAN_HPP_CONSTEXPR SwapchainPresentModeInfoEXT( SwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainPresentModeInfoEXT( VkSwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainPresentModeInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainPresentModeInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchainCount( static_cast( presentModes_.size() ) ), pPresentModes( presentModes_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SwapchainPresentModeInfoEXT & operator=( SwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SwapchainPresentModeInfoEXT & operator=( VkSwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModeInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModeInfoEXT & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModeInfoEXT & setPPresentModes( const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT + { + pPresentModes = pPresentModes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainPresentModeInfoEXT & + setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( presentModes_.size() ); + pPresentModes = presentModes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSwapchainPresentModeInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainPresentModeInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainPresentModeInfoEXT const *() const 
VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSwapchainPresentModeInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchainCount, pPresentModes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainPresentModeInfoEXT const & ) const = default; +#else + bool operator==( SwapchainPresentModeInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pPresentModes == rhs.pPresentModes ); +# endif + } + + bool operator!=( SwapchainPresentModeInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentModeInfoEXT; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {}; + }; + + template <> + struct CppType + { + using Type = SwapchainPresentModeInfoEXT; + }; + + // wrapper struct for struct VkSwapchainPresentModesCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainPresentModesCreateInfoEXT.html + struct SwapchainPresentModesCreateInfoEXT + { + using NativeType = VkSwapchainPresentModesCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentModesCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainPresentModesCreateInfoEXT( uint32_t presentModeCount_ = {}, + const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentModeCount{ presentModeCount_ } + , pPresentModes{ pPresentModes_ } + { + } + + VULKAN_HPP_CONSTEXPR SwapchainPresentModesCreateInfoEXT( SwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainPresentModesCreateInfoEXT( VkSwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainPresentModesCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainPresentModesCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), presentModeCount( static_cast( presentModes_.size() ) ), pPresentModes( presentModes_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SwapchainPresentModesCreateInfoEXT & operator=( SwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SwapchainPresentModesCreateInfoEXT & operator=( VkSwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModesCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModesCreateInfoEXT 
& setPresentModeCount( uint32_t presentModeCount_ ) VULKAN_HPP_NOEXCEPT + { + presentModeCount = presentModeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModesCreateInfoEXT & + setPPresentModes( const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT + { + pPresentModes = pPresentModes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainPresentModesCreateInfoEXT & + setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_ ) VULKAN_HPP_NOEXCEPT + { + presentModeCount = static_cast( presentModes_.size() ); + pPresentModes = presentModes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSwapchainPresentModesCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainPresentModesCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainPresentModesCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSwapchainPresentModesCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentModeCount, pPresentModes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainPresentModesCreateInfoEXT const & ) const = default; +#else + bool operator==( SwapchainPresentModesCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeCount == rhs.presentModeCount ) && ( pPresentModes == rhs.pPresentModes ); +# endif + } + + bool operator!=( SwapchainPresentModesCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentModesCreateInfoEXT; + const void * pNext = {}; + uint32_t presentModeCount = {}; + const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {}; + }; + + template <> + struct CppType + { + using Type = SwapchainPresentModesCreateInfoEXT; + }; + + // wrapper struct for struct VkSwapchainPresentScalingCreateInfoEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSwapchainPresentScalingCreateInfoEXT.html + struct SwapchainPresentScalingCreateInfoEXT + { + using NativeType = VkSwapchainPresentScalingCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentScalingCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainPresentScalingCreateInfoEXT( VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT scalingBehavior_ = {}, + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityX_ = {}, + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityY_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , scalingBehavior{ scalingBehavior_ } + , presentGravityX{ presentGravityX_ } + , presentGravityY{ presentGravityY_ } + { + } + + VULKAN_HPP_CONSTEXPR SwapchainPresentScalingCreateInfoEXT( 
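  // [Editorial usage sketch, not part of the generated header] SwapchainPresentModesCreateInfoEXT (above)
  // and SwapchainPresentScalingCreateInfoEXT (being defined here) both extend SwapchainCreateInfoKHR under
  // VK_EXT_swapchain_maintenance1. Assuming an existing "swapchainInfo" and a
  // std::vector<vk::PresentModeKHR> "allowedModes", a possible chain is:
  //
  //   vk::SwapchainPresentScalingCreateInfoEXT scalingInfo{ vk::PresentScalingFlagBitsEXT::eOneToOne };
  //   vk::SwapchainPresentModesCreateInfoEXT   modesInfo{ allowedModes, &scalingInfo };
  //   swapchainInfo.setPNext( &modesInfo );                    // both structs ride on the same pNext chain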
SwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainPresentScalingCreateInfoEXT( VkSwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainPresentScalingCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + SwapchainPresentScalingCreateInfoEXT & operator=( SwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SwapchainPresentScalingCreateInfoEXT & operator=( VkSwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & + setScalingBehavior( VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT scalingBehavior_ ) VULKAN_HPP_NOEXCEPT + { + scalingBehavior = scalingBehavior_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & + setPresentGravityX( VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityX_ ) VULKAN_HPP_NOEXCEPT + { + presentGravityX = presentGravityX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & + setPresentGravityY( VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityY_ ) VULKAN_HPP_NOEXCEPT + { + presentGravityY = presentGravityY_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSwapchainPresentScalingCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainPresentScalingCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainPresentScalingCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkSwapchainPresentScalingCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, scalingBehavior, presentGravityX, presentGravityY ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainPresentScalingCreateInfoEXT const & ) const = default; +#else + bool operator==( SwapchainPresentScalingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( scalingBehavior == rhs.scalingBehavior ) && ( presentGravityX == rhs.presentGravityX ) && + ( presentGravityY == rhs.presentGravityY ); +# endif + } + + bool operator!=( SwapchainPresentScalingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentScalingCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT scalingBehavior = {}; + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityX = {}; + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityY = {}; + }; + + template <> + struct CppType + { + using Type = SwapchainPresentScalingCreateInfoEXT; + }; + + // wrapper struct for struct 
VkTextureLODGatherFormatPropertiesAMD, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkTextureLODGatherFormatPropertiesAMD.html
+  struct TextureLODGatherFormatPropertiesAMD
+  {
+    using NativeType = VkTextureLODGatherFormatPropertiesAMD;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTextureLodGatherFormatPropertiesAMD;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {},
+                                                              void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , supportsTextureGatherLODBiasAMD{ supportsTextureGatherLODBiasAMD_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : TextureLODGatherFormatPropertiesAMD( *reinterpret_cast<TextureLODGatherFormatPropertiesAMD const *>( &rhs ) )
+    {
+    }
+
+    TextureLODGatherFormatPropertiesAMD & operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    TextureLODGatherFormatPropertiesAMD & operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const *>( &rhs );
+      return *this;
+    }
+
+    operator VkTextureLODGatherFormatPropertiesAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD *>( this );
+    }
+
+    operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD *>( this );
+    }
+
+    operator VkTextureLODGatherFormatPropertiesAMD const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD *>( this );
+    }
+
+    operator VkTextureLODGatherFormatPropertiesAMD *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, supportsTextureGatherLODBiasAMD );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( TextureLODGatherFormatPropertiesAMD const & ) const = default;
+#else
+    bool operator==( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD );
+#  endif
+    }
+
+    bool operator!=( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType                           = StructureType::eTextureLodGatherFormatPropertiesAMD;
+    void *                              pNext                           = {};
+    VULKAN_HPP_NAMESPACE::Bool32        supportsTextureGatherLODBiasAMD = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eTextureLodGatherFormatPropertiesAMD>
+  {
+    using Type = TextureLODGatherFormatPropertiesAMD;
+  };
+
+  // wrapper struct for struct VkTileMemoryBindInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTileMemoryBindInfoQCOM.html
+  struct TileMemoryBindInfoQCOM
+  {
+    using NativeType = VkTileMemoryBindInfoQCOM;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTileMemoryBindInfoQCOM;
+
+#if !defined(
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TileMemoryBindInfoQCOM( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memory{ memory_ } + { + } + + VULKAN_HPP_CONSTEXPR TileMemoryBindInfoQCOM( TileMemoryBindInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TileMemoryBindInfoQCOM( VkTileMemoryBindInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : TileMemoryBindInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + + TileMemoryBindInfoQCOM & operator=( TileMemoryBindInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + TileMemoryBindInfoQCOM & operator=( VkTileMemoryBindInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 TileMemoryBindInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TileMemoryBindInfoQCOM & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkTileMemoryBindInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTileMemoryBindInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTileMemoryBindInfoQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkTileMemoryBindInfoQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TileMemoryBindInfoQCOM const & ) const = default; +#else + bool operator==( TileMemoryBindInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ); +# endif + } + + bool operator!=( TileMemoryBindInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTileMemoryBindInfoQCOM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + }; + + template <> + struct CppType + { + using Type = TileMemoryBindInfoQCOM; + }; + + // wrapper struct for struct VkTileMemoryRequirementsQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTileMemoryRequirementsQCOM.html + struct TileMemoryRequirementsQCOM + { + using NativeType = VkTileMemoryRequirementsQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTileMemoryRequirementsQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TileMemoryRequirementsQCOM( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , size{ size_ } + , alignment{ alignment_ } + { + } + + VULKAN_HPP_CONSTEXPR 
TileMemoryRequirementsQCOM( TileMemoryRequirementsQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TileMemoryRequirementsQCOM( VkTileMemoryRequirementsQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : TileMemoryRequirementsQCOM( *reinterpret_cast( &rhs ) ) + { + } + + TileMemoryRequirementsQCOM & operator=( TileMemoryRequirementsQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + TileMemoryRequirementsQCOM & operator=( VkTileMemoryRequirementsQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 TileMemoryRequirementsQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TileMemoryRequirementsQCOM & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TileMemoryRequirementsQCOM & setAlignment( VULKAN_HPP_NAMESPACE::DeviceSize alignment_ ) VULKAN_HPP_NOEXCEPT + { + alignment = alignment_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkTileMemoryRequirementsQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTileMemoryRequirementsQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTileMemoryRequirementsQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkTileMemoryRequirementsQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, size, alignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TileMemoryRequirementsQCOM const & ) const = default; +#else + bool operator==( TileMemoryRequirementsQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( size == rhs.size ) && ( alignment == rhs.alignment ); +# endif + } + + bool operator!=( TileMemoryRequirementsQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTileMemoryRequirementsQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceSize alignment = {}; + }; + + template <> + struct CppType + { + using Type = TileMemoryRequirementsQCOM; + }; + + // wrapper struct for struct VkTileMemorySizeInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTileMemorySizeInfoQCOM.html + struct TileMemorySizeInfoQCOM + { + using NativeType = VkTileMemorySizeInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTileMemorySizeInfoQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TileMemorySizeInfoQCOM( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , size{ size_ } + { + } + + VULKAN_HPP_CONSTEXPR TileMemorySizeInfoQCOM( TileMemorySizeInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; + + TileMemorySizeInfoQCOM( VkTileMemorySizeInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : TileMemorySizeInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + + TileMemorySizeInfoQCOM & operator=( TileMemorySizeInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + TileMemorySizeInfoQCOM & operator=( VkTileMemorySizeInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 TileMemorySizeInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TileMemorySizeInfoQCOM & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkTileMemorySizeInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTileMemorySizeInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTileMemorySizeInfoQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkTileMemorySizeInfoQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TileMemorySizeInfoQCOM const & ) const = default; +#else + bool operator==( TileMemorySizeInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( size == rhs.size ); +# endif + } + + bool operator!=( TileMemorySizeInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTileMemorySizeInfoQCOM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + template <> + struct CppType + { + using Type = TileMemorySizeInfoQCOM; + }; + + // wrapper struct for struct VkTilePropertiesQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTilePropertiesQCOM.html + struct TilePropertiesQCOM + { + using NativeType = VkTilePropertiesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTilePropertiesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Extent3D tileSize_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D apronSize_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D origin_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , tileSize{ tileSize_ } + , apronSize{ apronSize_ } + , origin{ origin_ } + { + } + + VULKAN_HPP_CONSTEXPR TilePropertiesQCOM( TilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TilePropertiesQCOM( VkTilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : TilePropertiesQCOM( *reinterpret_cast( &rhs ) ) {} + + TilePropertiesQCOM & operator=( TilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + TilePropertiesQCOM & operator=( 
VkTilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setTileSize( VULKAN_HPP_NAMESPACE::Extent3D const & tileSize_ ) VULKAN_HPP_NOEXCEPT + { + tileSize = tileSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setApronSize( VULKAN_HPP_NAMESPACE::Extent2D const & apronSize_ ) VULKAN_HPP_NOEXCEPT + { + apronSize = apronSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setOrigin( VULKAN_HPP_NAMESPACE::Offset2D const & origin_ ) VULKAN_HPP_NOEXCEPT + { + origin = origin_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkTilePropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTilePropertiesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTilePropertiesQCOM const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkTilePropertiesQCOM *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, tileSize, apronSize, origin ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TilePropertiesQCOM const & ) const = default; +#else + bool operator==( TilePropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tileSize == rhs.tileSize ) && ( apronSize == rhs.apronSize ) && ( origin == rhs.origin ); +# endif + } + + bool operator!=( TilePropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTilePropertiesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent3D tileSize = {}; + VULKAN_HPP_NAMESPACE::Extent2D apronSize = {}; + VULKAN_HPP_NAMESPACE::Offset2D origin = {}; + }; + + template <> + struct CppType + { + using Type = TilePropertiesQCOM; + }; + + // wrapper struct for struct VkTimelineSemaphoreSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTimelineSemaphoreSubmitInfo.html + struct TimelineSemaphoreSubmitInfo + { + using NativeType = VkTimelineSemaphoreSubmitInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTimelineSemaphoreSubmitInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( uint32_t waitSemaphoreValueCount_ = {}, + const uint64_t * pWaitSemaphoreValues_ = {}, + uint32_t signalSemaphoreValueCount_ = {}, + const uint64_t * pSignalSemaphoreValues_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , waitSemaphoreValueCount{ waitSemaphoreValueCount_ } + , pWaitSemaphoreValues{ pWaitSemaphoreValues_ } + , signalSemaphoreValueCount{ signalSemaphoreValueCount_ } + , pSignalSemaphoreValues{ pSignalSemaphoreValues_ } + { + } + + 
VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TimelineSemaphoreSubmitInfo( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : TimelineSemaphoreSubmitInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + TimelineSemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , waitSemaphoreValueCount( static_cast( waitSemaphoreValues_.size() ) ) + , pWaitSemaphoreValues( waitSemaphoreValues_.data() ) + , signalSemaphoreValueCount( static_cast( signalSemaphoreValues_.size() ) ) + , pSignalSemaphoreValues( signalSemaphoreValues_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + TimelineSemaphoreSubmitInfo & operator=( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + TimelineSemaphoreSubmitInfo & operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setWaitSemaphoreValueCount( uint32_t waitSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreValueCount = waitSemaphoreValueCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphoreValues = pWaitSemaphoreValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + TimelineSemaphoreSubmitInfo & + setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreValueCount = static_cast( waitSemaphoreValues_.size() ); + pWaitSemaphoreValues = waitSemaphoreValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreValueCount = signalSemaphoreValueCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphoreValues = pSignalSemaphoreValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + TimelineSemaphoreSubmitInfo & + setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreValueCount = static_cast( signalSemaphoreValues_.size() ); + pSignalSemaphoreValues = signalSemaphoreValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkTimelineSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTimelineSemaphoreSubmitInfo const *() const VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + + operator VkTimelineSemaphoreSubmitInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, waitSemaphoreValueCount, pWaitSemaphoreValues, signalSemaphoreValueCount, pSignalSemaphoreValues ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TimelineSemaphoreSubmitInfo const & ) const = default; +#else + bool operator==( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount ) && + ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount ) && + ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); +# endif + } + + bool operator!=( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfo; + const void * pNext = {}; + uint32_t waitSemaphoreValueCount = {}; + const uint64_t * pWaitSemaphoreValues = {}; + uint32_t signalSemaphoreValueCount = {}; + const uint64_t * pSignalSemaphoreValues = {}; + }; + + template <> + struct CppType + { + using Type = TimelineSemaphoreSubmitInfo; + }; + + using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo; + + // wrapper struct for struct VkTraceRaysIndirectCommand2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTraceRaysIndirectCommand2KHR.html + struct TraceRaysIndirectCommand2KHR + { + using NativeType = VkTraceRaysIndirectCommand2KHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommand2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride_ = {}, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT + : raygenShaderRecordAddress{ raygenShaderRecordAddress_ } + , raygenShaderRecordSize{ raygenShaderRecordSize_ } + , missShaderBindingTableAddress{ missShaderBindingTableAddress_ } + , missShaderBindingTableSize{ missShaderBindingTableSize_ } + , missShaderBindingTableStride{ missShaderBindingTableStride_ } + , hitShaderBindingTableAddress{ hitShaderBindingTableAddress_ } + , hitShaderBindingTableSize{ hitShaderBindingTableSize_ } + , hitShaderBindingTableStride{ hitShaderBindingTableStride_ } + , callableShaderBindingTableAddress{ callableShaderBindingTableAddress_ } + , 
callableShaderBindingTableSize{ callableShaderBindingTableSize_ } + , callableShaderBindingTableStride{ callableShaderBindingTableStride_ } + , width{ width_ } + , height{ height_ } + , depth{ depth_ } + { + } + + VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommand2KHR( TraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TraceRaysIndirectCommand2KHR( VkTraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : TraceRaysIndirectCommand2KHR( *reinterpret_cast( &rhs ) ) + { + } + + TraceRaysIndirectCommand2KHR & operator=( TraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + TraceRaysIndirectCommand2KHR & operator=( VkTraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setRaygenShaderRecordAddress( VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress_ ) VULKAN_HPP_NOEXCEPT + { + raygenShaderRecordAddress = raygenShaderRecordAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setRaygenShaderRecordSize( VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize_ ) VULKAN_HPP_NOEXCEPT + { + raygenShaderRecordSize = raygenShaderRecordSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setMissShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT + { + missShaderBindingTableAddress = missShaderBindingTableAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setMissShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT + { + missShaderBindingTableSize = missShaderBindingTableSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setMissShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT + { + missShaderBindingTableStride = missShaderBindingTableStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setHitShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT + { + hitShaderBindingTableAddress = hitShaderBindingTableAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setHitShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT + { + hitShaderBindingTableSize = hitShaderBindingTableSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setHitShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT + { + hitShaderBindingTableStride = hitShaderBindingTableStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setCallableShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT + { + callableShaderBindingTableAddress = callableShaderBindingTableAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setCallableShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT + { + callableShaderBindingTableSize = 
callableShaderBindingTableSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setCallableShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT + { + callableShaderBindingTableStride = callableShaderBindingTableStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT + { + depth = depth_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkTraceRaysIndirectCommand2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTraceRaysIndirectCommand2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTraceRaysIndirectCommand2KHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkTraceRaysIndirectCommand2KHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( raygenShaderRecordAddress, + raygenShaderRecordSize, + missShaderBindingTableAddress, + missShaderBindingTableSize, + missShaderBindingTableStride, + hitShaderBindingTableAddress, + hitShaderBindingTableSize, + hitShaderBindingTableStride, + callableShaderBindingTableAddress, + callableShaderBindingTableSize, + callableShaderBindingTableStride, + width, + height, + depth ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TraceRaysIndirectCommand2KHR const & ) const = default; +#else + bool operator==( TraceRaysIndirectCommand2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( raygenShaderRecordAddress == rhs.raygenShaderRecordAddress ) && ( raygenShaderRecordSize == rhs.raygenShaderRecordSize ) && + ( missShaderBindingTableAddress == rhs.missShaderBindingTableAddress ) && ( missShaderBindingTableSize == rhs.missShaderBindingTableSize ) && + ( missShaderBindingTableStride == rhs.missShaderBindingTableStride ) && ( hitShaderBindingTableAddress == rhs.hitShaderBindingTableAddress ) && + ( hitShaderBindingTableSize == rhs.hitShaderBindingTableSize ) && ( hitShaderBindingTableStride == rhs.hitShaderBindingTableStride ) && + ( callableShaderBindingTableAddress == rhs.callableShaderBindingTableAddress ) && + ( callableShaderBindingTableSize == rhs.callableShaderBindingTableSize ) && + ( callableShaderBindingTableStride == rhs.callableShaderBindingTableStride ) && ( width == rhs.width ) && ( height == rhs.height ) && + ( depth == rhs.depth ); +# endif + } + + bool operator!=( TraceRaysIndirectCommand2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize 
missShaderBindingTableStride = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride = {}; + uint32_t width = {}; + uint32_t height = {}; + uint32_t depth = {}; + }; + + // wrapper struct for struct VkTraceRaysIndirectCommandKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTraceRaysIndirectCommandKHR.html + struct TraceRaysIndirectCommandKHR + { + using NativeType = VkTraceRaysIndirectCommandKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT + : width{ width_ } + , height{ height_ } + , depth{ depth_ } + { + } + + VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TraceRaysIndirectCommandKHR( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : TraceRaysIndirectCommandKHR( *reinterpret_cast( &rhs ) ) + { + } + + explicit TraceRaysIndirectCommandKHR( Extent2D const & extent2D, uint32_t depth_ = {} ) + : width( extent2D.width ), height( extent2D.height ), depth( depth_ ) + { + } + + TraceRaysIndirectCommandKHR & operator=( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + TraceRaysIndirectCommandKHR & operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT + { + depth = depth_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkTraceRaysIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTraceRaysIndirectCommandKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkTraceRaysIndirectCommandKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( width, height, depth ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TraceRaysIndirectCommandKHR const & ) const = default; +#else + bool operator==( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth ); +# endif + } + + 
bool operator!=( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t width  = {};
+    uint32_t height = {};
+    uint32_t depth  = {};
+  };
+
+  // wrapper struct for struct VkValidationCacheCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkValidationCacheCreateInfoEXT.html
+  struct ValidationCacheCreateInfoEXT
+  {
+    using NativeType = VkValidationCacheCreateInfoEXT;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationCacheCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {},
+                                                       size_t                                              initialDataSize_ = {},
+                                                       const void *                                        pInitialData_ = {},
+                                                       const void *                                        pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , flags{ flags_ }
+      , initialDataSize{ initialDataSize_ }
+      , pInitialData{ pInitialData_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ValidationCacheCreateInfoEXT( *reinterpret_cast<ValidationCacheCreateInfoEXT const *>( &rhs ) )
+    {
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT              flags_,
+                                  VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const &         initialData_,
+                                  const void *                                                     pNext_ = nullptr )
+      : pNext( pNext_ ), flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() )
+    {
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    ValidationCacheCreateInfoEXT & operator=( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    ValidationCacheCreateInfoEXT & operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialDataSize = initialDataSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInitialData = pInitialData_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    ValidationCacheCreateInfoEXT & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialDataSize = initialData_.size() * sizeof( T );
+      pInitialData    = initialData_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( this );
+    }
+
+    operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+ return *reinterpret_cast( this ); + } + + operator VkValidationCacheCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkValidationCacheCreateInfoEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, initialDataSize, pInitialData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationCacheCreateInfoEXT const & ) const = default; +#else + bool operator==( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( initialDataSize == rhs.initialDataSize ) && + ( pInitialData == rhs.pInitialData ); +# endif + } + + bool operator!=( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {}; + size_t initialDataSize = {}; + const void * pInitialData = {}; + }; + + template <> + struct CppType + { + using Type = ValidationCacheCreateInfoEXT; + }; + + // wrapper struct for struct VkValidationFeaturesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkValidationFeaturesEXT.html + struct ValidationFeaturesEXT + { + using NativeType = VkValidationFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( uint32_t enabledValidationFeatureCount_ = {}, + const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ = {}, + uint32_t disabledValidationFeatureCount_ = {}, + const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , enabledValidationFeatureCount{ enabledValidationFeatureCount_ } + , pEnabledValidationFeatures{ pEnabledValidationFeatures_ } + , disabledValidationFeatureCount{ disabledValidationFeatureCount_ } + , pDisabledValidationFeatures{ pDisabledValidationFeatures_ } + { + } + + VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ValidationFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFeaturesEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & enabledValidationFeatures_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & disabledValidationFeatures_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , enabledValidationFeatureCount( static_cast( enabledValidationFeatures_.size() ) ) + , pEnabledValidationFeatures( enabledValidationFeatures_.data() ) + , disabledValidationFeatureCount( static_cast( disabledValidationFeatures_.size() ) ) + , pDisabledValidationFeatures( disabledValidationFeatures_.data() ) + { + } +# 
endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ValidationFeaturesEXT & operator=( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ValidationFeaturesEXT & operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT + { + enabledValidationFeatureCount = enabledValidationFeatureCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & + setPEnabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT + { + pEnabledValidationFeatures = pEnabledValidationFeatures_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFeaturesEXT & setEnabledValidationFeatures( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & enabledValidationFeatures_ ) + VULKAN_HPP_NOEXCEPT + { + enabledValidationFeatureCount = static_cast( enabledValidationFeatures_.size() ); + pEnabledValidationFeatures = enabledValidationFeatures_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT + { + disabledValidationFeatureCount = disabledValidationFeatureCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & + setPDisabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT + { + pDisabledValidationFeatures = pDisabledValidationFeatures_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFeaturesEXT & setDisabledValidationFeatures( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & disabledValidationFeatures_ ) + VULKAN_HPP_NOEXCEPT + { + disabledValidationFeatureCount = static_cast( disabledValidationFeatures_.size() ); + pDisabledValidationFeatures = disabledValidationFeatures_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkValidationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkValidationFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkValidationFeaturesEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, enabledValidationFeatureCount, pEnabledValidationFeatures, disabledValidationFeatureCount, pDisabledValidationFeatures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationFeaturesEXT const & ) const = default; +#else + bool operator==( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return 
this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount ) && + ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures ) && ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount ) && + ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures ); +# endif + } + + bool operator!=( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT; + const void * pNext = {}; + uint32_t enabledValidationFeatureCount = {}; + const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures = {}; + uint32_t disabledValidationFeatureCount = {}; + const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures = {}; + }; + + template <> + struct CppType + { + using Type = ValidationFeaturesEXT; + }; + + // wrapper struct for struct VkValidationFlagsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkValidationFlagsEXT.html + struct ValidationFlagsEXT + { + using NativeType = VkValidationFlagsEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFlagsEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = {}, + const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , disabledValidationCheckCount{ disabledValidationCheckCount_ } + , pDisabledValidationChecks{ pDisabledValidationChecks_ } + { + } + + VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ValidationFlagsEXT( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFlagsEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & disabledValidationChecks_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , disabledValidationCheckCount( static_cast( disabledValidationChecks_.size() ) ) + , pDisabledValidationChecks( disabledValidationChecks_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ValidationFlagsEXT & operator=( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ValidationFlagsEXT & operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) VULKAN_HPP_NOEXCEPT + { + disabledValidationCheckCount = disabledValidationCheckCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & + setPDisabledValidationChecks( const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT + { + pDisabledValidationChecks = pDisabledValidationChecks_; + return *this; + } + +# if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFlagsEXT & setDisabledValidationChecks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & disabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT + { + disabledValidationCheckCount = static_cast( disabledValidationChecks_.size() ); + pDisabledValidationChecks = disabledValidationChecks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkValidationFlagsEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkValidationFlagsEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkValidationFlagsEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, disabledValidationCheckCount, pDisabledValidationChecks ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationFlagsEXT const & ) const = default; +#else + bool operator==( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount ) && + ( pDisabledValidationChecks == rhs.pDisabledValidationChecks ); +# endif + } + + bool operator!=( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT; + const void * pNext = {}; + uint32_t disabledValidationCheckCount = {}; + const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks = {}; + }; + + template <> + struct CppType + { + using Type = ValidationFlagsEXT; + }; + + // wrapper struct for struct VkVertexInputAttributeDescription2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVertexInputAttributeDescription2EXT.html + struct VertexInputAttributeDescription2EXT + { + using NativeType = VkVertexInputAttributeDescription2EXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputAttributeDescription2EXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( uint32_t location_ = {}, + uint32_t binding_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint32_t offset_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , location{ location_ } + , binding{ binding_ } + , format{ format_ } + , offset{ offset_ } + { + } + + VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputAttributeDescription2EXT( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputAttributeDescription2EXT( *reinterpret_cast( &rhs ) ) + { + } + + VertexInputAttributeDescription2EXT & operator=( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + 
VertexInputAttributeDescription2EXT & operator=( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT + { + location = location_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVertexInputAttributeDescription2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputAttributeDescription2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputAttributeDescription2EXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVertexInputAttributeDescription2EXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, location, binding, format, offset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputAttributeDescription2EXT const & ) const = default; +#else + bool operator==( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) && + ( offset == rhs.offset ); +# endif + } + + bool operator!=( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputAttributeDescription2EXT; + void * pNext = {}; + uint32_t location = {}; + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t offset = {}; + }; + + template <> + struct CppType + { + using Type = VertexInputAttributeDescription2EXT; + }; + + // wrapper struct for struct VkVertexInputBindingDescription2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVertexInputBindingDescription2EXT.html + struct VertexInputBindingDescription2EXT + { + using NativeType = VkVertexInputBindingDescription2EXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputBindingDescription2EXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( uint32_t binding_ = {}, + uint32_t 
stride_ = {}, + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex, + uint32_t divisor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , binding{ binding_ } + , stride{ stride_ } + , inputRate{ inputRate_ } + , divisor{ divisor_ } + { + } + + VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDescription2EXT( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputBindingDescription2EXT( *reinterpret_cast( &rhs ) ) + { + } + + VertexInputBindingDescription2EXT & operator=( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VertexInputBindingDescription2EXT & operator=( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT + { + inputRate = inputRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT + { + divisor = divisor_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVertexInputBindingDescription2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputBindingDescription2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputBindingDescription2EXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVertexInputBindingDescription2EXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, binding, stride, inputRate, divisor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputBindingDescription2EXT const & ) const = default; +#else + bool operator==( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ) && + ( divisor == rhs.divisor ); +# endif + } + + bool operator!=( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputBindingDescription2EXT; + void * pNext = {}; + uint32_t binding = {}; + uint32_t stride = {}; + VULKAN_HPP_NAMESPACE::VertexInputRate 
inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex; + uint32_t divisor = {}; + }; + + template <> + struct CppType + { + using Type = VertexInputBindingDescription2EXT; + }; + +#if defined( VK_USE_PLATFORM_VI_NN ) + // wrapper struct for struct VkViSurfaceCreateInfoNN, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkViSurfaceCreateInfoNN.html + struct ViSurfaceCreateInfoNN + { + using NativeType = VkViSurfaceCreateInfoNN; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eViSurfaceCreateInfoNN; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ViSurfaceCreateInfoNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {}, void * window_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , window{ window_ } + { + } + + VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT + : ViSurfaceCreateInfoNN( *reinterpret_cast( &rhs ) ) + { + } + + ViSurfaceCreateInfoNN & operator=( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ViSurfaceCreateInfoNN & operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setFlags( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setWindow( void * window_ ) VULKAN_HPP_NOEXCEPT + { + window = window_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkViSurfaceCreateInfoNN const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkViSurfaceCreateInfoNN const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkViSurfaceCreateInfoNN *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, window ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ViSurfaceCreateInfoNN const & ) const = default; +# else + bool operator==( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window ); +# endif + } + + bool operator!=( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {}; + void * window = {}; + }; + + template <> + 
struct CppType + { + using Type = ViSurfaceCreateInfoNN; + }; +#endif /*VK_USE_PLATFORM_VI_NN*/ + + // wrapper struct for struct VkVideoPictureResourceInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoPictureResourceInfoKHR.html + struct VideoPictureResourceInfoKHR + { + using NativeType = VkVideoPictureResourceInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoPictureResourceInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoPictureResourceInfoKHR( VULKAN_HPP_NAMESPACE::Offset2D codedOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {}, + uint32_t baseArrayLayer_ = {}, + VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , codedOffset{ codedOffset_ } + , codedExtent{ codedExtent_ } + , baseArrayLayer{ baseArrayLayer_ } + , imageViewBinding{ imageViewBinding_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoPictureResourceInfoKHR( VideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoPictureResourceInfoKHR( VkVideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoPictureResourceInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoPictureResourceInfoKHR & operator=( VideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoPictureResourceInfoKHR & operator=( VkVideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setCodedOffset( VULKAN_HPP_NAMESPACE::Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT + { + codedOffset = codedOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT + { + codedExtent = codedExtent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setImageViewBinding( VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ ) VULKAN_HPP_NOEXCEPT + { + imageViewBinding = imageViewBinding_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoPictureResourceInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoPictureResourceInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoPictureResourceInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoPictureResourceInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, codedOffset, codedExtent, baseArrayLayer, imageViewBinding ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto 
operator<=>( VideoPictureResourceInfoKHR const & ) const = default; +#else + bool operator==( VideoPictureResourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( codedOffset == rhs.codedOffset ) && ( codedExtent == rhs.codedExtent ) && + ( baseArrayLayer == rhs.baseArrayLayer ) && ( imageViewBinding == rhs.imageViewBinding ); +# endif + } + + bool operator!=( VideoPictureResourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoPictureResourceInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Offset2D codedOffset = {}; + VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {}; + uint32_t baseArrayLayer = {}; + VULKAN_HPP_NAMESPACE::ImageView imageViewBinding = {}; + }; + + template <> + struct CppType + { + using Type = VideoPictureResourceInfoKHR; + }; + + // wrapper struct for struct VkVideoReferenceSlotInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoReferenceSlotInfoKHR.html + struct VideoReferenceSlotInfoKHR + { + using NativeType = VkVideoReferenceSlotInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoReferenceSlotInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoReferenceSlotInfoKHR( int32_t slotIndex_ = {}, + const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , slotIndex{ slotIndex_ } + , pPictureResource{ pPictureResource_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoReferenceSlotInfoKHR( VideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoReferenceSlotInfoKHR( VkVideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoReferenceSlotInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoReferenceSlotInfoKHR & operator=( VideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoReferenceSlotInfoKHR & operator=( VkVideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setSlotIndex( int32_t slotIndex_ ) VULKAN_HPP_NOEXCEPT + { + slotIndex = slotIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & + setPPictureResource( const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource_ ) VULKAN_HPP_NOEXCEPT + { + pPictureResource = pPictureResource_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoReferenceSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoReferenceSlotInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoReferenceSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoReferenceSlotInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, slotIndex, pPictureResource ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoReferenceSlotInfoKHR const & ) const = default; +#else + bool operator==( VideoReferenceSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) && ( pPictureResource == rhs.pPictureResource ); +# endif + } + + bool operator!=( VideoReferenceSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoReferenceSlotInfoKHR; + const void * pNext = {}; + int32_t slotIndex = {}; + const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource = {}; + }; + + template <> + struct CppType + { + using Type = VideoReferenceSlotInfoKHR; + }; + + // wrapper struct for struct VkVideoBeginCodingInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoBeginCodingInfoKHR.html + struct VideoBeginCodingInfoKHR + { + using NativeType = VkVideoBeginCodingInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBeginCodingInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ = {}, + uint32_t referenceSlotCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , videoSession{ videoSession_ } + , videoSessionParameters{ videoSessionParameters_ } + , referenceSlotCount{ referenceSlotCount_ } + , pReferenceSlots{ pReferenceSlots_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoBeginCodingInfoKHR( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoBeginCodingInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoBeginCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , videoSession( videoSession_ ) + , videoSessionParameters( videoSessionParameters_ ) + , referenceSlotCount( static_cast( referenceSlots_.size() ) ) + , pReferenceSlots( referenceSlots_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoBeginCodingInfoKHR & operator=( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoBeginCodingInfoKHR & operator=( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return 
*this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT + { + videoSession = videoSession_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & + setVideoSessionParameters( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ ) VULKAN_HPP_NOEXCEPT + { + videoSessionParameters = videoSessionParameters_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = referenceSlotCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & + setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceSlots = pReferenceSlots_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoBeginCodingInfoKHR & setReferenceSlots( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = static_cast( referenceSlots_.size() ); + pReferenceSlots = referenceSlots_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoBeginCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoBeginCodingInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoBeginCodingInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoBeginCodingInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, videoSession, videoSessionParameters, referenceSlotCount, pReferenceSlots ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoBeginCodingInfoKHR const & ) const = default; +#else + bool operator==( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( videoSession == rhs.videoSession ) && + ( videoSessionParameters == rhs.videoSessionParameters ) && ( referenceSlotCount == rhs.referenceSlotCount ) && + ( pReferenceSlots == rhs.pReferenceSlots ); +# endif + } + + bool operator!=( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoBeginCodingInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters = {}; + uint32_t referenceSlotCount = {}; + const 
VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {}; + }; + + template <> + struct CppType + { + using Type = VideoBeginCodingInfoKHR; + }; + + // wrapper struct for struct VkVideoCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoCapabilitiesKHR.html + struct VideoCapabilitiesKHR + { + using NativeType = VkVideoCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D pictureAccessGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minCodedExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent_ = {}, + uint32_t maxDpbSlots_ = {}, + uint32_t maxActiveReferencePictures_ = {}, + VULKAN_HPP_NAMESPACE::ExtensionProperties stdHeaderVersion_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , minBitstreamBufferOffsetAlignment{ minBitstreamBufferOffsetAlignment_ } + , minBitstreamBufferSizeAlignment{ minBitstreamBufferSizeAlignment_ } + , pictureAccessGranularity{ pictureAccessGranularity_ } + , minCodedExtent{ minCodedExtent_ } + , maxCodedExtent{ maxCodedExtent_ } + , maxDpbSlots{ maxDpbSlots_ } + , maxActiveReferencePictures{ maxActiveReferencePictures_ } + , stdHeaderVersion{ stdHeaderVersion_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoCapabilitiesKHR( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoCapabilitiesKHR & operator=( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoCapabilitiesKHR & operator=( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + minBitstreamBufferOffsetAlignment, + minBitstreamBufferSizeAlignment, + pictureAccessGranularity, + minCodedExtent, + maxCodedExtent, + maxDpbSlots, + maxActiveReferencePictures, + stdHeaderVersion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoCapabilitiesKHR const & ) const = default; +#else + bool operator==( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( 
minBitstreamBufferOffsetAlignment == rhs.minBitstreamBufferOffsetAlignment ) && + ( minBitstreamBufferSizeAlignment == rhs.minBitstreamBufferSizeAlignment ) && ( pictureAccessGranularity == rhs.pictureAccessGranularity ) && + ( minCodedExtent == rhs.minCodedExtent ) && ( maxCodedExtent == rhs.maxCodedExtent ) && ( maxDpbSlots == rhs.maxDpbSlots ) && + ( maxActiveReferencePictures == rhs.maxActiveReferencePictures ) && ( stdHeaderVersion == rhs.stdHeaderVersion ); +# endif + } + + bool operator!=( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment = {}; + VULKAN_HPP_NAMESPACE::Extent2D pictureAccessGranularity = {}; + VULKAN_HPP_NAMESPACE::Extent2D minCodedExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent = {}; + uint32_t maxDpbSlots = {}; + uint32_t maxActiveReferencePictures = {}; + VULKAN_HPP_NAMESPACE::ExtensionProperties stdHeaderVersion = {}; + }; + + template <> + struct CppType + { + using Type = VideoCapabilitiesKHR; + }; + + // wrapper struct for struct VkVideoCodingControlInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoCodingControlInfoKHR.html + struct VideoCodingControlInfoKHR + { + using NativeType = VkVideoCodingControlInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCodingControlInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoCodingControlInfoKHR( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoCodingControlInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoCodingControlInfoKHR & operator=( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoCodingControlInfoKHR & operator=( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoCodingControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoCodingControlInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoCodingControlInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoCodingControlInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } 
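+    // Illustrative usage sketch, not part of the generated header: a coding control command that resets a
+    // video session before its DPB slots are used for the first time. `cmdBuffer` is a placeholder for a
+    // command buffer recorded on a queue family with video coding support.
+    //
+    //   auto controlInfo = vk::VideoCodingControlInfoKHR{}.setFlags( vk::VideoCodingControlFlagBitsKHR::eReset );
+    //   cmdBuffer.controlVideoCodingKHR( controlInfo );
+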
+ +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoCodingControlInfoKHR const & ) const = default; +#else + bool operator==( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCodingControlInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags = {}; + }; + + template <> + struct CppType + { + using Type = VideoCodingControlInfoKHR; + }; + + // wrapper struct for struct VkVideoDecodeAV1CapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeAV1CapabilitiesKHR.html + struct VideoDecodeAV1CapabilitiesKHR + { + using NativeType = VkVideoDecodeAV1CapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeAv1CapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeAV1CapabilitiesKHR( StdVideoAV1Level maxLevel_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxLevel{ maxLevel_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeAV1CapabilitiesKHR( VideoDecodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeAV1CapabilitiesKHR( VkVideoDecodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeAV1CapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeAV1CapabilitiesKHR & operator=( VideoDecodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeAV1CapabilitiesKHR & operator=( VkVideoDecodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoDecodeAV1CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxLevel ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoDecodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoDecodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ) == 0 ); + } + + bool operator!=( VideoDecodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeAv1CapabilitiesKHR; + void * pNext = {}; + StdVideoAV1Level maxLevel = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeAV1CapabilitiesKHR; + }; + + // wrapper struct for struct VkVideoDecodeAV1DpbSlotInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeAV1DpbSlotInfoKHR.html + struct VideoDecodeAV1DpbSlotInfoKHR + { + using NativeType = VkVideoDecodeAV1DpbSlotInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeAv1DpbSlotInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeAV1DpbSlotInfoKHR( const StdVideoDecodeAV1ReferenceInfo * pStdReferenceInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdReferenceInfo{ pStdReferenceInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeAV1DpbSlotInfoKHR( VideoDecodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeAV1DpbSlotInfoKHR( VkVideoDecodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeAV1DpbSlotInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeAV1DpbSlotInfoKHR & operator=( VideoDecodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeAV1DpbSlotInfoKHR & operator=( VkVideoDecodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1DpbSlotInfoKHR & setPStdReferenceInfo( const StdVideoDecodeAV1ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdReferenceInfo = pStdReferenceInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeAV1DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1DpbSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1DpbSlotInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdReferenceInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeAV1DpbSlotInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeAV1DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( 
VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); +# endif + } + + bool operator!=( VideoDecodeAV1DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeAv1DpbSlotInfoKHR; + const void * pNext = {}; + const StdVideoDecodeAV1ReferenceInfo * pStdReferenceInfo = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeAV1DpbSlotInfoKHR; + }; + + // wrapper struct for struct VkVideoDecodeAV1InlineSessionParametersInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeAV1InlineSessionParametersInfoKHR.html + struct VideoDecodeAV1InlineSessionParametersInfoKHR + { + using NativeType = VkVideoDecodeAV1InlineSessionParametersInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeAv1InlineSessionParametersInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeAV1InlineSessionParametersInfoKHR( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdSequenceHeader{ pStdSequenceHeader_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeAV1InlineSessionParametersInfoKHR( VideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeAV1InlineSessionParametersInfoKHR( VkVideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeAV1InlineSessionParametersInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeAV1InlineSessionParametersInfoKHR & operator=( VideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeAV1InlineSessionParametersInfoKHR & operator=( VkVideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1InlineSessionParametersInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1InlineSessionParametersInfoKHR & + setPStdSequenceHeader( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ ) VULKAN_HPP_NOEXCEPT + { + pStdSequenceHeader = pStdSequenceHeader_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeAV1InlineSessionParametersInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1InlineSessionParametersInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1InlineSessionParametersInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1InlineSessionParametersInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdSequenceHeader ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeAV1InlineSessionParametersInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdSequenceHeader == rhs.pStdSequenceHeader ); +# endif + } + + bool operator!=( VideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeAv1InlineSessionParametersInfoKHR; + const void * pNext = {}; + const StdVideoAV1SequenceHeader * pStdSequenceHeader = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeAV1InlineSessionParametersInfoKHR; + }; + + // wrapper struct for struct VkVideoDecodeAV1PictureInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeAV1PictureInfoKHR.html + struct VideoDecodeAV1PictureInfoKHR + { + using NativeType = VkVideoDecodeAV1PictureInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeAv1PictureInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR( const StdVideoDecodeAV1PictureInfo * pStdPictureInfo_ = {}, + std::array const & referenceNameSlotIndices_ = {}, + uint32_t frameHeaderOffset_ = {}, + uint32_t tileCount_ = {}, + const uint32_t * pTileOffsets_ = {}, + const uint32_t * pTileSizes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdPictureInfo{ pStdPictureInfo_ } + , referenceNameSlotIndices{ referenceNameSlotIndices_ } + , frameHeaderOffset{ frameHeaderOffset_ } + , tileCount{ tileCount_ } + , pTileOffsets{ pTileOffsets_ } + , pTileSizes{ pTileSizes_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR( VideoDecodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeAV1PictureInfoKHR( VkVideoDecodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeAV1PictureInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeAV1PictureInfoKHR( const StdVideoDecodeAV1PictureInfo * pStdPictureInfo_, + std::array const & referenceNameSlotIndices_, + uint32_t frameHeaderOffset_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tileOffsets_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tileSizes_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , pStdPictureInfo( pStdPictureInfo_ ) + , referenceNameSlotIndices( referenceNameSlotIndices_ ) + , frameHeaderOffset( frameHeaderOffset_ ) + , tileCount( static_cast( tileOffsets_.size() ) ) + , pTileOffsets( tileOffsets_.data() ) + , pTileSizes( tileSizes_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( tileOffsets_.size() == tileSizes_.size() ); +# else + if ( tileOffsets_.size() != tileSizes_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::VideoDecodeAV1PictureInfoKHR::VideoDecodeAV1PictureInfoKHR: tileOffsets_.size() != tileSizes_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoDecodeAV1PictureInfoKHR & operator=( VideoDecodeAV1PictureInfoKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeAV1PictureInfoKHR & operator=( VkVideoDecodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setPStdPictureInfo( const StdVideoDecodeAV1PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & + setReferenceNameSlotIndices( std::array referenceNameSlotIndices_ ) VULKAN_HPP_NOEXCEPT + { + referenceNameSlotIndices = referenceNameSlotIndices_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setFrameHeaderOffset( uint32_t frameHeaderOffset_ ) VULKAN_HPP_NOEXCEPT + { + frameHeaderOffset = frameHeaderOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setTileCount( uint32_t tileCount_ ) VULKAN_HPP_NOEXCEPT + { + tileCount = tileCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setPTileOffsets( const uint32_t * pTileOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pTileOffsets = pTileOffsets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeAV1PictureInfoKHR & setTileOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tileOffsets_ ) VULKAN_HPP_NOEXCEPT + { + tileCount = static_cast( tileOffsets_.size() ); + pTileOffsets = tileOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setPTileSizes( const uint32_t * pTileSizes_ ) VULKAN_HPP_NOEXCEPT + { + pTileSizes = pTileSizes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeAV1PictureInfoKHR & setTileSizes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tileSizes_ ) VULKAN_HPP_NOEXCEPT + { + tileCount = static_cast( tileSizes_.size() ); + pTileSizes = tileSizes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeAV1PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1PictureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1PictureInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + uint32_t const &, + const uint32_t * const &, + const uint32_t * const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdPictureInfo, referenceNameSlotIndices, frameHeaderOffset, tileCount, pTileOffsets, pTileSizes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeAV1PictureInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeAV1PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == 
rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && + ( referenceNameSlotIndices == rhs.referenceNameSlotIndices ) && ( frameHeaderOffset == rhs.frameHeaderOffset ) && ( tileCount == rhs.tileCount ) && + ( pTileOffsets == rhs.pTileOffsets ) && ( pTileSizes == rhs.pTileSizes ); +# endif + } + + bool operator!=( VideoDecodeAV1PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeAv1PictureInfoKHR; + const void * pNext = {}; + const StdVideoDecodeAV1PictureInfo * pStdPictureInfo = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D referenceNameSlotIndices = {}; + uint32_t frameHeaderOffset = {}; + uint32_t tileCount = {}; + const uint32_t * pTileOffsets = {}; + const uint32_t * pTileSizes = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeAV1PictureInfoKHR; + }; + + // wrapper struct for struct VkVideoDecodeAV1ProfileInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeAV1ProfileInfoKHR.html + struct VideoDecodeAV1ProfileInfoKHR + { + using NativeType = VkVideoDecodeAV1ProfileInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeAv1ProfileInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeAV1ProfileInfoKHR( StdVideoAV1Profile stdProfile_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filmGrainSupport_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdProfile{ stdProfile_ } + , filmGrainSupport{ filmGrainSupport_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeAV1ProfileInfoKHR( VideoDecodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeAV1ProfileInfoKHR( VkVideoDecodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeAV1ProfileInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeAV1ProfileInfoKHR & operator=( VideoDecodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeAV1ProfileInfoKHR & operator=( VkVideoDecodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1ProfileInfoKHR & setStdProfile( StdVideoAV1Profile stdProfile_ ) VULKAN_HPP_NOEXCEPT + { + stdProfile = stdProfile_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1ProfileInfoKHR & setFilmGrainSupport( VULKAN_HPP_NAMESPACE::Bool32 filmGrainSupport_ ) VULKAN_HPP_NOEXCEPT + { + filmGrainSupport = filmGrainSupport_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeAV1ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdProfile, filmGrainSupport ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoDecodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoAV1Profile ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = filmGrainSupport <=> rhs.filmGrainSupport; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoDecodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoAV1Profile ) ) == 0 ) && + ( filmGrainSupport == rhs.filmGrainSupport ); + } + + bool operator!=( VideoDecodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeAv1ProfileInfoKHR; + const void * pNext = {}; + StdVideoAV1Profile stdProfile = {}; + VULKAN_HPP_NAMESPACE::Bool32 filmGrainSupport = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeAV1ProfileInfoKHR; + }; + + // wrapper struct for struct VkVideoDecodeAV1SessionParametersCreateInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeAV1SessionParametersCreateInfoKHR.html + struct VideoDecodeAV1SessionParametersCreateInfoKHR + { + using NativeType = VkVideoDecodeAV1SessionParametersCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeAv1SessionParametersCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeAV1SessionParametersCreateInfoKHR( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdSequenceHeader{ pStdSequenceHeader_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeAV1SessionParametersCreateInfoKHR( VideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeAV1SessionParametersCreateInfoKHR( VkVideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeAV1SessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeAV1SessionParametersCreateInfoKHR & operator=( VideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeAV1SessionParametersCreateInfoKHR & operator=( VkVideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
VideoDecodeAV1SessionParametersCreateInfoKHR & + setPStdSequenceHeader( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ ) VULKAN_HPP_NOEXCEPT + { + pStdSequenceHeader = pStdSequenceHeader_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeAV1SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1SessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1SessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdSequenceHeader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeAV1SessionParametersCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdSequenceHeader == rhs.pStdSequenceHeader ); +# endif + } + + bool operator!=( VideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeAv1SessionParametersCreateInfoKHR; + const void * pNext = {}; + const StdVideoAV1SequenceHeader * pStdSequenceHeader = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeAV1SessionParametersCreateInfoKHR; + }; + + // wrapper struct for struct VkVideoDecodeCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeCapabilitiesKHR.html + struct VideoDecodeCapabilitiesKHR + { + using NativeType = VkVideoDecodeCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR flags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR( VideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeCapabilitiesKHR( VkVideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeCapabilitiesKHR & operator=( VideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeCapabilitiesKHR & operator=( VkVideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoDecodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeCapabilitiesKHR const 
*() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkVideoDecodeCapabilitiesKHR *>( this );
+    }
+
+    operator VkVideoDecodeCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkVideoDecodeCapabilitiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoDecodeCapabilitiesKHR const & ) const = default;
+#else
+    bool operator==( VideoDecodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
+#  endif
+    }
+
+    bool operator!=( VideoDecodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeCapabilitiesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR flags = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeCapabilitiesKHR>
+  {
+    using Type = VideoDecodeCapabilitiesKHR;
+  };
+
+  // wrapper struct for struct VkVideoDecodeH264CapabilitiesKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264CapabilitiesKHR.html
+  struct VideoDecodeH264CapabilitiesKHR
+  {
+    using NativeType = VkVideoDecodeH264CapabilitiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264CapabilitiesKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoDecodeH264CapabilitiesKHR( StdVideoH264LevelIdc maxLevelIdc_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity_ = {},
+                                                         void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , maxLevelIdc{ maxLevelIdc_ }
+      , fieldOffsetGranularity{ fieldOffsetGranularity_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeH264CapabilitiesKHR( VideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH264CapabilitiesKHR( VkVideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH264CapabilitiesKHR( *reinterpret_cast<VideoDecodeH264CapabilitiesKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoDecodeH264CapabilitiesKHR & operator=( VideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    VideoDecodeH264CapabilitiesKHR & operator=( VkVideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkVideoDecodeH264CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH264CapabilitiesKHR *>( this );
+    }
+
+    operator VkVideoDecodeH264CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH264CapabilitiesKHR *>( this );
+    }
+
+    operator VkVideoDecodeH264CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkVideoDecodeH264CapabilitiesKHR *>( this );
+    }
+
+    operator VkVideoDecodeH264CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkVideoDecodeH264CapabilitiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, StdVideoH264LevelIdc const &, VULKAN_HPP_NAMESPACE::Offset2D const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxLevelIdc, fieldOffsetGranularity );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    std::strong_ordering operator<=>( VideoDecodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
+        return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
+        return cmp;
+      if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ); cmp != 0 )
+        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+      if ( auto cmp = fieldOffsetGranularity <=> rhs.fieldOffsetGranularity; cmp != 0 )
+        return cmp;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( VideoDecodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ) == 0 ) &&
+             ( fieldOffsetGranularity == rhs.fieldOffsetGranularity );
+    }
+
+    bool operator!=( VideoDecodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264CapabilitiesKHR;
+    void * pNext = {};
+    StdVideoH264LevelIdc maxLevelIdc = {};
+    VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH264CapabilitiesKHR>
+  {
+    using Type = VideoDecodeH264CapabilitiesKHR;
+  };
+
+  // wrapper struct for struct VkVideoDecodeH264DpbSlotInfoKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264DpbSlotInfoKHR.html
+  struct VideoDecodeH264DpbSlotInfoKHR
+  {
+    using NativeType = VkVideoDecodeH264DpbSlotInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264DpbSlotInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoKHR( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ = {},
+                                                        const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , pStdReferenceInfo{ pStdReferenceInfo_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoKHR( VideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH264DpbSlotInfoKHR( VkVideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH264DpbSlotInfoKHR( *reinterpret_cast<VideoDecodeH264DpbSlotInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoDecodeH264DpbSlotInfoKHR & operator=( VideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    VideoDecodeH264DpbSlotInfoKHR & operator=( VkVideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoKHR &
+      setPStdReferenceInfo( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdReferenceInfo = pStdReferenceInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkVideoDecodeH264DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH264DpbSlotInfoKHR *>( this );
+    }
+
+    operator VkVideoDecodeH264DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH264DpbSlotInfoKHR *>( this );
+    }
+
+    operator VkVideoDecodeH264DpbSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkVideoDecodeH264DpbSlotInfoKHR *>( this );
+    }
+
+    operator
VkVideoDecodeH264DpbSlotInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdReferenceInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264DpbSlotInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); +# endif + } + + bool operator!=( VideoDecodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264DpbSlotInfoKHR; + const void * pNext = {}; + const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH264DpbSlotInfoKHR; + }; + + // wrapper struct for struct VkVideoDecodeH264InlineSessionParametersInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264InlineSessionParametersInfoKHR.html + struct VideoDecodeH264InlineSessionParametersInfoKHR + { + using NativeType = VkVideoDecodeH264InlineSessionParametersInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264InlineSessionParametersInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264InlineSessionParametersInfoKHR( const StdVideoH264SequenceParameterSet * pStdSPS_ = {}, + const StdVideoH264PictureParameterSet * pStdPPS_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdSPS{ pStdSPS_ } + , pStdPPS{ pStdPPS_ } + { + } + + VULKAN_HPP_CONSTEXPR + VideoDecodeH264InlineSessionParametersInfoKHR( VideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264InlineSessionParametersInfoKHR( VkVideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264InlineSessionParametersInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeH264InlineSessionParametersInfoKHR & operator=( VideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeH264InlineSessionParametersInfoKHR & operator=( VkVideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264InlineSessionParametersInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264InlineSessionParametersInfoKHR & setPStdSPS( const StdVideoH264SequenceParameterSet * pStdSPS_ ) VULKAN_HPP_NOEXCEPT + { + pStdSPS = pStdSPS_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264InlineSessionParametersInfoKHR & setPStdPPS( const StdVideoH264PictureParameterSet * pStdPPS_ ) VULKAN_HPP_NOEXCEPT + { + pStdPPS = pStdPPS_; + return *this; + } 
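+    // Editorial usage sketch (illustrative only, not part of the generated header): assuming the
+    // application has already populated the H.264 std parameter sets it wants to supply inline,
+    // the fluent setters above can be chained, e.g.:
+    //
+    //   StdVideoH264SequenceParameterSet sps = /* filled in by the application */ {};
+    //   StdVideoH264PictureParameterSet  pps = /* filled in by the application */ {};
+    //   VULKAN_HPP_NAMESPACE::VideoDecodeH264InlineSessionParametersInfoKHR inlineParams{};
+    //   inlineParams.setPStdSPS( &sps ).setPStdPPS( &pps );
+    //
+    // The populated struct would then typically be chained via pNext into the decode info when the
+    // decode sources its parameter sets inline rather than from a session parameters object.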
+#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeH264InlineSessionParametersInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264InlineSessionParametersInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264InlineSessionParametersInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeH264InlineSessionParametersInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdSPS, pStdPPS ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264InlineSessionParametersInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdSPS == rhs.pStdSPS ) && ( pStdPPS == rhs.pStdPPS ); +# endif + } + + bool operator!=( VideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264InlineSessionParametersInfoKHR; + const void * pNext = {}; + const StdVideoH264SequenceParameterSet * pStdSPS = {}; + const StdVideoH264PictureParameterSet * pStdPPS = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH264InlineSessionParametersInfoKHR; + }; + + // wrapper struct for struct VkVideoDecodeH264PictureInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264PictureInfoKHR.html + struct VideoDecodeH264PictureInfoKHR + { + using NativeType = VkVideoDecodeH264PictureInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264PictureInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoKHR( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ = {}, + uint32_t sliceCount_ = {}, + const uint32_t * pSliceOffsets_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdPictureInfo{ pStdPictureInfo_ } + , sliceCount{ sliceCount_ } + , pSliceOffsets{ pSliceOffsets_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoKHR( VideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264PictureInfoKHR( VkVideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264PictureInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264PictureInfoKHR( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sliceOffsets_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), pStdPictureInfo( pStdPictureInfo_ ), sliceCount( static_cast( sliceOffsets_.size() ) ), pSliceOffsets( sliceOffsets_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoDecodeH264PictureInfoKHR & operator=( VideoDecodeH264PictureInfoKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeH264PictureInfoKHR & operator=( VkVideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setPStdPictureInfo( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setSliceCount( uint32_t sliceCount_ ) VULKAN_HPP_NOEXCEPT + { + sliceCount = sliceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setPSliceOffsets( const uint32_t * pSliceOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pSliceOffsets = pSliceOffsets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264PictureInfoKHR & setSliceOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sliceOffsets_ ) VULKAN_HPP_NOEXCEPT + { + sliceCount = static_cast( sliceOffsets_.size() ); + pSliceOffsets = sliceOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeH264PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264PictureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeH264PictureInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdPictureInfo, sliceCount, pSliceOffsets ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264PictureInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && ( sliceCount == rhs.sliceCount ) && + ( pSliceOffsets == rhs.pSliceOffsets ); +# endif + } + + bool operator!=( VideoDecodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264PictureInfoKHR; + const void * pNext = {}; + const StdVideoDecodeH264PictureInfo * pStdPictureInfo = {}; + uint32_t sliceCount = {}; + const uint32_t * pSliceOffsets = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH264PictureInfoKHR; + }; + + // wrapper struct for struct VkVideoDecodeH264ProfileInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264ProfileInfoKHR.html + struct VideoDecodeH264ProfileInfoKHR + { + using NativeType = VkVideoDecodeH264ProfileInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType 
structureType = StructureType::eVideoDecodeH264ProfileInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileInfoKHR( StdVideoH264ProfileIdc stdProfileIdc_ = {}, + VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout_ = + VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdProfileIdc{ stdProfileIdc_ } + , pictureLayout{ pictureLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileInfoKHR( VideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264ProfileInfoKHR( VkVideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264ProfileInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeH264ProfileInfoKHR & operator=( VideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeH264ProfileInfoKHR & operator=( VkVideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoKHR & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + { + stdProfileIdc = stdProfileIdc_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoKHR & + setPictureLayout( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout_ ) VULKAN_HPP_NOEXCEPT + { + pictureLayout = pictureLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeH264ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeH264ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdProfileIdc, pictureLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoDecodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = pictureLayout <=> rhs.pictureLayout; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoDecodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 ) && + ( pictureLayout == rhs.pictureLayout ); + } + + bool operator!=( VideoDecodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264ProfileInfoKHR; + const void * pNext = {}; + StdVideoH264ProfileIdc stdProfileIdc = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout = VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH264ProfileInfoKHR; + }; + + // wrapper struct for struct VkVideoDecodeH264SessionParametersAddInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264SessionParametersAddInfoKHR.html + struct VideoDecodeH264SessionParametersAddInfoKHR + { + using NativeType = VkVideoDecodeH264SessionParametersAddInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264SessionParametersAddInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoKHR( uint32_t stdSPSCount_ = {}, + const StdVideoH264SequenceParameterSet * pStdSPSs_ = {}, + uint32_t stdPPSCount_ = {}, + const StdVideoH264PictureParameterSet * pStdPPSs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdSPSCount{ stdSPSCount_ } + , pStdSPSs{ pStdSPSs_ } + , stdPPSCount{ stdPPSCount_ } + , pStdPPSs{ pStdPPSs_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoKHR( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionParametersAddInfoKHR( VkVideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264SessionParametersAddInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264SessionParametersAddInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stdSPSCount( static_cast( stdSPSs_.size() ) ) + , pStdSPSs( stdSPSs_.data() ) + , stdPPSCount( static_cast( stdPPSs_.size() ) ) + , pStdPPSs( stdPPSs_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoDecodeH264SessionParametersAddInfoKHR & operator=( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeH264SessionParametersAddInfoKHR & operator=( VkVideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext 
= pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdSPSCount = stdSPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH264SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdSPSs = pStdSPSs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264SessionParametersAddInfoKHR & + setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT + { + stdSPSCount = static_cast( stdSPSs_.size() ); + pStdSPSs = stdSPSs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdPPSCount = stdPPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH264PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdPPSs = pStdPPSs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264SessionParametersAddInfoKHR & + setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT + { + stdPPSCount = static_cast( stdPPSs_.size() ); + pStdPPSs = stdPPSs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeH264SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264SessionParametersAddInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeH264SessionParametersAddInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264SessionParametersAddInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) && + ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs ); +# endif + } + + bool operator!=( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersAddInfoKHR; + const void * pNext = {}; + uint32_t stdSPSCount = {}; + const StdVideoH264SequenceParameterSet * pStdSPSs = {}; + uint32_t stdPPSCount = {}; + const StdVideoH264PictureParameterSet * pStdPPSs = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH264SessionParametersAddInfoKHR; + }; + + // wrapper struct for struct VkVideoDecodeH264SessionParametersCreateInfoKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH264SessionParametersCreateInfoKHR.html + struct VideoDecodeH264SessionParametersCreateInfoKHR + { + using NativeType = VkVideoDecodeH264SessionParametersCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoDecodeH264SessionParametersCreateInfoKHR( uint32_t maxStdSPSCount_ = {}, + uint32_t maxStdPPSCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxStdSPSCount{ maxStdSPSCount_ } + , maxStdPPSCount{ maxStdPPSCount_ } + , pParametersAddInfo{ pParametersAddInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR + VideoDecodeH264SessionParametersCreateInfoKHR( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionParametersCreateInfoKHR( VkVideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264SessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeH264SessionParametersCreateInfoKHR & operator=( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeH264SessionParametersCreateInfoKHR & operator=( VkVideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT + { + maxStdSPSCount = maxStdSPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT + { + maxStdPPSCount = maxStdPPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & + setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT + { + pParametersAddInfo = pParametersAddInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeH264SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264SessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeH264SessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR 
) + auto operator<=>( VideoDecodeH264SessionParametersCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) && ( maxStdPPSCount == rhs.maxStdPPSCount ) && + ( pParametersAddInfo == rhs.pParametersAddInfo ); +# endif + } + + bool operator!=( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR; + const void * pNext = {}; + uint32_t maxStdSPSCount = {}; + uint32_t maxStdPPSCount = {}; + const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR * pParametersAddInfo = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH264SessionParametersCreateInfoKHR; + }; + + // wrapper struct for struct VkVideoDecodeH265CapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265CapabilitiesKHR.html + struct VideoDecodeH265CapabilitiesKHR + { + using NativeType = VkVideoDecodeH265CapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265CapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265CapabilitiesKHR( StdVideoH265LevelIdc maxLevelIdc_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxLevelIdc{ maxLevelIdc_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeH265CapabilitiesKHR( VideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265CapabilitiesKHR( VkVideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265CapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeH265CapabilitiesKHR & operator=( VideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeH265CapabilitiesKHR & operator=( VkVideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoDecodeH265CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeH265CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxLevelIdc ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoDecodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ); cmp != 0 ) + 
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoDecodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ) == 0 ); + } + + bool operator!=( VideoDecodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265CapabilitiesKHR; + void * pNext = {}; + StdVideoH265LevelIdc maxLevelIdc = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH265CapabilitiesKHR; + }; + + // wrapper struct for struct VkVideoDecodeH265DpbSlotInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265DpbSlotInfoKHR.html + struct VideoDecodeH265DpbSlotInfoKHR + { + using NativeType = VkVideoDecodeH265DpbSlotInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265DpbSlotInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoKHR( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdReferenceInfo{ pStdReferenceInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoKHR( VideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265DpbSlotInfoKHR( VkVideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265DpbSlotInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeH265DpbSlotInfoKHR & operator=( VideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeH265DpbSlotInfoKHR & operator=( VkVideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoKHR & + setPStdReferenceInfo( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdReferenceInfo = pStdReferenceInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeH265DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265DpbSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeH265DpbSlotInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdReferenceInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265DpbSlotInfoKHR const & ) const = default; +#else + bool operator==( 
VideoDecodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); +# endif + } + + bool operator!=( VideoDecodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265DpbSlotInfoKHR; + const void * pNext = {}; + const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH265DpbSlotInfoKHR; + }; + + // wrapper struct for struct VkVideoDecodeH265InlineSessionParametersInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265InlineSessionParametersInfoKHR.html + struct VideoDecodeH265InlineSessionParametersInfoKHR + { + using NativeType = VkVideoDecodeH265InlineSessionParametersInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265InlineSessionParametersInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265InlineSessionParametersInfoKHR( const StdVideoH265VideoParameterSet * pStdVPS_ = {}, + const StdVideoH265SequenceParameterSet * pStdSPS_ = {}, + const StdVideoH265PictureParameterSet * pStdPPS_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdVPS{ pStdVPS_ } + , pStdSPS{ pStdSPS_ } + , pStdPPS{ pStdPPS_ } + { + } + + VULKAN_HPP_CONSTEXPR + VideoDecodeH265InlineSessionParametersInfoKHR( VideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265InlineSessionParametersInfoKHR( VkVideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265InlineSessionParametersInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeH265InlineSessionParametersInfoKHR & operator=( VideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeH265InlineSessionParametersInfoKHR & operator=( VkVideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265InlineSessionParametersInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265InlineSessionParametersInfoKHR & setPStdVPS( const StdVideoH265VideoParameterSet * pStdVPS_ ) VULKAN_HPP_NOEXCEPT + { + pStdVPS = pStdVPS_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265InlineSessionParametersInfoKHR & setPStdSPS( const StdVideoH265SequenceParameterSet * pStdSPS_ ) VULKAN_HPP_NOEXCEPT + { + pStdSPS = pStdSPS_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265InlineSessionParametersInfoKHR & setPStdPPS( const StdVideoH265PictureParameterSet * pStdPPS_ ) VULKAN_HPP_NOEXCEPT + { + pStdPPS = pStdPPS_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeH265InlineSessionParametersInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkVideoDecodeH265InlineSessionParametersInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265InlineSessionParametersInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeH265InlineSessionParametersInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdVPS, pStdSPS, pStdPPS ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265InlineSessionParametersInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdVPS == rhs.pStdVPS ) && ( pStdSPS == rhs.pStdSPS ) && ( pStdPPS == rhs.pStdPPS ); +# endif + } + + bool operator!=( VideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265InlineSessionParametersInfoKHR; + const void * pNext = {}; + const StdVideoH265VideoParameterSet * pStdVPS = {}; + const StdVideoH265SequenceParameterSet * pStdSPS = {}; + const StdVideoH265PictureParameterSet * pStdPPS = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH265InlineSessionParametersInfoKHR; + }; + + // wrapper struct for struct VkVideoDecodeH265PictureInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265PictureInfoKHR.html + struct VideoDecodeH265PictureInfoKHR + { + using NativeType = VkVideoDecodeH265PictureInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265PictureInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoKHR( const StdVideoDecodeH265PictureInfo * pStdPictureInfo_ = {}, + uint32_t sliceSegmentCount_ = {}, + const uint32_t * pSliceSegmentOffsets_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdPictureInfo{ pStdPictureInfo_ } + , sliceSegmentCount{ sliceSegmentCount_ } + , pSliceSegmentOffsets{ pSliceSegmentOffsets_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoKHR( VideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265PictureInfoKHR( VkVideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265PictureInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265PictureInfoKHR( const StdVideoDecodeH265PictureInfo * pStdPictureInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sliceSegmentOffsets_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , pStdPictureInfo( pStdPictureInfo_ ) + , sliceSegmentCount( static_cast( sliceSegmentOffsets_.size() ) ) + , pSliceSegmentOffsets( sliceSegmentOffsets_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoDecodeH265PictureInfoKHR & operator=( VideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeH265PictureInfoKHR & operator=( VkVideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setPStdPictureInfo( const StdVideoDecodeH265PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setSliceSegmentCount( uint32_t sliceSegmentCount_ ) VULKAN_HPP_NOEXCEPT + { + sliceSegmentCount = sliceSegmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setPSliceSegmentOffsets( const uint32_t * pSliceSegmentOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pSliceSegmentOffsets = pSliceSegmentOffsets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265PictureInfoKHR & + setSliceSegmentOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sliceSegmentOffsets_ ) VULKAN_HPP_NOEXCEPT + { + sliceSegmentCount = static_cast( sliceSegmentOffsets_.size() ); + pSliceSegmentOffsets = sliceSegmentOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeH265PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265PictureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeH265PictureInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdPictureInfo, sliceSegmentCount, pSliceSegmentOffsets ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265PictureInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && ( sliceSegmentCount == rhs.sliceSegmentCount ) && + ( pSliceSegmentOffsets == rhs.pSliceSegmentOffsets ); +# endif + } + + bool operator!=( VideoDecodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265PictureInfoKHR; + const void * pNext = {}; + const StdVideoDecodeH265PictureInfo * pStdPictureInfo = {}; + uint32_t sliceSegmentCount = {}; + const uint32_t * pSliceSegmentOffsets = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH265PictureInfoKHR; + }; + + // wrapper struct for struct VkVideoDecodeH265ProfileInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265ProfileInfoKHR.html + struct VideoDecodeH265ProfileInfoKHR + { + using NativeType = 
VkVideoDecodeH265ProfileInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265ProfileInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileInfoKHR( StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdProfileIdc{ stdProfileIdc_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileInfoKHR( VideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265ProfileInfoKHR( VkVideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265ProfileInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeH265ProfileInfoKHR & operator=( VideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeH265ProfileInfoKHR & operator=( VkVideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileInfoKHR & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + { + stdProfileIdc = stdProfileIdc_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeH265ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeH265ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdProfileIdc ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoDecodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoDecodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 ); + } + + bool operator!=( VideoDecodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265ProfileInfoKHR; + const void * pNext = {}; + StdVideoH265ProfileIdc stdProfileIdc = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH265ProfileInfoKHR; + }; + + // wrapper struct for struct VkVideoDecodeH265SessionParametersAddInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265SessionParametersAddInfoKHR.html + struct VideoDecodeH265SessionParametersAddInfoKHR + { + using NativeType = VkVideoDecodeH265SessionParametersAddInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265SessionParametersAddInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoKHR( uint32_t stdVPSCount_ = {}, + const StdVideoH265VideoParameterSet * pStdVPSs_ = {}, + uint32_t stdSPSCount_ = {}, + const StdVideoH265SequenceParameterSet * pStdSPSs_ = {}, + uint32_t stdPPSCount_ = {}, + const StdVideoH265PictureParameterSet * pStdPPSs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdVPSCount{ stdVPSCount_ } + , pStdVPSs{ pStdVPSs_ } + , stdSPSCount{ stdSPSCount_ } + , pStdSPSs{ pStdSPSs_ } + , stdPPSCount{ stdPPSCount_ } + , pStdPPSs{ pStdPPSs_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoKHR( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265SessionParametersAddInfoKHR( VkVideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265SessionParametersAddInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265SessionParametersAddInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdVPSs_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stdVPSCount( static_cast( stdVPSs_.size() ) ) + , pStdVPSs( stdVPSs_.data() ) + , stdSPSCount( static_cast( stdSPSs_.size() ) ) + , pStdSPSs( stdSPSs_.data() ) + , stdPPSCount( static_cast( stdPPSs_.size() ) ) + , pStdPPSs( stdPPSs_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoDecodeH265SessionParametersAddInfoKHR & operator=( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeH265SessionParametersAddInfoKHR & operator=( VkVideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) 
VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setStdVPSCount( uint32_t stdVPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdVPSCount = stdVPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPStdVPSs( const StdVideoH265VideoParameterSet * pStdVPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdVPSs = pStdVPSs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265SessionParametersAddInfoKHR & + setStdVPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdVPSs_ ) VULKAN_HPP_NOEXCEPT + { + stdVPSCount = static_cast( stdVPSs_.size() ); + pStdVPSs = stdVPSs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdSPSCount = stdSPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH265SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdSPSs = pStdSPSs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265SessionParametersAddInfoKHR & + setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT + { + stdSPSCount = static_cast( stdSPSs_.size() ); + pStdSPSs = stdSPSs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdPPSCount = stdPPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH265PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdPPSs = pStdPPSs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265SessionParametersAddInfoKHR & + setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT + { + stdPPSCount = static_cast( stdPPSs_.size() ); + pStdPPSs = stdPPSs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeH265SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265SessionParametersAddInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeH265SessionParametersAddInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdVPSCount, pStdVPSs, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265SessionParametersAddInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdVPSCount == rhs.stdVPSCount ) 
&& ( pStdVPSs == rhs.pStdVPSs ) && + ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) && ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs ); +# endif + } + + bool operator!=( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersAddInfoKHR; + const void * pNext = {}; + uint32_t stdVPSCount = {}; + const StdVideoH265VideoParameterSet * pStdVPSs = {}; + uint32_t stdSPSCount = {}; + const StdVideoH265SequenceParameterSet * pStdSPSs = {}; + uint32_t stdPPSCount = {}; + const StdVideoH265PictureParameterSet * pStdPPSs = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH265SessionParametersAddInfoKHR; + }; + + // wrapper struct for struct VkVideoDecodeH265SessionParametersCreateInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeH265SessionParametersCreateInfoKHR.html + struct VideoDecodeH265SessionParametersCreateInfoKHR + { + using NativeType = VkVideoDecodeH265SessionParametersCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoDecodeH265SessionParametersCreateInfoKHR( uint32_t maxStdVPSCount_ = {}, + uint32_t maxStdSPSCount_ = {}, + uint32_t maxStdPPSCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxStdVPSCount{ maxStdVPSCount_ } + , maxStdSPSCount{ maxStdSPSCount_ } + , maxStdPPSCount{ maxStdPPSCount_ } + , pParametersAddInfo{ pParametersAddInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR + VideoDecodeH265SessionParametersCreateInfoKHR( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265SessionParametersCreateInfoKHR( VkVideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265SessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeH265SessionParametersCreateInfoKHR & operator=( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeH265SessionParametersCreateInfoKHR & operator=( VkVideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setMaxStdVPSCount( uint32_t maxStdVPSCount_ ) VULKAN_HPP_NOEXCEPT + { + maxStdVPSCount = maxStdVPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT + { + maxStdSPSCount = maxStdSPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT + { + 
maxStdPPSCount = maxStdPPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & + setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT + { + pParametersAddInfo = pParametersAddInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeH265SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265SessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeH265SessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxStdVPSCount, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265SessionParametersCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxStdVPSCount == rhs.maxStdVPSCount ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) && + ( maxStdPPSCount == rhs.maxStdPPSCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo ); +# endif + } + + bool operator!=( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR; + const void * pNext = {}; + uint32_t maxStdVPSCount = {}; + uint32_t maxStdSPSCount = {}; + uint32_t maxStdPPSCount = {}; + const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR * pParametersAddInfo = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH265SessionParametersCreateInfoKHR; + }; + + // wrapper struct for struct VkVideoDecodeInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeInfoKHR.html + struct VideoDecodeInfoKHR + { + using NativeType = VkVideoDecodeInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ = {}, + VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ = {}, + uint32_t referenceSlotCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , srcBuffer{ srcBuffer_ } + , srcBufferOffset{ 
srcBufferOffset_ } + , srcBufferRange{ srcBufferRange_ } + , dstPictureResource{ dstPictureResource_ } + , pSetupReferenceSlot{ pSetupReferenceSlot_ } + , referenceSlotCount{ referenceSlotCount_ } + , pReferenceSlots{ pReferenceSlots_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeInfoKHR( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : VideoDecodeInfoKHR( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_, + VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource_, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , srcBuffer( srcBuffer_ ) + , srcBufferOffset( srcBufferOffset_ ) + , srcBufferRange( srcBufferRange_ ) + , dstPictureResource( dstPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( static_cast( referenceSlots_.size() ) ) + , pReferenceSlots( referenceSlots_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoDecodeInfoKHR & operator=( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeInfoKHR & operator=( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT + { + srcBuffer = srcBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcBufferOffset = srcBufferOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBufferRange( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ ) VULKAN_HPP_NOEXCEPT + { + srcBufferRange = srcBufferRange_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & + setDstPictureResource( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & dstPictureResource_ ) VULKAN_HPP_NOEXCEPT + { + dstPictureResource = dstPictureResource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & + setPSetupReferenceSlot( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT + { + pSetupReferenceSlot = pSetupReferenceSlot_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = referenceSlotCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & + setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceSlots 
= pReferenceSlots_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeInfoKHR & setReferenceSlots( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = static_cast( referenceSlots_.size() ); + pReferenceSlots = referenceSlots_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, flags, srcBuffer, srcBufferOffset, srcBufferRange, dstPictureResource, pSetupReferenceSlot, referenceSlotCount, pReferenceSlots ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( srcBuffer == rhs.srcBuffer ) && + ( srcBufferOffset == rhs.srcBufferOffset ) && ( srcBufferRange == rhs.srcBufferRange ) && ( dstPictureResource == rhs.dstPictureResource ) && + ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) && ( referenceSlotCount == rhs.referenceSlotCount ) && + ( pReferenceSlots == rhs.pReferenceSlots ); +# endif + } + + bool operator!=( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange = {}; + VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot = {}; + uint32_t referenceSlotCount = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeInfoKHR; + }; + + // wrapper struct for struct VkVideoDecodeUsageInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoDecodeUsageInfoKHR.html + struct VideoDecodeUsageInfoKHR + { + using NativeType = VkVideoDecodeUsageInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeUsageInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeUsageInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , videoUsageHints{ videoUsageHints_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeUsageInfoKHR( 
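+    // Illustrative usage sketch (not part of the generated API; bitstreamBuffer, bitstreamSize,
+    // dstPictureResource and referenceSlots are hypothetical application objects): the fluent
+    // setters of VideoDecodeInfoKHR above can be chained, and setReferenceSlots() fills both
+    // referenceSlotCount and pReferenceSlots from a container:
+    //
+    //   VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR decodeInfo{};
+    //   decodeInfo.setSrcBuffer( bitstreamBuffer )
+    //             .setSrcBufferOffset( 0 )
+    //             .setSrcBufferRange( bitstreamSize )
+    //             .setDstPictureResource( dstPictureResource )
+    //             .setReferenceSlots( referenceSlots );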
VideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeUsageInfoKHR( VkVideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeUsageInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeUsageInfoKHR & operator=( VideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeUsageInfoKHR & operator=( VkVideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeUsageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeUsageInfoKHR & setVideoUsageHints( VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints_ ) VULKAN_HPP_NOEXCEPT + { + videoUsageHints = videoUsageHints_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeUsageInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeUsageInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeUsageInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoDecodeUsageInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, videoUsageHints ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeUsageInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoUsageHints == rhs.videoUsageHints ); +# endif + } + + bool operator!=( VideoDecodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeUsageInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeUsageInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeAV1CapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1CapabilitiesKHR.html + struct VideoEncodeAV1CapabilitiesKHR + { + using NativeType = VkVideoEncodeAV1CapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1CapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1CapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilityFlagsKHR flags_ = {}, + StdVideoAV1Level maxLevel_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D codedPictureAlignment_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxTiles_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minTileSize_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxTileSize_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR superblockSizes_ = {}, + uint32_t maxSingleReferenceCount_ = {}, + 
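+    // Illustrative usage sketch (not part of the generated API; profileInfo is a hypothetical
+    // VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR built elsewhere): VideoDecodeUsageInfoKHR above
+    // only carries usage hints and is chained into the video profile via pNext:
+    //
+    //   VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR usageInfo{};
+    //   usageInfo.setVideoUsageHints( VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagBitsKHR::eTranscoding );
+    //   profileInfo.setPNext( &usageInfo );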
uint32_t singleReferenceNameMask_ = {}, + uint32_t maxUnidirectionalCompoundReferenceCount_ = {}, + uint32_t maxUnidirectionalCompoundGroup1ReferenceCount_ = {}, + uint32_t unidirectionalCompoundReferenceNameMask_ = {}, + uint32_t maxBidirectionalCompoundReferenceCount_ = {}, + uint32_t maxBidirectionalCompoundGroup1ReferenceCount_ = {}, + uint32_t maxBidirectionalCompoundGroup2ReferenceCount_ = {}, + uint32_t bidirectionalCompoundReferenceNameMask_ = {}, + uint32_t maxTemporalLayerCount_ = {}, + uint32_t maxSpatialLayerCount_ = {}, + uint32_t maxOperatingPoints_ = {}, + uint32_t minQIndex_ = {}, + uint32_t maxQIndex_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1StdFlagsKHR stdSyntaxFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , maxLevel{ maxLevel_ } + , codedPictureAlignment{ codedPictureAlignment_ } + , maxTiles{ maxTiles_ } + , minTileSize{ minTileSize_ } + , maxTileSize{ maxTileSize_ } + , superblockSizes{ superblockSizes_ } + , maxSingleReferenceCount{ maxSingleReferenceCount_ } + , singleReferenceNameMask{ singleReferenceNameMask_ } + , maxUnidirectionalCompoundReferenceCount{ maxUnidirectionalCompoundReferenceCount_ } + , maxUnidirectionalCompoundGroup1ReferenceCount{ maxUnidirectionalCompoundGroup1ReferenceCount_ } + , unidirectionalCompoundReferenceNameMask{ unidirectionalCompoundReferenceNameMask_ } + , maxBidirectionalCompoundReferenceCount{ maxBidirectionalCompoundReferenceCount_ } + , maxBidirectionalCompoundGroup1ReferenceCount{ maxBidirectionalCompoundGroup1ReferenceCount_ } + , maxBidirectionalCompoundGroup2ReferenceCount{ maxBidirectionalCompoundGroup2ReferenceCount_ } + , bidirectionalCompoundReferenceNameMask{ bidirectionalCompoundReferenceNameMask_ } + , maxTemporalLayerCount{ maxTemporalLayerCount_ } + , maxSpatialLayerCount{ maxSpatialLayerCount_ } + , maxOperatingPoints{ maxOperatingPoints_ } + , minQIndex{ minQIndex_ } + , maxQIndex{ maxQIndex_ } + , prefersGopRemainingFrames{ prefersGopRemainingFrames_ } + , requiresGopRemainingFrames{ requiresGopRemainingFrames_ } + , stdSyntaxFlags{ stdSyntaxFlags_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1CapabilitiesKHR( VideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1CapabilitiesKHR( VkVideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1CapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1CapabilitiesKHR & operator=( VideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeAV1CapabilitiesKHR & operator=( VkVideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeAV1CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + 
pNext, + flags, + maxLevel, + codedPictureAlignment, + maxTiles, + minTileSize, + maxTileSize, + superblockSizes, + maxSingleReferenceCount, + singleReferenceNameMask, + maxUnidirectionalCompoundReferenceCount, + maxUnidirectionalCompoundGroup1ReferenceCount, + unidirectionalCompoundReferenceNameMask, + maxBidirectionalCompoundReferenceCount, + maxBidirectionalCompoundGroup1ReferenceCount, + maxBidirectionalCompoundGroup2ReferenceCount, + bidirectionalCompoundReferenceNameMask, + maxTemporalLayerCount, + maxSpatialLayerCount, + maxOperatingPoints, + minQIndex, + maxQIndex, + prefersGopRemainingFrames, + requiresGopRemainingFrames, + stdSyntaxFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = codedPictureAlignment <=> rhs.codedPictureAlignment; cmp != 0 ) + return cmp; + if ( auto cmp = maxTiles <=> rhs.maxTiles; cmp != 0 ) + return cmp; + if ( auto cmp = minTileSize <=> rhs.minTileSize; cmp != 0 ) + return cmp; + if ( auto cmp = maxTileSize <=> rhs.maxTileSize; cmp != 0 ) + return cmp; + if ( auto cmp = superblockSizes <=> rhs.superblockSizes; cmp != 0 ) + return cmp; + if ( auto cmp = maxSingleReferenceCount <=> rhs.maxSingleReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = singleReferenceNameMask <=> rhs.singleReferenceNameMask; cmp != 0 ) + return cmp; + if ( auto cmp = maxUnidirectionalCompoundReferenceCount <=> rhs.maxUnidirectionalCompoundReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxUnidirectionalCompoundGroup1ReferenceCount <=> rhs.maxUnidirectionalCompoundGroup1ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = unidirectionalCompoundReferenceNameMask <=> rhs.unidirectionalCompoundReferenceNameMask; cmp != 0 ) + return cmp; + if ( auto cmp = maxBidirectionalCompoundReferenceCount <=> rhs.maxBidirectionalCompoundReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxBidirectionalCompoundGroup1ReferenceCount <=> rhs.maxBidirectionalCompoundGroup1ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxBidirectionalCompoundGroup2ReferenceCount <=> rhs.maxBidirectionalCompoundGroup2ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = bidirectionalCompoundReferenceNameMask <=> rhs.bidirectionalCompoundReferenceNameMask; cmp != 0 ) + return cmp; + if ( auto cmp = maxTemporalLayerCount <=> rhs.maxTemporalLayerCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxSpatialLayerCount <=> rhs.maxSpatialLayerCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxOperatingPoints <=> rhs.maxOperatingPoints; cmp != 0 ) + return cmp; + if ( auto cmp = minQIndex <=> rhs.minQIndex; cmp != 0 ) + return cmp; + if ( auto cmp = maxQIndex <=> rhs.maxQIndex; cmp != 0 ) + return cmp; + if ( auto cmp = prefersGopRemainingFrames <=> rhs.prefersGopRemainingFrames; cmp != 0 ) + return cmp; + if ( auto cmp = requiresGopRemainingFrames <=> rhs.requiresGopRemainingFrames; cmp != 0 ) + return cmp; + if ( auto cmp = stdSyntaxFlags <=> rhs.stdSyntaxFlags; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( 
VideoEncodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ) == 0 ) && ( codedPictureAlignment == rhs.codedPictureAlignment ) && + ( maxTiles == rhs.maxTiles ) && ( minTileSize == rhs.minTileSize ) && ( maxTileSize == rhs.maxTileSize ) && + ( superblockSizes == rhs.superblockSizes ) && ( maxSingleReferenceCount == rhs.maxSingleReferenceCount ) && + ( singleReferenceNameMask == rhs.singleReferenceNameMask ) && + ( maxUnidirectionalCompoundReferenceCount == rhs.maxUnidirectionalCompoundReferenceCount ) && + ( maxUnidirectionalCompoundGroup1ReferenceCount == rhs.maxUnidirectionalCompoundGroup1ReferenceCount ) && + ( unidirectionalCompoundReferenceNameMask == rhs.unidirectionalCompoundReferenceNameMask ) && + ( maxBidirectionalCompoundReferenceCount == rhs.maxBidirectionalCompoundReferenceCount ) && + ( maxBidirectionalCompoundGroup1ReferenceCount == rhs.maxBidirectionalCompoundGroup1ReferenceCount ) && + ( maxBidirectionalCompoundGroup2ReferenceCount == rhs.maxBidirectionalCompoundGroup2ReferenceCount ) && + ( bidirectionalCompoundReferenceNameMask == rhs.bidirectionalCompoundReferenceNameMask ) && + ( maxTemporalLayerCount == rhs.maxTemporalLayerCount ) && ( maxSpatialLayerCount == rhs.maxSpatialLayerCount ) && + ( maxOperatingPoints == rhs.maxOperatingPoints ) && ( minQIndex == rhs.minQIndex ) && ( maxQIndex == rhs.maxQIndex ) && + ( prefersGopRemainingFrames == rhs.prefersGopRemainingFrames ) && ( requiresGopRemainingFrames == rhs.requiresGopRemainingFrames ) && + ( stdSyntaxFlags == rhs.stdSyntaxFlags ); + } + + bool operator!=( VideoEncodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1CapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilityFlagsKHR flags = {}; + StdVideoAV1Level maxLevel = {}; + VULKAN_HPP_NAMESPACE::Extent2D codedPictureAlignment = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxTiles = {}; + VULKAN_HPP_NAMESPACE::Extent2D minTileSize = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxTileSize = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR superblockSizes = {}; + uint32_t maxSingleReferenceCount = {}; + uint32_t singleReferenceNameMask = {}; + uint32_t maxUnidirectionalCompoundReferenceCount = {}; + uint32_t maxUnidirectionalCompoundGroup1ReferenceCount = {}; + uint32_t unidirectionalCompoundReferenceNameMask = {}; + uint32_t maxBidirectionalCompoundReferenceCount = {}; + uint32_t maxBidirectionalCompoundGroup1ReferenceCount = {}; + uint32_t maxBidirectionalCompoundGroup2ReferenceCount = {}; + uint32_t bidirectionalCompoundReferenceNameMask = {}; + uint32_t maxTemporalLayerCount = {}; + uint32_t maxSpatialLayerCount = {}; + uint32_t maxOperatingPoints = {}; + uint32_t minQIndex = {}; + uint32_t maxQIndex = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames = {}; + VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1StdFlagsKHR stdSyntaxFlags = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1CapabilitiesKHR; + }; + + // wrapper struct for struct VkVideoEncodeAV1DpbSlotInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1DpbSlotInfoKHR.html + struct VideoEncodeAV1DpbSlotInfoKHR + { + using NativeType = 
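+    // Note (illustrative comment, not generated code): maxLevel is a plain C type from the video
+    // std headers (StdVideoAV1Level), so operator== and operator<=> for
+    // VideoEncodeAV1CapabilitiesKHR above compare it with memcmp instead of defaulting the
+    // operator. The capabilities structs are output-only, which is why they expose a mutable
+    // void * pNext and no setters. A hedged query sketch with hypothetical variables:
+    //
+    //   VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilitiesKHR av1Caps{};
+    //   VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR          caps{};
+    //   caps.pNext = &av1Caps;   // chain the codec-specific caps, then query them through
+    //                            // vkGetPhysicalDeviceVideoCapabilitiesKHR for an AV1 encode profile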
VkVideoEncodeAV1DpbSlotInfoKHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1DpbSlotInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeAV1DpbSlotInfoKHR( const StdVideoEncodeAV1ReferenceInfo * pStdReferenceInfo_ = {},
+                                                       const void *                           pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , pStdReferenceInfo{ pStdReferenceInfo_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeAV1DpbSlotInfoKHR( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeAV1DpbSlotInfoKHR( VkVideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeAV1DpbSlotInfoKHR( *reinterpret_cast<VideoEncodeAV1DpbSlotInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoEncodeAV1DpbSlotInfoKHR & operator=( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    VideoEncodeAV1DpbSlotInfoKHR & operator=( VkVideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeAV1DpbSlotInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1DpbSlotInfoKHR & setPStdReferenceInfo( const StdVideoEncodeAV1ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdReferenceInfo = pStdReferenceInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkVideoEncodeAV1DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeAV1DpbSlotInfoKHR *>( this );
+    }
+
+    operator VkVideoEncodeAV1DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeAV1DpbSlotInfoKHR *>( this );
+    }
+
+    operator VkVideoEncodeAV1DpbSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkVideoEncodeAV1DpbSlotInfoKHR *>( this );
+    }
+
+    operator VkVideoEncodeAV1DpbSlotInfoKHR *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkVideoEncodeAV1DpbSlotInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoEncodeAV1ReferenceInfo * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pStdReferenceInfo );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeAV1DpbSlotInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
+# endif
+    }
+
+    bool operator!=( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType    sType             = StructureType::eVideoEncodeAv1DpbSlotInfoKHR;
+    const void *                           pNext             = {};
+    const StdVideoEncodeAV1ReferenceInfo * pStdReferenceInfo = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeAv1DpbSlotInfoKHR>
+  {
+    using Type = VideoEncodeAV1DpbSlotInfoKHR;
+  };
+
+  // wrapper struct for struct VkVideoEncodeAV1FrameSizeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1FrameSizeKHR.html
+  struct VideoEncodeAV1FrameSizeKHR
+  {
+    using NativeType = VkVideoEncodeAV1FrameSizeKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS
) + VULKAN_HPP_CONSTEXPR + VideoEncodeAV1FrameSizeKHR( uint32_t intraFrameSize_ = {}, uint32_t predictiveFrameSize_ = {}, uint32_t bipredictiveFrameSize_ = {} ) VULKAN_HPP_NOEXCEPT + : intraFrameSize{ intraFrameSize_ } + , predictiveFrameSize{ predictiveFrameSize_ } + , bipredictiveFrameSize{ bipredictiveFrameSize_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1FrameSizeKHR( VideoEncodeAV1FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1FrameSizeKHR( VkVideoEncodeAV1FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1FrameSizeKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1FrameSizeKHR & operator=( VideoEncodeAV1FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeAV1FrameSizeKHR & operator=( VkVideoEncodeAV1FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1FrameSizeKHR & setIntraFrameSize( uint32_t intraFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + intraFrameSize = intraFrameSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1FrameSizeKHR & setPredictiveFrameSize( uint32_t predictiveFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + predictiveFrameSize = predictiveFrameSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1FrameSizeKHR & setBipredictiveFrameSize( uint32_t bipredictiveFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + bipredictiveFrameSize = bipredictiveFrameSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeAV1FrameSizeKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1FrameSizeKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1FrameSizeKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1FrameSizeKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( intraFrameSize, predictiveFrameSize, bipredictiveFrameSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1FrameSizeKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( intraFrameSize == rhs.intraFrameSize ) && ( predictiveFrameSize == rhs.predictiveFrameSize ) && + ( bipredictiveFrameSize == rhs.bipredictiveFrameSize ); +# endif + } + + bool operator!=( VideoEncodeAV1FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t intraFrameSize = {}; + uint32_t predictiveFrameSize = {}; + uint32_t bipredictiveFrameSize = {}; + }; + + // wrapper struct for struct VkVideoEncodeAV1GopRemainingFrameInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1GopRemainingFrameInfoKHR.html + struct VideoEncodeAV1GopRemainingFrameInfoKHR + { + using NativeType = VkVideoEncodeAV1GopRemainingFrameInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eVideoEncodeAv1GopRemainingFrameInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1GopRemainingFrameInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ = {}, + uint32_t gopRemainingIntra_ = {}, + uint32_t gopRemainingPredictive_ = {}, + uint32_t gopRemainingBipredictive_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useGopRemainingFrames{ useGopRemainingFrames_ } + , gopRemainingIntra{ gopRemainingIntra_ } + , gopRemainingPredictive{ gopRemainingPredictive_ } + , gopRemainingBipredictive{ gopRemainingBipredictive_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1GopRemainingFrameInfoKHR( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1GopRemainingFrameInfoKHR( VkVideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1GopRemainingFrameInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1GopRemainingFrameInfoKHR & operator=( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeAV1GopRemainingFrameInfoKHR & operator=( VkVideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & + setUseGopRemainingFrames( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ ) VULKAN_HPP_NOEXCEPT + { + useGopRemainingFrames = useGopRemainingFrames_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setGopRemainingIntra( uint32_t gopRemainingIntra_ ) VULKAN_HPP_NOEXCEPT + { + gopRemainingIntra = gopRemainingIntra_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setGopRemainingPredictive( uint32_t gopRemainingPredictive_ ) VULKAN_HPP_NOEXCEPT + { + gopRemainingPredictive = gopRemainingPredictive_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setGopRemainingBipredictive( uint32_t gopRemainingBipredictive_ ) VULKAN_HPP_NOEXCEPT + { + gopRemainingBipredictive = gopRemainingBipredictive_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeAV1GopRemainingFrameInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1GopRemainingFrameInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1GopRemainingFrameInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1GopRemainingFrameInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, useGopRemainingFrames, gopRemainingIntra, gopRemainingPredictive, gopRemainingBipredictive ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1GopRemainingFrameInfoKHR const & ) const = default; +#else + bool 
operator==( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useGopRemainingFrames == rhs.useGopRemainingFrames ) && + ( gopRemainingIntra == rhs.gopRemainingIntra ) && ( gopRemainingPredictive == rhs.gopRemainingPredictive ) && + ( gopRemainingBipredictive == rhs.gopRemainingBipredictive ); +# endif + } + + bool operator!=( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1GopRemainingFrameInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames = {}; + uint32_t gopRemainingIntra = {}; + uint32_t gopRemainingPredictive = {}; + uint32_t gopRemainingBipredictive = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1GopRemainingFrameInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeAV1PictureInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1PictureInfoKHR.html + struct VideoEncodeAV1PictureInfoKHR + { + using NativeType = VkVideoEncodeAV1PictureInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1PictureInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR( + VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR predictionMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR::eIntraOnly, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR rateControlGroup_ = VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR::eIntra, + uint32_t constantQIndex_ = {}, + const StdVideoEncodeAV1PictureInfo * pStdPictureInfo_ = {}, + std::array const & referenceNameSlotIndices_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primaryReferenceCdfOnly_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 generateObuExtensionHeader_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , predictionMode{ predictionMode_ } + , rateControlGroup{ rateControlGroup_ } + , constantQIndex{ constantQIndex_ } + , pStdPictureInfo{ pStdPictureInfo_ } + , referenceNameSlotIndices{ referenceNameSlotIndices_ } + , primaryReferenceCdfOnly{ primaryReferenceCdfOnly_ } + , generateObuExtensionHeader{ generateObuExtensionHeader_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR( VideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1PictureInfoKHR( VkVideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1PictureInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1PictureInfoKHR & operator=( VideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeAV1PictureInfoKHR & operator=( VkVideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & + setPredictionMode( 
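+    // Illustrative usage sketch (not part of the generated API): when the AV1 encode
+    // capabilities report prefersGopRemainingFrames / requiresGopRemainingFrames, the
+    // application chains this structure via pNext where rate control is specified and fills in
+    // how many frames of each type remain in the current GOP (the counts below are examples):
+    //
+    //   VULKAN_HPP_NAMESPACE::VideoEncodeAV1GopRemainingFrameInfoKHR gopInfo{};
+    //   gopInfo.setUseGopRemainingFrames( VK_TRUE )
+    //          .setGopRemainingIntra( 1 )
+    //          .setGopRemainingPredictive( 7 )
+    //          .setGopRemainingBipredictive( 0 );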
VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR predictionMode_ ) VULKAN_HPP_NOEXCEPT + { + predictionMode = predictionMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & + setRateControlGroup( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR rateControlGroup_ ) VULKAN_HPP_NOEXCEPT + { + rateControlGroup = rateControlGroup_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setConstantQIndex( uint32_t constantQIndex_ ) VULKAN_HPP_NOEXCEPT + { + constantQIndex = constantQIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setPStdPictureInfo( const StdVideoEncodeAV1PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & + setReferenceNameSlotIndices( std::array referenceNameSlotIndices_ ) VULKAN_HPP_NOEXCEPT + { + referenceNameSlotIndices = referenceNameSlotIndices_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & + setPrimaryReferenceCdfOnly( VULKAN_HPP_NAMESPACE::Bool32 primaryReferenceCdfOnly_ ) VULKAN_HPP_NOEXCEPT + { + primaryReferenceCdfOnly = primaryReferenceCdfOnly_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & + setGenerateObuExtensionHeader( VULKAN_HPP_NAMESPACE::Bool32 generateObuExtensionHeader_ ) VULKAN_HPP_NOEXCEPT + { + generateObuExtensionHeader = generateObuExtensionHeader_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeAV1PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1PictureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1PictureInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + predictionMode, + rateControlGroup, + constantQIndex, + pStdPictureInfo, + referenceNameSlotIndices, + primaryReferenceCdfOnly, + generateObuExtensionHeader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1PictureInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( predictionMode == rhs.predictionMode ) && ( rateControlGroup == rhs.rateControlGroup ) && + ( constantQIndex == rhs.constantQIndex ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && + ( referenceNameSlotIndices == rhs.referenceNameSlotIndices ) && ( primaryReferenceCdfOnly == rhs.primaryReferenceCdfOnly ) && + ( generateObuExtensionHeader == rhs.generateObuExtensionHeader ); +# endif + } + + bool operator!=( VideoEncodeAV1PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1PictureInfoKHR; + const void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR predictionMode = VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR::eIntraOnly; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR rateControlGroup = VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR::eIntra; + uint32_t constantQIndex = {}; + const StdVideoEncodeAV1PictureInfo * pStdPictureInfo = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D referenceNameSlotIndices = {}; + VULKAN_HPP_NAMESPACE::Bool32 primaryReferenceCdfOnly = {}; + VULKAN_HPP_NAMESPACE::Bool32 generateObuExtensionHeader = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1PictureInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeAV1ProfileInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1ProfileInfoKHR.html + struct VideoEncodeAV1ProfileInfoKHR + { + using NativeType = VkVideoEncodeAV1ProfileInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1ProfileInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1ProfileInfoKHR( StdVideoAV1Profile stdProfile_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdProfile{ stdProfile_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1ProfileInfoKHR( VideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1ProfileInfoKHR( VkVideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1ProfileInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1ProfileInfoKHR & operator=( VideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeAV1ProfileInfoKHR & operator=( VkVideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1ProfileInfoKHR & setStdProfile( StdVideoAV1Profile stdProfile_ ) VULKAN_HPP_NOEXCEPT + { + stdProfile = stdProfile_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeAV1ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdProfile ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoAV1Profile ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
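+    // Illustrative usage sketch (not part of the generated API; stdPictureInfo and
+    // lastFrameSlotIndex are hypothetical application state): per-frame AV1 encode picture info
+    // from the struct defined above, with an AV1 reference name mapped to a DPB slot through the
+    // referenceNameSlotIndices array:
+    //
+    //   VULKAN_HPP_NAMESPACE::VideoEncodeAV1PictureInfoKHR picInfo{};
+    //   picInfo.setPredictionMode( VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR::eSingleReference )
+    //          .setRateControlGroup( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR::ePredictive )
+    //          .setConstantQIndex( 96 )
+    //          .setPStdPictureInfo( &stdPictureInfo );
+    //   picInfo.referenceNameSlotIndices[ 0 ] = lastFrameSlotIndex;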
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoEncodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoAV1Profile ) ) == 0 ); + } + + bool operator!=( VideoEncodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1ProfileInfoKHR; + const void * pNext = {}; + StdVideoAV1Profile stdProfile = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1ProfileInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeAV1QIndexKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1QIndexKHR.html + struct VideoEncodeAV1QIndexKHR + { + using NativeType = VkVideoEncodeAV1QIndexKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeAV1QIndexKHR( uint32_t intraQIndex_ = {}, uint32_t predictiveQIndex_ = {}, uint32_t bipredictiveQIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : intraQIndex{ intraQIndex_ } + , predictiveQIndex{ predictiveQIndex_ } + , bipredictiveQIndex{ bipredictiveQIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1QIndexKHR( VideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1QIndexKHR( VkVideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1QIndexKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1QIndexKHR & operator=( VideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeAV1QIndexKHR & operator=( VkVideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1QIndexKHR & setIntraQIndex( uint32_t intraQIndex_ ) VULKAN_HPP_NOEXCEPT + { + intraQIndex = intraQIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1QIndexKHR & setPredictiveQIndex( uint32_t predictiveQIndex_ ) VULKAN_HPP_NOEXCEPT + { + predictiveQIndex = predictiveQIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1QIndexKHR & setBipredictiveQIndex( uint32_t bipredictiveQIndex_ ) VULKAN_HPP_NOEXCEPT + { + bipredictiveQIndex = bipredictiveQIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeAV1QIndexKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1QIndexKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1QIndexKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1QIndexKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( intraQIndex, predictiveQIndex, bipredictiveQIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1QIndexKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1QIndexKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( 
VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( intraQIndex == rhs.intraQIndex ) && ( predictiveQIndex == rhs.predictiveQIndex ) && ( bipredictiveQIndex == rhs.bipredictiveQIndex ); +# endif + } + + bool operator!=( VideoEncodeAV1QIndexKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t intraQIndex = {}; + uint32_t predictiveQIndex = {}; + uint32_t bipredictiveQIndex = {}; + }; + + // wrapper struct for struct VkVideoEncodeAV1QualityLevelPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1QualityLevelPropertiesKHR.html + struct VideoEncodeAV1QualityLevelPropertiesKHR + { + using NativeType = VkVideoEncodeAV1QualityLevelPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1QualityLevelPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1QualityLevelPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR preferredRateControlFlags_ = {}, + uint32_t preferredGopFrameCount_ = {}, + uint32_t preferredKeyFramePeriod_ = {}, + uint32_t preferredConsecutiveBipredictiveFrameCount_ = {}, + uint32_t preferredTemporalLayerCount_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR preferredConstantQIndex_ = {}, + uint32_t preferredMaxSingleReferenceCount_ = {}, + uint32_t preferredSingleReferenceNameMask_ = {}, + uint32_t preferredMaxUnidirectionalCompoundReferenceCount_ = {}, + uint32_t preferredMaxUnidirectionalCompoundGroup1ReferenceCount_ = {}, + uint32_t preferredUnidirectionalCompoundReferenceNameMask_ = {}, + uint32_t preferredMaxBidirectionalCompoundReferenceCount_ = {}, + uint32_t preferredMaxBidirectionalCompoundGroup1ReferenceCount_ = {}, + uint32_t preferredMaxBidirectionalCompoundGroup2ReferenceCount_ = {}, + uint32_t preferredBidirectionalCompoundReferenceNameMask_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , preferredRateControlFlags{ preferredRateControlFlags_ } + , preferredGopFrameCount{ preferredGopFrameCount_ } + , preferredKeyFramePeriod{ preferredKeyFramePeriod_ } + , preferredConsecutiveBipredictiveFrameCount{ preferredConsecutiveBipredictiveFrameCount_ } + , preferredTemporalLayerCount{ preferredTemporalLayerCount_ } + , preferredConstantQIndex{ preferredConstantQIndex_ } + , preferredMaxSingleReferenceCount{ preferredMaxSingleReferenceCount_ } + , preferredSingleReferenceNameMask{ preferredSingleReferenceNameMask_ } + , preferredMaxUnidirectionalCompoundReferenceCount{ preferredMaxUnidirectionalCompoundReferenceCount_ } + , preferredMaxUnidirectionalCompoundGroup1ReferenceCount{ preferredMaxUnidirectionalCompoundGroup1ReferenceCount_ } + , preferredUnidirectionalCompoundReferenceNameMask{ preferredUnidirectionalCompoundReferenceNameMask_ } + , preferredMaxBidirectionalCompoundReferenceCount{ preferredMaxBidirectionalCompoundReferenceCount_ } + , preferredMaxBidirectionalCompoundGroup1ReferenceCount{ preferredMaxBidirectionalCompoundGroup1ReferenceCount_ } + , preferredMaxBidirectionalCompoundGroup2ReferenceCount{ preferredMaxBidirectionalCompoundGroup2ReferenceCount_ } + , preferredBidirectionalCompoundReferenceNameMask{ preferredBidirectionalCompoundReferenceNameMask_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1QualityLevelPropertiesKHR( VideoEncodeAV1QualityLevelPropertiesKHR const & 
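+    // Illustrative usage sketch (not part of the generated API): VideoEncodeAV1QIndexKHR above
+    // is a plain triple with no sType/pNext; it is embedded by value in other encode structures
+    // (for example preferredConstantQIndex in these quality-level properties). The values are
+    // expected to stay within the minQIndex/maxQIndex range reported in
+    // VideoEncodeAV1CapabilitiesKHR:
+    //
+    //   VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR constantQ{};
+    //   constantQ.setIntraQIndex( 64 ).setPredictiveQIndex( 96 ).setBipredictiveQIndex( 128 );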
rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1QualityLevelPropertiesKHR( VkVideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1QualityLevelPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1QualityLevelPropertiesKHR & operator=( VideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeAV1QualityLevelPropertiesKHR & operator=( VkVideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeAV1QualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1QualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1QualityLevelPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1QualityLevelPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + preferredRateControlFlags, + preferredGopFrameCount, + preferredKeyFramePeriod, + preferredConsecutiveBipredictiveFrameCount, + preferredTemporalLayerCount, + preferredConstantQIndex, + preferredMaxSingleReferenceCount, + preferredSingleReferenceNameMask, + preferredMaxUnidirectionalCompoundReferenceCount, + preferredMaxUnidirectionalCompoundGroup1ReferenceCount, + preferredUnidirectionalCompoundReferenceNameMask, + preferredMaxBidirectionalCompoundReferenceCount, + preferredMaxBidirectionalCompoundGroup1ReferenceCount, + preferredMaxBidirectionalCompoundGroup2ReferenceCount, + preferredBidirectionalCompoundReferenceNameMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1QualityLevelPropertiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( preferredRateControlFlags == rhs.preferredRateControlFlags ) && + ( preferredGopFrameCount == rhs.preferredGopFrameCount ) && ( preferredKeyFramePeriod == rhs.preferredKeyFramePeriod ) && + ( preferredConsecutiveBipredictiveFrameCount == rhs.preferredConsecutiveBipredictiveFrameCount ) && + ( preferredTemporalLayerCount == rhs.preferredTemporalLayerCount ) && ( preferredConstantQIndex == rhs.preferredConstantQIndex ) && + ( preferredMaxSingleReferenceCount == rhs.preferredMaxSingleReferenceCount ) && + ( preferredSingleReferenceNameMask == rhs.preferredSingleReferenceNameMask ) && + ( preferredMaxUnidirectionalCompoundReferenceCount == rhs.preferredMaxUnidirectionalCompoundReferenceCount ) && + ( preferredMaxUnidirectionalCompoundGroup1ReferenceCount == rhs.preferredMaxUnidirectionalCompoundGroup1ReferenceCount ) && + ( preferredUnidirectionalCompoundReferenceNameMask == rhs.preferredUnidirectionalCompoundReferenceNameMask ) && + ( preferredMaxBidirectionalCompoundReferenceCount == rhs.preferredMaxBidirectionalCompoundReferenceCount ) && + ( preferredMaxBidirectionalCompoundGroup1ReferenceCount == rhs.preferredMaxBidirectionalCompoundGroup1ReferenceCount ) && + ( 
preferredMaxBidirectionalCompoundGroup2ReferenceCount == rhs.preferredMaxBidirectionalCompoundGroup2ReferenceCount ) && + ( preferredBidirectionalCompoundReferenceNameMask == rhs.preferredBidirectionalCompoundReferenceNameMask ); +# endif + } + + bool operator!=( VideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1QualityLevelPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR preferredRateControlFlags = {}; + uint32_t preferredGopFrameCount = {}; + uint32_t preferredKeyFramePeriod = {}; + uint32_t preferredConsecutiveBipredictiveFrameCount = {}; + uint32_t preferredTemporalLayerCount = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR preferredConstantQIndex = {}; + uint32_t preferredMaxSingleReferenceCount = {}; + uint32_t preferredSingleReferenceNameMask = {}; + uint32_t preferredMaxUnidirectionalCompoundReferenceCount = {}; + uint32_t preferredMaxUnidirectionalCompoundGroup1ReferenceCount = {}; + uint32_t preferredUnidirectionalCompoundReferenceNameMask = {}; + uint32_t preferredMaxBidirectionalCompoundReferenceCount = {}; + uint32_t preferredMaxBidirectionalCompoundGroup1ReferenceCount = {}; + uint32_t preferredMaxBidirectionalCompoundGroup2ReferenceCount = {}; + uint32_t preferredBidirectionalCompoundReferenceNameMask = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1QualityLevelPropertiesKHR; + }; + + // wrapper struct for struct VkVideoEncodeAV1QuantizationMapCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1QuantizationMapCapabilitiesKHR.html + struct VideoEncodeAV1QuantizationMapCapabilitiesKHR + { + using NativeType = VkVideoEncodeAV1QuantizationMapCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1QuantizationMapCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeAV1QuantizationMapCapabilitiesKHR( int32_t minQIndexDelta_ = {}, int32_t maxQIndexDelta_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minQIndexDelta{ minQIndexDelta_ } + , maxQIndexDelta{ maxQIndexDelta_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1QuantizationMapCapabilitiesKHR( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1QuantizationMapCapabilitiesKHR( VkVideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1QuantizationMapCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1QuantizationMapCapabilitiesKHR & operator=( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeAV1QuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeAV1QuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1QuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1QuantizationMapCapabilitiesKHR const *() const 
VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1QuantizationMapCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minQIndexDelta, maxQIndexDelta ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minQIndexDelta == rhs.minQIndexDelta ) && ( maxQIndexDelta == rhs.maxQIndexDelta ); +# endif + } + + bool operator!=( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1QuantizationMapCapabilitiesKHR; + void * pNext = {}; + int32_t minQIndexDelta = {}; + int32_t maxQIndexDelta = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1QuantizationMapCapabilitiesKHR; + }; + + // wrapper struct for struct VkVideoEncodeAV1RateControlInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1RateControlInfoKHR.html + struct VideoEncodeAV1RateControlInfoKHR + { + using NativeType = VkVideoEncodeAV1RateControlInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1RateControlInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1RateControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR flags_ = {}, + uint32_t gopFrameCount_ = {}, + uint32_t keyFramePeriod_ = {}, + uint32_t consecutiveBipredictiveFrameCount_ = {}, + uint32_t temporalLayerCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , gopFrameCount{ gopFrameCount_ } + , keyFramePeriod{ keyFramePeriod_ } + , consecutiveBipredictiveFrameCount{ consecutiveBipredictiveFrameCount_ } + , temporalLayerCount{ temporalLayerCount_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1RateControlInfoKHR( VideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1RateControlInfoKHR( VkVideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1RateControlInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1RateControlInfoKHR & operator=( VideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeAV1RateControlInfoKHR & operator=( VkVideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR flags_ 
) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT + { + gopFrameCount = gopFrameCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setKeyFramePeriod( uint32_t keyFramePeriod_ ) VULKAN_HPP_NOEXCEPT + { + keyFramePeriod = keyFramePeriod_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & + setConsecutiveBipredictiveFrameCount( uint32_t consecutiveBipredictiveFrameCount_ ) VULKAN_HPP_NOEXCEPT + { + consecutiveBipredictiveFrameCount = consecutiveBipredictiveFrameCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setTemporalLayerCount( uint32_t temporalLayerCount_ ) VULKAN_HPP_NOEXCEPT + { + temporalLayerCount = temporalLayerCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeAV1RateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1RateControlInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1RateControlInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1RateControlInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, gopFrameCount, keyFramePeriod, consecutiveBipredictiveFrameCount, temporalLayerCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1RateControlInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( gopFrameCount == rhs.gopFrameCount ) && + ( keyFramePeriod == rhs.keyFramePeriod ) && ( consecutiveBipredictiveFrameCount == rhs.consecutiveBipredictiveFrameCount ) && + ( temporalLayerCount == rhs.temporalLayerCount ); +# endif + } + + bool operator!=( VideoEncodeAV1RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1RateControlInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR flags = {}; + uint32_t gopFrameCount = {}; + uint32_t keyFramePeriod = {}; + uint32_t consecutiveBipredictiveFrameCount = {}; + uint32_t temporalLayerCount = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1RateControlInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeAV1RateControlLayerInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1RateControlLayerInfoKHR.html + struct VideoEncodeAV1RateControlLayerInfoKHR + { + using NativeType = VkVideoEncodeAV1RateControlLayerInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1RateControlLayerInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
VideoEncodeAV1RateControlLayerInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMinQIndex_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR minQIndex_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxQIndex_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR maxQIndex_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR maxFrameSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useMinQIndex{ useMinQIndex_ } + , minQIndex{ minQIndex_ } + , useMaxQIndex{ useMaxQIndex_ } + , maxQIndex{ maxQIndex_ } + , useMaxFrameSize{ useMaxFrameSize_ } + , maxFrameSize{ maxFrameSize_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1RateControlLayerInfoKHR( VideoEncodeAV1RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1RateControlLayerInfoKHR( VkVideoEncodeAV1RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1RateControlLayerInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1RateControlLayerInfoKHR & operator=( VideoEncodeAV1RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeAV1RateControlLayerInfoKHR & operator=( VkVideoEncodeAV1RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setUseMinQIndex( VULKAN_HPP_NAMESPACE::Bool32 useMinQIndex_ ) VULKAN_HPP_NOEXCEPT + { + useMinQIndex = useMinQIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & + setMinQIndex( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR const & minQIndex_ ) VULKAN_HPP_NOEXCEPT + { + minQIndex = minQIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setUseMaxQIndex( VULKAN_HPP_NAMESPACE::Bool32 useMaxQIndex_ ) VULKAN_HPP_NOEXCEPT + { + useMaxQIndex = useMaxQIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & + setMaxQIndex( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR const & maxQIndex_ ) VULKAN_HPP_NOEXCEPT + { + maxQIndex = maxQIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + useMaxFrameSize = useMaxFrameSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & + setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + maxFrameSize = maxFrameSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeAV1RateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1RateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1RateControlLayerInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1RateControlLayerInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + 
auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, useMinQIndex, minQIndex, useMaxQIndex, maxQIndex, useMaxFrameSize, maxFrameSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1RateControlLayerInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMinQIndex == rhs.useMinQIndex ) && ( minQIndex == rhs.minQIndex ) && + ( useMaxQIndex == rhs.useMaxQIndex ) && ( maxQIndex == rhs.maxQIndex ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) && + ( maxFrameSize == rhs.maxFrameSize ); +# endif + } + + bool operator!=( VideoEncodeAV1RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1RateControlLayerInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMinQIndex = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR minQIndex = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxQIndex = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR maxQIndex = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR maxFrameSize = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1RateControlLayerInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeAV1SessionCreateInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1SessionCreateInfoKHR.html + struct VideoEncodeAV1SessionCreateInfoKHR + { + using NativeType = VkVideoEncodeAV1SessionCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1SessionCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1SessionCreateInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevel_ = {}, + StdVideoAV1Level maxLevel_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useMaxLevel{ useMaxLevel_ } + , maxLevel{ maxLevel_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1SessionCreateInfoKHR( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1SessionCreateInfoKHR( VkVideoEncodeAV1SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1SessionCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1SessionCreateInfoKHR & operator=( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeAV1SessionCreateInfoKHR & operator=( VkVideoEncodeAV1SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionCreateInfoKHR & setUseMaxLevel( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevel_ ) VULKAN_HPP_NOEXCEPT + { + useMaxLevel = useMaxLevel_; + return *this; + } + + 
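+
+    // Note: the setters above and below each return *this, so a create-info can be filled with a
+    // fluent chain. Illustrative sketch only (maxLevel is a hypothetical local of type
+    // StdVideoAV1Level, not something defined in this header):
+    //   StdVideoAV1Level maxLevel = {};   // pick the desired AV1 level here
+    //   auto sessionCreateInfo = VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionCreateInfoKHR{}
+    //                              .setUseMaxLevel( VK_TRUE )
+    //                              .setMaxLevel( maxLevel );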
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionCreateInfoKHR & setMaxLevel( StdVideoAV1Level maxLevel_ ) VULKAN_HPP_NOEXCEPT + { + maxLevel = maxLevel_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeAV1SessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1SessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1SessionCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1SessionCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, useMaxLevel, maxLevel ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = useMaxLevel <=> rhs.useMaxLevel; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMaxLevel == rhs.useMaxLevel ) && + ( memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ) == 0 ); + } + + bool operator!=( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1SessionCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxLevel = {}; + StdVideoAV1Level maxLevel = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1SessionCreateInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeAV1SessionParametersCreateInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeAV1SessionParametersCreateInfoKHR.html + struct VideoEncodeAV1SessionParametersCreateInfoKHR + { + using NativeType = VkVideoEncodeAV1SessionParametersCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1SessionParametersCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1SessionParametersCreateInfoKHR( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ = {}, + const StdVideoEncodeAV1DecoderModelInfo * pStdDecoderModelInfo_ = {}, + uint32_t stdOperatingPointCount_ = {}, + const StdVideoEncodeAV1OperatingPointInfo * pStdOperatingPoints_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdSequenceHeader{ pStdSequenceHeader_ } + , pStdDecoderModelInfo{ pStdDecoderModelInfo_ } + , stdOperatingPointCount{ stdOperatingPointCount_ } + , pStdOperatingPoints{ pStdOperatingPoints_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1SessionParametersCreateInfoKHR( 
VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1SessionParametersCreateInfoKHR( VkVideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1SessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeAV1SessionParametersCreateInfoKHR( + const StdVideoAV1SequenceHeader * pStdSequenceHeader_, + const StdVideoEncodeAV1DecoderModelInfo * pStdDecoderModelInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdOperatingPoints_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , pStdSequenceHeader( pStdSequenceHeader_ ) + , pStdDecoderModelInfo( pStdDecoderModelInfo_ ) + , stdOperatingPointCount( static_cast( stdOperatingPoints_.size() ) ) + , pStdOperatingPoints( stdOperatingPoints_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeAV1SessionParametersCreateInfoKHR & operator=( VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeAV1SessionParametersCreateInfoKHR & operator=( VkVideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & + setPStdSequenceHeader( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ ) VULKAN_HPP_NOEXCEPT + { + pStdSequenceHeader = pStdSequenceHeader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & + setPStdDecoderModelInfo( const StdVideoEncodeAV1DecoderModelInfo * pStdDecoderModelInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdDecoderModelInfo = pStdDecoderModelInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & setStdOperatingPointCount( uint32_t stdOperatingPointCount_ ) VULKAN_HPP_NOEXCEPT + { + stdOperatingPointCount = stdOperatingPointCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & + setPStdOperatingPoints( const StdVideoEncodeAV1OperatingPointInfo * pStdOperatingPoints_ ) VULKAN_HPP_NOEXCEPT + { + pStdOperatingPoints = pStdOperatingPoints_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeAV1SessionParametersCreateInfoKHR & setStdOperatingPoints( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdOperatingPoints_ ) VULKAN_HPP_NOEXCEPT + { + stdOperatingPointCount = static_cast( stdOperatingPoints_.size() ); + pStdOperatingPoints = stdOperatingPoints_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeAV1SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1SessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1SessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( 
this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdSequenceHeader, pStdDecoderModelInfo, stdOperatingPointCount, pStdOperatingPoints ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1SessionParametersCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdSequenceHeader == rhs.pStdSequenceHeader ) && + ( pStdDecoderModelInfo == rhs.pStdDecoderModelInfo ) && ( stdOperatingPointCount == rhs.stdOperatingPointCount ) && + ( pStdOperatingPoints == rhs.pStdOperatingPoints ); +# endif + } + + bool operator!=( VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1SessionParametersCreateInfoKHR; + const void * pNext = {}; + const StdVideoAV1SequenceHeader * pStdSequenceHeader = {}; + const StdVideoEncodeAV1DecoderModelInfo * pStdDecoderModelInfo = {}; + uint32_t stdOperatingPointCount = {}; + const StdVideoEncodeAV1OperatingPointInfo * pStdOperatingPoints = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1SessionParametersCreateInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeCapabilitiesKHR.html + struct VideoEncodeCapabilitiesKHR + { + using NativeType = VkVideoEncodeCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR rateControlModes_ = {}, + uint32_t maxRateControlLayers_ = {}, + uint64_t maxBitrate_ = {}, + uint32_t maxQualityLevels_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D encodeInputPictureGranularity_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR supportedEncodeFeedbackFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , rateControlModes{ rateControlModes_ } + , maxRateControlLayers{ maxRateControlLayers_ } + , maxBitrate{ maxBitrate_ } + , maxQualityLevels{ maxQualityLevels_ } + , encodeInputPictureGranularity{ encodeInputPictureGranularity_ } + , supportedEncodeFeedbackFlags{ supportedEncodeFeedbackFlags_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR( VideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeCapabilitiesKHR( VkVideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeCapabilitiesKHR & operator=( VideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeCapabilitiesKHR & operator=( VkVideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + 
return *this; + } + + operator VkVideoEncodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + rateControlModes, + maxRateControlLayers, + maxBitrate, + maxQualityLevels, + encodeInputPictureGranularity, + supportedEncodeFeedbackFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeCapabilitiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rateControlModes == rhs.rateControlModes ) && + ( maxRateControlLayers == rhs.maxRateControlLayers ) && ( maxBitrate == rhs.maxBitrate ) && ( maxQualityLevels == rhs.maxQualityLevels ) && + ( encodeInputPictureGranularity == rhs.encodeInputPictureGranularity ) && ( supportedEncodeFeedbackFlags == rhs.supportedEncodeFeedbackFlags ); +# endif + } + + bool operator!=( VideoEncodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR rateControlModes = {}; + uint32_t maxRateControlLayers = {}; + uint64_t maxBitrate = {}; + uint32_t maxQualityLevels = {}; + VULKAN_HPP_NAMESPACE::Extent2D encodeInputPictureGranularity = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR supportedEncodeFeedbackFlags = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeCapabilitiesKHR; + }; + + // wrapper struct for struct VkVideoEncodeH264CapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264CapabilitiesKHR.html + struct VideoEncodeH264CapabilitiesKHR + { + using NativeType = VkVideoEncodeH264CapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264CapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsKHR flags_ = {}, + StdVideoH264LevelIdc maxLevelIdc_ = {}, + uint32_t maxSliceCount_ = {}, + uint32_t maxPPictureL0ReferenceCount_ = {}, + uint32_t maxBPictureL0ReferenceCount_ = {}, + uint32_t maxL1ReferenceCount_ = {}, + uint32_t maxTemporalLayerCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 expectDyadicTemporalLayerPattern_ = {}, + int32_t minQp_ = {}, + int32_t maxQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264StdFlagsKHR stdSyntaxFlags_ = {}, + 
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , maxLevelIdc{ maxLevelIdc_ } + , maxSliceCount{ maxSliceCount_ } + , maxPPictureL0ReferenceCount{ maxPPictureL0ReferenceCount_ } + , maxBPictureL0ReferenceCount{ maxBPictureL0ReferenceCount_ } + , maxL1ReferenceCount{ maxL1ReferenceCount_ } + , maxTemporalLayerCount{ maxTemporalLayerCount_ } + , expectDyadicTemporalLayerPattern{ expectDyadicTemporalLayerPattern_ } + , minQp{ minQp_ } + , maxQp{ maxQp_ } + , prefersGopRemainingFrames{ prefersGopRemainingFrames_ } + , requiresGopRemainingFrames{ requiresGopRemainingFrames_ } + , stdSyntaxFlags{ stdSyntaxFlags_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesKHR( VideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264CapabilitiesKHR( VkVideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264CapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264CapabilitiesKHR & operator=( VideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH264CapabilitiesKHR & operator=( VkVideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeH264CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH264CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + maxLevelIdc, + maxSliceCount, + maxPPictureL0ReferenceCount, + maxBPictureL0ReferenceCount, + maxL1ReferenceCount, + maxTemporalLayerCount, + expectDyadicTemporalLayerPattern, + minQp, + maxQp, + prefersGopRemainingFrames, + requiresGopRemainingFrames, + stdSyntaxFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = maxSliceCount <=> rhs.maxSliceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxPPictureL0ReferenceCount <=> rhs.maxPPictureL0ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxBPictureL0ReferenceCount <=> rhs.maxBPictureL0ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxL1ReferenceCount <=> rhs.maxL1ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxTemporalLayerCount <=> rhs.maxTemporalLayerCount; cmp != 0 ) + return cmp; + if ( auto cmp = expectDyadicTemporalLayerPattern <=> rhs.expectDyadicTemporalLayerPattern; cmp != 0 ) + return cmp; + if ( auto cmp = minQp <=> rhs.minQp; cmp != 0 ) + return cmp; + if ( auto cmp = maxQp <=> rhs.maxQp; cmp != 0 ) + return cmp; + if ( auto cmp = prefersGopRemainingFrames <=> rhs.prefersGopRemainingFrames; cmp != 0 ) + return cmp; + if ( auto cmp = requiresGopRemainingFrames <=> rhs.requiresGopRemainingFrames; cmp != 0 ) + return cmp; + if ( auto cmp = stdSyntaxFlags <=> rhs.stdSyntaxFlags; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoEncodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ) == 0 ) && ( maxSliceCount == rhs.maxSliceCount ) && + ( maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount ) && ( maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount ) && + ( maxL1ReferenceCount == rhs.maxL1ReferenceCount ) && ( maxTemporalLayerCount == rhs.maxTemporalLayerCount ) && + ( expectDyadicTemporalLayerPattern == rhs.expectDyadicTemporalLayerPattern ) && ( minQp == rhs.minQp ) && ( maxQp == rhs.maxQp ) && + ( prefersGopRemainingFrames == rhs.prefersGopRemainingFrames ) && ( requiresGopRemainingFrames == rhs.requiresGopRemainingFrames ) && + ( stdSyntaxFlags == rhs.stdSyntaxFlags ); + } + + bool operator!=( VideoEncodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264CapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsKHR flags = {}; + StdVideoH264LevelIdc maxLevelIdc = {}; + uint32_t maxSliceCount = {}; + uint32_t maxPPictureL0ReferenceCount = {}; + uint32_t maxBPictureL0ReferenceCount = {}; + uint32_t maxL1ReferenceCount = {}; + uint32_t maxTemporalLayerCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 expectDyadicTemporalLayerPattern = {}; + int32_t minQp = {}; + int32_t maxQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames = {}; + VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264StdFlagsKHR stdSyntaxFlags = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264CapabilitiesKHR; + }; + + // wrapper struct for struct VkVideoEncodeH264DpbSlotInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264DpbSlotInfoKHR.html + struct VideoEncodeH264DpbSlotInfoKHR + { + using NativeType = VkVideoEncodeH264DpbSlotInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264DpbSlotInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
VideoEncodeH264DpbSlotInfoKHR( const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ = {},
+                                     const void *                            pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , pStdReferenceInfo{ pStdReferenceInfo_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoKHR( VideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264DpbSlotInfoKHR( VkVideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264DpbSlotInfoKHR( *reinterpret_cast<VideoEncodeH264DpbSlotInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoEncodeH264DpbSlotInfoKHR & operator=( VideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    VideoEncodeH264DpbSlotInfoKHR & operator=( VkVideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoKHR &
+      setPStdReferenceInfo( const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdReferenceInfo = pStdReferenceInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkVideoEncodeH264DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264DpbSlotInfoKHR *>( this );
+    }
+
+    operator VkVideoEncodeH264DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264DpbSlotInfoKHR *>( this );
+    }
+
+    operator VkVideoEncodeH264DpbSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkVideoEncodeH264DpbSlotInfoKHR *>( this );
+    }
+
+    operator VkVideoEncodeH264DpbSlotInfoKHR *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkVideoEncodeH264DpbSlotInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoEncodeH264ReferenceInfo * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pStdReferenceInfo );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeH264DpbSlotInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoEncodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
+# endif
+    }
+
+    bool operator!=( VideoEncodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType     sType             = StructureType::eVideoEncodeH264DpbSlotInfoKHR;
+    const void *                            pNext             = {};
+    const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264DpbSlotInfoKHR>
+  {
+    using Type = VideoEncodeH264DpbSlotInfoKHR;
+  };
+
+  // wrapper struct for struct VkVideoEncodeH264FrameSizeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264FrameSizeKHR.html
+  struct VideoEncodeH264FrameSizeKHR
+  {
+    using NativeType = VkVideoEncodeH264FrameSizeKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeKHR( uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {} ) VULKAN_HPP_NOEXCEPT
+      : frameISize{ frameISize_ }
+      , framePSize{ framePSize_ }
+      , frameBSize{ frameBSize_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
VideoEncodeH264FrameSizeKHR( VideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264FrameSizeKHR( VkVideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264FrameSizeKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264FrameSizeKHR & operator=( VideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH264FrameSizeKHR & operator=( VkVideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeKHR & setFrameISize( uint32_t frameISize_ ) VULKAN_HPP_NOEXCEPT + { + frameISize = frameISize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeKHR & setFramePSize( uint32_t framePSize_ ) VULKAN_HPP_NOEXCEPT + { + framePSize = framePSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeKHR & setFrameBSize( uint32_t frameBSize_ ) VULKAN_HPP_NOEXCEPT + { + frameBSize = frameBSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH264FrameSizeKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264FrameSizeKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264FrameSizeKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH264FrameSizeKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( frameISize, framePSize, frameBSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264FrameSizeKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( frameISize == rhs.frameISize ) && ( framePSize == rhs.framePSize ) && ( frameBSize == rhs.frameBSize ); +# endif + } + + bool operator!=( VideoEncodeH264FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t frameISize = {}; + uint32_t framePSize = {}; + uint32_t frameBSize = {}; + }; + + // wrapper struct for struct VkVideoEncodeH264GopRemainingFrameInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264GopRemainingFrameInfoKHR.html + struct VideoEncodeH264GopRemainingFrameInfoKHR + { + using NativeType = VkVideoEncodeH264GopRemainingFrameInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264GopRemainingFrameInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264GopRemainingFrameInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ = {}, + uint32_t gopRemainingI_ = {}, + uint32_t gopRemainingP_ = {}, + uint32_t gopRemainingB_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useGopRemainingFrames{ useGopRemainingFrames_ } + , gopRemainingI{ gopRemainingI_ } + , 
gopRemainingP{ gopRemainingP_ } + , gopRemainingB{ gopRemainingB_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH264GopRemainingFrameInfoKHR( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264GopRemainingFrameInfoKHR( VkVideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264GopRemainingFrameInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264GopRemainingFrameInfoKHR & operator=( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH264GopRemainingFrameInfoKHR & operator=( VkVideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & + setUseGopRemainingFrames( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ ) VULKAN_HPP_NOEXCEPT + { + useGopRemainingFrames = useGopRemainingFrames_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setGopRemainingI( uint32_t gopRemainingI_ ) VULKAN_HPP_NOEXCEPT + { + gopRemainingI = gopRemainingI_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setGopRemainingP( uint32_t gopRemainingP_ ) VULKAN_HPP_NOEXCEPT + { + gopRemainingP = gopRemainingP_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setGopRemainingB( uint32_t gopRemainingB_ ) VULKAN_HPP_NOEXCEPT + { + gopRemainingB = gopRemainingB_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH264GopRemainingFrameInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264GopRemainingFrameInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264GopRemainingFrameInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH264GopRemainingFrameInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, useGopRemainingFrames, gopRemainingI, gopRemainingP, gopRemainingB ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264GopRemainingFrameInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useGopRemainingFrames == rhs.useGopRemainingFrames ) && + ( gopRemainingI == rhs.gopRemainingI ) && ( gopRemainingP == rhs.gopRemainingP ) && ( gopRemainingB == rhs.gopRemainingB ); +# endif + } + + bool operator!=( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264GopRemainingFrameInfoKHR; + 
const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames = {}; + uint32_t gopRemainingI = {}; + uint32_t gopRemainingP = {}; + uint32_t gopRemainingB = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264GopRemainingFrameInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH264NaluSliceInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264NaluSliceInfoKHR.html + struct VideoEncodeH264NaluSliceInfoKHR + { + using NativeType = VkVideoEncodeH264NaluSliceInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264NaluSliceInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceInfoKHR( int32_t constantQp_ = {}, + const StdVideoEncodeH264SliceHeader * pStdSliceHeader_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , constantQp{ constantQp_ } + , pStdSliceHeader{ pStdSliceHeader_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceInfoKHR( VideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264NaluSliceInfoKHR( VkVideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264NaluSliceInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264NaluSliceInfoKHR & operator=( VideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH264NaluSliceInfoKHR & operator=( VkVideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoKHR & setConstantQp( int32_t constantQp_ ) VULKAN_HPP_NOEXCEPT + { + constantQp = constantQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoKHR & setPStdSliceHeader( const StdVideoEncodeH264SliceHeader * pStdSliceHeader_ ) VULKAN_HPP_NOEXCEPT + { + pStdSliceHeader = pStdSliceHeader_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH264NaluSliceInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264NaluSliceInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264NaluSliceInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH264NaluSliceInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, constantQp, pStdSliceHeader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264NaluSliceInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264NaluSliceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( constantQp == 
rhs.constantQp ) && ( pStdSliceHeader == rhs.pStdSliceHeader ); +# endif + } + + bool operator!=( VideoEncodeH264NaluSliceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264NaluSliceInfoKHR; + const void * pNext = {}; + int32_t constantQp = {}; + const StdVideoEncodeH264SliceHeader * pStdSliceHeader = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264NaluSliceInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH264PictureInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264PictureInfoKHR.html + struct VideoEncodeH264PictureInfoKHR + { + using NativeType = VkVideoEncodeH264PictureInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264PictureInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264PictureInfoKHR( uint32_t naluSliceEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR * pNaluSliceEntries_ = {}, + const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 generatePrefixNalu_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , naluSliceEntryCount{ naluSliceEntryCount_ } + , pNaluSliceEntries{ pNaluSliceEntries_ } + , pStdPictureInfo{ pStdPictureInfo_ } + , generatePrefixNalu{ generatePrefixNalu_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH264PictureInfoKHR( VideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264PictureInfoKHR( VkVideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264PictureInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264PictureInfoKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & naluSliceEntries_, + const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 generatePrefixNalu_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , naluSliceEntryCount( static_cast( naluSliceEntries_.size() ) ) + , pNaluSliceEntries( naluSliceEntries_.data() ) + , pStdPictureInfo( pStdPictureInfo_ ) + , generatePrefixNalu( generatePrefixNalu_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeH264PictureInfoKHR & operator=( VideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH264PictureInfoKHR & operator=( VkVideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setNaluSliceEntryCount( uint32_t naluSliceEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + naluSliceEntryCount = naluSliceEntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & + setPNaluSliceEntries( const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR * pNaluSliceEntries_ ) VULKAN_HPP_NOEXCEPT + { + pNaluSliceEntries = pNaluSliceEntries_; + return *this; + } + +# if 
!defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264PictureInfoKHR & setNaluSliceEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & naluSliceEntries_ ) VULKAN_HPP_NOEXCEPT + { + naluSliceEntryCount = static_cast( naluSliceEntries_.size() ); + pNaluSliceEntries = naluSliceEntries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setPStdPictureInfo( const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setGeneratePrefixNalu( VULKAN_HPP_NAMESPACE::Bool32 generatePrefixNalu_ ) VULKAN_HPP_NOEXCEPT + { + generatePrefixNalu = generatePrefixNalu_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH264PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264PictureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH264PictureInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, naluSliceEntryCount, pNaluSliceEntries, pStdPictureInfo, generatePrefixNalu ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264PictureInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( naluSliceEntryCount == rhs.naluSliceEntryCount ) && + ( pNaluSliceEntries == rhs.pNaluSliceEntries ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && ( generatePrefixNalu == rhs.generatePrefixNalu ); +# endif + } + + bool operator!=( VideoEncodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264PictureInfoKHR; + const void * pNext = {}; + uint32_t naluSliceEntryCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR * pNaluSliceEntries = {}; + const StdVideoEncodeH264PictureInfo * pStdPictureInfo = {}; + VULKAN_HPP_NAMESPACE::Bool32 generatePrefixNalu = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264PictureInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH264ProfileInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264ProfileInfoKHR.html + struct VideoEncodeH264ProfileInfoKHR + { + using NativeType = VkVideoEncodeH264ProfileInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264ProfileInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileInfoKHR( StdVideoH264ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdProfileIdc{ stdProfileIdc_ } + { + } + + 
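+
+    // Note: stdProfileIdc is the plain C type StdVideoH264ProfileIdc from the Vulkan Video std
+    // headers, which is why the comparison operators below fall back to memcmp instead of
+    // operator==. Illustrative sketch only (the chained call is an example, not something
+    // defined in this header):
+    //   auto profileInfo = VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoKHR{}
+    //                        .setStdProfileIdc( STD_VIDEO_H264_PROFILE_IDC_HIGH );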
VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileInfoKHR( VideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264ProfileInfoKHR( VkVideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264ProfileInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264ProfileInfoKHR & operator=( VideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH264ProfileInfoKHR & operator=( VkVideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileInfoKHR & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + { + stdProfileIdc = stdProfileIdc_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH264ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH264ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdProfileIdc ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( VideoEncodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 );
+    }
+
+    bool operator!=( VideoEncodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType         = StructureType::eVideoEncodeH264ProfileInfoKHR;
+    const void *                        pNext         = {};
+    StdVideoH264ProfileIdc              stdProfileIdc = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264ProfileInfoKHR>
+  {
+    using Type = VideoEncodeH264ProfileInfoKHR;
+  };
+
+  // wrapper struct for struct VkVideoEncodeH264QpKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264QpKHR.html
+  struct VideoEncodeH264QpKHR
+  {
+    using NativeType = VkVideoEncodeH264QpKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264QpKHR( int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {} ) VULKAN_HPP_NOEXCEPT
+      : qpI{ qpI_ }
+      , qpP{ qpP_ }
+      , qpB{ qpB_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264QpKHR( VideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264QpKHR( VkVideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264QpKHR( *reinterpret_cast<VideoEncodeH264QpKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoEncodeH264QpKHR & operator=( VideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    VideoEncodeH264QpKHR & operator=( VkVideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpKHR & setQpI( int32_t qpI_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qpI = qpI_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpKHR & setQpP( int32_t qpP_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qpP = qpP_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpKHR & setQpB( int32_t qpB_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qpB = qpB_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkVideoEncodeH264QpKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264QpKHR *>( this );
+    }
+
+    operator VkVideoEncodeH264QpKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264QpKHR *>( this );
+    }
+
+    operator VkVideoEncodeH264QpKHR const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkVideoEncodeH264QpKHR *>( this );
+    }
+
+    operator VkVideoEncodeH264QpKHR *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkVideoEncodeH264QpKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<int32_t const &, int32_t const &, int32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( qpI, qpP, qpB );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeH264QpKHR const & ) const = default;
+#else
+    bool operator==( VideoEncodeH264QpKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( qpI == rhs.qpI ) && ( qpP == rhs.qpP ) && ( qpB == rhs.qpB );
+#  endif
+    }
+
+    bool operator!=( VideoEncodeH264QpKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    int32_t qpI = {};
+    int32_t qpP = {};
+    int32_t
qpB = {}; + }; + + // wrapper struct for struct VkVideoEncodeH264QualityLevelPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264QualityLevelPropertiesKHR.html + struct VideoEncodeH264QualityLevelPropertiesKHR + { + using NativeType = VkVideoEncodeH264QualityLevelPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264QualityLevelPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264QualityLevelPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR preferredRateControlFlags_ = {}, + uint32_t preferredGopFrameCount_ = {}, + uint32_t preferredIdrPeriod_ = {}, + uint32_t preferredConsecutiveBFrameCount_ = {}, + uint32_t preferredTemporalLayerCount_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR preferredConstantQp_ = {}, + uint32_t preferredMaxL0ReferenceCount_ = {}, + uint32_t preferredMaxL1ReferenceCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 preferredStdEntropyCodingModeFlag_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , preferredRateControlFlags{ preferredRateControlFlags_ } + , preferredGopFrameCount{ preferredGopFrameCount_ } + , preferredIdrPeriod{ preferredIdrPeriod_ } + , preferredConsecutiveBFrameCount{ preferredConsecutiveBFrameCount_ } + , preferredTemporalLayerCount{ preferredTemporalLayerCount_ } + , preferredConstantQp{ preferredConstantQp_ } + , preferredMaxL0ReferenceCount{ preferredMaxL0ReferenceCount_ } + , preferredMaxL1ReferenceCount{ preferredMaxL1ReferenceCount_ } + , preferredStdEntropyCodingModeFlag{ preferredStdEntropyCodingModeFlag_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH264QualityLevelPropertiesKHR( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264QualityLevelPropertiesKHR( VkVideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264QualityLevelPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264QualityLevelPropertiesKHR & operator=( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH264QualityLevelPropertiesKHR & operator=( VkVideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeH264QualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264QualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264QualityLevelPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH264QualityLevelPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + preferredRateControlFlags, + preferredGopFrameCount, + preferredIdrPeriod, + preferredConsecutiveBFrameCount, + preferredTemporalLayerCount, + preferredConstantQp, + preferredMaxL0ReferenceCount, + preferredMaxL1ReferenceCount, + preferredStdEntropyCodingModeFlag ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264QualityLevelPropertiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( preferredRateControlFlags == rhs.preferredRateControlFlags ) && + ( preferredGopFrameCount == rhs.preferredGopFrameCount ) && ( preferredIdrPeriod == rhs.preferredIdrPeriod ) && + ( preferredConsecutiveBFrameCount == rhs.preferredConsecutiveBFrameCount ) && ( preferredTemporalLayerCount == rhs.preferredTemporalLayerCount ) && + ( preferredConstantQp == rhs.preferredConstantQp ) && ( preferredMaxL0ReferenceCount == rhs.preferredMaxL0ReferenceCount ) && + ( preferredMaxL1ReferenceCount == rhs.preferredMaxL1ReferenceCount ) && + ( preferredStdEntropyCodingModeFlag == rhs.preferredStdEntropyCodingModeFlag ); +# endif + } + + bool operator!=( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264QualityLevelPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR preferredRateControlFlags = {}; + uint32_t preferredGopFrameCount = {}; + uint32_t preferredIdrPeriod = {}; + uint32_t preferredConsecutiveBFrameCount = {}; + uint32_t preferredTemporalLayerCount = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR preferredConstantQp = {}; + uint32_t preferredMaxL0ReferenceCount = {}; + uint32_t preferredMaxL1ReferenceCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 preferredStdEntropyCodingModeFlag = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264QualityLevelPropertiesKHR; + }; + + // wrapper struct for struct VkVideoEncodeH264QuantizationMapCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264QuantizationMapCapabilitiesKHR.html + struct VideoEncodeH264QuantizationMapCapabilitiesKHR + { + using NativeType = VkVideoEncodeH264QuantizationMapCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264QuantizationMapCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeH264QuantizationMapCapabilitiesKHR( int32_t minQpDelta_ = {}, int32_t maxQpDelta_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minQpDelta{ minQpDelta_ } + , maxQpDelta{ maxQpDelta_ } + { + } + + VULKAN_HPP_CONSTEXPR + VideoEncodeH264QuantizationMapCapabilitiesKHR( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264QuantizationMapCapabilitiesKHR( VkVideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264QuantizationMapCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264QuantizationMapCapabilitiesKHR & operator=( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH264QuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + 
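+    // Note (illustrative comment, not generated code): this struct is an output of a
+    // capabilities query - pNext is non-const and no member setters are emitted. It is
+    // typically chained behind the codec-specific and generic capability structs when
+    // calling vkGetPhysicalDeviceVideoCapabilitiesKHR, e.g. (hypothetical names):
+    //
+    //   vk::VideoEncodeH264QuantizationMapCapabilitiesKHR quantMapCaps{};
+    //   vk::VideoEncodeH264CapabilitiesKHR                h264Caps{};
+    //   vk::VideoCapabilitiesKHR                          caps{};
+    //   h264Caps.pNext = &quantMapCaps;
+    //   caps.pNext     = &h264Caps;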
operator VkVideoEncodeH264QuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264QuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264QuantizationMapCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH264QuantizationMapCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minQpDelta, maxQpDelta ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264QuantizationMapCapabilitiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minQpDelta == rhs.minQpDelta ) && ( maxQpDelta == rhs.maxQpDelta ); +# endif + } + + bool operator!=( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264QuantizationMapCapabilitiesKHR; + void * pNext = {}; + int32_t minQpDelta = {}; + int32_t maxQpDelta = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264QuantizationMapCapabilitiesKHR; + }; + + // wrapper struct for struct VkVideoEncodeH264RateControlInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264RateControlInfoKHR.html + struct VideoEncodeH264RateControlInfoKHR + { + using NativeType = VkVideoEncodeH264RateControlInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264RateControlInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR flags_ = {}, + uint32_t gopFrameCount_ = {}, + uint32_t idrPeriod_ = {}, + uint32_t consecutiveBFrameCount_ = {}, + uint32_t temporalLayerCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , gopFrameCount{ gopFrameCount_ } + , idrPeriod{ idrPeriod_ } + , consecutiveBFrameCount{ consecutiveBFrameCount_ } + , temporalLayerCount{ temporalLayerCount_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoKHR( VideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264RateControlInfoKHR( VkVideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264RateControlInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264RateControlInfoKHR & operator=( VideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH264RateControlInfoKHR & operator=( VkVideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
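+    // Illustrative sketch (comment only, hypothetical values): the chaining setters below
+    // typically describe the GOP layout before this struct is added to the pNext chain of
+    // the rate-control / coding-control info passed to vkCmdControlVideoCodingKHR, e.g.
+    //
+    //   auto h264RateControl = vk::VideoEncodeH264RateControlInfoKHR{}
+    //                            .setGopFrameCount( 16 )
+    //                            .setIdrPeriod( 16 )
+    //                            .setConsecutiveBFrameCount( 2 )
+    //                            .setTemporalLayerCount( 1 );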
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT + { + gopFrameCount = gopFrameCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setIdrPeriod( uint32_t idrPeriod_ ) VULKAN_HPP_NOEXCEPT + { + idrPeriod = idrPeriod_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT + { + consecutiveBFrameCount = consecutiveBFrameCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setTemporalLayerCount( uint32_t temporalLayerCount_ ) VULKAN_HPP_NOEXCEPT + { + temporalLayerCount = temporalLayerCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH264RateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264RateControlInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264RateControlInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH264RateControlInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, gopFrameCount, idrPeriod, consecutiveBFrameCount, temporalLayerCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264RateControlInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( gopFrameCount == rhs.gopFrameCount ) && + ( idrPeriod == rhs.idrPeriod ) && ( consecutiveBFrameCount == rhs.consecutiveBFrameCount ) && ( temporalLayerCount == rhs.temporalLayerCount ); +# endif + } + + bool operator!=( VideoEncodeH264RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR flags = {}; + uint32_t gopFrameCount = {}; + uint32_t idrPeriod = {}; + uint32_t consecutiveBFrameCount = {}; + uint32_t temporalLayerCount = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264RateControlInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH264RateControlLayerInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264RateControlLayerInfoKHR.html + struct VideoEncodeH264RateControlLayerInfoKHR + { + using NativeType = VkVideoEncodeH264RateControlLayerInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eVideoEncodeH264RateControlLayerInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR minQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR maxQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR maxFrameSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useMinQp{ useMinQp_ } + , minQp{ minQp_ } + , useMaxQp{ useMaxQp_ } + , maxQp{ maxQp_ } + , useMaxFrameSize{ useMaxFrameSize_ } + , maxFrameSize{ maxFrameSize_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoKHR( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264RateControlLayerInfoKHR( VkVideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264RateControlLayerInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264RateControlLayerInfoKHR & operator=( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH264RateControlLayerInfoKHR & operator=( VkVideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT + { + useMinQp = useMinQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const & minQp_ ) VULKAN_HPP_NOEXCEPT + { + minQp = minQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT + { + useMaxQp = useMaxQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const & maxQp_ ) VULKAN_HPP_NOEXCEPT + { + maxQp = maxQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + useMaxFrameSize = useMaxFrameSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & + setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + maxFrameSize = maxFrameSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH264RateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264RateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264RateControlLayerInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH264RateControlLayerInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if 
defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264RateControlLayerInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMinQp == rhs.useMinQp ) && ( minQp == rhs.minQp ) && ( useMaxQp == rhs.useMaxQp ) && + ( maxQp == rhs.maxQp ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) && ( maxFrameSize == rhs.maxFrameSize ); +# endif + } + + bool operator!=( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlLayerInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMinQp = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR minQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxQp = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR maxQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR maxFrameSize = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264RateControlLayerInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH264SessionCreateInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264SessionCreateInfoKHR.html + struct VideoEncodeH264SessionCreateInfoKHR + { + using NativeType = VkVideoEncodeH264SessionCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionCreateInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc_ = {}, + StdVideoH264LevelIdc maxLevelIdc_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useMaxLevelIdc{ useMaxLevelIdc_ } + , maxLevelIdc{ maxLevelIdc_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionCreateInfoKHR( VideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionCreateInfoKHR( VkVideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264SessionCreateInfoKHR & operator=( VideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH264SessionCreateInfoKHR & operator=( VkVideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoKHR & setUseMaxLevelIdc( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc_ ) VULKAN_HPP_NOEXCEPT + { + 
useMaxLevelIdc = useMaxLevelIdc_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoKHR & setMaxLevelIdc( StdVideoH264LevelIdc maxLevelIdc_ ) VULKAN_HPP_NOEXCEPT + { + maxLevelIdc = maxLevelIdc_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH264SessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, useMaxLevelIdc, maxLevelIdc ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeH264SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = useMaxLevelIdc <=> rhs.useMaxLevelIdc; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoEncodeH264SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMaxLevelIdc == rhs.useMaxLevelIdc ) && + ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ) == 0 ); + } + + bool operator!=( VideoEncodeH264SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc = {}; + StdVideoH264LevelIdc maxLevelIdc = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264SessionCreateInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH264SessionParametersAddInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264SessionParametersAddInfoKHR.html + struct VideoEncodeH264SessionParametersAddInfoKHR + { + using NativeType = VkVideoEncodeH264SessionParametersAddInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersAddInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoKHR( uint32_t stdSPSCount_ = {}, + const StdVideoH264SequenceParameterSet * pStdSPSs_ = {}, + uint32_t stdPPSCount_ = {}, + const StdVideoH264PictureParameterSet * pStdPPSs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdSPSCount{ stdSPSCount_ } + , pStdSPSs{ pStdSPSs_ } + , stdPPSCount{ stdPPSCount_ } + , pStdPPSs{ pStdPPSs_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoKHR( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionParametersAddInfoKHR( VkVideoEncodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionParametersAddInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264SessionParametersAddInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stdSPSCount( static_cast( stdSPSs_.size() ) ) + , pStdSPSs( stdSPSs_.data() ) + , stdPPSCount( static_cast( stdPPSs_.size() ) ) + , pStdPPSs( stdPPSs_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeH264SessionParametersAddInfoKHR & operator=( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH264SessionParametersAddInfoKHR & operator=( VkVideoEncodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdSPSCount = stdSPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH264SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdSPSs = pStdSPSs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264SessionParametersAddInfoKHR & + setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT + { + stdSPSCount = static_cast( stdSPSs_.size() ); + pStdSPSs = stdSPSs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdPPSCount = stdPPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH264PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdPPSs = pStdPPSs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264SessionParametersAddInfoKHR & + setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT + { + stdPPSCount = static_cast( stdPPSs_.size() ); + pStdPPSs = stdPPSs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH264SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionParametersAddInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionParametersAddInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() 
const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264SessionParametersAddInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) && + ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs ); +# endif + } + + bool operator!=( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersAddInfoKHR; + const void * pNext = {}; + uint32_t stdSPSCount = {}; + const StdVideoH264SequenceParameterSet * pStdSPSs = {}; + uint32_t stdPPSCount = {}; + const StdVideoH264PictureParameterSet * pStdPPSs = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264SessionParametersAddInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH264SessionParametersCreateInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264SessionParametersCreateInfoKHR.html + struct VideoEncodeH264SessionParametersCreateInfoKHR + { + using NativeType = VkVideoEncodeH264SessionParametersCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeH264SessionParametersCreateInfoKHR( uint32_t maxStdSPSCount_ = {}, + uint32_t maxStdPPSCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxStdSPSCount{ maxStdSPSCount_ } + , maxStdPPSCount{ maxStdPPSCount_ } + , pParametersAddInfo{ pParametersAddInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR + VideoEncodeH264SessionParametersCreateInfoKHR( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionParametersCreateInfoKHR( VkVideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264SessionParametersCreateInfoKHR & operator=( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH264SessionParametersCreateInfoKHR & operator=( VkVideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT + { + maxStdSPSCount = maxStdSPSCount_; + return *this; + } + + 
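+    // Illustrative sketch (comment only, hypothetical names): SPS/PPS data is supplied through
+    // VideoEncodeH264SessionParametersAddInfoKHR (see its ArrayProxy setters above) and hooked
+    // up here via setPParametersAddInfo(), before this struct is chained into
+    // vk::VideoSessionParametersCreateInfoKHR::pNext, e.g.
+    //
+    //   StdVideoH264SequenceParameterSet sps{};   // filled with the encoder's SPS values
+    //   StdVideoH264PictureParameterSet  pps{};   // filled with the encoder's PPS values
+    //   vk::VideoEncodeH264SessionParametersAddInfoKHR addInfo{};
+    //   addInfo.setStdSPSs( sps ).setStdPPSs( pps );
+    //   auto h264Params = vk::VideoEncodeH264SessionParametersCreateInfoKHR{}
+    //                       .setMaxStdSPSCount( 1 )
+    //                       .setMaxStdPPSCount( 1 )
+    //                       .setPParametersAddInfo( &addInfo );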
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT + { + maxStdPPSCount = maxStdPPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR & + setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT + { + pParametersAddInfo = pParametersAddInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH264SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264SessionParametersCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) && ( maxStdPPSCount == rhs.maxStdPPSCount ) && + ( pParametersAddInfo == rhs.pParametersAddInfo ); +# endif + } + + bool operator!=( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersCreateInfoKHR; + const void * pNext = {}; + uint32_t maxStdSPSCount = {}; + uint32_t maxStdPPSCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR * pParametersAddInfo = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264SessionParametersCreateInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH264SessionParametersFeedbackInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264SessionParametersFeedbackInfoKHR.html + struct VideoEncodeH264SessionParametersFeedbackInfoKHR + { + using NativeType = VkVideoEncodeH264SessionParametersFeedbackInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersFeedbackInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersFeedbackInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 hasStdSPSOverrides_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hasStdPPSOverrides_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , hasStdSPSOverrides{ hasStdSPSOverrides_ } + , hasStdPPSOverrides{ hasStdPPSOverrides_ } + { + } + + VULKAN_HPP_CONSTEXPR + VideoEncodeH264SessionParametersFeedbackInfoKHR( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionParametersFeedbackInfoKHR( VkVideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionParametersFeedbackInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264SessionParametersFeedbackInfoKHR & operator=( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH264SessionParametersFeedbackInfoKHR & operator=( VkVideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeH264SessionParametersFeedbackInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionParametersFeedbackInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionParametersFeedbackInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionParametersFeedbackInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hasStdSPSOverrides, hasStdPPSOverrides ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264SessionParametersFeedbackInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasStdSPSOverrides == rhs.hasStdSPSOverrides ) && + ( hasStdPPSOverrides == rhs.hasStdPPSOverrides ); +# endif + } + + bool operator!=( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersFeedbackInfoKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasStdSPSOverrides = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasStdPPSOverrides = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264SessionParametersFeedbackInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH264SessionParametersGetInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH264SessionParametersGetInfoKHR.html + struct VideoEncodeH264SessionParametersGetInfoKHR + { + using NativeType = VkVideoEncodeH264SessionParametersGetInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersGetInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersGetInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS_ = {}, + uint32_t stdSPSId_ = {}, + uint32_t stdPPSId_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , writeStdSPS{ writeStdSPS_ } + , writeStdPPS{ writeStdPPS_ } + , stdSPSId{ stdSPSId_ } + , stdPPSId{ stdPPSId_ } + { + } + + VULKAN_HPP_CONSTEXPR 
VideoEncodeH264SessionParametersGetInfoKHR( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionParametersGetInfoKHR( VkVideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionParametersGetInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264SessionParametersGetInfoKHR & operator=( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH264SessionParametersGetInfoKHR & operator=( VkVideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setWriteStdSPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS_ ) VULKAN_HPP_NOEXCEPT + { + writeStdSPS = writeStdSPS_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setWriteStdPPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS_ ) VULKAN_HPP_NOEXCEPT + { + writeStdPPS = writeStdPPS_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setStdSPSId( uint32_t stdSPSId_ ) VULKAN_HPP_NOEXCEPT + { + stdSPSId = stdSPSId_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setStdPPSId( uint32_t stdPPSId_ ) VULKAN_HPP_NOEXCEPT + { + stdPPSId = stdPPSId_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH264SessionParametersGetInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionParametersGetInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionParametersGetInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionParametersGetInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, writeStdSPS, writeStdPPS, stdSPSId, stdPPSId ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264SessionParametersGetInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( writeStdSPS == rhs.writeStdSPS ) && ( writeStdPPS == rhs.writeStdPPS ) && + ( stdSPSId == rhs.stdSPSId ) && ( stdPPSId == rhs.stdPPSId ); +# endif + } + + bool operator!=( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersGetInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS = {}; + VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS = {}; + uint32_t stdSPSId = {}; + uint32_t stdPPSId = 
{}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264SessionParametersGetInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH265CapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265CapabilitiesKHR.html + struct VideoEncodeH265CapabilitiesKHR + { + using NativeType = VkVideoEncodeH265CapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265CapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsKHR flags_ = {}, + StdVideoH265LevelIdc maxLevelIdc_ = {}, + uint32_t maxSliceSegmentCount_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxTiles_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR ctbSizes_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsKHR transformBlockSizes_ = {}, + uint32_t maxPPictureL0ReferenceCount_ = {}, + uint32_t maxBPictureL0ReferenceCount_ = {}, + uint32_t maxL1ReferenceCount_ = {}, + uint32_t maxSubLayerCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 expectDyadicTemporalSubLayerPattern_ = {}, + int32_t minQp_ = {}, + int32_t maxQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265StdFlagsKHR stdSyntaxFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , maxLevelIdc{ maxLevelIdc_ } + , maxSliceSegmentCount{ maxSliceSegmentCount_ } + , maxTiles{ maxTiles_ } + , ctbSizes{ ctbSizes_ } + , transformBlockSizes{ transformBlockSizes_ } + , maxPPictureL0ReferenceCount{ maxPPictureL0ReferenceCount_ } + , maxBPictureL0ReferenceCount{ maxBPictureL0ReferenceCount_ } + , maxL1ReferenceCount{ maxL1ReferenceCount_ } + , maxSubLayerCount{ maxSubLayerCount_ } + , expectDyadicTemporalSubLayerPattern{ expectDyadicTemporalSubLayerPattern_ } + , minQp{ minQp_ } + , maxQp{ maxQp_ } + , prefersGopRemainingFrames{ prefersGopRemainingFrames_ } + , requiresGopRemainingFrames{ requiresGopRemainingFrames_ } + , stdSyntaxFlags{ stdSyntaxFlags_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilitiesKHR( VideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265CapabilitiesKHR( VkVideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265CapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265CapabilitiesKHR & operator=( VideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265CapabilitiesKHR & operator=( VkVideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeH265CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265CapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH265CapabilitiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# 
endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + maxLevelIdc, + maxSliceSegmentCount, + maxTiles, + ctbSizes, + transformBlockSizes, + maxPPictureL0ReferenceCount, + maxBPictureL0ReferenceCount, + maxL1ReferenceCount, + maxSubLayerCount, + expectDyadicTemporalSubLayerPattern, + minQp, + maxQp, + prefersGopRemainingFrames, + requiresGopRemainingFrames, + stdSyntaxFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = maxSliceSegmentCount <=> rhs.maxSliceSegmentCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxTiles <=> rhs.maxTiles; cmp != 0 ) + return cmp; + if ( auto cmp = ctbSizes <=> rhs.ctbSizes; cmp != 0 ) + return cmp; + if ( auto cmp = transformBlockSizes <=> rhs.transformBlockSizes; cmp != 0 ) + return cmp; + if ( auto cmp = maxPPictureL0ReferenceCount <=> rhs.maxPPictureL0ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxBPictureL0ReferenceCount <=> rhs.maxBPictureL0ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxL1ReferenceCount <=> rhs.maxL1ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxSubLayerCount <=> rhs.maxSubLayerCount; cmp != 0 ) + return cmp; + if ( auto cmp = expectDyadicTemporalSubLayerPattern <=> rhs.expectDyadicTemporalSubLayerPattern; cmp != 0 ) + return cmp; + if ( auto cmp = minQp <=> rhs.minQp; cmp != 0 ) + return cmp; + if ( auto cmp = maxQp <=> rhs.maxQp; cmp != 0 ) + return cmp; + if ( auto cmp = prefersGopRemainingFrames <=> rhs.prefersGopRemainingFrames; cmp != 0 ) + return cmp; + if ( auto cmp = requiresGopRemainingFrames <=> rhs.requiresGopRemainingFrames; cmp != 0 ) + return cmp; + if ( auto cmp = stdSyntaxFlags <=> rhs.stdSyntaxFlags; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoEncodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ) == 0 ) && ( maxSliceSegmentCount == rhs.maxSliceSegmentCount ) && + ( maxTiles == rhs.maxTiles ) && ( ctbSizes == rhs.ctbSizes ) && ( transformBlockSizes == rhs.transformBlockSizes ) && + ( maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount ) && ( maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount ) && + ( maxL1ReferenceCount == rhs.maxL1ReferenceCount ) && ( maxSubLayerCount == rhs.maxSubLayerCount ) && + ( expectDyadicTemporalSubLayerPattern == rhs.expectDyadicTemporalSubLayerPattern ) && ( minQp == rhs.minQp ) && ( maxQp == rhs.maxQp ) && + ( prefersGopRemainingFrames == rhs.prefersGopRemainingFrames ) && ( requiresGopRemainingFrames == rhs.requiresGopRemainingFrames ) && + ( stdSyntaxFlags == rhs.stdSyntaxFlags ); + } + + bool operator!=( VideoEncodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265CapabilitiesKHR; + void * 
pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsKHR flags = {}; + StdVideoH265LevelIdc maxLevelIdc = {}; + uint32_t maxSliceSegmentCount = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxTiles = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR ctbSizes = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsKHR transformBlockSizes = {}; + uint32_t maxPPictureL0ReferenceCount = {}; + uint32_t maxBPictureL0ReferenceCount = {}; + uint32_t maxL1ReferenceCount = {}; + uint32_t maxSubLayerCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 expectDyadicTemporalSubLayerPattern = {}; + int32_t minQp = {}; + int32_t maxQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames = {}; + VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265StdFlagsKHR stdSyntaxFlags = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265CapabilitiesKHR; + }; + + // wrapper struct for struct VkVideoEncodeH265DpbSlotInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265DpbSlotInfoKHR.html + struct VideoEncodeH265DpbSlotInfoKHR + { + using NativeType = VkVideoEncodeH265DpbSlotInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265DpbSlotInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoKHR( const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdReferenceInfo{ pStdReferenceInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoKHR( VideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265DpbSlotInfoKHR( VkVideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265DpbSlotInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265DpbSlotInfoKHR & operator=( VideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265DpbSlotInfoKHR & operator=( VkVideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoKHR & + setPStdReferenceInfo( const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdReferenceInfo = pStdReferenceInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH265DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265DpbSlotInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH265DpbSlotInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdReferenceInfo ); + } +#endif + +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265DpbSlotInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); +# endif + } + + bool operator!=( VideoEncodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265DpbSlotInfoKHR; + const void * pNext = {}; + const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265DpbSlotInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH265FrameSizeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265FrameSizeKHR.html + struct VideoEncodeH265FrameSizeKHR + { + using NativeType = VkVideoEncodeH265FrameSizeKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeKHR( uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {} ) VULKAN_HPP_NOEXCEPT + : frameISize{ frameISize_ } + , framePSize{ framePSize_ } + , frameBSize{ frameBSize_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeKHR( VideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265FrameSizeKHR( VkVideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265FrameSizeKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265FrameSizeKHR & operator=( VideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265FrameSizeKHR & operator=( VkVideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeKHR & setFrameISize( uint32_t frameISize_ ) VULKAN_HPP_NOEXCEPT + { + frameISize = frameISize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeKHR & setFramePSize( uint32_t framePSize_ ) VULKAN_HPP_NOEXCEPT + { + framePSize = framePSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeKHR & setFrameBSize( uint32_t frameBSize_ ) VULKAN_HPP_NOEXCEPT + { + frameBSize = frameBSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH265FrameSizeKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265FrameSizeKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265FrameSizeKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH265FrameSizeKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( frameISize, framePSize, frameBSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265FrameSizeKHR const & ) const = default; +#else + bool operator==( 
VideoEncodeH265FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( frameISize == rhs.frameISize ) && ( framePSize == rhs.framePSize ) && ( frameBSize == rhs.frameBSize ); +# endif + } + + bool operator!=( VideoEncodeH265FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t frameISize = {}; + uint32_t framePSize = {}; + uint32_t frameBSize = {}; + }; + + // wrapper struct for struct VkVideoEncodeH265GopRemainingFrameInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265GopRemainingFrameInfoKHR.html + struct VideoEncodeH265GopRemainingFrameInfoKHR + { + using NativeType = VkVideoEncodeH265GopRemainingFrameInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265GopRemainingFrameInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265GopRemainingFrameInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ = {}, + uint32_t gopRemainingI_ = {}, + uint32_t gopRemainingP_ = {}, + uint32_t gopRemainingB_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useGopRemainingFrames{ useGopRemainingFrames_ } + , gopRemainingI{ gopRemainingI_ } + , gopRemainingP{ gopRemainingP_ } + , gopRemainingB{ gopRemainingB_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265GopRemainingFrameInfoKHR( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265GopRemainingFrameInfoKHR( VkVideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265GopRemainingFrameInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265GopRemainingFrameInfoKHR & operator=( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265GopRemainingFrameInfoKHR & operator=( VkVideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & + setUseGopRemainingFrames( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ ) VULKAN_HPP_NOEXCEPT + { + useGopRemainingFrames = useGopRemainingFrames_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setGopRemainingI( uint32_t gopRemainingI_ ) VULKAN_HPP_NOEXCEPT + { + gopRemainingI = gopRemainingI_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setGopRemainingP( uint32_t gopRemainingP_ ) VULKAN_HPP_NOEXCEPT + { + gopRemainingP = gopRemainingP_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setGopRemainingB( uint32_t gopRemainingB_ ) VULKAN_HPP_NOEXCEPT + { + gopRemainingB = gopRemainingB_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH265GopRemainingFrameInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkVideoEncodeH265GopRemainingFrameInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265GopRemainingFrameInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH265GopRemainingFrameInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, useGopRemainingFrames, gopRemainingI, gopRemainingP, gopRemainingB ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265GopRemainingFrameInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useGopRemainingFrames == rhs.useGopRemainingFrames ) && + ( gopRemainingI == rhs.gopRemainingI ) && ( gopRemainingP == rhs.gopRemainingP ) && ( gopRemainingB == rhs.gopRemainingB ); +# endif + } + + bool operator!=( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265GopRemainingFrameInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames = {}; + uint32_t gopRemainingI = {}; + uint32_t gopRemainingP = {}; + uint32_t gopRemainingB = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265GopRemainingFrameInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH265NaluSliceSegmentInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265NaluSliceSegmentInfoKHR.html + struct VideoEncodeH265NaluSliceSegmentInfoKHR + { + using NativeType = VkVideoEncodeH265NaluSliceSegmentInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265NaluSliceSegmentInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentInfoKHR( int32_t constantQp_ = {}, + const StdVideoEncodeH265SliceSegmentHeader * pStdSliceSegmentHeader_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , constantQp{ constantQp_ } + , pStdSliceSegmentHeader{ pStdSliceSegmentHeader_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentInfoKHR( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265NaluSliceSegmentInfoKHR( VkVideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265NaluSliceSegmentInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265NaluSliceSegmentInfoKHR & operator=( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265NaluSliceSegmentInfoKHR & operator=( VkVideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoKHR & 
setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoKHR & setConstantQp( int32_t constantQp_ ) VULKAN_HPP_NOEXCEPT + { + constantQp = constantQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoKHR & + setPStdSliceSegmentHeader( const StdVideoEncodeH265SliceSegmentHeader * pStdSliceSegmentHeader_ ) VULKAN_HPP_NOEXCEPT + { + pStdSliceSegmentHeader = pStdSliceSegmentHeader_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH265NaluSliceSegmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265NaluSliceSegmentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265NaluSliceSegmentInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH265NaluSliceSegmentInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, constantQp, pStdSliceSegmentHeader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265NaluSliceSegmentInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( constantQp == rhs.constantQp ) && ( pStdSliceSegmentHeader == rhs.pStdSliceSegmentHeader ); +# endif + } + + bool operator!=( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265NaluSliceSegmentInfoKHR; + const void * pNext = {}; + int32_t constantQp = {}; + const StdVideoEncodeH265SliceSegmentHeader * pStdSliceSegmentHeader = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265NaluSliceSegmentInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH265PictureInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265PictureInfoKHR.html + struct VideoEncodeH265PictureInfoKHR + { + using NativeType = VkVideoEncodeH265PictureInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265PictureInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265PictureInfoKHR( uint32_t naluSliceSegmentEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR * pNaluSliceSegmentEntries_ = {}, + const StdVideoEncodeH265PictureInfo * pStdPictureInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , naluSliceSegmentEntryCount{ naluSliceSegmentEntryCount_ } + , pNaluSliceSegmentEntries{ pNaluSliceSegmentEntries_ } + , pStdPictureInfo{ pStdPictureInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265PictureInfoKHR( VideoEncodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265PictureInfoKHR( VkVideoEncodeH265PictureInfoKHR const & rhs 
) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265PictureInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265PictureInfoKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & naluSliceSegmentEntries_, + const StdVideoEncodeH265PictureInfo * pStdPictureInfo_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , naluSliceSegmentEntryCount( static_cast( naluSliceSegmentEntries_.size() ) ) + , pNaluSliceSegmentEntries( naluSliceSegmentEntries_.data() ) + , pStdPictureInfo( pStdPictureInfo_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeH265PictureInfoKHR & operator=( VideoEncodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265PictureInfoKHR & operator=( VkVideoEncodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR & setNaluSliceSegmentEntryCount( uint32_t naluSliceSegmentEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + naluSliceSegmentEntryCount = naluSliceSegmentEntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR & + setPNaluSliceSegmentEntries( const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR * pNaluSliceSegmentEntries_ ) VULKAN_HPP_NOEXCEPT + { + pNaluSliceSegmentEntries = pNaluSliceSegmentEntries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265PictureInfoKHR & setNaluSliceSegmentEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & naluSliceSegmentEntries_ ) + VULKAN_HPP_NOEXCEPT + { + naluSliceSegmentEntryCount = static_cast( naluSliceSegmentEntries_.size() ); + pNaluSliceSegmentEntries = naluSliceSegmentEntries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR & setPStdPictureInfo( const StdVideoEncodeH265PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH265PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265PictureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265PictureInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH265PictureInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, naluSliceSegmentEntryCount, pNaluSliceSegmentEntries, pStdPictureInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265PictureInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( naluSliceSegmentEntryCount == 
rhs.naluSliceSegmentEntryCount ) && + ( pNaluSliceSegmentEntries == rhs.pNaluSliceSegmentEntries ) && ( pStdPictureInfo == rhs.pStdPictureInfo ); +# endif + } + + bool operator!=( VideoEncodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265PictureInfoKHR; + const void * pNext = {}; + uint32_t naluSliceSegmentEntryCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR * pNaluSliceSegmentEntries = {}; + const StdVideoEncodeH265PictureInfo * pStdPictureInfo = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265PictureInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH265ProfileInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265ProfileInfoKHR.html + struct VideoEncodeH265ProfileInfoKHR + { + using NativeType = VkVideoEncodeH265ProfileInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265ProfileInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileInfoKHR( StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdProfileIdc{ stdProfileIdc_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileInfoKHR( VideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265ProfileInfoKHR( VkVideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265ProfileInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265ProfileInfoKHR & operator=( VideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265ProfileInfoKHR & operator=( VkVideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileInfoKHR & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + { + stdProfileIdc = stdProfileIdc_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH265ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265ProfileInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH265ProfileInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdProfileIdc ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + 
if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoEncodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 ); + } + + bool operator!=( VideoEncodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265ProfileInfoKHR; + const void * pNext = {}; + StdVideoH265ProfileIdc stdProfileIdc = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265ProfileInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH265QpKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265QpKHR.html + struct VideoEncodeH265QpKHR + { + using NativeType = VkVideoEncodeH265QpKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265QpKHR( int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {} ) VULKAN_HPP_NOEXCEPT + : qpI{ qpI_ } + , qpP{ qpP_ } + , qpB{ qpB_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265QpKHR( VideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265QpKHR( VkVideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265QpKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265QpKHR & operator=( VideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265QpKHR & operator=( VkVideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpKHR & setQpI( int32_t qpI_ ) VULKAN_HPP_NOEXCEPT + { + qpI = qpI_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpKHR & setQpP( int32_t qpP_ ) VULKAN_HPP_NOEXCEPT + { + qpP = qpP_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpKHR & setQpB( int32_t qpB_ ) VULKAN_HPP_NOEXCEPT + { + qpB = qpB_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH265QpKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265QpKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265QpKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH265QpKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( qpI, qpP, qpB ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265QpKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265QpKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( qpI == rhs.qpI ) && ( qpP == rhs.qpP ) && ( qpB == rhs.qpB ); +# endif + } + + bool operator!=( VideoEncodeH265QpKHR const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + int32_t qpI = {}; + int32_t qpP = {}; + int32_t qpB = {}; + }; + + // wrapper struct for struct VkVideoEncodeH265QualityLevelPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265QualityLevelPropertiesKHR.html + struct VideoEncodeH265QualityLevelPropertiesKHR + { + using NativeType = VkVideoEncodeH265QualityLevelPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265QualityLevelPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265QualityLevelPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR preferredRateControlFlags_ = {}, + uint32_t preferredGopFrameCount_ = {}, + uint32_t preferredIdrPeriod_ = {}, + uint32_t preferredConsecutiveBFrameCount_ = {}, + uint32_t preferredSubLayerCount_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR preferredConstantQp_ = {}, + uint32_t preferredMaxL0ReferenceCount_ = {}, + uint32_t preferredMaxL1ReferenceCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , preferredRateControlFlags{ preferredRateControlFlags_ } + , preferredGopFrameCount{ preferredGopFrameCount_ } + , preferredIdrPeriod{ preferredIdrPeriod_ } + , preferredConsecutiveBFrameCount{ preferredConsecutiveBFrameCount_ } + , preferredSubLayerCount{ preferredSubLayerCount_ } + , preferredConstantQp{ preferredConstantQp_ } + , preferredMaxL0ReferenceCount{ preferredMaxL0ReferenceCount_ } + , preferredMaxL1ReferenceCount{ preferredMaxL1ReferenceCount_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265QualityLevelPropertiesKHR( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265QualityLevelPropertiesKHR( VkVideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265QualityLevelPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265QualityLevelPropertiesKHR & operator=( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265QualityLevelPropertiesKHR & operator=( VkVideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeH265QualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265QualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265QualityLevelPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH265QualityLevelPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + preferredRateControlFlags, + preferredGopFrameCount, + preferredIdrPeriod, + preferredConsecutiveBFrameCount, + preferredSubLayerCount, + preferredConstantQp, + preferredMaxL0ReferenceCount, + preferredMaxL1ReferenceCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265QualityLevelPropertiesKHR 
const & ) const = default; +#else + bool operator==( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( preferredRateControlFlags == rhs.preferredRateControlFlags ) && + ( preferredGopFrameCount == rhs.preferredGopFrameCount ) && ( preferredIdrPeriod == rhs.preferredIdrPeriod ) && + ( preferredConsecutiveBFrameCount == rhs.preferredConsecutiveBFrameCount ) && ( preferredSubLayerCount == rhs.preferredSubLayerCount ) && + ( preferredConstantQp == rhs.preferredConstantQp ) && ( preferredMaxL0ReferenceCount == rhs.preferredMaxL0ReferenceCount ) && + ( preferredMaxL1ReferenceCount == rhs.preferredMaxL1ReferenceCount ); +# endif + } + + bool operator!=( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265QualityLevelPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR preferredRateControlFlags = {}; + uint32_t preferredGopFrameCount = {}; + uint32_t preferredIdrPeriod = {}; + uint32_t preferredConsecutiveBFrameCount = {}; + uint32_t preferredSubLayerCount = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR preferredConstantQp = {}; + uint32_t preferredMaxL0ReferenceCount = {}; + uint32_t preferredMaxL1ReferenceCount = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265QualityLevelPropertiesKHR; + }; + + // wrapper struct for struct VkVideoEncodeH265QuantizationMapCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265QuantizationMapCapabilitiesKHR.html + struct VideoEncodeH265QuantizationMapCapabilitiesKHR + { + using NativeType = VkVideoEncodeH265QuantizationMapCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265QuantizationMapCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeH265QuantizationMapCapabilitiesKHR( int32_t minQpDelta_ = {}, int32_t maxQpDelta_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minQpDelta{ minQpDelta_ } + , maxQpDelta{ maxQpDelta_ } + { + } + + VULKAN_HPP_CONSTEXPR + VideoEncodeH265QuantizationMapCapabilitiesKHR( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265QuantizationMapCapabilitiesKHR( VkVideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265QuantizationMapCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265QuantizationMapCapabilitiesKHR & operator=( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265QuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeH265QuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265QuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + 
operator VkVideoEncodeH265QuantizationMapCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH265QuantizationMapCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minQpDelta, maxQpDelta ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265QuantizationMapCapabilitiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minQpDelta == rhs.minQpDelta ) && ( maxQpDelta == rhs.maxQpDelta ); +# endif + } + + bool operator!=( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265QuantizationMapCapabilitiesKHR; + void * pNext = {}; + int32_t minQpDelta = {}; + int32_t maxQpDelta = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265QuantizationMapCapabilitiesKHR; + }; + + // wrapper struct for struct VkVideoEncodeH265RateControlInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265RateControlInfoKHR.html + struct VideoEncodeH265RateControlInfoKHR + { + using NativeType = VkVideoEncodeH265RateControlInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265RateControlInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR flags_ = {}, + uint32_t gopFrameCount_ = {}, + uint32_t idrPeriod_ = {}, + uint32_t consecutiveBFrameCount_ = {}, + uint32_t subLayerCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , gopFrameCount{ gopFrameCount_ } + , idrPeriod{ idrPeriod_ } + , consecutiveBFrameCount{ consecutiveBFrameCount_ } + , subLayerCount{ subLayerCount_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoKHR( VideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265RateControlInfoKHR( VkVideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265RateControlInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265RateControlInfoKHR & operator=( VideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265RateControlInfoKHR & operator=( VkVideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR 
flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT + { + gopFrameCount = gopFrameCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setIdrPeriod( uint32_t idrPeriod_ ) VULKAN_HPP_NOEXCEPT + { + idrPeriod = idrPeriod_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT + { + consecutiveBFrameCount = consecutiveBFrameCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setSubLayerCount( uint32_t subLayerCount_ ) VULKAN_HPP_NOEXCEPT + { + subLayerCount = subLayerCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH265RateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265RateControlInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265RateControlInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH265RateControlInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, gopFrameCount, idrPeriod, consecutiveBFrameCount, subLayerCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265RateControlInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( gopFrameCount == rhs.gopFrameCount ) && + ( idrPeriod == rhs.idrPeriod ) && ( consecutiveBFrameCount == rhs.consecutiveBFrameCount ) && ( subLayerCount == rhs.subLayerCount ); +# endif + } + + bool operator!=( VideoEncodeH265RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR flags = {}; + uint32_t gopFrameCount = {}; + uint32_t idrPeriod = {}; + uint32_t consecutiveBFrameCount = {}; + uint32_t subLayerCount = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265RateControlInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH265RateControlLayerInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265RateControlLayerInfoKHR.html + struct VideoEncodeH265RateControlLayerInfoKHR + { + using NativeType = VkVideoEncodeH265RateControlLayerInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265RateControlLayerInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR minQp_ = {}, + 
VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR maxQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR maxFrameSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useMinQp{ useMinQp_ } + , minQp{ minQp_ } + , useMaxQp{ useMaxQp_ } + , maxQp{ maxQp_ } + , useMaxFrameSize{ useMaxFrameSize_ } + , maxFrameSize{ maxFrameSize_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoKHR( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265RateControlLayerInfoKHR( VkVideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265RateControlLayerInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265RateControlLayerInfoKHR & operator=( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265RateControlLayerInfoKHR & operator=( VkVideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT + { + useMinQp = useMinQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR const & minQp_ ) VULKAN_HPP_NOEXCEPT + { + minQp = minQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT + { + useMaxQp = useMaxQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR const & maxQp_ ) VULKAN_HPP_NOEXCEPT + { + maxQp = maxQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + useMaxFrameSize = useMaxFrameSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & + setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + maxFrameSize = maxFrameSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH265RateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265RateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265RateControlLayerInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH265RateControlLayerInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto 
operator<=>( VideoEncodeH265RateControlLayerInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMinQp == rhs.useMinQp ) && ( minQp == rhs.minQp ) && ( useMaxQp == rhs.useMaxQp ) && + ( maxQp == rhs.maxQp ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) && ( maxFrameSize == rhs.maxFrameSize ); +# endif + } + + bool operator!=( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlLayerInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMinQp = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR minQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxQp = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR maxQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR maxFrameSize = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265RateControlLayerInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH265SessionCreateInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265SessionCreateInfoKHR.html + struct VideoEncodeH265SessionCreateInfoKHR + { + using NativeType = VkVideoEncodeH265SessionCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionCreateInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc_ = {}, + StdVideoH265LevelIdc maxLevelIdc_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useMaxLevelIdc{ useMaxLevelIdc_ } + , maxLevelIdc{ maxLevelIdc_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionCreateInfoKHR( VideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265SessionCreateInfoKHR( VkVideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265SessionCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265SessionCreateInfoKHR & operator=( VideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265SessionCreateInfoKHR & operator=( VkVideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoKHR & setUseMaxLevelIdc( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc_ ) VULKAN_HPP_NOEXCEPT + { + useMaxLevelIdc = useMaxLevelIdc_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoKHR & setMaxLevelIdc( StdVideoH265LevelIdc maxLevelIdc_ ) VULKAN_HPP_NOEXCEPT + { + maxLevelIdc = maxLevelIdc_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator 
VkVideoEncodeH265SessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, useMaxLevelIdc, maxLevelIdc ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeH265SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = useMaxLevelIdc <=> rhs.useMaxLevelIdc; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoEncodeH265SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMaxLevelIdc == rhs.useMaxLevelIdc ) && + ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ) == 0 ); + } + + bool operator!=( VideoEncodeH265SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc = {}; + StdVideoH265LevelIdc maxLevelIdc = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265SessionCreateInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH265SessionParametersAddInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265SessionParametersAddInfoKHR.html + struct VideoEncodeH265SessionParametersAddInfoKHR + { + using NativeType = VkVideoEncodeH265SessionParametersAddInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersAddInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoKHR( uint32_t stdVPSCount_ = {}, + const StdVideoH265VideoParameterSet * pStdVPSs_ = {}, + uint32_t stdSPSCount_ = {}, + const StdVideoH265SequenceParameterSet * pStdSPSs_ = {}, + uint32_t stdPPSCount_ = {}, + const StdVideoH265PictureParameterSet * pStdPPSs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdVPSCount{ stdVPSCount_ } + , pStdVPSs{ pStdVPSs_ } + , stdSPSCount{ stdSPSCount_ } + , pStdSPSs{ pStdSPSs_ } + , stdPPSCount{ stdPPSCount_ } + , pStdPPSs{ pStdPPSs_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoKHR( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265SessionParametersAddInfoKHR( VkVideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + 
: VideoEncodeH265SessionParametersAddInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265SessionParametersAddInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdVPSs_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stdVPSCount( static_cast( stdVPSs_.size() ) ) + , pStdVPSs( stdVPSs_.data() ) + , stdSPSCount( static_cast( stdSPSs_.size() ) ) + , pStdSPSs( stdSPSs_.data() ) + , stdPPSCount( static_cast( stdPPSs_.size() ) ) + , pStdPPSs( stdPPSs_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeH265SessionParametersAddInfoKHR & operator=( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265SessionParametersAddInfoKHR & operator=( VkVideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setStdVPSCount( uint32_t stdVPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdVPSCount = stdVPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPStdVPSs( const StdVideoH265VideoParameterSet * pStdVPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdVPSs = pStdVPSs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265SessionParametersAddInfoKHR & + setStdVPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdVPSs_ ) VULKAN_HPP_NOEXCEPT + { + stdVPSCount = static_cast( stdVPSs_.size() ); + pStdVPSs = stdVPSs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdSPSCount = stdSPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH265SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdSPSs = pStdSPSs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265SessionParametersAddInfoKHR & + setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT + { + stdSPSCount = static_cast( stdSPSs_.size() ); + pStdSPSs = stdSPSs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdPPSCount = stdPPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH265PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdPPSs = pStdPPSs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265SessionParametersAddInfoKHR & + setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT + { + stdPPSCount = static_cast( stdPPSs_.size() ); + pStdPPSs = stdPPSs_.data(); + return *this; + } +# 
endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH265SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionParametersAddInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionParametersAddInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdVPSCount, pStdVPSs, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265SessionParametersAddInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdVPSCount == rhs.stdVPSCount ) && ( pStdVPSs == rhs.pStdVPSs ) && + ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) && ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs ); +# endif + } + + bool operator!=( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersAddInfoKHR; + const void * pNext = {}; + uint32_t stdVPSCount = {}; + const StdVideoH265VideoParameterSet * pStdVPSs = {}; + uint32_t stdSPSCount = {}; + const StdVideoH265SequenceParameterSet * pStdSPSs = {}; + uint32_t stdPPSCount = {}; + const StdVideoH265PictureParameterSet * pStdPPSs = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265SessionParametersAddInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH265SessionParametersCreateInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265SessionParametersCreateInfoKHR.html + struct VideoEncodeH265SessionParametersCreateInfoKHR + { + using NativeType = VkVideoEncodeH265SessionParametersCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeH265SessionParametersCreateInfoKHR( uint32_t maxStdVPSCount_ = {}, + uint32_t maxStdSPSCount_ = {}, + uint32_t maxStdPPSCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxStdVPSCount{ maxStdVPSCount_ } + , maxStdSPSCount{ maxStdSPSCount_ } + , maxStdPPSCount{ maxStdPPSCount_ } + , pParametersAddInfo{ pParametersAddInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR + VideoEncodeH265SessionParametersCreateInfoKHR( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265SessionParametersCreateInfoKHR( VkVideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT + : VideoEncodeH265SessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265SessionParametersCreateInfoKHR & operator=( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265SessionParametersCreateInfoKHR & operator=( VkVideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setMaxStdVPSCount( uint32_t maxStdVPSCount_ ) VULKAN_HPP_NOEXCEPT + { + maxStdVPSCount = maxStdVPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT + { + maxStdSPSCount = maxStdSPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT + { + maxStdPPSCount = maxStdPPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & + setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT + { + pParametersAddInfo = pParametersAddInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH265SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxStdVPSCount, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265SessionParametersCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxStdVPSCount == rhs.maxStdVPSCount ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) && + ( maxStdPPSCount == rhs.maxStdPPSCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo ); +# endif + } + + bool operator!=( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersCreateInfoKHR; + const void * pNext = {}; + uint32_t maxStdVPSCount = {}; + uint32_t maxStdSPSCount = {}; + uint32_t maxStdPPSCount = {}; + const 
VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR * pParametersAddInfo = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265SessionParametersCreateInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH265SessionParametersFeedbackInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265SessionParametersFeedbackInfoKHR.html + struct VideoEncodeH265SessionParametersFeedbackInfoKHR + { + using NativeType = VkVideoEncodeH265SessionParametersFeedbackInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersFeedbackInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersFeedbackInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 hasStdVPSOverrides_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hasStdSPSOverrides_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hasStdPPSOverrides_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , hasStdVPSOverrides{ hasStdVPSOverrides_ } + , hasStdSPSOverrides{ hasStdSPSOverrides_ } + , hasStdPPSOverrides{ hasStdPPSOverrides_ } + { + } + + VULKAN_HPP_CONSTEXPR + VideoEncodeH265SessionParametersFeedbackInfoKHR( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265SessionParametersFeedbackInfoKHR( VkVideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265SessionParametersFeedbackInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265SessionParametersFeedbackInfoKHR & operator=( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265SessionParametersFeedbackInfoKHR & operator=( VkVideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeH265SessionParametersFeedbackInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionParametersFeedbackInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionParametersFeedbackInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionParametersFeedbackInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hasStdVPSOverrides, hasStdSPSOverrides, hasStdPPSOverrides ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265SessionParametersFeedbackInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasStdVPSOverrides == rhs.hasStdVPSOverrides ) && + ( hasStdSPSOverrides == rhs.hasStdSPSOverrides ) && ( hasStdPPSOverrides == rhs.hasStdPPSOverrides ); +# endif + } + + bool operator!=( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersFeedbackInfoKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasStdVPSOverrides = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasStdSPSOverrides = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasStdPPSOverrides = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265SessionParametersFeedbackInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeH265SessionParametersGetInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeH265SessionParametersGetInfoKHR.html + struct VideoEncodeH265SessionParametersGetInfoKHR + { + using NativeType = VkVideoEncodeH265SessionParametersGetInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersGetInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersGetInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 writeStdVPS_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS_ = {}, + uint32_t stdVPSId_ = {}, + uint32_t stdSPSId_ = {}, + uint32_t stdPPSId_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , writeStdVPS{ writeStdVPS_ } + , writeStdSPS{ writeStdSPS_ } + , writeStdPPS{ writeStdPPS_ } + , stdVPSId{ stdVPSId_ } + , stdSPSId{ stdSPSId_ } + , stdPPSId{ stdPPSId_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersGetInfoKHR( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265SessionParametersGetInfoKHR( VkVideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265SessionParametersGetInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265SessionParametersGetInfoKHR & operator=( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265SessionParametersGetInfoKHR & operator=( VkVideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setWriteStdVPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdVPS_ ) VULKAN_HPP_NOEXCEPT + { + writeStdVPS = writeStdVPS_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setWriteStdSPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS_ ) VULKAN_HPP_NOEXCEPT + { + writeStdSPS = writeStdSPS_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setWriteStdPPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS_ ) VULKAN_HPP_NOEXCEPT + { + writeStdPPS = writeStdPPS_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setStdVPSId( uint32_t stdVPSId_ ) VULKAN_HPP_NOEXCEPT + { + stdVPSId = stdVPSId_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setStdSPSId( uint32_t stdSPSId_ ) 
VULKAN_HPP_NOEXCEPT + { + stdSPSId = stdSPSId_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setStdPPSId( uint32_t stdPPSId_ ) VULKAN_HPP_NOEXCEPT + { + stdPPSId = stdPPSId_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH265SessionParametersGetInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionParametersGetInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionParametersGetInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionParametersGetInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, writeStdVPS, writeStdSPS, writeStdPPS, stdVPSId, stdSPSId, stdPPSId ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265SessionParametersGetInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( writeStdVPS == rhs.writeStdVPS ) && ( writeStdSPS == rhs.writeStdSPS ) && + ( writeStdPPS == rhs.writeStdPPS ) && ( stdVPSId == rhs.stdVPSId ) && ( stdSPSId == rhs.stdSPSId ) && ( stdPPSId == rhs.stdPPSId ); +# endif + } + + bool operator!=( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersGetInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 writeStdVPS = {}; + VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS = {}; + VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS = {}; + uint32_t stdVPSId = {}; + uint32_t stdSPSId = {}; + uint32_t stdPPSId = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265SessionParametersGetInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeInfoKHR.html + struct VideoEncodeInfoKHR + { + using NativeType = VkVideoEncodeInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstBufferOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstBufferRange_ = {}, + VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ = {}, + uint32_t referenceSlotCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, + uint32_t precedingExternallyEncodedBytes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , dstBuffer{ dstBuffer_ } + , dstBufferOffset{ dstBufferOffset_ } + , 
dstBufferRange{ dstBufferRange_ } + , srcPictureResource{ srcPictureResource_ } + , pSetupReferenceSlot{ pSetupReferenceSlot_ } + , referenceSlotCount{ referenceSlotCount_ } + , pReferenceSlots{ pReferenceSlots_ } + , precedingExternallyEncodedBytes{ precedingExternallyEncodedBytes_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeInfoKHR( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : VideoEncodeInfoKHR( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize dstBufferOffset_, + VULKAN_HPP_NAMESPACE::DeviceSize dstBufferRange_, + VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource_, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_, + uint32_t precedingExternallyEncodedBytes_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , dstBuffer( dstBuffer_ ) + , dstBufferOffset( dstBufferOffset_ ) + , dstBufferRange( dstBufferRange_ ) + , srcPictureResource( srcPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( static_cast( referenceSlots_.size() ) ) + , pReferenceSlots( referenceSlots_.data() ) + , precedingExternallyEncodedBytes( precedingExternallyEncodedBytes_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeInfoKHR & operator=( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeInfoKHR & operator=( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT + { + dstBuffer = dstBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstBufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstBufferOffset = dstBufferOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBufferRange( VULKAN_HPP_NAMESPACE::DeviceSize dstBufferRange_ ) VULKAN_HPP_NOEXCEPT + { + dstBufferRange = dstBufferRange_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & + setSrcPictureResource( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & srcPictureResource_ ) VULKAN_HPP_NOEXCEPT + { + srcPictureResource = srcPictureResource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & + setPSetupReferenceSlot( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT + { + pSetupReferenceSlot = pSetupReferenceSlot_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = referenceSlotCount_; + return *this; + } + + 
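// --- Illustrative usage sketch (not part of the generated header) ---------------------------------
// The enhanced-mode constructor above takes the reference slots as an ArrayProxyNoTemporaries and
// derives referenceSlotCount from the range, so the count cannot drift out of sync with
// pReferenceSlots (a matching setReferenceSlots() overload follows below). A minimal sketch,
// assuming the default `vk` namespace, enhanced mode enabled, and handles created elsewhere; the
// reference-slot storage must stay alive until the encode command has been recorded.

#include <vulkan/vulkan.hpp>

#include <vector>

vk::VideoEncodeInfoKHR makeEncodeInfo( vk::Buffer                                         dstBuffer,
                                       vk::DeviceSize                                     dstSize,
                                       vk::VideoPictureResourceInfoKHR const &            srcPicture,
                                       std::vector<vk::VideoReferenceSlotInfoKHR> const & referenceSlots )
{
  return vk::VideoEncodeInfoKHR( {},                // flags
                                 dstBuffer,
                                 0,                 // dstBufferOffset
                                 dstSize,           // dstBufferRange
                                 srcPicture,
                                 nullptr,           // pSetupReferenceSlot: no reconstructed-picture slot
                                 referenceSlots );  // referenceSlotCount / pReferenceSlots derived here
}
// ---------------------------------------------------------------------------------------------------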
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & + setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceSlots = pReferenceSlots_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeInfoKHR & setReferenceSlots( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = static_cast( referenceSlots_.size() ); + pReferenceSlots = referenceSlots_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPrecedingExternallyEncodedBytes( uint32_t precedingExternallyEncodedBytes_ ) VULKAN_HPP_NOEXCEPT + { + precedingExternallyEncodedBytes = precedingExternallyEncodedBytes_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + dstBuffer, + dstBufferOffset, + dstBufferRange, + srcPictureResource, + pSetupReferenceSlot, + referenceSlotCount, + pReferenceSlots, + precedingExternallyEncodedBytes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dstBuffer == rhs.dstBuffer ) && + ( dstBufferOffset == rhs.dstBufferOffset ) && ( dstBufferRange == rhs.dstBufferRange ) && ( srcPictureResource == rhs.srcPictureResource ) && + ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) && ( referenceSlotCount == rhs.referenceSlotCount ) && + ( pReferenceSlots == rhs.pReferenceSlots ) && ( precedingExternallyEncodedBytes == rhs.precedingExternallyEncodedBytes ); +# endif + } + + bool operator!=( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize dstBufferOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize dstBufferRange = {}; + VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot = {}; + uint32_t referenceSlotCount = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {}; + uint32_t precedingExternallyEncodedBytes = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeQualityLevelInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeQualityLevelInfoKHR.html + struct 
VideoEncodeQualityLevelInfoKHR
+  {
+    using NativeType = VkVideoEncodeQualityLevelInfoKHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeQualityLevelInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelInfoKHR( uint32_t qualityLevel_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , qualityLevel{ qualityLevel_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelInfoKHR( VideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeQualityLevelInfoKHR( VkVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeQualityLevelInfoKHR( *reinterpret_cast<VideoEncodeQualityLevelInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoEncodeQualityLevelInfoKHR & operator=( VideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    VideoEncodeQualityLevelInfoKHR & operator=( VkVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeQualityLevelInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeQualityLevelInfoKHR & setQualityLevel( uint32_t qualityLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qualityLevel = qualityLevel_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkVideoEncodeQualityLevelInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeQualityLevelInfoKHR *>( this );
+    }
+
+    operator VkVideoEncodeQualityLevelInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeQualityLevelInfoKHR *>( this );
+    }
+
+    operator VkVideoEncodeQualityLevelInfoKHR const *() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<const VkVideoEncodeQualityLevelInfoKHR *>( this );
+    }
+
+    operator VkVideoEncodeQualityLevelInfoKHR *() VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<VkVideoEncodeQualityLevelInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, qualityLevel );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeQualityLevelInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( qualityLevel == rhs.qualityLevel );
+#  endif
+    }
+
+    bool operator!=( VideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType        = StructureType::eVideoEncodeQualityLevelInfoKHR;
+    const void *                        pNext        = {};
+    uint32_t                            qualityLevel = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeQualityLevelInfoKHR>
+  {
+    using Type = VideoEncodeQualityLevelInfoKHR;
+  };
+
+  // wrapper struct for struct VkVideoEncodeQualityLevelPropertiesKHR, see
+  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeQualityLevelPropertiesKHR.html
+  struct VideoEncodeQualityLevelPropertiesKHR
+  {
+    using NativeType = VkVideoEncodeQualityLevelPropertiesKHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR
StructureType structureType = StructureType::eVideoEncodeQualityLevelPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR preferredRateControlMode_ = + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eDefault, + uint32_t preferredRateControlLayerCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , preferredRateControlMode{ preferredRateControlMode_ } + , preferredRateControlLayerCount{ preferredRateControlLayerCount_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelPropertiesKHR( VideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeQualityLevelPropertiesKHR( VkVideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeQualityLevelPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeQualityLevelPropertiesKHR & operator=( VideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeQualityLevelPropertiesKHR & operator=( VkVideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeQualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeQualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeQualityLevelPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeQualityLevelPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, preferredRateControlMode, preferredRateControlLayerCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeQualityLevelPropertiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeQualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( preferredRateControlMode == rhs.preferredRateControlMode ) && + ( preferredRateControlLayerCount == rhs.preferredRateControlLayerCount ); +# endif + } + + bool operator!=( VideoEncodeQualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeQualityLevelPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR preferredRateControlMode = + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eDefault; + uint32_t preferredRateControlLayerCount = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeQualityLevelPropertiesKHR; + }; + + // wrapper struct for struct VkVideoEncodeQuantizationMapCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeQuantizationMapCapabilitiesKHR.html + struct VideoEncodeQuantizationMapCapabilitiesKHR + { + using NativeType = 
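// --- Illustrative usage sketch (not part of the generated header) ---------------------------------
// VideoEncodeQualityLevelInfoKHR is an input structure used to select one of the implementation's
// encode quality levels, while VideoEncodeQualityLevelPropertiesKHR above is filled in by the
// driver when querying what a given level prefers. A minimal sketch, assuming the default `vk`
// namespace and that vk::VideoCodingControlInfoKHR and the eEncodeQualityLevel control flag
// (defined elsewhere in this header) are used to apply the selection; the chained structure must
// outlive the returned control info.

#include <vulkan/vulkan.hpp>

vk::VideoCodingControlInfoKHR selectQualityLevel( uint32_t level, vk::VideoEncodeQualityLevelInfoKHR & qualityLevelInfo )
{
  qualityLevelInfo = vk::VideoEncodeQualityLevelInfoKHR{}.setQualityLevel( level );
  return vk::VideoCodingControlInfoKHR{}
    .setFlags( vk::VideoCodingControlFlagBitsKHR::eEncodeQualityLevel )  // required for the chained info to apply
    .setPNext( &qualityLevelInfo );
}
// ---------------------------------------------------------------------------------------------------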
VkVideoEncodeQuantizationMapCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeQuantizationMapCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Extent2D maxQuantizationMapExtent_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxQuantizationMapExtent{ maxQuantizationMapExtent_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapCapabilitiesKHR( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeQuantizationMapCapabilitiesKHR( VkVideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeQuantizationMapCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeQuantizationMapCapabilitiesKHR & operator=( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeQuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeQuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeQuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeQuantizationMapCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeQuantizationMapCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxQuantizationMapExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeQuantizationMapCapabilitiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxQuantizationMapExtent == rhs.maxQuantizationMapExtent ); +# endif + } + + bool operator!=( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeQuantizationMapCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxQuantizationMapExtent = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeQuantizationMapCapabilitiesKHR; + }; + + // wrapper struct for struct VkVideoEncodeQuantizationMapInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeQuantizationMapInfoKHR.html + struct VideoEncodeQuantizationMapInfoKHR + { + using NativeType = VkVideoEncodeQuantizationMapInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeQuantizationMapInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapInfoKHR( VULKAN_HPP_NAMESPACE::ImageView quantizationMap_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D quantizationMapExtent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , quantizationMap{ quantizationMap_ } + , quantizationMapExtent{ quantizationMapExtent_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapInfoKHR( VideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeQuantizationMapInfoKHR( VkVideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeQuantizationMapInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeQuantizationMapInfoKHR & operator=( VideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeQuantizationMapInfoKHR & operator=( VkVideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapInfoKHR & setQuantizationMap( VULKAN_HPP_NAMESPACE::ImageView quantizationMap_ ) VULKAN_HPP_NOEXCEPT + { + quantizationMap = quantizationMap_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapInfoKHR & + setQuantizationMapExtent( VULKAN_HPP_NAMESPACE::Extent2D const & quantizationMapExtent_ ) VULKAN_HPP_NOEXCEPT + { + quantizationMapExtent = quantizationMapExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeQuantizationMapInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeQuantizationMapInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeQuantizationMapInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeQuantizationMapInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, quantizationMap, quantizationMapExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeQuantizationMapInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeQuantizationMapInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( quantizationMap == rhs.quantizationMap ) && + ( quantizationMapExtent == rhs.quantizationMapExtent ); +# endif + } + + bool operator!=( VideoEncodeQuantizationMapInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeQuantizationMapInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView quantizationMap = {}; + VULKAN_HPP_NAMESPACE::Extent2D quantizationMapExtent = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeQuantizationMapInfoKHR; + }; + + // wrapper struct for struct 
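// --- Illustrative usage sketch (not part of the generated header) ---------------------------------
// VideoEncodeQuantizationMapInfoKHR supplies a per-block quantization map for a single encode
// operation; in VK_KHR_video_encode_quantization_map it extends VkVideoEncodeInfoKHR, so it is
// attached through the encode info's pNext chain. A minimal sketch, assuming the default `vk`
// namespace and an image view holding the map created elsewhere; both structures must stay alive
// until the encode command has been recorded.

#include <vulkan/vulkan.hpp>

void attachQuantizationMap( vk::VideoEncodeInfoKHR &                encodeInfo,
                            vk::VideoEncodeQuantizationMapInfoKHR & mapInfo,
                            vk::ImageView                           mapView,
                            vk::Extent2D                            mapExtent )
{
  mapInfo = vk::VideoEncodeQuantizationMapInfoKHR{}
              .setQuantizationMap( mapView )
              .setQuantizationMapExtent( mapExtent );  // extent measured in map texels
  mapInfo.setPNext( encodeInfo.pNext );                // keep anything that is already chained
  encodeInfo.setPNext( &mapInfo );
}
// ---------------------------------------------------------------------------------------------------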
VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR.html + struct VideoEncodeQuantizationMapSessionParametersCreateInfoKHR + { + using NativeType = VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeQuantizationMapSessionParametersCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapSessionParametersCreateInfoKHR( VULKAN_HPP_NAMESPACE::Extent2D quantizationMapTexelSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , quantizationMapTexelSize{ quantizationMapTexelSize_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapSessionParametersCreateInfoKHR( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeQuantizationMapSessionParametersCreateInfoKHR( VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeQuantizationMapSessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeQuantizationMapSessionParametersCreateInfoKHR & + operator=( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeQuantizationMapSessionParametersCreateInfoKHR & + operator=( VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapSessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapSessionParametersCreateInfoKHR & + setQuantizationMapTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & quantizationMapTexelSize_ ) VULKAN_HPP_NOEXCEPT + { + quantizationMapTexelSize = quantizationMapTexelSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, quantizationMapTexelSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == 
rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( quantizationMapTexelSize == rhs.quantizationMapTexelSize ); +# endif + } + + bool operator!=( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeQuantizationMapSessionParametersCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D quantizationMapTexelSize = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeQuantizationMapSessionParametersCreateInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeRateControlLayerInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeRateControlLayerInfoKHR.html + struct VideoEncodeRateControlLayerInfoKHR + { + using NativeType = VkVideoEncodeRateControlLayerInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeRateControlLayerInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR( uint64_t averageBitrate_ = {}, + uint64_t maxBitrate_ = {}, + uint32_t frameRateNumerator_ = {}, + uint32_t frameRateDenominator_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , averageBitrate{ averageBitrate_ } + , maxBitrate{ maxBitrate_ } + , frameRateNumerator{ frameRateNumerator_ } + , frameRateDenominator{ frameRateDenominator_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR( VideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeRateControlLayerInfoKHR( VkVideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeRateControlLayerInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeRateControlLayerInfoKHR & operator=( VideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeRateControlLayerInfoKHR & operator=( VkVideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setAverageBitrate( uint64_t averageBitrate_ ) VULKAN_HPP_NOEXCEPT + { + averageBitrate = averageBitrate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setMaxBitrate( uint64_t maxBitrate_ ) VULKAN_HPP_NOEXCEPT + { + maxBitrate = maxBitrate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setFrameRateNumerator( uint32_t frameRateNumerator_ ) VULKAN_HPP_NOEXCEPT + { + frameRateNumerator = frameRateNumerator_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setFrameRateDenominator( uint32_t frameRateDenominator_ ) VULKAN_HPP_NOEXCEPT + { + frameRateDenominator = frameRateDenominator_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeRateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
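// --- Illustrative usage sketch (not part of the generated header) ---------------------------------
// VideoEncodeQuantizationMapSessionParametersCreateInfoKHR fixes the quantization-map texel size
// for a session-parameters object; it is chained into VkVideoSessionParametersCreateInfoKHR at
// creation time. A minimal sketch, assuming the default `vk` namespace and that
// vk::VideoSessionParametersCreateInfoKHR (defined elsewhere in this header) is being filled in.

#include <vulkan/vulkan.hpp>

void requestQuantizationMaps( vk::VideoSessionParametersCreateInfoKHR &                       createInfo,
                              vk::VideoEncodeQuantizationMapSessionParametersCreateInfoKHR & mapCreateInfo,
                              vk::Extent2D                                                    texelSize )
{
  mapCreateInfo = vk::VideoEncodeQuantizationMapSessionParametersCreateInfoKHR{}.setQuantizationMapTexelSize( texelSize );
  mapCreateInfo.setPNext( createInfo.pNext );  // keep anything that is already chained
  createInfo.setPNext( &mapCreateInfo );
}
// ---------------------------------------------------------------------------------------------------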
VkVideoEncodeRateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeRateControlLayerInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeRateControlLayerInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, averageBitrate, maxBitrate, frameRateNumerator, frameRateDenominator ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeRateControlLayerInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeRateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( averageBitrate == rhs.averageBitrate ) && ( maxBitrate == rhs.maxBitrate ) && + ( frameRateNumerator == rhs.frameRateNumerator ) && ( frameRateDenominator == rhs.frameRateDenominator ); +# endif + } + + bool operator!=( VideoEncodeRateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlLayerInfoKHR; + const void * pNext = {}; + uint64_t averageBitrate = {}; + uint64_t maxBitrate = {}; + uint32_t frameRateNumerator = {}; + uint32_t frameRateDenominator = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeRateControlLayerInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeRateControlInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeRateControlInfoKHR.html + struct VideoEncodeRateControlInfoKHR + { + using NativeType = VkVideoEncodeRateControlInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeRateControlInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeRateControlInfoKHR( + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eDefault, + uint32_t layerCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayers_ = {}, + uint32_t virtualBufferSizeInMs_ = {}, + uint32_t initialVirtualBufferSizeInMs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , rateControlMode{ rateControlMode_ } + , layerCount{ layerCount_ } + , pLayers{ pLayers_ } + , virtualBufferSizeInMs{ virtualBufferSizeInMs_ } + , initialVirtualBufferSizeInMs{ initialVirtualBufferSizeInMs_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeRateControlInfoKHR( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeRateControlInfoKHR( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeRateControlInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeRateControlInfoKHR( + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR 
rateControlMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & layers_, + uint32_t virtualBufferSizeInMs_ = {}, + uint32_t initialVirtualBufferSizeInMs_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , rateControlMode( rateControlMode_ ) + , layerCount( static_cast( layers_.size() ) ) + , pLayers( layers_.data() ) + , virtualBufferSizeInMs( virtualBufferSizeInMs_ ) + , initialVirtualBufferSizeInMs( initialVirtualBufferSizeInMs_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeRateControlInfoKHR & operator=( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeRateControlInfoKHR & operator=( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & + setRateControlMode( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ ) VULKAN_HPP_NOEXCEPT + { + rateControlMode = rateControlMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & + setPLayers( const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayers_ ) VULKAN_HPP_NOEXCEPT + { + pLayers = pLayers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeRateControlInfoKHR & setLayers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & layers_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = static_cast( layers_.size() ); + pLayers = layers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setVirtualBufferSizeInMs( uint32_t virtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT + { + virtualBufferSizeInMs = virtualBufferSizeInMs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setInitialVirtualBufferSizeInMs( uint32_t initialVirtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT + { + initialVirtualBufferSizeInMs = initialVirtualBufferSizeInMs_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeRateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeRateControlInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeRateControlInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeRateControlInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, rateControlMode, layerCount, pLayers, virtualBufferSizeInMs, initialVirtualBufferSizeInMs ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeRateControlInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rateControlMode == rhs.rateControlMode ) && + ( layerCount == rhs.layerCount ) && ( pLayers == rhs.pLayers ) && ( virtualBufferSizeInMs == rhs.virtualBufferSizeInMs ) && + ( initialVirtualBufferSizeInMs == rhs.initialVirtualBufferSizeInMs ); +# endif + } + + bool operator!=( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eDefault; + uint32_t layerCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayers = {}; + uint32_t virtualBufferSizeInMs = {}; + uint32_t initialVirtualBufferSizeInMs = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeRateControlInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeSessionParametersFeedbackInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeSessionParametersFeedbackInfoKHR.html + struct VideoEncodeSessionParametersFeedbackInfoKHR + { + using NativeType = VkVideoEncodeSessionParametersFeedbackInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeSessionParametersFeedbackInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersFeedbackInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 hasOverrides_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , hasOverrides{ hasOverrides_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersFeedbackInfoKHR( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeSessionParametersFeedbackInfoKHR( VkVideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeSessionParametersFeedbackInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeSessionParametersFeedbackInfoKHR & operator=( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeSessionParametersFeedbackInfoKHR & operator=( VkVideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeSessionParametersFeedbackInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeSessionParametersFeedbackInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeSessionParametersFeedbackInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeSessionParametersFeedbackInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( 
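// --- Illustrative usage sketch (not part of the generated header) ---------------------------------
// Rate control is described by one VideoEncodeRateControlInfoKHR plus one
// VideoEncodeRateControlLayerInfoKHR per layer; the ArrayProxy overload of setLayers() keeps
// layerCount and pLayers consistent, and the whole bundle is applied through a video coding
// control command with the encode-rate-control flag set. A minimal sketch, assuming the default
// `vk` namespace, enhanced mode enabled, and a single-layer CBR stream; the layer struct must
// outlive the returned info because pLayers points at it.

#include <vulkan/vulkan.hpp>

vk::VideoEncodeRateControlInfoKHR makeCbrRateControl( vk::VideoEncodeRateControlLayerInfoKHR & layer )
{
  layer = vk::VideoEncodeRateControlLayerInfoKHR{}
            .setAverageBitrate( 5000000 )  // bits per second
            .setMaxBitrate( 5000000 )      // equal to the average for CBR
            .setFrameRateNumerator( 60 )
            .setFrameRateDenominator( 1 );
  return vk::VideoEncodeRateControlInfoKHR{}
    .setRateControlMode( vk::VideoEncodeRateControlModeFlagBitsKHR::eCbr )
    .setLayers( layer )                    // single layer; layerCount / pLayers derived here
    .setVirtualBufferSizeInMs( 2000 )
    .setInitialVirtualBufferSizeInMs( 1000 );
}
// ---------------------------------------------------------------------------------------------------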
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hasOverrides ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeSessionParametersFeedbackInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasOverrides == rhs.hasOverrides ); +# endif + } + + bool operator!=( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeSessionParametersFeedbackInfoKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasOverrides = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeSessionParametersFeedbackInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeSessionParametersGetInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeSessionParametersGetInfoKHR.html + struct VideoEncodeSessionParametersGetInfoKHR + { + using NativeType = VkVideoEncodeSessionParametersGetInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeSessionParametersGetInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersGetInfoKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , videoSessionParameters{ videoSessionParameters_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersGetInfoKHR( VideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeSessionParametersGetInfoKHR( VkVideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeSessionParametersGetInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeSessionParametersGetInfoKHR & operator=( VideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeSessionParametersGetInfoKHR & operator=( VkVideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeSessionParametersGetInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeSessionParametersGetInfoKHR & + setVideoSessionParameters( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ ) VULKAN_HPP_NOEXCEPT + { + videoSessionParameters = videoSessionParameters_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeSessionParametersGetInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeSessionParametersGetInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeSessionParametersGetInfoKHR const *() const 
VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeSessionParametersGetInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, videoSessionParameters ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeSessionParametersGetInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeSessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoSessionParameters == rhs.videoSessionParameters ); +# endif + } + + bool operator!=( VideoEncodeSessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeSessionParametersGetInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeSessionParametersGetInfoKHR; + }; + + // wrapper struct for struct VkVideoEncodeUsageInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEncodeUsageInfoKHR.html + struct VideoEncodeUsageInfoKHR + { + using NativeType = VkVideoEncodeUsageInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeUsageInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeUsageInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR videoUsageHints_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR videoContentHints_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR tuningMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR::eDefault, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , videoUsageHints{ videoUsageHints_ } + , videoContentHints{ videoContentHints_ } + , tuningMode{ tuningMode_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeUsageInfoKHR( VideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeUsageInfoKHR( VkVideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeUsageInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeUsageInfoKHR & operator=( VideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeUsageInfoKHR & operator=( VkVideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setVideoUsageHints( VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR videoUsageHints_ ) VULKAN_HPP_NOEXCEPT + { + videoUsageHints = videoUsageHints_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & + setVideoContentHints( VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR videoContentHints_ ) VULKAN_HPP_NOEXCEPT 
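// --- Illustrative usage sketch (not part of the generated header) ---------------------------------
// Retrieving encoded parameter sets is a two-call idiom around vkGetEncodedVideoSessionParametersKHR:
// the first call (pData == nullptr) reports the required size and fills the feedback structure, the
// second call writes the encoded header bytes. A minimal sketch, assuming the default `vk`
// namespace and that the C-style vulkan.hpp overload mirroring the C entry point is available; the
// codec-specific get/feedback structures (e.g. the H.265 ones above) are chained via pNext.

#include <vulkan/vulkan.hpp>

#include <cstdint>
#include <vector>

std::vector<uint8_t> getEncodedH265Headers( vk::Device device, vk::VideoSessionParametersKHR params )
{
  auto h265GetInfo = vk::VideoEncodeH265SessionParametersGetInfoKHR{}.setWriteStdSPS( VK_TRUE ).setWriteStdPPS( VK_TRUE );
  auto getInfo     = vk::VideoEncodeSessionParametersGetInfoKHR{}.setVideoSessionParameters( params ).setPNext( &h265GetInfo );

  vk::VideoEncodeSessionParametersFeedbackInfoKHR feedback;
  size_t                                          dataSize = 0;
  (void)device.getEncodedVideoSessionParametersKHR( &getInfo, &feedback, &dataSize, nullptr );  // size query

  std::vector<uint8_t> data( dataSize );
  (void)device.getEncodedVideoSessionParametersKHR( &getInfo, &feedback, &dataSize, data.data() );
  return data;  // feedback.hasOverrides reports whether the returned sets differ from what was requested
}
// ---------------------------------------------------------------------------------------------------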
+ { + videoContentHints = videoContentHints_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setTuningMode( VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR tuningMode_ ) VULKAN_HPP_NOEXCEPT + { + tuningMode = tuningMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeUsageInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeUsageInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeUsageInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEncodeUsageInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, videoUsageHints, videoContentHints, tuningMode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeUsageInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoUsageHints == rhs.videoUsageHints ) && ( videoContentHints == rhs.videoContentHints ) && + ( tuningMode == rhs.tuningMode ); +# endif + } + + bool operator!=( VideoEncodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeUsageInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR videoUsageHints = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR videoContentHints = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR tuningMode = VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR::eDefault; + }; + + template <> + struct CppType + { + using Type = VideoEncodeUsageInfoKHR; + }; + + // wrapper struct for struct VkVideoEndCodingInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoEndCodingInfoKHR.html + struct VideoEndCodingInfoKHR + { + using NativeType = VkVideoEndCodingInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEndCodingInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEndCodingInfoKHR( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEndCodingInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEndCodingInfoKHR & operator=( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEndCodingInfoKHR & operator=( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR & setPNext( const void * pNext_ ) 
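// --- Illustrative usage sketch (not part of the generated header) ---------------------------------
// VideoEncodeUsageInfoKHR carries hints only: it does not restrict what the stream may contain, but
// lets the implementation tune itself (e.g. streaming from a camera at low latency). It extends the
// video profile, so it is chained into vk::VideoProfileInfoKHR (defined elsewhere in this header).
// A minimal sketch, assuming the default `vk` namespace; the usage struct must outlive the profile.

#include <vulkan/vulkan.hpp>

void addStreamingHints( vk::VideoProfileInfoKHR & profile, vk::VideoEncodeUsageInfoKHR & usage )
{
  usage = vk::VideoEncodeUsageInfoKHR{}
            .setVideoUsageHints( vk::VideoEncodeUsageFlagBitsKHR::eStreaming )
            .setVideoContentHints( vk::VideoEncodeContentFlagBitsKHR::eCamera )
            .setTuningMode( vk::VideoEncodeTuningModeKHR::eLowLatency );
  usage.setPNext( profile.pNext );  // keep anything that is already chained
  profile.setPNext( &usage );
}
// ---------------------------------------------------------------------------------------------------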
VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEndCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEndCodingInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEndCodingInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoEndCodingInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEndCodingInfoKHR const & ) const = default; +#else + bool operator==( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEndCodingInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags = {}; + }; + + template <> + struct CppType + { + using Type = VideoEndCodingInfoKHR; + }; + + // wrapper struct for struct VkVideoFormatAV1QuantizationMapPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoFormatAV1QuantizationMapPropertiesKHR.html + struct VideoFormatAV1QuantizationMapPropertiesKHR + { + using NativeType = VkVideoFormatAV1QuantizationMapPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatAv1QuantizationMapPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoFormatAV1QuantizationMapPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR compatibleSuperblockSizes_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , compatibleSuperblockSizes{ compatibleSuperblockSizes_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoFormatAV1QuantizationMapPropertiesKHR( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoFormatAV1QuantizationMapPropertiesKHR( VkVideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoFormatAV1QuantizationMapPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoFormatAV1QuantizationMapPropertiesKHR & operator=( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoFormatAV1QuantizationMapPropertiesKHR & operator=( VkVideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoFormatAV1QuantizationMapPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
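// --- Illustrative usage sketch (not part of the generated header) ---------------------------------
// Encode work is bracketed between begin/end video coding commands; VideoEndCodingInfoKHR currently
// carries nothing beyond reserved flags. A minimal sketch, assuming the default `vk` namespace, a
// command buffer in the recording state, and a vk::VideoBeginCodingInfoKHR (defined elsewhere in
// this header) that already names the session and the bound reference slots.

#include <vulkan/vulkan.hpp>

void recordEncode( vk::CommandBuffer                   cmd,
                   vk::VideoBeginCodingInfoKHR const & beginInfo,
                   vk::VideoEncodeInfoKHR const &      encodeInfo )
{
  cmd.beginVideoCodingKHR( beginInfo );
  cmd.encodeVideoKHR( encodeInfo );                      // one encode operation per call
  cmd.endVideoCodingKHR( vk::VideoEndCodingInfoKHR{} );  // flags are reserved; the default is fine
}
// ---------------------------------------------------------------------------------------------------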
VkVideoFormatAV1QuantizationMapPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoFormatAV1QuantizationMapPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoFormatAV1QuantizationMapPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, compatibleSuperblockSizes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoFormatAV1QuantizationMapPropertiesKHR const & ) const = default; +#else + bool operator==( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compatibleSuperblockSizes == rhs.compatibleSuperblockSizes ); +# endif + } + + bool operator!=( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatAv1QuantizationMapPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR compatibleSuperblockSizes = {}; + }; + + template <> + struct CppType + { + using Type = VideoFormatAV1QuantizationMapPropertiesKHR; + }; + + // wrapper struct for struct VkVideoFormatH265QuantizationMapPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoFormatH265QuantizationMapPropertiesKHR.html + struct VideoFormatH265QuantizationMapPropertiesKHR + { + using NativeType = VkVideoFormatH265QuantizationMapPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatH265QuantizationMapPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoFormatH265QuantizationMapPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR compatibleCtbSizes_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , compatibleCtbSizes{ compatibleCtbSizes_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoFormatH265QuantizationMapPropertiesKHR( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoFormatH265QuantizationMapPropertiesKHR( VkVideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoFormatH265QuantizationMapPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoFormatH265QuantizationMapPropertiesKHR & operator=( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoFormatH265QuantizationMapPropertiesKHR & operator=( VkVideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoFormatH265QuantizationMapPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoFormatH265QuantizationMapPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoFormatH265QuantizationMapPropertiesKHR const *() const 
VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoFormatH265QuantizationMapPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, compatibleCtbSizes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoFormatH265QuantizationMapPropertiesKHR const & ) const = default; +#else + bool operator==( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compatibleCtbSizes == rhs.compatibleCtbSizes ); +# endif + } + + bool operator!=( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatH265QuantizationMapPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR compatibleCtbSizes = {}; + }; + + template <> + struct CppType + { + using Type = VideoFormatH265QuantizationMapPropertiesKHR; + }; + + // wrapper struct for struct VkVideoFormatPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoFormatPropertiesKHR.html + struct VideoFormatPropertiesKHR + { + using NativeType = VkVideoFormatPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ComponentMapping componentMapping_ = {}, + VULKAN_HPP_NAMESPACE::ImageCreateFlags imageCreateFlags_ = {}, + VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::ImageTiling imageTiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsageFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , format{ format_ } + , componentMapping{ componentMapping_ } + , imageCreateFlags{ imageCreateFlags_ } + , imageType{ imageType_ } + , imageTiling{ imageTiling_ } + , imageUsageFlags{ imageUsageFlags_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoFormatPropertiesKHR( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoFormatPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoFormatPropertiesKHR & operator=( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoFormatPropertiesKHR & operator=( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoFormatPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoFormatPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoFormatPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + + operator VkVideoFormatPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, format, componentMapping, imageCreateFlags, imageType, imageTiling, imageUsageFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoFormatPropertiesKHR const & ) const = default; +#else + bool operator==( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( componentMapping == rhs.componentMapping ) && + ( imageCreateFlags == rhs.imageCreateFlags ) && ( imageType == rhs.imageType ) && ( imageTiling == rhs.imageTiling ) && + ( imageUsageFlags == rhs.imageUsageFlags ); +# endif + } + + bool operator!=( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ComponentMapping componentMapping = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags imageCreateFlags = {}; + VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::ImageTiling imageTiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsageFlags = {}; + }; + + template <> + struct CppType + { + using Type = VideoFormatPropertiesKHR; + }; + + // wrapper struct for struct VkVideoFormatQuantizationMapPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoFormatQuantizationMapPropertiesKHR.html + struct VideoFormatQuantizationMapPropertiesKHR + { + using NativeType = VkVideoFormatQuantizationMapPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatQuantizationMapPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoFormatQuantizationMapPropertiesKHR( VULKAN_HPP_NAMESPACE::Extent2D quantizationMapTexelSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , quantizationMapTexelSize{ quantizationMapTexelSize_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoFormatQuantizationMapPropertiesKHR( VideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoFormatQuantizationMapPropertiesKHR( VkVideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoFormatQuantizationMapPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoFormatQuantizationMapPropertiesKHR & operator=( VideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoFormatQuantizationMapPropertiesKHR & operator=( VkVideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoFormatQuantizationMapPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkVideoFormatQuantizationMapPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoFormatQuantizationMapPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoFormatQuantizationMapPropertiesKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, quantizationMapTexelSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoFormatQuantizationMapPropertiesKHR const & ) const = default; +#else + bool operator==( VideoFormatQuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( quantizationMapTexelSize == rhs.quantizationMapTexelSize ); +# endif + } + + bool operator!=( VideoFormatQuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatQuantizationMapPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D quantizationMapTexelSize = {}; + }; + + template <> + struct CppType + { + using Type = VideoFormatQuantizationMapPropertiesKHR; + }; + + // wrapper struct for struct VkVideoInlineQueryInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoInlineQueryInfoKHR.html + struct VideoInlineQueryInfoKHR + { + using NativeType = VkVideoInlineQueryInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoInlineQueryInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoInlineQueryInfoKHR( VULKAN_HPP_NAMESPACE::QueryPool queryPool_ = {}, + uint32_t firstQuery_ = {}, + uint32_t queryCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , queryPool{ queryPool_ } + , firstQuery{ firstQuery_ } + , queryCount{ queryCount_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoInlineQueryInfoKHR( VideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoInlineQueryInfoKHR( VkVideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoInlineQueryInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoInlineQueryInfoKHR & operator=( VideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoInlineQueryInfoKHR & operator=( VkVideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & setQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool_ ) VULKAN_HPP_NOEXCEPT + { + queryPool = queryPool_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & setFirstQuery( uint32_t firstQuery_ ) VULKAN_HPP_NOEXCEPT + { + firstQuery = firstQuery_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & 
setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT + { + queryCount = queryCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoInlineQueryInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoInlineQueryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoInlineQueryInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoInlineQueryInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, queryPool, firstQuery, queryCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoInlineQueryInfoKHR const & ) const = default; +#else + bool operator==( VideoInlineQueryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queryPool == rhs.queryPool ) && ( firstQuery == rhs.firstQuery ) && + ( queryCount == rhs.queryCount ); +# endif + } + + bool operator!=( VideoInlineQueryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoInlineQueryInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueryPool queryPool = {}; + uint32_t firstQuery = {}; + uint32_t queryCount = {}; + }; + + template <> + struct CppType + { + using Type = VideoInlineQueryInfoKHR; + }; + + // wrapper struct for struct VkVideoProfileListInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoProfileListInfoKHR.html + struct VideoProfileListInfoKHR + { + using NativeType = VkVideoProfileListInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileListInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoProfileListInfoKHR( uint32_t profileCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pProfiles_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , profileCount{ profileCount_ } + , pProfiles{ pProfiles_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoProfileListInfoKHR( VideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoProfileListInfoKHR( VkVideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoProfileListInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoProfileListInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & profiles_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), profileCount( static_cast( profiles_.size() ) ), pProfiles( profiles_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoProfileListInfoKHR & operator=( VideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoProfileListInfoKHR & operator=( VkVideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
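+    // A minimal usage sketch (illustrative only, not part of the generated header; `decodeProfile`
+    // and `encodeProfile` stand for VideoProfileInfoKHR values the application has already filled in):
+    //   VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR profiles[] = { decodeProfile, encodeProfile };
+    //   VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR profileList{};
+    //   profileList.setProfileCount( 2 ).setPProfiles( profiles );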
VULKAN_HPP_CONSTEXPR_14 VideoProfileListInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoProfileListInfoKHR & setProfileCount( uint32_t profileCount_ ) VULKAN_HPP_NOEXCEPT + { + profileCount = profileCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoProfileListInfoKHR & setPProfiles( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pProfiles_ ) VULKAN_HPP_NOEXCEPT + { + pProfiles = pProfiles_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoProfileListInfoKHR & + setProfiles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & profiles_ ) VULKAN_HPP_NOEXCEPT + { + profileCount = static_cast( profiles_.size() ); + pProfiles = profiles_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoProfileListInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoProfileListInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoProfileListInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoProfileListInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, profileCount, pProfiles ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoProfileListInfoKHR const & ) const = default; +#else + bool operator==( VideoProfileListInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( profileCount == rhs.profileCount ) && ( pProfiles == rhs.pProfiles ); +# endif + } + + bool operator!=( VideoProfileListInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfileListInfoKHR; + const void * pNext = {}; + uint32_t profileCount = {}; + const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pProfiles = {}; + }; + + template <> + struct CppType + { + using Type = VideoProfileListInfoKHR; + }; + + // wrapper struct for struct VkVideoSessionCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionCreateInfoKHR.html + struct VideoSessionCreateInfoKHR + { + using NativeType = VkVideoSessionCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR( uint32_t queueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ = {}, + const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ = {}, + VULKAN_HPP_NAMESPACE::Format pictureFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent_ = {}, + VULKAN_HPP_NAMESPACE::Format referencePictureFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint32_t maxDpbSlots_ = {}, + uint32_t maxActiveReferencePictures_ = {}, + const VULKAN_HPP_NAMESPACE::ExtensionProperties * 
pStdHeaderVersion_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , queueFamilyIndex{ queueFamilyIndex_ } + , flags{ flags_ } + , pVideoProfile{ pVideoProfile_ } + , pictureFormat{ pictureFormat_ } + , maxCodedExtent{ maxCodedExtent_ } + , referencePictureFormat{ referencePictureFormat_ } + , maxDpbSlots{ maxDpbSlots_ } + , maxActiveReferencePictures{ maxActiveReferencePictures_ } + , pStdHeaderVersion{ pStdHeaderVersion_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoSessionCreateInfoKHR( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoSessionCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoSessionCreateInfoKHR & operator=( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoSessionCreateInfoKHR & operator=( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPVideoProfile( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ ) VULKAN_HPP_NOEXCEPT + { + pVideoProfile = pVideoProfile_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPictureFormat( VULKAN_HPP_NAMESPACE::Format pictureFormat_ ) VULKAN_HPP_NOEXCEPT + { + pictureFormat = pictureFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & maxCodedExtent_ ) VULKAN_HPP_NOEXCEPT + { + maxCodedExtent = maxCodedExtent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setReferencePictureFormat( VULKAN_HPP_NAMESPACE::Format referencePictureFormat_ ) VULKAN_HPP_NOEXCEPT + { + referencePictureFormat = referencePictureFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxDpbSlots( uint32_t maxDpbSlots_ ) VULKAN_HPP_NOEXCEPT + { + maxDpbSlots = maxDpbSlots_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxActiveReferencePictures( uint32_t maxActiveReferencePictures_ ) VULKAN_HPP_NOEXCEPT + { + maxActiveReferencePictures = maxActiveReferencePictures_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & + setPStdHeaderVersion( const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion_ ) VULKAN_HPP_NOEXCEPT + { + pStdHeaderVersion = pStdHeaderVersion_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoSessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoSessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoSessionCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return 
reinterpret_cast( this ); + } + + operator VkVideoSessionCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + queueFamilyIndex, + flags, + pVideoProfile, + pictureFormat, + maxCodedExtent, + referencePictureFormat, + maxDpbSlots, + maxActiveReferencePictures, + pStdHeaderVersion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoSessionCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( flags == rhs.flags ) && + ( pVideoProfile == rhs.pVideoProfile ) && ( pictureFormat == rhs.pictureFormat ) && ( maxCodedExtent == rhs.maxCodedExtent ) && + ( referencePictureFormat == rhs.referencePictureFormat ) && ( maxDpbSlots == rhs.maxDpbSlots ) && + ( maxActiveReferencePictures == rhs.maxActiveReferencePictures ) && ( pStdHeaderVersion == rhs.pStdHeaderVersion ); +# endif + } + + bool operator!=( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionCreateInfoKHR; + const void * pNext = {}; + uint32_t queueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags = {}; + const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile = {}; + VULKAN_HPP_NAMESPACE::Format pictureFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent = {}; + VULKAN_HPP_NAMESPACE::Format referencePictureFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t maxDpbSlots = {}; + uint32_t maxActiveReferencePictures = {}; + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion = {}; + }; + + template <> + struct CppType + { + using Type = VideoSessionCreateInfoKHR; + }; + + // wrapper struct for struct VkVideoSessionMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionMemoryRequirementsKHR.html + struct VideoSessionMemoryRequirementsKHR + { + using NativeType = VkVideoSessionMemoryRequirementsKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionMemoryRequirementsKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoSessionMemoryRequirementsKHR( uint32_t memoryBindIndex_ = {}, + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryBindIndex{ memoryBindIndex_ } + , memoryRequirements{ memoryRequirements_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoSessionMemoryRequirementsKHR( VideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoSessionMemoryRequirementsKHR( VkVideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoSessionMemoryRequirementsKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoSessionMemoryRequirementsKHR & operator=( VideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
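+    // Note: this structure only carries data back to the application (it is filled in by the
+    // vkGetVideoSessionMemoryRequirementsKHR query), which is presumably why no setters are
+    // generated for it; only the conversion operators and comparisons below are provided.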
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoSessionMemoryRequirementsKHR & operator=( VkVideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoSessionMemoryRequirementsKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoSessionMemoryRequirementsKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoSessionMemoryRequirementsKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoSessionMemoryRequirementsKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryBindIndex, memoryRequirements ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoSessionMemoryRequirementsKHR const & ) const = default; +#else + bool operator==( VideoSessionMemoryRequirementsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) && ( memoryRequirements == rhs.memoryRequirements ); +# endif + } + + bool operator!=( VideoSessionMemoryRequirementsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionMemoryRequirementsKHR; + void * pNext = {}; + uint32_t memoryBindIndex = {}; + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {}; + }; + + template <> + struct CppType + { + using Type = VideoSessionMemoryRequirementsKHR; + }; + + // wrapper struct for struct VkVideoSessionParametersCreateInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionParametersCreateInfoKHR.html + struct VideoSessionParametersCreateInfoKHR + { + using NativeType = VkVideoSessionParametersCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionParametersCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , videoSessionParametersTemplate{ videoSessionParametersTemplate_ } + , videoSession{ videoSession_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoSessionParametersCreateInfoKHR( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoSessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoSessionParametersCreateInfoKHR & operator=( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoSessionParametersCreateInfoKHR & operator=( VkVideoSessionParametersCreateInfoKHR 
const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & + setVideoSessionParametersTemplate( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ ) VULKAN_HPP_NOEXCEPT + { + videoSessionParametersTemplate = videoSessionParametersTemplate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT + { + videoSession = videoSession_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoSessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoSessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoSessionParametersCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoSessionParametersCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, videoSessionParametersTemplate, videoSession ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoSessionParametersCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( videoSessionParametersTemplate == rhs.videoSessionParametersTemplate ) && ( videoSession == rhs.videoSession ); +# endif + } + + bool operator!=( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate = {}; + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {}; + }; + + template <> + struct CppType + { + using Type = VideoSessionParametersCreateInfoKHR; + }; + + // wrapper struct for struct VkVideoSessionParametersUpdateInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoSessionParametersUpdateInfoKHR.html + struct VideoSessionParametersUpdateInfoKHR + { + using NativeType = VkVideoSessionParametersUpdateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionParametersUpdateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
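+    // A hedged usage sketch (illustrative only): the Vulkan video extensions expect the counter to
+    // grow by one with every parameters update, with `previousCount` being a value the application
+    // tracks itself:
+    //   auto updateInfo = VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR{}
+    //                       .setUpdateSequenceCount( previousCount + 1 );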
VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( uint32_t updateSequenceCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , updateSequenceCount{ updateSequenceCount_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoSessionParametersUpdateInfoKHR( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoSessionParametersUpdateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoSessionParametersUpdateInfoKHR & operator=( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoSessionParametersUpdateInfoKHR & operator=( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR & setUpdateSequenceCount( uint32_t updateSequenceCount_ ) VULKAN_HPP_NOEXCEPT + { + updateSequenceCount = updateSequenceCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoSessionParametersUpdateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoSessionParametersUpdateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoSessionParametersUpdateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkVideoSessionParametersUpdateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, updateSequenceCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoSessionParametersUpdateInfoKHR const & ) const = default; +#else + bool operator==( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( updateSequenceCount == rhs.updateSequenceCount ); +# endif + } + + bool operator!=( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersUpdateInfoKHR; + const void * pNext = {}; + uint32_t updateSequenceCount = {}; + }; + + template <> + struct CppType + { + using Type = VideoSessionParametersUpdateInfoKHR; + }; + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + // wrapper struct for struct VkWaylandSurfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWaylandSurfaceCreateInfoKHR.html + struct WaylandSurfaceCreateInfoKHR + { + using NativeType = VkWaylandSurfaceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWaylandSurfaceCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
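+    // A minimal usage sketch (illustrative only; `wlDisplay`, `wlSurface` and `instance` are assumed
+    // to come from the application's Wayland and Vulkan setup):
+    //   VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR surfaceInfo{};
+    //   surfaceInfo.setDisplay( wlDisplay ).setSurface( wlSurface );
+    //   auto surface = instance.createWaylandSurfaceKHR( surfaceInfo );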
VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {}, + struct wl_display * display_ = {}, + struct wl_surface * surface_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , display{ display_ } + , surface{ surface_ } + { + } + + VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : WaylandSurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + WaylandSurfaceCreateInfoKHR & operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + WaylandSurfaceCreateInfoKHR & operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setDisplay( struct wl_display * display_ ) VULKAN_HPP_NOEXCEPT + { + display = display_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setSurface( struct wl_surface * surface_ ) VULKAN_HPP_NOEXCEPT + { + surface = surface_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkWaylandSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWaylandSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWaylandSurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkWaylandSurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, display, surface ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WaylandSurfaceCreateInfoKHR const & ) const = default; +# else + bool operator==( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( display == rhs.display ) && ( surface == rhs.surface ); +# endif + } + + bool operator!=( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags = {}; + struct wl_display * display = {}; + struct wl_surface * surface = {}; + }; + + template <> + struct CppType + { + using Type = WaylandSurfaceCreateInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + // wrapper struct for struct 
VkWin32KeyedMutexAcquireReleaseInfoKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkWin32KeyedMutexAcquireReleaseInfoKHR.html + struct Win32KeyedMutexAcquireReleaseInfoKHR + { + using NativeType = VkWin32KeyedMutexAcquireReleaseInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {}, + const uint64_t * pAcquireKeys_ = {}, + const uint32_t * pAcquireTimeouts_ = {}, + uint32_t releaseCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {}, + const uint64_t * pReleaseKeys_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , acquireCount{ acquireCount_ } + , pAcquireSyncs{ pAcquireSyncs_ } + , pAcquireKeys{ pAcquireKeys_ } + , pAcquireTimeouts{ pAcquireTimeouts_ } + , releaseCount{ releaseCount_ } + , pReleaseSyncs{ pReleaseSyncs_ } + , pReleaseKeys{ pReleaseKeys_ } + { + } + + VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : Win32KeyedMutexAcquireReleaseInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireSyncs_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireKeys_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireTimeouts_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseSyncs_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseKeys_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , acquireCount( static_cast( acquireSyncs_.size() ) ) + , pAcquireSyncs( acquireSyncs_.data() ) + , pAcquireKeys( acquireKeys_.data() ) + , pAcquireTimeouts( acquireTimeouts_.data() ) + , releaseCount( static_cast( releaseSyncs_.size() ) ) + , pReleaseSyncs( releaseSyncs_.data() ) + , pReleaseKeys( releaseKeys_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() ); + VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeouts_.size() ); + VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeouts_.size() ); +# else + if ( acquireSyncs_.size() != acquireKeys_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireKeys_.size()" ); + } + if ( acquireSyncs_.size() != acquireTimeouts_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireTimeouts_.size()" ); + } + if ( acquireKeys_.size() != acquireTimeouts_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireKeys_.size() != acquireTimeouts_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() ); +# else + 
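+        // As with the acquire-side arrays above, releaseSyncs_ and releaseKeys_ must be of equal
+        // length; a mismatch is reported as a LogicError here (the VULKAN_HPP_NO_EXCEPTIONS path
+        // asserts instead).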
if ( releaseSyncs_.size() != releaseKeys_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: releaseSyncs_.size() != releaseKeys_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + Win32KeyedMutexAcquireReleaseInfoKHR & operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + Win32KeyedMutexAcquireReleaseInfoKHR & operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = acquireCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & + setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT + { + pAcquireSyncs = pAcquireSyncs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoKHR & + setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireSyncs_.size() ); + pAcquireSyncs = acquireSyncs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT + { + pAcquireKeys = pAcquireKeys_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoKHR & + setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireKeys_.size() ); + pAcquireKeys = acquireKeys_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t * pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT + { + pAcquireTimeouts = pAcquireTimeouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoKHR & + setAcquireTimeouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireTimeouts_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireTimeouts_.size() ); + pAcquireTimeouts = acquireTimeouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT + { + releaseCount = releaseCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & + setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT + { + pReleaseSyncs = pReleaseSyncs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoKHR & + setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT + { + releaseCount = static_cast( releaseSyncs_.size() ); + pReleaseSyncs = 
releaseSyncs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT + { + pReleaseKeys = pReleaseKeys_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoKHR & + setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT + { + releaseCount = static_cast( releaseKeys_.size() ); + pReleaseKeys = releaseKeys_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkWin32KeyedMutexAcquireReleaseInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWin32KeyedMutexAcquireReleaseInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkWin32KeyedMutexAcquireReleaseInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, acquireCount, pAcquireSyncs, pAcquireKeys, pAcquireTimeouts, releaseCount, pReleaseSyncs, pReleaseKeys ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Win32KeyedMutexAcquireReleaseInfoKHR const & ) const = default; +# else + bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) && ( pAcquireSyncs == rhs.pAcquireSyncs ) && + ( pAcquireKeys == rhs.pAcquireKeys ) && ( pAcquireTimeouts == rhs.pAcquireTimeouts ) && ( releaseCount == rhs.releaseCount ) && + ( pReleaseSyncs == rhs.pReleaseSyncs ) && ( pReleaseKeys == rhs.pReleaseKeys ); +# endif + } + + bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR; + const void * pNext = {}; + uint32_t acquireCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {}; + const uint64_t * pAcquireKeys = {}; + const uint32_t * pAcquireTimeouts = {}; + uint32_t releaseCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {}; + const uint64_t * pReleaseKeys = {}; + }; + + template <> + struct CppType + { + using Type = Win32KeyedMutexAcquireReleaseInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + // wrapper struct for struct VkWin32KeyedMutexAcquireReleaseInfoNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkWin32KeyedMutexAcquireReleaseInfoNV.html + struct Win32KeyedMutexAcquireReleaseInfoNV + { + using NativeType = VkWin32KeyedMutexAcquireReleaseInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {}, + const uint64_t * pAcquireKeys_ = {}, + const uint32_t * pAcquireTimeoutMilliseconds_ = {}, + uint32_t releaseCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {}, + const uint64_t * pReleaseKeys_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , acquireCount{ acquireCount_ } + , pAcquireSyncs{ pAcquireSyncs_ } + , pAcquireKeys{ pAcquireKeys_ } + , pAcquireTimeoutMilliseconds{ pAcquireTimeoutMilliseconds_ } + , releaseCount{ releaseCount_ } + , pReleaseSyncs{ pReleaseSyncs_ } + , pReleaseKeys{ pReleaseKeys_ } + { + } + + VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : Win32KeyedMutexAcquireReleaseInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireSyncs_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireKeys_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireTimeoutMilliseconds_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseSyncs_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseKeys_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , acquireCount( static_cast( acquireSyncs_.size() ) ) + , pAcquireSyncs( acquireSyncs_.data() ) + , pAcquireKeys( acquireKeys_.data() ) + , pAcquireTimeoutMilliseconds( acquireTimeoutMilliseconds_.data() ) + , releaseCount( static_cast( releaseSyncs_.size() ) ) + , pReleaseSyncs( releaseSyncs_.data() ) + , pReleaseKeys( releaseKeys_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() ); + VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeoutMilliseconds_.size() ); + VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeoutMilliseconds_.size() ); +# else + if ( acquireSyncs_.size() != acquireKeys_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireKeys_.size()" ); + } + if ( acquireSyncs_.size() != acquireTimeoutMilliseconds_.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireTimeoutMilliseconds_.size()" ); + } + if ( acquireKeys_.size() != acquireTimeoutMilliseconds_.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireKeys_.size() != acquireTimeoutMilliseconds_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() ); +# else + if ( releaseSyncs_.size() != releaseKeys_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: releaseSyncs_.size() != releaseKeys_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + Win32KeyedMutexAcquireReleaseInfoNV & operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + Win32KeyedMutexAcquireReleaseInfoNV & operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = acquireCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & + setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT + { + pAcquireSyncs = pAcquireSyncs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoNV & + setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireSyncs_.size() ); + pAcquireSyncs = acquireSyncs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT + { + pAcquireKeys = pAcquireKeys_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoNV & + setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireKeys_.size() ); + pAcquireKeys = acquireKeys_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & + setPAcquireTimeoutMilliseconds( const uint32_t * pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT + { + pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoNV & + setAcquireTimeoutMilliseconds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireTimeoutMilliseconds_.size() ); + pAcquireTimeoutMilliseconds = acquireTimeoutMilliseconds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT + { + releaseCount = releaseCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & + setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT + { + pReleaseSyncs = pReleaseSyncs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoNV & + setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT + { + releaseCount = static_cast( releaseSyncs_.size() ); + pReleaseSyncs = releaseSyncs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT + { + pReleaseKeys = pReleaseKeys_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + 
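+    // Like the other ArrayProxy-based setters in this struct, the overload below derives releaseCount
+    // from the proxy's size, so no separate setReleaseCount() call is needed when the release keys and
+    // syncs are supplied this way.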
Win32KeyedMutexAcquireReleaseInfoNV & + setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT + { + releaseCount = static_cast( releaseKeys_.size() ); + pReleaseKeys = releaseKeys_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkWin32KeyedMutexAcquireReleaseInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWin32KeyedMutexAcquireReleaseInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWin32KeyedMutexAcquireReleaseInfoNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkWin32KeyedMutexAcquireReleaseInfoNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, acquireCount, pAcquireSyncs, pAcquireKeys, pAcquireTimeoutMilliseconds, releaseCount, pReleaseSyncs, pReleaseKeys ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Win32KeyedMutexAcquireReleaseInfoNV const & ) const = default; +# else + bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) && ( pAcquireSyncs == rhs.pAcquireSyncs ) && + ( pAcquireKeys == rhs.pAcquireKeys ) && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds ) && + ( releaseCount == rhs.releaseCount ) && ( pReleaseSyncs == rhs.pReleaseSyncs ) && ( pReleaseKeys == rhs.pReleaseKeys ); +# endif + } + + bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV; + const void * pNext = {}; + uint32_t acquireCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {}; + const uint64_t * pAcquireKeys = {}; + const uint32_t * pAcquireTimeoutMilliseconds = {}; + uint32_t releaseCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {}; + const uint64_t * pReleaseKeys = {}; + }; + + template <> + struct CppType + { + using Type = Win32KeyedMutexAcquireReleaseInfoNV; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + // wrapper struct for struct VkWin32SurfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkWin32SurfaceCreateInfoKHR.html + struct Win32SurfaceCreateInfoKHR + { + using NativeType = VkWin32SurfaceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32SurfaceCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {}, + HINSTANCE hinstance_ = {}, + HWND hwnd_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , hinstance{ hinstance_ } + , hwnd{ hwnd_ } + { + } + + VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( Win32SurfaceCreateInfoKHR const & 
rhs ) VULKAN_HPP_NOEXCEPT = default; + + Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : Win32SurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + Win32SurfaceCreateInfoKHR & operator=( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + Win32SurfaceCreateInfoKHR & operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setHinstance( HINSTANCE hinstance_ ) VULKAN_HPP_NOEXCEPT + { + hinstance = hinstance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setHwnd( HWND hwnd_ ) VULKAN_HPP_NOEXCEPT + { + hwnd = hwnd_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkWin32SurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWin32SurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWin32SurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkWin32SurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, hinstance, hwnd ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Win32SurfaceCreateInfoKHR const & ) const = default; +# else + bool operator==( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( hinstance == rhs.hinstance ) && ( hwnd == rhs.hwnd ); +# endif + } + + bool operator!=( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags = {}; + HINSTANCE hinstance = {}; + HWND hwnd = {}; + }; + + template <> + struct CppType + { + using Type = Win32SurfaceCreateInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + // wrapper struct for struct VkWriteDescriptorSetAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteDescriptorSetAccelerationStructureKHR.html + struct WriteDescriptorSetAccelerationStructureKHR + { + using NativeType = VkWriteDescriptorSetAccelerationStructureKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
WriteDescriptorSetAccelerationStructureKHR( uint32_t accelerationStructureCount_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , accelerationStructureCount{ accelerationStructureCount_ } + , pAccelerationStructures{ pAccelerationStructures_ } + { + } + + VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : WriteDescriptorSetAccelerationStructureKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSetAccelerationStructureKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & accelerationStructures_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , accelerationStructureCount( static_cast( accelerationStructures_.size() ) ) + , pAccelerationStructures( accelerationStructures_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + WriteDescriptorSetAccelerationStructureKHR & operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + WriteDescriptorSetAccelerationStructureKHR & operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & + setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCount = accelerationStructureCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & + setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT + { + pAccelerationStructures = pAccelerationStructures_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructures( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCount = static_cast( accelerationStructures_.size() ); + pAccelerationStructures = accelerationStructures_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkWriteDescriptorSetAccelerationStructureKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteDescriptorSetAccelerationStructureKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteDescriptorSetAccelerationStructureKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkWriteDescriptorSetAccelerationStructureKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, 
accelerationStructureCount, pAccelerationStructures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteDescriptorSetAccelerationStructureKHR const & ) const = default; +#else + bool operator==( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructureCount == rhs.accelerationStructureCount ) && + ( pAccelerationStructures == rhs.pAccelerationStructures ); +# endif + } + + bool operator!=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureKHR; + const void * pNext = {}; + uint32_t accelerationStructureCount = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures = {}; + }; + + template <> + struct CppType + { + using Type = WriteDescriptorSetAccelerationStructureKHR; + }; + + // wrapper struct for struct VkWriteDescriptorSetAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteDescriptorSetAccelerationStructureNV.html + struct WriteDescriptorSetAccelerationStructureNV + { + using NativeType = VkWriteDescriptorSetAccelerationStructureNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( uint32_t accelerationStructureCount_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , accelerationStructureCount{ accelerationStructureCount_ } + , pAccelerationStructures{ pAccelerationStructures_ } + { + } + + VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT + : WriteDescriptorSetAccelerationStructureNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSetAccelerationStructureNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & accelerationStructures_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , accelerationStructureCount( static_cast( accelerationStructures_.size() ) ) + , pAccelerationStructures( accelerationStructures_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + WriteDescriptorSetAccelerationStructureNV & operator=( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + WriteDescriptorSetAccelerationStructureNV & operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + 
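    // --- Illustrative usage sketch (editor's note, not part of the generated header) ---
    // WriteDescriptorSetAccelerationStructureKHR, defined above, extends an ordinary
    // vk::WriteDescriptorSet via pNext when the descriptor type is eAccelerationStructureKHR.
    // The device, descriptor set, binding and TLAS handles are assumed to exist already.
#include <vulkan/vulkan.hpp>

    inline void writeTlasDescriptor( vk::Device device, vk::DescriptorSet set, uint32_t binding, vk::AccelerationStructureKHR tlas )
    {
      vk::WriteDescriptorSetAccelerationStructureKHR asInfo{};
      asInfo.setAccelerationStructureCount( 1 )
            .setPAccelerationStructures( &tlas );

      vk::WriteDescriptorSet write{};
      write.setDstSet( set )
           .setDstBinding( binding )
           .setDescriptorCount( 1 )  // must match asInfo.accelerationStructureCount
           .setDescriptorType( vk::DescriptorType::eAccelerationStructureKHR )
           .setPNext( &asInfo );

      device.updateDescriptorSets( write, {} );
    }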
+ VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & + setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCount = accelerationStructureCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & + setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT + { + pAccelerationStructures = pAccelerationStructures_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSetAccelerationStructureNV & setAccelerationStructures( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCount = static_cast( accelerationStructures_.size() ); + pAccelerationStructures = accelerationStructures_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkWriteDescriptorSetAccelerationStructureNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteDescriptorSetAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteDescriptorSetAccelerationStructureNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkWriteDescriptorSetAccelerationStructureNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, accelerationStructureCount, pAccelerationStructures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteDescriptorSetAccelerationStructureNV const & ) const = default; +#else + bool operator==( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructureCount == rhs.accelerationStructureCount ) && + ( pAccelerationStructures == rhs.pAccelerationStructures ); +# endif + } + + bool operator!=( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV; + const void * pNext = {}; + uint32_t accelerationStructureCount = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures = {}; + }; + + template <> + struct CppType + { + using Type = WriteDescriptorSetAccelerationStructureNV; + }; + + // wrapper struct for struct VkWriteDescriptorSetInlineUniformBlock, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteDescriptorSetInlineUniformBlock.html + struct WriteDescriptorSetInlineUniformBlock + { + using NativeType = VkWriteDescriptorSetInlineUniformBlock; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetInlineUniformBlock; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + WriteDescriptorSetInlineUniformBlock( uint32_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr 
) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dataSize{ dataSize_ } + , pData{ pData_ } + { + } + + VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlock( WriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSetInlineUniformBlock( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT + : WriteDescriptorSetInlineUniformBlock( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + WriteDescriptorSetInlineUniformBlock( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), dataSize( static_cast( data_.size() * sizeof( T ) ) ), pData( data_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + WriteDescriptorSetInlineUniformBlock & operator=( WriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + WriteDescriptorSetInlineUniformBlock & operator=( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setDataSize( uint32_t dataSize_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = dataSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT + { + pData = pData_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + WriteDescriptorSetInlineUniformBlock & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = static_cast( data_.size() * sizeof( T ) ); + pData = data_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkWriteDescriptorSetInlineUniformBlock const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteDescriptorSetInlineUniformBlock &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteDescriptorSetInlineUniformBlock const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkWriteDescriptorSetInlineUniformBlock *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dataSize, pData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteDescriptorSetInlineUniformBlock const & ) const = default; +#else + bool operator==( WriteDescriptorSetInlineUniformBlock const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); +# endif + } + + bool operator!=( WriteDescriptorSetInlineUniformBlock const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlock; + const void * 
pNext = {}; + uint32_t dataSize = {}; + const void * pData = {}; + }; + + template <> + struct CppType + { + using Type = WriteDescriptorSetInlineUniformBlock; + }; + + using WriteDescriptorSetInlineUniformBlockEXT = WriteDescriptorSetInlineUniformBlock; + + // wrapper struct for struct VkWriteDescriptorSetPartitionedAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteDescriptorSetPartitionedAccelerationStructureNV.html + struct WriteDescriptorSetPartitionedAccelerationStructureNV + { + using NativeType = VkWriteDescriptorSetPartitionedAccelerationStructureNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetPartitionedAccelerationStructureNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteDescriptorSetPartitionedAccelerationStructureNV( uint32_t accelerationStructureCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceAddress * pAccelerationStructures_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , accelerationStructureCount{ accelerationStructureCount_ } + , pAccelerationStructures{ pAccelerationStructures_ } + { + } + + VULKAN_HPP_CONSTEXPR + WriteDescriptorSetPartitionedAccelerationStructureNV( WriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSetPartitionedAccelerationStructureNV( VkWriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT + : WriteDescriptorSetPartitionedAccelerationStructureNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSetPartitionedAccelerationStructureNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & accelerationStructures_, void * pNext_ = nullptr ) + : pNext( pNext_ ) + , accelerationStructureCount( static_cast( accelerationStructures_.size() ) ) + , pAccelerationStructures( accelerationStructures_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + WriteDescriptorSetPartitionedAccelerationStructureNV & + operator=( WriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + WriteDescriptorSetPartitionedAccelerationStructureNV & operator=( VkWriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetPartitionedAccelerationStructureNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetPartitionedAccelerationStructureNV & + setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCount = accelerationStructureCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetPartitionedAccelerationStructureNV & + setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::DeviceAddress * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT + { + pAccelerationStructures = pAccelerationStructures_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSetPartitionedAccelerationStructureNV & setAccelerationStructures( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & 
accelerationStructures_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCount = static_cast( accelerationStructures_.size() ); + pAccelerationStructures = accelerationStructures_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkWriteDescriptorSetPartitionedAccelerationStructureNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteDescriptorSetPartitionedAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteDescriptorSetPartitionedAccelerationStructureNV const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkWriteDescriptorSetPartitionedAccelerationStructureNV *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, accelerationStructureCount, pAccelerationStructures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteDescriptorSetPartitionedAccelerationStructureNV const & ) const = default; +#else + bool operator==( WriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructureCount == rhs.accelerationStructureCount ) && + ( pAccelerationStructures == rhs.pAccelerationStructures ); +# endif + } + + bool operator!=( WriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetPartitionedAccelerationStructureNV; + void * pNext = {}; + uint32_t accelerationStructureCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceAddress * pAccelerationStructures = {}; + }; + + template <> + struct CppType + { + using Type = WriteDescriptorSetPartitionedAccelerationStructureNV; + }; + + // wrapper struct for struct VkWriteIndirectExecutionSetPipelineEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteIndirectExecutionSetPipelineEXT.html + struct WriteIndirectExecutionSetPipelineEXT + { + using NativeType = VkWriteIndirectExecutionSetPipelineEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteIndirectExecutionSetPipelineEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteIndirectExecutionSetPipelineEXT( uint32_t index_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , index{ index_ } + , pipeline{ pipeline_ } + { + } + + VULKAN_HPP_CONSTEXPR WriteIndirectExecutionSetPipelineEXT( WriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteIndirectExecutionSetPipelineEXT( VkWriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : WriteIndirectExecutionSetPipelineEXT( *reinterpret_cast( &rhs ) ) + { + } + + WriteIndirectExecutionSetPipelineEXT & operator=( WriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
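    // --- Illustrative usage sketch (editor's note, not part of the generated header) ---
    // For the WriteDescriptorSetInlineUniformBlock wrapper defined a little further above: it is
    // chained into a vk::WriteDescriptorSet whose descriptorCount carries the byte size of the
    // inline data rather than an element count. The device, set and binding are assumed placeholders.
#include <array>
#include <vulkan/vulkan.hpp>

    inline void writeInlineUniformBlock( vk::Device device, vk::DescriptorSet set, uint32_t binding )
    {
      std::array<float, 4> constants = { 1.0f, 0.0f, 0.0f, 1.0f };

      vk::WriteDescriptorSetInlineUniformBlock inlineData{};
      inlineData.setDataSize( static_cast<uint32_t>( sizeof( constants ) ) )
                .setPData( constants.data() );

      vk::WriteDescriptorSet write{};
      write.setDstSet( set )
           .setDstBinding( binding )
           .setDescriptorCount( static_cast<uint32_t>( sizeof( constants ) ) )  // byte count for inline uniform blocks
           .setDescriptorType( vk::DescriptorType::eInlineUniformBlock )
           .setPNext( &inlineData );

      device.updateDescriptorSets( write, {} );
    }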
+ + WriteIndirectExecutionSetPipelineEXT & operator=( VkWriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetPipelineEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetPipelineEXT & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetPipelineEXT & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkWriteIndirectExecutionSetPipelineEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteIndirectExecutionSetPipelineEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteIndirectExecutionSetPipelineEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkWriteIndirectExecutionSetPipelineEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, index, pipeline ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteIndirectExecutionSetPipelineEXT const & ) const = default; +#else + bool operator==( WriteIndirectExecutionSetPipelineEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( index == rhs.index ) && ( pipeline == rhs.pipeline ); +# endif + } + + bool operator!=( WriteIndirectExecutionSetPipelineEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteIndirectExecutionSetPipelineEXT; + const void * pNext = {}; + uint32_t index = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + }; + + template <> + struct CppType + { + using Type = WriteIndirectExecutionSetPipelineEXT; + }; + + // wrapper struct for struct VkWriteIndirectExecutionSetShaderEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/VkWriteIndirectExecutionSetShaderEXT.html + struct WriteIndirectExecutionSetShaderEXT + { + using NativeType = VkWriteIndirectExecutionSetShaderEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteIndirectExecutionSetShaderEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteIndirectExecutionSetShaderEXT( uint32_t index_ = {}, + VULKAN_HPP_NAMESPACE::ShaderEXT shader_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , index{ index_ } + , shader{ shader_ } + { + } + + VULKAN_HPP_CONSTEXPR WriteIndirectExecutionSetShaderEXT( WriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteIndirectExecutionSetShaderEXT( VkWriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : WriteIndirectExecutionSetShaderEXT( 
*reinterpret_cast( &rhs ) ) + { + } + + WriteIndirectExecutionSetShaderEXT & operator=( WriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + WriteIndirectExecutionSetShaderEXT & operator=( VkWriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetShaderEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetShaderEXT & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetShaderEXT & setShader( VULKAN_HPP_NAMESPACE::ShaderEXT shader_ ) VULKAN_HPP_NOEXCEPT + { + shader = shader_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkWriteIndirectExecutionSetShaderEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteIndirectExecutionSetShaderEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteIndirectExecutionSetShaderEXT const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkWriteIndirectExecutionSetShaderEXT *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, index, shader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteIndirectExecutionSetShaderEXT const & ) const = default; +#else + bool operator==( WriteIndirectExecutionSetShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( index == rhs.index ) && ( shader == rhs.shader ); +# endif + } + + bool operator!=( WriteIndirectExecutionSetShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteIndirectExecutionSetShaderEXT; + const void * pNext = {}; + uint32_t index = {}; + VULKAN_HPP_NAMESPACE::ShaderEXT shader = {}; + }; + + template <> + struct CppType + { + using Type = WriteIndirectExecutionSetShaderEXT; + }; + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + // wrapper struct for struct VkXcbSurfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkXcbSurfaceCreateInfoKHR.html + struct XcbSurfaceCreateInfoKHR + { + using NativeType = VkXcbSurfaceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXcbSurfaceCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {}, + xcb_connection_t * connection_ = {}, + xcb_window_t window_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , connection{ connection_ } + , window{ window_ } + { + } + + VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( XcbSurfaceCreateInfoKHR 
const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : XcbSurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + XcbSurfaceCreateInfoKHR & operator=( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + XcbSurfaceCreateInfoKHR & operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setConnection( xcb_connection_t * connection_ ) VULKAN_HPP_NOEXCEPT + { + connection = connection_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setWindow( xcb_window_t window_ ) VULKAN_HPP_NOEXCEPT + { + window = window_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkXcbSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkXcbSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkXcbSurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkXcbSurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, connection, window ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = connection <=> rhs.connection; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( connection == rhs.connection ) && + ( memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ) == 0 ); + } + + bool operator!=( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags = {}; + xcb_connection_t * connection = {}; + xcb_window_t window = {}; + }; + + template <> + struct CppType + { + using Type = XcbSurfaceCreateInfoKHR; + }; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + // wrapper struct for struct VkXlibSurfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkXlibSurfaceCreateInfoKHR.html + struct XlibSurfaceCreateInfoKHR + { + using NativeType = VkXlibSurfaceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXlibSurfaceCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {}, + Display * dpy_ = {}, + Window window_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , dpy{ dpy_ } + , window{ window_ } + { + } + + VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : XlibSurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + XlibSurfaceCreateInfoKHR & operator=( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + XlibSurfaceCreateInfoKHR & operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setDpy( Display * dpy_ ) VULKAN_HPP_NOEXCEPT + { + dpy = dpy_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setWindow( Window window_ ) VULKAN_HPP_NOEXCEPT + { + window = window_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkXlibSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkXlibSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkXlibSurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator VkXlibSurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + +# if defined( 
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, dpy, window ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = dpy <=> rhs.dpy; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &window, &rhs.window, sizeof( Window ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dpy == rhs.dpy ) && + ( memcmp( &window, &rhs.window, sizeof( Window ) ) == 0 ); + } + + bool operator!=( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags = {}; + Display * dpy = {}; + Window window = {}; + }; + + template <> + struct CppType + { + using Type = XlibSurfaceCreateInfoKHR; + }; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +} // namespace VULKAN_HPP_NAMESPACE +#endif diff --git a/include/vulkan/vulkan_to_string.hpp b/include/vulkan/vulkan_to_string.hpp new file mode 100644 index 00000000..80427395 --- /dev/null +++ b/include/vulkan/vulkan_to_string.hpp @@ -0,0 +1,10081 @@ +// Copyright 2015-2025 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
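// --- Illustrative usage sketch (editor's note, not part of either generated header) ---
// The XcbSurfaceCreateInfoKHR / XlibSurfaceCreateInfoKHR wrappers defined at the end of
// vulkan_structs.hpp above are typically filled in and handed straight to the instance.
// The connection and window values are assumed to come from the application's window-system code.
#if defined( VK_USE_PLATFORM_XCB_KHR )
#  include <vulkan/vulkan.hpp>

inline vk::SurfaceKHR createXcbSurface( vk::Instance instance, xcb_connection_t * connection, xcb_window_t window )
{
  vk::XcbSurfaceCreateInfoKHR createInfo{};
  createInfo.setConnection( connection )
            .setWindow( window );

  // Throws vk::SystemError on failure when exceptions are enabled.
  return instance.createXcbSurfaceKHR( createInfo );
}
#endif /*VK_USE_PLATFORM_XCB_KHR*/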
+ +#ifndef VULKAN_TO_STRING_HPP +#define VULKAN_TO_STRING_HPP + +#include + +// ignore warnings on using deprecated enum values in this header +#if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# pragma GCC diagnostic ignored "-Wdeprecated-declarations" +#elif defined( _MSC_VER ) +# pragma warning( push ) +# pragma warning( disable : 4996 ) +#endif + +#if defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) +import VULKAN_HPP_STD_MODULE; +#else +# if __cpp_lib_format +# include // std::format +# else +# include // std::stringstream +# endif +#endif + +namespace VULKAN_HPP_NAMESPACE +{ + + //========================== + //=== BITMASKs to_string === + //========================== + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags value ) + { + std::string result = "{"; + if ( value & FormatFeatureFlagBits::eSampledImage ) + result += " SampledImage |"; + if ( value & FormatFeatureFlagBits::eStorageImage ) + result += " StorageImage |"; + if ( value & FormatFeatureFlagBits::eStorageImageAtomic ) + result += " StorageImageAtomic |"; + if ( value & FormatFeatureFlagBits::eUniformTexelBuffer ) + result += " UniformTexelBuffer |"; + if ( value & FormatFeatureFlagBits::eStorageTexelBuffer ) + result += " StorageTexelBuffer |"; + if ( value & FormatFeatureFlagBits::eStorageTexelBufferAtomic ) + result += " StorageTexelBufferAtomic |"; + if ( value & FormatFeatureFlagBits::eVertexBuffer ) + result += " VertexBuffer |"; + if ( value & FormatFeatureFlagBits::eColorAttachment ) + result += " ColorAttachment |"; + if ( value & FormatFeatureFlagBits::eColorAttachmentBlend ) + result += " ColorAttachmentBlend |"; + if ( value & FormatFeatureFlagBits::eDepthStencilAttachment ) + result += " DepthStencilAttachment |"; + if ( value & FormatFeatureFlagBits::eBlitSrc ) + result += " BlitSrc |"; + if ( value & FormatFeatureFlagBits::eBlitDst ) + result += " BlitDst |"; + if ( value & FormatFeatureFlagBits::eSampledImageFilterLinear ) + result += " SampledImageFilterLinear |"; + if ( value & FormatFeatureFlagBits::eTransferSrc ) + result += " TransferSrc |"; + if ( value & FormatFeatureFlagBits::eTransferDst ) + result += " TransferDst |"; + if ( value & FormatFeatureFlagBits::eMidpointChromaSamples ) + result += " MidpointChromaSamples |"; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) + result += " SampledImageYcbcrConversionLinearFilter |"; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) + result += " SampledImageYcbcrConversionSeparateReconstructionFilter |"; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) + result += " SampledImageYcbcrConversionChromaReconstructionExplicit |"; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) + result += " SampledImageYcbcrConversionChromaReconstructionExplicitForceable |"; + if ( value & FormatFeatureFlagBits::eDisjoint ) + result += " Disjoint |"; + if ( value & FormatFeatureFlagBits::eCositedChromaSamples ) + result += " CositedChromaSamples |"; + if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax ) + result += " SampledImageFilterMinmax |"; + if ( value & FormatFeatureFlagBits::eVideoDecodeOutputKHR ) + result += " VideoDecodeOutputKHR |"; + if ( value & FormatFeatureFlagBits::eVideoDecodeDpbKHR ) + result += " VideoDecodeDpbKHR |"; + if ( value & 
FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) + result += " AccelerationStructureVertexBufferKHR |"; + if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicEXT ) + result += " SampledImageFilterCubicEXT |"; + if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT ) + result += " FragmentDensityMapEXT |"; + if ( value & FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR ) + result += " FragmentShadingRateAttachmentKHR |"; + if ( value & FormatFeatureFlagBits::eVideoEncodeInputKHR ) + result += " VideoEncodeInputKHR |"; + if ( value & FormatFeatureFlagBits::eVideoEncodeDpbKHR ) + result += " VideoEncodeDpbKHR |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value ) + { + std::string result = "{"; + if ( value & ImageCreateFlagBits::eSparseBinding ) + result += " SparseBinding |"; + if ( value & ImageCreateFlagBits::eSparseResidency ) + result += " SparseResidency |"; + if ( value & ImageCreateFlagBits::eSparseAliased ) + result += " SparseAliased |"; + if ( value & ImageCreateFlagBits::eMutableFormat ) + result += " MutableFormat |"; + if ( value & ImageCreateFlagBits::eCubeCompatible ) + result += " CubeCompatible |"; + if ( value & ImageCreateFlagBits::eAlias ) + result += " Alias |"; + if ( value & ImageCreateFlagBits::eSplitInstanceBindRegions ) + result += " SplitInstanceBindRegions |"; + if ( value & ImageCreateFlagBits::e2DArrayCompatible ) + result += " 2DArrayCompatible |"; + if ( value & ImageCreateFlagBits::eBlockTexelViewCompatible ) + result += " BlockTexelViewCompatible |"; + if ( value & ImageCreateFlagBits::eExtendedUsage ) + result += " ExtendedUsage |"; + if ( value & ImageCreateFlagBits::eProtected ) + result += " Protected |"; + if ( value & ImageCreateFlagBits::eDisjoint ) + result += " Disjoint |"; + if ( value & ImageCreateFlagBits::eCornerSampledNV ) + result += " CornerSampledNV |"; + if ( value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) + result += " SampleLocationsCompatibleDepthEXT |"; + if ( value & ImageCreateFlagBits::eSubsampledEXT ) + result += " SubsampledEXT |"; + if ( value & ImageCreateFlagBits::eDescriptorBufferCaptureReplayEXT ) + result += " DescriptorBufferCaptureReplayEXT |"; + if ( value & ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT ) + result += " MultisampledRenderToSingleSampledEXT |"; + if ( value & ImageCreateFlagBits::e2DViewCompatibleEXT ) + result += " 2DViewCompatibleEXT |"; + if ( value & ImageCreateFlagBits::eVideoProfileIndependentKHR ) + result += " VideoProfileIndependentKHR |"; + if ( value & ImageCreateFlagBits::eFragmentDensityMapOffsetEXT ) + result += " FragmentDensityMapOffsetEXT |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value ) + { + std::string result = "{"; + if ( value & ImageUsageFlagBits::eTransferSrc ) + result += " TransferSrc |"; + if ( value & ImageUsageFlagBits::eTransferDst ) + result += " TransferDst |"; + if ( value & ImageUsageFlagBits::eSampled ) + result += " Sampled |"; + if ( value & ImageUsageFlagBits::eStorage ) + result += " Storage |"; + if ( value & ImageUsageFlagBits::eColorAttachment ) + result += " ColorAttachment |"; + if ( value & ImageUsageFlagBits::eDepthStencilAttachment ) + result += " DepthStencilAttachment |"; + if ( value & ImageUsageFlagBits::eTransientAttachment ) + result += " TransientAttachment 
|"; + if ( value & ImageUsageFlagBits::eInputAttachment ) + result += " InputAttachment |"; + if ( value & ImageUsageFlagBits::eHostTransfer ) + result += " HostTransfer |"; + if ( value & ImageUsageFlagBits::eVideoDecodeDstKHR ) + result += " VideoDecodeDstKHR |"; + if ( value & ImageUsageFlagBits::eVideoDecodeSrcKHR ) + result += " VideoDecodeSrcKHR |"; + if ( value & ImageUsageFlagBits::eVideoDecodeDpbKHR ) + result += " VideoDecodeDpbKHR |"; + if ( value & ImageUsageFlagBits::eFragmentDensityMapEXT ) + result += " FragmentDensityMapEXT |"; + if ( value & ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR ) + result += " FragmentShadingRateAttachmentKHR |"; + if ( value & ImageUsageFlagBits::eVideoEncodeDstKHR ) + result += " VideoEncodeDstKHR |"; + if ( value & ImageUsageFlagBits::eVideoEncodeSrcKHR ) + result += " VideoEncodeSrcKHR |"; + if ( value & ImageUsageFlagBits::eVideoEncodeDpbKHR ) + result += " VideoEncodeDpbKHR |"; + if ( value & ImageUsageFlagBits::eAttachmentFeedbackLoopEXT ) + result += " AttachmentFeedbackLoopEXT |"; + if ( value & ImageUsageFlagBits::eInvocationMaskHUAWEI ) + result += " InvocationMaskHUAWEI |"; + if ( value & ImageUsageFlagBits::eSampleWeightQCOM ) + result += " SampleWeightQCOM |"; + if ( value & ImageUsageFlagBits::eSampleBlockMatchQCOM ) + result += " SampleBlockMatchQCOM |"; + if ( value & ImageUsageFlagBits::eTileMemoryQCOM ) + result += " TileMemoryQCOM |"; + if ( value & ImageUsageFlagBits::eVideoEncodeQuantizationDeltaMapKHR ) + result += " VideoEncodeQuantizationDeltaMapKHR |"; + if ( value & ImageUsageFlagBits::eVideoEncodeEmphasisMapKHR ) + result += " VideoEncodeEmphasisMapKHR |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags value ) + { + std::string result = "{"; + if ( value & InstanceCreateFlagBits::eEnumeratePortabilityKHR ) + result += " EnumeratePortabilityKHR |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value ) + { + std::string result = "{"; + if ( value & MemoryHeapFlagBits::eDeviceLocal ) + result += " DeviceLocal |"; + if ( value & MemoryHeapFlagBits::eMultiInstance ) + result += " MultiInstance |"; + if ( value & MemoryHeapFlagBits::eTileMemoryQCOM ) + result += " TileMemoryQCOM |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value ) + { + std::string result = "{"; + if ( value & MemoryPropertyFlagBits::eDeviceLocal ) + result += " DeviceLocal |"; + if ( value & MemoryPropertyFlagBits::eHostVisible ) + result += " HostVisible |"; + if ( value & MemoryPropertyFlagBits::eHostCoherent ) + result += " HostCoherent |"; + if ( value & MemoryPropertyFlagBits::eHostCached ) + result += " HostCached |"; + if ( value & MemoryPropertyFlagBits::eLazilyAllocated ) + result += " LazilyAllocated |"; + if ( value & MemoryPropertyFlagBits::eProtected ) + result += " Protected |"; + if ( value & MemoryPropertyFlagBits::eDeviceCoherentAMD ) + result += " DeviceCoherentAMD |"; + if ( value & MemoryPropertyFlagBits::eDeviceUncachedAMD ) + result += " DeviceUncachedAMD |"; + if ( value & MemoryPropertyFlagBits::eRdmaCapableNV ) + result += " RdmaCapableNV |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( 
QueueFlags value ) + { + std::string result = "{"; + if ( value & QueueFlagBits::eGraphics ) + result += " Graphics |"; + if ( value & QueueFlagBits::eCompute ) + result += " Compute |"; + if ( value & QueueFlagBits::eTransfer ) + result += " Transfer |"; + if ( value & QueueFlagBits::eSparseBinding ) + result += " SparseBinding |"; + if ( value & QueueFlagBits::eProtected ) + result += " Protected |"; + if ( value & QueueFlagBits::eVideoDecodeKHR ) + result += " VideoDecodeKHR |"; + if ( value & QueueFlagBits::eVideoEncodeKHR ) + result += " VideoEncodeKHR |"; + if ( value & QueueFlagBits::eOpticalFlowNV ) + result += " OpticalFlowNV |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( SampleCountFlags value ) + { + std::string result = "{"; + if ( value & SampleCountFlagBits::e1 ) + result += " 1 |"; + if ( value & SampleCountFlagBits::e2 ) + result += " 2 |"; + if ( value & SampleCountFlagBits::e4 ) + result += " 4 |"; + if ( value & SampleCountFlagBits::e8 ) + result += " 8 |"; + if ( value & SampleCountFlagBits::e16 ) + result += " 16 |"; + if ( value & SampleCountFlagBits::e32 ) + result += " 32 |"; + if ( value & SampleCountFlagBits::e64 ) + result += " 64 |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DeviceCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value ) + { + std::string result = "{"; + if ( value & DeviceQueueCreateFlagBits::eProtected ) + result += " Protected |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value ) + { + std::string result = "{"; + if ( value & PipelineStageFlagBits::eTopOfPipe ) + result += " TopOfPipe |"; + if ( value & PipelineStageFlagBits::eDrawIndirect ) + result += " DrawIndirect |"; + if ( value & PipelineStageFlagBits::eVertexInput ) + result += " VertexInput |"; + if ( value & PipelineStageFlagBits::eVertexShader ) + result += " VertexShader |"; + if ( value & PipelineStageFlagBits::eTessellationControlShader ) + result += " TessellationControlShader |"; + if ( value & PipelineStageFlagBits::eTessellationEvaluationShader ) + result += " TessellationEvaluationShader |"; + if ( value & PipelineStageFlagBits::eGeometryShader ) + result += " GeometryShader |"; + if ( value & PipelineStageFlagBits::eFragmentShader ) + result += " FragmentShader |"; + if ( value & PipelineStageFlagBits::eEarlyFragmentTests ) + result += " EarlyFragmentTests |"; + if ( value & PipelineStageFlagBits::eLateFragmentTests ) + result += " LateFragmentTests |"; + if ( value & PipelineStageFlagBits::eColorAttachmentOutput ) + result += " ColorAttachmentOutput |"; + if ( value & PipelineStageFlagBits::eComputeShader ) + result += " ComputeShader |"; + if ( value & PipelineStageFlagBits::eTransfer ) + result += " Transfer |"; + if ( value & PipelineStageFlagBits::eBottomOfPipe ) + result += " BottomOfPipe |"; + if ( value & PipelineStageFlagBits::eHost ) + result += " Host |"; + if ( value & PipelineStageFlagBits::eAllGraphics ) + result += " AllGraphics |"; + if ( value & PipelineStageFlagBits::eAllCommands ) + result += " AllCommands |"; + if ( value & PipelineStageFlagBits::eTransformFeedbackEXT ) + result += " TransformFeedbackEXT |"; + if ( value & 
PipelineStageFlagBits::eConditionalRenderingEXT ) + result += " ConditionalRenderingEXT |"; + if ( value & PipelineStageFlagBits::eAccelerationStructureBuildKHR ) + result += " AccelerationStructureBuildKHR |"; + if ( value & PipelineStageFlagBits::eRayTracingShaderKHR ) + result += " RayTracingShaderKHR |"; + if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT ) + result += " FragmentDensityProcessEXT |"; + if ( value & PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR ) + result += " FragmentShadingRateAttachmentKHR |"; + if ( value & PipelineStageFlagBits::eTaskShaderEXT ) + result += " TaskShaderEXT |"; + if ( value & PipelineStageFlagBits::eMeshShaderEXT ) + result += " MeshShaderEXT |"; + if ( value & PipelineStageFlagBits::eCommandPreprocessEXT ) + result += " CommandPreprocessEXT |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "None"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags value ) + { + std::string result = "{"; + if ( value & MemoryMapFlagBits::ePlacedEXT ) + result += " PlacedEXT |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value ) + { + std::string result = "{"; + if ( value & ImageAspectFlagBits::eColor ) + result += " Color |"; + if ( value & ImageAspectFlagBits::eDepth ) + result += " Depth |"; + if ( value & ImageAspectFlagBits::eStencil ) + result += " Stencil |"; + if ( value & ImageAspectFlagBits::eMetadata ) + result += " Metadata |"; + if ( value & ImageAspectFlagBits::ePlane0 ) + result += " Plane0 |"; + if ( value & ImageAspectFlagBits::ePlane1 ) + result += " Plane1 |"; + if ( value & ImageAspectFlagBits::ePlane2 ) + result += " Plane2 |"; + if ( value & ImageAspectFlagBits::eMemoryPlane0EXT ) + result += " MemoryPlane0EXT |"; + if ( value & ImageAspectFlagBits::eMemoryPlane1EXT ) + result += " MemoryPlane1EXT |"; + if ( value & ImageAspectFlagBits::eMemoryPlane2EXT ) + result += " MemoryPlane2EXT |"; + if ( value & ImageAspectFlagBits::eMemoryPlane3EXT ) + result += " MemoryPlane3EXT |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "None"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value ) + { + std::string result = "{"; + if ( value & SparseImageFormatFlagBits::eSingleMiptail ) + result += " SingleMiptail |"; + if ( value & SparseImageFormatFlagBits::eAlignedMipSize ) + result += " AlignedMipSize |"; + if ( value & SparseImageFormatFlagBits::eNonstandardBlockSize ) + result += " NonstandardBlockSize |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value ) + { + std::string result = "{"; + if ( value & SparseMemoryBindFlagBits::eMetadata ) + result += " Metadata |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags value ) + { + std::string result = "{"; + if ( value & FenceCreateFlagBits::eSignaled ) + result += " Signaled |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SemaphoreCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( EventCreateFlags value ) + { + std::string result = "{"; + if ( value & 
EventCreateFlagBits::eDeviceOnly ) + result += " DeviceOnly |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlags value ) + { + std::string result = "{"; + if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) + result += " InputAssemblyVertices |"; + if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) + result += " InputAssemblyPrimitives |"; + if ( value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) + result += " VertexShaderInvocations |"; + if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) + result += " GeometryShaderInvocations |"; + if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) + result += " GeometryShaderPrimitives |"; + if ( value & QueryPipelineStatisticFlagBits::eClippingInvocations ) + result += " ClippingInvocations |"; + if ( value & QueryPipelineStatisticFlagBits::eClippingPrimitives ) + result += " ClippingPrimitives |"; + if ( value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) + result += " FragmentShaderInvocations |"; + if ( value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) + result += " TessellationControlShaderPatches |"; + if ( value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) + result += " TessellationEvaluationShaderInvocations |"; + if ( value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations ) + result += " ComputeShaderInvocations |"; + if ( value & QueryPipelineStatisticFlagBits::eTaskShaderInvocationsEXT ) + result += " TaskShaderInvocationsEXT |"; + if ( value & QueryPipelineStatisticFlagBits::eMeshShaderInvocationsEXT ) + result += " MeshShaderInvocationsEXT |"; + if ( value & QueryPipelineStatisticFlagBits::eClusterCullingShaderInvocationsHUAWEI ) + result += " ClusterCullingShaderInvocationsHUAWEI |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( QueryPoolCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value ) + { + std::string result = "{"; + if ( value & QueryResultFlagBits::e64 ) + result += " 64 |"; + if ( value & QueryResultFlagBits::eWait ) + result += " Wait |"; + if ( value & QueryResultFlagBits::eWithAvailability ) + result += " WithAvailability |"; + if ( value & QueryResultFlagBits::ePartial ) + result += " Partial |"; + if ( value & QueryResultFlagBits::eWithStatusKHR ) + result += " WithStatusKHR |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value ) + { + std::string result = "{"; + if ( value & BufferCreateFlagBits::eSparseBinding ) + result += " SparseBinding |"; + if ( value & BufferCreateFlagBits::eSparseResidency ) + result += " SparseResidency |"; + if ( value & BufferCreateFlagBits::eSparseAliased ) + result += " SparseAliased |"; + if ( value & BufferCreateFlagBits::eProtected ) + result += " Protected |"; + if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplay ) + result += " DeviceAddressCaptureReplay |"; + if ( value & BufferCreateFlagBits::eDescriptorBufferCaptureReplayEXT ) + result += " DescriptorBufferCaptureReplayEXT |"; + if ( value & BufferCreateFlagBits::eVideoProfileIndependentKHR ) + result += " VideoProfileIndependentKHR |"; + + 
if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value ) + { + std::string result = "{"; + if ( value & BufferUsageFlagBits::eTransferSrc ) + result += " TransferSrc |"; + if ( value & BufferUsageFlagBits::eTransferDst ) + result += " TransferDst |"; + if ( value & BufferUsageFlagBits::eUniformTexelBuffer ) + result += " UniformTexelBuffer |"; + if ( value & BufferUsageFlagBits::eStorageTexelBuffer ) + result += " StorageTexelBuffer |"; + if ( value & BufferUsageFlagBits::eUniformBuffer ) + result += " UniformBuffer |"; + if ( value & BufferUsageFlagBits::eStorageBuffer ) + result += " StorageBuffer |"; + if ( value & BufferUsageFlagBits::eIndexBuffer ) + result += " IndexBuffer |"; + if ( value & BufferUsageFlagBits::eVertexBuffer ) + result += " VertexBuffer |"; + if ( value & BufferUsageFlagBits::eIndirectBuffer ) + result += " IndirectBuffer |"; + if ( value & BufferUsageFlagBits::eShaderDeviceAddress ) + result += " ShaderDeviceAddress |"; + if ( value & BufferUsageFlagBits::eVideoDecodeSrcKHR ) + result += " VideoDecodeSrcKHR |"; + if ( value & BufferUsageFlagBits::eVideoDecodeDstKHR ) + result += " VideoDecodeDstKHR |"; + if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT ) + result += " TransformFeedbackBufferEXT |"; + if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) + result += " TransformFeedbackCounterBufferEXT |"; + if ( value & BufferUsageFlagBits::eConditionalRenderingEXT ) + result += " ConditionalRenderingEXT |"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BufferUsageFlagBits::eExecutionGraphScratchAMDX ) + result += " ExecutionGraphScratchAMDX |"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR ) + result += " AccelerationStructureBuildInputReadOnlyKHR |"; + if ( value & BufferUsageFlagBits::eAccelerationStructureStorageKHR ) + result += " AccelerationStructureStorageKHR |"; + if ( value & BufferUsageFlagBits::eShaderBindingTableKHR ) + result += " ShaderBindingTableKHR |"; + if ( value & BufferUsageFlagBits::eVideoEncodeDstKHR ) + result += " VideoEncodeDstKHR |"; + if ( value & BufferUsageFlagBits::eVideoEncodeSrcKHR ) + result += " VideoEncodeSrcKHR |"; + if ( value & BufferUsageFlagBits::eSamplerDescriptorBufferEXT ) + result += " SamplerDescriptorBufferEXT |"; + if ( value & BufferUsageFlagBits::eResourceDescriptorBufferEXT ) + result += " ResourceDescriptorBufferEXT |"; + if ( value & BufferUsageFlagBits::ePushDescriptorsDescriptorBufferEXT ) + result += " PushDescriptorsDescriptorBufferEXT |"; + if ( value & BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT ) + result += " MicromapBuildInputReadOnlyEXT |"; + if ( value & BufferUsageFlagBits::eMicromapStorageEXT ) + result += " MicromapStorageEXT |"; + if ( value & BufferUsageFlagBits::eTileMemoryQCOM ) + result += " TileMemoryQCOM |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( BufferViewCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value ) + { + std::string result = "{"; + if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) + result += " FragmentDensityMapDynamicEXT |"; + if ( value & ImageViewCreateFlagBits::eDescriptorBufferCaptureReplayEXT ) + result += " DescriptorBufferCaptureReplayEXT |"; + if ( 
value & ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT ) + result += " FragmentDensityMapDeferredEXT |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ShaderModuleCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags value ) + { + std::string result = "{"; + if ( value & PipelineCacheCreateFlagBits::eExternallySynchronized ) + result += " ExternallySynchronized |"; + if ( value & PipelineCacheCreateFlagBits::eInternallySynchronizedMergeKHR ) + result += " InternallySynchronizedMergeKHR |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value ) + { + std::string result = "{"; + if ( value & ColorComponentFlagBits::eR ) + result += " R |"; + if ( value & ColorComponentFlagBits::eG ) + result += " G |"; + if ( value & ColorComponentFlagBits::eB ) + result += " B |"; + if ( value & ColorComponentFlagBits::eA ) + result += " A |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( CullModeFlags value ) + { + std::string result = "{"; + if ( value & CullModeFlagBits::eFront ) + result += " Front |"; + if ( value & CullModeFlagBits::eBack ) + result += " Back |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "None"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags value ) + { + std::string result = "{"; + if ( value & PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessEXT ) + result += " RasterizationOrderAttachmentAccessEXT |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value ) + { + std::string result = "{"; + if ( value & PipelineCreateFlagBits::eDisableOptimization ) + result += " DisableOptimization |"; + if ( value & PipelineCreateFlagBits::eAllowDerivatives ) + result += " AllowDerivatives |"; + if ( value & PipelineCreateFlagBits::eDerivative ) + result += " Derivative |"; + if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) + result += " ViewIndexFromDeviceIndex |"; + if ( value & PipelineCreateFlagBits::eDispatchBase ) + result += " DispatchBase |"; + if ( value & PipelineCreateFlagBits::eFailOnPipelineCompileRequired ) + result += " FailOnPipelineCompileRequired |"; + if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailure ) + result += " EarlyReturnOnFailure |"; + if ( value & PipelineCreateFlagBits::eNoProtectedAccess ) + result += " NoProtectedAccess |"; + if ( value & PipelineCreateFlagBits::eProtectedAccessOnly ) + result += " ProtectedAccessOnly |"; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) + result += " RayTracingNoNullAnyHitShadersKHR |"; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) + result += " RayTracingNoNullClosestHitShadersKHR |"; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) + result += " RayTracingNoNullMissShadersKHR |"; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) + result += " RayTracingNoNullIntersectionShadersKHR |"; + if ( value & PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) + result += " RayTracingSkipTrianglesKHR 
|"; + if ( value & PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) + result += " RayTracingSkipAabbsKHR |"; + if ( value & PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR ) + result += " RayTracingShaderGroupHandleCaptureReplayKHR |"; + if ( value & PipelineCreateFlagBits::eDeferCompileNV ) + result += " DeferCompileNV |"; + if ( value & PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT ) + result += " RenderingFragmentDensityMapAttachmentEXT |"; + if ( value & PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR ) + result += " RenderingFragmentShadingRateAttachmentKHR |"; + if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR ) + result += " CaptureStatisticsKHR |"; + if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) + result += " CaptureInternalRepresentationsKHR |"; + if ( value & PipelineCreateFlagBits::eIndirectBindableNV ) + result += " IndirectBindableNV |"; + if ( value & PipelineCreateFlagBits::eLibraryKHR ) + result += " LibraryKHR |"; + if ( value & PipelineCreateFlagBits::eDescriptorBufferEXT ) + result += " DescriptorBufferEXT |"; + if ( value & PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT ) + result += " RetainLinkTimeOptimizationInfoEXT |"; + if ( value & PipelineCreateFlagBits::eLinkTimeOptimizationEXT ) + result += " LinkTimeOptimizationEXT |"; + if ( value & PipelineCreateFlagBits::eRayTracingAllowMotionNV ) + result += " RayTracingAllowMotionNV |"; + if ( value & PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT ) + result += " ColorAttachmentFeedbackLoopEXT |"; + if ( value & PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT ) + result += " DepthStencilAttachmentFeedbackLoopEXT |"; + if ( value & PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT ) + result += " RayTracingOpacityMicromapEXT |"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & PipelineCreateFlagBits::eRayTracingDisplacementMicromapNV ) + result += " RayTracingDisplacementMicromapNV |"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags value ) + { + std::string result = "{"; + if ( value & PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessEXT ) + result += " RasterizationOrderAttachmentDepthAccessEXT |"; + if ( value & PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessEXT ) + result += " RasterizationOrderAttachmentStencilAccessEXT |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineDynamicStateCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineInputAssemblyStateCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags value ) + { + std::string result = "{"; + if ( value & PipelineLayoutCreateFlagBits::eIndependentSetsEXT ) + result += " IndependentSetsEXT |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineMultisampleStateCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineRasterizationStateCreateFlags ) + { + return "{}"; + } + + 
VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value ) + { + std::string result = "{"; + if ( value & PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize ) + result += " AllowVaryingSubgroupSize |"; + if ( value & PipelineShaderStageCreateFlagBits::eRequireFullSubgroups ) + result += " RequireFullSubgroups |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineTessellationStateCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineVertexInputStateCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineViewportStateCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value ) + { + std::string result = "{"; + if ( value & ShaderStageFlagBits::eVertex ) + result += " Vertex |"; + if ( value & ShaderStageFlagBits::eTessellationControl ) + result += " TessellationControl |"; + if ( value & ShaderStageFlagBits::eTessellationEvaluation ) + result += " TessellationEvaluation |"; + if ( value & ShaderStageFlagBits::eGeometry ) + result += " Geometry |"; + if ( value & ShaderStageFlagBits::eFragment ) + result += " Fragment |"; + if ( value & ShaderStageFlagBits::eCompute ) + result += " Compute |"; + if ( value & ShaderStageFlagBits::eRaygenKHR ) + result += " RaygenKHR |"; + if ( value & ShaderStageFlagBits::eAnyHitKHR ) + result += " AnyHitKHR |"; + if ( value & ShaderStageFlagBits::eClosestHitKHR ) + result += " ClosestHitKHR |"; + if ( value & ShaderStageFlagBits::eMissKHR ) + result += " MissKHR |"; + if ( value & ShaderStageFlagBits::eIntersectionKHR ) + result += " IntersectionKHR |"; + if ( value & ShaderStageFlagBits::eCallableKHR ) + result += " CallableKHR |"; + if ( value & ShaderStageFlagBits::eTaskEXT ) + result += " TaskEXT |"; + if ( value & ShaderStageFlagBits::eMeshEXT ) + result += " MeshEXT |"; + if ( value & ShaderStageFlagBits::eSubpassShadingHUAWEI ) + result += " SubpassShadingHUAWEI |"; + if ( value & ShaderStageFlagBits::eClusterCullingHUAWEI ) + result += " ClusterCullingHUAWEI |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value ) + { + std::string result = "{"; + if ( value & SamplerCreateFlagBits::eSubsampledEXT ) + result += " SubsampledEXT |"; + if ( value & SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) + result += " SubsampledCoarseReconstructionEXT |"; + if ( value & SamplerCreateFlagBits::eDescriptorBufferCaptureReplayEXT ) + result += " DescriptorBufferCaptureReplayEXT |"; + if ( value & SamplerCreateFlagBits::eNonSeamlessCubeMapEXT ) + result += " NonSeamlessCubeMapEXT |"; + if ( value & SamplerCreateFlagBits::eImageProcessingQCOM ) + result += " ImageProcessingQCOM |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlags value ) + { + std::string result = "{"; + if ( value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) + result += " FreeDescriptorSet |"; + if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBind ) + result += " UpdateAfterBind |"; + if ( value & DescriptorPoolCreateFlagBits::eHostOnlyEXT ) + result += " HostOnlyEXT |"; + if ( value & 
DescriptorPoolCreateFlagBits::eAllowOverallocationSetsNV ) + result += " AllowOverallocationSetsNV |"; + if ( value & DescriptorPoolCreateFlagBits::eAllowOverallocationPoolsNV ) + result += " AllowOverallocationPoolsNV |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DescriptorPoolResetFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value ) + { + std::string result = "{"; + if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) + result += " UpdateAfterBindPool |"; + if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptor ) + result += " PushDescriptor |"; + if ( value & DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT ) + result += " DescriptorBufferEXT |"; + if ( value & DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT ) + result += " EmbeddedImmutableSamplersEXT |"; + if ( value & DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV ) + result += " IndirectBindableNV |"; + if ( value & DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT ) + result += " HostOnlyPoolEXT |"; + if ( value & DescriptorSetLayoutCreateFlagBits::ePerStageNV ) + result += " PerStageNV |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( AccessFlags value ) + { + std::string result = "{"; + if ( value & AccessFlagBits::eIndirectCommandRead ) + result += " IndirectCommandRead |"; + if ( value & AccessFlagBits::eIndexRead ) + result += " IndexRead |"; + if ( value & AccessFlagBits::eVertexAttributeRead ) + result += " VertexAttributeRead |"; + if ( value & AccessFlagBits::eUniformRead ) + result += " UniformRead |"; + if ( value & AccessFlagBits::eInputAttachmentRead ) + result += " InputAttachmentRead |"; + if ( value & AccessFlagBits::eShaderRead ) + result += " ShaderRead |"; + if ( value & AccessFlagBits::eShaderWrite ) + result += " ShaderWrite |"; + if ( value & AccessFlagBits::eColorAttachmentRead ) + result += " ColorAttachmentRead |"; + if ( value & AccessFlagBits::eColorAttachmentWrite ) + result += " ColorAttachmentWrite |"; + if ( value & AccessFlagBits::eDepthStencilAttachmentRead ) + result += " DepthStencilAttachmentRead |"; + if ( value & AccessFlagBits::eDepthStencilAttachmentWrite ) + result += " DepthStencilAttachmentWrite |"; + if ( value & AccessFlagBits::eTransferRead ) + result += " TransferRead |"; + if ( value & AccessFlagBits::eTransferWrite ) + result += " TransferWrite |"; + if ( value & AccessFlagBits::eHostRead ) + result += " HostRead |"; + if ( value & AccessFlagBits::eHostWrite ) + result += " HostWrite |"; + if ( value & AccessFlagBits::eMemoryRead ) + result += " MemoryRead |"; + if ( value & AccessFlagBits::eMemoryWrite ) + result += " MemoryWrite |"; + if ( value & AccessFlagBits::eTransformFeedbackWriteEXT ) + result += " TransformFeedbackWriteEXT |"; + if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT ) + result += " TransformFeedbackCounterReadEXT |"; + if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT ) + result += " TransformFeedbackCounterWriteEXT |"; + if ( value & AccessFlagBits::eConditionalRenderingReadEXT ) + result += " ConditionalRenderingReadEXT |"; + if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) + result += " ColorAttachmentReadNoncoherentEXT |"; + if ( value & 
AccessFlagBits::eAccelerationStructureReadKHR ) + result += " AccelerationStructureReadKHR |"; + if ( value & AccessFlagBits::eAccelerationStructureWriteKHR ) + result += " AccelerationStructureWriteKHR |"; + if ( value & AccessFlagBits::eFragmentDensityMapReadEXT ) + result += " FragmentDensityMapReadEXT |"; + if ( value & AccessFlagBits::eFragmentShadingRateAttachmentReadKHR ) + result += " FragmentShadingRateAttachmentReadKHR |"; + if ( value & AccessFlagBits::eCommandPreprocessReadEXT ) + result += " CommandPreprocessReadEXT |"; + if ( value & AccessFlagBits::eCommandPreprocessWriteEXT ) + result += " CommandPreprocessWriteEXT |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "None"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value ) + { + std::string result = "{"; + if ( value & AttachmentDescriptionFlagBits::eMayAlias ) + result += " MayAlias |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( DependencyFlags value ) + { + std::string result = "{"; + if ( value & DependencyFlagBits::eByRegion ) + result += " ByRegion |"; + if ( value & DependencyFlagBits::eDeviceGroup ) + result += " DeviceGroup |"; + if ( value & DependencyFlagBits::eViewLocal ) + result += " ViewLocal |"; + if ( value & DependencyFlagBits::eFeedbackLoopEXT ) + result += " FeedbackLoopEXT |"; + if ( value & DependencyFlagBits::eQueueFamilyOwnershipTransferUseAllStagesKHR ) + result += " QueueFamilyOwnershipTransferUseAllStagesKHR |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value ) + { + std::string result = "{"; + if ( value & FramebufferCreateFlagBits::eImageless ) + result += " Imageless |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags value ) + { + std::string result = "{"; + if ( value & RenderPassCreateFlagBits::eTransformQCOM ) + result += " TransformQCOM |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value ) + { + std::string result = "{"; + if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX ) + result += " PerViewAttributesNVX |"; + if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) + result += " PerViewPositionXOnlyNVX |"; + if ( value & SubpassDescriptionFlagBits::eFragmentRegionQCOM ) + result += " FragmentRegionQCOM |"; + if ( value & SubpassDescriptionFlagBits::eShaderResolveQCOM ) + result += " ShaderResolveQCOM |"; + if ( value & SubpassDescriptionFlagBits::eTileShadingApronQCOM ) + result += " TileShadingApronQCOM |"; + if ( value & SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessEXT ) + result += " RasterizationOrderAttachmentColorAccessEXT |"; + if ( value & SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessEXT ) + result += " RasterizationOrderAttachmentDepthAccessEXT |"; + if ( value & SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessEXT ) + result += " RasterizationOrderAttachmentStencilAccessEXT |"; + if ( value & SubpassDescriptionFlagBits::eEnableLegacyDitheringEXT ) + result += " EnableLegacyDitheringEXT |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = 
"{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value ) + { + std::string result = "{"; + if ( value & CommandPoolCreateFlagBits::eTransient ) + result += " Transient |"; + if ( value & CommandPoolCreateFlagBits::eResetCommandBuffer ) + result += " ResetCommandBuffer |"; + if ( value & CommandPoolCreateFlagBits::eProtected ) + result += " Protected |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value ) + { + std::string result = "{"; + if ( value & CommandPoolResetFlagBits::eReleaseResources ) + result += " ReleaseResources |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value ) + { + std::string result = "{"; + if ( value & CommandBufferResetFlagBits::eReleaseResources ) + result += " ReleaseResources |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value ) + { + std::string result = "{"; + if ( value & CommandBufferUsageFlagBits::eOneTimeSubmit ) + result += " OneTimeSubmit |"; + if ( value & CommandBufferUsageFlagBits::eRenderPassContinue ) + result += " RenderPassContinue |"; + if ( value & CommandBufferUsageFlagBits::eSimultaneousUse ) + result += " SimultaneousUse |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value ) + { + std::string result = "{"; + if ( value & QueryControlFlagBits::ePrecise ) + result += " Precise |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value ) + { + std::string result = "{"; + if ( value & StencilFaceFlagBits::eFront ) + result += " Front |"; + if ( value & StencilFaceFlagBits::eBack ) + result += " Back |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_VERSION_1_1 === + + VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value ) + { + std::string result = "{"; + if ( value & SubgroupFeatureFlagBits::eBasic ) + result += " Basic |"; + if ( value & SubgroupFeatureFlagBits::eVote ) + result += " Vote |"; + if ( value & SubgroupFeatureFlagBits::eArithmetic ) + result += " Arithmetic |"; + if ( value & SubgroupFeatureFlagBits::eBallot ) + result += " Ballot |"; + if ( value & SubgroupFeatureFlagBits::eShuffle ) + result += " Shuffle |"; + if ( value & SubgroupFeatureFlagBits::eShuffleRelative ) + result += " ShuffleRelative |"; + if ( value & SubgroupFeatureFlagBits::eClustered ) + result += " Clustered |"; + if ( value & SubgroupFeatureFlagBits::eQuad ) + result += " Quad |"; + if ( value & SubgroupFeatureFlagBits::eRotate ) + result += " Rotate |"; + if ( value & SubgroupFeatureFlagBits::eRotateClustered ) + result += " RotateClustered |"; + if ( value & SubgroupFeatureFlagBits::ePartitionedNV ) + result += " PartitionedNV |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value ) + { + std::string result = "{"; + if ( value & PeerMemoryFeatureFlagBits::eCopySrc ) + result += " CopySrc |"; + if ( value & 
PeerMemoryFeatureFlagBits::eCopyDst ) + result += " CopyDst |"; + if ( value & PeerMemoryFeatureFlagBits::eGenericSrc ) + result += " GenericSrc |"; + if ( value & PeerMemoryFeatureFlagBits::eGenericDst ) + result += " GenericDst |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value ) + { + std::string result = "{"; + if ( value & MemoryAllocateFlagBits::eDeviceMask ) + result += " DeviceMask |"; + if ( value & MemoryAllocateFlagBits::eDeviceAddress ) + result += " DeviceAddress |"; + if ( value & MemoryAllocateFlagBits::eDeviceAddressCaptureReplay ) + result += " DeviceAddressCaptureReplay |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( CommandPoolTrimFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DescriptorUpdateTemplateCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value ) + { + std::string result = "{"; + if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) + result += " OpaqueFd |"; + if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) + result += " OpaqueWin32 |"; + if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) + result += " OpaqueWin32Kmt |"; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) + result += " D3D11Texture |"; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) + result += " D3D11TextureKmt |"; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) + result += " D3D12Heap |"; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) + result += " D3D12Resource |"; + if ( value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT ) + result += " DmaBufEXT |"; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + if ( value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID ) + result += " AndroidHardwareBufferANDROID |"; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + if ( value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) + result += " HostAllocationEXT |"; + if ( value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT ) + result += " HostMappedForeignMemoryEXT |"; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + if ( value & ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA ) + result += " ZirconVmoFUCHSIA |"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + if ( value & ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV ) + result += " RdmaAddressNV |"; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + if ( value & ExternalMemoryHandleTypeFlagBits::eScreenBufferQNX ) + result += " ScreenBufferQNX |"; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + if ( value & ExternalMemoryHandleTypeFlagBits::eMtlbufferEXT ) + result += " MtlbufferEXT |"; + if ( value & ExternalMemoryHandleTypeFlagBits::eMtltextureEXT ) + result += " MtltextureEXT |"; + if ( value & ExternalMemoryHandleTypeFlagBits::eMtlheapEXT ) + result += " MtlheapEXT |"; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value ) + { + std::string result = "{"; + if ( value & ExternalMemoryFeatureFlagBits::eDedicatedOnly ) + result += " DedicatedOnly |"; + if ( value & 
ExternalMemoryFeatureFlagBits::eExportable ) + result += " Exportable |"; + if ( value & ExternalMemoryFeatureFlagBits::eImportable ) + result += " Importable |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value ) + { + std::string result = "{"; + if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueFd ) + result += " OpaqueFd |"; + if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) + result += " OpaqueWin32 |"; + if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) + result += " OpaqueWin32Kmt |"; + if ( value & ExternalFenceHandleTypeFlagBits::eSyncFd ) + result += " SyncFd |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlags value ) + { + std::string result = "{"; + if ( value & ExternalFenceFeatureFlagBits::eExportable ) + result += " Exportable |"; + if ( value & ExternalFenceFeatureFlagBits::eImportable ) + result += " Importable |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value ) + { + std::string result = "{"; + if ( value & FenceImportFlagBits::eTemporary ) + result += " Temporary |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value ) + { + std::string result = "{"; + if ( value & SemaphoreImportFlagBits::eTemporary ) + result += " Temporary |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlags value ) + { + std::string result = "{"; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) + result += " OpaqueFd |"; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) + result += " OpaqueWin32 |"; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) + result += " OpaqueWin32Kmt |"; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) + result += " D3D12Fence |"; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd ) + result += " SyncFd |"; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + if ( value & ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA ) + result += " ZirconEventFUCHSIA |"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value ) + { + std::string result = "{"; + if ( value & ExternalSemaphoreFeatureFlagBits::eExportable ) + result += " Exportable |"; + if ( value & ExternalSemaphoreFeatureFlagBits::eImportable ) + result += " Importable |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_VERSION_1_2 === + + VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlags value ) + { + std::string result = "{"; + if ( value & DescriptorBindingFlagBits::eUpdateAfterBind ) + result += " UpdateAfterBind |"; + if ( value & DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) + result += " UpdateUnusedWhilePending |"; + if ( value & DescriptorBindingFlagBits::ePartiallyBound ) + result += " PartiallyBound |"; + if ( value & DescriptorBindingFlagBits::eVariableDescriptorCount ) 
+ result += " VariableDescriptorCount |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( ResolveModeFlags value ) + { + std::string result = "{"; + if ( value & ResolveModeFlagBits::eSampleZero ) + result += " SampleZero |"; + if ( value & ResolveModeFlagBits::eAverage ) + result += " Average |"; + if ( value & ResolveModeFlagBits::eMin ) + result += " Min |"; + if ( value & ResolveModeFlagBits::eMax ) + result += " Max |"; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + if ( value & ResolveModeFlagBits::eExternalFormatDownsampleANDROID ) + result += " ExternalFormatDownsampleANDROID |"; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "None"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlags value ) + { + std::string result = "{"; + if ( value & SemaphoreWaitFlagBits::eAny ) + result += " Any |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_VERSION_1_3 === + + VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlags value ) + { + std::string result = "{"; + if ( value & PipelineCreationFeedbackFlagBits::eValid ) + result += " Valid |"; + if ( value & PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit ) + result += " ApplicationPipelineCacheHit |"; + if ( value & PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration ) + result += " BasePipelineAcceleration |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlags value ) + { + std::string result = "{"; + if ( value & ToolPurposeFlagBits::eValidation ) + result += " Validation |"; + if ( value & ToolPurposeFlagBits::eProfiling ) + result += " Profiling |"; + if ( value & ToolPurposeFlagBits::eTracing ) + result += " Tracing |"; + if ( value & ToolPurposeFlagBits::eAdditionalFeatures ) + result += " AdditionalFeatures |"; + if ( value & ToolPurposeFlagBits::eModifyingFeatures ) + result += " ModifyingFeatures |"; + if ( value & ToolPurposeFlagBits::eDebugReportingEXT ) + result += " DebugReportingEXT |"; + if ( value & ToolPurposeFlagBits::eDebugMarkersEXT ) + result += " DebugMarkersEXT |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PrivateDataSlotCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags2 value ) + { + std::string result = "{"; + if ( value & PipelineStageFlagBits2::eTopOfPipe ) + result += " TopOfPipe |"; + if ( value & PipelineStageFlagBits2::eDrawIndirect ) + result += " DrawIndirect |"; + if ( value & PipelineStageFlagBits2::eVertexInput ) + result += " VertexInput |"; + if ( value & PipelineStageFlagBits2::eVertexShader ) + result += " VertexShader |"; + if ( value & PipelineStageFlagBits2::eTessellationControlShader ) + result += " TessellationControlShader |"; + if ( value & PipelineStageFlagBits2::eTessellationEvaluationShader ) + result += " TessellationEvaluationShader |"; + if ( value & PipelineStageFlagBits2::eGeometryShader ) + result += " GeometryShader |"; + if ( value & PipelineStageFlagBits2::eFragmentShader ) + result += " FragmentShader |"; + if ( value & PipelineStageFlagBits2::eEarlyFragmentTests ) + result += " EarlyFragmentTests |"; + if ( value & 
PipelineStageFlagBits2::eLateFragmentTests ) + result += " LateFragmentTests |"; + if ( value & PipelineStageFlagBits2::eColorAttachmentOutput ) + result += " ColorAttachmentOutput |"; + if ( value & PipelineStageFlagBits2::eComputeShader ) + result += " ComputeShader |"; + if ( value & PipelineStageFlagBits2::eAllTransfer ) + result += " AllTransfer |"; + if ( value & PipelineStageFlagBits2::eBottomOfPipe ) + result += " BottomOfPipe |"; + if ( value & PipelineStageFlagBits2::eHost ) + result += " Host |"; + if ( value & PipelineStageFlagBits2::eAllGraphics ) + result += " AllGraphics |"; + if ( value & PipelineStageFlagBits2::eAllCommands ) + result += " AllCommands |"; + if ( value & PipelineStageFlagBits2::eCopy ) + result += " Copy |"; + if ( value & PipelineStageFlagBits2::eResolve ) + result += " Resolve |"; + if ( value & PipelineStageFlagBits2::eBlit ) + result += " Blit |"; + if ( value & PipelineStageFlagBits2::eClear ) + result += " Clear |"; + if ( value & PipelineStageFlagBits2::eIndexInput ) + result += " IndexInput |"; + if ( value & PipelineStageFlagBits2::eVertexAttributeInput ) + result += " VertexAttributeInput |"; + if ( value & PipelineStageFlagBits2::ePreRasterizationShaders ) + result += " PreRasterizationShaders |"; + if ( value & PipelineStageFlagBits2::eVideoDecodeKHR ) + result += " VideoDecodeKHR |"; + if ( value & PipelineStageFlagBits2::eVideoEncodeKHR ) + result += " VideoEncodeKHR |"; + if ( value & PipelineStageFlagBits2::eTransformFeedbackEXT ) + result += " TransformFeedbackEXT |"; + if ( value & PipelineStageFlagBits2::eConditionalRenderingEXT ) + result += " ConditionalRenderingEXT |"; + if ( value & PipelineStageFlagBits2::eCommandPreprocessEXT ) + result += " CommandPreprocessEXT |"; + if ( value & PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR ) + result += " FragmentShadingRateAttachmentKHR |"; + if ( value & PipelineStageFlagBits2::eAccelerationStructureBuildKHR ) + result += " AccelerationStructureBuildKHR |"; + if ( value & PipelineStageFlagBits2::eRayTracingShaderKHR ) + result += " RayTracingShaderKHR |"; + if ( value & PipelineStageFlagBits2::eFragmentDensityProcessEXT ) + result += " FragmentDensityProcessEXT |"; + if ( value & PipelineStageFlagBits2::eTaskShaderEXT ) + result += " TaskShaderEXT |"; + if ( value & PipelineStageFlagBits2::eMeshShaderEXT ) + result += " MeshShaderEXT |"; + if ( value & PipelineStageFlagBits2::eSubpassShaderHUAWEI ) + result += " SubpassShaderHUAWEI |"; + if ( value & PipelineStageFlagBits2::eInvocationMaskHUAWEI ) + result += " InvocationMaskHUAWEI |"; + if ( value & PipelineStageFlagBits2::eAccelerationStructureCopyKHR ) + result += " AccelerationStructureCopyKHR |"; + if ( value & PipelineStageFlagBits2::eMicromapBuildEXT ) + result += " MicromapBuildEXT |"; + if ( value & PipelineStageFlagBits2::eClusterCullingShaderHUAWEI ) + result += " ClusterCullingShaderHUAWEI |"; + if ( value & PipelineStageFlagBits2::eOpticalFlowNV ) + result += " OpticalFlowNV |"; + if ( value & PipelineStageFlagBits2::eConvertCooperativeVectorMatrixNV ) + result += " ConvertCooperativeVectorMatrixNV |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "None"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( AccessFlags2 value ) + { + std::string result = "{"; + if ( value & AccessFlagBits2::eIndirectCommandRead ) + result += " IndirectCommandRead |"; + if ( value & AccessFlagBits2::eIndexRead ) + result += " IndexRead |"; + if ( value & AccessFlagBits2::eVertexAttributeRead ) + 
result += " VertexAttributeRead |"; + if ( value & AccessFlagBits2::eUniformRead ) + result += " UniformRead |"; + if ( value & AccessFlagBits2::eInputAttachmentRead ) + result += " InputAttachmentRead |"; + if ( value & AccessFlagBits2::eShaderRead ) + result += " ShaderRead |"; + if ( value & AccessFlagBits2::eShaderWrite ) + result += " ShaderWrite |"; + if ( value & AccessFlagBits2::eColorAttachmentRead ) + result += " ColorAttachmentRead |"; + if ( value & AccessFlagBits2::eColorAttachmentWrite ) + result += " ColorAttachmentWrite |"; + if ( value & AccessFlagBits2::eDepthStencilAttachmentRead ) + result += " DepthStencilAttachmentRead |"; + if ( value & AccessFlagBits2::eDepthStencilAttachmentWrite ) + result += " DepthStencilAttachmentWrite |"; + if ( value & AccessFlagBits2::eTransferRead ) + result += " TransferRead |"; + if ( value & AccessFlagBits2::eTransferWrite ) + result += " TransferWrite |"; + if ( value & AccessFlagBits2::eHostRead ) + result += " HostRead |"; + if ( value & AccessFlagBits2::eHostWrite ) + result += " HostWrite |"; + if ( value & AccessFlagBits2::eMemoryRead ) + result += " MemoryRead |"; + if ( value & AccessFlagBits2::eMemoryWrite ) + result += " MemoryWrite |"; + if ( value & AccessFlagBits2::eShaderSampledRead ) + result += " ShaderSampledRead |"; + if ( value & AccessFlagBits2::eShaderStorageRead ) + result += " ShaderStorageRead |"; + if ( value & AccessFlagBits2::eShaderStorageWrite ) + result += " ShaderStorageWrite |"; + if ( value & AccessFlagBits2::eVideoDecodeReadKHR ) + result += " VideoDecodeReadKHR |"; + if ( value & AccessFlagBits2::eVideoDecodeWriteKHR ) + result += " VideoDecodeWriteKHR |"; + if ( value & AccessFlagBits2::eVideoEncodeReadKHR ) + result += " VideoEncodeReadKHR |"; + if ( value & AccessFlagBits2::eVideoEncodeWriteKHR ) + result += " VideoEncodeWriteKHR |"; + if ( value & AccessFlagBits2::eShaderTileAttachmentReadQCOM ) + result += " ShaderTileAttachmentReadQCOM |"; + if ( value & AccessFlagBits2::eShaderTileAttachmentWriteQCOM ) + result += " ShaderTileAttachmentWriteQCOM |"; + if ( value & AccessFlagBits2::eTransformFeedbackWriteEXT ) + result += " TransformFeedbackWriteEXT |"; + if ( value & AccessFlagBits2::eTransformFeedbackCounterReadEXT ) + result += " TransformFeedbackCounterReadEXT |"; + if ( value & AccessFlagBits2::eTransformFeedbackCounterWriteEXT ) + result += " TransformFeedbackCounterWriteEXT |"; + if ( value & AccessFlagBits2::eConditionalRenderingReadEXT ) + result += " ConditionalRenderingReadEXT |"; + if ( value & AccessFlagBits2::eCommandPreprocessReadEXT ) + result += " CommandPreprocessReadEXT |"; + if ( value & AccessFlagBits2::eCommandPreprocessWriteEXT ) + result += " CommandPreprocessWriteEXT |"; + if ( value & AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR ) + result += " FragmentShadingRateAttachmentReadKHR |"; + if ( value & AccessFlagBits2::eAccelerationStructureReadKHR ) + result += " AccelerationStructureReadKHR |"; + if ( value & AccessFlagBits2::eAccelerationStructureWriteKHR ) + result += " AccelerationStructureWriteKHR |"; + if ( value & AccessFlagBits2::eFragmentDensityMapReadEXT ) + result += " FragmentDensityMapReadEXT |"; + if ( value & AccessFlagBits2::eColorAttachmentReadNoncoherentEXT ) + result += " ColorAttachmentReadNoncoherentEXT |"; + if ( value & AccessFlagBits2::eDescriptorBufferReadEXT ) + result += " DescriptorBufferReadEXT |"; + if ( value & AccessFlagBits2::eInvocationMaskReadHUAWEI ) + result += " InvocationMaskReadHUAWEI |"; + if ( value & 
AccessFlagBits2::eShaderBindingTableReadKHR ) + result += " ShaderBindingTableReadKHR |"; + if ( value & AccessFlagBits2::eMicromapReadEXT ) + result += " MicromapReadEXT |"; + if ( value & AccessFlagBits2::eMicromapWriteEXT ) + result += " MicromapWriteEXT |"; + if ( value & AccessFlagBits2::eOpticalFlowReadNV ) + result += " OpticalFlowReadNV |"; + if ( value & AccessFlagBits2::eOpticalFlowWriteNV ) + result += " OpticalFlowWriteNV |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "None"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( SubmitFlags value ) + { + std::string result = "{"; + if ( value & SubmitFlagBits::eProtected ) + result += " Protected |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( RenderingFlags value ) + { + std::string result = "{"; + if ( value & RenderingFlagBits::eContentsSecondaryCommandBuffers ) + result += " ContentsSecondaryCommandBuffers |"; + if ( value & RenderingFlagBits::eSuspending ) + result += " Suspending |"; + if ( value & RenderingFlagBits::eResuming ) + result += " Resuming |"; + if ( value & RenderingFlagBits::eEnableLegacyDitheringEXT ) + result += " EnableLegacyDitheringEXT |"; + if ( value & RenderingFlagBits::eContentsInlineKHR ) + result += " ContentsInlineKHR |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags2 value ) + { + std::string result = "{"; + if ( value & FormatFeatureFlagBits2::eSampledImage ) + result += " SampledImage |"; + if ( value & FormatFeatureFlagBits2::eStorageImage ) + result += " StorageImage |"; + if ( value & FormatFeatureFlagBits2::eStorageImageAtomic ) + result += " StorageImageAtomic |"; + if ( value & FormatFeatureFlagBits2::eUniformTexelBuffer ) + result += " UniformTexelBuffer |"; + if ( value & FormatFeatureFlagBits2::eStorageTexelBuffer ) + result += " StorageTexelBuffer |"; + if ( value & FormatFeatureFlagBits2::eStorageTexelBufferAtomic ) + result += " StorageTexelBufferAtomic |"; + if ( value & FormatFeatureFlagBits2::eVertexBuffer ) + result += " VertexBuffer |"; + if ( value & FormatFeatureFlagBits2::eColorAttachment ) + result += " ColorAttachment |"; + if ( value & FormatFeatureFlagBits2::eColorAttachmentBlend ) + result += " ColorAttachmentBlend |"; + if ( value & FormatFeatureFlagBits2::eDepthStencilAttachment ) + result += " DepthStencilAttachment |"; + if ( value & FormatFeatureFlagBits2::eBlitSrc ) + result += " BlitSrc |"; + if ( value & FormatFeatureFlagBits2::eBlitDst ) + result += " BlitDst |"; + if ( value & FormatFeatureFlagBits2::eSampledImageFilterLinear ) + result += " SampledImageFilterLinear |"; + if ( value & FormatFeatureFlagBits2::eTransferSrc ) + result += " TransferSrc |"; + if ( value & FormatFeatureFlagBits2::eTransferDst ) + result += " TransferDst |"; + if ( value & FormatFeatureFlagBits2::eSampledImageFilterMinmax ) + result += " SampledImageFilterMinmax |"; + if ( value & FormatFeatureFlagBits2::eMidpointChromaSamples ) + result += " MidpointChromaSamples |"; + if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter ) + result += " SampledImageYcbcrConversionLinearFilter |"; + if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter ) + result += " SampledImageYcbcrConversionSeparateReconstructionFilter |"; + if ( value & 
FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit ) + result += " SampledImageYcbcrConversionChromaReconstructionExplicit |"; + if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) + result += " SampledImageYcbcrConversionChromaReconstructionExplicitForceable |"; + if ( value & FormatFeatureFlagBits2::eDisjoint ) + result += " Disjoint |"; + if ( value & FormatFeatureFlagBits2::eCositedChromaSamples ) + result += " CositedChromaSamples |"; + if ( value & FormatFeatureFlagBits2::eStorageReadWithoutFormat ) + result += " StorageReadWithoutFormat |"; + if ( value & FormatFeatureFlagBits2::eStorageWriteWithoutFormat ) + result += " StorageWriteWithoutFormat |"; + if ( value & FormatFeatureFlagBits2::eSampledImageDepthComparison ) + result += " SampledImageDepthComparison |"; + if ( value & FormatFeatureFlagBits2::eSampledImageFilterCubic ) + result += " SampledImageFilterCubic |"; + if ( value & FormatFeatureFlagBits2::eHostImageTransfer ) + result += " HostImageTransfer |"; + if ( value & FormatFeatureFlagBits2::eVideoDecodeOutputKHR ) + result += " VideoDecodeOutputKHR |"; + if ( value & FormatFeatureFlagBits2::eVideoDecodeDpbKHR ) + result += " VideoDecodeDpbKHR |"; + if ( value & FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR ) + result += " AccelerationStructureVertexBufferKHR |"; + if ( value & FormatFeatureFlagBits2::eFragmentDensityMapEXT ) + result += " FragmentDensityMapEXT |"; + if ( value & FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR ) + result += " FragmentShadingRateAttachmentKHR |"; + if ( value & FormatFeatureFlagBits2::eVideoEncodeInputKHR ) + result += " VideoEncodeInputKHR |"; + if ( value & FormatFeatureFlagBits2::eVideoEncodeDpbKHR ) + result += " VideoEncodeDpbKHR |"; + if ( value & FormatFeatureFlagBits2::eAccelerationStructureRadiusBufferNV ) + result += " AccelerationStructureRadiusBufferNV |"; + if ( value & FormatFeatureFlagBits2::eLinearColorAttachmentNV ) + result += " LinearColorAttachmentNV |"; + if ( value & FormatFeatureFlagBits2::eWeightImageQCOM ) + result += " WeightImageQCOM |"; + if ( value & FormatFeatureFlagBits2::eWeightSampledImageQCOM ) + result += " WeightSampledImageQCOM |"; + if ( value & FormatFeatureFlagBits2::eBlockMatchingQCOM ) + result += " BlockMatchingQCOM |"; + if ( value & FormatFeatureFlagBits2::eBoxFilterSampledQCOM ) + result += " BoxFilterSampledQCOM |"; + if ( value & FormatFeatureFlagBits2::eOpticalFlowImageNV ) + result += " OpticalFlowImageNV |"; + if ( value & FormatFeatureFlagBits2::eOpticalFlowVectorNV ) + result += " OpticalFlowVectorNV |"; + if ( value & FormatFeatureFlagBits2::eOpticalFlowCostNV ) + result += " OpticalFlowCostNV |"; + if ( value & FormatFeatureFlagBits2::eVideoEncodeQuantizationDeltaMapKHR ) + result += " VideoEncodeQuantizationDeltaMapKHR |"; + if ( value & FormatFeatureFlagBits2::eVideoEncodeEmphasisMapKHR ) + result += " VideoEncodeEmphasisMapKHR |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_VERSION_1_4 === + + VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlags value ) + { + std::string result = "{"; + if ( value & MemoryUnmapFlagBits::eReserveEXT ) + result += " ReserveEXT |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags2 value ) + { + std::string result = "{"; + if ( value & 
PipelineCreateFlagBits2::eDisableOptimization ) + result += " DisableOptimization |"; + if ( value & PipelineCreateFlagBits2::eAllowDerivatives ) + result += " AllowDerivatives |"; + if ( value & PipelineCreateFlagBits2::eDerivative ) + result += " Derivative |"; + if ( value & PipelineCreateFlagBits2::eViewIndexFromDeviceIndex ) + result += " ViewIndexFromDeviceIndex |"; + if ( value & PipelineCreateFlagBits2::eDispatchBase ) + result += " DispatchBase |"; + if ( value & PipelineCreateFlagBits2::eFailOnPipelineCompileRequired ) + result += " FailOnPipelineCompileRequired |"; + if ( value & PipelineCreateFlagBits2::eEarlyReturnOnFailure ) + result += " EarlyReturnOnFailure |"; + if ( value & PipelineCreateFlagBits2::eNoProtectedAccess ) + result += " NoProtectedAccess |"; + if ( value & PipelineCreateFlagBits2::eProtectedAccessOnly ) + result += " ProtectedAccessOnly |"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & PipelineCreateFlagBits2::eExecutionGraphAMDX ) + result += " ExecutionGraphAMDX |"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & PipelineCreateFlagBits2::eRayTracingAllowSpheresAndLinearSweptSpheresNV ) + result += " RayTracingAllowSpheresAndLinearSweptSpheresNV |"; + if ( value & PipelineCreateFlagBits2::eEnableLegacyDitheringEXT ) + result += " EnableLegacyDitheringEXT |"; + if ( value & PipelineCreateFlagBits2::eDeferCompileNV ) + result += " DeferCompileNV |"; + if ( value & PipelineCreateFlagBits2::eCaptureStatisticsKHR ) + result += " CaptureStatisticsKHR |"; + if ( value & PipelineCreateFlagBits2::eCaptureInternalRepresentationsKHR ) + result += " CaptureInternalRepresentationsKHR |"; + if ( value & PipelineCreateFlagBits2::eLinkTimeOptimizationEXT ) + result += " LinkTimeOptimizationEXT |"; + if ( value & PipelineCreateFlagBits2::eRetainLinkTimeOptimizationInfoEXT ) + result += " RetainLinkTimeOptimizationInfoEXT |"; + if ( value & PipelineCreateFlagBits2::eLibraryKHR ) + result += " LibraryKHR |"; + if ( value & PipelineCreateFlagBits2::eRayTracingSkipTrianglesKHR ) + result += " RayTracingSkipTrianglesKHR |"; + if ( value & PipelineCreateFlagBits2::eRayTracingSkipAabbsKHR ) + result += " RayTracingSkipAabbsKHR |"; + if ( value & PipelineCreateFlagBits2::eRayTracingNoNullAnyHitShadersKHR ) + result += " RayTracingNoNullAnyHitShadersKHR |"; + if ( value & PipelineCreateFlagBits2::eRayTracingNoNullClosestHitShadersKHR ) + result += " RayTracingNoNullClosestHitShadersKHR |"; + if ( value & PipelineCreateFlagBits2::eRayTracingNoNullMissShadersKHR ) + result += " RayTracingNoNullMissShadersKHR |"; + if ( value & PipelineCreateFlagBits2::eRayTracingNoNullIntersectionShadersKHR ) + result += " RayTracingNoNullIntersectionShadersKHR |"; + if ( value & PipelineCreateFlagBits2::eRayTracingShaderGroupHandleCaptureReplayKHR ) + result += " RayTracingShaderGroupHandleCaptureReplayKHR |"; + if ( value & PipelineCreateFlagBits2::eIndirectBindableNV ) + result += " IndirectBindableNV |"; + if ( value & PipelineCreateFlagBits2::eRayTracingAllowMotionNV ) + result += " RayTracingAllowMotionNV |"; + if ( value & PipelineCreateFlagBits2::eRenderingFragmentShadingRateAttachmentKHR ) + result += " RenderingFragmentShadingRateAttachmentKHR |"; + if ( value & PipelineCreateFlagBits2::eRenderingFragmentDensityMapAttachmentEXT ) + result += " RenderingFragmentDensityMapAttachmentEXT |"; + if ( value & PipelineCreateFlagBits2::eRayTracingOpacityMicromapEXT ) + result += " RayTracingOpacityMicromapEXT |"; + if ( value & PipelineCreateFlagBits2::eColorAttachmentFeedbackLoopEXT ) 
+ result += " ColorAttachmentFeedbackLoopEXT |"; + if ( value & PipelineCreateFlagBits2::eDepthStencilAttachmentFeedbackLoopEXT ) + result += " DepthStencilAttachmentFeedbackLoopEXT |"; + if ( value & PipelineCreateFlagBits2::eRayTracingDisplacementMicromapNV ) + result += " RayTracingDisplacementMicromapNV |"; + if ( value & PipelineCreateFlagBits2::eDescriptorBufferEXT ) + result += " DescriptorBufferEXT |"; + if ( value & PipelineCreateFlagBits2::eDisallowOpacityMicromapARM ) + result += " DisallowOpacityMicromapARM |"; + if ( value & PipelineCreateFlagBits2::eCaptureDataKHR ) + result += " CaptureDataKHR |"; + if ( value & PipelineCreateFlagBits2::eIndirectBindableEXT ) + result += " IndirectBindableEXT |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags2 value ) + { + std::string result = "{"; + if ( value & BufferUsageFlagBits2::eTransferSrc ) + result += " TransferSrc |"; + if ( value & BufferUsageFlagBits2::eTransferDst ) + result += " TransferDst |"; + if ( value & BufferUsageFlagBits2::eUniformTexelBuffer ) + result += " UniformTexelBuffer |"; + if ( value & BufferUsageFlagBits2::eStorageTexelBuffer ) + result += " StorageTexelBuffer |"; + if ( value & BufferUsageFlagBits2::eUniformBuffer ) + result += " UniformBuffer |"; + if ( value & BufferUsageFlagBits2::eStorageBuffer ) + result += " StorageBuffer |"; + if ( value & BufferUsageFlagBits2::eIndexBuffer ) + result += " IndexBuffer |"; + if ( value & BufferUsageFlagBits2::eVertexBuffer ) + result += " VertexBuffer |"; + if ( value & BufferUsageFlagBits2::eIndirectBuffer ) + result += " IndirectBuffer |"; + if ( value & BufferUsageFlagBits2::eShaderDeviceAddress ) + result += " ShaderDeviceAddress |"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BufferUsageFlagBits2::eExecutionGraphScratchAMDX ) + result += " ExecutionGraphScratchAMDX |"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & BufferUsageFlagBits2::eConditionalRenderingEXT ) + result += " ConditionalRenderingEXT |"; + if ( value & BufferUsageFlagBits2::eShaderBindingTableKHR ) + result += " ShaderBindingTableKHR |"; + if ( value & BufferUsageFlagBits2::eTransformFeedbackBufferEXT ) + result += " TransformFeedbackBufferEXT |"; + if ( value & BufferUsageFlagBits2::eTransformFeedbackCounterBufferEXT ) + result += " TransformFeedbackCounterBufferEXT |"; + if ( value & BufferUsageFlagBits2::eVideoDecodeSrcKHR ) + result += " VideoDecodeSrcKHR |"; + if ( value & BufferUsageFlagBits2::eVideoDecodeDstKHR ) + result += " VideoDecodeDstKHR |"; + if ( value & BufferUsageFlagBits2::eVideoEncodeDstKHR ) + result += " VideoEncodeDstKHR |"; + if ( value & BufferUsageFlagBits2::eVideoEncodeSrcKHR ) + result += " VideoEncodeSrcKHR |"; + if ( value & BufferUsageFlagBits2::eAccelerationStructureBuildInputReadOnlyKHR ) + result += " AccelerationStructureBuildInputReadOnlyKHR |"; + if ( value & BufferUsageFlagBits2::eAccelerationStructureStorageKHR ) + result += " AccelerationStructureStorageKHR |"; + if ( value & BufferUsageFlagBits2::eSamplerDescriptorBufferEXT ) + result += " SamplerDescriptorBufferEXT |"; + if ( value & BufferUsageFlagBits2::eResourceDescriptorBufferEXT ) + result += " ResourceDescriptorBufferEXT |"; + if ( value & BufferUsageFlagBits2::ePushDescriptorsDescriptorBufferEXT ) + result += " PushDescriptorsDescriptorBufferEXT |"; + if ( value & BufferUsageFlagBits2::eMicromapBuildInputReadOnlyEXT ) + result += " MicromapBuildInputReadOnlyEXT |"; + 
if ( value & BufferUsageFlagBits2::eMicromapStorageEXT ) + result += " MicromapStorageEXT |"; + if ( value & BufferUsageFlagBits2::eTileMemoryQCOM ) + result += " TileMemoryQCOM |"; + if ( value & BufferUsageFlagBits2::ePreprocessBufferEXT ) + result += " PreprocessBufferEXT |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( HostImageCopyFlags value ) + { + std::string result = "{"; + if ( value & HostImageCopyFlagBits::eMemcpy ) + result += " Memcpy |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_KHR_surface === + + VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value ) + { + std::string result = "{"; + if ( value & CompositeAlphaFlagBitsKHR::eOpaque ) + result += " Opaque |"; + if ( value & CompositeAlphaFlagBitsKHR::ePreMultiplied ) + result += " PreMultiplied |"; + if ( value & CompositeAlphaFlagBitsKHR::ePostMultiplied ) + result += " PostMultiplied |"; + if ( value & CompositeAlphaFlagBitsKHR::eInherit ) + result += " Inherit |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_KHR_swapchain === + + VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value ) + { + std::string result = "{"; + if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) + result += " SplitInstanceBindRegions |"; + if ( value & SwapchainCreateFlagBitsKHR::eProtected ) + result += " Protected |"; + if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat ) + result += " MutableFormat |"; + if ( value & SwapchainCreateFlagBitsKHR::eDeferredMemoryAllocationEXT ) + result += " DeferredMemoryAllocationEXT |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value ) + { + std::string result = "{"; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocal ) + result += " Local |"; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eRemote ) + result += " Remote |"; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eSum ) + result += " Sum |"; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice ) + result += " LocalMultiDevice |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_KHR_display === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DisplayModeCreateFlagsKHR ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagsKHR value ) + { + std::string result = "{"; + if ( value & DisplayPlaneAlphaFlagBitsKHR::eOpaque ) + result += " Opaque |"; + if ( value & DisplayPlaneAlphaFlagBitsKHR::eGlobal ) + result += " Global |"; + if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) + result += " PerPixel |"; + if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied ) + result += " PerPixelPremultiplied |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DisplaySurfaceCreateFlagsKHR ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value ) + { + std::string result = "{"; + if ( value & SurfaceTransformFlagBitsKHR::eIdentity ) + result += " Identity |"; + if ( value & SurfaceTransformFlagBitsKHR::eRotate90 ) + result += " Rotate90 |"; + if ( 
value & SurfaceTransformFlagBitsKHR::eRotate180 ) + result += " Rotate180 |"; + if ( value & SurfaceTransformFlagBitsKHR::eRotate270 ) + result += " Rotate270 |"; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirror ) + result += " HorizontalMirror |"; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) + result += " HorizontalMirrorRotate90 |"; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) + result += " HorizontalMirrorRotate180 |"; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) + result += " HorizontalMirrorRotate270 |"; + if ( value & SurfaceTransformFlagBitsKHR::eInherit ) + result += " Inherit |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( XlibSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( XcbSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( WaylandSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AndroidSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( Win32SurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value ) + { + std::string result = "{"; + if ( value & DebugReportFlagBitsEXT::eInformation ) + result += " Information |"; + if ( value & DebugReportFlagBitsEXT::eWarning ) + result += " Warning |"; + if ( value & DebugReportFlagBitsEXT::ePerformanceWarning ) + result += " PerformanceWarning |"; + if ( value & DebugReportFlagBitsEXT::eError ) + result += " Error |"; + if ( value & DebugReportFlagBitsEXT::eDebug ) + result += " Debug |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_KHR_video_queue === + + VULKAN_HPP_INLINE std::string to_string( VideoCodecOperationFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH264 ) + result += " EncodeH264 |"; + if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH265 ) + result += " EncodeH265 |"; + if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH264 ) + result += " DecodeH264 |"; + if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH265 ) + result += " DecodeH265 |"; + if ( value & VideoCodecOperationFlagBitsKHR::eDecodeAv1 ) + result += " DecodeAv1 |"; + if ( value & VideoCodecOperationFlagBitsKHR::eEncodeAv1 ) + result += " EncodeAv1 |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "None"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( VideoChromaSubsamplingFlagsKHR value ) + { + std::string result = "{"; + if ( value & 
VideoChromaSubsamplingFlagBitsKHR::eMonochrome ) + result += " Monochrome |"; + if ( value & VideoChromaSubsamplingFlagBitsKHR::e420 ) + result += " 420 |"; + if ( value & VideoChromaSubsamplingFlagBitsKHR::e422 ) + result += " 422 |"; + if ( value & VideoChromaSubsamplingFlagBitsKHR::e444 ) + result += " 444 |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "Invalid"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( VideoComponentBitDepthFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoComponentBitDepthFlagBitsKHR::e8 ) + result += " 8 |"; + if ( value & VideoComponentBitDepthFlagBitsKHR::e10 ) + result += " 10 |"; + if ( value & VideoComponentBitDepthFlagBitsKHR::e12 ) + result += " 12 |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "Invalid"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( VideoCapabilityFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoCapabilityFlagBitsKHR::eProtectedContent ) + result += " ProtectedContent |"; + if ( value & VideoCapabilityFlagBitsKHR::eSeparateReferenceImages ) + result += " SeparateReferenceImages |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( VideoSessionCreateFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoSessionCreateFlagBitsKHR::eProtectedContent ) + result += " ProtectedContent |"; + if ( value & VideoSessionCreateFlagBitsKHR::eAllowEncodeParameterOptimizations ) + result += " AllowEncodeParameterOptimizations |"; + if ( value & VideoSessionCreateFlagBitsKHR::eInlineQueries ) + result += " InlineQueries |"; + if ( value & VideoSessionCreateFlagBitsKHR::eAllowEncodeQuantizationDeltaMap ) + result += " AllowEncodeQuantizationDeltaMap |"; + if ( value & VideoSessionCreateFlagBitsKHR::eAllowEncodeEmphasisMap ) + result += " AllowEncodeEmphasisMap |"; + if ( value & VideoSessionCreateFlagBitsKHR::eInlineSessionParameters ) + result += " InlineSessionParameters |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( VideoSessionParametersCreateFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoSessionParametersCreateFlagBitsKHR::eQuantizationMapCompatible ) + result += " QuantizationMapCompatible |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoBeginCodingFlagsKHR ) + { + return "{}"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEndCodingFlagsKHR ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoCodingControlFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoCodingControlFlagBitsKHR::eReset ) + result += " Reset |"; + if ( value & VideoCodingControlFlagBitsKHR::eEncodeRateControl ) + result += " EncodeRateControl |"; + if ( value & VideoCodingControlFlagBitsKHR::eEncodeQualityLevel ) + result += " EncodeQualityLevel |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_KHR_video_decode_queue === + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeCapabilityFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputCoincide ) + result += " DpbAndOutputCoincide |"; + if ( value & 
VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputDistinct ) + result += " DpbAndOutputDistinct |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeUsageFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoDecodeUsageFlagBitsKHR::eTranscoding ) + result += " Transcoding |"; + if ( value & VideoDecodeUsageFlagBitsKHR::eOffline ) + result += " Offline |"; + if ( value & VideoDecodeUsageFlagBitsKHR::eStreaming ) + result += " Streaming |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "Default"; + return result; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoDecodeFlagsKHR ) + { + return "{}"; + } + + //=== VK_EXT_transform_feedback === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineRasterizationStateStreamCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_KHR_video_encode_h264 === + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilityFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eHrdCompliance ) + result += " HrdCompliance |"; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::ePredictionWeightTableGenerated ) + result += " PredictionWeightTableGenerated |"; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eRowUnalignedSlice ) + result += " RowUnalignedSlice |"; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eDifferentSliceType ) + result += " DifferentSliceType |"; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL0List ) + result += " BFrameInL0List |"; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL1List ) + result += " BFrameInL1List |"; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp ) + result += " PerPictureTypeMinMaxQp |"; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::ePerSliceConstantQp ) + result += " PerSliceConstantQp |"; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eGeneratePrefixNalu ) + result += " GeneratePrefixNalu |"; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eMbQpDiffWraparound ) + result += " MbQpDiffWraparound |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264StdFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoEncodeH264StdFlagBitsKHR::eSeparateColorPlaneFlagSet ) + result += " SeparateColorPlaneFlagSet |"; + if ( value & VideoEncodeH264StdFlagBitsKHR::eQpprimeYZeroTransformBypassFlagSet ) + result += " QpprimeYZeroTransformBypassFlagSet |"; + if ( value & VideoEncodeH264StdFlagBitsKHR::eScalingMatrixPresentFlagSet ) + result += " ScalingMatrixPresentFlagSet |"; + if ( value & VideoEncodeH264StdFlagBitsKHR::eChromaQpIndexOffset ) + result += " ChromaQpIndexOffset |"; + if ( value & VideoEncodeH264StdFlagBitsKHR::eSecondChromaQpIndexOffset ) + result += " SecondChromaQpIndexOffset |"; + if ( value & VideoEncodeH264StdFlagBitsKHR::ePicInitQpMinus26 ) + result += " PicInitQpMinus26 |"; + if ( value & VideoEncodeH264StdFlagBitsKHR::eWeightedPredFlagSet ) + result += " WeightedPredFlagSet |"; + if ( value & VideoEncodeH264StdFlagBitsKHR::eWeightedBipredIdcExplicit ) + result += " WeightedBipredIdcExplicit |"; + if ( value & VideoEncodeH264StdFlagBitsKHR::eWeightedBipredIdcImplicit ) + result += " WeightedBipredIdcImplicit |"; + if ( value & 
VideoEncodeH264StdFlagBitsKHR::eTransform8X8ModeFlagSet ) + result += " Transform8X8ModeFlagSet |"; + if ( value & VideoEncodeH264StdFlagBitsKHR::eDirectSpatialMvPredFlagUnset ) + result += " DirectSpatialMvPredFlagUnset |"; + if ( value & VideoEncodeH264StdFlagBitsKHR::eEntropyCodingModeFlagUnset ) + result += " EntropyCodingModeFlagUnset |"; + if ( value & VideoEncodeH264StdFlagBitsKHR::eEntropyCodingModeFlagSet ) + result += " EntropyCodingModeFlagSet |"; + if ( value & VideoEncodeH264StdFlagBitsKHR::eDirect8X8InferenceFlagUnset ) + result += " Direct8X8InferenceFlagUnset |"; + if ( value & VideoEncodeH264StdFlagBitsKHR::eConstrainedIntraPredFlagSet ) + result += " ConstrainedIntraPredFlagSet |"; + if ( value & VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterDisabled ) + result += " DeblockingFilterDisabled |"; + if ( value & VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterEnabled ) + result += " DeblockingFilterEnabled |"; + if ( value & VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterPartial ) + result += " DeblockingFilterPartial |"; + if ( value & VideoEncodeH264StdFlagBitsKHR::eSliceQpDelta ) + result += " SliceQpDelta |"; + if ( value & VideoEncodeH264StdFlagBitsKHR::eDifferentSliceQpDelta ) + result += " DifferentSliceQpDelta |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264RateControlFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoEncodeH264RateControlFlagBitsKHR::eAttemptHrdCompliance ) + result += " AttemptHrdCompliance |"; + if ( value & VideoEncodeH264RateControlFlagBitsKHR::eRegularGop ) + result += " RegularGop |"; + if ( value & VideoEncodeH264RateControlFlagBitsKHR::eReferencePatternFlat ) + result += " ReferencePatternFlat |"; + if ( value & VideoEncodeH264RateControlFlagBitsKHR::eReferencePatternDyadic ) + result += " ReferencePatternDyadic |"; + if ( value & VideoEncodeH264RateControlFlagBitsKHR::eTemporalLayerPatternDyadic ) + result += " TemporalLayerPatternDyadic |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_KHR_video_encode_h265 === + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CapabilityFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eHrdCompliance ) + result += " HrdCompliance |"; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::ePredictionWeightTableGenerated ) + result += " PredictionWeightTableGenerated |"; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eRowUnalignedSliceSegment ) + result += " RowUnalignedSliceSegment |"; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eDifferentSliceSegmentType ) + result += " DifferentSliceSegmentType |"; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL0List ) + result += " BFrameInL0List |"; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL1List ) + result += " BFrameInL1List |"; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp ) + result += " PerPictureTypeMinMaxQp |"; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::ePerSliceSegmentConstantQp ) + result += " PerSliceSegmentConstantQp |"; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eMultipleTilesPerSliceSegment ) + result += " MultipleTilesPerSliceSegment |"; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eMultipleSliceSegmentsPerTile ) + result += " MultipleSliceSegmentsPerTile |"; + if ( value & 
VideoEncodeH265CapabilityFlagBitsKHR::eCuQpDiffWraparound ) + result += " CuQpDiffWraparound |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265StdFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoEncodeH265StdFlagBitsKHR::eSeparateColorPlaneFlagSet ) + result += " SeparateColorPlaneFlagSet |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::eSampleAdaptiveOffsetEnabledFlagSet ) + result += " SampleAdaptiveOffsetEnabledFlagSet |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::eScalingListDataPresentFlagSet ) + result += " ScalingListDataPresentFlagSet |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::ePcmEnabledFlagSet ) + result += " PcmEnabledFlagSet |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::eSpsTemporalMvpEnabledFlagSet ) + result += " SpsTemporalMvpEnabledFlagSet |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::eInitQpMinus26 ) + result += " InitQpMinus26 |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::eWeightedPredFlagSet ) + result += " WeightedPredFlagSet |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::eWeightedBipredFlagSet ) + result += " WeightedBipredFlagSet |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::eLog2ParallelMergeLevelMinus2 ) + result += " Log2ParallelMergeLevelMinus2 |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::eSignDataHidingEnabledFlagSet ) + result += " SignDataHidingEnabledFlagSet |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::eTransformSkipEnabledFlagSet ) + result += " TransformSkipEnabledFlagSet |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::eTransformSkipEnabledFlagUnset ) + result += " TransformSkipEnabledFlagUnset |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::ePpsSliceChromaQpOffsetsPresentFlagSet ) + result += " PpsSliceChromaQpOffsetsPresentFlagSet |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::eTransquantBypassEnabledFlagSet ) + result += " TransquantBypassEnabledFlagSet |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::eConstrainedIntraPredFlagSet ) + result += " ConstrainedIntraPredFlagSet |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::eEntropyCodingSyncEnabledFlagSet ) + result += " EntropyCodingSyncEnabledFlagSet |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::eDeblockingFilterOverrideEnabledFlagSet ) + result += " DeblockingFilterOverrideEnabledFlagSet |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::eDependentSliceSegmentsEnabledFlagSet ) + result += " DependentSliceSegmentsEnabledFlagSet |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::eDependentSliceSegmentFlagSet ) + result += " DependentSliceSegmentFlagSet |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::eSliceQpDelta ) + result += " SliceQpDelta |"; + if ( value & VideoEncodeH265StdFlagBitsKHR::eDifferentSliceQpDelta ) + result += " DifferentSliceQpDelta |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CtbSizeFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoEncodeH265CtbSizeFlagBitsKHR::e16 ) + result += " 16 |"; + if ( value & VideoEncodeH265CtbSizeFlagBitsKHR::e32 ) + result += " 32 |"; + if ( value & VideoEncodeH265CtbSizeFlagBitsKHR::e64 ) + result += " 64 |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265TransformBlockSizeFlagsKHR value ) + { + std::string result = "{"; + if ( value & 
VideoEncodeH265TransformBlockSizeFlagBitsKHR::e4 ) + result += " 4 |"; + if ( value & VideoEncodeH265TransformBlockSizeFlagBitsKHR::e8 ) + result += " 8 |"; + if ( value & VideoEncodeH265TransformBlockSizeFlagBitsKHR::e16 ) + result += " 16 |"; + if ( value & VideoEncodeH265TransformBlockSizeFlagBitsKHR::e32 ) + result += " 32 |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265RateControlFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoEncodeH265RateControlFlagBitsKHR::eAttemptHrdCompliance ) + result += " AttemptHrdCompliance |"; + if ( value & VideoEncodeH265RateControlFlagBitsKHR::eRegularGop ) + result += " RegularGop |"; + if ( value & VideoEncodeH265RateControlFlagBitsKHR::eReferencePatternFlat ) + result += " ReferencePatternFlat |"; + if ( value & VideoEncodeH265RateControlFlagBitsKHR::eReferencePatternDyadic ) + result += " ReferencePatternDyadic |"; + if ( value & VideoEncodeH265RateControlFlagBitsKHR::eTemporalSubLayerPatternDyadic ) + result += " TemporalSubLayerPatternDyadic |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_KHR_video_decode_h264 === + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264PictureLayoutFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedInterleavedLines ) + result += " InterlacedInterleavedLines |"; + if ( value & VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedSeparatePlanes ) + result += " InterlacedSeparatePlanes |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "Progressive"; + return result; + } + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( StreamDescriptorSurfaceCreateFlagsGGP ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value ) + { + std::string result = "{"; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) + result += " OpaqueWin32 |"; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) + result += " OpaqueWin32Kmt |"; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) + result += " D3D11Image |"; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt ) + result += " D3D11ImageKmt |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value ) + { + std::string result = "{"; + if ( value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) + result += " DedicatedOnly |"; + if ( value & ExternalMemoryFeatureFlagBitsNV::eExportable ) + result += " Exportable |"; + if ( value & ExternalMemoryFeatureFlagBitsNV::eImportable ) + result += " Importable |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ViSurfaceCreateFlagsNN ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_EXT_conditional_rendering === + + VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value ) + { + std::string result = "{"; + if ( value & 
ConditionalRenderingFlagBitsEXT::eInverted ) + result += " Inverted |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_EXT_display_surface_counter === + + VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value ) + { + std::string result = "{"; + if ( value & SurfaceCounterFlagBitsEXT::eVblank ) + result += " Vblank |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_NV_viewport_swizzle === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineViewportSwizzleStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_EXT_discard_rectangles === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineDiscardRectangleStateCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_EXT_conservative_rasterization === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineRasterizationConservativeStateCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_EXT_depth_clip_enable === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineRasterizationDepthClipStateCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_KHR_performance_query === + + VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagsKHR value ) + { + std::string result = "{"; + if ( value & PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) + result += " PerformanceImpacting |"; + if ( value & PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted ) + result += " ConcurrentlyImpacted |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AcquireProfilingLockFlagsKHR ) + { + return "{}"; + } + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( IOSSurfaceCreateFlagsMVK ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( MacOSSurfaceCreateFlagsMVK ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value ) + { + std::string result = "{"; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) + result += " Verbose |"; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) + result += " Info |"; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) + result += " Warning |"; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eError ) + result += " Error |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagsEXT value ) + { + std::string result = "{"; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) + result += " General |"; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::eValidation ) + result += " Validation |"; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) + result += " Performance |"; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding ) + result += " DeviceAddressBinding |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 
std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT ) + { + return "{}"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DebugUtilsMessengerCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_NV_fragment_coverage_to_color === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineCoverageToColorStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_KHR_acceleration_structure === + + VULKAN_HPP_INLINE std::string to_string( GeometryFlagsKHR value ) + { + std::string result = "{"; + if ( value & GeometryFlagBitsKHR::eOpaque ) + result += " Opaque |"; + if ( value & GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation ) + result += " NoDuplicateAnyHitInvocation |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsKHR value ) + { + std::string result = "{"; + if ( value & GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) + result += " TriangleFacingCullDisable |"; + if ( value & GeometryInstanceFlagBitsKHR::eTriangleFlipFacing ) + result += " TriangleFlipFacing |"; + if ( value & GeometryInstanceFlagBitsKHR::eForceOpaque ) + result += " ForceOpaque |"; + if ( value & GeometryInstanceFlagBitsKHR::eForceNoOpaque ) + result += " ForceNoOpaque |"; + if ( value & GeometryInstanceFlagBitsKHR::eForceOpacityMicromap2StateEXT ) + result += " ForceOpacityMicromap2StateEXT |"; + if ( value & GeometryInstanceFlagBitsKHR::eDisableOpacityMicromapsEXT ) + result += " DisableOpacityMicromapsEXT |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsKHR value ) + { + std::string result = "{"; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) + result += " AllowUpdate |"; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) + result += " AllowCompaction |"; + if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) + result += " PreferFastTrace |"; + if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) + result += " PreferFastBuild |"; + if ( value & BuildAccelerationStructureFlagBitsKHR::eLowMemory ) + result += " LowMemory |"; + if ( value & BuildAccelerationStructureFlagBitsKHR::eMotionNV ) + result += " MotionNV |"; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapUpdateEXT ) + result += " AllowOpacityMicromapUpdateEXT |"; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowDisableOpacityMicromapsEXT ) + result += " AllowDisableOpacityMicromapsEXT |"; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapDataUpdateEXT ) + result += " AllowOpacityMicromapDataUpdateEXT |"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowDisplacementMicromapUpdateNV ) + result += " AllowDisplacementMicromapUpdateNV |"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowDataAccess ) + result += " AllowDataAccess |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCreateFlagsKHR value ) + { + std::string result = "{"; + if ( value & AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay ) + result += " DeviceAddressCaptureReplay |"; + if ( value & 
AccelerationStructureCreateFlagBitsKHR::eDescriptorBufferCaptureReplayEXT ) + result += " DescriptorBufferCaptureReplayEXT |"; + if ( value & AccelerationStructureCreateFlagBitsKHR::eMotionNV ) + result += " MotionNV |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_NV_framebuffer_mixed_samples === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineCoverageModulationStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_EXT_validation_cache === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ValidationCacheCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_AMD_pipeline_compiler_control === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineCompilerControlFlagsAMD ) + { + return "{}"; + } + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ImagePipeSurfaceCreateFlagsFUCHSIA ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( MetalSurfaceCreateFlagsEXT ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_AMD_shader_core_properties2 === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ShaderCorePropertiesFlagsAMD ) + { + return "{}"; + } + + //=== VK_NV_coverage_reduction_mode === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineCoverageReductionStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_EXT_headless_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( HeadlessSurfaceCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_EXT_surface_maintenance1 === + + VULKAN_HPP_INLINE std::string to_string( PresentScalingFlagsEXT value ) + { + std::string result = "{"; + if ( value & PresentScalingFlagBitsEXT::eOneToOne ) + result += " OneToOne |"; + if ( value & PresentScalingFlagBitsEXT::eAspectRatioStretch ) + result += " AspectRatioStretch |"; + if ( value & PresentScalingFlagBitsEXT::eStretch ) + result += " Stretch |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( PresentGravityFlagsEXT value ) + { + std::string result = "{"; + if ( value & PresentGravityFlagBitsEXT::eMin ) + result += " Min |"; + if ( value & PresentGravityFlagBitsEXT::eMax ) + result += " Max |"; + if ( value & PresentGravityFlagBitsEXT::eCentered ) + result += " Centered |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_NV_device_generated_commands === + + VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagsNV value ) + { + std::string result = "{"; + if ( value & IndirectStateFlagBitsNV::eFlagFrontface ) + result += " FlagFrontface |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNV value ) + { + std::string result = "{"; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) + result += " ExplicitPreprocess |"; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) + result += " IndexedSequences |"; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences ) + result += " 
UnorderedSequences |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_EXT_device_memory_report === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DeviceMemoryReportFlagsEXT ) + { + return "{}"; + } + + //=== VK_KHR_video_encode_queue === + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoEncodeFlagBitsKHR::eWithQuantizationDeltaMap ) + result += " WithQuantizationDeltaMap |"; + if ( value & VideoEncodeFlagBitsKHR::eWithEmphasisMap ) + result += " WithEmphasisMap |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeCapabilityFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes ) + result += " PrecedingExternallyEncodedBytes |"; + if ( value & VideoEncodeCapabilityFlagBitsKHR::eInsufficientBitstreamBufferRangeDetection ) + result += " InsufficientBitstreamBufferRangeDetection |"; + if ( value & VideoEncodeCapabilityFlagBitsKHR::eQuantizationDeltaMap ) + result += " QuantizationDeltaMap |"; + if ( value & VideoEncodeCapabilityFlagBitsKHR::eEmphasisMap ) + result += " EmphasisMap |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeFeedbackFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoEncodeFeedbackFlagBitsKHR::eBitstreamBufferOffset ) + result += " BitstreamBufferOffset |"; + if ( value & VideoEncodeFeedbackFlagBitsKHR::eBitstreamBytesWritten ) + result += " BitstreamBytesWritten |"; + if ( value & VideoEncodeFeedbackFlagBitsKHR::eBitstreamHasOverrides ) + result += " BitstreamHasOverrides |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeUsageFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoEncodeUsageFlagBitsKHR::eTranscoding ) + result += " Transcoding |"; + if ( value & VideoEncodeUsageFlagBitsKHR::eStreaming ) + result += " Streaming |"; + if ( value & VideoEncodeUsageFlagBitsKHR::eRecording ) + result += " Recording |"; + if ( value & VideoEncodeUsageFlagBitsKHR::eConferencing ) + result += " Conferencing |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "Default"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeContentFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoEncodeContentFlagBitsKHR::eCamera ) + result += " Camera |"; + if ( value & VideoEncodeContentFlagBitsKHR::eDesktop ) + result += " Desktop |"; + if ( value & VideoEncodeContentFlagBitsKHR::eRendered ) + result += " Rendered |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "Default"; + return result; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeRateControlFlagsKHR ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlModeFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoEncodeRateControlModeFlagBitsKHR::eDisabled ) + result += " Disabled |"; + if ( value & VideoEncodeRateControlModeFlagBitsKHR::eCbr ) + result += " Cbr |"; + if ( value & VideoEncodeRateControlModeFlagBitsKHR::eVbr ) + result += " Vbr |"; + + if ( result.size() > 1 ) + result.back() = 
'}'; + else + result = "Default"; + return result; + } + + //=== VK_NV_device_diagnostics_config === + + VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagsNV value ) + { + std::string result = "{"; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) + result += " EnableShaderDebugInfo |"; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) + result += " EnableResourceTracking |"; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) + result += " EnableAutomaticCheckpoints |"; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderErrorReporting ) + result += " EnableShaderErrorReporting |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_QCOM_tile_shading === + + VULKAN_HPP_INLINE std::string to_string( TileShadingRenderPassFlagsQCOM value ) + { + std::string result = "{"; + if ( value & TileShadingRenderPassFlagBitsQCOM::eEnable ) + result += " Enable |"; + if ( value & TileShadingRenderPassFlagBitsQCOM::ePerTileExecution ) + result += " PerTileExecution |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + + VULKAN_HPP_INLINE std::string to_string( ExportMetalObjectTypeFlagsEXT value ) + { + std::string result = "{"; + if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalDevice ) + result += " MetalDevice |"; + if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalCommandQueue ) + result += " MetalCommandQueue |"; + if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalBuffer ) + result += " MetalBuffer |"; + if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalTexture ) + result += " MetalTexture |"; + if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalIosurface ) + result += " MetalIosurface |"; + if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalSharedEvent ) + result += " MetalSharedEvent |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_graphics_pipeline_library === + + VULKAN_HPP_INLINE std::string to_string( GraphicsPipelineLibraryFlagsEXT value ) + { + std::string result = "{"; + if ( value & GraphicsPipelineLibraryFlagBitsEXT::eVertexInputInterface ) + result += " VertexInputInterface |"; + if ( value & GraphicsPipelineLibraryFlagBitsEXT::ePreRasterizationShaders ) + result += " PreRasterizationShaders |"; + if ( value & GraphicsPipelineLibraryFlagBitsEXT::eFragmentShader ) + result += " FragmentShader |"; + if ( value & GraphicsPipelineLibraryFlagBitsEXT::eFragmentOutputInterface ) + result += " FragmentOutputInterface |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_NV_ray_tracing_motion_blur === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AccelerationStructureMotionInfoFlagsNV ) + { + return "{}"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AccelerationStructureMotionInstanceFlagsNV ) + { + return "{}"; + } + + //=== VK_EXT_image_compression_control === + + VULKAN_HPP_INLINE std::string to_string( ImageCompressionFlagsEXT value ) + { + std::string result = "{"; + if ( value & ImageCompressionFlagBitsEXT::eFixedRateDefault ) + result += " FixedRateDefault |"; + if ( value & ImageCompressionFlagBitsEXT::eFixedRateExplicit ) + result += " FixedRateExplicit |"; + if ( 
value & ImageCompressionFlagBitsEXT::eDisabled ) + result += " Disabled |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "Default"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( ImageCompressionFixedRateFlagsEXT value ) + { + std::string result = "{"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e1Bpc ) + result += " 1Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e2Bpc ) + result += " 2Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e3Bpc ) + result += " 3Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e4Bpc ) + result += " 4Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e5Bpc ) + result += " 5Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e6Bpc ) + result += " 6Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e7Bpc ) + result += " 7Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e8Bpc ) + result += " 8Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e9Bpc ) + result += " 9Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e10Bpc ) + result += " 10Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e11Bpc ) + result += " 11Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e12Bpc ) + result += " 12Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e13Bpc ) + result += " 13Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e14Bpc ) + result += " 14Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e15Bpc ) + result += " 15Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e16Bpc ) + result += " 16Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e17Bpc ) + result += " 17Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e18Bpc ) + result += " 18Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e19Bpc ) + result += " 19Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e20Bpc ) + result += " 20Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e21Bpc ) + result += " 21Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e22Bpc ) + result += " 22Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e23Bpc ) + result += " 23Bpc |"; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e24Bpc ) + result += " 24Bpc |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "None"; + return result; + } + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DirectFBSurfaceCreateFlagsEXT ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_device_address_binding_report === + + VULKAN_HPP_INLINE std::string to_string( DeviceAddressBindingFlagsEXT value ) + { + std::string result = "{"; + if ( value & DeviceAddressBindingFlagBitsEXT::eInternalObject ) + result += " InternalObject |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ImageFormatConstraintsFlagsFUCHSIA ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( ImageConstraintsInfoFlagsFUCHSIA value ) + { + std::string result = "{"; + if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely ) + result += " CpuReadRarely |"; + if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften ) + 
result += " CpuReadOften |"; + if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely ) + result += " CpuWriteRarely |"; + if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften ) + result += " CpuWriteOften |"; + if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional ) + result += " ProtectedOptional |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_frame_boundary === + + VULKAN_HPP_INLINE std::string to_string( FrameBoundaryFlagsEXT value ) + { + std::string result = "{"; + if ( value & FrameBoundaryFlagBitsEXT::eFrameEnd ) + result += " FrameEnd |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ScreenSurfaceCreateFlagsQNX ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_opacity_micromap === + + VULKAN_HPP_INLINE std::string to_string( BuildMicromapFlagsEXT value ) + { + std::string result = "{"; + if ( value & BuildMicromapFlagBitsEXT::ePreferFastTrace ) + result += " PreferFastTrace |"; + if ( value & BuildMicromapFlagBitsEXT::ePreferFastBuild ) + result += " PreferFastBuild |"; + if ( value & BuildMicromapFlagBitsEXT::eAllowCompaction ) + result += " AllowCompaction |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( MicromapCreateFlagsEXT value ) + { + std::string result = "{"; + if ( value & MicromapCreateFlagBitsEXT::eDeviceAddressCaptureReplay ) + result += " DeviceAddressCaptureReplay |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_ARM_scheduling_controls === + + VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceSchedulingControlsFlagsARM value ) + { + std::string result = "{"; + if ( value & PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount ) + result += " ShaderCoreCount |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_NV_memory_decompression === + + VULKAN_HPP_INLINE std::string to_string( MemoryDecompressionMethodFlagsNV value ) + { + std::string result = "{"; + if ( value & MemoryDecompressionMethodFlagBitsNV::eGdeflate10 ) + result += " Gdeflate10 |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_LUNARG_direct_driver_loading === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DirectDriverLoadingFlagsLUNARG ) + { + return "{}"; + } + + //=== VK_NV_optical_flow === + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowUsageFlagsNV value ) + { + std::string result = "{"; + if ( value & OpticalFlowUsageFlagBitsNV::eInput ) + result += " Input |"; + if ( value & OpticalFlowUsageFlagBitsNV::eOutput ) + result += " Output |"; + if ( value & OpticalFlowUsageFlagBitsNV::eHint ) + result += " Hint |"; + if ( value & OpticalFlowUsageFlagBitsNV::eCost ) + result += " Cost |"; + if ( value & OpticalFlowUsageFlagBitsNV::eGlobalFlow ) + result += " GlobalFlow |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "Unknown"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowGridSizeFlagsNV value ) + { + std::string result = "{"; + if ( value & 
OpticalFlowGridSizeFlagBitsNV::e1X1 ) + result += " 1X1 |"; + if ( value & OpticalFlowGridSizeFlagBitsNV::e2X2 ) + result += " 2X2 |"; + if ( value & OpticalFlowGridSizeFlagBitsNV::e4X4 ) + result += " 4X4 |"; + if ( value & OpticalFlowGridSizeFlagBitsNV::e8X8 ) + result += " 8X8 |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "Unknown"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowSessionCreateFlagsNV value ) + { + std::string result = "{"; + if ( value & OpticalFlowSessionCreateFlagBitsNV::eEnableHint ) + result += " EnableHint |"; + if ( value & OpticalFlowSessionCreateFlagBitsNV::eEnableCost ) + result += " EnableCost |"; + if ( value & OpticalFlowSessionCreateFlagBitsNV::eEnableGlobalFlow ) + result += " EnableGlobalFlow |"; + if ( value & OpticalFlowSessionCreateFlagBitsNV::eAllowRegions ) + result += " AllowRegions |"; + if ( value & OpticalFlowSessionCreateFlagBitsNV::eBothDirections ) + result += " BothDirections |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowExecuteFlagsNV value ) + { + std::string result = "{"; + if ( value & OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints ) + result += " DisableTemporalHints |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_EXT_shader_object === + + VULKAN_HPP_INLINE std::string to_string( ShaderCreateFlagsEXT value ) + { + std::string result = "{"; + if ( value & ShaderCreateFlagBitsEXT::eLinkStage ) + result += " LinkStage |"; + if ( value & ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize ) + result += " AllowVaryingSubgroupSize |"; + if ( value & ShaderCreateFlagBitsEXT::eRequireFullSubgroups ) + result += " RequireFullSubgroups |"; + if ( value & ShaderCreateFlagBitsEXT::eNoTaskShader ) + result += " NoTaskShader |"; + if ( value & ShaderCreateFlagBitsEXT::eDispatchBase ) + result += " DispatchBase |"; + if ( value & ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment ) + result += " FragmentShadingRateAttachment |"; + if ( value & ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment ) + result += " FragmentDensityMapAttachment |"; + if ( value & ShaderCreateFlagBitsEXT::eIndirectBindable ) + result += " IndirectBindable |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_KHR_video_encode_av1 === + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1CapabilityFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoEncodeAV1CapabilityFlagBitsKHR::ePerRateControlGroupMinMaxQIndex ) + result += " PerRateControlGroupMinMaxQIndex |"; + if ( value & VideoEncodeAV1CapabilityFlagBitsKHR::eGenerateObuExtensionHeader ) + result += " GenerateObuExtensionHeader |"; + if ( value & VideoEncodeAV1CapabilityFlagBitsKHR::ePrimaryReferenceCdfOnly ) + result += " PrimaryReferenceCdfOnly |"; + if ( value & VideoEncodeAV1CapabilityFlagBitsKHR::eFrameSizeOverride ) + result += " FrameSizeOverride |"; + if ( value & VideoEncodeAV1CapabilityFlagBitsKHR::eMotionVectorScaling ) + result += " MotionVectorScaling |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1StdFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoEncodeAV1StdFlagBitsKHR::eUniformTileSpacingFlagSet ) + result += " UniformTileSpacingFlagSet |"; + if ( value 
& VideoEncodeAV1StdFlagBitsKHR::eSkipModePresentUnset ) + result += " SkipModePresentUnset |"; + if ( value & VideoEncodeAV1StdFlagBitsKHR::ePrimaryRefFrame ) + result += " PrimaryRefFrame |"; + if ( value & VideoEncodeAV1StdFlagBitsKHR::eDeltaQ ) + result += " DeltaQ |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1SuperblockSizeFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoEncodeAV1SuperblockSizeFlagBitsKHR::e64 ) + result += " 64 |"; + if ( value & VideoEncodeAV1SuperblockSizeFlagBitsKHR::e128 ) + result += " 128 |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1RateControlFlagsKHR value ) + { + std::string result = "{"; + if ( value & VideoEncodeAV1RateControlFlagBitsKHR::eRegularGop ) + result += " RegularGop |"; + if ( value & VideoEncodeAV1RateControlFlagBitsKHR::eTemporalLayerPatternDyadic ) + result += " TemporalLayerPatternDyadic |"; + if ( value & VideoEncodeAV1RateControlFlagBitsKHR::eReferencePatternFlat ) + result += " ReferencePatternFlat |"; + if ( value & VideoEncodeAV1RateControlFlagBitsKHR::eReferencePatternDyadic ) + result += " ReferencePatternDyadic |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_NV_cluster_acceleration_structure === + + VULKAN_HPP_INLINE std::string to_string( ClusterAccelerationStructureClusterFlagsNV value ) + { + std::string result = "{"; + if ( value & ClusterAccelerationStructureClusterFlagBitsNV::eAllowDisableOpacityMicromaps ) + result += " AllowDisableOpacityMicromaps |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( ClusterAccelerationStructureGeometryFlagsNV value ) + { + std::string result = "{"; + if ( value & ClusterAccelerationStructureGeometryFlagBitsNV::eCullDisable ) + result += " CullDisable |"; + if ( value & ClusterAccelerationStructureGeometryFlagBitsNV::eNoDuplicateAnyhitInvocation ) + result += " NoDuplicateAnyhitInvocation |"; + if ( value & ClusterAccelerationStructureGeometryFlagBitsNV::eOpaque ) + result += " Opaque |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( ClusterAccelerationStructureAddressResolutionFlagsNV value ) + { + std::string result = "{"; + if ( value & ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedDstImplicitData ) + result += " IndirectedDstImplicitData |"; + if ( value & ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedScratchData ) + result += " IndirectedScratchData |"; + if ( value & ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedDstAddressArray ) + result += " IndirectedDstAddressArray |"; + if ( value & ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedDstSizesArray ) + result += " IndirectedDstSizesArray |"; + if ( value & ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedSrcInfosArray ) + result += " IndirectedSrcInfosArray |"; + if ( value & ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedSrcInfosCount ) + result += " IndirectedSrcInfosCount |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( 
ClusterAccelerationStructureIndexFormatFlagsNV value ) + { + std::string result = "{"; + if ( value & ClusterAccelerationStructureIndexFormatFlagBitsNV::e8 ) + result += " 8 |"; + if ( value & ClusterAccelerationStructureIndexFormatFlagBitsNV::e16 ) + result += " 16 |"; + if ( value & ClusterAccelerationStructureIndexFormatFlagBitsNV::e32 ) + result += " 32 |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_NV_partitioned_acceleration_structure === + + VULKAN_HPP_INLINE std::string to_string( PartitionedAccelerationStructureInstanceFlagsNV value ) + { + std::string result = "{"; + if ( value & PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagTriangleFacingCullDisable ) + result += " FlagTriangleFacingCullDisable |"; + if ( value & PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagTriangleFlipFacing ) + result += " FlagTriangleFlipFacing |"; + if ( value & PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagForceOpaque ) + result += " FlagForceOpaque |"; + if ( value & PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagForceNoOpaque ) + result += " FlagForceNoOpaque |"; + if ( value & PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagEnableExplicitBoundingBox ) + result += " FlagEnableExplicitBoundingBox |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_EXT_device_generated_commands === + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsEXT value ) + { + std::string result = "{"; + if ( value & IndirectCommandsLayoutUsageFlagBitsEXT::eExplicitPreprocess ) + result += " ExplicitPreprocess |"; + if ( value & IndirectCommandsLayoutUsageFlagBitsEXT::eUnorderedSequences ) + result += " UnorderedSequences |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsInputModeFlagsEXT value ) + { + std::string result = "{"; + if ( value & IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer ) + result += " VulkanIndexBuffer |"; + if ( value & IndirectCommandsInputModeFlagBitsEXT::eDxgiIndexBuffer ) + result += " DxgiIndexBuffer |"; + + if ( result.size() > 1 ) + result.back() = '}'; + else + result = "{}"; + return result; + } + + //=== VK_KHR_maintenance8 === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AccessFlags3KHR ) + { + return "{}"; + } + + //======================= + //=== ENUMs to_string === + //======================= + + VULKAN_HPP_INLINE std::string toHexString( uint32_t value ) + { +#if __cpp_lib_format + return std::format( "{:x}", value ); +#else + std::stringstream stream; + stream << std::hex << value; + return stream.str(); +#endif + } + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( Result value ) + { + switch ( value ) + { + case Result::eSuccess: return "Success"; + case Result::eNotReady: return "NotReady"; + case Result::eTimeout: return "Timeout"; + case Result::eEventSet: return "EventSet"; + case Result::eEventReset: return "EventReset"; + case Result::eIncomplete: return "Incomplete"; + case Result::eErrorOutOfHostMemory: return "ErrorOutOfHostMemory"; + case Result::eErrorOutOfDeviceMemory: return "ErrorOutOfDeviceMemory"; + case Result::eErrorInitializationFailed: return "ErrorInitializationFailed"; + case Result::eErrorDeviceLost: return "ErrorDeviceLost"; + case Result::eErrorMemoryMapFailed: return 
"ErrorMemoryMapFailed"; + case Result::eErrorLayerNotPresent: return "ErrorLayerNotPresent"; + case Result::eErrorExtensionNotPresent: return "ErrorExtensionNotPresent"; + case Result::eErrorFeatureNotPresent: return "ErrorFeatureNotPresent"; + case Result::eErrorIncompatibleDriver: return "ErrorIncompatibleDriver"; + case Result::eErrorTooManyObjects: return "ErrorTooManyObjects"; + case Result::eErrorFormatNotSupported: return "ErrorFormatNotSupported"; + case Result::eErrorFragmentedPool: return "ErrorFragmentedPool"; + case Result::eErrorUnknown: return "ErrorUnknown"; + case Result::eErrorOutOfPoolMemory: return "ErrorOutOfPoolMemory"; + case Result::eErrorInvalidExternalHandle: return "ErrorInvalidExternalHandle"; + case Result::eErrorFragmentation: return "ErrorFragmentation"; + case Result::eErrorInvalidOpaqueCaptureAddress: return "ErrorInvalidOpaqueCaptureAddress"; + case Result::ePipelineCompileRequired: return "PipelineCompileRequired"; + case Result::eErrorNotPermitted: return "ErrorNotPermitted"; + case Result::eErrorSurfaceLostKHR: return "ErrorSurfaceLostKHR"; + case Result::eErrorNativeWindowInUseKHR: return "ErrorNativeWindowInUseKHR"; + case Result::eSuboptimalKHR: return "SuboptimalKHR"; + case Result::eErrorOutOfDateKHR: return "ErrorOutOfDateKHR"; + case Result::eErrorIncompatibleDisplayKHR: return "ErrorIncompatibleDisplayKHR"; + case Result::eErrorValidationFailedEXT: return "ErrorValidationFailedEXT"; + case Result::eErrorInvalidShaderNV: return "ErrorInvalidShaderNV"; + case Result::eErrorImageUsageNotSupportedKHR: return "ErrorImageUsageNotSupportedKHR"; + case Result::eErrorVideoPictureLayoutNotSupportedKHR: return "ErrorVideoPictureLayoutNotSupportedKHR"; + case Result::eErrorVideoProfileOperationNotSupportedKHR: return "ErrorVideoProfileOperationNotSupportedKHR"; + case Result::eErrorVideoProfileFormatNotSupportedKHR: return "ErrorVideoProfileFormatNotSupportedKHR"; + case Result::eErrorVideoProfileCodecNotSupportedKHR: return "ErrorVideoProfileCodecNotSupportedKHR"; + case Result::eErrorVideoStdVersionNotSupportedKHR: return "ErrorVideoStdVersionNotSupportedKHR"; + case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case Result::eErrorFullScreenExclusiveModeLostEXT: return "ErrorFullScreenExclusiveModeLostEXT"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case Result::eThreadIdleKHR: return "ThreadIdleKHR"; + case Result::eThreadDoneKHR: return "ThreadDoneKHR"; + case Result::eOperationDeferredKHR: return "OperationDeferredKHR"; + case Result::eOperationNotDeferredKHR: return "OperationNotDeferredKHR"; + case Result::eErrorInvalidVideoStdParametersKHR: return "ErrorInvalidVideoStdParametersKHR"; + case Result::eErrorCompressionExhaustedEXT: return "ErrorCompressionExhaustedEXT"; + case Result::eIncompatibleShaderBinaryEXT: return "IncompatibleShaderBinaryEXT"; + case Result::ePipelineBinaryMissingKHR: return "PipelineBinaryMissingKHR"; + case Result::eErrorNotEnoughSpaceKHR: return "ErrorNotEnoughSpaceKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( StructureType value ) + { + switch ( value ) + { + case StructureType::eApplicationInfo: return "ApplicationInfo"; + case StructureType::eInstanceCreateInfo: return "InstanceCreateInfo"; + case StructureType::eDeviceQueueCreateInfo: return "DeviceQueueCreateInfo"; + case 
StructureType::eDeviceCreateInfo: return "DeviceCreateInfo"; + case StructureType::eSubmitInfo: return "SubmitInfo"; + case StructureType::eMemoryAllocateInfo: return "MemoryAllocateInfo"; + case StructureType::eMappedMemoryRange: return "MappedMemoryRange"; + case StructureType::eBindSparseInfo: return "BindSparseInfo"; + case StructureType::eFenceCreateInfo: return "FenceCreateInfo"; + case StructureType::eSemaphoreCreateInfo: return "SemaphoreCreateInfo"; + case StructureType::eEventCreateInfo: return "EventCreateInfo"; + case StructureType::eQueryPoolCreateInfo: return "QueryPoolCreateInfo"; + case StructureType::eBufferCreateInfo: return "BufferCreateInfo"; + case StructureType::eBufferViewCreateInfo: return "BufferViewCreateInfo"; + case StructureType::eImageCreateInfo: return "ImageCreateInfo"; + case StructureType::eImageViewCreateInfo: return "ImageViewCreateInfo"; + case StructureType::eShaderModuleCreateInfo: return "ShaderModuleCreateInfo"; + case StructureType::ePipelineCacheCreateInfo: return "PipelineCacheCreateInfo"; + case StructureType::ePipelineShaderStageCreateInfo: return "PipelineShaderStageCreateInfo"; + case StructureType::ePipelineVertexInputStateCreateInfo: return "PipelineVertexInputStateCreateInfo"; + case StructureType::ePipelineInputAssemblyStateCreateInfo: return "PipelineInputAssemblyStateCreateInfo"; + case StructureType::ePipelineTessellationStateCreateInfo: return "PipelineTessellationStateCreateInfo"; + case StructureType::ePipelineViewportStateCreateInfo: return "PipelineViewportStateCreateInfo"; + case StructureType::ePipelineRasterizationStateCreateInfo: return "PipelineRasterizationStateCreateInfo"; + case StructureType::ePipelineMultisampleStateCreateInfo: return "PipelineMultisampleStateCreateInfo"; + case StructureType::ePipelineDepthStencilStateCreateInfo: return "PipelineDepthStencilStateCreateInfo"; + case StructureType::ePipelineColorBlendStateCreateInfo: return "PipelineColorBlendStateCreateInfo"; + case StructureType::ePipelineDynamicStateCreateInfo: return "PipelineDynamicStateCreateInfo"; + case StructureType::eGraphicsPipelineCreateInfo: return "GraphicsPipelineCreateInfo"; + case StructureType::eComputePipelineCreateInfo: return "ComputePipelineCreateInfo"; + case StructureType::ePipelineLayoutCreateInfo: return "PipelineLayoutCreateInfo"; + case StructureType::eSamplerCreateInfo: return "SamplerCreateInfo"; + case StructureType::eDescriptorSetLayoutCreateInfo: return "DescriptorSetLayoutCreateInfo"; + case StructureType::eDescriptorPoolCreateInfo: return "DescriptorPoolCreateInfo"; + case StructureType::eDescriptorSetAllocateInfo: return "DescriptorSetAllocateInfo"; + case StructureType::eWriteDescriptorSet: return "WriteDescriptorSet"; + case StructureType::eCopyDescriptorSet: return "CopyDescriptorSet"; + case StructureType::eFramebufferCreateInfo: return "FramebufferCreateInfo"; + case StructureType::eRenderPassCreateInfo: return "RenderPassCreateInfo"; + case StructureType::eCommandPoolCreateInfo: return "CommandPoolCreateInfo"; + case StructureType::eCommandBufferAllocateInfo: return "CommandBufferAllocateInfo"; + case StructureType::eCommandBufferInheritanceInfo: return "CommandBufferInheritanceInfo"; + case StructureType::eCommandBufferBeginInfo: return "CommandBufferBeginInfo"; + case StructureType::eRenderPassBeginInfo: return "RenderPassBeginInfo"; + case StructureType::eBufferMemoryBarrier: return "BufferMemoryBarrier"; + case StructureType::eImageMemoryBarrier: return "ImageMemoryBarrier"; + case 
StructureType::eMemoryBarrier: return "MemoryBarrier"; + case StructureType::eLoaderInstanceCreateInfo: return "LoaderInstanceCreateInfo"; + case StructureType::eLoaderDeviceCreateInfo: return "LoaderDeviceCreateInfo"; + case StructureType::ePhysicalDeviceSubgroupProperties: return "PhysicalDeviceSubgroupProperties"; + case StructureType::eBindBufferMemoryInfo: return "BindBufferMemoryInfo"; + case StructureType::eBindImageMemoryInfo: return "BindImageMemoryInfo"; + case StructureType::ePhysicalDevice16BitStorageFeatures: return "PhysicalDevice16BitStorageFeatures"; + case StructureType::eMemoryDedicatedRequirements: return "MemoryDedicatedRequirements"; + case StructureType::eMemoryDedicatedAllocateInfo: return "MemoryDedicatedAllocateInfo"; + case StructureType::eMemoryAllocateFlagsInfo: return "MemoryAllocateFlagsInfo"; + case StructureType::eDeviceGroupRenderPassBeginInfo: return "DeviceGroupRenderPassBeginInfo"; + case StructureType::eDeviceGroupCommandBufferBeginInfo: return "DeviceGroupCommandBufferBeginInfo"; + case StructureType::eDeviceGroupSubmitInfo: return "DeviceGroupSubmitInfo"; + case StructureType::eDeviceGroupBindSparseInfo: return "DeviceGroupBindSparseInfo"; + case StructureType::eBindBufferMemoryDeviceGroupInfo: return "BindBufferMemoryDeviceGroupInfo"; + case StructureType::eBindImageMemoryDeviceGroupInfo: return "BindImageMemoryDeviceGroupInfo"; + case StructureType::ePhysicalDeviceGroupProperties: return "PhysicalDeviceGroupProperties"; + case StructureType::eDeviceGroupDeviceCreateInfo: return "DeviceGroupDeviceCreateInfo"; + case StructureType::eBufferMemoryRequirementsInfo2: return "BufferMemoryRequirementsInfo2"; + case StructureType::eImageMemoryRequirementsInfo2: return "ImageMemoryRequirementsInfo2"; + case StructureType::eImageSparseMemoryRequirementsInfo2: return "ImageSparseMemoryRequirementsInfo2"; + case StructureType::eMemoryRequirements2: return "MemoryRequirements2"; + case StructureType::eSparseImageMemoryRequirements2: return "SparseImageMemoryRequirements2"; + case StructureType::ePhysicalDeviceFeatures2: return "PhysicalDeviceFeatures2"; + case StructureType::ePhysicalDeviceProperties2: return "PhysicalDeviceProperties2"; + case StructureType::eFormatProperties2: return "FormatProperties2"; + case StructureType::eImageFormatProperties2: return "ImageFormatProperties2"; + case StructureType::ePhysicalDeviceImageFormatInfo2: return "PhysicalDeviceImageFormatInfo2"; + case StructureType::eQueueFamilyProperties2: return "QueueFamilyProperties2"; + case StructureType::ePhysicalDeviceMemoryProperties2: return "PhysicalDeviceMemoryProperties2"; + case StructureType::eSparseImageFormatProperties2: return "SparseImageFormatProperties2"; + case StructureType::ePhysicalDeviceSparseImageFormatInfo2: return "PhysicalDeviceSparseImageFormatInfo2"; + case StructureType::ePhysicalDevicePointClippingProperties: return "PhysicalDevicePointClippingProperties"; + case StructureType::eRenderPassInputAttachmentAspectCreateInfo: return "RenderPassInputAttachmentAspectCreateInfo"; + case StructureType::eImageViewUsageCreateInfo: return "ImageViewUsageCreateInfo"; + case StructureType::ePipelineTessellationDomainOriginStateCreateInfo: return "PipelineTessellationDomainOriginStateCreateInfo"; + case StructureType::eRenderPassMultiviewCreateInfo: return "RenderPassMultiviewCreateInfo"; + case StructureType::ePhysicalDeviceMultiviewFeatures: return "PhysicalDeviceMultiviewFeatures"; + case StructureType::ePhysicalDeviceMultiviewProperties: return 
"PhysicalDeviceMultiviewProperties"; + case StructureType::ePhysicalDeviceVariablePointersFeatures: return "PhysicalDeviceVariablePointersFeatures"; + case StructureType::eProtectedSubmitInfo: return "ProtectedSubmitInfo"; + case StructureType::ePhysicalDeviceProtectedMemoryFeatures: return "PhysicalDeviceProtectedMemoryFeatures"; + case StructureType::ePhysicalDeviceProtectedMemoryProperties: return "PhysicalDeviceProtectedMemoryProperties"; + case StructureType::eDeviceQueueInfo2: return "DeviceQueueInfo2"; + case StructureType::eSamplerYcbcrConversionCreateInfo: return "SamplerYcbcrConversionCreateInfo"; + case StructureType::eSamplerYcbcrConversionInfo: return "SamplerYcbcrConversionInfo"; + case StructureType::eBindImagePlaneMemoryInfo: return "BindImagePlaneMemoryInfo"; + case StructureType::eImagePlaneMemoryRequirementsInfo: return "ImagePlaneMemoryRequirementsInfo"; + case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures: return "PhysicalDeviceSamplerYcbcrConversionFeatures"; + case StructureType::eSamplerYcbcrConversionImageFormatProperties: return "SamplerYcbcrConversionImageFormatProperties"; + case StructureType::eDescriptorUpdateTemplateCreateInfo: return "DescriptorUpdateTemplateCreateInfo"; + case StructureType::ePhysicalDeviceExternalImageFormatInfo: return "PhysicalDeviceExternalImageFormatInfo"; + case StructureType::eExternalImageFormatProperties: return "ExternalImageFormatProperties"; + case StructureType::ePhysicalDeviceExternalBufferInfo: return "PhysicalDeviceExternalBufferInfo"; + case StructureType::eExternalBufferProperties: return "ExternalBufferProperties"; + case StructureType::ePhysicalDeviceIdProperties: return "PhysicalDeviceIdProperties"; + case StructureType::eExternalMemoryBufferCreateInfo: return "ExternalMemoryBufferCreateInfo"; + case StructureType::eExternalMemoryImageCreateInfo: return "ExternalMemoryImageCreateInfo"; + case StructureType::eExportMemoryAllocateInfo: return "ExportMemoryAllocateInfo"; + case StructureType::ePhysicalDeviceExternalFenceInfo: return "PhysicalDeviceExternalFenceInfo"; + case StructureType::eExternalFenceProperties: return "ExternalFenceProperties"; + case StructureType::eExportFenceCreateInfo: return "ExportFenceCreateInfo"; + case StructureType::eExportSemaphoreCreateInfo: return "ExportSemaphoreCreateInfo"; + case StructureType::ePhysicalDeviceExternalSemaphoreInfo: return "PhysicalDeviceExternalSemaphoreInfo"; + case StructureType::eExternalSemaphoreProperties: return "ExternalSemaphoreProperties"; + case StructureType::ePhysicalDeviceMaintenance3Properties: return "PhysicalDeviceMaintenance3Properties"; + case StructureType::eDescriptorSetLayoutSupport: return "DescriptorSetLayoutSupport"; + case StructureType::ePhysicalDeviceShaderDrawParametersFeatures: return "PhysicalDeviceShaderDrawParametersFeatures"; + case StructureType::ePhysicalDeviceVulkan11Features: return "PhysicalDeviceVulkan11Features"; + case StructureType::ePhysicalDeviceVulkan11Properties: return "PhysicalDeviceVulkan11Properties"; + case StructureType::ePhysicalDeviceVulkan12Features: return "PhysicalDeviceVulkan12Features"; + case StructureType::ePhysicalDeviceVulkan12Properties: return "PhysicalDeviceVulkan12Properties"; + case StructureType::eImageFormatListCreateInfo: return "ImageFormatListCreateInfo"; + case StructureType::eAttachmentDescription2: return "AttachmentDescription2"; + case StructureType::eAttachmentReference2: return "AttachmentReference2"; + case StructureType::eSubpassDescription2: return "SubpassDescription2"; + 
case StructureType::eSubpassDependency2: return "SubpassDependency2"; + case StructureType::eRenderPassCreateInfo2: return "RenderPassCreateInfo2"; + case StructureType::eSubpassBeginInfo: return "SubpassBeginInfo"; + case StructureType::eSubpassEndInfo: return "SubpassEndInfo"; + case StructureType::ePhysicalDevice8BitStorageFeatures: return "PhysicalDevice8BitStorageFeatures"; + case StructureType::ePhysicalDeviceDriverProperties: return "PhysicalDeviceDriverProperties"; + case StructureType::ePhysicalDeviceShaderAtomicInt64Features: return "PhysicalDeviceShaderAtomicInt64Features"; + case StructureType::ePhysicalDeviceShaderFloat16Int8Features: return "PhysicalDeviceShaderFloat16Int8Features"; + case StructureType::ePhysicalDeviceFloatControlsProperties: return "PhysicalDeviceFloatControlsProperties"; + case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo: return "DescriptorSetLayoutBindingFlagsCreateInfo"; + case StructureType::ePhysicalDeviceDescriptorIndexingFeatures: return "PhysicalDeviceDescriptorIndexingFeatures"; + case StructureType::ePhysicalDeviceDescriptorIndexingProperties: return "PhysicalDeviceDescriptorIndexingProperties"; + case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo: return "DescriptorSetVariableDescriptorCountAllocateInfo"; + case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport: return "DescriptorSetVariableDescriptorCountLayoutSupport"; + case StructureType::ePhysicalDeviceDepthStencilResolveProperties: return "PhysicalDeviceDepthStencilResolveProperties"; + case StructureType::eSubpassDescriptionDepthStencilResolve: return "SubpassDescriptionDepthStencilResolve"; + case StructureType::ePhysicalDeviceScalarBlockLayoutFeatures: return "PhysicalDeviceScalarBlockLayoutFeatures"; + case StructureType::eImageStencilUsageCreateInfo: return "ImageStencilUsageCreateInfo"; + case StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties: return "PhysicalDeviceSamplerFilterMinmaxProperties"; + case StructureType::eSamplerReductionModeCreateInfo: return "SamplerReductionModeCreateInfo"; + case StructureType::ePhysicalDeviceVulkanMemoryModelFeatures: return "PhysicalDeviceVulkanMemoryModelFeatures"; + case StructureType::ePhysicalDeviceImagelessFramebufferFeatures: return "PhysicalDeviceImagelessFramebufferFeatures"; + case StructureType::eFramebufferAttachmentsCreateInfo: return "FramebufferAttachmentsCreateInfo"; + case StructureType::eFramebufferAttachmentImageInfo: return "FramebufferAttachmentImageInfo"; + case StructureType::eRenderPassAttachmentBeginInfo: return "RenderPassAttachmentBeginInfo"; + case StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures: return "PhysicalDeviceUniformBufferStandardLayoutFeatures"; + case StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures: return "PhysicalDeviceShaderSubgroupExtendedTypesFeatures"; + case StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures: return "PhysicalDeviceSeparateDepthStencilLayoutsFeatures"; + case StructureType::eAttachmentReferenceStencilLayout: return "AttachmentReferenceStencilLayout"; + case StructureType::eAttachmentDescriptionStencilLayout: return "AttachmentDescriptionStencilLayout"; + case StructureType::ePhysicalDeviceHostQueryResetFeatures: return "PhysicalDeviceHostQueryResetFeatures"; + case StructureType::ePhysicalDeviceTimelineSemaphoreFeatures: return "PhysicalDeviceTimelineSemaphoreFeatures"; + case StructureType::ePhysicalDeviceTimelineSemaphoreProperties: return 
"PhysicalDeviceTimelineSemaphoreProperties"; + case StructureType::eSemaphoreTypeCreateInfo: return "SemaphoreTypeCreateInfo"; + case StructureType::eTimelineSemaphoreSubmitInfo: return "TimelineSemaphoreSubmitInfo"; + case StructureType::eSemaphoreWaitInfo: return "SemaphoreWaitInfo"; + case StructureType::eSemaphoreSignalInfo: return "SemaphoreSignalInfo"; + case StructureType::ePhysicalDeviceBufferDeviceAddressFeatures: return "PhysicalDeviceBufferDeviceAddressFeatures"; + case StructureType::eBufferDeviceAddressInfo: return "BufferDeviceAddressInfo"; + case StructureType::eBufferOpaqueCaptureAddressCreateInfo: return "BufferOpaqueCaptureAddressCreateInfo"; + case StructureType::eMemoryOpaqueCaptureAddressAllocateInfo: return "MemoryOpaqueCaptureAddressAllocateInfo"; + case StructureType::eDeviceMemoryOpaqueCaptureAddressInfo: return "DeviceMemoryOpaqueCaptureAddressInfo"; + case StructureType::ePhysicalDeviceVulkan13Features: return "PhysicalDeviceVulkan13Features"; + case StructureType::ePhysicalDeviceVulkan13Properties: return "PhysicalDeviceVulkan13Properties"; + case StructureType::ePipelineCreationFeedbackCreateInfo: return "PipelineCreationFeedbackCreateInfo"; + case StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures: return "PhysicalDeviceShaderTerminateInvocationFeatures"; + case StructureType::ePhysicalDeviceToolProperties: return "PhysicalDeviceToolProperties"; + case StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures: return "PhysicalDeviceShaderDemoteToHelperInvocationFeatures"; + case StructureType::ePhysicalDevicePrivateDataFeatures: return "PhysicalDevicePrivateDataFeatures"; + case StructureType::eDevicePrivateDataCreateInfo: return "DevicePrivateDataCreateInfo"; + case StructureType::ePrivateDataSlotCreateInfo: return "PrivateDataSlotCreateInfo"; + case StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures: return "PhysicalDevicePipelineCreationCacheControlFeatures"; + case StructureType::eMemoryBarrier2: return "MemoryBarrier2"; + case StructureType::eBufferMemoryBarrier2: return "BufferMemoryBarrier2"; + case StructureType::eImageMemoryBarrier2: return "ImageMemoryBarrier2"; + case StructureType::eDependencyInfo: return "DependencyInfo"; + case StructureType::eSubmitInfo2: return "SubmitInfo2"; + case StructureType::eSemaphoreSubmitInfo: return "SemaphoreSubmitInfo"; + case StructureType::eCommandBufferSubmitInfo: return "CommandBufferSubmitInfo"; + case StructureType::ePhysicalDeviceSynchronization2Features: return "PhysicalDeviceSynchronization2Features"; + case StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures: return "PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures"; + case StructureType::ePhysicalDeviceImageRobustnessFeatures: return "PhysicalDeviceImageRobustnessFeatures"; + case StructureType::eCopyBufferInfo2: return "CopyBufferInfo2"; + case StructureType::eCopyImageInfo2: return "CopyImageInfo2"; + case StructureType::eCopyBufferToImageInfo2: return "CopyBufferToImageInfo2"; + case StructureType::eCopyImageToBufferInfo2: return "CopyImageToBufferInfo2"; + case StructureType::eBlitImageInfo2: return "BlitImageInfo2"; + case StructureType::eResolveImageInfo2: return "ResolveImageInfo2"; + case StructureType::eBufferCopy2: return "BufferCopy2"; + case StructureType::eImageCopy2: return "ImageCopy2"; + case StructureType::eImageBlit2: return "ImageBlit2"; + case StructureType::eBufferImageCopy2: return "BufferImageCopy2"; + case StructureType::eImageResolve2: return "ImageResolve2"; + case 
StructureType::ePhysicalDeviceSubgroupSizeControlProperties: return "PhysicalDeviceSubgroupSizeControlProperties"; + case StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo: return "PipelineShaderStageRequiredSubgroupSizeCreateInfo"; + case StructureType::ePhysicalDeviceSubgroupSizeControlFeatures: return "PhysicalDeviceSubgroupSizeControlFeatures"; + case StructureType::ePhysicalDeviceInlineUniformBlockFeatures: return "PhysicalDeviceInlineUniformBlockFeatures"; + case StructureType::ePhysicalDeviceInlineUniformBlockProperties: return "PhysicalDeviceInlineUniformBlockProperties"; + case StructureType::eWriteDescriptorSetInlineUniformBlock: return "WriteDescriptorSetInlineUniformBlock"; + case StructureType::eDescriptorPoolInlineUniformBlockCreateInfo: return "DescriptorPoolInlineUniformBlockCreateInfo"; + case StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures: return "PhysicalDeviceTextureCompressionAstcHdrFeatures"; + case StructureType::eRenderingInfo: return "RenderingInfo"; + case StructureType::eRenderingAttachmentInfo: return "RenderingAttachmentInfo"; + case StructureType::ePipelineRenderingCreateInfo: return "PipelineRenderingCreateInfo"; + case StructureType::ePhysicalDeviceDynamicRenderingFeatures: return "PhysicalDeviceDynamicRenderingFeatures"; + case StructureType::eCommandBufferInheritanceRenderingInfo: return "CommandBufferInheritanceRenderingInfo"; + case StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures: return "PhysicalDeviceShaderIntegerDotProductFeatures"; + case StructureType::ePhysicalDeviceShaderIntegerDotProductProperties: return "PhysicalDeviceShaderIntegerDotProductProperties"; + case StructureType::ePhysicalDeviceTexelBufferAlignmentProperties: return "PhysicalDeviceTexelBufferAlignmentProperties"; + case StructureType::eFormatProperties3: return "FormatProperties3"; + case StructureType::ePhysicalDeviceMaintenance4Features: return "PhysicalDeviceMaintenance4Features"; + case StructureType::ePhysicalDeviceMaintenance4Properties: return "PhysicalDeviceMaintenance4Properties"; + case StructureType::eDeviceBufferMemoryRequirements: return "DeviceBufferMemoryRequirements"; + case StructureType::eDeviceImageMemoryRequirements: return "DeviceImageMemoryRequirements"; + case StructureType::ePhysicalDeviceVulkan14Features: return "PhysicalDeviceVulkan14Features"; + case StructureType::ePhysicalDeviceVulkan14Properties: return "PhysicalDeviceVulkan14Properties"; + case StructureType::eDeviceQueueGlobalPriorityCreateInfo: return "DeviceQueueGlobalPriorityCreateInfo"; + case StructureType::ePhysicalDeviceGlobalPriorityQueryFeatures: return "PhysicalDeviceGlobalPriorityQueryFeatures"; + case StructureType::eQueueFamilyGlobalPriorityProperties: return "QueueFamilyGlobalPriorityProperties"; + case StructureType::ePhysicalDeviceShaderSubgroupRotateFeatures: return "PhysicalDeviceShaderSubgroupRotateFeatures"; + case StructureType::ePhysicalDeviceShaderFloatControls2Features: return "PhysicalDeviceShaderFloatControls2Features"; + case StructureType::ePhysicalDeviceShaderExpectAssumeFeatures: return "PhysicalDeviceShaderExpectAssumeFeatures"; + case StructureType::ePhysicalDeviceLineRasterizationFeatures: return "PhysicalDeviceLineRasterizationFeatures"; + case StructureType::ePipelineRasterizationLineStateCreateInfo: return "PipelineRasterizationLineStateCreateInfo"; + case StructureType::ePhysicalDeviceLineRasterizationProperties: return "PhysicalDeviceLineRasterizationProperties"; + case 
StructureType::ePhysicalDeviceVertexAttributeDivisorProperties: return "PhysicalDeviceVertexAttributeDivisorProperties"; + case StructureType::ePipelineVertexInputDivisorStateCreateInfo: return "PipelineVertexInputDivisorStateCreateInfo"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorFeatures: return "PhysicalDeviceVertexAttributeDivisorFeatures"; + case StructureType::ePhysicalDeviceIndexTypeUint8Features: return "PhysicalDeviceIndexTypeUint8Features"; + case StructureType::eMemoryMapInfo: return "MemoryMapInfo"; + case StructureType::eMemoryUnmapInfo: return "MemoryUnmapInfo"; + case StructureType::ePhysicalDeviceMaintenance5Features: return "PhysicalDeviceMaintenance5Features"; + case StructureType::ePhysicalDeviceMaintenance5Properties: return "PhysicalDeviceMaintenance5Properties"; + case StructureType::eRenderingAreaInfo: return "RenderingAreaInfo"; + case StructureType::eDeviceImageSubresourceInfo: return "DeviceImageSubresourceInfo"; + case StructureType::eSubresourceLayout2: return "SubresourceLayout2"; + case StructureType::eImageSubresource2: return "ImageSubresource2"; + case StructureType::ePipelineCreateFlags2CreateInfo: return "PipelineCreateFlags2CreateInfo"; + case StructureType::eBufferUsageFlags2CreateInfo: return "BufferUsageFlags2CreateInfo"; + case StructureType::ePhysicalDevicePushDescriptorProperties: return "PhysicalDevicePushDescriptorProperties"; + case StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeatures: return "PhysicalDeviceDynamicRenderingLocalReadFeatures"; + case StructureType::eRenderingAttachmentLocationInfo: return "RenderingAttachmentLocationInfo"; + case StructureType::eRenderingInputAttachmentIndexInfo: return "RenderingInputAttachmentIndexInfo"; + case StructureType::ePhysicalDeviceMaintenance6Features: return "PhysicalDeviceMaintenance6Features"; + case StructureType::ePhysicalDeviceMaintenance6Properties: return "PhysicalDeviceMaintenance6Properties"; + case StructureType::eBindMemoryStatus: return "BindMemoryStatus"; + case StructureType::eBindDescriptorSetsInfo: return "BindDescriptorSetsInfo"; + case StructureType::ePushConstantsInfo: return "PushConstantsInfo"; + case StructureType::ePushDescriptorSetInfo: return "PushDescriptorSetInfo"; + case StructureType::ePushDescriptorSetWithTemplateInfo: return "PushDescriptorSetWithTemplateInfo"; + case StructureType::ePhysicalDevicePipelineProtectedAccessFeatures: return "PhysicalDevicePipelineProtectedAccessFeatures"; + case StructureType::ePipelineRobustnessCreateInfo: return "PipelineRobustnessCreateInfo"; + case StructureType::ePhysicalDevicePipelineRobustnessFeatures: return "PhysicalDevicePipelineRobustnessFeatures"; + case StructureType::ePhysicalDevicePipelineRobustnessProperties: return "PhysicalDevicePipelineRobustnessProperties"; + case StructureType::ePhysicalDeviceHostImageCopyFeatures: return "PhysicalDeviceHostImageCopyFeatures"; + case StructureType::ePhysicalDeviceHostImageCopyProperties: return "PhysicalDeviceHostImageCopyProperties"; + case StructureType::eMemoryToImageCopy: return "MemoryToImageCopy"; + case StructureType::eImageToMemoryCopy: return "ImageToMemoryCopy"; + case StructureType::eCopyImageToMemoryInfo: return "CopyImageToMemoryInfo"; + case StructureType::eCopyMemoryToImageInfo: return "CopyMemoryToImageInfo"; + case StructureType::eHostImageLayoutTransitionInfo: return "HostImageLayoutTransitionInfo"; + case StructureType::eCopyImageToImageInfo: return "CopyImageToImageInfo"; + case StructureType::eSubresourceHostMemcpySize: return 
"SubresourceHostMemcpySize"; + case StructureType::eHostImageCopyDevicePerformanceQuery: return "HostImageCopyDevicePerformanceQuery"; + case StructureType::eSwapchainCreateInfoKHR: return "SwapchainCreateInfoKHR"; + case StructureType::ePresentInfoKHR: return "PresentInfoKHR"; + case StructureType::eDeviceGroupPresentCapabilitiesKHR: return "DeviceGroupPresentCapabilitiesKHR"; + case StructureType::eImageSwapchainCreateInfoKHR: return "ImageSwapchainCreateInfoKHR"; + case StructureType::eBindImageMemorySwapchainInfoKHR: return "BindImageMemorySwapchainInfoKHR"; + case StructureType::eAcquireNextImageInfoKHR: return "AcquireNextImageInfoKHR"; + case StructureType::eDeviceGroupPresentInfoKHR: return "DeviceGroupPresentInfoKHR"; + case StructureType::eDeviceGroupSwapchainCreateInfoKHR: return "DeviceGroupSwapchainCreateInfoKHR"; + case StructureType::eDisplayModeCreateInfoKHR: return "DisplayModeCreateInfoKHR"; + case StructureType::eDisplaySurfaceCreateInfoKHR: return "DisplaySurfaceCreateInfoKHR"; + case StructureType::eDisplayPresentInfoKHR: return "DisplayPresentInfoKHR"; +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + case StructureType::eXlibSurfaceCreateInfoKHR: return "XlibSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + case StructureType::eXcbSurfaceCreateInfoKHR: return "XcbSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + case StructureType::eWaylandSurfaceCreateInfoKHR: return "WaylandSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + case StructureType::eAndroidSurfaceCreateInfoKHR: return "AndroidSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eWin32SurfaceCreateInfoKHR: return "Win32SurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eDebugReportCallbackCreateInfoEXT: return "DebugReportCallbackCreateInfoEXT"; + case StructureType::ePipelineRasterizationStateRasterizationOrderAMD: return "PipelineRasterizationStateRasterizationOrderAMD"; + case StructureType::eDebugMarkerObjectNameInfoEXT: return "DebugMarkerObjectNameInfoEXT"; + case StructureType::eDebugMarkerObjectTagInfoEXT: return "DebugMarkerObjectTagInfoEXT"; + case StructureType::eDebugMarkerMarkerInfoEXT: return "DebugMarkerMarkerInfoEXT"; + case StructureType::eVideoProfileInfoKHR: return "VideoProfileInfoKHR"; + case StructureType::eVideoCapabilitiesKHR: return "VideoCapabilitiesKHR"; + case StructureType::eVideoPictureResourceInfoKHR: return "VideoPictureResourceInfoKHR"; + case StructureType::eVideoSessionMemoryRequirementsKHR: return "VideoSessionMemoryRequirementsKHR"; + case StructureType::eBindVideoSessionMemoryInfoKHR: return "BindVideoSessionMemoryInfoKHR"; + case StructureType::eVideoSessionCreateInfoKHR: return "VideoSessionCreateInfoKHR"; + case StructureType::eVideoSessionParametersCreateInfoKHR: return "VideoSessionParametersCreateInfoKHR"; + case StructureType::eVideoSessionParametersUpdateInfoKHR: return "VideoSessionParametersUpdateInfoKHR"; + case StructureType::eVideoBeginCodingInfoKHR: return "VideoBeginCodingInfoKHR"; + case StructureType::eVideoEndCodingInfoKHR: return "VideoEndCodingInfoKHR"; + case StructureType::eVideoCodingControlInfoKHR: return "VideoCodingControlInfoKHR"; + case StructureType::eVideoReferenceSlotInfoKHR: return "VideoReferenceSlotInfoKHR"; + case StructureType::eQueueFamilyVideoPropertiesKHR: return 
"QueueFamilyVideoPropertiesKHR"; + case StructureType::eVideoProfileListInfoKHR: return "VideoProfileListInfoKHR"; + case StructureType::ePhysicalDeviceVideoFormatInfoKHR: return "PhysicalDeviceVideoFormatInfoKHR"; + case StructureType::eVideoFormatPropertiesKHR: return "VideoFormatPropertiesKHR"; + case StructureType::eQueueFamilyQueryResultStatusPropertiesKHR: return "QueueFamilyQueryResultStatusPropertiesKHR"; + case StructureType::eVideoDecodeInfoKHR: return "VideoDecodeInfoKHR"; + case StructureType::eVideoDecodeCapabilitiesKHR: return "VideoDecodeCapabilitiesKHR"; + case StructureType::eVideoDecodeUsageInfoKHR: return "VideoDecodeUsageInfoKHR"; + case StructureType::eDedicatedAllocationImageCreateInfoNV: return "DedicatedAllocationImageCreateInfoNV"; + case StructureType::eDedicatedAllocationBufferCreateInfoNV: return "DedicatedAllocationBufferCreateInfoNV"; + case StructureType::eDedicatedAllocationMemoryAllocateInfoNV: return "DedicatedAllocationMemoryAllocateInfoNV"; + case StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT: return "PhysicalDeviceTransformFeedbackFeaturesEXT"; + case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT: return "PhysicalDeviceTransformFeedbackPropertiesEXT"; + case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT: return "PipelineRasterizationStateStreamCreateInfoEXT"; + case StructureType::eCuModuleCreateInfoNVX: return "CuModuleCreateInfoNVX"; + case StructureType::eCuFunctionCreateInfoNVX: return "CuFunctionCreateInfoNVX"; + case StructureType::eCuLaunchInfoNVX: return "CuLaunchInfoNVX"; + case StructureType::eCuModuleTexturingModeCreateInfoNVX: return "CuModuleTexturingModeCreateInfoNVX"; + case StructureType::eImageViewHandleInfoNVX: return "ImageViewHandleInfoNVX"; + case StructureType::eImageViewAddressPropertiesNVX: return "ImageViewAddressPropertiesNVX"; + case StructureType::eVideoEncodeH264CapabilitiesKHR: return "VideoEncodeH264CapabilitiesKHR"; + case StructureType::eVideoEncodeH264SessionParametersCreateInfoKHR: return "VideoEncodeH264SessionParametersCreateInfoKHR"; + case StructureType::eVideoEncodeH264SessionParametersAddInfoKHR: return "VideoEncodeH264SessionParametersAddInfoKHR"; + case StructureType::eVideoEncodeH264PictureInfoKHR: return "VideoEncodeH264PictureInfoKHR"; + case StructureType::eVideoEncodeH264DpbSlotInfoKHR: return "VideoEncodeH264DpbSlotInfoKHR"; + case StructureType::eVideoEncodeH264NaluSliceInfoKHR: return "VideoEncodeH264NaluSliceInfoKHR"; + case StructureType::eVideoEncodeH264GopRemainingFrameInfoKHR: return "VideoEncodeH264GopRemainingFrameInfoKHR"; + case StructureType::eVideoEncodeH264ProfileInfoKHR: return "VideoEncodeH264ProfileInfoKHR"; + case StructureType::eVideoEncodeH264RateControlInfoKHR: return "VideoEncodeH264RateControlInfoKHR"; + case StructureType::eVideoEncodeH264RateControlLayerInfoKHR: return "VideoEncodeH264RateControlLayerInfoKHR"; + case StructureType::eVideoEncodeH264SessionCreateInfoKHR: return "VideoEncodeH264SessionCreateInfoKHR"; + case StructureType::eVideoEncodeH264QualityLevelPropertiesKHR: return "VideoEncodeH264QualityLevelPropertiesKHR"; + case StructureType::eVideoEncodeH264SessionParametersGetInfoKHR: return "VideoEncodeH264SessionParametersGetInfoKHR"; + case StructureType::eVideoEncodeH264SessionParametersFeedbackInfoKHR: return "VideoEncodeH264SessionParametersFeedbackInfoKHR"; + case StructureType::eVideoEncodeH265CapabilitiesKHR: return "VideoEncodeH265CapabilitiesKHR"; + case StructureType::eVideoEncodeH265SessionParametersCreateInfoKHR: 
return "VideoEncodeH265SessionParametersCreateInfoKHR"; + case StructureType::eVideoEncodeH265SessionParametersAddInfoKHR: return "VideoEncodeH265SessionParametersAddInfoKHR"; + case StructureType::eVideoEncodeH265PictureInfoKHR: return "VideoEncodeH265PictureInfoKHR"; + case StructureType::eVideoEncodeH265DpbSlotInfoKHR: return "VideoEncodeH265DpbSlotInfoKHR"; + case StructureType::eVideoEncodeH265NaluSliceSegmentInfoKHR: return "VideoEncodeH265NaluSliceSegmentInfoKHR"; + case StructureType::eVideoEncodeH265GopRemainingFrameInfoKHR: return "VideoEncodeH265GopRemainingFrameInfoKHR"; + case StructureType::eVideoEncodeH265ProfileInfoKHR: return "VideoEncodeH265ProfileInfoKHR"; + case StructureType::eVideoEncodeH265RateControlInfoKHR: return "VideoEncodeH265RateControlInfoKHR"; + case StructureType::eVideoEncodeH265RateControlLayerInfoKHR: return "VideoEncodeH265RateControlLayerInfoKHR"; + case StructureType::eVideoEncodeH265SessionCreateInfoKHR: return "VideoEncodeH265SessionCreateInfoKHR"; + case StructureType::eVideoEncodeH265QualityLevelPropertiesKHR: return "VideoEncodeH265QualityLevelPropertiesKHR"; + case StructureType::eVideoEncodeH265SessionParametersGetInfoKHR: return "VideoEncodeH265SessionParametersGetInfoKHR"; + case StructureType::eVideoEncodeH265SessionParametersFeedbackInfoKHR: return "VideoEncodeH265SessionParametersFeedbackInfoKHR"; + case StructureType::eVideoDecodeH264CapabilitiesKHR: return "VideoDecodeH264CapabilitiesKHR"; + case StructureType::eVideoDecodeH264PictureInfoKHR: return "VideoDecodeH264PictureInfoKHR"; + case StructureType::eVideoDecodeH264ProfileInfoKHR: return "VideoDecodeH264ProfileInfoKHR"; + case StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR: return "VideoDecodeH264SessionParametersCreateInfoKHR"; + case StructureType::eVideoDecodeH264SessionParametersAddInfoKHR: return "VideoDecodeH264SessionParametersAddInfoKHR"; + case StructureType::eVideoDecodeH264DpbSlotInfoKHR: return "VideoDecodeH264DpbSlotInfoKHR"; + case StructureType::eTextureLodGatherFormatPropertiesAMD: return "TextureLodGatherFormatPropertiesAMD"; +#if defined( VK_USE_PLATFORM_GGP ) + case StructureType::eStreamDescriptorSurfaceCreateInfoGGP: return "StreamDescriptorSurfaceCreateInfoGGP"; +#endif /*VK_USE_PLATFORM_GGP*/ + case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV: return "PhysicalDeviceCornerSampledImageFeaturesNV"; + case StructureType::eExternalMemoryImageCreateInfoNV: return "ExternalMemoryImageCreateInfoNV"; + case StructureType::eExportMemoryAllocateInfoNV: return "ExportMemoryAllocateInfoNV"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eImportMemoryWin32HandleInfoNV: return "ImportMemoryWin32HandleInfoNV"; + case StructureType::eExportMemoryWin32HandleInfoNV: return "ExportMemoryWin32HandleInfoNV"; + case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV: return "Win32KeyedMutexAcquireReleaseInfoNV"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eValidationFlagsEXT: return "ValidationFlagsEXT"; +#if defined( VK_USE_PLATFORM_VI_NN ) + case StructureType::eViSurfaceCreateInfoNN: return "ViSurfaceCreateInfoNN"; +#endif /*VK_USE_PLATFORM_VI_NN*/ + case StructureType::eImageViewAstcDecodeModeEXT: return "ImageViewAstcDecodeModeEXT"; + case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT: return "PhysicalDeviceAstcDecodeFeaturesEXT"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eImportMemoryWin32HandleInfoKHR: return "ImportMemoryWin32HandleInfoKHR"; + case 
StructureType::eExportMemoryWin32HandleInfoKHR: return "ExportMemoryWin32HandleInfoKHR"; + case StructureType::eMemoryWin32HandlePropertiesKHR: return "MemoryWin32HandlePropertiesKHR"; + case StructureType::eMemoryGetWin32HandleInfoKHR: return "MemoryGetWin32HandleInfoKHR"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eImportMemoryFdInfoKHR: return "ImportMemoryFdInfoKHR"; + case StructureType::eMemoryFdPropertiesKHR: return "MemoryFdPropertiesKHR"; + case StructureType::eMemoryGetFdInfoKHR: return "MemoryGetFdInfoKHR"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR: return "Win32KeyedMutexAcquireReleaseInfoKHR"; + case StructureType::eImportSemaphoreWin32HandleInfoKHR: return "ImportSemaphoreWin32HandleInfoKHR"; + case StructureType::eExportSemaphoreWin32HandleInfoKHR: return "ExportSemaphoreWin32HandleInfoKHR"; + case StructureType::eD3D12FenceSubmitInfoKHR: return "D3D12FenceSubmitInfoKHR"; + case StructureType::eSemaphoreGetWin32HandleInfoKHR: return "SemaphoreGetWin32HandleInfoKHR"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eImportSemaphoreFdInfoKHR: return "ImportSemaphoreFdInfoKHR"; + case StructureType::eSemaphoreGetFdInfoKHR: return "SemaphoreGetFdInfoKHR"; + case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT: return "CommandBufferInheritanceConditionalRenderingInfoEXT"; + case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT: return "PhysicalDeviceConditionalRenderingFeaturesEXT"; + case StructureType::eConditionalRenderingBeginInfoEXT: return "ConditionalRenderingBeginInfoEXT"; + case StructureType::ePresentRegionsKHR: return "PresentRegionsKHR"; + case StructureType::ePipelineViewportWScalingStateCreateInfoNV: return "PipelineViewportWScalingStateCreateInfoNV"; + case StructureType::eSurfaceCapabilities2EXT: return "SurfaceCapabilities2EXT"; + case StructureType::eDisplayPowerInfoEXT: return "DisplayPowerInfoEXT"; + case StructureType::eDeviceEventInfoEXT: return "DeviceEventInfoEXT"; + case StructureType::eDisplayEventInfoEXT: return "DisplayEventInfoEXT"; + case StructureType::eSwapchainCounterCreateInfoEXT: return "SwapchainCounterCreateInfoEXT"; + case StructureType::ePresentTimesInfoGOOGLE: return "PresentTimesInfoGOOGLE"; + case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX: return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX"; + case StructureType::eMultiviewPerViewAttributesInfoNVX: return "MultiviewPerViewAttributesInfoNVX"; + case StructureType::ePipelineViewportSwizzleStateCreateInfoNV: return "PipelineViewportSwizzleStateCreateInfoNV"; + case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT: return "PhysicalDeviceDiscardRectanglePropertiesEXT"; + case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT: return "PipelineDiscardRectangleStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT: return "PhysicalDeviceConservativeRasterizationPropertiesEXT"; + case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT: return "PipelineRasterizationConservativeStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT: return "PhysicalDeviceDepthClipEnableFeaturesEXT"; + case StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT: return "PipelineRasterizationDepthClipStateCreateInfoEXT"; + case StructureType::eHdrMetadataEXT: return "HdrMetadataEXT"; + case 
StructureType::ePhysicalDeviceRelaxedLineRasterizationFeaturesIMG: return "PhysicalDeviceRelaxedLineRasterizationFeaturesIMG"; + case StructureType::eSharedPresentSurfaceCapabilitiesKHR: return "SharedPresentSurfaceCapabilitiesKHR"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eImportFenceWin32HandleInfoKHR: return "ImportFenceWin32HandleInfoKHR"; + case StructureType::eExportFenceWin32HandleInfoKHR: return "ExportFenceWin32HandleInfoKHR"; + case StructureType::eFenceGetWin32HandleInfoKHR: return "FenceGetWin32HandleInfoKHR"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eImportFenceFdInfoKHR: return "ImportFenceFdInfoKHR"; + case StructureType::eFenceGetFdInfoKHR: return "FenceGetFdInfoKHR"; + case StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR: return "PhysicalDevicePerformanceQueryFeaturesKHR"; + case StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR: return "PhysicalDevicePerformanceQueryPropertiesKHR"; + case StructureType::eQueryPoolPerformanceCreateInfoKHR: return "QueryPoolPerformanceCreateInfoKHR"; + case StructureType::ePerformanceQuerySubmitInfoKHR: return "PerformanceQuerySubmitInfoKHR"; + case StructureType::eAcquireProfilingLockInfoKHR: return "AcquireProfilingLockInfoKHR"; + case StructureType::ePerformanceCounterKHR: return "PerformanceCounterKHR"; + case StructureType::ePerformanceCounterDescriptionKHR: return "PerformanceCounterDescriptionKHR"; + case StructureType::ePhysicalDeviceSurfaceInfo2KHR: return "PhysicalDeviceSurfaceInfo2KHR"; + case StructureType::eSurfaceCapabilities2KHR: return "SurfaceCapabilities2KHR"; + case StructureType::eSurfaceFormat2KHR: return "SurfaceFormat2KHR"; + case StructureType::eDisplayProperties2KHR: return "DisplayProperties2KHR"; + case StructureType::eDisplayPlaneProperties2KHR: return "DisplayPlaneProperties2KHR"; + case StructureType::eDisplayModeProperties2KHR: return "DisplayModeProperties2KHR"; + case StructureType::eDisplayPlaneInfo2KHR: return "DisplayPlaneInfo2KHR"; + case StructureType::eDisplayPlaneCapabilities2KHR: return "DisplayPlaneCapabilities2KHR"; +#if defined( VK_USE_PLATFORM_IOS_MVK ) + case StructureType::eIosSurfaceCreateInfoMVK: return "IosSurfaceCreateInfoMVK"; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + case StructureType::eMacosSurfaceCreateInfoMVK: return "MacosSurfaceCreateInfoMVK"; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + case StructureType::eDebugUtilsObjectNameInfoEXT: return "DebugUtilsObjectNameInfoEXT"; + case StructureType::eDebugUtilsObjectTagInfoEXT: return "DebugUtilsObjectTagInfoEXT"; + case StructureType::eDebugUtilsLabelEXT: return "DebugUtilsLabelEXT"; + case StructureType::eDebugUtilsMessengerCallbackDataEXT: return "DebugUtilsMessengerCallbackDataEXT"; + case StructureType::eDebugUtilsMessengerCreateInfoEXT: return "DebugUtilsMessengerCreateInfoEXT"; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + case StructureType::eAndroidHardwareBufferUsageANDROID: return "AndroidHardwareBufferUsageANDROID"; + case StructureType::eAndroidHardwareBufferPropertiesANDROID: return "AndroidHardwareBufferPropertiesANDROID"; + case StructureType::eAndroidHardwareBufferFormatPropertiesANDROID: return "AndroidHardwareBufferFormatPropertiesANDROID"; + case StructureType::eImportAndroidHardwareBufferInfoANDROID: return "ImportAndroidHardwareBufferInfoANDROID"; + case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID: return "MemoryGetAndroidHardwareBufferInfoANDROID"; + case StructureType::eExternalFormatANDROID: return 
"ExternalFormatANDROID"; + case StructureType::eAndroidHardwareBufferFormatProperties2ANDROID: return "AndroidHardwareBufferFormatProperties2ANDROID"; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX: return "PhysicalDeviceShaderEnqueueFeaturesAMDX"; + case StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX: return "PhysicalDeviceShaderEnqueuePropertiesAMDX"; + case StructureType::eExecutionGraphPipelineScratchSizeAMDX: return "ExecutionGraphPipelineScratchSizeAMDX"; + case StructureType::eExecutionGraphPipelineCreateInfoAMDX: return "ExecutionGraphPipelineCreateInfoAMDX"; + case StructureType::ePipelineShaderStageNodeCreateInfoAMDX: return "PipelineShaderStageNodeCreateInfoAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::eAttachmentSampleCountInfoAMD: return "AttachmentSampleCountInfoAMD"; + case StructureType::ePhysicalDeviceShaderBfloat16FeaturesKHR: return "PhysicalDeviceShaderBfloat16FeaturesKHR"; + case StructureType::eSampleLocationsInfoEXT: return "SampleLocationsInfoEXT"; + case StructureType::eRenderPassSampleLocationsBeginInfoEXT: return "RenderPassSampleLocationsBeginInfoEXT"; + case StructureType::ePipelineSampleLocationsStateCreateInfoEXT: return "PipelineSampleLocationsStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT: return "PhysicalDeviceSampleLocationsPropertiesEXT"; + case StructureType::eMultisamplePropertiesEXT: return "MultisamplePropertiesEXT"; + case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT: return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT"; + case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT: return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT"; + case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT: return "PipelineColorBlendAdvancedStateCreateInfoEXT"; + case StructureType::ePipelineCoverageToColorStateCreateInfoNV: return "PipelineCoverageToColorStateCreateInfoNV"; + case StructureType::eWriteDescriptorSetAccelerationStructureKHR: return "WriteDescriptorSetAccelerationStructureKHR"; + case StructureType::eAccelerationStructureBuildGeometryInfoKHR: return "AccelerationStructureBuildGeometryInfoKHR"; + case StructureType::eAccelerationStructureDeviceAddressInfoKHR: return "AccelerationStructureDeviceAddressInfoKHR"; + case StructureType::eAccelerationStructureGeometryAabbsDataKHR: return "AccelerationStructureGeometryAabbsDataKHR"; + case StructureType::eAccelerationStructureGeometryInstancesDataKHR: return "AccelerationStructureGeometryInstancesDataKHR"; + case StructureType::eAccelerationStructureGeometryTrianglesDataKHR: return "AccelerationStructureGeometryTrianglesDataKHR"; + case StructureType::eAccelerationStructureGeometryKHR: return "AccelerationStructureGeometryKHR"; + case StructureType::eAccelerationStructureVersionInfoKHR: return "AccelerationStructureVersionInfoKHR"; + case StructureType::eCopyAccelerationStructureInfoKHR: return "CopyAccelerationStructureInfoKHR"; + case StructureType::eCopyAccelerationStructureToMemoryInfoKHR: return "CopyAccelerationStructureToMemoryInfoKHR"; + case StructureType::eCopyMemoryToAccelerationStructureInfoKHR: return "CopyMemoryToAccelerationStructureInfoKHR"; + case StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR: return "PhysicalDeviceAccelerationStructureFeaturesKHR"; + case StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR: return 
"PhysicalDeviceAccelerationStructurePropertiesKHR"; + case StructureType::eAccelerationStructureCreateInfoKHR: return "AccelerationStructureCreateInfoKHR"; + case StructureType::eAccelerationStructureBuildSizesInfoKHR: return "AccelerationStructureBuildSizesInfoKHR"; + case StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR: return "PhysicalDeviceRayTracingPipelineFeaturesKHR"; + case StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR: return "PhysicalDeviceRayTracingPipelinePropertiesKHR"; + case StructureType::eRayTracingPipelineCreateInfoKHR: return "RayTracingPipelineCreateInfoKHR"; + case StructureType::eRayTracingShaderGroupCreateInfoKHR: return "RayTracingShaderGroupCreateInfoKHR"; + case StructureType::eRayTracingPipelineInterfaceCreateInfoKHR: return "RayTracingPipelineInterfaceCreateInfoKHR"; + case StructureType::ePhysicalDeviceRayQueryFeaturesKHR: return "PhysicalDeviceRayQueryFeaturesKHR"; + case StructureType::ePipelineCoverageModulationStateCreateInfoNV: return "PipelineCoverageModulationStateCreateInfoNV"; + case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV: return "PhysicalDeviceShaderSmBuiltinsFeaturesNV"; + case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV: return "PhysicalDeviceShaderSmBuiltinsPropertiesNV"; + case StructureType::eDrmFormatModifierPropertiesListEXT: return "DrmFormatModifierPropertiesListEXT"; + case StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT: return "PhysicalDeviceImageDrmFormatModifierInfoEXT"; + case StructureType::eImageDrmFormatModifierListCreateInfoEXT: return "ImageDrmFormatModifierListCreateInfoEXT"; + case StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT: return "ImageDrmFormatModifierExplicitCreateInfoEXT"; + case StructureType::eImageDrmFormatModifierPropertiesEXT: return "ImageDrmFormatModifierPropertiesEXT"; + case StructureType::eDrmFormatModifierPropertiesList2EXT: return "DrmFormatModifierPropertiesList2EXT"; + case StructureType::eValidationCacheCreateInfoEXT: return "ValidationCacheCreateInfoEXT"; + case StructureType::eShaderModuleValidationCacheCreateInfoEXT: return "ShaderModuleValidationCacheCreateInfoEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR: return "PhysicalDevicePortabilitySubsetFeaturesKHR"; + case StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR: return "PhysicalDevicePortabilitySubsetPropertiesKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV: return "PipelineViewportShadingRateImageStateCreateInfoNV"; + case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV: return "PhysicalDeviceShadingRateImageFeaturesNV"; + case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV: return "PhysicalDeviceShadingRateImagePropertiesNV"; + case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV: return "PipelineViewportCoarseSampleOrderStateCreateInfoNV"; + case StructureType::eRayTracingPipelineCreateInfoNV: return "RayTracingPipelineCreateInfoNV"; + case StructureType::eAccelerationStructureCreateInfoNV: return "AccelerationStructureCreateInfoNV"; + case StructureType::eGeometryNV: return "GeometryNV"; + case StructureType::eGeometryTrianglesNV: return "GeometryTrianglesNV"; + case StructureType::eGeometryAabbNV: return "GeometryAabbNV"; + case StructureType::eBindAccelerationStructureMemoryInfoNV: return "BindAccelerationStructureMemoryInfoNV"; + case 
StructureType::eWriteDescriptorSetAccelerationStructureNV: return "WriteDescriptorSetAccelerationStructureNV"; + case StructureType::eAccelerationStructureMemoryRequirementsInfoNV: return "AccelerationStructureMemoryRequirementsInfoNV"; + case StructureType::ePhysicalDeviceRayTracingPropertiesNV: return "PhysicalDeviceRayTracingPropertiesNV"; + case StructureType::eRayTracingShaderGroupCreateInfoNV: return "RayTracingShaderGroupCreateInfoNV"; + case StructureType::eAccelerationStructureInfoNV: return "AccelerationStructureInfoNV"; + case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV: return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV"; + case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV: return "PipelineRepresentativeFragmentTestStateCreateInfoNV"; + case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT: return "PhysicalDeviceImageViewImageFormatInfoEXT"; + case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT: return "FilterCubicImageViewImageFormatPropertiesEXT"; + case StructureType::eImportMemoryHostPointerInfoEXT: return "ImportMemoryHostPointerInfoEXT"; + case StructureType::eMemoryHostPointerPropertiesEXT: return "MemoryHostPointerPropertiesEXT"; + case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT: return "PhysicalDeviceExternalMemoryHostPropertiesEXT"; + case StructureType::ePhysicalDeviceShaderClockFeaturesKHR: return "PhysicalDeviceShaderClockFeaturesKHR"; + case StructureType::ePipelineCompilerControlCreateInfoAMD: return "PipelineCompilerControlCreateInfoAMD"; + case StructureType::ePhysicalDeviceShaderCorePropertiesAMD: return "PhysicalDeviceShaderCorePropertiesAMD"; + case StructureType::eVideoDecodeH265CapabilitiesKHR: return "VideoDecodeH265CapabilitiesKHR"; + case StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR: return "VideoDecodeH265SessionParametersCreateInfoKHR"; + case StructureType::eVideoDecodeH265SessionParametersAddInfoKHR: return "VideoDecodeH265SessionParametersAddInfoKHR"; + case StructureType::eVideoDecodeH265ProfileInfoKHR: return "VideoDecodeH265ProfileInfoKHR"; + case StructureType::eVideoDecodeH265PictureInfoKHR: return "VideoDecodeH265PictureInfoKHR"; + case StructureType::eVideoDecodeH265DpbSlotInfoKHR: return "VideoDecodeH265DpbSlotInfoKHR"; + case StructureType::eDeviceMemoryOverallocationCreateInfoAMD: return "DeviceMemoryOverallocationCreateInfoAMD"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT: return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT"; +#if defined( VK_USE_PLATFORM_GGP ) + case StructureType::ePresentFrameTokenGGP: return "PresentFrameTokenGGP"; +#endif /*VK_USE_PLATFORM_GGP*/ + case StructureType::ePhysicalDeviceMeshShaderFeaturesNV: return "PhysicalDeviceMeshShaderFeaturesNV"; + case StructureType::ePhysicalDeviceMeshShaderPropertiesNV: return "PhysicalDeviceMeshShaderPropertiesNV"; + case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV: return "PhysicalDeviceShaderImageFootprintFeaturesNV"; + case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV: return "PipelineViewportExclusiveScissorStateCreateInfoNV"; + case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV: return "PhysicalDeviceExclusiveScissorFeaturesNV"; + case StructureType::eCheckpointDataNV: return "CheckpointDataNV"; + case StructureType::eQueueFamilyCheckpointPropertiesNV: return "QueueFamilyCheckpointPropertiesNV"; + case StructureType::eQueueFamilyCheckpointProperties2NV: return 
"QueueFamilyCheckpointProperties2NV"; + case StructureType::eCheckpointData2NV: return "CheckpointData2NV"; + case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL: return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL"; + case StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL: return "QueryPoolPerformanceQueryCreateInfoINTEL"; + case StructureType::eInitializePerformanceApiInfoINTEL: return "InitializePerformanceApiInfoINTEL"; + case StructureType::ePerformanceMarkerInfoINTEL: return "PerformanceMarkerInfoINTEL"; + case StructureType::ePerformanceStreamMarkerInfoINTEL: return "PerformanceStreamMarkerInfoINTEL"; + case StructureType::ePerformanceOverrideInfoINTEL: return "PerformanceOverrideInfoINTEL"; + case StructureType::ePerformanceConfigurationAcquireInfoINTEL: return "PerformanceConfigurationAcquireInfoINTEL"; + case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT: return "PhysicalDevicePciBusInfoPropertiesEXT"; + case StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD: return "DisplayNativeHdrSurfaceCapabilitiesAMD"; + case StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD: return "SwapchainDisplayNativeHdrCreateInfoAMD"; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case StructureType::eImagepipeSurfaceCreateInfoFUCHSIA: return "ImagepipeSurfaceCreateInfoFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + case StructureType::eMetalSurfaceCreateInfoEXT: return "MetalSurfaceCreateInfoEXT"; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT: return "PhysicalDeviceFragmentDensityMapFeaturesEXT"; + case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT: return "PhysicalDeviceFragmentDensityMapPropertiesEXT"; + case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT: return "RenderPassFragmentDensityMapCreateInfoEXT"; + case StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT: return "RenderingFragmentDensityMapAttachmentInfoEXT"; + case StructureType::eFragmentShadingRateAttachmentInfoKHR: return "FragmentShadingRateAttachmentInfoKHR"; + case StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR: return "PipelineFragmentShadingRateStateCreateInfoKHR"; + case StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR: return "PhysicalDeviceFragmentShadingRatePropertiesKHR"; + case StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR: return "PhysicalDeviceFragmentShadingRateFeaturesKHR"; + case StructureType::ePhysicalDeviceFragmentShadingRateKHR: return "PhysicalDeviceFragmentShadingRateKHR"; + case StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR: return "RenderingFragmentShadingRateAttachmentInfoKHR"; + case StructureType::ePhysicalDeviceShaderCoreProperties2AMD: return "PhysicalDeviceShaderCoreProperties2AMD"; + case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD: return "PhysicalDeviceCoherentMemoryFeaturesAMD"; + case StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT: return "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT"; + case StructureType::ePhysicalDeviceShaderQuadControlFeaturesKHR: return "PhysicalDeviceShaderQuadControlFeaturesKHR"; + case StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT: return "PhysicalDeviceMemoryBudgetPropertiesEXT"; + case StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT: return "PhysicalDeviceMemoryPriorityFeaturesEXT"; + case StructureType::eMemoryPriorityAllocateInfoEXT: return "MemoryPriorityAllocateInfoEXT"; + case 
StructureType::eSurfaceProtectedCapabilitiesKHR: return "SurfaceProtectedCapabilitiesKHR"; + case StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV: return "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV"; + case StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT: return "PhysicalDeviceBufferDeviceAddressFeaturesEXT"; + case StructureType::eBufferDeviceAddressCreateInfoEXT: return "BufferDeviceAddressCreateInfoEXT"; + case StructureType::eValidationFeaturesEXT: return "ValidationFeaturesEXT"; + case StructureType::ePhysicalDevicePresentWaitFeaturesKHR: return "PhysicalDevicePresentWaitFeaturesKHR"; + case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV: return "PhysicalDeviceCooperativeMatrixFeaturesNV"; + case StructureType::eCooperativeMatrixPropertiesNV: return "CooperativeMatrixPropertiesNV"; + case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV: return "PhysicalDeviceCooperativeMatrixPropertiesNV"; + case StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV: return "PhysicalDeviceCoverageReductionModeFeaturesNV"; + case StructureType::ePipelineCoverageReductionStateCreateInfoNV: return "PipelineCoverageReductionStateCreateInfoNV"; + case StructureType::eFramebufferMixedSamplesCombinationNV: return "FramebufferMixedSamplesCombinationNV"; + case StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT: return "PhysicalDeviceFragmentShaderInterlockFeaturesEXT"; + case StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT: return "PhysicalDeviceYcbcrImageArraysFeaturesEXT"; + case StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT: return "PhysicalDeviceProvokingVertexFeaturesEXT"; + case StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT: return "PipelineRasterizationProvokingVertexStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT: return "PhysicalDeviceProvokingVertexPropertiesEXT"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eSurfaceFullScreenExclusiveInfoEXT: return "SurfaceFullScreenExclusiveInfoEXT"; + case StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT: return "SurfaceCapabilitiesFullScreenExclusiveEXT"; + case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT: return "SurfaceFullScreenExclusiveWin32InfoEXT"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eHeadlessSurfaceCreateInfoEXT: return "HeadlessSurfaceCreateInfoEXT"; + case StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT: return "PhysicalDeviceShaderAtomicFloatFeaturesEXT"; + case StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT: return "PhysicalDeviceExtendedDynamicStateFeaturesEXT"; + case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR: return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR"; + case StructureType::ePipelineInfoKHR: return "PipelineInfoKHR"; + case StructureType::ePipelineExecutablePropertiesKHR: return "PipelineExecutablePropertiesKHR"; + case StructureType::ePipelineExecutableInfoKHR: return "PipelineExecutableInfoKHR"; + case StructureType::ePipelineExecutableStatisticKHR: return "PipelineExecutableStatisticKHR"; + case StructureType::ePipelineExecutableInternalRepresentationKHR: return "PipelineExecutableInternalRepresentationKHR"; + case StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT: return "PhysicalDeviceMapMemoryPlacedFeaturesEXT"; + case StructureType::ePhysicalDeviceMapMemoryPlacedPropertiesEXT: return 
"PhysicalDeviceMapMemoryPlacedPropertiesEXT"; + case StructureType::eMemoryMapPlacedInfoEXT: return "MemoryMapPlacedInfoEXT"; + case StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT: return "PhysicalDeviceShaderAtomicFloat2FeaturesEXT"; + case StructureType::eSurfacePresentModeEXT: return "SurfacePresentModeEXT"; + case StructureType::eSurfacePresentScalingCapabilitiesEXT: return "SurfacePresentScalingCapabilitiesEXT"; + case StructureType::eSurfacePresentModeCompatibilityEXT: return "SurfacePresentModeCompatibilityEXT"; + case StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesEXT: return "PhysicalDeviceSwapchainMaintenance1FeaturesEXT"; + case StructureType::eSwapchainPresentFenceInfoEXT: return "SwapchainPresentFenceInfoEXT"; + case StructureType::eSwapchainPresentModesCreateInfoEXT: return "SwapchainPresentModesCreateInfoEXT"; + case StructureType::eSwapchainPresentModeInfoEXT: return "SwapchainPresentModeInfoEXT"; + case StructureType::eSwapchainPresentScalingCreateInfoEXT: return "SwapchainPresentScalingCreateInfoEXT"; + case StructureType::eReleaseSwapchainImagesInfoEXT: return "ReleaseSwapchainImagesInfoEXT"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV: return "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV"; + case StructureType::eGraphicsShaderGroupCreateInfoNV: return "GraphicsShaderGroupCreateInfoNV"; + case StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV: return "GraphicsPipelineShaderGroupsCreateInfoNV"; + case StructureType::eIndirectCommandsLayoutTokenNV: return "IndirectCommandsLayoutTokenNV"; + case StructureType::eIndirectCommandsLayoutCreateInfoNV: return "IndirectCommandsLayoutCreateInfoNV"; + case StructureType::eGeneratedCommandsInfoNV: return "GeneratedCommandsInfoNV"; + case StructureType::eGeneratedCommandsMemoryRequirementsInfoNV: return "GeneratedCommandsMemoryRequirementsInfoNV"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV: return "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV"; + case StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV: return "PhysicalDeviceInheritedViewportScissorFeaturesNV"; + case StructureType::eCommandBufferInheritanceViewportScissorInfoNV: return "CommandBufferInheritanceViewportScissorInfoNV"; + case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT: return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT"; + case StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM: return "CommandBufferInheritanceRenderPassTransformInfoQCOM"; + case StructureType::eRenderPassTransformBeginInfoQCOM: return "RenderPassTransformBeginInfoQCOM"; + case StructureType::ePhysicalDeviceDepthBiasControlFeaturesEXT: return "PhysicalDeviceDepthBiasControlFeaturesEXT"; + case StructureType::eDepthBiasInfoEXT: return "DepthBiasInfoEXT"; + case StructureType::eDepthBiasRepresentationInfoEXT: return "DepthBiasRepresentationInfoEXT"; + case StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT: return "PhysicalDeviceDeviceMemoryReportFeaturesEXT"; + case StructureType::eDeviceDeviceMemoryReportCreateInfoEXT: return "DeviceDeviceMemoryReportCreateInfoEXT"; + case StructureType::eDeviceMemoryReportCallbackDataEXT: return "DeviceMemoryReportCallbackDataEXT"; + case StructureType::ePhysicalDeviceRobustness2FeaturesEXT: return "PhysicalDeviceRobustness2FeaturesEXT"; + case StructureType::ePhysicalDeviceRobustness2PropertiesEXT: return "PhysicalDeviceRobustness2PropertiesEXT"; + case StructureType::eSamplerCustomBorderColorCreateInfoEXT: 
return "SamplerCustomBorderColorCreateInfoEXT"; + case StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT: return "PhysicalDeviceCustomBorderColorPropertiesEXT"; + case StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT: return "PhysicalDeviceCustomBorderColorFeaturesEXT"; + case StructureType::ePipelineLibraryCreateInfoKHR: return "PipelineLibraryCreateInfoKHR"; + case StructureType::ePhysicalDevicePresentBarrierFeaturesNV: return "PhysicalDevicePresentBarrierFeaturesNV"; + case StructureType::eSurfaceCapabilitiesPresentBarrierNV: return "SurfaceCapabilitiesPresentBarrierNV"; + case StructureType::eSwapchainPresentBarrierCreateInfoNV: return "SwapchainPresentBarrierCreateInfoNV"; + case StructureType::ePresentIdKHR: return "PresentIdKHR"; + case StructureType::ePhysicalDevicePresentIdFeaturesKHR: return "PhysicalDevicePresentIdFeaturesKHR"; + case StructureType::eVideoEncodeInfoKHR: return "VideoEncodeInfoKHR"; + case StructureType::eVideoEncodeRateControlInfoKHR: return "VideoEncodeRateControlInfoKHR"; + case StructureType::eVideoEncodeRateControlLayerInfoKHR: return "VideoEncodeRateControlLayerInfoKHR"; + case StructureType::eVideoEncodeCapabilitiesKHR: return "VideoEncodeCapabilitiesKHR"; + case StructureType::eVideoEncodeUsageInfoKHR: return "VideoEncodeUsageInfoKHR"; + case StructureType::eQueryPoolVideoEncodeFeedbackCreateInfoKHR: return "QueryPoolVideoEncodeFeedbackCreateInfoKHR"; + case StructureType::ePhysicalDeviceVideoEncodeQualityLevelInfoKHR: return "PhysicalDeviceVideoEncodeQualityLevelInfoKHR"; + case StructureType::eVideoEncodeQualityLevelPropertiesKHR: return "VideoEncodeQualityLevelPropertiesKHR"; + case StructureType::eVideoEncodeQualityLevelInfoKHR: return "VideoEncodeQualityLevelInfoKHR"; + case StructureType::eVideoEncodeSessionParametersGetInfoKHR: return "VideoEncodeSessionParametersGetInfoKHR"; + case StructureType::eVideoEncodeSessionParametersFeedbackInfoKHR: return "VideoEncodeSessionParametersFeedbackInfoKHR"; + case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV: return "PhysicalDeviceDiagnosticsConfigFeaturesNV"; + case StructureType::eDeviceDiagnosticsConfigCreateInfoNV: return "DeviceDiagnosticsConfigCreateInfoNV"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::eCudaModuleCreateInfoNV: return "CudaModuleCreateInfoNV"; + case StructureType::eCudaFunctionCreateInfoNV: return "CudaFunctionCreateInfoNV"; + case StructureType::eCudaLaunchInfoNV: return "CudaLaunchInfoNV"; + case StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV: return "PhysicalDeviceCudaKernelLaunchFeaturesNV"; + case StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV: return "PhysicalDeviceCudaKernelLaunchPropertiesNV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::ePhysicalDeviceTileShadingFeaturesQCOM: return "PhysicalDeviceTileShadingFeaturesQCOM"; + case StructureType::ePhysicalDeviceTileShadingPropertiesQCOM: return "PhysicalDeviceTileShadingPropertiesQCOM"; + case StructureType::eRenderPassTileShadingCreateInfoQCOM: return "RenderPassTileShadingCreateInfoQCOM"; + case StructureType::ePerTileBeginInfoQCOM: return "PerTileBeginInfoQCOM"; + case StructureType::ePerTileEndInfoQCOM: return "PerTileEndInfoQCOM"; + case StructureType::eDispatchTileInfoQCOM: return "DispatchTileInfoQCOM"; + case StructureType::eQueryLowLatencySupportNV: return "QueryLowLatencySupportNV"; +#if defined( VK_USE_PLATFORM_METAL_EXT ) + case StructureType::eExportMetalObjectCreateInfoEXT: return "ExportMetalObjectCreateInfoEXT"; + case 
StructureType::eExportMetalObjectsInfoEXT: return "ExportMetalObjectsInfoEXT"; + case StructureType::eExportMetalDeviceInfoEXT: return "ExportMetalDeviceInfoEXT"; + case StructureType::eExportMetalCommandQueueInfoEXT: return "ExportMetalCommandQueueInfoEXT"; + case StructureType::eExportMetalBufferInfoEXT: return "ExportMetalBufferInfoEXT"; + case StructureType::eImportMetalBufferInfoEXT: return "ImportMetalBufferInfoEXT"; + case StructureType::eExportMetalTextureInfoEXT: return "ExportMetalTextureInfoEXT"; + case StructureType::eImportMetalTextureInfoEXT: return "ImportMetalTextureInfoEXT"; + case StructureType::eExportMetalIoSurfaceInfoEXT: return "ExportMetalIoSurfaceInfoEXT"; + case StructureType::eImportMetalIoSurfaceInfoEXT: return "ImportMetalIoSurfaceInfoEXT"; + case StructureType::eExportMetalSharedEventInfoEXT: return "ExportMetalSharedEventInfoEXT"; + case StructureType::eImportMetalSharedEventInfoEXT: return "ImportMetalSharedEventInfoEXT"; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + case StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT: return "PhysicalDeviceDescriptorBufferPropertiesEXT"; + case StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT: return "PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT"; + case StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT: return "PhysicalDeviceDescriptorBufferFeaturesEXT"; + case StructureType::eDescriptorAddressInfoEXT: return "DescriptorAddressInfoEXT"; + case StructureType::eDescriptorGetInfoEXT: return "DescriptorGetInfoEXT"; + case StructureType::eBufferCaptureDescriptorDataInfoEXT: return "BufferCaptureDescriptorDataInfoEXT"; + case StructureType::eImageCaptureDescriptorDataInfoEXT: return "ImageCaptureDescriptorDataInfoEXT"; + case StructureType::eImageViewCaptureDescriptorDataInfoEXT: return "ImageViewCaptureDescriptorDataInfoEXT"; + case StructureType::eSamplerCaptureDescriptorDataInfoEXT: return "SamplerCaptureDescriptorDataInfoEXT"; + case StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT: return "OpaqueCaptureDescriptorDataCreateInfoEXT"; + case StructureType::eDescriptorBufferBindingInfoEXT: return "DescriptorBufferBindingInfoEXT"; + case StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT: return "DescriptorBufferBindingPushDescriptorBufferHandleEXT"; + case StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT: return "AccelerationStructureCaptureDescriptorDataInfoEXT"; + case StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT: return "PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT"; + case StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT: return "PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT"; + case StructureType::eGraphicsPipelineLibraryCreateInfoEXT: return "GraphicsPipelineLibraryCreateInfoEXT"; + case StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD: return "PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD"; + case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR: return "PhysicalDeviceFragmentShaderBarycentricFeaturesKHR"; + case StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR: return "PhysicalDeviceFragmentShaderBarycentricPropertiesKHR"; + case StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR: return "PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR"; + case StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV: return "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV"; + case 
StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV: return "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV"; + case StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV: return "PipelineFragmentShadingRateEnumStateCreateInfoNV"; + case StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV: return "AccelerationStructureGeometryMotionTrianglesDataNV"; + case StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV: return "PhysicalDeviceRayTracingMotionBlurFeaturesNV"; + case StructureType::eAccelerationStructureMotionInfoNV: return "AccelerationStructureMotionInfoNV"; + case StructureType::ePhysicalDeviceMeshShaderFeaturesEXT: return "PhysicalDeviceMeshShaderFeaturesEXT"; + case StructureType::ePhysicalDeviceMeshShaderPropertiesEXT: return "PhysicalDeviceMeshShaderPropertiesEXT"; + case StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT: return "PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT"; + case StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT: return "PhysicalDeviceFragmentDensityMap2FeaturesEXT"; + case StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT: return "PhysicalDeviceFragmentDensityMap2PropertiesEXT"; + case StructureType::eCopyCommandTransformInfoQCOM: return "CopyCommandTransformInfoQCOM"; + case StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR: return "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR"; + case StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT: return "PhysicalDeviceImageCompressionControlFeaturesEXT"; + case StructureType::eImageCompressionControlEXT: return "ImageCompressionControlEXT"; + case StructureType::eImageCompressionPropertiesEXT: return "ImageCompressionPropertiesEXT"; + case StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT: return "PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT"; + case StructureType::ePhysicalDevice4444FormatsFeaturesEXT: return "PhysicalDevice4444FormatsFeaturesEXT"; + case StructureType::ePhysicalDeviceFaultFeaturesEXT: return "PhysicalDeviceFaultFeaturesEXT"; + case StructureType::eDeviceFaultCountsEXT: return "DeviceFaultCountsEXT"; + case StructureType::eDeviceFaultInfoEXT: return "DeviceFaultInfoEXT"; + case StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT: return "PhysicalDeviceRgba10X6FormatsFeaturesEXT"; +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + case StructureType::eDirectfbSurfaceCreateInfoEXT: return "DirectfbSurfaceCreateInfoEXT"; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + case StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT: return "PhysicalDeviceVertexInputDynamicStateFeaturesEXT"; + case StructureType::eVertexInputBindingDescription2EXT: return "VertexInputBindingDescription2EXT"; + case StructureType::eVertexInputAttributeDescription2EXT: return "VertexInputAttributeDescription2EXT"; + case StructureType::ePhysicalDeviceDrmPropertiesEXT: return "PhysicalDeviceDrmPropertiesEXT"; + case StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT: return "PhysicalDeviceAddressBindingReportFeaturesEXT"; + case StructureType::eDeviceAddressBindingCallbackDataEXT: return "DeviceAddressBindingCallbackDataEXT"; + case StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT: return "PhysicalDeviceDepthClipControlFeaturesEXT"; + case StructureType::ePipelineViewportDepthClipControlCreateInfoEXT: return "PipelineViewportDepthClipControlCreateInfoEXT"; + case StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT: return 
"PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT"; + case StructureType::ePhysicalDevicePresentModeFifoLatestReadyFeaturesEXT: return "PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT"; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case StructureType::eImportMemoryZirconHandleInfoFUCHSIA: return "ImportMemoryZirconHandleInfoFUCHSIA"; + case StructureType::eMemoryZirconHandlePropertiesFUCHSIA: return "MemoryZirconHandlePropertiesFUCHSIA"; + case StructureType::eMemoryGetZirconHandleInfoFUCHSIA: return "MemoryGetZirconHandleInfoFUCHSIA"; + case StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA: return "ImportSemaphoreZirconHandleInfoFUCHSIA"; + case StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA: return "SemaphoreGetZirconHandleInfoFUCHSIA"; + case StructureType::eBufferCollectionCreateInfoFUCHSIA: return "BufferCollectionCreateInfoFUCHSIA"; + case StructureType::eImportMemoryBufferCollectionFUCHSIA: return "ImportMemoryBufferCollectionFUCHSIA"; + case StructureType::eBufferCollectionImageCreateInfoFUCHSIA: return "BufferCollectionImageCreateInfoFUCHSIA"; + case StructureType::eBufferCollectionPropertiesFUCHSIA: return "BufferCollectionPropertiesFUCHSIA"; + case StructureType::eBufferConstraintsInfoFUCHSIA: return "BufferConstraintsInfoFUCHSIA"; + case StructureType::eBufferCollectionBufferCreateInfoFUCHSIA: return "BufferCollectionBufferCreateInfoFUCHSIA"; + case StructureType::eImageConstraintsInfoFUCHSIA: return "ImageConstraintsInfoFUCHSIA"; + case StructureType::eImageFormatConstraintsInfoFUCHSIA: return "ImageFormatConstraintsInfoFUCHSIA"; + case StructureType::eSysmemColorSpaceFUCHSIA: return "SysmemColorSpaceFUCHSIA"; + case StructureType::eBufferCollectionConstraintsInfoFUCHSIA: return "BufferCollectionConstraintsInfoFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + case StructureType::eSubpassShadingPipelineCreateInfoHUAWEI: return "SubpassShadingPipelineCreateInfoHUAWEI"; + case StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI: return "PhysicalDeviceSubpassShadingFeaturesHUAWEI"; + case StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI: return "PhysicalDeviceSubpassShadingPropertiesHUAWEI"; + case StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI: return "PhysicalDeviceInvocationMaskFeaturesHUAWEI"; + case StructureType::eMemoryGetRemoteAddressInfoNV: return "MemoryGetRemoteAddressInfoNV"; + case StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV: return "PhysicalDeviceExternalMemoryRdmaFeaturesNV"; + case StructureType::ePipelinePropertiesIdentifierEXT: return "PipelinePropertiesIdentifierEXT"; + case StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT: return "PhysicalDevicePipelinePropertiesFeaturesEXT"; + case StructureType::ePhysicalDeviceFrameBoundaryFeaturesEXT: return "PhysicalDeviceFrameBoundaryFeaturesEXT"; + case StructureType::eFrameBoundaryEXT: return "FrameBoundaryEXT"; + case StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT: return "PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT"; + case StructureType::eSubpassResolvePerformanceQueryEXT: return "SubpassResolvePerformanceQueryEXT"; + case StructureType::eMultisampledRenderToSingleSampledInfoEXT: return "MultisampledRenderToSingleSampledInfoEXT"; + case StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT: return "PhysicalDeviceExtendedDynamicState2FeaturesEXT"; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + case StructureType::eScreenSurfaceCreateInfoQNX: return "ScreenSurfaceCreateInfoQNX"; +#endif 
/*VK_USE_PLATFORM_SCREEN_QNX*/ + case StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT: return "PhysicalDeviceColorWriteEnableFeaturesEXT"; + case StructureType::ePipelineColorWriteCreateInfoEXT: return "PipelineColorWriteCreateInfoEXT"; + case StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT: return "PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT"; + case StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR: return "PhysicalDeviceRayTracingMaintenance1FeaturesKHR"; + case StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT: return "PhysicalDeviceImageViewMinLodFeaturesEXT"; + case StructureType::eImageViewMinLodCreateInfoEXT: return "ImageViewMinLodCreateInfoEXT"; + case StructureType::ePhysicalDeviceMultiDrawFeaturesEXT: return "PhysicalDeviceMultiDrawFeaturesEXT"; + case StructureType::ePhysicalDeviceMultiDrawPropertiesEXT: return "PhysicalDeviceMultiDrawPropertiesEXT"; + case StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT: return "PhysicalDeviceImage2DViewOf3DFeaturesEXT"; + case StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT: return "PhysicalDeviceShaderTileImageFeaturesEXT"; + case StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT: return "PhysicalDeviceShaderTileImagePropertiesEXT"; + case StructureType::eMicromapBuildInfoEXT: return "MicromapBuildInfoEXT"; + case StructureType::eMicromapVersionInfoEXT: return "MicromapVersionInfoEXT"; + case StructureType::eCopyMicromapInfoEXT: return "CopyMicromapInfoEXT"; + case StructureType::eCopyMicromapToMemoryInfoEXT: return "CopyMicromapToMemoryInfoEXT"; + case StructureType::eCopyMemoryToMicromapInfoEXT: return "CopyMemoryToMicromapInfoEXT"; + case StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT: return "PhysicalDeviceOpacityMicromapFeaturesEXT"; + case StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT: return "PhysicalDeviceOpacityMicromapPropertiesEXT"; + case StructureType::eMicromapCreateInfoEXT: return "MicromapCreateInfoEXT"; + case StructureType::eMicromapBuildSizesInfoEXT: return "MicromapBuildSizesInfoEXT"; + case StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT: return "AccelerationStructureTrianglesOpacityMicromapEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::ePhysicalDeviceDisplacementMicromapFeaturesNV: return "PhysicalDeviceDisplacementMicromapFeaturesNV"; + case StructureType::ePhysicalDeviceDisplacementMicromapPropertiesNV: return "PhysicalDeviceDisplacementMicromapPropertiesNV"; + case StructureType::eAccelerationStructureTrianglesDisplacementMicromapNV: return "AccelerationStructureTrianglesDisplacementMicromapNV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI: return "PhysicalDeviceClusterCullingShaderFeaturesHUAWEI"; + case StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI: return "PhysicalDeviceClusterCullingShaderPropertiesHUAWEI"; + case StructureType::ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI: return "PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI"; + case StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT: return "PhysicalDeviceBorderColorSwizzleFeaturesEXT"; + case StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT: return "SamplerBorderColorComponentMappingCreateInfoEXT"; + case StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT: return "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT"; + case StructureType::ePhysicalDeviceShaderCorePropertiesARM: 
return "PhysicalDeviceShaderCorePropertiesARM"; + case StructureType::eDeviceQueueShaderCoreControlCreateInfoARM: return "DeviceQueueShaderCoreControlCreateInfoARM"; + case StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM: return "PhysicalDeviceSchedulingControlsFeaturesARM"; + case StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM: return "PhysicalDeviceSchedulingControlsPropertiesARM"; + case StructureType::ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT: return "PhysicalDeviceImageSlicedViewOf3DFeaturesEXT"; + case StructureType::eImageViewSlicedCreateInfoEXT: return "ImageViewSlicedCreateInfoEXT"; + case StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE: return "PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE"; + case StructureType::eDescriptorSetBindingReferenceVALVE: return "DescriptorSetBindingReferenceVALVE"; + case StructureType::eDescriptorSetLayoutHostMappingInfoVALVE: return "DescriptorSetLayoutHostMappingInfoVALVE"; + case StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT: return "PhysicalDeviceNonSeamlessCubeMapFeaturesEXT"; + case StructureType::ePhysicalDeviceRenderPassStripedFeaturesARM: return "PhysicalDeviceRenderPassStripedFeaturesARM"; + case StructureType::ePhysicalDeviceRenderPassStripedPropertiesARM: return "PhysicalDeviceRenderPassStripedPropertiesARM"; + case StructureType::eRenderPassStripeBeginInfoARM: return "RenderPassStripeBeginInfoARM"; + case StructureType::eRenderPassStripeInfoARM: return "RenderPassStripeInfoARM"; + case StructureType::eRenderPassStripeSubmitInfoARM: return "RenderPassStripeSubmitInfoARM"; + case StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV: return "PhysicalDeviceCopyMemoryIndirectFeaturesNV"; + case StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV: return "PhysicalDeviceCopyMemoryIndirectPropertiesNV"; + case StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV: return "PhysicalDeviceMemoryDecompressionFeaturesNV"; + case StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV: return "PhysicalDeviceMemoryDecompressionPropertiesNV"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV: return "PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV"; + case StructureType::eComputePipelineIndirectBufferInfoNV: return "ComputePipelineIndirectBufferInfoNV"; + case StructureType::ePipelineIndirectDeviceAddressInfoNV: return "PipelineIndirectDeviceAddressInfoNV"; + case StructureType::ePhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV: return "PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV"; + case StructureType::eAccelerationStructureGeometryLinearSweptSpheresDataNV: return "AccelerationStructureGeometryLinearSweptSpheresDataNV"; + case StructureType::eAccelerationStructureGeometrySpheresDataNV: return "AccelerationStructureGeometrySpheresDataNV"; + case StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV: return "PhysicalDeviceLinearColorAttachmentFeaturesNV"; + case StructureType::ePhysicalDeviceShaderMaximalReconvergenceFeaturesKHR: return "PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR"; + case StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT: return "PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT"; + case StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM: return "PhysicalDeviceImageProcessingFeaturesQCOM"; + case StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM: return "PhysicalDeviceImageProcessingPropertiesQCOM"; + case 
StructureType::eImageViewSampleWeightCreateInfoQCOM: return "ImageViewSampleWeightCreateInfoQCOM"; + case StructureType::ePhysicalDeviceNestedCommandBufferFeaturesEXT: return "PhysicalDeviceNestedCommandBufferFeaturesEXT"; + case StructureType::ePhysicalDeviceNestedCommandBufferPropertiesEXT: return "PhysicalDeviceNestedCommandBufferPropertiesEXT"; + case StructureType::eExternalMemoryAcquireUnmodifiedEXT: return "ExternalMemoryAcquireUnmodifiedEXT"; + case StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT: return "PhysicalDeviceExtendedDynamicState3FeaturesEXT"; + case StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT: return "PhysicalDeviceExtendedDynamicState3PropertiesEXT"; + case StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT: return "PhysicalDeviceSubpassMergeFeedbackFeaturesEXT"; + case StructureType::eRenderPassCreationControlEXT: return "RenderPassCreationControlEXT"; + case StructureType::eRenderPassCreationFeedbackCreateInfoEXT: return "RenderPassCreationFeedbackCreateInfoEXT"; + case StructureType::eRenderPassSubpassFeedbackCreateInfoEXT: return "RenderPassSubpassFeedbackCreateInfoEXT"; + case StructureType::eDirectDriverLoadingInfoLUNARG: return "DirectDriverLoadingInfoLUNARG"; + case StructureType::eDirectDriverLoadingListLUNARG: return "DirectDriverLoadingListLUNARG"; + case StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT: return "PhysicalDeviceShaderModuleIdentifierFeaturesEXT"; + case StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT: return "PhysicalDeviceShaderModuleIdentifierPropertiesEXT"; + case StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT: return "PipelineShaderStageModuleIdentifierCreateInfoEXT"; + case StructureType::eShaderModuleIdentifierEXT: return "ShaderModuleIdentifierEXT"; + case StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT: return "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT"; + case StructureType::ePhysicalDeviceOpticalFlowFeaturesNV: return "PhysicalDeviceOpticalFlowFeaturesNV"; + case StructureType::ePhysicalDeviceOpticalFlowPropertiesNV: return "PhysicalDeviceOpticalFlowPropertiesNV"; + case StructureType::eOpticalFlowImageFormatInfoNV: return "OpticalFlowImageFormatInfoNV"; + case StructureType::eOpticalFlowImageFormatPropertiesNV: return "OpticalFlowImageFormatPropertiesNV"; + case StructureType::eOpticalFlowSessionCreateInfoNV: return "OpticalFlowSessionCreateInfoNV"; + case StructureType::eOpticalFlowExecuteInfoNV: return "OpticalFlowExecuteInfoNV"; + case StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV: return "OpticalFlowSessionCreatePrivateDataInfoNV"; + case StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT: return "PhysicalDeviceLegacyDitheringFeaturesEXT"; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + case StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID: return "PhysicalDeviceExternalFormatResolveFeaturesANDROID"; + case StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID: return "PhysicalDeviceExternalFormatResolvePropertiesANDROID"; + case StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID: return "AndroidHardwareBufferFormatResolvePropertiesANDROID"; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + case StructureType::ePhysicalDeviceAntiLagFeaturesAMD: return "PhysicalDeviceAntiLagFeaturesAMD"; + case StructureType::eAntiLagDataAMD: return "AntiLagDataAMD"; + case StructureType::eAntiLagPresentationInfoAMD: return 
"AntiLagPresentationInfoAMD"; + case StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR: return "PhysicalDeviceRayTracingPositionFetchFeaturesKHR"; + case StructureType::ePhysicalDeviceShaderObjectFeaturesEXT: return "PhysicalDeviceShaderObjectFeaturesEXT"; + case StructureType::ePhysicalDeviceShaderObjectPropertiesEXT: return "PhysicalDeviceShaderObjectPropertiesEXT"; + case StructureType::eShaderCreateInfoEXT: return "ShaderCreateInfoEXT"; + case StructureType::ePhysicalDevicePipelineBinaryFeaturesKHR: return "PhysicalDevicePipelineBinaryFeaturesKHR"; + case StructureType::ePipelineBinaryCreateInfoKHR: return "PipelineBinaryCreateInfoKHR"; + case StructureType::ePipelineBinaryInfoKHR: return "PipelineBinaryInfoKHR"; + case StructureType::ePipelineBinaryKeyKHR: return "PipelineBinaryKeyKHR"; + case StructureType::ePhysicalDevicePipelineBinaryPropertiesKHR: return "PhysicalDevicePipelineBinaryPropertiesKHR"; + case StructureType::eReleaseCapturedPipelineDataInfoKHR: return "ReleaseCapturedPipelineDataInfoKHR"; + case StructureType::ePipelineBinaryDataInfoKHR: return "PipelineBinaryDataInfoKHR"; + case StructureType::ePipelineCreateInfoKHR: return "PipelineCreateInfoKHR"; + case StructureType::eDevicePipelineBinaryInternalCacheControlKHR: return "DevicePipelineBinaryInternalCacheControlKHR"; + case StructureType::ePipelineBinaryHandlesInfoKHR: return "PipelineBinaryHandlesInfoKHR"; + case StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM: return "PhysicalDeviceTilePropertiesFeaturesQCOM"; + case StructureType::eTilePropertiesQCOM: return "TilePropertiesQCOM"; + case StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC: return "PhysicalDeviceAmigoProfilingFeaturesSEC"; + case StructureType::eAmigoProfilingSubmitInfoSEC: return "AmigoProfilingSubmitInfoSEC"; + case StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM: return "PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM"; + case StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV: return "PhysicalDeviceRayTracingInvocationReorderFeaturesNV"; + case StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV: return "PhysicalDeviceRayTracingInvocationReorderPropertiesNV"; + case StructureType::ePhysicalDeviceCooperativeVectorFeaturesNV: return "PhysicalDeviceCooperativeVectorFeaturesNV"; + case StructureType::ePhysicalDeviceCooperativeVectorPropertiesNV: return "PhysicalDeviceCooperativeVectorPropertiesNV"; + case StructureType::eCooperativeVectorPropertiesNV: return "CooperativeVectorPropertiesNV"; + case StructureType::eConvertCooperativeVectorMatrixInfoNV: return "ConvertCooperativeVectorMatrixInfoNV"; + case StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV: return "PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV"; + case StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV: return "PhysicalDeviceExtendedSparseAddressSpacePropertiesNV"; + case StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT: return "PhysicalDeviceMutableDescriptorTypeFeaturesEXT"; + case StructureType::eMutableDescriptorTypeCreateInfoEXT: return "MutableDescriptorTypeCreateInfoEXT"; + case StructureType::ePhysicalDeviceLegacyVertexAttributesFeaturesEXT: return "PhysicalDeviceLegacyVertexAttributesFeaturesEXT"; + case StructureType::ePhysicalDeviceLegacyVertexAttributesPropertiesEXT: return "PhysicalDeviceLegacyVertexAttributesPropertiesEXT"; + case StructureType::eLayerSettingsCreateInfoEXT: return "LayerSettingsCreateInfoEXT"; + case 
StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM: return "PhysicalDeviceShaderCoreBuiltinsFeaturesARM"; + case StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM: return "PhysicalDeviceShaderCoreBuiltinsPropertiesARM"; + case StructureType::ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT: return "PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT"; + case StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT: return "PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT"; + case StructureType::eLatencySleepModeInfoNV: return "LatencySleepModeInfoNV"; + case StructureType::eLatencySleepInfoNV: return "LatencySleepInfoNV"; + case StructureType::eSetLatencyMarkerInfoNV: return "SetLatencyMarkerInfoNV"; + case StructureType::eGetLatencyMarkerInfoNV: return "GetLatencyMarkerInfoNV"; + case StructureType::eLatencyTimingsFrameReportNV: return "LatencyTimingsFrameReportNV"; + case StructureType::eLatencySubmissionPresentIdNV: return "LatencySubmissionPresentIdNV"; + case StructureType::eOutOfBandQueueTypeInfoNV: return "OutOfBandQueueTypeInfoNV"; + case StructureType::eSwapchainLatencyCreateInfoNV: return "SwapchainLatencyCreateInfoNV"; + case StructureType::eLatencySurfaceCapabilitiesNV: return "LatencySurfaceCapabilitiesNV"; + case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesKHR: return "PhysicalDeviceCooperativeMatrixFeaturesKHR"; + case StructureType::eCooperativeMatrixPropertiesKHR: return "CooperativeMatrixPropertiesKHR"; + case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesKHR: return "PhysicalDeviceCooperativeMatrixPropertiesKHR"; + case StructureType::ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM: return "PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM"; + case StructureType::eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM: return "MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM"; + case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR: return "PhysicalDeviceComputeShaderDerivativesFeaturesKHR"; + case StructureType::ePhysicalDeviceComputeShaderDerivativesPropertiesKHR: return "PhysicalDeviceComputeShaderDerivativesPropertiesKHR"; + case StructureType::eVideoDecodeAv1CapabilitiesKHR: return "VideoDecodeAv1CapabilitiesKHR"; + case StructureType::eVideoDecodeAv1PictureInfoKHR: return "VideoDecodeAv1PictureInfoKHR"; + case StructureType::eVideoDecodeAv1ProfileInfoKHR: return "VideoDecodeAv1ProfileInfoKHR"; + case StructureType::eVideoDecodeAv1SessionParametersCreateInfoKHR: return "VideoDecodeAv1SessionParametersCreateInfoKHR"; + case StructureType::eVideoDecodeAv1DpbSlotInfoKHR: return "VideoDecodeAv1DpbSlotInfoKHR"; + case StructureType::eVideoEncodeAv1CapabilitiesKHR: return "VideoEncodeAv1CapabilitiesKHR"; + case StructureType::eVideoEncodeAv1SessionParametersCreateInfoKHR: return "VideoEncodeAv1SessionParametersCreateInfoKHR"; + case StructureType::eVideoEncodeAv1PictureInfoKHR: return "VideoEncodeAv1PictureInfoKHR"; + case StructureType::eVideoEncodeAv1DpbSlotInfoKHR: return "VideoEncodeAv1DpbSlotInfoKHR"; + case StructureType::ePhysicalDeviceVideoEncodeAv1FeaturesKHR: return "PhysicalDeviceVideoEncodeAv1FeaturesKHR"; + case StructureType::eVideoEncodeAv1ProfileInfoKHR: return "VideoEncodeAv1ProfileInfoKHR"; + case StructureType::eVideoEncodeAv1RateControlInfoKHR: return "VideoEncodeAv1RateControlInfoKHR"; + case StructureType::eVideoEncodeAv1RateControlLayerInfoKHR: return "VideoEncodeAv1RateControlLayerInfoKHR"; + case 
StructureType::eVideoEncodeAv1QualityLevelPropertiesKHR: return "VideoEncodeAv1QualityLevelPropertiesKHR"; + case StructureType::eVideoEncodeAv1SessionCreateInfoKHR: return "VideoEncodeAv1SessionCreateInfoKHR"; + case StructureType::eVideoEncodeAv1GopRemainingFrameInfoKHR: return "VideoEncodeAv1GopRemainingFrameInfoKHR"; + case StructureType::ePhysicalDeviceVideoMaintenance1FeaturesKHR: return "PhysicalDeviceVideoMaintenance1FeaturesKHR"; + case StructureType::eVideoInlineQueryInfoKHR: return "VideoInlineQueryInfoKHR"; + case StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV: return "PhysicalDevicePerStageDescriptorSetFeaturesNV"; + case StructureType::ePhysicalDeviceImageProcessing2FeaturesQCOM: return "PhysicalDeviceImageProcessing2FeaturesQCOM"; + case StructureType::ePhysicalDeviceImageProcessing2PropertiesQCOM: return "PhysicalDeviceImageProcessing2PropertiesQCOM"; + case StructureType::eSamplerBlockMatchWindowCreateInfoQCOM: return "SamplerBlockMatchWindowCreateInfoQCOM"; + case StructureType::eSamplerCubicWeightsCreateInfoQCOM: return "SamplerCubicWeightsCreateInfoQCOM"; + case StructureType::ePhysicalDeviceCubicWeightsFeaturesQCOM: return "PhysicalDeviceCubicWeightsFeaturesQCOM"; + case StructureType::eBlitImageCubicWeightsInfoQCOM: return "BlitImageCubicWeightsInfoQCOM"; + case StructureType::ePhysicalDeviceYcbcrDegammaFeaturesQCOM: return "PhysicalDeviceYcbcrDegammaFeaturesQCOM"; + case StructureType::eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM: return "SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM"; + case StructureType::ePhysicalDeviceCubicClampFeaturesQCOM: return "PhysicalDeviceCubicClampFeaturesQCOM"; + case StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT: return "PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT"; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + case StructureType::eScreenBufferPropertiesQNX: return "ScreenBufferPropertiesQNX"; + case StructureType::eScreenBufferFormatPropertiesQNX: return "ScreenBufferFormatPropertiesQNX"; + case StructureType::eImportScreenBufferInfoQNX: return "ImportScreenBufferInfoQNX"; + case StructureType::eExternalFormatQNX: return "ExternalFormatQNX"; + case StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX: return "PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX"; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + case StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT: return "PhysicalDeviceLayeredDriverPropertiesMSFT"; + case StructureType::eCalibratedTimestampInfoKHR: return "CalibratedTimestampInfoKHR"; + case StructureType::eSetDescriptorBufferOffsetsInfoEXT: return "SetDescriptorBufferOffsetsInfoEXT"; + case StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT: return "BindDescriptorBufferEmbeddedSamplersInfoEXT"; + case StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV: return "PhysicalDeviceDescriptorPoolOverallocationFeaturesNV"; + case StructureType::ePhysicalDeviceTileMemoryHeapFeaturesQCOM: return "PhysicalDeviceTileMemoryHeapFeaturesQCOM"; + case StructureType::ePhysicalDeviceTileMemoryHeapPropertiesQCOM: return "PhysicalDeviceTileMemoryHeapPropertiesQCOM"; + case StructureType::eTileMemoryRequirementsQCOM: return "TileMemoryRequirementsQCOM"; + case StructureType::eTileMemoryBindInfoQCOM: return "TileMemoryBindInfoQCOM"; + case StructureType::eTileMemorySizeInfoQCOM: return "TileMemorySizeInfoQCOM"; + case StructureType::eDisplaySurfaceStereoCreateInfoNV: return "DisplaySurfaceStereoCreateInfoNV"; + case 
StructureType::eDisplayModeStereoPropertiesNV: return "DisplayModeStereoPropertiesNV"; + case StructureType::eVideoEncodeQuantizationMapCapabilitiesKHR: return "VideoEncodeQuantizationMapCapabilitiesKHR"; + case StructureType::eVideoFormatQuantizationMapPropertiesKHR: return "VideoFormatQuantizationMapPropertiesKHR"; + case StructureType::eVideoEncodeQuantizationMapInfoKHR: return "VideoEncodeQuantizationMapInfoKHR"; + case StructureType::eVideoEncodeQuantizationMapSessionParametersCreateInfoKHR: return "VideoEncodeQuantizationMapSessionParametersCreateInfoKHR"; + case StructureType::ePhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR: return "PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR"; + case StructureType::eVideoEncodeH264QuantizationMapCapabilitiesKHR: return "VideoEncodeH264QuantizationMapCapabilitiesKHR"; + case StructureType::eVideoEncodeH265QuantizationMapCapabilitiesKHR: return "VideoEncodeH265QuantizationMapCapabilitiesKHR"; + case StructureType::eVideoFormatH265QuantizationMapPropertiesKHR: return "VideoFormatH265QuantizationMapPropertiesKHR"; + case StructureType::eVideoEncodeAv1QuantizationMapCapabilitiesKHR: return "VideoEncodeAv1QuantizationMapCapabilitiesKHR"; + case StructureType::eVideoFormatAv1QuantizationMapPropertiesKHR: return "VideoFormatAv1QuantizationMapPropertiesKHR"; + case StructureType::ePhysicalDeviceRawAccessChainsFeaturesNV: return "PhysicalDeviceRawAccessChainsFeaturesNV"; + case StructureType::eExternalComputeQueueDeviceCreateInfoNV: return "ExternalComputeQueueDeviceCreateInfoNV"; + case StructureType::eExternalComputeQueueCreateInfoNV: return "ExternalComputeQueueCreateInfoNV"; + case StructureType::eExternalComputeQueueDataParamsNV: return "ExternalComputeQueueDataParamsNV"; + case StructureType::ePhysicalDeviceExternalComputeQueuePropertiesNV: return "PhysicalDeviceExternalComputeQueuePropertiesNV"; + case StructureType::ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR: return "PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR"; + case StructureType::ePhysicalDeviceCommandBufferInheritanceFeaturesNV: return "PhysicalDeviceCommandBufferInheritanceFeaturesNV"; + case StructureType::ePhysicalDeviceMaintenance7FeaturesKHR: return "PhysicalDeviceMaintenance7FeaturesKHR"; + case StructureType::ePhysicalDeviceMaintenance7PropertiesKHR: return "PhysicalDeviceMaintenance7PropertiesKHR"; + case StructureType::ePhysicalDeviceLayeredApiPropertiesListKHR: return "PhysicalDeviceLayeredApiPropertiesListKHR"; + case StructureType::ePhysicalDeviceLayeredApiPropertiesKHR: return "PhysicalDeviceLayeredApiPropertiesKHR"; + case StructureType::ePhysicalDeviceLayeredApiVulkanPropertiesKHR: return "PhysicalDeviceLayeredApiVulkanPropertiesKHR"; + case StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV: return "PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV"; + case StructureType::ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT: return "PhysicalDeviceShaderReplicatedCompositesFeaturesEXT"; + case StructureType::ePhysicalDeviceRayTracingValidationFeaturesNV: return "PhysicalDeviceRayTracingValidationFeaturesNV"; + case StructureType::ePhysicalDeviceClusterAccelerationStructureFeaturesNV: return "PhysicalDeviceClusterAccelerationStructureFeaturesNV"; + case StructureType::ePhysicalDeviceClusterAccelerationStructurePropertiesNV: return "PhysicalDeviceClusterAccelerationStructurePropertiesNV"; + case StructureType::eClusterAccelerationStructureClustersBottomLevelInputNV: return 
"ClusterAccelerationStructureClustersBottomLevelInputNV"; + case StructureType::eClusterAccelerationStructureTriangleClusterInputNV: return "ClusterAccelerationStructureTriangleClusterInputNV"; + case StructureType::eClusterAccelerationStructureMoveObjectsInputNV: return "ClusterAccelerationStructureMoveObjectsInputNV"; + case StructureType::eClusterAccelerationStructureInputInfoNV: return "ClusterAccelerationStructureInputInfoNV"; + case StructureType::eClusterAccelerationStructureCommandsInfoNV: return "ClusterAccelerationStructureCommandsInfoNV"; + case StructureType::eRayTracingPipelineClusterAccelerationStructureCreateInfoNV: return "RayTracingPipelineClusterAccelerationStructureCreateInfoNV"; + case StructureType::ePhysicalDevicePartitionedAccelerationStructureFeaturesNV: return "PhysicalDevicePartitionedAccelerationStructureFeaturesNV"; + case StructureType::ePhysicalDevicePartitionedAccelerationStructurePropertiesNV: return "PhysicalDevicePartitionedAccelerationStructurePropertiesNV"; + case StructureType::eWriteDescriptorSetPartitionedAccelerationStructureNV: return "WriteDescriptorSetPartitionedAccelerationStructureNV"; + case StructureType::ePartitionedAccelerationStructureInstancesInputNV: return "PartitionedAccelerationStructureInstancesInputNV"; + case StructureType::eBuildPartitionedAccelerationStructureInfoNV: return "BuildPartitionedAccelerationStructureInfoNV"; + case StructureType::ePartitionedAccelerationStructureFlagsNV: return "PartitionedAccelerationStructureFlagsNV"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT: return "PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT: return "PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT"; + case StructureType::eGeneratedCommandsMemoryRequirementsInfoEXT: return "GeneratedCommandsMemoryRequirementsInfoEXT"; + case StructureType::eIndirectExecutionSetCreateInfoEXT: return "IndirectExecutionSetCreateInfoEXT"; + case StructureType::eGeneratedCommandsInfoEXT: return "GeneratedCommandsInfoEXT"; + case StructureType::eIndirectCommandsLayoutCreateInfoEXT: return "IndirectCommandsLayoutCreateInfoEXT"; + case StructureType::eIndirectCommandsLayoutTokenEXT: return "IndirectCommandsLayoutTokenEXT"; + case StructureType::eWriteIndirectExecutionSetPipelineEXT: return "WriteIndirectExecutionSetPipelineEXT"; + case StructureType::eWriteIndirectExecutionSetShaderEXT: return "WriteIndirectExecutionSetShaderEXT"; + case StructureType::eIndirectExecutionSetPipelineInfoEXT: return "IndirectExecutionSetPipelineInfoEXT"; + case StructureType::eIndirectExecutionSetShaderInfoEXT: return "IndirectExecutionSetShaderInfoEXT"; + case StructureType::eIndirectExecutionSetShaderLayoutInfoEXT: return "IndirectExecutionSetShaderLayoutInfoEXT"; + case StructureType::eGeneratedCommandsPipelineInfoEXT: return "GeneratedCommandsPipelineInfoEXT"; + case StructureType::eGeneratedCommandsShaderInfoEXT: return "GeneratedCommandsShaderInfoEXT"; + case StructureType::ePhysicalDeviceMaintenance8FeaturesKHR: return "PhysicalDeviceMaintenance8FeaturesKHR"; + case StructureType::eMemoryBarrierAccessFlags3KHR: return "MemoryBarrierAccessFlags3KHR"; + case StructureType::ePhysicalDeviceImageAlignmentControlFeaturesMESA: return "PhysicalDeviceImageAlignmentControlFeaturesMESA"; + case StructureType::ePhysicalDeviceImageAlignmentControlPropertiesMESA: return "PhysicalDeviceImageAlignmentControlPropertiesMESA"; + case StructureType::eImageAlignmentControlCreateInfoMESA: return 
"ImageAlignmentControlCreateInfoMESA"; + case StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT: return "PhysicalDeviceDepthClampControlFeaturesEXT"; + case StructureType::ePipelineViewportDepthClampControlCreateInfoEXT: return "PipelineViewportDepthClampControlCreateInfoEXT"; + case StructureType::ePhysicalDeviceVideoMaintenance2FeaturesKHR: return "PhysicalDeviceVideoMaintenance2FeaturesKHR"; + case StructureType::eVideoDecodeH264InlineSessionParametersInfoKHR: return "VideoDecodeH264InlineSessionParametersInfoKHR"; + case StructureType::eVideoDecodeH265InlineSessionParametersInfoKHR: return "VideoDecodeH265InlineSessionParametersInfoKHR"; + case StructureType::eVideoDecodeAv1InlineSessionParametersInfoKHR: return "VideoDecodeAv1InlineSessionParametersInfoKHR"; + case StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI: return "PhysicalDeviceHdrVividFeaturesHUAWEI"; + case StructureType::eHdrVividDynamicMetadataHUAWEI: return "HdrVividDynamicMetadataHUAWEI"; + case StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV: return "PhysicalDeviceCooperativeMatrix2FeaturesNV"; + case StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV: return "CooperativeMatrixFlexibleDimensionsPropertiesNV"; + case StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV: return "PhysicalDeviceCooperativeMatrix2PropertiesNV"; + case StructureType::ePhysicalDevicePipelineOpacityMicromapFeaturesARM: return "PhysicalDevicePipelineOpacityMicromapFeaturesARM"; +#if defined( VK_USE_PLATFORM_METAL_EXT ) + case StructureType::eImportMemoryMetalHandleInfoEXT: return "ImportMemoryMetalHandleInfoEXT"; + case StructureType::eMemoryMetalHandlePropertiesEXT: return "MemoryMetalHandlePropertiesEXT"; + case StructureType::eMemoryGetMetalHandleInfoEXT: return "MemoryGetMetalHandleInfoEXT"; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + case StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesKHR: return "PhysicalDeviceDepthClampZeroOneFeaturesKHR"; + case StructureType::ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT: return "PhysicalDeviceVertexAttributeRobustnessFeaturesEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::eSetPresentConfigNV: return "SetPresentConfigNV"; + case StructureType::ePhysicalDevicePresentMeteringFeaturesNV: return "PhysicalDevicePresentMeteringFeaturesNV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesEXT: return "PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT"; + case StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesEXT: return "PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT"; + case StructureType::eRenderPassFragmentDensityMapOffsetEndInfoEXT: return "RenderPassFragmentDensityMapOffsetEndInfoEXT"; + case StructureType::eRenderingEndInfoEXT: return "RenderingEndInfoEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineCacheHeaderVersion value ) + { + switch ( value ) + { + case PipelineCacheHeaderVersion::eOne: return "One"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ObjectType value ) + { + switch ( value ) + { + case ObjectType::eUnknown: return "Unknown"; + case ObjectType::eInstance: return "Instance"; + case ObjectType::ePhysicalDevice: return "PhysicalDevice"; + case ObjectType::eDevice: return 
"Device"; + case ObjectType::eQueue: return "Queue"; + case ObjectType::eSemaphore: return "Semaphore"; + case ObjectType::eCommandBuffer: return "CommandBuffer"; + case ObjectType::eFence: return "Fence"; + case ObjectType::eDeviceMemory: return "DeviceMemory"; + case ObjectType::eBuffer: return "Buffer"; + case ObjectType::eImage: return "Image"; + case ObjectType::eEvent: return "Event"; + case ObjectType::eQueryPool: return "QueryPool"; + case ObjectType::eBufferView: return "BufferView"; + case ObjectType::eImageView: return "ImageView"; + case ObjectType::eShaderModule: return "ShaderModule"; + case ObjectType::ePipelineCache: return "PipelineCache"; + case ObjectType::ePipelineLayout: return "PipelineLayout"; + case ObjectType::eRenderPass: return "RenderPass"; + case ObjectType::ePipeline: return "Pipeline"; + case ObjectType::eDescriptorSetLayout: return "DescriptorSetLayout"; + case ObjectType::eSampler: return "Sampler"; + case ObjectType::eDescriptorPool: return "DescriptorPool"; + case ObjectType::eDescriptorSet: return "DescriptorSet"; + case ObjectType::eFramebuffer: return "Framebuffer"; + case ObjectType::eCommandPool: return "CommandPool"; + case ObjectType::eSamplerYcbcrConversion: return "SamplerYcbcrConversion"; + case ObjectType::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate"; + case ObjectType::ePrivateDataSlot: return "PrivateDataSlot"; + case ObjectType::eSurfaceKHR: return "SurfaceKHR"; + case ObjectType::eSwapchainKHR: return "SwapchainKHR"; + case ObjectType::eDisplayKHR: return "DisplayKHR"; + case ObjectType::eDisplayModeKHR: return "DisplayModeKHR"; + case ObjectType::eDebugReportCallbackEXT: return "DebugReportCallbackEXT"; + case ObjectType::eVideoSessionKHR: return "VideoSessionKHR"; + case ObjectType::eVideoSessionParametersKHR: return "VideoSessionParametersKHR"; + case ObjectType::eCuModuleNVX: return "CuModuleNVX"; + case ObjectType::eCuFunctionNVX: return "CuFunctionNVX"; + case ObjectType::eDebugUtilsMessengerEXT: return "DebugUtilsMessengerEXT"; + case ObjectType::eAccelerationStructureKHR: return "AccelerationStructureKHR"; + case ObjectType::eValidationCacheEXT: return "ValidationCacheEXT"; + case ObjectType::eAccelerationStructureNV: return "AccelerationStructureNV"; + case ObjectType::ePerformanceConfigurationINTEL: return "PerformanceConfigurationINTEL"; + case ObjectType::eDeferredOperationKHR: return "DeferredOperationKHR"; + case ObjectType::eIndirectCommandsLayoutNV: return "IndirectCommandsLayoutNV"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case ObjectType::eCudaModuleNV: return "CudaModuleNV"; + case ObjectType::eCudaFunctionNV: return "CudaFunctionNV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case ObjectType::eBufferCollectionFUCHSIA: return "BufferCollectionFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + case ObjectType::eMicromapEXT: return "MicromapEXT"; + case ObjectType::eOpticalFlowSessionNV: return "OpticalFlowSessionNV"; + case ObjectType::eShaderEXT: return "ShaderEXT"; + case ObjectType::ePipelineBinaryKHR: return "PipelineBinaryKHR"; + case ObjectType::eExternalComputeQueueNV: return "ExternalComputeQueueNV"; + case ObjectType::eIndirectCommandsLayoutEXT: return "IndirectCommandsLayoutEXT"; + case ObjectType::eIndirectExecutionSetEXT: return "IndirectExecutionSetEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VendorId value ) + { + switch ( 
value ) + { + case VendorId::eKhronos: return "Khronos"; + case VendorId::eVIV: return "VIV"; + case VendorId::eVSI: return "VSI"; + case VendorId::eKazan: return "Kazan"; + case VendorId::eCodeplay: return "Codeplay"; + case VendorId::eMESA: return "MESA"; + case VendorId::ePocl: return "Pocl"; + case VendorId::eMobileye: return "Mobileye"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( Format value ) + { + switch ( value ) + { + case Format::eUndefined: return "Undefined"; + case Format::eR4G4UnormPack8: return "R4G4UnormPack8"; + case Format::eR4G4B4A4UnormPack16: return "R4G4B4A4UnormPack16"; + case Format::eB4G4R4A4UnormPack16: return "B4G4R4A4UnormPack16"; + case Format::eR5G6B5UnormPack16: return "R5G6B5UnormPack16"; + case Format::eB5G6R5UnormPack16: return "B5G6R5UnormPack16"; + case Format::eR5G5B5A1UnormPack16: return "R5G5B5A1UnormPack16"; + case Format::eB5G5R5A1UnormPack16: return "B5G5R5A1UnormPack16"; + case Format::eA1R5G5B5UnormPack16: return "A1R5G5B5UnormPack16"; + case Format::eR8Unorm: return "R8Unorm"; + case Format::eR8Snorm: return "R8Snorm"; + case Format::eR8Uscaled: return "R8Uscaled"; + case Format::eR8Sscaled: return "R8Sscaled"; + case Format::eR8Uint: return "R8Uint"; + case Format::eR8Sint: return "R8Sint"; + case Format::eR8Srgb: return "R8Srgb"; + case Format::eR8G8Unorm: return "R8G8Unorm"; + case Format::eR8G8Snorm: return "R8G8Snorm"; + case Format::eR8G8Uscaled: return "R8G8Uscaled"; + case Format::eR8G8Sscaled: return "R8G8Sscaled"; + case Format::eR8G8Uint: return "R8G8Uint"; + case Format::eR8G8Sint: return "R8G8Sint"; + case Format::eR8G8Srgb: return "R8G8Srgb"; + case Format::eR8G8B8Unorm: return "R8G8B8Unorm"; + case Format::eR8G8B8Snorm: return "R8G8B8Snorm"; + case Format::eR8G8B8Uscaled: return "R8G8B8Uscaled"; + case Format::eR8G8B8Sscaled: return "R8G8B8Sscaled"; + case Format::eR8G8B8Uint: return "R8G8B8Uint"; + case Format::eR8G8B8Sint: return "R8G8B8Sint"; + case Format::eR8G8B8Srgb: return "R8G8B8Srgb"; + case Format::eB8G8R8Unorm: return "B8G8R8Unorm"; + case Format::eB8G8R8Snorm: return "B8G8R8Snorm"; + case Format::eB8G8R8Uscaled: return "B8G8R8Uscaled"; + case Format::eB8G8R8Sscaled: return "B8G8R8Sscaled"; + case Format::eB8G8R8Uint: return "B8G8R8Uint"; + case Format::eB8G8R8Sint: return "B8G8R8Sint"; + case Format::eB8G8R8Srgb: return "B8G8R8Srgb"; + case Format::eR8G8B8A8Unorm: return "R8G8B8A8Unorm"; + case Format::eR8G8B8A8Snorm: return "R8G8B8A8Snorm"; + case Format::eR8G8B8A8Uscaled: return "R8G8B8A8Uscaled"; + case Format::eR8G8B8A8Sscaled: return "R8G8B8A8Sscaled"; + case Format::eR8G8B8A8Uint: return "R8G8B8A8Uint"; + case Format::eR8G8B8A8Sint: return "R8G8B8A8Sint"; + case Format::eR8G8B8A8Srgb: return "R8G8B8A8Srgb"; + case Format::eB8G8R8A8Unorm: return "B8G8R8A8Unorm"; + case Format::eB8G8R8A8Snorm: return "B8G8R8A8Snorm"; + case Format::eB8G8R8A8Uscaled: return "B8G8R8A8Uscaled"; + case Format::eB8G8R8A8Sscaled: return "B8G8R8A8Sscaled"; + case Format::eB8G8R8A8Uint: return "B8G8R8A8Uint"; + case Format::eB8G8R8A8Sint: return "B8G8R8A8Sint"; + case Format::eB8G8R8A8Srgb: return "B8G8R8A8Srgb"; + case Format::eA8B8G8R8UnormPack32: return "A8B8G8R8UnormPack32"; + case Format::eA8B8G8R8SnormPack32: return "A8B8G8R8SnormPack32"; + case Format::eA8B8G8R8UscaledPack32: return "A8B8G8R8UscaledPack32"; + case Format::eA8B8G8R8SscaledPack32: return "A8B8G8R8SscaledPack32"; + case 
Format::eA8B8G8R8UintPack32: return "A8B8G8R8UintPack32"; + case Format::eA8B8G8R8SintPack32: return "A8B8G8R8SintPack32"; + case Format::eA8B8G8R8SrgbPack32: return "A8B8G8R8SrgbPack32"; + case Format::eA2R10G10B10UnormPack32: return "A2R10G10B10UnormPack32"; + case Format::eA2R10G10B10SnormPack32: return "A2R10G10B10SnormPack32"; + case Format::eA2R10G10B10UscaledPack32: return "A2R10G10B10UscaledPack32"; + case Format::eA2R10G10B10SscaledPack32: return "A2R10G10B10SscaledPack32"; + case Format::eA2R10G10B10UintPack32: return "A2R10G10B10UintPack32"; + case Format::eA2R10G10B10SintPack32: return "A2R10G10B10SintPack32"; + case Format::eA2B10G10R10UnormPack32: return "A2B10G10R10UnormPack32"; + case Format::eA2B10G10R10SnormPack32: return "A2B10G10R10SnormPack32"; + case Format::eA2B10G10R10UscaledPack32: return "A2B10G10R10UscaledPack32"; + case Format::eA2B10G10R10SscaledPack32: return "A2B10G10R10SscaledPack32"; + case Format::eA2B10G10R10UintPack32: return "A2B10G10R10UintPack32"; + case Format::eA2B10G10R10SintPack32: return "A2B10G10R10SintPack32"; + case Format::eR16Unorm: return "R16Unorm"; + case Format::eR16Snorm: return "R16Snorm"; + case Format::eR16Uscaled: return "R16Uscaled"; + case Format::eR16Sscaled: return "R16Sscaled"; + case Format::eR16Uint: return "R16Uint"; + case Format::eR16Sint: return "R16Sint"; + case Format::eR16Sfloat: return "R16Sfloat"; + case Format::eR16G16Unorm: return "R16G16Unorm"; + case Format::eR16G16Snorm: return "R16G16Snorm"; + case Format::eR16G16Uscaled: return "R16G16Uscaled"; + case Format::eR16G16Sscaled: return "R16G16Sscaled"; + case Format::eR16G16Uint: return "R16G16Uint"; + case Format::eR16G16Sint: return "R16G16Sint"; + case Format::eR16G16Sfloat: return "R16G16Sfloat"; + case Format::eR16G16B16Unorm: return "R16G16B16Unorm"; + case Format::eR16G16B16Snorm: return "R16G16B16Snorm"; + case Format::eR16G16B16Uscaled: return "R16G16B16Uscaled"; + case Format::eR16G16B16Sscaled: return "R16G16B16Sscaled"; + case Format::eR16G16B16Uint: return "R16G16B16Uint"; + case Format::eR16G16B16Sint: return "R16G16B16Sint"; + case Format::eR16G16B16Sfloat: return "R16G16B16Sfloat"; + case Format::eR16G16B16A16Unorm: return "R16G16B16A16Unorm"; + case Format::eR16G16B16A16Snorm: return "R16G16B16A16Snorm"; + case Format::eR16G16B16A16Uscaled: return "R16G16B16A16Uscaled"; + case Format::eR16G16B16A16Sscaled: return "R16G16B16A16Sscaled"; + case Format::eR16G16B16A16Uint: return "R16G16B16A16Uint"; + case Format::eR16G16B16A16Sint: return "R16G16B16A16Sint"; + case Format::eR16G16B16A16Sfloat: return "R16G16B16A16Sfloat"; + case Format::eR32Uint: return "R32Uint"; + case Format::eR32Sint: return "R32Sint"; + case Format::eR32Sfloat: return "R32Sfloat"; + case Format::eR32G32Uint: return "R32G32Uint"; + case Format::eR32G32Sint: return "R32G32Sint"; + case Format::eR32G32Sfloat: return "R32G32Sfloat"; + case Format::eR32G32B32Uint: return "R32G32B32Uint"; + case Format::eR32G32B32Sint: return "R32G32B32Sint"; + case Format::eR32G32B32Sfloat: return "R32G32B32Sfloat"; + case Format::eR32G32B32A32Uint: return "R32G32B32A32Uint"; + case Format::eR32G32B32A32Sint: return "R32G32B32A32Sint"; + case Format::eR32G32B32A32Sfloat: return "R32G32B32A32Sfloat"; + case Format::eR64Uint: return "R64Uint"; + case Format::eR64Sint: return "R64Sint"; + case Format::eR64Sfloat: return "R64Sfloat"; + case Format::eR64G64Uint: return "R64G64Uint"; + case Format::eR64G64Sint: return "R64G64Sint"; + case Format::eR64G64Sfloat: return "R64G64Sfloat"; + case 
Format::eR64G64B64Uint: return "R64G64B64Uint"; + case Format::eR64G64B64Sint: return "R64G64B64Sint"; + case Format::eR64G64B64Sfloat: return "R64G64B64Sfloat"; + case Format::eR64G64B64A64Uint: return "R64G64B64A64Uint"; + case Format::eR64G64B64A64Sint: return "R64G64B64A64Sint"; + case Format::eR64G64B64A64Sfloat: return "R64G64B64A64Sfloat"; + case Format::eB10G11R11UfloatPack32: return "B10G11R11UfloatPack32"; + case Format::eE5B9G9R9UfloatPack32: return "E5B9G9R9UfloatPack32"; + case Format::eD16Unorm: return "D16Unorm"; + case Format::eX8D24UnormPack32: return "X8D24UnormPack32"; + case Format::eD32Sfloat: return "D32Sfloat"; + case Format::eS8Uint: return "S8Uint"; + case Format::eD16UnormS8Uint: return "D16UnormS8Uint"; + case Format::eD24UnormS8Uint: return "D24UnormS8Uint"; + case Format::eD32SfloatS8Uint: return "D32SfloatS8Uint"; + case Format::eBc1RgbUnormBlock: return "Bc1RgbUnormBlock"; + case Format::eBc1RgbSrgbBlock: return "Bc1RgbSrgbBlock"; + case Format::eBc1RgbaUnormBlock: return "Bc1RgbaUnormBlock"; + case Format::eBc1RgbaSrgbBlock: return "Bc1RgbaSrgbBlock"; + case Format::eBc2UnormBlock: return "Bc2UnormBlock"; + case Format::eBc2SrgbBlock: return "Bc2SrgbBlock"; + case Format::eBc3UnormBlock: return "Bc3UnormBlock"; + case Format::eBc3SrgbBlock: return "Bc3SrgbBlock"; + case Format::eBc4UnormBlock: return "Bc4UnormBlock"; + case Format::eBc4SnormBlock: return "Bc4SnormBlock"; + case Format::eBc5UnormBlock: return "Bc5UnormBlock"; + case Format::eBc5SnormBlock: return "Bc5SnormBlock"; + case Format::eBc6HUfloatBlock: return "Bc6HUfloatBlock"; + case Format::eBc6HSfloatBlock: return "Bc6HSfloatBlock"; + case Format::eBc7UnormBlock: return "Bc7UnormBlock"; + case Format::eBc7SrgbBlock: return "Bc7SrgbBlock"; + case Format::eEtc2R8G8B8UnormBlock: return "Etc2R8G8B8UnormBlock"; + case Format::eEtc2R8G8B8SrgbBlock: return "Etc2R8G8B8SrgbBlock"; + case Format::eEtc2R8G8B8A1UnormBlock: return "Etc2R8G8B8A1UnormBlock"; + case Format::eEtc2R8G8B8A1SrgbBlock: return "Etc2R8G8B8A1SrgbBlock"; + case Format::eEtc2R8G8B8A8UnormBlock: return "Etc2R8G8B8A8UnormBlock"; + case Format::eEtc2R8G8B8A8SrgbBlock: return "Etc2R8G8B8A8SrgbBlock"; + case Format::eEacR11UnormBlock: return "EacR11UnormBlock"; + case Format::eEacR11SnormBlock: return "EacR11SnormBlock"; + case Format::eEacR11G11UnormBlock: return "EacR11G11UnormBlock"; + case Format::eEacR11G11SnormBlock: return "EacR11G11SnormBlock"; + case Format::eAstc4x4UnormBlock: return "Astc4x4UnormBlock"; + case Format::eAstc4x4SrgbBlock: return "Astc4x4SrgbBlock"; + case Format::eAstc5x4UnormBlock: return "Astc5x4UnormBlock"; + case Format::eAstc5x4SrgbBlock: return "Astc5x4SrgbBlock"; + case Format::eAstc5x5UnormBlock: return "Astc5x5UnormBlock"; + case Format::eAstc5x5SrgbBlock: return "Astc5x5SrgbBlock"; + case Format::eAstc6x5UnormBlock: return "Astc6x5UnormBlock"; + case Format::eAstc6x5SrgbBlock: return "Astc6x5SrgbBlock"; + case Format::eAstc6x6UnormBlock: return "Astc6x6UnormBlock"; + case Format::eAstc6x6SrgbBlock: return "Astc6x6SrgbBlock"; + case Format::eAstc8x5UnormBlock: return "Astc8x5UnormBlock"; + case Format::eAstc8x5SrgbBlock: return "Astc8x5SrgbBlock"; + case Format::eAstc8x6UnormBlock: return "Astc8x6UnormBlock"; + case Format::eAstc8x6SrgbBlock: return "Astc8x6SrgbBlock"; + case Format::eAstc8x8UnormBlock: return "Astc8x8UnormBlock"; + case Format::eAstc8x8SrgbBlock: return "Astc8x8SrgbBlock"; + case Format::eAstc10x5UnormBlock: return "Astc10x5UnormBlock"; + case Format::eAstc10x5SrgbBlock: return 
"Astc10x5SrgbBlock"; + case Format::eAstc10x6UnormBlock: return "Astc10x6UnormBlock"; + case Format::eAstc10x6SrgbBlock: return "Astc10x6SrgbBlock"; + case Format::eAstc10x8UnormBlock: return "Astc10x8UnormBlock"; + case Format::eAstc10x8SrgbBlock: return "Astc10x8SrgbBlock"; + case Format::eAstc10x10UnormBlock: return "Astc10x10UnormBlock"; + case Format::eAstc10x10SrgbBlock: return "Astc10x10SrgbBlock"; + case Format::eAstc12x10UnormBlock: return "Astc12x10UnormBlock"; + case Format::eAstc12x10SrgbBlock: return "Astc12x10SrgbBlock"; + case Format::eAstc12x12UnormBlock: return "Astc12x12UnormBlock"; + case Format::eAstc12x12SrgbBlock: return "Astc12x12SrgbBlock"; + case Format::eG8B8G8R8422Unorm: return "G8B8G8R8422Unorm"; + case Format::eB8G8R8G8422Unorm: return "B8G8R8G8422Unorm"; + case Format::eG8B8R83Plane420Unorm: return "G8B8R83Plane420Unorm"; + case Format::eG8B8R82Plane420Unorm: return "G8B8R82Plane420Unorm"; + case Format::eG8B8R83Plane422Unorm: return "G8B8R83Plane422Unorm"; + case Format::eG8B8R82Plane422Unorm: return "G8B8R82Plane422Unorm"; + case Format::eG8B8R83Plane444Unorm: return "G8B8R83Plane444Unorm"; + case Format::eR10X6UnormPack16: return "R10X6UnormPack16"; + case Format::eR10X6G10X6Unorm2Pack16: return "R10X6G10X6Unorm2Pack16"; + case Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return "R10X6G10X6B10X6A10X6Unorm4Pack16"; + case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return "G10X6B10X6G10X6R10X6422Unorm4Pack16"; + case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return "B10X6G10X6R10X6G10X6422Unorm4Pack16"; + case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return "G10X6B10X6R10X63Plane420Unorm3Pack16"; + case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return "G10X6B10X6R10X62Plane420Unorm3Pack16"; + case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return "G10X6B10X6R10X63Plane422Unorm3Pack16"; + case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return "G10X6B10X6R10X62Plane422Unorm3Pack16"; + case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return "G10X6B10X6R10X63Plane444Unorm3Pack16"; + case Format::eR12X4UnormPack16: return "R12X4UnormPack16"; + case Format::eR12X4G12X4Unorm2Pack16: return "R12X4G12X4Unorm2Pack16"; + case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return "R12X4G12X4B12X4A12X4Unorm4Pack16"; + case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return "G12X4B12X4G12X4R12X4422Unorm4Pack16"; + case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return "B12X4G12X4R12X4G12X4422Unorm4Pack16"; + case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return "G12X4B12X4R12X43Plane420Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return "G12X4B12X4R12X42Plane420Unorm3Pack16"; + case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return "G12X4B12X4R12X43Plane422Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return "G12X4B12X4R12X42Plane422Unorm3Pack16"; + case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return "G12X4B12X4R12X43Plane444Unorm3Pack16"; + case Format::eG16B16G16R16422Unorm: return "G16B16G16R16422Unorm"; + case Format::eB16G16R16G16422Unorm: return "B16G16R16G16422Unorm"; + case Format::eG16B16R163Plane420Unorm: return "G16B16R163Plane420Unorm"; + case Format::eG16B16R162Plane420Unorm: return "G16B16R162Plane420Unorm"; + case Format::eG16B16R163Plane422Unorm: return "G16B16R163Plane422Unorm"; + case Format::eG16B16R162Plane422Unorm: return "G16B16R162Plane422Unorm"; + case Format::eG16B16R163Plane444Unorm: return "G16B16R163Plane444Unorm"; + case Format::eG8B8R82Plane444Unorm: 
return "G8B8R82Plane444Unorm"; + case Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return "G10X6B10X6R10X62Plane444Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return "G12X4B12X4R12X42Plane444Unorm3Pack16"; + case Format::eG16B16R162Plane444Unorm: return "G16B16R162Plane444Unorm"; + case Format::eA4R4G4B4UnormPack16: return "A4R4G4B4UnormPack16"; + case Format::eA4B4G4R4UnormPack16: return "A4B4G4R4UnormPack16"; + case Format::eAstc4x4SfloatBlock: return "Astc4x4SfloatBlock"; + case Format::eAstc5x4SfloatBlock: return "Astc5x4SfloatBlock"; + case Format::eAstc5x5SfloatBlock: return "Astc5x5SfloatBlock"; + case Format::eAstc6x5SfloatBlock: return "Astc6x5SfloatBlock"; + case Format::eAstc6x6SfloatBlock: return "Astc6x6SfloatBlock"; + case Format::eAstc8x5SfloatBlock: return "Astc8x5SfloatBlock"; + case Format::eAstc8x6SfloatBlock: return "Astc8x6SfloatBlock"; + case Format::eAstc8x8SfloatBlock: return "Astc8x8SfloatBlock"; + case Format::eAstc10x5SfloatBlock: return "Astc10x5SfloatBlock"; + case Format::eAstc10x6SfloatBlock: return "Astc10x6SfloatBlock"; + case Format::eAstc10x8SfloatBlock: return "Astc10x8SfloatBlock"; + case Format::eAstc10x10SfloatBlock: return "Astc10x10SfloatBlock"; + case Format::eAstc12x10SfloatBlock: return "Astc12x10SfloatBlock"; + case Format::eAstc12x12SfloatBlock: return "Astc12x12SfloatBlock"; + case Format::eA1B5G5R5UnormPack16: return "A1B5G5R5UnormPack16"; + case Format::eA8Unorm: return "A8Unorm"; + case Format::ePvrtc12BppUnormBlockIMG: return "Pvrtc12BppUnormBlockIMG"; + case Format::ePvrtc14BppUnormBlockIMG: return "Pvrtc14BppUnormBlockIMG"; + case Format::ePvrtc22BppUnormBlockIMG: return "Pvrtc22BppUnormBlockIMG"; + case Format::ePvrtc24BppUnormBlockIMG: return "Pvrtc24BppUnormBlockIMG"; + case Format::ePvrtc12BppSrgbBlockIMG: return "Pvrtc12BppSrgbBlockIMG"; + case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG"; + case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG"; + case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG"; + case Format::eR16G16Sfixed5NV: return "R16G16Sfixed5NV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( FormatFeatureFlagBits value ) + { + switch ( value ) + { + case FormatFeatureFlagBits::eSampledImage: return "SampledImage"; + case FormatFeatureFlagBits::eStorageImage: return "StorageImage"; + case FormatFeatureFlagBits::eStorageImageAtomic: return "StorageImageAtomic"; + case FormatFeatureFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer"; + case FormatFeatureFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer"; + case FormatFeatureFlagBits::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic"; + case FormatFeatureFlagBits::eVertexBuffer: return "VertexBuffer"; + case FormatFeatureFlagBits::eColorAttachment: return "ColorAttachment"; + case FormatFeatureFlagBits::eColorAttachmentBlend: return "ColorAttachmentBlend"; + case FormatFeatureFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment"; + case FormatFeatureFlagBits::eBlitSrc: return "BlitSrc"; + case FormatFeatureFlagBits::eBlitDst: return "BlitDst"; + case FormatFeatureFlagBits::eSampledImageFilterLinear: return "SampledImageFilterLinear"; + case FormatFeatureFlagBits::eTransferSrc: return "TransferSrc"; + case FormatFeatureFlagBits::eTransferDst: return "TransferDst"; + case FormatFeatureFlagBits::eMidpointChromaSamples: return 
"MidpointChromaSamples"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter: return "SampledImageYcbcrConversionLinearFilter"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter: return "SampledImageYcbcrConversionSeparateReconstructionFilter"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit: return "SampledImageYcbcrConversionChromaReconstructionExplicit"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable: + return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable"; + case FormatFeatureFlagBits::eDisjoint: return "Disjoint"; + case FormatFeatureFlagBits::eCositedChromaSamples: return "CositedChromaSamples"; + case FormatFeatureFlagBits::eSampledImageFilterMinmax: return "SampledImageFilterMinmax"; + case FormatFeatureFlagBits::eVideoDecodeOutputKHR: return "VideoDecodeOutputKHR"; + case FormatFeatureFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; + case FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR"; + case FormatFeatureFlagBits::eSampledImageFilterCubicEXT: return "SampledImageFilterCubicEXT"; + case FormatFeatureFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT"; + case FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; + case FormatFeatureFlagBits::eVideoEncodeInputKHR: return "VideoEncodeInputKHR"; + case FormatFeatureFlagBits::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ImageCreateFlagBits value ) + { + switch ( value ) + { + case ImageCreateFlagBits::eSparseBinding: return "SparseBinding"; + case ImageCreateFlagBits::eSparseResidency: return "SparseResidency"; + case ImageCreateFlagBits::eSparseAliased: return "SparseAliased"; + case ImageCreateFlagBits::eMutableFormat: return "MutableFormat"; + case ImageCreateFlagBits::eCubeCompatible: return "CubeCompatible"; + case ImageCreateFlagBits::eAlias: return "Alias"; + case ImageCreateFlagBits::eSplitInstanceBindRegions: return "SplitInstanceBindRegions"; + case ImageCreateFlagBits::e2DArrayCompatible: return "2DArrayCompatible"; + case ImageCreateFlagBits::eBlockTexelViewCompatible: return "BlockTexelViewCompatible"; + case ImageCreateFlagBits::eExtendedUsage: return "ExtendedUsage"; + case ImageCreateFlagBits::eProtected: return "Protected"; + case ImageCreateFlagBits::eDisjoint: return "Disjoint"; + case ImageCreateFlagBits::eCornerSampledNV: return "CornerSampledNV"; + case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT: return "SampleLocationsCompatibleDepthEXT"; + case ImageCreateFlagBits::eSubsampledEXT: return "SubsampledEXT"; + case ImageCreateFlagBits::eDescriptorBufferCaptureReplayEXT: return "DescriptorBufferCaptureReplayEXT"; + case ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT: return "MultisampledRenderToSingleSampledEXT"; + case ImageCreateFlagBits::e2DViewCompatibleEXT: return "2DViewCompatibleEXT"; + case ImageCreateFlagBits::eVideoProfileIndependentKHR: return "VideoProfileIndependentKHR"; + case ImageCreateFlagBits::eFragmentDensityMapOffsetEXT: return "FragmentDensityMapOffsetEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 
std::string to_string( ImageTiling value ) + { + switch ( value ) + { + case ImageTiling::eOptimal: return "Optimal"; + case ImageTiling::eLinear: return "Linear"; + case ImageTiling::eDrmFormatModifierEXT: return "DrmFormatModifierEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ImageType value ) + { + switch ( value ) + { + case ImageType::e1D: return "1D"; + case ImageType::e2D: return "2D"; + case ImageType::e3D: return "3D"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ImageUsageFlagBits value ) + { + switch ( value ) + { + case ImageUsageFlagBits::eTransferSrc: return "TransferSrc"; + case ImageUsageFlagBits::eTransferDst: return "TransferDst"; + case ImageUsageFlagBits::eSampled: return "Sampled"; + case ImageUsageFlagBits::eStorage: return "Storage"; + case ImageUsageFlagBits::eColorAttachment: return "ColorAttachment"; + case ImageUsageFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment"; + case ImageUsageFlagBits::eTransientAttachment: return "TransientAttachment"; + case ImageUsageFlagBits::eInputAttachment: return "InputAttachment"; + case ImageUsageFlagBits::eHostTransfer: return "HostTransfer"; + case ImageUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; + case ImageUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; + case ImageUsageFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; + case ImageUsageFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT"; + case ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; + case ImageUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; + case ImageUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; + case ImageUsageFlagBits::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; + case ImageUsageFlagBits::eAttachmentFeedbackLoopEXT: return "AttachmentFeedbackLoopEXT"; + case ImageUsageFlagBits::eInvocationMaskHUAWEI: return "InvocationMaskHUAWEI"; + case ImageUsageFlagBits::eSampleWeightQCOM: return "SampleWeightQCOM"; + case ImageUsageFlagBits::eSampleBlockMatchQCOM: return "SampleBlockMatchQCOM"; + case ImageUsageFlagBits::eTileMemoryQCOM: return "TileMemoryQCOM"; + case ImageUsageFlagBits::eVideoEncodeQuantizationDeltaMapKHR: return "VideoEncodeQuantizationDeltaMapKHR"; + case ImageUsageFlagBits::eVideoEncodeEmphasisMapKHR: return "VideoEncodeEmphasisMapKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( InstanceCreateFlagBits value ) + { + switch ( value ) + { + case InstanceCreateFlagBits::eEnumeratePortabilityKHR: return "EnumeratePortabilityKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( InternalAllocationType value ) + { + switch ( value ) + { + case InternalAllocationType::eExecutable: return "Executable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( MemoryHeapFlagBits value ) + { + switch ( value ) + { + case MemoryHeapFlagBits::eDeviceLocal: return "DeviceLocal"; 
+ case MemoryHeapFlagBits::eMultiInstance: return "MultiInstance"; + case MemoryHeapFlagBits::eTileMemoryQCOM: return "TileMemoryQCOM"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( MemoryPropertyFlagBits value ) + { + switch ( value ) + { + case MemoryPropertyFlagBits::eDeviceLocal: return "DeviceLocal"; + case MemoryPropertyFlagBits::eHostVisible: return "HostVisible"; + case MemoryPropertyFlagBits::eHostCoherent: return "HostCoherent"; + case MemoryPropertyFlagBits::eHostCached: return "HostCached"; + case MemoryPropertyFlagBits::eLazilyAllocated: return "LazilyAllocated"; + case MemoryPropertyFlagBits::eProtected: return "Protected"; + case MemoryPropertyFlagBits::eDeviceCoherentAMD: return "DeviceCoherentAMD"; + case MemoryPropertyFlagBits::eDeviceUncachedAMD: return "DeviceUncachedAMD"; + case MemoryPropertyFlagBits::eRdmaCapableNV: return "RdmaCapableNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PhysicalDeviceType value ) + { + switch ( value ) + { + case PhysicalDeviceType::eOther: return "Other"; + case PhysicalDeviceType::eIntegratedGpu: return "IntegratedGpu"; + case PhysicalDeviceType::eDiscreteGpu: return "DiscreteGpu"; + case PhysicalDeviceType::eVirtualGpu: return "VirtualGpu"; + case PhysicalDeviceType::eCpu: return "Cpu"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( QueueFlagBits value ) + { + switch ( value ) + { + case QueueFlagBits::eGraphics: return "Graphics"; + case QueueFlagBits::eCompute: return "Compute"; + case QueueFlagBits::eTransfer: return "Transfer"; + case QueueFlagBits::eSparseBinding: return "SparseBinding"; + case QueueFlagBits::eProtected: return "Protected"; + case QueueFlagBits::eVideoDecodeKHR: return "VideoDecodeKHR"; + case QueueFlagBits::eVideoEncodeKHR: return "VideoEncodeKHR"; + case QueueFlagBits::eOpticalFlowNV: return "OpticalFlowNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SampleCountFlagBits value ) + { + switch ( value ) + { + case SampleCountFlagBits::e1: return "1"; + case SampleCountFlagBits::e2: return "2"; + case SampleCountFlagBits::e4: return "4"; + case SampleCountFlagBits::e8: return "8"; + case SampleCountFlagBits::e16: return "16"; + case SampleCountFlagBits::e32: return "32"; + case SampleCountFlagBits::e64: return "64"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SystemAllocationScope value ) + { + switch ( value ) + { + case SystemAllocationScope::eCommand: return "Command"; + case SystemAllocationScope::eObject: return "Object"; + case SystemAllocationScope::eCache: return "Cache"; + case SystemAllocationScope::eDevice: return "Device"; + case SystemAllocationScope::eInstance: return "Instance"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DeviceCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string 
to_string( DeviceQueueCreateFlagBits value ) + { + switch ( value ) + { + case DeviceQueueCreateFlagBits::eProtected: return "Protected"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineStageFlagBits value ) + { + switch ( value ) + { + case PipelineStageFlagBits::eTopOfPipe: return "TopOfPipe"; + case PipelineStageFlagBits::eDrawIndirect: return "DrawIndirect"; + case PipelineStageFlagBits::eVertexInput: return "VertexInput"; + case PipelineStageFlagBits::eVertexShader: return "VertexShader"; + case PipelineStageFlagBits::eTessellationControlShader: return "TessellationControlShader"; + case PipelineStageFlagBits::eTessellationEvaluationShader: return "TessellationEvaluationShader"; + case PipelineStageFlagBits::eGeometryShader: return "GeometryShader"; + case PipelineStageFlagBits::eFragmentShader: return "FragmentShader"; + case PipelineStageFlagBits::eEarlyFragmentTests: return "EarlyFragmentTests"; + case PipelineStageFlagBits::eLateFragmentTests: return "LateFragmentTests"; + case PipelineStageFlagBits::eColorAttachmentOutput: return "ColorAttachmentOutput"; + case PipelineStageFlagBits::eComputeShader: return "ComputeShader"; + case PipelineStageFlagBits::eTransfer: return "Transfer"; + case PipelineStageFlagBits::eBottomOfPipe: return "BottomOfPipe"; + case PipelineStageFlagBits::eHost: return "Host"; + case PipelineStageFlagBits::eAllGraphics: return "AllGraphics"; + case PipelineStageFlagBits::eAllCommands: return "AllCommands"; + case PipelineStageFlagBits::eNone: return "None"; + case PipelineStageFlagBits::eTransformFeedbackEXT: return "TransformFeedbackEXT"; + case PipelineStageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; + case PipelineStageFlagBits::eAccelerationStructureBuildKHR: return "AccelerationStructureBuildKHR"; + case PipelineStageFlagBits::eRayTracingShaderKHR: return "RayTracingShaderKHR"; + case PipelineStageFlagBits::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT"; + case PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; + case PipelineStageFlagBits::eTaskShaderEXT: return "TaskShaderEXT"; + case PipelineStageFlagBits::eMeshShaderEXT: return "MeshShaderEXT"; + case PipelineStageFlagBits::eCommandPreprocessEXT: return "CommandPreprocessEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( MemoryMapFlagBits value ) + { + switch ( value ) + { + case MemoryMapFlagBits::ePlacedEXT: return "PlacedEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ImageAspectFlagBits value ) + { + switch ( value ) + { + case ImageAspectFlagBits::eColor: return "Color"; + case ImageAspectFlagBits::eDepth: return "Depth"; + case ImageAspectFlagBits::eStencil: return "Stencil"; + case ImageAspectFlagBits::eMetadata: return "Metadata"; + case ImageAspectFlagBits::ePlane0: return "Plane0"; + case ImageAspectFlagBits::ePlane1: return "Plane1"; + case ImageAspectFlagBits::ePlane2: return "Plane2"; + case ImageAspectFlagBits::eNone: return "None"; + case ImageAspectFlagBits::eMemoryPlane0EXT: return "MemoryPlane0EXT"; + case ImageAspectFlagBits::eMemoryPlane1EXT: return "MemoryPlane1EXT"; + case 
ImageAspectFlagBits::eMemoryPlane2EXT: return "MemoryPlane2EXT"; + case ImageAspectFlagBits::eMemoryPlane3EXT: return "MemoryPlane3EXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SparseImageFormatFlagBits value ) + { + switch ( value ) + { + case SparseImageFormatFlagBits::eSingleMiptail: return "SingleMiptail"; + case SparseImageFormatFlagBits::eAlignedMipSize: return "AlignedMipSize"; + case SparseImageFormatFlagBits::eNonstandardBlockSize: return "NonstandardBlockSize"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SparseMemoryBindFlagBits value ) + { + switch ( value ) + { + case SparseMemoryBindFlagBits::eMetadata: return "Metadata"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( FenceCreateFlagBits value ) + { + switch ( value ) + { + case FenceCreateFlagBits::eSignaled: return "Signaled"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SemaphoreCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( EventCreateFlagBits value ) + { + switch ( value ) + { + case EventCreateFlagBits::eDeviceOnly: return "DeviceOnly"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( QueryPipelineStatisticFlagBits value ) + { + switch ( value ) + { + case QueryPipelineStatisticFlagBits::eInputAssemblyVertices: return "InputAssemblyVertices"; + case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives: return "InputAssemblyPrimitives"; + case QueryPipelineStatisticFlagBits::eVertexShaderInvocations: return "VertexShaderInvocations"; + case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations: return "GeometryShaderInvocations"; + case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives: return "GeometryShaderPrimitives"; + case QueryPipelineStatisticFlagBits::eClippingInvocations: return "ClippingInvocations"; + case QueryPipelineStatisticFlagBits::eClippingPrimitives: return "ClippingPrimitives"; + case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations: return "FragmentShaderInvocations"; + case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches: return "TessellationControlShaderPatches"; + case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations: return "TessellationEvaluationShaderInvocations"; + case QueryPipelineStatisticFlagBits::eComputeShaderInvocations: return "ComputeShaderInvocations"; + case QueryPipelineStatisticFlagBits::eTaskShaderInvocationsEXT: return "TaskShaderInvocationsEXT"; + case QueryPipelineStatisticFlagBits::eMeshShaderInvocationsEXT: return "MeshShaderInvocationsEXT"; + case QueryPipelineStatisticFlagBits::eClusterCullingShaderInvocationsHUAWEI: return "ClusterCullingShaderInvocationsHUAWEI"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( QueryResultFlagBits value ) + { + switch ( value ) + { + case 
QueryResultFlagBits::e64: return "64"; + case QueryResultFlagBits::eWait: return "Wait"; + case QueryResultFlagBits::eWithAvailability: return "WithAvailability"; + case QueryResultFlagBits::ePartial: return "Partial"; + case QueryResultFlagBits::eWithStatusKHR: return "WithStatusKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( QueryType value ) + { + switch ( value ) + { + case QueryType::eOcclusion: return "Occlusion"; + case QueryType::ePipelineStatistics: return "PipelineStatistics"; + case QueryType::eTimestamp: return "Timestamp"; + case QueryType::eResultStatusOnlyKHR: return "ResultStatusOnlyKHR"; + case QueryType::eTransformFeedbackStreamEXT: return "TransformFeedbackStreamEXT"; + case QueryType::ePerformanceQueryKHR: return "PerformanceQueryKHR"; + case QueryType::eAccelerationStructureCompactedSizeKHR: return "AccelerationStructureCompactedSizeKHR"; + case QueryType::eAccelerationStructureSerializationSizeKHR: return "AccelerationStructureSerializationSizeKHR"; + case QueryType::eAccelerationStructureCompactedSizeNV: return "AccelerationStructureCompactedSizeNV"; + case QueryType::ePerformanceQueryINTEL: return "PerformanceQueryINTEL"; + case QueryType::eVideoEncodeFeedbackKHR: return "VideoEncodeFeedbackKHR"; + case QueryType::eMeshPrimitivesGeneratedEXT: return "MeshPrimitivesGeneratedEXT"; + case QueryType::ePrimitivesGeneratedEXT: return "PrimitivesGeneratedEXT"; + case QueryType::eAccelerationStructureSerializationBottomLevelPointersKHR: return "AccelerationStructureSerializationBottomLevelPointersKHR"; + case QueryType::eAccelerationStructureSizeKHR: return "AccelerationStructureSizeKHR"; + case QueryType::eMicromapSerializationSizeEXT: return "MicromapSerializationSizeEXT"; + case QueryType::eMicromapCompactedSizeEXT: return "MicromapCompactedSizeEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( QueryPoolCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( BufferCreateFlagBits value ) + { + switch ( value ) + { + case BufferCreateFlagBits::eSparseBinding: return "SparseBinding"; + case BufferCreateFlagBits::eSparseResidency: return "SparseResidency"; + case BufferCreateFlagBits::eSparseAliased: return "SparseAliased"; + case BufferCreateFlagBits::eProtected: return "Protected"; + case BufferCreateFlagBits::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; + case BufferCreateFlagBits::eDescriptorBufferCaptureReplayEXT: return "DescriptorBufferCaptureReplayEXT"; + case BufferCreateFlagBits::eVideoProfileIndependentKHR: return "VideoProfileIndependentKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( BufferUsageFlagBits value ) + { + switch ( value ) + { + case BufferUsageFlagBits::eTransferSrc: return "TransferSrc"; + case BufferUsageFlagBits::eTransferDst: return "TransferDst"; + case BufferUsageFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer"; + case BufferUsageFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer"; + case BufferUsageFlagBits::eUniformBuffer: return "UniformBuffer"; + case BufferUsageFlagBits::eStorageBuffer: return "StorageBuffer"; + case BufferUsageFlagBits::eIndexBuffer: 
return "IndexBuffer"; + case BufferUsageFlagBits::eVertexBuffer: return "VertexBuffer"; + case BufferUsageFlagBits::eIndirectBuffer: return "IndirectBuffer"; + case BufferUsageFlagBits::eShaderDeviceAddress: return "ShaderDeviceAddress"; + case BufferUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; + case BufferUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; + case BufferUsageFlagBits::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT"; + case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT"; + case BufferUsageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case BufferUsageFlagBits::eExecutionGraphScratchAMDX: return "ExecutionGraphScratchAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR: return "AccelerationStructureBuildInputReadOnlyKHR"; + case BufferUsageFlagBits::eAccelerationStructureStorageKHR: return "AccelerationStructureStorageKHR"; + case BufferUsageFlagBits::eShaderBindingTableKHR: return "ShaderBindingTableKHR"; + case BufferUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; + case BufferUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; + case BufferUsageFlagBits::eSamplerDescriptorBufferEXT: return "SamplerDescriptorBufferEXT"; + case BufferUsageFlagBits::eResourceDescriptorBufferEXT: return "ResourceDescriptorBufferEXT"; + case BufferUsageFlagBits::ePushDescriptorsDescriptorBufferEXT: return "PushDescriptorsDescriptorBufferEXT"; + case BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT: return "MicromapBuildInputReadOnlyEXT"; + case BufferUsageFlagBits::eMicromapStorageEXT: return "MicromapStorageEXT"; + case BufferUsageFlagBits::eTileMemoryQCOM: return "TileMemoryQCOM"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SharingMode value ) + { + switch ( value ) + { + case SharingMode::eExclusive: return "Exclusive"; + case SharingMode::eConcurrent: return "Concurrent"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( BufferViewCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ImageLayout value ) + { + switch ( value ) + { + case ImageLayout::eUndefined: return "Undefined"; + case ImageLayout::eGeneral: return "General"; + case ImageLayout::eColorAttachmentOptimal: return "ColorAttachmentOptimal"; + case ImageLayout::eDepthStencilAttachmentOptimal: return "DepthStencilAttachmentOptimal"; + case ImageLayout::eDepthStencilReadOnlyOptimal: return "DepthStencilReadOnlyOptimal"; + case ImageLayout::eShaderReadOnlyOptimal: return "ShaderReadOnlyOptimal"; + case ImageLayout::eTransferSrcOptimal: return "TransferSrcOptimal"; + case ImageLayout::eTransferDstOptimal: return "TransferDstOptimal"; + case ImageLayout::ePreinitialized: return "Preinitialized"; + case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal: return "DepthReadOnlyStencilAttachmentOptimal"; + case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal: return "DepthAttachmentStencilReadOnlyOptimal"; + case ImageLayout::eDepthAttachmentOptimal: return "DepthAttachmentOptimal"; + case ImageLayout::eDepthReadOnlyOptimal: return "DepthReadOnlyOptimal"; + case 
ImageLayout::eStencilAttachmentOptimal: return "StencilAttachmentOptimal"; + case ImageLayout::eStencilReadOnlyOptimal: return "StencilReadOnlyOptimal"; + case ImageLayout::eReadOnlyOptimal: return "ReadOnlyOptimal"; + case ImageLayout::eAttachmentOptimal: return "AttachmentOptimal"; + case ImageLayout::eRenderingLocalRead: return "RenderingLocalRead"; + case ImageLayout::ePresentSrcKHR: return "PresentSrcKHR"; + case ImageLayout::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; + case ImageLayout::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; + case ImageLayout::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; + case ImageLayout::eSharedPresentKHR: return "SharedPresentKHR"; + case ImageLayout::eFragmentDensityMapOptimalEXT: return "FragmentDensityMapOptimalEXT"; + case ImageLayout::eFragmentShadingRateAttachmentOptimalKHR: return "FragmentShadingRateAttachmentOptimalKHR"; + case ImageLayout::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; + case ImageLayout::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; + case ImageLayout::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; + case ImageLayout::eAttachmentFeedbackLoopOptimalEXT: return "AttachmentFeedbackLoopOptimalEXT"; + case ImageLayout::eVideoEncodeQuantizationMapKHR: return "VideoEncodeQuantizationMapKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ComponentSwizzle value ) + { + switch ( value ) + { + case ComponentSwizzle::eIdentity: return "Identity"; + case ComponentSwizzle::eZero: return "Zero"; + case ComponentSwizzle::eOne: return "One"; + case ComponentSwizzle::eR: return "R"; + case ComponentSwizzle::eG: return "G"; + case ComponentSwizzle::eB: return "B"; + case ComponentSwizzle::eA: return "A"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ImageViewCreateFlagBits value ) + { + switch ( value ) + { + case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT: return "FragmentDensityMapDynamicEXT"; + case ImageViewCreateFlagBits::eDescriptorBufferCaptureReplayEXT: return "DescriptorBufferCaptureReplayEXT"; + case ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT: return "FragmentDensityMapDeferredEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ImageViewType value ) + { + switch ( value ) + { + case ImageViewType::e1D: return "1D"; + case ImageViewType::e2D: return "2D"; + case ImageViewType::e3D: return "3D"; + case ImageViewType::eCube: return "Cube"; + case ImageViewType::e1DArray: return "1DArray"; + case ImageViewType::e2DArray: return "2DArray"; + case ImageViewType::eCubeArray: return "CubeArray"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ShaderModuleCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineCacheCreateFlagBits value ) + { + switch ( value ) + { + case PipelineCacheCreateFlagBits::eExternallySynchronized: return "ExternallySynchronized"; + case PipelineCacheCreateFlagBits::eInternallySynchronizedMergeKHR: return "InternallySynchronizedMergeKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( 
static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( BlendFactor value ) + { + switch ( value ) + { + case BlendFactor::eZero: return "Zero"; + case BlendFactor::eOne: return "One"; + case BlendFactor::eSrcColor: return "SrcColor"; + case BlendFactor::eOneMinusSrcColor: return "OneMinusSrcColor"; + case BlendFactor::eDstColor: return "DstColor"; + case BlendFactor::eOneMinusDstColor: return "OneMinusDstColor"; + case BlendFactor::eSrcAlpha: return "SrcAlpha"; + case BlendFactor::eOneMinusSrcAlpha: return "OneMinusSrcAlpha"; + case BlendFactor::eDstAlpha: return "DstAlpha"; + case BlendFactor::eOneMinusDstAlpha: return "OneMinusDstAlpha"; + case BlendFactor::eConstantColor: return "ConstantColor"; + case BlendFactor::eOneMinusConstantColor: return "OneMinusConstantColor"; + case BlendFactor::eConstantAlpha: return "ConstantAlpha"; + case BlendFactor::eOneMinusConstantAlpha: return "OneMinusConstantAlpha"; + case BlendFactor::eSrcAlphaSaturate: return "SrcAlphaSaturate"; + case BlendFactor::eSrc1Color: return "Src1Color"; + case BlendFactor::eOneMinusSrc1Color: return "OneMinusSrc1Color"; + case BlendFactor::eSrc1Alpha: return "Src1Alpha"; + case BlendFactor::eOneMinusSrc1Alpha: return "OneMinusSrc1Alpha"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( BlendOp value ) + { + switch ( value ) + { + case BlendOp::eAdd: return "Add"; + case BlendOp::eSubtract: return "Subtract"; + case BlendOp::eReverseSubtract: return "ReverseSubtract"; + case BlendOp::eMin: return "Min"; + case BlendOp::eMax: return "Max"; + case BlendOp::eZeroEXT: return "ZeroEXT"; + case BlendOp::eSrcEXT: return "SrcEXT"; + case BlendOp::eDstEXT: return "DstEXT"; + case BlendOp::eSrcOverEXT: return "SrcOverEXT"; + case BlendOp::eDstOverEXT: return "DstOverEXT"; + case BlendOp::eSrcInEXT: return "SrcInEXT"; + case BlendOp::eDstInEXT: return "DstInEXT"; + case BlendOp::eSrcOutEXT: return "SrcOutEXT"; + case BlendOp::eDstOutEXT: return "DstOutEXT"; + case BlendOp::eSrcAtopEXT: return "SrcAtopEXT"; + case BlendOp::eDstAtopEXT: return "DstAtopEXT"; + case BlendOp::eXorEXT: return "XorEXT"; + case BlendOp::eMultiplyEXT: return "MultiplyEXT"; + case BlendOp::eScreenEXT: return "ScreenEXT"; + case BlendOp::eOverlayEXT: return "OverlayEXT"; + case BlendOp::eDarkenEXT: return "DarkenEXT"; + case BlendOp::eLightenEXT: return "LightenEXT"; + case BlendOp::eColordodgeEXT: return "ColordodgeEXT"; + case BlendOp::eColorburnEXT: return "ColorburnEXT"; + case BlendOp::eHardlightEXT: return "HardlightEXT"; + case BlendOp::eSoftlightEXT: return "SoftlightEXT"; + case BlendOp::eDifferenceEXT: return "DifferenceEXT"; + case BlendOp::eExclusionEXT: return "ExclusionEXT"; + case BlendOp::eInvertEXT: return "InvertEXT"; + case BlendOp::eInvertRgbEXT: return "InvertRgbEXT"; + case BlendOp::eLineardodgeEXT: return "LineardodgeEXT"; + case BlendOp::eLinearburnEXT: return "LinearburnEXT"; + case BlendOp::eVividlightEXT: return "VividlightEXT"; + case BlendOp::eLinearlightEXT: return "LinearlightEXT"; + case BlendOp::ePinlightEXT: return "PinlightEXT"; + case BlendOp::eHardmixEXT: return "HardmixEXT"; + case BlendOp::eHslHueEXT: return "HslHueEXT"; + case BlendOp::eHslSaturationEXT: return "HslSaturationEXT"; + case BlendOp::eHslColorEXT: return "HslColorEXT"; + case BlendOp::eHslLuminosityEXT: return "HslLuminosityEXT"; + case BlendOp::ePlusEXT: return "PlusEXT"; + case 
BlendOp::ePlusClampedEXT: return "PlusClampedEXT"; + case BlendOp::ePlusClampedAlphaEXT: return "PlusClampedAlphaEXT"; + case BlendOp::ePlusDarkerEXT: return "PlusDarkerEXT"; + case BlendOp::eMinusEXT: return "MinusEXT"; + case BlendOp::eMinusClampedEXT: return "MinusClampedEXT"; + case BlendOp::eContrastEXT: return "ContrastEXT"; + case BlendOp::eInvertOvgEXT: return "InvertOvgEXT"; + case BlendOp::eRedEXT: return "RedEXT"; + case BlendOp::eGreenEXT: return "GreenEXT"; + case BlendOp::eBlueEXT: return "BlueEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ColorComponentFlagBits value ) + { + switch ( value ) + { + case ColorComponentFlagBits::eR: return "R"; + case ColorComponentFlagBits::eG: return "G"; + case ColorComponentFlagBits::eB: return "B"; + case ColorComponentFlagBits::eA: return "A"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( CompareOp value ) + { + switch ( value ) + { + case CompareOp::eNever: return "Never"; + case CompareOp::eLess: return "Less"; + case CompareOp::eEqual: return "Equal"; + case CompareOp::eLessOrEqual: return "LessOrEqual"; + case CompareOp::eGreater: return "Greater"; + case CompareOp::eNotEqual: return "NotEqual"; + case CompareOp::eGreaterOrEqual: return "GreaterOrEqual"; + case CompareOp::eAlways: return "Always"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( CullModeFlagBits value ) + { + switch ( value ) + { + case CullModeFlagBits::eNone: return "None"; + case CullModeFlagBits::eFront: return "Front"; + case CullModeFlagBits::eBack: return "Back"; + case CullModeFlagBits::eFrontAndBack: return "FrontAndBack"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DynamicState value ) + { + switch ( value ) + { + case DynamicState::eViewport: return "Viewport"; + case DynamicState::eScissor: return "Scissor"; + case DynamicState::eLineWidth: return "LineWidth"; + case DynamicState::eDepthBias: return "DepthBias"; + case DynamicState::eBlendConstants: return "BlendConstants"; + case DynamicState::eDepthBounds: return "DepthBounds"; + case DynamicState::eStencilCompareMask: return "StencilCompareMask"; + case DynamicState::eStencilWriteMask: return "StencilWriteMask"; + case DynamicState::eStencilReference: return "StencilReference"; + case DynamicState::eCullMode: return "CullMode"; + case DynamicState::eFrontFace: return "FrontFace"; + case DynamicState::ePrimitiveTopology: return "PrimitiveTopology"; + case DynamicState::eViewportWithCount: return "ViewportWithCount"; + case DynamicState::eScissorWithCount: return "ScissorWithCount"; + case DynamicState::eVertexInputBindingStride: return "VertexInputBindingStride"; + case DynamicState::eDepthTestEnable: return "DepthTestEnable"; + case DynamicState::eDepthWriteEnable: return "DepthWriteEnable"; + case DynamicState::eDepthCompareOp: return "DepthCompareOp"; + case DynamicState::eDepthBoundsTestEnable: return "DepthBoundsTestEnable"; + case DynamicState::eStencilTestEnable: return "StencilTestEnable"; + case DynamicState::eStencilOp: return "StencilOp"; + case 
DynamicState::eRasterizerDiscardEnable: return "RasterizerDiscardEnable"; + case DynamicState::eDepthBiasEnable: return "DepthBiasEnable"; + case DynamicState::ePrimitiveRestartEnable: return "PrimitiveRestartEnable"; + case DynamicState::eLineStipple: return "LineStipple"; + case DynamicState::eViewportWScalingNV: return "ViewportWScalingNV"; + case DynamicState::eDiscardRectangleEXT: return "DiscardRectangleEXT"; + case DynamicState::eDiscardRectangleEnableEXT: return "DiscardRectangleEnableEXT"; + case DynamicState::eDiscardRectangleModeEXT: return "DiscardRectangleModeEXT"; + case DynamicState::eSampleLocationsEXT: return "SampleLocationsEXT"; + case DynamicState::eRayTracingPipelineStackSizeKHR: return "RayTracingPipelineStackSizeKHR"; + case DynamicState::eViewportShadingRatePaletteNV: return "ViewportShadingRatePaletteNV"; + case DynamicState::eViewportCoarseSampleOrderNV: return "ViewportCoarseSampleOrderNV"; + case DynamicState::eExclusiveScissorEnableNV: return "ExclusiveScissorEnableNV"; + case DynamicState::eExclusiveScissorNV: return "ExclusiveScissorNV"; + case DynamicState::eFragmentShadingRateKHR: return "FragmentShadingRateKHR"; + case DynamicState::eVertexInputEXT: return "VertexInputEXT"; + case DynamicState::ePatchControlPointsEXT: return "PatchControlPointsEXT"; + case DynamicState::eLogicOpEXT: return "LogicOpEXT"; + case DynamicState::eColorWriteEnableEXT: return "ColorWriteEnableEXT"; + case DynamicState::eDepthClampEnableEXT: return "DepthClampEnableEXT"; + case DynamicState::ePolygonModeEXT: return "PolygonModeEXT"; + case DynamicState::eRasterizationSamplesEXT: return "RasterizationSamplesEXT"; + case DynamicState::eSampleMaskEXT: return "SampleMaskEXT"; + case DynamicState::eAlphaToCoverageEnableEXT: return "AlphaToCoverageEnableEXT"; + case DynamicState::eAlphaToOneEnableEXT: return "AlphaToOneEnableEXT"; + case DynamicState::eLogicOpEnableEXT: return "LogicOpEnableEXT"; + case DynamicState::eColorBlendEnableEXT: return "ColorBlendEnableEXT"; + case DynamicState::eColorBlendEquationEXT: return "ColorBlendEquationEXT"; + case DynamicState::eColorWriteMaskEXT: return "ColorWriteMaskEXT"; + case DynamicState::eTessellationDomainOriginEXT: return "TessellationDomainOriginEXT"; + case DynamicState::eRasterizationStreamEXT: return "RasterizationStreamEXT"; + case DynamicState::eConservativeRasterizationModeEXT: return "ConservativeRasterizationModeEXT"; + case DynamicState::eExtraPrimitiveOverestimationSizeEXT: return "ExtraPrimitiveOverestimationSizeEXT"; + case DynamicState::eDepthClipEnableEXT: return "DepthClipEnableEXT"; + case DynamicState::eSampleLocationsEnableEXT: return "SampleLocationsEnableEXT"; + case DynamicState::eColorBlendAdvancedEXT: return "ColorBlendAdvancedEXT"; + case DynamicState::eProvokingVertexModeEXT: return "ProvokingVertexModeEXT"; + case DynamicState::eLineRasterizationModeEXT: return "LineRasterizationModeEXT"; + case DynamicState::eLineStippleEnableEXT: return "LineStippleEnableEXT"; + case DynamicState::eDepthClipNegativeOneToOneEXT: return "DepthClipNegativeOneToOneEXT"; + case DynamicState::eViewportWScalingEnableNV: return "ViewportWScalingEnableNV"; + case DynamicState::eViewportSwizzleNV: return "ViewportSwizzleNV"; + case DynamicState::eCoverageToColorEnableNV: return "CoverageToColorEnableNV"; + case DynamicState::eCoverageToColorLocationNV: return "CoverageToColorLocationNV"; + case DynamicState::eCoverageModulationModeNV: return "CoverageModulationModeNV"; + case DynamicState::eCoverageModulationTableEnableNV: return 
"CoverageModulationTableEnableNV"; + case DynamicState::eCoverageModulationTableNV: return "CoverageModulationTableNV"; + case DynamicState::eShadingRateImageEnableNV: return "ShadingRateImageEnableNV"; + case DynamicState::eRepresentativeFragmentTestEnableNV: return "RepresentativeFragmentTestEnableNV"; + case DynamicState::eCoverageReductionModeNV: return "CoverageReductionModeNV"; + case DynamicState::eAttachmentFeedbackLoopEnableEXT: return "AttachmentFeedbackLoopEnableEXT"; + case DynamicState::eDepthClampRangeEXT: return "DepthClampRangeEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( FrontFace value ) + { + switch ( value ) + { + case FrontFace::eCounterClockwise: return "CounterClockwise"; + case FrontFace::eClockwise: return "Clockwise"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( LogicOp value ) + { + switch ( value ) + { + case LogicOp::eClear: return "Clear"; + case LogicOp::eAnd: return "And"; + case LogicOp::eAndReverse: return "AndReverse"; + case LogicOp::eCopy: return "Copy"; + case LogicOp::eAndInverted: return "AndInverted"; + case LogicOp::eNoOp: return "NoOp"; + case LogicOp::eXor: return "Xor"; + case LogicOp::eOr: return "Or"; + case LogicOp::eNor: return "Nor"; + case LogicOp::eEquivalent: return "Equivalent"; + case LogicOp::eInvert: return "Invert"; + case LogicOp::eOrReverse: return "OrReverse"; + case LogicOp::eCopyInverted: return "CopyInverted"; + case LogicOp::eOrInverted: return "OrInverted"; + case LogicOp::eNand: return "Nand"; + case LogicOp::eSet: return "Set"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineCreateFlagBits value ) + { + switch ( value ) + { + case PipelineCreateFlagBits::eDisableOptimization: return "DisableOptimization"; + case PipelineCreateFlagBits::eAllowDerivatives: return "AllowDerivatives"; + case PipelineCreateFlagBits::eDerivative: return "Derivative"; + case PipelineCreateFlagBits::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex"; + case PipelineCreateFlagBits::eDispatchBase: return "DispatchBase"; + case PipelineCreateFlagBits::eFailOnPipelineCompileRequired: return "FailOnPipelineCompileRequired"; + case PipelineCreateFlagBits::eEarlyReturnOnFailure: return "EarlyReturnOnFailure"; + case PipelineCreateFlagBits::eNoProtectedAccess: return "NoProtectedAccess"; + case PipelineCreateFlagBits::eProtectedAccessOnly: return "ProtectedAccessOnly"; + case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR: return "RayTracingNoNullAnyHitShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR: return "RayTracingNoNullClosestHitShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR: return "RayTracingNoNullMissShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR: return "RayTracingNoNullIntersectionShadersKHR"; + case PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR: return "RayTracingSkipTrianglesKHR"; + case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR: return "RayTracingSkipAabbsKHR"; + case PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR: return "RayTracingShaderGroupHandleCaptureReplayKHR"; + case 
PipelineCreateFlagBits::eDeferCompileNV: return "DeferCompileNV"; + case PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT: return "RenderingFragmentDensityMapAttachmentEXT"; + case PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR: return "RenderingFragmentShadingRateAttachmentKHR"; + case PipelineCreateFlagBits::eCaptureStatisticsKHR: return "CaptureStatisticsKHR"; + case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR: return "CaptureInternalRepresentationsKHR"; + case PipelineCreateFlagBits::eIndirectBindableNV: return "IndirectBindableNV"; + case PipelineCreateFlagBits::eLibraryKHR: return "LibraryKHR"; + case PipelineCreateFlagBits::eDescriptorBufferEXT: return "DescriptorBufferEXT"; + case PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT: return "RetainLinkTimeOptimizationInfoEXT"; + case PipelineCreateFlagBits::eLinkTimeOptimizationEXT: return "LinkTimeOptimizationEXT"; + case PipelineCreateFlagBits::eRayTracingAllowMotionNV: return "RayTracingAllowMotionNV"; + case PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT: return "ColorAttachmentFeedbackLoopEXT"; + case PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT: return "DepthStencilAttachmentFeedbackLoopEXT"; + case PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT: return "RayTracingOpacityMicromapEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case PipelineCreateFlagBits::eRayTracingDisplacementMicromapNV: return "RayTracingDisplacementMicromapNV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineShaderStageCreateFlagBits value ) + { + switch ( value ) + { + case PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize: return "AllowVaryingSubgroupSize"; + case PipelineShaderStageCreateFlagBits::eRequireFullSubgroups: return "RequireFullSubgroups"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PolygonMode value ) + { + switch ( value ) + { + case PolygonMode::eFill: return "Fill"; + case PolygonMode::eLine: return "Line"; + case PolygonMode::ePoint: return "Point"; + case PolygonMode::eFillRectangleNV: return "FillRectangleNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PrimitiveTopology value ) + { + switch ( value ) + { + case PrimitiveTopology::ePointList: return "PointList"; + case PrimitiveTopology::eLineList: return "LineList"; + case PrimitiveTopology::eLineStrip: return "LineStrip"; + case PrimitiveTopology::eTriangleList: return "TriangleList"; + case PrimitiveTopology::eTriangleStrip: return "TriangleStrip"; + case PrimitiveTopology::eTriangleFan: return "TriangleFan"; + case PrimitiveTopology::eLineListWithAdjacency: return "LineListWithAdjacency"; + case PrimitiveTopology::eLineStripWithAdjacency: return "LineStripWithAdjacency"; + case PrimitiveTopology::eTriangleListWithAdjacency: return "TriangleListWithAdjacency"; + case PrimitiveTopology::eTriangleStripWithAdjacency: return "TriangleStripWithAdjacency"; + case PrimitiveTopology::ePatchList: return "PatchList"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE 
VULKAN_HPP_CONSTEXPR_20 std::string to_string( ShaderStageFlagBits value ) + { + switch ( value ) + { + case ShaderStageFlagBits::eVertex: return "Vertex"; + case ShaderStageFlagBits::eTessellationControl: return "TessellationControl"; + case ShaderStageFlagBits::eTessellationEvaluation: return "TessellationEvaluation"; + case ShaderStageFlagBits::eGeometry: return "Geometry"; + case ShaderStageFlagBits::eFragment: return "Fragment"; + case ShaderStageFlagBits::eCompute: return "Compute"; + case ShaderStageFlagBits::eAllGraphics: return "AllGraphics"; + case ShaderStageFlagBits::eAll: return "All"; + case ShaderStageFlagBits::eRaygenKHR: return "RaygenKHR"; + case ShaderStageFlagBits::eAnyHitKHR: return "AnyHitKHR"; + case ShaderStageFlagBits::eClosestHitKHR: return "ClosestHitKHR"; + case ShaderStageFlagBits::eMissKHR: return "MissKHR"; + case ShaderStageFlagBits::eIntersectionKHR: return "IntersectionKHR"; + case ShaderStageFlagBits::eCallableKHR: return "CallableKHR"; + case ShaderStageFlagBits::eTaskEXT: return "TaskEXT"; + case ShaderStageFlagBits::eMeshEXT: return "MeshEXT"; + case ShaderStageFlagBits::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI"; + case ShaderStageFlagBits::eClusterCullingHUAWEI: return "ClusterCullingHUAWEI"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( StencilOp value ) + { + switch ( value ) + { + case StencilOp::eKeep: return "Keep"; + case StencilOp::eZero: return "Zero"; + case StencilOp::eReplace: return "Replace"; + case StencilOp::eIncrementAndClamp: return "IncrementAndClamp"; + case StencilOp::eDecrementAndClamp: return "DecrementAndClamp"; + case StencilOp::eInvert: return "Invert"; + case StencilOp::eIncrementAndWrap: return "IncrementAndWrap"; + case StencilOp::eDecrementAndWrap: return "DecrementAndWrap"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VertexInputRate value ) + { + switch ( value ) + { + case VertexInputRate::eVertex: return "Vertex"; + case VertexInputRate::eInstance: return "Instance"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineColorBlendStateCreateFlagBits value ) + { + switch ( value ) + { + case PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessEXT: return "RasterizationOrderAttachmentAccessEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineDepthStencilStateCreateFlagBits value ) + { + switch ( value ) + { + case PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessEXT: return "RasterizationOrderAttachmentDepthAccessEXT"; + case PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessEXT: return "RasterizationOrderAttachmentStencilAccessEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineDynamicStateCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineInputAssemblyStateCreateFlagBits ) + { + return "(void)"; + } +
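
The hunk above only carries the generated to_string overloads for the core pipeline-state enums. As a rough usage sketch (not part of this patch; it assumes an SDK recent enough to ship <vulkan/vulkan_to_string.hpp>, where these overloads now live, and the default VULKAN_HPP_NAMESPACE of vk), application code typically consumes them when logging or labelling pipeline state:

    // Illustrative only: prints the generated names for a few core enums.
    // Assumes the to_string overloads come from vulkan_to_string.hpp;
    // older SDKs expose them directly from vulkan.hpp.
    #include <iostream>
    #include <vulkan/vulkan.hpp>
    #include <vulkan/vulkan_to_string.hpp>

    int main()
    {
        vk::ShaderStageFlagBits stage = vk::ShaderStageFlagBits::eFragment;
        vk::PrimitiveTopology   topo  = vk::PrimitiveTopology::eTriangleList;
        vk::StencilOp           op    = vk::StencilOp::eIncrementAndClamp;

        // Each overload maps one enumerant to its name and falls back to
        // "invalid ( 0x... )" for values it does not recognise.
        std::cout << vk::to_string( stage ) << '\n'   // "Fragment"
                  << vk::to_string( topo )  << '\n'   // "TriangleList"
                  << vk::to_string( op )    << '\n';  // "IncrementAndClamp"
        return 0;
    }

Note that the FlagBits overloads in this hunk stringify a single bit; Vulkan-Hpp also provides companion to_string overloads for the combined Flags masks, which are not shown in this excerpt.
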
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineLayoutCreateFlagBits value ) + { + switch ( value ) + { + case PipelineLayoutCreateFlagBits::eIndependentSetsEXT: return "IndependentSetsEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineMultisampleStateCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineRasterizationStateCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineTessellationStateCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineVertexInputStateCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineViewportStateCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( BorderColor value ) + { + switch ( value ) + { + case BorderColor::eFloatTransparentBlack: return "FloatTransparentBlack"; + case BorderColor::eIntTransparentBlack: return "IntTransparentBlack"; + case BorderColor::eFloatOpaqueBlack: return "FloatOpaqueBlack"; + case BorderColor::eIntOpaqueBlack: return "IntOpaqueBlack"; + case BorderColor::eFloatOpaqueWhite: return "FloatOpaqueWhite"; + case BorderColor::eIntOpaqueWhite: return "IntOpaqueWhite"; + case BorderColor::eFloatCustomEXT: return "FloatCustomEXT"; + case BorderColor::eIntCustomEXT: return "IntCustomEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( Filter value ) + { + switch ( value ) + { + case Filter::eNearest: return "Nearest"; + case Filter::eLinear: return "Linear"; + case Filter::eCubicEXT: return "CubicEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SamplerAddressMode value ) + { + switch ( value ) + { + case SamplerAddressMode::eRepeat: return "Repeat"; + case SamplerAddressMode::eMirroredRepeat: return "MirroredRepeat"; + case SamplerAddressMode::eClampToEdge: return "ClampToEdge"; + case SamplerAddressMode::eClampToBorder: return "ClampToBorder"; + case SamplerAddressMode::eMirrorClampToEdge: return "MirrorClampToEdge"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SamplerCreateFlagBits value ) + { + switch ( value ) + { + case SamplerCreateFlagBits::eSubsampledEXT: return "SubsampledEXT"; + case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT: return "SubsampledCoarseReconstructionEXT"; + case SamplerCreateFlagBits::eDescriptorBufferCaptureReplayEXT: return "DescriptorBufferCaptureReplayEXT"; + case SamplerCreateFlagBits::eNonSeamlessCubeMapEXT: return "NonSeamlessCubeMapEXT"; + case SamplerCreateFlagBits::eImageProcessingQCOM: return "ImageProcessingQCOM"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SamplerMipmapMode value ) + { + switch ( value ) + { + case SamplerMipmapMode::eNearest: return "Nearest"; + case 
SamplerMipmapMode::eLinear: return "Linear"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DescriptorPoolCreateFlagBits value ) + { + switch ( value ) + { + case DescriptorPoolCreateFlagBits::eFreeDescriptorSet: return "FreeDescriptorSet"; + case DescriptorPoolCreateFlagBits::eUpdateAfterBind: return "UpdateAfterBind"; + case DescriptorPoolCreateFlagBits::eHostOnlyEXT: return "HostOnlyEXT"; + case DescriptorPoolCreateFlagBits::eAllowOverallocationSetsNV: return "AllowOverallocationSetsNV"; + case DescriptorPoolCreateFlagBits::eAllowOverallocationPoolsNV: return "AllowOverallocationPoolsNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DescriptorSetLayoutCreateFlagBits value ) + { + switch ( value ) + { + case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool: return "UpdateAfterBindPool"; + case DescriptorSetLayoutCreateFlagBits::ePushDescriptor: return "PushDescriptor"; + case DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT: return "DescriptorBufferEXT"; + case DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT: return "EmbeddedImmutableSamplersEXT"; + case DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV: return "IndirectBindableNV"; + case DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT: return "HostOnlyPoolEXT"; + case DescriptorSetLayoutCreateFlagBits::ePerStageNV: return "PerStageNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DescriptorType value ) + { + switch ( value ) + { + case DescriptorType::eSampler: return "Sampler"; + case DescriptorType::eCombinedImageSampler: return "CombinedImageSampler"; + case DescriptorType::eSampledImage: return "SampledImage"; + case DescriptorType::eStorageImage: return "StorageImage"; + case DescriptorType::eUniformTexelBuffer: return "UniformTexelBuffer"; + case DescriptorType::eStorageTexelBuffer: return "StorageTexelBuffer"; + case DescriptorType::eUniformBuffer: return "UniformBuffer"; + case DescriptorType::eStorageBuffer: return "StorageBuffer"; + case DescriptorType::eUniformBufferDynamic: return "UniformBufferDynamic"; + case DescriptorType::eStorageBufferDynamic: return "StorageBufferDynamic"; + case DescriptorType::eInputAttachment: return "InputAttachment"; + case DescriptorType::eInlineUniformBlock: return "InlineUniformBlock"; + case DescriptorType::eAccelerationStructureKHR: return "AccelerationStructureKHR"; + case DescriptorType::eAccelerationStructureNV: return "AccelerationStructureNV"; + case DescriptorType::eSampleWeightImageQCOM: return "SampleWeightImageQCOM"; + case DescriptorType::eBlockMatchImageQCOM: return "BlockMatchImageQCOM"; + case DescriptorType::eMutableEXT: return "MutableEXT"; + case DescriptorType::ePartitionedAccelerationStructureNV: return "PartitionedAccelerationStructureNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DescriptorPoolResetFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AccessFlagBits value ) + { + switch ( value ) + { + case AccessFlagBits::eIndirectCommandRead: return 
"IndirectCommandRead"; + case AccessFlagBits::eIndexRead: return "IndexRead"; + case AccessFlagBits::eVertexAttributeRead: return "VertexAttributeRead"; + case AccessFlagBits::eUniformRead: return "UniformRead"; + case AccessFlagBits::eInputAttachmentRead: return "InputAttachmentRead"; + case AccessFlagBits::eShaderRead: return "ShaderRead"; + case AccessFlagBits::eShaderWrite: return "ShaderWrite"; + case AccessFlagBits::eColorAttachmentRead: return "ColorAttachmentRead"; + case AccessFlagBits::eColorAttachmentWrite: return "ColorAttachmentWrite"; + case AccessFlagBits::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead"; + case AccessFlagBits::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite"; + case AccessFlagBits::eTransferRead: return "TransferRead"; + case AccessFlagBits::eTransferWrite: return "TransferWrite"; + case AccessFlagBits::eHostRead: return "HostRead"; + case AccessFlagBits::eHostWrite: return "HostWrite"; + case AccessFlagBits::eMemoryRead: return "MemoryRead"; + case AccessFlagBits::eMemoryWrite: return "MemoryWrite"; + case AccessFlagBits::eNone: return "None"; + case AccessFlagBits::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT"; + case AccessFlagBits::eTransformFeedbackCounterReadEXT: return "TransformFeedbackCounterReadEXT"; + case AccessFlagBits::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT"; + case AccessFlagBits::eConditionalRenderingReadEXT: return "ConditionalRenderingReadEXT"; + case AccessFlagBits::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT"; + case AccessFlagBits::eAccelerationStructureReadKHR: return "AccelerationStructureReadKHR"; + case AccessFlagBits::eAccelerationStructureWriteKHR: return "AccelerationStructureWriteKHR"; + case AccessFlagBits::eFragmentDensityMapReadEXT: return "FragmentDensityMapReadEXT"; + case AccessFlagBits::eFragmentShadingRateAttachmentReadKHR: return "FragmentShadingRateAttachmentReadKHR"; + case AccessFlagBits::eCommandPreprocessReadEXT: return "CommandPreprocessReadEXT"; + case AccessFlagBits::eCommandPreprocessWriteEXT: return "CommandPreprocessWriteEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AttachmentDescriptionFlagBits value ) + { + switch ( value ) + { + case AttachmentDescriptionFlagBits::eMayAlias: return "MayAlias"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AttachmentLoadOp value ) + { + switch ( value ) + { + case AttachmentLoadOp::eLoad: return "Load"; + case AttachmentLoadOp::eClear: return "Clear"; + case AttachmentLoadOp::eDontCare: return "DontCare"; + case AttachmentLoadOp::eNone: return "None"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AttachmentStoreOp value ) + { + switch ( value ) + { + case AttachmentStoreOp::eStore: return "Store"; + case AttachmentStoreOp::eDontCare: return "DontCare"; + case AttachmentStoreOp::eNone: return "None"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DependencyFlagBits value ) + { + switch ( value ) + { + case DependencyFlagBits::eByRegion: 
return "ByRegion"; + case DependencyFlagBits::eDeviceGroup: return "DeviceGroup"; + case DependencyFlagBits::eViewLocal: return "ViewLocal"; + case DependencyFlagBits::eFeedbackLoopEXT: return "FeedbackLoopEXT"; + case DependencyFlagBits::eQueueFamilyOwnershipTransferUseAllStagesKHR: return "QueueFamilyOwnershipTransferUseAllStagesKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( FramebufferCreateFlagBits value ) + { + switch ( value ) + { + case FramebufferCreateFlagBits::eImageless: return "Imageless"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineBindPoint value ) + { + switch ( value ) + { + case PipelineBindPoint::eGraphics: return "Graphics"; + case PipelineBindPoint::eCompute: return "Compute"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case PipelineBindPoint::eExecutionGraphAMDX: return "ExecutionGraphAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case PipelineBindPoint::eRayTracingKHR: return "RayTracingKHR"; + case PipelineBindPoint::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( RenderPassCreateFlagBits value ) + { + switch ( value ) + { + case RenderPassCreateFlagBits::eTransformQCOM: return "TransformQCOM"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SubpassDescriptionFlagBits value ) + { + switch ( value ) + { + case SubpassDescriptionFlagBits::ePerViewAttributesNVX: return "PerViewAttributesNVX"; + case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX: return "PerViewPositionXOnlyNVX"; + case SubpassDescriptionFlagBits::eFragmentRegionQCOM: return "FragmentRegionQCOM"; + case SubpassDescriptionFlagBits::eShaderResolveQCOM: return "ShaderResolveQCOM"; + case SubpassDescriptionFlagBits::eTileShadingApronQCOM: return "TileShadingApronQCOM"; + case SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessEXT: return "RasterizationOrderAttachmentColorAccessEXT"; + case SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessEXT: return "RasterizationOrderAttachmentDepthAccessEXT"; + case SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessEXT: return "RasterizationOrderAttachmentStencilAccessEXT"; + case SubpassDescriptionFlagBits::eEnableLegacyDitheringEXT: return "EnableLegacyDitheringEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( CommandPoolCreateFlagBits value ) + { + switch ( value ) + { + case CommandPoolCreateFlagBits::eTransient: return "Transient"; + case CommandPoolCreateFlagBits::eResetCommandBuffer: return "ResetCommandBuffer"; + case CommandPoolCreateFlagBits::eProtected: return "Protected"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( CommandPoolResetFlagBits value ) + { + switch ( value ) + { + case CommandPoolResetFlagBits::eReleaseResources: return "ReleaseResources"; + default: 
return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( CommandBufferLevel value ) + { + switch ( value ) + { + case CommandBufferLevel::ePrimary: return "Primary"; + case CommandBufferLevel::eSecondary: return "Secondary"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( CommandBufferResetFlagBits value ) + { + switch ( value ) + { + case CommandBufferResetFlagBits::eReleaseResources: return "ReleaseResources"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( CommandBufferUsageFlagBits value ) + { + switch ( value ) + { + case CommandBufferUsageFlagBits::eOneTimeSubmit: return "OneTimeSubmit"; + case CommandBufferUsageFlagBits::eRenderPassContinue: return "RenderPassContinue"; + case CommandBufferUsageFlagBits::eSimultaneousUse: return "SimultaneousUse"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( QueryControlFlagBits value ) + { + switch ( value ) + { + case QueryControlFlagBits::ePrecise: return "Precise"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( IndexType value ) + { + switch ( value ) + { + case IndexType::eUint16: return "Uint16"; + case IndexType::eUint32: return "Uint32"; + case IndexType::eUint8: return "Uint8"; + case IndexType::eNoneKHR: return "NoneKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( StencilFaceFlagBits value ) + { + switch ( value ) + { + case StencilFaceFlagBits::eFront: return "Front"; + case StencilFaceFlagBits::eBack: return "Back"; + case StencilFaceFlagBits::eFrontAndBack: return "FrontAndBack"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SubpassContents value ) + { + switch ( value ) + { + case SubpassContents::eInline: return "Inline"; + case SubpassContents::eSecondaryCommandBuffers: return "SecondaryCommandBuffers"; + case SubpassContents::eInlineAndSecondaryCommandBuffersKHR: return "InlineAndSecondaryCommandBuffersKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_VERSION_1_1 === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SubgroupFeatureFlagBits value ) + { + switch ( value ) + { + case SubgroupFeatureFlagBits::eBasic: return "Basic"; + case SubgroupFeatureFlagBits::eVote: return "Vote"; + case SubgroupFeatureFlagBits::eArithmetic: return "Arithmetic"; + case SubgroupFeatureFlagBits::eBallot: return "Ballot"; + case SubgroupFeatureFlagBits::eShuffle: return "Shuffle"; + case SubgroupFeatureFlagBits::eShuffleRelative: return "ShuffleRelative"; + case SubgroupFeatureFlagBits::eClustered: return "Clustered"; + case SubgroupFeatureFlagBits::eQuad: return "Quad"; + case SubgroupFeatureFlagBits::eRotate: return "Rotate"; + case SubgroupFeatureFlagBits::eRotateClustered: return 
"RotateClustered"; + case SubgroupFeatureFlagBits::ePartitionedNV: return "PartitionedNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PeerMemoryFeatureFlagBits value ) + { + switch ( value ) + { + case PeerMemoryFeatureFlagBits::eCopySrc: return "CopySrc"; + case PeerMemoryFeatureFlagBits::eCopyDst: return "CopyDst"; + case PeerMemoryFeatureFlagBits::eGenericSrc: return "GenericSrc"; + case PeerMemoryFeatureFlagBits::eGenericDst: return "GenericDst"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( MemoryAllocateFlagBits value ) + { + switch ( value ) + { + case MemoryAllocateFlagBits::eDeviceMask: return "DeviceMask"; + case MemoryAllocateFlagBits::eDeviceAddress: return "DeviceAddress"; + case MemoryAllocateFlagBits::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( CommandPoolTrimFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PointClippingBehavior value ) + { + switch ( value ) + { + case PointClippingBehavior::eAllClipPlanes: return "AllClipPlanes"; + case PointClippingBehavior::eUserClipPlanesOnly: return "UserClipPlanesOnly"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( TessellationDomainOrigin value ) + { + switch ( value ) + { + case TessellationDomainOrigin::eUpperLeft: return "UpperLeft"; + case TessellationDomainOrigin::eLowerLeft: return "LowerLeft"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SamplerYcbcrModelConversion value ) + { + switch ( value ) + { + case SamplerYcbcrModelConversion::eRgbIdentity: return "RgbIdentity"; + case SamplerYcbcrModelConversion::eYcbcrIdentity: return "YcbcrIdentity"; + case SamplerYcbcrModelConversion::eYcbcr709: return "Ycbcr709"; + case SamplerYcbcrModelConversion::eYcbcr601: return "Ycbcr601"; + case SamplerYcbcrModelConversion::eYcbcr2020: return "Ycbcr2020"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SamplerYcbcrRange value ) + { + switch ( value ) + { + case SamplerYcbcrRange::eItuFull: return "ItuFull"; + case SamplerYcbcrRange::eItuNarrow: return "ItuNarrow"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ChromaLocation value ) + { + switch ( value ) + { + case ChromaLocation::eCositedEven: return "CositedEven"; + case ChromaLocation::eMidpoint: return "Midpoint"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DescriptorUpdateTemplateType value ) + { + switch ( value ) + { + case DescriptorUpdateTemplateType::eDescriptorSet: return "DescriptorSet"; + case 
DescriptorUpdateTemplateType::ePushDescriptors: return "PushDescriptors"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DescriptorUpdateTemplateCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ExternalMemoryHandleTypeFlagBits value ) + { + switch ( value ) + { + case ExternalMemoryHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalMemoryHandleTypeFlagBits::eD3D11Texture: return "D3D11Texture"; + case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt: return "D3D11TextureKmt"; + case ExternalMemoryHandleTypeFlagBits::eD3D12Heap: return "D3D12Heap"; + case ExternalMemoryHandleTypeFlagBits::eD3D12Resource: return "D3D12Resource"; + case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT: return "DmaBufEXT"; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID: return "AndroidHardwareBufferANDROID"; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT: return "HostAllocationEXT"; + case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT: return "HostMappedForeignMemoryEXT"; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA: return "ZirconVmoFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + case ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV: return "RdmaAddressNV"; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + case ExternalMemoryHandleTypeFlagBits::eScreenBufferQNX: return "ScreenBufferQNX"; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + case ExternalMemoryHandleTypeFlagBits::eMtlbufferEXT: return "MtlbufferEXT"; + case ExternalMemoryHandleTypeFlagBits::eMtltextureEXT: return "MtltextureEXT"; + case ExternalMemoryHandleTypeFlagBits::eMtlheapEXT: return "MtlheapEXT"; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ExternalMemoryFeatureFlagBits value ) + { + switch ( value ) + { + case ExternalMemoryFeatureFlagBits::eDedicatedOnly: return "DedicatedOnly"; + case ExternalMemoryFeatureFlagBits::eExportable: return "Exportable"; + case ExternalMemoryFeatureFlagBits::eImportable: return "Importable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ExternalFenceHandleTypeFlagBits value ) + { + switch ( value ) + { + case ExternalFenceHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalFenceHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalFenceHandleTypeFlagBits::eSyncFd: return "SyncFd"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ExternalFenceFeatureFlagBits value ) + { + switch ( value ) + { + case ExternalFenceFeatureFlagBits::eExportable: return "Exportable"; + case ExternalFenceFeatureFlagBits::eImportable: 
return "Importable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( FenceImportFlagBits value ) + { + switch ( value ) + { + case FenceImportFlagBits::eTemporary: return "Temporary"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SemaphoreImportFlagBits value ) + { + switch ( value ) + { + case SemaphoreImportFlagBits::eTemporary: return "Temporary"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ExternalSemaphoreHandleTypeFlagBits value ) + { + switch ( value ) + { + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence: return "D3D12Fence"; + case ExternalSemaphoreHandleTypeFlagBits::eSyncFd: return "SyncFd"; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA: return "ZirconEventFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ExternalSemaphoreFeatureFlagBits value ) + { + switch ( value ) + { + case ExternalSemaphoreFeatureFlagBits::eExportable: return "Exportable"; + case ExternalSemaphoreFeatureFlagBits::eImportable: return "Importable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_VERSION_1_2 === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DriverId value ) + { + switch ( value ) + { + case DriverId::eAmdProprietary: return "AmdProprietary"; + case DriverId::eAmdOpenSource: return "AmdOpenSource"; + case DriverId::eMesaRadv: return "MesaRadv"; + case DriverId::eNvidiaProprietary: return "NvidiaProprietary"; + case DriverId::eIntelProprietaryWindows: return "IntelProprietaryWindows"; + case DriverId::eIntelOpenSourceMESA: return "IntelOpenSourceMESA"; + case DriverId::eImaginationProprietary: return "ImaginationProprietary"; + case DriverId::eQualcommProprietary: return "QualcommProprietary"; + case DriverId::eArmProprietary: return "ArmProprietary"; + case DriverId::eGoogleSwiftshader: return "GoogleSwiftshader"; + case DriverId::eGgpProprietary: return "GgpProprietary"; + case DriverId::eBroadcomProprietary: return "BroadcomProprietary"; + case DriverId::eMesaLlvmpipe: return "MesaLlvmpipe"; + case DriverId::eMoltenvk: return "Moltenvk"; + case DriverId::eCoreaviProprietary: return "CoreaviProprietary"; + case DriverId::eJuiceProprietary: return "JuiceProprietary"; + case DriverId::eVerisiliconProprietary: return "VerisiliconProprietary"; + case DriverId::eMesaTurnip: return "MesaTurnip"; + case DriverId::eMesaV3Dv: return "MesaV3Dv"; + case DriverId::eMesaPanvk: return "MesaPanvk"; + case DriverId::eSamsungProprietary: return "SamsungProprietary"; + case DriverId::eMesaVenus: return "MesaVenus"; + case DriverId::eMesaDozen: return "MesaDozen"; + case DriverId::eMesaNvk: return "MesaNvk"; + case DriverId::eImaginationOpenSourceMESA: return 
"ImaginationOpenSourceMESA"; + case DriverId::eMesaHoneykrisp: return "MesaHoneykrisp"; + case DriverId::eVulkanScEmulationOnVulkan: return "VulkanScEmulationOnVulkan"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ShaderFloatControlsIndependence value ) + { + switch ( value ) + { + case ShaderFloatControlsIndependence::e32BitOnly: return "32BitOnly"; + case ShaderFloatControlsIndependence::eAll: return "All"; + case ShaderFloatControlsIndependence::eNone: return "None"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DescriptorBindingFlagBits value ) + { + switch ( value ) + { + case DescriptorBindingFlagBits::eUpdateAfterBind: return "UpdateAfterBind"; + case DescriptorBindingFlagBits::eUpdateUnusedWhilePending: return "UpdateUnusedWhilePending"; + case DescriptorBindingFlagBits::ePartiallyBound: return "PartiallyBound"; + case DescriptorBindingFlagBits::eVariableDescriptorCount: return "VariableDescriptorCount"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ResolveModeFlagBits value ) + { + switch ( value ) + { + case ResolveModeFlagBits::eNone: return "None"; + case ResolveModeFlagBits::eSampleZero: return "SampleZero"; + case ResolveModeFlagBits::eAverage: return "Average"; + case ResolveModeFlagBits::eMin: return "Min"; + case ResolveModeFlagBits::eMax: return "Max"; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + case ResolveModeFlagBits::eExternalFormatDownsampleANDROID: return "ExternalFormatDownsampleANDROID"; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SamplerReductionMode value ) + { + switch ( value ) + { + case SamplerReductionMode::eWeightedAverage: return "WeightedAverage"; + case SamplerReductionMode::eMin: return "Min"; + case SamplerReductionMode::eMax: return "Max"; + case SamplerReductionMode::eWeightedAverageRangeclampQCOM: return "WeightedAverageRangeclampQCOM"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SemaphoreType value ) + { + switch ( value ) + { + case SemaphoreType::eBinary: return "Binary"; + case SemaphoreType::eTimeline: return "Timeline"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SemaphoreWaitFlagBits value ) + { + switch ( value ) + { + case SemaphoreWaitFlagBits::eAny: return "Any"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_VERSION_1_3 === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineCreationFeedbackFlagBits value ) + { + switch ( value ) + { + case PipelineCreationFeedbackFlagBits::eValid: return "Valid"; + case PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit: return "ApplicationPipelineCacheHit"; + case PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration: return "BasePipelineAcceleration"; + default: return 
"invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ToolPurposeFlagBits value ) + { + switch ( value ) + { + case ToolPurposeFlagBits::eValidation: return "Validation"; + case ToolPurposeFlagBits::eProfiling: return "Profiling"; + case ToolPurposeFlagBits::eTracing: return "Tracing"; + case ToolPurposeFlagBits::eAdditionalFeatures: return "AdditionalFeatures"; + case ToolPurposeFlagBits::eModifyingFeatures: return "ModifyingFeatures"; + case ToolPurposeFlagBits::eDebugReportingEXT: return "DebugReportingEXT"; + case ToolPurposeFlagBits::eDebugMarkersEXT: return "DebugMarkersEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PrivateDataSlotCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineStageFlagBits2 value ) + { + switch ( value ) + { + case PipelineStageFlagBits2::eNone: return "None"; + case PipelineStageFlagBits2::eTopOfPipe: return "TopOfPipe"; + case PipelineStageFlagBits2::eDrawIndirect: return "DrawIndirect"; + case PipelineStageFlagBits2::eVertexInput: return "VertexInput"; + case PipelineStageFlagBits2::eVertexShader: return "VertexShader"; + case PipelineStageFlagBits2::eTessellationControlShader: return "TessellationControlShader"; + case PipelineStageFlagBits2::eTessellationEvaluationShader: return "TessellationEvaluationShader"; + case PipelineStageFlagBits2::eGeometryShader: return "GeometryShader"; + case PipelineStageFlagBits2::eFragmentShader: return "FragmentShader"; + case PipelineStageFlagBits2::eEarlyFragmentTests: return "EarlyFragmentTests"; + case PipelineStageFlagBits2::eLateFragmentTests: return "LateFragmentTests"; + case PipelineStageFlagBits2::eColorAttachmentOutput: return "ColorAttachmentOutput"; + case PipelineStageFlagBits2::eComputeShader: return "ComputeShader"; + case PipelineStageFlagBits2::eAllTransfer: return "AllTransfer"; + case PipelineStageFlagBits2::eBottomOfPipe: return "BottomOfPipe"; + case PipelineStageFlagBits2::eHost: return "Host"; + case PipelineStageFlagBits2::eAllGraphics: return "AllGraphics"; + case PipelineStageFlagBits2::eAllCommands: return "AllCommands"; + case PipelineStageFlagBits2::eCopy: return "Copy"; + case PipelineStageFlagBits2::eResolve: return "Resolve"; + case PipelineStageFlagBits2::eBlit: return "Blit"; + case PipelineStageFlagBits2::eClear: return "Clear"; + case PipelineStageFlagBits2::eIndexInput: return "IndexInput"; + case PipelineStageFlagBits2::eVertexAttributeInput: return "VertexAttributeInput"; + case PipelineStageFlagBits2::ePreRasterizationShaders: return "PreRasterizationShaders"; + case PipelineStageFlagBits2::eVideoDecodeKHR: return "VideoDecodeKHR"; + case PipelineStageFlagBits2::eVideoEncodeKHR: return "VideoEncodeKHR"; + case PipelineStageFlagBits2::eTransformFeedbackEXT: return "TransformFeedbackEXT"; + case PipelineStageFlagBits2::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; + case PipelineStageFlagBits2::eCommandPreprocessEXT: return "CommandPreprocessEXT"; + case PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; + case PipelineStageFlagBits2::eAccelerationStructureBuildKHR: return "AccelerationStructureBuildKHR"; + case PipelineStageFlagBits2::eRayTracingShaderKHR: return "RayTracingShaderKHR"; + case 
PipelineStageFlagBits2::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT"; + case PipelineStageFlagBits2::eTaskShaderEXT: return "TaskShaderEXT"; + case PipelineStageFlagBits2::eMeshShaderEXT: return "MeshShaderEXT"; + case PipelineStageFlagBits2::eSubpassShaderHUAWEI: return "SubpassShaderHUAWEI"; + case PipelineStageFlagBits2::eInvocationMaskHUAWEI: return "InvocationMaskHUAWEI"; + case PipelineStageFlagBits2::eAccelerationStructureCopyKHR: return "AccelerationStructureCopyKHR"; + case PipelineStageFlagBits2::eMicromapBuildEXT: return "MicromapBuildEXT"; + case PipelineStageFlagBits2::eClusterCullingShaderHUAWEI: return "ClusterCullingShaderHUAWEI"; + case PipelineStageFlagBits2::eOpticalFlowNV: return "OpticalFlowNV"; + case PipelineStageFlagBits2::eConvertCooperativeVectorMatrixNV: return "ConvertCooperativeVectorMatrixNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AccessFlagBits2 value ) + { + switch ( value ) + { + case AccessFlagBits2::eNone: return "None"; + case AccessFlagBits2::eIndirectCommandRead: return "IndirectCommandRead"; + case AccessFlagBits2::eIndexRead: return "IndexRead"; + case AccessFlagBits2::eVertexAttributeRead: return "VertexAttributeRead"; + case AccessFlagBits2::eUniformRead: return "UniformRead"; + case AccessFlagBits2::eInputAttachmentRead: return "InputAttachmentRead"; + case AccessFlagBits2::eShaderRead: return "ShaderRead"; + case AccessFlagBits2::eShaderWrite: return "ShaderWrite"; + case AccessFlagBits2::eColorAttachmentRead: return "ColorAttachmentRead"; + case AccessFlagBits2::eColorAttachmentWrite: return "ColorAttachmentWrite"; + case AccessFlagBits2::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead"; + case AccessFlagBits2::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite"; + case AccessFlagBits2::eTransferRead: return "TransferRead"; + case AccessFlagBits2::eTransferWrite: return "TransferWrite"; + case AccessFlagBits2::eHostRead: return "HostRead"; + case AccessFlagBits2::eHostWrite: return "HostWrite"; + case AccessFlagBits2::eMemoryRead: return "MemoryRead"; + case AccessFlagBits2::eMemoryWrite: return "MemoryWrite"; + case AccessFlagBits2::eShaderSampledRead: return "ShaderSampledRead"; + case AccessFlagBits2::eShaderStorageRead: return "ShaderStorageRead"; + case AccessFlagBits2::eShaderStorageWrite: return "ShaderStorageWrite"; + case AccessFlagBits2::eVideoDecodeReadKHR: return "VideoDecodeReadKHR"; + case AccessFlagBits2::eVideoDecodeWriteKHR: return "VideoDecodeWriteKHR"; + case AccessFlagBits2::eVideoEncodeReadKHR: return "VideoEncodeReadKHR"; + case AccessFlagBits2::eVideoEncodeWriteKHR: return "VideoEncodeWriteKHR"; + case AccessFlagBits2::eShaderTileAttachmentReadQCOM: return "ShaderTileAttachmentReadQCOM"; + case AccessFlagBits2::eShaderTileAttachmentWriteQCOM: return "ShaderTileAttachmentWriteQCOM"; + case AccessFlagBits2::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT"; + case AccessFlagBits2::eTransformFeedbackCounterReadEXT: return "TransformFeedbackCounterReadEXT"; + case AccessFlagBits2::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT"; + case AccessFlagBits2::eConditionalRenderingReadEXT: return "ConditionalRenderingReadEXT"; + case AccessFlagBits2::eCommandPreprocessReadEXT: return "CommandPreprocessReadEXT"; + case AccessFlagBits2::eCommandPreprocessWriteEXT: return "CommandPreprocessWriteEXT"; + case 
AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR: return "FragmentShadingRateAttachmentReadKHR"; + case AccessFlagBits2::eAccelerationStructureReadKHR: return "AccelerationStructureReadKHR"; + case AccessFlagBits2::eAccelerationStructureWriteKHR: return "AccelerationStructureWriteKHR"; + case AccessFlagBits2::eFragmentDensityMapReadEXT: return "FragmentDensityMapReadEXT"; + case AccessFlagBits2::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT"; + case AccessFlagBits2::eDescriptorBufferReadEXT: return "DescriptorBufferReadEXT"; + case AccessFlagBits2::eInvocationMaskReadHUAWEI: return "InvocationMaskReadHUAWEI"; + case AccessFlagBits2::eShaderBindingTableReadKHR: return "ShaderBindingTableReadKHR"; + case AccessFlagBits2::eMicromapReadEXT: return "MicromapReadEXT"; + case AccessFlagBits2::eMicromapWriteEXT: return "MicromapWriteEXT"; + case AccessFlagBits2::eOpticalFlowReadNV: return "OpticalFlowReadNV"; + case AccessFlagBits2::eOpticalFlowWriteNV: return "OpticalFlowWriteNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SubmitFlagBits value ) + { + switch ( value ) + { + case SubmitFlagBits::eProtected: return "Protected"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( RenderingFlagBits value ) + { + switch ( value ) + { + case RenderingFlagBits::eContentsSecondaryCommandBuffers: return "ContentsSecondaryCommandBuffers"; + case RenderingFlagBits::eSuspending: return "Suspending"; + case RenderingFlagBits::eResuming: return "Resuming"; + case RenderingFlagBits::eEnableLegacyDitheringEXT: return "EnableLegacyDitheringEXT"; + case RenderingFlagBits::eContentsInlineKHR: return "ContentsInlineKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( FormatFeatureFlagBits2 value ) + { + switch ( value ) + { + case FormatFeatureFlagBits2::eSampledImage: return "SampledImage"; + case FormatFeatureFlagBits2::eStorageImage: return "StorageImage"; + case FormatFeatureFlagBits2::eStorageImageAtomic: return "StorageImageAtomic"; + case FormatFeatureFlagBits2::eUniformTexelBuffer: return "UniformTexelBuffer"; + case FormatFeatureFlagBits2::eStorageTexelBuffer: return "StorageTexelBuffer"; + case FormatFeatureFlagBits2::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic"; + case FormatFeatureFlagBits2::eVertexBuffer: return "VertexBuffer"; + case FormatFeatureFlagBits2::eColorAttachment: return "ColorAttachment"; + case FormatFeatureFlagBits2::eColorAttachmentBlend: return "ColorAttachmentBlend"; + case FormatFeatureFlagBits2::eDepthStencilAttachment: return "DepthStencilAttachment"; + case FormatFeatureFlagBits2::eBlitSrc: return "BlitSrc"; + case FormatFeatureFlagBits2::eBlitDst: return "BlitDst"; + case FormatFeatureFlagBits2::eSampledImageFilterLinear: return "SampledImageFilterLinear"; + case FormatFeatureFlagBits2::eTransferSrc: return "TransferSrc"; + case FormatFeatureFlagBits2::eTransferDst: return "TransferDst"; + case FormatFeatureFlagBits2::eSampledImageFilterMinmax: return "SampledImageFilterMinmax"; + case FormatFeatureFlagBits2::eMidpointChromaSamples: return "MidpointChromaSamples"; + case FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter: return 
"SampledImageYcbcrConversionLinearFilter"; + case FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter: return "SampledImageYcbcrConversionSeparateReconstructionFilter"; + case FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit: return "SampledImageYcbcrConversionChromaReconstructionExplicit"; + case FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable: + return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable"; + case FormatFeatureFlagBits2::eDisjoint: return "Disjoint"; + case FormatFeatureFlagBits2::eCositedChromaSamples: return "CositedChromaSamples"; + case FormatFeatureFlagBits2::eStorageReadWithoutFormat: return "StorageReadWithoutFormat"; + case FormatFeatureFlagBits2::eStorageWriteWithoutFormat: return "StorageWriteWithoutFormat"; + case FormatFeatureFlagBits2::eSampledImageDepthComparison: return "SampledImageDepthComparison"; + case FormatFeatureFlagBits2::eSampledImageFilterCubic: return "SampledImageFilterCubic"; + case FormatFeatureFlagBits2::eHostImageTransfer: return "HostImageTransfer"; + case FormatFeatureFlagBits2::eVideoDecodeOutputKHR: return "VideoDecodeOutputKHR"; + case FormatFeatureFlagBits2::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; + case FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR"; + case FormatFeatureFlagBits2::eFragmentDensityMapEXT: return "FragmentDensityMapEXT"; + case FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; + case FormatFeatureFlagBits2::eVideoEncodeInputKHR: return "VideoEncodeInputKHR"; + case FormatFeatureFlagBits2::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; + case FormatFeatureFlagBits2::eAccelerationStructureRadiusBufferNV: return "AccelerationStructureRadiusBufferNV"; + case FormatFeatureFlagBits2::eLinearColorAttachmentNV: return "LinearColorAttachmentNV"; + case FormatFeatureFlagBits2::eWeightImageQCOM: return "WeightImageQCOM"; + case FormatFeatureFlagBits2::eWeightSampledImageQCOM: return "WeightSampledImageQCOM"; + case FormatFeatureFlagBits2::eBlockMatchingQCOM: return "BlockMatchingQCOM"; + case FormatFeatureFlagBits2::eBoxFilterSampledQCOM: return "BoxFilterSampledQCOM"; + case FormatFeatureFlagBits2::eOpticalFlowImageNV: return "OpticalFlowImageNV"; + case FormatFeatureFlagBits2::eOpticalFlowVectorNV: return "OpticalFlowVectorNV"; + case FormatFeatureFlagBits2::eOpticalFlowCostNV: return "OpticalFlowCostNV"; + case FormatFeatureFlagBits2::eVideoEncodeQuantizationDeltaMapKHR: return "VideoEncodeQuantizationDeltaMapKHR"; + case FormatFeatureFlagBits2::eVideoEncodeEmphasisMapKHR: return "VideoEncodeEmphasisMapKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_VERSION_1_4 === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( QueueGlobalPriority value ) + { + switch ( value ) + { + case QueueGlobalPriority::eLow: return "Low"; + case QueueGlobalPriority::eMedium: return "Medium"; + case QueueGlobalPriority::eHigh: return "High"; + case QueueGlobalPriority::eRealtime: return "Realtime"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( LineRasterizationMode value ) + { + switch ( value ) + { + case LineRasterizationMode::eDefault: return "Default"; + case 
LineRasterizationMode::eRectangular: return "Rectangular"; + case LineRasterizationMode::eBresenham: return "Bresenham"; + case LineRasterizationMode::eRectangularSmooth: return "RectangularSmooth"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( MemoryUnmapFlagBits value ) + { + switch ( value ) + { + case MemoryUnmapFlagBits::eReserveEXT: return "ReserveEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineCreateFlagBits2 value ) + { + switch ( value ) + { + case PipelineCreateFlagBits2::eDisableOptimization: return "DisableOptimization"; + case PipelineCreateFlagBits2::eAllowDerivatives: return "AllowDerivatives"; + case PipelineCreateFlagBits2::eDerivative: return "Derivative"; + case PipelineCreateFlagBits2::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex"; + case PipelineCreateFlagBits2::eDispatchBase: return "DispatchBase"; + case PipelineCreateFlagBits2::eFailOnPipelineCompileRequired: return "FailOnPipelineCompileRequired"; + case PipelineCreateFlagBits2::eEarlyReturnOnFailure: return "EarlyReturnOnFailure"; + case PipelineCreateFlagBits2::eNoProtectedAccess: return "NoProtectedAccess"; + case PipelineCreateFlagBits2::eProtectedAccessOnly: return "ProtectedAccessOnly"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case PipelineCreateFlagBits2::eExecutionGraphAMDX: return "ExecutionGraphAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case PipelineCreateFlagBits2::eRayTracingAllowSpheresAndLinearSweptSpheresNV: return "RayTracingAllowSpheresAndLinearSweptSpheresNV"; + case PipelineCreateFlagBits2::eEnableLegacyDitheringEXT: return "EnableLegacyDitheringEXT"; + case PipelineCreateFlagBits2::eDeferCompileNV: return "DeferCompileNV"; + case PipelineCreateFlagBits2::eCaptureStatisticsKHR: return "CaptureStatisticsKHR"; + case PipelineCreateFlagBits2::eCaptureInternalRepresentationsKHR: return "CaptureInternalRepresentationsKHR"; + case PipelineCreateFlagBits2::eLinkTimeOptimizationEXT: return "LinkTimeOptimizationEXT"; + case PipelineCreateFlagBits2::eRetainLinkTimeOptimizationInfoEXT: return "RetainLinkTimeOptimizationInfoEXT"; + case PipelineCreateFlagBits2::eLibraryKHR: return "LibraryKHR"; + case PipelineCreateFlagBits2::eRayTracingSkipTrianglesKHR: return "RayTracingSkipTrianglesKHR"; + case PipelineCreateFlagBits2::eRayTracingSkipAabbsKHR: return "RayTracingSkipAabbsKHR"; + case PipelineCreateFlagBits2::eRayTracingNoNullAnyHitShadersKHR: return "RayTracingNoNullAnyHitShadersKHR"; + case PipelineCreateFlagBits2::eRayTracingNoNullClosestHitShadersKHR: return "RayTracingNoNullClosestHitShadersKHR"; + case PipelineCreateFlagBits2::eRayTracingNoNullMissShadersKHR: return "RayTracingNoNullMissShadersKHR"; + case PipelineCreateFlagBits2::eRayTracingNoNullIntersectionShadersKHR: return "RayTracingNoNullIntersectionShadersKHR"; + case PipelineCreateFlagBits2::eRayTracingShaderGroupHandleCaptureReplayKHR: return "RayTracingShaderGroupHandleCaptureReplayKHR"; + case PipelineCreateFlagBits2::eIndirectBindableNV: return "IndirectBindableNV"; + case PipelineCreateFlagBits2::eRayTracingAllowMotionNV: return "RayTracingAllowMotionNV"; + case PipelineCreateFlagBits2::eRenderingFragmentShadingRateAttachmentKHR: return "RenderingFragmentShadingRateAttachmentKHR"; + case 
PipelineCreateFlagBits2::eRenderingFragmentDensityMapAttachmentEXT: return "RenderingFragmentDensityMapAttachmentEXT"; + case PipelineCreateFlagBits2::eRayTracingOpacityMicromapEXT: return "RayTracingOpacityMicromapEXT"; + case PipelineCreateFlagBits2::eColorAttachmentFeedbackLoopEXT: return "ColorAttachmentFeedbackLoopEXT"; + case PipelineCreateFlagBits2::eDepthStencilAttachmentFeedbackLoopEXT: return "DepthStencilAttachmentFeedbackLoopEXT"; + case PipelineCreateFlagBits2::eRayTracingDisplacementMicromapNV: return "RayTracingDisplacementMicromapNV"; + case PipelineCreateFlagBits2::eDescriptorBufferEXT: return "DescriptorBufferEXT"; + case PipelineCreateFlagBits2::eDisallowOpacityMicromapARM: return "DisallowOpacityMicromapARM"; + case PipelineCreateFlagBits2::eCaptureDataKHR: return "CaptureDataKHR"; + case PipelineCreateFlagBits2::eIndirectBindableEXT: return "IndirectBindableEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( BufferUsageFlagBits2 value ) + { + switch ( value ) + { + case BufferUsageFlagBits2::eTransferSrc: return "TransferSrc"; + case BufferUsageFlagBits2::eTransferDst: return "TransferDst"; + case BufferUsageFlagBits2::eUniformTexelBuffer: return "UniformTexelBuffer"; + case BufferUsageFlagBits2::eStorageTexelBuffer: return "StorageTexelBuffer"; + case BufferUsageFlagBits2::eUniformBuffer: return "UniformBuffer"; + case BufferUsageFlagBits2::eStorageBuffer: return "StorageBuffer"; + case BufferUsageFlagBits2::eIndexBuffer: return "IndexBuffer"; + case BufferUsageFlagBits2::eVertexBuffer: return "VertexBuffer"; + case BufferUsageFlagBits2::eIndirectBuffer: return "IndirectBuffer"; + case BufferUsageFlagBits2::eShaderDeviceAddress: return "ShaderDeviceAddress"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case BufferUsageFlagBits2::eExecutionGraphScratchAMDX: return "ExecutionGraphScratchAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case BufferUsageFlagBits2::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; + case BufferUsageFlagBits2::eShaderBindingTableKHR: return "ShaderBindingTableKHR"; + case BufferUsageFlagBits2::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT"; + case BufferUsageFlagBits2::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT"; + case BufferUsageFlagBits2::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; + case BufferUsageFlagBits2::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; + case BufferUsageFlagBits2::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; + case BufferUsageFlagBits2::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; + case BufferUsageFlagBits2::eAccelerationStructureBuildInputReadOnlyKHR: return "AccelerationStructureBuildInputReadOnlyKHR"; + case BufferUsageFlagBits2::eAccelerationStructureStorageKHR: return "AccelerationStructureStorageKHR"; + case BufferUsageFlagBits2::eSamplerDescriptorBufferEXT: return "SamplerDescriptorBufferEXT"; + case BufferUsageFlagBits2::eResourceDescriptorBufferEXT: return "ResourceDescriptorBufferEXT"; + case BufferUsageFlagBits2::ePushDescriptorsDescriptorBufferEXT: return "PushDescriptorsDescriptorBufferEXT"; + case BufferUsageFlagBits2::eMicromapBuildInputReadOnlyEXT: return "MicromapBuildInputReadOnlyEXT"; + case BufferUsageFlagBits2::eMicromapStorageEXT: return "MicromapStorageEXT"; + case BufferUsageFlagBits2::eTileMemoryQCOM: return "TileMemoryQCOM"; + case BufferUsageFlagBits2::ePreprocessBufferEXT: return 
"PreprocessBufferEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineRobustnessBufferBehavior value ) + { + switch ( value ) + { + case PipelineRobustnessBufferBehavior::eDeviceDefault: return "DeviceDefault"; + case PipelineRobustnessBufferBehavior::eDisabled: return "Disabled"; + case PipelineRobustnessBufferBehavior::eRobustBufferAccess: return "RobustBufferAccess"; + case PipelineRobustnessBufferBehavior::eRobustBufferAccess2: return "RobustBufferAccess2"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineRobustnessImageBehavior value ) + { + switch ( value ) + { + case PipelineRobustnessImageBehavior::eDeviceDefault: return "DeviceDefault"; + case PipelineRobustnessImageBehavior::eDisabled: return "Disabled"; + case PipelineRobustnessImageBehavior::eRobustImageAccess: return "RobustImageAccess"; + case PipelineRobustnessImageBehavior::eRobustImageAccess2: return "RobustImageAccess2"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( HostImageCopyFlagBits value ) + { + switch ( value ) + { + case HostImageCopyFlagBits::eMemcpy: return "Memcpy"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SurfaceTransformFlagBitsKHR value ) + { + switch ( value ) + { + case SurfaceTransformFlagBitsKHR::eIdentity: return "Identity"; + case SurfaceTransformFlagBitsKHR::eRotate90: return "Rotate90"; + case SurfaceTransformFlagBitsKHR::eRotate180: return "Rotate180"; + case SurfaceTransformFlagBitsKHR::eRotate270: return "Rotate270"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirror: return "HorizontalMirror"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90: return "HorizontalMirrorRotate90"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180: return "HorizontalMirrorRotate180"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270: return "HorizontalMirrorRotate270"; + case SurfaceTransformFlagBitsKHR::eInherit: return "Inherit"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PresentModeKHR value ) + { + switch ( value ) + { + case PresentModeKHR::eImmediate: return "Immediate"; + case PresentModeKHR::eMailbox: return "Mailbox"; + case PresentModeKHR::eFifo: return "Fifo"; + case PresentModeKHR::eFifoRelaxed: return "FifoRelaxed"; + case PresentModeKHR::eSharedDemandRefresh: return "SharedDemandRefresh"; + case PresentModeKHR::eSharedContinuousRefresh: return "SharedContinuousRefresh"; + case PresentModeKHR::eFifoLatestReadyEXT: return "FifoLatestReadyEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ColorSpaceKHR value ) + { + switch ( value ) + { + case ColorSpaceKHR::eSrgbNonlinear: return "SrgbNonlinear"; + case ColorSpaceKHR::eDisplayP3NonlinearEXT: return "DisplayP3NonlinearEXT"; + case ColorSpaceKHR::eExtendedSrgbLinearEXT: return "ExtendedSrgbLinearEXT"; + 
case ColorSpaceKHR::eDisplayP3LinearEXT: return "DisplayP3LinearEXT"; + case ColorSpaceKHR::eDciP3NonlinearEXT: return "DciP3NonlinearEXT"; + case ColorSpaceKHR::eBt709LinearEXT: return "Bt709LinearEXT"; + case ColorSpaceKHR::eBt709NonlinearEXT: return "Bt709NonlinearEXT"; + case ColorSpaceKHR::eBt2020LinearEXT: return "Bt2020LinearEXT"; + case ColorSpaceKHR::eHdr10St2084EXT: return "Hdr10St2084EXT"; + case ColorSpaceKHR::eDolbyvisionEXT: return "DolbyvisionEXT"; + case ColorSpaceKHR::eHdr10HlgEXT: return "Hdr10HlgEXT"; + case ColorSpaceKHR::eAdobergbLinearEXT: return "AdobergbLinearEXT"; + case ColorSpaceKHR::eAdobergbNonlinearEXT: return "AdobergbNonlinearEXT"; + case ColorSpaceKHR::ePassThroughEXT: return "PassThroughEXT"; + case ColorSpaceKHR::eExtendedSrgbNonlinearEXT: return "ExtendedSrgbNonlinearEXT"; + case ColorSpaceKHR::eDisplayNativeAMD: return "DisplayNativeAMD"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( CompositeAlphaFlagBitsKHR value ) + { + switch ( value ) + { + case CompositeAlphaFlagBitsKHR::eOpaque: return "Opaque"; + case CompositeAlphaFlagBitsKHR::ePreMultiplied: return "PreMultiplied"; + case CompositeAlphaFlagBitsKHR::ePostMultiplied: return "PostMultiplied"; + case CompositeAlphaFlagBitsKHR::eInherit: return "Inherit"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_swapchain === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SwapchainCreateFlagBitsKHR value ) + { + switch ( value ) + { + case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions: return "SplitInstanceBindRegions"; + case SwapchainCreateFlagBitsKHR::eProtected: return "Protected"; + case SwapchainCreateFlagBitsKHR::eMutableFormat: return "MutableFormat"; + case SwapchainCreateFlagBitsKHR::eDeferredMemoryAllocationEXT: return "DeferredMemoryAllocationEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DeviceGroupPresentModeFlagBitsKHR value ) + { + switch ( value ) + { + case DeviceGroupPresentModeFlagBitsKHR::eLocal: return "Local"; + case DeviceGroupPresentModeFlagBitsKHR::eRemote: return "Remote"; + case DeviceGroupPresentModeFlagBitsKHR::eSum: return "Sum"; + case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice: return "LocalMultiDevice"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_display === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DisplayPlaneAlphaFlagBitsKHR value ) + { + switch ( value ) + { + case DisplayPlaneAlphaFlagBitsKHR::eOpaque: return "Opaque"; + case DisplayPlaneAlphaFlagBitsKHR::eGlobal: return "Global"; + case DisplayPlaneAlphaFlagBitsKHR::ePerPixel: return "PerPixel"; + case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied: return "PerPixelPremultiplied"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DisplayModeCreateFlagBitsKHR ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DisplaySurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + VULKAN_HPP_INLINE 
VULKAN_HPP_CONSTEXPR_20 std::string to_string( XlibSurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( XcbSurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( WaylandSurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AndroidSurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( Win32SurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DebugReportFlagBitsEXT value ) + { + switch ( value ) + { + case DebugReportFlagBitsEXT::eInformation: return "Information"; + case DebugReportFlagBitsEXT::eWarning: return "Warning"; + case DebugReportFlagBitsEXT::ePerformanceWarning: return "PerformanceWarning"; + case DebugReportFlagBitsEXT::eError: return "Error"; + case DebugReportFlagBitsEXT::eDebug: return "Debug"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DebugReportObjectTypeEXT value ) + { + switch ( value ) + { + case DebugReportObjectTypeEXT::eUnknown: return "Unknown"; + case DebugReportObjectTypeEXT::eInstance: return "Instance"; + case DebugReportObjectTypeEXT::ePhysicalDevice: return "PhysicalDevice"; + case DebugReportObjectTypeEXT::eDevice: return "Device"; + case DebugReportObjectTypeEXT::eQueue: return "Queue"; + case DebugReportObjectTypeEXT::eSemaphore: return "Semaphore"; + case DebugReportObjectTypeEXT::eCommandBuffer: return "CommandBuffer"; + case DebugReportObjectTypeEXT::eFence: return "Fence"; + case DebugReportObjectTypeEXT::eDeviceMemory: return "DeviceMemory"; + case DebugReportObjectTypeEXT::eBuffer: return "Buffer"; + case DebugReportObjectTypeEXT::eImage: return "Image"; + case DebugReportObjectTypeEXT::eEvent: return "Event"; + case DebugReportObjectTypeEXT::eQueryPool: return "QueryPool"; + case DebugReportObjectTypeEXT::eBufferView: return "BufferView"; + case DebugReportObjectTypeEXT::eImageView: return "ImageView"; + case DebugReportObjectTypeEXT::eShaderModule: return "ShaderModule"; + case DebugReportObjectTypeEXT::ePipelineCache: return "PipelineCache"; + case DebugReportObjectTypeEXT::ePipelineLayout: return "PipelineLayout"; + case DebugReportObjectTypeEXT::eRenderPass: return "RenderPass"; + case DebugReportObjectTypeEXT::ePipeline: return "Pipeline"; + case DebugReportObjectTypeEXT::eDescriptorSetLayout: return "DescriptorSetLayout"; + case DebugReportObjectTypeEXT::eSampler: return "Sampler"; + case DebugReportObjectTypeEXT::eDescriptorPool: return "DescriptorPool"; + case DebugReportObjectTypeEXT::eDescriptorSet: return "DescriptorSet"; + case DebugReportObjectTypeEXT::eFramebuffer: return "Framebuffer"; + case DebugReportObjectTypeEXT::eCommandPool: return 
"CommandPool"; + case DebugReportObjectTypeEXT::eSurfaceKHR: return "SurfaceKHR"; + case DebugReportObjectTypeEXT::eSwapchainKHR: return "SwapchainKHR"; + case DebugReportObjectTypeEXT::eDebugReportCallbackEXT: return "DebugReportCallbackEXT"; + case DebugReportObjectTypeEXT::eDisplayKHR: return "DisplayKHR"; + case DebugReportObjectTypeEXT::eDisplayModeKHR: return "DisplayModeKHR"; + case DebugReportObjectTypeEXT::eValidationCacheEXT: return "ValidationCacheEXT"; + case DebugReportObjectTypeEXT::eSamplerYcbcrConversion: return "SamplerYcbcrConversion"; + case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate"; + case DebugReportObjectTypeEXT::eCuModuleNVX: return "CuModuleNVX"; + case DebugReportObjectTypeEXT::eCuFunctionNVX: return "CuFunctionNVX"; + case DebugReportObjectTypeEXT::eAccelerationStructureKHR: return "AccelerationStructureKHR"; + case DebugReportObjectTypeEXT::eAccelerationStructureNV: return "AccelerationStructureNV"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case DebugReportObjectTypeEXT::eCudaModuleNV: return "CudaModuleNV"; + case DebugReportObjectTypeEXT::eCudaFunctionNV: return "CudaFunctionNV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA: return "BufferCollectionFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_AMD_rasterization_order === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( RasterizationOrderAMD value ) + { + switch ( value ) + { + case RasterizationOrderAMD::eStrict: return "Strict"; + case RasterizationOrderAMD::eRelaxed: return "Relaxed"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_video_queue === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoCodecOperationFlagBitsKHR value ) + { + switch ( value ) + { + case VideoCodecOperationFlagBitsKHR::eNone: return "None"; + case VideoCodecOperationFlagBitsKHR::eEncodeH264: return "EncodeH264"; + case VideoCodecOperationFlagBitsKHR::eEncodeH265: return "EncodeH265"; + case VideoCodecOperationFlagBitsKHR::eDecodeH264: return "DecodeH264"; + case VideoCodecOperationFlagBitsKHR::eDecodeH265: return "DecodeH265"; + case VideoCodecOperationFlagBitsKHR::eDecodeAv1: return "DecodeAv1"; + case VideoCodecOperationFlagBitsKHR::eEncodeAv1: return "EncodeAv1"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoChromaSubsamplingFlagBitsKHR value ) + { + switch ( value ) + { + case VideoChromaSubsamplingFlagBitsKHR::eInvalid: return "Invalid"; + case VideoChromaSubsamplingFlagBitsKHR::eMonochrome: return "Monochrome"; + case VideoChromaSubsamplingFlagBitsKHR::e420: return "420"; + case VideoChromaSubsamplingFlagBitsKHR::e422: return "422"; + case VideoChromaSubsamplingFlagBitsKHR::e444: return "444"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoComponentBitDepthFlagBitsKHR value ) + { + switch ( value ) + { + case VideoComponentBitDepthFlagBitsKHR::eInvalid: return "Invalid"; + case VideoComponentBitDepthFlagBitsKHR::e8: return "8"; + case VideoComponentBitDepthFlagBitsKHR::e10: return "10"; + case 
VideoComponentBitDepthFlagBitsKHR::e12: return "12"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoCapabilityFlagBitsKHR value ) + { + switch ( value ) + { + case VideoCapabilityFlagBitsKHR::eProtectedContent: return "ProtectedContent"; + case VideoCapabilityFlagBitsKHR::eSeparateReferenceImages: return "SeparateReferenceImages"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoSessionCreateFlagBitsKHR value ) + { + switch ( value ) + { + case VideoSessionCreateFlagBitsKHR::eProtectedContent: return "ProtectedContent"; + case VideoSessionCreateFlagBitsKHR::eAllowEncodeParameterOptimizations: return "AllowEncodeParameterOptimizations"; + case VideoSessionCreateFlagBitsKHR::eInlineQueries: return "InlineQueries"; + case VideoSessionCreateFlagBitsKHR::eAllowEncodeQuantizationDeltaMap: return "AllowEncodeQuantizationDeltaMap"; + case VideoSessionCreateFlagBitsKHR::eAllowEncodeEmphasisMap: return "AllowEncodeEmphasisMap"; + case VideoSessionCreateFlagBitsKHR::eInlineSessionParameters: return "InlineSessionParameters"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoCodingControlFlagBitsKHR value ) + { + switch ( value ) + { + case VideoCodingControlFlagBitsKHR::eReset: return "Reset"; + case VideoCodingControlFlagBitsKHR::eEncodeRateControl: return "EncodeRateControl"; + case VideoCodingControlFlagBitsKHR::eEncodeQualityLevel: return "EncodeQualityLevel"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( QueryResultStatusKHR value ) + { + switch ( value ) + { + case QueryResultStatusKHR::eError: return "Error"; + case QueryResultStatusKHR::eNotReady: return "NotReady"; + case QueryResultStatusKHR::eComplete: return "Complete"; + case QueryResultStatusKHR::eInsufficientBitstreamBufferRange: return "InsufficientBitstreamBufferRange"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoSessionParametersCreateFlagBitsKHR value ) + { + switch ( value ) + { + case VideoSessionParametersCreateFlagBitsKHR::eQuantizationMapCompatible: return "QuantizationMapCompatible"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoBeginCodingFlagBitsKHR ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEndCodingFlagBitsKHR ) + { + return "(void)"; + } + + //=== VK_KHR_video_decode_queue === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoDecodeCapabilityFlagBitsKHR value ) + { + switch ( value ) + { + case VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputCoincide: return "DpbAndOutputCoincide"; + case VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputDistinct: return "DpbAndOutputDistinct"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( 
VideoDecodeUsageFlagBitsKHR value ) + { + switch ( value ) + { + case VideoDecodeUsageFlagBitsKHR::eDefault: return "Default"; + case VideoDecodeUsageFlagBitsKHR::eTranscoding: return "Transcoding"; + case VideoDecodeUsageFlagBitsKHR::eOffline: return "Offline"; + case VideoDecodeUsageFlagBitsKHR::eStreaming: return "Streaming"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoDecodeFlagBitsKHR ) + { + return "(void)"; + } + + //=== VK_EXT_transform_feedback === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineRasterizationStateStreamCreateFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_KHR_video_encode_h264 === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeH264CapabilityFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeH264CapabilityFlagBitsKHR::eHrdCompliance: return "HrdCompliance"; + case VideoEncodeH264CapabilityFlagBitsKHR::ePredictionWeightTableGenerated: return "PredictionWeightTableGenerated"; + case VideoEncodeH264CapabilityFlagBitsKHR::eRowUnalignedSlice: return "RowUnalignedSlice"; + case VideoEncodeH264CapabilityFlagBitsKHR::eDifferentSliceType: return "DifferentSliceType"; + case VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL0List: return "BFrameInL0List"; + case VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL1List: return "BFrameInL1List"; + case VideoEncodeH264CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp: return "PerPictureTypeMinMaxQp"; + case VideoEncodeH264CapabilityFlagBitsKHR::ePerSliceConstantQp: return "PerSliceConstantQp"; + case VideoEncodeH264CapabilityFlagBitsKHR::eGeneratePrefixNalu: return "GeneratePrefixNalu"; + case VideoEncodeH264CapabilityFlagBitsKHR::eMbQpDiffWraparound: return "MbQpDiffWraparound"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeH264StdFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeH264StdFlagBitsKHR::eSeparateColorPlaneFlagSet: return "SeparateColorPlaneFlagSet"; + case VideoEncodeH264StdFlagBitsKHR::eQpprimeYZeroTransformBypassFlagSet: return "QpprimeYZeroTransformBypassFlagSet"; + case VideoEncodeH264StdFlagBitsKHR::eScalingMatrixPresentFlagSet: return "ScalingMatrixPresentFlagSet"; + case VideoEncodeH264StdFlagBitsKHR::eChromaQpIndexOffset: return "ChromaQpIndexOffset"; + case VideoEncodeH264StdFlagBitsKHR::eSecondChromaQpIndexOffset: return "SecondChromaQpIndexOffset"; + case VideoEncodeH264StdFlagBitsKHR::ePicInitQpMinus26: return "PicInitQpMinus26"; + case VideoEncodeH264StdFlagBitsKHR::eWeightedPredFlagSet: return "WeightedPredFlagSet"; + case VideoEncodeH264StdFlagBitsKHR::eWeightedBipredIdcExplicit: return "WeightedBipredIdcExplicit"; + case VideoEncodeH264StdFlagBitsKHR::eWeightedBipredIdcImplicit: return "WeightedBipredIdcImplicit"; + case VideoEncodeH264StdFlagBitsKHR::eTransform8X8ModeFlagSet: return "Transform8X8ModeFlagSet"; + case VideoEncodeH264StdFlagBitsKHR::eDirectSpatialMvPredFlagUnset: return "DirectSpatialMvPredFlagUnset"; + case VideoEncodeH264StdFlagBitsKHR::eEntropyCodingModeFlagUnset: return "EntropyCodingModeFlagUnset"; + case VideoEncodeH264StdFlagBitsKHR::eEntropyCodingModeFlagSet: return "EntropyCodingModeFlagSet"; + case VideoEncodeH264StdFlagBitsKHR::eDirect8X8InferenceFlagUnset: return "Direct8X8InferenceFlagUnset"; + case 
VideoEncodeH264StdFlagBitsKHR::eConstrainedIntraPredFlagSet: return "ConstrainedIntraPredFlagSet"; + case VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterDisabled: return "DeblockingFilterDisabled"; + case VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterEnabled: return "DeblockingFilterEnabled"; + case VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterPartial: return "DeblockingFilterPartial"; + case VideoEncodeH264StdFlagBitsKHR::eSliceQpDelta: return "SliceQpDelta"; + case VideoEncodeH264StdFlagBitsKHR::eDifferentSliceQpDelta: return "DifferentSliceQpDelta"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeH264RateControlFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeH264RateControlFlagBitsKHR::eAttemptHrdCompliance: return "AttemptHrdCompliance"; + case VideoEncodeH264RateControlFlagBitsKHR::eRegularGop: return "RegularGop"; + case VideoEncodeH264RateControlFlagBitsKHR::eReferencePatternFlat: return "ReferencePatternFlat"; + case VideoEncodeH264RateControlFlagBitsKHR::eReferencePatternDyadic: return "ReferencePatternDyadic"; + case VideoEncodeH264RateControlFlagBitsKHR::eTemporalLayerPatternDyadic: return "TemporalLayerPatternDyadic"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_video_encode_h265 === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeH265CapabilityFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeH265CapabilityFlagBitsKHR::eHrdCompliance: return "HrdCompliance"; + case VideoEncodeH265CapabilityFlagBitsKHR::ePredictionWeightTableGenerated: return "PredictionWeightTableGenerated"; + case VideoEncodeH265CapabilityFlagBitsKHR::eRowUnalignedSliceSegment: return "RowUnalignedSliceSegment"; + case VideoEncodeH265CapabilityFlagBitsKHR::eDifferentSliceSegmentType: return "DifferentSliceSegmentType"; + case VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL0List: return "BFrameInL0List"; + case VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL1List: return "BFrameInL1List"; + case VideoEncodeH265CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp: return "PerPictureTypeMinMaxQp"; + case VideoEncodeH265CapabilityFlagBitsKHR::ePerSliceSegmentConstantQp: return "PerSliceSegmentConstantQp"; + case VideoEncodeH265CapabilityFlagBitsKHR::eMultipleTilesPerSliceSegment: return "MultipleTilesPerSliceSegment"; + case VideoEncodeH265CapabilityFlagBitsKHR::eMultipleSliceSegmentsPerTile: return "MultipleSliceSegmentsPerTile"; + case VideoEncodeH265CapabilityFlagBitsKHR::eCuQpDiffWraparound: return "CuQpDiffWraparound"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeH265StdFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeH265StdFlagBitsKHR::eSeparateColorPlaneFlagSet: return "SeparateColorPlaneFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eSampleAdaptiveOffsetEnabledFlagSet: return "SampleAdaptiveOffsetEnabledFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eScalingListDataPresentFlagSet: return "ScalingListDataPresentFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::ePcmEnabledFlagSet: return "PcmEnabledFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eSpsTemporalMvpEnabledFlagSet: return "SpsTemporalMvpEnabledFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eInitQpMinus26: 
return "InitQpMinus26"; + case VideoEncodeH265StdFlagBitsKHR::eWeightedPredFlagSet: return "WeightedPredFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eWeightedBipredFlagSet: return "WeightedBipredFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eLog2ParallelMergeLevelMinus2: return "Log2ParallelMergeLevelMinus2"; + case VideoEncodeH265StdFlagBitsKHR::eSignDataHidingEnabledFlagSet: return "SignDataHidingEnabledFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eTransformSkipEnabledFlagSet: return "TransformSkipEnabledFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eTransformSkipEnabledFlagUnset: return "TransformSkipEnabledFlagUnset"; + case VideoEncodeH265StdFlagBitsKHR::ePpsSliceChromaQpOffsetsPresentFlagSet: return "PpsSliceChromaQpOffsetsPresentFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eTransquantBypassEnabledFlagSet: return "TransquantBypassEnabledFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eConstrainedIntraPredFlagSet: return "ConstrainedIntraPredFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eEntropyCodingSyncEnabledFlagSet: return "EntropyCodingSyncEnabledFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eDeblockingFilterOverrideEnabledFlagSet: return "DeblockingFilterOverrideEnabledFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eDependentSliceSegmentsEnabledFlagSet: return "DependentSliceSegmentsEnabledFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eDependentSliceSegmentFlagSet: return "DependentSliceSegmentFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eSliceQpDelta: return "SliceQpDelta"; + case VideoEncodeH265StdFlagBitsKHR::eDifferentSliceQpDelta: return "DifferentSliceQpDelta"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeH265CtbSizeFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeH265CtbSizeFlagBitsKHR::e16: return "16"; + case VideoEncodeH265CtbSizeFlagBitsKHR::e32: return "32"; + case VideoEncodeH265CtbSizeFlagBitsKHR::e64: return "64"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeH265TransformBlockSizeFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeH265TransformBlockSizeFlagBitsKHR::e4: return "4"; + case VideoEncodeH265TransformBlockSizeFlagBitsKHR::e8: return "8"; + case VideoEncodeH265TransformBlockSizeFlagBitsKHR::e16: return "16"; + case VideoEncodeH265TransformBlockSizeFlagBitsKHR::e32: return "32"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeH265RateControlFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeH265RateControlFlagBitsKHR::eAttemptHrdCompliance: return "AttemptHrdCompliance"; + case VideoEncodeH265RateControlFlagBitsKHR::eRegularGop: return "RegularGop"; + case VideoEncodeH265RateControlFlagBitsKHR::eReferencePatternFlat: return "ReferencePatternFlat"; + case VideoEncodeH265RateControlFlagBitsKHR::eReferencePatternDyadic: return "ReferencePatternDyadic"; + case VideoEncodeH265RateControlFlagBitsKHR::eTemporalSubLayerPatternDyadic: return "TemporalSubLayerPatternDyadic"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_video_decode_h264 === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( 
VideoDecodeH264PictureLayoutFlagBitsKHR value ) + { + switch ( value ) + { + case VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive: return "Progressive"; + case VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedInterleavedLines: return "InterlacedInterleavedLines"; + case VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedSeparatePlanes: return "InterlacedSeparatePlanes"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_AMD_shader_info === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ShaderInfoTypeAMD value ) + { + switch ( value ) + { + case ShaderInfoTypeAMD::eStatistics: return "Statistics"; + case ShaderInfoTypeAMD::eBinary: return "Binary"; + case ShaderInfoTypeAMD::eDisassembly: return "Disassembly"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( StreamDescriptorSurfaceCreateFlagBitsGGP ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ExternalMemoryHandleTypeFlagBitsNV value ) + { + switch ( value ) + { + case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32: return "OpaqueWin32"; + case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image: return "D3D11Image"; + case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt: return "D3D11ImageKmt"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ExternalMemoryFeatureFlagBitsNV value ) + { + switch ( value ) + { + case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly: return "DedicatedOnly"; + case ExternalMemoryFeatureFlagBitsNV::eExportable: return "Exportable"; + case ExternalMemoryFeatureFlagBitsNV::eImportable: return "Importable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_validation_flags === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ValidationCheckEXT value ) + { + switch ( value ) + { + case ValidationCheckEXT::eAll: return "All"; + case ValidationCheckEXT::eShaders: return "Shaders"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ViSurfaceCreateFlagBitsNN ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_EXT_conditional_rendering === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ConditionalRenderingFlagBitsEXT value ) + { + switch ( value ) + { + case ConditionalRenderingFlagBitsEXT::eInverted: return "Inverted"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_display_surface_counter === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SurfaceCounterFlagBitsEXT value ) + { + switch ( value ) + { + case SurfaceCounterFlagBitsEXT::eVblank: return "Vblank"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " 
)"; + } + } + + //=== VK_EXT_display_control === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DisplayPowerStateEXT value ) + { + switch ( value ) + { + case DisplayPowerStateEXT::eOff: return "Off"; + case DisplayPowerStateEXT::eSuspend: return "Suspend"; + case DisplayPowerStateEXT::eOn: return "On"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DeviceEventTypeEXT value ) + { + switch ( value ) + { + case DeviceEventTypeEXT::eDisplayHotplug: return "DisplayHotplug"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DisplayEventTypeEXT value ) + { + switch ( value ) + { + case DisplayEventTypeEXT::eFirstPixelOut: return "FirstPixelOut"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_viewport_swizzle === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ViewportCoordinateSwizzleNV value ) + { + switch ( value ) + { + case ViewportCoordinateSwizzleNV::ePositiveX: return "PositiveX"; + case ViewportCoordinateSwizzleNV::eNegativeX: return "NegativeX"; + case ViewportCoordinateSwizzleNV::ePositiveY: return "PositiveY"; + case ViewportCoordinateSwizzleNV::eNegativeY: return "NegativeY"; + case ViewportCoordinateSwizzleNV::ePositiveZ: return "PositiveZ"; + case ViewportCoordinateSwizzleNV::eNegativeZ: return "NegativeZ"; + case ViewportCoordinateSwizzleNV::ePositiveW: return "PositiveW"; + case ViewportCoordinateSwizzleNV::eNegativeW: return "NegativeW"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineViewportSwizzleStateCreateFlagBitsNV ) + { + return "(void)"; + } + + //=== VK_EXT_discard_rectangles === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DiscardRectangleModeEXT value ) + { + switch ( value ) + { + case DiscardRectangleModeEXT::eInclusive: return "Inclusive"; + case DiscardRectangleModeEXT::eExclusive: return "Exclusive"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineDiscardRectangleStateCreateFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_EXT_conservative_rasterization === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ConservativeRasterizationModeEXT value ) + { + switch ( value ) + { + case ConservativeRasterizationModeEXT::eDisabled: return "Disabled"; + case ConservativeRasterizationModeEXT::eOverestimate: return "Overestimate"; + case ConservativeRasterizationModeEXT::eUnderestimate: return "Underestimate"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineRasterizationConservativeStateCreateFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_EXT_depth_clip_enable === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineRasterizationDepthClipStateCreateFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_KHR_performance_query === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( 
PerformanceCounterDescriptionFlagBitsKHR value ) + { + switch ( value ) + { + case PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting: return "PerformanceImpacting"; + case PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted: return "ConcurrentlyImpacted"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PerformanceCounterScopeKHR value ) + { + switch ( value ) + { + case PerformanceCounterScopeKHR::eCommandBuffer: return "CommandBuffer"; + case PerformanceCounterScopeKHR::eRenderPass: return "RenderPass"; + case PerformanceCounterScopeKHR::eCommand: return "Command"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PerformanceCounterStorageKHR value ) + { + switch ( value ) + { + case PerformanceCounterStorageKHR::eInt32: return "Int32"; + case PerformanceCounterStorageKHR::eInt64: return "Int64"; + case PerformanceCounterStorageKHR::eUint32: return "Uint32"; + case PerformanceCounterStorageKHR::eUint64: return "Uint64"; + case PerformanceCounterStorageKHR::eFloat32: return "Float32"; + case PerformanceCounterStorageKHR::eFloat64: return "Float64"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PerformanceCounterUnitKHR value ) + { + switch ( value ) + { + case PerformanceCounterUnitKHR::eGeneric: return "Generic"; + case PerformanceCounterUnitKHR::ePercentage: return "Percentage"; + case PerformanceCounterUnitKHR::eNanoseconds: return "Nanoseconds"; + case PerformanceCounterUnitKHR::eBytes: return "Bytes"; + case PerformanceCounterUnitKHR::eBytesPerSecond: return "BytesPerSecond"; + case PerformanceCounterUnitKHR::eKelvin: return "Kelvin"; + case PerformanceCounterUnitKHR::eWatts: return "Watts"; + case PerformanceCounterUnitKHR::eVolts: return "Volts"; + case PerformanceCounterUnitKHR::eAmps: return "Amps"; + case PerformanceCounterUnitKHR::eHertz: return "Hertz"; + case PerformanceCounterUnitKHR::eCycles: return "Cycles"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AcquireProfilingLockFlagBitsKHR ) + { + return "(void)"; + } + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( IOSSurfaceCreateFlagBitsMVK ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( MacOSSurfaceCreateFlagBitsMVK ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DebugUtilsMessageSeverityFlagBitsEXT value ) + { + switch ( value ) + { + case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose: return "Verbose"; + case DebugUtilsMessageSeverityFlagBitsEXT::eInfo: return "Info"; + case DebugUtilsMessageSeverityFlagBitsEXT::eWarning: return "Warning"; + case DebugUtilsMessageSeverityFlagBitsEXT::eError: return "Error"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + 
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DebugUtilsMessageTypeFlagBitsEXT value ) + { + switch ( value ) + { + case DebugUtilsMessageTypeFlagBitsEXT::eGeneral: return "General"; + case DebugUtilsMessageTypeFlagBitsEXT::eValidation: return "Validation"; + case DebugUtilsMessageTypeFlagBitsEXT::ePerformance: return "Performance"; + case DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding: return "DeviceAddressBinding"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DebugUtilsMessengerCallbackDataFlagBitsEXT ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DebugUtilsMessengerCreateFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_EXT_blend_operation_advanced === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( BlendOverlapEXT value ) + { + switch ( value ) + { + case BlendOverlapEXT::eUncorrelated: return "Uncorrelated"; + case BlendOverlapEXT::eDisjoint: return "Disjoint"; + case BlendOverlapEXT::eConjoint: return "Conjoint"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_fragment_coverage_to_color === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineCoverageToColorStateCreateFlagBitsNV ) + { + return "(void)"; + } + + //=== VK_KHR_acceleration_structure === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AccelerationStructureTypeKHR value ) + { + switch ( value ) + { + case AccelerationStructureTypeKHR::eTopLevel: return "TopLevel"; + case AccelerationStructureTypeKHR::eBottomLevel: return "BottomLevel"; + case AccelerationStructureTypeKHR::eGeneric: return "Generic"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AccelerationStructureBuildTypeKHR value ) + { + switch ( value ) + { + case AccelerationStructureBuildTypeKHR::eHost: return "Host"; + case AccelerationStructureBuildTypeKHR::eDevice: return "Device"; + case AccelerationStructureBuildTypeKHR::eHostOrDevice: return "HostOrDevice"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( GeometryFlagBitsKHR value ) + { + switch ( value ) + { + case GeometryFlagBitsKHR::eOpaque: return "Opaque"; + case GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation: return "NoDuplicateAnyHitInvocation"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( GeometryInstanceFlagBitsKHR value ) + { + switch ( value ) + { + case GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable: return "TriangleFacingCullDisable"; + case GeometryInstanceFlagBitsKHR::eTriangleFlipFacing: return "TriangleFlipFacing"; + case GeometryInstanceFlagBitsKHR::eForceOpaque: return "ForceOpaque"; + case GeometryInstanceFlagBitsKHR::eForceNoOpaque: return "ForceNoOpaque"; + case GeometryInstanceFlagBitsKHR::eForceOpacityMicromap2StateEXT: return "ForceOpacityMicromap2StateEXT"; + case GeometryInstanceFlagBitsKHR::eDisableOpacityMicromapsEXT: return "DisableOpacityMicromapsEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value 
) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( BuildAccelerationStructureFlagBitsKHR value ) + { + switch ( value ) + { + case BuildAccelerationStructureFlagBitsKHR::eAllowUpdate: return "AllowUpdate"; + case BuildAccelerationStructureFlagBitsKHR::eAllowCompaction: return "AllowCompaction"; + case BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace: return "PreferFastTrace"; + case BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild: return "PreferFastBuild"; + case BuildAccelerationStructureFlagBitsKHR::eLowMemory: return "LowMemory"; + case BuildAccelerationStructureFlagBitsKHR::eMotionNV: return "MotionNV"; + case BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapUpdateEXT: return "AllowOpacityMicromapUpdateEXT"; + case BuildAccelerationStructureFlagBitsKHR::eAllowDisableOpacityMicromapsEXT: return "AllowDisableOpacityMicromapsEXT"; + case BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapDataUpdateEXT: return "AllowOpacityMicromapDataUpdateEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case BuildAccelerationStructureFlagBitsKHR::eAllowDisplacementMicromapUpdateNV: return "AllowDisplacementMicromapUpdateNV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case BuildAccelerationStructureFlagBitsKHR::eAllowDataAccess: return "AllowDataAccess"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( CopyAccelerationStructureModeKHR value ) + { + switch ( value ) + { + case CopyAccelerationStructureModeKHR::eClone: return "Clone"; + case CopyAccelerationStructureModeKHR::eCompact: return "Compact"; + case CopyAccelerationStructureModeKHR::eSerialize: return "Serialize"; + case CopyAccelerationStructureModeKHR::eDeserialize: return "Deserialize"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( GeometryTypeKHR value ) + { + switch ( value ) + { + case GeometryTypeKHR::eTriangles: return "Triangles"; + case GeometryTypeKHR::eAabbs: return "Aabbs"; + case GeometryTypeKHR::eInstances: return "Instances"; + case GeometryTypeKHR::eSpheresNV: return "SpheresNV"; + case GeometryTypeKHR::eLinearSweptSpheresNV: return "LinearSweptSpheresNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AccelerationStructureCompatibilityKHR value ) + { + switch ( value ) + { + case AccelerationStructureCompatibilityKHR::eCompatible: return "Compatible"; + case AccelerationStructureCompatibilityKHR::eIncompatible: return "Incompatible"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AccelerationStructureCreateFlagBitsKHR value ) + { + switch ( value ) + { + case AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; + case AccelerationStructureCreateFlagBitsKHR::eDescriptorBufferCaptureReplayEXT: return "DescriptorBufferCaptureReplayEXT"; + case AccelerationStructureCreateFlagBitsKHR::eMotionNV: return "MotionNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( 
BuildAccelerationStructureModeKHR value ) + { + switch ( value ) + { + case BuildAccelerationStructureModeKHR::eBuild: return "Build"; + case BuildAccelerationStructureModeKHR::eUpdate: return "Update"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_ray_tracing_pipeline === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( RayTracingShaderGroupTypeKHR value ) + { + switch ( value ) + { + case RayTracingShaderGroupTypeKHR::eGeneral: return "General"; + case RayTracingShaderGroupTypeKHR::eTrianglesHitGroup: return "TrianglesHitGroup"; + case RayTracingShaderGroupTypeKHR::eProceduralHitGroup: return "ProceduralHitGroup"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ShaderGroupShaderKHR value ) + { + switch ( value ) + { + case ShaderGroupShaderKHR::eGeneral: return "General"; + case ShaderGroupShaderKHR::eClosestHit: return "ClosestHit"; + case ShaderGroupShaderKHR::eAnyHit: return "AnyHit"; + case ShaderGroupShaderKHR::eIntersection: return "Intersection"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_framebuffer_mixed_samples === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( CoverageModulationModeNV value ) + { + switch ( value ) + { + case CoverageModulationModeNV::eNone: return "None"; + case CoverageModulationModeNV::eRgb: return "Rgb"; + case CoverageModulationModeNV::eAlpha: return "Alpha"; + case CoverageModulationModeNV::eRgba: return "Rgba"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineCoverageModulationStateCreateFlagBitsNV ) + { + return "(void)"; + } + + //=== VK_EXT_validation_cache === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ValidationCacheHeaderVersionEXT value ) + { + switch ( value ) + { + case ValidationCacheHeaderVersionEXT::eOne: return "One"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ValidationCacheCreateFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_NV_shading_rate_image === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ShadingRatePaletteEntryNV value ) + { + switch ( value ) + { + case ShadingRatePaletteEntryNV::eNoInvocations: return "NoInvocations"; + case ShadingRatePaletteEntryNV::e16InvocationsPerPixel: return "16InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e8InvocationsPerPixel: return "8InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e4InvocationsPerPixel: return "4InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e2InvocationsPerPixel: return "2InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e1InvocationPerPixel: return "1InvocationPerPixel"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels: return "1InvocationPer2X1Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels: return "1InvocationPer1X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels: return "1InvocationPer2X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer4X2Pixels: return "1InvocationPer4X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X4Pixels: return 
"1InvocationPer2X4Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer4X4Pixels: return "1InvocationPer4X4Pixels"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( CoarseSampleOrderTypeNV value ) + { + switch ( value ) + { + case CoarseSampleOrderTypeNV::eDefault: return "Default"; + case CoarseSampleOrderTypeNV::eCustom: return "Custom"; + case CoarseSampleOrderTypeNV::ePixelMajor: return "PixelMajor"; + case CoarseSampleOrderTypeNV::eSampleMajor: return "SampleMajor"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_ray_tracing === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AccelerationStructureMemoryRequirementsTypeNV value ) + { + switch ( value ) + { + case AccelerationStructureMemoryRequirementsTypeNV::eObject: return "Object"; + case AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch: return "BuildScratch"; + case AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch: return "UpdateScratch"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_AMD_pipeline_compiler_control === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineCompilerControlFlagBitsAMD ) + { + return "(void)"; + } + + //=== VK_AMD_memory_overallocation_behavior === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( MemoryOverallocationBehaviorAMD value ) + { + switch ( value ) + { + case MemoryOverallocationBehaviorAMD::eDefault: return "Default"; + case MemoryOverallocationBehaviorAMD::eAllowed: return "Allowed"; + case MemoryOverallocationBehaviorAMD::eDisallowed: return "Disallowed"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_INTEL_performance_query === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PerformanceConfigurationTypeINTEL value ) + { + switch ( value ) + { + case PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated: return "CommandQueueMetricsDiscoveryActivated"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( QueryPoolSamplingModeINTEL value ) + { + switch ( value ) + { + case QueryPoolSamplingModeINTEL::eManual: return "Manual"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PerformanceOverrideTypeINTEL value ) + { + switch ( value ) + { + case PerformanceOverrideTypeINTEL::eNullHardware: return "NullHardware"; + case PerformanceOverrideTypeINTEL::eFlushGpuCaches: return "FlushGpuCaches"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PerformanceParameterTypeINTEL value ) + { + switch ( value ) + { + case PerformanceParameterTypeINTEL::eHwCountersSupported: return "HwCountersSupported"; + case PerformanceParameterTypeINTEL::eStreamMarkerValidBits: return "StreamMarkerValidBits"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string 
to_string( PerformanceValueTypeINTEL value ) + { + switch ( value ) + { + case PerformanceValueTypeINTEL::eUint32: return "Uint32"; + case PerformanceValueTypeINTEL::eUint64: return "Uint64"; + case PerformanceValueTypeINTEL::eFloat: return "Float"; + case PerformanceValueTypeINTEL::eBool: return "Bool"; + case PerformanceValueTypeINTEL::eString: return "String"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ImagePipeSurfaceCreateFlagBitsFUCHSIA ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( MetalSurfaceCreateFlagBitsEXT ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( FragmentShadingRateCombinerOpKHR value ) + { + switch ( value ) + { + case FragmentShadingRateCombinerOpKHR::eKeep: return "Keep"; + case FragmentShadingRateCombinerOpKHR::eReplace: return "Replace"; + case FragmentShadingRateCombinerOpKHR::eMin: return "Min"; + case FragmentShadingRateCombinerOpKHR::eMax: return "Max"; + case FragmentShadingRateCombinerOpKHR::eMul: return "Mul"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_AMD_shader_core_properties2 === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ShaderCorePropertiesFlagBitsAMD ) + { + return "(void)"; + } + + //=== VK_EXT_validation_features === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ValidationFeatureEnableEXT value ) + { + switch ( value ) + { + case ValidationFeatureEnableEXT::eGpuAssisted: return "GpuAssisted"; + case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot: return "GpuAssistedReserveBindingSlot"; + case ValidationFeatureEnableEXT::eBestPractices: return "BestPractices"; + case ValidationFeatureEnableEXT::eDebugPrintf: return "DebugPrintf"; + case ValidationFeatureEnableEXT::eSynchronizationValidation: return "SynchronizationValidation"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ValidationFeatureDisableEXT value ) + { + switch ( value ) + { + case ValidationFeatureDisableEXT::eAll: return "All"; + case ValidationFeatureDisableEXT::eShaders: return "Shaders"; + case ValidationFeatureDisableEXT::eThreadSafety: return "ThreadSafety"; + case ValidationFeatureDisableEXT::eApiParameters: return "ApiParameters"; + case ValidationFeatureDisableEXT::eObjectLifetimes: return "ObjectLifetimes"; + case ValidationFeatureDisableEXT::eCoreChecks: return "CoreChecks"; + case ValidationFeatureDisableEXT::eUniqueHandles: return "UniqueHandles"; + case ValidationFeatureDisableEXT::eShaderValidationCache: return "ShaderValidationCache"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_coverage_reduction_mode === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( CoverageReductionModeNV value ) + { + switch ( value ) + { + case CoverageReductionModeNV::eMerge: return "Merge"; + case CoverageReductionModeNV::eTruncate: return 
"Truncate"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineCoverageReductionStateCreateFlagBitsNV ) + { + return "(void)"; + } + + //=== VK_EXT_provoking_vertex === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ProvokingVertexModeEXT value ) + { + switch ( value ) + { + case ProvokingVertexModeEXT::eFirstVertex: return "FirstVertex"; + case ProvokingVertexModeEXT::eLastVertex: return "LastVertex"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( FullScreenExclusiveEXT value ) + { + switch ( value ) + { + case FullScreenExclusiveEXT::eDefault: return "Default"; + case FullScreenExclusiveEXT::eAllowed: return "Allowed"; + case FullScreenExclusiveEXT::eDisallowed: return "Disallowed"; + case FullScreenExclusiveEXT::eApplicationControlled: return "ApplicationControlled"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( HeadlessSurfaceCreateFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_KHR_pipeline_executable_properties === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PipelineExecutableStatisticFormatKHR value ) + { + switch ( value ) + { + case PipelineExecutableStatisticFormatKHR::eBool32: return "Bool32"; + case PipelineExecutableStatisticFormatKHR::eInt64: return "Int64"; + case PipelineExecutableStatisticFormatKHR::eUint64: return "Uint64"; + case PipelineExecutableStatisticFormatKHR::eFloat64: return "Float64"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_surface_maintenance1 === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PresentScalingFlagBitsEXT value ) + { + switch ( value ) + { + case PresentScalingFlagBitsEXT::eOneToOne: return "OneToOne"; + case PresentScalingFlagBitsEXT::eAspectRatioStretch: return "AspectRatioStretch"; + case PresentScalingFlagBitsEXT::eStretch: return "Stretch"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PresentGravityFlagBitsEXT value ) + { + switch ( value ) + { + case PresentGravityFlagBitsEXT::eMin: return "Min"; + case PresentGravityFlagBitsEXT::eMax: return "Max"; + case PresentGravityFlagBitsEXT::eCentered: return "Centered"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_device_generated_commands === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( IndirectStateFlagBitsNV value ) + { + switch ( value ) + { + case IndirectStateFlagBitsNV::eFlagFrontface: return "FlagFrontface"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( IndirectCommandsTokenTypeNV value ) + { + switch ( value ) + { + case IndirectCommandsTokenTypeNV::eShaderGroup: return "ShaderGroup"; + case IndirectCommandsTokenTypeNV::eStateFlags: 
return "StateFlags"; + case IndirectCommandsTokenTypeNV::eIndexBuffer: return "IndexBuffer"; + case IndirectCommandsTokenTypeNV::eVertexBuffer: return "VertexBuffer"; + case IndirectCommandsTokenTypeNV::ePushConstant: return "PushConstant"; + case IndirectCommandsTokenTypeNV::eDrawIndexed: return "DrawIndexed"; + case IndirectCommandsTokenTypeNV::eDraw: return "Draw"; + case IndirectCommandsTokenTypeNV::eDrawTasks: return "DrawTasks"; + case IndirectCommandsTokenTypeNV::eDrawMeshTasks: return "DrawMeshTasks"; + case IndirectCommandsTokenTypeNV::ePipeline: return "Pipeline"; + case IndirectCommandsTokenTypeNV::eDispatch: return "Dispatch"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( IndirectCommandsLayoutUsageFlagBitsNV value ) + { + switch ( value ) + { + case IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess: return "ExplicitPreprocess"; + case IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences: return "IndexedSequences"; + case IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences: return "UnorderedSequences"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_depth_bias_control === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DepthBiasRepresentationEXT value ) + { + switch ( value ) + { + case DepthBiasRepresentationEXT::eLeastRepresentableValueFormat: return "LeastRepresentableValueFormat"; + case DepthBiasRepresentationEXT::eLeastRepresentableValueForceUnorm: return "LeastRepresentableValueForceUnorm"; + case DepthBiasRepresentationEXT::eFloat: return "Float"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_device_memory_report === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DeviceMemoryReportEventTypeEXT value ) + { + switch ( value ) + { + case DeviceMemoryReportEventTypeEXT::eAllocate: return "Allocate"; + case DeviceMemoryReportEventTypeEXT::eFree: return "Free"; + case DeviceMemoryReportEventTypeEXT::eImport: return "Import"; + case DeviceMemoryReportEventTypeEXT::eUnimport: return "Unimport"; + case DeviceMemoryReportEventTypeEXT::eAllocationFailed: return "AllocationFailed"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DeviceMemoryReportFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_KHR_video_encode_queue === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeCapabilityFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes: return "PrecedingExternallyEncodedBytes"; + case VideoEncodeCapabilityFlagBitsKHR::eInsufficientBitstreamBufferRangeDetection: return "InsufficientBitstreamBufferRangeDetection"; + case VideoEncodeCapabilityFlagBitsKHR::eQuantizationDeltaMap: return "QuantizationDeltaMap"; + case VideoEncodeCapabilityFlagBitsKHR::eEmphasisMap: return "EmphasisMap"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeFeedbackFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeFeedbackFlagBitsKHR::eBitstreamBufferOffset: return "BitstreamBufferOffset"; + case 
VideoEncodeFeedbackFlagBitsKHR::eBitstreamBytesWritten: return "BitstreamBytesWritten"; + case VideoEncodeFeedbackFlagBitsKHR::eBitstreamHasOverrides: return "BitstreamHasOverrides"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeUsageFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeUsageFlagBitsKHR::eDefault: return "Default"; + case VideoEncodeUsageFlagBitsKHR::eTranscoding: return "Transcoding"; + case VideoEncodeUsageFlagBitsKHR::eStreaming: return "Streaming"; + case VideoEncodeUsageFlagBitsKHR::eRecording: return "Recording"; + case VideoEncodeUsageFlagBitsKHR::eConferencing: return "Conferencing"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeContentFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeContentFlagBitsKHR::eDefault: return "Default"; + case VideoEncodeContentFlagBitsKHR::eCamera: return "Camera"; + case VideoEncodeContentFlagBitsKHR::eDesktop: return "Desktop"; + case VideoEncodeContentFlagBitsKHR::eRendered: return "Rendered"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeTuningModeKHR value ) + { + switch ( value ) + { + case VideoEncodeTuningModeKHR::eDefault: return "Default"; + case VideoEncodeTuningModeKHR::eHighQuality: return "HighQuality"; + case VideoEncodeTuningModeKHR::eLowLatency: return "LowLatency"; + case VideoEncodeTuningModeKHR::eUltraLowLatency: return "UltraLowLatency"; + case VideoEncodeTuningModeKHR::eLossless: return "Lossless"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeRateControlModeFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeRateControlModeFlagBitsKHR::eDefault: return "Default"; + case VideoEncodeRateControlModeFlagBitsKHR::eDisabled: return "Disabled"; + case VideoEncodeRateControlModeFlagBitsKHR::eCbr: return "Cbr"; + case VideoEncodeRateControlModeFlagBitsKHR::eVbr: return "Vbr"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeFlagBitsKHR::eWithQuantizationDeltaMap: return "WithQuantizationDeltaMap"; + case VideoEncodeFlagBitsKHR::eWithEmphasisMap: return "WithEmphasisMap"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeRateControlFlagBitsKHR ) + { + return "(void)"; + } + + //=== VK_NV_device_diagnostics_config === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DeviceDiagnosticsConfigFlagBitsNV value ) + { + switch ( value ) + { + case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo: return "EnableShaderDebugInfo"; + case DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking: return "EnableResourceTracking"; + case DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints: return "EnableAutomaticCheckpoints"; + case 
DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderErrorReporting: return "EnableShaderErrorReporting"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_QCOM_tile_shading === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( TileShadingRenderPassFlagBitsQCOM value ) + { + switch ( value ) + { + case TileShadingRenderPassFlagBitsQCOM::eEnable: return "Enable"; + case TileShadingRenderPassFlagBitsQCOM::ePerTileExecution: return "PerTileExecution"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ExportMetalObjectTypeFlagBitsEXT value ) + { + switch ( value ) + { + case ExportMetalObjectTypeFlagBitsEXT::eMetalDevice: return "MetalDevice"; + case ExportMetalObjectTypeFlagBitsEXT::eMetalCommandQueue: return "MetalCommandQueue"; + case ExportMetalObjectTypeFlagBitsEXT::eMetalBuffer: return "MetalBuffer"; + case ExportMetalObjectTypeFlagBitsEXT::eMetalTexture: return "MetalTexture"; + case ExportMetalObjectTypeFlagBitsEXT::eMetalIosurface: return "MetalIosurface"; + case ExportMetalObjectTypeFlagBitsEXT::eMetalSharedEvent: return "MetalSharedEvent"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_graphics_pipeline_library === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( GraphicsPipelineLibraryFlagBitsEXT value ) + { + switch ( value ) + { + case GraphicsPipelineLibraryFlagBitsEXT::eVertexInputInterface: return "VertexInputInterface"; + case GraphicsPipelineLibraryFlagBitsEXT::ePreRasterizationShaders: return "PreRasterizationShaders"; + case GraphicsPipelineLibraryFlagBitsEXT::eFragmentShader: return "FragmentShader"; + case GraphicsPipelineLibraryFlagBitsEXT::eFragmentOutputInterface: return "FragmentOutputInterface"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_fragment_shading_rate_enums === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( FragmentShadingRateNV value ) + { + switch ( value ) + { + case FragmentShadingRateNV::e1InvocationPerPixel: return "1InvocationPerPixel"; + case FragmentShadingRateNV::e1InvocationPer1X2Pixels: return "1InvocationPer1X2Pixels"; + case FragmentShadingRateNV::e1InvocationPer2X1Pixels: return "1InvocationPer2X1Pixels"; + case FragmentShadingRateNV::e1InvocationPer2X2Pixels: return "1InvocationPer2X2Pixels"; + case FragmentShadingRateNV::e1InvocationPer2X4Pixels: return "1InvocationPer2X4Pixels"; + case FragmentShadingRateNV::e1InvocationPer4X2Pixels: return "1InvocationPer4X2Pixels"; + case FragmentShadingRateNV::e1InvocationPer4X4Pixels: return "1InvocationPer4X4Pixels"; + case FragmentShadingRateNV::e2InvocationsPerPixel: return "2InvocationsPerPixel"; + case FragmentShadingRateNV::e4InvocationsPerPixel: return "4InvocationsPerPixel"; + case FragmentShadingRateNV::e8InvocationsPerPixel: return "8InvocationsPerPixel"; + case FragmentShadingRateNV::e16InvocationsPerPixel: return "16InvocationsPerPixel"; + case FragmentShadingRateNV::eNoInvocations: return "NoInvocations"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string 
to_string( FragmentShadingRateTypeNV value ) + { + switch ( value ) + { + case FragmentShadingRateTypeNV::eFragmentSize: return "FragmentSize"; + case FragmentShadingRateTypeNV::eEnums: return "Enums"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_ray_tracing_motion_blur === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AccelerationStructureMotionInstanceTypeNV value ) + { + switch ( value ) + { + case AccelerationStructureMotionInstanceTypeNV::eStatic: return "Static"; + case AccelerationStructureMotionInstanceTypeNV::eMatrixMotion: return "MatrixMotion"; + case AccelerationStructureMotionInstanceTypeNV::eSrtMotion: return "SrtMotion"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AccelerationStructureMotionInfoFlagBitsNV ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AccelerationStructureMotionInstanceFlagBitsNV ) + { + return "(void)"; + } + + //=== VK_EXT_image_compression_control === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ImageCompressionFlagBitsEXT value ) + { + switch ( value ) + { + case ImageCompressionFlagBitsEXT::eDefault: return "Default"; + case ImageCompressionFlagBitsEXT::eFixedRateDefault: return "FixedRateDefault"; + case ImageCompressionFlagBitsEXT::eFixedRateExplicit: return "FixedRateExplicit"; + case ImageCompressionFlagBitsEXT::eDisabled: return "Disabled"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ImageCompressionFixedRateFlagBitsEXT value ) + { + switch ( value ) + { + case ImageCompressionFixedRateFlagBitsEXT::eNone: return "None"; + case ImageCompressionFixedRateFlagBitsEXT::e1Bpc: return "1Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e2Bpc: return "2Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e3Bpc: return "3Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e4Bpc: return "4Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e5Bpc: return "5Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e6Bpc: return "6Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e7Bpc: return "7Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e8Bpc: return "8Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e9Bpc: return "9Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e10Bpc: return "10Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e11Bpc: return "11Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e12Bpc: return "12Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e13Bpc: return "13Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e14Bpc: return "14Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e15Bpc: return "15Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e16Bpc: return "16Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e17Bpc: return "17Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e18Bpc: return "18Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e19Bpc: return "19Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e20Bpc: return "20Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e21Bpc: return "21Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e22Bpc: return "22Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e23Bpc: return "23Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e24Bpc: return 
"24Bpc"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_device_fault === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DeviceFaultAddressTypeEXT value ) + { + switch ( value ) + { + case DeviceFaultAddressTypeEXT::eNone: return "None"; + case DeviceFaultAddressTypeEXT::eReadInvalid: return "ReadInvalid"; + case DeviceFaultAddressTypeEXT::eWriteInvalid: return "WriteInvalid"; + case DeviceFaultAddressTypeEXT::eExecuteInvalid: return "ExecuteInvalid"; + case DeviceFaultAddressTypeEXT::eInstructionPointerUnknown: return "InstructionPointerUnknown"; + case DeviceFaultAddressTypeEXT::eInstructionPointerInvalid: return "InstructionPointerInvalid"; + case DeviceFaultAddressTypeEXT::eInstructionPointerFault: return "InstructionPointerFault"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DeviceFaultVendorBinaryHeaderVersionEXT value ) + { + switch ( value ) + { + case DeviceFaultVendorBinaryHeaderVersionEXT::eOne: return "One"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DirectFBSurfaceCreateFlagBitsEXT ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_device_address_binding_report === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DeviceAddressBindingFlagBitsEXT value ) + { + switch ( value ) + { + case DeviceAddressBindingFlagBitsEXT::eInternalObject: return "InternalObject"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DeviceAddressBindingTypeEXT value ) + { + switch ( value ) + { + case DeviceAddressBindingTypeEXT::eBind: return "Bind"; + case DeviceAddressBindingTypeEXT::eUnbind: return "Unbind"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ImageConstraintsInfoFlagBitsFUCHSIA value ) + { + switch ( value ) + { + case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely: return "CpuReadRarely"; + case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften: return "CpuReadOften"; + case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely: return "CpuWriteRarely"; + case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften: return "CpuWriteOften"; + case ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional: return "ProtectedOptional"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ImageFormatConstraintsFlagBitsFUCHSIA ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_frame_boundary === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( FrameBoundaryFlagBitsEXT value ) + { + switch ( value ) + { + case FrameBoundaryFlagBitsEXT::eFrameEnd: return "FrameEnd"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( 
VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ScreenSurfaceCreateFlagBitsQNX ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_opacity_micromap === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( MicromapTypeEXT value ) + { + switch ( value ) + { + case MicromapTypeEXT::eOpacityMicromap: return "OpacityMicromap"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case MicromapTypeEXT::eDisplacementMicromapNV: return "DisplacementMicromapNV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( BuildMicromapFlagBitsEXT value ) + { + switch ( value ) + { + case BuildMicromapFlagBitsEXT::ePreferFastTrace: return "PreferFastTrace"; + case BuildMicromapFlagBitsEXT::ePreferFastBuild: return "PreferFastBuild"; + case BuildMicromapFlagBitsEXT::eAllowCompaction: return "AllowCompaction"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( CopyMicromapModeEXT value ) + { + switch ( value ) + { + case CopyMicromapModeEXT::eClone: return "Clone"; + case CopyMicromapModeEXT::eSerialize: return "Serialize"; + case CopyMicromapModeEXT::eDeserialize: return "Deserialize"; + case CopyMicromapModeEXT::eCompact: return "Compact"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( MicromapCreateFlagBitsEXT value ) + { + switch ( value ) + { + case MicromapCreateFlagBitsEXT::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( BuildMicromapModeEXT value ) + { + switch ( value ) + { + case BuildMicromapModeEXT::eBuild: return "Build"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( OpacityMicromapFormatEXT value ) + { + switch ( value ) + { + case OpacityMicromapFormatEXT::e2State: return "2State"; + case OpacityMicromapFormatEXT::e4State: return "4State"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( OpacityMicromapSpecialIndexEXT value ) + { + switch ( value ) + { + case OpacityMicromapSpecialIndexEXT::eFullyTransparent: return "FullyTransparent"; + case OpacityMicromapSpecialIndexEXT::eFullyOpaque: return "FullyOpaque"; + case OpacityMicromapSpecialIndexEXT::eFullyUnknownTransparent: return "FullyUnknownTransparent"; + case OpacityMicromapSpecialIndexEXT::eFullyUnknownOpaque: return "FullyUnknownOpaque"; + case OpacityMicromapSpecialIndexEXT::eClusterGeometryDisableOpacityMicromapNV: return "ClusterGeometryDisableOpacityMicromapNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DisplacementMicromapFormatNV value ) + { + 
switch ( value ) + { + case DisplacementMicromapFormatNV::e64Triangles64Bytes: return "64Triangles64Bytes"; + case DisplacementMicromapFormatNV::e256Triangles128Bytes: return "256Triangles128Bytes"; + case DisplacementMicromapFormatNV::e1024Triangles128Bytes: return "1024Triangles128Bytes"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_ARM_scheduling_controls === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PhysicalDeviceSchedulingControlsFlagBitsARM value ) + { + switch ( value ) + { + case PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount: return "ShaderCoreCount"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_memory_decompression === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( MemoryDecompressionMethodFlagBitsNV value ) + { + switch ( value ) + { + case MemoryDecompressionMethodFlagBitsNV::eGdeflate10: return "Gdeflate10"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_ray_tracing_linear_swept_spheres === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( RayTracingLssIndexingModeNV value ) + { + switch ( value ) + { + case RayTracingLssIndexingModeNV::eList: return "List"; + case RayTracingLssIndexingModeNV::eSuccessive: return "Successive"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( RayTracingLssPrimitiveEndCapsModeNV value ) + { + switch ( value ) + { + case RayTracingLssPrimitiveEndCapsModeNV::eNone: return "None"; + case RayTracingLssPrimitiveEndCapsModeNV::eChained: return "Chained"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_subpass_merge_feedback === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( SubpassMergeStatusEXT value ) + { + switch ( value ) + { + case SubpassMergeStatusEXT::eMerged: return "Merged"; + case SubpassMergeStatusEXT::eDisallowed: return "Disallowed"; + case SubpassMergeStatusEXT::eNotMergedSideEffects: return "NotMergedSideEffects"; + case SubpassMergeStatusEXT::eNotMergedSamplesMismatch: return "NotMergedSamplesMismatch"; + case SubpassMergeStatusEXT::eNotMergedViewsMismatch: return "NotMergedViewsMismatch"; + case SubpassMergeStatusEXT::eNotMergedAliasing: return "NotMergedAliasing"; + case SubpassMergeStatusEXT::eNotMergedDependencies: return "NotMergedDependencies"; + case SubpassMergeStatusEXT::eNotMergedIncompatibleInputAttachment: return "NotMergedIncompatibleInputAttachment"; + case SubpassMergeStatusEXT::eNotMergedTooManyAttachments: return "NotMergedTooManyAttachments"; + case SubpassMergeStatusEXT::eNotMergedInsufficientStorage: return "NotMergedInsufficientStorage"; + case SubpassMergeStatusEXT::eNotMergedDepthStencilCount: return "NotMergedDepthStencilCount"; + case SubpassMergeStatusEXT::eNotMergedResolveAttachmentReuse: return "NotMergedResolveAttachmentReuse"; + case SubpassMergeStatusEXT::eNotMergedSingleSubpass: return "NotMergedSingleSubpass"; + case SubpassMergeStatusEXT::eNotMergedUnspecified: return "NotMergedUnspecified"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_LUNARG_direct_driver_loading 
=== + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DirectDriverLoadingModeLUNARG value ) + { + switch ( value ) + { + case DirectDriverLoadingModeLUNARG::eExclusive: return "Exclusive"; + case DirectDriverLoadingModeLUNARG::eInclusive: return "Inclusive"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DirectDriverLoadingFlagBitsLUNARG ) + { + return "(void)"; + } + + //=== VK_NV_optical_flow === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( OpticalFlowUsageFlagBitsNV value ) + { + switch ( value ) + { + case OpticalFlowUsageFlagBitsNV::eUnknown: return "Unknown"; + case OpticalFlowUsageFlagBitsNV::eInput: return "Input"; + case OpticalFlowUsageFlagBitsNV::eOutput: return "Output"; + case OpticalFlowUsageFlagBitsNV::eHint: return "Hint"; + case OpticalFlowUsageFlagBitsNV::eCost: return "Cost"; + case OpticalFlowUsageFlagBitsNV::eGlobalFlow: return "GlobalFlow"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( OpticalFlowGridSizeFlagBitsNV value ) + { + switch ( value ) + { + case OpticalFlowGridSizeFlagBitsNV::eUnknown: return "Unknown"; + case OpticalFlowGridSizeFlagBitsNV::e1X1: return "1X1"; + case OpticalFlowGridSizeFlagBitsNV::e2X2: return "2X2"; + case OpticalFlowGridSizeFlagBitsNV::e4X4: return "4X4"; + case OpticalFlowGridSizeFlagBitsNV::e8X8: return "8X8"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( OpticalFlowPerformanceLevelNV value ) + { + switch ( value ) + { + case OpticalFlowPerformanceLevelNV::eUnknown: return "Unknown"; + case OpticalFlowPerformanceLevelNV::eSlow: return "Slow"; + case OpticalFlowPerformanceLevelNV::eMedium: return "Medium"; + case OpticalFlowPerformanceLevelNV::eFast: return "Fast"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( OpticalFlowSessionBindingPointNV value ) + { + switch ( value ) + { + case OpticalFlowSessionBindingPointNV::eUnknown: return "Unknown"; + case OpticalFlowSessionBindingPointNV::eInput: return "Input"; + case OpticalFlowSessionBindingPointNV::eReference: return "Reference"; + case OpticalFlowSessionBindingPointNV::eHint: return "Hint"; + case OpticalFlowSessionBindingPointNV::eFlowVector: return "FlowVector"; + case OpticalFlowSessionBindingPointNV::eBackwardFlowVector: return "BackwardFlowVector"; + case OpticalFlowSessionBindingPointNV::eCost: return "Cost"; + case OpticalFlowSessionBindingPointNV::eBackwardCost: return "BackwardCost"; + case OpticalFlowSessionBindingPointNV::eGlobalFlow: return "GlobalFlow"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( OpticalFlowSessionCreateFlagBitsNV value ) + { + switch ( value ) + { + case OpticalFlowSessionCreateFlagBitsNV::eEnableHint: return "EnableHint"; + case OpticalFlowSessionCreateFlagBitsNV::eEnableCost: return "EnableCost"; + case OpticalFlowSessionCreateFlagBitsNV::eEnableGlobalFlow: return "EnableGlobalFlow"; + case OpticalFlowSessionCreateFlagBitsNV::eAllowRegions: return "AllowRegions"; + case 
OpticalFlowSessionCreateFlagBitsNV::eBothDirections: return "BothDirections"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( OpticalFlowExecuteFlagBitsNV value ) + { + switch ( value ) + { + case OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints: return "DisableTemporalHints"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_AMD_anti_lag === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AntiLagModeAMD value ) + { + switch ( value ) + { + case AntiLagModeAMD::eDriverControl: return "DriverControl"; + case AntiLagModeAMD::eOn: return "On"; + case AntiLagModeAMD::eOff: return "Off"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AntiLagStageAMD value ) + { + switch ( value ) + { + case AntiLagStageAMD::eInput: return "Input"; + case AntiLagStageAMD::ePresent: return "Present"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_shader_object === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ShaderCreateFlagBitsEXT value ) + { + switch ( value ) + { + case ShaderCreateFlagBitsEXT::eLinkStage: return "LinkStage"; + case ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize: return "AllowVaryingSubgroupSize"; + case ShaderCreateFlagBitsEXT::eRequireFullSubgroups: return "RequireFullSubgroups"; + case ShaderCreateFlagBitsEXT::eNoTaskShader: return "NoTaskShader"; + case ShaderCreateFlagBitsEXT::eDispatchBase: return "DispatchBase"; + case ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment: return "FragmentShadingRateAttachment"; + case ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment: return "FragmentDensityMapAttachment"; + case ShaderCreateFlagBitsEXT::eIndirectBindable: return "IndirectBindable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ShaderCodeTypeEXT value ) + { + switch ( value ) + { + case ShaderCodeTypeEXT::eBinary: return "Binary"; + case ShaderCodeTypeEXT::eSpirv: return "Spirv"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_ray_tracing_invocation_reorder === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( RayTracingInvocationReorderModeNV value ) + { + switch ( value ) + { + case RayTracingInvocationReorderModeNV::eNone: return "None"; + case RayTracingInvocationReorderModeNV::eReorder: return "Reorder"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_cooperative_vector === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( CooperativeVectorMatrixLayoutNV value ) + { + switch ( value ) + { + case CooperativeVectorMatrixLayoutNV::eRowMajor: return "RowMajor"; + case CooperativeVectorMatrixLayoutNV::eColumnMajor: return "ColumnMajor"; + case CooperativeVectorMatrixLayoutNV::eInferencingOptimal: return "InferencingOptimal"; + case CooperativeVectorMatrixLayoutNV::eTrainingOptimal: return "TrainingOptimal"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + 
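For orientation, the generated to_string overloads in this header are free functions in VULKAN_HPP_NAMESPACE (vk by default) that return the stripped enumerant name and fall back to the "invalid ( ... )" hex form for values outside the enumeration. A minimal usage sketch, assuming the default vk namespace and that the overloads are reachable through a <vulkan/vulkan_to_string.hpp> include (the include path is an assumption, not taken from this diff):

#include <iostream>
#include <vulkan/vulkan_to_string.hpp>  // assumed include; provides the to_string overloads shown in this diff

int main()
{
  // Known enumerant: prints "RowMajor".
  std::cout << vk::to_string( vk::CooperativeVectorMatrixLayoutNV::eRowMajor ) << '\n';

  // Out-of-range value: hits the default case and prints the "invalid ( ... )" hex fallback.
  std::cout << vk::to_string( static_cast<vk::CooperativeVectorMatrixLayoutNV>( 0x7f ) ) << '\n';
  return 0;
}

Because the fallback never throws, these overloads are convenient in logging paths even for enum values coming from drivers or newer extensions than the header was generated against.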
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ComponentTypeKHR value ) + { + switch ( value ) + { + case ComponentTypeKHR::eFloat16: return "Float16"; + case ComponentTypeKHR::eFloat32: return "Float32"; + case ComponentTypeKHR::eFloat64: return "Float64"; + case ComponentTypeKHR::eSint8: return "Sint8"; + case ComponentTypeKHR::eSint16: return "Sint16"; + case ComponentTypeKHR::eSint32: return "Sint32"; + case ComponentTypeKHR::eSint64: return "Sint64"; + case ComponentTypeKHR::eUint8: return "Uint8"; + case ComponentTypeKHR::eUint16: return "Uint16"; + case ComponentTypeKHR::eUint32: return "Uint32"; + case ComponentTypeKHR::eUint64: return "Uint64"; + case ComponentTypeKHR::eBfloat16: return "Bfloat16"; + case ComponentTypeKHR::eSint8PackedNV: return "Sint8PackedNV"; + case ComponentTypeKHR::eUint8PackedNV: return "Uint8PackedNV"; + case ComponentTypeKHR::eFloatE4M3NV: return "FloatE4M3NV"; + case ComponentTypeKHR::eFloatE5M2NV: return "FloatE5M2NV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_layer_settings === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( LayerSettingTypeEXT value ) + { + switch ( value ) + { + case LayerSettingTypeEXT::eBool32: return "Bool32"; + case LayerSettingTypeEXT::eInt32: return "Int32"; + case LayerSettingTypeEXT::eInt64: return "Int64"; + case LayerSettingTypeEXT::eUint32: return "Uint32"; + case LayerSettingTypeEXT::eUint64: return "Uint64"; + case LayerSettingTypeEXT::eFloat32: return "Float32"; + case LayerSettingTypeEXT::eFloat64: return "Float64"; + case LayerSettingTypeEXT::eString: return "String"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_low_latency2 === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( LatencyMarkerNV value ) + { + switch ( value ) + { + case LatencyMarkerNV::eSimulationStart: return "SimulationStart"; + case LatencyMarkerNV::eSimulationEnd: return "SimulationEnd"; + case LatencyMarkerNV::eRendersubmitStart: return "RendersubmitStart"; + case LatencyMarkerNV::eRendersubmitEnd: return "RendersubmitEnd"; + case LatencyMarkerNV::ePresentStart: return "PresentStart"; + case LatencyMarkerNV::ePresentEnd: return "PresentEnd"; + case LatencyMarkerNV::eInputSample: return "InputSample"; + case LatencyMarkerNV::eTriggerFlash: return "TriggerFlash"; + case LatencyMarkerNV::eOutOfBandRendersubmitStart: return "OutOfBandRendersubmitStart"; + case LatencyMarkerNV::eOutOfBandRendersubmitEnd: return "OutOfBandRendersubmitEnd"; + case LatencyMarkerNV::eOutOfBandPresentStart: return "OutOfBandPresentStart"; + case LatencyMarkerNV::eOutOfBandPresentEnd: return "OutOfBandPresentEnd"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( OutOfBandQueueTypeNV value ) + { + switch ( value ) + { + case OutOfBandQueueTypeNV::eRender: return "Render"; + case OutOfBandQueueTypeNV::ePresent: return "Present"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_cooperative_matrix === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ScopeKHR value ) + { + switch ( value ) + { + case ScopeKHR::eDevice: return "Device"; + case ScopeKHR::eWorkgroup: return "Workgroup"; + case ScopeKHR::eSubgroup: return "Subgroup"; + case 
ScopeKHR::eQueueFamily: return "QueueFamily"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_video_encode_av1 === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeAV1PredictionModeKHR value ) + { + switch ( value ) + { + case VideoEncodeAV1PredictionModeKHR::eIntraOnly: return "IntraOnly"; + case VideoEncodeAV1PredictionModeKHR::eSingleReference: return "SingleReference"; + case VideoEncodeAV1PredictionModeKHR::eUnidirectionalCompound: return "UnidirectionalCompound"; + case VideoEncodeAV1PredictionModeKHR::eBidirectionalCompound: return "BidirectionalCompound"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeAV1RateControlGroupKHR value ) + { + switch ( value ) + { + case VideoEncodeAV1RateControlGroupKHR::eIntra: return "Intra"; + case VideoEncodeAV1RateControlGroupKHR::ePredictive: return "Predictive"; + case VideoEncodeAV1RateControlGroupKHR::eBipredictive: return "Bipredictive"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeAV1CapabilityFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeAV1CapabilityFlagBitsKHR::ePerRateControlGroupMinMaxQIndex: return "PerRateControlGroupMinMaxQIndex"; + case VideoEncodeAV1CapabilityFlagBitsKHR::eGenerateObuExtensionHeader: return "GenerateObuExtensionHeader"; + case VideoEncodeAV1CapabilityFlagBitsKHR::ePrimaryReferenceCdfOnly: return "PrimaryReferenceCdfOnly"; + case VideoEncodeAV1CapabilityFlagBitsKHR::eFrameSizeOverride: return "FrameSizeOverride"; + case VideoEncodeAV1CapabilityFlagBitsKHR::eMotionVectorScaling: return "MotionVectorScaling"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeAV1StdFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeAV1StdFlagBitsKHR::eUniformTileSpacingFlagSet: return "UniformTileSpacingFlagSet"; + case VideoEncodeAV1StdFlagBitsKHR::eSkipModePresentUnset: return "SkipModePresentUnset"; + case VideoEncodeAV1StdFlagBitsKHR::ePrimaryRefFrame: return "PrimaryRefFrame"; + case VideoEncodeAV1StdFlagBitsKHR::eDeltaQ: return "DeltaQ"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeAV1SuperblockSizeFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeAV1SuperblockSizeFlagBitsKHR::e64: return "64"; + case VideoEncodeAV1SuperblockSizeFlagBitsKHR::e128: return "128"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( VideoEncodeAV1RateControlFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeAV1RateControlFlagBitsKHR::eRegularGop: return "RegularGop"; + case VideoEncodeAV1RateControlFlagBitsKHR::eTemporalLayerPatternDyadic: return "TemporalLayerPatternDyadic"; + case VideoEncodeAV1RateControlFlagBitsKHR::eReferencePatternFlat: return "ReferencePatternFlat"; + case VideoEncodeAV1RateControlFlagBitsKHR::eReferencePatternDyadic: return "ReferencePatternDyadic"; + default: return "invalid ( " + 
VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_QCOM_image_processing2 === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( BlockMatchWindowCompareModeQCOM value ) + { + switch ( value ) + { + case BlockMatchWindowCompareModeQCOM::eMin: return "Min"; + case BlockMatchWindowCompareModeQCOM::eMax: return "Max"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_QCOM_filter_cubic_weights === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( CubicFilterWeightsQCOM value ) + { + switch ( value ) + { + case CubicFilterWeightsQCOM::eCatmullRom: return "CatmullRom"; + case CubicFilterWeightsQCOM::eZeroTangentCardinal: return "ZeroTangentCardinal"; + case CubicFilterWeightsQCOM::eBSpline: return "BSpline"; + case CubicFilterWeightsQCOM::eMitchellNetravali: return "MitchellNetravali"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_MSFT_layered_driver === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( LayeredDriverUnderlyingApiMSFT value ) + { + switch ( value ) + { + case LayeredDriverUnderlyingApiMSFT::eNone: return "None"; + case LayeredDriverUnderlyingApiMSFT::eD3D12: return "D3D12"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_calibrated_timestamps === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( TimeDomainKHR value ) + { + switch ( value ) + { + case TimeDomainKHR::eDevice: return "Device"; + case TimeDomainKHR::eClockMonotonic: return "ClockMonotonic"; + case TimeDomainKHR::eClockMonotonicRaw: return "ClockMonotonicRaw"; + case TimeDomainKHR::eQueryPerformanceCounter: return "QueryPerformanceCounter"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_display_stereo === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DisplaySurfaceStereoTypeNV value ) + { + switch ( value ) + { + case DisplaySurfaceStereoTypeNV::eNone: return "None"; + case DisplaySurfaceStereoTypeNV::eOnboardDin: return "OnboardDin"; + case DisplaySurfaceStereoTypeNV::eHdmi3D: return "Hdmi3D"; + case DisplaySurfaceStereoTypeNV::eInbandDisplayport: return "InbandDisplayport"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_maintenance7 === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PhysicalDeviceLayeredApiKHR value ) + { + switch ( value ) + { + case PhysicalDeviceLayeredApiKHR::eVulkan: return "Vulkan"; + case PhysicalDeviceLayeredApiKHR::eD3D12: return "D3D12"; + case PhysicalDeviceLayeredApiKHR::eMetal: return "Metal"; + case PhysicalDeviceLayeredApiKHR::eOpengl: return "Opengl"; + case PhysicalDeviceLayeredApiKHR::eOpengles: return "Opengles"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_cluster_acceleration_structure === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ClusterAccelerationStructureClusterFlagBitsNV value ) + { + switch ( value ) + { + case ClusterAccelerationStructureClusterFlagBitsNV::eAllowDisableOpacityMicromaps: return "AllowDisableOpacityMicromaps"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE 
VULKAN_HPP_CONSTEXPR_20 std::string to_string( ClusterAccelerationStructureGeometryFlagBitsNV value ) + { + switch ( value ) + { + case ClusterAccelerationStructureGeometryFlagBitsNV::eCullDisable: return "CullDisable"; + case ClusterAccelerationStructureGeometryFlagBitsNV::eNoDuplicateAnyhitInvocation: return "NoDuplicateAnyhitInvocation"; + case ClusterAccelerationStructureGeometryFlagBitsNV::eOpaque: return "Opaque"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ClusterAccelerationStructureAddressResolutionFlagBitsNV value ) + { + switch ( value ) + { + case ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedDstImplicitData: return "IndirectedDstImplicitData"; + case ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedScratchData: return "IndirectedScratchData"; + case ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedDstAddressArray: return "IndirectedDstAddressArray"; + case ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedDstSizesArray: return "IndirectedDstSizesArray"; + case ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedSrcInfosArray: return "IndirectedSrcInfosArray"; + case ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedSrcInfosCount: return "IndirectedSrcInfosCount"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ClusterAccelerationStructureIndexFormatFlagBitsNV value ) + { + switch ( value ) + { + case ClusterAccelerationStructureIndexFormatFlagBitsNV::e8: return "8"; + case ClusterAccelerationStructureIndexFormatFlagBitsNV::e16: return "16"; + case ClusterAccelerationStructureIndexFormatFlagBitsNV::e32: return "32"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ClusterAccelerationStructureTypeNV value ) + { + switch ( value ) + { + case ClusterAccelerationStructureTypeNV::eClustersBottomLevel: return "ClustersBottomLevel"; + case ClusterAccelerationStructureTypeNV::eTriangleCluster: return "TriangleCluster"; + case ClusterAccelerationStructureTypeNV::eTriangleClusterTemplate: return "TriangleClusterTemplate"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ClusterAccelerationStructureOpTypeNV value ) + { + switch ( value ) + { + case ClusterAccelerationStructureOpTypeNV::eMoveObjects: return "MoveObjects"; + case ClusterAccelerationStructureOpTypeNV::eBuildClustersBottomLevel: return "BuildClustersBottomLevel"; + case ClusterAccelerationStructureOpTypeNV::eBuildTriangleCluster: return "BuildTriangleCluster"; + case ClusterAccelerationStructureOpTypeNV::eBuildTriangleClusterTemplate: return "BuildTriangleClusterTemplate"; + case ClusterAccelerationStructureOpTypeNV::eInstantiateTriangleCluster: return "InstantiateTriangleCluster"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( ClusterAccelerationStructureOpModeNV value ) + { + switch ( value ) + { + case ClusterAccelerationStructureOpModeNV::eImplicitDestinations: return 
"ImplicitDestinations"; + case ClusterAccelerationStructureOpModeNV::eExplicitDestinations: return "ExplicitDestinations"; + case ClusterAccelerationStructureOpModeNV::eComputeSizes: return "ComputeSizes"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_partitioned_acceleration_structure === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PartitionedAccelerationStructureOpTypeNV value ) + { + switch ( value ) + { + case PartitionedAccelerationStructureOpTypeNV::eWriteInstance: return "WriteInstance"; + case PartitionedAccelerationStructureOpTypeNV::eUpdateInstance: return "UpdateInstance"; + case PartitionedAccelerationStructureOpTypeNV::eWritePartitionTranslation: return "WritePartitionTranslation"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( PartitionedAccelerationStructureInstanceFlagBitsNV value ) + { + switch ( value ) + { + case PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagTriangleFacingCullDisable: return "FlagTriangleFacingCullDisable"; + case PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagTriangleFlipFacing: return "FlagTriangleFlipFacing"; + case PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagForceOpaque: return "FlagForceOpaque"; + case PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagForceNoOpaque: return "FlagForceNoOpaque"; + case PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagEnableExplicitBoundingBox: return "FlagEnableExplicitBoundingBox"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_device_generated_commands === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( IndirectCommandsTokenTypeEXT value ) + { + switch ( value ) + { + case IndirectCommandsTokenTypeEXT::eExecutionSet: return "ExecutionSet"; + case IndirectCommandsTokenTypeEXT::ePushConstant: return "PushConstant"; + case IndirectCommandsTokenTypeEXT::eSequenceIndex: return "SequenceIndex"; + case IndirectCommandsTokenTypeEXT::eIndexBuffer: return "IndexBuffer"; + case IndirectCommandsTokenTypeEXT::eVertexBuffer: return "VertexBuffer"; + case IndirectCommandsTokenTypeEXT::eDrawIndexed: return "DrawIndexed"; + case IndirectCommandsTokenTypeEXT::eDraw: return "Draw"; + case IndirectCommandsTokenTypeEXT::eDrawIndexedCount: return "DrawIndexedCount"; + case IndirectCommandsTokenTypeEXT::eDrawCount: return "DrawCount"; + case IndirectCommandsTokenTypeEXT::eDispatch: return "Dispatch"; + case IndirectCommandsTokenTypeEXT::eDrawMeshTasksNV: return "DrawMeshTasksNV"; + case IndirectCommandsTokenTypeEXT::eDrawMeshTasksCountNV: return "DrawMeshTasksCountNV"; + case IndirectCommandsTokenTypeEXT::eDrawMeshTasks: return "DrawMeshTasks"; + case IndirectCommandsTokenTypeEXT::eDrawMeshTasksCount: return "DrawMeshTasksCount"; + case IndirectCommandsTokenTypeEXT::eTraceRays2: return "TraceRays2"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( IndirectExecutionSetInfoTypeEXT value ) + { + switch ( value ) + { + case IndirectExecutionSetInfoTypeEXT::ePipelines: return "Pipelines"; + case IndirectExecutionSetInfoTypeEXT::eShaderObjects: return "ShaderObjects"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( 
value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( IndirectCommandsLayoutUsageFlagBitsEXT value ) + { + switch ( value ) + { + case IndirectCommandsLayoutUsageFlagBitsEXT::eExplicitPreprocess: return "ExplicitPreprocess"; + case IndirectCommandsLayoutUsageFlagBitsEXT::eUnorderedSequences: return "UnorderedSequences"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( IndirectCommandsInputModeFlagBitsEXT value ) + { + switch ( value ) + { + case IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer: return "VulkanIndexBuffer"; + case IndirectCommandsInputModeFlagBitsEXT::eDxgiIndexBuffer: return "DxgiIndexBuffer"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_maintenance8 === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( AccessFlagBits3KHR value ) + { + switch ( value ) + { + case AccessFlagBits3KHR::eNone: return "None"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_depth_clamp_control === + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DepthClampModeEXT value ) + { + switch ( value ) + { + case DepthClampModeEXT::eViewportRange: return "ViewportRange"; + case DepthClampModeEXT::eUserDefinedRange: return "UserDefinedRange"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +} // namespace VULKAN_HPP_NAMESPACE + +#if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +#elif defined( _MSC_VER ) +# pragma warning( pop ) +#endif + +#endif \ No newline at end of file diff --git a/include/vulkan/vulkan_vi.h b/include/vulkan/vulkan_vi.h new file mode 100644 index 00000000..a30bfb1b --- /dev/null +++ b/include/vulkan/vulkan_vi.h @@ -0,0 +1,48 @@ +#ifndef VULKAN_VI_H_ +#define VULKAN_VI_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_NN_vi_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_NN_vi_surface 1 +#define VK_NN_VI_SURFACE_SPEC_VERSION 1 +#define VK_NN_VI_SURFACE_EXTENSION_NAME "VK_NN_vi_surface" +typedef VkFlags VkViSurfaceCreateFlagsNN; +typedef struct VkViSurfaceCreateInfoNN { + VkStructureType sType; + const void* pNext; + VkViSurfaceCreateFlagsNN flags; + void* window; +} VkViSurfaceCreateInfoNN; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateViSurfaceNN)(VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN( + VkInstance instance, + const VkViSurfaceCreateInfoNN* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_video.hpp b/include/vulkan/vulkan_video.hpp new file mode 100644 index 00000000..28f1eddf --- /dev/null +++ b/include/vulkan/vulkan_video.hpp @@ -0,0 +1,4937 @@ +// Copyright 2021-2025 The Khronos Group Inc. +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+ +#ifndef VULKAN_VIDEO_HPP +#define VULKAN_VIDEO_HPP + +// clang-format off +#include +// clang-format on + +#include +#include +#if ( 301 < VK_HEADER_VERSION ) +# include +#endif +#include +#include +#include +#include +#include +#include +#include + +#if !defined( VULKAN_HPP_VIDEO_NAMESPACE ) +# define VULKAN_HPP_VIDEO_NAMESPACE video +#endif + +namespace VULKAN_HPP_NAMESPACE +{ + namespace VULKAN_HPP_VIDEO_NAMESPACE + { + + //============= + //=== ENUMs === + //============= + + //=== vulkan_video_codec_h264std === + + enum class H264ChromaFormatIdc + { + eMonochrome = STD_VIDEO_H264_CHROMA_FORMAT_IDC_MONOCHROME, + e420 = STD_VIDEO_H264_CHROMA_FORMAT_IDC_420, + e422 = STD_VIDEO_H264_CHROMA_FORMAT_IDC_422, + e444 = STD_VIDEO_H264_CHROMA_FORMAT_IDC_444, + eInvalid = STD_VIDEO_H264_CHROMA_FORMAT_IDC_INVALID + }; + + enum class H264ProfileIdc + { + eBaseline = STD_VIDEO_H264_PROFILE_IDC_BASELINE, + eMain = STD_VIDEO_H264_PROFILE_IDC_MAIN, + eHigh = STD_VIDEO_H264_PROFILE_IDC_HIGH, + eHigh444Predictive = STD_VIDEO_H264_PROFILE_IDC_HIGH_444_PREDICTIVE, + eInvalid = STD_VIDEO_H264_PROFILE_IDC_INVALID + }; + + enum class H264LevelIdc + { + e1_0 = STD_VIDEO_H264_LEVEL_IDC_1_0, + e1_1 = STD_VIDEO_H264_LEVEL_IDC_1_1, + e1_2 = STD_VIDEO_H264_LEVEL_IDC_1_2, + e1_3 = STD_VIDEO_H264_LEVEL_IDC_1_3, + e2_0 = STD_VIDEO_H264_LEVEL_IDC_2_0, + e2_1 = STD_VIDEO_H264_LEVEL_IDC_2_1, + e2_2 = STD_VIDEO_H264_LEVEL_IDC_2_2, + e3_0 = STD_VIDEO_H264_LEVEL_IDC_3_0, + e3_1 = STD_VIDEO_H264_LEVEL_IDC_3_1, + e3_2 = STD_VIDEO_H264_LEVEL_IDC_3_2, + e4_0 = STD_VIDEO_H264_LEVEL_IDC_4_0, + e4_1 = STD_VIDEO_H264_LEVEL_IDC_4_1, + e4_2 = STD_VIDEO_H264_LEVEL_IDC_4_2, + e5_0 = STD_VIDEO_H264_LEVEL_IDC_5_0, + e5_1 = STD_VIDEO_H264_LEVEL_IDC_5_1, + e5_2 = STD_VIDEO_H264_LEVEL_IDC_5_2, + e6_0 = STD_VIDEO_H264_LEVEL_IDC_6_0, + e6_1 = STD_VIDEO_H264_LEVEL_IDC_6_1, + e6_2 = STD_VIDEO_H264_LEVEL_IDC_6_2, + eInvalid = STD_VIDEO_H264_LEVEL_IDC_INVALID + }; + + enum class H264PocType + { + e0 = STD_VIDEO_H264_POC_TYPE_0, + e1 = STD_VIDEO_H264_POC_TYPE_1, + e2 = STD_VIDEO_H264_POC_TYPE_2, + eInvalid = STD_VIDEO_H264_POC_TYPE_INVALID + }; + + enum class H264AspectRatioIdc + { + eUnspecified = STD_VIDEO_H264_ASPECT_RATIO_IDC_UNSPECIFIED, + eSquare = STD_VIDEO_H264_ASPECT_RATIO_IDC_SQUARE, + e12_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_12_11, + e10_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_10_11, + e16_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_16_11, + e40_33 = STD_VIDEO_H264_ASPECT_RATIO_IDC_40_33, + e24_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_24_11, + e20_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_20_11, + e32_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_32_11, + e80_33 = STD_VIDEO_H264_ASPECT_RATIO_IDC_80_33, + e18_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_18_11, + e15_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_15_11, + e64_33 = STD_VIDEO_H264_ASPECT_RATIO_IDC_64_33, + e160_99 = STD_VIDEO_H264_ASPECT_RATIO_IDC_160_99, + e4_3 = STD_VIDEO_H264_ASPECT_RATIO_IDC_4_3, + e3_2 = STD_VIDEO_H264_ASPECT_RATIO_IDC_3_2, + e2_1 = STD_VIDEO_H264_ASPECT_RATIO_IDC_2_1, + eExtendedSar = STD_VIDEO_H264_ASPECT_RATIO_IDC_EXTENDED_SAR, + eInvalid = STD_VIDEO_H264_ASPECT_RATIO_IDC_INVALID + }; + + enum class H264WeightedBipredIdc + { + eDefault = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_DEFAULT, + eExplicit = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_EXPLICIT, + eImplicit = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_IMPLICIT, + eInvalid = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_INVALID + }; + + enum class H264ModificationOfPicNumsIdc + { + eShortTermSubtract = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_SUBTRACT, + 
eShortTermAdd = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_ADD, + eLongTerm = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_LONG_TERM, + eEnd = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_END, + eInvalid = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_INVALID + }; + + enum class H264MemMgmtControlOp + { + eEnd = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_END, + eUnmarkShortTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_SHORT_TERM, + eUnmarkLongTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_LONG_TERM, + eMarkLongTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_LONG_TERM, + eSetMaxLongTermIndex = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_SET_MAX_LONG_TERM_INDEX, + eUnmarkAll = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_ALL, + eMarkCurrentAsLongTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_CURRENT_AS_LONG_TERM, + eInvalid = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_INVALID + }; + + enum class H264CabacInitIdc + { + e0 = STD_VIDEO_H264_CABAC_INIT_IDC_0, + e1 = STD_VIDEO_H264_CABAC_INIT_IDC_1, + e2 = STD_VIDEO_H264_CABAC_INIT_IDC_2, + eInvalid = STD_VIDEO_H264_CABAC_INIT_IDC_INVALID + }; + + enum class H264DisableDeblockingFilterIdc + { + eDisabled = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_DISABLED, + eEnabled = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_ENABLED, + ePartial = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_PARTIAL, + eInvalid = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_INVALID + }; + + enum class H264SliceType + { + eP = STD_VIDEO_H264_SLICE_TYPE_P, + eB = STD_VIDEO_H264_SLICE_TYPE_B, + eI = STD_VIDEO_H264_SLICE_TYPE_I, + eInvalid = STD_VIDEO_H264_SLICE_TYPE_INVALID + }; + + enum class H264PictureType + { + eP = STD_VIDEO_H264_PICTURE_TYPE_P, + eB = STD_VIDEO_H264_PICTURE_TYPE_B, + eI = STD_VIDEO_H264_PICTURE_TYPE_I, + eIdr = STD_VIDEO_H264_PICTURE_TYPE_IDR, + eInvalid = STD_VIDEO_H264_PICTURE_TYPE_INVALID + }; + + enum class H264NonVclNaluType + { + eSps = STD_VIDEO_H264_NON_VCL_NALU_TYPE_SPS, + ePps = STD_VIDEO_H264_NON_VCL_NALU_TYPE_PPS, + eAud = STD_VIDEO_H264_NON_VCL_NALU_TYPE_AUD, + ePrefix = STD_VIDEO_H264_NON_VCL_NALU_TYPE_PREFIX, + eEndOfSequence = STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_SEQUENCE, + eEndOfStream = STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_STREAM, + ePrecoded = STD_VIDEO_H264_NON_VCL_NALU_TYPE_PRECODED, + eInvalid = STD_VIDEO_H264_NON_VCL_NALU_TYPE_INVALID + }; + + //=== vulkan_video_codec_h264std_decode === + + enum class DecodeH264FieldOrderCount + { + eTop = STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_TOP, + eBottom = STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_BOTTOM, + eInvalid = STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_INVALID + }; + + //=== vulkan_video_codec_h265std === + + enum class H265ChromaFormatIdc + { + eMonochrome = STD_VIDEO_H265_CHROMA_FORMAT_IDC_MONOCHROME, + e420 = STD_VIDEO_H265_CHROMA_FORMAT_IDC_420, + e422 = STD_VIDEO_H265_CHROMA_FORMAT_IDC_422, + e444 = STD_VIDEO_H265_CHROMA_FORMAT_IDC_444, + eInvalid = STD_VIDEO_H265_CHROMA_FORMAT_IDC_INVALID + }; + + enum class H265ProfileIdc + { + eMain = STD_VIDEO_H265_PROFILE_IDC_MAIN, + eMain10 = STD_VIDEO_H265_PROFILE_IDC_MAIN_10, + eMainStillPicture = STD_VIDEO_H265_PROFILE_IDC_MAIN_STILL_PICTURE, + eFormatRangeExtensions = STD_VIDEO_H265_PROFILE_IDC_FORMAT_RANGE_EXTENSIONS, + eSccExtensions = STD_VIDEO_H265_PROFILE_IDC_SCC_EXTENSIONS, + eInvalid = STD_VIDEO_H265_PROFILE_IDC_INVALID + }; + + enum class H265LevelIdc + { + e1_0 = STD_VIDEO_H265_LEVEL_IDC_1_0, + e2_0 = STD_VIDEO_H265_LEVEL_IDC_2_0, + e2_1 = STD_VIDEO_H265_LEVEL_IDC_2_1, + e3_0 = STD_VIDEO_H265_LEVEL_IDC_3_0, + e3_1 = 
STD_VIDEO_H265_LEVEL_IDC_3_1, + e4_0 = STD_VIDEO_H265_LEVEL_IDC_4_0, + e4_1 = STD_VIDEO_H265_LEVEL_IDC_4_1, + e5_0 = STD_VIDEO_H265_LEVEL_IDC_5_0, + e5_1 = STD_VIDEO_H265_LEVEL_IDC_5_1, + e5_2 = STD_VIDEO_H265_LEVEL_IDC_5_2, + e6_0 = STD_VIDEO_H265_LEVEL_IDC_6_0, + e6_1 = STD_VIDEO_H265_LEVEL_IDC_6_1, + e6_2 = STD_VIDEO_H265_LEVEL_IDC_6_2, + eInvalid = STD_VIDEO_H265_LEVEL_IDC_INVALID + }; + + enum class H265SliceType + { + eB = STD_VIDEO_H265_SLICE_TYPE_B, + eP = STD_VIDEO_H265_SLICE_TYPE_P, + eI = STD_VIDEO_H265_SLICE_TYPE_I, + eInvalid = STD_VIDEO_H265_SLICE_TYPE_INVALID + }; + + enum class H265PictureType + { + eP = STD_VIDEO_H265_PICTURE_TYPE_P, + eB = STD_VIDEO_H265_PICTURE_TYPE_B, + eI = STD_VIDEO_H265_PICTURE_TYPE_I, + eIdr = STD_VIDEO_H265_PICTURE_TYPE_IDR, + eInvalid = STD_VIDEO_H265_PICTURE_TYPE_INVALID + }; + + enum class H265AspectRatioIdc + { + eUnspecified = STD_VIDEO_H265_ASPECT_RATIO_IDC_UNSPECIFIED, + eSquare = STD_VIDEO_H265_ASPECT_RATIO_IDC_SQUARE, + e12_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_12_11, + e10_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_10_11, + e16_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_16_11, + e40_33 = STD_VIDEO_H265_ASPECT_RATIO_IDC_40_33, + e24_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_24_11, + e20_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_20_11, + e32_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_32_11, + e80_33 = STD_VIDEO_H265_ASPECT_RATIO_IDC_80_33, + e18_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_18_11, + e15_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_15_11, + e64_33 = STD_VIDEO_H265_ASPECT_RATIO_IDC_64_33, + e160_99 = STD_VIDEO_H265_ASPECT_RATIO_IDC_160_99, + e4_3 = STD_VIDEO_H265_ASPECT_RATIO_IDC_4_3, + e3_2 = STD_VIDEO_H265_ASPECT_RATIO_IDC_3_2, + e2_1 = STD_VIDEO_H265_ASPECT_RATIO_IDC_2_1, + eExtendedSar = STD_VIDEO_H265_ASPECT_RATIO_IDC_EXTENDED_SAR, + eInvalid = STD_VIDEO_H265_ASPECT_RATIO_IDC_INVALID + }; + + //=== vulkan_video_codec_av1std === + + enum class AV1Profile + { + eMain = STD_VIDEO_AV1_PROFILE_MAIN, + eHigh = STD_VIDEO_AV1_PROFILE_HIGH, + eProfessional = STD_VIDEO_AV1_PROFILE_PROFESSIONAL, + eInvalid = STD_VIDEO_AV1_PROFILE_INVALID + }; + + enum class AV1Level + { + e2_0 = STD_VIDEO_AV1_LEVEL_2_0, + e2_1 = STD_VIDEO_AV1_LEVEL_2_1, + e2_2 = STD_VIDEO_AV1_LEVEL_2_2, + e2_3 = STD_VIDEO_AV1_LEVEL_2_3, + e3_0 = STD_VIDEO_AV1_LEVEL_3_0, + e3_1 = STD_VIDEO_AV1_LEVEL_3_1, + e3_2 = STD_VIDEO_AV1_LEVEL_3_2, + e3_3 = STD_VIDEO_AV1_LEVEL_3_3, + e4_0 = STD_VIDEO_AV1_LEVEL_4_0, + e4_1 = STD_VIDEO_AV1_LEVEL_4_1, + e4_2 = STD_VIDEO_AV1_LEVEL_4_2, + e4_3 = STD_VIDEO_AV1_LEVEL_4_3, + e5_0 = STD_VIDEO_AV1_LEVEL_5_0, + e5_1 = STD_VIDEO_AV1_LEVEL_5_1, + e5_2 = STD_VIDEO_AV1_LEVEL_5_2, + e5_3 = STD_VIDEO_AV1_LEVEL_5_3, + e6_0 = STD_VIDEO_AV1_LEVEL_6_0, + e6_1 = STD_VIDEO_AV1_LEVEL_6_1, + e6_2 = STD_VIDEO_AV1_LEVEL_6_2, + e6_3 = STD_VIDEO_AV1_LEVEL_6_3, + e7_0 = STD_VIDEO_AV1_LEVEL_7_0, + e7_1 = STD_VIDEO_AV1_LEVEL_7_1, + e7_2 = STD_VIDEO_AV1_LEVEL_7_2, + e7_3 = STD_VIDEO_AV1_LEVEL_7_3, + eInvalid = STD_VIDEO_AV1_LEVEL_INVALID + }; + + enum class AV1FrameType + { + eKey = STD_VIDEO_AV1_FRAME_TYPE_KEY, + eInter = STD_VIDEO_AV1_FRAME_TYPE_INTER, + eIntraOnly = STD_VIDEO_AV1_FRAME_TYPE_INTRA_ONLY, + eSwitch = STD_VIDEO_AV1_FRAME_TYPE_SWITCH, + eInvalid = STD_VIDEO_AV1_FRAME_TYPE_INVALID + }; + + enum class AV1ReferenceName + { + eIntraFrame = STD_VIDEO_AV1_REFERENCE_NAME_INTRA_FRAME, + eLastFrame = STD_VIDEO_AV1_REFERENCE_NAME_LAST_FRAME, + eLast2Frame = STD_VIDEO_AV1_REFERENCE_NAME_LAST2_FRAME, + eLast3Frame = STD_VIDEO_AV1_REFERENCE_NAME_LAST3_FRAME, + eGoldenFrame = 
STD_VIDEO_AV1_REFERENCE_NAME_GOLDEN_FRAME, + eBwdrefFrame = STD_VIDEO_AV1_REFERENCE_NAME_BWDREF_FRAME, + eAltref2Frame = STD_VIDEO_AV1_REFERENCE_NAME_ALTREF2_FRAME, + eAltrefFrame = STD_VIDEO_AV1_REFERENCE_NAME_ALTREF_FRAME, + eInvalid = STD_VIDEO_AV1_REFERENCE_NAME_INVALID + }; + + enum class AV1InterpolationFilter + { + eEighttap = STD_VIDEO_AV1_INTERPOLATION_FILTER_EIGHTTAP, + eEighttapSmooth = STD_VIDEO_AV1_INTERPOLATION_FILTER_EIGHTTAP_SMOOTH, + eEighttapSharp = STD_VIDEO_AV1_INTERPOLATION_FILTER_EIGHTTAP_SHARP, + eBilinear = STD_VIDEO_AV1_INTERPOLATION_FILTER_BILINEAR, + eSwitchable = STD_VIDEO_AV1_INTERPOLATION_FILTER_SWITCHABLE, + eInvalid = STD_VIDEO_AV1_INTERPOLATION_FILTER_INVALID + }; + + enum class AV1TxMode + { + eOnly4X4 = STD_VIDEO_AV1_TX_MODE_ONLY_4X4, + eLargest = STD_VIDEO_AV1_TX_MODE_LARGEST, + eSelect = STD_VIDEO_AV1_TX_MODE_SELECT, + eInvalid = STD_VIDEO_AV1_TX_MODE_INVALID + }; + + enum class AV1FrameRestorationType + { + eNone = STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_NONE, + eWiener = STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_WIENER, + eSgrproj = STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_SGRPROJ, + eSwitchable = STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_SWITCHABLE, + eInvalid = STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_INVALID + }; + + enum class AV1ColorPrimaries + { + eBt709 = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_709, + eUnspecified = STD_VIDEO_AV1_COLOR_PRIMARIES_UNSPECIFIED, + eBtUnspecified VULKAN_HPP_DEPRECATED_17( "eBtUnspecified is deprecated, eUnspecified should be used instead." ) = + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_UNSPECIFIED, + eBt470M = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_470_M, + eBt470BG = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_470_B_G, + eBt601 = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_601, + eSmpte240 = STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_240, + eGenericFilm = STD_VIDEO_AV1_COLOR_PRIMARIES_GENERIC_FILM, + eBt2020 = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_2020, + eXyz = STD_VIDEO_AV1_COLOR_PRIMARIES_XYZ, + eSmpte431 = STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_431, + eSmpte432 = STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_432, + eEbu3213 = STD_VIDEO_AV1_COLOR_PRIMARIES_EBU_3213, + eInvalid = STD_VIDEO_AV1_COLOR_PRIMARIES_INVALID + }; + + enum class AV1TransferCharacteristics + { + eReserved0 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_RESERVED_0, + eBt709 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_709, + eUnspecified = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_UNSPECIFIED, + eReserved3 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_RESERVED_3, + eBt470M = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_470_M, + eBt470BG = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_470_B_G, + eBt601 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_601, + eSmpte240 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SMPTE_240, + eLinear = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_LINEAR, + eLog100 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_LOG_100, + eLog100Sqrt10 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_LOG_100_SQRT10, + eIec61966 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_IEC_61966, + eBt1361 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_1361, + eSrgb = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SRGB, + eBt2020_10Bit = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_2020_10_BIT, + eBt2020_12Bit = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_2020_12_BIT, + eSmpte2084 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SMPTE_2084, + eSmpte428 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SMPTE_428, + eHlg = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_HLG, + eInvalid = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_INVALID + }; + + enum class AV1MatrixCoefficients + { + eIdentity = 
STD_VIDEO_AV1_MATRIX_COEFFICIENTS_IDENTITY, + eBt709 = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_709, + eUnspecified = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_UNSPECIFIED, + eReserved3 = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_RESERVED_3, + eFcc = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_FCC, + eBt470BG = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_470_B_G, + eBt601 = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_601, + eSmpte240 = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_SMPTE_240, + eSmpteYcgco = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_SMPTE_YCGCO, + eBt2020Ncl = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_2020_NCL, + eBt2020Cl = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_2020_CL, + eSmpte2085 = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_SMPTE_2085, + eChromatNcl = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_CHROMAT_NCL, + eChromatCl = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_CHROMAT_CL, + eIctcp = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_ICTCP, + eInvalid = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_INVALID + }; + + enum class AV1ChromaSamplePosition + { + eUnknown = STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_UNKNOWN, + eVertical = STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_VERTICAL, + eColocated = STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_COLOCATED, + eReserved = STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_RESERVED, + eInvalid = STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_INVALID + }; + + //=============== + //=== STRUCTS === + //=============== + + //=== vulkan_video_codec_h264std === + + struct H264SpsVuiFlags + { + using NativeType = StdVideoH264SpsVuiFlags; + + operator StdVideoH264SpsVuiFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264SpsVuiFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264SpsVuiFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH264SpsVuiFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H264SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( aspect_ratio_info_present_flag == rhs.aspect_ratio_info_present_flag ) && ( overscan_info_present_flag == rhs.overscan_info_present_flag ) && + ( overscan_appropriate_flag == rhs.overscan_appropriate_flag ) && ( video_signal_type_present_flag == rhs.video_signal_type_present_flag ) && + ( video_full_range_flag == rhs.video_full_range_flag ) && ( color_description_present_flag == rhs.color_description_present_flag ) && + ( chroma_loc_info_present_flag == rhs.chroma_loc_info_present_flag ) && ( timing_info_present_flag == rhs.timing_info_present_flag ) && + ( fixed_frame_rate_flag == rhs.fixed_frame_rate_flag ) && ( bitstream_restriction_flag == rhs.bitstream_restriction_flag ) && + ( nal_hrd_parameters_present_flag == rhs.nal_hrd_parameters_present_flag ) && + ( vcl_hrd_parameters_present_flag == rhs.vcl_hrd_parameters_present_flag ); + } + + bool operator!=( H264SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t aspect_ratio_info_present_flag : 1; + uint32_t overscan_info_present_flag : 1; + uint32_t overscan_appropriate_flag : 1; + uint32_t video_signal_type_present_flag : 1; + uint32_t video_full_range_flag : 1; + uint32_t color_description_present_flag : 1; + uint32_t chroma_loc_info_present_flag : 1; + uint32_t timing_info_present_flag : 1; + uint32_t fixed_frame_rate_flag : 1; + uint32_t bitstream_restriction_flag : 1; + uint32_t nal_hrd_parameters_present_flag : 1; + uint32_t vcl_hrd_parameters_present_flag : 1; + }; + + struct H264HrdParameters + { + using NativeType = 
StdVideoH264HrdParameters; + + operator StdVideoH264HrdParameters const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264HrdParameters &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264HrdParameters const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH264HrdParameters *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H264HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( cpb_cnt_minus1 == rhs.cpb_cnt_minus1 ) && ( bit_rate_scale == rhs.bit_rate_scale ) && ( cpb_size_scale == rhs.cpb_size_scale ) && + ( reserved1 == rhs.reserved1 ) && ( bit_rate_value_minus1 == rhs.bit_rate_value_minus1 ) && + ( cpb_size_value_minus1 == rhs.cpb_size_value_minus1 ) && ( cbr_flag == rhs.cbr_flag ) && + ( initial_cpb_removal_delay_length_minus1 == rhs.initial_cpb_removal_delay_length_minus1 ) && + ( cpb_removal_delay_length_minus1 == rhs.cpb_removal_delay_length_minus1 ) && + ( dpb_output_delay_length_minus1 == rhs.dpb_output_delay_length_minus1 ) && ( time_offset_length == rhs.time_offset_length ); + } + + bool operator!=( H264HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint8_t cpb_cnt_minus1 = {}; + uint8_t bit_rate_scale = {}; + uint8_t cpb_size_scale = {}; + uint8_t reserved1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D bit_rate_value_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cpb_size_value_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cbr_flag = {}; + uint32_t initial_cpb_removal_delay_length_minus1 = {}; + uint32_t cpb_removal_delay_length_minus1 = {}; + uint32_t dpb_output_delay_length_minus1 = {}; + uint32_t time_offset_length = {}; + }; + + struct H264SequenceParameterSetVui + { + using NativeType = StdVideoH264SequenceParameterSetVui; + + operator StdVideoH264SequenceParameterSetVui const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264SequenceParameterSetVui &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264SequenceParameterSetVui const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH264SequenceParameterSetVui *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H264SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( aspect_ratio_idc == rhs.aspect_ratio_idc ) && ( sar_width == rhs.sar_width ) && ( sar_height == rhs.sar_height ) && + ( video_format == rhs.video_format ) && ( colour_primaries == rhs.colour_primaries ) && + ( transfer_characteristics == rhs.transfer_characteristics ) && ( matrix_coefficients == rhs.matrix_coefficients ) && + ( num_units_in_tick == rhs.num_units_in_tick ) && ( time_scale == rhs.time_scale ) && ( max_num_reorder_frames == rhs.max_num_reorder_frames ) && + ( max_dec_frame_buffering == rhs.max_dec_frame_buffering ) && ( chroma_sample_loc_type_top_field == rhs.chroma_sample_loc_type_top_field ) && + ( chroma_sample_loc_type_bottom_field == rhs.chroma_sample_loc_type_bottom_field ) && ( reserved1 == rhs.reserved1 ) && + ( pHrdParameters == rhs.pHrdParameters ); + } + + bool operator!=( H264SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SpsVuiFlags flags = {}; + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264AspectRatioIdc aspect_ratio_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264AspectRatioIdc::eUnspecified; + uint16_t sar_width = {}; + uint16_t sar_height = {}; + uint8_t video_format = {}; + uint8_t colour_primaries = {}; + uint8_t transfer_characteristics = {}; + uint8_t matrix_coefficients = {}; + uint32_t num_units_in_tick = {}; + uint32_t time_scale = {}; + uint8_t max_num_reorder_frames = {}; + uint8_t max_dec_frame_buffering = {}; + uint8_t chroma_sample_loc_type_top_field = {}; + uint8_t chroma_sample_loc_type_bottom_field = {}; + uint32_t reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264HrdParameters * pHrdParameters = {}; + }; + + struct H264SpsFlags + { + using NativeType = StdVideoH264SpsFlags; + + operator StdVideoH264SpsFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264SpsFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264SpsFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH264SpsFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H264SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( constraint_set0_flag == rhs.constraint_set0_flag ) && ( constraint_set1_flag == rhs.constraint_set1_flag ) && + ( constraint_set2_flag == rhs.constraint_set2_flag ) && ( constraint_set3_flag == rhs.constraint_set3_flag ) && + ( constraint_set4_flag == rhs.constraint_set4_flag ) && ( constraint_set5_flag == rhs.constraint_set5_flag ) && + ( direct_8x8_inference_flag == rhs.direct_8x8_inference_flag ) && ( mb_adaptive_frame_field_flag == rhs.mb_adaptive_frame_field_flag ) && + ( frame_mbs_only_flag == rhs.frame_mbs_only_flag ) && ( delta_pic_order_always_zero_flag == rhs.delta_pic_order_always_zero_flag ) && + ( separate_colour_plane_flag == rhs.separate_colour_plane_flag ) && + ( gaps_in_frame_num_value_allowed_flag == rhs.gaps_in_frame_num_value_allowed_flag ) && + ( qpprime_y_zero_transform_bypass_flag == rhs.qpprime_y_zero_transform_bypass_flag ) && ( frame_cropping_flag == rhs.frame_cropping_flag ) && + ( seq_scaling_matrix_present_flag == rhs.seq_scaling_matrix_present_flag ) && ( vui_parameters_present_flag == rhs.vui_parameters_present_flag ); + } + + bool operator!=( H264SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t constraint_set0_flag : 1; + uint32_t constraint_set1_flag : 1; + uint32_t constraint_set2_flag : 1; + uint32_t constraint_set3_flag : 1; + uint32_t constraint_set4_flag : 1; + uint32_t constraint_set5_flag : 1; + uint32_t direct_8x8_inference_flag : 1; + uint32_t mb_adaptive_frame_field_flag : 1; + uint32_t frame_mbs_only_flag : 1; + uint32_t delta_pic_order_always_zero_flag : 1; + uint32_t separate_colour_plane_flag : 1; + uint32_t gaps_in_frame_num_value_allowed_flag : 1; + uint32_t qpprime_y_zero_transform_bypass_flag : 1; + uint32_t frame_cropping_flag : 1; + uint32_t seq_scaling_matrix_present_flag : 1; + uint32_t vui_parameters_present_flag : 1; + }; + + struct H264ScalingLists + { + using NativeType = StdVideoH264ScalingLists; + + operator StdVideoH264ScalingLists const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264ScalingLists &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264ScalingLists const *() 
const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH264ScalingLists *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H264ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( scaling_list_present_mask == rhs.scaling_list_present_mask ) && ( use_default_scaling_matrix_mask == rhs.use_default_scaling_matrix_mask ) && + ( ScalingList4x4 == rhs.ScalingList4x4 ) && ( ScalingList8x8 == rhs.ScalingList8x8 ); + } + + bool operator!=( H264ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint16_t scaling_list_present_mask = {}; + uint16_t use_default_scaling_matrix_mask = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList4x4 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList8x8 = {}; + }; + + struct H264SequenceParameterSet + { + using NativeType = StdVideoH264SequenceParameterSet; + + operator StdVideoH264SequenceParameterSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264SequenceParameterSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264SequenceParameterSet const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH264SequenceParameterSet *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H264SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( profile_idc == rhs.profile_idc ) && ( level_idc == rhs.level_idc ) && + ( chroma_format_idc == rhs.chroma_format_idc ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && + ( bit_depth_luma_minus8 == rhs.bit_depth_luma_minus8 ) && ( bit_depth_chroma_minus8 == rhs.bit_depth_chroma_minus8 ) && + ( log2_max_frame_num_minus4 == rhs.log2_max_frame_num_minus4 ) && ( pic_order_cnt_type == rhs.pic_order_cnt_type ) && + ( offset_for_non_ref_pic == rhs.offset_for_non_ref_pic ) && ( offset_for_top_to_bottom_field == rhs.offset_for_top_to_bottom_field ) && + ( log2_max_pic_order_cnt_lsb_minus4 == rhs.log2_max_pic_order_cnt_lsb_minus4 ) && + ( num_ref_frames_in_pic_order_cnt_cycle == rhs.num_ref_frames_in_pic_order_cnt_cycle ) && ( max_num_ref_frames == rhs.max_num_ref_frames ) && + ( reserved1 == rhs.reserved1 ) && ( pic_width_in_mbs_minus1 == rhs.pic_width_in_mbs_minus1 ) && + ( pic_height_in_map_units_minus1 == rhs.pic_height_in_map_units_minus1 ) && ( frame_crop_left_offset == rhs.frame_crop_left_offset ) && + ( frame_crop_right_offset == rhs.frame_crop_right_offset ) && ( frame_crop_top_offset == rhs.frame_crop_top_offset ) && + ( frame_crop_bottom_offset == rhs.frame_crop_bottom_offset ) && ( reserved2 == rhs.reserved2 ) && + ( pOffsetForRefFrame == rhs.pOffsetForRefFrame ) && ( pScalingLists == rhs.pScalingLists ) && + ( pSequenceParameterSetVui == rhs.pSequenceParameterSetVui ); + } + + bool operator!=( H264SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SpsFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ProfileIdc profile_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ProfileIdc::eBaseline; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264LevelIdc level_idc = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264LevelIdc::e1_0; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ChromaFormatIdc 
chroma_format_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ChromaFormatIdc::eMonochrome; + uint8_t seq_parameter_set_id = {}; + uint8_t bit_depth_luma_minus8 = {}; + uint8_t bit_depth_chroma_minus8 = {}; + uint8_t log2_max_frame_num_minus4 = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PocType pic_order_cnt_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PocType::e0; + int32_t offset_for_non_ref_pic = {}; + int32_t offset_for_top_to_bottom_field = {}; + uint8_t log2_max_pic_order_cnt_lsb_minus4 = {}; + uint8_t num_ref_frames_in_pic_order_cnt_cycle = {}; + uint8_t max_num_ref_frames = {}; + uint8_t reserved1 = {}; + uint32_t pic_width_in_mbs_minus1 = {}; + uint32_t pic_height_in_map_units_minus1 = {}; + uint32_t frame_crop_left_offset = {}; + uint32_t frame_crop_right_offset = {}; + uint32_t frame_crop_top_offset = {}; + uint32_t frame_crop_bottom_offset = {}; + uint32_t reserved2 = {}; + const int32_t * pOffsetForRefFrame = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ScalingLists * pScalingLists = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SequenceParameterSetVui * pSequenceParameterSetVui = {}; + }; + + struct H264PpsFlags + { + using NativeType = StdVideoH264PpsFlags; + + operator StdVideoH264PpsFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264PpsFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264PpsFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH264PpsFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H264PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( transform_8x8_mode_flag == rhs.transform_8x8_mode_flag ) && ( redundant_pic_cnt_present_flag == rhs.redundant_pic_cnt_present_flag ) && + ( constrained_intra_pred_flag == rhs.constrained_intra_pred_flag ) && + ( deblocking_filter_control_present_flag == rhs.deblocking_filter_control_present_flag ) && ( weighted_pred_flag == rhs.weighted_pred_flag ) && + ( bottom_field_pic_order_in_frame_present_flag == rhs.bottom_field_pic_order_in_frame_present_flag ) && + ( entropy_coding_mode_flag == rhs.entropy_coding_mode_flag ) && ( pic_scaling_matrix_present_flag == rhs.pic_scaling_matrix_present_flag ); + } + + bool operator!=( H264PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t transform_8x8_mode_flag : 1; + uint32_t redundant_pic_cnt_present_flag : 1; + uint32_t constrained_intra_pred_flag : 1; + uint32_t deblocking_filter_control_present_flag : 1; + uint32_t weighted_pred_flag : 1; + uint32_t bottom_field_pic_order_in_frame_present_flag : 1; + uint32_t entropy_coding_mode_flag : 1; + uint32_t pic_scaling_matrix_present_flag : 1; + }; + + struct H264PictureParameterSet + { + using NativeType = StdVideoH264PictureParameterSet; + + operator StdVideoH264PictureParameterSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264PictureParameterSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264PictureParameterSet const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH264PictureParameterSet *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H264PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT 
+ { + return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) && + ( num_ref_idx_l0_default_active_minus1 == rhs.num_ref_idx_l0_default_active_minus1 ) && + ( num_ref_idx_l1_default_active_minus1 == rhs.num_ref_idx_l1_default_active_minus1 ) && ( weighted_bipred_idc == rhs.weighted_bipred_idc ) && + ( pic_init_qp_minus26 == rhs.pic_init_qp_minus26 ) && ( pic_init_qs_minus26 == rhs.pic_init_qs_minus26 ) && + ( chroma_qp_index_offset == rhs.chroma_qp_index_offset ) && ( second_chroma_qp_index_offset == rhs.second_chroma_qp_index_offset ) && + ( pScalingLists == rhs.pScalingLists ); + } + + bool operator!=( H264PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PpsFlags flags = {}; + uint8_t seq_parameter_set_id = {}; + uint8_t pic_parameter_set_id = {}; + uint8_t num_ref_idx_l0_default_active_minus1 = {}; + uint8_t num_ref_idx_l1_default_active_minus1 = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264WeightedBipredIdc weighted_bipred_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264WeightedBipredIdc::eDefault; + int8_t pic_init_qp_minus26 = {}; + int8_t pic_init_qs_minus26 = {}; + int8_t chroma_qp_index_offset = {}; + int8_t second_chroma_qp_index_offset = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ScalingLists * pScalingLists = {}; + }; + + //=== vulkan_video_codec_h264std_decode === + + struct DecodeH264PictureInfoFlags + { + using NativeType = StdVideoDecodeH264PictureInfoFlags; + + operator StdVideoDecodeH264PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH264PictureInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH264PictureInfoFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoDecodeH264PictureInfoFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( DecodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( field_pic_flag == rhs.field_pic_flag ) && ( is_intra == rhs.is_intra ) && ( IdrPicFlag == rhs.IdrPicFlag ) && + ( bottom_field_flag == rhs.bottom_field_flag ) && ( is_reference == rhs.is_reference ) && + ( complementary_field_pair == rhs.complementary_field_pair ); + } + + bool operator!=( DecodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t field_pic_flag : 1; + uint32_t is_intra : 1; + uint32_t IdrPicFlag : 1; + uint32_t bottom_field_flag : 1; + uint32_t is_reference : 1; + uint32_t complementary_field_pair : 1; + }; + + struct DecodeH264PictureInfo + { + using NativeType = StdVideoDecodeH264PictureInfo; + + operator StdVideoDecodeH264PictureInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH264PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH264PictureInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoDecodeH264PictureInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( DecodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( seq_parameter_set_id == 
rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) && + ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( frame_num == rhs.frame_num ) && ( idr_pic_id == rhs.idr_pic_id ) && + ( PicOrderCnt == rhs.PicOrderCnt ); + } + + bool operator!=( DecodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH264PictureInfoFlags flags = {}; + uint8_t seq_parameter_set_id = {}; + uint8_t pic_parameter_set_id = {}; + uint8_t reserved1 = {}; + uint8_t reserved2 = {}; + uint16_t frame_num = {}; + uint16_t idr_pic_id = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D PicOrderCnt = {}; + }; + + struct DecodeH264ReferenceInfoFlags + { + using NativeType = StdVideoDecodeH264ReferenceInfoFlags; + + operator StdVideoDecodeH264ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH264ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH264ReferenceInfoFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoDecodeH264ReferenceInfoFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( DecodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( top_field_flag == rhs.top_field_flag ) && ( bottom_field_flag == rhs.bottom_field_flag ) && + ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( is_non_existing == rhs.is_non_existing ); + } + + bool operator!=( DecodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t top_field_flag : 1; + uint32_t bottom_field_flag : 1; + uint32_t used_for_long_term_reference : 1; + uint32_t is_non_existing : 1; + }; + + struct DecodeH264ReferenceInfo + { + using NativeType = StdVideoDecodeH264ReferenceInfo; + + operator StdVideoDecodeH264ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH264ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH264ReferenceInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoDecodeH264ReferenceInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( DecodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( FrameNum == rhs.FrameNum ) && ( reserved == rhs.reserved ) && ( PicOrderCnt == rhs.PicOrderCnt ); + } + + bool operator!=( DecodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH264ReferenceInfoFlags flags = {}; + uint16_t FrameNum = {}; + uint16_t reserved = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D PicOrderCnt = {}; + }; + + //=== vulkan_video_codec_h264std_encode === + + struct EncodeH264WeightTableFlags + { + using NativeType = StdVideoEncodeH264WeightTableFlags; + + operator StdVideoEncodeH264WeightTableFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264WeightTableFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264WeightTableFlags const *() const VULKAN_HPP_NOEXCEPT + { + 
return reinterpret_cast( this ); + } + + operator StdVideoEncodeH264WeightTableFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH264WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( luma_weight_l0_flag == rhs.luma_weight_l0_flag ) && ( chroma_weight_l0_flag == rhs.chroma_weight_l0_flag ) && + ( luma_weight_l1_flag == rhs.luma_weight_l1_flag ) && ( chroma_weight_l1_flag == rhs.chroma_weight_l1_flag ); + } + + bool operator!=( EncodeH264WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t luma_weight_l0_flag = {}; + uint32_t chroma_weight_l0_flag = {}; + uint32_t luma_weight_l1_flag = {}; + uint32_t chroma_weight_l1_flag = {}; + }; + + struct EncodeH264WeightTable + { + using NativeType = StdVideoEncodeH264WeightTable; + + operator StdVideoEncodeH264WeightTable const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264WeightTable &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264WeightTable const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH264WeightTable *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH264WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( luma_log2_weight_denom == rhs.luma_log2_weight_denom ) && + ( chroma_log2_weight_denom == rhs.chroma_log2_weight_denom ) && ( luma_weight_l0 == rhs.luma_weight_l0 ) && + ( luma_offset_l0 == rhs.luma_offset_l0 ) && ( chroma_weight_l0 == rhs.chroma_weight_l0 ) && ( chroma_offset_l0 == rhs.chroma_offset_l0 ) && + ( luma_weight_l1 == rhs.luma_weight_l1 ) && ( luma_offset_l1 == rhs.luma_offset_l1 ) && ( chroma_weight_l1 == rhs.chroma_weight_l1 ) && + ( chroma_offset_l1 == rhs.chroma_offset_l1 ); + } + + bool operator!=( EncodeH264WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264WeightTableFlags flags = {}; + uint8_t luma_log2_weight_denom = {}; + uint8_t chroma_log2_weight_denom = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_weight_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_offset_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D chroma_weight_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D chroma_offset_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_weight_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_offset_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D chroma_weight_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D chroma_offset_l1 = {}; + }; + + struct EncodeH264SliceHeaderFlags + { + using NativeType = StdVideoEncodeH264SliceHeaderFlags; + + operator StdVideoEncodeH264SliceHeaderFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264SliceHeaderFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264SliceHeaderFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH264SliceHeaderFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH264SliceHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( direct_spatial_mv_pred_flag == rhs.direct_spatial_mv_pred_flag ) && + ( num_ref_idx_active_override_flag == 
rhs.num_ref_idx_active_override_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH264SliceHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t direct_spatial_mv_pred_flag : 1; + uint32_t num_ref_idx_active_override_flag : 1; + uint32_t reserved : 30; + }; + + struct EncodeH264PictureInfoFlags + { + using NativeType = StdVideoEncodeH264PictureInfoFlags; + + operator StdVideoEncodeH264PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264PictureInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264PictureInfoFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH264PictureInfoFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( IdrPicFlag == rhs.IdrPicFlag ) && ( is_reference == rhs.is_reference ) && + ( no_output_of_prior_pics_flag == rhs.no_output_of_prior_pics_flag ) && ( long_term_reference_flag == rhs.long_term_reference_flag ) && + ( adaptive_ref_pic_marking_mode_flag == rhs.adaptive_ref_pic_marking_mode_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t IdrPicFlag : 1; + uint32_t is_reference : 1; + uint32_t no_output_of_prior_pics_flag : 1; + uint32_t long_term_reference_flag : 1; + uint32_t adaptive_ref_pic_marking_mode_flag : 1; + uint32_t reserved : 27; + }; + + struct EncodeH264ReferenceInfoFlags + { + using NativeType = StdVideoEncodeH264ReferenceInfoFlags; + + operator StdVideoEncodeH264ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceInfoFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceInfoFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t used_for_long_term_reference : 1; + uint32_t reserved : 31; + }; + + struct EncodeH264ReferenceListsInfoFlags + { + using NativeType = StdVideoEncodeH264ReferenceListsInfoFlags; + + operator StdVideoEncodeH264ReferenceListsInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceListsInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceListsInfoFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceListsInfoFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH264ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( ref_pic_list_modification_flag_l0 == rhs.ref_pic_list_modification_flag_l0 ) 
&& + ( ref_pic_list_modification_flag_l1 == rhs.ref_pic_list_modification_flag_l1 ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH264ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t ref_pic_list_modification_flag_l0 : 1; + uint32_t ref_pic_list_modification_flag_l1 : 1; + uint32_t reserved : 30; + }; + + struct EncodeH264RefListModEntry + { + using NativeType = StdVideoEncodeH264RefListModEntry; + + operator StdVideoEncodeH264RefListModEntry const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264RefListModEntry &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264RefListModEntry const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH264RefListModEntry *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH264RefListModEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( modification_of_pic_nums_idc == rhs.modification_of_pic_nums_idc ) && ( abs_diff_pic_num_minus1 == rhs.abs_diff_pic_num_minus1 ) && + ( long_term_pic_num == rhs.long_term_pic_num ); + } + + bool operator!=( EncodeH264RefListModEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ModificationOfPicNumsIdc modification_of_pic_nums_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ModificationOfPicNumsIdc::eShortTermSubtract; + uint16_t abs_diff_pic_num_minus1 = {}; + uint16_t long_term_pic_num = {}; + }; + + struct EncodeH264RefPicMarkingEntry + { + using NativeType = StdVideoEncodeH264RefPicMarkingEntry; + + operator StdVideoEncodeH264RefPicMarkingEntry const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264RefPicMarkingEntry &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264RefPicMarkingEntry const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH264RefPicMarkingEntry *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH264RefPicMarkingEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( memory_management_control_operation == rhs.memory_management_control_operation ) && + ( difference_of_pic_nums_minus1 == rhs.difference_of_pic_nums_minus1 ) && ( long_term_pic_num == rhs.long_term_pic_num ) && + ( long_term_frame_idx == rhs.long_term_frame_idx ) && ( max_long_term_frame_idx_plus1 == rhs.max_long_term_frame_idx_plus1 ); + } + + bool operator!=( EncodeH264RefPicMarkingEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264MemMgmtControlOp memory_management_control_operation = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264MemMgmtControlOp::eEnd; + uint16_t difference_of_pic_nums_minus1 = {}; + uint16_t long_term_pic_num = {}; + uint16_t long_term_frame_idx = {}; + uint16_t max_long_term_frame_idx_plus1 = {}; + }; + + struct EncodeH264ReferenceListsInfo + { + using NativeType = StdVideoEncodeH264ReferenceListsInfo; + + operator StdVideoEncodeH264ReferenceListsInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceListsInfo &() VULKAN_HPP_NOEXCEPT + { + 
return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceListsInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceListsInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH264ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( num_ref_idx_l0_active_minus1 == rhs.num_ref_idx_l0_active_minus1 ) && + ( num_ref_idx_l1_active_minus1 == rhs.num_ref_idx_l1_active_minus1 ) && ( RefPicList0 == rhs.RefPicList0 ) && + ( RefPicList1 == rhs.RefPicList1 ) && ( refList0ModOpCount == rhs.refList0ModOpCount ) && ( refList1ModOpCount == rhs.refList1ModOpCount ) && + ( refPicMarkingOpCount == rhs.refPicMarkingOpCount ) && ( reserved1 == rhs.reserved1 ) && + ( pRefList0ModOperations == rhs.pRefList0ModOperations ) && ( pRefList1ModOperations == rhs.pRefList1ModOperations ) && + ( pRefPicMarkingOperations == rhs.pRefPicMarkingOperations ); + } + + bool operator!=( EncodeH264ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264ReferenceListsInfoFlags flags = {}; + uint8_t num_ref_idx_l0_active_minus1 = {}; + uint8_t num_ref_idx_l1_active_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicList0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicList1 = {}; + uint8_t refList0ModOpCount = {}; + uint8_t refList1ModOpCount = {}; + uint8_t refPicMarkingOpCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264RefListModEntry * pRefList0ModOperations = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264RefListModEntry * pRefList1ModOperations = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264RefPicMarkingEntry * pRefPicMarkingOperations = {}; + }; + + struct EncodeH264PictureInfo + { + using NativeType = StdVideoEncodeH264PictureInfo; + + operator StdVideoEncodeH264PictureInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264PictureInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH264PictureInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) && + ( idr_pic_id == rhs.idr_pic_id ) && ( primary_pic_type == rhs.primary_pic_type ) && ( frame_num == rhs.frame_num ) && + ( PicOrderCnt == rhs.PicOrderCnt ) && ( temporal_id == rhs.temporal_id ) && ( reserved1 == rhs.reserved1 ) && ( pRefLists == rhs.pRefLists ); + } + + bool operator!=( EncodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264PictureInfoFlags flags = {}; + uint8_t seq_parameter_set_id = {}; + uint8_t pic_parameter_set_id = {}; + uint16_t idr_pic_id = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType primary_pic_type = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType::eP; + uint32_t frame_num = {}; + 
int32_t PicOrderCnt = {}; + uint8_t temporal_id = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264ReferenceListsInfo * pRefLists = {}; + }; + + struct EncodeH264ReferenceInfo + { + using NativeType = StdVideoEncodeH264ReferenceInfo; + + operator StdVideoEncodeH264ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( primary_pic_type == rhs.primary_pic_type ) && ( FrameNum == rhs.FrameNum ) && ( PicOrderCnt == rhs.PicOrderCnt ) && + ( long_term_pic_num == rhs.long_term_pic_num ) && ( long_term_frame_idx == rhs.long_term_frame_idx ) && ( temporal_id == rhs.temporal_id ); + } + + bool operator!=( EncodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264ReferenceInfoFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType primary_pic_type = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType::eP; + uint32_t FrameNum = {}; + int32_t PicOrderCnt = {}; + uint16_t long_term_pic_num = {}; + uint16_t long_term_frame_idx = {}; + uint8_t temporal_id = {}; + }; + + struct EncodeH264SliceHeader + { + using NativeType = StdVideoEncodeH264SliceHeader; + + operator StdVideoEncodeH264SliceHeader const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264SliceHeader &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264SliceHeader const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH264SliceHeader *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH264SliceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( first_mb_in_slice == rhs.first_mb_in_slice ) && ( slice_type == rhs.slice_type ) && + ( slice_alpha_c0_offset_div2 == rhs.slice_alpha_c0_offset_div2 ) && ( slice_beta_offset_div2 == rhs.slice_beta_offset_div2 ) && + ( slice_qp_delta == rhs.slice_qp_delta ) && ( reserved1 == rhs.reserved1 ) && ( cabac_init_idc == rhs.cabac_init_idc ) && + ( disable_deblocking_filter_idc == rhs.disable_deblocking_filter_idc ) && ( pWeightTable == rhs.pWeightTable ); + } + + bool operator!=( EncodeH264SliceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264SliceHeaderFlags flags = {}; + uint32_t first_mb_in_slice = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SliceType slice_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SliceType::eP; + int8_t slice_alpha_c0_offset_div2 = {}; + int8_t slice_beta_offset_div2 = {}; + int8_t slice_qp_delta = {}; + uint8_t reserved1 = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264CabacInitIdc cabac_init_idc = + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264CabacInitIdc::e0; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264DisableDeblockingFilterIdc disable_deblocking_filter_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264DisableDeblockingFilterIdc::eDisabled; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264WeightTable * pWeightTable = {}; + }; + + //=== vulkan_video_codec_h265std === + + struct H265DecPicBufMgr + { + using NativeType = StdVideoH265DecPicBufMgr; + + operator StdVideoH265DecPicBufMgr const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265DecPicBufMgr &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265DecPicBufMgr const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH265DecPicBufMgr *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H265DecPicBufMgr const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( max_latency_increase_plus1 == rhs.max_latency_increase_plus1 ) && ( max_dec_pic_buffering_minus1 == rhs.max_dec_pic_buffering_minus1 ) && + ( max_num_reorder_pics == rhs.max_num_reorder_pics ); + } + + bool operator!=( H265DecPicBufMgr const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D max_latency_increase_plus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D max_dec_pic_buffering_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D max_num_reorder_pics = {}; + }; + + struct H265SubLayerHrdParameters + { + using NativeType = StdVideoH265SubLayerHrdParameters; + + operator StdVideoH265SubLayerHrdParameters const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SubLayerHrdParameters &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SubLayerHrdParameters const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH265SubLayerHrdParameters *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H265SubLayerHrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( bit_rate_value_minus1 == rhs.bit_rate_value_minus1 ) && ( cpb_size_value_minus1 == rhs.cpb_size_value_minus1 ) && + ( cpb_size_du_value_minus1 == rhs.cpb_size_du_value_minus1 ) && ( bit_rate_du_value_minus1 == rhs.bit_rate_du_value_minus1 ) && + ( cbr_flag == rhs.cbr_flag ); + } + + bool operator!=( H265SubLayerHrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D bit_rate_value_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cpb_size_value_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cpb_size_du_value_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D bit_rate_du_value_minus1 = {}; + uint32_t cbr_flag = {}; + }; + + struct H265HrdFlags + { + using NativeType = StdVideoH265HrdFlags; + + operator StdVideoH265HrdFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265HrdFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265HrdFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH265HrdFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H265HrdFlags const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return ( nal_hrd_parameters_present_flag == rhs.nal_hrd_parameters_present_flag ) && + ( vcl_hrd_parameters_present_flag == rhs.vcl_hrd_parameters_present_flag ) && + ( sub_pic_hrd_params_present_flag == rhs.sub_pic_hrd_params_present_flag ) && + ( sub_pic_cpb_params_in_pic_timing_sei_flag == rhs.sub_pic_cpb_params_in_pic_timing_sei_flag ) && + ( fixed_pic_rate_general_flag == rhs.fixed_pic_rate_general_flag ) && ( fixed_pic_rate_within_cvs_flag == rhs.fixed_pic_rate_within_cvs_flag ) && + ( low_delay_hrd_flag == rhs.low_delay_hrd_flag ); + } + + bool operator!=( H265HrdFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t nal_hrd_parameters_present_flag : 1; + uint32_t vcl_hrd_parameters_present_flag : 1; + uint32_t sub_pic_hrd_params_present_flag : 1; + uint32_t sub_pic_cpb_params_in_pic_timing_sei_flag : 1; + uint32_t fixed_pic_rate_general_flag : 8; + uint32_t fixed_pic_rate_within_cvs_flag : 8; + uint32_t low_delay_hrd_flag : 8; + }; + + struct H265HrdParameters + { + using NativeType = StdVideoH265HrdParameters; + + operator StdVideoH265HrdParameters const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265HrdParameters &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265HrdParameters const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH265HrdParameters *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H265HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( tick_divisor_minus2 == rhs.tick_divisor_minus2 ) && + ( du_cpb_removal_delay_increment_length_minus1 == rhs.du_cpb_removal_delay_increment_length_minus1 ) && + ( dpb_output_delay_du_length_minus1 == rhs.dpb_output_delay_du_length_minus1 ) && ( bit_rate_scale == rhs.bit_rate_scale ) && + ( cpb_size_scale == rhs.cpb_size_scale ) && ( cpb_size_du_scale == rhs.cpb_size_du_scale ) && + ( initial_cpb_removal_delay_length_minus1 == rhs.initial_cpb_removal_delay_length_minus1 ) && + ( au_cpb_removal_delay_length_minus1 == rhs.au_cpb_removal_delay_length_minus1 ) && + ( dpb_output_delay_length_minus1 == rhs.dpb_output_delay_length_minus1 ) && ( cpb_cnt_minus1 == rhs.cpb_cnt_minus1 ) && + ( elemental_duration_in_tc_minus1 == rhs.elemental_duration_in_tc_minus1 ) && ( reserved == rhs.reserved ) && + ( pSubLayerHrdParametersNal == rhs.pSubLayerHrdParametersNal ) && ( pSubLayerHrdParametersVcl == rhs.pSubLayerHrdParametersVcl ); + } + + bool operator!=( H265HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265HrdFlags flags = {}; + uint8_t tick_divisor_minus2 = {}; + uint8_t du_cpb_removal_delay_increment_length_minus1 = {}; + uint8_t dpb_output_delay_du_length_minus1 = {}; + uint8_t bit_rate_scale = {}; + uint8_t cpb_size_scale = {}; + uint8_t cpb_size_du_scale = {}; + uint8_t initial_cpb_removal_delay_length_minus1 = {}; + uint8_t au_cpb_removal_delay_length_minus1 = {}; + uint8_t dpb_output_delay_length_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cpb_cnt_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D elemental_duration_in_tc_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SubLayerHrdParameters * pSubLayerHrdParametersNal = {}; + const 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SubLayerHrdParameters * pSubLayerHrdParametersVcl = {}; + }; + + struct H265VpsFlags + { + using NativeType = StdVideoH265VpsFlags; + + operator StdVideoH265VpsFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265VpsFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265VpsFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH265VpsFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H265VpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( vps_temporal_id_nesting_flag == rhs.vps_temporal_id_nesting_flag ) && + ( vps_sub_layer_ordering_info_present_flag == rhs.vps_sub_layer_ordering_info_present_flag ) && + ( vps_timing_info_present_flag == rhs.vps_timing_info_present_flag ) && + ( vps_poc_proportional_to_timing_flag == rhs.vps_poc_proportional_to_timing_flag ); + } + + bool operator!=( H265VpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t vps_temporal_id_nesting_flag : 1; + uint32_t vps_sub_layer_ordering_info_present_flag : 1; + uint32_t vps_timing_info_present_flag : 1; + uint32_t vps_poc_proportional_to_timing_flag : 1; + }; + + struct H265ProfileTierLevelFlags + { + using NativeType = StdVideoH265ProfileTierLevelFlags; + + operator StdVideoH265ProfileTierLevelFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ProfileTierLevelFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ProfileTierLevelFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH265ProfileTierLevelFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H265ProfileTierLevelFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( general_tier_flag == rhs.general_tier_flag ) && ( general_progressive_source_flag == rhs.general_progressive_source_flag ) && + ( general_interlaced_source_flag == rhs.general_interlaced_source_flag ) && + ( general_non_packed_constraint_flag == rhs.general_non_packed_constraint_flag ) && + ( general_frame_only_constraint_flag == rhs.general_frame_only_constraint_flag ); + } + + bool operator!=( H265ProfileTierLevelFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t general_tier_flag : 1; + uint32_t general_progressive_source_flag : 1; + uint32_t general_interlaced_source_flag : 1; + uint32_t general_non_packed_constraint_flag : 1; + uint32_t general_frame_only_constraint_flag : 1; + }; + + struct H265ProfileTierLevel + { + using NativeType = StdVideoH265ProfileTierLevel; + + operator StdVideoH265ProfileTierLevel const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ProfileTierLevel &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ProfileTierLevel const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH265ProfileTierLevel *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H265ProfileTierLevel const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( general_profile_idc == rhs.general_profile_idc ) && ( general_level_idc == 
rhs.general_level_idc ); + } + + bool operator!=( H265ProfileTierLevel const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileTierLevelFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileIdc general_profile_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileIdc::eMain; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265LevelIdc general_level_idc = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265LevelIdc::e1_0; + }; + + struct H265VideoParameterSet + { + using NativeType = StdVideoH265VideoParameterSet; + + operator StdVideoH265VideoParameterSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265VideoParameterSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265VideoParameterSet const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH265VideoParameterSet *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H265VideoParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( vps_video_parameter_set_id == rhs.vps_video_parameter_set_id ) && + ( vps_max_sub_layers_minus1 == rhs.vps_max_sub_layers_minus1 ) && ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && + ( vps_num_units_in_tick == rhs.vps_num_units_in_tick ) && ( vps_time_scale == rhs.vps_time_scale ) && + ( vps_num_ticks_poc_diff_one_minus1 == rhs.vps_num_ticks_poc_diff_one_minus1 ) && ( reserved3 == rhs.reserved3 ) && + ( pDecPicBufMgr == rhs.pDecPicBufMgr ) && ( pHrdParameters == rhs.pHrdParameters ) && ( pProfileTierLevel == rhs.pProfileTierLevel ); + } + + bool operator!=( H265VideoParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265VpsFlags flags = {}; + uint8_t vps_video_parameter_set_id = {}; + uint8_t vps_max_sub_layers_minus1 = {}; + uint8_t reserved1 = {}; + uint8_t reserved2 = {}; + uint32_t vps_num_units_in_tick = {}; + uint32_t vps_time_scale = {}; + uint32_t vps_num_ticks_poc_diff_one_minus1 = {}; + uint32_t reserved3 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265DecPicBufMgr * pDecPicBufMgr = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265HrdParameters * pHrdParameters = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileTierLevel * pProfileTierLevel = {}; + }; + + struct H265ScalingLists + { + using NativeType = StdVideoH265ScalingLists; + + operator StdVideoH265ScalingLists const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ScalingLists &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ScalingLists const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH265ScalingLists *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H265ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( ScalingList4x4 == rhs.ScalingList4x4 ) && ( ScalingList8x8 == rhs.ScalingList8x8 ) && ( ScalingList16x16 == rhs.ScalingList16x16 ) && + ( ScalingList32x32 == rhs.ScalingList32x32 ) && ( ScalingListDCCoef16x16 == rhs.ScalingListDCCoef16x16 ) && + ( ScalingListDCCoef32x32 == rhs.ScalingListDCCoef32x32 ); + } + + bool 
operator!=( H265ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList4x4 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList8x8 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList16x16 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList32x32 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ScalingListDCCoef16x16 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ScalingListDCCoef32x32 = {}; + }; + + struct H265SpsVuiFlags + { + using NativeType = StdVideoH265SpsVuiFlags; + + operator StdVideoH265SpsVuiFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SpsVuiFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SpsVuiFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH265SpsVuiFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H265SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( aspect_ratio_info_present_flag == rhs.aspect_ratio_info_present_flag ) && ( overscan_info_present_flag == rhs.overscan_info_present_flag ) && + ( overscan_appropriate_flag == rhs.overscan_appropriate_flag ) && ( video_signal_type_present_flag == rhs.video_signal_type_present_flag ) && + ( video_full_range_flag == rhs.video_full_range_flag ) && ( colour_description_present_flag == rhs.colour_description_present_flag ) && + ( chroma_loc_info_present_flag == rhs.chroma_loc_info_present_flag ) && + ( neutral_chroma_indication_flag == rhs.neutral_chroma_indication_flag ) && ( field_seq_flag == rhs.field_seq_flag ) && + ( frame_field_info_present_flag == rhs.frame_field_info_present_flag ) && ( default_display_window_flag == rhs.default_display_window_flag ) && + ( vui_timing_info_present_flag == rhs.vui_timing_info_present_flag ) && + ( vui_poc_proportional_to_timing_flag == rhs.vui_poc_proportional_to_timing_flag ) && + ( vui_hrd_parameters_present_flag == rhs.vui_hrd_parameters_present_flag ) && ( bitstream_restriction_flag == rhs.bitstream_restriction_flag ) && + ( tiles_fixed_structure_flag == rhs.tiles_fixed_structure_flag ) && + ( motion_vectors_over_pic_boundaries_flag == rhs.motion_vectors_over_pic_boundaries_flag ) && + ( restricted_ref_pic_lists_flag == rhs.restricted_ref_pic_lists_flag ); + } + + bool operator!=( H265SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t aspect_ratio_info_present_flag : 1; + uint32_t overscan_info_present_flag : 1; + uint32_t overscan_appropriate_flag : 1; + uint32_t video_signal_type_present_flag : 1; + uint32_t video_full_range_flag : 1; + uint32_t colour_description_present_flag : 1; + uint32_t chroma_loc_info_present_flag : 1; + uint32_t neutral_chroma_indication_flag : 1; + uint32_t field_seq_flag : 1; + uint32_t frame_field_info_present_flag : 1; + uint32_t default_display_window_flag : 1; + uint32_t vui_timing_info_present_flag : 1; + uint32_t vui_poc_proportional_to_timing_flag : 1; + uint32_t vui_hrd_parameters_present_flag : 1; + uint32_t bitstream_restriction_flag : 1; + uint32_t tiles_fixed_structure_flag : 1; + uint32_t motion_vectors_over_pic_boundaries_flag : 1; + uint32_t restricted_ref_pic_lists_flag : 1; + }; + + struct H265SequenceParameterSetVui + { + using NativeType = StdVideoH265SequenceParameterSetVui; + + operator StdVideoH265SequenceParameterSetVui const &() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SequenceParameterSetVui &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SequenceParameterSetVui const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH265SequenceParameterSetVui *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H265SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( aspect_ratio_idc == rhs.aspect_ratio_idc ) && ( sar_width == rhs.sar_width ) && ( sar_height == rhs.sar_height ) && + ( video_format == rhs.video_format ) && ( colour_primaries == rhs.colour_primaries ) && + ( transfer_characteristics == rhs.transfer_characteristics ) && ( matrix_coeffs == rhs.matrix_coeffs ) && + ( chroma_sample_loc_type_top_field == rhs.chroma_sample_loc_type_top_field ) && + ( chroma_sample_loc_type_bottom_field == rhs.chroma_sample_loc_type_bottom_field ) && ( reserved1 == rhs.reserved1 ) && + ( reserved2 == rhs.reserved2 ) && ( def_disp_win_left_offset == rhs.def_disp_win_left_offset ) && + ( def_disp_win_right_offset == rhs.def_disp_win_right_offset ) && ( def_disp_win_top_offset == rhs.def_disp_win_top_offset ) && + ( def_disp_win_bottom_offset == rhs.def_disp_win_bottom_offset ) && ( vui_num_units_in_tick == rhs.vui_num_units_in_tick ) && + ( vui_time_scale == rhs.vui_time_scale ) && ( vui_num_ticks_poc_diff_one_minus1 == rhs.vui_num_ticks_poc_diff_one_minus1 ) && + ( min_spatial_segmentation_idc == rhs.min_spatial_segmentation_idc ) && ( reserved3 == rhs.reserved3 ) && + ( max_bytes_per_pic_denom == rhs.max_bytes_per_pic_denom ) && ( max_bits_per_min_cu_denom == rhs.max_bits_per_min_cu_denom ) && + ( log2_max_mv_length_horizontal == rhs.log2_max_mv_length_horizontal ) && ( log2_max_mv_length_vertical == rhs.log2_max_mv_length_vertical ) && + ( pHrdParameters == rhs.pHrdParameters ); + } + + bool operator!=( H265SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SpsVuiFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265AspectRatioIdc aspect_ratio_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265AspectRatioIdc::eUnspecified; + uint16_t sar_width = {}; + uint16_t sar_height = {}; + uint8_t video_format = {}; + uint8_t colour_primaries = {}; + uint8_t transfer_characteristics = {}; + uint8_t matrix_coeffs = {}; + uint8_t chroma_sample_loc_type_top_field = {}; + uint8_t chroma_sample_loc_type_bottom_field = {}; + uint8_t reserved1 = {}; + uint8_t reserved2 = {}; + uint16_t def_disp_win_left_offset = {}; + uint16_t def_disp_win_right_offset = {}; + uint16_t def_disp_win_top_offset = {}; + uint16_t def_disp_win_bottom_offset = {}; + uint32_t vui_num_units_in_tick = {}; + uint32_t vui_time_scale = {}; + uint32_t vui_num_ticks_poc_diff_one_minus1 = {}; + uint16_t min_spatial_segmentation_idc = {}; + uint16_t reserved3 = {}; + uint8_t max_bytes_per_pic_denom = {}; + uint8_t max_bits_per_min_cu_denom = {}; + uint8_t log2_max_mv_length_horizontal = {}; + uint8_t log2_max_mv_length_vertical = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265HrdParameters * pHrdParameters = {}; + }; + + struct H265PredictorPaletteEntries + { + using NativeType = StdVideoH265PredictorPaletteEntries; + + operator StdVideoH265PredictorPaletteEntries const &() const VULKAN_HPP_NOEXCEPT + 
{ + return *reinterpret_cast( this ); + } + + operator StdVideoH265PredictorPaletteEntries &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265PredictorPaletteEntries const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH265PredictorPaletteEntries *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H265PredictorPaletteEntries const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( PredictorPaletteEntries == rhs.PredictorPaletteEntries ); + } + + bool operator!=( H265PredictorPaletteEntries const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE:: + ArrayWrapper2D + PredictorPaletteEntries = {}; + }; + + struct H265SpsFlags + { + using NativeType = StdVideoH265SpsFlags; + + operator StdVideoH265SpsFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SpsFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SpsFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH265SpsFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H265SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sps_temporal_id_nesting_flag == rhs.sps_temporal_id_nesting_flag ) && ( separate_colour_plane_flag == rhs.separate_colour_plane_flag ) && + ( conformance_window_flag == rhs.conformance_window_flag ) && + ( sps_sub_layer_ordering_info_present_flag == rhs.sps_sub_layer_ordering_info_present_flag ) && + ( scaling_list_enabled_flag == rhs.scaling_list_enabled_flag ) && + ( sps_scaling_list_data_present_flag == rhs.sps_scaling_list_data_present_flag ) && ( amp_enabled_flag == rhs.amp_enabled_flag ) && + ( sample_adaptive_offset_enabled_flag == rhs.sample_adaptive_offset_enabled_flag ) && ( pcm_enabled_flag == rhs.pcm_enabled_flag ) && + ( pcm_loop_filter_disabled_flag == rhs.pcm_loop_filter_disabled_flag ) && + ( long_term_ref_pics_present_flag == rhs.long_term_ref_pics_present_flag ) && + ( sps_temporal_mvp_enabled_flag == rhs.sps_temporal_mvp_enabled_flag ) && + ( strong_intra_smoothing_enabled_flag == rhs.strong_intra_smoothing_enabled_flag ) && + ( vui_parameters_present_flag == rhs.vui_parameters_present_flag ) && ( sps_extension_present_flag == rhs.sps_extension_present_flag ) && + ( sps_range_extension_flag == rhs.sps_range_extension_flag ) && + ( transform_skip_rotation_enabled_flag == rhs.transform_skip_rotation_enabled_flag ) && + ( transform_skip_context_enabled_flag == rhs.transform_skip_context_enabled_flag ) && + ( implicit_rdpcm_enabled_flag == rhs.implicit_rdpcm_enabled_flag ) && ( explicit_rdpcm_enabled_flag == rhs.explicit_rdpcm_enabled_flag ) && + ( extended_precision_processing_flag == rhs.extended_precision_processing_flag ) && + ( intra_smoothing_disabled_flag == rhs.intra_smoothing_disabled_flag ) && + ( high_precision_offsets_enabled_flag == rhs.high_precision_offsets_enabled_flag ) && + ( persistent_rice_adaptation_enabled_flag == rhs.persistent_rice_adaptation_enabled_flag ) && + ( cabac_bypass_alignment_enabled_flag == rhs.cabac_bypass_alignment_enabled_flag ) && ( sps_scc_extension_flag == rhs.sps_scc_extension_flag ) && + ( sps_curr_pic_ref_enabled_flag == rhs.sps_curr_pic_ref_enabled_flag ) && ( palette_mode_enabled_flag == rhs.palette_mode_enabled_flag ) && + ( sps_palette_predictor_initializers_present_flag == 
rhs.sps_palette_predictor_initializers_present_flag ) && + ( intra_boundary_filtering_disabled_flag == rhs.intra_boundary_filtering_disabled_flag ); + } + + bool operator!=( H265SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t sps_temporal_id_nesting_flag : 1; + uint32_t separate_colour_plane_flag : 1; + uint32_t conformance_window_flag : 1; + uint32_t sps_sub_layer_ordering_info_present_flag : 1; + uint32_t scaling_list_enabled_flag : 1; + uint32_t sps_scaling_list_data_present_flag : 1; + uint32_t amp_enabled_flag : 1; + uint32_t sample_adaptive_offset_enabled_flag : 1; + uint32_t pcm_enabled_flag : 1; + uint32_t pcm_loop_filter_disabled_flag : 1; + uint32_t long_term_ref_pics_present_flag : 1; + uint32_t sps_temporal_mvp_enabled_flag : 1; + uint32_t strong_intra_smoothing_enabled_flag : 1; + uint32_t vui_parameters_present_flag : 1; + uint32_t sps_extension_present_flag : 1; + uint32_t sps_range_extension_flag : 1; + uint32_t transform_skip_rotation_enabled_flag : 1; + uint32_t transform_skip_context_enabled_flag : 1; + uint32_t implicit_rdpcm_enabled_flag : 1; + uint32_t explicit_rdpcm_enabled_flag : 1; + uint32_t extended_precision_processing_flag : 1; + uint32_t intra_smoothing_disabled_flag : 1; + uint32_t high_precision_offsets_enabled_flag : 1; + uint32_t persistent_rice_adaptation_enabled_flag : 1; + uint32_t cabac_bypass_alignment_enabled_flag : 1; + uint32_t sps_scc_extension_flag : 1; + uint32_t sps_curr_pic_ref_enabled_flag : 1; + uint32_t palette_mode_enabled_flag : 1; + uint32_t sps_palette_predictor_initializers_present_flag : 1; + uint32_t intra_boundary_filtering_disabled_flag : 1; + }; + + struct H265ShortTermRefPicSetFlags + { + using NativeType = StdVideoH265ShortTermRefPicSetFlags; + + operator StdVideoH265ShortTermRefPicSetFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ShortTermRefPicSetFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ShortTermRefPicSetFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH265ShortTermRefPicSetFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H265ShortTermRefPicSetFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( inter_ref_pic_set_prediction_flag == rhs.inter_ref_pic_set_prediction_flag ) && ( delta_rps_sign == rhs.delta_rps_sign ); + } + + bool operator!=( H265ShortTermRefPicSetFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t inter_ref_pic_set_prediction_flag : 1; + uint32_t delta_rps_sign : 1; + }; + + struct H265ShortTermRefPicSet + { + using NativeType = StdVideoH265ShortTermRefPicSet; + + operator StdVideoH265ShortTermRefPicSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ShortTermRefPicSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ShortTermRefPicSet const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH265ShortTermRefPicSet *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H265ShortTermRefPicSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( delta_idx_minus1 == rhs.delta_idx_minus1 ) && ( use_delta_flag == rhs.use_delta_flag ) && + ( abs_delta_rps_minus1 == 
rhs.abs_delta_rps_minus1 ) && ( used_by_curr_pic_flag == rhs.used_by_curr_pic_flag ) && + ( used_by_curr_pic_s0_flag == rhs.used_by_curr_pic_s0_flag ) && ( used_by_curr_pic_s1_flag == rhs.used_by_curr_pic_s1_flag ) && + ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( reserved3 == rhs.reserved3 ) && + ( num_negative_pics == rhs.num_negative_pics ) && ( num_positive_pics == rhs.num_positive_pics ) && + ( delta_poc_s0_minus1 == rhs.delta_poc_s0_minus1 ) && ( delta_poc_s1_minus1 == rhs.delta_poc_s1_minus1 ); + } + + bool operator!=( H265ShortTermRefPicSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ShortTermRefPicSetFlags flags = {}; + uint32_t delta_idx_minus1 = {}; + uint16_t use_delta_flag = {}; + uint16_t abs_delta_rps_minus1 = {}; + uint16_t used_by_curr_pic_flag = {}; + uint16_t used_by_curr_pic_s0_flag = {}; + uint16_t used_by_curr_pic_s1_flag = {}; + uint16_t reserved1 = {}; + uint8_t reserved2 = {}; + uint8_t reserved3 = {}; + uint8_t num_negative_pics = {}; + uint8_t num_positive_pics = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_poc_s0_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_poc_s1_minus1 = {}; + }; + + struct H265LongTermRefPicsSps + { + using NativeType = StdVideoH265LongTermRefPicsSps; + + operator StdVideoH265LongTermRefPicsSps const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265LongTermRefPicsSps &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265LongTermRefPicsSps const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH265LongTermRefPicsSps *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H265LongTermRefPicsSps const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( used_by_curr_pic_lt_sps_flag == rhs.used_by_curr_pic_lt_sps_flag ) && ( lt_ref_pic_poc_lsb_sps == rhs.lt_ref_pic_poc_lsb_sps ); + } + + bool operator!=( H265LongTermRefPicsSps const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t used_by_curr_pic_lt_sps_flag = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D lt_ref_pic_poc_lsb_sps = {}; + }; + + struct H265SequenceParameterSet + { + using NativeType = StdVideoH265SequenceParameterSet; + + operator StdVideoH265SequenceParameterSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SequenceParameterSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SequenceParameterSet const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH265SequenceParameterSet *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H265SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( chroma_format_idc == rhs.chroma_format_idc ) && ( pic_width_in_luma_samples == rhs.pic_width_in_luma_samples ) && + ( pic_height_in_luma_samples == rhs.pic_height_in_luma_samples ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + ( sps_max_sub_layers_minus1 == rhs.sps_max_sub_layers_minus1 ) && ( sps_seq_parameter_set_id == rhs.sps_seq_parameter_set_id ) && + ( bit_depth_luma_minus8 == rhs.bit_depth_luma_minus8 ) && ( bit_depth_chroma_minus8 == rhs.bit_depth_chroma_minus8 ) && + ( 
log2_max_pic_order_cnt_lsb_minus4 == rhs.log2_max_pic_order_cnt_lsb_minus4 ) && + ( log2_min_luma_coding_block_size_minus3 == rhs.log2_min_luma_coding_block_size_minus3 ) && + ( log2_diff_max_min_luma_coding_block_size == rhs.log2_diff_max_min_luma_coding_block_size ) && + ( log2_min_luma_transform_block_size_minus2 == rhs.log2_min_luma_transform_block_size_minus2 ) && + ( log2_diff_max_min_luma_transform_block_size == rhs.log2_diff_max_min_luma_transform_block_size ) && + ( max_transform_hierarchy_depth_inter == rhs.max_transform_hierarchy_depth_inter ) && + ( max_transform_hierarchy_depth_intra == rhs.max_transform_hierarchy_depth_intra ) && + ( num_short_term_ref_pic_sets == rhs.num_short_term_ref_pic_sets ) && ( num_long_term_ref_pics_sps == rhs.num_long_term_ref_pics_sps ) && + ( pcm_sample_bit_depth_luma_minus1 == rhs.pcm_sample_bit_depth_luma_minus1 ) && + ( pcm_sample_bit_depth_chroma_minus1 == rhs.pcm_sample_bit_depth_chroma_minus1 ) && + ( log2_min_pcm_luma_coding_block_size_minus3 == rhs.log2_min_pcm_luma_coding_block_size_minus3 ) && + ( log2_diff_max_min_pcm_luma_coding_block_size == rhs.log2_diff_max_min_pcm_luma_coding_block_size ) && ( reserved1 == rhs.reserved1 ) && + ( reserved2 == rhs.reserved2 ) && ( palette_max_size == rhs.palette_max_size ) && + ( delta_palette_max_predictor_size == rhs.delta_palette_max_predictor_size ) && + ( motion_vector_resolution_control_idc == rhs.motion_vector_resolution_control_idc ) && + ( sps_num_palette_predictor_initializers_minus1 == rhs.sps_num_palette_predictor_initializers_minus1 ) && + ( conf_win_left_offset == rhs.conf_win_left_offset ) && ( conf_win_right_offset == rhs.conf_win_right_offset ) && + ( conf_win_top_offset == rhs.conf_win_top_offset ) && ( conf_win_bottom_offset == rhs.conf_win_bottom_offset ) && + ( pProfileTierLevel == rhs.pProfileTierLevel ) && ( pDecPicBufMgr == rhs.pDecPicBufMgr ) && ( pScalingLists == rhs.pScalingLists ) && + ( pShortTermRefPicSet == rhs.pShortTermRefPicSet ) && ( pLongTermRefPicsSps == rhs.pLongTermRefPicsSps ) && + ( pSequenceParameterSetVui == rhs.pSequenceParameterSetVui ) && ( pPredictorPaletteEntries == rhs.pPredictorPaletteEntries ); + } + + bool operator!=( H265SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SpsFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ChromaFormatIdc chroma_format_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ChromaFormatIdc::eMonochrome; + uint32_t pic_width_in_luma_samples = {}; + uint32_t pic_height_in_luma_samples = {}; + uint8_t sps_video_parameter_set_id = {}; + uint8_t sps_max_sub_layers_minus1 = {}; + uint8_t sps_seq_parameter_set_id = {}; + uint8_t bit_depth_luma_minus8 = {}; + uint8_t bit_depth_chroma_minus8 = {}; + uint8_t log2_max_pic_order_cnt_lsb_minus4 = {}; + uint8_t log2_min_luma_coding_block_size_minus3 = {}; + uint8_t log2_diff_max_min_luma_coding_block_size = {}; + uint8_t log2_min_luma_transform_block_size_minus2 = {}; + uint8_t log2_diff_max_min_luma_transform_block_size = {}; + uint8_t max_transform_hierarchy_depth_inter = {}; + uint8_t max_transform_hierarchy_depth_intra = {}; + uint8_t num_short_term_ref_pic_sets = {}; + uint8_t num_long_term_ref_pics_sps = {}; + uint8_t pcm_sample_bit_depth_luma_minus1 = {}; + uint8_t pcm_sample_bit_depth_chroma_minus1 = {}; + uint8_t log2_min_pcm_luma_coding_block_size_minus3 = {}; + uint8_t log2_diff_max_min_pcm_luma_coding_block_size = {}; + 
uint8_t reserved1 = {}; + uint8_t reserved2 = {}; + uint8_t palette_max_size = {}; + uint8_t delta_palette_max_predictor_size = {}; + uint8_t motion_vector_resolution_control_idc = {}; + uint8_t sps_num_palette_predictor_initializers_minus1 = {}; + uint32_t conf_win_left_offset = {}; + uint32_t conf_win_right_offset = {}; + uint32_t conf_win_top_offset = {}; + uint32_t conf_win_bottom_offset = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileTierLevel * pProfileTierLevel = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265DecPicBufMgr * pDecPicBufMgr = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ScalingLists * pScalingLists = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ShortTermRefPicSet * pShortTermRefPicSet = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265LongTermRefPicsSps * pLongTermRefPicsSps = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SequenceParameterSetVui * pSequenceParameterSetVui = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PredictorPaletteEntries * pPredictorPaletteEntries = {}; + }; + + struct H265PpsFlags + { + using NativeType = StdVideoH265PpsFlags; + + operator StdVideoH265PpsFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265PpsFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265PpsFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH265PpsFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H265PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( dependent_slice_segments_enabled_flag == rhs.dependent_slice_segments_enabled_flag ) && + ( output_flag_present_flag == rhs.output_flag_present_flag ) && ( sign_data_hiding_enabled_flag == rhs.sign_data_hiding_enabled_flag ) && + ( cabac_init_present_flag == rhs.cabac_init_present_flag ) && ( constrained_intra_pred_flag == rhs.constrained_intra_pred_flag ) && + ( transform_skip_enabled_flag == rhs.transform_skip_enabled_flag ) && ( cu_qp_delta_enabled_flag == rhs.cu_qp_delta_enabled_flag ) && + ( pps_slice_chroma_qp_offsets_present_flag == rhs.pps_slice_chroma_qp_offsets_present_flag ) && + ( weighted_pred_flag == rhs.weighted_pred_flag ) && ( weighted_bipred_flag == rhs.weighted_bipred_flag ) && + ( transquant_bypass_enabled_flag == rhs.transquant_bypass_enabled_flag ) && ( tiles_enabled_flag == rhs.tiles_enabled_flag ) && + ( entropy_coding_sync_enabled_flag == rhs.entropy_coding_sync_enabled_flag ) && ( uniform_spacing_flag == rhs.uniform_spacing_flag ) && + ( loop_filter_across_tiles_enabled_flag == rhs.loop_filter_across_tiles_enabled_flag ) && + ( pps_loop_filter_across_slices_enabled_flag == rhs.pps_loop_filter_across_slices_enabled_flag ) && + ( deblocking_filter_control_present_flag == rhs.deblocking_filter_control_present_flag ) && + ( deblocking_filter_override_enabled_flag == rhs.deblocking_filter_override_enabled_flag ) && + ( pps_deblocking_filter_disabled_flag == rhs.pps_deblocking_filter_disabled_flag ) && + ( pps_scaling_list_data_present_flag == rhs.pps_scaling_list_data_present_flag ) && + ( lists_modification_present_flag == rhs.lists_modification_present_flag ) && + ( slice_segment_header_extension_present_flag == rhs.slice_segment_header_extension_present_flag ) && + ( pps_extension_present_flag == rhs.pps_extension_present_flag ) && + ( 
cross_component_prediction_enabled_flag == rhs.cross_component_prediction_enabled_flag ) && + ( chroma_qp_offset_list_enabled_flag == rhs.chroma_qp_offset_list_enabled_flag ) && + ( pps_curr_pic_ref_enabled_flag == rhs.pps_curr_pic_ref_enabled_flag ) && + ( residual_adaptive_colour_transform_enabled_flag == rhs.residual_adaptive_colour_transform_enabled_flag ) && + ( pps_slice_act_qp_offsets_present_flag == rhs.pps_slice_act_qp_offsets_present_flag ) && + ( pps_palette_predictor_initializers_present_flag == rhs.pps_palette_predictor_initializers_present_flag ) && + ( monochrome_palette_flag == rhs.monochrome_palette_flag ) && ( pps_range_extension_flag == rhs.pps_range_extension_flag ); + } + + bool operator!=( H265PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t dependent_slice_segments_enabled_flag : 1; + uint32_t output_flag_present_flag : 1; + uint32_t sign_data_hiding_enabled_flag : 1; + uint32_t cabac_init_present_flag : 1; + uint32_t constrained_intra_pred_flag : 1; + uint32_t transform_skip_enabled_flag : 1; + uint32_t cu_qp_delta_enabled_flag : 1; + uint32_t pps_slice_chroma_qp_offsets_present_flag : 1; + uint32_t weighted_pred_flag : 1; + uint32_t weighted_bipred_flag : 1; + uint32_t transquant_bypass_enabled_flag : 1; + uint32_t tiles_enabled_flag : 1; + uint32_t entropy_coding_sync_enabled_flag : 1; + uint32_t uniform_spacing_flag : 1; + uint32_t loop_filter_across_tiles_enabled_flag : 1; + uint32_t pps_loop_filter_across_slices_enabled_flag : 1; + uint32_t deblocking_filter_control_present_flag : 1; + uint32_t deblocking_filter_override_enabled_flag : 1; + uint32_t pps_deblocking_filter_disabled_flag : 1; + uint32_t pps_scaling_list_data_present_flag : 1; + uint32_t lists_modification_present_flag : 1; + uint32_t slice_segment_header_extension_present_flag : 1; + uint32_t pps_extension_present_flag : 1; + uint32_t cross_component_prediction_enabled_flag : 1; + uint32_t chroma_qp_offset_list_enabled_flag : 1; + uint32_t pps_curr_pic_ref_enabled_flag : 1; + uint32_t residual_adaptive_colour_transform_enabled_flag : 1; + uint32_t pps_slice_act_qp_offsets_present_flag : 1; + uint32_t pps_palette_predictor_initializers_present_flag : 1; + uint32_t monochrome_palette_flag : 1; + uint32_t pps_range_extension_flag : 1; + }; + + struct H265PictureParameterSet + { + using NativeType = StdVideoH265PictureParameterSet; + + operator StdVideoH265PictureParameterSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265PictureParameterSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265PictureParameterSet const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoH265PictureParameterSet *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( H265PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) && + ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + ( num_extra_slice_header_bits == rhs.num_extra_slice_header_bits ) && + ( num_ref_idx_l0_default_active_minus1 == rhs.num_ref_idx_l0_default_active_minus1 ) && + ( num_ref_idx_l1_default_active_minus1 == rhs.num_ref_idx_l1_default_active_minus1 ) && ( init_qp_minus26 == rhs.init_qp_minus26 ) && + ( diff_cu_qp_delta_depth == 
rhs.diff_cu_qp_delta_depth ) && ( pps_cb_qp_offset == rhs.pps_cb_qp_offset ) && + ( pps_cr_qp_offset == rhs.pps_cr_qp_offset ) && ( pps_beta_offset_div2 == rhs.pps_beta_offset_div2 ) && + ( pps_tc_offset_div2 == rhs.pps_tc_offset_div2 ) && ( log2_parallel_merge_level_minus2 == rhs.log2_parallel_merge_level_minus2 ) && + ( log2_max_transform_skip_block_size_minus2 == rhs.log2_max_transform_skip_block_size_minus2 ) && + ( diff_cu_chroma_qp_offset_depth == rhs.diff_cu_chroma_qp_offset_depth ) && + ( chroma_qp_offset_list_len_minus1 == rhs.chroma_qp_offset_list_len_minus1 ) && ( cb_qp_offset_list == rhs.cb_qp_offset_list ) && + ( cr_qp_offset_list == rhs.cr_qp_offset_list ) && ( log2_sao_offset_scale_luma == rhs.log2_sao_offset_scale_luma ) && + ( log2_sao_offset_scale_chroma == rhs.log2_sao_offset_scale_chroma ) && ( pps_act_y_qp_offset_plus5 == rhs.pps_act_y_qp_offset_plus5 ) && + ( pps_act_cb_qp_offset_plus5 == rhs.pps_act_cb_qp_offset_plus5 ) && ( pps_act_cr_qp_offset_plus3 == rhs.pps_act_cr_qp_offset_plus3 ) && + ( pps_num_palette_predictor_initializers == rhs.pps_num_palette_predictor_initializers ) && + ( luma_bit_depth_entry_minus8 == rhs.luma_bit_depth_entry_minus8 ) && ( chroma_bit_depth_entry_minus8 == rhs.chroma_bit_depth_entry_minus8 ) && + ( num_tile_columns_minus1 == rhs.num_tile_columns_minus1 ) && ( num_tile_rows_minus1 == rhs.num_tile_rows_minus1 ) && + ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( column_width_minus1 == rhs.column_width_minus1 ) && + ( row_height_minus1 == rhs.row_height_minus1 ) && ( reserved3 == rhs.reserved3 ) && ( pScalingLists == rhs.pScalingLists ) && + ( pPredictorPaletteEntries == rhs.pPredictorPaletteEntries ); + } + + bool operator!=( H265PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PpsFlags flags = {}; + uint8_t pps_pic_parameter_set_id = {}; + uint8_t pps_seq_parameter_set_id = {}; + uint8_t sps_video_parameter_set_id = {}; + uint8_t num_extra_slice_header_bits = {}; + uint8_t num_ref_idx_l0_default_active_minus1 = {}; + uint8_t num_ref_idx_l1_default_active_minus1 = {}; + int8_t init_qp_minus26 = {}; + uint8_t diff_cu_qp_delta_depth = {}; + int8_t pps_cb_qp_offset = {}; + int8_t pps_cr_qp_offset = {}; + int8_t pps_beta_offset_div2 = {}; + int8_t pps_tc_offset_div2 = {}; + uint8_t log2_parallel_merge_level_minus2 = {}; + uint8_t log2_max_transform_skip_block_size_minus2 = {}; + uint8_t diff_cu_chroma_qp_offset_depth = {}; + uint8_t chroma_qp_offset_list_len_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cb_qp_offset_list = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cr_qp_offset_list = {}; + uint8_t log2_sao_offset_scale_luma = {}; + uint8_t log2_sao_offset_scale_chroma = {}; + int8_t pps_act_y_qp_offset_plus5 = {}; + int8_t pps_act_cb_qp_offset_plus5 = {}; + int8_t pps_act_cr_qp_offset_plus3 = {}; + uint8_t pps_num_palette_predictor_initializers = {}; + uint8_t luma_bit_depth_entry_minus8 = {}; + uint8_t chroma_bit_depth_entry_minus8 = {}; + uint8_t num_tile_columns_minus1 = {}; + uint8_t num_tile_rows_minus1 = {}; + uint8_t reserved1 = {}; + uint8_t reserved2 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D column_width_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D row_height_minus1 = {}; + uint32_t reserved3 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ScalingLists * pScalingLists = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PredictorPaletteEntries * 
pPredictorPaletteEntries = {}; + }; + + //=== vulkan_video_codec_h265std_decode === + + struct DecodeH265PictureInfoFlags + { + using NativeType = StdVideoDecodeH265PictureInfoFlags; + + operator StdVideoDecodeH265PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH265PictureInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH265PictureInfoFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoDecodeH265PictureInfoFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( DecodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( IrapPicFlag == rhs.IrapPicFlag ) && ( IdrPicFlag == rhs.IdrPicFlag ) && ( IsReference == rhs.IsReference ) && + ( short_term_ref_pic_set_sps_flag == rhs.short_term_ref_pic_set_sps_flag ); + } + + bool operator!=( DecodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t IrapPicFlag : 1; + uint32_t IdrPicFlag : 1; + uint32_t IsReference : 1; + uint32_t short_term_ref_pic_set_sps_flag : 1; + }; + + struct DecodeH265PictureInfo + { + using NativeType = StdVideoDecodeH265PictureInfo; + + operator StdVideoDecodeH265PictureInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH265PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH265PictureInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoDecodeH265PictureInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( DecodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) && + ( NumDeltaPocsOfRefRpsIdx == rhs.NumDeltaPocsOfRefRpsIdx ) && ( PicOrderCntVal == rhs.PicOrderCntVal ) && + ( NumBitsForSTRefPicSetInSlice == rhs.NumBitsForSTRefPicSetInSlice ) && ( reserved == rhs.reserved ) && + ( RefPicSetStCurrBefore == rhs.RefPicSetStCurrBefore ) && ( RefPicSetStCurrAfter == rhs.RefPicSetStCurrAfter ) && + ( RefPicSetLtCurr == rhs.RefPicSetLtCurr ); + } + + bool operator!=( DecodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH265PictureInfoFlags flags = {}; + uint8_t sps_video_parameter_set_id = {}; + uint8_t pps_seq_parameter_set_id = {}; + uint8_t pps_pic_parameter_set_id = {}; + uint8_t NumDeltaPocsOfRefRpsIdx = {}; + int32_t PicOrderCntVal = {}; + uint16_t NumBitsForSTRefPicSetInSlice = {}; + uint16_t reserved = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicSetStCurrBefore = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicSetStCurrAfter = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicSetLtCurr = {}; + }; + + struct DecodeH265ReferenceInfoFlags + { + using NativeType = StdVideoDecodeH265ReferenceInfoFlags; + + operator StdVideoDecodeH265ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH265ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
StdVideoDecodeH265ReferenceInfoFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoDecodeH265ReferenceInfoFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( DecodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( unused_for_reference == rhs.unused_for_reference ); + } + + bool operator!=( DecodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t used_for_long_term_reference : 1; + uint32_t unused_for_reference : 1; + }; + + struct DecodeH265ReferenceInfo + { + using NativeType = StdVideoDecodeH265ReferenceInfo; + + operator StdVideoDecodeH265ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH265ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH265ReferenceInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoDecodeH265ReferenceInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( DecodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( PicOrderCntVal == rhs.PicOrderCntVal ); + } + + bool operator!=( DecodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH265ReferenceInfoFlags flags = {}; + int32_t PicOrderCntVal = {}; + }; + + //=== vulkan_video_codec_h265std_encode === + + struct EncodeH265WeightTableFlags + { + using NativeType = StdVideoEncodeH265WeightTableFlags; + + operator StdVideoEncodeH265WeightTableFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265WeightTableFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265WeightTableFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH265WeightTableFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH265WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( luma_weight_l0_flag == rhs.luma_weight_l0_flag ) && ( chroma_weight_l0_flag == rhs.chroma_weight_l0_flag ) && + ( luma_weight_l1_flag == rhs.luma_weight_l1_flag ) && ( chroma_weight_l1_flag == rhs.chroma_weight_l1_flag ); + } + + bool operator!=( EncodeH265WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint16_t luma_weight_l0_flag = {}; + uint16_t chroma_weight_l0_flag = {}; + uint16_t luma_weight_l1_flag = {}; + uint16_t chroma_weight_l1_flag = {}; + }; + + struct EncodeH265WeightTable + { + using NativeType = StdVideoEncodeH265WeightTable; + + operator StdVideoEncodeH265WeightTable const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265WeightTable &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265WeightTable const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH265WeightTable *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH265WeightTable const & 
rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( luma_log2_weight_denom == rhs.luma_log2_weight_denom ) && + ( delta_chroma_log2_weight_denom == rhs.delta_chroma_log2_weight_denom ) && ( delta_luma_weight_l0 == rhs.delta_luma_weight_l0 ) && + ( luma_offset_l0 == rhs.luma_offset_l0 ) && ( delta_chroma_weight_l0 == rhs.delta_chroma_weight_l0 ) && + ( delta_chroma_offset_l0 == rhs.delta_chroma_offset_l0 ) && ( delta_luma_weight_l1 == rhs.delta_luma_weight_l1 ) && + ( luma_offset_l1 == rhs.luma_offset_l1 ) && ( delta_chroma_weight_l1 == rhs.delta_chroma_weight_l1 ) && + ( delta_chroma_offset_l1 == rhs.delta_chroma_offset_l1 ); + } + + bool operator!=( EncodeH265WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265WeightTableFlags flags = {}; + uint8_t luma_log2_weight_denom = {}; + int8_t delta_chroma_log2_weight_denom = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_luma_weight_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_offset_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D delta_chroma_weight_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D delta_chroma_offset_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_luma_weight_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_offset_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D delta_chroma_weight_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D delta_chroma_offset_l1 = {}; + }; + + struct EncodeH265SliceSegmentHeaderFlags + { + using NativeType = StdVideoEncodeH265SliceSegmentHeaderFlags; + + operator StdVideoEncodeH265SliceSegmentHeaderFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265SliceSegmentHeaderFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265SliceSegmentHeaderFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH265SliceSegmentHeaderFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH265SliceSegmentHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( first_slice_segment_in_pic_flag == rhs.first_slice_segment_in_pic_flag ) && + ( dependent_slice_segment_flag == rhs.dependent_slice_segment_flag ) && ( slice_sao_luma_flag == rhs.slice_sao_luma_flag ) && + ( slice_sao_chroma_flag == rhs.slice_sao_chroma_flag ) && ( num_ref_idx_active_override_flag == rhs.num_ref_idx_active_override_flag ) && + ( mvd_l1_zero_flag == rhs.mvd_l1_zero_flag ) && ( cabac_init_flag == rhs.cabac_init_flag ) && + ( cu_chroma_qp_offset_enabled_flag == rhs.cu_chroma_qp_offset_enabled_flag ) && + ( deblocking_filter_override_flag == rhs.deblocking_filter_override_flag ) && + ( slice_deblocking_filter_disabled_flag == rhs.slice_deblocking_filter_disabled_flag ) && + ( collocated_from_l0_flag == rhs.collocated_from_l0_flag ) && + ( slice_loop_filter_across_slices_enabled_flag == rhs.slice_loop_filter_across_slices_enabled_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH265SliceSegmentHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t first_slice_segment_in_pic_flag : 1; + uint32_t dependent_slice_segment_flag : 1; + uint32_t slice_sao_luma_flag : 1; + uint32_t slice_sao_chroma_flag : 1; + uint32_t num_ref_idx_active_override_flag : 1; + uint32_t mvd_l1_zero_flag : 1; + uint32_t cabac_init_flag : 
1; + uint32_t cu_chroma_qp_offset_enabled_flag : 1; + uint32_t deblocking_filter_override_flag : 1; + uint32_t slice_deblocking_filter_disabled_flag : 1; + uint32_t collocated_from_l0_flag : 1; + uint32_t slice_loop_filter_across_slices_enabled_flag : 1; + uint32_t reserved : 20; + }; + + struct EncodeH265SliceSegmentHeader + { + using NativeType = StdVideoEncodeH265SliceSegmentHeader; + + operator StdVideoEncodeH265SliceSegmentHeader const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265SliceSegmentHeader &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265SliceSegmentHeader const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH265SliceSegmentHeader *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH265SliceSegmentHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( slice_type == rhs.slice_type ) && ( slice_segment_address == rhs.slice_segment_address ) && + ( collocated_ref_idx == rhs.collocated_ref_idx ) && ( MaxNumMergeCand == rhs.MaxNumMergeCand ) && + ( slice_cb_qp_offset == rhs.slice_cb_qp_offset ) && ( slice_cr_qp_offset == rhs.slice_cr_qp_offset ) && + ( slice_beta_offset_div2 == rhs.slice_beta_offset_div2 ) && ( slice_tc_offset_div2 == rhs.slice_tc_offset_div2 ) && + ( slice_act_y_qp_offset == rhs.slice_act_y_qp_offset ) && ( slice_act_cb_qp_offset == rhs.slice_act_cb_qp_offset ) && + ( slice_act_cr_qp_offset == rhs.slice_act_cr_qp_offset ) && ( slice_qp_delta == rhs.slice_qp_delta ) && ( reserved1 == rhs.reserved1 ) && + ( pWeightTable == rhs.pWeightTable ); + } + + bool operator!=( EncodeH265SliceSegmentHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265SliceSegmentHeaderFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SliceType slice_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SliceType::eB; + uint32_t slice_segment_address = {}; + uint8_t collocated_ref_idx = {}; + uint8_t MaxNumMergeCand = {}; + int8_t slice_cb_qp_offset = {}; + int8_t slice_cr_qp_offset = {}; + int8_t slice_beta_offset_div2 = {}; + int8_t slice_tc_offset_div2 = {}; + int8_t slice_act_y_qp_offset = {}; + int8_t slice_act_cb_qp_offset = {}; + int8_t slice_act_cr_qp_offset = {}; + int8_t slice_qp_delta = {}; + uint16_t reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265WeightTable * pWeightTable = {}; + }; + + struct EncodeH265ReferenceListsInfoFlags + { + using NativeType = StdVideoEncodeH265ReferenceListsInfoFlags; + + operator StdVideoEncodeH265ReferenceListsInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceListsInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceListsInfoFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceListsInfoFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH265ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( ref_pic_list_modification_flag_l0 == rhs.ref_pic_list_modification_flag_l0 ) && + ( ref_pic_list_modification_flag_l1 == rhs.ref_pic_list_modification_flag_l1 ) && ( reserved 
== rhs.reserved ); + } + + bool operator!=( EncodeH265ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t ref_pic_list_modification_flag_l0 : 1; + uint32_t ref_pic_list_modification_flag_l1 : 1; + uint32_t reserved : 30; + }; + + struct EncodeH265ReferenceListsInfo + { + using NativeType = StdVideoEncodeH265ReferenceListsInfo; + + operator StdVideoEncodeH265ReferenceListsInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceListsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceListsInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceListsInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH265ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( num_ref_idx_l0_active_minus1 == rhs.num_ref_idx_l0_active_minus1 ) && + ( num_ref_idx_l1_active_minus1 == rhs.num_ref_idx_l1_active_minus1 ) && ( RefPicList0 == rhs.RefPicList0 ) && + ( RefPicList1 == rhs.RefPicList1 ) && ( list_entry_l0 == rhs.list_entry_l0 ) && ( list_entry_l1 == rhs.list_entry_l1 ); + } + + bool operator!=( EncodeH265ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265ReferenceListsInfoFlags flags = {}; + uint8_t num_ref_idx_l0_active_minus1 = {}; + uint8_t num_ref_idx_l1_active_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicList0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicList1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D list_entry_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D list_entry_l1 = {}; + }; + + struct EncodeH265PictureInfoFlags + { + using NativeType = StdVideoEncodeH265PictureInfoFlags; + + operator StdVideoEncodeH265PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265PictureInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265PictureInfoFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH265PictureInfoFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( is_reference == rhs.is_reference ) && ( IrapPicFlag == rhs.IrapPicFlag ) && + ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( discardable_flag == rhs.discardable_flag ) && + ( cross_layer_bla_flag == rhs.cross_layer_bla_flag ) && ( pic_output_flag == rhs.pic_output_flag ) && + ( no_output_of_prior_pics_flag == rhs.no_output_of_prior_pics_flag ) && + ( short_term_ref_pic_set_sps_flag == rhs.short_term_ref_pic_set_sps_flag ) && + ( slice_temporal_mvp_enabled_flag == rhs.slice_temporal_mvp_enabled_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t is_reference : 1; + uint32_t IrapPicFlag : 1; + uint32_t used_for_long_term_reference : 1; + uint32_t discardable_flag : 1; + uint32_t cross_layer_bla_flag : 1; + uint32_t pic_output_flag : 1; + uint32_t no_output_of_prior_pics_flag : 1; + uint32_t 
short_term_ref_pic_set_sps_flag : 1; + uint32_t slice_temporal_mvp_enabled_flag : 1; + uint32_t reserved : 23; + }; + + struct EncodeH265LongTermRefPics + { + using NativeType = StdVideoEncodeH265LongTermRefPics; + + operator StdVideoEncodeH265LongTermRefPics const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265LongTermRefPics &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265LongTermRefPics const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH265LongTermRefPics *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH265LongTermRefPics const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( num_long_term_sps == rhs.num_long_term_sps ) && ( num_long_term_pics == rhs.num_long_term_pics ) && ( lt_idx_sps == rhs.lt_idx_sps ) && + ( poc_lsb_lt == rhs.poc_lsb_lt ) && ( used_by_curr_pic_lt_flag == rhs.used_by_curr_pic_lt_flag ) && + ( delta_poc_msb_present_flag == rhs.delta_poc_msb_present_flag ) && ( delta_poc_msb_cycle_lt == rhs.delta_poc_msb_cycle_lt ); + } + + bool operator!=( EncodeH265LongTermRefPics const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint8_t num_long_term_sps = {}; + uint8_t num_long_term_pics = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D lt_idx_sps = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D poc_lsb_lt = {}; + uint16_t used_by_curr_pic_lt_flag = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_poc_msb_present_flag = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_poc_msb_cycle_lt = {}; + }; + + struct EncodeH265PictureInfo + { + using NativeType = StdVideoEncodeH265PictureInfo; + + operator StdVideoEncodeH265PictureInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265PictureInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH265PictureInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( pic_type == rhs.pic_type ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) && + ( short_term_ref_pic_set_idx == rhs.short_term_ref_pic_set_idx ) && ( PicOrderCntVal == rhs.PicOrderCntVal ) && + ( TemporalId == rhs.TemporalId ) && ( reserved1 == rhs.reserved1 ) && ( pRefLists == rhs.pRefLists ) && + ( pShortTermRefPicSet == rhs.pShortTermRefPicSet ) && ( pLongTermRefPics == rhs.pLongTermRefPics ); + } + + bool operator!=( EncodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265PictureInfoFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType pic_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType::eP; + uint8_t sps_video_parameter_set_id = {}; + uint8_t pps_seq_parameter_set_id = {}; + uint8_t pps_pic_parameter_set_id = {}; + uint8_t short_term_ref_pic_set_idx = {}; + int32_t PicOrderCntVal = {}; + uint8_t TemporalId = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = 
{}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265ReferenceListsInfo * pRefLists = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ShortTermRefPicSet * pShortTermRefPicSet = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265LongTermRefPics * pLongTermRefPics = {}; + }; + + struct EncodeH265ReferenceInfoFlags + { + using NativeType = StdVideoEncodeH265ReferenceInfoFlags; + + operator StdVideoEncodeH265ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceInfoFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceInfoFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( unused_for_reference == rhs.unused_for_reference ) && + ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t used_for_long_term_reference : 1; + uint32_t unused_for_reference : 1; + uint32_t reserved : 30; + }; + + struct EncodeH265ReferenceInfo + { + using NativeType = StdVideoEncodeH265ReferenceInfo; + + operator StdVideoEncodeH265ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( pic_type == rhs.pic_type ) && ( PicOrderCntVal == rhs.PicOrderCntVal ) && ( TemporalId == rhs.TemporalId ); + } + + bool operator!=( EncodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265ReferenceInfoFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType pic_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType::eP; + int32_t PicOrderCntVal = {}; + uint8_t TemporalId = {}; + }; + + //=== vulkan_video_codec_av1std === + + struct AV1ColorConfigFlags + { + using NativeType = StdVideoAV1ColorConfigFlags; + + operator StdVideoAV1ColorConfigFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1ColorConfigFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1ColorConfigFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoAV1ColorConfigFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( AV1ColorConfigFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( mono_chrome == rhs.mono_chrome ) && ( color_range == rhs.color_range ) && ( separate_uv_delta_q == rhs.separate_uv_delta_q ) && + ( color_description_present_flag == 
rhs.color_description_present_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( AV1ColorConfigFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t mono_chrome : 1; + uint32_t color_range : 1; + uint32_t separate_uv_delta_q : 1; + uint32_t color_description_present_flag : 1; + uint32_t reserved : 28; + }; + + struct AV1ColorConfig + { + using NativeType = StdVideoAV1ColorConfig; + + operator StdVideoAV1ColorConfig const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const StdVideoAV1ColorConfig *>( this ); + } + + operator StdVideoAV1ColorConfig &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<StdVideoAV1ColorConfig *>( this ); + } + + operator StdVideoAV1ColorConfig const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<const StdVideoAV1ColorConfig *>( this ); + } + + operator StdVideoAV1ColorConfig *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<StdVideoAV1ColorConfig *>( this ); + } + + bool operator==( AV1ColorConfig const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( BitDepth == rhs.BitDepth ) && ( subsampling_x == rhs.subsampling_x ) && ( subsampling_y == rhs.subsampling_y ) && + ( reserved1 == rhs.reserved1 ) && ( color_primaries == rhs.color_primaries ) && ( transfer_characteristics == rhs.transfer_characteristics ) && + ( matrix_coefficients == rhs.matrix_coefficients ) && ( chroma_sample_position == rhs.chroma_sample_position ); + } + + bool operator!=( AV1ColorConfig const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1ColorConfigFlags flags = {}; + uint8_t BitDepth = {}; + uint8_t subsampling_x = {}; + uint8_t subsampling_y = {}; + uint8_t reserved1 = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1ColorPrimaries color_primaries = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1ColorPrimaries::eBt709; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TransferCharacteristics transfer_characteristics = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TransferCharacteristics::eReserved0; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1MatrixCoefficients matrix_coefficients = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1MatrixCoefficients::eIdentity; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1ChromaSamplePosition chroma_sample_position = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1ChromaSamplePosition::eUnknown; + }; + + struct AV1TimingInfoFlags + { + using NativeType = StdVideoAV1TimingInfoFlags; + + operator StdVideoAV1TimingInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const StdVideoAV1TimingInfoFlags *>( this ); + } + + operator StdVideoAV1TimingInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<StdVideoAV1TimingInfoFlags *>( this ); + } + + operator StdVideoAV1TimingInfoFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<const StdVideoAV1TimingInfoFlags *>( this ); + } + + operator StdVideoAV1TimingInfoFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<StdVideoAV1TimingInfoFlags *>( this ); + } + + bool operator==( AV1TimingInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( equal_picture_interval == rhs.equal_picture_interval ) && ( reserved == rhs.reserved ); + } + + bool operator!=( AV1TimingInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t equal_picture_interval : 1; + uint32_t reserved : 31; + }; + + struct AV1TimingInfo + { + using NativeType = StdVideoAV1TimingInfo; + + operator StdVideoAV1TimingInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const StdVideoAV1TimingInfo *>( this ); + } +
+ operator StdVideoAV1TimingInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1TimingInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoAV1TimingInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( AV1TimingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( num_units_in_display_tick == rhs.num_units_in_display_tick ) && ( time_scale == rhs.time_scale ) && + ( num_ticks_per_picture_minus_1 == rhs.num_ticks_per_picture_minus_1 ); + } + + bool operator!=( AV1TimingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TimingInfoFlags flags = {}; + uint32_t num_units_in_display_tick = {}; + uint32_t time_scale = {}; + uint32_t num_ticks_per_picture_minus_1 = {}; + }; + + struct AV1LoopFilterFlags + { + using NativeType = StdVideoAV1LoopFilterFlags; + + operator StdVideoAV1LoopFilterFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1LoopFilterFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1LoopFilterFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoAV1LoopFilterFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( AV1LoopFilterFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( loop_filter_delta_enabled == rhs.loop_filter_delta_enabled ) && ( loop_filter_delta_update == rhs.loop_filter_delta_update ) && + ( reserved == rhs.reserved ); + } + + bool operator!=( AV1LoopFilterFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t loop_filter_delta_enabled : 1; + uint32_t loop_filter_delta_update : 1; + uint32_t reserved : 30; + }; + + struct AV1LoopFilter + { + using NativeType = StdVideoAV1LoopFilter; + + operator StdVideoAV1LoopFilter const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1LoopFilter &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1LoopFilter const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoAV1LoopFilter *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( AV1LoopFilter const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( loop_filter_level == rhs.loop_filter_level ) && ( loop_filter_sharpness == rhs.loop_filter_sharpness ) && + ( update_ref_delta == rhs.update_ref_delta ) && ( loop_filter_ref_deltas == rhs.loop_filter_ref_deltas ) && + ( update_mode_delta == rhs.update_mode_delta ) && ( loop_filter_mode_deltas == rhs.loop_filter_mode_deltas ); + } + + bool operator!=( AV1LoopFilter const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1LoopFilterFlags flags = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D loop_filter_level = {}; + uint8_t loop_filter_sharpness = {}; + uint8_t update_ref_delta = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D loop_filter_ref_deltas = {}; + uint8_t update_mode_delta = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D loop_filter_mode_deltas = {}; + }; + + struct AV1QuantizationFlags + { + using NativeType = StdVideoAV1QuantizationFlags; + + operator 
StdVideoAV1QuantizationFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1QuantizationFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1QuantizationFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoAV1QuantizationFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( AV1QuantizationFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( using_qmatrix == rhs.using_qmatrix ) && ( diff_uv_delta == rhs.diff_uv_delta ) && ( reserved == rhs.reserved ); + } + + bool operator!=( AV1QuantizationFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t using_qmatrix : 1; + uint32_t diff_uv_delta : 1; + uint32_t reserved : 30; + }; + + struct AV1Quantization + { + using NativeType = StdVideoAV1Quantization; + + operator StdVideoAV1Quantization const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1Quantization &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1Quantization const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoAV1Quantization *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( AV1Quantization const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( base_q_idx == rhs.base_q_idx ) && ( DeltaQYDc == rhs.DeltaQYDc ) && ( DeltaQUDc == rhs.DeltaQUDc ) && + ( DeltaQUAc == rhs.DeltaQUAc ) && ( DeltaQVDc == rhs.DeltaQVDc ) && ( DeltaQVAc == rhs.DeltaQVAc ) && ( qm_y == rhs.qm_y ) && + ( qm_u == rhs.qm_u ) && ( qm_v == rhs.qm_v ); + } + + bool operator!=( AV1Quantization const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1QuantizationFlags flags = {}; + uint8_t base_q_idx = {}; + int8_t DeltaQYDc = {}; + int8_t DeltaQUDc = {}; + int8_t DeltaQUAc = {}; + int8_t DeltaQVDc = {}; + int8_t DeltaQVAc = {}; + uint8_t qm_y = {}; + uint8_t qm_u = {}; + uint8_t qm_v = {}; + }; + + struct AV1Segmentation + { + using NativeType = StdVideoAV1Segmentation; + + operator StdVideoAV1Segmentation const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1Segmentation &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1Segmentation const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoAV1Segmentation *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( AV1Segmentation const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( FeatureEnabled == rhs.FeatureEnabled ) && ( FeatureData == rhs.FeatureData ); + } + + bool operator!=( AV1Segmentation const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D FeatureEnabled = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D FeatureData = {}; + }; + + struct AV1TileInfoFlags + { + using NativeType = StdVideoAV1TileInfoFlags; + + operator StdVideoAV1TileInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1TileInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1TileInfoFlags const *() const VULKAN_HPP_NOEXCEPT + 
{ + return reinterpret_cast( this ); + } + + operator StdVideoAV1TileInfoFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( AV1TileInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( uniform_tile_spacing_flag == rhs.uniform_tile_spacing_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( AV1TileInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t uniform_tile_spacing_flag : 1; + uint32_t reserved : 31; + }; + + struct AV1TileInfo + { + using NativeType = StdVideoAV1TileInfo; + + operator StdVideoAV1TileInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1TileInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1TileInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoAV1TileInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( AV1TileInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( TileCols == rhs.TileCols ) && ( TileRows == rhs.TileRows ) && + ( context_update_tile_id == rhs.context_update_tile_id ) && ( tile_size_bytes_minus_1 == rhs.tile_size_bytes_minus_1 ) && + ( reserved1 == rhs.reserved1 ) && ( pMiColStarts == rhs.pMiColStarts ) && ( pMiRowStarts == rhs.pMiRowStarts ) && + ( pWidthInSbsMinus1 == rhs.pWidthInSbsMinus1 ) && ( pHeightInSbsMinus1 == rhs.pHeightInSbsMinus1 ); + } + + bool operator!=( AV1TileInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TileInfoFlags flags = {}; + uint8_t TileCols = {}; + uint8_t TileRows = {}; + uint16_t context_update_tile_id = {}; + uint8_t tile_size_bytes_minus_1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + const uint16_t * pMiColStarts = {}; + const uint16_t * pMiRowStarts = {}; + const uint16_t * pWidthInSbsMinus1 = {}; + const uint16_t * pHeightInSbsMinus1 = {}; + }; + + struct AV1CDEF + { + using NativeType = StdVideoAV1CDEF; + + operator StdVideoAV1CDEF const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1CDEF &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1CDEF const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoAV1CDEF *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( AV1CDEF const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( cdef_damping_minus_3 == rhs.cdef_damping_minus_3 ) && ( cdef_bits == rhs.cdef_bits ) && ( cdef_y_pri_strength == rhs.cdef_y_pri_strength ) && + ( cdef_y_sec_strength == rhs.cdef_y_sec_strength ) && ( cdef_uv_pri_strength == rhs.cdef_uv_pri_strength ) && + ( cdef_uv_sec_strength == rhs.cdef_uv_sec_strength ); + } + + bool operator!=( AV1CDEF const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint8_t cdef_damping_minus_3 = {}; + uint8_t cdef_bits = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cdef_y_pri_strength = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cdef_y_sec_strength = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cdef_uv_pri_strength = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cdef_uv_sec_strength = {}; + }; + + struct AV1LoopRestoration + { + using NativeType = StdVideoAV1LoopRestoration; + + operator StdVideoAV1LoopRestoration 
const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1LoopRestoration &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1LoopRestoration const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoAV1LoopRestoration *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( AV1LoopRestoration const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( FrameRestorationType == rhs.FrameRestorationType ) && ( LoopRestorationSize == rhs.LoopRestorationSize ); + } + + bool operator!=( AV1LoopRestoration const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D FrameRestorationType = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D LoopRestorationSize = {}; + }; + + struct AV1GlobalMotion + { + using NativeType = StdVideoAV1GlobalMotion; + + operator StdVideoAV1GlobalMotion const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1GlobalMotion &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1GlobalMotion const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoAV1GlobalMotion *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( AV1GlobalMotion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( GmType == rhs.GmType ) && ( gm_params == rhs.gm_params ); + } + + bool operator!=( AV1GlobalMotion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D GmType = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D gm_params = {}; + }; + + struct AV1FilmGrainFlags + { + using NativeType = StdVideoAV1FilmGrainFlags; + + operator StdVideoAV1FilmGrainFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1FilmGrainFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1FilmGrainFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoAV1FilmGrainFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( AV1FilmGrainFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( chroma_scaling_from_luma == rhs.chroma_scaling_from_luma ) && ( overlap_flag == rhs.overlap_flag ) && + ( clip_to_restricted_range == rhs.clip_to_restricted_range ) && ( update_grain == rhs.update_grain ) && ( reserved == rhs.reserved ); + } + + bool operator!=( AV1FilmGrainFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t chroma_scaling_from_luma : 1; + uint32_t overlap_flag : 1; + uint32_t clip_to_restricted_range : 1; + uint32_t update_grain : 1; + uint32_t reserved : 28; + }; + + struct AV1FilmGrain + { + using NativeType = StdVideoAV1FilmGrain; + + operator StdVideoAV1FilmGrain const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1FilmGrain &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1FilmGrain const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoAV1FilmGrain *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( AV1FilmGrain const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( 
flags == rhs.flags ) && ( grain_scaling_minus_8 == rhs.grain_scaling_minus_8 ) && ( ar_coeff_lag == rhs.ar_coeff_lag ) && + ( ar_coeff_shift_minus_6 == rhs.ar_coeff_shift_minus_6 ) && ( grain_scale_shift == rhs.grain_scale_shift ) && ( grain_seed == rhs.grain_seed ) && + ( film_grain_params_ref_idx == rhs.film_grain_params_ref_idx ) && ( num_y_points == rhs.num_y_points ) && + ( point_y_value == rhs.point_y_value ) && ( point_y_scaling == rhs.point_y_scaling ) && ( num_cb_points == rhs.num_cb_points ) && + ( point_cb_value == rhs.point_cb_value ) && ( point_cb_scaling == rhs.point_cb_scaling ) && ( num_cr_points == rhs.num_cr_points ) && + ( point_cr_value == rhs.point_cr_value ) && ( point_cr_scaling == rhs.point_cr_scaling ) && + ( ar_coeffs_y_plus_128 == rhs.ar_coeffs_y_plus_128 ) && ( ar_coeffs_cb_plus_128 == rhs.ar_coeffs_cb_plus_128 ) && + ( ar_coeffs_cr_plus_128 == rhs.ar_coeffs_cr_plus_128 ) && ( cb_mult == rhs.cb_mult ) && ( cb_luma_mult == rhs.cb_luma_mult ) && + ( cb_offset == rhs.cb_offset ) && ( cr_mult == rhs.cr_mult ) && ( cr_luma_mult == rhs.cr_luma_mult ) && ( cr_offset == rhs.cr_offset ); + } + + bool operator!=( AV1FilmGrain const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FilmGrainFlags flags = {}; + uint8_t grain_scaling_minus_8 = {}; + uint8_t ar_coeff_lag = {}; + uint8_t ar_coeff_shift_minus_6 = {}; + uint8_t grain_scale_shift = {}; + uint16_t grain_seed = {}; + uint8_t film_grain_params_ref_idx = {}; + uint8_t num_y_points = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D point_y_value = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D point_y_scaling = {}; + uint8_t num_cb_points = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D point_cb_value = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D point_cb_scaling = {}; + uint8_t num_cr_points = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D point_cr_value = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D point_cr_scaling = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ar_coeffs_y_plus_128 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ar_coeffs_cb_plus_128 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ar_coeffs_cr_plus_128 = {}; + uint8_t cb_mult = {}; + uint8_t cb_luma_mult = {}; + uint16_t cb_offset = {}; + uint8_t cr_mult = {}; + uint8_t cr_luma_mult = {}; + uint16_t cr_offset = {}; + }; + + struct AV1SequenceHeaderFlags + { + using NativeType = StdVideoAV1SequenceHeaderFlags; + + operator StdVideoAV1SequenceHeaderFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1SequenceHeaderFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1SequenceHeaderFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoAV1SequenceHeaderFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( AV1SequenceHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( still_picture == rhs.still_picture ) && ( reduced_still_picture_header == rhs.reduced_still_picture_header ) && + ( use_128x128_superblock == rhs.use_128x128_superblock ) && ( enable_filter_intra == rhs.enable_filter_intra ) && + ( enable_intra_edge_filter == rhs.enable_intra_edge_filter ) && ( enable_interintra_compound == rhs.enable_interintra_compound ) && + ( enable_masked_compound == rhs.enable_masked_compound ) && ( enable_warped_motion == rhs.enable_warped_motion ) && + ( enable_dual_filter == rhs.enable_dual_filter ) && 
( enable_order_hint == rhs.enable_order_hint ) && + ( enable_jnt_comp == rhs.enable_jnt_comp ) && ( enable_ref_frame_mvs == rhs.enable_ref_frame_mvs ) && + ( frame_id_numbers_present_flag == rhs.frame_id_numbers_present_flag ) && ( enable_superres == rhs.enable_superres ) && + ( enable_cdef == rhs.enable_cdef ) && ( enable_restoration == rhs.enable_restoration ) && + ( film_grain_params_present == rhs.film_grain_params_present ) && ( timing_info_present_flag == rhs.timing_info_present_flag ) && + ( initial_display_delay_present_flag == rhs.initial_display_delay_present_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( AV1SequenceHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t still_picture : 1; + uint32_t reduced_still_picture_header : 1; + uint32_t use_128x128_superblock : 1; + uint32_t enable_filter_intra : 1; + uint32_t enable_intra_edge_filter : 1; + uint32_t enable_interintra_compound : 1; + uint32_t enable_masked_compound : 1; + uint32_t enable_warped_motion : 1; + uint32_t enable_dual_filter : 1; + uint32_t enable_order_hint : 1; + uint32_t enable_jnt_comp : 1; + uint32_t enable_ref_frame_mvs : 1; + uint32_t frame_id_numbers_present_flag : 1; + uint32_t enable_superres : 1; + uint32_t enable_cdef : 1; + uint32_t enable_restoration : 1; + uint32_t film_grain_params_present : 1; + uint32_t timing_info_present_flag : 1; + uint32_t initial_display_delay_present_flag : 1; + uint32_t reserved : 13; + }; + + struct AV1SequenceHeader + { + using NativeType = StdVideoAV1SequenceHeader; + + operator StdVideoAV1SequenceHeader const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1SequenceHeader &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1SequenceHeader const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoAV1SequenceHeader *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( AV1SequenceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( seq_profile == rhs.seq_profile ) && ( frame_width_bits_minus_1 == rhs.frame_width_bits_minus_1 ) && + ( frame_height_bits_minus_1 == rhs.frame_height_bits_minus_1 ) && ( max_frame_width_minus_1 == rhs.max_frame_width_minus_1 ) && + ( max_frame_height_minus_1 == rhs.max_frame_height_minus_1 ) && ( delta_frame_id_length_minus_2 == rhs.delta_frame_id_length_minus_2 ) && + ( additional_frame_id_length_minus_1 == rhs.additional_frame_id_length_minus_1 ) && ( order_hint_bits_minus_1 == rhs.order_hint_bits_minus_1 ) && + ( seq_force_integer_mv == rhs.seq_force_integer_mv ) && ( seq_force_screen_content_tools == rhs.seq_force_screen_content_tools ) && + ( reserved1 == rhs.reserved1 ) && ( pColorConfig == rhs.pColorConfig ) && ( pTimingInfo == rhs.pTimingInfo ); + } + + bool operator!=( AV1SequenceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1SequenceHeaderFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1Profile seq_profile = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1Profile::eMain; + uint8_t frame_width_bits_minus_1 = {}; + uint8_t frame_height_bits_minus_1 = {}; + uint16_t max_frame_width_minus_1 = {}; + uint16_t max_frame_height_minus_1 = {}; + uint8_t delta_frame_id_length_minus_2 = {}; + uint8_t additional_frame_id_length_minus_1 = {}; + 
uint8_t order_hint_bits_minus_1 = {}; + uint8_t seq_force_integer_mv = {}; + uint8_t seq_force_screen_content_tools = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1ColorConfig * pColorConfig = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TimingInfo * pTimingInfo = {}; + }; + + //=== vulkan_video_codec_av1std_decode === + + struct DecodeAV1PictureInfoFlags + { + using NativeType = StdVideoDecodeAV1PictureInfoFlags; + + operator StdVideoDecodeAV1PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeAV1PictureInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeAV1PictureInfoFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoDecodeAV1PictureInfoFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( DecodeAV1PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( error_resilient_mode == rhs.error_resilient_mode ) && ( disable_cdf_update == rhs.disable_cdf_update ) && + ( use_superres == rhs.use_superres ) && ( render_and_frame_size_different == rhs.render_and_frame_size_different ) && + ( allow_screen_content_tools == rhs.allow_screen_content_tools ) && ( is_filter_switchable == rhs.is_filter_switchable ) && + ( force_integer_mv == rhs.force_integer_mv ) && ( frame_size_override_flag == rhs.frame_size_override_flag ) && + ( buffer_removal_time_present_flag == rhs.buffer_removal_time_present_flag ) && ( allow_intrabc == rhs.allow_intrabc ) && + ( frame_refs_short_signaling == rhs.frame_refs_short_signaling ) && ( allow_high_precision_mv == rhs.allow_high_precision_mv ) && + ( is_motion_mode_switchable == rhs.is_motion_mode_switchable ) && ( use_ref_frame_mvs == rhs.use_ref_frame_mvs ) && + ( disable_frame_end_update_cdf == rhs.disable_frame_end_update_cdf ) && ( allow_warped_motion == rhs.allow_warped_motion ) && + ( reduced_tx_set == rhs.reduced_tx_set ) && ( reference_select == rhs.reference_select ) && ( skip_mode_present == rhs.skip_mode_present ) && + ( delta_q_present == rhs.delta_q_present ) && ( delta_lf_present == rhs.delta_lf_present ) && ( delta_lf_multi == rhs.delta_lf_multi ) && + ( segmentation_enabled == rhs.segmentation_enabled ) && ( segmentation_update_map == rhs.segmentation_update_map ) && + ( segmentation_temporal_update == rhs.segmentation_temporal_update ) && ( segmentation_update_data == rhs.segmentation_update_data ) && + ( UsesLr == rhs.UsesLr ) && ( usesChromaLr == rhs.usesChromaLr ) && ( apply_grain == rhs.apply_grain ) && ( reserved == rhs.reserved ); + } + + bool operator!=( DecodeAV1PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t error_resilient_mode : 1; + uint32_t disable_cdf_update : 1; + uint32_t use_superres : 1; + uint32_t render_and_frame_size_different : 1; + uint32_t allow_screen_content_tools : 1; + uint32_t is_filter_switchable : 1; + uint32_t force_integer_mv : 1; + uint32_t frame_size_override_flag : 1; + uint32_t buffer_removal_time_present_flag : 1; + uint32_t allow_intrabc : 1; + uint32_t frame_refs_short_signaling : 1; + uint32_t allow_high_precision_mv : 1; + uint32_t is_motion_mode_switchable : 1; + uint32_t use_ref_frame_mvs : 1; + uint32_t disable_frame_end_update_cdf : 1; + uint32_t allow_warped_motion : 1; + uint32_t reduced_tx_set : 1; + uint32_t 
reference_select : 1; + uint32_t skip_mode_present : 1; + uint32_t delta_q_present : 1; + uint32_t delta_lf_present : 1; + uint32_t delta_lf_multi : 1; + uint32_t segmentation_enabled : 1; + uint32_t segmentation_update_map : 1; + uint32_t segmentation_temporal_update : 1; + uint32_t segmentation_update_data : 1; + uint32_t UsesLr : 1; + uint32_t usesChromaLr : 1; + uint32_t apply_grain : 1; + uint32_t reserved : 3; + }; + + struct DecodeAV1PictureInfo + { + using NativeType = StdVideoDecodeAV1PictureInfo; + + operator StdVideoDecodeAV1PictureInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeAV1PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeAV1PictureInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoDecodeAV1PictureInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( DecodeAV1PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( frame_type == rhs.frame_type ) && ( current_frame_id == rhs.current_frame_id ) && ( OrderHint == rhs.OrderHint ) && + ( primary_ref_frame == rhs.primary_ref_frame ) && ( refresh_frame_flags == rhs.refresh_frame_flags ) && ( reserved1 == rhs.reserved1 ) && + ( interpolation_filter == rhs.interpolation_filter ) && ( TxMode == rhs.TxMode ) && ( delta_q_res == rhs.delta_q_res ) && + ( delta_lf_res == rhs.delta_lf_res ) && ( SkipModeFrame == rhs.SkipModeFrame ) && ( coded_denom == rhs.coded_denom ) && + ( reserved2 == rhs.reserved2 ) && ( OrderHints == rhs.OrderHints ) && ( expectedFrameId == rhs.expectedFrameId ) && + ( pTileInfo == rhs.pTileInfo ) && ( pQuantization == rhs.pQuantization ) && ( pSegmentation == rhs.pSegmentation ) && + ( pLoopFilter == rhs.pLoopFilter ) && ( pCDEF == rhs.pCDEF ) && ( pLoopRestoration == rhs.pLoopRestoration ) && + ( pGlobalMotion == rhs.pGlobalMotion ) && ( pFilmGrain == rhs.pFilmGrain ); + } + + bool operator!=( DecodeAV1PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeAV1PictureInfoFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FrameType frame_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FrameType::eKey; + uint32_t current_frame_id = {}; + uint8_t OrderHint = {}; + uint8_t primary_ref_frame = {}; + uint8_t refresh_frame_flags = {}; + uint8_t reserved1 = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1InterpolationFilter interpolation_filter = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1InterpolationFilter::eEighttap; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TxMode TxMode = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TxMode::eOnly4X4; + uint8_t delta_q_res = {}; + uint8_t delta_lf_res = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D SkipModeFrame = {}; + uint8_t coded_denom = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved2 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D OrderHints = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D expectedFrameId = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TileInfo * pTileInfo = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1Quantization * pQuantization = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1Segmentation * pSegmentation = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1LoopFilter * 
pLoopFilter = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1CDEF * pCDEF = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1LoopRestoration * pLoopRestoration = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1GlobalMotion * pGlobalMotion = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FilmGrain * pFilmGrain = {}; + }; + + struct DecodeAV1ReferenceInfoFlags + { + using NativeType = StdVideoDecodeAV1ReferenceInfoFlags; + + operator StdVideoDecodeAV1ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeAV1ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeAV1ReferenceInfoFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoDecodeAV1ReferenceInfoFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( DecodeAV1ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( disable_frame_end_update_cdf == rhs.disable_frame_end_update_cdf ) && ( segmentation_enabled == rhs.segmentation_enabled ) && + ( reserved == rhs.reserved ); + } + + bool operator!=( DecodeAV1ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t disable_frame_end_update_cdf : 1; + uint32_t segmentation_enabled : 1; + uint32_t reserved : 30; + }; + + struct DecodeAV1ReferenceInfo + { + using NativeType = StdVideoDecodeAV1ReferenceInfo; + + operator StdVideoDecodeAV1ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeAV1ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeAV1ReferenceInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoDecodeAV1ReferenceInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( DecodeAV1ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( frame_type == rhs.frame_type ) && ( RefFrameSignBias == rhs.RefFrameSignBias ) && ( OrderHint == rhs.OrderHint ) && + ( SavedOrderHints == rhs.SavedOrderHints ); + } + + bool operator!=( DecodeAV1ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeAV1ReferenceInfoFlags flags = {}; + uint8_t frame_type = {}; + uint8_t RefFrameSignBias = {}; + uint8_t OrderHint = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D SavedOrderHints = {}; + }; + + //=== vulkan_video_codec_av1std_encode === + + struct EncodeAV1DecoderModelInfo + { + using NativeType = StdVideoEncodeAV1DecoderModelInfo; + + operator StdVideoEncodeAV1DecoderModelInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1DecoderModelInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1DecoderModelInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1DecoderModelInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeAV1DecoderModelInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( buffer_delay_length_minus_1 == rhs.buffer_delay_length_minus_1 ) && + ( 
buffer_removal_time_length_minus_1 == rhs.buffer_removal_time_length_minus_1 ) && + ( frame_presentation_time_length_minus_1 == rhs.frame_presentation_time_length_minus_1 ) && ( reserved1 == rhs.reserved1 ) && + ( num_units_in_decoding_tick == rhs.num_units_in_decoding_tick ); + } + + bool operator!=( EncodeAV1DecoderModelInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint8_t buffer_delay_length_minus_1 = {}; + uint8_t buffer_removal_time_length_minus_1 = {}; + uint8_t frame_presentation_time_length_minus_1 = {}; + uint8_t reserved1 = {}; + uint32_t num_units_in_decoding_tick = {}; + }; + + struct EncodeAV1ExtensionHeader + { + using NativeType = StdVideoEncodeAV1ExtensionHeader; + + operator StdVideoEncodeAV1ExtensionHeader const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const StdVideoEncodeAV1ExtensionHeader *>( this ); + } + + operator StdVideoEncodeAV1ExtensionHeader &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<StdVideoEncodeAV1ExtensionHeader *>( this ); + } + + operator StdVideoEncodeAV1ExtensionHeader const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<const StdVideoEncodeAV1ExtensionHeader *>( this ); + } + + operator StdVideoEncodeAV1ExtensionHeader *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<StdVideoEncodeAV1ExtensionHeader *>( this ); + } + + bool operator==( EncodeAV1ExtensionHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( temporal_id == rhs.temporal_id ) && ( spatial_id == rhs.spatial_id ); + } + + bool operator!=( EncodeAV1ExtensionHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint8_t temporal_id = {}; + uint8_t spatial_id = {}; + }; + + struct EncodeAV1OperatingPointInfoFlags + { + using NativeType = StdVideoEncodeAV1OperatingPointInfoFlags; + + operator StdVideoEncodeAV1OperatingPointInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const StdVideoEncodeAV1OperatingPointInfoFlags *>( this ); + } + + operator StdVideoEncodeAV1OperatingPointInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<StdVideoEncodeAV1OperatingPointInfoFlags *>( this ); + } + + operator StdVideoEncodeAV1OperatingPointInfoFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<const StdVideoEncodeAV1OperatingPointInfoFlags *>( this ); + } + + operator StdVideoEncodeAV1OperatingPointInfoFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<StdVideoEncodeAV1OperatingPointInfoFlags *>( this ); + } + + bool operator==( EncodeAV1OperatingPointInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( decoder_model_present_for_this_op == rhs.decoder_model_present_for_this_op ) && ( low_delay_mode_flag == rhs.low_delay_mode_flag ) && + ( initial_display_delay_present_for_this_op == rhs.initial_display_delay_present_for_this_op ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeAV1OperatingPointInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t decoder_model_present_for_this_op : 1; + uint32_t low_delay_mode_flag : 1; + uint32_t initial_display_delay_present_for_this_op : 1; + uint32_t reserved : 29; + }; + + struct EncodeAV1OperatingPointInfo + { + using NativeType = StdVideoEncodeAV1OperatingPointInfo; + + operator StdVideoEncodeAV1OperatingPointInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const StdVideoEncodeAV1OperatingPointInfo *>( this ); + } + + operator StdVideoEncodeAV1OperatingPointInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<StdVideoEncodeAV1OperatingPointInfo *>( this ); + } + + operator StdVideoEncodeAV1OperatingPointInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<const StdVideoEncodeAV1OperatingPointInfo *>( this ); + } + + operator StdVideoEncodeAV1OperatingPointInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast<StdVideoEncodeAV1OperatingPointInfo *>( this ); + } + + bool operator==( EncodeAV1OperatingPointInfo const & rhs 
) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( operating_point_idc == rhs.operating_point_idc ) && ( seq_level_idx == rhs.seq_level_idx ) && + ( seq_tier == rhs.seq_tier ) && ( decoder_buffer_delay == rhs.decoder_buffer_delay ) && ( encoder_buffer_delay == rhs.encoder_buffer_delay ) && + ( initial_display_delay_minus_1 == rhs.initial_display_delay_minus_1 ); + } + + bool operator!=( EncodeAV1OperatingPointInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeAV1OperatingPointInfoFlags flags = {}; + uint16_t operating_point_idc = {}; + uint8_t seq_level_idx = {}; + uint8_t seq_tier = {}; + uint32_t decoder_buffer_delay = {}; + uint32_t encoder_buffer_delay = {}; + uint8_t initial_display_delay_minus_1 = {}; + }; + + struct EncodeAV1PictureInfoFlags + { + using NativeType = StdVideoEncodeAV1PictureInfoFlags; + + operator StdVideoEncodeAV1PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1PictureInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1PictureInfoFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1PictureInfoFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeAV1PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( error_resilient_mode == rhs.error_resilient_mode ) && ( disable_cdf_update == rhs.disable_cdf_update ) && + ( use_superres == rhs.use_superres ) && ( render_and_frame_size_different == rhs.render_and_frame_size_different ) && + ( allow_screen_content_tools == rhs.allow_screen_content_tools ) && ( is_filter_switchable == rhs.is_filter_switchable ) && + ( force_integer_mv == rhs.force_integer_mv ) && ( frame_size_override_flag == rhs.frame_size_override_flag ) && + ( buffer_removal_time_present_flag == rhs.buffer_removal_time_present_flag ) && ( allow_intrabc == rhs.allow_intrabc ) && + ( frame_refs_short_signaling == rhs.frame_refs_short_signaling ) && ( allow_high_precision_mv == rhs.allow_high_precision_mv ) && + ( is_motion_mode_switchable == rhs.is_motion_mode_switchable ) && ( use_ref_frame_mvs == rhs.use_ref_frame_mvs ) && + ( disable_frame_end_update_cdf == rhs.disable_frame_end_update_cdf ) && ( allow_warped_motion == rhs.allow_warped_motion ) && + ( reduced_tx_set == rhs.reduced_tx_set ) && ( skip_mode_present == rhs.skip_mode_present ) && ( delta_q_present == rhs.delta_q_present ) && + ( delta_lf_present == rhs.delta_lf_present ) && ( delta_lf_multi == rhs.delta_lf_multi ) && + ( segmentation_enabled == rhs.segmentation_enabled ) && ( segmentation_update_map == rhs.segmentation_update_map ) && + ( segmentation_temporal_update == rhs.segmentation_temporal_update ) && ( segmentation_update_data == rhs.segmentation_update_data ) && + ( UsesLr == rhs.UsesLr ) && ( usesChromaLr == rhs.usesChromaLr ) && ( show_frame == rhs.show_frame ) && + ( showable_frame == rhs.showable_frame ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeAV1PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t error_resilient_mode : 1; + uint32_t disable_cdf_update : 1; + uint32_t use_superres : 1; + uint32_t render_and_frame_size_different : 1; + uint32_t allow_screen_content_tools : 1; + uint32_t is_filter_switchable : 1; + uint32_t 
force_integer_mv : 1; + uint32_t frame_size_override_flag : 1; + uint32_t buffer_removal_time_present_flag : 1; + uint32_t allow_intrabc : 1; + uint32_t frame_refs_short_signaling : 1; + uint32_t allow_high_precision_mv : 1; + uint32_t is_motion_mode_switchable : 1; + uint32_t use_ref_frame_mvs : 1; + uint32_t disable_frame_end_update_cdf : 1; + uint32_t allow_warped_motion : 1; + uint32_t reduced_tx_set : 1; + uint32_t skip_mode_present : 1; + uint32_t delta_q_present : 1; + uint32_t delta_lf_present : 1; + uint32_t delta_lf_multi : 1; + uint32_t segmentation_enabled : 1; + uint32_t segmentation_update_map : 1; + uint32_t segmentation_temporal_update : 1; + uint32_t segmentation_update_data : 1; + uint32_t UsesLr : 1; + uint32_t usesChromaLr : 1; + uint32_t show_frame : 1; + uint32_t showable_frame : 1; + uint32_t reserved : 3; + }; + + struct EncodeAV1PictureInfo + { + using NativeType = StdVideoEncodeAV1PictureInfo; + + operator StdVideoEncodeAV1PictureInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1PictureInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1PictureInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeAV1PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( frame_type == rhs.frame_type ) && ( frame_presentation_time == rhs.frame_presentation_time ) && + ( current_frame_id == rhs.current_frame_id ) && ( order_hint == rhs.order_hint ) && ( primary_ref_frame == rhs.primary_ref_frame ) && + ( refresh_frame_flags == rhs.refresh_frame_flags ) && ( coded_denom == rhs.coded_denom ) && + ( render_width_minus_1 == rhs.render_width_minus_1 ) && ( render_height_minus_1 == rhs.render_height_minus_1 ) && + ( interpolation_filter == rhs.interpolation_filter ) && ( TxMode == rhs.TxMode ) && ( delta_q_res == rhs.delta_q_res ) && + ( delta_lf_res == rhs.delta_lf_res ) && ( ref_order_hint == rhs.ref_order_hint ) && ( ref_frame_idx == rhs.ref_frame_idx ) && + ( reserved1 == rhs.reserved1 ) && ( delta_frame_id_minus_1 == rhs.delta_frame_id_minus_1 ) && ( pTileInfo == rhs.pTileInfo ) && + ( pQuantization == rhs.pQuantization ) && ( pSegmentation == rhs.pSegmentation ) && ( pLoopFilter == rhs.pLoopFilter ) && + ( pCDEF == rhs.pCDEF ) && ( pLoopRestoration == rhs.pLoopRestoration ) && ( pGlobalMotion == rhs.pGlobalMotion ) && + ( pExtensionHeader == rhs.pExtensionHeader ) && ( pBufferRemovalTimes == rhs.pBufferRemovalTimes ); + } + + bool operator!=( EncodeAV1PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeAV1PictureInfoFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FrameType frame_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FrameType::eKey; + uint32_t frame_presentation_time = {}; + uint32_t current_frame_id = {}; + uint8_t order_hint = {}; + uint8_t primary_ref_frame = {}; + uint8_t refresh_frame_flags = {}; + uint8_t coded_denom = {}; + uint16_t render_width_minus_1 = {}; + uint16_t render_height_minus_1 = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1InterpolationFilter interpolation_filter = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1InterpolationFilter::eEighttap; + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TxMode TxMode = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TxMode::eOnly4X4; + uint8_t delta_q_res = {}; + uint8_t delta_lf_res = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ref_order_hint = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ref_frame_idx = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_frame_id_minus_1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TileInfo * pTileInfo = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1Quantization * pQuantization = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1Segmentation * pSegmentation = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1LoopFilter * pLoopFilter = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1CDEF * pCDEF = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1LoopRestoration * pLoopRestoration = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1GlobalMotion * pGlobalMotion = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeAV1ExtensionHeader * pExtensionHeader = {}; + const uint32_t * pBufferRemovalTimes = {}; + }; + + struct EncodeAV1ReferenceInfoFlags + { + using NativeType = StdVideoEncodeAV1ReferenceInfoFlags; + + operator StdVideoEncodeAV1ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1ReferenceInfoFlags const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1ReferenceInfoFlags *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeAV1ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( disable_frame_end_update_cdf == rhs.disable_frame_end_update_cdf ) && ( segmentation_enabled == rhs.segmentation_enabled ) && + ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeAV1ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t disable_frame_end_update_cdf : 1; + uint32_t segmentation_enabled : 1; + uint32_t reserved : 30; + }; + + struct EncodeAV1ReferenceInfo + { + using NativeType = StdVideoEncodeAV1ReferenceInfo; + + operator StdVideoEncodeAV1ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1ReferenceInfo const *() const VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1ReferenceInfo *() VULKAN_HPP_NOEXCEPT + { + return reinterpret_cast( this ); + } + + bool operator==( EncodeAV1ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( RefFrameId == rhs.RefFrameId ) && ( frame_type == rhs.frame_type ) && ( OrderHint == rhs.OrderHint ) && + ( reserved1 == rhs.reserved1 ) && ( pExtensionHeader == rhs.pExtensionHeader ); + } + + bool operator!=( EncodeAV1ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeAV1ReferenceInfoFlags flags = {}; + uint32_t RefFrameId = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FrameType 
frame_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FrameType::eKey; + uint8_t OrderHint = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeAV1ExtensionHeader * pExtensionHeader = {}; + }; + + } // namespace VULKAN_HPP_VIDEO_NAMESPACE +} // namespace VULKAN_HPP_NAMESPACE +#endif diff --git a/include/vulkan/vulkan_wayland.h b/include/vulkan/vulkan_wayland.h new file mode 100644 index 00000000..75bf0701 --- /dev/null +++ b/include/vulkan/vulkan_wayland.h @@ -0,0 +1,55 @@ +#ifndef VULKAN_WAYLAND_H_ +#define VULKAN_WAYLAND_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_KHR_wayland_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_wayland_surface 1 +#define VK_KHR_WAYLAND_SURFACE_SPEC_VERSION 6 +#define VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME "VK_KHR_wayland_surface" +typedef VkFlags VkWaylandSurfaceCreateFlagsKHR; +typedef struct VkWaylandSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkWaylandSurfaceCreateFlagsKHR flags; + struct wl_display* display; + struct wl_surface* surface; +} VkWaylandSurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateWaylandSurfaceKHR)(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR( + VkInstance instance, + const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct wl_display* display); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_win32.h b/include/vulkan/vulkan_win32.h new file mode 100644 index 00000000..e66ed1fc --- /dev/null +++ b/include/vulkan/vulkan_win32.h @@ -0,0 +1,342 @@ +#ifndef VULKAN_WIN32_H_ +#define VULKAN_WIN32_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_KHR_win32_surface is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_win32_surface 1 +#define VK_KHR_WIN32_SURFACE_SPEC_VERSION 6 +#define VK_KHR_WIN32_SURFACE_EXTENSION_NAME "VK_KHR_win32_surface" +typedef VkFlags VkWin32SurfaceCreateFlagsKHR; +typedef struct VkWin32SurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkWin32SurfaceCreateFlagsKHR flags; + HINSTANCE hinstance; + HWND hwnd; +} VkWin32SurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateWin32SurfaceKHR)(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR( + VkInstance instance, + const VkWin32SurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex); +#endif + + +// VK_KHR_external_memory_win32 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_memory_win32 1 +#define VK_KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_KHR_external_memory_win32" +typedef struct VkImportMemoryWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; +} VkImportMemoryWin32HandleInfoKHR; + +typedef struct VkExportMemoryWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; +} VkExportMemoryWin32HandleInfoKHR; + +typedef struct VkMemoryWin32HandlePropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryWin32HandlePropertiesKHR; + +typedef struct VkMemoryGetWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetWin32HandleInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleKHR)(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandlePropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleKHR( + VkDevice device, + const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandlePropertiesKHR( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties); +#endif + + +// VK_KHR_win32_keyed_mutex is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_win32_keyed_mutex 1 +#define VK_KHR_WIN32_KEYED_MUTEX_SPEC_VERSION 1 +#define VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_KHR_win32_keyed_mutex" +typedef struct VkWin32KeyedMutexAcquireReleaseInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t acquireCount; + const VkDeviceMemory* pAcquireSyncs; + const uint64_t* pAcquireKeys; + const uint32_t* pAcquireTimeouts; + uint32_t releaseCount; + const VkDeviceMemory* pReleaseSyncs; + const uint64_t* pReleaseKeys; +} VkWin32KeyedMutexAcquireReleaseInfoKHR; + + + +// VK_KHR_external_semaphore_win32 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_semaphore_win32 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME "VK_KHR_external_semaphore_win32" +typedef struct VkImportSemaphoreWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; +} VkImportSemaphoreWin32HandleInfoKHR; + +typedef struct VkExportSemaphoreWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; +} VkExportSemaphoreWin32HandleInfoKHR; + +typedef struct VkD3D12FenceSubmitInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreValuesCount; + const uint64_t* pWaitSemaphoreValues; + uint32_t signalSemaphoreValuesCount; + const uint64_t* pSignalSemaphoreValues; +} VkD3D12FenceSubmitInfoKHR; + +typedef struct VkSemaphoreGetWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetWin32HandleInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreWin32HandleKHR)(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreWin32HandleKHR)(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreWin32HandleKHR( + VkDevice device, + const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreWin32HandleKHR( + VkDevice device, + const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle); +#endif + + +// VK_KHR_external_fence_win32 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_external_fence_win32 1 +#define VK_KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME "VK_KHR_external_fence_win32" +typedef struct VkImportFenceWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkFenceImportFlags flags; + VkExternalFenceHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; +} VkImportFenceWin32HandleInfoKHR; + +typedef struct VkExportFenceWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; +} VkExportFenceWin32HandleInfoKHR; + +typedef struct VkFenceGetWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkExternalFenceHandleTypeFlagBits handleType; +} VkFenceGetWin32HandleInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkImportFenceWin32HandleKHR)(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceWin32HandleKHR)(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceWin32HandleKHR( + VkDevice device, + const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceWin32HandleKHR( + VkDevice device, + const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle); +#endif + + +// VK_NV_external_memory_win32 is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_external_memory_win32 1 +#define VK_NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_NV_external_memory_win32" +typedef struct VkImportMemoryWin32HandleInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleType; + HANDLE handle; +} VkImportMemoryWin32HandleInfoNV; + +typedef struct VkExportMemoryWin32HandleInfoNV { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; +} VkExportMemoryWin32HandleInfoNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleNV)(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleNV( + VkDevice device, + VkDeviceMemory memory, + VkExternalMemoryHandleTypeFlagsNV handleType, + HANDLE* pHandle); +#endif + + +// VK_NV_win32_keyed_mutex is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_win32_keyed_mutex 1 +#define VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION 2 +#define VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_NV_win32_keyed_mutex" +typedef struct VkWin32KeyedMutexAcquireReleaseInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t acquireCount; + const VkDeviceMemory* pAcquireSyncs; + const uint64_t* pAcquireKeys; + const uint32_t* pAcquireTimeoutMilliseconds; + uint32_t releaseCount; + const VkDeviceMemory* pReleaseSyncs; + const uint64_t* pReleaseKeys; +} VkWin32KeyedMutexAcquireReleaseInfoNV; + + + +// VK_EXT_full_screen_exclusive is a preprocessor guard. Do not pass it to API calls. 
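/*
** Illustrative sketch: importing a fence payload from an externally shared
** Win32 handle (VK_KHR_external_fence_win32). `device`, `fence` and
** `shared_handle` are assumptions; the handle is expected to have been exported
** with the OPAQUE_WIN32 handle type on the producing side.
*/
VkImportFenceWin32HandleInfoKHR import_info = {};

import_info.sType      = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR;
import_info.pNext      = nullptr;
import_info.fence      = fence;
import_info.flags      = 0;
import_info.handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT;
import_info.handle     = shared_handle;
import_info.name       = nullptr;

if (vkImportFenceWin32HandleKHR(device,
                                &import_info) != VK_SUCCESS)
{
    /* Import failed - handle the error. */
}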
+#define VK_EXT_full_screen_exclusive 1 +#define VK_EXT_FULL_SCREEN_EXCLUSIVE_SPEC_VERSION 4 +#define VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME "VK_EXT_full_screen_exclusive" + +typedef enum VkFullScreenExclusiveEXT { + VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT = 0, + VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT = 1, + VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT = 2, + VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT = 3, + VK_FULL_SCREEN_EXCLUSIVE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkFullScreenExclusiveEXT; +typedef struct VkSurfaceFullScreenExclusiveInfoEXT { + VkStructureType sType; + void* pNext; + VkFullScreenExclusiveEXT fullScreenExclusive; +} VkSurfaceFullScreenExclusiveInfoEXT; + +typedef struct VkSurfaceCapabilitiesFullScreenExclusiveEXT { + VkStructureType sType; + void* pNext; + VkBool32 fullScreenExclusiveSupported; +} VkSurfaceCapabilitiesFullScreenExclusiveEXT; + +typedef struct VkSurfaceFullScreenExclusiveWin32InfoEXT { + VkStructureType sType; + const void* pNext; + HMONITOR hmonitor; +} VkSurfaceFullScreenExclusiveWin32InfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireFullScreenExclusiveModeEXT)(VkDevice device, VkSwapchainKHR swapchain); +typedef VkResult (VKAPI_PTR *PFN_vkReleaseFullScreenExclusiveModeEXT)(VkDevice device, VkSwapchainKHR swapchain); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModes2EXT)(VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModes2EXT( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + uint32_t* pPresentModeCount, + VkPresentModeKHR* pPresentModes); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireFullScreenExclusiveModeEXT( + VkDevice device, + VkSwapchainKHR swapchain); + +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseFullScreenExclusiveModeEXT( + VkDevice device, + VkSwapchainKHR swapchain); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModes2EXT( + VkDevice device, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + VkDeviceGroupPresentModeFlagsKHR* pModes); +#endif + + +// VK_NV_acquire_winrt_display is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_acquire_winrt_display 1 +#define VK_NV_ACQUIRE_WINRT_DISPLAY_SPEC_VERSION 1 +#define VK_NV_ACQUIRE_WINRT_DISPLAY_EXTENSION_NAME "VK_NV_acquire_winrt_display" +typedef VkResult (VKAPI_PTR *PFN_vkAcquireWinrtDisplayNV)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); +typedef VkResult (VKAPI_PTR *PFN_vkGetWinrtDisplayNV)(VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR* pDisplay); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireWinrtDisplayNV( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetWinrtDisplayNV( + VkPhysicalDevice physicalDevice, + uint32_t deviceRelativeId, + VkDisplayKHR* pDisplay); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_xcb.h b/include/vulkan/vulkan_xcb.h new file mode 100644 index 00000000..4e062755 --- /dev/null +++ b/include/vulkan/vulkan_xcb.h @@ -0,0 +1,56 @@ +#ifndef VULKAN_XCB_H_ +#define VULKAN_XCB_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. 
+** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_KHR_xcb_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_xcb_surface 1 +#define VK_KHR_XCB_SURFACE_SPEC_VERSION 6 +#define VK_KHR_XCB_SURFACE_EXTENSION_NAME "VK_KHR_xcb_surface" +typedef VkFlags VkXcbSurfaceCreateFlagsKHR; +typedef struct VkXcbSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkXcbSurfaceCreateFlagsKHR flags; + xcb_connection_t* connection; + xcb_window_t window; +} VkXcbSurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateXcbSurfaceKHR)(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR( + VkInstance instance, + const VkXcbSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + xcb_connection_t* connection, + xcb_visualid_t visual_id); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_xlib.h b/include/vulkan/vulkan_xlib.h new file mode 100644 index 00000000..b581779c --- /dev/null +++ b/include/vulkan/vulkan_xlib.h @@ -0,0 +1,56 @@ +#ifndef VULKAN_XLIB_H_ +#define VULKAN_XLIB_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_KHR_xlib_surface is a preprocessor guard. Do not pass it to API calls. 
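/*
** Illustrative sketch: creating an XCB presentation surface. `instance`,
** `connection` and `window` are assumptions -- a valid VkInstance plus an
** xcb_connection_t* / xcb_window_t obtained from the application's window code.
*/
VkXcbSurfaceCreateInfoKHR surface_create_info = {};
VkSurfaceKHR              surface             = VK_NULL_HANDLE;

surface_create_info.sType      = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
surface_create_info.pNext      = nullptr;
surface_create_info.flags      = 0;
surface_create_info.connection = connection;
surface_create_info.window     = window;

if (vkCreateXcbSurfaceKHR(instance,
                          &surface_create_info,
                          nullptr, /* pAllocator */
                          &surface) != VK_SUCCESS)
{
    /* Surface creation failed - handle the error. */
}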
+#define VK_KHR_xlib_surface 1 +#define VK_KHR_XLIB_SURFACE_SPEC_VERSION 6 +#define VK_KHR_XLIB_SURFACE_EXTENSION_NAME "VK_KHR_xlib_surface" +typedef VkFlags VkXlibSurfaceCreateFlagsKHR; +typedef struct VkXlibSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkXlibSurfaceCreateFlagsKHR flags; + Display* dpy; + Window window; +} VkXlibSurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateXlibSurfaceKHR)(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR( + VkInstance instance, + const VkXlibSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXlibPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + Display* dpy, + VisualID visualID); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_xlib_xrandr.h b/include/vulkan/vulkan_xlib_xrandr.h new file mode 100644 index 00000000..ba88a663 --- /dev/null +++ b/include/vulkan/vulkan_xlib_xrandr.h @@ -0,0 +1,46 @@ +#ifndef VULKAN_XLIB_XRANDR_H_ +#define VULKAN_XLIB_XRANDR_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_EXT_acquire_xlib_display is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_acquire_xlib_display 1 +#define VK_EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION 1 +#define VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_xlib_display" +typedef VkResult (VKAPI_PTR *PFN_vkAcquireXlibDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display); +typedef VkResult (VKAPI_PTR *PFN_vkGetRandROutputDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireXlibDisplayEXT( + VkPhysicalDevice physicalDevice, + Display* dpy, + VkDisplayKHR display); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRandROutputDisplayEXT( + VkPhysicalDevice physicalDevice, + Display* dpy, + RROutput rrOutput, + VkDisplayKHR* pDisplay); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/wrappers/buffer.h b/include/wrappers/buffer.h index aa8c316b..a9845122 100644 --- a/include/wrappers/buffer.h +++ b/include/wrappers/buffer.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -35,6 +35,7 @@ #include "misc/callbacks.h" #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/types.h" #include "misc/page_tracker.h" @@ -69,102 +70,20 @@ namespace Anvil BUFFER_CALLBACK_ID_COUNT }; - class Buffer : public std::enable_shared_from_this, - public CallbacksSupportProvider, - public DebugMarkerSupportProvider + class Buffer : public CallbacksSupportProvider, + public DebugMarkerSupportProvider, + public MTSafetySupportProvider { public: /* Public functions */ - /** Initializes a new NON-SPARSE buffer object using user-specified parameters. - * - * Does NOT allocate and bind any memory blocks to the object. It is user's responsibility - * to call Buffer::set_memory() to configure the binding. - * - * @param in_device_ptr Device to use. - * @param in_size Size of the buffer object to be initialized. - * @param in_queue_families Queue families which the buffer object is going to be used with. - * One or more user queue family bits can be enabled. - * @param in_queue_sharing_mode VkSharingMode value, which is going to be passed to the vkCreateBuffer() - * call. - * @param in_usage_flags Usage flags to set in the VkBufferCreateInfo descriptor, passed to - * to the vkCreateBuffer() call. - **/ - static std::shared_ptr create_nonsparse(std::weak_ptr in_device_ptr, - VkDeviceSize in_size, - QueueFamilyBits in_queue_families, - VkSharingMode in_queue_sharing_mode, - VkBufferUsageFlags in_usage_flags); - - /** Initializes a new NON-SPARSE buffer object using user-specified parameters. - * - * This version of create() ALLOCATES and BINDS a unique memory block to the object. Do NOT - * call Buffer::set_memory() to configure the binding. - * - * The constructor can optionally upload data to the initialized memory. - * - * @param in_device_ptr Device to use. - * @param in_size Size of the buffer object to be initialized. - * @param in_queue_families Queue families which the buffer object is going to be used with. - * One or more user queue family bits can be enabled. - * @param in_queue_sharing_mode VkSharingMode value, which is going to be passed to the vkCreateBuffer() - * call. - * @param in_usage_flags Usage flags to set in the VkBufferCreateInfo descriptor, passed to - * to the vkCreateBuffer() call. - * @param in_memory_freatures Required memory features. - * @param in_opt_client_data if not nullptr, exactly @param in_size bytes will be copied to the allocated - * buffer memory. - **/ - static std::shared_ptr create_nonsparse(std::weak_ptr in_device_ptr, - VkDeviceSize in_size, - QueueFamilyBits in_queue_families, - VkSharingMode in_queue_sharing_mode, - VkBufferUsageFlags in_usage_flags, - Anvil::MemoryFeatureFlags in_memory_features, - const void* in_opt_client_data); - - /** Creates a new Buffer wrapper instance. The new NON-SPARSE buffer will reuse a region of the specified - * buffer's storage, instead of creating one's own. - * - * It is user's responsibility to ensure memory aliasing or synchronization is used, according - * to the spec rules. - * - * @param in_parent_buffer_ptr Specifies the buffer, whose memory block should be used. Must not be - * nullptr. MUST BE NON-SPARSE. - * @param in_start_offset Memory region's start offset. - * @param in_size Size of the memory region to "claim". 
- **/ - static std::shared_ptr create_nonsparse(std::shared_ptr in_parent_nonsparse_buffer_ptr, - VkDeviceSize in_start_offset, - VkDeviceSize in_size); - - /** Initializes a new SPARSE buffer object using user-specified parameters. - * - * Does NOT bind any memory regions to the object. It is user's responsibility to call - * Queue::bind_sparse_memory() afterward to update page configuration. - * - * @param in_device_ptr Device to use. - * @param in_size Size of the buffer object to be initialized. - * @param in_queue_families Queue families which the buffer object is going to be used with. - * One or more user queue family bits can be enabled. - * @param in_queue_sharing_mode VkSharingMode value, which is going to be passed to the vkCreateBuffer() - * call. - * @param in_usage_flags Usage flags to set in the VkBufferCreateInfo descriptor, passed to - * to the vkCreateBuffer() call. - * @param in_residency_scope Scope of residency to support for the buffer. - **/ - static std::shared_ptr create_sparse(std::weak_ptr in_device_ptr, - VkDeviceSize in_size, - QueueFamilyBits in_queue_families, - VkSharingMode in_queue_sharing_mode, - VkBufferUsageFlags in_usage_flags, - Anvil::SparseResidencyScope in_residency_scope); - /** Destroys the Vulkan objects and unregister the Buffer instance from Object Tracker. */ virtual ~Buffer(); + static Anvil::BufferUniquePtr create(Anvil::BufferCreateInfoUniquePtr in_create_info_ptr); + /** Returns the lowest-level Buffer instance which stores the data exposed by this Buffer instance. */ - std::shared_ptr get_base_buffer(); + const Anvil::Buffer* get_base_buffer(); /** Returns the encapsulated raw Vulkan buffer handle * @@ -173,8 +92,11 @@ namespace Anvil * any memory allocator, which has this buffer scheduled for deferred memory allocation, * gets a chance to allocate & bind a memory block to the instance. A non-sparse buffer instance * without any memory block bound msut not be used for any GPU-side operation. + * + * This behavior may optionally be disabled by setting @param in_bake_memory_if_necessary to false. + * Should only be done in special circumstances. */ - VkBuffer get_buffer(); + VkBuffer get_buffer(const bool& in_bake_memory_if_necessary = true); /** Returns a pointer to the encapsulated raw Vulkan buffer handle */ const VkBuffer* get_buffer_ptr() const @@ -182,6 +104,11 @@ namespace Anvil return &m_buffer; } + const Anvil::BufferCreateInfo* get_create_info_ptr() const + { + return m_create_info_ptr.get(); + } + /** Returns a pointer to the underlying memory block wrapper instance. * * For non-sparse buffers, in case no memory block has been assigned to the buffer, @@ -193,83 +120,22 @@ namespace Anvil * * Note that resident sparse buffers may have multiple memory blocks assigned. **/ - std::shared_ptr get_memory_block(uint32_t in_n_memory_block); + Anvil::MemoryBlock* get_memory_block(uint32_t in_n_memory_block); /** Returns memory requirements for the buffer */ - VkMemoryRequirements get_memory_requirements() const - { - if (m_parent_buffer_ptr != nullptr) - { - return m_parent_buffer_ptr->get_memory_requirements(); - } - else - { - return m_buffer_memory_reqs; - } - } + VkMemoryRequirements get_memory_requirements() const; /** Returns the number of memory blocks assigned to the buffer. 
*/ - uint32_t get_n_memory_blocks() const - { - if (m_is_sparse) - { - return m_page_tracker_ptr->get_n_memory_blocks(); - } - else - { - return 1; - } - } + uint32_t get_n_memory_blocks() const; - /** Returns a pointer to the parent buffer, if one was specified at creation time */ - std::shared_ptr get_parent_buffer_ptr() const + const Anvil::PageTracker* get_page_tracker() const { - return m_parent_buffer_ptr; + return m_page_tracker_ptr.get(); } - /** Returns info about queue families this buffer has been created for */ - QueueFamilyBits get_queue_families() const - { - return m_queue_families; - } - - /** Returns the residency scope. - * - * Triggers an assertion failure if called for non-sparse images - */ - Anvil::SparseResidencyScope get_residency_scope() const - { - anvil_assert(m_is_sparse); - - return m_residency_scope; - } - - /** Returns sharing mode of the buffer */ - VkSharingMode get_sharing_mode() const - { - return m_sharing_mode; - } - - /** Returns size of the encapsulated Vulkan buffer memory region. - * - * @return >= 0 if successful, UINT64_MAX otherwise */ - VkDeviceSize get_size() const; - - /** Returns start offset of the encapsulated Vulkan buffer memory region. - * - * @return >= 0 if successful, UINT64_MAX otherwise */ - VkDeviceSize get_start_offset() const; - - /** Tells whether or not the buffer is sparse */ - bool is_sparse() const + bool prefers_dedicated_allocation() const { - return m_is_sparse; - } - - /** Returns usage defined for the buffer */ - VkBufferUsageFlags get_usage() const - { - return m_usage_flags; + return m_prefers_dedicated_allocation; } /** Reads @param in_size bytes, starting from @param in_start_offset, from the wrapped memory object. @@ -280,23 +146,36 @@ namespace Anvil * * If the buffer object uses non-mappable storage memory, a staging buffer using mappable memory will be created * instead. User-specified region of the source buffer will then be copied into it by submitting a copy operation, - * executed either on the transfer queue (if available), or on the universal queue. Afterward, the staging buffer + * executed either on the transfer queue (if available), or on the universal queue. Afterward, the staging buffer * will be released. * + * The function prototype without @param in_device_mask argument should be used for single-GPU devices only. + * The function prototype with @param in_device_mask argument should be used for multi-GPU devices only. + * The mask must contain only one bit set. + * * This function must not be used to read data from buffers, whose memory backing comes from a multi-instance heap. * * This function blocks until the transfer completes. * - * @param in_start_offset As per description. Must be smaller than the underlying memory object's size. - * @param in_size As per description. @param in_start_offset + @param in_size must be lower than or - * equal to the underlying memory object's size. - * @param in_out_result_ptr Retrieved data will be stored under this location. Must not be nullptr. + * @param in_start_offset As per description. Must be smaller than the underlying memory object's size. + * @param in_size As per description. @param in_start_offset + @param in_size must be lower than or + * equal to the underlying memory object's size. + * @param out_result_ptr Retrieved data will be stored under this location. Must not be nullptr. * * @return true if the operation was successful, false otherwise. 
**/ bool read(VkDeviceSize in_start_offset, VkDeviceSize in_size, void* out_result_ptr); + bool read(VkDeviceSize in_start_offset, + VkDeviceSize in_size, + uint32_t in_device_mask, + void* out_result_ptr); + + bool requires_dedicated_allocation() const + { + return m_requires_dedicated_allocation; + } /** Attaches a memory block to the buffer object. * @@ -306,11 +185,36 @@ namespace Anvil * This function can only be used for NON-SPARSE buffers. Calling this function for sparse buffers will * result in an assertion failure. * - * @param in_memory_block_ptr Memory block to attach to the buffer object. Must not be NULL. + * @param in_memory_block_ptr Memory block to attach to the buffer object. Must not be NULL. + * @param in_memory_block_owned_by_buffer TODO + * @param in_n_device_group_indices Describes the number of device indices available under @param in_device_group_indices_ptr. + * @param in_device_group_indices_ptr Device group indices to use to form the device mask. * * @return true if successful, false otherwise. **/ - bool set_nonsparse_memory(std::shared_ptr in_memory_block_ptr); + bool set_nonsparse_memory(MemoryBlockUniquePtr in_memory_block_ptr); + bool set_nonsparse_memory(MemoryBlock* in_memory_block_ptr, + bool in_memory_block_owned_by_buffer); + + bool set_nonsparse_memory(MemoryBlockUniquePtr in_memory_block_ptr, + uint32_t in_n_device_group_indices, + const uint32_t* in_device_group_indices_ptr); + bool set_nonsparse_memory(MemoryBlock* in_memory_block_ptr, + bool in_memory_block_owned_by_buffer, + uint32_t in_n_device_group_indices, + const uint32_t* in_device_group_indices_ptr); + + /** See set_nonsparse_memory() for general documentation. + * + * This static function can be used to set buffer memory bindings in a batched manner. + * + * Can be used for both single- and multi-GPU devices. + * Requires VK_KHR_device_group to be supported by & enabled for the device. + * + * TODO + **/ + static bool set_nonsparse_memory_multi(uint32_t in_n_buffer_memory_binding_updates, + BufferMemoryBindingUpdate* in_updates_ptr); /** Writes @param in_size bytes, starting from @param in_start_offset, into the wrapped memory object. * @@ -325,6 +229,11 @@ namespace Anvil * * This function must not be used to read data from buffers, whose memory backing comes from a multi-instance heap. * + * The function prototype without @param in_device_mask argument should be used for single-GPU devices only. + * + * The function prototype with @param in_device_mask argument should be used for multi-GPU devices only. + * The mask must contain only one bit set. + * * If the buffer instance uses an exclusive sharing mode and supports more than just one queue family type AND memory * backing the buffer is not mappable, you MUST specify a queue instance that should be used to perform a buffer->buffer * copy op. The queue MUST support transfer ops. @@ -335,62 +244,57 @@ namespace Anvil * @param in_size As per description. @param in_start_offset + @param in_size must be lower than or * equal to the underlying memory object's size. * @param in_data Data to store. Must not be nullptr. - * @param in_opt_queue_ptr See documentation above. May be nullptr. - + * * @return true if the operation was successful, false otherwise. 
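/*
** Illustrative sketch of the refactored single-GPU read()/write() entrypoints:
** `buffer_ptr` is assumed to be an already-created, non-sparse Anvil::Buffer*
** with a memory block bound. Both calls block until the transfer finishes; the
** prototypes taking a device mask are reserved for multi-GPU devices.
*/
const float upload_data[4]   = {0.0f, 1.0f, 2.0f, 3.0f};
float       readback_data[4] = {};

if (!buffer_ptr->write(0, /* in_start_offset */
                       sizeof(upload_data),
                       upload_data) )
{
    /* Write failed - handle the error. */
}

if (!buffer_ptr->read(0, /* in_start_offset */
                      sizeof(readback_data),
                      readback_data) )
{
    /* Read failed - handle the error. */
}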
**/ - bool write(VkDeviceSize in_start_offset, - VkDeviceSize in_size, - const void* in_data, - std::shared_ptr in_opt_queue_ptr = nullptr); + bool write(VkDeviceSize in_start_offset, + VkDeviceSize in_size, + const void* in_data, + Anvil::Queue* in_opt_queue_ptr = nullptr); + bool write(VkDeviceSize in_start_offset, + VkDeviceSize in_size, + const void* in_data, + uint32_t in_device_mask, + Anvil::Queue* in_opt_queue_ptr = nullptr); private: /* Private functions */ - explicit Buffer(std::weak_ptr in_device_ptr, - VkDeviceSize in_size, - QueueFamilyBits in_queue_families, - VkSharingMode in_queue_sharing_mode, - VkBufferUsageFlags in_usage_flags, - Anvil::SparseResidencyScope in_residency_scope); - explicit Buffer(std::weak_ptr in_device_ptr, - VkDeviceSize in_size, - QueueFamilyBits in_queue_families, - VkSharingMode in_queue_sharing_mode, - VkBufferUsageFlags in_usage_flags, - MemoryFeatureFlags in_memory_features); - explicit Buffer(std::shared_ptr in_parent_buffer_ptr, - VkDeviceSize in_start_offset, - VkDeviceSize in_size); - - Buffer (const Buffer&); - Buffer& operator=(const Buffer&); - - void create_buffer (Anvil::QueueFamilyBits in_queue_families, - VkSharingMode in_sharing_mode, - VkDeviceSize in_size); - bool set_memory_sparse(std::shared_ptr in_memory_block_ptr, - VkDeviceSize in_memory_start_offset, - VkDeviceSize in_start_offset, - VkDeviceSize in_size); + + Buffer(Anvil::BufferCreateInfoUniquePtr in_create_info_ptr); + + bool init (); + bool init_staging_buffer(const VkDeviceSize& in_size, + Anvil::Queue* in_opt_queue_ptr); + bool set_memory_sparse (MemoryBlock* in_memory_block_ptr, + bool in_memory_block_owned_by_buffer, + VkDeviceSize in_memory_start_offset, + VkDeviceSize in_start_offset, + VkDeviceSize in_size); + + bool set_memory_nonsparse_internal(MemoryBlockUniquePtr in_memory_block_ptr, + uint32_t in_n_device_group_indices, + const uint32_t* in_device_group_indices_ptr); + + bool is_memory_block_owned(const MemoryBlock* in_memory_block_ptr) const; /* Private members */ - VkBuffer m_buffer; - VkMemoryRequirements m_buffer_memory_reqs; - VkDeviceSize m_buffer_size; - std::weak_ptr m_device_ptr; - bool m_is_sparse; - std::shared_ptr m_memory_block_ptr; // only used by non-sparse buffers - std::unique_ptr m_page_tracker_ptr; // only used by sparse buffers - std::shared_ptr m_parent_buffer_ptr; - Anvil::QueueFamilyBits m_queue_families; - Anvil::SparseResidencyScope m_residency_scope; - VkSharingMode m_sharing_mode; - VkDeviceSize m_start_offset; - - VkBufferCreateFlagsVariable(m_create_flags); - VkBufferUsageFlagsVariable (m_usage_flags); - - friend class Anvil::Queue; /* set_sparse_memory() */ + VkBuffer m_buffer; + VkMemoryRequirements m_buffer_memory_reqs; + std::unique_ptr m_create_info_ptr; + + Anvil::MemoryBlock* m_memory_block_ptr; // only used by non-sparse buffers + std::unique_ptr m_page_tracker_ptr; // only used by sparse buffers + Anvil::BufferUniquePtr m_staging_buffer_ptr; + Anvil::Queue* m_staging_buffer_queue_ptr; + + std::vector m_owned_memory_blocks; + bool m_prefers_dedicated_allocation; + bool m_requires_dedicated_allocation; + + friend class Anvil::Queue; /* set_memory_sparse() */ + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(Buffer); + ANVIL_DISABLE_COPY_CONSTRUCTOR(Buffer); }; }; /* namespace Anvil */ diff --git a/include/wrappers/buffer_view.h b/include/wrappers/buffer_view.h index e91027ac..598a30bd 100644 --- a/include/wrappers/buffer_view.h +++ b/include/wrappers/buffer_view.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. 
All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -30,23 +30,21 @@ #ifndef WRAPPERS_BUFFER_VIEW_H #define WRAPPERS_BUFFER_VIEW_H +#include "misc/mt_safety.h" #include "misc/types.h" namespace Anvil { /** Wrapper class for Vulkan buffer views */ - class BufferView : public DebugMarkerSupportProvider + class BufferView : public DebugMarkerSupportProvider, + public MTSafetySupportProvider { public: /* Public functions */ /** Creates a single Vulkan buffer view instance and registers the object in Object Tracker. * For argument documentation, please see Vulkan API specification. */ - static std::shared_ptr create(std::weak_ptr in_device_ptr, - std::shared_ptr in_buffer_ptr, - VkFormat in_format, - VkDeviceSize in_start_offset, - VkDeviceSize in_size); + static BufferViewUniquePtr create(Anvil::BufferViewCreateInfoUniquePtr in_create_info_ptr); /** Destructor */ virtual ~BufferView(); @@ -63,48 +61,23 @@ namespace Anvil return &m_buffer_view; } - /** Returns format used by the buffer view */ - VkFormat get_format() const + const Anvil::BufferViewCreateInfo* get_create_info_ptr() const { - return m_format; - } - - /** Returns pointer to the parent buffer instance */ - std::shared_ptr get_parent_buffer() const - { - return m_buffer_ptr; - } - - /** Returns size of the encapsulated buffer memory region */ - VkDeviceSize get_size() const - { - return m_size; - } - - /** Returns start offset of the encapsulated buffer memory region */ - VkDeviceSize get_start_offset() const - { - return m_start_offset; + return m_create_info_ptr.get(); } private: /* Private functions */ - BufferView(std::weak_ptr in_device_ptr, - std::shared_ptr in_buffer_ptr, - VkFormat in_format, - VkDeviceSize in_start_offset, - VkDeviceSize in_size); + BufferView(Anvil::BufferViewCreateInfoUniquePtr in_create_info_ptr); - BufferView (const BufferView&); - BufferView& operator=(const BufferView&); + bool init(); /* Private variables */ - std::shared_ptr m_buffer_ptr; - VkBufferView m_buffer_view; - std::weak_ptr m_device_ptr; - VkFormat m_format; - VkDeviceSize m_size; - VkDeviceSize m_start_offset; + VkBufferView m_buffer_view; + Anvil::BufferViewCreateInfoUniquePtr m_create_info_ptr; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(BufferView); + ANVIL_DISABLE_COPY_CONSTRUCTOR(BufferView); }; }; /* namespace Anvil */ diff --git a/include/wrappers/command_buffer.h b/include/wrappers/command_buffer.h index cde8a545..d6ba939e 100644 --- a/include/wrappers/command_buffer.h +++ b/include/wrappers/command_buffer.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -38,6 +38,7 @@ #include "misc/callbacks.h" #include "misc/debug_marker.h" #include "misc/io.h" +#include "misc/mt_safety.h" #include "misc/types.h" #ifdef _DEBUG @@ -61,10 +62,14 @@ namespace Anvil typedef enum { COMMAND_TYPE_BEGIN_RENDER_PASS, + COMMAND_TYPE_BEGIN_RENDER_PASS_2_KHR, COMMAND_TYPE_BEGIN_QUERY, + COMMAND_TYPE_BEGIN_QUERY_INDEXED_EXT, + COMMAND_TYPE_BEGIN_TRANSFORM_FEEDBACK_EXT, COMMAND_TYPE_BIND_DESCRIPTOR_SETS, COMMAND_TYPE_BIND_INDEX_BUFFER, COMMAND_TYPE_BIND_PIPELINE, + COMMAND_TYPE_BIND_TRANSFORM_FEEDBACK_BUFFERS_EXT, COMMAND_TYPE_BIND_VERTEX_BUFFER, COMMAND_TYPE_BLIT_IMAGE, COMMAND_TYPE_CLEAR_ATTACHMENTS, @@ -79,18 +84,26 @@ namespace Anvil COMMAND_TYPE_DEBUG_MARKER_END_EXT, COMMAND_TYPE_DEBUG_MARKER_INSERT_EXT, COMMAND_TYPE_DISPATCH, + COMMAND_TYPE_DISPATCH_BASE_KHR, COMMAND_TYPE_DISPATCH_INDIRECT, COMMAND_TYPE_DRAW, COMMAND_TYPE_DRAW_INDEXED, COMMAND_TYPE_DRAW_INDEXED_INDIRECT, COMMAND_TYPE_DRAW_INDEXED_INDIRECT_COUNT_AMD, + COMMAND_TYPE_DRAW_INDEXED_INDIRECT_COUNT_KHR, COMMAND_TYPE_DRAW_INDIRECT, + COMMAND_TYPE_DRAW_INDIRECT_BYTE_COUNT_EXT, COMMAND_TYPE_DRAW_INDIRECT_COUNT_AMD, + COMMAND_TYPE_DRAW_INDIRECT_COUNT_KHR, COMMAND_TYPE_END_QUERY, + COMMAND_TYPE_END_QUERY_INDEXED_EXT, COMMAND_TYPE_END_RENDER_PASS, + COMMAND_TYPE_END_RENDER_PASS_2_KHR, + COMMAND_TYPE_END_TRANSFORM_FEEDBACK_EXT, COMMAND_TYPE_EXECUTE_COMMANDS, COMMAND_TYPE_FILL_BUFFER, COMMAND_TYPE_NEXT_SUBPASS, + COMMAND_TYPE_NEXT_SUBPASS_2_KHR, COMMAND_TYPE_PIPELINE_BARRIER, COMMAND_TYPE_PUSH_CONSTANTS, COMMAND_TYPE_RESET_EVENT, @@ -99,8 +112,10 @@ namespace Anvil COMMAND_TYPE_SET_BLEND_CONSTANTS, COMMAND_TYPE_SET_DEPTH_BIAS, COMMAND_TYPE_SET_DEPTH_BOUNDS, + COMMAND_TYPE_SET_DEVICE_MASK_KHR, COMMAND_TYPE_SET_EVENT, COMMAND_TYPE_SET_LINE_WIDTH, + COMMAND_TYPE_SET_SAMPLE_LOCATIONS_EXT, COMMAND_TYPE_SET_SCISSOR, COMMAND_TYPE_SET_STENCIL_COMPARE_MASK, COMMAND_TYPE_SET_STENCIL_REFERENCE, @@ -108,6 +123,7 @@ namespace Anvil COMMAND_TYPE_SET_VIEWPORT, COMMAND_TYPE_UPDATE_BUFFER, COMMAND_TYPE_WAIT_EVENTS, + COMMAND_TYPE_WRITE_BUFFER_MARKER_AMD, COMMAND_TYPE_WRITE_TIMESTAMP, } CommandType; @@ -153,35 +169,68 @@ namespace Anvil COMMAND_BUFFER_CALLBACK_ID_COUNT }; - /** Holds all arguments passed to a vkCmdBeginRenderPass() command. - * - * Raw Vulkan object handles have been replaced with pointers to wrapper objects. - * These objects are retained at construction time, and released at descriptor - * destruction time. - */ - typedef struct BeginRenderPassCommand : public Command + /** Holds all arguments passed to a vkCmdBeginQueryIndexedEXT() command. */ + typedef struct BeginQueryIndexedEXTCommand : public Command { - std::vector clear_values; - VkSubpassContents contents; - std::shared_ptr fbo_ptr; - std::vector > physical_devices; - std::vector render_areas; - std::shared_ptr render_pass_ptr; + Anvil::QueryControlFlags flags; + uint32_t index; + Anvil::QueryPool* query_pool_ptr; + uint32_t query; /** Constructor. * - * Retains @param in_fbo_ptr and @param in_render_pass_ptr objects. + * Arguments as per VK_EXT_transform_feedback. + **/ + explicit BeginQueryIndexedEXTCommand(Anvil::QueryPool* in_query_pool_ptr, + const uint32_t& in_query, + const Anvil::QueryControlFlags& in_flags, + const uint32_t& in_index); + + /** Destructor. + * + * Releases the encapsulated Framebuffer and RenderPass instances. 
+ **/ + virtual ~BeginQueryIndexedEXTCommand() + { + /* Stub */ + } + + + private: + BeginQueryIndexedEXTCommand (const BeginQueryIndexedEXTCommand&); + BeginQueryIndexedEXTCommand& operator=(const BeginQueryIndexedEXTCommand&); + } BeginQueryIndexedEXTCommand; + + /** Holds all arguments passed to a vkCmdBeginRenderPass() command. */ + typedef struct BeginRenderPassCommand : public Command + { + std::vector clear_values; + Anvil::SubpassContents contents; + uint32_t device_mask; + Anvil::Framebuffer* fbo_ptr; + std::vector render_areas; + Anvil::RenderPass* render_pass_ptr; + + /* VK_EXT_sample_locations: */ + std::vector attachment_initial_sample_locations; + std::vector post_subpass_sample_locations; + + /** Constructor. * * Arguments as per Vulkan API. **/ - explicit BeginRenderPassCommand(uint32_t in_n_clear_values, - const VkClearValue* in_clear_value_ptrs, - std::shared_ptr in_fbo_ptr, - uint32_t in_n_physical_devices, - const std::weak_ptr* in_physical_devices, - const VkRect2D* in_render_areas, - std::shared_ptr in_render_pass_ptr, - VkSubpassContents in_contents); + explicit BeginRenderPassCommand(uint32_t in_n_clear_values, + const VkClearValue* in_clear_value_ptrs, + Anvil::Framebuffer* in_fbo_ptr, + uint32_t in_device_mask, + uint32_t in_n_render_areas, + const VkRect2D* in_render_areas_ptr, + Anvil::RenderPass* in_render_pass_ptr, + Anvil::SubpassContents in_contents, + const uint32_t& in_n_attachment_initial_sample_locations, + const Anvil::AttachmentSampleLocations* in_attachment_initial_sample_locations_ptr, + const uint32_t& in_n_post_subpass_sample_locations, + const Anvil::SubpassSampleLocations* in_post_subpass_sample_locations_ptr); /** Destructor. * @@ -197,7 +246,38 @@ namespace Anvil BeginRenderPassCommand (const BeginRenderPassCommand&); BeginRenderPassCommand& operator=(const BeginRenderPassCommand&); } BeginRenderPassCommand; - + + typedef struct BeginRenderPass2KHRCommand : BeginRenderPassCommand + { + BeginRenderPass2KHRCommand(uint32_t in_n_clear_values, + const VkClearValue* in_clear_value_ptrs, + Anvil::Framebuffer* in_fbo_ptr, + uint32_t in_device_mask, + uint32_t in_n_render_areas, + const VkRect2D* in_render_areas_ptr, + Anvil::RenderPass* in_render_pass_ptr, + Anvil::SubpassContents in_contents, + const uint32_t& in_n_attachment_initial_sample_locations, + const Anvil::AttachmentSampleLocations* in_attachment_initial_sample_locations_ptr, + const uint32_t& in_n_post_subpass_sample_locations, + const Anvil::SubpassSampleLocations* in_post_subpass_sample_locations_ptr) + :BeginRenderPassCommand(in_n_clear_values, + in_clear_value_ptrs, + in_fbo_ptr, + in_device_mask, + in_n_render_areas, + in_render_areas_ptr, + in_render_pass_ptr, + in_contents, + in_n_attachment_initial_sample_locations, + in_attachment_initial_sample_locations_ptr, + in_n_post_subpass_sample_locations, + in_post_subpass_sample_locations_ptr) + { + type = CommandType::COMMAND_TYPE_BEGIN_RENDER_PASS_2_KHR; + } + } BeginRenderPass2KHRCommand; + /* Structure passed as a COMMAND_BUFFER_CALLBACK_ID_BEGIN_RENDER_PASS_COMMAND_RECORDED call-back argument */ typedef struct BeginRenderPassCommandRecordedCallbackData { @@ -212,12 +292,46 @@ namespace Anvil explicit BeginRenderPassCommandRecordedCallbackData(CommandBufferBase* in_command_buffer_ptr, const BeginRenderPassCommand* in_command_details_ptr) :command_buffer_ptr (in_command_buffer_ptr), - command_details_ptr(in_command_details_ptr) + command_details_ptr(in_command_details_ptr) { /* Stub */ } } BeginRenderPassCommandRecordedCallbackData; + 
typedef struct BeginTransformFeedbackEXTCommand : public Command + { + std::vector counter_buffer_offsets; + std::vector counter_buffer_ptrs; + uint32_t first_counter_buffer; + + explicit BeginTransformFeedbackEXTCommand(const uint32_t& in_first_counter_buffer, + const std::vector& in_counter_buffer_ptrs, + const std::vector& in_counter_buffer_offsets); + + private: + BeginTransformFeedbackEXTCommand (const BeginTransformFeedbackEXTCommand&); + BeginTransformFeedbackEXTCommand& operator=(const BeginTransformFeedbackEXTCommand); + } BeginTransformFeedbackEXTCommand; + + typedef struct BindTransformFeedbackBuffersEXTCommand : public Command + { + std::vector buffer_ptrs; + uint32_t first_binding; + uint32_t n_bindings; + std::vector offsets; + std::vector sizes; + + explicit BindTransformFeedbackBuffersEXTCommand(const uint32_t& in_first_binding, + const uint32_t& in_n_bindings, + const std::vector& in_buffer_ptrs, + const std::vector& in_offsets, + const std::vector& in_sizes); + + private: + BindTransformFeedbackBuffersEXTCommand (const BindTransformFeedbackBuffersEXTCommand&); + BindTransformFeedbackBuffersEXTCommand& operator=(const BindTransformFeedbackBuffersEXTCommand); + } BindTransformFeedbackBuffersEXTCommand; + /** Holds all arguments passed to a vkCmdEndRenderPass() command. */ typedef struct EndRenderPassCommand : public Command { @@ -230,6 +344,15 @@ namespace Anvil } } EndRenderPassCommand; + typedef struct EndRenderPass2KHRCommand : public EndRenderPassCommand + { + /** Constructor. */ + EndRenderPass2KHRCommand() + { + type = COMMAND_TYPE_END_RENDER_PASS_2_KHR; + } + } EndRenderPass2KHRCommand; + /* Structure passed as a COMMAND_BUFFER_CALLBACK_ID_END_RENDER_PASS_COMMAND_RECORDED call-back argument */ typedef struct EndRenderPassCommandRecordedCallbackData { @@ -244,28 +367,23 @@ namespace Anvil explicit EndRenderPassCommandRecordedCallbackData(CommandBufferBase* in_command_buffer_ptr, const EndRenderPassCommand* in_command_details_ptr) :command_buffer_ptr (in_command_buffer_ptr), - command_details_ptr(in_command_details_ptr) + command_details_ptr(in_command_details_ptr) { /* Stub */ } } EndRenderPassCommandRecordedCallbackData; - /** Holds all arguments passed to a vkCmdPipelineBarrier() command. - * - * Takes an array of Barrier descriptors instead of void* pointers, as is the case - * with the original Vulkan API. Each buffer in a buffer barrier, and each image in - * an image barrier, is retained. - **/ + /** Holds all arguments passed to a vkCmdPipelineBarrier() command. */ typedef struct PipelineBarrierCommand : public Command { std::vector buffer_barriers; std::vector image_barriers; - std::vector memory_barriers; + std::vector memory_barriers; - VkDependencyFlagsVariable(flags); + Anvil::DependencyFlags flags; - VkPipelineStageFlagsVariable(dst_stage_mask); - VkPipelineStageFlagsVariable(src_stage_mask); + Anvil::PipelineStageFlags dst_stage_mask; + Anvil::PipelineStageFlags src_stage_mask; /** Constructor. * @@ -273,11 +391,11 @@ namespace Anvil * * Arguments as per Vulkan API. 
**/ - explicit PipelineBarrierCommand(VkPipelineStageFlags in_src_stage_mask, - VkPipelineStageFlags in_dst_stage_mask, - VkDependencyFlags in_flags, + explicit PipelineBarrierCommand(Anvil::PipelineStageFlags in_src_stage_mask, + Anvil::PipelineStageFlags in_dst_stage_mask, + Anvil::DependencyFlags in_flags, uint32_t in_memory_barrier_count, - const MemoryBarrier* const in_memory_barriers_ptr_ptr, + const MemoryBarrierAnv* const in_memory_barriers_ptr_ptr, uint32_t in_buffer_memory_barrier_count, const BufferBarrier* const in_buffer_memory_barrier_ptr_ptr, uint32_t in_image_memory_barrier_count, @@ -297,12 +415,26 @@ namespace Anvil * * Provides core functionality for the PrimaryCommandBuffer and SecondaryCommandBuffer classes. */ - class CommandBufferBase : public DebugMarkerSupportProvider, + class CommandBufferBase : public MTSafetySupportProvider, + public DebugMarkerSupportProvider, public CallbacksSupportProvider { public: /* Public functions */ + virtual ~CommandBufferBase(); + + /** Starts a queue debug label region. App must call end_debug_utils_label() for the same cmd buffer instance + * at some point to declare the end of the label region. + * + * Requires VK_EXT_debug_utils support. Otherwise, the call is moot. + * + * @param in_label_name_ptr Meaning as per VkDebugUtilsLabelEXT::pLabelName. Must not be nullptr. + * @param in_color_vec4_ptr Meaning as per VkDebugUtilsLabelEXT::color. Must not be nullptr. + */ + void begin_debug_utils_label(const char* in_label_name_ptr, + const float* in_color_vec4_ptr); + /* Disables internal command stashing which is enbled for builds created with * STORE_COMMAND_BUFFER_COMMANDS enabled. * @@ -313,8 +445,15 @@ namespace Anvil m_command_stashing_disabled = true; } + /** Ends a queue debug label region. Requires a preceding begin_debug_utils_label() call. + * + * Requires VK_EXT_debug_utils support. Otherwise, the call is moot. + * + */ + void end_debug_utils_label(); + /** Returns a handle to the raw Vulkan command buffer instance, encapsulated by the object */ - const VkCommandBuffer get_command_buffer() const + VkCommandBuffer get_command_buffer() const { return m_command_buffer; } @@ -334,11 +473,21 @@ namespace Anvil } /** Returns the parent command pool */ - std::weak_ptr get_parent_command_pool() const + Anvil::CommandPool* get_parent_command_pool() const { return m_parent_command_pool_ptr; } + /** Inserts a single queue debug label. + * + * Requires VK_EXT_debug_utils support. Otherwise, the call is moot. + * + * @param in_label_name_ptr Meaning as per VkDebugUtilsLabelEXT::pLabelName. Must not be nullptr. + * @param in_color_vec4_ptr Meaning as per VkDebugUtilsLabelEXT::color. Must not be nullptr. + */ + void insert_debug_utils_label(const char* in_label_name_ptr, + const float* in_color_vec4_ptr); + /** Issues a vkCmdBeginQuery() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS * #define enabled). @@ -350,9 +499,47 @@ namespace Anvil * * @return true if successful, false otherwise. 
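/*
** Illustrative sketch: bracketing recorded commands with a debug-utils label
** region. `cmd_buffer_ptr` is an assumption (a command buffer in recording
** mode); without VK_EXT_debug_utils support the calls are moot, as noted above.
*/
const float label_color[4] = {1.0f, 0.5f, 0.0f, 1.0f};

cmd_buffer_ptr->begin_debug_utils_label("Shadow map pass",
                                        label_color);

/* ... record the pass' commands here ... */

cmd_buffer_ptr->end_debug_utils_label();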
**/ - bool record_begin_query(std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_entry, - VkQueryControlFlags in_flags); + bool record_begin_query(Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_entry, + Anvil::QueryControlFlags in_flags); + + /** Issues a vkCmdBeginQueryIndexedEXT() call and appends it to the internal vector of commands + * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS + * #define enabled). + * + * Calling this function for a command buffer which has not been put into a recording mode + * (by issuing a start_recording() call earlier) will result in an assertion failure. + * + * Argument meaning is as per VK_EXT_transform_feedback. + * + * Requires VK_EXT_transform_feedback support. + * + * @return true if successful, false otherwise. + **/ + bool record_begin_query_indexed(Anvil::QueryPool* in_query_pool_ptr, + const Anvil::QueryIndex& in_query, + const Anvil::QueryControlFlags& in_flags, + const uint32_t& in_index); + + /** Issues a vkCmdBeginTransformFeedbackEXT() call and appends it to the internal vector of commands + * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS + * #define enabled). + * + * Calling this function for a command buffer which has not been put into a recording mode + * (by issuing a start_recording() call earlier) will result in an assertion failure. + * + * Requires active renderpass. + * + * Argument meaning is as per VK_EXT_transform_feedback. + * + * Requires VK_EXT_transform_feedback support. + * + * @return true if successful, false otherwise. + **/ + bool record_begin_transform_feedback_EXT(const uint32_t& in_first_counter_buffer, + const uint32_t& in_n_counter_buffers, + Anvil::Buffer** in_opt_counter_buffer_ptrs, + const VkDeviceSize* in_opt_counter_buffer_offsets); /** Issues a vkCmdBindDescriptorSets() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -365,13 +552,13 @@ namespace Anvil * * @return true if successful, false otherwise. **/ - bool record_bind_descriptor_sets(VkPipelineBindPoint in_pipeline_bind_point, - std::shared_ptr in_layout_ptr, - uint32_t in_first_set, - uint32_t in_set_count, - std::shared_ptr* in_descriptor_set_ptrs, - uint32_t in_dynamic_offset_count, - const uint32_t* in_dynamic_offset_ptrs); + bool record_bind_descriptor_sets(Anvil::PipelineBindPoint in_pipeline_bind_point, + Anvil::PipelineLayout* in_layout_ptr, + uint32_t in_first_set, + uint32_t in_set_count, + const Anvil::DescriptorSet* const* in_descriptor_set_ptrs, + uint32_t in_dynamic_offset_count, + const uint32_t* in_dynamic_offset_ptrs); /** Issues a vkCmdBindIndexBuffer() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -380,16 +567,13 @@ namespace Anvil * Calling this function for a command buffer which has not been put into a recording mode * (by issuing a start_recording() call earlier) will result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. - * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. 
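/*
** Illustrative sketch of the raw-pointer record_bind_index_buffer() flavour.
** `cmd_buffer_ptr` and `index_buffer_ptr` are assumptions, as is the
** Anvil::IndexType::UINT32 enumerator name used below.
*/
if (!cmd_buffer_ptr->record_bind_index_buffer(index_buffer_ptr,
                                              0, /* in_offset */
                                              Anvil::IndexType::UINT32) )
{
    /* Recording failed - handle the error. */
}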
**/ - bool record_bind_index_buffer(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - VkIndexType in_index_type); + bool record_bind_index_buffer(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::IndexType in_index_type); /** Issues a vkCmdBindPipeline() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -402,27 +586,44 @@ namespace Anvil * * @return true if successful, false otherwise. **/ - bool record_bind_pipeline(VkPipelineBindPoint in_pipeline_bind_point, - Anvil::PipelineID in_pipeline_id); + bool record_bind_pipeline(Anvil::PipelineBindPoint in_pipeline_bind_point, + Anvil::PipelineID in_pipeline_id); - /** Issues a vkCmdBindVertexBuffers() call and appends it to the internal vector of commands + /** Issues a vkCmdBindTransformFeedbackBuffersEXT() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS * #define enabled). * * Calling this function for a command buffer which has not been put into a recording mode * (by issuing a start_recording() call earlier) will result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. + * Requires VK_EXT_transform_feedback support. + * + * Argument meaning is as per VK_EXT_transform_feedback. + * + * @return true if successful, false otherwise. + * + */ + bool record_bind_transform_feedback_buffers_EXT(const uint32_t& in_first_binding, + const uint32_t& in_n_bindings, + Anvil::Buffer** in_buffer_ptrs, + const VkDeviceSize* in_offsets_ptr, + const VkDeviceSize* in_sizes_ptr); + + /** Issues a vkCmdBindVertexBuffers() call and appends it to the internal vector of commands + * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS + * #define enabled). + * + * Calling this function for a command buffer which has not been put into a recording mode + * (by issuing a start_recording() call earlier) will result in an assertion failure. * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. **/ - bool record_bind_vertex_buffers(uint32_t in_start_binding, - uint32_t in_binding_count, - std::shared_ptr* in_buffer_ptrs, - const VkDeviceSize* in_offset_ptrs); + bool record_bind_vertex_buffers(uint32_t in_start_binding, + uint32_t in_binding_count, + Anvil::Buffer** in_buffer_ptrs, + const VkDeviceSize* in_offset_ptrs); /** Issues a vkCmdBlitImage() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -434,20 +635,17 @@ namespace Anvil * It is also illegal to call this function when recording renderpass commands. Doing so * will also result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. - * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. 
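/*
** Illustrative sketch of the raw-pointer record_bind_vertex_buffers() flavour.
** `cmd_buffer_ptr` (a command buffer in recording mode) and `vertex_buffer_ptr`
** (an Anvil::Buffer*) are assumptions.
*/
Anvil::Buffer* vertex_buffers[] = { vertex_buffer_ptr };
VkDeviceSize   vertex_offsets[] = { 0 };

if (!cmd_buffer_ptr->record_bind_vertex_buffers(0, /* in_start_binding */
                                                1, /* in_binding_count */
                                                vertex_buffers,
                                                vertex_offsets) )
{
    /* Recording failed - handle the error. */
}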
**/ - bool record_blit_image(std::shared_ptr in_src_image_ptr, - VkImageLayout in_src_image_layout, - std::shared_ptr in_dst_image_ptr, - VkImageLayout in_dst_image_layout, - uint32_t in_region_count, - const VkImageBlit* in_region_ptrs, - VkFilter in_filter); + bool record_blit_image(Anvil::Image* in_src_image_ptr, + Anvil::ImageLayout in_src_image_layout, + Anvil::Image* in_dst_image_ptr, + Anvil::ImageLayout in_dst_image_layout, + uint32_t in_region_count, + const Anvil::ImageBlit* in_region_ptrs, + Anvil::Filter in_filter); /** Issues a vkCmdClearAttachments() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -459,17 +657,14 @@ namespace Anvil * It is also illegal to call this function when not recording renderpass commands. Doing so * will also result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. - * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. **/ - bool record_clear_attachments(uint32_t in_n_attachments, - const VkClearAttachment* in_attachment_ptrs, - uint32_t in_n_rects, - const VkClearRect* in_rect_ptrs); + bool record_clear_attachments(uint32_t in_n_attachments, + const Anvil::ClearAttachment* in_attachment_ptrs, + uint32_t in_n_rects, + const VkClearRect* in_rect_ptrs); /** Issues a vkCmdClearColorImage() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -478,21 +673,18 @@ namespace Anvil * Calling this function for a command buffer which has not been put into a recording mode * (by issuing a start_recording() call earlier) will result in an assertion failure. * - * It is also illegal to call this function when not recording renderpass commands. Doing so + * It is also illegal to call this function when recording renderpass commands. Doing so * will also result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. - * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. **/ - bool record_clear_color_image(std::shared_ptr in_image_ptr, - VkImageLayout in_image_layout, - const VkClearColorValue* in_color_ptr, - uint32_t in_range_count, - const VkImageSubresourceRange* in_range_ptrs); + bool record_clear_color_image(Anvil::Image* in_image_ptr, + Anvil::ImageLayout in_image_layout, + const VkClearColorValue* in_color_ptr, + uint32_t in_range_count, + const Anvil::ImageSubresourceRange* in_range_ptrs); /** Issues a vkCmdClearDepthStencilImage() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -504,18 +696,15 @@ namespace Anvil * It is also illegal to call this function when recording renderpass commands. Doing so * will also result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. - * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. 
**/ - bool record_clear_depth_stencil_image(std::shared_ptr in_image_ptr, - VkImageLayout in_image_layout, - const VkClearDepthStencilValue* in_depth_stencil_ptr, - uint32_t in_range_count, - const VkImageSubresourceRange* in_range_ptrs); + bool record_clear_depth_stencil_image(Anvil::Image* in_image_ptr, + Anvil::ImageLayout in_image_layout, + const VkClearDepthStencilValue* in_depth_stencil_ptr, + uint32_t in_range_count, + const Anvil::ImageSubresourceRange* in_range_ptrs); /** Issues a vkCmdCopyBuffer() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -527,17 +716,14 @@ namespace Anvil * It is also illegal to call this function when recording renderpass commands. Doing so * will also result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. - * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. **/ - bool record_copy_buffer(std::shared_ptr in_src_buffer_ptr, - std::shared_ptr in_dst_buffer_ptr, - uint32_t in_region_count, - const VkBufferCopy* in_region_ptrs); + bool record_copy_buffer(Anvil::Buffer* in_src_buffer_ptr, + Anvil::Buffer* in_dst_buffer_ptr, + uint32_t in_region_count, + const Anvil::BufferCopy* in_region_ptrs); /** Issues a vkCmdCopyBufferToImage() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -549,18 +735,15 @@ namespace Anvil * It is also illegal to call this function when recording renderpass commands. Doing so * will also result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. - * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. **/ - bool record_copy_buffer_to_image(std::shared_ptr in_src_buffer_ptr, - std::shared_ptr in_dst_image_ptr, - VkImageLayout in_dst_image_layout, - uint32_t in_region_count, - const VkBufferImageCopy* in_region_ptrs); + bool record_copy_buffer_to_image(Anvil::Buffer* in_src_buffer_ptr, + Anvil::Image* in_dst_image_ptr, + Anvil::ImageLayout in_dst_image_layout, + uint32_t in_region_count, + const Anvil::BufferImageCopy* in_region_ptrs); /** Issues a vkCmdCopyImage() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -572,19 +755,16 @@ namespace Anvil * It is also illegal to call this function when recording renderpass commands. Doing so * will also result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. - * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. 
**/ - bool record_copy_image(std::shared_ptr in_src_image_ptr, - VkImageLayout in_src_image_layout, - std::shared_ptr in_dst_image_ptr, - VkImageLayout in_dst_image_layout, - uint32_t in_region_count, - const VkImageCopy* in_region_ptrs); + bool record_copy_image(Anvil::Image* in_src_image_ptr, + Anvil::ImageLayout in_src_image_layout, + Anvil::Image* in_dst_image_ptr, + Anvil::ImageLayout in_dst_image_layout, + uint32_t in_region_count, + const Anvil::ImageCopy* in_region_ptrs); /** Issues a vkCmdCopyImageToBuffer() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -596,18 +776,15 @@ namespace Anvil * It is also illegal to call this function when recording renderpass commands. Doing so * will also result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. - * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. **/ - bool record_copy_image_to_buffer(std::shared_ptr in_src_image_ptr, - VkImageLayout in_src_image_layout, - std::shared_ptr in_dst_buffer_ptr, - uint32_t in_region_count, - const VkBufferImageCopy* in_region_ptrs); + bool record_copy_image_to_buffer(Anvil::Image* in_src_image_ptr, + Anvil::ImageLayout in_src_image_layout, + Anvil::Buffer* in_dst_buffer_ptr, + uint32_t in_region_count, + const Anvil::BufferImageCopy* in_region_ptrs); /** Issues a vkCmdCopyQueryPoolResults() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -619,20 +796,17 @@ namespace Anvil * It is also illegal to call this function when recording renderpass commands. Doing so * will also result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. - * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. **/ - bool record_copy_query_pool_results(std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_start_query, - uint32_t in_query_count, - std::shared_ptr in_dst_buffer_ptr, - VkDeviceSize in_dst_offset, - VkDeviceSize in_dst_stride, - VkQueryResultFlags in_flags); + bool record_copy_query_pool_results(Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_start_query, + uint32_t in_query_count, + Anvil::Buffer* in_dst_buffer_ptr, + VkDeviceSize in_dst_offset, + VkDeviceSize in_dst_stride, + VkQueryResultFlags in_flags); /** Issues a vkCmdDebugMarkerBeginEXT() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -693,7 +867,7 @@ namespace Anvil uint32_t in_y, uint32_t in_z); - /** Issues a vkCmdDispatchIndirect() call and appends it to the internal vector of commands + /** Issues a vkCmdDispatchBaseKHR() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS * #define enabled). * @@ -703,15 +877,35 @@ namespace Anvil * It is also illegal to call this function when recording renderpass commands. Doing so * will also result in an assertion failure. 
* - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. + * This function can only be called for command buffers created for a mGPU device. + * + * Argument meaning is as per Vulkan API specification. + * + * @return true if successful, false otherwise. + **/ + bool record_dispatch_base_KHR(uint32_t in_base_group_x, + uint32_t in_base_group_y, + uint32_t in_base_group_z, + uint32_t in_group_count_x, + uint32_t in_group_count_y, + uint32_t in_group_count_z); + + /** Issues a vkCmdDispatchIndirect() call and appends it to the internal vector of commands + * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS + * #define enabled). + * + * Calling this function for a command buffer which has not been put into a recording mode + * (by issuing a start_recording() call earlier) will result in an assertion failure. + * + * It is also illegal to call this function when recording renderpass commands. Doing so + * will also result in an assertion failure. * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. **/ - bool record_dispatch_indirect(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset); + bool record_dispatch_indirect(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset); /** Issues a vkCmdDraw() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -762,19 +956,16 @@ namespace Anvil * It is also illegal to call this function when not recording renderpass commands. Doing so * will also result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. - * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. **/ - bool record_draw_indexed_indirect(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - uint32_t in_draw_count, - uint32_t in_stride); + bool record_draw_indexed_indirect(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + uint32_t in_draw_count, + uint32_t in_stride); - /** Issues a vkCmdDrawIndexedIndirectCount() call and appends it to the internal vector of commands + /** Issues a vkCmdDrawIndexedIndirectCountAMD() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS * #define enabled). * @@ -784,9 +975,6 @@ namespace Anvil * It is also illegal to call this function when not recording renderpass commands. Doing so * will also result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. - * * This function is only available if VK_AMD_draw_indirect_count is supported by the Vulkan * device AND if the extension has been requested at creation time. * @@ -794,14 +982,14 @@ namespace Anvil * * @return true if successful, false otherwise. 
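A sketch of the new device-group dispatch wrapper declared above; work-group numbers are plain integers, so only the declared signature is relied on (helper name hypothetical, command buffer assumed to target an mGPU device and to be recording outside a renderpass):

static bool dispatch_lower_right_quadrant(Anvil::CommandBufferBase* in_cmd_buffer_ptr,
                                          uint32_t                  in_total_groups_x,
                                          uint32_t                  in_total_groups_y)
{
    return in_cmd_buffer_ptr->record_dispatch_base_KHR(in_total_groups_x / 2, /* in_base_group_x  */
                                                       in_total_groups_y / 2, /* in_base_group_y  */
                                                       0,                     /* in_base_group_z  */
                                                       in_total_groups_x / 2, /* in_group_count_x */
                                                       in_total_groups_y / 2, /* in_group_count_y */
                                                       1);                    /* in_group_count_z */
}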
**/ - bool record_draw_indexed_indirect_count_AMD(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - std::shared_ptr in_count_buffer_ptr, - VkDeviceSize in_count_offset, - uint32_t in_max_draw_count, - uint32_t in_stride); - - /** Issues a vkCmdDrawIndirect() call and appends it to the internal vector of commands + bool record_draw_indexed_indirect_count_AMD(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::Buffer* in_count_buffer_ptr, + VkDeviceSize in_count_offset, + uint32_t in_max_draw_count, + uint32_t in_stride); + + /** Issues a vkCmdDrawIndexedIndirectCountKHR() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS * #define enabled). * @@ -811,19 +999,40 @@ namespace Anvil * It is also illegal to call this function when not recording renderpass commands. Doing so * will also result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. + * This function is only available if VK_KHR_draw_indirect_count is supported by the Vulkan + * device AND if the extension has been requested at creation time. + * + * Argument meaning is as per VK_KHR_draw_indirect_count specification. + * + * @return true if successful, false otherwise. + **/ + bool record_draw_indexed_indirect_count_KHR(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::Buffer* in_count_buffer_ptr, + VkDeviceSize in_count_offset, + uint32_t in_max_draw_count, + uint32_t in_stride); + + /** Issues a vkCmdDrawIndirect() call and appends it to the internal vector of commands + * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS + * #define enabled). + * + * Calling this function for a command buffer which has not been put into a recording mode + * (by issuing a start_recording() call earlier) will result in an assertion failure. + * + * It is also illegal to call this function when not recording renderpass commands. Doing so + * will also result in an assertion failure. * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. **/ - bool record_draw_indirect(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - uint32_t in_count, - uint32_t in_stride); + bool record_draw_indirect(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + uint32_t in_count, + uint32_t in_stride); - /** Issues a vkCmdDrawIndirectCount() call and appends it to the internal vector of commands + /** Issues a vkCmdDrawIndirectByteCountEXT() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS * #define enabled). * @@ -833,8 +1042,29 @@ namespace Anvil * It is also illegal to call this function when not recording renderpass commands. Doing so * will also result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. + * This function is only available if VK_EXT_transform_feedback is supported by the Vulkan + * device AND if the extension has been requested at creation time. + * + * Argument meaning is as per VK_EXT_transform_feedback specification. + * + * @return true if successful, false otherwise. 
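The KHR count-buffer variant added above enables GPU-driven draw counts; a sketch of the call (hypothetical helper, offsets hard-coded to zero, stride taken from the core Vulkan struct, inside-renderpass requirement as documented):

static bool draw_scene_gpu_driven(Anvil::CommandBufferBase* in_cmd_buffer_ptr,
                                  Anvil::Buffer*            in_args_buffer_ptr,
                                  Anvil::Buffer*            in_count_buffer_ptr,
                                  uint32_t                  in_max_draw_count)
{
    return in_cmd_buffer_ptr->record_draw_indexed_indirect_count_KHR(in_args_buffer_ptr,
                                                                     0, /* in_offset       */
                                                                     in_count_buffer_ptr,
                                                                     0, /* in_count_offset */
                                                                     in_max_draw_count,
                                                                     static_cast<uint32_t>(sizeof(VkDrawIndexedIndirectCommand) )); /* in_stride */
}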
+ **/ + bool record_draw_indirect_byte_count_EXT(const uint32_t& in_instance_count, + const uint32_t& in_first_instance, + Anvil::Buffer* in_counter_buffer_ptr, + const VkDeviceSize& in_counter_buffer_offset, + const uint32_t& in_counter_offset, + const uint32_t& in_vertex_stride); + + /** Issues a vkCmdDrawIndirectCount() call and appends it to the internal vector of commands + * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS + * #define enabled). + * + * Calling this function for a command buffer which has not been put into a recording mode + * (by issuing a start_recording() call earlier) will result in an assertion failure. + * + * It is also illegal to call this function when not recording renderpass commands. Doing so + * will also result in an assertion failure. * * This function is only available if VK_AMD_draw_indirect_count is supported by the Vulkan * device AND if the extension has been requested at creation time. @@ -843,12 +1073,36 @@ namespace Anvil * * @return true if successful, false otherwise. **/ - bool record_draw_indirect_count_AMD(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - std::shared_ptr in_count_buffer_ptr, - VkDeviceSize in_count_offset, - uint32_t in_max_draw_count, - uint32_t in_stride); + bool record_draw_indirect_count_AMD(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::Buffer* in_count_buffer_ptr, + VkDeviceSize in_count_offset, + uint32_t in_max_draw_count, + uint32_t in_stride); + + /** Issues a vkCmdDrawIndirectCount() call and appends it to the internal vector of commands + * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS + * #define enabled). + * + * Calling this function for a command buffer which has not been put into a recording mode + * (by issuing a start_recording() call earlier) will result in an assertion failure. + * + * It is also illegal to call this function when not recording renderpass commands. Doing so + * will also result in an assertion failure. + * + * This function is only available if VK_KHR_draw_indirect_count is supported by the Vulkan + * device AND if the extension has been requested at creation time. + * + * Argument meaning is as per VK_KHR_draw_indirect_count specification. + * + * @return true if successful, false otherwise. + **/ + bool record_draw_indirect_count_KHR(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::Buffer* in_count_buffer_ptr, + VkDeviceSize in_count_offset, + uint32_t in_max_draw_count, + uint32_t in_stride); /** Issues a vkCmdEndQuery() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -861,8 +1115,45 @@ namespace Anvil * * @return true if successful, false otherwise. **/ - bool record_end_query(std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_entry); + bool record_end_query(Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_entry); + + /** Issues a vkCmdEndQueryIndexedEXT() call and appends it to the internal vector of commands + * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS + * #define enabled). + * + * Calling this function for a command buffer which has not been put into a recording mode + * (by issuing a start_recording() call earlier) will result in an assertion failure. + * + * Argument meaning is as per VK_EXT_transform_feedback. + * + * Requires VK_EXT_transform_feedback support. 
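record_draw_indirect_byte_count_EXT() lets transform-feedback output be replayed without a CPU read-back; a sketch under the same assumptions as the previous sketches (the counter buffer is the one an earlier transform-feedback pass wrote its byte count into, and the vertex stride must match the captured vertex layout):

static bool draw_captured_vertices(Anvil::CommandBufferBase* in_cmd_buffer_ptr,
                                   Anvil::Buffer*            in_counter_buffer_ptr,
                                   uint32_t                  in_vertex_stride)
{
    return in_cmd_buffer_ptr->record_draw_indirect_byte_count_EXT(1, /* in_instance_count        */
                                                                  0, /* in_first_instance        */
                                                                  in_counter_buffer_ptr,
                                                                  0, /* in_counter_buffer_offset */
                                                                  0, /* in_counter_offset        */
                                                                  in_vertex_stride);
}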
+ * + * @return true if successful, false otherwise. + **/ + bool record_end_query_indexed_EXT(Anvil::QueryPool* in_query_pool_ptr, + const Anvil::QueryIndex& in_query, + const uint32_t& in_index); + + /** Issues a vkCmdEndTransformFeedbackEXT() call and appends it to the internal vector of commands + * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS + * #define enabled). + * + * Calling this function for a command buffer which has not been put into a recording mode + * (by issuing a start_recording() call earlier) will result in an assertion failure. + * + * Requires active renderpass. + * + * Argument meaning is as per VK_EXT_transform_feedback. + * + * Requires VK_EXT_transform_feedback support. + * + * @return true if successful, false otherwise. + **/ + bool record_end_transform_feedback_EXT(const uint32_t& in_first_counter_buffer, + const uint32_t& in_n_counter_buffers, + Anvil::Buffer** in_opt_counter_buffer_ptrs, + const VkDeviceSize* in_opt_counter_buffer_offsets); /** Issues a vkCmdFillBuffer() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -874,17 +1165,14 @@ namespace Anvil * It is also illegal to call this function when recording renderpass commands. Doing so * will also result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. - * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. **/ - bool record_fill_buffer(std::shared_ptr in_dst_buffer_ptr, - VkDeviceSize in_dst_offset, - VkDeviceSize in_size, - uint32_t in_data); + bool record_fill_buffer(Anvil::Buffer* in_dst_buffer_ptr, + VkDeviceSize in_dst_offset, + VkDeviceSize in_size, + uint32_t in_data); /** Issues a vkCmdPipelineBarrier() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -893,18 +1181,15 @@ namespace Anvil * Calling this function for a command buffer which has not been put into a recording mode * (by issuing a start_recording() call earlier) will result in an assertion failure. * - * Any Vulkan object wrapper instances, passed implicitly to this function, are going to be retained, - * and will be released when the command buffer is released or resetted. - * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. **/ - bool record_pipeline_barrier(VkPipelineStageFlags in_src_stage_mask, - VkPipelineStageFlags in_dst_stage_mask, - VkBool32 in_by_region, + bool record_pipeline_barrier(Anvil::PipelineStageFlags in_src_stage_mask, + Anvil::PipelineStageFlags in_dst_stage_mask, + Anvil::DependencyFlags in_dependency_flags, uint32_t in_memory_barrier_count, - const MemoryBarrier* const in_memory_barriers_ptr, + const MemoryBarrierAnv* const in_memory_barriers_ptr, uint32_t in_buffer_memory_barrier_count, const BufferBarrier* const in_buffer_memory_barriers_ptr, uint32_t in_image_memory_barrier_count, @@ -921,11 +1206,11 @@ namespace Anvil * * @return true if successful, false otherwise. 
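The reworked record_pipeline_barrier() now takes Anvil::DependencyFlags instead of a VkBool32 "by region" flag. A minimal execution-only barrier sketch (no memory barriers); stage masks and dependency flags are passed in so no enumerator names are assumed:

static bool insert_execution_barrier(Anvil::CommandBufferBase* in_cmd_buffer_ptr,
                                     Anvil::PipelineStageFlags in_src_stage_mask,
                                     Anvil::PipelineStageFlags in_dst_stage_mask,
                                     Anvil::DependencyFlags    in_dependency_flags)
{
    return in_cmd_buffer_ptr->record_pipeline_barrier(in_src_stage_mask,
                                                      in_dst_stage_mask,
                                                      in_dependency_flags,
                                                      0,        /* in_memory_barrier_count        */
                                                      nullptr,  /* in_memory_barriers_ptr         */
                                                      0,        /* in_buffer_memory_barrier_count */
                                                      nullptr,  /* in_buffer_memory_barriers_ptr  */
                                                      0,        /* in_image_memory_barrier_count  */
                                                      nullptr); /* in_image_memory_barriers_ptr   */
}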
**/ - bool record_push_constants(std::shared_ptr in_layout_ptr, - VkShaderStageFlags in_stage_flags, - uint32_t in_offset, - uint32_t in_size, - const void* in_values); + bool record_push_constants(Anvil::PipelineLayout* in_layout_ptr, + Anvil::ShaderStageFlags in_stage_flags, + uint32_t in_offset, + uint32_t in_size, + const void* in_values); /** Issues a vkCmdResetEvent() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -937,15 +1222,12 @@ namespace Anvil * It is also illegal to call this function when recording renderpass commands. Doing so * will also result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. - * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. **/ - bool record_reset_event(std::shared_ptr in_event_ptr, - VkPipelineStageFlags in_stage_mask); + bool record_reset_event(Anvil::Event* in_event_ptr, + Anvil::PipelineStageFlags in_stage_mask); /** Issues a vkCmdResetQueryPool() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -961,9 +1243,9 @@ namespace Anvil * * @return true if successful, false otherwise. **/ - bool record_reset_query_pool(std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_start_query, - uint32_t in_query_count); + bool record_reset_query_pool(Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_start_query, + uint32_t in_query_count); /** Issues a vkCmdResolveImage() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -975,19 +1257,16 @@ namespace Anvil * It is also illegal to call this function when recording renderpass commands. Doing so * will also result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. - * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. **/ - bool record_resolve_image(std::shared_ptr in_src_image_ptr, - VkImageLayout in_src_image_layout, - std::shared_ptr in_dst_image_ptr, - VkImageLayout in_dst_image_layout, - uint32_t in_region_count, - const VkImageResolve* in_region_ptrs); + bool record_resolve_image(Anvil::Image* in_src_image_ptr, + Anvil::ImageLayout in_src_image_layout, + Anvil::Image* in_dst_image_ptr, + Anvil::ImageLayout in_dst_image_layout, + uint32_t in_region_count, + const Anvil::ImageResolve* in_region_ptrs); /** Issues a vkCmdSetBlendConstants() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -1031,6 +1310,21 @@ namespace Anvil bool record_set_depth_bounds(float in_min_depth_bounds, float in_max_depth_bounds); + /** Issues a vkCmdSetDeviceMaskKHR() call and appends it to the internal vector of commands + * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS + * #define enabled). + * + * Calling this function for a command buffer which has not been put into a recording mode + * (by issuing a start_recording() call earlier) will result in an assertion failure. 
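Push-constant updates go through a raw Anvil::PipelineLayout pointer after this change; a sketch (hypothetical helper, layout pointer and stage flags supplied by the caller):

static bool push_tint_color(Anvil::CommandBufferBase* in_cmd_buffer_ptr,
                            Anvil::PipelineLayout*    in_pipeline_layout_ptr,
                            Anvil::ShaderStageFlags   in_stage_flags)
{
    const float tint[4] = {1.0f, 0.5f, 0.25f, 1.0f};

    return in_cmd_buffer_ptr->record_push_constants(in_pipeline_layout_ptr,
                                                    in_stage_flags,
                                                    0,                                   /* in_offset */
                                                    static_cast<uint32_t>(sizeof(tint)), /* in_size   */
                                                    tint);                               /* in_values */
}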
+ * + * This function can only be called for command buffers created for a mGPU device. + * + * Argument meaning is as per Vulkan API specification. + * + * @return true if successful, false otherwise. + **/ + bool record_set_device_mask_KHR(uint32_t in_device_mask); + /** Issues a vkCmdSetEvent() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS * #define enabled). @@ -1041,15 +1335,12 @@ namespace Anvil * It is also illegal to call this function when recording renderpass commands. Doing so * will also result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. - * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. **/ - bool record_set_event(std::shared_ptr in_event_ptr, - VkPipelineStageFlags in_stage_mask); + bool record_set_event(Anvil::Event* in_event_ptr, + Anvil::PipelineStageFlags in_stage_mask); /** Issues a vkCmdSetLineWidth() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -1064,6 +1355,21 @@ namespace Anvil **/ bool record_set_line_width(float in_line_width); + /** Issues a vkCmdSetSampleLocationsEXT() call and appends it to the internal vector of commands + * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS + * #define enabled). + * + * Calling this function for a command buffer which has not been put into a recording mode + * (by issuing a start_recording() call earlier) will result in an assertion failure. + * + * Argument meaning is as per VK_EXT_sample_locations specification. + * + * Must not be called if VK_EXT_sample_locations is not enabled and supported. + * + * @return true if successful, false otherwise. + **/ + bool record_set_sample_locations_EXT(const Anvil::SampleLocationsInfo& in_sample_locations_info); + /** Issues a vkCmdSetScissor() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS * #define enabled). @@ -1090,8 +1396,8 @@ namespace Anvil * * @return true if successful, false otherwise. **/ - bool record_set_stencil_compare_mask(VkStencilFaceFlags in_face_mask, - uint32_t in_stencil_compare_mask); + bool record_set_stencil_compare_mask(Anvil::StencilFaceFlags in_face_mask, + uint32_t in_stencil_compare_mask); /** Issues a vkCmdSetStencilReference() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -1104,8 +1410,8 @@ namespace Anvil * * @return true if successful, false otherwise. **/ - bool record_set_stencil_reference(VkStencilFaceFlags in_face_mask, - uint32_t in_stencil_reference); + bool record_set_stencil_reference(Anvil::StencilFaceFlags in_face_mask, + uint32_t in_stencil_reference); /** Issues a vkCmdSetStencilWriteMask() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -1118,8 +1424,8 @@ namespace Anvil * * @return true if successful, false otherwise. 
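record_set_device_mask_KHR() takes a plain bitmask with one bit per physical device in the group, so the sketch is a one-liner; as noted above, it is only valid for command buffers created for an mGPU device:

static bool limit_to_first_physical_device(Anvil::CommandBufferBase* in_cmd_buffer_ptr)
{
    return in_cmd_buffer_ptr->record_set_device_mask_KHR(1u << 0); /* bit 0 = physical device #0 */
}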
**/ - bool record_set_stencil_write_mask(VkStencilFaceFlags in_face_mask, - uint32_t in_stencil_write_mask); + bool record_set_stencil_write_mask(Anvil::StencilFaceFlags in_face_mask, + uint32_t in_stencil_write_mask); /** Issues a vkCmdSetViewport() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -1146,17 +1452,15 @@ namespace Anvil * It is also illegal to call this function when recording renderpass commands. Doing so * will also result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. - * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. **/ - bool record_update_buffer(std::shared_ptr in_dst_buffer_ptr, - VkDeviceSize in_dst_offset, - VkDeviceSize in_data_size, - const uint32_t* in_data_ptr); + bool record_update_buffer(Anvil::Buffer* in_dst_buffer_ptr, + VkDeviceSize in_dst_offset, + VkDeviceSize in_data_size, + const void* in_data_ptr); + /** Issues a vkCmdWaitEvents() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -1165,23 +1469,38 @@ namespace Anvil * Calling this function for a command buffer which has not been put into a recording mode * (by issuing a start_recording() call earlier) will result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. - * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. **/ - bool record_wait_events(uint32_t in_event_count, - std::shared_ptr* in_event_ptrs, - VkPipelineStageFlags in_src_stage_mask, - VkPipelineStageFlags in_dst_stage_mask, - uint32_t in_memory_barrier_count, - const MemoryBarrier* const in_memory_barriers_ptr, - uint32_t in_buffer_memory_barrier_count, - const BufferBarrier* const in_buffer_memory_barriers_ptr, - uint32_t in_image_memory_barrier_count, - const ImageBarrier* const in_image_memory_barriers_ptr); + bool record_wait_events(uint32_t in_event_count, + Anvil::Event* const* in_event_ptrs, + Anvil::PipelineStageFlags in_src_stage_mask, + Anvil::PipelineStageFlags in_dst_stage_mask, + uint32_t in_memory_barrier_count, + const MemoryBarrierAnv* const in_memory_barriers_ptr, + uint32_t in_buffer_memory_barrier_count, + const BufferBarrier* const in_buffer_memory_barriers_ptr, + uint32_t in_image_memory_barrier_count, + const ImageBarrier* const in_image_memory_barriers_ptr); + + /** Issues a vkCmdWriteBufferMarkerAMD() call and appends it to the internal vector of commands + * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS + * #define enabled). + * + * Calling this function for a command buffer which has not been put into a recording mode + * (by issuing a start_recording() call earlier) will result in an assertion failure. + * + * Argument meaning is as per VK_AMD_buffer_marker specification. + * + * Requires VK_AMD_buffer_marker extension. + * + * @return true if successful, false otherwise. 
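record_update_buffer() now accepts a const void*, so any tightly packed struct can be passed directly; a sketch (hypothetical ViewConstants struct; the usual vkCmdUpdateBuffer() limits of 4-byte multiples and at most 65536 bytes still apply):

struct ViewConstants
{
    float view_projection[16];
};

static bool update_view_constants(Anvil::CommandBufferBase* in_cmd_buffer_ptr,
                                  Anvil::Buffer*            in_dst_buffer_ptr,
                                  const ViewConstants&      in_constants)
{
    /* Must be recorded outside of a renderpass instance, as documented above. */
    return in_cmd_buffer_ptr->record_update_buffer(in_dst_buffer_ptr,
                                                   0,                    /* in_dst_offset */
                                                   sizeof(in_constants), /* in_data_size  */
                                                  &in_constants);        /* in_data_ptr   */
}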
+ **/ + bool record_write_buffer_marker_AMD(const Anvil::PipelineStageFlagBits& in_pipeline_stage, + Anvil::Buffer* in_dst_buffer_ptr, + const VkDeviceSize& in_dst_offset, + const uint32_t& in_marker); /** Issues a vkCmdWriteTimestamp() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -1190,16 +1509,13 @@ namespace Anvil * Calling this function for a command buffer which has not been put into a recording mode * (by issuing a start_recording() call earlier) will result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. - * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. **/ - bool record_write_timestamp(VkPipelineStageFlagBits in_pipeline_stage, - std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_entry); + bool record_write_timestamp(Anvil::PipelineStageFlagBits in_pipeline_stage, + Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_entry); /** Resets the underlying Vulkan command buffer and clears the internally managed vector of * recorded commands, if STORE_COMMAND_BUFFER_COMMANDS has been defined for the build. @@ -1245,6 +1561,7 @@ namespace Anvil struct DrawIndirectCommand; struct DrawIndexedIndirectCommand; struct EndQueryCommand; + struct EndQueryIndexedEXTCommand; struct ExecuteCommandsCommand; struct FillBufferCommand; struct NextSubpassCommand; @@ -1257,6 +1574,7 @@ namespace Anvil struct SetDepthBoundsCommand; struct SetEventCommand; struct SetLineWidthCommand; + struct SetSampleLocationsEXTCommand; struct SetScissorCommand; struct SetStencilCompareMaskCommand; struct SetStencilReferenceCommand; @@ -1271,15 +1589,15 @@ namespace Anvil /** Holds all arguments passed to a vkCmdBeginQuery() command */ typedef struct BeginQueryCommand : public Command { - VkQueryControlFlagsVariable(flags); + Anvil::QueryControlFlags flags; - Anvil::QueryIndex entry; - std::shared_ptr query_pool_ptr; + Anvil::QueryIndex entry; + Anvil::QueryPool* query_pool_ptr; /** Constructor. */ - explicit BeginQueryCommand(std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_entry, - VkQueryControlFlags in_flags); + explicit BeginQueryCommand(Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_entry, + Anvil::QueryControlFlags in_flags); /** Destructor. */ virtual ~BeginQueryCommand() @@ -1291,20 +1609,20 @@ namespace Anvil /** Holds all arguments passed to a vkCmdBindDescriptorSets() command. */ typedef struct BindDescriptorSetsCommand : public Command { - std::vector > descriptor_sets; - std::vector dynamic_offsets; - uint32_t first_set; - std::shared_ptr layout_ptr; - VkPipelineBindPoint pipeline_bind_point; + std::vector descriptor_sets; + std::vector dynamic_offsets; + uint32_t first_set; + Anvil::PipelineLayout* layout_ptr; + Anvil::PipelineBindPoint pipeline_bind_point; /** Constructor. 
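A sketch that brackets a workload with two timestamps through the raw Anvil::QueryPool pointer; pipeline stages are passed in to avoid guessing enumerator names, and Anvil::QueryIndex is assumed to be convertible from a plain integer:

static bool record_timing_queries(Anvil::CommandBufferBase*    in_cmd_buffer_ptr,
                                  Anvil::QueryPool*            in_query_pool_ptr,
                                  Anvil::PipelineStageFlagBits in_top_stage,
                                  Anvil::PipelineStageFlagBits in_bottom_stage)
{
    bool result = in_cmd_buffer_ptr->record_write_timestamp(in_top_stage,
                                                            in_query_pool_ptr,
                                                            0 /* in_entry */);

    /* ... commands whose GPU execution time should be measured go here ... */

    result &= in_cmd_buffer_ptr->record_write_timestamp(in_bottom_stage,
                                                        in_query_pool_ptr,
                                                        1 /* in_entry */);

    return result;
}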
**/ - explicit BindDescriptorSetsCommand(VkPipelineBindPoint in_pipeline_bind_point, - std::shared_ptr in_layout_ptr, - uint32_t in_first_set, - uint32_t in_set_count, - std::shared_ptr* in_descriptor_set_ptrs, - uint32_t in_dynamic_offset_count, - const uint32_t* in_dynamic_offset_ptrs); + explicit BindDescriptorSetsCommand(Anvil::PipelineBindPoint in_pipeline_bind_point, + Anvil::PipelineLayout* in_layout_ptr, + uint32_t in_first_set, + uint32_t in_set_count, + const Anvil::DescriptorSet* const* in_descriptor_set_ptrs, + uint32_t in_dynamic_offset_count, + const uint32_t* in_dynamic_offset_ptrs); /** Destructor. */ virtual ~BindDescriptorSetsCommand() @@ -1315,23 +1633,18 @@ namespace Anvil } BindDescriptorSetsCommand; - /** Holds all arguments passed to a vkCmdBindIndexBuffer() command. - * - * Raw Vulkan object handles have been replaced with pointers to wrapper objects. - * These objects are retained at construction time, and released at descriptor - * destruction time. - */ + /** Holds all arguments passed to a vkCmdBindIndexBuffer() command. */ typedef struct BindIndexBufferCommand : public Command { - VkBuffer buffer; - std::shared_ptr buffer_ptr; - VkIndexType index_type; - VkDeviceSize offset; + VkBuffer buffer; + Anvil::Buffer* buffer_ptr; + Anvil::IndexType index_type; + VkDeviceSize offset; /** Constructor. **/ - explicit BindIndexBufferCommand(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - VkIndexType in_index_type); + explicit BindIndexBufferCommand(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::IndexType in_index_type); /** Destructor. */ virtual ~BindIndexBufferCommand() @@ -1347,8 +1660,8 @@ namespace Anvil /** Holds all arguments passed to a vkCmdBindPipeline() command. */ typedef struct BindPipelineCommand : public Command { - VkPipelineBindPoint pipeline_bind_point; - Anvil::PipelineID pipeline_id; + Anvil::PipelineBindPoint pipeline_bind_point; + Anvil::PipelineID pipeline_id; /** Constructor. * @@ -1358,8 +1671,8 @@ namespace Anvil * or a graphics pipeline ID, coming from the device-specific graphics pipeline * manager. The type of the pipeline is deduced from @param in_pipeline_bind_point. **/ - explicit BindPipelineCommand(VkPipelineBindPoint in_pipeline_bind_point, - Anvil::PipelineID in_pipeline_id); + explicit BindPipelineCommand(Anvil::PipelineBindPoint in_pipeline_bind_point, + Anvil::PipelineID in_pipeline_id); /* Destructor. */ virtual ~BindPipelineCommand() @@ -1374,13 +1687,13 @@ namespace Anvil **/ typedef struct BindVertexBuffersCommandBinding { - VkBuffer buffer; - std::shared_ptr buffer_ptr; - VkDeviceSize offset; + VkBuffer buffer; + Anvil::Buffer* buffer_ptr; + VkDeviceSize offset; /** Constructor. **/ - explicit BindVertexBuffersCommandBinding(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset); + explicit BindVertexBuffersCommandBinding(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset); /** Destructor. */ virtual ~BindVertexBuffersCommandBinding() @@ -1401,10 +1714,10 @@ namespace Anvil uint32_t start_binding; /** Constructor. **/ - explicit BindVertexBuffersCommand(uint32_t in_start_binding, - uint32_t in_binding_count, - std::shared_ptr* in_buffer_ptrs, - const VkDeviceSize* in_offset_ptrs); + explicit BindVertexBuffersCommand(uint32_t in_start_binding, + uint32_t in_binding_count, + Anvil::Buffer** in_buffer_ptrs, + const VkDeviceSize* in_offset_ptrs); /** Destructor. 
*/ virtual ~BindVertexBuffersCommand() @@ -1416,24 +1729,24 @@ namespace Anvil /** Holds all arguments passed to a vkCmdBlitImage() command. */ typedef struct BlitImageCommand : public Command { - VkImage dst_image; - VkImageLayout dst_image_layout; - std::shared_ptr dst_image_ptr; - VkImage src_image; - VkImageLayout src_image_layout; - std::shared_ptr src_image_ptr; + VkImage dst_image; + Anvil::ImageLayout dst_image_layout; + Anvil::Image* dst_image_ptr; + VkImage src_image; + Anvil::ImageLayout src_image_layout; + Anvil::Image* src_image_ptr; - VkFilter filter; - std::vector regions; + Anvil::Filter filter; + std::vector regions; /** Constructor. */ - explicit BlitImageCommand(std::shared_ptr in_src_image_ptr, - VkImageLayout in_src_image_layout, - std::shared_ptr in_dst_image_ptr, - VkImageLayout in_dst_image_layout, - uint32_t in_region_count, - const VkImageBlit* in_region_ptrs, - VkFilter in_filter); + explicit BlitImageCommand(Anvil::Image* in_src_image_ptr, + Anvil::ImageLayout in_src_image_layout, + Anvil::Image* in_dst_image_ptr, + Anvil::ImageLayout in_dst_image_layout, + uint32_t in_region_count, + const Anvil::ImageBlit* in_region_ptrs, + Anvil::Filter in_filter); /** Destructor. */ virtual ~BlitImageCommand() @@ -1449,15 +1762,15 @@ namespace Anvil /* Holds a single attachment definition, as used by ClearAttachmentsCommand descriptor */ typedef struct ClearAttachmentsCommandAttachment { - VkImageAspectFlagsVariable(aspect_mask); + Anvil::ImageAspectFlags aspect_mask; VkClearValue clear_value; uint32_t color_attachment; /** Constructor. **/ - ClearAttachmentsCommandAttachment(VkImageAspectFlags in_aspect_mask, - VkClearValue in_clear_value, - uint32_t in_color_attachment) + ClearAttachmentsCommandAttachment(Anvil::ImageAspectFlags in_aspect_mask, + VkClearValue in_clear_value, + uint32_t in_color_attachment) { aspect_mask = in_aspect_mask; clear_value = in_clear_value; @@ -1478,10 +1791,10 @@ namespace Anvil std::vector rects; /* Constructor. **/ - explicit ClearAttachmentsCommand(uint32_t in_n_attachments, - const VkClearAttachment* in_attachments, - uint32_t in_n_rects, - const VkClearRect* in_rect_ptrs); + explicit ClearAttachmentsCommand(uint32_t in_n_attachments, + const Anvil::ClearAttachment* in_attachments, + uint32_t in_n_rects, + const VkClearRect* in_rect_ptrs); /** Destructor. */ virtual ~ClearAttachmentsCommand() @@ -1494,18 +1807,18 @@ namespace Anvil /** Holds all arguments passed to a vkCmdClearColorImage() command. */ typedef struct ClearColorImageCommand : public Command { - VkClearColorValue color; - VkImage image; - VkImageLayout image_layout; - std::shared_ptr image_ptr; - std::vector ranges; + VkClearColorValue color; + VkImage image; + Anvil::ImageLayout image_layout; + Anvil::Image* image_ptr; + std::vector ranges; /** Constructor. **/ - explicit ClearColorImageCommand(std::shared_ptr in_image_ptr, - VkImageLayout in_image_layout, - const VkClearColorValue* in_color_ptr, - uint32_t in_range_count, - const VkImageSubresourceRange* in_range_ptrs); + explicit ClearColorImageCommand(Anvil::Image* in_image_ptr, + Anvil::ImageLayout in_image_layout, + const VkClearColorValue* in_color_ptr, + uint32_t in_range_count, + const Anvil::ImageSubresourceRange* in_range_ptrs); /** Destructor. */ virtual ~ClearColorImageCommand() @@ -1521,18 +1834,18 @@ namespace Anvil /** Holds all arguments passed to a vkCmdClearDepthStencilImage() command. 
*/ typedef struct ClearDepthStencilImageCommand : public Command { - VkClearDepthStencilValue depth_stencil; - VkImage image; - VkImageLayout image_layout; - std::shared_ptr image_ptr; - std::vector ranges; + VkClearDepthStencilValue depth_stencil; + VkImage image; + Anvil::ImageLayout image_layout; + Anvil::Image* image_ptr; + std::vector ranges; /** Constructor. **/ - explicit ClearDepthStencilImageCommand(std::shared_ptr in_image_ptr, - VkImageLayout in_image_layout, - const VkClearDepthStencilValue* in_depth_stencil_ptr, - uint32_t in_range_count, - const VkImageSubresourceRange* in_range_ptrs); + explicit ClearDepthStencilImageCommand(Anvil::Image* in_image_ptr, + Anvil::ImageLayout in_image_layout, + const VkClearDepthStencilValue* in_depth_stencil_ptr, + uint32_t in_range_count, + const Anvil::ImageSubresourceRange* in_range_ptrs); /** Destructor. */ virtual ~ClearDepthStencilImageCommand() @@ -1544,20 +1857,21 @@ namespace Anvil ClearDepthStencilImageCommand& operator=(const ClearDepthStencilImageCommand& in); } ClearDepthStencilImageCommand; + /** Holds all arguments passed to a vkCmdCopyBuffer() command. */ typedef struct CopyBufferCommand : public Command { VkBuffer dst_buffer; - std::shared_ptr dst_buffer_ptr; - std::vector regions; + Anvil::Buffer* dst_buffer_ptr; + std::vector regions; VkBuffer src_buffer; - std::shared_ptr src_buffer_ptr; + Anvil::Buffer* src_buffer_ptr; /** Constructor. **/ - explicit CopyBufferCommand(std::shared_ptr in_src_buffer_ptr, - std::shared_ptr in_dst_buffer_ptr, - uint32_t in_region_count, - const VkBufferCopy* in_region_ptrs); + explicit CopyBufferCommand(Anvil::Buffer* in_src_buffer_ptr, + Anvil::Buffer* in_dst_buffer_ptr, + uint32_t in_region_count, + const Anvil::BufferCopy* in_region_ptrs); /** Destructor. */ virtual ~CopyBufferCommand() @@ -1573,19 +1887,19 @@ namespace Anvil /** Holds all arguments passed to a vkCmdCopyBufferToImage() command. */ typedef struct CopyBufferToImageCommand : public Command { - VkImage dst_image; - VkImageLayout dst_image_layout; - std::shared_ptr dst_image_ptr; - std::vector regions; - VkBuffer src_buffer; - std::shared_ptr src_buffer_ptr; + VkImage dst_image; + Anvil::ImageLayout dst_image_layout; + Anvil::Image* dst_image_ptr; + std::vector regions; + VkBuffer src_buffer; + Anvil::Buffer* src_buffer_ptr; /** Constructor. **/ - explicit CopyBufferToImageCommand(std::shared_ptr in_src_buffer_ptr, - std::shared_ptr in_dst_image_ptr, - VkImageLayout in_dst_image_layout, - uint32_t in_region_count, - const VkBufferImageCopy* in_region_ptrs); + explicit CopyBufferToImageCommand(Anvil::Buffer* in_src_buffer_ptr, + Anvil::Image* in_dst_image_ptr, + Anvil::ImageLayout in_dst_image_layout, + uint32_t in_region_count, + const Anvil::BufferImageCopy* in_region_ptrs); /** Destructor. */ virtual ~CopyBufferToImageCommand() @@ -1603,20 +1917,20 @@ namespace Anvil typedef struct CopyImageCommand : public Command { VkImage dst_image; - std::shared_ptr dst_image_ptr; - VkImageLayout dst_image_layout; - std::vector regions; + Anvil::Image* dst_image_ptr; + Anvil::ImageLayout dst_image_layout; + std::vector regions; VkImage src_image; - std::shared_ptr src_image_ptr; - VkImageLayout src_image_layout; + Anvil::Image* src_image_ptr; + Anvil::ImageLayout src_image_layout; /** Constructor. 
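The per-command mirror structs being converted above all follow the same shape: snapshot the raw Vulkan handle plus the (now raw, presumably non-owning) wrapper pointer at record time for STORE_COMMAND_BUFFER_COMMANDS builds. An invented, illustration-only struct showing that shape:

struct ExampleCopyCommandMirror
{
    VkBuffer       dst_buffer;     /* raw handle snapshot; the real ctors query the wrapper for it */
    Anvil::Buffer* dst_buffer_ptr; /* raw wrapper pointer - no shared_ptr retention anymore        */
    VkDeviceSize   dst_offset;

    explicit ExampleCopyCommandMirror(Anvil::Buffer* in_dst_buffer_ptr,
                                      VkDeviceSize   in_dst_offset)
        :dst_buffer    (VK_NULL_HANDLE),
         dst_buffer_ptr(in_dst_buffer_ptr),
         dst_offset    (in_dst_offset)
    {
        /* Stub */
    }
};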
**/ - explicit CopyImageCommand(std::shared_ptr in_src_image_ptr, - VkImageLayout in_src_image_layout, - std::shared_ptr in_dst_image_ptr, - VkImageLayout in_dst_image_layout, - uint32_t in_region_count, - const VkImageCopy* in_region_ptrs); + explicit CopyImageCommand(Anvil::Image* in_src_image_ptr, + Anvil::ImageLayout in_src_image_layout, + Anvil::Image* in_dst_image_ptr, + Anvil::ImageLayout in_dst_image_layout, + uint32_t in_region_count, + const Anvil::ImageCopy* in_region_ptrs); /** Destructor. */ virtual ~CopyImageCommand() @@ -1632,19 +1946,19 @@ namespace Anvil /** Holds all arguments passed to a vkCmdCopyImageToBuffer() command. */ typedef struct CopyImageToBufferCommand : public Command { - VkBuffer dst_buffer; - std::shared_ptr dst_buffer_ptr; - std::vector regions; - VkImage src_image; - VkImageLayout src_image_layout; - std::shared_ptr src_image_ptr; + VkBuffer dst_buffer; + Anvil::Buffer* dst_buffer_ptr; + std::vector regions; + VkImage src_image; + Anvil::ImageLayout src_image_layout; + Anvil::Image* src_image_ptr; /** Constructor. **/ - explicit CopyImageToBufferCommand(std::shared_ptr in_src_image_ptr, - VkImageLayout in_src_image_layout, - std::shared_ptr in_dst_buffer_ptr, - uint32_t in_region_count, - const VkBufferImageCopy* in_region_ptrs); + explicit CopyImageToBufferCommand(Anvil::Image* in_src_image_ptr, + Anvil::ImageLayout in_src_image_layout, + Anvil::Buffer* in_dst_buffer_ptr, + uint32_t in_region_count, + const Anvil::BufferImageCopy* in_region_ptrs); /** Destructor. */ virtual ~CopyImageToBufferCommand() @@ -1660,24 +1974,24 @@ namespace Anvil /** Holds all arguments passed to a vkCmdCopyQueryPoolResults() command. */ typedef struct CopyQueryPoolResultsCommand : public Command { - VkQueryResultFlagsVariable(flags); + VkQueryResultFlags flags; - VkBuffer dst_buffer; - std::shared_ptr dst_buffer_ptr; - VkDeviceSize dst_offset; - VkDeviceSize dst_stride; - uint32_t query_count; - std::shared_ptr query_pool_ptr; - Anvil::QueryIndex start_query; + VkBuffer dst_buffer; + Anvil::Buffer* dst_buffer_ptr; + VkDeviceSize dst_offset; + VkDeviceSize dst_stride; + uint32_t query_count; + Anvil::QueryPool* query_pool_ptr; + Anvil::QueryIndex start_query; /** Constructor. **/ - explicit CopyQueryPoolResultsCommand(std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_start_query, - uint32_t in_query_count, - std::shared_ptr in_dst_buffer_ptr, - VkDeviceSize in_dst_offset, - VkDeviceSize in_dst_stride, - VkQueryResultFlags in_flags); + explicit CopyQueryPoolResultsCommand(Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_start_query, + uint32_t in_query_count, + Anvil::Buffer* in_dst_buffer_ptr, + VkDeviceSize in_dst_offset, + VkDeviceSize in_dst_stride, + VkQueryResultFlags in_flags); /** Destructor. */ virtual ~CopyQueryPoolResultsCommand() @@ -1689,7 +2003,6 @@ namespace Anvil CopyQueryPoolResultsCommand& operator=(const CopyQueryPoolResultsCommand&); } CopyQueryPoolResultsCommand; - /** Holds all arguments passed to a vkCmdDebugMarkerBeginEXT() command. */ typedef struct DebugMarkerBeginEXTCommand : public Command { @@ -1756,16 +2069,42 @@ namespace Anvil } DispatchCommand; + /** Holds all arguments passed to a vkCmdDispatchBaseKHR() command. */ + typedef struct DispatchBaseKHRCommand : public Command + { + uint32_t base_group_x; + uint32_t base_group_y; + uint32_t base_group_z; + uint32_t group_count_x; + uint32_t group_count_y; + uint32_t group_count_z; + + /** Constructor. 
**/ + explicit DispatchBaseKHRCommand(uint32_t in_base_group_x, + uint32_t in_base_group_y, + uint32_t in_base_group_z, + uint32_t in_group_count_x, + uint32_t in_group_count_y, + uint32_t in_group_count_z); + + /** Destructor. */ + virtual ~DispatchBaseKHRCommand() + { + /* Stub */ + } + } DispatchBaseKHRCommand; + + /** Holds all arguments passed to a vkCmdDispatchIndirect() command. */ typedef struct DispatchIndirectCommand : public Command { - VkBuffer buffer; - std::shared_ptr buffer_ptr; - VkDeviceSize offset; + VkBuffer buffer; + Anvil::Buffer* buffer_ptr; + VkDeviceSize offset; /** Constructor. **/ - explicit DispatchIndirectCommand(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset); + explicit DispatchIndirectCommand(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset); /** Destructor. */ virtual ~DispatchIndirectCommand() @@ -1830,17 +2169,17 @@ namespace Anvil /** Holds all arguments passed to a vkCmdDrawIndirect() command. */ typedef struct DrawIndirectCommand : public Command { - VkBuffer buffer; - std::shared_ptr buffer_ptr; - uint32_t count; - VkDeviceSize offset; - uint32_t stride; + VkBuffer buffer; + Anvil::Buffer* buffer_ptr; + uint32_t count; + VkDeviceSize offset; + uint32_t stride; /** Constructor. **/ - explicit DrawIndirectCommand(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - uint32_t in_count, - uint32_t in_stride); + explicit DrawIndirectCommand(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + uint32_t in_count, + uint32_t in_stride); /** Destructor. */ virtual ~DrawIndirectCommand() @@ -1855,22 +2194,22 @@ namespace Anvil /** Holds all arguments passed to a vkCmdDrawIndirectCountAMD() command. */ typedef struct DrawIndirectCountAMDCommand : public Command { - VkBuffer buffer; - std::shared_ptr buffer_ptr; - VkBuffer count_buffer; - std::shared_ptr count_buffer_ptr; - VkDeviceSize count_offset; - uint32_t max_draw_count; - VkDeviceSize offset; - uint32_t stride; + VkBuffer buffer; + Anvil::Buffer* buffer_ptr; + VkBuffer count_buffer; + Anvil::Buffer* count_buffer_ptr; + VkDeviceSize count_offset; + uint32_t max_draw_count; + VkDeviceSize offset; + uint32_t stride; /** Constructor. **/ - explicit DrawIndirectCountAMDCommand(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - std::shared_ptr in_count_buffer_ptr, - VkDeviceSize in_count_offset, - uint32_t in_max_draw_count, - uint32_t in_stride); + explicit DrawIndirectCountAMDCommand(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::Buffer* in_count_buffer_ptr, + VkDeviceSize in_count_offset, + uint32_t in_max_draw_count, + uint32_t in_stride); /** Destructor */ virtual ~DrawIndirectCountAMDCommand() @@ -1882,20 +2221,50 @@ namespace Anvil DrawIndirectCountAMDCommand& operator=(const DrawIndirectCountAMDCommand&); } DrawIndirectCountAMDCommand; + /** Holds all arguments passed to a vkCmdDrawIndirectCountKHR() command. */ + typedef struct DrawIndirectCountKHRCommand : public Command + { + VkBuffer buffer; + Anvil::Buffer* buffer_ptr; + VkBuffer count_buffer; + Anvil::Buffer* count_buffer_ptr; + VkDeviceSize count_offset; + uint32_t max_draw_count; + VkDeviceSize offset; + uint32_t stride; + + /** Constructor. 
**/ + explicit DrawIndirectCountKHRCommand(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::Buffer* in_count_buffer_ptr, + VkDeviceSize in_count_offset, + uint32_t in_max_draw_count, + uint32_t in_stride); + + /** Destructor */ + virtual ~DrawIndirectCountKHRCommand() + { + /* Stub */ + } + + private: + DrawIndirectCountKHRCommand& operator=(const DrawIndirectCountKHRCommand&); + } DrawIndirectCountKHRCommand; + /** Holds all arguments passed to a vkCmdDrawIndexedIndirect() command. */ typedef struct DrawIndexedIndirectCommand : public Command { - VkBuffer buffer; - std::shared_ptr buffer_ptr; - uint32_t draw_count; - VkDeviceSize offset; - uint32_t stride; + VkBuffer buffer; + Anvil::Buffer* buffer_ptr; + uint32_t draw_count; + VkDeviceSize offset; + uint32_t stride; /** Constructor. **/ - explicit DrawIndexedIndirectCommand(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - uint32_t in_draw_count, - uint32_t in_stride); + explicit DrawIndexedIndirectCommand(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + uint32_t in_draw_count, + uint32_t in_stride); /** Destructor. */ virtual ~DrawIndexedIndirectCommand() @@ -1907,25 +2276,53 @@ namespace Anvil DrawIndexedIndirectCommand& operator=(const DrawIndexedIndirectCommand&); } DrawIndexedIndirectCommand; + /** Holds all arguments passed to a vkCmdDrawIndirectByteCountEXT() command. */ + typedef struct DrawIndirectByteCountEXTCommand : public Command + { + VkDeviceSize counter_buffer_offset; + Anvil::Buffer* counter_buffer_ptr; + uint32_t counter_offset; + uint32_t first_instance; + uint32_t instance_count; + uint32_t vertex_stride; + + /** Constructor. **/ + explicit DrawIndirectByteCountEXTCommand(const uint32_t& in_instance_count, + const uint32_t& in_first_instance, + Anvil::Buffer* in_counter_buffer_ptr, + const VkDeviceSize& in_counter_buffer_offset, + const uint32_t& in_counter_offset, + const uint32_t& in_vertex_stride); + + /** Destructor */ + virtual ~DrawIndirectByteCountEXTCommand() + { + /* Stub */ + } + + private: + DrawIndirectByteCountEXTCommand& operator=(const DrawIndirectByteCountEXTCommand&); + } DrawIndirectByteCountEXTCommand; + /** Holds all arguments passed to a vkCmdDrawIndexedIndirectCountAMD() command. */ typedef struct DrawIndexedIndirectCountAMDCommand : public Command { - VkBuffer buffer; - std::shared_ptr buffer_ptr; - VkBuffer count_buffer; - std::shared_ptr count_buffer_ptr; - VkDeviceSize count_offset; - uint32_t max_draw_count; - VkDeviceSize offset; - uint32_t stride; + VkBuffer buffer; + Anvil::Buffer* buffer_ptr; + VkBuffer count_buffer; + Anvil::Buffer* count_buffer_ptr; + VkDeviceSize count_offset; + uint32_t max_draw_count; + VkDeviceSize offset; + uint32_t stride; /** Constructor. **/ - explicit DrawIndexedIndirectCountAMDCommand(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - std::shared_ptr in_count_buffer_ptr, - VkDeviceSize in_count_offset, - uint32_t in_max_draw_count, - uint32_t in_stride); + explicit DrawIndexedIndirectCountAMDCommand(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::Buffer* in_count_buffer_ptr, + VkDeviceSize in_count_offset, + uint32_t in_max_draw_count, + uint32_t in_stride); /** Destructor */ virtual ~DrawIndexedIndirectCountAMDCommand() @@ -1937,15 +2334,45 @@ namespace Anvil DrawIndexedIndirectCountAMDCommand& operator=(const DrawIndexedIndirectCountAMDCommand&); } DrawIndexedIndirectCountAMDCommand; + /** Holds all arguments passed to a vkCmdDrawIndexedIndirectCountKHR() command. 
*/ + typedef struct DrawIndexedIndirectCountKHRCommand : public Command + { + VkBuffer buffer; + Anvil::Buffer* buffer_ptr; + VkBuffer count_buffer; + Anvil::Buffer* count_buffer_ptr; + VkDeviceSize count_offset; + uint32_t max_draw_count; + VkDeviceSize offset; + uint32_t stride; + + /** Constructor. **/ + explicit DrawIndexedIndirectCountKHRCommand(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::Buffer* in_count_buffer_ptr, + VkDeviceSize in_count_offset, + uint32_t in_max_draw_count, + uint32_t in_stride); + + /** Destructor */ + virtual ~DrawIndexedIndirectCountKHRCommand() + { + /* Stub */ + } + + private: + DrawIndexedIndirectCountKHRCommand& operator=(const DrawIndexedIndirectCountKHRCommand&); + } DrawIndexedIndirectCountKHRCommand; + /** Holds all arguments passed to a vkCmdEndQuery() command. */ typedef struct EndQueryCommand : public Command { - Anvil::QueryIndex entry; - std::shared_ptr query_pool_ptr; + Anvil::QueryIndex entry; + Anvil::QueryPool* query_pool_ptr; /** Constructor. **/ - explicit EndQueryCommand(std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_entry); + explicit EndQueryCommand(Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_entry); /** Destructor. */ virtual ~EndQueryCommand() @@ -1955,15 +2382,50 @@ namespace Anvil } EndQueryCommand; + /** Holds all arguments passed to a vkCmdEndQueryIndexedEXT() command. */ + typedef struct EndQueryIndexedEXTCommand : public Command + { + uint32_t index; + Anvil::QueryIndex query; + Anvil::QueryPool* query_pool_ptr; + + /** Constructor. **/ + explicit EndQueryIndexedEXTCommand(Anvil::QueryPool* in_query_pool_ptr, + const Anvil::QueryIndex& in_entry, + const uint32_t& in_index); + + /** Destructor. */ + virtual ~EndQueryIndexedEXTCommand() + { + /* Stub */ + } + + } EndQueryIndexedEXTCommand; + + typedef struct EndTransformFeedbackEXTCommand : public Command + { + std::vector counter_buffer_offsets; + std::vector counter_buffer_ptrs; + uint32_t first_counter_buffer; + + explicit EndTransformFeedbackEXTCommand(const uint32_t& in_first_counter_buffer, + const std::vector& in_counter_buffer_ptrs, + const std::vector& in_counter_buffer_offsets); + + private: + EndTransformFeedbackEXTCommand (const EndTransformFeedbackEXTCommand&); + EndTransformFeedbackEXTCommand& operator=(const EndTransformFeedbackEXTCommand); + } EndTransformFeedbackEXTCommand; + /** Holds all arguments passed to a vkCmdExecuteCommands() command. */ typedef struct ExecuteCommandsCommand : public Command { - std::vector > command_buffer_ptrs; - std::vector command_buffers; + std::vector command_buffer_ptrs; + std::vector command_buffers; /** Constructor. **/ - explicit ExecuteCommandsCommand(uint32_t in_cmd_buffers_count, - std::shared_ptr* in_cmd_buffer_ptrs); + explicit ExecuteCommandsCommand(uint32_t in_cmd_buffers_count, + Anvil::SecondaryCommandBuffer** in_cmd_buffer_ptrs); /** Destructor. */ virtual ~ExecuteCommandsCommand() @@ -1979,17 +2441,17 @@ namespace Anvil /** Holds all arguments passed to a vkCmdFillBuffer() command. */ typedef struct FillBufferCommand : public Command { - uint32_t data; - VkBuffer dst_buffer; - std::shared_ptr dst_buffer_ptr; - VkDeviceSize dst_offset; - VkDeviceSize size; + uint32_t data; + VkBuffer dst_buffer; + Anvil::Buffer* dst_buffer_ptr; + VkDeviceSize dst_offset; + VkDeviceSize size; /** Constructor. 
**/ - explicit FillBufferCommand(std::shared_ptr in_dst_buffer_ptr, - VkDeviceSize in_dst_offset, - VkDeviceSize in_size, - uint32_t in_data); + explicit FillBufferCommand(Anvil::Buffer* in_dst_buffer_ptr, + VkDeviceSize in_dst_offset, + VkDeviceSize in_size, + uint32_t in_data); /** Destructor. */ virtual ~FillBufferCommand() @@ -2005,10 +2467,10 @@ namespace Anvil /** Holds all arguments passed to a vkCmdNextSubpass() command. */ typedef struct NextSubpassCommand : public Command { - VkSubpassContents contents; + Anvil::SubpassContents contents; /** Constructor. **/ - explicit NextSubpassCommand(VkSubpassContents in_contents); + explicit NextSubpassCommand(Anvil::SubpassContents in_contents); /** Destructor. */ virtual ~NextSubpassCommand() @@ -2017,23 +2479,32 @@ namespace Anvil } } NextSubpassCommand; + typedef struct NextSubpass2KHRCommand : public NextSubpassCommand + { + /** Constructor. **/ + NextSubpass2KHRCommand(Anvil::SubpassContents in_contents) + :NextSubpassCommand(in_contents) + { + type = COMMAND_TYPE_NEXT_SUBPASS_2_KHR; + } + } NextSubpass2KHRCommand; /** Holds all arguments passed to a vkCmdPushConstants() command. */ typedef struct PushConstantsCommand : public Command { - VkShaderStageFlagsVariable(stage_flags); + Anvil::ShaderStageFlags stage_flags; - std::shared_ptr layout_ptr; - uint32_t offset; - uint32_t size; - const void* values; + Anvil::PipelineLayout* layout_ptr; + uint32_t offset; + uint32_t size; + const void* values; /** Constructor. **/ - explicit PushConstantsCommand(std::shared_ptr in_layout_ptr, - VkShaderStageFlags in_stage_flags, - uint32_t in_offset, - uint32_t in_size, - const void* in_values); + explicit PushConstantsCommand(Anvil::PipelineLayout* in_layout_ptr, + Anvil::ShaderStageFlags in_stage_flags, + uint32_t in_offset, + uint32_t in_size, + const void* in_values); /** Destructor. */ virtual ~PushConstantsCommand() @@ -2042,18 +2513,17 @@ namespace Anvil } } PushConstantsCommand; - /** Holds all arguments passed to a vkCmdResetEvent() command. **/ typedef struct ResetEventCommand : public Command { - VkPipelineStageFlagsVariable(stage_mask); + Anvil::PipelineStageFlags stage_mask; - VkEvent event; - std::shared_ptr event_ptr; + VkEvent event; + Anvil::Event* event_ptr; /** Constructor. **/ - explicit ResetEventCommand(std::shared_ptr in_event_ptr, - VkPipelineStageFlags in_stage_mask); + explicit ResetEventCommand(Anvil::Event* in_event_ptr, + Anvil::PipelineStageFlags in_stage_mask); /** Destructor. */ virtual ~ResetEventCommand() @@ -2069,14 +2539,14 @@ namespace Anvil /** Holds all arguments passed to a vkCmdResetQueryPoolCommand() command. **/ typedef struct ResetQueryPoolCommand : public Command { - uint32_t query_count; - std::shared_ptr query_pool_ptr; - Anvil::QueryIndex start_query; + uint32_t query_count; + Anvil::QueryPool* query_pool_ptr; + Anvil::QueryIndex start_query; /** Constructor. **/ - explicit ResetQueryPoolCommand(std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_start_query, - uint32_t in_query_count); + explicit ResetQueryPoolCommand(Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_start_query, + uint32_t in_query_count); /** Destructor. */ virtual ~ResetQueryPoolCommand() @@ -2089,21 +2559,21 @@ namespace Anvil /** Holds all arguments passed to a vkCmdResolveImage() command. 
**/ typedef struct ResolveImageCommand : public Command { - VkImage dst_image; - std::shared_ptr dst_image_ptr; - VkImageLayout dst_image_layout; - std::vector regions; - VkImage src_image; - std::shared_ptr src_image_ptr; - VkImageLayout src_image_layout; + VkImage dst_image; + Anvil::Image* dst_image_ptr; + Anvil::ImageLayout dst_image_layout; + std::vector regions; + VkImage src_image; + Anvil::Image* src_image_ptr; + Anvil::ImageLayout src_image_layout; /** Constructor. **/ - explicit ResolveImageCommand(std::shared_ptr in_src_image_ptr, - VkImageLayout in_src_image_layout, - std::shared_ptr in_dst_image_ptr, - VkImageLayout in_dst_image_layout, - uint32_t in_region_count, - const VkImageResolve* in_region_ptrs); + explicit ResolveImageCommand(Anvil::Image* in_src_image_ptr, + Anvil::ImageLayout in_src_image_layout, + Anvil::Image* in_dst_image_ptr, + Anvil::ImageLayout in_dst_image_layout, + uint32_t in_region_count, + const Anvil::ImageResolve* in_region_ptrs); /** Destructor. */ virtual ~ResolveImageCommand() @@ -2115,7 +2585,6 @@ namespace Anvil ResolveImageCommand& operator=(const ResolveImageCommand&); } ResolveImageCommand; - /** Holds all arguments passed to a vkCmdSetBlendConstants() command. **/ typedef struct SetBlendConstantsCommand : public Command { @@ -2169,17 +2638,32 @@ namespace Anvil } } SetDepthBoundsCommand; + /** Holds all arguments passed to a vkCmdSetDeviceMaskKHR() command. **/ + typedef struct SetDeviceMaskKHRCommand : public Command + { + uint32_t device_mask; + + /** Constructor. **/ + SetDeviceMaskKHRCommand(uint32_t in_device_mask); + + /** Destructor. */ + virtual ~SetDeviceMaskKHRCommand() + { + /* Stub */ + } + } SetDeviceMaskKHRCommand; + /** Holds all arguments passed to a vkCmdSetEvent() command. **/ typedef struct SetEventCommand : public Command { - VkEvent event; - std::shared_ptr event_ptr; + VkEvent event; + Anvil::Event* event_ptr; - VkPipelineStageFlagsVariable(stage_mask); + Anvil::PipelineStageFlags stage_mask; /** Constructor. **/ - explicit SetEventCommand(std::shared_ptr in_event_ptr, - VkPipelineStageFlags in_stage_mask); + explicit SetEventCommand(Anvil::Event* in_event_ptr, + Anvil::PipelineStageFlags in_stage_mask); /** Destructor. */ virtual ~SetEventCommand() @@ -2208,6 +2692,22 @@ namespace Anvil } SetLineWidthCommand; + /** Holds all arguments passed to vkCmdSetSampleLocationsEXT() command. **/ + typedef struct SetSampleLocationsEXTCommand : public Command + { + Anvil::SampleLocationsInfo sample_locations_info; + + /** Constructor. **/ + explicit SetSampleLocationsEXTCommand(const Anvil::SampleLocationsInfo& in_sample_locations_info); + + /** Destructor. */ + virtual ~SetSampleLocationsEXTCommand() + { + /* Stub */ + } + } SetSampleLocationsEXTCommand; + + /** Holds all arguments passed to a vkCmdSetScissor() command. **/ typedef struct SetScissorCommand : public Command { @@ -2230,13 +2730,13 @@ namespace Anvil /** Holds all arguments passed to a vkCmdSetStencilCompareMask() command. **/ typedef struct SetStencilCompareMaskCommand : public Command { - VkStencilFaceFlagsVariable(face_mask); + Anvil::StencilFaceFlags face_mask; uint32_t stencil_compare_mask; /** Constructor. **/ - explicit SetStencilCompareMaskCommand(VkStencilFaceFlags in_face_mask, - uint32_t in_stencil_compare_mask); + explicit SetStencilCompareMaskCommand(Anvil::StencilFaceFlags in_face_mask, + uint32_t in_stencil_compare_mask); /** Destructor. 
*/ virtual ~SetStencilCompareMaskCommand() @@ -2249,13 +2749,13 @@ namespace Anvil /** Holds all arguments passed to a vkCmdSetStencilReference() command. **/ typedef struct SetStencilReferenceCommand : public Command { - VkStencilFaceFlagsVariable(face_mask); + Anvil::StencilFaceFlags face_mask; uint32_t stencil_reference; /** Constructor. **/ - explicit SetStencilReferenceCommand(VkStencilFaceFlags in_face_mask, - uint32_t in_stencil_reference); + explicit SetStencilReferenceCommand(Anvil::StencilFaceFlags in_face_mask, + uint32_t in_stencil_reference); /** Destructor. */ virtual ~SetStencilReferenceCommand() @@ -2268,13 +2768,13 @@ namespace Anvil /** Holds all arguments passed to a vkCmdSetStencilWriteMask() command. **/ typedef struct SetStencilWriteMaskCommand : public Command { - VkStencilFaceFlagsVariable(face_mask); + Anvil::StencilFaceFlags face_mask; uint32_t stencil_write_mask; /** Constructor. **/ - explicit SetStencilWriteMaskCommand(VkStencilFaceFlags in_face_mask, - uint32_t in_stencil_write_mask); + explicit SetStencilWriteMaskCommand(Anvil::StencilFaceFlags in_face_mask, + uint32_t in_stencil_write_mask); /** Destructor. */ virtual ~SetStencilWriteMaskCommand() @@ -2305,17 +2805,17 @@ namespace Anvil /** Holds all arguments passed to a vkCmdUpdateBuffer() command. **/ typedef struct UpdateBufferCommand : public Command { - const uint32_t* data_ptr; - VkDeviceSize data_size; - VkBuffer dst_buffer; - std::shared_ptr dst_buffer_ptr; - VkDeviceSize dst_offset; + const void* data_ptr; + VkDeviceSize data_size; + VkBuffer dst_buffer; + Anvil::Buffer* dst_buffer_ptr; + VkDeviceSize dst_offset; /** Constructor **/ - explicit UpdateBufferCommand(std::shared_ptr in_dst_buffer_ptr, - VkDeviceSize in_dst_offset, - VkDeviceSize in_data_size, - const uint32_t* in_data_ptr); + explicit UpdateBufferCommand(Anvil::Buffer* in_dst_buffer_ptr, + VkDeviceSize in_dst_offset, + VkDeviceSize in_data_size, + const void* in_data_ptr); /** Destructor. */ virtual ~UpdateBufferCommand() @@ -2327,31 +2827,30 @@ namespace Anvil UpdateBufferCommand& operator=(const UpdateBufferCommand&); } UpdateBufferCommand; - /** Holds all arguments passed to a vkCmdWaitEvents() command. 
**/ typedef struct WaitEventsCommand : public Command { - VkPipelineStageFlagsVariable(dst_stage_mask); - VkPipelineStageFlagsVariable(src_stage_mask); + Anvil::PipelineStageFlags dst_stage_mask; + Anvil::PipelineStageFlags src_stage_mask; std::vector buffer_barriers; std::vector image_barriers; - std::vector memory_barriers; + std::vector memory_barriers; - std::vector events; - std::vector > event_ptrs; + std::vector events; + std::vector event_ptrs; /** Constructor **/ - explicit WaitEventsCommand(uint32_t in_event_count, - std::shared_ptr* in_event_ptrs, - VkPipelineStageFlags in_src_stage_mask, - VkPipelineStageFlags in_dst_stage_mask, - uint32_t in_memory_barrier_count, - const MemoryBarrier* const in_memory_barrier_ptr_ptr, - uint32_t in_buffer_memory_barrier_count, - const BufferBarrier* const in_buffer_memory_barrier_ptr_ptr, - uint32_t in_image_memory_barrier_count, - const ImageBarrier* const in_image_memory_barrier_ptr_ptr); + explicit WaitEventsCommand(uint32_t in_event_count, + Anvil::Event* const* in_event_ptrs, + Anvil::PipelineStageFlags in_src_stage_mask, + Anvil::PipelineStageFlags in_dst_stage_mask, + uint32_t in_memory_barrier_count, + const MemoryBarrierAnv* const in_memory_barrier_ptr_ptr, + uint32_t in_buffer_memory_barrier_count, + const BufferBarrier* const in_buffer_memory_barrier_ptr_ptr, + uint32_t in_image_memory_barrier_count, + const ImageBarrier* const in_image_memory_barrier_ptr_ptr); /** Destructor. */ virtual ~WaitEventsCommand() @@ -2363,19 +2862,42 @@ namespace Anvil WaitEventsCommand& operator=(const WaitEventsCommand&); } WaitEventsCommand; + /** Holds all arguments passed to vkCmdWriteBufferMarkerAMD() command. **/ + typedef struct WriteBufferMarkerAMDCommand : public Command + { + Anvil::PipelineStageFlagBits pipeline_stage; + Anvil::Buffer* dst_buffer_ptr; + VkDeviceSize dst_offset; + uint32_t marker; + + /** Constructor **/ + explicit WriteBufferMarkerAMDCommand(const Anvil::PipelineStageFlagBits& in_pipeline_stage, + Anvil::Buffer* in_dst_buffer_ptr, + VkDeviceSize in_dst_offset, + const uint32_t& in_marker); + + /** Destructor. */ + virtual ~WriteBufferMarkerAMDCommand() + { + /* Stub */ + } + + private: + WriteBufferMarkerAMDCommand& operator=(const WriteBufferMarkerAMDCommand&); + } WriteBufferMarkerAMDCommand; /** Holds all arguments passed to a vkCmdWriteTimestamp() command. **/ typedef struct WriteTimestampCommand : public Command { - VkPipelineStageFlagsVariable(pipeline_stage); + Anvil::PipelineStageFlagBits pipeline_stage; - Anvil::QueryIndex entry; - std::shared_ptr query_pool_ptr; + Anvil::QueryIndex entry; + Anvil::QueryPool* query_pool_ptr; /** Constructor. **/ - explicit WriteTimestampCommand(VkPipelineStageFlagBits in_pipeline_stage, - std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_entry); + explicit WriteTimestampCommand(Anvil::PipelineStageFlagBits in_pipeline_stage, + Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_entry); /** Destructor. 
*/ virtual ~WriteTimestampCommand() @@ -2391,38 +2913,29 @@ namespace Anvil typedef std::vector Commands; /* Protected functions */ - explicit CommandBufferBase(std::weak_ptr in_device_ptr, - std::shared_ptr in_parent_command_pool_ptr, - CommandBufferType in_type); - - virtual ~CommandBufferBase(); - - void cache_referenced_buffer (std::shared_ptr in_buffer_ptr); - void cache_referenced_command_buffer(std::shared_ptr in_cmd_buffer_ptr); - void cache_referenced_descriptor_set(std::shared_ptr in_ds_ptr); - void cache_referenced_event (std::shared_ptr in_event_ptr); - void cache_referenced_framebuffer (std::shared_ptr in_fb_ptr); - void cache_referenced_image (std::shared_ptr in_image_ptr); - void cache_referenced_query_pool (std::shared_ptr in_query_pool_ptr); - void cache_referenced_renderpass (std::shared_ptr in_renderpass_ptr); + explicit CommandBufferBase(const Anvil::BaseDevice* in_device_ptr, + Anvil::CommandPool* in_parent_command_pool_ptr, + CommandBufferType in_type, + bool in_mt_safe); #ifdef STORE_COMMAND_BUFFER_COMMANDS void clear_commands(); #endif - void clear_referenced_objects(); - /* Protected variables */ #ifdef STORE_COMMAND_BUFFER_COMMANDS Commands m_commands; #endif - VkCommandBuffer m_command_buffer; - std::weak_ptr m_device_ptr; - bool m_is_renderpass_active; - std::weak_ptr m_parent_command_pool_ptr; - bool m_recording_in_progress; - CommandBufferType m_type; + VkCommandBuffer m_command_buffer; + uint32_t m_device_mask; + const Anvil::BaseDevice* m_device_ptr; + bool m_is_renderpass_active; + uint32_t m_n_debug_label_regions_started; + Anvil::CommandPool* m_parent_command_pool_ptr; + bool m_recording_in_progress; + uint32_t m_renderpass_device_mask; + CommandBufferType m_type; static bool m_command_stashing_disabled; @@ -2434,14 +2947,6 @@ namespace Anvil CommandBufferBase& operator=(const CommandBufferBase&); /* Private variables */ - std::vector > m_referenced_buffers; - std::vector > m_referenced_command_buffers; - std::vector > m_referenced_descriptor_sets; - std::vector > m_referenced_events; - std::vector > m_referenced_framebuffers; - std::vector > m_referenced_images; - std::vector > m_referenced_query_pools; - std::vector > m_referenced_renderpasses; friend class Anvil::CommandPool; }; @@ -2465,19 +2970,96 @@ namespace Anvil * Calling this function for a command buffer which has not been put into a recording mode * (by issuing a start_recording() call earlier) will result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. + * This function prototype can be called for both single- and multi-GPU devices. In the latter case, + * it will be assumed that all physical devices within the device group should be used for + * the device mask and that they all should use the same render area. * * Argument meaning is as per Vulkan API specification. * + * NOTE: If either @param in_opt_n_attachment_initial_sample_locations or @param in_opt_n_post_subpass_sample_locations + * (or both) are not zero, VK_EXT_sample_locations is required. + * + * @return true if successful, false otherwise. 
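 *
 * A minimal usage sketch (illustrative only; assumes fbo_ptr, render_pass_ptr and the primary command
 * buffer cmd_buffer_ptr were created earlier, and that SubpassContents::INLINE is the enumerator used
 * for inline subpass recording):
 *
 *     VkClearValue clear_value = {};
 *     VkRect2D     render_area = { {0, 0}, {1280, 720} };
 *
 *     cmd_buffer_ptr->record_begin_render_pass(1,            // in_n_clear_values
 *                                              &clear_value,
 *                                              fbo_ptr,
 *                                              render_area,
 *                                              render_pass_ptr,
 *                                              Anvil::SubpassContents::INLINE);
 *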
+ **/ + bool record_begin_render_pass(uint32_t in_n_clear_values, + const VkClearValue* in_clear_value_ptrs, + Anvil::Framebuffer* in_fbo_ptr, + VkRect2D in_render_area, + Anvil::RenderPass* in_render_pass_ptr, + Anvil::SubpassContents in_contents, + const uint32_t& in_opt_n_attachment_initial_sample_locations = 0, + const Anvil::AttachmentSampleLocations* in_opt_attachment_initial_sample_locations_ptr = nullptr, + const uint32_t& in_opt_n_post_subpass_sample_locations = 0, + const Anvil::SubpassSampleLocations* in_opt_post_subpass_sample_locations_ptr = nullptr); + + /** See documentation for the other record_begin_render_pass() function prototype for general + * information about this function. + * + * This prototype can only be used for multi-GPU devices. + * + * TODO + */ + bool record_begin_render_pass(uint32_t in_n_clear_values, + const VkClearValue* in_clear_value_ptrs, + Anvil::Framebuffer* in_fbo_ptr, + uint32_t in_device_mask, + uint32_t in_n_render_areas, + const VkRect2D* in_render_areas_ptr, + Anvil::RenderPass* in_render_pass_ptr, + Anvil::SubpassContents in_contents, + const uint32_t& in_opt_n_attachment_initial_sample_locations = 0, + const Anvil::AttachmentSampleLocations* in_opt_attachment_initial_sample_locations_ptr = nullptr, + const uint32_t& in_opt_n_post_subpass_sample_locations = 0, + const Anvil::SubpassSampleLocations* in_opt_post_subpass_sample_locations_ptr = nullptr); + + /** Issues a vkCmdBeginRenderPass2KHR() call and appends it to the internal vector of commands + * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS + * #define enabled). + * + * Calling this function for a command buffer which has not been put into a recording mode + * (by issuing a start_recording() call earlier) will result in an assertion failure. + * + * This function prototype can be called for both single- and multi-GPU devices. In the latter case, + * it will be assumed that all physical devices within the device group should be used for + * the device mask and that they all should use the same render area. + * + * Requires VK_KHR_create_renderpass2 support. Argument meaning is as per extension specification.. + * + * NOTE: If either @param in_opt_n_attachment_initial_sample_locations or @param in_opt_n_post_subpass_sample_locations + * (or both) are not zero, VK_EXT_sample_locations is required. + * * @return true if successful, false otherwise. **/ - bool record_begin_render_pass(uint32_t in_n_clear_values, - const VkClearValue* in_clear_value_ptrs, - std::shared_ptr in_fbo_ptr, - VkRect2D in_render_area, - std::shared_ptr in_render_pass_ptr, - VkSubpassContents in_contents); + bool record_begin_render_pass2_KHR(uint32_t in_n_clear_values, + const VkClearValue* in_clear_value_ptrs, + Anvil::Framebuffer* in_fbo_ptr, + VkRect2D in_render_area, + Anvil::RenderPass* in_render_pass_ptr, + Anvil::SubpassContents in_contents, + const uint32_t& in_opt_n_attachment_initial_sample_locations = 0, + const Anvil::AttachmentSampleLocations* in_opt_attachment_initial_sample_locations_ptr = nullptr, + const uint32_t& in_opt_n_post_subpass_sample_locations = 0, + const Anvil::SubpassSampleLocations* in_opt_post_subpass_sample_locations_ptr = nullptr); + + /** See documentation for the other record_begin_render_pass2_KHR() function prototype for general + * information about this function. + * + * This prototype can only be used for multi-GPU devices. 
+ * + * TODO + */ + bool record_begin_render_pass2_KHR(uint32_t in_n_clear_values, + const VkClearValue* in_clear_value_ptrs, + Anvil::Framebuffer* in_fbo_ptr, + uint32_t in_device_mask, + uint32_t in_n_render_areas, + const VkRect2D* in_render_areas_ptr, + Anvil::RenderPass* in_render_pass_ptr, + Anvil::SubpassContents in_contents, + const uint32_t& in_opt_n_attachment_initial_sample_locations = 0, + const Anvil::AttachmentSampleLocations* in_opt_attachment_initial_sample_locations_ptr = nullptr, + const uint32_t& in_opt_n_post_subpass_sample_locations = 0, + const Anvil::SubpassSampleLocations* in_opt_post_subpass_sample_locations_ptr = nullptr); /** Issues a vkCmdEndRenderPass() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -2492,22 +3074,32 @@ namespace Anvil **/ bool record_end_render_pass(); - /** Issues a vkCmdExecuteCommands() call and appends it to the internal vector of commands + /** Issues a vkCmdEndRenderPass2KHR() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS * #define enabled). * * Calling this function for a command buffer which has not been put into a recording mode * (by issuing a start_recording() call earlier) will result in an assertion failure. * - * Any Vulkan object wrapper instances passed to this function are going to be retained, - * and will be released when the command buffer is released or resetted. + * Requires VK_KHR_create_renderpass2 support. Argument meaning is as per extension specification. + * + * @return true if successful, false otherwise. + **/ + bool record_end_render_pass2_KHR(); + + /** Issues a vkCmdExecuteCommands() call and appends it to the internal vector of commands + * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS + * #define enabled). + * + * Calling this function for a command buffer which has not been put into a recording mode + * (by issuing a start_recording() call earlier) will result in an assertion failure. * * Argument meaning is as per Vulkan API specification. * * @return true if successful, false otherwise. **/ - bool record_execute_commands(uint32_t in_cmd_buffers_count, - std::shared_ptr* in_cmd_buffers); + bool record_execute_commands(uint32_t in_cmd_buffers_count, + Anvil::SecondaryCommandBuffer** in_cmd_buffers); /** Issues a vkCmdNextSubpass() call and appends it to the internal vector of commands * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS @@ -2520,22 +3112,42 @@ namespace Anvil * * @return true if successful, false otherwise. **/ - bool record_next_subpass(VkSubpassContents in_contents); + bool record_next_subpass(Anvil::SubpassContents in_contents); + + /** Issues a vkCmdNextSubpass2KHR() call and appends it to the internal vector of commands + * recorded for the specified command buffer (for builds with STORE_COMMAND_BUFFER_COMMANDS + * #define enabled). + * + * Calling this function for a command buffer which has not been put into a recording mode + * (by issuing a start_recording() call earlier) will result in an assertion failure. + * + * Requires VK_KHR_create_renderpass2. Argument meaning is as per extension spec. + * + * @return true if successful, false otherwise. 
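 *
 * For a render pass started with record_begin_render_pass2_KHR(), each transition to the next subpass
 * is recorded as below (illustrative sketch; assumes SubpassContents::INLINE is the enumerator used for
 * inline subpass recording), and the pass is then closed with record_end_render_pass2_KHR():
 *
 *     cmd_buffer_ptr->record_next_subpass2_KHR(Anvil::SubpassContents::INLINE);
 *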
+ **/ + bool record_next_subpass2_KHR(Anvil::SubpassContents in_contents); /** Issues a vkBeginCommandBufer() call and clears the internally managed vector of recorded * commands, if STORE_COMMAND_BUFFER_COMMANDS has been defined for the build. * * It is an error to invoke this function if recording is already in progress. * + * The prototype without the @param in_device_mask argument can be used for both sGPU and mGPU devices. + * In the latter case, all physical devices assigned to the logical device will be used for the device + * mask. + * * @param in_one_time_submit true if the VK_CMD_BUFFER_OPTIMIZE_ONE_TIME_SUBMIT_BIT flag should * be used for the Vulkan API call. * @param in_simultaneous_use_allowed true if the VK_CMD_BUFFER_OPTIMIZE_NO_SIMULTANEOUS_USE_BIT flag should * be used for the Vulkan API call. + * @param in_opt_device_mask Indices of physical devices to allow for usage for the command buffer. Must not be 0. + * This is only used with mGPU devices. * * @return true if successful, false otherwise. **/ - bool start_recording(bool in_one_time_submit, - bool in_simultaneous_use_allowed); + bool start_recording(bool in_one_time_submit, + bool in_simultaneous_use_allowed, + uint32_t in_opt_device_mask = UINT32_MAX); protected: /** Constructor. Should be used to instantiate primary-level command buffers. @@ -2547,8 +3159,9 @@ namespace Anvil * @param in_parent_command_pool_ptr Command pool to use as a parent. Must not be nullptr. * **/ - PrimaryCommandBuffer(std::weak_ptr in_device_ptr, - std::shared_ptr in_parent_command_pool_ptr); + PrimaryCommandBuffer(const Anvil::BaseDevice* in_device_ptr, + CommandPool* in_parent_command_pool_ptr, + bool in_mt_safe); private: friend class Anvil::CommandPool; @@ -2556,6 +3169,23 @@ namespace Anvil /* Private functions */ PrimaryCommandBuffer (const PrimaryCommandBuffer&); PrimaryCommandBuffer& operator=(const PrimaryCommandBuffer&); + + bool record_begin_render_pass_internal(const bool& in_use_khr_create_rp2_extension, + uint32_t in_n_clear_values, + const VkClearValue* in_clear_value_ptrs, + Anvil::Framebuffer* in_fbo_ptr, + uint32_t in_device_mask, + uint32_t in_n_render_areas, + const VkRect2D* in_render_areas_ptr, + Anvil::RenderPass* in_render_pass_ptr, + Anvil::SubpassContents in_contents, + const uint32_t& in_opt_n_attachment_initial_sample_locations, + const Anvil::AttachmentSampleLocations* in_opt_attachment_initial_sample_locations_ptr, + const uint32_t& in_opt_n_post_subpass_sample_locations, + const Anvil::SubpassSampleLocations* in_opt_post_subpass_sample_locations_ptr); + bool record_end_render_pass_internal (const bool& in_use_khr_create_rp2_extension); + bool record_next_subpass_internal (const bool& in_use_khr_create_rp2_extension, + Anvil::SubpassContents in_contents); }; /** Wrapper class for secondary command buffers. */ @@ -2587,15 +3217,16 @@ namespace Anvil * * @return true if successful, false otherwise. 
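 *
 * An illustrative call outline (placeholder names; occlusion_scope and stats_scope stand for the
 * application-selected Anvil::OcclusionQuerySupportScope and Anvil::QueryPipelineStatisticFlags values,
 * while fbo_ptr, render_pass_ptr and subpass_id describe where the secondary command buffer will be executed):
 *
 *     secondary_cmd_buffer_ptr->start_recording(true,      // in_one_time_submit
 *                                               false,     // in_simultaneous_use_allowed
 *                                               true,      // in_renderpass_usage_only
 *                                               fbo_ptr,
 *                                               render_pass_ptr,
 *                                               subpass_id,
 *                                               occlusion_scope,
 *                                               false,     // in_occlusion_query_used_by_primary_command_buffer
 *                                               stats_scope);
 *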
**/ - bool start_recording(bool in_one_time_submit, - bool in_simultaneous_use_allowed, - bool in_renderpass_usage_only, - std::shared_ptr in_framebuffer_ptr, - std::shared_ptr in_render_pass_ptr, - SubPassID in_subpass_id, - OcclusionQuerySupportScope in_required_occlusion_query_support_scope, - bool in_occlusion_query_used_by_primary_command_buffer, - VkQueryPipelineStatisticFlags in_required_pipeline_statistics_scope); + bool start_recording(bool in_one_time_submit, + bool in_simultaneous_use_allowed, + bool in_renderpass_usage_only, + Framebuffer* in_framebuffer_ptr, + RenderPass* in_render_pass_ptr, + SubPassID in_subpass_id, + OcclusionQuerySupportScope in_required_occlusion_query_support_scope, + bool in_occlusion_query_used_by_primary_command_buffer, + Anvil::QueryPipelineStatisticFlags in_required_pipeline_statistics_scope, + uint32_t in_opt_device_mask = UINT32_MAX); /* Destructor */ virtual ~SecondaryCommandBuffer() @@ -2609,8 +3240,9 @@ namespace Anvil * @param in_parent_command_pool_ptr Command pool to use as a parent. Must not be nullptr. * **/ - SecondaryCommandBuffer(std::weak_ptr in_device_ptr, - std::shared_ptr in_parent_command_pool_ptr); + SecondaryCommandBuffer(const Anvil::BaseDevice* in_device_ptr, + CommandPool* in_parent_command_pool_ptr, + bool in_mt_safe); private: friend class Anvil::CommandPool; diff --git a/include/wrappers/command_pool.h b/include/wrappers/command_pool.h index 37685da6..00c29a1e 100644 --- a/include/wrappers/command_pool.h +++ b/include/wrappers/command_pool.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2019 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -20,24 +20,19 @@ // THE SOFTWARE. // -/** Defines a command pool wrapper class which simplify the following tasks: - * - * - Primary- and second-level command buffer allocation & resetting. - * - State caching - * - **/ #ifndef WRAPPERS_COMMAND_POOL_H #define WRAPPERS_COMMAND_POOL_H #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/types.h" namespace Anvil { /** Implements a command pool wrapper */ - class CommandPool : public DebugMarkerSupportProvider, - public std::enable_shared_from_this + class CommandPool : public MTSafetySupportProvider, + public DebugMarkerSupportProvider { public: /* Public functions */ @@ -51,7 +46,7 @@ namespace Anvil * * @return As per description. **/ - std::shared_ptr alloc_primary_level_command_buffer(); + Anvil::PrimaryCommandBufferUniquePtr alloc_primary_level_command_buffer(); /** Allocates a new secondary-level command buffer instance from this command pool. * @@ -59,21 +54,20 @@ namespace Anvil * * @return As per description. **/ - std::shared_ptr alloc_secondary_level_command_buffer(); + Anvil::SecondaryCommandBufferUniquePtr alloc_secondary_level_command_buffer(); /** Creates a new CommandPool object. * - * @param in_device_ptr Device to create the command pool for. Must not be nullptr. - * @param in_transient_allocations_friendly Set to true if the command pool should be created with the - * VK_COMMAND_POOL_CREATE_TRANSIENT_BIT flag set on. - * @param in_support_per_cmdbuf_reset_ops Set to true if the command pool should be created with the - * VK_COMMAND_POOL_RESET_COMMAND_BUFFER_BIT flag set on. - * @param in_queue_family Index of the queue family the command pool should be created for. 
+ * @param in_device_ptr Device to create the command pool for. Must not be nullptr. + * @param in_create_flags Create flags to use. + * @param in_queue_family_index Index of the Vulkan queue family the command pool should be created for. + * @param in_mt_safe Enable if your application is going to be calling any of the + * alloc_*() functions from more than one thread at a time. **/ - static std::shared_ptr create(std::weak_ptr in_device_ptr, - bool in_transient_allocations_friendly, - bool in_support_per_cmdbuf_reset_ops, - Anvil::QueueFamilyType in_queue_family); + static CommandPoolUniquePtr create(Anvil::BaseDevice* in_device_ptr, + const Anvil::CommandPoolCreateFlags& in_create_flags, + uint32_t in_queue_family_index, + MTSafety in_mt_safety = MTSafety::INHERIT_FROM_PARENT_DEVICE); /** Retrieves the raw Vulkan handle for the encapsulated command pool */ VkCommandPool get_command_pool() const @@ -81,18 +75,16 @@ namespace Anvil return m_command_pool; } - /** Tells what queue family this command pool instance has been created for */ - Anvil::QueueFamilyType get_queue_family_type() const + /** Returns create flags specified at instantiation time */ + const Anvil::CommandPoolCreateFlags& get_create_flags() const { - return m_queue_family; + return m_create_flags; } - /** Tells whether the command pool has been created with VK_COMMAND_POOL_CREATE_TRANSIENT_BIT - * flag defined. - ***/ - bool is_transient_allocations_friendly() const + /** Tells which Vulkan queue family this command pool instance has been created for */ + uint32_t get_queue_family_index() const { - return m_is_transient_allocations_friendly; + return m_queue_family_index; } /** Reset the command pool. @@ -104,12 +96,6 @@ namespace Anvil **/ bool reset(bool in_release_resources); - /** Tells whether the command buffers, allocated from this command pool, support reset operations */ - bool supports_per_cmdbuf_reset_ops() const - { - return m_supports_per_cmdbuf_reset_ops; - } - /* Trims the command buffer as per VK_KHR_maintenance1 extension spec. * * Requires VK_KHR_maintenance1 extension support. An assertion failure will occur if the @@ -121,20 +107,19 @@ namespace Anvil /* Private functions */ /* Please see create() documentation for more details */ - explicit CommandPool(std::weak_ptr in_device_ptr, - bool in_transient_allocations_friendly, - bool in_support_per_cmdbuf_reset_ops, - Anvil::QueueFamilyType in_queue_family); + explicit CommandPool(Anvil::BaseDevice* in_device_ptr, + const Anvil::CommandPoolCreateFlags& in_create_flags, + uint32_t in_queue_family_index, + bool in_mt_safe); CommandPool (const CommandPool&); CommandPool& operator=(const CommandPool&); /* Private variables */ - VkCommandPool m_command_pool; - std::weak_ptr m_device_ptr; - bool m_is_transient_allocations_friendly; - Anvil::QueueFamilyType m_queue_family; - bool m_supports_per_cmdbuf_reset_ops; + VkCommandPool m_command_pool; + Anvil::CommandPoolCreateFlags m_create_flags; + Anvil::BaseDevice* m_device_ptr; + uint32_t m_queue_family_index; friend class Anvil::CommandBufferBase; }; diff --git a/include/wrappers/compute_pipeline_manager.h b/include/wrappers/compute_pipeline_manager.h index 05185769..272a943e 100644 --- a/include/wrappers/compute_pipeline_manager.h +++ b/include/wrappers/compute_pipeline_manager.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved.
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -36,179 +36,27 @@ namespace Anvil { - typedef PipelineID ComputePipelineID; - class ComputePipelineManager : public BasePipelineManager { public: + using BasePipelineManager::bake; + /* Public functions */ - static std::shared_ptr create(std::weak_ptr in_device_ptr, - bool in_use_pipeline_cache = false, - std::shared_ptr in_pipeline_cache_to_reuse_ptr = nullptr); + static std::unique_ptr create(Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe, + bool in_use_pipeline_cache, + Anvil::PipelineCache* in_pipeline_cache_to_reuse_ptr); virtual ~ComputePipelineManager(); - - /** Registers a new derivative pipeline which is going to inherit state from another compute pipeline object, - * which has already been added to the compute pipeline manager. - * - * The function does not automatically bake the new pipeline. This action can either be explicitly - * requested by calling bake(), or by calling one of the get_*() functions. - * - * @param in_disable_optimizations If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT flag. - * @param in_allow_derivatives If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag. - * @param in_compute_shader_stage_entrypoint_info Compute shader stage entrypoint info. - * @param in_base_pipeline_id ID of the pipeline, which should be used as base for the new pipeline - * object. Must be an ID earlier returned by one of the other add_() functions. - * @param out_pipeline_id_ptr Deref will be set to the ID of the result pipeline object. Must not be nullptr. - * - * @return true if the function executed successfully, false otherwise. - **/ - bool add_derivative_pipeline_from_sibling_pipeline(bool in_disable_optimizations, - bool in_allow_derivatives, - const ShaderModuleStageEntryPoint& in_compute_shader_stage_entrypoint_info, - ComputePipelineID in_base_pipeline_id, - ComputePipelineID* out_pipeline_id_ptr) - { - return BasePipelineManager::add_derivative_pipeline_from_sibling_pipeline(in_disable_optimizations, - in_allow_derivatives, - 1, /* n_pipeline_stage_shader_info_items */ - &in_compute_shader_stage_entrypoint_info, - in_base_pipeline_id, - out_pipeline_id_ptr); - } - - /** Registers a new derivative pipeline which is going to inherit its state from another Vulkan pipeline object. - * - * The function does not automatically bake the new pipeline. This action can either be explicitly - * requested by calling bake(), or by calling one of the get_*() functions. - * - * @param in_disable_optimizations If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT flag. - * @param in_allow_derivatives If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag. - * @param in_compute_shader_stage_info Compute shader stage entrypoint info. - * @param in_base_pipeline Handle of the pipeline, which should be used as base for the new pipeline - * object. Must not be nullptr. - * @param out_pipeline_id_ptr Deref will be set to the ID of the result pipeline object. Must not be nullptr. - * - * @return true if the function executed successfully, false otherwise. 
- **/ - bool add_derivative_pipeline_from_pipeline(bool in_disable_optimizations, - bool in_allow_derivatives, - const ShaderModuleStageEntryPoint& in_compute_shader_stage_entrypoint_info, - VkPipeline in_base_pipeline, - ComputePipelineID* out_compute_pipeline_id_ptr) - { - return BasePipelineManager::add_derivative_pipeline_from_pipeline(in_disable_optimizations, - in_allow_derivatives, - 1, /* n_pipeline_stage_shader_info_items */ - &in_compute_shader_stage_entrypoint_info, - in_base_pipeline, - out_compute_pipeline_id_ptr); - } - - /** Registers a new pipeline object. - * - * The function does not automatically bake the new pipeline. This action can either be explicitly - * requested by calling bake(), or by calling one of the get_*() functions. - * - * @param in_disable_optimizations If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT flag. - * @param in_allow_derivatives If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag. - * @param in_compute_shader_stage_entrypoint_info Compute shader stage info. - * @param out_compute_pipeline_id_ptr Deref will be set to the ID of the result pipeline object. Must not be nullptr. - * - * @return true if the function executed successfully, false otherwise. - **/ - bool add_regular_pipeline(bool in_disable_optimizations, - bool in_allow_derivatives, - const ShaderModuleStageEntryPoint& in_compute_shader_stage_entrypoint_info, - ComputePipelineID* out_compute_pipeline_id_ptr) - { - return BasePipelineManager::add_regular_pipeline(in_disable_optimizations, - in_allow_derivatives, - 1, /* n_pipeline_stage_shader_info_items */ - &in_compute_shader_stage_entrypoint_info, - out_compute_pipeline_id_ptr); - } - - /** Adds a new specialization constant to the specified pipeline object. - * - * This function marks the pipeline as dirty, meaning it will be rebaked at the next get_() call. - * - * @param in_pipeline_id ID of the pipeline to add the constant to. Must be an ID returned - * by one of the add_() functions. - * @param in_constant_id ID of the specialization constant to assign data for. - * @param in_n_data_bytes Number of bytes under @param in_data_ptr to assign to the specialization constant. - * @param in_data_ptr A buffer holding the data to be assigned to the constant. Must hold at least - * @param in_n_data_bytes bytes that will be read by the function. - * - * @return true if successful, false otherwise. - **/ - bool add_specialization_constant_to_pipeline(ComputePipelineID in_pipeline_id, - uint32_t in_constant_id, - uint32_t in_n_data_bytes, - const void* in_data_ptr) - { - return BasePipelineManager::add_specialization_constant_to_pipeline(in_pipeline_id, - 0, /* shader_index */ - in_constant_id, - in_n_data_bytes, - in_data_ptr); - } - bool bake(); - /** Deletes an existing pipeline. - * - * @param in_pipeline_id ID of a pipeline to delete. - * - * @return true if successful, false otherwise. - **/ - bool delete_pipeline(ComputePipelineID in_pipeline_id) - { - return BasePipelineManager::delete_pipeline(in_pipeline_id); - } - - /** Retrieves a VkPipeline instance associated with the specified pipeline ID. - * - * The function will bake a pipeline object (and, possibly, a pipeline layout object, too) if - * the specified pipeline is marked as dirty. - * - * @param in_pipeline_id ID of the pipeline to return the raw Vulkan pipeline handle for. - * - * @return VkPipeline handle or nullptr if the function failed. 
- **/ - VkPipeline get_compute_pipeline(ComputePipelineID in_pipeline_id) - { - return BasePipelineManager::get_pipeline(in_pipeline_id); - } - - /** Retrieves a PipelineLayout instance associated with the specified pipeline ID. - * - * The function will bake a pipeline object (and, possibly, a pipeline layout object, too) if - * the specified pipeline is marked as dirty. - * - * @param in_pipeline_id ID of the pipeline to return the wrapper instance for. - * Must not describe a proxy pipeline. - * - * @return Ptr to a PipelineLayout instance or nullptr if the function failed. - **/ - std::shared_ptr get_compute_pipeline_layout(ComputePipelineID in_pipeline_id) - { - return BasePipelineManager::get_pipeline_layout(in_pipeline_id); - } - private: /* Constructor */ - explicit ComputePipelineManager(std::weak_ptr in_device_ptr, - bool in_use_pipeline_cache = false, - std::shared_ptr in_pipeline_cache_to_reuse_ptr = nullptr); - + explicit ComputePipelineManager(Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe, + bool in_use_pipeline_cache = false, + Anvil::PipelineCache* in_pipeline_cache_to_reuse_ptr = nullptr); ANVIL_DISABLE_ASSIGNMENT_OPERATOR(ComputePipelineManager); ANVIL_DISABLE_COPY_CONSTRUCTOR (ComputePipelineManager); diff --git a/include/wrappers/debug_messenger.h b/include/wrappers/debug_messenger.h new file mode 100644 index 00000000..8dbe93de --- /dev/null +++ b/include/wrappers/debug_messenger.h @@ -0,0 +1,108 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// + +/* Provides support for debug utils messengers, introduced by VK_EXT_debug_utils extension. + * + * Also supports a fall-back path leveraging VK_EXT_debug_report if VK_EXT_debug_utils is unavailable. + * + */ +#ifndef WRAPPERS_DEBUG_MESSENGER_H +#define WRAPPERS_DEBUG_MESSENGER_H + +#include "misc/types.h" +#include "misc/debug_marker.h" +#include "misc/mt_safety.h" +#include "wrappers/device.h" + +namespace Anvil +{ + class DebugMessenger : public MTSafetySupportProvider + { + public: + /* Public functions */ + + virtual ~DebugMessenger(); + + /* Creates a debug messenger instance. + * + * Functions exposed by this wrapper require VK_EXT_debug_utils to be available. However, portion of the offered + * functionality can be handled by implementations supporting VK_EXT_debug_report; in cases only the latter is + * available, it will be used instead. 
Unavailable pieces of information will not be included in the callbacks + * in such case. + * + * The function will fail if neither of the two extensions is available. + **/ + static DebugMessengerUniquePtr create(Anvil::DebugMessengerCreateInfoUniquePtr in_create_info_ptr); + + const Anvil::DebugMessengerCreateInfo* get_create_info_ptr() const + { + return m_create_info_ptr.get(); + } + + void submit_message(const Anvil::DebugMessageSeverityFlagBits& in_message_severity, + const Anvil::DebugMessageTypeFlags& in_message_type_flags, + const char* in_message_id_name_ptr, + int32_t in_message_id, + const char* in_message_ptr, + const std::vector& in_queue_labels, + const std::vector& in_cmd_buffer_labels, + const std::vector& in_objects); + + private: + /* Private type definitions */ + enum class DebugAPI + { + EXT_DEBUG_REPORT, + EXT_DEBUG_UTILS + }; + + /* Private functions */ + DebugMessenger(const DebugAPI& in_debug_api, + Anvil::DebugMessengerCreateInfoUniquePtr in_create_info_ptr); + + static Anvil::DebugMessageSeverityFlags get_debug_message_severity_flags_for_debug_report_flags(const VkDebugReportFlagsEXT& in_flags); + static VkDebugReportFlagsEXT get_debug_report_flags_for_debug_message_severity_flags(const Anvil::DebugMessageSeverityFlags& in_flags); + + static VkBool32 VKAPI_PTR callback_handler_ext_debug_report(VkDebugReportFlagsEXT in_flags, + VkDebugReportObjectTypeEXT in_object_type, + uint64_t in_object, + size_t in_location, + int32_t in_message_code, + const char* in_layer_prefix_ptr, + const char* in_message_ptr, + void* in_user_data_ptr); + static VkBool32 VKAPI_PTR callback_handler_ext_debug_utils (VkDebugUtilsMessageSeverityFlagBitsEXT in_message_severity, + VkDebugUtilsMessageTypeFlagsEXT in_message_types, + const VkDebugUtilsMessengerCallbackDataEXT* in_callback_data_ptr, + void* in_user_data_ptr); + + /* Private variables */ + Anvil::DebugMessengerCreateInfoUniquePtr m_create_info_ptr; + const DebugAPI m_debug_api; + + + VkDebugReportCallbackEXT m_debug_callback; //< only used if m_debug_api == EXT_DEBUG_REPORT + VkDebugUtilsMessengerEXT m_messenger; //< only used if m_debug_api == EXT_DEBUG_UTILS + }; +} + +#endif /* WRAPPERS_DEBUG_MESSENGER_H */ diff --git a/include/wrappers/descriptor_pool.h b/include/wrappers/descriptor_pool.h index 7098034c..f62af3d8 100644 --- a/include/wrappers/descriptor_pool.h +++ b/include/wrappers/descriptor_pool.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -31,6 +31,7 @@ #include "misc/callbacks.h" #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/types.h" namespace Anvil @@ -49,7 +50,7 @@ namespace Anvil class DescriptorPool : public CallbacksSupportProvider, public DebugMarkerSupportProvider, - public std::enable_shared_from_this + public MTSafetySupportProvider { public: /* Public functions */ @@ -57,15 +58,8 @@ namespace Anvil /** Creates a new DescriptorPool instance. Sets up the wrapper, but does not immediately * bake a new Vulkan pool. * - * @param in_device_ptr Device to use. - * @param in_n_max_sets Maximum number of sets to be allocable from the pool. Must be at - * least 1. - * @param in_releaseable_sets true if the sets should be releaseable with vkFreeDescriptorSet() - * calls. false otherwise. 
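 *
 * With the create-info based API, instantiation boils down to the following (illustrative sketch;
 * assumes pool_create_info_ptr is a previously filled Anvil::DescriptorPoolCreateInfoUniquePtr):
 *
 *     Anvil::DescriptorPoolUniquePtr pool_ptr = Anvil::DescriptorPool::create(std::move(pool_create_info_ptr) );
 *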
**/ - static std::shared_ptr create(std::weak_ptr in_device_ptr, - uint32_t in_n_max_sets, - bool in_releaseable_sets); + static DescriptorPoolUniquePtr create(Anvil::DescriptorPoolCreateInfoUniquePtr in_create_info_ptr); /** Destructor. Releases the Vulkan pool object if instantiated. */ virtual ~DescriptorPool(); @@ -87,27 +81,21 @@ namespace Anvil * This may be useful for KHR_maintenance1-aware applications. * @return true if successful, false otherwise. **/ - bool alloc_descriptor_sets(uint32_t in_n_sets, - std::shared_ptr* in_descriptor_set_layouts_ptr, - std::shared_ptr* out_descriptor_sets_ptr, - VkResult* out_opt_result_ptr = nullptr); - - bool alloc_descriptor_sets(uint32_t in_n_sets, - std::shared_ptr* in_descriptor_set_layouts_ptr, - VkDescriptorSet* out_descriptor_sets_vk_ptr, - VkResult* out_opt_result_ptr = nullptr); - - /** Tells if the pool allocated from the pool can be freed with vkFreeDescriptorSet() call. */ - bool are_sets_releaseable() const + bool alloc_descriptor_sets(uint32_t in_n_sets, + const DescriptorSetAllocation* in_ds_allocations_ptr, + DescriptorSetUniquePtr* out_descriptor_sets_ptr, + VkResult* out_opt_result_ptr = nullptr); + + bool alloc_descriptor_sets(uint32_t in_n_sets, + const DescriptorSetAllocation* in_ds_allocations_ptr, + VkDescriptorSet* out_descriptor_sets_vk_ptr, + VkResult* out_opt_result_ptr = nullptr); + + const Anvil::DescriptorPoolCreateInfo* get_create_info_ptr() const { - return m_releaseable_sets; + return m_create_info_ptr.get(); } - /** Releases previously baked Vulkan pool handle and instantiates a new Vulkan object, according - * to how the wrapper has been configured. - **/ - void bake(); - /** Returns a raw Vulkan handle for the pool. * * This function may re-bake the pool object, if necessary. @@ -116,11 +104,6 @@ namespace Anvil **/ const VkDescriptorPool& get_descriptor_pool() { - if (!m_baked) - { - bake(); - } - return m_pool; } @@ -130,75 +113,23 @@ namespace Anvil **/ bool reset(); - /** Configures how many descriptors of user-specified type should be allocable for a single - * set, that's going to be allocated from the pool. - * - * This function may mark the pool as dirty, meaning it will be re-baked at the next - * get_descriptor_pool() call time. - * - * @param in_descriptor_type Type of the descriptor to adjust the number of slots. - * @param in_n_slots_in_pool Number of descriptors of the specified type to be allocable for - * a single set. - * - **/ - void set_descriptor_array_size(VkDescriptorType in_descriptor_type, - uint32_t in_n_slots_in_pool) - { - anvil_assert(in_descriptor_type < VK_DESCRIPTOR_TYPE_RANGE_SIZE); - if (in_descriptor_type < VK_DESCRIPTOR_TYPE_RANGE_SIZE) - { - if (m_descriptor_count[in_descriptor_type] != in_n_slots_in_pool) - { - m_baked = false; - m_descriptor_count[in_descriptor_type] = in_n_slots_in_pool; - } - } - } - - /** Updates the maximum number of sets which can be allocated from the pool. - * - * This function may mark the pool as dirty, meaning it will be re-baked at the next - * get_descriptor_pool() call time. - * - * @param in_n_maximum_sets New maximum number of allocable sets. 
- **/ - void set_n_maximum_sets(uint32_t in_n_maximum_sets) - { - anvil_assert(in_n_maximum_sets != 0); - - if (m_n_max_sets != in_n_maximum_sets) - { - m_baked = false; - m_n_max_sets = in_n_maximum_sets; - } - } private: /* Private functions */ + bool init(); + /** Constructor */ - DescriptorPool(std::weak_ptr in_device_ptr, - uint32_t in_n_max_sets, - bool in_releaseable_sets); + DescriptorPool(Anvil::DescriptorPoolCreateInfoUniquePtr in_create_info_ptr, + bool in_mt_safe); DescriptorPool (const DescriptorPool&); DescriptorPool& operator=(const DescriptorPool&); /* Private variables */ - bool m_baked; - std::weak_ptr m_device_ptr; - VkDescriptorPool m_pool; - - uint32_t m_descriptor_count[VK_DESCRIPTOR_TYPE_RANGE_SIZE]; - uint32_t m_n_max_sets; - - /* The instances stored in this vector are owned by alloc() callers - do not release - * unless Vulkan object goes out of scope. - */ - std::vector > m_alloced_dses; - std::vector m_ds_cache; - std::vector m_ds_layout_cache; - - const bool m_releaseable_sets; + Anvil::DescriptorPoolCreateInfoUniquePtr m_create_info_ptr; + std::vector m_ds_cache; + std::vector m_ds_layout_cache; + VkDescriptorPool m_pool; }; }; /* namespace Anvil */ diff --git a/include/wrappers/descriptor_set.h b/include/wrappers/descriptor_set.h index 7ade5c53..62443934 100644 --- a/include/wrappers/descriptor_set.h +++ b/include/wrappers/descriptor_set.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -24,44 +24,43 @@ * * Implemented to: * - * - reference-count wrapper instances * - cache set binding information. * - monitor layout adjustments and act accordingly. * - monitor pool reset events and act accordingly. - * - * Not thread-safe at the moment. */ #ifndef WRAPPERS_DESCRIPTOR_SET_H #define WRAPPERS_DESCRIPTOR_SET_H #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/types.h" namespace Anvil { - class DescriptorSet : public DebugMarkerSupportProvider + class DescriptorSet : public DebugMarkerSupportProvider, + public MTSafetySupportProvider { public: /** Represents a single buffer object, which can be bound to a specific descriptor set slot */ typedef struct BufferBindingElement { - std::shared_ptr buffer_ptr; - VkDeviceSize size; - VkDeviceSize start_offset; + Anvil::Buffer* buffer_ptr; + VkDeviceSize size; + VkDeviceSize start_offset; /** Constructor. Associates all available buffer memory with the binding. * * @param in_buffer_ptr Buffer object to use for the binding. Must not be nullptr. **/ - BufferBindingElement(std::shared_ptr in_buffer_ptr); + BufferBindingElement(Anvil::Buffer* in_buffer_ptr); /** Constructor. Associates specified sub-region of the buffer memory with the binding. * * @param in_buffer_ptr Buffer object to use for the binding. Must not be nullptr. **/ - BufferBindingElement(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_start_offset, - VkDeviceSize in_size); + BufferBindingElement(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_start_offset, + VkDeviceSize in_size); /** Destructor. 
Releases the encapsulated buffer instance */ virtual ~BufferBindingElement(); @@ -73,7 +72,7 @@ namespace Anvil BufferBindingElement(const BufferBindingElement& in); /* Returns Vulkan descriptor type for this structure */ - virtual VkDescriptorType get_type() const = 0; + virtual Anvil::DescriptorType get_type() const = 0; private: BufferBindingElement& operator=(const BufferBindingElement& in); @@ -86,15 +85,15 @@ namespace Anvil { DynamicStorageBufferBindingElement() = delete; - DynamicStorageBufferBindingElement(std::shared_ptr in_buffer_ptr) + DynamicStorageBufferBindingElement(Anvil::Buffer* in_buffer_ptr) : BufferBindingElement(in_buffer_ptr) { /* Stub */ } - DynamicStorageBufferBindingElement(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_start_offset, - VkDeviceSize in_size) + DynamicStorageBufferBindingElement(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_start_offset, + VkDeviceSize in_size) : BufferBindingElement(in_buffer_ptr, in_start_offset, in_size) @@ -102,9 +101,9 @@ namespace Anvil /* Stub */ } - VkDescriptorType get_type() const + Anvil::DescriptorType get_type() const { - return VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC; + return Anvil::DescriptorType::STORAGE_BUFFER_DYNAMIC; } }; @@ -115,15 +114,15 @@ namespace Anvil { DynamicUniformBufferBindingElement() = delete; - DynamicUniformBufferBindingElement(std::shared_ptr in_buffer_ptr) + DynamicUniformBufferBindingElement(Anvil::Buffer* in_buffer_ptr) : BufferBindingElement(in_buffer_ptr) { /* Stub */ } - DynamicUniformBufferBindingElement(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_start_offset, - VkDeviceSize in_size) + DynamicUniformBufferBindingElement(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_start_offset, + VkDeviceSize in_size) : BufferBindingElement(in_buffer_ptr, in_start_offset, in_size) @@ -131,9 +130,9 @@ namespace Anvil /* Stub */ } - VkDescriptorType get_type() const + Anvil::DescriptorType get_type() const { - return VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC; + return Anvil::DescriptorType::UNIFORM_BUFFER_DYNAMIC; } }; @@ -144,15 +143,15 @@ namespace Anvil { StorageBufferBindingElement() = delete; - StorageBufferBindingElement(std::shared_ptr in_buffer_ptr) + StorageBufferBindingElement(Anvil::Buffer* in_buffer_ptr) :BufferBindingElement(in_buffer_ptr) { /* Stub */ } - StorageBufferBindingElement(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_start_offset, - VkDeviceSize in_size) + StorageBufferBindingElement(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_start_offset, + VkDeviceSize in_size) :BufferBindingElement(in_buffer_ptr, in_start_offset, in_size) @@ -160,9 +159,9 @@ namespace Anvil /* Stub */ } - VkDescriptorType get_type() const + Anvil::DescriptorType get_type() const { - return VK_DESCRIPTOR_TYPE_STORAGE_BUFFER; + return Anvil::DescriptorType::STORAGE_BUFFER; } }; @@ -173,15 +172,15 @@ namespace Anvil { UniformBufferBindingElement() = delete; - UniformBufferBindingElement(std::shared_ptr in_buffer_ptr) + UniformBufferBindingElement(Anvil::Buffer* in_buffer_ptr) :BufferBindingElement(in_buffer_ptr) { /* Stub */ } - UniformBufferBindingElement(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_start_offset, - VkDeviceSize in_size) + UniformBufferBindingElement(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_start_offset, + VkDeviceSize in_size) :BufferBindingElement(in_buffer_ptr, in_start_offset, in_size) @@ -189,9 +188,9 @@ namespace Anvil /* Stub */ } - VkDescriptorType get_type() const + Anvil::DescriptorType get_type() const { - return VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER; + return 
Anvil::DescriptorType::UNIFORM_BUFFER; } }; @@ -200,9 +199,9 @@ namespace Anvil **/ typedef struct CombinedImageSamplerBindingElement { - VkImageLayout image_layout; - std::shared_ptr image_view_ptr; - std::shared_ptr sampler_ptr; + Anvil::ImageLayout image_layout; + Anvil::ImageView* image_view_ptr; + Anvil::Sampler* sampler_ptr; /** Constructor. * @@ -212,9 +211,9 @@ namespace Anvil * it will be assumed the element corresponds to an immutable * sampler. **/ - CombinedImageSamplerBindingElement(VkImageLayout in_image_layout, - std::shared_ptr in_image_view_ptr, - std::shared_ptr in_sampler_ptr); + CombinedImageSamplerBindingElement(Anvil::ImageLayout in_image_layout, + Anvil::ImageView* in_image_view_ptr, + Anvil::Sampler* in_sampler_ptr); /** Destructor. * @@ -229,9 +228,9 @@ namespace Anvil CombinedImageSamplerBindingElement(const CombinedImageSamplerBindingElement& in); /* Returns Vulkan descriptor type for this structure */ - VkDescriptorType get_type() const + Anvil::DescriptorType get_type() const { - return VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; + return Anvil::DescriptorType::COMBINED_IMAGE_SAMPLER; } private: @@ -241,16 +240,16 @@ namespace Anvil /** Holds a single image view, along with other metadata required bound it to a specific descriptor set slot */ typedef struct ImageBindingElement { - VkImageLayout image_layout; - std::shared_ptr image_view_ptr; + Anvil::ImageLayout image_layout; + Anvil::ImageView* image_view_ptr; /** Constructor. * * @param in_image_layout Image layout to use for the binding. * @param in_image_view_ptr Image view to use for the binding. Must not be nullptr. **/ - ImageBindingElement(VkImageLayout in_image_layout, - std::shared_ptr in_image_view_ptr); + ImageBindingElement(Anvil::ImageLayout in_image_layout, + Anvil::ImageView* in_image_view_ptr); /** Copy assignment operator. * @@ -262,10 +261,10 @@ namespace Anvil * * Releases the embedded image view instance. 
**/ - ~ImageBindingElement(); + virtual ~ImageBindingElement(); /* Returns Vulkan descriptor type for this structure */ - virtual VkDescriptorType get_type() const = 0; + virtual Anvil::DescriptorType get_type() const = 0; private: ImageBindingElement& operator=(const ImageBindingElement&); @@ -278,17 +277,17 @@ namespace Anvil { InputAttachmentBindingElement() = delete; - InputAttachmentBindingElement(VkImageLayout in_image_layout, - std::shared_ptr in_image_view_ptr) + InputAttachmentBindingElement(Anvil::ImageLayout in_image_layout, + Anvil::ImageView* in_image_view_ptr) :ImageBindingElement(in_image_layout, in_image_view_ptr) { /* Stub */ } - VkDescriptorType get_type() const + Anvil::DescriptorType get_type() const { - return VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT; + return Anvil::DescriptorType::INPUT_ATTACHMENT; } }; @@ -299,17 +298,17 @@ namespace Anvil { SampledImageBindingElement() = delete; - SampledImageBindingElement(VkImageLayout in_image_layout, - std::shared_ptr in_image_view_ptr) + SampledImageBindingElement(Anvil::ImageLayout in_image_layout, + Anvil::ImageView* in_image_view_ptr) :ImageBindingElement(in_image_layout, in_image_view_ptr) { /* Stub */ } - VkDescriptorType get_type() const + Anvil::DescriptorType get_type() const { - return VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE; + return Anvil::DescriptorType::SAMPLED_IMAGE; } }; @@ -320,24 +319,24 @@ namespace Anvil { StorageImageBindingElement() = delete; - StorageImageBindingElement(VkImageLayout in_image_layout, - std::shared_ptr in_image_view_ptr) + StorageImageBindingElement(Anvil::ImageLayout in_image_layout, + Anvil::ImageView* in_image_view_ptr) :ImageBindingElement(in_image_layout, in_image_view_ptr) { /* Stub */ } - VkDescriptorType get_type() const + Anvil::DescriptorType get_type() const { - return VK_DESCRIPTOR_TYPE_STORAGE_IMAGE; + return Anvil::DescriptorType::STORAGE_IMAGE; } }; /** Holds a single sampler. Can be used to bind a sampler to a descriptor set slot **/ typedef struct SamplerBindingElement { - std::shared_ptr sampler_ptr; + Anvil::Sampler* sampler_ptr; /** Constructor. * @@ -345,7 +344,7 @@ namespace Anvil * it will be assumed the element corresponds to an immutable * sampler. **/ - SamplerBindingElement(std::shared_ptr in_sampler_ptr); + SamplerBindingElement(Anvil::Sampler* in_sampler_ptr); /** Destructor. * @@ -360,9 +359,9 @@ namespace Anvil SamplerBindingElement(const SamplerBindingElement& in); /* Returns Vulkan descriptor type for this structure */ - VkDescriptorType get_type() const + Anvil::DescriptorType get_type() const { - return VK_DESCRIPTOR_TYPE_SAMPLER; + return Anvil::DescriptorType::SAMPLER; } private: @@ -372,20 +371,20 @@ namespace Anvil /** Holds a single buffer view instance. Can be used to bind a sampler to a descriptor set slot */ typedef struct TexelBufferBindingElement { - std::shared_ptr buffer_view_ptr; + Anvil::BufferView* buffer_view_ptr; /** Constructor. * * @param in_buffer_view_ptr Buffer view to use for the binding. Must not be nullptr. * Retained. The object will be released at destruction time. **/ - TexelBufferBindingElement(std::shared_ptr in_buffer_view_ptr); + TexelBufferBindingElement(Anvil::BufferView* in_buffer_view_ptr); /** Destructor. * * Releases the embedded buffer view instance. **/ - ~TexelBufferBindingElement(); + virtual ~TexelBufferBindingElement(); /** Copy assignment operator. 
* @@ -394,7 +393,7 @@ namespace Anvil TexelBufferBindingElement(const TexelBufferBindingElement& in); /* Returns Vulkan descriptor type for this structure */ - virtual VkDescriptorType get_type() const = 0; + virtual Anvil::DescriptorType get_type() const = 0; private: TexelBufferBindingElement& operator=(const TexelBufferBindingElement&); @@ -407,15 +406,15 @@ namespace Anvil { StorageTexelBufferBindingElement() = delete; - StorageTexelBufferBindingElement(std::shared_ptr in_buffer_view_ptr) + StorageTexelBufferBindingElement(Anvil::BufferView* in_buffer_view_ptr) : TexelBufferBindingElement(in_buffer_view_ptr) { /* Stub */ } - VkDescriptorType get_type() const + Anvil::DescriptorType get_type() const { - return VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER; + return Anvil::DescriptorType::STORAGE_TEXEL_BUFFER; } }; @@ -426,15 +425,15 @@ namespace Anvil { UniformTexelBufferBindingElement() = delete; - UniformTexelBufferBindingElement(std::shared_ptr in_buffer_view_ptr) + UniformTexelBufferBindingElement(Anvil::BufferView* in_buffer_view_ptr) : TexelBufferBindingElement(in_buffer_view_ptr) { /* Stub */ } - VkDescriptorType get_type() const + Anvil::DescriptorType get_type() const { - return VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER; + return Anvil::DescriptorType::UNIFORM_TEXEL_BUFFER; } }; @@ -447,47 +446,6 @@ namespace Anvil **/ virtual ~DescriptorSet(); - /** Updates internally-maintained Vulkan descriptor set instances. - * - * @return true if the function executed successfully, false otherwise. - **/ - bool bake(); - - /** Creates a new DescriptorSet instance. - * - * @param in_device_ptr Device the descriptor set has been initialized for. - * @param in_parent_pool_ptr Pool, from which the descriptor set has been allocated from. - * Must not be nullptr. - * @param in_layout_ptr Layout which has been used at descriptor set construction time. - * @param in_descriptor_set Raw Vulkan handle the wrapper instance is being created for. - **/ - static std::shared_ptr create(std::weak_ptr in_device_ptr, - std::shared_ptr in_parent_pool_ptr, - std::shared_ptr in_layout_ptr, - VkDescriptorSet in_descriptor_set); - - /** Tells how many array items have been declared for a binding at a given index - * - * @param in_n_binding Binding index to use for the query. - * @param out_result_ptr If the function reports success, deref will be set to the query result. - * Must not be null. - * - * @return true if successful, false otherwise. - **/ - bool get_binding_array_size(uint32_t in_n_binding, - uint32_t* out_result_ptr) const; - - /** Tells the descriptor type associated to a binding at index @param in_n_binding . - * - * @param in_n_binding Binding index to use for the query. - * @param out_descriptor_type_ptr If the function reports success, deref will be set to the query result. - * Must not be null. - * - * @return true if successful, false otherwise. - */ - bool get_binding_descriptor_type(uint32_t in_n_binding, - VkDescriptorType* out_descriptor_type_ptr) const; - /** Returns properties of a combined image/sampler descriptor binding. * * @param in_n_binding Binding index to use for the query. @@ -501,11 +459,11 @@ namespace Anvil * * @return true if successful, false otherwise. 
**/ - bool get_combined_image_sampler_binding_properties(uint32_t in_n_binding, - uint32_t in_n_binding_array_item, - VkImageLayout* out_opt_image_layout_ptr, - std::shared_ptr* out_opt_image_view_ptr, - std::shared_ptr* out_opt_sampler_ptr); + bool get_combined_image_sampler_binding_properties(uint32_t in_n_binding, + uint32_t in_n_binding_array_item, + Anvil::ImageLayout* out_opt_image_layout_ptr, + Anvil::ImageView** out_opt_image_view_ptr_ptr, + Anvil::Sampler** out_opt_sampler_ptr_ptr); /** Retrieves raw Vulkan handle of the encapsulated descriptor set. * @@ -514,11 +472,11 @@ namespace Anvil * * @return As per description. **/ - VkDescriptorSet get_descriptor_set_vk() + VkDescriptorSet get_descriptor_set_vk(bool updateIfDirty = true) const { - if (m_dirty) + if (m_dirty && updateIfDirty) { - bake(); + update(); anvil_assert(!m_dirty); } @@ -527,75 +485,69 @@ namespace Anvil } /** Returns a descriptor set layout wrapper instance, assigned to the descriptor set wrapper */ - std::shared_ptr get_descriptor_set_layout() const + const Anvil::DescriptorSetLayout* get_descriptor_set_layout() const { return m_layout_ptr; } /** Returns properties of an input attachment descriptor binding. * - * @param in_n_binding Binding index to use for the query. - * @param in_n_binding_array_item Index of the array item to use for the query. - * @param out_opt_image_layout_ptr If not null, deref will be set to the image layout specified for - * the binding. May be null. - * @param out_opt_image_view_ptr If not null, deref will be set to the image view specified for - * the binding. May be null. + * @param in_n_binding Binding index to use for the query. + * @param in_n_binding_array_item Index of the array item to use for the query. + * @param out_opt_image_layout_ptr_ptr If not null, deref will be set to the image layout specified for + * the binding. May be null. + * @param out_opt_image_view_ptr_ptr If not null, deref will be set to the image view specified for + * the binding. May be null. * * @return true if successful, false otherwise. * */ - bool get_input_attachment_binding_properties(uint32_t in_n_binding, - uint32_t in_n_binding_array_item, - VkImageLayout* out_opt_image_layout_ptr, - std::shared_ptr* out_opt_image_view_ptr) const; - - /** Returns information about the number of bindings described by the descriptor set. */ - uint32_t get_n_bindings() const - { - return static_cast(m_bindings.size() ); - } + bool get_input_attachment_binding_properties(uint32_t in_n_binding, + uint32_t in_n_binding_array_item, + Anvil::ImageLayout* out_opt_image_layout_ptr_ptr, + Anvil::ImageView** out_opt_image_view_ptr_ptr) const; /** Returns properties of a sampled image descriptor binding. * - * @param in_n_binding Binding index to use for the query. - * @param in_n_binding_array_item Index of the array item to use for the query. - * @param out_opt_image_layout_ptr If not null, deref will be set to the image layout specified - * for the binding. May be null. - * @param out_opt_image_view_ptr If not null, deref will be set to the image view specified for - * the binding. May be null. + * @param in_n_binding Binding index to use for the query. + * @param in_n_binding_array_item Index of the array item to use for the query. + * @param out_opt_image_layout_ptr If not null, deref will be set to the image layout specified + * for the binding. May be null. + * @param out_opt_image_view_ptr_prt If not null, deref will be set to the image view specified for + * the binding. May be null. 
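/* A minimal usage sketch for the reworked get_descriptor_set_vk() getter above: fetching the raw
 * handle lazily flushes pending binding changes unless updateIfDirty is set to false. Assumes
 * ds_ptr is a valid Anvil::DescriptorSet* and that cmd_buffer / pipeline_layout are valid Vulkan
 * handles created elsewhere (hypothetical variables). */
VkDescriptorSet ds_vk = ds_ptr->get_descriptor_set_vk(); /* calls update() if the set is dirty */

vkCmdBindDescriptorSets(cmd_buffer,
                        VK_PIPELINE_BIND_POINT_GRAPHICS,
                        pipeline_layout,
                        0,        /* firstSet           */
                        1,        /* descriptorSetCount */
                       &ds_vk,
                        0,        /* dynamicOffsetCount */
                        nullptr); /* pDynamicOffsets    */

/* Pass false to inspect the handle without triggering an update: */
VkDescriptorSet ds_vk_raw = ds_ptr->get_descriptor_set_vk(false);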
* * @return true if successful, false otherwise. */ - bool get_sampled_image_binding_properties(uint32_t in_n_binding, - uint32_t in_n_binding_array_item, - VkImageLayout* out_opt_image_layout_ptr, - std::shared_ptr* out_opt_image_view_ptr) const + bool get_sampled_image_binding_properties(uint32_t in_n_binding, + uint32_t in_n_binding_array_item, + Anvil::ImageLayout* out_opt_image_layout_ptr, + Anvil::ImageView** out_opt_image_view_ptr_prt) const { /* Re-use existing code */ return get_input_attachment_binding_properties(in_n_binding, in_n_binding_array_item, out_opt_image_layout_ptr, - out_opt_image_view_ptr); + out_opt_image_view_ptr_prt); } /** Returns properties of a sampler descriptor binding. * * @param in_n_binding Binding index to use for the query. * @param in_n_binding_array_item Index of the array item to use for the query. - * @param out_opt_sampler_ptr If not null, deref will be set to the sampler specified for + * @param out_sampler_ptr_ptr If not null, deref will be set to the sampler specified for * the binding. May be null. * * @return true if successful, false otherwise. */ - bool get_sampler_binding_properties(uint32_t in_n_binding, - uint32_t in_n_binding_array_item, - std::shared_ptr* out_sampler_ptr) const; + bool get_sampler_binding_properties(uint32_t in_n_binding, + uint32_t in_n_binding_array_item, + Anvil::Sampler** out_sampler_ptr_ptr) const; /** Returns properties of a storage buffer descriptor binding. * * @param in_n_binding Binding index to use for the query. * @param in_n_binding_array_item Index of the array item to use for the query. - * @param out_opt_buffer_ptr If not null, deref will be set to the buffer specified for + * @param out_opt_buffer_ptr_ptr If not null, deref will be set to the buffer specified for * the binding. May be null. * @param out_opt_size_ptr If not null, deref will be set to the size of the buffer * memory region associated with the binding. May be null. @@ -605,53 +557,53 @@ namespace Anvil * * @return true if successful, false otherwise. */ - bool get_storage_buffer_binding_properties(uint32_t in_n_binding, - uint32_t in_n_binding_array_item, - std::shared_ptr* out_opt_buffer_ptr, - VkDeviceSize* out_opt_size_ptr, - VkDeviceSize* out_opt_start_offset_ptr) const; + bool get_storage_buffer_binding_properties(uint32_t in_n_binding, + uint32_t in_n_binding_array_item, + Anvil::Buffer** out_opt_buffer_ptr_ptr, + VkDeviceSize* out_opt_size_ptr, + VkDeviceSize* out_opt_start_offset_ptr) const; /** Returns properties of a storage image descriptor binding. * - * @param in_n_binding Binding index to use for the query. - * @param in_n_binding_array_item Index of the array item to use for the query. - * @param out_opt_image_layout_ptr If not null, deref will be set to the image layout declared for - * the binding. May be null. - * @param out_opt_image_view_ptr If not null, deref will be set to the image view specified for - * the binding. May be null. + * @param in_n_binding Binding index to use for the query. + * @param in_n_binding_array_item Index of the array item to use for the query. + * @param out_opt_image_layout_ptr If not null, deref will be set to the image layout declared for + * the binding. May be null. + * @param out_opt_image_view_ptr_ptr If not null, deref will be set to the image view specified for + * the binding. May be null. * * @return true if successful, false otherwise. 
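/* A minimal query sketch for the getters above, assuming ds_ptr is a valid Anvil::DescriptorSet*
 * whose layout declares a COMBINED_IMAGE_SAMPLER descriptor at binding 0. Out arguments now
 * receive raw Anvil pointers instead of shared_ptr instances; each out pointer may be null if the
 * corresponding property is not needed. */
Anvil::ImageLayout image_layout   = Anvil::ImageLayout::UNKNOWN;
Anvil::ImageView*  image_view_ptr = nullptr;
Anvil::Sampler*    sampler_ptr    = nullptr;

if (ds_ptr->get_combined_image_sampler_binding_properties(0, /* in_n_binding            */
                                                          0, /* in_n_binding_array_item */
                                                         &image_layout,
                                                         &image_view_ptr,
                                                         &sampler_ptr) )
{
    /* The returned pointers refer to the objects currently assigned to the binding. */
}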
*/ - bool get_storage_image_binding_properties(uint32_t in_n_binding, - uint32_t in_n_binding_array_item, - VkImageLayout* out_opt_image_layout_ptr, - std::shared_ptr* out_opt_image_view_ptr) const + bool get_storage_image_binding_properties(uint32_t in_n_binding, + uint32_t in_n_binding_array_item, + Anvil::ImageLayout* out_opt_image_layout_ptr, + Anvil::ImageView** out_opt_image_view_ptr_ptr) const { /* Re-use existing code */ return get_input_attachment_binding_properties(in_n_binding, in_n_binding_array_item, out_opt_image_layout_ptr, - out_opt_image_view_ptr); + out_opt_image_view_ptr_ptr); } /** Returns properties of a storage texel buffer descriptor binding. * - * @param in_n_binding Binding index to use for the query. - * @param in_n_binding_array_item Index of the array item to use for the query. - * @param out_opt_buffer_view_ptr If not null, deref will be set to the buffer view specified for - * the binding. May be null. + * @param in_n_binding Binding index to use for the query. + * @param in_n_binding_array_item Index of the array item to use for the query. + * @param out_opt_buffer_view_ptr_ptr If not null, deref will be set to the buffer view specified for + * the binding. May be null. * * @return true if successful, false otherwise. */ - bool get_storage_texel_buffer_binding_properties(uint32_t in_n_binding, - uint32_t in_n_binding_array_item, - std::shared_ptr* out_opt_buffer_view_ptr) const; + bool get_storage_texel_buffer_binding_properties(uint32_t in_n_binding, + uint32_t in_n_binding_array_item, + Anvil::BufferView** out_opt_buffer_view_ptr_ptr) const; /** Returns properties of a uniform buffer descriptor binding. * * @param in_n_binding Binding index to use for the query. * @param in_n_binding_array_item Index of the array item to use for the query. - * @param out_opt_buffer_ptr If not null, deref will be set to the buffer specified for + * @param out_opt_buffer_ptr_ptr If not null, deref will be set to the buffer specified for * the binding. May be null. * @param out_opt_size_ptr If not null, deref will be set to size of the buffer memory region * declared for the binding. May be null. @@ -660,37 +612,37 @@ namespace Anvil * * @return true if successful, false otherwise. */ - bool get_uniform_buffer_binding_properties(uint32_t in_n_binding, - uint32_t in_n_binding_array_item, - std::shared_ptr* out_opt_buffer_ptr, - VkDeviceSize* out_opt_size_ptr, - VkDeviceSize* out_opt_start_offset_ptr) const + bool get_uniform_buffer_binding_properties(uint32_t in_n_binding, + uint32_t in_n_binding_array_item, + Anvil::Buffer** out_opt_buffer_ptr_ptr, + VkDeviceSize* out_opt_size_ptr, + VkDeviceSize* out_opt_start_offset_ptr) const { /* Re-use existing code */ return get_storage_buffer_binding_properties(in_n_binding, in_n_binding_array_item, - out_opt_buffer_ptr, + out_opt_buffer_ptr_ptr, out_opt_size_ptr, out_opt_start_offset_ptr); } /** Returns properties of a uniform texel buffer descriptor binding. * - * @param in_n_binding Binding index to use for the query. - * @param in_n_binding_array_item Index of the array item to use for the query. - * @param out_opt_buffer_view_ptr If not null, deref will be set to the buffer view specified for - * the binding. May be null. + * @param in_n_binding Binding index to use for the query. + * @param in_n_binding_array_item Index of the array item to use for the query. + * @param out_opt_buffer_view_ptr_ptr If not null, deref will be set to the buffer view specified for + * the binding. May be null. 
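/* A corresponding sketch for buffer-type bindings, assuming binding 1 of the same descriptor set
 * holds a UNIFORM_BUFFER descriptor. */
Anvil::Buffer* buffer_ptr   = nullptr;
VkDeviceSize   size         = 0;
VkDeviceSize   start_offset = 0;

if (ds_ptr->get_uniform_buffer_binding_properties(1, /* in_n_binding            */
                                                  0, /* in_n_binding_array_item */
                                                 &buffer_ptr,
                                                 &size,
                                                 &start_offset) )
{
    /* [start_offset, start_offset + size) is the bound region of *buffer_ptr. */
}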
* * @return true if successful, false otherwise. */ - bool get_uniform_texel_buffer_binding_properties(uint32_t in_n_binding, - uint32_t in_n_binding_array_item, - std::shared_ptr* out_opt_buffer_view_ptr) const + bool get_uniform_texel_buffer_binding_properties(uint32_t in_n_binding, + uint32_t in_n_binding_array_item, + Anvil::BufferView** out_opt_buffer_view_ptr_ptr) const { /* Re-use existing code */ return get_storage_texel_buffer_binding_properties(in_n_binding, in_n_binding_array_item, - out_opt_buffer_view_ptr); + out_opt_buffer_view_ptr_ptr); } /** This function should be set to assign physical Vulkan objects to a descriptor binding @@ -713,6 +665,8 @@ namespace Anvil * UniformBufferBindingElement - for uniform buffer bindings. * UniformTexelBufferBindingElement - for storage uniform buffer bindings. * + * This function CANNOT be used for inline uniform block binding updates. Instead, please use set_inline_uniform_block_binding_data(). + * * @param in_binding_index As per documentation. Must correspond to a binding which has earlier * been added by calling add_binding() function. * @param in_element_range As per documentation. Must not be equal to or larger than the array size, @@ -724,21 +678,60 @@ namespace Anvil template bool set_binding_array_items(BindingIndex in_binding_index, BindingElementArrayRange in_element_range, - const BindingElementType* in_elements) + const BindingElementType* in_elements_ptr) + { + anvil_assert(in_elements_ptr != nullptr); + anvil_assert(m_unusable == false); + + BindingItemUniquePtrs& binding_item_ptrs = m_binding_ptrs[in_binding_index]; + const uint32_t last_element_index = in_element_range.second + in_element_range.first; + + for (BindingElementIndex current_element_index = in_element_range.first; + current_element_index < last_element_index; + ++current_element_index) + { + if (!( binding_item_ptrs[current_element_index] != nullptr && + *binding_item_ptrs[current_element_index] == in_elements_ptr[current_element_index - in_element_range.first]) ) + { + m_dirty = true; + + binding_item_ptrs[current_element_index].reset( + new Anvil::DescriptorSet::BindingItem() + ); + + *binding_item_ptrs[current_element_index] = in_elements_ptr[current_element_index - in_element_range.first]; + } + } + + return true; + } + + /* TODO + * + * NOTE: This function CANNOT be used for inline uniform block binding updates. Instead, please use set_inline_uniform_block_binding_data(). 
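/* A sketch of the contiguous-array overload defined above. Assumes ds_ptr is a valid
 * Anvil::DescriptorSet*, that image_view0_ptr / image_view1_ptr are valid Anvil::ImageView*
 * instances, that BindingElementArrayRange is the (first array item, number of items) pair used by
 * the loop above, and that Anvil::ImageLayout::SHADER_READ_ONLY_OPTIMAL mirrors the corresponding
 * Vulkan layout (assumed enum value). */
const Anvil::SampledImageBindingElement elements[] =
{
    Anvil::SampledImageBindingElement(Anvil::ImageLayout::SHADER_READ_ONLY_OPTIMAL, image_view0_ptr),
    Anvil::SampledImageBindingElement(Anvil::ImageLayout::SHADER_READ_ONLY_OPTIMAL, image_view1_ptr)
};

/* Writes array items 0 and 1 of binding 2; the set is only marked dirty for items that actually change. */
ds_ptr->set_binding_array_items(2, /* in_binding_index */
                                Anvil::BindingElementArrayRange(0,  /* first array item */
                                                                2), /* number of items  */
                                elements);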
+ */ + template + bool set_binding_array_items(BindingIndex in_binding_index, + BindingElementArrayRange in_element_range, + const BindingElementType* const* in_elements_ptr_ptr) { - anvil_assert(in_elements != nullptr); - anvil_assert(m_unusable == false); + anvil_assert(in_elements_ptr_ptr != nullptr); + anvil_assert(m_unusable == false); - BindingItems& binding_items = m_bindings[in_binding_index]; - const uint32_t last_element_index = in_element_range.second + in_element_range.first; + BindingItemUniquePtrs& binding_item_ptrs = m_binding_ptrs[in_binding_index]; + const uint32_t last_element_index = in_element_range.second + in_element_range.first; for (BindingElementIndex current_element_index = in_element_range.first; current_element_index < last_element_index; ++current_element_index) { - m_dirty |= !(binding_items[current_element_index] == in_elements[current_element_index - in_element_range.first]); + m_dirty |= !(*binding_item_ptrs[current_element_index] == *in_elements_ptr_ptr[current_element_index - in_element_range.first]); - binding_items[current_element_index] = in_elements[current_element_index - in_element_range.first]; + binding_item_ptrs[current_element_index].reset( + new Anvil::DescriptorSet::BindingItem() + ); + + *binding_item_ptrs[current_element_index] = *in_elements_ptr_ptr[current_element_index - in_element_range.first]; } return true; @@ -746,6 +739,9 @@ namespace Anvil /** This function works exactly like set_binding_array_items(), except that it always replaces the zeroth element * attached to the specified descriptor set's binding. + * + * NOTE: This function CANNOT be used for inline uniform block binding updates. Instead, please use set_inline_uniform_block_binding_data(). + * */ template bool set_binding_item(BindingIndex in_binding_index, @@ -757,18 +753,35 @@ namespace Anvil &in_element); } - /** Assigns a new Vulkan descriptor set handle to the wrapper instance. + /** TODO + * + * NOTE: Multiple update requests for the same inline uniform block binding are supported and will be executed via consecutive API calls. + * + * Requires VK_EXT_inline_uniform_block. + * + * @param in_binding_index Index of the inline uniform block binding to use for the update. + * @param in_start_offset Start offset of the inline uniform block's memory region which should be updated. Must be a mul of 4. + * @param in_size Size of the inline uniform block's memory region to update. Must be a mul of 4. + * @param in_raw_data_ptr Data to use for the update. Whether or not the data is cached internally depends on value passed to + * @param in_should_cache_raw_data. Must not be nullptr. + * @param in_should_cache_raw_data True if data provided via @param in_raw_data_ptr should be cached internally, false otherwise. In other + * words, if this argument is set to true, it is safe to release memory block, to which @param in_raw_data_ptr points. + * + * @return true if successful, false otherwise. + */ + bool set_inline_uniform_block_binding_data(const BindingIndex& in_binding_index, + const uint32_t& in_start_offset, + const uint32_t& in_size, + const void* in_raw_data_ptr, + const bool& in_should_cache_raw_data); + + /** Updates internally-maintained Vulkan descriptor set instances. * - * This function should only be used internally. Its purpose is to introduce support for "recycling" of - * deprecated descriptor sets. When a descriptor set pool is reset or the descriptor set layout is adjusted, - * Descriptor Set's Vulkan handle may become obsolete. 
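/* Two follow-up sketches, assuming ds_ptr is a valid Anvil::DescriptorSet* whose layout defines a
 * SAMPLER descriptor at binding 0 and an inline uniform block at binding 3 (the latter requires
 * VK_EXT_inline_uniform_block); sampler_ptr is a valid Anvil::Sampler*. */

/* 1. Replace the zeroth array item of a regular binding: */
ds_ptr->set_binding_item(0, /* in_binding_index */
                         Anvil::SamplerBindingElement(sampler_ptr) );

/* 2. Schedule an inline uniform block write; start offset and size must be multiples of 4. With
 *    in_should_cache_raw_data set to true the bytes are copied internally, so the source array may
 *    be released right after the call. */
const float constants[4] = {1.0f, 0.0f, 0.0f, 1.0f};

ds_ptr->set_inline_uniform_block_binding_data(3,                 /* in_binding_index */
                                              0,                 /* in_start_offset  */
                                              sizeof(constants), /* in_size          */
                                              constants,
                                              true);             /* in_should_cache_raw_data */

/* Both changes are flushed by the next update() or get_descriptor_set_vk() call. */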
When that happens, this function can be called to - * "revive" the object by assigning it a new handle, at which point the object becomes usable again. - * Furthermore, all cached binding information will be automatically writtne to the descriptor set at - * next baking time. + * @param in_update_method Please see DescriptorSetUpdateMethod documentation for more details. * - * @param in_ds New Vulkan handle to use. Must not be VK_NULL_HANDLE. + * @return true if the function executed successfully, false otherwise. **/ - void set_new_vk_handle(VkDescriptorSet in_ds); + bool update(const DescriptorSetUpdateMethod& in_update_method = Anvil::DescriptorSetUpdateMethod::CORE) const; private: /* Private type declarations */ @@ -780,14 +793,16 @@ namespace Anvil **/ typedef struct BindingItem { - std::shared_ptr buffer_ptr; - std::shared_ptr buffer_view_ptr; - VkImageLayout image_layout; - std::shared_ptr image_view_ptr; - std::shared_ptr sampler_ptr; - VkDeviceSize size; - VkDeviceSize start_offset; - VkDescriptorType type_vk; + Anvil::Buffer* buffer_ptr; + Anvil::BufferView* buffer_view_ptr; + Anvil::ImageLayout image_layout; + Anvil::ImageView* image_view_ptr; + std::unique_ptr > iub_update_data_ptr; + Anvil::Sampler* sampler_ptr; + VkDeviceSize size; + VkDeviceSize start_offset; + Anvil::DescriptorType type_vk; + bool dirty; @@ -795,69 +810,57 @@ namespace Anvil { return (buffer_ptr == in.buffer_ptr && size == in.size && - start_offset == in.start_offset); + start_offset == in.start_offset && + type_vk == in.get_type() ); } bool operator==(const CombinedImageSamplerBindingElement& in) const { return (image_layout == in.image_layout && image_view_ptr == in.image_view_ptr && - sampler_ptr == in.sampler_ptr); + sampler_ptr == in.sampler_ptr && + type_vk == in.get_type() ); } bool operator==(const ImageBindingElement& in) const { return (image_layout == in.image_layout && - image_view_ptr == in.image_view_ptr); + image_view_ptr == in.image_view_ptr && + type_vk == in.get_type() ); } bool operator==(const SamplerBindingElement& in) const { - return (sampler_ptr == in.sampler_ptr); + return (sampler_ptr == in.sampler_ptr && + type_vk == in.get_type() ); } bool operator==(const TexelBufferBindingElement& in) const { - return (buffer_view_ptr == in.buffer_view_ptr); + return (buffer_view_ptr == in.buffer_view_ptr && + type_vk == in.get_type() ); } - /* Copy assignment operator. - * - * Retains the buffer instance embedded in @param in_element - **/ - BindingItem& operator=(const BufferBindingElement& in_element); - - /* Copy assignment operator. - * - * Retains the image view & sampler instances embedded in @param in_element - **/ + BindingItem& operator=(const BufferBindingElement& in_element); BindingItem& operator=(const CombinedImageSamplerBindingElement& in_element); - - /* Copy assignment operator. - * - * Retains the image view instance embedded in @param in_element - **/ - BindingItem& operator=(const ImageBindingElement& in_element); - - /* Copy assignment operator. - * - * Retains the sampler instance embedded in @param in_element - **/ - BindingItem& operator=(const SamplerBindingElement& in_element); - - /* Copy assignment operator. 
- * - * Retains the buffer view instance embedded in @param in_element - **/ - BindingItem& operator=(const TexelBufferBindingElement& in_element); + BindingItem& operator=(const ImageBindingElement& in_element); + BindingItem& operator=(const SamplerBindingElement& in_element); + BindingItem& operator=(const TexelBufferBindingElement& in_element); /* Default dummy constructor. */ BindingItem() { dirty = false; - image_layout = VK_IMAGE_LAYOUT_MAX_ENUM; + image_layout = Anvil::ImageLayout::UNKNOWN; size = 0; start_offset = 0; + + buffer_ptr = nullptr; + buffer_view_ptr = nullptr; + image_view_ptr = nullptr; + sampler_ptr = nullptr; + + type_vk = Anvil::DescriptorType::UNKNOWN; } /* Destructor. @@ -866,39 +869,77 @@ namespace Anvil ~BindingItem(); } BindingItem; - typedef std::vector BindingItems; - typedef std::map BindingIndexToBindingItemsMap; + typedef std::unique_ptr BindingItemUniquePtr; + typedef std::vector BindingItemUniquePtrs; + typedef std::map BindingIndexToBindingItemUniquePtrsMap; /* Private functions */ /** Please see create() documentation for argument discussion */ - DescriptorSet(std::weak_ptr in_device_ptr, - std::shared_ptr in_parent_pool_ptr, - std::shared_ptr in_layout_ptr, - VkDescriptorSet in_descriptor_set); + DescriptorSet(const Anvil::BaseDevice* in_device_ptr, + Anvil::DescriptorPool* in_parent_pool_ptr, + const Anvil::DescriptorSetLayout* in_layout_ptr, + VkDescriptorSet in_descriptor_set, + bool in_mt_safe); + + /** Creates a new DescriptorSet instance. + * + * @param in_device_ptr Device the descriptor set has been initialized for. + * @param in_parent_pool_ptr Pool, from which the descriptor set has been allocated from. + * Must not be nullptr. + * @param in_layout_ptr Layout which has been used at descriptor set construction time. + * @param in_descriptor_set Raw Vulkan handle the wrapper instance is being created for. + **/ + static DescriptorSetUniquePtr create(const Anvil::BaseDevice* in_device_ptr, + Anvil::DescriptorPool* in_parent_pool_ptr, + const Anvil::DescriptorSetLayout* in_layout_ptr, + VkDescriptorSet in_descriptor_set, + MTSafety in_mt_safety = Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE); DescriptorSet (const DescriptorSet&); DescriptorSet& operator=(const DescriptorSet&); - void on_binding_added_to_layout(); - void on_parent_pool_reset (); - - void alloc_bindings(); + void alloc_bindings (); + void fill_buffer_info_vk_descriptor(const Anvil::DescriptorSet::BindingItem& in_binding_item, + VkDescriptorBufferInfo* out_descriptor_ptr) const; + void fill_image_info_vk_descriptor (const Anvil::DescriptorSet::BindingItem& in_binding_item, + const bool& in_immutable_samplers_enabled, + VkDescriptorImageInfo* out_descriptor_ptr) const; + void fill_iub_vk_descriptor (const Anvil::DescriptorSet::BindingItem& in_binding_item, + VkWriteDescriptorSetInlineUniformBlockEXT* out_descriptor_ptr) const; + void on_parent_pool_reset (); + bool update_using_core_method () const; + bool update_using_template_method () const; /* Private variables */ - BindingIndexToBindingItemsMap m_bindings; - VkDescriptorSet m_descriptor_set; - std::weak_ptr m_device_ptr; - bool m_dirty; - std::shared_ptr m_layout_ptr; - bool m_unusable; - std::shared_ptr m_parent_pool_ptr; - - std::vector m_cached_ds_info_buffer_info_items_vk; - std::vector m_cached_ds_info_image_info_items_vk; - std::vector m_cached_ds_info_texel_buffer_info_items_vk; - std::vector m_cached_ds_write_items_vk; + + /* For descriptor types != INLINE_UNIFORM_BLOCK, this is the usual binding index->binding item map. 
Holds bindings associated + * with corresponding array items. + * + * For INLINE_UNIFORM_BLOCK descriptors, this is a binding index->pending updates map. + */ + mutable BindingIndexToBindingItemUniquePtrsMap m_binding_ptrs; + + + VkDescriptorSet m_descriptor_set; + const Anvil::BaseDevice* m_device_ptr; + mutable bool m_dirty; + const Anvil::DescriptorSetLayout* m_layout_ptr; + Anvil::DescriptorPool* m_parent_pool_ptr; + bool m_unusable; + + mutable std::vector m_cached_ds_info_buffer_info_items_vk; + mutable std::vector m_cached_ds_info_image_info_items_vk; + mutable std::vector m_cached_ds_info_texel_buffer_info_items_vk; + mutable std::vector m_cached_ds_write_iub_items_vk; + mutable std::vector m_cached_ds_write_items_vk; + + mutable std::vector m_template_entries; + mutable std::map, Anvil::DescriptorUpdateTemplateUniquePtr> m_template_object_map; + mutable std::vector m_template_raw_data; + + friend class Anvil::DescriptorPool; }; }; -#endif /* WRAPPERS_DESCRIPTOR_SET_H */ \ No newline at end of file +#endif /* WRAPPERS_DESCRIPTOR_SET_H */ diff --git a/include/wrappers/descriptor_set_group.h b/include/wrappers/descriptor_set_group.h index 5e66f669..4cf4c0a1 100644 --- a/include/wrappers/descriptor_set_group.h +++ b/include/wrappers/descriptor_set_group.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -51,12 +51,35 @@ #ifndef WRAPPERS_DESCRIPTOR_SET_GROUP_H #define WRAPPERS_DESCRIPTOR_SET_GROUP_H +#include "misc/descriptor_set_create_info.h" +#include "misc/mt_safety.h" #include "misc/types.h" #include "wrappers/descriptor_set.h" +#include "wrappers/descriptor_set_layout.h" +#include namespace Anvil { - class DescriptorSetGroup + typedef struct OverheadAllocation + { + Anvil::DescriptorType descriptor_type; + uint32_t n_overhead_allocations; + + OverheadAllocation() + { + descriptor_type = Anvil::DescriptorType::UNKNOWN; + n_overhead_allocations = UINT32_MAX; + } + + OverheadAllocation(Anvil::DescriptorType in_descriptor_type, + uint32_t in_n_overhead_allocations) + { + descriptor_type = in_descriptor_type; + n_overhead_allocations = in_n_overhead_allocations; + } + } OverheadAllocation; + + class DescriptorSetGroup : public MTSafetySupportProvider { public: /* Public functions */ @@ -64,33 +87,6 @@ namespace Anvil /** Destructor */ virtual ~DescriptorSetGroup(); - /** Adds a new descriptor set binding to the DSG. This lets you attach one or more descriptors - * to the binding's individual array items by calling set_binding() later on. - * - * This call invalidates internally-maintained Vulkan DS and DS layout instances. - * - * @param in_n_set Index of the descriptor set the new binding should be created for. - * This number must not be equal to or larger than the number of sets - * specified for the DSG at creation time. - * @param in_binding Index of the binding to create. This index must not have been used earlier - * to create another binding. - * @param in_type Type of descriptor(s), which are going to be used to configure the binding. - * @param in_n_elements Binding array's size. Must be at least 1. - * @param in_shader_stages A bitfield combination of shader stage bits, telling which shader stages - * this binding is going to be used for. 
- * @param in_opt_immutable_sampler_ptrs If not nullptr, an array of @param in_n_elements samplers should - * be passed. The binding will then be considered immutable, as per spec language. - * May be nullptr. - * - * @return true if the function executed successfully, false otherwise. - ***/ - bool add_binding(uint32_t in_n_set, - uint32_t in_binding, - VkDescriptorType in_type, - uint32_t in_n_elements, - VkShaderStageFlags in_shader_stages, - const std::shared_ptr* in_opt_immutable_sampler_ptrs = nullptr); - /** Creates a new DescriptorSetGroup instance. * * Apart from the usual stuff, this function also preallocates memory for a number of @@ -102,15 +98,18 @@ namespace Anvil * takes a ptr to DescriptorSetGroup instance, causing objects created in such fashion to treat the * specified DescriptorSetGroup instance as a parent. * - * @param in_device_ptr Device to use. - * @param in_releaseable_sets true if the created VkDescriptorSet instances should be releaseable - * to the internal descriptor pool by invoking vkFreeDescriptorSets(). - * false otherwise. - * @param in_n_sets Number of descriptor sets this instance should store information for. + * @param in_device_ptr Device to use. + * @param in_ds_create_info_ptrs TODO. + * @param in_descriptor_pool_create_flags Create flags to specify when creating a descriptor pool for the DSG. + * @param in_mt_safety MT safety setting for the created object. + * @param in_opt_overhead_allocations Extra allocations to request when creating a descriptor pool for the DSG. */ - static std::shared_ptr create(std::weak_ptr in_device_ptr, - bool in_releaseable_sets, - uint32_t in_n_sets); + static Anvil::DescriptorSetGroupUniquePtr create(const Anvil::BaseDevice* in_device_ptr, + std::vector& in_ds_create_info_ptrs, + const Anvil::DescriptorPoolCreateFlags& in_descriptor_pool_create_flags = Anvil::DescriptorPoolCreateFlagBits::NONE, + MTSafety in_mt_safety = Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE, + const std::vector& in_opt_overhead_allocations = std::vector() ); + /** Creates a new DescriptorSetGroup instance. * @@ -118,11 +117,9 @@ namespace Anvil * to re-use layout of another DSG. This is useful if you'd like to re-use the same layout with * a different combination of descriptor sets. * - * @param in_parent_dsg_ptr Pointer to a DSG without a parent. Must not be nullptr. - * @param in_releaseable_sets See the documentation above for more details. + * @param in_parent_dsg_ptr Pointer to a DSG without a parent. Must not be nullptr. **/ - static std::shared_ptr create(std::shared_ptr in_parent_dsg_ptr, - bool in_releaseable_sets); + static DescriptorSetGroupUniquePtr create(const DescriptorSetGroup* in_parent_dsg_ptr); /** Retrieves a Vulkan instance of the descriptor set, as configured for the DSG instance's set * at index @param in_n_set. @@ -134,10 +131,13 @@ namespace Anvil * * @return Pointer to the requested Anvil::DescriptorSet instance. **/ - std::shared_ptr get_descriptor_set(uint32_t in_n_set); + Anvil::DescriptorSet* get_descriptor_set(uint32_t in_n_set); + + DescriptorPool *get_descriptor_pool() { return m_descriptor_pool_ptr.get(); } + const DescriptorPool *get_descriptor_pool() const { return m_descriptor_pool_ptr.get(); } - /** Returns a descriptor set binding index for a descriptor set at index @param in_n_set. 
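/* A small sketch for the OverheadAllocation helper and the in_opt_overhead_allocations argument of
 * DescriptorSetGroup::create() above: each entry asks the DSG's internal descriptor pool to set
 * aside extra descriptors of the given type on top of what the provided layouts strictly require. */
std::vector<Anvil::OverheadAllocation> overhead_allocs;

overhead_allocs.push_back(Anvil::OverheadAllocation(Anvil::DescriptorType::COMBINED_IMAGE_SAMPLER, 16) );
overhead_allocs.push_back(Anvil::OverheadAllocation(Anvil::DescriptorType::UNIFORM_BUFFER,          8) );

/* The vector is then passed as the last argument of Anvil::DescriptorSetGroup::create(). */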
*/ - uint32_t get_descriptor_set_binding_index(uint32_t in_n_set) const; + const std::vector* get_descriptor_set_create_info() const; + const Anvil::DescriptorSetCreateInfo* get_descriptor_set_create_info(uint32_t in_n_set) const; /** Retrieves a Vulkan instace of the descriptor set layout, as configured for the DSG instance's * set at index @param in_n_set. @@ -146,19 +146,18 @@ namespace Anvil * * @return Requested Anvil::DescriptorSetLayout instance. */ - std::shared_ptr get_descriptor_set_layout(uint32_t in_n_set); + Anvil::DescriptorSetLayout* get_descriptor_set_layout(uint32_t in_n_set) const; /** Returns the total number of added descriptor sets. * - * Bear in mind that descriptor set bindings may not form a continuous range set. - * For example, if this function returns 2, it doesn't mean that the defined DS bindings - * can only use indices 0, and 1. What binding indices are used specifically for descriptor - * sets at a given index can be checked by calling get_descriptor_set_binding_index() for - * a value of <0, (value returned by this func - 1)>. + * NOTE: Descriptor set bindings need not form a continuous range set. For instance, even if + * this function returns 3, get_descriptor_set() may return nullptr for set at index 1, + * if no layout info has been provided for this set index at creation time. + * **/ - uint32_t get_n_of_descriptor_sets() const + uint32_t get_n_descriptor_sets() const { - return (uint32_t) m_descriptor_sets.size(); + return static_cast(m_ds_create_info_ptrs.size() ); } /** This function should be set to assign physical Vulkan objects to a descriptor binding @@ -194,24 +193,86 @@ namespace Anvil bool set_binding_array_items(uint32_t in_n_set, BindingIndex in_binding_index, BindingElementArrayRange in_element_range, - const BindingElementType* in_elements) + const BindingElementType* in_elements_ptr) { anvil_assert(m_descriptor_sets.find(in_n_set) != m_descriptor_sets.end() ); - if (m_descriptor_sets[in_n_set].descriptor_set_ptr == nullptr) + if (m_descriptor_sets[in_n_set]->descriptor_set_ptr == nullptr) { bake_descriptor_sets(); - anvil_assert(m_descriptor_sets[in_n_set].descriptor_set_ptr != nullptr); + anvil_assert(m_descriptor_sets[in_n_set]->descriptor_set_ptr != nullptr); } - m_descriptor_sets[in_n_set].descriptor_set_ptr->set_binding_array_items(in_binding_index, - in_element_range, - in_elements); + m_descriptor_sets[in_n_set]->descriptor_set_ptr->set_binding_array_items(in_binding_index, + in_element_range, + in_elements_ptr); return true; } + template + bool set_binding_array_items(uint32_t in_n_set, + BindingIndex in_binding_index, + BindingElementArrayRange in_element_range, + const BindingElementType* const* in_elements_ptr_ptr) + { + anvil_assert(m_descriptor_sets.find(in_n_set) != m_descriptor_sets.end() ); + + if (m_descriptor_sets[in_n_set]->descriptor_set_ptr == nullptr) + { + bake_descriptor_sets(); + + anvil_assert(m_descriptor_sets[in_n_set]->descriptor_set_ptr != nullptr); + } + + m_descriptor_sets[in_n_set]->descriptor_set_ptr->set_binding_array_items(in_binding_index, + in_element_range, + in_elements_ptr_ptr); + + return true; + } + + /** TODO + * + * NOTE: Do NOT schedule multiple updates for overlapping inline uniform block memory regions without a bake operation in-between. Ignoring + * this requirement results in undefined behavior. + * + * Requires VK_EXT_inline_uniform_block. + * + * @param in_binding_index Index of the inline uniform block binding to use for the update. 
+ * @param in_start_offset Start offset of the inline uniform block's memory region which should be updated. Must be a mul of 4. + * @param in_size Size of the inline uniform block's memory region to update. Must be a mul of 4. + * @param in_raw_data_ptr Data to use for the update. Whether or not the data is cached internally depends on value passed to + * @param in_should_cache_raw_data. Must not be nullptr. + * @param in_should_cache_raw_data True if data provided via @param in_raw_data_ptr should be cached internally, false otherwise. In other + * words, if this argument is set to true, it is safe to release memory block, to which @param in_raw_data_ptr points. + * + * @return true if successful, false otherwise. + */ + bool set_inline_uniform_block_binding_data(const uint32_t& in_n_set, + const BindingIndex& in_binding_index, + const uint32_t& in_start_offset, + const uint32_t& in_size, + const void* in_raw_data_ptr, + const bool& in_should_cache_raw_data) + { + anvil_assert(m_descriptor_sets.find(in_n_set) != m_descriptor_sets.end() ); + + if (m_descriptor_sets[in_n_set]->descriptor_set_ptr == nullptr) + { + bake_descriptor_sets(); + + anvil_assert(m_descriptor_sets[in_n_set]->descriptor_set_ptr != nullptr); + } + + return m_descriptor_sets[in_n_set]->descriptor_set_ptr->set_inline_uniform_block_binding_data(in_binding_index, + in_start_offset, + in_size, + in_raw_data_ptr, + in_should_cache_raw_data); + } + /** This function works exactly like set_bindings(), except that it always replaces the zeroth element * attached to the specified descriptor set's binding. */ @@ -227,84 +288,72 @@ namespace Anvil &in_element); } - /** Configures how many overhead allocations should be requested from the descriptor pool. - * - * @param in_descriptor_type Descriptor type to increase the number of requested descriptor allocations for. - * @param in_n_overhead_allocations Value specifying how many additional allocations should be made. 0 by default. - **/ - void set_descriptor_pool_overhead_allocations(VkDescriptorType in_descriptor_type, - uint32_t in_n_overhead_allocations); - private: /* Private type declarations */ /* Encapsulates all info related to a single descriptor set */ - typedef struct DescriptorSetInfo + typedef struct DescriptorSetInfoContainer { - std::shared_ptr descriptor_set_ptr; - std::shared_ptr layout_ptr; + Anvil::DescriptorSetUniquePtr descriptor_set_ptr; + Anvil::DescriptorSetLayoutUniquePtr layout_ptr; /* Dummy constructor */ - DescriptorSetInfo() + DescriptorSetInfoContainer() { /* Stub */ } - /** Copy constructor. - * - * Retains non-null descriptor set and layout wrapper instances. - **/ - DescriptorSetInfo(const DescriptorSetInfo& in); - - /** Assignment operator. - * - * Retains non-null descriptor set and layout wrapper instances. - **/ - DescriptorSetInfo& operator=(const DescriptorSetInfo& in); - /** Deinitializes a DescriptorSet instance by releasing the Vulkan descriptor set layout object, * if it's not nullptr. * * The function does not release the VkDescriptorSet instance it also holds. This object * should be released by the DescriptorSetGroup destructor instead. 
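/* A per-set sketch for the DescriptorSetGroup variants above; the only difference from the
 * DescriptorSet-level calls is the leading set index. Assumes dsg_ptr is a valid
 * Anvil::DescriptorSetGroup* whose set 0 defines a STORAGE_IMAGE descriptor at binding 1,
 * storage_image_view_ptr is a valid Anvil::ImageView*, and Anvil::ImageLayout::GENERAL mirrors
 * VK_IMAGE_LAYOUT_GENERAL (assumed enum value). */
dsg_ptr->set_binding_item(0, /* in_n_set         */
                          1, /* in_binding_index */
                          Anvil::StorageImageBindingElement(Anvil::ImageLayout::GENERAL,
                                                            storage_image_view_ptr) );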
*/ - ~DescriptorSetInfo(); - } DescriptorSetInfo; + ~DescriptorSetInfoContainer(); + } DescriptorSetInfoContainer; + + typedef struct DescriptorTypeProperties + { + uint32_t n_overhead_allocations; + uint32_t pool_size; + + DescriptorTypeProperties() + :n_overhead_allocations(0), + pool_size (0) + { + /* Stub */ + } + } DescriptorTypeProperties; /* Private functions */ /** Please see create() documentation for more details. */ - DescriptorSetGroup(std::weak_ptr in_device_ptr, - bool in_releaseable_sets, - uint32_t in_n_sets); + DescriptorSetGroup(const Anvil::BaseDevice* in_device_ptr, + std::vector in_ds_create_info_ptrs, + const Anvil::DescriptorPoolCreateFlags& in_descriptor_pool_create_flags, + MTSafety in_mt_safety = Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE, + const std::vector& in_opt_overhead_allocations = std::vector() ); /** Please see create() documentation for more details. */ - DescriptorSetGroup(std::shared_ptr in_parent_dsg_ptr, - bool in_releaseable_sets); - - DescriptorSetGroup (const DescriptorSetGroup&); - DescriptorSetGroup& operator=(const DescriptorSetGroup&); + DescriptorSetGroup(const DescriptorSetGroup* in_parent_dsg_ptr); - void bake_descriptor_pool(); + bool bake_descriptor_pool(); bool bake_descriptor_sets(); /* Private members */ - std::vector > m_cached_ds; - std::vector > m_cached_ds_layouts; - std::vector m_cached_ds_vk; - - bool m_descriptor_pool_dirty; - std::shared_ptr m_descriptor_pool_ptr; - std::map m_descriptor_sets; - std::weak_ptr m_device_ptr; - uint32_t m_overhead_allocations[VK_DESCRIPTOR_TYPE_RANGE_SIZE]; - - uint32_t m_n_instantiated_sets; - uint32_t m_n_sets; - bool m_releaseable_sets; - - bool m_layout_modifications_blocked; - std::shared_ptr m_parent_dsg_ptr; + mutable DescriptorPoolUniquePtr m_descriptor_pool_ptr; + mutable std::map > m_descriptor_sets; + const Anvil::BaseDevice* m_device_ptr; + std::vector m_ds_create_info_ptrs; + + std::unordered_map > m_descriptor_type_properties; + + const Anvil::DescriptorPoolCreateFlags m_descriptor_pool_create_flags; + uint32_t m_n_unique_dses; + const Anvil::DescriptorSetGroup* m_parent_dsg_ptr; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(DescriptorSetGroup); + ANVIL_DISABLE_COPY_CONSTRUCTOR(DescriptorSetGroup); }; } /* Vulkan namespace */ diff --git a/include/wrappers/descriptor_set_layout.h b/include/wrappers/descriptor_set_layout.h index a90e37b1..d4d87fdb 100644 --- a/include/wrappers/descriptor_set_layout.h +++ b/include/wrappers/descriptor_set_layout.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -32,27 +32,16 @@ #include "misc/callbacks.h" #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/types.h" #include "wrappers/sampler.h" #include namespace Anvil { - enum - { - /* Notification fired whenever a new binding is added to the layout. - * - * callback argument: pointer to OnNewBindingAddedToDescriptorSetLayoutCallbackArgument instance. 
- **/ - DESCRIPTOR_SET_LAYOUT_CALLBACK_ID_BINDING_ADDED, - - /* Always last */ - DESCRIPTOR_SET_LAYOUT_CALLBACK_ID_COUNT - }; - /* Descriptor Set Layout wrapper */ - class DescriptorSetLayout : public CallbacksSupportProvider, - public DebugMarkerSupportProvider + class DescriptorSetLayout : public DebugMarkerSupportProvider, + public MTSafetySupportProvider { public: /* Public functions */ @@ -64,39 +53,6 @@ namespace Anvil **/ virtual ~DescriptorSetLayout(); - /** Adds a new binding to the layout instance. - * - * It is an error to attempt to add a binding at an index, for which another binding has - * already been specified. - * - * It is an error to attempt to define immutable samplers for descriptors of type other than - * sampler or combined image+sampler. - * - * @param in_binding_index Index of the binding to configure. - * @param in_descriptor_type Type of the descriptor to use for the binding. - * @param in_descriptor_array_size Size of the descriptor array to use for the binding. - * @param in_stage_flags Rendering stages which are going to use the binding. - * @param in_opt_immutable_sampler_ptrs If not nullptr, an array of @param in_descriptor_array_size samplers should - * be passed. The binding will then be considered immutable, as per spec language. - * May be nullptr. - * - * @return true if successful, false otherwise. - **/ - bool add_binding(uint32_t in_binding_index, - VkDescriptorType in_descriptor_type, - uint32_t in_descriptor_array_size, - VkShaderStageFlags in_stage_flags, - const std::shared_ptr* in_opt_immutable_sampler_ptrs = nullptr); - - /** Converts internal layout representation to a Vulkan object. - * - * The baking will only occur if the object is internally marked as dirty. If it is not, - * the function does nothing. - * - * @return true if successful, false otherwise. - **/ - bool bake(); - /** Creates a new DescriptorSetLayout instance. * * No Vulkan Descriptor Set Layout is instantiated at creation time. One will only be created @@ -105,31 +61,14 @@ namespace Anvil * @param in_device_ptr Device the layout will be created for. * **/ - static std::shared_ptr create(std::weak_ptr in_device_ptr); + static DescriptorSetLayoutUniquePtr create(DescriptorSetCreateInfoUniquePtr in_ds_create_info_ptr, + const Anvil::BaseDevice* in_device_ptr, + MTSafety in_mt_safety = Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE); - /** Retrieves properties of a single defined binding. - * - * @param in_n_binding Index number of the binding to retrieve properties of. - * @param out_opt_binding_index_ptr May be nullptr. If not, deref will be set to the index of the - * binding. This does NOT need to be equal to @param in_n_binding. - * @param out_opt_descriptor_type_ptr May be nullptr. If not, deref will be set to the descriptor type - * for the specified binding. - * @param out_opt_descriptor_array_size_ptr May be nullptr. If not, deref will be set to size of the descriptor - * array, associated with the specified binding. - * @param out_opt_stage_flags_ptr May be nullptr. If not, deref will be set to stage flags, - * as configured for the specified binding. - * @param out_opt_immutable_samplers_enabled_ptr May be nullptr. If not, deref will be set to true if immutable samplers - * have been defined for the specified binding; otherwise, it will be - * set to false. - * - * @return true if successful, false otherwise. 
- **/ - bool get_binding_properties(uint32_t in_n_binding, - uint32_t* out_opt_binding_index_ptr, - VkDescriptorType* out_opt_descriptor_type_ptr, - uint32_t* out_opt_descriptor_array_size_ptr, - VkShaderStageFlags* out_opt_stage_flags_ptr, - bool* out_opt_immutable_samplers_enabled_ptr); + const Anvil::DescriptorSetCreateInfo* get_create_info() const + { + return m_create_info_ptr.get(); + } /** Bakes a Vulkan object, if one is needed, and returns Vulkan DS layout handle. * @@ -138,84 +77,60 @@ namespace Anvil * * @return As per description. **/ - VkDescriptorSetLayout get_layout() + VkDescriptorSetLayout get_layout() const { - if (m_dirty) - { - bake(); - } + anvil_assert(m_layout != VK_NULL_HANDLE); return m_layout; } - /** Returns the number of bindings defined for the layout. */ - uint32_t get_n_bindings() const - { - return static_cast(m_bindings.size() ); - } - - private: - /* Private type definitions */ + /* Returns the maximum number of variable descriptor count binding size supported for the specified descriptor set layout. + * + * Requires VK_KHR_maintenance3 and VK_KHR_descriptor_indexing. + * + * @param in_ds_create_info_ptr Instance obtained by calling DescriptorSetInfo::create_descriptor_set_layout_create_info(). + * Must not be null. + */ + static uint32_t get_maximum_variable_descriptor_count(const DescriptorSetLayoutCreateInfoContainer* in_ds_create_info_ptr, + const Anvil::BaseDevice* in_device_ptr); - /** Describes a single descriptor set layout binding */ - typedef struct Binding - { - uint32_t descriptor_array_size; - VkDescriptorType descriptor_type; - std::vector > immutable_samplers; - - VkShaderStageFlagsVariable(stage_flags); - - /** Dummy constructor. Do not use. */ - Binding() - { - descriptor_array_size = 0; - descriptor_type = VK_DESCRIPTOR_TYPE_MAX_ENUM; - stage_flags = static_cast(0); - } - - /** Constructor. - * - * For argument discussion, please see Anvil::DescriptorSetLayout::add_binding() documentation. - **/ - Binding(uint32_t in_descriptor_array_size, - VkDescriptorType in_descriptor_type, - VkShaderStageFlags in_stage_flags, - const std::shared_ptr* in_immutable_sampler_ptrs) - { - descriptor_array_size = in_descriptor_array_size; - descriptor_type = in_descriptor_type; - stage_flags = static_cast(in_stage_flags); - - if (in_immutable_sampler_ptrs != nullptr) - { - for (uint32_t n_sampler = 0; - n_sampler < descriptor_array_size; - ++n_sampler) - { - immutable_samplers.push_back(in_immutable_sampler_ptrs[n_sampler]); - } - } - } - } Binding; - - typedef std::map BindingIndexToBindingMap; + /* Checks if the specified descriptor set layout create info structure can be used to create a descriptor set layout instance. + * + * The app should call this function if the DS create info structure defines a number of descriptors that exceeds the + * VkPhysicalDeviceMaintenance3PropertiesKHR::maxPerSetDescriptors limit. + * + * Requires VK_KHR_maintenance3. + * + * @param in_ds_create_info_ptr Instance obtained by calling DescriptorSetInfo::create_descriptor_set_layout_create_info(). + * Must not be null. + */ + static bool meets_max_per_set_descriptors_limit(const DescriptorSetLayoutCreateInfoContainer* in_ds_create_info_ptr, + const Anvil::BaseDevice* in_device_ptr); + private: /* Private functions */ + /** Converts internal layout representation to a Vulkan object. + * + * The baking will only occur if the object is internally marked as dirty. If it is not, + * the function does nothing. + * + * @return true if successful, false otherwise. 
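/* A guard sketch for the static helpers declared above, assuming ds_create_info_ptr is a
 * DescriptorSetLayoutCreateInfoContainer* obtained as described in the comments and device_ptr is
 * a valid Anvil::BaseDevice*. Requires VK_KHR_maintenance3; the variable-count query additionally
 * requires VK_KHR_descriptor_indexing. */
if (!Anvil::DescriptorSetLayout::meets_max_per_set_descriptors_limit(ds_create_info_ptr,
                                                                     device_ptr) )
{
    /* The layout would exceed VkPhysicalDeviceMaintenance3PropertiesKHR::maxPerSetDescriptors -
     * trim it or split the descriptors across multiple sets. */
}

const uint32_t max_variable_count = Anvil::DescriptorSetLayout::get_maximum_variable_descriptor_count(ds_create_info_ptr,
                                                                                                       device_ptr);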
+ **/ + bool init(); + /* Please see create() documentation for more details */ - DescriptorSetLayout(std::weak_ptr in_device_ptr); + DescriptorSetLayout(DescriptorSetCreateInfoUniquePtr in_ds_create_info_ptr, + const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe); DescriptorSetLayout (const DescriptorSetLayout&); DescriptorSetLayout& operator=(const DescriptorSetLayout&); /* Private variables */ - BindingIndexToBindingMap m_bindings; - std::weak_ptr m_device_ptr; - bool m_dirty; + DescriptorSetCreateInfoUniquePtr m_create_info_ptr; + const Anvil::BaseDevice* m_device_ptr; VkDescriptorSetLayout m_layout; - - friend class std::shared_ptr; }; }; /* namespace Anvil */ diff --git a/include/wrappers/descriptor_set_layout_manager.h b/include/wrappers/descriptor_set_layout_manager.h new file mode 100644 index 00000000..18f61383 --- /dev/null +++ b/include/wrappers/descriptor_set_layout_manager.h @@ -0,0 +1,81 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// + +#ifndef DESCRIPTOR_SET_LAYOUT_MANAGER_H +#define DESCRIPTOR_SET_LAYOUT_MANAGER_H + +#include "misc/mt_safety.h" +#include "misc/types.h" + +namespace Anvil +{ + class DescriptorSetLayoutManager : public MTSafetySupportProvider + { + public: + /* Public functions */ + + /** Destructor */ + ~DescriptorSetLayoutManager(); + + bool get_layout(const DescriptorSetCreateInfo* in_ds_create_info_ptr, + Anvil::DescriptorSetLayoutUniquePtr* out_ds_layout_ptr_ptr); + + protected: + /* Protected functions */ + + private: + /* Private type declarations */ + typedef struct DescriptorSetLayoutContainer + { + DescriptorSetLayoutUniquePtr ds_layout_ptr; + std::atomic n_references; + + DescriptorSetLayoutContainer() + :n_references(1) + { + /* Stub */ + } + } DescriptorSetLayoutContainer; + + typedef std::vector > DescriptorSetLayouts; + + /* Private functions */ + DescriptorSetLayoutManager(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe); + + DescriptorSetLayoutManager (const DescriptorSetLayoutManager&); + DescriptorSetLayoutManager& operator=(const DescriptorSetLayoutManager&); + + void on_descriptor_set_layout_dereferenced(Anvil::DescriptorSetLayout* in_layout_ptr); + + static Anvil::DescriptorSetLayoutManagerUniquePtr create(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe); + + /* Private members */ + const Anvil::BaseDevice* m_device_ptr; + DescriptorSetLayouts m_descriptor_set_layouts; + + friend class BaseDevice; + }; +}; /* Vulkan namespace */ + +#endif /* DESCRIPTOR_SET_LAYOUT_MANAGER_H */ \ No newline at end of file diff --git a/include/wrappers/descriptor_update_template.h b/include/wrappers/descriptor_update_template.h new file mode 100644 index 00000000..c2b052e1 --- /dev/null +++ b/include/wrappers/descriptor_update_template.h @@ -0,0 +1,97 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// + +#ifndef WRAPPERS_DESCRIPTOR_UPDATE_TEMPLATE_H +#define WRAPPERS_DESCRIPTOR_UPDATE_TEMPLATE_H + +#include "misc/debug_marker.h" +#include "misc/mt_safety.h" +#include "misc/types.h" + +namespace Anvil +{ + /* Descriptor Update Template wrapper */ + class DescriptorUpdateTemplate : public DebugMarkerSupportProvider, + public MTSafetySupportProvider + { + public: + /* Public functions */ + + /** Destructor. + * + * Destroys the Vulkan counterpart and unregisters the instance from the + * object tracker. 
+ **/ + virtual ~DescriptorUpdateTemplate(); + + /** Creates a new DescriptorUpdateTemplate instance. + * + * @param in_device_ptr Device the layout will be created for. Must not be null. + * @param in_descriptor_set_layout_ptr DS layout to use as reference when creating the template. + * The function caches layout info internally, so the object may be safely released + * after this function leaves. Must not be null. + * + **/ + static Anvil::DescriptorUpdateTemplateUniquePtr create_for_descriptor_set_updates(const Anvil::BaseDevice* in_device_ptr, + const Anvil::DescriptorSetLayout* in_descriptor_set_layout_ptr, + const std::vector<Anvil::DescriptorUpdateTemplateEntry>& in_update_entries, + MTSafety in_mt_safety = Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE) + { + return create_for_descriptor_set_updates(in_device_ptr, + in_descriptor_set_layout_ptr, + &in_update_entries.at(0), + static_cast<uint32_t>(in_update_entries.size() ), + in_mt_safety); + } + + static Anvil::DescriptorUpdateTemplateUniquePtr create_for_descriptor_set_updates(const Anvil::BaseDevice* in_device_ptr, + const Anvil::DescriptorSetLayout* in_descriptor_set_layout_ptr, + const Anvil::DescriptorUpdateTemplateEntry* in_update_entries_ptr, + const uint32_t& in_n_update_entries, + MTSafety in_mt_safety = Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE); + + /* Issues a MT-safe (if needed) vkUpdateDescriptorSetWithTemplateKHR() call against the specified descriptor set. */ + void update_descriptor_set(const Anvil::DescriptorSet* inout_ds_ptr, + const void* in_data_ptr) const; + + private: + /* Private functions */ + + bool init(const Anvil::DescriptorSetLayout* in_descriptor_set_layout_ptr, + const Anvil::DescriptorUpdateTemplateEntry* in_update_entries_ptr, + const uint32_t& in_n_update_entries); + + /* Please see create() documentation for more details */ + DescriptorUpdateTemplate(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe); + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(DescriptorUpdateTemplate); + ANVIL_DISABLE_COPY_CONSTRUCTOR (DescriptorUpdateTemplate); + + /* Private variables */ + const Anvil::BaseDevice* m_device_ptr; + Anvil::DescriptorSetCreateInfoUniquePtr m_ds_create_info_ptr; + VkDescriptorUpdateTemplateKHR m_vk_object; + }; +}; /* namespace Anvil */ + +#endif /* WRAPPERS_DESCRIPTOR_UPDATE_TEMPLATE_H */ diff --git a/include/wrappers/device.h b/include/wrappers/device.h index 08bbca1a..b75323f7 100644 --- a/include/wrappers/device.h +++ b/include/wrappers/device.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -26,46 +26,49 @@ * - encapsulate all logic required to manipulate devices. * - let ObjectTracker detect leaking device instances. * - * The wrapper is NOT thread-safe. + * The wrapper is thread-safe (on an opt-in basis).
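/* A minimal sketch of the template-based update path declared above. The layout of
 * Anvil::DescriptorUpdateTemplateEntry is not shown in this header, so the entries are assumed to
 * have been filled in elsewhere; device_ptr, ds_layout_ptr, ds_ptr and raw_data_ptr are assumed
 * valid. */
std::vector<Anvil::DescriptorUpdateTemplateEntry> entries;

/* ... populate entries with binding / offset / stride information ... */

auto template_ptr = Anvil::DescriptorUpdateTemplate::create_for_descriptor_set_updates(device_ptr,
                                                                                       ds_layout_ptr,
                                                                                       entries);

/* raw_data_ptr must match the offsets and strides described by the entries. */
template_ptr->update_descriptor_set(ds_ptr,
                                    raw_data_ptr);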
**/ #ifndef WRAPPERS_DEVICE_H #define WRAPPERS_DEVICE_H #include "misc/debug.h" +#include "misc/device_create_info.h" +#include "misc/extensions.h" +#include "misc/mt_safety.h" +#include "misc/struct_chainer.h" #include "misc/types.h" #include namespace Anvil { - class BaseDevice : public std::enable_shared_from_this + class BaseDevice : public Anvil::MTSafetySupportProvider { public: /* Public functions */ /* Constructor */ - BaseDevice(std::weak_ptr in_parent_instance_ptr); + BaseDevice(Anvil::DeviceCreateInfoUniquePtr in_create_info_ptr); - /** Releases all children queues and unregisters itself from the owning physical device. */ - virtual void destroy(); + virtual ~BaseDevice(); - /** Retrieves a command pool, created for the specified queue family type. + /** Retrieves a command pool, created for the specified queue family index. * - * @param in_queue_family_type Queue family to retrieve the command pool for. + * @param in_vk_queue_family_index Vulkan index of the queue family to return the command pool for. * * @return As per description **/ - std::shared_ptr get_command_pool(Anvil::QueueFamilyType in_queue_family_type) const + Anvil::CommandPool* get_command_pool_for_queue_family_index(uint32_t in_vk_queue_family_index) const { - return m_command_pool_ptrs[in_queue_family_type]; + return m_command_pool_ptr_per_vk_queue_fam.at(in_vk_queue_family_index).get(); } /** Retrieves a compute pipeline manager, created for this device instance. * * @return As per description **/ - std::shared_ptr get_compute_pipeline_manager() const + Anvil::ComputePipelineManager* get_compute_pipeline_manager() const { - return m_compute_pipeline_manager_ptr; + return m_compute_pipeline_manager_ptr.get(); } /** Returns a Queue instance, corresponding to a compute queue at index @param in_n_queue @@ -74,18 +77,28 @@ namespace Anvil * * @return As per description **/ - std::shared_ptr get_compute_queue(uint32_t in_n_queue) const + Anvil::Queue* get_compute_queue(uint32_t in_n_queue) const { - std::shared_ptr result_ptr; + Anvil::Queue* result_ptr = nullptr; if (m_compute_queues.size() > in_n_queue) { - result_ptr = m_compute_queues[in_n_queue]; + result_ptr = m_compute_queues.at(in_n_queue); } return result_ptr; } + const Anvil::DeviceCreateInfo* get_create_info_ptr() const + { + return m_create_info_ptr.get(); + } + + Anvil::DescriptorSetLayoutManager* get_descriptor_set_layout_manager() const + { + return m_descriptor_set_layout_manager_ptr.get(); + } + /** Retrieves a raw Vulkan handle for this device. * * @return As per description @@ -99,52 +112,220 @@ namespace Anvil * * Do NOT release. This object is owned by Device and will be released at object tear-down time. **/ - std::shared_ptr get_dummy_descriptor_set() const; + const Anvil::DescriptorSet* get_dummy_descriptor_set() const; /** Retrieves a DescriptorSetLayout instance, which encapsulates a single descriptor set layout, * which holds 1 descriptor set holding 0 descriptors. * * Do NOT release. This object is owned by Device and will be released at object tear-down time. **/ - std::shared_ptr get_dummy_descriptor_set_layout() const; + Anvil::DescriptorSetLayout* get_dummy_descriptor_set_layout() const; - /** Returns a container with entry-points to functions introduced by VK_AMD_draw_indirect_count extension. + /** Returns a container with entry-points to functions introduced by VK_AMD_buffer_marker extension. + * + * Will fire an assertion failure if the extension was not requested at device creation time. 
+ **/ + const ExtensionAMDBufferMarkerEntrypoints& get_extension_amd_buffer_marker_entrypoints() const + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->amd_buffer_marker() ); + + return m_amd_buffer_marker_extension_entrypoints; + } + + + /** Returns a container with entry-points to functions introduced by VK_AMD_draw_indirect_count extension. * * Will fire an assertion failure if the extension was not requested at device creation time. **/ const ExtensionAMDDrawIndirectCountEntrypoints& get_extension_amd_draw_indirect_count_entrypoints() const { - anvil_assert(m_amd_draw_indirect_count_enabled); + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->amd_draw_indirect_count() ); return m_amd_draw_indirect_count_extension_entrypoints; } + /** Returns a container with entry-points to functions introduced by VK_AMD_shader_info extension. + * + * Will fire an assertion failure if the extension was not requested at device creation time. + **/ + const ExtensionAMDShaderInfoEntrypoints& get_extension_amd_shader_info_entrypoints() const + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->amd_shader_info() ); + + return m_amd_shader_info_extension_entrypoints; + } + /** Returns a container with entry-points to functions introduced by VK_EXT_debug_marker extension. * * Will fire an assertion failure if the extension is not supported. **/ const ExtensionEXTDebugMarkerEntrypoints& get_extension_ext_debug_marker_entrypoints() const { - anvil_assert(m_ext_debug_marker_enabled); + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->ext_debug_marker() ); return m_ext_debug_marker_extension_entrypoints; } + /** Returns a container with entry-points to functions introduced by VK_EXT_external_memory_host extension. + * + * Will fire an assertion failure if the extension is not supported. + **/ + const ExtensionEXTExternalMemoryHostEntrypoints& get_extension_ext_external_memory_host_entrypoints() const + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->ext_external_memory_host() ); + + return m_ext_external_memory_host_extension_entrypoints; + } + + /** Returns a container with entry-points to functions introduced by VK_EXT_hdr_metadata extension. + * + * Will fire an assertion failure if the extension is not supported. + **/ + const ExtensionEXTHdrMetadataEntrypoints& get_extension_ext_hdr_metadata_entrypoints() const + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->ext_hdr_metadata() ); + + return m_ext_hdr_metadata_extension_entrypoints; + } + + /** Returns a container with entry-points to functions introduced by VK_EXT_sample_locations extension. + * + * Will fire an assertion failure if the extension is not supported. + **/ + const ExtensionEXTSampleLocationsEntrypoints& get_extension_ext_sample_locations_entrypoints() const; + + /** Returns a container with entry-points to functions introduced by VK_EXT_transform_feedback extension. + * + * Will fire an assertion failure if the extension is not supported. + **/ + const ExtensionEXTTransformFeedbackEntrypoints& get_extension_ext_transform_feedback_entrypoints() const; + + /** Returns a container with entry-points to functions introduced by VK_KHR_bind_memory2 extension. 
**/ + const ExtensionKHRBindMemory2Entrypoints& get_extension_khr_bind_memory2_entrypoints() const + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->khr_bind_memory2() ); + + return m_khr_bind_memory2_extension_entrypoints; + } + + /** Returns a container with entry-points to functions introduced by VK_KHR_create_renderpass2 extension. **/ + const ExtensionKHRCreateRenderpass2Entrypoints& get_extension_khr_create_renderpass2_entrypoints() const + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->khr_create_renderpass2() ); + + return m_khr_create_renderpass2_extension_entrypoints; + } + + /** Returns a container with entry-points to functions introduced by VK_KHR_descriptor_update_template extension. **/ + const ExtensionKHRDescriptorUpdateTemplateEntrypoints& get_extension_khr_descriptor_update_template_entrypoints() const + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->khr_descriptor_update_template() ); + + return m_khr_descriptor_update_template_extension_entrypoints; + } + + /** Returns a container with entry-points to functions introduced by VK_KHR_device_group extension. **/ + const ExtensionKHRDeviceGroupEntrypoints& get_extension_khr_device_group_entrypoints() const + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->khr_device_group() ); + + return m_khr_device_group_extension_entrypoints; + } + + /** Returns a container with entry-points to functions introduced by VK_KHR_draw_indirect_count extension. + * + * Will fire an assertion failure if the extension was not requested at device creation time. + **/ + const ExtensionKHRDrawIndirectCountEntrypoints& get_extension_khr_draw_indirect_count_entrypoints() const + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->khr_draw_indirect_count() ); + + return m_khr_draw_indirect_count_extension_entrypoints; + } + + #if defined(_WIN32) + const ExtensionKHRExternalFenceWin32Entrypoints& get_extension_khr_external_fence_win32_entrypoints() const + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->khr_external_fence_win32() ); + + return m_khr_external_fence_win32_extension_entrypoints; + } + + const ExtensionKHRExternalMemoryWin32Entrypoints& get_extension_khr_external_memory_win32_entrypoints() const + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->khr_external_memory_win32() ); + + return m_khr_external_memory_win32_extension_entrypoints; + } + + const ExtensionKHRExternalSemaphoreWin32Entrypoints& get_extension_khr_external_semaphore_win32_entrypoints() const + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->khr_external_semaphore_win32() ); + + return m_khr_external_semaphore_win32_extension_entrypoints; + } + #else + const ExtensionKHRExternalFenceFdEntrypoints& get_extension_khr_external_fence_fd_entrypoints() const + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->khr_external_fence_fd() ); + + return m_khr_external_fence_fd_extension_entrypoints; + } + + const ExtensionKHRExternalMemoryFdEntrypoints& get_extension_khr_external_memory_fd_entrypoints() const + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->khr_external_memory_fd() ); + + return m_khr_external_memory_fd_extension_entrypoints; + } + + const ExtensionKHRExternalSemaphoreFdEntrypoints& get_extension_khr_external_semaphore_fd_entrypoints() const + { + 
anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->khr_external_semaphore_fd() ); + + return m_khr_external_semaphore_fd_extension_entrypoints; + } + #endif + + /** Returns a container with entry-points to functions introduced by VK_KHR_get_memory_requirements2 extension. **/ + const ExtensionKHRGetMemoryRequirements2Entrypoints& get_extension_khr_get_memory_requirements2_entrypoints() const + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->khr_get_memory_requirements2() ); + + return m_khr_get_memory_requirements2_extension_entrypoints; + } + /** Returns a container with entry-points to functions introduced by VK_KHR_maintenance1 extension. **/ const ExtensionKHRMaintenance1Entrypoints& get_extension_khr_maintenance1_entrypoints() const { - anvil_assert(m_khr_maintenance1_enabled); + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->khr_maintenance1() ); return m_khr_maintenance1_extension_entrypoints; } + /** Returns a container with entry-points to functions introduced by VK_KHR_maintenance3 extension. **/ + const ExtensionKHRMaintenance3Entrypoints& get_extension_khr_maintenance3_entrypoints() const + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->khr_maintenance3() ); + + return m_khr_maintenance3_extension_entrypoints; + } + + /** Returns a container with entry-points to functions introduced by VK_KHR_sampler_ycbcr_conversion extension. **/ + const ExtensionKHRSamplerYCbCrConversionEntrypoints& get_extension_khr_sampler_ycbcr_conversion_entrypoints() const + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->khr_sampler_ycbcr_conversion() ); + + return m_khr_sampler_ycbcr_conversion_extension_entrypoints; + } + /** Returns a container with entry-points to functions introduced by VK_KHR_swapchain extension. * * Will fire an assertion failure if the extension was not requested at device creation time. **/ const ExtensionKHRSwapchainEntrypoints& get_extension_khr_swapchain_entrypoints() const { - anvil_assert(m_khr_swapchain_enabled); + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->khr_swapchain() ); return m_khr_swapchain_extension_entrypoints; } @@ -153,9 +334,9 @@ namespace Anvil * * @return As per description **/ - std::shared_ptr get_graphics_pipeline_manager() const + Anvil::GraphicsPipelineManager* get_graphics_pipeline_manager() const { - return m_graphics_pipeline_manager_ptr; + return m_graphics_pipeline_manager_ptr.get(); } /** Returns the number of compute queues supported by this device. @@ -182,9 +363,9 @@ namespace Anvil switch (in_family_type) { - case QUEUE_FAMILY_TYPE_COMPUTE: result = static_cast(m_compute_queues.size() ); break; - case QUEUE_FAMILY_TYPE_TRANSFER: result = static_cast(m_transfer_queues.size() ); break; - case QUEUE_FAMILY_TYPE_UNIVERSAL: result = static_cast(m_universal_queues.size() ); break; + case Anvil::QueueFamilyType::COMPUTE: result = static_cast(m_compute_queues.size() ); break; + case Anvil::QueueFamilyType::TRANSFER: result = static_cast(m_transfer_queues.size() ); break; + case Anvil::QueueFamilyType::UNIVERSAL: result = static_cast(m_universal_queues.size() ); break; default: { @@ -224,58 +405,49 @@ namespace Anvil } /** Returns Vulkan instance wrapper used to create this device. 
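The per-extension entrypoint getters above all follow the same pattern: assert that the extension was requested at device creation time, then hand back cached function pointers. A hedged sketch of typical call-site usage (assumes a valid Anvil::BaseDevice* device_ptr; the entrypoint member is assumed to mirror the raw Vulkan function name, and is_extension_enabled() is declared further down in this header):

    #include "wrappers/device.h"

    void draw_indirect_count_if_available(const Anvil::BaseDevice* device_ptr,
                                          VkCommandBuffer          cmd_buffer,
                                          VkBuffer                 args_buffer,
                                          VkBuffer                 count_buffer,
                                          uint32_t                 max_draw_count,
                                          uint32_t                 stride)
    {
        if (!device_ptr->is_extension_enabled(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME) )
        {
            return;
        }

        /* The getter fires an assertion failure if the extension was not requested at creation time. */
        const auto& entrypoints = device_ptr->get_extension_khr_draw_indirect_count_entrypoints();

        entrypoints.vkCmdDrawIndirectCountKHR(cmd_buffer,
                                              args_buffer,
                                              0, /* offset            */
                                              count_buffer,
                                              0, /* countBufferOffset */
                                              max_draw_count,
                                              stride);
    }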
*/ - std::weak_ptr get_parent_instance() const - { - return m_parent_instance_ptr; - } + const Anvil::Instance* get_parent_instance() const; + + virtual bool get_physical_device_buffer_properties(const BufferPropertiesQuery& in_query, + Anvil::BufferProperties* out_opt_result_ptr = nullptr) const = 0; /** Returns features supported by physical device(s) which have been used to instantiate * this logical device instance. **/ - virtual const VkPhysicalDeviceFeatures& get_physical_device_features() const = 0; + virtual const Anvil::PhysicalDeviceFeatures& get_physical_device_features() const = 0; - /** Returns format properties, as reported by physical device(s) which have been used to - * instantiate this logical device instance. - * - * @param in_format Format to use for the query. - */ - virtual const Anvil::FormatProperties& get_physical_device_format_properties(VkFormat in_format) const = 0; + virtual bool get_physical_device_fence_properties(const FencePropertiesQuery& in_query, + Anvil::FenceProperties* out_opt_result_ptr = nullptr) const = 0; - /** Returns image format properties, as reported by physical device(s) which have been used - * to instantiate this logical device instance. - * - * @param in_format Meaning as per Vulkan spec. - * @param in_type Meaning as per Vulkan spec. - * @param in_tiling Meaning as per Vulkan spec. - * @param in_usage Meaning as per Vulkan spec. - * @param in_flags Meaning as per Vulkan spec. - * @param out_result If the function returns true, the requested information will be stored - * at this location. - * - * @return true if successful, false otherwise. - **/ - virtual bool get_physical_device_image_format_properties(VkFormat in_format, - VkImageType in_type, - VkImageTiling in_tiling, - VkImageUsageFlags in_usage, - VkImageCreateFlags in_flags, - VkImageFormatProperties& out_result) const = 0; + virtual Anvil::FormatProperties get_physical_device_format_properties(Anvil::Format in_format) const = 0; + + virtual bool get_physical_device_image_format_properties(const ImageFormatPropertiesQuery& in_query, + Anvil::ImageFormatProperties* out_opt_result_ptr = nullptr) const = 0; /** Returns memory properties, as reported by physical device(s) which have been used * to instantiate this logical device instance. **/ virtual const Anvil::MemoryProperties& get_physical_device_memory_properties() const = 0; + /** Returns multisample properties as reported for physical device(s) which have been used + * to instantiate this logical device instance. + * + * Requires VK_EXT_sample_locations + */ + virtual Anvil::MultisamplePropertiesEXT get_physical_device_multisample_properties(const Anvil::SampleCountFlagBits& in_samples) const = 0; + /** Returns general physical device properties, as reported by physical device(s) which have been used * to instantiate this logical device instance. **/ - virtual const VkPhysicalDeviceProperties& get_physical_device_properties() const = 0; + virtual const Anvil::PhysicalDeviceProperties& get_physical_device_properties() const = 0; /** Returns queue families available for the physical device(s) used to instantiate this * logical device instance. 
**/ virtual const QueueFamilyInfoItems& get_physical_device_queue_families() const = 0; + virtual bool get_physical_device_semaphore_properties(const SemaphorePropertiesQuery& in_query, + Anvil::SemaphoreProperties* out_opt_result_ptr = nullptr) const = 0; + /** Returns sparse image format properties, as reported by physical device(s) which have been * used to instantiate this logical device instance. * @@ -289,36 +461,36 @@ namespace Anvil * * @return true if successful, false otherwise. **/ - virtual bool get_physical_device_sparse_image_format_properties(VkFormat in_format, - VkImageType in_type, - VkSampleCountFlagBits in_sample_count, - VkImageUsageFlags in_usage, - VkImageTiling in_tiling, - std::vector& out_result) const = 0; + virtual bool get_physical_device_sparse_image_format_properties(Anvil::Format in_format, + Anvil::ImageType in_type, + Anvil::SampleCountFlagBits in_sample_count, + ImageUsageFlags in_usage, + Anvil::ImageTiling in_tiling, + std::vector& out_result) const = 0; /** Returns surface capabilities, as reported for physical device(s) which have been used to instantiate * this logical device instance. **/ - virtual bool get_physical_device_surface_capabilities(std::shared_ptr in_surface_ptr, - VkSurfaceCapabilitiesKHR* out_result_ptr) const = 0; + virtual bool get_physical_device_surface_capabilities(Anvil::RenderingSurface* in_surface_ptr, + Anvil::SurfaceCapabilities* out_result_ptr) const = 0; /** Returns a pipeline cache, created specifically for this device. * * @return As per description **/ - std::shared_ptr get_pipeline_cache() const + Anvil::PipelineCache* get_pipeline_cache() const { - return m_pipeline_cache_ptr; + return m_pipeline_cache_ptr.get(); } /** Returns a pipeline layout manager, created specifically for this device. * * @return As per description **/ - std::shared_ptr get_pipeline_layout_manager() const + Anvil::PipelineLayoutManager* get_pipeline_layout_manager() const { - return m_pipeline_layout_manager_ptr; + return m_pipeline_layout_manager_ptr.get(); } /** Calls the device-specific implementaton of vkGetDeviceProcAddr(), using this device @@ -330,14 +502,19 @@ namespace Anvil **/ PFN_vkVoidFunction get_proc_address(const char* in_name) const { - return vkGetDeviceProcAddr(m_device, - in_name); + return Anvil::Vulkan::vkGetDeviceProcAddr(m_device, + in_name); } + PFN_vkVoidFunction get_proc_address(const std::string& in_name) const { return get_proc_address(in_name.c_str() ); } + /** TODO */ + Anvil::Queue* get_queue(const Anvil::QueueFamilyType& in_queue_family_type, + uint32_t in_n_queue) const; + /* Returns a Queue instance representing a Vulkan queue from queue family @param in_n_queue_family * at index @param in_n_queue. * @@ -346,32 +523,22 @@ namespace Anvil * * @return Requested Queue instance OR nullptr if either of the parameters is invalid. */ - std::shared_ptr get_queue(uint32_t in_n_queue_family, - uint32_t in_n_queue) const; + Anvil::Queue* get_queue_for_queue_family_index(uint32_t in_n_queue_family, + uint32_t in_n_queue) const; - /** Returns a index of the specified queue family type. - * - * @param in_family_type TODO + /** TODO * - * @return The requested queue family index, or -1 if the specified queue family type is - * not supported on this device. - **/ - uint32_t get_queue_family_index(Anvil::QueueFamilyType in_family_type) const - { - return m_device_queue_families.family_index[in_family_type]; - } + * NOTE: A single Anvil queue family MAY map onto more than one Vulkan queue family type. 
The same is not true the other way around. + */ + bool get_queue_family_indices_for_queue_family_type(const Anvil::QueueFamilyType& in_queue_family_type, + uint32_t* out_opt_n_queue_family_indices_ptr, + const uint32_t** out_opt_queue_family_indices_ptr_ptr) const; - /** Returns queue family type for the specified queue family index. - * - * @param in_queue_family_index TODO + /** TODO. - * - * @return The requested information OR Anvil::QUEUE_FAMILY_TYPE_UNDEFINED if the index has - * not been recognized. + * NOTE: Anvil's queue family may map onto more than one Vulkan queue family. The same is not true the other way around. */ - Anvil::QueueFamilyType get_queue_family_type(uint32_t in_queue_family_index) const - { - return m_device_queue_families.family_type[in_queue_family_index]; - } + Anvil::QueueFamilyType get_queue_family_type(uint32_t in_queue_family_index) const; /** Returns detailed queue family information for a queue family at index @param in_queue_family_index . */ virtual const Anvil::QueueFamilyInfo* get_queue_family_info(uint32_t in_queue_family_index) const = 0; @@ -386,9 +553,15 @@ namespace Anvil * * @return true if successful, false otherwise. */ - bool get_sample_locations(VkSampleCountFlagBits in_sample_count, + bool get_sample_locations(Anvil::SampleCountFlagBits in_sample_count, std::vector* out_result_ptr) const; + /** Returns shader module cache instance */ + Anvil::ShaderModuleCache* get_shader_module_cache() const + { + return m_shader_module_cache_ptr.get(); + } + /** Returns a Queue instance, corresponding to a sparse binding-capable queue at index @param in_n_queue, * which supports queue family capabilities specified with @param opt_required_queue_flags. * * @param in_n_queue Index of the queue to retrieve the instance of. * @param in_opt_required_queue_flags Additional queue family capabilities the queue should support. * * @return As per description **/ - std::shared_ptr get_sparse_binding_queue(uint32_t in_n_queue, - VkQueueFlags in_opt_required_queue_flags = 0) const; + Anvil::Queue* get_sparse_binding_queue(uint32_t in_n_queue, + Anvil::QueueFlags in_opt_required_queue_flags = Anvil::QueueFlags() ) const; + + /* Tells which memory types can be specified when importing the external memory handle @param in_handle + * + * For all external memory handle types EXCEPT host pointers: + * + Requires VK_KHR_external_memory_fd under Linux. + * + Requires VK_KHR_external_memory_win32 under Windows. + * + * For host pointers: + * + Requires VK_EXT_external_memory_host. + * + * @return true if successful, false otherwise. + * + * + * @param in_external_handle_type (Host pointers) Must be Anvil::ExternalMemoryHandleTypeFlagBits::HOST_ALLOCATION_BIT_EXT or + * Anvil::ExternalMemoryHandleTypeFlagBits::HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT. + * + * (Other) (Windows) must be either Anvil::ExternalMemoryHandleTypeFlagBits::OPAQUE_WIN32_BIT or + * Anvil::ExternalMemoryHandleTypeFlagBits::WIN32_KMT_BIT. + * (Linux) must be Anvil::ExternalMemoryHandleTypeFlagBits::OPAQUE_FD_BIT. + * + * @param out_supported_memory_type_bits Deref will be set to a set of bits where each index corresponds to support status + * of a memory type with corresponding index. Must not be null.
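The queue-family helpers above decouple Anvil's notion of a queue family type (COMPUTE / TRANSFER / UNIVERSAL) from raw Vulkan queue family indices, and one Anvil type may now map onto several Vulkan families. An illustrative walk over that mapping (assumes a valid Anvil::BaseDevice* device_ptr):

    #include "wrappers/device.h"

    void walk_universal_queue_families(const Anvil::BaseDevice* device_ptr)
    {
        uint32_t        n_queue_fams      = 0;
        const uint32_t* queue_fam_indices = nullptr;

        if (device_ptr->get_queue_family_indices_for_queue_family_type(Anvil::QueueFamilyType::UNIVERSAL,
                                                                       &n_queue_fams,
                                                                       &queue_fam_indices) )
        {
            for (uint32_t n_fam = 0; n_fam < n_queue_fams; ++n_fam)
            {
                /* Command pools are now tracked per Vulkan queue family index. */
                Anvil::CommandPool* cmd_pool_ptr = device_ptr->get_command_pool_for_queue_family_index(queue_fam_indices[n_fam]);

                (void) cmd_pool_ptr;
            }
        }

        /* Queues can still be fetched by Anvil family type, independently of the Vulkan family they live in. */
        Anvil::Queue* first_universal_queue_ptr = device_ptr->get_queue(Anvil::QueueFamilyType::UNIVERSAL,
                                                                        0 /* in_n_queue */);
        (void) first_universal_queue_ptr;
    }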
+ */ + bool get_memory_types_supported_for_external_handle(const Anvil::ExternalMemoryHandleTypeFlagBits& in_external_handle_type, + ExternalHandleType in_handle, + uint32_t* out_supported_memory_type_bits) const; /** Returns a Queue instance, corresponding to a transfer queue at index @param in_n_queue * @@ -407,13 +606,13 @@ namespace Anvil * * @return As per description **/ - std::shared_ptr get_transfer_queue(uint32_t in_n_queue) const + Anvil::Queue* get_transfer_queue(uint32_t in_n_queue) const { - std::shared_ptr result_ptr; + Anvil::Queue* result_ptr = nullptr; if (m_transfer_queues.size() > in_n_queue) { - result_ptr = m_transfer_queues[in_n_queue]; + result_ptr = m_transfer_queues.at(in_n_queue); } return result_ptr; @@ -425,13 +624,13 @@ namespace Anvil * * @return As per description **/ - std::shared_ptr get_universal_queue(uint32_t in_n_queue) const + Anvil::Queue* get_universal_queue(uint32_t in_n_queue) const { - std::shared_ptr result_ptr; + Anvil::Queue* result_ptr = nullptr; if (m_universal_queues.size() > in_n_queue) { - result_ptr = m_universal_queues[in_n_queue]; + result_ptr = m_universal_queues.at(in_n_queue); } return result_ptr; @@ -440,54 +639,11 @@ namespace Anvil /* Tells what type this device instance is */ virtual DeviceType get_type() const = 0; - bool is_amd_draw_indirect_count_extension_enabled() const - { - return m_amd_draw_indirect_count_enabled; - } - - bool is_amd_gcn_shader_extension_enabled() const - { - return m_amd_gcn_shader_enabled; - } - - bool is_amd_gpu_shader_half_float_extension_enabled() const - { - return m_amd_gpu_shader_half_float_enabled; - } - - bool is_amd_gpu_shader_int16_extension_enabled() const - { - return m_amd_gpu_shader_int16_enabled; - } - - bool is_amd_negative_viewport_height_extension_enabled() const - { - return m_amd_negative_viewport_height_enabled; - } - - bool is_amd_rasterization_order_extension_enabled() const - { - return m_amd_rasterization_order_enabled; - } + bool is_compute_queue_family_index(const uint32_t& in_queue_family_index) const; - bool is_amd_shader_ballot_extension_enabled() const + const Anvil::IExtensionInfoDevice* get_extension_info() const { - return m_amd_shader_ballot_enabled; - } - - bool is_amd_shader_explicit_vertex_parameter_extension_enabled() const - { - return m_amd_shader_explicit_vertex_parameter_enabled; - } - - bool is_amd_shader_trinary_minmax_extension_enabled() const - { - return m_amd_shader_trinary_minmax_enabled; - } - - bool is_amd_texture_gather_bias_lod_extension_enabled() const - { - return m_amd_texture_gather_bias_lod_enabled; + return m_extension_enabled_info_ptr->get_device_extension_info(); } /* Tells whether the device has been created with the specified extension enabled. 
@@ -498,68 +654,62 @@ namespace Anvil **/ bool is_extension_enabled(const std::string& in_extension_name) const { - return std::find(m_enabled_extensions.begin(), - m_enabled_extensions.end(), - in_extension_name) != m_enabled_extensions.end(); - } + anvil_assert(m_extension_enabled_info_ptr != nullptr); - bool is_ext_debug_marker_extension_enabled() const - { - return m_ext_debug_marker_enabled; + return m_extension_enabled_info_ptr->get_device_extension_info()->by_name(in_extension_name); } - bool is_ext_shader_subgroup_ballot_extension_enabled() const - { - return m_ext_shader_subgroup_ballot_enabled; - } + bool is_transfer_queue_family_index(const uint32_t& in_queue_family_index) const; - bool is_ext_shader_subgroup_vote_extension_enabled() const - { - return m_ext_shader_subgroup_vote_enabled; - } + bool is_universal_queue_family_index(const uint32_t& in_queue_family_index) const; - bool is_khr_16bit_storage_extension_enabled() const - { - return m_khr_16bit_storage_enabled; - } + bool wait_idle() const; - bool is_khr_maintenance1_extension_enabled() const - { - return m_khr_maintenance1_enabled; - } + protected: + /* Protected type definitions */ - bool is_khr_storage_buffer_storage_class_enabled() const + typedef struct DeviceQueueFamilyMemberInfo { - return m_khr_storage_buffer_storage_class_enabled; - } + uint32_t family_index; + uint32_t n_queues; - bool is_khr_surface_extension_enabled() const - { - return m_khr_surface_enabled; - } + DeviceQueueFamilyMemberInfo() + :family_index(UINT32_MAX), + n_queues (UINT32_MAX) + { + /* Stub */ + } - bool is_khr_swapchain_extension_enabled() const - { - return m_khr_swapchain_enabled; - } + explicit DeviceQueueFamilyMemberInfo(uint32_t in_family_index, + uint32_t in_n_queues) + :family_index(in_family_index), + n_queues (in_n_queues) + { + /* Stub */ + } + + bool operator==(const uint32_t& in_family_index) const + { + return family_index == in_family_index; + } + } DeviceQueueFamilyMemberInfo; - protected: - /* Protected type definitions */ typedef struct { - uint32_t family_index[Anvil::QUEUE_FAMILY_TYPE_COUNT]; - Anvil::QueueFamilyType family_type [Anvil::QUEUE_FAMILY_TYPE_COUNT]; - uint32_t n_queues [Anvil::QUEUE_FAMILY_TYPE_COUNT]; + uint32_t n_total_queues_per_family[static_cast(Anvil::QueueFamilyType::COUNT)]; + std::map > queue_families; } DeviceQueueFamilyInfo; /* Protected functions */ - virtual ~BaseDevice(); - std::vector get_queue_priorities(const QueueFamilyInfo* in_queue_family_info_ptr) const; - void init (const DeviceExtensionConfiguration& in_extensions, - const std::vector& in_layers, - bool in_transient_command_buffer_allocs_only, - bool in_support_resettable_command_buffer_allocs); + void add_physical_device_features_to_chainer(Anvil::StructChainer* in_struct_chainer) const; + + void create_device(const std::vector& in_extensions, + const std::vector& in_layers, + DeviceQueueFamilyInfo* out_queue_families_ptr); + + std::vector get_queue_priorities(const QueueFamilyInfo* in_queue_family_info_ptr) const; + bool init (); BaseDevice& operator=(const BaseDevice&); BaseDevice (const BaseDevice&); @@ -570,67 +720,80 @@ namespace Anvil * @param out_device_queue_family_info_ptr Deref will be used to store the result info. Must not be null. 
* **/ - virtual void get_queue_family_indices_for_physical_device(std::weak_ptr in_physical_device_ptr, - DeviceQueueFamilyInfo* out_device_queue_family_info_ptr) const; + virtual void get_queue_family_indices_for_physical_device(const Anvil::PhysicalDevice* in_physical_device_ptr, + DeviceQueueFamilyInfo* out_device_queue_family_info_ptr) const; - virtual void create_device (const std::vector& in_extensions, - const std::vector& in_layers, - const VkPhysicalDeviceFeatures& in_features, - DeviceQueueFamilyInfo* out_queue_families_ptr) = 0; virtual void init_device () = 0; virtual bool is_layer_supported (const std::string& in_layer_name) const = 0; virtual bool is_physical_device_extension_supported(const std::string& in_extension_name) const = 0; /* Protected variables */ - std::vector > m_compute_queues; - DeviceQueueFamilyInfo m_device_queue_families; - std::vector > m_sparse_binding_queues; - std::vector > m_transfer_queues; - std::vector > m_universal_queues; + Anvil::DeviceCreateInfoUniquePtr m_create_info_ptr; + + std::vector m_compute_queues; + DeviceQueueFamilyInfo m_device_queue_families; + std::vector m_sparse_binding_queues; + std::vector m_transfer_queues; + std::vector m_universal_queues; + + std::vector > m_owned_queues; - std::map > > m_queue_fams; + std::map > m_queue_family_index_to_types; + std::map > m_queue_family_type_to_queue_family_indices; + std::map > m_queue_ptrs_per_vk_queue_fam; /* Protected variables */ - bool m_destroyed; VkDevice m_device; - ExtensionAMDDrawIndirectCountEntrypoints m_amd_draw_indirect_count_extension_entrypoints; - ExtensionEXTDebugMarkerEntrypoints m_ext_debug_marker_extension_entrypoints; - ExtensionKHRMaintenance1Entrypoints m_khr_maintenance1_extension_entrypoints; - ExtensionKHRSurfaceEntrypoints m_khr_surface_extension_entrypoints; - ExtensionKHRSwapchainEntrypoints m_khr_swapchain_extension_entrypoints; + ExtensionAMDBufferMarkerEntrypoints m_amd_buffer_marker_extension_entrypoints; + ExtensionAMDDrawIndirectCountEntrypoints m_amd_draw_indirect_count_extension_entrypoints; + ExtensionAMDShaderInfoEntrypoints m_amd_shader_info_extension_entrypoints; + ExtensionEXTDebugMarkerEntrypoints m_ext_debug_marker_extension_entrypoints; + ExtensionEXTExternalMemoryHostEntrypoints m_ext_external_memory_host_extension_entrypoints; + ExtensionEXTHdrMetadataEntrypoints m_ext_hdr_metadata_extension_entrypoints; + ExtensionEXTSampleLocationsEntrypoints m_ext_sample_locations_extension_entrypoints; + ExtensionEXTTransformFeedbackEntrypoints m_ext_transform_feedback_extension_entrypoints; + ExtensionKHRBindMemory2Entrypoints m_khr_bind_memory2_extension_entrypoints; + ExtensionKHRCreateRenderpass2Entrypoints m_khr_create_renderpass2_extension_entrypoints; + ExtensionKHRDescriptorUpdateTemplateEntrypoints m_khr_descriptor_update_template_extension_entrypoints; + ExtensionKHRDeviceGroupEntrypoints m_khr_device_group_extension_entrypoints; + ExtensionKHRDrawIndirectCountEntrypoints m_khr_draw_indirect_count_extension_entrypoints; + ExtensionKHRGetMemoryRequirements2Entrypoints m_khr_get_memory_requirements2_extension_entrypoints; + ExtensionKHRMaintenance1Entrypoints m_khr_maintenance1_extension_entrypoints; + ExtensionKHRMaintenance3Entrypoints m_khr_maintenance3_extension_entrypoints; + ExtensionKHRSamplerYCbCrConversionEntrypoints m_khr_sampler_ycbcr_conversion_extension_entrypoints; + ExtensionKHRSurfaceEntrypoints m_khr_surface_extension_entrypoints; + ExtensionKHRSwapchainEntrypoints m_khr_swapchain_extension_entrypoints; + + #if defined(_WIN32) + 
ExtensionKHRExternalFenceWin32Entrypoints m_khr_external_fence_win32_extension_entrypoints; + ExtensionKHRExternalMemoryWin32Entrypoints m_khr_external_memory_win32_extension_entrypoints; + ExtensionKHRExternalSemaphoreWin32Entrypoints m_khr_external_semaphore_win32_extension_entrypoints; + #else + ExtensionKHRExternalFenceFdEntrypoints m_khr_external_fence_fd_extension_entrypoints; + ExtensionKHRExternalMemoryFdEntrypoints m_khr_external_memory_fd_extension_entrypoints; + ExtensionKHRExternalSemaphoreFdEntrypoints m_khr_external_semaphore_fd_extension_entrypoints; + #endif private: + /* Private functions */ + bool init_dummy_dsg () const; + bool init_extension_func_ptrs(); + /* Private variables */ - bool m_amd_draw_indirect_count_enabled; - bool m_amd_gcn_shader_enabled; - bool m_amd_gpu_shader_half_float_enabled; - bool m_amd_gpu_shader_int16_enabled; - bool m_amd_negative_viewport_height_enabled; - bool m_amd_rasterization_order_enabled; - bool m_amd_shader_ballot_enabled; - bool m_amd_shader_explicit_vertex_parameter_enabled; - bool m_amd_shader_trinary_minmax_enabled; - bool m_amd_texture_gather_bias_lod_enabled; - bool m_ext_debug_marker_enabled; - bool m_ext_shader_subgroup_ballot_enabled; - bool m_ext_shader_subgroup_vote_enabled; - bool m_khr_16bit_storage_enabled; - bool m_khr_maintenance1_enabled; - bool m_khr_storage_buffer_storage_class_enabled; - bool m_khr_surface_enabled; - bool m_khr_swapchain_enabled; - - std::shared_ptr m_compute_pipeline_manager_ptr; - std::shared_ptr m_dummy_dsg_ptr; - std::vector m_enabled_extensions; - std::shared_ptr m_graphics_pipeline_manager_ptr; - std::shared_ptr m_parent_instance_ptr; - std::shared_ptr m_pipeline_cache_ptr; - std::shared_ptr m_pipeline_layout_manager_ptr; - uint32_t m_queue_family_index[Anvil::QUEUE_FAMILY_TYPE_COUNT]; - - std::shared_ptr m_command_pool_ptrs[Anvil::QUEUE_FAMILY_TYPE_COUNT]; + + + std::unique_ptr m_compute_pipeline_manager_ptr; + DescriptorSetLayoutManagerUniquePtr m_descriptor_set_layout_manager_ptr; + mutable Anvil::DescriptorSetGroupUniquePtr m_dummy_dsg_ptr; + mutable std::mutex m_dummy_dsg_mutex; + std::unique_ptr > m_extension_enabled_info_ptr; + GraphicsPipelineManagerUniquePtr m_graphics_pipeline_manager_ptr; + PipelineCacheUniquePtr m_pipeline_cache_ptr; + PipelineLayoutManagerUniquePtr m_pipeline_layout_manager_ptr; + Anvil::ShaderModuleCacheUniquePtr m_shader_module_cache_ptr; + + std::vector m_command_pool_ptr_per_vk_queue_fam; friend struct DeviceDeleter; }; @@ -641,92 +804,85 @@ namespace Anvil public: /* Public functions */ - /** Creates a new Vulkan Device instance using a user-specified physical device instance. - * - * To release a device instance, please call destroy(). + virtual ~SGPUDevice(); + + /** Creates a new Vulkan Device instance. * - * @param in_physical_device_ptr Physical device to create this device from. Must not be nullptr. - * @param in_extensions Tells which extensions must/should be specified at creation time. - * @param in_layers A vector of layer names to be used when creating the device. - * Can be empty. - * @param in_transient_command_buffer_allocs_only True if the command pools, which are going to be initialized after - * the device is created, should be set up for transient command buffer - * support. - * @param in_support_resettable_command_buffer_allocs True if the command pools should be configured for resettable command - * buffer support. + * @param in_create_info_ptr Create info structure. Must not be nullptr. * * @return A new Device instance. 
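A sketch of the new creation flow built around the create() factory documented above (the create info is assumed to have been filled out elsewhere via the Anvil::DeviceCreateInfo helpers from misc/device_create_info.h, which this hunk does not show):

    #include <utility>

    #include "wrappers/device.h"

    Anvil::BaseDeviceUniquePtr make_single_gpu_device(Anvil::DeviceCreateInfoUniquePtr create_info_ptr)
    {
        /* Ownership of the create info moves into the wrapper; the settings stay queryable afterwards. */
        Anvil::BaseDeviceUniquePtr device_ptr = Anvil::SGPUDevice::create(std::move(create_info_ptr) );

        const Anvil::DeviceCreateInfo* used_create_info_ptr = device_ptr->get_create_info_ptr();
        (void) used_create_info_ptr;

        return device_ptr;
    }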
**/ - static std::weak_ptr create(std::weak_ptr in_physical_device_ptr, - const DeviceExtensionConfiguration& in_extensions, - const std::vector& in_layers, - bool in_transient_command_buffer_allocs_only, - bool in_support_resettable_command_buffer_allocs); + static BaseDeviceUniquePtr create(Anvil::DeviceCreateInfoUniquePtr in_create_info_ptr); /** Creates a new swapchain instance for the device. * * @param in_parent_surface_ptr Rendering surface to create the swapchain for. Must not be nullptr. * @param in_window_ptr current window to create the swapchain for. Must not be nullptr. * @param in_image_format Format which the swap-chain should use. + * @param in_color_space TODO * @param in_present_mode Presentation mode which the swap-chain should use. * @param in_usage Image usage flags describing how the swap-chain is going to be used. * @param in_n_swapchain_images Number of images the swap-chain should use. * * @return A new Swapchain instance. **/ - std::shared_ptr create_swapchain(std::shared_ptr in_parent_surface_ptr, - std::shared_ptr in_window_ptr, - VkFormat in_image_format, - VkPresentModeKHR in_present_mode, - VkImageUsageFlags in_usage, - uint32_t in_n_swapchain_images); - - /** Releases all children queues and unregisters itself from the owning physical device. */ - virtual void destroy() override; + Anvil::SwapchainUniquePtr create_swapchain(Anvil::RenderingSurface* in_parent_surface_ptr, + Anvil::Window* in_window_ptr, + Anvil::Format in_image_format, + Anvil::ColorSpaceKHR in_color_space, + Anvil::PresentModeKHR in_present_mode, + ImageUsageFlags in_usage, + uint32_t in_n_swapchain_images); /** Retrieves a PhysicalDevice instance, from which this device instance was created. * * @return As per description **/ - std::weak_ptr get_physical_device() const + const Anvil::PhysicalDevice* get_physical_device() const { - return m_parent_physical_device_ptr; + return m_create_info_ptr->get_physical_device_ptrs().at(0); } - /** See documentation in BaseDevice for more details */ - const VkPhysicalDeviceFeatures& get_physical_device_features() const override; + bool get_physical_device_buffer_properties(const BufferPropertiesQuery& in_query, + Anvil::BufferProperties* out_opt_result_ptr = nullptr) const override; /** See documentation in BaseDevice for more details */ - const Anvil::FormatProperties& get_physical_device_format_properties(VkFormat in_format) const override; + const Anvil::PhysicalDeviceFeatures& get_physical_device_features() const override; + + bool get_physical_device_fence_properties(const FencePropertiesQuery& in_query, + Anvil::FenceProperties* out_opt_result_ptr = nullptr) const override; + + bool get_physical_device_semaphore_properties(const SemaphorePropertiesQuery& in_query, + Anvil::SemaphoreProperties* out_opt_result_ptr = nullptr) const override; + + Anvil::FormatProperties get_physical_device_format_properties(Anvil::Format in_format) const override; + + bool get_physical_device_image_format_properties(const ImageFormatPropertiesQuery& in_query, + Anvil::ImageFormatProperties* out_opt_result_ptr = nullptr) const override; /** See documentation in BaseDevice for more details */ - bool get_physical_device_image_format_properties(VkFormat in_format, - VkImageType in_type, - VkImageTiling in_tiling, - VkImageUsageFlags in_usage, - VkImageCreateFlags in_flags, - VkImageFormatProperties& out_result) const override; + Anvil::MultisamplePropertiesEXT get_physical_device_multisample_properties(const Anvil::SampleCountFlagBits& in_samples) const override; /** See 
documentation in BaseDevice for more details */ const Anvil::MemoryProperties& get_physical_device_memory_properties() const override; /** See documentation in BaseDevice for more details */ - const VkPhysicalDeviceProperties& get_physical_device_properties() const override; + const Anvil::PhysicalDeviceProperties& get_physical_device_properties() const override; /** See documentation in BaseDevice for more details */ const QueueFamilyInfoItems& get_physical_device_queue_families() const override; /** See documentation in BaseDevice for more details */ - bool get_physical_device_sparse_image_format_properties(VkFormat in_format, - VkImageType in_type, - VkSampleCountFlagBits in_sample_count, - VkImageUsageFlags in_usage, - VkImageTiling in_tiling, - std::vector& out_result) const override; + bool get_physical_device_sparse_image_format_properties(Anvil::Format in_format, + Anvil::ImageType in_type, + Anvil::SampleCountFlagBits in_sample_count, + ImageUsageFlags in_usage, + Anvil::ImageTiling in_tiling, + std::vector& out_result) const override; /** See documentation in BaseDevice for more details */ - bool get_physical_device_surface_capabilities(std::shared_ptr in_surface_ptr, - VkSurfaceCapabilitiesKHR* out_result_ptr) const override; + bool get_physical_device_surface_capabilities(Anvil::RenderingSurface* in_surface_ptr, + Anvil::SurfaceCapabilities* out_result_ptr) const override; /** See documentation in BaseDevice for more details */ const Anvil::QueueFamilyInfo* get_queue_family_info(uint32_t in_queue_family_index) const override; @@ -734,15 +890,11 @@ namespace Anvil /* Tells what type this device instance is */ DeviceType get_type() const override { - return DEVICE_TYPE_SINGLE_GPU; + return Anvil::DeviceType::SINGLE_GPU; } protected: /* Protected functions */ - void create_device (const std::vector& in_extensions, - const std::vector& in_layers, - const VkPhysicalDeviceFeatures& in_features, - DeviceQueueFamilyInfo* out_queue_families_ptr) override; void get_queue_family_indices (DeviceQueueFamilyInfo* out_device_queue_family_info_ptr) const; void init_device () override; bool is_layer_supported (const std::string& in_layer_name) const override; @@ -750,23 +902,191 @@ namespace Anvil private: /* Private type definitions */ - struct SGPUDeviceDeleter + + /* Private functions */ + + /** Private constructor. Please use create() instead. */ + SGPUDevice(Anvil::DeviceCreateInfoUniquePtr in_create_info_ptr); + + /* Private variables */ + }; + + /* TODO */ + class MGPUDevice : public BaseDevice + { + public: + /* Public functions */ + + virtual ~MGPUDevice(); + + /** TODO */ + static Anvil::BaseDeviceUniquePtr create(Anvil::DeviceCreateInfoUniquePtr in_create_info_ptr); + + /** TODO */ + Anvil::SwapchainUniquePtr create_swapchain(Anvil::RenderingSurface* in_parent_surface_ptr, + Anvil::Window* in_window_ptr, + Anvil::Format in_image_format, + Anvil::ColorSpaceKHR in_color_space, + Anvil::PresentModeKHR in_present_mode, + ImageUsageFlags in_usage, + uint32_t in_n_swapchain_images, + bool in_support_SFR, + Anvil::DeviceGroupPresentModeFlags in_presentation_modes_to_support = Anvil::DeviceGroupPresentModeFlagBits::LOCAL_BIT_KHR); + + /** TODO + * + * This function does NOT call the driver to retrieve the requested information. Instead, it returns + * information cached at mGPU device creation time. 
+ */ + bool get_peer_memory_features(const Anvil::PhysicalDevice* in_local_physical_device_ptr, + const Anvil::PhysicalDevice* in_remote_physical_device_ptr, + uint32_t in_memory_heap_index, + PeerMemoryFeatureFlags* out_result_ptr) const; + + /** TODO */ + uint32_t get_n_physical_devices() const { - void operator()(SGPUDevice* in_device_ptr) - { - delete in_device_ptr; - } + return static_cast(m_parent_physical_devices.size() ); + } + + /** TODO */ + const Anvil::PhysicalDevice* get_physical_device(uint32_t in_n_physical_device) const; + + bool get_physical_device_buffer_properties(const BufferPropertiesQuery& in_query, + Anvil::BufferProperties* out_opt_result_ptr = nullptr) const override; + + /** TODO */ + const Anvil::PhysicalDevice* const* get_physical_devices() const + { + return &m_parent_physical_devices_vec.at(0); + } + + /** TODO */ + const Anvil::PhysicalDeviceFeatures& get_physical_device_features() const override; + + bool get_physical_device_fence_properties(const FencePropertiesQuery& in_query, + Anvil::FenceProperties* out_opt_result_ptr = nullptr) const override; + + Anvil::FormatProperties get_physical_device_format_properties(Anvil::Format in_format) const override; + + bool get_physical_device_image_format_properties(const ImageFormatPropertiesQuery& in_query, + Anvil::ImageFormatProperties* out_opt_result_ptr = nullptr) const override; + + /** TODO */ + const Anvil::MemoryProperties& get_physical_device_memory_properties() const override; + + /** See documentation in BaseDevice for more details */ + Anvil::MultisamplePropertiesEXT get_physical_device_multisample_properties(const Anvil::SampleCountFlagBits& in_samples) const override; + + /** TODO */ + const Anvil::PhysicalDeviceProperties& get_physical_device_properties() const override; + + /** TODO */ + const QueueFamilyInfoItems& get_physical_device_queue_families() const override; + + bool get_physical_device_semaphore_properties(const SemaphorePropertiesQuery& in_query, + Anvil::SemaphoreProperties* out_opt_result_ptr = nullptr) const override; + + /** TODO */ + bool get_physical_device_sparse_image_format_properties(Anvil::Format in_format, + Anvil::ImageType in_type, + Anvil::SampleCountFlagBits in_sample_count, + ImageUsageFlags in_usage, + Anvil::ImageTiling in_tiling, + std::vector& out_result) const override; + + /** TODO */ + bool get_physical_device_surface_capabilities(Anvil::RenderingSurface* in_surface_ptr, + Anvil::SurfaceCapabilities* out_result_ptr) const override; + + /** Tells which physical devices can be parent to swapchain images that a physical device at device index @param in_device_index + * can present. + * + * By VK_KHR_device_group, *out_result_ptr is guaranteed to at least refer to the device with index @param in_device_index, + * as long as the physical device has a presentation engine and can be used for presentation purposes. + * + * This function is not a part of Anvil::PhysicalDevice, because the spec does not guarantee this data to be invariant between + * device instances. In cases where the app creates a sGPU device and a mGPU device, where both share the same physical device, + * the former device could be incorrectly assumed to be compatible with physical devices from an external device group. + * + * @param in_device_index TODO + * @param out_result_ptr TODO. Must not be NULL. + * + * @return true if successful, false otherwise. 
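A sketch of consuming the cached peer-memory information exposed above for a device group (assumes a valid Anvil::MGPUDevice* mgpu_device_ptr and a heap index that is valid for the group's memory properties):

    #include "wrappers/device.h"

    void inspect_peer_memory_features(const Anvil::MGPUDevice* mgpu_device_ptr,
                                      uint32_t                 memory_heap_index)
    {
        const uint32_t n_physical_devices = mgpu_device_ptr->get_n_physical_devices();

        for (uint32_t n_local = 0; n_local < n_physical_devices; ++n_local)
        {
            for (uint32_t n_remote = 0; n_remote < n_physical_devices; ++n_remote)
            {
                if (n_local == n_remote)
                {
                    continue;
                }

                Anvil::PeerMemoryFeatureFlags peer_memory_features;

                if (mgpu_device_ptr->get_peer_memory_features(mgpu_device_ptr->get_physical_device(n_local),
                                                              mgpu_device_ptr->get_physical_device(n_remote),
                                                              memory_heap_index,
                                                             &peer_memory_features) )
                {
                    /* Inspect copy src/dst and generic src/dst bits in peer_memory_features as needed. */
                }
            }
        }
    }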
+ **/ + bool get_present_compatible_physical_devices(uint32_t in_device_index, + const std::vector** out_result_ptr) const; + + /** TODO + * + * Note: The value returned by this function is NOT guaranteed to be invariant. + **/ + bool get_present_rectangles(uint32_t in_device_index, + const Anvil::RenderingSurface* in_rendering_surface_ptr, + std::vector* out_result_ptr) const; + + /** TODO */ + const Anvil::QueueFamilyInfo* get_queue_family_info(uint32_t in_queue_family_index) const override; + + /** TODO */ + Anvil::DeviceGroupPresentModeFlags get_supported_present_modes() const + { + return m_supported_present_modes; + } + + /** TODO + * + * Note: The value returned by this function is NOT guaranteed to be invariant. + * + */ + Anvil::DeviceGroupPresentModeFlags get_supported_present_modes_for_surface(const Anvil::RenderingSurface* in_surface_ptr) const; + + /* Tells what type this device instance is */ + DeviceType get_type() const override + { + return DeviceType::MULTI_GPU; + } + + /* Tells whether this logical device is a part of a device group which supports subset allocations. */ + bool supports_subset_allocations() const + { + return m_supports_subset_allocations; + } + + protected: + /* Protected functions */ + void get_queue_family_indices (DeviceQueueFamilyInfo* out_device_queue_family_info_ptr) const; + void init_device () override; + bool is_layer_supported (const std::string& in_layer_name) const override; + bool is_physical_device_extension_supported(const std::string& in_extension_name) const override; + + private: + /* Private type definitions */ + + struct ParentPhysicalDeviceProperties + { + typedef uint32_t HeapIndex; + typedef std::map HeapIndexToPeerMemoryFeaturesMap; + typedef uint32_t RemotePhysicalDeviceGroupIndex; + + const Anvil::PhysicalDevice* physical_device_ptr; + std::vector presentation_compatible_physical_devices; /* may be empty if the physical device does not have a presentation engine! */ + std::map peer_memory_features; }; /* Private functions */ - virtual ~SGPUDevice(); /** Private constructor. Please use create() instead. */ - SGPUDevice(std::weak_ptr in_physical_device_ptr); + MGPUDevice(Anvil::DeviceCreateInfoUniquePtr in_create_info_ptr); /* Private variables */ - std::weak_ptr m_parent_physical_device_ptr; + std::map m_device_index_to_physical_device_props; + std::vector m_parent_physical_devices; + std::vector m_parent_physical_devices_vec; + bool m_supports_subset_allocations; + + Anvil::DeviceGroupPresentModeFlags m_supported_present_modes; }; }; /* namespace Anvil */ -#endif /* WRAPPERS_DEVICE_H */ \ No newline at end of file +#endif /* WRAPPERS_DEVICE_H */ diff --git a/include/wrappers/event.h b/include/wrappers/event.h index 54357354..9216accd 100644 --- a/include/wrappers/event.h +++ b/include/wrappers/event.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -32,12 +32,14 @@ #define WRAPPERS_EVENT_H #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/types.h" namespace Anvil { /** Wrapper class for Vulkan events */ - class Event : public DebugMarkerSupportProvider + class Event : public DebugMarkerSupportProvider, + public MTSafetySupportProvider { public: /* Public functions */ @@ -45,10 +47,8 @@ namespace Anvil /** Creates a new Event instance. * * Creates a single Vulkan event instance and registers the object in Object Tracker. - * - * @param in_device_ptr Device to use. */ - static std::shared_ptr create(std::weak_ptr in_device_ptr); + static Anvil::EventUniquePtr create(Anvil::EventCreateInfoUniquePtr in_create_info_ptr); /** Destructor. * @@ -56,6 +56,11 @@ namespace Anvil **/ virtual ~Event(); + const Anvil::EventCreateInfo* get_create_info_ptr() const + { + return m_create_info_ptr.get(); + } + /** Retrieves a raw Vulkan handle for the underlying VkEvent instance. */ VkEvent get_event() const { @@ -89,16 +94,17 @@ namespace Anvil /* Private functions */ /* Constructor. */ - Event(std::weak_ptr in_device_ptr); + Event(Anvil::EventCreateInfoUniquePtr in_create_info_ptr); Event (const Event&); Event& operator=(const Event&); + bool init (); void release_event(); /* Private variables */ - std::weak_ptr m_device_ptr; - VkEvent m_event; + Anvil::EventCreateInfoUniquePtr m_create_info_ptr; + VkEvent m_event; }; }; /* namespace Anvil */ diff --git a/include/wrappers/fence.h b/include/wrappers/fence.h index 4e631221..00d2a972 100644 --- a/include/wrappers/fence.h +++ b/include/wrappers/fence.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -32,12 +32,14 @@ #define WRAPPERS_FENCE_H #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/types.h" namespace Anvil { /* Wrapper class for Vulkan fences */ - class Fence : public DebugMarkerSupportProvider + class Fence : public DebugMarkerSupportProvider, + public MTSafetySupportProvider { public: /* Public functions */ @@ -51,13 +53,25 @@ namespace Anvil /** Creates a new Fence instance. * * Creates a single Vulkan fence instance and registers the object in Object Tracker. + */ + static Anvil::FenceUniquePtr create(Anvil::FenceCreateInfoUniquePtr in_create_info_ptr); + + /* Creates a new external fence handle of the user-specified type. + * + * For NT handle types, the function can only be called once per each NT handle type. Subsequent + * calls will result in the function triggering an assertion failure and returning null. + * + * Returns nullptr if unsuccessful. * - * @param in_device_ptr Device to use. - * @param in_create_signalled true if the fence should be created as a signalled entity. - * False to make it unsignalled at creation time. + * Requires VK_KHR_external_fence_fd under Linux. + * Requires VK_KHR_external_fence_win32 under Windows. 
*/ - static std::shared_ptr create(std::weak_ptr in_device_ptr, - bool in_create_signalled); + ExternalHandleUniquePtr export_to_external_handle(const Anvil::ExternalFenceHandleTypeFlagBits& in_fence_handle_type); + + const Anvil::FenceCreateInfo* get_create_info_ptr() const + { + return m_create_info_ptr.get(); + } /** Retrieves a raw handle to the underlying Vulkan fence instance */ VkFence get_fence() const @@ -66,13 +80,38 @@ namespace Anvil } /** Retrieves a pointer to the raw handle to the underlying Vulkan fence instance */ - const VkFence* get_fence_ptr() + const VkFence* get_fence_ptr() const { - m_possibly_set = true; - return &m_fence; } + /* TODO + * + * Requires VK_KHR_external_fence_fd under Linux. + * Requires VK_KHR_external_fence_win32 under Windows. + * + * NOTE: Under Linux, @param in_handle is no longer valid if function returns true. + * + * @return true if successful, false otherwise. + * + * @param in_temporary_import True if a temporary import operation should be performed. False if + * a permanent import is being requested. + * @param in_handle_type Type of the handle that is being imported. + * @param in_handle (Linux): Handle to use. + * @param in_opt_handle (Windows): Handle to use. Must not be null if @param in_opt_name is null and vice versa. + * @param in_opt_name (Windows): Name of the handle to use. Must not be null if @param in_opt_handle is null and vice versa. + */ + #if defined(_WIN32) + bool import_from_external_handle(const bool& in_temporary_import, + const Anvil::ExternalFenceHandleTypeFlagBits& in_handle_type, + const ExternalHandleType& in_opt_handle, + const std::wstring& in_opt_name); + #else + bool import_from_external_handle(const bool& in_temporary_import, + const Anvil::ExternalFenceHandleTypeFlagBits& in_handle_type, + const ExternalHandleType& in_handle); + #endif + /** Tells whether the fence is signalled at the time of the call. * * @return true if the fence is set, false otherwise. @@ -106,19 +145,19 @@ namespace Anvil * * Please see documentation of create() for specification */ - Fence(std::weak_ptr in_device_ptr, - bool in_create_signalled); + Fence(Anvil::FenceCreateInfoUniquePtr in_create_info_ptr); Fence (const Fence&); Fence& operator=(const Fence&); + bool init (); void release_fence(); /* Private variables */ - std::weak_ptr m_device_ptr; - VkFence m_fence; - bool m_possibly_set; + Anvil::FenceCreateInfoUniquePtr m_create_info_ptr; + std::map m_external_fence_created_for_handle_type; + VkFence m_fence; }; }; /* namespace Anvil */ -#endif /* WRAPPERS_FENCE_H */ \ No newline at end of file +#endif /* WRAPPERS_FENCE_H */ diff --git a/include/wrappers/framebuffer.h b/include/wrappers/framebuffer.h index f3c6e032..a9ecb6e6 100644 --- a/include/wrappers/framebuffer.h +++ b/include/wrappers/framebuffer.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -30,103 +30,37 @@ #define WRAPPERS_FRAMEBUFFER_H #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/types.h" namespace Anvil { - class Framebuffer : public DebugMarkerSupportProvider + class Framebuffer : public DebugMarkerSupportProvider, + public MTSafetySupportProvider { public: /* Public functions */ - /** Constructor. 
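A sketch of the export path added to fence.h above, for the non-Windows branch (assumes fence_ptr belongs to a device created with VK_KHR_external_fence_fd enabled and with export of the opaque-fd handle type requested in the fence create info; the OPAQUE_FD_BIT enumerator name is assumed by analogy with the external-memory flags shown earlier in device.h):

    #include "wrappers/fence.h"

    #if !defined(_WIN32)
    Anvil::ExternalHandleUniquePtr export_fence_payload_as_fd(Anvil::Fence* fence_ptr)
    {
        /* Returns nullptr on failure. For NT handle types this may only be called once per handle type. */
        return fence_ptr->export_to_external_handle(Anvil::ExternalFenceHandleTypeFlagBits::OPAQUE_FD_BIT);
    }
    #endif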
- * - * Verifies input arguments and initializes the internal ref counter to 1. - * - * @param in_device_ptr Device to use. - * @param in_width Framebuffer width. Must be at least 1. - * @param in_height Framebuffer height. Must be at least 1. - * @param in_n_layers Number of layers the framebuffer should use. Must be at least 1. - **/ - static std::shared_ptr create(std::weak_ptr in_device_ptr, - uint32_t in_width, - uint32_t in_height, - uint32_t in_n_layers); + static Anvil::FramebufferUniquePtr create(Anvil::FramebufferCreateInfoUniquePtr in_create_info_ptr); /** Destructor. * * Destroys baked Vulkan counterparts and unregisters the wrapper instance from the object tracker. **/ - virtual ~Framebuffer(); - - /** Adds a new attachment to the framebuffer. - * - * If this function succeeds, the maintained framebuffer objects will NOT be automatically - * re-created. Instead, the user should either call bake() at a suitable moment, or expect - * it to be called at the next get_framebuffer() call. - * - * @param in_image_view_ptr Image view instance to use as a framebuffer attachment. - * Must not be nullptr. This object will be retained. - * @param out_opt_attachment_id_ptr If not nullptr, deref will be set to a unique ID, under which - * the attachment can be accessed. - * - * @return true if the function was successful, false otherwise. - **/ - bool add_attachment(std::shared_ptr in_image_view_ptr, - FramebufferAttachmentID* out_opt_attachment_id_ptr); - - /** Re-creates a Vulkan framebuffer object for the specified render pass instance. If a FB - * has already been created in the past, the instance will be released. - * - * At least one attachment must be defined for this function to succeed. - * - * @param in_render_pass_ptr Render pass instance to create the new Vulkan framebuffer object - * for. Must not be nullptr. - * - * @return true if the function was successful, false otherwise. - * */ - bool bake(std::shared_ptr in_render_pass_ptr); - - /* Returns an attachment at user-specified index */ - bool get_attachment_at_index(uint32_t in_attachment_index, - std::shared_ptr* out_image_view_ptr) const; - - /** Checks if an attachment has already been created for the specified image view and, if so, - * returns the attachment's ID. - * - * @param in_image_view_ptr Image view to use for the query. Must not be nullptr. - * @param out_attachment_id_ptr If the function executes successfully, deref will be set to - * the found attachment's ID, in which case the pointer must not - * be nullptr. Otherwise, ignored. - * - * @return true if successful (eg. the attachment corresponding to the specified image view was - * found), false otherwise. - **/ - bool get_attachment_id_for_image_view(std::shared_ptr in_image_view_ptr, - FramebufferAttachmentID* out_attachment_id_ptr); + ~Framebuffer(); - /** Returns the number of attachments defined for the framebuffer. */ - uint32_t get_n_attachments() const + const Anvil::FramebufferCreateInfo* get_create_info_ptr() const { - return static_cast(m_attachments.size() ); + return m_create_info_ptr.get(); } /** Returns a Vulkan framebuffer object instance for the specified render pass instance. - * - * If the object needs to be baked (because an attachment has been added since the last - * baking time, or if the object is being requested for the first time), a bake() call - * will be made automatically. * * @param in_render_pass_ptr Render pass to return the framebuffer for. * * @return Raw Vulkan framebuffer handle or VK_NULL_HANDLE, if the function failed. 
**/ - const VkFramebuffer get_framebuffer(std::shared_ptr in_render_pass_ptr); - - /** Returns framebuffer size, specified at creation time */ - void get_size(uint32_t* out_framebuffer_width_ptr, - uint32_t* out_framebuffer_height_ptr, - uint32_t* out_framebuffer_depth_ptr) const; + VkFramebuffer get_framebuffer(Anvil::RenderPass* in_render_pass_ptr); private: /* Private type declarations */ @@ -142,63 +76,39 @@ namespace Anvil } } BakedFramebufferData; - typedef struct FramebufferAttachment - { - std::shared_ptr image_view_ptr; - - /** Constructor. Retains the input image view instance. - * - * @param in_image_view_ptr Image view instance to use for the FB attachment. Must not - * be nullptr. - **/ - FramebufferAttachment(std::shared_ptr in_image_view_ptr); - - /** Destructor. Releases the encapsulated image view instance. */ - ~FramebufferAttachment(); - - /** Copy constructor */ - FramebufferAttachment(const FramebufferAttachment& in); - - /** Assignment operator */ - FramebufferAttachment& operator=(const FramebufferAttachment& in); - - /** Returns true if the encapsulated image view instance is the same as the one - * specified uner @param in_image_view_ptr argument. - **/ - bool operator==(std::shared_ptr in_image_view_ptr) const - { - return (image_view_ptr == in_image_view_ptr); - } - } FramebufferAttachment; - struct RenderPassComparator { - bool operator()(std::shared_ptr in_a_ptr, - std::shared_ptr in_b_ptr) const; + bool operator()(Anvil::RenderPass* in_a_ptr, + Anvil::RenderPass* in_b_ptr) const; }; - typedef std::map, BakedFramebufferData, RenderPassComparator> BakedFramebufferMap; - typedef std::vector FramebufferAttachments; + typedef std::map BakedFramebufferMap; /* Private functions */ /* Constructor. Please see specification of create() for more details */ - Framebuffer(std::weak_ptr in_device_ptr, - uint32_t in_width, - uint32_t in_height, - uint32_t in_n_layers); + Framebuffer(Anvil::FramebufferCreateInfoUniquePtr in_create_info_ptr); Framebuffer& operator=(const Framebuffer&); Framebuffer (const Framebuffer&); - void on_renderpass_changed(Anvil::CallbackArgument* in_callback_argument_ptr); + /** Re-creates a Vulkan framebuffer object for the specified render pass instance. If a FB + * has already been created in the past, the instance will be released. + * + * At least one attachment must be defined for this function to succeed. + * + * @param in_render_pass_ptr Render pass instance to create the new Vulkan framebuffer object + * for. Must not be nullptr. + * + * @return true if the function was successful, false otherwise. + * */ + bool bake(Anvil::RenderPass* in_render_pass_ptr); /* Private members */ - FramebufferAttachments m_attachments; - BakedFramebufferMap m_baked_framebuffers; - std::weak_ptr m_device_ptr; - uint32_t m_framebuffer_size[3]; + BakedFramebufferMap m_baked_framebuffers; + Anvil::FramebufferCreateInfoUniquePtr m_create_info_ptr; + const Anvil::BaseDevice* m_device_ptr; }; }; -#endif /* WRAPPERS_FRAMEBUFFER_H */ \ No newline at end of file +#endif /* WRAPPERS_FRAMEBUFFER_H */ diff --git a/include/wrappers/graphics_pipeline_manager.h b/include/wrappers/graphics_pipeline_manager.h index 62f14cc2..5f6f916f 100644 --- a/include/wrappers/graphics_pipeline_manager.h +++ b/include/wrappers/graphics_pipeline_manager.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
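The framebuffer.h hunk above moves attachment definition into Anvil::FramebufferCreateInfo and demotes bake() to an internal detail of get_framebuffer(). A sketch of the resulting creation flow follows; Framebuffer::create() and get_framebuffer() match the signatures shown, while the FramebufferCreateInfo::create() factory and its add_attachment() helper are assumptions, since that header is not part of this hunk.

// Sketch: new-style framebuffer creation.
#include "wrappers/framebuffer.h"

Anvil::FramebufferUniquePtr create_color_framebuffer(const Anvil::BaseDevice* in_device_ptr,
                                                     Anvil::ImageView*        in_color_view_ptr,
                                                     Anvil::RenderPass*       in_render_pass_ptr,
                                                     uint32_t                 in_width,
                                                     uint32_t                 in_height)
{
    /* Attachments are now described up-front on the create info (factory + helper assumed). */
    auto fb_create_info_ptr = Anvil::FramebufferCreateInfo::create(in_device_ptr,
                                                                   in_width,
                                                                   in_height,
                                                                   1 /* in_n_layers */);

    fb_create_info_ptr->add_attachment(in_color_view_ptr,
                                       nullptr /* out_opt_attachment_id_ptr */);

    auto framebuffer_ptr = Anvil::Framebuffer::create(std::move(fb_create_info_ptr) );

    /* get_framebuffer() lazily bakes (or reuses) the VkFramebuffer for this render pass
     * and returns VK_NULL_HANDLE on failure. */
    if (framebuffer_ptr->get_framebuffer(in_render_pass_ptr) == VK_NULL_HANDLE)
    {
        framebuffer_ptr.reset();
    }

    return framebuffer_ptr;
}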
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -29,48 +29,13 @@ * - pipeline properties are assigned default values, as described below. They can be * adjusted by calling relevant entrypoints, prior to baking. * - * Each baked graphics pipeline is configured as below at Pipeline object creation time: - * - * All rendering modes & tests: disabled - * Blend constant: vec4(0.0) - * Cull mode: VK_CULL_MODE_BACK - * Depth bias: 0.0 - * Depth bias clamp: 0.0 - * Depth bias slope factor: 1.0 - * Depth test compare op: VK_COMPARE_OP_ALWAYS - * Depth writes: disabled - * Dynamic states: all disabled - * Fill mode: VK_FILL_MODE_SOLID - * Front face: VK_FRONT_FACE_CCW - * Line width: 1.0 - * Logic op: VK_LOGIC_OP_NOOP; - * Max depth boundary: 1.0 - * Min depth boundary: 0.0 - * Min sample shading: 1.0 - * Number of raster samples: 1 - * Number of tessellation patches: 0 - * Primitive topology: VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST - * Sample mask: 0xFFFFFFFF - * Slope scaled depth bias: 0.0 - * Stencil comparison mask (back/front): 0xFFFFFFFF - * Stencil comparison op (back/front): VK_COMPARE_OP_ALWAYS - * Stencil depth fail op (back/front): VK_STENCIL_OP_KEEP - * Stencil fail op (back/front): VK_STENCIL_OP_KEEP - * Stencil pass op (back/front): VK_STENCIL_OP_KEEP - * Stencil reference value (back/front): 0 - * Stencil write mask (back/front): 0xFFFFFFFF - * - * If no scissor or viewport is defined explicitly, one scissor box and one viewport, - * covering the whole screen, will be created at baking time. - * - * If VK_AMD_rasterization_order extension is supported, strict rasterization order is assumed - * for the pipeline by default. - * **/ #ifndef WRAPPERS_GRAPHICS_PIPELINE_MANAGER_H #define WRAPPERS_GRAPHICS_PIPELINE_MANAGER_H #include "misc/base_pipeline_manager.h" +#include "misc/graphics_pipeline_create_info.h" +#include "misc/struct_chainer.h" #include "misc/types.h" #include "wrappers/render_pass.h" #include @@ -81,40 +46,21 @@ namespace Anvil { public: /* Public type definitions */ - typedef enum - { - DYNAMIC_STATE_BLEND_CONSTANTS_BIT = 1 << 0, - DYNAMIC_STATE_DEPTH_BIAS_BIT = 1 << 1, - DYNAMIC_STATE_DEPTH_BOUNDS_BIT = 1 << 2, - DYNAMIC_STATE_LINE_WIDTH_BIT = 1 << 3, - DYNAMIC_STATE_SCISSOR_BIT = 1 << 4, - DYNAMIC_STATE_STENCIL_COMPARE_MASK_BIT = 1 << 5, - DYNAMIC_STATE_STENCIL_REFERENCE_BIT = 1 << 6, - DYNAMIC_STATE_STENCIL_WRITE_MASK_BIT = 1 << 7, - DYNAMIC_STATE_VIEWPORT_BIT = 1 << 8, - } DynamicStateBits; - - typedef uint32_t DynamicStateBitfield; - /* Prototype for a call-back function, invoked right after vkCreateGraphicsPipelines() call returns. **/ - typedef std::function in_device_ptr, - GraphicsPipelineID in_pipeline_id)> PipelinePostBakeFunction; + /* Public functions */ - /* Prototype for a call-back function, invoked before a Vulkan graphics pipeline is created. + /** Generates a VkPipeline instance for each outstanding pipeline object. * - * The caller can adjust any of the VkGraphicsPipelineCreateInfo fields, with an assumption - * Anvil::GraphicsPipelineManager will not adjust its internal state to sync with user-modified - * fields. + * @return true if successful, false otherwise. 
**/ - typedef std::function in_device_ptr, - GraphicsPipelineID in_pipeline_id, - VkGraphicsPipelineCreateInfo* in_graphics_pipeline_create_info_ptr)> PipelinePreBakeFunction; + bool bake(); - /* Public functions */ + bool delete_pipeline(PipelineID in_pipeline_id); /** Creates a new GraphicsPipelineManager instance. * * @param in_device_ptr Device to use. + * @param in_mt_safe True if more than one thread at a time is going to be issuing calls against the pipeline manager. * @param in_use_pipeline_cache true if the manager should use a pipeline cache instance. false * to pass nullptr whenever a Vulkan descriptor requires us to specify * one. @@ -123,1534 +69,108 @@ namespace Anvil * If one is not provided and the other argument is set as described, * a new pipeline cache with size 0 will be allocated. **/ - static std::shared_ptr create(std::weak_ptr in_device_ptr, - bool in_use_pipeline_cache = false, - std::shared_ptr in_pipeline_cache_to_reuse_ptr = std::shared_ptr() ); + static GraphicsPipelineManagerUniquePtr create(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe, + bool in_use_pipeline_cache = false, + Anvil::PipelineCache* in_pipeline_cache_to_reuse_ptr = nullptr); /** Destructor. */ virtual ~GraphicsPipelineManager(); - /** Adds a new derivative pipeline, to be used the specified renderpass' subpass. All states of the base pipeline - * will be copied to the new pipeline. - * - * @param in_disable_optimizations If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT flag. - * @param in_allow_derivatives If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag. - * @param in_renderpass_ptr Renderpass instance the pipeline is to be created for. Must not be nullptr. - * The renderpass instance will be implicitly retained. - * @param in_subpass_id ID of the subpass the pipeline is to be created for. - * @param in_fragment_shader_stage_entrypoint_info Fragment shader to use for the pipeline. Pass an instance initialized with - * dummy constructor if the stage is irrelevant. - * @param in_geometry_shader_stage_entrypoint_info Geometry shader to use for the pipeline. Pass an instance initialized with - * dummy constructor if the stage is irrelevant. - * @param in_tess_control_shader_stage_entrypoint_info Tessellation control shader to use for the pipeline. Pass an instance initialized with - * dummy constructor if the stage is irrelevant. - * @param in_tess_evaluation_shader_stage_entrypoint_info Tessellation evaluation shader to use for the pipeline. Pass an instance initialized - * with dummy constructor if the stage is irrelevant. - * @param in_vertex_shader_stage_entrypoint_info Vertex shader to use for the pipeline. Must not be a dummy shader stage entrypoint - * descriptor. - * @param in_base_pipeline_id Graphics pipeline ID of the base graphics pipeline. The ID must have been returned - * by one of the preceding add_() calls, issued against the same GraphicsPipelineManager - * instance. Must not refer to a proxy pipeline. - * @param out_graphics_pipeline_id_ptr Deref will be set to the new graphics pipeline's ID. Must not be nullptr. 
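A sketch of the manager's new lifetime, based on the create(), bake() and delete_pipeline() signatures added above. Pipeline registration now goes through misc/graphics_pipeline_create_info.h rather than the removed add_*_pipeline() family, so the add_pipeline() entrypoint used here is an assumption.

// Sketch: manager creation and one-shot baking under the create-info model.
#include "misc/graphics_pipeline_create_info.h"
#include "wrappers/graphics_pipeline_manager.h"

Anvil::GraphicsPipelineManagerUniquePtr build_pipeline(const Anvil::BaseDevice*                   in_device_ptr,
                                                       Anvil::GraphicsPipelineCreateInfoUniquePtr in_pipeline_create_info_ptr,
                                                       Anvil::PipelineID*                         out_pipeline_id_ptr)
{
    auto manager_ptr = Anvil::GraphicsPipelineManager::create(in_device_ptr,
                                                              true,  /* in_mt_safe            */
                                                              true); /* in_use_pipeline_cache */

    /* Pipeline state now arrives as a create-info object (registration entrypoint assumed) .. */
    manager_ptr->add_pipeline(std::move(in_pipeline_create_info_ptr),
                              out_pipeline_id_ptr);

    /* .. and a single bake() call generates VkPipeline instances for all outstanding pipelines.
     * delete_pipeline(*out_pipeline_id_ptr) can later retire the pipeline again. */
    manager_ptr->bake();

    return manager_ptr;
}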
- **/ - bool add_derivative_pipeline_from_sibling_pipeline(bool in_disable_optimizations, - bool in_allow_derivatives, - std::shared_ptr in_renderpass_ptr, - SubPassID in_subpass_id, - const ShaderModuleStageEntryPoint& in_fragment_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_geometry_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_tess_control_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_tess_evaluation_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_vertex_shader_shader_stage_entrypoint_info, - GraphicsPipelineID in_base_pipeline_id, - GraphicsPipelineID* out_graphics_pipeline_id_ptr); - - /** Adds a new derivative pipeline, to be used the specified renderpass' subpass. Since the base pipeline is - * a Vulkan object, its state values are NOT automatically copied to the new derivative pipeline. - * - * @param in_disable_optimizations If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT flag. - * @param in_allow_derivatives If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag. - * @param in_renderpass_ptr Renderpass instance the pipeline is to be created for. Must not be nullptr. - * The renderpass instance will be implicitly retained. - * @param in_subpass_id ID of the subpass the pipeline is to be created for. - * @param in_fragment_shader_stage_entrypoint_info Fragment shader to use for the pipeline. Pass an instance initialized with - * dummy constructor if the stage is irrelevant. - * @param in_geometry_shader_stage_entrypoint_info Geometry shader to use for the pipeline. Pass an instance initialized with - * dummy constructor if the stage is irrelevant. - * @param in_tess_control_shader_stage_entrypoint_info Tessellation control shader to use for the pipeline. Pass an instance initialized with - * dummy constructor if the stage is irrelevant. - * @param in_tess_evaluation_shader_stage_entrypoint_info Tessellation evaluation shader to use for the pipeline. Pass an instance initialized - * with dummy constructor if the stage is irrelevant. - * @param in_vertex_shader_stage_entrypoint_info Vertex shader to use for the pipeline. Must not be a dummy shader stage descriptor. - * @param in_base_pipeline Base graphics pipeline. Must not be nullptr. - * @param out_graphics_pipeline_id_ptr Deref will be set to the new graphics pipeline's ID. Must not be nullptr. - **/ - bool add_derivative_pipeline_from_pipeline(bool in_disable_optimizations, - bool in_allow_derivatives, - std::shared_ptr in_renderpass_ptr, - SubPassID in_subpass_id, - const ShaderModuleStageEntryPoint& in_fragment_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_geometry_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_tess_control_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_tess_evaluation_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_vertex_shader_shader_stage_entrypoint_info, - VkPipeline in_base_pipeline, - GraphicsPipelineID* out_graphics_pipeline_id_ptr); - - /** Registers a new proxy pipeline. A proxy pipeline cannot be baked, but it can hold state data and act - * as a parent for other pipelines, which inherit their state at creation time. - * - * @param out_proxy_graphics_pipeline_id_ptr Deref will be set to the ID of the result pipeline object. - * Must not be nullptr. - * - * @return true if successful, false otherwise. 
- **/ - bool add_proxy_pipeline(GraphicsPipelineID* out_proxy_graphics_pipeline_id_ptr); - - /** Adds a new non-derivative pipeline, to be used the specified renderpass' subpass. - * - * @param in_disable_optimizations If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT flag. - * @param in_allow_derivatives If true, the pipeline will be created with the - * VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag. - * @param in_renderpass_ptr Renderpass instance the pipeline is to be created for. Must not be nullptr. - * The renderpass instance will be implicitly retained. - * @param in_subpass_id ID of the subpass the pipeline is to be created for. - * @param in_fragment_shader_stage_entrypoint_info Fragment shader to use for the pipeline. Pass an instance initialized with - * dummy constructor if the stage is irrelevant. - * @param in_geometry_shader_stage_entrypoint_info Geometry shader to use for the pipeline. Pass an instance initialized with - * dummy constructor if the stage is irrelevant. - * @param in_tess_control_shader_stage_entrypoint_info Tessellation control shader to use for the pipeline. Pass an instance initialized with - * dummy constructor if the stage is irrelevant. - * @param in_tess_evaluation_shader_stage_entrypoint_info Tessellation evaluation shader to use for the pipeline. Pass an instance initialized - * with dummy constructor if the stage is irrelevant. - * @param in_vertex_shader_stage_entrypoint_info Vertex shader to use for the pipeline. Must not be a dummy shader stage descriptor. - * @param out_graphics_pipeline_id_ptr Deref will be set to the new graphics pipeline's ID. Must not be nullptr. - **/ - bool add_regular_pipeline(bool in_disable_optimizations, - bool in_allow_derivatives, - std::shared_ptr in_renderpass_ptr, - SubPassID in_subpass_id, - const ShaderModuleStageEntryPoint& in_fragment_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_geometry_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_tess_control_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_tess_evaluation_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_vertex_shader_shader_stage_entrypoint_info, - GraphicsPipelineID* out_graphics_pipeline_id_ptr); - - /** Deletes an existing pipeline. - * - * @param in_graphics_pipeline_id ID of a pipeline to delete. - * - * @return true if successful, false otherwise. - **/ - bool delete_pipeline(GraphicsPipelineID in_graphics_pipeline_id); - - /** Adds a new vertex attribute descriptor to the specified graphics pipeline. This data will be used - * at baking time to configure input vertex attribute & bindings for the Vulkan pipeline object. - * - * By default, Anvil only assigns a unique binding to those vertex attributes, whose characteristics - * are unique (ie. whose input rate & stride match). This works well for most of the use cases, the - * only exception being when you need to associate a unique offset to a specific vertex binding. In - * this case, you need to set @param in_explicit_binding_index to an index, under which your exclusive - * binding is going to be stored. - * When preparing the binding array, Anvil will not reuse user-specified "explicit" bindings for - * attributes, for which "explicit" bindings have not been requested, even if their properties match. - * - * - * @param in_graphics_pipeine_id ID of the graphics pipeline object, whose state should be changed. 
The ID - * must have been returned by one of the add_() functions, issued against the - * same GraphicsPipelineManager instance. - * @param in_location Vertex attribute location - * @param in_format Vertex attribute format. - * @param in_offset_in_bytes Start offset of the vertex attribute data. - * @param in_stride_in_bytes Stride of the vertex attribute data. - * @param in_step_rate Step rate to use for the vertex attribute data. - * @param in_explicit_binding_index Please see general description of the function for more details. - * - * @return true if successful, false otherwise. - **/ - bool add_vertex_attribute(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_location, - VkFormat in_format, - uint32_t in_offset_in_bytes, - uint32_t in_stride_in_bytes, - VkVertexInputRate in_step_rate, - uint32_t in_explicit_binding_index = UINT32_MAX); - - /** Generates a VkPipeline instance for each pipeline object marked as dirty. If a dirty pipeline - * has already been baked in the past, the former object instance is released. - * - * @return true if successful, false otherwise. - **/ - bool bake(); - - /** Tells whether the graphics pipeline has been created with enabled alpha-to-coverage mode. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_is_enabled_ptr If not NULL, deref will be set to true if the mode has been - * enabled for the pipeline, or to false otherwise. - * - * @return true if successful, false otherwise. - **/ - bool get_alpha_to_coverage_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr) const; - - /** Tells whether the graphics pipeline has been created with enabled alpha-to-one mode. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_is_enabled_ptr If not NULL, deref will be set to true if the mode has been - * enabled for the pipeline, or to false otherwise. - * - * @return true if successful, false otherwise. - **/ - bool get_alpha_to_one_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr) const; - - /** Retrieves blending properties of the specified graphics pipeline. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_blend_constant_vec4_ptr If not NULL, deref will be assigned four float values - * representing the blend constant. - * @param out_opt_n_blend_attachments_ptr If not NULL, deref will be set to the number of blend - * attachments the graphics pipeline supports. - * - * @return true if successful, false otherwise - **/ - bool get_blending_properties(GraphicsPipelineID in_graphics_pipeline_id, - float* out_opt_blend_constant_vec4_ptr, - uint32_t* out_opt_n_blend_attachments_ptr) const; - - /** Retrieves color blend attachment properties for the specified graphics pipeline and subpass - * attachment. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param in_attachment_id ID of the attachment the query is being made for. - * @param out_opt_blending_enabled_ptr If not null, deref will be set to true if blending has been - * enabled for this pipeline, or to false otherwise. - * @param out_opt_blend_op_color_ptr If not null, deref will be set to the specified attachment's - * color blend op. - * @param out_opt_blend_op_alpha_ptr If not null, deref will be set to the specified attachment's - * alpha blend op. 
- * @param out_opt_src_color_blend_factor_ptr If not null, deref will be set to the specified attachment's - * source color blend factor. - * @param out_opt_dst_color_blend_factor_ptr If not null, deref will be set to the specified attachment's - * destination color blend factor. - * @param out_opt_src_alpha_blend_factor_ptr If not null, deref will be set to the specified attachment's - * source alpha blend factor. - * @param out_opt_dst_alpha_blend_factor_ptr If not null, deref will be set to the specified attachment's - * destination alpha blend factor. - * @param out_opt_channel_write_mask_ptr If not null, deref will be set to the specified attachment's - * write mask. - * - * @return true if successful, false otherwise. - */ - bool get_color_blend_attachment_properties(GraphicsPipelineID in_graphics_pipeline_id, - SubPassAttachmentID in_attachment_id, - bool* out_opt_blending_enabled_ptr, - VkBlendOp* out_opt_blend_op_color_ptr, - VkBlendOp* out_opt_blend_op_alpha_ptr, - VkBlendFactor* out_opt_src_color_blend_factor_ptr, - VkBlendFactor* out_opt_dst_color_blend_factor_ptr, - VkBlendFactor* out_opt_src_alpha_blend_factor_ptr, - VkBlendFactor* out_opt_dst_alpha_blend_factor_ptr, - VkColorComponentFlags* out_opt_channel_write_mask_ptr) const; - - /** Retrieves depth bias-related state configuration for the specified graphics pipeline. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_is_enabled_ptr If not null, deref will be set to true if depth bias has - * been enabled for the pipeline, or to false otherwise. - * @param out_opt_depth_bias_constant_factor_ptr If not null, deref will be set to the depth bias constant factor - * assigned to the pipeline. - * @param out_opt_depth_bias_clamp_ptr If not null, deref will be set to the depth bias clamp value, as - * assigned to the pipeline. - * @param out_opt_depth_bias_slope_factor_ptr If not null, deref will be set to the depth bias slope factor, as - * configured for the pipeline. - * - * @return true if successful, false otherwise. - **/ - bool get_depth_bias_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr, - float* out_opt_depth_bias_constant_factor_ptr, - float* out_opt_depth_bias_clamp_ptr, - float* out_opt_depth_bias_slope_factor_ptr) const; - - /** Retrieves depth bounds-related state configuration for the specified graphics pipeline. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_is_enabled_ptr If not null, deref will be set to true if depth bounds mode - * has been enabled for the pipeline, or to false otherwise. - * @param out_opt_min_depth_bounds_ptr If not null, deref will be set to the minimum depth bound value - * assigned to the pipeline. - * @param out_opt_max_depth_bounds_ptr If not null, deref will be set to the maximum depth bound value - * assigned to the pipeline. - * - * @return true if successful, false otherwise. - **/ - bool get_depth_bounds_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr, - float* out_opt_min_depth_bounds_ptr, - float* out_opt_max_depth_bounds_ptr) const; - - /** Tells whether the graphics pipeline has been created with enabled depth clamping. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_is_enabled_ptr If not NULL, deref will be set to true if depth clampnig has - * been enabled for the pipeline, or to false otherwise. 
- * - * @return true if successful, false otherwise. - **/ - bool get_depth_clamp_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr) const; - - /** Retrieves depth test-related state configuration for the specified graphics pipeline. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_is_enabled_ptr If not null, deref will be set to true if depth test has been - * enabled for the pipeline, or to false otherwise. - * @param out_opt_compare_op_ptr If not null, deref will be set to the depth compare op assigned - * to the pipeline. - * - * @return true if successful, false otherwise. - **/ - bool get_depth_test_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr, - VkCompareOp* out_opt_compare_op_ptr) const; - - /** Tells whether the graphics pipeline has been created with enabled depth writes. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_is_enabled_ptr If not NULL, deref will be set to true if depth writes have - * been enabled for the pipeline, or to false otherwise. - * - * @return true if successful, false otherwise. - **/ - bool get_depth_write_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr) const; - - /** Tells the number of dynamic scissor boxes, as specified at graphics pipeline instantiation time - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_n_dynamic_scissor_boxes_ptr If not null, deref will be set to the number of dynamic - * scissor boxes used by the pipeline. - * - * @return true if successful, false otherwise. - * - **/ - bool get_dynamic_scissor_state_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t* out_opt_n_dynamic_scissor_boxes_ptr) const; - - /** Tells what dynamic states have been enabled for the specified graphics pipeline. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_enabled_dynamic_states_ptr If not null, deref will be updated with a bitfield value, - * telling which dynamic states have been enabled for the pipeline. - * - * @return true if successful, false otherwise. - **/ - bool get_dynamic_states(GraphicsPipelineID in_graphics_pipeline_id, - DynamicStateBitfield* out_opt_enabled_dynamic_states_ptr) const; - - /** Tells the number of dynamic viewports, as specified at graphics pipeline instantiation time - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_n_dynamic_viewports_ptr If not null, deref will be set to the number of dynamic viewports - * used by the pipeline. - * - * @return true if successful, false otherwise. - * - **/ - bool get_dynamic_viewport_state_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t* out_n_dynamic_viewports_ptr) const; - - /** Retrieves general pipeline properties - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_n_scissors_ptr If not null, deref will be set to the number of scissors used - * by the pipeline. - * @param out_opt_n_viewports_ptr If not null, deref will be set to the number of viewports used - * by the pipeline. - * @param out_opt_n_vertex_input_attributes_ptr If not null, deref will be set to the number of vertex - * attributes used by the pipeline. 
- * @param out_opt_n_vertex_input_bindings_ptr If not null, deref will be set to the number of vertex bindings - * defined for the pipeline. - * @param out_opt_renderpass_ptr If not null, deref will be set to the renderpass assigned to - * the pipeline. - * @param out_opt_subpass_id_ptr If not null, deref will be set to the ID of the subpass the pipeline - * has been created for. - * - * @return true if successful, false otherwise. - **/ - bool get_graphics_pipeline_properties(Anvil::GraphicsPipelineID in_graphics_pipeline_id, - uint32_t* out_opt_n_scissors_ptr, - uint32_t* out_opt_n_viewports_ptr, - uint32_t* out_opt_n_vertex_input_attributes_ptr, - uint32_t* out_opt_n_vertex_input_bindings_ptr, - std::shared_ptr* out_opt_renderpass_ptr, - SubPassID* out_opt_subpass_id_ptr) const; - - /** Returns a VkPipeline instance for the specified graphics pipeline ID. If the pipeline is marked as dirty, - * the Vulkan object will be created before returning after former instance is released. - * - * @param in_pipeline_id ID of the pipeline to return the VkPipeline instance for. Must not describe - * a proxy pipeline. - * - * @return As per description. - **/ - VkPipeline get_graphics_pipeline(GraphicsPipelineID in_pipeline_id); - - /** Retrieves a PipelineLayout instance associated with the specified pipeline ID. - * - * The function will bake a pipeline object (and, possibly, a pipeline layout object, too) if - * the specified pipeline is marked as dirty. - * - * @param in_pipeline_id ID of the pipeline to return the wrapper instance for. - * Must not describe a proxy pipeline. - * - * @return Ptr to a PipelineLayout instance or nullptr if the function failed. - **/ - std::shared_ptr get_graphics_pipeline_layout(GraphicsPipelineID in_pipeline_id); - - /** Tells what primitive topology the specified graphics pipeline has been created for. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_primitive_topology_ptr If not null, deref will be set to primitive topology, as specified - * at creation time. - * - * @return true if successful, false otherwise. - **/ - bool get_input_assembly_properties(GraphicsPipelineID in_graphics_pipeline_id, - VkPrimitiveTopology* out_opt_primitive_topology_ptr) const; - - /** Retrieves logic op-related state configuration for the specified graphics pipeline. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_is_enabled_ptr If not null, deref will be set to true if the logic op has - * been enabled for the pipeline, or to false otherwise. - * @param out_opt_logic_op_ptr If not null, deref will be set to the logic op enum, specified - * at creation time. - * - * @return true if successful, false otherwise. - **/ - bool get_logic_op_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr, - VkLogicOp* out_opt_logic_op_ptr) const; - - /** Retrieves multisampling-related state configuration for the specified graphics pipeline. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_sample_count_ptr If not null, deref will be set to the enum value telling - * the sample count assigned to the pipeline. - * @param out_opt_sample_mask_ptr If not null, deref will be set to the sample mask assigned - * to the pipeline. - * - * @return true if successful, false otherwise. 
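For contrast, a sketch of how the entrypoints removed above were typically driven in the pre-create-info interface. The calls follow the removed signatures verbatim; smart-pointer template arguments are reconstructed, and the shader entrypoints and vertex layout are illustrative.

// Sketch: legacy pipeline setup via the removed add_*/bake()/get_*() entrypoints.
void build_legacy_pipeline(std::shared_ptr<Anvil::GraphicsPipelineManager> in_manager_ptr,
                           std::shared_ptr<Anvil::RenderPass>              in_renderpass_ptr,
                           Anvil::SubPassID                                in_subpass_id,
                           const Anvil::ShaderModuleStageEntryPoint&       in_fs_entrypoint,
                           const Anvil::ShaderModuleStageEntryPoint&       in_vs_entrypoint)
{
    Anvil::GraphicsPipelineID pipeline_id = UINT32_MAX;

    in_manager_ptr->add_regular_pipeline(false, /* in_disable_optimizations */
                                         false, /* in_allow_derivatives     */
                                         in_renderpass_ptr,
                                         in_subpass_id,
                                         in_fs_entrypoint,
                                         Anvil::ShaderModuleStageEntryPoint(), /* no geometry stage  */
                                         Anvil::ShaderModuleStageEntryPoint(), /* no tess control    */
                                         Anvil::ShaderModuleStageEntryPoint(), /* no tess evaluation */
                                         in_vs_entrypoint,
                                        &pipeline_id);

    /* One vec4 position attribute, fed at per-vertex rate. */
    in_manager_ptr->add_vertex_attribute(pipeline_id,
                                         0,                             /* in_location        */
                                         VK_FORMAT_R32G32B32A32_SFLOAT,
                                         0,                             /* in_offset_in_bytes */
                                         sizeof(float) * 4,             /* in_stride_in_bytes */
                                         VK_VERTEX_INPUT_RATE_VERTEX);

    /* Baking creates the VkPipeline; get_graphics_pipeline() would also bake lazily. */
    in_manager_ptr->bake();

    const VkPipeline vk_pipeline = in_manager_ptr->get_graphics_pipeline(pipeline_id);
    (void) vk_pipeline;
}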
- **/ - bool get_multisampling_properties(GraphicsPipelineID in_graphics_pipeline_id, - VkSampleCountFlags* out_opt_sample_count_ptr, - VkSampleMask* out_opt_sample_mask_ptr) const; - - /** Tells whether the graphics pipeline has been created with enabled primitive restart mode. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_is_enabled_ptr If not NULL, deref will be set to true if primitive restart has - * been enabled for the pipeline, or to false otherwise. - * - * @return true if successful, false otherwise. - **/ - bool get_primitive_restart_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr) const; - - /** Tells what rasterization order the graphics pipeline has been created for. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_rasterization_order_ptr If not null, deref will be set to the rasterization order - * assigned to the pipeline. - * - * @return true if successful, false otherwise. - **/ - bool get_rasterization_order(GraphicsPipelineID in_graphics_pipeline_id, - VkRasterizationOrderAMD* out_opt_rasterization_order_ptr) const; - - /** Tells whether the graphics pipeline has been created with enabled rasterizer discard mode. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_is_enabled_ptr If not NULL, deref will be set to true if the mode has been - * enabled for the pipeline, or to false otherwise. - * - * @return true if successful, false otherwise. - **/ - bool get_rasterizer_discard_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr) const; - - /** Retrieves various properties of the specified graphics pipeline which are related to - * rasterization. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_polygon_mode_ptr If not null, deref will be set to polygon mode used by the - * pipeline. - * @param out_opt_cull_mode_ptr If not null, deref will be set to cull mode used by the - * pipeline. - * @param out_opt_front_face_ptr If not null, deref will be set to front face setting, used - * by the pipeline. - * @param out_opt_line_width_ptr If not null, deref will be set to line width setting, as used - * by the pipeline. - * - * @return true if successful, false otherwise. - **/ - bool get_rasterization_properties(GraphicsPipelineID in_graphics_pipeline_id, - VkPolygonMode* out_opt_polygon_mode_ptr, - VkCullModeFlags* out_opt_cull_mode_ptr, - VkFrontFace* out_opt_front_face_ptr, - float* out_opt_line_width_ptr) const; - - /** Retrieves state configuration related to per-sample shading for the specified graphics pipeline. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_is_enabled_ptr If not null, deref will be set to true if per-sample shading - * is enabled for the pipeline. - * @param out_opt_min_sample_shading_ptr If not null, deref will be set to the minimum sample shading value, - * as specified at creation time. - * - * @return true if successful, false otherwise. - **/ - bool get_sample_shading_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr, - float* out_opt_min_sample_shading_ptr) const; - - /** Retrieves properties of a scissor box at a given index for the specified graphics pipeline. 
- * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param in_n_scissor_box Index of the scissor box to retrieve data of. - * @param out_opt_x_ptr If not null, deref will be set to X offset of the scissor box. - * @param out_opt_y_ptr If not null, deref will be set to Y offset of the scissor box. - * @param out_opt_width_ptr If not null, deref will be set to width of the scissor box. - * @param out_opt_height_ptr If not null, deref will be set to height of the scissor box. - * - * @return true if successful, false otherwise. - **/ - bool get_scissor_box_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_n_scissor_box, - int32_t* out_opt_x_ptr, - int32_t* out_opt_y_ptr, - uint32_t* out_opt_width_ptr, - uint32_t* out_opt_height_ptr) const; - - /** Retrieves stencil test-related state configuration for the specified graphics pipeline. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being - * made for. - * @param out_opt_is_enabled_ptr If not null, deref will be set to true if stencil test - * has been enabled for the pipeline. - * @param out_opt_front_stencil_fail_op_ptr If not null, deref will be set to "front stencil fail operation" - * setting, as specified at creation time. - * @param out_opt_front_stencil_pass_op_ptr If not null, deref will be set to "front stencil pass operation" - * setting, as specified at creation time. - * @param out_opt_front_stencil_depth_fail_op_ptr If not null, deref will be set to "front stencil depth fail operation" - * setting, as specified at creation time. - * @param out_opt_front_stencil_compare_op_ptr If not null, deref will be set to "front stencil compare operation" - * setting, as specified at creation time. - * @param out_opt_front_stencil_compare_mask_ptr If not null, deref will be set to front stencil compare mask, as - * specified at creation time. - * @param out_opt_front_stencil_write_mask_ptr If not null, deref will be set to front stencil write mask, as - * specified at creation time. - * @param out_opt_front_stencil_reference_ptr If not null, deref will be set to front stencil reference value, as - * specified at creation time. - * @param out_opt_back_stencil_fail_op_ptr If not null, deref will be set to "back stencil fail operation" - * setting, as specified at creation time. - * @param out_opt_back_stencil_pass_op_ptr If not null, deref will be set to "back stencil pass operation" - * setting, as specified at creation time. - * @param out_opt_back_stencil_depth_fail_op_ptr If not null, deref will be set to "back stencil depth fail operation" - * setting, as specified at creation time. - * @param out_opt_back_stencil_compare_op_ptr If not null, deref will be set to "back stencil compare operation" - * setting, as specified at creation time. - * @param out_opt_back_stencil_compare_mask_ptr If not null, deref will be set to back stencil compare mask, as - * specified at creation time. - * @param out_opt_back_stencil_write_mask_ptr If not null, deref will be set to back stencil write mask, as - * specified at creation time. - * @param out_opt_back_stencil_reference_ptr If not null, deref will be set to back stencil reference value, as - * specified at creation time. - * - * @return true if successful, false otherwise. 
- **/ - bool get_stencil_test_properties(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr, - VkStencilOp* out_opt_front_stencil_fail_op_ptr, - VkStencilOp* out_opt_front_stencil_pass_op_ptr, - VkStencilOp* out_opt_front_stencil_depth_fail_op_ptr, - VkCompareOp* out_opt_front_stencil_compare_op_ptr, - uint32_t* out_opt_front_stencil_compare_mask_ptr, - uint32_t* out_opt_front_stencil_write_mask_ptr, - uint32_t* out_opt_front_stencil_reference_ptr, - VkStencilOp* out_opt_back_stencil_fail_op_ptr, - VkStencilOp* out_opt_back_stencil_pass_op_ptr, - VkStencilOp* out_opt_back_stencil_depth_fail_op_ptr, - VkCompareOp* out_opt_back_stencil_compare_op_ptr, - uint32_t* out_opt_back_stencil_compare_mask_ptr, - uint32_t* out_opt_back_stencil_write_mask_ptr, - uint32_t* out_opt_back_stencil_reference_ptr) const; - - /** Tells the number of patch control points, as specified at creation time for the specified - * graphics pipeline. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param out_opt_n_patch_control_points_ptr If not null, deref will be set to the number of patch control points, - * as specified at creation time. - * - * @return true if successful, false otherwise. - **/ - bool get_tessellation_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t* out_opt_n_patch_control_points_ptr) const; - - /** Returns properties of a vertex attribute at a given index for the specified graphics pipeline. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param in_n_vertex_input_attribute Index of the vertex attribute to retrieve info of. - * @param out_opt_location_ptr If not null, deref will be set to the specified attribute's location. - * @param out_opt_binding_ptr If not null, deref will be set to the specified attribute's binding index. - * @param out_opt_format_ptr If not null, deref will be set to the specified attribute's format. - * @param out_opt_offset_ptr If not null, deref will be set to the specified attribute's start offset. - * - * @return true if successful, false otherwise. - **/ - bool get_vertex_input_attribute_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_n_vertex_input_attribute, - uint32_t* out_opt_location_ptr, - uint32_t* out_opt_binding_ptr, - VkFormat* out_opt_format_ptr, - uint32_t* out_opt_offset_ptr) const; - - /** Tells which binding has been assigned to a vertex attribute at location @param input_vertex_attribute_location. - * - * This function may trigger baking of one or more graphics pipelines, if the user-specified graphics pipeline - * is marked as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param in_input_vertex_attribute_location Location of the queried vertex attribute. - * @param out_input_vertex_attribute_binding_ptr Deref will be set to the index of the input vertex binding, - * assigned to the vertex attribute. Must not be null. - * - * @return true if successful, false otherwise. - */ - bool get_vertex_input_binding_for_attribute_location(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_input_vertex_attribute_location, - uint32_t* out_input_vertex_binding_ptr); - - /** Returns properties of a vertex binding at a given index for the specified graphics pipeline. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. 
- * @param in_n_vertex_input_binding Index of the vertex binding to retrieve info of. - * @param out_opt_binding_ptr If not null, deref will be set to the specified binding's index. - * @param out_opt_stride_ptr If not null, deref will be set to the specified binding's stride. - * @param out_opt_input_rate_ptr If not null, deref will be set to the specified binding's input rate. - * - * @return true if successful, false otherwise. - **/ - bool get_vertex_input_binding_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_n_vertex_input_binding, - uint32_t* out_opt_binding_ptr, - uint32_t* out_opt_stride_ptr, - VkVertexInputRate* out_opt_input_rate_ptr) const; - - /** Retrieves properties of a viewport at a given index for the specified graphics pipeline. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline the query is being made for. - * @param in_n_viewport Index of the viewport to retrieve properties of. - * @param out_opt_origin_x_ptr If not null, deref will be set to the viewport's X origin. - * @param out_opt_origin_y_ptr If not null, deref will be set to the viewport's Y origin. - * @param out_opt_width_ptr If not null, deref will be set to the viewport's width. - * @param out_opt_height_ptr If not null, deref will be set to the viewport's height. - * @param out_opt_min_depth_ptr If not null, deref will be set to the viewport's minimum depth value. - * @param out_opt_max_depth_ptr If not null, deref will be set to the viewport's maximum depth value. - * - * @return true if successful, false otherwise. - **/ - bool get_viewport_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_n_viewport, - float* out_opt_origin_x_ptr, - float* out_opt_origin_y_ptr, - float* out_opt_width_ptr, - float* out_opt_height_ptr, - float* out_opt_min_depth_ptr, - float* out_opt_max_depth_ptr) const; - - /** Sets a new blend constant for the specified graphics pipeline and marks the pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_blend_constant_vec4 4 floats, specifying the constant. Must not be nullptr. - * - **/ - void set_blending_properties(GraphicsPipelineID in_graphics_pipeline_id, - const float* in_blend_constant_vec4); - - /** Updates color blend properties for the specified sub-pass attachment, and marks the pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_attachment_id ID of the subpass attachment, for which the color blend properties should be applied. - * @param in_blending_enabled true if blending should be enabled for the specified attachment, false otherwise. - * @param in_blend_op_color Blend operation color to use for the attachment. - * @param in_blend_op_alpha Blend operation alpha to use for the attachment. - * @param in_src_color_blend_factor Source blend factor for color components. - * @param in_dst_color_blend_factor Destination blend factor for color components. - * @param in_src_alpha_blend_factor Source blend factor for the alpha component. - * @param in_dst_alpha_blend_factor Destination blend factor for the alpha component. - * @param in_channel_write_mask Component write mask to use for the attachment. 
- * - **/ - void set_color_blend_attachment_properties(GraphicsPipelineID in_graphics_pipeline_id, - SubPassAttachmentID in_attachment_id, - bool in_blending_enabled, - VkBlendOp in_blend_op_color, - VkBlendOp in_blend_op_alpha, - VkBlendFactor in_src_color_blend_factor, - VkBlendFactor in_dst_color_blend_factor, - VkBlendFactor in_src_alpha_blend_factor, - VkBlendFactor in_dst_alpha_blend_factor, - VkColorComponentFlags in_channel_write_mask); - - /** Updates the number of scissor boxes to be used, when dynamic scissor state is enabled. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_n_dynamic_scissor_boxes As per description. - */ - void set_dynamic_scissor_state_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_n_dynamic_scissor_boxes); - - /** Updates the number of viewports to be used, when dynamic viewport state is enabled. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_n_dynamic_viewports As per description. - */ - void set_dynamic_viewport_state_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_n_dynamic_viewports); - - /** Sets the primitive topology to be used for the pipeline, and marks the pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_primitive_topology As per description. - */ - void set_input_assembly_properties(GraphicsPipelineID in_graphics_pipeline_id, - VkPrimitiveTopology in_primitive_topology); - - /** Sets a number of multisampling properties to be used for the pipeline, and marks the pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_sample_count Number of rasterization samples to be used (expressed as one of the enum values). - * @param in_min_sample_shading Minimum number of unique samples to shade for each fragment. - * @param in_sample_mask Sample mask to use. - */ - void set_multisampling_properties(GraphicsPipelineID in_graphics_pipeline_id, - VkSampleCountFlagBits in_sample_count, - float in_min_sample_shading, - const VkSampleMask in_sample_mask); - - /** Sets a call-back, which GFX pipeline manager will invoke right after a vkCreateGraphicsPipeline() call is made. - * - * The call-back will be issued right before the vkCreateGraphicsPipelines() call is issued. - * - * This call overwrites any previous set_pipeline_post_bake_callback() calls. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to set the call-back for. - * @param in_pipeline_post_bake_function Function to call back. Must not be nullptr. - * - * @return true if successful, false otherwise. - **/ - bool set_pipeline_post_bake_callback(GraphicsPipelineID in_graphics_pipeline_id, - PipelinePostBakeFunction in_pipeline_post_bake_function); - - /** Sets a call-back, which GFX pipeline manager will invoke right before a vkCreateGraphicsPipeline() call is made. - * This allows the callee to adjust the VkGraphicsPipelineCreateInfo instance, eg. by modifying pNext to user-defined - * chain of structures. 
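A sketch of the removed pre-bake hook described above. The callback's parameter types are reconstructed from the names shown (a weak device pointer is assumed, matching the removed create() signature), and the flags tweak stands in for any pNext or flags adjustment a caller might make before vkCreateGraphicsPipelines() runs.

// Sketch: registering a pre-bake callback (removed interface).
void register_pre_bake_tweak(std::shared_ptr<Anvil::GraphicsPipelineManager> in_manager_ptr,
                             Anvil::GraphicsPipelineID                       in_pipeline_id)
{
    in_manager_ptr->set_pipeline_pre_bake_callback(
        in_pipeline_id,
        [](std::weak_ptr<Anvil::BaseDevice>  /* in_device_ptr   */,
           Anvil::GraphicsPipelineID         /* in_pipeline_id  */,
           VkGraphicsPipelineCreateInfo*     in_create_info_ptr)
        {
            /* Keep edits additive (chain extension structs onto pNext, adjust flags, etc.);
             * per the note above, the manager does not re-sync its state with these changes. */
            in_create_info_ptr->flags |= VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT;
        });
}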
- * - * The call-back will be issued right before the vkCreateGraphicsPipelines() call is issued. - * - * This call overwrites any previous set_pipeline_pre_bake_callback() calls. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to set the call-back for. - * @param in_pfn_pipeline_pre_bake_proc Function to call back. Must not be nullptr. - * @param in_user_arg User argument to pass with the call-back. May be nullptr. - * - * @return true if successful, false otherwise. - **/ - bool set_pipeline_pre_bake_callback(GraphicsPipelineID in_graphics_pipeline_id, - PipelinePreBakeFunction in_pfn_pipeline_pre_bake_function); - - /** Copies graphics state from @param in_source_pipeline_id to @param in_target_pipeline_id, leaving - * the originally assigned renderpass, as well as the subpass ID, unaffected. - * - * @param in_target_pipeline_id ID of a graphics pipeline, to which state should be copied. - * @param in_source_pipeline_id ID of a graphics pipeline, from which state should be copied. - * - * @return true if successful, false otherwise. - **/ - bool set_pipeline_state_from_pipeline(GraphicsPipelineID in_target_pipeline_id, - GraphicsPipelineID in_source_pipeline_id); - - /** Configures the rasterization order for the pipeline if the VK_AMD_rasterization_order extension - * is supported by the device, for which the pipeline has been created. - * - * On drivers which do not support the extension, the call has no effect. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_rasterization_order Rasterization order to use. - **/ - void set_rasterization_order(GraphicsPipelineID in_graphics_pipeline_id, - VkRasterizationOrderAMD in_rasterization_order); - - /** Sets a number of rasterization properties to be used for the pipeline, and marks the pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_polygon_mode Polygon mode to use. - * @param in_cull_mode Cull mode to use. - * @param in_front_face Front face to use. - * @param in_line_width Line width to use. - **/ - void set_rasterization_properties(GraphicsPipelineID in_graphics_pipeline_id, - VkPolygonMode in_polygon_mode, - VkCullModeFlags in_cull_mode, - VkFrontFace in_front_face, - float in_line_width); - - /** Sets properties of a scissor box at the specified index, and marks the pipeline as dirty. - * - * If @param n_scissor_box is larger than 1, all previous scissor boxes must also be defined - * prior to baking. Number of scissor boxes must match the number of viewports defined for the - * pipeline. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_n_scissor_box Index of the scissor box to be updated. - * @param in_x X offset of the scissor box. - * @param in_y Y offset of the scissor box. - * @param in_width Width of the scissor box. - * @param in_height Height of the scissor box. 
- **/ - void set_scissor_box_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_n_scissor_box, - int32_t in_x, - int32_t in_y, - uint32_t in_width, - uint32_t in_height); - - /** Sets a number of stencil test properties to be used for the pipeline, and marks the pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_update_front_face_state true if the front face stencil states should be updated; false to update the - * back stencil states instead. - * @param in_stencil_fail_op Stencil fail operation to use. - * @param in_stencil_pass_op Stencil pass operation to use. - * @param in_stencil_depth_fail_op Stencil depth fail operation to use. - * @param in_stencil_compare_op Stencil compare operation to use. - * @param in_stencil_compare_mask Stencil compare mask to use. - * @param in_stencil_write_mask Stencil write mask to use. - * @param in_stencil_reference Stencil reference value to use. - **/ - void set_stencil_test_properties(GraphicsPipelineID in_graphics_pipeline_id, - bool in_update_front_face_state, - VkStencilOp in_stencil_fail_op, - VkStencilOp in_stencil_pass_op, - VkStencilOp in_stencil_depth_fail_op, - VkCompareOp in_stencil_compare_op, - uint32_t in_stencil_compare_mask, - uint32_t in_stencil_write_mask, - uint32_t in_stencil_reference); - - /** Updates the number of tessellation patch points to be used for the pipeline, and marks the pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_n_patch_control_points As per description. - **/ - void set_tessellation_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_n_patch_control_points); - - /** Sets properties of a viewport at the specified index, and marks the pipeline as dirty. - * - * If @param in_n_viewport is larger than 1, all previous viewports must also be defined - * prior to baking. Number of scissor boxes must match the number of viewports defined for the - * pipeline. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_n_viewport Index of the viewport, whose properties should be changed. - * @param in_origin_x X offset to use for the viewport's origin. - * @param in_origin_y Y offset to use for the viewport's origin. - * @param in_width Width of the viewport. - * @param in_height Height of the viewport. - * @param in_min_depth Minimum depth value to use for the viewport. - * @param in_max_depth Maximum depth value to use for the viewport. - **/ - void set_viewport_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_n_viewport, - float in_origin_x, - float in_origin_y, - float in_width, - float in_height, - float in_min_depth, - float in_max_depth); - - /** Enables or disables the "alpha to coverage" test, and marks the pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_should_enable true to enable the test; false to disable it. 
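A sketch of driving the removed stencil entrypoints: set_stencil_test_properties() as documented above, plus toggle_stencil_test() from further down in this hunk. The state values are illustrative (a simple "mark covered pixels" configuration, identical for front and back faces).

// Sketch: stencil configuration via the removed setters.
void enable_stencil_marking(std::shared_ptr<Anvil::GraphicsPipelineManager> in_manager_ptr,
                            Anvil::GraphicsPipelineID                       in_pipeline_id)
{
    for (bool update_front_face : { true, false })
    {
        in_manager_ptr->set_stencil_test_properties(in_pipeline_id,
                                                    update_front_face,
                                                    VK_STENCIL_OP_KEEP,    /* in_stencil_fail_op       */
                                                    VK_STENCIL_OP_REPLACE, /* in_stencil_pass_op       */
                                                    VK_STENCIL_OP_KEEP,    /* in_stencil_depth_fail_op */
                                                    VK_COMPARE_OP_ALWAYS,  /* in_stencil_compare_op    */
                                                    0xFF,                  /* in_stencil_compare_mask  */
                                                    0xFF,                  /* in_stencil_write_mask    */
                                                    1);                    /* in_stencil_reference     */
    }

    in_manager_ptr->toggle_stencil_test(in_pipeline_id,
                                        true); /* in_should_enable */
}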
- */ - void toggle_alpha_to_coverage(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable); - - /** Enables or disables the "alpha to one" test, and marks the pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_should_enable true to enable the test; false to disable it. - */ - void toggle_alpha_to_one(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable); - - /** Enables or disables the "depth bias" mode, updates related state values, and marks - * the pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_should_enable true to enable the mode; false to disable it. - * @param in_depth_bias_constant_factor Depth bias constant factor to use for the mode. - * @param in_depth_bias_clamp Depth bias clamp to use for the mode. - * @param in_depth_bias_slope_factor Slope scale for the depth bias to use for the mode. - */ - void toggle_depth_bias(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable, - float in_depth_bias_constant_factor, - float in_depth_bias_clamp, - float in_depth_bias_slope_factor); - - /** Enables or disables the "depth bounds" test, updates related state values, and marks - * the pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_should_enable true to enable the test; false to disable it. - * @param in_min_depth_bounds Minimum boundary value to use for the test. - * @param in_max_depth_bounds Maximum boundary value to use for the test. - */ - void toggle_depth_bounds_test(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable, - float in_min_depth_bounds, - float in_max_depth_bounds); - - /** Enables or disables the "depth clamp" test, and marks the pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_should_enable true to enable the test; false to disable it. - */ - void toggle_depth_clamp(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable); - - /** Enables or disables the depth test, updates related state values, and marks the - * pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_should_enable true to enable the mode; false to disable it. - * @param in_compare_op Compare operation to use for the test. - */ - void toggle_depth_test(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable, - VkCompareOp in_compare_op); - - /** Enables or disables depth writes, and marks the pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_should_enable true to enable the writes; false to disable them. 
- */ - void toggle_depth_writes(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable); - - /** Enables or disables the specified dynamic states, and marks the pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_should_enable true to enable the dynamic state(s) specified by @param in_dynamic_state_bits, false - * disable them if already enabled. - * @param in_dynamic_state_bits An integer whose individual bits correspond to dynamic states which should either - * be enabled or disabled. - **/ - void toggle_dynamic_states (GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable, - DynamicStateBitfield in_dynamic_state_bits); - - /** Enables or disables logic ops, specifies which logic op should be used, and marks the pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_should_enable true to enable the mode; false to disable it. - * @param in_logic_op Logic operation type to use. - */ - void toggle_logic_op(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable, - VkLogicOp in_logic_op); - - /** Enables or disables the "primitive restart" mode, and marks the pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_should_enable true to enable the mode; false to disable it. - */ - void toggle_primitive_restart(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable); - - /** Enables or disables the "rasterizer discard" mode, and marks the pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_should_enable true to enable the test; false to disable it. - */ - void toggle_rasterizer_discard(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable); - - /** Enables or disables the sample mask, and marks the pipeline as dirty. - * - * Note: make sure to configure the sample mask using set_multisampling_properties() if you intend to use it. - * - * Note: disabling sample mask will make the manager set VkPipelineMultisampleStateCreateInfo::pSampleMask to a non-null - * value at pipeline creation time. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_should_enable true to enable sample mask; false to disable it. - */ - void toggle_sample_mask(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable); - - /** Enables or disables the "per-sample shading" mode, and marks the pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_should_enable true to enable the test; false to disable it. 
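A sketch of toggle_dynamic_states() usage; the DYNAMIC_STATE_*_BIT enumerators below are placeholders for whatever the DynamicStateBitfield type actually defines and are not taken from the header:

    /* Mark viewport and scissor state as dynamic, so they are supplied from the command buffer
     * instead of being baked into the pipeline. Bit names below are illustrative only. */
    const Anvil::DynamicStateBitfield dynamic_bits = Anvil::DYNAMIC_STATE_VIEWPORT_BIT |
                                                     Anvil::DYNAMIC_STATE_SCISSOR_BIT;

    gfx_manager_ptr->toggle_dynamic_states(pipeline_id,
                                           true,          /* in_should_enable      */
                                           dynamic_bits); /* in_dynamic_state_bits */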
- */ - void toggle_sample_shading(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable); - - /** Enables or disables the stencil test test, and marks the pipeline as dirty. - * - * @param in_graphics_pipeline_id ID of the graphics pipeline to update. The ID must come from a preceding add_() - * call, issued against the same GraphicsPipelineManager instance. - * @param in_should_enable true to enable the test; false to disable it. - */ - void toggle_stencil_test(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable); - private: /* Private type declarations */ + typedef std::map AttributeLocationToBindingIndexMap; - /* Defines blending properties for a single subpass attachment. - * - * This descriptor is not exposed to the Vulkan implementation. It is used to form Vulkan-specific - * descriptors at baking time instead. - */ - typedef struct BlendingProperties - { - VkColorComponentFlagsVariable(channel_write_mask); - - bool blend_enabled; - VkBlendOp blend_op_alpha; - VkBlendOp blend_op_color; - VkBlendFactor dst_alpha_blend_factor; - VkBlendFactor dst_color_blend_factor; - VkBlendFactor src_alpha_blend_factor; - VkBlendFactor src_color_blend_factor; - - /** Constructor. */ - BlendingProperties() - { - blend_enabled = false; - blend_op_alpha = VK_BLEND_OP_ADD; - blend_op_color = VK_BLEND_OP_ADD; - channel_write_mask = VK_COLOR_COMPONENT_A_BIT | - VK_COLOR_COMPONENT_B_BIT | - VK_COLOR_COMPONENT_G_BIT | - VK_COLOR_COMPONENT_R_BIT; - dst_alpha_blend_factor = VK_BLEND_FACTOR_ONE; - dst_color_blend_factor = VK_BLEND_FACTOR_ONE; - src_alpha_blend_factor = VK_BLEND_FACTOR_ONE; - src_color_blend_factor = VK_BLEND_FACTOR_ONE; - } - - /** Forms a VkPipelineColorBlendAttachmentState instance from the stored state values. */ - VkPipelineColorBlendAttachmentState get_vk_descriptor() const - { - VkPipelineColorBlendAttachmentState result; - - result.alphaBlendOp = blend_op_alpha; - result.blendEnable = static_cast((blend_enabled) ? VK_TRUE : VK_FALSE); - result.colorBlendOp = blend_op_color; - result.colorWriteMask = channel_write_mask; - result.dstAlphaBlendFactor = dst_alpha_blend_factor; - result.dstColorBlendFactor = dst_color_blend_factor; - result.srcAlphaBlendFactor = src_alpha_blend_factor; - result.srcColorBlendFactor = src_color_blend_factor; - - return result; - } - - /** Comparison operator - * - * @param in Object to compare against. - * - * @return true if all states match, false otherwise. - **/ - bool operator==(const BlendingProperties& in) const - { - return (in.blend_enabled == blend_enabled && - in.blend_op_alpha == blend_op_alpha && - in.blend_op_color == blend_op_color && - in.channel_write_mask == channel_write_mask && - in.dst_alpha_blend_factor == dst_alpha_blend_factor && - in.dst_color_blend_factor == dst_color_blend_factor && - in.src_alpha_blend_factor == src_alpha_blend_factor && - in.src_color_blend_factor == src_color_blend_factor); - } - } BlendingProperties; - - typedef std::map SubPassAttachmentToBlendingPropertiesMap; - - typedef enum - { - GRAPHICS_SHADER_STAGE_FRAGMENT, - GRAPHICS_SHADER_STAGE_GEOMETRY, - GRAPHICS_SHADER_STAGE_TESSELLATION_CONTROL, - GRAPHICS_SHADER_STAGE_TESSELLATION_EVALUATION, - GRAPHICS_SHADER_STAGE_VERTEX, - - /* Always last */ - GRAPHICS_SHADER_STAGE_COUNT - } GraphicsShaderStage; - - /** Defines a single scissor box - * - * This descriptor is not exposed to the Vulkan implementation. It is used to form Vulkan-specific - * descriptors at baking time instead. 
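For clarity, the Vulkan state that a default-constructed BlendingProperties instance bakes via get_vk_descriptor() (see the removed struct above) is equivalent to filling the attachment state directly:

    VkPipelineColorBlendAttachmentState attachment_state = {};

    attachment_state.blendEnable         = VK_FALSE;
    attachment_state.colorBlendOp        = VK_BLEND_OP_ADD;
    attachment_state.alphaBlendOp        = VK_BLEND_OP_ADD;
    attachment_state.colorWriteMask      = VK_COLOR_COMPONENT_R_BIT |
                                           VK_COLOR_COMPONENT_G_BIT |
                                           VK_COLOR_COMPONENT_B_BIT |
                                           VK_COLOR_COMPONENT_A_BIT;
    attachment_state.srcColorBlendFactor = VK_BLEND_FACTOR_ONE;
    attachment_state.dstColorBlendFactor = VK_BLEND_FACTOR_ONE;
    attachment_state.srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE;
    attachment_state.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE;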
- */ - struct InternalScissorBox - { - int32_t x; - int32_t y; - uint32_t width; - uint32_t height; - - /* Constructor. Should only be used by STL. */ - InternalScissorBox() - { - x = 0; - y = 0; - width = 32u; - height = 32u; - } - - /* Constructor - * - * @param in_x X offset of the scissor box - * @param in_y Y offset of the scissor box - * @param in_width Width of the scissor box - * @param in_height Height of the scissor box - **/ - InternalScissorBox(int32_t in_x, - int32_t in_y, - uint32_t in_width, - uint32_t in_height) - { - height = in_height; - width = in_width; - x = in_x; - y = in_y; - } - - /* Converts the internal descriptor to the Vulkan equivalent structure */ - VkRect2D get_vk_descriptor() const - { - VkRect2D result; - - result.extent.height = height; - result.extent.width = width; - result.offset.x = x; - result.offset.y = y; - - return result; - } - }; - - /** Defines a single viewport - * - * This descriptor is not exposed to the Vulkan implementation. It is used to form Vulkan-specific - * descriptors at baking time instead. - */ - typedef struct InternalViewport - { - float height; - float max_depth; - float min_depth; - float origin_x; - float origin_y; - float width; - - /* Constructor. Should only be used by STL */ - InternalViewport() - { - height = 32.0f; - max_depth = 1.0f; - min_depth = 0.0f; - origin_x = 0.0f; - origin_y = 0.0f; - width = 32.0f; - } - - /* Constructor. - * - * @param in_origin_x Origin X of the viewport. - * @param in_origin_y Origin Y of the viewport. - * @param in_width Width of the viewport. - * @param in_height Height of the viewport. - * @param in_min_depth Minimum depth value the viewport should use. - * @param in_max_depth Maximum depth value the viewport should use. - **/ - InternalViewport(float in_origin_x, - float in_origin_y, - float in_width, - float in_height, - float in_min_depth, - float in_max_depth) - { - height = in_height; - max_depth = in_max_depth; - min_depth = in_min_depth; - origin_x = in_origin_x; - origin_y = in_origin_y; - width = in_width; - } - - /* Returns a Vulkan viewport descriptor, using this object's properties. */ - VkViewport get_vk_descriptor() const - { - VkViewport result; - - result.height = height; - result.maxDepth = max_depth; - result.minDepth = min_depth; - result.x = origin_x; - result.y = origin_y; - result.width = width; - - return result; - } - } InternalViewport; - - /** A vertex attribute descriptor. This descriptor is not exposed to the Vulkan implementation. Instead, - * its members are used to create Vulkan input attribute & binding descriptors at baking time. - */ - typedef struct InternalVertexAttribute + typedef struct VertexInputBinding { - uint32_t explicit_binding_index; - VkFormat format; - uint32_t location; - uint32_t offset_in_bytes; - VkVertexInputRate rate; - uint32_t stride_in_bytes; + uint32_t binding; + uint32_t divisor; + Anvil::VertexInputRate input_rate; + uint32_t stride; - /** Dummy constructor. Should only be used by STL. */ - InternalVertexAttribute() + VertexInputBinding() { - explicit_binding_index = UINT32_MAX; - format = VK_FORMAT_UNDEFINED; - location = UINT32_MAX; - offset_in_bytes = UINT32_MAX; - rate = VK_VERTEX_INPUT_RATE_MAX_ENUM; - stride_in_bytes = UINT32_MAX; + binding = 0; + divisor = 0; + input_rate = Anvil::VertexInputRate::UNKNOWN; + stride = 0; } - /** Constructor. - * - * @param in_format Attribute format. - * @param in_location Attribute location. - * @param in_offset_in_bytes Start offset in bytes. - * @param in_rate Step rate. 
- * @param in_stride_in_bytes Stride in bytes. - **/ - InternalVertexAttribute(uint32_t in_explicit_binding_index, - VkFormat in_format, - uint32_t in_location, - uint32_t in_offset_in_bytes, - VkVertexInputRate in_rate, - uint32_t in_stride_in_bytes) + VertexInputBinding(const VkVertexInputBindingDescription& in_binding_vk, + const uint32_t& in_divisor) { - explicit_binding_index = in_explicit_binding_index; - format = in_format; - location = in_location; - offset_in_bytes = in_offset_in_bytes; - rate = in_rate; - stride_in_bytes = in_stride_in_bytes; + binding = in_binding_vk.binding; + divisor = in_divisor; + input_rate = static_cast(in_binding_vk.inputRate); + stride = in_binding_vk.stride; } - } InternalVertexAttribute; - - typedef std::map AttributeLocationToBindingIndexMap; - typedef std::map InternalScissorBoxes; - typedef std::vector InternalVertexAttributes; - typedef std::map InternalViewports; + } VertexInputBinding; - /** Descriptor which encapsulates all state of a single graphics pipeline. - * - * This descriptor is not exposed to the Vulkan implementation. It is used to form Vulkan-specific - * descriptors at baking time instead. - * - * A single unique graphics pipeline descriptor should be assigned to each subpass. - */ - struct GraphicsPipelineConfiguration + typedef struct GraphicsPipelineData { - AttributeLocationToBindingIndexMap attribute_location_to_binding_index_map; + const Anvil::GraphicsPipelineCreateInfo* pipeline_create_info_ptr; std::vector vk_input_attributes; - std::vector vk_input_bindings; - - bool depth_bounds_test_enabled; - float max_depth_bounds; - float min_depth_bounds; - - bool depth_bias_enabled; - float depth_bias_clamp; - float depth_bias_constant_factor; - float depth_bias_slope_factor; - - bool depth_test_enabled; - VkCompareOp depth_test_compare_op; - - DynamicStateBitfield enabled_dynamic_states; - - bool alpha_to_coverage_enabled; - bool alpha_to_one_enabled; - bool depth_clamp_enabled; - bool depth_writes_enabled; - bool logic_op_enabled; - bool primitive_restart_enabled; - bool rasterizer_discard_enabled; - bool sample_mask_enabled; - bool sample_shading_enabled; - - bool stencil_test_enabled; - VkStencilOpState stencil_state_back_face; - VkStencilOpState stencil_state_front_face; - - VkRasterizationOrderAMD rasterization_order; - - InternalVertexAttributes attributes; - float blend_constant[4]; - VkPolygonMode polygon_mode; - VkFrontFace front_face; - float line_width; - VkLogicOp logic_op; - float min_sample_shading; - uint32_t n_dynamic_scissor_boxes; - uint32_t n_dynamic_viewports; - uint32_t n_patch_control_points; - VkPrimitiveTopology primitive_topology; - VkSampleMask sample_mask; - InternalScissorBoxes scissor_boxes; - SubPassAttachmentToBlendingPropertiesMap subpass_attachment_blending_properties; - InternalViewports viewports; - - VkCullModeFlagsVariable (cull_mode); - VkSampleCountFlagsVariable(sample_count); - - PipelinePostBakeFunction post_bake_function; - PipelinePreBakeFunction pre_bake_function; - - std::shared_ptr renderpass_ptr; - SubPassID subpass_id; - - /** Constructor. - * - * @param in_renderpass_ptr RenderPass instance the graphics pipeline will be used for. - * The instance is retained. May be nullptr for proxy pipelines, - * must not be nullptr for all other pipeline types. - * @param in_subpass_id ID of the subpass the graphics pipeline descriptor is created - * for. 
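A short sketch of what the new VertexInputBinding helper stores: VkVertexInputBindingDescription carries binding/stride/inputRate but no divisor, so the divisor is supplied separately (VertexInputBinding is a private manager type, shown here only to illustrate the mapping; the divisor feature itself comes from VK_EXT_vertex_attribute_divisor).

    VkVertexInputBindingDescription binding_vk = {};

    binding_vk.binding   = 0;
    binding_vk.stride    = sizeof(float) * 8;            /* e.g. position + normal + uv */
    binding_vk.inputRate = VK_VERTEX_INPUT_RATE_INSTANCE;

    /* Divisor of 4: four consecutive instances share one element of per-instance data. */
    const VertexInputBinding input_binding(binding_vk,
                                           4 /* in_divisor */);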
- **/ - explicit GraphicsPipelineConfiguration(std::shared_ptr in_renderpass_ptr, - SubPassID in_subpass_id) - { - alpha_to_coverage_enabled = false; - alpha_to_one_enabled = false; - depth_bias_clamp = 0.0f; - depth_bias_constant_factor = 0.0f; - depth_bias_enabled = false; - depth_bias_slope_factor = 1.0f; - depth_bounds_test_enabled = false; - depth_clamp_enabled = false; - depth_test_compare_op = VK_COMPARE_OP_ALWAYS; - depth_test_enabled = false; - depth_writes_enabled = false; - enabled_dynamic_states = 0; - front_face = VK_FRONT_FACE_COUNTER_CLOCKWISE; - logic_op = VK_LOGIC_OP_NO_OP; - logic_op_enabled = false; - max_depth_bounds = 1.0f; - min_depth_bounds = 0.0f; - n_dynamic_scissor_boxes = 0; - n_dynamic_viewports = 0; - n_patch_control_points = 0; - primitive_restart_enabled = false; - rasterizer_discard_enabled = false; - sample_mask_enabled = false; - sample_shading_enabled = false; - stencil_test_enabled = false; - - renderpass_ptr = in_renderpass_ptr; - subpass_id = in_subpass_id; - - stencil_state_back_face.compareMask = ~0u; - stencil_state_back_face.compareOp = VK_COMPARE_OP_ALWAYS; - stencil_state_back_face.depthFailOp = VK_STENCIL_OP_KEEP; - stencil_state_back_face.failOp = VK_STENCIL_OP_KEEP; - stencil_state_back_face.passOp = VK_STENCIL_OP_KEEP; - stencil_state_back_face.reference = 0; - stencil_state_back_face.writeMask = ~0u; - stencil_state_front_face = stencil_state_back_face; - rasterization_order = VK_RASTERIZATION_ORDER_STRICT_AMD; - - memset(blend_constant, - 0, - sizeof(blend_constant) ); - - cull_mode = VK_CULL_MODE_BACK_BIT; - line_width = 1.0f; - min_sample_shading = 1.0f; - sample_count = VK_SAMPLE_COUNT_1_BIT; - polygon_mode = VK_POLYGON_MODE_FILL; - primitive_topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; - sample_mask = ~0u; - } - - /** Constructor. Copies all state from the specified base pipeline configuration, replacing - * the renderpass instance & subpass ID with the argument values. - * - * @param in_base_pipeline_config_ptr As per description. Must not be nullptr. - * @param in_renderpass_ptr RenderPass instance the graphics pipeline will be used for. - * The instance is retained. May be nullptr for proxy pipelines. - * Must not be nullptr for all other pipeline types. - * @param in_subpass_id ID of the subpass the graphics pipeline descriptor is created - * for. - **/ - explicit GraphicsPipelineConfiguration(std::shared_ptr in_base_pipeline_config_ptr, - std::shared_ptr in_renderpass_ptr, - SubPassID in_subpass_id) - { - /* Copy all state from the base config.. */ - *this = *in_base_pipeline_config_ptr; - - /* Override the renderpass & the subpass with the data we've been provided */ - renderpass_ptr = in_renderpass_ptr; - subpass_id = in_subpass_id; - } - - /** Destructor. */ - ~GraphicsPipelineConfiguration() - { - /* Stub */ - } + /* NOTE: VkVertexInputBindingDescription does not include divisor information so we need to maintain a custom + * set of structs as well. + */ + std::vector input_bindings; + std::vector vk_input_bindings; - /** Copy assignment operator. */ - GraphicsPipelineConfiguration& operator=(const GraphicsPipelineConfiguration& in) + explicit GraphicsPipelineData(const Anvil::GraphicsPipelineCreateInfo* in_pipeline_create_info_ptr) { - /* NOTE: We leave window, renderpass & subpass ID intact. 
*/ - attribute_location_to_binding_index_map = in.attribute_location_to_binding_index_map; - vk_input_attributes = in.vk_input_attributes; - vk_input_bindings = in.vk_input_bindings; + pipeline_create_info_ptr = in_pipeline_create_info_ptr; - depth_bounds_test_enabled = in.depth_bounds_test_enabled; - max_depth_bounds = in.max_depth_bounds; - min_depth_bounds = in.min_depth_bounds; - - depth_bias_enabled = in.depth_bias_enabled; - depth_bias_clamp = in.depth_bias_clamp; - depth_bias_constant_factor = in.depth_bias_constant_factor; - depth_bias_slope_factor = in.depth_bias_slope_factor; - - depth_test_enabled = in.depth_test_enabled; - depth_test_compare_op = in.depth_test_compare_op; - - enabled_dynamic_states = in.enabled_dynamic_states; - - alpha_to_coverage_enabled = in.alpha_to_coverage_enabled; - alpha_to_one_enabled = in.alpha_to_one_enabled; - depth_clamp_enabled = in.depth_clamp_enabled; - depth_writes_enabled = in.depth_writes_enabled; - logic_op_enabled = in.logic_op_enabled; - primitive_restart_enabled = in.primitive_restart_enabled; - rasterizer_discard_enabled = in.rasterizer_discard_enabled; - sample_shading_enabled = in.sample_shading_enabled; - - stencil_test_enabled = in.stencil_test_enabled; - stencil_state_back_face = in.stencil_state_back_face; - stencil_state_front_face = in.stencil_state_front_face; - - rasterization_order = in.rasterization_order; - - attributes = in.attributes; - cull_mode = in.cull_mode; - front_face = in.front_face; - line_width = in.line_width; - logic_op = in.logic_op; - min_sample_shading = in.min_sample_shading; - n_dynamic_scissor_boxes = in.n_dynamic_scissor_boxes; - n_dynamic_viewports = in.n_dynamic_viewports; - n_patch_control_points = in.n_patch_control_points; - polygon_mode = in.polygon_mode; - primitive_topology = in.primitive_topology; - sample_count = in.sample_count; - sample_mask = in.sample_mask; - scissor_boxes = in.scissor_boxes; - subpass_attachment_blending_properties = in.subpass_attachment_blending_properties; - viewports = in.viewports; - - post_bake_function = in.post_bake_function; - pre_bake_function = in.pre_bake_function; - - memcpy(blend_constant, - in.blend_constant, - sizeof(blend_constant) ); - - return *this; + bake_vk_attributes_and_bindings(); } - }; + private: + void bake_vk_attributes_and_bindings(); + } GraphicsPipelineData; - typedef std::map > GraphicsPipelineConfigurations; + typedef std::map > GraphicsPipelineDataMap; /* Private functions */ - explicit GraphicsPipelineManager(std::weak_ptr in_device_ptr, - bool in_use_pipeline_cache, - std::shared_ptr in_pipeline_cache_to_reuse_ptr); - - void bake_vk_attributes_and_bindings(std::shared_ptr inout_pipeline_config_ptr); + explicit GraphicsPipelineManager(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe, + bool in_use_pipeline_cache, + Anvil::PipelineCache* in_pipeline_cache_to_reuse_ptr); + + Anvil::StructChainUniquePtr bake_graphics_pipeline_create_info (const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr, + const Anvil::PipelineLayout* in_pipeline_layout_ptr, + const VkPipeline& in_opt_base_pipeline_handle, + const int32_t& in_opt_base_pipeline_index, + const VkPipelineColorBlendStateCreateInfo* in_opt_color_blend_state_create_info_ptr, + const VkPipelineDepthStencilStateCreateInfo* in_opt_depth_stencil_state_create_info_ptr, + const VkPipelineDynamicStateCreateInfo* in_opt_dynamic_state_create_info_ptr, + const VkPipelineInputAssemblyStateCreateInfo* in_input_assembly_state_create_info_ptr, + const 
VkPipelineMultisampleStateCreateInfo* in_opt_multisample_state_create_info_ptr, + const VkPipelineRasterizationStateCreateInfo* in_rasterization_state_create_info_ptr, + const uint32_t& in_n_shader_stage_create_info_items, + const VkPipelineShaderStageCreateInfo* in_shader_stage_create_info_items_ptr, + const VkPipelineTessellationStateCreateInfo* in_opt_tessellation_state_create_info_ptr, + const VkPipelineVertexInputStateCreateInfo* in_vertex_input_state_create_info_ptr, + const VkPipelineViewportStateCreateInfo* in_opt_viewport_state_create_info_ptr) const; + Anvil::StructChainUniquePtr bake_pipeline_color_blend_state_create_info (const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr, + const Anvil::RenderPass* in_current_renderpass_ptr, + const Anvil::SubPassID& in_subpass_id) const; + Anvil::StructChainUniquePtr bake_pipeline_depth_stencil_state_create_info (const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr, + const Anvil::RenderPass* in_current_renderpass_ptr) const; + Anvil::StructChainUniquePtr bake_pipeline_dynamic_state_create_info (const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr) const; + Anvil::StructChainUniquePtr bake_pipeline_input_assembly_state_create_info (const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr) const; + Anvil::StructChainUniquePtr bake_pipeline_multisample_state_create_info (const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr) const; + Anvil::StructChainUniquePtr bake_pipeline_rasterization_state_create_info (const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr) const; + std::unique_ptr > bake_pipeline_shader_stage_create_info_chain_vector(const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr) const; + Anvil::StructChainUniquePtr bake_pipeline_tessellation_state_create_info (const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr) const; + Anvil::StructChainUniquePtr bake_pipeline_vertex_input_state_create_info (const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr) const; + Anvil::StructChainUniquePtr bake_pipeline_viewport_state_create_info (Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr, + const bool& in_is_dynamic_scissor_state_enabled, + const bool& in_is_dynamic_viewport_state_enabled) const; ANVIL_DISABLE_ASSIGNMENT_OPERATOR(GraphicsPipelineManager); ANVIL_DISABLE_COPY_CONSTRUCTOR (GraphicsPipelineManager); - /* Private members */ - GraphicsPipelineConfigurations m_pipeline_configurations; + /* Private variables */ }; }; /* Vulkan namespace */ diff --git a/include/wrappers/image.h b/include/wrappers/image.h index 11489911..e88a49e3 100644 --- a/include/wrappers/image.h +++ b/include/wrappers/image.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -31,8 +31,11 @@ #include "misc/callbacks.h" #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/types.h" #include "misc/page_tracker.h" +#include + namespace Anvil { @@ -68,11 +71,13 @@ namespace Anvil /** A wrapper class for a VkImage and the bound VkMemory object. 
*/ class Image : public CallbacksSupportProvider, public DebugMarkerSupportProvider, - public std::enable_shared_from_this + public MTSafetySupportProvider { public: /* Public functions */ + static Anvil::ImageUniquePtr create(Anvil::ImageCreateInfoUniquePtr in_create_info_ptr); + /* Transitions the image from one layout to another. * * This is a blocking call. @@ -94,183 +99,18 @@ namespace Anvil * @param in_opt_set_semaphore_ptrs A raw array of semaphores to set upon finished execution of the image layout transfer command buffer. * May be null if @param in_opt_n_set_semaphores is 0. */ - void change_image_layout(std::shared_ptr in_queue_ptr, - VkAccessFlags in_src_access_mask, - VkImageLayout in_src_layout, - VkAccessFlags in_dst_access_mask, - VkImageLayout in_dst_layout, - const VkImageSubresourceRange& in_subresource_range, - const uint32_t in_opt_n_wait_semaphores = 0, - const VkPipelineStageFlags* in_opt_wait_dst_stage_mask_ptrs = nullptr, - const std::shared_ptr* in_opt_wait_semaphore_ptrs = nullptr, - const uint32_t in_opt_n_set_semaphores = 0, - const std::shared_ptr* in_opt_set_semaphore_ptrs = nullptr); - - /** Initializes a new non-sparse Image instance *without* a memory backing. A memory region should be bound - * to the object by calling Image::set_memory() before using the object for any operations. - * - * The function can also optionally fill the image with data, as soon as memory backing is - * attached. To make it do so, pass a non-null ptr to a MipmapRawData vector via the @param - * mipmaps_ptr argument. - * - * If this constructor is used, the image can be transformed automatically to the right layout - * at set_memory() call time by setting @param in_final_image_layout argument to a value other - * than VK_IMAGE_LAYOUT_UNDEFINED and VK_IMAGE_LAYOUT_PREINITIALIZED. - * - * @param in_device_ptr Device to use. - * @param in_type Vulkan image type to use. - * @param in_format Vulkan format to use. - * @param in_tiling Vulkan image tiling to use. - * @param in_usage Vulkan image usage pattern to use. - * @param in_base_mipmap_width Width of the base mip-map. - * @param in_base_mipmap_height Height of the base mip-map. Must be at least 1 for all image types. - * @param in_base_mipmap_depth Depth of the base mip-map. Must be at least 1 for all image types. - * @param in_n_layers Number of layers to use. Must be at least 1 for all image types. - * @param in_sample_count Sample count to use. - * @param in_queue_families A combination of Anvil::QUEUE_FAMILY_* bits, indicating which device queues - * the image is going to be accessed by. - * @param in_sharing_mode Vulkan sharing mode to use. - * @param in_use_full_mipmap_chain true, if all mipmaps should be created for the image. False to only allocate - * storage for the base mip-map. - * @param in_create_flags Optional image features that the created image should support. - * @param in_post_create_image_layout Image layout to transfer the image to after it is created AND assigned memory - * backing. In order to leave the image intact, set this argument to - * VK_IMAGE_LAYOUT_UNDEFINED if @param in_opt_mipmaps_ptr is NULL, or to - * VK_IMAGE_LAYOUT_PREINITIALIZED if @param in_opt_mipmaps_ptr is not NULL. - * (which depends on what value is passed to @param in_opt_mipmaps_ptr), - * @param in_opt_mipmaps_ptr If not NULL, specified data will be used to initialize created image's mipmaps - * with content, as soon as the image is assigned a memory backing. - * - * @return New image instance, if successful, or nullptr otherwise. 
- **/ - static std::shared_ptr create_nonsparse(std::weak_ptr in_device_ptr, - VkImageType in_type, - VkFormat in_format, - VkImageTiling in_tiling, - VkImageUsageFlags in_usage, - uint32_t in_base_mipmap_width, - uint32_t in_base_mipmap_height, - uint32_t in_base_mipmap_depth, - uint32_t in_n_layers, - VkSampleCountFlagBits in_sample_count, - Anvil::QueueFamilyBits in_queue_families, - VkSharingMode in_sharing_mode, - bool in_use_full_mipmap_chain, - ImageCreateFlags in_flags, - VkImageLayout in_post_create_image_layout, - const std::vector* in_opt_mipmaps_ptr); - - /** Initializes a new non-sparse Image instance, along with a memory backing. - * - * This constructor assumes the image should be initialized in UNDEFINED layout, if no mipmap data - * is specified, or PREINITIALIZED otherwise. In the latter case, it will then proceed with filling - * the storage with mipmap data (if @param in_mipmaps_ptr is not nullptr), and finally transition - * the image to the @param in_post_create_image_layout layout. - * - * @param in_device_ptr Device to use. - * @param in_type Vulkan image type to use. - * @param in_format Vulkan format to use. - * @param in_tiling Vulkan image tiling to use. - * @param in_usage Vulkan image usage pattern to use. - * @param in_base_mipmap_width Width of the base mip-map. - * @param in_base_mipmap_height Height of the base mip-map. Must be at least 1 for all image types. - * @param in_base_mipmap_depth Depth of the base mip-map. Must be at least 1 for all image types. - * @param in_n_layers Number of layers to use. Must be at least 1 for all image types. - * @param in_sample_count Sample count to use. - * @param in_queue_families A combination of Anvil::QUEUE_FAMILY_* bits, indicating which device queues - * the image is going to be accessed by. - * @param in_sharing_mode Vulkan sharing mode to use. - * @param in_use_full_mipmap_chain true if all mipmaps should be created for the image. False to only allocate - * storage for the base mip-map. - * @param in_memory_features Memory features for the memory backing. - * @param in_create_flags Optional image features that the created image should support. - * @param in_post_create_image_layout Layout to transition the new image to. Must not be VK_IMAGE_LAYOUT_UNDEFINED or - * VK_IMAGE_LAYOUT_PREINITIALIZED. - * @param in_opt_mipmaps_ptr If not nullptr, specified MipmapRawData items will be used to drive the mipmap contents - * initialization process. Ignored if nullptr. - * - * @return New image instance, if successful, or nullptr otherwise. - **/ - static std::shared_ptr create_nonsparse(std::weak_ptr in_device_ptr, - VkImageType in_type, - VkFormat in_format, - VkImageTiling in_tiling, - VkImageUsageFlags in_usage, - uint32_t in_base_mipmap_width, - uint32_t in_base_mipmap_height, - uint32_t in_base_mipmap_depth, - uint32_t in_n_layers, - VkSampleCountFlagBits in_sample_count, - Anvil::QueueFamilyBits in_queue_families, - VkSharingMode in_sharing_mode, - bool in_use_full_mipmap_chain, - MemoryFeatureFlags in_memory_features, - ImageCreateFlags in_create_flags, - VkImageLayout in_post_create_image_layout, - const std::vector* in_mipmaps_ptr); - - /** Wrapper constructor for existing VkImage instances, as reported for - * swapchain images. Object instantiated with this constructor will NOT - * release the specified VkImage instance. - * - * The image will NOT be transitioned to any specific image layout. - * - * For argument discussion, see specification of the other create() functions. 
- **/ - static std::shared_ptr create_nonsparse(std::weak_ptr in_device_ptr, - const VkSwapchainCreateInfoKHR& in_swapchain_create_info, - VkImage in_image); - - /** Initializes a new sparse Image instance *without* physical memory backing. Memory region(s) - * should be bound to the object by calling Image::set_memory_block() before using the object - * for any operations. Whether or not all tiles need to be assigned memory blocks prior to accessing - * image contents depends on whether sparse binding or sparse residency has been requested at - * creation time. - * - * If NONALIASED or ALIASED residency is requested and the device does not support requested image - * configuration, a NULL image will be returned. - * - * User must manually configure sparse bindings for the image by using Queue::bind_sparse_memory(), - * before uploading any mip data. The mips can be uploaded using upload_mipmaps(). - * - * @param in_device_ptr Device to use. - * @param in_type Vulkan image type to use. - * @param in_format Vulkan format to use. - * @param in_tiling Vulkan image tiling to use. - * @param in_usage Vulkan image usage pattern to use. - * @param in_base_mipmap_width Width of the base mip-map. Must be at least 1 for all image types. - * @param in_base_mipmap_height Height of the base mip-map. Must be at least 1 for all image types. - * @param in_base_mipmap_depth Depth of the base mip-map. Must be at least 1 for all image types. - * @param in_n_layers Number of layers to use. Must be at least 1 for all image types. - * @param in_sample_count Sample count to use. - * @param in_queue_families A combination of Anvil::QUEUE_FAMILY_* bits, indicating which device queues - * the image is going to be accessed by. - * @param in_sharing_mode Vulkan sharing mode to use. - * @param in_use_full_mipmap_chain true, if all mipmaps should be created for the image. False to make the image - * only use one mip. - * @param in_create_flags Optional image features that the created image should support. - * @param in_sparse_residency_scope Scope of sparse residency to request for the image. - * @param in_initial_layout Initial layout to use for the image. Must either be VK_IMAGE_LAYOUT_UNDEFINED or - * VK_IMAGE_LAYOUT_PREINITIALIZED. - * - * @return New image instance, if successful, or nullptr otherwise. 
- **/ - static std::shared_ptr create_sparse(std::weak_ptr in_device_ptr, - VkImageType in_type, - VkFormat in_format, - VkImageTiling in_tiling, - VkImageUsageFlags in_usage, - uint32_t in_base_mipmap_width, - uint32_t in_base_mipmap_height, - uint32_t in_base_mipmap_depth, - uint32_t in_n_layers, - VkSampleCountFlagBits in_sample_count, - Anvil::QueueFamilyBits in_queue_families, - VkSharingMode in_sharing_mode, - bool in_use_full_mipmap_chain, - ImageCreateFlags in_create_flags, - Anvil::SparseResidencyScope in_residency_scope, - VkImageLayout in_initial_layout = VK_IMAGE_LAYOUT_UNDEFINED); + void change_image_layout(Anvil::Queue* in_queue_ptr, + Anvil::AccessFlags in_src_access_mask, + Anvil::ImageLayout in_src_layout, + Anvil::AccessFlags in_dst_access_mask, + Anvil::ImageLayout in_dst_layout, + const Anvil::ImageSubresourceRange& in_subresource_range, + const uint32_t in_opt_n_wait_semaphores = 0, + const Anvil::PipelineStageFlags* in_opt_wait_dst_stage_mask_ptrs = nullptr, + Anvil::Semaphore* const* in_opt_wait_semaphore_ptrs = nullptr, + const uint32_t in_opt_n_set_semaphores = 0, + Anvil::Semaphore* const* in_opt_set_semaphore_ptrs = nullptr); + /** Destructor */ virtual ~Image(); @@ -282,10 +122,15 @@ namespace Anvil * NOTE: This information is cached at image creation time, so the driver's impl will not be * called. */ - bool get_aspect_subresource_layout(VkImageAspectFlags in_aspect, - uint32_t in_n_layer, - uint32_t in_n_mip, - VkSubresourceLayout* out_subresource_layout_ptr) const; + bool get_aspect_subresource_layout(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_mip, + Anvil::SubresourceLayout* out_subresource_layout_ptr) const; + + const Anvil::ImageCreateInfo* get_create_info_ptr() const + { + return m_create_info_ptr.get(); + } /** Returns the underlying VkImage instance. * @@ -297,20 +142,17 @@ namespace Anvil * * In case of sparse images, the callback will only occur if at least one memory allocation * has been scheduled for this image instance. + * + * This behavior may optionally be disabled by setting @param in_bake_memory_if_necessary to false. + * Should only be done in special circumstances. + * */ - const VkImage& get_image(); + const VkImage& get_image(const bool& in_bake_memory_if_necessary = true); /** Returns information about the data alignment required by the underlying VkImage instance */ - VkDeviceSize get_image_alignment() const - { - return m_alignment; - } - - /** Returns flags specified at image creation time. - **/ - Anvil::ImageCreateFlags get_image_create_flags() const + VkDeviceSize get_image_alignment(const uint32_t& in_n_plane) const { - return m_create_flags; + return m_plane_index_to_memory_properties_map.at(in_n_plane).alignment; } /** Returns extent of a user-specified image mip as a VkExtent2D structure. @@ -329,16 +171,10 @@ namespace Anvil */ VkExtent3D get_image_extent_3D(uint32_t in_n_mipmap) const; - /** Returns information about the image format used to create the underlying VkImage instance */ - VkFormat get_image_format() const - { - return m_format; - } - /** Returns information about the memory types the underlying VkImage instance supports */ - uint32_t get_image_memory_types() const + uint32_t get_image_memory_types(const uint32_t& in_n_plane) const { - return m_memory_types; + return m_plane_index_to_memory_properties_map.at(in_n_plane).memory_types; } /** Returns information about size of the mipmap at index @param in_n_mipmap. 
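A sketch of the new Anvil-typed change_image_layout() overload declared above, transitioning a preinitialized color image to shader-read layout. The queue/image pointers, the ImageSubresourceRange field names and the Anvil enumerator spellings used below are assumptions, not quoted from the headers:

    Anvil::ImageSubresourceRange subresource_range;

    subresource_range.aspect_mask      = Anvil::ImageAspectFlagBits::COLOR_BIT; /* assumed spelling */
    subresource_range.base_array_layer = 0;
    subresource_range.base_mip_level   = 0;
    subresource_range.layer_count      = 1;
    subresource_range.level_count      = 1;

    image_ptr->change_image_layout(universal_queue_ptr,
                                   Anvil::AccessFlagBits::HOST_WRITE_BIT,        /* in_src_access_mask */
                                   Anvil::ImageLayout::PREINITIALIZED,           /* in_src_layout      */
                                   Anvil::AccessFlagBits::SHADER_READ_BIT,       /* in_dst_access_mask */
                                   Anvil::ImageLayout::SHADER_READ_ONLY_OPTIMAL, /* in_dst_layout      */
                                   subresource_range);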
@@ -358,60 +194,12 @@ namespace Anvil uint32_t* out_opt_height_ptr, uint32_t* out_opt_depth_ptr) const; - /** Returns information about the number of layers stored by the underlying VkImage instance */ - uint32_t get_image_n_layers() const - { - return m_n_layers; - } - - /** Returns information about the number of mipmaps stored by the underlying VkImage instance */ - uint32_t get_image_n_mipmaps() const - { - return m_n_mipmaps; - } - - /** Returns queue families compatible with the image */ - Anvil::QueueFamilyBits get_image_queue_families() const - { - return m_queue_families; - } - - /** Returns information about the number of samples stored by the underlying VkImage instance */ - VkSampleCountFlagBits get_image_sample_count() const - { - return static_cast(m_sample_count); - } - - /** Returns image tiling */ - VkImageTiling get_image_tiling() const - { - return m_tiling; - } - - /** Returns image sharing mode, as specified at creation time */ - VkSharingMode get_image_sharing_mode() const - { - return m_sharing_mode; - } - /** Returns information about the amount of memory the underlying VkImage instance requires * to work correctly. **/ - VkDeviceSize get_image_storage_size() const - { - return m_storage_size; - } - - /* Returns image type */ - VkImageType get_image_type() const - { - return m_type; - } - - /* Returns image usage flags */ - VkImageUsageFlags get_image_usage() const + VkDeviceSize get_image_storage_size(const uint32_t& in_n_plane) const { - return m_usage; + return m_plane_index_to_memory_properties_map.at(in_n_plane).storage_size; } /** Returns a pointer to the underlying memory block wrapper instance. @@ -424,24 +212,40 @@ namespace Anvil * * In case of sparse images, the callback will only occur if at least one memory allocation * has been scheduled for this image instance. + * + * NOTE: The function always returns nullptr for PRTs. However, the callback takes place nevertheless. + * + * @param in_n_plane Must be 0 for non-YUV or joint YUV images. For disjoint YUV images, the value must be between 0 + * and 2 (inclusive). **/ - std::shared_ptr get_memory_block(); + Anvil::MemoryBlock* get_memory_block(const uint32_t& in_n_plane = 0); - /** Returns VkMemoryRequirements structure, as reported by the driver for this Image instance. */ - const VkMemoryRequirements& get_memory_requirements() const + const uint32_t& get_n_mipmaps() const { - return m_memory_reqs; + return m_n_mipmaps; } + /** Returns SFR tile size for the image. + * + * Can only be called if the following requirements are met: + * + * 1. Parent device is a mGPU device instance. + * 2. Image has been initialized with the VK_IMAGE_CREATE_BIND_SFR_BIT_KHR flag. + * + * TODO. + * + */ + bool get_SFR_tile_size(VkExtent2D* out_result_ptr) const; + /** Returns a structure filled with details required to correctly bind tiles to a sparse image. * * This function can only be called against sparse images with ALIASED or NONALIASED residency. **/ - bool get_sparse_image_aspect_properties(const VkImageAspectFlagBits in_aspect, + bool get_sparse_image_aspect_properties(const Anvil::ImageAspectFlagBits in_aspect, const Anvil::SparseImageAspectProperties** out_result_ptr_ptr) const; /** Returns a filled subresource range descriptor, covering all layers & mipmaps of the image */ - VkImageSubresourceRange get_subresource_range() const; + Anvil::ImageSubresourceRange get_subresource_range() const; /** Tells whether this image provides data for the specified image aspects. 
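The plane-indexed getters replace the old single-value queries; a minimal sketch, noting that plane index 0 is the only valid value for single-planar and joint YUV images:

    const uint32_t     n_plane      = 0;
    const VkDeviceSize alignment    = image_ptr->get_image_alignment   (n_plane);
    const uint32_t     memory_types = image_ptr->get_image_memory_types(n_plane);
    const VkDeviceSize storage_size = image_ptr->get_image_storage_size(n_plane);

    /* Memory block currently bound to the plane (nullptr for PRTs, per the note above). */
    Anvil::MemoryBlock* memory_block_ptr = image_ptr->get_memory_block(n_plane);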
* @@ -449,7 +253,7 @@ namespace Anvil * * @return true if data for all specified aspects is provided by the image, false otherwise. */ - bool has_aspects(VkImageAspectFlags in_aspects) const; + bool has_aspects(const Anvil::ImageAspectFlags& in_aspects) const; /** Tells whether a physical memory page is assigned to the specified texel location. * @@ -465,28 +269,34 @@ namespace Anvil * @return true if physical memory is bound to the specified location, false otherwise. * */ - bool is_memory_bound_for_texel(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_mip, - uint32_t in_x, - uint32_t in_y, - uint32_t in_z) const; - - /** Tells whether this Image wrapper instance holds a sparse image */ - bool is_sparse() const + bool is_memory_bound_for_texel(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_mip, + uint32_t in_x, + uint32_t in_y, + uint32_t in_z) const; + + bool prefers_dedicated_allocation(const uint32_t& in_n_plane) const { - return m_is_sparse; + return m_plane_index_to_memory_properties_map.at(in_n_plane).prefers_dedicated_allocation; } - /** Tells whether this Image wrapper instance holds a swapchain image */ - bool is_swapchain_image() const + bool requires_dedicated_allocation(const uint32_t& in_n_plane) const { - return m_is_swapchain_image; + return m_plane_index_to_memory_properties_map.at(in_n_plane).requires_dedicated_allocation; } /** Binds the specified region of a Vulkan memory object to an Image and caches information * about the new binding. * + * NOTE: This function can be used for both single- and multi-GPU devices. In case of the latter: + * 1. The image must NOT have been created for a particular swapchain instance. + * 2. The image must NOT be sparse. + * 3. If @param in_memory_block_ptr uses memory taken from a heap without VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR + * flag, each physical device attaches to its own instance of the memory. + * 4. If @param in_memory_block_ptr uses memory taken from a heap WITH the flag, each physical device attaches + * to memory instance 0. + * * NOTE: If used against sparse images, the assigned memory block's size must EXACTLY match * the amount of memory required by the image. Otherwise, the function will fail. * @@ -497,7 +307,41 @@ namespace Anvil * * @return true if successful, false otherwise. **/ - bool set_memory(std::shared_ptr in_memory_block_ptr); + bool set_memory(MemoryBlockUniquePtr in_memory_block_ptr); + bool set_memory(Anvil::MemoryBlock* in_memory_block_ptr); + + /** Binds the image to instances of memory for physical devices specified by the caller. + * + * NOTE: This function must NOT be used for sparse images. + * NOTE: This function must NOT be used for images created for a single-GPU device. + * NOTE: This function can only be used for images, whose memory comes off a heap with the + * VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR flag. + * NOTE: It is illegal to change the memory backing, after one has been associated with an Image instance. + * + * TODO + **/ + bool set_memory(MemoryBlockUniquePtr in_memory_block_ptr, + uint32_t in_n_device_group_indices, + const uint32_t* in_device_group_indices_ptr); + bool set_memory(Anvil::MemoryBlock* in_memory_block_ptr, + uint32_t in_n_device_group_indices, + const uint32_t* in_device_group_indices_ptr); + + /** Binds the image to instances of memory, according to SFR rectangles specified by the caller. + * For more information, please read VK_KHR_device_group extension specification. 
+ * + * NOTE: This function must NOT be used for sparse images. + * NOTE: This function must NOT be used for images created for a single-GPU device. + * NOTE: It is illegal to change the memory backing, after one has been associated with an Image instance. + * + * TODO + **/ + bool set_memory(MemoryBlockUniquePtr in_memory_block_ptr, + uint32_t in_n_SFR_rects, + const VkRect2D* in_SFRs_ptr); + bool set_memory(Anvil::MemoryBlock* in_memory_block_ptr, + uint32_t in_n_SFR_rects, + const VkRect2D* in_SFRs_ptr); /** Updates image with specified mip-map data. Blocks until the operation finishes executing. * @@ -510,13 +354,11 @@ namespace Anvil * to, in order to perform the request. Must not be NULL. * **/ - void upload_mipmaps(const std::vector* in_mipmaps_ptr, - VkImageLayout in_current_image_layout, - VkImageLayout* out_new_image_layout_ptr); + void upload_mipmaps(const std::vector* in_mipmaps_ptr, + Anvil::ImageLayout in_current_image_layout, + Anvil::ImageLayout* out_new_image_layout_ptr); private: - /* Private type declarations */ - /** Defines dimensions of a single image mip-map */ typedef struct Mipmap { @@ -541,7 +383,7 @@ namespace Anvil { /* Each item in this vector holds a reference to a MemoryBlock instance, which provides memory * backing for the corresponding linearized image tile location */ - std::vector > tile_to_block_mappings; + std::vector tile_to_block_mappings; uint32_t n_tiles_x; uint32_t n_tiles_y; @@ -562,7 +404,7 @@ namespace Anvil uint32_t in_y, uint32_t in_z) const { - const uint32_t tile_x = in_x / tile_width; + const uint32_t tile_x = in_x / tile_width; const uint32_t tile_y = in_y / tile_height; const uint32_t tile_z = in_z / tile_depth; @@ -632,9 +474,9 @@ namespace Anvil { std::vector mips; - uint32_t n_total_tail_pages; - std::map, uint32_t> tail_pages_per_binding; - std::vector tail_occupancy; + uint32_t n_total_tail_pages; + std::map tail_pages_per_binding; + std::vector tail_occupancy; AspectPageOccupancyLayerData() { @@ -654,160 +496,122 @@ namespace Anvil } AspectPageOccupancyData; /* Private functions */ - /** See corresponding create_nonsparse() function for specification */ - Image(std::weak_ptr in_device_ptr, - VkImageType in_type, - VkFormat in_format, - VkImageTiling in_tiling, - VkSharingMode in_sharing_mode, - VkImageUsageFlags in_usage, - uint32_t in_base_mipmap_width, - uint32_t in_base_mipmap_height, - uint32_t in_base_mipmap_depth, - uint32_t in_n_layers, - VkSampleCountFlagBits in_sample_count, - bool in_use_full_mipmap_chain, - ImageCreateFlags in_create_flags, - Anvil::QueueFamilyBits in_queue_families, - VkImageLayout in_post_create_image_layout, - const std::vector* in_opt_mipmaps_ptr); - - /** See corresponding create_nonsparse() function for specification **/ - Image(std::weak_ptr in_device_ptr, - VkImageType in_type, - VkFormat in_format, - VkImageTiling in_tiling, - VkSharingMode in_sharing_mode, - VkImageUsageFlags in_usage, - uint32_t in_base_mipmap_width, - uint32_t in_base_mipmap_height, - uint32_t in_base_mipmap_depth, - uint32_t in_n_layers, - VkSampleCountFlagBits in_sample_count, - Anvil::QueueFamilyBits in_queue_families, - bool in_use_full_mipmap_chain, - ImageCreateFlags in_create_flags, - VkImageLayout in_post_create_image_layout, - const std::vector* in_opt_mipmaps_ptr); - - /** See corresponding create_nonsparse() function for specification **/ - Image(std::weak_ptr in_device_ptr, - VkImage in_image, - VkFormat in_format, - VkImageTiling in_tiling, - VkSharingMode in_sharing_mode, - VkImageUsageFlags in_usage, - uint32_t 
in_base_mipmap_width, - uint32_t in_base_mipmap_height, - uint32_t in_base_mipmap_depth, - uint32_t in_n_layers, - uint32_t in_n_mipmaps, - VkSampleCountFlagBits in_sample_count, - uint32_t in_n_slices, - Anvil::ImageCreateFlags in_create_flags, - Anvil::QueueFamilyBits in_queue_families); - - /** See corresponding create_sparse() function for specification **/ - Image(std::weak_ptr in_device_ptr, - VkImageType in_type, - VkFormat in_format, - VkImageTiling in_tiling, - VkImageUsageFlags in_usage, - uint32_t in_base_mipmap_width, - uint32_t in_base_mipmap_height, - uint32_t in_base_mipmap_depth, - uint32_t in_n_layers, - VkSampleCountFlagBits in_sample_count, - Anvil::QueueFamilyBits in_queue_families, - VkSharingMode in_sharing_mode, - bool in_use_full_mipmap_chain, - ImageCreateFlags in_create_flags, - Anvil::SparseResidencyScope in_residency_scope); - - Image (const Image&); - Image& operator=(const Image&); - - void init (bool in_use_full_mipmap_chain, - MemoryFeatureFlags in_memory_features, - const VkImageLayout* in_start_image_layout_ptr); + + Image(Anvil::ImageCreateInfoUniquePtr in_create_info_ptr); + + bool do_sanity_checks_for_physical_device_binding(const Anvil::MemoryBlock* in_memory_block_ptr, + uint32_t in_n_physical_devices) const; + bool do_sanity_checks_for_sfr_binding (uint32_t in_n_SFR_rects, + const VkRect2D* in_SFRs_ptr) const; + + bool init (); void init_mipmap_props (); - void init_page_occupancy(const std::vector& memory_reqs); - - void on_memory_backing_update (const VkImageSubresource& in_subresource, - VkOffset3D in_offset, - VkExtent3D in_extent, - std::shared_ptr in_memory_block_ptr, - VkDeviceSize in_memory_block_start_offset); - void on_memory_backing_opaque_update(VkDeviceSize in_resource_offset, - VkDeviceSize in_size, - std::shared_ptr in_memory_block_ptr, - VkDeviceSize in_memory_block_start_offset); - - void set_memory_sparse(VkDeviceSize in_resource_offset, - VkDeviceSize in_size, - std::shared_ptr in_memory_block_ptr, - VkDeviceSize in_memory_block_start_offset); - void set_memory_sparse(const VkImageSubresource& in_subresource, - VkOffset3D in_offset, - VkExtent3D in_extent, - std::shared_ptr in_memory_block_ptr, - VkDeviceSize in_memory_block_start_offset); - - void transition_to_post_create_image_layout(VkAccessFlags in_src_access_mask, - VkImageLayout in_src_layout); - - /** TODO. - * - * Does NOT set ::pQueueFamilyIndices and ::queueFamilyIndexCount! 
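To illustrate the SFR-based set_memory() overloads introduced earlier in this header, a sketch that splits a render target between the two physical devices of a device group; image_width/image_height, the memory block pointer and the left/right split are assumptions, and the image must satisfy the multi-GPU constraints listed in the public section above:

    VkRect2D sfr_rects[2];

    /* E.g. left half fetched locally by physical device 0, right half by physical device 1. */
    sfr_rects[0].offset.x      = 0;
    sfr_rects[0].offset.y      = 0;
    sfr_rects[0].extent.width  = image_width / 2;
    sfr_rects[0].extent.height = image_height;

    sfr_rects[1].offset.x      = static_cast<int32_t>(image_width / 2);
    sfr_rects[1].offset.y      = 0;
    sfr_rects[1].extent.width  = image_width / 2;
    sfr_rects[1].extent.height = image_height;

    image_ptr->set_memory(memory_block_ptr, /* Anvil::MemoryBlock*, not owned by the image */
                          2,                /* in_n_SFR_rects */
                          sfr_rects);       /* in_SFRs_ptr    */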
- */ - static VkImageCreateInfo get_create_info_for_swapchain(std::shared_ptr in_swapchain_ptr); + void init_page_occupancy(const std::vector& in_memory_reqs); + void init_sfr_tile_size (); + + bool set_memory_internal (Anvil::MemoryBlock* in_memory_block_ptr, + bool in_owned_by_image, + uint32_t in_n_device_group_indices, + const uint32_t* in_device_group_indices_ptr, + uint32_t in_n_SFR_rects, + const VkRect2D* in_SFRs_ptr); + bool set_swapchain_memory_internal(uint32_t in_swapchain_image_index, + uint32_t in_opt_n_SFR_rects, + const VkRect2D* in_opt_SFRs_ptr, + uint32_t in_opt_n_device_indices, + const uint32_t* in_opt_device_indices); + + + void on_memory_backing_update (const Anvil::ImageSubresource& in_subresource, + VkOffset3D in_offset, + VkExtent3D in_extent, + Anvil::MemoryBlock* in_memory_block_ptr, + VkDeviceSize in_memory_block_start_offset, + bool in_memory_block_owned_by_image); + void on_memory_backing_opaque_update(uint32_t in_n_plane, + VkDeviceSize in_resource_offset, + VkDeviceSize in_size, + Anvil::MemoryBlock* in_memory_block_ptr, + VkDeviceSize in_memory_block_start_offset, + bool in_memory_block_owned_by_image); + + void transition_to_post_alloc_image_layout(Anvil::AccessFlags in_src_access_mask, + Anvil::ImageLayout in_src_layout); /* Private members */ - VkSampleCountFlagsVariable(m_sample_count); - VkImageUsageFlagsVariable (m_usage); - - typedef std::pair LayerMipKey; - typedef std::map LayerMipToSubresourceLayoutMap; - typedef std::map AspectToLayerMipToSubresourceLayoutMap; - - VkDeviceSize m_alignment; - AspectToLayerMipToSubresourceLayoutMap m_aspects; /* only used for linear images */ - Anvil::ImageCreateFlags m_create_flags; - uint32_t m_depth; - std::weak_ptr m_device_ptr; - VkFormat m_format; - bool m_has_transitioned_to_post_create_layout; - uint32_t m_height; + typedef std::pair LayerMipKey; + typedef std::map LayerMipToSubresourceLayoutMap; + typedef std::map AspectToLayerMipToSubresourceLayoutMap; + + /* NOTE: For YUV images, this map uses .._PLANE_x_.. aspects as keys. Non-disjoint YUV images only use PLANE_0 key. + * For all others, this map uses COLOR/DEPTH/STENCIL aspect keys. + * + * NOTE: This field is only used for linear images. 
+ */ + AspectToLayerMipToSubresourceLayoutMap m_linear_image_aspect_data; + + Anvil::ImageCreateInfoUniquePtr m_create_info_ptr; + bool m_has_transitioned_to_post_alloc_layout; VkImage m_image; - bool m_image_owner; - bool m_is_sparse; - bool m_is_swapchain_image; - VkMemoryRequirements m_memory_reqs; - uint32_t m_memory_types; - Mipmaps m_mipmaps; - uint32_t m_n_layers; + Mipmaps m_mipmap_props; uint32_t m_n_mipmaps; - uint32_t m_n_slices; - VkImageLayout m_post_create_layout; - Anvil::QueueFamilyBits m_queue_families; - Anvil::SparseResidencyScope m_residency_scope; - VkSharingMode m_sharing_mode; - VkDeviceSize m_storage_size; - VkImageTiling m_tiling; - VkImageType m_type; - bool m_uses_full_mipmap_chain; - uint32_t m_width; - - std::shared_ptr m_metadata_memory_block_ptr; - std::shared_ptr m_memory_block_ptr; - bool m_memory_owner; - - std::vector m_mipmaps_to_upload; - std::unique_ptr m_page_tracker_ptr; /* only used for sparse non-resident images */ - std::map > m_sparse_aspect_page_occupancy; - std::map m_sparse_aspect_props; + bool m_swapchain_memory_assigned; + + struct PerPlaneMemoryProperties + { + VkDeviceSize alignment; + uint32_t memory_types; + std::unique_ptr page_tracker_ptr; /* only used for sparse binding images */ + bool prefers_dedicated_allocation; + bool requires_dedicated_allocation; + VkDeviceSize storage_size; + + PerPlaneMemoryProperties() + :alignment (UINT64_MAX), + memory_types (0), + prefers_dedicated_allocation (false), + requires_dedicated_allocation(false), + storage_size (0) + { + /* Stub */ + } + + PerPlaneMemoryProperties(const VkDeviceSize& in_alignment, + const uint32_t& in_memory_types, + const bool& in_prefers_dedicated_allocation, + const bool& in_requires_dedicated_allocation, + const VkDeviceSize& in_storage_size) + :alignment (in_alignment), + memory_types (in_memory_types), + prefers_dedicated_allocation (in_prefers_dedicated_allocation), + requires_dedicated_allocation(in_requires_dedicated_allocation), + storage_size (in_storage_size) + { + /* Stub */ + } + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(PerPlaneMemoryProperties); + }; + + /* NOTE: This map always holds exactly one item (key: 0) for single-planar (ie.non-YUV) and joint YUV images. */ + std::unordered_map m_plane_index_to_memory_properties_map; + + std::vector m_peer_device_indices; + std::vector m_peer_sfr_rects; + VkExtent2D m_sfr_tile_size; + + MemoryBlockUniquePtr m_metadata_memory_block_ptr; + std::vector m_memory_blocks_owned; + MemoryBlock *m_memory_block_external = nullptr; + + std::map m_sparse_aspect_page_occupancy; + std::vector > m_sparse_aspect_page_occupancy_data_items_owned; + std::map m_sparse_aspect_props; friend class Anvil::Queue; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(Image); + ANVIL_DISABLE_COPY_CONSTRUCTOR(Image); }; }; /* Vulkan namespace */ diff --git a/include/wrappers/image_view.h b/include/wrappers/image_view.h index ab6529bc..3a1e0ac2 100644 --- a/include/wrappers/image_view.h +++ b/include/wrappers/image_view.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -29,228 +29,18 @@ #define WRAPPERS_IMAGE_VIEW_H #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/types.h" namespace Anvil { - class ImageView : public DebugMarkerSupportProvider + class ImageView : public DebugMarkerSupportProvider, + public MTSafetySupportProvider { public: /* Public functions */ - /** Creates a single-sample 1D image view wrapper instance. - * - * @param in_device_ptr Device to use. - * @param in_image_ptr Image instance to create a view for. Must not be nullptr. The specified - * object will be retained and release at ImageView release time. - * @param in_n_base_layer Base layer index. - * @param in_n_base_mipmap_level Base mipmap level. - * @param in_n_mipmaps Number of mipmaps to include in the view. - * @param in_aspect_mask Image aspect mask to use when creating the Vulkan image view instance. - * @param in_format Image view format. - * @param in_swizzle_red Channel to use for the red component when sampling the view. - * @param in_swizzle_green Channel to use for the green component when sampling the view. - * @param in_swizzle_blue Channel to use for the blue component when sampling the view. - * @param in_swizzle_alpha Channel to use for the alpha component when sampling the view. - * - * @return New ImageView instance, if function executes successfully; nullptr otherwise. - **/ - static std::shared_ptr create_1D(std::weak_ptr in_device_ptr, - std::shared_ptr in_image_ptr, - uint32_t in_n_base_layer, - uint32_t in_n_base_mipmap_level, - uint32_t in_n_mipmaps, - VkImageAspectFlags in_aspect_mask, - VkFormat in_format, - VkComponentSwizzle in_swizzle_red, - VkComponentSwizzle in_swizzle_green, - VkComponentSwizzle in_swizzle_blue, - VkComponentSwizzle in_swizzle_alpha); - - /** Creates a single-sample 1D array image view wrapper instance. - * - * @param in_device_ptr Device to use. - * @param in_image_ptr Image instance to create a view for. Must not be nullptr. The specified - * object will be retained and release at ImageView release time. - * @param in_n_base_layer Base layer index. - * @param in_n_layers Number of layers to include in the view. - * @param in_n_base_mipmap_level Base mipmap level. - * @param in_n_mipmaps Number of mipmaps to include in the view. - * @param in_aspect_mask Image aspect mask to use when creating the Vulkan image view instance. - * @param in_format Image view format. - * @param in_swizzle_red Channel to use for the red component when sampling the view. - * @param in_swizzle_green Channel to use for the green component when sampling the view. - * @param in_swizzle_blue Channel to use for the blue component when sampling the view. - * @param in_swizzle_alpha Channel to use for the alpha component when sampling the view. - * - * @return New ImageView instance, if function executes successfully; nullptr otherwise. - **/ - static std::shared_ptr create_1D_array(std::weak_ptr in_device_ptr, - std::shared_ptr in_image_ptr, - uint32_t in_n_base_layer, - uint32_t in_n_layers, - uint32_t in_n_base_mipmap_level, - uint32_t in_n_mipmaps, - VkImageAspectFlags in_aspect_mask, - VkFormat in_format, - VkComponentSwizzle in_swizzle_red, - VkComponentSwizzle in_swizzle_green, - VkComponentSwizzle in_swizzle_blue, - VkComponentSwizzle in_swizzle_alpha); - - /** Creates a single-sample or a multi-sample 2D image view wrapper instance. 
The view will be - * single-sample if @param in_image_ptr uses 1 sample per texel, and multi-sample otherwise. - * - * @param in_device_ptr Device to use - * @param in_image_ptr Image instance to create a view for. Must not be nullptr. The specified - * object will be retained and release at ImageView release time. - * @param in_n_base_layer Base layer index. - * @param in_n_base_mipmap_level Base mipmap level. - * @param in_n_mipmaps Number of mipmaps to include in the view. - * @param in_aspect_mask Image aspect mask to use when creating the Vulkan image view instance. - * @param in_format Image view format. - * @param in_swizzle_red Channel to use for the red component when sampling the view. - * @param in_swizzle_green Channel to use for the green component when sampling the view. - * @param in_swizzle_blue Channel to use for the blue component when sampling the view. - * @param in_swizzle_alpha Channel to use for the alpha component when sampling the view. - * - * @return New ImageView instance, if function executes successfully; nullptr otherwise. - **/ - static std::shared_ptr create_2D(std::weak_ptr in_device_ptr, - std::shared_ptr in_image_ptr, - uint32_t in_n_base_layer, - uint32_t in_n_base_mipmap_level, - uint32_t in_n_mipmaps, - VkImageAspectFlags in_aspect_mask, - VkFormat in_format, - VkComponentSwizzle in_swizzle_red, - VkComponentSwizzle in_swizzle_green, - VkComponentSwizzle in_swizzle_blue, - VkComponentSwizzle in_swizzle_alpha); - - /** Creates a single-sample or a multi-sample 2D array image view wrapper instance. The view will be - * single-sample if @param in_image_ptr uses 1 sample per texel, and multi-sample otherwise. - * - * @param in_device_ptr Device to use. - * @param in_image_ptr Image instance to create a view for. Must not be nullptr. The specified - * object will be retained and release at ImageView release time. - * @param in_n_base_layer Base layer index. - * @param in_n_layers Number of layers to include in the view. - * @param in_n_base_mipmap_level Base mipmap level. - * @param in_n_mipmaps Number of mipmaps to include in the view. - * @param in_aspect_mask Image aspect mask to use when creating the Vulkan image view instance. - * @param in_format Image view format. - * @param in_swizzle_red Channel to use for the red component when sampling the view. - * @param in_swizzle_green Channel to use for the green component when sampling the view. - * @param in_swizzle_blue Channel to use for the blue component when sampling the view. - * @param in_swizzle_alpha Channel to use for the alpha component when sampling the view. - * - * @return New ImageView instance, if function executes successfully; nullptr otherwise. - **/ - static std::shared_ptr create_2D_array(std::weak_ptr in_device_ptr, - std::shared_ptr in_image_ptr, - uint32_t in_n_base_layer, - uint32_t in_n_layers, - uint32_t in_n_base_mipmap_level, - uint32_t in_n_mipmaps, - VkImageAspectFlags in_aspect_mask, - VkFormat in_format, - VkComponentSwizzle in_swizzle_red, - VkComponentSwizzle in_swizzle_green, - VkComponentSwizzle in_swizzle_blue, - VkComponentSwizzle in_swizzle_alpha); - - /** Creates a single-sample 3D image view wrapper instance. - * - * @param in_device_ptr Device to use. - * @param in_image_ptr Image instance to create a view for. Must not be nullptr. The specified - * object will be retained and release at ImageView release time. - * @param in_n_base_slice Base slice index. - * @param in_n_slices Number of slices to include in the view. 
- * @param in_n_base_mipmap_level Base mipmap level. - * @param in_n_mipmaps Number of mipmaps to include in the view. - * @param in_aspect_mask Image aspect mask to use when creating the Vulkan image view instance. - * @param in_format Image view format. - * @param in_swizzle_red Channel to use for the red component when sampling the view. - * @param in_swizzle_green Channel to use for the green component when sampling the view. - * @param in_swizzle_blue Channel to use for the blue component when sampling the view. - * @param in_swizzle_alpha Channel to use for the alpha component when sampling the view. - * - * @return New ImageView instance, if function executes successfully; nullptr otherwise. - **/ - static std::shared_ptr create_3D(std::weak_ptr in_device_ptr, - std::shared_ptr in_image_ptr, - uint32_t in_n_base_slice, - uint32_t in_n_slices, - uint32_t in_n_base_mipmap_level, - uint32_t in_n_mipmaps, - VkImageAspectFlags in_aspect_mask, - VkFormat in_format, - VkComponentSwizzle in_swizzle_red, - VkComponentSwizzle in_swizzle_green, - VkComponentSwizzle in_swizzle_blue, - VkComponentSwizzle in_swizzle_alpha); - - /** Creates a cube-map image view wrapper instance. - * - * @param in_device_ptr Device to use. - * @param in_image_ptr Image instance to create a view for. Must not be nullptr. The specified - * object will be retained and release at ImageView release time. - * @param in_n_base_layer Base layer index. - * @param in_n_base_mipmap_level Base mipmap level. - * @param in_n_mipmaps Number of mipmaps to include in the view. - * @param in_aspect_mask Image aspect mask to use when creating the Vulkan image view instance. - * @param in_format Image view format. - * @param in_swizzle_red Channel to use for the red component when sampling the view. - * @param in_swizzle_green Channel to use for the green component when sampling the view. - * @param in_swizzle_blue Channel to use for the blue component when sampling the view. - * @param in_swizzle_alpha Channel to use for the alpha component when sampling the view. - * - * @return New ImageView instance, if function executes successfully; nullptr otherwise. - **/ - static std::shared_ptr create_cube_map(std::weak_ptr in_device_ptr, - std::shared_ptr in_image_ptr, - uint32_t in_n_base_layer, - uint32_t in_n_base_mipmap_level, - uint32_t in_n_mipmaps, - VkImageAspectFlags in_aspect_mask, - VkFormat in_format, - VkComponentSwizzle in_swizzle_red, - VkComponentSwizzle in_swizzle_green, - VkComponentSwizzle in_swizzle_blue, - VkComponentSwizzle in_swizzle_alpha); - - /** Creates a cube-map array image view wrapper instance. - * - * @param in_device_ptr Device to use. - * @param in_image_ptr Image instance to create a view for. Must not be nullptr. The specified - * object will be retained and release at ImageView release time. - * @param in_n_base_layer Base layer index. - * @param in_n_cube_maps Number of cube-maps to include in the view. The number of layers created - * for the view will be equal to @param in_n_cube_maps * 6. - * @param in_n_base_mipmap_level Base mipmap level. - * @param in_n_mipmaps Number of mipmaps to include in the view. - * @param in_aspect_mask Image aspect mask to use when creating the Vulkan image view instance. - * @param in_format Image view format. - * @param in_swizzle_red Channel to use for the red component when sampling the view. - * @param in_swizzle_green Channel to use for the green component when sampling the view. 
- * @param in_swizzle_blue Channel to use for the blue component when sampling the view. - * @param in_swizzle_alpha Channel to use for the alpha component when sampling the view. - * - * @return New ImageView instance, if function executes successfully; nullptr otherwise. - **/ - static std::shared_ptr create_cube_map_array(std::weak_ptr in_device_ptr, - std::shared_ptr in_image_ptr, - uint32_t in_n_base_layer, - uint32_t in_n_cube_maps, - uint32_t in_n_base_mipmap_level, - uint32_t in_n_mipmaps, - VkImageAspectFlags in_aspect_mask, - VkFormat in_format, - VkComponentSwizzle in_swizzle_red, - VkComponentSwizzle in_swizzle_green, - VkComponentSwizzle in_swizzle_blue, - VkComponentSwizzle in_swizzle_alpha); + static Anvil::ImageViewUniquePtr create(Anvil::ImageViewCreateInfoUniquePtr in_create_info_ptr); /** Destructor. Should only be called when the reference counter drops to zero. * @@ -258,24 +48,6 @@ namespace Anvil **/ virtual ~ImageView(); - /* Returns the aspect assigned to the image view */ - VkImageAspectFlags get_aspect() const - { - return m_aspect_mask; - } - - /* Returns base layer index used by the image view */ - uint32_t get_base_layer() const - { - return m_n_base_layer; - } - - /* Returns base mip level used by the image view */ - uint32_t get_base_mipmap_level() const - { - return m_n_base_mipmap_level; - } - /** Returns dimensions of the base mipmap, as seen from the image view. * * @param out_opt_base_mipmap_width_ptr If not nullptr, deref will be set to mipmap's width. @@ -287,71 +59,20 @@ namespace Anvil uint32_t* out_opt_base_mipmap_height_ptr, uint32_t* out_opt_base_mipmap_depth_ptr) const; - /** Returns image view's format */ - const VkFormat get_image_format() const + const Anvil::ImageViewCreateInfo* get_create_info_ptr() const { - return m_format; + return m_create_info_ptr.get(); } /** Returns the encapsulated raw Vulkan image view handle. */ - const VkImageView get_image_view() const + VkImageView get_image_view() const { return m_image_view; } - /** Returns number of layers encapsulated by the image view */ - uint32_t get_n_layers() const - { - return m_n_layers; - } - - /** Returns number of mipmaps encapsulated by the image view */ - uint32_t get_n_mipmaps() const - { - return m_n_mipmaps; - } - - /** Returns number of slices encapsulated by the image view */ - uint32_t get_n_slices() const - { - return m_n_slices; - } - - /** Returns a pointer to the parent image, from which the image view has been created. */ - std::shared_ptr get_parent_image() const - { - return m_parent_image_ptr; - } - - /** Returns a VkImageSubresourceRange struct that describes the range of subresources covered - * by this image view. + /** Returns a struct that describes the range of subresources covered by this image view. */ - VkImageSubresourceRange get_subresource_range() const - { - VkImageSubresourceRange result; - - result.aspectMask = m_aspect_mask; - result.baseArrayLayer = m_n_base_layer; - result.baseMipLevel = m_n_base_mipmap_level; - result.layerCount = (m_type == VK_IMAGE_VIEW_TYPE_3D) ? 
m_n_slices : m_n_layers; - result.levelCount = m_n_mipmaps; - - return result; - } - - /** Returns swizzle array assigned to the image view */ - void get_swizzle_array(VkComponentSwizzle* out_swizzle_array_ptr) const - { - memcpy(out_swizzle_array_ptr, - m_swizzle_array, - sizeof(m_swizzle_array) ); - } - - /** Returns image view type of the image view instance */ - const VkImageViewType get_type() const - { - return m_type; - } + Anvil::ImageSubresourceRange get_subresource_range() const; /** Tells whether the subresource range described by this image view intersects * with another image view's subres range. @@ -362,42 +83,21 @@ namespace Anvil * * @return True if an intersection was found, false otherwise. */ - bool intersects(std::shared_ptr in_image_view_ptr) const; + bool intersects(const Anvil::ImageView* in_image_view_ptr) const; private: /* Private functions */ ImageView (const ImageView&); ImageView& operator=(const ImageView&); - ImageView(std::weak_ptr in_device_ptr, - std::shared_ptr in_parent_image_ptr); + ImageView(Anvil::ImageViewCreateInfoUniquePtr in_create_info_ptr); - bool init(VkImageViewType in_image_view_type, - uint32_t in_n_base_layer, - uint32_t in_n_layers, - uint32_t in_n_slices, - uint32_t in_n_base_mipmap_level, - uint32_t in_n_mipmaps, - VkImageAspectFlags in_aspect_mask, - VkFormat in_format, - const VkComponentSwizzle* in_swizzle_rgba_ptr); + bool init(); /* Private members */ - std::weak_ptr m_device_ptr; - VkImageView m_image_view; - std::shared_ptr m_parent_image_ptr; - - VkImageAspectFlagsVariable(m_aspect_mask); - - VkFormat m_format; - uint32_t m_n_base_layer; - uint32_t m_n_base_mipmap_level; - uint32_t m_n_layers; - uint32_t m_n_mipmaps; - uint32_t m_n_slices; - VkComponentSwizzle m_swizzle_array[4]; - VkImageViewType m_type; + Anvil::ImageViewCreateInfoUniquePtr m_create_info_ptr; + VkImageView m_image_view; }; }; -#endif /* WRAPPERS_IMAGE_VIEW_H */ \ No newline at end of file +#endif /* WRAPPERS_IMAGE_VIEW_H */ diff --git a/include/wrappers/instance.h b/include/wrappers/instance.h index 23768f1e..ae2a070a 100644 --- a/include/wrappers/instance.h +++ b/include/wrappers/instance.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -31,50 +31,42 @@ #ifndef WRAPPERS_INSTANCE_H #define WRAPPERS_INSTANCE_H +#include "misc/extensions.h" +#include "misc/instance_create_info.h" +#include "misc/library.h" +#include "misc/mt_safety.h" #include "misc/types.h" +#include "wrappers/debug_messenger.h" namespace Anvil { - /** Debug call-back function prototype */ - typedef std::function< VkBool32(VkDebugReportFlagsEXT in_message_flags, - VkDebugReportObjectTypeEXT in_object_type, - const char* in_layer_prefix, - const char* in_message)> DebugCallbackFunction; - - class Instance : public std::enable_shared_from_this + class Instance : public Anvil::MTSafetySupportProvider { public: /** Destructor */ virtual ~Instance(); - /** Creates a new Instance wrapper instance. This process is executed in the following steps: - * - * 1. If @param opt_pfn_validation_callback_proc is specified, available instance layers are - * enumerated. Layers which support VK_EXT_debug_report extension, are cached and used - * in step 2. - * 2. A new Vulkan instance is created. - * 3. 
Available physical devices are enumerated. - * 4. Instance-level function pointers are extracted. - * - * Only one Instance wrapper instance should be created during application's life-time. - * - * NOTE: You MUST call destroy() for this object in order for all dependent objects to be - * destroyed correctly. - * - * - * @param in_app_name Name of the application, to be passed in VkCreateInstanceInfo - * structure. - * @param in_engine_name Name of the engine, to be passed in VkCreateInstanceInfo - * structure. - * @param in_opt_validation_callback_function If not nullptr, the specified function will be called whenever - * a call-back from any of the validation layers is received. - * Ignored otherwise. - **/ - static std::shared_ptr create(const std::string& in_app_name, - const std::string& in_engine_name, - DebugCallbackFunction in_opt_validation_callback_proc); + /** Creates a new Instance wrapper instance. **/ + static Anvil::InstanceUniquePtr create(Anvil::InstanceCreateInfoUniquePtr in_create_info_ptr); + + const Anvil::APIVersion& get_api_version() const + { + return m_api_version; + } - void destroy(); + const Anvil::IExtensionInfoInstance* get_enabled_extensions_info() const + { + return m_enabled_extensions_info_ptr->get_instance_extension_info(); + } + + const ExtensionEXTDebugReportEntrypoints& get_extension_ext_debug_report_entrypoints() const; + const ExtensionEXTDebugUtilsEntrypoints& get_extension_ext_debug_utils_entrypoints () const; + + const ExtensionKHRExternalFenceCapabilitiesEntrypoints& get_extension_khr_external_fence_capabilities_entrypoints() const; + + const ExtensionKHRExternalMemoryCapabilitiesEntrypoints& get_extension_khr_external_memory_capabilities_entrypoints() const; + + const ExtensionKHRExternalSemaphoreCapabilitiesEntrypoints& get_extension_khr_external_semaphore_capabilities_entrypoints() const; /** Returns a container with entry-points to functions introduced by VK_KHR_get_physical_device_properties2. * @@ -104,14 +96,55 @@ namespace Anvil **/ const ExtensionKHRXcbSurfaceEntrypoints& get_extension_khr_xcb_surface_entrypoints() const; #endif + #if defined(ANVIL_INCLUDE_WAYLAND_WINDOW_SYSTEM_SUPPORT) + /** Returns a container with entry-points to functions introduced by VK_KHR_wayland_surface. + * + * Will fire an assertion failure if the extension is not supported. + **/ + const ExtensionKHRWaylandSurfaceEntrypoints& get_extension_khr_wayland_surface_entrypoints() const; + #endif +#endif +#if defined(ANVIL_INCLUDE_HEADLESS_WINDOW_SYSTEM_SUPPORT) + /** Returns a container with entry-points to functions introduced by VK_EXT_headless_surface. + * + * Will fire an assertion failure if the extension is not supported. + **/ + const ExtensionEXTHeadlessSurfaceEntrypoints& get_extension_ext_headless_surface_entrypoints() const; #endif + /** Returns a container with entry-points to functions introduced by VK_KHR_device_group_creation extension. + * + * Will fire an assertion failure if the extension is not supported. + **/ + const ExtensionKHRDeviceGroupCreationEntrypoints& get_extension_khr_device_group_creation_entrypoints() const; + + const Anvil::InstanceCreateInfo* get_create_info_ptr() const + { + return m_create_info_ptr.get(); + } + /** Returns a raw wrapped VkInstance handle. */ VkInstance get_instance_vk() const { return m_instance; } + /** Returns information about @param in_n_physical_device_group -th physical device group, as reported + * for this instance. 
+ * + * @param in_n_physical_device_group Index of the physical device group to retrieve properties of. + * This value must NOT be equal or larger than the value reported by + * get_n_physical_device_groups(). + * + ** @return As per description. + **/ + const Anvil::PhysicalDeviceGroup& get_physical_device_group(uint32_t in_n_physical_device_group) const + { + anvil_assert(m_physical_device_groups.size() > in_n_physical_device_group); + + return m_physical_device_groups.at(in_n_physical_device_group); + } + /** Returns a PhysicalDevice wrapper for a physical device at index @param in_n_device. * * @param in_n_device Index of the physical device to retrieve the wrapper instance for. @@ -120,9 +153,18 @@ namespace Anvil * ** @return As per description. **/ - std::weak_ptr get_physical_device(uint32_t in_n_device) const + const Anvil::PhysicalDevice* get_physical_device(uint32_t in_n_device) const + { + return m_physical_devices.at(in_n_device).get(); + } + + /** Returns the total number of physical device groups supported on the running platform. + * + * Will return 0 if VK_KHR_physical_device_group_creation is not supported. + */ + uint32_t get_n_physical_device_groups() const { - return m_physical_devices.at(in_n_device); + return static_cast(m_physical_device_groups.size() ); } /** Returns the total number of physical devices supported on the running platform. */ @@ -131,10 +173,10 @@ namespace Anvil return static_cast(m_physical_devices.size() ); } - /** Returns shader module cache instance */ - std::shared_ptr get_shader_module_cache() const + bool is_instance_extension_enabled(const char* in_extension_name) const; + bool is_instance_extension_enabled(const std::string& in_extension_name) const { - return m_shader_module_cache_ptr; + return is_instance_extension_enabled(in_extension_name.c_str() ); } /** Tells whether the specified instance extension is supported. @@ -152,48 +194,46 @@ namespace Anvil /** Tells if validation support has been requested for this Vulkan Instance wrapper */ bool is_validation_enabled() const { - return m_validation_callback_function != nullptr; + return m_create_info_ptr->get_validation_callback() != nullptr; } private: /* Private functions */ /** Private constructor. Please use create() function instead. 
*/ - Instance(const std::string& in_app_name, - const std::string& in_engine_name, - DebugCallbackFunction in_opt_validation_callback_function); + Instance(Anvil::InstanceCreateInfoUniquePtr in_create_info_ptr); Instance& operator=(const Instance&); Instance (const Instance&); - void register_physical_device (std::shared_ptr in_physical_device_ptr); - void unregister_physical_device(std::shared_ptr in_physical_device_ptr); - - void enumerate_instance_layers (); - void enumerate_layer_extensions(Anvil::Layer* layer_ptr); - void enumerate_physical_devices(); - void init (); - void init_debug_callbacks (); - void init_func_pointers (); - - static VkBool32 VKAPI_PTR debug_callback_pfn_proc(VkDebugReportFlagsEXT in_message_flags, - VkDebugReportObjectTypeEXT in_object_type, - uint64_t in_src_object, - size_t in_location, - int32_t in_msg_code, - const char* in_layer_prefix_ptr, - const char* in_message_ptr, - void* in_user_data); + void destroy (); + void enumerate_instance_layers (); + void enumerate_layer_extensions (Anvil::Layer* layer_ptr); + void enumerate_physical_device_groups(); + void enumerate_physical_devices (); + bool init (); + void init_debug_callbacks (); + void init_func_pointers (); + + bool init_vk_func_ptrs(); + + void debug_callback_handler(const Anvil::DebugMessageSeverityFlagBits& in_severity, + const char* in_message_ptr); /* Private variables */ VkInstance m_instance; - /* DebugReport extension function pointers and data */ - VkDebugReportCallbackEXT m_debug_callback_data; + ExtensionEXTDebugReportEntrypoints m_ext_debug_report_entrypoints; + ExtensionEXTDebugUtilsEntrypoints m_ext_debug_utils_entrypoints; + ExtensionKHRDeviceGroupCreationEntrypoints m_khr_device_group_creation_entrypoints; + ExtensionKHRExternalFenceCapabilitiesEntrypoints m_khr_external_fence_capabilities_entrypoints; + ExtensionKHRExternalMemoryCapabilitiesEntrypoints m_khr_external_memory_capabilities_entrypoints; + ExtensionKHRExternalSemaphoreCapabilitiesEntrypoints m_khr_external_semaphore_capabilities_entrypoints; + ExtensionKHRGetPhysicalDeviceProperties2 m_khr_get_physical_device_properties2_entrypoints; + ExtensionKHRSurfaceEntrypoints m_khr_surface_entrypoints; - ExtensionEXTDebugReportEntrypoints m_ext_debug_report_entrypoints; - ExtensionKHRGetPhysicalDeviceProperties2 m_khr_get_physical_device_properties2_entrypoints; - ExtensionKHRSurfaceEntrypoints m_khr_surface_entrypoints; + #if defined(_WIN32) + #endif #ifdef _WIN32 #if defined(ANVIL_INCLUDE_WIN3264_WINDOW_SYSTEM_SUPPORT) @@ -203,19 +243,26 @@ namespace Anvil #if defined(ANVIL_INCLUDE_XCB_WINDOW_SYSTEM_SUPPORT) ExtensionKHRXcbSurfaceEntrypoints m_khr_xcb_surface_entrypoints; #endif + #if defined(ANVIL_INCLUDE_WAYLAND_WINDOW_SYSTEM_SUPPORT) + ExtensionKHRWaylandSurfaceEntrypoints m_khr_wayland_surface_entrypoints; + #endif #endif + #if defined(ANVIL_INCLUDE_HEADLESS_WINDOW_SYSTEM_SUPPORT) + ExtensionEXTHeadlessSurfaceEntrypoints m_ext_headless_surface_entrypoints; + #endif + + Anvil::APIVersion m_api_version; + Anvil::InstanceCreateInfoUniquePtr m_create_info_ptr; - const std::string m_app_name; - const std::string m_engine_name; - DebugCallbackFunction m_validation_callback_function; + std::unique_ptr > m_enabled_extensions_info_ptr; + std::unique_ptr > m_supported_extensions_info_ptr; - std::vector m_enabled_extensions; + Anvil::DebugMessengerUniquePtr m_debug_messenger_ptr; Anvil::Layer m_global_layer; - std::vector > m_physical_devices; - std::shared_ptr m_shader_module_cache_ptr; + std::vector m_physical_device_groups; + 
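    /* Illustrative usage sketch for the create-info based path declared above. Only the getters
     * visible in this header are assumed to exist as shown; the InstanceCreateInfo factory
     * arguments are an assumption and may differ:
     *
     *     auto create_info_ptr = Anvil::InstanceCreateInfo::create(...);              // hypothetical arguments
     *     auto instance_ptr    = Anvil::Instance::create(std::move(create_info_ptr) );
     *
     *     for (uint32_t n_device = 0; n_device < instance_ptr->get_n_physical_devices(); ++n_device)
     *     {
     *         const Anvil::PhysicalDevice* physical_device_ptr = instance_ptr->get_physical_device(n_device);
     *     }
     *
     *     // Device groups are only reported when VK_KHR_device_group_creation is available;
     *     // otherwise get_n_physical_device_groups() returns 0.
     *     const uint32_t n_device_groups = instance_ptr->get_n_physical_device_groups();
     */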
std::vector > m_physical_devices; std::vector m_supported_layers; - friend class Anvil::PhysicalDevice; friend struct InstanceDeleter; }; }; /* namespace Anvil */ diff --git a/include/wrappers/memory_block.h b/include/wrappers/memory_block.h index 2c1b385b..6ee478c3 100644 --- a/include/wrappers/memory_block.h +++ b/include/wrappers/memory_block.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -36,125 +36,83 @@ #ifndef WRAPPERS_MEMORY_BLOCK_H #define WRAPPERS_MEMORY_BLOCK_H +#include "misc/mt_safety.h" #include "misc/types.h" - +#include "misc/memory_block_create_info.h" namespace Anvil { - /** "About to be deleted" call-back function prototype. */ - typedef std::function OnMemoryBlockReleaseCallbackFunction; + typedef class IMemoryBlockBackendSupport + { + public: + virtual ~IMemoryBlockBackendSupport() + { + /* Stub */ + } + + virtual void set_parent_memory_allocator_backend_ptr(std::shared_ptr in_backend_ptr, + void* in_backend_object) = 0; + } IMemoryBlockBackendSupport; /** Wrapper class for memory objects. Please see header for more details */ - class MemoryBlock : public std::enable_shared_from_this + class MemoryBlock : public IMemoryBlockBackendSupport, + public MTSafetySupportProvider { public: /* Public functions */ - /** Create and bind a new device memory object to the instantiated MemoryBlock object. + /* TODO * - * @param in_device_ptr Device to use. - * @param in_allowed_memory_bits Memory type bits which meet the allocation requirements. - * @param in_size Required allocation size. - * @param in_memory_features Required memory features. - **/ - static std::shared_ptr create(std::weak_ptr in_device_ptr, - uint32_t in_allowed_memory_bits, - VkDeviceSize in_size, - Anvil::MemoryFeatureFlags in_memory_features); - - /** Create a memory block whose storage space is maintained by another MemoryBlock instance. + * @param in_create_info_ptr TODO + * @param out_opt_result_ptr If not null, deref will be set to the error code reported by the API function + * used to allocate the memory. * - * @param in_parent_memory_block_ptr MemoryBlock instance to use as a parent. Must not be nullptr. - * Parent memory block must not be mapped. - * @param in_start_offset Start offset of the storage maintained by the specified parent memory block, - * from which the new MemoryBlock instance's storage should start. - * Must not be equal to or larger than parent object's storage size. - * When added to @param in_size, the result value must not be be larger than the remaining - * storage size. - * @param in_size Region size to use for the derived memory block. - **/ - static std::shared_ptr create_derived(std::shared_ptr in_parent_memory_block_ptr, - VkDeviceSize in_start_offset, - VkDeviceSize in_size); + * @return New memory block instance if successful, null otherwise. + */ + static MemoryBlockUniquePtr create(Anvil::MemoryBlockCreateInfoUniquePtr in_create_info_ptr, + VkResult* out_opt_result_ptr = nullptr); - /** Create a memory block whose lifetime is maintained by a separate entity. While the memory block remains reference-counted - * as usual, the destruction process is carried out by an external party via the specified call-back. + /* Creates a new external memory handle of the user-specified type. 
+ * + * For NT handles, if one has been already created for this memory block instance & handle type, a cached instance + * of the handle will be returned instead. Otherwise, each create() call will return a new handle. + * + * Cached external memory handles will be destroyed & released at MemoryBlock's destruction time. * - * This implements a special case required for support of Vulkan Memory Allocator memory allocator backend. Applications - * are very unlikely to ever need to use this create() function. + * Supports DERIVED and REGULAR memory blocks. The latter case is only supported if memory region covered by + * the derived region completely encapsulates the underlying Vulkan allocation. * - * TODO + * Returns nullptr if unsuccessful. + * + * Requires VK_KHR_external_memory_fd under Linux. + * Requires VK_KHR_external_memory_win32 under Windows. */ - static std::shared_ptr create_derived_with_custom_delete_proc(std::weak_ptr in_device_ptr, - VkDeviceMemory in_memory, - uint32_t in_allowed_memory_bits, - Anvil::MemoryFeatureFlags in_memory_features, - uint32_t in_memory_type_index, - VkDeviceSize in_size, - VkDeviceSize in_start_offset, - OnMemoryBlockReleaseCallbackFunction in_on_release_callback_function); + ExternalHandleUniquePtr export_to_external_memory_handle(const Anvil::ExternalMemoryHandleTypeFlagBits& in_memory_handle_type); /** Releases the Vulkan counterpart and unregisters the wrapper instance from the object tracker */ virtual ~MemoryBlock(); - /* Returns the underlying raw Vulkan VkDeviceMemory handle. */ - const VkDeviceMemory& get_memory() const + const Anvil::MemoryBlockCreateInfo* get_create_info_ptr() const { - if (m_parent_memory_block_ptr != nullptr) - { - return m_parent_memory_block_ptr->m_memory; - } - else - { - return m_memory; - } + return m_create_info_ptr.get(); } - /** Returns memory features of the underlying memory region */ - Anvil::MemoryFeatureFlags get_memory_features() const + /* Returns the underlying raw Vulkan VkDeviceMemory handle. */ + const VkDeviceMemory& get_memory() const { - if (m_parent_memory_block_ptr != nullptr) - { - return m_parent_memory_block_ptr->get_memory_features(); - } - else - { - return m_memory_features; - } - } + auto parent_mem_block_ptr = m_create_info_ptr->get_parent_memory_block(); - /** Returns the memory type index the memory block was allocated from */ - uint32_t get_memory_type_index() const - { - if (m_parent_memory_block_ptr != nullptr) + if (parent_mem_block_ptr != nullptr) { - return m_parent_memory_block_ptr->get_memory_type_index(); + return parent_mem_block_ptr->m_memory; } else { - return m_memory_type_index; + return m_memory; } } - /* Returns parent memory block, if one has been defined for this instance */ - std::shared_ptr get_parent_memory_block() const - { - return m_parent_memory_block_ptr; - } - - /* Returns the size of the memory block. */ - VkDeviceSize get_size() const - { - return m_size; - } - - /* Returns the start offset of the memory block. - * - * If the memory block has a parent, the returned start offset is NOT relative to the parent memory block's - * start offset (in other words: the returned value is an absolute offset which can be directly used against - * the memory block instance) - */ - VkDeviceSize get_start_offset() const + const VkDeviceSize& get_start_offset() const { return m_start_offset; } @@ -165,7 +123,7 @@ namespace Anvil * @param in_memory_block_ptr * * @return true if intersection has been detected, false otherwise. 
 */ - bool intersects(std::shared_ptr in_memory_block_ptr) const; + bool intersects(const Anvil::MemoryBlock* in_memory_block_ptr) const; /** Maps the specified region of the underlying memory object to the process space. * @@ -195,6 +153,13 @@ namespace Anvil * However, making that call in advance will skip map()+unmap() invocations, which would * otherwise have to be done for each read() call. * + * Note that reading from multi_instance memory heaps is not permitted by VK_KHR_device_group. + * Any attempt to do so will result in an assertion failure and an error being reported by + * this function. + * + * Since this function is device-agnostic, it doesn't matter if the parent device is a single- + * or a multi-GPU instance. + * * @param in_start_offset Start offset of the region to be mapped. * @param in_size Size of the region to be mapped. * @param out_result_ptr The read data will be copied to the location specified by this @@ -221,6 +186,13 @@ namespace Anvil * However, making that call in advance will skip map()+unmap() invocations, which would * otherwise have to be done for each write() call. * + * Note that writing to multi_instance memory heaps is not permitted by VK_KHR_device_group. + * Any attempt to do so will result in an assertion failure and an error being reported by + * this function. + * + * Since this function is device-agnostic, it doesn't matter if the parent device is a single- + * or a multi-GPU instance. + * * @param in_start_offset Start offset of the region to modify * @param in_size Size of the region to be modified. * @param in_data Data to be copied to the specified GPU memory region. @@ -233,28 +205,17 @@ namespace Anvil private: /* Private functions */ - bool init(); - - /** Please see create() for documentation */ - MemoryBlock(std::weak_ptr in_device_ptr, - uint32_t in_allowed_memory_bits, - VkDeviceSize in_size, - Anvil::MemoryFeatureFlags in_memory_features); - - /** Please see create() for documentation */ - MemoryBlock(std::shared_ptr in_parent_memory_block_ptr, - VkDeviceSize in_start_offset, - VkDeviceSize in_size); - - /** Please see create_derived_with_custom_delete_proc() for documentation */ - MemoryBlock(std::weak_ptr in_device_ptr, - VkDeviceMemory in_memory, - uint32_t in_allowed_memory_bits, - Anvil::MemoryFeatureFlags in_memory_features, - uint32_t in_memory_type_index, - VkDeviceSize in_size, - VkDeviceSize in_start_offset, - OnMemoryBlockReleaseCallbackFunction in_on_release_callback_function); + + /* TODO + * + * @param out_opt_result_ptr If not null, deref will be set to the error code reported by the API function + * used to allocate the memory. + * + * @return True if successful, false otherwise. 
+ */ + bool init(VkResult* out_opt_result_ptr = nullptr); + + MemoryBlock(Anvil::MemoryBlockCreateInfoUniquePtr in_create_info_ptr); MemoryBlock (const MemoryBlock&); MemoryBlock& operator=(const MemoryBlock&); @@ -264,20 +225,48 @@ namespace Anvil Anvil::MemoryFeatureFlags in_memory_features); bool open_gpu_memory_access (); - /* Private members */ - OnMemoryBlockReleaseCallbackFunction m_on_release_callback_function; + /* IMemoryBlockBackendSupport */ + void set_parent_memory_allocator_backend_ptr(std::shared_ptr in_backend_ptr, + void* in_backend_object) + { + anvil_assert(m_owned_parent_memory_allocator_backend_ptr == nullptr); + + m_backend_object = in_backend_object; + m_owned_parent_memory_allocator_backend_ptr = in_backend_ptr; + m_parent_memory_allocator_backend_ptr = m_owned_parent_memory_allocator_backend_ptr.get(); + + { + auto parent_memory_block_ptr = m_create_info_ptr->get_parent_memory_block(); + + while (parent_memory_block_ptr != nullptr) + { + anvil_assert(parent_memory_block_ptr->m_parent_memory_allocator_backend_ptr == nullptr || + parent_memory_block_ptr->m_parent_memory_allocator_backend_ptr == in_backend_ptr.get() ); + parent_memory_block_ptr->m_backend_object = in_backend_object; + parent_memory_block_ptr->m_parent_memory_allocator_backend_ptr = in_backend_ptr.get(); + + parent_memory_block_ptr = parent_memory_block_ptr->get_create_info_ptr()->get_parent_memory_block(); + } + } + } + + /* Private members */ std::atomic m_gpu_data_map_count; /* Only set for root memory blocks */ void* m_gpu_data_ptr; /* Only set for root memory blocks */ - uint32_t m_allowed_memory_bits; - std::weak_ptr m_device_ptr; - VkDeviceMemory m_memory; - Anvil::MemoryFeatureFlags m_memory_features; - uint32_t m_memory_type_index; - std::shared_ptr m_parent_memory_block_ptr; - VkDeviceSize m_size; - VkDeviceSize m_start_offset; + void* m_backend_object; + Anvil::MemoryBlockCreateInfoUniquePtr m_create_info_ptr; + VkDeviceMemory m_memory; + const Anvil::MemoryType* m_memory_type_props_ptr; /* keep for simplified debugging */ + VkDeviceSize m_start_offset; + VkDeviceSize m_relative_start_offset; + + std::vector m_mgpu_physical_devices; + std::shared_ptr m_owned_parent_memory_allocator_backend_ptr; + Anvil::IMemoryAllocatorBackendBase* m_parent_memory_allocator_backend_ptr; + + std::map m_external_handle_type_to_external_handle; }; }; /* Vulkan namespace */ diff --git a/include/wrappers/physical_device.h b/include/wrappers/physical_device.h index 9617e3d4..7a8a54fd 100644 --- a/include/wrappers/physical_device.h +++ b/include/wrappers/physical_device.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -32,12 +32,13 @@ #define WRAPPERS_PHYSICAL_DEVICE_H #include "misc/debug.h" +#include "misc/extensions.h" #include "misc/types.h" namespace Anvil { /* Wrapper class for Vulkan physical devices */ - class PhysicalDevice : public std::enable_shared_from_this + class PhysicalDevice { public: /* Public functions */ @@ -51,147 +52,70 @@ namespace Anvil * @param in_index Index of the physical device to initialize the wrapper for. * @param in_physical_device Raw Vulkan physical device handle to encapsulate. 
*/ - static std::weak_ptr create(std::shared_ptr in_instance_ptr, - uint32_t in_index, - VkPhysicalDevice in_physical_device); + static std::unique_ptr create(Anvil::Instance* in_instance_ptr, + uint32_t in_index, + VkPhysicalDevice in_physical_device); /** Destructor */ virtual ~PhysicalDevice(); - /** Retrieves 16-bit storage features supported by this physical device. + /** Returns a filled structure telling current consumption of memory on a per-heap basis. * - * This function will only succeed if both VK_KHR_16bit_storage_features and VK_KHR_get_physical_device_properties2 - * are supported. + * Requires VK_EXT_memory_budget support. + **/ + Anvil::MemoryBudget get_available_memory_budget() const; + + /* TODO * - * @return true if successful, false otherwise. + * Requires VK_KHR_external_memory_capabilities. */ - bool get_16bit_storage_features(StorageFeatures16Bit* out_result_ptr) const - { - if (m_16bit_storage_features_available) - { - *out_result_ptr = m_16bit_storage_features; - } - - return m_16bit_storage_features_available; - } + bool get_buffer_properties(const Anvil::BufferPropertiesQuery& in_query, + Anvil::BufferProperties* out_opt_result_ptr = nullptr) const; - /* Returns physical device's LUID. - * - * Requires VK_KHR_external_memory support, as well as VkPhysicalDeviceIDPropertiesKHR::deviceLUIDValid - * to have been set to true by the driver owning the physical device, in order to report success. - * - * @param out_result_ptr Exactly VK_LUID_SIZE_KHR bytes will be copied under *out_result_ptr if - * function returns true. Must not be nullptr. - * - * @return true if successful, false otherwise. - **/ - bool get_device_LUID(uint8_t* out_result_ptr) const + /** Retrieves features supported by the physical device */ + const Anvil::PhysicalDeviceFeatures& get_device_features() const { - bool result = false; - - if (m_device_LUID_available) - { - static_assert(sizeof(m_device_LUID) == VK_LUID_SIZE_KHR, ""); - - memcpy(out_result_ptr, - m_device_LUID, - VK_LUID_SIZE_KHR); - - result = true; - } - - return result; + return m_features; } - /* Returns physical device's UUID. - * - * Requires VK_KHR_external_memory support to return successfully. - * - * @param out_result_ptr Exactly VK_UUID_SIZE bytes will be copied under *out_result_ptr if - * function returns true. Must not be nullptr. - * - * @return true if successful, false otherwise. - **/ - bool get_device_UUID(uint8_t* out_result_ptr) const + /** Retrieves device group index of this physical device. */ + uint32_t get_device_group_index() const { - bool result = false; - - if (m_device_UUID_available) - { - static_assert(sizeof(m_device_UUID) == VK_UUID_SIZE, ""); - - memcpy(out_result_ptr, - m_device_UUID, - VK_UUID_SIZE); - - result = true; - } - - return result; + return m_device_group_index; } - /* Returns UUID of the driver owning the physical device. - * - * Requires VK_KHR_external_memory support to return successfully. - * - * @param out_result_ptr Exactly VK_UUID_SIZE bytes will be copied under *out_result_ptr if - * function returns true. Must not be nullptr. - * - * @return true if successful, false otherwise. - **/ - bool get_driver_UUID(uint8_t* out_result_ptr) const + /** Retrieves index of the physical device within a device group. 
 */ + uint32_t get_device_group_device_index() const { - bool result = false; - - if (m_driver_UUID_available) - { - static_assert(sizeof(m_driver_UUID) == VK_UUID_SIZE, ""); - - memcpy(out_result_ptr, - m_driver_UUID, - VK_UUID_SIZE); - - result = true; - } - - return result; - } - - /** Retrieves features supported by the physical device */ - const VkPhysicalDeviceFeatures& get_device_features() const - { - return m_features; + return m_device_group_device_index; } - /** Retrieves a filled VkPhysicalDeviceProperties structure, holding properties of - * the wrapped physical device. - **/ - const VkPhysicalDeviceProperties& get_device_properties() const + const Anvil::PhysicalDeviceProperties& get_device_properties() const { return m_properties; } - /** Retrieves format properties, as reported by the wrapped physical device. + /* TODO + * + * Requires VK_KHR_external_fence_capabilities. + * + */ + bool get_fence_properties(const Anvil::FencePropertiesQuery& in_query, + Anvil::FenceProperties* out_opt_result_ptr = nullptr) const; + + Anvil::FormatProperties get_format_properties(Anvil::Format in_format) const; + + /** Retrieves image format properties, as reported by the wrapped physical device. + * + * For external memory handle capability queries, VK_KHR_external_memory_capabilities support is required. + * + * * @param in_format Vulkan format to retrieve the filled structure for. * * @return As per description. **/ - const FormatProperties& get_format_properties(VkFormat in_format) const - { - auto format_props_iterator = m_format_properties.find(in_format); - - if (format_props_iterator != m_format_properties.end() ) - { - return format_props_iterator->second; - } - else - { - anvil_assert_fail(); - - return m_dummy; - } - } + bool get_image_format_properties(const ImageFormatPropertiesQuery& in_query, + Anvil::ImageFormatProperties* out_opt_result_ptr = nullptr) const; /** Returns index of the physical device. */ uint32_t get_index() const @@ -200,7 +124,12 @@ namespace Anvil } /** Returns parent Vulkan instance wrapper. */ - std::weak_ptr get_instance() const + const Anvil::Instance* get_instance() const + { + return m_instance_ptr; + } + + Anvil::Instance* get_instance() { return m_instance_ptr; } @@ -212,8 +141,8 @@ namespace Anvil } /** Returns a filled Anvil::MemoryProperties structure, describing the - * encapsulated physical device. - **/ + * encapsulated physical device. + **/ const Anvil::MemoryProperties& get_memory_properties() const { return m_memory_properties; @@ -233,6 +162,14 @@ namespace Anvil return m_queue_families; } + /* TODO + * + * Requires VK_KHR_external_semaphore_capabilities. + * + */ + bool get_semaphore_properties(const Anvil::SemaphorePropertiesQuery& in_query, + Anvil::SemaphoreProperties* out_opt_result_ptr = nullptr) const; + /** Returns sparse image format properties for this physical device. See Vulkan spec * for vkGetPhysicalDeviceSparseImageFormatProperties() function for more details. 
* @@ -246,12 +183,12 @@ namespace Anvil * * @return true if successful, false otherwise **/ - bool get_sparse_image_format_properties(VkFormat in_format, - VkImageType in_type, - VkSampleCountFlagBits in_sample_count, - VkImageUsageFlags in_usage, - VkImageTiling in_tiling, - std::vector& out_result) const; + bool get_sparse_image_format_properties(Anvil::Format in_format, + Anvil::ImageType in_type, + Anvil::SampleCountFlagBits in_sample_count, + Anvil::ImageUsageFlags in_usage, + Anvil::ImageTiling in_tiling, + std::vector& out_result) const; /** Tells whether user-specified extension is supported by the physical device. * @@ -271,10 +208,10 @@ namespace Anvil **/ bool is_layer_supported(const std::string& in_layer_name) const; - private: - /* Private type definitions */ - typedef std::map FormatPropertiesMap; + /* Tells if the physical device supports VK 1.1 API (or newer) */ + bool supports_core_vk1_1() const; + private: /* Private functions */ /** Constructor. Retrieves properties & capabilities of a physical device at @@ -285,16 +222,14 @@ namespace Anvil * @param in_index Index of the physical device to initialize the wrapper for. * @param in_physical_device Raw Vulkan physical device handle to encapsulate. */ - explicit PhysicalDevice(std::weak_ptr in_instance_ptr, - uint32_t in_index, - VkPhysicalDevice in_physical_device) - :m_16bit_storage_features_available(false), - m_destroyed (false), - m_device_LUID_available (false), - m_device_UUID_available (false), - m_index (in_index), - m_instance_ptr (in_instance_ptr), - m_physical_device (in_physical_device) + explicit PhysicalDevice(Anvil::Instance* in_instance_ptr, + uint32_t in_index, + VkPhysicalDevice in_physical_device) + :m_device_group_device_index(0), + m_device_group_index (0), + m_index (in_index), + m_instance_ptr (in_instance_ptr), + m_physical_device (in_physical_device) { anvil_assert(in_physical_device != VK_NULL_HANDLE); } @@ -302,39 +237,62 @@ namespace Anvil PhysicalDevice (const PhysicalDevice&); PhysicalDevice& operator=(const PhysicalDevice&); - void destroy (); - void init (); - void register_device (std::shared_ptr in_device_ptr); - void unregister_device(std::shared_ptr in_device_ptr); + bool init (); + void set_device_group_device_index(uint32_t in_new_device_group_device_index); + void set_device_group_index (uint32_t in_new_device_group_index); + + bool supports_core_vk1_1(const uint32_t& in_api_version) const; /* Private variables */ - bool m_destroyed; - FormatProperties m_dummy; - - StorageFeatures16Bit m_16bit_storage_features; - bool m_16bit_storage_features_available; - Anvil::Extensions m_extensions; - uint32_t m_index; - std::shared_ptr m_instance_ptr; - VkPhysicalDeviceFeatures m_features; - FormatPropertiesMap m_format_properties; - Anvil::Layers m_layers; - MemoryProperties m_memory_properties; - VkPhysicalDevice m_physical_device; - QueueFamilyInfoItems m_queue_families; - VkPhysicalDeviceProperties m_properties; - - bool m_device_LUID_available; - uint8_t m_device_LUID[VK_LUID_SIZE_KHR]; - bool m_device_UUID_available; - uint8_t m_device_UUID[VK_UUID_SIZE]; - uint8_t m_driver_UUID[VK_UUID_SIZE]; - bool m_driver_UUID_available; - - std::vector > m_cached_devices; + uint32_t m_device_group_device_index; + uint32_t m_device_group_index; + std::unique_ptr > m_extension_info_ptr; + uint32_t m_index; + Anvil::Instance* m_instance_ptr; + Anvil::PhysicalDeviceFeatures m_features; + Anvil::Layers m_layers; + MemoryProperties m_memory_properties; + VkPhysicalDevice m_physical_device; + 
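    /* Illustrative usage sketch for the query entrypoints declared above. The Format enum value and
     * the ImageFormatPropertiesQuery constructor arguments are assumptions:
     *
     *     const Anvil::PhysicalDevice* physical_device_ptr = instance_ptr->get_physical_device(0);
     *
     *     if (physical_device_ptr->supports_core_vk1_1() )
     *     {
     *         // Per-format capabilities are returned by value:
     *         const auto format_props = physical_device_ptr->get_format_properties(Anvil::Format::R8G8B8A8_UNORM); // assumed enum value
     *
     *         // Queries which may fail for unsupported combinations report success through the return value:
     *         Anvil::ImageFormatProperties image_format_props;
     *
     *         if (!physical_device_ptr->get_image_format_properties(Anvil::ImageFormatPropertiesQuery(...), // hypothetical arguments
     *                                                               &image_format_props) )
     *         {
     *             // the requested format / usage combination is not supported
     *         }
     *     }
     */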
QueueFamilyInfoItems m_queue_families; + Anvil::PhysicalDeviceProperties m_properties; + + std::unique_ptr m_amd_shader_core_properties_ptr; + std::unique_ptr m_core_features_vk10_ptr; + std::unique_ptr m_core_features_vk11_ptr; + std::unique_ptr m_core_properties_vk10_ptr; + std::unique_ptr m_core_properties_vk11_ptr; + std::unique_ptr m_ext_conservative_rasterization_properties_ptr; + std::unique_ptr m_ext_depth_clip_enable_features_ptr; + std::unique_ptr m_ext_descriptor_indexing_features_ptr; + std::unique_ptr m_ext_descriptor_indexing_properties_ptr; + std::unique_ptr m_ext_external_memory_host_properties_ptr; + std::unique_ptr m_ext_inline_uniform_block_features_ptr; + std::unique_ptr m_ext_inline_uniform_block_properties_ptr; + std::unique_ptr m_ext_pci_bus_info_ptr; + std::unique_ptr m_ext_sample_locations_properties_ptr; + std::unique_ptr m_ext_sampler_filter_minmax_properties_ptr; + std::unique_ptr m_ext_scalar_block_layout_features_ptr; + std::unique_ptr m_ext_transform_feedback_features_ptr; + std::unique_ptr m_ext_transform_feedback_properties_ptr; + std::unique_ptr m_ext_vertex_attribute_divisor_properties_ptr; + std::unique_ptr m_ext_memory_priority_features_ptr; + std::unique_ptr m_khr_16_bit_storage_features_ptr; + std::unique_ptr m_khr_8_bit_storage_features_ptr; + std::unique_ptr m_khr_depth_stencil_resolve_properties_ptr; + std::unique_ptr m_khr_driver_properties_properties_ptr; + std::unique_ptr m_khr_external_memory_capabilities_physical_device_id_properties_ptr; + std::unique_ptr m_khr_float16_int8_features_ptr; + std::unique_ptr m_khr_maintenance2_physical_device_point_clipping_properties_ptr; + std::unique_ptr m_khr_maintenance3_properties_ptr; + std::unique_ptr m_khr_multiview_features_ptr; + std::unique_ptr m_khr_multiview_properties_ptr; + std::unique_ptr m_khr_sampler_ycbcr_conversion_features_ptr; + std::unique_ptr m_khr_shader_atomic_int64_features_ptr; + std::unique_ptr m_khr_shader_float_controls_properties_ptr; + std::unique_ptr m_khr_variable_pointer_features_ptr; + std::unique_ptr m_khr_vulkan_memory_model_features_ptr; friend class Anvil::Instance; - friend class Anvil::SGPUDevice; }; /** Tells whether the specified Anvil PhysicalDevice wrapper encapsulates given Vulkan physical device. @@ -344,9 +302,8 @@ namespace Anvil * * @return true if objects match, false otherwise. **/ - bool operator==(const std::shared_ptr& in_physical_device_ptr, + bool operator==(const std::unique_ptr& in_physical_device_ptr, const VkPhysicalDevice& in_physical_device_vk); - }; /* namespace Anvil */ -#endif /* WRAPPERS_FENCE_H */ \ No newline at end of file +#endif /* WRAPPERS_FENCE_H */ diff --git a/include/wrappers/pipeline_cache.h b/include/wrappers/pipeline_cache.h index 68a4ecf5..e08ec0a6 100644 --- a/include/wrappers/pipeline_cache.h +++ b/include/wrappers/pipeline_cache.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -32,12 +32,14 @@ #define WRAPPERS_PIPELINE_CACHE_H #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/types.h" namespace Anvil { - class PipelineCache : public DebugMarkerSupportProvider + class PipelineCache : public DebugMarkerSupportProvider, + public MTSafetySupportProvider { public: /* Public functions */ @@ -45,13 +47,16 @@ namespace Anvil /** Constructor. * * @param in_device_ptr Vulkan device to initialize the pipeline cache with. + * @param in_mt_safe True if MT-safety should be enforced for functions that operate on the + * underlying Vulkan handle. * @param in_initial_data_size Number of bytes available under @param in_initial_data. Can be 0. * @param in_initial_data Initial data to initialize the new pipeline cache instance with. * May be nullptr if @param in_initial_data_size is 0. **/ - static std::shared_ptr create(std::weak_ptr in_device_ptr, - size_t in_initial_data_size = 0, - const void* in_initial_data = nullptr); + static Anvil::PipelineCacheUniquePtr create(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe, + size_t in_initial_data_size = 0, + const void* in_initial_data = nullptr); /** Destroys the Vulkan counterpart and unregisters the wrapper instance from the object tracker. */ virtual ~PipelineCache(); @@ -64,10 +69,13 @@ namespace Anvil * * @return true if successful, false otherwise. **/ - bool get_data(size_t* out_n_data_bytes_ptr, - const void** out_data_ptr); + bool get_data(size_t* out_n_data_bytes_ptr, + void* out_data_ptr); - /** Retrieves raw Vulkan pipeline cache handle */ + /** Retrieves raw Vulkan pipeline cache handle. + * + * NOTE: Clients must guarantee MT-safety when operating directly with the Vulkan handle. + */ const VkPipelineCache& get_pipeline_cache() const { return m_pipeline_cache; @@ -80,23 +88,24 @@ namespace Anvil * * @return true if successful, false otherwise. **/ - bool merge(uint32_t in_n_pipeline_caches, - std::shared_ptr const* in_src_cache_ptrs); + bool merge(uint32_t in_n_pipeline_caches, + const Anvil::PipelineCache* const* in_src_cache_ptrs); private: /* Private functions */ /* Constructor. See create() for specification */ - PipelineCache(std::weak_ptr in_device_ptr, - size_t in_initial_data_size, - const void* in_initial_data); + PipelineCache(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe, + size_t in_initial_data_size, + const void* in_initial_data); PipelineCache (const PipelineCache&); PipelineCache& operator=(const PipelineCache&); /* Private variables */ - std::weak_ptr m_device_ptr; - VkPipelineCache m_pipeline_cache; + const Anvil::BaseDevice* m_device_ptr; + VkPipelineCache m_pipeline_cache; }; }; /* namespace Anvil */ diff --git a/include/wrappers/pipeline_layout.h b/include/wrappers/pipeline_layout.h index 1a755248..1825a72a 100644 --- a/include/wrappers/pipeline_layout.h +++ b/include/wrappers/pipeline_layout.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -25,101 +25,41 @@ * - encapsulate all state related to a single pipeline layout. * - let ObjectTracker detect leaking pipeline layout wrapper instances. 
* - * The wrapper is NOT thread-safe. + * The wrapper is thread-safe on an opt-in basis. **/ #ifndef WRAPPERS_PIPELINE_LAYOUT_H #define WRAPPERS_PIPELINE_LAYOUT_H #include "misc/callbacks.h" #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/types.h" namespace Anvil { - /* Forward declarations */ - class DescriptorSetGroup; - typedef std::vector PushConstantRanges; /** Vulkan Pipeline Layout wrapper */ - class PipelineLayout : public DebugMarkerSupportProvider + class PipelineLayout : public DebugMarkerSupportProvider, + public MTSafetySupportProvider { public: /* Public functions */ - /** Initializes a new wrapper instance with no descriptor sets or push constant ranges - * defined. - * - * Layouts initialized with this constructor are mutable - dsgs and new PC ranges can - * be attached anytime. - * - * @param in_device_ptr Device the layout is being created for. Must not be nullptr. - */ - static std::shared_ptr create(std::weak_ptr in_device_ptr); - - /** Initializes a new wrapper instance with user-specified descriptor set groups (appended - * one after another, in the user-defined order) defined at creation time. - * - * This constructor can be used to initialize immutable pipeline layouts. If @param in_is_immutable - * is set to true, attach_dsg() and attach_push_constant_range() calls invoked for such object - * will result in a failure. - * - * @param in_device_ptr Device to use. Must not be nullptr. - * @param in_dsg_ptr Descriptor set group to use for the pipeline layout. - * @param in_push_constant_ranges Push constant ranges to define for the pipeline layout. - * @param in_is_immutable true if the wrapper instance should be made immutable; false otherwise. - * - **/ - static std::shared_ptr create(std::weak_ptr in_device_ptr, - std::shared_ptr in_dsg_ptr, - const PushConstantRanges& in_push_constant_ranges, - bool in_is_immutable); - /** Destructor. Releases all attached descriptor set groups, as well as * the Vulkan pipeline layout object. **/ virtual ~PipelineLayout(); - /** Appends a new push constant range to the list of push constant ranges that will be used - * when baking the layout object. - * - * This function will fail if the instance is defined as immutable. - * - * @param in_layout_id ID of the pipeline layout to perform the operation on. The ID must have - * been returned by a preceding create_layout() call. - * @param in_offset Start offset of the new range. - * @param in_size Size of the new range. - * - * @return true if the function succeeded, false otherwise. - **/ - bool attach_push_constant_range(uint32_t in_offset, - uint32_t in_size, - VkShaderStageFlags in_stages); - - /** Bakes a Vulkan VkPipelineLayout instance from the object. - * - * Bake requests for wrappers not marked as dirty will be ignored. - * - * @return true if successful, false otherwise. - **/ - bool bake(); - - /** Retrieves a descriptor set group, as assigned to the pipeline layout. */ - std::shared_ptr get_attached_dsg() const - { - return m_dsg_ptr; - } - /** Retrieves a vector of push constant ranges, attached to the pipeline layout. */ const PushConstantRanges& get_attached_push_constant_ranges() const { return m_push_constant_ranges; } - /** Returns unique ID assigned to the pipeline layout instance */ - PipelineLayoutID get_id() const + const std::vector* get_ds_create_info_ptrs() const { - return m_id; + return &m_ds_create_info_ptrs; } /** Retrieves a raw Vulkan pipeline layout handle. @@ -128,54 +68,53 @@ namespace Anvil * * @return Requested handle. 
**/ - VkPipelineLayout get_pipeline_layout() + VkPipelineLayout get_pipeline_layout() const { - if (m_dirty) - { - bake(); - } - return m_layout_vk; } - /** Assigns the specified Descriptor Set Group to the pipeline layout. - * - * This function will fail if the instance is defined as immutable. - * - * This function marks the pipeline layout as dirty, meaning it will be re-baked at - * the next get_() call. - * - * @param in_dsg_ptr Pointer to the DescriptorSetGroup instance to use for the operation. - * This object will be retained. - * - * @return true if the operation was successful, false otherwise. - **/ - bool set_dsg(std::shared_ptr in_dsg_ptr); - private: /* Private functions */ /* Constructor. Please see create() for specification */ - PipelineLayout(std::weak_ptr in_device_ptr); - - /* Constructor. Please see create() for specification */ - PipelineLayout(std::weak_ptr in_device_ptr, - std::shared_ptr in_dsg_ptr, - const PushConstantRanges& in_push_constant_ranges, - bool in_is_immutable); + PipelineLayout(const Anvil::BaseDevice* in_device_ptr, + const PushConstantRanges& in_push_constant_ranges, + bool in_mt_safe); PipelineLayout (const PipelineLayout&); PipelineLayout& operator=(const PipelineLayout&); + /** Bakes a Vulkan VkPipelineLayout instance from the object. + * + * @return true if successful, false otherwise. + **/ + bool bake(const std::vector* in_ds_create_info_items_ptr); + + /** Initializes a new wrapper instance using information extracted from user-specified descriptor set groups (appended + * one after another, in the user-defined order). + * + * This constructor can be used to initialize immutable pipeline layouts. If @param in_is_immutable + * is set to true, attach_dsg() and attach_push_constant_range() calls invoked for such object + * will result in a failure. + * + * @param in_device_ptr Device to use. Must not be nullptr. + * @param in_ds_create_info_items_ptr TODO + * @param in_push_constant_ranges Push constant ranges to define for the pipeline layout. + * + **/ + static PipelineLayoutUniquePtr create(const Anvil::BaseDevice* in_device_ptr, + const std::vector* in_ds_create_info_items_ptr, + const PushConstantRanges& in_push_constant_ranges, + bool in_mt_safe); + /* Private variables */ - std::weak_ptr m_device_ptr; - bool m_is_immutable; - - bool m_dirty; - std::shared_ptr m_dsg_ptr; - Anvil::PipelineLayoutID m_id; - VkPipelineLayout m_layout_vk; - PushConstantRanges m_push_constant_ranges; + const Anvil::BaseDevice* m_device_ptr; + std::vector m_ds_create_info_ptrs; + std::vector m_ds_layout_ptrs; + VkPipelineLayout m_layout_vk; + PushConstantRanges m_push_constant_ranges; + + friend class PipelineLayoutManager; }; }; /* namespace Anvil */ diff --git a/include/wrappers/pipeline_layout_manager.h b/include/wrappers/pipeline_layout_manager.h index eb2f3e2e..ccf8f093 100644 --- a/include/wrappers/pipeline_layout_manager.h +++ b/include/wrappers/pipeline_layout_manager.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -25,16 +25,18 @@ * - caches all pipeline layout wrappers and re-uses already instantiated wrappers, * if user-requested one is already available. * + * Opt-in MT-safety available. 
**/ #ifndef PIPELINE_LAYOUT_MANAGER_H #define PIPELINE_LAYOUT_MANAGER_H +#include "misc/mt_safety.h" #include "misc/types.h" #include namespace Anvil { - class PipelineLayoutManager + class PipelineLayoutManager : public MTSafetySupportProvider { public: /* Public functions */ @@ -45,68 +47,63 @@ namespace Anvil /** Returns a pipeline layout wrapper matching the specified DSG + push constant range configuration. * If such pipeline layout has never been defined before, it will be created at the call time. * - * If the function returns a Anvil::PipelineLayout instance, it is caller's responsibility to release it - * in order for the object to be correctly deleted when its reference counter drops to zero. - * - * @param in_dsg_ptr A DescriptorSetGroup instance, holding the set of descriptor sets which the - * layout should describe. - * @param in_push_constant_ranges A vector of PushConstantRange descriptor, describing the push constant ranges - * the layout should define. - * @param out_pipeline_layout_ptr Deref will be set to a ptr to the pipeline layout wrapper instance, matching the described - * requirements. Must not be nullptr. + * @param in_ds_create_info_items_ptr TODO. + * @param in_push_constant_ranges A vector of PushConstantRange descriptor, describing the push constant ranges + * the layout should define. + * @param out_pipeline_layout_ptr_ptr Deref will be set to a ptr to the pipeline layout wrapper instance, matching the described + * requirements. Must not be nullptr. * * @return true if successful, false otherwise. **/ - bool get_layout(std::shared_ptr in_dsg_ptr, - const PushConstantRanges& in_push_constant_ranges, - std::shared_ptr* out_pipeline_layout_ptr); - - /** Retrieves a PipelineLayout instance, assigned to the specific pipeline layout ID */ - std::shared_ptr get_layout_by_id(Anvil::PipelineLayoutID in_id) const; + bool get_layout(const std::vector* in_ds_create_info_items_ptr, + const PushConstantRanges& in_push_constant_ranges, + Anvil::PipelineLayoutUniquePtr* out_pipeline_layout_ptr_ptr); protected: /* Protected functions */ - /** Marks specified pipeline layout ID as used. - * - * NOTE: This function should only be used by PipelineLayout. - **/ - PipelineLayoutID reserve_pipeline_layout_id(); - private: /* Private type declarations */ + typedef struct PipelineLayoutContainer + { + std::atomic n_references; + PipelineLayoutUniquePtr pipeline_layout_ptr; + + PipelineLayoutContainer() + :n_references(1) + { + /* Stub */ + } + } PipelineLayoutContainer; - /* NOTE: We do NOT own pipeline layouts. As soon as all wrapper instances are out of scope, - * we drop the ID. - */ - typedef std::map > PipelineLayouts; + typedef std::vector > PipelineLayouts; /* Private functions */ - PipelineLayoutManager(std::weak_ptr in_device_ptr); + PipelineLayoutManager(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe); PipelineLayoutManager (const PipelineLayoutManager&); PipelineLayoutManager& operator=(const PipelineLayoutManager&); + void on_pipeline_layout_dereferenced(Anvil::PipelineLayout* in_layout_ptr); + /** Instantiates a new PipelineLayoutManager instance. * * NOTE: This function should only be used by Device. * * @param in_device_ptr Device to initialize the manager for. + * @param in_mt_safe Set to true if the instance should provide multi-threaded access safety. * * @return PipelineLayoutManager instance, or nullptr if the function failed. 
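With DescriptorSetGroup removed from this path, a layout is requested from the manager by passing descriptor set create-info items directly, and it is returned as a unique_ptr through the out-parameter. A minimal sketch, assuming layout_manager_ptr, ds_create_info_items (its element type is elided in this diff) and push_constant_ranges already exist:

    Anvil::PipelineLayoutUniquePtr pipeline_layout_ptr;

    if (!layout_manager_ptr->get_layout(&ds_create_info_items,  /* in_ds_create_info_items_ptr */
                                         push_constant_ranges,  /* in_push_constant_ranges     */
                                        &pipeline_layout_ptr) ) /* out_pipeline_layout_ptr_ptr */
    {
        // Lookup/creation failed. On success, an already-cached wrapper is re-used
        // whenever an identical DS + push-constant configuration was requested before.
    }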
**/ - static std::shared_ptr create(std::weak_ptr in_device_ptr); - - void on_pipeline_layout_dropped(); - void update_subscriptions (bool in_should_init); + static Anvil::PipelineLayoutManagerUniquePtr create(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe); /* Private members */ - std::weak_ptr m_device_ptr; - PipelineLayouts m_pipeline_layouts; - Anvil::PipelineLayoutID m_pipeline_layouts_created; + const Anvil::BaseDevice* m_device_ptr; + PipelineLayouts m_pipeline_layouts; friend class BaseDevice; - friend class PipelineLayout; }; }; /* Vulkan namespace */ diff --git a/include/wrappers/query_pool.h b/include/wrappers/query_pool.h index 35c9ab4c..2c4ec7d1 100644 --- a/include/wrappers/query_pool.h +++ b/include/wrappers/query_pool.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -24,6 +24,7 @@ #define WRAPPERS_QUERY_POOL_H #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/ref_counter.h" #include "misc/pools.h" #include "misc/types.h" @@ -32,7 +33,8 @@ namespace Anvil { /* Implements a query pool wrapper. */ - class QueryPool : public DebugMarkerSupportProvider + class QueryPool : public DebugMarkerSupportProvider, + public MTSafetySupportProvider { public: /* Public functions */ @@ -50,9 +52,10 @@ namespace Anvil * for this query pool. * **/ - static std::shared_ptr create_non_ps_query_pool(std::weak_ptr in_device_ptr, - VkQueryType in_query_type, - uint32_t in_n_max_concurrent_queries); + static Anvil::QueryPoolUniquePtr create_non_ps_query_pool(const Anvil::BaseDevice* in_device_ptr, + VkQueryType in_query_type, + uint32_t in_n_max_concurrent_queries, + MTSafety in_mt_safety = Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE); /** Creates a new pipeline statistics query pool. * @@ -64,9 +67,10 @@ namespace Anvil * @param in_n_max_concurrent_queries Number of queries to preallocate in the pool. * **/ - static std::shared_ptr create_ps_query_pool(std::weak_ptr in_device_ptr, - VkQueryPipelineStatisticFlags in_pipeline_statistics, - uint32_t in_n_max_concurrent_queries); + static Anvil::QueryPoolUniquePtr create_ps_query_pool(const Anvil::BaseDevice* in_device_ptr, + Anvil::QueryPipelineStatisticFlags in_pipeline_statistics, + uint32_t in_n_max_concurrent_queries, + MTSafety in_mt_safety = Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE); /** Destructor */ virtual ~QueryPool(); @@ -83,36 +87,81 @@ namespace Anvil return m_query_pool_vk; } + /* Uses vkGetQueryPoolResults() to retrieve result values for the user-specified query range. + * + * NOTE: It is assumed result values are to be returned to a tightly-packed array of size + * @param in_n_queries * sizeof(query result type). if @param in_query_props includes QUERY_RESULT_WITH_AVAILABILITY_BIT, + * twice the amount of memory is needed for result storage. + * + * NOTE: It is caller's responsibility to follow the requirements listed in the spec which guarantee + * the results returned by this entrypoint are correct. 
+ * + **/ + bool get_query_pool_results(const uint32_t& in_first_query_index, + const uint32_t& in_n_queries, + const Anvil::QueryResultFlags& in_query_props, + uint32_t* out_results_ptr, + bool* out_all_query_results_retrieved_ptr) + { + return get_query_pool_results_internal(in_first_query_index, + in_n_queries, + in_query_props, + false, /* should_return_uint64 */ + out_results_ptr, + out_all_query_results_retrieved_ptr); + } + + bool get_query_pool_results(const uint32_t& in_first_query_index, + const uint32_t& in_n_queries, + const Anvil::QueryResultFlags& in_query_props, + uint64_t* out_results_ptr, + bool* out_all_query_results_retrieved_ptr) + { + return get_query_pool_results_internal(in_first_query_index, + in_n_queries, + in_query_props, + true, /* should_return_uint64 */ + out_results_ptr, + out_all_query_results_retrieved_ptr); + } private: /* Constructor. Please see corresponding create() for specification */ - explicit QueryPool(std::weak_ptr in_device_ptr, - VkQueryType in_query_type, - uint32_t in_n_max_concurrent_queries); + explicit QueryPool(const Anvil::BaseDevice* in_device_ptr, + VkQueryType in_query_type, + uint32_t in_n_max_concurrent_queries, + bool in_mt_safe); /* Constructor. Please see corresponding create() for specification */ - explicit QueryPool(std::weak_ptr in_device_ptr, - VkQueryType in_query_type, - VkFlags in_query_flags, - uint32_t in_n_max_concurrent_queries); + explicit QueryPool(const Anvil::BaseDevice* in_device_ptr, + VkQueryType in_query_type, + Anvil::QueryPipelineStatisticFlags in_pipeline_statistics, + uint32_t in_n_max_concurrent_queries, + bool in_mt_safe); + + bool get_query_pool_results_internal(const uint32_t& in_first_query_index, + const uint32_t& in_n_queries, + const Anvil::QueryResultFlags& in_query_props, + const bool& in_should_return_uint64, + void* out_results_ptr, + bool* out_all_query_results_retrieved_ptr); /** Initializes the Vulkan counterpart. * - * @param in_device_ptr Device to create the query pool for. Must not be nullptr. * @param in_query_type Type of the query to instantiate the pool for. * @param in_flags Query flags to instantiate the pool with. * @param in_n_max_concurrent_queries Maximum number of queries which can be used concurrently with * the query pool. **/ - void init(std::weak_ptr in_device_ptr, - VkQueryType in_query_type, - VkFlags in_flags, - uint32_t in_n_max_concurrent_queries); + void init(VkQueryType in_query_type, + Anvil::QueryPipelineStatisticFlags in_pipeline_statistics, + uint32_t in_n_max_concurrent_queries); /* Private variables */ - std::weak_ptr m_device_ptr; - uint32_t m_n_max_indices; - VkQueryPool m_query_pool_vk; + const Anvil::BaseDevice* m_device_ptr; + uint32_t m_n_max_indices; + VkQueryPool m_query_pool_vk; + const VkQueryType m_query_type; }; }; /* namespace Anvil */ diff --git a/include/wrappers/queue.h b/include/wrappers/queue.h index 4e1d05dc..a1b072ec 100644 --- a/include/wrappers/queue.h +++ b/include/wrappers/queue.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
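Result readback now lives on the wrapper itself through the get_query_pool_results() overloads above. A minimal sketch combining creation with a 64-bit readback; device_ptr, the query count and the QueryResultFlags value are placeholder assumptions, and only signatures visible in this hunk are relied upon:

    // device_ptr is assumed to be a valid const Anvil::BaseDevice*.
    auto query_pool_ptr = Anvil::QueryPool::create_non_ps_query_pool(device_ptr,
                                                                     VK_QUERY_TYPE_OCCLUSION,
                                                                     8 /* in_n_max_concurrent_queries */);

    uint64_t                results[8];
    bool                    all_results_retrieved = false;
    Anvil::QueryResultFlags query_props           = Anvil::QueryResultFlags(); // OR in the desired QUERY_RESULT_* bits

    if (query_pool_ptr->get_query_pool_results(0, /* in_first_query_index */
                                               8, /* in_n_queries         */
                                               query_props,
                                               results,
                                              &all_results_retrieved) )
    {
        // Per the note above, results[] must be twice as large if the availability bit is requested.
    }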
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -33,15 +33,55 @@ #include "misc/callbacks.h" #include "misc/debug.h" #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/types.h" namespace Anvil { + /** TODO */ + typedef struct LocalModePresentationItem + { + /* Tells which physical device's swapchain image at index @param in_swapchain_image_index should be presented. */ + const Anvil::PhysicalDevice* physical_device_ptr; + + uint32_t swapchain_image_index; + Anvil::Swapchain* swapchain_ptr; + + LocalModePresentationItem() + { + swapchain_image_index = ~0u; + swapchain_ptr = nullptr; + } + } LocalModePresentationItem; + + /** TODO */ + typedef struct SumModePresentationItem + { + uint32_t n_physical_devices; + const Anvil::PhysicalDevice* const* physical_devices_ptr; + uint32_t swapchain_image_index; + Anvil::Swapchain* swapchain_ptr; + + SumModePresentationItem() + { + n_physical_devices = ~0u; + physical_devices_ptr = nullptr; + swapchain_image_index = ~0u; + swapchain_ptr = nullptr; + } + } SumModePresentationItem; + + /** TODO */ + typedef SumModePresentationItem LocalMultiDeviceModePresentationItem; + + /** TODO */ + typedef LocalModePresentationItem RemoteModePresentationItem; + typedef enum { /* Notification fired right after vkQueuePresentKHR() has been issued for a swapchain. * - * callback_arg: Pointer to PresentRequestIssuedCallbackArgument instance. + * callback_arg: Pointer to OnPresentRequestIssuedCallbackArgument instance. */ QUEUE_CALLBACK_ID_PRESENT_REQUEST_ISSUED, @@ -49,36 +89,60 @@ namespace Anvil } QueueCallbacKID; class Queue : public CallbacksSupportProvider, - public DebugMarkerSupportProvider + public DebugMarkerSupportProvider, + public MTSafetySupportProvider { public: /* Public functions */ /** Initializes a new Vulkan queue instance. * - * After construction, set_swapchain() should be called individually - * to assign a swapchain to the queue. This is necessary in order for - * present() calls to work correctly. + * NOTE: This function must only be used by Anvil::*Device! * * @param in_device_ptr Device to retrieve the queue from. * @param in_queue_family_index Index of the queue family to retrieve the queue from. * @param in_queue_index Index of the queue to retrieve. + * @param in_mt_safe True if queue submissions should be protected by a mutex, guaranteeing + * no more than one thread at a time will ever submit a cmd buffer to the + * same cmd queue. + * @param in_global_priority Global priority of the new queue. Setting this value to anything else than Anvil::QueueGlobalPriority::MEDIUM_EXT + * requires VK_EXT_queue_global_priority support. **/ - static std::shared_ptr create(std::weak_ptr in_device_ptr, - uint32_t in_queue_family_index, - uint32_t in_queue_index); + static std::unique_ptr create(const Anvil::BaseDevice* in_device_ptr, + uint32_t in_queue_family_index, + uint32_t in_queue_index, + bool in_mt_safe, + const Anvil::QueueGlobalPriority& in_global_priority = Anvil::QueueGlobalPriority::MEDIUM_EXT); /** Destructor */ virtual ~Queue(); + /** Starts a queue debug label region. App must call end_debug_utils_label() for the same queue instance + * at some point to declare the end of the label region. + * + * Requires VK_EXT_debug_utils support. Otherwise, the call is moot. + * + * @param in_label_name_ptr Meaning as per VkDebugUtilsLabelEXT::pLabelName. Must not be nullptr. 
+ * @param in_color_vec4_ptr Meaning as per VkDebugUtilsLabelEXT::color. Must not be nullptr. + */ + void begin_debug_utils_label(const char* in_label_name_ptr, + const float* in_color_vec4_ptr); + /** Updates sparse resource memory bindings using this queue. * * @param in_update Detailed information about the update to be carried out. **/ - bool bind_sparse_memory(Anvil::Utils::SparseMemoryBindingUpdateInfo& in_update); + bool bind_sparse_memory(Anvil::SparseMemoryBindingUpdateInfo& in_update); + + /** Ends a queue debug label region. Requires a preceding begin_debug_utils_label() call. + * + * Requires VK_EXT_debug_utils support. Otherwise, the call is moot. + * + */ + void end_debug_utils_label(); /** Retrieves parent device instance */ - std::weak_ptr get_parent_device() const + const Anvil::BaseDevice* get_parent_device() const { return m_device_ptr; } @@ -95,171 +159,131 @@ namespace Anvil return m_queue_family_index; } + /** Retrieves global priority used to create the queue. + * + * Only meaningful if VK_EXT_queue_global_priority if supported. + */ + const Anvil::QueueGlobalPriority& get_queue_global_priority() const + { + return m_queue_global_priority; + } + /** Retrieves index of the queue */ uint32_t get_queue_index() const { return m_queue_index; } + /** Inserts a single queue debug label. + * + * Requires VK_EXT_debug_utils support. Otherwise, the call is moot. + * + * @param in_label_name_ptr Meaning as per VkDebugUtilsLabelEXT::pLabelName. Must not be nullptr. + * @param in_color_vec4_ptr Meaning as per VkDebugUtilsLabelEXT::color. Must not be nullptr. + */ + void insert_debug_utils_label(const char* in_label_name_ptr, + const float* in_color_vec4_ptr); + /** Presents the specified swapchain image using this queue. This queue must support presentation * for the swapchain's rendering surface in order for this call to succeed. * * This function will only succeed if supports_presentation() returns true. + * This function will only succeed if used for a single-GPU device instance. * * NOTE: If you are presenting to an off-screen window, make sure to transition - * the image to VK_IMAGE_LAYOUT_GENERAL, instead of VK_IMAGE_LAYOUT_PRESENT_SRC_KHR. + * the image to Anvil::ImageLayout::GENERAL, instead of Anvil::ImageLayout::PRESENT_SRC_KHR. * In off-screen rendering mode, swapchain images are actually regular images, so * presentable layout is not supported. * - * @param in_swapchain_ptr Swapchain to use for the operation. Must not be NULL. - * @param in_swapchain_image_index Index of the swapchain image to present. - * @param in_n_wait_semaphores Number of semaphores defined under @param in_wait_semaphore_ptrs. These sems will - * be waited on before presentation occurs. - * @param in_wait_semaphore_pts An array of @param in_n_wait_semaphores semaphore wrapper instances. May be nullptr - * if @param in_n_wait_semaphores is 0. + * @param in_swapchain_ptr Swapchain to use for the operation. Must not be NULL. + * @param in_swapchain_image_index Index of the swapchain image to present. + * @param in_n_wait_semaphores Number of semaphores defined under @param in_wait_semaphore_ptrs. These sems will + * be waited on before presentation occurs. + * @param in_wait_semaphore_ptrs_ptr Ptr to an array of @param in_n_wait_semaphores semaphore wrapper instances. May be nullptr + * if @param in_n_wait_semaphores is 0. * * @return Vulkan result for the operation. 
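A minimal sketch of the debug-label entrypoints introduced above; the label text and color are arbitrary, queue_ptr is assumed to be a valid Anvil::Queue*, and, as documented, the calls are no-ops when VK_EXT_debug_utils is unavailable:

    const float region_color[4] = { 1.0f, 0.5f, 0.0f, 1.0f };

    queue_ptr->begin_debug_utils_label ("Shadow pass", region_color);
    queue_ptr->insert_debug_utils_label("Cascade 0 recorded", region_color);

    /* ... submit the region's command buffers here ... */

    queue_ptr->end_debug_utils_label();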
**/ - VkResult present(std::shared_ptr in_swapchain_ptr, - uint32_t in_swapchain_image_index, - uint32_t in_n_wait_semaphores, - std::shared_ptr* in_wait_semaphore_ptrs); - - /** Waits on the semaphores specified by the user, executes user-defined command buffers, - * and then signals the semaphores passed by arguments. If a non-nullptr fence is specified, - * the function will also wait on the call to finish GPU-side before returning. - * - * It is valid to specify 0 command buffers or signal/wait semaphores, in which case - * these steps will be skipped. + bool present(Anvil::Swapchain* in_swapchain_ptr, + uint32_t in_swapchain_image_index, + uint32_t in_n_wait_semaphores, + Anvil::Semaphore* const* in_wait_semaphore_ptrs_ptr, + Anvil::SwapchainOperationErrorCode* out_present_results_ptr); + + /** See present() documentation for general information about this function. * - * If @param in_should_block is true and @param in_opt_fence_ptr is nullptr, the function will create - * a new fence, wait on it, and then release it prior to leaving. This may come at a performance cost. + * This function can be called for both single-GPU and multi-GPU devices. * - * @param in_n_command_buffers Number of command buffers under @param opt_cmd_buffer_ptrs - * which should be executed. May be 0. - * @param in_opt_cmd_buffer_ptrs Array of command buffers to execute. Can be nullptr if - * @param n_command_buffers is 0. - * @param in_n_semaphores_to_signal Number of semaphores to signal after command buffers finish - * executing. May be 0. - * @param in_opt_semaphore_to_signal_ptr_ptrs Array of semaphores to signal after execution. May be nullptr if - * @param n_semaphores_to_signal is 0. - * @param in_n_semaphores_to_wait_on Number of semaphores to wait on before executing command buffers. - * May be 0. - * @param in_opt_semaphore_to_wait_on_ptr_ptrs Array of semaphores to wait on prior to execution. May be nullptr - * if @param n_semaphores_to_wait_on is 0. - * @param in_opt_dst_stage_masks_to_wait_on_ptrs Array of size @param n_semaphores_to_wait_on, specifying stages - * at which the wait ops should be performed. May be nullptr if - * @param n_semaphores_to_wait_on is 0. - * @param in_should_block true if the function should wait for the scheduled commands to - * finish executing, false otherwise. - * @param in_opt_fence_ptr Fence to use when submitting the comamnd buffers to the queue. - * The fence will be waited on if @param should_block is true. - * If @param should_block is false, the fence will be passed at - * submit call time, but will not be waited on. - **/ - void submit_command_buffers(uint32_t in_n_command_buffers, - std::shared_ptr const* in_opt_cmd_buffer_ptrs, - uint32_t in_n_semaphores_to_signal, - std::shared_ptr const* in_opt_semaphore_to_signal_ptr_ptrs, - uint32_t in_n_semaphores_to_wait_on, - std::shared_ptr const* in_opt_semaphore_to_wait_on_ptr_ptrs, - const VkPipelineStageFlags* in_opt_dst_stage_masks_to_wait_on_ptrs, - bool in_should_block, - std::shared_ptr in_opt_fence_ptr = std::shared_ptr() ); - - /** Submits specified command buffer to the queue. Can optionally wait until the execution finishes. 
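present() now reports the per-swapchain outcome through an out-parameter and returns a bool, instead of handing back a VkResult directly. A minimal single-GPU sketch, assuming queue_ptr, swapchain_ptr, image_index and wait_sem_ptr were obtained elsewhere:

    Anvil::Semaphore*                  wait_sems[]    = { wait_sem_ptr };
    Anvil::SwapchainOperationErrorCode present_result;  /* filled by the call below */

    if (!queue_ptr->present(swapchain_ptr,
                            image_index,
                            1,         /* in_n_wait_semaphores       */
                            wait_sems, /* in_wait_semaphore_ptrs_ptr */
                           &present_result) )
    {
        // Inspect present_result (e.g. an out-of-date swapchain) and recover accordingly.
    }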
+ * @param in_n_local_mode_presentation_items TODO + * @param in_local_mode_presentation_items TODO + * @param in_n_wait_semaphores TODO + * @param in_wait_semaphore_ptrs_ptr TODO * - * For argument discussion, please see submit_command_buffers() documentation + * @return TODO **/ - void submit_command_buffer(std::shared_ptr in_cmd_buffer_ptr, - bool in_should_block, - std::shared_ptr in_opt_fence_ptr = std::shared_ptr() ) - { - submit_command_buffers(1, /* n_command_buffers */ - &in_cmd_buffer_ptr, - 0, /* n_semaphores_to_signal */ - nullptr, /* semaphore_to_signal_ptrs */ - 0, /* n_semaphoires_to_wait_on */ - nullptr, /* semaphore_to_wait_on_ptrs */ - nullptr, - in_should_block, - in_opt_fence_ptr); - } + bool present_in_local_presentation_mode(uint32_t in_n_local_mode_presentation_items, + const LocalModePresentationItem* in_local_mode_presentation_items, + uint32_t in_n_wait_semaphores, + Anvil::Semaphore* const* in_wait_semaphore_ptrs_ptr, + Anvil::SwapchainOperationErrorCode* out_present_results_ptr); - /** Submits specified command buffer to the queue. After the command buffer finishes executing - * GPU-side, user-specified semaphores will be signalled. + /** See present() documentation for general information about this function. * - * Can optionally wait until the execution finishes. + * This function can be called for both single-GPU and multi-GPU devices. * - * For argument discussion, please see submit_command_buffers() documentation + * @param in_n_local_multi_device_mode_presentation_items TODO + * @param in_local_multi_device_mode_presentation_items TODO + * @param in_n_wait_semaphores TODO + * @param in_wait_semaphore_ptrs_ptr TODO + * + * @return TODO **/ - void submit_command_buffer_with_signal_semaphores(std::shared_ptr in_opt_cmd_buffer_ptr, - uint32_t in_n_semaphores_to_signal, - std::shared_ptr const* in_semaphore_to_signal_ptr_ptrs, - bool in_should_block, - std::shared_ptr in_opt_fence_ptr = std::shared_ptr() ) - { - submit_command_buffers((in_opt_cmd_buffer_ptr != nullptr) ? 1u : 0u, - &in_opt_cmd_buffer_ptr, - in_n_semaphores_to_signal, - in_semaphore_to_signal_ptr_ptrs, - 0, /* n_semaphores_to_wait_on */ - nullptr, /* semaphore_to_wait_on_ptr_ptrs */ - nullptr, /* opt_dst_stage_masks_to_wait_on_ptrs */ - in_should_block, - in_opt_fence_ptr); - } + bool present_in_local_multi_device_presentation_mode(uint32_t in_n_local_multi_device_mode_presentation_items, + const LocalMultiDeviceModePresentationItem* in_local_multi_device_mode_presentation_items, + uint32_t in_n_wait_semaphores, + Anvil::Semaphore* const* in_wait_semaphore_ptrs_ptr, + Anvil::SwapchainOperationErrorCode* out_present_results_ptr); - /** Waits on the user-specified semaphores and submits specified command buffer to the queue. - * After the command buffer finishes executing GPU-side, user-specified semaphores will be - * signalled. + /** See present() documentation for general information about this function. * - * Can optionally wait until the execution finishes. + * This function can be called for multi-GPU devices. 
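For the mGPU paths, each presentation is described with one of the *PresentationItem structs defined at the top of this file's hunk. A minimal local-mode sketch (physical_device_ptr, swapchain_ptr, image_index and queue_ptr are assumed to exist):

    Anvil::LocalModePresentationItem   present_item;
    Anvil::SwapchainOperationErrorCode present_result; /* filled by the call below */

    present_item.physical_device_ptr   = physical_device_ptr;
    present_item.swapchain_ptr         = swapchain_ptr;
    present_item.swapchain_image_index = image_index;

    queue_ptr->present_in_local_presentation_mode(1, /* in_n_local_mode_presentation_items */
                                                 &present_item,
                                                  0,       /* in_n_wait_semaphores       */
                                                  nullptr, /* in_wait_semaphore_ptrs_ptr */
                                                 &present_result);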
* - * For argument discussion, please see submit_command_buffers() documentation + * @param in_n_remote_mode_presentation_items TODO + * @param in_remote_mode_presentation_items TODO + * @param in_n_wait_semaphores TODO + * @param in_wait_semaphore_ptrs_ptr TODO + * + * @return TODO **/ - void submit_command_buffer_with_signal_wait_semaphores(std::shared_ptr in_opt_cmd_buffer_ptr, - uint32_t in_n_semaphores_to_signal, - std::shared_ptr const* in_semaphore_to_signal_ptr_ptrs, - uint32_t in_n_semaphores_to_wait_on, - std::shared_ptr const* in_semaphore_to_wait_on_ptr_ptrs, - const VkPipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, - bool in_should_block, - std::shared_ptr in_opt_fence_ptr = std::shared_ptr() ) - { - submit_command_buffers((in_opt_cmd_buffer_ptr != nullptr) ? 1u : 0u, - &in_opt_cmd_buffer_ptr, - in_n_semaphores_to_signal, - in_semaphore_to_signal_ptr_ptrs, - in_n_semaphores_to_wait_on, - in_semaphore_to_wait_on_ptr_ptrs, - in_dst_stage_masks_to_wait_on_ptrs, - in_should_block, - in_opt_fence_ptr); - } + bool present_in_remote_presentation_mode(uint32_t in_n_remote_mode_presentation_items, + const RemoteModePresentationItem* in_remote_mode_presentation_items, + uint32_t in_n_wait_semaphores, + Anvil::Semaphore* const* in_wait_semaphore_ptrs_ptr, + Anvil::SwapchainOperationErrorCode* out_present_results_ptr); - /** Waits on the user-specified semaphores and submits specified command buffer to the queue. + /** See present() documentation for general information about this function. * - * Can optionally wait until the execution finishes. + * This function can be called for multi-GPU devices. * - * For argument discussion, please see submit_command_buffers() documentation + * @param in_n_sum_mode_presentation_items TODO + * @param in_sum_mode_presentation_items TODO + * @param in_n_wait_semaphores TODO + * @param in_wait_semaphore_ptrs_ptr TODO + * + * @return TODO **/ - void submit_command_buffer_with_wait_semaphores(std::shared_ptr in_cmd_buffer_ptr, - uint32_t in_n_semaphores_to_wait_on, - std::shared_ptr const* in_semaphore_to_wait_on_ptr_ptrs, - const VkPipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, - bool in_should_block, - std::shared_ptr in_opt_fence_ptr = std::shared_ptr() ) + bool present_in_sum_presentation_mode(uint32_t in_n_sum_mode_presentation_items, + const SumModePresentationItem* in_sum_mode_presentation_items, + uint32_t in_n_wait_semaphores, + Anvil::Semaphore* const* in_wait_semaphore_ptrs_ptr, + Anvil::SwapchainOperationErrorCode* out_present_results_ptr); + + VkResult submit(const SubmitInfo &in_submit_info); + + /** Tells whether the queue supports protected memory operations */ + bool supports_protected_memory_operations() const { - submit_command_buffers((in_cmd_buffer_ptr != nullptr) ? 
1u : 0u, - &in_cmd_buffer_ptr, - 0u, /* n_semaphores_to_signal */ - nullptr, /* semaphore_to_signal_ptr_ptrs */ - in_n_semaphores_to_wait_on, - in_semaphore_to_wait_on_ptr_ptrs, - in_dst_stage_masks_to_wait_on_ptrs, - in_should_block, - in_opt_fence_ptr); + return m_supports_protected_memory_operations; } /** Tells whether the queue supports sparse bindings */ @@ -268,23 +292,62 @@ namespace Anvil return m_supports_sparse_bindings; } + void wait_idle(); + private: /* Private functions */ + bool present_internal (Anvil::DeviceGroupPresentModeFlagBits in_presentation_mode, + uint32_t in_n_swapchains, + Anvil::Swapchain* const* in_swapchains, + const uint32_t* in_swapchain_image_indices, + const uint32_t* in_device_masks, + uint32_t in_n_wait_semaphores, + Anvil::Semaphore* const* in_wait_semaphore_ptrs, + Anvil::SwapchainOperationErrorCode* out_present_results_ptr); + void present_lock_unlock(uint32_t in_n_swapchains, + const Anvil::Swapchain* const* in_swapchains, + uint32_t in_n_wait_semaphores, + Anvil::Semaphore* const* in_wait_semaphore_ptrs, + bool in_should_lock); + + void bind_sparse_memory_lock_unlock (Anvil::SparseMemoryBindingUpdateInfo& in_update, + bool in_should_lock); + void submit_command_buffers_lock_unlock(uint32_t in_n_command_buffers, + Anvil::CommandBufferBase* const* in_opt_cmd_buffer_ptrs_ptr, + uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_opt_semaphore_to_signal_ptrs_ptr, + uint32_t in_n_semaphores_to_wait_on, + Anvil::Semaphore* const* in_opt_semaphore_to_wait_on_ptrs_ptr, + Anvil::Fence* in_opt_fence_ptr, + bool in_should_lock); + void submit_command_buffers_lock_unlock(uint32_t in_n_command_buffer_submissions, + const CommandBufferMGPUSubmission* in_opt_command_buffer_submissions_ptr, + uint32_t in_n_signal_semaphore_submissions, + const SemaphoreMGPUSubmission* in_opt_signal_semaphore_submissions_ptr, + uint32_t in_n_wait_semaphore_submissions, + const SemaphoreMGPUSubmission* in_opt_wait_semaphore_submissions_ptr, + Anvil::Fence* in_opt_fence_ptr, + bool in_should_lock); /* Constructor. Please see create() for specification */ - Queue(std::weak_ptr in_device_ptr, - uint32_t in_queue_family_index, - uint32_t in_queue_index); + Queue(const Anvil::BaseDevice* in_device_ptr, + uint32_t in_queue_family_index, + uint32_t in_queue_index, + bool in_mt_safe, + const Anvil::QueueGlobalPriority& in_global_priority); Queue (const Queue&); Queue operator=(const Queue&); /* Private variables */ - std::weak_ptr m_device_ptr; + const Anvil::BaseDevice* m_device_ptr; + uint32_t m_n_debug_label_regions_started; VkQueue m_queue; - uint32_t m_queue_family_index; - uint32_t m_queue_index; - std::shared_ptr m_submit_fence_ptr; + const uint32_t m_queue_family_index; + const Anvil::QueueGlobalPriority m_queue_global_priority; + const uint32_t m_queue_index; + Anvil::FenceUniquePtr m_submit_fence_ptr; + bool m_supports_protected_memory_operations; bool m_supports_sparse_bindings; }; }; /* namespace Anvil */ diff --git a/include/wrappers/render_pass.h b/include/wrappers/render_pass.h index 13be594c..2c16722d 100644 --- a/include/wrappers/render_pass.h +++ b/include/wrappers/render_pass.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -20,38 +20,17 @@ // THE SOFTWARE. 
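The submit_command_buffer*() helper family removed from queue.h above collapses into the single submit(SubmitInfo) entrypoint. How a SubmitInfo is populated is not part of this diff, so the sketch below only shows the call site plus the capability queries that accompany it (submit_info and queue_ptr are assumed to exist):

    if (queue_ptr->supports_sparse_bindings() )
    {
        /* bind_sparse_memory() may be used on this queue */
    }

    const VkResult submit_result = queue_ptr->submit(submit_info);

    if (submit_result != VK_SUCCESS)
    {
        /* Handle the error; queue_ptr->wait_idle() is also available to drain the queue. */
    }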
// -/** Defines a render pass wrapper class which simplifies the following processes: - * - * - Attachment configuration & management - * - Life-time management - * - Render pass initialization and tear-down. - * - Subpass configuration & management - * - Support for adding new renderpass/subpass attachments or subpasses with automatic Vulkan FB object re-creation. - **/ #ifndef WRAPPERS_RENDER_PASS_H #define WRAPPERS_RENDER_PASS_H #include "misc/callbacks.h" #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/types.h" namespace Anvil { - enum RenderPassCallbackID - { - /* Call-back issued whenever the originating renderpass becomes dirty - * - * callback_arg: Pointer to OnRenderPassBakeNeededCallbackArgument instance. - */ - RENDER_PASS_CALLBACK_ID_BAKING_NEEDED, - - /* Always last */ - RENDER_PASS_CALLBACK_ID_COUNT - }; - - class RenderPass : public CallbacksSupportProvider, - public DebugMarkerSupportProvider, - public std::enable_shared_from_this + class RenderPass : public DebugMarkerSupportProvider { public: /* Public functions */ @@ -66,13 +45,13 @@ namespace Anvil * throw a NPE if it ever needs to deduce the window size. * * - * @param in_device_ptr Device which will consume the renderpass. Must not be nullptr. - * @param in_opt_swapchain_ptr Swapchain, that the render-pass will render to. See brief for more information. - * May be nullptr. + * @param in_renderpass_create_info_ptr TODO + * @param in_opt_swapchain_ptr Swapchain, that the render-pass will render to. See brief for more information. + * May be nullptr. * **/ - static std::shared_ptr create(std::weak_ptr in_device_ptr, - std::shared_ptr in_opt_swapchain_ptr); + static Anvil::RenderPassUniquePtr create(Anvil::RenderPassCreateInfoUniquePtr in_renderpass_create_info_ptr, + Anvil::Swapchain* in_opt_swapchain_ptr); /** Destructor. * @@ -80,812 +59,44 @@ namespace Anvil **/ virtual ~RenderPass(); - /** Adds a new render-pass color attachment to the internal data model. - * - * This function does NOT re-create the internal VkRenderPass instance. Instead, - * it marks the RenderPass as dirty, which will cause the object to be re-created - * at next bake() or get_render_pass() request. - * - * @param in_format Image format to use for the color attachment. - * @param in_sample_count Number of samples to use for the color attachment (expressed as enum value). - * @param in_load_op Load operation to use for the color attachment. - * @param in_store_op Store operation to use for the color attachment. - * @param in_initial_layout Initial layout to use for the color attachment. - * @param in_final_layout Final layout to use for the color attachment. - * @param in_may_alias true if the memory backing of the image that will be attached may - * overlap with the backing of another attachment within the same - * subpass. - * @param out_attachment_id_ptr Deref will be set to a unique ID assigned to the new color attachment. - * Must not be nullptr. - * - * @return true if the function executed successfully, false otherwise. - **/ - bool add_color_attachment(VkFormat in_format, - VkSampleCountFlags in_sample_count, - VkAttachmentLoadOp in_load_op, - VkAttachmentStoreOp in_store_op, - VkImageLayout in_initial_layout, - VkImageLayout in_final_layout, - bool in_may_alias, - RenderPassAttachmentID* out_attachment_id_ptr); - - /** Adds a new render-pass depth/stencil attachment to the internal data model. - * - * This function does NOT re-create the internal VkRenderPass instance. 
Instead, - * it marks the RenderPass as dirty, which will cause the object to be re-created - * at next bake() or get_render_pass() request. - * - * @param in_format Image format to use for the color attachment. - * @param in_sample_count Number of samples to use for the depth-stencil attachment (expressed as enum value). - * @param in_depth_load_op Load operation to use for the depth data. - * @param in_depth_store_op Store operation to use for the depth data. - * @param in_stencil_load_op Load operation to use for the stencil data. - * @param in_stencil_store_op Store operation to use for the stencil data. - * @param in_initial_layout Initial layout to use for the color attachment. - * @param in_final_layout Final layout to use for the color attachment. - * @param in_may_alias true if the memory backing of the image that will be attached may - * overlap with the backing of another attachment within the same - * subpass. - * @param out_attachment_id_ptr Deref will be set to a unique ID assigned to the new color attachment. - * Must not be nullptr. - * - * @return true if the function executed successfully, false otherwise. - **/ - bool add_depth_stencil_attachment(VkFormat in_format, - VkSampleCountFlags in_sample_count, - VkAttachmentLoadOp in_depth_load_op, - VkAttachmentStoreOp in_depth_store_op, - VkAttachmentLoadOp in_stencil_load_op, - VkAttachmentStoreOp in_stencil_store_op, - VkImageLayout in_initial_layout, - VkImageLayout in_final_layout, - bool in_may_alias, - RenderPassAttachmentID* out_attachment_id_ptr); - - /** Adds a new subpass to the internal data model. - * - * This function does NOT re-create the internal VkRenderPass instance. Instead, - * it marks the RenderPass as dirty, which will cause the object to be re-created - * at next bake() or get_render_pass() request. - * - * @param in_fragment_shader_entrypoint Shader module stage entrypoint descriptor for the fragment stage. - * Please pass an instance created by a dummy constructor if the stage - * is irrelevant. - * @param in_geometry_shader_entrypoint Shader module stage entrypoint descriptor for the geometry stage. - * Please pass an instance created by a dummy constructor if the stage - * is irrelevant. - * @param in_tess_control_shader_entrypoint Shader module stage entrypoint descriptor for the tessellation control stage. - * Please pass an instance created by a dummy constructor if the stage - * is irrelevant. - * @param in_tess_evaluation_shader_entrypoint Shader module stage entrypoint descriptor for the tessellation - * evaluation stage. Please pass an instance created by a dummy constructor - * if the stage is irrelevant. - * @param in_vertex_shader_entrypoint Shader module stage entrypoint descriptor for the vertex stage. - * Must NOT be a dummy instance. - * @param out_subpass_id_ptr Deref will be set to a unique ID of the created subpass. - * Must not be nullptr. - * @param in_opt_pipeline_id (optional) ID of a graphics pipeline to use for the graphics pipeline. - * If not specified, a new pipeline wil be created from scratch. - * - * @return true if the function executed successfully, false otherwise. 
- **/ - bool add_subpass(const ShaderModuleStageEntryPoint& in_fragment_shader_entrypoint, - const ShaderModuleStageEntryPoint& in_geometry_shader_entrypoint, - const ShaderModuleStageEntryPoint& in_tess_control_shader_entrypoint, - const ShaderModuleStageEntryPoint& in_tess_evaluation_shader_entrypoint, - const ShaderModuleStageEntryPoint& in_vertex_shader_entrypoint, - SubPassID* out_subpass_id_ptr, - const Anvil::PipelineID in_opt_pipeline_id = UINT32_MAX); - - /** Adds a new color attachment to the RenderPass instance's specified subpass. - * - * This function does NOT re-create the internal VkRenderPass instance. Instead, - * it marks the RenderPass as dirty, which will cause the object to be re-created - * at next bake() or get_render_pass() request. - * - * @param in_subpass_id ID of the render-pass subpass to update. - * @param in_layout Layout to use for the attachment when executing the subpass. - * Driver takes care of transforming the attachment to the requested layout - * before subpass commands starts executing. - * @param in_attachment_id ID of the render-pass attachment the new sub-pass color attachment - * should refer to. - * @param in_location Location, under which the specified attachment should be accessible to - * the fragment shader. - * @param in_attachment_resolve_id_ptr Must be nullptr if the new color attachment should be single-sample. - * For multi-sample, this argument can optionally point to a render-pass - * attachment descriptor, to which the MS attachment should be resolved to. - * - * @return true if the function executed successfully, false otherwise. - **/ - bool add_subpass_color_attachment(SubPassID in_subpass_id, - VkImageLayout in_layout, - RenderPassAttachmentID in_attachment_id, - uint32_t in_location, - const RenderPassAttachmentID* in_opt_attachment_resolve_id_ptr); - - /** Configures the depth+stencil attachment the subpass should use. - * - * Note that only up to one depth/stencil attachment may be added for each subpass. - * Any attempt to add more such attachments will results in an assertion failure. - * - * This function does NOT re-create the internal VkRenderPass instance. Instead, - * it marks the RenderPass as dirty, which will cause the object to be re-created - * at next bake() or get_render_pass() request. - * - * @param in_subpass_id ID of the subpass to update the depth+stencil attachment for. - * The subpass must have been earlier created with an add_subpass() call. - * @param in_attachment_id ID of the render-pass attachment the depth-stencil attachment should refer to. - * @param in_layout Layout to use for the attachment when executing the subpass. - * Driver takes care of transforming the attachment to the requested layout - * before subpass commands starts executing. - * - * @return true if the function executed successfully, false otherwise. - * - */ - bool add_subpass_depth_stencil_attachment(SubPassID in_subpass_id, - RenderPassAttachmentID in_attachment_id, - VkImageLayout in_layout); - - /** Adds a new input attachment to the RenderPass instance's specified subpass. - * - * This function does NOT re-create the internal VkRenderPass instance. Instead, - * it marks the RenderPass as dirty, which will cause the object to be re-created - * at next bake() or get_render_pass() request. - * - * @param in_subpass_id ID of the render-pass subpass to update. - * @param in_layout Layout to use for the attachment when executing the subpass. 
- * Driver takes care of transforming the attachment to the requested layout - * before subpass commands starts executing. - * @param in_attachment_id ID of the render-pass attachment the new sub-pass input attachment - * should refer to. - * @param in_attachment_index The "input attachment index", under which the specified attachment should - * be accessible to the fragment shader. Do not forget the image view also - * needs to be bound to the descriptor set for the input attachment to work! - * - * @return true if the function executed successfully, false otherwise. - **/ - bool add_subpass_input_attachment(SubPassID in_subpass_id, - VkImageLayout in_layout, - RenderPassAttachmentID in_attachment_id, - uint32_t in_attachment_index); - - /** Adds a new external->subpass dependency to the internal data model. - * - * This function does NOT re-create the internal VkRenderPass instance. Instead, - * it marks the RenderPass as dirty, which will cause the object to be re-created - * at next bake() or get_render_pass() request. - * - * @param in_destination_subpass_id ID of the destination subpass. The subpass must have been - * created earlier with an add_subpass() call. - * @param in_source_stage_mask Source pipeline stage mask. - * @param in_destination_stage_mask Destination pipeline stage mask. - * @param in_source_access_mask Source access mask. - * @param in_destination_access_mask Destination access mask. - * @param in_by_region true if a "by-region" dependency is requested; false otherwise. - * - * @return true if the dependency was added successfully; false otherwise. - * - **/ - bool add_external_to_subpass_dependency(SubPassID in_destination_subpass_id, - VkPipelineStageFlags in_source_stage_mask, - VkPipelineStageFlags in_destination_stage_mask, - VkAccessFlags in_source_access_mask, - VkAccessFlags in_destination_access_mask, - bool in_by_region); - - /** Adds a new self-subpass dependency to the internal data model. - * - * This function does NOT re-create the internal VkRenderPass instance. Instead, - * it marks the RenderPass as dirty, which will cause the object to be re-created - * at next bake() or get_render_pass() request. - * - * @param in_destination_subpass_id ID of the subpass to create the dep for. The subpass must have been - * created earlier with an add_subpass() call. - * @param in_source_stage_mask Source pipeline stage mask. - * @param in_destination_stage_mask Destination pipeline stage mask. - * @param in_source_access_mask Source access mask. - * @param in_destination_access_mask Destination access mask. - * @param in_by_region true if a "by-region" dependency is requested; false otherwise. - * - * @return true if the dependency was added successfully; false otherwise. - * - **/ - bool add_self_subpass_dependency(SubPassID in_destination_subpass_id, - VkPipelineStageFlags in_source_stage_mask, - VkPipelineStageFlags in_destination_stage_mask, - VkAccessFlags in_source_access_mask, - VkAccessFlags in_destination_access_mask, - bool in_by_region); - - /** Adds a new subpass->external dependency to the internal data model. - * - * This function does NOT re-create the internal VkRenderPass instance. Instead, - * it marks the RenderPass as dirty, which will cause the object to be re-created - * at next bake() or get_render_pass() request. - * - * @param in_source_subpass_id ID of the source subpass. The subpass must have been - * created earlier with an add_subpass() call. - * @param in_source_stage_mask Source pipeline stage mask. 
- * @param in_destination_stage_mask Destination pipeline stage mask. - * @param in_source_access_mask Source access mask. - * @param in_destination_access_mask Destination access mask. - * @param in_by_region true if a "by-region" dependency is requested; false otherwise. - * - * @return true if the dependency was added successfully; false otherwise. - **/ - bool add_subpass_to_external_dependency(SubPassID in_source_subpass_id, - VkPipelineStageFlags in_source_stage_mask, - VkPipelineStageFlags in_destination_stage_mask, - VkAccessFlags in_source_access_mask, - VkAccessFlags in_destination_access_mask, - bool in_by_region); - - /** Adds a new subpass->subpass dependency to the internal data model. - * - * This function does NOT re-create the internal VkRenderPass instance. Instead, - * it marks the RenderPass as dirty, which will cause the object to be re-created - * at next bake() or get_render_pass() request. - * - * @param in_source_subpass_id ID of the source subpass. The subpass must have been - * created earlier with an add_subpass() call. - * @param in_destination_subpass_id ID of the source subpass. The subpass must have been - * created earlier with an add_subpass() call. - * @param in_source_stage_mask Source pipeline stage mask. - * @param in_destination_stage_mask Destination pipeline stage mask. - * @param in_source_access_mask Source access mask. - * @param in_destination_access_mask Destination access mask. - * @param in_by_region true if a "by-region" dependency is requested; false otherwise. - * - * @return true if the dependency was added successfully; false otherwise. - **/ - bool add_subpass_to_subpass_dependency(SubPassID in_source_subpass_id, - SubPassID in_destination_subpass_id, - VkPipelineStageFlags in_source_stage_mask, - VkPipelineStageFlags in_destination_stage_mask, - VkAccessFlags in_source_access_mask, - VkAccessFlags in_destination_access_mask, - bool in_by_region); - - /** Re-creates the internal VkRenderPass object. - * - * This function should be considered expensive. - * - * @return true if successful, false otherwise. - **/ - bool bake(); - - /** Tells what type an attachment with user-specified ID has. - * - * @return true if successful, false otherwise - */ - bool get_attachment_type(RenderPassAttachmentID in_attachment_id, - AttachmentType* out_attachment_type_ptr) const; - - /** Retrieves properties of the render pass color attachment with the user-specified ID - * - * @param in_attachment_id ID of the attachment to retrieve properties of. - * @param out_opt_sample_count_ptr If not nullptr, deref will be set to the sample count, specified - * at attachment creation time. May be nullptr. - * @param out_opt_load_op_ptr If not nullptr, deref will be set to the load op, specified at - * attachment creation time. May be nullptr. - * @param out_opt_store_op_ptr If not nullptr, deref will be set to the store op, specified at - * attachment creation time. May be nullptr. - * @param out_opt_initial_layout_ptr If not nullptr, deref will be set to the initial layout, specified - * at attachment creation time. May be nullptr. - * @param out_opt_final_layout_ptr If not nullptr, deref will be set to the final layout, specified at - * attachment creation time. May be nullptr. - * @param out_opt_may_alias_ptr If not nullptr, deref will be set to set to true, if the attachment - * can alias other attachments. Otherwise, it will be set to false. - * May be nullptr. - * - * @return true if successful, false otherwise. 
- **/ - bool get_color_attachment_properties(RenderPassAttachmentID in_attachment_id, - VkSampleCountFlagBits* out_opt_sample_count_ptr = nullptr, - VkAttachmentLoadOp* out_opt_load_op_ptr = nullptr, - VkAttachmentStoreOp* out_opt_store_op_ptr = nullptr, - VkImageLayout* out_opt_initial_layout_ptr = nullptr, - VkImageLayout* out_opt_final_layout_ptr = nullptr, - bool* out_opt_may_alias_ptr = nullptr) const; - - /** Retrieves properties of a dependency at user-specified index. - * - * @param in_n_dependency Index of the dependency to retrieve properties of. - * @param out_destination_subpass_id_ptr Deref will be set to the ID of the dependency's destination, - * or to UINT32_MAX, if the destination of the dependency is external. - * Must not be null. - * @param out_source_subpass_id_ptr Deref will be set to the ID of the dependency's source, - * or to UINT32_MAX, if the source of the dependency is external. - * Must not be null. - * @param out_destination_stage_mask_ptr Deref will be set to the destination stage mask set for the dependency. - * Must not be null. - * @param out_source_stage_mask_ptr Deref will be set to the source stage mask set for the dependency. - * Must not be null. - * @param out_destination_access_mask_ptr Deref will be set to the destination access mask set for the dependency. - * Must not be null. - * @param out_source_access_mask_ptr Deref will be set to the source access mask set for the dependency. - * Must not be null. - * @param out_by_region_ptr Deref will be set to true, if the dependency is a by-region dependency. - * If it is not, deref will be set to false. Must not be null. - * - * @return true if successful, false otherwise - */ - bool get_dependency_properties(uint32_t in_n_dependency, - SubPassID* out_destination_subpass_id_ptr, - SubPassID* out_source_subpass_id_ptr, - VkPipelineStageFlags* out_destination_stage_mask_ptr, - VkPipelineStageFlags* out_source_stage_mask_ptr, - VkAccessFlags* out_destination_access_mask_ptr, - VkAccessFlags* out_source_access_mask_ptr, - bool* out_by_region_ptr) const; - - /** Retrieves properties of the render pass color attachment with the user-specified ID - * - * @param in_attachment_id ID of the attachment to retrieve properties of. - * @param out_opt_depth_load_op_ptr If not nullptr, deref will be set to the depth-specific load op, specified at - * attachment creation time. May be nullptr. - * @param out_opt_depth_store_op_ptr If not nullptr, deref will be set to the depth-specific store op, specified at - * attachment creation time. May be nullptr. - * @param out_opt_stencil_load_op_ptr If not nullptr, deref will be set to the stencil-specific load op, specified at - * attachment creation time. May be nullptr. - * @param out_opt_stencil_store_op_ptr If not nullptr, deref will be set to the stencil-specific store op, specified at - * attachment creation time. May be nullptr. - * @param out_opt_initial_layout_ptr If not nullptr, deref will be set to the initial layout, specified - * at attachment creation time. May be nullptr. - * @param out_opt_final_layout_ptr If not nullptr, deref will be set to the final layout, specified at - * attachment creation time. May be nullptr. - * @param out_opt_may_alias_ptr If not nullptr, deref will be set to set to true, if the attachment - * can alias other attachments. Otherwise, it will be set to false. - * May be nullptr. - * - * @return true if successful, false otherwise. 
- **/ - bool get_depth_stencil_attachment_properties(RenderPassAttachmentID in_attachment_id, - VkAttachmentLoadOp* out_opt_depth_load_op_ptr = nullptr, - VkAttachmentStoreOp* out_opt_depth_store_op_ptr = nullptr, - VkAttachmentLoadOp* out_opt_stencil_load_op_ptr = nullptr, - VkAttachmentStoreOp* out_opt_stencil_store_op_ptr = nullptr, - VkImageLayout* out_opt_initial_layout_ptr = nullptr, - VkImageLayout* out_opt_final_layout_ptr = nullptr, - bool* out_opt_may_alias_ptr = nullptr) const; - - /** Returns the number of added attachments */ - uint32_t get_n_attachments() const + + VkRenderPass get_render_pass() const { - return static_cast(m_attachments.size() ); - } + anvil_assert(m_render_pass != VK_NULL_HANDLE); - /** Returns the number of added dependencies */ - uint32_t get_n_dependencies() const - { - return static_cast(m_subpass_dependencies.size() ); + return m_render_pass; } - /** Returns the number of added subpasses */ - uint32_t get_n_subpasses() const + const Anvil::RenderPassCreateInfo* get_render_pass_create_info() const { - return static_cast(m_subpasses.size() ); + return m_render_pass_create_info_ptr.get(); } - /** Bakes the VkRenderPass object if the RenderPass instance is marked as dirty, and returns it. - * - * @return The requested object. - **/ - VkRenderPass get_render_pass(bool allow_rebaking = true); - - /** Retrieves subpass attachment properties, as specified at creation time. - * - * Triggers baking process if the renderpass is marked as dirty. - * - * @param in_subpass_id ID of the subpass to use for the query. - * @param in_attachment_type Type of the attachment to use for the query. Must not be ATTACHMENT_TYPE_PRESERVE. - * @param out_renderpass_attachment_id_ptr Deref will be set to the ID of the renderpass attachment this subpass - * attachment uses. - * @param out_layout_ptr Deref will be set to the image layout assigned to the attachment for the specific - * subpass. Must be NULL if @param in_attachment_type is ATTACHMENT_TYPE_PRESERVE. - * - * @return true if successful, false otherwise. - */ - bool get_subpass_attachment_properties(SubPassID in_subpass_id, - AttachmentType in_attachment_type, - uint32_t in_n_subpass_attachment, - RenderPassAttachmentID* out_renderpass_attachment_id_ptr, - VkImageLayout* out_layout_ptr); - - /** Returns the graphics pipeline ID, associated with the specified subpass. - * - * @param in_subpass_id As per description. - * @param out_graphics_pipeline_id_ptr Deref will be set to the aforementioned ID. Must not be nullptr. - * Will only be touched if the function returns true. - * - * @return true if the function was successful, false otherwise. - **/ - bool get_subpass_graphics_pipeline_id(SubPassID in_subpass_id, - GraphicsPipelineID* out_graphics_pipeline_id_ptr) const; - - /** Returns the number of attachments of user-specified type, defined for the specified subpass. - * - * This function may trigger renderpass bake process, if the renderpass is marked as dirty - * at the query time, and @param in_attachment_type is set to ATTACHMENT_TYPE_PRESERVE. - * - * @param in_subpass_id As per description. - * @param in_attachment_type Type of the attachment to use for the query. - * @param out_n_attachments_ptr Deref will be set to the value above. Must not be nullptr. - * Will only be touched if the function returns true. - * - * @return true if the function was successful, false otherwise. 
- **/ - bool get_subpass_n_attachments(SubPassID in_subpass_id, - AttachmentType in_attachment_type, - uint32_t* out_n_attachments_ptr); - /** Returns the Swapchain instance, associated with the RenderPass wrapper instance at creation time. */ - std::shared_ptr get_swapchain() const + Anvil::Swapchain* get_swapchain() const { return m_swapchain_ptr; } - /* Releases the graphics pipeline, as used by the specified subpass at the time of the call, - * and assigns the user-specified one. - * - * @param in_subpass_id ID of the subpass, whose graphics pipeline should be changed. - * @param in_new_graphics_pipeline_id ID of the graphics pipeline to start using. - * - * @return true if successful, false otherwise. - **/ - bool set_subpass_graphics_pipeline_id(SubPassID in_subpass_id, - GraphicsPipelineID in_new_graphics_pipeline_id); - private: /* Private type definitions */ - /** Holds properties of a single render-pass attachment. **/ - typedef struct RenderPassAttachment - { - VkAttachmentLoadOp color_depth_load_op; - VkAttachmentStoreOp color_depth_store_op; - VkImageLayout final_layout; - VkFormat format; - uint32_t index; - VkImageLayout initial_layout; - bool may_alias; - VkAttachmentLoadOp stencil_load_op; - VkAttachmentStoreOp stencil_store_op; - AttachmentType type; - - VkSampleCountFlagsVariable(sample_count); - - /** Constructor. Should only be used for color attachments. - * - * @param in_format Format that will be used by the render-pass attachment. - * @param in_sample_count Number of samples of the render-pass attachment (expressed as enum value) - * @param in_load_op Load operation to use for the render-pass attachment. - * @param in_store_op Store operation to use for the render-pass attachment. - * @param in_initial_layout Initial layout fo the render-pass attachment. - * @param in_final_layout Layout to transfer the render-pass attachment to, after - * the render-pass finishes. - * @param in_may_alias true if the attachment's memory backing may alias with - * memory region of another attachment; false otherwise. - * @param in_index Index of the created render-pass attachment. - ***/ - RenderPassAttachment(VkFormat in_format, - VkSampleCountFlags in_sample_count, - VkAttachmentLoadOp in_load_op, - VkAttachmentStoreOp in_store_op, - VkImageLayout in_initial_layout, - VkImageLayout in_final_layout, - bool in_may_alias, - uint32_t in_index) - { - color_depth_load_op = in_load_op; - color_depth_store_op = in_store_op; - final_layout = in_final_layout; - format = in_format; - index = in_index; - initial_layout = in_initial_layout; - may_alias = in_may_alias; - sample_count = in_sample_count; - stencil_load_op = VK_ATTACHMENT_LOAD_OP_DONT_CARE; - stencil_store_op = VK_ATTACHMENT_STORE_OP_DONT_CARE; - type = ATTACHMENT_TYPE_COLOR; - } - - /** Constructor. Should only be used for depth/stencil attachments - * - * @param in_format Format that will be used by the render-pass attachment. - * @param in_sample_count Number of samples of the render-pass attachment (expressed as enum value) - * @param in_depth_load_op Load operation to use for the render-pass attachment's depth data. - * @param in_depth_store_op Store operation to use for the render-pass attachment's depth data. - * @param in_stencil_load_op Load operation to use for the render-pass attachment's stencil data. - * @param in_stencil_store_op Store operation to use for the render-pass attachment's stencil data. - * @param in_initial_layout Initial layout fo the render-pass attachment. 
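With the attachment/subpass mutators removed, the render pass description now arrives fully formed via RenderPassCreateInfo, and the wrapper keeps only the getters retained in this hunk. A minimal sketch of the new creation flow; populating the create info is outside this diff, so render_pass_create_info_ptr and swapchain_ptr are assumptions:

    Anvil::RenderPassUniquePtr render_pass_ptr = Anvil::RenderPass::create(std::move(render_pass_create_info_ptr),
                                                                           swapchain_ptr /* in_opt_swapchain_ptr, may be nullptr */);

    const VkRenderPass                 render_pass_vk  = render_pass_ptr->get_render_pass            ();
    const Anvil::RenderPassCreateInfo* create_info_ptr = render_pass_ptr->get_render_pass_create_info();

    /* render_pass_vk can now be fed to framebuffer/pipeline creation; create_info_ptr
     * replaces the per-attachment/per-subpass query functions removed above. */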
- * @param in_final_layout Layout to transfer the render-pass attachment to, after - * the render-pass finishes. - * @param in_may_alias true if the attachment's memory backing may alias with - * memory region of another attachment; false otherwise. - * @param in_index Index of the created render-pass attachment. - **/ - RenderPassAttachment(VkFormat in_format, - VkSampleCountFlags in_sample_count, - VkAttachmentLoadOp in_depth_load_op, - VkAttachmentStoreOp in_depth_store_op, - VkAttachmentLoadOp in_stencil_load_op, - VkAttachmentStoreOp in_stencil_store_op, - VkImageLayout in_initial_layout, - VkImageLayout in_final_layout, - bool in_may_alias, - uint32_t in_index) - { - color_depth_load_op = in_depth_load_op; - color_depth_store_op = in_depth_store_op; - final_layout = in_final_layout; - format = in_format; - index = in_index; - initial_layout = in_initial_layout; - may_alias = in_may_alias; - sample_count = in_sample_count; - stencil_load_op = in_stencil_load_op; - stencil_store_op = in_stencil_store_op; - type = ATTACHMENT_TYPE_DEPTH_STENCIL; - } - - /** Dummy constructor. This should only be used by STL containers. */ - RenderPassAttachment() - { - color_depth_load_op = VK_ATTACHMENT_LOAD_OP_MAX_ENUM; - color_depth_store_op = VK_ATTACHMENT_STORE_OP_MAX_ENUM; - final_layout = VK_IMAGE_LAYOUT_MAX_ENUM; - format = VK_FORMAT_MAX_ENUM; - index = UINT32_MAX; - initial_layout = VK_IMAGE_LAYOUT_MAX_ENUM; - may_alias = false; - sample_count = static_cast(0); - stencil_load_op = VK_ATTACHMENT_LOAD_OP_MAX_ENUM; - stencil_store_op = VK_ATTACHMENT_STORE_OP_MAX_ENUM; - type = ATTACHMENT_TYPE_UNKNOWN; - } - } RenderPassAttachment; - - typedef std::vector RenderPassAttachments; - - /* Holds properties of a sub-pass attachment */ - typedef struct SubPassAttachment - { - RenderPassAttachment* attachment_ptr; - uint32_t highest_subpass_index; - VkImageLayout layout; - uint32_t lowest_subpass_index; - RenderPassAttachment* resolve_attachment_ptr; - - /* Dummy constructor. Should only be used by STL containers */ - SubPassAttachment() - { - attachment_ptr = nullptr; - highest_subpass_index = UINT32_MAX; - layout = VK_IMAGE_LAYOUT_MAX_ENUM; - lowest_subpass_index = UINT32_MAX; - resolve_attachment_ptr = nullptr; - } - - /** Constructor. - * - * @param in_attachment_ptr Render-pass attachment that this sub-pass attachment should reference. - * Must not be nullptr. - * @param in_layout Layout to use for the attachment when executing the subpass. - * Driver takes care of transforming the attachment to the requested layout - * before subpass commands starts executing. - * @param in_resolve_attachment_ptr If not nullptr, this should point to the render-pass attachment, to which - * MS data of @param in_attachment_ptr should be resolved. If nullptr, it is - * assumed the sub-pass should not resolve the MS data. 
- **/ - SubPassAttachment(RenderPassAttachment* in_attachment_ptr, - VkImageLayout in_layout, - RenderPassAttachment* in_opt_resolve_attachment_ptr) - { - attachment_ptr = in_attachment_ptr; - highest_subpass_index = UINT32_MAX; - layout = in_layout; - lowest_subpass_index = UINT32_MAX; - resolve_attachment_ptr = in_opt_resolve_attachment_ptr; - } - } SubPassAttachment; - - typedef std::map LocationToSubPassAttachmentMap; - typedef LocationToSubPassAttachmentMap::const_iterator LocationToSubPassAttachmentMapConstIterator; - typedef std::vector SubPassAttachmentVector; - - /** Holds properties of a single sub-pass */ - typedef struct SubPass - { - LocationToSubPassAttachmentMap color_attachments_map; - SubPassAttachment depth_stencil_attachment; - std::weak_ptr device_ptr; - uint32_t index; - LocationToSubPassAttachmentMap input_attachments_map; - GraphicsPipelineID pipeline_id; - SubPassAttachmentVector preserved_attachments; - LocationToSubPassAttachmentMap resolved_attachments_map; - - SubPassAttachment* get_color_attachment_at_index(uint32_t in_index) - { - return get_attachment_at_index(color_attachments_map, - in_index); - } - - SubPassAttachment* get_input_attachment_at_index(uint32_t in_index) - { - return get_attachment_at_index(input_attachments_map, - in_index); - } - - SubPassAttachment* get_resolved_attachment_at_index(uint32_t in_index) - { - return get_attachment_at_index(resolved_attachments_map, - in_index); - } - - /** Dummy constructor. This should only be used by STL containers */ - SubPass() - { - index = UINT32_MAX; - pipeline_id = UINT32_MAX; - } - - /** Constructor. - * - * @param in_index Index of the sub-pass - * @param in_pipeline_id ID of the graphics pipeline which is associated with the subpass. - * - **/ - SubPass(std::weak_ptr in_device_ptr, - uint32_t in_index, - GraphicsPipelineID in_pipeline_id) - { - device_ptr = in_device_ptr; - index = in_index; - pipeline_id = in_pipeline_id; - } - - /* Destructor */ - ~SubPass(); - - private: - /** Returns a pointer to the SubPassAttachment instance, assigned to the index specified - * under @param index in @param in_map. - **/ - SubPassAttachment* get_attachment_at_index(LocationToSubPassAttachmentMap& in_map, - uint32_t in_index) - { - uint32_t current_index = 0; - SubPassAttachment* result_ptr = nullptr; - - for (auto attachment_iterator = in_map.begin(); - attachment_iterator != in_map.end(); - ++attachment_iterator, ++current_index) - { - if (current_index == in_index) - { - result_ptr = &attachment_iterator->second; - - break; - } - } - - return result_ptr; - } - - SubPass (const SubPass&); - SubPass& operator=(const SubPass&); - } SubPass; - - typedef std::vector SubPasses; - typedef SubPasses::const_iterator SubPassesConstIterator; - - /** Holds properties of a single subpass<->subpass dependency. */ - typedef struct SubPassDependency - { - VkAccessFlagsVariable (destination_access_mask); - VkPipelineStageFlagsVariable(destination_stage_mask); - VkAccessFlagsVariable (source_access_mask); - VkPipelineStageFlagsVariable(source_stage_mask); - - bool by_region; - const SubPass* destination_subpass_ptr; - const SubPass* source_subpass_ptr; - - /** Constructor. - * - * @param in_destination_stage_mask Destination pipeline stage mask. - * @param in_destination_subpass_ptr Pointer to the descriptor of the destination subpass. - * If nullptr, it is assumed an external destination is requested. - * @param in_source_stage_mask Source pipeline stage mask. 
- * @param in_source_subpass_ptr Pointer to the descriptor of the source subpass. - * If nullptr, it is assumed an external source is requested. - * @param in_source_access_mask Source access mask. - * @param in_destination_access_mask Destination access mask. - * @param in_by_region true if a "by-region" dependency is requested; false otherwise. - **/ - SubPassDependency(VkPipelineStageFlags in_destination_stage_mask, - const SubPass* in_destination_subpass_ptr, - VkPipelineStageFlags in_source_stage_mask, - const SubPass* in_source_subpass_ptr, - VkAccessFlags in_source_access_mask, - VkAccessFlags in_destination_access_mask, - bool in_by_region) - { - by_region = in_by_region; - destination_stage_mask = static_cast(in_destination_stage_mask); - destination_subpass_ptr = in_destination_subpass_ptr; - destination_access_mask = static_cast (in_destination_access_mask); - source_access_mask = static_cast (in_source_access_mask); - source_stage_mask = static_cast(in_source_stage_mask); - source_subpass_ptr = in_source_subpass_ptr; - } - - /** Dummy constructor. Should only be used by STL containers */ - SubPassDependency() - { - destination_access_mask = static_cast (0); - destination_stage_mask = static_cast(0); - destination_subpass_ptr = nullptr; - source_access_mask = static_cast (0); - source_stage_mask = static_cast(0); - source_subpass_ptr = nullptr; - } - - /** Comparator operator */ - bool operator==(const SubPassDependency& in) - { - return in.by_region == by_region && - in.destination_access_mask == destination_access_mask && - in.destination_stage_mask == destination_stage_mask && - in.destination_subpass_ptr == destination_subpass_ptr && - in.source_access_mask == source_access_mask && - in.source_stage_mask == source_stage_mask && - in.source_subpass_ptr == source_subpass_ptr; - } - } SubPassDependency; - - typedef std::vector SubPassDependencies; - /* Private functions */ + bool init_using_core_vk10 (); + bool init_using_rp2_create_extension(); + /* Constructor. 
Please see create() for specification */ - RenderPass(std::weak_ptr in_device_ptr, - std::shared_ptr in_opt_swapchain_ptr); + RenderPass(Anvil::RenderPassCreateInfoUniquePtr in_renderpass_create_info_ptr, + Anvil::Swapchain* in_opt_swapchain_ptr); RenderPass& operator=(const RenderPass&); RenderPass (const RenderPass&); - bool add_dependency (SubPass* in_destination_subpass_ptr, - SubPass* in_source_subpass_ptr, - VkPipelineStageFlags in_source_stage_mask, - VkPipelineStageFlags in_destination_stage_mask, - VkAccessFlags in_source_access_mask, - VkAccessFlags in_destination_access_mask, - bool in_by_region); - bool add_subpass_attachment(SubPassID in_subpass_id, - bool in_is_color_attachment, - VkImageLayout in_input_layout, - RenderPassAttachmentID in_attachment_id, - uint32_t in_attachment_location, - bool in_should_resolve, - RenderPassAttachmentID in_resolve_attachment_id); - - VkAttachmentReference get_attachment_reference_from_renderpass_attachment(const RenderPassAttachment& in_renderpass_attachment) const; - VkAttachmentReference get_attachment_reference_from_subpass_attachment (const SubPassAttachment& in_subpass_attachment) const; - VkAttachmentReference get_attachment_reference_for_resolve_attachment (const SubPassesConstIterator& in_subpass_iterator, - const LocationToSubPassAttachmentMapConstIterator& in_location_to_subpass_att_map_iterator) const; - void update_preserved_attachments (); - /* Private members */ - RenderPassAttachments m_attachments; - std::weak_ptr m_device_ptr; - bool m_dirty; - VkRenderPass m_render_pass; - SubPasses m_subpasses; - SubPassDependencies m_subpass_dependencies; - std::shared_ptr m_swapchain_ptr; + VkRenderPass m_render_pass; + Anvil::RenderPassCreateInfoUniquePtr m_render_pass_create_info_ptr; + Swapchain* m_swapchain_ptr; }; }; diff --git a/include/wrappers/rendering_surface.h b/include/wrappers/rendering_surface.h index b51ea53b..52b5619f 100644 --- a/include/wrappers/rendering_surface.h +++ b/include/wrappers/rendering_surface.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -31,6 +31,7 @@ #define WRAPPERS_RENDERING_SURFACE_H #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/types.h" #include "wrappers/device.h" #include "wrappers/physical_device.h" @@ -40,42 +41,17 @@ namespace Anvil { /* Wrapper class for Vulkan rendering surfaces */ - class RenderingSurface : public DebugMarkerSupportProvider + class RenderingSurface : public DebugMarkerSupportProvider, + public MTSafetySupportProvider { public: /* Public type definitions */ - typedef struct RenderingSurfaceFormat - { - VkColorSpaceKHR color_space; - VkFormat format; - - /* Constructor. */ - RenderingSurfaceFormat(VkSurfaceFormatKHR& in_surface_format) - { - color_space = in_surface_format.colorSpace; - format = in_surface_format.format; - } - - /** Comparison operator for _vulkan_surface_format and in_format types. - * - * @param in_format Right-side value. - * - * @return true if @param in_format matches _vulkan_surface_format::format value, false - * otherwise. 
- **/ - bool operator==(const VkFormat& in_format) const - { - return (format == in_format); - } - } RenderingSurfaceFormat; /* Public functions */ /** Creates a single Vulkan rendering surface instance and registers the object in * Object Tracker. */ - static std::shared_ptr create(std::weak_ptr in_instance_ptr, - std::weak_ptr in_device_ptr, - std::shared_ptr in_window_ptr); + static Anvil::RenderingSurfaceUniquePtr create(Anvil::RenderingSurfaceCreateInfoUniquePtr in_create_info_ptr); /** Destructor * @@ -85,38 +61,36 @@ namespace Anvil virtual ~RenderingSurface(); /** Returns rendering surface capabilities */ - bool get_capabilities(std::weak_ptr in_physical_device_ptr, - VkSurfaceCapabilitiesKHR* out_surface_caps_ptr) const; + bool get_capabilities(const Anvil::PhysicalDevice* in_physical_device_ptr, + Anvil::SurfaceCapabilities* out_surface_caps_ptr) const; /** Returns rendering surface's height */ uint32_t get_height() const { - anvil_assert(m_height != 0); - return m_height; } /** Returns logical device which was used to create this surface */ - std::weak_ptr get_device() const + const Anvil::BaseDevice* get_device() const { return m_device_ptr; } /** Returns queue family indices which support presentation on a given physical device */ - bool get_queue_families_with_present_support(std::weak_ptr in_physical_device_ptr, - const std::vector** out_result_ptr) const; + bool get_queue_families_with_present_support(const Anvil::PhysicalDevice* in_physical_device_ptr, + const std::vector** out_result_ptr) const; /** Returns composite alpha modes supported by the rendering surface */ - bool get_supported_composite_alpha_flags(std::weak_ptr in_physical_device_ptr, - VkCompositeAlphaFlagsKHR* out_result_ptr) const; + bool get_supported_composite_alpha_flags(const Anvil::PhysicalDevice* in_physical_device_ptr, + Anvil::CompositeAlphaFlags* out_result_ptr) const; /** Returns transformations supported by the rendering surface */ - bool get_supported_transformations(std::weak_ptr in_physical_device_ptr, - VkSurfaceTransformFlagsKHR* out_result_ptr) const; + bool get_supported_transformations(const Anvil::PhysicalDevice* in_physical_device_ptr, + Anvil::SurfaceTransformFlags* out_result_ptr) const; /** Returns flags corresponding to image usage supported by the rendering surface */ - bool get_supported_usages(std::weak_ptr in_physical_device_ptr, - VkImageUsageFlags* out_result_ptr) const; + bool get_supported_usages(const Anvil::PhysicalDevice* in_physical_device_ptr, + ImageUsageFlags* out_result_ptr) const; /** Retrieves a raw handle to the underlying Vulkan Rendering Surface */ VkSurfaceKHR get_surface() const @@ -130,62 +104,76 @@ namespace Anvil return &m_surface; } - /** Returns type of the rendering surface */ - RenderingSurfaceType get_type() const - { - return m_type; - } - /** Returns rendering surface's width */ uint32_t get_width() const { - anvil_assert(m_width != 0); - return m_width; } /* Tells whether the specified image format can be used for swapchain image initialization, using * this rendering surface. 
*/ - bool is_compatible_with_image_format(std::weak_ptr in_physical_device_ptr, - VkFormat in_image_format, - bool* out_result_ptr) const; + bool is_compatible_with_image_format(const Anvil::PhysicalDevice* in_physical_device_ptr, + Anvil::Format in_image_format, + bool* out_result_ptr) const; /* Tells whether the specified presentation mode is supported by the rendering surface */ - bool supports_presentation_mode(std::weak_ptr in_physical_device_ptr, - VkPresentModeKHR in_presentation_mode, - bool* out_result_ptr) const; + bool supports_presentation_mode(const Anvil::PhysicalDevice* in_physical_device_ptr, + Anvil::PresentModeKHR in_presentation_mode, + bool* out_result_ptr) const; + + void update_surface_extents() const; private: /* Private type definitions */ typedef uint32_t DeviceGroupIndex; + typedef struct RenderingSurfaceFormat + { + Anvil::ColorSpaceKHR color_space; + Anvil::Format format; + + /* Constructor. */ + RenderingSurfaceFormat(Anvil::SurfaceFormatKHR& in_surface_format) + { + color_space = in_surface_format.color_space; + format = static_cast(in_surface_format.format); + } + + /** Comparison operator for _vulkan_surface_format and in_format types. + * + * @param in_format Right-side value. + * + * @return true if @param in_format matches _vulkan_surface_format::format value, false + * otherwise. + **/ + bool operator==(const Anvil::Format& in_format) const + { + return (format == in_format); + } + } RenderingSurfaceFormat; + typedef struct PhysicalDeviceCapabilities { - VkSurfaceCapabilitiesKHR capabilities; + Anvil::SurfaceCapabilities capabilities; std::vector supported_formats; - std::vector supported_presentation_modes; + std::vector supported_presentation_modes; + Anvil::SurfaceTransformFlags supported_transformations; + ImageUsageFlags supported_usages; std::vector present_capable_queue_fams; - VkCompositeAlphaFlagsKHRVariable (supported_composite_alpha_flags); - VkSurfaceTransformFlagsKHRVariable(supported_transformations); - VkImageUsageFlagsVariable (supported_usages); + Anvil::CompositeAlphaFlags supported_composite_alpha_flags; PhysicalDeviceCapabilities() { - memset(&capabilities, - 0, - sizeof(capabilities) ); + /* Stub */ } } PhysicalDeviceCapabilities; /* Private functions */ /* Constructor. Please see create() for specification */ - RenderingSurface(std::weak_ptr in_instance_ptr, - std::weak_ptr in_device_ptr, - std::shared_ptr in_window_ptr, - bool* out_safe_to_use_ptr); + RenderingSurface(Anvil::RenderingSurfaceCreateInfoUniquePtr in_create_info_ptr); RenderingSurface (const RenderingSurface&); RenderingSurface& operator=(const RenderingSurface&); @@ -194,15 +182,12 @@ namespace Anvil bool init (); /* Private variables */ - std::weak_ptr m_device_ptr; - std::shared_ptr m_instance_ptr; + Anvil::RenderingSurfaceCreateInfoUniquePtr m_create_info_ptr; - uint32_t m_height; + mutable uint32_t m_height; std::map m_physical_device_capabilities; VkSurfaceKHR m_surface; - RenderingSurfaceType m_type; - uint32_t m_width; - std::weak_ptr m_window_ptr; + mutable uint32_t m_width; }; }; /* namespace Anvil */ diff --git a/include/wrappers/sampler.h b/include/wrappers/sampler.h index bde522e5..0ccfacc6 100644 --- a/include/wrappers/sampler.h +++ b/include/wrappers/sampler.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
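// The rendering-surface hunk above swaps the shared_ptr/weak_ptr parameters for raw wrapper pointers.
// Minimal usage sketch of the reworked query API, assuming surface_ptr and physical_device_ptr are valid
// Anvil wrapper pointers obtained elsewhere; the PresentModeKHR enumerator name is an assumption.
Anvil::SurfaceCapabilities surface_caps;
bool                       is_mailbox_supported = false;

surface_ptr->get_capabilities          (physical_device_ptr, &surface_caps);
surface_ptr->supports_presentation_mode(physical_device_ptr,
                                        Anvil::PresentModeKHR::MAILBOX_KHR, /* assumed enumerator */
                                        &is_mailbox_supported);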
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -31,13 +31,15 @@ #define WRAPPERS_SAMPLER_H #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/types.h" namespace Anvil { /** Wrapper class for Vulkan samplers */ - class Sampler : public DebugMarkerSupportProvider + class Sampler : public DebugMarkerSupportProvider, + public MTSafetySupportProvider { public: /* Public functions */ @@ -48,21 +50,7 @@ namespace Anvil * * For argument discussion, please consult Vulkan API specification. */ - static std::shared_ptr create(std::weak_ptr in_device_ptr, - VkFilter in_mag_filter, - VkFilter in_min_filter, - VkSamplerMipmapMode in_mipmap_mode, - VkSamplerAddressMode in_address_mode_u, - VkSamplerAddressMode in_address_mode_v, - VkSamplerAddressMode in_address_mode_w, - float in_lod_bias, - float in_max_anisotropy, - bool in_compare_enable, - VkCompareOp in_compare_op, - float in_min_lod, - float in_max_lod, - VkBorderColor in_border_color, - bool in_use_unnormalized_coordinates); + static Anvil::SamplerUniquePtr create(Anvil::SamplerCreateInfoUniquePtr in_create_info_ptr); /** Destructor. * @@ -71,29 +59,9 @@ namespace Anvil **/ virtual ~Sampler(); - VkSamplerAddressMode get_address_mode_u() const + const SamplerCreateInfo* get_create_info_ptr() const { - return m_address_mode_u; - } - - VkSamplerAddressMode get_address_mode_v() const - { - return m_address_mode_v; - } - - VkSamplerAddressMode get_address_mode_w() const - { - return m_address_mode_w; - } - - VkBorderColor get_border_color() const - { - return m_border_color; - } - - VkCompareOp get_compare_op() const - { - return m_compare_op; + return m_create_info_ptr.get(); } /** Retrieves a raw Vulkan handle for the underlying VkSampler instance. */ @@ -102,98 +70,26 @@ namespace Anvil return m_sampler; } - float get_lod_bias() const - { - return m_lod_bias; - } - - VkFilter get_mag_filter() const - { - return m_mag_filter; - } - - float get_max_anisotropy() const - { - return m_max_anisotropy; - } - - float get_max_lod() const - { - return m_max_lod; - } - - VkFilter get_min_filter() const - { - return m_min_filter; - } - - float get_min_lod() const - { - return m_min_lod; - } - - VkSamplerMipmapMode get_mipmap_mode() const - { - return m_mipmap_mode; - } - /** Retrieves a pointer to the raw Vulkan handle for the underlying VkSampler instance. 
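// The Sampler hunk above folds the former per-property getters (filters, address modes, LOD settings, etc.)
// into the create-info object returned by get_create_info_ptr(). Hedged sketch of the new call pattern;
// the getter name on Anvil::SamplerCreateInfo is an assumption, not something this diff confirms.
const Anvil::SamplerCreateInfo* sampler_info_ptr = sampler_ptr->get_create_info_ptr();
const auto                      mag_filter       = sampler_info_ptr->get_mag_filter(); /* hypothetical getter */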
*/ const VkSampler* get_sampler_ptr() const { return &m_sampler; } - bool is_compare_enabled() const - { - return m_compare_enable; - } - - bool uses_unnormalized_coordinates() const - { - return m_use_unnormalized_coordinates; - } - private: /* Private functions */ + bool init(); + /* Please see create() for specification */ - Sampler(std::weak_ptr in_device_ptr, - VkFilter in_mag_filter, - VkFilter in_min_filter, - VkSamplerMipmapMode in_mipmap_mode, - VkSamplerAddressMode in_address_mode_u, - VkSamplerAddressMode in_address_mode_v, - VkSamplerAddressMode in_address_mode_w, - float in_lod_bias, - float in_max_anisotropy, - bool in_compare_enable, - VkCompareOp in_compare_op, - float in_min_lod, - float in_max_lod, - VkBorderColor in_border_color, - bool in_use_unnormalized_coordinates); + Sampler(Anvil::SamplerCreateInfoUniquePtr in_create_info_ptr); Sampler (const Sampler&); Sampler& operator=(const Sampler&); /* Private variables */ - VkSamplerAddressMode m_address_mode_u; - VkSamplerAddressMode m_address_mode_v; - VkSamplerAddressMode m_address_mode_w; - VkBorderColor m_border_color; - bool m_compare_enable; - VkCompareOp m_compare_op; - float m_lod_bias; - VkFilter m_mag_filter; - float m_max_anisotropy; - float m_max_lod; - VkFilter m_min_filter; - float m_min_lod; - VkSamplerMipmapMode m_mipmap_mode; - bool m_use_unnormalized_coordinates; - - std::weak_ptr m_device_ptr; - VkSampler m_sampler; + SamplerCreateInfoUniquePtr m_create_info_ptr; + VkSampler m_sampler; }; }; /* namespace Anvil */ diff --git a/include/wrappers/sampler_ycbcr_conversion.h b/include/wrappers/sampler_ycbcr_conversion.h new file mode 100644 index 00000000..f1f00d41 --- /dev/null +++ b/include/wrappers/sampler_ycbcr_conversion.h @@ -0,0 +1,74 @@ +// +// Copyright (c) 2019 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#ifndef WRAPPERS_SAMPLER_YCBCR_CONVERSION_H +#define WRAPPERS_SAMPLER_YCBCR_CONVERSION_H + +#include "misc/debug_marker.h" +#include "misc/mt_safety.h" +#include "misc/types.h" + +namespace Anvil +{ + class SamplerYCbCrConversion : public DebugMarkerSupportProvider, + public MTSafetySupportProvider + { + public: + /* Public functions */ + + virtual ~SamplerYCbCrConversion(); + + /** TODO + * + * NOTE: This object can only be used for Vulkan devices with VK_KHR_sampler_ycbcr_conversion extension support. + * + * For argument discussion, please consult Vulkan API specification. 
+ */ + static SamplerYCbCrConversionUniquePtr create(Anvil::SamplerYCbCrConversionCreateInfoUniquePtr in_create_info_ptr); + + const Anvil::SamplerYCbCrConversionCreateInfo* get_create_info_ptr() const + { + return m_create_info_ptr.get(); + } + + VkSamplerYcbcrConversion get_sampler_ycbcr_conversion_vk() const + { + return m_sampler_ycbcr_conversion_vk; + } + + private: + /* Private functions */ + + SamplerYCbCrConversion(Anvil::SamplerYCbCrConversionCreateInfoUniquePtr in_create_info_ptr); + + bool init(); + + /* Private variables */ + + Anvil::SamplerYCbCrConversionCreateInfoUniquePtr m_create_info_ptr; + VkSamplerYcbcrConversion m_sampler_ycbcr_conversion_vk; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(SamplerYCbCrConversion); + ANVIL_DISABLE_COPY_CONSTRUCTOR(SamplerYCbCrConversion); + }; +}; /* namespace Anvil */ + +#endif /* WRAPPERS_SAMPLER_YCBCR_CONVERSION_H */ \ No newline at end of file diff --git a/include/wrappers/semaphore.h b/include/wrappers/semaphore.h index 259e1ef7..29c0529a 100644 --- a/include/wrappers/semaphore.h +++ b/include/wrappers/semaphore.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -32,18 +32,32 @@ #define WRAPPERS_SEMAPHORE_H #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/types.h" namespace Anvil { /* Wrapper class for Vulkan semaphores */ - class Semaphore : public DebugMarkerSupportProvider + class Semaphore : public DebugMarkerSupportProvider, + public MTSafetySupportProvider { public: /* Public functions */ /** Creates a single Vulkan semaphore instance and registers the object in Object Tracker. */ - static std::shared_ptr create(std::weak_ptr in_device_ptr); + static Anvil::SemaphoreUniquePtr create(Anvil::SemaphoreCreateInfoUniquePtr in_create_info_ptr); + + /* Creates a new external semaphore handle of the user-specified type. + * + * For NT handle types, the function can only be called once per each NT handle type. Subsequent + * calls will result in the function triggering an assertion failure and returning null. + * + * Returns nullptr if unsuccessful. + * + * Requires VK_KHR_external_semaphore_fd under Linux. + * Requires VK_KHR_external_semaphore_win32 under Windows. + */ + ExternalHandleUniquePtr export_to_external_handle(const Anvil::ExternalSemaphoreHandleTypeFlagBits& in_semaphore_handle_type); /** Destructor. * @@ -51,6 +65,11 @@ namespace Anvil **/ virtual ~Semaphore(); + const Anvil::SemaphoreCreateInfo* get_create_info_ptr() const + { + return m_create_info_ptr.get(); + } + /** Retrieves a raw handle to the underlying Vulkan semaphore instance */ VkSemaphore get_semaphore() const { @@ -63,25 +82,52 @@ namespace Anvil return &m_semaphore; } - /** Releases the underlying Vulkan Semaphore instance and creates a new Vulkan object. */ - void reset(); + /* TODO + * + * Requires VK_KHR_external_semaphore_fd under Linux. + * Requires VK_KHR_external_semaphore_win32 under Windows. + * + * NOTE: Under Linux, @param in_handle is no longer valid if function returns true. + * + * @return true if successful, false otherwise. + * + * @param in_temporary_import True if a temporary import operation should be performed. False if + * a permanent import is being requested. + * @param in_handle_type Type of the handle that is being imported. 
+ * @param in_handle (Linux): Handle to use. Must not be -1. + * @param in_opt_handle (Windows): Win32 NT handle to use. Must not be null if @param in_opt_name is null and vice versa. + * @param in_opt_name (Windows): Name of the handle to use. Must not be null if @param in_opt_handle is null and vice versa. + */ + #if defined(_WIN32) + bool import_from_external_handle(const bool& in_temporary_import, + const Anvil::ExternalSemaphoreHandleTypeFlagBits& in_handle_type, + const ExternalHandleType& in_opt_handle, + const std::wstring& in_opt_name); + #else + bool import_from_external_handle(const bool& in_temporary_import, + const Anvil::ExternalSemaphoreHandleTypeFlagBits& in_handle_type, + const ExternalHandleType& in_handle); + #endif + /** Releases the underlying Vulkan Semaphore instance and creates a new Vulkan object. */ + bool reset(); private: /* Private functions */ /* Constructor. Please see create() for specification */ - Semaphore(std::weak_ptr in_device_ptr); - - Semaphore (const Semaphore&); - Semaphore& operator=(const Semaphore&); + Semaphore(Anvil::SemaphoreCreateInfoUniquePtr in_create_info_ptr); void release_semaphore(); /* Private variables */ - std::weak_ptr m_device_ptr; - VkSemaphore m_semaphore; + Anvil::SemaphoreCreateInfoUniquePtr m_create_info_ptr; + std::map m_external_semaphore_created_for_handle_type; + VkSemaphore m_semaphore; + + ANVIL_DISABLE_ASSIGNMENT_OPERATOR(Semaphore); + ANVIL_DISABLE_COPY_CONSTRUCTOR(Semaphore); }; }; /* namespace Anvil */ -#endif /* WRAPPERS_SEMAPHORE_H */ \ No newline at end of file +#endif /* WRAPPERS_SEMAPHORE_H */ diff --git a/include/wrappers/shader_module.h b/include/wrappers/shader_module.h index 167d645f..4d54ccf5 100644 --- a/include/wrappers/shader_module.h +++ b/include/wrappers/shader_module.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -31,6 +31,7 @@ #define WRAPPERS_SHADER_MODULE_H #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/types.h" namespace Anvil @@ -39,7 +40,7 @@ namespace Anvil class GLSLShaderToSPIRVGenerator; class ShaderModule : public DebugMarkerSupportProvider, - public std::enable_shared_from_this + public MTSafetySupportProvider { public: /* Public functions */ @@ -53,8 +54,9 @@ namespace Anvil * @param in_device_ptr Device to use to instantiate the shader module. Must not be nullptr. * @param in_spirv_generator_ptr SPIR-V generator, initialized with a GLSL shader body. **/ - static std::shared_ptr create_from_spirv_generator(std::weak_ptr in_device_ptr, - std::shared_ptr in_spirv_generator_ptr); + static ShaderModuleUniquePtr create_from_spirv_generator(const Anvil::BaseDevice* in_device_ptr, + GLSLShaderToSPIRVGenerator* in_spirv_generator_ptr, + MTSafety in_mt_safety = Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE); /** Creates a new shader module instance from a raw SPIR-V blob. * @@ -77,24 +79,26 @@ namespace Anvil * @param in_opt_vs_entrypoint_name Vertex shader stage entry-point, if one is defined in the blob. * Otherwise, should be set to nullptr. 
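// Pairing with the external-semaphore entry points declared above, a hedged Linux-side export sketch.
// The ExternalSemaphoreHandleTypeFlagBits enumerator name is an assumption; semaphore_ptr is presumed to
// wrap a semaphore created with a matching export handle type, with VK_KHR_external_semaphore_fd enabled.
Anvil::ExternalHandleUniquePtr exported_handle_ptr =
    semaphore_ptr->export_to_external_handle(Anvil::ExternalSemaphoreHandleTypeFlagBits::OPAQUE_FD_BIT); /* assumed enumerator */

if (exported_handle_ptr == nullptr)
{
    // Export failed, or an NT handle of this type was already exported once (Windows-only restriction).
}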
**/ - static std::shared_ptr create_from_spirv_blob(std::weak_ptr in_device_ptr, - const char* in_spirv_blob, - uint32_t in_n_spirv_blob_bytes, - const char* in_opt_cs_entrypoint_name, - const char* in_opt_fs_entrypoint_name, - const char* in_opt_gs_entrypoint_name, - const char* in_opt_tc_entrypoint_name, - const char* in_opt_te_entrypoint_name, - const char* in_opt_vs_entrypoint_name); - static std::shared_ptr create_from_spirv_blob(std::weak_ptr in_device_ptr, - const char* in_spirv_blob, - uint32_t in_n_spirv_blob_bytes, - const std::string& in_opt_cs_entrypoint_name, - const std::string& in_opt_fs_entrypoint_name, - const std::string& in_opt_gs_entrypoint_name, - const std::string& in_opt_tc_entrypoint_name, - const std::string& in_opt_te_entrypoint_name, - const std::string& in_opt_vs_entrypoint_name) + static ShaderModuleUniquePtr create_from_spirv_blob(const Anvil::BaseDevice* in_device_ptr, + const char* in_spirv_blob, + uint32_t in_n_spirv_blob_bytes, + const char* in_opt_cs_entrypoint_name, + const char* in_opt_fs_entrypoint_name, + const char* in_opt_gs_entrypoint_name, + const char* in_opt_tc_entrypoint_name, + const char* in_opt_te_entrypoint_name, + const char* in_opt_vs_entrypoint_name, + MTSafety in_mt_safety = Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE); + static ShaderModuleUniquePtr create_from_spirv_blob(const Anvil::BaseDevice* in_device_ptr, + const char* in_spirv_blob, + uint32_t in_n_spirv_blob_bytes, + const std::string& in_opt_cs_entrypoint_name, + const std::string& in_opt_fs_entrypoint_name, + const std::string& in_opt_gs_entrypoint_name, + const std::string& in_opt_tc_entrypoint_name, + const std::string& in_opt_te_entrypoint_name, + const std::string& in_opt_vs_entrypoint_name, + MTSafety in_mt_safety = Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE) { return create_from_spirv_blob(in_device_ptr, in_spirv_blob, @@ -104,17 +108,19 @@ namespace Anvil in_opt_gs_entrypoint_name.c_str(), in_opt_tc_entrypoint_name.c_str(), in_opt_te_entrypoint_name.c_str(), - in_opt_vs_entrypoint_name.c_str() ); + in_opt_vs_entrypoint_name.c_str(), + in_mt_safety); } - static std::shared_ptr create_from_spirv_blob(std::weak_ptr in_device_ptr, - const uint32_t* in_spirv_blob, - uint32_t in_n_spirv_blob_uint32s, - const std::string& in_opt_cs_entrypoint_name, - const std::string& in_opt_fs_entrypoint_name, - const std::string& in_opt_gs_entrypoint_name, - const std::string& in_opt_tc_entrypoint_name, - const std::string& in_opt_te_entrypoint_name, - const std::string& in_opt_vs_entrypoint_name) + static ShaderModuleUniquePtr create_from_spirv_blob(const Anvil::BaseDevice* in_device_ptr, + const uint32_t* in_spirv_blob, + uint32_t in_n_spirv_blob_uint32s, + const std::string& in_opt_cs_entrypoint_name, + const std::string& in_opt_fs_entrypoint_name, + const std::string& in_opt_gs_entrypoint_name, + const std::string& in_opt_tc_entrypoint_name, + const std::string& in_opt_te_entrypoint_name, + const std::string& in_opt_vs_entrypoint_name, + MTSafety in_mt_safety = Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE) { return create_from_spirv_blob(in_device_ptr, reinterpret_cast(in_spirv_blob), @@ -124,7 +130,8 @@ namespace Anvil in_opt_gs_entrypoint_name.c_str(), in_opt_tc_entrypoint_name.c_str(), in_opt_te_entrypoint_name.c_str(), - in_opt_vs_entrypoint_name.c_str() ); + in_opt_vs_entrypoint_name.c_str(), + in_mt_safety); } /** Destructor. Releases internally maintained Vulkan shader module instance. 
*/ @@ -139,6 +146,7 @@ namespace Anvil return m_cs_entrypoint_name; } +#ifdef ANVIL_LINK_WITH_GLSLANG /** Returns a disassembly of the SPIR-V blob. * * The actual disassembly is retrieved from glslang and cached for subsequent requests. @@ -146,6 +154,7 @@ namespace Anvil * This function only returns a non-empty string if ANVIL_LINK_WITH_GLSLANG is enabled. */ const std::string& get_disassembly(); +#endif /** Returns name of the fragment shader stage entry-point, as defined at construction time. * @@ -184,7 +193,7 @@ namespace Anvil } /** Returns the device, for which this shader module has been created. */ - std::weak_ptr get_parent_device() const + const Anvil::BaseDevice* get_parent_device() const { return m_device_ptr; } @@ -230,17 +239,19 @@ namespace Anvil /* Private functions */ /* Constructor. Please see create() for specification */ - explicit ShaderModule(std::weak_ptr in_device_ptr, - std::shared_ptr in_spirv_generator_ptr); - explicit ShaderModule(std::weak_ptr in_device_ptr, - const char* in_spirv_blob, - uint32_t in_n_spirv_blob_bytes, - const std::string& in_opt_cs_entrypoint_name, - const std::string& in_opt_fs_entrypoint_name, - const std::string& in_opt_gs_entrypoint_name, - const std::string& in_opt_tc_entrypoint_name, - const std::string& in_opt_te_entrypoint_name, - const std::string& in_opt_vs_entrypoint_name); + explicit ShaderModule(const Anvil::BaseDevice* in_device_ptr, + GLSLShaderToSPIRVGenerator* in_spirv_generator_ptr, + bool in_mt_safe); + explicit ShaderModule(const Anvil::BaseDevice* in_device_ptr, + const char* in_spirv_blob, + uint32_t in_n_spirv_blob_bytes, + const std::string& in_opt_cs_entrypoint_name, + const std::string& in_opt_fs_entrypoint_name, + const std::string& in_opt_gs_entrypoint_name, + const std::string& in_opt_tc_entrypoint_name, + const std::string& in_opt_te_entrypoint_name, + const std::string& in_opt_vs_entrypoint_name, + bool in_mt_safe); ShaderModule (const ShaderModule&); ShaderModule& operator=(const ShaderModule&); @@ -259,7 +270,7 @@ namespace Anvil bool init_from_spirv_blob(const char* in_spirv_blob, uint32_t in_n_spirv_blob_bytes); - void on_object_about_to_be_released(void* in_callback_arg); + void on_device_about_to_be_released(void* in_callback_arg); /* Private variables */ std::string m_cs_entrypoint_name; @@ -269,11 +280,10 @@ namespace Anvil std::string m_te_entrypoint_name; std::string m_vs_entrypoint_name; - std::weak_ptr m_device_ptr; - const Anvil::BaseDevice* m_device_raw_ptr; - std::string m_glsl_source_code; - VkShaderModule m_module; - std::vector m_spirv_blob; + const Anvil::BaseDevice* m_device_ptr; + std::string m_glsl_source_code; + VkShaderModule m_module; + std::vector m_spirv_blob; #ifdef ANVIL_LINK_WITH_GLSLANG std::string m_disassembly; diff --git a/include/wrappers/swapchain.h b/include/wrappers/swapchain.h index 36c51036..0ed91083 100644 --- a/include/wrappers/swapchain.h +++ b/include/wrappers/swapchain.h @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
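// The ShaderModule factory functions above now take a raw device pointer plus an optional MTSafety argument
// defaulting to INHERIT_FROM_PARENT_DEVICE. Compute-only sketch; device_ptr, spirv_blob_ptr and
// n_spirv_blob_bytes are assumed to be supplied by the caller, and unused stage entry-points are passed as
// nullptr, as the documentation above prescribes.
Anvil::ShaderModuleUniquePtr cs_module_ptr = Anvil::ShaderModule::create_from_spirv_blob(
    device_ptr,
    spirv_blob_ptr,
    n_spirv_blob_bytes,
    "main",   /* in_opt_cs_entrypoint_name */
    nullptr,  /* in_opt_fs_entrypoint_name */
    nullptr,  /* in_opt_gs_entrypoint_name */
    nullptr,  /* in_opt_tc_entrypoint_name */
    nullptr,  /* in_opt_te_entrypoint_name */
    nullptr); /* in_opt_vs_entrypoint_name */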
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -33,6 +33,7 @@ #define WRAPPERS_SWAPCHAIN_H #include "misc/debug_marker.h" +#include "misc/mt_safety.h" #include "misc/types.h" #include "wrappers/device.h" #include "wrappers/fence.h" @@ -43,35 +44,13 @@ namespace Anvil { /* Wrapper class for a Vulkan Swapchain */ - class Swapchain : public DebugMarkerSupportProvider + class Swapchain : public DebugMarkerSupportProvider, + public MTSafetySupportProvider { public: /* Public functions */ - /** Constructor. - * - * @param in_device_ptr Device to initialize the swapchain for. - * @param in_parent_surface_ptr Rendering surface the swapchain is to be created for. Must - * not be nullptr. - * @param in_window_ptr current window to create the swapchain for. Must not be nullptr. - * @param in_format Format to use for the swapchain image. - * @param in_present_mode Presentation mode to use for the swapchain. - * @param in_usage_flags Image usage flags to use for the swapchain. - * @param in_n_images Number of swapchain images to use for the swapchain. - * @param in_khr_swapchain_entrypoints VK_KHR_swapchain entrypoint container. - * @param in_flags Swapchain create flags to pass, when creating the swapchain. - * - */ - - static std::shared_ptr create(std::weak_ptr in_device_ptr, - std::shared_ptr in_parent_surface_ptr, - std::shared_ptr in_window_ptr, - VkFormat in_format, - VkPresentModeKHR in_present_mode, - VkImageUsageFlags in_usage_flags, - uint32_t in_n_images, - const ExtensionKHRSwapchainEntrypoints& in_khr_swapchain_entrypoints, - VkSwapchainCreateFlagsKHR in_flags = 0); + static Anvil::SwapchainUniquePtr create(Anvil::SwapchainCreateInfoUniquePtr in_create_info_ptr); /** Destructor. * @@ -80,6 +59,8 @@ namespace Anvil virtual ~Swapchain(); /** Acquires a new swapchain image. + * + * Can be used for both SGPUDevice and mGPU swapchains. * * @param in_semaphore_ptr Semaphore to set upon frame acquisition. May be nullptr (assuming the implications are understood!) * @param in_should_block Set to true, if you want to wait on a fence set by vkAcquireNextImage*KHR() functions @@ -90,28 +71,18 @@ namespace Anvil * * @return Index of the swapchain image that the commands should be submitted against. **/ - uint32_t acquire_image(std::shared_ptr in_opt_semaphore_ptr, - bool in_should_block = false); - - /* By default, swapchain instance will transparently destroy the underlying Vulkan swapchain handle, right before - * the window is closed. - * - * There are certain use cases where we want the order to be reversed (ie. the swapchain handle should be destroyed only after - * the window is closed). This function can be used to enable this behavior. - * - */ - void disable_destroy_swapchain_before_parent_window_closes_behavior(); - - /** Returns device instance which has been used to create the swapchain */ - std::weak_ptr get_device() const - { - return m_device_ptr; - } - - /** Returns flags used to create the swapchain. 
*/ - const VkSwapchainCreateFlagsKHR& get_flags() const + SwapchainOperationErrorCode acquire_image(Anvil::Semaphore* in_opt_semaphore_ptr, + uint32_t* out_result_index_ptr, + bool in_should_block = false); + SwapchainOperationErrorCode acquire_image(Anvil::Semaphore* in_opt_semaphore_ptr, + uint32_t in_n_mgpu_physical_devices, + const Anvil::PhysicalDevice* const* in_mgpu_physical_device_ptrs, + uint32_t* out_result_index_ptr, + bool in_should_block = false); + + const SwapchainCreateInfo* get_create_info_ptr() const { - return m_flags; + return m_create_info_ptr.get(); } /** Returns height of the swapchain, as specified at creation time */ @@ -120,16 +91,10 @@ namespace Anvil return m_size.height; } - /** Returns format used by swapchain images */ - VkFormat get_image_format() const - { - return m_image_format; - } - - /** Returns format used by swapchain image views. */ - VkFormat get_image_view_format() const + /** Return the actual number of swapchain images created. */ + uint32_t get_n_images() const { - return m_image_view_format; + return m_n_images; } /** Retrieves an Image instance associated with a swapchain image at index @@ -143,7 +108,7 @@ namespace Anvil * * @return As per summary. **/ - std::shared_ptr get_image(uint32_t in_n_swapchain_image) const; + Anvil::Image* get_image(uint32_t in_n_swapchain_image) const; /** Retrieves an Image View instance associated with a swapchain image at index * @param n_swapchain_image. @@ -156,7 +121,7 @@ namespace Anvil * * @return As per summary. **/ - std::shared_ptr get_image_view(uint32_t in_n_swapchain_image) const; + Anvil::ImageView* get_image_view(uint32_t in_n_swapchain_image) const; /* Returns index of the most recently acquired swapchain image. * @@ -167,18 +132,6 @@ namespace Anvil return m_last_acquired_image_index; } - /** Tells how many images the swap-chain encapsulates. */ - uint32_t get_n_images() const - { - return m_n_swapchain_images; - } - - /** Retrieves parent rendering surface. */ - std::shared_ptr get_rendering_surface() const - { - return m_parent_surface_ptr; - } - /** Retrieves a pointer to the raw Vulkan swapchain handle. */ const VkSwapchainKHR* get_swapchain_ptr() const { @@ -190,69 +143,82 @@ namespace Anvil return m_swapchain; } - /** Returns width of the swapchain, as specified at creation time */ + /** Returns width of the swapchain. */ uint32_t get_width() const { return m_size.width; } - /** Retrieves a window, to which the swapchain is bound. Note that under certain - * circumstances no window may be assigned. */ - std::shared_ptr get_window() const + /* Associates HDR metadata with one or more swapchains. + * + * Requires VK_EXT_hdr_metadata. + * + * @param in_n_swapchains Number of swapchains to update. + * @param in_swapchains_ptr At least @param in_n_swapchains swapchains to set HDR metadata for. All defined swapchains must have + * been created for the same Anvil::BaseDevice instance. + * @param in_hdr_metadata_ptr An array of @param in_n_swapchains HDR metadata descriptors to use. Must not be nullptr. 
+ */ + static void set_hdr_metadata(const uint32_t& in_n_swapchains, + Anvil::Swapchain** in_swapchains_ptr_ptr, + const Anvil::HdrMetadataEXT* in_metadata_items_ptr); + + void set_hdr_metadata(const Anvil::HdrMetadataEXT* in_metadata_ptr) + { + Anvil::Swapchain* this_ptr = this; + + return Anvil::Swapchain::set_hdr_metadata(1, /* in_n_swapchains */ + &this_ptr, + in_metadata_ptr); + } + + /* By default, swapchain instance will transparently destroy the underlying Vulkan swapchain handle, right before + * the window is closed. + * + * There are certain use cases where we want the order to be reversed (ie. the swapchain handle should be destroyed only after + * the window is closed). This behavior can be enabled by calling this function with @param in_value set to false. + */ + void set_should_destroy_swapchain_before_parent_window_closes(const bool& in_value) { - return m_window_ptr; + m_destroy_swapchain_before_parent_window_closes = in_value; + } + + const bool& should_destroy_swapchain_before_parent_window_closes() + { + return m_destroy_swapchain_before_parent_window_closes; } private: /* Private functions */ /* Constructor. Please see create() for specification */ - Swapchain(std::weak_ptr in_device_ptr, - std::shared_ptr in_parent_surface_ptr, - std::shared_ptr in_window_ptr, - VkFormat in_format, - VkPresentModeKHR in_present_mode, - VkImageUsageFlags in_usage_flags, - uint32_t in_n_images, - VkSwapchainCreateFlagsKHR in_flags, - const ExtensionKHRSwapchainEntrypoints& in_khr_swapchain_entrypoints); + Swapchain(Anvil::SwapchainCreateInfoUniquePtr in_create_info_ptr); Swapchain (const Swapchain&); Swapchain& operator=(const Swapchain&); void destroy_swapchain(); - void init (); + bool init (); void on_parent_window_about_to_close(); - void on_present_request_issued (); + void on_present_request_issued (Anvil::CallbackArgument* in_callback_raw_ptr); /* Private variables */ - bool m_destroy_swapchain_before_parent_window_closes; - std::weak_ptr m_device_ptr; - VkSwapchainCreateFlagsKHR m_flags; - std::shared_ptr m_image_available_fence_ptr; - VkFormat m_image_format; - std::vector > m_image_ptrs; - VkFormat m_image_view_format; - std::vector > m_image_view_ptrs; - uint32_t m_last_acquired_image_index; - uint32_t m_n_swapchain_images; - std::shared_ptr m_parent_surface_ptr; - VkPresentModeKHR m_present_mode; - VkSwapchainKHR m_swapchain; - std::shared_ptr m_window_ptr; - - VkExtent2D m_size; - - VkImageUsageFlagsVariable(m_usage_flags); + Anvil::SwapchainCreateInfoUniquePtr m_create_info_ptr; + Anvil::FenceUniquePtr m_image_available_fence_ptr; + uint32_t m_n_images; /* number of images created in the swapchain. */ + std::vector m_image_ptrs; + std::vector m_image_view_ptrs; + uint32_t m_last_acquired_image_index; + VkExtent2D m_size; + VkSwapchainKHR m_swapchain; - const ExtensionKHRSwapchainEntrypoints m_khr_swapchain_entrypoints; + bool m_destroy_swapchain_before_parent_window_closes; volatile uint64_t m_n_acquire_counter; volatile uint32_t m_n_acquire_counter_rounded; volatile uint64_t m_n_present_counter; - std::vector > m_observed_queues; + std::vector m_observed_queues; }; }; /* namespace Anvil */ diff --git a/src/misc/base_pipeline_create_info.cpp b/src/misc/base_pipeline_create_info.cpp new file mode 100644 index 00000000..93962f35 --- /dev/null +++ b/src/misc/base_pipeline_create_info.cpp @@ -0,0 +1,257 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
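// With the swapchain rework above, acquire_image() reports problems through a SwapchainOperationErrorCode
// and returns the image index via an out-parameter. Hedged sketch; the SUCCESS enumerator name is an
// assumption, and swapchain_ptr / acquire_semaphore_ptr are assumed to be created by the caller.
uint32_t swapchain_image_index = 0;
const auto acquire_result = swapchain_ptr->acquire_image(acquire_semaphore_ptr,
                                                         &swapchain_image_index,
                                                         false /* in_should_block */);

if (acquire_result == Anvil::SwapchainOperationErrorCode::SUCCESS) /* assumed enumerator */
{
    // Record & submit work targeting swapchain_image_index, then present.
}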
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#include "misc/base_pipeline_create_info.h" +#include "misc/descriptor_set_create_info.h" +#include "wrappers/descriptor_set_group.h" +#include + + +Anvil::BasePipelineCreateInfo::BasePipelineCreateInfo() +{ + /* Stub */ +} + +Anvil::BasePipelineCreateInfo::~BasePipelineCreateInfo() +{ + /* Stub */ +} + +bool Anvil::BasePipelineCreateInfo::add_specialization_constant(Anvil::ShaderStage in_shader_stage, + uint32_t in_constant_id, + uint32_t in_n_data_bytes, + const void* in_data_ptr) +{ + uint32_t data_buffer_size = 0; + bool result = false; + + if (in_n_data_bytes == 0) + { + anvil_assert(!(in_n_data_bytes == 0) ); + + goto end; + } + + if (in_data_ptr == nullptr) + { + anvil_assert(!(in_data_ptr == nullptr) ); + + goto end; + } + + /* Append specialization constant data and add a new descriptor. 
*/ + data_buffer_size = static_cast(m_specialization_constants_data_buffer.size() ); + + + anvil_assert(m_specialization_constants_map.find(in_shader_stage) != m_specialization_constants_map.end() ); + + m_specialization_constants_map[in_shader_stage].push_back( + SpecializationConstant( + in_constant_id, + in_n_data_bytes, + data_buffer_size) + ); + + m_specialization_constants_data_buffer.resize(data_buffer_size + in_n_data_bytes); + + + memcpy(&m_specialization_constants_data_buffer.at(data_buffer_size), + in_data_ptr, + in_n_data_bytes); + + /* All done */ + result = true; +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::BasePipelineCreateInfo::attach_push_constant_range(uint32_t in_offset, + uint32_t in_size, + Anvil::ShaderStageFlags in_stages) +{ + bool result = false; + + /* Retrieve pipeline's descriptor and add the specified push constant range */ + const auto new_descriptor = Anvil::PushConstantRange(in_offset, + in_size, + in_stages); + + if (std::find(m_push_constant_ranges.begin(), + m_push_constant_ranges.end(), + new_descriptor) != m_push_constant_ranges.end() ) + { + anvil_assert_fail(); + + goto end; + } + + m_push_constant_ranges.push_back(new_descriptor); + + /* All done */ + result = true; + +end: + return result; +} + +void Anvil::BasePipelineCreateInfo::copy_state_from(const BasePipelineCreateInfo* in_src_pipeline_create_info_ptr) +{ + m_base_pipeline_id = in_src_pipeline_create_info_ptr->m_base_pipeline_id; + + for (auto& current_ds_create_info_item_ptr : in_src_pipeline_create_info_ptr->m_ds_create_info_items) + { + m_ds_create_info_items.push_back( + Anvil::DescriptorSetCreateInfoUniquePtr( + new DescriptorSetCreateInfo(*current_ds_create_info_item_ptr.get() ) + ) + ); + } + + m_push_constant_ranges = in_src_pipeline_create_info_ptr->m_push_constant_ranges; + + m_create_flags = in_src_pipeline_create_info_ptr->m_create_flags; + m_shader_stages = in_src_pipeline_create_info_ptr->m_shader_stages; + + m_specialization_constants_data_buffer = in_src_pipeline_create_info_ptr->m_specialization_constants_data_buffer; + m_specialization_constants_map = in_src_pipeline_create_info_ptr->m_specialization_constants_map; +} + +bool Anvil::BasePipelineCreateInfo::get_shader_stage_properties(Anvil::ShaderStage in_shader_stage, + const ShaderModuleStageEntryPoint** out_opt_result_ptr_ptr) const +{ + bool result = false; + auto shader_stage_iterator = m_shader_stages.find(in_shader_stage); + + if (shader_stage_iterator != m_shader_stages.end() ) + { + if (out_opt_result_ptr_ptr != nullptr) + { + *out_opt_result_ptr_ptr = &shader_stage_iterator->second; + } + + result = true; + } + + return result; +} + +bool Anvil::BasePipelineCreateInfo::get_specialization_constants(Anvil::ShaderStage in_shader_stage, + const SpecializationConstants** out_opt_spec_constants_ptr, + const unsigned char** out_opt_spec_constants_data_buffer_ptr, + uint32_t *out_opt_spec_constants_data_buffer_size) const +{ + bool result = false; + const auto sc_map_iterator = m_specialization_constants_map.find(in_shader_stage); + + if (sc_map_iterator != m_specialization_constants_map.end() ) + { + if (out_opt_spec_constants_ptr != nullptr) + { + *out_opt_spec_constants_ptr = &sc_map_iterator->second; + } + + if (out_opt_spec_constants_data_buffer_ptr != nullptr) + { + *out_opt_spec_constants_data_buffer_ptr = (m_specialization_constants_data_buffer.size() > 0) ? 
&m_specialization_constants_data_buffer.at(0) + : nullptr; + } + + if (out_opt_spec_constants_data_buffer_size != nullptr) + *out_opt_spec_constants_data_buffer_size = m_specialization_constants_data_buffer.size(); + + result = true; + } + + return result; +} + +void Anvil::BasePipelineCreateInfo::init(const Anvil::PipelineCreateFlags& in_create_flags, + uint32_t in_n_shader_module_stage_entrypoints, + const ShaderModuleStageEntryPoint* in_shader_module_stage_entrypoint_ptrs, + const Anvil::PipelineID* in_opt_base_pipeline_id_ptr, + const std::vector* in_opt_ds_create_info_vec_ptr) +{ + m_base_pipeline_id = (in_opt_base_pipeline_id_ptr != nullptr) ? *in_opt_base_pipeline_id_ptr : UINT32_MAX; + m_create_flags = in_create_flags; + m_is_proxy = false; + + if (in_opt_ds_create_info_vec_ptr != nullptr) + { + set_descriptor_set_create_info(in_opt_ds_create_info_vec_ptr); + } + + init_shader_modules(in_n_shader_module_stage_entrypoints, + in_shader_module_stage_entrypoint_ptrs); +} + +void Anvil::BasePipelineCreateInfo::init_proxy() +{ + m_base_pipeline_id = UINT32_MAX; + m_create_flags = Anvil::PipelineCreateFlagBits::NONE; + m_is_proxy = true; +} + +void Anvil::BasePipelineCreateInfo::init_shader_modules(uint32_t in_n_shader_module_stage_entrypoints, + const Anvil::ShaderModuleStageEntryPoint* in_shader_module_stage_entrypoint_ptrs) +{ + for (uint32_t n_shader_module_stage_entrypoint = 0; + n_shader_module_stage_entrypoint < in_n_shader_module_stage_entrypoints; + ++n_shader_module_stage_entrypoint) + { + const auto& shader_module_stage_entrypoint = in_shader_module_stage_entrypoint_ptrs[n_shader_module_stage_entrypoint]; + + if (shader_module_stage_entrypoint.stage == Anvil::ShaderStage::UNKNOWN) + { + continue; + } + + m_shader_stages [shader_module_stage_entrypoint.stage] = shader_module_stage_entrypoint; + m_specialization_constants_map[shader_module_stage_entrypoint.stage] = SpecializationConstants(); + } +} + +void Anvil::BasePipelineCreateInfo::set_descriptor_set_create_info(const std::vector* in_ds_create_info_vec_ptr) +{ + const uint32_t n_descriptor_sets = static_cast(in_ds_create_info_vec_ptr->size() ); + + for (uint32_t n_descriptor_set = 0; + n_descriptor_set < n_descriptor_sets; + ++n_descriptor_set) + { + const auto reference_ds_create_info_ptr = in_ds_create_info_vec_ptr->at(n_descriptor_set); + + if (reference_ds_create_info_ptr == nullptr) + { + m_ds_create_info_items.push_back( + Anvil::DescriptorSetCreateInfoUniquePtr() + ); + + continue; + } + + m_ds_create_info_items.push_back( + Anvil::DescriptorSetCreateInfoUniquePtr( + new Anvil::DescriptorSetCreateInfo(*reference_ds_create_info_ptr) + ) + ); + } +} \ No newline at end of file diff --git a/src/misc/base_pipeline_manager.cpp b/src/misc/base_pipeline_manager.cpp index 73ddda85..8d3e1b50 100644 --- a/src/misc/base_pipeline_manager.cpp +++ b/src/misc/base_pipeline_manager.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -20,6 +20,7 @@ // THE SOFTWARE. 
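// The create-info implementation above moves specialization constants and push-constant ranges onto the
// BasePipelineCreateInfo object itself. Hedged sketch; the FRAGMENT / FRAGMENT_BIT enumerator names and the
// implicit FlagBits-to-Flags conversion are assumptions, and the targeted stage must already have a
// shader-module entry-point registered on pipeline_create_info_ptr (see init_shader_modules above).
const uint32_t msaa_sample_count = 4;

pipeline_create_info_ptr->add_specialization_constant(Anvil::ShaderStage::FRAGMENT, /* assumed enumerator */
                                                       0, /* in_constant_id */
                                                       sizeof(msaa_sample_count),
                                                      &msaa_sample_count);
pipeline_create_info_ptr->attach_push_constant_range (0,  /* in_offset */
                                                       16, /* in_size   */
                                                       Anvil::ShaderStageFlagBits::FRAGMENT_BIT); /* assumed */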
// +#include "misc/base_pipeline_create_info.h" #include "misc/base_pipeline_manager.h" #include "misc/debug.h" #include "wrappers/descriptor_set_group.h" @@ -30,16 +31,20 @@ #include /** Please see header for specification */ -Anvil::BasePipelineManager::BasePipelineManager(std::weak_ptr in_device_ptr, - bool in_use_pipeline_cache, - std::shared_ptr in_pipeline_cache_to_reuse_ptr) - :m_device_ptr (in_device_ptr), - m_pipeline_counter(0) +Anvil::BasePipelineManager::BasePipelineManager(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe, + bool in_use_pipeline_cache, + Anvil::PipelineCache* in_pipeline_cache_to_reuse_ptr) + :CallbacksSupportProvider(BASE_PIPELINE_MANAGER_CALLBACK_ID_COUNT), + MTSafetySupportProvider (in_mt_safe), + m_device_ptr (in_device_ptr), + m_pipeline_cache_ptr (nullptr), + m_pipeline_counter (0) { - anvil_assert(!in_use_pipeline_cache && in_pipeline_cache_to_reuse_ptr == nullptr || - in_use_pipeline_cache); + anvil_assert((!in_use_pipeline_cache && in_pipeline_cache_to_reuse_ptr == nullptr) || + in_use_pipeline_cache); - m_pipeline_layout_manager_ptr = in_device_ptr.lock()->get_pipeline_layout_manager(); + m_pipeline_layout_manager_ptr = in_device_ptr->get_pipeline_layout_manager(); anvil_assert(m_pipeline_layout_manager_ptr != nullptr); if (in_pipeline_cache_to_reuse_ptr != nullptr) @@ -51,7 +56,12 @@ Anvil::BasePipelineManager::BasePipelineManager(std::weak_ptr { if (in_use_pipeline_cache) { - m_pipeline_cache_ptr = Anvil::PipelineCache::create(m_device_ptr); + m_pipeline_cache_owned_ptr = Anvil::PipelineCache::create(m_device_ptr, + in_mt_safe, + 0, /* in_initial_data_size */ + nullptr); /* in_initial_data */ + + m_pipeline_cache_ptr = m_pipeline_cache_owned_ptr.get(); } m_use_pipeline_cache = in_use_pipeline_cache; @@ -61,9 +71,7 @@ Anvil::BasePipelineManager::BasePipelineManager(std::weak_ptr /** Please see header for specification */ Anvil::BasePipelineManager::~BasePipelineManager() { - anvil_assert(m_pipelines.size() == 0); - - m_pipeline_layout_manager_ptr.reset(); + anvil_assert(m_baked_pipelines.size() == 0); } @@ -71,301 +79,103 @@ Anvil::BasePipelineManager::~BasePipelineManager() * * The function does NOT release the layout object, since it's owned by the Pipeline Layout manager. 
*s*/ -void Anvil::BasePipelineManager::Pipeline::release_vulkan_objects() +void Anvil::BasePipelineManager::Pipeline::release_pipeline() { if (baked_pipeline != VK_NULL_HANDLE) { - std::shared_ptr device_locked_ptr(device_ptr); - - vkDestroyPipeline(device_locked_ptr->get_device_vk(), - baked_pipeline, - nullptr /* pAllocator */); + device_ptr->get_pipeline_cache()->lock(); + lock(); + { + Anvil::Vulkan::vkDestroyPipeline(device_ptr->get_device_vk(), + baked_pipeline, + nullptr /* pAllocator */); + } + unlock(); + device_ptr->get_pipeline_cache()->unlock(); baked_pipeline = VK_NULL_HANDLE; } } -/* Please see header for specification */ -bool Anvil::BasePipelineManager::add_derivative_pipeline_from_sibling_pipeline(bool in_disable_optimizations, - bool in_allow_derivatives, - uint32_t in_n_shader_module_stage_entrypoints, - const ShaderModuleStageEntryPoint* in_shader_module_stage_entrypoint_ptrs, - PipelineID in_base_pipeline_id, - PipelineID* out_pipeline_id_ptr) -{ - std::shared_ptr base_pipeline_ptr; - PipelineID new_pipeline_id = 0; - std::shared_ptr new_pipeline_ptr; - bool result = false; - - /* Retrieve base pipeline's descriptor */ - if (m_pipelines.size() <= in_base_pipeline_id) - { - anvil_assert(!(m_pipelines.size() <= in_base_pipeline_id) ); - - goto end; - } - else - { - base_pipeline_ptr = m_pipelines.at(in_base_pipeline_id); - - anvil_assert(base_pipeline_ptr != nullptr); - } - - /* Create & store the new descriptor */ - anvil_assert(base_pipeline_ptr->allow_derivatives); - - new_pipeline_id = (m_pipeline_counter++); - new_pipeline_ptr = std::shared_ptr(new Pipeline(m_device_ptr, - in_disable_optimizations, - in_allow_derivatives, - base_pipeline_ptr, - in_n_shader_module_stage_entrypoints, - in_shader_module_stage_entrypoint_ptrs) ); - - anvil_assert(m_pipelines.find(new_pipeline_id) == m_pipelines.end() ); - - m_pipelines[new_pipeline_id] = new_pipeline_ptr; - *out_pipeline_id_ptr = new_pipeline_id; - - /* All done */ - result = true; -end: - return result; -} /* Please see header for specification */ -bool Anvil::BasePipelineManager::add_derivative_pipeline_from_pipeline(bool in_disable_optimizations, - bool in_allow_derivatives, - uint32_t in_n_shader_module_stage_entrypoints, - const ShaderModuleStageEntryPoint* in_shader_module_stage_entrypoint_ptrs, - VkPipeline in_base_pipeline, - PipelineID* out_pipeline_id_ptr) +bool Anvil::BasePipelineManager::add_pipeline(Anvil::BasePipelineCreateInfoUniquePtr in_pipeline_create_info_ptr, + PipelineID* out_pipeline_id_ptr) { - PipelineID new_pipeline_id = 0; - std::shared_ptr new_pipeline_ptr; - bool result = false; + const Anvil::PipelineID base_pipeline_id = in_pipeline_create_info_ptr->get_base_pipeline_id(); + auto callback_arg = Anvil::OnNewPipelineCreatedCallbackData(UINT32_MAX); + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + PipelineID new_pipeline_id = 0; + std::unique_ptr new_pipeline_ptr; + bool result = false; - if (in_base_pipeline == VK_NULL_HANDLE) + if (mutex_ptr != nullptr) { - anvil_assert(!(in_base_pipeline != VK_NULL_HANDLE)); - - goto end; + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); } - /* Create & store the new descriptor */ - new_pipeline_id = (m_pipeline_counter++); - new_pipeline_ptr = std::shared_ptr(new Pipeline(m_device_ptr, - in_disable_optimizations, - in_allow_derivatives, - in_base_pipeline, - in_n_shader_module_stage_entrypoints, - in_shader_module_stage_entrypoint_ptrs) ); - - *out_pipeline_id_ptr = new_pipeline_id; - m_pipelines[new_pipeline_id] = 
new_pipeline_ptr; - - /* All done */ - result = true; -end: - return result; -} - -/* Please see header for specification */ -bool Anvil::BasePipelineManager::add_proxy_pipeline(PipelineID* out_pipeline_id_ptr) -{ - PipelineID new_pipeline_id = 0; - std::shared_ptr new_pipeline_ptr; - - /* Create & store the new descriptor */ - new_pipeline_id = (m_pipeline_counter++); - new_pipeline_ptr = std::shared_ptr(new Pipeline(m_device_ptr, - false, /* in_disable_optimizations */ - false, /* in_allow_derivatives */ - 0, /* in_n_shaders */ - nullptr, /* in_shaders */ - true) ); /* in_is_proxy */ - - *out_pipeline_id_ptr = new_pipeline_id; - m_pipelines[new_pipeline_id] = new_pipeline_ptr; - - /* All done */ - return true; -} - -/* Please see header for specification */ -bool Anvil::BasePipelineManager::add_regular_pipeline(bool in_disable_optimizations, - bool in_allow_derivatives, - uint32_t in_n_shader_module_stage_entrypoints, - const ShaderModuleStageEntryPoint* in_shader_module_stage_entrypoint_ptrs, - PipelineID* out_pipeline_id_ptr) -{ - PipelineID new_pipeline_id = 0; - std::shared_ptr new_pipeline_ptr; - - /* Create & store the new descriptor */ - new_pipeline_id = (m_pipeline_counter++); - new_pipeline_ptr = std::shared_ptr(new Pipeline(m_device_ptr, - in_disable_optimizations, - in_allow_derivatives, - in_n_shader_module_stage_entrypoints, - in_shader_module_stage_entrypoint_ptrs, - false /* in_is_proxy */) ); - - *out_pipeline_id_ptr = new_pipeline_id; - m_pipelines[new_pipeline_id] = new_pipeline_ptr; - - /* All done */ - return true; -} - - -bool Anvil::BasePipelineManager::add_specialization_constant_to_pipeline(PipelineID in_pipeline_id, - ShaderIndex in_shader_index, - uint32_t in_constant_id, - uint32_t in_n_data_bytes, - const void* in_data_ptr) -{ - Pipelines::iterator pipeline_iterator; - std::shared_ptr pipeline_ptr; - uint32_t data_buffer_size = 0; - bool result = false; - - if (in_n_data_bytes == 0) - { - anvil_assert(!(in_n_data_bytes == 0) ); - - goto end; - } - - if (in_data_ptr == nullptr) - { - anvil_assert(!(in_data_ptr == nullptr) ); - - goto end; - } - - /* Retrieve the pipeline's descriptor */ - pipeline_iterator = m_pipelines.find(in_pipeline_id); - - if (pipeline_iterator == m_pipelines.end() ) + if (base_pipeline_id != UINT32_MAX) { - anvil_assert(pipeline_iterator != m_pipelines.end() ) + Anvil::BasePipelineCreateInfo* base_pipeline_create_info_ptr = nullptr; + auto base_pipeline_iterator = m_baked_pipelines.find(base_pipeline_id); - goto end; - } - else - { - pipeline_ptr = pipeline_iterator->second; - } - - if (pipeline_ptr->is_proxy) - { - anvil_assert(!pipeline_ptr->is_proxy); - - goto end; - } - - /* Append specialization constant data and add a new descriptor. 
*/ - data_buffer_size = static_cast(pipeline_ptr->specialization_constant_data_buffer.size() ); - - - anvil_assert(pipeline_ptr->specialization_constants_map.find(in_shader_index) != pipeline_ptr->specialization_constants_map.end() ); - - pipeline_ptr->specialization_constants_map[in_shader_index].push_back(SpecializationConstant(in_constant_id, - in_n_data_bytes, - data_buffer_size)); - - pipeline_ptr->specialization_constant_data_buffer.resize(data_buffer_size + in_n_data_bytes); - - - memcpy(&pipeline_ptr->specialization_constant_data_buffer.at(data_buffer_size), - in_data_ptr, - in_n_data_bytes); - - /* All done */ - result = true; -end: - return result; -} - -/* Please see header for specification */ -bool Anvil::BasePipelineManager::set_pipeline_dsg(PipelineID in_pipeline_id, - std::shared_ptr in_dsg_ptr) -{ - std::shared_ptr pipeline_ptr; - bool result = false; - - if (in_dsg_ptr == nullptr) - { - anvil_assert(!(in_dsg_ptr == nullptr) ); - - goto end; - } - - /* Retrieve pipeline's descriptor and attach the specified DSG */ - if (m_pipelines.find(in_pipeline_id) == m_pipelines.end() ) - { - anvil_assert(!(m_pipelines.find(in_pipeline_id) == m_pipelines.end() )); + if (base_pipeline_iterator == m_baked_pipelines.end() ) + { + base_pipeline_iterator = m_outstanding_pipelines.find(base_pipeline_id); - goto end; - } - else - { - pipeline_ptr = m_pipelines.at(in_pipeline_id); - } + if (base_pipeline_iterator != m_outstanding_pipelines.end() ) + { + base_pipeline_create_info_ptr = base_pipeline_iterator->second->pipeline_create_info_ptr.get(); + } + } + else + { + base_pipeline_create_info_ptr = base_pipeline_iterator->second->pipeline_create_info_ptr.get(); + } - /* Make sure the DSG has not already been attached */ - if (pipeline_ptr->dsg_ptr == in_dsg_ptr) - { - anvil_assert_fail(); + if (base_pipeline_create_info_ptr != nullptr) + { + anvil_assert(base_pipeline_create_info_ptr->allows_derivatives() ); + } + else + { + /* Base pipeline ID is invalid */ + anvil_assert(base_pipeline_create_info_ptr != nullptr); - goto end; + goto end; + } } - /* If we reached this place, we can update the DSG */ - pipeline_ptr->dirty = true; - pipeline_ptr->dsg_ptr = in_dsg_ptr; - pipeline_ptr->layout_dirty = true; - - /* All done */ - result = true; -end: - return result; -} + /* Create & store the new descriptor */ + new_pipeline_id = (m_pipeline_counter.fetch_add(1) ); -/* Please see header for specification */ -bool Anvil::BasePipelineManager::attach_push_constant_range_to_pipeline(PipelineID in_pipeline_id, - uint32_t in_offset, - uint32_t in_size, - VkShaderStageFlags in_stages) -{ - const auto new_descriptor = Anvil::PushConstantRange(in_offset, - in_size, - in_stages); - std::shared_ptr pipeline_ptr; - bool result = false; + /* NOTE: in_pipeline_create_info_ptr becomes NULL after the call below */ + new_pipeline_ptr.reset( + new Pipeline( + m_device_ptr, + std::move(in_pipeline_create_info_ptr), + is_mt_safe() ) + ); - /* Retrieve pipeline's descriptor and add the specified push constant range */ - if (m_pipelines.find(in_pipeline_id) == m_pipelines.end() ) + if (new_pipeline_ptr->pipeline_create_info_ptr->is_proxy() ) { - anvil_assert(!(m_pipelines.find(in_pipeline_id) == m_pipelines.end())); - - goto end; + m_baked_pipelines[new_pipeline_id] = std::move(new_pipeline_ptr); } else { - pipeline_ptr = m_pipelines.at(in_pipeline_id); + m_outstanding_pipelines[new_pipeline_id] = std::move(new_pipeline_ptr); } - pipeline_ptr->dirty = true; - pipeline_ptr->layout_dirty = true; + *out_pipeline_id_ptr = 
new_pipeline_id; - anvil_assert(std::find(pipeline_ptr->push_constant_ranges.begin(), - pipeline_ptr->push_constant_ranges.end(), - new_descriptor) == pipeline_ptr->push_constant_ranges.end()); + /* Inform subscribers about the new pipeline. */ + callback_arg.new_pipeline_id = new_pipeline_id; - pipeline_ptr->push_constant_ranges.push_back(new_descriptor); + callback(BASE_PIPELINE_MANAGER_CALLBACK_ID_ON_NEW_PIPELINE_CREATED, + &callback_arg); /* All done */ result = true; @@ -376,10 +186,10 @@ bool Anvil::BasePipelineManager::attach_push_constant_range_to_pipeline(Pipeline /* Please see header for specification */ void Anvil::BasePipelineManager::bake_specialization_info_vk(const SpecializationConstants& in_specialization_constants, const unsigned char* in_specialization_constant_data_ptr, + const uint32_t in_specialization_constant_data_size, std::vector* out_specialization_map_entry_vk_vector_ptr, - VkSpecializationInfo* out_specialization_info_ptr) + VkSpecializationInfo* out_specialization_info_ptr) const { - uint32_t n_specialization_constant_bytes = 0; uint32_t n_specialization_constants = 0; /* Prepare specialization info descriptor */ @@ -399,12 +209,10 @@ void Anvil::BasePipelineManager::bake_specialization_info_vk(const Specializatio new_entry.offset = current_specialization_constant.start_offset; new_entry.size = current_specialization_constant.n_bytes; - n_specialization_constant_bytes += current_specialization_constant.n_bytes; - out_specialization_map_entry_vk_vector_ptr->push_back(new_entry); } - out_specialization_info_ptr->dataSize = n_specialization_constant_bytes; + out_specialization_info_ptr->dataSize = in_specialization_constant_data_size; out_specialization_info_ptr->mapEntryCount = n_specialization_constants; out_specialization_info_ptr->pData = (n_specialization_constants > 0) ? 
in_specialization_constant_data_ptr : nullptr; @@ -415,49 +223,37 @@ void Anvil::BasePipelineManager::bake_specialization_info_vk(const Specializatio /* Please see header for specification */ bool Anvil::BasePipelineManager::delete_pipeline(PipelineID in_pipeline_id) { - auto pipeline_iterator = m_pipelines.find(in_pipeline_id); bool result = false; - if (pipeline_iterator == m_pipelines.end() ) { - goto end; - } - - m_pipelines.erase(pipeline_iterator); - - /* All done */ - result = true; -end: - return result; -} + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + Pipelines::iterator pipeline_iterator; -/* Please see header for specification */ -bool Anvil::BasePipelineManager::get_general_pipeline_properties(PipelineID in_pipeline_id, - bool* out_opt_has_optimizations_disabled_ptr, - bool* out_opt_allows_derivatives_ptr, - bool* out_opt_is_a_derivative_ptr) const -{ - auto pipeline_iterator = m_pipelines.find(in_pipeline_id); - bool result = false; + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } - if (pipeline_iterator == m_pipelines.end() ) - { - goto end; - } + pipeline_iterator = m_baked_pipelines.find(in_pipeline_id); - if (out_opt_has_optimizations_disabled_ptr != nullptr) - { - *out_opt_has_optimizations_disabled_ptr = pipeline_iterator->second->disable_optimizations; - } + if (pipeline_iterator != m_baked_pipelines.end() ) + { + m_baked_pipelines.erase(pipeline_iterator); + } + else + { + pipeline_iterator = m_outstanding_pipelines.find(in_pipeline_id); - if (out_opt_allows_derivatives_ptr != nullptr) - { - *out_opt_allows_derivatives_ptr = pipeline_iterator->second->allow_derivatives; - } + if (pipeline_iterator == m_outstanding_pipelines.end() ) + { + goto end; + } - if (out_opt_is_a_derivative_ptr != nullptr) - { - *out_opt_is_a_derivative_ptr = pipeline_iterator->second->is_derivative; + m_outstanding_pipelines.erase(pipeline_iterator); + } } /* All done */ @@ -469,95 +265,127 @@ bool Anvil::BasePipelineManager::get_general_pipeline_properties(PipelineID in_p /* Please see header for specification */ VkPipeline Anvil::BasePipelineManager::get_pipeline(PipelineID in_pipeline_id) { - std::shared_ptr pipeline_ptr; - VkPipeline result = VK_NULL_HANDLE; + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + Pipelines::const_iterator pipeline_iterator; + Pipeline* pipeline_ptr = nullptr; + VkPipeline result = VK_NULL_HANDLE; - if (m_pipelines.find(in_pipeline_id) == m_pipelines.end() ) + if (mutex_ptr != nullptr) { - anvil_assert(!(m_pipelines.find(in_pipeline_id) == m_pipelines.end()) ); - - goto end; + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); } - else + + if (m_outstanding_pipelines.size() > 0) { - pipeline_ptr = m_pipelines.at(in_pipeline_id); + bake(); } - if (pipeline_ptr->is_proxy) + pipeline_iterator = m_baked_pipelines.find(in_pipeline_id); + + if (pipeline_iterator == m_baked_pipelines.end() ) { - anvil_assert(!pipeline_ptr->is_proxy); + anvil_assert(!(pipeline_iterator == m_baked_pipelines.end()) ); goto end; } - if (pipeline_ptr->dirty) + pipeline_ptr = pipeline_iterator->second.get(); + + if (pipeline_ptr->pipeline_create_info_ptr->is_proxy() ) { - bake(); + anvil_assert(!pipeline_ptr->pipeline_create_info_ptr->is_proxy() ); - anvil_assert( pipeline_ptr->baked_pipeline != VK_NULL_HANDLE); - anvil_assert(!pipeline_ptr->dirty); + goto end; } - result = m_pipelines.at(in_pipeline_id)->baked_pipeline; + result = pipeline_ptr->baked_pipeline; + anvil_assert(result != VK_NULL_HANDLE); 
end: return result; } -/* Please see header for specification */ -bool Anvil::BasePipelineManager::get_pipeline_id_at_index(uint32_t in_n_pipeline, - PipelineID* out_pipeline_id_ptr) const +const Anvil::BasePipelineCreateInfo* Anvil::BasePipelineManager::get_pipeline_create_info(PipelineID in_pipeline_id) const { - Pipelines::const_iterator current_pipeline_it = m_pipelines.cbegin(); - uint32_t n_current_pipeline = 0; - bool result = false; + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + Pipelines::const_iterator pipeline_iterator; + Pipeline* pipeline_ptr = nullptr; + const Anvil::BasePipelineCreateInfo* result_ptr = nullptr; - if (m_pipelines.size() <= in_n_pipeline) + if (mutex_ptr != nullptr) { - goto end; + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); } - while (n_current_pipeline != in_n_pipeline) + pipeline_iterator = m_baked_pipelines.find(in_pipeline_id); + + if (pipeline_iterator == m_baked_pipelines.end() ) { - current_pipeline_it++; - n_current_pipeline ++; + pipeline_iterator = m_outstanding_pipelines.find(in_pipeline_id); + + if (pipeline_iterator == m_outstanding_pipelines.end() ) + { + anvil_assert(!(pipeline_iterator == m_outstanding_pipelines.end() )); + + goto end; + } } - *out_pipeline_id_ptr = current_pipeline_it->first; - result = true; + pipeline_ptr = pipeline_iterator->second.get(); + result_ptr = pipeline_ptr->pipeline_create_info_ptr.get(); + end: - return result; + return result_ptr; } /* Please see header for specification */ -std::shared_ptr Anvil::BasePipelineManager::get_pipeline_layout(PipelineID in_pipeline_id) +Anvil::PipelineLayout* Anvil::BasePipelineManager::get_pipeline_layout(PipelineID in_pipeline_id) { - std::shared_ptr pipeline_ptr; - std::shared_ptr result_ptr; + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + Pipelines::iterator pipeline_iterator; + Pipeline* pipeline_ptr = nullptr; + Anvil::PipelineLayout* result_ptr = nullptr; - if (m_pipelines.find(in_pipeline_id) == m_pipelines.end() ) + if (mutex_ptr != nullptr) { - anvil_assert(!(m_pipelines.find(in_pipeline_id) == m_pipelines.end()) ); - - goto end; + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); } - else + + pipeline_iterator = m_baked_pipelines.find(in_pipeline_id); + + if (pipeline_iterator == m_baked_pipelines.end() ) { - pipeline_ptr = m_pipelines.at(in_pipeline_id); + pipeline_iterator = m_outstanding_pipelines.find(in_pipeline_id); + + if (pipeline_iterator == m_outstanding_pipelines.end() ) + { + anvil_assert(!(pipeline_iterator == m_outstanding_pipelines.end() )); + + goto end; + } } - if (pipeline_ptr->is_proxy) + pipeline_ptr = pipeline_iterator->second.get(); + + if (pipeline_ptr->pipeline_create_info_ptr->is_proxy() ) { - anvil_assert(!pipeline_ptr->is_proxy); + anvil_assert(!pipeline_ptr->pipeline_create_info_ptr->is_proxy() ); goto end; } - if (pipeline_ptr->layout_dirty) + if (pipeline_ptr->layout_ptr == nullptr) { - pipeline_ptr->layout_ptr.reset(); - - if (!m_pipeline_layout_manager_ptr->get_layout(pipeline_ptr->dsg_ptr, - pipeline_ptr->push_constant_ranges, + if (!m_pipeline_layout_manager_ptr->get_layout(pipeline_ptr->pipeline_create_info_ptr->get_ds_create_info_items(), + pipeline_ptr->pipeline_create_info_ptr->get_push_constant_ranges(), &pipeline_ptr->layout_ptr) ) { anvil_assert_fail(); @@ -565,71 +393,214 @@ std::shared_ptr Anvil::BasePipelineManager::get_pipeline_ goto end; } - pipeline_ptr->layout_dirty = false; + if (pipeline_ptr->layout_ptr == nullptr) + { + anvil_assert(!(pipeline_ptr->layout_ptr == 
nullptr) ); + + goto end; + } } - result_ptr = pipeline_ptr->layout_ptr; + result_ptr = pipeline_ptr->layout_ptr.get(); end: return result_ptr; } /* Please see header for specification */ -bool Anvil::BasePipelineManager::get_shader_stage_properties(PipelineID in_pipeline_id, - Anvil::ShaderStage in_shader_stage, - ShaderModuleStageEntryPoint* out_opt_result_ptr) const +bool Anvil::BasePipelineManager::get_shader_info(PipelineID in_pipeline_id, + Anvil::ShaderStage in_shader_stage, + Anvil::ShaderInfoType in_info_type, + std::vector* out_data_ptr) { - auto pipeline_iterator = m_pipelines.find(in_pipeline_id); - bool result = false; + Anvil::ExtensionAMDShaderInfoEntrypoints entrypoints = m_device_ptr->get_extension_amd_shader_info_entrypoints(); + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + Pipelines::const_iterator pipeline_iterator; + Pipeline* pipeline_ptr = nullptr; + size_t out_data_size = out_data_ptr->size(); + bool result = false; + const auto shader_stage_vk = Anvil::Utils::get_shader_stage_flag_bits_from_shader_stage(in_shader_stage); + VkShaderInfoTypeAMD vk_info_type; + VkResult vk_result; - if (pipeline_iterator == m_pipelines.end() ) + if (entrypoints.vkGetShaderInfoAMD == nullptr) { - anvil_assert(!(pipeline_iterator == m_pipelines.end()) ); + anvil_assert(!(entrypoints.vkGetShaderInfoAMD == nullptr)); goto end; } - for (const auto& current_stage : pipeline_iterator->second->shader_stages) + if (mutex_ptr != nullptr) { - if (current_stage.stage == in_shader_stage) + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } + + if (m_outstanding_pipelines.size() > 0) + { + bake(); + } + + pipeline_iterator = m_baked_pipelines.find(in_pipeline_id); + if (pipeline_iterator == m_baked_pipelines.end()) + { + anvil_assert(!(pipeline_iterator == m_baked_pipelines.end())); + + goto end; + } + + pipeline_ptr = pipeline_iterator->second.get(); + + if (pipeline_ptr->baked_pipeline == VK_NULL_HANDLE) + { + bake(); + + anvil_assert(!pipeline_ptr->baked_pipeline != 0); + } + + switch (in_info_type) + { + case ShaderInfoType::BINARY: { - if (out_opt_result_ptr != nullptr) - { - *out_opt_result_ptr = current_stage; - } + vk_info_type = VK_SHADER_INFO_TYPE_BINARY_AMD; + + break; + } - result = true; + case ShaderInfoType::DISASSEMBLY: + { + vk_info_type = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD; break; } + + default: + { + anvil_assert(!"Unknown shader info type"); + + goto end; + } } - /* All done */ + if (out_data_size == 0) + { + vk_result = entrypoints.vkGetShaderInfoAMD(m_device_ptr->get_device_vk(), + pipeline_ptr->baked_pipeline, + static_cast(shader_stage_vk), + vk_info_type, + &out_data_size, + nullptr); + + if (vk_result == VK_ERROR_FEATURE_NOT_PRESENT) + { + goto end; + } + + if (!is_vk_call_successful(vk_result) || + out_data_size == 0) + { + goto end; + } + + out_data_ptr->resize(out_data_size); + } + + vk_result = entrypoints.vkGetShaderInfoAMD(m_device_ptr->get_device_vk(), + pipeline_ptr->baked_pipeline, + static_cast(shader_stage_vk), + vk_info_type, + &out_data_size, + &(*out_data_ptr).at(0)); + + if (vk_result == VK_ERROR_FEATURE_NOT_PRESENT) + { + goto end; + } + + if (!is_vk_call_successful(vk_result) || + out_data_size == 0) + { + goto end; + } + + result = true; end: return result; } /* Please see header for specification */ -bool Anvil::BasePipelineManager::set_pipeline_bakeability(PipelineID in_pipeline_id, - bool in_bakeable) +bool Anvil::BasePipelineManager::get_shader_statistics(PipelineID in_pipeline_id, + Anvil::ShaderStage in_shader_stage, 
+ VkShaderStatisticsInfoAMD* out_shader_statistics_ptr) { - std::shared_ptr pipeline_ptr; - bool result = false; + Anvil::ExtensionAMDShaderInfoEntrypoints entrypoints = m_device_ptr->get_extension_amd_shader_info_entrypoints(); + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + Pipelines::const_iterator pipeline_iterator; + Pipeline* pipeline_ptr = nullptr; + bool result = false; + const auto shader_stage_vk = Anvil::Utils::get_shader_stage_flag_bits_from_shader_stage(in_shader_stage); + size_t shader_statistics_size = sizeof(VkShaderStatisticsInfoAMD); + VkResult vk_result; - if (m_pipelines.find(in_pipeline_id) == m_pipelines.end() ) + if (entrypoints.vkGetShaderInfoAMD == nullptr) { - anvil_assert(!(m_pipelines.find(in_pipeline_id) == m_pipelines.end()) ); + anvil_assert(!(entrypoints.vkGetShaderInfoAMD == nullptr)); goto end; } - else + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } + + if (m_outstanding_pipelines.size() > 0) + { + bake(); + } + + pipeline_iterator = m_baked_pipelines.find(in_pipeline_id); + if (pipeline_iterator == m_baked_pipelines.end()) { - pipeline_ptr = m_pipelines.at(in_pipeline_id); + anvil_assert(!(pipeline_iterator == m_baked_pipelines.end())); + + goto end; } - pipeline_ptr->is_bakeable = in_bakeable; + pipeline_ptr = pipeline_iterator->second.get(); + + if (pipeline_ptr->baked_pipeline == VK_NULL_HANDLE) + { + bake(); + + anvil_assert(!pipeline_ptr->baked_pipeline != 0); + } + + vk_result = entrypoints.vkGetShaderInfoAMD(m_device_ptr->get_device_vk(), + pipeline_ptr->baked_pipeline, + static_cast(shader_stage_vk), + VK_SHADER_INFO_TYPE_STATISTICS_AMD, + &shader_statistics_size, + out_shader_statistics_ptr); + + if (vk_result == VK_ERROR_FEATURE_NOT_PRESENT) + { + goto end; + } + + if (!is_vk_call_successful(vk_result) || + shader_statistics_size != sizeof(VkShaderStatisticsInfoAMD)) + { + goto end; + } result = true; + end: return result; } diff --git a/src/misc/buffer_create_info.cpp b/src/misc/buffer_create_info.cpp new file mode 100644 index 00000000..d2012323 --- /dev/null +++ b/src/misc/buffer_create_info.cpp @@ -0,0 +1,223 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
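
A rough usage sketch (not part of the patch) of the reworked pipeline-manager flow above: add_pipeline() now consumes a create-info object and baking is deferred until get_pipeline() or one of the shader-info queries is called. The ComputePipelineManager instance and the ready-made ShaderModuleStageEntryPoint are assumed here, and ComputePipelineCreateInfo::create() is the factory introduced later in this patch; whether the derived manager also exposes a typed add_pipeline() overload is not shown in this hunk.

#include "misc/compute_pipeline_create_info.h"

void add_and_bake_compute_pipeline(Anvil::ComputePipelineManager*            in_pipeline_manager_ptr,
                                   const Anvil::PipelineCreateFlags&         in_create_flags,
                                   const Anvil::ShaderModuleStageEntryPoint& in_cs_entrypoint)
{
    Anvil::PipelineID pipeline_id = UINT32_MAX;

    /* Describe the pipeline up-front .. */
    auto create_info_ptr = Anvil::ComputePipelineCreateInfo::create(in_create_flags,
                                                                    in_cs_entrypoint,
                                                                    nullptr); /* in_opt_base_pipeline_id_ptr */

    /* .. and hand ownership over to the manager. At this point the pipeline is merely
     * registered as "outstanding"; no VkPipeline object exists yet. */
    in_pipeline_manager_ptr->add_pipeline(std::move(create_info_ptr),
                                         &pipeline_id);

    /* get_pipeline() bakes all outstanding pipelines before returning the handle. */
    VkPipeline             pipeline_vk = in_pipeline_manager_ptr->get_pipeline       (pipeline_id);
    Anvil::PipelineLayout* layout_ptr  = in_pipeline_manager_ptr->get_pipeline_layout(pipeline_id);

    ANVIL_REDUNDANT_VARIABLE(pipeline_vk);
    ANVIL_REDUNDANT_VARIABLE(layout_ptr);
}
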
+// +#include "misc/buffer_create_info.h" +#include "wrappers/buffer.h" +#include "wrappers/device.h" + + +Anvil::BufferCreateInfoUniquePtr Anvil::BufferCreateInfo::create_alloc(const Anvil::BaseDevice* in_device_ptr, + VkDeviceSize in_size, + Anvil::QueueFamilyFlags in_queue_families, + Anvil::SharingMode in_sharing_mode, + Anvil::BufferCreateFlags in_create_flags, + Anvil::BufferUsageFlags in_usage_flags, + Anvil::MemoryFeatureFlags in_memory_features) +{ + + Anvil::BufferCreateInfoUniquePtr result_ptr(nullptr, + std::default_delete() ); + + if ((in_create_flags & Anvil::BufferCreateFlagBits::SPARSE_ALIASED_BIT) != 0) + { + anvil_assert((in_create_flags & Anvil::BufferCreateFlagBits::SPARSE_ALIASED_BIT) == 0); + + goto end; + } + + if ((in_create_flags & Anvil::BufferCreateFlagBits::SPARSE_BINDING_BIT) != 0) + { + anvil_assert((in_create_flags & Anvil::BufferCreateFlagBits::SPARSE_BINDING_BIT) == 0); + + goto end; + } + + if ((in_create_flags & Anvil::BufferCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0) + { + anvil_assert((in_create_flags & Anvil::BufferCreateFlagBits::SPARSE_RESIDENCY_BIT) == 0); + + goto end; + } + + result_ptr.reset( + new Anvil::BufferCreateInfo(BufferType::ALLOC, + in_device_ptr, + in_size, + in_queue_families, + in_sharing_mode, + in_create_flags, + in_usage_flags, + in_memory_features, + Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE, + Anvil::ExternalMemoryHandleTypeFlagBits::NONE, /* in_external_memory_handle_types */ + nullptr) /* in_opt_client_data_ptr */ + ); + +end: + return result_ptr; +} + +Anvil::BufferCreateInfoUniquePtr Anvil::BufferCreateInfo::create_no_alloc(const Anvil::BaseDevice* in_device_ptr, + VkDeviceSize in_size, + Anvil::QueueFamilyFlags in_queue_families, + Anvil::SharingMode in_sharing_mode, + Anvil::BufferCreateFlags in_create_flags, + Anvil::BufferUsageFlags in_usage_flags) +{ + Anvil::BufferCreateInfoUniquePtr result_ptr(nullptr, + std::default_delete() ); + + result_ptr.reset( + new Anvil::BufferCreateInfo(BufferType::NO_ALLOC, + in_device_ptr, + in_size, + in_queue_families, + in_sharing_mode, + in_create_flags, + in_usage_flags, + Anvil::MemoryFeatureFlagBits::NONE, + Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE, + Anvil::ExternalMemoryHandleTypeFlagBits::NONE, /* in_external_memory_handle_types */ + nullptr) /* in_opt_client_data_ptr */ + ); + + return result_ptr; +} + +Anvil::BufferCreateInfoUniquePtr Anvil::BufferCreateInfo::create_no_alloc_child(Anvil::Buffer* in_parent_nonsparse_buffer_ptr, + VkDeviceSize in_start_offset, + VkDeviceSize in_size) +{ + Anvil::BufferCreateInfoUniquePtr result_ptr(nullptr, + std::default_delete() ); + + result_ptr.reset( + new Anvil::BufferCreateInfo(in_parent_nonsparse_buffer_ptr, + in_start_offset, + in_size) + ); + + return result_ptr; +} + +Anvil::BufferCreateInfo::BufferCreateInfo(const Anvil::BaseDevice* in_device_ptr, + VkDeviceSize in_size, + Anvil::QueueFamilyFlags in_queue_families, + Anvil::SharingMode in_sharing_mode, + Anvil::BufferCreateFlags in_create_flags, + Anvil::BufferUsageFlags in_usage_flags, + MTSafety in_mt_safety, + Anvil::ExternalMemoryHandleTypeFlags in_exportable_external_memory_handle_types) + :m_client_data_ptr (nullptr), + m_create_flags (in_create_flags), + m_device_ptr (in_device_ptr), + m_exportable_external_memory_handle_types(in_exportable_external_memory_handle_types), + m_mt_safety (in_mt_safety), + m_parent_buffer_ptr (nullptr), + m_queue_families (in_queue_families), + m_sharing_mode (in_sharing_mode), + m_size (in_size), + m_start_offset (0), + m_type 
(BufferType::NO_ALLOC), + m_usage_flags (in_usage_flags) +{ + /* Sanity checks */ + const auto& physical_device_features(in_device_ptr->get_physical_device_features() ); + + if (!physical_device_features.core_vk1_0_features_ptr->sparse_binding) + { + anvil_assert(physical_device_features.core_vk1_0_features_ptr->sparse_binding); + } + + if ((((m_create_flags & Anvil::BufferCreateFlagBits::SPARSE_ALIASED_BIT) != 0) || + ((m_create_flags & Anvil::BufferCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0)) && + !physical_device_features.core_vk1_0_features_ptr->sparse_residency_buffer) + { + anvil_assert((((m_create_flags & Anvil::BufferCreateFlagBits::SPARSE_ALIASED_BIT) != 0) || + ((m_create_flags & Anvil::BufferCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0)) && + physical_device_features.core_vk1_0_features_ptr->sparse_residency_buffer) + } + + if ( ((m_create_flags & Anvil::BufferCreateFlagBits::SPARSE_ALIASED_BIT) != 0) && + !physical_device_features.core_vk1_0_features_ptr->sparse_residency_aliased) + { + anvil_assert(((m_create_flags & Anvil::BufferCreateFlagBits::SPARSE_ALIASED_BIT) != 0) && + physical_device_features.core_vk1_0_features_ptr->sparse_residency_aliased); + } +} + +Anvil::BufferCreateInfo::BufferCreateInfo(const Anvil::BufferType& in_buffer_type, + const Anvil::BaseDevice* in_device_ptr, + VkDeviceSize in_size, + Anvil::QueueFamilyFlags in_queue_families, + Anvil::SharingMode in_sharing_mode, + Anvil::BufferCreateFlags in_create_flags, + Anvil::BufferUsageFlags in_usage_flags, + MemoryFeatureFlags in_memory_features, + MTSafety in_mt_safety, + Anvil::ExternalMemoryHandleTypeFlags in_exportable_external_memory_handle_types, + const void* in_opt_client_data_ptr) + :m_client_data_ptr (in_opt_client_data_ptr), + m_create_flags (in_create_flags), + m_device_ptr (in_device_ptr), + m_exportable_external_memory_handle_types(in_exportable_external_memory_handle_types), + m_memory_features (in_memory_features), + m_mt_safety (in_mt_safety), + m_parent_buffer_ptr (nullptr), + m_queue_families (in_queue_families), + m_sharing_mode (in_sharing_mode), + m_size (in_size), + m_start_offset (0), + m_type (in_buffer_type), + m_usage_flags (in_usage_flags) +{ + if ((in_memory_features & Anvil::MemoryFeatureFlagBits::MAPPABLE_BIT) == 0) + { + anvil_assert((in_memory_features & Anvil::MemoryFeatureFlagBits::HOST_COHERENT_BIT) == 0) + } +} + +Anvil::BufferCreateInfo::BufferCreateInfo(Anvil::Buffer* in_parent_buffer_ptr, + VkDeviceSize in_start_offset, + VkDeviceSize in_size) + :m_client_data_ptr (nullptr), + m_create_flags (in_parent_buffer_ptr->get_create_info_ptr()->m_create_flags), + m_device_ptr (in_parent_buffer_ptr->get_create_info_ptr()->m_device_ptr), + m_exportable_external_memory_handle_types(in_parent_buffer_ptr->get_create_info_ptr()->m_exportable_external_memory_handle_types), + m_memory_features (in_parent_buffer_ptr->get_create_info_ptr()->m_memory_features), + m_mt_safety (in_parent_buffer_ptr->get_create_info_ptr()->m_mt_safety), + m_parent_buffer_ptr (in_parent_buffer_ptr), + m_queue_families (in_parent_buffer_ptr->get_create_info_ptr()->m_queue_families), + m_sharing_mode (in_parent_buffer_ptr->get_create_info_ptr()->m_sharing_mode), + m_size (in_size), + m_start_offset (in_start_offset), + m_type (BufferType::NO_ALLOC_CHILD), + m_usage_flags (in_parent_buffer_ptr->get_create_info_ptr()->m_usage_flags) +{ + /* Sanity checks */ + anvil_assert(in_parent_buffer_ptr != nullptr); + anvil_assert(in_size > 0); + + anvil_assert((in_parent_buffer_ptr->get_create_info_ptr()->m_create_flags & 
Anvil::BufferCreateFlagBits::SPARSE_ALIASED_BIT) == 0); + anvil_assert((in_parent_buffer_ptr->get_create_info_ptr()->m_create_flags & Anvil::BufferCreateFlagBits::SPARSE_BINDING_BIT) == 0); + anvil_assert((in_parent_buffer_ptr->get_create_info_ptr()->m_create_flags & Anvil::BufferCreateFlagBits::SPARSE_RESIDENCY_BIT) == 0); + anvil_assert( in_parent_buffer_ptr->get_memory_block (0 /* in_n_memory_block */) != nullptr); +} diff --git a/src/misc/buffer_view_create_info.cpp b/src/misc/buffer_view_create_info.cpp new file mode 100644 index 00000000..8ea9f5c5 --- /dev/null +++ b/src/misc/buffer_view_create_info.cpp @@ -0,0 +1,59 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#include "misc/buffer_view_create_info.h" + +Anvil::BufferViewCreateInfoUniquePtr Anvil::BufferViewCreateInfo::create(const Anvil::BaseDevice* in_device_ptr, + Anvil::Buffer* in_buffer_ptr, + Anvil::Format in_format, + VkDeviceSize in_start_offset, + VkDeviceSize in_size) +{ + Anvil::BufferViewCreateInfoUniquePtr result_ptr(nullptr, + std::default_delete() ); + + result_ptr.reset( + new Anvil::BufferViewCreateInfo(in_device_ptr, + in_buffer_ptr, + in_format, + in_start_offset, + in_size, + Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE) + ); + + return result_ptr; +} + +Anvil::BufferViewCreateInfo::BufferViewCreateInfo(const Anvil::BaseDevice* in_device_ptr, + Anvil::Buffer* in_buffer_ptr, + Anvil::Format in_format, + VkDeviceSize in_start_offset, + VkDeviceSize in_size, + MTSafety in_mt_safety) + :m_buffer_ptr (in_buffer_ptr), + m_device_ptr (in_device_ptr), + m_format (in_format), + m_mt_safety (in_mt_safety), + m_size (in_size), + m_start_offset(in_start_offset) +{ + /* Stub */ +} \ No newline at end of file diff --git a/src/misc/compute_pipeline_create_info.cpp b/src/misc/compute_pipeline_create_info.cpp new file mode 100644 index 00000000..66d574f9 --- /dev/null +++ b/src/misc/compute_pipeline_create_info.cpp @@ -0,0 +1,71 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
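
A minimal sketch of the buffer create-info flow introduced above: the description is built first, then handed to the wrapper. Only the BufferCreateInfo / BufferViewCreateInfo factories are defined in this patch; the Anvil::Buffer::create() call, the Anvil::BufferUniquePtr alias and the SharingMode / BufferCreateFlagBits enumerators used below are assumptions.

Anvil::BufferUniquePtr make_storage_buffer(const Anvil::BaseDevice*  in_device_ptr,
                                           VkDeviceSize              in_size,
                                           Anvil::QueueFamilyFlags   in_queue_families,
                                           Anvil::BufferUsageFlags   in_usage_flags,
                                           Anvil::MemoryFeatureFlags in_memory_features)
{
    /* create_alloc() describes a buffer for which Anvil should also allocate and bind memory. */
    auto create_info_ptr = Anvil::BufferCreateInfo::create_alloc(in_device_ptr,
                                                                 in_size,
                                                                 in_queue_families,
                                                                 Anvil::SharingMode::EXCLUSIVE,       /* assumed enumerator */
                                                                 Anvil::BufferCreateFlagBits::NONE,   /* assumed enumerator */
                                                                 in_usage_flags,
                                                                 in_memory_features);

    /* The wrapper itself is assumed to be instantiated from the create-info object. */
    return Anvil::Buffer::create(std::move(create_info_ptr) );
}

Anvil::BufferViewCreateInfoUniquePtr make_texel_buffer_view_create_info(const Anvil::BaseDevice* in_device_ptr,
                                                                        Anvil::Buffer*           in_buffer_ptr,
                                                                        Anvil::Format            in_format,
                                                                        VkDeviceSize             in_size)
{
    /* View the whole buffer, starting at offset 0. */
    return Anvil::BufferViewCreateInfo::create(in_device_ptr,
                                               in_buffer_ptr,
                                               in_format,
                                               0, /* in_start_offset */
                                               in_size);
}
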
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#include "misc/compute_pipeline_create_info.h" + +Anvil::ComputePipelineCreateInfo::ComputePipelineCreateInfo() +{ + /* Stub */ +} + +Anvil::ComputePipelineCreateInfo::~ComputePipelineCreateInfo() +{ + /* Stub */ +} + +Anvil::ComputePipelineCreateInfoUniquePtr Anvil::ComputePipelineCreateInfo::create(const Anvil::PipelineCreateFlags& in_create_flags, + const ShaderModuleStageEntryPoint& in_compute_shader_stage_entrypoint_info, + const Anvil::PipelineID* in_opt_base_pipeline_id_ptr) +{ + Anvil::ComputePipelineCreateInfoUniquePtr result_ptr(nullptr, + std::default_delete() ); + + result_ptr.reset( + new ComputePipelineCreateInfo() + ); + + if (result_ptr != nullptr) + { + result_ptr->init(in_create_flags, + 1, /* in_n_shader_module_stage_entrypoints */ + &in_compute_shader_stage_entrypoint_info, + in_opt_base_pipeline_id_ptr); + } + + return result_ptr; +} + +Anvil::ComputePipelineCreateInfoUniquePtr Anvil::ComputePipelineCreateInfo::create_proxy() +{ + Anvil::ComputePipelineCreateInfoUniquePtr result_ptr(nullptr, + std::default_delete() ); + + result_ptr.reset( + new ComputePipelineCreateInfo() + ); + + if (result_ptr != nullptr) + { + result_ptr->init_proxy(); + } + + return result_ptr; +} diff --git a/src/misc/debug.cpp b/src/misc/debug.cpp index 827bf48e..41aa6211 100644 --- a/src/misc/debug.cpp +++ b/src/misc/debug.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
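
The VK_AMD_shader_info helpers added to BasePipelineManager earlier in this patch can be used roughly as follows. The byte-vector element type and the Anvil::ShaderStage::COMPUTE enumerator are assumptions (the template arguments were not preserved above); the VkShaderStatisticsInfoAMD fields come straight from the VK_AMD_shader_info extension.

#include <cstdio>
#include <vector>

void print_compute_shader_stats(Anvil::ComputePipelineManager* in_pipeline_manager_ptr,
                                Anvil::PipelineID              in_pipeline_id)
{
    VkShaderStatisticsInfoAMD stats       = {};
    std::vector<uint8_t>      disassembly;

    /* Both entrypoints return false if VK_AMD_shader_info is unavailable on the device. */
    if (in_pipeline_manager_ptr->get_shader_statistics(in_pipeline_id,
                                                       Anvil::ShaderStage::COMPUTE,
                                                      &stats) )
    {
        printf("VGPRs used: %u, SGPRs used: %u\n",
               stats.resourceUsage.numUsedVgprs,
               stats.resourceUsage.numUsedSgprs);
    }

    if (in_pipeline_manager_ptr->get_shader_info(in_pipeline_id,
                                                 Anvil::ShaderStage::COMPUTE,
                                                 Anvil::ShaderInfoType::DISASSEMBLY,
                                                &disassembly) )
    {
        printf("%.*s\n",
               static_cast<int>(disassembly.size() ),
               reinterpret_cast<const char*>(disassembly.data() ));
    }
}
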
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -25,6 +25,7 @@ #include #include #include +#include static void default_assertion_failure_handler(const char* in_filename, unsigned int in_line, @@ -35,21 +36,36 @@ static Anvil::AssertionFailedCallbackFunction g_anvil_assertion_check_failed_fun std::placeholders::_1, std::placeholders::_2, std::placeholders::_3); +struct AnvilAssertionFailure : public std::runtime_error +{ + AnvilAssertionFailure(const std::string &msg) + : std::runtime_error{msg} + {} +}; +std::string Anvil::result_to_string(int64_t result) +{ + return string_VkResult(static_cast(result)); +} +#include /** Please see header for specification */ void default_assertion_failure_handler(const char* in_filename, unsigned int in_line, const char* in_message) { - fprintf(stderr, + std::stringstream ss; + ss<<"Assertion failed in "< in_device_ptr, - VkDebugReportObjectTypeEXT in_vk_object_type) +Anvil::DebugMarkerSupportProviderWorker::DebugMarkerSupportProviderWorker(const Anvil::BaseDevice* in_device_ptr, + const Anvil::ObjectType& in_vk_object_type) :m_device_ptr (in_device_ptr), m_object_tag_name (0), - m_vk_object_handle(VK_NULL_HANDLE), + m_vk_object_handle(0), m_vk_object_type (in_vk_object_type) { - std::shared_ptr device_locked_ptr(m_device_ptr); + const auto& device_extension_info_ptr = m_device_ptr->get_extension_info(); + const auto& instance_extension_info_ptr = m_device_ptr->get_parent_instance()->get_enabled_extensions_info(); - m_is_ext_debug_marker_available = device_locked_ptr->is_ext_debug_marker_extension_enabled(); - m_object_tag_name = 0; + if (instance_extension_info_ptr->ext_debug_utils() ) + { + m_used_api = DebugAPI::EXT_DEBUG_UTILS; + } + else + if (device_extension_info_ptr->ext_debug_marker() ) + { + m_used_api = DebugAPI::EXT_DEBUG_MARKER; + } + else + { + m_used_api = DebugAPI::NONE; + } + + m_object_tag_name = 0; } /** Please see header for specification */ @@ -47,25 +63,71 @@ void Anvil::DebugMarkerSupportProviderWorker::set_name_internal(const std::strin { m_object_name = in_object_name; - if (m_is_ext_debug_marker_available && - (m_vk_object_handle != VK_NULL_HANDLE) ) + if (m_vk_object_handle != 0) { - std::shared_ptr device_locked_ptr(m_device_ptr); - const auto& entrypoints (device_locked_ptr->get_extension_ext_debug_marker_entrypoints() ); - VkDebugMarkerObjectNameInfoEXT name_info; - VkResult result_vk; - - name_info.object = m_vk_object_handle; - name_info.objectType = m_vk_object_type; - name_info.pNext = nullptr; - name_info.pObjectName = in_object_name.c_str(); - name_info.sType = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT; - - result_vk = entrypoints.vkDebugMarkerSetObjectNameEXT(device_locked_ptr->get_device_vk(), - &name_info); - anvil_assert_vk_call_succeeded(result_vk); - - ANVIL_REDUNDANT_VARIABLE(result_vk); + switch (m_used_api) + { + case DebugAPI::EXT_DEBUG_MARKER: + { + const auto& entrypoints(m_device_ptr->get_extension_ext_debug_marker_entrypoints() ); + VkDebugMarkerObjectNameInfoEXT name_info; + VkResult result_vk; + + name_info.object = m_vk_object_handle; + name_info.objectType = Anvil::Utils::get_vk_debug_report_object_type_ext_from_object_type(m_vk_object_type); + name_info.pNext = nullptr; + name_info.pObjectName = in_object_name.c_str(); + name_info.sType = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT; + + if (name_info.objectType != VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT) + { + 
std::lock_guard lock(m_mutex); + { + result_vk = entrypoints.vkDebugMarkerSetObjectNameEXT(m_device_ptr->get_device_vk(), + &name_info); + + anvil_assert_vk_call_succeeded(result_vk); + ANVIL_REDUNDANT_VARIABLE (result_vk); + } + } + + break; + } + + case DebugAPI::EXT_DEBUG_UTILS: + { + const auto& entrypoints(m_device_ptr->get_parent_instance()->get_extension_ext_debug_utils_entrypoints() ); + VkDebugUtilsObjectNameInfoEXT name_info; + VkResult result_vk; + + name_info.objectHandle = m_vk_object_handle; + name_info.objectType = static_cast(m_vk_object_type); + name_info.pNext = nullptr; + name_info.pObjectName = in_object_name.c_str(); + name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT; + + std::lock_guard lock(m_mutex); + { + result_vk = entrypoints.vkSetDebugUtilsObjectNameEXT(m_device_ptr->get_device_vk(), + &name_info); + + anvil_assert_vk_call_succeeded(result_vk); + ANVIL_REDUNDANT_VARIABLE (result_vk); + } + + break; + } + + case DebugAPI::NONE: + { + break; + } + + default: + { + anvil_assert_fail(); + } + } } } } @@ -100,30 +162,75 @@ void Anvil::DebugMarkerSupportProviderWorker::set_tag_internal(const uint64_t in m_object_tag_name = in_tag_name; m_object_tag_data.resize(in_tag_size); - memcpy(&m_object_tag_data, + memcpy(&m_object_tag_data.at(0), in_tag_ptr, in_tag_size); - if (m_is_ext_debug_marker_available && - (m_vk_object_handle != VK_NULL_HANDLE) ) + if (m_vk_object_handle != 0) { - std::shared_ptr device_locked_ptr(m_device_ptr); - const auto& entrypoints (device_locked_ptr->get_extension_ext_debug_marker_entrypoints() ); - VkResult result_vk; - VkDebugMarkerObjectTagInfoEXT tag_info; - - tag_info.object = m_vk_object_handle; - tag_info.objectType = m_vk_object_type; - tag_info.pNext = nullptr; - tag_info.sType = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT; - tag_info.tagName = m_object_tag_name; - tag_info.tagSize = in_tag_size; - - result_vk = entrypoints.vkDebugMarkerSetObjectTagEXT(device_locked_ptr->get_device_vk(), - &tag_info); - anvil_assert_vk_call_succeeded(result_vk); - - ANVIL_REDUNDANT_VARIABLE(result_vk); + switch (m_used_api) + { + case DebugAPI::EXT_DEBUG_MARKER: + { + const auto& entrypoints(m_device_ptr->get_extension_ext_debug_marker_entrypoints() ); + VkResult result_vk; + VkDebugMarkerObjectTagInfoEXT tag_info; + + tag_info.object = m_vk_object_handle; + tag_info.objectType = Anvil::Utils::get_vk_debug_report_object_type_ext_from_object_type(m_vk_object_type); + tag_info.pNext = nullptr; + tag_info.pTag = in_tag_ptr; + tag_info.sType = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT; + tag_info.tagName = m_object_tag_name; + tag_info.tagSize = in_tag_size; + + std::lock_guard lock(m_mutex); + { + result_vk = entrypoints.vkDebugMarkerSetObjectTagEXT(m_device_ptr->get_device_vk(), + &tag_info); + } + + anvil_assert_vk_call_succeeded(result_vk); + + ANVIL_REDUNDANT_VARIABLE(result_vk); + } + + case DebugAPI::EXT_DEBUG_UTILS: + { + const auto& entrypoints(m_device_ptr->get_parent_instance()->get_extension_ext_debug_utils_entrypoints() ); + VkDebugUtilsObjectTagInfoEXT name_info; + VkResult result_vk; + + name_info.objectHandle = m_vk_object_handle; + name_info.objectType = static_cast(m_vk_object_type); + name_info.pNext = nullptr; + name_info.pTag = in_tag_ptr; + name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT; + name_info.tagName = m_object_tag_name; + name_info.tagSize = in_tag_size; + + std::lock_guard lock(m_mutex); + { + result_vk = 
entrypoints.vkSetDebugUtilsObjectTagEXT(m_device_ptr->get_device_vk(), + &name_info); + + anvil_assert_vk_call_succeeded(result_vk); + ANVIL_REDUNDANT_VARIABLE (result_vk); + } + + break; + } + + case DebugAPI::NONE: + { + break; + } + + default: + { + anvil_assert_fail(); + } + } } } } @@ -131,11 +238,11 @@ void Anvil::DebugMarkerSupportProviderWorker::set_tag_internal(const uint64_t in /** Please see header for specification */ void Anvil::DebugMarkerSupportProviderWorker::set_vk_handle_internal(uint64_t in_vk_object_handle) { - if (in_vk_object_handle == VK_NULL_HANDLE) + if (in_vk_object_handle == 0) { - anvil_assert(m_vk_object_handle != VK_NULL_HANDLE); + anvil_assert(m_vk_object_handle != 0); - m_vk_object_handle = VK_NULL_HANDLE; + m_vk_object_handle = 0; m_object_name.clear (); m_object_tag_data.clear(); @@ -145,7 +252,7 @@ void Anvil::DebugMarkerSupportProviderWorker::set_vk_handle_internal(uint64_t in else if (m_vk_object_handle != in_vk_object_handle) { - anvil_assert(m_vk_object_handle == VK_NULL_HANDLE); + anvil_assert(m_vk_object_handle == 0); m_vk_object_handle = in_vk_object_handle; diff --git a/src/misc/debug_messenger_create_info.cpp b/src/misc/debug_messenger_create_info.cpp new file mode 100644 index 00000000..e222975d --- /dev/null +++ b/src/misc/debug_messenger_create_info.cpp @@ -0,0 +1,52 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
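
With the rework above, naming and tagging objects no longer requires the caller to know which debug extension is active: the provider prefers VK_EXT_debug_utils when the instance enables it and falls back to VK_EXT_debug_marker otherwise. A hypothetical sketch, assuming the public set_name()/set_tag() wrappers that forward to the *_internal() functions changed above:

void label_vertex_buffer(Anvil::Buffer* in_buffer_ptr,
                         uint32_t       in_frame_index)
{
    /* Forwarded to vkSetDebugUtilsObjectNameEXT or vkDebugMarkerSetObjectNameEXT, depending on
     * which extension was enabled at instance/device creation time. */
    in_buffer_ptr->set_name("Vertex buffer");

    /* Tags work the same way; the payload layout is entirely application-defined. */
    in_buffer_ptr->set_tag(1, /* in_tag_name */
                           sizeof(in_frame_index),
                          &in_frame_index);
}
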
+// +#include "misc/debug_messenger_create_info.h" + +Anvil::DebugMessengerCreateInfo::DebugMessengerCreateInfo(Anvil::Instance* in_instance_ptr, + const Anvil::DebugMessageSeverityFlags& in_debug_message_severity_flags, + const Anvil::DebugMessageTypeFlags& in_debug_message_type_flags, + Anvil::DebugMessengerCallbackFunction in_callback_function) + :m_callback_function (in_callback_function), + m_debug_message_severity_flags(in_debug_message_severity_flags), + m_debug_message_type_flags (in_debug_message_type_flags), + m_instance_ptr (in_instance_ptr) +{ + /* Stub */ +} + +Anvil::DebugMessengerCreateInfoUniquePtr Anvil::DebugMessengerCreateInfo::create(Anvil::Instance* in_instance_ptr, + const Anvil::DebugMessageSeverityFlags& in_debug_message_severity_flags, + const Anvil::DebugMessageTypeFlags& in_debug_message_type_flags, + Anvil::DebugMessengerCallbackFunction in_callback_function) +{ + Anvil::DebugMessengerCreateInfoUniquePtr result_ptr; + + result_ptr.reset( + new DebugMessengerCreateInfo(in_instance_ptr, + in_debug_message_severity_flags, + in_debug_message_type_flags, + in_callback_function) + ); + anvil_assert(result_ptr != nullptr); + + return result_ptr; +} \ No newline at end of file diff --git a/src/misc/descriptor_pool_create_info.cpp b/src/misc/descriptor_pool_create_info.cpp new file mode 100644 index 00000000..ef39e97d --- /dev/null +++ b/src/misc/descriptor_pool_create_info.cpp @@ -0,0 +1,54 @@ +// +// Copyright (c) 2019 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
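
A minimal sketch of building the new messenger create-info object. Only DebugMessengerCreateInfo::create() is defined above; the severity/type flag bit names and the DebugMessenger wrapper that would eventually consume the result are assumptions.

Anvil::DebugMessengerCreateInfoUniquePtr make_validation_messenger_create_info(Anvil::Instance*                      in_instance_ptr,
                                                                               Anvil::DebugMessengerCallbackFunction in_callback)
{
    /* The flag bit names below are hypothetical; substitute whichever severities and message
     * types the application cares about. */
    return Anvil::DebugMessengerCreateInfo::create(in_instance_ptr,
                                                   Anvil::DebugMessageSeverityFlagBits::WARNING_BIT | Anvil::DebugMessageSeverityFlagBits::ERROR_BIT,
                                                   Anvil::DebugMessageTypeFlagBits::VALIDATION_BIT,
                                                   in_callback);
}
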
+// + +#include "misc/descriptor_pool_create_info.h" + +Anvil::DescriptorPoolCreateInfoUniquePtr Anvil::DescriptorPoolCreateInfo::create(const Anvil::BaseDevice* in_device_ptr, + const uint32_t& in_n_max_sets, + const Anvil::DescriptorPoolCreateFlags& in_create_flags, + const Anvil::MTSafety& in_mt_safety) +{ + Anvil::DescriptorPoolCreateInfoUniquePtr result_ptr; + + result_ptr.reset( + new Anvil::DescriptorPoolCreateInfo(in_device_ptr, + in_n_max_sets, + in_create_flags, + in_mt_safety) + ); + + anvil_assert(result_ptr != nullptr); + return result_ptr; +} + +Anvil::DescriptorPoolCreateInfo::DescriptorPoolCreateInfo(const Anvil::BaseDevice* in_device_ptr, + const uint32_t& in_n_max_sets, + const Anvil::DescriptorPoolCreateFlags& in_create_flags, + const Anvil::MTSafety& in_mt_safety) + :m_create_flags (in_create_flags), + m_device_ptr (in_device_ptr), + m_mt_safety (in_mt_safety), + m_n_max_inline_uniform_block_bindings(0), + m_n_max_sets (in_n_max_sets) +{ + /* Stub */ +} diff --git a/src/misc/descriptor_set_create_info.cpp b/src/misc/descriptor_set_create_info.cpp new file mode 100644 index 00000000..12a48b42 --- /dev/null +++ b/src/misc/descriptor_set_create_info.cpp @@ -0,0 +1,429 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// + +#include "misc/descriptor_set_create_info.h" +#include "misc/struct_chainer.h" +#include "wrappers/device.h" +#include "wrappers/sampler.h" + +/** Please see header for specification */ +Anvil::DescriptorSetCreateInfo::DescriptorSetCreateInfo() + :m_n_variable_descriptor_count_binding (UINT32_MAX), + m_variable_descriptor_count_binding_size(0) +{ + /* Stub */ +} + +/** Please see header for specification */ +Anvil::DescriptorSetCreateInfo::~DescriptorSetCreateInfo() +{ + /* Stub */ +} + +/** Please see header for specification */ +bool Anvil::DescriptorSetCreateInfo::add_binding(uint32_t in_binding_index, + Anvil::DescriptorType in_descriptor_type, + uint32_t in_descriptor_array_size, + Anvil::ShaderStageFlags in_stage_flags, + const Anvil::DescriptorBindingFlags& in_flags, + const Anvil::Sampler* const* in_immutable_sampler_ptrs) +{ + bool result = false; + + /* Make sure the binding is not already defined */ + if (m_bindings.find(in_binding_index) != m_bindings.end() ) + { + anvil_assert_fail(); + + goto end; + } + + /* Make sure the sampler array can actually be specified for this descriptor type. 
*/ + if (in_immutable_sampler_ptrs != nullptr) + { + if (in_descriptor_type != Anvil::DescriptorType::COMBINED_IMAGE_SAMPLER && + in_descriptor_type != Anvil::DescriptorType::SAMPLER) + { + anvil_assert_fail(); + + goto end; + } + } + + if (in_descriptor_type == Anvil::DescriptorType::INLINE_UNIFORM_BLOCK) + { + anvil_assert((in_descriptor_array_size % 4) == 0); + } + + if ((in_flags & Anvil::DescriptorBindingFlagBits::VARIABLE_DESCRIPTOR_COUNT_BIT) != 0) + { + if (m_n_variable_descriptor_count_binding != UINT32_MAX) + { + /* If this assertion check fails, you're attempting to add more than 1 variable descriptor count binding + * which is illegal! */ + anvil_assert(m_n_variable_descriptor_count_binding == UINT32_MAX); + + goto end; + } + + m_n_variable_descriptor_count_binding = in_binding_index; + } + + /* Add a new binding entry and mark the layout as dirty, so that it is re-baked next time + * the user calls the getter func */ + m_bindings[in_binding_index] = Binding(in_descriptor_array_size, + in_descriptor_type, + in_stage_flags, + in_immutable_sampler_ptrs, + in_flags); + + result = true; +end: + return result; +} + +/** Please see header for specification */ +Anvil::DescriptorSetCreateInfoUniquePtr Anvil::DescriptorSetCreateInfo::create() +{ + Anvil::DescriptorSetCreateInfoUniquePtr result_ptr(nullptr, + std::default_delete() ); + + result_ptr.reset( + new Anvil::DescriptorSetCreateInfo() + ); + + return result_ptr; +} + +/** Please see header for specification */ +std::unique_ptr Anvil::DescriptorSetCreateInfo::create_descriptor_set_layout_create_info(const Anvil::BaseDevice* in_device_ptr) const +{ + uint32_t n_binding = 0; + uint32_t n_bindings_defined = 0; + uint32_t n_samplers_defined = 0; + std::unique_ptr result_ptr; + bool should_chain_binding_flags_struct = false; + Anvil::StructChainer struct_chainer; + + result_ptr.reset( + new Anvil::DescriptorSetLayoutCreateInfoContainer() + ); + + if (result_ptr == nullptr) + { + anvil_assert(result_ptr != nullptr); + + goto end; + } + + /* Count the number of immutable samplers defined. This is needed because if sampler_items is reallocated + * after we start building the VkSampler array contents, all previously initialized VkDescriptorSetLayoutBinding + * instances will start referring to invalid sampler arrays. + */ + for (auto binding_iterator = m_bindings.begin(); + binding_iterator != m_bindings.end(); + ++binding_iterator) + { + const auto& binding_data = binding_iterator->second; + + n_samplers_defined += static_cast(binding_data.immutable_samplers.size() ); + } + + result_ptr->sampler_items.reserve(n_samplers_defined); + + /* Determine the number of non-dummy bindings. 
*/ + if (m_bindings.size() > 1) + { + n_bindings_defined = static_cast(m_bindings.size() ); + } + else + for (auto binding : m_bindings) + { + if (binding.second.descriptor_array_size > 0) + { + ++n_bindings_defined; + } + } + + /* Convert internal representation to Vulkan structure(s) */ + result_ptr->binding_info_items.resize(n_bindings_defined); + + { + VkDescriptorSetLayoutCreateInfo create_info; + + create_info.bindingCount = n_bindings_defined; + create_info.flags = 0; + create_info.pNext = nullptr; + create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO; + + if (n_bindings_defined > 0) + { + for (auto binding_iterator = m_bindings.begin(); + binding_iterator != m_bindings.end(); + ++binding_iterator, ++n_binding) + { + const auto& binding_data = binding_iterator->second; + VkDescriptorBindingFlagsEXT binding_flags = 0; + const BindingIndex& binding_index = binding_iterator->first; + VkDescriptorSetLayoutBinding& binding_vk = result_ptr->binding_info_items.at(n_binding); + + if (binding_data.descriptor_array_size == 0) + { + /* Dummy binding, ignore. */ + continue; + } + + binding_vk.binding = binding_index; + binding_vk.descriptorCount = binding_data.descriptor_array_size; + binding_vk.descriptorType = static_cast(binding_data.descriptor_type); + binding_vk.stageFlags = binding_data.stage_flags.get_vk(); + + if (binding_data.immutable_samplers.size() > 0) + { + const uint32_t sampler_array_start_index = static_cast(result_ptr->sampler_items.size() ); + + anvil_assert(binding_data.immutable_samplers.size() == binding_data.descriptor_array_size); + + for (uint32_t n_sampler = 0; + n_sampler < binding_data.descriptor_array_size; + ++n_sampler) + { + result_ptr->sampler_items.push_back(binding_data.immutable_samplers[n_sampler]->get_sampler() ); + } + + binding_vk.pImmutableSamplers = &result_ptr->sampler_items[sampler_array_start_index]; + } + else + { + binding_vk.pImmutableSamplers = nullptr; + } + + if ((binding_iterator->second.flags & Anvil::DescriptorBindingFlagBits::UPDATE_AFTER_BIND_BIT) != 0) + { + binding_flags |= VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT; + create_info.flags |= VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT; + should_chain_binding_flags_struct = true; + } + + if ((binding_iterator->second.flags & Anvil::DescriptorBindingFlagBits::UPDATE_UNUSED_WHILE_PENDING_BIT) != 0) + { + binding_flags |= VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT; + should_chain_binding_flags_struct = true; + } + + if ((binding_iterator->second.flags & Anvil::DescriptorBindingFlagBits::PARTIALLY_BOUND_BIT) != 0) + { + binding_flags |= VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT; + should_chain_binding_flags_struct = true; + } + + if ((binding_iterator->second.flags & Anvil::DescriptorBindingFlagBits::VARIABLE_DESCRIPTOR_COUNT_BIT) != 0) + { + binding_flags |= VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT; + should_chain_binding_flags_struct = true; + } + + result_ptr->binding_flags_vec.push_back(binding_flags); + } + } + + if (result_ptr->binding_info_items.size() > 0) + { + create_info.pBindings = &result_ptr->binding_info_items.at(0); + } + else + { + create_info.pBindings = nullptr; + } + + struct_chainer.append_struct(create_info); + } + + if (should_chain_binding_flags_struct) + { + VkDescriptorSetLayoutBindingFlagsCreateInfoEXT binding_flags_struct; + + if (!in_device_ptr->get_extension_info()->ext_descriptor_indexing() ) + { + /* The bindings have been assigned flags which are only available on implementations + * 
that report support for VK_EXT_descriptor_indexing extension! + */ + anvil_assert(in_device_ptr->get_extension_info()->ext_descriptor_indexing() ); + + result_ptr.reset(); + goto end; + } + + anvil_assert(result_ptr->binding_flags_vec.size() > 0); + anvil_assert(result_ptr->binding_flags_vec.size() == struct_chainer.get_root_struct()->bindingCount); + + binding_flags_struct.bindingCount = static_cast(result_ptr->binding_flags_vec.size() ); + binding_flags_struct.pBindingFlags = &result_ptr->binding_flags_vec.at(0); + binding_flags_struct.pNext = nullptr; + binding_flags_struct.sType = static_cast(VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT); + + struct_chainer.append_struct(binding_flags_struct); + } + + result_ptr->struct_chain_ptr = struct_chainer.create_chain(); + +end: + return result_ptr; +} + +/** Please see header for specification */ +bool Anvil::DescriptorSetCreateInfo::get_binding_properties_by_binding_index(uint32_t in_binding_index, + Anvil::DescriptorType* out_opt_descriptor_type_ptr, + uint32_t* out_opt_descriptor_array_size_ptr, + Anvil::ShaderStageFlags* out_opt_stage_flags_ptr, + bool* out_opt_immutable_samplers_enabled_ptr, + Anvil::DescriptorBindingFlags* out_opt_flags_ptr) const +{ + auto binding_iterator = m_bindings.find(in_binding_index); + bool result = false; + + if (binding_iterator == m_bindings.end() ) + { + goto end; + } + + if (out_opt_descriptor_array_size_ptr != nullptr) + { + *out_opt_descriptor_array_size_ptr = binding_iterator->second.descriptor_array_size; + } + + if (out_opt_descriptor_type_ptr != nullptr) + { + *out_opt_descriptor_type_ptr = binding_iterator->second.descriptor_type; + } + + if (out_opt_immutable_samplers_enabled_ptr != nullptr) + { + *out_opt_immutable_samplers_enabled_ptr = (binding_iterator->second.immutable_samplers.size() != 0); + } + + if (out_opt_stage_flags_ptr != nullptr) + { + *out_opt_stage_flags_ptr = binding_iterator->second.stage_flags; + } + + if (out_opt_flags_ptr != nullptr) + { + *out_opt_flags_ptr = binding_iterator->second.flags; + } + + result = true; + +end: + return result; +} + +/** Please see header for specification */ +bool Anvil::DescriptorSetCreateInfo::get_binding_properties_by_index_number(uint32_t in_n_binding, + uint32_t* out_opt_binding_index_ptr, + Anvil::DescriptorType* out_opt_descriptor_type_ptr, + uint32_t* out_opt_descriptor_array_size_ptr, + Anvil::ShaderStageFlags* out_opt_stage_flags_ptr, + bool* out_opt_immutable_samplers_enabled_ptr, + Anvil::DescriptorBindingFlags* out_opt_flags_ptr) const +{ + auto binding_iterator = m_bindings.begin(); + bool result = false; + + if (m_bindings.size() <= in_n_binding) + { + goto end; + } + + for (uint32_t n_current_binding = 0; + n_current_binding < in_n_binding; + ++n_current_binding) + { + binding_iterator ++; + } + + if (binding_iterator != m_bindings.end() ) + { + if (out_opt_binding_index_ptr != nullptr) + { + *out_opt_binding_index_ptr = binding_iterator->first; + } + + if (out_opt_descriptor_array_size_ptr != nullptr) + { + *out_opt_descriptor_array_size_ptr = binding_iterator->second.descriptor_array_size; + } + + if (out_opt_descriptor_type_ptr != nullptr) + { + *out_opt_descriptor_type_ptr = binding_iterator->second.descriptor_type; + } + + if (out_opt_immutable_samplers_enabled_ptr != nullptr) + { + *out_opt_immutable_samplers_enabled_ptr = (binding_iterator->second.immutable_samplers.size() != 0); + } + + if (out_opt_stage_flags_ptr != nullptr) + { + *out_opt_stage_flags_ptr = binding_iterator->second.stage_flags; + } + 
+ if (out_opt_flags_ptr != nullptr) + { + *out_opt_flags_ptr = binding_iterator->second.flags; + } + + result = true; + } + +end: + return result; +} + +bool Anvil::DescriptorSetCreateInfo::set_binding_variable_descriptor_count(const uint32_t& in_count) +{ + uint32_t binding_index = UINT32_MAX; + bool result = false; + + if (!contains_variable_descriptor_count_binding(&binding_index) ) + { + /* This descriptor set info instance does not define a variable descriptor count binding! */ + anvil_assert_fail(); + + goto end; + } + + m_variable_descriptor_count_binding_size = in_count; + result = true; + +end: + return result; +} + +/** Please see header for specification */ +bool Anvil::DescriptorSetCreateInfo::operator==(const Anvil::DescriptorSetCreateInfo& in_ds) const +{ + return (m_bindings == in_ds.m_bindings && + m_n_variable_descriptor_count_binding == in_ds.m_n_variable_descriptor_count_binding && + m_variable_descriptor_count_binding_size == in_ds.m_variable_descriptor_count_binding_size); +} \ No newline at end of file diff --git a/src/misc/device_create_info.cpp b/src/misc/device_create_info.cpp new file mode 100644 index 00000000..30ebb348 --- /dev/null +++ b/src/misc/device_create_info.cpp @@ -0,0 +1,92 @@ +// +// Copyright (c) 2019 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
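// Usage sketch for the DescriptorSetCreateInfo getters defined above: enumerating every binding in
// element order and pulling its properties. Illustrative only; get_n_bindings(), the enumerator
// DescriptorType::UNKNOWN and the surrounding function are assumptions, while the getter signature
// itself matches the implementation above.
#include "misc/descriptor_set_create_info.h"

void report_bindings(const Anvil::DescriptorSetCreateInfo* ds_create_info_ptr)
{
    const uint32_t n_bindings = ds_create_info_ptr->get_n_bindings(); /* assumed getter */

    for (uint32_t n_binding = 0;
                  n_binding < n_bindings;
                ++n_binding)
    {
        uint32_t                      binding_index              = 0;
        uint32_t                      descriptor_array_size      = 0;
        Anvil::DescriptorType         descriptor_type            = Anvil::DescriptorType::UNKNOWN; /* assumed enumerator */
        Anvil::DescriptorBindingFlags flags;
        bool                          immutable_samplers_enabled = false;
        Anvil::ShaderStageFlags       stage_flags;

        /* get_binding_properties_by_index_number() walks bindings in element order, which is handy
         * when binding indices are sparse; every out-parameter is optional. */
        if (!ds_create_info_ptr->get_binding_properties_by_index_number(n_binding,
                                                                        &binding_index,
                                                                        &descriptor_type,
                                                                        &descriptor_array_size,
                                                                        &stage_flags,
                                                                        &immutable_samplers_enabled,
                                                                        &flags) )
        {
            continue;
        }

        /* ... consume the retrieved properties ... */
    }
}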
+// +#include "misc/device_create_info.h" +#include "wrappers/physical_device.h" + +Anvil::DeviceCreateInfoUniquePtr Anvil::DeviceCreateInfo::create_mgpu(const std::vector& in_physical_device_ptrs, + const bool& in_enable_shader_module_cache, + const DeviceExtensionConfiguration& in_extension_configuration, + const std::vector& in_layers_to_enable, + const Anvil::CommandPoolCreateFlags& in_helper_command_pool_create_flags, + const bool& in_mt_safe) +{ + Anvil::DeviceCreateInfoUniquePtr result_ptr; + + result_ptr.reset( + new Anvil::DeviceCreateInfo(in_physical_device_ptrs, + in_enable_shader_module_cache, + in_extension_configuration, + in_layers_to_enable, + in_helper_command_pool_create_flags, + in_mt_safe) + ); + + anvil_assert(result_ptr != nullptr); + return result_ptr; +} + +Anvil::DeviceCreateInfoUniquePtr Anvil::DeviceCreateInfo::create_sgpu(const Anvil::PhysicalDevice* in_physical_device_ptr, + const bool& in_enable_shader_module_cache, + const DeviceExtensionConfiguration& in_extension_configuration, + const std::vector& in_layers_to_enable, + const Anvil::CommandPoolCreateFlags& in_helper_command_pool_create_flags, + const bool& in_mt_safe) +{ + Anvil::DeviceCreateInfoUniquePtr result_ptr; + + result_ptr.reset( + new Anvil::DeviceCreateInfo({in_physical_device_ptr}, + in_enable_shader_module_cache, + in_extension_configuration, + in_layers_to_enable, + in_helper_command_pool_create_flags, + in_mt_safe) + ); + + anvil_assert(result_ptr != nullptr); + return result_ptr; +} + +Anvil::DeviceCreateInfo::DeviceCreateInfo(const std::vector& in_physical_device_ptrs, + const bool& in_enable_shader_module_cache, + const DeviceExtensionConfiguration& in_extension_configuration, + const std::vector& in_layers_to_enable, + const Anvil::CommandPoolCreateFlags& in_helper_command_pool_create_flags, + const bool& in_mt_safe) + :m_extension_configuration (in_extension_configuration), + m_helper_command_pool_create_flags (in_helper_command_pool_create_flags), + m_layers_to_enable (in_layers_to_enable), + m_memory_overallocation_behavior (Anvil::MemoryOverallocationBehavior::DEFAULT), + m_mt_safe (in_mt_safe), + m_physical_device_ptrs (in_physical_device_ptrs), + m_should_enable_shader_module_cache(in_enable_shader_module_cache) +{ + if (in_physical_device_ptrs.size() > 1) + { + for (const auto& current_physical_device_ptr : in_physical_device_ptrs) + { + ANVIL_REDUNDANT_VARIABLE_CONST(current_physical_device_ptr); + + anvil_assert(current_physical_device_ptr->get_instance() == in_physical_device_ptrs.at(0)->get_instance() ); + } + } +} \ No newline at end of file diff --git a/src/misc/dummy_window.cpp b/src/misc/dummy_window.cpp index 005730bf..cd46b9e5 100644 --- a/src/misc/dummy_window.cpp +++ b/src/misc/dummy_window.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -19,24 +19,29 @@ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
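// A minimal sketch of how the DeviceCreateInfo front-end above is meant to be fed: build the
// create-info first, then hand it to the device wrapper. The helper name, the defaulted extension
// configuration and layer list, the NONE flag value, and the SGPUDevice::create() hand-off mentioned
// in the trailing comment are assumptions rather than code from this change.
#include <string>
#include <vector>

#include "misc/device_create_info.h"
#include "wrappers/physical_device.h"

Anvil::DeviceCreateInfoUniquePtr make_sgpu_device_create_info(const Anvil::PhysicalDevice* physical_device_ptr)
{
    /* Argument order follows the create_sgpu() definition above. */
    auto create_info_ptr = Anvil::DeviceCreateInfo::create_sgpu(physical_device_ptr,
                                                                true,                                   /* in_enable_shader_module_cache       */
                                                                Anvil::DeviceExtensionConfiguration(),  /* in_extension_configuration          */
                                                                std::vector<std::string>(),             /* in_layers_to_enable                 */
                                                                Anvil::CommandPoolCreateFlagBits::NONE, /* in_helper_command_pool_create_flags */
                                                                false);                                 /* in_mt_safe                          */

    /* The result would then be consumed by the device wrapper,
     * e.g. Anvil::SGPUDevice::create(std::move(create_info_ptr)) (assumed, not part of this diff). */
    return create_info_ptr;
}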
// +#include "misc/buffer_create_info.h" #include "misc/dummy_window.h" +#include "misc/image_create_info.h" #include "misc/io.h" +#include "misc/swapchain_create_info.h" #include "wrappers/buffer.h" #include "wrappers/command_buffer.h" #include "wrappers/command_pool.h" #include "wrappers/semaphore.h" #include "wrappers/swapchain.h" #include +#include #include "miniz/miniz.c" /** Please see header for specification */ -std::shared_ptr Anvil::DummyWindow::create(const std::string& in_title, - unsigned int in_width, - unsigned int in_height, - PresentCallbackFunction in_present_callback_func) +Anvil::WindowUniquePtr Anvil::DummyWindow::create(const std::string& in_title, + unsigned int in_width, + unsigned int in_height, + PresentCallbackFunction in_present_callback_func) { - std::shared_ptr result_ptr; + WindowUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( new Anvil::DummyWindow(in_title, @@ -47,7 +52,7 @@ std::shared_ptr Anvil::DummyWindow::create(const std::string& if (result_ptr) { - if (!result_ptr->init() ) + if (!dynamic_cast(result_ptr.get() )->init() ) { result_ptr.reset(); } @@ -61,11 +66,11 @@ Anvil::DummyWindow::DummyWindow(const std::string& in_title, unsigned int in_width, unsigned int in_height, PresentCallbackFunction in_present_callback_func) - : Window(in_title, - in_width, - in_height, - false, /* in_closable */ - in_present_callback_func) + :Window(in_title, + in_width, + in_height, + false, /* in_closable */ + in_present_callback_func) { m_window_owned = true; } @@ -88,16 +93,24 @@ bool Anvil::DummyWindow::init() /* Please see header for specification */ void Anvil::DummyWindow::run() { - bool running = true; - - while (running && !m_window_should_close) + if (m_present_callback_func) { - if (m_present_callback_func) + bool running = true; + + while (running && !m_window_should_close) { m_present_callback_func(); - } - running = !m_window_should_close; + running = !m_window_should_close; + } + } + else + { + do + { + std::this_thread::sleep_for(std::chrono::milliseconds(30) ); + } + while (!m_window_should_close); } m_window_close_finished = true; @@ -105,12 +118,13 @@ void Anvil::DummyWindow::run() /** Please see header for specification */ -std::shared_ptr Anvil::DummyWindowWithPNGSnapshots::create(const std::string& in_title, - unsigned int in_width, - unsigned int in_height, - PresentCallbackFunction in_present_callback_func) +Anvil::WindowUniquePtr Anvil::DummyWindowWithPNGSnapshots::create(const std::string& in_title, + unsigned int in_width, + unsigned int in_height, + PresentCallbackFunction in_present_callback_func) { - std::shared_ptr result_ptr; + WindowUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( new Anvil::DummyWindowWithPNGSnapshots(in_title, @@ -121,7 +135,7 @@ std::shared_ptr Anvil::DummyWindowWithPNGSnapshots::create(const if (result_ptr) { - if (!result_ptr->init() ) + if (!dynamic_cast(result_ptr.get() )->init() ) { result_ptr.reset(); } @@ -142,31 +156,30 @@ Anvil::DummyWindowWithPNGSnapshots::DummyWindowWithPNGSnapshots(const std::strin { m_height = in_height; m_n_frames_presented = 0; + m_swapchain_ptr = nullptr; m_title = in_title; m_width = in_width; m_window_owned = true; } /** Please see header for specification */ -std::shared_ptr Anvil::DummyWindowWithPNGSnapshots::get_swapchain_image_raw_r8g8b8a8_unorm_data(std::shared_ptr in_swapchain_image_ptr) +std::unique_ptr Anvil::DummyWindowWithPNGSnapshots::get_swapchain_image_raw_r8g8b8a8_unorm_data(Anvil::Image* in_swapchain_image_ptr) { - 
std::shared_ptr device_locked_ptr (m_swapchain_ptr.lock()->get_device() ); - std::shared_ptr result_ptr; - VkFormat swapchain_image_format (in_swapchain_image_ptr->get_image_format () ); - const VkImageSubresourceRange swapchain_image_subresource_range(in_swapchain_image_ptr->get_subresource_range() ); + const Anvil::BaseDevice* device_ptr (m_swapchain_ptr->get_create_info_ptr()->get_device() ); + std::unique_ptr result_ptr; + Anvil::Format swapchain_image_format (in_swapchain_image_ptr->get_create_info_ptr()->get_format() ); + const Anvil::ImageSubresourceRange swapchain_image_subresource_range(in_swapchain_image_ptr->get_subresource_range () ); /* Sanity checks .. */ ANVIL_REDUNDANT_VARIABLE(swapchain_image_format); - anvil_assert(swapchain_image_subresource_range.aspectMask == VK_IMAGE_ASPECT_COLOR_BIT); - anvil_assert(device_locked_ptr->get_physical_device_format_properties(swapchain_image_format).optimal_tiling_capabilities & VK_FORMAT_FEATURE_BLIT_SRC_BIT); - anvil_assert(device_locked_ptr->get_physical_device_format_properties(VK_FORMAT_R8G8B8A8_UNORM).optimal_tiling_capabilities & VK_FORMAT_FEATURE_BLIT_DST_BIT); + anvil_assert(swapchain_image_subresource_range.aspect_mask == Anvil::ImageAspectFlagBits::COLOR_BIT); /* Initialize storage for the raw R8G8B8A8 image data */ - std::shared_ptr raw_image_buffer_ptr; - uint32_t raw_image_size = 0; - uint32_t swapchain_image_height = 0; - uint32_t swapchain_image_width = 0; + Anvil::BufferUniquePtr raw_image_buffer_ptr; + uint32_t raw_image_size = 0; + uint32_t swapchain_image_height = 0; + uint32_t swapchain_image_width = 0; in_swapchain_image_ptr->get_image_mipmap_size(0, /* n_mipmap */ &swapchain_image_width, @@ -177,57 +190,66 @@ std::shared_ptr Anvil::DummyWindowWithPNGSnapshots::get_swapchain result_ptr.reset(new unsigned char[raw_image_size]); - raw_image_buffer_ptr = Anvil::Buffer::create_nonsparse(device_locked_ptr, - raw_image_size, - Anvil::QUEUE_FAMILY_GRAPHICS_BIT, - VK_SHARING_MODE_EXCLUSIVE, - VK_BUFFER_USAGE_TRANSFER_DST_BIT, - Anvil::MEMORY_FEATURE_FLAG_MAPPABLE, - nullptr); /* opt_client_data */ + { + auto create_info_ptr = Anvil::BufferCreateInfo::create_alloc(device_ptr, + raw_image_size, + Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, + Anvil::SharingMode::EXCLUSIVE, + Anvil::BufferCreateFlagBits::NONE, + Anvil::BufferUsageFlagBits::TRANSFER_DST_BIT, + Anvil::MemoryFeatureFlagBits::MAPPABLE_BIT); + + create_info_ptr->set_mt_safety(Anvil::MTSafety::DISABLED); + + raw_image_buffer_ptr = Anvil::Buffer::create(std::move(create_info_ptr) ); + } /* 2. 
Create the intermediate image */ - VkImageBlit intermediate_image_blit; - std::shared_ptr intermediate_image_ptr; - - intermediate_image_ptr = Anvil::Image::create_nonsparse(device_locked_ptr, - VK_IMAGE_TYPE_2D, - VK_FORMAT_R8G8B8A8_UNORM, - VK_IMAGE_TILING_OPTIMAL, - VK_IMAGE_USAGE_TRANSFER_SRC_BIT | - VK_IMAGE_USAGE_TRANSFER_DST_BIT, - swapchain_image_width, - swapchain_image_height, - 1, /* in_base_mipmap_depth */ - 1, /* in_n_layers */ - VK_SAMPLE_COUNT_1_BIT, - Anvil::QUEUE_FAMILY_GRAPHICS_BIT, - VK_SHARING_MODE_EXCLUSIVE, - false, /* in_use_full_mipmap_chain */ - 0, /* in_memory_features */ - 0, /* in_create_flags */ - VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, - nullptr); /* in_ mipmaps_ptr */ + Anvil::ImageBlit intermediate_image_blit; + ImageUniquePtr intermediate_image_ptr; + + { + auto create_info_ptr = Anvil::ImageCreateInfo::create_alloc(device_ptr, + Anvil::ImageType::_2D, + Anvil::Format::R8G8B8A8_UNORM, + Anvil::ImageTiling::OPTIMAL, + Anvil::ImageUsageFlagBits::TRANSFER_SRC_BIT | Anvil::ImageUsageFlagBits::TRANSFER_DST_BIT, + swapchain_image_width, + swapchain_image_height, + 1, /* in_base_mipmap_depth */ + 1, /* in_n_layers */ + Anvil::SampleCountFlagBits::_1_BIT, + Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, + Anvil::SharingMode::EXCLUSIVE, + false, /* in_use_full_mipmap_chain */ + Anvil::MemoryFeatureFlagBits::NONE, + Anvil::ImageCreateFlagBits::NONE, + Anvil::ImageLayout::TRANSFER_DST_OPTIMAL); + + create_info_ptr->set_mt_safety(Anvil::MTSafety::DISABLED); + + intermediate_image_ptr = Anvil::Image::create(std::move(create_info_ptr) ); + } /* 3. Set up the command buffer */ - std::shared_ptr command_buffer_ptr; - std::shared_ptr universal_command_pool_ptr; - std::shared_ptr universal_queue_ptr = device_locked_ptr->get_universal_queue (0); - const uint32_t universal_queue_family_index = universal_queue_ptr->get_queue_family_index(); + auto command_buffer_ptr = Anvil::PrimaryCommandBufferUniquePtr(); + Anvil::CommandPool* universal_command_pool_ptr = nullptr; + Anvil::Queue* universal_queue_ptr = device_ptr->get_universal_queue (0); + const uint32_t universal_queue_family_index = universal_queue_ptr->get_queue_family_index(); - universal_command_pool_ptr = device_locked_ptr->get_command_pool(Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL); + universal_command_pool_ptr = device_ptr->get_command_pool_for_queue_family_index(universal_queue_ptr->get_queue_family_index() ); command_buffer_ptr = universal_command_pool_ptr->alloc_primary_level_command_buffer(); command_buffer_ptr->start_recording(true, /* one_time_submit */ false); /* simultaneous_use_allowed */ { - VkBufferImageCopy buffer_image_copy_region; + Anvil::BufferImageCopy buffer_image_copy_region; Anvil::ImageBarrier general_to_transfer_src_image_barrier( - VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_MEMORY_READ_BIT, /* source_access_mask */ - VK_ACCESS_TRANSFER_READ_BIT, /* destination_access_mask */ - false, - VK_IMAGE_LAYOUT_GENERAL, - VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, + Anvil::AccessFlagBits::COLOR_ATTACHMENT_WRITE_BIT | Anvil::AccessFlagBits::TRANSFER_WRITE_BIT | Anvil::AccessFlagBits::MEMORY_READ_BIT, /* source_access_mask */ + Anvil::AccessFlagBits::TRANSFER_READ_BIT, /* destination_access_mask */ + Anvil::ImageLayout::GENERAL, + Anvil::ImageLayout::TRANSFER_SRC_OPTIMAL, universal_queue_family_index, universal_queue_family_index, in_swapchain_image_ptr, @@ -235,104 +257,108 @@ std::shared_ptr Anvil::DummyWindowWithPNGSnapshots::get_swapchain ); Anvil::ImageBarrier 
transfer_dst_to_transfer_src_image_barrier( - VK_ACCESS_TRANSFER_WRITE_BIT, /* source_access_mask */ - VK_ACCESS_TRANSFER_READ_BIT, /* desitnation_access_mask */ - false, - VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, - VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, + Anvil::AccessFlagBits::TRANSFER_WRITE_BIT, /* source_access_mask */ + Anvil::AccessFlagBits::TRANSFER_READ_BIT, /* desitnation_access_mask */ + Anvil::ImageLayout::TRANSFER_DST_OPTIMAL, + Anvil::ImageLayout::TRANSFER_SRC_OPTIMAL, universal_queue_family_index, universal_queue_family_index, - intermediate_image_ptr, + intermediate_image_ptr.get(), swapchain_image_subresource_range); Anvil::ImageBarrier transfer_src_to_general_image_barrier( - VK_ACCESS_TRANSFER_READ_BIT, /* source_access_mask */ - 0, /* destination_access_mask */ - false, - VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, - VK_IMAGE_LAYOUT_GENERAL, + Anvil::AccessFlagBits::TRANSFER_READ_BIT, /* source_access_mask */ + Anvil::AccessFlags(), /* destination_access_mask */ + Anvil::ImageLayout::TRANSFER_SRC_OPTIMAL, + Anvil::ImageLayout::GENERAL, universal_queue_family_index, universal_queue_family_index, in_swapchain_image_ptr, swapchain_image_subresource_range); - command_buffer_ptr->record_pipeline_barrier(VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT, /* src_stage_mask */ - VK_PIPELINE_STAGE_TRANSFER_BIT, /* dst_stage_mask */ - false, /* in_by_region */ - 0, /* in_memory_barrier_count */ - nullptr, /* in_memory_barrier_ptrs */ - 0, /* in_buffer_memory_barrier_count */ - nullptr, /* in_buffer_memory_barrier_ptrs */ - 1, /* in_image_memory_barrier_count */ + command_buffer_ptr->record_pipeline_barrier(Anvil::PipelineStageFlagBits::COLOR_ATTACHMENT_OUTPUT_BIT | Anvil::PipelineStageFlagBits::TRANSFER_BIT, /* src_stage_mask */ + Anvil::PipelineStageFlagBits::TRANSFER_BIT, /* dst_stage_mask */ + Anvil::DependencyFlagBits::NONE, + 0, /* in_memory_barrier_count */ + nullptr, /* in_memory_barrier_ptrs */ + 0, /* in_buffer_memory_barrier_count */ + nullptr, /* in_buffer_memory_barrier_ptrs */ + 1, /* in_image_memory_barrier_count */ &general_to_transfer_src_image_barrier); - intermediate_image_blit.dstOffsets[0].x = 0; - intermediate_image_blit.dstOffsets[0].y = 0; - intermediate_image_blit.dstOffsets[0].z = 0; - intermediate_image_blit.dstOffsets[1].x = static_cast(swapchain_image_width); - intermediate_image_blit.dstOffsets[1].y = static_cast(swapchain_image_height); - intermediate_image_blit.dstOffsets[1].z = 1; - intermediate_image_blit.dstSubresource.baseArrayLayer = 0; - intermediate_image_blit.dstSubresource.layerCount = 1; - intermediate_image_blit.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; - intermediate_image_blit.dstSubresource.mipLevel = 0; - intermediate_image_blit.srcOffsets[0] = intermediate_image_blit.dstOffsets[0]; - intermediate_image_blit.srcOffsets[1] = intermediate_image_blit.dstOffsets[1]; - intermediate_image_blit.srcSubresource = intermediate_image_blit.dstSubresource; + intermediate_image_blit.dst_offsets[0].x = 0; + intermediate_image_blit.dst_offsets[0].y = 0; + intermediate_image_blit.dst_offsets[0].z = 0; + intermediate_image_blit.dst_offsets[1].x = static_cast(swapchain_image_width); + intermediate_image_blit.dst_offsets[1].y = static_cast(swapchain_image_height); + intermediate_image_blit.dst_offsets[1].z = 1; + intermediate_image_blit.dst_subresource.base_array_layer = 0; + intermediate_image_blit.dst_subresource.layer_count = 1; + intermediate_image_blit.dst_subresource.aspect_mask = 
Anvil::ImageAspectFlagBits::COLOR_BIT; + intermediate_image_blit.dst_subresource.mip_level = 0; + intermediate_image_blit.src_offsets[0] = intermediate_image_blit.dst_offsets[0]; + intermediate_image_blit.src_offsets[1] = intermediate_image_blit.dst_offsets[1]; + intermediate_image_blit.src_subresource = intermediate_image_blit.dst_subresource; command_buffer_ptr->record_blit_image(in_swapchain_image_ptr, - VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, - intermediate_image_ptr, - VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, + Anvil::ImageLayout::TRANSFER_SRC_OPTIMAL, + intermediate_image_ptr.get(), + Anvil::ImageLayout::TRANSFER_DST_OPTIMAL, 1, /* regionCount */ &intermediate_image_blit, - VK_FILTER_NEAREST); - - command_buffer_ptr->record_pipeline_barrier(VK_PIPELINE_STAGE_TRANSFER_BIT, /* src_stage_mask */ - VK_PIPELINE_STAGE_TRANSFER_BIT, /* dst_stage_mask */ - VK_FALSE, /* in_by_region */ - 0, /* in_memory_barrier_count */ - nullptr, /* in_memory_barrier_ptrs */ - 0, /* in_buffer_memory_barrier_count */ - nullptr, /* in_buffer_memory_barrier_ptrs */ - 1, /* in_image_memory_barrier_count */ + Anvil::Filter::NEAREST); + + command_buffer_ptr->record_pipeline_barrier(Anvil::PipelineStageFlagBits::TRANSFER_BIT, /* src_stage_mask */ + Anvil::PipelineStageFlagBits::TRANSFER_BIT, /* dst_stage_mask */ + Anvil::DependencyFlagBits::NONE, + 0, /* in_memory_barrier_count */ + nullptr, /* in_memory_barrier_ptrs */ + 0, /* in_buffer_memory_barrier_count */ + nullptr, /* in_buffer_memory_barrier_ptrs */ + 1, /* in_image_memory_barrier_count */ &transfer_dst_to_transfer_src_image_barrier); - buffer_image_copy_region.bufferImageHeight = 0; /* assume tight packing */ - buffer_image_copy_region.bufferOffset = 0; - buffer_image_copy_region.bufferRowLength = 0; /* assume tight packing */ - buffer_image_copy_region.imageExtent.depth = 1; - buffer_image_copy_region.imageExtent.height = swapchain_image_height; - buffer_image_copy_region.imageExtent.width = swapchain_image_width; - buffer_image_copy_region.imageOffset.x = 0; - buffer_image_copy_region.imageOffset.y = 0; - buffer_image_copy_region.imageOffset.z = 0; - buffer_image_copy_region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; - buffer_image_copy_region.imageSubresource.baseArrayLayer = 0; - buffer_image_copy_region.imageSubresource.layerCount = 1; - buffer_image_copy_region.imageSubresource.mipLevel = 0; - - command_buffer_ptr->record_copy_image_to_buffer(intermediate_image_ptr, - VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, - raw_image_buffer_ptr, + buffer_image_copy_region.buffer_image_height = 0; /* assume tight packing */ + buffer_image_copy_region.buffer_offset = 0; + buffer_image_copy_region.buffer_row_length = 0; /* assume tight packing */ + buffer_image_copy_region.image_extent.depth = 1; + buffer_image_copy_region.image_extent.height = swapchain_image_height; + buffer_image_copy_region.image_extent.width = swapchain_image_width; + buffer_image_copy_region.image_offset.x = 0; + buffer_image_copy_region.image_offset.y = 0; + buffer_image_copy_region.image_offset.z = 0; + buffer_image_copy_region.image_subresource.aspect_mask = Anvil::ImageAspectFlagBits::COLOR_BIT; + buffer_image_copy_region.image_subresource.base_array_layer = 0; + buffer_image_copy_region.image_subresource.layer_count = 1; + buffer_image_copy_region.image_subresource.mip_level = 0; + + command_buffer_ptr->record_copy_image_to_buffer(intermediate_image_ptr.get(), + Anvil::ImageLayout::TRANSFER_SRC_OPTIMAL, + raw_image_buffer_ptr.get(), 1, /* regionCount */ &buffer_image_copy_region); - 
command_buffer_ptr->record_pipeline_barrier(VK_PIPELINE_STAGE_TRANSFER_BIT, /* src_stage_mask */ - VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, /* dst_stage_mask */ - VK_FALSE, /* in_by_region */ - 0, /* in_memory_barrier_count */ - nullptr, /* in_memory_barrier_ptrs */ - 0, /* in_buffer_memory_barrier_count */ - nullptr, /* in_buffer_memory_barrier_ptrs */ - 1, /* in_image_memory_barrier_count */ + command_buffer_ptr->record_pipeline_barrier(Anvil::PipelineStageFlagBits::TRANSFER_BIT, /* src_stage_mask */ + Anvil::PipelineStageFlagBits::TOP_OF_PIPE_BIT, /* dst_stage_mask */ + Anvil::DependencyFlagBits::NONE, + 0, /* in_memory_barrier_count */ + nullptr, /* in_memory_barrier_ptrs */ + 0, /* in_buffer_memory_barrier_count */ + nullptr, /* in_buffer_memory_barrier_ptrs */ + 1, /* in_image_memory_barrier_count */ &transfer_src_to_general_image_barrier); } command_buffer_ptr->stop_recording(); /* Execute the command buffer */ - universal_queue_ptr->submit_command_buffer(command_buffer_ptr, - true); /* should_block */ + { + Anvil::CommandBufferBase* cmd_buffer_raw_ptr = command_buffer_ptr.get(); + + universal_queue_ptr->submit(Anvil::SubmitInfo::create_execute(&cmd_buffer_raw_ptr, + 1, /* in_n_cmd_buffers */ + true) /* should_block */ + ); + } /* Read back the result data */ raw_image_buffer_ptr->read(0, /* offset */ @@ -360,9 +386,9 @@ void Anvil::DummyWindowWithPNGSnapshots::run() } /** Please see header for specification */ -void Anvil::DummyWindowWithPNGSnapshots::set_swapchain(std::weak_ptr in_swapchain_ptr) +void Anvil::DummyWindowWithPNGSnapshots::set_swapchain(Anvil::Swapchain* in_swapchain_ptr) { - anvil_assert(m_swapchain_ptr.lock() == nullptr); + anvil_assert(m_swapchain_ptr == nullptr); m_swapchain_ptr = in_swapchain_ptr; } @@ -370,15 +396,13 @@ void Anvil::DummyWindowWithPNGSnapshots::set_swapchain(std::weak_ptr swapchain_locked_ptr(m_swapchain_ptr); - - anvil_assert(swapchain_locked_ptr != nullptr); + anvil_assert(m_swapchain_ptr != nullptr); /* Retrieve image contents */ - const uint32_t swapchain_image_index = swapchain_locked_ptr->get_last_acquired_image_index(); - std::shared_ptr swapchain_image_ptr = swapchain_locked_ptr->get_image (swapchain_image_index); - std::shared_ptr swapchain_image_raw_data_ptr = get_swapchain_image_raw_r8g8b8a8_unorm_data (swapchain_image_ptr); - uint32_t swapchain_image_size[2]; + const uint32_t swapchain_image_index = m_swapchain_ptr->get_last_acquired_image_index(); + Anvil::Image* swapchain_image_ptr = m_swapchain_ptr->get_image (swapchain_image_index); + auto swapchain_image_raw_data_ptr = get_swapchain_image_raw_r8g8b8a8_unorm_data (swapchain_image_ptr); + uint32_t swapchain_image_size[2]; swapchain_image_ptr->get_image_mipmap_size(0, /* n_mipmap */ swapchain_image_size + 0, diff --git a/src/misc/event_create_info.cpp b/src/misc/event_create_info.cpp new file mode 100644 index 00000000..1c29c6f2 --- /dev/null +++ b/src/misc/event_create_info.cpp @@ -0,0 +1,43 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. 
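// The snapshot path above records the blit and the image-to-buffer copy into a primary command
// buffer and then submits it through the SubmitInfo-based Queue::submit() entrypoint, replacing the
// removed submit_command_buffer() helper. A minimal sketch of that submission pattern, assuming
// valid queue and command-buffer wrappers; the helper function itself is illustrative.
#include "wrappers/command_buffer.h"
#include "wrappers/queue.h"

void submit_and_wait(Anvil::Queue*                queue_ptr,
                     Anvil::PrimaryCommandBuffer* cmd_buffer_ptr)
{
    Anvil::CommandBufferBase* cmd_buffer_raw_ptr = cmd_buffer_ptr;

    /* create_execute() with should_block == true reproduces the blocking behavior of the
     * old submit_command_buffer(..., true) call. */
    queue_ptr->submit(Anvil::SubmitInfo::create_execute(&cmd_buffer_raw_ptr,
                                                        1,    /* in_n_cmd_buffers */
                                                        true) /* should_block     */
                     );
}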
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#include "misc/event_create_info.h" + +Anvil::EventCreateInfoUniquePtr Anvil::EventCreateInfo::create(const Anvil::BaseDevice* in_device_ptr) +{ + Anvil::EventCreateInfoUniquePtr result_ptr(nullptr, + std::default_delete() ); + + result_ptr.reset( + new Anvil::EventCreateInfo(in_device_ptr, + Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE) + ); + + return result_ptr; +} + +Anvil::EventCreateInfo::EventCreateInfo(const Anvil::BaseDevice* in_device_ptr, + MTSafety in_mt_safety) + :m_device_ptr(in_device_ptr), + m_mt_safety (in_mt_safety) +{ + /* Stub */ +} \ No newline at end of file diff --git a/src/misc/external_handle.cpp b/src/misc/external_handle.cpp new file mode 100644 index 00000000..07b3720f --- /dev/null +++ b/src/misc/external_handle.cpp @@ -0,0 +1,64 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
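// EventCreateInfo above only records the owning device and the MT-safety setting; no Vulkan object
// is created at this point. A minimal sketch of building the create-info; handing it to an event
// wrapper (e.g. Anvil::Event::create()) is inferred from the create-info pattern used throughout
// this change and is not shown in the diff itself.
#include "misc/event_create_info.h"

Anvil::EventCreateInfoUniquePtr make_event_create_info(const Anvil::BaseDevice* device_ptr)
{
    /* create() hard-codes MTSafety::INHERIT_FROM_PARENT_DEVICE, as seen above. */
    auto create_info_ptr = Anvil::EventCreateInfo::create(device_ptr);

    /* The create-info would subsequently be handed to the event wrapper
     * (e.g. Anvil::Event::create(std::move(create_info_ptr)), assumed). */
    return create_info_ptr;
}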
+// +#include "misc/external_handle.h" + +#if !defined(_WIN32) + #include +#endif + +Anvil::ExternalHandle::ExternalHandle(const ExternalHandleType& in_handle, + const bool& in_close_at_destruction_time) + :m_close_at_destruction_time(in_close_at_destruction_time), + m_handle (in_handle) +{ + /* Stub */ +} + +Anvil::ExternalHandleUniquePtr Anvil::ExternalHandle::create(const ExternalHandleType& in_handle, + const bool& in_close_at_destruction_time) +{ + Anvil::ExternalHandleUniquePtr result_ptr(nullptr, + std::default_delete() ); + + result_ptr.reset( + new ExternalHandle(in_handle, + in_close_at_destruction_time) + ); + + return result_ptr; +} + +Anvil::ExternalHandle::~ExternalHandle() +{ + if (m_close_at_destruction_time) + { + #if defined(_WIN32) + { + ::CloseHandle(m_handle); + } + #else + { + close(m_handle); + } + #endif + } +} \ No newline at end of file diff --git a/src/misc/fence_create_info.cpp b/src/misc/fence_create_info.cpp new file mode 100644 index 00000000..75195e41 --- /dev/null +++ b/src/misc/fence_create_info.cpp @@ -0,0 +1,52 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#include "misc/fence_create_info.h" + +Anvil::FenceCreateInfoUniquePtr Anvil::FenceCreateInfo::create(const Anvil::BaseDevice* in_device_ptr, + bool in_create_signalled) +{ + Anvil::FenceCreateInfoUniquePtr result_ptr(nullptr, + std::default_delete() ); + + result_ptr.reset( + new Anvil::FenceCreateInfo(in_device_ptr, + in_create_signalled, + Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE) + ); + + return result_ptr; +} + +Anvil::FenceCreateInfo::FenceCreateInfo(const Anvil::BaseDevice* in_device_ptr, + bool in_create_signalled, + MTSafety in_mt_safety) + :m_create_signalled (in_create_signalled), + m_device_ptr (in_device_ptr), +#if defined(_WIN32) + m_exportable_nt_handle_info_specified (false), + m_exportable_nt_handle_info_security_attributes_specified(false), +#endif + m_exportable_external_fence_handle_types (Anvil::ExternalFenceHandleTypeFlagBits::NONE), + m_mt_safety (in_mt_safety) +{ + /* Stub */ +} \ No newline at end of file diff --git a/src/misc/formats.cpp b/src/misc/formats.cpp index bca190e0..b77f239b 100644 --- a/src/misc/formats.cpp +++ b/src/misc/formats.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
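// ExternalHandle above is a small RAII wrapper around an exported OS handle: when
// in_close_at_destruction_time is true, the destructor calls ::CloseHandle() on Windows and close()
// elsewhere. A minimal sketch of adopting such a handle; the function name and usage are illustrative.
#include "misc/external_handle.h"

void adopt_exported_payload(const Anvil::ExternalHandleType& raw_handle)
{
    /* Ownership is transferred: with in_close_at_destruction_time == true the wrapper's destructor
     * releases the OS handle exactly once. */
    Anvil::ExternalHandleUniquePtr handle_ptr = Anvil::ExternalHandle::create(raw_handle,
                                                                              true); /* in_close_at_destruction_time */

    /* ... hand handle_ptr to whatever consumes the payload ... */
}   /* raw OS handle is released here */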
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -22,383 +22,2242 @@ #include "misc/debug.h" #include "misc/formats.h" +#include "misc/types.h" +#include +#include -static struct FormatInfo +static const struct FormatInfo { - VkFormat format; + Anvil::Format format; const char* name; Anvil::ComponentLayout component_layout; uint8_t component_bits[4]; /* if [0] == [1] == [2] == [3] == 0, the format is a block format. */ Anvil::FormatType format_type; -} formats[] = -{ - /* format | name | component_layout | component_bits[0] | component_bits[1] | component_bits[2] | component_bits[3] | format_type */ - {VK_FORMAT_UNDEFINED, "VK_FORMAT_UNDEFINED", Anvil::COMPONENT_LAYOUT_UNKNOWN, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNKNOWN}, - {VK_FORMAT_R4G4_UNORM_PACK8, "VK_FORMAT_R4G4_UNORM_PACK8", Anvil::COMPONENT_LAYOUT_RG, 4, 4, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_R4G4B4A4_UNORM_PACK16, "VK_FORMAT_R4G4B4A4_UNORM_PACK16", Anvil::COMPONENT_LAYOUT_RGBA, 4, 4, 4, 4, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_B4G4R4A4_UNORM_PACK16, "VK_FORMAT_B4G4R4A4_UNORM_PACK16", Anvil::COMPONENT_LAYOUT_BGRA, 4, 4, 4, 4, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_R5G6B5_UNORM_PACK16, "VK_FORMAT_R5G6B5_UNORM_PACK16", Anvil::COMPONENT_LAYOUT_RGB, 5, 6, 5, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_B5G6R5_UNORM_PACK16, "VK_FORMAT_B5G6R5_UNORM_PACK16", Anvil::COMPONENT_LAYOUT_BGR, 5, 6, 5, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_R5G5B5A1_UNORM_PACK16, "VK_FORMAT_R5G5B5A1_UNORM_PACK16", Anvil::COMPONENT_LAYOUT_RGBA, 5, 5, 5, 1, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_B5G5R5A1_UNORM_PACK16, "VK_FORMAT_B5G5R5A1_UNORM_PACK16", Anvil::COMPONENT_LAYOUT_BGRA, 5, 5, 5, 1, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_A1R5G5B5_UNORM_PACK16, "VK_FORMAT_A1R5G5B5_UNORM_PACK16", Anvil::COMPONENT_LAYOUT_ARGB, 1, 5, 5, 5, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_R8_UNORM, "VK_FORMAT_R8_UNORM", Anvil::COMPONENT_LAYOUT_R, 8, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_R8_SNORM, "VK_FORMAT_R8_SNORM", Anvil::COMPONENT_LAYOUT_R, 8, 0, 0, 0, Anvil::FORMAT_TYPE_SNORM}, - {VK_FORMAT_R8_USCALED, "VK_FORMAT_R8_USCALED", Anvil::COMPONENT_LAYOUT_R, 8, 0, 0, 0, Anvil::FORMAT_TYPE_USCALED}, - {VK_FORMAT_R8_SSCALED, "VK_FORMAT_R8_SSCALED", Anvil::COMPONENT_LAYOUT_R, 8, 0, 0, 0, Anvil::FORMAT_TYPE_SSCALED}, - {VK_FORMAT_R8_UINT, "VK_FORMAT_R8_UINT", Anvil::COMPONENT_LAYOUT_R, 8, 0, 0, 0, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_R8_SINT, "VK_FORMAT_R8_SINT", Anvil::COMPONENT_LAYOUT_R, 8, 0, 0, 0, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_R8_SRGB, "VK_FORMAT_R8_SRGB", Anvil::COMPONENT_LAYOUT_R, 8, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_R8G8_UNORM, "VK_FORMAT_R8G8_UNORM", Anvil::COMPONENT_LAYOUT_RG, 8, 8, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_R8G8_SNORM, "VK_FORMAT_R8G8_SNORM", Anvil::COMPONENT_LAYOUT_RG, 8, 8, 0, 0, Anvil::FORMAT_TYPE_SNORM}, - {VK_FORMAT_R8G8_USCALED, "VK_FORMAT_R8G8_USCALED", Anvil::COMPONENT_LAYOUT_RG, 8, 8, 0, 0, Anvil::FORMAT_TYPE_USCALED}, - {VK_FORMAT_R8G8_SSCALED, "VK_FORMAT_R8G8_SSCALED", Anvil::COMPONENT_LAYOUT_RG, 8, 8, 0, 0, Anvil::FORMAT_TYPE_SSCALED}, - {VK_FORMAT_R8G8_UINT, "VK_FORMAT_R8G8_UINT", Anvil::COMPONENT_LAYOUT_RG, 8, 8, 0, 0, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_R8G8_SINT, "VK_FORMAT_R8G8_SINT", Anvil::COMPONENT_LAYOUT_RG, 8, 8, 0, 0, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_R8G8_SRGB, "VK_FORMAT_R8G8_SRGB", Anvil::COMPONENT_LAYOUT_RG, 8, 8, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - 
{VK_FORMAT_R8G8B8_UNORM, "VK_FORMAT_R8G8B8_UNORM", Anvil::COMPONENT_LAYOUT_RGB, 8, 8, 8, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_R8G8B8_SNORM, "VK_FORMAT_R8G8B8_SNORM", Anvil::COMPONENT_LAYOUT_RGB, 8, 8, 8, 0, Anvil::FORMAT_TYPE_SNORM}, - {VK_FORMAT_R8G8B8_USCALED, "VK_FORMAT_R8G8B8_USCALED", Anvil::COMPONENT_LAYOUT_RGB, 8, 8, 8, 0, Anvil::FORMAT_TYPE_USCALED}, - {VK_FORMAT_R8G8B8_SSCALED, "VK_FORMAT_R8G8B8_SSCALED", Anvil::COMPONENT_LAYOUT_RGB, 8, 8, 8, 0, Anvil::FORMAT_TYPE_SSCALED}, - {VK_FORMAT_R8G8B8_UINT, "VK_FORMAT_R8G8B8_UINT", Anvil::COMPONENT_LAYOUT_RGB, 8, 8, 8, 0, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_R8G8B8_SINT, "VK_FORMAT_R8G8B8_SINT", Anvil::COMPONENT_LAYOUT_RGB, 8, 8, 8, 0, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_R8G8B8_SRGB, "VK_FORMAT_R8G8B8_SRGB", Anvil::COMPONENT_LAYOUT_RGB, 8, 8, 8, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_B8G8R8_UNORM, "VK_FORMAT_B8G8R8_UNORM", Anvil::COMPONENT_LAYOUT_BGR, 8, 8, 8, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_B8G8R8_SNORM, "VK_FORMAT_B8G8R8_SNORM", Anvil::COMPONENT_LAYOUT_BGR, 8, 8, 8, 0, Anvil::FORMAT_TYPE_SNORM}, - {VK_FORMAT_B8G8R8_USCALED, "VK_FORMAT_B8G8R8_USCALED", Anvil::COMPONENT_LAYOUT_BGR, 8, 8, 8, 0, Anvil::FORMAT_TYPE_USCALED}, - {VK_FORMAT_B8G8R8_SSCALED, "VK_FORMAT_B8G8R8_SSCALED", Anvil::COMPONENT_LAYOUT_BGR, 8, 8, 8, 0, Anvil::FORMAT_TYPE_SSCALED}, - {VK_FORMAT_B8G8R8_UINT, "VK_FORMAT_B8G8R8_UINT", Anvil::COMPONENT_LAYOUT_BGR, 8, 8, 8, 0, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_B8G8R8_SINT, "VK_FORMAT_B8G8R8_SINT", Anvil::COMPONENT_LAYOUT_BGR, 8, 8, 8, 0, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_B8G8R8_SRGB, "VK_FORMAT_B8G8R8_SRGB", Anvil::COMPONENT_LAYOUT_BGR, 8, 8, 8, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_R8G8B8A8_UNORM, "VK_FORMAT_R8G8B8A8_UNORM", Anvil::COMPONENT_LAYOUT_RGBA, 8, 8, 8, 8, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_R8G8B8A8_SNORM, "VK_FORMAT_R8G8B8A8_SNORM", Anvil::COMPONENT_LAYOUT_RGBA, 8, 8, 8, 8, Anvil::FORMAT_TYPE_SNORM}, - {VK_FORMAT_R8G8B8A8_USCALED, "VK_FORMAT_R8G8B8A8_USCALED", Anvil::COMPONENT_LAYOUT_RGBA, 8, 8, 8, 8, Anvil::FORMAT_TYPE_USCALED}, - {VK_FORMAT_R8G8B8A8_SSCALED, "VK_FORMAT_R8G8B8A8_SSCALED", Anvil::COMPONENT_LAYOUT_RGBA, 8, 8, 8, 8, Anvil::FORMAT_TYPE_SSCALED}, - {VK_FORMAT_R8G8B8A8_UINT, "VK_FORMAT_R8G8B8A8_UINT", Anvil::COMPONENT_LAYOUT_RGBA, 8, 8, 8, 8, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_R8G8B8A8_SINT, "VK_FORMAT_R8G8B8A8_SINT", Anvil::COMPONENT_LAYOUT_RGBA, 8, 8, 8, 8, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_R8G8B8A8_SRGB, "VK_FORMAT_R8G8B8A8_SRGB", Anvil::COMPONENT_LAYOUT_RGBA, 8, 8, 8, 8, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_B8G8R8A8_UNORM, "VK_FORMAT_B8G8R8A8_UNORM", Anvil::COMPONENT_LAYOUT_BGRA, 8, 8, 8, 8, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_B8G8R8A8_SNORM, "VK_FORMAT_B8G8R8A8_SNORM", Anvil::COMPONENT_LAYOUT_BGRA, 8, 8, 8, 8, Anvil::FORMAT_TYPE_SNORM}, - {VK_FORMAT_B8G8R8A8_USCALED, "VK_FORMAT_B8G8R8A8_USCALED", Anvil::COMPONENT_LAYOUT_BGRA, 8, 8, 8, 8, Anvil::FORMAT_TYPE_USCALED}, - {VK_FORMAT_B8G8R8A8_SSCALED, "VK_FORMAT_B8G8R8A8_SSCALED", Anvil::COMPONENT_LAYOUT_BGRA, 8, 8, 8, 8, Anvil::FORMAT_TYPE_SSCALED}, - {VK_FORMAT_B8G8R8A8_UINT, "VK_FORMAT_B8G8R8A8_UINT", Anvil::COMPONENT_LAYOUT_BGRA, 8, 8, 8, 8, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_B8G8R8A8_SINT, "VK_FORMAT_B8G8R8A8_SINT", Anvil::COMPONENT_LAYOUT_BGRA, 8, 8, 8, 8, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_B8G8R8A8_SRGB, "VK_FORMAT_B8G8R8A8_SRGB", Anvil::COMPONENT_LAYOUT_BGRA, 8, 8, 8, 8, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_A8B8G8R8_UNORM_PACK32, "VK_FORMAT_A8B8G8R8_UNORM_PACK32", 
Anvil::COMPONENT_LAYOUT_ABGR, 8, 8, 8, 8, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_A8B8G8R8_SNORM_PACK32, "VK_FORMAT_A8B8G8R8_SNORM_PACK32", Anvil::COMPONENT_LAYOUT_ABGR, 8, 8, 8, 8, Anvil::FORMAT_TYPE_SNORM}, - {VK_FORMAT_A8B8G8R8_USCALED_PACK32, "VK_FORMAT_A8B8G8R8_USCALED_PACK32", Anvil::COMPONENT_LAYOUT_ABGR, 8, 8, 8, 8, Anvil::FORMAT_TYPE_USCALED}, - {VK_FORMAT_A8B8G8R8_SSCALED_PACK32, "VK_FORMAT_A8B8G8R8_SSCALED_PACK32", Anvil::COMPONENT_LAYOUT_ABGR, 8, 8, 8, 8, Anvil::FORMAT_TYPE_SSCALED}, - {VK_FORMAT_A8B8G8R8_UINT_PACK32, "VK_FORMAT_A8B8G8R8_UINT_PACK32", Anvil::COMPONENT_LAYOUT_ABGR, 8, 8, 8, 8, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_A8B8G8R8_SINT_PACK32, "VK_FORMAT_A8B8G8R8_SINT_PACK32", Anvil::COMPONENT_LAYOUT_ABGR, 8, 8, 8, 8, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_A8B8G8R8_SRGB_PACK32, "VK_FORMAT_A8B8G8R8_SRGB_PACK32", Anvil::COMPONENT_LAYOUT_ABGR, 8, 8, 8, 8, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_A2R10G10B10_UNORM_PACK32, "VK_FORMAT_A2R10G10B10_UNORM_PACK32", Anvil::COMPONENT_LAYOUT_ARGB, 2, 10, 10, 10, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_A2R10G10B10_SNORM_PACK32, "VK_FORMAT_A2R10G10B10_SNORM_PACK32", Anvil::COMPONENT_LAYOUT_ARGB, 2, 10, 10, 10, Anvil::FORMAT_TYPE_SNORM}, - {VK_FORMAT_A2R10G10B10_USCALED_PACK32, "VK_FORMAT_A2R10G10B10_USCALED_PACK32", Anvil::COMPONENT_LAYOUT_ARGB, 2, 10, 10, 10, Anvil::FORMAT_TYPE_USCALED}, - {VK_FORMAT_A2R10G10B10_SSCALED_PACK32, "VK_FORMAT_A2R10G10B10_SSCALED_PACK32", Anvil::COMPONENT_LAYOUT_ARGB, 2, 10, 10, 10, Anvil::FORMAT_TYPE_SSCALED}, - {VK_FORMAT_A2R10G10B10_UINT_PACK32, "VK_FORMAT_A2R10G10B10_UINT_PACK32", Anvil::COMPONENT_LAYOUT_ARGB, 2, 10, 10, 10, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_A2R10G10B10_SINT_PACK32, "VK_FORMAT_A2R10G10B10_SINT_PACK32", Anvil::COMPONENT_LAYOUT_ARGB, 2, 10, 10, 10, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_A2B10G10R10_UNORM_PACK32, "VK_FORMAT_A2B10G10R10_UNORM_PACK32", Anvil::COMPONENT_LAYOUT_ABGR, 2, 10, 10, 10, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_A2B10G10R10_SNORM_PACK32, "VK_FORMAT_A2B10G10R10_SNORM_PACK32", Anvil::COMPONENT_LAYOUT_ABGR, 2, 10, 10, 10, Anvil::FORMAT_TYPE_SNORM}, - {VK_FORMAT_A2B10G10R10_USCALED_PACK32, "VK_FORMAT_A2B10G10R10_USCALED_PACK32", Anvil::COMPONENT_LAYOUT_ABGR, 2, 10, 10, 10, Anvil::FORMAT_TYPE_USCALED}, - {VK_FORMAT_A2B10G10R10_SSCALED_PACK32, "VK_FORMAT_A2B10G10R10_SSCALED_PACK32", Anvil::COMPONENT_LAYOUT_ABGR, 2, 10, 10, 10, Anvil::FORMAT_TYPE_SSCALED}, - {VK_FORMAT_A2B10G10R10_UINT_PACK32, "VK_FORMAT_A2B10G10R10_UINT_PACK32", Anvil::COMPONENT_LAYOUT_ABGR, 2, 10, 10, 10, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_A2B10G10R10_SINT_PACK32, "VK_FORMAT_A2B10G10R10_SINT_PACK32", Anvil::COMPONENT_LAYOUT_ABGR, 2, 10, 10, 10, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_R16_UNORM, "VK_FORMAT_R16_UNORM", Anvil::COMPONENT_LAYOUT_R, 16, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_R16_SNORM, "VK_FORMAT_R16_SNORM", Anvil::COMPONENT_LAYOUT_R, 16, 0, 0, 0, Anvil::FORMAT_TYPE_SNORM}, - {VK_FORMAT_R16_USCALED, "VK_FORMAT_R16_USCALED", Anvil::COMPONENT_LAYOUT_R, 16, 0, 0, 0, Anvil::FORMAT_TYPE_USCALED}, - {VK_FORMAT_R16_SSCALED, "VK_FORMAT_R16_SSCALED", Anvil::COMPONENT_LAYOUT_R, 16, 0, 0, 0, Anvil::FORMAT_TYPE_SSCALED}, - {VK_FORMAT_R16_UINT, "VK_FORMAT_R16_UINT", Anvil::COMPONENT_LAYOUT_R, 16, 0, 0, 0, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_R16_SINT, "VK_FORMAT_R16_SINT", Anvil::COMPONENT_LAYOUT_R, 16, 0, 0, 0, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_R16_SFLOAT, "VK_FORMAT_R16_SFLOAT", Anvil::COMPONENT_LAYOUT_R, 16, 0, 0, 0, Anvil::FORMAT_TYPE_SFLOAT}, - {VK_FORMAT_R16G16_UNORM, 
"VK_FORMAT_R16G16_UNORM", Anvil::COMPONENT_LAYOUT_RG, 16, 16, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_R16G16_SNORM, "VK_FORMAT_R16G16_SNORM", Anvil::COMPONENT_LAYOUT_RG, 16, 16, 0, 0, Anvil::FORMAT_TYPE_SNORM}, - {VK_FORMAT_R16G16_USCALED, "VK_FORMAT_R16G16_USCALED", Anvil::COMPONENT_LAYOUT_RG, 16, 16, 0, 0, Anvil::FORMAT_TYPE_USCALED}, - {VK_FORMAT_R16G16_SSCALED, "VK_FORMAT_R16G16_SSCALED", Anvil::COMPONENT_LAYOUT_RG, 16, 16, 0, 0, Anvil::FORMAT_TYPE_SSCALED}, - {VK_FORMAT_R16G16_UINT, "VK_FORMAT_R16G16_UINT", Anvil::COMPONENT_LAYOUT_RG, 16, 16, 0, 0, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_R16G16_SINT, "VK_FORMAT_R16G16_SINT", Anvil::COMPONENT_LAYOUT_RG, 16, 16, 0, 0, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_R16G16_SFLOAT, "VK_FORMAT_R16G16_SFLOAT", Anvil::COMPONENT_LAYOUT_RG, 16, 16, 0, 0, Anvil::FORMAT_TYPE_SFLOAT}, - {VK_FORMAT_R16G16B16_UNORM, "VK_FORMAT_R16G16B16_UNORM", Anvil::COMPONENT_LAYOUT_RGB, 16, 16, 16, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_R16G16B16_SNORM, "VK_FORMAT_R16G16B16_SNORM", Anvil::COMPONENT_LAYOUT_RGB, 16, 16, 16, 0, Anvil::FORMAT_TYPE_SNORM}, - {VK_FORMAT_R16G16B16_USCALED, "VK_FORMAT_R16G16B16_USCALED", Anvil::COMPONENT_LAYOUT_RGB, 16, 16, 16, 0, Anvil::FORMAT_TYPE_USCALED}, - {VK_FORMAT_R16G16B16_SSCALED, "VK_FORMAT_R16G16B16_SSCALED", Anvil::COMPONENT_LAYOUT_RGB, 16, 16, 16, 0, Anvil::FORMAT_TYPE_SSCALED}, - {VK_FORMAT_R16G16B16_UINT, "VK_FORMAT_R16G16B16_UINT", Anvil::COMPONENT_LAYOUT_RGB, 16, 16, 16, 0, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_R16G16B16_SINT, "VK_FORMAT_R16G16B16_SINT", Anvil::COMPONENT_LAYOUT_RGB, 16, 16, 16, 0, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_R16G16B16_SFLOAT, "VK_FORMAT_R16G16B16_SFLOAT", Anvil::COMPONENT_LAYOUT_RGB, 16, 16, 16, 0, Anvil::FORMAT_TYPE_SFLOAT}, - {VK_FORMAT_R16G16B16A16_UNORM, "VK_FORMAT_R16G16B16A16_UNORM", Anvil::COMPONENT_LAYOUT_RGBA, 16, 16, 16, 16, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_R16G16B16A16_SNORM, "VK_FORMAT_R16G16B16A16_SNORM", Anvil::COMPONENT_LAYOUT_RGBA, 16, 16, 16, 16, Anvil::FORMAT_TYPE_SNORM}, - {VK_FORMAT_R16G16B16A16_USCALED, "VK_FORMAT_R16G16B16A16_USCALED", Anvil::COMPONENT_LAYOUT_RGBA, 16, 16, 16, 16, Anvil::FORMAT_TYPE_USCALED}, - {VK_FORMAT_R16G16B16A16_SSCALED, "VK_FORMAT_R16G16B16A16_SSCALED", Anvil::COMPONENT_LAYOUT_RGBA, 16, 16, 16, 16, Anvil::FORMAT_TYPE_SSCALED}, - {VK_FORMAT_R16G16B16A16_UINT, "VK_FORMAT_R16G16B16A16_UINT", Anvil::COMPONENT_LAYOUT_RGBA, 16, 16, 16, 16, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_R16G16B16A16_SINT, "VK_FORMAT_R16G16B16A16_SINT", Anvil::COMPONENT_LAYOUT_RGBA, 16, 16, 16, 16, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_R16G16B16A16_SFLOAT, "VK_FORMAT_R16G16B16A16_SFLOAT", Anvil::COMPONENT_LAYOUT_RGBA, 16, 16, 16, 16, Anvil::FORMAT_TYPE_SFLOAT}, - {VK_FORMAT_R32_UINT, "VK_FORMAT_R32_UINT", Anvil::COMPONENT_LAYOUT_R, 32, 0, 0, 0, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_R32_SINT, "VK_FORMAT_R32_SINT", Anvil::COMPONENT_LAYOUT_R, 32, 0, 0, 0, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_R32_SFLOAT, "VK_FORMAT_R32_SFLOAT", Anvil::COMPONENT_LAYOUT_R, 32, 0, 0, 0, Anvil::FORMAT_TYPE_SFLOAT}, - {VK_FORMAT_R32G32_UINT, "VK_FORMAT_R32G32_UINT", Anvil::COMPONENT_LAYOUT_RG, 32, 32, 0, 0, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_R32G32_SINT, "VK_FORMAT_R32G32_SINT", Anvil::COMPONENT_LAYOUT_RG, 32, 32, 0, 0, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_R32G32_SFLOAT, "VK_FORMAT_R32G32_SFLOAT", Anvil::COMPONENT_LAYOUT_RG, 32, 32, 0, 0, Anvil::FORMAT_TYPE_SFLOAT}, - {VK_FORMAT_R32G32B32_UINT, "VK_FORMAT_R32G32B32_UINT", Anvil::COMPONENT_LAYOUT_RGB, 32, 32, 32, 0, 
Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_R32G32B32_SINT, "VK_FORMAT_R32G32B32_SINT", Anvil::COMPONENT_LAYOUT_RGB, 32, 32, 32, 0, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_R32G32B32_SFLOAT, "VK_FORMAT_R32G32B32_SFLOAT", Anvil::COMPONENT_LAYOUT_RGB, 32, 32, 32, 0, Anvil::FORMAT_TYPE_SFLOAT}, - {VK_FORMAT_R32G32B32A32_UINT, "VK_FORMAT_R32G32B32A32_UINT", Anvil::COMPONENT_LAYOUT_RGBA, 32, 32, 32, 32, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_R32G32B32A32_SINT, "VK_FORMAT_R32G32B32A32_SINT", Anvil::COMPONENT_LAYOUT_RGBA, 32, 32, 32, 32, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_R32G32B32A32_SFLOAT, "VK_FORMAT_R32G32B32A32_SFLOAT", Anvil::COMPONENT_LAYOUT_RGBA, 32, 32, 32, 32, Anvil::FORMAT_TYPE_SFLOAT}, - {VK_FORMAT_R64_UINT, "VK_FORMAT_R64_UINT", Anvil::COMPONENT_LAYOUT_R, 64, 0, 0, 0, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_R64_SINT, "VK_FORMAT_R64_SINT", Anvil::COMPONENT_LAYOUT_R, 64, 0, 0, 0, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_R64_SFLOAT, "VK_FORMAT_R64_SFLOAT", Anvil::COMPONENT_LAYOUT_R, 64, 0, 0, 0, Anvil::FORMAT_TYPE_SFLOAT}, - {VK_FORMAT_R64G64_UINT, "VK_FORMAT_R64G64_UINT", Anvil::COMPONENT_LAYOUT_RG, 64, 64, 0, 0, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_R64G64_SINT, "VK_FORMAT_R64G64_SINT", Anvil::COMPONENT_LAYOUT_RG, 64, 64, 0, 0, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_R64G64_SFLOAT, "VK_FORMAT_R64G64_SFLOAT", Anvil::COMPONENT_LAYOUT_RG, 64, 64, 0, 0, Anvil::FORMAT_TYPE_SFLOAT}, - {VK_FORMAT_R64G64B64_UINT, "VK_FORMAT_R64G64B64_UINT", Anvil::COMPONENT_LAYOUT_RGB, 64, 64, 64, 0, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_R64G64B64_SINT, "VK_FORMAT_R64G64B64_SINT", Anvil::COMPONENT_LAYOUT_RGB, 64, 64, 64, 0, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_R64G64B64_SFLOAT, "VK_FORMAT_R64G64B64_SFLOAT", Anvil::COMPONENT_LAYOUT_RGB, 64, 64, 64, 0, Anvil::FORMAT_TYPE_SFLOAT}, - {VK_FORMAT_R64G64B64A64_UINT, "VK_FORMAT_R64G64B64A64_UINT", Anvil::COMPONENT_LAYOUT_RGBA, 64, 64, 64, 64, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_R64G64B64A64_SINT, "VK_FORMAT_R64G64B64A64_SINT", Anvil::COMPONENT_LAYOUT_RGBA, 64, 64, 64, 64, Anvil::FORMAT_TYPE_SINT}, - {VK_FORMAT_R64G64B64A64_SFLOAT, "VK_FORMAT_R64G64B64A64_SFLOAT", Anvil::COMPONENT_LAYOUT_RGBA, 64, 64, 64, 64, Anvil::FORMAT_TYPE_SFLOAT}, - {VK_FORMAT_B10G11R11_UFLOAT_PACK32, "VK_FORMAT_B10G11R11_UFLOAT_PACK32", Anvil::COMPONENT_LAYOUT_BGR, 10, 11, 11, 0, Anvil::FORMAT_TYPE_UFLOAT}, - {VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, "VK_FORMAT_E5B9G9R9_UFLOAT_PACK32", Anvil::COMPONENT_LAYOUT_EBGR, 5, 9, 9, 9, Anvil::FORMAT_TYPE_UFLOAT}, - {VK_FORMAT_D16_UNORM, "VK_FORMAT_D16_UNORM", Anvil::COMPONENT_LAYOUT_D, 16, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_X8_D24_UNORM_PACK32, "VK_FORMAT_X8_D24_UNORM_PACK32", Anvil::COMPONENT_LAYOUT_XD, 8, 24, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_D32_SFLOAT, "VK_FORMAT_D32_SFLOAT", Anvil::COMPONENT_LAYOUT_D, 32, 0, 0, 0, Anvil::FORMAT_TYPE_SFLOAT}, - {VK_FORMAT_S8_UINT, "VK_FORMAT_S8_UINT", Anvil::COMPONENT_LAYOUT_S, 8, 0, 0, 0, Anvil::FORMAT_TYPE_UINT}, - {VK_FORMAT_D16_UNORM_S8_UINT, "VK_FORMAT_D16_UNORM_S8_UINT", Anvil::COMPONENT_LAYOUT_DS, 16, 8, 0, 0, Anvil::FORMAT_TYPE_UNORM_UINT}, - {VK_FORMAT_D24_UNORM_S8_UINT, "VK_FORMAT_D24_UNORM_S8_UINT", Anvil::COMPONENT_LAYOUT_DS, 24, 8, 0, 0, Anvil::FORMAT_TYPE_UNORM_UINT}, - {VK_FORMAT_D32_SFLOAT_S8_UINT, "VK_FORMAT_D32_SFLOAT_S8_UINT", Anvil::COMPONENT_LAYOUT_DS, 32, 8, 0, 0, Anvil::FORMAT_TYPE_SFLOAT_UINT}, - {VK_FORMAT_BC1_RGB_UNORM_BLOCK, "VK_FORMAT_BC1_RGB_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGB, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_BC1_RGB_SRGB_BLOCK, 
"VK_FORMAT_BC1_RGB_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGB, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_BC1_RGBA_UNORM_BLOCK, "VK_FORMAT_BC1_RGBA_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_BC1_RGBA_SRGB_BLOCK, "VK_FORMAT_BC1_RGBA_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_BC2_UNORM_BLOCK, "VK_FORMAT_BC2_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_BC2_SRGB_BLOCK, "VK_FORMAT_BC2_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_BC3_UNORM_BLOCK, "VK_FORMAT_BC3_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_BC3_SRGB_BLOCK, "VK_FORMAT_BC3_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_BC4_UNORM_BLOCK, "VK_FORMAT_BC4_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_R, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_BC4_SNORM_BLOCK, "VK_FORMAT_BC4_SNORM_BLOCK", Anvil::COMPONENT_LAYOUT_R, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SNORM}, - {VK_FORMAT_BC5_UNORM_BLOCK, "VK_FORMAT_BC5_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RG, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_BC5_SNORM_BLOCK, "VK_FORMAT_BC5_SNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RG, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SNORM}, - {VK_FORMAT_BC6H_UFLOAT_BLOCK, "VK_FORMAT_BC6H_UFLOAT_BLOCK", Anvil::COMPONENT_LAYOUT_RGB, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UFLOAT}, - {VK_FORMAT_BC6H_SFLOAT_BLOCK, "VK_FORMAT_BC6H_SFLOAT_BLOCK", Anvil::COMPONENT_LAYOUT_RGB, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SFLOAT}, - {VK_FORMAT_BC7_UNORM_BLOCK, "VK_FORMAT_BC7_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_BC7_SRGB_BLOCK, "VK_FORMAT_BC7_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, "VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGB, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK, "VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGB, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, "VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, "VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, "VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, "VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_EAC_R11_UNORM_BLOCK, "VK_FORMAT_EAC_R11_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_R, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_EAC_R11_SNORM_BLOCK, "VK_FORMAT_EAC_R11_SNORM_BLOCK", Anvil::COMPONENT_LAYOUT_R, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SNORM}, - {VK_FORMAT_EAC_R11G11_UNORM_BLOCK, "VK_FORMAT_EAC_R11G11_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RG, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_EAC_R11G11_SNORM_BLOCK, "VK_FORMAT_EAC_R11G11_SNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RG, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SNORM}, - {VK_FORMAT_ASTC_4x4_UNORM_BLOCK, "VK_FORMAT_ASTC_4x4_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_ASTC_4x4_SRGB_BLOCK, "VK_FORMAT_ASTC_4x4_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 
0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_ASTC_5x4_UNORM_BLOCK, "VK_FORMAT_ASTC_5x4_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_ASTC_5x4_SRGB_BLOCK, "VK_FORMAT_ASTC_5x4_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_ASTC_5x5_UNORM_BLOCK, "VK_FORMAT_ASTC_5x5_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_ASTC_5x5_SRGB_BLOCK, "VK_FORMAT_ASTC_5x5_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_ASTC_6x5_UNORM_BLOCK, "VK_FORMAT_ASTC_6x5_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_ASTC_6x5_SRGB_BLOCK, "VK_FORMAT_ASTC_6x5_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_ASTC_6x6_UNORM_BLOCK, "VK_FORMAT_ASTC_6x6_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_ASTC_6x6_SRGB_BLOCK, "VK_FORMAT_ASTC_6x6_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_ASTC_8x5_UNORM_BLOCK, "VK_FORMAT_ASTC_8x5_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_ASTC_8x5_SRGB_BLOCK, "VK_FORMAT_ASTC_8x5_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_ASTC_8x6_UNORM_BLOCK, "VK_FORMAT_ASTC_8x6_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_ASTC_8x6_SRGB_BLOCK, "VK_FORMAT_ASTC_8x6_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_ASTC_8x8_UNORM_BLOCK, "VK_FORMAT_ASTC_8x8_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_ASTC_8x8_SRGB_BLOCK, "VK_FORMAT_ASTC_8x8_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_ASTC_10x5_UNORM_BLOCK, "VK_FORMAT_ASTC_10x5_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_ASTC_10x5_SRGB_BLOCK, "VK_FORMAT_ASTC_10x5_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_ASTC_10x6_UNORM_BLOCK, "VK_FORMAT_ASTC_10x6_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_ASTC_10x6_SRGB_BLOCK, "VK_FORMAT_ASTC_10x6_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_ASTC_10x8_UNORM_BLOCK, "VK_FORMAT_ASTC_10x8_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_ASTC_10x8_SRGB_BLOCK, "VK_FORMAT_ASTC_10x8_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_ASTC_10x10_UNORM_BLOCK, "VK_FORMAT_ASTC_10x10_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_ASTC_10x10_SRGB_BLOCK, "VK_FORMAT_ASTC_10x10_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_ASTC_12x10_UNORM_BLOCK, "VK_FORMAT_ASTC_12x10_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_ASTC_12x10_SRGB_BLOCK, "VK_FORMAT_ASTC_12x10_SRGB_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, - {VK_FORMAT_ASTC_12x12_UNORM_BLOCK, "VK_FORMAT_ASTC_12x12_UNORM_BLOCK", Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_UNORM}, - {VK_FORMAT_ASTC_12x12_SRGB_BLOCK, "VK_FORMAT_ASTC_12x12_SRGB_BLOCK", 
Anvil::COMPONENT_LAYOUT_RGBA, 0, 0, 0, 0, Anvil::FORMAT_TYPE_SRGB}, + bool is_packed; +} g_formats[] = +{ + /* format | name | component_layout | component_bits[0] | component_bits[1] | component_bits[2] | component_bits[3] | format_type | is_packed? */ + {Anvil::Format::UNKNOWN, "VK_FORMAT_UNDEFINED", Anvil::ComponentLayout::UNKNOWN, 0, 0, 0, 0, Anvil::FormatType::UNKNOWN, false}, + {Anvil::Format::R4G4_UNORM_PACK8, "VK_FORMAT_R4G4_UNORM_PACK8", Anvil::ComponentLayout::RG, 4, 4, 0, 0, Anvil::FormatType::UNORM, true}, + {Anvil::Format::R4G4B4A4_UNORM_PACK16, "VK_FORMAT_R4G4B4A4_UNORM_PACK16", Anvil::ComponentLayout::RGBA, 4, 4, 4, 4, Anvil::FormatType::UNORM, true}, + {Anvil::Format::B4G4R4A4_UNORM_PACK16, "VK_FORMAT_B4G4R4A4_UNORM_PACK16", Anvil::ComponentLayout::BGRA, 4, 4, 4, 4, Anvil::FormatType::UNORM, true}, + {Anvil::Format::R5G6B5_UNORM_PACK16, "VK_FORMAT_R5G6B5_UNORM_PACK16", Anvil::ComponentLayout::RGB, 5, 6, 5, 0, Anvil::FormatType::UNORM, true}, + {Anvil::Format::B5G6R5_UNORM_PACK16, "VK_FORMAT_B5G6R5_UNORM_PACK16", Anvil::ComponentLayout::BGR, 5, 6, 5, 0, Anvil::FormatType::UNORM, true}, + {Anvil::Format::R5G5B5A1_UNORM_PACK16, "VK_FORMAT_R5G5B5A1_UNORM_PACK16", Anvil::ComponentLayout::RGBA, 5, 5, 5, 1, Anvil::FormatType::UNORM, true}, + {Anvil::Format::B5G5R5A1_UNORM_PACK16, "VK_FORMAT_B5G5R5A1_UNORM_PACK16", Anvil::ComponentLayout::BGRA, 5, 5, 5, 1, Anvil::FormatType::UNORM, true}, + {Anvil::Format::A1R5G5B5_UNORM_PACK16, "VK_FORMAT_A1R5G5B5_UNORM_PACK16", Anvil::ComponentLayout::ARGB, 1, 5, 5, 5, Anvil::FormatType::UNORM, true}, + {Anvil::Format::R8_UNORM, "VK_FORMAT_R8_UNORM", Anvil::ComponentLayout::R, 8, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::R8_SNORM, "VK_FORMAT_R8_SNORM", Anvil::ComponentLayout::R, 8, 0, 0, 0, Anvil::FormatType::SNORM, false}, + {Anvil::Format::R8_USCALED, "VK_FORMAT_R8_USCALED", Anvil::ComponentLayout::R, 8, 0, 0, 0, Anvil::FormatType::USCALED, false}, + {Anvil::Format::R8_SSCALED, "VK_FORMAT_R8_SSCALED", Anvil::ComponentLayout::R, 8, 0, 0, 0, Anvil::FormatType::SSCALED, false}, + {Anvil::Format::R8_UINT, "VK_FORMAT_R8_UINT", Anvil::ComponentLayout::R, 8, 0, 0, 0, Anvil::FormatType::UINT, false}, + {Anvil::Format::R8_SINT, "VK_FORMAT_R8_SINT", Anvil::ComponentLayout::R, 8, 0, 0, 0, Anvil::FormatType::SINT, false}, + {Anvil::Format::R8_SRGB, "VK_FORMAT_R8_SRGB", Anvil::ComponentLayout::R, 8, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::R8G8_UNORM, "VK_FORMAT_R8G8_UNORM", Anvil::ComponentLayout::RG, 8, 8, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::R8G8_SNORM, "VK_FORMAT_R8G8_SNORM", Anvil::ComponentLayout::RG, 8, 8, 0, 0, Anvil::FormatType::SNORM, false}, + {Anvil::Format::R8G8_USCALED, "VK_FORMAT_R8G8_USCALED", Anvil::ComponentLayout::RG, 8, 8, 0, 0, Anvil::FormatType::USCALED, false}, + {Anvil::Format::R8G8_SSCALED, "VK_FORMAT_R8G8_SSCALED", Anvil::ComponentLayout::RG, 8, 8, 0, 0, Anvil::FormatType::SSCALED, false}, + {Anvil::Format::R8G8_UINT, "VK_FORMAT_R8G8_UINT", Anvil::ComponentLayout::RG, 8, 8, 0, 0, Anvil::FormatType::UINT, false}, + {Anvil::Format::R8G8_SINT, "VK_FORMAT_R8G8_SINT", Anvil::ComponentLayout::RG, 8, 8, 0, 0, Anvil::FormatType::SINT, false}, + {Anvil::Format::R8G8_SRGB, "VK_FORMAT_R8G8_SRGB", Anvil::ComponentLayout::RG, 8, 8, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::R8G8B8_UNORM, "VK_FORMAT_R8G8B8_UNORM", Anvil::ComponentLayout::RGB, 8, 8, 8, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::R8G8B8_SNORM, "VK_FORMAT_R8G8B8_SNORM", Anvil::ComponentLayout::RGB, 
8, 8, 8, 0, Anvil::FormatType::SNORM, false}, + {Anvil::Format::R8G8B8_USCALED, "VK_FORMAT_R8G8B8_USCALED", Anvil::ComponentLayout::RGB, 8, 8, 8, 0, Anvil::FormatType::USCALED, false}, + {Anvil::Format::R8G8B8_SSCALED, "VK_FORMAT_R8G8B8_SSCALED", Anvil::ComponentLayout::RGB, 8, 8, 8, 0, Anvil::FormatType::SSCALED, false}, + {Anvil::Format::R8G8B8_UINT, "VK_FORMAT_R8G8B8_UINT", Anvil::ComponentLayout::RGB, 8, 8, 8, 0, Anvil::FormatType::UINT, false}, + {Anvil::Format::R8G8B8_SINT, "VK_FORMAT_R8G8B8_SINT", Anvil::ComponentLayout::RGB, 8, 8, 8, 0, Anvil::FormatType::SINT, false}, + {Anvil::Format::R8G8B8_SRGB, "VK_FORMAT_R8G8B8_SRGB", Anvil::ComponentLayout::RGB, 8, 8, 8, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::B8G8R8_UNORM, "VK_FORMAT_B8G8R8_UNORM", Anvil::ComponentLayout::BGR, 8, 8, 8, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::B8G8R8_SNORM, "VK_FORMAT_B8G8R8_SNORM", Anvil::ComponentLayout::BGR, 8, 8, 8, 0, Anvil::FormatType::SNORM, false}, + {Anvil::Format::B8G8R8_USCALED, "VK_FORMAT_B8G8R8_USCALED", Anvil::ComponentLayout::BGR, 8, 8, 8, 0, Anvil::FormatType::USCALED, false}, + {Anvil::Format::B8G8R8_SSCALED, "VK_FORMAT_B8G8R8_SSCALED", Anvil::ComponentLayout::BGR, 8, 8, 8, 0, Anvil::FormatType::SSCALED, false}, + {Anvil::Format::B8G8R8_UINT, "VK_FORMAT_B8G8R8_UINT", Anvil::ComponentLayout::BGR, 8, 8, 8, 0, Anvil::FormatType::UINT, false}, + {Anvil::Format::B8G8R8_SINT, "VK_FORMAT_B8G8R8_SINT", Anvil::ComponentLayout::BGR, 8, 8, 8, 0, Anvil::FormatType::SINT, false}, + {Anvil::Format::B8G8R8_SRGB, "VK_FORMAT_B8G8R8_SRGB", Anvil::ComponentLayout::BGR, 8, 8, 8, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::R8G8B8A8_UNORM, "VK_FORMAT_R8G8B8A8_UNORM", Anvil::ComponentLayout::RGBA, 8, 8, 8, 8, Anvil::FormatType::UNORM, false}, + {Anvil::Format::R8G8B8A8_SNORM, "VK_FORMAT_R8G8B8A8_SNORM", Anvil::ComponentLayout::RGBA, 8, 8, 8, 8, Anvil::FormatType::SNORM, false}, + {Anvil::Format::R8G8B8A8_USCALED, "VK_FORMAT_R8G8B8A8_USCALED", Anvil::ComponentLayout::RGBA, 8, 8, 8, 8, Anvil::FormatType::USCALED, false}, + {Anvil::Format::R8G8B8A8_SSCALED, "VK_FORMAT_R8G8B8A8_SSCALED", Anvil::ComponentLayout::RGBA, 8, 8, 8, 8, Anvil::FormatType::SSCALED, false}, + {Anvil::Format::R8G8B8A8_UINT, "VK_FORMAT_R8G8B8A8_UINT", Anvil::ComponentLayout::RGBA, 8, 8, 8, 8, Anvil::FormatType::UINT, false}, + {Anvil::Format::R8G8B8A8_SINT, "VK_FORMAT_R8G8B8A8_SINT", Anvil::ComponentLayout::RGBA, 8, 8, 8, 8, Anvil::FormatType::SINT, false}, + {Anvil::Format::R8G8B8A8_SRGB, "VK_FORMAT_R8G8B8A8_SRGB", Anvil::ComponentLayout::RGBA, 8, 8, 8, 8, Anvil::FormatType::SRGB, false}, + {Anvil::Format::B8G8R8A8_UNORM, "VK_FORMAT_B8G8R8A8_UNORM", Anvil::ComponentLayout::BGRA, 8, 8, 8, 8, Anvil::FormatType::UNORM, false}, + {Anvil::Format::B8G8R8A8_SNORM, "VK_FORMAT_B8G8R8A8_SNORM", Anvil::ComponentLayout::BGRA, 8, 8, 8, 8, Anvil::FormatType::SNORM, false}, + {Anvil::Format::B8G8R8A8_USCALED, "VK_FORMAT_B8G8R8A8_USCALED", Anvil::ComponentLayout::BGRA, 8, 8, 8, 8, Anvil::FormatType::USCALED, false}, + {Anvil::Format::B8G8R8A8_SSCALED, "VK_FORMAT_B8G8R8A8_SSCALED", Anvil::ComponentLayout::BGRA, 8, 8, 8, 8, Anvil::FormatType::SSCALED, false}, + {Anvil::Format::B8G8R8A8_UINT, "VK_FORMAT_B8G8R8A8_UINT", Anvil::ComponentLayout::BGRA, 8, 8, 8, 8, Anvil::FormatType::UINT, false}, + {Anvil::Format::B8G8R8A8_SINT, "VK_FORMAT_B8G8R8A8_SINT", Anvil::ComponentLayout::BGRA, 8, 8, 8, 8, Anvil::FormatType::SINT, false}, + {Anvil::Format::B8G8R8A8_SRGB, "VK_FORMAT_B8G8R8A8_SRGB", Anvil::ComponentLayout::BGRA, 8, 8, 8, 8, 
Anvil::FormatType::SRGB, false}, + {Anvil::Format::A8B8G8R8_UNORM_PACK32, "VK_FORMAT_A8B8G8R8_UNORM_PACK32", Anvil::ComponentLayout::ABGR, 8, 8, 8, 8, Anvil::FormatType::UNORM, true}, + {Anvil::Format::A8B8G8R8_SNORM_PACK32, "VK_FORMAT_A8B8G8R8_SNORM_PACK32", Anvil::ComponentLayout::ABGR, 8, 8, 8, 8, Anvil::FormatType::SNORM, true}, + {Anvil::Format::A8B8G8R8_USCALED_PACK32, "VK_FORMAT_A8B8G8R8_USCALED_PACK32", Anvil::ComponentLayout::ABGR, 8, 8, 8, 8, Anvil::FormatType::USCALED, true}, + {Anvil::Format::A8B8G8R8_SSCALED_PACK32, "VK_FORMAT_A8B8G8R8_SSCALED_PACK32", Anvil::ComponentLayout::ABGR, 8, 8, 8, 8, Anvil::FormatType::SSCALED, true}, + {Anvil::Format::A8B8G8R8_UINT_PACK32, "VK_FORMAT_A8B8G8R8_UINT_PACK32", Anvil::ComponentLayout::ABGR, 8, 8, 8, 8, Anvil::FormatType::UINT, true}, + {Anvil::Format::A8B8G8R8_SINT_PACK32, "VK_FORMAT_A8B8G8R8_SINT_PACK32", Anvil::ComponentLayout::ABGR, 8, 8, 8, 8, Anvil::FormatType::SINT, true}, + {Anvil::Format::A8B8G8R8_SRGB_PACK32, "VK_FORMAT_A8B8G8R8_SRGB_PACK32", Anvil::ComponentLayout::ABGR, 8, 8, 8, 8, Anvil::FormatType::SRGB, true}, + {Anvil::Format::A2R10G10B10_UNORM_PACK32, "VK_FORMAT_A2R10G10B10_UNORM_PACK32", Anvil::ComponentLayout::ARGB, 2, 10, 10, 10, Anvil::FormatType::UNORM, true}, + {Anvil::Format::A2R10G10B10_SNORM_PACK32, "VK_FORMAT_A2R10G10B10_SNORM_PACK32", Anvil::ComponentLayout::ARGB, 2, 10, 10, 10, Anvil::FormatType::SNORM, true}, + {Anvil::Format::A2R10G10B10_USCALED_PACK32, "VK_FORMAT_A2R10G10B10_USCALED_PACK32", Anvil::ComponentLayout::ARGB, 2, 10, 10, 10, Anvil::FormatType::USCALED, true}, + {Anvil::Format::A2R10G10B10_SSCALED_PACK32, "VK_FORMAT_A2R10G10B10_SSCALED_PACK32", Anvil::ComponentLayout::ARGB, 2, 10, 10, 10, Anvil::FormatType::SSCALED, true}, + {Anvil::Format::A2R10G10B10_UINT_PACK32, "VK_FORMAT_A2R10G10B10_UINT_PACK32", Anvil::ComponentLayout::ARGB, 2, 10, 10, 10, Anvil::FormatType::UINT, true}, + {Anvil::Format::A2R10G10B10_SINT_PACK32, "VK_FORMAT_A2R10G10B10_SINT_PACK32", Anvil::ComponentLayout::ARGB, 2, 10, 10, 10, Anvil::FormatType::SINT, true}, + {Anvil::Format::A2B10G10R10_UNORM_PACK32, "VK_FORMAT_A2B10G10R10_UNORM_PACK32", Anvil::ComponentLayout::ABGR, 2, 10, 10, 10, Anvil::FormatType::UNORM, true}, + {Anvil::Format::A2B10G10R10_SNORM_PACK32, "VK_FORMAT_A2B10G10R10_SNORM_PACK32", Anvil::ComponentLayout::ABGR, 2, 10, 10, 10, Anvil::FormatType::SNORM, true}, + {Anvil::Format::A2B10G10R10_USCALED_PACK32, "VK_FORMAT_A2B10G10R10_USCALED_PACK32", Anvil::ComponentLayout::ABGR, 2, 10, 10, 10, Anvil::FormatType::USCALED, true}, + {Anvil::Format::A2B10G10R10_SSCALED_PACK32, "VK_FORMAT_A2B10G10R10_SSCALED_PACK32", Anvil::ComponentLayout::ABGR, 2, 10, 10, 10, Anvil::FormatType::SSCALED, true}, + {Anvil::Format::A2B10G10R10_UINT_PACK32, "VK_FORMAT_A2B10G10R10_UINT_PACK32", Anvil::ComponentLayout::ABGR, 2, 10, 10, 10, Anvil::FormatType::UINT, true}, + {Anvil::Format::A2B10G10R10_SINT_PACK32, "VK_FORMAT_A2B10G10R10_SINT_PACK32", Anvil::ComponentLayout::ABGR, 2, 10, 10, 10, Anvil::FormatType::SINT, true}, + {Anvil::Format::R16_UNORM, "VK_FORMAT_R16_UNORM", Anvil::ComponentLayout::R, 16, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::R16_SNORM, "VK_FORMAT_R16_SNORM", Anvil::ComponentLayout::R, 16, 0, 0, 0, Anvil::FormatType::SNORM, false}, + {Anvil::Format::R16_USCALED, "VK_FORMAT_R16_USCALED", Anvil::ComponentLayout::R, 16, 0, 0, 0, Anvil::FormatType::USCALED, false}, + {Anvil::Format::R16_SSCALED, "VK_FORMAT_R16_SSCALED", Anvil::ComponentLayout::R, 16, 0, 0, 0, Anvil::FormatType::SSCALED, false}, + 
{Anvil::Format::R16_UINT, "VK_FORMAT_R16_UINT", Anvil::ComponentLayout::R, 16, 0, 0, 0, Anvil::FormatType::UINT, false}, + {Anvil::Format::R16_SINT, "VK_FORMAT_R16_SINT", Anvil::ComponentLayout::R, 16, 0, 0, 0, Anvil::FormatType::SINT, false}, + {Anvil::Format::R16_SFLOAT, "VK_FORMAT_R16_SFLOAT", Anvil::ComponentLayout::R, 16, 0, 0, 0, Anvil::FormatType::SFLOAT, false}, + {Anvil::Format::R16G16_UNORM, "VK_FORMAT_R16G16_UNORM", Anvil::ComponentLayout::RG, 16, 16, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::R16G16_SNORM, "VK_FORMAT_R16G16_SNORM", Anvil::ComponentLayout::RG, 16, 16, 0, 0, Anvil::FormatType::SNORM, false}, + {Anvil::Format::R16G16_USCALED, "VK_FORMAT_R16G16_USCALED", Anvil::ComponentLayout::RG, 16, 16, 0, 0, Anvil::FormatType::USCALED, false}, + {Anvil::Format::R16G16_SSCALED, "VK_FORMAT_R16G16_SSCALED", Anvil::ComponentLayout::RG, 16, 16, 0, 0, Anvil::FormatType::SSCALED, false}, + {Anvil::Format::R16G16_UINT, "VK_FORMAT_R16G16_UINT", Anvil::ComponentLayout::RG, 16, 16, 0, 0, Anvil::FormatType::UINT, false}, + {Anvil::Format::R16G16_SINT, "VK_FORMAT_R16G16_SINT", Anvil::ComponentLayout::RG, 16, 16, 0, 0, Anvil::FormatType::SINT, false}, + {Anvil::Format::R16G16_SFLOAT, "VK_FORMAT_R16G16_SFLOAT", Anvil::ComponentLayout::RG, 16, 16, 0, 0, Anvil::FormatType::SFLOAT, false}, + {Anvil::Format::R16G16B16_UNORM, "VK_FORMAT_R16G16B16_UNORM", Anvil::ComponentLayout::RGB, 16, 16, 16, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::R16G16B16_SNORM, "VK_FORMAT_R16G16B16_SNORM", Anvil::ComponentLayout::RGB, 16, 16, 16, 0, Anvil::FormatType::SNORM, false}, + {Anvil::Format::R16G16B16_USCALED, "VK_FORMAT_R16G16B16_USCALED", Anvil::ComponentLayout::RGB, 16, 16, 16, 0, Anvil::FormatType::USCALED, false}, + {Anvil::Format::R16G16B16_SSCALED, "VK_FORMAT_R16G16B16_SSCALED", Anvil::ComponentLayout::RGB, 16, 16, 16, 0, Anvil::FormatType::SSCALED, false}, + {Anvil::Format::R16G16B16_UINT, "VK_FORMAT_R16G16B16_UINT", Anvil::ComponentLayout::RGB, 16, 16, 16, 0, Anvil::FormatType::UINT, false}, + {Anvil::Format::R16G16B16_SINT, "VK_FORMAT_R16G16B16_SINT", Anvil::ComponentLayout::RGB, 16, 16, 16, 0, Anvil::FormatType::SINT, false}, + {Anvil::Format::R16G16B16_SFLOAT, "VK_FORMAT_R16G16B16_SFLOAT", Anvil::ComponentLayout::RGB, 16, 16, 16, 0, Anvil::FormatType::SFLOAT, false}, + {Anvil::Format::R16G16B16A16_UNORM, "VK_FORMAT_R16G16B16A16_UNORM", Anvil::ComponentLayout::RGBA, 16, 16, 16, 16, Anvil::FormatType::UNORM, false}, + {Anvil::Format::R16G16B16A16_SNORM, "VK_FORMAT_R16G16B16A16_SNORM", Anvil::ComponentLayout::RGBA, 16, 16, 16, 16, Anvil::FormatType::SNORM, false}, + {Anvil::Format::R16G16B16A16_USCALED, "VK_FORMAT_R16G16B16A16_USCALED", Anvil::ComponentLayout::RGBA, 16, 16, 16, 16, Anvil::FormatType::USCALED, false}, + {Anvil::Format::R16G16B16A16_SSCALED, "VK_FORMAT_R16G16B16A16_SSCALED", Anvil::ComponentLayout::RGBA, 16, 16, 16, 16, Anvil::FormatType::SSCALED, false}, + {Anvil::Format::R16G16B16A16_UINT, "VK_FORMAT_R16G16B16A16_UINT", Anvil::ComponentLayout::RGBA, 16, 16, 16, 16, Anvil::FormatType::UINT, false}, + {Anvil::Format::R16G16B16A16_SINT, "VK_FORMAT_R16G16B16A16_SINT", Anvil::ComponentLayout::RGBA, 16, 16, 16, 16, Anvil::FormatType::SINT, false}, + {Anvil::Format::R16G16B16A16_SFLOAT, "VK_FORMAT_R16G16B16A16_SFLOAT", Anvil::ComponentLayout::RGBA, 16, 16, 16, 16, Anvil::FormatType::SFLOAT, false}, + {Anvil::Format::R32_UINT, "VK_FORMAT_R32_UINT", Anvil::ComponentLayout::R, 32, 0, 0, 0, Anvil::FormatType::UINT, false}, + {Anvil::Format::R32_SINT, "VK_FORMAT_R32_SINT", 
Anvil::ComponentLayout::R, 32, 0, 0, 0, Anvil::FormatType::SINT, false}, + {Anvil::Format::R32_SFLOAT, "VK_FORMAT_R32_SFLOAT", Anvil::ComponentLayout::R, 32, 0, 0, 0, Anvil::FormatType::SFLOAT, false}, + {Anvil::Format::R32G32_UINT, "VK_FORMAT_R32G32_UINT", Anvil::ComponentLayout::RG, 32, 32, 0, 0, Anvil::FormatType::UINT, false}, + {Anvil::Format::R32G32_SINT, "VK_FORMAT_R32G32_SINT", Anvil::ComponentLayout::RG, 32, 32, 0, 0, Anvil::FormatType::SINT, false}, + {Anvil::Format::R32G32_SFLOAT, "VK_FORMAT_R32G32_SFLOAT", Anvil::ComponentLayout::RG, 32, 32, 0, 0, Anvil::FormatType::SFLOAT, false}, + {Anvil::Format::R32G32B32_UINT, "VK_FORMAT_R32G32B32_UINT", Anvil::ComponentLayout::RGB, 32, 32, 32, 0, Anvil::FormatType::UINT, false}, + {Anvil::Format::R32G32B32_SINT, "VK_FORMAT_R32G32B32_SINT", Anvil::ComponentLayout::RGB, 32, 32, 32, 0, Anvil::FormatType::SINT, false}, + {Anvil::Format::R32G32B32_SFLOAT, "VK_FORMAT_R32G32B32_SFLOAT", Anvil::ComponentLayout::RGB, 32, 32, 32, 0, Anvil::FormatType::SFLOAT, false}, + {Anvil::Format::R32G32B32A32_UINT, "VK_FORMAT_R32G32B32A32_UINT", Anvil::ComponentLayout::RGBA, 32, 32, 32, 32, Anvil::FormatType::UINT, false}, + {Anvil::Format::R32G32B32A32_SINT, "VK_FORMAT_R32G32B32A32_SINT", Anvil::ComponentLayout::RGBA, 32, 32, 32, 32, Anvil::FormatType::SINT, false}, + {Anvil::Format::R32G32B32A32_SFLOAT, "VK_FORMAT_R32G32B32A32_SFLOAT", Anvil::ComponentLayout::RGBA, 32, 32, 32, 32, Anvil::FormatType::SFLOAT, false}, + {Anvil::Format::R64_UINT, "VK_FORMAT_R64_UINT", Anvil::ComponentLayout::R, 64, 0, 0, 0, Anvil::FormatType::UINT, false}, + {Anvil::Format::R64_SINT, "VK_FORMAT_R64_SINT", Anvil::ComponentLayout::R, 64, 0, 0, 0, Anvil::FormatType::SINT, false}, + {Anvil::Format::R64_SFLOAT, "VK_FORMAT_R64_SFLOAT", Anvil::ComponentLayout::R, 64, 0, 0, 0, Anvil::FormatType::SFLOAT, false}, + {Anvil::Format::R64G64_UINT, "VK_FORMAT_R64G64_UINT", Anvil::ComponentLayout::RG, 64, 64, 0, 0, Anvil::FormatType::UINT, false}, + {Anvil::Format::R64G64_SINT, "VK_FORMAT_R64G64_SINT", Anvil::ComponentLayout::RG, 64, 64, 0, 0, Anvil::FormatType::SINT, false}, + {Anvil::Format::R64G64_SFLOAT, "VK_FORMAT_R64G64_SFLOAT", Anvil::ComponentLayout::RG, 64, 64, 0, 0, Anvil::FormatType::SFLOAT, false}, + {Anvil::Format::R64G64B64_UINT, "VK_FORMAT_R64G64B64_UINT", Anvil::ComponentLayout::RGB, 64, 64, 64, 0, Anvil::FormatType::UINT, false}, + {Anvil::Format::R64G64B64_SINT, "VK_FORMAT_R64G64B64_SINT", Anvil::ComponentLayout::RGB, 64, 64, 64, 0, Anvil::FormatType::SINT, false}, + {Anvil::Format::R64G64B64_SFLOAT, "VK_FORMAT_R64G64B64_SFLOAT", Anvil::ComponentLayout::RGB, 64, 64, 64, 0, Anvil::FormatType::SFLOAT, false}, + {Anvil::Format::R64G64B64A64_UINT, "VK_FORMAT_R64G64B64A64_UINT", Anvil::ComponentLayout::RGBA, 64, 64, 64, 64, Anvil::FormatType::UINT, false}, + {Anvil::Format::R64G64B64A64_SINT, "VK_FORMAT_R64G64B64A64_SINT", Anvil::ComponentLayout::RGBA, 64, 64, 64, 64, Anvil::FormatType::SINT, false}, + {Anvil::Format::R64G64B64A64_SFLOAT, "VK_FORMAT_R64G64B64A64_SFLOAT", Anvil::ComponentLayout::RGBA, 64, 64, 64, 64, Anvil::FormatType::SFLOAT, false}, + {Anvil::Format::B10G11R11_UFLOAT_PACK32, "VK_FORMAT_B10G11R11_UFLOAT_PACK32", Anvil::ComponentLayout::BGR, 10, 11, 11, 0, Anvil::FormatType::UFLOAT, true}, + {Anvil::Format::E5B9G9R9_UFLOAT_PACK32, "VK_FORMAT_E5B9G9R9_UFLOAT_PACK32", Anvil::ComponentLayout::EBGR, 5, 9, 9, 9, Anvil::FormatType::UFLOAT, true}, + {Anvil::Format::D16_UNORM, "VK_FORMAT_D16_UNORM", Anvil::ComponentLayout::D, 16, 0, 0, 0, Anvil::FormatType::UNORM, false}, + 
{Anvil::Format::X8_D24_UNORM_PACK32, "VK_FORMAT_X8_D24_UNORM_PACK32", Anvil::ComponentLayout::XD, 8, 24, 0, 0, Anvil::FormatType::UNORM, true}, + {Anvil::Format::D32_SFLOAT, "VK_FORMAT_D32_SFLOAT", Anvil::ComponentLayout::D, 32, 0, 0, 0, Anvil::FormatType::SFLOAT, false}, + {Anvil::Format::S8_UINT, "VK_FORMAT_S8_UINT", Anvil::ComponentLayout::S, 8, 0, 0, 0, Anvil::FormatType::UINT, false}, + {Anvil::Format::D16_UNORM_S8_UINT, "VK_FORMAT_D16_UNORM_S8_UINT", Anvil::ComponentLayout::DS, 16, 8, 0, 0, Anvil::FormatType::UNORM_UINT, false}, + {Anvil::Format::D24_UNORM_S8_UINT, "VK_FORMAT_D24_UNORM_S8_UINT", Anvil::ComponentLayout::DS, 24, 8, 0, 0, Anvil::FormatType::UNORM_UINT, false}, + {Anvil::Format::D32_SFLOAT_S8_UINT, "VK_FORMAT_D32_SFLOAT_S8_UINT", Anvil::ComponentLayout::DS, 32, 8, 0, 0, Anvil::FormatType::SFLOAT_UINT, false}, + {Anvil::Format::BC1_RGB_UNORM_BLOCK, "VK_FORMAT_BC1_RGB_UNORM_BLOCK", Anvil::ComponentLayout::RGB, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::BC1_RGB_SRGB_BLOCK, "VK_FORMAT_BC1_RGB_SRGB_BLOCK", Anvil::ComponentLayout::RGB, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::BC1_RGBA_UNORM_BLOCK, "VK_FORMAT_BC1_RGBA_UNORM_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::BC1_RGBA_SRGB_BLOCK, "VK_FORMAT_BC1_RGBA_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::BC2_UNORM_BLOCK, "VK_FORMAT_BC2_UNORM_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::BC2_SRGB_BLOCK, "VK_FORMAT_BC2_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::BC3_UNORM_BLOCK, "VK_FORMAT_BC3_UNORM_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::BC3_SRGB_BLOCK, "VK_FORMAT_BC3_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::BC4_UNORM_BLOCK, "VK_FORMAT_BC4_UNORM_BLOCK", Anvil::ComponentLayout::R, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::BC4_SNORM_BLOCK, "VK_FORMAT_BC4_SNORM_BLOCK", Anvil::ComponentLayout::R, 0, 0, 0, 0, Anvil::FormatType::SNORM, false}, + {Anvil::Format::BC5_UNORM_BLOCK, "VK_FORMAT_BC5_UNORM_BLOCK", Anvil::ComponentLayout::RG, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::BC5_SNORM_BLOCK, "VK_FORMAT_BC5_SNORM_BLOCK", Anvil::ComponentLayout::RG, 0, 0, 0, 0, Anvil::FormatType::SNORM, false}, + {Anvil::Format::BC6H_UFLOAT_BLOCK, "VK_FORMAT_BC6H_UFLOAT_BLOCK", Anvil::ComponentLayout::RGB, 0, 0, 0, 0, Anvil::FormatType::UFLOAT, false}, + {Anvil::Format::BC6H_SFLOAT_BLOCK, "VK_FORMAT_BC6H_SFLOAT_BLOCK", Anvil::ComponentLayout::RGB, 0, 0, 0, 0, Anvil::FormatType::SFLOAT, false}, + {Anvil::Format::BC7_UNORM_BLOCK, "VK_FORMAT_BC7_UNORM_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::BC7_SRGB_BLOCK, "VK_FORMAT_BC7_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::ETC2_R8G8B8_UNORM_BLOCK, "VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK", Anvil::ComponentLayout::RGB, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::ETC2_R8G8B8_SRGB_BLOCK, "VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK", Anvil::ComponentLayout::RGB, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::ETC2_R8G8B8A1_UNORM_BLOCK, "VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, 
Anvil::FormatType::UNORM, false}, + {Anvil::Format::ETC2_R8G8B8A1_SRGB_BLOCK, "VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::ETC2_R8G8B8A8_UNORM_BLOCK, "VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::ETC2_R8G8B8A8_SRGB_BLOCK, "VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::EAC_R11_UNORM_BLOCK, "VK_FORMAT_EAC_R11_UNORM_BLOCK", Anvil::ComponentLayout::R, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::EAC_R11_SNORM_BLOCK, "VK_FORMAT_EAC_R11_SNORM_BLOCK", Anvil::ComponentLayout::R, 0, 0, 0, 0, Anvil::FormatType::SNORM, false}, + {Anvil::Format::EAC_R11G11_UNORM_BLOCK, "VK_FORMAT_EAC_R11G11_UNORM_BLOCK", Anvil::ComponentLayout::RG, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::EAC_R11G11_SNORM_BLOCK, "VK_FORMAT_EAC_R11G11_SNORM_BLOCK", Anvil::ComponentLayout::RG, 0, 0, 0, 0, Anvil::FormatType::SNORM, false}, + {Anvil::Format::ASTC_4x4_UNORM_BLOCK, "VK_FORMAT_ASTC_4x4_UNORM_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::ASTC_4x4_SRGB_BLOCK, "VK_FORMAT_ASTC_4x4_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::ASTC_5x4_UNORM_BLOCK, "VK_FORMAT_ASTC_5x4_UNORM_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::ASTC_5x4_SRGB_BLOCK, "VK_FORMAT_ASTC_5x4_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::ASTC_5x5_UNORM_BLOCK, "VK_FORMAT_ASTC_5x5_UNORM_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::ASTC_5x5_SRGB_BLOCK, "VK_FORMAT_ASTC_5x5_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::ASTC_6x5_UNORM_BLOCK, "VK_FORMAT_ASTC_6x5_UNORM_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::ASTC_6x5_SRGB_BLOCK, "VK_FORMAT_ASTC_6x5_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::ASTC_6x6_UNORM_BLOCK, "VK_FORMAT_ASTC_6x6_UNORM_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::ASTC_6x6_SRGB_BLOCK, "VK_FORMAT_ASTC_6x6_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::ASTC_8x5_UNORM_BLOCK, "VK_FORMAT_ASTC_8x5_UNORM_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::ASTC_8x5_SRGB_BLOCK, "VK_FORMAT_ASTC_8x5_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::ASTC_8x6_UNORM_BLOCK, "VK_FORMAT_ASTC_8x6_UNORM_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::ASTC_8x6_SRGB_BLOCK, "VK_FORMAT_ASTC_8x6_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::ASTC_8x8_UNORM_BLOCK, "VK_FORMAT_ASTC_8x8_UNORM_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::ASTC_8x8_SRGB_BLOCK, "VK_FORMAT_ASTC_8x8_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::ASTC_10x5_UNORM_BLOCK, "VK_FORMAT_ASTC_10x5_UNORM_BLOCK", 
Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::ASTC_10x5_SRGB_BLOCK, "VK_FORMAT_ASTC_10x5_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::ASTC_10x6_UNORM_BLOCK, "VK_FORMAT_ASTC_10x6_UNORM_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::ASTC_10x6_SRGB_BLOCK, "VK_FORMAT_ASTC_10x6_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::ASTC_10x8_UNORM_BLOCK, "VK_FORMAT_ASTC_10x8_UNORM_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::ASTC_10x8_SRGB_BLOCK, "VK_FORMAT_ASTC_10x8_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::ASTC_10x10_UNORM_BLOCK, "VK_FORMAT_ASTC_10x10_UNORM_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::ASTC_10x10_SRGB_BLOCK, "VK_FORMAT_ASTC_10x10_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::ASTC_12x10_UNORM_BLOCK, "VK_FORMAT_ASTC_12x10_UNORM_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::ASTC_12x10_SRGB_BLOCK, "VK_FORMAT_ASTC_12x10_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false}, + {Anvil::Format::ASTC_12x12_UNORM_BLOCK, "VK_FORMAT_ASTC_12x12_UNORM_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::UNORM, false}, + {Anvil::Format::ASTC_12x12_SRGB_BLOCK, "VK_FORMAT_ASTC_12x12_SRGB_BLOCK", Anvil::ComponentLayout::RGBA, 0, 0, 0, 0, Anvil::FormatType::SRGB, false} +}; + +struct SubresourceLayoutInfo +{ + Anvil::Format compatible_singleplanar_format; + Anvil::ComponentLayout component_layout; + uint8_t component_bits_used [4]; + uint8_t component_bits_unused[4]; + + /* Default Constructor */ + SubresourceLayoutInfo() + { + compatible_singleplanar_format = Anvil::Format::UNKNOWN; + component_layout = Anvil::ComponentLayout::UNKNOWN; + component_bits_used [0] = component_bits_used [1] = component_bits_used [2] = component_bits_used [3] = 0; + component_bits_unused[0] = component_bits_unused[1] = component_bits_unused[2] = component_bits_unused[3] = 0; + } + + SubresourceLayoutInfo(const Anvil::Format& in_compatible_singleplanar_format, + Anvil::ComponentLayout in_component_layout, + uint8_t in_component0_bits, + uint8_t in_component1_bits, + uint8_t in_component2_bits, + uint8_t in_component3_bits) + { + compatible_singleplanar_format = in_compatible_singleplanar_format; + component_layout = in_component_layout; + component_bits_used[0] = in_component0_bits; + component_bits_used[1] = in_component1_bits; + component_bits_used[2] = in_component2_bits; + component_bits_used[3] = in_component3_bits; + + component_bits_unused[0] = component_bits_unused[1] = component_bits_unused[2] = component_bits_unused[3] = 0; + } + + /* Constructor for packed format */ + SubresourceLayoutInfo(const Anvil::Format& in_compatible_singleplanar_format, + Anvil::ComponentLayout in_component_layout, + uint8_t in_component0_bits_used, + uint8_t in_component1_bits_used, + uint8_t in_component2_bits_used, + uint8_t in_component3_bits_used, + uint8_t in_n_bits_per_component) + { + compatible_singleplanar_format = in_compatible_singleplanar_format; + component_layout = in_component_layout; + component_bits_used[0] = in_component0_bits_used; + component_bits_used[1] = 
in_component1_bits_used;
+        component_bits_used[2] = in_component2_bits_used;
+        component_bits_used[3] = in_component3_bits_used;
+
+        component_bits_unused[0] = component_bits_unused[1] = component_bits_unused[2] = component_bits_unused[3] = 0;
+
+        if (in_component0_bits_used != 0)
+        {
+            anvil_assert(in_component0_bits_used <= in_n_bits_per_component);
+
+            component_bits_unused[0] = in_n_bits_per_component - in_component0_bits_used;
+        }
+
+        if (in_component1_bits_used != 0)
+        {
+            anvil_assert(in_component1_bits_used <= in_n_bits_per_component);
+
+            component_bits_unused[1] = in_n_bits_per_component - in_component1_bits_used;
+        }
+
+        if (in_component2_bits_used != 0)
+        {
+            anvil_assert(in_component2_bits_used <= in_n_bits_per_component);
+
+            component_bits_unused[2] = in_n_bits_per_component - in_component2_bits_used;
+        }
+
+        if (in_component3_bits_used != 0)
+        {
+            anvil_assert(in_component3_bits_used <= in_n_bits_per_component);
+
+            component_bits_unused[3] = in_n_bits_per_component - in_component3_bits_used;
+        }
+    }
+
+};
+
+struct YUVFormatInfo
+{
+    const char*                        name;
+    uint8_t                            num_planes;
+    std::vector<SubresourceLayoutInfo> subresources;
+    Anvil::FormatType                  format_type;
+    bool                               is_multiplanar;
+    bool                               is_packed;
+
+    YUVFormatInfo()
+        :name          (nullptr),
+         num_planes    (0),
+         format_type   (Anvil::FormatType::UNKNOWN),
+         is_multiplanar(false),
+         is_packed     (false)
+    {
+        /* Stub */
+    }
+
+    YUVFormatInfo(const char*           in_name,
+                  uint8_t               in_num_planes,
+                  SubresourceLayoutInfo in_subresource0,
+                  SubresourceLayoutInfo in_subresource1,
+                  SubresourceLayoutInfo in_subresource2,
+                  Anvil::FormatType     in_format_type,
+                  bool                  in_multiplanar,
+                  bool                  in_packed)
+        : name          (in_name),
+          num_planes    (in_num_planes),
+          format_type   (in_format_type),
+          is_multiplanar(in_multiplanar),
+          is_packed     (in_packed)
+    {
+        subresources.push_back(in_subresource0);
+        subresources.push_back(in_subresource1);
+        subresources.push_back(in_subresource2);
+    }
+};
+
+/* TODO: Component layouts are wrong for YUV formats? */
+static const std::unordered_map<Anvil::Format, YUVFormatInfo> g_yuv_formats =
+{
+    /* format | name | num_planes | subresources[0] | subresources[1] | subresources[2] | format_type | is_multiplanar? | is_packed?
*/ + {Anvil::Format::G8B8G8R8_422_UNORM, {"VK_FORMAT_G8B8G8R8_422_UNORM", 1, {Anvil::Format::UNKNOWN, Anvil::ComponentLayout::GBGR, 8, 8, 8, 8}, {}, {}, Anvil::FormatType::UNORM, false, false} }, + {Anvil::Format::B8G8R8G8_422_UNORM, {"VK_FORMAT_B8G8R8G8_422_UNORM", 1, {Anvil::Format::UNKNOWN, Anvil::ComponentLayout::BGRG, 8, 8, 8, 8}, {}, {}, Anvil::FormatType::UNORM, false, false} }, + {Anvil::Format::G8_B8_R8_3PLANE_420_UNORM, {"VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM", 3, {Anvil::Format::R8_UNORM, Anvil::ComponentLayout::R, 8, 0, 0, 0}, {Anvil::Format::R8_UNORM, Anvil::ComponentLayout::R, 8, 0, 0, 0}, {Anvil::Format::R8_UNORM, Anvil::ComponentLayout::R, 8, 0, 0, 0}, Anvil::FormatType::UNORM, true, false} }, + {Anvil::Format::G8_B8R8_2PLANE_420_UNORM, {"VK_FORMAT_G8_B8R8_2PLANE_420_UNORM", 2, {Anvil::Format::R8_UNORM, Anvil::ComponentLayout::R, 8, 0, 0, 0}, {Anvil::Format::R8G8_UNORM, Anvil::ComponentLayout::BR, 8, 8, 0, 0}, {}, Anvil::FormatType::UNORM, true, false} }, + {Anvil::Format::G8_B8_R8_3PLANE_422_UNORM, {"VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM", 3, {Anvil::Format::R8_UNORM, Anvil::ComponentLayout::G, 8, 0, 0, 0}, {Anvil::Format::R8_UNORM, Anvil::ComponentLayout::B, 8, 0, 0, 0}, {Anvil::Format::R8_UNORM, Anvil::ComponentLayout::R, 8, 0, 0, 0}, Anvil::FormatType::UNORM, true, false} }, + {Anvil::Format::G8_B8R8_2PLANE_422_UNORM, {"VK_FORMAT_G8_B8R8_2PLANE_422_UNORM", 2, {Anvil::Format::R8_UNORM, Anvil::ComponentLayout::G, 8, 0, 0, 0}, {Anvil::Format::R8G8_UNORM, Anvil::ComponentLayout::BR, 8, 8, 0, 0}, {}, Anvil::FormatType::UNORM, true, false} }, + {Anvil::Format::G8_B8_R8_3PLANE_444_UNORM, {"VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM", 3, {Anvil::Format::R8_UNORM, Anvil::ComponentLayout::G, 8, 0, 0, 0}, {Anvil::Format::R8_UNORM, Anvil::ComponentLayout::B, 8, 0, 0, 0}, {Anvil::Format::R8_UNORM, Anvil::ComponentLayout::R, 8, 0, 0, 0}, Anvil::FormatType::UNORM, true, false} }, + {Anvil::Format::R10X6_UNORM_PACK16, {"VK_FORMAT_R10X6_UNORM_PACK16", 1, {Anvil::Format::UNKNOWN, Anvil::ComponentLayout::RX, 10, 0, 0, 0, 16}, {}, {}, Anvil::FormatType::UNORM, false, true } }, + {Anvil::Format::R10X6G10X6_UNORM_2PACK16, {"VK_FORMAT_R10X6G10X6_UNORM_2PACK16", 1, {Anvil::Format::UNKNOWN, Anvil::ComponentLayout::RXGX, 10, 10, 0, 0, 16}, {}, {}, Anvil::FormatType::UNORM, false, true } }, + {Anvil::Format::R10X6G10X6B10X6A10X6_UNORM_4PACK16, {"VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16", 1, {Anvil::Format::UNKNOWN, Anvil::ComponentLayout::RXGXBXAX, 10, 10, 10, 10, 16}, {}, {}, Anvil::FormatType::UNORM, false, true } }, + {Anvil::Format::G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, {"VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16", 1, {Anvil::Format::UNKNOWN, Anvil::ComponentLayout::GXBXGXRX, 10, 10, 10, 10, 16}, {}, {}, Anvil::FormatType::UNORM, false, true } }, + {Anvil::Format::B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, {"VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16", 1, {Anvil::Format::UNKNOWN, Anvil::ComponentLayout::BXGXRXGX, 10, 10, 10, 10, 16}, {}, {}, Anvil::FormatType::UNORM, false, true } }, + {Anvil::Format::G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, {"VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16", 3, {Anvil::Format::R10X6_UNORM_PACK16, Anvil::ComponentLayout::GX, 10, 0, 0, 0, 16}, {Anvil::Format::R10X6_UNORM_PACK16, Anvil::ComponentLayout::BX, 10, 0, 0, 0, 16}, {Anvil::Format::R10X6_UNORM_PACK16, Anvil::ComponentLayout::RX, 10, 0, 0, 0, 16}, Anvil::FormatType::UNORM, true, true } }, + {Anvil::Format::G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, 
{"VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16", 2, {Anvil::Format::R10X6_UNORM_PACK16, Anvil::ComponentLayout::GX, 10, 0, 0, 0, 16}, {Anvil::Format::R10X6G10X6_UNORM_2PACK16, Anvil::ComponentLayout::BXRX, 10, 10, 0, 0, 16}, {}, Anvil::FormatType::UNORM, true, true } }, + {Anvil::Format::G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, {"VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16", 3, {Anvil::Format::R10X6_UNORM_PACK16, Anvil::ComponentLayout::GX, 10, 0, 0, 0, 16}, {Anvil::Format::R10X6_UNORM_PACK16, Anvil::ComponentLayout::BX, 10, 0, 0, 0, 16}, {Anvil::Format::R10X6_UNORM_PACK16, Anvil::ComponentLayout::RX, 10, 0, 0, 0, 16}, Anvil::FormatType::UNORM, true, true } }, + {Anvil::Format::G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, {"VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16", 2, {Anvil::Format::R10X6_UNORM_PACK16, Anvil::ComponentLayout::GX, 10, 0, 0, 0, 16}, {Anvil::Format::R10X6G10X6_UNORM_2PACK16, Anvil::ComponentLayout::BXRX, 10, 10, 0, 0, 16}, {}, Anvil::FormatType::UNORM, true, true } }, + {Anvil::Format::G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, {"VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16", 3, {Anvil::Format::R10X6_UNORM_PACK16, Anvil::ComponentLayout::GX, 10, 0, 0, 0, 16}, {Anvil::Format::R10X6_UNORM_PACK16, Anvil::ComponentLayout::BX, 10, 0, 0, 0, 16}, {Anvil::Format::R10X6_UNORM_PACK16, Anvil::ComponentLayout::RX, 10, 0, 0, 0, 16}, Anvil::FormatType::UNORM, true, true } }, + {Anvil::Format::R12X4_UNORM_PACK16, {"VK_FORMAT_R12X4_UNORM_PACK16", 1, {Anvil::Format::UNKNOWN, Anvil::ComponentLayout::RX, 12, 0, 0, 0, 16}, {}, {}, Anvil::FormatType::UNORM, false, true } }, + {Anvil::Format::R12X4G12X4_UNORM_2PACK16, {"VK_FORMAT_R12X4G12X4_UNORM_2PACK16", 1, {Anvil::Format::UNKNOWN, Anvil::ComponentLayout::RXGX, 12, 12, 0, 0, 16}, {}, {}, Anvil::FormatType::UNORM, false, true } }, + {Anvil::Format::R12X4G12X4B12X4A12X4_UNORM_4PACK16, {"VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16", 1, {Anvil::Format::UNKNOWN, Anvil::ComponentLayout::RXGXBXAX, 12, 12, 12, 12, 16}, {}, {}, Anvil::FormatType::UNORM, false, true } }, + {Anvil::Format::G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, {"VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16", 1, {Anvil::Format::UNKNOWN, Anvil::ComponentLayout::GXBXGXRX, 12, 12, 12, 12, 16}, {}, {}, Anvil::FormatType::UNORM, false, true } }, + {Anvil::Format::B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, {"VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16", 1, {Anvil::Format::UNKNOWN, Anvil::ComponentLayout::BXGXRXGX, 12, 12, 12, 12, 16}, {}, {}, Anvil::FormatType::UNORM, false, true } }, + {Anvil::Format::G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, {"VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16", 3, {Anvil::Format::R12X4_UNORM_PACK16, Anvil::ComponentLayout::GX, 12, 0, 0, 0, 16}, {Anvil::Format::R12X4_UNORM_PACK16, Anvil::ComponentLayout::BX, 12, 0, 0, 0, 16}, {Anvil::Format::R12X4_UNORM_PACK16, Anvil::ComponentLayout::RX, 12, 0, 0, 0, 16}, Anvil::FormatType::UNORM, true, true } }, + {Anvil::Format::G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, {"VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16", 2, {Anvil::Format::R12X4_UNORM_PACK16, Anvil::ComponentLayout::GX, 12, 0, 0, 0, 16}, {Anvil::Format::R12X4G12X4_UNORM_2PACK16, Anvil::ComponentLayout::BXRX, 12, 12, 0, 0, 16}, {}, Anvil::FormatType::UNORM, true, true } }, + {Anvil::Format::G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, {"VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16", 3, {Anvil::Format::R12X4_UNORM_PACK16, Anvil::ComponentLayout::GX, 12, 0, 0, 0, 16}, 
{Anvil::Format::R12X4_UNORM_PACK16, Anvil::ComponentLayout::BX, 12, 0, 0, 0, 16}, {Anvil::Format::R12X4_UNORM_PACK16, Anvil::ComponentLayout::RX, 12, 0, 0, 0, 16}, Anvil::FormatType::UNORM, true, true } }, + {Anvil::Format::G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, {"VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16", 2, {Anvil::Format::R12X4_UNORM_PACK16, Anvil::ComponentLayout::GX, 12, 0, 0, 0, 16}, {Anvil::Format::R12X4G12X4_UNORM_2PACK16, Anvil::ComponentLayout::BXRX, 12, 12, 0, 0, 16}, {}, Anvil::FormatType::UNORM, true, true } }, + {Anvil::Format::G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, {"VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16", 3, {Anvil::Format::R12X4_UNORM_PACK16, Anvil::ComponentLayout::GX, 12, 0, 0, 0, 16}, {Anvil::Format::R12X4_UNORM_PACK16, Anvil::ComponentLayout::BX, 12, 0, 0, 0, 16}, {Anvil::Format::R12X4_UNORM_PACK16, Anvil::ComponentLayout::RX, 12, 0, 0, 0, 16}, Anvil::FormatType::UNORM, true, true } }, + {Anvil::Format::G16B16G16R16_422_UNORM, {"VK_FORMAT_G16B16G16R16_422_UNORM", 1, {Anvil::Format::UNKNOWN, Anvil::ComponentLayout::GBGR, 16, 16, 16, 16}, {}, {}, Anvil::FormatType::UNORM, false, false} }, + {Anvil::Format::B16G16R16G16_422_UNORM, {"VK_FORMAT_B16G16R16G16_422_UNORM", 1, {Anvil::Format::UNKNOWN, Anvil::ComponentLayout::BGRG, 16, 16, 16, 16}, {}, {}, Anvil::FormatType::UNORM, false, false} }, + {Anvil::Format::G16_B16_R16_3PLANE_420_UNORM, {"VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM", 3, {Anvil::Format::R16_UNORM, Anvil::ComponentLayout::G, 16, 0, 0, 0}, {Anvil::Format::R16_UNORM, Anvil::ComponentLayout::B, 16, 0, 0, 0}, {Anvil::Format::R16_UNORM, Anvil::ComponentLayout::R, 16, 0, 0, 0}, Anvil::FormatType::UNORM, true, false} }, + {Anvil::Format::G16_B16R16_2PLANE_420_UNORM, {"VK_FORMAT_G16_B16R16_2PLANE_420_UNORM", 2, {Anvil::Format::R16_UNORM, Anvil::ComponentLayout::G, 16, 0, 0, 0}, {Anvil::Format::R16G16_UNORM, Anvil::ComponentLayout::BR, 16, 16, 0, 0}, {}, Anvil::FormatType::UNORM, true, false} }, + {Anvil::Format::G16_B16_R16_3PLANE_422_UNORM, {"VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM", 3, {Anvil::Format::R16_UNORM, Anvil::ComponentLayout::G, 16, 0, 0, 0}, {Anvil::Format::R16_UNORM, Anvil::ComponentLayout::B, 16, 0, 0, 0}, {Anvil::Format::R16_UNORM, Anvil::ComponentLayout::R, 16, 0, 0, 0}, Anvil::FormatType::UNORM, true, false} }, + {Anvil::Format::G16_B16R16_2PLANE_422_UNORM, {"VK_FORMAT_G16_B16R16_2PLANE_422_UNORM", 2, {Anvil::Format::R16_UNORM, Anvil::ComponentLayout::G, 16, 0, 0, 0}, {Anvil::Format::R16G16_UNORM, Anvil::ComponentLayout::BR, 16, 16, 0, 0}, {}, Anvil::FormatType::UNORM, true, false} }, + {Anvil::Format::G16_B16_R16_3PLANE_444_UNORM, {"VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM", 3, {Anvil::Format::R16_UNORM, Anvil::ComponentLayout::G, 16, 0, 0, 0}, {Anvil::Format::R16_UNORM, Anvil::ComponentLayout::B, 16, 0, 0, 0}, {Anvil::Format::R16_UNORM, Anvil::ComponentLayout::R, 16, 0, 0, 0}, Anvil::FormatType::UNORM, true, false} }, +}; + +typedef struct +{ + uint32_t red_component_start_bit_index; + uint32_t red_component_last_bit_index; + uint32_t green_component_start_bit_index; + uint32_t green_component_last_bit_index; + uint32_t blue_component_start_bit_index; + uint32_t blue_component_last_bit_index; + uint32_t alpha_component_start_bit_index; + uint32_t alpha_component_last_bit_index; + uint32_t shared_component_start_bit_index; + uint32_t shared_component_last_bit_index; + uint32_t depth_component_start_bit_index; + uint32_t depth_component_last_bit_index; + uint32_t stencil_component_start_bit_index; + uint32_t 
stencil_component_last_bit_index;
+} NonYUVFormatBitLayoutInfo;
+
+typedef struct
+{
+    uint32_t plane0_r0_start_bit_index;
+    uint32_t plane0_r0_last_bit_index;
+    uint32_t plane0_g0_start_bit_index;
+    uint32_t plane0_g0_last_bit_index;
+    uint32_t plane0_b0_start_bit_index;
+    uint32_t plane0_b0_last_bit_index;
+    uint32_t plane0_a0_start_bit_index;
+    uint32_t plane0_a0_last_bit_index;
+
+    uint32_t plane0_g1_start_bit_index;
+    uint32_t plane0_g1_last_bit_index;
+
+    uint32_t plane1_r0_start_bit_index;
+    uint32_t plane1_r0_last_bit_index;
+    uint32_t plane1_g0_start_bit_index;
+    uint32_t plane1_g0_last_bit_index;
+    uint32_t plane1_b0_start_bit_index;
+    uint32_t plane1_b0_last_bit_index;
+
+    uint32_t plane2_r0_start_bit_index;
+    uint32_t plane2_r0_last_bit_index;
+    uint32_t plane2_g0_start_bit_index;
+    uint32_t plane2_g0_last_bit_index;
+    uint32_t plane2_b0_start_bit_index;
+    uint32_t plane2_b0_last_bit_index;
+} YUVFormatBitLayoutInfo;
+
+static const std::unordered_map<Anvil::Format, YUVFormatBitLayoutInfo> g_yuv_format_bit_layout_info =
+{
+    /* Single-planar non-packed YUV formats ==> */
+
+    /* format | p0r0 start | p0r0 end | p0g0 start | p0g0 end | p0b0 start | p0b0 end | p0a0 start | p0a0 end | p0g1 start | p0g1 end | p1r0 start | p1r0 end | p1g0 start | p1g0 end | p1b0 start | p1b0 end | p2r0 start | p2r0 end | p2g0 start | p2g0 end | p2b0 start | p2b0 end */
+    {Anvil::Format::G8B8G8R8_422_UNORM, {24, 31, 0, 7, 8, 15, UINT32_MAX, UINT32_MAX, 16, 23, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+    {Anvil::Format::B8G8R8G8_422_UNORM, {16, 23, 8, 15, 0, 7, UINT32_MAX, UINT32_MAX, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+    {Anvil::Format::G16B16G16R16_422_UNORM, {48, 63, 0, 15, 16, 31, UINT32_MAX, UINT32_MAX, 32, 47, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+    {Anvil::Format::B16G16R16G16_422_UNORM, {32, 47, 16, 31, 0, 15, UINT32_MAX, UINT32_MAX, 48, 63, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+
+    /* Single-planar packed YUV formats ==> */
+    {Anvil::Format::R10X6_UNORM_PACK16, {6, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+    {Anvil::Format::R10X6G10X6_UNORM_2PACK16, {22, 31, 6, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+    {Anvil::Format::R10X6G10X6B10X6A10X6_UNORM_4PACK16, {54, 63, 38, 47, 22, 31, 6, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+    {Anvil::Format::G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, {48, 57, 0, 9, 16, 25, UINT32_MAX, UINT32_MAX, 32, 41, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+    {Anvil::Format::B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, {32, 41, 16, 25, 0, 9, UINT32_MAX, UINT32_MAX, 48,
57, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R12X4_UNORM_PACK16, {4, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R12X4G12X4_UNORM_2PACK16, {20, 31, 4, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R12X4G12X4B12X4A12X4_UNORM_4PACK16, {52, 63, 36, 47, 20, 31, 4, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, {48, 59, 0, 11, 16, 27, UINT32_MAX, UINT32_MAX, 32, 43, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, {32, 43, 16, 27, 0, 11, UINT32_MAX, UINT32_MAX, 48, 59, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + + /* Multi-planar non-packed YUV formats ==> */ + + {Anvil::Format::G8_B8_R8_3PLANE_420_UNORM, {UINT32_MAX, UINT32_MAX, 0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 0, 7, 0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::G8_B8R8_2PLANE_420_UNORM, {UINT32_MAX, UINT32_MAX, 0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 8, 15, UINT32_MAX, UINT32_MAX, 0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::G8_B8_R8_3PLANE_422_UNORM, {UINT32_MAX, UINT32_MAX, 0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 0, 7, 0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::G8_B8R8_2PLANE_422_UNORM, {UINT32_MAX, UINT32_MAX, 0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 8, 15, UINT32_MAX, UINT32_MAX, 0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::G8_B8_R8_3PLANE_444_UNORM, {UINT32_MAX, UINT32_MAX, 0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 0, 7, 0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::G16_B16_R16_3PLANE_420_UNORM, {UINT32_MAX, UINT32_MAX, 0, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 0, 15, 0, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::G16_B16R16_2PLANE_420_UNORM, {UINT32_MAX, UINT32_MAX, 0, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 16, 31, UINT32_MAX, UINT32_MAX, 0, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::G16_B16_R16_3PLANE_422_UNORM, {UINT32_MAX, UINT32_MAX, 0, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 
UINT32_MAX, UINT32_MAX, 0, 15, 0, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+    {Anvil::Format::G16_B16R16_2PLANE_422_UNORM, {UINT32_MAX, UINT32_MAX, 0, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 16, 31, UINT32_MAX, UINT32_MAX, 0, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+    {Anvil::Format::G16_B16_R16_3PLANE_444_UNORM, {UINT32_MAX, UINT32_MAX, 0, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 0, 15, 0, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+
+    /* Multi-planar packed YUV formats ==> */
+
+    {Anvil::Format::G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, {UINT32_MAX, UINT32_MAX, 6, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 6, 15, 6, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+    {Anvil::Format::G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, {UINT32_MAX, UINT32_MAX, 6, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 6, 15, UINT32_MAX, UINT32_MAX, 22, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+    {Anvil::Format::G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, {UINT32_MAX, UINT32_MAX, 6, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 6, 15, 6, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+    {Anvil::Format::G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, {UINT32_MAX, UINT32_MAX, 6, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 6, 15, UINT32_MAX, UINT32_MAX, 22, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+    {Anvil::Format::G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, {UINT32_MAX, UINT32_MAX, 6, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 6, 15, 6, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+    {Anvil::Format::G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, {UINT32_MAX, UINT32_MAX, 4, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 4, 15, 4, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+    {Anvil::Format::G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, {UINT32_MAX, UINT32_MAX, 4, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 4, 15, UINT32_MAX, UINT32_MAX, 20, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+    {Anvil::Format::G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, {UINT32_MAX, UINT32_MAX, 4, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 4, 15, 4, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+    {Anvil::Format::G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, {UINT32_MAX, UINT32_MAX, 4, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 4, 15, UINT32_MAX, UINT32_MAX, 20, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+    {Anvil::Format::G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, {UINT32_MAX, UINT32_MAX, 4, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 4, 15, 4, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} },
+};
+
+static const std::unordered_map<Anvil::Format, NonYUVFormatBitLayoutInfo> g_nonyuv_format_bit_layout_info =
+{
+    /* format | red
start | red end | green start | green end | blue start | blue end | alpha start | alpha end | shared_start | shared_end | depth start | depth end | stencil start | stencil end */ + {Anvil::Format::UNKNOWN, {UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R4G4_UNORM_PACK8, {4, 7, 0, 3, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R4G4B4A4_UNORM_PACK16, {12, 15, 8, 11, 4, 7, 0, 3, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::B4G4R4A4_UNORM_PACK16, {4, 7, 8, 11, 12, 15, 0, 3, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R5G6B5_UNORM_PACK16, {11, 15, 5, 10, 0, 4, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::B5G6R5_UNORM_PACK16, {0, 4, 5, 10, 11, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R5G5B5A1_UNORM_PACK16, {11, 15, 6, 10, 1, 5, 0, 0, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::B5G5R5A1_UNORM_PACK16, {1, 5, 6, 10, 11, 15, 0, 0, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::A1R5G5B5_UNORM_PACK16, {10, 14, 5, 9, 0, 4, 15, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8_UNORM, {0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8_SNORM, {0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8_USCALED, {0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8_SSCALED, {0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8_UINT, {0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8_SINT, {0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8_SRGB, {0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8_UNORM, {0, 7, 8, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8_SNORM, {0, 7, 8, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8_USCALED, {0, 7, 8, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8_SSCALED, {0, 7, 8, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + 
{Anvil::Format::R8G8_UINT, {0, 7, 8, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8_SINT, {0, 7, 8, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8_SRGB, {0, 7, 8, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8B8_UNORM, {0, 7, 8, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8B8_SNORM, {0, 7, 8, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8B8_USCALED, {0, 7, 8, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8B8_SSCALED, {0, 7, 8, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8B8_UINT, {0, 7, 8, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8B8_SINT, {0, 7, 8, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8B8_SRGB, {0, 7, 8, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::B8G8R8_UNORM, {16, 23, 8, 15, 0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::B8G8R8_SNORM, {16, 23, 8, 15, 0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::B8G8R8_USCALED, {16, 23, 8, 15, 0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::B8G8R8_SSCALED, {16, 23, 8, 15, 0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::B8G8R8_UINT, {16, 23, 8, 15, 0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::B8G8R8_SINT, {16, 23, 8, 15, 0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::B8G8R8_SRGB, {16, 23, 8, 15, 0, 7, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8B8A8_UNORM, {0, 7, 8, 15, 16, 23, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8B8A8_SNORM, {0, 7, 8, 15, 16, 23, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8B8A8_USCALED, {0, 7, 8, 15, 16, 23, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8B8A8_SSCALED, {0, 7, 8, 15, 16, 23, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8B8A8_UINT, {0, 7, 8, 15, 16, 23, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8B8A8_SINT, {0, 7, 8, 15, 16, 23, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, 
UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R8G8B8A8_SRGB, {0, 7, 8, 15, 16, 23, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::B8G8R8A8_UNORM, {16, 23, 8, 15, 0, 7, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::B8G8R8A8_SNORM, {16, 23, 8, 15, 0, 7, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::B8G8R8A8_USCALED, {16, 23, 8, 15, 0, 7, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::B8G8R8A8_SSCALED, {16, 23, 8, 15, 0, 7, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::B8G8R8A8_UINT, {16, 23, 8, 15, 0, 7, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::B8G8R8A8_SINT, {16, 23, 8, 15, 0, 7, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::B8G8R8A8_SRGB, {16, 23, 8, 15, 0, 7, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::A8B8G8R8_UNORM_PACK32, {0, 7, 8, 15, 16, 23, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::A8B8G8R8_SNORM_PACK32, {0, 7, 8, 15, 16, 23, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::A8B8G8R8_USCALED_PACK32, {0, 7, 8, 15, 16, 23, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::A8B8G8R8_SSCALED_PACK32, {0, 7, 8, 15, 16, 23, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::A8B8G8R8_UINT_PACK32, {0, 7, 8, 15, 16, 23, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::A8B8G8R8_SINT_PACK32, {0, 7, 8, 15, 16, 23, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::A8B8G8R8_SRGB_PACK32, {0, 7, 8, 15, 16, 23, 24, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::A2R10G10B10_UNORM_PACK32, {20, 29, 10, 19, 0, 9, 30, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::A2R10G10B10_SNORM_PACK32, {20, 29, 10, 19, 0, 9, 30, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::A2R10G10B10_USCALED_PACK32, {20, 29, 10, 19, 0, 9, 30, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::A2R10G10B10_SSCALED_PACK32, {20, 29, 10, 19, 0, 9, 30, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::A2R10G10B10_UINT_PACK32, {20, 29, 10, 19, 0, 9, 30, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::A2R10G10B10_SINT_PACK32, {20, 29, 10, 19, 0, 9, 30, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::A2B10G10R10_UNORM_PACK32, {0, 9, 10, 19, 20, 29, 30, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::A2B10G10R10_SNORM_PACK32, {0, 9, 10, 19, 20, 29, 30, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::A2B10G10R10_USCALED_PACK32, {0, 9, 10, 19, 20, 29, 30, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + 
{Anvil::Format::A2B10G10R10_SSCALED_PACK32, {0, 9, 10, 19, 20, 29, 30, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::A2B10G10R10_UINT_PACK32, {0, 9, 10, 19, 20, 29, 30, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::A2B10G10R10_SINT_PACK32, {0, 9, 10, 19, 20, 29, 30, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16_UNORM, {0, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16_SNORM, {0, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16_USCALED, {0, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16_SSCALED, {0, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16_UINT, {0, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16_SINT, {0, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16_SFLOAT, {0, 15, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16_UNORM, {0, 15, 16, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16_SNORM, {0, 15, 16, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16_USCALED, {0, 15, 16, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16_SSCALED, {0, 15, 16, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16_UINT, {0, 15, 16, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16_SINT, {0, 15, 16, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16_SFLOAT, {0, 15, 16, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16B16_UNORM, {0, 15, 16, 31, 32, 47, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16B16_SNORM, {0, 15, 16, 31, 32, 47, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16B16_USCALED, {0, 15, 16, 31, 32, 47, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16B16_SSCALED, {0, 15, 16, 31, 32, 47, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 
UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16B16_UINT, {0, 15, 16, 31, 32, 47, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16B16_SINT, {0, 15, 16, 31, 32, 47, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16B16_SFLOAT, {0, 15, 16, 31, 32, 47, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16B16A16_UNORM, {0, 15, 16, 31, 32, 47, 48, 63, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16B16A16_SNORM, {0, 15, 16, 31, 32, 47, 48, 63, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16B16A16_USCALED, {0, 15, 16, 31, 32, 47, 48, 63, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16B16A16_SSCALED, {0, 15, 16, 31, 32, 47, 48, 63, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16B16A16_UINT, {0, 15, 16, 31, 32, 47, 48, 63, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16B16A16_SINT, {0, 15, 16, 31, 32, 47, 48, 63, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R16G16B16A16_SFLOAT, {0, 15, 16, 31, 32, 47, 48, 63, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R32_UINT, {0, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R32_SINT, {0, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R32_SFLOAT, {0, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R32G32_UINT, {0, 31, 32, 63, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R32G32_SINT, {0, 31, 32, 63, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R32G32_SFLOAT, {0, 31, 32, 63, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R32G32B32_UINT, {0, 31, 32, 63, 64, 95, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R32G32B32_SINT, {0, 31, 32, 63, 64, 95, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R32G32B32_SFLOAT, {0, 31, 32, 63, 64, 95, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R32G32B32A32_UINT, {0, 31, 32, 63, 64, 95, 96, 127, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R32G32B32A32_SINT, {0, 31, 32, 63, 64, 95, 96, 127, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R32G32B32A32_SFLOAT, {0, 31, 32, 63, 64, 95, 96, 127, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R64_UINT, {0, 63, UINT32_MAX, UINT32_MAX, 
UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R64_SINT, {0, 63, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R64_SFLOAT, {0, 63, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R64G64_UINT, {0, 63, 64, 127, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R64G64_SINT, {0, 63, 64, 127, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R64G64_SFLOAT, {0, 63, 64, 127, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R64G64B64_UINT, {0, 63, 64, 127, 128, 191, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R64G64B64_SINT, {0, 63, 64, 127, 128, 191, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R64G64B64_SFLOAT, {0, 63, 64, 127, 128, 191, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R64G64B64A64_UINT, {0, 63, 64, 127, 128, 191, 192, 255, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R64G64B64A64_SINT, {0, 63, 64, 127, 128, 191, 192, 255, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::R64G64B64A64_SFLOAT, {0, 63, 64, 127, 128, 191, 192, 255, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::B10G11R11_UFLOAT_PACK32, {0, 10, 11, 21, 22, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::E5B9G9R9_UFLOAT_PACK32, {0, 8, 9, 17, 18, 26, UINT32_MAX, UINT32_MAX, 27, 31, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::D16_UNORM, {UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 0, 15, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::X8_D24_UNORM_PACK32, {UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 0, 23, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::D32_SFLOAT, {UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 0, 31, UINT32_MAX, UINT32_MAX} }, + {Anvil::Format::S8_UINT, {UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 0, 7, } }, + {Anvil::Format::D16_UNORM_S8_UINT, {UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 0, 15, 16, 23, } }, + {Anvil::Format::D24_UNORM_S8_UINT, {UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 0, 23, 24, 31, } }, + {Anvil::Format::D32_SFLOAT_S8_UINT, {UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, UINT32_MAX, 0, 31, 32, 39, } }, +}; + +static const 
uint32_t g_layout_to_n_components[] = +{ + /* COMPONENT_LAYOUT_ABGR */ + 4, + + /* COMPONENT_LAYOUT_ARGB */ + 4, + + /* COMPONENT_LAYOUT_B */ + 1, + + /* COMPONENT_LAYOUT_BGR */ + 3, + + /* COMPONENT_LAYOUT_BGRA */ + 4, + + /* COMPONENT_LAYOUT_BGRG */ + 4, + + /* COMPONENT_LAYOUT_BR */ + 2, + + /* COMPONENT_LAYOUT_BX */ + 1, + + /* COMPONENT_LAYOUT_BXGXRXGX */ + 4, + + /* COMPONENT_LAYOUT_BXRX */ + 2, + + /* COMPONENT_LAYOUT_D */ + 1, + + /* COMPONENT_LAYOUT_DS */ + 2, + + /* COMPONENT_LAYOUT_EBGR */ + 4, + + /* COMPONENT_LAYOUT_G */ + 1, + + /* COMPONENT_LAYOUT_GBGR */ + 4, + + /* COMPONENT_LAYOUT_GX */ + 1, + + /* COMPONENT_LAYOUT_GXBXGXRX */ + 4, + + /* COMPONENT_LAYOUT_R */ + 1, + + /* COMPONENT_LAYOUT_RG */ + 2, + + /* COMPONENT_LAYOUT_RGB */ + 3, + + /* COMPONENT_LAYOUT_RGBA */ + 4, + + /* COMPONENT_LAYOUT_RX */ + 1, + + /* COMPONENT_LAYOUT_RXGX */ + 2, + + /* COMPONENT_LAYOUT_RXGXBXAX */ + 4, + + /* COMPONENT_LAYOUT_S */ + 1, + + /* COMPONENT_LAYOUT_XD */ + 2, }; -static uint32_t layout_to_n_components[] = +static const std::vector g_compatibility_classes[] = +{ + /* 8-bit */ + { + Anvil::Format::R4G4_UNORM_PACK8, + Anvil::Format::R8_UNORM, + Anvil::Format::R8_SNORM, + Anvil::Format::R8_USCALED, + Anvil::Format::R8_SSCALED, + Anvil::Format::R8_UINT, + Anvil::Format::R8_SINT, + Anvil::Format::R8_SRGB + }, + + { + Anvil::Format::G8_B8_R8_3PLANE_420_UNORM + }, + + { + Anvil::Format::G8_B8R8_2PLANE_420_UNORM + }, + + { + Anvil::Format::G8_B8_R8_3PLANE_422_UNORM + }, + + { + Anvil::Format::G8_B8R8_2PLANE_422_UNORM + }, + + { + Anvil::Format::G8_B8_R8_3PLANE_444_UNORM + }, + + /* 10-bit */ + { + Anvil::Format::G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16 + }, + + { + Anvil::Format::G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16 + }, + + { + Anvil::Format::G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16 + }, + + { + Anvil::Format::G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16 + }, + + { + Anvil::Format::G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16 + }, + + /* 12-bit */ + + { + Anvil::Format::G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16 + }, + + { + Anvil::Format::G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16 + }, + + { + Anvil::Format::G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16 + }, + + { + Anvil::Format::G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16 + }, + + { + Anvil::Format::G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16 + }, + + /* 16-bit */ + { + Anvil::Format::R4G4B4A4_UNORM_PACK16, + Anvil::Format::B4G4R4A4_UNORM_PACK16, + Anvil::Format::R5G6B5_UNORM_PACK16, + Anvil::Format::B5G6R5_UNORM_PACK16, + Anvil::Format::R5G5B5A1_UNORM_PACK16, + Anvil::Format::B5G5R5A1_UNORM_PACK16, + Anvil::Format::A1R5G5B5_UNORM_PACK16, + Anvil::Format::R8G8_UNORM, + Anvil::Format::R8G8_SNORM, + Anvil::Format::R8G8_USCALED, + Anvil::Format::R8G8_SSCALED, + Anvil::Format::R8G8_UINT, + Anvil::Format::R8G8_SINT, + Anvil::Format::R8G8_SRGB, + Anvil::Format::R16_UNORM, + Anvil::Format::R16_SNORM, + Anvil::Format::R16_USCALED, + Anvil::Format::R16_SSCALED, + Anvil::Format::R16_UINT, + Anvil::Format::R16_SINT, + Anvil::Format::R16_SFLOAT, + + /* YUV */ + Anvil::Format::R10X6_UNORM_PACK16, + Anvil::Format::R12X4_UNORM_PACK16 + }, + + { + Anvil::Format::G16_B16_R16_3PLANE_420_UNORM, + }, + + { + Anvil::Format::G16_B16R16_2PLANE_420_UNORM, + }, + + { + Anvil::Format::G16_B16_R16_3PLANE_422_UNORM, + }, + + { + Anvil::Format::G16_B16R16_2PLANE_422_UNORM, + }, + + { + Anvil::Format::G16_B16_R16_3PLANE_444_UNORM, + }, + + /* 24-bit */ + { + Anvil::Format::R8G8B8_UNORM, + Anvil::Format::R8G8B8_SNORM, + Anvil::Format::R8G8B8_USCALED, + 
Anvil::Format::R8G8B8_SSCALED, + Anvil::Format::R8G8B8_UINT, + Anvil::Format::R8G8B8_SINT, + Anvil::Format::R8G8B8_SRGB, + Anvil::Format::B8G8R8_UNORM, + Anvil::Format::B8G8R8_SNORM, + Anvil::Format::B8G8R8_USCALED, + Anvil::Format::B8G8R8_SSCALED, + Anvil::Format::B8G8R8_UINT, + Anvil::Format::B8G8R8_SINT, + Anvil::Format::B8G8R8_SRGB + }, + + /* 32-bit */ + { + Anvil::Format::R8G8B8A8_UNORM, + Anvil::Format::R8G8B8A8_SNORM, + Anvil::Format::R8G8B8A8_USCALED, + Anvil::Format::R8G8B8A8_SSCALED, + Anvil::Format::R8G8B8A8_UINT, + Anvil::Format::R8G8B8A8_SINT, + Anvil::Format::R8G8B8A8_SRGB, + Anvil::Format::B8G8R8A8_UNORM, + Anvil::Format::B8G8R8A8_SNORM, + Anvil::Format::B8G8R8A8_USCALED, + Anvil::Format::B8G8R8A8_SSCALED, + Anvil::Format::B8G8R8A8_UINT, + Anvil::Format::B8G8R8A8_SINT, + Anvil::Format::B8G8R8A8_SRGB, + Anvil::Format::A8B8G8R8_UNORM_PACK32, + Anvil::Format::A8B8G8R8_SNORM_PACK32, + Anvil::Format::A8B8G8R8_USCALED_PACK32, + Anvil::Format::A8B8G8R8_SSCALED_PACK32, + Anvil::Format::A8B8G8R8_UINT_PACK32, + Anvil::Format::A8B8G8R8_SINT_PACK32, + Anvil::Format::A8B8G8R8_SRGB_PACK32, + Anvil::Format::A2R10G10B10_UNORM_PACK32, + Anvil::Format::A2R10G10B10_SNORM_PACK32, + Anvil::Format::A2R10G10B10_USCALED_PACK32, + Anvil::Format::A2R10G10B10_SSCALED_PACK32, + Anvil::Format::A2R10G10B10_UINT_PACK32, + Anvil::Format::A2R10G10B10_SINT_PACK32, + Anvil::Format::A2B10G10R10_UNORM_PACK32, + Anvil::Format::A2B10G10R10_SNORM_PACK32, + Anvil::Format::A2B10G10R10_USCALED_PACK32, + Anvil::Format::A2B10G10R10_SSCALED_PACK32, + Anvil::Format::A2B10G10R10_UINT_PACK32, + Anvil::Format::A2B10G10R10_SINT_PACK32, + Anvil::Format::R16G16_UNORM, + Anvil::Format::R16G16_SNORM, + Anvil::Format::R16G16_USCALED, + Anvil::Format::R16G16_SSCALED, + Anvil::Format::R16G16_UINT, + Anvil::Format::R16G16_SINT, + Anvil::Format::R16G16_SFLOAT, + Anvil::Format::R32_UINT, + Anvil::Format::R32_SINT, + Anvil::Format::R32_SFLOAT, + Anvil::Format::B10G11R11_UFLOAT_PACK32, + Anvil::Format::E5B9G9R9_UFLOAT_PACK32, + + /* YUV */ + Anvil::Format::R10X6G10X6_UNORM_2PACK16, + Anvil::Format::R12X4G12X4_UNORM_2PACK16, + }, + + { + Anvil::Format::G8B8G8R8_422_UNORM, + }, + + { + Anvil::Format::B8G8R8G8_422_UNORM + }, + + /* 48-bit */ + { + Anvil::Format::R16G16B16_UNORM, + Anvil::Format::R16G16B16_SNORM, + Anvil::Format::R16G16B16_USCALED, + Anvil::Format::R16G16B16_SSCALED, + Anvil::Format::R16G16B16_UINT, + Anvil::Format::R16G16B16_SINT, + Anvil::Format::R16G16B16_SFLOAT + }, + + /* 64-bit */ + { + Anvil::Format::R16G16B16A16_UNORM, + Anvil::Format::R16G16B16A16_SNORM, + Anvil::Format::R16G16B16A16_USCALED, + Anvil::Format::R16G16B16A16_SSCALED, + Anvil::Format::R16G16B16A16_UINT, + Anvil::Format::R16G16B16A16_SINT, + Anvil::Format::R16G16B16A16_SFLOAT, + Anvil::Format::R32G32_UINT, + Anvil::Format::R32G32_SINT, + Anvil::Format::R32G32_SFLOAT, + Anvil::Format::R64_UINT, + Anvil::Format::R64_SINT, + Anvil::Format::R64_SFLOAT, + }, + + { + Anvil::Format::R10X6G10X6B10X6A10X6_UNORM_4PACK16, + }, + + { + Anvil::Format::G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, + }, + + { + Anvil::Format::B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, + }, + + { + Anvil::Format::R12X4G12X4B12X4A12X4_UNORM_4PACK16, + }, + + { + Anvil::Format::G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, + }, + + { + Anvil::Format::B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, + }, + + { + Anvil::Format::G16B16G16R16_422_UNORM, + }, + + { + Anvil::Format::B16G16R16G16_422_UNORM + }, + + /* 96-bit */ + { + Anvil::Format::R32G32B32_UINT, + Anvil::Format::R32G32B32_SINT, + 
Anvil::Format::R32G32B32_SFLOAT + }, + + /* 128-bit */ + { + Anvil::Format::R32G32B32A32_UINT, + Anvil::Format::R32G32B32A32_SINT, + Anvil::Format::R32G32B32A32_SFLOAT, + Anvil::Format::R64G64_UINT, + Anvil::Format::R64G64_SINT, + Anvil::Format::R64G64_SFLOAT + }, + + /* 192-bit */ + { + Anvil::Format::R64G64B64_UINT, + Anvil::Format::R64G64B64_SINT, + Anvil::Format::R64G64B64_SFLOAT + }, + + /* 256-bit */ + { + Anvil::Format::R64G64B64A64_UINT, + Anvil::Format::R64G64B64A64_SINT, + Anvil::Format::R64G64B64A64_SFLOAT + }, + + /* BC1 RGB */ + { + Anvil::Format::BC1_RGB_UNORM_BLOCK, + Anvil::Format::BC1_RGB_SRGB_BLOCK + }, + + /* BC1 RGBA */ + { + Anvil::Format::BC1_RGBA_UNORM_BLOCK, + Anvil::Format::BC1_RGBA_SRGB_BLOCK + }, + + /* BC2 */ + { + Anvil::Format::BC2_UNORM_BLOCK, + Anvil::Format::BC2_SRGB_BLOCK + }, + + /* BC3 */ + { + Anvil::Format::BC3_UNORM_BLOCK, + Anvil::Format::BC3_SRGB_BLOCK + }, + + /* BC4 */ + { + Anvil::Format::BC4_UNORM_BLOCK, + Anvil::Format::BC4_SNORM_BLOCK + }, + + /* BC5 */ + { + Anvil::Format::BC5_UNORM_BLOCK, + Anvil::Format::BC5_SNORM_BLOCK + }, + + /* BC6H */ + { + Anvil::Format::BC6H_UFLOAT_BLOCK, + Anvil::Format::BC6H_SFLOAT_BLOCK + }, + + /* BC7 */ + { + Anvil::Format::BC7_UNORM_BLOCK, + Anvil::Format::BC7_SRGB_BLOCK + }, + + /* ETC2 RGB */ + { + Anvil::Format::ETC2_R8G8B8_UNORM_BLOCK, + Anvil::Format::ETC2_R8G8B8_SRGB_BLOCK + }, + + /* ETC2 RGBA */ + { + Anvil::Format::ETC2_R8G8B8A1_UNORM_BLOCK, + Anvil::Format::ETC2_R8G8B8A1_SRGB_BLOCK + }, + + /* ETC2/EAC RGBA */ + { + Anvil::Format::ETC2_R8G8B8A8_UNORM_BLOCK, + Anvil::Format::ETC2_R8G8B8A8_SRGB_BLOCK + }, + + /* EAC R */ + { + Anvil::Format::EAC_R11_UNORM_BLOCK, + Anvil::Format::EAC_R11_SNORM_BLOCK + }, + + /* EAC RG */ + { + Anvil::Format::EAC_R11G11_UNORM_BLOCK, + Anvil::Format::EAC_R11G11_SNORM_BLOCK + }, + + /* ASTC (4x4) */ + { + Anvil::Format::ASTC_4x4_UNORM_BLOCK, + Anvil::Format::ASTC_4x4_SRGB_BLOCK + }, + + /* ASTC (5x4) */ + { + Anvil::Format::ASTC_5x4_UNORM_BLOCK, + Anvil::Format::ASTC_5x4_SRGB_BLOCK + }, + + /* ASTC (5x5) */ + { + Anvil::Format::ASTC_5x5_UNORM_BLOCK, + Anvil::Format::ASTC_5x5_SRGB_BLOCK + }, + + /* ASTC (6x5) */ + { + Anvil::Format::ASTC_6x5_UNORM_BLOCK, + Anvil::Format::ASTC_6x5_SRGB_BLOCK + }, + + /* ASTC (6x6) */ + { + Anvil::Format::ASTC_6x6_UNORM_BLOCK, + Anvil::Format::ASTC_6x6_SRGB_BLOCK + }, + + /* ASTC (8x5) */ + { + Anvil::Format::ASTC_8x5_UNORM_BLOCK, + Anvil::Format::ASTC_8x5_SRGB_BLOCK + }, + + /* ASTC (8x6) */ + { + Anvil::Format::ASTC_8x6_UNORM_BLOCK, + Anvil::Format::ASTC_8x6_SRGB_BLOCK + }, + + /* ASTC (8x8) */ + { + Anvil::Format::ASTC_8x8_UNORM_BLOCK, + Anvil::Format::ASTC_8x8_SRGB_BLOCK + }, + + /* ASTC (10x5) */ + { + Anvil::Format::ASTC_10x5_UNORM_BLOCK, + Anvil::Format::ASTC_10x5_SRGB_BLOCK + }, + + /* ASTC (10x6) */ + { + Anvil::Format::ASTC_10x6_UNORM_BLOCK, + Anvil::Format::ASTC_10x6_SRGB_BLOCK + }, + + /* ASTC (10x8) */ + { + Anvil::Format::ASTC_10x8_UNORM_BLOCK, + Anvil::Format::ASTC_10x8_SRGB_BLOCK + }, + + /* ASTC (10x10) */ + { + Anvil::Format::ASTC_10x10_UNORM_BLOCK, + Anvil::Format::ASTC_10x10_SRGB_BLOCK + }, + + /* ASTC (12x10) */ + { + Anvil::Format::ASTC_12x10_UNORM_BLOCK, + Anvil::Format::ASTC_12x10_SRGB_BLOCK + }, + + /* ASTC (12x12) */ + { + Anvil::Format::ASTC_12x12_UNORM_BLOCK, + Anvil::Format::ASTC_12x12_SRGB_BLOCK + }, + + /* D16 */ + { + Anvil::Format::D16_UNORM + }, + + /* D24 */ + { + Anvil::Format::X8_D24_UNORM_PACK32 + }, + + /* D32 */ + { + Anvil::Format::D32_SFLOAT + }, + + /* S8 */ + { + Anvil::Format::S8_UINT + }, + + /* 
D16S8 */ + { + Anvil::Format::D16_UNORM_S8_UINT + }, + + /* D24S8 */ + { + Anvil::Format::D24_UNORM_S8_UINT + }, + + /* D32S8 */ + { + Anvil::Format::D32_SFLOAT_S8_UINT + } +}; + +/** Please see header for specification */ +bool Anvil::Formats::get_compatible_formats(Anvil::Format in_format, + uint32_t* out_n_compatible_formats_ptr, + const Anvil::Format** out_compatible_formats_ptr_ptr) +{ + bool result = false; + + for (const auto& current_class : g_compatibility_classes) + { + auto format_iterator = std::find(current_class.begin(), + current_class.end (), + in_format); + + if (format_iterator != current_class.end() ) + { + *out_n_compatible_formats_ptr = static_cast(current_class.size() ); + *out_compatible_formats_ptr_ptr = ¤t_class.at(0); + + result = true; + break; + } + } + + return result; +} + +/** Please see header for specification */ +bool Anvil::Formats::get_compressed_format_block_size(Anvil::Format in_format, + uint32_t* out_block_size_uvec2_ptr, + uint32_t* out_n_bytes_per_block_ptr) +{ + bool result = true; + + switch (in_format) + { + case Anvil::Format::ASTC_4x4_UNORM_BLOCK: + case Anvil::Format::ASTC_4x4_SRGB_BLOCK: + case Anvil::Format::BC2_UNORM_BLOCK: + case Anvil::Format::BC2_SRGB_BLOCK: + case Anvil::Format::BC3_UNORM_BLOCK: + case Anvil::Format::BC3_SRGB_BLOCK: + case Anvil::Format::BC5_UNORM_BLOCK: + case Anvil::Format::BC5_SNORM_BLOCK: + case Anvil::Format::BC6H_UFLOAT_BLOCK: + case Anvil::Format::BC6H_SFLOAT_BLOCK: + case Anvil::Format::BC7_UNORM_BLOCK: + case Anvil::Format::BC7_SRGB_BLOCK: + case Anvil::Format::EAC_R11G11_UNORM_BLOCK: + case Anvil::Format::EAC_R11G11_SNORM_BLOCK: + case Anvil::Format::ETC2_R8G8B8A8_UNORM_BLOCK: + case Anvil::Format::ETC2_R8G8B8A8_SRGB_BLOCK: + { + out_block_size_uvec2_ptr[0] = 4; + out_block_size_uvec2_ptr[1] = 4; + *out_n_bytes_per_block_ptr = 128 / 8; + + break; + } + + case Anvil::Format::BC1_RGB_UNORM_BLOCK: + case Anvil::Format::BC1_RGB_SRGB_BLOCK: + case Anvil::Format::BC1_RGBA_UNORM_BLOCK: + case Anvil::Format::BC1_RGBA_SRGB_BLOCK: + case Anvil::Format::BC4_UNORM_BLOCK: + case Anvil::Format::BC4_SNORM_BLOCK: + case Anvil::Format::EAC_R11_UNORM_BLOCK: + case Anvil::Format::EAC_R11_SNORM_BLOCK: + case Anvil::Format::ETC2_R8G8B8_UNORM_BLOCK: + case Anvil::Format::ETC2_R8G8B8_SRGB_BLOCK: + case Anvil::Format::ETC2_R8G8B8A1_UNORM_BLOCK: + case Anvil::Format::ETC2_R8G8B8A1_SRGB_BLOCK: + { + out_block_size_uvec2_ptr[0] = 4; + out_block_size_uvec2_ptr[1] = 4; + *out_n_bytes_per_block_ptr = 64 / 8; + + break; + } + + case Anvil::Format::ASTC_5x4_UNORM_BLOCK: + case Anvil::Format::ASTC_5x4_SRGB_BLOCK: + { + out_block_size_uvec2_ptr[0] = 5; + out_block_size_uvec2_ptr[1] = 4; + *out_n_bytes_per_block_ptr = 128 / 8; + + break; + } + + case Anvil::Format::ASTC_5x5_UNORM_BLOCK: + case Anvil::Format::ASTC_5x5_SRGB_BLOCK: + { + out_block_size_uvec2_ptr[0] = 5; + out_block_size_uvec2_ptr[1] = 5; + *out_n_bytes_per_block_ptr = 128 / 8; + + break; + } + + case Anvil::Format::ASTC_6x5_UNORM_BLOCK: + case Anvil::Format::ASTC_6x5_SRGB_BLOCK: + { + out_block_size_uvec2_ptr[0] = 6; + out_block_size_uvec2_ptr[1] = 5; + *out_n_bytes_per_block_ptr = 128 / 8; + + break; + } + + case Anvil::Format::ASTC_6x6_UNORM_BLOCK: + case Anvil::Format::ASTC_6x6_SRGB_BLOCK: + { + out_block_size_uvec2_ptr[0] = 6; + out_block_size_uvec2_ptr[1] = 6; + *out_n_bytes_per_block_ptr = 128 / 8; + + break; + } + + case Anvil::Format::ASTC_8x5_UNORM_BLOCK: + case Anvil::Format::ASTC_8x5_SRGB_BLOCK: + { + out_block_size_uvec2_ptr[0] = 8; + out_block_size_uvec2_ptr[1] = 5; 
+ *out_n_bytes_per_block_ptr = 128 / 8; + + break; + } + + case Anvil::Format::ASTC_8x6_UNORM_BLOCK: + case Anvil::Format::ASTC_8x6_SRGB_BLOCK: + { + out_block_size_uvec2_ptr[0] = 8; + out_block_size_uvec2_ptr[1] = 6; + *out_n_bytes_per_block_ptr = 128 / 8; + + break; + } + + case Anvil::Format::ASTC_8x8_UNORM_BLOCK: + case Anvil::Format::ASTC_8x8_SRGB_BLOCK: + { + out_block_size_uvec2_ptr[0] = 8; + out_block_size_uvec2_ptr[1] = 8; + *out_n_bytes_per_block_ptr = 128 / 8; + + break; + } + + case Anvil::Format::ASTC_10x5_UNORM_BLOCK: + case Anvil::Format::ASTC_10x5_SRGB_BLOCK: + { + out_block_size_uvec2_ptr[0] = 10; + out_block_size_uvec2_ptr[1] = 5; + *out_n_bytes_per_block_ptr = 128 / 8; + + break; + } + + case Anvil::Format::ASTC_10x6_UNORM_BLOCK: + case Anvil::Format::ASTC_10x6_SRGB_BLOCK: + { + out_block_size_uvec2_ptr[0] = 10; + out_block_size_uvec2_ptr[1] = 6; + *out_n_bytes_per_block_ptr = 128 / 8; + + break; + } + + case Anvil::Format::ASTC_10x8_UNORM_BLOCK: + case Anvil::Format::ASTC_10x8_SRGB_BLOCK: + { + out_block_size_uvec2_ptr[0] = 10; + out_block_size_uvec2_ptr[1] = 8; + *out_n_bytes_per_block_ptr = 128 / 8; + + break; + } + + case Anvil::Format::ASTC_10x10_UNORM_BLOCK: + case Anvil::Format::ASTC_10x10_SRGB_BLOCK: + { + out_block_size_uvec2_ptr[0] = 10; + out_block_size_uvec2_ptr[1] = 10; + *out_n_bytes_per_block_ptr = 128 / 8; + + break; + } + + case Anvil::Format::ASTC_12x10_UNORM_BLOCK: + case Anvil::Format::ASTC_12x10_SRGB_BLOCK: + { + out_block_size_uvec2_ptr[0] = 12; + out_block_size_uvec2_ptr[1] = 10; + *out_n_bytes_per_block_ptr = 128 / 8; + + break; + } + + case Anvil::Format::ASTC_12x12_UNORM_BLOCK: + case Anvil::Format::ASTC_12x12_SRGB_BLOCK: + { + out_block_size_uvec2_ptr[0] = 12; + out_block_size_uvec2_ptr[1] = 12; + *out_n_bytes_per_block_ptr = 128 / 8; + + break; + } + + // YUV KHR compressed formats + case Anvil::Format::G8B8G8R8_422_UNORM: + case Anvil::Format::B8G8R8G8_422_UNORM: + { + out_block_size_uvec2_ptr[0] = 2; + out_block_size_uvec2_ptr[1] = 1; + *out_n_bytes_per_block_ptr = 32 / 8; + + break; + } + + case Anvil::Format::G10X6B10X6G10X6R10X6_422_UNORM_4PACK16: + case Anvil::Format::B10X6G10X6R10X6G10X6_422_UNORM_4PACK16: + case Anvil::Format::G12X4B12X4G12X4R12X4_422_UNORM_4PACK16: + case Anvil::Format::B12X4G12X4R12X4G12X4_422_UNORM_4PACK16: + case Anvil::Format::G16B16G16R16_422_UNORM: + case Anvil::Format::B16G16R16G16_422_UNORM: + { + out_block_size_uvec2_ptr[0] = 2; + out_block_size_uvec2_ptr[1] = 1; + *out_n_bytes_per_block_ptr = 64 / 8; + + break; + } + + default: + { + anvil_assert_fail(); + } + } + + return result; +} + +/** Please see header for specification */ +Anvil::Format Anvil::Formats::get_format(ComponentLayout in_component_layout, + FormatType in_format_type, + uint32_t in_n_component0_bits, + uint32_t in_n_component1_bits, + uint32_t in_n_component2_bits, + uint32_t in_n_component3_bits) +{ + static const uint32_t n_available_formats = sizeof(g_formats) / sizeof(g_formats[0]); + Anvil::Format result = Anvil::Format::UNKNOWN; + + for (uint32_t n_format = 0; + n_format < n_available_formats; + ++n_format) + { + const FormatInfo& current_format_info = g_formats[n_format]; + + if (current_format_info.component_layout == in_component_layout && + current_format_info.format_type == in_format_type && + current_format_info.component_bits[0] == in_n_component0_bits && + current_format_info.component_bits[1] == in_n_component1_bits && + current_format_info.component_bits[2] == in_n_component2_bits && + current_format_info.component_bits[3] == 
in_n_component3_bits) + { + result = current_format_info.format; + + break; + } + } + + return result; +} + +/** Please see header for specification */ +bool Anvil::Formats::get_format_aspects(Anvil::Format in_format, + std::vector* out_aspects_ptr) +{ + bool result = false; + + out_aspects_ptr->clear(); + + if (Anvil::Formats::is_format_yuv_khr(in_format) ) + { + if (!Anvil::Formats::is_format_multiplanar(in_format) ) + { + out_aspects_ptr->push_back(Anvil::ImageAspectFlagBits::COLOR_BIT); + } + else + { + const auto n_planes = get_format_n_planes(in_format); + + out_aspects_ptr->push_back(Anvil::ImageAspectFlagBits::PLANE_0_BIT); + + if (n_planes >= 2) + { + out_aspects_ptr->push_back(Anvil::ImageAspectFlagBits::PLANE_1_BIT); + } + + if (n_planes >= 3) + { + out_aspects_ptr->push_back(Anvil::ImageAspectFlagBits::PLANE_2_BIT); + } + + anvil_assert(n_planes >= 1 && n_planes <= 3); + } + } + else + { + /* This image can hold color and/or depth and/or stencil aspects only */ + const Anvil::ComponentLayout format_layout = Anvil::Formats::get_format_component_layout_nonyuv(in_format); + + if (format_layout == Anvil::ComponentLayout::ABGR || + format_layout == Anvil::ComponentLayout::ARGB || + format_layout == Anvil::ComponentLayout::BGR || + format_layout == Anvil::ComponentLayout::BGRA || + format_layout == Anvil::ComponentLayout::EBGR || + format_layout == Anvil::ComponentLayout::R || + format_layout == Anvil::ComponentLayout::RG || + format_layout == Anvil::ComponentLayout::RGB || + format_layout == Anvil::ComponentLayout::RGBA) + { + out_aspects_ptr->push_back(Anvil::ImageAspectFlagBits::COLOR_BIT); + } + + if (format_layout == Anvil::ComponentLayout::D || + format_layout == Anvil::ComponentLayout::DS || + format_layout == Anvil::ComponentLayout::XD) + { + out_aspects_ptr->push_back(Anvil::ImageAspectFlagBits::DEPTH_BIT); + } + + if (format_layout == Anvil::ComponentLayout::DS || + format_layout == Anvil::ComponentLayout::S) + { + out_aspects_ptr->push_back(Anvil::ImageAspectFlagBits::STENCIL_BIT); + } + } + + result = true; + return result; +} + +/** Please see header for specification */ +void Anvil::Formats::get_format_bit_layout_nonyuv(Anvil::Format in_format, + uint32_t* out_opt_red_component_start_bit_index_ptr, + uint32_t* out_opt_red_component_end_bit_index_ptr, + uint32_t* out_opt_green_component_start_bit_index_ptr, + uint32_t* out_opt_green_component_end_bit_index_ptr, + uint32_t* out_opt_blue_component_start_bit_index_ptr, + uint32_t* out_opt_blue_component_end_bit_index_ptr, + uint32_t* out_opt_alpha_component_start_bit_index_ptr, + uint32_t* out_opt_alpha_component_end_bit_index_ptr, + uint32_t* out_opt_shared_component_start_bit_index_ptr, + uint32_t* out_opt_shared_component_end_bit_index_ptr, + uint32_t* out_opt_depth_component_start_bit_index_ptr, + uint32_t* out_opt_depth_component_end_bit_index_ptr, + uint32_t* out_opt_stencil_component_start_bit_index_ptr, + uint32_t* out_opt_stencil_component_end_bit_index_ptr) +{ + anvil_assert(!is_format_yuv_khr(in_format) ); + + const auto& format_info = g_nonyuv_format_bit_layout_info.at(in_format); + + if (out_opt_red_component_start_bit_index_ptr != nullptr) + { + *out_opt_red_component_start_bit_index_ptr = format_info.red_component_start_bit_index; + } + + if (out_opt_red_component_end_bit_index_ptr != nullptr) + { + *out_opt_red_component_end_bit_index_ptr = format_info.red_component_last_bit_index; + } + + if (out_opt_green_component_start_bit_index_ptr != nullptr) + { + *out_opt_green_component_start_bit_index_ptr = 
format_info.green_component_start_bit_index; + } + + if (out_opt_green_component_end_bit_index_ptr != nullptr) + { + *out_opt_green_component_end_bit_index_ptr = format_info.green_component_last_bit_index; + } + + if (out_opt_blue_component_start_bit_index_ptr != nullptr) + { + *out_opt_blue_component_start_bit_index_ptr = format_info.blue_component_start_bit_index; + } + + if (out_opt_blue_component_end_bit_index_ptr != nullptr) + { + *out_opt_blue_component_end_bit_index_ptr = format_info.blue_component_last_bit_index; + } + + if (out_opt_alpha_component_start_bit_index_ptr != nullptr) + { + *out_opt_alpha_component_start_bit_index_ptr = format_info.alpha_component_start_bit_index; + } + + if (out_opt_alpha_component_end_bit_index_ptr != nullptr) + { + *out_opt_alpha_component_end_bit_index_ptr = format_info.alpha_component_last_bit_index; + } + + if (out_opt_shared_component_start_bit_index_ptr != nullptr) + { + *out_opt_shared_component_start_bit_index_ptr = format_info.shared_component_start_bit_index; + } + + if (out_opt_shared_component_end_bit_index_ptr != nullptr) + { + *out_opt_shared_component_end_bit_index_ptr = format_info.shared_component_last_bit_index; + } + + if (out_opt_depth_component_start_bit_index_ptr != nullptr) + { + *out_opt_depth_component_start_bit_index_ptr = format_info.depth_component_start_bit_index; + } + + if (out_opt_depth_component_end_bit_index_ptr != nullptr) + { + *out_opt_depth_component_end_bit_index_ptr = format_info.depth_component_last_bit_index; + } + + if (out_opt_stencil_component_start_bit_index_ptr != nullptr) + { + *out_opt_stencil_component_start_bit_index_ptr = format_info.stencil_component_start_bit_index; + } + + if (out_opt_stencil_component_end_bit_index_ptr != nullptr) + { + *out_opt_stencil_component_end_bit_index_ptr = format_info.stencil_component_last_bit_index; + } +} + +void Anvil::Formats::get_format_bit_layout_yuv(Anvil::Format in_format, + uint32_t* out_opt_plane0_r0_start_bit_index_ptr, + uint32_t* out_opt_plane0_r0_end_bit_index_ptr, + uint32_t* out_opt_plane0_g0_start_bit_index_ptr, + uint32_t* out_opt_plane0_g0_end_bit_index_ptr, + uint32_t* out_opt_plane0_b0_start_bit_index_ptr, + uint32_t* out_opt_plane0_b0_end_bit_index_ptr, + uint32_t* out_opt_plane0_a0_start_bit_index_ptr, + uint32_t* out_opt_plane0_a0_end_bit_index_ptr, + uint32_t* out_opt_plane0_g1_start_bit_index_ptr, + uint32_t* out_opt_plane0_g1_end_bit_index_ptr, + uint32_t* out_opt_plane1_r0_start_bit_index_ptr, + uint32_t* out_opt_plane1_r0_end_bit_index_ptr, + uint32_t* out_opt_plane1_g0_start_bit_index_ptr, + uint32_t* out_opt_plane1_g0_end_bit_index_ptr, + uint32_t* out_opt_plane1_b0_start_bit_index_ptr, + uint32_t* out_opt_plane1_b0_end_bit_index_ptr, + uint32_t* out_opt_plane2_r0_start_bit_index_ptr, + uint32_t* out_opt_plane2_r0_end_bit_index_ptr, + uint32_t* out_opt_plane2_g0_start_bit_index_ptr, + uint32_t* out_opt_plane2_g0_end_bit_index_ptr, + uint32_t* out_opt_plane2_b0_start_bit_index_ptr, + uint32_t* out_opt_plane2_b0_end_bit_index_ptr) { - /* COMPONENT_LAYOUT_ABGR */ - 4, + anvil_assert(is_format_yuv_khr(in_format) ); - /* COMPONENT_LAYOUT_ARGB */ - 4, + const auto& format_info = g_yuv_format_bit_layout_info.at(in_format); - /* COMPONENT_LAYOUT_BGR */ - 3, + if (out_opt_plane0_r0_start_bit_index_ptr != nullptr) + { + *out_opt_plane0_r0_start_bit_index_ptr = format_info.plane0_r0_start_bit_index; + } - /* COMPONENT_LAYOUT_BGRA */ - 4, + if (out_opt_plane0_r0_end_bit_index_ptr != nullptr) + { + *out_opt_plane0_r0_end_bit_index_ptr = 
format_info.plane0_r0_last_bit_index; + } - /* COMPONENT_LAYOUT_D */ - 1, + if (out_opt_plane0_g0_start_bit_index_ptr != nullptr) + { + *out_opt_plane0_g0_start_bit_index_ptr = format_info.plane0_g0_start_bit_index; + } - /* COMPONENT_LAYOUT_DS */ - 2, + if (out_opt_plane0_g0_end_bit_index_ptr != nullptr) + { + *out_opt_plane0_g0_end_bit_index_ptr = format_info.plane0_g0_last_bit_index; + } - /* COMPONENT_LAYOUT_EBGR */ - 4, + if (out_opt_plane0_b0_start_bit_index_ptr != nullptr) + { + *out_opt_plane0_b0_start_bit_index_ptr = format_info.plane0_b0_start_bit_index; + } - /* COMPONENT_LAYOUT_R */ - 1, + if (out_opt_plane0_b0_end_bit_index_ptr != nullptr) + { + *out_opt_plane0_b0_end_bit_index_ptr = format_info.plane0_b0_last_bit_index; + } - /* COMPONENT_LAYOUT_RG */ - 2, + if (out_opt_plane0_a0_start_bit_index_ptr != nullptr) + { + *out_opt_plane0_a0_start_bit_index_ptr = format_info.plane0_a0_start_bit_index; + } - /* COMPONENT_LAYOUT_RGB */ - 3, + if (out_opt_plane0_a0_end_bit_index_ptr != nullptr) + { + *out_opt_plane0_a0_end_bit_index_ptr = format_info.plane0_a0_last_bit_index; + } - /* COMPONENT_LAYOUT_RGBA */ - 4, + if (out_opt_plane0_g1_start_bit_index_ptr != nullptr) + { + *out_opt_plane0_g1_start_bit_index_ptr = format_info.plane0_g0_start_bit_index; + } - /* COMPONENT_LAYOUT_S */ - 1, + if (out_opt_plane0_g1_end_bit_index_ptr != nullptr) + { + *out_opt_plane0_g1_end_bit_index_ptr = format_info.plane0_g1_last_bit_index; + } - /* COMPONENT_LAYOUT_XD */ - 2, -}; + if (out_opt_plane1_r0_start_bit_index_ptr != nullptr) + { + *out_opt_plane1_r0_start_bit_index_ptr = format_info.plane1_r0_start_bit_index; + } + + if (out_opt_plane1_r0_end_bit_index_ptr != nullptr) + { + *out_opt_plane1_r0_end_bit_index_ptr = format_info.plane1_r0_last_bit_index; + } + + if (out_opt_plane1_g0_start_bit_index_ptr != nullptr) + { + *out_opt_plane1_g0_start_bit_index_ptr = format_info.plane1_g0_start_bit_index; + } + + if (out_opt_plane1_g0_end_bit_index_ptr != nullptr) + { + *out_opt_plane1_g0_end_bit_index_ptr = format_info.plane1_g0_last_bit_index; + } + + if (out_opt_plane1_b0_start_bit_index_ptr != nullptr) + { + *out_opt_plane1_b0_start_bit_index_ptr = format_info.plane1_b0_start_bit_index; + } + + if (out_opt_plane1_b0_end_bit_index_ptr != nullptr) + { + *out_opt_plane1_b0_end_bit_index_ptr = format_info.plane1_b0_last_bit_index; + } + + if (out_opt_plane2_r0_start_bit_index_ptr != nullptr) + { + *out_opt_plane2_r0_start_bit_index_ptr = format_info.plane2_r0_start_bit_index; + } + + if (out_opt_plane2_r0_end_bit_index_ptr != nullptr) + { + *out_opt_plane2_r0_end_bit_index_ptr = format_info.plane2_r0_last_bit_index; + } + + if (out_opt_plane2_g0_start_bit_index_ptr != nullptr) + { + *out_opt_plane2_g0_start_bit_index_ptr = format_info.plane2_g0_start_bit_index; + } + + if (out_opt_plane2_g0_end_bit_index_ptr != nullptr) + { + *out_opt_plane2_g0_end_bit_index_ptr = format_info.plane2_g0_last_bit_index; + } + + if (out_opt_plane2_b0_start_bit_index_ptr != nullptr) + { + *out_opt_plane2_b0_start_bit_index_ptr = format_info.plane2_b0_start_bit_index; + } + + if (out_opt_plane2_b0_end_bit_index_ptr != nullptr) + { + *out_opt_plane2_b0_end_bit_index_ptr = format_info.plane2_b0_last_bit_index; + } +} /** Please see header for specification */ -VkFormat Anvil::Formats::get_format(ComponentLayout in_component_layout, - FormatType in_format_type, - uint32_t in_n_component0_bits, - uint32_t in_n_component1_bits, - uint32_t in_n_component2_bits, - uint32_t in_n_component3_bits) +Anvil::ComponentLayout 
Anvil::Formats::get_format_component_layout_nonyuv(Anvil::Format in_format) { - static const uint32_t n_available_formats = sizeof(formats) / sizeof(formats[0]); - VkFormat result = VK_FORMAT_UNDEFINED; + // static_assert(sizeof(g_formats) / sizeof(g_formats[0]) == VK_FORMAT_RANGE_SIZE, ""); + // anvil_assert(static_cast(in_format) < VK_FORMAT_RANGE_SIZE); + anvil_assert(!Anvil::Formats::is_format_yuv_khr(in_format) ); - for (uint32_t n_format = 0; - n_format < n_available_formats; - ++n_format) + return g_formats[static_cast(in_format)].component_layout; +} + +/** Please see header for specification */ +Anvil::ComponentLayout Anvil::Formats::get_format_component_layout_yuv(Anvil::Format in_format, + Anvil::ImageAspectFlagBits in_aspect) +{ + const auto plane_idx = Anvil::Formats::get_yuv_format_plane_index(in_format, + in_aspect); + const auto YUV_FORMAT_KHR_SIZE = static_cast(Anvil::Format::G16_B16_R16_3PLANE_444_UNORM) - static_cast(Anvil::Format::G8B8G8R8_422_UNORM) + 1; + + ANVIL_REDUNDANT_VARIABLE_CONST(YUV_FORMAT_KHR_SIZE); + + anvil_assert(g_yuv_formats.size() == YUV_FORMAT_KHR_SIZE); + anvil_assert(Anvil::Formats::is_format_yuv_khr(in_format) ); + + return g_yuv_formats.at(in_format).subresources[plane_idx].component_layout; +} + +/** Please see header for specification */ +uint32_t Anvil::Formats::get_format_n_components_nonyuv(Anvil::Format in_format) +{ + // anvil_assert(static_cast(in_format) < VK_FORMAT_RANGE_SIZE); + anvil_assert(!Anvil::Formats::is_format_yuv_khr(in_format) ); + + return g_layout_to_n_components[static_cast(g_formats[static_cast(in_format)].component_layout)]; +} + +/** Please see header for specification */ +uint32_t Anvil::Formats::get_format_n_components_yuv(Anvil::Format in_format, + Anvil::ImageAspectFlagBits in_aspect) +{ + const auto plane_idx = Anvil::Formats::get_yuv_format_plane_index(in_format, + in_aspect); + + anvil_assert(Anvil::Formats::is_format_yuv_khr(in_format) ); + + return g_layout_to_n_components[static_cast(g_yuv_formats.at(in_format).subresources[plane_idx].component_layout)]; +} + +/** Please see header for specification */ +void Anvil::Formats::get_format_n_component_bits_nonyuv(Anvil::Format in_format, + uint32_t* out_channel0_bits_ptr, + uint32_t* out_channel1_bits_ptr, + uint32_t* out_channel2_bits_ptr, + uint32_t* out_channel3_bits_ptr) +{ + const FormatInfo* format_props_ptr = nullptr; + + // anvil_assert(static_cast(in_format) < VK_FORMAT_RANGE_SIZE); + anvil_assert(!Anvil::Formats::is_format_yuv_khr(in_format) ); + + format_props_ptr = g_formats + static_cast(in_format); + + *out_channel0_bits_ptr = format_props_ptr->component_bits[0]; + *out_channel1_bits_ptr = format_props_ptr->component_bits[1]; + *out_channel2_bits_ptr = format_props_ptr->component_bits[2]; + *out_channel3_bits_ptr = format_props_ptr->component_bits[3]; +} + +/** Please see header for specification */ +void Anvil::Formats::get_format_n_component_bits_yuv(Anvil::Format in_format, + Anvil::ImageAspectFlagBits in_aspect, + uint32_t* out_channel0_bits_ptr, + uint32_t* out_channel1_bits_ptr, + uint32_t* out_channel2_bits_ptr, + uint32_t* out_channel3_bits_ptr) +{ + const SubresourceLayoutInfo* format_props_ptr = nullptr; + const auto plane_idx = Anvil::Formats::get_yuv_format_plane_index(in_format, + in_aspect); + + anvil_assert(Anvil::Formats::is_format_yuv_khr(in_format) ); + + format_props_ptr = &g_yuv_formats.at(in_format).subresources.at(plane_idx); + + *out_channel0_bits_ptr = format_props_ptr->component_bits_used[0]; + *out_channel1_bits_ptr = 
format_props_ptr->component_bits_used[1]; + *out_channel2_bits_ptr = format_props_ptr->component_bits_used[2]; + *out_channel3_bits_ptr = format_props_ptr->component_bits_used[3]; +} + +/** Please see header for specification */ +uint32_t Anvil::Formats::get_format_n_planes(Anvil::Format in_format) +{ + uint32_t result = 1; + + switch (in_format) { - const FormatInfo& current_format_info = formats[n_format]; + case Anvil::Format::G8_B8R8_2PLANE_420_UNORM: + case Anvil::Format::G8_B8R8_2PLANE_422_UNORM: + case Anvil::Format::G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16: + case Anvil::Format::G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16: + case Anvil::Format::G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16: + case Anvil::Format::G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16: + case Anvil::Format::G16_B16R16_2PLANE_420_UNORM: + case Anvil::Format::G16_B16R16_2PLANE_422_UNORM: + { + result = 2; - if (current_format_info.component_layout == in_component_layout && - current_format_info.format_type == in_format_type && - current_format_info.component_bits[0] == in_n_component0_bits && - current_format_info.component_bits[1] == in_n_component1_bits && - current_format_info.component_bits[2] == in_n_component2_bits && - current_format_info.component_bits[3] == in_n_component3_bits) + break; + } + + case Anvil::Format::G8_B8_R8_3PLANE_420_UNORM: + case Anvil::Format::G8_B8_R8_3PLANE_422_UNORM: + case Anvil::Format::G8_B8_R8_3PLANE_444_UNORM: + case Anvil::Format::G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16: + case Anvil::Format::G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16: + case Anvil::Format::G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16: + case Anvil::Format::G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16: + case Anvil::Format::G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16: + case Anvil::Format::G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16: + case Anvil::Format::G16_B16_R16_3PLANE_420_UNORM: + case Anvil::Format::G16_B16_R16_3PLANE_422_UNORM: + case Anvil::Format::G16_B16_R16_3PLANE_444_UNORM: { - result = current_format_info.format; + result = 3; break; } + + default: + { + /* Fall-through */ + } } return result; } /** Please see header for specification */ -bool Anvil::Formats::get_format_aspects(VkFormat in_format, - std::vector* out_aspects_ptr) +void Anvil::Formats::get_format_n_unused_component_bits_yuv(Anvil::Format in_format, + Anvil::ImageAspectFlagBits in_aspect, + uint32_t* out_channel0_unused_bits_ptr, + uint32_t* out_channel1_unused_bits_ptr, + uint32_t* out_channel2_unused_bits_ptr, + uint32_t* out_channel3_unused_bits_ptr) { - bool result = false; + const SubresourceLayoutInfo* format_props_ptr = nullptr; + const auto plane_idx = Anvil::Formats::get_yuv_format_plane_index(in_format, + in_aspect); - out_aspects_ptr->clear(); + anvil_assert(Anvil::Formats::is_format_yuv_khr(in_format) ); + + format_props_ptr = &g_yuv_formats.at(in_format).subresources.at(plane_idx); - /* This image can hold color and/or depth and/or stencil aspects only */ - const Anvil::ComponentLayout format_layout = Anvil::Formats::get_format_component_layout(in_format); + *out_channel0_unused_bits_ptr = format_props_ptr->component_bits_unused[0]; + *out_channel1_unused_bits_ptr = format_props_ptr->component_bits_unused[1]; + *out_channel2_unused_bits_ptr = format_props_ptr->component_bits_unused[2]; + *out_channel3_unused_bits_ptr = format_props_ptr->component_bits_unused[3]; +} + +/** Please see header for specification */ +const char* Anvil::Formats::get_format_name(Anvil::Format in_format) +{ + // anvil_assert(static_cast(in_format) < 
VK_FORMAT_RANGE_SIZE || Anvil::Formats::is_format_yuv_khr(in_format) ); - if (format_layout == Anvil::COMPONENT_LAYOUT_ABGR || - format_layout == Anvil::COMPONENT_LAYOUT_ARGB || - format_layout == Anvil::COMPONENT_LAYOUT_BGR || - format_layout == Anvil::COMPONENT_LAYOUT_BGRA || - format_layout == Anvil::COMPONENT_LAYOUT_EBGR || - format_layout == Anvil::COMPONENT_LAYOUT_R || - format_layout == Anvil::COMPONENT_LAYOUT_RG || - format_layout == Anvil::COMPONENT_LAYOUT_RGB || - format_layout == Anvil::COMPONENT_LAYOUT_RGBA) + if (Anvil::Formats::is_format_yuv_khr(in_format) ) { - out_aspects_ptr->push_back(VK_IMAGE_ASPECT_COLOR_BIT); + return g_yuv_formats.at(in_format).name; } - - if (format_layout == Anvil::COMPONENT_LAYOUT_D || - format_layout == Anvil::COMPONENT_LAYOUT_DS || - format_layout == Anvil::COMPONENT_LAYOUT_XD) + else { - out_aspects_ptr->push_back(VK_IMAGE_ASPECT_DEPTH_BIT); + return g_formats[static_cast(in_format)].name; } +} + +/** Please see header for specification */ +Anvil::FormatType Anvil::Formats::get_format_type(Anvil::Format in_format) +{ + // anvil_assert(static_cast(in_format) < VK_FORMAT_RANGE_SIZE || Anvil::Formats::is_format_yuv_khr(in_format) ); - if (format_layout == Anvil::COMPONENT_LAYOUT_DS || - format_layout == Anvil::COMPONENT_LAYOUT_S) + if (Anvil::Formats::is_format_yuv_khr(in_format) ) + { + return g_yuv_formats.at(in_format).format_type; + } + else { - out_aspects_ptr->push_back(VK_IMAGE_ASPECT_STENCIL_BIT); + return g_formats[static_cast(in_format)].format_type; } +} - result = true; +/** Please see header for specification */ +uint32_t Anvil::Formats::get_yuv_format_n_planes(Anvil::Format in_format) +{ + anvil_assert(Anvil::Formats::is_format_yuv_khr(in_format) ); - return result; + return g_yuv_formats.at(in_format).num_planes; } +/** Please see header for specification */ +void Anvil::Formats::get_yuv_format_plane_extent(Anvil::Format in_format, + Anvil::ImageAspectFlagBits in_aspect, + VkExtent3D in_image_extent, + VkExtent3D* out_plane_extent_ptr) +{ + anvil_assert(Anvil::Formats::is_format_yuv_khr(in_format) ); + + if (!Anvil::Formats::is_format_multiplanar(in_format) ) + { + anvil_assert(in_aspect == Anvil::ImageAspectFlagBits::COLOR_BIT); + + switch (in_format) + { + case Anvil::Format::G8B8G8R8_422_UNORM: + case Anvil::Format::B8G8R8G8_422_UNORM: + case Anvil::Format::G16B16G16R16_422_UNORM: + case Anvil::Format::B16G16R16G16_422_UNORM: + case Anvil::Format::G10X6B10X6G10X6R10X6_422_UNORM_4PACK16: + case Anvil::Format::B10X6G10X6R10X6G10X6_422_UNORM_4PACK16: + case Anvil::Format::G12X4B12X4G12X4R12X4_422_UNORM_4PACK16: + case Anvil::Format::B12X4G12X4R12X4G12X4_422_UNORM_4PACK16: + { + anvil_assert((in_image_extent.width % 2) == 0); + + *out_plane_extent_ptr = in_image_extent; + break; + } + + case Anvil::Format::R10X6_UNORM_PACK16: + case Anvil::Format::R12X4_UNORM_PACK16: + case Anvil::Format::R10X6G10X6_UNORM_2PACK16: + case Anvil::Format::R12X4G12X4_UNORM_2PACK16: + case Anvil::Format::R10X6G10X6B10X6A10X6_UNORM_4PACK16: + case Anvil::Format::R12X4G12X4B12X4A12X4_UNORM_4PACK16: + { + *out_plane_extent_ptr = in_image_extent; + break; + } + + default: + { + anvil_assert_fail(); + } + } + } + else + { + anvil_assert(in_aspect == Anvil::ImageAspectFlagBits::PLANE_0_BIT || + in_aspect == Anvil::ImageAspectFlagBits::PLANE_1_BIT || + in_aspect == Anvil::ImageAspectFlagBits::PLANE_2_BIT); + + switch (in_format) + { + case Anvil::Format::G8_B8_R8_3PLANE_420_UNORM: + case Anvil::Format::G8_B8R8_2PLANE_420_UNORM: + case 
Anvil::Format::G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16: + case Anvil::Format::G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16: + case Anvil::Format::G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16: + case Anvil::Format::G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16: + case Anvil::Format::G16_B16_R16_3PLANE_420_UNORM: + case Anvil::Format::G16_B16R16_2PLANE_420_UNORM: + { + anvil_assert((in_image_extent.width % 2) == 0); + anvil_assert((in_image_extent.height % 2) == 0); + + if (in_aspect == Anvil::ImageAspectFlagBits::PLANE_0_BIT) + { + *out_plane_extent_ptr = in_image_extent; + } + else + { + *out_plane_extent_ptr = {in_image_extent.width / 2, + in_image_extent.height / 2, + in_image_extent.depth}; + } + + break; + } + case Anvil::Format::G8_B8_R8_3PLANE_422_UNORM: + case Anvil::Format::G8_B8R8_2PLANE_422_UNORM: + case Anvil::Format::G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16: + case Anvil::Format::G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16: + case Anvil::Format::G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16: + case Anvil::Format::G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16: + case Anvil::Format::G16_B16_R16_3PLANE_422_UNORM: + case Anvil::Format::G16_B16R16_2PLANE_422_UNORM: + { + anvil_assert((in_image_extent.width % 2) == 0); + + if (in_aspect == Anvil::ImageAspectFlagBits::PLANE_0_BIT) + { + *out_plane_extent_ptr = in_image_extent; + } + else + { + *out_plane_extent_ptr = {in_image_extent.width / 2, + in_image_extent.height, + in_image_extent.depth}; + } + + break; + } + + case Anvil::Format::G8_B8_R8_3PLANE_444_UNORM: + case Anvil::Format::G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16: + case Anvil::Format::G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16: + case Anvil::Format::G16_B16_R16_3PLANE_444_UNORM: + { + *out_plane_extent_ptr = in_image_extent; + + break; + } + + default: + { + anvil_assert_fail(); + } + } + } +} /** Please see header for specification */ -Anvil::ComponentLayout Anvil::Formats::get_format_component_layout(VkFormat in_format) +uint32_t Anvil::Formats::get_yuv_format_plane_index(Anvil::Format in_format, + Anvil::ImageAspectFlagBits in_aspect) { - static_assert(sizeof(formats) / sizeof(formats[0]) == VK_FORMAT_RANGE_SIZE, ""); + const auto n_planes = Anvil::Formats::get_format_n_planes(in_format); + uint32_t plane_idx = 0; + + ANVIL_REDUNDANT_VARIABLE_CONST(n_planes); + + anvil_assert(0 < n_planes && + n_planes <= 3); + + if (!Anvil::Formats::is_format_multiplanar(in_format) ) + { + anvil_assert(n_planes == 1); + anvil_assert(in_aspect == Anvil::ImageAspectFlagBits::COLOR_BIT); + + plane_idx = 0; + } + else + { + anvil_assert(in_aspect == Anvil::ImageAspectFlagBits::PLANE_0_BIT || + in_aspect == Anvil::ImageAspectFlagBits::PLANE_1_BIT || + in_aspect == Anvil::ImageAspectFlagBits::PLANE_2_BIT); + + switch (in_aspect) + { + case Anvil::ImageAspectFlagBits::PLANE_0_BIT: + { + anvil_assert(n_planes >= 1); + + plane_idx = 0; + break; + } + + case Anvil::ImageAspectFlagBits::PLANE_1_BIT: + { + anvil_assert(n_planes >= 2); + + plane_idx = 1; + break; + } - anvil_assert(in_format < VK_FORMAT_RANGE_SIZE); + case Anvil::ImageAspectFlagBits::PLANE_2_BIT: + { + anvil_assert(n_planes == 3); + + plane_idx = 2; + break; + } + + default: + { + anvil_assert_fail(); + } + } + } - return formats[in_format].component_layout; + return plane_idx; } /** Please see header for specification */ -uint32_t Anvil::Formats::get_format_n_components(VkFormat in_format) +bool Anvil::Formats::has_depth_aspect(Anvil::Format in_format) { - anvil_assert(in_format < VK_FORMAT_RANGE_SIZE); + if 
(!Anvil::Formats::is_format_yuv_khr(in_format) ) + { + return (g_formats[static_cast(in_format)].component_layout == Anvil::ComponentLayout::D) || + (g_formats[static_cast(in_format)].component_layout == Anvil::ComponentLayout::DS) || + (g_formats[static_cast(in_format)].component_layout == Anvil::ComponentLayout::XD); + } - return layout_to_n_components[formats[in_format].component_layout]; + return false; } /** Please see header for specification */ -void Anvil::Formats::get_format_n_component_bits(VkFormat in_format, - uint32_t* out_channel0_bits_ptr, - uint32_t* out_channel1_bits_ptr, - uint32_t* out_channel2_bits_ptr, - uint32_t* out_channel3_bits_ptr) +bool Anvil::Formats::has_stencil_aspect(Anvil::Format in_format) { - FormatInfo* format_props_ptr = nullptr; + if (!Anvil::Formats::is_format_yuv_khr(in_format) ) + { + return (g_formats[static_cast(in_format)].component_layout == Anvil::ComponentLayout::S) || + (g_formats[static_cast(in_format)].component_layout == Anvil::ComponentLayout::DS); + } - anvil_assert(in_format < VK_FORMAT_RANGE_SIZE); + return false; +} - format_props_ptr = formats + in_format; +/** Please see header for specification */ +bool Anvil::Formats::is_format_compressed(Anvil::Format in_format) +{ + // anvil_assert(static_cast(in_format) < VK_FORMAT_RANGE_SIZE || Anvil::Formats::is_format_yuv_khr(in_format)); - *out_channel0_bits_ptr = format_props_ptr->component_bits[0]; - *out_channel1_bits_ptr = format_props_ptr->component_bits[1]; - *out_channel2_bits_ptr = format_props_ptr->component_bits[2]; - *out_channel3_bits_ptr = format_props_ptr->component_bits[3]; + if (Anvil::Formats::is_format_yuv_khr(in_format) ) + { + return (g_yuv_formats.at(in_format).subresources[0].component_layout == Anvil::ComponentLayout::GBGR) || + (g_yuv_formats.at(in_format).subresources[0].component_layout == Anvil::ComponentLayout::BGRG) || + (g_yuv_formats.at(in_format).subresources[0].component_layout == Anvil::ComponentLayout::GXBXGXRX) || + (g_yuv_formats.at(in_format).subresources[0].component_layout == Anvil::ComponentLayout::BXGXRXGX); + } + else + { + return (g_formats[static_cast(in_format)].component_bits[0] == g_formats[static_cast(in_format)].component_bits[1]) && + (g_formats[static_cast(in_format)].component_bits[1] == g_formats[static_cast(in_format)].component_bits[2]) && + (g_formats[static_cast(in_format)].component_bits[2] == g_formats[static_cast(in_format)].component_bits[3]) && + (g_formats[static_cast(in_format)].component_bits[0] == 0); + } } /** Please see header for specification */ -const char* Anvil::Formats::get_format_name(VkFormat in_format) +bool Anvil::Formats::is_format_multiplanar(Anvil::Format in_format) { - anvil_assert(in_format < VK_FORMAT_RANGE_SIZE); + // anvil_assert(static_cast(in_format) < VK_FORMAT_RANGE_SIZE || Anvil::Formats::is_format_yuv_khr(in_format) ); + + if (Anvil::Formats::is_format_yuv_khr(in_format) ) + { + return g_yuv_formats.at(in_format).is_multiplanar; + } - return formats[in_format].name; + return false; } /** Please see header for specification */ -Anvil::FormatType Anvil::Formats::get_format_type(VkFormat in_format) +bool Anvil::Formats::is_format_yuv_khr(Anvil::Format in_format) { - anvil_assert(in_format < VK_FORMAT_RANGE_SIZE); - - return formats[in_format].format_type; + return (in_format == Anvil::Format::G8B8G8R8_422_UNORM || + in_format == Anvil::Format::B8G8R8G8_422_UNORM || + in_format == Anvil::Format::G8_B8_R8_3PLANE_420_UNORM || + in_format == Anvil::Format::G8_B8R8_2PLANE_420_UNORM || + in_format == 
Anvil::Format::G8_B8_R8_3PLANE_422_UNORM || + in_format == Anvil::Format::G8_B8R8_2PLANE_422_UNORM || + in_format == Anvil::Format::G8_B8_R8_3PLANE_444_UNORM || + in_format == Anvil::Format::R10X6_UNORM_PACK16 || + in_format == Anvil::Format::R10X6G10X6_UNORM_2PACK16 || + in_format == Anvil::Format::R10X6G10X6B10X6A10X6_UNORM_4PACK16 || + in_format == Anvil::Format::G10X6B10X6G10X6R10X6_422_UNORM_4PACK16 || + in_format == Anvil::Format::B10X6G10X6R10X6G10X6_422_UNORM_4PACK16 || + in_format == Anvil::Format::G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16 || + in_format == Anvil::Format::G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16 || + in_format == Anvil::Format::G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16 || + in_format == Anvil::Format::G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16 || + in_format == Anvil::Format::G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16 || + in_format == Anvil::Format::R12X4_UNORM_PACK16 || + in_format == Anvil::Format::R12X4G12X4_UNORM_2PACK16 || + in_format == Anvil::Format::R12X4G12X4B12X4A12X4_UNORM_4PACK16 || + in_format == Anvil::Format::G12X4B12X4G12X4R12X4_422_UNORM_4PACK16 || + in_format == Anvil::Format::B12X4G12X4R12X4G12X4_422_UNORM_4PACK16 || + in_format == Anvil::Format::G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16 || + in_format == Anvil::Format::G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16 || + in_format == Anvil::Format::G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16 || + in_format == Anvil::Format::G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16 || + in_format == Anvil::Format::G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16 || + in_format == Anvil::Format::G16B16G16R16_422_UNORM || + in_format == Anvil::Format::B16G16R16G16_422_UNORM || + in_format == Anvil::Format::G16_B16_R16_3PLANE_420_UNORM || + in_format == Anvil::Format::G16_B16R16_2PLANE_420_UNORM || + in_format == Anvil::Format::G16_B16_R16_3PLANE_422_UNORM || + in_format == Anvil::Format::G16_B16R16_2PLANE_422_UNORM || + in_format == Anvil::Format::G16_B16_R16_3PLANE_444_UNORM); } /** Please see header for specification */ -bool Anvil::Formats::is_format_compressed(VkFormat in_format) +bool Anvil::Formats::is_format_packed(Anvil::Format in_format) { - anvil_assert(in_format < VK_FORMAT_RANGE_SIZE); + // anvil_assert(static_cast(in_format) < VK_FORMAT_RANGE_SIZE || Anvil::Formats::is_format_yuv_khr(in_format) ); - return (formats[in_format].component_bits[0] == formats[in_format].component_bits[1]) && - (formats[in_format].component_bits[1] == formats[in_format].component_bits[2]) && - (formats[in_format].component_bits[2] == formats[in_format].component_bits[3]) && - (formats[in_format].component_bits[0] == 0); -} + if (Anvil::Formats::is_format_yuv_khr(in_format) ) + { + return g_yuv_formats.at(in_format).is_packed; + } + else + { + return g_formats[static_cast(in_format)].is_packed; + } +} \ No newline at end of file diff --git a/src/misc/fp16.cpp b/src/misc/fp16.cpp index 3c1b7cc2..7fe189e3 100644 --- a/src/misc/fp16.cpp +++ b/src/misc/fp16.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -23,7 +23,7 @@ #include "misc/fp16.h" // Conversion tables -static struct PrecalcedData +static const struct PrecalcedData { uint32_t basetable[512]; unsigned char shifttable[512]; @@ -154,7 +154,7 @@ Anvil::float32_t Anvil::Utils::fp16_to_fp32_full(Anvil::float16_t in_h) else // Normalized number { o.fields.Mantissa = static_cast(in_h.fields.Mantissa << 13); - o.fields.Exponent = static_cast(127 - 15 + in_h.fields.Exponent); + o.fields.Exponent = static_cast(127 - 15 + in_h.fields.Exponent); o.fields.Sign = in_h.fields.Sign; } } @@ -523,7 +523,7 @@ Anvil::float16_t Anvil::Utils::fp32_to_fp16_fast3(Anvil::float32_t in_f) Anvil::float32_t f16infty = { 31 << 23 }; Anvil::float32_t magic = { 15 << 23 }; uint32_t sign_mask = 0x80000000u; - uint32_t round_mask = ~0xfffu; + uint32_t round_mask = ~0xfffu; Anvil::float16_t o; uint32_t sign = in_f.u & sign_mask; @@ -596,7 +596,7 @@ Anvil::float16_t Anvil::Utils::fp32_to_fp16_fast3_rtne(Anvil::float32_t in_f) uint32_t mant_odd = (in_f.u >> 13) & 1; // resulting mantissa is odd // update exponent, rounding bias part 1 - in_f.u += static_cast(((15 - 127) << 23) + 0xfff); + in_f.u += 0xc8000fff; // 0xc8000fff == static_cast(((15 - 127) << 23) + 0xfff); // rounding bias part 2 in_f.u += mant_odd; // take the bits! @@ -653,4 +653,4 @@ Anvil::float16_t Anvil::Utils::fp32_to_fp16_approx(Anvil::float32_t in_f) o.u |= sign >> 16; return o; -} \ No newline at end of file +} diff --git a/src/misc/framebuffer_create_info.cpp b/src/misc/framebuffer_create_info.cpp new file mode 100644 index 00000000..0194ee88 --- /dev/null +++ b/src/misc/framebuffer_create_info.cpp @@ -0,0 +1,179 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
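The 0xC8000FFF literal substituted above can be sanity-checked in isolation; a standalone sketch assuming nothing beyond <cstdint>:

    #include <cstdint>

    /* (15 - 127) == -112; as a 32-bit pattern that is 0xFFFFFF90, and an unsigned
     * left shift by 23 leaves 0xC8000000. Adding the 0xFFF rounding bias gives the
     * precomputed constant used in fp32_to_fp16_fast3_rtne(). */
    static_assert((static_cast<std::uint32_t>(15 - 127) << 23) + 0xFFFu == 0xC8000FFFu,
                  "precomputed rounding bias matches its source expression");
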
+// +#include "misc/framebuffer_create_info.h" +#include "misc/image_view_create_info.h" +#include "wrappers/image.h" +#include "wrappers/image_view.h" + +Anvil::FramebufferCreateInfo::FramebufferAttachment::FramebufferAttachment(ImageView* in_image_view_ptr) + :image_view_ptr(in_image_view_ptr) +{ + anvil_assert(in_image_view_ptr != nullptr); +} + +Anvil::FramebufferCreateInfo::FramebufferAttachment::~FramebufferAttachment() +{ + /* Stub */ +} + +Anvil::FramebufferCreateInfo::FramebufferAttachment::FramebufferAttachment(const FramebufferAttachment& in) +{ + image_view_ptr = in.image_view_ptr; +} + +Anvil::FramebufferCreateInfo::FramebufferAttachment& Anvil::FramebufferCreateInfo::FramebufferAttachment::operator=(const Anvil::FramebufferCreateInfo::FramebufferAttachment& in) +{ + image_view_ptr = in.image_view_ptr; + + return *this; +} + +Anvil::FramebufferCreateInfo::FramebufferCreateInfo(const Anvil::BaseDevice* in_device_ptr, + const uint32_t& in_width, + const uint32_t& in_height, + const uint32_t& in_n_layers, + MTSafety in_mt_safety) + :m_device_ptr(in_device_ptr), + m_height (in_height), + m_mt_safety (in_mt_safety), + m_n_layers (in_n_layers), + m_width (in_width) +{ + anvil_assert(in_device_ptr != nullptr); + anvil_assert(in_height >= 1); + anvil_assert(in_n_layers >= 1); + anvil_assert(in_width >= 1); +} + +bool Anvil::FramebufferCreateInfo::add_attachment(ImageView* in_image_view_ptr, + FramebufferAttachmentID* out_opt_attachment_id_ptr) +{ + const Anvil::Image* parent_image_ptr; + bool result = false; + uint32_t view_size[3]; + + /* Sanity checks: Input image view must not be nullptr */ + anvil_assert(in_image_view_ptr != nullptr); + + /* Sanity checks: make sure the image views have the same, or larger size than the framebuffer's. */ + parent_image_ptr = in_image_view_ptr->get_create_info_ptr()->get_parent_image(); + + anvil_assert(parent_image_ptr != nullptr); + + if (!parent_image_ptr->get_image_mipmap_size(in_image_view_ptr->get_create_info_ptr()->get_base_mipmap_level(), + view_size + 0, + view_size + 1, + view_size + 2) ) + { + /* Why is the mipmap index invalid? */ + anvil_assert_fail(); + + goto end; + } + + if (view_size[0] < m_width || + view_size[1] < m_height) + { + /* Attachment size is wrong */ + anvil_assert_fail(); + + goto end; + } + + /* Spawn & store a new descriptor for the attachment. 
*/ + if (out_opt_attachment_id_ptr != nullptr) + { + *out_opt_attachment_id_ptr = static_cast(m_attachments.size() ); + } + + m_attachments.push_back(FramebufferAttachment(in_image_view_ptr) ); + + /* All done */ + result = true; + +end: + return result; +} + +Anvil::FramebufferCreateInfoUniquePtr Anvil::FramebufferCreateInfo::create(const Anvil::BaseDevice* in_device_ptr, + const uint32_t& in_width, + const uint32_t& in_height, + const uint32_t& in_n_layers) +{ + Anvil::FramebufferCreateInfoUniquePtr result_ptr(nullptr, + std::default_delete() ); + + result_ptr.reset( + new Anvil::FramebufferCreateInfo(in_device_ptr, + in_width, + in_height, + in_n_layers, + Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE) + ); + + return result_ptr; +} + +bool Anvil::FramebufferCreateInfo::get_attachment_at_index(uint32_t in_attachment_index, + ImageView** out_image_view_ptr_ptr) const +{ + bool result = false; + + if (m_attachments.size() <= in_attachment_index) + { + goto end; + } + else + { + *out_image_view_ptr_ptr = m_attachments.at(in_attachment_index).image_view_ptr; + result = true; + } + +end: + return result; +} + +bool Anvil::FramebufferCreateInfo::get_attachment_id_for_image_view(ImageView* in_image_view_ptr, + FramebufferAttachmentID* out_attachment_id_ptr) const +{ + bool result = false; + auto attachment_iterator = std::find(m_attachments.cbegin(), + m_attachments.cend(), + in_image_view_ptr); + + if (attachment_iterator != m_attachments.end() ) + { + *out_attachment_id_ptr = static_cast(attachment_iterator - m_attachments.begin() ); + result = true; + } + + return result; +} + +void Anvil::FramebufferCreateInfo::get_size(uint32_t* out_framebuffer_width_ptr, + uint32_t* out_framebuffer_height_ptr, + uint32_t* out_framebuffer_depth_ptr) const +{ + *out_framebuffer_width_ptr = m_width; + *out_framebuffer_height_ptr = m_height; + *out_framebuffer_depth_ptr = m_n_layers; +} diff --git a/src/misc/glsl_to_spirv.cpp b/src/misc/glsl_to_spirv.cpp index 1d60cb7e..fc118b95 100644 --- a/src/misc/glsl_to_spirv.cpp +++ b/src/misc/glsl_to_spirv.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -35,6 +35,12 @@ #endif #ifdef ANVIL_LINK_WITH_GLSLANG + #ifdef max + #undef max + #endif + #ifdef min + #undef min + #endif #undef snprintf #if defined(_MSC_VER) @@ -72,22 +78,22 @@ }; /* Constructor. 
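A short sketch of the intended call sequence for the new framebuffer create-info object; device_ptr and color_view_ptr stand in for a valid Anvil::BaseDevice* and Anvil::ImageView* created elsewhere:

    #include "misc/framebuffer_create_info.h"

    Anvil::FramebufferCreateInfoUniquePtr make_fb_create_info(const Anvil::BaseDevice* device_ptr,
                                                              Anvil::ImageView*        color_view_ptr)
    {
        auto fb_info_ptr = Anvil::FramebufferCreateInfo::create(device_ptr,
                                                                1280, /* in_width    */
                                                                720,  /* in_height   */
                                                                1);   /* in_n_layers */

        Anvil::FramebufferAttachmentID color_attachment_id;

        /* add_attachment() checks the view's mip size against the framebuffer size
         * and hands back the attachment's index through the optional out argument. */
        fb_info_ptr->add_attachment(color_view_ptr,
                                   &color_attachment_id);

        return fb_info_ptr;
    }
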
*/ - Anvil::GLSLangLimits::GLSLangLimits(std::weak_ptr in_device_ptr) + Anvil::GLSLangLimits::GLSLangLimits(const Anvil::BaseDevice* in_device_ptr) { - const VkPhysicalDeviceLimits& limits = in_device_ptr.lock()->get_physical_device_properties().limits; - VkSampleCountFlags max_sampled_image_sample_count; - int32_t max_sampled_image_samples = 0; - VkSampleCountFlags max_storage_image_sample_count = limits.storageImageSampleCounts; - int32_t max_storage_image_samples = 0; + const auto& limits = in_device_ptr->get_physical_device_properties().core_vk1_0_properties_ptr->limits; + Anvil::SampleCountFlags max_sampled_image_sample_count; + int32_t max_sampled_image_samples = 0; + auto max_storage_image_sample_count = limits.storage_image_sample_counts; + int32_t max_storage_image_samples = 0; - max_sampled_image_sample_count = std::max(limits.sampledImageColorSampleCounts, limits.sampledImageDepthSampleCounts); - max_sampled_image_sample_count = std::max(max_sampled_image_sample_count, limits.sampledImageIntegerSampleCounts); - max_sampled_image_sample_count = std::max(max_sampled_image_sample_count, limits.sampledImageStencilSampleCounts); + max_sampled_image_sample_count = std::max(limits.sampled_image_color_sample_counts, limits.sampled_image_depth_sample_counts); + max_sampled_image_sample_count = std::max(max_sampled_image_sample_count, limits.sampled_image_integer_sample_counts); + max_sampled_image_sample_count = std::max(max_sampled_image_sample_count, limits.sampled_image_stencil_sample_counts); const struct SampleCountToSamplesData { - VkSampleCountFlags sample_count; - int32_t* out_result_ptr; + Anvil::SampleCountFlags sample_count; + int32_t* out_result_ptr; } conversion_items[] = { {max_sampled_image_sample_count, &max_sampled_image_samples}, @@ -102,22 +108,22 @@ const SampleCountToSamplesData& current_item = conversion_items[n_conversion_item]; int32_t result = 1; - if (current_item.sample_count & VK_SAMPLE_COUNT_16_BIT) + if ((current_item.sample_count & Anvil::SampleCountFlagBits::_16_BIT) != 0) { result = 16; } else - if (current_item.sample_count & VK_SAMPLE_COUNT_8_BIT) + if ((current_item.sample_count & Anvil::SampleCountFlagBits::_8_BIT) != 0) { result = 8; } else - if (current_item.sample_count & VK_SAMPLE_COUNT_4_BIT) + if ((current_item.sample_count & Anvil::SampleCountFlagBits::_4_BIT) != 0) { result = 4; } else - if (current_item.sample_count & VK_SAMPLE_COUNT_2_BIT) + if ((current_item.sample_count & Anvil::SampleCountFlagBits::_2_BIT) != 0) { result = 2; } @@ -135,7 +141,7 @@ m_resources_ptr->maxClipPlanes = 6; /* irrelevant to Vulkan */ m_resources_ptr->maxTextureUnits = 32; /* irrelevant to Vulkan */ m_resources_ptr->maxTextureCoords = 32; /* irrelevant to Vulkan */ - m_resources_ptr->maxVertexAttribs = static_cast(CLAMP_TO_INT_MAX(limits.maxVertexInputAttributes) ); + m_resources_ptr->maxVertexAttribs = static_cast(CLAMP_TO_INT_MAX(limits.max_vertex_input_attributes) ); m_resources_ptr->maxVertexUniformComponents = 4096; /* irrelevant to Vulkan */ m_resources_ptr->maxVaryingFloats = 64; /* irrelevant to Vulkan? */ m_resources_ptr->maxVertexTextureImageUnits = 32; /* irrelevant to Vulkan? */ @@ -146,55 +152,55 @@ m_resources_ptr->maxVertexUniformVectors = 128; /* irrelevant to Vulkan? */ m_resources_ptr->maxVaryingVectors = 8; /* irrelevant to Vulkan? */ m_resources_ptr->maxFragmentUniformVectors = 16; /* irrelevant to Vulkan? 
*/ - m_resources_ptr->maxVertexOutputVectors = static_cast(CLAMP_TO_INT_MAX(limits.maxVertexOutputComponents / 4) ); - m_resources_ptr->maxFragmentInputVectors = static_cast(CLAMP_TO_INT_MAX(limits.maxFragmentInputComponents / 4) ); - m_resources_ptr->minProgramTexelOffset = static_cast(CLAMP_TO_INT_MAX(limits.minTexelOffset) ); - m_resources_ptr->maxProgramTexelOffset = static_cast(CLAMP_TO_INT_MAX(limits.maxTexelOffset) ); - m_resources_ptr->maxClipDistances = static_cast(CLAMP_TO_INT_MAX(limits.maxClipDistances) ); - m_resources_ptr->maxComputeWorkGroupCountX = static_cast(CLAMP_TO_INT_MAX(limits.maxComputeWorkGroupCount[0]) ); - m_resources_ptr->maxComputeWorkGroupCountY = static_cast(CLAMP_TO_INT_MAX(limits.maxComputeWorkGroupCount[1]) ); - m_resources_ptr->maxComputeWorkGroupCountZ = static_cast(CLAMP_TO_INT_MAX(limits.maxComputeWorkGroupCount[2]) ); - m_resources_ptr->maxComputeWorkGroupSizeX = static_cast(CLAMP_TO_INT_MAX(limits.maxComputeWorkGroupSize[0]) ); - m_resources_ptr->maxComputeWorkGroupSizeY = static_cast(CLAMP_TO_INT_MAX(limits.maxComputeWorkGroupSize[1]) ); - m_resources_ptr->maxComputeWorkGroupSizeZ = static_cast(CLAMP_TO_INT_MAX(limits.maxComputeWorkGroupSize[2]) ); + m_resources_ptr->maxVertexOutputVectors = static_cast(CLAMP_TO_INT_MAX(limits.max_vertex_output_components / 4) ); + m_resources_ptr->maxFragmentInputVectors = static_cast(CLAMP_TO_INT_MAX(limits.max_fragment_input_components / 4) ); + m_resources_ptr->minProgramTexelOffset = static_cast(CLAMP_TO_INT_MAX(limits.min_texel_offset) ); + m_resources_ptr->maxProgramTexelOffset = static_cast(CLAMP_TO_INT_MAX(limits.max_texel_offset) ); + m_resources_ptr->maxClipDistances = static_cast(CLAMP_TO_INT_MAX(limits.max_clip_distances) ); + m_resources_ptr->maxComputeWorkGroupCountX = static_cast(CLAMP_TO_INT_MAX(limits.max_compute_work_group_count[0]) ); + m_resources_ptr->maxComputeWorkGroupCountY = static_cast(CLAMP_TO_INT_MAX(limits.max_compute_work_group_count[1]) ); + m_resources_ptr->maxComputeWorkGroupCountZ = static_cast(CLAMP_TO_INT_MAX(limits.max_compute_work_group_count[2]) ); + m_resources_ptr->maxComputeWorkGroupSizeX = static_cast(CLAMP_TO_INT_MAX(limits.max_compute_work_group_size[0]) ); + m_resources_ptr->maxComputeWorkGroupSizeY = static_cast(CLAMP_TO_INT_MAX(limits.max_compute_work_group_size[1]) ); + m_resources_ptr->maxComputeWorkGroupSizeZ = static_cast(CLAMP_TO_INT_MAX(limits.max_compute_work_group_size[2]) ); m_resources_ptr->maxComputeUniformComponents = 1024; /* irrelevant to Vulkan? */ m_resources_ptr->maxComputeTextureImageUnits = 16; /* irrelevant to Vulkan? 
*/ - m_resources_ptr->maxComputeImageUniforms = static_cast(CLAMP_TO_INT_MAX(limits.maxPerStageDescriptorStorageImages) ); + m_resources_ptr->maxComputeImageUniforms = static_cast(CLAMP_TO_INT_MAX(limits.max_per_stage_descriptor_storage_images) ); m_resources_ptr->maxComputeAtomicCounters = 8; /* irrelevant to Vulkan */ m_resources_ptr->maxComputeAtomicCounterBuffers = 1; /* irrelevant to Vulkan */ m_resources_ptr->maxVaryingComponents = 60; /* irrelevant to Vulkan */ - m_resources_ptr->maxVertexOutputComponents = static_cast(CLAMP_TO_INT_MAX(limits.maxVertexOutputComponents) ); - m_resources_ptr->maxGeometryInputComponents = static_cast(CLAMP_TO_INT_MAX(limits.maxGeometryInputComponents) ); - m_resources_ptr->maxGeometryOutputComponents = static_cast(CLAMP_TO_INT_MAX(limits.maxGeometryOutputComponents) ); - m_resources_ptr->maxFragmentInputComponents = static_cast(CLAMP_TO_INT_MAX(limits.maxFragmentInputComponents) ); + m_resources_ptr->maxVertexOutputComponents = static_cast(CLAMP_TO_INT_MAX(limits.max_vertex_output_components) ); + m_resources_ptr->maxGeometryInputComponents = static_cast(CLAMP_TO_INT_MAX(limits.max_geometry_input_components) ); + m_resources_ptr->maxGeometryOutputComponents = static_cast(CLAMP_TO_INT_MAX(limits.max_geometry_output_components) ); + m_resources_ptr->maxFragmentInputComponents = static_cast(CLAMP_TO_INT_MAX(limits.max_fragment_input_components) ); m_resources_ptr->maxImageUnits = 8; /* irrelevant to Vulkan */ m_resources_ptr->maxCombinedImageUnitsAndFragmentOutputs = 8; /* irrelevant to Vulkan? */ - m_resources_ptr->maxCombinedShaderOutputResources = static_cast(CLAMP_TO_INT_MAX(limits.maxFragmentCombinedOutputResources) ); + m_resources_ptr->maxCombinedShaderOutputResources = static_cast(CLAMP_TO_INT_MAX(limits.max_fragment_combined_output_resources) ); m_resources_ptr->maxImageSamples = max_storage_image_samples; - m_resources_ptr->maxVertexImageUniforms = static_cast(CLAMP_TO_INT_MAX(limits.maxPerStageDescriptorStorageImages) ); - m_resources_ptr->maxTessControlImageUniforms = static_cast(CLAMP_TO_INT_MAX(limits.maxPerStageDescriptorStorageImages) ); - m_resources_ptr->maxTessEvaluationImageUniforms = static_cast(CLAMP_TO_INT_MAX(limits.maxPerStageDescriptorStorageImages) ); - m_resources_ptr->maxGeometryImageUniforms = static_cast(CLAMP_TO_INT_MAX(limits.maxPerStageDescriptorStorageImages) ); - m_resources_ptr->maxFragmentImageUniforms = static_cast(CLAMP_TO_INT_MAX(limits.maxPerStageDescriptorStorageImages) ); - m_resources_ptr->maxCombinedImageUniforms = static_cast(CLAMP_TO_INT_MAX(5 /* vs, tc, te, gs, fs */ * limits.maxPerStageDescriptorStorageImages) ); + m_resources_ptr->maxVertexImageUniforms = static_cast(CLAMP_TO_INT_MAX(limits.max_per_stage_descriptor_storage_images) ); + m_resources_ptr->maxTessControlImageUniforms = static_cast(CLAMP_TO_INT_MAX(limits.max_per_stage_descriptor_storage_images) ); + m_resources_ptr->maxTessEvaluationImageUniforms = static_cast(CLAMP_TO_INT_MAX(limits.max_per_stage_descriptor_storage_images) ); + m_resources_ptr->maxGeometryImageUniforms = static_cast(CLAMP_TO_INT_MAX(limits.max_per_stage_descriptor_storage_images) ); + m_resources_ptr->maxFragmentImageUniforms = static_cast(CLAMP_TO_INT_MAX(limits.max_per_stage_descriptor_storage_images) ); + m_resources_ptr->maxCombinedImageUniforms = static_cast(CLAMP_TO_INT_MAX(5 /* vs, tc, te, gs, fs */ * limits.max_per_stage_descriptor_storage_images) ); m_resources_ptr->maxGeometryTextureImageUnits = 16; /* irrelevant to Vulkan? 
*/ - m_resources_ptr->maxGeometryOutputVertices = static_cast(CLAMP_TO_INT_MAX(limits.maxGeometryOutputVertices) ); - m_resources_ptr->maxGeometryTotalOutputComponents = static_cast(CLAMP_TO_INT_MAX(limits.maxGeometryTotalOutputComponents) ); + m_resources_ptr->maxGeometryOutputVertices = static_cast(CLAMP_TO_INT_MAX(limits.max_geometry_output_vertices) ); + m_resources_ptr->maxGeometryTotalOutputComponents = static_cast(CLAMP_TO_INT_MAX(limits.max_geometry_total_output_components) ); m_resources_ptr->maxGeometryUniformComponents = 1024; /* irrelevant to Vulkan? */ - m_resources_ptr->maxGeometryVaryingComponents = static_cast(CLAMP_TO_INT_MAX(limits.maxGeometryInputComponents) ); - m_resources_ptr->maxTessControlInputComponents = static_cast(CLAMP_TO_INT_MAX(limits.maxTessellationControlPerVertexInputComponents) ); - m_resources_ptr->maxTessControlOutputComponents = static_cast(CLAMP_TO_INT_MAX(limits.maxTessellationControlPerVertexOutputComponents) ); + m_resources_ptr->maxGeometryVaryingComponents = static_cast(CLAMP_TO_INT_MAX(limits.max_geometry_input_components) ); + m_resources_ptr->maxTessControlInputComponents = static_cast(CLAMP_TO_INT_MAX(limits.max_tessellation_control_per_vertex_input_components) ); + m_resources_ptr->maxTessControlOutputComponents = static_cast(CLAMP_TO_INT_MAX(limits.max_tessellation_control_per_vertex_output_components) ); m_resources_ptr->maxTessControlTextureImageUnits = 16; /* irrelevant to Vulkan? */ m_resources_ptr->maxTessControlUniformComponents = 1024; /* irrelevant to Vulkan? */ - m_resources_ptr->maxTessControlTotalOutputComponents = static_cast(CLAMP_TO_INT_MAX(limits.maxTessellationControlTotalOutputComponents) ); - m_resources_ptr->maxTessEvaluationInputComponents = static_cast(CLAMP_TO_INT_MAX(limits.maxTessellationEvaluationInputComponents) ); - m_resources_ptr->maxTessEvaluationOutputComponents = static_cast(CLAMP_TO_INT_MAX(limits.maxTessellationEvaluationOutputComponents) ); + m_resources_ptr->maxTessControlTotalOutputComponents = static_cast(CLAMP_TO_INT_MAX(limits.max_tessellation_control_total_output_components) ); + m_resources_ptr->maxTessEvaluationInputComponents = static_cast(CLAMP_TO_INT_MAX(limits.max_tessellation_evaluation_input_components) ); + m_resources_ptr->maxTessEvaluationOutputComponents = static_cast(CLAMP_TO_INT_MAX(limits.max_tessellation_evaluation_output_components) ); m_resources_ptr->maxTessEvaluationTextureImageUnits = 16; /* irrelevant to Vulkan? */ m_resources_ptr->maxTessEvaluationUniformComponents = 1024; /* irrelevant to Vulkan? 
*/ - m_resources_ptr->maxTessPatchComponents = static_cast(CLAMP_TO_INT_MAX(limits.maxTessellationControlPerPatchOutputComponents) ); - m_resources_ptr->maxPatchVertices = static_cast(CLAMP_TO_INT_MAX(limits.maxTessellationPatchSize) ); - m_resources_ptr->maxTessGenLevel = static_cast(CLAMP_TO_INT_MAX(limits.maxTessellationGenerationLevel) ); - m_resources_ptr->maxViewports = static_cast(CLAMP_TO_INT_MAX(limits.maxViewports) ); + m_resources_ptr->maxTessPatchComponents = static_cast(CLAMP_TO_INT_MAX(limits.max_tessellation_control_per_patch_output_components) ); + m_resources_ptr->maxPatchVertices = static_cast(CLAMP_TO_INT_MAX(limits.max_tessellation_patch_size) ); + m_resources_ptr->maxTessGenLevel = static_cast(CLAMP_TO_INT_MAX(limits.max_tessellation_generation_level) ); + m_resources_ptr->maxViewports = static_cast(CLAMP_TO_INT_MAX(limits.max_viewports) ); m_resources_ptr->maxVertexAtomicCounters = 0; /* not supported in Vulkan */ m_resources_ptr->maxTessControlAtomicCounters = 0; /* not supported in Vulkan */ m_resources_ptr->maxTessEvaluationAtomicCounters = 0; /* not supported in Vulkan */ @@ -209,10 +215,8 @@ m_resources_ptr->maxFragmentAtomicCounterBuffers = 0; /* not supported in Vulkan */ m_resources_ptr->maxCombinedAtomicCounterBuffers = 0; /* not supported in Vulkan */ m_resources_ptr->maxAtomicCounterBufferSize = 0; /* not supported in Vulkan */ - m_resources_ptr->maxTransformFeedbackBuffers = 0; /* not supported in Vulkan */ - m_resources_ptr->maxTransformFeedbackInterleavedComponents = 0; /* not supported in Vulkan */ - m_resources_ptr->maxCullDistances = static_cast(CLAMP_TO_INT_MAX(limits.maxCullDistances) ); - m_resources_ptr->maxCombinedClipAndCullDistances = static_cast(CLAMP_TO_INT_MAX(limits.maxCombinedClipAndCullDistances) ); + m_resources_ptr->maxCullDistances = static_cast(CLAMP_TO_INT_MAX(limits.max_cull_distances) ); + m_resources_ptr->maxCombinedClipAndCullDistances = static_cast(CLAMP_TO_INT_MAX(limits.max_combined_clip_and_cull_distances) ); m_resources_ptr->maxSamples = (max_sampled_image_samples > max_storage_image_samples) ? 
CLAMP_TO_INT_MAX(max_sampled_image_samples) : CLAMP_TO_INT_MAX(max_storage_image_samples); m_resources_ptr->limits.nonInductiveForLoops = 1; @@ -224,32 +228,48 @@ m_resources_ptr->limits.generalSamplerIndexing = 1; m_resources_ptr->limits.generalVariableIndexing = 1; m_resources_ptr->limits.generalConstantMatrixVectorIndexing = 1; + + if (in_device_ptr->get_extension_info()->ext_transform_feedback() ) + { + const auto xfb_props_ptr = in_device_ptr->get_physical_device_properties().ext_transform_feedback_properties_ptr; + + m_resources_ptr->maxTransformFeedbackBuffers = xfb_props_ptr->n_max_transform_feedback_buffers; + m_resources_ptr->maxTransformFeedbackInterleavedComponents = xfb_props_ptr->max_transform_feedback_buffer_data_stride * 4; + } + else + { + m_resources_ptr->maxTransformFeedbackBuffers = 0; /* not supported in core Vulkan */ + m_resources_ptr->maxTransformFeedbackInterleavedComponents = 0; /* not supported in core Vulkan */ + } } - static GLSLangGlobalInitializer glslang_helper; + static const GLSLangGlobalInitializer glslang_helper; #endif /* Please see header for specification */ -Anvil::GLSLShaderToSPIRVGenerator::GLSLShaderToSPIRVGenerator(std::weak_ptr in_device_ptr, - const Mode& in_mode, - std::string in_data, - ShaderStage in_shader_stage) +Anvil::GLSLShaderToSPIRVGenerator::GLSLShaderToSPIRVGenerator(const Anvil::BaseDevice* in_device_ptr, + const Mode& in_mode, + std::string in_data, + ShaderStage in_shader_stage, + SpvVersion in_spirv_version) :CallbacksSupportProvider(GLSL_SHADER_TO_SPIRV_GENERATOR_CALLBACK_ID_COUNT), m_data (in_data), m_glsl_source_code_dirty(true), m_mode (in_mode), - m_shader_stage (in_shader_stage) + m_shader_stage (in_shader_stage), + m_spirv_version (in_spirv_version), + m_glsl_file_path{}, + m_with_debug_info{false} { #ifdef ANVIL_LINK_WITH_GLSLANG { - if (in_device_ptr.lock() != nullptr) + if (in_device_ptr != nullptr) { m_limits_ptr.reset( new GLSLangLimits(in_device_ptr) ); } - } #endif } @@ -259,7 +279,7 @@ Anvil::GLSLShaderToSPIRVGenerator::~GLSLShaderToSPIRVGenerator() { auto object_tracker_ptr = Anvil::ObjectTracker::get(); - object_tracker_ptr->unregister_object(Anvil::OBJECT_TYPE_GLSL_SHADER_TO_SPIRV_GENERATOR, + object_tracker_ptr->unregister_object(Anvil::ObjectType::ANVIL_GLSL_SHADER_TO_SPIRV_GENERATOR, this); m_spirv_blob.clear(); @@ -314,6 +334,30 @@ bool Anvil::GLSLShaderToSPIRVGenerator::add_definition_value_pair(std::string in return result; } +/* Please see header for specification */ +bool Anvil::GLSLShaderToSPIRVGenerator::add_placeholder_value_pair(const std::string& in_placeholder_name, + const std::string& in_value) +{ + bool result = false; + + for (const auto& placeholder_value_pair : m_placeholder_values) + { + if (placeholder_value_pair.first == in_placeholder_name) + { + anvil_assert_fail(); + + goto end; + } + } + + m_placeholder_values.push_back(std::make_pair(in_placeholder_name, in_value)); + + /* All done */ + result = true; +end: + return result; +} + /* Please see header for specification */ bool Anvil::GLSLShaderToSPIRVGenerator::add_pragma(std::string in_pragma_name, std::string in_opt_value) @@ -336,11 +380,12 @@ bool Anvil::GLSLShaderToSPIRVGenerator::add_pragma(std::string in_pragma_name, } /* Please see header for specification */ -bool Anvil::GLSLShaderToSPIRVGenerator::bake_glsl_source_code() +bool Anvil::GLSLShaderToSPIRVGenerator::bake_glsl_source_code() const { std::string final_glsl_source_string; const uint32_t n_definition_values = static_cast(m_definition_values.size() ); const uint32_t 
n_extension_behaviors = static_cast(m_extension_behaviors.size() ); + const uint32_t n_placeholder_values = static_cast(m_placeholder_values.size()); const uint32_t n_pragmas = static_cast(m_pragmas.size() ); bool result = false; @@ -387,6 +432,7 @@ bool Anvil::GLSLShaderToSPIRVGenerator::bake_glsl_source_code() } if (n_pragmas > 0 || + n_placeholder_values > 0 || n_extension_behaviors > 0 || n_definition_values > 0) { @@ -429,7 +475,7 @@ bool Anvil::GLSLShaderToSPIRVGenerator::bake_glsl_source_code() new_line); } - /* Finish with pragmas */ + /* Next define pragmas */ for (auto& current_pragma : m_pragmas) { std::string pragma_name = current_pragma.first; @@ -439,6 +485,23 @@ bool Anvil::GLSLShaderToSPIRVGenerator::bake_glsl_source_code() final_glsl_source_string.insert(glsl_source_string_second_line_index, new_line); } + + /* Finish with replacing placeholders with values */ + for(auto vec_iterator = m_placeholder_values.begin(); + vec_iterator != m_placeholder_values.end(); + ++vec_iterator) + { + const std::string& current_key = vec_iterator->first; + const std::string& current_value = vec_iterator->second; + size_t glsl_source_string_pos = final_glsl_source_string.find(current_key, 0); + + while (glsl_source_string_pos != std::string::npos) + { + final_glsl_source_string.replace(glsl_source_string_pos, current_key.size(), current_value); + + glsl_source_string_pos = final_glsl_source_string.find(current_key, glsl_source_string_pos); + } + } } /* Cache the GLSL source code used for the conversion */ @@ -453,7 +516,7 @@ bool Anvil::GLSLShaderToSPIRVGenerator::bake_glsl_source_code() } /* Please see header for specification */ -bool Anvil::GLSLShaderToSPIRVGenerator::bake_spirv_blob() +bool Anvil::GLSLShaderToSPIRVGenerator::bake_spirv_blob() const { bool glsl_filename_is_temporary = false; std::string glsl_filename_with_path; @@ -479,12 +542,12 @@ bool Anvil::GLSLShaderToSPIRVGenerator::bake_spirv_blob() { switch (m_shader_stage) { - case SHADER_STAGE_COMPUTE: glsl_filename_with_path = "temp.comp"; break; - case SHADER_STAGE_FRAGMENT: glsl_filename_with_path = "temp.frag"; break; - case SHADER_STAGE_GEOMETRY: glsl_filename_with_path = "temp.geom"; break; - case SHADER_STAGE_TESSELLATION_CONTROL: glsl_filename_with_path = "temp.tesc"; break; - case SHADER_STAGE_TESSELLATION_EVALUATION: glsl_filename_with_path = "temp.tese"; break; - case SHADER_STAGE_VERTEX: glsl_filename_with_path = "temp.vert"; break; + case ShaderStage::COMPUTE: glsl_filename_with_path = "temp.comp"; break; + case ShaderStage::FRAGMENT: glsl_filename_with_path = "temp.frag"; break; + case ShaderStage::GEOMETRY: glsl_filename_with_path = "temp.geom"; break; + case ShaderStage::TESSELLATION_CONTROL: glsl_filename_with_path = "temp.tesc"; break; + case ShaderStage::TESSELLATION_EVALUATION: glsl_filename_with_path = "temp.tese"; break; + case ShaderStage::VERTEX: glsl_filename_with_path = "temp.vert"; break; default: { @@ -516,7 +579,7 @@ bool Anvil::GLSLShaderToSPIRVGenerator::bake_spirv_blob() do { - auto shader_module_raw_ptr = object_tracker_ptr->get_object_at_index (Anvil::OBJECT_TYPE_SHADER_MODULE, + auto shader_module_raw_ptr = object_tracker_ptr->get_object_at_index (Anvil::ObjectType::SHADER_MODULE, n_current_shader_module); const Anvil::ShaderModule* shader_module_ptr = reinterpret_cast(shader_module_raw_ptr); @@ -551,17 +614,22 @@ bool Anvil::GLSLShaderToSPIRVGenerator::bake_spirv_blob() if (m_spirv_blob.size() == 0) { /* Need to bake a brand new SPIR-V blob */ - result = 
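A sketch of driving the extended generator interface; the new SpvVersion argument to create() and add_placeholder_value_pair() come from this change, while the mode enumerator, the placeholder token and device_ptr/glsl_source are illustrative (exact spellings per glsl_to_spirv.h):

    auto generator_ptr = Anvil::GLSLShaderToSPIRVGenerator::create(device_ptr,  /* assumed valid Anvil::BaseDevice* */
                                                                   Anvil::GLSLShaderToSPIRVGenerator::MODE_USE_SPECIFIED_SOURCE,
                                                                   glsl_source, /* assumed std::string holding GLSL code */
                                                                   Anvil::ShaderStage::COMPUTE,
                                                                   Anvil::GLSLShaderToSPIRVGenerator::SpvVersion::_1_3);

    /* Every occurrence of the token below in the baked GLSL body is replaced with
     * the supplied value before the GLSL->SPIR-V conversion is kicked off. */
    generator_ptr->add_placeholder_value_pair("%WORKGROUP_SIZE_X%",
                                              "64");
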
bake_spirv_blob_by_calling_glslang(m_glsl_source_code.c_str() ); + result = bake_spirv_blob_by_calling_glslang(m_glsl_source_code.c_str(), m_glsl_file_path, m_with_debug_info ); } } + #else { /* We need to point glslangvalidator at a location where it can stash the SPIR-V blob. */ result = bake_spirv_blob_by_spawning_glslang_process(glsl_filename_with_path, "temp.spv"); } + +end: #endif + + return result; } @@ -573,7 +641,7 @@ bool Anvil::GLSLShaderToSPIRVGenerator::bake_spirv_blob() * * @return true if successful, false otherwise. **/ - bool Anvil::GLSLShaderToSPIRVGenerator::bake_spirv_blob_by_calling_glslang(const char* in_body) + bool Anvil::GLSLShaderToSPIRVGenerator::bake_spirv_blob_by_calling_glslang(const char* in_body, const std::optional &glslFilePath, bool withDebugInfo) const { const EShLanguage glslang_shader_stage = get_glslang_shader_stage(); glslang::TIntermediate* intermediate_ptr = nullptr; @@ -588,27 +656,84 @@ bool Anvil::GLSLShaderToSPIRVGenerator::bake_spirv_blob() /* If this assertion check explodes, you're trying to build a SPIR-V blob with a generator, which has * been initialized with a null device instance. This is illegal. */ - OnGLSLToSPIRVConversionAboutToBeStartedCallbackArgument callback_arg(this); + OnGLSLToSPIRVConversionAboutToBeStartedCallbackArgument conversion_about_to_be_started_callback_arg(this); + OnGLSLToSPIRVConversionFinishedCallbackArgument conversion_finished_callback_arg (this); anvil_assert(m_limits_ptr != nullptr); callback(GLSL_SHADER_TO_SPIRV_GENERATOR_CALLBACK_ID_CONVERSION_ABOUT_TO_START, - &callback_arg); + &conversion_about_to_be_started_callback_arg); if (new_program_ptr != nullptr && new_shader_ptr != nullptr) { - bool link_result = false; + bool link_result = false; + glslang::EShTargetLanguageVersion spirv_version; /* Try to compile the shader */ - new_shader_ptr->setStrings(&in_body, - 1); + const char *path = nullptr; + int numNames = 0; + if(glslFilePath) { + path = glslFilePath->c_str(); + numNames = 1; + } + int length = strlen(in_body); + new_shader_ptr->setStringsWithLengthsAndNames(&in_body, &length, &path, numNames); + + switch (m_spirv_version) + { + case SpvVersion::_1_0: + { + spirv_version = glslang::EShTargetSpv_1_0; + + break; + } + + case SpvVersion::_1_1: + { + spirv_version = glslang::EShTargetSpv_1_1; + + break; + } + + case SpvVersion::_1_2: + { + spirv_version = glslang::EShTargetSpv_1_2; + + break; + } + + case SpvVersion::_1_3: + { + spirv_version = glslang::EShTargetSpv_1_3; + + break; + } + + case SpvVersion::_1_4: + { + spirv_version = glslang::EShTargetSpv_1_4; + + break; + } + + default: + { + spirv_version = glslang::EShTargetSpv_1_0; + + break; + } + } + + new_shader_ptr->setEnvTarget(glslang::EShTargetSpv, + spirv_version); result = new_shader_ptr->parse(m_limits_ptr->get_resource_ptr(), 110, /* defaultVersion */ false, /* forwardCompatible */ - (EShMessages) (EShMsgDefault | EShMsgSpvRules | EShMsgVulkanRules) ); + (EShMessages) (EShMsgDefault | EShMsgSpvRules | EShMsgVulkanRules | EShMsgDebugInfo) ); + m_debug_info_log = new_shader_ptr->getInfoDebugLog(); m_shader_info_log = new_shader_ptr->getInfoLog(); if (!result) @@ -624,8 +749,9 @@ bool Anvil::GLSLShaderToSPIRVGenerator::bake_spirv_blob() /* Link the program */ new_program_ptr->addShader(new_shader_ptr); - link_result = new_program_ptr->link (EShMsgDefault); - m_program_info_log = new_program_ptr->getInfoLog(); + link_result = new_program_ptr->link (EShMsgDefault); + m_program_debug_info_log = new_program_ptr->getInfoDebugLog(); + 
m_program_info_log = new_program_ptr->getInfoLog (); if (!link_result) { @@ -647,8 +773,13 @@ bool Anvil::GLSLShaderToSPIRVGenerator::bake_spirv_blob() goto end; } + glslang::SpvOptions options {}; + if(withDebugInfo) { + options.generateDebugInfo = true; + options.stripDebugInfo = false; + } glslang::GlslangToSpv(*intermediate_ptr, - spirv_blob); + spirv_blob, &options); if (spirv_blob.size() == 0) { @@ -662,24 +793,20 @@ bool Anvil::GLSLShaderToSPIRVGenerator::bake_spirv_blob() memcpy(&m_spirv_blob.at(0), &spirv_blob[0], m_spirv_blob.size() ); + } + callback(GLSL_SHADER_TO_SPIRV_GENERATOR_CALLBACK_ID_CONVERSION_FINISHED, + &conversion_finished_callback_arg); + /* All done */ result = true; end: - if (new_program_ptr != nullptr) - { - delete new_program_ptr; + delete new_program_ptr; + new_program_ptr = nullptr; - new_program_ptr = nullptr; - } - - if (new_shader_ptr != nullptr) - { - delete new_shader_ptr; - - new_shader_ptr = nullptr; - } + delete new_shader_ptr; + new_shader_ptr = nullptr; return result; } @@ -691,12 +818,12 @@ bool Anvil::GLSLShaderToSPIRVGenerator::bake_spirv_blob() switch (m_shader_stage) { - case Anvil::SHADER_STAGE_COMPUTE: result = EShLangCompute; break; - case Anvil::SHADER_STAGE_FRAGMENT: result = EShLangFragment; break; - case Anvil::SHADER_STAGE_GEOMETRY: result = EShLangGeometry; break; - case Anvil::SHADER_STAGE_TESSELLATION_CONTROL: result = EShLangTessControl; break; - case Anvil::SHADER_STAGE_TESSELLATION_EVALUATION: result = EShLangTessEvaluation; break; - case Anvil::SHADER_STAGE_VERTEX: result = EShLangVertex; break; + case Anvil::ShaderStage::COMPUTE: result = EShLangCompute; break; + case Anvil::ShaderStage::FRAGMENT: result = EShLangFragment; break; + case Anvil::ShaderStage::GEOMETRY: result = EShLangGeometry; break; + case Anvil::ShaderStage::TESSELLATION_CONTROL: result = EShLangTessControl; break; + case Anvil::ShaderStage::TESSELLATION_EVALUATION: result = EShLangTessEvaluation; break; + case Anvil::ShaderStage::VERTEX: result = EShLangVertex; break; default: { @@ -717,12 +844,13 @@ bool Anvil::GLSLShaderToSPIRVGenerator::bake_spirv_blob() * @return true if successful, false otherwise. 
**/ bool Anvil::GLSLShaderToSPIRVGenerator::bake_spirv_blob_by_spawning_glslang_process(const std::string& in_glsl_filename_with_path, - const std::string& in_spirv_filename_with_path) + const std::string& in_spirv_filename_with_path) const { auto callback_arg = OnGLSLToSPIRVConversionAboutToBeStartedCallbackArgument(this); std::string glslangvalidator_params; bool result = false; size_t spirv_file_size = 0; + char* spirv_blob_ptr = nullptr; callback(GLSL_SHADER_TO_SPIRV_GENERATOR_CALLBACK_ID_CONVERSION_ABOUT_TO_START, &callback_arg); @@ -749,7 +877,7 @@ bool Anvil::GLSLShaderToSPIRVGenerator::bake_spirv_blob() nullptr, /* lpProcessAttributes */ nullptr, /* lpThreadAttributes */ FALSE, /* bInheritHandles */ - CREATE_NO_WINDOW, + CREATE_NO_WINDOW, nullptr, /* lpEnvironment */ nullptr, /* lpCurrentDirectory */ &startup_info, @@ -813,7 +941,7 @@ bool Anvil::GLSLShaderToSPIRVGenerator::bake_spirv_blob() #endif /* Now, read the SPIR-V file contents */ - char* spirv_blob_ptr = nullptr; + Anvil::IO::read_file(in_spirv_filename_with_path.c_str(), false, /* is_text_file */ @@ -854,21 +982,24 @@ bool Anvil::GLSLShaderToSPIRVGenerator::bake_spirv_blob() #endif /* Please see header for specification */ -std::shared_ptr Anvil::GLSLShaderToSPIRVGenerator::create(std::weak_ptr in_opt_device_ptr, - const Mode& in_mode, - std::string in_data, - ShaderStage in_shader_stage) +Anvil::GLSLShaderToSPIRVGeneratorUniquePtr Anvil::GLSLShaderToSPIRVGenerator::create(const Anvil::BaseDevice* in_opt_device_ptr, + const Mode& in_mode, + std::string in_data, + ShaderStage in_shader_stage, + SpvVersion in_spirv_version) { - std::shared_ptr result_ptr; + Anvil::GLSLShaderToSPIRVGeneratorUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( new Anvil::GLSLShaderToSPIRVGenerator(in_opt_device_ptr, in_mode, in_data, - in_shader_stage) + in_shader_stage, + in_spirv_version) ); - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_GLSL_SHADER_TO_SPIRV_GENERATOR, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::ANVIL_GLSL_SHADER_TO_SPIRV_GENERATOR, result_ptr.get() ); return result_ptr; @@ -893,4 +1024,4 @@ std::string Anvil::GLSLShaderToSPIRVGenerator::get_extension_behavior_glsl_code( } return result; -} \ No newline at end of file +} diff --git a/src/misc/graphics_pipeline_create_info.cpp b/src/misc/graphics_pipeline_create_info.cpp new file mode 100644 index 00000000..f2f5ec5c --- /dev/null +++ b/src/misc/graphics_pipeline_create_info.cpp @@ -0,0 +1,1232 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#include "misc/graphics_pipeline_create_info.h" +#include "misc/render_pass_create_info.h" +#include "wrappers/device.h" +#include "wrappers/render_pass.h" + +Anvil::GraphicsPipelineCreateInfo::GraphicsPipelineCreateInfo(const RenderPass* in_renderpass_ptr, + SubPassID in_subpass_id) +{ + m_alpha_to_coverage_enabled = false; + m_alpha_to_one_enabled = false; + m_depth_bias_clamp = 0.0f; + m_depth_bias_constant_factor = 0.0f; + m_depth_bias_enabled = false; + m_depth_bias_slope_factor = 1.0f; + m_depth_bounds_test_enabled = false; + m_depth_clamp_enabled = false; + m_depth_clip_enabled = true; + m_depth_test_compare_op = Anvil::CompareOp::ALWAYS; + m_depth_test_enabled = false; + m_depth_writes_enabled = false; + m_front_face = Anvil::FrontFace::COUNTER_CLOCKWISE; + m_logic_op = Anvil::LogicOp::NO_OP; + m_logic_op_enabled = false; + m_max_depth_bounds = 1.0f; + m_min_depth_bounds = 0.0f; + m_n_dynamic_scissor_boxes = 0; + m_n_dynamic_viewports = 0; + m_n_patch_control_points = 1; + m_primitive_restart_enabled = false; + m_rasterizer_discard_enabled = false; + m_sample_locations_enabled = false; + m_sample_mask_enabled = false; + m_sample_shading_enabled = false; + m_stencil_test_enabled = false; + + m_renderpass_ptr = in_renderpass_ptr; + m_subpass_id = in_subpass_id; + + m_stencil_state_back_face.compareMask = ~0u; + m_stencil_state_back_face.compareOp = VK_COMPARE_OP_ALWAYS; + m_stencil_state_back_face.depthFailOp = VK_STENCIL_OP_KEEP; + m_stencil_state_back_face.failOp = VK_STENCIL_OP_KEEP; + m_stencil_state_back_face.passOp = VK_STENCIL_OP_KEEP; + m_stencil_state_back_face.reference = 0; + m_stencil_state_back_face.writeMask = ~0u; + m_stencil_state_front_face = m_stencil_state_back_face; + + m_sample_locations_per_pixel = Anvil::SampleCountFlagBits::NONE; + m_sample_location_grid_size.height = 0; + m_sample_location_grid_size.width = 0; + + m_rasterization_order = Anvil::RasterizationOrderAMD::STRICT; + + m_conservative_rasterization_mode = Anvil::ConservativeRasterizationModeEXT::DISABLED; + m_extra_primitive_overestimation_size = 0.0f; + + memset(m_blend_constant, + 0, + sizeof(m_blend_constant) ); + + m_cull_mode = Anvil::CullModeFlagBits::BACK_BIT; + m_line_width = 1.0f; + m_min_sample_shading = 1.0f; + m_sample_count = Anvil::SampleCountFlagBits::_1_BIT; + m_polygon_mode = Anvil::PolygonMode::FILL; + m_primitive_topology = Anvil::PrimitiveTopology::TRIANGLE_LIST; + m_rasterization_stream_index = 0; + m_sample_mask = ~0u; + + m_tessellation_domain_origin = Anvil::TessellationDomainOrigin::UPPER_LEFT; +} + +Anvil::GraphicsPipelineCreateInfo::~GraphicsPipelineCreateInfo() +{ + /* Stub */ +} + +bool Anvil::GraphicsPipelineCreateInfo::add_vertex_binding(uint32_t in_binding, + Anvil::VertexInputRate in_step_rate, + uint32_t in_stride_in_bytes, + const uint32_t& in_n_attributes, + const Anvil::VertexInputAttribute* in_attribute_ptrs, + uint32_t in_divisor) +{ + bool result = false; + + #ifdef _DEBUG + { + /* Make sure the binding has not already been specified */ + anvil_assert(m_bindings.find(in_binding) == m_bindings.end() ); + + /* Make sure the location is not already referred to by already added attributes */ + for (const auto& current_binding : m_bindings) + { + for (const auto& current_attribute : 
current_binding.second.attributes) + { + for (uint32_t n_in_attribute = 0; + n_in_attribute < in_n_attributes; + ++n_in_attribute) + { + anvil_assert(current_attribute.location != in_attribute_ptrs[n_in_attribute].location); + } + } + } + } + #endif + + /* Add a new vertex attribute descriptor. At this point, we do not differentiate between + * attributes and bindings. Actual Vulkan attributes and bindings will be created at baking + * time. */ + InternalVertexBinding new_binding(in_divisor, + in_step_rate, + in_stride_in_bytes); + + new_binding.attributes.resize(in_n_attributes); + + memcpy(&new_binding.attributes.at(0), + in_attribute_ptrs, + in_n_attributes * sizeof(Anvil::VertexInputAttribute) ); + + m_bindings[in_binding] = new_binding; + + /* All done */ + result = true; + + return result; +} + +bool Anvil::GraphicsPipelineCreateInfo::are_depth_writes_enabled() const +{ + return m_depth_writes_enabled; +} + +bool Anvil::GraphicsPipelineCreateInfo::copy_gfx_state_from(const Anvil::GraphicsPipelineCreateInfo* in_src_pipeline_create_info_ptr) +{ + bool result = false; + + if (in_src_pipeline_create_info_ptr == nullptr) + { + anvil_assert(in_src_pipeline_create_info_ptr != nullptr); + + goto end; + } + + /* GFX pipeline info-level data */ + m_depth_bounds_test_enabled = in_src_pipeline_create_info_ptr->m_depth_bounds_test_enabled; + m_depth_clip_enabled = in_src_pipeline_create_info_ptr->m_depth_clip_enabled; + m_max_depth_bounds = in_src_pipeline_create_info_ptr->m_max_depth_bounds; + m_min_depth_bounds = in_src_pipeline_create_info_ptr->m_min_depth_bounds; + + m_depth_bias_enabled = in_src_pipeline_create_info_ptr->m_depth_bias_enabled; + m_depth_bias_clamp = in_src_pipeline_create_info_ptr->m_depth_bias_clamp; + m_depth_bias_constant_factor = in_src_pipeline_create_info_ptr->m_depth_bias_constant_factor; + m_depth_bias_slope_factor = in_src_pipeline_create_info_ptr->m_depth_bias_slope_factor; + + m_depth_test_enabled = in_src_pipeline_create_info_ptr->m_depth_test_enabled; + m_depth_test_compare_op = in_src_pipeline_create_info_ptr->m_depth_test_compare_op; + + m_enabled_dynamic_states = in_src_pipeline_create_info_ptr->m_enabled_dynamic_states; + + m_alpha_to_coverage_enabled = in_src_pipeline_create_info_ptr->m_alpha_to_coverage_enabled; + m_alpha_to_one_enabled = in_src_pipeline_create_info_ptr->m_alpha_to_one_enabled; + m_depth_clamp_enabled = in_src_pipeline_create_info_ptr->m_depth_clamp_enabled; + m_depth_writes_enabled = in_src_pipeline_create_info_ptr->m_depth_writes_enabled; + m_logic_op_enabled = in_src_pipeline_create_info_ptr->m_logic_op_enabled; + m_primitive_restart_enabled = in_src_pipeline_create_info_ptr->m_primitive_restart_enabled; + m_rasterizer_discard_enabled = in_src_pipeline_create_info_ptr->m_rasterizer_discard_enabled; + m_sample_locations_enabled = in_src_pipeline_create_info_ptr->m_sample_locations_enabled; + m_sample_mask_enabled = in_src_pipeline_create_info_ptr->m_sample_mask_enabled; + m_sample_shading_enabled = in_src_pipeline_create_info_ptr->m_sample_shading_enabled; + + m_stencil_test_enabled = in_src_pipeline_create_info_ptr->m_stencil_test_enabled; + m_stencil_state_back_face = in_src_pipeline_create_info_ptr->m_stencil_state_back_face; + m_stencil_state_front_face = in_src_pipeline_create_info_ptr->m_stencil_state_front_face; + + m_sample_location_grid_size = in_src_pipeline_create_info_ptr->m_sample_location_grid_size; + m_sample_locations = in_src_pipeline_create_info_ptr->m_sample_locations; + m_sample_locations_per_pixel = 
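A sketch of registering a vertex binding with the new divisor-aware add_vertex_binding(); the attribute contents and the 32-byte stride are illustrative, and the Anvil::VertexInputAttribute entries are assumed to be filled per the application's vertex layout:

    Anvil::VertexInputAttribute attributes[2];

    /* attributes[0]: position, attributes[1]: normal - set location / format / offset here. */

    gfx_pipeline_info_ptr->add_vertex_binding(0,                              /* in_binding         */
                                              Anvil::VertexInputRate::VERTEX, /* in_step_rate       */
                                              32,                             /* in_stride_in_bytes */
                                              2,                              /* in_n_attributes    */
                                              attributes,
                                              1);                             /* in_divisor - only meaningful for per-instance data */
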
in_src_pipeline_create_info_ptr->m_sample_locations_per_pixel; + + m_rasterization_order = in_src_pipeline_create_info_ptr->m_rasterization_order; + + m_conservative_rasterization_mode = in_src_pipeline_create_info_ptr->m_conservative_rasterization_mode; + m_extra_primitive_overestimation_size = in_src_pipeline_create_info_ptr->m_extra_primitive_overestimation_size; + + m_tessellation_domain_origin = in_src_pipeline_create_info_ptr->m_tessellation_domain_origin; + + m_bindings = in_src_pipeline_create_info_ptr->m_bindings; + m_blend_constant[0] = in_src_pipeline_create_info_ptr->m_blend_constant[0]; + m_blend_constant[1] = in_src_pipeline_create_info_ptr->m_blend_constant[1]; + m_blend_constant[2] = in_src_pipeline_create_info_ptr->m_blend_constant[2]; + m_blend_constant[3] = in_src_pipeline_create_info_ptr->m_blend_constant[3]; + m_cull_mode = in_src_pipeline_create_info_ptr->m_cull_mode; + m_polygon_mode = in_src_pipeline_create_info_ptr->m_polygon_mode; + m_front_face = in_src_pipeline_create_info_ptr->m_front_face; + m_line_width = in_src_pipeline_create_info_ptr->m_line_width; + m_logic_op = in_src_pipeline_create_info_ptr->m_logic_op; + m_min_sample_shading = in_src_pipeline_create_info_ptr->m_min_sample_shading; + m_n_dynamic_scissor_boxes = in_src_pipeline_create_info_ptr->m_n_dynamic_scissor_boxes; + m_n_dynamic_viewports = in_src_pipeline_create_info_ptr->m_n_dynamic_viewports; + m_n_patch_control_points = in_src_pipeline_create_info_ptr->m_n_patch_control_points; + m_primitive_topology = in_src_pipeline_create_info_ptr->m_primitive_topology; + m_sample_count = in_src_pipeline_create_info_ptr->m_sample_count; + m_sample_mask = in_src_pipeline_create_info_ptr->m_sample_mask; + m_scissor_boxes = in_src_pipeline_create_info_ptr->m_scissor_boxes; + m_subpass_attachment_blending_properties = in_src_pipeline_create_info_ptr->m_subpass_attachment_blending_properties; + m_viewports = in_src_pipeline_create_info_ptr->m_viewports; + + + BasePipelineCreateInfo::copy_state_from(in_src_pipeline_create_info_ptr); + + result = true; +end: + return result; +} + +Anvil::GraphicsPipelineCreateInfoUniquePtr Anvil::GraphicsPipelineCreateInfo::create(const Anvil::PipelineCreateFlags& in_create_flags, + const RenderPass* in_renderpass_ptr, + SubPassID in_subpass_id, + const ShaderModuleStageEntryPoint& in_fragment_shader_stage_entrypoint_info, + const ShaderModuleStageEntryPoint& in_geometry_shader_stage_entrypoint_info, + const ShaderModuleStageEntryPoint& in_tess_control_shader_stage_entrypoint_info, + const ShaderModuleStageEntryPoint& in_tess_evaluation_shader_stage_entrypoint_info, + const ShaderModuleStageEntryPoint& in_vertex_shader_shader_stage_entrypoint_info, + const Anvil::GraphicsPipelineCreateInfo* in_opt_reference_pipeline_info_ptr, + const Anvil::PipelineID* in_opt_base_pipeline_id_ptr) +{ + Anvil::GraphicsPipelineCreateInfoUniquePtr result_ptr(nullptr, + std::default_delete() ); + + result_ptr.reset( + new GraphicsPipelineCreateInfo(in_renderpass_ptr, + in_subpass_id) + ); + + if (result_ptr != nullptr) + { + const ShaderModuleStageEntryPoint stages[] = + { + in_fragment_shader_stage_entrypoint_info, + in_geometry_shader_stage_entrypoint_info, + in_tess_control_shader_stage_entrypoint_info, + in_tess_evaluation_shader_stage_entrypoint_info, + in_vertex_shader_shader_stage_entrypoint_info + }; + + if (in_opt_reference_pipeline_info_ptr != nullptr) + { + if (!result_ptr->copy_gfx_state_from(in_opt_reference_pipeline_info_ptr) ) + { + anvil_assert_fail(); + + result_ptr.reset(); + } + } + 
+ result_ptr->init(in_create_flags, + sizeof(stages) / sizeof(stages[0]), + stages, + in_opt_base_pipeline_id_ptr); + } + + return result_ptr; +} + +Anvil::GraphicsPipelineCreateInfoUniquePtr Anvil::GraphicsPipelineCreateInfo::create_proxy() +{ + Anvil::GraphicsPipelineCreateInfoUniquePtr result_ptr(nullptr, + std::default_delete() ); + + result_ptr.reset( + new GraphicsPipelineCreateInfo(nullptr, /* in_renderpass_ptr */ + UINT32_MAX) + ); + + if (result_ptr != nullptr) + { + result_ptr->init_proxy(); + } + + return result_ptr; +} + +void Anvil::GraphicsPipelineCreateInfo::get_blending_properties(const float** out_opt_blend_constant_vec4_ptr_ptr, + uint32_t* out_opt_n_blend_attachments_ptr) const +{ + if (out_opt_blend_constant_vec4_ptr_ptr != nullptr) + { + *out_opt_blend_constant_vec4_ptr_ptr = m_blend_constant; + } + + if (out_opt_n_blend_attachments_ptr != nullptr) + { + *out_opt_n_blend_attachments_ptr = static_cast(m_subpass_attachment_blending_properties.size() ); + } +} + +bool Anvil::GraphicsPipelineCreateInfo::get_color_blend_attachment_properties(SubPassAttachmentID in_attachment_id, + bool* out_opt_blending_enabled_ptr, + Anvil::BlendOp* out_opt_blend_op_color_ptr, + Anvil::BlendOp* out_opt_blend_op_alpha_ptr, + Anvil::BlendFactor* out_opt_src_color_blend_factor_ptr, + Anvil::BlendFactor* out_opt_dst_color_blend_factor_ptr, + Anvil::BlendFactor* out_opt_src_alpha_blend_factor_ptr, + Anvil::BlendFactor* out_opt_dst_alpha_blend_factor_ptr, + Anvil::ColorComponentFlags* out_opt_channel_write_mask_ptr) const +{ + SubPassAttachmentToBlendingPropertiesMap::const_iterator props_iterator; + bool result = false; + + props_iterator = m_subpass_attachment_blending_properties.find(in_attachment_id); + + if (props_iterator == m_subpass_attachment_blending_properties.end() ) + { + goto end; + } + + if (out_opt_blending_enabled_ptr != nullptr) + { + *out_opt_blending_enabled_ptr = props_iterator->second.blend_enabled; + } + + if (out_opt_blend_op_color_ptr != nullptr) + { + *out_opt_blend_op_color_ptr = props_iterator->second.blend_op_color; + } + + if (out_opt_blend_op_alpha_ptr != nullptr) + { + *out_opt_blend_op_alpha_ptr = props_iterator->second.blend_op_alpha; + } + + if (out_opt_src_color_blend_factor_ptr != nullptr) + { + *out_opt_src_color_blend_factor_ptr = props_iterator->second.src_color_blend_factor; + } + + if (out_opt_dst_color_blend_factor_ptr != nullptr) + { + *out_opt_dst_color_blend_factor_ptr = props_iterator->second.dst_color_blend_factor; + } + + if (out_opt_src_alpha_blend_factor_ptr != nullptr) + { + *out_opt_src_alpha_blend_factor_ptr = props_iterator->second.src_alpha_blend_factor; + } + + if (out_opt_dst_alpha_blend_factor_ptr != nullptr) + { + *out_opt_dst_alpha_blend_factor_ptr = props_iterator->second.dst_alpha_blend_factor; + } + + if (out_opt_channel_write_mask_ptr != nullptr) + { + *out_opt_channel_write_mask_ptr = props_iterator->second.channel_write_mask; + } + + result = true; +end: + return result; +} + +void Anvil::GraphicsPipelineCreateInfo::get_depth_bias_state(bool* out_opt_is_enabled_ptr, + float* out_opt_depth_bias_constant_factor_ptr, + float* out_opt_depth_bias_clamp_ptr, + float* out_opt_depth_bias_slope_factor_ptr) const +{ + + if (out_opt_is_enabled_ptr != nullptr) + { + *out_opt_is_enabled_ptr = m_depth_bias_enabled; + } + + if (out_opt_depth_bias_constant_factor_ptr != nullptr) + { + *out_opt_depth_bias_constant_factor_ptr = m_depth_bias_constant_factor; + } + + if (out_opt_depth_bias_clamp_ptr != nullptr) + { + *out_opt_depth_bias_clamp_ptr = 
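A sketch of the create() call for a typical vertex + fragment pipeline; renderpass_ptr, subpass_id and the two entry-point objects are assumed to exist, the create-flag spelling follows the flag-type conventions used elsewhere in this change, and the two trailing optional pointers are passed explicitly as nullptr:

    auto gfx_info_ptr = Anvil::GraphicsPipelineCreateInfo::create(Anvil::PipelineCreateFlagBits::NONE,  /* no special create flags    */
                                                                  renderpass_ptr,
                                                                  subpass_id,
                                                                  fs_entrypoint,                        /* fragment stage             */
                                                                  Anvil::ShaderModuleStageEntryPoint(), /* no geometry stage          */
                                                                  Anvil::ShaderModuleStageEntryPoint(), /* no tess control stage      */
                                                                  Anvil::ShaderModuleStageEntryPoint(), /* no tess evaluation stage   */
                                                                  vs_entrypoint,                        /* vertex stage               */
                                                                  nullptr,                              /* no reference pipeline info */
                                                                  nullptr);                             /* no base pipeline ID        */
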
m_depth_bias_clamp; + } + + if (out_opt_depth_bias_slope_factor_ptr != nullptr) + { + *out_opt_depth_bias_slope_factor_ptr = m_depth_bias_slope_factor; + } +} + +void Anvil::GraphicsPipelineCreateInfo::get_depth_bounds_state(bool* out_opt_is_enabled_ptr, + float* out_opt_min_depth_bounds_ptr, + float* out_opt_max_depth_bounds_ptr) const +{ + if (out_opt_is_enabled_ptr != nullptr) + { + *out_opt_is_enabled_ptr = m_depth_bounds_test_enabled; + } + + if (out_opt_min_depth_bounds_ptr != nullptr) + { + *out_opt_min_depth_bounds_ptr = m_min_depth_bounds; + } + + if (out_opt_max_depth_bounds_ptr != nullptr) + { + *out_opt_max_depth_bounds_ptr = m_max_depth_bounds; + } +} + +void Anvil::GraphicsPipelineCreateInfo::get_depth_test_state(bool* out_opt_is_enabled_ptr, + Anvil::CompareOp* out_opt_compare_op_ptr) const +{ + if (out_opt_is_enabled_ptr != nullptr) + { + *out_opt_is_enabled_ptr = m_depth_test_enabled; + } + + if (out_opt_compare_op_ptr != nullptr) + { + *out_opt_compare_op_ptr = m_depth_test_compare_op; + } +} + +void Anvil::GraphicsPipelineCreateInfo::get_enabled_dynamic_states(const Anvil::DynamicState** out_dynamic_states_ptr_ptr, + uint32_t* out_n_dynamic_states_ptr) const +{ + *out_n_dynamic_states_ptr = static_cast(m_enabled_dynamic_states.size() ); + *out_dynamic_states_ptr_ptr = ((m_enabled_dynamic_states.size() > 0) ? &m_enabled_dynamic_states.at(0) + : nullptr); +} + +void Anvil::GraphicsPipelineCreateInfo::get_graphics_pipeline_properties(uint32_t* out_opt_n_scissors_ptr, + uint32_t* out_opt_n_viewports_ptr, + uint32_t* out_opt_n_vertex_bindings_ptr, + const RenderPass** out_opt_renderpass_ptr_ptr, + SubPassID* out_opt_subpass_id_ptr) const +{ + if (out_opt_n_scissors_ptr != nullptr) + { + *out_opt_n_scissors_ptr = static_cast(m_scissor_boxes.size() ); + } + + if (out_opt_n_viewports_ptr != nullptr) + { + *out_opt_n_viewports_ptr = static_cast(m_viewports.size() ); + } + + if (out_opt_n_vertex_bindings_ptr != nullptr) + { + *out_opt_n_vertex_bindings_ptr = static_cast(m_bindings.size() ); + } + + if (out_opt_renderpass_ptr_ptr != nullptr) + { + *out_opt_renderpass_ptr_ptr = m_renderpass_ptr; + } + + if (out_opt_subpass_id_ptr != nullptr) + { + *out_opt_subpass_id_ptr = m_subpass_id; + } +} + +void Anvil::GraphicsPipelineCreateInfo::get_logic_op_state(bool* out_opt_is_enabled_ptr, + Anvil::LogicOp* out_opt_logic_op_ptr) const +{ + if (out_opt_is_enabled_ptr != nullptr) + { + *out_opt_is_enabled_ptr = m_logic_op_enabled; + } + + if (out_opt_logic_op_ptr != nullptr) + { + *out_opt_logic_op_ptr = m_logic_op; + } +} + +void Anvil::GraphicsPipelineCreateInfo::get_multisampling_properties(SampleCountFlagBits* out_opt_sample_count_ptr, + const VkSampleMask** out_opt_sample_mask_ptr_ptr) const +{ + if (out_opt_sample_count_ptr != nullptr) + { + *out_opt_sample_count_ptr = m_sample_count; + } + + if (out_opt_sample_mask_ptr_ptr != nullptr) + { + *out_opt_sample_mask_ptr_ptr = &m_sample_mask; + } +} + +uint32_t Anvil::GraphicsPipelineCreateInfo::get_n_dynamic_scissor_boxes() const +{ + return m_n_dynamic_scissor_boxes; +} + +uint32_t Anvil::GraphicsPipelineCreateInfo::get_n_dynamic_viewports() const +{ + return m_n_dynamic_viewports; +} + +uint32_t Anvil::GraphicsPipelineCreateInfo::get_n_scissor_boxes() const +{ + return static_cast(m_scissor_boxes.size() ); +} + +uint32_t Anvil::GraphicsPipelineCreateInfo::get_n_viewports() const +{ + return static_cast(m_viewports.size() ); +} + +Anvil::PrimitiveTopology Anvil::GraphicsPipelineCreateInfo::get_primitive_topology() const +{ + return 
m_primitive_topology; +} + +Anvil::RasterizationOrderAMD Anvil::GraphicsPipelineCreateInfo::get_rasterization_order() const +{ + return m_rasterization_order; +} + +Anvil::ConservativeRasterizationModeEXT Anvil::GraphicsPipelineCreateInfo::get_conservative_rasterization_mode() const +{ + return m_conservative_rasterization_mode; +} + +float Anvil::GraphicsPipelineCreateInfo::get_extra_primitive_overestimation_size() const +{ + return m_extra_primitive_overestimation_size; +} + +void Anvil::GraphicsPipelineCreateInfo::get_rasterization_properties(Anvil::PolygonMode* out_opt_polygon_mode_ptr, + Anvil::CullModeFlags* out_opt_cull_mode_ptr, + Anvil::FrontFace* out_opt_front_face_ptr, + float* out_opt_line_width_ptr) const +{ + if (out_opt_polygon_mode_ptr != nullptr) + { + *out_opt_polygon_mode_ptr = m_polygon_mode; + } + + if (out_opt_cull_mode_ptr != nullptr) + { + *out_opt_cull_mode_ptr = m_cull_mode; + } + + if (out_opt_front_face_ptr != nullptr) + { + *out_opt_front_face_ptr = m_front_face; + } + + if (out_opt_line_width_ptr != nullptr) + { + *out_opt_line_width_ptr = m_line_width; + } +} + +void Anvil::GraphicsPipelineCreateInfo::get_sample_location_state(bool* out_opt_is_enabled_ptr, + Anvil::SampleCountFlagBits* out_opt_sample_locations_per_pixel_ptr, + VkExtent2D* out_opt_sample_location_grid_size_ptr, + uint32_t* out_opt_n_sample_locations_ptr, + const Anvil::SampleLocation** out_opt_sample_locations_ptr_ptr) const +{ + if (out_opt_is_enabled_ptr != nullptr) + { + *out_opt_is_enabled_ptr = m_sample_locations_enabled; + } + + if (out_opt_sample_locations_per_pixel_ptr != nullptr) + { + *out_opt_sample_locations_per_pixel_ptr = m_sample_locations_per_pixel; + } + + if (out_opt_sample_location_grid_size_ptr != nullptr) + { + *out_opt_sample_location_grid_size_ptr = m_sample_location_grid_size; + } + + if (out_opt_n_sample_locations_ptr != nullptr) + { + *out_opt_n_sample_locations_ptr = static_cast(m_sample_locations.size() ); + } + + if (out_opt_sample_locations_per_pixel_ptr != nullptr) + { + *out_opt_sample_locations_ptr_ptr = (m_sample_locations.size() > 0) ? 
&m_sample_locations.at(0) + : nullptr; + } +} + +void Anvil::GraphicsPipelineCreateInfo::get_sample_shading_state(bool* out_opt_is_enabled_ptr, + float* out_opt_min_sample_shading_ptr) const +{ + if (out_opt_is_enabled_ptr != nullptr) + { + *out_opt_is_enabled_ptr = m_sample_shading_enabled; + } + + if (out_opt_min_sample_shading_ptr != nullptr) + { + *out_opt_min_sample_shading_ptr = m_min_sample_shading; + } +} + +bool Anvil::GraphicsPipelineCreateInfo::get_scissor_box_properties(uint32_t in_n_scissor_box, + int32_t* out_opt_x_ptr, + int32_t* out_opt_y_ptr, + uint32_t* out_opt_width_ptr, + uint32_t* out_opt_height_ptr) const +{ + bool result = false; + const InternalScissorBox* scissor_box_props_ptr = nullptr; + + if (m_scissor_boxes.find(in_n_scissor_box) == m_scissor_boxes.end() ) + { + anvil_assert(!(m_scissor_boxes.find(in_n_scissor_box) == m_scissor_boxes.end()) ); + + goto end; + } + else + { + scissor_box_props_ptr = &m_scissor_boxes.at(in_n_scissor_box); + } + + if (out_opt_x_ptr != nullptr) + { + *out_opt_x_ptr = scissor_box_props_ptr->x; + } + + if (out_opt_y_ptr != nullptr) + { + *out_opt_y_ptr = scissor_box_props_ptr->y; + } + + if (out_opt_width_ptr != nullptr) + { + *out_opt_width_ptr = scissor_box_props_ptr->width; + } + + if (out_opt_height_ptr != nullptr) + { + *out_opt_height_ptr = scissor_box_props_ptr->height; + } + + result = true; +end: + return result; +} + +void Anvil::GraphicsPipelineCreateInfo::get_stencil_test_properties(bool* out_opt_is_enabled_ptr, + Anvil::StencilOp* out_opt_front_stencil_fail_op_ptr, + Anvil::StencilOp* out_opt_front_stencil_pass_op_ptr, + Anvil::StencilOp* out_opt_front_stencil_depth_fail_op_ptr, + Anvil::CompareOp* out_opt_front_stencil_compare_op_ptr, + uint32_t* out_opt_front_stencil_compare_mask_ptr, + uint32_t* out_opt_front_stencil_write_mask_ptr, + uint32_t* out_opt_front_stencil_reference_ptr, + Anvil::StencilOp* out_opt_back_stencil_fail_op_ptr, + Anvil::StencilOp* out_opt_back_stencil_pass_op_ptr, + Anvil::StencilOp* out_opt_back_stencil_depth_fail_op_ptr, + Anvil::CompareOp* out_opt_back_stencil_compare_op_ptr, + uint32_t* out_opt_back_stencil_compare_mask_ptr, + uint32_t* out_opt_back_stencil_write_mask_ptr, + uint32_t* out_opt_back_stencil_reference_ptr) const +{ + if (out_opt_is_enabled_ptr != nullptr) + { + *out_opt_is_enabled_ptr = m_stencil_test_enabled; + } + + if (out_opt_front_stencil_fail_op_ptr != nullptr) + { + *out_opt_front_stencil_fail_op_ptr = static_cast(m_stencil_state_front_face.failOp); + } + + if (out_opt_front_stencil_pass_op_ptr != nullptr) + { + *out_opt_front_stencil_pass_op_ptr = static_cast(m_stencil_state_front_face.passOp); + } + + if (out_opt_front_stencil_depth_fail_op_ptr != nullptr) + { + *out_opt_front_stencil_depth_fail_op_ptr = static_cast(m_stencil_state_front_face.depthFailOp); + } + + if (out_opt_front_stencil_compare_op_ptr != nullptr) + { + *out_opt_front_stencil_compare_op_ptr = static_cast(m_stencil_state_front_face.compareOp); + } + + if (out_opt_front_stencil_compare_mask_ptr != nullptr) + { + *out_opt_front_stencil_compare_mask_ptr = m_stencil_state_front_face.compareMask; + } + + if (out_opt_front_stencil_write_mask_ptr != nullptr) + { + *out_opt_front_stencil_write_mask_ptr = m_stencil_state_front_face.writeMask; + } + + if (out_opt_front_stencil_reference_ptr != nullptr) + { + *out_opt_front_stencil_reference_ptr = m_stencil_state_front_face.reference; + } + + if (out_opt_back_stencil_fail_op_ptr != nullptr) + { + *out_opt_back_stencil_fail_op_ptr = 
static_cast(m_stencil_state_back_face.failOp); + } + + if (out_opt_back_stencil_pass_op_ptr != nullptr) + { + *out_opt_back_stencil_pass_op_ptr = static_cast(m_stencil_state_back_face.passOp); + } + + if (out_opt_back_stencil_depth_fail_op_ptr != nullptr) + { + *out_opt_back_stencil_depth_fail_op_ptr = static_cast(m_stencil_state_back_face.depthFailOp); + } + + if (out_opt_back_stencil_compare_op_ptr != nullptr) + { + *out_opt_back_stencil_compare_op_ptr = static_cast(m_stencil_state_back_face.compareOp); + } + + if (out_opt_back_stencil_compare_mask_ptr != nullptr) + { + *out_opt_back_stencil_compare_mask_ptr = m_stencil_state_back_face.compareMask; + } + + if (out_opt_back_stencil_write_mask_ptr != nullptr) + { + *out_opt_back_stencil_write_mask_ptr = m_stencil_state_back_face.writeMask; + } + + if (out_opt_back_stencil_reference_ptr != nullptr) + { + *out_opt_back_stencil_reference_ptr = m_stencil_state_back_face.reference; + } +} + +uint32_t Anvil::GraphicsPipelineCreateInfo::get_n_patch_control_points() const +{ + return m_n_patch_control_points; +} + +bool Anvil::GraphicsPipelineCreateInfo::get_vertex_binding_properties(uint32_t in_n_vertex_binding, + uint32_t* out_opt_binding_ptr, + uint32_t* out_opt_stride_ptr, + Anvil::VertexInputRate* out_opt_rate_ptr, + uint32_t* out_opt_n_attributes_ptr, + const VertexInputAttribute** out_opt_attributes_ptr_ptr, + uint32_t* out_opt_divisor_ptr) const +{ + auto binding_iterator = m_bindings.begin(); + const InternalVertexBinding* binding_ptr = nullptr; + bool result = false; + + if (m_bindings.size() < in_n_vertex_binding) + { + goto end; + } + else + { + uint32_t n_item = 0; + + while (n_item != in_n_vertex_binding) + { + binding_iterator++; + n_item ++; + + if (binding_iterator == m_bindings.end() ) + { + goto end; + } + } + + binding_ptr = &binding_iterator->second; + } + + if (out_opt_binding_ptr != nullptr) + { + *out_opt_binding_ptr = binding_iterator->first; + } + + if (out_opt_stride_ptr != nullptr) + { + *out_opt_stride_ptr = binding_ptr->stride_in_bytes; + } + + if (out_opt_rate_ptr != nullptr) + { + *out_opt_rate_ptr = binding_ptr->rate; + } + + if (out_opt_n_attributes_ptr != nullptr) + { + *out_opt_n_attributes_ptr = static_cast(binding_ptr->attributes.size() ); + } + + if (out_opt_attributes_ptr_ptr != nullptr) + { + *out_opt_attributes_ptr_ptr = (binding_ptr->attributes.size() > 0) ? 
&binding_ptr->attributes.at(0) + : nullptr; + } + + if (out_opt_divisor_ptr != nullptr) + { + *out_opt_divisor_ptr = binding_ptr->divisor; + } + + /* All done */ + result = true; +end: + return result; +} + +bool Anvil::GraphicsPipelineCreateInfo::get_viewport_properties(uint32_t in_n_viewport, + float* out_opt_origin_x_ptr, + float* out_opt_origin_y_ptr, + float* out_opt_width_ptr, + float* out_opt_height_ptr, + float* out_opt_min_depth_ptr, + float* out_opt_max_depth_ptr) const +{ + bool result = false; + const InternalViewport* viewport_props_ptr; + + if (m_viewports.find(in_n_viewport) == m_viewports.end() ) + { + anvil_assert(!(m_viewports.find(in_n_viewport) == m_viewports.end()) ); + + goto end; + } + else + { + viewport_props_ptr = &m_viewports.at(in_n_viewport); + } + + if (out_opt_origin_x_ptr != nullptr) + { + *out_opt_origin_x_ptr = viewport_props_ptr->origin_x; + } + + if (out_opt_origin_y_ptr != nullptr) + { + *out_opt_origin_y_ptr = viewport_props_ptr->origin_y; + } + + if (out_opt_width_ptr != nullptr) + { + *out_opt_width_ptr = viewport_props_ptr->width; + } + + if (out_opt_height_ptr != nullptr) + { + *out_opt_height_ptr = viewport_props_ptr->height; + } + + if (out_opt_min_depth_ptr != nullptr) + { + *out_opt_min_depth_ptr = viewport_props_ptr->min_depth; + } + + if (out_opt_max_depth_ptr != nullptr) + { + *out_opt_max_depth_ptr = viewport_props_ptr->max_depth; + } + + result = true; +end: + return result; +} + +bool Anvil::GraphicsPipelineCreateInfo::is_alpha_to_coverage_enabled() const +{ + return m_alpha_to_coverage_enabled; +} + +bool Anvil::GraphicsPipelineCreateInfo::is_alpha_to_one_enabled() const +{ + return m_alpha_to_one_enabled; +} + +bool Anvil::GraphicsPipelineCreateInfo::is_depth_clamp_enabled() const +{ + return m_depth_clamp_enabled; +} + +bool Anvil::GraphicsPipelineCreateInfo::is_depth_clip_enabled() const +{ + return m_depth_clip_enabled; +} + +bool Anvil::GraphicsPipelineCreateInfo::is_primitive_restart_enabled() const +{ + return m_primitive_restart_enabled; +} + +bool Anvil::GraphicsPipelineCreateInfo::is_rasterizer_discard_enabled() const +{ + return m_rasterizer_discard_enabled; +} + +bool Anvil::GraphicsPipelineCreateInfo::is_sample_mask_enabled() const +{ + return m_sample_mask_enabled; +} + +void Anvil::GraphicsPipelineCreateInfo::set_blending_properties(const float* in_blend_constant_vec4) +{ + memcpy(m_blend_constant, + in_blend_constant_vec4, + sizeof(m_blend_constant) ); +} + +void Anvil::GraphicsPipelineCreateInfo::set_color_blend_attachment_properties(SubPassAttachmentID in_attachment_id, + bool in_blending_enabled, + Anvil::BlendOp in_blend_op_color, + Anvil::BlendOp in_blend_op_alpha, + Anvil::BlendFactor in_src_color_blend_factor, + Anvil::BlendFactor in_dst_color_blend_factor, + Anvil::BlendFactor in_src_alpha_blend_factor, + Anvil::BlendFactor in_dst_alpha_blend_factor, + Anvil::ColorComponentFlags in_channel_write_mask) +{ + BlendingProperties* attachment_blending_props_ptr = &m_subpass_attachment_blending_properties[in_attachment_id]; + + attachment_blending_props_ptr->blend_enabled = in_blending_enabled; + attachment_blending_props_ptr->blend_op_alpha = in_blend_op_alpha; + attachment_blending_props_ptr->blend_op_color = in_blend_op_color; + attachment_blending_props_ptr->channel_write_mask = in_channel_write_mask; + attachment_blending_props_ptr->dst_alpha_blend_factor = in_dst_alpha_blend_factor; + attachment_blending_props_ptr->dst_color_blend_factor = in_dst_color_blend_factor; + 
attachment_blending_props_ptr->src_alpha_blend_factor = in_src_alpha_blend_factor; + attachment_blending_props_ptr->src_color_blend_factor = in_src_color_blend_factor; +} + +void Anvil::GraphicsPipelineCreateInfo::set_multisampling_properties(Anvil::SampleCountFlagBits in_sample_count, + float in_min_sample_shading, + const VkSampleMask in_sample_mask) +{ + m_min_sample_shading = in_min_sample_shading; + m_sample_count = in_sample_count; + m_sample_mask = in_sample_mask; +} + +void Anvil::GraphicsPipelineCreateInfo::set_n_dynamic_scissor_boxes(uint32_t in_n_dynamic_scissor_boxes) +{ + m_n_dynamic_scissor_boxes = in_n_dynamic_scissor_boxes; +} + +void Anvil::GraphicsPipelineCreateInfo::set_n_dynamic_viewports(uint32_t in_n_dynamic_viewports) +{ + m_n_dynamic_viewports = in_n_dynamic_viewports; +} + +void Anvil::GraphicsPipelineCreateInfo::set_n_patch_control_points(uint32_t in_n_patch_control_points) +{ + m_n_patch_control_points = in_n_patch_control_points; +} + +void Anvil::GraphicsPipelineCreateInfo::set_primitive_topology(Anvil::PrimitiveTopology in_primitive_topology) +{ + m_primitive_topology = in_primitive_topology; +} + +void Anvil::GraphicsPipelineCreateInfo::set_rasterization_order(Anvil::RasterizationOrderAMD in_rasterization_order) +{ + m_rasterization_order = in_rasterization_order; +} + +void Anvil::GraphicsPipelineCreateInfo::set_rasterization_stream_index(const uint32_t& in_rasterization_stream_index) +{ + m_rasterization_stream_index = in_rasterization_stream_index; +} + +void Anvil::GraphicsPipelineCreateInfo::set_conservative_rasterization_mode(Anvil::ConservativeRasterizationModeEXT in_conservative_rasterization_mode) +{ + m_conservative_rasterization_mode = in_conservative_rasterization_mode; +} + +void Anvil::GraphicsPipelineCreateInfo::set_extra_primitive_overestimation_size(float extra_primitive_overestimation_size) +{ + m_extra_primitive_overestimation_size = extra_primitive_overestimation_size; +} + +void Anvil::GraphicsPipelineCreateInfo::set_rasterization_properties(Anvil::PolygonMode in_polygon_mode, + Anvil::CullModeFlags in_cull_mode, + Anvil::FrontFace in_front_face, + float in_line_width) +{ + m_cull_mode = in_cull_mode; + m_front_face = in_front_face; + m_line_width = in_line_width; + m_polygon_mode = in_polygon_mode; +} + +void Anvil::GraphicsPipelineCreateInfo::set_sample_location_properties(const Anvil::SampleCountFlagBits& in_sample_locations_per_pixel, + const VkExtent2D& in_sample_location_grid_size, + const uint32_t& in_n_sample_locations, + const Anvil::SampleLocation* in_sample_locations_ptr) +{ + anvil_assert(in_n_sample_locations != 0); + + m_sample_locations.resize(in_n_sample_locations); + + m_sample_location_grid_size = in_sample_location_grid_size; + m_sample_locations_per_pixel = in_sample_locations_per_pixel; + + for (uint32_t n_sample_location = 0; + n_sample_location < in_n_sample_locations; + ++n_sample_location) + { + const auto& current_location_ptr = in_sample_locations_ptr + n_sample_location; + + m_sample_locations.at(n_sample_location) = *current_location_ptr; + } +} + +void Anvil::GraphicsPipelineCreateInfo::set_scissor_box_properties(uint32_t in_n_scissor_box, + int32_t in_x, + int32_t in_y, + uint32_t in_width, + uint32_t in_height) +{ + m_scissor_boxes[in_n_scissor_box] = InternalScissorBox(in_x, + in_y, + in_width, + in_height); +} + +void Anvil::GraphicsPipelineCreateInfo::set_stencil_test_properties(bool in_update_front_face_state, + Anvil::StencilOp in_stencil_fail_op, + Anvil::StencilOp in_stencil_pass_op, + 
Anvil::StencilOp in_stencil_depth_fail_op, + Anvil::CompareOp in_stencil_compare_op, + uint32_t in_stencil_compare_mask, + uint32_t in_stencil_write_mask, + uint32_t in_stencil_reference) +{ + VkStencilOpState* const stencil_op_state_ptr = (in_update_front_face_state) ? &m_stencil_state_front_face + : &m_stencil_state_back_face; + + stencil_op_state_ptr->compareMask = in_stencil_compare_mask; + stencil_op_state_ptr->compareOp = static_cast(in_stencil_compare_op); + stencil_op_state_ptr->depthFailOp = static_cast(in_stencil_depth_fail_op); + stencil_op_state_ptr->failOp = static_cast(in_stencil_fail_op); + stencil_op_state_ptr->passOp = static_cast(in_stencil_pass_op); + stencil_op_state_ptr->reference = in_stencil_reference; + stencil_op_state_ptr->writeMask = in_stencil_write_mask; +} + +void Anvil::GraphicsPipelineCreateInfo::set_viewport_properties(uint32_t in_n_viewport, + float in_origin_x, + float in_origin_y, + float in_width, + float in_height, + float in_min_depth, + float in_max_depth) +{ + m_viewports[in_n_viewport] = InternalViewport(in_origin_x, + in_origin_y, + in_width, + in_height, + in_min_depth, + in_max_depth); +} + +void Anvil::GraphicsPipelineCreateInfo::set_tessellation_domain_origin(const Anvil::TessellationDomainOrigin& in_new_origin) +{ + m_tessellation_domain_origin = in_new_origin; +} + +void Anvil::GraphicsPipelineCreateInfo::toggle_alpha_to_coverage(bool in_should_enable) +{ + m_alpha_to_coverage_enabled = in_should_enable; +} + +void Anvil::GraphicsPipelineCreateInfo::toggle_alpha_to_one(bool in_should_enable) +{ + m_alpha_to_one_enabled = in_should_enable; +} + +void Anvil::GraphicsPipelineCreateInfo::toggle_depth_bias(bool in_should_enable, + float in_depth_bias_constant_factor, + float in_depth_bias_clamp, + float in_depth_bias_slope_factor) +{ + m_depth_bias_constant_factor = in_depth_bias_constant_factor; + m_depth_bias_clamp = in_depth_bias_clamp; + m_depth_bias_enabled = in_should_enable; + m_depth_bias_slope_factor = in_depth_bias_slope_factor; +} + +void Anvil::GraphicsPipelineCreateInfo::toggle_depth_bounds_test(bool in_should_enable, + float in_min_depth_bounds, + float in_max_depth_bounds) +{ + m_depth_bounds_test_enabled = in_should_enable; + m_max_depth_bounds = in_max_depth_bounds; + m_min_depth_bounds = in_min_depth_bounds; +} + +void Anvil::GraphicsPipelineCreateInfo::toggle_depth_clamp(bool in_should_enable) +{ + m_depth_clamp_enabled = in_should_enable; +} + +void Anvil::GraphicsPipelineCreateInfo::toggle_depth_clip(bool in_should_enable) +{ + m_depth_clip_enabled = in_should_enable; +} + +void Anvil::GraphicsPipelineCreateInfo::toggle_depth_test(bool in_should_enable, + Anvil::CompareOp in_compare_op) +{ + m_depth_test_enabled = in_should_enable; + m_depth_test_compare_op = in_compare_op; +} + +void Anvil::GraphicsPipelineCreateInfo::toggle_depth_writes(bool in_should_enable) +{ + m_depth_writes_enabled = in_should_enable; +} + +void Anvil::GraphicsPipelineCreateInfo::toggle_dynamic_state(bool in_should_enable, + const Anvil::DynamicState& in_dynamic_state) +{ + if (in_should_enable) + { + if (std::find(m_enabled_dynamic_states.begin(), + m_enabled_dynamic_states.end (), + in_dynamic_state) == m_enabled_dynamic_states.end() ) + { + m_enabled_dynamic_states.push_back(in_dynamic_state); + } + } + else + { + auto iterator = std::find(m_enabled_dynamic_states.begin(), + m_enabled_dynamic_states.end (), + in_dynamic_state); + + if (iterator != m_enabled_dynamic_states.end() ) + { + m_enabled_dynamic_states.erase(iterator); + } + } +} + +void 
Anvil::GraphicsPipelineCreateInfo::toggle_dynamic_states(bool in_should_enable, + const Anvil::DynamicState* in_dynamic_states_ptr, + const uint32_t& in_n_dynamic_states) +{ + for (uint32_t n_dynamic_state = 0; + n_dynamic_state < in_n_dynamic_states; + ++n_dynamic_state) + { + const auto current_state = in_dynamic_states_ptr[n_dynamic_state]; + + toggle_dynamic_state(in_should_enable, + current_state); + } +} + +void Anvil::GraphicsPipelineCreateInfo::toggle_dynamic_states(bool in_should_enable, + const std::vector& in_dynamic_states) +{ + for (const auto& current_state : in_dynamic_states) + { + toggle_dynamic_state(in_should_enable, + current_state); + } +} + +void Anvil::GraphicsPipelineCreateInfo::toggle_logic_op(bool in_should_enable, + Anvil::LogicOp in_logic_op) +{ + m_logic_op = in_logic_op; + m_logic_op_enabled = in_should_enable; +} + +void Anvil::GraphicsPipelineCreateInfo::toggle_primitive_restart(bool in_should_enable) +{ + m_primitive_restart_enabled = in_should_enable; +} + +void Anvil::GraphicsPipelineCreateInfo::toggle_rasterizer_discard(bool in_should_enable) +{ + m_rasterizer_discard_enabled = in_should_enable; +} + +void Anvil::GraphicsPipelineCreateInfo::toggle_sample_locations(bool in_should_enable) +{ + m_sample_locations_enabled = in_should_enable; +} + +void Anvil::GraphicsPipelineCreateInfo::toggle_sample_mask(bool in_should_enable) +{ + m_sample_mask_enabled = in_should_enable; +} + +void Anvil::GraphicsPipelineCreateInfo::toggle_sample_shading(bool in_should_enable) +{ + m_sample_shading_enabled = in_should_enable; +} + +void Anvil::GraphicsPipelineCreateInfo::toggle_stencil_test(bool in_should_enable) +{ + m_stencil_test_enabled = in_should_enable; +} diff --git a/src/misc/image_create_info.cpp b/src/misc/image_create_info.cpp new file mode 100644 index 00000000..a43428ab --- /dev/null +++ b/src/misc/image_create_info.cpp @@ -0,0 +1,298 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// +#include "misc/image_create_info.h" +#include "misc/swapchain_create_info.h" +#include "wrappers/swapchain.h" + +Anvil::ImageCreateInfoUniquePtr Anvil::ImageCreateInfo::create_no_alloc(const Anvil::BaseDevice* in_device_ptr, + Anvil::ImageType in_type, + Anvil::Format in_format, + Anvil::ImageTiling in_tiling, + Anvil::ImageUsageFlags in_usage, + uint32_t in_base_mipmap_width, + uint32_t in_base_mipmap_height, + uint32_t in_base_mipmap_depth, + uint32_t in_n_layers, + Anvil::SampleCountFlagBits in_sample_count, + Anvil::QueueFamilyFlags in_queue_families, + Anvil::SharingMode in_sharing_mode, + bool in_use_full_mipmap_chain, + ImageCreateFlags in_create_flags, + Anvil::ImageLayout in_post_alloc_image_layout, + const std::vector* in_opt_mipmaps_ptr) +{ + Anvil::ImageCreateInfoUniquePtr result_ptr(nullptr, + std::default_delete() ); + + if ((in_create_flags & Anvil::ImageCreateFlagBits::SPARSE_BINDING_BIT) != 0 || + (in_create_flags & Anvil::ImageCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0) + { + in_post_alloc_image_layout = Anvil::ImageLayout::UNDEFINED; + } + + result_ptr.reset( + new ImageCreateInfo(Anvil::ImageInternalType::NO_ALLOC, + in_device_ptr, + in_type, + in_format, + in_tiling, + in_sharing_mode, + in_usage, + in_base_mipmap_width, + in_base_mipmap_height, + in_base_mipmap_depth, + in_n_layers, + in_sample_count, + in_use_full_mipmap_chain, + in_create_flags, + in_queue_families, + ((in_opt_mipmaps_ptr != nullptr) && (in_opt_mipmaps_ptr->size() > 0)) ? Anvil::ImageLayout::PREINITIALIZED : Anvil::ImageLayout::UNDEFINED, + in_post_alloc_image_layout, + in_opt_mipmaps_ptr, + Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE, + Anvil::ExternalMemoryHandleTypeFlagBits::NONE, + Anvil::MemoryFeatureFlagBits::NONE) + ); + + return result_ptr; +} + +Anvil::ImageCreateInfoUniquePtr Anvil::ImageCreateInfo::create_alloc(const Anvil::BaseDevice* in_device_ptr, + Anvil::ImageType in_type, + Anvil::Format in_format, + Anvil::ImageTiling in_tiling, + Anvil::ImageUsageFlags in_usage, + uint32_t in_base_mipmap_width, + uint32_t in_base_mipmap_height, + uint32_t in_base_mipmap_depth, + uint32_t in_n_layers, + Anvil::SampleCountFlagBits in_sample_count, + Anvil::QueueFamilyFlags in_queue_families, + Anvil::SharingMode in_sharing_mode, + bool in_use_full_mipmap_chain, + MemoryFeatureFlags in_memory_features, + ImageCreateFlags in_create_flags, + Anvil::ImageLayout in_post_alloc_image_layout, + const std::vector* in_opt_mipmaps_ptr) +{ + Anvil::ImageCreateInfoUniquePtr result_ptr(nullptr, + std::default_delete() ); + + anvil_assert((in_create_flags & Anvil::ImageCreateFlagBits::SPARSE_ALIASED_BIT) == 0); + anvil_assert((in_create_flags & Anvil::ImageCreateFlagBits::SPARSE_BINDING_BIT) == 0); + anvil_assert((in_create_flags & Anvil::ImageCreateFlagBits::SPARSE_RESIDENCY_BIT) == 0); + + result_ptr.reset( + new ImageCreateInfo(Anvil::ImageInternalType::ALLOC, + in_device_ptr, + in_type, + in_format, + in_tiling, + in_sharing_mode, + in_usage, + in_base_mipmap_width, + in_base_mipmap_height, + in_base_mipmap_depth, + in_n_layers, + in_sample_count, + in_use_full_mipmap_chain, + in_create_flags, + in_queue_families, + ((in_opt_mipmaps_ptr != nullptr) && (in_opt_mipmaps_ptr->size() > 0)) ? 
Anvil::ImageLayout::PREINITIALIZED : Anvil::ImageLayout::UNDEFINED, + in_post_alloc_image_layout, + in_opt_mipmaps_ptr, + Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE, + Anvil::ExternalMemoryHandleTypeFlagBits::NONE, + in_memory_features) + ); + + return result_ptr; +} + +Anvil::ImageCreateInfoUniquePtr Anvil::ImageCreateInfo::create_peer_no_alloc(const Anvil::BaseDevice* in_device_ptr, + const Anvil::Swapchain* in_swapchain_ptr, + uint32_t in_n_swapchain_image) +{ + const auto memory_features ((in_device_ptr->get_type() == Anvil::DeviceType::MULTI_GPU) ? Anvil::MemoryFeatureFlagBits::MULTI_INSTANCE_BIT + : Anvil::MemoryFeatureFlagBits::NONE); + Anvil::ImageCreateInfoUniquePtr result_ptr (nullptr, + std::default_delete() ); + Anvil::Image* swapchain_image_ptr (in_swapchain_ptr->get_image(in_n_swapchain_image) ); + uint32_t swapchain_image_size[3]{0, 0, 0}; + + swapchain_image_ptr->get_image_mipmap_size(0, /* n_mipmap */ + swapchain_image_size + 0, + swapchain_image_size + 1, + swapchain_image_size + 2); + + result_ptr.reset( + new ImageCreateInfo(Anvil::ImageInternalType::PEER_NO_ALLOC, + in_device_ptr, + Anvil::ImageType::_2D, + in_swapchain_ptr->get_create_info_ptr()->get_format(), + Anvil::ImageTiling::OPTIMAL, + swapchain_image_ptr->get_create_info_ptr()->get_sharing_mode(), + swapchain_image_ptr->get_create_info_ptr()->get_usage_flags (), + swapchain_image_size[0], + swapchain_image_size[1], + 1, /* in_base_mipmap_depth */ + swapchain_image_ptr->get_create_info_ptr()->get_n_layers(), + Anvil::SampleCountFlagBits::_1_BIT, + false, /* in_use_full_mipmap_chain */ + Anvil::ImageCreateFlagBits::NONE, + swapchain_image_ptr->get_create_info_ptr()->get_queue_families(), + Anvil::ImageLayout::UNDEFINED, /* in_post_create_image_layout */ + Anvil::ImageLayout::UNDEFINED, /* in_post_alloc_image_layout */ + nullptr, /* in_opt_mipmaps_ptr */ + Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE, + Anvil::ExternalMemoryHandleTypeFlagBits::NONE, + memory_features) + ); + + result_ptr->set_swapchain (in_swapchain_ptr); + result_ptr->set_swapchain_image_index(in_n_swapchain_image); + + return result_ptr; +} + +Anvil::ImageCreateInfoUniquePtr Anvil::ImageCreateInfo::create_swapchain_wrapper(const Anvil::BaseDevice* in_device_ptr, + const Anvil::Swapchain* in_swapchain_ptr, + const VkImage& in_image, + const uint32_t& in_n_swapchain_image) +{ + Anvil::ImageCreateInfoUniquePtr result_ptr (nullptr, + std::default_delete() ); + const auto& swapchain_create_info_ptr(in_swapchain_ptr->get_create_info_ptr() ); + + result_ptr.reset( + new Anvil::ImageCreateInfo(Anvil::ImageInternalType::SWAPCHAIN_WRAPPER, + in_device_ptr, + Anvil::ImageType::_2D, + swapchain_create_info_ptr->get_format(), + Anvil::ImageTiling::OPTIMAL, + Anvil::SharingMode::EXCLUSIVE, + swapchain_create_info_ptr->get_usage_flags(), + swapchain_create_info_ptr->get_rendering_surface()->get_width (), + swapchain_create_info_ptr->get_rendering_surface()->get_height(), + 1, /* base_mipmap_depth */ + 1, /* in_n_layers */ + Anvil::SampleCountFlagBits::_1_BIT, + false, /* in_use_full_mipmap_chain */ + Anvil::ImageCreateFlagBits::NONE, + Anvil::QueueFamilyFlagBits::NONE, + Anvil::ImageLayout::UNDEFINED, + Anvil::ImageLayout::UNDEFINED, + nullptr, /* in_opt_mipmaps_ptr */ + Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE, + Anvil::ExternalMemoryHandleTypeFlagBits::NONE, + Anvil::MemoryFeatureFlagBits::NONE) + ); + + if (result_ptr != nullptr) + { + result_ptr->m_swapchain_image = in_image; + result_ptr->m_n_swapchain_image = in_n_swapchain_image; + 
result_ptr->m_swapchain_ptr = in_swapchain_ptr; + } + + return result_ptr; +} + +Anvil::ImageCreateInfo::ImageCreateInfo(Anvil::ImageInternalType in_internal_type, + const Anvil::BaseDevice* in_device_ptr, + Anvil::ImageType in_type_vk, + Anvil::Format in_format, + Anvil::ImageTiling in_tiling, + Anvil::SharingMode in_sharing_mode, + Anvil::ImageUsageFlags in_usage, + uint32_t in_base_mipmap_width, + uint32_t in_base_mipmap_height, + uint32_t in_base_mipmap_depth, + uint32_t in_n_layers, + Anvil::SampleCountFlagBits in_sample_count, + bool in_use_full_mipmap_chain, + ImageCreateFlags in_create_flags, + Anvil::QueueFamilyFlags in_queue_families, + Anvil::ImageLayout in_post_create_image_layout, + const Anvil::ImageLayout& in_post_alloc_image_layout, + const std::vector* in_opt_mipmaps_ptr, + const Anvil::MTSafety& in_mt_safety, + Anvil::ExternalMemoryHandleTypeFlags in_exportable_external_memory_handle_types, + const Anvil::MemoryFeatureFlags& in_memory_features) + :m_create_flags (in_create_flags), + m_depth (in_base_mipmap_depth), + m_device_ptr (in_device_ptr), + m_exportable_external_memory_handle_types(in_exportable_external_memory_handle_types), + m_format (in_format), + m_height (in_base_mipmap_height), + m_internal_type (in_internal_type), + m_memory_features (in_memory_features), + m_mipmaps_to_upload ((in_opt_mipmaps_ptr != nullptr) ? *in_opt_mipmaps_ptr : std::vector() ), + m_mt_safety (in_mt_safety), + m_n_layers (in_n_layers), + m_n_swapchain_image (UINT32_MAX), + m_post_alloc_layout (in_post_alloc_image_layout), + m_post_create_layout (in_post_create_image_layout), + m_queue_families (in_queue_families), + m_sample_count (in_sample_count), + m_sharing_mode (in_sharing_mode), + m_swapchain_ptr (nullptr), + m_tiling (in_tiling), + m_type_vk (in_type_vk), + m_usage_flags (in_usage), + m_use_full_mipmap_chain (in_use_full_mipmap_chain), + m_width (in_base_mipmap_width) +{ + #if defined(_DEBUG) + { + if ((m_create_flags & Anvil::ImageCreateFlagBits::BLOCK_TEXEL_VIEW_COMPATIBLE_BIT) != 0) + { + anvil_assert((m_create_flags & Anvil::ImageCreateFlagBits::MUTABLE_FORMAT_BIT) != 0); + anvil_assert(m_use_full_mipmap_chain == false); + anvil_assert(m_n_layers == 1); + } + } + #endif +} + +/* Please see header for specification */ +void Anvil::ImageCreateInfo::get_image_view_formats(uint32_t* out_n_image_view_formats_ptr, + const Anvil::Format** out_image_view_formats_ptr_ptr) const +{ + const uint32_t n_image_view_formats = static_cast(m_image_view_formats.size() ); + + *out_n_image_view_formats_ptr = n_image_view_formats; + *out_image_view_formats_ptr_ptr = (n_image_view_formats != 0) ? &m_image_view_formats.at(0) : nullptr; +} + +/* Please see header for specification */ +void Anvil::ImageCreateInfo::set_image_view_formats(const uint32_t& in_n_image_view_formats, + const Anvil::Format* in_image_view_formats_ptr) +{ + anvil_assert(in_n_image_view_formats != 0); + + m_image_view_formats.resize(in_n_image_view_formats); + + memcpy(&m_image_view_formats.at(0), + in_image_view_formats_ptr, + sizeof(Anvil::Format) * in_n_image_view_formats); +} diff --git a/src/misc/image_view_create_info.cpp b/src/misc/image_view_create_info.cpp new file mode 100644 index 00000000..0ad36131 --- /dev/null +++ b/src/misc/image_view_create_info.cpp @@ -0,0 +1,324 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#include "misc/image_view_create_info.h" + +Anvil::ImageViewCreateInfoUniquePtr Anvil::ImageViewCreateInfo::create_1D(const Anvil::BaseDevice* in_device_ptr, + Image* in_image_ptr, + uint32_t in_n_base_layer, + uint32_t in_n_base_mipmap_level, + uint32_t in_n_mipmaps, + Anvil::ImageAspectFlags in_aspect_mask, + Anvil::Format in_format, + Anvil::ComponentSwizzle in_swizzle_red, + Anvil::ComponentSwizzle in_swizzle_green, + Anvil::ComponentSwizzle in_swizzle_blue, + Anvil::ComponentSwizzle in_swizzle_alpha) +{ + Anvil::ImageViewCreateInfoUniquePtr result_ptr (nullptr, + std::default_delete() ); + const Anvil::ComponentSwizzle swizzle_rgba[] = + { + in_swizzle_red, + in_swizzle_green, + in_swizzle_blue, + in_swizzle_alpha + }; + + result_ptr.reset( + new Anvil::ImageViewCreateInfo(in_aspect_mask, + in_device_ptr, + in_format, + in_n_base_layer, + in_n_base_mipmap_level, + 1, /* in_n_layers */ + in_n_mipmaps, + in_image_ptr, + swizzle_rgba, + Anvil::ImageViewType::_1D, + Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE) + ); + + return result_ptr; +} + +Anvil::ImageViewCreateInfoUniquePtr Anvil::ImageViewCreateInfo::create_1D_array(const Anvil::BaseDevice* in_device_ptr, + Image* in_image_ptr, + uint32_t in_n_base_layer, + uint32_t in_n_layers, + uint32_t in_n_base_mipmap_level, + uint32_t in_n_mipmaps, + Anvil::ImageAspectFlags in_aspect_mask, + Anvil::Format in_format, + Anvil::ComponentSwizzle in_swizzle_red, + Anvil::ComponentSwizzle in_swizzle_green, + Anvil::ComponentSwizzle in_swizzle_blue, + Anvil::ComponentSwizzle in_swizzle_alpha) +{ + Anvil::ImageViewCreateInfoUniquePtr result_ptr (nullptr, + std::default_delete() ); + const Anvil::ComponentSwizzle swizzle_rgba[] = + { + in_swizzle_red, + in_swizzle_green, + in_swizzle_blue, + in_swizzle_alpha + }; + + result_ptr.reset( + new Anvil::ImageViewCreateInfo(in_aspect_mask, + in_device_ptr, + in_format, + in_n_base_layer, + in_n_base_mipmap_level, + in_n_layers, + in_n_mipmaps, + in_image_ptr, + swizzle_rgba, + Anvil::ImageViewType::_1D_ARRAY, + Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE) + ); + + return result_ptr; +} + +Anvil::ImageViewCreateInfoUniquePtr Anvil::ImageViewCreateInfo::create_2D(const Anvil::BaseDevice* in_device_ptr, + Image* in_image_ptr, + uint32_t in_n_base_layer, + uint32_t in_n_base_mipmap_level, + uint32_t in_n_mipmaps, + Anvil::ImageAspectFlags in_aspect_mask, + Anvil::Format in_format, + Anvil::ComponentSwizzle in_swizzle_red, + 
Anvil::ComponentSwizzle in_swizzle_green, + Anvil::ComponentSwizzle in_swizzle_blue, + Anvil::ComponentSwizzle in_swizzle_alpha) +{ + Anvil::ImageViewCreateInfoUniquePtr result_ptr (nullptr, + std::default_delete() ); + const Anvil::ComponentSwizzle swizzle_rgba[] = + { + in_swizzle_red, + in_swizzle_green, + in_swizzle_blue, + in_swizzle_alpha + }; + + result_ptr.reset( + new Anvil::ImageViewCreateInfo(in_aspect_mask, + in_device_ptr, + in_format, + in_n_base_layer, + in_n_base_mipmap_level, + 1, /* in_n_layers */ + in_n_mipmaps, + in_image_ptr, + swizzle_rgba, + Anvil::ImageViewType::_2D, + Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE) + ); + + return result_ptr; +} + +Anvil::ImageViewCreateInfoUniquePtr Anvil::ImageViewCreateInfo::create_2D_array(const Anvil::BaseDevice* in_device_ptr, + Anvil::Image* in_image_ptr, + uint32_t in_n_base_layer, + uint32_t in_n_layers, + uint32_t in_n_base_mipmap_level, + uint32_t in_n_mipmaps, + Anvil::ImageAspectFlags in_aspect_mask, + Anvil::Format in_format, + Anvil::ComponentSwizzle in_swizzle_red, + Anvil::ComponentSwizzle in_swizzle_green, + Anvil::ComponentSwizzle in_swizzle_blue, + Anvil::ComponentSwizzle in_swizzle_alpha) +{ + Anvil::ImageViewCreateInfoUniquePtr result_ptr (nullptr, + std::default_delete() ); + const Anvil::ComponentSwizzle swizzle_rgba[] = + { + in_swizzle_red, + in_swizzle_green, + in_swizzle_blue, + in_swizzle_alpha + }; + + result_ptr.reset( + new Anvil::ImageViewCreateInfo(in_aspect_mask, + in_device_ptr, + in_format, + in_n_base_layer, + in_n_base_mipmap_level, + in_n_layers, + in_n_mipmaps, + in_image_ptr, + swizzle_rgba, + Anvil::ImageViewType::_2D_ARRAY, + Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE) + ); + + return result_ptr; +} + +Anvil::ImageViewCreateInfoUniquePtr Anvil::ImageViewCreateInfo::create_3D(const Anvil::BaseDevice* in_device_ptr, + Image* in_image_ptr, + uint32_t in_n_base_mipmap_level, + uint32_t in_n_mipmaps, + Anvil::ImageAspectFlags in_aspect_mask, + Anvil::Format in_format, + Anvil::ComponentSwizzle in_swizzle_red, + Anvil::ComponentSwizzle in_swizzle_green, + Anvil::ComponentSwizzle in_swizzle_blue, + Anvil::ComponentSwizzle in_swizzle_alpha) +{ + Anvil::ImageViewCreateInfoUniquePtr result_ptr (nullptr, + std::default_delete() ); + const Anvil::ComponentSwizzle swizzle_rgba[] = + { + in_swizzle_red, + in_swizzle_green, + in_swizzle_blue, + in_swizzle_alpha + }; + + result_ptr.reset( + new Anvil::ImageViewCreateInfo(in_aspect_mask, + in_device_ptr, + in_format, + 0, /* in_base_array_layer */ + in_n_base_mipmap_level, + 1, /* in_n_layers */ + in_n_mipmaps, + in_image_ptr, + swizzle_rgba, + Anvil::ImageViewType::_3D, + Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE) + ); + + return result_ptr; +} + +Anvil::ImageViewCreateInfoUniquePtr Anvil::ImageViewCreateInfo::create_cube_map(const Anvil::BaseDevice* in_device_ptr, + Anvil::Image* in_image_ptr, + uint32_t in_n_base_layer, + uint32_t in_n_base_mipmap_level, + uint32_t in_n_mipmaps, + Anvil::ImageAspectFlags in_aspect_mask, + Anvil::Format in_format, + Anvil::ComponentSwizzle in_swizzle_red, + Anvil::ComponentSwizzle in_swizzle_green, + Anvil::ComponentSwizzle in_swizzle_blue, + Anvil::ComponentSwizzle in_swizzle_alpha) +{ + Anvil::ImageViewCreateInfoUniquePtr result_ptr (nullptr, + std::default_delete() ); + const Anvil::ComponentSwizzle swizzle_rgba[] = + { + in_swizzle_red, + in_swizzle_green, + in_swizzle_blue, + in_swizzle_alpha + }; + + result_ptr.reset( + new Anvil::ImageViewCreateInfo(in_aspect_mask, + in_device_ptr, + in_format, + in_n_base_layer, 
+ in_n_base_mipmap_level, + 6, /* in_n_layers */ + in_n_mipmaps, + in_image_ptr, + swizzle_rgba, + Anvil::ImageViewType::_CUBE, + Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE) + ); + + return result_ptr; +} + +Anvil::ImageViewCreateInfoUniquePtr Anvil::ImageViewCreateInfo::create_cube_map_array(const Anvil::BaseDevice* in_device_ptr, + Anvil::Image* in_image_ptr, + uint32_t in_n_base_layer, + uint32_t in_n_cube_maps, + uint32_t in_n_base_mipmap_level, + uint32_t in_n_mipmaps, + Anvil::ImageAspectFlags in_aspect_mask, + Anvil::Format in_format, + Anvil::ComponentSwizzle in_swizzle_red, + Anvil::ComponentSwizzle in_swizzle_green, + Anvil::ComponentSwizzle in_swizzle_blue, + Anvil::ComponentSwizzle in_swizzle_alpha) +{ + Anvil::ImageViewCreateInfoUniquePtr result_ptr (nullptr, + std::default_delete() ); + const Anvil::ComponentSwizzle swizzle_rgba[] = + { + in_swizzle_red, + in_swizzle_green, + in_swizzle_blue, + in_swizzle_alpha + }; + + result_ptr.reset( + new Anvil::ImageViewCreateInfo(in_aspect_mask, + in_device_ptr, + in_format, + in_n_base_layer, + in_n_base_mipmap_level, + in_n_cube_maps * 6, + in_n_mipmaps, + in_image_ptr, + swizzle_rgba, + Anvil::ImageViewType::_CUBE_ARRAY, + Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE) + ); + + return result_ptr; +} + +Anvil::ImageViewCreateInfo::ImageViewCreateInfo(const Anvil::ImageAspectFlags& in_aspect_mask, + const Anvil::BaseDevice* in_device_ptr, + const Anvil::Format in_format, + const uint32_t in_n_base_layer, + const uint32_t in_n_base_mipmap_level, + const uint32_t in_n_layers, + const uint32_t in_n_mipmaps, + Anvil::Image* in_parent_image_ptr, + const Anvil::ComponentSwizzle* in_swizzle_array_ptr, + const Anvil::ImageViewType in_type, + const Anvil::MTSafety& in_mt_safety) + :m_aspect_mask (in_aspect_mask), + m_device_ptr (in_device_ptr), + m_format (in_format), + m_mt_safety (in_mt_safety), + m_n_base_layer (in_n_base_layer), + m_n_base_mipmap_level (in_n_base_mipmap_level), + m_n_layers (in_n_layers), + m_n_mipmaps (in_n_mipmaps), + m_parent_image_ptr (in_parent_image_ptr), + m_sampler_ycbcr_conversion_ptr(nullptr), + m_swizzle_array ({in_swizzle_array_ptr[0], in_swizzle_array_ptr[1], in_swizzle_array_ptr[2], in_swizzle_array_ptr[3]}), + m_type (in_type) +{ + anvil_assert(in_parent_image_ptr != nullptr); +} diff --git a/src/misc/instance_create_info.cpp b/src/misc/instance_create_info.cpp new file mode 100644 index 00000000..5217a423 --- /dev/null +++ b/src/misc/instance_create_info.cpp @@ -0,0 +1,68 @@ +// +// Copyright (c) 2019 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +#include "misc/instance_create_info.h" + +Anvil::InstanceCreateInfo::InstanceCreateInfo(const std::string& in_app_name, + const std::string& in_engine_name, + std::vector &&in_layers, + std::vector &&in_layer_settings, + Anvil::DebugCallbackFunction in_opt_validation_callback_proc, + bool in_mt_safe, + const std::vector& in_opt_disallowed_instance_level_extensions) + :m_api_version (Anvil::APIVersion::UNKNOWN), + m_app_name (in_app_name), + m_app_version (0), + m_disallowed_instance_level_extensions(in_opt_disallowed_instance_level_extensions), + m_engine_name (in_engine_name), + m_engine_version (0), + m_layers (std::move(in_layers)), + m_layer_settings (std::move(in_layer_settings)), + m_is_mt_safe (in_mt_safe), + m_n_memory_type_to_use_for_all_alocs (UINT32_MAX), + m_validation_callback (in_opt_validation_callback_proc) +{ + /* Stub */ +} + +Anvil::InstanceCreateInfoUniquePtr Anvil::InstanceCreateInfo::create(const std::string& in_app_name, + const std::string& in_engine_name, + std::vector &&in_layers, + std::vector &&in_layer_settings, + Anvil::DebugCallbackFunction in_opt_validation_callback_proc, + bool in_mt_safe, + const std::vector& in_opt_disallowed_instance_level_extensions) +{ + Anvil::InstanceCreateInfoUniquePtr result_ptr; + + result_ptr.reset( + new Anvil::InstanceCreateInfo(in_app_name, + in_engine_name, + std::move(in_layers), + std::move(in_layer_settings), + in_opt_validation_callback_proc, + in_mt_safe, + in_opt_disallowed_instance_level_extensions) + ); + anvil_assert(result_ptr != nullptr); + + return result_ptr; +} diff --git a/src/misc/io.cpp b/src/misc/io.cpp index 5dd4a251..7f6b4b9f 100644 --- a/src/misc/io.cpp +++ b/src/misc/io.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -30,6 +30,7 @@ #include #include #include +#include #ifdef _WIN32 #include @@ -165,7 +166,7 @@ bool Anvil::IO::enumerate_files_in_directory(const std::string& in_path, { std::wstring file_name_wide (find_data.cFileName); std::wstring file_name_with_path_wide(current_path_wide + file_name_wide); - std::string file_name (file_name_with_path_wide.begin(), file_name_with_path_wide.end() ); + std::string file_name = std::wstring_convert>().to_bytes(file_name_with_path_wide); out_result_ptr->push_back(file_name); } @@ -415,10 +416,7 @@ bool Anvil::IO::read_file(std::string in_filename, if (!result_bool) { - if (result != nullptr) - { - delete [] result; - } + delete [] result; } return result_bool; @@ -543,10 +541,7 @@ bool Anvil::IO::read_file(std::string in_filename, if (!result_bool) { - if (result != nullptr) - { - delete [] result; - } + delete [] result; } return result_bool; diff --git a/src/misc/library.cpp b/src/misc/library.cpp new file mode 100644 index 00000000..5bc50d39 --- /dev/null +++ b/src/misc/library.cpp @@ -0,0 +1,121 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#include "misc/debug.h" +#include "misc/library.h" + +#if !defined(_WIN32) + #include <dlfcn.h> +#endif + +Anvil::Library::Library(const char* in_dll_name) + :m_dll_name(in_dll_name) +{ + anvil_assert(in_dll_name != nullptr); +} + +Anvil::Library::~Library() +{ + if (m_handle != nullptr) + { + #if defined(_WIN32) + { + ::FreeLibrary(reinterpret_cast<HMODULE>(m_handle) ); + } + #else + { + dlclose(m_handle); + } + #endif + } +} + +Anvil::LibraryUniquePtr Anvil::Library::create(const char* in_dll_name) +{ + Anvil::LibraryUniquePtr result_ptr; + + result_ptr.reset( + new Anvil::Library(in_dll_name) + ); + + if (result_ptr != nullptr) + { + if (!result_ptr->init() ) + { + result_ptr.reset(); + } + } + + return result_ptr; +} + +void* Anvil::Library::get_proc_address(const char* in_func_name) const +{ + void* result_ptr = nullptr; + + #if defined(_WIN32) + { + result_ptr = ::GetProcAddress(reinterpret_cast<HMODULE>(m_handle), + in_func_name); + } + #else + { + result_ptr = dlsym(m_handle, + in_func_name); + } + #endif + + return result_ptr; +} + +bool Anvil::Library::init() +{ + bool result = false; + + #if defined(_WIN32) + { + m_handle = reinterpret_cast<void*>(::LoadLibrary(m_dll_name.c_str() )); + + /* LoadLibrary() reports failure by returning NULL, not INVALID_HANDLE_VALUE */ + if (m_handle == nullptr) + { + anvil_assert(m_handle != nullptr); + + goto end; + } + } + #else + { + m_handle = reinterpret_cast<void*>(dlopen(m_dll_name.c_str(), + RTLD_LAZY) ); + + if (m_handle == nullptr) + { + anvil_assert(m_handle != nullptr); + + goto end; + } + } + #endif + + result = true; +end: + return result; +} \ No newline at end of file diff --git a/src/misc/memalloc_backends/backend_oneshot.cpp b/src/misc/memalloc_backends/backend_oneshot.cpp index 5a9c3eed..b28427df 100644 --- a/src/misc/memalloc_backends/backend_oneshot.cpp +++ b/src/misc/memalloc_backends/backend_oneshot.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved.
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -23,15 +23,19 @@ #include "misc/memalloc_backends/backend_oneshot.h" #include "misc/debug.h" #include "misc/formats.h" +#include "misc/image_create_info.h" #include "misc/memory_allocator.h" +#include "misc/memory_block_create_info.h" #include "wrappers/buffer.h" #include "wrappers/device.h" #include "wrappers/image.h" #include "wrappers/memory_block.h" #include +#include +#include /** Please see header for specification */ -Anvil::MemoryAllocatorBackends::OneShot::OneShot(std::weak_ptr in_device_ptr) +Anvil::MemoryAllocatorBackends::OneShot::OneShot(const Anvil::BaseDevice* in_device_ptr) :m_device_ptr(in_device_ptr), m_is_baked (false) { @@ -44,6 +48,40 @@ Anvil::MemoryAllocatorBackends::OneShot::~OneShot() /* Stub */ } + +namespace +{ + typedef struct MemoryUniqueInfo + { + uint32_t device_mask; + float memory_priority; + + MemoryUniqueInfo(const uint32_t& in_device_mask, + const float& in_memory_priority) + :device_mask (in_device_mask), + memory_priority(in_memory_priority) + { + /* Stub */ + } + + bool operator==(const MemoryUniqueInfo& in_info) const + { + if ((in_info.device_mask == device_mask) && (std::fabs(in_info.memory_priority - memory_priority) < 1e-4f)) + { + return true; + } + return false; + } + + bool operator<(const MemoryUniqueInfo& in_info) const + { + return (device_mask < in_info.device_mask) ? true + : ((device_mask == in_info.device_mask) && (memory_priority < in_info.memory_priority)); + } + + } MemoryUniqueInfo; +} + /** Tries to create a memory object of size large enough to capacitate all added objects, * given their alignment, size, and other requirements. * @@ -59,90 +97,313 @@ Anvil::MemoryAllocatorBackends::OneShot::~OneShot() **/ bool Anvil::MemoryAllocatorBackends::OneShot::bake(Anvil::MemoryAllocator::Items& in_items) { - std::shared_ptr device_locked_ptr (m_device_ptr); - const auto& memory_props (device_locked_ptr->get_physical_device_memory_properties() ); - const uint32_t n_memory_types (static_cast(memory_props.types.size() )); - std::vector > > per_mem_type_items_vector (n_memory_types); - bool result (true); + typedef struct UniqueAllocationInfo + { + Anvil::MemoryAllocator::Item* item_ptr; + uint32_t n_memory_type; + + UniqueAllocationInfo(Anvil::MemoryAllocator::Item* in_item_ptr, + const uint32_t& in_n_memory_type) + :item_ptr (in_item_ptr), + n_memory_type(in_n_memory_type) + { + /* Stub */ + } + } UniqueAllocationInfo; + + std::map > > device_mask_to_mem_type_to_item_vec; + const auto& memory_props (m_device_ptr->get_physical_device_memory_properties() ); + const uint32_t n_memory_types (static_cast(memory_props.types.size()) ); + bool result (true); + std::vector unique_allocs; /* Iterate over all block items and determine what memory types we can use. * * In certain cases, we may need to suballocate from more than one memory block, * due to the fact not all memory heaps may support features requested at * creation time. + * + * Dedicated allocations need to get their own memory blocks, too. + * + * Allocations which can be exported via external handles are tricky. Drivers do NOT need to request such allocations to be + * dedicated. However, each exportable memory allocation comes with a set of parameters (NT handle details, as an example) + * which make merging memory blocks tricky. Hence, for exportable allocs, just put each such alloc in its own memory block. 
+ * + * Since we can only specify one device mask per memory allocation, allocation need to be further subdivided by device masks + * they have been associated with. + * */ for (auto item_iterator = in_items.begin(); item_iterator != in_items.end(); ++item_iterator) { /* Assign the item to supported memory types */ + const auto& required_memory_features = ((*item_iterator)->alloc_memory_required_features); + const auto& supported_memory_types = (*item_iterator)->alloc_memory_supported_memory_types; + for (uint32_t n_memory_type = 0; (1u << n_memory_type) <= (*item_iterator)->alloc_memory_supported_memory_types; ++n_memory_type) { - if (!((*item_iterator)->alloc_memory_supported_memory_types & (1 << n_memory_type)) ) + if (!(supported_memory_types & (1 << n_memory_type)) ) + { + continue; + } + + if ((memory_props.types.at(n_memory_type).features & static_cast(required_memory_features.get_vk() )) != required_memory_features) { continue; } - per_mem_type_items_vector.at(n_memory_type).push_back((*item_iterator) ); + if ((*item_iterator)->alloc_mgpu_peer_memory_reqs.size() > 0) + { + /* Make sure current memory type supports all required peer memory requirements specified by the user */ + bool can_continue = true; + auto mgpu_device_ptr = dynamic_cast(m_device_ptr); + + anvil_assert(m_device_ptr->get_type() == Anvil::DeviceType::MULTI_GPU); + anvil_assert(mgpu_device_ptr != nullptr); + + for (const auto& current_req : (*item_iterator)->alloc_mgpu_peer_memory_reqs) + { + Anvil::PeerMemoryFeatureFlags current_memory_type_peer_memory_features; + const auto& local_device_index = current_req.first.first; + const auto local_device_ptr = mgpu_device_ptr->get_physical_device(local_device_index); + const auto& remote_device_index = current_req.first.second; + const auto remote_device_ptr = mgpu_device_ptr->get_physical_device(remote_device_index); + const auto& required_peer_memory_features = current_req.second; + + if (!mgpu_device_ptr->get_peer_memory_features(local_device_ptr, + remote_device_ptr, + memory_props.types.at(n_memory_type).heap_ptr->index, + ¤t_memory_type_peer_memory_features) ) + { + anvil_assert_fail(); + + can_continue = false; + break; + } + + if ((current_memory_type_peer_memory_features & required_peer_memory_features) != required_peer_memory_features) + { + can_continue = false; + + break; + } + } + + if (!can_continue) + { + result = false; + + goto end; + } + } + + if ((*item_iterator)->alloc_is_dedicated_memory || + (*item_iterator)->alloc_exportable_external_handle_types != 0) + { + unique_allocs.push_back(UniqueAllocationInfo(item_iterator->get(), + n_memory_type)); + } + else + { + const MemoryUniqueInfo unique_info((*item_iterator)->alloc_device_mask, + (*item_iterator)->memory_priority); + + if (device_mask_to_mem_type_to_item_vec.find(unique_info) == device_mask_to_mem_type_to_item_vec.end() ) + { + device_mask_to_mem_type_to_item_vec[unique_info].resize(n_memory_types); + } + + device_mask_to_mem_type_to_item_vec.at(unique_info).at(n_memory_type).push_back((item_iterator->get() ) ); + } + break; } } - /* For each memory type, for each there's at least one item, bake a memory block */ + /* For each dedicated allocation, create one and associate it with the parent object. 
*/ + for (const auto& current_unique_alloc : unique_allocs) { - std::map, VkDeviceSize> alloc_offset_map; - uint32_t current_memory_type_index(0); + Anvil::MemoryBlockUniquePtr new_memory_block_ptr_derived(nullptr, + std::default_delete() ); + Anvil::MemoryBlockUniquePtr new_memory_block_ptr_regular(nullptr, + std::default_delete() ); - for (auto mem_type_to_item_vector_iterator = per_mem_type_items_vector.begin(); - mem_type_to_item_vector_iterator != per_mem_type_items_vector.end(); - ++mem_type_to_item_vector_iterator, ++current_memory_type_index) + /* Bake the block and stash it */ { - auto& current_item_vector = *mem_type_to_item_vector_iterator; + auto create_info_ptr = Anvil::MemoryBlockCreateInfo::create_regular(m_device_ptr, + 1u << current_unique_alloc.n_memory_type, + current_unique_alloc.item_ptr->alloc_size, + (memory_props.types[current_unique_alloc.n_memory_type].features) ); - if (current_item_vector.size() > 0) + create_info_ptr->set_memory_priority(current_unique_alloc.item_ptr->memory_priority); + create_info_ptr->set_device_mask (current_unique_alloc.item_ptr->alloc_device_mask); + create_info_ptr->set_mt_safety (Anvil::Utils::convert_boolean_to_mt_safety_enum(m_device_ptr->is_mt_safe()) ); + + if (current_unique_alloc.item_ptr->alloc_is_dedicated_memory) { - std::shared_ptr new_memory_block_ptr; - VkDeviceSize n_bytes_required = 0; + create_info_ptr->use_dedicated_allocation(current_unique_alloc.item_ptr->buffer_ptr, + current_unique_alloc.item_ptr->image_ptr); + } - /* Go through the items, calculate offsets and the total amount of memory we're going - * to need to alloc off the heap */ - for (auto& current_item_ptr : current_item_vector) - { - n_bytes_required = Anvil::Utils::round_up(n_bytes_required, - current_item_ptr->alloc_memory_required_alignment); + if (current_unique_alloc.item_ptr->alloc_exportable_external_handle_types != 0) + { + create_info_ptr->set_exportable_external_memory_handle_types(current_unique_alloc.item_ptr->alloc_exportable_external_handle_types); + } - alloc_offset_map[current_item_ptr] = n_bytes_required; - n_bytes_required += current_item_ptr->alloc_size; + #if defined(_WIN32) + { + if (current_unique_alloc.item_ptr->alloc_external_nt_handle_info_ptr != nullptr) + { + create_info_ptr->set_exportable_nt_handle_info(current_unique_alloc.item_ptr->alloc_external_nt_handle_info_ptr->attributes_ptr, + current_unique_alloc.item_ptr->alloc_external_nt_handle_info_ptr->access, + current_unique_alloc.item_ptr->alloc_external_nt_handle_info_ptr->name); } + } + #endif - /* Bake the block and stash it */ - new_memory_block_ptr = Anvil::MemoryBlock::create(m_device_ptr, - 1u << current_memory_type_index, - n_bytes_required, - (memory_props.types[current_memory_type_index].features)); + new_memory_block_ptr_regular = Anvil::MemoryBlock::create(std::move(create_info_ptr) ); + } - if (new_memory_block_ptr == nullptr) - { - anvil_assert(new_memory_block_ptr != nullptr); + if (new_memory_block_ptr_regular == nullptr) + { + anvil_assert(new_memory_block_ptr_regular != nullptr); - result = false; - continue; - } + result = false; + continue; + } + + { + auto create_info_ptr = Anvil::MemoryBlockCreateInfo::create_derived(new_memory_block_ptr_regular.get(), + 0, /* in_start_offset */ + current_unique_alloc.item_ptr->alloc_size); + + new_memory_block_ptr_derived = Anvil::MemoryBlock::create(std::move(create_info_ptr) ); + + current_unique_alloc.item_ptr->alloc_memory_block_ptr = std::move(new_memory_block_ptr_regular); + current_unique_alloc.item_ptr->is_baked = 
(current_unique_alloc.item_ptr->alloc_memory_block_ptr != nullptr); + } + + dynamic_cast(current_unique_alloc.item_ptr->alloc_memory_block_ptr.get() )->set_parent_memory_allocator_backend_ptr(shared_from_this(), + reinterpret_cast(new_memory_block_ptr_derived->get_memory() )); - /* Go through the items again and assign the result memory block */ - for (auto& current_item_ptr : current_item_vector) + m_memory_blocks.push_back( + std::move(new_memory_block_ptr_derived) + ); + } + + /* For each memory type, for each there's at least one item, bake a memory block */ + { + for (auto device_mask_to_mem_type_to_item_vector_iterator = device_mask_to_mem_type_to_item_vec.begin(); + device_mask_to_mem_type_to_item_vector_iterator != device_mask_to_mem_type_to_item_vec.end(); + ++device_mask_to_mem_type_to_item_vector_iterator) + { + std::map alloc_offset_map; + auto& current_memory_info_to_item_vector_data = *device_mask_to_mem_type_to_item_vector_iterator; + const auto& current_memory_info = current_memory_info_to_item_vector_data.first; + uint32_t current_memory_type_index = 0; + + for (const auto& current_items : current_memory_info_to_item_vector_data.second) + { + if (current_items.size() > 0) { - current_item_ptr->alloc_memory_block_ptr = Anvil::MemoryBlock::create_derived(new_memory_block_ptr, - alloc_offset_map.at(current_item_ptr), - current_item_ptr->alloc_size); + Anvil::MemoryBlockUniquePtr new_memory_block_ptr(nullptr, + std::default_delete() ); + VkDeviceSize n_bytes_required (0); + + /* Go through the items, calculate offsets and the total amount of memory we're going + * to need to alloc off the heap */ + { + bool is_prev_item_linear = false; + Anvil::MemoryAllocator::Item* prev_item_ptr = nullptr; + + for (auto& current_item_ptr : current_items) + { + const bool is_current_item_buffer = (current_item_ptr->type == Anvil::MemoryAllocator::ITEM_TYPE_BUFFER || + current_item_ptr->type == Anvil::MemoryAllocator::ITEM_TYPE_SPARSE_BUFFER_REGION); + const bool is_current_item_image = (current_item_ptr->type == Anvil::MemoryAllocator::ITEM_TYPE_IMAGE_WHOLE || + current_item_ptr->type == Anvil::MemoryAllocator::ITEM_TYPE_SPARSE_IMAGE_MIPTAIL || + current_item_ptr->type == Anvil::MemoryAllocator::ITEM_TYPE_SPARSE_IMAGE_SUBRESOURCE); + const bool is_current_item_linear = (is_current_item_buffer) || + (is_current_item_image && current_item_ptr->image_ptr->get_create_info_ptr()->get_tiling() == Anvil::ImageTiling::LINEAR); + + anvil_assert(current_item_ptr->alloc_exportable_external_handle_types == 0); + + #if defined(_WIN32) + anvil_assert(current_item_ptr->alloc_external_nt_handle_info_ptr == nullptr); + #endif + + n_bytes_required = Anvil::Utils::round_up(n_bytes_required, + current_item_ptr->alloc_memory_required_alignment); + + if (prev_item_ptr != nullptr) + { + /* Make sure to adhere to the buffer-image granularity requirement */ + if (is_prev_item_linear != is_current_item_linear) + { + n_bytes_required = Anvil::Utils::round_up(n_bytes_required, + m_device_ptr->get_physical_device_properties().core_vk1_0_properties_ptr->limits.buffer_image_granularity); + } + } + + alloc_offset_map[current_item_ptr] = n_bytes_required; + n_bytes_required += current_item_ptr->alloc_size; + + is_prev_item_linear = is_current_item_linear; + prev_item_ptr = current_item_ptr; + } + } + + /* Bake the block and stash it */ + { + auto create_info_ptr = Anvil::MemoryBlockCreateInfo::create_regular(m_device_ptr, + 1u << current_memory_type_index, + n_bytes_required, + 
(memory_props.types[current_memory_type_index].features) ); + + create_info_ptr->set_memory_priority(current_memory_info_to_item_vector_data.first.memory_priority); + create_info_ptr->set_device_mask (current_memory_info.device_mask); + create_info_ptr->set_mt_safety (Anvil::Utils::convert_boolean_to_mt_safety_enum(m_device_ptr->is_mt_safe()) ); + + new_memory_block_ptr = Anvil::MemoryBlock::create(std::move(create_info_ptr) ); + } + + if (new_memory_block_ptr == nullptr) + { + anvil_assert(new_memory_block_ptr != nullptr); - if (current_item_ptr->alloc_memory_block_ptr != nullptr) + result = false; + continue; + } + + /* Go through the items again and assign the result memory block */ + for (auto& current_item_ptr : current_items) { - current_item_ptr->is_baked = true; + { + auto create_info_ptr = Anvil::MemoryBlockCreateInfo::create_derived(new_memory_block_ptr.get(), + alloc_offset_map.at(current_item_ptr), + current_item_ptr->alloc_size); + + current_item_ptr->alloc_memory_block_ptr = Anvil::MemoryBlock::create(std::move(create_info_ptr) ); + } + + if (current_item_ptr->alloc_memory_block_ptr != nullptr) + { + current_item_ptr->is_baked = true; + } + + dynamic_cast(current_item_ptr->alloc_memory_block_ptr.get() )->set_parent_memory_allocator_backend_ptr(shared_from_this(), + reinterpret_cast(new_memory_block_ptr->get_memory() )); } + + m_memory_blocks.push_back( + std::move(new_memory_block_ptr) + ); } + + ++current_memory_type_index; } } } @@ -154,9 +415,26 @@ bool Anvil::MemoryAllocatorBackends::OneShot::bake(Anvil::MemoryAllocator::Items */ anvil_assert(result); +end: return result; } +VkResult Anvil::MemoryAllocatorBackends::OneShot::map(void* in_memory_object, + VkDeviceSize in_start_offset, + VkDeviceSize in_memory_block_start_offset, + VkDeviceSize in_size, + void** out_result_ptr) +{ + ANVIL_REDUNDANT_VARIABLE(in_memory_block_start_offset); + + return Anvil::Vulkan::vkMapMemory(m_device_ptr->get_device_vk(), + reinterpret_cast(in_memory_object), + in_start_offset, + in_size, + 0, /* flags */ + out_result_ptr); +} + /** Tells whether or not the backend is ready to handle allocation request. * * One-shot memory allocator backend will return true until the first bake() invocation, @@ -166,4 +444,25 @@ bool Anvil::MemoryAllocatorBackends::OneShot::bake(Anvil::MemoryAllocator::Items bool Anvil::MemoryAllocatorBackends::OneShot::supports_baking() const { return !m_is_baked; -} \ No newline at end of file +} + +bool Anvil::MemoryAllocatorBackends::OneShot::supports_device_masks() const +{ + return true; +} + +bool Anvil::MemoryAllocatorBackends::OneShot::supports_external_memory_handles(const Anvil::ExternalMemoryHandleTypeFlags&) const +{ + return true; +} + +bool Anvil::MemoryAllocatorBackends::OneShot::supports_protected_memory() const +{ + return true; +} + +void Anvil::MemoryAllocatorBackends::OneShot::unmap(void* in_memory_object) +{ + Anvil::Vulkan::vkUnmapMemory(m_device_ptr->get_device_vk(), + reinterpret_cast(in_memory_object) ); +} diff --git a/src/misc/memalloc_backends/backend_vma.cpp b/src/misc/memalloc_backends/backend_vma.cpp index 00405d6a..eac903dd 100644 --- a/src/misc/memalloc_backends/backend_vma.cpp +++ b/src/misc/memalloc_backends/backend_vma.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -21,9 +21,11 @@ // #include "misc/debug.h" +#include "misc/memory_block_create_info.h" #include "wrappers/device.h" #include "wrappers/memory_block.h" #include "wrappers/physical_device.h" +#include "wrappers/instance.h" /* Inject Vulkan Memory Allocator impl (ignore any warnings reported for the library) ==> */ #define VMA_IMPLEMENTATION @@ -51,7 +53,7 @@ /* Please see header for specification */ -Anvil::MemoryAllocatorBackends::VMA::VMAAllocator::VMAAllocator(std::weak_ptr in_device_ptr) +Anvil::MemoryAllocatorBackends::VMA::VMAAllocator::VMAAllocator(const Anvil::BaseDevice* in_device_ptr) :m_allocator (nullptr), m_device_ptr(in_device_ptr) { @@ -70,7 +72,7 @@ Anvil::MemoryAllocatorBackends::VMA::VMAAllocator::~VMAAllocator() } /* Please see header for specifications */ -std::shared_ptr Anvil::MemoryAllocatorBackends::VMA::VMAAllocator::create(std::weak_ptr in_device_ptr) +std::shared_ptr Anvil::MemoryAllocatorBackends::VMA::VMAAllocator::create(const Anvil::BaseDevice* in_device_ptr) { std::shared_ptr result_ptr; @@ -95,17 +97,70 @@ std::shared_ptr Anvil::Memory **/ bool Anvil::MemoryAllocatorBackends::VMA::VMAAllocator::init() { - VmaAllocatorCreateInfo create_info; - std::shared_ptr device_locked_ptr(m_device_ptr); - VkResult result (VK_ERROR_DEVICE_LOST); + VmaAllocatorCreateInfo create_info = {}; + const bool khr_dedicated_allocation_supported = m_device_ptr->get_extension_info()->khr_dedicated_allocation(); + VkResult result = VK_ERROR_DEVICE_LOST; - switch (device_locked_ptr->get_type() ) + /* Prepare VK func ptr array */ + m_vma_func_ptrs.reset( + new VmaVulkanFunctions() + ); + + if (m_vma_func_ptrs == nullptr) + { + anvil_assert(m_vma_func_ptrs != nullptr); + + goto end; + } + + m_vma_func_ptrs->vkAllocateMemory = Vulkan::vkAllocateMemory; + m_vma_func_ptrs->vkBindBufferMemory = Vulkan::vkBindBufferMemory; + m_vma_func_ptrs->vkBindImageMemory = Vulkan::vkBindImageMemory; + m_vma_func_ptrs->vkCreateBuffer = Vulkan::vkCreateBuffer; + m_vma_func_ptrs->vkCreateImage = Vulkan::vkCreateImage; + m_vma_func_ptrs->vkDestroyBuffer = Vulkan::vkDestroyBuffer; + m_vma_func_ptrs->vkDestroyImage = Vulkan::vkDestroyImage; + m_vma_func_ptrs->vkFreeMemory = Vulkan::vkFreeMemory; + m_vma_func_ptrs->vkGetBufferMemoryRequirements = Vulkan::vkGetBufferMemoryRequirements; + m_vma_func_ptrs->vkGetImageMemoryRequirements = Vulkan::vkGetImageMemoryRequirements; + m_vma_func_ptrs->vkGetPhysicalDeviceMemoryProperties = Vulkan::vkGetPhysicalDeviceMemoryProperties; + m_vma_func_ptrs->vkGetPhysicalDeviceProperties = Vulkan::vkGetPhysicalDeviceProperties; + m_vma_func_ptrs->vkMapMemory = Vulkan::vkMapMemory; + m_vma_func_ptrs->vkUnmapMemory = Vulkan::vkUnmapMemory; + + if (m_device_ptr->get_extension_info()->khr_get_memory_requirements2() ) + { + m_vma_func_ptrs->vkGetBufferMemoryRequirements2KHR = m_device_ptr->get_extension_khr_get_memory_requirements2_entrypoints().vkGetBufferMemoryRequirements2KHR; + m_vma_func_ptrs->vkGetImageMemoryRequirements2KHR = m_device_ptr->get_extension_khr_get_memory_requirements2_entrypoints().vkGetImageMemoryRequirements2KHR; + } + else + { + m_vma_func_ptrs->vkGetBufferMemoryRequirements2KHR = nullptr; + m_vma_func_ptrs->vkGetImageMemoryRequirements2KHR = nullptr; + } + + /* Prepare VMA create info struct */ + switch (m_device_ptr->get_type() ) { - case Anvil::DEVICE_TYPE_SINGLE_GPU: + case 
Anvil::DeviceType::MULTI_GPU: { - std::shared_ptr sgpu_device_locked_ptr(std::dynamic_pointer_cast(device_locked_ptr) ); + /* VMA library takes a physical device handle to extract info regarding supported + * memory types and the like. As VK_KHR_device_group provide explicit mGPU support, + * it is guaranteed all physical devices within a logical device offer exactly the + * same capabilities. This means we're safe to pass zeroth physical device to the + * library, and everything will still be OK. + */ + const Anvil::MGPUDevice* mgpu_device_ptr(dynamic_cast(m_device_ptr) ); + + create_info.physicalDevice = mgpu_device_ptr->get_physical_device(0)->get_physical_device(); + break; + } - create_info.physicalDevice = sgpu_device_locked_ptr->get_physical_device().lock()->get_physical_device(); + case Anvil::DeviceType::SINGLE_GPU: + { + const Anvil::SGPUDevice* sgpu_device_ptr(dynamic_cast(m_device_ptr) ); + + create_info.physicalDevice = sgpu_device_ptr->get_physical_device()->get_physical_device(); break; } @@ -115,20 +170,25 @@ bool Anvil::MemoryAllocatorBackends::VMA::VMAAllocator::init() } } - create_info.device = device_locked_ptr->get_device_vk(); + create_info.flags = (khr_dedicated_allocation_supported) ? VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT : 0; + create_info.device = m_device_ptr->get_device_vk(); create_info.pAllocationCallbacks = nullptr; create_info.preferredLargeHeapBlockSize = 0; - create_info.preferredSmallHeapBlockSize = 0; + create_info.pVulkanFunctions = m_vma_func_ptrs.get(); + + //Ideally we need user to supply this. + create_info.instance = m_device_ptr->get_parent_instance()->get_instance_vk(); result = vmaCreateAllocator(&create_info, &m_allocator); anvil_assert_vk_call_succeeded(result); +end: return is_vk_call_successful(result); } /** Please see header for specification */ -Anvil::MemoryAllocatorBackends::VMA::VMA(std::weak_ptr in_device_ptr) +Anvil::MemoryAllocatorBackends::VMA::VMA(const Anvil::BaseDevice* in_device_ptr) :m_device_ptr(in_device_ptr) { /* Stub */ @@ -162,63 +222,96 @@ bool Anvil::MemoryAllocatorBackends::VMA::bake(Anvil::MemoryAllocator::Items& in */ for (auto& current_item_ptr : in_items) { + anvil_assert(current_item_ptr->memory_priority == FLT_MAX); /* VMA doesn't support memory_priority */ + + MemoryBlockUniquePtr new_memory_block_ptr(nullptr, + std::default_delete() ); + + VmaAllocation allocation = VK_NULL_HANDLE; + VmaAllocationCreateInfo allocation_create_info = {}; + VmaAllocationInfo allocation_info = {}; + bool is_dedicated_alloc = false; VkMemoryRequirements memory_requirements_vk; - VmaMemoryRequirements memory_requirements_vma; - std::shared_ptr new_memory_block_ptr; Anvil::OnMemoryBlockReleaseCallbackFunction release_callback_function; - VkMemoryHeapFlags required_mem_heap_flags = 0; - VkMemoryPropertyFlags required_mem_property_flags = 0; - uint32_t result_mem_type_index = UINT32_MAX; - VkMappedMemoryRange result_mem_range; + Anvil::MemoryHeapFlags required_mem_heap_flags; + Anvil::MemoryPropertyFlags required_mem_property_flags; Anvil::Utils::get_vk_property_flags_from_memory_feature_flags(current_item_ptr->alloc_memory_required_features, &required_mem_property_flags, &required_mem_heap_flags); /* NOTE: VMA does not take required memory heap flags at the moment. Adding this is on their radar. 
*/ - anvil_assert(required_mem_heap_flags == 0); + anvil_assert(required_mem_heap_flags == Anvil::MemoryHeapFlagBits::NONE); memory_requirements_vk.alignment = current_item_ptr->alloc_memory_required_alignment; memory_requirements_vk.memoryTypeBits = current_item_ptr->alloc_memory_supported_memory_types; memory_requirements_vk.size = current_item_ptr->alloc_size; - memory_requirements_vma.neverAllocate = false; - memory_requirements_vma.ownMemory = false; - memory_requirements_vma.preferredFlags = 0; - memory_requirements_vma.requiredFlags = required_mem_property_flags; - memory_requirements_vma.usage = VMA_MEMORY_USAGE_UNKNOWN; + allocation_create_info.flags = (current_item_ptr->alloc_is_dedicated_memory) ? VmaAllocationCreateFlagBits::VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT + : 0; + allocation_create_info.requiredFlags = required_mem_property_flags.get_vk(); result_vk = vmaAllocateMemory(m_vma_allocator_ptr->get_handle(), &memory_requirements_vk, - &memory_requirements_vma, - &result_mem_range, - &result_mem_type_index); - + &allocation_create_info, + &allocation, + &allocation_info); if (!is_vk_call_successful(result_vk) ) { + anvil_assert_with_msg(is_vk_call_successful(result_vk), (std::string {"vmaAllocateMemory failed: "} +std::to_string(result_vk)).c_str()); result = false; continue; } - - anvil_assert(result_mem_range.pNext == nullptr); - anvil_assert(result_mem_range.sType == VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE); + else + { + is_dedicated_alloc = (allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); + } + if (!current_item_ptr->debug_name.empty()) + vmaSetAllocationName(m_vma_allocator_ptr->get_handle(), allocation, current_item_ptr->debug_name.c_str()); /* Bake the block and stash it */ release_callback_function = std::bind( &VMAAllocator::on_vma_alloced_mem_block_gone_out_of_scope, m_vma_allocator_ptr, - std::placeholders::_1 + std::placeholders::_1, + allocation ); - new_memory_block_ptr = Anvil::MemoryBlock::create_derived_with_custom_delete_proc(m_device_ptr, - result_mem_range.memory, - memory_requirements_vk.memoryTypeBits, - current_item_ptr->alloc_memory_required_features, - result_mem_type_index, - memory_requirements_vk.size, - result_mem_range.offset, - release_callback_function); + { + auto create_info_ptr = Anvil::MemoryBlockCreateInfo::create_derived_with_custom_delete_proc(m_device_ptr, + allocation_info.deviceMemory, + memory_requirements_vk.memoryTypeBits, + current_item_ptr->alloc_memory_required_features, + allocation_info.memoryType, + memory_requirements_vk.size, + allocation_info.offset, + release_callback_function); + + if (is_dedicated_alloc) + { + if (current_item_ptr->type == Anvil::MemoryAllocator::ITEM_TYPE_BUFFER || + current_item_ptr->type == Anvil::MemoryAllocator::ITEM_TYPE_SPARSE_BUFFER_REGION) + { + anvil_assert(current_item_ptr->buffer_ptr != nullptr); + + create_info_ptr->use_dedicated_allocation(current_item_ptr->buffer_ptr, + nullptr); /* in_opt_image_ptr */ + } + else + if (current_item_ptr->type == Anvil::MemoryAllocator::ITEM_TYPE_IMAGE_WHOLE || + current_item_ptr->type == Anvil::MemoryAllocator::ITEM_TYPE_SPARSE_IMAGE_MIPTAIL || + current_item_ptr->type == Anvil::MemoryAllocator::ITEM_TYPE_SPARSE_IMAGE_SUBRESOURCE) + { + anvil_assert(current_item_ptr->image_ptr != nullptr); + + create_info_ptr->use_dedicated_allocation(nullptr, /* in_opt_buffer_ptr */ + current_item_ptr->image_ptr); + } + } + + new_memory_block_ptr = Anvil::MemoryBlock::create(std::move(create_info_ptr) ); + } if (new_memory_block_ptr == nullptr) { 
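(Aside, not part of the patch: the hunk above moves the bake path onto the stock Vulkan Memory Allocator C API — vmaAllocateMemory driven by a VmaAllocationCreateInfo, with VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT for dedicated allocations and the resulting VmaAllocationInfo feeding the derived memory block. The sketch below shows that raw VMA flow in isolation, under assumptions: it is hypothetical illustration code, not Anvil code; the handles, the memory requirements, and the helper names create_allocator/allocate/release are invented for the example, and DEVICE_LOCAL is just an example requiredFlags choice.)

// Minimal, self-contained sketch of the VMA calls used by the hunk above.
// Assumes valid VkPhysicalDevice/VkDevice/VkInstance handles and that the
// VMA implementation is compiled in elsewhere (VMA_IMPLEMENTATION).
#include <vk_mem_alloc.h>

VmaAllocator create_allocator(VkPhysicalDevice gpu, VkDevice device, VkInstance instance)
{
    VmaAllocatorCreateInfo create_info = {};
    create_info.physicalDevice = gpu;
    create_info.device         = device;
    create_info.instance       = instance;

    VmaAllocator allocator = VK_NULL_HANDLE;
    if (vmaCreateAllocator(&create_info, &allocator) != VK_SUCCESS)
        return VK_NULL_HANDLE;
    return allocator;
}

VmaAllocation allocate(VmaAllocator                allocator,
                       const VkMemoryRequirements& mem_reqs,
                       bool                        use_dedicated_alloc,
                       VmaAllocationInfo*          out_info)
{
    VmaAllocationCreateInfo alloc_create_info = {};
    alloc_create_info.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; /* example choice */
    if (use_dedicated_alloc)
        alloc_create_info.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

    VmaAllocation allocation = VK_NULL_HANDLE;
    if (vmaAllocateMemory(allocator, &mem_reqs, &alloc_create_info, &allocation, out_info) != VK_SUCCESS)
        return VK_NULL_HANDLE;

    // out_info->deviceMemory and out_info->offset are the values a wrapper
    // would hand to a "derived" memory block, as the patch does above.
    return allocation;
}

void release(VmaAllocator allocator, VmaAllocation allocation)
{
    // Counterpart of the custom release callback: free the VMA allocation
    // once the owning memory block goes out of scope.
    vmaFreeMemory(allocator, allocation);
}

(End of aside; the patch continues below.)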
@@ -228,7 +321,10 @@ bool Anvil::MemoryAllocatorBackends::VMA::bake(Anvil::MemoryAllocator::Items& in continue; } - current_item_ptr->alloc_memory_block_ptr = new_memory_block_ptr; + dynamic_cast(new_memory_block_ptr.get() )->set_parent_memory_allocator_backend_ptr(shared_from_this(), + allocation); + + current_item_ptr->alloc_memory_block_ptr = std::move(new_memory_block_ptr); current_item_ptr->alloc_size = memory_requirements_vk.size; current_item_ptr->is_baked = true; @@ -239,9 +335,9 @@ bool Anvil::MemoryAllocatorBackends::VMA::bake(Anvil::MemoryAllocator::Items& in } /** Please see header for specification */ -std::shared_ptr Anvil::MemoryAllocatorBackends::VMA::create(std::weak_ptr in_device_ptr) +std::unique_ptr Anvil::MemoryAllocatorBackends::VMA::create(const Anvil::BaseDevice* in_device_ptr) { - std::shared_ptr result_ptr; + std::unique_ptr result_ptr; result_ptr.reset( new VMA(in_device_ptr) @@ -269,6 +365,30 @@ bool Anvil::MemoryAllocatorBackends::VMA::init() return (m_vma_allocator_ptr != nullptr); } +VkResult Anvil::MemoryAllocatorBackends::VMA::map(void* in_memory_object, + VkDeviceSize in_start_offset, + VkDeviceSize in_memory_block_start_offset, + VkDeviceSize in_size, + void** out_result_ptr) +{ + VkResult result; + void* result_ptr = nullptr; + + ANVIL_REDUNDANT_ARGUMENT(in_size); + ANVIL_REDUNDANT_ARGUMENT(in_start_offset); + + anvil_assert(in_start_offset == 0); + + result = vmaMapMemory(m_vma_allocator_ptr->get_handle(), + static_cast(in_memory_object), + &result_ptr); + + result_ptr = reinterpret_cast(result_ptr) - in_memory_block_start_offset; + + *out_result_ptr = result_ptr; + return result; +} + /** Please see header for specification */ void Anvil::MemoryAllocatorBackends::VMA::VMAAllocator::on_new_vma_mem_block_alloced() { @@ -284,21 +404,14 @@ void Anvil::MemoryAllocatorBackends::VMA::VMAAllocator::on_new_vma_mem_block_all } /** Please see header for specification */ -void Anvil::MemoryAllocatorBackends::VMA::VMAAllocator::on_vma_alloced_mem_block_gone_out_of_scope(Anvil::MemoryBlock* in_memory_block_ptr) +void Anvil::MemoryAllocatorBackends::VMA::VMAAllocator::on_vma_alloced_mem_block_gone_out_of_scope(Anvil::MemoryBlock* in_memory_block_ptr, + VmaAllocation in_vma_allocation) { - VkMappedMemoryRange mem_range; - /* Only physically deallocate those memory blocks that are not derivatives of another memory blocks! */ - if (in_memory_block_ptr->get_parent_memory_block() == nullptr) + if (in_memory_block_ptr->get_create_info_ptr()->get_parent_memory_block() == nullptr) { - mem_range.memory = in_memory_block_ptr->get_memory (); - mem_range.offset = in_memory_block_ptr->get_start_offset(); - mem_range.pNext = nullptr; - mem_range.size = in_memory_block_ptr->get_size(); - mem_range.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE; - vmaFreeMemory(get_handle(), - &mem_range); + in_vma_allocation); /* Remove one cached pointer to the VMA wrapper class instance. This means that VMA instance * is going to be destroyed in case the vector's size reaches zero! @@ -315,4 +428,37 @@ void Anvil::MemoryAllocatorBackends::VMA::VMAAllocator::on_vma_alloced_mem_block bool Anvil::MemoryAllocatorBackends::VMA::supports_baking() const { return true; -} \ No newline at end of file +} + +bool Anvil::MemoryAllocatorBackends::VMA::supports_device_masks() const +{ + /* The version of VMA we currently use does not provide support for device groups. 
*/ + return false; +} + +bool Anvil::MemoryAllocatorBackends::VMA::supports_external_memory_handles(const Anvil::ExternalMemoryHandleTypeFlags& in_external_memory_handle_types) const +{ + /* Vulkan Memory Allocator does NOT support external memory handles */ + ANVIL_REDUNDANT_VARIABLE_CONST(in_external_memory_handle_types); + + return false; +} + +bool Anvil::MemoryAllocatorBackends::VMA::supports_protected_memory() const +{ + /* Vulkan Memory Allocator does NOT support VK 1.1 features */ + return false; +} + +void Anvil::MemoryAllocatorBackends::VMA::unmap(void* in_memory_object) +{ + vmaUnmapMemory(m_vma_allocator_ptr->get_handle(), + static_cast(in_memory_object) ); +} + +std::optional Anvil::MemoryAllocatorBackends::VMA::get_underlying_allocation_size(void* in_memory_object) +{ + VmaAllocationInfo2 info2{}; + vmaGetAllocationInfo2(m_vma_allocator_ptr->get_handle(), static_cast(in_memory_object), &info2); + return info2.blockSize; +} diff --git a/src/misc/memory_allocator.cpp b/src/misc/memory_allocator.cpp index 243f1f64..05ed0f8e 100644 --- a/src/misc/memory_allocator.cpp +++ b/src/misc/memory_allocator.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -20,7 +20,11 @@ // THE SOFTWARE. // +#include "misc/buffer_create_info.h" +#include "misc/fence_create_info.h" #include "misc/formats.h" +#include "misc/image_create_info.h" +#include "misc/instance_create_info.h" #include "misc/memory_allocator.h" #include "misc/memalloc_backends/backend_oneshot.h" #include "misc/memalloc_backends/backend_vma.h" @@ -28,189 +32,276 @@ #include "wrappers/device.h" #include "wrappers/fence.h" #include "wrappers/image.h" +#include "wrappers/instance.h" #include "wrappers/memory_block.h" #include "wrappers/queue.h" -#include - +#include /* Please see header for specification */ -Anvil::MemoryAllocator::Item::Item(std::shared_ptr in_memory_allocator_ptr, - std::shared_ptr in_buffer_ptr, - VkDeviceSize in_alloc_size, - uint32_t in_alloc_memory_types, - VkDeviceSize in_alloc_alignment, - MemoryFeatureFlags in_alloc_required_memory_features, - uint32_t in_alloc_supported_memory_types) +Anvil::MemoryAllocator::Item::Item(Anvil::MemoryAllocator* in_memory_allocator_ptr, + Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_alloc_size, + uint32_t in_alloc_memory_types, + VkDeviceSize in_alloc_alignment, + MemoryFeatureFlags in_alloc_required_memory_features, + uint32_t in_alloc_supported_memory_types, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_alloc_external_nt_handle_info_ptr, +#endif + const bool& in_alloc_is_dedicated, + const uint32_t& in_device_mask, + const MGPUPeerMemoryRequirements& in_mgpu_peer_memory_reqs, + const MGPUBindSparseDeviceIndices& in_mgpu_bind_sparse_device_indices, + const float& in_memory_priority) { anvil_assert(in_alloc_supported_memory_types != 0); anvil_assert(in_memory_allocator_ptr != nullptr); - alloc_memory_final_type = UINT32_MAX; - alloc_memory_required_alignment = in_alloc_alignment; - alloc_memory_required_features = in_alloc_required_memory_features; - alloc_memory_supported_memory_types = in_alloc_supported_memory_types; - alloc_memory_types = in_alloc_memory_types; - alloc_size = 
in_alloc_size; - buffer_ptr = in_buffer_ptr; - is_baked = false; - memory_allocator_ptr = in_memory_allocator_ptr; - type = ITEM_TYPE_BUFFER; +#if defined(_WIN32) + alloc_external_nt_handle_info_ptr = in_alloc_external_nt_handle_info_ptr; +#endif + + alloc_device_mask = in_device_mask; + alloc_exportable_external_handle_types = in_opt_exportable_external_handle_types; + alloc_image_aspect = Anvil::ImageAspectFlagBits::NONE; + alloc_is_dedicated_memory = in_alloc_is_dedicated; + alloc_is_scratch_buffer_alloc = false; + alloc_memory_final_type = UINT32_MAX; + alloc_memory_required_alignment = in_alloc_alignment; + alloc_memory_required_features = in_alloc_required_memory_features; + alloc_memory_supported_memory_types = in_alloc_supported_memory_types; + alloc_memory_types = in_alloc_memory_types; + alloc_mgpu_bind_sparse_device_indices = in_mgpu_bind_sparse_device_indices; + alloc_mgpu_peer_memory_reqs = in_mgpu_peer_memory_reqs; + alloc_size = in_alloc_size; + buffer_ptr = in_buffer_ptr; + image_ptr = nullptr; + is_baked = false; + memory_allocator_ptr = in_memory_allocator_ptr; + memory_priority = in_memory_priority; + n_layer = UINT32_MAX; + n_plane = UINT32_MAX; + type = ITEM_TYPE_BUFFER; register_for_callbacks(); } -/* Please see header for specification */ -Anvil::MemoryAllocator::Item::Item(std::shared_ptr in_memory_allocator_ptr, - std::shared_ptr in_image_ptr, - uint32_t in_n_layer, - VkDeviceSize in_alloc_size, - uint32_t in_alloc_memory_types, - VkDeviceSize in_miptail_offset, - VkDeviceSize in_alloc_alignment, - MemoryFeatureFlags in_alloc_required_memory_features, - uint32_t in_alloc_supported_memory_types) +Anvil::MemoryAllocator::Item::Item(Anvil::MemoryAllocator* in_memory_allocator_ptr, + Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_alloc_offset, + VkDeviceSize in_alloc_size, + uint32_t in_alloc_memory_types, + VkDeviceSize in_alloc_alignment, + MemoryFeatureFlags in_alloc_required_memory_features, + uint32_t in_alloc_supported_memory_types, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_alloc_external_nt_handle_info_ptr, +#endif + const bool& in_alloc_is_dedicated, + const uint32_t& in_device_mask, + const MGPUPeerMemoryRequirements& in_mgpu_peer_memory_reqs, + const MGPUBindSparseDeviceIndices& in_mgpu_bind_sparse_device_indices, + const float& in_memory_priority) { anvil_assert(in_alloc_supported_memory_types != 0); anvil_assert(in_memory_allocator_ptr != nullptr); - alloc_memory_final_type = UINT32_MAX; - alloc_memory_required_alignment = in_alloc_alignment; - alloc_memory_required_features = in_alloc_required_memory_features; - alloc_memory_supported_memory_types = in_alloc_supported_memory_types; - alloc_memory_types = in_alloc_memory_types; - alloc_size = in_alloc_size; - image_ptr = in_image_ptr; - is_baked = false; - memory_allocator_ptr = in_memory_allocator_ptr; - miptail_offset = in_miptail_offset; - n_layer = in_n_layer; - type = ITEM_TYPE_SPARSE_IMAGE_MIPTAIL; +#if defined(_WIN32) + alloc_external_nt_handle_info_ptr = in_alloc_external_nt_handle_info_ptr; +#endif + + alloc_device_mask = in_device_mask; + alloc_exportable_external_handle_types = in_opt_exportable_external_handle_types; + alloc_image_aspect = Anvil::ImageAspectFlagBits::NONE; + alloc_is_dedicated_memory = in_alloc_is_dedicated; + alloc_is_scratch_buffer_alloc = false; + alloc_memory_final_type = UINT32_MAX; + alloc_memory_required_alignment = in_alloc_alignment; + 
alloc_memory_required_features = in_alloc_required_memory_features; + alloc_memory_supported_memory_types = in_alloc_supported_memory_types; + alloc_memory_types = in_alloc_memory_types; + alloc_mgpu_bind_sparse_device_indices = in_mgpu_bind_sparse_device_indices; + alloc_mgpu_peer_memory_reqs = in_mgpu_peer_memory_reqs; + alloc_offset = in_alloc_offset; + alloc_size = in_alloc_size; + buffer_ptr = in_buffer_ptr; + image_ptr = nullptr; + is_baked = false; + memory_allocator_ptr = in_memory_allocator_ptr; + memory_priority = in_memory_priority; + n_layer = UINT32_MAX; + n_plane = UINT32_MAX; + type = ITEM_TYPE_SPARSE_BUFFER_REGION; register_for_callbacks(); } /* Please see header for specification */ -Anvil::MemoryAllocator::Item::Item(std::shared_ptr in_memory_allocator_ptr, - std::shared_ptr in_image_ptr, - const VkImageSubresource& in_subresource, - const VkOffset3D& in_offset, - const VkExtent3D& in_extent, - VkDeviceSize in_alloc_size, - uint32_t in_alloc_memory_types, - VkDeviceSize in_alloc_alignment, - MemoryFeatureFlags in_alloc_required_memory_features, - uint32_t in_alloc_supported_memory_types) +Anvil::MemoryAllocator::Item::Item(Anvil::MemoryAllocator* in_memory_allocator_ptr, + Anvil::Image* in_image_ptr, + uint32_t in_n_layer, + VkDeviceSize in_alloc_size, + uint32_t in_alloc_memory_types, + VkDeviceSize in_miptail_offset, + const Anvil::ImageAspectFlagBits& in_alloc_aspect, + VkDeviceSize in_alloc_alignment, + MemoryFeatureFlags in_alloc_required_memory_features, + uint32_t in_alloc_supported_memory_types, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_alloc_external_nt_handle_info_ptr, +#endif + const bool& in_alloc_is_dedicated, + const uint32_t& in_device_mask, + const MGPUPeerMemoryRequirements& in_mgpu_peer_memory_reqs, + const MGPUBindSparseDeviceIndices& in_mgpu_bind_sparse_device_indices, + const float& in_memory_priority) { anvil_assert(in_alloc_supported_memory_types != 0); anvil_assert(in_memory_allocator_ptr != nullptr); - alloc_memory_final_type = UINT32_MAX; - alloc_memory_types = in_alloc_memory_types; - alloc_memory_required_alignment = in_alloc_alignment; - alloc_memory_required_features = in_alloc_required_memory_features; - alloc_memory_supported_memory_types = in_alloc_supported_memory_types; - alloc_size = in_alloc_size; - extent = in_extent; - image_ptr = in_image_ptr; - is_baked = false; - memory_allocator_ptr = in_memory_allocator_ptr; - offset = in_offset; - subresource = in_subresource; - type = ITEM_TYPE_SPARSE_IMAGE_SUBRESOURCE; +#if defined(_WIN32) + alloc_external_nt_handle_info_ptr = in_alloc_external_nt_handle_info_ptr; +#endif + + alloc_device_mask = in_device_mask; + alloc_exportable_external_handle_types = in_opt_exportable_external_handle_types; + alloc_image_aspect = in_alloc_aspect; + alloc_is_dedicated_memory = in_alloc_is_dedicated; + alloc_is_scratch_buffer_alloc = false; + alloc_memory_final_type = UINT32_MAX; + alloc_memory_required_alignment = in_alloc_alignment; + alloc_memory_required_features = in_alloc_required_memory_features; + alloc_memory_supported_memory_types = in_alloc_supported_memory_types; + alloc_memory_types = in_alloc_memory_types; + alloc_mgpu_bind_sparse_device_indices = in_mgpu_bind_sparse_device_indices; + alloc_mgpu_peer_memory_reqs = in_mgpu_peer_memory_reqs; + alloc_offset = UINT64_MAX; + alloc_size = in_alloc_size; + buffer_ptr = nullptr; + image_ptr = in_image_ptr; + is_baked = false; + memory_allocator_ptr 
= in_memory_allocator_ptr; + memory_priority = in_memory_priority; + miptail_offset = in_miptail_offset; + n_layer = in_n_layer; + n_plane = (in_alloc_aspect == Anvil::ImageAspectFlagBits::PLANE_1_BIT) ? 1 + : (in_alloc_aspect == Anvil::ImageAspectFlagBits::PLANE_2_BIT) ? 2 + : 0; + type = ITEM_TYPE_SPARSE_IMAGE_MIPTAIL; register_for_callbacks(); } /* Please see header for specification */ -Anvil::MemoryAllocator::Item::Item(std::shared_ptr in_memory_allocator_ptr, - std::shared_ptr in_image_ptr, - VkDeviceSize in_alloc_size, - uint32_t in_alloc_memory_types, - VkDeviceSize in_alloc_alignment, - MemoryFeatureFlags in_alloc_required_memory_features, - uint32_t in_alloc_supported_memory_types) +Anvil::MemoryAllocator::Item::Item(Anvil::MemoryAllocator* in_memory_allocator_ptr, + Anvil::Image* in_image_ptr, + const Anvil::ImageSubresource& in_subresource, + const VkOffset3D& in_offset, + const VkExtent3D& in_extent, + VkDeviceSize in_alloc_size, + uint32_t in_alloc_memory_types, + VkDeviceSize in_alloc_alignment, + MemoryFeatureFlags in_alloc_required_memory_features, + uint32_t in_alloc_supported_memory_types, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_alloc_external_nt_handle_info_ptr, +#endif + const bool& in_alloc_is_dedicated, + const uint32_t& in_device_mask, + const MGPUPeerMemoryRequirements& in_mgpu_peer_memory_reqs, + const MGPUBindSparseDeviceIndices& in_mgpu_bind_sparse_device_indices, + const float& in_memory_priority) { anvil_assert(in_alloc_supported_memory_types != 0); anvil_assert(in_memory_allocator_ptr != nullptr); - alloc_memory_final_type = UINT32_MAX; - alloc_memory_required_alignment = in_alloc_alignment; - alloc_memory_required_features = in_alloc_required_memory_features; - alloc_memory_supported_memory_types = in_alloc_supported_memory_types; - alloc_memory_types = in_alloc_memory_types; - alloc_size = in_alloc_size; - image_ptr = in_image_ptr; - is_baked = false; - memory_allocator_ptr = in_memory_allocator_ptr; - type = ITEM_TYPE_IMAGE_WHOLE; +#if defined(_WIN32) + alloc_external_nt_handle_info_ptr = in_alloc_external_nt_handle_info_ptr; +#endif + + alloc_device_mask = in_device_mask; + alloc_exportable_external_handle_types = in_opt_exportable_external_handle_types; + alloc_image_aspect = Anvil::ImageAspectFlagBits::NONE; + alloc_is_dedicated_memory = in_alloc_is_dedicated; + alloc_is_scratch_buffer_alloc = false; + alloc_memory_final_type = UINT32_MAX; + alloc_memory_types = in_alloc_memory_types; + alloc_memory_required_alignment = in_alloc_alignment; + alloc_memory_required_features = in_alloc_required_memory_features; + alloc_memory_supported_memory_types = in_alloc_supported_memory_types; + alloc_mgpu_bind_sparse_device_indices = in_mgpu_bind_sparse_device_indices; + alloc_mgpu_peer_memory_reqs = in_mgpu_peer_memory_reqs; + alloc_offset = UINT64_MAX; + alloc_size = in_alloc_size; + buffer_ptr = nullptr; + extent = in_extent; + image_ptr = in_image_ptr; + is_baked = false; + memory_allocator_ptr = in_memory_allocator_ptr; + memory_priority = in_memory_priority; + n_layer = UINT32_MAX; + n_plane = (in_subresource.aspect_mask == Anvil::ImageAspectFlagBits::PLANE_1_BIT) ? 1 + : (in_subresource.aspect_mask == Anvil::ImageAspectFlagBits::PLANE_2_BIT) ? 
2 + : 0; + offset = in_offset; + subresource = in_subresource; + type = ITEM_TYPE_SPARSE_IMAGE_SUBRESOURCE; register_for_callbacks(); } /* Please see header for specification */ -Anvil::MemoryAllocator::Item& Anvil::MemoryAllocator::Item::operator=(const Anvil::MemoryAllocator::Item& in) +Anvil::MemoryAllocator::Item::Item(Anvil::MemoryAllocator* in_memory_allocator_ptr, + Anvil::Image* in_image_ptr, + VkDeviceSize in_alloc_size, + uint32_t in_alloc_memory_types, + VkDeviceSize in_alloc_alignment, + MemoryFeatureFlags in_alloc_required_memory_features, + uint32_t in_alloc_supported_memory_types, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_alloc_external_nt_handle_info_ptr, +#endif + const bool& in_alloc_is_dedicated, + const uint32_t& in_device_mask, + const MGPUPeerMemoryRequirements& in_mgpu_peer_memory_reqs, + const MGPUBindSparseDeviceIndices& in_mgpu_bind_sparse_device_indices, + const float& in_memory_priority, + const uint32_t& in_n_plane) { - buffer_ptr = in.buffer_ptr; - buffer_ref_float_data_ptr = in.buffer_ref_float_data_ptr; - buffer_ref_float_vector_data_ptr = in.buffer_ref_float_vector_data_ptr; - buffer_ref_uchar8_data_ptr = in.buffer_ref_uchar8_data_ptr; - buffer_ref_uchar8_vector_data_ptr = in.buffer_ref_uchar8_vector_data_ptr; - buffer_ref_uint32_data_ptr = in.buffer_ref_uint32_data_ptr; - buffer_ref_uint32_vector_data_ptr = in.buffer_ref_uint32_vector_data_ptr; - image_ptr = in.image_ptr; - - memory_allocator_ptr = in.memory_allocator_ptr; - type = in.type; - - alloc_memory_block_ptr = in.alloc_memory_block_ptr; - alloc_memory_final_type = in.alloc_memory_final_type; - alloc_memory_required_alignment = in.alloc_memory_required_alignment; - alloc_memory_required_features = in.alloc_memory_required_features; - alloc_memory_supported_memory_types = in.alloc_memory_supported_memory_types; - alloc_memory_types = in.alloc_memory_types; - alloc_size = in.alloc_size; - - extent = in.extent; - is_baked = in.is_baked; - miptail_offset = in.miptail_offset; - n_layer = in.n_layer; - offset = in.offset; - subresource = in.subresource; - - register_for_callbacks(); - - return *this; -} + anvil_assert(in_alloc_supported_memory_types != 0); + anvil_assert(in_memory_allocator_ptr != nullptr); -/* Please see header for specification */ -Anvil::MemoryAllocator::Item::Item(const Anvil::MemoryAllocator::Item& in) -{ - buffer_ptr = in.buffer_ptr; - buffer_ref_float_data_ptr = in.buffer_ref_float_data_ptr; - buffer_ref_float_vector_data_ptr = in.buffer_ref_float_vector_data_ptr; - buffer_ref_uchar8_data_ptr = in.buffer_ref_uchar8_data_ptr; - buffer_ref_uchar8_vector_data_ptr = in.buffer_ref_uchar8_vector_data_ptr; - buffer_ref_uint32_data_ptr = in.buffer_ref_uint32_data_ptr; - buffer_ref_uint32_vector_data_ptr = in.buffer_ref_uint32_vector_data_ptr; - image_ptr = in.image_ptr; - - memory_allocator_ptr = in.memory_allocator_ptr; - type = in.type; - - alloc_memory_block_ptr = in.alloc_memory_block_ptr; - alloc_memory_final_type = in.alloc_memory_final_type; - alloc_memory_required_alignment = in.alloc_memory_required_alignment; - alloc_memory_required_features = in.alloc_memory_required_features; - alloc_memory_supported_memory_types = in.alloc_memory_supported_memory_types; - alloc_memory_types = in.alloc_memory_types; - alloc_size = in.alloc_size; - - extent = in.extent; - is_baked = in.is_baked; - miptail_offset = in.miptail_offset; - n_layer = in.n_layer; - offset = in.offset; - 
subresource = in.subresource; +#if defined(_WIN32) + alloc_external_nt_handle_info_ptr = in_alloc_external_nt_handle_info_ptr; +#endif + + alloc_device_mask = in_device_mask; + alloc_exportable_external_handle_types = in_opt_exportable_external_handle_types; + alloc_image_aspect = Anvil::ImageAspectFlagBits::NONE; + alloc_is_dedicated_memory = in_alloc_is_dedicated; + alloc_is_scratch_buffer_alloc = false; + alloc_memory_final_type = UINT32_MAX; + alloc_memory_required_alignment = in_alloc_alignment; + alloc_memory_required_features = in_alloc_required_memory_features; + alloc_memory_supported_memory_types = in_alloc_supported_memory_types; + alloc_memory_types = in_alloc_memory_types; + alloc_mgpu_bind_sparse_device_indices = in_mgpu_bind_sparse_device_indices; + alloc_mgpu_peer_memory_reqs = in_mgpu_peer_memory_reqs; + alloc_offset = UINT64_MAX; + alloc_size = in_alloc_size; + buffer_ptr = nullptr; + image_ptr = in_image_ptr; + is_baked = false; + memory_allocator_ptr = in_memory_allocator_ptr; + memory_priority = in_memory_priority; + n_layer = UINT32_MAX; + n_plane = in_n_plane; + type = ITEM_TYPE_IMAGE_WHOLE; register_for_callbacks(); } @@ -225,12 +316,12 @@ Anvil::MemoryAllocator::Item::~Item() void Anvil::MemoryAllocator::Item::register_for_callbacks() { auto on_implicit_bake_needed_callback_func = std::bind(&Anvil::MemoryAllocator::on_implicit_bake_needed, - memory_allocator_ptr.get() ); + memory_allocator_ptr); auto on_is_alloc_pending_for_buffer_query_callback_func = std::bind(&Anvil::MemoryAllocator::on_is_alloc_pending_for_buffer_query, - memory_allocator_ptr.get(), + memory_allocator_ptr, std::placeholders::_1); auto on_is_alloc_pending_for_image_query_callback_func = std::bind(&Anvil::MemoryAllocator::on_is_alloc_pending_for_image_query, - memory_allocator_ptr.get(), + memory_allocator_ptr, std::placeholders::_1); if (buffer_ptr != nullptr) @@ -238,11 +329,11 @@ void Anvil::MemoryAllocator::Item::register_for_callbacks() /* Sign up for "is alloc pending" callback in order to support sparsely bound/sparse buffers */ if (!buffer_ptr->is_callback_registered(BUFFER_CALLBACK_ID_IS_ALLOC_PENDING, on_is_alloc_pending_for_buffer_query_callback_func, - buffer_ptr.get() )) + buffer_ptr)) { buffer_ptr->register_for_callbacks(BUFFER_CALLBACK_ID_IS_ALLOC_PENDING, on_is_alloc_pending_for_buffer_query_callback_func, - buffer_ptr.get() ); + buffer_ptr); buffer_has_is_alloc_pending_callback_registered = true; } @@ -254,11 +345,11 @@ void Anvil::MemoryAllocator::Item::register_for_callbacks() /* Sign up for "memory needed" callback so that we can trigger an implicit bake operation */ if (!buffer_ptr->is_callback_registered(BUFFER_CALLBACK_ID_MEMORY_BLOCK_NEEDED, on_implicit_bake_needed_callback_func, - buffer_ptr.get() )) + buffer_ptr)) { buffer_ptr->register_for_callbacks(BUFFER_CALLBACK_ID_MEMORY_BLOCK_NEEDED, on_implicit_bake_needed_callback_func, - buffer_ptr.get() ); + buffer_ptr); buffer_has_memory_block_needed_callback_registered = true; } @@ -273,11 +364,11 @@ void Anvil::MemoryAllocator::Item::register_for_callbacks() /* Sign up for "is alloc pending" callback in order to support sparse images */ if (!image_ptr->is_callback_registered(IMAGE_CALLBACK_ID_IS_ALLOC_PENDING, on_is_alloc_pending_for_image_query_callback_func, - image_ptr.get() )) + image_ptr)) { image_ptr->register_for_callbacks(IMAGE_CALLBACK_ID_IS_ALLOC_PENDING, on_is_alloc_pending_for_image_query_callback_func, - image_ptr.get() ); + image_ptr); image_has_is_alloc_pending_callback_registered = true; } @@ -289,11 +380,11 
@@ void Anvil::MemoryAllocator::Item::register_for_callbacks() /* Sign up for "memory needed" callback so that we can trigger an implicit bake operation */ if (!image_ptr->is_callback_registered(IMAGE_CALLBACK_ID_MEMORY_BLOCK_NEEDED, on_implicit_bake_needed_callback_func, - image_ptr.get() )) + image_ptr)) { image_ptr->register_for_callbacks(IMAGE_CALLBACK_ID_MEMORY_BLOCK_NEEDED, on_implicit_bake_needed_callback_func, - image_ptr.get() ); + image_ptr); image_has_memory_block_needed_callback_registered = true; } @@ -309,12 +400,12 @@ void Anvil::MemoryAllocator::Item::register_for_callbacks() void Anvil::MemoryAllocator::Item::unregister_from_callbacks() { auto on_implicit_bake_needed_callback_func = std::bind(&Anvil::MemoryAllocator::on_implicit_bake_needed, - memory_allocator_ptr.get() ); + memory_allocator_ptr); auto on_is_alloc_pending_for_buffer_query_callback_func = std::bind(&Anvil::MemoryAllocator::on_is_alloc_pending_for_buffer_query, - memory_allocator_ptr.get(), + memory_allocator_ptr, std::placeholders::_1); auto on_is_alloc_pending_for_image_query_callback_func = std::bind(&Anvil::MemoryAllocator::on_is_alloc_pending_for_image_query, - memory_allocator_ptr.get(), + memory_allocator_ptr, std::placeholders::_1); if (buffer_ptr != nullptr) @@ -323,14 +414,14 @@ void Anvil::MemoryAllocator::Item::unregister_from_callbacks() { buffer_ptr->unregister_from_callbacks(BUFFER_CALLBACK_ID_IS_ALLOC_PENDING, on_is_alloc_pending_for_buffer_query_callback_func, - buffer_ptr.get() ); + buffer_ptr); } if (buffer_has_memory_block_needed_callback_registered) { buffer_ptr->unregister_from_callbacks(BUFFER_CALLBACK_ID_MEMORY_BLOCK_NEEDED, on_implicit_bake_needed_callback_func, - buffer_ptr.get() ); + buffer_ptr); } } @@ -340,24 +431,26 @@ void Anvil::MemoryAllocator::Item::unregister_from_callbacks() { image_ptr->unregister_from_callbacks(IMAGE_CALLBACK_ID_IS_ALLOC_PENDING, on_is_alloc_pending_for_image_query_callback_func, - image_ptr.get() ); + image_ptr); } if (image_has_memory_block_needed_callback_registered) { image_ptr->unregister_from_callbacks(IMAGE_CALLBACK_ID_MEMORY_BLOCK_NEEDED, on_implicit_bake_needed_callback_func, - image_ptr.get() ); + image_ptr); } } } /* Please see header for specification */ -Anvil::MemoryAllocator::MemoryAllocator(std::weak_ptr in_device_ptr, - std::shared_ptr in_backend_ptr) - :m_backend_ptr(in_backend_ptr), - m_device_ptr (in_device_ptr) +Anvil::MemoryAllocator::MemoryAllocator(const Anvil::BaseDevice* in_device_ptr, + std::shared_ptr in_backend_ptr, + bool in_mt_safe) + :MTSafetySupportProvider(in_mt_safe), + m_backend_ptr (std::move(in_backend_ptr) ), + m_device_ptr (in_device_ptr) { /* Stub */ } @@ -365,20 +458,45 @@ Anvil::MemoryAllocator::MemoryAllocator(std::weak_ptr /* Please see header for specification */ Anvil::MemoryAllocator::~MemoryAllocator() { - if (m_backend_ptr->supports_baking() && - m_items.size() > 0) + if (m_items.size() > 0 && + m_backend_ptr->supports_baking() ) { bake(); } } - /** Please see header for specification */ -bool Anvil::MemoryAllocator::add_buffer(std::shared_ptr in_buffer_ptr, - MemoryFeatureFlags in_required_memory_features) +bool Anvil::MemoryAllocator::add_buffer(Anvil::Buffer* in_buffer_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr, +#endif + const uint32_t* in_opt_device_mask_ptr, + const MGPUPeerMemoryRequirements* 
in_opt_mgpu_peer_memory_reqs_ptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr, + const float& in_opt_memory_priority) { + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } + return add_buffer_internal(in_buffer_ptr, - in_required_memory_features); + in_required_memory_features, + in_opt_exportable_external_handle_types, +#if defined(_WIN32) + in_opt_external_nt_handle_info_ptr, +#endif + in_opt_device_mask_ptr, + in_opt_mgpu_peer_memory_reqs_ptr, + in_opt_mgpu_bind_sparse_device_indices_ptr, + in_opt_memory_priority); } /** Determines the amount of memory, supported memory type and required alignment for the specified @@ -386,32 +504,60 @@ bool Anvil::MemoryAllocator::add_buffer(std::shared_ptr in_buffer * * @param buffer_ptr Buffer instance to assign a memory block at baking time. **/ -bool Anvil::MemoryAllocator::add_buffer_internal(std::shared_ptr in_buffer_ptr, - MemoryFeatureFlags in_required_memory_features) +bool Anvil::MemoryAllocator::add_buffer_internal(Anvil::Buffer* in_buffer_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr, +#endif + const uint32_t* in_opt_device_mask_ptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr, + const float& in_opt_memory_priority) { - std::shared_ptr backend_interface_ptr = std::dynamic_pointer_cast(m_backend_ptr); - VkDeviceSize buffer_alignment = 0; - uint32_t buffer_memory_types = 0; - VkDeviceSize buffer_storage_size = 0; - std::shared_ptr new_item_ptr; - bool result = true; + IMemoryAllocatorBackend* backend_interface_ptr = dynamic_cast(m_backend_ptr.get() ); + VkDeviceSize buffer_alignment = 0; + uint32_t buffer_memory_types = 0; + VkDeviceSize buffer_storage_size = 0; + uint32_t filtered_memory_types = 0; + const VkMemoryRequirements memory_reqs = in_buffer_ptr->get_memory_requirements(); + std::unique_ptr new_item_ptr; + bool result = true; + std::string name; + + ANVIL_REDUNDANT_VARIABLE(backend_interface_ptr); /* Sanity checks */ anvil_assert(backend_interface_ptr->supports_baking() ); - anvil_assert(in_buffer_ptr != nullptr); + anvil_assert(in_buffer_ptr != nullptr); + anvil_assert( in_opt_device_mask_ptr == nullptr || + (in_opt_device_mask_ptr != nullptr && m_backend_ptr->supports_device_masks() )); + anvil_assert( in_opt_mgpu_bind_sparse_device_indices_ptr == nullptr || + ((in_buffer_ptr->get_create_info_ptr()->get_create_flags() & BufferCreateFlagBits::SPARSE_BINDING_BIT) != 0)); + + anvil_assert((in_required_memory_features & Anvil::MemoryFeatureFlagBits::PROTECTED_BIT) == 0 || + (m_backend_ptr->supports_protected_memory() )); + + anvil_assert(do_bind_sparse_device_indices_sanity_check(in_opt_mgpu_bind_sparse_device_indices_ptr)); + + /* Extract external memory handle types from the specified buffer, if none were specified. */ + if (!do_external_memory_handle_type_sanity_checks(in_buffer_ptr->get_create_info_ptr()->get_exportable_external_memory_handle_types() ) ) + { + result = false; + + goto end; + } /* Determine how much space we're going to need, what alignment we need * to consider, and so on. 
*/ - uint32_t filtered_memory_types = 0; - const VkMemoryRequirements memory_reqs = in_buffer_ptr->get_memory_requirements(); - buffer_alignment = memory_reqs.alignment; buffer_memory_types = memory_reqs.memoryTypeBits; buffer_storage_size = memory_reqs.size; - if (!is_alloc_supported(buffer_memory_types, - in_required_memory_features, - &filtered_memory_types) ) + if (!get_mem_types_supporting_mem_features(m_device_ptr, + buffer_memory_types, + in_required_memory_features, + &filtered_memory_types) ) { result = false; @@ -420,18 +566,30 @@ bool Anvil::MemoryAllocator::add_buffer_internal(std::shared_ptr /* Store a new block item descriptor. */ new_item_ptr.reset( - new Item(shared_from_this(), + new Item(this, in_buffer_ptr, buffer_storage_size, buffer_memory_types, buffer_alignment, in_required_memory_features, - filtered_memory_types) + filtered_memory_types, + in_opt_exportable_external_handle_types, +#if defined(_WIN32) + in_opt_external_nt_handle_info_ptr, +#endif + in_buffer_ptr->requires_dedicated_allocation(), + (in_opt_device_mask_ptr != nullptr) ? *in_opt_device_mask_ptr : 0, + (in_opt_mgpu_peer_memory_reqs_ptr != nullptr) ? *in_opt_mgpu_peer_memory_reqs_ptr : MGPUPeerMemoryRequirements(), + (in_opt_mgpu_bind_sparse_device_indices_ptr != nullptr) ? *in_opt_mgpu_bind_sparse_device_indices_ptr : MGPUBindSparseDeviceIndices(), + in_opt_memory_priority) ); + name = in_buffer_ptr->get_name(); + if (!name.empty()) + new_item_ptr->debug_name = std::move(name); - m_items.push_back(new_item_ptr); + m_items.push_back(std::move(new_item_ptr)); - m_per_object_pending_alloc_status[in_buffer_ptr.get()] = true; + m_per_object_pending_alloc_status[in_buffer_ptr] = true; end: anvil_assert(result); @@ -440,178 +598,619 @@ bool Anvil::MemoryAllocator::add_buffer_internal(std::shared_ptr } /* Please see header for specification */ -bool Anvil::MemoryAllocator::add_buffer_with_float_data_ptr_based_post_fill(std::shared_ptr in_buffer_ptr, - std::shared_ptr in_data_ptr, - MemoryFeatureFlags in_required_memory_features) +bool Anvil::MemoryAllocator::add_buffer_with_float_data_ptr_based_post_fill(Anvil::Buffer* in_buffer_ptr, + std::unique_ptr in_data_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr, +#endif + const uint32_t* in_opt_device_mask_ptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr, + const float& in_opt_memory_priority) { - bool result = add_buffer_internal(in_buffer_ptr, - in_required_memory_features); + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + bool result; + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } + + result = add_buffer_internal(in_buffer_ptr, + in_required_memory_features, + in_opt_exportable_external_handle_types, +#if defined(_WIN32) + in_opt_external_nt_handle_info_ptr, +#endif + in_opt_device_mask_ptr, + in_opt_mgpu_peer_memory_reqs_ptr, + in_opt_mgpu_bind_sparse_device_indices_ptr, + in_opt_memory_priority); if (result) { - m_items.back()->buffer_ref_float_data_ptr = in_data_ptr; + m_items.back()->buffer_ref_float_data_ptr = std::move(in_data_ptr); } return result; } /* Please see header for specification */ -bool Anvil::MemoryAllocator::add_buffer_with_float_data_vector_ptr_based_post_fill(std::shared_ptr 
in_buffer_ptr, - std::shared_ptr > in_data_vector_ptr, - MemoryFeatureFlags in_required_memory_features) +bool Anvil::MemoryAllocator::add_buffer_with_float_data_vector_ptr_based_post_fill(Anvil::Buffer* in_buffer_ptr, + std::unique_ptr > in_data_vector_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr, +#endif + const uint32_t* in_opt_device_mask_ptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr, + const float& in_opt_memory_priority) { - anvil_assert(in_data_vector_ptr->size() * sizeof(float) == in_buffer_ptr->get_size() ); + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + bool result; - bool result = add_buffer_internal(in_buffer_ptr, - in_required_memory_features); + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } + + anvil_assert(in_data_vector_ptr->size() * sizeof(float) == in_buffer_ptr->get_create_info_ptr()->get_size() ); + + result = add_buffer_internal(in_buffer_ptr, + in_required_memory_features, + in_opt_exportable_external_handle_types, +#if defined(_WIN32) + in_opt_external_nt_handle_info_ptr, +#endif + in_opt_device_mask_ptr, + in_opt_mgpu_peer_memory_reqs_ptr, + in_opt_mgpu_bind_sparse_device_indices_ptr, + in_opt_memory_priority); if (result) { - m_items.back()->buffer_ref_float_vector_data_ptr = in_data_vector_ptr; + m_items.back()->buffer_ref_float_vector_data_ptr = std::move(in_data_vector_ptr); } return result; } /* Please see header for specification */ -bool Anvil::MemoryAllocator::add_buffer_with_uchar8_data_ptr_based_post_fill(std::shared_ptr in_buffer_ptr, - std::shared_ptr in_data_ptr, - MemoryFeatureFlags in_required_memory_features) +bool Anvil::MemoryAllocator::add_buffer_with_float_data_vector_ptr_based_post_fill(Anvil::Buffer* in_buffer_ptr, + const std::vector* in_data_vector_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr, +#endif + const uint32_t* in_opt_device_mask_ptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr, + const float& in_opt_memory_priority) { - bool result = add_buffer_internal(in_buffer_ptr, - in_required_memory_features); + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + auto ptr = std::unique_ptr, std::function*) > >(const_cast* >(in_data_vector_ptr), + [](const std::vector*) + { + /* Stub */ + }); + bool result; + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } + + anvil_assert(in_data_vector_ptr->size() * sizeof(uint32_t) == in_buffer_ptr->get_create_info_ptr()->get_size() ); + + result = add_buffer_internal(in_buffer_ptr, + in_required_memory_features, + in_opt_exportable_external_handle_types, +#if defined(_WIN32) + in_opt_external_nt_handle_info_ptr, +#endif + in_opt_device_mask_ptr, + in_opt_mgpu_peer_memory_reqs_ptr, + in_opt_mgpu_bind_sparse_device_indices_ptr, + in_opt_memory_priority); if (result) { - m_items.back()->buffer_ref_uchar8_data_ptr = in_data_ptr; + m_items.back()->buffer_ref_float_vector_data_ptr = std::move(ptr); } 
return result; } /* Please see header for specification */ -bool Anvil::MemoryAllocator::add_buffer_with_uchar8_data_vector_ptr_based_post_fill(std::shared_ptr in_buffer_ptr, - std::shared_ptr > in_data_vector_ptr, - MemoryFeatureFlags in_required_memory_features) +bool Anvil::MemoryAllocator::add_buffer_with_uchar8_data_ptr_based_post_fill(Anvil::Buffer* in_buffer_ptr, + std::unique_ptr in_data_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr, +#endif + const uint32_t* in_opt_device_mask_ptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr, + const float& in_opt_memory_priority) { - anvil_assert(in_data_vector_ptr->size() == in_buffer_ptr->get_size() ); + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + bool result; - bool result = add_buffer_internal(in_buffer_ptr, - in_required_memory_features); + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } + + result = add_buffer_internal(in_buffer_ptr, + in_required_memory_features, + in_opt_exportable_external_handle_types, +#if defined(_WIN32) + in_opt_external_nt_handle_info_ptr, +#endif + in_opt_device_mask_ptr, + in_opt_mgpu_peer_memory_reqs_ptr, + in_opt_mgpu_bind_sparse_device_indices_ptr, + in_opt_memory_priority); if (result) { - m_items.back()->buffer_ref_uchar8_vector_data_ptr = in_data_vector_ptr; + m_items.back()->buffer_ref_uchar8_data_ptr = std::move(in_data_ptr); } return result; } /* Please see header for specification */ -bool Anvil::MemoryAllocator::add_buffer_with_uint32_data_ptr_based_post_fill(std::shared_ptr in_buffer_ptr, - std::shared_ptr in_data_ptr, - MemoryFeatureFlags in_required_memory_features) +bool Anvil::MemoryAllocator::add_buffer_with_uchar8_data_vector_ptr_based_post_fill(Anvil::Buffer* in_buffer_ptr, + std::unique_ptr > in_data_vector_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr, +#endif + const uint32_t* in_opt_device_mask_ptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr, + const float& in_opt_memory_priority) { - bool result = add_buffer_internal(in_buffer_ptr, - in_required_memory_features); + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + bool result; + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } + + anvil_assert(in_data_vector_ptr->size() == in_buffer_ptr->get_create_info_ptr()->get_size() ); + + result = add_buffer_internal(in_buffer_ptr, + in_required_memory_features, + in_opt_exportable_external_handle_types, +#if defined(_WIN32) + in_opt_external_nt_handle_info_ptr, +#endif + in_opt_device_mask_ptr, + in_opt_mgpu_peer_memory_reqs_ptr, + in_opt_mgpu_bind_sparse_device_indices_ptr, + in_opt_memory_priority); + + if (result) + { + m_items.back()->buffer_ref_uchar8_vector_data_ptr = std::move(in_data_vector_ptr); + } + + return result; +} + +/* Please see header for specification */ +bool Anvil::MemoryAllocator::add_buffer_with_uint32_data_ptr_based_post_fill(Anvil::Buffer* in_buffer_ptr, + 
std::unique_ptr in_data_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr, +#endif + const uint32_t* in_opt_device_mask_ptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr, + const float& in_opt_memory_priority) +{ + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + bool result; + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } + + result = add_buffer_internal(in_buffer_ptr, + in_required_memory_features, + in_opt_exportable_external_handle_types, +#if defined(_WIN32) + in_opt_external_nt_handle_info_ptr, +#endif + in_opt_device_mask_ptr, + in_opt_mgpu_peer_memory_reqs_ptr, + in_opt_mgpu_bind_sparse_device_indices_ptr, + in_opt_memory_priority); if (result) { - m_items.back()->buffer_ref_uint32_data_ptr = in_data_ptr; + m_items.back()->buffer_ref_uint32_data_ptr = std::move(in_data_ptr); } return result; } /* Please see header for specification */ -bool Anvil::MemoryAllocator::add_buffer_with_uint32_data_vector_ptr_based_post_fill(std::shared_ptr in_buffer_ptr, - std::shared_ptr > in_data_vector_ptr, - MemoryFeatureFlags in_required_memory_features) +bool Anvil::MemoryAllocator::add_buffer_with_uint32_data_vector_ptr_based_post_fill(Anvil::Buffer* in_buffer_ptr, + std::unique_ptr > in_data_vector_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr, +#endif + const uint32_t* in_opt_device_mask_ptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr, + const float& in_opt_memory_priority) { - anvil_assert(in_data_vector_ptr->size() * sizeof(uint32_t) == in_buffer_ptr->get_size() ); + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + bool result; + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } + + anvil_assert(in_data_vector_ptr->size() * sizeof(uint32_t) == in_buffer_ptr->get_create_info_ptr()->get_size() ); + + result = add_buffer_internal(in_buffer_ptr, + in_required_memory_features, + in_opt_exportable_external_handle_types, +#if defined(_WIN32) + in_opt_external_nt_handle_info_ptr, +#endif + in_opt_device_mask_ptr, + in_opt_mgpu_peer_memory_reqs_ptr, + in_opt_mgpu_bind_sparse_device_indices_ptr, + in_opt_memory_priority); + + if (result) + { + m_items.back()->buffer_ref_uint32_vector_data_ptr = std::move(in_data_vector_ptr); + } + + return result; +} + +/** Please see header for specification */ +bool Anvil::MemoryAllocator::add_buffer_with_uint32_data_vector_ptr_based_post_fill(Anvil::Buffer* in_buffer_ptr, + const std::vector* in_data_vector_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr, +#endif + const uint32_t* in_opt_device_mask_ptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr, + const float& 
in_opt_memory_priority) +{ + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + auto ptr = std::unique_ptr, std::function*) > >(const_cast* >(in_data_vector_ptr), + [](const std::vector*) + { + /* Stub */ + }); + bool result; + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } + + anvil_assert(in_data_vector_ptr->size() * sizeof(uint32_t) == in_buffer_ptr->get_create_info_ptr()->get_size() ); - bool result = add_buffer_internal(in_buffer_ptr, - in_required_memory_features); + result = add_buffer_internal(in_buffer_ptr, + in_required_memory_features, + in_opt_exportable_external_handle_types, +#if defined(_WIN32) + in_opt_external_nt_handle_info_ptr, +#endif + in_opt_device_mask_ptr, + in_opt_mgpu_peer_memory_reqs_ptr, + in_opt_mgpu_bind_sparse_device_indices_ptr, + in_opt_memory_priority); if (result) { - m_items.back()->buffer_ref_uint32_vector_data_ptr = in_data_vector_ptr; + m_items.back()->buffer_ref_uint32_vector_data_ptr = std::move(ptr); } return result; } /** Please see header for specification */ -bool Anvil::MemoryAllocator::add_image_whole(std::shared_ptr in_image_ptr, - MemoryFeatureFlags in_required_memory_features) +bool Anvil::MemoryAllocator::add_image_whole(Anvil::Image* in_image_ptr, + MemoryFeatureFlags in_required_memory_features, + const Anvil::ExternalMemoryHandleTypeFlags& in_opt_exportable_external_handle_types, + +#if defined(_WIN32) + const Anvil::ExternalNTHandleInfo* in_opt_external_nt_handle_info_ptr, +#endif + const uint32_t* in_opt_device_mask_ptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr, + const float& in_opt_memory_priority) { - uint32_t filtered_memory_types = 0; - VkDeviceSize image_alignment = 0; - uint32_t image_memory_types = 0; - VkDeviceSize image_storage_size = 0; - std::shared_ptr new_item_ptr; - bool result = true; + uint32_t filtered_memory_types = 0; + VkDeviceSize image_alignment = 0; + uint32_t image_memory_types = 0; + const auto image_n_planes = Anvil::Formats::get_format_n_planes(in_image_ptr->get_create_info_ptr()->get_format() ); + VkDeviceSize image_storage_size = 0; + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + std::unique_ptr new_item_ptr; + bool result = true; + std::string name; + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } /* Sanity checks */ anvil_assert(m_backend_ptr->supports_baking() ); - anvil_assert(in_image_ptr != nullptr); + anvil_assert(in_image_ptr != nullptr); + anvil_assert( in_opt_device_mask_ptr == nullptr || + (in_opt_device_mask_ptr != nullptr && m_backend_ptr->supports_device_masks() )); + anvil_assert( in_opt_mgpu_bind_sparse_device_indices_ptr == nullptr || + ((in_image_ptr->get_create_info_ptr()->get_create_flags() & ImageCreateFlagBits::SPARSE_BINDING_BIT) != 0)); + + anvil_assert(do_bind_sparse_device_indices_sanity_check(in_opt_mgpu_bind_sparse_device_indices_ptr)); + + anvil_assert((in_required_memory_features & Anvil::MemoryFeatureFlagBits::PROTECTED_BIT) == 0 || + (m_backend_ptr->supports_protected_memory() )); + + if (!do_external_memory_handle_type_sanity_checks(in_image_ptr->get_create_info_ptr()->get_external_memory_handle_types()) ) + { + result = false; + + goto end; + } /* Determine how much size is needed for the image's storage, as well as what * the allocation requirements are */ - image_alignment = in_image_ptr->get_image_alignment(); - image_memory_types = 
in_image_ptr->get_image_memory_types(); - image_storage_size = in_image_ptr->get_image_storage_size(); + for (uint32_t n_plane = 0; + n_plane < image_n_planes; + ++n_plane) + { + image_alignment = in_image_ptr->get_image_alignment (n_plane); + image_memory_types = in_image_ptr->get_image_memory_types(n_plane); + image_storage_size = in_image_ptr->get_image_storage_size(n_plane); + + if (!get_mem_types_supporting_mem_features(m_device_ptr, + image_memory_types, + in_required_memory_features, + &filtered_memory_types) ) + { + result = false; + + goto end; + } + + /* Store a new block item descriptor */ + new_item_ptr.reset( + new Item(this, + in_image_ptr, + image_storage_size, + image_memory_types, + image_alignment, + in_required_memory_features, + filtered_memory_types, + in_opt_exportable_external_handle_types, + #if defined(_WIN32) + in_opt_external_nt_handle_info_ptr, + #endif + in_image_ptr->requires_dedicated_allocation(n_plane), + (in_opt_device_mask_ptr != nullptr) ? *in_opt_device_mask_ptr : 0, + (in_opt_mgpu_peer_memory_reqs_ptr != nullptr) ? *in_opt_mgpu_peer_memory_reqs_ptr : MGPUPeerMemoryRequirements(), + (in_opt_mgpu_bind_sparse_device_indices_ptr != nullptr) ? *in_opt_mgpu_bind_sparse_device_indices_ptr : MGPUBindSparseDeviceIndices(), + in_opt_memory_priority, + n_plane) + ); + name = in_image_ptr->get_name(); + if (!name.empty()) + new_item_ptr->debug_name = std::move(name); + + m_items.push_back( + std::move(new_item_ptr) + ); + } - if (!is_alloc_supported(image_memory_types, - in_required_memory_features, - &filtered_memory_types) ) + m_per_object_pending_alloc_status[in_image_ptr] = true; +end: + return result; +} + +/** Please see header for specification */ +bool Anvil::MemoryAllocator::add_sparse_buffer_region(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + VkDeviceSize in_size, + MemoryFeatureFlags in_required_memory_features, + const uint32_t* in_opt_device_mask_ptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr, + const float& in_opt_memory_priority) +{ + uint32_t filtered_memory_types = 0; + const auto& memory_reqs = in_buffer_ptr->get_memory_requirements(); + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + std::unique_ptr new_item_ptr; + bool result = true; + std::string name; + + /* Sanity checks */ + anvil_assert(in_buffer_ptr != nullptr); + anvil_assert(m_backend_ptr->supports_baking () ); + anvil_assert(in_buffer_ptr->get_memory_requirements().size >= in_offset + in_size); + anvil_assert( in_opt_device_mask_ptr == nullptr || + (in_opt_device_mask_ptr != nullptr && m_backend_ptr->supports_device_masks() )); + + anvil_assert((in_required_memory_features & Anvil::MemoryFeatureFlagBits::PROTECTED_BIT) == 0); + anvil_assert((in_buffer_ptr->get_create_info_ptr()->get_create_flags() & Anvil::BufferCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0); + anvil_assert(do_bind_sparse_device_indices_sanity_check(in_opt_mgpu_bind_sparse_device_indices_ptr)); + + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } + + if (!do_external_memory_handle_type_sanity_checks(in_buffer_ptr->get_create_info_ptr()->get_exportable_external_memory_handle_types() ) ) { result = false; goto end; } - /* Store a new block item descriptor */ + /* Determine how much space we're going to need, what alignment we need + * to consider, and so on. 
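* The region's start offset must be a multiple of the buffer's reported alignment, and the
* candidate memory types come from memoryTypeBits filtered against the requested memory features.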
*/ + anvil_assert((in_offset % memory_reqs.alignment) == 0); + + if (!get_mem_types_supporting_mem_features(m_device_ptr, + memory_reqs.memoryTypeBits, + in_required_memory_features, + &filtered_memory_types) ) + { + result = false; + + goto end; + } + + /* Store a new block item descriptor. */ new_item_ptr.reset( - new Item(shared_from_this(), - in_image_ptr, - image_storage_size, - image_memory_types, - image_alignment, + new Item(this, + in_buffer_ptr, + in_offset, + in_size, + memory_reqs.memoryTypeBits, + memory_reqs.alignment, in_required_memory_features, - filtered_memory_types) + filtered_memory_types, + Anvil::ExternalMemoryHandleTypeFlagBits::NONE, +#if defined(_WIN32) + nullptr, /* in_alloc_external_nt_handle_info_ptr */ +#endif + in_buffer_ptr->requires_dedicated_allocation(), + (in_opt_device_mask_ptr != nullptr) ? *in_opt_device_mask_ptr : 0, + (in_opt_mgpu_peer_memory_reqs_ptr != nullptr) ? *in_opt_mgpu_peer_memory_reqs_ptr : MGPUPeerMemoryRequirements(), + (in_opt_mgpu_bind_sparse_device_indices_ptr != nullptr) ? *in_opt_mgpu_bind_sparse_device_indices_ptr : MGPUBindSparseDeviceIndices(), + in_opt_memory_priority) ); + name = in_buffer_ptr->get_name(); + if (!name.empty()) + new_item_ptr->debug_name = std::move(name); - m_items.push_back(new_item_ptr); + m_items.push_back( + std::move(new_item_ptr) + ); + + m_per_object_pending_alloc_status[in_buffer_ptr] = true; - m_per_object_pending_alloc_status[in_image_ptr.get()] = true; end: + anvil_assert(result); + return result; } /** Please see header for specification */ -bool Anvil::MemoryAllocator::add_sparse_image_miptail(std::shared_ptr in_image_ptr, - VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - MemoryFeatureFlags in_required_memory_features) +bool Anvil::MemoryAllocator::add_sparse_image_miptail(Anvil::Image* in_image_ptr, + Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + MemoryFeatureFlags in_required_memory_features, + const uint32_t* in_opt_device_mask_ptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr, + const float& in_opt_memory_priority) { const Anvil::SparseImageAspectProperties* aspect_props_ptr = nullptr; uint32_t filtered_memory_types = 0; - std::shared_ptr new_item_ptr; + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + std::unique_ptr new_item_ptr; uint32_t miptail_memory_types = 0; VkDeviceSize miptail_offset = static_cast(UINT64_MAX); VkDeviceSize miptail_size = 0; + const uint32_t n_plane = (in_aspect == Anvil::ImageAspectFlagBits::PLANE_1_BIT) ? 1 + : (in_aspect == Anvil::ImageAspectFlagBits::PLANE_2_BIT) ? 
2 + : 0; bool result = true; + std::string name; ANVIL_REDUNDANT_VARIABLE(result); /* Sanity checks */ - anvil_assert(in_image_ptr != nullptr); + anvil_assert(in_image_ptr != nullptr); + anvil_assert(m_backend_ptr->supports_baking () ); + anvil_assert(in_image_ptr->get_create_info_ptr()->get_n_layers() > in_n_layer); + anvil_assert(in_image_ptr->has_aspects (in_aspect) ); + anvil_assert( in_opt_device_mask_ptr == nullptr || + (in_opt_device_mask_ptr != nullptr && m_backend_ptr->supports_device_masks() )); + + anvil_assert((in_image_ptr->get_create_info_ptr()->get_create_flags() & Anvil::ImageCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0); + anvil_assert((in_required_memory_features & Anvil::MemoryFeatureFlagBits::PROTECTED_BIT) == 0); + anvil_assert(do_bind_sparse_device_indices_sanity_check (in_opt_mgpu_bind_sparse_device_indices_ptr)); + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } - anvil_assert(m_backend_ptr->supports_baking () ); - anvil_assert(in_image_ptr->is_sparse () ); - anvil_assert(in_image_ptr->get_image_n_layers() > in_n_layer); - anvil_assert(in_image_ptr->has_aspects (in_aspect) ); + if (!do_external_memory_handle_type_sanity_checks(in_image_ptr->get_create_info_ptr()->get_external_memory_handle_types()) ) + { + result = false; + + goto end; + } /* Extract aspect-specific properties which includes all the miptail data we're going to need */ result = in_image_ptr->get_sparse_image_aspect_properties(in_aspect, @@ -619,20 +1218,21 @@ bool Anvil::MemoryAllocator::add_sparse_image_miptail(std::shared_ptrflags & VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT) != 0 && - in_n_layer == 0 || - (aspect_props_ptr->flags & VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT) == 0); + anvil_assert(((aspect_props_ptr->flags & Anvil::SparseImageFormatFlagBits::SINGLE_MIPTAIL_BIT) != 0 && + in_n_layer == 0) || + ((aspect_props_ptr->flags & Anvil::SparseImageFormatFlagBits::SINGLE_MIPTAIL_BIT) == 0)); - /* Determine allocation properties */ - miptail_memory_types = in_image_ptr->get_image_memory_types(); + /* Determine allocation properties */ + miptail_memory_types = in_image_ptr->get_image_memory_types(n_plane); miptail_offset = aspect_props_ptr->mip_tail_offset + aspect_props_ptr->mip_tail_stride * in_n_layer; miptail_size = aspect_props_ptr->mip_tail_size; anvil_assert(miptail_size != 0); - if (!is_alloc_supported(miptail_memory_types, - in_required_memory_features, - &filtered_memory_types) ) + if (!get_mem_types_supporting_mem_features(m_device_ptr, + miptail_memory_types, + in_required_memory_features, + &filtered_memory_types) ) { result = false; @@ -641,64 +1241,108 @@ bool Anvil::MemoryAllocator::add_sparse_image_miptail(std::shared_ptrget_image_alignment(), + in_aspect, + in_image_ptr->get_image_alignment(n_plane), in_required_memory_features, - filtered_memory_types) + filtered_memory_types, + Anvil::ExternalMemoryHandleTypeFlagBits::NONE, /* in_opt_exportable_external_handle_types */ +#if defined(_WIN32) + nullptr, /* in_alloc_external_nt_handle_info_ptr */ +#endif + false, /* in_alloc_is_dedicated - sparse images do not supported dedicated allocs */ + (in_opt_device_mask_ptr != nullptr) ? *in_opt_device_mask_ptr : 0, + (in_opt_mgpu_peer_memory_reqs_ptr != nullptr) ? *in_opt_mgpu_peer_memory_reqs_ptr : MGPUPeerMemoryRequirements(), + (in_opt_mgpu_bind_sparse_device_indices_ptr != nullptr) ? 
*in_opt_mgpu_bind_sparse_device_indices_ptr : MGPUBindSparseDeviceIndices(), + in_opt_memory_priority) ); + name = in_image_ptr->get_name(); + if (!name.empty()) + new_item_ptr->debug_name = std::move(name); - m_items.push_back(new_item_ptr); + m_items.push_back( + std::move(new_item_ptr) + ); - m_per_object_pending_alloc_status[in_image_ptr.get()] = true; + m_per_object_pending_alloc_status[in_image_ptr] = true; end: return result; } /* Please see header for specification */ -bool Anvil::MemoryAllocator::add_sparse_image_subresource(std::shared_ptr in_image_ptr, - const VkImageSubresource& in_subresource, - const VkOffset3D& in_offset, - VkExtent3D in_extent, - MemoryFeatureFlags in_required_memory_features) +bool Anvil::MemoryAllocator::add_sparse_image_subresource(Anvil::Image* in_image_ptr, + const Anvil::ImageSubresource& in_subresource, + const VkOffset3D& in_offset, + VkExtent3D in_extent, + MemoryFeatureFlags in_required_memory_features, + const uint32_t* in_opt_device_mask_ptr, + const MGPUPeerMemoryRequirements* in_opt_mgpu_peer_memory_reqs_ptr, + const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr, + const float& in_opt_memory_priority) { - const Anvil::SparseImageAspectProperties* aspect_props_ptr = nullptr; - uint32_t component_size_bits[4] = {0}; - uint32_t filtered_memory_types = 0; + const Anvil::SparseImageAspectProperties* aspect_props_ptr = nullptr; + uint32_t component_size_bits[4] = {0}; + uint32_t filtered_memory_types = 0; + const auto image_format = in_image_ptr->get_create_info_ptr()->get_format(); uint32_t mip_size[3]; - std::shared_ptr new_item_ptr; + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + std::unique_ptr new_item_ptr; + const uint32_t n_plane = (in_subresource.aspect_mask == Anvil::ImageAspectFlagBits::PLANE_1_BIT) ? 1 + : (in_subresource.aspect_mask == Anvil::ImageAspectFlagBits::PLANE_2_BIT) ? 
2 + : 0; bool result = true; + const VkDeviceSize tile_size = in_image_ptr->get_image_alignment(n_plane); VkDeviceSize total_region_size_in_bytes = 0; + std::string name; ANVIL_REDUNDANT_VARIABLE(result); /* Sanity checks */ anvil_assert(in_image_ptr != nullptr); - anvil_assert(m_backend_ptr->supports_baking () ); - anvil_assert(in_image_ptr->is_sparse () ); - anvil_assert(in_image_ptr->has_aspects (in_subresource.aspectMask) ); - anvil_assert(in_image_ptr->get_image_n_mipmaps() > in_subresource.mipLevel); - anvil_assert(in_image_ptr->get_image_n_layers () > in_subresource.arrayLayer); + anvil_assert(m_backend_ptr->supports_baking () ); + anvil_assert(in_image_ptr->has_aspects (in_subresource.aspect_mask) ); + anvil_assert(in_image_ptr->get_n_mipmaps () > in_subresource.mip_level); + anvil_assert(in_image_ptr->get_create_info_ptr()->get_n_layers() > in_subresource.array_layer); + anvil_assert( in_opt_device_mask_ptr == nullptr || + (in_opt_device_mask_ptr != nullptr && + m_backend_ptr->supports_device_masks() )); anvil_assert(in_extent.depth >= 1); anvil_assert(in_extent.height >= 1); anvil_assert(in_extent.width >= 1); - anvil_assert(!Anvil::Formats::is_format_compressed(in_image_ptr->get_image_format() )); // TODO - anvil_assert((Anvil::Utils::is_pow2 (static_cast(in_subresource.aspectMask)) != 0)); // only permit a single aspect + anvil_assert((in_required_memory_features & Anvil::MemoryFeatureFlagBits::PROTECTED_BIT) == 0); + anvil_assert((in_image_ptr->get_create_info_ptr()->get_create_flags() & Anvil::ImageCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0); + anvil_assert(do_bind_sparse_device_indices_sanity_check(in_opt_mgpu_bind_sparse_device_indices_ptr)); + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } + + if (!do_external_memory_handle_type_sanity_checks(in_image_ptr->get_create_info_ptr()->get_external_memory_handle_types() ) ) + { + result = false; + + goto end; + } /* Extract image properties needed for calculations below. 
*/ - result = in_image_ptr->get_sparse_image_aspect_properties(static_cast(in_subresource.aspectMask), + result = in_image_ptr->get_sparse_image_aspect_properties(static_cast(in_subresource.aspect_mask.get_vk() ), &aspect_props_ptr); anvil_assert(result); - result = in_image_ptr->get_image_mipmap_size(in_subresource.mipLevel, + result = in_image_ptr->get_image_mipmap_size(in_subresource.mip_level, mip_size + 0, mip_size + 1, mip_size + 2); @@ -743,33 +1387,82 @@ bool Anvil::MemoryAllocator::add_sparse_image_subresource(std::shared_ptrget_image_format(), - component_size_bits + 0, - component_size_bits + 1, - component_size_bits + 2, - component_size_bits + 3); - - anvil_assert(component_size_bits[0] != 0 || - component_size_bits[1] != 0 || - component_size_bits[2] != 0 || - component_size_bits[3] != 0); - anvil_assert(((component_size_bits[0] + component_size_bits[1] + - component_size_bits[2] + component_size_bits[3]) % 8) == 0); - - total_region_size_in_bytes = (component_size_bits[0] + component_size_bits[1] + component_size_bits[2] + component_size_bits[3]) / 8 /* bits in byte */ - * in_extent.width - * in_extent.height - * in_extent.depth; + if (!Anvil::Formats::is_format_compressed(image_format)) + { + if (Anvil::Formats::is_format_yuv_khr(image_format) ) + { + Anvil::Formats::get_format_n_component_bits_yuv(image_format, + static_cast(in_subresource.aspect_mask.get_vk() ), + component_size_bits + 0, + component_size_bits + 1, + component_size_bits + 2, + component_size_bits + 3); + } + else + { + Anvil::Formats::get_format_n_component_bits_nonyuv(image_format, + component_size_bits + 0, + component_size_bits + 1, + component_size_bits + 2, + component_size_bits + 3); + } - /* The region size may be smaller than the required page size. Round it up if that's the case */ - const VkDeviceSize tile_size = in_image_ptr->get_memory_requirements().alignment; + anvil_assert( component_size_bits[0] != 0 || + component_size_bits[1] != 0 || + component_size_bits[2] != 0 || + component_size_bits[3] != 0); + anvil_assert(((component_size_bits[0] + component_size_bits[1] + + component_size_bits[2] + component_size_bits[3]) % 8) == 0); + + total_region_size_in_bytes = (component_size_bits[0] + component_size_bits[1] + component_size_bits[2] + component_size_bits[3]) / 8 /* bits in byte */ + * in_extent.width + * in_extent.height + * in_extent.depth; + } + else + { + uint32_t compressed_block_size[3] = + { + 1, + 1, + 1 + }; + uint32_t n_bytes_per_block = 0; + + if (!Anvil::Formats::get_compressed_format_block_size(image_format, + compressed_block_size, + &n_bytes_per_block) ) + { + result = false; + + goto end; + } + else + { + anvil_assert(compressed_block_size[0] != 0 && + compressed_block_size[1] != 0 && + compressed_block_size[2] != 0); + anvil_assert(n_bytes_per_block != 0); + + anvil_assert( (in_extent.width % compressed_block_size[0]) == 0); + anvil_assert( (in_extent.height % compressed_block_size[1]) == 0); + anvil_assert( (in_extent.depth % compressed_block_size[2]) == 0); + + total_region_size_in_bytes = (in_extent.width / compressed_block_size[0]) * + (in_extent.height / compressed_block_size[1]) * + (in_extent.depth / compressed_block_size[2]) * + n_bytes_per_block; + } + } + /* The region size may be smaller than the required page size. 
Round it up if that's the case */ total_region_size_in_bytes = Anvil::Utils::round_up(total_region_size_in_bytes, tile_size); - if (!is_alloc_supported(in_image_ptr->get_image_memory_types(), - in_required_memory_features, - &filtered_memory_types) ) + if (!get_mem_types_supporting_mem_features(m_device_ptr, + in_image_ptr->get_image_memory_types(n_plane), + in_required_memory_features, + &filtered_memory_types) ) { result = false; @@ -778,21 +1471,35 @@ bool Anvil::MemoryAllocator::add_sparse_image_subresource(std::shared_ptrget_image_memory_types(), + in_image_ptr->get_image_memory_types(n_plane), tile_size, in_required_memory_features, - filtered_memory_types) + filtered_memory_types, + Anvil::ExternalMemoryHandleTypeFlagBits::NONE, /* in_opt_exportable_external_handle_types */ +#if defined(_WIN32) + nullptr, /* in_alloc_external_nt_handle_info_ptr */ +#endif + false, /* in_alloc_is_dedicated - sparse images do not supported dedicated allocs */ + (in_opt_device_mask_ptr != nullptr) ? *in_opt_device_mask_ptr : 0, + (in_opt_mgpu_peer_memory_reqs_ptr != nullptr) ? *in_opt_mgpu_peer_memory_reqs_ptr : MGPUPeerMemoryRequirements(), + (in_opt_mgpu_bind_sparse_device_indices_ptr != nullptr) ? *in_opt_mgpu_bind_sparse_device_indices_ptr : MGPUBindSparseDeviceIndices(), + in_opt_memory_priority) ); + name = in_image_ptr->get_name(); + if (!name.empty()) + new_item_ptr->debug_name = std::move(name); - m_items.push_back(new_item_ptr); + m_items.push_back( + std::move(new_item_ptr) + ); - m_per_object_pending_alloc_status[in_image_ptr.get()] = true; + m_per_object_pending_alloc_status[in_image_ptr] = true; end: return result; @@ -801,12 +1508,21 @@ bool Anvil::MemoryAllocator::add_sparse_image_subresource(std::shared_ptr device_locked_ptr = std::shared_ptr(m_device_ptr); - bool needs_sparse_memory_binding = false; - bool result = false; - Anvil::SparseMemoryBindInfoID sparse_memory_bind_info_id = UINT32_MAX; - Anvil::Utils::SparseMemoryBindingUpdateInfo sparse_memory_binding; - std::shared_ptr this_ptr; + Anvil::SparseMemoryBindInfoID default_sparse_bind_info_id = UINT32_MAX; + std::map device_index_pair_to_sparse_bind_info_map; + std::vector fences; + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + bool needs_sparse_memory_binding = false; + bool result = false; + Anvil::SparseMemoryBindingUpdateInfo sparse_memory_binding; + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } if (!m_backend_ptr->supports_baking() ) { @@ -816,22 +1532,45 @@ bool Anvil::MemoryAllocator::bake() goto end; } - /* Sanity checks */ - anvil_assert(m_items.size() > 0); + if (m_items.size() == 0) + { + result = true; + + goto end; + } result = m_backend_ptr->bake(m_items); - anvil_assert(result); + if (!result) + { + m_items.clear(); + + goto end; + } /* Prepare a sparse memory binding structure, if we're going to need one */ for (auto item_iterator = m_items.begin(); - item_iterator != m_items.end() && !needs_sparse_memory_binding; + item_iterator != m_items.end(); ++item_iterator) { - switch ((*item_iterator)->type) + const auto& item_ptr = item_iterator->get(); + + switch (item_ptr->type) { case Anvil::MemoryAllocator::ITEM_TYPE_BUFFER: + case Anvil::MemoryAllocator::ITEM_TYPE_SPARSE_BUFFER_REGION: { - needs_sparse_memory_binding |= (*item_iterator)->buffer_ptr->is_sparse(); + if ((item_ptr->buffer_ptr->get_create_info_ptr()->get_type() == Anvil::BufferType::NO_ALLOC) && + ((item_ptr->buffer_ptr->get_create_info_ptr()->get_create_flags() & 
Anvil::BufferCreateFlagBits::SPARSE_BINDING_BIT) != 0)) + { + needs_sparse_memory_binding = true; + + for (auto index_pair_iterator = item_ptr->alloc_mgpu_bind_sparse_device_indices.begin(); + index_pair_iterator != item_ptr->alloc_mgpu_bind_sparse_device_indices.end(); + ++index_pair_iterator) + { + device_index_pair_to_sparse_bind_info_map[*index_pair_iterator] = UINT32_MAX; + } + } break; } @@ -840,7 +1579,22 @@ bool Anvil::MemoryAllocator::bake() case Anvil::MemoryAllocator::ITEM_TYPE_SPARSE_IMAGE_MIPTAIL: case Anvil::MemoryAllocator::ITEM_TYPE_SPARSE_IMAGE_SUBRESOURCE: { - needs_sparse_memory_binding |= (*item_iterator)->image_ptr->is_sparse(); + const auto& create_flags = (*item_iterator)->image_ptr->get_create_info_ptr()->get_create_flags(); + const bool uses_sparse_bindings = ((create_flags & Anvil::ImageCreateFlagBits::SPARSE_ALIASED_BIT) != 0) || + ((create_flags & Anvil::ImageCreateFlagBits::SPARSE_BINDING_BIT) != 0) || + ((create_flags & Anvil::ImageCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0); + + if (uses_sparse_bindings) + { + needs_sparse_memory_binding = true; + + for (auto index_pair_iterator = item_ptr->alloc_mgpu_bind_sparse_device_indices.begin(); + index_pair_iterator != item_ptr->alloc_mgpu_bind_sparse_device_indices.end(); + ++index_pair_iterator) + { + device_index_pair_to_sparse_bind_info_map[*index_pair_iterator] = UINT32_MAX; + } + } break; } @@ -854,17 +1608,48 @@ bool Anvil::MemoryAllocator::bake() if (needs_sparse_memory_binding) { - std::shared_ptr wait_fence_ptr = Anvil::Fence::create(m_device_ptr, - false); /* create_signalled */ + Anvil::FenceUniquePtr wait_fence_ptr; + + { + auto create_info_ptr = Anvil::FenceCreateInfo::create(m_device_ptr, + false); /* create_signalled */ - sparse_memory_binding.set_fence(wait_fence_ptr); + create_info_ptr->set_mt_safety(Anvil::MTSafety::DISABLED); + + wait_fence_ptr = Anvil::Fence::create(std::move(create_info_ptr) ); + } - sparse_memory_bind_info_id = sparse_memory_binding.add_bind_info(0, /* n_signal_semaphores */ - nullptr, /* opt_signal_semaphores_ptr */ - 0, /* n_wait_semaphores */ - nullptr); /* opt_wait_semaphores_ptr */ + sparse_memory_binding.set_fence(wait_fence_ptr.get() ); + fences.push_back (std::move(wait_fence_ptr) ); - anvil_assert(sparse_memory_bind_info_id != UINT32_MAX); + /* Always create the default bind info, which uses the implicit resource index = 0 and memory index = 0. */ + default_sparse_bind_info_id = sparse_memory_binding.add_bind_info(0, /* n_signal_semaphores */ + nullptr, /* opt_signal_semaphores_ptr */ + 0, /* n_wait_semaphores */ + nullptr); /* opt_wait_semaphores_ptr */ + + anvil_assert(default_sparse_bind_info_id != UINT32_MAX); + + /* Bind infos for user-supplied device indices. 
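One bind info is added per unique (resource device index, memory device index) pair collected above;
the map entry is then updated with the new bind info ID so the per-item memory updates appended later
can be routed to the matching bind info.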
*/ + for (auto device_index_pair_iter = device_index_pair_to_sparse_bind_info_map.begin(); + device_index_pair_iter != device_index_pair_to_sparse_bind_info_map.end(); + ++device_index_pair_iter) + { + auto& sparse_bind_device_index_pair = device_index_pair_iter->first; + auto sparse_memory_bind_info_id = sparse_memory_binding.add_bind_info(0, /* n_signal_semaphores */ + nullptr, /* opt_signal_semaphores_ptr */ + 0, /* n_wait_semaphores */ + nullptr); /* opt_wait_semaphores_ptr */ + + anvil_assert(sparse_memory_bind_info_id != UINT32_MAX); + + device_index_pair_iter->second = sparse_memory_bind_info_id; + + sparse_memory_binding.set_resource_device_index(sparse_memory_bind_info_id, + sparse_bind_device_index_pair.first); + sparse_memory_binding.set_memory_device_index (sparse_memory_bind_info_id, + sparse_bind_device_index_pair.second); + } } result = true; @@ -874,7 +1659,7 @@ bool Anvil::MemoryAllocator::bake() item_iterator != m_items.end(); ++item_iterator) { - auto item_ptr = *item_iterator; + auto item_ptr = item_iterator->get(); if (item_ptr->alloc_memory_block_ptr) { @@ -884,18 +1669,112 @@ bool Anvil::MemoryAllocator::bake() { case Anvil::MemoryAllocator::ITEM_TYPE_BUFFER: { - if (!item_ptr->buffer_ptr->is_sparse() ) + if ((item_ptr->buffer_ptr->get_create_info_ptr()->get_create_flags() & Anvil::BufferCreateFlagBits::SPARSE_BINDING_BIT) == 0) { - item_ptr->buffer_ptr->set_nonsparse_memory(item_ptr->alloc_memory_block_ptr); + if (m_post_bake_per_buffer_item_mem_assignment_callback_function != nullptr) + { + m_post_bake_per_buffer_item_mem_assignment_callback_function(item_ptr->buffer_ptr, + std::move(item_ptr->alloc_memory_block_ptr) ); + } + else + { + /* Bind with default device indices in MGPU case. */ + item_ptr->buffer_ptr->set_nonsparse_memory( + std::move(item_ptr->alloc_memory_block_ptr) + ); + } } else { - sparse_memory_binding.append_buffer_memory_update(sparse_memory_bind_info_id, - item_ptr->buffer_ptr, - 0, /* buffer_memory_start_offset */ - item_ptr->alloc_memory_block_ptr, - 0, /* opt_memory_block_start_offset */ - item_ptr->alloc_size); + if (m_post_bake_per_buffer_item_mem_assignment_callback_function != nullptr) + { + /* TODO: Sparse support */ + anvil_assert_fail(); + } + else + { + if (item_ptr->alloc_mgpu_bind_sparse_device_indices.empty()) + { + sparse_memory_binding.append_buffer_memory_update(default_sparse_bind_info_id, + item_ptr->buffer_ptr, + 0, /* buffer_memory_start_offset */ + item_ptr->alloc_memory_block_ptr.get(), + 0, /* opt_memory_block_start_offset */ + true, /* in_opt_memory_block_owned_by_buffer */ + item_ptr->alloc_size); + } + else + { + for (auto index_pair_iterator = item_ptr->alloc_mgpu_bind_sparse_device_indices.begin(); + index_pair_iterator != item_ptr->alloc_mgpu_bind_sparse_device_indices.end(); + ++index_pair_iterator) + { + const auto matching_pair = device_index_pair_to_sparse_bind_info_map.find(*index_pair_iterator); + const auto sparse_memory_bind_info_id = (matching_pair == device_index_pair_to_sparse_bind_info_map.end()) ? default_sparse_bind_info_id + : matching_pair->second; + + sparse_memory_binding.append_buffer_memory_update(sparse_memory_bind_info_id, + item_ptr->buffer_ptr, + 0, /* buffer_memory_start_offset */ + item_ptr->alloc_memory_block_ptr.get(), + 0, /* opt_memory_block_start_offset */ + true, /* in_opt_memory_block_owned_by_buffer */ + item_ptr->alloc_size); + } + } + + /* The memory block is now owned by the buffer. 
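append_buffer_memory_update() was invoked with in_opt_memory_block_owned_by_buffer set to true,
so the unique_ptr is release()'d here instead of being destroyed, preventing a double free.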
*/ + item_ptr->alloc_memory_block_ptr.release(); + } + } + + break; + } + + case Anvil::MemoryAllocator::ITEM_TYPE_SPARSE_BUFFER_REGION: + { + anvil_assert((item_ptr->buffer_ptr->get_create_info_ptr()->get_create_flags() & Anvil::BufferCreateFlagBits::SPARSE_ALIASED_BIT) != 0 || + (item_ptr->buffer_ptr->get_create_info_ptr()->get_create_flags() & Anvil::BufferCreateFlagBits::SPARSE_BINDING_BIT) != 0 || + (item_ptr->buffer_ptr->get_create_info_ptr()->get_create_flags() & Anvil::BufferCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0); + + if (m_post_bake_per_buffer_item_mem_assignment_callback_function != nullptr) + { + /* TODO: Sparse support */ + anvil_assert_fail(); + } + else + { + if (item_ptr->alloc_mgpu_bind_sparse_device_indices.empty()) + { + sparse_memory_binding.append_buffer_memory_update(default_sparse_bind_info_id, + item_ptr->buffer_ptr, + item_ptr->alloc_offset, + item_ptr->alloc_memory_block_ptr.get(), + 0, /* opt_memory_block_start_offset */ + true, /* in_opt_memory_block_owned_by_buffer */ + item_ptr->alloc_size); + } + else + { + for (auto index_pair_iterator = item_ptr->alloc_mgpu_bind_sparse_device_indices.begin(); + index_pair_iterator != item_ptr->alloc_mgpu_bind_sparse_device_indices.end(); + ++index_pair_iterator) + { + const auto matching_pair = device_index_pair_to_sparse_bind_info_map.find(*index_pair_iterator); + const auto sparse_memory_bind_info_id = (matching_pair == device_index_pair_to_sparse_bind_info_map.end()) ? default_sparse_bind_info_id + : matching_pair->second; + + sparse_memory_binding.append_buffer_memory_update(sparse_memory_bind_info_id, + item_ptr->buffer_ptr, + item_ptr->alloc_offset, + item_ptr->alloc_memory_block_ptr.get(), + 0, /* opt_memory_block_start_offset */ + true, /* in_opt_memory_block_owned_by_buffer */ + item_ptr->alloc_size); + } + } + + item_ptr->alloc_memory_block_ptr.release(); } break; @@ -903,19 +1782,67 @@ bool Anvil::MemoryAllocator::bake() case Anvil::MemoryAllocator::ITEM_TYPE_IMAGE_WHOLE: { - if (!item_ptr->image_ptr->is_sparse() ) + if ((item_ptr->image_ptr->get_create_info_ptr()->get_create_flags() & Anvil::ImageCreateFlagBits::SPARSE_BINDING_BIT) == 0) { - item_ptr->image_ptr->set_memory(item_ptr->alloc_memory_block_ptr); + if (m_post_bake_per_image_item_mem_assignment_callback_function != nullptr) + { + m_post_bake_per_image_item_mem_assignment_callback_function(item_ptr->image_ptr, + std::move(item_ptr->alloc_memory_block_ptr) + ); + } + else + { + /* Bind with default device indices in MGPU case. 
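set_memory() consumes the memory block via std::move(), so ownership passes from the allocator item to the image.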
*/ + item_ptr->image_ptr->set_memory( + std::move(item_ptr->alloc_memory_block_ptr) + ); + } } else { - sparse_memory_binding.append_opaque_image_memory_update(sparse_memory_bind_info_id, - item_ptr->image_ptr, - 0, /* resource_offset */ - item_ptr->alloc_size, - 0, /* flags */ - item_ptr->alloc_memory_block_ptr, - 0); /* opt_memory_block_start_offset */ + if (m_post_bake_per_image_item_mem_assignment_callback_function != nullptr) + { + /* TODO: Sparse support */ + anvil_assert_fail(); + } + else + { + if (item_ptr->alloc_mgpu_bind_sparse_device_indices.empty()) + { + sparse_memory_binding.append_opaque_image_memory_update(default_sparse_bind_info_id, + item_ptr->image_ptr, + 0, /* resource_offset */ + item_ptr->alloc_size, + Anvil::SparseMemoryBindFlagBits::NONE, + item_ptr->alloc_memory_block_ptr.get(), + 0, /* opt_memory_block_start_offset */ + true, /* in_opt_memory_block_owned_by_image */ + item_ptr->n_plane); + } + else + { + for (auto index_pair_iterator = item_ptr->alloc_mgpu_bind_sparse_device_indices.begin(); + index_pair_iterator != item_ptr->alloc_mgpu_bind_sparse_device_indices.end(); + ++index_pair_iterator) + { + const auto matching_pair = device_index_pair_to_sparse_bind_info_map.find(*index_pair_iterator); + const auto sparse_memory_bind_info_id = (matching_pair == device_index_pair_to_sparse_bind_info_map.end()) ? default_sparse_bind_info_id + : matching_pair->second; + + sparse_memory_binding.append_opaque_image_memory_update(sparse_memory_bind_info_id, + item_ptr->image_ptr, + 0, /* resource_offset */ + item_ptr->alloc_size, + Anvil::SparseMemoryBindFlagBits::NONE, + item_ptr->alloc_memory_block_ptr.get(), + 0, /* opt_memory_block_start_offset */ + true, /* in_opt_memory_block_owned_by_image */ + item_ptr->n_plane); + } + } + + item_ptr->alloc_memory_block_ptr.release(); + } } break; @@ -923,27 +1850,102 @@ bool Anvil::MemoryAllocator::bake() case Anvil::MemoryAllocator::ITEM_TYPE_SPARSE_IMAGE_MIPTAIL: { - sparse_memory_binding.append_opaque_image_memory_update(sparse_memory_bind_info_id, - item_ptr->image_ptr, - item_ptr->miptail_offset, - item_ptr->alloc_size, - 0, /* flags */ - item_ptr->alloc_memory_block_ptr, - 0); /* opt_memory_block_start_offset */ + if (m_post_bake_per_image_item_mem_assignment_callback_function != nullptr) + { + /* TODO: Sparse support */ + anvil_assert_fail(); + } + else + { + const Anvil::SparseMemoryBindFlagBits bind_flags = (item_ptr->alloc_image_aspect == Anvil::ImageAspectFlagBits::METADATA_BIT) ? Anvil::SparseMemoryBindFlagBits::BIND_METADATA_BIT + : Anvil::SparseMemoryBindFlagBits::NONE; + + if (item_ptr->alloc_mgpu_bind_sparse_device_indices.empty()) + { + sparse_memory_binding.append_opaque_image_memory_update(default_sparse_bind_info_id, + item_ptr->image_ptr, + item_ptr->miptail_offset, + item_ptr->alloc_size, + bind_flags, + item_ptr->alloc_memory_block_ptr.get(), + 0, /* opt_memory_block_start_offset */ + true, /* in_opt_memory_block_owned_by_image */ + item_ptr->n_plane); + } + else + { + for (auto index_pair_iterator = item_ptr->alloc_mgpu_bind_sparse_device_indices.begin(); + index_pair_iterator != item_ptr->alloc_mgpu_bind_sparse_device_indices.end(); + ++index_pair_iterator) + { + const auto matching_pair = device_index_pair_to_sparse_bind_info_map.find(*index_pair_iterator); + const auto sparse_memory_bind_info_id = (matching_pair == device_index_pair_to_sparse_bind_info_map.end()) ? 
default_sparse_bind_info_id + : matching_pair->second; + + sparse_memory_binding.append_opaque_image_memory_update(sparse_memory_bind_info_id, + item_ptr->image_ptr, + item_ptr->miptail_offset, + item_ptr->alloc_size, + bind_flags, + item_ptr->alloc_memory_block_ptr.get(), + 0, /* opt_memory_block_start_offset */ + true, /* in_opt_memory_block_owned_by_image */ + item_ptr->n_plane); + } + } + + + item_ptr->alloc_memory_block_ptr.release(); + } break; } case Anvil::MemoryAllocator::ITEM_TYPE_SPARSE_IMAGE_SUBRESOURCE: { - sparse_memory_binding.append_image_memory_update(sparse_memory_bind_info_id, - item_ptr->image_ptr, - item_ptr->subresource, - item_ptr->offset, - item_ptr->extent, - 0, /* flags */ - item_ptr->alloc_memory_block_ptr, - 0); /* opt_memory_block_start_offset */ + if (m_post_bake_per_image_item_mem_assignment_callback_function != nullptr) + { + /* TODO: Sparse support */ + anvil_assert_fail(); + } + else + { + if (item_ptr->alloc_mgpu_bind_sparse_device_indices.empty()) + { + sparse_memory_binding.append_image_memory_update(default_sparse_bind_info_id, + item_ptr->image_ptr, + item_ptr->subresource, + item_ptr->offset, + item_ptr->extent, + Anvil::SparseMemoryBindFlagBits::NONE, + item_ptr->alloc_memory_block_ptr.get(), + 0, /* opt_memory_block_start_offset */ + true); /* in_opt_memory_block_owned_by_image */ + } + else + { + for (auto index_pair_iterator = item_ptr->alloc_mgpu_bind_sparse_device_indices.begin(); + index_pair_iterator != item_ptr->alloc_mgpu_bind_sparse_device_indices.end(); + ++index_pair_iterator) + { + const auto matching_pair = device_index_pair_to_sparse_bind_info_map.find(*index_pair_iterator); + const auto sparse_memory_bind_info_id = (matching_pair == device_index_pair_to_sparse_bind_info_map.end()) ? default_sparse_bind_info_id + : matching_pair->second; + + sparse_memory_binding.append_image_memory_update(sparse_memory_bind_info_id, + item_ptr->image_ptr, + item_ptr->subresource, + item_ptr->offset, + item_ptr->extent, + Anvil::SparseMemoryBindFlagBits::NONE, + item_ptr->alloc_memory_block_ptr.get(), + 0, /* opt_memory_block_start_offset */ + true); /* in_opt_memory_block_owned_by_image */ + } + } + + item_ptr->alloc_memory_block_ptr.release(); + } break; } @@ -956,20 +1958,23 @@ bool Anvil::MemoryAllocator::bake() } } - /* If memory backing is needed for one or more sparse resources, bind these now */ - if (sparse_memory_bind_info_id != UINT32_MAX) + if (m_post_bake_per_buffer_item_mem_assignment_callback_function == nullptr) { - std::shared_ptr sparse_queue_ptr(device_locked_ptr->get_sparse_binding_queue(0) ); + /* If memory backing is needed for one or more sparse resources, bind these now */ + if (needs_sparse_memory_binding) + { + Anvil::Queue* sparse_queue_ptr(m_device_ptr->get_sparse_binding_queue(0) ); - result = sparse_queue_ptr->bind_sparse_memory(sparse_memory_binding); - anvil_assert(result); + result = sparse_queue_ptr->bind_sparse_memory(sparse_memory_binding); + anvil_assert(result); - /* Block until the sparse memory bindings are in place */ - vkWaitForFences(device_locked_ptr->get_device_vk(), - 1, /* fenceCount */ - sparse_memory_binding.get_fence()->get_fence_ptr(), - VK_FALSE, /* waitAll */ - UINT64_MAX); + /* Block until the sparse memory bindings are in place */ + Anvil::Vulkan::vkWaitForFences(m_device_ptr->get_device_vk(), + 1, /* fenceCount */ + sparse_memory_binding.get_fence()->get_fence_ptr(), + VK_FALSE, /* waitAll */ + UINT64_MAX); + } } /* If the user does not keep the memory allocator around and all items are assigned 
memory backing, @@ -978,15 +1983,13 @@ bool Anvil::MemoryAllocator::bake() * to prevent the premature destruction of the allocator, cache a shared ptr to this instance, so that * the allocator only goes out of scope when this function leaves. */ - this_ptr = shared_from_this(); - for (uint32_t n_item = 0; n_item < m_items.size(); ++n_item ) { - auto item_iterator = m_items.begin() + n_item; - auto item_ptr = *item_iterator; + auto item_iterator = m_items.begin() + n_item; + auto& item_ptr = *item_iterator; if (item_ptr->is_baked) { @@ -994,10 +1997,12 @@ bool Anvil::MemoryAllocator::bake() switch (item_ptr->type) { - case ITEM_TYPE_BUFFER: alloc_status_map_iterator = m_per_object_pending_alloc_status.find(item_ptr->buffer_ptr.get() ); break; + case ITEM_TYPE_BUFFER: /* fall-through */ + case ITEM_TYPE_SPARSE_BUFFER_REGION: alloc_status_map_iterator = m_per_object_pending_alloc_status.find(item_ptr->buffer_ptr); break; + case ITEM_TYPE_IMAGE_WHOLE: /* fall-through */ case ITEM_TYPE_SPARSE_IMAGE_MIPTAIL: /* fall-through */ - case ITEM_TYPE_SPARSE_IMAGE_SUBRESOURCE: alloc_status_map_iterator = m_per_object_pending_alloc_status.find(item_ptr->image_ptr.get() ); break; + case ITEM_TYPE_SPARSE_IMAGE_SUBRESOURCE: alloc_status_map_iterator = m_per_object_pending_alloc_status.find(item_ptr->image_ptr); break; default: { @@ -1013,62 +2018,65 @@ bool Anvil::MemoryAllocator::bake() } /* Perform post-alloc fill actions */ - for (const auto& current_item_ptr : m_items) + if (m_post_bake_per_buffer_item_mem_assignment_callback_function == nullptr) { - VkDeviceSize buffer_size = 0; - - if (current_item_ptr->type != Anvil::MemoryAllocator::ITEM_TYPE_BUFFER) + for (const auto& current_item_ptr : m_items) { - continue; - } + VkDeviceSize buffer_size = 0; - if (!current_item_ptr->is_baked) - { - continue; - } + if (current_item_ptr->type != Anvil::MemoryAllocator::ITEM_TYPE_BUFFER) + { + continue; + } - buffer_size = current_item_ptr->buffer_ptr->get_size(); + if (!current_item_ptr->is_baked) + { + continue; + } - if (current_item_ptr->buffer_ref_float_data_ptr != nullptr) - { - current_item_ptr->buffer_ptr->write(0, /* start_offset */ - buffer_size, - current_item_ptr->buffer_ref_float_data_ptr.get() ); - } - else - if (current_item_ptr->buffer_ref_float_vector_data_ptr != nullptr) - { - current_item_ptr->buffer_ptr->write(0, /* start_offset */ - buffer_size, - &(*current_item_ptr->buffer_ref_float_vector_data_ptr)[0]); - } - else - if (current_item_ptr->buffer_ref_uchar8_data_ptr != nullptr) - { - current_item_ptr->buffer_ptr->write(0, /* start_offset */ - buffer_size, - current_item_ptr->buffer_ref_uchar8_data_ptr.get() ); - } - else - if (current_item_ptr->buffer_ref_uchar8_vector_data_ptr != nullptr) - { - current_item_ptr->buffer_ptr->write(0, /* start_offset */ - buffer_size, - &(*current_item_ptr->buffer_ref_uchar8_vector_data_ptr)[0]); - } - else - if (current_item_ptr->buffer_ref_uint32_data_ptr != nullptr) - { - current_item_ptr->buffer_ptr->write(0, /* start_offset */ - buffer_size, - current_item_ptr->buffer_ref_uint32_data_ptr.get() ); - } - else - if (current_item_ptr->buffer_ref_uint32_vector_data_ptr != nullptr) - { - current_item_ptr->buffer_ptr->write(0, /* start_offset */ - buffer_size, - &(*current_item_ptr->buffer_ref_uint32_vector_data_ptr)[0]); + buffer_size = current_item_ptr->buffer_ptr->get_create_info_ptr()->get_size(); + + if (current_item_ptr->buffer_ref_float_data_ptr != nullptr) + { + current_item_ptr->buffer_ptr->write(0, /* start_offset */ + buffer_size, + 
current_item_ptr->buffer_ref_float_data_ptr.get() ); + } + else + if (current_item_ptr->buffer_ref_float_vector_data_ptr != nullptr) + { + current_item_ptr->buffer_ptr->write(0, /* start_offset */ + buffer_size, + &(*current_item_ptr->buffer_ref_float_vector_data_ptr)[0]); + } + else + if (current_item_ptr->buffer_ref_uchar8_data_ptr != nullptr) + { + current_item_ptr->buffer_ptr->write(0, /* start_offset */ + buffer_size, + current_item_ptr->buffer_ref_uchar8_data_ptr.get() ); + } + else + if (current_item_ptr->buffer_ref_uchar8_vector_data_ptr != nullptr) + { + current_item_ptr->buffer_ptr->write(0, /* start_offset */ + buffer_size, + &(*current_item_ptr->buffer_ref_uchar8_vector_data_ptr)[0]); + } + else + if (current_item_ptr->buffer_ref_uint32_data_ptr != nullptr) + { + current_item_ptr->buffer_ptr->write(0, /* start_offset */ + buffer_size, + current_item_ptr->buffer_ref_uint32_data_ptr.get() ); + } + else + if (current_item_ptr->buffer_ref_uint32_vector_data_ptr != nullptr) + { + current_item_ptr->buffer_ptr->write(0, /* start_offset */ + buffer_size, + &(*current_item_ptr->buffer_ref_uint32_vector_data_ptr)[0]); + } } } @@ -1080,14 +2088,23 @@ bool Anvil::MemoryAllocator::bake() } end: + if (mutex_lock.owns_lock() ) + { + mutex_lock.unlock(); + } + return result; } /* Please see header for specification */ -std::shared_ptr Anvil::MemoryAllocator::create_oneshot(std::weak_ptr in_device_ptr) +Anvil::MemoryAllocatorUniquePtr Anvil::MemoryAllocator::create_oneshot(const Anvil::BaseDevice* in_device_ptr, + MTSafety in_mt_safety) { std::shared_ptr backend_ptr; - std::shared_ptr result_ptr; + const bool mt_safe (Anvil::Utils::convert_mt_safety_enum_to_boolean(in_mt_safety, + in_device_ptr) ); + std::unique_ptr result_ptr (nullptr, + std::default_delete() ); backend_ptr.reset( new Anvil::MemoryAllocatorBackends::OneShot(in_device_ptr) @@ -1097,18 +2114,23 @@ std::shared_ptr Anvil::MemoryAllocator::create_oneshot(s { result_ptr.reset( new Anvil::MemoryAllocator(in_device_ptr, - backend_ptr) + backend_ptr, + mt_safe) ); } - return result_ptr; + return std::move(result_ptr); } /* Please see header for specification */ -std::shared_ptr Anvil::MemoryAllocator::create_vma(std::weak_ptr in_device_ptr) +Anvil::MemoryAllocatorUniquePtr Anvil::MemoryAllocator::create_vma(const Anvil::BaseDevice* in_device_ptr, + MTSafety in_mt_safety) { std::shared_ptr backend_ptr; - std::shared_ptr result_ptr; + const bool mt_safe (Anvil::Utils::convert_mt_safety_enum_to_boolean(in_mt_safety, + in_device_ptr) ); + std::unique_ptr result_ptr(nullptr, + std::default_delete() ); backend_ptr = Anvil::MemoryAllocatorBackends::VMA::create(in_device_ptr); @@ -1116,37 +2138,67 @@ std::shared_ptr Anvil::MemoryAllocator::create_vma(std:: { result_ptr.reset( new Anvil::MemoryAllocator(in_device_ptr, - backend_ptr) + std::move(backend_ptr), + mt_safe) ); } - return result_ptr; + return std::move(result_ptr); +} + +bool Anvil::MemoryAllocator::do_external_memory_handle_type_sanity_checks(const Anvil::ExternalMemoryHandleTypeFlags& in_external_memory_handle_types) const +{ + bool result = true; + + if (in_external_memory_handle_types != 0) + { + if (!m_backend_ptr->supports_external_memory_handles(in_external_memory_handle_types) ) + { + anvil_assert(m_backend_ptr->supports_external_memory_handles(in_external_memory_handle_types) ); + + result = false; + } + } + + return result; } /** Tells whether or not a given set of memory types supports the requested memory features. 
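On success, *out_opt_filtered_memory_types_ptr (if provided) receives the subset of in_memory_types
exposing all requested features; if the parent instance forces a single memory type for all
allocations, only that type is reported.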
*/ -bool Anvil::MemoryAllocator::is_alloc_supported(uint32_t in_memory_types, - Anvil::MemoryFeatureFlags in_memory_features, - uint32_t* out_opt_filtered_memory_types_ptr) const +bool Anvil::MemoryAllocator::get_mem_types_supporting_mem_features(const Anvil::BaseDevice* in_device_ptr, + uint32_t in_memory_types, + const Anvil::MemoryFeatureFlags& in_memory_features, + uint32_t* out_opt_filtered_memory_types_ptr) { - std::shared_ptr device_locked_ptr (m_device_ptr); - const bool is_coherent_memory_required (((in_memory_features & MEMORY_FEATURE_FLAG_HOST_COHERENT) != 0) ); - const bool is_device_local_memory_required (((in_memory_features & MEMORY_FEATURE_FLAG_DEVICE_LOCAL) != 0) ); - const bool is_host_cached_memory_required (((in_memory_features & MEMORY_FEATURE_FLAG_HOST_CACHED) != 0) ); - const bool is_lazily_allocated_memory_required(((in_memory_features & MEMORY_FEATURE_FLAG_LAZILY_ALLOCATED) != 0) ); - const bool is_mappable_memory_required (((in_memory_features & MEMORY_FEATURE_FLAG_MAPPABLE) != 0) ); - const auto& memory_props (device_locked_ptr->get_physical_device_memory_properties() ); - bool result (true); + const bool is_coherent_memory_required (((in_memory_features & Anvil::MemoryFeatureFlagBits::HOST_COHERENT_BIT) != 0) ); + const bool is_device_local_memory_required (((in_memory_features & Anvil::MemoryFeatureFlagBits::DEVICE_LOCAL_BIT) != 0) ); + const bool is_host_cached_memory_required (((in_memory_features & Anvil::MemoryFeatureFlagBits::HOST_CACHED_BIT) != 0) ); + const bool is_lazily_allocated_memory_required(((in_memory_features & Anvil::MemoryFeatureFlagBits::LAZILY_ALLOCATED_BIT) != 0) ); + const bool is_mappable_memory_required (((in_memory_features & Anvil::MemoryFeatureFlagBits::MAPPABLE_BIT) != 0) ); + const bool is_multi_instance_memory_required (((in_memory_features & Anvil::MemoryFeatureFlagBits::MULTI_INSTANCE_BIT) != 0) ); + const bool is_protected_memory_required (((in_memory_features & Anvil::MemoryFeatureFlagBits::PROTECTED_BIT) != 0) ); + const auto& memory_props (in_device_ptr->get_physical_device_memory_properties() ); + const auto n_forced_mem_type (in_device_ptr->get_parent_instance()->get_create_info_ptr()->get_n_memory_type_to_use_for_all_allocs() ); + bool result (true); + + if (n_forced_mem_type != UINT32_MAX) + { + in_memory_types = 1 << n_forced_mem_type; + + goto end; + } /* Filter out memory types that do not support features requested at creation time */ for (uint32_t n_memory_type = 0; (1u << n_memory_type) <= in_memory_types; ++n_memory_type) { - if ((is_coherent_memory_required && !(memory_props.types[n_memory_type].flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) || - (is_device_local_memory_required && !(memory_props.types[n_memory_type].flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT)) || - (is_host_cached_memory_required && !(memory_props.types[n_memory_type].flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT)) || - (is_lazily_allocated_memory_required && !(memory_props.types[n_memory_type].flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT)) || - (is_mappable_memory_required && !(memory_props.types[n_memory_type].flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)) ) + if ((is_coherent_memory_required && ((memory_props.types[n_memory_type].flags & Anvil::MemoryPropertyFlagBits::HOST_COHERENT_BIT)) == 0) || + (is_device_local_memory_required && ((memory_props.types[n_memory_type].flags & Anvil::MemoryPropertyFlagBits::DEVICE_LOCAL_BIT)) == 0) || + (is_host_cached_memory_required && ((memory_props.types[n_memory_type].flags & 
Anvil::MemoryPropertyFlagBits::HOST_CACHED_BIT)) == 0) || + (is_lazily_allocated_memory_required && ((memory_props.types[n_memory_type].flags & Anvil::MemoryPropertyFlagBits::LAZILY_ALLOCATED_BIT)) == 0) || + (is_mappable_memory_required && ((memory_props.types[n_memory_type].flags & Anvil::MemoryPropertyFlagBits::HOST_VISIBLE_BIT)) == 0) || + (is_multi_instance_memory_required && ((memory_props.types[n_memory_type].heap_ptr->flags & Anvil::MemoryHeapFlagBits::MULTI_INSTANCE_BIT_KHR)) == 0) || + (is_protected_memory_required && ((memory_props.types[n_memory_type].flags & Anvil::MemoryPropertyFlagBits::PROTECTED_BIT)) == 0) ) { in_memory_types &= ~(1 << n_memory_type); } @@ -1160,12 +2212,13 @@ bool Anvil::MemoryAllocator::is_alloc_supported(uint32_t in_mem } } - if (out_opt_filtered_memory_types_ptr != nullptr) +end: + if (result && + out_opt_filtered_memory_types_ptr != nullptr) { *out_opt_filtered_memory_types_ptr = in_memory_types; } -end: return result; } @@ -1173,7 +2226,16 @@ bool Anvil::MemoryAllocator::is_alloc_supported(uint32_t in_mem void Anvil::MemoryAllocator::on_is_alloc_pending_for_buffer_query(CallbackArgument* in_callback_arg_ptr) { IsBufferMemoryAllocPendingQueryCallbackArgument* query_ptr = dynamic_cast(in_callback_arg_ptr); - auto alloc_status_map_iterator = m_per_object_pending_alloc_status.find (query_ptr->buffer_ptr.get() ); + auto alloc_status_map_iterator = m_per_object_pending_alloc_status.find (query_ptr->buffer_ptr); + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } if (alloc_status_map_iterator != m_per_object_pending_alloc_status.end() ) { @@ -1185,7 +2247,16 @@ void Anvil::MemoryAllocator::on_is_alloc_pending_for_buffer_query(CallbackArgume void Anvil::MemoryAllocator::on_is_alloc_pending_for_image_query(CallbackArgument* in_callback_arg_ptr) { IsImageMemoryAllocPendingQueryCallbackArgument* query_ptr = dynamic_cast(in_callback_arg_ptr); - auto alloc_status_map_iterator = m_per_object_pending_alloc_status.find (query_ptr->image_ptr.get() ); + auto alloc_status_map_iterator = m_per_object_pending_alloc_status.find (query_ptr->image_ptr); + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } if (alloc_status_map_iterator != m_per_object_pending_alloc_status.end() ) { @@ -1205,6 +2276,16 @@ void Anvil::MemoryAllocator::on_implicit_bake_needed() /* Please see header for specification */ void Anvil::MemoryAllocator::set_post_bake_callback(MemoryAllocatorBakeCallbackFunction in_post_bake_callback_function) { + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } + anvil_assert(m_post_bake_callback_function == nullptr); if (m_post_bake_callback_function == nullptr) @@ -1212,3 +2293,45 @@ void Anvil::MemoryAllocator::set_post_bake_callback(MemoryAllocatorBakeCallbackF m_post_bake_callback_function = in_post_bake_callback_function; } } + +bool Anvil::MemoryAllocator::do_bind_sparse_device_indices_sanity_check(const MGPUBindSparseDeviceIndices* in_opt_mgpu_bind_sparse_device_indices_ptr) const +{ + if (in_opt_mgpu_bind_sparse_device_indices_ptr == nullptr) + { + return true; + } + else + { + std::set resource_indices; + std::set index_pairs; + + for (auto pair_iterator = in_opt_mgpu_bind_sparse_device_indices_ptr->cbegin(); + pair_iterator != 
in_opt_mgpu_bind_sparse_device_indices_ptr->cend(); + ++pair_iterator) + { + const auto& pair = *pair_iterator; + auto found_resource_index = resource_indices.find(pair.first); + auto found_pair = index_pairs.find(pair); + + if (found_resource_index != resource_indices.end()) + { + return false; + } + else + { + resource_indices.insert(pair.first); + } + + if (found_pair != index_pairs.end()) + { + return false; + } + else + { + index_pairs.insert(pair); + } + } + } + + return true; +} diff --git a/src/misc/memory_block_create_info.cpp b/src/misc/memory_block_create_info.cpp new file mode 100644 index 00000000..553aaf67 --- /dev/null +++ b/src/misc/memory_block_create_info.cpp @@ -0,0 +1,296 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#include "misc/memory_block_create_info.h" +#include "wrappers/device.h" +#include "wrappers/memory_block.h" +#include "wrappers/physical_device.h" +#include + + +Anvil::MemoryBlockCreateInfoUniquePtr Anvil::MemoryBlockCreateInfo::create_derived(MemoryBlock* in_parent_memory_block_ptr, + VkDeviceSize in_start_offset, + VkDeviceSize in_size) +{ + const uint32_t n_physical_devices = static_cast(in_parent_memory_block_ptr->get_create_info_ptr()->m_physical_devices.size() ); + auto result_ptr = Anvil::MemoryBlockCreateInfoUniquePtr(nullptr, + std::default_delete() ); + + anvil_assert(in_start_offset + in_size <= in_parent_memory_block_ptr->get_create_info_ptr()->get_start_offset() + in_parent_memory_block_ptr->get_create_info_ptr()->get_size() ); + + result_ptr.reset( + new Anvil::MemoryBlockCreateInfo(Anvil::MemoryBlockType::DERIVED, + in_parent_memory_block_ptr->get_create_info_ptr()->m_allowed_memory_bits, + const_cast(in_parent_memory_block_ptr->get_create_info_ptr()->m_device_ptr), + VK_NULL_HANDLE, /* in_memory */ + in_parent_memory_block_ptr->get_create_info_ptr()->m_memory_features, + in_parent_memory_block_ptr->get_create_info_ptr()->m_memory_type_index, + n_physical_devices, + Anvil::OnMemoryBlockReleaseCallbackFunction(), + (n_physical_devices != 0) ? 
&in_parent_memory_block_ptr->get_create_info_ptr()->m_physical_devices.at(0) + : nullptr, + in_parent_memory_block_ptr, + in_size, + in_start_offset) + ); + + return result_ptr; +} + +Anvil::MemoryBlockCreateInfoUniquePtr Anvil::MemoryBlockCreateInfo::create_derived_with_custom_delete_proc(const Anvil::BaseDevice* in_device_ptr, + VkDeviceMemory in_memory, + uint32_t in_allowed_memory_bits, + Anvil::MemoryFeatureFlags in_memory_features, + uint32_t in_memory_type_index, + VkDeviceSize in_size, + VkDeviceSize in_start_offset, + Anvil::OnMemoryBlockReleaseCallbackFunction in_on_release_callback_function) +{ + auto result_ptr = Anvil::MemoryBlockCreateInfoUniquePtr(nullptr, + std::default_delete() ); + + anvil_assert(in_device_ptr->get_type() == Anvil::DeviceType::SINGLE_GPU); /* todo: pass physical device array below accordingly */ + + result_ptr.reset( + new Anvil::MemoryBlockCreateInfo(Anvil::MemoryBlockType::DERIVED_WITH_CUSTOM_DELETE_PROC, + in_allowed_memory_bits, + in_device_ptr, + in_memory, + in_memory_features, + in_memory_type_index, + 0, /* in_n_physical_devices */ + in_on_release_callback_function, + nullptr, /* in_physical_device_ptr_ptr */ + nullptr, /* in_parent_memory_block_ptr */ + in_size, + in_start_offset) + ); + + return result_ptr; +} + +Anvil::MemoryBlockCreateInfoUniquePtr Anvil::MemoryBlockCreateInfo::create_regular(const Anvil::BaseDevice* in_device_ptr, + uint32_t in_allowed_memory_bits, + VkDeviceSize in_size, + Anvil::MemoryFeatureFlags in_memory_features) +{ + std::vector physical_devices; + auto result_ptr = Anvil::MemoryBlockCreateInfoUniquePtr(nullptr, + std::default_delete() ); + + if (in_device_ptr->get_type() == Anvil::DeviceType::MULTI_GPU) + { + const auto device_mgpu_ptr = dynamic_cast(in_device_ptr); + const uint32_t n_physical_devices = device_mgpu_ptr->get_n_physical_devices(); + + anvil_assert(n_physical_devices > 0); + + physical_devices.resize(n_physical_devices); + + for (uint32_t n_physical_device = 0; + n_physical_device < n_physical_devices; + ++n_physical_device) + { + physical_devices.at(n_physical_device) = device_mgpu_ptr->get_physical_device(n_physical_device); + } + } + else + { + /* Moot */ + } + + result_ptr.reset( + new Anvil::MemoryBlockCreateInfo(Anvil::MemoryBlockType::REGULAR, + in_allowed_memory_bits, + in_device_ptr, + VK_NULL_HANDLE, /* in_memory */ + in_memory_features, + 0, /* in_memory_type_index */ + static_cast(physical_devices.size() ), + OnMemoryBlockReleaseCallbackFunction(), + (physical_devices.size() > 0) ? 
&physical_devices.at(0) + : nullptr, + nullptr, /* in_parent_memory_block_ptr */ + in_size, + 0) /* in_start_offset */ + ); + + return result_ptr; +} + +Anvil::MemoryBlockCreateInfoUniquePtr Anvil::MemoryBlockCreateInfo::create_with_memory_type(const Anvil::BaseDevice* in_device_ptr, + uint32_t in_allowed_memory_bits, + VkDeviceSize in_size, + Anvil::MemoryFeatureFlags in_memory_features, + uint32_t in_memory_type_index) +{ + std::vector physical_devices; + auto result_ptr = Anvil::MemoryBlockCreateInfoUniquePtr(nullptr, + std::default_delete() ); + + if (in_device_ptr->get_type() == Anvil::DeviceType::MULTI_GPU) + { + const auto device_mgpu_ptr = dynamic_cast(in_device_ptr); + const uint32_t n_physical_devices = device_mgpu_ptr->get_n_physical_devices(); + + anvil_assert(n_physical_devices > 0); + + physical_devices.resize(n_physical_devices); + + for (uint32_t n_physical_device = 0; + n_physical_device < n_physical_devices; + ++n_physical_device) + { + physical_devices.at(n_physical_device) = device_mgpu_ptr->get_physical_device(n_physical_device); + } + } + else + { + /* Moot */ + } + + { + /* Check that in_memory_type_index does not exceed the number of available memory types */ + anvil_assert(static_cast(in_memory_type_index) < in_device_ptr->get_physical_device_memory_properties().types.size()); + + const auto& device_features = in_device_ptr->get_physical_device_memory_properties().types[in_memory_type_index].features; + ANVIL_REDUNDANT_VARIABLE_CONST(device_features); + + /* Check that the selected memory type supports the requested memory features */ + anvil_assert((device_features & in_memory_features) == in_memory_features); + } + + result_ptr.reset( + new Anvil::MemoryBlockCreateInfo(Anvil::MemoryBlockType::REGULAR_WITH_MEMORY_TYPE, + in_allowed_memory_bits, + in_device_ptr, + VK_NULL_HANDLE, /* in_memory */ + in_memory_features, + in_memory_type_index, + static_cast(physical_devices.size() ), + OnMemoryBlockReleaseCallbackFunction(), + (physical_devices.size() > 0) ?
&physical_devices.at(0) + : nullptr, + nullptr, /* in_parent_memory_block_ptr */ + in_size, + 0) /* in_start_offset */ + ); + + return result_ptr; +} + +Anvil::MemoryBlockCreateInfo::MemoryBlockCreateInfo(const Anvil::MemoryBlockType& in_type, + const uint32_t& in_allowed_memory_bits, + const Anvil::BaseDevice* in_device_ptr, + VkDeviceMemory in_memory, + const Anvil::MemoryFeatureFlags& in_memory_features, + const uint32_t& in_memory_type_index, + const uint32_t& in_n_physical_devices, + const Anvil::OnMemoryBlockReleaseCallbackFunction& in_on_release_callback_function, + const Anvil::PhysicalDevice* const* in_opt_physical_device_ptr_ptr, + Anvil::MemoryBlock* in_parent_memory_block_ptr, + const VkDeviceSize& in_size, + const VkDeviceSize& in_start_offset) + :m_allowed_memory_bits (in_allowed_memory_bits), + m_dedicated_allocation_buffer_ptr (nullptr), + m_dedicated_allocation_image_ptr (nullptr), + m_device_mask (0), + m_device_ptr (in_device_ptr), + m_exportable_external_memory_handle_types (Anvil::ExternalMemoryHandleTypeFlagBits::NONE), +#ifdef _WIN32 + m_exportable_nt_handle_info_specified (false), +#endif + m_external_handle_import_info_specified (false), + m_imported_external_memory_handle_type (Anvil::ExternalMemoryHandleTypeFlagBits::NONE), + m_memory (in_memory), + m_memory_features (in_memory_features), + m_memory_priority (FLT_MAX), + m_memory_type_index (in_memory_type_index), + m_mt_safety (Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE), + m_on_release_callback_function (in_on_release_callback_function), + m_parent_memory_block_ptr (in_parent_memory_block_ptr), + m_size (in_size), + m_start_offset (in_start_offset), + m_type (in_type), + m_use_dedicated_allocation (false) +{ + if (in_parent_memory_block_ptr != nullptr) + { + m_device_mask = in_parent_memory_block_ptr->get_create_info_ptr()->m_device_mask; + m_physical_devices = in_parent_memory_block_ptr->get_create_info_ptr()->m_physical_devices; + } + else + if (in_n_physical_devices != 0) + { + anvil_assert(in_opt_physical_device_ptr_ptr != nullptr); + + m_physical_devices.resize(in_n_physical_devices); + + for (uint32_t n_physical_device = 0; + n_physical_device < in_n_physical_devices; + ++n_physical_device) + { + m_device_mask |= 1 << (in_opt_physical_device_ptr_ptr[n_physical_device]->get_device_group_device_index() ); + + m_physical_devices.at(n_physical_device) = in_opt_physical_device_ptr_ptr[n_physical_device]; + } + } + else + { + if (m_device_ptr->get_type() == Anvil::DeviceType::MULTI_GPU) + { + const auto device_mgpu_ptr = dynamic_cast(m_device_ptr); + const uint32_t n_physical_devices = device_mgpu_ptr->get_n_physical_devices(); + + for (uint32_t n_physical_device = 0; + n_physical_device < n_physical_devices; + ++n_physical_device) + { + m_device_mask |= 1 << (device_mgpu_ptr->get_physical_device(n_physical_device)->get_device_group_device_index() ); + } + } + } +} + +Anvil::MemoryFeatureFlags Anvil::MemoryBlockCreateInfo::get_memory_features() const +{ + if (m_parent_memory_block_ptr != nullptr) + { + return m_parent_memory_block_ptr->get_create_info_ptr()->get_memory_features(); + } + else + { + return m_memory_features; + } +} + +/** Returns the memory type index the memory block was allocated from */ +uint32_t Anvil::MemoryBlockCreateInfo::get_memory_type_index() const +{ + if (m_parent_memory_block_ptr != nullptr) + { + return m_parent_memory_block_ptr->get_create_info_ptr()->get_memory_type_index(); + } + else + { + return m_memory_type_index; + } +} diff --git a/src/misc/object_tracker.cpp 
b/src/misc/object_tracker.cpp index 932885d0..33eafe4e 100644 --- a/src/misc/object_tracker.cpp +++ b/src/misc/object_tracker.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -32,20 +32,15 @@ static Anvil::ObjectTracker* object_tracker_ptr = nullptr; Anvil::ObjectTracker::ObjectTracker() :CallbacksSupportProvider(OBJECT_TRACKER_CALLBACK_ID_COUNT) { - memset(m_n_objects_allocated_array, - 0, - sizeof(m_n_objects_allocated_array) ); + /* Stub */ } /* Please see header for specification */ void Anvil::ObjectTracker::destroy() { - if (object_tracker_ptr != nullptr) - { - delete object_tracker_ptr; + delete object_tracker_ptr; - object_tracker_ptr = nullptr; - } + object_tracker_ptr = nullptr; } /* Please see header for specification */ @@ -64,26 +59,22 @@ void Anvil::ObjectTracker::check_for_leaks() const { std::unique_lock lock(m_cs); - for (ObjectType current_object_type = OBJECT_TYPE_FIRST; - current_object_type < OBJECT_TYPE_COUNT; - current_object_type = static_cast(current_object_type + 1)) + for (const auto& current_object_type_alloc_data : m_object_allocations) { - const uint32_t n_object_allocations = (uint32_t) m_object_allocations[current_object_type].size(); + const uint32_t n_object_allocations = static_cast(current_object_type_alloc_data.second.size() ); if (n_object_allocations > 0) { fprintf(stdout, "The following %s instances have not been released:\n", - get_object_type_name(current_object_type) ); + get_object_type_name(current_object_type_alloc_data.first) ); - for (uint32_t n_allocation = 0; - n_allocation < n_object_allocations; - ++n_allocation) + for (const auto& current_alloc : current_object_type_alloc_data.second) { fprintf(stdout, "[%d]. %p\n", - m_object_allocations[current_object_type][n_allocation].n_allocation, - m_object_allocations[current_object_type][n_allocation].object_ptr); + current_alloc.n_allocation, + current_alloc.object_ptr); } fprintf(stdout, @@ -98,60 +89,63 @@ void Anvil::ObjectTracker::check_for_leaks() const * * @return As per description. 
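/* Illustrative sketch, not taken from the patch: a typical teardown-time leak check built on the
 * ObjectTracker changes above. It assumes the tracker singleton is reached through an accessor
 * such as ObjectTracker::get(), which is not shown in this diff; destroy() is the static teardown
 * helper visible in the hunk above. check_for_leaks() walks m_object_allocations and prints every
 * instance that was never released, labelling each entry with get_object_type_name().
 */
static void report_leaks_at_teardown()
{
    Anvil::ObjectTracker::get()->check_for_leaks();

    /* Release the singleton once no further Anvil objects are going to be created. */
    Anvil::ObjectTracker::destroy();
}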
**/ -const char* Anvil::ObjectTracker::get_object_type_name(ObjectType in_object_type) const +const char* Anvil::ObjectTracker::get_object_type_name(const Anvil::ObjectType& in_object_type) const { - static const char* result_array[] = + const char* result_ptr = "?!"; + + switch (in_object_type) { - "Buffer", - "Buffer View", - "Command Buffer", - "Command Pool", - "Compute Pipeline Manager", - "Descriptor Pool", - "Descriptor Set", - "Descriptor Set Group", - "Descriptor Set Layout", - "Device", - "Event", - "Fence", - "Framebuffer", - "Graphics Pipeline Manager", - "Image", - "Image View", - "Instance", - "Memory Block", - "Physical Device", - "Pipeline Cache", - "Pipeline Layout", - "Pipeline Layout Manager", - "Query Pool", - "Queue", - "Render Pass", - "Rendering Surface", - "Sampler", - "Semaphore", - "Shader Module", - "Swapchain", - - "GLSL shader -> SPIR-V generator", - "Graphics Pipeline (fake)" - }; - - static_assert(sizeof(result_array) / sizeof(result_array[0]) == OBJECT_TYPE_COUNT, - "Number of object types change detected - update result_array."); - - return result_array[in_object_type]; + case Anvil::ObjectType::BUFFER: result_ptr = "Buffer"; break; + case Anvil::ObjectType::BUFFER_VIEW: result_ptr = "Buffer View"; break; + case Anvil::ObjectType::COMMAND_BUFFER: result_ptr = "Command Buffer"; break; + case Anvil::ObjectType::COMMAND_POOL: result_ptr = "Command Pool"; break; + case Anvil::ObjectType::DESCRIPTOR_POOL: result_ptr = "Descriptor Pool"; break; + case Anvil::ObjectType::DESCRIPTOR_SET: result_ptr = "Descriptor Set"; break; + case Anvil::ObjectType::DESCRIPTOR_SET_LAYOUT: result_ptr = "Descriptor Set Layout"; break; + case Anvil::ObjectType::DESCRIPTOR_UPDATE_TEMPLATE: result_ptr = "Descriptor Update Template"; break; + case Anvil::ObjectType::DEVICE: result_ptr = "Device"; break; + case Anvil::ObjectType::EVENT: result_ptr = "Event"; break; + case Anvil::ObjectType::FENCE: result_ptr = "Fence"; break; + case Anvil::ObjectType::FRAMEBUFFER: result_ptr = "Framebuffer"; break; + case Anvil::ObjectType::IMAGE: result_ptr = "Image"; break; + case Anvil::ObjectType::IMAGE_VIEW: result_ptr = "Image View"; break; + case Anvil::ObjectType::INSTANCE: result_ptr = "Instance"; break; + case Anvil::ObjectType::PHYSICAL_DEVICE: result_ptr = "Physical Device"; break; + case Anvil::ObjectType::PIPELINE_CACHE: result_ptr = "Pipeline Cache"; break; + case Anvil::ObjectType::PIPELINE_LAYOUT: result_ptr = "Pipeline Layout"; break; + case Anvil::ObjectType::QUERY_POOL: result_ptr = "Query Pool"; break; + case Anvil::ObjectType::QUEUE: result_ptr = "Queue"; break; + case Anvil::ObjectType::RENDER_PASS: result_ptr = "Render Pass"; break; + case Anvil::ObjectType::RENDERING_SURFACE: result_ptr = "Rendering Surface"; break; + case Anvil::ObjectType::SAMPLER: result_ptr = "Sampler"; break; + case Anvil::ObjectType::SEMAPHORE: result_ptr = "Semaphore"; break; + case Anvil::ObjectType::SHADER_MODULE: result_ptr = "Shader Module"; break; + case Anvil::ObjectType::SWAPCHAIN: result_ptr = "Swapchain"; break; + + case Anvil::ObjectType::ANVIL_COMPUTE_PIPELINE_MANAGER: result_ptr = "Anvil Compute Pipeline Manager"; break; + case Anvil::ObjectType::ANVIL_DESCRIPTOR_SET_GROUP: result_ptr = "Anvil Descriptor Set Group"; break; + case Anvil::ObjectType::ANVIL_DESCRIPTOR_SET_LAYOUT_MANAGER: result_ptr = "Anvil Descriptor Set Layout Manager"; break; + case Anvil::ObjectType::ANVIL_GLSL_SHADER_TO_SPIRV_GENERATOR: result_ptr = "Anvil GLSL Shader->SPIRV Generator"; break; + case 
Anvil::ObjectType::ANVIL_GRAPHICS_PIPELINE_MANAGER: result_ptr = "Anvil Graphics Pipeline Manager"; break; + case Anvil::ObjectType::ANVIL_MEMORY_BLOCK: result_ptr = "Anvil Memory Block"; break; + case Anvil::ObjectType::ANVIL_PIPELINE_LAYOUT_MANAGER: result_ptr = "Anvil Pipeline Layout Manager"; break; + + default: + { + anvil_assert_fail(); + } + } + + return result_ptr; } /* Please see header for specification */ -void* Anvil::ObjectTracker::get_object_at_index(ObjectType in_object_type, - uint32_t in_alloc_index) const +void* Anvil::ObjectTracker::get_object_at_index(const ObjectType& in_object_type, + uint32_t in_alloc_index) const { std::unique_lock lock (m_cs); void* result(nullptr); - anvil_assert(in_object_type < OBJECT_TYPE_COUNT); - if (m_object_allocations[in_object_type].size() > in_alloc_index) { result = m_object_allocations[in_object_type][in_alloc_index].object_ptr; @@ -161,11 +155,10 @@ void* Anvil::ObjectTracker::get_object_at_index(ObjectType in_object_type, } /* Please see header for specification */ -void Anvil::ObjectTracker::register_object(ObjectType in_object_type, - void* in_object_ptr) +void Anvil::ObjectTracker::register_object(const ObjectType& in_object_type, + void* in_object_ptr) { - anvil_assert(in_object_ptr != nullptr); - anvil_assert(in_object_type < OBJECT_TYPE_COUNT); + anvil_assert(in_object_ptr != nullptr); { std::unique_lock lock(m_cs); @@ -178,13 +171,22 @@ void Anvil::ObjectTracker::register_object(ObjectType in_object_type, OnObjectRegisteredCallbackArgument callback_arg(in_object_type, in_object_ptr); - callback_safe(OBJECT_TRACKER_CALLBACK_ID_ON_OBJECT_REGISTERED, - &callback_arg); + if (in_object_type == Anvil::ObjectType::ANVIL_GLSL_SHADER_TO_SPIRV_GENERATOR) + { + callback_safe(OBJECT_TRACKER_CALLBACK_ID_ON_GLSL_SHADER_TO_SPIRV_GENERATOR_OBJECT_REGISTERED, + &callback_arg); + } + else + if (in_object_type == Anvil::ObjectType::SHADER_MODULE) + { + callback_safe(OBJECT_TRACKER_CALLBACK_ID_ON_SHADER_MODULE_OBJECT_REGISTERED, + &callback_arg); + } } /* Please see header for specification */ -void Anvil::ObjectTracker::unregister_object(ObjectType in_object_type, - void* in_object_ptr) +void Anvil::ObjectTracker::unregister_object(const ObjectType& in_object_type, + void* in_object_ptr) { OnObjectAboutToBeUnregisteredCallbackArgument callback_arg(in_object_type, in_object_ptr); @@ -206,20 +208,29 @@ void Anvil::ObjectTracker::unregister_object(ObjectType in_object_type, } /* Notify any observers about the event. 
*/ - if (in_object_type == OBJECT_TYPE_DEVICE) + if (in_object_type == Anvil::ObjectType::DEVICE) { callback_safe(OBJECT_TRACKER_CALLBACK_ID_ON_DEVICE_OBJECT_ABOUT_TO_BE_UNREGISTERED, &callback_arg); } else - if (in_object_type == OBJECT_TYPE_PIPELINE_LAYOUT) + if (in_object_type == Anvil::ObjectType::ANVIL_GLSL_SHADER_TO_SPIRV_GENERATOR) + { + callback_safe(OBJECT_TRACKER_CALLBACK_ID_ON_GLSL_SHADER_TO_SPIRV_GENERATOR_OBJECT_ABOUT_TO_BE_UNREGISTERED, + &callback_arg); + } + else + if (in_object_type == Anvil::ObjectType::PIPELINE_LAYOUT) { callback_safe(OBJECT_TRACKER_CALLBACK_ID_ON_PIPELINE_LAYOUT_OBJECT_ABOUT_TO_BE_UNREGISTERED, &callback_arg); } - - callback_safe(OBJECT_TRACKER_CALLBACK_ID_ON_OBJECT_ABOUT_TO_BE_UNREGISTERED, - &callback_arg); + else + if (in_object_type == Anvil::ObjectType::SHADER_MODULE) + { + callback_safe(OBJECT_TRACKER_CALLBACK_ID_ON_SHADER_MODULE_OBJECT_ABOUT_TO_BE_UNREGISTERED, + &callback_arg); + } end: ; diff --git a/src/misc/page_tracker.cpp b/src/misc/page_tracker.cpp index 573e1955..0d23473e 100644 --- a/src/misc/page_tracker.cpp +++ b/src/misc/page_tracker.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -19,7 +19,8 @@ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // - +#include "misc/types.h" +#include "wrappers/memory_block.h" #include "misc/debug.h" #include "misc/page_tracker.h" @@ -38,22 +39,15 @@ Anvil::PageTracker::PageTracker(VkDeviceSize in_region_size, } /** Please see header for specification */ -std::shared_ptr Anvil::PageTracker::get_memory_block(VkDeviceSize in_start_offset_page_aligned, - VkDeviceSize* out_memory_region_start_offset_ptr) +Anvil::MemoryBlock* Anvil::PageTracker::get_memory_block(VkDeviceSize in_start_offset, + VkDeviceSize in_size, + VkDeviceSize* out_memory_region_start_offset_ptr) const { - std::shared_ptr result_ptr; - - /* Sanity checks */ - if ((in_start_offset_page_aligned % m_page_size) != 0) - { - anvil_assert( (in_start_offset_page_aligned % m_page_size) == 0); - - goto end; - } + Anvil::MemoryBlock* result_ptr = nullptr; - if (in_start_offset_page_aligned + m_page_size > m_n_total_pages * m_page_size) + if (in_size > m_page_size) { - anvil_assert(!(in_start_offset_page_aligned + m_page_size > m_n_total_pages * m_page_size) ); + anvil_assert(!(in_size > m_page_size) ); goto end; } @@ -61,11 +55,11 @@ std::shared_ptr Anvil::PageTracker::get_memory_block(VkDevic /* Handle the request */ for (const auto& current_memory_block : m_memory_blocks) { - if (current_memory_block.start_offset <= in_start_offset_page_aligned && - current_memory_block.start_offset + current_memory_block.size >= in_start_offset_page_aligned + m_page_size) + if (current_memory_block.start_offset <= in_start_offset && + current_memory_block.start_offset + current_memory_block.size >= in_start_offset + in_size) { result_ptr = current_memory_block.memory_block_ptr; - *out_memory_region_start_offset_ptr = current_memory_block.start_offset + in_start_offset_page_aligned; + *out_memory_region_start_offset_ptr = in_start_offset - current_memory_block.start_offset; break; } @@ -76,15 +70,17 @@ std::shared_ptr Anvil::PageTracker::get_memory_block(VkDevic } /** Please see header for specification */ -bool 
Anvil::PageTracker::set_binding(std::shared_ptr in_memory_block_ptr, - VkDeviceSize in_memory_block_start_offset, - VkDeviceSize in_start_offset, - VkDeviceSize in_size) +bool Anvil::PageTracker::set_binding(MemoryBlock* in_memory_block_ptr, + VkDeviceSize in_memory_block_start_offset, + VkDeviceSize in_start_offset, + VkDeviceSize in_size) { - uint32_t n_pages; - uint32_t occupancy_item_start_index; - uint32_t occupancy_vec_start_index; - bool result = false; + const auto end_offset_page_aligned = Anvil::Utils::round_up(in_start_offset + in_size, + m_page_size); + uint32_t n_pages; + uint32_t occupancy_item_start_index; + const uint32_t pages_per_vec_item = 32; + bool result = false; /* Sanity checks */ if (in_start_offset + in_size > m_region_size) @@ -101,18 +97,16 @@ bool Anvil::PageTracker::set_binding(std::shared_ptr in_memory_bloc goto end; } - if ((in_size % m_page_size) != 0) + if (((in_start_offset + in_size) % m_page_size) != 0 && + end_offset_page_aligned != m_region_size) { - anvil_assert(!(in_size % m_page_size) != 0); + anvil_assert(!((in_start_offset + in_size) % m_page_size) != 0 && + end_offset_page_aligned != m_region_size); goto end; } /* Store the memory block binding */ - n_pages = static_cast(in_size / m_page_size); - occupancy_item_start_index = static_cast(in_start_offset % m_page_size); - occupancy_vec_start_index = static_cast(in_start_offset / m_page_size / 32 /* pages in a single vec item */); - for (auto mem_binding_iterator = m_memory_blocks.begin(); mem_binding_iterator != m_memory_blocks.end(); ++mem_binding_iterator) @@ -201,34 +195,37 @@ bool Anvil::PageTracker::set_binding(std::shared_ptr in_memory_bloc in_start_offset) ); - /* Update page occupancy info - * - * TODO: Perf of the code below could definitely be improved - */ + /* Update page occupancy info */ + n_pages = static_cast(in_size / m_page_size); + occupancy_item_start_index = static_cast(in_start_offset / m_page_size); + + /* TODO: Perf of the code below could definitely be improved */ for (uint32_t n_page = 0; n_page < n_pages; ++n_page) { - uint32_t occupancy_item_index = occupancy_item_start_index + n_page; - uint32_t occupancy_vec_index = occupancy_vec_start_index; - - while (occupancy_item_index >= 32) - { - occupancy_item_index -= 32; - occupancy_vec_index ++; - } + const uint32_t occupancy_item_index = (occupancy_item_start_index + n_page) % pages_per_vec_item; + const uint32_t occupancy_vec_index = (occupancy_item_start_index + n_page) / pages_per_vec_item; + const uint32_t n_page_mask = m_sparse_page_occupancy[occupancy_vec_index].raw & (1 << occupancy_item_index); + /* Change the number of memory-backed pages only if a bit would be flipped */ if (in_memory_block_ptr != nullptr) { - m_sparse_page_occupancy[occupancy_vec_index].raw |= (1 << occupancy_item_index); + if (n_page_mask == 0) + { + m_sparse_page_occupancy[occupancy_vec_index].raw |= (1 << occupancy_item_index); - ++m_n_pages_with_memory_backing; + ++m_n_pages_with_memory_backing; + } } else { - m_sparse_page_occupancy[occupancy_vec_index].raw &= ~(1 << occupancy_item_index); + if (n_page_mask != 0) + { + m_sparse_page_occupancy[occupancy_vec_index].raw &= ~(1 << occupancy_item_index); - --m_n_pages_with_memory_backing; + --m_n_pages_with_memory_backing; + } } } diff --git a/src/misc/pools.cpp b/src/misc/pools.cpp index 16711a82..56ec2f5e 100644 --- a/src/misc/pools.cpp +++ b/src/misc/pools.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. 
+// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -25,23 +25,23 @@ #include "wrappers/command_buffer.h" #include "wrappers/command_pool.h" -std::shared_ptr Anvil::PrimaryCommandBufferPoolWorker::create_item() +Anvil::PrimaryCommandBufferUniquePtr Anvil::PrimaryCommandBufferPoolWorker::create_item() { return m_parent_command_pool_ptr->alloc_primary_level_command_buffer(); } -void Anvil::PrimaryCommandBufferPoolWorker::reset_item(std::shared_ptr in_item_ptr) +void Anvil::PrimaryCommandBufferPoolWorker::reset_item(Anvil::PrimaryCommandBufferUniquePtr& in_item_ptr) { in_item_ptr->reset(false /* should_release_resources */); } -std::shared_ptr Anvil::SecondaryCommandBufferPoolWorker::create_item() +Anvil::SecondaryCommandBufferUniquePtr Anvil::SecondaryCommandBufferPoolWorker::create_item() { return m_parent_command_pool_ptr->alloc_secondary_level_command_buffer(); } -void Anvil::SecondaryCommandBufferPoolWorker::reset_item(std::shared_ptr in_item_ptr) +void Anvil::SecondaryCommandBufferPoolWorker::reset_item(Anvil::SecondaryCommandBufferUniquePtr& in_item_ptr) { in_item_ptr->reset(false /* should_release_resources */); } \ No newline at end of file diff --git a/src/misc/render_pass_create_info.cpp b/src/misc/render_pass_create_info.cpp new file mode 100644 index 00000000..3e154269 --- /dev/null +++ b/src/misc/render_pass_create_info.cpp @@ -0,0 +1,1432 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// + +#include "misc/render_pass_create_info.h" +#include "wrappers/device.h" +#include +#include + +#if defined(max) + #undef max +#endif + +Anvil::RenderPassCreateInfo::RenderPassCreateInfo(const Anvil::BaseDevice* in_device_ptr) + :m_device_ptr (in_device_ptr), + m_multiview_enabled (false), + m_update_preserved_attachments(false) +{ + anvil_assert(in_device_ptr != nullptr); +} + +Anvil::RenderPassCreateInfo::~RenderPassCreateInfo() +{ + /* Stub */ +} + +/* Please see header for specification */ +bool Anvil::RenderPassCreateInfo::add_color_attachment(Anvil::Format in_format, + Anvil::SampleCountFlagBits in_sample_count, + Anvil::AttachmentLoadOp in_load_op, + Anvil::AttachmentStoreOp in_store_op, + Anvil::ImageLayout in_initial_layout, + Anvil::ImageLayout in_final_layout, + bool in_may_alias, + RenderPassAttachmentID* out_attachment_id_ptr) +{ + uint32_t new_attachment_index = UINT32_MAX; + bool result = false; + + if (out_attachment_id_ptr == nullptr) + { + anvil_assert(out_attachment_id_ptr != nullptr); + + goto end; + } + + new_attachment_index = static_cast<uint32_t>(m_attachments.size() ); + *out_attachment_id_ptr = new_attachment_index; + + m_attachments.push_back(RenderPassAttachment(in_format, + in_sample_count, + in_load_op, + in_store_op, + in_initial_layout, + in_final_layout, + in_may_alias, + new_attachment_index) ); + + result = true; + +end: + return result; +} + +/** Adds a new dependency to the internal data model. + * + * @param in_destination_subpass_ptr Pointer to the descriptor of the destination subpass. + * If nullptr, it is assumed an external destination is requested. + * @param in_source_subpass_ptr Pointer to the descriptor of the source subpass. + * If nullptr, it is assumed an external source is requested. + * @param in_source_stage_mask Source pipeline stage mask. + * @param in_destination_stage_mask Destination pipeline stage mask. + * @param in_source_access_mask Source access mask. + * @param in_destination_access_mask Destination access mask. + * @param in_dependency_flags Dependency flags to use for the new dependency. + * + * @return true if the dependency was added successfully; false otherwise.
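/* Illustrative sketch, not taken from the patch: how the public dependency helpers declared further
 * below (add_subpass_to_subpass_dependency() and friends) funnel into add_dependency(). Only the
 * function signatures come from this file; the flag-bit spellings used for the stage, access and
 * dependency masks are assumptions made for the example.
 */
static void add_example_dependency(Anvil::RenderPassCreateInfo* in_rp_create_info_ptr,
                                   Anvil::SubPassID             in_producer_subpass_id,
                                   Anvil::SubPassID             in_consumer_subpass_id)
{
    /* Make the producer subpass' color writes visible to the consumer subpass' fragment-shader reads.
     * Duplicate dependencies are silently skipped, since add_dependency() checks m_subpass_dependencies
     * before appending a new entry.
     */
    in_rp_create_info_ptr->add_subpass_to_subpass_dependency(in_producer_subpass_id,
                                                             in_consumer_subpass_id,
                                                             Anvil::PipelineStageFlagBits::COLOR_ATTACHMENT_OUTPUT_BIT, /* in_source_stage_mask       */
                                                             Anvil::PipelineStageFlagBits::FRAGMENT_SHADER_BIT,         /* in_destination_stage_mask  */
                                                             Anvil::AccessFlagBits::COLOR_ATTACHMENT_WRITE_BIT,         /* in_source_access_mask      */
                                                             Anvil::AccessFlagBits::INPUT_ATTACHMENT_READ_BIT,          /* in_destination_access_mask */
                                                             Anvil::DependencyFlagBits::BY_REGION_BIT);
}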
+ * + **/ +bool Anvil::RenderPassCreateInfo::add_dependency(SubPass* in_destination_subpass_ptr, + SubPass* in_source_subpass_ptr, + Anvil::PipelineStageFlags in_source_stage_mask, + Anvil::PipelineStageFlags in_destination_stage_mask, + Anvil::AccessFlags in_source_access_mask, + Anvil::AccessFlags in_destination_access_mask, + Anvil::DependencyFlags in_dependency_flags) +{ + auto new_dep = SubPassDependency(in_destination_stage_mask, + in_destination_subpass_ptr, + in_source_stage_mask, + in_source_subpass_ptr, + in_source_access_mask, + in_destination_access_mask, + in_dependency_flags); + + if (std::find(m_subpass_dependencies.begin(), + m_subpass_dependencies.end(), + new_dep) == m_subpass_dependencies.end() ) + { + m_subpass_dependencies.push_back(new_dep); + } + + return true; +} + +/* Please see header for specification */ +bool Anvil::RenderPassCreateInfo::add_depth_stencil_attachment(Anvil::Format in_format, + Anvil::SampleCountFlagBits in_sample_count, + Anvil::AttachmentLoadOp in_depth_load_op, + Anvil::AttachmentStoreOp in_depth_store_op, + Anvil::AttachmentLoadOp in_stencil_load_op, + Anvil::AttachmentStoreOp in_stencil_store_op, + Anvil::ImageLayout in_initial_layout, + Anvil::ImageLayout in_final_layout, + bool in_may_alias, + RenderPassAttachmentID* out_attachment_id_ptr) +{ + uint32_t new_attachment_index = UINT32_MAX; + bool result = false; + + if (out_attachment_id_ptr == nullptr) + { + anvil_assert(out_attachment_id_ptr != nullptr); + + goto end; + } + + new_attachment_index = static_cast(m_attachments.size() ); + *out_attachment_id_ptr = new_attachment_index; + + m_attachments.push_back(RenderPassAttachment(in_format, + in_sample_count, + in_depth_load_op, + in_depth_store_op, + in_stencil_load_op, + in_stencil_store_op, + in_initial_layout, + in_final_layout, + in_may_alias, + new_attachment_index) ); + + result = true; +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::RenderPassCreateInfo::add_external_to_subpass_dependency(SubPassID in_destination_subpass_id, + Anvil::PipelineStageFlags in_source_stage_mask, + Anvil::PipelineStageFlags in_destination_stage_mask, + Anvil::AccessFlags in_source_access_mask, + Anvil::AccessFlags in_destination_access_mask, + Anvil::DependencyFlags in_dependency_flags) +{ + SubPass* destination_subpass_ptr = nullptr; + bool result = false; + + if (m_subpasses.size() <= in_destination_subpass_id) + { + anvil_assert(!(m_subpasses.size() <= in_destination_subpass_id) ); + + goto end; + } + + destination_subpass_ptr = m_subpasses[in_destination_subpass_id].get(); + + result = add_dependency(destination_subpass_ptr, + nullptr, /* source_subpass_ptr */ + in_source_stage_mask, + in_destination_stage_mask, + in_source_access_mask, + in_destination_access_mask, + in_dependency_flags); +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::RenderPassCreateInfo::add_self_subpass_dependency(SubPassID in_destination_subpass_id, + Anvil::PipelineStageFlags in_source_stage_mask, + Anvil::PipelineStageFlags in_destination_stage_mask, + Anvil::AccessFlags in_source_access_mask, + Anvil::AccessFlags in_destination_access_mask, + Anvil::DependencyFlags in_dependency_flags) +{ + SubPass* destination_subpass_ptr = nullptr; + bool result = false; + + if (m_subpasses.size() <= in_destination_subpass_id) + { + anvil_assert(!(m_subpasses.size() <= in_destination_subpass_id) ); + + goto end; + } + + destination_subpass_ptr = m_subpasses[in_destination_subpass_id].get(); + result = 
add_dependency(destination_subpass_ptr, + destination_subpass_ptr, + in_source_stage_mask, + in_destination_stage_mask, + in_source_access_mask, + in_destination_access_mask, + in_dependency_flags); +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::RenderPassCreateInfo::add_subpass(SubPassID* out_subpass_id_ptr) +{ + uint32_t new_subpass_index = UINT32_MAX; + bool result = false; + + if (out_subpass_id_ptr == nullptr) + { + anvil_assert(out_subpass_id_ptr != nullptr); + + goto end; + } + + new_subpass_index = static_cast<uint32_t>(m_subpasses.size() ); + + /* Spawn a new descriptor */ + { + std::unique_ptr<SubPass> new_subpass_ptr( + new SubPass(new_subpass_index) + ); + + m_subpasses.push_back( + std::move(new_subpass_ptr) + ); + + *out_subpass_id_ptr = new_subpass_index; + } + + result = true; +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::RenderPassCreateInfo::add_subpass_color_attachment(SubPassID in_subpass_id, + Anvil::ImageLayout in_input_layout, + RenderPassAttachmentID in_attachment_id, + uint32_t in_location, + const RenderPassAttachmentID* in_attachment_resolve_id_ptr) +{ + return add_subpass_color_input_attachment(in_subpass_id, + true, /* is_color_attachment */ + in_input_layout, + in_attachment_id, + in_location, + (in_attachment_resolve_id_ptr != nullptr), + (in_attachment_resolve_id_ptr != nullptr) ? *in_attachment_resolve_id_ptr + : UINT32_MAX, + Anvil::ImageAspectFlagBits::NONE); /* in_aspects_accessed */ +} + +/** Adds a new attachment to the specified subpass. + * + * @param in_subpass_id ID of the subpass to update. The subpass must have been earlier + * created with an add_subpass() call. + * @param in_is_color_attachment true if the added attachment is a color attachment; false if it's + * an input attachment. + * @param in_layout Layout to use for the attachment when executing the subpass. + * The driver takes care of transitioning the attachment to the requested layout + * before the subpass commands start executing. + * @param in_attachment_id ID of the render-pass attachment this sub-pass attachment should + * refer to. + * @param in_attachment_location Location under which the specified attachment should be accessible. + * @param in_should_resolve true if the specified attachment is multisample and should be + * resolved at the end of the sub-pass. + * @param in_resolve_attachment_id If @param in_should_resolve is true, this argument should specify the + * ID of the render-pass attachment to which the resolved data should + * be written. + * + * @return true if the function executed successfully, false otherwise.
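/* Illustrative sketch, not taken from the patch: the typical call sequence for the attachment and
 * subpass helpers defined in this file. Construction mirrors the constructor near the top of the
 * file (a device pointer is required); the specific Format, load/store op and ImageLayout enum
 * member spellings used below are assumptions made for the example.
 */
static void build_single_subpass_render_pass_info(const Anvil::BaseDevice* in_device_ptr)
{
    Anvil::RenderPassCreateInfo   render_pass_create_info(in_device_ptr);
    Anvil::RenderPassAttachmentID color_attachment_id     = UINT32_MAX;
    Anvil::SubPassID              subpass_id              = UINT32_MAX;

    /* 1. Describe the render-pass attachment.. */
    render_pass_create_info.add_color_attachment(Anvil::Format::B8G8R8A8_UNORM,
                                                 Anvil::SampleCountFlagBits::_1_BIT,
                                                 Anvil::AttachmentLoadOp::CLEAR,
                                                 Anvil::AttachmentStoreOp::STORE,
                                                 Anvil::ImageLayout::UNDEFINED,       /* in_initial_layout */
                                                 Anvil::ImageLayout::PRESENT_SRC_KHR, /* in_final_layout   */
                                                 false,                               /* in_may_alias      */
                                                &color_attachment_id);

    /* 2. ..spawn a subpass.. */
    render_pass_create_info.add_subpass(&subpass_id);

    /* 3. ..and expose the attachment at color location 0 of that subpass (no resolve attachment). */
    render_pass_create_info.add_subpass_color_attachment(subpass_id,
                                                         Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,
                                                         color_attachment_id,
                                                         0,        /* in_location                  */
                                                         nullptr); /* in_attachment_resolve_id_ptr */
}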
+ * + **/ +bool Anvil::RenderPassCreateInfo::add_subpass_color_input_attachment(SubPassID in_subpass_id, + bool in_is_color_attachment, + Anvil::ImageLayout in_layout, + RenderPassAttachmentID in_attachment_id, + uint32_t in_attachment_location, + bool in_should_resolve, + RenderPassAttachmentID in_resolve_attachment_id, + const Anvil::ImageAspectFlags& in_aspects_accessed) +{ + bool result = false; + LocationToSubPassAttachmentMap* subpass_attachments_ptr = nullptr; + SubPass* subpass_ptr = nullptr; + + /* Retrieve the subpass descriptor */ + if (in_subpass_id >= m_subpasses.size() ) + { + anvil_assert(!(in_subpass_id >= m_subpasses.size() )); + + goto end; + } + else + { + subpass_ptr = m_subpasses.at(in_subpass_id).get(); + } + + /* Sanity checks */ + if (in_attachment_id >= m_attachments.size() ) + { + anvil_assert(!(in_attachment_id >= m_attachments.size()) ); + + goto end; + } + + if (in_should_resolve && + in_resolve_attachment_id >= m_attachments.size() ) + { + anvil_assert(!(in_should_resolve && in_resolve_attachment_id >= m_attachments.size()) ); + + goto end; + } + + /* Make sure the attachment location is not already assigned an attachment */ + subpass_attachments_ptr = (in_is_color_attachment) ? &subpass_ptr->color_attachments_map + : &subpass_ptr->input_attachments_map; + + if (subpass_attachments_ptr->find(in_attachment_location) != subpass_attachments_ptr->end() ) + { + anvil_assert(!(subpass_attachments_ptr->find(in_attachment_location) != subpass_attachments_ptr->end()) ); + + goto end; + } + + /* Add the attachment */ + (*subpass_attachments_ptr)[in_attachment_location] = SubPassAttachment(in_attachment_id, + in_layout, + in_resolve_attachment_id, + in_aspects_accessed, + Anvil::ResolveModeFlagBits::NONE, + Anvil::ResolveModeFlagBits::NONE); + + if (in_should_resolve) + { + anvil_assert(in_is_color_attachment); + + subpass_ptr->resolved_attachments_map[in_attachment_location] = SubPassAttachment(in_resolve_attachment_id, + in_layout, + UINT32_MAX, /* in_opt_resolve_attachment_index */ + in_aspects_accessed, + Anvil::ResolveModeFlagBits::NONE, /* in_depth_resolve_mode */ + Anvil::ResolveModeFlagBits::NONE); /* in_stencil_resolve_mode */ + } + + if (subpass_ptr->n_highest_location_used < in_attachment_location) + { + subpass_ptr->n_highest_location_used = in_attachment_location; + } + + m_update_preserved_attachments = true; + result = true; + +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::RenderPassCreateInfo::add_subpass_depth_stencil_attachment(SubPassID in_subpass_id, + Anvil::ImageLayout in_layout, + RenderPassAttachmentID in_attachment_id, + const RenderPassAttachmentID* in_attachment_resolve_id_ptr, + const Anvil::ResolveModeFlagBits* in_depth_resolve_mode_ptr, + const Anvil::ResolveModeFlagBits* in_stencil_resolve_mode_ptr) +{ + bool result = false; + SubPass* subpass_ptr = nullptr; + + /* Sanity checks .. 
*/ + if (in_attachment_resolve_id_ptr != nullptr) + { + if (!m_device_ptr->get_extension_info()->khr_depth_stencil_resolve() ) + { + anvil_assert(m_device_ptr->get_extension_info()->khr_depth_stencil_resolve() ); + + goto end; + } + + if (*in_attachment_resolve_id_ptr >= m_attachments.size() ) + { + anvil_assert(!(*in_attachment_resolve_id_ptr >= m_attachments.size()) ); + + goto end; + } + } + + /* Retrieve the subpass descriptor */ + if (m_subpasses.size() <= in_subpass_id) + { + anvil_assert(!(m_subpasses.size() <= in_subpass_id) ); + + goto end; + } + else + { + subpass_ptr = m_subpasses.at(in_subpass_id).get(); + } + + /* Retrieve the attachment descriptors */ + if (m_attachments.size() <= in_attachment_id) + { + anvil_assert(!(m_attachments.size() <= in_attachment_id) ); + + goto end; + } + + /* Update the depth/stencil attachment for the subpass */ + if (subpass_ptr->depth_stencil_attachment.attachment_index != UINT32_MAX) + { + anvil_assert(!(subpass_ptr->depth_stencil_attachment.attachment_index != UINT32_MAX) ); + + goto end; + } + + subpass_ptr->depth_stencil_attachment = SubPassAttachment(in_attachment_id, + in_layout, + (in_attachment_resolve_id_ptr != nullptr) ? *in_attachment_resolve_id_ptr + : UINT32_MAX, + Anvil::ImageAspectFlagBits::NONE, + Anvil::ResolveModeFlagBits::NONE, /* in_depth_resolve_mode */ + Anvil::ResolveModeFlagBits::NONE); /* in_stencil_resolve_mode */ + + if ( in_attachment_resolve_id_ptr != nullptr && + *in_attachment_resolve_id_ptr != UINT32_MAX) + { + subpass_ptr->ds_resolve_attachment = SubPassAttachment(*in_attachment_resolve_id_ptr, + in_layout, + UINT32_MAX, /* in_opt_resolve_attachment_index */ + Anvil::ImageAspectFlagBits::NONE, + *in_depth_resolve_mode_ptr, + *in_stencil_resolve_mode_ptr); + } + + m_update_preserved_attachments = true; + result = true; +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::RenderPassCreateInfo::add_subpass_input_attachment(SubPassID in_subpass_id, + Anvil::ImageLayout in_layout, + RenderPassAttachmentID in_attachment_id, + uint32_t in_attachment_index, + const Anvil::ImageAspectFlags& in_opt_aspects_accessed) +{ + return add_subpass_color_input_attachment(in_subpass_id, + false, /* is_color_attachment */ + in_layout, + in_attachment_id, + in_attachment_index, + false, /* should_resolve */ + UINT32_MAX, /* resolve_attachment_id */ + in_opt_aspects_accessed); +} + +/* Please see header for specification */ +bool Anvil::RenderPassCreateInfo::add_subpass_to_external_dependency(SubPassID in_source_subpass_id, + Anvil::PipelineStageFlags in_source_stage_mask, + Anvil::PipelineStageFlags in_destination_stage_mask, + Anvil::AccessFlags in_source_access_mask, + Anvil::AccessFlags in_destination_access_mask, + Anvil::DependencyFlags in_dependency_flags) +{ + bool result = false; + SubPass* source_subpass_ptr = nullptr; + + if (m_subpasses.size() <= in_source_subpass_id) + { + anvil_assert(!(m_subpasses.size() <= in_source_subpass_id) ); + + goto end; + } + + source_subpass_ptr = m_subpasses[in_source_subpass_id].get(); + + result = add_dependency(nullptr, /* destination_subpass_ptr */ + source_subpass_ptr, + in_source_stage_mask, + in_destination_stage_mask, + in_source_access_mask, + in_destination_access_mask, + in_dependency_flags); +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::RenderPassCreateInfo::add_subpass_to_subpass_dependency(SubPassID in_source_subpass_id, + SubPassID in_destination_subpass_id, + Anvil::PipelineStageFlags in_source_stage_mask, + 
Anvil::PipelineStageFlags in_destination_stage_mask, + Anvil::AccessFlags in_source_access_mask, + Anvil::AccessFlags in_destination_access_mask, + Anvil::DependencyFlags in_dependency_flags) +{ + SubPass* destination_subpass_ptr = nullptr; + bool result = false; + SubPass* source_subpass_ptr = nullptr; + + if (m_subpasses.size() <= in_destination_subpass_id) + { + anvil_assert(!(m_subpasses.size() <= in_destination_subpass_id) ); + + goto end; + } + + if (m_subpasses.size() <= in_source_subpass_id) + { + anvil_assert(!(m_subpasses.size() <= in_source_subpass_id) ); + + goto end; + } + + destination_subpass_ptr = m_subpasses[in_destination_subpass_id].get(); + source_subpass_ptr = m_subpasses[in_source_subpass_id].get(); + + result = add_dependency(destination_subpass_ptr, + source_subpass_ptr, + in_source_stage_mask, + in_destination_stage_mask, + in_source_access_mask, + in_destination_access_mask, + in_dependency_flags); +end: + return result; +} + +/** Creates a VkAttachmentReference descriptor from the specified RenderPassAttachment instance. + * + * @param in_renderpass_attachment Renderpass attachment descriptor to create the Vulkan descriptor from. + * + * @return As per description. + **/ +VkAttachmentReference Anvil::RenderPassCreateInfo::get_attachment_reference_from_renderpass_attachment(const RenderPassAttachment& in_renderpass_attachment) const +{ + VkAttachmentReference attachment_vk; + + attachment_vk.attachment = in_renderpass_attachment.index; + attachment_vk.layout = static_cast<VkImageLayout>(in_renderpass_attachment.initial_layout); + + return attachment_vk; +} + +/** Creates a VkAttachmentReference descriptor from the specified SubPassAttachment instance. + * + * @param in_subpass_attachment Subpass attachment descriptor to create the Vulkan descriptor from. + * + * @return As per description. + **/ +VkAttachmentReference Anvil::RenderPassCreateInfo::get_attachment_reference_from_subpass_attachment(const SubPassAttachment& in_subpass_attachment) const +{ + VkAttachmentReference attachment_vk; + + attachment_vk.attachment = m_attachments.at(in_subpass_attachment.attachment_index).index; + attachment_vk.layout = static_cast<VkImageLayout>(in_subpass_attachment.layout); + + return attachment_vk; +} + +/** Creates a VkAttachmentReference descriptor for a resolve attachment for a color attachment specified by the user. + * + * @param in_subpass_iterator Iterator pointing at the subpass which uses the color attachment of interest. + * @param in_location_to_subpass_att_map_iterator Iterator pointing at a color attachment which has a resolve attachment attached. + * + * @return As per description.
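/* Illustrative sketch, not taken from the patch: the Vulkan structure the two helpers above fill
 * out. For a render-pass attachment whose index is 2, accessed in the "color attachment optimal"
 * layout, the equivalent hand-written descriptor would look as follows.
 */
static VkAttachmentReference make_example_attachment_reference()
{
    VkAttachmentReference reference;

    reference.attachment = 2;                                        /* RenderPassAttachment::index           */
    reference.layout     = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; /* static_cast of the Anvil layout value */

    return reference;
}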
+ **/ +VkAttachmentReference Anvil::RenderPassCreateInfo::get_attachment_reference_for_resolve_attachment(const SubPassesConstIterator& in_subpass_iterator, + const LocationToSubPassAttachmentMapConstIterator& in_location_to_subpass_att_map_iterator) const +{ + VkAttachmentReference result; + + anvil_assert((*in_subpass_iterator)->resolved_attachments_map.find(in_location_to_subpass_att_map_iterator->first) != (*in_subpass_iterator)->resolved_attachments_map.end() ); + + result.attachment = m_attachments.at(in_location_to_subpass_att_map_iterator->second.resolve_attachment_index).index; + result.layout = static_cast((*in_subpass_iterator)->resolved_attachments_map.at(in_location_to_subpass_att_map_iterator->first).layout); + + return result; +} + +/* Please see header for specification */ +bool Anvil::RenderPassCreateInfo::get_attachment_type(RenderPassAttachmentID in_attachment_id, + AttachmentType* out_attachment_type_ptr) const +{ + bool result = false; + + if (m_attachments.size() <= in_attachment_id) + { + anvil_assert(m_attachments.size() > in_attachment_id); + + goto end; + } + + *out_attachment_type_ptr = m_attachments[in_attachment_id].type; + + /* All done */ + result = true; +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::RenderPassCreateInfo::get_color_attachment_properties(RenderPassAttachmentID in_attachment_id, + Anvil::Format* out_opt_format_ptr, + Anvil::SampleCountFlagBits* out_opt_sample_count_ptr, + Anvil::AttachmentLoadOp* out_opt_load_op_ptr, + Anvil::AttachmentStoreOp* out_opt_store_op_ptr, + Anvil::ImageLayout* out_opt_initial_layout_ptr, + Anvil::ImageLayout* out_opt_final_layout_ptr, + bool* out_opt_may_alias_ptr) const +{ + bool result = false; + + if (m_attachments.size() <= in_attachment_id) + { + goto end; + } + + if (out_opt_format_ptr != nullptr) + { + *out_opt_format_ptr = m_attachments[in_attachment_id].format; + } + + if (out_opt_sample_count_ptr != nullptr) + { + *out_opt_sample_count_ptr = m_attachments[in_attachment_id].sample_count; + } + + if (out_opt_load_op_ptr != nullptr) + { + *out_opt_load_op_ptr = m_attachments[in_attachment_id].color_depth_load_op; + } + + if (out_opt_store_op_ptr != nullptr) + { + *out_opt_store_op_ptr = m_attachments[in_attachment_id].color_depth_store_op; + } + + if (out_opt_initial_layout_ptr != nullptr) + { + *out_opt_initial_layout_ptr = m_attachments[in_attachment_id].initial_layout; + } + + if (out_opt_final_layout_ptr != nullptr) + { + *out_opt_final_layout_ptr = m_attachments[in_attachment_id].final_layout; + } + + if (out_opt_may_alias_ptr != nullptr) + { + *out_opt_may_alias_ptr = m_attachments[in_attachment_id].may_alias; + } + + result = true; +end: + return result; +} + +/** Please see header for specification */ +bool Anvil::RenderPassCreateInfo::get_dependency_properties(uint32_t in_n_dependency, + SubPassID* out_destination_subpass_id_ptr, + SubPassID* out_source_subpass_id_ptr, + Anvil::PipelineStageFlags* out_destination_stage_mask_ptr, + Anvil::PipelineStageFlags* out_source_stage_mask_ptr, + Anvil::AccessFlags* out_destination_access_mask_ptr, + Anvil::AccessFlags* out_source_access_mask_ptr, + DependencyFlags* out_flags_ptr) const +{ + const Anvil::RenderPassCreateInfo::SubPassDependency* dep_ptr = nullptr; + bool result = false; + + if (m_subpass_dependencies.size() <= in_n_dependency) + { + anvil_assert_fail(); + + goto end; + } + + dep_ptr = &m_subpass_dependencies[in_n_dependency]; + + *out_destination_subpass_id_ptr = (dep_ptr->destination_subpass_ptr != nullptr) 
? dep_ptr->destination_subpass_ptr->index + : UINT32_MAX; + *out_flags_ptr = dep_ptr->flags; + *out_source_subpass_id_ptr = (dep_ptr->source_subpass_ptr != nullptr) ? dep_ptr->source_subpass_ptr->index + : UINT32_MAX; + *out_destination_stage_mask_ptr = dep_ptr->destination_stage_mask; + *out_source_stage_mask_ptr = dep_ptr->source_stage_mask; + *out_destination_access_mask_ptr = dep_ptr->destination_access_mask; + *out_source_access_mask_ptr = dep_ptr->source_access_mask; + + /* All done */ + result = true; +end: + return result; +} + +/** Please see header for specification */ +bool Anvil::RenderPassCreateInfo::get_dependency_multiview_properties(uint32_t in_n_dependency, + int32_t* out_view_offset_ptr) const +{ + bool result = false; + + if (!m_multiview_enabled) + { + anvil_assert(m_multiview_enabled); + + goto end; + } + + if (m_subpass_dependencies.size() <= in_n_dependency) + { + anvil_assert(m_subpass_dependencies.size() > in_n_dependency); + + goto end; + } + + *out_view_offset_ptr = m_subpass_dependencies.at(in_n_dependency).multiview_view_offset; + result = true; +end: + return result; +} + +/** Please see header for specification */ +bool Anvil::RenderPassCreateInfo::get_depth_stencil_attachment_properties(RenderPassAttachmentID in_attachment_id, + Anvil::Format* out_opt_format_ptr, + Anvil::SampleCountFlagBits* out_opt_sample_count_ptr, + Anvil::AttachmentLoadOp* out_opt_depth_load_op_ptr, + Anvil::AttachmentStoreOp* out_opt_depth_store_op_ptr, + Anvil::AttachmentLoadOp* out_opt_stencil_load_op_ptr, + Anvil::AttachmentStoreOp* out_opt_stencil_store_op_ptr, + Anvil::ImageLayout* out_opt_initial_layout_ptr, + Anvil::ImageLayout* out_opt_final_layout_ptr, + bool* out_opt_may_alias_ptr) const +{ + bool result = false; + + if (m_attachments.size() <= in_attachment_id) + { + goto end; + } + + if (out_opt_format_ptr != nullptr) + { + *out_opt_format_ptr = m_attachments[in_attachment_id].format; + } + + if (out_opt_sample_count_ptr != nullptr) + { + *out_opt_sample_count_ptr = m_attachments[in_attachment_id].sample_count; + } + + if (out_opt_depth_load_op_ptr != nullptr) + { + *out_opt_depth_load_op_ptr = m_attachments[in_attachment_id].color_depth_load_op; + } + + if (out_opt_depth_store_op_ptr != nullptr) + { + *out_opt_depth_store_op_ptr = m_attachments[in_attachment_id].color_depth_store_op; + } + + if (out_opt_stencil_load_op_ptr != nullptr) + { + *out_opt_stencil_load_op_ptr = m_attachments[in_attachment_id].stencil_load_op; + } + + if (out_opt_stencil_store_op_ptr != nullptr) + { + *out_opt_stencil_store_op_ptr = m_attachments[in_attachment_id].stencil_store_op; + } + + if (out_opt_initial_layout_ptr != nullptr) + { + *out_opt_initial_layout_ptr = m_attachments[in_attachment_id].initial_layout; + } + + if (out_opt_final_layout_ptr != nullptr) + { + *out_opt_final_layout_ptr = m_attachments[in_attachment_id].final_layout; + } + + if (out_opt_may_alias_ptr != nullptr) + { + *out_opt_may_alias_ptr = m_attachments[in_attachment_id].may_alias; + } + + result = true; +end: + return result; +} + +/* Please see header for specification */ +uint32_t Anvil::RenderPassCreateInfo::get_max_color_location_used_by_subpass(const SubPassID& in_subpass_id) const +{ + uint32_t result = UINT32_MAX; + auto subpass_iterator = (m_subpasses.size() > in_subpass_id) ? 
m_subpasses.begin() + in_subpass_id : m_subpasses.end(); + + if (subpass_iterator == m_subpasses.end() ) + { + anvil_assert(subpass_iterator != m_subpasses.end() ); + + goto end; + } + + result = 0; + + for (const auto& current_color_attachment_data : (*subpass_iterator)->color_attachments_map) + { + result = std::max(result, + current_color_attachment_data.first); + } + +end: + return result; +} + +/* Please see header for specification */ +void Anvil::RenderPassCreateInfo::get_multiview_correlation_masks(uint32_t* out_n_correlation_masks_ptr, + const uint32_t** out_correlation_masks_ptr_ptr) const +{ + anvil_assert(m_multiview_enabled); + + *out_correlation_masks_ptr_ptr = (m_correlation_masks.size() > 0) ? &m_correlation_masks.at(0) : nullptr; + *out_n_correlation_masks_ptr = static_cast(m_correlation_masks.size() ); +} + +/* Please see header for specification */ +bool Anvil::RenderPassCreateInfo::get_subpass_n_attachments(SubPassID in_subpass_id, + AttachmentType in_attachment_type, + uint32_t* out_n_attachments_ptr) const +{ + bool result = false; + + if (m_subpasses.size() <= in_subpass_id) + { + anvil_assert(!(m_subpasses.size() <= in_subpass_id) ); + + goto end; + } + else + { + result = true; + + if (in_attachment_type == Anvil::AttachmentType::PRESERVE && + m_update_preserved_attachments) + { + update_preserved_attachments(); + + anvil_assert(!m_update_preserved_attachments); + } + + switch (in_attachment_type) + { + case Anvil::AttachmentType::COLOR: *out_n_attachments_ptr = static_cast(m_subpasses[in_subpass_id]->color_attachments_map.size() ); break; + case Anvil::AttachmentType::INPUT: *out_n_attachments_ptr = static_cast(m_subpasses[in_subpass_id]->input_attachments_map.size() ); break; + case Anvil::AttachmentType::PRESERVE: *out_n_attachments_ptr = static_cast(m_subpasses[in_subpass_id]->preserved_attachments.size() ); break; + case Anvil::AttachmentType::RESOLVE: *out_n_attachments_ptr = static_cast(m_subpasses[in_subpass_id]->resolved_attachments_map.size() ); break; + + case Anvil::AttachmentType::DEPTH_STENCIL: + { + *out_n_attachments_ptr = (m_subpasses[in_subpass_id]->depth_stencil_attachment.attachment_index != UINT32_MAX) ? 1u : 0u; + + break; + } + + default: + { + anvil_assert_fail(); + + result = false; + } + } + } + +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::RenderPassCreateInfo::get_subpass_attachment_properties(SubPassID in_subpass_id, + AttachmentType in_attachment_type, + uint32_t in_n_subpass_attachment, + RenderPassAttachmentID* out_renderpass_attachment_id_ptr, + Anvil::ImageLayout* out_layout_ptr, + Anvil::ImageAspectFlags* out_opt_aspects_accessed_ptr, + RenderPassAttachmentID* out_opt_attachment_resolve_id_ptr, + uint32_t* out_opt_location_ptr) const +{ + SubPassAttachment attachment; + bool result = false; + LocationToSubPassAttachmentMap* subpass_attachments_ptr = nullptr; + SubPass* subpass_ptr = nullptr; + + /* Sanity checks */ + if (in_attachment_type == Anvil::AttachmentType::PRESERVE && + out_layout_ptr != nullptr) + { + anvil_assert(!(in_attachment_type == Anvil::AttachmentType::PRESERVE && + out_layout_ptr != nullptr) ); + + goto end; + } + + if (m_subpasses.size() <= in_subpass_id) + { + anvil_assert(!(m_subpasses.size() <= in_subpass_id) ); + + goto end; + } + + subpass_ptr = m_subpasses[in_subpass_id].get(); + + /* Even more sanity checks.. 
*/ + switch (in_attachment_type) + { + case Anvil::AttachmentType::COLOR: /* Fall-through */ + case Anvil::AttachmentType::INPUT: /* Fall-through */ + case Anvil::AttachmentType::RESOLVE: + { + subpass_attachments_ptr = (in_attachment_type == Anvil::AttachmentType::COLOR) ? &subpass_ptr->color_attachments_map + : (in_attachment_type == Anvil::AttachmentType::INPUT) ? &subpass_ptr->input_attachments_map + : &subpass_ptr->resolved_attachments_map; + + auto iterator = subpass_attachments_ptr->begin(); + + for (uint32_t n_key = 0; + n_key < in_n_subpass_attachment; + ++n_key) + { + if (iterator == subpass_attachments_ptr->end() ) + { + goto end; + } + + iterator ++; + } + + if (iterator == subpass_attachments_ptr->end() ) + { + goto end; + } + + if (out_opt_aspects_accessed_ptr != nullptr) + { + *out_opt_aspects_accessed_ptr = iterator->second.aspects_accessed; + } + + if (out_opt_attachment_resolve_id_ptr != nullptr) + { + *out_opt_attachment_resolve_id_ptr = iterator->second.resolve_attachment_index; + } + + if (out_opt_location_ptr != nullptr) + { + *out_opt_location_ptr = iterator->first; + } + + *out_layout_ptr = iterator->second.layout; + *out_renderpass_attachment_id_ptr = m_attachments.at(iterator->second.attachment_index).index; + + break; + } + + case Anvil::AttachmentType::DEPTH_STENCIL: + { + if (in_n_subpass_attachment > 0) + { + anvil_assert(in_n_subpass_attachment == 0); + + goto end; + } + + if (out_opt_aspects_accessed_ptr != nullptr) + { + *out_opt_aspects_accessed_ptr = Anvil::ImageAspectFlagBits::NONE; + } + + if (out_opt_attachment_resolve_id_ptr != nullptr) + { + *out_opt_attachment_resolve_id_ptr = subpass_ptr->depth_stencil_attachment.resolve_attachment_index; + } + + if (out_opt_location_ptr != nullptr) + { + *out_opt_location_ptr = 0; + } + + *out_layout_ptr = subpass_ptr->depth_stencil_attachment.layout; + *out_renderpass_attachment_id_ptr = m_attachments.at(subpass_ptr->depth_stencil_attachment.attachment_index).index; + + break; + } + + case Anvil::AttachmentType::PRESERVE: + { + if (subpass_ptr->preserved_attachments.size() <= in_n_subpass_attachment) + { + anvil_assert(subpass_ptr->preserved_attachments.size() > in_n_subpass_attachment); + + goto end; + } + + const auto& attachment_props = subpass_ptr->preserved_attachments[in_n_subpass_attachment]; + + *out_renderpass_attachment_id_ptr = m_attachments.at(attachment_props.attachment_index).index; + + if (out_opt_aspects_accessed_ptr != nullptr) + { + *out_opt_aspects_accessed_ptr = Anvil::ImageAspectFlagBits::NONE; + } + + if (out_opt_location_ptr != nullptr) + { + *out_opt_location_ptr = UINT32_MAX; + } + + anvil_assert(out_opt_attachment_resolve_id_ptr == nullptr); + anvil_assert(out_opt_location_ptr == nullptr); + + break; + } + + default: + { + anvil_assert_fail(); + + goto end; + } + } + + /* All done */ + result = true; + +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::RenderPassCreateInfo::get_subpass_ds_resolve_attachment_properties(SubPassID in_subpass_id, + RenderPassAttachmentID* out_opt_renderpass_attachment_id_ptr, + Anvil::ImageLayout* out_opt_layout_ptr, + Anvil::ResolveModeFlagBits* out_opt_depth_resolve_mode_ptr, + Anvil::ResolveModeFlagBits* out_opt_stencil_resolve_mode_ptr) const +{ + SubPassAttachment attachment; + bool result = false; + const SubPassAttachment* subpass_attachment_ptr = nullptr; + SubPass* subpass_ptr = nullptr; + + /* Sanity checks */ + if (m_subpasses.size() <= in_subpass_id) + { + anvil_assert(!(m_subpasses.size() <= in_subpass_id) ); + + 
goto end; + } + + subpass_ptr = m_subpasses[in_subpass_id].get(); + + /* Even more sanity checks.. */ + subpass_attachment_ptr = &subpass_ptr->ds_resolve_attachment; + + if (subpass_attachment_ptr->depth_resolve_mode == Anvil::ResolveModeFlagBits::NONE && + subpass_attachment_ptr->stencil_resolve_mode == Anvil::ResolveModeFlagBits::NONE) + { + goto end; + } + + if (out_opt_depth_resolve_mode_ptr != nullptr) + { + *out_opt_depth_resolve_mode_ptr = subpass_attachment_ptr->depth_resolve_mode; + } + + if (out_opt_layout_ptr != nullptr) + { + *out_opt_layout_ptr = subpass_attachment_ptr->layout; + } + + if (out_opt_renderpass_attachment_id_ptr != nullptr) + { + *out_opt_renderpass_attachment_id_ptr = m_attachments.at(subpass_attachment_ptr->attachment_index).index; + } + + if (out_opt_stencil_resolve_mode_ptr != nullptr) + { + *out_opt_stencil_resolve_mode_ptr = subpass_attachment_ptr->stencil_resolve_mode; + } + + /* All done */ + result = true; + +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::RenderPassCreateInfo::get_subpass_highest_location(SubPassID in_subpass_id, + uint32_t* out_result_ptr) const +{ + bool result = false; + + if (m_subpasses.size() <= in_subpass_id) + { + anvil_assert(m_subpasses.size() > in_subpass_id); + + goto end; + } + + if (m_subpasses[in_subpass_id]->color_attachments_map.size () == 0 && + m_subpasses[in_subpass_id]->input_attachments_map.size () == 0 && + m_subpasses[in_subpass_id]->resolved_attachments_map.size() == 0) + { + goto end; + } + + *out_result_ptr = m_subpasses[in_subpass_id]->n_highest_location_used; + result = true; +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::RenderPassCreateInfo::get_subpass_view_mask(SubPassID in_subpass_id, + uint32_t* out_view_mask_ptr) const +{ + bool result = false; + + if (!m_multiview_enabled) + { + anvil_assert(m_multiview_enabled); + + goto end; + } + + if (m_subpasses.size() <= in_subpass_id) + { + anvil_assert(m_subpasses.size() > in_subpass_id); + + goto end; + } + + *out_view_mask_ptr = m_subpasses.at(in_subpass_id)->multiview_view_mask; + result = true; +end: + return result; +} + +/* Please see header for specification */ +void Anvil::RenderPassCreateInfo::set_correlation_masks(const uint32_t& in_n_correlation_masks, + const uint32_t* in_correlation_masks_ptr) +{ + anvil_assert(in_n_correlation_masks != 0); + + m_correlation_masks.resize(in_n_correlation_masks); + + memcpy(&m_correlation_masks.at(0), + in_correlation_masks_ptr, + in_n_correlation_masks * sizeof(uint32_t) ); + + if (!m_multiview_enabled) + { + m_multiview_enabled = true; + } +} + +/* Please see header for specification */ +bool Anvil::RenderPassCreateInfo::set_dependency_view_local_properties(const uint32_t& in_n_dependency, + const int32_t& in_view_offset) +{ + decltype(m_subpass_dependencies)::iterator dep_iterator; + bool result = false; + + if (m_subpass_dependencies.size() <= in_n_dependency) + { + anvil_assert(m_subpass_dependencies.size() > in_n_dependency); + + goto end; + } + + dep_iterator = m_subpass_dependencies.begin() + in_n_dependency; + dep_iterator->multiview_view_offset = in_view_offset; + + if (!m_multiview_enabled) + { + m_multiview_enabled = true; + } + + result = true; +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::RenderPassCreateInfo::set_subpass_view_mask(SubPassID in_subpass_id, + const uint32_t& in_view_mask) +{ + bool result = false; + decltype(m_subpasses)::iterator subpass_iterator; + + if (m_subpasses.size() 
<= in_subpass_id) + { + anvil_assert(m_subpasses.size() > in_subpass_id); + + goto end; + } + + subpass_iterator = m_subpasses.begin() + in_subpass_id; + (*subpass_iterator)->multiview_view_mask = in_view_mask; + + if (!m_multiview_enabled) + { + m_multiview_enabled = true; + } + + result = true; +end: + return result; +} +
+/** Initializes the vector of preserved attachments for all defined attachments. The algorithm + * used is as follows: + * + * For each subpass: + * + * 1. Check what the highest subpass index is at which the attachment is preserved/used. + * 2. For all preceding subpasses, determine which ones do not use it. For each such subpass, preserve + *    the attachment. + * + * This approach may need to be changed or extended in the future. + * + * This function should be considered expensive. + **/ +void Anvil::RenderPassCreateInfo::update_preserved_attachments() const +{ + anvil_assert(m_update_preserved_attachments); + + /* Cache color, depth+stencil and resolve attachments, as used by all defined subpasses. + * Make sure not to insert duplicate items. */ + std::vector<SubPassAttachment*> unique_attachments; + + for (auto subpass_iterator = m_subpasses.begin(); subpass_iterator != m_subpasses.end(); ++subpass_iterator) + { + SubPass* current_subpass_ptr = subpass_iterator->get(); + + for (uint32_t n_attachment_type = 0; n_attachment_type < 3; /* color, depth+stencil, resolve */ ++n_attachment_type) + { + const uint32_t n_attachments = (n_attachment_type == 0) ? static_cast<uint32_t>(current_subpass_ptr->color_attachments_map.size() ) + : (n_attachment_type == 1) ? ((current_subpass_ptr->depth_stencil_attachment.attachment_index != UINT32_MAX) ? 1 : 0) + : static_cast<uint32_t>(current_subpass_ptr->resolved_attachments_map.size() ); + + for (uint32_t n_attachment = 0; n_attachment < n_attachments; ++n_attachment) + { + SubPassAttachment* current_attachment_ptr = (n_attachment_type == 0) ? current_subpass_ptr->get_color_attachment_at_index(n_attachment) + : (n_attachment_type == 1) ? &current_subpass_ptr->depth_stencil_attachment + : current_subpass_ptr->get_resolved_attachment_at_index(n_attachment); + + if (std::find(unique_attachments.begin(), unique_attachments.end(), current_attachment_ptr) == unique_attachments.end() ) + { + unique_attachments.push_back(current_attachment_ptr); + } + } + } + + /* Clean the subpass's preserved attachments vector along the way. */ + current_subpass_ptr->preserved_attachments.clear(); + } +
+ /* Determine the first and the last subpass index at which each unique attachment is used. */ + for (std::vector<SubPassAttachment*>::iterator unique_attachment_iterator = unique_attachments.begin(); unique_attachment_iterator != unique_attachments.end(); ++unique_attachment_iterator) + { + uint32_t current_subpass_index = 0; + const SubPassAttachment* current_unique_attachment_ptr = *unique_attachment_iterator; + uint32_t lowest_subpass_index = static_cast<uint32_t>(m_subpasses.size() - 1); + uint32_t highest_subpass_index = 0; + + for (auto subpass_iterator = m_subpasses.begin(); subpass_iterator != m_subpasses.end(); ++subpass_iterator, ++current_subpass_index) + { + SubPass* current_subpass_ptr = subpass_iterator->get(); + bool subpass_processed = false; + + for (uint32_t n_attachment_type = 0; n_attachment_type < 3 /* color, depth+stencil, resolve */ && !subpass_processed; ++n_attachment_type) + { + const uint32_t n_attachments = (n_attachment_type == 0) ? static_cast<uint32_t>(current_subpass_ptr->color_attachments_map.size() ) + : (n_attachment_type == 1) ? ((current_subpass_ptr->depth_stencil_attachment.attachment_index != UINT32_MAX) ? 1 : 0) + : static_cast<uint32_t>(current_subpass_ptr->resolved_attachments_map.size() ); + + for (uint32_t n_attachment = 0; n_attachment < n_attachments && !subpass_processed; ++n_attachment) + { + SubPassAttachment* current_attachment_ptr = (n_attachment_type == 0) ? current_subpass_ptr->get_color_attachment_at_index(n_attachment) + : (n_attachment_type == 1) ? &current_subpass_ptr->depth_stencil_attachment + : current_subpass_ptr->get_resolved_attachment_at_index(n_attachment); + + if (current_attachment_ptr == current_unique_attachment_ptr) + { + if (lowest_subpass_index > current_subpass_index) + { + lowest_subpass_index = current_subpass_index; + } + + highest_subpass_index = current_subpass_index; + subpass_processed = true; + } + } + } + } + + (*unique_attachment_iterator)->highest_subpass_index = highest_subpass_index; + (*unique_attachment_iterator)->lowest_subpass_index = lowest_subpass_index; + } +
+ /* For each unique attachment, add it to the list of preserved attachments for all subpasses that precede the + * one at the highest subpass index and follow the one at the lowest subpass index, as long as the + * attachment is not used by a given subpass. */ + for (std::vector<SubPassAttachment*>::iterator unique_attachment_iterator = unique_attachments.begin(); unique_attachment_iterator != unique_attachments.end(); ++unique_attachment_iterator) + { + const SubPassAttachment* current_unique_attachment_ptr = *unique_attachment_iterator; + + if ((*unique_attachment_iterator)->highest_subpass_index == (*unique_attachment_iterator)->lowest_subpass_index) + { + /* There is only one producer subpass and no consumer subpasses defined for this renderpass. + * No need to create any preserve attachments. */ + continue; + } + + for (auto subpass_iterator = (m_subpasses.begin() + static_cast<uint32_t>(current_unique_attachment_ptr->lowest_subpass_index) ); subpass_iterator != (m_subpasses.begin() + static_cast<uint32_t>(current_unique_attachment_ptr->highest_subpass_index) ) + 1; ++subpass_iterator) + { + SubPass* current_subpass_ptr = subpass_iterator->get(); + bool is_subpass_attachment = false; + + for (uint32_t n_attachment_type = 0; n_attachment_type < 3 /* color, depth+stencil, resolve */ && !is_subpass_attachment; ++n_attachment_type) + { + const uint32_t n_attachments = (n_attachment_type == 0) ? static_cast<uint32_t>(current_subpass_ptr->color_attachments_map.size() ) + : (n_attachment_type == 1) ? ((current_subpass_ptr->depth_stencil_attachment.attachment_index != UINT32_MAX) ? 1 : 0) + : static_cast<uint32_t>(current_subpass_ptr->resolved_attachments_map.size() ); + + for (uint32_t n_attachment = 0; n_attachment < n_attachments && !is_subpass_attachment; ++n_attachment) + { + SubPassAttachment* current_attachment_ptr = (n_attachment_type == 0) ? current_subpass_ptr->get_color_attachment_at_index(n_attachment) + : (n_attachment_type == 1) ? &current_subpass_ptr->depth_stencil_attachment + : current_subpass_ptr->get_resolved_attachment_at_index(n_attachment); + + if (current_attachment_ptr == current_unique_attachment_ptr) + { + is_subpass_attachment = true; + } + } + } + + if (!is_subpass_attachment) + { + #ifdef _DEBUG + { + bool is_already_preserved = false; + const uint32_t n_preserved_attachments = static_cast<uint32_t>(current_subpass_ptr->preserved_attachments.size() ); + + for (uint32_t n_preserved_attachment = 0; n_preserved_attachment < n_preserved_attachments; ++n_preserved_attachment) + { + if (&current_subpass_ptr->preserved_attachments[n_preserved_attachment] == current_unique_attachment_ptr) + { + is_already_preserved = true; + + break; + } + } + + anvil_assert(!is_already_preserved); + } + #endif + + current_subpass_ptr->preserved_attachments.push_back(*current_unique_attachment_ptr); + } + } + } + + m_update_preserved_attachments = false; +} +
+/** Please see header for specification */ +Anvil::RenderPassCreateInfo::SubPass::~SubPass() +{ + /* Stub */ +} diff --git a/src/misc/rendering_surface_create_info.cpp b/src/misc/rendering_surface_create_info.cpp new file mode 100644 index 00000000..39c02d2b --- /dev/null +++ b/src/misc/rendering_surface_create_info.cpp @@ -0,0 +1,58 @@ +// +// Copyright (c) 2019 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE.
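The preserved-attachment bookkeeping implemented by update_preserved_attachments() in src/misc/render_pass_create_info.cpp above is easiest to see with a concrete case: given three subpasses where an attachment is written by subpass 0 and read by subpass 2 but not referenced by subpass 1, its lowest/highest usage indices become 0 and 2, so the attachment is appended to subpass 1's preserved_attachments vector. A minimal sketch of observing this through the public getter shown earlier (render_pass_create_info_ptr is an assumed pointer to such a RenderPassCreateInfo instance):

    uint32_t n_preserved_attachments = 0;

    /* Querying PRESERVE attachments triggers the lazy update_preserved_attachments() pass first. */
    if (render_pass_create_info_ptr->get_subpass_n_attachments(1, /* in_subpass_id */
                                                               Anvil::AttachmentType::PRESERVE,
                                                              &n_preserved_attachments) )
    {
        /* For the scenario above, n_preserved_attachments should come back as 1. */
    }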
+// + +#include "misc/rendering_surface_create_info.h" + +Anvil::RenderingSurfaceCreateInfo::RenderingSurfaceCreateInfo(Anvil::Instance* in_instance_ptr, + const Anvil::BaseDevice* in_device_ptr, + const Anvil::Window* in_window_ptr, + MTSafety in_mt_safety) + :m_device_ptr (in_device_ptr), + m_instance_ptr(in_instance_ptr), + m_mt_safety (in_mt_safety), + m_window_ptr (in_window_ptr) +{ + /* Stub */ +} + +Anvil::RenderingSurfaceCreateInfo::~RenderingSurfaceCreateInfo() +{ + /* Stub */ +} + +Anvil::RenderingSurfaceCreateInfoUniquePtr Anvil::RenderingSurfaceCreateInfo::create(Anvil::Instance* in_instance_ptr, + const Anvil::BaseDevice* in_device_ptr, + const Anvil::Window* in_window_ptr, + MTSafety in_mt_safety) +{ + Anvil::RenderingSurfaceCreateInfoUniquePtr result_ptr; + + result_ptr.reset( + new Anvil::RenderingSurfaceCreateInfo(in_instance_ptr, + in_device_ptr, + in_window_ptr, + in_mt_safety) + ); + anvil_assert(result_ptr != nullptr); + + return result_ptr; +} diff --git a/src/misc/sampler_create_info.cpp b/src/misc/sampler_create_info.cpp new file mode 100644 index 00000000..82227da2 --- /dev/null +++ b/src/misc/sampler_create_info.cpp @@ -0,0 +1,102 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
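For reference, a minimal usage sketch for the RenderingSurfaceCreateInfo::create() factory introduced in src/misc/rendering_surface_create_info.cpp above (instance_ptr, device_ptr and window_ptr are assumed to be valid Anvil::Instance, Anvil::BaseDevice and Anvil::Window pointers owned by the application):

    auto surface_create_info_ptr = Anvil::RenderingSurfaceCreateInfo::create(instance_ptr,
                                                                             device_ptr,
                                                                             window_ptr,
                                                                             Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE);

    /* The resulting create-info object is then expected to be consumed by the corresponding rendering-surface wrapper. */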
+// + +#include "misc/sampler_create_info.h" + +Anvil::SamplerCreateInfoUniquePtr Anvil::SamplerCreateInfo::create(const Anvil::BaseDevice* in_device_ptr, + Anvil::Filter in_mag_filter, + Anvil::Filter in_min_filter, + Anvil::SamplerMipmapMode in_mipmap_mode, + Anvil::SamplerAddressMode in_address_mode_u, + Anvil::SamplerAddressMode in_address_mode_v, + Anvil::SamplerAddressMode in_address_mode_w, + float in_lod_bias, + float in_max_anisotropy, + bool in_compare_enable, + Anvil::CompareOp in_compare_op, + float in_min_lod, + float in_max_lod, + Anvil::BorderColor in_border_color, + bool in_use_unnormalized_coordinates, + MTSafety in_mt_safety) +{ + Anvil::SamplerCreateInfoUniquePtr result_ptr; + + result_ptr.reset( + new Anvil::SamplerCreateInfo(in_device_ptr, + in_mag_filter, + in_min_filter, + in_mipmap_mode, + in_address_mode_u, + in_address_mode_v, + in_address_mode_w, + in_lod_bias, + in_max_anisotropy, + in_compare_enable, + in_compare_op, + in_min_lod, + in_max_lod, + in_border_color, + in_use_unnormalized_coordinates, + in_mt_safety) + ); + + return result_ptr; +} + +Anvil::SamplerCreateInfo::SamplerCreateInfo(const Anvil::BaseDevice* in_device_ptr, + Anvil::Filter in_mag_filter, + Anvil::Filter in_min_filter, + Anvil::SamplerMipmapMode in_mipmap_mode, + Anvil::SamplerAddressMode in_address_mode_u, + Anvil::SamplerAddressMode in_address_mode_v, + Anvil::SamplerAddressMode in_address_mode_w, + float in_lod_bias, + float in_max_anisotropy, + bool in_compare_enable, + Anvil::CompareOp in_compare_op, + float in_min_lod, + float in_max_lod, + Anvil::BorderColor in_border_color, + bool in_use_unnormalized_coordinates, + MTSafety in_mt_safety) + :m_address_mode_u (in_address_mode_u), + m_address_mode_v (in_address_mode_v), + m_address_mode_w (in_address_mode_w), + m_border_color (in_border_color), + m_compare_enable (in_compare_enable), + m_compare_op (in_compare_op), + m_device_ptr (in_device_ptr), + m_lod_bias (in_lod_bias), + m_mag_filter (in_mag_filter), + m_max_anisotropy (in_max_anisotropy), + m_max_lod (in_max_lod), + m_min_filter (in_min_filter), + m_min_lod (in_min_lod), + m_mipmap_mode (in_mipmap_mode), + m_mt_safety (in_mt_safety), + m_sampler_reduction_mode (Anvil::SamplerReductionMode::UNKNOWN), + m_sampler_ycbcr_conversion_ptr(nullptr), + m_use_unnormalized_coordinates(in_use_unnormalized_coordinates) +{ + /* Stub */ +} diff --git a/src/misc/sampler_ycbcr_conversion_create_info.cpp b/src/misc/sampler_ycbcr_conversion_create_info.cpp new file mode 100644 index 00000000..bcea23a7 --- /dev/null +++ b/src/misc/sampler_ycbcr_conversion_create_info.cpp @@ -0,0 +1,77 @@ +// +// Copyright (c) 2019 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// + +#include "misc/sampler_ycbcr_conversion_create_info.h" + +Anvil::SamplerYCbCrConversionCreateInfoUniquePtr Anvil::SamplerYCbCrConversionCreateInfo::create(const Anvil::BaseDevice* in_device_ptr, + const Anvil::Format& in_format, + const Anvil::SamplerYCbCrModelConversion& in_ycbcr_model_conversion, + const Anvil::SamplerYCbCrRange& in_ycbcr_range, + const Anvil::ComponentMapping& in_components, + const Anvil::ChromaLocation& in_x_chroma_offset, + const Anvil::ChromaLocation& in_y_chroma_offset, + const Anvil::Filter& in_chroma_filter, + const bool& in_should_force_explicit_reconstruction, + MTSafety in_mt_safety) +{ + Anvil::SamplerYCbCrConversionCreateInfoUniquePtr result_ptr; + + result_ptr.reset( + new Anvil::SamplerYCbCrConversionCreateInfo(in_device_ptr, + in_format, + in_ycbcr_model_conversion, + in_ycbcr_range, + in_components, + in_x_chroma_offset, + in_y_chroma_offset, + in_chroma_filter, + in_should_force_explicit_reconstruction, + in_mt_safety) + ); + + anvil_assert(result_ptr != nullptr); + return result_ptr; +} + +Anvil::SamplerYCbCrConversionCreateInfo::SamplerYCbCrConversionCreateInfo(const Anvil::BaseDevice* in_device_ptr, + const Anvil::Format& in_format, + const Anvil::SamplerYCbCrModelConversion& in_ycbcr_model_conversion, + const Anvil::SamplerYCbCrRange& in_ycbcr_range, + const Anvil::ComponentMapping& in_components, + const Anvil::ChromaLocation& in_x_chroma_offset, + const Anvil::ChromaLocation& in_y_chroma_offset, + const Anvil::Filter& in_chroma_filter, + const bool& in_should_force_explicit_reconstruction, + MTSafety in_mt_safety) + :m_chroma_filter (in_chroma_filter), + m_components (in_components), + m_device_ptr (in_device_ptr), + m_format (in_format), + m_mt_safety (in_mt_safety), + m_should_force_explicit_reconstruction(in_should_force_explicit_reconstruction), + m_x_chroma_offset (in_x_chroma_offset), + m_y_chroma_offset (in_y_chroma_offset), + m_ycbcr_model_conversion (in_ycbcr_model_conversion), + m_ycbcr_range (in_ycbcr_range) +{ + /* Stub */ +} diff --git a/src/misc/semaphore_create_info.cpp b/src/misc/semaphore_create_info.cpp new file mode 100644 index 00000000..0ae25fe3 --- /dev/null +++ b/src/misc/semaphore_create_info.cpp @@ -0,0 +1,49 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#include "misc/semaphore_create_info.h" +#include "wrappers/device.h" + +Anvil::SemaphoreCreateInfoUniquePtr Anvil::SemaphoreCreateInfo::create(const Anvil::BaseDevice* in_device_ptr) +{ + Anvil::SemaphoreCreateInfoUniquePtr result_ptr(nullptr, + std::default_delete() ); + + result_ptr.reset( + new Anvil::SemaphoreCreateInfo(in_device_ptr, + Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE) + ); + + return result_ptr; +} + +Anvil::SemaphoreCreateInfo::SemaphoreCreateInfo(const Anvil::BaseDevice* in_device_ptr, + MTSafety in_mt_safety) + :m_device_ptr (in_device_ptr), + m_exportable_external_semaphore_handle_types (Anvil::ExternalSemaphoreHandleTypeFlagBits::NONE), +#if defined(_WIN32) + m_exportable_nt_handle_info_specified (false), + m_exportable_nt_handle_info_security_attributes_specified(false), +#endif + m_mt_safety (in_mt_safety) +{ + /* Stub */ +} \ No newline at end of file diff --git a/src/misc/shader_module_cache.cpp b/src/misc/shader_module_cache.cpp index 017746c8..f70fef3e 100644 --- a/src/misc/shader_module_cache.cpp +++ b/src/misc/shader_module_cache.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -26,6 +26,7 @@ /** Please see header for documentation */ Anvil::ShaderModuleCache::ShaderModuleCache() + :MTSafetySupportProvider(true) { update_subscriptions(true); } @@ -33,14 +34,11 @@ Anvil::ShaderModuleCache::ShaderModuleCache() /** Please see header for documentation */ Anvil::ShaderModuleCache::~ShaderModuleCache() { - std::unique_lock lock(m_cs); - { - update_subscriptions(false); - } + update_subscriptions(false); } /** TODO */ -void Anvil::ShaderModuleCache::cache(std::shared_ptr in_shader_module_ptr) +void Anvil::ShaderModuleCache::cache(Anvil::ShaderModule* in_shader_module_ptr) { const auto shader_module_cs_entrypoint_name = in_shader_module_ptr->get_cs_entrypoint_name(); const auto shader_module_device_ptr = in_shader_module_ptr->get_parent_device (); @@ -63,12 +61,12 @@ void Anvil::ShaderModuleCache::cache(std::shared_ptr in_sha anvil_assert(in_shader_module_ptr != nullptr); { - std::unique_lock lock(m_cs); + std::unique_lock mutex_lock(*get_mutex() ); - auto items_map_iterator = m_items.find(hash); + auto items_map_iterator = m_item_ptrs.find(hash); bool should_store_new_item = false; - if (items_map_iterator == m_items.end() ) + if (items_map_iterator == m_item_ptrs.end() ) { /* No collision case */ should_store_new_item = true; @@ -81,20 +79,20 @@ void Anvil::ShaderModuleCache::cache(std::shared_ptr in_sha bool item_found = false; auto& item_list = items_map_iterator->second; - for (const auto& current_item : item_list) + for (const auto& current_item_ptr : item_list) { - if (current_item.matches(shader_module_device_ptr, - reinterpret_cast(&shader_module_spirv_blob.at(0) ), - static_cast(shader_module_spirv_blob.size() * sizeof(shader_module_spirv_blob.at(0) )), - shader_module_cs_entrypoint_name, - shader_module_fs_entrypoint_name, - shader_module_gs_entrypoint_name, - shader_module_tc_entrypoint_name, - 
shader_module_te_entrypoint_name, - shader_module_vs_entrypoint_name) ) + if (current_item_ptr->matches(shader_module_device_ptr, + reinterpret_cast(&shader_module_spirv_blob.at(0) ), + static_cast(shader_module_spirv_blob.size() * sizeof(shader_module_spirv_blob.at(0) )), + shader_module_cs_entrypoint_name, + shader_module_fs_entrypoint_name, + shader_module_gs_entrypoint_name, + shader_module_tc_entrypoint_name, + shader_module_te_entrypoint_name, + shader_module_vs_entrypoint_name) ) { /* This assertion check should never explode */ - anvil_assert(current_item.shader_module_ptr == in_shader_module_ptr); + anvil_assert(current_item_ptr->shader_module_owned_ptr.get() == in_shader_module_ptr); item_found = true; break; @@ -106,24 +104,29 @@ void Anvil::ShaderModuleCache::cache(std::shared_ptr in_sha if (should_store_new_item) { - auto new_item = HashMapItem(shader_module_device_ptr, - shader_module_spirv_blob, - shader_module_cs_entrypoint_name, - shader_module_fs_entrypoint_name, - shader_module_gs_entrypoint_name, - shader_module_tc_entrypoint_name, - shader_module_te_entrypoint_name, - shader_module_vs_entrypoint_name, - in_shader_module_ptr); - - m_items[hash].push_front(new_item); + std::unique_ptr new_item_ptr( + new HashMapItem(shader_module_device_ptr, + shader_module_spirv_blob, + shader_module_cs_entrypoint_name, + shader_module_fs_entrypoint_name, + shader_module_gs_entrypoint_name, + shader_module_tc_entrypoint_name, + shader_module_te_entrypoint_name, + shader_module_vs_entrypoint_name, + in_shader_module_ptr) + ); + + m_item_ptrs[hash].push_front( + std::move(new_item_ptr) + ); } } } -std::shared_ptr Anvil::ShaderModuleCache::create() +Anvil::ShaderModuleCacheUniquePtr Anvil::ShaderModuleCache::create() { - std::shared_ptr result_ptr; + ShaderModuleCacheUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( new Anvil::ShaderModuleCache() @@ -133,20 +136,20 @@ std::shared_ptr Anvil::ShaderModuleCache::create() } /** Please see header for documentation */ -std::shared_ptr Anvil::ShaderModuleCache::get_cached_shader_module(std::weak_ptr in_device_ptr, - const char* in_spirv_blob, - uint32_t in_n_spirv_blob_bytes, - const std::string& in_cs_entrypoint_name, - const std::string& in_fs_entrypoint_name, - const std::string& in_gs_entrypoint_name, - const std::string& in_tc_entrypoint_name, - const std::string& in_te_entrypoint_name, - const std::string& in_vs_entrypoint_name) +Anvil::ShaderModuleUniquePtr Anvil::ShaderModuleCache::get_cached_shader_module(const Anvil::BaseDevice* in_device_ptr, + const char* in_spirv_blob, + uint32_t in_n_spirv_blob_bytes, + const std::string& in_cs_entrypoint_name, + const std::string& in_fs_entrypoint_name, + const std::string& in_gs_entrypoint_name, + const std::string& in_tc_entrypoint_name, + const std::string& in_te_entrypoint_name, + const std::string& in_vs_entrypoint_name) { - std::shared_ptr result_ptr; + Anvil::ShaderModuleUniquePtr result_ptr; { - std::unique_lock lock(m_cs); + std::unique_lock mutex_lock(*get_mutex() ); const auto hash (get_hash(in_spirv_blob, in_n_spirv_blob_bytes, @@ -156,25 +159,32 @@ std::shared_ptr Anvil::ShaderModuleCache::get_cached_shader in_tc_entrypoint_name, in_te_entrypoint_name, in_vs_entrypoint_name) ); - auto items_map_iterator(m_items.find(hash) ); + auto items_map_iterator(m_item_ptrs.find(hash) ); - if (items_map_iterator != m_items.end() ) + if (items_map_iterator != m_item_ptrs.end() ) { const auto& items = items_map_iterator->second; - for (const auto& current_item : items) + for 
(const auto& current_item_ptr : items) { - if (current_item.matches(in_device_ptr, - in_spirv_blob, - in_n_spirv_blob_bytes, - in_cs_entrypoint_name, - in_fs_entrypoint_name, - in_gs_entrypoint_name, - in_tc_entrypoint_name, - in_te_entrypoint_name, - in_vs_entrypoint_name) ) + if (current_item_ptr->matches(in_device_ptr, + in_spirv_blob, + in_n_spirv_blob_bytes, + in_cs_entrypoint_name, + in_fs_entrypoint_name, + in_gs_entrypoint_name, + in_tc_entrypoint_name, + in_te_entrypoint_name, + in_vs_entrypoint_name) ) { - result_ptr = current_item.shader_module_ptr; + anvil_assert(current_item_ptr->shader_module_owned_ptr != nullptr); + anvil_assert(current_item_ptr->shader_module_owned_ptr->get_module() != VK_NULL_HANDLE); + + result_ptr = Anvil::ShaderModuleUniquePtr(current_item_ptr->shader_module_owned_ptr.get(), + [](ShaderModule*) + { + /* Stub */ + }); break; } } @@ -220,64 +230,56 @@ size_t Anvil::ShaderModuleCache::get_hash(const char* in_spirv_blob, } /** TODO */ -void Anvil::ShaderModuleCache::on_object_about_to_be_released(CallbackArgument* in_callback_arg_ptr) +void Anvil::ShaderModuleCache::on_shader_module_object_about_to_be_released(CallbackArgument* in_callback_arg_ptr) { - const Anvil::OnObjectAboutToBeUnregisteredCallbackArgument* callback_arg_ptr = dynamic_cast(in_callback_arg_ptr); - - anvil_assert(callback_arg_ptr != nullptr); - - if (callback_arg_ptr->object_type == OBJECT_TYPE_SHADER_MODULE) - { - /* Technically we should never reach this place, as shader module cache does not implement - * any sort of GC mechanism. - * - * Please investigate if this assertion check fires. - */ - anvil_assert_fail(); - } + /* Technically we should never reach this place, as shader module cache does not implement + * any sort of GC mechanism. + * + * Please investigate if this assertion check fires. 
+ */ + ANVIL_REDUNDANT_ARGUMENT(in_callback_arg_ptr); + + anvil_assert_fail(); } /** TODO */ -void Anvil::ShaderModuleCache::on_object_registered(CallbackArgument* in_callback_arg_ptr) +void Anvil::ShaderModuleCache::on_shader_module_object_registered(CallbackArgument* in_callback_arg_ptr) { const auto callback_arg_ptr = dynamic_cast(in_callback_arg_ptr); anvil_assert(callback_arg_ptr != nullptr); - if (callback_arg_ptr->object_type == OBJECT_TYPE_SHADER_MODULE) - { - auto shader_module_ptr = static_cast(callback_arg_ptr->object_raw_ptr); + auto shader_module_ptr = static_cast(callback_arg_ptr->object_raw_ptr); - cache(shader_module_ptr->shared_from_this() ); - } + cache(shader_module_ptr); } void Anvil::ShaderModuleCache::update_subscriptions(bool in_should_init) { auto object_tracker_ptr = Anvil::ObjectTracker::get(); - auto on_object_about_to_be_released_func = std::bind(&ShaderModuleCache::on_object_about_to_be_released, + auto on_object_about_to_be_released_func = std::bind(&ShaderModuleCache::on_shader_module_object_about_to_be_released, this, std::placeholders::_1); - auto on_object_registered_func = std::bind(&ShaderModuleCache::on_object_registered, + auto on_object_registered_func = std::bind(&ShaderModuleCache::on_shader_module_object_registered, this, std::placeholders::_1); if (in_should_init) { - object_tracker_ptr->register_for_callbacks(OBJECT_TRACKER_CALLBACK_ID_ON_OBJECT_ABOUT_TO_BE_UNREGISTERED, + object_tracker_ptr->register_for_callbacks(OBJECT_TRACKER_CALLBACK_ID_ON_SHADER_MODULE_OBJECT_ABOUT_TO_BE_UNREGISTERED, on_object_about_to_be_released_func, this); - object_tracker_ptr->register_for_callbacks(OBJECT_TRACKER_CALLBACK_ID_ON_OBJECT_REGISTERED, + object_tracker_ptr->register_for_callbacks(OBJECT_TRACKER_CALLBACK_ID_ON_SHADER_MODULE_OBJECT_REGISTERED, on_object_registered_func, this); } else { - object_tracker_ptr->unregister_from_callbacks(OBJECT_TRACKER_CALLBACK_ID_ON_OBJECT_ABOUT_TO_BE_UNREGISTERED, + object_tracker_ptr->unregister_from_callbacks(OBJECT_TRACKER_CALLBACK_ID_ON_SHADER_MODULE_OBJECT_ABOUT_TO_BE_UNREGISTERED, on_object_about_to_be_released_func, this); - object_tracker_ptr->unregister_from_callbacks(OBJECT_TRACKER_CALLBACK_ID_ON_OBJECT_REGISTERED, + object_tracker_ptr->unregister_from_callbacks(OBJECT_TRACKER_CALLBACK_ID_ON_SHADER_MODULE_OBJECT_REGISTERED, on_object_registered_func, this); } diff --git a/src/misc/swapchain_create_info.cpp b/src/misc/swapchain_create_info.cpp new file mode 100644 index 00000000..28f95161 --- /dev/null +++ b/src/misc/swapchain_create_info.cpp @@ -0,0 +1,103 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// + +#include "misc/swapchain_create_info.h" + + +Anvil::SwapchainCreateInfoUniquePtr Anvil::SwapchainCreateInfo::create(Anvil::BaseDevice* in_device_ptr, + Anvil::RenderingSurface* in_parent_surface_ptr, + Anvil::Window* in_window_ptr, + Anvil::Format in_format, + Anvil::ColorSpaceKHR in_color_space, + Anvil::PresentModeKHR in_present_mode, + Anvil::ImageUsageFlags in_usage_flags, + uint32_t in_n_images, + const bool& in_clipped, + const Anvil::Swapchain* in_opt_old_swapchain_ptr) +{ + SwapchainCreateInfoUniquePtr result_ptr = SwapchainCreateInfoUniquePtr(nullptr, + std::default_delete() ); + + result_ptr.reset( + new SwapchainCreateInfo(in_device_ptr, + in_parent_surface_ptr, + in_window_ptr, + in_format, + in_color_space, + in_present_mode, + in_usage_flags, + in_n_images, + Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE, + Anvil::SwapchainCreateFlagBits::NONE, + Anvil::DeviceGroupPresentModeFlagBits::LOCAL_BIT_KHR, /* in_mgpu_present_mode_flags */ + in_clipped, + in_opt_old_swapchain_ptr) + ); + + return result_ptr; +} + +Anvil::SwapchainCreateInfo::SwapchainCreateInfo(Anvil::BaseDevice* in_device_ptr, + Anvil::RenderingSurface* in_parent_surface_ptr, + Anvil::Window* in_window_ptr, + Anvil::Format in_format, + Anvil::ColorSpaceKHR in_color_space, + Anvil::PresentModeKHR in_present_mode, + Anvil::ImageUsageFlags in_usage_flags, + uint32_t in_n_images, + MTSafety in_mt_safety, + Anvil::SwapchainCreateFlags in_flags, + Anvil::DeviceGroupPresentModeFlags in_mgpu_present_mode_flags, + const bool& in_clipped, + const Anvil::Swapchain* in_opt_old_swapchain_ptr) + :m_clipped (in_clipped), + m_color_space (in_color_space), + m_device_ptr (in_device_ptr), + m_flags (in_flags), + m_format (in_format), + m_mgpu_present_mode_flags(in_mgpu_present_mode_flags), + m_mt_safety (in_mt_safety), + m_n_images (in_n_images), + m_old_swapchain_ptr (in_opt_old_swapchain_ptr), + m_parent_surface_ptr (in_parent_surface_ptr), + m_present_mode (in_present_mode), + m_usage_flags (in_usage_flags), + m_window_ptr (in_window_ptr) +{ + anvil_assert(in_n_images > 0); + anvil_assert(in_parent_surface_ptr != nullptr); + anvil_assert(in_usage_flags != 0); +} + +void Anvil::SwapchainCreateInfo::set_view_format_list(const Anvil::Format* in_compatible_formats_ptr, + const uint32_t& in_n_compatible_formats) +{ + anvil_assert(in_n_compatible_formats > 0); + + m_flags |= Anvil::SwapchainCreateFlagBits::CREATE_MUTABLE_FORMAT_BIT; + + m_compatible_formats.resize(in_n_compatible_formats); + + memcpy(&m_compatible_formats.at(0), + in_compatible_formats_ptr, + sizeof(Anvil::Format) * in_n_compatible_formats); +} \ No newline at end of file diff --git a/src/misc/time.cpp b/src/misc/time.cpp index dea7fbfc..ef8a3661 100644 --- a/src/misc/time.cpp +++ b/src/misc/time.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
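The set_view_format_list() helper added in src/misc/swapchain_create_info.cpp above opts a swapchain into view-format reinterpretation by OR-ing CREATE_MUTABLE_FORMAT_BIT into the create flags and caching the compatible-format list. A minimal sketch of how it could be called (swapchain_create_info_ptr, compatible_formats and n_compatible_formats are assumptions standing in for application-provided data):

    /* Allow swapchain image views to be created with any of the listed compatible formats. */
    swapchain_create_info_ptr->set_view_format_list(compatible_formats,    /* in_compatible_formats_ptr */
                                                    n_compatible_formats); /* in_n_compatible_formats   */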
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal diff --git a/src/misc/types.cpp b/src/misc/types.cpp index daba6c09..db3d1c41 100644 --- a/src/misc/types.cpp +++ b/src/misc/types.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -19,2426 +19,43 @@ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // - -#include "misc/debug.h" -#include "wrappers/buffer.h" -#include "wrappers/device.h" -#include "wrappers/image.h" -#include "wrappers/memory_block.h" -#include "wrappers/physical_device.h" -#include "wrappers/semaphore.h" -#include "wrappers/shader_module.h" - -/** Please see header for specification */ -Anvil::BufferBarrier::BufferBarrier(const BufferBarrier& in) -{ - buffer = in.buffer; - buffer_barrier_vk = in.buffer_barrier_vk; - buffer_ptr = in.buffer_ptr; - dst_access_mask = in.dst_access_mask; - dst_queue_family_index = in.dst_queue_family_index; - offset = in.offset; - size = in.size; - src_access_mask = in.src_access_mask; - src_queue_family_index = in.src_queue_family_index; -} - -/** Please see header for specification */ -Anvil::BufferBarrier::BufferBarrier(VkAccessFlags in_source_access_mask, - VkAccessFlags in_destination_access_mask, - uint32_t in_src_queue_family_index, - uint32_t in_dst_queue_family_index, - std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - VkDeviceSize in_size) -{ - buffer = in_buffer_ptr->get_buffer(); - buffer_ptr = in_buffer_ptr; - dst_access_mask = static_cast(in_destination_access_mask); - dst_queue_family_index = in_dst_queue_family_index; - offset = in_offset; - size = in_size; - src_access_mask = static_cast(in_source_access_mask); - src_queue_family_index = in_src_queue_family_index; - - buffer_barrier_vk.buffer = in_buffer_ptr->get_buffer(); - buffer_barrier_vk.dstAccessMask = in_destination_access_mask; - buffer_barrier_vk.dstQueueFamilyIndex = in_dst_queue_family_index; - buffer_barrier_vk.offset = in_offset; - buffer_barrier_vk.pNext = nullptr; - buffer_barrier_vk.size = in_size; - buffer_barrier_vk.srcAccessMask = in_source_access_mask, - buffer_barrier_vk.srcQueueFamilyIndex = in_src_queue_family_index; - buffer_barrier_vk.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER; - - /* NOTE: For an image barrier to work correctly, the underlying subresource range must be assigned memory. 
- * Query for a memory block in order to force any listening memory allocators to bake */ - auto memory_block_ptr = buffer_ptr->get_memory_block(0 /* in_n_memory_block */); - - ANVIL_REDUNDANT_VARIABLE(memory_block_ptr); -} - -/** Please see header for specification */ -Anvil::BufferBarrier::~BufferBarrier() -{ - /* Stub */ -} - -/** Please see header for specification */ -Anvil::DeviceExtensionConfiguration::DeviceExtensionConfiguration() -{ - amd_draw_indirect_count = EXTENSION_AVAILABILITY_ENABLE_IF_AVAILABLE; - amd_gcn_shader = EXTENSION_AVAILABILITY_ENABLE_IF_AVAILABLE; - amd_gpu_shader_half_float = EXTENSION_AVAILABILITY_ENABLE_IF_AVAILABLE; - amd_gpu_shader_int16 = EXTENSION_AVAILABILITY_ENABLE_IF_AVAILABLE; - amd_rasterization_order = EXTENSION_AVAILABILITY_ENABLE_IF_AVAILABLE; - amd_shader_ballot = EXTENSION_AVAILABILITY_ENABLE_IF_AVAILABLE; - amd_shader_explicit_vertex_parameter = EXTENSION_AVAILABILITY_ENABLE_IF_AVAILABLE; - amd_shader_trinary_minmax = EXTENSION_AVAILABILITY_ENABLE_IF_AVAILABLE; - amd_texture_gather_bias_lod = EXTENSION_AVAILABILITY_ENABLE_IF_AVAILABLE; - ext_shader_subgroup_ballot = EXTENSION_AVAILABILITY_ENABLE_IF_AVAILABLE; - ext_shader_subgroup_vote = EXTENSION_AVAILABILITY_ENABLE_IF_AVAILABLE; - khr_16bit_storage = EXTENSION_AVAILABILITY_ENABLE_IF_AVAILABLE; - khr_maintenance1 = EXTENSION_AVAILABILITY_ENABLE_IF_AVAILABLE; - khr_storage_buffer_storage_class = EXTENSION_AVAILABILITY_ENABLE_IF_AVAILABLE; - khr_surface = EXTENSION_AVAILABILITY_ENABLE_IF_AVAILABLE; - khr_swapchain = EXTENSION_AVAILABILITY_ENABLE_IF_AVAILABLE; - - /* VK_AMD_negative_viewport_height interacts with KHR_maintenance1, hence it needs - * to be enabled manually. - */ - amd_negative_viewport_height = EXTENSION_AVAILABILITY_IGNORE; - - /* VK_EXT_debug_marker is only useful for debugging. 
*/ - #if defined(_DEBUG) - { - ext_debug_marker = EXTENSION_AVAILABILITY_ENABLE_IF_AVAILABLE; - } - #else - { - ext_debug_marker = EXTENSION_AVAILABILITY_IGNORE; - } - #endif - -} - -/** Please see header for specification */ -bool Anvil::DeviceExtensionConfiguration::is_supported_by_physical_device(std::weak_ptr in_physical_device_ptr, - std::vector* out_opt_unsupported_extensions_ptr) const -{ - typedef struct ExtensionItem - { - const char* extension_name; - bool is_required; - - ExtensionItem(const char* in_extension_name, - const bool& in_is_required) - { - extension_name = in_extension_name; - is_required = in_is_required; - } - } ExtensionItem; - - std::shared_ptr physical_device_locked_ptr(in_physical_device_ptr); - bool result (true); - std::vector extensions = - { - ExtensionItem(VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME, amd_draw_indirect_count == Anvil::EXTENSION_AVAILABILITY_REQUIRE), - ExtensionItem(VK_AMD_GCN_SHADER_EXTENSION_NAME, amd_gcn_shader == Anvil::EXTENSION_AVAILABILITY_REQUIRE), - ExtensionItem(VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME, amd_gpu_shader_half_float == Anvil::EXTENSION_AVAILABILITY_REQUIRE), - ExtensionItem(VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME, amd_gpu_shader_int16 == Anvil::EXTENSION_AVAILABILITY_REQUIRE), - ExtensionItem(VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME, amd_negative_viewport_height == Anvil::EXTENSION_AVAILABILITY_REQUIRE), - ExtensionItem(VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME, amd_rasterization_order == Anvil::EXTENSION_AVAILABILITY_REQUIRE), - ExtensionItem(VK_AMD_SHADER_BALLOT_EXTENSION_NAME, amd_shader_ballot == Anvil::EXTENSION_AVAILABILITY_REQUIRE), - ExtensionItem(VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME, amd_shader_explicit_vertex_parameter == Anvil::EXTENSION_AVAILABILITY_REQUIRE), - ExtensionItem(VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME, amd_shader_trinary_minmax == Anvil::EXTENSION_AVAILABILITY_REQUIRE), - ExtensionItem(VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME, amd_texture_gather_bias_lod == Anvil::EXTENSION_AVAILABILITY_REQUIRE), - ExtensionItem(VK_EXT_DEBUG_MARKER_EXTENSION_NAME, ext_debug_marker == Anvil::EXTENSION_AVAILABILITY_REQUIRE), - ExtensionItem(VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME, ext_shader_subgroup_vote == Anvil::EXTENSION_AVAILABILITY_REQUIRE), - ExtensionItem(VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME, ext_shader_subgroup_vote == Anvil::EXTENSION_AVAILABILITY_REQUIRE), - ExtensionItem(VK_KHR_16BIT_STORAGE_EXTENSION_NAME, khr_16bit_storage == Anvil::EXTENSION_AVAILABILITY_REQUIRE), - ExtensionItem(VK_KHR_MAINTENANCE1_EXTENSION_NAME, khr_maintenance1 == Anvil::EXTENSION_AVAILABILITY_REQUIRE), - ExtensionItem("VK_KHR_storage_buffer_storage_class", khr_storage_buffer_storage_class == Anvil::EXTENSION_AVAILABILITY_REQUIRE), - ExtensionItem(VK_KHR_SURFACE_EXTENSION_NAME, khr_surface == Anvil::EXTENSION_AVAILABILITY_REQUIRE), - ExtensionItem(VK_KHR_SWAPCHAIN_EXTENSION_NAME, khr_swapchain == Anvil::EXTENSION_AVAILABILITY_REQUIRE) - }; - - if (out_opt_unsupported_extensions_ptr != nullptr) - { - out_opt_unsupported_extensions_ptr->clear(); - } - - for (const auto& extension : other_extensions) - { - if (extension.second == Anvil::EXTENSION_AVAILABILITY_REQUIRE) - { - extensions.push_back( - ExtensionItem(extension.first.c_str(), - true) - ); - } - } - - for (const auto& current_extension : extensions) - { - if (!physical_device_locked_ptr->is_device_extension_supported(current_extension.extension_name) && - current_extension.is_required) - { - result = false; - - if 
(out_opt_unsupported_extensions_ptr == nullptr) - { - break; - } - else - { - out_opt_unsupported_extensions_ptr->push_back(current_extension.extension_name); - } - } - } - - return result; -} - -bool Anvil::DeviceExtensionConfiguration::operator==(const Anvil::DeviceExtensionConfiguration& in_config) const -{ - bool result; - - result = (amd_draw_indirect_count == in_config.amd_draw_indirect_count) && - (amd_gcn_shader == in_config.amd_gcn_shader) && - (amd_gpu_shader_half_float == in_config.amd_gpu_shader_half_float) && - (amd_gpu_shader_int16 == in_config.amd_gpu_shader_int16) && - (amd_negative_viewport_height == in_config.amd_negative_viewport_height) && - (amd_rasterization_order == in_config.amd_rasterization_order) && - (amd_shader_ballot == in_config.amd_shader_ballot) && - (amd_shader_explicit_vertex_parameter == in_config.amd_shader_explicit_vertex_parameter) && - (amd_shader_trinary_minmax == in_config.amd_shader_trinary_minmax) && - (amd_texture_gather_bias_lod == in_config.amd_texture_gather_bias_lod) && - (ext_debug_marker == in_config.ext_debug_marker) && - (ext_shader_subgroup_ballot == in_config.ext_shader_subgroup_ballot) && - (ext_shader_subgroup_vote == in_config.ext_shader_subgroup_vote) && - (khr_16bit_storage == in_config.khr_16bit_storage) && - (khr_storage_buffer_storage_class == in_config.khr_storage_buffer_storage_class) && - (khr_surface == in_config.khr_surface) && - (khr_swapchain == in_config.khr_swapchain); - - if (result) - { - for (const auto& current_other_extension : other_extensions) - { - auto iterator = std::find(in_config.other_extensions.begin(), - in_config.other_extensions.end(), - current_other_extension); - - if (iterator == in_config.other_extensions.end() ) - { - result = false; - - break; - } - } - } - - return result; -} - -/** Please see header for specification */ -bool Anvil::operator==(const Anvil::FormatProperties& in1, - const Anvil::FormatProperties& in2) -{ - return memcmp(&in1, - &in2, - sizeof(Anvil::FormatProperties) ) == 0; -} - -/** Please see header for specification */ -Anvil::ImageBarrier::ImageBarrier(const ImageBarrier& in) -{ - dst_access_mask = in.dst_access_mask; - dst_queue_family_index = in.dst_queue_family_index; - image = in.image; - image_barrier_vk = in.image_barrier_vk; - image_ptr = in.image_ptr; - new_layout = in.new_layout; - old_layout = in.old_layout; - src_access_mask = in.src_access_mask; - src_queue_family_index = in.src_queue_family_index; - subresource_range = in.subresource_range; -} - -/** Please see header for specification */ -Anvil::ImageBarrier::ImageBarrier(VkAccessFlags in_source_access_mask, - VkAccessFlags in_destination_access_mask, - bool in_by_region_barrier, - VkImageLayout in_old_layout, - VkImageLayout in_new_layout, - uint32_t in_src_queue_family_index, - uint32_t in_dst_queue_family_index, - std::shared_ptr in_image_ptr, - VkImageSubresourceRange in_image_subresource_range) -{ - by_region = in_by_region_barrier; - dst_access_mask = static_cast(in_destination_access_mask); - dst_queue_family_index = in_dst_queue_family_index; - image = (in_image_ptr != VK_NULL_HANDLE) ? 
in_image_ptr->get_image() - : VK_NULL_HANDLE; - image_ptr = in_image_ptr; - new_layout = in_new_layout; - old_layout = in_old_layout; - src_access_mask = static_cast(in_source_access_mask); - src_queue_family_index = in_src_queue_family_index; - subresource_range = in_image_subresource_range; - - image_barrier_vk.dstAccessMask = in_destination_access_mask; - image_barrier_vk.dstQueueFamilyIndex = in_dst_queue_family_index; - image_barrier_vk.image = (in_image_ptr != nullptr) ? in_image_ptr->get_image() - : VK_NULL_HANDLE; - image_barrier_vk.newLayout = in_new_layout; - image_barrier_vk.oldLayout = in_old_layout; - image_barrier_vk.pNext = nullptr; - image_barrier_vk.srcAccessMask = in_source_access_mask; - image_barrier_vk.srcQueueFamilyIndex = in_src_queue_family_index; - image_barrier_vk.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; - image_barrier_vk.subresourceRange = in_image_subresource_range; - - /* NOTE: For an image barrier to work correctly, the underlying subresource range must be assigned memory. - * Query for a memory block in order to force any listening memory allocators to bake */ - auto memory_block_ptr = image_ptr->get_memory_block(); - - ANVIL_REDUNDANT_VARIABLE(memory_block_ptr); -} - -/** Please see header for specification */ -Anvil::ImageBarrier::~ImageBarrier() -{ - /* Stub */ -} - -/* Please see header for specification */ -bool Anvil::operator==(const MemoryProperties& in1, - const MemoryProperties& in2) -{ - bool result = (in1.types.size() == in2.types.size() ); - - if (result) - { - for (uint32_t n_type = 0; - n_type < static_cast(in1.types.size() ) && result; - ++n_type) - { - const auto& current_type_in1 = in1.types.at(n_type); - const auto& current_type_in2 = in2.types.at(n_type); - - result &= (current_type_in1 == current_type_in2); - } - } - - return result; -} - -/* Please see header for specification */ -void Anvil::MemoryProperties::init(const VkPhysicalDeviceMemoryProperties& in_mem_properties) -{ - n_heaps = in_mem_properties.memoryHeapCount; - - heaps = new Anvil::MemoryHeap[n_heaps]; - anvil_assert(heaps != nullptr); - - for (unsigned int n_heap = 0; - n_heap < in_mem_properties.memoryHeapCount; - ++n_heap) - { - heaps[n_heap].flags = static_cast(in_mem_properties.memoryHeaps[n_heap].flags); - heaps[n_heap].size = in_mem_properties.memoryHeaps[n_heap].size; - } - - types.reserve(in_mem_properties.memoryTypeCount); - - for (unsigned int n_type = 0; - n_type < in_mem_properties.memoryTypeCount; - ++n_type) - { - types.push_back(MemoryType(in_mem_properties.memoryTypes[n_type], - this) ); - } -} - -/** Please see header for specification */ -bool Anvil::operator==(const MemoryHeap& in1, - const MemoryHeap& in2) -{ - return (in1.flags == in2.flags && - in1.size == in2.size); -} - -/* Please see header for specification */ -Anvil::MemoryType::MemoryType(const VkMemoryType& in_type, - struct MemoryProperties* in_memory_props_ptr) -{ - flags = static_cast(in_type.propertyFlags); - heap_ptr = &in_memory_props_ptr->heaps[in_type.heapIndex]; - - features = Anvil::Utils::get_memory_feature_flags_from_vk_property_flags(flags, - heap_ptr->flags); -} - -/* Please see header for specification */ -bool Anvil::operator==(const Anvil::MemoryType& in1, - const Anvil::MemoryType& in2) -{ - return ( in1.flags == in2.flags && - *in1.heap_ptr == *in2.heap_ptr); -} - -/** Returns a filled MipmapRawData structure for a 1D mip. - * - * NOTE: It is caller's responsibility to configure one of the data storage pointer members. 
- */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_1D(VkImageAspectFlagBits in_aspect, - uint32_t in_n_mipmap, - uint32_t in_row_size) -{ - MipmapRawData result; - - memset(&result, - 0, - sizeof(result) ); - - result.aspect = in_aspect; - result.data_size = in_row_size; - result.row_size = in_row_size; - result.n_layers = 1; - result.n_slices = 1; - result.n_mipmap = in_n_mipmap; - - return result; -} - -/** Returns a filled MipmapRawData structure for a 1D Array mip. - * - * NOTE: It is caller's responsibility to configure one of the data storage pointer members. - */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_1D_array(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - uint32_t in_row_size, - uint32_t in_data_size) -{ - MipmapRawData result; - - memset(&result, - 0, - sizeof(result) ); - - result.aspect = in_aspect; - result.data_size = in_data_size; - result.n_layer = in_n_layer; - result.n_layers = in_n_layers; - result.n_mipmap = in_n_mipmap; - result.n_slices = 1; - result.row_size = in_row_size; - - return result; -} - -/** Returns a filled MipmapRawData structure for a 2D mip. - * - * NOTE: It is caller's responsibility to configure one of the data storage pointer members. - */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_2D(VkImageAspectFlagBits in_aspect, - uint32_t in_n_mipmap, - uint32_t in_data_size, - uint32_t in_row_size) -{ - MipmapRawData result; - - memset(&result, - 0, - sizeof(result) ); - - result.aspect = in_aspect; - result.data_size = in_data_size; - result.n_layers = 1; - result.n_mipmap = in_n_mipmap; - result.n_slices = 1; - result.row_size = in_row_size; - - return result; -} - -/** Returns a filled MipmapRawData structure for a 2D array mip. - * - * NOTE: It is caller's responsibility to configure one of the data storage pointer members. - */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_2D_array(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - uint32_t in_data_size, - uint32_t in_row_size) -{ - MipmapRawData result; - - memset(&result, - 0, - sizeof(result) ); - - result.aspect = in_aspect; - result.data_size = in_data_size; - result.n_layer = in_n_layer; - result.n_layers = in_n_layers; - result.n_mipmap = in_n_mipmap; - result.n_slices = 1; - result.row_size = in_row_size; - - return result; -} - -/** Returns a filled MipmapRawData structure for a 3D mip. - * - * NOTE: It is caller's responsibility to configure one of the data storage pointer members. 
- */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_3D(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_slices, - uint32_t in_n_mipmap, - uint32_t in_data_size, - uint32_t in_row_size) -{ - MipmapRawData result; - - memset(&result, - 0, - sizeof(result) ); - - result.aspect = in_aspect; - result.data_size = in_data_size; - result.n_layers = 1; - result.n_layer = in_n_layer; - result.n_slices = in_n_slices; - result.n_mipmap = in_n_mipmap; - result.row_size = in_row_size; - - return result; -} - - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_1D_from_uchar_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_mipmap, - std::shared_ptr in_linear_tightly_packed_data_ptr, - uint32_t in_row_size) -{ - MipmapRawData result = create_1D(in_aspect, - in_n_mipmap, - in_row_size); - - result.linear_tightly_packed_data_uchar_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_1D_from_uchar_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_mipmap, - const unsigned char* in_linear_tightly_packed_data_ptr, - uint32_t in_row_size) -{ - MipmapRawData result = create_1D(in_aspect, - in_n_mipmap, - in_row_size); - - result.linear_tightly_packed_data_uchar_raw_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_1D_from_uchar_vector_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_mipmap, - std::shared_ptr > in_linear_tightly_packed_data_ptr, - uint32_t in_row_size) -{ - MipmapRawData result = create_1D(in_aspect, - in_n_mipmap, - in_row_size); - - result.linear_tightly_packed_data_uchar_vec_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_1D_array_from_uchar_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - std::shared_ptr in_linear_tightly_packed_data_ptr, - uint32_t in_row_size, - uint32_t in_data_size) -{ - MipmapRawData result = create_1D_array(in_aspect, - in_n_layer, - in_n_layers, - in_n_mipmap, - in_row_size, - in_data_size); - - result.linear_tightly_packed_data_uchar_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_1D_array_from_uchar_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - const unsigned char* in_linear_tightly_packed_data_ptr, - uint32_t in_row_size, - uint32_t in_data_size) -{ - MipmapRawData result = create_1D_array(in_aspect, - in_n_layer, - in_n_layers, - in_n_mipmap, - in_row_size, - in_data_size); - - result.linear_tightly_packed_data_uchar_raw_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_1D_array_from_uchar_vector_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - std::shared_ptr > in_linear_tightly_packed_data_ptr, - uint32_t in_row_size, - uint32_t in_data_size) -{ - MipmapRawData result = create_1D_array(in_aspect, - in_n_layer, - in_n_layers, - in_n_mipmap, - in_row_size, - in_data_size); - - result.linear_tightly_packed_data_uchar_vec_ptr = in_linear_tightly_packed_data_ptr; - - 
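    /* Each create_*_from_uchar_*() wrapper forwards to the geometry-only create_*() helper and
     * then fills in exactly one of the three storage members (linear_tightly_packed_data_uchar_ptr,
     * _uchar_raw_ptr or _uchar_vec_ptr); the plain create_*() variants leave all three null for
     * the caller to configure. A minimal usage sketch of the removed API, assuming a 64-texel R8
     * mip supplied through a vector (illustration only, not part of this change):
     *
     *     auto pixels = std::make_shared<std::vector<unsigned char> >(64, 0xFF);
     *     auto mip    = Anvil::MipmapRawData::create_1D_from_uchar_vector_ptr(VK_IMAGE_ASPECT_COLOR_BIT,
     *                                                                         0,      // mip level
     *                                                                         pixels,
     *                                                                         64);    // row size, in bytes
     */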
return result; -} - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_2D_from_uchar_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_mipmap, - std::shared_ptr in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size) -{ - MipmapRawData result = create_2D(in_aspect, - in_n_mipmap, - in_data_size, - in_row_size); - - result.linear_tightly_packed_data_uchar_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_2D_from_uchar_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_mipmap, - const unsigned char* in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size) -{ - MipmapRawData result = create_2D(in_aspect, - in_n_mipmap, - in_data_size, - in_row_size); - - result.linear_tightly_packed_data_uchar_raw_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_2D_from_uchar_vector_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_mipmap, - std::shared_ptr > in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size) -{ - MipmapRawData result = create_2D(in_aspect, - in_n_mipmap, - in_data_size, - in_row_size); - - result.linear_tightly_packed_data_uchar_vec_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_2D_array_from_uchar_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - std::shared_ptr in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size) -{ - MipmapRawData result = create_2D_array(in_aspect, - in_n_layer, - in_n_layers, - in_n_mipmap, - in_data_size, - in_row_size); - - result.linear_tightly_packed_data_uchar_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_2D_array_from_uchar_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - const unsigned char* in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size) -{ - MipmapRawData result = create_2D_array(in_aspect, - in_n_layer, - in_n_layers, - in_n_mipmap, - in_data_size, - in_row_size); - - result.linear_tightly_packed_data_uchar_raw_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_2D_array_from_uchar_vector_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - std::shared_ptr > in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size) -{ - MipmapRawData result = create_2D_array(in_aspect, - in_n_layer, - in_n_layers, - in_n_mipmap, - in_data_size, - in_row_size); - - result.linear_tightly_packed_data_uchar_vec_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - - - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_3D_from_uchar_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layer_slices, - uint32_t in_n_mipmap, - std::shared_ptr in_linear_tightly_packed_data_ptr, - uint32_t in_slice_data_size, - uint32_t in_row_size) -{ - MipmapRawData result = 
create_3D(in_aspect, - in_n_layer, - in_n_layer_slices, - in_n_mipmap, - in_slice_data_size, - in_row_size); - - result.linear_tightly_packed_data_uchar_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_3D_from_uchar_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layer_slices, - uint32_t in_n_mipmap, - const unsigned char* in_linear_tightly_packed_data_ptr, - uint32_t in_slice_data_size, - uint32_t in_row_size) -{ - MipmapRawData result = create_3D(in_aspect, - in_n_layer, - in_n_layer_slices, - in_n_mipmap, - in_slice_data_size, - in_row_size); - - result.linear_tightly_packed_data_uchar_raw_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_3D_from_uchar_vector_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layer_slices, - uint32_t in_n_mipmap, - std::shared_ptr > in_linear_tightly_packed_data_ptr, - uint32_t in_slice_data_size, - uint32_t in_row_size) -{ - MipmapRawData result = create_3D(in_aspect, - in_n_layer, - in_n_layer_slices, - in_n_mipmap, - in_slice_data_size, - in_row_size); - - result.linear_tightly_packed_data_uchar_vec_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_cube_map_from_uchar_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_mipmap, - std::shared_ptr in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size) -{ - anvil_assert(in_n_layer < 6); - - MipmapRawData result = create_2D_array(in_aspect, - in_n_layer, - 1, /* n_layer_slices */ - in_n_mipmap, - in_data_size, - in_row_size); - - result.linear_tightly_packed_data_uchar_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_cube_map_from_uchar_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_mipmap, - const unsigned char* in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size) -{ - anvil_assert(in_n_layer < 6); - - MipmapRawData result = create_2D_array(in_aspect, - in_n_layer, - 1, /* n_layer_slices */ - in_n_mipmap, - in_data_size, - in_row_size); - - result.linear_tightly_packed_data_uchar_raw_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_cube_map_from_uchar_vector_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_mipmap, - std::shared_ptr > in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size) -{ - anvil_assert(in_n_layer < 6); - - MipmapRawData result = create_2D_array(in_aspect, - in_n_layer, - 1, /* n_layer_slices */ - in_n_mipmap, - in_data_size, - in_row_size); - - result.linear_tightly_packed_data_uchar_vec_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_cube_map_array_from_uchar_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - std::shared_ptr in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size) -{ - MipmapRawData result = 
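        /* Cube-map uploads are expressed as 2D-array uploads: every face maps to one array layer,
         * which is why the single-cube helpers above assert in_n_layer < 6, while the cube-map-array
         * variants forward their layer range straight to create_2D_array(). */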
create_2D_array(in_aspect, - in_n_layer, - in_n_layers, - in_n_mipmap, - in_data_size, - in_row_size); - - result.linear_tightly_packed_data_uchar_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_cube_map_array_from_uchar_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - const unsigned char* in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size) -{ - MipmapRawData result = create_2D_array(in_aspect, - in_n_layer, - in_n_layers, - in_n_mipmap, - in_data_size, - in_row_size); - - result.linear_tightly_packed_data_uchar_raw_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - -/* Please see header for specification */ -Anvil::MipmapRawData Anvil::MipmapRawData::create_cube_map_array_from_uchar_vector_ptr(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_layers, - uint32_t in_n_mipmap, - std::shared_ptr > in_linear_tightly_packed_data_ptr, - uint32_t in_data_size, - uint32_t in_row_size) -{ - MipmapRawData result = create_2D_array(in_aspect, - in_n_layer, - in_n_layers, - in_n_mipmap, - in_data_size, - in_row_size); - - result.linear_tightly_packed_data_uchar_vec_ptr = in_linear_tightly_packed_data_ptr; - - return result; -} - -/** Please see header for specification */ -bool Anvil::operator==(const Anvil::QueueFamilyInfo& in1, - const Anvil::QueueFamilyInfo& in2) -{ - return (in1.flags == in2.flags && - in1.min_image_transfer_granularity.depth == in2.min_image_transfer_granularity.depth && - in1.min_image_transfer_granularity.height == in2.min_image_transfer_granularity.height && - in1.min_image_transfer_granularity.width == in2.min_image_transfer_granularity.width && - in1.n_queues == in2.n_queues && - in1.n_timestamp_bits == in2.n_timestamp_bits); -} - -/** Please see header for specification */ -Anvil::ShaderModuleStageEntryPoint::ShaderModuleStageEntryPoint() -{ - stage = SHADER_STAGE_UNKNOWN; -} - -/** Please see header for specification */ -Anvil::ShaderModuleStageEntryPoint::ShaderModuleStageEntryPoint(const std::string& in_name, - std::shared_ptr in_shader_module_ptr, - ShaderStage in_stage) -{ - anvil_assert(in_shader_module_ptr != nullptr); - - name = in_name; - shader_module_ptr = in_shader_module_ptr; - stage = in_stage; -} - -/** Please see header for specification */ -Anvil::ShaderModuleStageEntryPoint::ShaderModuleStageEntryPoint(const ShaderModuleStageEntryPoint& in) -{ - name = in.name; - shader_module_ptr = in.shader_module_ptr; - stage = in.stage; -} - -/** Please see header for specification */ -Anvil::ShaderModuleStageEntryPoint::~ShaderModuleStageEntryPoint() -{ - /* Stub */ -} - -/** Please see header for specification */ -Anvil::ShaderModuleStageEntryPoint& Anvil::ShaderModuleStageEntryPoint::operator=(const Anvil::ShaderModuleStageEntryPoint& in) -{ - name = in.name; - shader_module_ptr = in.shader_module_ptr; - stage = in.stage; - - return *this; -} - -/** Please see header for specification */ -Anvil::Utils::SparseMemoryBindingUpdateInfo::SparseMemoryBindingUpdateInfo() -{ - m_dirty = true; -} - -/** Please see header for specification */ -Anvil::SparseMemoryBindInfoID Anvil::Utils::SparseMemoryBindingUpdateInfo::add_bind_info(uint32_t in_n_signal_semaphores, - std::shared_ptr* in_opt_signal_semaphores_ptr, - uint32_t in_n_wait_semaphores, - std::shared_ptr* in_opt_wait_semaphores_ptr) -{ - Anvil::SparseMemoryBindInfoID result_id = 
static_cast(m_bindings.size() ); - BindingInfo new_binding; - - for (uint32_t n_signal_sem = 0; - n_signal_sem < in_n_signal_semaphores; - ++n_signal_sem) - { - new_binding.signal_semaphores.push_back(in_opt_signal_semaphores_ptr[n_signal_sem]); - } - - for (uint32_t n_wait_sem = 0; - n_wait_sem < in_n_wait_semaphores; - ++n_wait_sem) - { - new_binding.wait_semaphores.push_back(in_opt_wait_semaphores_ptr[n_wait_sem]); - } - - m_bindings.push_back(new_binding); - - return result_id; -} - -/** Please see header for specification */ -void Anvil::Utils::SparseMemoryBindingUpdateInfo::append_buffer_memory_update(SparseMemoryBindInfoID in_bind_info_id, - std::shared_ptr in_buffer_ptr, - VkDeviceSize in_buffer_memory_start_offset, - std::shared_ptr in_memory_block_ptr, - VkDeviceSize in_memory_block_start_offset, - VkDeviceSize in_size) -{ - /* Sanity checks */ - anvil_assert(in_buffer_ptr != nullptr); - anvil_assert(m_bindings.size() > in_bind_info_id); - anvil_assert(in_buffer_ptr->get_memory_requirements().size >= in_buffer_memory_start_offset + in_size); - - if (in_memory_block_ptr != nullptr) - { - anvil_assert(in_memory_block_ptr->get_size() >= in_memory_block_start_offset + in_size); - } - - /* Cache the update */ - auto& binding = m_bindings.at(in_bind_info_id); - GeneralBindInfo update; - VkSparseMemoryBind update_vk; - - update.memory_block_ptr = in_memory_block_ptr; - update.memory_block_start_offset = in_memory_block_start_offset; - update.size = in_size; - update.start_offset = in_buffer_memory_start_offset; - - update_vk.flags = 0; - update_vk.memory = (in_memory_block_ptr != nullptr) ? in_memory_block_ptr->get_memory() - : VK_NULL_HANDLE; - update_vk.memoryOffset = (in_memory_block_ptr != nullptr) ? (in_memory_block_ptr->get_start_offset() + in_memory_block_start_offset) - : UINT32_MAX; - update_vk.resourceOffset = (in_buffer_ptr->get_start_offset() + in_buffer_memory_start_offset); - update_vk.size = in_size; - - binding.buffer_updates[in_buffer_ptr].first.push_back (update); - binding.buffer_updates[in_buffer_ptr].second.push_back(update_vk); -} - -/** Please see header for specification */ -void Anvil::Utils::SparseMemoryBindingUpdateInfo::append_image_memory_update(SparseMemoryBindInfoID in_bind_info_id, - std::shared_ptr in_image_ptr, - const VkImageSubresource& in_subresource, - const VkOffset3D& in_offset, - const VkExtent3D& in_extent, - VkSparseMemoryBindFlags in_flags, - std::shared_ptr in_opt_memory_block_ptr, - VkDeviceSize in_opt_memory_block_start_offset) -{ - /* Sanity checks .. 
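 *
 *  Non-opaque image binds must not carry any bind flags (in_flags is required to be 0) and the
 *  requested subresource has to exist within the image - the asserts below enforce both.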
*/ - anvil_assert(in_image_ptr != nullptr); - anvil_assert(in_flags == 0); - anvil_assert(m_bindings.size() > in_bind_info_id); - - anvil_assert(in_image_ptr->get_image_n_layers() > in_subresource.arrayLayer); - anvil_assert(in_image_ptr->get_image_n_mipmaps() > in_subresource.mipLevel); - anvil_assert(in_image_ptr->has_aspects(in_subresource.aspectMask) ); - - if (in_opt_memory_block_ptr != nullptr) - { - anvil_assert(in_opt_memory_block_ptr->get_size() > in_opt_memory_block_start_offset); - } - - /* Cache the update */ - auto& binding = m_bindings.at(in_bind_info_id); - ImageBindInfo update; - VkSparseImageMemoryBind update_vk; - - update.extent = in_extent; - update.flags = in_flags; - update.memory_block_ptr = in_opt_memory_block_ptr; - update.memory_block_start_offset = in_opt_memory_block_start_offset; - update.offset = in_offset; - update.subresource = in_subresource; - - update_vk.extent = in_extent; - update_vk.flags = in_flags; - update_vk.memory = (in_opt_memory_block_ptr != nullptr) ? in_opt_memory_block_ptr->get_memory() - : VK_NULL_HANDLE; - update_vk.memoryOffset = (in_opt_memory_block_ptr != nullptr) ? in_opt_memory_block_ptr->get_start_offset() + in_opt_memory_block_start_offset - : UINT32_MAX; - update_vk.offset = in_offset; - update_vk.subresource = in_subresource; - - binding.image_updates[in_image_ptr].first.push_back (update); - binding.image_updates[in_image_ptr].second.push_back(update_vk); -} - -/** Please see header for specification */ -void Anvil::Utils::SparseMemoryBindingUpdateInfo::append_opaque_image_memory_update(SparseMemoryBindInfoID in_bind_info_id, - std::shared_ptr in_image_ptr, - VkDeviceSize in_resource_offset, - VkDeviceSize in_size, - VkSparseMemoryBindFlags in_flags, - std::shared_ptr in_opt_memory_block_ptr, - VkDeviceSize in_opt_memory_block_start_offset) -{ - /* Sanity checks */ - anvil_assert(in_image_ptr != nullptr); - anvil_assert(m_bindings.size() > in_bind_info_id); - anvil_assert(in_image_ptr->get_memory_requirements().size >= in_resource_offset + in_size); - - if (in_opt_memory_block_ptr != nullptr) - { - anvil_assert(in_opt_memory_block_ptr->get_size() >= in_opt_memory_block_start_offset + in_size); - } - - /* Cache the update */ - auto& binding = m_bindings.at(in_bind_info_id); - GeneralBindInfo update; - VkSparseMemoryBind update_vk; - - update.flags = in_flags; - update.memory_block_ptr = in_opt_memory_block_ptr; - update.memory_block_start_offset = in_opt_memory_block_start_offset; - update.size = in_size; - update.start_offset = in_resource_offset; - - update_vk.flags = in_flags; - update_vk.memory = (in_opt_memory_block_ptr != nullptr) ? in_opt_memory_block_ptr->get_memory() - : VK_NULL_HANDLE; - update_vk.memoryOffset = (in_opt_memory_block_ptr != nullptr) ? 
(in_opt_memory_block_ptr->get_start_offset() + in_opt_memory_block_start_offset) - : UINT32_MAX; - update_vk.resourceOffset = in_resource_offset; - update_vk.size = in_size; - - binding.image_opaque_updates[in_image_ptr].first.push_back (update); - binding.image_opaque_updates[in_image_ptr].second.push_back(update_vk); -} - -/** Please see header for specification */ -void Anvil::Utils::SparseMemoryBindingUpdateInfo::bake() -{ - const uint32_t n_bindings = static_cast(m_bindings.size() ); - - anvil_assert(m_dirty); - - if (n_bindings > 0) - { - m_bindings_vk.resize(n_bindings); - - m_buffer_bindings_vk.clear(); - - for (uint32_t n_binding = 0; - n_binding < n_bindings; - ++n_binding) - { - auto& bind_info = m_bindings [n_binding]; - uint32_t n_buffer_bindings_start_index = ~0u; - uint32_t n_image_bindings_start_index = ~0u; - uint32_t n_image_opaque_bindings_start_index = ~0u; - auto& vk_binding = m_bindings_vk[n_binding]; - - bind_info.signal_semaphores_vk.clear(); - bind_info.wait_semaphores_vk.clear (); - - bind_info.signal_semaphores_vk.reserve(bind_info.signal_semaphores.size() ); - bind_info.wait_semaphores_vk.reserve (bind_info.wait_semaphores.size () ); - - for (auto& signal_semaphore_ptr : bind_info.signal_semaphores) - { - bind_info.signal_semaphores_vk.push_back(signal_semaphore_ptr->get_semaphore() ); - } - - for (auto& wait_semaphore_ptr : bind_info.wait_semaphores) - { - bind_info.wait_semaphores_vk.push_back(wait_semaphore_ptr->get_semaphore() ); - } - - vk_binding.bufferBindCount = static_cast(bind_info.buffer_updates.size() ); - vk_binding.imageBindCount = static_cast(bind_info.image_updates.size() ); - vk_binding.imageOpaqueBindCount = static_cast(bind_info.image_opaque_updates.size() ); - vk_binding.pNext = nullptr; - vk_binding.pSignalSemaphores = (bind_info.signal_semaphores_vk.size() > 0) ? &bind_info.signal_semaphores_vk[0] : nullptr; - vk_binding.pWaitSemaphores = (bind_info.wait_semaphores_vk.size() > 0) ? 
&bind_info.wait_semaphores_vk [0] : nullptr; - vk_binding.signalSemaphoreCount = static_cast(bind_info.signal_semaphores_vk.size() ); - vk_binding.sType = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO; - vk_binding.waitSemaphoreCount = static_cast(bind_info.wait_semaphores_vk.size() ); - - n_buffer_bindings_start_index = static_cast(m_buffer_bindings_vk.size() ); - n_image_bindings_start_index = static_cast(m_image_bindings_vk.size() ); - n_image_opaque_bindings_start_index = static_cast(m_image_opaque_bindings_vk.size() ); - - for (auto& buffer_update : bind_info.buffer_updates) - { - const VkBuffer current_buffer_vk = buffer_update.first->get_buffer(); - VkSparseBufferMemoryBindInfo buffer_bind_info; - - anvil_assert(buffer_update.second.second.size() > 0); - - buffer_bind_info.bindCount = static_cast(buffer_update.second.second.size() ); - buffer_bind_info.buffer = current_buffer_vk; - buffer_bind_info.pBinds = &buffer_update.second.second[0]; - - m_buffer_bindings_vk.push_back(buffer_bind_info); - } - - for (auto& image_update : bind_info.image_updates) - { - const VkImage current_image_vk = image_update.first->get_image(); - VkSparseImageMemoryBindInfo image_bind_info; - - anvil_assert(image_update.second.second.size() > 0); - - image_bind_info.bindCount = static_cast(image_update.second.second.size() ); - image_bind_info.image = current_image_vk; - image_bind_info.pBinds = &image_update.second.second[0]; - - m_image_bindings_vk.push_back(image_bind_info); - } - - for (auto& image_opaque_update : bind_info.image_opaque_updates) - { - const VkImage current_image_vk = image_opaque_update.first->get_image(); - VkSparseImageOpaqueMemoryBindInfo image_opaque_bind_info; - - anvil_assert(image_opaque_update.second.second.size() > 0); - - image_opaque_bind_info.bindCount = static_cast(image_opaque_update.second.second.size() ); - image_opaque_bind_info.image = current_image_vk; - image_opaque_bind_info.pBinds = &image_opaque_update.second.second[0]; - - m_image_opaque_bindings_vk.push_back(image_opaque_bind_info); - } - - vk_binding.pBufferBinds = (bind_info.buffer_updates.size() > 0) ? &m_buffer_bindings_vk [n_buffer_bindings_start_index] : nullptr; - vk_binding.pImageBinds = (bind_info.image_updates.size() > 0) ? &m_image_bindings_vk [n_image_bindings_start_index] : nullptr; - vk_binding.pImageOpaqueBinds = (bind_info.image_opaque_updates.size() > 0) ? 
&m_image_opaque_bindings_vk[n_image_opaque_bindings_start_index] : nullptr; - } - } - else - { - m_bindings_vk.clear(); - } - - m_dirty = false; -} - -/** Please see header for specification */ -bool Anvil::Utils::SparseMemoryBindingUpdateInfo::get_bind_info_properties(SparseMemoryBindInfoID in_bind_info_id, - uint32_t* const out_opt_n_buffer_memory_updates_ptr, - uint32_t* const out_opt_n_image_memory_updates_ptr, - uint32_t* const out_opt_n_image_opaque_memory_updates_ptr, - uint32_t* const out_opt_n_signal_semaphores_ptr, - const std::shared_ptr** out_opt_signal_semaphores_ptr_ptr, - uint32_t* const out_opt_n_wait_semaphores_ptr, - const std::shared_ptr** out_opt_wait_semaphores_ptr_ptr) const -{ - decltype(m_bindings)::const_iterator binding_iterator; - bool result = false; - - if (m_bindings.size() <= in_bind_info_id) - { - anvil_assert(m_bindings.size() > in_bind_info_id); - - goto end; - } - - binding_iterator = m_bindings.begin() + static_cast(in_bind_info_id); - - if (out_opt_n_buffer_memory_updates_ptr != nullptr) - { - uint32_t n_buffer_mem_updates = 0; - - for (const auto& buffer_update_iterator : binding_iterator->buffer_updates) - { - n_buffer_mem_updates += static_cast(buffer_update_iterator.second.first.size() ); - } - - *out_opt_n_buffer_memory_updates_ptr = n_buffer_mem_updates; - } - - if (out_opt_n_image_memory_updates_ptr != nullptr) - { - uint32_t n_image_mem_updates = 0; - - for (const auto& image_update_iterator : binding_iterator->image_updates) - { - n_image_mem_updates += static_cast(image_update_iterator.second.first.size() ); - } - - *out_opt_n_image_memory_updates_ptr = n_image_mem_updates; - } - - if (out_opt_n_image_opaque_memory_updates_ptr != nullptr) - { - uint32_t n_image_opaque_mem_updates = 0; - - for (const auto& image_opaque_update_iterator : binding_iterator->image_opaque_updates) - { - n_image_opaque_mem_updates += static_cast(image_opaque_update_iterator.second.first.size() ); - } - - *out_opt_n_image_opaque_memory_updates_ptr = n_image_opaque_mem_updates; - } - - if (out_opt_n_signal_semaphores_ptr != nullptr) - { - *out_opt_n_signal_semaphores_ptr = static_cast(binding_iterator->signal_semaphores.size() ); - } - - if (out_opt_signal_semaphores_ptr_ptr != nullptr) - { - *out_opt_signal_semaphores_ptr_ptr = &binding_iterator->signal_semaphores[0]; - } - - if (out_opt_n_wait_semaphores_ptr != nullptr) - { - *out_opt_n_wait_semaphores_ptr = static_cast(binding_iterator->wait_semaphores.size() ); - } - - if (out_opt_wait_semaphores_ptr_ptr != nullptr) - { - *out_opt_wait_semaphores_ptr_ptr = &binding_iterator->wait_semaphores[0]; - } - - /* All done */ - result = true; -end: - return result; -} - -/** Please see header for specification */ -void Anvil::Utils::SparseMemoryBindingUpdateInfo::get_bind_sparse_call_args(uint32_t* out_bind_info_count_ptr, - const VkBindSparseInfo** out_bind_info_ptr, - std::shared_ptr* out_fence_to_set_ptr) -{ - if (m_dirty) - { - bake(); - - anvil_assert(!m_dirty); - } - - *out_bind_info_count_ptr = static_cast(m_bindings.size() ); - *out_bind_info_ptr = (m_bindings_vk.size() > 0) ? 
&m_bindings_vk[0] : nullptr; - *out_fence_to_set_ptr = m_fence_ptr; -} - -/** Please see header for specification */ -bool Anvil::Utils::SparseMemoryBindingUpdateInfo::get_buffer_memory_update_properties(SparseMemoryBindInfoID in_bind_info_id, - uint32_t in_n_update, - std::shared_ptr* out_opt_buffer_ptr, - VkDeviceSize* out_opt_buffer_memory_start_offset_ptr, - std::shared_ptr* out_opt_memory_block_ptr, - VkDeviceSize* out_opt_memory_block_start_offset_ptr, - VkDeviceSize* out_opt_size_ptr) const -{ - GeneralBindInfo buffer_bind; - BufferBindUpdateMap::const_iterator buffer_binding_map_iterator; - decltype(m_bindings)::const_iterator binding_iterator; - uint32_t n_current_update = 0; - bool result = false; - - if (m_bindings.size() <= in_bind_info_id) - { - anvil_assert(!(m_bindings.size() <= in_bind_info_id) ); - - goto end; - } - - binding_iterator = m_bindings.cbegin() + static_cast(in_bind_info_id); - buffer_binding_map_iterator = binding_iterator->buffer_updates.begin(); - - while (buffer_binding_map_iterator != binding_iterator->buffer_updates.end() ) - { - const uint32_t n_buffer_bindings = static_cast(buffer_binding_map_iterator->second.first.size() ); - - if (n_current_update + n_buffer_bindings > in_n_update) - { - buffer_bind = buffer_binding_map_iterator->second.first.at(in_n_update - n_current_update); - - break; - } - else - { - n_current_update += n_buffer_bindings; - buffer_binding_map_iterator ++; - } - } - - if (buffer_binding_map_iterator == binding_iterator->buffer_updates.end() ) - { - anvil_assert(!(buffer_binding_map_iterator == binding_iterator->buffer_updates.end()) ); - - goto end; - } - - if (out_opt_buffer_ptr != nullptr) - { - *out_opt_buffer_ptr = buffer_binding_map_iterator->first; - } - - if (out_opt_buffer_memory_start_offset_ptr != nullptr) - { - *out_opt_buffer_memory_start_offset_ptr = buffer_bind.start_offset; - } - - if (out_opt_memory_block_ptr != nullptr) - { - *out_opt_memory_block_ptr = buffer_bind.memory_block_ptr; - } - - if (out_opt_memory_block_start_offset_ptr != nullptr) - { - *out_opt_memory_block_start_offset_ptr = buffer_bind.memory_block_start_offset; - } - - if (out_opt_size_ptr != nullptr) - { - *out_opt_size_ptr = buffer_bind.size; - } - - /* All done */ - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::Utils::SparseMemoryBindingUpdateInfo::get_image_memory_update_properties(SparseMemoryBindInfoID in_bind_info_id, - uint32_t in_n_update, - std::shared_ptr* out_opt_image_ptr_ptr, - VkImageSubresource* out_opt_subresource_ptr, - VkOffset3D* out_opt_offset_ptr, - VkExtent3D* out_opt_extent_ptr, - VkSparseMemoryBindFlags* out_opt_flags_ptr, - std::shared_ptr* out_opt_memory_block_ptr_ptr, - VkDeviceSize* out_opt_memory_block_start_offset_ptr) const -{ - decltype(m_bindings)::const_iterator binding_iterator; - ImageBindInfo image_bind; - ImageBindUpdateMap::const_iterator image_binding_map_iterator; - uint32_t n_current_update = 0; - bool result = false; - - if (m_bindings.size() <= in_bind_info_id) - { - anvil_assert(!(m_bindings.size() <= in_bind_info_id) ); - - goto end; - } - - binding_iterator = m_bindings.cbegin() + static_cast(in_bind_info_id); - image_binding_map_iterator = binding_iterator->image_updates.begin(); - - while (image_binding_map_iterator != binding_iterator->image_updates.end() ) - { - const uint32_t n_image_bindings = static_cast(image_binding_map_iterator->second.first.size() ); - - if (n_current_update + n_image_bindings > in_n_update) - { - image_bind = 
image_binding_map_iterator->second.first.at(in_n_update - n_current_update); - - break; - } - else - { - n_current_update += n_image_bindings; - image_binding_map_iterator ++; - } - } - - if (image_binding_map_iterator == binding_iterator->image_updates.end() ) - { - anvil_assert(!(image_binding_map_iterator == binding_iterator->image_updates.end()) ); - - goto end; - } - - if (out_opt_image_ptr_ptr != nullptr) - { - *out_opt_image_ptr_ptr = image_binding_map_iterator->first; - } - - if (out_opt_subresource_ptr != nullptr) - { - *out_opt_subresource_ptr = image_bind.subresource; - } - - if (out_opt_offset_ptr != nullptr) - { - *out_opt_offset_ptr = image_bind.offset; - } - - if (out_opt_extent_ptr != nullptr) - { - *out_opt_extent_ptr = image_bind.extent; - } - - if (out_opt_flags_ptr != nullptr) - { - *out_opt_flags_ptr = image_bind.flags; - } - - if (out_opt_memory_block_ptr_ptr != nullptr) - { - *out_opt_memory_block_ptr_ptr = image_bind.memory_block_ptr; - } - - if (out_opt_memory_block_start_offset_ptr != nullptr) - { - *out_opt_memory_block_start_offset_ptr = image_bind.memory_block_start_offset; - } - - /* All done */ - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::Utils::SparseMemoryBindingUpdateInfo::get_image_opaque_memory_update_properties(SparseMemoryBindInfoID in_bind_info_id, - uint32_t in_n_update, - std::shared_ptr* out_opt_image_ptr_ptr, - VkDeviceSize* out_opt_resource_offset_ptr, - VkDeviceSize* out_opt_size_ptr, - VkSparseMemoryBindFlags* out_opt_flags_ptr, - std::shared_ptr* out_opt_memory_block_ptr_ptr, - VkDeviceSize* out_opt_memory_block_start_offset_ptr) const -{ - decltype(m_bindings)::const_iterator binding_iterator; - GeneralBindInfo image_opaque_bind; - ImageOpaqueBindUpdateMap::const_iterator image_opaque_binding_map_iterator; - uint32_t n_current_update = 0; - bool result = false; - - if (m_bindings.size() <= in_bind_info_id) - { - anvil_assert(!(m_bindings.size() <= in_bind_info_id) ); - - goto end; - } - - binding_iterator = m_bindings.cbegin() + static_cast(in_bind_info_id); - image_opaque_binding_map_iterator = binding_iterator->image_opaque_updates.begin(); - - while (image_opaque_binding_map_iterator != binding_iterator->image_opaque_updates.end() ) - { - const uint32_t n_image_opaque_bindings = static_cast(image_opaque_binding_map_iterator->second.first.size() ); - - if (n_current_update + n_image_opaque_bindings > in_n_update) - { - image_opaque_bind = image_opaque_binding_map_iterator->second.first.at(in_n_update - n_current_update); - - break; - } - else - { - n_current_update += n_image_opaque_bindings; - image_opaque_binding_map_iterator ++; - } - } - - if (image_opaque_binding_map_iterator == binding_iterator->image_opaque_updates.end() ) - { - anvil_assert(!(image_opaque_binding_map_iterator == binding_iterator->image_opaque_updates.end()) ); - - goto end; - } - - if (out_opt_image_ptr_ptr != nullptr) - { - *out_opt_image_ptr_ptr = image_opaque_binding_map_iterator->first; - } - - if (out_opt_resource_offset_ptr != nullptr) - { - *out_opt_resource_offset_ptr = image_opaque_bind.start_offset; - } - - if (out_opt_size_ptr != nullptr) - { - *out_opt_size_ptr = image_opaque_bind.size; - } - - if (out_opt_flags_ptr != nullptr) - { - *out_opt_flags_ptr = image_opaque_bind.flags; - } - - if (out_opt_memory_block_ptr_ptr != nullptr) - { - *out_opt_memory_block_ptr_ptr = image_opaque_bind.memory_block_ptr; - } - - if (out_opt_memory_block_start_offset_ptr != nullptr) - { - 
*out_opt_memory_block_start_offset_ptr = image_opaque_bind.memory_block_start_offset; - } - - /* All done */ - result = true; -end: - return result; -} - -/** Please see header for specification */ -Anvil::MemoryFeatureFlags Anvil::Utils::get_memory_feature_flags_from_vk_property_flags(VkMemoryPropertyFlags in_mem_type_flags, - VkMemoryHeapFlags in_mem_heap_flags) -{ - Anvil::MemoryFeatureFlags result = 0; - - ANVIL_REDUNDANT_ARGUMENT(in_mem_heap_flags); - - if ((in_mem_type_flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0) - { - result |= MEMORY_FEATURE_FLAG_DEVICE_LOCAL; - } - - if ((in_mem_type_flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) - { - result |= MEMORY_FEATURE_FLAG_MAPPABLE; - } - - if ((in_mem_type_flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0) - { - result |= MEMORY_FEATURE_FLAG_HOST_COHERENT; - } - - if ((in_mem_type_flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0) - { - result |= MEMORY_FEATURE_FLAG_HOST_CACHED; - } - - if ((in_mem_type_flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0) - { - result |= MEMORY_FEATURE_FLAG_LAZILY_ALLOCATED; - } - - return result; -} - -/** Please see header for specification */ -void Anvil::Utils::convert_queue_family_bits_to_family_indices(std::weak_ptr in_device_ptr, - Anvil::QueueFamilyBits in_queue_families, - uint32_t* out_opt_queue_family_indices_ptr, - uint32_t* out_opt_n_queue_family_indices_ptr) -{ - std::shared_ptr device_locked_ptr (in_device_ptr); - uint32_t n_queue_family_indices (0); - bool universal_queue_included(false); - - if ((in_queue_families & QUEUE_FAMILY_COMPUTE_BIT) != 0) - { - if (out_opt_queue_family_indices_ptr != nullptr) - { - out_opt_queue_family_indices_ptr[n_queue_family_indices] = device_locked_ptr->get_queue_family_index(Anvil::QUEUE_FAMILY_TYPE_COMPUTE); - - /* If the compute queue family is not available, use universal instead */ - if (out_opt_queue_family_indices_ptr[n_queue_family_indices] == UINT32_MAX) - { - out_opt_queue_family_indices_ptr[n_queue_family_indices] = device_locked_ptr->get_queue_family_index(Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL); - - ++n_queue_family_indices; - universal_queue_included = true; - } - else - { - ++n_queue_family_indices; - } - } - } - - if ((in_queue_families & QUEUE_FAMILY_DMA_BIT) != 0) - { - if (out_opt_queue_family_indices_ptr != nullptr) - { - out_opt_queue_family_indices_ptr[n_queue_family_indices] = device_locked_ptr->get_queue_family_index(Anvil::QUEUE_FAMILY_TYPE_TRANSFER); - - /* If the DMA queue family is unavailable, use universal instead */ - if (out_opt_queue_family_indices_ptr[n_queue_family_indices] == UINT32_MAX && - !universal_queue_included) - { - out_opt_queue_family_indices_ptr[n_queue_family_indices] = device_locked_ptr->get_queue_family_index(Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL); - - ++n_queue_family_indices; - universal_queue_included = true; - } - else - { - ++n_queue_family_indices; - } - } - } - - if (((in_queue_families & QUEUE_FAMILY_GRAPHICS_BIT) != 0) && - !universal_queue_included) - { - if (out_opt_queue_family_indices_ptr != nullptr) - { - out_opt_queue_family_indices_ptr[n_queue_family_indices] = device_locked_ptr->get_queue_family_index(Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL); - } - - ++n_queue_family_indices; - universal_queue_included = true; - } - - anvil_assert(n_queue_family_indices > 0); - - if (out_opt_n_queue_family_indices_ptr != nullptr) - { - *out_opt_n_queue_family_indices_ptr = n_queue_family_indices; - } -} - -/** Please see header for specification */ -VkAccessFlags 
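/* The helper first derives the full access mask implied by the layout and then trims it down to
 * what is applicable to the specified queue family type; for example, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
 * on a transfer-only queue resolves to just VK_ACCESS_TRANSFER_WRITE_BIT, whereas
 * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL collapses to 0 there, since shader reads are not
 * permitted on that family. */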
Anvil::Utils::get_access_mask_from_image_layout(VkImageLayout in_layout, - Anvil::QueueFamilyType in_queue_family_type) -{ - VkAccessFlags result = 0; - - switch (in_layout) - { - case VK_IMAGE_LAYOUT_UNDEFINED: - { - result = 0; - - break; - } - - case VK_IMAGE_LAYOUT_GENERAL: - { - result = VK_ACCESS_INDIRECT_COMMAND_READ_BIT | - VK_ACCESS_INDEX_READ_BIT | - VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | - VK_ACCESS_UNIFORM_READ_BIT | - VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | - VK_ACCESS_SHADER_READ_BIT | - VK_ACCESS_SHADER_WRITE_BIT | - VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | - VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | - VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | - VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | - VK_ACCESS_TRANSFER_READ_BIT | - VK_ACCESS_TRANSFER_WRITE_BIT | - VK_ACCESS_HOST_READ_BIT | - VK_ACCESS_HOST_WRITE_BIT | - VK_ACCESS_MEMORY_READ_BIT | - VK_ACCESS_MEMORY_WRITE_BIT; - - break; - } - - case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: - { - result = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | - VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; - - break; - } - - case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: - { - result = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | - VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; - - break; - } - - case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: - { - result = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; - - break; - } - - case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: - { - result = VK_ACCESS_SHADER_READ_BIT; - - break; - } - - case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: - { - result = VK_ACCESS_TRANSFER_READ_BIT; - - break; - } - - case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: - { - result = VK_ACCESS_TRANSFER_WRITE_BIT; - - break; - } - - case VK_IMAGE_LAYOUT_PREINITIALIZED: - { - result = VK_ACCESS_SHADER_READ_BIT; - - break; - } - - case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR: - { - result = VK_ACCESS_MEMORY_READ_BIT; - - break; - } - - default: - { - /* Invalid VkImageLayout argument value */ - anvil_assert_fail(); - } - } - - switch (in_queue_family_type) - { - case Anvil::QUEUE_FAMILY_TYPE_COMPUTE: - { - result &= (VK_ACCESS_INDIRECT_COMMAND_READ_BIT | - VK_ACCESS_MEMORY_READ_BIT | - VK_ACCESS_MEMORY_WRITE_BIT | - VK_ACCESS_SHADER_READ_BIT | - VK_ACCESS_SHADER_WRITE_BIT | - VK_ACCESS_TRANSFER_READ_BIT | - VK_ACCESS_TRANSFER_WRITE_BIT | - VK_ACCESS_UNIFORM_READ_BIT); - - break; - } - - case Anvil::QUEUE_FAMILY_TYPE_TRANSFER: - { - result &= (VK_ACCESS_MEMORY_READ_BIT | - VK_ACCESS_MEMORY_WRITE_BIT | - VK_ACCESS_TRANSFER_READ_BIT | - VK_ACCESS_TRANSFER_WRITE_BIT); - - break; - } - - case Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL: - { - result &= (VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | - VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | - VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | - VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | - VK_ACCESS_INDIRECT_COMMAND_READ_BIT | - VK_ACCESS_INDEX_READ_BIT | - VK_ACCESS_MEMORY_READ_BIT | - VK_ACCESS_MEMORY_WRITE_BIT | - VK_ACCESS_SHADER_READ_BIT | - VK_ACCESS_SHADER_WRITE_BIT | - VK_ACCESS_TRANSFER_READ_BIT | - VK_ACCESS_TRANSFER_WRITE_BIT | - VK_ACCESS_UNIFORM_READ_BIT | - VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT); - - break; - } - - case Anvil::QUEUE_FAMILY_TYPE_UNDEFINED: - { - break; - } - - default: - { - anvil_assert_fail(); - } - } - - return result; -} - -/* Please see header for specification */ -const char* Anvil::Utils::get_raw_string(VkAttachmentLoadOp in_load_op) -{ - static const char* attachment_load_op_strings[] = - { - "VK_ATTACHMENT_LOAD_OP_LOAD", - "VK_ATTACHMENT_LOAD_OP_CLEAR", - "VK_ATTACHMENT_LOAD_OP_DONT_CARE", - 
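        /* Keep the entries in VkAttachmentLoadOp declaration order - the static_assert against
         * VK_ATTACHMENT_LOAD_OP_RANGE_SIZE below relies on it, as does the direct indexing by
         * in_load_op. */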
}; - static const uint32_t n_attachment_load_op_strings = sizeof(attachment_load_op_strings) / sizeof(attachment_load_op_strings[0]); - - static_assert(n_attachment_load_op_strings == VK_ATTACHMENT_LOAD_OP_RANGE_SIZE, ""); - anvil_assert (in_load_op <= VK_ATTACHMENT_LOAD_OP_END_RANGE); - - return attachment_load_op_strings[in_load_op]; -} - -/* Please see header for specification */ -const char* Anvil::Utils::get_raw_string(VkAttachmentStoreOp in_store_op) -{ - static const char* attachment_store_op_strings[] = - { - "VK_ATTACHMENT_STORE_OP_STORE", - "VK_ATTACHMENT_STORE_OP_DONT_CARE", - }; - static const uint32_t n_attachment_store_op_strings = sizeof(attachment_store_op_strings) / sizeof(attachment_store_op_strings[0]); - - static_assert(n_attachment_store_op_strings == VK_ATTACHMENT_STORE_OP_RANGE_SIZE, ""); - anvil_assert (in_store_op <= VK_ATTACHMENT_STORE_OP_END_RANGE); - - return attachment_store_op_strings[in_store_op]; -} - -/* Please see header for specification */ -const char* Anvil::Utils::get_raw_string(VkBlendFactor in_blend_factor) -{ - const char* result = "?"; - - switch (in_blend_factor) - { - case VK_BLEND_FACTOR_ZERO: result = "VK_BLEND_FACTOR_ZERO"; break; - case VK_BLEND_FACTOR_ONE: result = "VK_BLEND_FACTOR_ONE"; break; - case VK_BLEND_FACTOR_SRC_COLOR: result = "VK_BLEND_FACTOR_SRC_COLOR"; break; - case VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR: result = "VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR"; break; - case VK_BLEND_FACTOR_DST_COLOR: result = "VK_BLEND_FACTOR_DST_COLOR"; break; - case VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR: result = "VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR"; break; - case VK_BLEND_FACTOR_SRC_ALPHA: result = "VK_BLEND_FACTOR_SRC_ALPHA"; break; - case VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA: result = "VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA"; break; - case VK_BLEND_FACTOR_DST_ALPHA: result = "VK_BLEND_FACTOR_DST_ALPHA"; break; - case VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA: result = "VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA"; break; - case VK_BLEND_FACTOR_CONSTANT_COLOR: result = "VK_BLEND_FACTOR_CONSTANT_COLOR"; break; - case VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR: result = "VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR"; break; - case VK_BLEND_FACTOR_CONSTANT_ALPHA: result = "VK_BLEND_FACTOR_CONSTANT_ALPHA"; break; - case VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA: result = "VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA"; break; - case VK_BLEND_FACTOR_SRC_ALPHA_SATURATE: result = "VK_BLEND_FACTOR_SRC_ALPHA_SATURATE"; break; - case VK_BLEND_FACTOR_SRC1_COLOR: result = "VK_BLEND_FACTOR_SRC1_COLOR"; break; - case VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR: result = "VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR"; break; - case VK_BLEND_FACTOR_SRC1_ALPHA: result = "VK_BLEND_FACTOR_SRC1_ALPHA"; break; - case VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA: result = "VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA"; break; - - default: - { - anvil_assert_fail(); - } - } - - return result; -} - -/* Please see header for specification */ -const char* Anvil::Utils::get_raw_string(VkBlendOp in_blend_op) -{ - const char* result = "?"; - - switch (in_blend_op) - { - case VK_BLEND_OP_ADD: result = "VK_BLEND_OP_ADD"; break; - case VK_BLEND_OP_SUBTRACT: result = "VK_BLEND_OP_SUBTRACT"; break; - case VK_BLEND_OP_REVERSE_SUBTRACT: result = "VK_BLEND_OP_REVERSE_SUBTRACT"; break; - case VK_BLEND_OP_MIN: result = "VK_BLEND_OP_MIN"; break; - case VK_BLEND_OP_MAX: result = "VK_BLEND_OP_MAX"; break; - - default: - { - anvil_assert_fail(); - } - } - - return result; -} - -/* Please see header for specification */ -const char* 
Anvil::Utils::get_raw_string(VkCompareOp in_compare_op) -{ - const char* result = "?"; - - switch (in_compare_op) - { - case VK_COMPARE_OP_NEVER: result = "VK_COMPARE_OP_NEVER"; break; - case VK_COMPARE_OP_LESS: result = "VK_COMPARE_OP_LESS"; break; - case VK_COMPARE_OP_EQUAL: result = "VK_COMPARE_OP_EQUAL"; break; - case VK_COMPARE_OP_LESS_OR_EQUAL: result = "VK_COMPARE_OP_LESS_OR_EQUAL"; break; - case VK_COMPARE_OP_GREATER: result = "VK_COMPARE_OP_GREATER"; break; - case VK_COMPARE_OP_NOT_EQUAL: result = "VK_COMPARE_OP_NOT_EQUAL"; break; - case VK_COMPARE_OP_GREATER_OR_EQUAL: result = "VK_COMPARE_OP_GREATER_OR_EQUAL"; break; - case VK_COMPARE_OP_ALWAYS: result = "VK_COMPARE_OP_ALWAYS"; break; - - default: - { - anvil_assert_fail(); - } - } - - return result; -} - -/* Please see header for specification */ -const char* Anvil::Utils::get_raw_string(VkCullModeFlagBits in_cull_mode) -{ - const char* result = "?"; - - switch (in_cull_mode) - { - case VK_CULL_MODE_NONE: result = "VK_CULL_MODE_NONE"; break; - case VK_CULL_MODE_FRONT_BIT: result = "VK_CULL_MODE_FRONT_BIT"; break; - case VK_CULL_MODE_BACK_BIT: result = "VK_CULL_MODE_BACK_BIT"; break; - case VK_CULL_MODE_FRONT_AND_BACK: result = "VK_CULL_MODE_FRONT_AND_BACK"; break; - - default: - { - anvil_assert_fail(); - } - } - - return result; -} - -/* Please see header for specification */ -const char* Anvil::Utils::get_raw_string(VkDescriptorType in_descriptor_type) -{ - const char* result = "?"; - - switch (in_descriptor_type) - { - case VK_DESCRIPTOR_TYPE_SAMPLER: result = "VK_DESCRIPTOR_TYPE_SAMPLER"; break; - case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: result = "VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER"; break; - case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: result = "VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE"; break; - case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: result = "VK_DESCRIPTOR_TYPE_STORAGE_IMAGE"; break; - case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: result = "VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER"; break; - case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: result = "VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER"; break; - case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: result = "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER"; break; - case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: result = "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER"; break; - case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: result = "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC"; break; - case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: result = "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC"; break; - case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: result = "VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT"; break; - - default: - { - anvil_assert_fail(); - } - } - - return result; -} - -/* Please see header for specification */ -const char* Anvil::Utils::get_raw_string(VkFrontFace in_front_face) -{ - const char* result = "?"; - - switch (in_front_face) - { - case VK_FRONT_FACE_COUNTER_CLOCKWISE: result = "VK_FRONT_FACE_COUNTER_CLOCKWISE"; break; - case VK_FRONT_FACE_CLOCKWISE: result = "VK_FRONT_FACE_CLOCKWISE"; break; - - default: - { - anvil_assert_fail(); - } - } - - return result; -} - -/* Please see header for specification */ -const char* Anvil::Utils::get_raw_string(VkImageAspectFlagBits in_image_aspect_flag) -{ - const char* result = "?"; - - switch (in_image_aspect_flag) - { - case VK_IMAGE_ASPECT_COLOR_BIT: result = "VK_IMAGE_ASPECT_COLOR_BIT"; break; - case VK_IMAGE_ASPECT_DEPTH_BIT: result = "VK_IMAGE_ASPECT_DEPTH_BIT"; break; - case VK_IMAGE_ASPECT_STENCIL_BIT: result = "VK_IMAGE_ASPECT_STENCIL_BIT"; break; - case 
VK_IMAGE_ASPECT_METADATA_BIT: result = "VK_IMAGE_ASPECT_METADATA_BIT"; break; - - default: - { - anvil_assert_fail(); - } - } - - return result; - -} - -/* Please see header for specification */ -const char* Anvil::Utils::get_raw_string(VkImageLayout in_image_layout) -{ - const char* result = "?!"; - - /* Note: we can't use an array-based solution here because of PRESENT_SRC_KHR */ - switch (in_image_layout) - { - case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: result = "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL"; break; - case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: result = "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL"; break; - case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: result = "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL"; break; - case VK_IMAGE_LAYOUT_GENERAL: result = "VK_IMAGE_LAYOUT_GENERAL"; break; - case VK_IMAGE_LAYOUT_PREINITIALIZED: result = "VK_IMAGE_LAYOUT_PREINITIALIZED"; break; - case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR: result = "VK_IMAGE_LAYOUT_PRESENT_SRC_KHR"; break; - case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: result = "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL"; break; - case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: result = "VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL"; break; - case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: result = "VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL"; break; - case VK_IMAGE_LAYOUT_UNDEFINED: result = "VK_IMAGE_LAYOUT_UNDEFINED"; break; - - default: - { - anvil_assert_fail(); - - break; - } - } - - return result; -} - -/* Please see header for specification */ -const char* Anvil::Utils::get_raw_string(VkImageTiling in_image_tiling) -{ - static const char* image_tilings[] = - { - "VK_IMAGE_TILING_OPTIMAL", - "VK_IMAGE_TILING_LINEAR" - }; - static const int32_t n_image_tilings = sizeof(image_tilings) / sizeof(image_tilings[0]); - - static_assert(n_image_tilings == VK_IMAGE_TILING_RANGE_SIZE, ""); - anvil_assert (in_image_tiling < n_image_tilings); - - return image_tilings[in_image_tiling]; -} - -/* Please see header for specification */ -const char* Anvil::Utils::get_raw_string(VkImageType in_image_type) -{ - static const char* image_types[] = - { - "VK_IMAGE_TYPE_1D", - "VK_IMAGE_TYPE_2D", - "VK_IMAGE_TYPE_3D" - }; - static const uint32_t n_image_types = sizeof(image_types) / sizeof(image_types[0]); - - static_assert(n_image_types == VK_IMAGE_TYPE_RANGE_SIZE, ""); - anvil_assert (in_image_type < VK_IMAGE_TYPE_RANGE_SIZE); - - return image_types[in_image_type]; -} - -/** Please see header for specification */ -const char* Anvil::Utils::get_raw_string(VkImageViewType in_image_view_type) -{ - static const char* image_view_types[] = - { - "VK_IMAGE_VIEW_TYPE_1D", - "VK_IMAGE_VIEW_TYPE_2D", - "VK_IMAGE_VIEW_TYPE_3D", - "VK_IMAGE_VIEW_TYPE_CUBE", - "VK_IMAGE_VIEW_TYPE_1D_ARRAY", - "VK_IMAGE_VIEW_TYPE_2D_ARRAY", - "VK_IMAGE_VIEW_TYPE_CUBE_ARRAY", - }; - static const uint32_t n_image_view_types = sizeof(image_view_types) / sizeof(image_view_types[0]); - - static_assert(n_image_view_types == VK_IMAGE_VIEW_TYPE_RANGE_SIZE, ""); - anvil_assert (in_image_view_type < VK_IMAGE_VIEW_TYPE_RANGE_SIZE); - - return image_view_types[in_image_view_type]; -} - -/* Please see header for specification */ -const char* Anvil::Utils::get_raw_string(VkLogicOp in_logic_op) -{ - const char* result = "?"; - - switch (in_logic_op) - { - case VK_LOGIC_OP_CLEAR: result = "VK_LOGIC_OP_CLEAR"; break; - case VK_LOGIC_OP_AND: result = "VK_LOGIC_OP_AND"; break; - case VK_LOGIC_OP_AND_REVERSE: result = "VK_LOGIC_OP_AND_REVERSE"; break; - case VK_LOGIC_OP_COPY: result = 
"VK_LOGIC_OP_COPY"; break; - case VK_LOGIC_OP_AND_INVERTED: result = "VK_LOGIC_OP_AND_INVERTED"; break; - case VK_LOGIC_OP_NO_OP: result = "VK_LOGIC_OP_NO_OP"; break; - case VK_LOGIC_OP_XOR: result = "VK_LOGIC_OP_XOR"; break; - case VK_LOGIC_OP_OR: result = "VK_LOGIC_OP_OR"; break; - case VK_LOGIC_OP_NOR: result = "VK_LOGIC_OP_NOR"; break; - case VK_LOGIC_OP_EQUIVALENT: result = "VK_LOGIC_OP_EQUIVALENT"; break; - case VK_LOGIC_OP_INVERT: result = "VK_LOGIC_OP_INVERT"; break; - case VK_LOGIC_OP_OR_REVERSE: result = "VK_LOGIC_OP_OR_REVERSE"; break; - case VK_LOGIC_OP_COPY_INVERTED: result = "VK_LOGIC_OP_COPY_INVERTED"; break; - case VK_LOGIC_OP_OR_INVERTED: result = "VK_LOGIC_OP_OR_INVERTED"; break; - case VK_LOGIC_OP_NAND: result = "VK_LOGIC_OP_NAND"; break; - case VK_LOGIC_OP_SET: result = "VK_LOGIC_OP_SET"; break; - - default: - { - anvil_assert_fail(); - } - } - - return result; -} - -/* Please see header for specification */ -const char* Anvil::Utils::get_raw_string(VkPolygonMode in_polygon_mode) -{ - const char* result = "?"; - - switch (in_polygon_mode) - { - case VK_POLYGON_MODE_FILL: result = "VK_POLYGON_MODE_FILL"; break; - case VK_POLYGON_MODE_LINE: result = "VK_POLYGON_MODE_LINE"; break; - case VK_POLYGON_MODE_POINT: result = "VK_POLYGON_MODE_POINT"; break; - - default: - { - anvil_assert_fail(); - } - } - - return result; -} - -/* Please see header for specification */ -const char* Anvil::Utils::get_raw_string(VkPrimitiveTopology in_topology) -{ - const char* result = "?"; - - switch (in_topology) - { - case VK_PRIMITIVE_TOPOLOGY_POINT_LIST: result = "VK_PRIMITIVE_TOPOLOGY_POINT_LIST"; break; - case VK_PRIMITIVE_TOPOLOGY_LINE_LIST: result = "VK_PRIMITIVE_TOPOLOGY_LINE_LIST"; break; - case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP: result = "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP"; break; - case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST: result = "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST"; break; - case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP: result = "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP"; break; - case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN: result = "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN"; break; - case VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY: result = "VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY"; break; - case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY: result = "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY"; break; - case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY: result = "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY"; break; - case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY: result = "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY"; break; - case VK_PRIMITIVE_TOPOLOGY_PATCH_LIST: result = "VK_PRIMITIVE_TOPOLOGY_PATCH_LIST"; break; - - default: - { - anvil_assert_fail(); - } - } - - return result; -} - -/* Please see header for specification */ -const char* Anvil::Utils::get_raw_string(VkSampleCountFlagBits in_sample_count) -{ - const char* result = "?"; - - switch (in_sample_count) - { - case VK_SAMPLE_COUNT_1_BIT: result = "VK_SAMPLE_COUNT_1_BIT"; break; - case VK_SAMPLE_COUNT_2_BIT: result = "VK_SAMPLE_COUNT_2_BIT"; break; - case VK_SAMPLE_COUNT_4_BIT: result = "VK_SAMPLE_COUNT_4_BIT"; break; - case VK_SAMPLE_COUNT_8_BIT: result = "VK_SAMPLE_COUNT_8_BIT"; break; - case VK_SAMPLE_COUNT_16_BIT: result = "VK_SAMPLE_COUNT_16_BIT"; break; - case VK_SAMPLE_COUNT_32_BIT: result = "VK_SAMPLE_COUNT_32_BIT"; break; - case VK_SAMPLE_COUNT_64_BIT: result = "VK_SAMPLE_COUNT_64_BIT"; break; - - default: - { - anvil_assert_fail(); - } - } - - return result; -} - -/* Please 
see header for specification */ -const char* Anvil::Utils::get_raw_string(Anvil::ShaderStage in_shader_stage) -{ - const char* result = "?"; - - switch (in_shader_stage) - { - case SHADER_STAGE_COMPUTE: result = "SHADER_STAGE_COMPUTE"; break; - case SHADER_STAGE_FRAGMENT: result = "SHADER_STAGE_FRAGMENT"; break; - case SHADER_STAGE_GEOMETRY: result = "SHADER_STAGE_GEOMETRY"; break; - case SHADER_STAGE_TESSELLATION_CONTROL: result = "SHADER_STAGE_TESSELLATION_CONTROL"; break; - case SHADER_STAGE_TESSELLATION_EVALUATION: result = "SHADER_STAGE_TESSELLATION_EVALUATION"; break; - case SHADER_STAGE_VERTEX: result = "SHADER_STAGE_VERTEX"; break; - - default: - { - anvil_assert_fail(); - } - } - - return result; -} - -/* Please see header for specification */ -const char* Anvil::Utils::get_raw_string(VkShaderStageFlagBits in_shader_stage) -{ - const char* result = "?"; - - switch (in_shader_stage) - { - case VK_SHADER_STAGE_ALL_GRAPHICS: result = "VK_SHADER_STAGE_ALL_GRAPHICS"; break; - case VK_SHADER_STAGE_COMPUTE_BIT: result = "VK_SHADER_STAGE_COMPUTE_BIT"; break; - case VK_SHADER_STAGE_FRAGMENT_BIT: result = "VK_SHADER_STAGE_FRAGMENT_BIT"; break; - case VK_SHADER_STAGE_GEOMETRY_BIT: result = "VK_SHADER_STAGE_GEOMETRY_BIT"; break; - case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT: result = "VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT"; break; - case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT: result = "VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT"; break; - case VK_SHADER_STAGE_VERTEX_BIT: result = "VK_SHADER_STAGE_VERTEX_BIT"; break; - - default: - { - anvil_assert_fail(); - } - } - - return result; -} - -/* Please see header for specification */ -const char* Anvil::Utils::get_raw_string(VkSharingMode in_sharing_mode) -{ - static const char* sharing_modes[] = - { - "VK_SHARING_MODE_EXCLUSIVE", - "VK_SHARING_MODE_CONCURRENT" - }; - static const int32_t n_sharing_modes = sizeof(sharing_modes) / sizeof(sharing_modes[0]); - - static_assert(n_sharing_modes == VK_SHARING_MODE_RANGE_SIZE, ""); - anvil_assert (in_sharing_mode < n_sharing_modes); - - return sharing_modes[in_sharing_mode]; -} - -/* Please see header for specification */ -const char* Anvil::Utils::get_raw_string(VkStencilOp in_stencil_op) -{ - const char* result = "?"; - - switch (in_stencil_op) - { - case VK_STENCIL_OP_KEEP: result = "VK_STENCIL_OP_KEEP"; break; - case VK_STENCIL_OP_ZERO: result = "VK_STENCIL_OP_ZERO"; break; - case VK_STENCIL_OP_REPLACE: result = "VK_STENCIL_OP_REPLACE"; break; - case VK_STENCIL_OP_INCREMENT_AND_CLAMP: result = "VK_STENCIL_OP_INCREMENT_AND_CLAMP"; break; - case VK_STENCIL_OP_DECREMENT_AND_CLAMP: result = "VK_STENCIL_OP_DECREMENT_AND_CLAMP"; break; - case VK_STENCIL_OP_INVERT: result = "VK_STENCIL_OP_INVERT"; break; - case VK_STENCIL_OP_INCREMENT_AND_WRAP: result = "VK_STENCIL_OP_INCREMENT_AND_WRAP"; break; - case VK_STENCIL_OP_DECREMENT_AND_WRAP: result = "VK_STENCIL_OP_DECREMENT_AND_WRAP"; break; - - default: - { - anvil_assert_fail(); - } - } - - return result; -} - -/* Please see header for specification */ -VkShaderStageFlagBits Anvil::Utils::get_shader_stage_flag_bits_from_shader_stage(Anvil::ShaderStage in_shader_stage) -{ - VkShaderStageFlagBits result = VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM; - - switch (in_shader_stage) - { - case Anvil::ShaderStage::SHADER_STAGE_COMPUTE: result = VK_SHADER_STAGE_COMPUTE_BIT; break; - case Anvil::ShaderStage::SHADER_STAGE_FRAGMENT: result = VK_SHADER_STAGE_FRAGMENT_BIT; break; - case Anvil::ShaderStage::SHADER_STAGE_GEOMETRY: result = VK_SHADER_STAGE_GEOMETRY_BIT; 
break; - case Anvil::ShaderStage::SHADER_STAGE_TESSELLATION_CONTROL: result = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT; break; - case Anvil::ShaderStage::SHADER_STAGE_TESSELLATION_EVALUATION: result = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT; break; - case Anvil::ShaderStage::SHADER_STAGE_VERTEX: result = VK_SHADER_STAGE_VERTEX_BIT; break; - - default: - { - anvil_assert_fail(); - } - } - - return result; -} - -/* Please see header for specification */ -void Anvil::Utils::get_vk_property_flags_from_memory_feature_flags(Anvil::MemoryFeatureFlags in_mem_feature_flags, - VkMemoryPropertyFlags* out_mem_type_flags_ptr, - VkMemoryHeapFlags* out_mem_heap_flags_ptr) -{ - VkMemoryHeapFlags result_mem_heap_flags = 0; - VkMemoryPropertyFlags result_mem_type_flags = 0; - - if ((in_mem_feature_flags & MEMORY_FEATURE_FLAG_DEVICE_LOCAL) != 0) - { - result_mem_type_flags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; - } - - if ((in_mem_feature_flags & MEMORY_FEATURE_FLAG_MAPPABLE) != 0) - { - result_mem_type_flags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; - } - - if ((in_mem_feature_flags & MEMORY_FEATURE_FLAG_HOST_COHERENT) != 0) - { - result_mem_type_flags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; - } - - if ((in_mem_feature_flags & MEMORY_FEATURE_FLAG_HOST_CACHED) != 0) - { - result_mem_type_flags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT; - } - - if ((in_mem_feature_flags & MEMORY_FEATURE_FLAG_LAZILY_ALLOCATED) != 0) - { - result_mem_type_flags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT; - } - - *out_mem_heap_flags_ptr = result_mem_heap_flags; - *out_mem_type_flags_ptr = result_mem_type_flags; -} +#include "misc/types.h" + +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::AccessFlags, VkAccessFlags, Anvil::AccessFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::BufferCreateFlags, VkBufferCreateFlags, Anvil::BufferCreateFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::BufferUsageFlags, VkBufferUsageFlags, Anvil::BufferUsageFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::ColorComponentFlags, VkColorComponentFlags, Anvil::ColorComponentFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::CompositeAlphaFlags, VkCompositeAlphaFlagsKHR, Anvil::CompositeAlphaFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::CullModeFlags, VkCullModeFlags, Anvil::CullModeFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::DebugMessageSeverityFlags, VkDebugUtilsMessageSeverityFlagsEXT, Anvil::DebugMessageSeverityFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::DebugMessageTypeFlags, VkDebugUtilsMessageTypeFlagsEXT, Anvil::DebugMessageTypeFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::DependencyFlags, VkDependencyFlags, Anvil::DependencyFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::DescriptorBindingFlags, VkDescriptorBindingFlagsEXT, Anvil::DescriptorBindingFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::DescriptorPoolCreateFlags, VkDescriptorPoolCreateFlags, Anvil::DescriptorPoolCreateFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::DeviceGroupPresentModeFlags, VkDeviceGroupPresentModeFlagsKHR, Anvil::DeviceGroupPresentModeFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::ExternalFenceHandleTypeFlags, VkExternalFenceHandleTypeFlagsKHR, Anvil::ExternalFenceHandleTypeFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::ExternalMemoryHandleTypeFlags, VkExternalMemoryHandleTypeFlagsKHR, Anvil::ExternalMemoryHandleTypeFlagBits); 
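The INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION instantiations in this hunk generate the conversion glue between Anvil's typesafe flag wrappers and the raw Vulkan bitfields they mirror. A minimal usage sketch follows; it is illustrative only and not part of the patch. The Anvil::AccessFlags/VkAccessFlags pairing and the get_vk() accessor come from this diff, while the enumerator names and bitwise-OR support are assumptions.

// Illustrative sketch only -- not part of the committed sources.
// Compose Anvil-side flag bits, then hand the raw Vulkan value to whatever
// Vulkan struct needs it.
Anvil::AccessFlags access_mask = Anvil::AccessFlagBits::TRANSFER_READ_BIT |   // enumerator names assumed
                                 Anvil::AccessFlagBits::TRANSFER_WRITE_BIT;
VkAccessFlags      access_vk   = access_mask.get_vk();                        // raw VkAccessFlags value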
+INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::ExternalSemaphoreHandleTypeFlags, VkExternalSemaphoreHandleTypeFlagsKHR, Anvil::ExternalSemaphoreHandleTypeFlagBits) +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::FormatFeatureFlags, VkFormatFeatureFlags, Anvil::FormatFeatureFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::ImageAspectFlags, VkImageAspectFlags, Anvil::ImageAspectFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::ImageCreateFlags, VkImageCreateFlags, Anvil::ImageCreateFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::ImageUsageFlags, VkImageUsageFlags, Anvil::ImageUsageFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::MemoryFeatureFlags, uint32_t, Anvil::MemoryFeatureFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::MemoryHeapFlags, VkMemoryHeapFlags, Anvil::MemoryHeapFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::MemoryPropertyFlags, VkMemoryPropertyFlags, Anvil::MemoryPropertyFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::PeerMemoryFeatureFlags, VkPeerMemoryFeatureFlagsKHR, Anvil::PeerMemoryFeatureFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::PipelineCreateFlags, VkPipelineCreateFlags, Anvil::PipelineCreateFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::PipelineStageFlags, VkPipelineStageFlags, Anvil::PipelineStageFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::QueueFamilyFlags, uint32_t, Anvil::QueueFamilyFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::QueueFlags, VkQueueFlags, Anvil::QueueFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::QueryControlFlags, VkQueryControlFlags, Anvil::QueryControlFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::QueryPipelineStatisticFlags, VkQueryPipelineStatisticFlags, Anvil::QueryPipelineStatisticFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::QueryResultFlags, VkQueryResultFlags, Anvil::QueryResultFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::ResolveModeFlags, VkResolveModeFlagsKHR, Anvil::ResolveModeFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::SampleCountFlags, VkSampleCountFlags, Anvil::SampleCountFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::ShaderStageFlags, VkShaderStageFlags, Anvil::ShaderStageFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::SparseImageFormatFlags, VkSparseImageFormatFlags, Anvil::SparseImageFormatFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::SparseMemoryBindFlags, VkSparseMemoryBindFlags, Anvil::SparseMemoryBindFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::StencilFaceFlags, VkStencilFaceFlags, Anvil::StencilFaceFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::SurfaceTransformFlags, VkSurfaceTransformFlagsKHR, Anvil::SurfaceTransformFlagBits); +INJECT_BITFIELD_HELPER_FUNC_IMPLEMENTATION(Anvil::SwapchainCreateFlags, VkSwapchainCreateFlagsKHR, Anvil::SwapchainCreateFlagBits); \ No newline at end of file diff --git a/src/misc/types_classes.cpp b/src/misc/types_classes.cpp new file mode 100644 index 00000000..28c9986a --- /dev/null +++ b/src/misc/types_classes.cpp @@ -0,0 +1,844 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#include "misc/buffer_create_info.h" +#include "misc/image_create_info.h" +#include "misc/types.h" +#include "wrappers/buffer.h" +#include "wrappers/image.h" +#include "wrappers/memory_block.h" +#include "wrappers/semaphore.h" + +/** Please see header for specification */ +Anvil::SparseMemoryBindInfoID Anvil::SparseMemoryBindingUpdateInfo::add_bind_info(uint32_t in_n_signal_semaphores, + Anvil::Semaphore* const* in_opt_signal_semaphores_ptr, + uint32_t in_n_wait_semaphores, + Anvil::Semaphore* const* in_opt_wait_semaphores_ptr) +{ + Anvil::SparseMemoryBindInfoID result_id = static_cast(m_bindings.size() ); + BindingInfo new_binding; + + for (uint32_t n_signal_sem = 0; + n_signal_sem < in_n_signal_semaphores; + ++n_signal_sem) + { + new_binding.signal_semaphores.push_back(in_opt_signal_semaphores_ptr[n_signal_sem]); + } + + for (uint32_t n_wait_sem = 0; + n_wait_sem < in_n_wait_semaphores; + ++n_wait_sem) + { + new_binding.wait_semaphores.push_back(in_opt_wait_semaphores_ptr[n_wait_sem]); + } + + m_bindings.push_back(new_binding); + + return result_id; +} + +/** Please see header for specification */ +void Anvil::SparseMemoryBindingUpdateInfo::append_buffer_memory_update(SparseMemoryBindInfoID in_bind_info_id, + Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_buffer_memory_start_offset, + Anvil::MemoryBlock* in_memory_block_ptr, + VkDeviceSize in_memory_block_start_offset, + bool in_memory_block_owned_by_buffer, + VkDeviceSize in_size) +{ + /* Sanity checks */ + anvil_assert(in_buffer_ptr != nullptr); + anvil_assert(m_bindings.size() > in_bind_info_id); + anvil_assert(in_buffer_ptr->get_memory_requirements().size >= in_buffer_memory_start_offset + in_size); + + if (in_memory_block_ptr != nullptr) + { + anvil_assert(in_memory_block_ptr->get_create_info_ptr()->get_size() >= in_memory_block_start_offset + in_size); + } + + /* Cache the update */ + auto& binding = m_bindings.at(in_bind_info_id); + GeneralBindInfo update; + VkSparseMemoryBind update_vk; + + update.memory_block_owned_by_target = in_memory_block_owned_by_buffer; + update.memory_block_ptr = in_memory_block_ptr; + update.memory_block_start_offset = in_memory_block_start_offset; + update.size = in_size; + update.start_offset = in_buffer_memory_start_offset; + + update_vk.flags = 0; + update_vk.memory = (in_memory_block_ptr != nullptr) ? in_memory_block_ptr->get_memory() + : VK_NULL_HANDLE; + update_vk.memoryOffset = (in_memory_block_ptr != nullptr) ? 
(in_memory_block_ptr->get_start_offset() + in_memory_block_start_offset) + : UINT32_MAX; + update_vk.resourceOffset = (in_buffer_ptr->get_create_info_ptr()->get_start_offset() + in_buffer_memory_start_offset); + update_vk.size = in_size; + + binding.buffer_updates[in_buffer_ptr].first.push_back (update); + binding.buffer_updates[in_buffer_ptr].second.push_back(update_vk); +} + +/** Please see header for specification */ +void Anvil::SparseMemoryBindingUpdateInfo::append_image_memory_update(SparseMemoryBindInfoID in_bind_info_id, + Anvil::Image* in_image_ptr, + const Anvil::ImageSubresource& in_subresource, + const VkOffset3D& in_offset, + const VkExtent3D& in_extent, + Anvil::SparseMemoryBindFlags in_flags, + Anvil::MemoryBlock* in_opt_memory_block_ptr, + VkDeviceSize in_opt_memory_block_start_offset, + bool in_opt_memory_block_owned_by_image) +{ + /* Sanity checks .. */ + anvil_assert(in_image_ptr != nullptr); + anvil_assert(in_flags == 0); + anvil_assert(m_bindings.size() > in_bind_info_id); + + anvil_assert(in_image_ptr->get_create_info_ptr()->get_n_layers() > in_subresource.array_layer); + anvil_assert(in_image_ptr->get_n_mipmaps () > in_subresource.mip_level); + anvil_assert(in_image_ptr->has_aspects (in_subresource.aspect_mask) ); + + if (in_opt_memory_block_ptr != nullptr) + { + anvil_assert(in_opt_memory_block_ptr->get_create_info_ptr()->get_size() > in_opt_memory_block_start_offset); + } + + /* Cache the update */ + auto& binding = m_bindings.at(in_bind_info_id); + ImageBindInfo update; + VkSparseImageMemoryBind update_vk; + + update.extent = in_extent; + update.flags = in_flags; + update.memory_block_owned_by_image = in_opt_memory_block_owned_by_image; + update.memory_block_ptr = in_opt_memory_block_ptr; + update.memory_block_start_offset = in_opt_memory_block_start_offset; + update.offset = in_offset; + update.subresource = in_subresource; + + update_vk.extent = in_extent; + update_vk.flags = in_flags.get_vk(); + update_vk.memory = (in_opt_memory_block_ptr != nullptr) ? in_opt_memory_block_ptr->get_memory() + : VK_NULL_HANDLE; + update_vk.memoryOffset = (in_opt_memory_block_ptr != nullptr) ? 
in_opt_memory_block_ptr->get_start_offset() + in_opt_memory_block_start_offset + : UINT32_MAX; + update_vk.offset = in_offset; + update_vk.subresource = in_subresource.get_vk(); + + binding.image_updates[in_image_ptr].first.push_back (update); + binding.image_updates[in_image_ptr].second.push_back(update_vk); +} + +/** Please see header for specification */ +void Anvil::SparseMemoryBindingUpdateInfo::append_opaque_image_memory_update(SparseMemoryBindInfoID in_bind_info_id, + Anvil::Image* in_image_ptr, + VkDeviceSize in_resource_offset, + VkDeviceSize in_size, + Anvil::SparseMemoryBindFlags in_flags, + Anvil::MemoryBlock* in_opt_memory_block_ptr, + VkDeviceSize in_opt_memory_block_start_offset, + bool in_opt_memory_block_owned_by_image, + uint32_t in_n_plane) +{ + /* Sanity checks */ + anvil_assert(in_image_ptr != nullptr); + anvil_assert(m_bindings.size () > in_bind_info_id); + anvil_assert(in_image_ptr->get_image_storage_size(in_n_plane) >= in_resource_offset + in_size); + + if (in_opt_memory_block_ptr != nullptr) + { + anvil_assert(in_opt_memory_block_ptr->get_create_info_ptr()->get_size() >= in_opt_memory_block_start_offset + in_size); + } + + /* Cache the update */ + auto& binding = m_bindings.at(in_bind_info_id); + GeneralBindInfo update; + VkSparseMemoryBind update_vk; + + update.flags = in_flags; + update.memory_block_owned_by_target = in_opt_memory_block_owned_by_image; + update.memory_block_ptr = in_opt_memory_block_ptr; + update.memory_block_start_offset = in_opt_memory_block_start_offset; + update.n_plane = in_n_plane; + update.size = in_size; + update.start_offset = in_resource_offset; + + update_vk.flags = in_flags.get_vk(); + update_vk.memory = (in_opt_memory_block_ptr != nullptr) ? in_opt_memory_block_ptr->get_memory() + : VK_NULL_HANDLE; + update_vk.memoryOffset = (in_opt_memory_block_ptr != nullptr) ? 
(in_opt_memory_block_ptr->get_start_offset() + in_opt_memory_block_start_offset) + : UINT32_MAX; + update_vk.resourceOffset = in_resource_offset; + update_vk.size = in_size; + + binding.image_opaque_updates[in_image_ptr].first.push_back (update); + binding.image_opaque_updates[in_image_ptr].second.push_back(update_vk); +} + +/** Please see header for specification */ +void Anvil::SparseMemoryBindingUpdateInfo::bake() +{ + const uint32_t n_bindings = static_cast(m_bindings.size() ); + + anvil_assert(m_dirty); + + if (n_bindings > 0) + { + uint32_t n_buffer_updates_used = 0; + uint32_t n_image_updates_used = 0; + uint32_t n_opaque_image_updates_used = 0; + + m_bindings_vk.resize (n_bindings); + m_device_group_bindings_vk.resize(n_bindings); + + { + uint32_t n_total_buffer_updates = 0; + uint32_t n_total_image_updates = 0; + uint32_t n_total_opaque_image_updates = 0; + + for (uint32_t n_binding = 0; + n_binding < n_bindings; + ++n_binding) + { + const auto& bind_info = m_bindings.at(n_binding); + + n_total_buffer_updates += static_cast(bind_info.buffer_updates.size () ); + n_total_image_updates += static_cast(bind_info.image_updates.size () ); + n_total_opaque_image_updates += static_cast(bind_info.image_opaque_updates.size() ); + } + + m_buffer_bindings_vk.resize (n_total_buffer_updates); + m_image_bindings_vk.resize (n_total_image_updates); + m_image_opaque_bindings_vk.resize(n_total_opaque_image_updates); + } + + for (uint32_t n_binding = 0; + n_binding < n_bindings; + ++n_binding) + { + auto& bind_info = m_bindings [n_binding]; + uint32_t n_buffer_bindings_start_index = ~0u; + uint32_t n_image_bindings_start_index = ~0u; + uint32_t n_image_opaque_bindings_start_index = ~0u; + auto& vk_binding = m_bindings_vk[n_binding]; + void** next_ptr_ptr = const_cast(&vk_binding.pNext); + + bind_info.signal_semaphores_vk.clear(); + bind_info.wait_semaphores_vk.clear (); + + bind_info.signal_semaphores_vk.reserve(bind_info.signal_semaphores.size() ); + bind_info.wait_semaphores_vk.reserve (bind_info.wait_semaphores.size () ); + + for (auto& signal_semaphore_ptr : bind_info.signal_semaphores) + { + bind_info.signal_semaphores_vk.push_back(signal_semaphore_ptr->get_semaphore() ); + } + + for (auto& wait_semaphore_ptr : bind_info.wait_semaphores) + { + bind_info.wait_semaphores_vk.push_back(wait_semaphore_ptr->get_semaphore() ); + } + + vk_binding.bufferBindCount = static_cast(bind_info.buffer_updates.size() ); + vk_binding.imageBindCount = static_cast(bind_info.image_updates.size() ); + vk_binding.imageOpaqueBindCount = static_cast(bind_info.image_opaque_updates.size() ); + vk_binding.pNext = nullptr; + vk_binding.pSignalSemaphores = (bind_info.signal_semaphores_vk.size() > 0) ? &bind_info.signal_semaphores_vk[0] : nullptr; + vk_binding.pWaitSemaphores = (bind_info.wait_semaphores_vk.size() > 0) ? 
&bind_info.wait_semaphores_vk [0] : nullptr; + vk_binding.signalSemaphoreCount = static_cast(bind_info.signal_semaphores_vk.size() ); + vk_binding.sType = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO; + vk_binding.waitSemaphoreCount = static_cast(bind_info.wait_semaphores_vk.size() ); + + if (bind_info.memory_device_index != 0 || + bind_info.resource_device_index != 0) + { + auto& device_group_binding_info = m_device_group_bindings_vk.at(n_binding); + + device_group_binding_info.memoryDeviceIndex = bind_info.memory_device_index; + device_group_binding_info.pNext = nullptr; + device_group_binding_info.resourceDeviceIndex = bind_info.resource_device_index; + device_group_binding_info.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR; + + *next_ptr_ptr = &device_group_binding_info; + next_ptr_ptr = const_cast(&device_group_binding_info.pNext); + } + + n_buffer_bindings_start_index = n_buffer_updates_used; + n_image_bindings_start_index = n_image_updates_used; + n_image_opaque_bindings_start_index = n_opaque_image_updates_used; + + for (auto& buffer_update : bind_info.buffer_updates) + { + const VkBuffer current_buffer_vk = buffer_update.first->get_buffer(); + VkSparseBufferMemoryBindInfo buffer_bind_info; + + anvil_assert(buffer_update.second.second.size() > 0); + + buffer_bind_info.bindCount = static_cast(buffer_update.second.second.size() ); + buffer_bind_info.buffer = current_buffer_vk; + buffer_bind_info.pBinds = &buffer_update.second.second[0]; + + m_buffer_bindings_vk.at(n_buffer_updates_used) = buffer_bind_info; + ++n_buffer_updates_used; + } + + for (auto& image_update : bind_info.image_updates) + { + const VkImage current_image_vk = image_update.first->get_image(); + VkSparseImageMemoryBindInfo image_bind_info; + + anvil_assert(image_update.second.second.size() > 0); + + image_bind_info.bindCount = static_cast(image_update.second.second.size() ); + image_bind_info.image = current_image_vk; + image_bind_info.pBinds = &image_update.second.second[0]; + + m_image_bindings_vk.at(n_image_updates_used) = image_bind_info; + ++n_image_updates_used; + } + + for (auto& image_opaque_update : bind_info.image_opaque_updates) + { + const VkImage current_image_vk = image_opaque_update.first->get_image(); + VkSparseImageOpaqueMemoryBindInfo image_opaque_bind_info; + + anvil_assert(image_opaque_update.second.second.size() > 0); + + image_opaque_bind_info.bindCount = static_cast(image_opaque_update.second.second.size() ); + image_opaque_bind_info.image = current_image_vk; + image_opaque_bind_info.pBinds = &image_opaque_update.second.second[0]; + + m_image_opaque_bindings_vk.at(n_opaque_image_updates_used) = image_opaque_bind_info; + ++n_opaque_image_updates_used; + } + + vk_binding.pBufferBinds = (bind_info.buffer_updates.size() > 0) ? &m_buffer_bindings_vk.at (n_buffer_bindings_start_index) : nullptr; + vk_binding.pImageBinds = (bind_info.image_updates.size() > 0) ? &m_image_bindings_vk.at (n_image_bindings_start_index) : nullptr; + vk_binding.pImageOpaqueBinds = (bind_info.image_opaque_updates.size() > 0) ? 
&m_image_opaque_bindings_vk.at(n_image_opaque_bindings_start_index) : nullptr; + } + } + else + { + m_bindings_vk.clear(); + } + + m_dirty = false; +} + +/** Please see header for specification */ +bool Anvil::SparseMemoryBindingUpdateInfo::get_bind_info_properties(SparseMemoryBindInfoID in_bind_info_id, + uint32_t* const out_opt_n_buffer_memory_updates_ptr, + uint32_t* const out_opt_n_image_memory_updates_ptr, + uint32_t* const out_opt_n_image_opaque_memory_updates_ptr, + uint32_t* const out_opt_n_signal_semaphores_ptr, + Anvil::Semaphore*** out_opt_signal_semaphores_ptr_ptr_ptr, + uint32_t* const out_opt_n_wait_semaphores_ptr, + Anvil::Semaphore*** out_opt_wait_semaphores_ptr_ptr_ptr) +{ + decltype(m_bindings)::iterator binding_iterator; + bool result = false; + + if (m_bindings.size() <= in_bind_info_id) + { + anvil_assert(m_bindings.size() > in_bind_info_id); + + goto end; + } + + binding_iterator = m_bindings.begin() + static_cast(in_bind_info_id); + + if (out_opt_n_buffer_memory_updates_ptr != nullptr) + { + uint32_t n_buffer_mem_updates = 0; + + for (const auto& buffer_update_iterator : binding_iterator->buffer_updates) + { + n_buffer_mem_updates += static_cast(buffer_update_iterator.second.first.size() ); + } + + *out_opt_n_buffer_memory_updates_ptr = n_buffer_mem_updates; + } + + if (out_opt_n_image_memory_updates_ptr != nullptr) + { + uint32_t n_image_mem_updates = 0; + + for (const auto& image_update_iterator : binding_iterator->image_updates) + { + n_image_mem_updates += static_cast(image_update_iterator.second.first.size() ); + } + + *out_opt_n_image_memory_updates_ptr = n_image_mem_updates; + } + + if (out_opt_n_image_opaque_memory_updates_ptr != nullptr) + { + uint32_t n_image_opaque_mem_updates = 0; + + for (const auto& image_opaque_update_iterator : binding_iterator->image_opaque_updates) + { + n_image_opaque_mem_updates += static_cast(image_opaque_update_iterator.second.first.size() ); + } + + *out_opt_n_image_opaque_memory_updates_ptr = n_image_opaque_mem_updates; + } + + if (out_opt_n_signal_semaphores_ptr != nullptr) + { + *out_opt_n_signal_semaphores_ptr = static_cast(binding_iterator->signal_semaphores.size() ); + } + + if (out_opt_signal_semaphores_ptr_ptr_ptr != nullptr && + binding_iterator->signal_semaphores.size() > 0) + { + *out_opt_signal_semaphores_ptr_ptr_ptr = &binding_iterator->signal_semaphores.at(0); + } + + if (out_opt_n_wait_semaphores_ptr != nullptr) + { + *out_opt_n_wait_semaphores_ptr = static_cast(binding_iterator->wait_semaphores.size() ); + } + + if (out_opt_wait_semaphores_ptr_ptr_ptr != nullptr && + binding_iterator->wait_semaphores.size() > 0) + { + *out_opt_wait_semaphores_ptr_ptr_ptr = &binding_iterator->wait_semaphores.at(0); + } + + /* All done */ + result = true; +end: + return result; +} + +/** Please see header for specification */ +void Anvil::SparseMemoryBindingUpdateInfo::get_bind_sparse_call_args(uint32_t* out_bind_info_count_ptr, + const VkBindSparseInfo** out_bind_info_ptr, + Anvil::Fence** out_fence_to_set_ptr) +{ + if (m_dirty) + { + bake(); + + anvil_assert(!m_dirty); + } + + *out_bind_info_count_ptr = static_cast(m_bindings.size() ); + *out_bind_info_ptr = (m_bindings_vk.size() > 0) ? 
&m_bindings_vk.at(0) : nullptr; + *out_fence_to_set_ptr = m_fence_ptr; +} + +/** Please see header for specification */ +bool Anvil::SparseMemoryBindingUpdateInfo::get_buffer_memory_update_properties(SparseMemoryBindInfoID in_bind_info_id, + uint32_t in_n_update, + Anvil::Buffer** out_opt_buffer_ptr_ptr, + VkDeviceSize* out_opt_buffer_memory_start_offset_ptr, + Anvil::MemoryBlock** out_opt_memory_block_ptr_ptr, + VkDeviceSize* out_opt_memory_block_start_offset_ptr, + bool* out_opt_memory_block_owned_by_buffer_ptr, + VkDeviceSize* out_opt_size_ptr) const +{ + GeneralBindInfo buffer_bind; + BufferBindUpdateMap::const_iterator buffer_binding_map_iterator; + decltype(m_bindings)::const_iterator binding_iterator; + uint32_t n_current_update = 0; + bool result = false; + + if (m_bindings.size() <= in_bind_info_id) + { + anvil_assert(!(m_bindings.size() <= in_bind_info_id) ); + + goto end; + } + + binding_iterator = m_bindings.cbegin() + static_cast(in_bind_info_id); + buffer_binding_map_iterator = binding_iterator->buffer_updates.begin(); + + while (buffer_binding_map_iterator != binding_iterator->buffer_updates.end() ) + { + const uint32_t n_buffer_bindings = static_cast(buffer_binding_map_iterator->second.first.size() ); + + if (n_current_update + n_buffer_bindings > in_n_update) + { + buffer_bind = buffer_binding_map_iterator->second.first.at(in_n_update - n_current_update); + + break; + } + else + { + n_current_update += n_buffer_bindings; + buffer_binding_map_iterator ++; + } + } + + if (buffer_binding_map_iterator == binding_iterator->buffer_updates.end() ) + { + anvil_assert(!(buffer_binding_map_iterator == binding_iterator->buffer_updates.end()) ); + + goto end; + } + + if (out_opt_buffer_ptr_ptr != nullptr) + { + *out_opt_buffer_ptr_ptr = buffer_binding_map_iterator->first; + } + + if (out_opt_buffer_memory_start_offset_ptr != nullptr) + { + *out_opt_buffer_memory_start_offset_ptr = buffer_bind.start_offset; + } + + if (out_opt_memory_block_owned_by_buffer_ptr != nullptr) + { + *out_opt_memory_block_owned_by_buffer_ptr = buffer_bind.memory_block_owned_by_target; + } + + if (out_opt_memory_block_ptr_ptr != nullptr) + { + *out_opt_memory_block_ptr_ptr = buffer_bind.memory_block_ptr; + } + + if (out_opt_memory_block_start_offset_ptr != nullptr) + { + *out_opt_memory_block_start_offset_ptr = buffer_bind.memory_block_start_offset; + } + + if (out_opt_size_ptr != nullptr) + { + *out_opt_size_ptr = buffer_bind.size; + } + + /* All done */ + result = true; +end: + return result; +} + +/** Please see header for specification */ +bool Anvil::SparseMemoryBindingUpdateInfo::get_device_indices(SparseMemoryBindInfoID in_bind_info_id, + uint32_t* out_opt_resource_device_index_ptr, + uint32_t* out_opt_memory_device_index_ptr) const +{ + decltype(m_bindings)::const_iterator binding_iterator; + bool result (false); + + if (m_bindings.size() <= in_bind_info_id) + { + anvil_assert(!(m_bindings.size() <= in_bind_info_id) ); + + goto end; + } + + binding_iterator = m_bindings.cbegin() + static_cast(in_bind_info_id); + + if (out_opt_resource_device_index_ptr != nullptr) + { + *out_opt_resource_device_index_ptr = binding_iterator->resource_device_index; + } + + if (out_opt_memory_device_index_ptr != nullptr) + { + *out_opt_memory_device_index_ptr = binding_iterator->memory_device_index; + } + + /* All done */ + result = true; +end: + return result; +} + +/** Please see header for specification */ +bool Anvil::SparseMemoryBindingUpdateInfo::get_image_memory_update_properties(SparseMemoryBindInfoID 
in_bind_info_id, + uint32_t in_n_update, + Anvil::Image** out_opt_image_ptr_ptr, + Anvil::ImageSubresource* out_opt_subresource_ptr, + VkOffset3D* out_opt_offset_ptr, + VkExtent3D* out_opt_extent_ptr, + Anvil::SparseMemoryBindFlags* out_opt_flags_ptr, + Anvil::MemoryBlock** out_opt_memory_block_ptr_ptr, + VkDeviceSize* out_opt_memory_block_start_offset_ptr, + bool* out_opt_memory_block_owned_by_image_ptr) const +{ + decltype(m_bindings)::const_iterator binding_iterator; + ImageBindInfo image_bind; + ImageBindUpdateMap::const_iterator image_binding_map_iterator; + uint32_t n_current_update = 0; + bool result = false; + + if (m_bindings.size() <= in_bind_info_id) + { + anvil_assert(!(m_bindings.size() <= in_bind_info_id) ); + + goto end; + } + + binding_iterator = m_bindings.cbegin() + static_cast(in_bind_info_id); + image_binding_map_iterator = binding_iterator->image_updates.begin(); + + while (image_binding_map_iterator != binding_iterator->image_updates.end() ) + { + const uint32_t n_image_bindings = static_cast(image_binding_map_iterator->second.first.size() ); + + if (n_current_update + n_image_bindings > in_n_update) + { + image_bind = image_binding_map_iterator->second.first.at(in_n_update - n_current_update); + + break; + } + else + { + n_current_update += n_image_bindings; + image_binding_map_iterator ++; + } + } + + if (image_binding_map_iterator == binding_iterator->image_updates.end() ) + { + anvil_assert(!(image_binding_map_iterator == binding_iterator->image_updates.end()) ); + + goto end; + } + + if (out_opt_image_ptr_ptr != nullptr) + { + *out_opt_image_ptr_ptr = image_binding_map_iterator->first; + } + + if (out_opt_subresource_ptr != nullptr) + { + *out_opt_subresource_ptr = image_bind.subresource; + } + + if (out_opt_offset_ptr != nullptr) + { + *out_opt_offset_ptr = image_bind.offset; + } + + if (out_opt_extent_ptr != nullptr) + { + *out_opt_extent_ptr = image_bind.extent; + } + + if (out_opt_flags_ptr != nullptr) + { + *out_opt_flags_ptr = image_bind.flags; + } + + if (out_opt_memory_block_ptr_ptr != nullptr) + { + *out_opt_memory_block_ptr_ptr = image_bind.memory_block_ptr; + } + + if (out_opt_memory_block_start_offset_ptr != nullptr) + { + *out_opt_memory_block_start_offset_ptr = image_bind.memory_block_start_offset; + } + + if (out_opt_memory_block_owned_by_image_ptr != nullptr) + { + *out_opt_memory_block_owned_by_image_ptr = image_bind.memory_block_owned_by_image; + } + + /* All done */ + result = true; +end: + return result; +} + +/** Please see header for specification */ +bool Anvil::SparseMemoryBindingUpdateInfo::get_image_opaque_memory_update_properties(SparseMemoryBindInfoID in_bind_info_id, + uint32_t in_n_update, + Anvil::Image** out_opt_image_ptr_ptr, + VkDeviceSize* out_opt_resource_offset_ptr, + VkDeviceSize* out_opt_size_ptr, + Anvil::SparseMemoryBindFlags* out_opt_flags_ptr, + Anvil::MemoryBlock** out_opt_memory_block_ptr_ptr, + VkDeviceSize* out_opt_memory_block_start_offset_ptr, + bool* out_opt_memory_block_owned_by_image_ptr, + uint32_t* out_opt_n_plane_ptr) const +{ + decltype(m_bindings)::const_iterator binding_iterator; + GeneralBindInfo image_opaque_bind; + ImageOpaqueBindUpdateMap::const_iterator image_opaque_binding_map_iterator; + uint32_t n_current_update = 0; + bool result = false; + + if (m_bindings.size() <= in_bind_info_id) + { + anvil_assert(!(m_bindings.size() <= in_bind_info_id) ); + + goto end; + } + + binding_iterator = m_bindings.cbegin() + static_cast(in_bind_info_id); + image_opaque_binding_map_iterator = 
binding_iterator->image_opaque_updates.begin(); + + while (image_opaque_binding_map_iterator != binding_iterator->image_opaque_updates.end() ) + { + const uint32_t n_image_opaque_bindings = static_cast(image_opaque_binding_map_iterator->second.first.size() ); + + if (n_current_update + n_image_opaque_bindings > in_n_update) + { + image_opaque_bind = image_opaque_binding_map_iterator->second.first.at(in_n_update - n_current_update); + + break; + } + else + { + n_current_update += n_image_opaque_bindings; + image_opaque_binding_map_iterator ++; + } + } + + if (image_opaque_binding_map_iterator == binding_iterator->image_opaque_updates.end() ) + { + anvil_assert(!(image_opaque_binding_map_iterator == binding_iterator->image_opaque_updates.end()) ); + + goto end; + } + + if (out_opt_image_ptr_ptr != nullptr) + { + *out_opt_image_ptr_ptr = image_opaque_binding_map_iterator->first; + } + + if (out_opt_resource_offset_ptr != nullptr) + { + *out_opt_resource_offset_ptr = image_opaque_bind.start_offset; + } + + if (out_opt_size_ptr != nullptr) + { + *out_opt_size_ptr = image_opaque_bind.size; + } + + if (out_opt_flags_ptr != nullptr) + { + *out_opt_flags_ptr = image_opaque_bind.flags; + } + + if (out_opt_memory_block_ptr_ptr != nullptr) + { + *out_opt_memory_block_ptr_ptr = image_opaque_bind.memory_block_ptr; + } + + if (out_opt_memory_block_start_offset_ptr != nullptr) + { + *out_opt_memory_block_start_offset_ptr = image_opaque_bind.memory_block_start_offset; + } + + if (out_opt_memory_block_owned_by_image_ptr != nullptr) + { + *out_opt_memory_block_owned_by_image_ptr = image_opaque_bind.memory_block_owned_by_target; + } + + if (out_opt_n_plane_ptr != nullptr) + { + *out_opt_n_plane_ptr = image_opaque_bind.n_plane; + } + + /* All done */ + result = true; +end: + return result; +} + +/** Please see header for specification */ +bool Anvil::SparseMemoryBindingUpdateInfo::is_device_group_support_required() const +{ + bool result(false); + + for (const auto& binding_iterator : m_bindings) + { + if (binding_iterator.memory_device_index != 0 || + binding_iterator.resource_device_index != 0) + { + result = true; + + break; + } + } + + return result; +} + +/* Updates memory device index, associated with this batch. + * + * Do not modify unless VK_KHR_device_group is supported + * + * @param in_memory_device_index New memory device index to use. + * + **/ +void Anvil::SparseMemoryBindingUpdateInfo::set_memory_device_index(SparseMemoryBindInfoID in_bind_info_id, + const uint32_t& in_memory_device_index) +{ + decltype(m_bindings)::iterator binding_iterator; + + if (m_bindings.size() <= in_bind_info_id) + { + anvil_assert(!(m_bindings.size() <= in_bind_info_id) ); + + goto end; + } + + binding_iterator = m_bindings.begin() + static_cast(in_bind_info_id); + + if (binding_iterator->memory_device_index != in_memory_device_index) + { + m_dirty = true; + binding_iterator->memory_device_index = in_memory_device_index; + } + +end: + ; +} + +/* Updates resource device index, associated with this batch. + * + * Do not modify unless VK_KHR_device_group is supported + * + * @param in_resource_device_index New resource device index to use. 
+ * + **/ +void Anvil::SparseMemoryBindingUpdateInfo::set_resource_device_index(SparseMemoryBindInfoID in_bind_info_id, + const uint32_t& in_resource_device_index) +{ + decltype(m_bindings)::iterator binding_iterator; + + if (m_bindings.size() <= in_bind_info_id) + { + anvil_assert(!(m_bindings.size() <= in_bind_info_id) ); + + goto end; + } + + binding_iterator = m_bindings.begin() + static_cast(in_bind_info_id); + + if (binding_iterator->resource_device_index != in_resource_device_index) + { + m_dirty = true; + binding_iterator->resource_device_index = in_resource_device_index; + } + +end: + ; +} diff --git a/src/misc/types_struct.cpp b/src/misc/types_struct.cpp new file mode 100644 index 00000000..008b910b --- /dev/null +++ b/src/misc/types_struct.cpp @@ -0,0 +1,4661 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#include "misc/descriptor_set_create_info.h" +#include "misc/formats.h" +#include "misc/image_create_info.h" +#include "misc/types.h" +#include "wrappers/buffer.h" +#include "wrappers/descriptor_set_layout.h" +#include "wrappers/image.h" +#include +#include + +#define BOOL_TO_VK_BOOL32(x) ((x) ? VK_TRUE : VK_FALSE); +#define VK_BOOL32_TO_BOOL(x) ((x == VK_FALSE) ? 
false : true) + +#ifdef max + #undef max +#endif + +Anvil::AMDShaderCoreProperties::AMDShaderCoreProperties() +{ + memset(this, + 0, + sizeof(*this)); +} + +Anvil::AMDShaderCoreProperties::AMDShaderCoreProperties(const VkPhysicalDeviceShaderCorePropertiesAMD& in_props) +{ + compute_units_per_shader_array = in_props.computeUnitsPerShaderArray; + max_sgpr_allocation = in_props.maxSgprAllocation; + max_vgpr_allocation = in_props.maxVgprAllocation; + min_sgpr_allocation = in_props.minSgprAllocation; + min_vgpr_allocation = in_props.minVgprAllocation; + shader_arrays_per_engine_count = in_props.shaderArraysPerEngineCount; + shader_engine_count = in_props.shaderEngineCount; + sgpr_allocation_granularity = in_props.sgprAllocationGranularity; + sgprs_per_simd = in_props.sgprsPerSimd; + simd_per_compute_unit = in_props.simdPerComputeUnit; + vgpr_allocation_granularity = in_props.vgprAllocationGranularity; + vgprs_per_simd = in_props.vgprsPerSimd; + wavefronts_per_simd = in_props.wavefrontsPerSimd; + wavefront_size = in_props.wavefrontSize; +} + +bool Anvil::AMDShaderCoreProperties::operator==(const Anvil::AMDShaderCoreProperties& in_props) const +{ + return (compute_units_per_shader_array == in_props.compute_units_per_shader_array && + max_sgpr_allocation == in_props.max_sgpr_allocation && + max_vgpr_allocation == in_props.max_vgpr_allocation && + min_sgpr_allocation == in_props.min_sgpr_allocation && + min_vgpr_allocation == in_props.min_vgpr_allocation && + sgprs_per_simd == in_props.sgprs_per_simd && + sgpr_allocation_granularity == in_props.sgpr_allocation_granularity && + shader_arrays_per_engine_count == in_props.shader_arrays_per_engine_count && + shader_engine_count == in_props.shader_engine_count && + simd_per_compute_unit == in_props.simd_per_compute_unit && + wavefronts_per_simd == in_props.wavefronts_per_simd && + wavefront_size == in_props.wavefront_size && + vgpr_allocation_granularity == in_props.vgpr_allocation_granularity && + vgprs_per_simd == in_props.vgprs_per_simd); +} + +/** Please see header for specification */ +Anvil::BufferBarrier::BufferBarrier(const BufferBarrier& in) +{ + buffer = in.buffer; + buffer_ptr = in.buffer_ptr; + dst_access_mask = in.dst_access_mask; + dst_queue_family_index = in.dst_queue_family_index; + offset = in.offset; + size = in.size; + src_access_mask = in.src_access_mask; + src_queue_family_index = in.src_queue_family_index; +} + +/** Please see header for specification */ +Anvil::BufferBarrier::BufferBarrier(Anvil::AccessFlags in_source_access_mask, + Anvil::AccessFlags in_destination_access_mask, + uint32_t in_src_queue_family_index, + uint32_t in_dst_queue_family_index, + Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + VkDeviceSize in_size) +{ + buffer = in_buffer_ptr->get_buffer(); + buffer_ptr = in_buffer_ptr; + dst_access_mask = in_destination_access_mask; + dst_queue_family_index = in_dst_queue_family_index; + offset = in_offset; + size = in_size; + src_access_mask = in_source_access_mask; + src_queue_family_index = in_src_queue_family_index; + + /* NOTE: For an image barrier to work correctly, the underlying subresource range must be assigned memory. 
+ * Query for a memory block in order to force any listening memory allocators to bake */ + auto memory_block_ptr = buffer_ptr->get_memory_block(0 /* in_n_memory_block */); + + ANVIL_REDUNDANT_VARIABLE(memory_block_ptr); +} + +/** Please see header for specification */ +Anvil::BufferBarrier::~BufferBarrier() +{ + /* Stub */ +} + +bool Anvil::BufferBarrier::operator==(const Anvil::BufferBarrier& in_barrier) const +{ + return (dst_access_mask == in_barrier.dst_access_mask && + src_access_mask == in_barrier.src_access_mask && + buffer_ptr == in_barrier.buffer_ptr && + dst_queue_family_index == in_barrier.dst_queue_family_index && + offset == in_barrier.offset && + size == in_barrier.size && + src_queue_family_index == in_barrier.src_queue_family_index); +} + +VkBufferMemoryBarrier Anvil::BufferBarrier::get_barrier_vk() const +{ + VkBufferMemoryBarrier result; + + result.buffer = buffer_ptr->get_buffer(); + result.dstAccessMask = dst_access_mask.get_vk(); + result.dstQueueFamilyIndex = dst_queue_family_index; + result.offset = offset; + result.pNext = nullptr; + result.size = size; + result.srcAccessMask = src_access_mask.get_vk(), + result.srcQueueFamilyIndex = src_queue_family_index; + result.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER; + + return result; +} + +Anvil::BufferProperties::BufferProperties() +{ + /* Stub */ +} + +Anvil::BufferProperties::BufferProperties(const ExternalMemoryProperties& in_external_handle_properties) + :external_handle_properties (in_external_handle_properties) +{ + /* Stub */ +} + + +Anvil::BufferMemoryBindingUpdate::BufferMemoryBindingUpdate() +{ + buffer_ptr = nullptr; + memory_block_owned_by_buffer = false; + memory_block_ptr = nullptr; +} + +Anvil::DebugObjectNameInfo::DebugObjectNameInfo(const VkDebugUtilsObjectNameInfoEXT& in_name_info_vk) +{ + object_handle = in_name_info_vk.objectHandle; + object_name_ptr = in_name_info_vk.pObjectName; + object_type = Anvil::Utils::get_object_type_for_vk_object_type(in_name_info_vk.objectType); +} + +Anvil::DescriptorSetAllocation::DescriptorSetAllocation(const Anvil::DescriptorSetLayout* in_ds_layout_ptr) +{ + anvil_assert( in_ds_layout_ptr != nullptr); + anvil_assert(!in_ds_layout_ptr->get_create_info()->contains_variable_descriptor_count_binding() ); + + ds_layout_ptr = in_ds_layout_ptr; + n_variable_descriptor_bindings = UINT32_MAX; +} + +/* Constructor. + * + * Use if you need to allocate a descriptor set using a descriptor set layout which + * CONTAINS a variable count descriptor binding. 
+ */ +Anvil::DescriptorSetAllocation::DescriptorSetAllocation(const Anvil::DescriptorSetLayout* in_ds_layout_ptr, + const uint32_t& in_n_variable_descriptor_bindings) +{ + anvil_assert(in_ds_layout_ptr != nullptr); + + uint32_t binding_array_size = 0; + uint32_t binding_index = UINT32_MAX; + const auto ds_create_info_ptr = in_ds_layout_ptr->get_create_info(); + + ds_create_info_ptr->contains_variable_descriptor_count_binding(&binding_index); + anvil_assert(binding_index != UINT32_MAX); + + ds_create_info_ptr->get_binding_properties_by_binding_index(binding_index, + nullptr, /* out_opt_descriptor_type_ptr */ + &binding_array_size); + anvil_assert(in_n_variable_descriptor_bindings <= binding_array_size); + + ds_layout_ptr = in_ds_layout_ptr; + n_variable_descriptor_bindings = in_n_variable_descriptor_bindings; +} + +Anvil::DescriptorUpdateTemplateEntry::DescriptorUpdateTemplateEntry() + :descriptor_type (Anvil::DescriptorType::UNKNOWN), + n_descriptors (UINT32_MAX), + n_destination_array_element(UINT32_MAX), + n_destination_binding (UINT32_MAX), + offset (SIZE_MAX), + stride (SIZE_MAX) +{ + /* Stub */ +} + +Anvil::DescriptorUpdateTemplateEntry::DescriptorUpdateTemplateEntry(const Anvil::DescriptorType& in_descriptor_type, + const uint32_t& in_n_destination_array_element, + const uint32_t& in_n_destination_binding, + const uint32_t& in_n_descriptors, + const size_t& in_offset, + const size_t& in_stride) + :descriptor_type (in_descriptor_type), + n_descriptors (in_n_descriptors), + n_destination_array_element(in_n_destination_array_element), + n_destination_binding (in_n_destination_binding), + offset (in_offset), + stride (in_stride) +{ + if (in_descriptor_type == Anvil::DescriptorType::INLINE_UNIFORM_BLOCK) + { + n_destination_array_element *= 4; + n_descriptors *= 4; + } +} + +bool Anvil::DescriptorUpdateTemplateEntry::operator==(const Anvil::DescriptorUpdateTemplateEntry& in_entry) const +{ + return (in_entry.descriptor_type == descriptor_type) && + (in_entry.n_descriptors == n_descriptors) && + (in_entry.n_destination_array_element == n_destination_array_element) && + (in_entry.n_destination_binding == n_destination_binding) && + (in_entry.offset == offset) && + (in_entry.stride == stride); +} + +bool Anvil::DescriptorUpdateTemplateEntry::operator<(const Anvil::DescriptorUpdateTemplateEntry& in_entry) const +{ + if (in_entry.descriptor_type < descriptor_type) + { + return true; + } + else + if (in_entry.descriptor_type > descriptor_type) + { + return false; + } + + if (in_entry.n_descriptors < n_descriptors) + { + return true; + } + else + if (in_entry.n_descriptors > n_descriptors) + { + return false; + } + + if (in_entry.n_destination_array_element < n_destination_array_element) + { + return true; + } + else + if (in_entry.n_destination_array_element > n_destination_array_element) + { + return false; + } + + if (in_entry.n_destination_binding < n_destination_binding) + { + return true; + } + else + if (in_entry.n_destination_binding > n_destination_binding) + { + return false; + } + + if (in_entry.offset < offset) + { + return true; + } + else + if (in_entry.offset > offset) + { + return false; + } + + if (in_entry.stride < stride) + { + return true; + } + + return false; +} + +/** Please see header for specification */ +VkDescriptorUpdateTemplateEntryKHR Anvil::DescriptorUpdateTemplateEntry::get_vk_descriptor_update_template_entry_khr() const +{ + VkDescriptorUpdateTemplateEntryKHR result; + + result.descriptorCount = n_descriptors; + result.descriptorType = 
static_cast(descriptor_type); + result.dstArrayElement = n_destination_array_element; + result.dstBinding = n_destination_binding; + result.offset = offset; + result.stride = stride; + + return result; +} + +Anvil::ExtensionAMDBufferMarkerEntrypoints::ExtensionAMDBufferMarkerEntrypoints() +{ + vkCmdWriteBufferMarkerAMD = nullptr; +} + +Anvil::ExtensionAMDDrawIndirectCountEntrypoints::ExtensionAMDDrawIndirectCountEntrypoints() +{ + vkCmdDrawIndexedIndirectCountAMD = nullptr; + vkCmdDrawIndirectCountAMD = nullptr; +} + +Anvil::ExtensionAMDShaderInfoEntrypoints::ExtensionAMDShaderInfoEntrypoints() +{ + vkGetShaderInfoAMD = nullptr; +} + +Anvil::ExtensionEXTDebugMarkerEntrypoints::ExtensionEXTDebugMarkerEntrypoints() +{ + vkCmdDebugMarkerBeginEXT = nullptr; + vkCmdDebugMarkerEndEXT = nullptr; + vkCmdDebugMarkerInsertEXT = nullptr; + vkDebugMarkerSetObjectNameEXT = nullptr; + vkDebugMarkerSetObjectTagEXT = nullptr; +} + +Anvil::ExtensionEXTDebugReportEntrypoints::ExtensionEXTDebugReportEntrypoints() +{ + vkCreateDebugReportCallbackEXT = nullptr; + vkDebugReportMessageEXT = nullptr; + vkDestroyDebugReportCallbackEXT = nullptr; +} + +Anvil::ExtensionEXTDebugUtilsEntrypoints::ExtensionEXTDebugUtilsEntrypoints() +{ + vkCmdBeginDebugUtilsLabelEXT = nullptr; + vkCmdEndDebugUtilsLabelEXT = nullptr; + vkCmdInsertDebugUtilsLabelEXT = nullptr; + vkCreateDebugUtilsMessengerEXT = nullptr; + vkDestroyDebugUtilsMessengerEXT = nullptr; + vkSetDebugUtilsObjectNameEXT = nullptr; + vkSetDebugUtilsObjectTagEXT = nullptr; + vkQueueBeginDebugUtilsLabelEXT = nullptr; + vkQueueEndDebugUtilsLabelEXT = nullptr; + vkQueueInsertDebugUtilsLabelEXT = nullptr; + vkSubmitDebugUtilsMessageEXT = nullptr; +} + +Anvil::ExtensionEXTExternalMemoryHostEntrypoints::ExtensionEXTExternalMemoryHostEntrypoints() +{ + vkGetMemoryHostPointerPropertiesEXT = nullptr; +} + +Anvil::ExtensionEXTHdrMetadataEntrypoints::ExtensionEXTHdrMetadataEntrypoints() +{ + vkSetHdrMetadataEXT = nullptr; +} + +Anvil::ExtensionEXTSampleLocationsEntrypoints::ExtensionEXTSampleLocationsEntrypoints() +{ + vkCmdSetSampleLocationsEXT = nullptr; + vkGetPhysicalDeviceMultisamplePropertiesEXT = nullptr; +} + +Anvil::ExtensionEXTTransformFeedbackEntrypoints::ExtensionEXTTransformFeedbackEntrypoints() +{ + vkCmdBeginQueryIndexedEXT = nullptr; + vkCmdBeginTransformFeedbackEXT = nullptr; + vkCmdBindTransformFeedbackBuffersEXT = nullptr; + vkCmdDrawIndirectByteCountEXT = nullptr; + vkCmdEndQueryIndexedEXT = nullptr; + vkCmdEndTransformFeedbackEXT = nullptr; +} + +Anvil::ExtensionKHRCreateRenderpass2Entrypoints::ExtensionKHRCreateRenderpass2Entrypoints() +{ + vkCmdBeginRenderPass2KHR = nullptr; + vkCmdEndRenderPass2KHR = nullptr; + vkCmdNextSubpass2KHR = nullptr; + vkCreateRenderPass2KHR = nullptr; +} + +Anvil::ExtensionKHRDeviceGroupEntrypoints::ExtensionKHRDeviceGroupEntrypoints() +{ + vkAcquireNextImage2KHR = nullptr; + vkCmdDispatchBaseKHR = nullptr; + vkGetDeviceGroupPeerMemoryFeaturesKHR = nullptr; + vkGetDeviceGroupPresentCapabilitiesKHR = nullptr; + vkGetDeviceGroupSurfacePresentModesKHR = nullptr; + vkGetPhysicalDevicePresentRectanglesKHR = nullptr; + vkCmdSetDeviceMaskKHR = nullptr; +} + +Anvil::ExtensionKHRBindMemory2Entrypoints::ExtensionKHRBindMemory2Entrypoints() +{ + vkBindBufferMemory2KHR = nullptr; + vkBindImageMemory2KHR = nullptr; +} + +Anvil::ExtensionKHRDescriptorUpdateTemplateEntrypoints::ExtensionKHRDescriptorUpdateTemplateEntrypoints() +{ + vkCreateDescriptorUpdateTemplateKHR = nullptr; + vkDestroyDescriptorUpdateTemplateKHR = nullptr; + 
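For the two Anvil::DescriptorSetAllocation constructors added earlier in this file, here is a minimal sketch of how they are meant to be selected. It is illustrative only and not part of the patch; ds_layout_ptr and ds_layout_with_vdc_ptr are assumed to be valid Anvil::DescriptorSetLayout pointers created elsewhere.

// Illustrative sketch only -- not part of the committed sources.
// Layout WITHOUT a variable-count binding: use the single-argument constructor.
Anvil::DescriptorSetAllocation plain_alloc(ds_layout_ptr);

// Layout WITH a variable-count binding: also pass the variable descriptor count,
// which must not exceed the binding's declared array size (asserted by the constructor).
Anvil::DescriptorSetAllocation variable_alloc(ds_layout_with_vdc_ptr,
                                              128 /* n_variable_descriptor_bindings */);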
vkUpdateDescriptorSetWithTemplateKHR = nullptr; +} + +Anvil::ExtensionKHRDeviceGroupCreationEntrypoints::ExtensionKHRDeviceGroupCreationEntrypoints() +{ + vkEnumeratePhysicalDeviceGroupsKHR = nullptr; +} + +Anvil::ExtensionKHRDrawIndirectCountEntrypoints::ExtensionKHRDrawIndirectCountEntrypoints() +{ + vkCmdDrawIndexedIndirectCountKHR = nullptr; + vkCmdDrawIndirectCountKHR = nullptr; +} + +Anvil::ExtensionKHRExternalFenceCapabilitiesEntrypoints::ExtensionKHRExternalFenceCapabilitiesEntrypoints() +{ + vkGetPhysicalDeviceExternalFencePropertiesKHR = nullptr; +} + +Anvil::ExtensionKHRExternalMemoryCapabilitiesEntrypoints::ExtensionKHRExternalMemoryCapabilitiesEntrypoints() +{ + vkGetPhysicalDeviceExternalBufferPropertiesKHR = nullptr; +} + +Anvil::ExtensionKHRExternalSemaphoreCapabilitiesEntrypoints::ExtensionKHRExternalSemaphoreCapabilitiesEntrypoints() +{ + vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = nullptr; +} + +#ifdef _WIN32 + Anvil::ExtensionKHRExternalFenceWin32Entrypoints::ExtensionKHRExternalFenceWin32Entrypoints() + { + vkGetFenceWin32HandleKHR = nullptr; + vkImportFenceWin32HandleKHR = nullptr; + } + + Anvil::ExtensionKHRExternalMemoryWin32Entrypoints::ExtensionKHRExternalMemoryWin32Entrypoints() + { + vkGetMemoryWin32HandleKHR = nullptr; + vkGetMemoryWin32HandlePropertiesKHR = nullptr; + } + + Anvil::ExtensionKHRExternalSemaphoreWin32Entrypoints::ExtensionKHRExternalSemaphoreWin32Entrypoints() + { + vkGetSemaphoreWin32HandleKHR = nullptr; + vkImportSemaphoreWin32HandleKHR = nullptr; + } +#else + Anvil::ExtensionKHRExternalFenceFdEntrypoints::ExtensionKHRExternalFenceFdEntrypoints() + { + vkGetFenceFdKHR = nullptr; + vkImportFenceFdKHR = nullptr; + } + + Anvil::ExtensionKHRExternalMemoryFdEntrypoints::ExtensionKHRExternalMemoryFdEntrypoints() + { + vkGetMemoryFdKHR = nullptr; + vkGetMemoryFdPropertiesKHR = nullptr; + } + + Anvil::ExtensionKHRExternalSemaphoreFdEntrypoints::ExtensionKHRExternalSemaphoreFdEntrypoints() + { + vkGetSemaphoreFdKHR = nullptr; + vkImportSemaphoreFdKHR = nullptr; + } +#endif + +Anvil::ExtensionKHRMaintenance1Entrypoints::ExtensionKHRMaintenance1Entrypoints() +{ + vkTrimCommandPoolKHR = nullptr; +} + +Anvil::ExtensionKHRMaintenance3Entrypoints::ExtensionKHRMaintenance3Entrypoints() +{ + vkGetDescriptorSetLayoutSupportKHR = nullptr; +} + +Anvil::ExtensionKHRSamplerYCbCrConversionEntrypoints::ExtensionKHRSamplerYCbCrConversionEntrypoints() +{ + vkCreateSamplerYcbcrConversionKHR = nullptr; + vkDestroySamplerYcbcrConversionKHR = nullptr; +} + +Anvil::ExtensionKHRSurfaceEntrypoints::ExtensionKHRSurfaceEntrypoints() +{ + vkDestroySurfaceKHR = nullptr; + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = nullptr; + vkGetPhysicalDeviceSurfaceFormatsKHR = nullptr; + vkGetPhysicalDeviceSurfacePresentModesKHR = nullptr; + vkGetPhysicalDeviceSurfaceSupportKHR = nullptr; +} + +Anvil::ExtensionKHRSwapchainEntrypoints::ExtensionKHRSwapchainEntrypoints() +{ + vkAcquireNextImageKHR = nullptr; + vkCreateSwapchainKHR = nullptr; + vkDestroySwapchainKHR = nullptr; + vkGetSwapchainImagesKHR = nullptr; + vkQueuePresentKHR = nullptr; +} + +#ifdef _WIN32 + #if defined(ANVIL_INCLUDE_WIN3264_WINDOW_SYSTEM_SUPPORT) + Anvil::ExtensionKHRWin32SurfaceEntrypoints::ExtensionKHRWin32SurfaceEntrypoints() + { + vkCreateWin32SurfaceKHR = nullptr; + vkGetPhysicalDeviceWin32PresentationSupportKHR = nullptr; + } + #endif +#else + #if defined(ANVIL_INCLUDE_XCB_WINDOW_SYSTEM_SUPPORT) + Anvil::ExtensionKHRXcbSurfaceEntrypoints::ExtensionKHRXcbSurfaceEntrypoints() + { + vkCreateXcbSurfaceKHR = 
nullptr; + } + #endif + #if defined(ANVIL_INCLUDE_WAYLAND_WINDOW_SYSTEM_SUPPORT) + Anvil::ExtensionKHRWaylandSurfaceEntrypoints::ExtensionKHRWaylandSurfaceEntrypoints() + { + vkCreateWaylandSurfaceKHR = nullptr; + } + #endif +#endif +#if defined(ANVIL_INCLUDE_HEADLESS_WINDOW_SYSTEM_SUPPORT) + Anvil::ExtensionEXTHeadlessSurfaceEntrypoints::ExtensionEXTHeadlessSurfaceEntrypoints() + { + vkCreateHeadlessSurfaceEXT = nullptr; + } +#endif + +Anvil::ExtensionKHRGetMemoryRequirements2Entrypoints::ExtensionKHRGetMemoryRequirements2Entrypoints() +{ + vkGetBufferMemoryRequirements2KHR = nullptr; + vkGetImageMemoryRequirements2KHR = nullptr; + vkGetImageSparseMemoryRequirements2KHR = nullptr; +} + +Anvil::ExtensionKHRGetPhysicalDeviceProperties2::ExtensionKHRGetPhysicalDeviceProperties2() +{ + vkGetPhysicalDeviceFeatures2KHR = nullptr; + vkGetPhysicalDeviceFormatProperties2KHR = nullptr; + vkGetPhysicalDeviceImageFormatProperties2KHR = nullptr; + vkGetPhysicalDeviceMemoryProperties2KHR = nullptr; + vkGetPhysicalDeviceProperties2KHR = nullptr; + vkGetPhysicalDeviceQueueFamilyProperties2KHR = nullptr; + vkGetPhysicalDeviceSparseImageFormatProperties2KHR = nullptr; +} + +Anvil::EXTConservativeRasterizationProperties::EXTConservativeRasterizationProperties() + :conservative_point_and_line_rasterization (false), + conservative_rasterization_post_depth_coverage (false), + degenerate_lines_rasterized (false), + degenerate_triangles_rasterized (false), + extra_primitive_overestimation_size_granularity (0.0), + fully_covered_fragment_shader_input_variable (false), + max_extra_primitive_overestimation_size (0.0), + primitive_overestimation_size (0.0), + primitive_underestimation (false) +{ + /* Stub */ +} + +Anvil::EXTConservativeRasterizationProperties::EXTConservativeRasterizationProperties(const VkPhysicalDeviceConservativeRasterizationPropertiesEXT& in_properties) + :conservative_point_and_line_rasterization (VK_BOOL32_TO_BOOL(in_properties.conservativePointAndLineRasterization)), + conservative_rasterization_post_depth_coverage (VK_BOOL32_TO_BOOL(in_properties.conservativeRasterizationPostDepthCoverage)), + degenerate_lines_rasterized (VK_BOOL32_TO_BOOL(in_properties.degenerateLinesRasterized)), + degenerate_triangles_rasterized (VK_BOOL32_TO_BOOL(in_properties.degenerateTrianglesRasterized)), + extra_primitive_overestimation_size_granularity (in_properties.extraPrimitiveOverestimationSizeGranularity), + fully_covered_fragment_shader_input_variable (VK_BOOL32_TO_BOOL(in_properties.fullyCoveredFragmentShaderInputVariable)), + max_extra_primitive_overestimation_size (in_properties.maxExtraPrimitiveOverestimationSize), + primitive_overestimation_size (in_properties.primitiveOverestimationSize), + primitive_underestimation (VK_BOOL32_TO_BOOL(in_properties.primitiveUnderestimation) ) +{ + /* Stub */ +} + +bool Anvil::EXTConservativeRasterizationProperties::operator==(const Anvil::EXTConservativeRasterizationProperties& in_properties) const +{ + return (conservative_point_and_line_rasterization == in_properties.conservative_point_and_line_rasterization && + conservative_rasterization_post_depth_coverage == in_properties.conservative_rasterization_post_depth_coverage && + degenerate_lines_rasterized == in_properties.degenerate_lines_rasterized && + degenerate_triangles_rasterized == in_properties.degenerate_triangles_rasterized && + extra_primitive_overestimation_size_granularity == in_properties.extra_primitive_overestimation_size_granularity && + fully_covered_fragment_shader_input_variable == 
in_properties.fully_covered_fragment_shader_input_variable && + max_extra_primitive_overestimation_size == in_properties.max_extra_primitive_overestimation_size && + primitive_overestimation_size == in_properties.primitive_overestimation_size && + primitive_underestimation == in_properties.primitive_underestimation); +} + +VkPhysicalDeviceConservativeRasterizationPropertiesEXT Anvil::EXTConservativeRasterizationProperties::get_vk_physical_device_conservative_rasterization_properties() const +{ + VkPhysicalDeviceConservativeRasterizationPropertiesEXT result; + + result.conservativePointAndLineRasterization = BOOL_TO_VK_BOOL32(conservative_point_and_line_rasterization); + result.conservativeRasterizationPostDepthCoverage = BOOL_TO_VK_BOOL32(conservative_rasterization_post_depth_coverage); + result.degenerateLinesRasterized = BOOL_TO_VK_BOOL32(degenerate_lines_rasterized); + result.degenerateTrianglesRasterized = BOOL_TO_VK_BOOL32(degenerate_triangles_rasterized); + result.extraPrimitiveOverestimationSizeGranularity = extra_primitive_overestimation_size_granularity; + result.fullyCoveredFragmentShaderInputVariable = BOOL_TO_VK_BOOL32(fully_covered_fragment_shader_input_variable); + result.maxExtraPrimitiveOverestimationSize = max_extra_primitive_overestimation_size; + result.pNext = nullptr; + result.primitiveOverestimationSize = primitive_overestimation_size; + result.primitiveUnderestimation = BOOL_TO_VK_BOOL32(primitive_underestimation); + result.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT); + + return result; +} + +Anvil::EXTDepthClipEnableFeatures::EXTDepthClipEnableFeatures() + :depth_clip_enable(false) +{ + /* Stub */ +} + +Anvil::EXTDepthClipEnableFeatures::EXTDepthClipEnableFeatures(const VkPhysicalDeviceDepthClipEnableFeaturesEXT& in_features) + :depth_clip_enable(VK_BOOL32_TO_BOOL(in_features.depthClipEnable) ) +{ + /* Stub */ +} + +VkPhysicalDeviceDepthClipEnableFeaturesEXT Anvil::EXTDepthClipEnableFeatures::get_vk_physical_device_depth_clip_enable_features() const +{ + VkPhysicalDeviceDepthClipEnableFeaturesEXT result; + + result.depthClipEnable = BOOL_TO_VK_BOOL32(depth_clip_enable); + result.pNext = nullptr; + result.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT; + + return result; +} + +bool Anvil::EXTDepthClipEnableFeatures::operator==(const EXTDepthClipEnableFeatures& in_features) const +{ + return (depth_clip_enable == in_features.depth_clip_enable); +} + +Anvil::EXTDescriptorIndexingFeatures::EXTDescriptorIndexingFeatures() + :descriptor_binding_partially_bound (false), + descriptor_binding_sampled_image_update_after_bind (false), + descriptor_binding_storage_buffer_update_after_bind (false), + descriptor_binding_storage_image_update_after_bind (false), + descriptor_binding_storage_texel_buffer_update_after_bind(false), + descriptor_binding_uniform_buffer_update_after_bind (false), + descriptor_binding_uniform_texel_buffer_update_after_bind(false), + descriptor_binding_update_unused_while_pending (false), + descriptor_binding_variable_descriptor_count (false), + runtime_descriptor_array (false), + shader_input_attachment_array_dynamic_indexing (false), + shader_input_attachment_array_non_uniform_indexing (false), + shader_sampled_image_array_non_uniform_indexing (false), + shader_storage_buffer_array_non_uniform_indexing (false), + shader_storage_image_array_non_uniform_indexing (false), + shader_storage_texel_buffer_array_dynamic_indexing (false), + 
shader_storage_texel_buffer_array_non_uniform_indexing (false), + shader_uniform_buffer_array_non_uniform_indexing (false), + shader_uniform_texel_buffer_array_dynamic_indexing (false), + shader_uniform_texel_buffer_array_non_uniform_indexing (false) +{ + /* Stub */ +} + +Anvil::EXTDescriptorIndexingFeatures::EXTDescriptorIndexingFeatures(const VkPhysicalDeviceDescriptorIndexingFeaturesEXT& in_features) + :descriptor_binding_partially_bound (VK_BOOL32_TO_BOOL(in_features.descriptorBindingPartiallyBound) ), + descriptor_binding_sampled_image_update_after_bind (VK_BOOL32_TO_BOOL(in_features.descriptorBindingSampledImageUpdateAfterBind) ), + descriptor_binding_storage_buffer_update_after_bind (VK_BOOL32_TO_BOOL(in_features.descriptorBindingStorageBufferUpdateAfterBind) ), + descriptor_binding_storage_image_update_after_bind (VK_BOOL32_TO_BOOL(in_features.descriptorBindingStorageImageUpdateAfterBind) ), + descriptor_binding_storage_texel_buffer_update_after_bind(VK_BOOL32_TO_BOOL(in_features.descriptorBindingStorageTexelBufferUpdateAfterBind) ), + descriptor_binding_uniform_buffer_update_after_bind (VK_BOOL32_TO_BOOL(in_features.descriptorBindingUniformBufferUpdateAfterBind) ), + descriptor_binding_uniform_texel_buffer_update_after_bind(VK_BOOL32_TO_BOOL(in_features.descriptorBindingUniformTexelBufferUpdateAfterBind) ), + descriptor_binding_update_unused_while_pending (VK_BOOL32_TO_BOOL(in_features.descriptorBindingUpdateUnusedWhilePending) ), + descriptor_binding_variable_descriptor_count (VK_BOOL32_TO_BOOL(in_features.descriptorBindingVariableDescriptorCount) ), + runtime_descriptor_array (VK_BOOL32_TO_BOOL(in_features.runtimeDescriptorArray) ), + shader_input_attachment_array_dynamic_indexing (VK_BOOL32_TO_BOOL(in_features.shaderInputAttachmentArrayDynamicIndexing) ), + shader_input_attachment_array_non_uniform_indexing (VK_BOOL32_TO_BOOL(in_features.shaderInputAttachmentArrayNonUniformIndexing) ), + shader_sampled_image_array_non_uniform_indexing (VK_BOOL32_TO_BOOL(in_features.shaderSampledImageArrayNonUniformIndexing) ), + shader_storage_buffer_array_non_uniform_indexing (VK_BOOL32_TO_BOOL(in_features.shaderStorageBufferArrayNonUniformIndexing) ), + shader_storage_image_array_non_uniform_indexing (VK_BOOL32_TO_BOOL(in_features.shaderStorageImageArrayNonUniformIndexing) ), + shader_storage_texel_buffer_array_dynamic_indexing (VK_BOOL32_TO_BOOL(in_features.shaderStorageTexelBufferArrayDynamicIndexing) ), + shader_storage_texel_buffer_array_non_uniform_indexing (VK_BOOL32_TO_BOOL(in_features.shaderStorageTexelBufferArrayNonUniformIndexing) ), + shader_uniform_buffer_array_non_uniform_indexing (VK_BOOL32_TO_BOOL(in_features.shaderUniformBufferArrayNonUniformIndexing) ), + shader_uniform_texel_buffer_array_dynamic_indexing (VK_BOOL32_TO_BOOL(in_features.shaderUniformTexelBufferArrayDynamicIndexing) ), + shader_uniform_texel_buffer_array_non_uniform_indexing (VK_BOOL32_TO_BOOL(in_features.shaderUniformTexelBufferArrayNonUniformIndexing) ) +{ + /* Stub */ +} + +bool Anvil::EXTDescriptorIndexingFeatures::operator==(const EXTDescriptorIndexingFeatures& in_features) const +{ + return (descriptor_binding_partially_bound == in_features.descriptor_binding_partially_bound && + descriptor_binding_sampled_image_update_after_bind == in_features.descriptor_binding_sampled_image_update_after_bind && + descriptor_binding_storage_buffer_update_after_bind == in_features.descriptor_binding_storage_buffer_update_after_bind && + descriptor_binding_storage_image_update_after_bind == 
in_features.descriptor_binding_storage_image_update_after_bind && + descriptor_binding_storage_texel_buffer_update_after_bind == in_features.descriptor_binding_storage_texel_buffer_update_after_bind && + descriptor_binding_uniform_buffer_update_after_bind == in_features.descriptor_binding_uniform_buffer_update_after_bind && + descriptor_binding_uniform_texel_buffer_update_after_bind == in_features.descriptor_binding_uniform_texel_buffer_update_after_bind && + descriptor_binding_update_unused_while_pending == in_features.descriptor_binding_update_unused_while_pending && + descriptor_binding_variable_descriptor_count == in_features.descriptor_binding_variable_descriptor_count && + runtime_descriptor_array == in_features.runtime_descriptor_array && + shader_input_attachment_array_dynamic_indexing == in_features.shader_input_attachment_array_dynamic_indexing && + shader_input_attachment_array_non_uniform_indexing == in_features.shader_input_attachment_array_non_uniform_indexing && + shader_sampled_image_array_non_uniform_indexing == in_features.shader_sampled_image_array_non_uniform_indexing && + shader_storage_buffer_array_non_uniform_indexing == in_features.shader_storage_buffer_array_non_uniform_indexing && + shader_storage_image_array_non_uniform_indexing == in_features.shader_storage_image_array_non_uniform_indexing && + shader_storage_texel_buffer_array_dynamic_indexing == in_features.shader_storage_texel_buffer_array_dynamic_indexing && + shader_storage_texel_buffer_array_non_uniform_indexing == in_features.shader_storage_texel_buffer_array_non_uniform_indexing && + shader_uniform_buffer_array_non_uniform_indexing == in_features.shader_uniform_buffer_array_non_uniform_indexing && + shader_uniform_texel_buffer_array_dynamic_indexing == in_features.shader_uniform_texel_buffer_array_dynamic_indexing && + shader_uniform_texel_buffer_array_non_uniform_indexing == in_features.shader_uniform_texel_buffer_array_non_uniform_indexing); +} + +VkPhysicalDeviceDescriptorIndexingFeaturesEXT Anvil::EXTDescriptorIndexingFeatures::get_vk_physical_device_descriptor_indexing_features() const +{ + VkPhysicalDeviceDescriptorIndexingFeaturesEXT result; + + result.descriptorBindingPartiallyBound = BOOL_TO_VK_BOOL32(descriptor_binding_partially_bound); + result.descriptorBindingSampledImageUpdateAfterBind = BOOL_TO_VK_BOOL32(descriptor_binding_sampled_image_update_after_bind); + result.descriptorBindingStorageBufferUpdateAfterBind = BOOL_TO_VK_BOOL32(descriptor_binding_storage_buffer_update_after_bind); + result.descriptorBindingStorageImageUpdateAfterBind = BOOL_TO_VK_BOOL32(descriptor_binding_storage_image_update_after_bind); + result.descriptorBindingStorageTexelBufferUpdateAfterBind = BOOL_TO_VK_BOOL32(descriptor_binding_storage_texel_buffer_update_after_bind); + result.descriptorBindingUniformBufferUpdateAfterBind = BOOL_TO_VK_BOOL32(descriptor_binding_uniform_buffer_update_after_bind); + result.descriptorBindingUniformTexelBufferUpdateAfterBind = BOOL_TO_VK_BOOL32(descriptor_binding_uniform_texel_buffer_update_after_bind); + result.descriptorBindingUpdateUnusedWhilePending = BOOL_TO_VK_BOOL32(descriptor_binding_update_unused_while_pending); + result.descriptorBindingVariableDescriptorCount = BOOL_TO_VK_BOOL32(descriptor_binding_variable_descriptor_count); + result.pNext = nullptr; + result.runtimeDescriptorArray = BOOL_TO_VK_BOOL32(runtime_descriptor_array); + result.shaderInputAttachmentArrayDynamicIndexing = BOOL_TO_VK_BOOL32(shader_input_attachment_array_dynamic_indexing); + 
result.shaderInputAttachmentArrayNonUniformIndexing = BOOL_TO_VK_BOOL32(shader_input_attachment_array_non_uniform_indexing); + result.shaderSampledImageArrayNonUniformIndexing = BOOL_TO_VK_BOOL32(shader_sampled_image_array_non_uniform_indexing); + result.shaderStorageBufferArrayNonUniformIndexing = BOOL_TO_VK_BOOL32(shader_storage_buffer_array_non_uniform_indexing); + result.shaderStorageImageArrayNonUniformIndexing = BOOL_TO_VK_BOOL32(shader_storage_image_array_non_uniform_indexing); + result.shaderStorageTexelBufferArrayDynamicIndexing = BOOL_TO_VK_BOOL32(shader_storage_texel_buffer_array_dynamic_indexing); + result.shaderStorageTexelBufferArrayNonUniformIndexing = BOOL_TO_VK_BOOL32(shader_storage_texel_buffer_array_non_uniform_indexing); + result.shaderUniformBufferArrayNonUniformIndexing = BOOL_TO_VK_BOOL32(shader_uniform_buffer_array_non_uniform_indexing); + result.shaderUniformTexelBufferArrayDynamicIndexing = BOOL_TO_VK_BOOL32(shader_uniform_texel_buffer_array_dynamic_indexing); + result.shaderUniformTexelBufferArrayNonUniformIndexing = BOOL_TO_VK_BOOL32(shader_uniform_texel_buffer_array_non_uniform_indexing); + result.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT); + + return result; +} + +Anvil::EXTDescriptorIndexingProperties::EXTDescriptorIndexingProperties() + :max_descriptor_set_update_after_bind_input_attachments (UINT32_MAX), + max_descriptor_set_update_after_bind_sampled_images (UINT32_MAX), + max_descriptor_set_update_after_bind_samplers (UINT32_MAX), + max_descriptor_set_update_after_bind_storage_buffers (UINT32_MAX), + max_descriptor_set_update_after_bind_storage_buffers_dynamic(UINT32_MAX), + max_descriptor_set_update_after_bind_storage_images (UINT32_MAX), + max_descriptor_set_update_after_bind_uniform_buffers (UINT32_MAX), + max_descriptor_set_update_after_bind_uniform_buffers_dynamic(UINT32_MAX), + max_per_stage_descriptor_update_after_bind_input_attachments(UINT32_MAX), + max_per_stage_descriptor_update_after_bind_sampled_images (UINT32_MAX), + max_per_stage_descriptor_update_after_bind_samplers (UINT32_MAX), + max_per_stage_descriptor_update_after_bind_storage_buffers (UINT32_MAX), + max_per_stage_descriptor_update_after_bind_storage_images (UINT32_MAX), + max_per_stage_descriptor_update_after_bind_uniform_buffers (UINT32_MAX), + max_per_stage_update_after_bind_resources (UINT32_MAX), + max_update_after_bind_descriptors_in_all_pools (UINT32_MAX), + shader_input_attachment_array_non_uniform_indexing_native (false), + shader_sampled_image_array_non_uniform_indexing_native (false), + shader_storage_buffer_array_non_uniform_indexing_native (false), + shader_storage_image_array_non_uniform_indexing_native (false), + shader_uniform_buffer_array_non_uniform_indexing_native (false) +{ + /* Stub */ +} + +Anvil::EXTDescriptorIndexingProperties::EXTDescriptorIndexingProperties(const VkPhysicalDeviceDescriptorIndexingPropertiesEXT& in_props) + :max_descriptor_set_update_after_bind_input_attachments (in_props.maxDescriptorSetUpdateAfterBindInputAttachments), + max_descriptor_set_update_after_bind_sampled_images (in_props.maxDescriptorSetUpdateAfterBindSampledImages), + max_descriptor_set_update_after_bind_samplers (in_props.maxDescriptorSetUpdateAfterBindSamplers), + max_descriptor_set_update_after_bind_storage_buffers (in_props.maxDescriptorSetUpdateAfterBindStorageBuffers), + max_descriptor_set_update_after_bind_storage_buffers_dynamic(in_props.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic), + 
max_descriptor_set_update_after_bind_storage_images (in_props.maxDescriptorSetUpdateAfterBindStorageImages), + max_descriptor_set_update_after_bind_uniform_buffers (in_props.maxDescriptorSetUpdateAfterBindUniformBuffers), + max_descriptor_set_update_after_bind_uniform_buffers_dynamic(in_props.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic), + max_per_stage_descriptor_update_after_bind_input_attachments(in_props.maxPerStageDescriptorUpdateAfterBindInputAttachments), + max_per_stage_descriptor_update_after_bind_sampled_images (in_props.maxPerStageDescriptorUpdateAfterBindSampledImages), + max_per_stage_descriptor_update_after_bind_samplers (in_props.maxPerStageDescriptorUpdateAfterBindSamplers), + max_per_stage_descriptor_update_after_bind_storage_buffers (in_props.maxPerStageDescriptorUpdateAfterBindStorageBuffers), + max_per_stage_descriptor_update_after_bind_storage_images (in_props.maxPerStageDescriptorUpdateAfterBindStorageImages), + max_per_stage_descriptor_update_after_bind_uniform_buffers (in_props.maxPerStageDescriptorUpdateAfterBindUniformBuffers), + max_per_stage_update_after_bind_resources (in_props.maxPerStageUpdateAfterBindResources), + max_update_after_bind_descriptors_in_all_pools (in_props.maxUpdateAfterBindDescriptorsInAllPools), + shader_input_attachment_array_non_uniform_indexing_native (VK_BOOL32_TO_BOOL(in_props.shaderInputAttachmentArrayNonUniformIndexingNative) ), + shader_sampled_image_array_non_uniform_indexing_native (VK_BOOL32_TO_BOOL(in_props.shaderSampledImageArrayNonUniformIndexingNative) ), + shader_storage_buffer_array_non_uniform_indexing_native (VK_BOOL32_TO_BOOL(in_props.shaderStorageBufferArrayNonUniformIndexingNative) ), + shader_storage_image_array_non_uniform_indexing_native (VK_BOOL32_TO_BOOL(in_props.shaderStorageImageArrayNonUniformIndexingNative) ), + shader_uniform_buffer_array_non_uniform_indexing_native (VK_BOOL32_TO_BOOL(in_props.shaderUniformBufferArrayNonUniformIndexingNative) ) +{ + /* Stub */ +} + +bool Anvil::EXTDescriptorIndexingProperties::operator==(const EXTDescriptorIndexingProperties& in_props) const +{ + return (max_descriptor_set_update_after_bind_input_attachments == in_props.max_descriptor_set_update_after_bind_input_attachments && + max_descriptor_set_update_after_bind_sampled_images == in_props.max_descriptor_set_update_after_bind_sampled_images && + max_descriptor_set_update_after_bind_samplers == in_props.max_descriptor_set_update_after_bind_samplers && + max_descriptor_set_update_after_bind_storage_buffers == in_props.max_descriptor_set_update_after_bind_storage_buffers && + max_descriptor_set_update_after_bind_storage_buffers_dynamic == in_props.max_descriptor_set_update_after_bind_storage_buffers_dynamic && + max_descriptor_set_update_after_bind_storage_images == in_props.max_descriptor_set_update_after_bind_storage_images && + max_descriptor_set_update_after_bind_uniform_buffers == in_props.max_descriptor_set_update_after_bind_uniform_buffers && + max_descriptor_set_update_after_bind_uniform_buffers_dynamic == in_props.max_descriptor_set_update_after_bind_uniform_buffers_dynamic && + max_per_stage_descriptor_update_after_bind_input_attachments == in_props.max_per_stage_descriptor_update_after_bind_input_attachments && + max_per_stage_descriptor_update_after_bind_sampled_images == in_props.max_per_stage_descriptor_update_after_bind_sampled_images && + max_per_stage_descriptor_update_after_bind_samplers == in_props.max_per_stage_descriptor_update_after_bind_samplers && + 
max_per_stage_descriptor_update_after_bind_storage_buffers == in_props.max_per_stage_descriptor_update_after_bind_storage_buffers && + max_per_stage_descriptor_update_after_bind_storage_images == in_props.max_per_stage_descriptor_update_after_bind_storage_images && + max_per_stage_descriptor_update_after_bind_uniform_buffers == in_props.max_per_stage_descriptor_update_after_bind_uniform_buffers && + max_per_stage_update_after_bind_resources == in_props.max_per_stage_update_after_bind_resources && + max_update_after_bind_descriptors_in_all_pools == in_props.max_update_after_bind_descriptors_in_all_pools && + shader_input_attachment_array_non_uniform_indexing_native == in_props.shader_input_attachment_array_non_uniform_indexing_native && + shader_sampled_image_array_non_uniform_indexing_native == in_props.shader_sampled_image_array_non_uniform_indexing_native && + shader_storage_buffer_array_non_uniform_indexing_native == in_props.shader_storage_buffer_array_non_uniform_indexing_native && + shader_storage_image_array_non_uniform_indexing_native == in_props.shader_storage_image_array_non_uniform_indexing_native && + shader_uniform_buffer_array_non_uniform_indexing_native == in_props.shader_uniform_buffer_array_non_uniform_indexing_native); +} + +Anvil::EXTExternalMemoryHostProperties::EXTExternalMemoryHostProperties() +{ + min_imported_host_pointer_alignment = 0; +} + +Anvil::EXTExternalMemoryHostProperties::EXTExternalMemoryHostProperties(const VkPhysicalDeviceExternalMemoryHostPropertiesEXT& in_props) +{ + min_imported_host_pointer_alignment = in_props.minImportedHostPointerAlignment; +} + +bool Anvil::EXTExternalMemoryHostProperties::operator==(const Anvil::EXTExternalMemoryHostProperties& in_props) const +{ + return (min_imported_host_pointer_alignment == in_props.min_imported_host_pointer_alignment); +} + +Anvil::EXTInlineUniformBlockFeatures::EXTInlineUniformBlockFeatures() + :descriptor_binding_inline_uniform_block_update_after_bind(false), + inline_uniform_block (false) +{ + /* Stub */ +} + +Anvil::EXTInlineUniformBlockFeatures::EXTInlineUniformBlockFeatures(const VkPhysicalDeviceInlineUniformBlockFeaturesEXT& in_features) + :descriptor_binding_inline_uniform_block_update_after_bind(VK_BOOL32_TO_BOOL(in_features.descriptorBindingInlineUniformBlockUpdateAfterBind) ), + inline_uniform_block (VK_BOOL32_TO_BOOL(in_features.inlineUniformBlock) ) +{ + /* Stub */ +} + +VkPhysicalDeviceInlineUniformBlockFeaturesEXT Anvil::EXTInlineUniformBlockFeatures::get_vk_physical_device_inline_uniform_block_features() const +{ + VkPhysicalDeviceInlineUniformBlockFeaturesEXT result; + + result.descriptorBindingInlineUniformBlockUpdateAfterBind = BOOL_TO_VK_BOOL32(descriptor_binding_inline_uniform_block_update_after_bind); + result.inlineUniformBlock = BOOL_TO_VK_BOOL32(inline_uniform_block); + result.pNext = nullptr; + result.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT; + + return result; +} + +bool Anvil::EXTInlineUniformBlockFeatures::operator==(const EXTInlineUniformBlockFeatures& in_features) const +{ + return (in_features.descriptor_binding_inline_uniform_block_update_after_bind == descriptor_binding_inline_uniform_block_update_after_bind) && + (in_features.inline_uniform_block == inline_uniform_block); +} + +Anvil::EXTInlineUniformBlockProperties::EXTInlineUniformBlockProperties() + :max_descriptor_set_inline_uniform_blocks (0), + max_descriptor_set_update_after_bind_inline_uniform_blocks (0), + max_inline_uniform_block_size (0), + 
max_per_stage_descriptor_inline_uniform_blocks (0), + max_per_stage_descriptor_update_after_bind_inline_uniform_blocks(0) +{ + /* Stub */ +} + +Anvil::EXTInlineUniformBlockProperties::EXTInlineUniformBlockProperties(const VkPhysicalDeviceInlineUniformBlockPropertiesEXT& in_props) + :max_descriptor_set_inline_uniform_blocks (in_props.maxDescriptorSetInlineUniformBlocks), + max_descriptor_set_update_after_bind_inline_uniform_blocks (in_props.maxDescriptorSetUpdateAfterBindInlineUniformBlocks), + max_inline_uniform_block_size (in_props.maxInlineUniformBlockSize), + max_per_stage_descriptor_inline_uniform_blocks (in_props.maxPerStageDescriptorInlineUniformBlocks), + max_per_stage_descriptor_update_after_bind_inline_uniform_blocks(in_props.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks) +{ + /* Stub */ +} + +bool Anvil::EXTInlineUniformBlockProperties::operator==(const EXTInlineUniformBlockProperties& in_props) const +{ + return (max_descriptor_set_inline_uniform_blocks == in_props.max_descriptor_set_inline_uniform_blocks) && + (max_descriptor_set_update_after_bind_inline_uniform_blocks == in_props.max_descriptor_set_update_after_bind_inline_uniform_blocks) && + (max_inline_uniform_block_size == in_props.max_inline_uniform_block_size) && + (max_per_stage_descriptor_inline_uniform_blocks == in_props.max_per_stage_descriptor_inline_uniform_blocks) && + (max_per_stage_descriptor_update_after_bind_inline_uniform_blocks == in_props.max_per_stage_descriptor_update_after_bind_inline_uniform_blocks); +} + +Anvil::EXTPCIBusInfoProperties::EXTPCIBusInfoProperties() + :pci_bus (0), + pci_device (0), + pci_domain (0), + pci_function(0) +{ + /* Stub */ +} + +Anvil::EXTPCIBusInfoProperties::EXTPCIBusInfoProperties(const VkPhysicalDevicePCIBusInfoPropertiesEXT& in_props) + :pci_bus (in_props.pciBus), + pci_device (in_props.pciDevice), + pci_domain (in_props.pciDomain), + pci_function(in_props.pciFunction) +{ + /* Stub */ +} + +bool Anvil::EXTPCIBusInfoProperties::operator==(const Anvil::EXTPCIBusInfoProperties& in_props) const +{ + return (in_props.pci_bus == pci_bus && + in_props.pci_device == pci_device && + in_props.pci_domain == pci_domain && + in_props.pci_function == pci_function); +} + +Anvil::EXTSampleLocationsProperties::EXTSampleLocationsProperties() +{ + max_sample_location_grid_size.height = 0; + max_sample_location_grid_size.width = 0; + sample_location_coordinate_range[0] = 0.0f; + sample_location_coordinate_range[1] = 0.0f; + sample_location_sample_counts = Anvil::SampleCountFlagBits::NONE; + sample_location_sub_pixel_bits = 0; + variable_sample_locations = false; +} + +Anvil::EXTSampleLocationsProperties::EXTSampleLocationsProperties(const VkPhysicalDeviceSampleLocationsPropertiesEXT& in_props) +{ + max_sample_location_grid_size.height = in_props.maxSampleLocationGridSize.height; + max_sample_location_grid_size.width = in_props.maxSampleLocationGridSize.width; + sample_location_coordinate_range[0] = in_props.sampleLocationCoordinateRange[0]; + sample_location_coordinate_range[1] = in_props.sampleLocationCoordinateRange[1]; + sample_location_sample_counts = static_cast(in_props.sampleLocationSampleCounts); + sample_location_sub_pixel_bits = in_props.sampleLocationSubPixelBits; + variable_sample_locations = (in_props.variableSampleLocations == VK_TRUE); +} + +bool Anvil::EXTSampleLocationsProperties::operator==(const Anvil::EXTSampleLocationsProperties& in_props) const +{ + return (max_sample_location_grid_size.height == in_props.max_sample_location_grid_size.height && + 
max_sample_location_grid_size.width == in_props.max_sample_location_grid_size.width && + sample_location_coordinate_range[0] == in_props.sample_location_coordinate_range[0] && + sample_location_coordinate_range[1] == in_props.sample_location_coordinate_range[1] && + sample_location_sample_counts == in_props.sample_location_sample_counts && + sample_location_sub_pixel_bits == in_props.sample_location_sub_pixel_bits && + variable_sample_locations == in_props.variable_sample_locations); +} + +Anvil::EXTSamplerFilterMinmaxProperties::EXTSamplerFilterMinmaxProperties() +{ + filter_minmax_image_component_mapping = false; + filter_minmax_single_component_formats = false; +} + +Anvil::EXTSamplerFilterMinmaxProperties::EXTSamplerFilterMinmaxProperties(const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT& in_props) +{ + filter_minmax_image_component_mapping = (in_props.filterMinmaxImageComponentMapping == VK_TRUE); + filter_minmax_single_component_formats = (in_props.filterMinmaxSingleComponentFormats == VK_TRUE); +} + +bool Anvil::EXTSamplerFilterMinmaxProperties::operator==(const Anvil::EXTSamplerFilterMinmaxProperties& in_props) const +{ + return (filter_minmax_image_component_mapping == in_props.filter_minmax_image_component_mapping && + filter_minmax_single_component_formats == in_props.filter_minmax_single_component_formats); +} + +Anvil::EXTScalarBlockLayoutFeatures::EXTScalarBlockLayoutFeatures() + :scalar_block_layout(false) +{ + /* Stub */ +} + +Anvil::EXTScalarBlockLayoutFeatures::EXTScalarBlockLayoutFeatures(const VkPhysicalDeviceScalarBlockLayoutFeaturesEXT& in_features) +{ + scalar_block_layout = VK_BOOL32_TO_BOOL(in_features.scalarBlockLayout); +} + +VkPhysicalDeviceScalarBlockLayoutFeaturesEXT Anvil::EXTScalarBlockLayoutFeatures::get_vk_physical_device_scalar_block_layout_features_ext() const +{ + VkPhysicalDeviceScalarBlockLayoutFeaturesEXT result; + + result.pNext = nullptr; + result.scalarBlockLayout = BOOL_TO_VK_BOOL32(scalar_block_layout); + result.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT; + + return result; +} + +bool Anvil::EXTScalarBlockLayoutFeatures::operator==(const EXTScalarBlockLayoutFeatures& in_features) const +{ + return (in_features.scalar_block_layout == scalar_block_layout); +} + +Anvil::EXTTransformFeedbackFeatures::EXTTransformFeedbackFeatures() + :geometry_streams (false), + transform_feedback(false) +{ + /* Stub */ +} + +Anvil::EXTTransformFeedbackFeatures::EXTTransformFeedbackFeatures(const VkPhysicalDeviceTransformFeedbackFeaturesEXT& in_features) + :geometry_streams (in_features.geometryStreams == VK_TRUE), + transform_feedback(in_features.transformFeedback == VK_TRUE) +{ + /* Stub */ +} + +VkPhysicalDeviceTransformFeedbackFeaturesEXT Anvil::EXTTransformFeedbackFeatures::get_vk_physical_device_transform_feedback_features() const +{ + VkPhysicalDeviceTransformFeedbackFeaturesEXT result; + + result.geometryStreams = (geometry_streams) ? VK_TRUE : VK_FALSE; + result.pNext = nullptr; + result.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT; + result.transformFeedback = (transform_feedback) ? 
VK_TRUE : VK_FALSE; + + return result; +} + +bool Anvil::EXTTransformFeedbackFeatures::operator==(const EXTTransformFeedbackFeatures& in_features) const +{ + return (in_features.geometry_streams == geometry_streams && + in_features.transform_feedback == transform_feedback); +} + +Anvil::EXTTransformFeedbackProperties::EXTTransformFeedbackProperties() + :max_transform_feedback_buffer_data_size (0), + max_transform_feedback_buffer_data_stride (0), + max_transform_feedback_buffer_size (0), + max_transform_feedback_stream_data_size (0), + n_max_transform_feedback_buffers (0), + n_max_transform_feedback_streams (0), + supports_transform_feedback_draw (false), + supports_transform_feedback_queries (false), + supports_transform_feedback_rasterization_stream_select(false), + supports_transform_feedback_streams_lines_triangles (false) +{ + /* Stub */ +} + +Anvil::EXTTransformFeedbackProperties::EXTTransformFeedbackProperties(const VkPhysicalDeviceTransformFeedbackPropertiesEXT& in_props) + :max_transform_feedback_buffer_data_size (in_props.maxTransformFeedbackBufferDataSize), + max_transform_feedback_buffer_data_stride (in_props.maxTransformFeedbackBufferDataStride), + max_transform_feedback_buffer_size (in_props.maxTransformFeedbackBufferSize), + max_transform_feedback_stream_data_size (in_props.maxTransformFeedbackStreamDataSize), + n_max_transform_feedback_buffers (in_props.maxTransformFeedbackBuffers), + n_max_transform_feedback_streams (in_props.maxTransformFeedbackStreams), + supports_transform_feedback_draw (in_props.transformFeedbackDraw == VK_TRUE), + supports_transform_feedback_queries (in_props.transformFeedbackQueries == VK_TRUE), + supports_transform_feedback_rasterization_stream_select(in_props.transformFeedbackRasterizationStreamSelect == VK_TRUE), + supports_transform_feedback_streams_lines_triangles (in_props.transformFeedbackStreamsLinesTriangles == VK_TRUE) +{ + /* Stub */ +} + +bool Anvil::EXTTransformFeedbackProperties::operator==(const Anvil::EXTTransformFeedbackProperties& in_props) const +{ + return (max_transform_feedback_buffer_data_size == in_props.max_transform_feedback_buffer_data_size && + max_transform_feedback_buffer_data_stride == in_props.max_transform_feedback_buffer_data_stride && + max_transform_feedback_buffer_size == in_props.max_transform_feedback_buffer_size && + max_transform_feedback_stream_data_size == in_props.max_transform_feedback_stream_data_size && + n_max_transform_feedback_buffers == in_props.n_max_transform_feedback_buffers && + n_max_transform_feedback_streams == in_props.n_max_transform_feedback_streams && + supports_transform_feedback_draw == in_props.supports_transform_feedback_draw && + supports_transform_feedback_queries == in_props.supports_transform_feedback_queries && + supports_transform_feedback_rasterization_stream_select == in_props.supports_transform_feedback_rasterization_stream_select && + supports_transform_feedback_streams_lines_triangles == in_props.supports_transform_feedback_streams_lines_triangles); +} + +Anvil::EXTVertexAttributeDivisorProperties::EXTVertexAttributeDivisorProperties() + :max_vertex_attribute_divisor(0) +{ + /* Stub */ +} + +Anvil::EXTVertexAttributeDivisorProperties::EXTVertexAttributeDivisorProperties(const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT& in_props) + :max_vertex_attribute_divisor(in_props.maxVertexAttribDivisor) +{ + /* Stub */ +} + +bool Anvil::EXTVertexAttributeDivisorProperties::operator==(const Anvil::EXTVertexAttributeDivisorProperties& in_props) const +{ + return 
(max_vertex_attribute_divisor == in_props.max_vertex_attribute_divisor);
+}
+
+Anvil::ExternalFenceProperties::ExternalFenceProperties()
+{
+    is_exportable = false;
+    is_importable = false;
+}
+
+Anvil::ExternalFenceProperties::ExternalFenceProperties(const VkExternalFencePropertiesKHR& in_external_fence_props)
+{
+    compatible_external_handle_types           = static_cast<Anvil::ExternalFenceHandleTypeFlags>(in_external_fence_props.compatibleHandleTypes);
+    export_from_imported_external_handle_types = static_cast<Anvil::ExternalFenceHandleTypeFlags>(in_external_fence_props.exportFromImportedHandleTypes);
+    is_exportable                              = (in_external_fence_props.externalFenceFeatures & VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR) != 0;
+    is_importable                              = (in_external_fence_props.externalFenceFeatures & VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR) != 0;
+}
+
+Anvil::ExternalMemoryProperties::ExternalMemoryProperties()
+{
+    is_exportable = false;
+    is_importable = false;
+}
+
+Anvil::ExternalMemoryProperties::ExternalMemoryProperties(const VkExternalMemoryPropertiesKHR& in_external_memory_props)
+{
+    compatible_external_handle_types           = static_cast<Anvil::ExternalMemoryHandleTypeFlags>(in_external_memory_props.compatibleHandleTypes);
+    export_from_imported_external_handle_types = static_cast<Anvil::ExternalMemoryHandleTypeFlags>(in_external_memory_props.exportFromImportedHandleTypes);
+    is_exportable                              = (in_external_memory_props.externalMemoryFeatures & VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR) != 0;
+    is_importable                              = (in_external_memory_props.externalMemoryFeatures & VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR) != 0;
+}
+
+Anvil::ExternalSemaphoreProperties::ExternalSemaphoreProperties()
+{
+    is_exportable = false;
+    is_importable = false;
+}
+
+Anvil::ExternalSemaphoreProperties::ExternalSemaphoreProperties(const VkExternalSemaphorePropertiesKHR& in_external_semaphore_props)
+{
+    compatible_external_handle_types           = static_cast<Anvil::ExternalSemaphoreHandleTypeFlags>(in_external_semaphore_props.compatibleHandleTypes);
+    export_from_imported_external_handle_types = static_cast<Anvil::ExternalSemaphoreHandleTypeFlags>(in_external_semaphore_props.exportFromImportedHandleTypes);
+    is_exportable                              = (in_external_semaphore_props.externalSemaphoreFeatures & VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR) != 0;
+    is_importable                              = (in_external_semaphore_props.externalSemaphoreFeatures & VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR) != 0;
+}
+
+Anvil::FenceProperties::FenceProperties()
+{
+    /* Stub */
+}
+
+Anvil::FenceProperties::FenceProperties(const ExternalFenceProperties& in_external_fence_properties)
+    :external_fence_properties(in_external_fence_properties)
+{
+    /* Stub */
+}
+
+Anvil::FormatProperties::FormatProperties()
+{
+    memset(this,
+           0,
+           sizeof(*this) );
+}
+
+Anvil::FormatProperties::FormatProperties(const VkFormatProperties& in_format_props)
+{
+    buffer_capabilities         = static_cast<Anvil::FormatFeatureFlags>(in_format_props.bufferFeatures);
+    linear_tiling_capabilities  = static_cast<Anvil::FormatFeatureFlags>(in_format_props.linearTilingFeatures);
+    optimal_tiling_capabilities = static_cast<Anvil::FormatFeatureFlags>(in_format_props.optimalTilingFeatures);
+}
+
+Anvil::ImageFormatProperties::ImageFormatProperties()
+    :max_resource_size                        (0),
+     n_combined_image_sampler_descriptors_used(0),
+     n_max_array_layers                       (0),
+     n_max_mip_levels                         (0),
+     sample_counts                            (Anvil::SampleCountFlagBits::NONE),
+     supports_amd_texture_gather_bias_lod     (false),
+     valid_stencil_aspect_image_usage_flags   (Anvil::ImageUsageFlagBits::NONE)
+{
+    max_extent.depth  = 0;
+    max_extent.height = 0;
+    max_extent.width  = 0;
+}
+
+Anvil::ImageFormatProperties::ImageFormatProperties(const VkImageFormatProperties&  in_image_format_props,
+                                                    const bool&                     in_supports_amd_texture_gather_bias_lod,
+                                                    const ExternalMemoryProperties&
in_external_handle_properties, + const Anvil::ImageUsageFlags& in_valid_stencil_aspect_image_usage_flags, + const uint32_t& in_n_combined_image_sampler_descriptors_used) +{ + external_handle_properties = in_external_handle_properties; + max_extent = in_image_format_props.maxExtent; + max_resource_size = in_image_format_props.maxResourceSize; + n_combined_image_sampler_descriptors_used = in_n_combined_image_sampler_descriptors_used; + n_max_array_layers = in_image_format_props.maxArrayLayers; + n_max_mip_levels = in_image_format_props.maxMipLevels; + sample_counts = static_cast(in_image_format_props.sampleCounts); + supports_amd_texture_gather_bias_lod = in_supports_amd_texture_gather_bias_lod; + valid_stencil_aspect_image_usage_flags = in_valid_stencil_aspect_image_usage_flags; +} + +/** Please see header for specification */ +Anvil::ImageBarrier::ImageBarrier(const ImageBarrier& in) +{ + dst_access_mask = in.dst_access_mask; + dst_queue_family_index = in.dst_queue_family_index; + image = in.image; + image_barrier_vk = in.image_barrier_vk; + image_ptr = in.image_ptr; + new_layout = in.new_layout; + old_layout = in.old_layout; + src_access_mask = in.src_access_mask; + src_queue_family_index = in.src_queue_family_index; + subresource_range = in.subresource_range; +} + +/** Please see header for specification */ +Anvil::ImageBarrier::ImageBarrier(Anvil::AccessFlags in_source_access_mask, + Anvil::AccessFlags in_destination_access_mask, + Anvil::ImageLayout in_old_layout, + Anvil::ImageLayout in_new_layout, + uint32_t in_src_queue_family_index, + uint32_t in_dst_queue_family_index, + Anvil::Image* in_image_ptr, + Anvil::ImageSubresourceRange in_image_subresource_range) +{ + dst_access_mask = in_destination_access_mask; + dst_queue_family_index = in_dst_queue_family_index; + image = (in_image_ptr != VK_NULL_HANDLE) ? in_image_ptr->get_image() + : VK_NULL_HANDLE; + image_ptr = in_image_ptr; + new_layout = in_new_layout; + old_layout = in_old_layout; + src_access_mask = in_source_access_mask; + src_queue_family_index = in_src_queue_family_index; + subresource_range = in_image_subresource_range; + + /* NOTE: Barriers referring to DS images must always specify both aspects. */ + { + const auto image_format = in_image_ptr->get_create_info_ptr()->get_format(); + + if (Anvil::Formats::has_depth_aspect (image_format) && + Anvil::Formats::has_stencil_aspect(image_format) ) + { + if (subresource_range.aspect_mask != (Anvil::ImageAspectFlagBits::DEPTH_BIT | Anvil::ImageAspectFlagBits::STENCIL_BIT) ) + { + subresource_range.aspect_mask = (Anvil::ImageAspectFlagBits::DEPTH_BIT | Anvil::ImageAspectFlagBits::STENCIL_BIT); + } + } + } + + image_barrier_vk.dstAccessMask = in_destination_access_mask.get_vk(); + image_barrier_vk.dstQueueFamilyIndex = in_dst_queue_family_index; + image_barrier_vk.image = (in_image_ptr != nullptr) ? in_image_ptr->get_image() + : VK_NULL_HANDLE; + image_barrier_vk.newLayout = static_cast(in_new_layout); + image_barrier_vk.oldLayout = static_cast(in_old_layout); + image_barrier_vk.pNext = nullptr; + image_barrier_vk.srcAccessMask = in_source_access_mask.get_vk(); + image_barrier_vk.srcQueueFamilyIndex = in_src_queue_family_index; + image_barrier_vk.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; + image_barrier_vk.subresourceRange = subresource_range.get_vk(); + + /* NOTE: For an image barrier to work correctly, the underlying subresource range must be assigned memory. 
+ * Query for a memory block in order to force any listening memory allocators to bake */ + auto memory_block_ptr = image_ptr->get_memory_block(); + + ANVIL_REDUNDANT_VARIABLE(memory_block_ptr); +} + +/** Please see header for specification */ +Anvil::ImageBarrier::~ImageBarrier() +{ + /* Stub */ +} + +bool Anvil::ImageBarrier::operator==(const ImageBarrier& in_barrier) const +{ + bool result = true; + + result &= (dst_access_mask == in_barrier.dst_access_mask); + result &= (src_access_mask == in_barrier.src_access_mask); + + result &= (dst_queue_family_index == in_barrier.dst_queue_family_index); + result &= (image == in_barrier.image); + result &= (image_ptr == in_barrier.image_ptr); + result &= (new_layout == in_barrier.new_layout); + result &= (old_layout == in_barrier.old_layout); + result &= (src_queue_family_index == in_barrier.src_queue_family_index); + + result &= (subresource_range == in_barrier.subresource_range); + + return result; +} + +bool Anvil::ImageSubresourceRange::operator==(const Anvil::ImageSubresourceRange& in_subresource_range) const +{ + return (aspect_mask == in_subresource_range.aspect_mask && + base_mip_level == in_subresource_range.base_mip_level && + level_count == in_subresource_range.level_count && + base_array_layer == in_subresource_range.base_array_layer && + layer_count == in_subresource_range.layer_count); +} + +Anvil::KHRSamplerYCbCrConversionFeatures::KHRSamplerYCbCrConversionFeatures() + :sampler_ycbcr_conversion(false) +{ + /* Stub */ +} + +Anvil::KHRSamplerYCbCrConversionFeatures::KHRSamplerYCbCrConversionFeatures(const VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR& in_features) + :sampler_ycbcr_conversion(VK_BOOL32_TO_BOOL(in_features.samplerYcbcrConversion) ) +{ + /* Stub */ +} + +VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR Anvil::KHRSamplerYCbCrConversionFeatures::get_vk_physical_device_sampler_ycbcr_conversion_features() const +{ + VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR result; + + result.pNext = nullptr; + result.samplerYcbcrConversion = BOOL_TO_VK_BOOL32(sampler_ycbcr_conversion); + result.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR; + + return result; +} + +bool Anvil::KHRSamplerYCbCrConversionFeatures::operator==(const Anvil::KHRSamplerYCbCrConversionFeatures& in_features) const +{ + return (in_features.sampler_ycbcr_conversion == sampler_ycbcr_conversion); +} + +Anvil::MemoryBudget::MemoryBudget() +{ + heap_budget.fill(0); + heap_usage.fill (0); +} + +Anvil::MemoryBudget::MemoryBudget(const VkPhysicalDeviceMemoryBudgetPropertiesEXT& in_properties) +{ + anvil_assert(heap_budget.size() == sizeof(in_properties.heapBudget) / sizeof(in_properties.heapBudget[0])); + anvil_assert(heap_usage.size() == sizeof(in_properties.heapUsage) / sizeof(in_properties.heapUsage[0])); + + memcpy(heap_budget.data(), + in_properties.heapBudget, + sizeof(in_properties.heapBudget)); + + memcpy(heap_usage.data(), + in_properties.heapUsage, + sizeof(in_properties.heapUsage)); +} + +Anvil::EXTMemoryPriorityFeatures::EXTMemoryPriorityFeatures() +{ + is_memory_priority_supported = false; +} + +Anvil::EXTMemoryPriorityFeatures::EXTMemoryPriorityFeatures(const VkPhysicalDeviceMemoryPriorityFeaturesEXT& in_features) +{ + is_memory_priority_supported = VK_BOOL32_TO_BOOL(in_features.memoryPriority); +} + +VkPhysicalDeviceMemoryPriorityFeaturesEXT Anvil::EXTMemoryPriorityFeatures::get_vk_physical_device_memory_priority_features() const +{ + VkPhysicalDeviceMemoryPriorityFeaturesEXT result; + + result.sType = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT; + result.pNext = nullptr; + result.memoryPriority = BOOL_TO_VK_BOOL32(is_memory_priority_supported); + + return result; +} + +bool Anvil::EXTMemoryPriorityFeatures::operator==(const EXTMemoryPriorityFeatures& in_memory_priority_features) const +{ + return (is_memory_priority_supported == in_memory_priority_features.is_memory_priority_supported); +} + +Anvil::KHR16BitStorageFeatures::KHR16BitStorageFeatures() +{ + is_input_output_storage_supported = false; + is_push_constant_16_bit_storage_supported = false; + is_storage_buffer_16_bit_access_supported = false; + is_uniform_and_storage_buffer_16_bit_access_supported = false; +} + +Anvil::KHR16BitStorageFeatures::KHR16BitStorageFeatures(const VkPhysicalDevice16BitStorageFeaturesKHR& in_features) +{ + is_input_output_storage_supported = VK_BOOL32_TO_BOOL(in_features.storageInputOutput16); + is_push_constant_16_bit_storage_supported = VK_BOOL32_TO_BOOL(in_features.storagePushConstant16); + is_storage_buffer_16_bit_access_supported = VK_BOOL32_TO_BOOL(in_features.storageBuffer16BitAccess); + is_uniform_and_storage_buffer_16_bit_access_supported = VK_BOOL32_TO_BOOL(in_features.uniformAndStorageBuffer16BitAccess); +} + +bool Anvil::KHR16BitStorageFeatures::operator==(const KHR16BitStorageFeatures& in_features) const +{ + return (in_features.is_input_output_storage_supported == is_input_output_storage_supported && + in_features.is_push_constant_16_bit_storage_supported == is_push_constant_16_bit_storage_supported && + in_features.is_storage_buffer_16_bit_access_supported == is_storage_buffer_16_bit_access_supported && + in_features.is_uniform_and_storage_buffer_16_bit_access_supported == is_uniform_and_storage_buffer_16_bit_access_supported); +} + +VkPhysicalDevice16BitStorageFeaturesKHR Anvil::KHR16BitStorageFeatures::get_vk_physical_device_16_bit_storage_features() const +{ + VkPhysicalDevice16BitStorageFeaturesKHR result; + + result.pNext = nullptr; + result.storageBuffer16BitAccess = BOOL_TO_VK_BOOL32(is_storage_buffer_16_bit_access_supported); + result.storageInputOutput16 = BOOL_TO_VK_BOOL32(is_input_output_storage_supported); + result.storagePushConstant16 = BOOL_TO_VK_BOOL32(is_push_constant_16_bit_storage_supported); + result.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR; + result.uniformAndStorageBuffer16BitAccess = BOOL_TO_VK_BOOL32(is_uniform_and_storage_buffer_16_bit_access_supported); + + return result; + +} + +Anvil::KHR8BitStorageFeatures::KHR8BitStorageFeatures() +{ + storage_buffer_8_bit_access = false; + storage_push_constant_8 = false; + uniform_and_storage_buffer_8_bit_access = false; +} + +Anvil::KHR8BitStorageFeatures::KHR8BitStorageFeatures(const VkPhysicalDevice8BitStorageFeaturesKHR& in_features) +{ + storage_buffer_8_bit_access = (in_features.storageBuffer8BitAccess == VK_TRUE); + storage_push_constant_8 = (in_features.storagePushConstant8 == VK_TRUE); + uniform_and_storage_buffer_8_bit_access = (in_features.uniformAndStorageBuffer8BitAccess == VK_TRUE); +} + +VkPhysicalDevice8BitStorageFeaturesKHR Anvil::KHR8BitStorageFeatures::get_vk_physical_device_8_bit_storage_features() const +{ + VkPhysicalDevice8BitStorageFeaturesKHR result; + + result.pNext = nullptr; + result.storageBuffer8BitAccess = (storage_buffer_8_bit_access) ? VK_TRUE : VK_FALSE; + result.storagePushConstant8 = (storage_push_constant_8) ? 
VK_TRUE : VK_FALSE;
+    result.sType                             = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR;
+    result.uniformAndStorageBuffer8BitAccess = (uniform_and_storage_buffer_8_bit_access) ? VK_TRUE : VK_FALSE;
+
+    return result;
+}
+
+bool Anvil::KHR8BitStorageFeatures::operator==(const Anvil::KHR8BitStorageFeatures& in_features) const
+{
+    return (storage_buffer_8_bit_access             == in_features.storage_buffer_8_bit_access &&
+            storage_push_constant_8                 == in_features.storage_push_constant_8     &&
+            uniform_and_storage_buffer_8_bit_access == in_features.uniform_and_storage_buffer_8_bit_access);
+}
+
+Anvil::KHRDepthStencilResolveProperties::KHRDepthStencilResolveProperties()
+{
+    independent_resolve             = false;
+    independent_resolve_none        = false;
+    supported_depth_resolve_modes   = Anvil::ResolveModeFlagBits::NONE;
+    supported_stencil_resolve_modes = Anvil::ResolveModeFlagBits::NONE;
+}
+
+Anvil::KHRDepthStencilResolveProperties::KHRDepthStencilResolveProperties(const VkPhysicalDeviceDepthStencilResolvePropertiesKHR& in_properties)
+{
+    independent_resolve             = (in_properties.independentResolve     == VK_TRUE);
+    independent_resolve_none        = (in_properties.independentResolveNone == VK_TRUE);
+    supported_depth_resolve_modes   = static_cast<Anvil::ResolveModeFlags>(in_properties.supportedDepthResolveModes);
+    supported_stencil_resolve_modes = static_cast<Anvil::ResolveModeFlags>(in_properties.supportedStencilResolveModes);
+}
+
+bool Anvil::KHRDepthStencilResolveProperties::operator==(const Anvil::KHRDepthStencilResolveProperties& in_props) const
+{
+    return (in_props.independent_resolve             == independent_resolve)           &&
+           (in_props.independent_resolve_none        == independent_resolve_none)      &&
+           (in_props.supported_depth_resolve_modes   == supported_depth_resolve_modes) &&
+           (in_props.supported_stencil_resolve_modes == supported_stencil_resolve_modes);
+}
+
+
+Anvil::KHRDriverPropertiesProperties::KHRDriverPropertiesProperties()
+    :driver_id(Anvil::DriverIdKHR::UNKNOWN)
+{
+    memset(driver_info,
+           0,
+           sizeof(driver_info) );
+    memset(driver_name,
+           0,
+           sizeof(driver_name) );
+}
+
+Anvil::KHRDriverPropertiesProperties::KHRDriverPropertiesProperties(const VkPhysicalDeviceDriverPropertiesKHR& in_properties)
+{
+    static_assert(sizeof(driver_info) == sizeof(in_properties.driverInfo), "Field size mismatch");
+    static_assert(sizeof(driver_name) == sizeof(in_properties.driverName), "Field size mismatch");
+
+    conformance_version = Anvil::ConformanceVersionKHR   (in_properties.conformanceVersion);
+    driver_id           = static_cast<Anvil::DriverIdKHR>(in_properties.driverID);
+
+    memcpy(driver_info,
+           in_properties.driverInfo,
+           sizeof(driver_info) );
+    memcpy(driver_name,
+           in_properties.driverName,
+           sizeof(driver_name) );
+}
+
+bool Anvil::KHRDriverPropertiesProperties::operator==(const KHRDriverPropertiesProperties& in_props) const
+{
+    bool result = false;
+
+    if (!(in_props.conformance_version == conformance_version) )
+    {
+        goto end;
+    }
+
+    if (in_props.driver_id != driver_id)
+    {
+        goto end;
+    }
+
+    if (memcmp(in_props.driver_info,
+               driver_info,
+               sizeof(driver_info) ) != 0)
+    {
+        goto end;
+    }
+
+    if (memcmp(in_props.driver_name,
+               driver_name,
+               sizeof(driver_name) ) != 0)
+    {
+        goto end;
+    }
+
+    result = true;
+end:
+    return result;
+}
+
+
+Anvil::KHRFloat16Int8Features::KHRFloat16Int8Features()
+{
+    shader_float16 = false;
+    shader_int8    = false;
+}
+
+Anvil::KHRFloat16Int8Features::KHRFloat16Int8Features(const VkPhysicalDeviceFloat16Int8FeaturesKHR& in_features)
+{
+    shader_float16 = (in_features.shaderFloat16 == VK_TRUE);
+    shader_int8    = (in_features.shaderInt8    == VK_TRUE);
+}
+
+VkPhysicalDeviceFloat16Int8FeaturesKHR Anvil::KHRFloat16Int8Features::get_vk_physical_device_float16_int8_features() const
+{
+    VkPhysicalDeviceFloat16Int8FeaturesKHR result;
+
+    result.pNext         = nullptr;
+    result.shaderFloat16 = (shader_float16) ? VK_TRUE : VK_FALSE;
+    result.shaderInt8    = (shader_int8)    ? VK_TRUE : VK_FALSE;
+    result.sType         = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR;
+
+    return result;
+}
+
+bool Anvil::KHRFloat16Int8Features::operator==(const Anvil::KHRFloat16Int8Features& in_features) const
+{
+    return (shader_float16 == in_features.shader_float16 &&
+            shader_int8    == in_features.shader_int8);
+}
+
+Anvil::KHRMaintenance2PhysicalDevicePointClippingProperties::KHRMaintenance2PhysicalDevicePointClippingProperties()
+    :point_clipping_behavior(PointClippingBehavior::UNKNOWN)
+{
+    /* Stub */
+}
+
+Anvil::KHRMaintenance2PhysicalDevicePointClippingProperties::KHRMaintenance2PhysicalDevicePointClippingProperties(const VkPhysicalDevicePointClippingPropertiesKHR& in_props)
+    :point_clipping_behavior(static_cast<Anvil::PointClippingBehavior>(in_props.pointClippingBehavior) )
+{
+    /* Stub */
+}
+
+bool Anvil::KHRMaintenance2PhysicalDevicePointClippingProperties::operator==(const Anvil::KHRMaintenance2PhysicalDevicePointClippingProperties& in_props) const
+{
+    return (in_props.point_clipping_behavior == point_clipping_behavior);
+}
+
+Anvil::KHRMaintenance3Properties::KHRMaintenance3Properties()
+    :max_memory_allocation_size(std::numeric_limits<VkDeviceSize>::max() ),
+     max_per_set_descriptors   (UINT32_MAX)
+{
+    /* Stub */
+}
+
+Anvil::KHRMaintenance3Properties::KHRMaintenance3Properties(const VkPhysicalDeviceMaintenance3PropertiesKHR& in_props)
+    :max_memory_allocation_size(in_props.maxMemoryAllocationSize),
+     max_per_set_descriptors   (in_props.maxPerSetDescriptors)
+{
+    /* Stub */
+}
+
+bool Anvil::KHRMaintenance3Properties::operator==(const Anvil::KHRMaintenance3Properties& in_props) const
+{
+    return (max_memory_allocation_size == in_props.max_memory_allocation_size &&
+            max_per_set_descriptors    == in_props.max_per_set_descriptors);
+}
+
+Anvil::KHRMultiviewFeatures::KHRMultiviewFeatures()
+{
+    multiview                     = false;
+    multiview_geometry_shader     = false;
+    multiview_tessellation_shader = false;
+}
+
+Anvil::KHRMultiviewFeatures::KHRMultiviewFeatures(const VkPhysicalDeviceMultiviewFeatures& in_features)
+{
+    multiview                     = (in_features.multiview                   == VK_TRUE);
+    multiview_geometry_shader     = (in_features.multiviewGeometryShader     == VK_TRUE);
+    multiview_tessellation_shader = (in_features.multiviewTessellationShader == VK_TRUE);
+}
+
+VkPhysicalDeviceMultiviewFeatures Anvil::KHRMultiviewFeatures::get_vk_physical_device_multiview_features() const
+{
+    VkPhysicalDeviceMultiviewFeatures result;
+
+    result.multiview                   = (multiview)                     ? VK_TRUE : VK_FALSE;
+    result.multiviewGeometryShader     = (multiview_geometry_shader)     ? VK_TRUE : VK_FALSE;
+    result.multiviewTessellationShader = (multiview_tessellation_shader) ?
VK_TRUE : VK_FALSE; + result.pNext = nullptr; + result.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR; + + return result; +} + +bool Anvil::KHRMultiviewFeatures::operator==(const Anvil::KHRMultiviewFeatures& in_features) const +{ + return (in_features.multiview == multiview && + in_features.multiview_geometry_shader == multiview_geometry_shader && + in_features.multiview_tessellation_shader == multiview_tessellation_shader); +} + +Anvil::KHRMultiviewProperties::KHRMultiviewProperties() +{ + max_multiview_instance_index = 0; + max_multiview_view_count = 0; +} + +Anvil::KHRMultiviewProperties::KHRMultiviewProperties(const VkPhysicalDeviceMultiviewPropertiesKHR& in_props) +{ + max_multiview_instance_index = in_props.maxMultiviewInstanceIndex; + max_multiview_view_count = in_props.maxMultiviewViewCount; +} + +bool Anvil::KHRMultiviewProperties::operator==(const KHRMultiviewProperties& in_props) const +{ + return (max_multiview_instance_index == in_props.max_multiview_instance_index && + max_multiview_view_count == in_props.max_multiview_view_count); +} + +Anvil::KHRShaderAtomicInt64Features::KHRShaderAtomicInt64Features() + :shader_buffer_int64_atomics(false), + shader_shared_int64_atomics(false) +{ + /* Stub */ +} + +Anvil::KHRShaderAtomicInt64Features::KHRShaderAtomicInt64Features(const VkPhysicalDeviceShaderAtomicInt64FeaturesKHR& in_features) +{ + shader_buffer_int64_atomics = VK_BOOL32_TO_BOOL(in_features.shaderBufferInt64Atomics); + shader_shared_int64_atomics = VK_BOOL32_TO_BOOL(in_features.shaderSharedInt64Atomics); +} + +VkPhysicalDeviceShaderAtomicInt64FeaturesKHR Anvil::KHRShaderAtomicInt64Features::get_vk_physical_device_shader_atomic_int64_features() const +{ + VkPhysicalDeviceShaderAtomicInt64FeaturesKHR result; + + result.pNext = nullptr; + result.shaderBufferInt64Atomics = BOOL_TO_VK_BOOL32(shader_buffer_int64_atomics); + result.shaderSharedInt64Atomics = BOOL_TO_VK_BOOL32(shader_shared_int64_atomics); + result.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR; + + return result; +} + +bool Anvil::KHRShaderAtomicInt64Features::operator==(const KHRShaderAtomicInt64Features& in_features) const +{ + return (in_features.shader_buffer_int64_atomics == shader_buffer_int64_atomics) && + (in_features.shader_shared_int64_atomics == shader_shared_int64_atomics); +} + +Anvil::KHRShaderFloatControlsProperties::KHRShaderFloatControlsProperties() + :denorm_behavior_independence (VkShaderFloatControlsIndependenceKHR::VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR), + rounding_mode_independence (VkShaderFloatControlsIndependenceKHR::VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR), + shader_denorm_flush_to_zero_float16 (false), + shader_denorm_flush_to_zero_float32 (false), + shader_denorm_flush_to_zero_float64 (false), + shader_denorm_preserve_float16 (false), + shader_denorm_preserve_float32 (false), + shader_denorm_preserve_float64 (false), + shader_rounding_mode_RTE_float16 (false), + shader_rounding_mode_RTE_float32 (false), + shader_rounding_mode_RTE_float64 (false), + shader_rounding_mode_RTZ_float16 (false), + shader_rounding_mode_RTZ_float32 (false), + shader_rounding_mode_RTZ_float64 (false), + shader_signed_zero_inf_nan_preserve_float16(false), + shader_signed_zero_inf_nan_preserve_float32(false), + shader_signed_zero_inf_nan_preserve_float64(false) +{ + /* Stub */ +} + +Anvil::KHRShaderFloatControlsProperties::KHRShaderFloatControlsProperties(const VkPhysicalDeviceFloatControlsPropertiesKHR& in_properties) + 
:denorm_behavior_independence (in_properties.denormBehaviorIndependence ), + rounding_mode_independence (in_properties.roundingModeIndependence ), + shader_denorm_flush_to_zero_float16 (VK_BOOL32_TO_BOOL(in_properties.shaderDenormFlushToZeroFloat16) ), + shader_denorm_flush_to_zero_float32 (VK_BOOL32_TO_BOOL(in_properties.shaderDenormFlushToZeroFloat32) ), + shader_denorm_flush_to_zero_float64 (VK_BOOL32_TO_BOOL(in_properties.shaderDenormFlushToZeroFloat64) ), + shader_denorm_preserve_float16 (VK_BOOL32_TO_BOOL(in_properties.shaderDenormPreserveFloat16) ), + shader_denorm_preserve_float32 (VK_BOOL32_TO_BOOL(in_properties.shaderDenormPreserveFloat32) ), + shader_denorm_preserve_float64 (VK_BOOL32_TO_BOOL(in_properties.shaderDenormPreserveFloat64) ), + shader_rounding_mode_RTE_float16 (VK_BOOL32_TO_BOOL(in_properties.shaderRoundingModeRTEFloat16) ), + shader_rounding_mode_RTE_float32 (VK_BOOL32_TO_BOOL(in_properties.shaderRoundingModeRTEFloat32) ), + shader_rounding_mode_RTE_float64 (VK_BOOL32_TO_BOOL(in_properties.shaderRoundingModeRTEFloat64) ), + shader_rounding_mode_RTZ_float16 (VK_BOOL32_TO_BOOL(in_properties.shaderRoundingModeRTZFloat16) ), + shader_rounding_mode_RTZ_float32 (VK_BOOL32_TO_BOOL(in_properties.shaderRoundingModeRTZFloat32) ), + shader_rounding_mode_RTZ_float64 (VK_BOOL32_TO_BOOL(in_properties.shaderRoundingModeRTZFloat64) ), + shader_signed_zero_inf_nan_preserve_float16(VK_BOOL32_TO_BOOL(in_properties.shaderSignedZeroInfNanPreserveFloat16) ), + shader_signed_zero_inf_nan_preserve_float32(VK_BOOL32_TO_BOOL(in_properties.shaderSignedZeroInfNanPreserveFloat32) ), + shader_signed_zero_inf_nan_preserve_float64(VK_BOOL32_TO_BOOL(in_properties.shaderSignedZeroInfNanPreserveFloat64) ) +{ + /* Stub */ +} + +VkPhysicalDeviceFloatControlsPropertiesKHR Anvil::KHRShaderFloatControlsProperties::get_vk_physical_device_float_controls_properties() const +{ + VkPhysicalDeviceFloatControlsPropertiesKHR result; + + result.pNext = nullptr; + result.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR; + result.denormBehaviorIndependence = denorm_behavior_independence; + result.roundingModeIndependence = rounding_mode_independence; + result.shaderDenormFlushToZeroFloat16 = shader_denorm_flush_to_zero_float16; + result.shaderDenormFlushToZeroFloat32 = shader_denorm_flush_to_zero_float32; + result.shaderDenormFlushToZeroFloat64 = shader_denorm_flush_to_zero_float64; + result.shaderDenormPreserveFloat16 = shader_denorm_preserve_float16; + result.shaderDenormPreserveFloat32 = shader_denorm_preserve_float32; + result.shaderDenormPreserveFloat64 = shader_denorm_preserve_float64; + result.shaderRoundingModeRTEFloat16 = shader_rounding_mode_RTE_float16; + result.shaderRoundingModeRTEFloat32 = shader_rounding_mode_RTE_float32; + result.shaderRoundingModeRTEFloat64 = shader_rounding_mode_RTE_float64; + result.shaderRoundingModeRTZFloat16 = shader_rounding_mode_RTZ_float16; + result.shaderRoundingModeRTZFloat32 = shader_rounding_mode_RTZ_float32; + result.shaderRoundingModeRTZFloat64 = shader_rounding_mode_RTZ_float64; + result.shaderSignedZeroInfNanPreserveFloat16 = shader_signed_zero_inf_nan_preserve_float16; + result.shaderSignedZeroInfNanPreserveFloat32 = shader_signed_zero_inf_nan_preserve_float32; + result.shaderSignedZeroInfNanPreserveFloat64 = shader_signed_zero_inf_nan_preserve_float64; + + return result; +} + +bool Anvil::KHRShaderFloatControlsProperties::operator==(const KHRShaderFloatControlsProperties& in_properties) const +{ + return + (denorm_behavior_independence == 
in_properties.denorm_behavior_independence) && + (rounding_mode_independence == in_properties.rounding_mode_independence) && + (shader_denorm_flush_to_zero_float16 == in_properties.shader_denorm_flush_to_zero_float16) && + (shader_denorm_flush_to_zero_float32 == in_properties.shader_denorm_flush_to_zero_float32) && + (shader_denorm_flush_to_zero_float64 == in_properties.shader_denorm_flush_to_zero_float64) && + (shader_denorm_preserve_float16 == in_properties.shader_denorm_preserve_float16) && + (shader_denorm_preserve_float32 == in_properties.shader_denorm_preserve_float32) && + (shader_denorm_preserve_float64 == in_properties.shader_denorm_preserve_float64) && + (shader_rounding_mode_RTE_float16 == in_properties.shader_rounding_mode_RTE_float16) && + (shader_rounding_mode_RTE_float32 == in_properties.shader_rounding_mode_RTE_float32) && + (shader_rounding_mode_RTE_float64 == in_properties.shader_rounding_mode_RTE_float64) && + (shader_rounding_mode_RTZ_float16 == in_properties.shader_rounding_mode_RTZ_float16) && + (shader_rounding_mode_RTZ_float32 == in_properties.shader_rounding_mode_RTZ_float32) && + (shader_rounding_mode_RTZ_float64 == in_properties.shader_rounding_mode_RTZ_float64) && + (shader_signed_zero_inf_nan_preserve_float16 == in_properties.shader_signed_zero_inf_nan_preserve_float16) && + (shader_signed_zero_inf_nan_preserve_float32 == in_properties.shader_signed_zero_inf_nan_preserve_float32) && + (shader_signed_zero_inf_nan_preserve_float64 == in_properties.shader_signed_zero_inf_nan_preserve_float64); +} + +Anvil::KHRVariablePointerFeatures::KHRVariablePointerFeatures() +{ + variable_pointers = false; + variable_pointers_storage_buffer = false; +} + +Anvil::KHRVariablePointerFeatures::KHRVariablePointerFeatures(const VkPhysicalDeviceVariablePointerFeatures& in_features) +{ + variable_pointers = (in_features.variablePointers == VK_TRUE); + variable_pointers_storage_buffer = (in_features.variablePointersStorageBuffer == VK_TRUE); +} + +VkPhysicalDeviceVariablePointerFeaturesKHR Anvil::KHRVariablePointerFeatures::get_vk_physical_device_variable_pointer_features() const +{ + VkPhysicalDeviceVariablePointerFeaturesKHR result; + + result.pNext = nullptr; + result.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR; + result.variablePointers = (variable_pointers) ? VK_TRUE : VK_FALSE; + result.variablePointersStorageBuffer = (variable_pointers_storage_buffer) ? 
VK_TRUE : VK_FALSE; + + return result; +} + +bool Anvil::KHRVariablePointerFeatures::operator==(const Anvil::KHRVariablePointerFeatures& in_props) const +{ + return (variable_pointers == in_props.variable_pointers && + variable_pointers_storage_buffer == in_props.variable_pointers_storage_buffer); +} + +Anvil::KHRVulkanMemoryModelFeatures::KHRVulkanMemoryModelFeatures() +{ + vulkan_memory_model = false; + vulkan_memory_model_availability_visibility_chains = false; + vulkan_memory_model_device_scope = false; +} + +Anvil::KHRVulkanMemoryModelFeatures::KHRVulkanMemoryModelFeatures(const VkPhysicalDeviceVulkanMemoryModelFeaturesKHR& in_features) +{ + vulkan_memory_model = VK_BOOL32_TO_BOOL(in_features.vulkanMemoryModel); + vulkan_memory_model_availability_visibility_chains = VK_BOOL32_TO_BOOL(in_features.vulkanMemoryModelAvailabilityVisibilityChains); + vulkan_memory_model_device_scope = VK_BOOL32_TO_BOOL(in_features.vulkanMemoryModelDeviceScope); +} + +VkPhysicalDeviceVulkanMemoryModelFeaturesKHR Anvil::KHRVulkanMemoryModelFeatures::get_vk_physical_device_vulkan_memory_model_features() const +{ + VkPhysicalDeviceVulkanMemoryModelFeaturesKHR result; + + result.pNext = nullptr; + result.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR; + result.vulkanMemoryModel = BOOL_TO_VK_BOOL32(vulkan_memory_model); + result.vulkanMemoryModelAvailabilityVisibilityChains = BOOL_TO_VK_BOOL32(vulkan_memory_model_availability_visibility_chains); + result.vulkanMemoryModelDeviceScope = BOOL_TO_VK_BOOL32(vulkan_memory_model_device_scope); + + return result; +} + +bool Anvil::KHRVulkanMemoryModelFeatures::operator==(const KHRVulkanMemoryModelFeatures& in_features) const +{ + return (in_features.vulkan_memory_model == vulkan_memory_model) && + (in_features.vulkan_memory_model_availability_visibility_chains == vulkan_memory_model_availability_visibility_chains) && + (in_features.vulkan_memory_model_device_scope == vulkan_memory_model_device_scope); +} + +Anvil::Layer::Layer(const std::string& in_layer_name) +{ + implementation_version = 0; + name = in_layer_name; + spec_version = 0; +} + +Anvil::Layer::Layer(const VkLayerProperties& in_layer_props) +{ + description = in_layer_props.description; + implementation_version = in_layer_props.implementationVersion; + name = in_layer_props.layerName; + spec_version = in_layer_props.specVersion; +} + +bool Anvil::Layer::operator==(const std::string& in_layer_name) const +{ + return name == in_layer_name; +} + +Anvil::MemoryBarrierAnv::MemoryBarrierAnv(Anvil::AccessFlags in_destination_access_mask, + Anvil::AccessFlags in_source_access_mask) +{ + destination_access_mask = in_destination_access_mask; + source_access_mask = in_source_access_mask; + + memory_barrier_vk.dstAccessMask = destination_access_mask.get_vk(); + memory_barrier_vk.pNext = nullptr; + memory_barrier_vk.srcAccessMask = source_access_mask.get_vk(); + memory_barrier_vk.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER; +} + +Anvil::MemoryHeap::MemoryHeap() +{ + index = UINT32_MAX; + size = 0; +} + +/* Please see header for specification */ +bool Anvil::operator==(const MemoryProperties& in1, + const MemoryProperties& in2) +{ + bool result = (in1.types.size() == in2.types.size() ); + + if (result) + { + for (uint32_t n_type = 0; + n_type < static_cast(in1.types.size() ) && result; + ++n_type) + { + const auto& current_type_in1 = in1.types.at(n_type); + const auto& current_type_in2 = in2.types.at(n_type); + + result &= (current_type_in1 == current_type_in2); + } + } + + return result; +} + 
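The feature wrapper structs above (KHRVariablePointerFeatures, KHRVulkanMemoryModelFeatures, and the like) all follow the same pattern: a constructor that decodes the corresponding Vk*Features struct, a get_vk_*() helper that re-encodes it with pNext reset to nullptr, and a field-wise operator==. A minimal round-trip sketch for the Vulkan memory model wrapper, for illustration only (the filled-in feature values are an assumed example, not part of this patch):

    VkPhysicalDeviceVulkanMemoryModelFeaturesKHR vk_features = {};

    vk_features.sType             = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR;
    vk_features.vulkanMemoryModel = VK_TRUE;

    /* Decode the raw Vulkan struct into the Anvil-side wrapper .. */
    Anvil::KHRVulkanMemoryModelFeatures features(vk_features);

    /* .. and re-encode it whenever a raw struct is needed again; the getter resets pNext to nullptr. */
    const auto roundtrip_vk_features = features.get_vk_physical_device_vulkan_memory_model_features();

    /* Field-wise comparison goes through operator==. */
    anvil_assert(features == Anvil::KHRVulkanMemoryModelFeatures(roundtrip_vk_features) );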
+Anvil::MemoryProperties::MemoryProperties()
+{
+    heaps   = nullptr;
+    n_heaps = 0;
+}
+
+/** Destructor */
+Anvil::MemoryProperties::~MemoryProperties()
+{
+    delete [] heaps;
+
+    heaps = nullptr;
+}
+
+/* Please see header for specification */
+void Anvil::MemoryProperties::init(const VkPhysicalDeviceMemoryProperties& in_mem_properties)
+{
+    n_heaps = in_mem_properties.memoryHeapCount;
+
+    heaps = new Anvil::MemoryHeap[n_heaps];
+    anvil_assert(heaps != nullptr);
+
+    for (unsigned int n_heap = 0;
+                      n_heap < in_mem_properties.memoryHeapCount;
+                    ++n_heap)
+    {
+        heaps[n_heap].flags = static_cast<Anvil::MemoryHeapFlags>(in_mem_properties.memoryHeaps[n_heap].flags);
+        heaps[n_heap].index = n_heap;
+        heaps[n_heap].size  = in_mem_properties.memoryHeaps[n_heap].size;
+    }
+
+    types.reserve(in_mem_properties.memoryTypeCount);
+
+    for (unsigned int n_type = 0;
+                      n_type < in_mem_properties.memoryTypeCount;
+                    ++n_type)
+    {
+        types.push_back(MemoryType(in_mem_properties.memoryTypes[n_type],
+                                   this) );
+    }
+}
+
+/** Please see header for specification */
+bool Anvil::operator==(const MemoryHeap& in1,
+                       const MemoryHeap& in2)
+{
+    return (in1.flags == in2.flags &&
+            in1.size  == in2.size);
+}
+
+/* Please see header for specification */
+Anvil::MemoryType::MemoryType(const VkMemoryType&      in_type,
+                              struct MemoryProperties* in_memory_props_ptr)
+{
+    flags    = static_cast<Anvil::MemoryPropertyFlags>(in_type.propertyFlags);
+    heap_ptr = &in_memory_props_ptr->heaps[in_type.heapIndex];
+
+    features = Anvil::Utils::get_memory_feature_flags_from_vk_property_flags(flags,
+                                                                             heap_ptr->flags);
+}
+
+/* Please see header for specification */
+bool Anvil::operator==(const Anvil::MemoryType& in1,
+                       const Anvil::MemoryType& in2)
+{
+    return ( in1.flags    ==  in2.flags &&
+            *in1.heap_ptr == *in2.heap_ptr);
+}
+
+/** Returns a filled MipmapRawData structure for a 1D mip.
+ *
+ *  NOTE: It is caller's responsibility to configure one of the data storage pointer members.
+ */
+Anvil::MipmapRawData Anvil::MipmapRawData::create_1D(Anvil::ImageAspectFlagBits in_aspect,
+                                                     uint32_t                   in_n_mipmap,
+                                                     uint32_t                   in_row_size)
+{
+    MipmapRawData result;
+
+    result.aspect    = in_aspect;
+    result.data_size = in_row_size;
+    result.row_size  = in_row_size;
+    result.n_layers  = 1;
+    result.n_slices  = 1;
+    result.n_mipmap  = in_n_mipmap;
+
+    return result;
+}
+
+/** Returns a filled MipmapRawData structure for a 1D Array mip.
+ *
+ *  NOTE: It is caller's responsibility to configure one of the data storage pointer members.
+ */
+Anvil::MipmapRawData Anvil::MipmapRawData::create_1D_array(Anvil::ImageAspectFlagBits in_aspect,
+                                                           uint32_t                   in_n_layer,
+                                                           uint32_t                   in_n_layers,
+                                                           uint32_t                   in_n_mipmap,
+                                                           uint32_t                   in_row_size,
+                                                           uint32_t                   in_data_size)
+{
+    MipmapRawData result;
+
+    result.aspect    = in_aspect;
+    result.data_size = in_data_size;
+    result.n_layer   = in_n_layer;
+    result.n_layers  = in_n_layers;
+    result.n_mipmap  = in_n_mipmap;
+    result.n_slices  = 1;
+    result.row_size  = in_row_size;
+
+    return result;
+}
+
+/** Returns a filled MipmapRawData structure for a 2D mip.
+ *
+ *  NOTE: It is caller's responsibility to configure one of the data storage pointer members.
+ */
+Anvil::MipmapRawData Anvil::MipmapRawData::create_2D(Anvil::ImageAspectFlagBits in_aspect,
+                                                     uint32_t                   in_n_mipmap,
+                                                     uint32_t                   in_data_size,
+                                                     uint32_t                   in_row_size)
+{
+    MipmapRawData result;
+
+    result.aspect    = in_aspect;
+    result.data_size = in_data_size;
+    result.n_layers  = 1;
+    result.n_mipmap  = in_n_mipmap;
+    result.n_slices  = 1;
+    result.row_size  = in_row_size;
+
+    return result;
+}
+
+/** Returns a filled MipmapRawData structure for a 2D array mip.
+ *
+ *  NOTE: It is caller's responsibility to configure one of the data storage pointer members.
+ */
+Anvil::MipmapRawData Anvil::MipmapRawData::create_2D_array(Anvil::ImageAspectFlagBits in_aspect,
+                                                           uint32_t                   in_n_layer,
+                                                           uint32_t                   in_n_layers,
+                                                           uint32_t                   in_n_mipmap,
+                                                           uint32_t                   in_data_size,
+                                                           uint32_t                   in_row_size)
+{
+    MipmapRawData result;
+
+    result.aspect    = in_aspect;
+    result.data_size = in_data_size;
+    result.n_layer   = in_n_layer;
+    result.n_layers  = in_n_layers;
+    result.n_mipmap  = in_n_mipmap;
+    result.n_slices  = 1;
+    result.row_size  = in_row_size;
+
+    return result;
+}
+
+/** Returns a filled MipmapRawData structure for a 3D mip.
+ *
+ *  NOTE: It is caller's responsibility to configure one of the data storage pointer members.
+ */
+Anvil::MipmapRawData Anvil::MipmapRawData::create_3D(Anvil::ImageAspectFlagBits in_aspect,
+                                                     uint32_t                   in_n_layer,
+                                                     uint32_t                   in_n_slices,
+                                                     uint32_t                   in_n_mipmap,
+                                                     uint32_t                   in_data_size,
+                                                     uint32_t                   in_row_size)
+{
+    MipmapRawData result;
+
+    result.aspect    = in_aspect;
+    result.data_size = in_data_size;
+    result.n_layers  = 1;
+    result.n_layer   = in_n_layer;
+    result.n_slices  = in_n_slices;
+    result.n_mipmap  = in_n_mipmap;
+    result.row_size  = in_row_size;
+
+    return result;
+}
+
+
+/* Please see header for specification */
+Anvil::MipmapRawData Anvil::MipmapRawData::create_1D_from_uchar_ptr(Anvil::ImageAspectFlagBits     in_aspect,
+                                                                    uint32_t                       in_n_mipmap,
+                                                                    std::shared_ptr<unsigned char> in_linear_tightly_packed_data_ptr,
+                                                                    uint32_t                       in_row_size)
+{
+    MipmapRawData result = create_1D(in_aspect,
+                                     in_n_mipmap,
+                                     in_row_size);
+
+    result.linear_tightly_packed_data_uchar_ptr = in_linear_tightly_packed_data_ptr;
+
+    return result;
+}
+
+/* Please see header for specification */
+Anvil::MipmapRawData Anvil::MipmapRawData::create_1D_from_uchar_ptr(Anvil::ImageAspectFlagBits in_aspect,
+                                                                    uint32_t                   in_n_mipmap,
+                                                                    const unsigned char*       in_linear_tightly_packed_data_ptr,
+                                                                    uint32_t                   in_row_size)
+{
+    MipmapRawData result = create_1D(in_aspect,
+                                     in_n_mipmap,
+                                     in_row_size);
+
+    result.linear_tightly_packed_data_uchar_raw_ptr = in_linear_tightly_packed_data_ptr;
+
+    return result;
+}
+
+/* Please see header for specification */
+Anvil::MipmapRawData Anvil::MipmapRawData::create_1D_from_uchar_vector_ptr(Anvil::ImageAspectFlagBits                   in_aspect,
+                                                                           uint32_t                                     in_n_mipmap,
+                                                                           std::shared_ptr<std::vector<unsigned char> > in_linear_tightly_packed_data_ptr,
+                                                                           uint32_t                                     in_row_size)
+{
+    MipmapRawData result = create_1D(in_aspect,
+                                     in_n_mipmap,
+                                     in_row_size);
+
+    result.linear_tightly_packed_data_uchar_vec_ptr = in_linear_tightly_packed_data_ptr;
+
+    return result;
+}
+
+/* Please see header for specification */
+Anvil::MipmapRawData Anvil::MipmapRawData::create_1D_array_from_uchar_ptr(Anvil::ImageAspectFlagBits     in_aspect,
+                                                                          uint32_t                       in_n_layer,
+                                                                          uint32_t                       in_n_layers,
+                                                                          uint32_t                       in_n_mipmap,
+                                                                          std::shared_ptr<unsigned char> in_linear_tightly_packed_data_ptr,
+                                                                          uint32_t                       in_row_size,
+                                                                          uint32_t                       in_data_size)
+{
+    MipmapRawData result = create_1D_array(in_aspect,
+                                           in_n_layer,
+                                           in_n_layers,
+                                           in_n_mipmap,
+                                           in_row_size,
+                                           in_data_size);
+
+    result.linear_tightly_packed_data_uchar_ptr =
in_linear_tightly_packed_data_ptr; + + return result; +} + +/* Please see header for specification */ +Anvil::MipmapRawData Anvil::MipmapRawData::create_1D_array_from_uchar_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layers, + uint32_t in_n_mipmap, + const unsigned char* in_linear_tightly_packed_data_ptr, + uint32_t in_row_size, + uint32_t in_data_size) +{ + MipmapRawData result = create_1D_array(in_aspect, + in_n_layer, + in_n_layers, + in_n_mipmap, + in_row_size, + in_data_size); + + result.linear_tightly_packed_data_uchar_raw_ptr = in_linear_tightly_packed_data_ptr; + + return result; +} + +/* Please see header for specification */ +Anvil::MipmapRawData Anvil::MipmapRawData::create_1D_array_from_uchar_vector_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layers, + uint32_t in_n_mipmap, + std::shared_ptr > in_linear_tightly_packed_data_ptr, + uint32_t in_row_size, + uint32_t in_data_size) +{ + MipmapRawData result = create_1D_array(in_aspect, + in_n_layer, + in_n_layers, + in_n_mipmap, + in_row_size, + in_data_size); + + result.linear_tightly_packed_data_uchar_vec_ptr = in_linear_tightly_packed_data_ptr; + + return result; +} + +/* Please see header for specification */ +Anvil::MipmapRawData Anvil::MipmapRawData::create_2D_from_uchar_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_mipmap, + std::shared_ptr in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size) +{ + MipmapRawData result = create_2D(in_aspect, + in_n_mipmap, + in_data_size, + in_row_size); + + result.linear_tightly_packed_data_uchar_ptr = in_linear_tightly_packed_data_ptr; + + return result; +} + +/* Please see header for specification */ +Anvil::MipmapRawData Anvil::MipmapRawData::create_2D_from_uchar_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_mipmap, + const unsigned char* in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size) +{ + MipmapRawData result = create_2D(in_aspect, + in_n_mipmap, + in_data_size, + in_row_size); + + result.linear_tightly_packed_data_uchar_raw_ptr = in_linear_tightly_packed_data_ptr; + + return result; +} + +/* Please see header for specification */ +Anvil::MipmapRawData Anvil::MipmapRawData::create_2D_from_uchar_vector_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_mipmap, + std::shared_ptr > in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size) +{ + MipmapRawData result = create_2D(in_aspect, + in_n_mipmap, + in_data_size, + in_row_size); + + result.linear_tightly_packed_data_uchar_vec_ptr = in_linear_tightly_packed_data_ptr; + + return result; +} + +/* Please see header for specification */ +Anvil::MipmapRawData Anvil::MipmapRawData::create_2D_array_from_uchar_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layers, + uint32_t in_n_mipmap, + std::shared_ptr in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size) +{ + MipmapRawData result = create_2D_array(in_aspect, + in_n_layer, + in_n_layers, + in_n_mipmap, + in_data_size, + in_row_size); + + result.linear_tightly_packed_data_uchar_ptr = in_linear_tightly_packed_data_ptr; + + return result; +} + +/* Please see header for specification */ +Anvil::MipmapRawData Anvil::MipmapRawData::create_2D_array_from_uchar_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layers, + uint32_t in_n_mipmap, + const unsigned char* in_linear_tightly_packed_data_ptr, + uint32_t 
in_data_size, + uint32_t in_row_size) +{ + MipmapRawData result = create_2D_array(in_aspect, + in_n_layer, + in_n_layers, + in_n_mipmap, + in_data_size, + in_row_size); + + result.linear_tightly_packed_data_uchar_raw_ptr = in_linear_tightly_packed_data_ptr; + + return result; +} + +/* Please see header for specification */ +Anvil::MipmapRawData Anvil::MipmapRawData::create_2D_array_from_uchar_vector_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layers, + uint32_t in_n_mipmap, + std::shared_ptr > in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size) +{ + MipmapRawData result = create_2D_array(in_aspect, + in_n_layer, + in_n_layers, + in_n_mipmap, + in_data_size, + in_row_size); + + result.linear_tightly_packed_data_uchar_vec_ptr = in_linear_tightly_packed_data_ptr; + + return result; +} + + + +/* Please see header for specification */ +Anvil::MipmapRawData Anvil::MipmapRawData::create_3D_from_uchar_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layer_slices, + uint32_t in_n_mipmap, + std::shared_ptr in_linear_tightly_packed_data_ptr, + uint32_t in_slice_data_size, + uint32_t in_row_size) +{ + MipmapRawData result = create_3D(in_aspect, + in_n_layer, + in_n_layer_slices, + in_n_mipmap, + in_slice_data_size, + in_row_size); + + result.linear_tightly_packed_data_uchar_ptr = in_linear_tightly_packed_data_ptr; + + return result; +} + +/* Please see header for specification */ +Anvil::MipmapRawData Anvil::MipmapRawData::create_3D_from_uchar_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layer_slices, + uint32_t in_n_mipmap, + const unsigned char* in_linear_tightly_packed_data_ptr, + uint32_t in_slice_data_size, + uint32_t in_row_size) +{ + MipmapRawData result = create_3D(in_aspect, + in_n_layer, + in_n_layer_slices, + in_n_mipmap, + in_slice_data_size, + in_row_size); + + result.linear_tightly_packed_data_uchar_raw_ptr = in_linear_tightly_packed_data_ptr; + + return result; +} + +/* Please see header for specification */ +Anvil::MipmapRawData Anvil::MipmapRawData::create_3D_from_uchar_vector_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layer_slices, + uint32_t in_n_mipmap, + std::shared_ptr > in_linear_tightly_packed_data_ptr, + uint32_t in_slice_data_size, + uint32_t in_row_size) +{ + MipmapRawData result = create_3D(in_aspect, + in_n_layer, + in_n_layer_slices, + in_n_mipmap, + in_slice_data_size, + in_row_size); + + result.linear_tightly_packed_data_uchar_vec_ptr = in_linear_tightly_packed_data_ptr; + + return result; +} + + +/* Please see header for specification */ +Anvil::MipmapRawData Anvil::MipmapRawData::create_cube_map_from_uchar_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_mipmap, + std::shared_ptr in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size) +{ + anvil_assert(in_n_layer < 6); + + MipmapRawData result = create_2D_array(in_aspect, + in_n_layer, + 1, /* n_layer_slices */ + in_n_mipmap, + in_data_size, + in_row_size); + + result.linear_tightly_packed_data_uchar_ptr = in_linear_tightly_packed_data_ptr; + + return result; +} + +/* Please see header for specification */ +Anvil::MipmapRawData Anvil::MipmapRawData::create_cube_map_from_uchar_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_mipmap, + const unsigned char* in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size) +{ + 
anvil_assert(in_n_layer < 6); + + MipmapRawData result = create_2D_array(in_aspect, + in_n_layer, + 1, /* n_layer_slices */ + in_n_mipmap, + in_data_size, + in_row_size); + + result.linear_tightly_packed_data_uchar_raw_ptr = in_linear_tightly_packed_data_ptr; + + return result; +} + +/* Please see header for specification */ +Anvil::MipmapRawData Anvil::MipmapRawData::create_cube_map_from_uchar_vector_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_mipmap, + std::shared_ptr > in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size) +{ + anvil_assert(in_n_layer < 6); + + MipmapRawData result = create_2D_array(in_aspect, + in_n_layer, + 1, /* n_layer_slices */ + in_n_mipmap, + in_data_size, + in_row_size); + + result.linear_tightly_packed_data_uchar_vec_ptr = in_linear_tightly_packed_data_ptr; + + return result; +} + + +/* Please see header for specification */ +Anvil::MipmapRawData Anvil::MipmapRawData::create_cube_map_array_from_uchar_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layers, + uint32_t in_n_mipmap, + std::shared_ptr in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size) +{ + MipmapRawData result = create_2D_array(in_aspect, + in_n_layer, + in_n_layers, + in_n_mipmap, + in_data_size, + in_row_size); + + result.linear_tightly_packed_data_uchar_ptr = in_linear_tightly_packed_data_ptr; + + return result; +} + +/* Please see header for specification */ +Anvil::MipmapRawData Anvil::MipmapRawData::create_cube_map_array_from_uchar_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layers, + uint32_t in_n_mipmap, + const unsigned char* in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size) +{ + MipmapRawData result = create_2D_array(in_aspect, + in_n_layer, + in_n_layers, + in_n_mipmap, + in_data_size, + in_row_size); + + result.linear_tightly_packed_data_uchar_raw_ptr = in_linear_tightly_packed_data_ptr; + + return result; +} + +/* Please see header for specification */ +Anvil::MipmapRawData Anvil::MipmapRawData::create_cube_map_array_from_uchar_vector_ptr(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_layers, + uint32_t in_n_mipmap, + std::shared_ptr > in_linear_tightly_packed_data_ptr, + uint32_t in_data_size, + uint32_t in_row_size) +{ + MipmapRawData result = create_2D_array(in_aspect, + in_n_layer, + in_n_layers, + in_n_mipmap, + in_data_size, + in_row_size); + + result.linear_tightly_packed_data_uchar_vec_ptr = in_linear_tightly_packed_data_ptr; + + return result; +} + +Anvil::PhysicalDeviceProperties::PhysicalDeviceProperties() +{ + amd_shader_core_properties_ptr = nullptr; + core_vk1_0_properties_ptr = nullptr; + core_vk1_1_properties_ptr = nullptr; + ext_conservative_rasterization_properties_ptr = nullptr; + ext_descriptor_indexing_properties_ptr = nullptr; + ext_external_memory_host_properties_ptr = nullptr; + ext_inline_uniform_block_properties_ptr = nullptr; + ext_pci_bus_info_properties_ptr = nullptr; + ext_sample_locations_properties_ptr = nullptr; + ext_sampler_filter_minmax_properties_ptr = nullptr; + ext_transform_feedback_properties_ptr = nullptr; + ext_vertex_attribute_divisor_properties_ptr = nullptr; + khr_depth_stencil_resolve_properties_ptr = nullptr; + khr_driver_properties_properties_ptr = nullptr; + khr_external_memory_capabilities_physical_device_id_properties_ptr = nullptr; + khr_maintenance2_point_clipping_properties_ptr = nullptr; + 
khr_maintenance3_properties_ptr = nullptr; + khr_multiview_properties_ptr = nullptr; + khr_shader_float_controls_properties_ptr = nullptr; +} + +Anvil::PhysicalDeviceProperties::PhysicalDeviceProperties(const AMDShaderCoreProperties* in_amd_shader_core_properties_ptr, + const PhysicalDevicePropertiesCoreVK10* in_core_vk1_0_properties_ptr, + const PhysicalDevicePropertiesCoreVK11* in_core_vk1_1_properties_ptr, + const EXTConservativeRasterizationProperties* in_ext_conservative_rasterization_properties_ptr, + const EXTDescriptorIndexingProperties* in_ext_descriptor_indexing_properties_ptr, + const EXTExternalMemoryHostProperties* in_ext_external_memory_host_properties_ptr, + const EXTInlineUniformBlockProperties* in_ext_inline_uniform_block_properties_ptr, + const EXTPCIBusInfoProperties* in_ext_pci_bus_info_properties_ptr, + const EXTSampleLocationsProperties* in_ext_sample_locations_properties_ptr, + const EXTSamplerFilterMinmaxProperties* in_ext_sampler_filter_minmax_properties_ptr, + const EXTTransformFeedbackProperties* in_ext_transform_feedback_properties_ptr, + const EXTVertexAttributeDivisorProperties* in_ext_vertex_attribute_divisor_properties_ptr, + const Anvil::KHRDepthStencilResolveProperties* in_khr_depth_stencil_resolve_props_ptr, + const KHRDriverPropertiesProperties* in_khr_driver_properties_props_ptr, + const Anvil::KHRExternalMemoryCapabilitiesPhysicalDeviceIDProperties* in_khr_external_memory_caps_physical_device_id_props_ptr, + const KHRMaintenance3Properties* in_khr_maintenance3_properties_ptr, + const Anvil::KHRMaintenance2PhysicalDevicePointClippingProperties* in_khr_maintenance2_point_clipping_properties_ptr, + const Anvil::KHRMultiviewProperties* in_khr_multiview_properties_ptr, + const KHRShaderFloatControlsProperties* in_khr_shader_float_controls_properties_ptr) + :amd_shader_core_properties_ptr (in_amd_shader_core_properties_ptr), + core_vk1_0_properties_ptr (in_core_vk1_0_properties_ptr), + core_vk1_1_properties_ptr (in_core_vk1_1_properties_ptr), + ext_conservative_rasterization_properties_ptr (in_ext_conservative_rasterization_properties_ptr), + ext_descriptor_indexing_properties_ptr (in_ext_descriptor_indexing_properties_ptr), + ext_external_memory_host_properties_ptr (in_ext_external_memory_host_properties_ptr), + ext_inline_uniform_block_properties_ptr (in_ext_inline_uniform_block_properties_ptr), + ext_pci_bus_info_properties_ptr (in_ext_pci_bus_info_properties_ptr), + ext_sample_locations_properties_ptr (in_ext_sample_locations_properties_ptr), + ext_sampler_filter_minmax_properties_ptr (in_ext_sampler_filter_minmax_properties_ptr), + ext_transform_feedback_properties_ptr (in_ext_transform_feedback_properties_ptr), + ext_vertex_attribute_divisor_properties_ptr (in_ext_vertex_attribute_divisor_properties_ptr), + khr_depth_stencil_resolve_properties_ptr (in_khr_depth_stencil_resolve_props_ptr), + khr_driver_properties_properties_ptr (in_khr_driver_properties_props_ptr), + khr_external_memory_capabilities_physical_device_id_properties_ptr(in_khr_external_memory_caps_physical_device_id_props_ptr), + khr_maintenance2_point_clipping_properties_ptr (in_khr_maintenance2_point_clipping_properties_ptr), + khr_maintenance3_properties_ptr (in_khr_maintenance3_properties_ptr), + khr_multiview_properties_ptr (in_khr_multiview_properties_ptr), + khr_shader_float_controls_properties_ptr (in_khr_shader_float_controls_properties_ptr) +{ + /* Stub */ +} + +bool Anvil::PhysicalDeviceProperties::operator==(const PhysicalDeviceProperties& in_props) const +{ + bool 
amd_shader_core_properties_match = false; + const bool core_vk1_0_features_match = (*core_vk1_0_properties_ptr == *in_props.core_vk1_0_properties_ptr); + bool core_vk1_1_features_match = false; + bool ext_conservative_rasterization_properties_match = false; + bool ext_descriptor_indexing_properties_match = false; + bool ext_external_memory_host_properties_match = false; + bool ext_inline_uniform_block_properties_match = false; + bool ext_pci_bus_info_properties_match = false; + bool ext_sample_locations_properties_match = false; + bool ext_sampler_filter_minmax_properties_match = false; + bool ext_vertex_attribute_divisor_properties_match = false; + bool khr_depth_stencil_resolve_properties_match = false; + bool khr_driver_properties_properties_match = false; + bool khr_external_memory_capabilities_properties_match = false; + bool khr_maintenance2_properties_match = false; + bool khr_maintenance3_properties_match = false; + bool khr_multiview_properties_match = false; + bool khr_shader_float_controls_properties_match = false; + + if (amd_shader_core_properties_ptr != nullptr && + in_props.amd_shader_core_properties_ptr != nullptr) + { + amd_shader_core_properties_match = (*amd_shader_core_properties_ptr == *in_props.amd_shader_core_properties_ptr); + } + else + { + amd_shader_core_properties_match = (amd_shader_core_properties_ptr == nullptr && + in_props.amd_shader_core_properties_ptr == nullptr); + } + + if (core_vk1_1_properties_ptr != nullptr && + in_props.core_vk1_1_properties_ptr != nullptr) + { + core_vk1_1_features_match = (*core_vk1_1_properties_ptr == *in_props.core_vk1_1_properties_ptr); + } + else + { + core_vk1_1_features_match = (core_vk1_1_properties_ptr == nullptr && + in_props.core_vk1_1_properties_ptr == nullptr); + } + + if (ext_conservative_rasterization_properties_ptr != nullptr && + in_props.ext_conservative_rasterization_properties_ptr != nullptr) + { + ext_conservative_rasterization_properties_match = (*ext_conservative_rasterization_properties_ptr == *in_props.ext_conservative_rasterization_properties_ptr); + } + else + { + ext_conservative_rasterization_properties_match = (ext_conservative_rasterization_properties_ptr == nullptr && + in_props.ext_conservative_rasterization_properties_ptr == nullptr); + } + + if (ext_descriptor_indexing_properties_ptr != nullptr && + in_props.ext_descriptor_indexing_properties_ptr != nullptr) + { + ext_descriptor_indexing_properties_match = (*ext_descriptor_indexing_properties_ptr == *in_props.ext_descriptor_indexing_properties_ptr); + } + else + { + ext_descriptor_indexing_properties_match = (ext_descriptor_indexing_properties_ptr == nullptr && + in_props.ext_descriptor_indexing_properties_ptr == nullptr); + } + + if (ext_external_memory_host_properties_ptr != nullptr && + in_props.ext_external_memory_host_properties_ptr != nullptr) + { + ext_external_memory_host_properties_match = (*ext_external_memory_host_properties_ptr == *in_props.ext_external_memory_host_properties_ptr); + } + else + { + ext_external_memory_host_properties_match = (ext_external_memory_host_properties_ptr == nullptr && + in_props.ext_external_memory_host_properties_ptr == nullptr); + } + + if (ext_inline_uniform_block_properties_ptr != nullptr && + in_props.ext_inline_uniform_block_properties_ptr != nullptr) + { + ext_inline_uniform_block_properties_match = (*ext_inline_uniform_block_properties_ptr == *in_props.ext_inline_uniform_block_properties_ptr); + } + else + { + ext_inline_uniform_block_properties_match = (ext_inline_uniform_block_properties_ptr == 
nullptr && + in_props.ext_inline_uniform_block_properties_ptr == nullptr); + } + + if (ext_pci_bus_info_properties_ptr != nullptr && + in_props.ext_pci_bus_info_properties_ptr != nullptr) + { + ext_pci_bus_info_properties_match = (*ext_pci_bus_info_properties_ptr == *in_props.ext_pci_bus_info_properties_ptr); + } + else + { + ext_pci_bus_info_properties_match = (ext_pci_bus_info_properties_ptr == nullptr && + in_props.ext_pci_bus_info_properties_ptr == nullptr); + } + + if (ext_sample_locations_properties_ptr != nullptr && + in_props.ext_sample_locations_properties_ptr != nullptr) + { + ext_sample_locations_properties_match = (*ext_sample_locations_properties_ptr == *in_props.ext_sample_locations_properties_ptr); + } + else + { + ext_sample_locations_properties_match = (ext_sample_locations_properties_ptr == nullptr && + in_props.ext_sample_locations_properties_ptr == nullptr); + } + + if (ext_sampler_filter_minmax_properties_ptr != nullptr && + in_props.ext_sampler_filter_minmax_properties_ptr != nullptr) + { + ext_sampler_filter_minmax_properties_match = (*ext_sampler_filter_minmax_properties_ptr == *in_props.ext_sampler_filter_minmax_properties_ptr); + } + else + { + ext_sampler_filter_minmax_properties_match = (ext_sampler_filter_minmax_properties_ptr == nullptr && + in_props.ext_sampler_filter_minmax_properties_ptr == nullptr); + } + + if (ext_vertex_attribute_divisor_properties_ptr != nullptr && + in_props.ext_vertex_attribute_divisor_properties_ptr != nullptr) + { + ext_vertex_attribute_divisor_properties_match = (*ext_vertex_attribute_divisor_properties_ptr == *in_props.ext_vertex_attribute_divisor_properties_ptr); + } + else + { + ext_vertex_attribute_divisor_properties_match = (ext_vertex_attribute_divisor_properties_ptr == nullptr && + in_props.ext_vertex_attribute_divisor_properties_ptr == nullptr); + } + + if (khr_depth_stencil_resolve_properties_ptr != nullptr && + in_props.khr_depth_stencil_resolve_properties_ptr != nullptr) + { + khr_depth_stencil_resolve_properties_match = (*khr_depth_stencil_resolve_properties_ptr == *in_props.khr_depth_stencil_resolve_properties_ptr); + } + else + { + khr_depth_stencil_resolve_properties_match = (khr_depth_stencil_resolve_properties_ptr == nullptr && + in_props.khr_depth_stencil_resolve_properties_ptr == nullptr); + } + + if (khr_driver_properties_properties_ptr != nullptr && + in_props.khr_driver_properties_properties_ptr != nullptr) + { + khr_driver_properties_properties_match = (*khr_driver_properties_properties_ptr == *in_props.khr_driver_properties_properties_ptr); + } + else + { + khr_driver_properties_properties_match = (khr_driver_properties_properties_ptr == nullptr && + in_props.khr_driver_properties_properties_ptr == nullptr); + } + + if (khr_external_memory_capabilities_physical_device_id_properties_ptr != nullptr && + in_props.khr_external_memory_capabilities_physical_device_id_properties_ptr!= nullptr) + { + khr_external_memory_capabilities_properties_match = (*khr_external_memory_capabilities_physical_device_id_properties_ptr == *in_props.khr_external_memory_capabilities_physical_device_id_properties_ptr); + } + else + { + khr_external_memory_capabilities_properties_match = (khr_external_memory_capabilities_physical_device_id_properties_ptr == nullptr && + in_props.khr_external_memory_capabilities_physical_device_id_properties_ptr == nullptr); + } + + if (khr_maintenance2_point_clipping_properties_ptr != nullptr && + in_props.khr_maintenance2_point_clipping_properties_ptr != nullptr) + { + khr_maintenance2_properties_match 
= (*khr_maintenance2_point_clipping_properties_ptr == *in_props.khr_maintenance2_point_clipping_properties_ptr); + } + else + { + khr_maintenance2_properties_match = (khr_maintenance2_point_clipping_properties_ptr == nullptr && + in_props.khr_maintenance2_point_clipping_properties_ptr == nullptr); + } + + if (khr_maintenance3_properties_ptr != nullptr && + in_props.khr_maintenance3_properties_ptr != nullptr) + { + khr_maintenance3_properties_match = (*khr_maintenance3_properties_ptr == *in_props.khr_maintenance3_properties_ptr); + } + else + { + khr_maintenance3_properties_match = (khr_maintenance3_properties_ptr == nullptr && + in_props.khr_maintenance3_properties_ptr == nullptr); + } + + if (khr_multiview_properties_ptr != nullptr && + in_props.khr_multiview_properties_ptr != nullptr) + { + khr_multiview_properties_match = (*khr_multiview_properties_ptr == *in_props.khr_multiview_properties_ptr); + } + else + { + khr_multiview_properties_match = (khr_multiview_properties_ptr == nullptr && + in_props.khr_multiview_properties_ptr == nullptr); + } + + if (khr_shader_float_controls_properties_ptr != nullptr && + in_props.khr_shader_float_controls_properties_ptr != nullptr) + { + khr_shader_float_controls_properties_match = (*khr_shader_float_controls_properties_ptr == *in_props.khr_shader_float_controls_properties_ptr); + } + else + { + khr_shader_float_controls_properties_match = (khr_shader_float_controls_properties_ptr == nullptr && + in_props.khr_shader_float_controls_properties_ptr == nullptr); + } + + return amd_shader_core_properties_match && + core_vk1_0_features_match && + core_vk1_1_features_match && + ext_conservative_rasterization_properties_match && + ext_descriptor_indexing_properties_match && + ext_external_memory_host_properties_match && + ext_inline_uniform_block_properties_match && + ext_pci_bus_info_properties_match && + ext_sample_locations_properties_match && + ext_sampler_filter_minmax_properties_match && + ext_vertex_attribute_divisor_properties_match && + khr_depth_stencil_resolve_properties_match && + khr_driver_properties_properties_match && + khr_external_memory_capabilities_properties_match && + khr_maintenance2_properties_match && + khr_maintenance3_properties_match && + khr_multiview_properties_match && + khr_shader_float_controls_properties_match; +} + +Anvil::PhysicalDevicePropertiesCoreVK10::PhysicalDevicePropertiesCoreVK10() + :api_version (UINT32_MAX), + device_id (UINT32_MAX), + device_type (VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM), + driver_version(UINT32_MAX), + vendor_id (UINT32_MAX) +{ + memset(device_name, + 0xFF, + sizeof(device_name) ); + memset(pipeline_cache_uuid, + 0xFF, + sizeof(pipeline_cache_uuid) ); +} + +Anvil::PhysicalDevicePropertiesCoreVK11::PhysicalDevicePropertiesCoreVK11() +{ + /* Stub */ +} + +Anvil::PhysicalDeviceFeatures::PhysicalDeviceFeatures() +{ + core_vk1_0_features_ptr = nullptr; + core_vk1_1_features_ptr = nullptr; + ext_depth_clip_enable_features_ptr = nullptr; + ext_descriptor_indexing_features_ptr = nullptr; + ext_inline_uniform_block_features_ptr = nullptr; + ext_scalar_block_layout_features_ptr = nullptr; + ext_transform_feedback_features_ptr = nullptr; + ext_memory_priority_features_ptr = nullptr; + khr_16bit_storage_features_ptr = nullptr; + khr_8bit_storage_features_ptr = nullptr; + khr_float16_int8_features_ptr = nullptr; + khr_multiview_features_ptr = nullptr; + khr_sampler_ycbcr_conversion_features_ptr = nullptr; + khr_shader_atomic_int64_features_ptr = nullptr; + khr_variable_pointer_features_ptr = nullptr; + 
khr_vulkan_memory_model_features_ptr = nullptr; +} + +Anvil::PhysicalDeviceFeatures::PhysicalDeviceFeatures(const PhysicalDeviceFeaturesCoreVK10* in_core_vk1_0_features_ptr, + const PhysicalDeviceFeaturesCoreVK11* in_core_vk1_1_features_ptr, + const EXTDepthClipEnableFeatures* in_ext_depth_clip_enable_features_ptr, + const EXTDescriptorIndexingFeatures* in_ext_descriptor_indexing_features_ptr, + const EXTInlineUniformBlockFeatures* in_ext_inline_uniform_block_features_ptr, + const EXTScalarBlockLayoutFeatures* in_ext_scalar_block_layout_features_ptr, + const EXTTransformFeedbackFeatures* in_ext_transform_feedback_features_ptr, + const EXTMemoryPriorityFeatures* in_ext_memory_priority_features_ptr, + const KHR16BitStorageFeatures* in_khr_16_bit_storage_features_ptr, + const KHR8BitStorageFeatures* in_khr_8_bit_storage_features_ptr, + const KHRFloat16Int8Features* in_khr_float16_int8_features_ptr, + const KHRMultiviewFeatures* in_khr_multiview_features_ptr, + const KHRSamplerYCbCrConversionFeatures* in_khr_sampler_ycbcr_conversion_features_ptr, + const KHRShaderAtomicInt64Features* in_khr_shader_atomic_int64_features_ptr, + const KHRVariablePointerFeatures* in_khr_variable_pointer_features_ptr, + const KHRVulkanMemoryModelFeatures* in_khr_vulkan_memory_model_features_ptr) +{ + core_vk1_0_features_ptr = in_core_vk1_0_features_ptr; + core_vk1_1_features_ptr = in_core_vk1_1_features_ptr; + ext_depth_clip_enable_features_ptr = in_ext_depth_clip_enable_features_ptr; + ext_descriptor_indexing_features_ptr = in_ext_descriptor_indexing_features_ptr; + ext_inline_uniform_block_features_ptr = in_ext_inline_uniform_block_features_ptr; + ext_scalar_block_layout_features_ptr = in_ext_scalar_block_layout_features_ptr; + ext_transform_feedback_features_ptr = in_ext_transform_feedback_features_ptr; + ext_memory_priority_features_ptr = in_ext_memory_priority_features_ptr; + khr_16bit_storage_features_ptr = in_khr_16_bit_storage_features_ptr; + khr_8bit_storage_features_ptr = in_khr_8_bit_storage_features_ptr; + khr_float16_int8_features_ptr = in_khr_float16_int8_features_ptr; + khr_multiview_features_ptr = in_khr_multiview_features_ptr; + khr_sampler_ycbcr_conversion_features_ptr = in_khr_sampler_ycbcr_conversion_features_ptr; + khr_shader_atomic_int64_features_ptr = in_khr_shader_atomic_int64_features_ptr; + khr_variable_pointer_features_ptr = in_khr_variable_pointer_features_ptr; + khr_vulkan_memory_model_features_ptr = in_khr_vulkan_memory_model_features_ptr; +} + +bool Anvil::PhysicalDeviceFeatures::operator==(const PhysicalDeviceFeatures& in_physical_device_features) const +{ + const bool core_vk1_0_features_match = (*core_vk1_0_features_ptr == *in_physical_device_features.core_vk1_0_features_ptr); + const bool core_vk1_1_features_match = ( core_vk1_1_features_ptr == nullptr && in_physical_device_features.core_vk1_1_features_ptr == nullptr) || + (*core_vk1_1_features_ptr == *in_physical_device_features.core_vk1_1_features_ptr); + bool ext_depth_clip_enable_features_match = false; + bool ext_descriptor_indexing_features_match = false; + bool ext_inline_uniform_block_features_match = false; + bool ext_scalar_block_layout_features_match = false; + bool ext_transform_feedback_features_match = false; + bool ext_memory_priority_features_match = false; + bool khr_16bit_storage_features_match = false; + bool khr_8bit_storage_features_match = false; + bool khr_float16_int8_features_match = false; + bool khr_multiview_features_match = false; + bool khr_sampler_ycbcr_conversion_features_match = false; + bool 
khr_shader_atomic_int64_features_match = false; + bool khr_variable_pointer_features_match = false; + bool khr_vulkan_memory_features_match = false; + + if (ext_depth_clip_enable_features_ptr != nullptr && + in_physical_device_features.ext_depth_clip_enable_features_ptr != nullptr) + { + ext_depth_clip_enable_features_match = (*ext_depth_clip_enable_features_ptr == *in_physical_device_features.ext_depth_clip_enable_features_ptr); + } + else + { + ext_depth_clip_enable_features_match = (ext_depth_clip_enable_features_ptr == nullptr && + in_physical_device_features.ext_depth_clip_enable_features_ptr == nullptr); + } + + if (ext_descriptor_indexing_features_ptr != nullptr && + in_physical_device_features.ext_descriptor_indexing_features_ptr != nullptr) + { + ext_descriptor_indexing_features_match = (*ext_descriptor_indexing_features_ptr == *in_physical_device_features.ext_descriptor_indexing_features_ptr); + } + else + { + ext_descriptor_indexing_features_match = (ext_descriptor_indexing_features_ptr == nullptr && + in_physical_device_features.ext_descriptor_indexing_features_ptr == nullptr); + } + + if (ext_inline_uniform_block_features_ptr != nullptr && + in_physical_device_features.ext_inline_uniform_block_features_ptr != nullptr) + { + ext_inline_uniform_block_features_match = (*ext_inline_uniform_block_features_ptr == *in_physical_device_features.ext_inline_uniform_block_features_ptr); + } + else + { + ext_inline_uniform_block_features_match = (ext_inline_uniform_block_features_ptr == nullptr && + in_physical_device_features.ext_inline_uniform_block_features_ptr == nullptr); + } + + if (ext_scalar_block_layout_features_ptr != nullptr && + in_physical_device_features.ext_scalar_block_layout_features_ptr != nullptr) + { + ext_scalar_block_layout_features_match = (*ext_scalar_block_layout_features_ptr == *in_physical_device_features.ext_scalar_block_layout_features_ptr); + } + else + { + ext_scalar_block_layout_features_match = (ext_scalar_block_layout_features_ptr == nullptr && + in_physical_device_features.ext_scalar_block_layout_features_ptr == nullptr); + } + + if (ext_transform_feedback_features_ptr != nullptr && + in_physical_device_features.ext_transform_feedback_features_ptr != nullptr) + { + ext_transform_feedback_features_match = (*ext_transform_feedback_features_ptr == *in_physical_device_features.ext_transform_feedback_features_ptr); + } + else + { + ext_transform_feedback_features_match = (ext_transform_feedback_features_ptr == nullptr && + in_physical_device_features.ext_transform_feedback_features_ptr == nullptr); + } + + if (ext_memory_priority_features_ptr != nullptr && + in_physical_device_features.ext_memory_priority_features_ptr != nullptr) + { + ext_memory_priority_features_match = (*ext_memory_priority_features_ptr == *in_physical_device_features.ext_memory_priority_features_ptr); + } + + if (khr_16bit_storage_features_ptr != nullptr && + in_physical_device_features.khr_16bit_storage_features_ptr != nullptr) + { + khr_16bit_storage_features_match = (*khr_16bit_storage_features_ptr == *in_physical_device_features.khr_16bit_storage_features_ptr); + } + else + { + khr_16bit_storage_features_match = (khr_16bit_storage_features_ptr == nullptr && + in_physical_device_features.khr_16bit_storage_features_ptr == nullptr); + } + + if (khr_8bit_storage_features_ptr != nullptr && + in_physical_device_features.khr_8bit_storage_features_ptr != nullptr) + { + khr_8bit_storage_features_match = (*khr_8bit_storage_features_ptr == 
*in_physical_device_features.khr_8bit_storage_features_ptr); + } + else + { + khr_8bit_storage_features_match = (khr_8bit_storage_features_ptr == nullptr && + in_physical_device_features.khr_8bit_storage_features_ptr == nullptr); + } + + if (khr_float16_int8_features_ptr != nullptr && + in_physical_device_features.khr_float16_int8_features_ptr != nullptr) + { + khr_float16_int8_features_match = (*khr_float16_int8_features_ptr == *in_physical_device_features.khr_float16_int8_features_ptr); + } + else + { + khr_float16_int8_features_match = (khr_float16_int8_features_ptr == nullptr && + in_physical_device_features.khr_float16_int8_features_ptr == nullptr); + } + + if (khr_multiview_features_ptr != nullptr && + in_physical_device_features.khr_multiview_features_ptr != nullptr) + { + khr_multiview_features_match = (*khr_multiview_features_ptr == *in_physical_device_features.khr_multiview_features_ptr); + } + else + { + khr_multiview_features_match = (khr_multiview_features_ptr == nullptr && + in_physical_device_features.khr_multiview_features_ptr == nullptr); + } + + if (khr_sampler_ycbcr_conversion_features_ptr != nullptr && + in_physical_device_features.khr_sampler_ycbcr_conversion_features_ptr != nullptr) + { + khr_sampler_ycbcr_conversion_features_match = (*khr_sampler_ycbcr_conversion_features_ptr == *in_physical_device_features.khr_sampler_ycbcr_conversion_features_ptr); + } + else + { + khr_sampler_ycbcr_conversion_features_match = (khr_sampler_ycbcr_conversion_features_ptr == nullptr && + in_physical_device_features.khr_sampler_ycbcr_conversion_features_ptr == nullptr); + } + + if (khr_shader_atomic_int64_features_ptr != nullptr && + in_physical_device_features.khr_shader_atomic_int64_features_ptr != nullptr) + { + khr_shader_atomic_int64_features_match = (*khr_shader_atomic_int64_features_ptr == *in_physical_device_features.khr_shader_atomic_int64_features_ptr); + } + else + { + khr_shader_atomic_int64_features_match = (khr_shader_atomic_int64_features_ptr == nullptr && + in_physical_device_features.khr_shader_atomic_int64_features_ptr == nullptr); + } + + if (khr_variable_pointer_features_ptr != nullptr && + in_physical_device_features.khr_variable_pointer_features_ptr != nullptr) + { + khr_variable_pointer_features_match = (*khr_variable_pointer_features_ptr == *in_physical_device_features.khr_variable_pointer_features_ptr); + } + else + { + khr_variable_pointer_features_match = (khr_variable_pointer_features_ptr == nullptr && + in_physical_device_features.khr_variable_pointer_features_ptr == nullptr); + } + + if (khr_vulkan_memory_model_features_ptr != nullptr && + in_physical_device_features.khr_vulkan_memory_model_features_ptr != nullptr) + { + khr_vulkan_memory_features_match = (*khr_vulkan_memory_model_features_ptr == *in_physical_device_features.khr_vulkan_memory_model_features_ptr); + } + else + { + khr_vulkan_memory_features_match = (khr_vulkan_memory_model_features_ptr == nullptr && + in_physical_device_features.khr_vulkan_memory_model_features_ptr == nullptr); + } + + return core_vk1_0_features_match && + core_vk1_1_features_match && + ext_depth_clip_enable_features_match && + ext_descriptor_indexing_features_match && + ext_inline_uniform_block_features_match && + ext_scalar_block_layout_features_match && + ext_transform_feedback_features_match && + ext_memory_priority_features_match && + khr_16bit_storage_features_match && + khr_8bit_storage_features_match && + khr_float16_int8_features_match && + khr_multiview_features_match && + 
khr_sampler_ycbcr_conversion_features_match && + khr_shader_atomic_int64_features_match && + khr_variable_pointer_features_match && + khr_vulkan_memory_features_match; +} + +Anvil::PhysicalDeviceGroup::PhysicalDeviceGroup() +{ + supports_subset_allocations = false; +} + +Anvil::PhysicalDeviceLimits::PhysicalDeviceLimits() + :buffer_image_granularity (std::numeric_limits::max() ), + discrete_queue_priorities (UINT32_MAX), + framebuffer_color_sample_counts (Anvil::SampleCountFlagBits::NONE), + framebuffer_depth_sample_counts (Anvil::SampleCountFlagBits::NONE), + framebuffer_no_attachments_sample_counts (Anvil::SampleCountFlagBits::NONE), + framebuffer_stencil_sample_counts (Anvil::SampleCountFlagBits::NONE), + line_width_granularity (FLT_MAX), + max_bound_descriptor_sets (UINT32_MAX), + max_clip_distances (UINT32_MAX), + max_color_attachments (UINT32_MAX), + max_combined_clip_and_cull_distances (UINT32_MAX), + max_compute_shared_memory_size (UINT32_MAX), + max_compute_work_group_invocations (UINT32_MAX), + max_cull_distances (UINT32_MAX), + max_descriptor_set_input_attachments (UINT32_MAX), + max_descriptor_set_sampled_images (UINT32_MAX), + max_descriptor_set_samplers (UINT32_MAX), + max_descriptor_set_storage_buffers (UINT32_MAX), + max_descriptor_set_storage_buffers_dynamic (UINT32_MAX), + max_descriptor_set_storage_images (UINT32_MAX), + max_descriptor_set_uniform_buffers (UINT32_MAX), + max_descriptor_set_uniform_buffers_dynamic (UINT32_MAX), + max_draw_indexed_index_value (UINT32_MAX), + max_draw_indirect_count (UINT32_MAX), + max_fragment_combined_output_resources (UINT32_MAX), + max_fragment_dual_src_attachments (UINT32_MAX), + max_fragment_input_components (UINT32_MAX), + max_fragment_output_attachments (UINT32_MAX), + max_framebuffer_height (UINT32_MAX), + max_framebuffer_layers (UINT32_MAX), + max_framebuffer_width (UINT32_MAX), + max_geometry_input_components (UINT32_MAX), + max_geometry_output_components (UINT32_MAX), + max_geometry_output_vertices (UINT32_MAX), + max_geometry_shader_invocations (UINT32_MAX), + max_geometry_total_output_components (UINT32_MAX), + max_image_array_layers (UINT32_MAX), + max_image_dimension_1D (UINT32_MAX), + max_image_dimension_2D (UINT32_MAX), + max_image_dimension_3D (UINT32_MAX), + max_image_dimension_cube (UINT32_MAX), + max_interpolation_offset (FLT_MAX), + max_memory_allocation_count (UINT32_MAX), + max_per_stage_descriptor_input_attachments (UINT32_MAX), + max_per_stage_descriptor_sampled_images (UINT32_MAX), + max_per_stage_descriptor_samplers (UINT32_MAX), + max_per_stage_descriptor_storage_buffers (UINT32_MAX), + max_per_stage_descriptor_storage_images (UINT32_MAX), + max_per_stage_descriptor_uniform_buffers (UINT32_MAX), + max_per_stage_resources (UINT32_MAX), + max_push_constants_size (UINT32_MAX), + max_sample_mask_words (UINT32_MAX), + max_sampler_allocation_count (UINT32_MAX), + max_sampler_anisotropy (FLT_MAX), + max_sampler_lod_bias (FLT_MAX), + max_storage_buffer_range (UINT32_MAX), + max_viewports (UINT32_MAX), + max_tessellation_control_per_patch_output_components (UINT32_MAX), + max_tessellation_control_per_vertex_input_components (UINT32_MAX), + max_tessellation_control_per_vertex_output_components(UINT32_MAX), + max_tessellation_control_total_output_components (UINT32_MAX), + max_tessellation_evaluation_input_components (UINT32_MAX), + max_tessellation_evaluation_output_components (UINT32_MAX), + max_tessellation_generation_level (UINT32_MAX), + max_tessellation_patch_size (UINT32_MAX), + max_texel_buffer_elements 
(UINT32_MAX),
+     max_texel_gather_offset                               (UINT32_MAX),
+     max_texel_offset                                      (UINT32_MAX),
+     max_uniform_buffer_range                              (UINT32_MAX),
+     max_vertex_input_attributes                           (UINT32_MAX),
+     max_vertex_input_attribute_offset                     (UINT32_MAX),
+     max_vertex_input_bindings                             (UINT32_MAX),
+     max_vertex_input_binding_stride                       (UINT32_MAX),
+     max_vertex_output_components                          (UINT32_MAX),
+     min_interpolation_offset                              (FLT_MAX),
+     min_memory_map_alignment                              (std::numeric_limits<size_t>::max() ),
+     min_storage_buffer_offset_alignment                   (std::numeric_limits<VkDeviceSize>::max() ),
+     min_texel_buffer_offset_alignment                     (std::numeric_limits<VkDeviceSize>::max() ),
+     min_texel_gather_offset                               (INT32_MAX),
+     min_texel_offset                                      (INT32_MAX),
+     min_uniform_buffer_offset_alignment                   (std::numeric_limits<VkDeviceSize>::max() ),
+     mipmap_precision_bits                                 (UINT32_MAX),
+     non_coherent_atom_size                                (std::numeric_limits<VkDeviceSize>::max() ),
+     optimal_buffer_copy_offset_alignment                  (std::numeric_limits<VkDeviceSize>::max() ),
+     optimal_buffer_copy_row_pitch_alignment               (std::numeric_limits<VkDeviceSize>::max() ),
+     point_size_granularity                                (FLT_MAX),
+     sampled_image_color_sample_counts                     (Anvil::SampleCountFlagBits::NONE),
+     sampled_image_depth_sample_counts                     (Anvil::SampleCountFlagBits::NONE),
+     sampled_image_integer_sample_counts                   (Anvil::SampleCountFlagBits::NONE),
+     sampled_image_stencil_sample_counts                   (Anvil::SampleCountFlagBits::NONE),
+     sparse_address_space_size                             (std::numeric_limits<VkDeviceSize>::max() ),
+     standard_sample_locations                             (false),
+     storage_image_sample_counts                           (Anvil::SampleCountFlagBits::NONE),
+     strict_lines                                          (false),
+     sub_pixel_interpolation_offset_bits                   (UINT32_MAX),
+     sub_pixel_precision_bits                              (UINT32_MAX),
+     sub_texel_precision_bits                              (UINT32_MAX),
+     timestamp_compute_and_graphics                        (false),
+     timestamp_period                                      (FLT_MAX),
+     viewport_sub_pixel_bits                               (UINT32_MAX)
+{
+    line_width_range[0] = FLT_MAX;
+    line_width_range[1] = FLT_MAX;
+
+    max_compute_work_group_count[0] = UINT32_MAX;
+    max_compute_work_group_count[1] = UINT32_MAX;
+    max_compute_work_group_count[2] = UINT32_MAX;
+
+    max_compute_work_group_size[0] = UINT32_MAX;
+    max_compute_work_group_size[1] = UINT32_MAX;
+    max_compute_work_group_size[2] = UINT32_MAX;
+
+    max_viewport_dimensions[0] = UINT32_MAX;
+    max_viewport_dimensions[1] = UINT32_MAX;
+
+    point_size_range[0] = FLT_MAX;
+    point_size_range[1] = FLT_MAX;
+
+    viewport_bounds_range[0] = FLT_MAX;
+    viewport_bounds_range[1] = FLT_MAX;
+}
+
+Anvil::PhysicalDeviceLimits::PhysicalDeviceLimits(const VkPhysicalDeviceLimits& in_device_limits)
+    :buffer_image_granularity                              (in_device_limits.bufferImageGranularity),
+     discrete_queue_priorities                             (in_device_limits.discreteQueuePriorities),
+     framebuffer_color_sample_counts                       (static_cast<Anvil::SampleCountFlags>(in_device_limits.framebufferColorSampleCounts) ),
+     framebuffer_depth_sample_counts                       (static_cast<Anvil::SampleCountFlags>(in_device_limits.framebufferDepthSampleCounts) ),
+     framebuffer_no_attachments_sample_counts              (static_cast<Anvil::SampleCountFlags>(in_device_limits.framebufferNoAttachmentsSampleCounts) ),
+     framebuffer_stencil_sample_counts                     (static_cast<Anvil::SampleCountFlags>(in_device_limits.framebufferStencilSampleCounts) ),
+     line_width_granularity                                (in_device_limits.lineWidthGranularity),
+     max_bound_descriptor_sets                             (in_device_limits.maxBoundDescriptorSets),
+     max_clip_distances                                    (in_device_limits.maxClipDistances),
+     max_color_attachments                                 (in_device_limits.maxColorAttachments),
+     max_combined_clip_and_cull_distances                  (in_device_limits.maxCombinedClipAndCullDistances),
+     max_compute_shared_memory_size                        (in_device_limits.maxComputeSharedMemorySize),
+     max_compute_work_group_invocations                    (in_device_limits.maxComputeWorkGroupInvocations),
+     max_cull_distances                                    (in_device_limits.maxCullDistances), +
max_descriptor_set_input_attachments (in_device_limits.maxDescriptorSetInputAttachments), + max_descriptor_set_sampled_images (in_device_limits.maxDescriptorSetSampledImages), + max_descriptor_set_samplers (in_device_limits.maxDescriptorSetSamplers), + max_descriptor_set_storage_buffers (in_device_limits.maxDescriptorSetStorageBuffers), + max_descriptor_set_storage_buffers_dynamic (in_device_limits.maxDescriptorSetStorageBuffersDynamic), + max_descriptor_set_storage_images (in_device_limits.maxDescriptorSetStorageImages), + max_descriptor_set_uniform_buffers (in_device_limits.maxDescriptorSetUniformBuffers), + max_descriptor_set_uniform_buffers_dynamic (in_device_limits.maxDescriptorSetUniformBuffersDynamic), + max_draw_indexed_index_value (in_device_limits.maxDrawIndexedIndexValue), + max_draw_indirect_count (in_device_limits.maxDrawIndirectCount), + max_fragment_combined_output_resources (in_device_limits.maxFragmentCombinedOutputResources), + max_fragment_dual_src_attachments (in_device_limits.maxFragmentDualSrcAttachments), + max_fragment_input_components (in_device_limits.maxFragmentInputComponents), + max_fragment_output_attachments (in_device_limits.maxFragmentOutputAttachments), + max_framebuffer_height (in_device_limits.maxFramebufferHeight), + max_framebuffer_layers (in_device_limits.maxFramebufferLayers), + max_framebuffer_width (in_device_limits.maxFramebufferWidth), + max_geometry_input_components (in_device_limits.maxGeometryInputComponents), + max_geometry_output_components (in_device_limits.maxGeometryOutputComponents), + max_geometry_output_vertices (in_device_limits.maxGeometryOutputVertices), + max_geometry_shader_invocations (in_device_limits.maxGeometryShaderInvocations), + max_geometry_total_output_components (in_device_limits.maxGeometryTotalOutputComponents), + max_image_array_layers (in_device_limits.maxImageArrayLayers), + max_image_dimension_1D (in_device_limits.maxImageDimension1D), + max_image_dimension_2D (in_device_limits.maxImageDimension2D), + max_image_dimension_3D (in_device_limits.maxImageDimension3D), + max_image_dimension_cube (in_device_limits.maxImageDimensionCube), + max_interpolation_offset (in_device_limits.maxInterpolationOffset), + max_memory_allocation_count (in_device_limits.maxMemoryAllocationCount), + max_per_stage_descriptor_input_attachments (in_device_limits.maxPerStageDescriptorInputAttachments), + max_per_stage_descriptor_sampled_images (in_device_limits.maxPerStageDescriptorSampledImages), + max_per_stage_descriptor_samplers (in_device_limits.maxPerStageDescriptorSamplers), + max_per_stage_descriptor_storage_buffers (in_device_limits.maxPerStageDescriptorStorageBuffers), + max_per_stage_descriptor_storage_images (in_device_limits.maxPerStageDescriptorStorageImages), + max_per_stage_descriptor_uniform_buffers (in_device_limits.maxPerStageDescriptorUniformBuffers), + max_per_stage_resources (in_device_limits.maxPerStageResources), + max_push_constants_size (in_device_limits.maxPushConstantsSize), + max_sample_mask_words (in_device_limits.maxSampleMaskWords), + max_sampler_allocation_count (in_device_limits.maxSamplerAllocationCount), + max_sampler_anisotropy (in_device_limits.maxSamplerAnisotropy), + max_sampler_lod_bias (in_device_limits.maxSamplerLodBias), + max_storage_buffer_range (in_device_limits.maxStorageBufferRange), + max_viewports (in_device_limits.maxViewports), + max_tessellation_control_per_patch_output_components (in_device_limits.maxTessellationControlPerPatchOutputComponents), + 
max_tessellation_control_per_vertex_input_components (in_device_limits.maxTessellationControlPerVertexInputComponents), + max_tessellation_control_per_vertex_output_components(in_device_limits.maxTessellationControlPerVertexOutputComponents), + max_tessellation_control_total_output_components (in_device_limits.maxTessellationControlTotalOutputComponents), + max_tessellation_evaluation_input_components (in_device_limits.maxTessellationEvaluationInputComponents), + max_tessellation_evaluation_output_components (in_device_limits.maxTessellationEvaluationOutputComponents), + max_tessellation_generation_level (in_device_limits.maxTessellationGenerationLevel), + max_tessellation_patch_size (in_device_limits.maxTessellationPatchSize), + max_texel_buffer_elements (in_device_limits.maxTexelBufferElements), + max_texel_gather_offset (in_device_limits.maxTexelGatherOffset), + max_texel_offset (in_device_limits.maxTexelOffset), + max_uniform_buffer_range (in_device_limits.maxUniformBufferRange), + max_vertex_input_attributes (in_device_limits.maxVertexInputAttributes), + max_vertex_input_attribute_offset (in_device_limits.maxVertexInputAttributeOffset), + max_vertex_input_bindings (in_device_limits.maxVertexInputBindings), + max_vertex_input_binding_stride (in_device_limits.maxVertexInputBindingStride), + max_vertex_output_components (in_device_limits.maxVertexOutputComponents), + min_interpolation_offset (in_device_limits.minInterpolationOffset), + min_memory_map_alignment (in_device_limits.minMemoryMapAlignment), + min_storage_buffer_offset_alignment (in_device_limits.minStorageBufferOffsetAlignment), + min_texel_buffer_offset_alignment (in_device_limits.minTexelBufferOffsetAlignment), + min_texel_gather_offset (in_device_limits.minTexelGatherOffset), + min_texel_offset (in_device_limits.minTexelOffset), + min_uniform_buffer_offset_alignment (in_device_limits.minUniformBufferOffsetAlignment), + mipmap_precision_bits (in_device_limits.mipmapPrecisionBits), + non_coherent_atom_size (in_device_limits.nonCoherentAtomSize), + optimal_buffer_copy_offset_alignment (in_device_limits.optimalBufferCopyOffsetAlignment), + optimal_buffer_copy_row_pitch_alignment (in_device_limits.optimalBufferCopyRowPitchAlignment), + point_size_granularity (in_device_limits.pointSizeGranularity), + sampled_image_color_sample_counts (static_cast(in_device_limits.sampledImageColorSampleCounts) ), + sampled_image_depth_sample_counts (static_cast(in_device_limits.sampledImageDepthSampleCounts) ), + sampled_image_integer_sample_counts (static_cast(in_device_limits.sampledImageIntegerSampleCounts) ), + sampled_image_stencil_sample_counts (static_cast(in_device_limits.sampledImageStencilSampleCounts) ), + sparse_address_space_size (in_device_limits.sparseAddressSpaceSize), + standard_sample_locations (VK_BOOL32_TO_BOOL(in_device_limits.standardSampleLocations) ), + storage_image_sample_counts (static_cast(in_device_limits.storageImageSampleCounts) ), + strict_lines (VK_BOOL32_TO_BOOL(in_device_limits.strictLines) ), + sub_pixel_interpolation_offset_bits (in_device_limits.subPixelInterpolationOffsetBits), + sub_pixel_precision_bits (in_device_limits.subPixelPrecisionBits), + sub_texel_precision_bits (in_device_limits.subTexelPrecisionBits), + timestamp_compute_and_graphics (VK_BOOL32_TO_BOOL(in_device_limits.timestampComputeAndGraphics) ), + timestamp_period (in_device_limits.timestampPeriod), + viewport_sub_pixel_bits (in_device_limits.viewportSubPixelBits) +{ + memcpy(line_width_range, + in_device_limits.lineWidthRange, + 
sizeof(line_width_range) ); + + memcpy(max_compute_work_group_count, + in_device_limits.maxComputeWorkGroupCount, + sizeof(max_compute_work_group_count) ); + + memcpy(max_compute_work_group_size, + in_device_limits.maxComputeWorkGroupSize, + sizeof(max_compute_work_group_size) ); + + memcpy(max_viewport_dimensions, + in_device_limits.maxViewportDimensions, + sizeof(max_viewport_dimensions) ); + + memcpy(point_size_range, + in_device_limits.pointSizeRange, + sizeof(point_size_range) ); + + memcpy(viewport_bounds_range, + in_device_limits.viewportBoundsRange, + sizeof(viewport_bounds_range) ); +} + +bool Anvil::PhysicalDeviceLimits::operator==(const Anvil::PhysicalDeviceLimits& in_device_limits) const +{ + bool result = false; + + if (buffer_image_granularity == in_device_limits.buffer_image_granularity && + discrete_queue_priorities == in_device_limits.discrete_queue_priorities && + framebuffer_color_sample_counts == in_device_limits.framebuffer_color_sample_counts && + framebuffer_depth_sample_counts == in_device_limits.framebuffer_depth_sample_counts && + framebuffer_no_attachments_sample_counts == in_device_limits.framebuffer_no_attachments_sample_counts && + framebuffer_stencil_sample_counts == in_device_limits.framebuffer_stencil_sample_counts && + max_bound_descriptor_sets == in_device_limits.max_bound_descriptor_sets && + max_clip_distances == in_device_limits.max_clip_distances && + max_color_attachments == in_device_limits.max_color_attachments && + max_combined_clip_and_cull_distances == in_device_limits.max_combined_clip_and_cull_distances && + max_compute_shared_memory_size == in_device_limits.max_compute_shared_memory_size && + max_compute_work_group_count[0] == in_device_limits.max_compute_work_group_count[0] && + max_compute_work_group_count[1] == in_device_limits.max_compute_work_group_count[1] && + max_compute_work_group_count[2] == in_device_limits.max_compute_work_group_count[2] && + max_compute_work_group_invocations == in_device_limits.max_compute_work_group_invocations && + max_compute_work_group_size[0] == in_device_limits.max_compute_work_group_size[0] && + max_compute_work_group_size[1] == in_device_limits.max_compute_work_group_size[1] && + max_compute_work_group_size[2] == in_device_limits.max_compute_work_group_size[2] && + max_cull_distances == in_device_limits.max_cull_distances && + max_descriptor_set_input_attachments == in_device_limits.max_descriptor_set_input_attachments && + max_descriptor_set_sampled_images == in_device_limits.max_descriptor_set_sampled_images && + max_descriptor_set_samplers == in_device_limits.max_descriptor_set_samplers && + max_descriptor_set_storage_buffers == in_device_limits.max_descriptor_set_storage_buffers && + max_descriptor_set_storage_buffers_dynamic == in_device_limits.max_descriptor_set_storage_buffers_dynamic && + max_descriptor_set_storage_images == in_device_limits.max_descriptor_set_storage_images && + max_descriptor_set_uniform_buffers == in_device_limits.max_descriptor_set_uniform_buffers && + max_descriptor_set_uniform_buffers_dynamic == in_device_limits.max_descriptor_set_uniform_buffers_dynamic && + max_draw_indexed_index_value == in_device_limits.max_draw_indexed_index_value && + max_draw_indirect_count == in_device_limits.max_draw_indirect_count && + max_fragment_combined_output_resources == in_device_limits.max_fragment_combined_output_resources && + max_fragment_dual_src_attachments == in_device_limits.max_fragment_dual_src_attachments && + max_fragment_input_components == 
in_device_limits.max_fragment_input_components && + max_fragment_output_attachments == in_device_limits.max_fragment_output_attachments && + max_framebuffer_height == in_device_limits.max_framebuffer_height && + max_framebuffer_layers == in_device_limits.max_framebuffer_layers && + max_framebuffer_width == in_device_limits.max_framebuffer_width && + max_geometry_input_components == in_device_limits.max_geometry_input_components && + max_geometry_output_components == in_device_limits.max_geometry_output_components && + max_geometry_output_vertices == in_device_limits.max_geometry_output_vertices && + max_geometry_shader_invocations == in_device_limits.max_geometry_shader_invocations && + max_geometry_total_output_components == in_device_limits.max_geometry_total_output_components && + max_image_array_layers == in_device_limits.max_image_array_layers && + max_image_dimension_1D == in_device_limits.max_image_dimension_1D && + max_image_dimension_2D == in_device_limits.max_image_dimension_2D && + max_image_dimension_3D == in_device_limits.max_image_dimension_3D && + max_image_dimension_cube == in_device_limits.max_image_dimension_cube && + max_memory_allocation_count == in_device_limits.max_memory_allocation_count && + max_per_stage_descriptor_input_attachments == in_device_limits.max_per_stage_descriptor_input_attachments && + max_per_stage_descriptor_sampled_images == in_device_limits.max_per_stage_descriptor_sampled_images && + max_per_stage_descriptor_samplers == in_device_limits.max_per_stage_descriptor_samplers && + max_per_stage_descriptor_storage_buffers == in_device_limits.max_per_stage_descriptor_storage_buffers && + max_per_stage_descriptor_storage_images == in_device_limits.max_per_stage_descriptor_storage_images && + max_per_stage_descriptor_uniform_buffers == in_device_limits.max_per_stage_descriptor_uniform_buffers && + max_per_stage_resources == in_device_limits.max_per_stage_resources && + max_push_constants_size == in_device_limits.max_push_constants_size && + max_sample_mask_words == in_device_limits.max_sample_mask_words && + max_sampler_allocation_count == in_device_limits.max_sampler_allocation_count && + max_storage_buffer_range == in_device_limits.max_storage_buffer_range && + max_viewport_dimensions[0] == in_device_limits.max_viewport_dimensions[0] && + max_viewport_dimensions[1] == in_device_limits.max_viewport_dimensions[1] && + max_viewports == in_device_limits.max_viewports && + max_tessellation_control_per_patch_output_components == in_device_limits.max_tessellation_control_per_patch_output_components && + max_tessellation_control_per_vertex_input_components == in_device_limits.max_tessellation_control_per_vertex_input_components && + max_tessellation_control_per_vertex_output_components == in_device_limits.max_tessellation_control_per_vertex_output_components && + max_tessellation_control_total_output_components == in_device_limits.max_tessellation_control_total_output_components && + max_tessellation_evaluation_input_components == in_device_limits.max_tessellation_evaluation_input_components && + max_tessellation_evaluation_output_components == in_device_limits.max_tessellation_evaluation_output_components && + max_tessellation_generation_level == in_device_limits.max_tessellation_generation_level && + max_tessellation_patch_size == in_device_limits.max_tessellation_patch_size && + max_texel_buffer_elements == in_device_limits.max_texel_buffer_elements && + max_texel_gather_offset == in_device_limits.max_texel_gather_offset && + max_texel_offset == 
in_device_limits.max_texel_offset && + max_uniform_buffer_range == in_device_limits.max_uniform_buffer_range && + max_vertex_input_attributes == in_device_limits.max_vertex_input_attributes && + max_vertex_input_attribute_offset == in_device_limits.max_vertex_input_attribute_offset && + max_vertex_input_bindings == in_device_limits.max_vertex_input_bindings && + max_vertex_input_binding_stride == in_device_limits.max_vertex_input_binding_stride && + max_vertex_output_components == in_device_limits.max_vertex_output_components && + min_memory_map_alignment == in_device_limits.min_memory_map_alignment && + min_storage_buffer_offset_alignment == in_device_limits.min_storage_buffer_offset_alignment && + min_texel_buffer_offset_alignment == in_device_limits.min_texel_buffer_offset_alignment && + min_texel_gather_offset == in_device_limits.min_texel_gather_offset && + min_texel_offset == in_device_limits.min_texel_offset && + min_uniform_buffer_offset_alignment == in_device_limits.min_uniform_buffer_offset_alignment && + mipmap_precision_bits == in_device_limits.mipmap_precision_bits && + non_coherent_atom_size == in_device_limits.non_coherent_atom_size && + optimal_buffer_copy_offset_alignment == in_device_limits.optimal_buffer_copy_offset_alignment && + optimal_buffer_copy_row_pitch_alignment == in_device_limits.optimal_buffer_copy_row_pitch_alignment && + sampled_image_color_sample_counts == in_device_limits.sampled_image_color_sample_counts && + sampled_image_depth_sample_counts == in_device_limits.sampled_image_depth_sample_counts && + sampled_image_integer_sample_counts == in_device_limits.sampled_image_integer_sample_counts && + sampled_image_stencil_sample_counts == in_device_limits.sampled_image_stencil_sample_counts && + sparse_address_space_size == in_device_limits.sparse_address_space_size && + standard_sample_locations == in_device_limits.standard_sample_locations && + storage_image_sample_counts == in_device_limits.storage_image_sample_counts && + strict_lines == in_device_limits.strict_lines && + sub_pixel_interpolation_offset_bits == in_device_limits.sub_pixel_interpolation_offset_bits && + sub_pixel_precision_bits == in_device_limits.sub_pixel_precision_bits && + sub_texel_precision_bits == in_device_limits.sub_texel_precision_bits && + timestamp_compute_and_graphics == in_device_limits.timestamp_compute_and_graphics && + viewport_sub_pixel_bits == in_device_limits.viewport_sub_pixel_bits) + { + /* Floats */ + if (fabs(line_width_range[0] - in_device_limits.line_width_range[0]) < 1e-5f && + fabs(line_width_range[1] - in_device_limits.line_width_range[1]) < 1e-5f && + fabs(line_width_granularity - in_device_limits.line_width_granularity) < 1e-5f && + fabs(max_interpolation_offset - in_device_limits.max_interpolation_offset) < 1e-5f && + fabs(max_sampler_anisotropy - in_device_limits.max_sampler_anisotropy) < 1e-5f && + fabs(max_sampler_lod_bias - in_device_limits.max_sampler_lod_bias) < 1e-5f && + fabs(min_interpolation_offset - in_device_limits.min_interpolation_offset) < 1e-5f && + fabs(point_size_granularity - in_device_limits.point_size_granularity) < 1e-5f && + fabs(point_size_range[0] - in_device_limits.point_size_range[0]) < 1e-5f && + fabs(point_size_range[1] - in_device_limits.point_size_range[1]) < 1e-5f && + fabs(timestamp_period - in_device_limits.timestamp_period) < 1e-5f && + fabs(viewport_bounds_range[0] - in_device_limits.viewport_bounds_range[0]) < 1e-5f && + fabs(viewport_bounds_range[1] - in_device_limits.viewport_bounds_range[1]) < 1e-5f) + { + result = true; 
+ } + } + + return result; +} + +Anvil::KHRExternalMemoryCapabilitiesPhysicalDeviceIDProperties::KHRExternalMemoryCapabilitiesPhysicalDeviceIDProperties() +{ + device_luid_valid = false; + device_node_mask = 0; + + memset(device_luid, + 0, + VK_LUID_SIZE); + memset(device_uuid, + 0, + VK_UUID_SIZE); + memset(driver_uuid, + 0, + VK_UUID_SIZE); +} + +Anvil::KHRExternalMemoryCapabilitiesPhysicalDeviceIDProperties::KHRExternalMemoryCapabilitiesPhysicalDeviceIDProperties(const VkPhysicalDeviceIDPropertiesKHR& in_properties) +{ + device_luid_valid = VK_BOOL32_TO_BOOL(in_properties.deviceLUIDValid); + device_node_mask = in_properties.deviceNodeMask; + + memcpy(device_luid, + in_properties.deviceLUID, + VK_LUID_SIZE); + memcpy(device_uuid, + in_properties.deviceUUID, + VK_UUID_SIZE); + memcpy(driver_uuid, + in_properties.driverUUID, + VK_UUID_SIZE); +} + +bool Anvil::KHRExternalMemoryCapabilitiesPhysicalDeviceIDProperties::operator==(const Anvil::KHRExternalMemoryCapabilitiesPhysicalDeviceIDProperties& in_props) const +{ + bool result = false; + + if (device_luid_valid != in_props.device_luid_valid || + device_node_mask != in_props.device_node_mask) + { + goto end; + } + + if (device_luid_valid) + { + if (memcmp(device_luid, + in_props.device_luid, + sizeof(device_luid)) != 0) + { + goto end; + } + } + + if (memcmp(device_uuid, + in_props.device_uuid, + sizeof(device_uuid)) != 0) + { + goto end; + } + + if (memcmp(driver_uuid, + in_props.driver_uuid, + sizeof(driver_uuid)) != 0) + { + goto end; + } + + result = true; +end: + return result; +} + +Anvil::PhysicalDevicePropertiesCoreVK10::PhysicalDevicePropertiesCoreVK10(const VkPhysicalDeviceProperties& in_physical_device_properties) + :api_version (in_physical_device_properties.apiVersion), + device_id (in_physical_device_properties.deviceID), + device_type (in_physical_device_properties.deviceType), + driver_version (in_physical_device_properties.driverVersion), + limits (PhysicalDeviceLimits (in_physical_device_properties.limits) ), + sparse_properties(PhysicalDeviceSparseProperties(in_physical_device_properties.sparseProperties) ), + vendor_id (in_physical_device_properties.vendorID) +{ + memcpy(device_name, + in_physical_device_properties.deviceName, + sizeof(device_name) ); + memcpy(pipeline_cache_uuid, + in_physical_device_properties.pipelineCacheUUID, + sizeof(pipeline_cache_uuid) ); +} + +Anvil::PhysicalDevicePropertiesCoreVK11::PhysicalDevicePropertiesCoreVK11(const VkPhysicalDeviceProtectedMemoryProperties& in_protected_memory_properties, + const VkPhysicalDeviceSubgroupProperties& in_subgroup_properties) + :protected_memory_properties(in_protected_memory_properties), + subgroup_properties (in_subgroup_properties) +{ + /* Stub */ +} + +bool Anvil::PhysicalDevicePropertiesCoreVK10::operator==(const PhysicalDevicePropertiesCoreVK10& in_props) const +{ + bool result = false; + + if (in_props.api_version == api_version && + in_props.device_id == device_id && + in_props.device_type == device_type && + in_props.driver_version == driver_version && + in_props.limits == limits && + in_props.sparse_properties == sparse_properties && + in_props.vendor_id == vendor_id) + { + if (memcmp(device_name, + in_props.device_name, + sizeof(device_name) ) == 0 && + memcmp(pipeline_cache_uuid, + in_props.pipeline_cache_uuid, + sizeof(pipeline_cache_uuid) ) == 0) + { + result = true; + } + } + + return result; +} + +bool Anvil::PhysicalDevicePropertiesCoreVK11::operator==(const PhysicalDevicePropertiesCoreVK11& in_props) const +{ + bool result = false; + + if 
(protected_memory_properties == in_props.protected_memory_properties && + subgroup_properties == in_props.subgroup_properties) + { + result = true; + } + + return result; +} + +Anvil::PhysicalDeviceProtectedMemoryFeatures::PhysicalDeviceProtectedMemoryFeatures() + :protected_memory(false) +{ + /* Stub */ +} + +Anvil::PhysicalDeviceProtectedMemoryFeatures::PhysicalDeviceProtectedMemoryFeatures(const VkPhysicalDeviceProtectedMemoryFeatures& in_features) +{ + protected_memory = VK_BOOL32_TO_BOOL(in_features.protectedMemory); +} + +VkPhysicalDeviceProtectedMemoryFeatures Anvil::PhysicalDeviceProtectedMemoryFeatures::get_vk_physical_device_protected_memory_features() const +{ + VkPhysicalDeviceProtectedMemoryFeatures result; + + result.pNext = nullptr; + result.protectedMemory = BOOL_TO_VK_BOOL32(protected_memory); + result.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES; + + return result; +} + +bool Anvil::PhysicalDeviceProtectedMemoryFeatures::operator==(const PhysicalDeviceProtectedMemoryFeatures& in_features) const +{ + return (protected_memory == in_features.protected_memory); +} + +Anvil::PhysicalDeviceProtectedMemoryProperties::PhysicalDeviceProtectedMemoryProperties() + :protected_no_fault(false) +{ + /* Stub */ +} + +Anvil::PhysicalDeviceProtectedMemoryProperties::PhysicalDeviceProtectedMemoryProperties(const VkPhysicalDeviceProtectedMemoryProperties& in_props) + :protected_no_fault(VK_BOOL32_TO_BOOL(in_props.protectedNoFault) ) +{ + /* Stub */ +} + +bool Anvil::PhysicalDeviceProtectedMemoryProperties::operator==(const PhysicalDeviceProtectedMemoryProperties& in_props) const +{ + return (protected_no_fault == in_props.protected_no_fault); +} + +Anvil::PhysicalDeviceSparseProperties::PhysicalDeviceSparseProperties() + :residency_standard_2D_block_shape (false), + residency_standard_2D_multisample_block_shape(false), + residency_standard_3D_block_shape (false), + residency_aligned_mip_size (false), + residency_non_resident_strict (false) +{ + /* Stub */ +} + +Anvil::PhysicalDeviceSparseProperties::PhysicalDeviceSparseProperties(const VkPhysicalDeviceSparseProperties& in_sparse_props) + :residency_standard_2D_block_shape (VK_BOOL32_TO_BOOL(in_sparse_props.residencyStandard2DBlockShape) ), + residency_standard_2D_multisample_block_shape(VK_BOOL32_TO_BOOL(in_sparse_props.residencyStandard2DMultisampleBlockShape) ), + residency_standard_3D_block_shape (VK_BOOL32_TO_BOOL(in_sparse_props.residencyStandard3DBlockShape) ), + residency_aligned_mip_size (VK_BOOL32_TO_BOOL(in_sparse_props.residencyAlignedMipSize) ), + residency_non_resident_strict (VK_BOOL32_TO_BOOL(in_sparse_props.residencyNonResidentStrict) ) +{ + /* Stub */ +} + +bool Anvil::PhysicalDeviceSparseProperties::operator==(const Anvil::PhysicalDeviceSparseProperties& in_props) const +{ + return (residency_standard_2D_block_shape == in_props.residency_standard_2D_block_shape && + residency_standard_2D_multisample_block_shape == in_props.residency_standard_2D_multisample_block_shape && + residency_standard_3D_block_shape == in_props.residency_standard_3D_block_shape && + residency_aligned_mip_size == in_props.residency_aligned_mip_size && + residency_non_resident_strict == in_props.residency_non_resident_strict); +} + +Anvil::PhysicalDeviceSubgroupProperties::PhysicalDeviceSubgroupProperties() +{ + quad_operations_in_all_stages = false; + subgroup_size = 0; + supported_operations = Anvil::SubgroupFeatureFlagBits::NONE; + supported_stages = Anvil::ShaderStageFlagBits::NONE; +} + 
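For context, the conversion constructor that follows is typically fed from a vkGetPhysicalDeviceProperties2 query. A minimal sketch of that call pattern, outside the patch itself and assuming a Vulkan 1.1+ physical device handle plus an included Anvil misc/types.h (the helper name query_subgroup_properties is illustrative only):

#include <vulkan/vulkan.h>

/* Sketch: query the core properties plus the chained subgroup properties for a
 * Vulkan 1.1+ physical device, then wrap the raw struct in the Anvil-side type. */
void query_subgroup_properties(VkPhysicalDevice in_physical_device)
{
    VkPhysicalDeviceSubgroupProperties subgroup_props = {};
    subgroup_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES;

    VkPhysicalDeviceProperties2 props2 = {};
    props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
    props2.pNext = &subgroup_props;

    vkGetPhysicalDeviceProperties2(in_physical_device,
                                  &props2);

    /* wrapped.subgroup_size, wrapped.supported_stages etc. are now usable on the Anvil side. */
    Anvil::PhysicalDeviceSubgroupProperties wrapped(subgroup_props);
}
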
+Anvil::PhysicalDeviceSubgroupProperties::PhysicalDeviceSubgroupProperties(const VkPhysicalDeviceSubgroupProperties& in_props) +{ + quad_operations_in_all_stages = (in_props.quadOperationsInAllStages == VK_TRUE); + subgroup_size = in_props.subgroupSize; + supported_operations = static_cast<Anvil::SubgroupFeatureFlags>(in_props.supportedOperations); + supported_stages = static_cast<Anvil::ShaderStageFlags>(in_props.supportedStages); +} + +bool Anvil::PhysicalDeviceSubgroupProperties::operator==(const PhysicalDeviceSubgroupProperties& in_props) const +{ + return (quad_operations_in_all_stages == in_props.quad_operations_in_all_stages && + subgroup_size == in_props.subgroup_size && + supported_operations == in_props.supported_operations && + supported_stages == in_props.supported_stages); +} + +Anvil::PushConstantRange::PushConstantRange(uint32_t in_offset, + uint32_t in_size, + Anvil::ShaderStageFlags in_stages) +{ + offset = in_offset; + size = in_size; + stages = in_stages; +} + +bool Anvil::PushConstantRange::operator==(const PushConstantRange& in) const +{ + return (offset == in.offset && + size == in.size && + stages == in.stages); +} + +Anvil::QueueFamilyInfo::QueueFamilyInfo(const VkQueueFamilyProperties& in_props) +{ + flags = static_cast<Anvil::QueueFlags>(in_props.queueFlags); + min_image_transfer_granularity = in_props.minImageTransferGranularity; + n_queues = in_props.queueCount; + n_timestamp_bits = in_props.timestampValidBits; +} + +/** Please see header for specification */ +bool Anvil::operator==(const Anvil::QueueFamilyInfo& in1, + const Anvil::QueueFamilyInfo& in2) +{ + return (in1.flags == in2.flags && + in1.min_image_transfer_granularity.depth == in2.min_image_transfer_granularity.depth && + in1.min_image_transfer_granularity.height == in2.min_image_transfer_granularity.height && + in1.min_image_transfer_granularity.width == in2.min_image_transfer_granularity.width && + in1.n_queues == in2.n_queues && + in1.n_timestamp_bits == in2.n_timestamp_bits); +} + +/** Please see header for specification */ +Anvil::SemaphoreProperties::SemaphoreProperties() +{ + /* Stub */ +} + +/** Please see header for specification */ +Anvil::SemaphoreProperties::SemaphoreProperties(const ExternalSemaphoreProperties& in_external_semaphore_properties) + :external_semaphore_properties(in_external_semaphore_properties) +{ + /* Stub */ +} + +/** Please see header for specification */ +Anvil::ShaderModuleStageEntryPoint::ShaderModuleStageEntryPoint() +{ + shader_module_ptr = nullptr; + stage = ShaderStage::UNKNOWN; +} + +/** Please see header for specification */ +Anvil::ShaderModuleStageEntryPoint::ShaderModuleStageEntryPoint(const std::string& in_name, + ShaderModule* in_shader_module_ptr, + ShaderStage in_stage) +{ + anvil_assert(in_shader_module_ptr != nullptr); + + name = in_name; + shader_module_ptr = in_shader_module_ptr; + stage = in_stage; +} + +Anvil::ShaderModuleStageEntryPoint::ShaderModuleStageEntryPoint(const std::string& in_name, + ShaderModuleUniquePtr in_shader_module_ptr, + ShaderStage in_stage) +{ + anvil_assert(in_shader_module_ptr != nullptr); + + name = in_name; + shader_module_ptr = in_shader_module_ptr.get(); + stage = in_stage; + + shader_module_owned_ptr = std::move(in_shader_module_ptr); +} + +/** Please see header for specification */ +Anvil::ShaderModuleStageEntryPoint::ShaderModuleStageEntryPoint(const ShaderModuleStageEntryPoint& in) +{ + name = in.name; + shader_module_ptr = in.shader_module_ptr; + stage = in.stage; +} + +/** Please see header for specification */ +Anvil::ShaderModuleStageEntryPoint::~ShaderModuleStageEntryPoint() +{ + /*
Stub */ +} + +/** Please see header for specification */ +Anvil::ShaderModuleStageEntryPoint& Anvil::ShaderModuleStageEntryPoint::operator=(const Anvil::ShaderModuleStageEntryPoint& in) +{ + name = in.name; + shader_module_ptr = in.shader_module_ptr; + stage = in.stage; + + return *this; +} + +Anvil::SparseImageAspectProperties::SparseImageAspectProperties() +{ + memset(this, + 0, + sizeof(*this) ); +} + +Anvil::SparseImageAspectProperties::SparseImageAspectProperties(const Anvil::SparseImageMemoryRequirements& in_req) +{ + aspect_mask = in_req.format_properties.aspect_mask; + flags = in_req.format_properties.flags; + granularity = in_req.format_properties.image_granularity; + mip_tail_first_lod = in_req.image_mip_tail_first_lod; + mip_tail_offset = in_req.image_mip_tail_offset; + mip_tail_size = in_req.image_mip_tail_size; + mip_tail_stride = in_req.image_mip_tail_stride; +} + +Anvil::SparseMemoryBindingUpdateInfo::SparseMemoryBindingUpdateInfo() +{ + m_dirty = true; + m_fence_ptr = nullptr; +} + +Anvil::SpecializationConstant::SpecializationConstant() +{ + constant_id = UINT32_MAX; + n_bytes = UINT32_MAX; + start_offset = UINT32_MAX; +} + +Anvil::SpecializationConstant::SpecializationConstant(uint32_t in_constant_id, + uint32_t in_n_bytes, + uint32_t in_start_offset) +{ + constant_id = in_constant_id; + n_bytes = in_n_bytes; + start_offset = in_start_offset; +} + +Anvil::PhysicalDeviceFeaturesCoreVK10::PhysicalDeviceFeaturesCoreVK10() + :alpha_to_one (false), + depth_bias_clamp (false), + depth_bounds (false), + depth_clamp (false), + draw_indirect_first_instance (false), + dual_src_blend (false), + fill_mode_non_solid (false), + fragment_stores_and_atomics (false), + full_draw_index_uint32 (false), + geometry_shader (false), + image_cube_array (false), + independent_blend (false), + inherited_queries (false), + large_points (false), + logic_op (false), + multi_draw_indirect (false), + multi_viewport (false), + occlusion_query_precise (false), + pipeline_statistics_query (false), + robust_buffer_access (false), + sampler_anisotropy (false), + sample_rate_shading (false), + shader_clip_distance (false), + shader_cull_distance (false), + shader_float64 (false), + shader_image_gather_extended (false), + shader_int16 (false), + shader_int64 (false), + shader_resource_residency (false), + shader_resource_min_lod (false), + shader_sampled_image_array_dynamic_indexing (false), + shader_storage_buffer_array_dynamic_indexing(false), + shader_storage_image_array_dynamic_indexing (false), + shader_storage_image_extended_formats (false), + shader_storage_image_multisample (false), + shader_storage_image_read_without_format (false), + shader_storage_image_write_without_format (false), + shader_tessellation_and_geometry_point_size (false), + shader_uniform_buffer_array_dynamic_indexing(false), + sparse_binding (false), + sparse_residency_2_samples (false), + sparse_residency_4_samples (false), + sparse_residency_8_samples (false), + sparse_residency_16_samples (false), + sparse_residency_aliased (false), + sparse_residency_buffer (false), + sparse_residency_image_2D (false), + sparse_residency_image_3D (false), + tessellation_shader (false), + texture_compression_ASTC_LDR (false), + texture_compression_BC (false), + texture_compression_ETC2 (false), + variable_multisample_rate (false), + vertex_pipeline_stores_and_atomics (false), + wide_lines (false) +{ + /* Stub */ +} + +Anvil::PhysicalDeviceFeaturesCoreVK10::PhysicalDeviceFeaturesCoreVK10(const VkPhysicalDeviceFeatures& in_physical_device_features) 
+ :alpha_to_one (VK_BOOL32_TO_BOOL(in_physical_device_features.alphaToOne) ), + depth_bias_clamp (VK_BOOL32_TO_BOOL(in_physical_device_features.depthBiasClamp) ), + depth_bounds (VK_BOOL32_TO_BOOL(in_physical_device_features.depthBounds) ), + depth_clamp (VK_BOOL32_TO_BOOL(in_physical_device_features.depthClamp) ), + draw_indirect_first_instance (VK_BOOL32_TO_BOOL(in_physical_device_features.drawIndirectFirstInstance) ), + dual_src_blend (VK_BOOL32_TO_BOOL(in_physical_device_features.dualSrcBlend) ), + fill_mode_non_solid (VK_BOOL32_TO_BOOL(in_physical_device_features.fillModeNonSolid) ), + fragment_stores_and_atomics (VK_BOOL32_TO_BOOL(in_physical_device_features.fragmentStoresAndAtomics) ), + full_draw_index_uint32 (VK_BOOL32_TO_BOOL(in_physical_device_features.fullDrawIndexUint32) ), + geometry_shader (VK_BOOL32_TO_BOOL(in_physical_device_features.geometryShader) ), + image_cube_array (VK_BOOL32_TO_BOOL(in_physical_device_features.imageCubeArray) ), + independent_blend (VK_BOOL32_TO_BOOL(in_physical_device_features.independentBlend) ), + inherited_queries (VK_BOOL32_TO_BOOL(in_physical_device_features.inheritedQueries) ), + large_points (VK_BOOL32_TO_BOOL(in_physical_device_features.largePoints) ), + logic_op (VK_BOOL32_TO_BOOL(in_physical_device_features.logicOp) ), + multi_draw_indirect (VK_BOOL32_TO_BOOL(in_physical_device_features.multiDrawIndirect) ), + multi_viewport (VK_BOOL32_TO_BOOL(in_physical_device_features.multiViewport) ), + occlusion_query_precise (VK_BOOL32_TO_BOOL(in_physical_device_features.occlusionQueryPrecise) ), + pipeline_statistics_query (VK_BOOL32_TO_BOOL(in_physical_device_features.pipelineStatisticsQuery) ), + robust_buffer_access (VK_BOOL32_TO_BOOL(in_physical_device_features.robustBufferAccess) ), + sampler_anisotropy (VK_BOOL32_TO_BOOL(in_physical_device_features.samplerAnisotropy) ), + sample_rate_shading (VK_BOOL32_TO_BOOL(in_physical_device_features.sampleRateShading) ), + shader_clip_distance (VK_BOOL32_TO_BOOL(in_physical_device_features.shaderClipDistance) ), + shader_cull_distance (VK_BOOL32_TO_BOOL(in_physical_device_features.shaderCullDistance) ), + shader_float64 (VK_BOOL32_TO_BOOL(in_physical_device_features.shaderFloat64) ), + shader_image_gather_extended (VK_BOOL32_TO_BOOL(in_physical_device_features.shaderImageGatherExtended) ), + shader_int16 (VK_BOOL32_TO_BOOL(in_physical_device_features.shaderInt16) ), + shader_int64 (VK_BOOL32_TO_BOOL(in_physical_device_features.shaderInt64) ), + shader_resource_residency (VK_BOOL32_TO_BOOL(in_physical_device_features.shaderResourceResidency) ), + shader_resource_min_lod (VK_BOOL32_TO_BOOL(in_physical_device_features.shaderResourceMinLod) ), + shader_sampled_image_array_dynamic_indexing (VK_BOOL32_TO_BOOL(in_physical_device_features.shaderSampledImageArrayDynamicIndexing) ), + shader_storage_buffer_array_dynamic_indexing(VK_BOOL32_TO_BOOL(in_physical_device_features.shaderStorageBufferArrayDynamicIndexing) ), + shader_storage_image_array_dynamic_indexing (VK_BOOL32_TO_BOOL(in_physical_device_features.shaderStorageImageArrayDynamicIndexing) ), + shader_storage_image_extended_formats (VK_BOOL32_TO_BOOL(in_physical_device_features.shaderStorageImageExtendedFormats) ), + shader_storage_image_multisample (VK_BOOL32_TO_BOOL(in_physical_device_features.shaderStorageImageMultisample) ), + shader_storage_image_read_without_format (VK_BOOL32_TO_BOOL(in_physical_device_features.shaderStorageImageReadWithoutFormat) ), + shader_storage_image_write_without_format 
(VK_BOOL32_TO_BOOL(in_physical_device_features.shaderStorageImageWriteWithoutFormat) ), + shader_tessellation_and_geometry_point_size (VK_BOOL32_TO_BOOL(in_physical_device_features.shaderTessellationAndGeometryPointSize) ), + shader_uniform_buffer_array_dynamic_indexing(VK_BOOL32_TO_BOOL(in_physical_device_features.shaderUniformBufferArrayDynamicIndexing) ), + sparse_binding (VK_BOOL32_TO_BOOL(in_physical_device_features.sparseBinding) ), + sparse_residency_2_samples (VK_BOOL32_TO_BOOL(in_physical_device_features.sparseResidency2Samples) ), + sparse_residency_4_samples (VK_BOOL32_TO_BOOL(in_physical_device_features.sparseResidency4Samples) ), + sparse_residency_8_samples (VK_BOOL32_TO_BOOL(in_physical_device_features.sparseResidency8Samples) ), + sparse_residency_16_samples (VK_BOOL32_TO_BOOL(in_physical_device_features.sparseResidency16Samples) ), + sparse_residency_aliased (VK_BOOL32_TO_BOOL(in_physical_device_features.sparseResidencyAliased) ), + sparse_residency_buffer (VK_BOOL32_TO_BOOL(in_physical_device_features.sparseResidencyBuffer) ), + sparse_residency_image_2D (VK_BOOL32_TO_BOOL(in_physical_device_features.sparseResidencyImage2D) ), + sparse_residency_image_3D (VK_BOOL32_TO_BOOL(in_physical_device_features.sparseResidencyImage3D) ), + tessellation_shader (VK_BOOL32_TO_BOOL(in_physical_device_features.tessellationShader) ), + texture_compression_ASTC_LDR (VK_BOOL32_TO_BOOL(in_physical_device_features.textureCompressionASTC_LDR) ), + texture_compression_BC (VK_BOOL32_TO_BOOL(in_physical_device_features.textureCompressionBC) ), + texture_compression_ETC2 (VK_BOOL32_TO_BOOL(in_physical_device_features.textureCompressionETC2) ), + variable_multisample_rate (VK_BOOL32_TO_BOOL(in_physical_device_features.variableMultisampleRate) ), + vertex_pipeline_stores_and_atomics (VK_BOOL32_TO_BOOL(in_physical_device_features.vertexPipelineStoresAndAtomics) ), + wide_lines (VK_BOOL32_TO_BOOL(in_physical_device_features.wideLines) ) +{ + /* Stub */ +} + +VkPhysicalDeviceFeatures Anvil::PhysicalDeviceFeaturesCoreVK10::get_vk_physical_device_features() const +{ + VkPhysicalDeviceFeatures result; + + result.alphaToOne = BOOL_TO_VK_BOOL32(alpha_to_one); + result.depthBiasClamp = BOOL_TO_VK_BOOL32(depth_bias_clamp); + result.depthBounds = BOOL_TO_VK_BOOL32(depth_bounds); + result.depthClamp = BOOL_TO_VK_BOOL32(depth_clamp); + result.drawIndirectFirstInstance = BOOL_TO_VK_BOOL32(draw_indirect_first_instance); + result.dualSrcBlend = BOOL_TO_VK_BOOL32(dual_src_blend); + result.fillModeNonSolid = BOOL_TO_VK_BOOL32(fill_mode_non_solid); + result.fragmentStoresAndAtomics = BOOL_TO_VK_BOOL32(fragment_stores_and_atomics); + result.fullDrawIndexUint32 = BOOL_TO_VK_BOOL32(full_draw_index_uint32); + result.geometryShader = BOOL_TO_VK_BOOL32(geometry_shader); + result.imageCubeArray = BOOL_TO_VK_BOOL32(image_cube_array); + result.independentBlend = BOOL_TO_VK_BOOL32(independent_blend); + result.inheritedQueries = BOOL_TO_VK_BOOL32(inherited_queries); + result.largePoints = BOOL_TO_VK_BOOL32(large_points); + result.logicOp = BOOL_TO_VK_BOOL32(logic_op); + result.multiDrawIndirect = BOOL_TO_VK_BOOL32(multi_draw_indirect); + result.multiViewport = BOOL_TO_VK_BOOL32(multi_viewport); + result.occlusionQueryPrecise = BOOL_TO_VK_BOOL32(occlusion_query_precise); + result.pipelineStatisticsQuery = BOOL_TO_VK_BOOL32(pipeline_statistics_query); + result.robustBufferAccess = BOOL_TO_VK_BOOL32(robust_buffer_access); + result.samplerAnisotropy = BOOL_TO_VK_BOOL32(sampler_anisotropy); + result.sampleRateShading = 
BOOL_TO_VK_BOOL32(sample_rate_shading); + result.shaderClipDistance = BOOL_TO_VK_BOOL32(shader_clip_distance); + result.shaderCullDistance = BOOL_TO_VK_BOOL32(shader_cull_distance); + result.shaderFloat64 = BOOL_TO_VK_BOOL32(shader_float64); + result.shaderImageGatherExtended = BOOL_TO_VK_BOOL32(shader_image_gather_extended); + result.shaderInt16 = BOOL_TO_VK_BOOL32(shader_int16); + result.shaderInt64 = BOOL_TO_VK_BOOL32(shader_int64); + result.shaderResourceResidency = BOOL_TO_VK_BOOL32(shader_resource_residency); + result.shaderResourceMinLod = BOOL_TO_VK_BOOL32(shader_resource_min_lod); + result.shaderSampledImageArrayDynamicIndexing = BOOL_TO_VK_BOOL32(shader_sampled_image_array_dynamic_indexing); + result.shaderStorageBufferArrayDynamicIndexing = BOOL_TO_VK_BOOL32(shader_storage_buffer_array_dynamic_indexing); + result.shaderStorageImageArrayDynamicIndexing = BOOL_TO_VK_BOOL32(shader_storage_image_array_dynamic_indexing); + result.shaderStorageImageExtendedFormats = BOOL_TO_VK_BOOL32(shader_storage_image_extended_formats); + result.shaderStorageImageMultisample = BOOL_TO_VK_BOOL32(shader_storage_image_multisample); + result.shaderStorageImageReadWithoutFormat = BOOL_TO_VK_BOOL32(shader_storage_image_read_without_format); + result.shaderStorageImageWriteWithoutFormat = BOOL_TO_VK_BOOL32(shader_storage_image_write_without_format); + result.shaderTessellationAndGeometryPointSize = BOOL_TO_VK_BOOL32(shader_tessellation_and_geometry_point_size); + result.shaderUniformBufferArrayDynamicIndexing = BOOL_TO_VK_BOOL32(shader_uniform_buffer_array_dynamic_indexing); + result.sparseBinding = BOOL_TO_VK_BOOL32(sparse_binding); + result.sparseResidency2Samples = BOOL_TO_VK_BOOL32(sparse_residency_2_samples); + result.sparseResidency4Samples = BOOL_TO_VK_BOOL32(sparse_residency_4_samples); + result.sparseResidency8Samples = BOOL_TO_VK_BOOL32(sparse_residency_8_samples); + result.sparseResidency16Samples = BOOL_TO_VK_BOOL32(sparse_residency_16_samples); + result.sparseResidencyAliased = BOOL_TO_VK_BOOL32(sparse_residency_aliased); + result.sparseResidencyBuffer = BOOL_TO_VK_BOOL32(sparse_residency_buffer); + result.sparseResidencyImage2D = BOOL_TO_VK_BOOL32(sparse_residency_image_2D); + result.sparseResidencyImage3D = BOOL_TO_VK_BOOL32(sparse_residency_image_3D); + result.tessellationShader = BOOL_TO_VK_BOOL32(tessellation_shader); + result.textureCompressionASTC_LDR = BOOL_TO_VK_BOOL32(texture_compression_ASTC_LDR); + result.textureCompressionBC = BOOL_TO_VK_BOOL32(texture_compression_BC); + result.textureCompressionETC2 = BOOL_TO_VK_BOOL32(texture_compression_ETC2); + result.variableMultisampleRate = BOOL_TO_VK_BOOL32(variable_multisample_rate); + result.vertexPipelineStoresAndAtomics = BOOL_TO_VK_BOOL32(vertex_pipeline_stores_and_atomics); + result.wideLines = BOOL_TO_VK_BOOL32(wide_lines); + + return result; +} + +bool Anvil::PhysicalDeviceFeaturesCoreVK10::operator==(const Anvil::PhysicalDeviceFeaturesCoreVK10& in_data) const +{ + return (alpha_to_one == in_data.alpha_to_one) && + (depth_bias_clamp == in_data.depth_bias_clamp) && + (depth_bounds == in_data.depth_bounds) && + (depth_clamp == in_data.depth_clamp) && + (draw_indirect_first_instance == in_data.draw_indirect_first_instance) && + (dual_src_blend == in_data.dual_src_blend) && + (fill_mode_non_solid == in_data.fill_mode_non_solid) && + (fragment_stores_and_atomics == in_data.fragment_stores_and_atomics) && + (full_draw_index_uint32 == in_data.full_draw_index_uint32) && + (geometry_shader == in_data.geometry_shader) && + (image_cube_array 
== in_data.image_cube_array) && + (independent_blend == in_data.independent_blend) && + (inherited_queries == in_data.inherited_queries) && + (large_points == in_data.large_points) && + (logic_op == in_data.logic_op) && + (multi_draw_indirect == in_data.multi_draw_indirect) && + (multi_viewport == in_data.multi_viewport) && + (occlusion_query_precise == in_data.occlusion_query_precise) && + (pipeline_statistics_query == in_data.pipeline_statistics_query) && + (robust_buffer_access == in_data.robust_buffer_access) && + (sampler_anisotropy == in_data.sampler_anisotropy) && + (sample_rate_shading == in_data.sample_rate_shading) && + (shader_clip_distance == in_data.shader_clip_distance) && + (shader_cull_distance == in_data.shader_cull_distance) && + (shader_float64 == in_data.shader_float64) && + (shader_image_gather_extended == in_data.shader_image_gather_extended) && + (shader_int16 == in_data.shader_int16) && + (shader_int64 == in_data.shader_int64) && + (shader_resource_residency == in_data.shader_resource_residency) && + (shader_resource_min_lod == in_data.shader_resource_min_lod) && + (shader_sampled_image_array_dynamic_indexing == in_data.shader_sampled_image_array_dynamic_indexing) && + (shader_storage_buffer_array_dynamic_indexing == in_data.shader_storage_buffer_array_dynamic_indexing) && + (shader_storage_image_array_dynamic_indexing == in_data.shader_storage_image_array_dynamic_indexing) && + (shader_storage_image_extended_formats == in_data.shader_storage_image_extended_formats) && + (shader_storage_image_multisample == in_data.shader_storage_image_multisample) && + (shader_storage_image_read_without_format == in_data.shader_storage_image_read_without_format) && + (shader_storage_image_write_without_format == in_data.shader_storage_image_write_without_format) && + (shader_tessellation_and_geometry_point_size == in_data.shader_tessellation_and_geometry_point_size) && + (shader_uniform_buffer_array_dynamic_indexing == in_data.shader_uniform_buffer_array_dynamic_indexing) && + (sparse_binding == in_data.sparse_binding) && + (sparse_residency_2_samples == in_data.sparse_residency_2_samples) && + (sparse_residency_4_samples == in_data.sparse_residency_4_samples) && + (sparse_residency_8_samples == in_data.sparse_residency_8_samples) && + (sparse_residency_16_samples == in_data.sparse_residency_16_samples) && + (sparse_residency_aliased == in_data.sparse_residency_aliased) && + (sparse_residency_buffer == in_data.sparse_residency_buffer) && + (sparse_residency_image_2D == in_data.sparse_residency_image_2D) && + (sparse_residency_image_3D == in_data.sparse_residency_image_3D) && + (tessellation_shader == in_data.tessellation_shader) && + (texture_compression_ASTC_LDR == in_data.texture_compression_ASTC_LDR) && + (texture_compression_BC == in_data.texture_compression_BC) && + (texture_compression_ETC2 == in_data.texture_compression_ETC2) && + (variable_multisample_rate == in_data.variable_multisample_rate) && + (vertex_pipeline_stores_and_atomics == in_data.vertex_pipeline_stores_and_atomics) && + (wide_lines == in_data.wide_lines); +} + +bool Anvil::PhysicalDeviceFeaturesCoreVK11::operator==(const PhysicalDeviceFeaturesCoreVK11& in_data) const +{ + return (protected_memory_features == in_data.protected_memory_features); +} + +Anvil::PhysicalDeviceFeaturesCoreVK11::PhysicalDeviceFeaturesCoreVK11() +{ + /* Stub */ +} + +Anvil::PhysicalDeviceFeaturesCoreVK11::PhysicalDeviceFeaturesCoreVK11(const PhysicalDeviceProtectedMemoryFeatures& in_protected_memory_features) + 
:protected_memory_features(in_protected_memory_features) +{ + /* Stub */ +} + +Anvil::SubmitInfo::SubmitInfo(uint32_t in_n_command_buffers, + Anvil::CommandBufferBase* in_opt_single_cmd_buffer_ptr, + Anvil::CommandBufferBase* const* in_opt_cmd_buffer_ptrs_ptr, + uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_opt_semaphore_to_signal_ptrs_ptr, + uint32_t in_n_semaphores_to_wait_on, + Anvil::Semaphore* const* in_opt_semaphore_to_wait_on_ptrs_ptr, + const Anvil::PipelineStageFlags* in_opt_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr) + :command_buffers_mgpu_ptr (nullptr), + command_buffers_sgpu_ptr (in_opt_cmd_buffer_ptrs_ptr), +#if defined(_WIN32) + d3d12_fence_signal_semaphore_values_ptr (nullptr), + d3d12_fence_wait_semaphore_values_ptr (nullptr), +#endif + dst_stage_wait_masks (in_n_semaphores_to_wait_on), + fence_ptr (in_opt_fence_ptr), +#if defined(_WIN32) + keyed_mutex_n_acquire_keys (0), + keyed_mutex_acquire_d3d11_memory_block_ptrs_ptr(nullptr), + keyed_mutex_acquire_mutex_key_value_ptrs (nullptr), + keyed_mutex_acquire_timeout_ptrs (nullptr), + keyed_mutex_n_release_keys (0), + keyed_mutex_release_d3d11_memory_block_ptrs_ptr(nullptr), + keyed_mutex_release_mutex_key_value_ptrs (nullptr), +#endif + is_protected (false), + n_command_buffers (in_n_command_buffers), + n_signal_semaphores (in_n_semaphores_to_signal), + n_wait_semaphores (in_n_semaphores_to_wait_on), + signal_semaphores_mgpu_ptr (nullptr), + signal_semaphores_sgpu_ptr (in_opt_semaphore_to_signal_ptrs_ptr), + should_block (in_should_block), + timeout (UINT64_MAX), + type (SubmissionType::SGPU), + wait_semaphores_mgpu_ptr (nullptr), + wait_semaphores_sgpu_ptr (in_opt_semaphore_to_wait_on_ptrs_ptr) +{ + for (uint32_t n_wait_mask = 0; + n_wait_mask < in_n_semaphores_to_wait_on; + ++n_wait_mask) + { + dst_stage_wait_masks.at(n_wait_mask) = in_opt_dst_stage_masks_to_wait_on_ptrs[n_wait_mask].get_vk(); + } + + if (in_opt_single_cmd_buffer_ptr) + { + anvil_assert(in_n_command_buffers == 1); + + helper_cmd_buffer_raw_ptr = in_opt_single_cmd_buffer_ptr; + command_buffers_sgpu_ptr = &helper_cmd_buffer_raw_ptr; + } + else + { + anvil_assert((in_n_command_buffers == 0) || + (in_n_command_buffers != 0 && in_opt_cmd_buffer_ptrs_ptr != nullptr) ); + } + + anvil_assert((in_n_semaphores_to_signal == 0) || + (in_n_semaphores_to_signal != 0 && in_opt_semaphore_to_signal_ptrs_ptr != nullptr) ); + + anvil_assert((in_n_semaphores_to_wait_on == 0) || + (in_n_semaphores_to_wait_on != 0 && in_opt_semaphore_to_wait_on_ptrs_ptr != nullptr && in_opt_dst_stage_masks_to_wait_on_ptrs != nullptr) ); +} + +Anvil::SubmitInfo::SubmitInfo(uint32_t in_n_command_buffer_submissions, + const CommandBufferMGPUSubmission* in_opt_command_buffer_submissions_ptr, + uint32_t in_n_signal_semaphore_submissions, + const SemaphoreMGPUSubmission* in_opt_signal_semaphore_submissions_ptr, + uint32_t in_n_wait_semaphore_submissions, + const SemaphoreMGPUSubmission* in_opt_wait_semaphore_submissions_ptr, + const Anvil::PipelineStageFlags* in_opt_dst_stage_masks_to_wait_on_ptr, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr) + :command_buffers_mgpu_ptr (in_opt_command_buffer_submissions_ptr), + command_buffers_sgpu_ptr (nullptr), +#if defined(_WIN32) + d3d12_fence_signal_semaphore_values_ptr (nullptr), + d3d12_fence_wait_semaphore_values_ptr (nullptr), +#endif + dst_stage_wait_masks (in_n_wait_semaphore_submissions), + fence_ptr (in_opt_fence_ptr), + is_protected (false), +#if defined(_WIN32) + 
keyed_mutex_n_acquire_keys (0), + keyed_mutex_acquire_d3d11_memory_block_ptrs_ptr(nullptr), + keyed_mutex_acquire_mutex_key_value_ptrs (nullptr), + keyed_mutex_acquire_timeout_ptrs (nullptr), + keyed_mutex_n_release_keys (0), + keyed_mutex_release_d3d11_memory_block_ptrs_ptr(nullptr), + keyed_mutex_release_mutex_key_value_ptrs (nullptr), +#endif + n_command_buffers (in_n_command_buffer_submissions), + n_signal_semaphores (in_n_signal_semaphore_submissions), + n_wait_semaphores (in_n_wait_semaphore_submissions), + signal_semaphores_mgpu_ptr (in_opt_signal_semaphore_submissions_ptr), + signal_semaphores_sgpu_ptr (nullptr), + should_block (in_should_block), + timeout (UINT64_MAX), + type (SubmissionType::MGPU), + wait_semaphores_mgpu_ptr (in_opt_wait_semaphore_submissions_ptr), + wait_semaphores_sgpu_ptr (nullptr) +{ + for (uint32_t n_wait_mask = 0; + n_wait_mask < in_n_wait_semaphore_submissions; + ++n_wait_mask) + { + dst_stage_wait_masks.at(n_wait_mask) = in_opt_dst_stage_masks_to_wait_on_ptr[n_wait_mask].get_vk(); + } + + anvil_assert((in_n_command_buffer_submissions == 0) || + (in_n_command_buffer_submissions != 0 && in_opt_command_buffer_submissions_ptr != nullptr) ); + + anvil_assert((in_n_signal_semaphore_submissions == 0) || + (in_n_signal_semaphore_submissions != 0 && in_opt_signal_semaphore_submissions_ptr != nullptr) ); + + anvil_assert((in_n_wait_semaphore_submissions == 0) || + (in_n_wait_semaphore_submissions != 0 && in_opt_wait_semaphore_submissions_ptr != nullptr && in_opt_dst_stage_masks_to_wait_on_ptr != nullptr) ); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create(Anvil::CommandBufferBase* in_opt_cmd_buffer_ptr, + uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_opt_semaphore_to_signal_ptrs_ptr, + uint32_t in_n_semaphores_to_wait_on, + Anvil::Semaphore* const* in_opt_semaphore_to_wait_on_ptrs_ptr, + const Anvil::PipelineStageFlags* in_opt_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr) +{ + return Anvil::SubmitInfo((in_opt_cmd_buffer_ptr != nullptr ? 
1 : 0), + in_opt_cmd_buffer_ptr, + nullptr, /* in_opt_command_buffer_submissions_ptr */ + in_n_semaphores_to_signal, + in_opt_semaphore_to_signal_ptrs_ptr, + in_n_semaphores_to_wait_on, + in_opt_semaphore_to_wait_on_ptrs_ptr, + in_opt_dst_stage_masks_to_wait_on_ptrs, + in_should_block, + in_opt_fence_ptr); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create(uint32_t in_n_cmd_buffers, + Anvil::CommandBufferBase* const* in_opt_cmd_buffer_ptrs_ptr, + uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_opt_semaphore_to_signal_ptrs_ptr, + uint32_t in_n_semaphores_to_wait_on, + Anvil::Semaphore* const* in_opt_semaphore_to_wait_on_ptrs_ptr, + const Anvil::PipelineStageFlags* in_opt_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr) +{ + return Anvil::SubmitInfo(in_n_cmd_buffers, + nullptr, /* in_opt_command_buffer_single_submission_ptr */ + in_opt_cmd_buffer_ptrs_ptr, + in_n_semaphores_to_signal, + in_opt_semaphore_to_signal_ptrs_ptr, + in_n_semaphores_to_wait_on, + in_opt_semaphore_to_wait_on_ptrs_ptr, + in_opt_dst_stage_masks_to_wait_on_ptrs, + in_should_block, + in_opt_fence_ptr); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create_execute(Anvil::CommandBufferBase* in_cmd_buffer_ptr, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr) +{ + anvil_assert(in_cmd_buffer_ptr != nullptr); + + return Anvil::SubmitInfo(1, /* in_n_cmd_buffers */ + in_cmd_buffer_ptr, + nullptr, /* in_opt_command_buffer_submissions_ptr */ + 0, /* in_n_semaphores_to_signal */ + nullptr, /* in_opt_semaphore_to_signal_ptrs_ptr */ + 0, /* in_n_semaphores_to_wait_on */ + nullptr, /* in_opt_semaphore_to_wait_on_ptrs_ptr */ + nullptr, /* in_opt_dst_stage_masks_to_wait_on_ptrs */ + in_should_block, + in_opt_fence_ptr); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create_execute(Anvil::CommandBufferBase* const* in_cmd_buffer_ptrs_ptr, + uint32_t in_n_cmd_buffers, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr) +{ + anvil_assert(in_cmd_buffer_ptrs_ptr != nullptr); + anvil_assert(in_n_cmd_buffers > 0); + + return Anvil::SubmitInfo(in_n_cmd_buffers, + nullptr, /* in_opt_command_buffer_single_submission_ptr */ + in_cmd_buffer_ptrs_ptr, + 0, /* in_n_semaphores_to_signal */ + nullptr, /* in_opt_semaphore_to_signal_ptrs_ptr */ + 0, /* in_n_semaphores_to_wait_on */ + nullptr, /* in_opt_semaphore_to_wait_on_ptrs_ptr */ + nullptr, /* in_opt_dst_stage_masks_to_wait_on_ptrs */ + in_should_block, + in_opt_fence_ptr); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create_execute(const CommandBufferMGPUSubmission* in_cmd_buffer_submissions_ptr, + uint32_t in_n_command_buffer_submissions, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr) +{ + anvil_assert(in_cmd_buffer_submissions_ptr != nullptr); + anvil_assert(in_n_command_buffer_submissions > 0); + + return Anvil::SubmitInfo(in_n_command_buffer_submissions, + in_cmd_buffer_submissions_ptr, + 0, /* in_n_semaphores_to_signal */ + nullptr, /* in_opt_semaphore_to_signal_ptrs_ptr */ + 0, /* in_n_semaphores_to_wait_on */ + nullptr, /* in_opt_semaphore_to_wait_on_ptrs_ptr */ + nullptr, /* in_opt_dst_stage_masks_to_wait_on_ptrs */ + in_should_block, + in_opt_fence_ptr); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create_execute_signal(Anvil::CommandBufferBase* in_cmd_buffer_ptr, + uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_semaphore_to_signal_ptrs_ptr, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr) +{ + anvil_assert(in_cmd_buffer_ptr != nullptr); + anvil_assert(in_semaphore_to_signal_ptrs_ptr != 
nullptr); + + return Anvil::SubmitInfo(1, /* in_n_cmd_buffers */ + in_cmd_buffer_ptr, + nullptr, /* in_opt_command_buffer_submissions_ptr */ + in_n_semaphores_to_signal, + in_semaphore_to_signal_ptrs_ptr, + 0, /* in_n_semaphores_to_wait_on */ + nullptr, /* in_opt_semaphore_to_wait_on_ptrs_ptr */ + nullptr, /* in_opt_dst_stage_masks_to_wait_on_ptrs */ + in_should_block, + in_opt_fence_ptr); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create_execute_signal(Anvil::CommandBufferBase* const* in_cmd_buffer_ptrs_ptr, + uint32_t in_n_cmd_buffers, + uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_semaphore_to_signal_ptrs_ptr, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr) +{ + anvil_assert(in_cmd_buffer_ptrs_ptr != nullptr); + anvil_assert(in_n_cmd_buffers > 0); + anvil_assert(in_semaphore_to_signal_ptrs_ptr != nullptr); + + return Anvil::SubmitInfo(in_n_cmd_buffers, + nullptr, /* in_opt_command_buffer_single_submission_ptr */ + in_cmd_buffer_ptrs_ptr, + in_n_semaphores_to_signal, + in_semaphore_to_signal_ptrs_ptr, + 0, /* in_n_semaphores_to_wait_on */ + nullptr, /* in_opt_semaphore_to_wait_on_ptrs_ptr */ + nullptr, /* in_opt_dst_stage_masks_to_wait_on_ptrs */ + in_should_block, + in_opt_fence_ptr); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create_execute_signal(const Anvil::CommandBufferMGPUSubmission* in_cmd_buffer_submissions_ptr, + uint32_t in_n_command_buffer_submissions, + uint32_t in_n_signal_semaphore_submissions, + const Anvil::SemaphoreMGPUSubmission* in_signal_semaphore_submissions_ptr, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr) +{ + anvil_assert(in_cmd_buffer_submissions_ptr != nullptr); + anvil_assert(in_n_command_buffer_submissions > 0); + anvil_assert(in_signal_semaphore_submissions_ptr != nullptr); + + return Anvil::SubmitInfo(in_n_command_buffer_submissions, + in_cmd_buffer_submissions_ptr, + in_n_signal_semaphore_submissions, + in_signal_semaphore_submissions_ptr, + 0, /* in_n_semaphores_to_wait_on */ + nullptr, /* in_opt_semaphore_to_wait_on_ptrs_ptr */ + nullptr, /* in_opt_dst_stage_masks_to_wait_on_ptrs */ + in_should_block, + in_opt_fence_ptr); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create_signal(uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_semaphore_to_signal_ptrs_ptr, + Anvil::Fence* in_opt_fence_ptr) +{ + anvil_assert(in_n_semaphores_to_signal > 0); + anvil_assert(in_semaphore_to_signal_ptrs_ptr != nullptr); + + return Anvil::SubmitInfo(0, /* in_n_command_buffers */ + nullptr, /* in_opt_command_buffer_single_submission_ptr */ + nullptr, /* in_opt_command_buffer_submissions_ptr */ + in_n_semaphores_to_signal, + in_semaphore_to_signal_ptrs_ptr, + 0, /* in_n_semaphores_to_wait_on */ + nullptr, /* in_semaphore_to_wait_on_ptrs_ptr */ + nullptr, /* in_dst_stage_masks_to_wait_on_ptrs */ + true, /* in_should_block */ + in_opt_fence_ptr); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create_signal(uint32_t in_n_signal_semaphore_submissions, + const Anvil::SemaphoreMGPUSubmission* in_signal_semaphore_submissions_ptr, + Anvil::Fence* in_opt_fence_ptr) +{ + anvil_assert(in_n_signal_semaphore_submissions > 0); + anvil_assert(in_signal_semaphore_submissions_ptr != nullptr); + + return Anvil::SubmitInfo(0, /* in_n_command_buffer_submissions, */ + nullptr, /* in_opt_command_buffer_submissions_ptr, */ + in_n_signal_semaphore_submissions, + in_signal_semaphore_submissions_ptr, + 0, /* in_n_wait_semaphore_submissions, */ + nullptr, /* in_opt_wait_semaphore_submissions_ptr, */ + nullptr, /* in_opt_dst_stage_masks_to_wait_on_ptr, 
*/ + true, /* in_should_block, */ + in_opt_fence_ptr); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create_signal_wait(uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_semaphore_to_signal_ptrs_ptr, + uint32_t in_n_semaphores_to_wait_on, + Anvil::Semaphore* const* in_semaphore_to_wait_on_ptrs_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr) +{ + anvil_assert(in_n_semaphores_to_signal > 0); + anvil_assert(in_semaphore_to_signal_ptrs_ptr != nullptr); + anvil_assert(in_n_semaphores_to_wait_on > 0); + anvil_assert(in_semaphore_to_wait_on_ptrs_ptr != nullptr); + + return Anvil::SubmitInfo(0, /* in_n_command_buffers */ + nullptr, /* in_opt_command_buffer_single_submission_ptr */ + nullptr, /* in_opt_command_buffer_submissions_ptr */ + in_n_semaphores_to_signal, + in_semaphore_to_signal_ptrs_ptr, + in_n_semaphores_to_wait_on, + in_semaphore_to_wait_on_ptrs_ptr, + in_dst_stage_masks_to_wait_on_ptrs, + in_should_block, + in_opt_fence_ptr); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create_signal_wait(uint32_t in_n_signal_semaphore_submissions, + const Anvil::SemaphoreMGPUSubmission* in_signal_semaphore_submissions_ptr, + uint32_t in_n_wait_semaphore_submissions, + const Anvil::SemaphoreMGPUSubmission* in_wait_semaphore_submissions_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr) +{ + anvil_assert(in_n_signal_semaphore_submissions > 0); + anvil_assert(in_signal_semaphore_submissions_ptr != nullptr); + anvil_assert(in_n_wait_semaphore_submissions > 0); + anvil_assert(in_wait_semaphore_submissions_ptr != nullptr); + + return Anvil::SubmitInfo(0, /* in_n_command_buffers */ + nullptr, /* in_opt_command_buffer_submissions_ptr */ + in_n_signal_semaphore_submissions, + in_signal_semaphore_submissions_ptr, + in_n_wait_semaphore_submissions, + in_wait_semaphore_submissions_ptr, + in_dst_stage_masks_to_wait_on_ptrs, + in_should_block, + in_opt_fence_ptr); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create_wait(uint32_t in_n_semaphores_to_wait_on, + Anvil::Semaphore* const* in_semaphore_to_wait_on_ptrs_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + Anvil::Fence* in_opt_fence_ptr) +{ + anvil_assert(in_dst_stage_masks_to_wait_on_ptrs != nullptr); + anvil_assert(in_n_semaphores_to_wait_on > 0); + anvil_assert(in_semaphore_to_wait_on_ptrs_ptr != nullptr); + + return Anvil::SubmitInfo(0, /* in_n_command_buffers */ + nullptr, /* in_opt_command_buffer_single_submission_ptr */ + nullptr, /* in_opt_command_buffer_submissions_ptr */ + 0, /* in_n_semaphores_to_signal */ + nullptr, /* in_semaphore_to_signal_ptrs_ptr */ + in_n_semaphores_to_wait_on, + in_semaphore_to_wait_on_ptrs_ptr, + in_dst_stage_masks_to_wait_on_ptrs, + true, /* in_should_block */ + in_opt_fence_ptr); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create_wait(uint32_t in_n_wait_semaphore_submissions, + const Anvil::SemaphoreMGPUSubmission* in_wait_semaphore_submissions_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + Anvil::Fence* in_opt_fence_ptr) +{ + anvil_assert(in_dst_stage_masks_to_wait_on_ptrs != nullptr); + anvil_assert(in_n_wait_semaphore_submissions > 0); + anvil_assert(in_wait_semaphore_submissions_ptr != nullptr); + + return Anvil::SubmitInfo(0, /* in_n_command_buffer_submissions, */ + nullptr, /* in_opt_command_buffer_submissions_ptr, */ + 0, /* in_n_signal_semaphore_submissions, */ + nullptr, /* 
in_opt_signal_semaphore_submissions_ptr,*/ + in_n_wait_semaphore_submissions, + in_wait_semaphore_submissions_ptr, + in_dst_stage_masks_to_wait_on_ptrs, + true, /* in_should_block, */ + in_opt_fence_ptr); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create_wait_execute(Anvil::CommandBufferBase* in_cmd_buffer_ptr, + uint32_t in_n_semaphores_to_wait_on, + Anvil::Semaphore* const* in_semaphore_to_wait_on_ptrs_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr) +{ + anvil_assert(in_cmd_buffer_ptr != nullptr); + anvil_assert(in_dst_stage_masks_to_wait_on_ptrs != nullptr); + anvil_assert(in_semaphore_to_wait_on_ptrs_ptr != nullptr); + + return Anvil::SubmitInfo(1, /* in_n_command_buffers */ + in_cmd_buffer_ptr, + nullptr, /* in_opt_command_buffer_submissions_ptr */ + 0, /* in_n_semaphores_to_signal */ + nullptr, /* in_opt_semaphore_to_signal_ptrs_ptr */ + in_n_semaphores_to_wait_on, + in_semaphore_to_wait_on_ptrs_ptr, + in_dst_stage_masks_to_wait_on_ptrs, + in_should_block, + in_opt_fence_ptr); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create_wait_execute(Anvil::CommandBufferBase* const* in_cmd_buffer_ptrs_ptr, + uint32_t in_n_cmd_buffers, + uint32_t in_n_semaphores_to_wait_on, + Anvil::Semaphore* const* in_semaphore_to_wait_on_ptrs_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr) +{ + anvil_assert(in_cmd_buffer_ptrs_ptr != nullptr); + anvil_assert(in_n_cmd_buffers > 0); + anvil_assert(in_dst_stage_masks_to_wait_on_ptrs != nullptr); + anvil_assert(in_semaphore_to_wait_on_ptrs_ptr != nullptr); + + return Anvil::SubmitInfo(in_n_cmd_buffers, + nullptr, /* in_opt_command_buffer_single_submission_ptr */ + in_cmd_buffer_ptrs_ptr, + 0, /* in_n_semaphores_to_signal */ + nullptr, /* in_opt_semaphore_to_signal_ptrs_ptr */ + in_n_semaphores_to_wait_on, + in_semaphore_to_wait_on_ptrs_ptr, + in_dst_stage_masks_to_wait_on_ptrs, + in_should_block, + in_opt_fence_ptr); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create_wait_execute(const Anvil::CommandBufferMGPUSubmission* in_cmd_buffer_submissions_ptr, + uint32_t in_n_command_buffer_submissions, + uint32_t in_n_wait_semaphore_submissions, + const Anvil::SemaphoreMGPUSubmission* in_wait_semaphore_submissions_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr) +{ + anvil_assert(in_cmd_buffer_submissions_ptr != nullptr); + anvil_assert(in_n_command_buffer_submissions > 0); + anvil_assert(in_wait_semaphore_submissions_ptr != nullptr); + + return Anvil::SubmitInfo(in_n_command_buffer_submissions, + in_cmd_buffer_submissions_ptr, + 0, /* in_n_semaphores_to_signal */ + nullptr, /* in_opt_semaphore_to_signal_ptrs_ptr */ + in_n_wait_semaphore_submissions, + in_wait_semaphore_submissions_ptr, + in_dst_stage_masks_to_wait_on_ptrs, + in_should_block, + in_opt_fence_ptr); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create_wait_execute_signal(Anvil::CommandBufferBase* in_cmd_buffer_ptr, + uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_semaphore_to_signal_ptrs_ptr, + uint32_t in_n_semaphores_to_wait_on, + Anvil::Semaphore* const* in_semaphore_to_wait_on_ptrs_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr) +{ + anvil_assert(in_cmd_buffer_ptr != nullptr); + anvil_assert(in_semaphore_to_signal_ptrs_ptr != nullptr); + 
anvil_assert(in_semaphore_to_wait_on_ptrs_ptr != nullptr); + + return Anvil::SubmitInfo(1, /* in_n_command_buffers */ + in_cmd_buffer_ptr, + nullptr, /* in_opt_command_buffer_submissions_ptr */ + in_n_semaphores_to_signal, + in_semaphore_to_signal_ptrs_ptr, + in_n_semaphores_to_wait_on, + in_semaphore_to_wait_on_ptrs_ptr, + in_dst_stage_masks_to_wait_on_ptrs, + in_should_block, + in_opt_fence_ptr); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create_wait_execute_signal(Anvil::CommandBufferBase* const* in_cmd_buffer_ptrs_ptr, + uint32_t in_n_cmd_buffers, + uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_semaphore_to_signal_ptrs_ptr, + uint32_t in_n_semaphores_to_wait_on, + Anvil::Semaphore* const* in_semaphore_to_wait_on_ptrs_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr) +{ + anvil_assert(in_cmd_buffer_ptrs_ptr != nullptr); + anvil_assert(in_n_cmd_buffers > 0); + anvil_assert(in_semaphore_to_signal_ptrs_ptr != nullptr); + anvil_assert(in_semaphore_to_wait_on_ptrs_ptr != nullptr); + + return Anvil::SubmitInfo(in_n_cmd_buffers, + nullptr, /* in_opt_command_buffer_single_submission_ptr */ + in_cmd_buffer_ptrs_ptr, + in_n_semaphores_to_signal, + in_semaphore_to_signal_ptrs_ptr, + in_n_semaphores_to_wait_on, + in_semaphore_to_wait_on_ptrs_ptr, + in_dst_stage_masks_to_wait_on_ptrs, + in_should_block, + in_opt_fence_ptr); +} + +Anvil::SubmitInfo Anvil::SubmitInfo::create_wait_execute_signal(const Anvil::CommandBufferMGPUSubmission* in_cmd_buffer_submissions_ptr, + uint32_t in_n_command_buffer_submissions, + uint32_t in_n_signal_semaphore_submissions, + const Anvil::SemaphoreMGPUSubmission* in_signal_semaphore_submissions_ptr, + uint32_t in_n_wait_semaphore_submissions, + const Anvil::SemaphoreMGPUSubmission* in_wait_semaphore_submissions_ptr, + const Anvil::PipelineStageFlags* in_dst_stage_masks_to_wait_on_ptrs, + bool in_should_block, + Anvil::Fence* in_opt_fence_ptr) +{ + anvil_assert(in_cmd_buffer_submissions_ptr != nullptr); + anvil_assert(in_n_command_buffer_submissions > 0); + anvil_assert(in_dst_stage_masks_to_wait_on_ptrs != nullptr); + anvil_assert(in_signal_semaphore_submissions_ptr != nullptr); + anvil_assert(in_wait_semaphore_submissions_ptr != nullptr); + + return Anvil::SubmitInfo(in_n_command_buffer_submissions, + in_cmd_buffer_submissions_ptr, + in_n_signal_semaphore_submissions, + in_signal_semaphore_submissions_ptr, + in_n_wait_semaphore_submissions, + in_wait_semaphore_submissions_ptr, + in_dst_stage_masks_to_wait_on_ptrs, + in_should_block, + in_opt_fence_ptr); +} + diff --git a/src/misc/types_utils.cpp b/src/misc/types_utils.cpp new file mode 100644 index 00000000..cc937839 --- /dev/null +++ b/src/misc/types_utils.cpp @@ -0,0 +1,1100 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#include "misc/types.h" +#include "wrappers/buffer.h" +#include "wrappers/device.h" + +/** Please see header for specification */ +void Anvil::Utils::get_version_chunks_for_api_version(const Anvil::APIVersion& in_api_version, + uint32_t* out_major_version_ptr, + uint32_t* out_minor_version_ptr) +{ + switch (in_api_version) + { + case Anvil::APIVersion::_1_0: + { + *out_major_version_ptr = 1; + *out_minor_version_ptr = 0; + + break; + } + + case Anvil::APIVersion::_1_1: + { + *out_major_version_ptr = 1; + *out_minor_version_ptr = 1; + + break; + } + + default: + { + /* in_api_version must NOT be Anvil::APIVersion::UNKNOWN! */ + anvil_assert_fail(); + } + } +} + +/** Please see header for specification */ +Anvil::MemoryFeatureFlags Anvil::Utils::get_memory_feature_flags_from_vk_property_flags(Anvil::MemoryPropertyFlags in_mem_type_flags, + Anvil::MemoryHeapFlags in_mem_heap_flags) +{ + Anvil::MemoryFeatureFlags result; + + if ((in_mem_type_flags & Anvil::MemoryPropertyFlagBits::DEVICE_LOCAL_BIT) != 0) + { + result |= Anvil::MemoryFeatureFlagBits::DEVICE_LOCAL_BIT; + } + + if ((in_mem_type_flags & Anvil::MemoryPropertyFlagBits::HOST_VISIBLE_BIT) != 0) + { + result |= Anvil::MemoryFeatureFlagBits::MAPPABLE_BIT; + } + + if ((in_mem_type_flags & Anvil::MemoryPropertyFlagBits::HOST_COHERENT_BIT) != 0) + { + result |= Anvil::MemoryFeatureFlagBits::HOST_COHERENT_BIT; + } + + if ((in_mem_type_flags & Anvil::MemoryPropertyFlagBits::HOST_CACHED_BIT) != 0) + { + result |= Anvil::MemoryFeatureFlagBits::HOST_CACHED_BIT; + } + + if ((in_mem_type_flags & Anvil::MemoryPropertyFlagBits::LAZILY_ALLOCATED_BIT) != 0) + { + result |= Anvil::MemoryFeatureFlagBits::LAZILY_ALLOCATED_BIT; + } + + if ((in_mem_heap_flags & Anvil::MemoryHeapFlagBits::MULTI_INSTANCE_BIT_KHR) != 0) + { + result |= Anvil::MemoryFeatureFlagBits::MULTI_INSTANCE_BIT; + } + + if ((in_mem_type_flags & Anvil::MemoryPropertyFlagBits::PROTECTED_BIT) != 0) + { + result |= Anvil::MemoryFeatureFlagBits::PROTECTED_BIT; + } + + return result; +} + +Anvil::MTSafety Anvil::Utils::convert_boolean_to_mt_safety_enum(bool in_mt_safe) +{ + return (in_mt_safe) ? 
Anvil::MTSafety::ENABLED + : Anvil::MTSafety::DISABLED; +} + +bool Anvil::Utils::convert_mt_safety_enum_to_boolean(Anvil::MTSafety in_mt_safety, + const Anvil::BaseDevice* in_device_ptr) +{ + bool result = false; + + switch (in_mt_safety) + { + case Anvil::MTSafety::DISABLED: result = false; break; + case Anvil::MTSafety::ENABLED: result = true; break; + + case Anvil::MTSafety::INHERIT_FROM_PARENT_DEVICE: + { + anvil_assert(in_device_ptr != nullptr); + + result = in_device_ptr->is_mt_safe(); + break; + } + + default: + { + anvil_assert_fail(); + } + } + + return result; +} + +/** Please see header for specification */ +void Anvil::Utils::convert_queue_family_bits_to_family_indices(const Anvil::BaseDevice* in_device_ptr, + Anvil::QueueFamilyFlags in_queue_families, + uint32_t* out_opt_queue_family_indices_ptr, + uint32_t* out_opt_n_queue_family_indices_ptr) +{ + uint32_t n_result_queue_family_indices(0); + + static const struct + { + Anvil::QueueFamilyFlagBits queue_family; + Anvil::QueueFamilyType queue_family_type; + } queue_family_data[] = + { + {Anvil::QueueFamilyFlagBits::COMPUTE_BIT, Anvil::QueueFamilyType::COMPUTE}, + {Anvil::QueueFamilyFlagBits::DMA_BIT, Anvil::QueueFamilyType::TRANSFER}, + {Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, Anvil::QueueFamilyType::UNIVERSAL}, + }; + + for (const auto& current_queue_fam_data : queue_family_data) + { + if ((in_queue_families & current_queue_fam_data.queue_family) != 0) + { + uint32_t n_queue_family_indices = 0; + const uint32_t* queue_family_indices_ptr = nullptr; + + in_device_ptr->get_queue_family_indices_for_queue_family_type(current_queue_fam_data.queue_family_type, + &n_queue_family_indices, + &queue_family_indices_ptr); + + if (out_opt_queue_family_indices_ptr != nullptr) + { + for (uint32_t n_queue_family_index = 0; + n_queue_family_index < n_queue_family_indices; + ++n_queue_family_index, ++n_result_queue_family_indices) + { + out_opt_queue_family_indices_ptr[n_result_queue_family_indices] = queue_family_indices_ptr[n_queue_family_index]; + } + } + else + { + n_result_queue_family_indices += n_queue_family_indices; + } + } + } + + if (out_opt_n_queue_family_indices_ptr != nullptr) + { + *out_opt_n_queue_family_indices_ptr = n_result_queue_family_indices; + } +} + +/** Please see header for specification */ +Anvil::AccessFlags Anvil::Utils::get_access_mask_from_image_layout(Anvil::ImageLayout in_layout, + Anvil::QueueFamilyType in_queue_family_type) +{ + Anvil::AccessFlags result; + + switch (in_layout) + { + case Anvil::ImageLayout::UNDEFINED: + { + break; + } + + case Anvil::ImageLayout::GENERAL: + { + result = Anvil::AccessFlagBits::INDIRECT_COMMAND_READ_BIT | + Anvil::AccessFlagBits::INDEX_READ_BIT | + Anvil::AccessFlagBits::VERTEX_ATTRIBUTE_READ_BIT | + Anvil::AccessFlagBits::UNIFORM_READ_BIT | + Anvil::AccessFlagBits::INPUT_ATTACHMENT_READ_BIT | + Anvil::AccessFlagBits::SHADER_READ_BIT | + Anvil::AccessFlagBits::SHADER_WRITE_BIT | + Anvil::AccessFlagBits::COLOR_ATTACHMENT_READ_BIT | + Anvil::AccessFlagBits::COLOR_ATTACHMENT_WRITE_BIT | + Anvil::AccessFlagBits::DEPTH_STENCIL_ATTACHMENT_READ_BIT | + Anvil::AccessFlagBits::DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | + Anvil::AccessFlagBits::TRANSFER_READ_BIT | + Anvil::AccessFlagBits::TRANSFER_WRITE_BIT | + Anvil::AccessFlagBits::HOST_READ_BIT | + Anvil::AccessFlagBits::HOST_WRITE_BIT | + Anvil::AccessFlagBits::MEMORY_READ_BIT | + Anvil::AccessFlagBits::MEMORY_WRITE_BIT; + + break; + } + + case Anvil::ImageLayout::COLOR_ATTACHMENT_OPTIMAL: + { + result = 
Anvil::AccessFlagBits::COLOR_ATTACHMENT_READ_BIT | + Anvil::AccessFlagBits::COLOR_ATTACHMENT_WRITE_BIT; + + break; + } + + case Anvil::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL: + { + result = Anvil::AccessFlagBits::DEPTH_STENCIL_ATTACHMENT_READ_BIT | + Anvil::AccessFlagBits::DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; + + break; + } + + case Anvil::ImageLayout::DEPTH_STENCIL_READ_ONLY_OPTIMAL: + { + result = Anvil::AccessFlagBits::DEPTH_STENCIL_ATTACHMENT_READ_BIT; + + break; + } + + case Anvil::ImageLayout::SHADER_READ_ONLY_OPTIMAL: + { + result = Anvil::AccessFlagBits::SHADER_READ_BIT; + + break; + } + + case Anvil::ImageLayout::TRANSFER_SRC_OPTIMAL: + { + result = Anvil::AccessFlagBits::TRANSFER_READ_BIT; + + break; + } + + case Anvil::ImageLayout::TRANSFER_DST_OPTIMAL: + { + result = Anvil::AccessFlagBits::TRANSFER_WRITE_BIT; + + break; + } + + case Anvil::ImageLayout::PREINITIALIZED: + { + result = Anvil::AccessFlagBits::SHADER_READ_BIT; + + break; + } + + case Anvil::ImageLayout::PRESENT_SRC_KHR: + { + result = Anvil::AccessFlagBits::MEMORY_READ_BIT; + + break; + } + + default: + { + /* Invalid Anvil::ImageLayout argument value */ + anvil_assert_fail(); + } + } + + switch (in_queue_family_type) + { + case Anvil::QueueFamilyType::COMPUTE: + { + result &= (Anvil::AccessFlagBits::INDIRECT_COMMAND_READ_BIT | + Anvil::AccessFlagBits::MEMORY_READ_BIT | + Anvil::AccessFlagBits::MEMORY_WRITE_BIT | + Anvil::AccessFlagBits::SHADER_READ_BIT | + Anvil::AccessFlagBits::SHADER_WRITE_BIT | + Anvil::AccessFlagBits::TRANSFER_READ_BIT | + Anvil::AccessFlagBits::TRANSFER_WRITE_BIT | + Anvil::AccessFlagBits::UNIFORM_READ_BIT); + + break; + } + + case Anvil::QueueFamilyType::TRANSFER: + { + result &= (Anvil::AccessFlagBits::MEMORY_READ_BIT | + Anvil::AccessFlagBits::MEMORY_WRITE_BIT | + Anvil::AccessFlagBits::TRANSFER_READ_BIT | + Anvil::AccessFlagBits::TRANSFER_WRITE_BIT); + + break; + } + + case Anvil::QueueFamilyType::UNIVERSAL: + { + result &= (Anvil::AccessFlagBits::COLOR_ATTACHMENT_READ_BIT | + Anvil::AccessFlagBits::COLOR_ATTACHMENT_WRITE_BIT | + Anvil::AccessFlagBits::DEPTH_STENCIL_ATTACHMENT_READ_BIT | + Anvil::AccessFlagBits::DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | + Anvil::AccessFlagBits::INDIRECT_COMMAND_READ_BIT | + Anvil::AccessFlagBits::INDEX_READ_BIT | + Anvil::AccessFlagBits::MEMORY_READ_BIT | + Anvil::AccessFlagBits::MEMORY_WRITE_BIT | + Anvil::AccessFlagBits::SHADER_READ_BIT | + Anvil::AccessFlagBits::SHADER_WRITE_BIT | + Anvil::AccessFlagBits::TRANSFER_READ_BIT | + Anvil::AccessFlagBits::TRANSFER_WRITE_BIT | + Anvil::AccessFlagBits::UNIFORM_READ_BIT | + Anvil::AccessFlagBits::VERTEX_ATTRIBUTE_READ_BIT); + + break; + } + + case Anvil::QueueFamilyType::UNDEFINED: + { + break; + } + + default: + { + anvil_assert_fail(); + } + } + + return result; +} + +/* Please see header for specification */ +Anvil::ObjectType Anvil::Utils::get_object_type_for_vk_debug_report_object_type(const VkDebugReportObjectTypeEXT& in_object_type) +{ + Anvil::ObjectType result = Anvil::ObjectType::UNKNOWN; + + switch (in_object_type) + { + case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT: result = Anvil::ObjectType::BUFFER; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT: result = Anvil::ObjectType::BUFFER_VIEW; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT: result = Anvil::ObjectType::COMMAND_BUFFER; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT: result = Anvil::ObjectType::COMMAND_POOL; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT: result = 
Anvil::ObjectType::DEBUG_REPORT_CALLBACK; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT: result = Anvil::ObjectType::DESCRIPTOR_POOL; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT: result = Anvil::ObjectType::DESCRIPTOR_SET; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT: result = Anvil::ObjectType::DESCRIPTOR_SET_LAYOUT; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT: result = Anvil::ObjectType::DESCRIPTOR_UPDATE_TEMPLATE; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT: result = Anvil::ObjectType::DEVICE; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT: result = Anvil::ObjectType::ANVIL_MEMORY_BLOCK; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT: result = Anvil::ObjectType::EVENT; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT: result = Anvil::ObjectType::FENCE; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT: result = Anvil::ObjectType::FRAMEBUFFER; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT: result = Anvil::ObjectType::IMAGE; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT: result = Anvil::ObjectType::IMAGE_VIEW; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT: result = Anvil::ObjectType::INSTANCE; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT: result = Anvil::ObjectType::PHYSICAL_DEVICE; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT: result = Anvil::ObjectType::PIPELINE_CACHE; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT: result = Anvil::ObjectType::PIPELINE_LAYOUT; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT: result = Anvil::ObjectType::QUEUE; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT: result = Anvil::ObjectType::QUERY_POOL; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT: result = Anvil::ObjectType::RENDER_PASS; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT: result = Anvil::ObjectType::SAMPLER; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT: result = Anvil::ObjectType::SEMAPHORE; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT: result = Anvil::ObjectType::SHADER_MODULE; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT: result = Anvil::ObjectType::RENDERING_SURFACE; break; + case VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT: result = Anvil::ObjectType::SWAPCHAIN; break; + + default: + { + anvil_assert_fail(); + } + } + + return result; +} + +/* Please see header for specification */ +Anvil::ObjectType Anvil::Utils::get_object_type_for_vk_object_type(const VkObjectType& in_object_type) +{ + return static_cast<Anvil::ObjectType>(in_object_type); +} + +/* Please see header for specification */ +Anvil::QueueFamilyFlags Anvil::Utils::get_queue_family_flags_from_queue_family_type(Anvil::QueueFamilyType in_queue_family_type) +{ + Anvil::QueueFamilyFlags result; + + switch (in_queue_family_type) + { + case Anvil::QueueFamilyType::COMPUTE: result = Anvil::QueueFamilyFlagBits::COMPUTE_BIT; break; + case Anvil::QueueFamilyType::TRANSFER: result = Anvil::QueueFamilyFlagBits::DMA_BIT; break; + case Anvil::QueueFamilyType::UNIVERSAL: result = Anvil::QueueFamilyFlagBits::COMPUTE_BIT | Anvil::QueueFamilyFlagBits::GRAPHICS_BIT; break; + + default: + { + anvil_assert_fail(); + } + } + + return result; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(Anvil::QueueFamilyType in_queue_family_type) +{ + static const char* result_strings[] = + { + "Compute", + "Transfer", + "Universal", + }; + static const uint32_t
n_result_strings = sizeof(result_strings) / sizeof(result_strings[0]); + + static_assert(n_result_strings == static_cast<uint32_t>(Anvil::QueueFamilyType::COUNT), ""); + + return result_strings[static_cast<uint32_t>(in_queue_family_type)]; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkAttachmentLoadOp in_load_op) +{ + static const char* attachment_load_op_strings[] = + { + "VK_ATTACHMENT_LOAD_OP_LOAD", + "VK_ATTACHMENT_LOAD_OP_CLEAR", + "VK_ATTACHMENT_LOAD_OP_DONT_CARE", + }; + static const uint32_t n_attachment_load_op_strings = sizeof(attachment_load_op_strings) / sizeof(attachment_load_op_strings[0]); + + // static_assert(n_attachment_load_op_strings == VK_ATTACHMENT_LOAD_OP_RANGE_SIZE, ""); + // anvil_assert (in_load_op <= VK_ATTACHMENT_LOAD_OP_END_RANGE); + + return attachment_load_op_strings[in_load_op]; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkAttachmentStoreOp in_store_op) +{ + static const char* attachment_store_op_strings[] = + { + "VK_ATTACHMENT_STORE_OP_STORE", + "VK_ATTACHMENT_STORE_OP_DONT_CARE", + }; + static const uint32_t n_attachment_store_op_strings = sizeof(attachment_store_op_strings) / sizeof(attachment_store_op_strings[0]); + + // static_assert(n_attachment_store_op_strings == VK_ATTACHMENT_STORE_OP_RANGE_SIZE, ""); + // anvil_assert (in_store_op <= VK_ATTACHMENT_STORE_OP_END_RANGE); + + return attachment_store_op_strings[in_store_op]; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkBlendFactor in_blend_factor) +{ + const char* result = "?"; + + switch (in_blend_factor) + { + case VK_BLEND_FACTOR_ZERO: result = "VK_BLEND_FACTOR_ZERO"; break; + case VK_BLEND_FACTOR_ONE: result = "VK_BLEND_FACTOR_ONE"; break; + case VK_BLEND_FACTOR_SRC_COLOR: result = "VK_BLEND_FACTOR_SRC_COLOR"; break; + case VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR: result = "VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR"; break; + case VK_BLEND_FACTOR_DST_COLOR: result = "VK_BLEND_FACTOR_DST_COLOR"; break; + case VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR: result = "VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR"; break; + case VK_BLEND_FACTOR_SRC_ALPHA: result = "VK_BLEND_FACTOR_SRC_ALPHA"; break; + case VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA: result = "VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA"; break; + case VK_BLEND_FACTOR_DST_ALPHA: result = "VK_BLEND_FACTOR_DST_ALPHA"; break; + case VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA: result = "VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA"; break; + case VK_BLEND_FACTOR_CONSTANT_COLOR: result = "VK_BLEND_FACTOR_CONSTANT_COLOR"; break; + case VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR: result = "VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR"; break; + case VK_BLEND_FACTOR_CONSTANT_ALPHA: result = "VK_BLEND_FACTOR_CONSTANT_ALPHA"; break; + case VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA: result = "VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA"; break; + case VK_BLEND_FACTOR_SRC_ALPHA_SATURATE: result = "VK_BLEND_FACTOR_SRC_ALPHA_SATURATE"; break; + case VK_BLEND_FACTOR_SRC1_COLOR: result = "VK_BLEND_FACTOR_SRC1_COLOR"; break; + case VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR: result = "VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR"; break; + case VK_BLEND_FACTOR_SRC1_ALPHA: result = "VK_BLEND_FACTOR_SRC1_ALPHA"; break; + case VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA: result = "VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA"; break; + + default: + { + anvil_assert_fail(); + } + } + + return result; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkBlendOp in_blend_op) +{ + const char* result =
"?"; + + switch (in_blend_op) + { + case VK_BLEND_OP_ADD: result = "VK_BLEND_OP_ADD"; break; + case VK_BLEND_OP_SUBTRACT: result = "VK_BLEND_OP_SUBTRACT"; break; + case VK_BLEND_OP_REVERSE_SUBTRACT: result = "VK_BLEND_OP_REVERSE_SUBTRACT"; break; + case VK_BLEND_OP_MIN: result = "VK_BLEND_OP_MIN"; break; + case VK_BLEND_OP_MAX: result = "VK_BLEND_OP_MAX"; break; + + default: + { + anvil_assert_fail(); + } + } + + return result; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkCompareOp in_compare_op) +{ + const char* result = "?"; + + switch (in_compare_op) + { + case VK_COMPARE_OP_NEVER: result = "VK_COMPARE_OP_NEVER"; break; + case VK_COMPARE_OP_LESS: result = "VK_COMPARE_OP_LESS"; break; + case VK_COMPARE_OP_EQUAL: result = "VK_COMPARE_OP_EQUAL"; break; + case VK_COMPARE_OP_LESS_OR_EQUAL: result = "VK_COMPARE_OP_LESS_OR_EQUAL"; break; + case VK_COMPARE_OP_GREATER: result = "VK_COMPARE_OP_GREATER"; break; + case VK_COMPARE_OP_NOT_EQUAL: result = "VK_COMPARE_OP_NOT_EQUAL"; break; + case VK_COMPARE_OP_GREATER_OR_EQUAL: result = "VK_COMPARE_OP_GREATER_OR_EQUAL"; break; + case VK_COMPARE_OP_ALWAYS: result = "VK_COMPARE_OP_ALWAYS"; break; + + default: + { + anvil_assert_fail(); + } + } + + return result; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkCullModeFlagBits in_cull_mode) +{ + const char* result = "?"; + + switch (in_cull_mode) + { + case VK_CULL_MODE_NONE: result = "VK_CULL_MODE_NONE"; break; + case VK_CULL_MODE_FRONT_BIT: result = "VK_CULL_MODE_FRONT_BIT"; break; + case VK_CULL_MODE_BACK_BIT: result = "VK_CULL_MODE_BACK_BIT"; break; + case VK_CULL_MODE_FRONT_AND_BACK: result = "VK_CULL_MODE_FRONT_AND_BACK"; break; + + default: + { + anvil_assert_fail(); + } + } + + return result; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkDescriptorType in_descriptor_type) +{ + const char* result = "?"; + + switch (in_descriptor_type) + { + case VK_DESCRIPTOR_TYPE_SAMPLER: result = "VK_DESCRIPTOR_TYPE_SAMPLER"; break; + case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: result = "VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER"; break; + case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: result = "VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE"; break; + case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: result = "VK_DESCRIPTOR_TYPE_STORAGE_IMAGE"; break; + case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: result = "VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER"; break; + case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: result = "VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER"; break; + case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: result = "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER"; break; + case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: result = "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER"; break; + case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: result = "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC"; break; + case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: result = "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC"; break; + case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: result = "VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT"; break; + + default: + { + anvil_assert_fail(); + } + } + + return result; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkFrontFace in_front_face) +{ + const char* result = "?"; + + switch (in_front_face) + { + case VK_FRONT_FACE_COUNTER_CLOCKWISE: result = "VK_FRONT_FACE_COUNTER_CLOCKWISE"; break; + case VK_FRONT_FACE_CLOCKWISE: result = "VK_FRONT_FACE_CLOCKWISE"; break; + + default: + { + anvil_assert_fail(); 
+ } + } + + return result; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkImageAspectFlagBits in_image_aspect_flag) +{ + const char* result = "?"; + + switch (in_image_aspect_flag) + { + case VK_IMAGE_ASPECT_COLOR_BIT: result = "VK_IMAGE_ASPECT_COLOR_BIT"; break; + case VK_IMAGE_ASPECT_DEPTH_BIT: result = "VK_IMAGE_ASPECT_DEPTH_BIT"; break; + case VK_IMAGE_ASPECT_STENCIL_BIT: result = "VK_IMAGE_ASPECT_STENCIL_BIT"; break; + case VK_IMAGE_ASPECT_METADATA_BIT: result = "VK_IMAGE_ASPECT_METADATA_BIT"; break; + + default: + { + anvil_assert_fail(); + } + } + + return result; + +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkImageLayout in_image_layout) +{ + const char* result = "?!"; + + /* Note: we can't use an array-based solution here because of PRESENT_SRC_KHR */ + switch (in_image_layout) + { + case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: result = "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL"; break; + case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR: result = "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR"; break; + case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR: result = "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR"; break; + case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: result = "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL"; break; + case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: result = "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL"; break; + case VK_IMAGE_LAYOUT_GENERAL: result = "VK_IMAGE_LAYOUT_GENERAL"; break; + case VK_IMAGE_LAYOUT_PREINITIALIZED: result = "VK_IMAGE_LAYOUT_PREINITIALIZED"; break; + case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR: result = "VK_IMAGE_LAYOUT_PRESENT_SRC_KHR"; break; + case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: result = "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL"; break; + case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: result = "VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL"; break; + case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: result = "VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL"; break; + case VK_IMAGE_LAYOUT_UNDEFINED: result = "VK_IMAGE_LAYOUT_UNDEFINED"; break; + + default: + { + anvil_assert_fail(); + + break; + } + } + + return result; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkImageTiling in_image_tiling) +{ + static const char* image_tilings[] = + { + "VK_IMAGE_TILING_OPTIMAL", + "VK_IMAGE_TILING_LINEAR" + }; + static const int32_t n_image_tilings = sizeof(image_tilings) / sizeof(image_tilings[0]); + + // static_assert(n_image_tilings == VK_IMAGE_TILING_RANGE_SIZE, ""); + anvil_assert (in_image_tiling < n_image_tilings); + + return image_tilings[in_image_tiling]; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkImageType in_image_type) +{ + static const char* image_types[] = + { + "VK_IMAGE_TYPE_1D", + "VK_IMAGE_TYPE_2D", + "VK_IMAGE_TYPE_3D" + }; + static const uint32_t n_image_types = sizeof(image_types) / sizeof(image_types[0]); + + // static_assert(n_image_types == VK_IMAGE_TYPE_RANGE_SIZE, ""); + // anvil_assert (in_image_type < VK_IMAGE_TYPE_RANGE_SIZE); + + return image_types[in_image_type]; +} + +/** Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkImageViewType in_image_view_type) +{ + static const char* image_view_types[] = + { + "VK_IMAGE_VIEW_TYPE_1D", + "VK_IMAGE_VIEW_TYPE_2D", + "VK_IMAGE_VIEW_TYPE_3D", + "VK_IMAGE_VIEW_TYPE_CUBE", + 
"VK_IMAGE_VIEW_TYPE_1D_ARRAY", + "VK_IMAGE_VIEW_TYPE_2D_ARRAY", + "VK_IMAGE_VIEW_TYPE_CUBE_ARRAY", + }; + static const uint32_t n_image_view_types = sizeof(image_view_types) / sizeof(image_view_types[0]); + + // static_assert(n_image_view_types == VK_IMAGE_VIEW_TYPE_RANGE_SIZE, ""); + // anvil_assert (in_image_view_type < VK_IMAGE_VIEW_TYPE_RANGE_SIZE); + + return image_view_types[in_image_view_type]; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkLogicOp in_logic_op) +{ + const char* result = "?"; + + switch (in_logic_op) + { + case VK_LOGIC_OP_CLEAR: result = "VK_LOGIC_OP_CLEAR"; break; + case VK_LOGIC_OP_AND: result = "VK_LOGIC_OP_AND"; break; + case VK_LOGIC_OP_AND_REVERSE: result = "VK_LOGIC_OP_AND_REVERSE"; break; + case VK_LOGIC_OP_COPY: result = "VK_LOGIC_OP_COPY"; break; + case VK_LOGIC_OP_AND_INVERTED: result = "VK_LOGIC_OP_AND_INVERTED"; break; + case VK_LOGIC_OP_NO_OP: result = "VK_LOGIC_OP_NO_OP"; break; + case VK_LOGIC_OP_XOR: result = "VK_LOGIC_OP_XOR"; break; + case VK_LOGIC_OP_OR: result = "VK_LOGIC_OP_OR"; break; + case VK_LOGIC_OP_NOR: result = "VK_LOGIC_OP_NOR"; break; + case VK_LOGIC_OP_EQUIVALENT: result = "VK_LOGIC_OP_EQUIVALENT"; break; + case VK_LOGIC_OP_INVERT: result = "VK_LOGIC_OP_INVERT"; break; + case VK_LOGIC_OP_OR_REVERSE: result = "VK_LOGIC_OP_OR_REVERSE"; break; + case VK_LOGIC_OP_COPY_INVERTED: result = "VK_LOGIC_OP_COPY_INVERTED"; break; + case VK_LOGIC_OP_OR_INVERTED: result = "VK_LOGIC_OP_OR_INVERTED"; break; + case VK_LOGIC_OP_NAND: result = "VK_LOGIC_OP_NAND"; break; + case VK_LOGIC_OP_SET: result = "VK_LOGIC_OP_SET"; break; + + default: + { + anvil_assert_fail(); + } + } + + return result; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkPolygonMode in_polygon_mode) +{ + const char* result = "?"; + + switch (in_polygon_mode) + { + case VK_POLYGON_MODE_FILL: result = "VK_POLYGON_MODE_FILL"; break; + case VK_POLYGON_MODE_LINE: result = "VK_POLYGON_MODE_LINE"; break; + case VK_POLYGON_MODE_POINT: result = "VK_POLYGON_MODE_POINT"; break; + + default: + { + anvil_assert_fail(); + } + } + + return result; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkPrimitiveTopology in_topology) +{ + const char* result = "?"; + + switch (in_topology) + { + case VK_PRIMITIVE_TOPOLOGY_POINT_LIST: result = "VK_PRIMITIVE_TOPOLOGY_POINT_LIST"; break; + case VK_PRIMITIVE_TOPOLOGY_LINE_LIST: result = "VK_PRIMITIVE_TOPOLOGY_LINE_LIST"; break; + case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP: result = "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP"; break; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST: result = "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST"; break; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP: result = "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP"; break; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN: result = "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN"; break; + case VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY: result = "VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY"; break; + case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY: result = "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY"; break; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY: result = "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY"; break; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY: result = "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY"; break; + case VK_PRIMITIVE_TOPOLOGY_PATCH_LIST: result = "VK_PRIMITIVE_TOPOLOGY_PATCH_LIST"; break; + + default: + { + 
anvil_assert_fail(); + } + } + + return result; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkSampleCountFlagBits in_sample_count) +{ + const char* result = "?"; + + switch (in_sample_count) + { + case VK_SAMPLE_COUNT_1_BIT: result = "VK_SAMPLE_COUNT_1_BIT"; break; + case VK_SAMPLE_COUNT_2_BIT: result = "VK_SAMPLE_COUNT_2_BIT"; break; + case VK_SAMPLE_COUNT_4_BIT: result = "VK_SAMPLE_COUNT_4_BIT"; break; + case VK_SAMPLE_COUNT_8_BIT: result = "VK_SAMPLE_COUNT_8_BIT"; break; + case VK_SAMPLE_COUNT_16_BIT: result = "VK_SAMPLE_COUNT_16_BIT"; break; + case VK_SAMPLE_COUNT_32_BIT: result = "VK_SAMPLE_COUNT_32_BIT"; break; + case VK_SAMPLE_COUNT_64_BIT: result = "VK_SAMPLE_COUNT_64_BIT"; break; + + default: + { + anvil_assert_fail(); + } + } + + return result; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(Anvil::ShaderStage in_shader_stage) +{ + const char* result = "?"; + + switch (in_shader_stage) + { + case ShaderStage::COMPUTE: result = "SHADER_STAGE_COMPUTE"; break; + case ShaderStage::FRAGMENT: result = "SHADER_STAGE_FRAGMENT"; break; + case ShaderStage::GEOMETRY: result = "SHADER_STAGE_GEOMETRY"; break; + case ShaderStage::TESSELLATION_CONTROL: result = "SHADER_STAGE_TESSELLATION_CONTROL"; break; + case ShaderStage::TESSELLATION_EVALUATION: result = "SHADER_STAGE_TESSELLATION_EVALUATION"; break; + case ShaderStage::VERTEX: result = "SHADER_STAGE_VERTEX"; break; + + default: + { + anvil_assert_fail(); + } + } + + return result; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkShaderStageFlagBits in_shader_stage) +{ + const char* result = "?"; + + switch (in_shader_stage) + { + case VK_SHADER_STAGE_ALL_GRAPHICS: result = "VK_SHADER_STAGE_ALL_GRAPHICS"; break; + case VK_SHADER_STAGE_COMPUTE_BIT: result = "VK_SHADER_STAGE_COMPUTE_BIT"; break; + case VK_SHADER_STAGE_FRAGMENT_BIT: result = "VK_SHADER_STAGE_FRAGMENT_BIT"; break; + case VK_SHADER_STAGE_GEOMETRY_BIT: result = "VK_SHADER_STAGE_GEOMETRY_BIT"; break; + case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT: result = "VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT"; break; + case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT: result = "VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT"; break; + case VK_SHADER_STAGE_VERTEX_BIT: result = "VK_SHADER_STAGE_VERTEX_BIT"; break; + + default: + { + anvil_assert_fail(); + } + } + + return result; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkSharingMode in_sharing_mode) +{ + static const char* sharing_modes[] = + { + "VK_SHARING_MODE_EXCLUSIVE", + "VK_SHARING_MODE_CONCURRENT" + }; + static const int32_t n_sharing_modes = sizeof(sharing_modes) / sizeof(sharing_modes[0]); + + // static_assert(n_sharing_modes == VK_SHARING_MODE_RANGE_SIZE, ""); + anvil_assert (in_sharing_mode < n_sharing_modes); + + return sharing_modes[in_sharing_mode]; +} + +/* Please see header for specification */ +const char* Anvil::Utils::get_raw_string(VkStencilOp in_stencil_op) +{ + const char* result = "?"; + + switch (in_stencil_op) + { + case VK_STENCIL_OP_KEEP: result = "VK_STENCIL_OP_KEEP"; break; + case VK_STENCIL_OP_ZERO: result = "VK_STENCIL_OP_ZERO"; break; + case VK_STENCIL_OP_REPLACE: result = "VK_STENCIL_OP_REPLACE"; break; + case VK_STENCIL_OP_INCREMENT_AND_CLAMP: result = "VK_STENCIL_OP_INCREMENT_AND_CLAMP"; break; + case VK_STENCIL_OP_DECREMENT_AND_CLAMP: result = "VK_STENCIL_OP_DECREMENT_AND_CLAMP"; break; + case VK_STENCIL_OP_INVERT: result = 
"VK_STENCIL_OP_INVERT"; break; + case VK_STENCIL_OP_INCREMENT_AND_WRAP: result = "VK_STENCIL_OP_INCREMENT_AND_WRAP"; break; + case VK_STENCIL_OP_DECREMENT_AND_WRAP: result = "VK_STENCIL_OP_DECREMENT_AND_WRAP"; break; + + default: + { + anvil_assert_fail(); + } + } + + return result; +} + +/* Please see header for specification */ +Anvil::ShaderStageFlagBits Anvil::Utils::get_shader_stage_flag_bits_from_shader_stage(Anvil::ShaderStage in_shader_stage) +{ + Anvil::ShaderStageFlagBits result = static_cast(0); + + switch (in_shader_stage) + { + case Anvil::ShaderStage::COMPUTE: result = Anvil::ShaderStageFlagBits::COMPUTE_BIT; break; + case Anvil::ShaderStage::FRAGMENT: result = Anvil::ShaderStageFlagBits::FRAGMENT_BIT; break; + case Anvil::ShaderStage::GEOMETRY: result = Anvil::ShaderStageFlagBits::GEOMETRY_BIT; break; + case Anvil::ShaderStage::TESSELLATION_CONTROL: result = Anvil::ShaderStageFlagBits::TESSELLATION_CONTROL_BIT; break; + case Anvil::ShaderStage::TESSELLATION_EVALUATION: result = Anvil::ShaderStageFlagBits::TESSELLATION_EVALUATION_BIT; break; + case Anvil::ShaderStage::VERTEX: result = Anvil::ShaderStageFlagBits::VERTEX_BIT; break; + + default: + { + anvil_assert_fail(); + } + } + + return result; +} + +/* Please see header for specification */ +VkDebugReportObjectTypeEXT Anvil::Utils::get_vk_debug_report_object_type_ext_from_object_type(const Anvil::ObjectType& in_object_type) +{ + VkDebugReportObjectTypeEXT result = VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT; + + switch (in_object_type) + { + case Anvil::ObjectType::BUFFER: result = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT; break; + case Anvil::ObjectType::BUFFER_VIEW: result = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT; break; + case Anvil::ObjectType::COMMAND_BUFFER: result = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT; break; + case Anvil::ObjectType::COMMAND_POOL: result = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT; break; + case Anvil::ObjectType::DESCRIPTOR_POOL: result = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT; break; + case Anvil::ObjectType::DESCRIPTOR_SET: result = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT; break; + case Anvil::ObjectType::DESCRIPTOR_SET_LAYOUT: result = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT; break; + case Anvil::ObjectType::DESCRIPTOR_UPDATE_TEMPLATE: result = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT; break; + case Anvil::ObjectType::DEVICE: result = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT; break; + case Anvil::ObjectType::EVENT: result = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT; break; + case Anvil::ObjectType::FENCE: result = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT; break; + case Anvil::ObjectType::FRAMEBUFFER: result = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT; break; + case Anvil::ObjectType::IMAGE: result = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT; break; + case Anvil::ObjectType::IMAGE_VIEW: result = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT; break; + case Anvil::ObjectType::INSTANCE: result = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT; break; + case Anvil::ObjectType::PHYSICAL_DEVICE: result = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT; break; + case Anvil::ObjectType::PIPELINE_CACHE: result = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT; break; + case Anvil::ObjectType::PIPELINE_LAYOUT: result = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT; break; + case Anvil::ObjectType::QUERY_POOL: result = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT; break; + case Anvil::ObjectType::QUEUE: result = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT; break; + case 
Anvil::ObjectType::RENDER_PASS: result = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT; break; + case Anvil::ObjectType::RENDERING_SURFACE: result = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT; break; + case Anvil::ObjectType::SAMPLER: result = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT; break; + case Anvil::ObjectType::SEMAPHORE: result = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT; break; + case Anvil::ObjectType::SHADER_MODULE: result = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT; break; + case Anvil::ObjectType::SWAPCHAIN: result = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT; break; + + default: + { + /* Either Anvil-specific object OR one that is not recognized by VK_EXT_debug_utils. The extension has + * been deprecated, so there's not much we can do at this point, other than return MAX_ENUM_EXT and hope + * for the best. + */ + } + } + + return result; +} + +/* Please see header for specification */ +VkObjectType Anvil::Utils::get_vk_object_type_for_object_type(const Anvil::ObjectType& in_object_type) +{ + VkObjectType result = VK_OBJECT_TYPE_MAX_ENUM; + + switch (in_object_type) + { + case Anvil::ObjectType::BUFFER: result = VK_OBJECT_TYPE_BUFFER; break; + case Anvil::ObjectType::BUFFER_VIEW: result = VK_OBJECT_TYPE_BUFFER_VIEW; break; + case Anvil::ObjectType::COMMAND_BUFFER: result = VK_OBJECT_TYPE_COMMAND_BUFFER; break; + case Anvil::ObjectType::COMMAND_POOL: result = VK_OBJECT_TYPE_COMMAND_POOL; break; + case Anvil::ObjectType::DESCRIPTOR_POOL: result = VK_OBJECT_TYPE_DESCRIPTOR_POOL; break; + case Anvil::ObjectType::DESCRIPTOR_SET: result = VK_OBJECT_TYPE_DESCRIPTOR_SET; break; + case Anvil::ObjectType::DESCRIPTOR_SET_LAYOUT: result = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT; break; + case Anvil::ObjectType::DESCRIPTOR_UPDATE_TEMPLATE: result = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE; break; + case Anvil::ObjectType::DEVICE: result = VK_OBJECT_TYPE_DEVICE; break; + case Anvil::ObjectType::EVENT: result = VK_OBJECT_TYPE_EVENT; break; + case Anvil::ObjectType::FENCE: result = VK_OBJECT_TYPE_FENCE; break; + case Anvil::ObjectType::FRAMEBUFFER: result = VK_OBJECT_TYPE_FRAMEBUFFER; break; + case Anvil::ObjectType::IMAGE: result = VK_OBJECT_TYPE_IMAGE; break; + case Anvil::ObjectType::IMAGE_VIEW: result = VK_OBJECT_TYPE_IMAGE_VIEW; break; + case Anvil::ObjectType::INSTANCE: result = VK_OBJECT_TYPE_INSTANCE; break; + case Anvil::ObjectType::PHYSICAL_DEVICE: result = VK_OBJECT_TYPE_PHYSICAL_DEVICE; break; + case Anvil::ObjectType::PIPELINE_CACHE: result = VK_OBJECT_TYPE_PIPELINE_CACHE; break; + case Anvil::ObjectType::PIPELINE_LAYOUT: result = VK_OBJECT_TYPE_PIPELINE_LAYOUT; break; + case Anvil::ObjectType::QUERY_POOL: result = VK_OBJECT_TYPE_QUERY_POOL; break; + case Anvil::ObjectType::QUEUE: result = VK_OBJECT_TYPE_QUEUE; break; + case Anvil::ObjectType::RENDER_PASS: result = VK_OBJECT_TYPE_RENDER_PASS; break; + case Anvil::ObjectType::RENDERING_SURFACE: result = VK_OBJECT_TYPE_SURFACE_KHR; break; + case Anvil::ObjectType::SAMPLER: result = VK_OBJECT_TYPE_SAMPLER; break; + case Anvil::ObjectType::SEMAPHORE: result = VK_OBJECT_TYPE_SEMAPHORE; break; + case Anvil::ObjectType::SHADER_MODULE: result = VK_OBJECT_TYPE_SHADER_MODULE; break; + case Anvil::ObjectType::SWAPCHAIN: result = VK_OBJECT_TYPE_SWAPCHAIN_KHR; break; + + default: + { + /* Either Anvil-specific object OR one that is not recognized by VK_EXT_debug_utils. The extension has + * been deprecated, so there's not much we can do at this point, other than return MAX_ENUM_EXT and hope + * for the best. 
+ */ + } + } + + return result; +} + +/* Please see header for specification */ +void Anvil::Utils::get_vk_property_flags_from_memory_feature_flags(Anvil::MemoryFeatureFlags in_mem_feature_flags, + Anvil::MemoryPropertyFlags* out_mem_type_flags_ptr, + Anvil::MemoryHeapFlags* out_mem_heap_flags_ptr) +{ + Anvil::MemoryHeapFlags result_mem_heap_flags; + Anvil::MemoryPropertyFlags result_mem_type_flags; + + if ((in_mem_feature_flags & Anvil::MemoryFeatureFlagBits::DEVICE_LOCAL_BIT) != 0) + { + result_mem_type_flags |= Anvil::MemoryPropertyFlagBits::DEVICE_LOCAL_BIT; + } + + if ((in_mem_feature_flags & Anvil::MemoryFeatureFlagBits::MAPPABLE_BIT) != 0) + { + result_mem_type_flags |= Anvil::MemoryPropertyFlagBits::HOST_VISIBLE_BIT; + } + + if ((in_mem_feature_flags & Anvil::MemoryFeatureFlagBits::HOST_COHERENT_BIT) != 0) + { + result_mem_type_flags |= Anvil::MemoryPropertyFlagBits::HOST_COHERENT_BIT; + } + + if ((in_mem_feature_flags & Anvil::MemoryFeatureFlagBits::HOST_CACHED_BIT) != 0) + { + result_mem_type_flags |= Anvil::MemoryPropertyFlagBits::HOST_CACHED_BIT; + } + + if ((in_mem_feature_flags & Anvil::MemoryFeatureFlagBits::LAZILY_ALLOCATED_BIT) != 0) + { + result_mem_type_flags |= Anvil::MemoryPropertyFlagBits::LAZILY_ALLOCATED_BIT; + } + + if ((in_mem_feature_flags & Anvil::MemoryFeatureFlagBits::MULTI_INSTANCE_BIT) != 0) + { + result_mem_heap_flags |= Anvil::MemoryHeapFlagBits::MULTI_INSTANCE_BIT_KHR; + } + + if ((in_mem_feature_flags & Anvil::MemoryFeatureFlagBits::PROTECTED_BIT) != 0) + { + result_mem_type_flags |= Anvil::MemoryPropertyFlagBits::PROTECTED_BIT; + } + + *out_mem_heap_flags_ptr = result_mem_heap_flags; + *out_mem_type_flags_ptr = result_mem_type_flags; +} + +#ifdef _WIN32 + bool Anvil::Utils::is_nt_handle(const Anvil::ExternalFenceHandleTypeFlagBits& in_type) + { + return (in_type == Anvil::ExternalFenceHandleTypeFlagBits::OPAQUE_WIN32_BIT); + } + + bool Anvil::Utils::is_nt_handle(const Anvil::ExternalMemoryHandleTypeFlagBits& in_type) + { + return (in_type == Anvil::ExternalMemoryHandleTypeFlagBits::D3D11_TEXTURE_BIT || + in_type == Anvil::ExternalMemoryHandleTypeFlagBits::D3D12_HEAP_BIT || + in_type == Anvil::ExternalMemoryHandleTypeFlagBits::D3D12_RESOURCE_BIT || + in_type == Anvil::ExternalMemoryHandleTypeFlagBits::OPAQUE_WIN32_BIT); + } + + bool Anvil::Utils::is_nt_handle(const Anvil::ExternalSemaphoreHandleTypeFlagBits& in_type) + { + return (in_type == Anvil::ExternalSemaphoreHandleTypeFlagBits::D3D12_FENCE_BIT || + in_type == Anvil::ExternalSemaphoreHandleTypeFlagBits::OPAQUE_WIN32_BIT); + } +#endif \ No newline at end of file diff --git a/src/misc/vulkan.cpp b/src/misc/vulkan.cpp new file mode 100644 index 00000000..71f3ffce --- /dev/null +++ b/src/misc/vulkan.cpp @@ -0,0 +1,366 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +#define ANVIL_VULKAN_CPP + +#include "misc/vulkan.h" +#include + +/* For Anvil builds which statically link with Vulkan DLL, the pointers are set to the system-provided func ptrs. + * For Anvil builds which link dynamically with Vulkan DLL, the pointers are initialized at Anvil::Instance creation time. + */ + +#if !defined(ANVIL_LINK_STATICALLY_WITH_VULKAN_LIB) + PFN_vkCreateInstance Anvil::Vulkan::vkCreateInstance = nullptr; + PFN_vkDestroyInstance Anvil::Vulkan::vkDestroyInstance = nullptr; + PFN_vkEnumeratePhysicalDevices Anvil::Vulkan::vkEnumeratePhysicalDevices = nullptr; + PFN_vkGetPhysicalDeviceFeatures Anvil::Vulkan::vkGetPhysicalDeviceFeatures = nullptr; + PFN_vkGetPhysicalDeviceFormatProperties Anvil::Vulkan::vkGetPhysicalDeviceFormatProperties = nullptr; + PFN_vkGetPhysicalDeviceImageFormatProperties Anvil::Vulkan::vkGetPhysicalDeviceImageFormatProperties = nullptr; + PFN_vkGetPhysicalDeviceProperties Anvil::Vulkan::vkGetPhysicalDeviceProperties = nullptr; + PFN_vkGetPhysicalDeviceQueueFamilyProperties Anvil::Vulkan::vkGetPhysicalDeviceQueueFamilyProperties = nullptr; + PFN_vkGetPhysicalDeviceMemoryProperties Anvil::Vulkan::vkGetPhysicalDeviceMemoryProperties = nullptr; + PFN_vkGetInstanceProcAddr Anvil::Vulkan::vkGetInstanceProcAddr = nullptr; + PFN_vkGetDeviceProcAddr Anvil::Vulkan::vkGetDeviceProcAddr = nullptr; + PFN_vkCreateDevice Anvil::Vulkan::vkCreateDevice = nullptr; + PFN_vkDestroyDevice Anvil::Vulkan::vkDestroyDevice = nullptr; + PFN_vkEnumerateInstanceExtensionProperties Anvil::Vulkan::vkEnumerateInstanceExtensionProperties = nullptr; + PFN_vkEnumerateDeviceExtensionProperties Anvil::Vulkan::vkEnumerateDeviceExtensionProperties = nullptr; + PFN_vkEnumerateInstanceLayerProperties Anvil::Vulkan::vkEnumerateInstanceLayerProperties = nullptr; + PFN_vkEnumerateDeviceLayerProperties Anvil::Vulkan::vkEnumerateDeviceLayerProperties = nullptr; + PFN_vkGetDeviceQueue Anvil::Vulkan::vkGetDeviceQueue = nullptr; + PFN_vkQueueSubmit Anvil::Vulkan::vkQueueSubmit = nullptr; + PFN_vkQueueWaitIdle Anvil::Vulkan::vkQueueWaitIdle = nullptr; + PFN_vkDeviceWaitIdle Anvil::Vulkan::vkDeviceWaitIdle = nullptr; + PFN_vkAllocateMemory Anvil::Vulkan::vkAllocateMemory = nullptr; + PFN_vkFreeMemory Anvil::Vulkan::vkFreeMemory = nullptr; + PFN_vkMapMemory Anvil::Vulkan::vkMapMemory = nullptr; + PFN_vkUnmapMemory Anvil::Vulkan::vkUnmapMemory = nullptr; + PFN_vkFlushMappedMemoryRanges Anvil::Vulkan::vkFlushMappedMemoryRanges = nullptr; + PFN_vkInvalidateMappedMemoryRanges Anvil::Vulkan::vkInvalidateMappedMemoryRanges = nullptr; + PFN_vkGetDeviceMemoryCommitment Anvil::Vulkan::vkGetDeviceMemoryCommitment = nullptr; + PFN_vkBindBufferMemory Anvil::Vulkan::vkBindBufferMemory = nullptr; + PFN_vkBindImageMemory Anvil::Vulkan::vkBindImageMemory = nullptr; + PFN_vkGetBufferMemoryRequirements Anvil::Vulkan::vkGetBufferMemoryRequirements = nullptr; + PFN_vkGetImageMemoryRequirements Anvil::Vulkan::vkGetImageMemoryRequirements = nullptr; + PFN_vkGetImageSparseMemoryRequirements 
Anvil::Vulkan::vkGetImageSparseMemoryRequirements = nullptr; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties Anvil::Vulkan::vkGetPhysicalDeviceSparseImageFormatProperties = nullptr; + PFN_vkQueueBindSparse Anvil::Vulkan::vkQueueBindSparse = nullptr; + PFN_vkCreateFence Anvil::Vulkan::vkCreateFence = nullptr; + PFN_vkDestroyFence Anvil::Vulkan::vkDestroyFence = nullptr; + PFN_vkResetFences Anvil::Vulkan::vkResetFences = nullptr; + PFN_vkGetFenceStatus Anvil::Vulkan::vkGetFenceStatus = nullptr; + PFN_vkWaitForFences Anvil::Vulkan::vkWaitForFences = nullptr; + PFN_vkCreateSemaphore Anvil::Vulkan::vkCreateSemaphore = nullptr; + PFN_vkDestroySemaphore Anvil::Vulkan::vkDestroySemaphore = nullptr; + PFN_vkCreateEvent Anvil::Vulkan::vkCreateEvent = nullptr; + PFN_vkDestroyEvent Anvil::Vulkan::vkDestroyEvent = nullptr; + PFN_vkGetEventStatus Anvil::Vulkan::vkGetEventStatus = nullptr; + PFN_vkSetEvent Anvil::Vulkan::vkSetEvent = nullptr; + PFN_vkResetEvent Anvil::Vulkan::vkResetEvent = nullptr; + PFN_vkCreateQueryPool Anvil::Vulkan::vkCreateQueryPool = nullptr; + PFN_vkDestroyQueryPool Anvil::Vulkan::vkDestroyQueryPool = nullptr; + PFN_vkGetQueryPoolResults Anvil::Vulkan::vkGetQueryPoolResults = nullptr; + PFN_vkCreateBuffer Anvil::Vulkan::vkCreateBuffer = nullptr; + PFN_vkDestroyBuffer Anvil::Vulkan::vkDestroyBuffer = nullptr; + PFN_vkCreateBufferView Anvil::Vulkan::vkCreateBufferView = nullptr; + PFN_vkDestroyBufferView Anvil::Vulkan::vkDestroyBufferView = nullptr; + PFN_vkCreateImage Anvil::Vulkan::vkCreateImage = nullptr; + PFN_vkDestroyImage Anvil::Vulkan::vkDestroyImage = nullptr; + PFN_vkGetImageSubresourceLayout Anvil::Vulkan::vkGetImageSubresourceLayout = nullptr; + PFN_vkCreateImageView Anvil::Vulkan::vkCreateImageView = nullptr; + PFN_vkDestroyImageView Anvil::Vulkan::vkDestroyImageView = nullptr; + PFN_vkCreateShaderModule Anvil::Vulkan::vkCreateShaderModule = nullptr; + PFN_vkDestroyShaderModule Anvil::Vulkan::vkDestroyShaderModule = nullptr; + PFN_vkCreatePipelineCache Anvil::Vulkan::vkCreatePipelineCache = nullptr; + PFN_vkDestroyPipelineCache Anvil::Vulkan::vkDestroyPipelineCache = nullptr; + PFN_vkGetPipelineCacheData Anvil::Vulkan::vkGetPipelineCacheData = nullptr; + PFN_vkMergePipelineCaches Anvil::Vulkan::vkMergePipelineCaches = nullptr; + PFN_vkCreateGraphicsPipelines Anvil::Vulkan::vkCreateGraphicsPipelines = nullptr; + PFN_vkCreateComputePipelines Anvil::Vulkan::vkCreateComputePipelines = nullptr; + PFN_vkDestroyPipeline Anvil::Vulkan::vkDestroyPipeline = nullptr; + PFN_vkCreatePipelineLayout Anvil::Vulkan::vkCreatePipelineLayout = nullptr; + PFN_vkDestroyPipelineLayout Anvil::Vulkan::vkDestroyPipelineLayout = nullptr; + PFN_vkCreateSampler Anvil::Vulkan::vkCreateSampler = nullptr; + PFN_vkDestroySampler Anvil::Vulkan::vkDestroySampler = nullptr; + PFN_vkCreateDescriptorSetLayout Anvil::Vulkan::vkCreateDescriptorSetLayout = nullptr; + PFN_vkDestroyDescriptorSetLayout Anvil::Vulkan::vkDestroyDescriptorSetLayout = nullptr; + PFN_vkCreateDescriptorPool Anvil::Vulkan::vkCreateDescriptorPool = nullptr; + PFN_vkDestroyDescriptorPool Anvil::Vulkan::vkDestroyDescriptorPool = nullptr; + PFN_vkResetDescriptorPool Anvil::Vulkan::vkResetDescriptorPool = nullptr; + PFN_vkAllocateDescriptorSets Anvil::Vulkan::vkAllocateDescriptorSets = nullptr; + PFN_vkFreeDescriptorSets Anvil::Vulkan::vkFreeDescriptorSets = nullptr; + PFN_vkUpdateDescriptorSets Anvil::Vulkan::vkUpdateDescriptorSets = nullptr; + PFN_vkCreateFramebuffer Anvil::Vulkan::vkCreateFramebuffer = nullptr; + 
PFN_vkDestroyFramebuffer Anvil::Vulkan::vkDestroyFramebuffer = nullptr; + PFN_vkCreateRenderPass Anvil::Vulkan::vkCreateRenderPass = nullptr; + PFN_vkDestroyRenderPass Anvil::Vulkan::vkDestroyRenderPass = nullptr; + PFN_vkGetRenderAreaGranularity Anvil::Vulkan::vkGetRenderAreaGranularity = nullptr; + PFN_vkCreateCommandPool Anvil::Vulkan::vkCreateCommandPool = nullptr; + PFN_vkDestroyCommandPool Anvil::Vulkan::vkDestroyCommandPool = nullptr; + PFN_vkResetCommandPool Anvil::Vulkan::vkResetCommandPool = nullptr; + PFN_vkAllocateCommandBuffers Anvil::Vulkan::vkAllocateCommandBuffers = nullptr; + PFN_vkFreeCommandBuffers Anvil::Vulkan::vkFreeCommandBuffers = nullptr; + PFN_vkBeginCommandBuffer Anvil::Vulkan::vkBeginCommandBuffer = nullptr; + PFN_vkEndCommandBuffer Anvil::Vulkan::vkEndCommandBuffer = nullptr; + PFN_vkResetCommandBuffer Anvil::Vulkan::vkResetCommandBuffer = nullptr; + PFN_vkCmdBindPipeline Anvil::Vulkan::vkCmdBindPipeline = nullptr; + PFN_vkCmdSetViewport Anvil::Vulkan::vkCmdSetViewport = nullptr; + PFN_vkCmdSetScissor Anvil::Vulkan::vkCmdSetScissor = nullptr; + PFN_vkCmdSetLineWidth Anvil::Vulkan::vkCmdSetLineWidth = nullptr; + PFN_vkCmdSetDepthBias Anvil::Vulkan::vkCmdSetDepthBias = nullptr; + PFN_vkCmdSetBlendConstants Anvil::Vulkan::vkCmdSetBlendConstants = nullptr; + PFN_vkCmdSetDepthBounds Anvil::Vulkan::vkCmdSetDepthBounds = nullptr; + PFN_vkCmdSetStencilCompareMask Anvil::Vulkan::vkCmdSetStencilCompareMask = nullptr; + PFN_vkCmdSetStencilWriteMask Anvil::Vulkan::vkCmdSetStencilWriteMask = nullptr; + PFN_vkCmdSetStencilReference Anvil::Vulkan::vkCmdSetStencilReference = nullptr; + PFN_vkCmdBindDescriptorSets Anvil::Vulkan::vkCmdBindDescriptorSets = nullptr; + PFN_vkCmdBindIndexBuffer Anvil::Vulkan::vkCmdBindIndexBuffer = nullptr; + PFN_vkCmdBindVertexBuffers Anvil::Vulkan::vkCmdBindVertexBuffers = nullptr; + PFN_vkCmdDraw Anvil::Vulkan::vkCmdDraw = nullptr; + PFN_vkCmdDrawIndexed Anvil::Vulkan::vkCmdDrawIndexed = nullptr; + PFN_vkCmdDrawIndirect Anvil::Vulkan::vkCmdDrawIndirect = nullptr; + PFN_vkCmdDrawIndexedIndirect Anvil::Vulkan::vkCmdDrawIndexedIndirect = nullptr; + PFN_vkCmdDispatch Anvil::Vulkan::vkCmdDispatch = nullptr; + PFN_vkCmdDispatchIndirect Anvil::Vulkan::vkCmdDispatchIndirect = nullptr; + PFN_vkCmdCopyBuffer Anvil::Vulkan::vkCmdCopyBuffer = nullptr; + PFN_vkCmdCopyImage Anvil::Vulkan::vkCmdCopyImage = nullptr; + PFN_vkCmdBlitImage Anvil::Vulkan::vkCmdBlitImage = nullptr; + PFN_vkCmdCopyBufferToImage Anvil::Vulkan::vkCmdCopyBufferToImage = nullptr; + PFN_vkCmdCopyImageToBuffer Anvil::Vulkan::vkCmdCopyImageToBuffer = nullptr; + PFN_vkCmdUpdateBuffer Anvil::Vulkan::vkCmdUpdateBuffer = nullptr; + PFN_vkCmdFillBuffer Anvil::Vulkan::vkCmdFillBuffer = nullptr; + PFN_vkCmdClearColorImage Anvil::Vulkan::vkCmdClearColorImage = nullptr; + PFN_vkCmdClearDepthStencilImage Anvil::Vulkan::vkCmdClearDepthStencilImage = nullptr; + PFN_vkCmdClearAttachments Anvil::Vulkan::vkCmdClearAttachments = nullptr; + PFN_vkCmdResolveImage Anvil::Vulkan::vkCmdResolveImage = nullptr; + PFN_vkCmdSetEvent Anvil::Vulkan::vkCmdSetEvent = nullptr; + PFN_vkCmdResetEvent Anvil::Vulkan::vkCmdResetEvent = nullptr; + PFN_vkCmdWaitEvents Anvil::Vulkan::vkCmdWaitEvents = nullptr; + PFN_vkCmdPipelineBarrier Anvil::Vulkan::vkCmdPipelineBarrier = nullptr; + PFN_vkCmdBeginQuery Anvil::Vulkan::vkCmdBeginQuery = nullptr; + PFN_vkCmdEndQuery Anvil::Vulkan::vkCmdEndQuery = nullptr; + PFN_vkCmdResetQueryPool Anvil::Vulkan::vkCmdResetQueryPool = nullptr; + PFN_vkCmdWriteTimestamp 
Anvil::Vulkan::vkCmdWriteTimestamp = nullptr; + PFN_vkCmdCopyQueryPoolResults Anvil::Vulkan::vkCmdCopyQueryPoolResults = nullptr; + PFN_vkCmdPushConstants Anvil::Vulkan::vkCmdPushConstants = nullptr; + PFN_vkCmdBeginRenderPass Anvil::Vulkan::vkCmdBeginRenderPass = nullptr; + PFN_vkCmdNextSubpass Anvil::Vulkan::vkCmdNextSubpass = nullptr; + PFN_vkCmdEndRenderPass Anvil::Vulkan::vkCmdEndRenderPass = nullptr; + PFN_vkCmdExecuteCommands Anvil::Vulkan::vkCmdExecuteCommands = nullptr; + + PFN_vkBindBufferMemory2 Anvil::Vulkan::vkBindBufferMemory2 = nullptr; + PFN_vkBindImageMemory2 Anvil::Vulkan::vkBindImageMemory2 = nullptr; + PFN_vkCmdDispatchBase Anvil::Vulkan::vkCmdDispatchBase = nullptr; + PFN_vkCmdSetDeviceMask Anvil::Vulkan::vkCmdSetDeviceMask = nullptr; + PFN_vkCreateDescriptorUpdateTemplate Anvil::Vulkan::vkCreateDescriptorUpdateTemplate = nullptr; + PFN_vkCreateSamplerYcbcrConversion Anvil::Vulkan::vkCreateSamplerYcbcrConversion = nullptr; + PFN_vkDestroyDescriptorUpdateTemplate Anvil::Vulkan::vkDestroyDescriptorUpdateTemplate = nullptr; + PFN_vkDestroySamplerYcbcrConversion Anvil::Vulkan::vkDestroySamplerYcbcrConversion = nullptr; + PFN_vkEnumerateInstanceVersion Anvil::Vulkan::vkEnumerateInstanceVersion = nullptr; + PFN_vkEnumeratePhysicalDeviceGroups Anvil::Vulkan::vkEnumeratePhysicalDeviceGroups = nullptr; + PFN_vkGetBufferMemoryRequirements2 Anvil::Vulkan::vkGetBufferMemoryRequirements2 = nullptr; + PFN_vkGetDescriptorSetLayoutSupport Anvil::Vulkan::vkGetDescriptorSetLayoutSupport = nullptr; + PFN_vkGetDeviceGroupPeerMemoryFeatures Anvil::Vulkan::vkGetDeviceGroupPeerMemoryFeatures = nullptr; + PFN_vkGetDeviceQueue2 Anvil::Vulkan::vkGetDeviceQueue2 = nullptr; + PFN_vkGetImageMemoryRequirements2 Anvil::Vulkan::vkGetImageMemoryRequirements2 = nullptr; + PFN_vkGetImageSparseMemoryRequirements2 Anvil::Vulkan::vkGetImageSparseMemoryRequirements2 = nullptr; + PFN_vkGetPhysicalDeviceExternalBufferProperties Anvil::Vulkan::vkGetPhysicalDeviceExternalBufferProperties = nullptr; + PFN_vkGetPhysicalDeviceExternalFenceProperties Anvil::Vulkan::vkGetPhysicalDeviceExternalFenceProperties = nullptr; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties Anvil::Vulkan::vkGetPhysicalDeviceExternalSemaphoreProperties = nullptr; + PFN_vkGetPhysicalDeviceFeatures2 Anvil::Vulkan::vkGetPhysicalDeviceFeatures2 = nullptr; + PFN_vkGetPhysicalDeviceFormatProperties2 Anvil::Vulkan::vkGetPhysicalDeviceFormatProperties2 = nullptr; + PFN_vkGetPhysicalDeviceImageFormatProperties2 Anvil::Vulkan::vkGetPhysicalDeviceImageFormatProperties2 = nullptr; + PFN_vkGetPhysicalDeviceMemoryProperties2 Anvil::Vulkan::vkGetPhysicalDeviceMemoryProperties2 = nullptr; + PFN_vkGetPhysicalDeviceProperties2 Anvil::Vulkan::vkGetPhysicalDeviceProperties2 = nullptr; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 Anvil::Vulkan::vkGetPhysicalDeviceQueueFamilyProperties2 = nullptr; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 Anvil::Vulkan::vkGetPhysicalDeviceSparseImageFormatProperties2 = nullptr; + PFN_vkTrimCommandPool Anvil::Vulkan::vkTrimCommandPool = nullptr; + PFN_vkUpdateDescriptorSetWithTemplate Anvil::Vulkan::vkUpdateDescriptorSetWithTemplate = nullptr; + +#else + PFN_vkCreateInstance Anvil::Vulkan::vkCreateInstance = ::vkCreateInstance; + PFN_vkDestroyInstance Anvil::Vulkan::vkDestroyInstance = ::vkDestroyInstance; + PFN_vkEnumeratePhysicalDevices Anvil::Vulkan::vkEnumeratePhysicalDevices = ::vkEnumeratePhysicalDevices; + PFN_vkGetPhysicalDeviceFeatures Anvil::Vulkan::vkGetPhysicalDeviceFeatures = 
::vkGetPhysicalDeviceFeatures; + PFN_vkGetPhysicalDeviceFormatProperties Anvil::Vulkan::vkGetPhysicalDeviceFormatProperties = ::vkGetPhysicalDeviceFormatProperties; + PFN_vkGetPhysicalDeviceImageFormatProperties Anvil::Vulkan::vkGetPhysicalDeviceImageFormatProperties = ::vkGetPhysicalDeviceImageFormatProperties; + PFN_vkGetPhysicalDeviceProperties Anvil::Vulkan::vkGetPhysicalDeviceProperties = ::vkGetPhysicalDeviceProperties; + PFN_vkGetPhysicalDeviceQueueFamilyProperties Anvil::Vulkan::vkGetPhysicalDeviceQueueFamilyProperties = ::vkGetPhysicalDeviceQueueFamilyProperties; + PFN_vkGetPhysicalDeviceMemoryProperties Anvil::Vulkan::vkGetPhysicalDeviceMemoryProperties = ::vkGetPhysicalDeviceMemoryProperties; + PFN_vkGetInstanceProcAddr Anvil::Vulkan::vkGetInstanceProcAddr = ::vkGetInstanceProcAddr; + PFN_vkGetDeviceProcAddr Anvil::Vulkan::vkGetDeviceProcAddr = ::vkGetDeviceProcAddr; + PFN_vkCreateDevice Anvil::Vulkan::vkCreateDevice = ::vkCreateDevice; + PFN_vkDestroyDevice Anvil::Vulkan::vkDestroyDevice = ::vkDestroyDevice; + PFN_vkEnumerateInstanceExtensionProperties Anvil::Vulkan::vkEnumerateInstanceExtensionProperties = ::vkEnumerateInstanceExtensionProperties; + PFN_vkEnumerateDeviceExtensionProperties Anvil::Vulkan::vkEnumerateDeviceExtensionProperties = ::vkEnumerateDeviceExtensionProperties; + PFN_vkEnumerateInstanceLayerProperties Anvil::Vulkan::vkEnumerateInstanceLayerProperties = ::vkEnumerateInstanceLayerProperties; + PFN_vkEnumerateDeviceLayerProperties Anvil::Vulkan::vkEnumerateDeviceLayerProperties = ::vkEnumerateDeviceLayerProperties; + PFN_vkGetDeviceQueue Anvil::Vulkan::vkGetDeviceQueue = ::vkGetDeviceQueue; + PFN_vkQueueSubmit Anvil::Vulkan::vkQueueSubmit = ::vkQueueSubmit; + PFN_vkQueueWaitIdle Anvil::Vulkan::vkQueueWaitIdle = ::vkQueueWaitIdle; + PFN_vkDeviceWaitIdle Anvil::Vulkan::vkDeviceWaitIdle = ::vkDeviceWaitIdle; + PFN_vkAllocateMemory Anvil::Vulkan::vkAllocateMemory = ::vkAllocateMemory; + PFN_vkFreeMemory Anvil::Vulkan::vkFreeMemory = ::vkFreeMemory; + PFN_vkMapMemory Anvil::Vulkan::vkMapMemory = ::vkMapMemory; + PFN_vkUnmapMemory Anvil::Vulkan::vkUnmapMemory = ::vkUnmapMemory; + PFN_vkFlushMappedMemoryRanges Anvil::Vulkan::vkFlushMappedMemoryRanges = ::vkFlushMappedMemoryRanges; + PFN_vkInvalidateMappedMemoryRanges Anvil::Vulkan::vkInvalidateMappedMemoryRanges = ::vkInvalidateMappedMemoryRanges; + PFN_vkGetDeviceMemoryCommitment Anvil::Vulkan::vkGetDeviceMemoryCommitment = ::vkGetDeviceMemoryCommitment; + PFN_vkBindBufferMemory Anvil::Vulkan::vkBindBufferMemory = ::vkBindBufferMemory; + PFN_vkBindImageMemory Anvil::Vulkan::vkBindImageMemory = ::vkBindImageMemory; + PFN_vkGetBufferMemoryRequirements Anvil::Vulkan::vkGetBufferMemoryRequirements = ::vkGetBufferMemoryRequirements; + PFN_vkGetImageMemoryRequirements Anvil::Vulkan::vkGetImageMemoryRequirements = ::vkGetImageMemoryRequirements; + PFN_vkGetImageSparseMemoryRequirements Anvil::Vulkan::vkGetImageSparseMemoryRequirements = ::vkGetImageSparseMemoryRequirements; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties Anvil::Vulkan::vkGetPhysicalDeviceSparseImageFormatProperties = ::vkGetPhysicalDeviceSparseImageFormatProperties; + PFN_vkQueueBindSparse Anvil::Vulkan::vkQueueBindSparse = ::vkQueueBindSparse; + PFN_vkCreateFence Anvil::Vulkan::vkCreateFence = ::vkCreateFence; + PFN_vkDestroyFence Anvil::Vulkan::vkDestroyFence = ::vkDestroyFence; + PFN_vkResetFences Anvil::Vulkan::vkResetFences = ::vkResetFences; + PFN_vkGetFenceStatus Anvil::Vulkan::vkGetFenceStatus = ::vkGetFenceStatus; + PFN_vkWaitForFences 
Anvil::Vulkan::vkWaitForFences = ::vkWaitForFences; + PFN_vkCreateSemaphore Anvil::Vulkan::vkCreateSemaphore = ::vkCreateSemaphore; + PFN_vkDestroySemaphore Anvil::Vulkan::vkDestroySemaphore = ::vkDestroySemaphore; + PFN_vkCreateEvent Anvil::Vulkan::vkCreateEvent = ::vkCreateEvent; + PFN_vkDestroyEvent Anvil::Vulkan::vkDestroyEvent = ::vkDestroyEvent; + PFN_vkGetEventStatus Anvil::Vulkan::vkGetEventStatus = ::vkGetEventStatus; + PFN_vkSetEvent Anvil::Vulkan::vkSetEvent = ::vkSetEvent; + PFN_vkResetEvent Anvil::Vulkan::vkResetEvent = ::vkResetEvent; + PFN_vkCreateQueryPool Anvil::Vulkan::vkCreateQueryPool = ::vkCreateQueryPool; + PFN_vkDestroyQueryPool Anvil::Vulkan::vkDestroyQueryPool = ::vkDestroyQueryPool; + PFN_vkGetQueryPoolResults Anvil::Vulkan::vkGetQueryPoolResults = ::vkGetQueryPoolResults; + PFN_vkCreateBuffer Anvil::Vulkan::vkCreateBuffer = ::vkCreateBuffer; + PFN_vkDestroyBuffer Anvil::Vulkan::vkDestroyBuffer = ::vkDestroyBuffer; + PFN_vkCreateBufferView Anvil::Vulkan::vkCreateBufferView = ::vkCreateBufferView; + PFN_vkDestroyBufferView Anvil::Vulkan::vkDestroyBufferView = ::vkDestroyBufferView; + PFN_vkCreateImage Anvil::Vulkan::vkCreateImage = ::vkCreateImage; + PFN_vkDestroyImage Anvil::Vulkan::vkDestroyImage = ::vkDestroyImage; + PFN_vkGetImageSubresourceLayout Anvil::Vulkan::vkGetImageSubresourceLayout = ::vkGetImageSubresourceLayout; + PFN_vkCreateImageView Anvil::Vulkan::vkCreateImageView = ::vkCreateImageView; + PFN_vkDestroyImageView Anvil::Vulkan::vkDestroyImageView = ::vkDestroyImageView; + PFN_vkCreateShaderModule Anvil::Vulkan::vkCreateShaderModule = ::vkCreateShaderModule; + PFN_vkDestroyShaderModule Anvil::Vulkan::vkDestroyShaderModule = ::vkDestroyShaderModule; + PFN_vkCreatePipelineCache Anvil::Vulkan::vkCreatePipelineCache = ::vkCreatePipelineCache; + PFN_vkDestroyPipelineCache Anvil::Vulkan::vkDestroyPipelineCache = ::vkDestroyPipelineCache; + PFN_vkGetPipelineCacheData Anvil::Vulkan::vkGetPipelineCacheData = ::vkGetPipelineCacheData; + PFN_vkMergePipelineCaches Anvil::Vulkan::vkMergePipelineCaches = ::vkMergePipelineCaches; + PFN_vkCreateGraphicsPipelines Anvil::Vulkan::vkCreateGraphicsPipelines = ::vkCreateGraphicsPipelines; + PFN_vkCreateComputePipelines Anvil::Vulkan::vkCreateComputePipelines = ::vkCreateComputePipelines; + PFN_vkDestroyPipeline Anvil::Vulkan::vkDestroyPipeline = ::vkDestroyPipeline; + PFN_vkCreatePipelineLayout Anvil::Vulkan::vkCreatePipelineLayout = ::vkCreatePipelineLayout; + PFN_vkDestroyPipelineLayout Anvil::Vulkan::vkDestroyPipelineLayout = ::vkDestroyPipelineLayout; + PFN_vkCreateSampler Anvil::Vulkan::vkCreateSampler = ::vkCreateSampler; + PFN_vkDestroySampler Anvil::Vulkan::vkDestroySampler = ::vkDestroySampler; + PFN_vkCreateDescriptorSetLayout Anvil::Vulkan::vkCreateDescriptorSetLayout = ::vkCreateDescriptorSetLayout; + PFN_vkDestroyDescriptorSetLayout Anvil::Vulkan::vkDestroyDescriptorSetLayout = ::vkDestroyDescriptorSetLayout; + PFN_vkCreateDescriptorPool Anvil::Vulkan::vkCreateDescriptorPool = ::vkCreateDescriptorPool; + PFN_vkDestroyDescriptorPool Anvil::Vulkan::vkDestroyDescriptorPool = ::vkDestroyDescriptorPool; + PFN_vkResetDescriptorPool Anvil::Vulkan::vkResetDescriptorPool = ::vkResetDescriptorPool; + PFN_vkAllocateDescriptorSets Anvil::Vulkan::vkAllocateDescriptorSets = ::vkAllocateDescriptorSets; + PFN_vkFreeDescriptorSets Anvil::Vulkan::vkFreeDescriptorSets = ::vkFreeDescriptorSets; + PFN_vkUpdateDescriptorSets Anvil::Vulkan::vkUpdateDescriptorSets = ::vkUpdateDescriptorSets; + PFN_vkCreateFramebuffer 
Anvil::Vulkan::vkCreateFramebuffer = ::vkCreateFramebuffer; + PFN_vkDestroyFramebuffer Anvil::Vulkan::vkDestroyFramebuffer = ::vkDestroyFramebuffer; + PFN_vkCreateRenderPass Anvil::Vulkan::vkCreateRenderPass = ::vkCreateRenderPass; + PFN_vkDestroyRenderPass Anvil::Vulkan::vkDestroyRenderPass = ::vkDestroyRenderPass; + PFN_vkGetRenderAreaGranularity Anvil::Vulkan::vkGetRenderAreaGranularity = ::vkGetRenderAreaGranularity; + PFN_vkCreateCommandPool Anvil::Vulkan::vkCreateCommandPool = ::vkCreateCommandPool; + PFN_vkDestroyCommandPool Anvil::Vulkan::vkDestroyCommandPool = ::vkDestroyCommandPool; + PFN_vkResetCommandPool Anvil::Vulkan::vkResetCommandPool = ::vkResetCommandPool; + PFN_vkAllocateCommandBuffers Anvil::Vulkan::vkAllocateCommandBuffers = ::vkAllocateCommandBuffers; + PFN_vkFreeCommandBuffers Anvil::Vulkan::vkFreeCommandBuffers = ::vkFreeCommandBuffers; + PFN_vkBeginCommandBuffer Anvil::Vulkan::vkBeginCommandBuffer = ::vkBeginCommandBuffer; + PFN_vkEndCommandBuffer Anvil::Vulkan::vkEndCommandBuffer = ::vkEndCommandBuffer; + PFN_vkResetCommandBuffer Anvil::Vulkan::vkResetCommandBuffer = ::vkResetCommandBuffer; + PFN_vkCmdBindPipeline Anvil::Vulkan::vkCmdBindPipeline = ::vkCmdBindPipeline; + PFN_vkCmdSetViewport Anvil::Vulkan::vkCmdSetViewport = ::vkCmdSetViewport; + PFN_vkCmdSetScissor Anvil::Vulkan::vkCmdSetScissor = ::vkCmdSetScissor; + PFN_vkCmdSetLineWidth Anvil::Vulkan::vkCmdSetLineWidth = ::vkCmdSetLineWidth; + PFN_vkCmdSetDepthBias Anvil::Vulkan::vkCmdSetDepthBias = ::vkCmdSetDepthBias; + PFN_vkCmdSetBlendConstants Anvil::Vulkan::vkCmdSetBlendConstants = ::vkCmdSetBlendConstants; + PFN_vkCmdSetDepthBounds Anvil::Vulkan::vkCmdSetDepthBounds = ::vkCmdSetDepthBounds; + PFN_vkCmdSetStencilCompareMask Anvil::Vulkan::vkCmdSetStencilCompareMask = ::vkCmdSetStencilCompareMask; + PFN_vkCmdSetStencilWriteMask Anvil::Vulkan::vkCmdSetStencilWriteMask = ::vkCmdSetStencilWriteMask; + PFN_vkCmdSetStencilReference Anvil::Vulkan::vkCmdSetStencilReference = ::vkCmdSetStencilReference; + PFN_vkCmdBindDescriptorSets Anvil::Vulkan::vkCmdBindDescriptorSets = ::vkCmdBindDescriptorSets; + PFN_vkCmdBindIndexBuffer Anvil::Vulkan::vkCmdBindIndexBuffer = ::vkCmdBindIndexBuffer; + PFN_vkCmdBindVertexBuffers Anvil::Vulkan::vkCmdBindVertexBuffers = ::vkCmdBindVertexBuffers; + PFN_vkCmdDraw Anvil::Vulkan::vkCmdDraw = ::vkCmdDraw; + PFN_vkCmdDrawIndexed Anvil::Vulkan::vkCmdDrawIndexed = ::vkCmdDrawIndexed; + PFN_vkCmdDrawIndirect Anvil::Vulkan::vkCmdDrawIndirect = ::vkCmdDrawIndirect; + PFN_vkCmdDrawIndexedIndirect Anvil::Vulkan::vkCmdDrawIndexedIndirect = ::vkCmdDrawIndexedIndirect; + PFN_vkCmdDispatch Anvil::Vulkan::vkCmdDispatch = ::vkCmdDispatch; + PFN_vkCmdDispatchIndirect Anvil::Vulkan::vkCmdDispatchIndirect = ::vkCmdDispatchIndirect; + PFN_vkCmdCopyBuffer Anvil::Vulkan::vkCmdCopyBuffer = ::vkCmdCopyBuffer; + PFN_vkCmdCopyImage Anvil::Vulkan::vkCmdCopyImage = ::vkCmdCopyImage; + PFN_vkCmdBlitImage Anvil::Vulkan::vkCmdBlitImage = ::vkCmdBlitImage; + PFN_vkCmdCopyBufferToImage Anvil::Vulkan::vkCmdCopyBufferToImage = ::vkCmdCopyBufferToImage; + PFN_vkCmdCopyImageToBuffer Anvil::Vulkan::vkCmdCopyImageToBuffer = ::vkCmdCopyImageToBuffer; + PFN_vkCmdUpdateBuffer Anvil::Vulkan::vkCmdUpdateBuffer = ::vkCmdUpdateBuffer; + PFN_vkCmdFillBuffer Anvil::Vulkan::vkCmdFillBuffer = ::vkCmdFillBuffer; + PFN_vkCmdClearColorImage Anvil::Vulkan::vkCmdClearColorImage = ::vkCmdClearColorImage; + PFN_vkCmdClearDepthStencilImage Anvil::Vulkan::vkCmdClearDepthStencilImage = ::vkCmdClearDepthStencilImage; + 
PFN_vkCmdClearAttachments Anvil::Vulkan::vkCmdClearAttachments = ::vkCmdClearAttachments; + PFN_vkCmdResolveImage Anvil::Vulkan::vkCmdResolveImage = ::vkCmdResolveImage; + PFN_vkCmdSetEvent Anvil::Vulkan::vkCmdSetEvent = ::vkCmdSetEvent; + PFN_vkCmdResetEvent Anvil::Vulkan::vkCmdResetEvent = ::vkCmdResetEvent; + PFN_vkCmdWaitEvents Anvil::Vulkan::vkCmdWaitEvents = ::vkCmdWaitEvents; + PFN_vkCmdPipelineBarrier Anvil::Vulkan::vkCmdPipelineBarrier = ::vkCmdPipelineBarrier; + PFN_vkCmdBeginQuery Anvil::Vulkan::vkCmdBeginQuery = ::vkCmdBeginQuery; + PFN_vkCmdEndQuery Anvil::Vulkan::vkCmdEndQuery = ::vkCmdEndQuery; + PFN_vkCmdResetQueryPool Anvil::Vulkan::vkCmdResetQueryPool = ::vkCmdResetQueryPool; + PFN_vkCmdWriteTimestamp Anvil::Vulkan::vkCmdWriteTimestamp = ::vkCmdWriteTimestamp; + PFN_vkCmdCopyQueryPoolResults Anvil::Vulkan::vkCmdCopyQueryPoolResults = ::vkCmdCopyQueryPoolResults; + PFN_vkCmdPushConstants Anvil::Vulkan::vkCmdPushConstants = ::vkCmdPushConstants; + PFN_vkCmdBeginRenderPass Anvil::Vulkan::vkCmdBeginRenderPass = ::vkCmdBeginRenderPass; + PFN_vkCmdNextSubpass Anvil::Vulkan::vkCmdNextSubpass = ::vkCmdNextSubpass; + PFN_vkCmdEndRenderPass Anvil::Vulkan::vkCmdEndRenderPass = ::vkCmdEndRenderPass; + PFN_vkCmdExecuteCommands Anvil::Vulkan::vkCmdExecuteCommands = ::vkCmdExecuteCommands; + + PFN_vkBindBufferMemory2 Anvil::Vulkan::vkBindBufferMemory2 = nullptr; + PFN_vkBindImageMemory2 Anvil::Vulkan::vkBindImageMemory2 = nullptr; + PFN_vkCmdDispatchBase Anvil::Vulkan::vkCmdDispatchBase = nullptr; + PFN_vkCmdSetDeviceMask Anvil::Vulkan::vkCmdSetDeviceMask = nullptr; + PFN_vkCreateDescriptorUpdateTemplate Anvil::Vulkan::vkCreateDescriptorUpdateTemplate = nullptr; + PFN_vkCreateSamplerYcbcrConversion Anvil::Vulkan::vkCreateSamplerYcbcrConversion = nullptr; + PFN_vkDestroyDescriptorUpdateTemplate Anvil::Vulkan::vkDestroyDescriptorUpdateTemplate = nullptr; + PFN_vkDestroySamplerYcbcrConversion Anvil::Vulkan::vkDestroySamplerYcbcrConversion = nullptr; + PFN_vkEnumerateInstanceVersion Anvil::Vulkan::vkEnumerateInstanceVersion = nullptr; + PFN_vkEnumeratePhysicalDeviceGroups Anvil::Vulkan::vkEnumeratePhysicalDeviceGroups = nullptr; + PFN_vkGetBufferMemoryRequirements2 Anvil::Vulkan::vkGetBufferMemoryRequirements2 = nullptr; + PFN_vkGetDescriptorSetLayoutSupport Anvil::Vulkan::vkGetDescriptorSetLayoutSupport = nullptr; + PFN_vkGetDeviceGroupPeerMemoryFeatures Anvil::Vulkan::vkGetDeviceGroupPeerMemoryFeatures = nullptr; + PFN_vkGetDeviceQueue2 Anvil::Vulkan::vkGetDeviceQueue2 = nullptr; + PFN_vkGetImageMemoryRequirements2 Anvil::Vulkan::vkGetImageMemoryRequirements2 = nullptr; + PFN_vkGetImageSparseMemoryRequirements2 Anvil::Vulkan::vkGetImageSparseMemoryRequirements2 = nullptr; + PFN_vkGetPhysicalDeviceExternalBufferProperties Anvil::Vulkan::vkGetPhysicalDeviceExternalBufferProperties = nullptr; + PFN_vkGetPhysicalDeviceExternalFenceProperties Anvil::Vulkan::vkGetPhysicalDeviceExternalFenceProperties = nullptr; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties Anvil::Vulkan::vkGetPhysicalDeviceExternalSemaphoreProperties = nullptr; + PFN_vkGetPhysicalDeviceFeatures2 Anvil::Vulkan::vkGetPhysicalDeviceFeatures2 = nullptr; + PFN_vkGetPhysicalDeviceFormatProperties2 Anvil::Vulkan::vkGetPhysicalDeviceFormatProperties2 = nullptr; + PFN_vkGetPhysicalDeviceImageFormatProperties2 Anvil::Vulkan::vkGetPhysicalDeviceImageFormatProperties2 = nullptr; + PFN_vkGetPhysicalDeviceMemoryProperties2 Anvil::Vulkan::vkGetPhysicalDeviceMemoryProperties2 = nullptr; + PFN_vkGetPhysicalDeviceProperties2 
Anvil::Vulkan::vkGetPhysicalDeviceProperties2 = nullptr; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 Anvil::Vulkan::vkGetPhysicalDeviceQueueFamilyProperties2 = nullptr; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 Anvil::Vulkan::vkGetPhysicalDeviceSparseImageFormatProperties2 = nullptr; + PFN_vkTrimCommandPool Anvil::Vulkan::vkTrimCommandPool = nullptr; + PFN_vkUpdateDescriptorSetWithTemplate Anvil::Vulkan::vkUpdateDescriptorSetWithTemplate = nullptr; +#endif diff --git a/src/misc/window.cpp b/src/misc/window.cpp index 1ee05e88..98123052 100644 --- a/src/misc/window.cpp +++ b/src/misc/window.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal diff --git a/src/misc/window_factory.cpp b/src/misc/window_factory.cpp index c07d19df..cb8c642a 100644 --- a/src/misc/window_factory.cpp +++ b/src/misc/window_factory.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -22,14 +22,16 @@ #include "misc/window_factory.h" -std::shared_ptr Anvil::WindowFactory::create_window(WindowPlatform in_platform, - const std::string& in_title, - unsigned int in_width, - unsigned int in_height, - bool in_closable, - PresentCallbackFunction in_present_callback_func) +Anvil::WindowUniquePtr Anvil::WindowFactory::create_window(WindowPlatform in_platform, + const std::string& in_title, + unsigned int in_width, + unsigned int in_height, + bool in_closable, + PresentCallbackFunction in_present_callback_func, + bool in_visible) { - std::shared_ptr result_ptr; + WindowUniquePtr result_ptr(nullptr, + std::default_delete() ); switch (in_platform) { @@ -62,7 +64,8 @@ std::shared_ptr Anvil::WindowFactory::create_window(WindowPlatfor in_width, in_height, in_closable, - in_present_callback_func); + in_present_callback_func, + in_visible); break; } @@ -75,7 +78,8 @@ std::shared_ptr Anvil::WindowFactory::create_window(WindowPlatfor in_width, in_height, in_closable, - in_present_callback_func); + in_present_callback_func, + in_visible); break; } @@ -97,11 +101,12 @@ std::shared_ptr Anvil::WindowFactory::create_window(WindowPlatfor return result_ptr; } -std::shared_ptr Anvil::WindowFactory::create_window(WindowPlatform in_platform, - WindowHandle in_handle, - void* in_xcb_connection_ptr) +Anvil::WindowUniquePtr Anvil::WindowFactory::create_window(WindowPlatform in_platform, + WindowHandle in_handle, + void* in_xcb_connection_ptr) { - std::shared_ptr result_ptr; + WindowUniquePtr result_ptr(nullptr, + std::default_delete() ); /* NOTE: These arguments may not be used at all, depending on ANVIL_INCLUDE_*_WINDOW_SYSTEM_SUPPORT configuration */ ANVIL_REDUNDANT_ARGUMENT(in_handle); diff --git a/src/misc/window_generic.cpp b/src/misc/window_generic.cpp new file mode 100644 index 00000000..72951e16 --- /dev/null +++ b/src/misc/window_generic.cpp @@ -0,0 +1,103 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
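The #if/#else/#endif table that closes above gives Anvil two binding modes for its Anvil::Vulkan::* entrypoints: when linking statically against the Vulkan loader they alias the loader's exports directly, otherwise they start out as nullptr and get resolved at first VkInstance creation time from the library named by ANVIL_VULKAN_DYNAMIC_DLL. A minimal sketch of what that run-time resolution typically looks like (illustrative only, not Anvil's actual loader code; the helper names are made up):

#include <vulkan/vulkan.h>
#ifdef _WIN32
 #include <windows.h>
#else
 #include <dlfcn.h>
#endif

/* Opens the loader library and pulls the one entrypoint everything else can be derived from. */
static PFN_vkGetInstanceProcAddr open_loader_and_fetch_gipa(const char* in_library_name)
{
#ifdef _WIN32
    HMODULE lib_handle = ::LoadLibraryA(in_library_name);

    return (lib_handle != nullptr) ? reinterpret_cast<PFN_vkGetInstanceProcAddr>(::GetProcAddress(lib_handle, "vkGetInstanceProcAddr") )
                                   : nullptr;
#else
    void* lib_handle = ::dlopen(in_library_name, RTLD_NOW);

    return (lib_handle != nullptr) ? reinterpret_cast<PFN_vkGetInstanceProcAddr>(::dlsym(lib_handle, "vkGetInstanceProcAddr") )
                                   : nullptr;
#endif
}

/* Global-level commands can be queried with a null instance; instance- and device-level
 * commands need a live VkInstance (and can be specialized further via vkGetDeviceProcAddr). */
static void resolve_entrypoints_sketch(PFN_vkGetInstanceProcAddr in_gipa,
                                       VkInstance                in_instance)
{
    auto create_instance_func = reinterpret_cast<PFN_vkCreateInstance>(in_gipa(VK_NULL_HANDLE, "vkCreateInstance") );
    auto cmd_draw_func        = reinterpret_cast<PFN_vkCmdDraw>       (in_gipa(in_instance,    "vkCmdDraw") );

    (void) create_instance_func;
    (void) cmd_draw_func;
}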
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+//
+
+#include "misc/window_generic.h"
+#include
+
+/* See create() for documentation */
+Anvil::WindowGeneric::WindowGeneric(Type                           type,
+                                    Handle                         handle,
+                                    const std::string&             in_title,
+                                    Display                        display,
+                                    Connection                     connection,
+                                    unsigned int                   in_width,
+                                    unsigned int                   in_height,
+                                    uint32_t                       fbWidth,
+                                    uint32_t                       fbHeight,
+                                    Anvil::PresentCallbackFunction in_present_callback_func)
+    :Window(in_title,
+            in_width,
+            in_height,
+            true, /* in_closable */
+            in_present_callback_func)
+{
+    m_window_owned = false;
+    m_type         = type;
+    m_display      = display;
+    m_connection   = connection;
+    m_handle       = handle;
+    m_fbWidth      = fbWidth;
+    m_fbHeight     = fbHeight;
+}
+
+/** Please see header for specification */
+Anvil::WindowUniquePtr Anvil::WindowGeneric::create(Type       type,
+                                                    Handle     handle,
+                                                    Display    display,
+                                                    Connection connection,
+                                                    uint32_t   width,
+                                                    uint32_t   height,
+                                                    bool       visible,
+                                                    uint32_t   fbWidth,
+                                                    uint32_t   fbHeight)
+{
+    WindowUniquePtr result_ptr(nullptr,
+                               std::default_delete<Window>() );
+    uint32_t        window_size[2] = {0};
+
+    window_size[0] = width;
+    window_size[1] = height;
+
+    /* Go ahead and create the window wrapper instance */
+    result_ptr.reset(
+        new Anvil::WindowGeneric(type,
+                                 handle,
+                                 "Generic wrapper window instance",
+                                 display,
+                                 connection,
+                                 window_size[0],
+                                 window_size[1],
+                                 fbWidth,
+                                 fbHeight,
+                                 nullptr) /* present_callback_func_ptr */
+    );
+
+    if (result_ptr)
+    {
+        if (!dynamic_cast<WindowGeneric*>(result_ptr.get() )->init(visible))
+        {
+            result_ptr.reset();
+        }
+    }
+
+    return result_ptr;
+}
+
+void Anvil::WindowGeneric::close() {}
+
+/** Prepares the wrapper for usage. The underlying window is owned by the caller,
+ *  so there is no system window to create here. */
+bool Anvil::WindowGeneric::init(const bool& in_visible)
+{
+    ANVIL_REDUNDANT_ARGUMENT(in_visible);
+
+    return true;
+}
+
+void Anvil::WindowGeneric::run() {}
+
+void Anvil::WindowGeneric::set_title(const std::string& in_new_title) {}
diff --git a/src/misc/window_win3264.cpp b/src/misc/window_win3264.cpp
index f27ddac6..6c410cfb 100644
--- a/src/misc/window_win3264.cpp
+++ b/src/misc/window_win3264.cpp
@@ -1,5 +1,5 @@
 //
-// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved.
+// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved.
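Taken together with the window_factory.cpp changes earlier in this patch, windows are now handed out as Anvil::WindowUniquePtr and can be created without ever being shown. A caller-side sketch (the platform enum value, title and dimensions are illustrative; the parameter list, including the trailing in_visible flag, is the one this patch introduces):

#include "misc/window_factory.h"

Anvil::WindowUniquePtr spawn_offscreen_window_sketch()
{
    Anvil::PresentCallbackFunction present_callback_func; /* bind the per-frame draw function here */

    return Anvil::WindowFactory::create_window(Anvil::WINDOW_PLATFORM_SYSTEM,
                                               "Anvil example window",
                                               1280,                  /* in_width    */
                                               720,                   /* in_height   */
                                               true,                  /* in_closable */
                                               present_callback_func,
                                               false);                /* in_visible - created, but never shown */
}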
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -58,23 +58,25 @@ Anvil::WindowWin3264::WindowWin3264(HWND in_handle, } /** Please see header for specification */ -std::shared_ptr Anvil::WindowWin3264::create(const std::string& in_title, - unsigned int in_width, - unsigned int in_height, - bool in_closable, - Anvil::PresentCallbackFunction in_present_callback_func) +Anvil::WindowUniquePtr Anvil::WindowWin3264::create(const std::string& in_title, + unsigned int in_width, + unsigned int in_height, + bool in_closable, + Anvil::PresentCallbackFunction in_present_callback_func, + bool in_visible) { - std::shared_ptr result_ptr( + WindowUniquePtr result_ptr( new Anvil::WindowWin3264(in_title, in_width, in_height, in_closable, - in_present_callback_func) + in_present_callback_func), + std::default_delete() ); if (result_ptr) { - if (!result_ptr->init() ) + if (!dynamic_cast(result_ptr.get() )->init(in_visible) ) { result_ptr.reset(); } @@ -84,13 +86,12 @@ std::shared_ptr Anvil::WindowWin3264::create(const std::string& } /** Please see header for specification */ -std::shared_ptr Anvil::WindowWin3264::create(HWND in_window_handle) +Anvil::WindowUniquePtr Anvil::WindowWin3264::create(HWND in_window_handle) { - std::shared_ptr result_ptr; - RECT window_rect; - uint32_t window_size[2] = {0}; - std::vector window_title; - uint32_t window_title_length = 0; + WindowUniquePtr result_ptr (nullptr, + std::default_delete() ); + RECT window_rect; + uint32_t window_size[2] = {0}; /* The window has already been spawned by the user. Gather all the info we need in order to instantiate * the wrapper instance. @@ -113,21 +114,10 @@ std::shared_ptr Anvil::WindowWin3264::create(HWND in_window_handl window_size[0] = static_cast(window_rect.right - window_rect.left); window_size[1] = static_cast(window_rect.bottom - window_rect.top); - window_title_length = static_cast(::GetWindowTextLength(in_window_handle) ); - - if (window_title_length != 0) - { - window_title.resize(window_title_length); - - ::GetWindowText(in_window_handle, - static_cast(&window_title.at(0) ), - static_cast (window_title_length) ); - } - /* Go ahead and create the window wrapper instance */ result_ptr.reset( new Anvil::WindowWin3264(in_window_handle, - std::string(&window_title.at(0) ), + "HWND wrapper window instance", window_size[0], window_size[1], nullptr) /* present_callback_func_ptr */ @@ -135,7 +125,7 @@ std::shared_ptr Anvil::WindowWin3264::create(HWND in_window_handl if (result_ptr) { - if (!result_ptr->init() ) + if (!dynamic_cast(result_ptr.get() )->init(::IsWindowVisible(in_window_handle) == TRUE)) { result_ptr.reset(); } @@ -163,12 +153,11 @@ void Anvil::WindowWin3264::close() } /** Creates a new system window and prepares it for usage. */ -bool Anvil::WindowWin3264::init() +bool Anvil::WindowWin3264::init(const bool& in_visible) { - static volatile uint32_t n_windows_spawned = 0; - bool result = false; - const char* window_class_name = (m_closable) ? "Anvil window (closable)" - : "Anvil window (non-closable)"; + bool result = false; + const char* window_class_name = (m_closable) ? "Anvil window (closable)" + : "Anvil window (non-closable)"; if (m_window_owned) { @@ -226,10 +215,12 @@ bool Anvil::WindowWin3264::init() } /* NOTE: Anvil currently does not support automatic swapchain resizing so make sure the window is not scalable. */ + const auto visibility_flag = (in_visible) ? 
WS_VISIBLE : 0; + m_window = ::CreateWindowEx(0, /* dwExStyle */ window_class_name, m_title.c_str(), - (WS_OVERLAPPEDWINDOW | WS_VISIBLE | WS_SYSMENU) ^ WS_THICKFRAME, + (WS_OVERLAPPEDWINDOW | visibility_flag | WS_SYSMENU) ^ WS_THICKFRAME, 0, /* X */ 0, /* Y */ window_rect.right - window_rect.left, diff --git a/src/misc/window_xcb.cpp b/src/misc/window_xcb.cpp index 8784e764..3ff62468 100644 --- a/src/misc/window_xcb.cpp +++ b/src/misc/window_xcb.cpp @@ -40,13 +40,15 @@ typedef struct /** Please see header for specification */ -std::shared_ptr Anvil::WindowXcb::create(const std::string& in_title, - unsigned int in_width, - unsigned int in_height, - bool in_closable, - Anvil::PresentCallbackFunction in_present_callback_func) +Anvil::WindowUniquePtr Anvil::WindowXcb::create(const std::string& in_title, + unsigned int in_width, + unsigned int in_height, + bool in_closable, + Anvil::PresentCallbackFunction in_present_callback_func, + bool in_visible) { - std::shared_ptr result_ptr; + WindowUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( new Anvil::WindowXcb(in_title, @@ -58,7 +60,7 @@ std::shared_ptr Anvil::WindowXcb::create(const std::string& if (result_ptr) { - if (!result_ptr->init() ) + if (!dynamic_cast(result_ptr.get() )->init(in_visible) ) { result_ptr.reset(); } @@ -67,10 +69,11 @@ std::shared_ptr Anvil::WindowXcb::create(const std::string& return result_ptr; } -std::shared_ptr Anvil::WindowXcb::create(xcb_connection_t* in_connection_ptr, - WindowHandle in_window_handle) +Anvil::WindowUniquePtr Anvil::WindowXcb::create(xcb_connection_t* in_connection_ptr, + WindowHandle in_window_handle) { - std::shared_ptr result_ptr; + WindowUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( new Anvil::WindowXcb(in_connection_ptr, @@ -79,7 +82,7 @@ std::shared_ptr Anvil::WindowXcb::create(xcb_connection_t* in_con if (result_ptr) { - if (!result_ptr->init() ) + if (!dynamic_cast(result_ptr.get() )->init(true /* in_visible */) ) { result_ptr.reset(); } @@ -158,7 +161,7 @@ void Anvil::WindowXcb::close() } /** Creates a new system window and prepares it for usage. */ -bool Anvil::WindowXcb::init() +bool Anvil::WindowXcb::init(const bool& in_visible) { bool result = false; const XCBLoaderFuncs* xcb_procs_ptr = nullptr; @@ -280,9 +283,13 @@ bool Anvil::WindowXcb::init() free(atom_reply_ptr); - xcb_procs_ptr->pfn_xcbMapWindow(m_connection_ptr, - m_window); - xcb_procs_ptr->pfn_xcbFlush (m_connection_ptr); + if (in_visible) + { + xcb_procs_ptr->pfn_xcbMapWindow(m_connection_ptr, + m_window); + } + + xcb_procs_ptr->pfn_xcbFlush(m_connection_ptr); m_atom_wm_delete_window_ptr = atom_wm_delete_window_ptr; m_key_symbols = xcb_procs_ptr->pfn_xcbKeySymbolsAlloc(m_connection_ptr); @@ -367,7 +374,7 @@ void Anvil::WindowXcb::run() { case XCB_CLIENT_MESSAGE: { - if (reinterpret_cast(event_ptr)->data.data32[0] == m_atom_wm_delete_window_ptr->atom && + if ((reinterpret_cast(event_ptr)->data.data32[0] == m_atom_wm_delete_window_ptr->atom) && m_closable) { running = false; @@ -431,5 +438,7 @@ void Anvil::WindowXcb::run() } } + close(); + m_window_close_finished = true; } diff --git a/src/wrappers/buffer.cpp b/src/wrappers/buffer.cpp index b9780725..e2569b71 100644 --- a/src/wrappers/buffer.cpp +++ b/src/wrappers/buffer.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
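The in_visible plumbing above boils down to two platform primitives: on Windows the WS_VISIBLE style bit is simply left out, and under XCB the window is only mapped when it is meant to be shown. A stand-alone sketch with raw platform calls (illustrative only; Anvil issues these through its own wrappers and its XCB loader table):

#ifdef _WIN32
 #include <windows.h>

HWND create_window_sketch(bool in_visible)
{
    const DWORD visibility_flag = in_visible ? WS_VISIBLE : 0;

    /* Without WS_VISIBLE the window exists but stays hidden until ShowWindow() is called. */
    return ::CreateWindowExA(0,                 /* dwExStyle                               */
                             "STATIC",          /* any registered class works for a sketch */
                             "Anvil window",
                             (WS_OVERLAPPEDWINDOW | visibility_flag | WS_SYSMENU) ^ WS_THICKFRAME,
                             0,                 /* X       */
                             0,                 /* Y       */
                             1280,              /* nWidth  */
                             720,               /* nHeight */
                             nullptr,           /* hWndParent */
                             nullptr,           /* hMenu      */
                             ::GetModuleHandleA(nullptr),
                             nullptr);          /* lpParam */
}
#else
 #include <xcb/xcb.h>

xcb_window_t create_window_sketch(xcb_connection_t* in_connection_ptr,
                                  xcb_screen_t*     in_screen_ptr,
                                  bool              in_visible)
{
    xcb_window_t window = xcb_generate_id(in_connection_ptr);

    xcb_create_window(in_connection_ptr,
                      XCB_COPY_FROM_PARENT,          /* depth         */
                      window,
                      in_screen_ptr->root,           /* parent        */
                      0,    0,                       /* x, y          */
                      1280, 720,                     /* width, height */
                      0,                             /* border_width  */
                      XCB_WINDOW_CLASS_INPUT_OUTPUT,
                      in_screen_ptr->root_visual,
                      0,                             /* value_mask    */
                      nullptr);                      /* value_list    */

    /* An unmapped window never becomes visible; mapping it later is the XCB analogue of ShowWindow(). */
    if (in_visible)
    {
        xcb_map_window(in_connection_ptr, window);
    }

    xcb_flush(in_connection_ptr);

    return window;
}
#endif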
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -20,8 +20,10 @@ // THE SOFTWARE. // +#include "misc/buffer_create_info.h" #include "misc/debug.h" #include "misc/object_tracker.h" +#include "misc/struct_chainer.h" #include "wrappers/buffer.h" #include "wrappers/command_buffer.h" #include "wrappers/command_pool.h" @@ -31,106 +33,36 @@ #include "wrappers/physical_device.h" #include "wrappers/queue.h" -/* Please see header for specification */ -Anvil::Buffer::Buffer(std::weak_ptr in_device_ptr, - VkDeviceSize in_size, - QueueFamilyBits in_queue_families, - VkSharingMode in_queue_sharing_mode, - VkBufferUsageFlags in_usage_flags, - Anvil::SparseResidencyScope in_residency_scope) +Anvil::Buffer::Buffer(Anvil::BufferCreateInfoUniquePtr in_create_info_ptr) :CallbacksSupportProvider (BUFFER_CALLBACK_ID_COUNT), - DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT), - m_buffer (VK_NULL_HANDLE), - m_buffer_size (in_size), - m_create_flags (0), - m_device_ptr (in_device_ptr), - m_is_sparse (true), - m_parent_buffer_ptr(0), - m_queue_families (in_queue_families), - m_residency_scope (in_residency_scope), - m_sharing_mode (in_queue_sharing_mode), - m_start_offset (0) + DebugMarkerSupportProvider(in_create_info_ptr->get_device(), + Anvil::ObjectType::BUFFER), + MTSafetySupportProvider (Anvil::Utils::convert_mt_safety_enum_to_boolean(in_create_info_ptr->get_mt_safety(), + in_create_info_ptr->get_device () )), + m_buffer (VK_NULL_HANDLE), + m_memory_block_ptr (nullptr), + m_prefers_dedicated_allocation (false), + m_requires_dedicated_allocation (false), + m_staging_buffer_queue_ptr (nullptr) { - switch (in_residency_scope) + if (in_create_info_ptr->get_type() == BufferType::NO_ALLOC) { - case Anvil::SPARSE_RESIDENCY_SCOPE_ALIASED: m_create_flags = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT | VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT | VK_BUFFER_CREATE_SPARSE_BINDING_BIT; break; - case Anvil::SPARSE_RESIDENCY_SCOPE_NONALIASED: m_create_flags = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT | VK_BUFFER_CREATE_SPARSE_BINDING_BIT; break; - case Anvil::SPARSE_RESIDENCY_SCOPE_NONE: m_create_flags = VK_BUFFER_CREATE_SPARSE_BINDING_BIT; break; - - default: + if (((in_create_info_ptr->get_memory_features() & Anvil::MemoryFeatureFlagBits::MAPPABLE_BIT) == 0) || + ((in_create_info_ptr->get_create_flags () & Anvil::BufferCreateFlagBits::SPARSE_ALIASED_BIT) == 0) || + ((in_create_info_ptr->get_create_flags () & Anvil::BufferCreateFlagBits::SPARSE_RESIDENCY_BIT) == 0)) { - anvil_assert_fail(); - } - } - - /* Assume the user may try to bind memory from non-mappable memory heap, in which case - * we're going to need to copy data from a staging buffer to this buffer if the user - * ever uses write(), and vice versa. */ - m_usage_flags = static_cast(in_usage_flags | - VK_BUFFER_USAGE_TRANSFER_DST_BIT | - VK_BUFFER_USAGE_TRANSFER_SRC_BIT); -} + /* For host->gpu writes to work in this case, we will need the buffer to work as a target + * for buffer->buffer copy operations. Same goes for the other way around. 
+ */ + auto usage_flags = in_create_info_ptr->get_usage_flags(); -/* Please see header for specification */ -Anvil::Buffer::Buffer(std::weak_ptr in_device_ptr, - VkDeviceSize in_size, - Anvil::QueueFamilyBits in_queue_families, - VkSharingMode in_queue_sharing_mode, - VkBufferUsageFlags in_usage_flags, - Anvil::MemoryFeatureFlags in_memory_features) - :CallbacksSupportProvider (BUFFER_CALLBACK_ID_COUNT), - DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT), - m_buffer (VK_NULL_HANDLE), - m_buffer_size (in_size), - m_create_flags (0), - m_device_ptr (in_device_ptr), - m_is_sparse (false), - m_parent_buffer_ptr(0), - m_queue_families (in_queue_families), - m_residency_scope (Anvil::SPARSE_RESIDENCY_SCOPE_UNDEFINED), - m_sharing_mode (in_queue_sharing_mode), - m_start_offset (0), - m_usage_flags (static_cast(in_usage_flags) ) -{ - /* Sanity checks */ - if ((in_memory_features & MEMORY_FEATURE_FLAG_MAPPABLE) == 0) - { - anvil_assert((in_memory_features & MEMORY_FEATURE_FLAG_HOST_COHERENT) == 0) + usage_flags |= Anvil::BufferUsageFlagBits::TRANSFER_DST_BIT | Anvil::BufferUsageFlagBits::TRANSFER_SRC_BIT; - /* For host->gpu writes to work in this case, we will need the buffer to work as a target - * for buffer->buffer copy operations. Same goes for the other way around. - */ - m_usage_flags |= static_cast(m_usage_flags | VK_BUFFER_USAGE_TRANSFER_DST_BIT - | VK_BUFFER_USAGE_TRANSFER_SRC_BIT); + in_create_info_ptr->set_usage_flags(usage_flags); + } } -} - -/* Please see header for specification */ -Anvil::Buffer::Buffer(std::shared_ptr in_parent_buffer_ptr, - VkDeviceSize in_start_offset, - VkDeviceSize in_size) - :CallbacksSupportProvider (BUFFER_CALLBACK_ID_COUNT), - DebugMarkerSupportProvider(in_parent_buffer_ptr->m_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT), - m_buffer (VK_NULL_HANDLE), - m_buffer_size (in_size), - m_create_flags (0), - m_is_sparse (false), - m_parent_buffer_ptr(in_parent_buffer_ptr), - m_residency_scope (Anvil::SPARSE_RESIDENCY_SCOPE_UNDEFINED), - m_sharing_mode (VK_SHARING_MODE_MAX_ENUM), - m_start_offset (in_start_offset) -{ - /* Sanity checks */ - anvil_assert(in_parent_buffer_ptr != nullptr); - anvil_assert(in_size > 0); - - anvil_assert(!in_parent_buffer_ptr->is_sparse() ); - m_create_flags = in_parent_buffer_ptr->m_create_flags; - m_usage_flags = in_parent_buffer_ptr->m_usage_flags; + m_create_info_ptr = std::move(in_create_info_ptr); } @@ -138,364 +70,552 @@ Anvil::Buffer::Buffer(std::shared_ptr in_parent_buffer_ptr, Anvil::Buffer::~Buffer() { /* Unregister the object */ - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_BUFFER, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::BUFFER, this); - if (m_buffer != VK_NULL_HANDLE && - m_parent_buffer_ptr == nullptr) + if (m_buffer != VK_NULL_HANDLE && + m_create_info_ptr->get_parent_buffer_ptr() == nullptr) { - std::shared_ptr device_locked_ptr(m_device_ptr); - - vkDestroyBuffer(device_locked_ptr->get_device_vk(), - m_buffer, - nullptr /* pAllocator */); + lock(); + { + Anvil::Vulkan::vkDestroyBuffer(m_device_ptr->get_device_vk(), + m_buffer, + nullptr /* pAllocator */); + } + unlock(); m_buffer = VK_NULL_HANDLE; } - - m_parent_buffer_ptr = nullptr; } -/** Please see header for specification */ -std::shared_ptr Anvil::Buffer::create_nonsparse(std::weak_ptr in_device_ptr, - VkDeviceSize in_size, - QueueFamilyBits in_queue_families, - VkSharingMode in_queue_sharing_mode, - VkBufferUsageFlags in_usage_flags) +Anvil::BufferUniquePtr 
Anvil::Buffer::create(Anvil::BufferCreateInfoUniquePtr in_create_info_ptr) { - std::shared_ptr new_buffer_ptr; + Anvil::BufferUniquePtr new_buffer_ptr(nullptr, + std::default_delete() ); new_buffer_ptr.reset( - new Anvil::Buffer(in_device_ptr, - in_size, - in_queue_families, - in_queue_sharing_mode, - in_usage_flags, - 0) /* in_memory_features */ + new Anvil::Buffer(std::move(in_create_info_ptr) ) ); - /* Initialize */ - new_buffer_ptr->create_buffer(in_queue_families, - in_queue_sharing_mode, - in_size); + if (new_buffer_ptr != nullptr) + { + if (!new_buffer_ptr->init() ) + { + new_buffer_ptr.reset(); + } + } /* Register the object */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_BUFFER, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::BUFFER, new_buffer_ptr.get() ); return new_buffer_ptr; } -/** Please see header for specification */ -std::shared_ptr Anvil::Buffer::create_nonsparse(std::weak_ptr in_device_ptr, - VkDeviceSize in_size, - QueueFamilyBits in_queue_families, - VkSharingMode in_queue_sharing_mode, - VkBufferUsageFlags in_usage_flags, - Anvil::MemoryFeatureFlags in_memory_features, - const void* in_opt_client_data) +/* Please see header for specification */ +const Anvil::Buffer* Anvil::Buffer::get_base_buffer() { - std::shared_ptr new_buffer_ptr; - - new_buffer_ptr.reset( - new Anvil::Buffer(in_device_ptr, - in_size, - in_queue_families, - in_queue_sharing_mode, - in_usage_flags, - in_memory_features) - ); - - /* Initialize */ - new_buffer_ptr->create_buffer(in_queue_families, - in_queue_sharing_mode, - in_size); + const Anvil::Buffer* result_ptr = this; + Anvil::Buffer* parent_ptr = nullptr; - /* Create a memory object and preallocate as much space as we need */ + if ( (parent_ptr = result_ptr->get_create_info_ptr()->get_parent_buffer_ptr()) != nullptr) { - std::shared_ptr memory_block_ptr; + result_ptr = parent_ptr; + } - memory_block_ptr = Anvil::MemoryBlock::create(in_device_ptr, - new_buffer_ptr->m_buffer_memory_reqs.memoryTypeBits, - new_buffer_ptr->m_buffer_memory_reqs.size, - in_memory_features); + return result_ptr; +} - new_buffer_ptr->set_nonsparse_memory(memory_block_ptr); +/* Please see header for specification */ +VkBuffer Anvil::Buffer::get_buffer(const bool& in_bake_memory_if_necessary) +{ + const auto& create_flags = get_create_info_ptr()->get_create_flags(); + const bool is_sparse = ((create_flags & Anvil::BufferCreateFlagBits::SPARSE_ALIASED_BIT) != 0) || + ((create_flags & Anvil::BufferCreateFlagBits::SPARSE_BINDING_BIT) != 0) || + ((create_flags & Anvil::BufferCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0); - if (in_opt_client_data != nullptr) + if (!is_sparse) + { + if (in_bake_memory_if_necessary && + m_memory_block_ptr == nullptr) { - new_buffer_ptr->write(0, - in_size, - in_opt_client_data); + get_memory_block(0 /* in_n_memory_block */); } } - /* Register the object */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_BUFFER, - new_buffer_ptr.get() ); - - return new_buffer_ptr; + return m_buffer; } -/** Please see header for specification */ -std::shared_ptr Anvil::Buffer::create_nonsparse(std::shared_ptr in_parent_nonsparse_buffer_ptr, - VkDeviceSize in_start_offset, - VkDeviceSize in_size) +/* Please see header for specification */ +Anvil::MemoryBlock* Anvil::Buffer::get_memory_block(uint32_t in_n_memory_block) { - std::shared_ptr new_buffer_ptr; - std::shared_ptr new_mem_block_ptr; + const auto& create_flags = get_create_info_ptr()->get_create_flags(); + bool is_callback_needed = false; + const bool 
is_sparse = ((create_flags & Anvil::BufferCreateFlagBits::SPARSE_ALIASED_BIT) != 0) || + ((create_flags & Anvil::BufferCreateFlagBits::SPARSE_BINDING_BIT) != 0) || + ((create_flags & Anvil::BufferCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0); - if (in_parent_nonsparse_buffer_ptr->is_sparse() ) + if (is_sparse) { - anvil_assert(!in_parent_nonsparse_buffer_ptr->is_sparse() ); + IsBufferMemoryAllocPendingQueryCallbackArgument callback_arg(this); - goto end; + callback(BUFFER_CALLBACK_ID_IS_ALLOC_PENDING, + &callback_arg); + + is_callback_needed = callback_arg.result; + } + else + { + is_callback_needed = (m_memory_block_ptr == nullptr); } - new_buffer_ptr.reset( - new Anvil::Buffer(in_parent_nonsparse_buffer_ptr, - in_start_offset, - in_size) - ); + if (is_callback_needed) + { + OnMemoryBlockNeededForBufferCallbackArgument callback_argument(this); - /* Initialize */ + anvil_assert(m_create_info_ptr->get_parent_buffer_ptr() == nullptr); - anvil_assert(in_parent_nonsparse_buffer_ptr->get_memory_block(0 /* in_n_memory_block */) != nullptr); + callback_safe(BUFFER_CALLBACK_ID_MEMORY_BLOCK_NEEDED, + &callback_argument); + } - new_buffer_ptr->m_buffer = in_parent_nonsparse_buffer_ptr->m_buffer; - new_buffer_ptr->m_memory_block_ptr = Anvil::MemoryBlock::create_derived (in_parent_nonsparse_buffer_ptr->get_memory_block(0 /* in_n_memory_block */), - in_start_offset, - in_size); - new_buffer_ptr->m_queue_families = in_parent_nonsparse_buffer_ptr->get_queue_families(); - new_buffer_ptr->m_sharing_mode = in_parent_nonsparse_buffer_ptr->get_sharing_mode (); - new_buffer_ptr->m_usage_flags = in_parent_nonsparse_buffer_ptr->get_usage (); + if (is_sparse) + { + return m_page_tracker_ptr->get_memory_block(in_n_memory_block); + } + else + { + return m_memory_block_ptr; + } +} - /* Register the object */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_BUFFER, - new_buffer_ptr.get() ); +VkMemoryRequirements Anvil::Buffer::get_memory_requirements() const +{ + auto parent_buffer_ptr = m_create_info_ptr->get_parent_buffer_ptr(); -end: - return new_buffer_ptr; + if (parent_buffer_ptr != nullptr) + { + return parent_buffer_ptr->get_memory_requirements(); + } + else + { + return m_buffer_memory_reqs; + } } -/** Please see header for specification */ -std::shared_ptr Anvil::Buffer::create_sparse(std::weak_ptr in_device_ptr, - VkDeviceSize in_size, - QueueFamilyBits in_queue_families, - VkSharingMode in_queue_sharing_mode, - VkBufferUsageFlags in_usage_flags, - Anvil::SparseResidencyScope in_residency_scope) +uint32_t Anvil::Buffer::get_n_memory_blocks() const { - std::shared_ptr device_locked_ptr (in_device_ptr); - const VkPhysicalDeviceFeatures& physical_device_features (device_locked_ptr->get_physical_device_features() ); - std::shared_ptr result_ptr; + const auto& create_flags = get_create_info_ptr()->get_create_flags(); + const bool is_sparse = ((create_flags & Anvil::BufferCreateFlagBits::SPARSE_ALIASED_BIT) != 0) || + ((create_flags & Anvil::BufferCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0); - /* Sanity checks */ - if (!physical_device_features.sparseBinding) + if (is_sparse) { - anvil_assert(physical_device_features.sparseBinding); - - goto end; + return m_page_tracker_ptr->get_n_memory_blocks(); } - - if ((in_residency_scope == Anvil::SPARSE_RESIDENCY_SCOPE_ALIASED || - in_residency_scope == Anvil::SPARSE_RESIDENCY_SCOPE_NONALIASED) && - !physical_device_features.sparseResidencyBuffer) + else { - anvil_assert((in_residency_scope == Anvil::SPARSE_RESIDENCY_SCOPE_ALIASED || - in_residency_scope == 
Anvil::SPARSE_RESIDENCY_SCOPE_NONALIASED) && - physical_device_features.sparseResidencyBuffer); + return 1; + } +} - goto end; +bool Anvil::Buffer::init() +{ + uint32_t n_queue_family_indices; + uint32_t queue_family_indices [8]; + VkResult result (VK_ERROR_INITIALIZATION_FAILED); + Anvil::StructChainer struct_chainer; + bool use_dedicated_allocation(false); + + if ( m_create_info_ptr->get_client_data () != nullptr && + (m_create_info_ptr->get_memory_features() & Anvil::MemoryFeatureFlagBits::MAPPABLE_BIT) == 0) + { + m_create_info_ptr->set_usage_flags(m_create_info_ptr->get_usage_flags() | Anvil::BufferUsageFlagBits::TRANSFER_DST_BIT); } - if ( in_residency_scope == Anvil::SPARSE_RESIDENCY_SCOPE_ALIASED && - !physical_device_features.sparseResidencyAliased) + if (m_create_info_ptr->get_type() != BufferType::NO_ALLOC_CHILD) { - anvil_assert(in_residency_scope == Anvil::SPARSE_RESIDENCY_SCOPE_ALIASED && - physical_device_features.sparseResidencyAliased); + /* Determine which queues the buffer should be available to. */ + Anvil::Utils::convert_queue_family_bits_to_family_indices(m_device_ptr, + m_create_info_ptr->get_queue_families(), + queue_family_indices, + &n_queue_family_indices); - goto end; - } + anvil_assert(n_queue_family_indices > 0); + anvil_assert(n_queue_family_indices < sizeof(queue_family_indices) / sizeof(queue_family_indices[0]) ); - result_ptr.reset( - new Anvil::Buffer(in_device_ptr, - in_size, - in_queue_families, - in_queue_sharing_mode, - in_usage_flags, - in_residency_scope) - ); + /* Prepare the create info structure */ + { + VkBufferCreateInfo buffer_create_info; + + buffer_create_info.flags = m_create_info_ptr->get_create_flags().get_vk(); + buffer_create_info.pNext = nullptr; + buffer_create_info.pQueueFamilyIndices = queue_family_indices; + buffer_create_info.queueFamilyIndexCount = n_queue_family_indices; + buffer_create_info.sharingMode = static_cast(m_create_info_ptr->get_sharing_mode() ); + buffer_create_info.size = m_create_info_ptr->get_size(); + buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; + buffer_create_info.usage = m_create_info_ptr->get_usage_flags().get_vk(); + + struct_chainer.append_struct(buffer_create_info); + } - /* Initialize */ - result_ptr->create_buffer(in_queue_families, - in_queue_sharing_mode, - in_size); + { + const auto& external_memory_handle_types = m_create_info_ptr->get_exportable_external_memory_handle_types(); - result_ptr->m_page_tracker_ptr.reset( - new Anvil::PageTracker(Anvil::Utils::round_up(in_size, - result_ptr->m_buffer_memory_reqs.alignment), - result_ptr->m_buffer_memory_reqs.alignment) - ); + if (external_memory_handle_types != 0) + { + VkExternalMemoryBufferCreateInfoKHR external_memory_buffer_create_info; - /* Register the object */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_BUFFER, - result_ptr.get() ); + external_memory_buffer_create_info.handleTypes = external_memory_handle_types.get_vk(); + external_memory_buffer_create_info.pNext = nullptr; + external_memory_buffer_create_info.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR; -end: - return result_ptr; -} + struct_chainer.append_struct(external_memory_buffer_create_info); + } + } -/* Creates a new Vulkan buffer object and caches memory requirements for the created buffer. - * - * @param in_queue_families Queue families the buffer needs to support. - * @param in_sharing_mode Sharing mode the buffer needs to support. - * @param in_size Size of the buffer. 
- **/ -void Anvil::Buffer::create_buffer(Anvil::QueueFamilyBits in_queue_families, - VkSharingMode in_sharing_mode, - VkDeviceSize in_size) -{ - VkBufferCreateInfo buffer_create_info; - std::shared_ptr device_locked_ptr(m_device_ptr); - uint32_t n_queue_family_indices; - uint32_t queue_family_indices[8]; - VkResult result(VK_ERROR_INITIALIZATION_FAILED); - - ANVIL_REDUNDANT_VARIABLE(result); - - /* Determine which queues the buffer should be available to. */ - Anvil::Utils::convert_queue_family_bits_to_family_indices(m_device_ptr, - in_queue_families, - queue_family_indices, - &n_queue_family_indices); - - anvil_assert(n_queue_family_indices > 0); - anvil_assert(n_queue_family_indices < sizeof(queue_family_indices) / sizeof(queue_family_indices[0]) ); - - /* Prepare the create info structure */ - buffer_create_info.flags = m_create_flags; - buffer_create_info.pNext = nullptr; - buffer_create_info.pQueueFamilyIndices = queue_family_indices; - buffer_create_info.queueFamilyIndexCount = n_queue_family_indices; - buffer_create_info.sharingMode = (n_queue_family_indices == 1) ? VK_SHARING_MODE_EXCLUSIVE : in_sharing_mode; - buffer_create_info.size = in_size; - buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; - buffer_create_info.usage = m_usage_flags; - - /* Create the buffer object */ - result = vkCreateBuffer(device_locked_ptr->get_device_vk(), - &buffer_create_info, - nullptr, /* pAllocator */ - &m_buffer); - anvil_assert_vk_call_succeeded(result); - - if (is_vk_call_successful(result) ) - { - set_vk_handle(m_buffer); - - /* Cache buffer data memory requirements */ - vkGetBufferMemoryRequirements(device_locked_ptr->get_device_vk(), - m_buffer, - &m_buffer_memory_reqs); - } -} + /* Create the buffer object */ + { + auto struct_chain_ptr = struct_chainer.create_chain(); -/* Please see header for specification */ -std::shared_ptr Anvil::Buffer::get_base_buffer() -{ - std::shared_ptr parent_ptr; - std::shared_ptr result_ptr = shared_from_this(); + result = Anvil::Vulkan::vkCreateBuffer(m_device_ptr->get_device_vk(), + struct_chain_ptr->get_root_struct(), + nullptr, /* pAllocator */ + &m_buffer); + } - if ( (parent_ptr = result_ptr->get_parent_buffer_ptr()) != nullptr) - { - result_ptr = parent_ptr; + anvil_assert_vk_call_succeeded(result); + if (is_vk_call_successful(result) ) + { + set_vk_handle(m_buffer); + + /* Cache buffer data memory requirements. + * + * Prefer facility exposed by VK_KHR_get_memory_requirements2, unless the extension is unavailable. 
+ */ + if (m_device_ptr->get_extension_info()->khr_get_memory_requirements2() ) + { + Anvil::StructID dedicated_reqs_struct_id; + const auto gmr2_entrypoints = m_device_ptr->get_extension_khr_get_memory_requirements2_entrypoints(); + VkBufferMemoryRequirementsInfo2KHR info; + const bool khr_dedicated_allocation_available = m_device_ptr->get_extension_info()->khr_dedicated_allocation(); + Anvil::StructChainUniquePtr result_reqs_chain_ptr; + VkMemoryRequirements2KHR* result_reqs_chain_raw_ptr = nullptr; + Anvil::StructChainer result_reqs_chainer; + + info.buffer = m_buffer; + info.pNext = nullptr; + info.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR; + + { + VkMemoryRequirements2KHR reqs; + + reqs.pNext = nullptr; + reqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR; + + result_reqs_chainer.append_struct(reqs); + } + + if (khr_dedicated_allocation_available) + { + VkMemoryDedicatedRequirementsKHR dedicated_reqs; + + dedicated_reqs.pNext = nullptr; + dedicated_reqs.prefersDedicatedAllocation = VK_FALSE; + dedicated_reqs.requiresDedicatedAllocation = VK_FALSE; + dedicated_reqs.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR; + + dedicated_reqs_struct_id = result_reqs_chainer.append_struct(dedicated_reqs); + } + + result_reqs_chain_ptr = result_reqs_chainer.create_chain(); + anvil_assert(result_reqs_chain_ptr != nullptr); + + result_reqs_chain_raw_ptr = result_reqs_chain_ptr->get_root_struct(); + anvil_assert(result_reqs_chain_raw_ptr != nullptr); + + gmr2_entrypoints.vkGetBufferMemoryRequirements2KHR(m_device_ptr->get_device_vk(), + &info, + result_reqs_chain_raw_ptr); + + m_buffer_memory_reqs = result_reqs_chain_raw_ptr->memoryRequirements; + + if (khr_dedicated_allocation_available) + { + const auto dedicated_alloc_info_ptr = result_reqs_chain_ptr->get_struct_with_id(dedicated_reqs_struct_id); + + m_prefers_dedicated_allocation = (dedicated_alloc_info_ptr->prefersDedicatedAllocation == VK_TRUE); + m_requires_dedicated_allocation = (dedicated_alloc_info_ptr->requiresDedicatedAllocation == VK_TRUE); + + use_dedicated_allocation = m_requires_dedicated_allocation; + } + } + else + { + Anvil::Vulkan::vkGetBufferMemoryRequirements(m_device_ptr->get_device_vk(), + m_buffer, + &m_buffer_memory_reqs); + } + } } + else + { + m_buffer = m_create_info_ptr->get_parent_buffer_ptr()->m_buffer; - return result_ptr; -} + anvil_assert(m_buffer != VK_NULL_HANDLE); + if (m_buffer != VK_NULL_HANDLE) + { + result = VK_SUCCESS; + } + } -/* Please see header for specification */ -VkBuffer Anvil::Buffer::get_buffer() -{ - if (!m_is_sparse) + switch (m_create_info_ptr->get_type() ) { - if (m_memory_block_ptr == nullptr) + case Anvil::BufferType::ALLOC: { - get_memory_block(0 /* in_n_memory_block */); + /* Create a memory object and preallocate as much space as we need */ + auto client_data_ptr = m_create_info_ptr->get_client_data(); + Anvil::MemoryBlockUniquePtr memory_block_ptr; + + { + auto create_info_ptr = Anvil::MemoryBlockCreateInfo::create_regular(m_create_info_ptr->get_device(), + m_buffer_memory_reqs.memoryTypeBits, + m_buffer_memory_reqs.size, + m_create_info_ptr->get_memory_features() ); + + create_info_ptr->set_mt_safety(m_create_info_ptr->get_mt_safety() ); + + if (use_dedicated_allocation) + { + create_info_ptr->use_dedicated_allocation(this, + nullptr); /* in_opt_image_ptr */ + } + + memory_block_ptr = Anvil::MemoryBlock::create(std::move(create_info_ptr) ); + } + + if (!set_nonsparse_memory( std::move(memory_block_ptr) )) + { + anvil_assert_fail(); + + result = 
VK_ERROR_INITIALIZATION_FAILED; + + goto end; + } + + if (client_data_ptr != nullptr) + { + if (!write(0, /* in_start_offset */ + m_create_info_ptr->get_size(), + client_data_ptr) ) + { + anvil_assert_fail(); + + result = VK_ERROR_INITIALIZATION_FAILED; + + goto end; + } + } + + break; + } + + case Anvil::BufferType::NO_ALLOC_CHILD: + { + Anvil::MemoryBlockUniquePtr mem_block_ptr; + + { + auto create_info_ptr = Anvil::MemoryBlockCreateInfo::create_derived(m_create_info_ptr->get_parent_buffer_ptr()->get_memory_block(0 /* in_n_memory_block */), + m_create_info_ptr->get_start_offset(), + m_create_info_ptr->get_size() ); + + mem_block_ptr = Anvil::MemoryBlock::create(std::move(create_info_ptr) ); + + if (use_dedicated_allocation) + { + create_info_ptr->use_dedicated_allocation(this, + nullptr); /* in_opt_image_ptr */ + } + } + + anvil_assert(!is_memory_block_owned(mem_block_ptr.get())); + + m_owned_memory_blocks.push_back(std::move(mem_block_ptr) ); + + m_memory_block_ptr = m_owned_memory_blocks.back().get(); + anvil_assert(m_memory_block_ptr != nullptr); + + break; + } + + case BufferType::NO_ALLOC: + { + const auto& create_flags = get_create_info_ptr()->get_create_flags(); + const bool is_sparse = ((create_flags & Anvil::BufferCreateFlagBits::SPARSE_ALIASED_BIT) != 0) || + ((create_flags & Anvil::BufferCreateFlagBits::SPARSE_BINDING_BIT) != 0) || + ((create_flags & Anvil::BufferCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0); + + if (is_sparse) + { + m_page_tracker_ptr.reset( + new Anvil::PageTracker(Anvil::Utils::round_up(m_create_info_ptr->get_size(), + m_buffer_memory_reqs.alignment), + m_buffer_memory_reqs.alignment) + ); + } + + /* No special action needed */ + break; + } + + default: + { + anvil_assert_fail(); } } - return m_buffer; +end: + return is_vk_call_successful(result); } -/* Please see header for specification */ -std::shared_ptr Anvil::Buffer::get_memory_block(uint32_t in_n_memory_block) +/* TODO */ +bool Anvil::Buffer::init_staging_buffer(const VkDeviceSize& in_size, + Anvil::Queue* in_opt_queue_ptr) { - bool is_callback_needed = false; + Anvil::PrimaryCommandBufferUniquePtr copy_cmdbuf_ptr; + std::vector helper_vec; + const auto queue_fams = m_create_info_ptr->get_queue_families(); + Anvil::QueueFamilyFlagBits staging_buffer_queue_fam_bits = Anvil::QueueFamilyFlagBits::NONE; - if (m_is_sparse) + m_staging_buffer_ptr.reset(); + m_staging_buffer_queue_ptr = nullptr; + + if (m_create_info_ptr->get_sharing_mode() == Anvil::SharingMode::EXCLUSIVE) { - IsBufferMemoryAllocPendingQueryCallbackArgument callback_arg(shared_from_this() ); + /* We need to use a user-specified queue, since we can't tell which queue fam the buffer is currently configured to be compatible with. + * + * This can be worked around if the buffer can only be used with a specific queue family type. 
+ */ + if (Anvil::Utils::is_pow2(queue_fams.get_vk() ) ) + { + switch (queue_fams.get_vk() ) + { + case static_cast(Anvil::QueueFamilyFlagBits::COMPUTE_BIT): m_staging_buffer_queue_ptr = m_device_ptr->get_compute_queue (0); break; + case static_cast(Anvil::QueueFamilyFlagBits::DMA_BIT): m_staging_buffer_queue_ptr = m_device_ptr->get_transfer_queue (0); break; + case static_cast(Anvil::QueueFamilyFlagBits::GRAPHICS_BIT): m_staging_buffer_queue_ptr = m_device_ptr->get_universal_queue(0); break; - callback(BUFFER_CALLBACK_ID_IS_ALLOC_PENDING, - &callback_arg); + default: + { + anvil_assert_fail(); + } + } + } + else + { + anvil_assert(in_opt_queue_ptr != nullptr); - is_callback_needed = callback_arg.result; + m_staging_buffer_queue_ptr = in_opt_queue_ptr; + } + + anvil_assert(m_staging_buffer_queue_ptr != nullptr); + + switch (m_device_ptr->get_queue_family_type(m_staging_buffer_queue_ptr->get_queue_family_index() ) ) + { + case Anvil::QueueFamilyType::COMPUTE: staging_buffer_queue_fam_bits = Anvil::QueueFamilyFlagBits::COMPUTE_BIT; break; + case Anvil::QueueFamilyType::TRANSFER: staging_buffer_queue_fam_bits = Anvil::QueueFamilyFlagBits::DMA_BIT; break; + case Anvil::QueueFamilyType::UNIVERSAL: staging_buffer_queue_fam_bits = Anvil::QueueFamilyFlagBits::GRAPHICS_BIT; break; + + default: + { + anvil_assert_fail(); + } + } } else { - is_callback_needed = (m_memory_block_ptr == nullptr); + /* We can use any queue from the list of queue fams this buffer is compatible with, in order to perform the copy op. */ + if ((queue_fams & Anvil::QueueFamilyFlagBits::GRAPHICS_BIT) != 0) + { + m_staging_buffer_queue_ptr = m_device_ptr->get_universal_queue(0); + staging_buffer_queue_fam_bits = Anvil::QueueFamilyFlagBits::GRAPHICS_BIT; + } + else + if ((queue_fams & Anvil::QueueFamilyFlagBits::DMA_BIT) != 0) + { + m_staging_buffer_queue_ptr = m_device_ptr->get_transfer_queue(0); + staging_buffer_queue_fam_bits = Anvil::QueueFamilyFlagBits::DMA_BIT; + } + else + { + anvil_assert((queue_fams & Anvil::QueueFamilyFlagBits::COMPUTE_BIT) != 0) + + m_staging_buffer_queue_ptr = m_device_ptr->get_compute_queue(0); + staging_buffer_queue_fam_bits = Anvil::QueueFamilyFlagBits::COMPUTE_BIT; + } } - if (is_callback_needed) + if (m_staging_buffer_ptr == nullptr || + m_staging_buffer_ptr->get_create_info_ptr()->get_size() < in_size) { - OnMemoryBlockNeededForBufferCallbackArgument callback_argument(shared_from_this() ); + { + const auto sharing_mode = Anvil::Utils::is_pow2(static_cast(staging_buffer_queue_fam_bits) ) ? 
Anvil::SharingMode::EXCLUSIVE + : Anvil::SharingMode::CONCURRENT; - anvil_assert(m_parent_buffer_ptr == nullptr); + auto create_info_ptr = Anvil::BufferCreateInfo::create_alloc(m_device_ptr, + in_size, + staging_buffer_queue_fam_bits, + sharing_mode, + Anvil::BufferCreateFlagBits::NONE, + Anvil::BufferUsageFlagBits::TRANSFER_DST_BIT | Anvil::BufferUsageFlagBits::TRANSFER_SRC_BIT, + Anvil::MemoryFeatureFlagBits::MAPPABLE_BIT); - callback_safe(BUFFER_CALLBACK_ID_MEMORY_BLOCK_NEEDED, - &callback_argument); - } + create_info_ptr->set_mt_safety (Anvil::MTSafety::DISABLED); - if (m_is_sparse) - { - return m_page_tracker_ptr->get_memory_block(in_n_memory_block); - } - else - { - return m_memory_block_ptr; - } - -} + m_staging_buffer_ptr = Anvil::Buffer::create(std::move(create_info_ptr) ); + } -/* Please see header for specification */ -VkDeviceSize Anvil::Buffer::get_size() const -{ - anvil_assert(m_buffer_size != 0); + if (m_staging_buffer_ptr == nullptr) + { + anvil_assert(m_staging_buffer_ptr != nullptr); + } + } - return m_buffer_size; + return (m_staging_buffer_ptr != nullptr); } /* Please see header for specification */ -VkDeviceSize Anvil::Buffer::get_start_offset() const +bool Anvil::Buffer::read(VkDeviceSize in_start_offset, + VkDeviceSize in_size, + void* out_result_ptr) { - return m_start_offset; + return read(in_start_offset, + in_size, + UINT32_MAX, /* in_device_mask */ + out_result_ptr); } /* Please see header for specification */ bool Anvil::Buffer::read(VkDeviceSize in_start_offset, VkDeviceSize in_size, + uint32_t in_device_mask, void* out_result_ptr) { - std::shared_ptr device_locked_ptr(m_device_ptr); - auto memory_block_ptr (get_memory_block(0 /* in_n_memory_block */) ); - bool result (false); + const Anvil::DeviceType device_type (m_device_ptr->get_type() ); + auto memory_block_ptr (get_memory_block(0 /* in_n_memory_block */) ); + bool result (false); - /* TODO: Support for sparse buffers */ - anvil_assert(!is_sparse()); + /* TODO: Complete support for sparsely-bound & sparse-resident buffers */ + anvil_assert((m_create_info_ptr->get_create_flags() & Anvil::BufferCreateFlagBits::SPARSE_RESIDENCY_BIT) == 0); - if ((memory_block_ptr->get_memory_features() & MEMORY_FEATURE_FLAG_MAPPABLE) != 0) + if ((m_create_info_ptr->get_create_flags() & Anvil::BufferCreateFlagBits::SPARSE_BINDING_BIT) != 0) + { + anvil_assert(m_page_tracker_ptr->get_n_memory_blocks () == 1); + anvil_assert(m_page_tracker_ptr->get_n_pages_with_memory_backing() == m_page_tracker_ptr->get_n_pages() ); + } + + + if ((memory_block_ptr->get_create_info_ptr()->get_memory_features() & Anvil::MemoryFeatureFlagBits::MAPPABLE_BIT) != 0) { result = memory_block_ptr->read(in_start_offset, in_size, @@ -505,61 +625,83 @@ bool Anvil::Buffer::read(VkDeviceSize in_start_offset, { /* The buffer memory is not mappable. We need to create a staging buffer, * do a non-mappable->mappable memory copy, and then read back data from the mappable buffer. */ - const uint32_t n_transfer_queues = device_locked_ptr->get_n_transfer_queues(); - std::shared_ptr queue_ptr = (n_transfer_queues > 0) ? device_locked_ptr->get_transfer_queue (0) - : device_locked_ptr->get_universal_queue(0); - std::shared_ptr staging_buffer_ptr; - const Anvil::QueueFamilyBits staging_buffer_queue_fam_bits = (n_transfer_queues > 0) ? Anvil::QUEUE_FAMILY_DMA_BIT : Anvil::QUEUE_FAMILY_GRAPHICS_BIT; - const Anvil::QueueFamilyType staging_buffer_queue_fam_type = (n_transfer_queues > 0) ? 
Anvil::QUEUE_FAMILY_TYPE_TRANSFER : Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL; + Anvil::PrimaryCommandBufferUniquePtr copy_cmdbuf_ptr; + + if (m_staging_buffer_ptr == nullptr || + m_staging_buffer_ptr->get_create_info_ptr()->get_size() < in_size) + { + if (!init_staging_buffer(in_size, + nullptr) ) /* in_opt_queue_ptr */ + { + result = false; - std::shared_ptr copy_cmdbuf_ptr = device_locked_ptr->get_command_pool(staging_buffer_queue_fam_type)->alloc_primary_level_command_buffer(); + goto end; + } + } - if (copy_cmdbuf_ptr == nullptr) + if (m_staging_buffer_ptr == nullptr) { - anvil_assert(copy_cmdbuf_ptr != nullptr); + anvil_assert(m_staging_buffer_ptr != nullptr); goto end; } - staging_buffer_ptr = Anvil::Buffer::create_nonsparse( - m_device_ptr, - in_size, - staging_buffer_queue_fam_bits, - VK_SHARING_MODE_CONCURRENT, - VK_BUFFER_USAGE_TRANSFER_DST_BIT, - MEMORY_FEATURE_FLAG_MAPPABLE, - nullptr); + copy_cmdbuf_ptr = m_device_ptr->get_command_pool_for_queue_family_index(m_staging_buffer_queue_ptr->get_queue_family_index() )->alloc_primary_level_command_buffer(); - if (staging_buffer_ptr == nullptr) + if (copy_cmdbuf_ptr == nullptr) { - anvil_assert(staging_buffer_ptr != nullptr); + anvil_assert(copy_cmdbuf_ptr != nullptr); goto end; + } + if (device_type == Anvil::DeviceType::SINGLE_GPU) + { + copy_cmdbuf_ptr->start_recording(true, /* one_time_submit */ + false); /* simultaneous_use_allowed */ + } + else + { + anvil_assert(device_type == Anvil::DeviceType::MULTI_GPU); + anvil_assert(Utils::count_set_bits(in_device_mask) == 1); - copy_cmdbuf_ptr->start_recording(true, /* one_time_submit */ - false); /* simultaneous_use_allowed */ + copy_cmdbuf_ptr->start_recording(true, /* one_time_submit */ + false, /* simultaneous_use_allowed */ + in_device_mask); /* in_opt_device_mask */ + } { - Anvil::BufferBarrier buffer_barrier(VK_ACCESS_TRANSFER_WRITE_BIT, - VK_ACCESS_HOST_READ_BIT, + Anvil::BufferBarrier buffer_barrier(Anvil::AccessFlagBits::TRANSFER_WRITE_BIT, + Anvil::AccessFlagBits::HOST_READ_BIT, VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, - staging_buffer_ptr, + m_staging_buffer_ptr.get(), 0, /* in_offset */ - staging_buffer_ptr->get_size() ); - VkBufferCopy copy_region; - - copy_region.dstOffset = 0; - copy_region.size = in_size; - copy_region.srcOffset = in_start_offset; - - copy_cmdbuf_ptr->record_copy_buffer (shared_from_this(), - staging_buffer_ptr, + in_size); + Anvil::BufferCopy copy_region; + Anvil::MemoryBarrierAnv pre_copy_barrier(Anvil::AccessFlagBits::TRANSFER_READ_BIT, /* in_destination_access_mask */ + Anvil::AccessFlagBits::HOST_WRITE_BIT | Anvil::AccessFlagBits::MEMORY_WRITE_BIT | Anvil::AccessFlagBits::SHADER_WRITE_BIT | Anvil::AccessFlagBits::TRANSFER_WRITE_BIT); + + copy_region.dst_offset = 0; + copy_region.size = in_size; + copy_region.src_offset = in_start_offset; + + copy_cmdbuf_ptr->record_pipeline_barrier(Anvil::PipelineStageFlagBits::ALL_COMMANDS_BIT, /* in_src_stage_mask */ + Anvil::PipelineStageFlagBits::TRANSFER_BIT, /* in_dst_stage_mask */ + Anvil::DependencyFlagBits::NONE, + 1, /* in_memory_barrier_count */ + &pre_copy_barrier, + 0, /* in_buffer_memory_barrier_count */ + nullptr, /* in_buffer_memory_barriers_ptr */ + 0, /* in_image_memory_barrier_count */ + nullptr); /* in_image_memory_barriers_ptr */ + + copy_cmdbuf_ptr->record_copy_buffer (this, + m_staging_buffer_ptr.get(), 1, /* in_region_count */ &copy_region); - copy_cmdbuf_ptr->record_pipeline_barrier(VK_PIPELINE_STAGE_TRANSFER_BIT, - VK_PIPELINE_STAGE_HOST_BIT, - VK_FALSE, /* in_by_region */ +
copy_cmdbuf_ptr->record_pipeline_barrier(Anvil::PipelineStageFlagBits::TRANSFER_BIT, + Anvil::PipelineStageFlagBits::HOST_BIT, + Anvil::DependencyFlagBits::NONE, 0, /* in_memory_barrier_count */ nullptr, /* in_memory_barriers_ptr */ 1, /* in_buffer_memory_barrier_count */ @@ -569,26 +711,45 @@ bool Anvil::Buffer::read(VkDeviceSize in_start_offset, } copy_cmdbuf_ptr->stop_recording(); - queue_ptr->submit_command_buffer(copy_cmdbuf_ptr, - true /* should_block */); + if (device_type == Anvil::DeviceType::SINGLE_GPU) + { + m_staging_buffer_queue_ptr->submit( + Anvil::SubmitInfo::create_execute(copy_cmdbuf_ptr.get(), + true /* should_block */) + ); + } + else + { + Anvil::CommandBufferMGPUSubmission cmd_buffer_submission; + + anvil_assert((in_device_mask != 0) && (in_device_mask != UINT32_MAX)); + + cmd_buffer_submission.cmd_buffer_ptr = copy_cmdbuf_ptr.get(); + cmd_buffer_submission.device_mask = in_device_mask; + + m_staging_buffer_queue_ptr->submit( + Anvil::SubmitInfo::create_execute(&cmd_buffer_submission, + 1, /* in_n_command_buffer_submissions */ + true /* should_block */) + ); + } - result = staging_buffer_ptr->read(0, - in_size, - out_result_ptr); + result = m_staging_buffer_ptr->read(0, + in_size, + out_result_ptr); } end: return result; } -/* Please see header for specification */ -bool Anvil::Buffer::set_nonsparse_memory(std::shared_ptr in_memory_block_ptr) +bool Anvil::Buffer::set_memory_nonsparse_internal(MemoryBlockUniquePtr in_memory_block_ptr, + uint32_t in_n_device_group_indices, + const uint32_t* in_device_group_indices_ptr) { - std::shared_ptr device_locked_ptr(m_device_ptr); - bool result (false); - VkResult result_vk; - std::shared_ptr sgpu_device_locked_ptr; - std::weak_ptr sgpu_physical_device_ptr; + const Anvil::DeviceType device_type (m_device_ptr->get_type() ); + bool result (false); + VkResult result_vk; if (in_memory_block_ptr == nullptr) { @@ -604,21 +765,62 @@ bool Anvil::Buffer::set_nonsparse_memory(std::shared_ptr in_ goto end; } - if (m_is_sparse) + if (((m_create_info_ptr->get_create_flags() & Anvil::BufferCreateFlagBits::SPARSE_ALIASED_BIT) != 0) || + ((m_create_info_ptr->get_create_flags() & Anvil::BufferCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0) ) { - anvil_assert(!m_is_sparse); + anvil_assert(((m_create_info_ptr->get_create_flags() & Anvil::BufferCreateFlagBits::SPARSE_ALIASED_BIT) == 0) && + ((m_create_info_ptr->get_create_flags() & Anvil::BufferCreateFlagBits::SPARSE_RESIDENCY_BIT) == 0) ); goto end; } - sgpu_device_locked_ptr = std::shared_ptr(std::dynamic_pointer_cast(device_locked_ptr) ); - sgpu_physical_device_ptr = sgpu_device_locked_ptr->get_physical_device(); - /* Bind the memory object to the buffer object */ - result_vk = vkBindBufferMemory(device_locked_ptr->get_device_vk(), - m_buffer, - in_memory_block_ptr->get_memory (), - in_memory_block_ptr->get_start_offset() ); + if (device_type == Anvil::DeviceType::SINGLE_GPU) + { + lock(); + { + result_vk = Anvil::Vulkan::vkBindBufferMemory(m_device_ptr->get_device_vk(), + m_buffer, + in_memory_block_ptr->get_memory (), + in_memory_block_ptr->get_start_offset() ); + } + unlock(); + } + else + { + StructChainUniquePtr bind_info_struct_chain_ptr; + Anvil::StructChainer bind_info_struct_chainer; + + const Anvil::MGPUDevice* mgpu_device_ptr(dynamic_cast(m_device_ptr) ); + const auto& entrypoints (mgpu_device_ptr->get_extension_khr_bind_memory2_entrypoints() ); + + anvil_assert(device_type == Anvil::DeviceType::MULTI_GPU); + + { + VkBindBufferMemoryInfoKHR bind_info; + VkBindBufferMemoryDeviceGroupInfoKHR 
bind_info_dg; + + bind_info_dg.deviceIndexCount = in_n_device_group_indices; + bind_info_dg.pDeviceIndices = in_device_group_indices_ptr; + bind_info_dg.pNext = nullptr; + bind_info_dg.sType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR; + + bind_info.buffer = m_buffer; + bind_info.memory = in_memory_block_ptr->get_memory (); + bind_info.memoryOffset = in_memory_block_ptr->get_start_offset(); + bind_info.pNext = nullptr; + bind_info.sType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR; + + bind_info_struct_chainer.append_struct(bind_info); + bind_info_struct_chainer.append_struct(bind_info_dg); + } + + bind_info_struct_chain_ptr = bind_info_struct_chainer.create_chain(); + + result_vk = entrypoints.vkBindBufferMemory2KHR(m_device_ptr->get_device_vk(), + 1, /* bindInfoCount */ + bind_info_struct_chain_ptr ->get_root_struct() ); + } if (!is_vk_call_successful(result_vk) ) { @@ -628,131 +830,334 @@ bool Anvil::Buffer::set_nonsparse_memory(std::shared_ptr in_ } /* All done */ - m_memory_block_ptr = in_memory_block_ptr; + m_memory_block_ptr = in_memory_block_ptr.get(); result = true; + + anvil_assert(!is_memory_block_owned(in_memory_block_ptr.get())); + + m_owned_memory_blocks.push_back( + std::move(in_memory_block_ptr) + ); + end: return result; } /* Please see header for specification */ -bool Anvil::Buffer::set_memory_sparse(std::shared_ptr in_memory_block_ptr, - VkDeviceSize in_memory_block_start_offset, - VkDeviceSize in_start_offset, - VkDeviceSize in_size) +bool Anvil::Buffer::set_memory_sparse(MemoryBlock* in_memory_block_ptr, + bool in_memory_block_owned_by_buffer, + VkDeviceSize in_memory_block_start_offset, + VkDeviceSize in_start_offset, + VkDeviceSize in_size) { - anvil_assert(m_is_sparse); + anvil_assert((m_create_info_ptr->get_create_flags() & Anvil::BufferCreateFlagBits::SPARSE_BINDING_BIT) != 0); + + if (m_page_tracker_ptr->set_binding(in_memory_block_ptr, + in_memory_block_start_offset, + in_start_offset, + in_size) ) + { + if (in_memory_block_owned_by_buffer && !is_memory_block_owned(in_memory_block_ptr)) + { + MemoryBlockUniquePtr mem_block_ptr(in_memory_block_ptr, + std::default_delete() ); + + m_owned_memory_blocks.push_back( + std::move(mem_block_ptr) + ); + } - return m_page_tracker_ptr->set_binding(in_memory_block_ptr, - in_memory_block_start_offset, - in_start_offset, - in_size); + return true; + } + else + { + return false; + } } /* Please see header for specification */ -bool Anvil::Buffer::write(VkDeviceSize in_start_offset, - VkDeviceSize in_size, - const void* in_data, - std::shared_ptr in_opt_queue_ptr) +bool Anvil::Buffer::set_nonsparse_memory(MemoryBlockUniquePtr in_memory_block_ptr) { - std::shared_ptr base_device_locked_ptr(m_device_ptr); - bool result (false); + MemoryBlock* memory_block_raw_ptr = in_memory_block_ptr.release(); - /** TODO: Support for sparse-resident buffers whose n_memory_blocks > 1 */ - std::shared_ptr memory_block_ptr(get_memory_block(0) ); + return set_nonsparse_memory(memory_block_raw_ptr, + true); +} + +bool Anvil::Buffer::set_nonsparse_memory(MemoryBlock* in_memory_block_ptr, + bool in_memory_block_owned_by_buffer) +{ + MemoryBlockUniquePtr memory_block_ptr; - if (m_is_sparse) + if (in_memory_block_owned_by_buffer) { - anvil_assert(m_page_tracker_ptr->get_n_memory_blocks() == 1); + memory_block_ptr = MemoryBlockUniquePtr(in_memory_block_ptr, + std::default_delete() ); + } + else + { + memory_block_ptr = MemoryBlockUniquePtr(in_memory_block_ptr, + [](MemoryBlock*) + { + /* Stub */ + }); } - anvil_assert(memory_block_ptr 
!= nullptr); - anvil_assert(memory_block_ptr->get_size() >= in_size); + return set_memory_nonsparse_internal(std::move(memory_block_ptr), + 0, /* n_physical_devices */ + nullptr); /* physical_devices_ptr */ +} + +/* Please see header for specification */ +bool Anvil::Buffer::set_nonsparse_memory(MemoryBlockUniquePtr in_memory_block_ptr, + uint32_t in_n_device_group_indices, + const uint32_t* in_device_group_indices_ptr) +{ + MemoryBlockUniquePtr memory_block_ptr = MemoryBlockUniquePtr(in_memory_block_ptr.release(), + std::default_delete() ); - if ((memory_block_ptr->get_memory_features() & MEMORY_FEATURE_FLAG_MAPPABLE) != 0) + return set_memory_nonsparse_internal(std::move(memory_block_ptr), + in_n_device_group_indices, + in_device_group_indices_ptr); +} + +/* Please see header for specification */ +bool Anvil::Buffer::set_nonsparse_memory(MemoryBlock* in_memory_block_ptr, + bool in_memory_block_owned_by_buffer, + uint32_t in_n_device_group_indices, + const uint32_t* in_device_group_indices_ptr) +{ + MemoryBlockUniquePtr memory_block_ptr; + + if (in_memory_block_owned_by_buffer) { - result = memory_block_ptr->write(in_start_offset, - in_size, - in_data); + memory_block_ptr = MemoryBlockUniquePtr(in_memory_block_ptr, + std::default_delete() ); } else { - /* The buffer memory is not mappable. We need to create a staging memory, - * upload user's data there, and then issue a copy op. */ - std::shared_ptr copy_cmdbuf_ptr; - std::shared_ptr queue_ptr; - std::shared_ptr staging_buffer_ptr; - Anvil::QueueFamilyBits staging_buffer_queue_fam_bits = 0; - Anvil::QueueFamilyType staging_buffer_queue_fam_type = Anvil::QUEUE_FAMILY_TYPE_UNDEFINED; + memory_block_ptr = MemoryBlockUniquePtr(in_memory_block_ptr, + [](MemoryBlock*) + { + /* Stub */ + }); + } - if (m_sharing_mode == VK_SHARING_MODE_EXCLUSIVE) - { - /* We need to use a user-specified queue, since we can't tell which queue fam the buffer is currently configured to be compatible with. - * - * This can be worked around if the buffer can only be used with a specific queue family type. 
- */ - if (Anvil::Utils::is_pow2(m_queue_families) ) - { - switch (m_queue_families) - { - case Anvil::QUEUE_FAMILY_COMPUTE_BIT: queue_ptr = base_device_locked_ptr->get_compute_queue (0); break; - case Anvil::QUEUE_FAMILY_DMA_BIT: queue_ptr = base_device_locked_ptr->get_transfer_queue (0); break; - case Anvil::QUEUE_FAMILY_GRAPHICS_BIT: queue_ptr = base_device_locked_ptr->get_universal_queue(0); break; - - default: - { - anvil_assert_fail(); - } - } - } - else - { - anvil_assert(in_opt_queue_ptr != nullptr); + return set_memory_nonsparse_internal(std::move(memory_block_ptr), + in_n_device_group_indices, + in_device_group_indices_ptr); +} - queue_ptr = in_opt_queue_ptr; - } +/* Please see header for specification */ +bool Anvil::Buffer::set_nonsparse_memory_multi(uint32_t in_n_buffer_memory_binding_updates, + BufferMemoryBindingUpdate* in_updates_ptr) +{ + StructChainVector bind_info_struct_chains; + const Anvil::BaseDevice* device_ptr (nullptr); + uint32_t n_total_physical_devices(0); + bool result (false); + + /* Sanity checks */ + if (in_n_buffer_memory_binding_updates == 0) + { + anvil_assert(in_n_buffer_memory_binding_updates != 0); - anvil_assert(queue_ptr != nullptr); - staging_buffer_queue_fam_type = base_device_locked_ptr->get_queue_family_type(queue_ptr->get_queue_family_index() ); + goto end; + } - switch (staging_buffer_queue_fam_type) - { - case Anvil::QUEUE_FAMILY_TYPE_COMPUTE: staging_buffer_queue_fam_bits = Anvil::QUEUE_FAMILY_COMPUTE_BIT; break; - case Anvil::QUEUE_FAMILY_TYPE_TRANSFER: staging_buffer_queue_fam_bits = Anvil::QUEUE_FAMILY_DMA_BIT; break; - case Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL: staging_buffer_queue_fam_bits = Anvil::QUEUE_FAMILY_GRAPHICS_BIT; break; + /* Convert input structure into VkBindBufferMemoryInfoKHR structures */ + for (uint32_t n_update = 0; + n_update < in_n_buffer_memory_binding_updates; + ++n_update) + { + const auto& current_update (in_updates_ptr[n_update]); + const Anvil::MGPUDevice* mgpu_device_ptr(dynamic_cast(current_update.buffer_ptr->m_device_ptr) ); - default: - { - anvil_assert_fail(); - } - } + if (current_update.physical_devices.size() != mgpu_device_ptr->get_n_physical_devices() ) + { + anvil_assert(current_update.physical_devices.size() == mgpu_device_ptr->get_n_physical_devices() || + current_update.physical_devices.size() == 0); + + goto end; + } + + n_total_physical_devices += static_cast(current_update.physical_devices.size() ); + + if (n_update == 0) + { + device_ptr = current_update.buffer_ptr->m_device_ptr; } else { - /* We can use any queue from the list of queue fams this buffer is compatible, in order to perform the copy op. 
*/ - if ((m_queue_families & Anvil::QUEUE_FAMILY_GRAPHICS_BIT) != 0) + anvil_assert(device_ptr == current_update.buffer_ptr->m_device_ptr); + } + } + + { + std::vector device_indices (n_total_physical_devices); + uint32_t n_current_physical_device(0); + VkResult result_vk; + const auto& entrypoints (device_ptr->get_extension_khr_bind_memory2_entrypoints() ); + + for (uint32_t n_update = 0; + n_update < in_n_buffer_memory_binding_updates; + ++n_update) + { + const auto& current_update = in_updates_ptr[n_update]; + + for (uint32_t n_physical_device = 0; + n_physical_device < current_update.physical_devices.size(); + ++n_physical_device, ++n_current_physical_device) { - queue_ptr = base_device_locked_ptr->get_universal_queue(0); - staging_buffer_queue_fam_bits = Anvil::QUEUE_FAMILY_GRAPHICS_BIT; - staging_buffer_queue_fam_type = Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL; + device_indices.at(n_current_physical_device) = current_update.physical_devices.at(n_physical_device)->get_device_group_device_index(); } - else - if ((m_queue_families & Anvil::QUEUE_FAMILY_DMA_BIT) != 0) + } + + n_current_physical_device = 0; + + for (uint32_t n_update = 0; + n_update < in_n_buffer_memory_binding_updates; + ++n_update) + { + VkBindBufferMemoryInfoKHR current_bind_info; + VkBindBufferMemoryDeviceGroupInfoKHR current_bind_info_device_group; + StructChainer current_bind_info_chain; + const auto& current_update = in_updates_ptr[n_update]; + + current_bind_info_device_group.sType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR; + current_bind_info_device_group.deviceIndexCount = static_cast(current_update.physical_devices.size()); + current_bind_info_device_group.pDeviceIndices = &device_indices.at(n_current_physical_device); + + current_bind_info.buffer = current_update.buffer_ptr->get_buffer (); + current_bind_info.memory = current_update.memory_block_ptr->get_memory (); + current_bind_info.memoryOffset = current_update.memory_block_ptr->get_start_offset(); + current_bind_info.pNext = nullptr; + current_bind_info.sType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR; + + current_bind_info_chain.append_struct(current_bind_info); + current_bind_info_chain.append_struct(current_bind_info_device_group); + + bind_info_struct_chains.append_struct_chain(current_bind_info_chain.create_chain() ); + + n_current_physical_device += static_cast(current_update.physical_devices.size()); + } + + result_vk = entrypoints.vkBindBufferMemory2KHR(device_ptr->get_device_vk(), + in_n_buffer_memory_binding_updates, + bind_info_struct_chains.get_root_structs() ); + + if (!is_vk_call_successful(result_vk) ) + { + anvil_assert(is_vk_call_successful(result_vk) ); + + goto end; + } + + for (uint32_t n_update = 0; + n_update < in_n_buffer_memory_binding_updates; + ++n_update) + { + auto& current_update = in_updates_ptr[n_update]; + + anvil_assert(current_update.buffer_ptr->m_memory_block_ptr == nullptr); + + anvil_assert(std::find_if(current_update.buffer_ptr->m_owned_memory_blocks.begin(), + current_update.buffer_ptr->m_owned_memory_blocks.end (), + [=](const MemoryBlockUniquePtr& in_memory_block_ptr) + { + return (in_memory_block_ptr.get() == current_update.memory_block_ptr); + }) == current_update.buffer_ptr->m_owned_memory_blocks.end() ); + + current_update.buffer_ptr->m_memory_block_ptr = current_update.memory_block_ptr; + + if (current_update.memory_block_owned_by_buffer) { - queue_ptr = base_device_locked_ptr->get_transfer_queue(0); - staging_buffer_queue_fam_bits = Anvil::QUEUE_FAMILY_DMA_BIT; - staging_buffer_queue_fam_type =
Anvil::QUEUE_FAMILY_TYPE_TRANSFER; + MemoryBlockUniquePtr memory_block_ptr(current_update.memory_block_ptr, + std::default_delete() ); + + anvil_assert(!current_update.buffer_ptr->is_memory_block_owned(current_update.memory_block_ptr)); + + current_update.buffer_ptr->m_owned_memory_blocks.push_back( + std::move(memory_block_ptr) + ); } - else + } + } + + /* All done */ + result = true; +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::Buffer::write(VkDeviceSize in_start_offset, + VkDeviceSize in_size, + const void* in_data, + Anvil::Queue* in_opt_queue_ptr) +{ + return write(in_start_offset, + in_size, + in_data, + UINT32_MAX, /* in_device_mask */ + in_opt_queue_ptr); +} + +/* Please see header for specification */ +bool Anvil::Buffer::write(VkDeviceSize in_start_offset, + VkDeviceSize in_size, + const void* in_data, + uint32_t in_device_mask, + Anvil::Queue* in_opt_queue_ptr) +{ + const Anvil::DeviceType device_type(m_device_ptr->get_type() ); + bool result (false); + + /** TODO: Support for sparse-resident buffers whose n_memory_blocks > 1 */ + Anvil::MemoryBlock* memory_block_ptr(get_memory_block(0) ); + + if ( m_create_info_ptr->get_type() == Anvil::BufferType::NO_ALLOC && + (m_create_info_ptr->get_create_flags() & Anvil::BufferCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0) + { + anvil_assert(m_page_tracker_ptr->get_n_memory_blocks() == 1); + } + + anvil_assert(memory_block_ptr != nullptr); + anvil_assert(memory_block_ptr->get_create_info_ptr()->get_size() >= in_size); + + if ((memory_block_ptr->get_create_info_ptr()->get_memory_features() & Anvil::MemoryFeatureFlagBits::MAPPABLE_BIT) != 0) + { + anvil_assert((memory_block_ptr->get_create_info_ptr()->get_memory_features() & Anvil::MemoryFeatureFlagBits::MULTI_INSTANCE_BIT) == 0); + + result = memory_block_ptr->write(in_start_offset, + in_size, + in_data); + } + else + { + /* The buffer memory is not mappable. We need to create a staging memory, + * upload user's data there, and then issue a copy op. 
*/ + Anvil::PrimaryCommandBufferUniquePtr copy_cmdbuf_ptr; + + if (m_staging_buffer_ptr == nullptr || + m_staging_buffer_ptr->get_create_info_ptr()->get_size() < in_size) + { + if (!init_staging_buffer(in_size, + in_opt_queue_ptr) ) { - anvil_assert((m_queue_families & Anvil::QUEUE_FAMILY_COMPUTE_BIT) != 0) + result = false; - queue_ptr = base_device_locked_ptr->get_compute_queue(0); - staging_buffer_queue_fam_bits = Anvil::QUEUE_FAMILY_COMPUTE_BIT; - staging_buffer_queue_fam_type = Anvil::QUEUE_FAMILY_TYPE_COMPUTE; + goto end; } + + anvil_assert(m_staging_buffer_ptr != nullptr); } - copy_cmdbuf_ptr = base_device_locked_ptr->get_command_pool(staging_buffer_queue_fam_type)->alloc_primary_level_command_buffer(); + m_staging_buffer_ptr->write(0, /* in_start_offset */ + in_size, + in_data); + + copy_cmdbuf_ptr = m_device_ptr->get_command_pool_for_queue_family_index(m_staging_buffer_queue_ptr->get_queue_family_index() )->alloc_primary_level_command_buffer(); if (copy_cmdbuf_ptr == nullptr) { @@ -761,60 +1166,103 @@ bool Anvil::Buffer::write(VkDeviceSize in_start_offset, goto end; } - staging_buffer_ptr = Anvil::Buffer::create_nonsparse( - m_device_ptr, - in_size, - staging_buffer_queue_fam_bits, - VK_SHARING_MODE_CONCURRENT, - VK_BUFFER_USAGE_TRANSFER_SRC_BIT, - Anvil::MEMORY_FEATURE_FLAG_MAPPABLE, - in_data); - - if (staging_buffer_ptr == nullptr) + if (device_type == Anvil::DeviceType::SINGLE_GPU) { - anvil_assert(staging_buffer_ptr != nullptr); - - goto end; + copy_cmdbuf_ptr->start_recording(true, /* one_time_submit */ + false); /* simultaneous_use_allowed */ } + else + { + anvil_assert(device_type == Anvil::DeviceType::MULTI_GPU); - copy_cmdbuf_ptr->start_recording(true, /* one_time_submit */ - false); /* simultaneous_use_allowed */ + copy_cmdbuf_ptr->start_recording(true, /* one_time_submit */ + false, /* simultaneous_use_allowed */ + in_device_mask); + } { - BufferBarrier buffer_barrier(VK_ACCESS_HOST_WRITE_BIT, - VK_ACCESS_TRANSFER_READ_BIT, - VK_QUEUE_FAMILY_IGNORED, - VK_QUEUE_FAMILY_IGNORED, - staging_buffer_ptr, - 0, /* in_offset */ - staging_buffer_ptr->get_size() ); - VkBufferCopy copy_region; + BufferBarrier buffer_barrier(Anvil::AccessFlagBits::HOST_WRITE_BIT, /* in_source_access_mask */ + Anvil::AccessFlagBits::TRANSFER_READ_BIT, + VK_QUEUE_FAMILY_IGNORED, + VK_QUEUE_FAMILY_IGNORED, + m_staging_buffer_ptr.get(), + 0, /* in_offset */ + in_size); + Anvil::BufferCopy copy_region; + + copy_region.dst_offset = in_start_offset; + copy_region.size = in_size; + copy_region.src_offset = 0; - copy_region.dstOffset = in_start_offset; - copy_region.size = in_size; - copy_region.srcOffset = 0; - copy_cmdbuf_ptr->record_pipeline_barrier(VK_PIPELINE_STAGE_HOST_BIT, - VK_PIPELINE_STAGE_TRANSFER_BIT, - VK_FALSE, /* in_by_region */ + copy_cmdbuf_ptr->record_pipeline_barrier(Anvil::PipelineStageFlagBits::HOST_BIT, + Anvil::PipelineStageFlagBits::ALL_COMMANDS_BIT, + Anvil::DependencyFlagBits::NONE, 0, /* in_memory_barrier_count */ nullptr, /* in_memory_barriers_ptr */ 1, /* in_buffer_memory_barrier_count */ &buffer_barrier, 0, /* in_image_memory_barrier_count */ nullptr); /* in_image_memory_barriers_ptr */ - copy_cmdbuf_ptr->record_copy_buffer (staging_buffer_ptr, - shared_from_this(), + copy_cmdbuf_ptr->record_copy_buffer (m_staging_buffer_ptr.get(), + this, 1, /* in_region_count */ &copy_region); } copy_cmdbuf_ptr->stop_recording(); - queue_ptr->submit_command_buffer(copy_cmdbuf_ptr, - true /* should_block */); + if (device_type == Anvil::DeviceType::SINGLE_GPU) + { +
m_staging_buffer_queue_ptr->submit( + Anvil::SubmitInfo::create_execute(copy_cmdbuf_ptr.get(), + true /* should_block */) + ); + } + else + { + Anvil::CommandBufferMGPUSubmission copy_cmdbuf_submission; + + copy_cmdbuf_submission.cmd_buffer_ptr = copy_cmdbuf_ptr.get(); + copy_cmdbuf_submission.device_mask = in_device_mask; + + /* Need to update all memory instances */ + if ((memory_block_ptr->get_create_info_ptr()->get_memory_features() & Anvil::MemoryFeatureFlagBits::MULTI_INSTANCE_BIT) != 0) + { + auto device_mask = memory_block_ptr->get_create_info_ptr()->get_device_mask(); + const Anvil::MGPUDevice* mgpu_device_ptr = dynamic_cast(m_device_ptr); + + if (device_mask == 0) + { + device_mask = (1 << mgpu_device_ptr->get_n_physical_devices()) - 1; + } + + copy_cmdbuf_submission.device_mask = device_mask; + } + + m_staging_buffer_queue_ptr->submit( + Anvil::SubmitInfo::create_execute(&copy_cmdbuf_submission, + 1, /* in_n_command_buffer_submissions */ + true /* should_block */) + ); + } result = true; } end: return result; -} \ No newline at end of file +} + +bool Anvil::Buffer::is_memory_block_owned(const MemoryBlock* in_memory_block_ptr) const +{ + for (auto memory_block_ptr_iter = m_owned_memory_blocks.begin(); + memory_block_ptr_iter != m_owned_memory_blocks.end(); + ++memory_block_ptr_iter) + { + if (in_memory_block_ptr == memory_block_ptr_iter->get()) + { + return true; + } + } + + return false; +} diff --git a/src/wrappers/buffer_view.cpp b/src/wrappers/buffer_view.cpp index 91874b05..33e37fb6 100644 --- a/src/wrappers/buffer_view.cpp +++ b/src/wrappers/buffer_view.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -20,6 +20,7 @@ // THE SOFTWARE.
// +#include "misc/buffer_view_create_info.h" #include "misc/debug.h" #include "misc/object_tracker.h" #include "wrappers/buffer.h" @@ -27,44 +28,17 @@ #include "wrappers/device.h" /** Please see header for specification */ -Anvil::BufferView::BufferView(std::weak_ptr in_device_ptr, - std::shared_ptr in_buffer_ptr, - VkFormat in_format, - VkDeviceSize in_start_offset, - VkDeviceSize in_size) - :DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT), - m_buffer_ptr (in_buffer_ptr), - m_device_ptr (in_device_ptr), - m_format (in_format), - m_size (in_size), - m_start_offset(in_start_offset) +Anvil::BufferView::BufferView(Anvil::BufferViewCreateInfoUniquePtr in_create_info_ptr) + :DebugMarkerSupportProvider(in_create_info_ptr->get_device(), + Anvil::ObjectType::BUFFER_VIEW), + MTSafetySupportProvider (Anvil::Utils::convert_mt_safety_enum_to_boolean(in_create_info_ptr->get_mt_safety(), + in_create_info_ptr->get_device () )) { - VkBufferViewCreateInfo buffer_view_create_info; - std::shared_ptr device_locked_ptr(in_device_ptr); - VkResult result (VK_ERROR_INITIALIZATION_FAILED); + m_create_info_ptr = std::move(in_create_info_ptr); - ANVIL_REDUNDANT_VARIABLE(result); - - /* Spawn a new Vulkan buffer view */ - buffer_view_create_info.buffer = in_buffer_ptr->get_buffer(); - buffer_view_create_info.flags = 0; - buffer_view_create_info.format = in_format; - buffer_view_create_info.offset = in_start_offset; - buffer_view_create_info.pNext = nullptr; - buffer_view_create_info.range = in_size; - buffer_view_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO; - - result = vkCreateBufferView(device_locked_ptr->get_device_vk(), - &buffer_view_create_info, - nullptr, /* pAllocator */ - &m_buffer_view); - - anvil_assert_vk_call_succeeded(result); - if (is_vk_call_successful(result) ) - { - set_vk_handle(m_buffer_view); - } + /* Register the buffer view instance */ + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::BUFFER_VIEW, + this); } /** Destructor. 
@@ -74,41 +48,69 @@ Anvil::BufferView::BufferView(std::weak_ptr in_device_ptr, **/ Anvil::BufferView::~BufferView() { - std::shared_ptr device_locked_ptr(m_device_ptr); - - - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_BUFFER_VIEW, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::BUFFER_VIEW, this); - vkDestroyBufferView(device_locked_ptr->get_device_vk(), - m_buffer_view, - nullptr /* pAllocator */); + lock(); + { + Anvil::Vulkan::vkDestroyBufferView(m_device_ptr->get_device_vk(), + m_buffer_view, + nullptr /* pAllocator */); + } + unlock(); m_buffer_view = VK_NULL_HANDLE; } -/** Please see header for specification */ -std::shared_ptr Anvil::BufferView::create(std::weak_ptr in_device_ptr, - std::shared_ptr in_buffer_ptr, - VkFormat in_format, - VkDeviceSize in_start_offset, - VkDeviceSize in_size) +Anvil::BufferViewUniquePtr Anvil::BufferView::create(Anvil::BufferViewCreateInfoUniquePtr in_create_info_ptr) { - std::shared_ptr result_ptr; + Anvil::BufferViewUniquePtr result_ptr(nullptr, + std::default_delete() ); - /* Instantiate the object */ result_ptr.reset( - new BufferView( - in_device_ptr, - in_buffer_ptr, - in_format, - in_start_offset, - in_size) + new Anvil::BufferView(std::move(in_create_info_ptr) ) ); - /* Register the buffer view instance */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_BUFFER_VIEW, - result_ptr.get() ); + if (result_ptr != nullptr) + { + if (!result_ptr->init() ) + { + result_ptr.reset(); + } + } return result_ptr; -} \ No newline at end of file +} + +bool Anvil::BufferView::init() +{ + VkBufferViewCreateInfo buffer_view_create_info; + VkResult result (VK_ERROR_INITIALIZATION_FAILED); + + ANVIL_REDUNDANT_VARIABLE(result); + + /* Spawn a new Vulkan buffer view */ + buffer_view_create_info.buffer = m_create_info_ptr->get_parent_buffer()->get_buffer(); + buffer_view_create_info.flags = 0; + buffer_view_create_info.format = static_cast(m_create_info_ptr->get_format() ); + buffer_view_create_info.offset = m_create_info_ptr->get_start_offset(); + buffer_view_create_info.pNext = nullptr; + buffer_view_create_info.range = m_create_info_ptr->get_size(); + buffer_view_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO; + + result = Anvil::Vulkan::vkCreateBufferView(m_create_info_ptr->get_device()->get_device_vk(), + &buffer_view_create_info, + nullptr, /* pAllocator */ + &m_buffer_view); + + if (is_vk_call_successful(result) ) + { + anvil_assert_vk_call_succeeded(result); + if (is_vk_call_successful(result) ) + { + set_vk_handle(m_buffer_view); + } + } + + return is_vk_call_successful(result); +} diff --git a/src/wrappers/command_buffer.cpp b/src/wrappers/command_buffer.cpp index 44a9afbe..6c8a16db 100644 --- a/src/wrappers/command_buffer.cpp +++ b/src/wrappers/command_buffer.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -22,12 +22,16 @@ #include "misc/callbacks.h" #include "misc/debug.h" +#include "misc/descriptor_set_create_info.h" +#include "misc/memory_block_create_info.h" +#include "misc/struct_chainer.h" #include "wrappers/buffer.h" #include "wrappers/buffer_view.h" #include "wrappers/command_buffer.h" #include "wrappers/command_pool.h" #include "wrappers/compute_pipeline_manager.h" #include "wrappers/descriptor_set.h" +#include "wrappers/descriptor_set_layout.h" #include "wrappers/device.h" #include "wrappers/event.h" #include "wrappers/framebuffer.h" @@ -35,20 +39,20 @@ #include "wrappers/image.h" #include "wrappers/image_view.h" #include "wrappers/instance.h" +#include "wrappers/memory_block.h" #include "wrappers/physical_device.h" #include "wrappers/pipeline_layout.h" #include "wrappers/query_pool.h" #include "wrappers/render_pass.h" - /* Command stashing should be enabled by default for builds that care. */ bool Anvil::CommandBufferBase::m_command_stashing_disabled = false; /** Please see header for specification */ -Anvil::CommandBufferBase::BeginQueryCommand::BeginQueryCommand(std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_entry, - VkQueryControlFlags in_flags) +Anvil::CommandBufferBase::BeginQueryCommand::BeginQueryCommand(Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_entry, + Anvil::QueryControlFlags in_flags) :Command(COMMAND_TYPE_BEGIN_QUERY) { entry = in_entry; @@ -57,17 +61,36 @@ Anvil::CommandBufferBase::BeginQueryCommand::BeginQueryCommand(std::shared_ptr in_fbo_ptr, - uint32_t in_n_physical_devices, - const std::weak_ptr* in_physical_devices, - const VkRect2D* in_render_areas, - std::shared_ptr in_render_pass_ptr, - VkSubpassContents in_contents) +Anvil::BeginQueryIndexedEXTCommand::BeginQueryIndexedEXTCommand(Anvil::QueryPool* in_query_pool_ptr, + const uint32_t& in_query, + const Anvil::QueryControlFlags& in_flags, + const uint32_t& in_index) + :Command (COMMAND_TYPE_BEGIN_QUERY_INDEXED_EXT), + flags (in_flags), + index (in_index), + query (in_query), + query_pool_ptr(in_query_pool_ptr) +{ + /* Stub */ +} + +/** Please see header for specification */ +Anvil::BeginRenderPassCommand::BeginRenderPassCommand(uint32_t in_n_clear_values, + const VkClearValue* in_clear_value_ptrs, + Anvil::Framebuffer* in_fbo_ptr, + uint32_t in_device_mask, + uint32_t in_n_render_areas, + const VkRect2D* in_render_areas_ptr, + Anvil::RenderPass* in_render_pass_ptr, + Anvil::SubpassContents in_contents, + const uint32_t& in_n_attachment_initial_sample_locations, + const Anvil::AttachmentSampleLocations* in_attachment_initial_sample_locations_ptr, + const uint32_t& in_n_post_subpass_sample_locations, + const Anvil::SubpassSampleLocations* in_post_subpass_sample_locations_ptr) :Command(COMMAND_TYPE_BEGIN_RENDER_PASS) { contents = in_contents; + device_mask = in_device_mask; fbo_ptr = in_fbo_ptr; render_pass_ptr = in_render_pass_ptr; @@ -78,23 +101,52 @@ Anvil::BeginRenderPassCommand::BeginRenderPassCommand(uint32_t clear_values.push_back(in_clear_value_ptrs[n_clear_value]); } - for (uint32_t n_physical_device = 0; - n_physical_device < in_n_physical_devices; - ++n_physical_device) + attachment_initial_sample_locations.resize(in_n_attachment_initial_sample_locations); + post_subpass_sample_locations.resize (in_n_post_subpass_sample_locations); + render_areas.resize (in_n_render_areas); + + for (uint32_t n_render_area = 0; 
+ n_render_area < in_n_render_areas; + ++n_render_area) + { + render_areas.at(n_render_area) = in_render_areas_ptr[n_render_area]; + } + + for (uint32_t n_attachment = 0; + n_attachment < in_n_attachment_initial_sample_locations; + ++n_attachment) + { + attachment_initial_sample_locations.at(n_attachment) = in_attachment_initial_sample_locations_ptr[n_attachment]; + } + + for (uint32_t n_subpass = 0; + n_subpass < in_n_post_subpass_sample_locations; + ++n_subpass) { - physical_devices.push_back(in_physical_devices[n_physical_device]); - render_areas.push_back (in_render_areas [n_physical_device]); + post_subpass_sample_locations.at(n_subpass) = in_post_subpass_sample_locations_ptr[n_subpass]; } } /** Please see header for specification */ -Anvil::CommandBufferBase::BindDescriptorSetsCommand::BindDescriptorSetsCommand(VkPipelineBindPoint in_pipeline_bind_point, - std::shared_ptr in_layout_ptr, - uint32_t in_first_set, - uint32_t in_set_count, - std::shared_ptr* in_descriptor_set_ptrs, - uint32_t in_dynamic_offset_count, - const uint32_t* in_dynamic_offset_ptrs) +Anvil::BeginTransformFeedbackEXTCommand::BeginTransformFeedbackEXTCommand(const uint32_t& in_first_counter_buffer, + const std::vector& in_counter_buffer_ptrs, + const std::vector& in_counter_buffer_offsets) + :Command (COMMAND_TYPE_BEGIN_TRANSFORM_FEEDBACK_EXT), + counter_buffer_ptrs (in_counter_buffer_ptrs), + counter_buffer_offsets(in_counter_buffer_offsets), + first_counter_buffer (in_first_counter_buffer) +{ + /* Stub */ +} + +/** Please see header for specification */ +Anvil::CommandBufferBase::BindDescriptorSetsCommand::BindDescriptorSetsCommand(Anvil::PipelineBindPoint in_pipeline_bind_point, + Anvil::PipelineLayout* in_layout_ptr, + uint32_t in_first_set, + uint32_t in_set_count, + const Anvil::DescriptorSet* const* in_descriptor_set_ptrs, + uint32_t in_dynamic_offset_count, + const uint32_t* in_dynamic_offset_ptrs) :Command(COMMAND_TYPE_BIND_DESCRIPTOR_SETS) { first_set = in_first_set; @@ -117,9 +169,9 @@ Anvil::CommandBufferBase::BindDescriptorSetsCommand::BindDescriptorSetsCommand(V } /** Please see header for specification */ -Anvil::CommandBufferBase::BindIndexBufferCommand::BindIndexBufferCommand(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - VkIndexType in_index_type) +Anvil::CommandBufferBase::BindIndexBufferCommand::BindIndexBufferCommand(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::IndexType in_index_type) :Command(COMMAND_TYPE_BIND_INDEX_BUFFER) { buffer = in_buffer_ptr->get_buffer(); @@ -129,8 +181,8 @@ Anvil::CommandBufferBase::BindIndexBufferCommand::BindIndexBufferCommand(std::sh } /** Please see header for specification */ -Anvil::CommandBufferBase::BindPipelineCommand::BindPipelineCommand(VkPipelineBindPoint in_pipeline_bind_point, - Anvil::PipelineID in_pipeline_id) +Anvil::CommandBufferBase::BindPipelineCommand::BindPipelineCommand(Anvil::PipelineBindPoint in_pipeline_bind_point, + Anvil::PipelineID in_pipeline_id) :Command(COMMAND_TYPE_BIND_PIPELINE) { pipeline_bind_point = in_pipeline_bind_point; @@ -138,14 +190,14 @@ Anvil::CommandBufferBase::BindPipelineCommand::BindPipelineCommand(VkPipelineBin } /** Please see header for specification */ -Anvil::CommandBufferBase::BindVertexBuffersCommand::BindVertexBuffersCommand(uint32_t in_start_binding, - uint32_t in_binding_count, - std::shared_ptr* in_buffer_ptrs, - const VkDeviceSize* in_offset_ptrs) +Anvil::CommandBufferBase::BindVertexBuffersCommand::BindVertexBuffersCommand(uint32_t in_start_binding, + uint32_t 
in_binding_count, + Anvil::Buffer** in_buffer_ptrs, + const VkDeviceSize* in_offset_ptrs) :Command(COMMAND_TYPE_BIND_VERTEX_BUFFER) { start_binding = in_start_binding; - + for (uint32_t n_binding = 0; n_binding < in_binding_count; ++n_binding) @@ -164,8 +216,8 @@ Anvil::CommandBufferBase::BindVertexBuffersCommandBinding::BindVertexBuffersComm } /** Please see header for specification */ -Anvil::CommandBufferBase::BindVertexBuffersCommandBinding::BindVertexBuffersCommandBinding(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset) +Anvil::CommandBufferBase::BindVertexBuffersCommandBinding::BindVertexBuffersCommandBinding(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset) { buffer = in_buffer_ptr->get_buffer(); buffer_ptr = in_buffer_ptr; @@ -173,13 +225,13 @@ Anvil::CommandBufferBase::BindVertexBuffersCommandBinding::BindVertexBuffersComm } /** Please see header for specification */ -Anvil::CommandBufferBase::BlitImageCommand::BlitImageCommand(std::shared_ptr in_src_image_ptr, - VkImageLayout in_src_image_layout, - std::shared_ptr in_dst_image_ptr, - VkImageLayout in_dst_image_layout, - uint32_t in_region_count, - const VkImageBlit* in_region_ptrs, - VkFilter in_filter) +Anvil::CommandBufferBase::BlitImageCommand::BlitImageCommand(Anvil::Image* in_src_image_ptr, + Anvil::ImageLayout in_src_image_layout, + Anvil::Image* in_dst_image_ptr, + Anvil::ImageLayout in_dst_image_layout, + uint32_t in_region_count, + const Anvil::ImageBlit* in_region_ptrs, + Anvil::Filter in_filter) :Command(COMMAND_TYPE_BLIT_IMAGE) { dst_image = in_dst_image_ptr->get_image(); @@ -199,19 +251,35 @@ Anvil::CommandBufferBase::BlitImageCommand::BlitImageCommand(std::shared_ptr& in_buffer_ptrs, + const std::vector& in_offsets, + const std::vector& in_sizes) + :Command (COMMAND_TYPE_BIND_TRANSFORM_FEEDBACK_BUFFERS_EXT), + buffer_ptrs (in_buffer_ptrs), + first_binding(in_first_binding), + n_bindings (in_n_bindings), + offsets (in_offsets), + sizes (in_sizes) +{ + /* Stub */ +} + +/** Please see header for specification */ +Anvil::CommandBufferBase::ClearAttachmentsCommand::ClearAttachmentsCommand(uint32_t in_n_attachments, + const Anvil::ClearAttachment* in_attachments, + uint32_t in_n_rects, + const VkClearRect* in_rect_ptrs) :Command(COMMAND_TYPE_CLEAR_ATTACHMENTS) { for (uint32_t n_attachment = 0; n_attachment < in_n_attachments; ++n_attachment) { - attachments.push_back(ClearAttachmentsCommandAttachment(in_attachments[n_attachment].aspectMask, - in_attachments[n_attachment].clearValue, - in_attachments[n_attachment].colorAttachment) ); + attachments.push_back(ClearAttachmentsCommandAttachment(in_attachments[n_attachment].aspect_mask, + in_attachments[n_attachment].clear_value, + in_attachments[n_attachment].color_attachment) ); } for (uint32_t n_rect = 0; @@ -223,11 +291,11 @@ Anvil::CommandBufferBase::ClearAttachmentsCommand::ClearAttachmentsCommand(uint3 } /** Please see header for specification */ -Anvil::CommandBufferBase::ClearColorImageCommand::ClearColorImageCommand(std::shared_ptr in_image_ptr, - VkImageLayout in_image_layout, - const VkClearColorValue* in_color_ptr, - uint32_t in_range_count, - const VkImageSubresourceRange* in_range_ptrs) +Anvil::CommandBufferBase::ClearColorImageCommand::ClearColorImageCommand(Anvil::Image* in_image_ptr, + Anvil::ImageLayout in_image_layout, + const VkClearColorValue* in_color_ptr, + uint32_t in_range_count, + const Anvil::ImageSubresourceRange* in_range_ptrs) :Command(COMMAND_TYPE_CLEAR_COLOR_IMAGE) { color = *in_color_ptr; @@ -244,11 +312,11 @@ 
Anvil::CommandBufferBase::ClearColorImageCommand::ClearColorImageCommand(std::sh } /** Please see header for specification */ -Anvil::CommandBufferBase::ClearDepthStencilImageCommand::ClearDepthStencilImageCommand(std::shared_ptr in_image_ptr, - VkImageLayout in_image_layout, - const VkClearDepthStencilValue* in_depth_stencil_ptr, - uint32_t in_range_count, - const VkImageSubresourceRange* in_range_ptrs) +Anvil::CommandBufferBase::ClearDepthStencilImageCommand::ClearDepthStencilImageCommand(Anvil::Image* in_image_ptr, + Anvil::ImageLayout in_image_layout, + const VkClearDepthStencilValue* in_depth_stencil_ptr, + uint32_t in_range_count, + const Anvil::ImageSubresourceRange* in_range_ptrs) :Command(COMMAND_TYPE_CLEAR_DEPTH_STENCIL_IMAGE) { depth_stencil = *in_depth_stencil_ptr; @@ -265,10 +333,10 @@ Anvil::CommandBufferBase::ClearDepthStencilImageCommand::ClearDepthStencilImageC } /** Please see header for specification */ -Anvil::CommandBufferBase::CopyBufferCommand::CopyBufferCommand(std::shared_ptr in_src_buffer_ptr, - std::shared_ptr in_dst_buffer_ptr, - uint32_t in_region_count, - const VkBufferCopy* in_region_ptrs) +Anvil::CommandBufferBase::CopyBufferCommand::CopyBufferCommand(Anvil::Buffer* in_src_buffer_ptr, + Anvil::Buffer* in_dst_buffer_ptr, + uint32_t in_region_count, + const Anvil::BufferCopy* in_region_ptrs) :Command(COMMAND_TYPE_COPY_BUFFER) { dst_buffer = in_dst_buffer_ptr->get_buffer(); @@ -285,11 +353,11 @@ Anvil::CommandBufferBase::CopyBufferCommand::CopyBufferCommand(std::shared_ptr in_src_buffer_ptr, - std::shared_ptr in_dst_image_ptr, - VkImageLayout in_dst_image_layout, - uint32_t in_region_count, - const VkBufferImageCopy* in_region_ptrs) +Anvil::CommandBufferBase::CopyBufferToImageCommand::CopyBufferToImageCommand(Anvil::Buffer* in_src_buffer_ptr, + Anvil::Image* in_dst_image_ptr, + Anvil::ImageLayout in_dst_image_layout, + uint32_t in_region_count, + const Anvil::BufferImageCopy* in_region_ptrs) :Command(COMMAND_TYPE_COPY_BUFFER_TO_IMAGE) { dst_image = in_dst_image_ptr->get_image(); @@ -307,12 +375,12 @@ Anvil::CommandBufferBase::CopyBufferToImageCommand::CopyBufferToImageCommand(std } /** Please see header for specification */ -Anvil::CommandBufferBase::CopyImageCommand::CopyImageCommand(std::shared_ptr in_src_image_ptr, - VkImageLayout in_src_image_layout, - std::shared_ptr in_dst_image_ptr, - VkImageLayout in_dst_image_layout, - uint32_t in_region_count, - const VkImageCopy* in_region_ptrs) +Anvil::CommandBufferBase::CopyImageCommand::CopyImageCommand(Anvil::Image* in_src_image_ptr, + Anvil::ImageLayout in_src_image_layout, + Anvil::Image* in_dst_image_ptr, + Anvil::ImageLayout in_dst_image_layout, + uint32_t in_region_count, + const Anvil::ImageCopy* in_region_ptrs) :Command(COMMAND_TYPE_COPY_IMAGE) { dst_image = in_dst_image_ptr->get_image(); @@ -331,11 +399,11 @@ Anvil::CommandBufferBase::CopyImageCommand::CopyImageCommand(std::shared_ptr in_src_image_ptr, - VkImageLayout in_src_image_layout, - std::shared_ptr in_dst_buffer_ptr, - uint32_t in_region_count, - const VkBufferImageCopy* in_region_ptrs) +Anvil::CommandBufferBase::CopyImageToBufferCommand::CopyImageToBufferCommand(Anvil::Image* in_src_image_ptr, + Anvil::ImageLayout in_src_image_layout, + Anvil::Buffer* in_dst_buffer_ptr, + uint32_t in_region_count, + const Anvil::BufferImageCopy* in_region_ptrs) :Command(COMMAND_TYPE_COPY_IMAGE_TO_BUFFER) { dst_buffer = in_dst_buffer_ptr->get_buffer(); @@ -353,13 +421,13 @@ Anvil::CommandBufferBase::CopyImageToBufferCommand::CopyImageToBufferCommand(std } /** 
Please see header for specification */ -Anvil::CommandBufferBase::CopyQueryPoolResultsCommand::CopyQueryPoolResultsCommand(std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_start_query, - uint32_t in_query_count, - std::shared_ptr in_dst_buffer_ptr, - VkDeviceSize in_dst_offset, - VkDeviceSize in_dst_stride, - VkQueryResultFlags in_flags) +Anvil::CommandBufferBase::CopyQueryPoolResultsCommand::CopyQueryPoolResultsCommand(Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_start_query, + uint32_t in_query_count, + Anvil::Buffer* in_dst_buffer_ptr, + VkDeviceSize in_dst_offset, + VkDeviceSize in_dst_stride, + VkQueryResultFlags in_flags) :Command(COMMAND_TYPE_COPY_QUERY_POOL_RESULTS) { dst_buffer = in_dst_buffer_ptr->get_buffer(); @@ -433,8 +501,25 @@ Anvil::CommandBufferBase::DispatchCommand::DispatchCommand(uint32_t in_x, } /** Please see header for specification */ -Anvil::CommandBufferBase::DispatchIndirectCommand::DispatchIndirectCommand(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset) +Anvil::CommandBufferBase::DispatchBaseKHRCommand::DispatchBaseKHRCommand(uint32_t in_base_group_x, + uint32_t in_base_group_y, + uint32_t in_base_group_z, + uint32_t in_group_count_x, + uint32_t in_group_count_y, + uint32_t in_group_count_z) + :Command(COMMAND_TYPE_DISPATCH_BASE_KHR) +{ + base_group_x = in_base_group_x; + base_group_y = in_base_group_y; + base_group_z = in_base_group_z; + group_count_x = in_group_count_x; + group_count_y = in_group_count_y; + group_count_z = in_group_count_z; +} + +/** Please see header for specification */ +Anvil::CommandBufferBase::DispatchIndirectCommand::DispatchIndirectCommand(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset) :Command(COMMAND_TYPE_DISPATCH_INDIRECT) { buffer = in_buffer_ptr->get_buffer(); @@ -471,10 +556,10 @@ Anvil::CommandBufferBase::DrawIndexedCommand::DrawIndexedCommand(uint32_t in_ind } /** Please see header for specification */ -Anvil::CommandBufferBase::DrawIndexedIndirectCommand::DrawIndexedIndirectCommand(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - uint32_t in_draw_count, - uint32_t in_stride) +Anvil::CommandBufferBase::DrawIndexedIndirectCommand::DrawIndexedIndirectCommand(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + uint32_t in_draw_count, + uint32_t in_stride) :Command(COMMAND_TYPE_DRAW_INDEXED_INDIRECT) { buffer = in_buffer_ptr->get_buffer(); @@ -485,12 +570,29 @@ Anvil::CommandBufferBase::DrawIndexedIndirectCommand::DrawIndexedIndirectCommand } /** Please see header for specification */ -Anvil::CommandBufferBase::DrawIndexedIndirectCountAMDCommand::DrawIndexedIndirectCountAMDCommand(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - std::shared_ptr in_count_buffer_ptr, - VkDeviceSize in_count_offset, - uint32_t in_max_draw_count, - uint32_t in_stride) +Anvil::CommandBufferBase::DrawIndirectByteCountEXTCommand::DrawIndirectByteCountEXTCommand(const uint32_t& in_instance_count, + const uint32_t& in_first_instance, + Anvil::Buffer* in_counter_buffer_ptr, + const VkDeviceSize& in_counter_buffer_offset, + const uint32_t& in_counter_offset, + const uint32_t& in_vertex_stride) + :Command(COMMAND_TYPE_DRAW_INDIRECT_BYTE_COUNT_EXT) +{ + counter_buffer_offset = in_counter_buffer_offset; + counter_buffer_ptr = in_counter_buffer_ptr; + counter_offset = in_counter_offset; + first_instance = in_first_instance; + instance_count = in_instance_count; + vertex_stride = in_vertex_stride; +} + +/** Please see header for specification */ 
+Anvil::CommandBufferBase::DrawIndexedIndirectCountAMDCommand::DrawIndexedIndirectCountAMDCommand(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::Buffer* in_count_buffer_ptr, + VkDeviceSize in_count_offset, + uint32_t in_max_draw_count, + uint32_t in_stride) :Command(COMMAND_TYPE_DRAW_INDEXED_INDIRECT_COUNT_AMD) { buffer = in_buffer_ptr->get_buffer(); @@ -504,10 +606,29 @@ Anvil::CommandBufferBase::DrawIndexedIndirectCountAMDCommand::DrawIndexedIndirec } /** Please see header for specification */ -Anvil::CommandBufferBase::DrawIndirectCommand::DrawIndirectCommand(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - uint32_t in_count, - uint32_t in_stride) +Anvil::CommandBufferBase::DrawIndexedIndirectCountKHRCommand::DrawIndexedIndirectCountKHRCommand(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::Buffer* in_count_buffer_ptr, + VkDeviceSize in_count_offset, + uint32_t in_max_draw_count, + uint32_t in_stride) + :Command(COMMAND_TYPE_DRAW_INDEXED_INDIRECT_COUNT_KHR) +{ + buffer = in_buffer_ptr->get_buffer(); + buffer_ptr = in_buffer_ptr; + count_buffer = in_count_buffer_ptr->get_buffer(); + count_buffer_ptr = in_count_buffer_ptr; + count_offset = in_count_offset; + max_draw_count = in_max_draw_count; + offset = in_offset; + stride = in_stride; +} + +/** Please see header for specification */ +Anvil::CommandBufferBase::DrawIndirectCommand::DrawIndirectCommand(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + uint32_t in_count, + uint32_t in_stride) :Command(COMMAND_TYPE_DRAW_INDIRECT) { buffer = in_buffer_ptr->get_buffer(); @@ -518,12 +639,12 @@ Anvil::CommandBufferBase::DrawIndirectCommand::DrawIndirectCommand(std::shared_p } /** Please see header for specification */ -Anvil::CommandBufferBase::DrawIndirectCountAMDCommand::DrawIndirectCountAMDCommand(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - std::shared_ptr in_count_buffer_ptr, - VkDeviceSize in_count_offset, - uint32_t in_max_draw_count, - uint32_t in_stride) +Anvil::CommandBufferBase::DrawIndirectCountAMDCommand::DrawIndirectCountAMDCommand(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::Buffer* in_count_buffer_ptr, + VkDeviceSize in_count_offset, + uint32_t in_max_draw_count, + uint32_t in_stride) :Command(COMMAND_TYPE_DRAW_INDIRECT_COUNT_AMD) { buffer = in_buffer_ptr->get_buffer(); @@ -537,14 +658,45 @@ Anvil::CommandBufferBase::DrawIndirectCountAMDCommand::DrawIndirectCountAMDComma } /** Please see header for specification */ -Anvil::CommandBufferBase::EndQueryCommand::EndQueryCommand(std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_entry) +Anvil::CommandBufferBase::DrawIndirectCountKHRCommand::DrawIndirectCountKHRCommand(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::Buffer* in_count_buffer_ptr, + VkDeviceSize in_count_offset, + uint32_t in_max_draw_count, + uint32_t in_stride) + :Command(COMMAND_TYPE_DRAW_INDIRECT_COUNT_KHR) +{ + buffer = in_buffer_ptr->get_buffer(); + buffer_ptr = in_buffer_ptr; + count_buffer = in_count_buffer_ptr->get_buffer(); + count_buffer_ptr = in_count_buffer_ptr; + count_offset = in_count_offset; + max_draw_count = in_max_draw_count; + offset = in_offset; + stride = in_stride; +} + +/** Please see header for specification */ +Anvil::CommandBufferBase::EndQueryCommand::EndQueryCommand(Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_entry) :Command(COMMAND_TYPE_END_QUERY) { entry = in_entry; query_pool_ptr = in_query_pool_ptr; } +/** Please see header for specification */ 
+Anvil::CommandBufferBase::EndQueryIndexedEXTCommand::EndQueryIndexedEXTCommand(Anvil::QueryPool* in_query_pool_ptr, + const Anvil::QueryIndex& in_query, + const uint32_t& in_index) + :Command (COMMAND_TYPE_END_QUERY_INDEXED_EXT), + index (in_index), + query (in_query), + query_pool_ptr(in_query_pool_ptr) +{ + /* Stub */ +} + /** Please see header for specification */ Anvil::EndRenderPassCommand::EndRenderPassCommand() :Command(COMMAND_TYPE_END_RENDER_PASS) @@ -552,8 +704,20 @@ Anvil::EndRenderPassCommand::EndRenderPassCommand() } /** Please see header for specification */ -Anvil::CommandBufferBase::ExecuteCommandsCommand::ExecuteCommandsCommand(uint32_t in_cmd_buffers_count, - std::shared_ptr* in_cmd_buffer_ptrs) +Anvil::CommandBufferBase::EndTransformFeedbackEXTCommand::EndTransformFeedbackEXTCommand(const uint32_t& in_first_counter_buffer, + const std::vector& in_counter_buffer_ptrs, + const std::vector& in_counter_buffer_offsets) + :Command (COMMAND_TYPE_END_TRANSFORM_FEEDBACK_EXT), + first_counter_buffer (in_first_counter_buffer), + counter_buffer_ptrs (in_counter_buffer_ptrs), + counter_buffer_offsets(in_counter_buffer_offsets) +{ + /* Stub */ +} + +/** Please see header for specification */ +Anvil::CommandBufferBase::ExecuteCommandsCommand::ExecuteCommandsCommand(uint32_t in_cmd_buffers_count, + Anvil::SecondaryCommandBuffer** in_cmd_buffer_ptrs) :Command(COMMAND_TYPE_EXECUTE_COMMANDS) { for (uint32_t n_cmd_buffer = 0; @@ -566,10 +730,10 @@ Anvil::CommandBufferBase::ExecuteCommandsCommand::ExecuteCommandsCommand(uint32_ } /** Please see header for specification */ -Anvil::CommandBufferBase::FillBufferCommand::FillBufferCommand(std::shared_ptr in_dst_buffer_ptr, - VkDeviceSize in_dst_offset, - VkDeviceSize in_size, - uint32_t in_data) +Anvil::CommandBufferBase::FillBufferCommand::FillBufferCommand(Anvil::Buffer* in_dst_buffer_ptr, + VkDeviceSize in_dst_offset, + VkDeviceSize in_size, + uint32_t in_data) :Command(COMMAND_TYPE_FILL_BUFFER) { data = in_data; @@ -580,27 +744,27 @@ Anvil::CommandBufferBase::FillBufferCommand::FillBufferCommand(std::shared_ptr(in_dst_stage_mask); + dst_stage_mask = in_dst_stage_mask; flags = in_flags; - src_stage_mask = static_cast(in_src_stage_mask); + src_stage_mask = in_src_stage_mask; for (uint32_t n_buffer_memory_barrier = 0; n_buffer_memory_barrier < in_buffer_memory_barrier_count; @@ -625,11 +789,11 @@ Anvil::PipelineBarrierCommand::PipelineBarrierCommand(VkPipelineStageFlags } /** Please see header for specification */ -Anvil::CommandBufferBase::PushConstantsCommand::PushConstantsCommand(std::shared_ptr in_layout_ptr, - VkShaderStageFlags in_stage_flags, - uint32_t in_offset, - uint32_t in_size, - const void* in_values) +Anvil::CommandBufferBase::PushConstantsCommand::PushConstantsCommand(Anvil::PipelineLayout* in_layout_ptr, + Anvil::ShaderStageFlags in_stage_flags, + uint32_t in_offset, + uint32_t in_size, + const void* in_values) :Command(COMMAND_TYPE_PUSH_CONSTANTS) { layout_ptr = in_layout_ptr; @@ -640,8 +804,8 @@ Anvil::CommandBufferBase::PushConstantsCommand::PushConstantsCommand(std::shared } /** Please see header for specification */ -Anvil::CommandBufferBase::ResetEventCommand::ResetEventCommand(std::shared_ptr in_event_ptr, - VkPipelineStageFlags in_stage_mask) +Anvil::CommandBufferBase::ResetEventCommand::ResetEventCommand(Anvil::Event* in_event_ptr, + Anvil::PipelineStageFlags in_stage_mask) :Command(COMMAND_TYPE_RESET_EVENT) { event = in_event_ptr->get_event(); @@ -650,9 +814,9 @@ 
Anvil::CommandBufferBase::ResetEventCommand::ResetEventCommand(std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_start_query, - uint32_t in_query_count) +Anvil::CommandBufferBase::ResetQueryPoolCommand::ResetQueryPoolCommand(Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_start_query, + uint32_t in_query_count) :Command(COMMAND_TYPE_RESET_QUERY_POOL) { query_count = in_query_count; @@ -661,12 +825,12 @@ Anvil::CommandBufferBase::ResetQueryPoolCommand::ResetQueryPoolCommand(std::shar } /** Please see header for specification */ -Anvil::CommandBufferBase::ResolveImageCommand::ResolveImageCommand(std::shared_ptr in_src_image_ptr, - VkImageLayout in_src_image_layout, - std::shared_ptr in_dst_image_ptr, - VkImageLayout in_dst_image_layout, - uint32_t in_region_count, - const VkImageResolve* in_region_ptrs) +Anvil::CommandBufferBase::ResolveImageCommand::ResolveImageCommand(Anvil::Image* in_src_image_ptr, + Anvil::ImageLayout in_src_image_layout, + Anvil::Image* in_dst_image_ptr, + Anvil::ImageLayout in_dst_image_layout, + uint32_t in_region_count, + const Anvil::ImageResolve* in_region_ptrs) :Command(COMMAND_TYPE_RESOLVE_IMAGE) { dst_image = in_dst_image_ptr->get_image(); @@ -714,8 +878,15 @@ Anvil::CommandBufferBase::SetDepthBoundsCommand::SetDepthBoundsCommand(float in_ } /** Please see header for specification */ -Anvil::CommandBufferBase::SetEventCommand::SetEventCommand(std::shared_ptr in_event_ptr, - VkPipelineStageFlags in_stage_mask) +Anvil::CommandBufferBase::SetDeviceMaskKHRCommand::SetDeviceMaskKHRCommand(uint32_t in_device_mask) + :Command(COMMAND_TYPE_SET_DEVICE_MASK_KHR), + device_mask(in_device_mask) +{ +} + +/** Please see header for specification */ +Anvil::CommandBufferBase::SetEventCommand::SetEventCommand(Anvil::Event* in_event_ptr, + Anvil::PipelineStageFlags in_stage_mask) :Command(COMMAND_TYPE_SET_EVENT) { event = in_event_ptr->get_event(); @@ -730,6 +901,13 @@ Anvil::CommandBufferBase::SetLineWidthCommand::SetLineWidthCommand(float in_line line_width = in_line_width; } +/** Please see header for specification */ +Anvil::CommandBufferBase::SetSampleLocationsEXTCommand::SetSampleLocationsEXTCommand(const Anvil::SampleLocationsInfo& in_sample_locations_info) + :Command(COMMAND_TYPE_SET_SAMPLE_LOCATIONS_EXT) +{ + sample_locations_info = in_sample_locations_info; +} + /** Please see header for specification */ Anvil::CommandBufferBase::SetScissorCommand::SetScissorCommand(uint32_t in_first_scissor, uint32_t in_scissor_count, @@ -747,8 +925,8 @@ Anvil::CommandBufferBase::SetScissorCommand::SetScissorCommand(uint32_t i } /** Please see header for specification */ -Anvil::CommandBufferBase::SetStencilCompareMaskCommand::SetStencilCompareMaskCommand(VkStencilFaceFlags in_face_mask, - uint32_t in_stencil_compare_mask) +Anvil::CommandBufferBase::SetStencilCompareMaskCommand::SetStencilCompareMaskCommand(Anvil::StencilFaceFlags in_face_mask, + uint32_t in_stencil_compare_mask) :Command(COMMAND_TYPE_SET_STENCIL_COMPARE_MASK) { face_mask = in_face_mask; @@ -756,8 +934,8 @@ Anvil::CommandBufferBase::SetStencilCompareMaskCommand::SetStencilCompareMaskCom } /** Please see header for specification */ -Anvil::CommandBufferBase::SetStencilReferenceCommand::SetStencilReferenceCommand(VkStencilFaceFlags in_face_mask, - uint32_t in_stencil_reference) +Anvil::CommandBufferBase::SetStencilReferenceCommand::SetStencilReferenceCommand(Anvil::StencilFaceFlags in_face_mask, + uint32_t in_stencil_reference) :Command(COMMAND_TYPE_SET_STENCIL_REFERENCE) { face_mask = in_face_mask; 
@@ -765,8 +943,8 @@ Anvil::CommandBufferBase::SetStencilReferenceCommand::SetStencilReferenceCommand } /** Please see header for specification */ -Anvil::CommandBufferBase::SetStencilWriteMaskCommand::SetStencilWriteMaskCommand(VkStencilFaceFlags in_face_mask, - uint32_t in_stencil_write_mask) +Anvil::CommandBufferBase::SetStencilWriteMaskCommand::SetStencilWriteMaskCommand(Anvil::StencilFaceFlags in_face_mask, + uint32_t in_stencil_write_mask) :Command(COMMAND_TYPE_SET_STENCIL_WRITE_MASK) { face_mask = in_face_mask; @@ -790,10 +968,10 @@ Anvil::CommandBufferBase::SetViewportCommand::SetViewportCommand(uint32_t } /** Please see header for specification */ -Anvil::CommandBufferBase::UpdateBufferCommand::UpdateBufferCommand(std::shared_ptr in_dst_buffer_ptr, - VkDeviceSize in_dst_offset, - VkDeviceSize in_data_size, - const uint32_t* in_data_ptr) +Anvil::CommandBufferBase::UpdateBufferCommand::UpdateBufferCommand(Anvil::Buffer* in_dst_buffer_ptr, + VkDeviceSize in_dst_offset, + VkDeviceSize in_data_size, + const void* in_data_ptr) :Command(COMMAND_TYPE_UPDATE_BUFFER) { data_ptr = in_data_ptr; @@ -804,20 +982,20 @@ Anvil::CommandBufferBase::UpdateBufferCommand::UpdateBufferCommand(std::shared_p } /** Please see header for specification */ -Anvil::CommandBufferBase::WaitEventsCommand::WaitEventsCommand(uint32_t in_event_count, - std::shared_ptr* in_event_ptrs, - VkPipelineStageFlags in_src_stage_mask, - VkPipelineStageFlags in_dst_stage_mask, - uint32_t in_memory_barrier_count, - const MemoryBarrier* const in_memory_barriers_ptr, - uint32_t in_buffer_memory_barrier_count, - const BufferBarrier* const in_buffer_memory_barriers_ptr, - uint32_t in_image_memory_barrier_count, - const ImageBarrier* const in_image_memory_barriers_ptr) +Anvil::CommandBufferBase::WaitEventsCommand::WaitEventsCommand(uint32_t in_event_count, + Anvil::Event* const* in_event_ptrs, + Anvil::PipelineStageFlags in_src_stage_mask, + Anvil::PipelineStageFlags in_dst_stage_mask, + uint32_t in_memory_barrier_count, + const MemoryBarrierAnv* const in_memory_barriers_ptr, + uint32_t in_buffer_memory_barrier_count, + const BufferBarrier* const in_buffer_memory_barriers_ptr, + uint32_t in_image_memory_barrier_count, + const ImageBarrier* const in_image_memory_barriers_ptr) :Command(COMMAND_TYPE_WAIT_EVENTS) { - dst_stage_mask = static_cast(in_dst_stage_mask); - src_stage_mask = static_cast(in_src_stage_mask); + dst_stage_mask = in_dst_stage_mask; + src_stage_mask = in_src_stage_mask; for (uint32_t n_event = 0; n_event < in_event_count; @@ -850,9 +1028,22 @@ Anvil::CommandBufferBase::WaitEventsCommand::WaitEventsCommand(uint32_t } /** Please see header for specification */ -Anvil::CommandBufferBase::WriteTimestampCommand::WriteTimestampCommand(VkPipelineStageFlagBits in_pipeline_stage, - std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_entry) +Anvil::CommandBufferBase::WriteBufferMarkerAMDCommand::WriteBufferMarkerAMDCommand(const Anvil::PipelineStageFlagBits& in_pipeline_stage, + Anvil::Buffer* in_dst_buffer_ptr, + VkDeviceSize in_dst_offset, + const uint32_t& in_marker) + :Command(COMMAND_TYPE_WRITE_BUFFER_MARKER_AMD) +{ + dst_buffer_ptr = in_dst_buffer_ptr; + dst_offset = in_dst_offset; + marker = in_marker; + pipeline_stage = in_pipeline_stage; +} + +/** Please see header for specification */ +Anvil::CommandBufferBase::WriteTimestampCommand::WriteTimestampCommand(Anvil::PipelineStageFlagBits in_pipeline_stage, + Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_entry) :Command(COMMAND_TYPE_WRITE_TIMESTAMP) { 
entry = in_entry; @@ -867,18 +1058,23 @@ Anvil::CommandBufferBase::WriteTimestampCommand::WriteTimestampCommand(VkPipelin * @param parent_command_pool_ptr Command pool to allocate the commands from. Must not be nullptr. * @param type Command buffer type **/ -Anvil::CommandBufferBase::CommandBufferBase(std::weak_ptr in_device_ptr, - std::shared_ptr in_parent_command_pool_ptr, - Anvil::CommandBufferType in_type) - :DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT), - CallbacksSupportProvider (COMMAND_BUFFER_CALLBACK_ID_COUNT), - m_command_buffer (VK_NULL_HANDLE), - m_device_ptr (in_device_ptr), - m_is_renderpass_active (false), - m_parent_command_pool_ptr (in_parent_command_pool_ptr), - m_recording_in_progress (false), - m_type (in_type) +Anvil::CommandBufferBase::CommandBufferBase(const Anvil::BaseDevice* in_device_ptr, + Anvil::CommandPool* in_parent_command_pool_ptr, + Anvil::CommandBufferType in_type, + bool in_mt_safe) + :MTSafetySupportProvider (in_mt_safe), + DebugMarkerSupportProvider (in_device_ptr, + Anvil::ObjectType::COMMAND_BUFFER), + CallbacksSupportProvider (COMMAND_BUFFER_CALLBACK_ID_COUNT), + m_command_buffer (VK_NULL_HANDLE), + m_device_mask (0), + m_device_ptr (in_device_ptr), + m_is_renderpass_active (false), + m_n_debug_label_regions_started(0), + m_parent_command_pool_ptr (in_parent_command_pool_ptr), + m_recording_in_progress (false), + m_renderpass_device_mask (0), + m_type (in_type) { anvil_assert(in_parent_command_pool_ptr != nullptr); } @@ -893,17 +1089,20 @@ Anvil::CommandBufferBase::~CommandBufferBase() { anvil_assert(!m_recording_in_progress); - if (m_command_buffer != VK_NULL_HANDLE && - m_parent_command_pool_ptr.expired() == false) + if (m_command_buffer != VK_NULL_HANDLE && + m_parent_command_pool_ptr != nullptr) { - std::shared_ptr command_pool_locked_ptr(m_parent_command_pool_ptr); - std::shared_ptr device_locked_ptr (m_device_ptr); - /* Physically free the command buffer we own */ - vkFreeCommandBuffers(device_locked_ptr->get_device_vk(), - command_pool_locked_ptr->get_command_pool(), - 1, /* commandBufferCount */ - &m_command_buffer); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkFreeCommandBuffers(m_device_ptr->get_device_vk(), + m_parent_command_pool_ptr->get_command_pool(), + 1, /* commandBufferCount */ + &m_command_buffer); + } + unlock(); + m_parent_command_pool_ptr->unlock(); m_command_buffer = VK_NULL_HANDLE; } @@ -915,423 +1114,203 @@ Anvil::CommandBufferBase::~CommandBufferBase() #endif } -/** Stores a specified buffer instance in m_referenced_buffers if the buffer has not already been cached. - * - * @param in_buffer_ptr Buffer instance to cache. Must not be null. - **/ -void Anvil::CommandBufferBase::cache_referenced_buffer(std::shared_ptr in_buffer_ptr) +/** Please see header for specification */ +void Anvil::CommandBufferBase::begin_debug_utils_label(const char* in_label_name_ptr, + const float* in_color_vec4_ptr) { - auto buffer_iterator = std::find(m_referenced_buffers.begin(), - m_referenced_buffers.end(), - in_buffer_ptr); - - if (buffer_iterator == m_referenced_buffers.end() ) + if (!m_device_ptr->get_parent_instance()->get_enabled_extensions_info()->ext_debug_utils() ) { - /* Anvil's Memory Allocator defers memory allocation in time unless the user explicitly requested a bake operation. - * - * vkCmd*() commands may use memory backing which is bound to the object at command recording time. 
For implicit - * baking op to work correctly, we therefore need to ensure all scheduled allocs are handled BEFORE the vkCmd*() - * invocation is passed down to the driver. - */ + goto end; + } - /* NOTE: Buffer::get_memory_block() triggers implicit bake op if needed */ - auto buffer_mem_block_ptr = in_buffer_ptr->get_memory_block(0); + { + const auto& entrypoints = m_device_ptr->get_parent_instance()->get_extension_ext_debug_utils_entrypoints(); + VkDebugUtilsLabelEXT label_info; - ANVIL_REDUNDANT_VARIABLE(buffer_mem_block_ptr); + label_info.color[0] = in_color_vec4_ptr[0]; + label_info.color[1] = in_color_vec4_ptr[1]; + label_info.color[2] = in_color_vec4_ptr[2]; + label_info.color[3] = in_color_vec4_ptr[3]; + label_info.pLabelName = in_label_name_ptr; + label_info.pNext = nullptr; + label_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT; - m_referenced_buffers.push_back(in_buffer_ptr); + entrypoints.vkCmdBeginDebugUtilsLabelEXT(m_command_buffer, + &label_info); } -} -/** Stores a specified command buffer instance in m_referenced_command_buffers if the cmd buffer has not already been cached. - * - * @param in_cmd_buffer_ptr Command buffer instance to cache. Must not be null. - **/ -void Anvil::CommandBufferBase::cache_referenced_command_buffer(std::shared_ptr in_cmd_buffer_ptr) -{ - auto cmd_buffer_iterator = std::find(m_referenced_command_buffers.begin(), - m_referenced_command_buffers.end(), - in_cmd_buffer_ptr); + ++m_n_debug_label_regions_started; +end: + ; +} - if (cmd_buffer_iterator == m_referenced_command_buffers.end() ) +#ifdef STORE_COMMAND_BUFFER_COMMANDS + /** Clears the command vector by releasing all command descriptors back to the heap memory. */ + void Anvil::CommandBufferBase::clear_commands() { - m_referenced_command_buffers.push_back(in_cmd_buffer_ptr); + m_commands.clear(); } -} +#endif -/** Stores a specified descriptor set instance in m_referenced_descriptor_sets if the DS instance has not - * already been cached, along with underlying objects specified DS' bindings refer to. The latter are cached - * in corresponding m_referenced_* vectors. - * - * @param in_ds_ptr Descriptor set instance to cache. Must not be null. - **/ -void Anvil::CommandBufferBase::cache_referenced_descriptor_set(std::shared_ptr in_ds_ptr) +/* Please see header for specification */ +void Anvil::CommandBufferBase::end_debug_utils_label() { - decltype(m_referenced_descriptor_sets)::iterator ds_iterator; - const uint32_t n_bindings = in_ds_ptr->get_n_bindings(); - - /* Extract any buffer / image instances from container objects and cache them separately. - * This is needed for implicit memory allocation bake support. - * - * Other object types need not be cached exclusively, as caching the descriptor set will automatically imply - * the objects owned by DS do not go out of scope. 
- */ - for (uint32_t n_binding = 0; - n_binding < n_bindings; - ++n_binding) + if (!m_device_ptr->get_parent_instance()->get_enabled_extensions_info()->ext_debug_utils() ) { - VkDescriptorType current_binding_type; - uint32_t n_binding_array_items = 0; - - if (!in_ds_ptr->get_binding_array_size(n_binding, - &n_binding_array_items) ) - { - continue; - } - - if (n_binding_array_items > 0) - { - if (!in_ds_ptr->get_binding_descriptor_type(n_binding, - ¤t_binding_type) ) - { - /* This should never happen */ - anvil_assert_fail(); + goto end; + } - continue; - } + if (m_n_debug_label_regions_started == 0) + { + anvil_assert(m_n_debug_label_regions_started != 0); - for (uint32_t n_binding_array_item = 0; - n_binding_array_item < n_binding_array_items; - ++n_binding_array_item) - { - switch (current_binding_type) - { - case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: - { - std::shared_ptr image_view_ptr; - - if (!in_ds_ptr->get_combined_image_sampler_binding_properties(n_binding, - n_binding_array_item, - nullptr, /* out_opt_image_layout_ptr */ - &image_view_ptr, - nullptr) ) /* out_opt_sampler_ptr */ - { - /* This should never happen */ - anvil_assert_fail(); - - continue; - } - - cache_referenced_image(image_view_ptr->get_parent_image() ); - break; - } - - case VK_DESCRIPTOR_TYPE_SAMPLER: - { - /* Don't care */ - break; - } - - case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: - { - std::shared_ptr image_view_ptr; - - if (!in_ds_ptr->get_input_attachment_binding_properties(n_binding, - n_binding_array_item, - nullptr, /* out_opt_image_layout_ptr */ - &image_view_ptr) ) - { - /* This should never happen */ - anvil_assert_fail(); - - continue; - } - - cache_referenced_image(image_view_ptr->get_parent_image() ); - break; - } - - case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: - { - std::shared_ptr image_view_ptr; - - if (!in_ds_ptr->get_sampled_image_binding_properties(n_binding, - n_binding_array_item, - nullptr, /* out_opt_image_layout_ptr */ - &image_view_ptr) ) - { - /* This should never happen */ - anvil_assert_fail(); - - continue; - } - - cache_referenced_image(image_view_ptr->get_parent_image() ); - break; - } - - case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: - { - std::shared_ptr image_view_ptr; - - if (!in_ds_ptr->get_storage_image_binding_properties(n_binding, - n_binding_array_item, - nullptr, /* out_opt_image_layout_ptr */ - &image_view_ptr) ) - { - /* This should never happen */ - anvil_assert_fail(); - - continue; - } - - cache_referenced_image(image_view_ptr->get_parent_image() ); - break; - } - - case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: - case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: - { - std::shared_ptr buffer_ptr; - - if (!in_ds_ptr->get_storage_buffer_binding_properties(n_binding, - n_binding_array_item, - &buffer_ptr, - nullptr, /* out_opt_size_ptr */ - nullptr) ) /* out_opt_start_offset_ptr */ - { - /* This should never happen */ - anvil_assert_fail(); - - continue; - } - - cache_referenced_buffer(buffer_ptr); - break; - } - - case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: - { - std::shared_ptr buffer_view_ptr; - - if (!in_ds_ptr->get_storage_texel_buffer_binding_properties(n_binding, - n_binding_array_item, - &buffer_view_ptr) ) - { - /* This should never happen */ - anvil_assert_fail(); - - continue; - } - - cache_referenced_buffer(buffer_view_ptr->get_parent_buffer() ); - break; - } - - - case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: - case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: - { - std::shared_ptr buffer_ptr; - - if (!in_ds_ptr->get_uniform_buffer_binding_properties(n_binding, - 
n_binding_array_item, - &buffer_ptr, - nullptr, /* out_opt_size_ptr */ - nullptr) ) /* out_opt_start_offset_ptr */ - { - /* This should never happen */ - anvil_assert_fail(); - - continue; - } - - cache_referenced_buffer(buffer_ptr); - break; - } - - case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: - { - std::shared_ptr buffer_view_ptr; - - if (!in_ds_ptr->get_uniform_texel_buffer_binding_properties(n_binding, - n_binding_array_item, - &buffer_view_ptr) ) - { - /* This should never happen */ - anvil_assert_fail(); - - continue; - } - - cache_referenced_buffer(buffer_view_ptr->get_parent_buffer() ); - break; - } - - default: - { - anvil_assert_fail(); - } - } - } - } + goto end; } - ds_iterator = std::find(m_referenced_descriptor_sets.begin(), - m_referenced_descriptor_sets.end(), - in_ds_ptr); - - if (ds_iterator == m_referenced_descriptor_sets.end() ) { - m_referenced_descriptor_sets.push_back(in_ds_ptr); + const auto& entrypoints = m_device_ptr->get_parent_instance()->get_extension_ext_debug_utils_entrypoints(); + + entrypoints.vkCmdEndDebugUtilsLabelEXT(m_command_buffer); } + + --m_n_debug_label_regions_started; +end: + ; } -/** Stores a specified event instance in m_referenced_events, if the event has not already been cached. - * - * @param in_event_ptr Event instance to cache. Must not be null. - **/ -void Anvil::CommandBufferBase::cache_referenced_event(std::shared_ptr in_event_ptr) +/** Please see header for specification */ +void Anvil::CommandBufferBase::insert_debug_utils_label(const char* in_label_name_ptr, + const float* in_color_vec4_ptr) { - auto event_iterator = std::find(m_referenced_events.begin(), - m_referenced_events.end(), - in_event_ptr); - - if (event_iterator == m_referenced_events.end() ) + if (!m_device_ptr->get_parent_instance()->get_enabled_extensions_info()->ext_debug_utils() ) { - m_referenced_events.push_back(in_event_ptr); + goto end; } -} -/** Stores a specified framebuffer instance in m_referenced_framebuffers, if the framebuffer has not already been cached. - * Any attachments defined for the FB are also cached. - * - * @param in_fb_ptr Framebuffer instance to cache. Must not be null. - **/ -void Anvil::CommandBufferBase::cache_referenced_framebuffer(std::shared_ptr in_fb_ptr) -{ - decltype(m_referenced_framebuffers)::iterator fb_iterator; - const auto n_attachments = in_fb_ptr->get_n_attachments(); - - /* Extract image instances that the specified framebuffer points to. This is needed for - * implicit memory allocation bake support. 
- */ - for (uint32_t n_attachment = 0; - n_attachment < n_attachments; - ++n_attachment) { - std::shared_ptr image_view_ptr; - - if (!in_fb_ptr->get_attachment_at_index(n_attachment, - &image_view_ptr) ) - { - /* This should never happen */ - anvil_assert_fail(); + const auto& entrypoints = m_device_ptr->get_parent_instance()->get_extension_ext_debug_utils_entrypoints(); + VkDebugUtilsLabelEXT label_info; - continue; - } + label_info.color[0] = in_color_vec4_ptr[0]; + label_info.color[1] = in_color_vec4_ptr[1]; + label_info.color[2] = in_color_vec4_ptr[2]; + label_info.color[3] = in_color_vec4_ptr[3]; + label_info.pLabelName = in_label_name_ptr; + label_info.pNext = nullptr; + label_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT; - cache_referenced_image(image_view_ptr->get_parent_image() ); + entrypoints.vkCmdInsertDebugUtilsLabelEXT(m_command_buffer, + &label_info); } - fb_iterator = std::find(m_referenced_framebuffers.begin(), - m_referenced_framebuffers.end(), - in_fb_ptr); - - if (fb_iterator == m_referenced_framebuffers.end() ) - { - m_referenced_framebuffers.push_back(in_fb_ptr); - } +end: + ; } -/** Stores a specified image instance in m_referenced_images, if the image has not already been cached. - * - * @param in_image_ptr Image instance to cache. Must not be null. - **/ -void Anvil::CommandBufferBase::cache_referenced_image(std::shared_ptr in_image_ptr) +/* Please see header for specification */ +bool Anvil::CommandBufferBase::record_begin_query(Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_entry, + Anvil::QueryControlFlags in_flags) { - auto image_iterator = std::find(m_referenced_images.begin(), - m_referenced_images.end(), - in_image_ptr); + /* NOTE: The command can be executed both inside and outside a renderpass */ + bool result = false; - if (image_iterator == m_referenced_images.end() ) + if (!m_recording_in_progress) { - /* Anvil's Memory Allocator defers memory allocation in time unless the user explicitly requested a bake operation. - * - * vkCmd*() commands may use memory backing which is bound to the object at command recording time. For implicit - * baking op to work correctly, we therefore need to ensure all scheduled allocs are handled BEFORE the vkCmd*() - * invocation is passed down to the driver. - */ + anvil_assert(m_recording_in_progress); - /* NOTE: Image::get_memory_block() triggers implicit bake op if needed */ - auto image_mem_block_ptr = in_image_ptr->get_memory_block(); + goto end; + } - ANVIL_REDUNDANT_VARIABLE(image_mem_block_ptr); + #ifdef STORE_COMMAND_BUFFER_COMMANDS + { + if (!m_command_stashing_disabled) + { + m_commands.push_back(BeginQueryCommand(in_query_pool_ptr, + in_entry, + in_flags) ); + } + } + #endif - m_referenced_images.push_back(in_image_ptr); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdBeginQuery(m_command_buffer, + in_query_pool_ptr->get_query_pool(), + in_entry, + in_flags.get_vk() ); } + unlock(); + m_parent_command_pool_ptr->unlock(); + + result = true; +end: + return result; } -/** Stores a specified query pool instance in m_referenced_query_pools, if the query pool has not already been cached. - * - * @param in_query_pool_ptr Query pool instance to cache. Must not be null. 
- **/ -void Anvil::CommandBufferBase::cache_referenced_query_pool(std::shared_ptr in_query_pool_ptr) +/* Please see header for specification */ +bool Anvil::CommandBufferBase::record_begin_query_indexed(Anvil::QueryPool* in_query_pool_ptr, + const Anvil::QueryIndex& in_query, + const Anvil::QueryControlFlags& in_flags, + const uint32_t& in_index) { - auto qp_iterator = std::find(m_referenced_query_pools.begin(), - m_referenced_query_pools.end(), - in_query_pool_ptr); + /* NOTE: The command can be executed both inside and outside a renderpass */ + const auto& entrypoints = m_device_ptr->get_extension_ext_transform_feedback_entrypoints(); + bool result = false; - if (qp_iterator == m_referenced_query_pools.end() ) + if (!m_recording_in_progress) { - m_referenced_query_pools.push_back(in_query_pool_ptr); - } -} + anvil_assert(m_recording_in_progress); -/** Stores a specified renderpass instance in m_referenced_renderpasses, if the renderpass has not already been cached. - * - * @param in_renderpass_ptr Renderpass instance to cache. Must not be null. - **/ -void Anvil::CommandBufferBase::cache_referenced_renderpass(std::shared_ptr in_renderpass_ptr) -{ - auto rp_iterator = std::find(m_referenced_renderpasses.begin(), - m_referenced_renderpasses.end(), - in_renderpass_ptr); + goto end; + } - if (rp_iterator == m_referenced_renderpasses.end() ) + #ifdef STORE_COMMAND_BUFFER_COMMANDS { - m_referenced_renderpasses.push_back(in_renderpass_ptr); + if (!m_command_stashing_disabled) + { + m_commands.push_back(BeginQueryIndexedEXTCommand(in_query_pool_ptr, + in_query, + in_flags, + in_index) ); + } } -} + #endif -#ifdef STORE_COMMAND_BUFFER_COMMANDS - /** Clears the command vector by releasing all command descriptors back to the heap memory. */ - void Anvil::CommandBufferBase::clear_commands() + m_parent_command_pool_ptr->lock(); + lock(); { - m_commands.clear(); + entrypoints.vkCmdBeginQueryIndexedEXT(m_command_buffer, + in_query_pool_ptr->get_query_pool(), + in_query, + in_flags.get_vk(), + in_index); } -#endif + unlock(); + m_parent_command_pool_ptr->unlock(); -/** Clears all m_referenced_* vectors. 
*/ -void Anvil::CommandBufferBase::clear_referenced_objects() -{ - m_referenced_buffers.clear (); - m_referenced_command_buffers.clear(); - m_referenced_descriptor_sets.clear(); - m_referenced_events.clear (); - m_referenced_framebuffers.clear (); - m_referenced_images.clear (); - m_referenced_query_pools.clear (); - m_referenced_renderpasses.clear (); + result = true; +end: + return result; } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_begin_query(std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_entry, - VkQueryControlFlags in_flags) +bool Anvil::CommandBufferBase::record_begin_transform_feedback_EXT(const uint32_t& in_first_counter_buffer, + const uint32_t& in_n_counter_buffers, + Anvil::Buffer** in_opt_counter_buffer_ptrs, + const VkDeviceSize* in_opt_counter_buffer_offsets) { - /* NOTE: The command can be executed both inside and outside a renderpass */ - bool result = false; + auto counter_buffer_ptrs = std::vector(in_n_counter_buffers); + const auto& entrypoints = m_device_ptr->get_extension_ext_transform_feedback_entrypoints(); + bool result = false; + + if (!m_is_renderpass_active) + { + anvil_assert(m_is_renderpass_active); + + goto end; + } if (!m_recording_in_progress) { @@ -1340,23 +1319,56 @@ bool Anvil::CommandBufferBase::record_begin_query(std::shared_ptrget_buffer() + : VK_NULL_HANDLE; + } + #ifdef STORE_COMMAND_BUFFER_COMMANDS { if (!m_command_stashing_disabled) { - m_commands.push_back(BeginQueryCommand(in_query_pool_ptr, - in_entry, - in_flags) ); + std::vector buffer_ptr_vec(in_n_counter_buffers, + nullptr); + std::vector offset_vec (in_n_counter_buffers); + + for (uint32_t n_counter_buffer = 0; + n_counter_buffer < in_n_counter_buffers; + ++n_counter_buffer) + { + if (in_opt_counter_buffer_ptrs != nullptr && + in_opt_counter_buffer_ptrs[n_counter_buffer] != nullptr) + { + buffer_ptr_vec.at(n_counter_buffer) = in_opt_counter_buffer_ptrs[n_counter_buffer]; + } + + if (in_opt_counter_buffer_offsets != nullptr) + { + offset_vec.at(n_counter_buffer) = in_opt_counter_buffer_offsets[n_counter_buffer]; + } + } + + m_commands.push_back(BeginTransformFeedbackEXTCommand(in_first_counter_buffer, + buffer_ptr_vec, + offset_vec) ); } } #endif - cache_referenced_query_pool(in_query_pool_ptr); - - vkCmdBeginQuery(m_command_buffer, - in_query_pool_ptr->get_query_pool(), - in_entry, - in_flags); + m_parent_command_pool_ptr->lock(); + lock(); + { + entrypoints.vkCmdBeginTransformFeedbackEXT(m_command_buffer, + in_first_counter_buffer, + in_n_counter_buffers, + (counter_buffer_ptrs.size() > 0) ? &counter_buffer_ptrs.at(0) : nullptr, + in_opt_counter_buffer_offsets); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -1364,25 +1376,23 @@ bool Anvil::CommandBufferBase::record_begin_query(std::shared_ptr in_layout_ptr, - uint32_t in_first_set, - uint32_t in_set_count, - std::shared_ptr* in_descriptor_set_ptrs, - uint32_t in_dynamic_offset_count, - const uint32_t* in_dynamic_offset_ptrs) +bool Anvil::CommandBufferBase::record_bind_descriptor_sets(Anvil::PipelineBindPoint in_pipeline_bind_point, + Anvil::PipelineLayout* in_layout_ptr, + uint32_t in_first_set, + uint32_t in_set_count, + const Anvil::DescriptorSet* const* in_descriptor_set_ptrs, + uint32_t in_dynamic_offset_count, + const uint32_t* in_dynamic_offset_ptrs) { /* Note: Command supported inside and outside the renderpass. 
*/ - VkDescriptorSet dss_vk[100]; - bool result = false; - - anvil_assert(in_set_count < sizeof(dss_vk) / sizeof(dss_vk[0]) ); + auto dss_vk = std::vector(in_set_count); + bool result = false; for (uint32_t n_set = 0; n_set < in_set_count; ++n_set) { - dss_vk[n_set] = in_descriptor_set_ptrs[n_set]->get_descriptor_set_vk(); + dss_vk.at(n_set) = in_descriptor_set_ptrs[n_set]->get_descriptor_set_vk(); } if (!m_recording_in_progress) @@ -1407,21 +1417,20 @@ bool Anvil::CommandBufferBase::record_bind_descriptor_sets(VkPipelineBindPoint } #endif - for (uint32_t n_ds = 0; - n_ds < in_set_count; - ++n_ds) + m_parent_command_pool_ptr->lock(); + lock(); { - cache_referenced_descriptor_set(in_descriptor_set_ptrs[n_ds]); + Anvil::Vulkan::vkCmdBindDescriptorSets(m_command_buffer, + static_cast(in_pipeline_bind_point), + in_layout_ptr->get_pipeline_layout(), + in_first_set, + in_set_count, + (in_set_count > 0) ? &dss_vk.at(0) : nullptr, + in_dynamic_offset_count, + in_dynamic_offset_ptrs); } - - vkCmdBindDescriptorSets(m_command_buffer, - in_pipeline_bind_point, - in_layout_ptr->get_pipeline_layout(), - in_first_set, - in_set_count, - dss_vk, - in_dynamic_offset_count, - in_dynamic_offset_ptrs); + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -1429,9 +1438,9 @@ bool Anvil::CommandBufferBase::record_bind_descriptor_sets(VkPipelineBindPoint } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_bind_index_buffer(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - VkIndexType in_index_type) +bool Anvil::CommandBufferBase::record_bind_index_buffer(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::IndexType in_index_type) { /* Note: Command supported inside and outside the renderpass. */ bool result = false; @@ -1454,12 +1463,16 @@ bool Anvil::CommandBufferBase::record_bind_index_buffer(std::shared_ptrget_buffer(), - in_offset, - in_index_type); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdBindIndexBuffer(m_command_buffer, + in_buffer_ptr->get_buffer(), + in_offset, + static_cast(in_index_type) ); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -1467,13 +1480,12 @@ bool Anvil::CommandBufferBase::record_bind_index_buffer(std::shared_ptr device_locked_ptr(m_device_ptr); - VkPipeline pipeline_vk = VK_NULL_HANDLE; - bool result = false; + VkPipeline pipeline_vk (VK_NULL_HANDLE); + bool result (false); if (!m_recording_in_progress) { @@ -1482,9 +1494,11 @@ bool Anvil::CommandBufferBase::record_bind_pipeline(VkPipelineBindPoint in_pipel goto end; } - pipeline_vk = (in_pipeline_bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) ? device_locked_ptr->get_compute_pipeline_manager()->get_compute_pipeline (in_pipeline_id) - : (in_pipeline_bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) ? device_locked_ptr->get_graphics_pipeline_manager()->get_graphics_pipeline(in_pipeline_id) - : VK_NULL_HANDLE; + anvil_assert(in_pipeline_bind_point == Anvil::PipelineBindPoint::COMPUTE || + in_pipeline_bind_point == Anvil::PipelineBindPoint::GRAPHICS); + + pipeline_vk = (in_pipeline_bind_point == Anvil::PipelineBindPoint::COMPUTE) ? 
m_device_ptr->get_compute_pipeline_manager ()->get_pipeline(in_pipeline_id) + : m_device_ptr->get_graphics_pipeline_manager()->get_pipeline(in_pipeline_id); #ifdef STORE_COMMAND_BUFFER_COMMANDS { @@ -1496,9 +1510,15 @@ bool Anvil::CommandBufferBase::record_bind_pipeline(VkPipelineBindPoint in_pipel } #endif - vkCmdBindPipeline(m_command_buffer, - in_pipeline_bind_point, - pipeline_vk); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdBindPipeline(m_command_buffer, + static_cast(in_pipeline_bind_point), + pipeline_vk); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -1506,16 +1526,16 @@ bool Anvil::CommandBufferBase::record_bind_pipeline(VkPipelineBindPoint in_pipel } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_bind_vertex_buffers(uint32_t in_start_binding, - uint32_t in_binding_count, - std::shared_ptr* in_buffer_ptrs, - const VkDeviceSize* in_offset_ptrs) +bool Anvil::CommandBufferBase::record_bind_transform_feedback_buffers_EXT(const uint32_t& in_first_binding, + const uint32_t& in_n_bindings, + Anvil::Buffer** in_buffer_ptrs, + const VkDeviceSize* in_offsets_ptr, + const VkDeviceSize* in_sizes_ptr) { /* Note: Command supported inside and outside the renderpass. */ - VkBuffer buffers[100]; - bool result = false; - - anvil_assert(in_binding_count < sizeof(buffers) / sizeof(buffers[0]) ); + auto buffers = std::vector(in_n_bindings); + const auto& entrypoints = m_device_ptr->get_extension_ext_transform_feedback_entrypoints (); + bool result = false; if (!m_recording_in_progress) { @@ -1528,28 +1548,47 @@ bool Anvil::CommandBufferBase::record_bind_vertex_buffers(uint32_t { if (!m_command_stashing_disabled) { - m_commands.push_back(BindVertexBuffersCommand(in_start_binding, - in_binding_count, - in_buffer_ptrs, - in_offset_ptrs) ); + std::vector buffer_vec(in_n_bindings); + std::vector offset_vec(in_n_bindings); + std::vector size_vec (in_n_bindings); + + for (uint32_t n_binding = 0; + n_binding < in_n_bindings; + ++n_binding) + { + buffer_vec.at(n_binding) = in_buffer_ptrs[n_binding]; + offset_vec.at(n_binding) = in_offsets_ptr[n_binding]; + size_vec.at (n_binding) = in_sizes_ptr [n_binding]; + } + + m_commands.push_back(BindTransformFeedbackBuffersEXTCommand(in_first_binding, + in_n_bindings, + buffer_vec, + offset_vec, + size_vec) ); } } #endif for (uint32_t n_binding = 0; - n_binding < in_binding_count; + n_binding < in_n_bindings; ++n_binding) { - buffers[n_binding] = in_buffer_ptrs[n_binding]->get_buffer(); - - cache_referenced_buffer(in_buffer_ptrs[n_binding]); + buffers.at(n_binding) = in_buffer_ptrs[n_binding]->get_buffer(); } - vkCmdBindVertexBuffers(m_command_buffer, - in_start_binding, - in_binding_count, - buffers, - in_offset_ptrs); + m_parent_command_pool_ptr->lock(); + lock(); + { + entrypoints.vkCmdBindTransformFeedbackBuffersEXT(m_command_buffer, + in_first_binding, + in_n_bindings, + (in_n_bindings > 0) ? 
&buffers.at(0) : nullptr, + in_offsets_ptr, + in_sizes_ptr); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -1557,19 +1596,72 @@ bool Anvil::CommandBufferBase::record_bind_vertex_buffers(uint32_t } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_blit_image(std::shared_ptr in_src_image_ptr, - VkImageLayout in_src_image_layout, - std::shared_ptr in_dst_image_ptr, - VkImageLayout in_dst_image_layout, - uint32_t in_region_count, - const VkImageBlit* in_region_ptrs, - VkFilter in_filter) +bool Anvil::CommandBufferBase::record_bind_vertex_buffers(uint32_t in_start_binding, + uint32_t in_binding_count, + Anvil::Buffer** in_buffer_ptrs, + const VkDeviceSize* in_offset_ptrs) { - bool result = false; + /* Note: Command supported inside and outside the renderpass. */ + auto buffers = std::vector(in_binding_count); + bool result = false; - if (m_is_renderpass_active) + if (!m_recording_in_progress) { - anvil_assert(!m_is_renderpass_active); + anvil_assert(m_recording_in_progress); + + goto end; + } + + #ifdef STORE_COMMAND_BUFFER_COMMANDS + { + if (!m_command_stashing_disabled) + { + m_commands.push_back(BindVertexBuffersCommand(in_start_binding, + in_binding_count, + in_buffer_ptrs, + in_offset_ptrs) ); + } + } + #endif + + for (uint32_t n_binding = 0; + n_binding < in_binding_count; + ++n_binding) + { + buffers.at(n_binding) = in_buffer_ptrs[n_binding]->get_buffer(); + } + + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdBindVertexBuffers(m_command_buffer, + in_start_binding, + in_binding_count, + (in_binding_count > 0) ? &buffers.at(0) : nullptr, + in_offset_ptrs); + } + unlock(); + m_parent_command_pool_ptr->unlock(); + + result = true; +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::CommandBufferBase::record_blit_image(Anvil::Image* in_src_image_ptr, + Anvil::ImageLayout in_src_image_layout, + Anvil::Image* in_dst_image_ptr, + Anvil::ImageLayout in_dst_image_layout, + uint32_t in_region_count, + const Anvil::ImageBlit* in_region_ptrs, + Anvil::Filter in_filter) +{ + bool result = false; + + if (m_is_renderpass_active) + { + anvil_assert(!m_is_renderpass_active); goto end; } @@ -1596,17 +1688,20 @@ bool Anvil::CommandBufferBase::record_blit_image(std::shared_ptr i } #endif - cache_referenced_image(in_src_image_ptr); - cache_referenced_image(in_dst_image_ptr); - - vkCmdBlitImage(m_command_buffer, - in_src_image_ptr->get_image(), - in_src_image_layout, - in_dst_image_ptr->get_image(), - in_dst_image_layout, - in_region_count, - in_region_ptrs, - in_filter); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdBlitImage(m_command_buffer, + in_src_image_ptr->get_image(), + static_cast(in_src_image_layout), + in_dst_image_ptr->get_image(), + static_cast(in_dst_image_layout), + in_region_count, + reinterpret_cast(in_region_ptrs), + static_cast(in_filter) ); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -1614,10 +1709,10 @@ bool Anvil::CommandBufferBase::record_blit_image(std::shared_ptr i } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_clear_attachments(uint32_t in_n_attachments, - const VkClearAttachment* in_attachment_ptrs, - uint32_t in_n_rects, - const VkClearRect* in_rect_ptrs) +bool Anvil::CommandBufferBase::record_clear_attachments(uint32_t in_n_attachments, + const Anvil::ClearAttachment* in_attachment_ptrs, + uint32_t in_n_rects, + const VkClearRect* in_rect_ptrs) { bool result = false; 
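Note (illustrative, not part of the patch): every driver call in the record_*() functions above is now bracketed by the same lock sequence - the parent command pool is locked first, the command buffer second, and both are released in reverse order after the vkCmd*() call returns. The sketch below mimics that ordering with plain std::mutex and hypothetical stand-in types; Anvil's real lock()/unlock() come from its MT-safety support classes and behave differently in detail.

/* Stand-alone sketch of the pool-then-buffer locking order; types are illustrative only. */
#include <mutex>

struct FakeCommandPool   { std::mutex mutex; };
struct FakeCommandBuffer { std::mutex mutex; FakeCommandPool* parent_pool_ptr = nullptr; };

/* Stand-in for a vkCmd*() entrypoint. */
static void fake_vk_cmd(FakeCommandBuffer&) {}

void record_with_locks(FakeCommandBuffer& cmd_buffer)
{
    cmd_buffer.parent_pool_ptr->mutex.lock();   /* m_parent_command_pool_ptr->lock();   */
    cmd_buffer.mutex.lock();                    /* lock();                              */
    {
        fake_vk_cmd(cmd_buffer);                /* Anvil::Vulkan::vkCmd*(...);          */
    }
    cmd_buffer.mutex.unlock();                  /* unlock();                            */
    cmd_buffer.parent_pool_ptr->mutex.unlock(); /* m_parent_command_pool_ptr->unlock(); */
}

int main()
{
    FakeCommandPool   pool;
    FakeCommandBuffer cmd_buffer;

    cmd_buffer.parent_pool_ptr = &pool;
    record_with_locks(cmd_buffer);

    return 0;
}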
@@ -1647,11 +1742,17 @@ bool Anvil::CommandBufferBase::record_clear_attachments(uint32_t } #endif - vkCmdClearAttachments(m_command_buffer, - in_n_attachments, - in_attachment_ptrs, - in_n_rects, - in_rect_ptrs); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdClearAttachments(m_command_buffer, + in_n_attachments, + reinterpret_cast(in_attachment_ptrs), + in_n_rects, + in_rect_ptrs); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -1659,11 +1760,11 @@ bool Anvil::CommandBufferBase::record_clear_attachments(uint32_t } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_clear_color_image(std::shared_ptr in_image_ptr, - VkImageLayout in_image_layout, - const VkClearColorValue* in_color_ptr, - uint32_t in_range_count, - const VkImageSubresourceRange* in_range_ptrs) +bool Anvil::CommandBufferBase::record_clear_color_image(Anvil::Image* in_image_ptr, + Anvil::ImageLayout in_image_layout, + const VkClearColorValue* in_color_ptr, + uint32_t in_range_count, + const Anvil::ImageSubresourceRange* in_range_ptrs) { bool result = false; @@ -1694,14 +1795,18 @@ bool Anvil::CommandBufferBase::record_clear_color_image(std::shared_ptrget_image(), - in_image_layout, - in_color_ptr, - in_range_count, - in_range_ptrs); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdClearColorImage(m_command_buffer, + in_image_ptr->get_image(), + static_cast(in_image_layout), + in_color_ptr, + in_range_count, + reinterpret_cast(in_range_ptrs) ); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -1709,11 +1814,11 @@ bool Anvil::CommandBufferBase::record_clear_color_image(std::shared_ptr in_image_ptr, - VkImageLayout in_image_layout, - const VkClearDepthStencilValue* in_depth_stencil_ptr, - uint32_t in_range_count, - const VkImageSubresourceRange* in_range_ptrs) +bool Anvil::CommandBufferBase::record_clear_depth_stencil_image(Anvil::Image* in_image_ptr, + Anvil::ImageLayout in_image_layout, + const VkClearDepthStencilValue* in_depth_stencil_ptr, + uint32_t in_range_count, + const Anvil::ImageSubresourceRange* in_range_ptrs) { bool result = false; @@ -1744,14 +1849,18 @@ bool Anvil::CommandBufferBase::record_clear_depth_stencil_image(std::shared_ptr< } #endif - cache_referenced_image(in_image_ptr); - - vkCmdClearDepthStencilImage(m_command_buffer, - in_image_ptr->get_image(), - in_image_layout, - in_depth_stencil_ptr, - in_range_count, - in_range_ptrs); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdClearDepthStencilImage(m_command_buffer, + in_image_ptr->get_image(), + static_cast(in_image_layout), + in_depth_stencil_ptr, + in_range_count, + reinterpret_cast(in_range_ptrs) ); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -1759,10 +1868,10 @@ bool Anvil::CommandBufferBase::record_clear_depth_stencil_image(std::shared_ptr< } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_copy_buffer(std::shared_ptr in_src_buffer_ptr, - std::shared_ptr in_dst_buffer_ptr, - uint32_t in_region_count, - const VkBufferCopy* in_region_ptrs) +bool Anvil::CommandBufferBase::record_copy_buffer(Anvil::Buffer* in_src_buffer_ptr, + Anvil::Buffer* in_dst_buffer_ptr, + uint32_t in_region_count, + const Anvil::BufferCopy* in_region_ptrs) { bool result = false; @@ -1792,14 +1901,17 @@ bool Anvil::CommandBufferBase::record_copy_buffer(std::shared_ptr } #endif - cache_referenced_buffer(in_src_buffer_ptr); - 
cache_referenced_buffer(in_dst_buffer_ptr); - - vkCmdCopyBuffer(m_command_buffer, - in_src_buffer_ptr->get_buffer(), - in_dst_buffer_ptr->get_buffer(), - in_region_count, - in_region_ptrs); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdCopyBuffer(m_command_buffer, + in_src_buffer_ptr->get_buffer(), + in_dst_buffer_ptr->get_buffer(), + in_region_count, + reinterpret_cast(in_region_ptrs) ); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -1807,11 +1919,11 @@ bool Anvil::CommandBufferBase::record_copy_buffer(std::shared_ptr } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_copy_buffer_to_image(std::shared_ptr in_src_buffer_ptr, - std::shared_ptr in_dst_image_ptr, - VkImageLayout in_dst_image_layout, - uint32_t in_region_count, - const VkBufferImageCopy* in_region_ptrs) +bool Anvil::CommandBufferBase::record_copy_buffer_to_image(Anvil::Buffer* in_src_buffer_ptr, + Anvil::Image* in_dst_image_ptr, + Anvil::ImageLayout in_dst_image_layout, + uint32_t in_region_count, + const Anvil::BufferImageCopy* in_region_ptrs) { bool result = false; @@ -1842,15 +1954,18 @@ bool Anvil::CommandBufferBase::record_copy_buffer_to_image(std::shared_ptrget_buffer(), - in_dst_image_ptr->get_image(), - in_dst_image_layout, - in_region_count, - in_region_ptrs); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdCopyBufferToImage(m_command_buffer, + in_src_buffer_ptr->get_buffer(), + in_dst_image_ptr->get_image(), + static_cast(in_dst_image_layout), + in_region_count, + reinterpret_cast(in_region_ptrs) ); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -1858,12 +1973,12 @@ bool Anvil::CommandBufferBase::record_copy_buffer_to_image(std::shared_ptr in_src_image_ptr, - VkImageLayout in_src_image_layout, - std::shared_ptr in_dst_image_ptr, - VkImageLayout in_dst_image_layout, - uint32_t in_region_count, - const VkImageCopy* in_region_ptrs) +bool Anvil::CommandBufferBase::record_copy_image(Anvil::Image* in_src_image_ptr, + Anvil::ImageLayout in_src_image_layout, + Anvil::Image* in_dst_image_ptr, + Anvil::ImageLayout in_dst_image_layout, + uint32_t in_region_count, + const Anvil::ImageCopy* in_region_ptrs) { bool result = false; @@ -1895,16 +2010,19 @@ bool Anvil::CommandBufferBase::record_copy_image(std::shared_ptr i } #endif - cache_referenced_image(in_src_image_ptr); - cache_referenced_image(in_dst_image_ptr); - - vkCmdCopyImage(m_command_buffer, - in_src_image_ptr->get_image(), - in_src_image_layout, - in_dst_image_ptr->get_image(), - in_dst_image_layout, - in_region_count, - in_region_ptrs); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdCopyImage(m_command_buffer, + in_src_image_ptr->get_image(), + static_cast(in_src_image_layout), + in_dst_image_ptr->get_image(), + static_cast(in_dst_image_layout), + in_region_count, + reinterpret_cast(in_region_ptrs) ); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -1912,11 +2030,11 @@ bool Anvil::CommandBufferBase::record_copy_image(std::shared_ptr i } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_copy_image_to_buffer(std::shared_ptr in_src_image_ptr, - VkImageLayout in_src_image_layout, - std::shared_ptr in_dst_buffer_ptr, - uint32_t in_region_count, - const VkBufferImageCopy* in_region_ptrs) +bool Anvil::CommandBufferBase::record_copy_image_to_buffer(Anvil::Image* in_src_image_ptr, + Anvil::ImageLayout in_src_image_layout, + Anvil::Buffer* 
in_dst_buffer_ptr, + uint32_t in_region_count, + const Anvil::BufferImageCopy* in_region_ptrs) { bool result = false; @@ -1947,15 +2065,18 @@ bool Anvil::CommandBufferBase::record_copy_image_to_buffer(std::shared_ptrget_image(), - in_src_image_layout, - in_dst_buffer_ptr->get_buffer(), - in_region_count, - in_region_ptrs); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdCopyImageToBuffer(m_command_buffer, + in_src_image_ptr->get_image(), + static_cast(in_src_image_layout), + in_dst_buffer_ptr->get_buffer(), + in_region_count, + reinterpret_cast(in_region_ptrs) ); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -1963,13 +2084,13 @@ bool Anvil::CommandBufferBase::record_copy_image_to_buffer(std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_start_query, - uint32_t in_query_count, - std::shared_ptr in_dst_buffer_ptr, - VkDeviceSize in_dst_offset, - VkDeviceSize in_dst_stride, - VkQueryResultFlags in_flags) +bool Anvil::CommandBufferBase::record_copy_query_pool_results(Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_start_query, + uint32_t in_query_count, + Anvil::Buffer* in_dst_buffer_ptr, + VkDeviceSize in_dst_offset, + VkDeviceSize in_dst_stride, + VkQueryResultFlags in_flags) { bool result = false; @@ -2002,17 +2123,20 @@ bool Anvil::CommandBufferBase::record_copy_query_pool_results(std::shared_ptrget_query_pool(), - in_start_query, - in_query_count, - in_dst_buffer_ptr->get_buffer(), - in_dst_offset, - in_dst_stride, - in_flags); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdCopyQueryPoolResults(m_command_buffer, + in_query_pool_ptr->get_query_pool(), + in_start_query, + in_query_count, + in_dst_buffer_ptr->get_buffer(), + in_dst_offset, + in_dst_stride, + in_flags); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2051,10 +2175,16 @@ bool Anvil::CommandBufferBase::record_dispatch(uint32_t in_x, } #endif - vkCmdDispatch(m_command_buffer, - in_x, - in_y, - in_z); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdDispatch(m_command_buffer, + in_x, + in_y, + in_z); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2065,10 +2195,9 @@ bool Anvil::CommandBufferBase::record_dispatch(uint32_t in_x, bool Anvil::CommandBufferBase::record_debug_marker_begin_EXT(const std::string& in_marker_name, const float* in_opt_color) { - std::shared_ptr device_locked_ptr(m_device_ptr); - const auto& entrypoints (device_locked_ptr->get_extension_ext_debug_marker_entrypoints() ); - VkDebugMarkerMarkerInfoEXT marker_info; - bool result = false; + const auto& entrypoints (m_device_ptr->get_extension_ext_debug_marker_entrypoints() ); + VkDebugMarkerMarkerInfoEXT marker_info; + bool result (false); if (!m_recording_in_progress) { @@ -2087,7 +2216,7 @@ bool Anvil::CommandBufferBase::record_debug_marker_begin_EXT(const std::string& } #endif - anvil_assert(device_locked_ptr->is_ext_debug_marker_extension_enabled() ); + anvil_assert(m_device_ptr->get_extension_info()->ext_debug_marker() ); if (in_opt_color != nullptr) { @@ -2107,8 +2236,12 @@ bool Anvil::CommandBufferBase::record_debug_marker_begin_EXT(const std::string& marker_info.pNext = nullptr; marker_info.sType = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT; - entrypoints.vkCmdDebugMarkerBeginEXT(m_command_buffer, - &marker_info); + m_parent_command_pool_ptr->lock(); + { + entrypoints.vkCmdDebugMarkerBeginEXT(m_command_buffer, + &marker_info); + } + 
m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2118,9 +2251,8 @@ bool Anvil::CommandBufferBase::record_debug_marker_begin_EXT(const std::string& /* Please see header for specification */ bool Anvil::CommandBufferBase::record_debug_marker_end_EXT() { - std::shared_ptr device_locked_ptr(m_device_ptr); - const auto& entrypoints (device_locked_ptr->get_extension_ext_debug_marker_entrypoints() ); - bool result (false); + const auto& entrypoints(m_device_ptr->get_extension_ext_debug_marker_entrypoints() ); + bool result (false); if (!m_recording_in_progress) { @@ -2129,7 +2261,7 @@ bool Anvil::CommandBufferBase::record_debug_marker_end_EXT() goto end; } - anvil_assert(device_locked_ptr->is_ext_debug_marker_extension_enabled() ); + anvil_assert(m_device_ptr->get_extension_info()->ext_debug_marker() ); #ifdef STORE_COMMAND_BUFFER_COMMANDS @@ -2141,7 +2273,11 @@ bool Anvil::CommandBufferBase::record_debug_marker_end_EXT() } #endif - entrypoints.vkCmdDebugMarkerEndEXT(m_command_buffer); + m_parent_command_pool_ptr->lock(); + { + entrypoints.vkCmdDebugMarkerEndEXT(m_command_buffer); + } + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2152,10 +2288,9 @@ bool Anvil::CommandBufferBase::record_debug_marker_end_EXT() bool Anvil::CommandBufferBase::record_debug_marker_insert_EXT(const std::string& in_marker_name, const float* in_opt_color) { - std::shared_ptr device_locked_ptr(m_device_ptr); - const auto& entrypoints (device_locked_ptr->get_extension_ext_debug_marker_entrypoints() ); - VkDebugMarkerMarkerInfoEXT marker_info; - bool result (false); + const auto& entrypoints (m_device_ptr->get_extension_ext_debug_marker_entrypoints() ); + VkDebugMarkerMarkerInfoEXT marker_info; + bool result (false); if (!m_recording_in_progress) { @@ -2164,7 +2299,7 @@ bool Anvil::CommandBufferBase::record_debug_marker_insert_EXT(const std::string& goto end; } - anvil_assert(device_locked_ptr->is_ext_debug_marker_extension_enabled() ); + anvil_assert(m_device_ptr->get_extension_info()->ext_debug_marker() ); #ifdef STORE_COMMAND_BUFFER_COMMANDS @@ -2195,8 +2330,73 @@ bool Anvil::CommandBufferBase::record_debug_marker_insert_EXT(const std::string& marker_info.pNext = nullptr; marker_info.sType = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT; - entrypoints.vkCmdDebugMarkerInsertEXT(m_command_buffer, - &marker_info); + m_parent_command_pool_ptr->lock(); + { + entrypoints.vkCmdDebugMarkerInsertEXT(m_command_buffer, + &marker_info); + } + m_parent_command_pool_ptr->unlock(); + + result = true; +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::CommandBufferBase::record_dispatch_base_KHR(uint32_t in_base_group_x, + uint32_t in_base_group_y, + uint32_t in_base_group_z, + uint32_t in_group_count_x, + uint32_t in_group_count_y, + uint32_t in_group_count_z) +{ + const auto& entrypoints(m_device_ptr->get_extension_khr_device_group_entrypoints() ); + bool result (false); + + if (m_is_renderpass_active) + { + anvil_assert(!m_is_renderpass_active); + + goto end; + } + + if (!m_recording_in_progress) + { + anvil_assert(m_recording_in_progress); + + goto end; + } + + anvil_assert(m_device_ptr->get_extension_info()->khr_device_group() ); + + + #ifdef STORE_COMMAND_BUFFER_COMMANDS + { + if (!m_command_stashing_disabled) + { + m_commands.push_back(DispatchBaseKHRCommand(in_base_group_x, + in_base_group_y, + in_base_group_z, + in_group_count_x, + in_group_count_y, + in_group_count_z) ); + } + } + #endif + + m_parent_command_pool_ptr->lock(); + lock(); + { + 
entrypoints.vkCmdDispatchBaseKHR(m_command_buffer, + in_base_group_x, + in_base_group_y, + in_base_group_z, + in_group_count_x, + in_group_count_y, + in_group_count_z); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2204,8 +2404,8 @@ bool Anvil::CommandBufferBase::record_debug_marker_insert_EXT(const std::string& } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_dispatch_indirect(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset) +bool Anvil::CommandBufferBase::record_dispatch_indirect(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset) { bool result = false; @@ -2233,11 +2433,15 @@ bool Anvil::CommandBufferBase::record_dispatch_indirect(std::shared_ptrget_buffer(), - in_offset); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdDispatchIndirect(m_command_buffer, + in_buffer_ptr->get_buffer(), + in_offset); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2278,11 +2482,17 @@ bool Anvil::CommandBufferBase::record_draw(uint32_t in_vertex_count, } #endif - vkCmdDraw(m_command_buffer, - in_vertex_count, - in_instance_count, - in_first_vertex, - in_first_instance); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdDraw(m_command_buffer, + in_vertex_count, + in_instance_count, + in_first_vertex, + in_first_instance); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2325,12 +2535,18 @@ bool Anvil::CommandBufferBase::record_draw_indexed(uint32_t in_index_count, } #endif - vkCmdDrawIndexed(m_command_buffer, - in_index_count, - in_instance_count, - in_first_index, - in_vertex_offset, - in_first_instance); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdDrawIndexed(m_command_buffer, + in_index_count, + in_instance_count, + in_first_index, + in_vertex_offset, + in_first_instance); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2338,10 +2554,10 @@ bool Anvil::CommandBufferBase::record_draw_indexed(uint32_t in_index_count, } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_draw_indexed_indirect(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - uint32_t in_count, - uint32_t in_stride) +bool Anvil::CommandBufferBase::record_draw_indexed_indirect(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + uint32_t in_count, + uint32_t in_stride) { bool result = false; @@ -2371,13 +2587,75 @@ bool Anvil::CommandBufferBase::record_draw_indexed_indirect(std::shared_ptrlock(); + lock(); + { + Anvil::Vulkan::vkCmdDrawIndexedIndirect(m_command_buffer, + in_buffer_ptr->get_buffer(), + in_offset, + in_count, + in_stride); + } + unlock(); + m_parent_command_pool_ptr->unlock(); + + result = true; +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::CommandBufferBase::record_draw_indirect_byte_count_EXT(const uint32_t& in_instance_count, + const uint32_t& in_first_instance, + Anvil::Buffer* in_counter_buffer_ptr, + const VkDeviceSize& in_counter_buffer_offset, + const uint32_t& in_counter_offset, + const uint32_t& in_vertex_stride) +{ + const auto& entrypoints = m_device_ptr->get_extension_ext_transform_feedback_entrypoints(); + bool result = false; + + if (!m_is_renderpass_active) + { + anvil_assert(m_is_renderpass_active); + + goto end; + } + + if (!m_recording_in_progress) + { + anvil_assert(m_recording_in_progress); + + goto end; + } + + #ifdef STORE_COMMAND_BUFFER_COMMANDS + { + if 
(!m_command_stashing_disabled) + { + m_commands.push_back(DrawIndirectByteCountEXTCommand(in_instance_count, + in_first_instance, + in_counter_buffer_ptr, + in_counter_buffer_offset, + in_counter_offset, + in_vertex_stride) ); + } + } + #endif - vkCmdDrawIndexedIndirect(m_command_buffer, - in_buffer_ptr->get_buffer(), - in_offset, - in_count, - in_stride); + m_parent_command_pool_ptr->lock(); + lock(); + { + entrypoints.vkCmdDrawIndirectByteCountEXT(m_command_buffer, + in_instance_count, + in_first_instance, + in_counter_buffer_ptr->get_buffer(), + in_counter_buffer_offset, + in_counter_offset, + in_vertex_stride); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2385,16 +2663,15 @@ bool Anvil::CommandBufferBase::record_draw_indexed_indirect(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - std::shared_ptr in_count_buffer_ptr, - VkDeviceSize in_count_offset, - uint32_t in_max_draw_count, - uint32_t in_stride) +bool Anvil::CommandBufferBase::record_draw_indexed_indirect_count_AMD(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::Buffer* in_count_buffer_ptr, + VkDeviceSize in_count_offset, + uint32_t in_max_draw_count, + uint32_t in_stride) { - std::shared_ptr device_locked_ptr(m_device_ptr); Anvil::ExtensionAMDDrawIndirectCountEntrypoints entrypoints; - bool result (false); + bool result (false); if (!m_is_renderpass_active) { @@ -2410,7 +2687,7 @@ bool Anvil::CommandBufferBase::record_draw_indexed_indirect_count_AMD(std::share goto end; } - anvil_assert(device_locked_ptr->is_amd_draw_indirect_count_extension_enabled() ); + anvil_assert(m_device_ptr->get_extension_info()->amd_draw_indirect_count() ); #ifdef STORE_COMMAND_BUFFER_COMMANDS @@ -2427,18 +2704,84 @@ bool Anvil::CommandBufferBase::record_draw_indexed_indirect_count_AMD(std::share } #endif - entrypoints = device_locked_ptr->get_extension_amd_draw_indirect_count_entrypoints(); - - cache_referenced_buffer(in_buffer_ptr); - cache_referenced_buffer(in_count_buffer_ptr); + entrypoints = m_device_ptr->get_extension_amd_draw_indirect_count_entrypoints(); + + m_parent_command_pool_ptr->lock(); + lock(); + { + entrypoints.vkCmdDrawIndexedIndirectCountAMD(m_command_buffer, + in_buffer_ptr->get_buffer(), + in_offset, + in_count_buffer_ptr->get_buffer(), + in_count_offset, + in_max_draw_count, + in_stride); + } + unlock(); + m_parent_command_pool_ptr->unlock(); + + result = true; +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::CommandBufferBase::record_draw_indexed_indirect_count_KHR(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::Buffer* in_count_buffer_ptr, + VkDeviceSize in_count_offset, + uint32_t in_max_draw_count, + uint32_t in_stride) +{ + Anvil::ExtensionKHRDrawIndirectCountEntrypoints entrypoints; + bool result (false); + + if (!m_is_renderpass_active) + { + anvil_assert(m_is_renderpass_active); + + goto end; + } + + if (!m_recording_in_progress) + { + anvil_assert(m_recording_in_progress); + + goto end; + } + + anvil_assert(m_device_ptr->get_extension_info()->khr_draw_indirect_count() ); + + + #ifdef STORE_COMMAND_BUFFER_COMMANDS + { + if (!m_command_stashing_disabled) + { + m_commands.push_back(DrawIndexedIndirectCountKHRCommand(in_buffer_ptr, + in_offset, + in_count_buffer_ptr, + in_count_offset, + in_max_draw_count, + in_stride) ); + } + } + #endif + + entrypoints = m_device_ptr->get_extension_khr_draw_indirect_count_entrypoints(); - entrypoints.vkCmdDrawIndexedIndirectCountAMD(m_command_buffer, - in_buffer_ptr->get_buffer(), 
- in_offset, - in_count_buffer_ptr->get_buffer(), - in_count_offset, - in_max_draw_count, - in_stride); + m_parent_command_pool_ptr->lock(); + lock(); + { + entrypoints.vkCmdDrawIndexedIndirectCountKHR(m_command_buffer, + in_buffer_ptr->get_buffer(), + in_offset, + in_count_buffer_ptr->get_buffer(), + in_count_offset, + in_max_draw_count, + in_stride); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2446,10 +2789,10 @@ bool Anvil::CommandBufferBase::record_draw_indexed_indirect_count_AMD(std::share } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_draw_indirect(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - uint32_t in_count, - uint32_t in_stride) +bool Anvil::CommandBufferBase::record_draw_indirect(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + uint32_t in_count, + uint32_t in_stride) { bool result = false; @@ -2479,13 +2822,17 @@ bool Anvil::CommandBufferBase::record_draw_indirect(std::shared_ptrget_buffer(), - in_offset, - in_count, - in_stride); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdDrawIndirect(m_command_buffer, + in_buffer_ptr->get_buffer(), + in_offset, + in_count, + in_stride); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2493,16 +2840,15 @@ bool Anvil::CommandBufferBase::record_draw_indirect(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_offset, - std::shared_ptr in_count_buffer_ptr, - VkDeviceSize in_count_offset, - uint32_t in_max_draw_count, - uint32_t in_stride) +bool Anvil::CommandBufferBase::record_draw_indirect_count_AMD(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::Buffer* in_count_buffer_ptr, + VkDeviceSize in_count_offset, + uint32_t in_max_draw_count, + uint32_t in_stride) { - std::shared_ptr device_locked_ptr(m_device_ptr); Anvil::ExtensionAMDDrawIndirectCountEntrypoints entrypoints; - bool result (false); + bool result (false); if (!m_is_renderpass_active) { @@ -2518,7 +2864,7 @@ bool Anvil::CommandBufferBase::record_draw_indirect_count_AMD(std::shared_ptris_amd_draw_indirect_count_extension_enabled() ); + anvil_assert(m_device_ptr->get_extension_info()->amd_draw_indirect_count() ); #ifdef STORE_COMMAND_BUFFER_COMMANDS @@ -2535,18 +2881,21 @@ bool Anvil::CommandBufferBase::record_draw_indirect_count_AMD(std::shared_ptrget_extension_amd_draw_indirect_count_entrypoints(); + entrypoints = m_device_ptr->get_extension_amd_draw_indirect_count_entrypoints(); - cache_referenced_buffer(in_buffer_ptr); - cache_referenced_buffer(in_count_buffer_ptr); - - entrypoints.vkCmdDrawIndirectCountAMD(m_command_buffer, - in_buffer_ptr->get_buffer(), - in_offset, - in_count_buffer_ptr->get_buffer(), - in_count_offset, - in_max_draw_count, - in_stride); + m_parent_command_pool_ptr->lock(); + lock(); + { + entrypoints.vkCmdDrawIndirectCountAMD(m_command_buffer, + in_buffer_ptr->get_buffer(), + in_offset, + in_count_buffer_ptr->get_buffer(), + in_count_offset, + in_max_draw_count, + in_stride); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2554,11 +2903,22 @@ bool Anvil::CommandBufferBase::record_draw_indirect_count_AMD(std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_entry) +bool Anvil::CommandBufferBase::record_draw_indirect_count_KHR(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_offset, + Anvil::Buffer* in_count_buffer_ptr, + VkDeviceSize in_count_offset, + uint32_t in_max_draw_count, + uint32_t in_stride) { - /* NOTE: The command can be executed both inside and outside a 
renderpass */ - bool result = false; + Anvil::ExtensionKHRDrawIndirectCountEntrypoints entrypoints; + bool result (false); + + if (!m_is_renderpass_active) + { + anvil_assert(m_is_renderpass_active); + + goto end; + } if (!m_recording_in_progress) { @@ -2567,21 +2927,38 @@ bool Anvil::CommandBufferBase::record_end_query(std::shared_ptrget_extension_info()->khr_draw_indirect_count() ); + + #ifdef STORE_COMMAND_BUFFER_COMMANDS { if (!m_command_stashing_disabled) { - m_commands.push_back(EndQueryCommand(in_query_pool_ptr, - in_entry) ); + m_commands.push_back(DrawIndirectCountKHRCommand(in_buffer_ptr, + in_offset, + in_count_buffer_ptr, + in_count_offset, + in_max_draw_count, + in_stride) ); } } #endif - cache_referenced_query_pool(in_query_pool_ptr); + entrypoints = m_device_ptr->get_extension_khr_draw_indirect_count_entrypoints(); - vkCmdEndQuery(m_command_buffer, - in_query_pool_ptr->get_query_pool(), - in_entry); + m_parent_command_pool_ptr->lock(); + lock(); + { + entrypoints.vkCmdDrawIndirectCountKHR(m_command_buffer, + in_buffer_ptr->get_buffer(), + in_offset, + in_count_buffer_ptr->get_buffer(), + in_count_offset, + in_max_draw_count, + in_stride); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2589,20 +2966,12 @@ bool Anvil::CommandBufferBase::record_end_query(std::shared_ptr in_dst_buffer_ptr, - VkDeviceSize in_dst_offset, - VkDeviceSize in_size, - uint32_t in_data) +bool Anvil::CommandBufferBase::record_end_query(Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_entry) { + /* NOTE: The command can be executed both inside and outside a renderpass */ bool result = false; - if (m_is_renderpass_active) - { - anvil_assert(!m_is_renderpass_active); - - goto end; - } - if (!m_recording_in_progress) { anvil_assert(m_recording_in_progress); @@ -2614,21 +2983,21 @@ bool Anvil::CommandBufferBase::record_fill_buffer(std::shared_ptr { if (!m_command_stashing_disabled) { - m_commands.push_back(FillBufferCommand(in_dst_buffer_ptr, - in_dst_offset, - in_size, - in_data) ); + m_commands.push_back(EndQueryCommand(in_query_pool_ptr, + in_entry) ); } } #endif - cache_referenced_buffer(in_dst_buffer_ptr); - - vkCmdFillBuffer(m_command_buffer, - in_dst_buffer_ptr->get_buffer(), - in_dst_offset, - in_size, - in_data); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdEndQuery(m_command_buffer, + in_query_pool_ptr->get_query_pool(), + in_entry); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2636,21 +3005,13 @@ bool Anvil::CommandBufferBase::record_fill_buffer(std::shared_ptr } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_pipeline_barrier(VkPipelineStageFlags in_src_stage_mask, - VkPipelineStageFlags in_dst_stage_mask, - VkBool32 in_by_region, - uint32_t in_memory_barrier_count, - const MemoryBarrier* const in_memory_barriers_ptr, - uint32_t in_buffer_memory_barrier_count, - const BufferBarrier* const in_buffer_memory_barriers_ptr, - uint32_t in_image_memory_barrier_count, - const ImageBarrier* const in_image_memory_barriers_ptr) +bool Anvil::CommandBufferBase::record_end_query_indexed_EXT(Anvil::QueryPool* in_query_pool_ptr, + const Anvil::QueryIndex& in_query, + const uint32_t& in_index) { /* NOTE: The command can be executed both inside and outside a renderpass */ - VkBufferMemoryBarrier buffer_barriers_vk[100]; - VkImageMemoryBarrier image_barriers_vk [100]; - VkMemoryBarrier memory_barriers_vk[100]; - bool result = false; + const auto& entrypoints = 
m_device_ptr->get_extension_ext_transform_feedback_entrypoints(); + bool result = false; if (!m_recording_in_progress) { @@ -2663,27 +3024,211 @@ bool Anvil::CommandBufferBase::record_pipeline_barrier(VkPipelineStageFlags { if (!m_command_stashing_disabled) { - m_commands.push_back(PipelineBarrierCommand(in_src_stage_mask, - in_dst_stage_mask, - in_by_region, - in_memory_barrier_count, - in_memory_barriers_ptr, - in_buffer_memory_barrier_count, - in_buffer_memory_barriers_ptr, - in_image_memory_barrier_count, - in_image_memory_barriers_ptr) ); + m_commands.push_back(EndQueryIndexedEXTCommand(in_query_pool_ptr, + in_query, + in_index) ); } } #endif - if (get_n_of_callback_subscribers(COMMAND_BUFFER_CALLBACK_ID_PIPELINE_BARRIER_COMMAND_RECORDED) > 0) + m_parent_command_pool_ptr->lock(); + lock(); { - PipelineBarrierCommand command_data(in_src_stage_mask, - in_dst_stage_mask, - in_by_region, - in_memory_barrier_count, - in_memory_barriers_ptr, - in_buffer_memory_barrier_count, + entrypoints.vkCmdEndQueryIndexedEXT(m_command_buffer, + in_query_pool_ptr->get_query_pool(), + in_query, + in_index); + } + unlock(); + m_parent_command_pool_ptr->unlock(); + + result = true; +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::CommandBufferBase::record_end_transform_feedback_EXT(const uint32_t& in_first_counter_buffer, + const uint32_t& in_n_counter_buffers, + Anvil::Buffer** in_opt_counter_buffer_ptrs, + const VkDeviceSize* in_opt_counter_buffer_offsets) +{ + auto counter_buffer_ptrs = std::vector(in_n_counter_buffers); + const auto& entrypoints = m_device_ptr->get_extension_ext_transform_feedback_entrypoints(); + bool result = false; + + if (!m_is_renderpass_active) + { + anvil_assert(m_is_renderpass_active); + + goto end; + } + + if (!m_recording_in_progress) + { + anvil_assert(m_recording_in_progress); + + goto end; + } + + for (uint32_t n_counter_buffer = 0; + n_counter_buffer < in_n_counter_buffers; + ++n_counter_buffer) + { + counter_buffer_ptrs.at(n_counter_buffer) = (in_opt_counter_buffer_ptrs != nullptr && in_opt_counter_buffer_ptrs[n_counter_buffer] != nullptr) ? in_opt_counter_buffer_ptrs[n_counter_buffer]->get_buffer() + : VK_NULL_HANDLE; + } + + #ifdef STORE_COMMAND_BUFFER_COMMANDS + { + if (!m_command_stashing_disabled) + { + std::vector buffer_ptr_vec(in_n_counter_buffers); + std::vector offset_vec (in_n_counter_buffers); + + for (uint32_t n_counter_buffer = 0; + n_counter_buffer < in_n_counter_buffers; + ++n_counter_buffer) + { + if (in_opt_counter_buffer_ptrs != nullptr && + in_opt_counter_buffer_ptrs[n_counter_buffer] != nullptr) + { + buffer_ptr_vec.at(n_counter_buffer) = in_opt_counter_buffer_ptrs[n_counter_buffer]; + } + + if (in_opt_counter_buffer_offsets != nullptr) + { + offset_vec.at(n_counter_buffer) = in_opt_counter_buffer_offsets[n_counter_buffer]; + } + } + + m_commands.push_back(EndTransformFeedbackEXTCommand(in_first_counter_buffer, + buffer_ptr_vec, + offset_vec) ); + } + } + #endif + + m_parent_command_pool_ptr->lock(); + lock(); + { + entrypoints.vkCmdEndTransformFeedbackEXT(m_command_buffer, + in_first_counter_buffer, + in_n_counter_buffers, + (in_n_counter_buffers > 0) ? 
&counter_buffer_ptrs.at(0) : nullptr, + in_opt_counter_buffer_offsets); + } + unlock(); + m_parent_command_pool_ptr->unlock(); + + result = true; +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::CommandBufferBase::record_fill_buffer(Anvil::Buffer* in_dst_buffer_ptr, + VkDeviceSize in_dst_offset, + VkDeviceSize in_size, + uint32_t in_data) +{ + bool result = false; + + if (m_is_renderpass_active) + { + anvil_assert(!m_is_renderpass_active); + + goto end; + } + + if (!m_recording_in_progress) + { + anvil_assert(m_recording_in_progress); + + goto end; + } + + #ifdef STORE_COMMAND_BUFFER_COMMANDS + { + if (!m_command_stashing_disabled) + { + m_commands.push_back(FillBufferCommand(in_dst_buffer_ptr, + in_dst_offset, + in_size, + in_data) ); + } + } + #endif + + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdFillBuffer(m_command_buffer, + in_dst_buffer_ptr->get_buffer(), + in_dst_offset, + in_size, + in_data); + } + unlock(); + m_parent_command_pool_ptr->unlock(); + + result = true; +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::CommandBufferBase::record_pipeline_barrier(Anvil::PipelineStageFlags in_src_stage_mask, + Anvil::PipelineStageFlags in_dst_stage_mask, + Anvil::DependencyFlags in_dependency_flags, + uint32_t in_memory_barrier_count, + const MemoryBarrierAnv* const in_memory_barriers_ptr, + uint32_t in_buffer_memory_barrier_count, + const BufferBarrier* const in_buffer_memory_barriers_ptr, + uint32_t in_image_memory_barrier_count, + const ImageBarrier* const in_image_memory_barriers_ptr) +{ + /* NOTE: The command can be executed both inside and outside a renderpass */ + auto buffer_barriers_vk = std::vector(in_buffer_memory_barrier_count); + auto image_barriers_vk = std::vector (in_image_memory_barrier_count); + auto memory_barriers_vk = std::vector (in_memory_barrier_count); + bool result = false; + + if (!m_recording_in_progress) + { + anvil_assert(m_recording_in_progress); + + goto end; + } + + anvil_assert((!m_is_renderpass_active) || + ((m_is_renderpass_active) && (in_dependency_flags & Anvil::DependencyFlagBits::VIEW_LOCAL_BIT) == 0)); + + #ifdef STORE_COMMAND_BUFFER_COMMANDS + { + if (!m_command_stashing_disabled) + { + m_commands.push_back(PipelineBarrierCommand(in_src_stage_mask, + in_dst_stage_mask, + in_dependency_flags, + in_memory_barrier_count, + in_memory_barriers_ptr, + in_buffer_memory_barrier_count, + in_buffer_memory_barriers_ptr, + in_image_memory_barrier_count, + in_image_memory_barriers_ptr) ); + } + } + #endif + + if (get_n_of_callback_subscribers(COMMAND_BUFFER_CALLBACK_ID_PIPELINE_BARRIER_COMMAND_RECORDED) > 0) + { + PipelineBarrierCommand command_data(in_src_stage_mask, + in_dst_stage_mask, + in_dependency_flags, + in_memory_barrier_count, + in_memory_barriers_ptr, + in_buffer_memory_barrier_count, in_buffer_memory_barriers_ptr, in_image_memory_barrier_count, in_image_memory_barriers_ptr); @@ -2694,45 +3239,43 @@ bool Anvil::CommandBufferBase::record_pipeline_barrier(VkPipelineStageFlags &callback_data); } - anvil_assert(sizeof(buffer_barriers_vk) / sizeof(buffer_barriers_vk[0]) >= in_buffer_memory_barrier_count && - sizeof(image_barriers_vk) / sizeof(image_barriers_vk [0]) >= in_image_memory_barrier_count && - sizeof(memory_barriers_vk) / sizeof(memory_barriers_vk[0]) >= in_memory_barrier_count); - for (uint32_t n_buffer_barrier = 0; n_buffer_barrier < in_buffer_memory_barrier_count; ++n_buffer_barrier) { - buffer_barriers_vk[n_buffer_barrier] = 
in_buffer_memory_barriers_ptr[n_buffer_barrier].get_barrier_vk(); - - cache_referenced_buffer(in_buffer_memory_barriers_ptr[n_buffer_barrier].buffer_ptr); + buffer_barriers_vk.at(n_buffer_barrier) = in_buffer_memory_barriers_ptr[n_buffer_barrier].get_barrier_vk(); } for (uint32_t n_image_barrier = 0; n_image_barrier < in_image_memory_barrier_count; ++n_image_barrier) { - image_barriers_vk[n_image_barrier] = in_image_memory_barriers_ptr[n_image_barrier].get_barrier_vk(); - - cache_referenced_image(in_image_memory_barriers_ptr[n_image_barrier].image_ptr); + image_barriers_vk.at(n_image_barrier) = in_image_memory_barriers_ptr[n_image_barrier].get_barrier_vk(); } for (uint32_t n_memory_barrier = 0; n_memory_barrier < in_memory_barrier_count; ++n_memory_barrier) { - memory_barriers_vk[n_memory_barrier] = in_memory_barriers_ptr[n_memory_barrier].get_barrier_vk(); + memory_barriers_vk.at(n_memory_barrier) = in_memory_barriers_ptr[n_memory_barrier].get_barrier_vk(); } - vkCmdPipelineBarrier(m_command_buffer, - in_src_stage_mask, - in_dst_stage_mask, - in_by_region, - in_memory_barrier_count, - memory_barriers_vk, - in_buffer_memory_barrier_count, - buffer_barriers_vk, - in_image_memory_barrier_count, - image_barriers_vk); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdPipelineBarrier(m_command_buffer, + in_src_stage_mask.get_vk (), + in_dst_stage_mask.get_vk (), + in_dependency_flags.get_vk(), + in_memory_barrier_count, + (in_memory_barrier_count > 0) ? &memory_barriers_vk.at(0) : nullptr, + in_buffer_memory_barrier_count, + (in_buffer_memory_barrier_count > 0) ? &buffer_barriers_vk.at(0) : nullptr, + in_image_memory_barrier_count, + (in_image_memory_barrier_count > 0) ? &image_barriers_vk.at(0) : nullptr); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2740,11 +3283,11 @@ bool Anvil::CommandBufferBase::record_pipeline_barrier(VkPipelineStageFlags } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_push_constants(std::shared_ptr in_layout_ptr, - VkShaderStageFlags in_stage_flags, - uint32_t in_offset, - uint32_t in_size, - const void* in_values) +bool Anvil::CommandBufferBase::record_push_constants(Anvil::PipelineLayout* in_layout_ptr, + Anvil::ShaderStageFlags in_stage_flags, + uint32_t in_offset, + uint32_t in_size, + const void* in_values) { /* NOTE: The command can be executed both inside and outside a renderpass */ bool result = false; @@ -2769,12 +3312,18 @@ bool Anvil::CommandBufferBase::record_push_constants(std::shared_ptrget_pipeline_layout(), - in_stage_flags, - in_offset, - in_size, - in_values); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdPushConstants(m_command_buffer, + in_layout_ptr->get_pipeline_layout(), + in_stage_flags.get_vk(), + in_offset, + in_size, + in_values); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2782,8 +3331,8 @@ bool Anvil::CommandBufferBase::record_push_constants(std::shared_ptr in_event_ptr, - VkPipelineStageFlags in_stage_mask) +bool Anvil::CommandBufferBase::record_reset_event(Anvil::Event* in_event_ptr, + Anvil::PipelineStageFlags in_stage_mask) { bool result = false; @@ -2811,11 +3360,15 @@ bool Anvil::CommandBufferBase::record_reset_event(std::shared_ptr } #endif - cache_referenced_event(in_event_ptr); - - vkCmdResetEvent(m_command_buffer, - in_event_ptr->get_event(), - in_stage_mask); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdResetEvent(m_command_buffer, + 
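// ---------------------------------------------------------------------------
// Illustrative sketch, not part of this patch: the fixed-size stack arrays
// (e.g. buffer_barriers_vk[100]) are replaced with std::vector, and the
// Vulkan call now receives "(count > 0) ? &vec.at(0) : nullptr" so that an
// empty barrier list never dereferences element 0 of an empty container.
// A hypothetical helper expressing the same idiom:
#include <vector>

template <typename T>
const T* data_or_null(const std::vector<T>& v)
{
    /* Equivalent to the "(count > 0) ? &vec.at(0) : nullptr" pattern used in
     * record_pipeline_barrier(), record_wait_events() and similar functions. */
    return v.empty() ? nullptr
                     : v.data();
}
// ---------------------------------------------------------------------------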
in_event_ptr->get_event(), + in_stage_mask.get_vk() ); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2823,9 +3376,9 @@ bool Anvil::CommandBufferBase::record_reset_event(std::shared_ptr } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_reset_query_pool(std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_start_query, - uint32_t in_query_count) +bool Anvil::CommandBufferBase::record_reset_query_pool(Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_start_query, + uint32_t in_query_count) { bool result = false; @@ -2854,12 +3407,16 @@ bool Anvil::CommandBufferBase::record_reset_query_pool(std::shared_ptrget_query_pool(), - in_start_query, - in_query_count); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdResetQueryPool(m_command_buffer, + in_query_pool_ptr->get_query_pool(), + in_start_query, + in_query_count); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2867,12 +3424,12 @@ bool Anvil::CommandBufferBase::record_reset_query_pool(std::shared_ptr in_src_image_ptr, - VkImageLayout in_src_image_layout, - std::shared_ptr in_dst_image_ptr, - VkImageLayout in_dst_image_layout, - uint32_t in_region_count, - const VkImageResolve* in_region_ptrs) +bool Anvil::CommandBufferBase::record_resolve_image(Anvil::Image* in_src_image_ptr, + Anvil::ImageLayout in_src_image_layout, + Anvil::Image* in_dst_image_ptr, + Anvil::ImageLayout in_dst_image_layout, + uint32_t in_region_count, + const Anvil::ImageResolve* in_region_ptrs) { bool result = false; @@ -2904,16 +3461,19 @@ bool Anvil::CommandBufferBase::record_resolve_image(std::shared_ptrget_image(), - in_src_image_layout, - in_dst_image_ptr->get_image(), - in_dst_image_layout, - in_region_count, - in_region_ptrs); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdResolveImage(m_command_buffer, + in_src_image_ptr->get_image(), + static_cast(in_src_image_layout), + in_dst_image_ptr->get_image(), + static_cast(in_dst_image_layout), + in_region_count, + reinterpret_cast(in_region_ptrs) ); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2942,8 +3502,14 @@ bool Anvil::CommandBufferBase::record_set_blend_constants(const float in_blend_c } #endif - vkCmdSetBlendConstants(m_command_buffer, - in_blend_constants); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdSetBlendConstants(m_command_buffer, + in_blend_constants); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -2976,10 +3542,16 @@ bool Anvil::CommandBufferBase::record_set_depth_bias(float in_depth_bias_constan } #endif - vkCmdSetDepthBias(m_command_buffer, - in_depth_bias_constant_factor, - in_depth_bias_clamp, - in_slope_scaled_depth_bias); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdSetDepthBias(m_command_buffer, + in_depth_bias_constant_factor, + in_depth_bias_clamp, + in_slope_scaled_depth_bias); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -3010,9 +3582,15 @@ bool Anvil::CommandBufferBase::record_set_depth_bounds(float in_min_depth_bounds } #endif - vkCmdSetDepthBounds(m_command_buffer, - in_min_depth_bounds, - in_max_depth_bounds); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdSetDepthBounds(m_command_buffer, + in_min_depth_bounds, + in_max_depth_bounds); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -3020,10 +3598,65 @@ bool 
Anvil::CommandBufferBase::record_set_depth_bounds(float in_min_depth_bounds } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_set_event(std::shared_ptr in_event_ptr, - VkPipelineStageFlags in_stage_mask) +bool Anvil::CommandBufferBase::record_set_device_mask_KHR(uint32_t in_device_mask) { - bool result = false; + /* Note: Command supported inside and outside the renderpass. */ + const auto& entrypoints (m_device_ptr->get_extension_khr_device_group_entrypoints() ); + bool result (false); + + if (!m_recording_in_progress) + { + anvil_assert(m_recording_in_progress); + + goto end; + } + + anvil_assert(m_device_ptr->get_extension_info()->khr_device_group() ); + anvil_assert(in_device_mask != 0); + + #ifdef STORE_COMMAND_BUFFER_COMMANDS + { + if (!m_command_stashing_disabled) + { + m_commands.push_back(SetDeviceMaskKHRCommand(in_device_mask) ); + } + } + #endif + + if (m_is_renderpass_active) + { + if ((m_renderpass_device_mask & in_device_mask) != in_device_mask) + { + /* It is illegal to try to activate a device which has not been enabled + * for the renderpass. + */ + anvil_assert_fail(); + + goto end; + } + } + + m_parent_command_pool_ptr->lock(); + lock(); + { + entrypoints.vkCmdSetDeviceMaskKHR(m_command_buffer, + in_device_mask); + } + unlock(); + m_parent_command_pool_ptr->unlock(); + + m_device_mask = in_device_mask; + result = true; +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::CommandBufferBase::record_set_event(Anvil::Event* in_event_ptr, + Anvil::PipelineStageFlags in_stage_mask) +{ + const Anvil::DeviceType device_type = m_device_ptr->get_type(); + bool result = false; if (m_is_renderpass_active) { @@ -3039,6 +3672,15 @@ bool Anvil::CommandBufferBase::record_set_event(std::shared_ptr in goto end; } + if (device_type == Anvil::DeviceType::MULTI_GPU && + !Anvil::Utils::is_pow2(static_cast(m_device_mask) )) + { + /* Only one device may be active at the time of this call */ + anvil_assert(Anvil::Utils::is_pow2(static_cast(m_device_mask) ) ); + + goto end; + } + #ifdef STORE_COMMAND_BUFFER_COMMANDS { if (!m_command_stashing_disabled) @@ -3049,11 +3691,15 @@ bool Anvil::CommandBufferBase::record_set_event(std::shared_ptr in } #endif - cache_referenced_event(in_event_ptr); - - vkCmdSetEvent(m_command_buffer, - in_event_ptr->get_event(), - in_stage_mask); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdSetEvent(m_command_buffer, + in_event_ptr->get_event(), + in_stage_mask.get_vk() ); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -3082,8 +3728,54 @@ bool Anvil::CommandBufferBase::record_set_line_width(float in_line_width) } #endif - vkCmdSetLineWidth(m_command_buffer, - in_line_width); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdSetLineWidth(m_command_buffer, + in_line_width); + } + unlock(); + m_parent_command_pool_ptr->unlock(); + + result = true; +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::CommandBufferBase::record_set_sample_locations_EXT(const Anvil::SampleLocationsInfo& in_sample_locations_info) +{ + /* Note: Command supported inside and outside the renderpass. 
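// ---------------------------------------------------------------------------
// Illustrative sketch, not part of this patch: two device-mask checks appear
// in this hunk. record_set_device_mask_KHR() requires the new mask to be a
// subset of the mask the render pass was begun with, and record_set_event()
// on a multi-GPU device requires exactly one active device, i.e. a
// power-of-two mask (Anvil::Utils::is_pow2). The helpers and mask values
// below are made up for the example; they only restate the two conditions.
#include <cassert>
#include <cstdint>

inline bool is_subset_of(uint32_t mask, uint32_t super_mask)
{
    return (super_mask & mask) == mask;
}

inline bool is_single_device(uint32_t mask)
{
    /* power-of-two test, analogous to Anvil::Utils::is_pow2() */
    return (mask != 0) && ((mask & (mask - 1)) == 0);
}

inline void device_mask_examples()
{
    const uint32_t renderpass_mask = 0b0111; /* GPUs 0..2 enabled for the RP   */

    assert( is_subset_of    (0b0010, renderpass_mask)); /* OK: GPU 1 only      */
    assert(!is_subset_of    (0b1000, renderpass_mask)); /* GPU 3 not in the RP */
    assert( is_single_device(0b0100));                  /* OK for set_event    */
    assert(!is_single_device(0b0110));                  /* two devices: reject */
}
// ---------------------------------------------------------------------------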
*/ + bool result = false; + VkSampleLocationsInfoEXT sample_locations_info_vk; + const auto& sl_entrypoints = m_device_ptr->get_extension_ext_sample_locations_entrypoints(); + + if (!m_recording_in_progress) + { + anvil_assert(m_recording_in_progress); + + goto end; + } + + #ifdef STORE_COMMAND_BUFFER_COMMANDS + { + if (!m_command_stashing_disabled) + { + m_commands.push_back(SetSampleLocationsEXTCommand(in_sample_locations_info) ); + } + } + #endif + + sample_locations_info_vk = in_sample_locations_info.get_vk(); + + m_parent_command_pool_ptr->lock(); + lock(); + { + sl_entrypoints.vkCmdSetSampleLocationsEXT(m_command_buffer, + &sample_locations_info_vk); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -3116,10 +3808,16 @@ bool Anvil::CommandBufferBase::record_set_scissor(uint32_t in_first_sciss } #endif - vkCmdSetScissor(m_command_buffer, - in_first_scissor, - in_scissor_count, - in_scissor_ptrs); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdSetScissor(m_command_buffer, + in_first_scissor, + in_scissor_count, + in_scissor_ptrs); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -3127,8 +3825,8 @@ bool Anvil::CommandBufferBase::record_set_scissor(uint32_t in_first_sciss } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_set_stencil_compare_mask(VkStencilFaceFlags in_face_mask, - uint32_t in_stencil_compare_mask) +bool Anvil::CommandBufferBase::record_set_stencil_compare_mask(Anvil::StencilFaceFlags in_face_mask, + uint32_t in_stencil_compare_mask) { /* Note: Command supported inside and outside the renderpass. */ bool result = false; @@ -3150,9 +3848,15 @@ bool Anvil::CommandBufferBase::record_set_stencil_compare_mask(VkStencilFaceFlag } #endif - vkCmdSetStencilCompareMask(m_command_buffer, - in_face_mask, - in_stencil_compare_mask); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdSetStencilCompareMask(m_command_buffer, + in_face_mask.get_vk(), + in_stencil_compare_mask); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -3160,8 +3864,8 @@ bool Anvil::CommandBufferBase::record_set_stencil_compare_mask(VkStencilFaceFlag } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_set_stencil_reference(VkStencilFaceFlags in_face_mask, - uint32_t in_stencil_reference) +bool Anvil::CommandBufferBase::record_set_stencil_reference(Anvil::StencilFaceFlags in_face_mask, + uint32_t in_stencil_reference) { /* Note: Command supported inside and outside the renderpass. */ bool result = false; @@ -3183,9 +3887,15 @@ bool Anvil::CommandBufferBase::record_set_stencil_reference(VkStencilFaceFlags i } #endif - vkCmdSetStencilReference(m_command_buffer, - in_face_mask, - in_stencil_reference); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdSetStencilReference(m_command_buffer, + in_face_mask.get_vk(), + in_stencil_reference); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -3193,8 +3903,8 @@ bool Anvil::CommandBufferBase::record_set_stencil_reference(VkStencilFaceFlags i } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_set_stencil_write_mask(VkStencilFaceFlags in_face_mask, - uint32_t in_stencil_write_mask) +bool Anvil::CommandBufferBase::record_set_stencil_write_mask(Anvil::StencilFaceFlags in_face_mask, + uint32_t in_stencil_write_mask) { /* Note: Command supported inside and outside the renderpass. 
*/ bool result = false; @@ -3216,9 +3926,15 @@ bool Anvil::CommandBufferBase::record_set_stencil_write_mask(VkStencilFaceFlags } #endif - vkCmdSetStencilWriteMask(m_command_buffer, - in_face_mask, - in_stencil_write_mask); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdSetStencilWriteMask(m_command_buffer, + in_face_mask.get_vk(), + in_stencil_write_mask); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -3251,10 +3967,16 @@ bool Anvil::CommandBufferBase::record_set_viewport(uint32_t in_first_vi } #endif - vkCmdSetViewport(m_command_buffer, - in_first_viewport, - in_viewport_count, - in_viewport_ptrs); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdSetViewport(m_command_buffer, + in_first_viewport, + in_viewport_count, + in_viewport_ptrs); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -3262,10 +3984,10 @@ bool Anvil::CommandBufferBase::record_set_viewport(uint32_t in_first_vi } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_update_buffer(std::shared_ptr in_dst_buffer_ptr, - VkDeviceSize in_dst_offset, - VkDeviceSize in_data_size, - const uint32_t* in_data_ptr) +bool Anvil::CommandBufferBase::record_update_buffer(Anvil::Buffer* in_dst_buffer_ptr, + VkDeviceSize in_dst_offset, + VkDeviceSize in_data_size, + const void* in_data_ptr) { bool result = false; @@ -3295,13 +4017,18 @@ bool Anvil::CommandBufferBase::record_update_buffer(std::shared_ptrget_buffer(), - in_dst_offset, - in_data_size, - in_data_ptr); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdUpdateBuffer(m_command_buffer, + in_dst_buffer_ptr->get_buffer(), + in_dst_offset, + in_data_size, + in_data_ptr); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -3309,30 +4036,26 @@ bool Anvil::CommandBufferBase::record_update_buffer(std::shared_ptr* in_events, - VkPipelineStageFlags in_src_stage_mask, - VkPipelineStageFlags in_dst_stage_mask, - uint32_t in_memory_barrier_count, - const MemoryBarrier* const in_memory_barriers_ptr, - uint32_t in_buffer_memory_barrier_count, - const BufferBarrier* const in_buffer_memory_barriers_ptr, - uint32_t in_image_memory_barrier_count, - const ImageBarrier* const in_image_memory_barriers_ptr) +bool Anvil::CommandBufferBase::record_wait_events(uint32_t in_event_count, + Anvil::Event* const* in_events, + Anvil::PipelineStageFlags in_src_stage_mask, + Anvil::PipelineStageFlags in_dst_stage_mask, + uint32_t in_memory_barrier_count, + const MemoryBarrierAnv* const in_memory_barriers_ptr, + uint32_t in_buffer_memory_barrier_count, + const BufferBarrier* const in_buffer_memory_barriers_ptr, + uint32_t in_image_memory_barrier_count, + const ImageBarrier* const in_image_memory_barriers_ptr) { /* NOTE: The command can be executed both inside and outside a renderpass */ - VkEvent events [100]; - VkBufferMemoryBarrier buffer_barriers_vk[100]; - VkImageMemoryBarrier image_barriers_vk [100]; - VkMemoryBarrier memory_barriers_vk[100]; - bool result = false; - - anvil_assert(in_event_count > 0); /* as per spec - easy to miss */ - anvil_assert(in_event_count < sizeof(events) / sizeof(events [0]) ); - anvil_assert(in_buffer_memory_barrier_count < sizeof(buffer_barriers_vk) / sizeof(buffer_barriers_vk[0]) ); - anvil_assert(in_image_memory_barrier_count < sizeof(image_barriers_vk) / sizeof(image_barriers_vk [0]) ); - anvil_assert(in_memory_barrier_count < sizeof(memory_barriers_vk) / sizeof(memory_barriers_vk[0]) ); + 
std::vector events (in_event_count); + std::vector buffer_barriers_vk(in_buffer_memory_barrier_count); + std::vector image_barriers_vk (in_image_memory_barrier_count); + std::vector memory_barriers_vk(in_memory_barrier_count); + bool result (false); + + anvil_assert(in_event_count > 0); /* as per spec - easy to miss */ if (!m_recording_in_progress) { @@ -3359,51 +4082,98 @@ bool Anvil::CommandBufferBase::record_wait_events(uint32_t } #endif + for (uint32_t n_event = 0; n_event < in_event_count; ++n_event) { - events[n_event] = in_events[n_event]->get_event(); - - cache_referenced_event(in_events[n_event]); + events.at(n_event) = in_events[n_event]->get_event(); } for (uint32_t n_buffer_barrier = 0; n_buffer_barrier < in_buffer_memory_barrier_count; ++n_buffer_barrier) { - buffer_barriers_vk[n_buffer_barrier] = in_buffer_memory_barriers_ptr[n_buffer_barrier].get_barrier_vk(); - - cache_referenced_buffer(in_buffer_memory_barriers_ptr[n_buffer_barrier].buffer_ptr); + buffer_barriers_vk.at(n_buffer_barrier) = in_buffer_memory_barriers_ptr[n_buffer_barrier].get_barrier_vk(); } for (uint32_t n_image_barrier = 0; n_image_barrier < in_image_memory_barrier_count; ++n_image_barrier) { - image_barriers_vk[n_image_barrier] = in_image_memory_barriers_ptr[n_image_barrier].get_barrier_vk(); - - cache_referenced_image(in_image_memory_barriers_ptr[n_image_barrier].image_ptr); + image_barriers_vk.at(n_image_barrier) = in_image_memory_barriers_ptr[n_image_barrier].get_barrier_vk(); } for (uint32_t n_memory_barrier = 0; n_memory_barrier < in_memory_barrier_count; ++n_memory_barrier) { - memory_barriers_vk[n_memory_barrier] = in_memory_barriers_ptr[n_memory_barrier].get_barrier_vk(); + memory_barriers_vk.at(n_memory_barrier) = in_memory_barriers_ptr[n_memory_barrier].get_barrier_vk(); + } + + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdWaitEvents(m_command_buffer, + in_event_count, + (in_event_count > 0) ? &events.at(0) : nullptr, + in_src_stage_mask.get_vk(), + in_dst_stage_mask.get_vk(), + in_memory_barrier_count, + (in_memory_barrier_count > 0) ? &memory_barriers_vk.at(0) : nullptr, + in_buffer_memory_barrier_count, + (in_buffer_memory_barrier_count > 0) ? &buffer_barriers_vk.at(0) : nullptr, + in_image_memory_barrier_count, + (in_image_memory_barrier_count > 0) ? 
&image_barriers_vk.at(0) : nullptr); + } + unlock(); + m_parent_command_pool_ptr->unlock(); + + result = true; +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::CommandBufferBase::record_write_buffer_marker_AMD(const Anvil::PipelineStageFlagBits& in_pipeline_stage, + Anvil::Buffer* in_dst_buffer_ptr, + const VkDeviceSize& in_dst_offset, + const uint32_t& in_marker) +{ + /* NOTE: The command can be executed both inside and outside a renderpass */ + const auto& entrypoints = m_device_ptr->get_extension_amd_buffer_marker_entrypoints(); + bool result = false; + + if (!m_recording_in_progress) + { + anvil_assert(m_recording_in_progress); + + goto end; + } + + #ifdef STORE_COMMAND_BUFFER_COMMANDS + { + if (!m_command_stashing_disabled) + { + m_commands.push_back(WriteBufferMarkerAMDCommand(in_pipeline_stage, + in_dst_buffer_ptr, + in_dst_offset, + in_marker) ); + } } + #endif - vkCmdWaitEvents(m_command_buffer, - in_event_count, - events, - in_src_stage_mask, - in_dst_stage_mask, - in_memory_barrier_count, - memory_barriers_vk, - in_buffer_memory_barrier_count, - buffer_barriers_vk, - in_image_memory_barrier_count, - image_barriers_vk); + m_parent_command_pool_ptr->lock(); + lock(); + { + entrypoints.vkCmdWriteBufferMarkerAMD(m_command_buffer, + static_cast(in_pipeline_stage), + in_dst_buffer_ptr->get_buffer(), + in_dst_offset, + in_marker); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -3411,9 +4181,9 @@ bool Anvil::CommandBufferBase::record_wait_events(uint32_t } /* Please see header for specification */ -bool Anvil::CommandBufferBase::record_write_timestamp(VkPipelineStageFlagBits in_pipeline_stage, - std::shared_ptr in_query_pool_ptr, - Anvil::QueryIndex in_query_index) +bool Anvil::CommandBufferBase::record_write_timestamp(Anvil::PipelineStageFlagBits in_pipeline_stage, + Anvil::QueryPool* in_query_pool_ptr, + Anvil::QueryIndex in_query_index) { /* NOTE: The command can be executed both inside and outside a renderpass */ bool result = false; @@ -3436,12 +4206,16 @@ bool Anvil::CommandBufferBase::record_write_timestamp(VkPipelineStageFlagBits } #endif - cache_referenced_query_pool(in_query_pool_ptr); - - vkCmdWriteTimestamp(m_command_buffer, - in_pipeline_stage, - in_query_pool_ptr->get_query_pool(), - in_query_index); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdWriteTimestamp(m_command_buffer, + static_cast(in_pipeline_stage), + in_query_pool_ptr->get_query_pool(), + in_query_index); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -3461,8 +4235,14 @@ bool Anvil::CommandBufferBase::reset(bool in_should_release_resources) goto end; } - result_vk = vkResetCommandBuffer(m_command_buffer, - (in_should_release_resources) ? VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT : 0u); + m_parent_command_pool_ptr->lock(); + lock(); + { + result_vk = Anvil::Vulkan::vkResetCommandBuffer(m_command_buffer, + (in_should_release_resources) ? 
VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT : 0u); + } + unlock(); + m_parent_command_pool_ptr->unlock(); if (!is_vk_call_successful(result_vk) ) { @@ -3485,7 +4265,7 @@ bool Anvil::CommandBufferBase::reset(bool in_should_release_resources) /* Please see header for specification */ bool Anvil::CommandBufferBase::stop_recording() { - bool result = false; + bool result = false; VkResult result_vk; if (!m_recording_in_progress) @@ -3495,7 +4275,13 @@ bool Anvil::CommandBufferBase::stop_recording() goto end; } - result_vk = vkEndCommandBuffer(m_command_buffer); + m_parent_command_pool_ptr->lock(); + lock(); + { + result_vk = Anvil::Vulkan::vkEndCommandBuffer(m_command_buffer); + } + unlock(); + m_parent_command_pool_ptr->unlock(); if (!is_vk_call_successful(result_vk)) { @@ -3511,16 +4297,16 @@ bool Anvil::CommandBufferBase::stop_recording() } /* Please see header for specification */ -Anvil::PrimaryCommandBuffer::PrimaryCommandBuffer(std::weak_ptr in_device_ptr, - std::shared_ptr in_parent_command_pool_ptr) +Anvil::PrimaryCommandBuffer::PrimaryCommandBuffer(const Anvil::BaseDevice* in_device_ptr, + Anvil::CommandPool* in_parent_command_pool_ptr, + bool in_mt_safe) :CommandBufferBase(in_device_ptr, in_parent_command_pool_ptr, - COMMAND_BUFFER_TYPE_PRIMARY) - + COMMAND_BUFFER_TYPE_PRIMARY, + in_mt_safe) { - VkCommandBufferAllocateInfo alloc_info; - std::shared_ptr device_locked_ptr(in_device_ptr); - VkResult result_vk (VK_ERROR_INITIALIZATION_FAILED); + VkCommandBufferAllocateInfo alloc_info; + VkResult result_vk (VK_ERROR_INITIALIZATION_FAILED); ANVIL_REDUNDANT_VARIABLE(result_vk); @@ -3530,26 +4316,148 @@ Anvil::PrimaryCommandBuffer::PrimaryCommandBuffer(std::weak_ptrget_device_vk(), - &alloc_info, - &m_command_buffer); + in_parent_command_pool_ptr->lock(); + { + result_vk = Anvil::Vulkan::vkAllocateCommandBuffers(m_device_ptr->get_device_vk(), + &alloc_info, + &m_command_buffer); + } + in_parent_command_pool_ptr->unlock(); anvil_assert_vk_call_succeeded(result_vk); +} +/* Please see header for specification */ +bool Anvil::PrimaryCommandBuffer::record_begin_render_pass(uint32_t in_n_clear_values, + const VkClearValue* in_clear_value_ptrs, + Anvil::Framebuffer* in_fbo_ptr, + VkRect2D in_render_area, + Anvil::RenderPass* in_render_pass_ptr, + Anvil::SubpassContents in_contents, + const uint32_t& in_opt_n_attachment_initial_sample_locations, + const Anvil::AttachmentSampleLocations* in_opt_attachment_initial_sample_locations_ptr, + const uint32_t& in_opt_n_post_subpass_sample_locations, + const Anvil::SubpassSampleLocations* in_opt_post_subpass_sample_locations_ptr) +{ + return record_begin_render_pass(in_n_clear_values, + in_clear_value_ptrs, + in_fbo_ptr, + 0, /* in_device_mask */ + 1, /* in_n_render_areas */ + &in_render_area, + in_render_pass_ptr, + in_contents, + in_opt_n_attachment_initial_sample_locations, + in_opt_attachment_initial_sample_locations_ptr, + in_opt_n_post_subpass_sample_locations, + in_opt_post_subpass_sample_locations_ptr); } /* Please see header for specification */ -bool Anvil::PrimaryCommandBuffer::record_begin_render_pass(uint32_t in_n_clear_values, - const VkClearValue* in_clear_value_ptrs, - std::shared_ptr in_fbo_ptr, - VkRect2D in_render_area, - std::shared_ptr in_render_pass_ptr, - VkSubpassContents in_contents) +bool Anvil::PrimaryCommandBuffer::record_begin_render_pass(uint32_t in_n_clear_values, + const VkClearValue* in_clear_value_ptrs, + Anvil::Framebuffer* in_fbo_ptr, + uint32_t in_device_mask, + uint32_t in_n_render_areas, + const VkRect2D* 
in_render_areas_ptr, + Anvil::RenderPass* in_render_pass_ptr, + Anvil::SubpassContents in_contents, + const uint32_t& in_opt_n_attachment_initial_sample_locations, + const Anvil::AttachmentSampleLocations* in_opt_attachment_initial_sample_locations_ptr, + const uint32_t& in_opt_n_post_subpass_sample_locations, + const Anvil::SubpassSampleLocations* in_opt_post_subpass_sample_locations_ptr) { - std::weak_ptr physical_device_ptr; - VkRenderPassBeginInfo render_pass_begin_info; - bool result = false; - std::shared_ptr sgpu_device_locked_ptr(std::dynamic_pointer_cast(m_device_ptr.lock() ) ); + return record_begin_render_pass_internal(false, /* in_use_khr_create_rp2_extension */ + in_n_clear_values, + in_clear_value_ptrs, + in_fbo_ptr, + in_device_mask, + in_n_render_areas, + in_render_areas_ptr, + in_render_pass_ptr, + in_contents, + in_opt_n_attachment_initial_sample_locations, + in_opt_attachment_initial_sample_locations_ptr, + in_opt_n_post_subpass_sample_locations, + in_opt_post_subpass_sample_locations_ptr); +} + +/* Please see header for specification */ +bool Anvil::PrimaryCommandBuffer::record_begin_render_pass2_KHR(uint32_t in_n_clear_values, + const VkClearValue* in_clear_value_ptrs, + Anvil::Framebuffer* in_fbo_ptr, + VkRect2D in_render_area, + Anvil::RenderPass* in_render_pass_ptr, + Anvil::SubpassContents in_contents, + const uint32_t& in_opt_n_attachment_initial_sample_locations, + const Anvil::AttachmentSampleLocations* in_opt_attachment_initial_sample_locations_ptr, + const uint32_t& in_opt_n_post_subpass_sample_locations, + const Anvil::SubpassSampleLocations* in_opt_post_subpass_sample_locations_ptr) +{ + return record_begin_render_pass2_KHR(in_n_clear_values, + in_clear_value_ptrs, + in_fbo_ptr, + 0, /* in_device_mask */ + 1, /* in_n_render_areas */ + &in_render_area, + in_render_pass_ptr, + in_contents, + in_opt_n_attachment_initial_sample_locations, + in_opt_attachment_initial_sample_locations_ptr, + in_opt_n_post_subpass_sample_locations, + in_opt_post_subpass_sample_locations_ptr); +} + +/* Please see header for specification */ +bool Anvil::PrimaryCommandBuffer::record_begin_render_pass2_KHR(uint32_t in_n_clear_values, + const VkClearValue* in_clear_value_ptrs, + Anvil::Framebuffer* in_fbo_ptr, + uint32_t in_device_mask, + uint32_t in_n_render_areas, + const VkRect2D* in_render_areas_ptr, + Anvil::RenderPass* in_render_pass_ptr, + Anvil::SubpassContents in_contents, + const uint32_t& in_opt_n_attachment_initial_sample_locations, + const Anvil::AttachmentSampleLocations* in_opt_attachment_initial_sample_locations_ptr, + const uint32_t& in_opt_n_post_subpass_sample_locations, + const Anvil::SubpassSampleLocations* in_opt_post_subpass_sample_locations_ptr) +{ + anvil_assert(m_device_ptr->get_extension_info()->khr_create_renderpass2() ); + + return record_begin_render_pass_internal(true, /* in_use_khr_create_rp2_extension */ + in_n_clear_values, + in_clear_value_ptrs, + in_fbo_ptr, + in_device_mask, + in_n_render_areas, + in_render_areas_ptr, + in_render_pass_ptr, + in_contents, + in_opt_n_attachment_initial_sample_locations, + in_opt_attachment_initial_sample_locations_ptr, + in_opt_n_post_subpass_sample_locations, + in_opt_post_subpass_sample_locations_ptr); +} + +/* Please see header for specification */ +bool Anvil::PrimaryCommandBuffer::record_begin_render_pass_internal(const bool& in_use_khr_create_rp2_extension, + uint32_t in_n_clear_values, + const VkClearValue* in_clear_value_ptrs, + Anvil::Framebuffer* in_fbo_ptr, + uint32_t in_device_mask, + uint32_t 
in_n_render_areas, + const VkRect2D* in_render_areas_ptr, + Anvil::RenderPass* in_render_pass_ptr, + Anvil::SubpassContents in_contents, + const uint32_t& in_opt_n_attachment_initial_sample_locations, + const Anvil::AttachmentSampleLocations* in_opt_attachment_initial_sample_locations_ptr, + const uint32_t& in_opt_n_post_subpass_sample_locations, + const Anvil::SubpassSampleLocations* in_opt_post_subpass_sample_locations_ptr) +{ + const Anvil::DeviceType device_type = m_device_ptr->get_type(); + bool result = false; + + Anvil::StructChainer render_pass_begin_info_chain; if (m_is_renderpass_active) { @@ -3565,41 +4473,137 @@ bool Anvil::PrimaryCommandBuffer::record_begin_render_pass(uint32_t goto end; } - - anvil_assert(sgpu_device_locked_ptr != nullptr); /* User attempted to call this function prototype with in_n_physical_devices set to 0 */ - - physical_device_ptr = sgpu_device_locked_ptr->get_physical_device(); - #ifdef STORE_COMMAND_BUFFER_COMMANDS { if (!m_command_stashing_disabled) { - m_commands.push_back(BeginRenderPassCommand(in_n_clear_values, - in_clear_value_ptrs, - in_fbo_ptr, - 1, /* in_n_physical_devices */ - &physical_device_ptr, - &in_render_area, - in_render_pass_ptr, - in_contents) ); + if (in_use_khr_create_rp2_extension) + { + m_commands.push_back(BeginRenderPass2KHRCommand(in_n_clear_values, + in_clear_value_ptrs, + in_fbo_ptr, + in_device_mask, + in_n_render_areas, + in_render_areas_ptr, + in_render_pass_ptr, + in_contents, + in_opt_n_attachment_initial_sample_locations, + in_opt_attachment_initial_sample_locations_ptr, + in_opt_n_post_subpass_sample_locations, + in_opt_post_subpass_sample_locations_ptr) ); + } + else + { + m_commands.push_back(BeginRenderPassCommand(in_n_clear_values, + in_clear_value_ptrs, + in_fbo_ptr, + in_device_mask, + in_n_render_areas, + in_render_areas_ptr, + in_render_pass_ptr, + in_contents, + in_opt_n_attachment_initial_sample_locations, + in_opt_attachment_initial_sample_locations_ptr, + in_opt_n_post_subpass_sample_locations, + in_opt_post_subpass_sample_locations_ptr) ); + } } } #endif - render_pass_begin_info.clearValueCount = in_n_clear_values; - render_pass_begin_info.framebuffer = in_fbo_ptr->get_framebuffer(in_render_pass_ptr); - render_pass_begin_info.pClearValues = in_clear_value_ptrs; - render_pass_begin_info.pNext = nullptr; - render_pass_begin_info.renderArea = in_render_area; - render_pass_begin_info.renderPass = in_render_pass_ptr->get_render_pass(); - render_pass_begin_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO; + anvil_assert(in_render_areas_ptr != nullptr); + + { + VkRenderPassBeginInfo render_pass_begin_info; + + render_pass_begin_info.clearValueCount = in_n_clear_values; + render_pass_begin_info.framebuffer = in_fbo_ptr->get_framebuffer(in_render_pass_ptr); + render_pass_begin_info.pClearValues = in_clear_value_ptrs; + render_pass_begin_info.pNext = nullptr; + render_pass_begin_info.renderArea = in_render_areas_ptr[0]; + render_pass_begin_info.renderPass = in_render_pass_ptr->get_render_pass(); + render_pass_begin_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO; + + render_pass_begin_info_chain.append_struct(render_pass_begin_info); + } + + if (device_type == Anvil::DeviceType::MULTI_GPU) + { + VkDeviceGroupRenderPassBeginInfoKHR render_pass_device_group_begin_info; + + render_pass_device_group_begin_info.deviceMask = in_device_mask; + render_pass_device_group_begin_info.deviceRenderAreaCount = in_n_render_areas; + render_pass_device_group_begin_info.pDeviceRenderAreas = in_render_areas_ptr; + 
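// ---------------------------------------------------------------------------
// Illustrative sketch, not part of this patch: StructChainer only builds a
// pNext chain. For the multi-GPU path, the chain assembled here is equivalent
// to wiring the structures by hand, roughly as below (the handles and the
// per-device render-area array are placeholders; Vulkan 1.1 headers assumed).
#include <vulkan/vulkan.h>

void begin_render_pass_on_device_group(VkCommandBuffer cmd_buffer,
                                       VkRenderPass    render_pass,
                                       VkFramebuffer   framebuffer,
                                       const VkRect2D* render_areas, /* one per device */
                                       uint32_t        n_render_areas,
                                       uint32_t        device_mask)
{
    VkDeviceGroupRenderPassBeginInfo device_group_info = {};

    device_group_info.sType                 = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO;
    device_group_info.deviceMask            = device_mask;
    device_group_info.deviceRenderAreaCount = n_render_areas;
    device_group_info.pDeviceRenderAreas    = render_areas;

    VkRenderPassBeginInfo begin_info = {};

    begin_info.sType       = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
    begin_info.pNext       = &device_group_info;  /* <- the chained struct       */
    begin_info.renderPass  = render_pass;
    begin_info.framebuffer = framebuffer;
    begin_info.renderArea  = render_areas[0];     /* first area, as in the patch */

    vkCmdBeginRenderPass(cmd_buffer,
                        &begin_info,
                         VK_SUBPASS_CONTENTS_INLINE);
}
// ---------------------------------------------------------------------------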
render_pass_device_group_begin_info.pNext = nullptr; + render_pass_device_group_begin_info.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR; + + render_pass_begin_info_chain.append_struct(render_pass_device_group_begin_info); + + m_renderpass_device_mask = render_pass_device_group_begin_info.deviceMask; + } + + if (in_opt_n_attachment_initial_sample_locations != 0 || + in_opt_n_post_subpass_sample_locations != 0) + { + std::vector attachment_initial_sample_locations_vk(in_opt_n_attachment_initial_sample_locations); + VkRenderPassSampleLocationsBeginInfoEXT sl_begin_info; + std::vector post_subpass_sample_locations_vk (in_opt_n_post_subpass_sample_locations); + + anvil_assert(m_device_ptr->get_extension_info()->ext_sample_locations() ); + + for (uint32_t n_attachment = 0; + n_attachment < in_opt_n_attachment_initial_sample_locations; + ++n_attachment) + { + attachment_initial_sample_locations_vk.at(n_attachment) = in_opt_attachment_initial_sample_locations_ptr[n_attachment].get_vk(); + } - cache_referenced_framebuffer(in_fbo_ptr); - cache_referenced_renderpass (in_render_pass_ptr); + for (uint32_t n_subpass = 0; + n_subpass < in_opt_n_post_subpass_sample_locations; + ++n_subpass) + { + post_subpass_sample_locations_vk.at(n_subpass) = in_opt_post_subpass_sample_locations_ptr[n_subpass].get_vk(); + } - vkCmdBeginRenderPass(m_command_buffer, - &render_pass_begin_info, - in_contents); + sl_begin_info.attachmentInitialSampleLocationsCount = in_opt_n_attachment_initial_sample_locations; + sl_begin_info.pAttachmentInitialSampleLocations = (in_opt_n_attachment_initial_sample_locations != 0) ? &attachment_initial_sample_locations_vk.at(0) + : nullptr; + sl_begin_info.pNext = nullptr; + sl_begin_info.postSubpassSampleLocationsCount = in_opt_n_post_subpass_sample_locations; + sl_begin_info.pPostSubpassSampleLocations = (in_opt_n_post_subpass_sample_locations != 0) ? 
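// ---------------------------------------------------------------------------
// Illustrative sketch, not part of this patch: each AttachmentSampleLocations
// / SubpassSampleLocations entry converted in the loops above boils down to a
// VkSampleLocationsInfoEXT. The raw-Vulkan snippet below shows how one such
// structure is filled for a custom 2x MSAA pattern and handed to
// vkCmdSetSampleLocationsEXT; the sample positions are made up, and the
// function pointer is assumed to have been fetched via vkGetDeviceProcAddr,
// as Anvil does when populating its extension entrypoint tables.
#include <vulkan/vulkan.h>

void set_custom_2x_sample_locations(VkCommandBuffer                cmd_buffer,
                                    PFN_vkCmdSetSampleLocationsEXT set_sample_locations)
{
    /* two programmable sample positions, expressed on the [0..1) pixel grid */
    const VkSampleLocationEXT locations[2] = { {0.25f, 0.25f}, {0.75f, 0.75f} };

    VkSampleLocationsInfoEXT info = {};

    info.sType                   = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT;
    info.sampleLocationsPerPixel = VK_SAMPLE_COUNT_2_BIT;
    info.sampleLocationGridSize  = {1, 1}; /* one pattern per pixel            */
    info.sampleLocationsCount    = 2;      /* grid w * grid h * samples        */
    info.pSampleLocations        = locations;

    set_sample_locations(cmd_buffer, &info);
}
// ---------------------------------------------------------------------------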
&post_subpass_sample_locations_vk.at(0) + : nullptr; + sl_begin_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT; + + render_pass_begin_info_chain.append_struct(sl_begin_info); + } + + m_parent_command_pool_ptr->lock(); + lock(); + { + auto chain_ptr = render_pass_begin_info_chain.create_chain(); + + if (!in_use_khr_create_rp2_extension) + { + Anvil::Vulkan::vkCmdBeginRenderPass(m_command_buffer, + chain_ptr->get_root_struct(), + static_cast(in_contents) ); + } + else + { + const auto& crp2_entrypoints = m_device_ptr->get_extension_khr_create_renderpass2_entrypoints(); + VkSubpassBeginInfoKHR subpass_begin_info; + + subpass_begin_info.contents = static_cast(in_contents); + subpass_begin_info.pNext = nullptr; + subpass_begin_info.sType = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR; + + crp2_entrypoints.vkCmdBeginRenderPass2KHR(m_command_buffer, + chain_ptr->get_root_struct(), + &subpass_begin_info); + } + } + unlock(); + m_parent_command_pool_ptr->unlock(); m_is_renderpass_active = true; result = true; @@ -3609,6 +4613,20 @@ bool Anvil::PrimaryCommandBuffer::record_begin_render_pass(uint32_t /* Please see header for specification */ bool Anvil::PrimaryCommandBuffer::record_end_render_pass() +{ + return record_end_render_pass_internal(false); /* in_use_khr_create_rp2_extension */ +} + +/* Please see header for specification */ +bool Anvil::PrimaryCommandBuffer::record_end_render_pass2_KHR() +{ + anvil_assert(m_device_ptr->get_extension_info()->khr_create_renderpass2() ); + + return record_end_render_pass_internal(true); /* in_use_khr_create_rp2_extension */ +} + +/* Please see header for specification */ +bool Anvil::PrimaryCommandBuffer::record_end_render_pass_internal(const bool& in_use_khr_create_rp2_extension) { bool result = false; @@ -3630,12 +4648,39 @@ bool Anvil::PrimaryCommandBuffer::record_end_render_pass() { if (!m_command_stashing_disabled) { - m_commands.push_back(EndRenderPassCommand() ); + if (in_use_khr_create_rp2_extension) + { + m_commands.push_back(EndRenderPass2KHRCommand() ); + } + else + { + m_commands.push_back(EndRenderPassCommand() ); + } } } #endif - vkCmdEndRenderPass(m_command_buffer); + m_parent_command_pool_ptr->lock(); + lock(); + { + if (in_use_khr_create_rp2_extension) + { + const auto& crp2_entrypoints = m_device_ptr->get_extension_khr_create_renderpass2_entrypoints(); + VkSubpassEndInfoKHR subpass_end_info; + + subpass_end_info.pNext = nullptr; + subpass_end_info.sType = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR; + + crp2_entrypoints.vkCmdEndRenderPass2KHR(m_command_buffer, + &subpass_end_info); + } + else + { + Anvil::Vulkan::vkCmdEndRenderPass(m_command_buffer); + } + } + unlock(); + m_parent_command_pool_ptr->unlock(); m_is_renderpass_active = false; result = true; @@ -3643,16 +4688,13 @@ bool Anvil::PrimaryCommandBuffer::record_end_render_pass() return result; } - /* Please see header for specification */ -bool Anvil::PrimaryCommandBuffer::record_execute_commands(uint32_t in_cmd_buffers_count, - std::shared_ptr* in_cmd_buffer_ptrs) +bool Anvil::PrimaryCommandBuffer::record_execute_commands(uint32_t in_cmd_buffers_count, + Anvil::SecondaryCommandBuffer** in_cmd_buffer_ptrs) { /* NOTE: The command can be executed both inside and outside a renderpass */ - VkCommandBuffer cmd_buffers[100]; - bool result = false; - - anvil_assert(in_cmd_buffers_count < sizeof(cmd_buffers) / sizeof(cmd_buffers[0]) ); + auto cmd_buffers = std::vector(in_cmd_buffers_count); + bool result = false; if (!m_recording_in_progress) { @@ -3675,14 +4717,18 @@ bool 
Anvil::PrimaryCommandBuffer::record_execute_commands(uint32_t n_cmd_buffer < in_cmd_buffers_count; ++n_cmd_buffer) { - cmd_buffers[n_cmd_buffer] = in_cmd_buffer_ptrs[n_cmd_buffer]->get_command_buffer(); - - cache_referenced_command_buffer(in_cmd_buffer_ptrs[n_cmd_buffer]); + cmd_buffers.at(n_cmd_buffer) = in_cmd_buffer_ptrs[n_cmd_buffer]->get_command_buffer(); } - vkCmdExecuteCommands(m_command_buffer, - in_cmd_buffers_count, - cmd_buffers); + m_parent_command_pool_ptr->lock(); + lock(); + { + Anvil::Vulkan::vkCmdExecuteCommands(m_command_buffer, + in_cmd_buffers_count, + (in_cmd_buffers_count > 0) ? &cmd_buffers.at(0) : nullptr); + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -3690,7 +4736,24 @@ bool Anvil::PrimaryCommandBuffer::record_execute_commands(uint32_t } /* Please see header for specification */ -bool Anvil::PrimaryCommandBuffer::record_next_subpass(VkSubpassContents in_contents) +bool Anvil::PrimaryCommandBuffer::record_next_subpass(Anvil::SubpassContents in_contents) +{ + return record_next_subpass_internal(false, /* in_use_khr_create_rp2_extension */ + in_contents); +} + +/* Please see header for specification */ +bool Anvil::PrimaryCommandBuffer::record_next_subpass2_KHR(Anvil::SubpassContents in_contents) +{ + anvil_assert(m_device_ptr->get_extension_info()->khr_create_renderpass2() ); + + return record_next_subpass_internal(true, /* in_use_khr_create_rp2_extension */ + in_contents); +} + +/* Please see header for specification */ +bool Anvil::PrimaryCommandBuffer::record_next_subpass_internal(const bool& in_use_khr_create_rp2_extension, + Anvil::SubpassContents in_contents) { bool result = false; @@ -3712,13 +4775,46 @@ bool Anvil::PrimaryCommandBuffer::record_next_subpass(VkSubpassContents in_conte { if (!m_command_stashing_disabled) { - m_commands.push_back(NextSubpassCommand(in_contents) ); + if (in_use_khr_create_rp2_extension) + { + m_commands.push_back(NextSubpass2KHRCommand(in_contents) ); + } + else + { + m_commands.push_back(NextSubpassCommand(in_contents) ); + } } } #endif - vkCmdNextSubpass(m_command_buffer, - in_contents); + m_parent_command_pool_ptr->lock(); + lock(); + { + if (in_use_khr_create_rp2_extension) + { + const auto& crp2_entrypoints = m_device_ptr->get_extension_khr_create_renderpass2_entrypoints(); + VkSubpassBeginInfoKHR subpass_begin_info; + VkSubpassEndInfoKHR subpass_end_info; + + subpass_begin_info.contents = static_cast(in_contents); + subpass_begin_info.pNext = nullptr; + subpass_begin_info.sType = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR; + + subpass_end_info.pNext = nullptr; + subpass_end_info.sType = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR; + + crp2_entrypoints.vkCmdNextSubpass2KHR(m_command_buffer, + &subpass_begin_info, + &subpass_end_info); + } + else + { + Anvil::Vulkan::vkCmdNextSubpass(m_command_buffer, + static_cast(in_contents) ); + } + } + unlock(); + m_parent_command_pool_ptr->unlock(); result = true; end: @@ -3726,13 +4822,14 @@ bool Anvil::PrimaryCommandBuffer::record_next_subpass(VkSubpassContents in_conte } /* Please see header for specification */ -bool Anvil::PrimaryCommandBuffer::start_recording(bool in_one_time_submit, - bool in_simultaneous_use_allowed) +bool Anvil::PrimaryCommandBuffer::start_recording(bool in_one_time_submit, + bool in_simultaneous_use_allowed, + uint32_t in_opt_device_mask) { - VkCommandBufferBeginInfo command_buffer_begin_info; - std::shared_ptr device_locked_ptr (m_device_ptr); - bool result (false); - VkResult result_vk; + const Anvil::DeviceType device_type 
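// ---------------------------------------------------------------------------
// Illustrative sketch, not part of this patch: the *2_KHR code paths added in
// these hunks forward to the VK_KHR_create_renderpass2 entrypoints, which take
// explicit VkSubpassBeginInfoKHR / VkSubpassEndInfoKHR structures instead of a
// bare VkSubpassContents value. A bare-Vulkan sketch of the same call
// sequence; the function pointers are assumed to have been fetched with
// vkGetDeviceProcAddr, as Anvil's entrypoint table does.
#include <vulkan/vulkan.h>

void run_render_pass2(VkCommandBuffer              cmd_buffer,
                      const VkRenderPassBeginInfo& rp_begin_info,
                      PFN_vkCmdBeginRenderPass2KHR begin_rp2,
                      PFN_vkCmdNextSubpass2KHR     next_subpass2,
                      PFN_vkCmdEndRenderPass2KHR   end_rp2)
{
    VkSubpassBeginInfoKHR subpass_begin_info = {};
    VkSubpassEndInfoKHR   subpass_end_info   = {};

    subpass_begin_info.sType    = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR;
    subpass_begin_info.contents = VK_SUBPASS_CONTENTS_INLINE;

    subpass_end_info.sType = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR;

    begin_rp2    (cmd_buffer, &rp_begin_info,      &subpass_begin_info);
    next_subpass2(cmd_buffer, &subpass_begin_info, &subpass_end_info);  /* subpass 0 -> 1 */
    end_rp2      (cmd_buffer, &subpass_end_info);
}
// ---------------------------------------------------------------------------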
(m_device_ptr->get_type() ); + bool result (false); + VkResult result_vk; + Anvil::StructChainer struct_chainer; if (m_recording_in_progress) { @@ -3741,14 +4838,51 @@ bool Anvil::PrimaryCommandBuffer::start_recording(bool in_one_time_submit, goto end; } - command_buffer_begin_info.flags = ((in_one_time_submit) ? VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT : 0u) | - ((in_simultaneous_use_allowed) ? VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT : 0u); - command_buffer_begin_info.pNext = nullptr; - command_buffer_begin_info.pInheritanceInfo = nullptr; /* Only relevant for secondary-level command buffers */ - command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; + { + VkCommandBufferBeginInfo command_buffer_begin_info; + + command_buffer_begin_info.flags = ((in_one_time_submit) ? VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT : 0u) | + ((in_simultaneous_use_allowed) ? VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT : 0u); + command_buffer_begin_info.pNext = nullptr; + command_buffer_begin_info.pInheritanceInfo = nullptr; /* Only relevant for secondary-level command buffers */ + command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; - result_vk = vkBeginCommandBuffer(m_command_buffer, - &command_buffer_begin_info); + struct_chainer.append_struct(command_buffer_begin_info); + } + + if (device_type == Anvil::DeviceType::MULTI_GPU) + { + VkDeviceGroupCommandBufferBeginInfoKHR command_buffer_device_group_begin_info; + auto device_mgpu_ptr = dynamic_cast(m_device_ptr); + + if (in_opt_device_mask == UINT32_MAX) + { + in_opt_device_mask = (1 << device_mgpu_ptr->get_n_physical_devices()) - 1; + } + + anvil_assert(in_opt_device_mask != 0); + + command_buffer_device_group_begin_info.deviceMask = in_opt_device_mask; + command_buffer_device_group_begin_info.pNext = nullptr; + command_buffer_device_group_begin_info.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR; + + struct_chainer.append_struct(command_buffer_device_group_begin_info); + } + else + { + anvil_assert(device_type == Anvil::DeviceType::SINGLE_GPU); + } + + m_parent_command_pool_ptr->lock(); + lock(); + { + auto chain_ptr = struct_chainer.create_chain(); + + result_vk = Anvil::Vulkan::vkBeginCommandBuffer(m_command_buffer, + chain_ptr->get_root_struct() ); + } + unlock(); + m_parent_command_pool_ptr->unlock(); if (!is_vk_call_successful(result_vk) ) { @@ -3764,8 +4898,7 @@ bool Anvil::PrimaryCommandBuffer::start_recording(bool in_one_time_submit, } #endif - clear_referenced_objects(); - + m_device_mask = in_opt_device_mask; m_recording_in_progress = true; result = true; @@ -3775,15 +4908,16 @@ bool Anvil::PrimaryCommandBuffer::start_recording(bool in_one_time_submit, /* Please see header for specification */ -Anvil::SecondaryCommandBuffer::SecondaryCommandBuffer(std::weak_ptr in_device_ptr, - std::shared_ptr in_parent_command_pool_ptr) +Anvil::SecondaryCommandBuffer::SecondaryCommandBuffer(const Anvil::BaseDevice* in_device_ptr, + Anvil::CommandPool* in_parent_command_pool_ptr, + bool in_mt_safe) :CommandBufferBase(in_device_ptr, in_parent_command_pool_ptr, - COMMAND_BUFFER_TYPE_SECONDARY) + COMMAND_BUFFER_TYPE_SECONDARY, + in_mt_safe) { - VkCommandBufferAllocateInfo command_buffer_alloc_info; - std::shared_ptr device_locked_ptr(in_device_ptr); - VkResult result_vk (VK_ERROR_INITIALIZATION_FAILED); + VkCommandBufferAllocateInfo command_buffer_alloc_info; + VkResult result_vk (VK_ERROR_INITIALIZATION_FAILED); ANVIL_REDUNDANT_VARIABLE(result_vk); @@ -3793,29 +4927,34 @@ 
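// ---------------------------------------------------------------------------
// Illustrative sketch, not part of this patch: on a multi-GPU device,
// start_recording() now chains a VkDeviceGroupCommandBufferBeginInfo behind
// the usual VkCommandBufferBeginInfo; when the caller leaves
// in_opt_device_mask at UINT32_MAX, the mask defaults to every physical
// device in the group, i.e. (1 << n_physical_devices) - 1 (for a 3-GPU group
// this is 0b111). Raw-Vulkan equivalent below; the command buffer handle and
// device count are placeholders, and Vulkan 1.1 headers are assumed.
#include <vulkan/vulkan.h>

VkResult begin_on_all_devices(VkCommandBuffer cmd_buffer,
                              uint32_t        n_physical_devices)
{
    VkDeviceGroupCommandBufferBeginInfo device_group_info = {};

    device_group_info.sType      = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO;
    device_group_info.deviceMask = (1u << n_physical_devices) - 1u; /* must be non-zero */

    VkCommandBufferBeginInfo begin_info = {};

    begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    begin_info.pNext = &device_group_info;
    begin_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;

    return vkBeginCommandBuffer(cmd_buffer, &begin_info);
}
// ---------------------------------------------------------------------------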
Anvil::SecondaryCommandBuffer::SecondaryCommandBuffer(std::weak_ptrget_device_vk(), - &command_buffer_alloc_info, - &m_command_buffer); + in_parent_command_pool_ptr->lock(); + { + result_vk = Anvil::Vulkan::vkAllocateCommandBuffers(m_device_ptr->get_device_vk(), + &command_buffer_alloc_info, + &m_command_buffer); + } + in_parent_command_pool_ptr->unlock(); anvil_assert_vk_call_succeeded(result_vk); } /* Please see header for specification */ -bool Anvil::SecondaryCommandBuffer::start_recording(bool in_one_time_submit, - bool in_simultaneous_use_allowed, - bool in_renderpass_usage_only, - std::shared_ptr in_framebuffer_ptr, - std::shared_ptr in_render_pass_ptr, - SubPassID in_subpass_id, - OcclusionQuerySupportScope in_required_occlusion_query_support_scope, - bool in_occlusion_query_used_by_primary_command_buffer, - VkQueryPipelineStatisticFlags in_required_pipeline_statistics_scope) +bool Anvil::SecondaryCommandBuffer::start_recording(bool in_one_time_submit, + bool in_simultaneous_use_allowed, + bool in_renderpass_usage_only, + Framebuffer* in_framebuffer_ptr, + RenderPass* in_render_pass_ptr, + SubPassID in_subpass_id, + OcclusionQuerySupportScope in_required_occlusion_query_support_scope, + bool in_occlusion_query_used_by_primary_command_buffer, + Anvil::QueryPipelineStatisticFlags in_required_pipeline_statistics_scope, + uint32_t in_opt_device_mask) { - VkCommandBufferBeginInfo command_buffer_begin_info; - VkCommandBufferInheritanceInfo command_buffer_inheritance_info; - std::shared_ptr device_locked_ptr (m_device_ptr); - bool result = false; - VkResult result_vk; + VkCommandBufferInheritanceInfo command_buffer_inheritance_info; + const Anvil::DeviceType device_type (m_device_ptr->get_type() ); + bool result (false); + VkResult result_vk; + Anvil::StructChainer struct_chainer; if (m_recording_in_progress) { @@ -3824,35 +4963,62 @@ bool Anvil::SecondaryCommandBuffer::start_recording(bool goto end; } - command_buffer_inheritance_info.framebuffer = (in_framebuffer_ptr != nullptr) ? in_framebuffer_ptr->get_framebuffer(in_render_pass_ptr) : VK_NULL_HANDLE; - command_buffer_inheritance_info.occlusionQueryEnable = (in_occlusion_query_used_by_primary_command_buffer) ? VK_TRUE : VK_FALSE; - command_buffer_inheritance_info.pipelineStatistics = in_required_pipeline_statistics_scope; - command_buffer_inheritance_info.pNext = nullptr; - command_buffer_inheritance_info.queryFlags = (in_occlusion_query_used_by_primary_command_buffer && - in_required_occlusion_query_support_scope == OCCLUSION_QUERY_SUPPORT_SCOPE_REQUIRED_PRECISE) ? VK_QUERY_CONTROL_PRECISE_BIT : 0u; - command_buffer_inheritance_info.renderPass = (in_render_pass_ptr != nullptr) ? in_render_pass_ptr->get_render_pass() : VK_NULL_HANDLE; - command_buffer_inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO; - command_buffer_inheritance_info.subpass = in_subpass_id; + { + VkCommandBufferBeginInfo command_buffer_begin_info; + + command_buffer_begin_info.flags = ((in_one_time_submit) ? VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT : 0u) | + ((in_simultaneous_use_allowed) ? VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT : 0u) | + ((in_renderpass_usage_only) ? VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT : 0u); + command_buffer_begin_info.pInheritanceInfo = &command_buffer_inheritance_info; + command_buffer_begin_info.pNext = nullptr; + command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; - command_buffer_begin_info.flags = ((in_one_time_submit) ? 
VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT : 0u) | - ((in_simultaneous_use_allowed) ? VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT : 0u) | - ((in_renderpass_usage_only) ? VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT : 0u); - command_buffer_begin_info.pInheritanceInfo = &command_buffer_inheritance_info; - command_buffer_begin_info.pNext = nullptr; - command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; + struct_chainer.append_struct(command_buffer_begin_info); + } - if (in_framebuffer_ptr != nullptr) { - cache_referenced_framebuffer(in_framebuffer_ptr); + command_buffer_inheritance_info.framebuffer = (in_framebuffer_ptr != nullptr) ? in_framebuffer_ptr->get_framebuffer(in_render_pass_ptr) : VK_NULL_HANDLE; + command_buffer_inheritance_info.occlusionQueryEnable = (in_occlusion_query_used_by_primary_command_buffer) ? VK_TRUE : VK_FALSE; + command_buffer_inheritance_info.pipelineStatistics = in_required_pipeline_statistics_scope.get_vk(); + command_buffer_inheritance_info.pNext = nullptr; + command_buffer_inheritance_info.queryFlags = (in_occlusion_query_used_by_primary_command_buffer && + in_required_occlusion_query_support_scope == OcclusionQuerySupportScope::REQUIRED_PRECISE) ? VK_QUERY_CONTROL_PRECISE_BIT : 0u; + command_buffer_inheritance_info.renderPass = (in_render_pass_ptr != nullptr) ? in_render_pass_ptr->get_render_pass() : VK_NULL_HANDLE; + command_buffer_inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO; + command_buffer_inheritance_info.subpass = in_subpass_id; } - if (in_render_pass_ptr != nullptr) + if (device_type == Anvil::DeviceType::MULTI_GPU) { - cache_referenced_renderpass(in_render_pass_ptr); + anvil_assert((in_opt_device_mask != 0) && (in_opt_device_mask != UINT32_MAX)); + + VkDeviceGroupCommandBufferBeginInfoKHR command_buffer_device_group_begin_info; + + command_buffer_device_group_begin_info.deviceMask = in_opt_device_mask; + command_buffer_device_group_begin_info.pNext = nullptr; + command_buffer_device_group_begin_info.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR; + + struct_chainer.append_struct(command_buffer_device_group_begin_info); + + m_device_mask = in_opt_device_mask; } + else + { + anvil_assert(device_type == Anvil::DeviceType::SINGLE_GPU); - result_vk = vkBeginCommandBuffer(m_command_buffer, - &command_buffer_begin_info); + m_device_mask = 0; + } + + m_parent_command_pool_ptr->lock(); + lock(); + { + auto chain_ptr = struct_chainer.create_chain(); + + result_vk = Anvil::Vulkan::vkBeginCommandBuffer(m_command_buffer, + chain_ptr->get_root_struct() ); + } + unlock(); + m_parent_command_pool_ptr->unlock(); if (!is_vk_call_successful(result_vk) ) { @@ -3868,9 +5034,7 @@ bool Anvil::SecondaryCommandBuffer::start_recording(bool } #endif - clear_referenced_objects(); - - m_is_renderpass_active = in_renderpass_usage_only; + m_is_renderpass_active = in_renderpass_usage_only; m_recording_in_progress = true; result = true; diff --git a/src/wrappers/command_pool.cpp b/src/wrappers/command_pool.cpp index f70dae69..a4c852de 100644 --- a/src/wrappers/command_pool.cpp +++ b/src/wrappers/command_pool.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2019 Advanced Micro Devices, Inc. All rights reserved. 
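For secondary-level command buffers, the reworked start_recording() shown above takes raw Framebuffer/RenderPass pointers plus an explicit device mask. A hypothetical call is sketched below; the NOT_REQUIRED occlusion-query scope enumerator and the default-constructed pipeline-statistics flags are assumptions, as only REQUIRED_PRECISE is visible in this hunk.

    secondary_cmd_buffer_ptr->start_recording(true,  /* in_one_time_submit          */
                                              false, /* in_simultaneous_use_allowed */
                                              true,  /* in_renderpass_usage_only    */
                                              framebuffer_ptr,
                                              render_pass_ptr,
                                              0,     /* in_subpass_id               */
                                              Anvil::OcclusionQuerySupportScope::NOT_REQUIRED, /* assumed enumerator */
                                              false, /* in_occlusion_query_used_by_primary_command_buffer */
                                              Anvil::QueryPipelineStatisticFlags(),            /* assumed default    */
                                              0x1);  /* in_opt_device_mask          */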
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -30,40 +30,34 @@ #include /* Please see header for specification */ -Anvil::CommandPool::CommandPool(std::weak_ptr in_device_ptr, - bool in_transient_allocations_friendly, - bool in_support_per_cmdbuf_reset_ops, - Anvil::QueueFamilyType in_queue_family) - - :DebugMarkerSupportProvider (in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT), - m_command_pool (VK_NULL_HANDLE), - m_device_ptr (in_device_ptr), - m_is_transient_allocations_friendly(in_transient_allocations_friendly), - m_queue_family (in_queue_family), - m_supports_per_cmdbuf_reset_ops (in_support_per_cmdbuf_reset_ops) +Anvil::CommandPool::CommandPool(Anvil::BaseDevice* in_device_ptr, + const Anvil::CommandPoolCreateFlags& in_create_flags, + uint32_t in_queue_family_index, + bool in_mt_safe) + + :DebugMarkerSupportProvider(in_device_ptr, + Anvil::ObjectType::COMMAND_POOL), + MTSafetySupportProvider (in_mt_safe), + m_command_pool (VK_NULL_HANDLE), + m_create_flags (in_create_flags), + m_device_ptr (in_device_ptr), + m_queue_family_index (in_queue_family_index) { - VkCommandPoolCreateInfo command_pool_create_info; - std::shared_ptr device_locked_ptr (in_device_ptr); - uint32_t queue_family_index (UINT32_MAX); - VkResult result_vk (VK_ERROR_INITIALIZATION_FAILED); + VkCommandPoolCreateInfo command_pool_create_info; + VkResult result_vk (VK_ERROR_INITIALIZATION_FAILED); ANVIL_REDUNDANT_VARIABLE(result_vk); - queue_family_index = (device_locked_ptr->get_queue_family_index(in_queue_family) ); - anvil_assert(queue_family_index != UINT32_MAX); - /* Go on and create the command pool */ - command_pool_create_info.flags = ((in_transient_allocations_friendly) ? VK_COMMAND_POOL_CREATE_TRANSIENT_BIT : 0u) | - ((in_support_per_cmdbuf_reset_ops) ? 
VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT : 0u); + command_pool_create_info.flags = in_create_flags.get_vk(); command_pool_create_info.pNext = nullptr; - command_pool_create_info.queueFamilyIndex = queue_family_index; + command_pool_create_info.queueFamilyIndex = in_queue_family_index; command_pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO; - result_vk = vkCreateCommandPool(device_locked_ptr->get_device_vk(), - &command_pool_create_info, - nullptr, /* pAllocator */ - &m_command_pool); + result_vk = Anvil::Vulkan::vkCreateCommandPool(in_device_ptr->get_device_vk(), + &command_pool_create_info, + nullptr, /* pAllocator */ + &m_command_pool); anvil_assert_vk_call_succeeded(result_vk); if (is_vk_call_successful(result_vk) ) @@ -72,7 +66,7 @@ Anvil::CommandPool::CommandPool(std::weak_ptr in_device_ptr, } /* Register the command pool instance */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_COMMAND_POOL, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::COMMAND_POOL, this); } @@ -80,53 +74,70 @@ Anvil::CommandPool::CommandPool(std::weak_ptr in_device_ptr, Anvil::CommandPool::~CommandPool() { /* Unregister the object */ - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_COMMAND_POOL, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::COMMAND_POOL, this); /* Release the Vulkan command pool */ if (m_command_pool != VK_NULL_HANDLE) { - std::shared_ptr device_locked_ptr(m_device_ptr); - - vkDestroyCommandPool(device_locked_ptr->get_device_vk(), - m_command_pool, - nullptr /* pAllocator */); + lock(); + { + Anvil::Vulkan::vkDestroyCommandPool(m_device_ptr->get_device_vk(), + m_command_pool, + nullptr /* pAllocator */); + } + unlock(); m_command_pool = VK_NULL_HANDLE; } } /* Please see header for specification */ -std::shared_ptr Anvil::CommandPool::alloc_primary_level_command_buffer() +Anvil::PrimaryCommandBufferUniquePtr Anvil::CommandPool::alloc_primary_level_command_buffer() { - std::shared_ptr new_buffer_ptr(new PrimaryCommandBuffer(m_device_ptr, - shared_from_this() )); + Anvil::PrimaryCommandBufferUniquePtr new_buffer_ptr(nullptr, + std::default_delete() ); + + new_buffer_ptr.reset( + new PrimaryCommandBuffer(m_device_ptr, + this, + is_mt_safe() ) + ); return new_buffer_ptr; } /* Please see header for specification */ -std::shared_ptr Anvil::CommandPool::alloc_secondary_level_command_buffer() +Anvil::SecondaryCommandBufferUniquePtr Anvil::CommandPool::alloc_secondary_level_command_buffer() { - std::shared_ptr new_buffer_ptr(new SecondaryCommandBuffer(m_device_ptr, - shared_from_this() )); + Anvil::SecondaryCommandBufferUniquePtr new_buffer_ptr(nullptr, + std::default_delete() ); + + new_buffer_ptr.reset( + new SecondaryCommandBuffer(m_device_ptr, + this, + is_mt_safe() ) + ); return new_buffer_ptr; } /* Please see header for specification */ -std::shared_ptr Anvil::CommandPool::create(std::weak_ptr in_device_ptr, - bool in_transient_allocations_friendly, - bool in_support_per_cmdbuf_reset_ops, - Anvil::QueueFamilyType in_queue_family) +Anvil::CommandPoolUniquePtr Anvil::CommandPool::create(Anvil::BaseDevice* in_device_ptr, + const Anvil::CommandPoolCreateFlags& in_create_flags, + uint32_t in_queue_family_index, + MTSafety in_mt_safety) { - std::shared_ptr result_ptr; + const bool is_mt_safe = Anvil::Utils::convert_mt_safety_enum_to_boolean(in_mt_safety, + in_device_ptr); + Anvil::CommandPoolUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( new 
Anvil::CommandPool(in_device_ptr, - in_transient_allocations_friendly, - in_support_per_cmdbuf_reset_ops, - in_queue_family) + in_create_flags, + in_queue_family_index, + is_mt_safe) ); return result_ptr; @@ -135,12 +146,16 @@ std::shared_ptr Anvil::CommandPool::create(std::weak_ptr device_locked_ptr(m_device_ptr); - VkResult result_vk; + std::unique_lock mutex_lock; + VkResult result_vk; - result_vk = vkResetCommandPool(device_locked_ptr->get_device_vk(), - m_command_pool, - ((in_release_resources) ? VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT : 0u) ); + lock(); + { + result_vk = Anvil::Vulkan::vkResetCommandPool(m_device_ptr->get_device_vk(), + m_command_pool, + ((in_release_resources) ? VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT : 0u) ); + } + unlock(); anvil_assert_vk_call_succeeded(result_vk); @@ -150,16 +165,18 @@ bool Anvil::CommandPool::reset(bool in_release_resources) /* Please see header for specification */ void Anvil::CommandPool::trim() { - std::shared_ptr device_locked_ptr(m_device_ptr); - - if (device_locked_ptr->is_khr_maintenance1_extension_enabled() ) + if (m_device_ptr->get_extension_info()->khr_maintenance1() ) { - device_locked_ptr->get_extension_khr_maintenance1_entrypoints().vkTrimCommandPoolKHR(device_locked_ptr->get_device_vk(), - m_command_pool, - 0); /* flags */ + lock(); + { + m_device_ptr->get_extension_khr_maintenance1_entrypoints().vkTrimCommandPoolKHR(m_device_ptr->get_device_vk(), + m_command_pool, + 0); /* flags */ + } + unlock(); } else { - anvil_assert(device_locked_ptr->is_khr_maintenance1_extension_enabled() ); + anvil_assert(m_device_ptr->get_extension_info()->khr_maintenance1() ); } } \ No newline at end of file diff --git a/src/wrappers/compute_pipeline_manager.cpp b/src/wrappers/compute_pipeline_manager.cpp index f9546261..643d19af 100644 --- a/src/wrappers/compute_pipeline_manager.cpp +++ b/src/wrappers/compute_pipeline_manager.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -20,6 +20,7 @@ // THE SOFTWARE. // +#include "misc/compute_pipeline_create_info.h" #include "misc/debug.h" #include "misc/object_tracker.h" #include "wrappers/compute_pipeline_manager.h" @@ -30,26 +31,17 @@ #include -/** Constructor. Initializes the compute pipeline manager. - * - * @param in_device_ptr Device to use. - * @param in_use_pipeline_cache true if a VkPipelineCache instance should be used to spawn new pipeline objects. - * What pipeline cache ends up being used depends on @param pipeline_cache_to_reuse - - * if a nullptr object is passed via this argument, a new pipeline cache instance will - * be created, and later released by the destructor. If a non-nullptr object is passed, - * it will be used instead. In the latter scenario, it is the caller's responsibility - * to release the cache when no longer needed! - * @param in_pipeline_cache_to_reuse Please see above. 
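With the CommandPool changes above, pools are created from a create-flags field and an explicit queue family index, and they hand out unique_ptr-owned command buffers. A usage sketch under those signatures; the CREATE_RESET_COMMAND_BUFFER_BIT enumerator, the MTSafety value and the device/queue-family variables are assumptions for illustration.

    auto cmd_pool_ptr = Anvil::CommandPool::create(device_ptr,
                                                   Anvil::CommandPoolCreateFlagBits::CREATE_RESET_COMMAND_BUFFER_BIT, /* assumed flag name */
                                                   universal_queue_family_index,
                                                   Anvil::MTSafety::ENABLED);

    auto primary_cmd_buffer_ptr   = cmd_pool_ptr->alloc_primary_level_command_buffer  ();
    auto secondary_cmd_buffer_ptr = cmd_pool_ptr->alloc_secondary_level_command_buffer();

    cmd_pool_ptr->trim (); /* per the hunk above, asserts unless VK_KHR_maintenance1 is enabled */
    cmd_pool_ptr->reset(true /* in_release_resources */);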
- **/ -Anvil::ComputePipelineManager::ComputePipelineManager(std::weak_ptr in_device_ptr, - bool in_use_pipeline_cache, - std::shared_ptr in_pipeline_cache_to_reuse_ptr) +Anvil::ComputePipelineManager::ComputePipelineManager(Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe, + bool in_use_pipeline_cache, + Anvil::PipelineCache* in_pipeline_cache_to_reuse_ptr) :BasePipelineManager(in_device_ptr, + in_mt_safe, in_use_pipeline_cache, in_pipeline_cache_to_reuse_ptr) { /* Register the object */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_COMPUTE_PIPELINE_MANAGER, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::ANVIL_COMPUTE_PIPELINE_MANAGER, this); } @@ -58,10 +50,11 @@ Anvil::ComputePipelineManager::ComputePipelineManager(std::weak_ptrunregister_object(Anvil::OBJECT_TYPE_COMPUTE_PIPELINE_MANAGER, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::ANVIL_COMPUTE_PIPELINE_MANAGER, this); - m_pipelines.clear(); + m_baked_pipelines.clear (); + m_outstanding_pipelines.clear(); } /** Re-creates Vulkan compute pipeline objects for all added non-proxy pipelines marked @@ -72,79 +65,87 @@ Anvil::ComputePipelineManager::~ComputePipelineManager() **/ bool Anvil::ComputePipelineManager::bake() { - std::shared_ptr locked_device_ptr(m_device_ptr); - std::vector pipeline_create_info_items_vk; - bool result = false; - std::vector result_pipeline_items_vk; - VkResult result_vk; - std::vector > specialization_map_entries_vk(m_pipelines.size() ); - std::vector specialization_info_vk(m_pipelines.size()); - - typedef struct _bake_item + typedef struct BakeItem { VkComputePipelineCreateInfo create_info; - std::shared_ptr pipeline_ptr; + PipelineID pipeline_id; + Pipeline* pipeline_ptr; - _bake_item(const VkComputePipelineCreateInfo& in_create_info, - std::shared_ptr in_pipeline_ptr) + BakeItem(const VkComputePipelineCreateInfo& in_create_info, + PipelineID in_pipeline_id, + Pipeline* in_pipeline_ptr) { create_info = in_create_info; + pipeline_id = in_pipeline_id; pipeline_ptr = in_pipeline_ptr; } - bool operator==(std::shared_ptr in_pipeline_ptr) + bool operator==(const PipelineID& in_pipeline_id) const { - return pipeline_ptr == in_pipeline_ptr; + return pipeline_id == in_pipeline_id; } - } _bake_item; + } BakeItem; + + std::map > layout_to_bake_item_map; + std::unique_lock mutex_lock; + auto mutex_ptr (get_mutex() ); + uint32_t n_current_pipeline (0); + std::vector pipeline_create_info_items_vk; + bool result (false); + std::vector result_pipeline_items_vk; + VkResult result_vk; + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } - std::map > layout_to_bake_item_map; - uint32_t n_current_pipeline = 0; + std::vector > specialization_map_entries_vk(m_outstanding_pipelines.size() ); + std::vector specialization_info_vk (m_outstanding_pipelines.size()); - /* Iterate over all compute pipelines and identify the ones marked as dirty. 
Only these - * need to be re-created */ - for (auto pipeline_iterator = m_pipelines.begin(); - pipeline_iterator != m_pipelines.end(); + for (auto pipeline_iterator = m_outstanding_pipelines.begin(); + pipeline_iterator != m_outstanding_pipelines.end(); ++pipeline_iterator, ++n_current_pipeline) { - std::shared_ptr current_pipeline_ptr = pipeline_iterator->second; - VkComputePipelineCreateInfo pipeline_create_info; - - if (!current_pipeline_ptr->dirty || - current_pipeline_ptr->is_proxy) - { - continue; - } - - if (current_pipeline_ptr->baked_pipeline != VK_NULL_HANDLE) + auto current_pipeline_id = pipeline_iterator->first; + Pipeline* current_pipeline_ptr = pipeline_iterator->second.get(); + const auto current_pipeline_create_info_ptr = current_pipeline_ptr->pipeline_create_info_ptr.get(); + VkComputePipelineCreateInfo pipeline_create_info; + const Anvil::ShaderModuleStageEntryPoint* shader_stage_entry_point_ptr = nullptr; + const unsigned char* spec_constants_data_buffer_ptr = nullptr; + uint32_t spec_constants_data_buffer_size = 0; + const SpecializationConstants* specialization_constants_ptr = nullptr; + + anvil_assert(current_pipeline_ptr->baked_pipeline == VK_NULL_HANDLE); + + if (current_pipeline_ptr->layout_ptr == nullptr) { - vkDestroyPipeline(locked_device_ptr->get_device_vk(), - current_pipeline_ptr->baked_pipeline, - nullptr /* pAllocator */); + get_pipeline_layout(pipeline_iterator->first); - current_pipeline_ptr->baked_pipeline = VK_NULL_HANDLE; + anvil_assert(current_pipeline_ptr->layout_ptr != nullptr); } - if (current_pipeline_ptr->layout_dirty) - { - current_pipeline_ptr->layout_ptr = get_pipeline_layout(pipeline_iterator->first); - } + current_pipeline_create_info_ptr->get_specialization_constants(Anvil::ShaderStage::COMPUTE, + &specialization_constants_ptr, + &spec_constants_data_buffer_ptr, + &spec_constants_data_buffer_size); - if (current_pipeline_ptr->specialization_constants_map[0].size() > 0) + if (specialization_constants_ptr->size() > 0) { - anvil_assert(current_pipeline_ptr->specialization_constant_data_buffer.size() > 0); - - bake_specialization_info_vk(current_pipeline_ptr->specialization_constants_map.at (0), - ¤t_pipeline_ptr->specialization_constant_data_buffer.at(0), - &specialization_map_entries_vk[n_current_pipeline], - &specialization_info_vk[n_current_pipeline]); + bake_specialization_info_vk(*specialization_constants_ptr, + spec_constants_data_buffer_ptr, + spec_constants_data_buffer_size, + &specialization_map_entries_vk[n_current_pipeline], + &specialization_info_vk [n_current_pipeline]); } /* Prepare the Vulkan create info descriptor & store it in the map for later baking */ - if (current_pipeline_ptr->base_pipeline_ptr != nullptr) - { - anvil_assert(current_pipeline_ptr->base_pipeline == VK_NULL_HANDLE); + const auto current_pipeline_base_pipeline_id = current_pipeline_create_info_ptr->get_base_pipeline_id(); + if (current_pipeline_base_pipeline_id != UINT32_MAX) + { /* There are three cases we need to handle separately here: * * 1. The base pipeline is to be baked in the call we're preparing for. 
Determine @@ -156,71 +157,80 @@ bool Anvil::ComputePipelineManager::bake() * * NOTE: A slightly adjusted version of this code is re-used in GraphicsPipelineManager::bake() */ auto& pipeline_vector = layout_to_bake_item_map[pipeline_create_info.layout]; + auto base_pipeline_id = current_pipeline_ptr->pipeline_create_info_ptr->get_base_pipeline_id(); auto base_pipeline_iterator = std::find(pipeline_vector.begin(), pipeline_vector.end(), - current_pipeline_ptr->base_pipeline_ptr); + base_pipeline_id); if (base_pipeline_iterator != pipeline_vector.end() ) { /* Case 1 */ - pipeline_create_info.basePipelineHandle = current_pipeline_ptr->base_pipeline; + pipeline_create_info.basePipelineHandle = VK_NULL_HANDLE; pipeline_create_info.basePipelineIndex = static_cast(base_pipeline_iterator - pipeline_vector.begin() ); } else - if (current_pipeline_ptr->base_pipeline_ptr != nullptr && - current_pipeline_ptr->base_pipeline_ptr->baked_pipeline != VK_NULL_HANDLE) - { - /* Case 2 */ - pipeline_create_info.basePipelineHandle = current_pipeline_ptr->base_pipeline_ptr->baked_pipeline; - pipeline_create_info.basePipelineIndex = UINT32_MAX; /* unused. */ - } - else { - /* Case 3 */ - anvil_assert_fail(); + auto baked_pipeline_iterator = m_baked_pipelines.find(base_pipeline_id); + + if (baked_pipeline_iterator != m_baked_pipelines.end() && + baked_pipeline_iterator->second->baked_pipeline != VK_NULL_HANDLE) + { + /* Case 2 */ + pipeline_create_info.basePipelineHandle = baked_pipeline_iterator->second->baked_pipeline; + pipeline_create_info.basePipelineIndex = UINT32_MAX; + } + else + { + /* Case 3 */ + anvil_assert_fail(); + } } } else - if (current_pipeline_ptr->base_pipeline != VK_NULL_HANDLE) - { - pipeline_create_info.basePipelineHandle = current_pipeline_ptr->base_pipeline; - pipeline_create_info.basePipelineIndex = UINT32_MAX; /* unused. */ - } - else { /* No base pipeline requested */ pipeline_create_info.basePipelineHandle = VK_NULL_HANDLE; - pipeline_create_info.basePipelineIndex = UINT32_MAX; /* unused */ + pipeline_create_info.basePipelineIndex = UINT32_MAX; } - anvil_assert(!current_pipeline_ptr->layout_dirty); + anvil_assert(current_pipeline_ptr->layout_ptr != nullptr); + + current_pipeline_create_info_ptr->get_shader_stage_properties(Anvil::ShaderStage::COMPUTE, + &shader_stage_entry_point_ptr); pipeline_create_info.flags = 0; pipeline_create_info.layout = current_pipeline_ptr->layout_ptr->get_pipeline_layout(); - pipeline_create_info.pNext = VK_NULL_HANDLE; + pipeline_create_info.pNext = nullptr; pipeline_create_info.stage.flags = 0; - pipeline_create_info.stage.pName = current_pipeline_ptr->shader_stages[0].name.c_str(); + pipeline_create_info.stage.pName = shader_stage_entry_point_ptr->name.c_str(); pipeline_create_info.stage.pNext = nullptr; - pipeline_create_info.stage.pSpecializationInfo = (current_pipeline_ptr->specialization_constants_map[0].size() > 0) ? &specialization_info_vk[n_current_pipeline] - : VK_NULL_HANDLE; + pipeline_create_info.stage.pSpecializationInfo = (specialization_constants_ptr->size() > 0) ? 
&specialization_info_vk[n_current_pipeline] + : VK_NULL_HANDLE; pipeline_create_info.stage.stage = VK_SHADER_STAGE_COMPUTE_BIT; pipeline_create_info.stage.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; pipeline_create_info.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO; - pipeline_create_info.stage.module = current_pipeline_ptr->shader_stages[0].shader_module_ptr->get_module(); + pipeline_create_info.stage.module = shader_stage_entry_point_ptr->shader_module_ptr->get_module(); - if (pipeline_create_info.basePipelineHandle != VK_NULL_HANDLE || - pipeline_create_info.basePipelineIndex != INT32_MAX) + anvil_assert(pipeline_create_info.stage.module != VK_NULL_HANDLE); + + if (pipeline_create_info.basePipelineHandle != VK_NULL_HANDLE || + pipeline_create_info.basePipelineIndex != static_cast(UINT32_MAX) ) { pipeline_create_info.flags |= VK_PIPELINE_CREATE_DERIVATIVE_BIT; } - pipeline_create_info.flags |= ((current_pipeline_ptr->allow_derivatives) ? VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT : 0) | - ((current_pipeline_ptr->disable_optimizations) ? VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT : 0) | - ((pipeline_create_info.basePipelineHandle != VK_NULL_HANDLE) ? VK_PIPELINE_CREATE_DERIVATIVE_BIT : 0); + pipeline_create_info.flags |= ((current_pipeline_create_info_ptr->allows_derivatives () ) ? VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT : 0) | + ((current_pipeline_create_info_ptr->has_optimizations_disabled() ) ? VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT : 0); + + if (m_device_ptr->get_type() == Anvil::DeviceType::MULTI_GPU) + { + pipeline_create_info.flags |= VK_PIPELINE_CREATE_DISPATCH_BASE_KHR; + } - layout_to_bake_item_map[pipeline_create_info.layout].push_back(_bake_item(pipeline_create_info, - current_pipeline_ptr) ); + layout_to_bake_item_map[pipeline_create_info.layout].push_back(BakeItem(pipeline_create_info, + current_pipeline_id, + current_pipeline_ptr) ); } /* We can finally bake the pipeline objects. 
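The derivative-pipeline handling above distinguishes three cases whenever a base pipeline ID has been set. Restated as a plain-Vulkan sketch, where base_is_in_this_bake, base_bake_index and baked_base_pipeline_vk stand in for what the manager derives from its bake-item vector and m_baked_pipelines map:

    VkComputePipelineCreateInfo create_info = {};

    create_info.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;

    if (base_is_in_this_bake)
    {
        /* Case 1: the base pipeline is baked by the same vkCreateComputePipelines() call -
         *         reference it by its index within pCreateInfos. */
        create_info.basePipelineHandle = VK_NULL_HANDLE;
        create_info.basePipelineIndex  = base_bake_index;
    }
    else
    if (baked_base_pipeline_vk != VK_NULL_HANDLE)
    {
        /* Case 2: the base pipeline has already been baked - reference it by handle. */
        create_info.basePipelineHandle = baked_base_pipeline_vk;
        create_info.basePipelineIndex  = -1;
    }
    else
    {
        /* Case 3: a base pipeline was requested but cannot be resolved. */
        anvil_assert_fail();
    }

    create_info.flags |= VK_PIPELINE_CREATE_DERIVATIVE_BIT;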
*/ @@ -236,19 +246,23 @@ bool Anvil::ComputePipelineManager::bake() item_iterator != map_iterator->second.end(); ++item_iterator) { - const _bake_item& current_bake_item = *item_iterator; + const BakeItem& current_bake_item = *item_iterator; pipeline_create_info_items_vk.push_back(current_bake_item.create_info); } result_pipeline_items_vk.resize(pipeline_create_info_items_vk.size() ); - result_vk = vkCreateComputePipelines(locked_device_ptr->get_device_vk(), - m_pipeline_cache_ptr->get_pipeline_cache(), - (uint32_t) pipeline_create_info_items_vk.size(), - &pipeline_create_info_items_vk[0], - nullptr, /* pAllocator */ - &result_pipeline_items_vk[0]); + m_pipeline_cache_ptr->lock(); + { + result_vk = Anvil::Vulkan::vkCreateComputePipelines(m_device_ptr->get_device_vk(), + m_pipeline_cache_ptr->get_pipeline_cache(), + static_cast(pipeline_create_info_items_vk.size() ), + &pipeline_create_info_items_vk[0], + nullptr, /* pAllocator */ + &result_pipeline_items_vk[0]); + } + m_pipeline_cache_ptr->unlock(); if (!is_vk_call_successful(result_vk)) { @@ -261,15 +275,22 @@ bool Anvil::ComputePipelineManager::bake() item_iterator != map_iterator->second.end(); ++item_iterator, ++n_current_item) { - const _bake_item& current_bake_item = *item_iterator; + const BakeItem& current_bake_item = *item_iterator; - anvil_assert(result_pipeline_items_vk[n_current_item] != VK_NULL_HANDLE); + anvil_assert(m_baked_pipelines.find(current_bake_item.pipeline_id) == m_baked_pipelines.end() ); + anvil_assert(result_pipeline_items_vk[n_current_item] != VK_NULL_HANDLE); current_bake_item.pipeline_ptr->baked_pipeline = result_pipeline_items_vk[n_current_item]; - current_bake_item.pipeline_ptr->dirty = false; } } + for (auto& current_baked_pipeline : m_outstanding_pipelines) + { + m_baked_pipelines[current_baked_pipeline.first] = std::move(current_baked_pipeline.second); + } + + m_outstanding_pipelines.clear(); + /* All done */ result = true; end: @@ -277,17 +298,19 @@ bool Anvil::ComputePipelineManager::bake() } /* Please see header for specification */ -std::shared_ptr Anvil::ComputePipelineManager::create(std::weak_ptr in_device_ptr, - bool in_use_pipeline_cache, - std::shared_ptr in_pipeline_cache_to_reuse_ptr) +std::unique_ptr Anvil::ComputePipelineManager::create(Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe, + bool in_use_pipeline_cache, + Anvil::PipelineCache* in_pipeline_cache_to_reuse_ptr) { - std::shared_ptr result_ptr; + std::unique_ptr result_ptr; result_ptr.reset( new Anvil::ComputePipelineManager(in_device_ptr, + in_mt_safe, in_use_pipeline_cache, in_pipeline_cache_to_reuse_ptr) ); return result_ptr; -} \ No newline at end of file +} diff --git a/src/wrappers/debug_messenger.cpp b/src/wrappers/debug_messenger.cpp new file mode 100644 index 00000000..96458c45 --- /dev/null +++ b/src/wrappers/debug_messenger.cpp @@ -0,0 +1,419 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// + +#include "misc/debug_messenger_create_info.h" +#include "wrappers/debug_messenger.h" +#include "wrappers/instance.h" + +Anvil::DebugMessenger::DebugMessenger(const DebugAPI& in_debug_api, + Anvil::DebugMessengerCreateInfoUniquePtr in_create_info_ptr) + :MTSafetySupportProvider (in_create_info_ptr->get_instance_ptr()->is_mt_safe() ), + m_debug_api (in_debug_api), + m_debug_callback (VK_NULL_HANDLE), + m_messenger (VK_NULL_HANDLE) +{ + auto instance_ptr = in_create_info_ptr->get_instance_ptr(); + + m_create_info_ptr = std::move(in_create_info_ptr); + + switch (m_debug_api) + { + case DebugAPI::EXT_DEBUG_REPORT: + { + VkDebugReportCallbackCreateInfoEXT create_info; + const auto& entrypoints = instance_ptr->get_extension_ext_debug_report_entrypoints(); + VkResult result_vk; + + create_info.flags = get_debug_report_flags_for_debug_message_severity_flags(m_create_info_ptr->get_debug_message_severity() ); + create_info.pfnCallback = callback_handler_ext_debug_report; + create_info.pNext = nullptr; + create_info.pUserData = this; + create_info.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT; + + result_vk = entrypoints.vkCreateDebugReportCallbackEXT(instance_ptr->get_instance_vk(), + &create_info, + nullptr, /* pAllocator */ + &m_debug_callback); + + anvil_assert_vk_call_succeeded(result_vk); + ANVIL_REDUNDANT_VARIABLE (result_vk); + + break; + } + + case DebugAPI::EXT_DEBUG_UTILS: + { + VkDebugUtilsMessengerCreateInfoEXT create_info; + const auto& entrypoints = m_create_info_ptr->get_instance_ptr()->get_extension_ext_debug_utils_entrypoints(); + VkResult result_vk; + + create_info.flags = 0; + create_info.messageSeverity = m_create_info_ptr->get_debug_message_severity().get_vk(); + create_info.messageType = m_create_info_ptr->get_debug_message_types().get_vk (); + create_info.pfnUserCallback = callback_handler_ext_debug_utils; + create_info.pNext = nullptr; + create_info.pUserData = this; + create_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT; + + result_vk = entrypoints.vkCreateDebugUtilsMessengerEXT(instance_ptr->get_instance_vk(), + &create_info, + nullptr, /* pAllocator */ + &m_messenger); + + anvil_assert_vk_call_succeeded(result_vk); + ANVIL_REDUNDANT_VARIABLE (result_vk); + + break; + } + + default: + { + anvil_assert_fail(); + } + } +} + +Anvil::DebugMessenger::~DebugMessenger() +{ + auto instance_ptr = m_create_info_ptr->get_instance_ptr(); + + switch (m_debug_api) + { + case DebugAPI::EXT_DEBUG_REPORT: + { + const auto& entrypoints = instance_ptr->get_extension_ext_debug_report_entrypoints(); + + if (m_debug_callback != VK_NULL_HANDLE) + { + entrypoints.vkDestroyDebugReportCallbackEXT(instance_ptr->get_instance_vk(), + m_debug_callback, + nullptr); /* pAllocator */ + } + + break; + } + + case DebugAPI::EXT_DEBUG_UTILS: + { + const auto& entrypoints = instance_ptr->get_extension_ext_debug_utils_entrypoints(); + + if (m_messenger != VK_NULL_HANDLE) + { + entrypoints.vkDestroyDebugUtilsMessengerEXT(instance_ptr->get_instance_vk(), + m_messenger, + nullptr); /* 
pAllocator */ + } + + break; + } + + default: + { + anvil_assert_fail(); + } + } +} + +VkBool32 VKAPI_PTR Anvil::DebugMessenger::callback_handler_ext_debug_report(VkDebugReportFlagsEXT in_flags, + VkDebugReportObjectTypeEXT in_object_type, + uint64_t in_object, + size_t in_location, + int32_t in_message_code, + const char* in_layer_prefix_ptr, + const char* in_message_ptr, + void* in_user_data_ptr) +{ + auto objects = std::vector(1); + Anvil::DebugMessenger* this_ptr = reinterpret_cast(in_user_data_ptr); + auto create_info_ptr = this_ptr->get_create_info_ptr(); + + ANVIL_REDUNDANT_ARGUMENT_CONST(in_layer_prefix_ptr); + ANVIL_REDUNDANT_ARGUMENT_CONST(in_location); + ANVIL_REDUNDANT_ARGUMENT_CONST(in_location); + + objects.at(0) = Anvil::DebugObjectNameInfo(in_object, + "UNKNOWN", /* in_object_name_ptr */ + Anvil::Utils::get_object_type_for_vk_debug_report_object_type(in_object_type) ); + + create_info_ptr->get_callback_function()(static_cast(get_debug_message_severity_flags_for_debug_report_flags(in_flags).get_vk() ), + create_info_ptr->get_debug_message_types(), + nullptr, /* in_opt_message_id_name_ptr */ + in_message_code, + in_message_ptr, + std::vector(), + std::vector(), + objects); + + return VK_FALSE; +} + +VkBool32 VKAPI_PTR Anvil::DebugMessenger::callback_handler_ext_debug_utils(VkDebugUtilsMessageSeverityFlagBitsEXT in_message_severity, + VkDebugUtilsMessageTypeFlagsEXT in_message_types, + const VkDebugUtilsMessengerCallbackDataEXT* in_callback_data_ptr, + void* in_user_data_ptr) +{ + auto cmd_buffer_labels = std::vector (in_callback_data_ptr->cmdBufLabelCount); + auto objects = std::vector (in_callback_data_ptr->objectCount); + auto queue_labels = std::vector (in_callback_data_ptr->queueLabelCount); + Anvil::DebugMessenger* this_ptr = reinterpret_cast(in_user_data_ptr); + + for (uint32_t n_cmd_buffer_label = 0; + n_cmd_buffer_label < in_callback_data_ptr->cmdBufLabelCount; + ++n_cmd_buffer_label) + { + const auto debug_label = Anvil::DebugLabel(in_callback_data_ptr->pCmdBufLabels[n_cmd_buffer_label]); + + cmd_buffer_labels.at(n_cmd_buffer_label) = debug_label; + } + + for (uint32_t n_object = 0; + n_object < in_callback_data_ptr->objectCount; + ++n_object) + { + const auto object = Anvil::DebugObjectNameInfo(in_callback_data_ptr->pObjects[n_object]); + + objects.at(n_object) = object; + } + + for (uint32_t n_queue_label = 0; + n_queue_label < in_callback_data_ptr->queueLabelCount; + ++n_queue_label) + { + const auto debug_label = Anvil::DebugLabel(in_callback_data_ptr->pQueueLabels[n_queue_label]); + + queue_labels.at(n_queue_label) = debug_label; + } + + this_ptr->get_create_info_ptr()->get_callback_function()(static_cast(in_message_severity), + static_cast (in_message_types), + in_callback_data_ptr->pMessageIdName, + in_callback_data_ptr->messageIdNumber, + in_callback_data_ptr->pMessage, + queue_labels, + cmd_buffer_labels, + objects); + + return VK_FALSE; +} + +Anvil::DebugMessengerUniquePtr Anvil::DebugMessenger::create(Anvil::DebugMessengerCreateInfoUniquePtr in_create_info_ptr) +{ + Anvil::DebugMessengerUniquePtr result_ptr (nullptr, + std::default_delete() ); + auto instance_ptr(in_create_info_ptr->get_instance_ptr() ); + + if (instance_ptr->get_enabled_extensions_info()->ext_debug_utils() ) + { + result_ptr.reset( + new Anvil::DebugMessenger(DebugAPI::EXT_DEBUG_UTILS, + std::move(in_create_info_ptr) ) + ); + } + else + if (instance_ptr->get_enabled_extensions_info()->ext_debug_report() ) + { + result_ptr.reset( + new Anvil::DebugMessenger(DebugAPI::EXT_DEBUG_REPORT, + 
std::move(in_create_info_ptr) ) + ); + } + else + { + anvil_assert_fail(); + } + + anvil_assert(result_ptr != nullptr); + return result_ptr; +} + +Anvil::DebugMessageSeverityFlags Anvil::DebugMessenger::get_debug_message_severity_flags_for_debug_report_flags(const VkDebugReportFlagsEXT& in_flags) +{ + Anvil::DebugMessageSeverityFlags result = Anvil::DebugMessageSeverityFlagBits::NONE; + + if ((in_flags & VK_DEBUG_REPORT_INFORMATION_BIT_EXT) != 0) + { + result |= Anvil::DebugMessageSeverityFlagBits::INFO_BIT; + } + + if ((in_flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0) + { + result |= Anvil::DebugMessageSeverityFlagBits::WARNING_BIT; + } + + if ((in_flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT) != 0) + { + result |= Anvil::DebugMessageSeverityFlagBits::WARNING_BIT; + } + + if ((in_flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0) + { + result |= Anvil::DebugMessageSeverityFlagBits::ERROR_BIT; + } + + if ((in_flags & VK_DEBUG_REPORT_DEBUG_BIT_EXT) != 0) + { + result |= Anvil::DebugMessageSeverityFlagBits::VERBOSE_BIT; + } + + return result; +} + +VkDebugReportFlagsEXT Anvil::DebugMessenger::get_debug_report_flags_for_debug_message_severity_flags(const Anvil::DebugMessageSeverityFlags& in_flags) +{ + VkDebugReportFlagsEXT result = 0; + + if ((in_flags & Anvil::DebugMessageSeverityFlagBits::ERROR_BIT) != 0) + { + result |= VK_DEBUG_REPORT_ERROR_BIT_EXT; + } + + if ((in_flags & Anvil::DebugMessageSeverityFlagBits::INFO_BIT) != 0) + { + result |= VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT; + } + + if ((in_flags & Anvil::DebugMessageSeverityFlagBits::VERBOSE_BIT) != 0) + { + result |= VK_DEBUG_REPORT_DEBUG_BIT_EXT; + } + + if ((in_flags & Anvil::DebugMessageSeverityFlagBits::WARNING_BIT) != 0) + { + result |= VK_DEBUG_REPORT_WARNING_BIT_EXT; + } + + return result; +} + + +void Anvil::DebugMessenger::submit_message(const Anvil::DebugMessageSeverityFlagBits& in_message_severity, + const Anvil::DebugMessageTypeFlags& in_message_type_flags, + const char* in_message_id_name_ptr, + int32_t in_message_id, + const char* in_message_ptr, + const std::vector& in_queue_labels, + const std::vector& in_cmd_buffer_labels, + const std::vector& in_objects) +{ + auto instance_ptr = m_create_info_ptr->get_instance_ptr(); + + switch (m_debug_api) + { + case DebugAPI::EXT_DEBUG_REPORT: + { + const auto& entrypoints = instance_ptr->get_extension_ext_debug_report_entrypoints(); + + entrypoints.vkDebugReportMessageEXT(instance_ptr->get_instance_vk(), + get_debug_report_flags_for_debug_message_severity_flags(in_message_severity), + (in_objects.size() > 0) ? Anvil::Utils::get_vk_debug_report_object_type_ext_from_object_type(in_objects.at(0).object_type) + : VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, + (in_objects.size() > 0) ? 
reinterpret_cast(&in_objects.at(0).object_handle) + : 0, + 0, /* location */ + in_message_id, + "", /* pLayerPrefix */ + in_message_ptr); + + break; + } + + case DebugAPI::EXT_DEBUG_UTILS: + { + VkDebugUtilsMessengerCallbackDataEXT callback_data; + auto cmd_buffer_labels_vk = std::vector (in_cmd_buffer_labels.size() ); + const auto& entrypoints = instance_ptr->get_extension_ext_debug_utils_entrypoints(); + auto objects_vk = std::vector (in_objects.size () ); + auto queue_labels_vk = std::vector (in_queue_labels.size () ); + + for (uint32_t n_cmd_buffer_label = 0; + n_cmd_buffer_label < static_cast(in_cmd_buffer_labels.size() ); + ++n_cmd_buffer_label) + { + auto& cmd_buffer_label_vk = cmd_buffer_labels_vk.at(n_cmd_buffer_label); + const auto& src_cmd_buffer_label = in_cmd_buffer_labels.at(n_cmd_buffer_label); + + cmd_buffer_label_vk.color[0] = src_cmd_buffer_label.color[0]; + cmd_buffer_label_vk.color[1] = src_cmd_buffer_label.color[1]; + cmd_buffer_label_vk.color[2] = src_cmd_buffer_label.color[2]; + cmd_buffer_label_vk.color[3] = src_cmd_buffer_label.color[3]; + cmd_buffer_label_vk.pLabelName = src_cmd_buffer_label.name_ptr; + cmd_buffer_label_vk.pNext = nullptr; + cmd_buffer_label_vk.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT; + } + + for (uint32_t n_object = 0; + n_object < static_cast(in_objects.size() ); + ++n_object) + { + auto& object_vk = objects_vk.at(n_object); + const auto& src_object = in_objects.at(n_object); + + object_vk.objectHandle = src_object.object_handle; + object_vk.objectType = Anvil::Utils::get_vk_object_type_for_object_type(src_object.object_type); + object_vk.pNext = nullptr; + object_vk.pObjectName = src_object.object_name_ptr; + object_vk.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT; + } + + for (uint32_t n_queue_label = 0; + n_queue_label < static_cast(in_queue_labels.size() ); + ++n_queue_label) + { + auto& queue_label_vk = queue_labels_vk.at(n_queue_label); + const auto& src_queue_label = in_queue_labels.at(n_queue_label); + + queue_label_vk.color[0] = src_queue_label.color[0]; + queue_label_vk.color[1] = src_queue_label.color[1]; + queue_label_vk.color[2] = src_queue_label.color[2]; + queue_label_vk.color[3] = src_queue_label.color[3]; + queue_label_vk.pLabelName = src_queue_label.name_ptr; + queue_label_vk.pNext = nullptr; + queue_label_vk.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT; + } + + callback_data.cmdBufLabelCount = static_cast(in_cmd_buffer_labels.size() ); + callback_data.flags = 0; + callback_data.messageIdNumber = in_message_id; + callback_data.objectCount = static_cast(in_objects.size() ); + callback_data.pCmdBufLabels = (cmd_buffer_labels_vk.size() > 0) ? &cmd_buffer_labels_vk.at(0) : nullptr; + callback_data.pMessage = in_message_ptr; + callback_data.pMessageIdName = in_message_id_name_ptr; + callback_data.pNext = nullptr; + callback_data.pObjects = (objects_vk.size () > 0) ? &objects_vk.at (0) : nullptr; + callback_data.pQueueLabels = (queue_labels_vk.size() > 0) ? 
&queue_labels_vk.at(0) : nullptr; + callback_data.queueLabelCount = static_cast(queue_labels_vk.size() ); + callback_data.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT; + + entrypoints.vkSubmitDebugUtilsMessageEXT(instance_ptr->get_instance_vk(), + static_cast(in_message_severity), + in_message_type_flags.get_vk(), + &callback_data); + + break; + } + + default: + { + anvil_assert_fail(); + } + } +} diff --git a/src/wrappers/descriptor_pool.cpp b/src/wrappers/descriptor_pool.cpp index fe1526ef..f7e715e2 100644 --- a/src/wrappers/descriptor_pool.cpp +++ b/src/wrappers/descriptor_pool.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -21,7 +21,10 @@ // #include "misc/debug.h" +#include "misc/descriptor_pool_create_info.h" +#include "misc/descriptor_set_create_info.h" #include "misc/object_tracker.h" +#include "misc/struct_chainer.h" #include "wrappers/descriptor_pool.h" #include "wrappers/descriptor_set.h" #include "wrappers/descriptor_set_layout.h" @@ -29,24 +32,18 @@ /* Please see header for specification */ -Anvil::DescriptorPool::DescriptorPool(std::weak_ptr in_device_ptr, - uint32_t in_n_max_sets, - bool in_releaseable_sets) +Anvil::DescriptorPool::DescriptorPool(Anvil::DescriptorPoolCreateInfoUniquePtr in_create_info_ptr, + bool in_mt_safe) :CallbacksSupportProvider (DESCRIPTOR_POOL_CALLBACK_ID_COUNT), - DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT), - m_baked (false), - m_device_ptr (in_device_ptr), - m_n_max_sets (in_n_max_sets), - m_pool (VK_NULL_HANDLE), - m_releaseable_sets (in_releaseable_sets) + DebugMarkerSupportProvider(in_create_info_ptr->get_device_ptr(), + Anvil::ObjectType::DESCRIPTOR_POOL), + MTSafetySupportProvider (in_mt_safe), + m_pool (VK_NULL_HANDLE) { - memset(m_descriptor_count, - 0, - sizeof(m_descriptor_count) ); + m_create_info_ptr = std::move(in_create_info_ptr); /* Register the object in the Object Tracker */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_DESCRIPTOR_POOL, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::DESCRIPTOR_POOL, this); } @@ -54,33 +51,35 @@ Anvil::DescriptorPool::DescriptorPool(std::weak_ptr in_device Anvil::DescriptorPool::~DescriptorPool() { /* Unregister the instance */ - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_DESCRIPTOR_POOL, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::DESCRIPTOR_POOL, this); if (m_pool != VK_NULL_HANDLE) { - std::shared_ptr device_locked_ptr(m_device_ptr); - - vkDestroyDescriptorPool(device_locked_ptr->get_device_vk(), - m_pool, - nullptr /* pAllocator */); + lock(); + { + Anvil::Vulkan::vkDestroyDescriptorPool(m_device_ptr->get_device_vk(), + m_pool, + nullptr /* pAllocator */); + } + unlock(); m_pool = VK_NULL_HANDLE; } } /* Please see header for specification */ -bool Anvil::DescriptorPool::alloc_descriptor_sets(uint32_t in_n_sets, - std::shared_ptr* in_descriptor_set_layouts_ptr, - std::shared_ptr* out_descriptor_sets_ptr, - VkResult* out_opt_result_ptr) +bool Anvil::DescriptorPool::alloc_descriptor_sets(uint32_t in_n_sets, + const DescriptorSetAllocation* in_ds_allocations_ptr, + DescriptorSetUniquePtr* out_descriptor_sets_ptr, + VkResult* out_opt_result_ptr) { bool result 
= false; m_ds_cache.resize(in_n_sets); result = alloc_descriptor_sets(in_n_sets, - in_descriptor_set_layouts_ptr, + in_ds_allocations_ptr, &m_ds_cache.at(0), out_opt_result_ptr); @@ -91,9 +90,29 @@ bool Anvil::DescriptorPool::alloc_descriptor_sets(uint32_t ++n_set) { out_descriptor_sets_ptr[n_set] = Anvil::DescriptorSet::create(m_device_ptr, - shared_from_this(), - in_descriptor_set_layouts_ptr[n_set], - m_ds_cache[n_set]); + this, + in_ds_allocations_ptr[n_set].ds_layout_ptr, + m_ds_cache[n_set], + Anvil::Utils::convert_boolean_to_mt_safety_enum(is_mt_safe()) ); + + if (out_descriptor_sets_ptr[n_set] == nullptr) + { + anvil_assert_fail(); + + result = false; + goto end; + } + } + } + +end: + if (!result) + { + for (uint32_t n_set = 0; + n_set < in_n_sets; + ++n_set) + { + out_descriptor_sets_ptr[n_set].reset(); } } @@ -101,132 +120,236 @@ bool Anvil::DescriptorPool::alloc_descriptor_sets(uint32_t } /* Please see header for specification */ -bool Anvil::DescriptorPool::alloc_descriptor_sets(uint32_t in_n_sets, - std::shared_ptr* in_descriptor_set_layouts_ptr, - VkDescriptorSet* out_descriptor_sets_vk_ptr, - VkResult* out_opt_result_ptr) +bool Anvil::DescriptorPool::alloc_descriptor_sets(uint32_t in_n_sets, + const DescriptorSetAllocation* in_ds_allocations_ptr, + VkDescriptorSet* out_descriptor_sets_vk_ptr, + VkResult* out_opt_result_ptr) { - std::shared_ptr device_locked_ptr(m_device_ptr); - VkDescriptorSetAllocateInfo ds_alloc_info; - VkResult result_vk; - - if (!m_baked) + const auto& dp_create_flags (m_create_info_ptr->get_create_flags() ); + bool result (false); + VkResult result_vk; + bool should_chain_variable_descriptor_count_struct(false); + Anvil::StructChainer struct_chainer; + std::vector variable_descriptor_counts; + + lock(); { - bake(); + m_ds_layout_cache.resize(in_n_sets); - anvil_assert(m_baked); - } + for (uint32_t n_set = 0; + n_set < in_n_sets; + ++n_set) + { + if (in_ds_allocations_ptr[n_set].ds_layout_ptr != nullptr) + { + auto ds_create_info_ptr = in_ds_allocations_ptr[n_set].ds_layout_ptr->get_create_info(); + + if (ds_create_info_ptr->contains_variable_descriptor_count_binding() ) + { + if ((dp_create_flags & Anvil::DescriptorPoolCreateFlagBits::UPDATE_AFTER_BIND_BIT) != 0) + { + anvil_assert_fail(); + + result = false; + goto end; + } + + should_chain_variable_descriptor_count_struct = true; + + variable_descriptor_counts.push_back(in_ds_allocations_ptr[n_set].n_variable_descriptor_bindings); + } + else + { + variable_descriptor_counts.push_back(0); + } + + m_ds_layout_cache[n_set] = in_ds_allocations_ptr[n_set].ds_layout_ptr->get_layout(); + } + else + { + /* This is a "gap" set. 
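The allocation path above now consumes DescriptorSetAllocation descriptors, which is what lets a per-set variable descriptor count be requested at alloc time. A hypothetical two-set allocation is sketched below; the field-by-field initialization, the layout/pool variables and the count of 1024 are assumptions - only the ds_layout_ptr and n_variable_descriptor_bindings fields are visible in this hunk.

    Anvil::DescriptorSetAllocation ds_allocations[2];

    ds_allocations[0].ds_layout_ptr                  = regular_ds_layout_ptr;
    ds_allocations[0].n_variable_descriptor_bindings = 0;

    ds_allocations[1].ds_layout_ptr                  = bindless_ds_layout_ptr; /* layout with a variable-count binding */
    ds_allocations[1].n_variable_descriptor_bindings = 1024;

    Anvil::DescriptorSetUniquePtr result_ds_ptrs[2];
    VkResult                      result_vk;

    const bool result = descriptor_pool_ptr->alloc_descriptor_sets(2, /* in_n_sets */
                                                                   ds_allocations,
                                                                   result_ds_ptrs,
                                                                   &result_vk);

    anvil_assert(result);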
*/ + m_ds_layout_cache[n_set] = m_device_ptr->get_dummy_descriptor_set_layout()->get_layout(); + } + } - m_ds_layout_cache.resize(in_n_sets); + { + VkDescriptorSetAllocateInfo ds_alloc_info; - for (uint32_t n_set = 0; - n_set < in_n_sets; - ++n_set) - { - m_ds_layout_cache[n_set] = in_descriptor_set_layouts_ptr[n_set]->get_layout(); - } + ds_alloc_info.descriptorPool = m_pool; + ds_alloc_info.descriptorSetCount = in_n_sets; + ds_alloc_info.pNext = nullptr; + ds_alloc_info.pSetLayouts = &m_ds_layout_cache.at(0); + ds_alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO; + + struct_chainer.append_struct(ds_alloc_info); + } + + if (should_chain_variable_descriptor_count_struct) + { + VkDescriptorSetVariableDescriptorCountAllocateInfoEXT variable_descriptor_count_struct; - ds_alloc_info.descriptorPool = m_pool; - ds_alloc_info.descriptorSetCount = in_n_sets; - ds_alloc_info.pNext = nullptr; - ds_alloc_info.pSetLayouts = &m_ds_layout_cache[0]; - ds_alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO; + anvil_assert(variable_descriptor_counts.size() == in_n_sets); + + variable_descriptor_count_struct.descriptorSetCount = in_n_sets; + variable_descriptor_count_struct.pDescriptorCounts = &variable_descriptor_counts.at(0); + variable_descriptor_count_struct.pNext = nullptr; + variable_descriptor_count_struct.sType = static_cast(VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT); + + struct_chainer.append_struct(variable_descriptor_count_struct); + } + + { + auto chain_ptr = struct_chainer.create_chain(); - result_vk = vkAllocateDescriptorSets(device_locked_ptr->get_device_vk(), - &ds_alloc_info, - out_descriptor_sets_vk_ptr); + result_vk = Anvil::Vulkan::vkAllocateDescriptorSets(m_device_ptr->get_device_vk(), + chain_ptr->get_root_struct(), + out_descriptor_sets_vk_ptr); + } + } + unlock(); if (out_opt_result_ptr != nullptr) { *out_opt_result_ptr = result_vk; } - return is_vk_call_successful(result_vk); + result = is_vk_call_successful(result_vk); +end: + return result; } /* Please see header for specification */ -void Anvil::DescriptorPool::bake() +Anvil::DescriptorPoolUniquePtr Anvil::DescriptorPool::create(Anvil::DescriptorPoolCreateInfoUniquePtr in_create_info_ptr) { - VkDescriptorPoolCreateInfo descriptor_pool_create_info; - VkDescriptorPoolSize descriptor_pool_sizes[VK_DESCRIPTOR_TYPE_RANGE_SIZE]; - std::shared_ptr device_locked_ptr (m_device_ptr); - uint32_t n_descriptor_types_used(0); - VkResult result_vk (VK_ERROR_INITIALIZATION_FAILED); + const bool is_mt_safe = Anvil::Utils::convert_mt_safety_enum_to_boolean(in_create_info_ptr->get_mt_safety (), + in_create_info_ptr->get_device_ptr() ); + DescriptorPoolUniquePtr result_ptr(nullptr, + std::default_delete() ); - ANVIL_REDUNDANT_VARIABLE(result_vk); + result_ptr.reset( + new Anvil::DescriptorPool(std::move(in_create_info_ptr), + is_mt_safe) + ); - if (m_pool != VK_NULL_HANDLE) + if (result_ptr != nullptr) { - vkDestroyDescriptorPool(device_locked_ptr->get_device_vk(), - m_pool, - nullptr /* pAllocator */); + if (!result_ptr->init() ) + { + result_ptr.reset(); + } + } - set_vk_handle(VK_NULL_HANDLE); - m_pool = VK_NULL_HANDLE; + return result_ptr; +} + +/* Please see header for specification */ +bool Anvil::DescriptorPool::init() +{ + static const Anvil::DescriptorType supported_descriptor_types[] = + { + Anvil::DescriptorType::COMBINED_IMAGE_SAMPLER, + Anvil::DescriptorType::INLINE_UNIFORM_BLOCK, + Anvil::DescriptorType::INPUT_ATTACHMENT, + Anvil::DescriptorType::SAMPLED_IMAGE, + 
Anvil::DescriptorType::SAMPLER, + Anvil::DescriptorType::STORAGE_BUFFER, + Anvil::DescriptorType::STORAGE_BUFFER_DYNAMIC, + Anvil::DescriptorType::STORAGE_IMAGE, + Anvil::DescriptorType::STORAGE_TEXEL_BUFFER, + Anvil::DescriptorType::UNIFORM_BUFFER, + Anvil::DescriptorType::UNIFORM_BUFFER_DYNAMIC, + Anvil::DescriptorType::UNIFORM_TEXEL_BUFFER, + }; + const uint32_t n_supported_descriptor_types = sizeof(supported_descriptor_types) / sizeof(supported_descriptor_types[0]); + + VkDescriptorPoolSize descriptor_pool_sizes[n_supported_descriptor_types]; + const auto& dp_create_flags (m_create_info_ptr->get_create_flags() ); + uint32_t n_descriptor_types_used (0); + bool result (false); + VkResult result_vk (VK_ERROR_INITIALIZATION_FAILED); + Anvil::StructChainer struct_chainer; + + if ((dp_create_flags & Anvil::DescriptorPoolCreateFlagBits::UPDATE_AFTER_BIND_BIT) != 0) + { + if (!m_device_ptr->get_extension_info()->ext_descriptor_indexing() ) + { + anvil_assert_fail(); + + goto end; + } } /* Convert the counters to an arrayed, linear representation */ - for (uint32_t n_descriptor_type = 0; - n_descriptor_type < VK_DESCRIPTOR_TYPE_RANGE_SIZE; - ++n_descriptor_type) + for (const auto& current_descriptor_type : supported_descriptor_types) { - if (m_descriptor_count[n_descriptor_type] > 0) + const auto n_descriptors_needed = m_create_info_ptr->get_n_descriptors_for_descriptor_type(current_descriptor_type); + + if (n_descriptors_needed > 0) { uint32_t current_index = n_descriptor_types_used; - descriptor_pool_sizes[current_index].descriptorCount = m_descriptor_count[n_descriptor_type]; - descriptor_pool_sizes[current_index].type = (VkDescriptorType) n_descriptor_type; + descriptor_pool_sizes[current_index].descriptorCount = n_descriptors_needed; + descriptor_pool_sizes[current_index].type = static_cast(current_descriptor_type); n_descriptor_types_used++; } } - if (n_descriptor_types_used == 0) + /* Set up the descriptor pool instance */ { - /* If an empty pool is needed, request space for a single dummy descriptor. */ - descriptor_pool_sizes[0].descriptorCount = 1; - descriptor_pool_sizes[0].type = VK_DESCRIPTOR_TYPE_SAMPLER; + VkDescriptorPoolCreateInfo create_info; + + create_info.flags = m_create_info_ptr->get_create_flags().get_vk(); + create_info.maxSets = m_create_info_ptr->get_n_maximum_sets(); + create_info.pNext = nullptr; + create_info.poolSizeCount = n_descriptor_types_used; + create_info.pPoolSizes = descriptor_pool_sizes; + create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO; - n_descriptor_types_used = 1; + struct_chainer.append_struct(create_info); } - /* Set up the descriptor pool instance */ - descriptor_pool_create_info.flags = (m_releaseable_sets) ? 
VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT - : 0u; - descriptor_pool_create_info.maxSets = m_n_max_sets; - descriptor_pool_create_info.pNext = nullptr; - descriptor_pool_create_info.poolSizeCount = n_descriptor_types_used; - descriptor_pool_create_info.pPoolSizes = descriptor_pool_sizes; - descriptor_pool_create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO; - - result_vk = vkCreateDescriptorPool(device_locked_ptr->get_device_vk(), - &descriptor_pool_create_info, - nullptr, /* pAllocator */ - &m_pool); + { + const auto& n_max_inline_uniform_block_bindings = m_create_info_ptr->get_n_maximum_inline_uniform_block_bindings(); + + if (n_max_inline_uniform_block_bindings > 0) + { + VkDescriptorPoolInlineUniformBlockCreateInfoEXT create_info; + + anvil_assert(m_device_ptr->get_extension_info()->ext_inline_uniform_block() ); + + create_info.maxInlineUniformBlockBindings = n_max_inline_uniform_block_bindings; + create_info.pNext = nullptr; + create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT; + + struct_chainer.append_struct(create_info); + } + } + + { + auto chain_ptr = struct_chainer.create_chain(); + + result_vk = Anvil::Vulkan::vkCreateDescriptorPool(m_device_ptr->get_device_vk(), + chain_ptr->get_root_struct (), + nullptr, /* pAllocator */ + &m_pool); + } anvil_assert_vk_call_succeeded(result_vk); if (is_vk_call_successful(result_vk) ) { set_vk_handle(m_pool); } + else + { + anvil_assert(result); - m_baked = true; -} - -/* Please see header for specification */ -std::shared_ptr Anvil::DescriptorPool::create(std::weak_ptr in_device_ptr, - uint32_t in_n_max_sets, - bool in_releaseable_sets) -{ - std::shared_ptr result_ptr; - - result_ptr.reset( - new Anvil::DescriptorPool(in_device_ptr, - in_n_max_sets, - in_releaseable_sets) - ); + goto end; + } - return result_ptr; + result = true; +end: + return result; } /* Please see header for specification */ @@ -236,11 +359,15 @@ bool Anvil::DescriptorPool::reset() if (m_pool != VK_NULL_HANDLE) { - std::shared_ptr device_locked_ptr(m_device_ptr); + /* TODO: Host synchronization to VkDescriptorSetObjects alloc'ed from the pool. */ + lock(); + { + result_vk = Anvil::Vulkan::vkResetDescriptorPool(m_device_ptr->get_device_vk(), + m_pool, + 0 /* flags */); + } + unlock(); - result_vk = vkResetDescriptorPool(device_locked_ptr->get_device_vk(), - m_pool, - 0 /* flags */); anvil_assert_vk_call_succeeded(result_vk); if (is_vk_call_successful(result_vk) ) diff --git a/src/wrappers/descriptor_set.cpp b/src/wrappers/descriptor_set.cpp index ec2caa94..6326bc3b 100644 --- a/src/wrappers/descriptor_set.cpp +++ b/src/wrappers/descriptor_set.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -20,24 +20,31 @@ // THE SOFTWARE. 
// +#include "misc/buffer_create_info.h" #include "misc/debug.h" +#include "misc/descriptor_set_create_info.h" #include "misc/object_tracker.h" #include "wrappers/buffer.h" #include "wrappers/buffer_view.h" #include "wrappers/descriptor_pool.h" #include "wrappers/descriptor_set.h" #include "wrappers/descriptor_set_layout.h" +#include "wrappers/descriptor_update_template.h" #include "wrappers/device.h" #include "wrappers/image_view.h" #include "wrappers/sampler.h" +#ifdef max + #undef max +#endif + /** Please see header for specification */ Anvil::DescriptorSet::BindingItem& Anvil::DescriptorSet::BindingItem::operator=(const BufferBindingElement& in_element) { buffer_ptr = in_element.buffer_ptr; buffer_view_ptr = nullptr; dirty = true; - image_layout = VK_IMAGE_LAYOUT_UNDEFINED; + image_layout = Anvil::ImageLayout::UNDEFINED; image_view_ptr = nullptr; sampler_ptr = nullptr; size = in_element.size; @@ -85,7 +92,7 @@ Anvil::DescriptorSet::BindingItem& Anvil::DescriptorSet::BindingItem::operator=( buffer_ptr = nullptr; buffer_view_ptr = nullptr; dirty = true; - image_layout = VK_IMAGE_LAYOUT_UNDEFINED; + image_layout = Anvil::ImageLayout::UNDEFINED; image_view_ptr = nullptr; sampler_ptr = in_element.sampler_ptr; size = UINT64_MAX; @@ -101,7 +108,7 @@ Anvil::DescriptorSet::BindingItem& Anvil::DescriptorSet::BindingItem::operator=( buffer_ptr = nullptr; buffer_view_ptr = in_element.buffer_view_ptr; dirty = true; - image_layout = VK_IMAGE_LAYOUT_UNDEFINED; + image_layout = Anvil::ImageLayout::UNDEFINED; image_view_ptr = nullptr; sampler_ptr = nullptr; size = UINT64_MAX; @@ -114,11 +121,11 @@ Anvil::DescriptorSet::BindingItem& Anvil::DescriptorSet::BindingItem::operator=( /** Please see header for specification */ Anvil::DescriptorSet::BindingItem::~BindingItem() { - /* Stub */ + buffer_ptr = nullptr; } /** Please see header for specification */ -Anvil::DescriptorSet::BufferBindingElement::BufferBindingElement(std::shared_ptr in_buffer_ptr) +Anvil::DescriptorSet::BufferBindingElement::BufferBindingElement(Anvil::Buffer* in_buffer_ptr) { anvil_assert(in_buffer_ptr != nullptr); @@ -128,15 +135,15 @@ Anvil::DescriptorSet::BufferBindingElement::BufferBindingElement(std::shared_ptr } /** Please see header for specification */ -Anvil::DescriptorSet::BufferBindingElement::BufferBindingElement(std::shared_ptr in_buffer_ptr, - VkDeviceSize in_start_offset, - VkDeviceSize in_size) +Anvil::DescriptorSet::BufferBindingElement::BufferBindingElement(Anvil::Buffer* in_buffer_ptr, + VkDeviceSize in_start_offset, + VkDeviceSize in_size) { anvil_assert(in_buffer_ptr != nullptr); if (in_size != VK_WHOLE_SIZE) { - anvil_assert(in_start_offset + in_size <= in_buffer_ptr->get_size() ); + anvil_assert(in_start_offset + in_size <= in_buffer_ptr->get_create_info_ptr()->get_size() ); } buffer_ptr = in_buffer_ptr; @@ -153,13 +160,15 @@ Anvil::DescriptorSet::BufferBindingElement::~BufferBindingElement() /** Please see header for specification */ Anvil::DescriptorSet::BufferBindingElement::BufferBindingElement(const BufferBindingElement& in) { - buffer_ptr = in.buffer_ptr; + buffer_ptr = in.buffer_ptr; + size = in.size; + start_offset = in.start_offset; } /** Please see header for specification */ -Anvil::DescriptorSet::CombinedImageSamplerBindingElement::CombinedImageSamplerBindingElement(VkImageLayout in_image_layout, - std::shared_ptr in_image_view_ptr, - std::shared_ptr in_sampler_ptr) +Anvil::DescriptorSet::CombinedImageSamplerBindingElement::CombinedImageSamplerBindingElement(Anvil::ImageLayout in_image_layout, + 
Anvil::ImageView* in_image_view_ptr, + Anvil::Sampler* in_sampler_ptr) { anvil_assert(in_image_view_ptr != nullptr); @@ -171,7 +180,8 @@ Anvil::DescriptorSet::CombinedImageSamplerBindingElement::CombinedImageSamplerBi /** Please see header for specification */ Anvil::DescriptorSet::CombinedImageSamplerBindingElement::~CombinedImageSamplerBindingElement() { - /* Stub */ + image_view_ptr = nullptr; + sampler_ptr = nullptr; } /** Please see header for specification */ @@ -183,8 +193,8 @@ Anvil::DescriptorSet::CombinedImageSamplerBindingElement::CombinedImageSamplerBi } /** Please see header for specification */ -Anvil::DescriptorSet::ImageBindingElement::ImageBindingElement(VkImageLayout in_image_layout, - std::shared_ptr in_image_view_ptr) +Anvil::DescriptorSet::ImageBindingElement::ImageBindingElement(Anvil::ImageLayout in_image_layout, + Anvil::ImageView* in_image_view_ptr) { anvil_assert(in_image_view_ptr != nullptr); @@ -206,7 +216,7 @@ Anvil::DescriptorSet::ImageBindingElement::~ImageBindingElement() } /** Please see header for specification */ -Anvil::DescriptorSet::SamplerBindingElement::SamplerBindingElement(std::shared_ptr in_sampler_ptr) +Anvil::DescriptorSet::SamplerBindingElement::SamplerBindingElement(Anvil::Sampler* in_sampler_ptr) { anvil_assert(in_sampler_ptr != nullptr); @@ -222,11 +232,11 @@ Anvil::DescriptorSet::SamplerBindingElement::SamplerBindingElement(const Sampler /** Please see header for specification */ Anvil::DescriptorSet::SamplerBindingElement::~SamplerBindingElement() { - /* Stub */ + sampler_ptr = nullptr; } /** Please see header for specification */ -Anvil::DescriptorSet::TexelBufferBindingElement::TexelBufferBindingElement(std::shared_ptr in_buffer_view_ptr) +Anvil::DescriptorSet::TexelBufferBindingElement::TexelBufferBindingElement(Anvil::BufferView* in_buffer_view_ptr) { anvil_assert(in_buffer_view_ptr != nullptr); @@ -246,12 +256,14 @@ Anvil::DescriptorSet::TexelBufferBindingElement::TexelBufferBindingElement(const } /** Please see header for specification */ -Anvil::DescriptorSet::DescriptorSet(std::weak_ptr in_device_ptr, - std::shared_ptr in_parent_pool_ptr, - std::shared_ptr in_layout_ptr, - VkDescriptorSet in_descriptor_set) +Anvil::DescriptorSet::DescriptorSet(const Anvil::BaseDevice* in_device_ptr, + Anvil::DescriptorPool* in_parent_pool_ptr, + const Anvil::DescriptorSetLayout* in_layout_ptr, + VkDescriptorSet in_descriptor_set, + bool in_mt_safe) :DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT), + Anvil::ObjectType::DESCRIPTOR_SET), + MTSafetySupportProvider (in_mt_safe), m_descriptor_set (in_descriptor_set), m_device_ptr (in_device_ptr), m_dirty (true), @@ -261,12 +273,10 @@ Anvil::DescriptorSet::DescriptorSet(std::weak_ptr { alloc_bindings(); - in_layout_ptr->register_for_callbacks( - Anvil::DESCRIPTOR_SET_LAYOUT_CALLBACK_ID_BINDING_ADDED, - std::bind(&DescriptorSet::on_binding_added_to_layout, - this), - this - ); + if (in_device_ptr->get_extension_info()->ext_inline_uniform_block() ) + { + m_cached_ds_write_iub_items_vk.resize(m_device_ptr->get_physical_device_properties().ext_inline_uniform_block_properties_ptr->max_descriptor_set_inline_uniform_blocks); + } m_parent_pool_ptr->register_for_callbacks( Anvil::DESCRIPTOR_POOL_CALLBACK_ID_POOL_RESET, @@ -275,318 +285,178 @@ Anvil::DescriptorSet::DescriptorSet(std::weak_ptr this ); - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_DESCRIPTOR_SET, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::DESCRIPTOR_SET, this); } 
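/* Illustrative sketch: the constructor above sizes m_cached_ds_write_iub_items_vk from the device's
 * maxDescriptorSetInlineUniformBlocks limit. In raw Vulkan that limit is queried as shown below;
 * "physical_device" is an assumed VkPhysicalDevice handle (Vulkan 1.1 core entrypoint, or the
 * KHR get_physical_device_properties2 variant on 1.0).
 */
VkPhysicalDeviceInlineUniformBlockPropertiesEXT iub_props = {};
iub_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT;

VkPhysicalDeviceProperties2 props2 = {};
props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
props2.pNext = &iub_props;

vkGetPhysicalDeviceProperties2(physical_device, &props2);

const uint32_t max_iub_bindings_per_set = iub_props.maxDescriptorSetInlineUniformBlocks;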
/** Please see header for specification */ Anvil::DescriptorSet::~DescriptorSet() { - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_DESCRIPTOR_SET, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::DESCRIPTOR_SET, this); } -/** Takes the descriptor set layout and: - * - * 1) resizes m_caches_ds_write_items_vk to hold as many bindings as the layout describes - * 2) resizes each m_bindings item, described by the layout, to hold as many descriptor - * items, as described by the layout object. - **/ +/** TODO */ void Anvil::DescriptorSet::alloc_bindings() { - const uint32_t n_bindings = m_layout_ptr->get_n_bindings(); + const auto layout_info_ptr = m_layout_ptr->get_create_info(); + bool has_variable_descriptor_count_binding = false; + const uint32_t n_bindings = layout_info_ptr->get_n_bindings(); + uint32_t variable_descriptor_count_binding_index = UINT32_MAX; + uint32_t variable_descriptor_count_binding_size = 0; m_cached_ds_write_items_vk.resize(n_bindings); + has_variable_descriptor_count_binding = layout_info_ptr->contains_variable_descriptor_count_binding(&variable_descriptor_count_binding_index, + &variable_descriptor_count_binding_size); + for (uint32_t n_binding = 0; n_binding < n_bindings; ++n_binding) { - uint32_t array_size = 0; - uint32_t binding_index; - - m_layout_ptr->get_binding_properties(n_binding, - &binding_index, - nullptr, /* out_opt_descriptor_type_ptr */ - &array_size, - nullptr, /* out_opt_stage_flags_ptr */ - nullptr); /* out_opt_immutable_samplers_enabled_ptr */ - - if (m_bindings[binding_index].size() != array_size) + uint32_t array_size = 0; + uint32_t binding_index = UINT32_MAX; + Anvil::DescriptorType descriptor_type = Anvil::DescriptorType::UNKNOWN; + + layout_info_ptr->get_binding_properties_by_index_number(n_binding, + &binding_index, + &descriptor_type, + &array_size, + nullptr, /* out_opt_stage_flags_ptr */ + nullptr, /* out_opt_immutable_samplers_enabled_ptr */ + nullptr); /* out_opt_flags_ptr */ + + if (has_variable_descriptor_count_binding && + binding_index == variable_descriptor_count_binding_index) { - m_bindings[binding_index] = BindingItems(); - - m_bindings[binding_index].resize(array_size); + array_size = variable_descriptor_count_binding_size; } - } -} - -/* Please see header for specification */ -bool Anvil::DescriptorSet::bake() -{ - bool result = false; - - anvil_assert(!m_unusable); - - if (m_dirty) - { - uint32_t cached_ds_buffer_info_items_array_offset = 0; - uint32_t cached_ds_image_info_items_array_offset = 0; - uint32_t cached_ds_texel_buffer_info_items_array_offset = 0; - const uint32_t n_bindings = static_cast(m_bindings.size() ); - uint32_t n_max_ds_info_items_to_cache = 0; - m_cached_ds_info_buffer_info_items_vk.clear(); - m_cached_ds_info_image_info_items_vk.clear(); - m_cached_ds_info_texel_buffer_info_items_vk.clear(); - m_cached_ds_write_items_vk.clear(); + auto binding_iterator = m_binding_ptrs.find(binding_index); - for (auto& binding : m_bindings) + if (binding_iterator == m_binding_ptrs.end() || + binding_iterator->second.size() != array_size) { - const uint32_t n_current_binding_items = static_cast(binding.second.size() ); - - n_max_ds_info_items_to_cache += n_current_binding_items; - } - - m_cached_ds_info_buffer_info_items_vk.reserve (n_max_ds_info_items_to_cache); - m_cached_ds_info_image_info_items_vk.reserve (n_max_ds_info_items_to_cache); - m_cached_ds_info_texel_buffer_info_items_vk.reserve(n_max_ds_info_items_to_cache); + m_binding_ptrs[binding_index] = BindingItemUniquePtrs(); - 
for (uint32_t n_binding = 0; - n_binding < n_bindings; - ++n_binding) - { - uint32_t current_binding_index; - VkDescriptorType descriptor_type; - bool immutable_samplers_enabled = false; - const uint32_t start_ds_buffer_info_items_array_offset = cached_ds_buffer_info_items_array_offset; - const uint32_t start_ds_image_info_items_array_offset = cached_ds_image_info_items_array_offset; - const uint32_t start_ds_texel_buffer_info_items_array_offset = cached_ds_texel_buffer_info_items_array_offset; - VkWriteDescriptorSet write_ds_vk; - - if (!m_layout_ptr->get_binding_properties(n_binding, - ¤t_binding_index, - &descriptor_type, - nullptr, /* out_opt_descriptor_array_size_ptr */ - nullptr, /* out_opt_stage_flags_ptr */ - &immutable_samplers_enabled) ) + if (descriptor_type != Anvil::DescriptorType::INLINE_UNIFORM_BLOCK) { - anvil_assert_fail(); + m_binding_ptrs[binding_index].resize(array_size); } - - /* For each array item, initialize a descriptor info item.. */ - BindingItems& current_binding_items = m_bindings[current_binding_index]; - const uint32_t n_current_binding_items = static_cast(current_binding_items.size() ); - - for (uint32_t n_current_binding_item = 0; - n_current_binding_item < n_current_binding_items; - ++n_current_binding_item) - { - BindingItem& current_binding_item = current_binding_items[n_current_binding_item]; - - if (!current_binding_item.dirty) - { - continue; - } - - if (current_binding_item.buffer_ptr != nullptr) - { - VkDescriptorBufferInfo buffer_info; - - buffer_info.buffer = current_binding_item.buffer_ptr->get_buffer(); - - if (current_binding_item.start_offset != UINT64_MAX) - { - buffer_info.offset = current_binding_item.start_offset; - buffer_info.range = current_binding_item.size; - } - else - { - buffer_info.offset = current_binding_item.buffer_ptr->get_start_offset(); - buffer_info.range = current_binding_item.buffer_ptr->get_size (); - } - - m_cached_ds_info_buffer_info_items_vk.push_back(buffer_info); - - ++cached_ds_buffer_info_items_array_offset; - } - else - if (current_binding_item.buffer_view_ptr != nullptr) - { - m_cached_ds_info_texel_buffer_info_items_vk.push_back( current_binding_item.buffer_view_ptr->get_buffer_view() ); - - ++cached_ds_texel_buffer_info_items_array_offset; - } - else - if (current_binding_item.image_view_ptr != nullptr) - { - VkDescriptorImageInfo image_info; - - image_info.imageLayout = current_binding_item.image_layout; - image_info.imageView = current_binding_item.image_view_ptr->get_image_view(); - - if (false == immutable_samplers_enabled && - nullptr != current_binding_item.sampler_ptr) - { - image_info.sampler = current_binding_item.sampler_ptr->get_sampler(); - } - else - { - image_info.sampler = VK_NULL_HANDLE; - } - - m_cached_ds_info_image_info_items_vk.push_back(image_info); - - ++cached_ds_image_info_items_array_offset; - } - else - { - anvil_assert_fail(); - - goto end; - } - } - - /* We can finally fill the write descriptor */ - if (current_binding_items.size() > 0) - { - write_ds_vk.descriptorCount = (uint32_t) current_binding_items.size(); - write_ds_vk.descriptorType = descriptor_type; - write_ds_vk.dstArrayElement = 0; - write_ds_vk.dstBinding = current_binding_index; - write_ds_vk.dstSet = m_descriptor_set; - write_ds_vk.pBufferInfo = (start_ds_buffer_info_items_array_offset != cached_ds_buffer_info_items_array_offset) ? 
&m_cached_ds_info_buffer_info_items_vk[start_ds_buffer_info_items_array_offset] - : nullptr; - write_ds_vk.pImageInfo = (start_ds_image_info_items_array_offset != cached_ds_image_info_items_array_offset) ? &m_cached_ds_info_image_info_items_vk[start_ds_image_info_items_array_offset] - : nullptr; - write_ds_vk.pNext = nullptr; - write_ds_vk.pTexelBufferView = (start_ds_texel_buffer_info_items_array_offset != cached_ds_texel_buffer_info_items_array_offset) ? &m_cached_ds_info_texel_buffer_info_items_vk[start_ds_texel_buffer_info_items_array_offset] - : nullptr; - write_ds_vk.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; - - m_cached_ds_write_items_vk.push_back(write_ds_vk); - } - } - - /* Issue the Vulkan call */ - if (m_cached_ds_write_items_vk.size() > 0) - { - std::shared_ptr device_locked_ptr(m_device_ptr); - - vkUpdateDescriptorSets(device_locked_ptr->get_device_vk(), - (uint32_t) m_cached_ds_write_items_vk.size(), - &m_cached_ds_write_items_vk[0], - 0, /* copyCount */ - nullptr); /* pDescriptorCopies */ } - - m_dirty = false; } - - result = true; - -end: - return result; } /* Please see header for specification */ -std::shared_ptr Anvil::DescriptorSet::create(std::weak_ptr in_device_ptr, - std::shared_ptr in_parent_pool_ptr, - std::shared_ptr in_layout_ptr, - VkDescriptorSet in_descriptor_set) +Anvil::DescriptorSetUniquePtr Anvil::DescriptorSet::create(const Anvil::BaseDevice* in_device_ptr, + Anvil::DescriptorPool* in_parent_pool_ptr, + const Anvil::DescriptorSetLayout* in_layout_ptr, + VkDescriptorSet in_descriptor_set, + MTSafety in_mt_safety) { - std::shared_ptr result_ptr; + const bool is_mt_safe = Anvil::Utils::convert_mt_safety_enum_to_boolean(in_mt_safety, + in_device_ptr); + Anvil::DescriptorSetUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( new Anvil::DescriptorSet(in_device_ptr, in_parent_pool_ptr, in_layout_ptr, - in_descriptor_set) + in_descriptor_set, + is_mt_safe) ); return result_ptr; } -/* Please see header for specification */ -bool Anvil::DescriptorSet::get_binding_array_size(uint32_t in_n_binding, - uint32_t* out_result_ptr) const +void Anvil::DescriptorSet::fill_buffer_info_vk_descriptor(const Anvil::DescriptorSet::BindingItem& in_binding_item, + VkDescriptorBufferInfo* out_descriptor_ptr) const { - bool result = true; + out_descriptor_ptr->buffer = in_binding_item.buffer_ptr->get_buffer(); - if (m_bindings.find(in_n_binding) == m_bindings.end() ) + if (in_binding_item.start_offset != UINT64_MAX) { - result = false; - goto end; + out_descriptor_ptr->offset = in_binding_item.start_offset; + out_descriptor_ptr->range = in_binding_item.size; + } + else + { + out_descriptor_ptr->offset = in_binding_item.buffer_ptr->get_create_info_ptr()->get_start_offset(); + out_descriptor_ptr->range = in_binding_item.buffer_ptr->get_create_info_ptr()->get_size (); } - - *out_result_ptr = static_cast(m_bindings.at(in_n_binding).size() ); - -end: - return result; } -/* Please see header for specification */ -bool Anvil::DescriptorSet::get_binding_descriptor_type(uint32_t in_n_binding, - VkDescriptorType* out_descriptor_type_ptr) const +void Anvil::DescriptorSet::fill_image_info_vk_descriptor(const Anvil::DescriptorSet::BindingItem& in_binding_item, + const bool& in_immutable_samplers_enabled, + VkDescriptorImageInfo* out_descriptor_ptr) const { - bool result = true; + out_descriptor_ptr->imageLayout = static_cast(in_binding_item.image_layout); + out_descriptor_ptr->imageView = (in_binding_item.image_view_ptr != nullptr) ? 
in_binding_item.image_view_ptr->get_image_view() : VK_NULL_HANDLE; - if (m_bindings.find(in_n_binding) == m_bindings.end() ) + if (false == in_immutable_samplers_enabled && + nullptr != in_binding_item.sampler_ptr) { - anvil_assert(m_bindings.find(in_n_binding) != m_bindings.end() ); - - result = false; - goto end; + out_descriptor_ptr->sampler = in_binding_item.sampler_ptr->get_sampler(); } + else + { + out_descriptor_ptr->sampler = VK_NULL_HANDLE; + } +} - *out_descriptor_type_ptr = m_bindings.at(in_n_binding).at(0).type_vk; - -end: - return result; +/* Please see header for specification */ +void Anvil::DescriptorSet::fill_iub_vk_descriptor(const Anvil::DescriptorSet::BindingItem& in_binding_item, + VkWriteDescriptorSetInlineUniformBlockEXT* out_descriptor_ptr) const +{ + out_descriptor_ptr->dataSize = static_cast(in_binding_item.size); + out_descriptor_ptr->pData = in_binding_item.iub_update_data_ptr.get(); + out_descriptor_ptr->pNext = nullptr; + out_descriptor_ptr->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT; } /* Please see header for specification */ -bool Anvil::DescriptorSet::get_combined_image_sampler_binding_properties(uint32_t in_n_binding, - uint32_t in_n_binding_array_item, - VkImageLayout* out_opt_image_layout_ptr, - std::shared_ptr* out_opt_image_view_ptr, - std::shared_ptr* out_opt_sampler_ptr) +bool Anvil::DescriptorSet::get_combined_image_sampler_binding_properties(uint32_t in_n_binding, + uint32_t in_n_binding_array_item, + Anvil::ImageLayout* out_opt_image_layout_ptr, + Anvil::ImageView** out_opt_image_view_ptr_ptr, + Anvil::Sampler** out_opt_sampler_ptr_ptr) { - decltype(m_bindings)::const_iterator binding_iterator = m_bindings.find(in_n_binding); - bool result = true; + decltype(m_binding_ptrs)::const_iterator binding_ptr_iterator = m_binding_ptrs.find(in_n_binding); + bool result = true; - if (binding_iterator == m_bindings.end() ) + if (binding_ptr_iterator == m_binding_ptrs.end() ) { - anvil_assert(binding_iterator != m_bindings.end()); + anvil_assert(binding_ptr_iterator != m_binding_ptrs.end()); result = false; goto end; } - if (binding_iterator->second.size() <= in_n_binding_array_item) + if (binding_ptr_iterator->second.size() <= in_n_binding_array_item) { - anvil_assert(binding_iterator->second.size() > in_n_binding_array_item); + anvil_assert(binding_ptr_iterator->second.size() > in_n_binding_array_item); result = false; goto end; } else { - auto binding_item_iterator = binding_iterator->second.begin() + in_n_binding_array_item; + auto binding_item_iterator = binding_ptr_iterator->second.begin() + in_n_binding_array_item; if (out_opt_image_layout_ptr != nullptr) { - *out_opt_image_layout_ptr = binding_item_iterator->image_layout; + *out_opt_image_layout_ptr = (*binding_item_iterator)->image_layout; } - if (out_opt_image_view_ptr != nullptr) + if (out_opt_image_view_ptr_ptr != nullptr) { - *out_opt_image_view_ptr = binding_item_iterator->image_view_ptr; + *out_opt_image_view_ptr_ptr = (*binding_item_iterator)->image_view_ptr; } - if (out_opt_sampler_ptr != nullptr) + if (out_opt_sampler_ptr_ptr != nullptr) { - *out_opt_sampler_ptr = binding_item_iterator->sampler_ptr; + *out_opt_sampler_ptr_ptr = (*binding_item_iterator)->sampler_ptr; } } @@ -595,17 +465,17 @@ bool Anvil::DescriptorSet::get_combined_image_sampler_binding_properties(uint32_ } /* Please see header for specification */ -bool Anvil::DescriptorSet::get_input_attachment_binding_properties(uint32_t in_n_binding, - uint32_t in_n_binding_array_item, - VkImageLayout* 
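/* Illustrative sketch: the VkWriteDescriptorSetInlineUniformBlockEXT filled by fill_iub_vk_descriptor()
 * above is consumed by chaining it into a VkWriteDescriptorSet, roughly as follows in raw Vulkan.
 * "device" and "descriptor_set" are assumed handles; dstArrayElement and descriptorCount are a byte
 * offset and a byte count respectively, and both must be multiples of 4.
 */
const uint32_t inline_data[4] = {0, 1, 2, 3}; /* example payload */

VkWriteDescriptorSetInlineUniformBlockEXT iub_write = {};
iub_write.sType    = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT;
iub_write.pNext    = nullptr;
iub_write.dataSize = sizeof(inline_data);
iub_write.pData    = inline_data;

VkWriteDescriptorSet write = {};
write.sType           = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
write.pNext           = &iub_write;
write.dstSet          = descriptor_set;
write.dstBinding      = 0;                   /* example binding index    */
write.dstArrayElement = 0;                   /* start offset, in bytes   */
write.descriptorCount = sizeof(inline_data); /* number of bytes to write */
write.descriptorType  = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;

vkUpdateDescriptorSets(device, 1, &write, 0 /* copyCount */, nullptr /* pDescriptorCopies */);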
out_opt_image_layout_ptr, - std::shared_ptr* out_opt_image_view_ptr) const +bool Anvil::DescriptorSet::get_input_attachment_binding_properties(uint32_t in_n_binding, + uint32_t in_n_binding_array_item, + Anvil::ImageLayout* out_opt_image_layout_ptr, + Anvil::ImageView** out_opt_image_view_ptr_ptr) const { - decltype(m_bindings)::const_iterator binding_iterator = m_bindings.find(in_n_binding); - bool result = true; + decltype(m_binding_ptrs)::const_iterator binding_iterator = m_binding_ptrs.find(in_n_binding); + bool result = true; - if (binding_iterator == m_bindings.end() ) + if (binding_iterator == m_binding_ptrs.end() ) { - anvil_assert(binding_iterator != m_bindings.end()); + anvil_assert(binding_iterator != m_binding_ptrs.end()); result = false; goto end; @@ -624,12 +494,12 @@ bool Anvil::DescriptorSet::get_input_attachment_binding_properties(uint32_t if (out_opt_image_layout_ptr != nullptr) { - *out_opt_image_layout_ptr = binding_item_iterator->image_layout; + *out_opt_image_layout_ptr = (*binding_item_iterator)->image_layout; } - if (out_opt_image_view_ptr != nullptr) + if (out_opt_image_view_ptr_ptr != nullptr) { - *out_opt_image_view_ptr = binding_item_iterator->image_view_ptr; + *out_opt_image_view_ptr_ptr = (*binding_item_iterator)->image_view_ptr; } } @@ -638,16 +508,16 @@ bool Anvil::DescriptorSet::get_input_attachment_binding_properties(uint32_t } /* Please see header for specification */ -bool Anvil::DescriptorSet::get_sampler_binding_properties(uint32_t in_n_binding, - uint32_t in_n_binding_array_item, - std::shared_ptr* out_sampler_ptr) const +bool Anvil::DescriptorSet::get_sampler_binding_properties(uint32_t in_n_binding, + uint32_t in_n_binding_array_item, + Anvil::Sampler** out_sampler_ptr_ptr) const { - decltype(m_bindings)::const_iterator binding_iterator = m_bindings.find(in_n_binding); - bool result = true; + decltype(m_binding_ptrs)::const_iterator binding_iterator = m_binding_ptrs.find(in_n_binding); + bool result = true; - if (binding_iterator == m_bindings.end() ) + if (binding_iterator == m_binding_ptrs.end() ) { - anvil_assert(binding_iterator != m_bindings.end()); + anvil_assert(binding_iterator != m_binding_ptrs.end()); result = false; goto end; @@ -664,9 +534,9 @@ bool Anvil::DescriptorSet::get_sampler_binding_properties(uint32_t { auto binding_item_iterator = binding_iterator->second.begin() + in_n_binding_array_item; - if (out_sampler_ptr != nullptr) + if (out_sampler_ptr_ptr != nullptr) { - *out_sampler_ptr = binding_item_iterator->sampler_ptr; + *out_sampler_ptr_ptr = (*binding_item_iterator)->sampler_ptr; } } @@ -675,18 +545,18 @@ bool Anvil::DescriptorSet::get_sampler_binding_properties(uint32_t } /* Please see header for specification */ -bool Anvil::DescriptorSet::get_storage_buffer_binding_properties(uint32_t in_n_binding, - uint32_t in_n_binding_array_item, - std::shared_ptr* out_opt_buffer_ptr, - VkDeviceSize* out_opt_size_ptr, - VkDeviceSize* out_opt_start_offset_ptr) const +bool Anvil::DescriptorSet::get_storage_buffer_binding_properties(uint32_t in_n_binding, + uint32_t in_n_binding_array_item, + Anvil::Buffer** out_opt_buffer_ptr_ptr, + VkDeviceSize* out_opt_size_ptr, + VkDeviceSize* out_opt_start_offset_ptr) const { - decltype(m_bindings)::const_iterator binding_iterator = m_bindings.find(in_n_binding); - bool result = true; + decltype(m_binding_ptrs)::const_iterator binding_iterator = m_binding_ptrs.find(in_n_binding); + bool result = true; - if (binding_iterator == m_bindings.end() ) + if (binding_iterator == m_binding_ptrs.end() ) { - 
anvil_assert(binding_iterator != m_bindings.end()); + anvil_assert(binding_iterator != m_binding_ptrs.end()); result = false; goto end; @@ -703,19 +573,19 @@ bool Anvil::DescriptorSet::get_storage_buffer_binding_properties(uint32_t { auto binding_item_iterator = binding_iterator->second.begin() + in_n_binding_array_item; - if (out_opt_buffer_ptr != nullptr) + if (out_opt_buffer_ptr_ptr != nullptr) { - *out_opt_buffer_ptr = binding_item_iterator->buffer_ptr; + *out_opt_buffer_ptr_ptr = (*binding_item_iterator)->buffer_ptr; } if (out_opt_size_ptr != nullptr) { - *out_opt_size_ptr = binding_item_iterator->size; + *out_opt_size_ptr = (*binding_item_iterator)->size; } if (out_opt_start_offset_ptr != nullptr) { - *out_opt_start_offset_ptr = binding_item_iterator->start_offset; + *out_opt_start_offset_ptr = (*binding_item_iterator)->start_offset; } } @@ -724,16 +594,16 @@ bool Anvil::DescriptorSet::get_storage_buffer_binding_properties(uint32_t } /* Please see header for specification */ -bool Anvil::DescriptorSet::get_storage_texel_buffer_binding_properties(uint32_t in_n_binding, - uint32_t in_n_binding_array_item, - std::shared_ptr* out_opt_buffer_view_ptr) const +bool Anvil::DescriptorSet::get_storage_texel_buffer_binding_properties(uint32_t in_n_binding, + uint32_t in_n_binding_array_item, + Anvil::BufferView** out_opt_buffer_view_ptr_ptr) const { - decltype(m_bindings)::const_iterator binding_iterator = m_bindings.find(in_n_binding); - bool result = true; + decltype(m_binding_ptrs)::const_iterator binding_iterator = m_binding_ptrs.find(in_n_binding); + bool result = true; - if (binding_iterator == m_bindings.end() ) + if (binding_iterator == m_binding_ptrs.end() ) { - anvil_assert(binding_iterator != m_bindings.end()); + anvil_assert(binding_iterator != m_binding_ptrs.end()); result = false; goto end; @@ -750,9 +620,9 @@ bool Anvil::DescriptorSet::get_storage_texel_buffer_binding_properties(uint32_t { auto binding_item_iterator = binding_iterator->second.begin() + in_n_binding_array_item; - if (out_opt_buffer_view_ptr != nullptr) + if (out_opt_buffer_view_ptr_ptr != nullptr) { - *out_opt_buffer_view_ptr = binding_item_iterator->buffer_view_ptr; + *out_opt_buffer_view_ptr_ptr = (*binding_item_iterator)->buffer_view_ptr; } } @@ -760,18 +630,6 @@ bool Anvil::DescriptorSet::get_storage_texel_buffer_binding_properties(uint32_t return result; } -/** Entry-point called back whenever a new binding is added to the parent layout. - * - * This will resize a number of internally managed vectors. - * - * @param layout_raw_ptr Ignored. - * - **/ -void Anvil::DescriptorSet::on_binding_added_to_layout() -{ - alloc_bindings(); -} - /** Called back whenever parent descriptor pool is reset. * * Resets m_descriptor_set back to VK_NULL_HANDLE and marks the descriptor set as unusable. @@ -779,21 +637,515 @@ void Anvil::DescriptorSet::on_binding_added_to_layout() **/ void Anvil::DescriptorSet::on_parent_pool_reset() { - /* This descriptor set instance is no longer usable. - * - * To restore functionality, a new Vulkan DS handle should be assigned to this instance - * by calling set_new_vk_handle() */ + /* This descriptor set instance is no longer usable. 
*/ m_descriptor_set = VK_NULL_HANDLE; m_unusable = true; } -/** Please see header for specification */ -void Anvil::DescriptorSet::set_new_vk_handle(VkDescriptorSet in_ds) +/* Please see header for specification */ +bool Anvil::DescriptorSet::set_inline_uniform_block_binding_data(const BindingIndex& in_binding_index, + const uint32_t& in_start_offset, + const uint32_t& in_size, + const void* in_raw_data_ptr, + const bool& in_should_cache_raw_data) +{ + BindingItemUniquePtrs& iub_binding_item_ptrs = m_binding_ptrs[in_binding_index]; + auto new_iub_binding_item_ptr = BindingItemUniquePtr(new BindingItem() ); + bool result = false; + + anvil_assert(!m_unusable); + anvil_assert((in_start_offset % 4) == 0); + anvil_assert((in_size % 4) == 0); + + if (in_should_cache_raw_data) + { + new_iub_binding_item_ptr->iub_update_data_ptr = decltype(new_iub_binding_item_ptr->iub_update_data_ptr)(new uint8_t[in_size], + [](uint8_t* in_ptr){delete [] in_ptr;}); + + memcpy(new_iub_binding_item_ptr->iub_update_data_ptr.get(), + in_raw_data_ptr, + in_size); + } + else + { + new_iub_binding_item_ptr->iub_update_data_ptr = decltype(new_iub_binding_item_ptr->iub_update_data_ptr)(const_cast(reinterpret_cast(in_raw_data_ptr) ), + [](uint8_t*){} ); + } + + new_iub_binding_item_ptr->dirty = true; + new_iub_binding_item_ptr->size = in_size; + new_iub_binding_item_ptr->start_offset = in_start_offset; + new_iub_binding_item_ptr->type_vk = Anvil::DescriptorType::INLINE_UNIFORM_BLOCK; + + iub_binding_item_ptrs.push_back( + std::move(new_iub_binding_item_ptr) + ); + + m_dirty = true; + result = true; + + return result; +} + +bool Anvil::DescriptorSet::update(const DescriptorSetUpdateMethod& in_update_method) const +{ + bool result; + + lock(); + { + switch (in_update_method) + { + case Anvil::DescriptorSetUpdateMethod::CORE: + { + result = update_using_core_method(); + + break; + } + + case Anvil::DescriptorSetUpdateMethod::TEMPLATE: + { + result = update_using_template_method(); + + break; + } + + default: + { + anvil_assert_fail(); + + result = false; + } + } + } + unlock(); + + return result; +} + +/* Please see header for specification */ +bool Anvil::DescriptorSet::update_using_core_method() const +{ + std::vector iub_binding_indices; + const auto layout_info_ptr = m_layout_ptr->get_create_info(); + bool result = false; + + anvil_assert(!m_unusable); + + if (m_dirty) + { + uint32_t cached_ds_buffer_info_items_array_offset = 0; + uint32_t cached_ds_image_info_items_array_offset = 0; + uint32_t cached_ds_iub_array_offset = 0; + uint32_t cached_ds_texel_buffer_info_items_array_offset = 0; + const uint32_t n_bindings = static_cast(m_binding_ptrs.size() ); + + m_cached_ds_info_buffer_info_items_vk.clear (); + m_cached_ds_info_image_info_items_vk.clear (); + m_cached_ds_info_texel_buffer_info_items_vk.clear(); + m_cached_ds_write_items_vk.clear (); + + { + uint32_t n_max_ds_info_items_to_cache = 0; + + for (auto& binding_map_item : m_binding_ptrs) + { + const uint32_t n_current_binding_items = static_cast(binding_map_item.second.size() ); + + n_max_ds_info_items_to_cache += n_current_binding_items; + } + + m_cached_ds_info_buffer_info_items_vk.reserve (n_max_ds_info_items_to_cache); + m_cached_ds_info_image_info_items_vk.reserve (n_max_ds_info_items_to_cache); + m_cached_ds_info_texel_buffer_info_items_vk.reserve(n_max_ds_info_items_to_cache); + } + + for (uint32_t n_binding = 0; + n_binding < n_bindings; + ++n_binding) + { + Anvil::DescriptorBindingFlags current_binding_flags; + uint32_t current_binding_index; + uint32_t 
descriptor_array_size = 0; + Anvil::DescriptorType descriptor_type; + bool immutable_samplers_enabled = false; + uint32_t start_ds_buffer_info_items_array_offset = cached_ds_buffer_info_items_array_offset; + uint32_t start_ds_image_info_items_array_offset = cached_ds_image_info_items_array_offset; + uint32_t start_ds_iub_array_offset = cached_ds_iub_array_offset; + uint32_t start_ds_texel_buffer_info_items_array_offset = cached_ds_texel_buffer_info_items_array_offset; + VkWriteDescriptorSet write_ds_vk; + + if (!layout_info_ptr->get_binding_properties_by_index_number(n_binding, + ¤t_binding_index, + &descriptor_type, + &descriptor_array_size, + nullptr, /* out_opt_stage_flags_ptr */ + &immutable_samplers_enabled, + ¤t_binding_flags) ) + { + anvil_assert_fail(); + } + + /* For each array item, initialize a descriptor info item.. */ + BindingItemUniquePtrs& current_binding_item_ptrs = m_binding_ptrs.at(current_binding_index); + uint32_t n_current_binding_items = static_cast(current_binding_item_ptrs.size() ); + int32_t n_last_binding_item = -1; + + for (uint32_t n_current_binding_item = 0; + n_current_binding_item < n_current_binding_items; + ++n_current_binding_item) + { + auto& current_binding_item_ptr = current_binding_item_ptrs.at(n_current_binding_item); + bool needs_write_item = ((n_current_binding_item + 1) == n_current_binding_items); + + if (descriptor_type == Anvil::DescriptorType::INLINE_UNIFORM_BLOCK) + { + /* Binding items for this descriptor type correspond internally to consecutive update requests which have been scheduled for + * the same IUB binding. As per API restrictions, only one such update can be carried out using a single VkWriteDescriptorSet struct. + */ + n_last_binding_item = static_cast(current_binding_item_ptr->start_offset) - 1; //< write_ds_vk.dstArrayElement corresponds to start offset for inline uniform blocks + + if (n_current_binding_item == 0) + { + iub_binding_indices.push_back(n_binding); + } + } + + /* TODO: For arrayed binding items, avoid updating all binding items every time baking is triggered. 
*/ + if ( current_binding_item_ptr != nullptr && + !current_binding_item_ptr->dirty && + n_current_binding_item == 0 && + n_current_binding_items == 1) + { + continue; + } + + if (current_binding_item_ptr != nullptr && + current_binding_item_ptr->buffer_ptr != nullptr) + { + VkDescriptorBufferInfo buffer_info; + + fill_buffer_info_vk_descriptor(*current_binding_item_ptr, + &buffer_info); + + m_cached_ds_info_buffer_info_items_vk.push_back(buffer_info); + + ++cached_ds_buffer_info_items_array_offset; + } + else + if (current_binding_item_ptr != nullptr && + current_binding_item_ptr->buffer_view_ptr != nullptr) + { + m_cached_ds_info_texel_buffer_info_items_vk.push_back(current_binding_item_ptr->buffer_view_ptr->get_buffer_view() ); + + ++cached_ds_texel_buffer_info_items_array_offset; + } + else + if ( current_binding_item_ptr != nullptr && + (current_binding_item_ptr->image_view_ptr != nullptr || + current_binding_item_ptr->sampler_ptr != nullptr) ) + { + VkDescriptorImageInfo image_info; + + fill_image_info_vk_descriptor(*current_binding_item_ptr, + immutable_samplers_enabled, + &image_info); + + m_cached_ds_info_image_info_items_vk.push_back(image_info); + + ++cached_ds_image_info_items_array_offset; + } + else + if (descriptor_type == Anvil::DescriptorType::INLINE_UNIFORM_BLOCK) + { + VkWriteDescriptorSetInlineUniformBlockEXT iub_info; + + fill_iub_vk_descriptor(*current_binding_item_ptr, + &iub_info); + + m_cached_ds_write_iub_items_vk.at(start_ds_iub_array_offset) = iub_info; + + needs_write_item = true; + cached_ds_iub_array_offset ++; + } + else + { + /* Arrayed bindings are only permitted if the binding has been created with the PARTIALLY_BOUND flag */ + if ((current_binding_flags & Anvil::DescriptorBindingFlagBits::PARTIALLY_BOUND_BIT) == 0) + { + anvil_assert_fail(); + + goto end; + } + + /* Need to cache a write item at this point since current binding has not been assigned a descriptor */ + needs_write_item = true; + } + + if (needs_write_item) + { + uint32_t n_descriptors = 0; + + if (descriptor_type == Anvil::DescriptorType::INLINE_UNIFORM_BLOCK) + { + anvil_assert((cached_ds_buffer_info_items_array_offset - start_ds_buffer_info_items_array_offset) + + (cached_ds_image_info_items_array_offset - start_ds_image_info_items_array_offset) + + (cached_ds_texel_buffer_info_items_array_offset - start_ds_texel_buffer_info_items_array_offset) == 0); + + n_descriptors = m_cached_ds_write_iub_items_vk.at(start_ds_iub_array_offset).dataSize; + + anvil_assert(n_descriptors != 0); + } + else + { + anvil_assert(cached_ds_iub_array_offset == start_ds_iub_array_offset); + + n_descriptors = (cached_ds_buffer_info_items_array_offset - start_ds_buffer_info_items_array_offset) + + (cached_ds_image_info_items_array_offset - start_ds_image_info_items_array_offset) + + (cached_ds_texel_buffer_info_items_array_offset - start_ds_texel_buffer_info_items_array_offset); + } + + if (n_descriptors > 0) + { + write_ds_vk.descriptorCount = n_descriptors; + write_ds_vk.descriptorType = static_cast(descriptor_type); + write_ds_vk.dstArrayElement = n_last_binding_item + 1; + write_ds_vk.dstBinding = current_binding_index; + write_ds_vk.dstSet = m_descriptor_set; + write_ds_vk.pBufferInfo = (start_ds_buffer_info_items_array_offset != cached_ds_buffer_info_items_array_offset) ? &m_cached_ds_info_buffer_info_items_vk[start_ds_buffer_info_items_array_offset] + : nullptr; + write_ds_vk.pImageInfo = (start_ds_image_info_items_array_offset != cached_ds_image_info_items_array_offset) ? 
&m_cached_ds_info_image_info_items_vk[start_ds_image_info_items_array_offset] + : nullptr; + write_ds_vk.pNext = nullptr; + write_ds_vk.pTexelBufferView = (start_ds_texel_buffer_info_items_array_offset != cached_ds_texel_buffer_info_items_array_offset) ? &m_cached_ds_info_texel_buffer_info_items_vk[start_ds_texel_buffer_info_items_array_offset] + : nullptr; + write_ds_vk.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + + anvil_assert(write_ds_vk.descriptorCount != 0); + + m_cached_ds_write_items_vk.push_back(write_ds_vk); + } + + if (start_ds_iub_array_offset - cached_ds_iub_array_offset) + { + /* TODO: This is ugly but will always work, as you can't use vkUpdateDescriptorSets() for any other updates if inline uniform block's contents is + * being refreshed. Still, we should be using struct chains instead here. + */ + anvil_assert((cached_ds_iub_array_offset - start_ds_iub_array_offset) == 1); + + m_cached_ds_write_items_vk.back().pNext = &m_cached_ds_write_iub_items_vk.at(start_ds_iub_array_offset); + } + + n_last_binding_item = n_current_binding_item; + start_ds_buffer_info_items_array_offset = cached_ds_buffer_info_items_array_offset; + start_ds_image_info_items_array_offset = cached_ds_image_info_items_array_offset; + start_ds_iub_array_offset = cached_ds_iub_array_offset; + start_ds_texel_buffer_info_items_array_offset = cached_ds_texel_buffer_info_items_array_offset; + } + + if (current_binding_item_ptr != nullptr) + { + current_binding_item_ptr->dirty = false; + } + } + } + + /* Issue the Vulkan call */ + if (m_cached_ds_write_items_vk.size() > 0) + { + Anvil::Vulkan::vkUpdateDescriptorSets(m_device_ptr->get_device_vk(), + static_cast(m_cached_ds_write_items_vk.size() ), + &m_cached_ds_write_items_vk[0], + 0, /* copyCount */ + nullptr); /* pDescriptorCopies */ + + /* If any IUB bindings have been processed, wipe out binding items associated with these, as the corresponding updates have already + * been performed. + */ + for (const auto& current_iub_binding_index : iub_binding_indices) + { + auto& current_iub_binding = m_binding_ptrs.at(current_iub_binding_index); + + current_iub_binding.clear(); + } + } + + m_dirty = false; + } + + result = true; + +end: + + return result; +} + +bool Anvil::DescriptorSet::update_using_template_method() const { - anvil_assert(m_unusable); - anvil_assert(in_ds != VK_NULL_HANDLE); + std::vector data_vector; + const auto layout_info_ptr = m_layout_ptr->get_create_info(); + bool result = false; + + if (!m_device_ptr->get_extension_info()->khr_descriptor_update_template() ) + { + anvil_assert(m_device_ptr->get_extension_info()->khr_descriptor_update_template() ); - m_descriptor_set = in_ds; - m_dirty = true; - m_unusable = false; + goto end; + } + + anvil_assert(!m_unusable); + + if (m_dirty) + { + /* First build up a vector of template entries we need the template to encapsulate. While on it, + * also construct an array of descriptors we're going to pass along the template. 
+ */ + const uint32_t n_bindings = static_cast(m_binding_ptrs.size() ); + decltype(m_template_object_map)::const_iterator template_object_iterator; + + m_template_entries.clear (); + m_template_raw_data.clear(); + + for (uint32_t n_binding = 0; + n_binding < n_bindings; + ++n_binding) + { + const std::vector* binding_element_ptr_vec_ptr = nullptr; + uint32_t current_binding_index = UINT32_MAX; + Anvil::DescriptorType descriptor_type = Anvil::DescriptorType::UNKNOWN; + bool immutable_samplers_enabled = false; + uint32_t n_binding_elements = 0; + + if (!layout_info_ptr->get_binding_properties_by_index_number(n_binding, + ¤t_binding_index, + &descriptor_type, + nullptr, /* out_opt_descriptor_array_size_ptr */ + nullptr, /* out_opt_stage_flags_ptr */ + &immutable_samplers_enabled, + nullptr) ) /* out_opt_flags_ptr */ + { + anvil_assert_fail(); + + result = false; + goto end; + } + + binding_element_ptr_vec_ptr = &m_binding_ptrs.at(current_binding_index); + n_binding_elements = static_cast(binding_element_ptr_vec_ptr->size() ); + + for (uint32_t n_binding_element = 0; + n_binding_element < n_binding_elements; + ++n_binding_element) + { + const auto& current_binding_element = *binding_element_ptr_vec_ptr->at(n_binding_element); + const uint32_t current_template_raw_data_size = static_cast(m_template_raw_data.size() ); + + if (!current_binding_element.dirty) + { + continue; + } + + /* Append the new descriptor to the raw data vector. + * + * TODO: Consecutive dirty binding elements could be merged into a single item here. + */ + if (current_binding_element.buffer_ptr != nullptr) + { + m_template_raw_data.resize(current_template_raw_data_size + sizeof(VkDescriptorBufferInfo) ); + + fill_buffer_info_vk_descriptor(current_binding_element, + reinterpret_cast(&m_template_raw_data.at(current_template_raw_data_size) )); + } + else + if (current_binding_element.buffer_view_ptr != nullptr) + { + m_template_raw_data.resize(current_template_raw_data_size + sizeof(VkBufferView) ); + + *reinterpret_cast(&m_template_raw_data.at(current_template_raw_data_size) ) = current_binding_element.buffer_view_ptr->get_buffer_view(); + } + else + if (current_binding_element.image_view_ptr != nullptr) + { + m_template_raw_data.resize(current_template_raw_data_size + sizeof(VkDescriptorImageInfo) ); + + fill_image_info_vk_descriptor(current_binding_element, + immutable_samplers_enabled, + reinterpret_cast(&m_template_raw_data.at(current_template_raw_data_size) )); + } + else + if (current_binding_element.type_vk == Anvil::DescriptorType::INLINE_UNIFORM_BLOCK) + { + m_template_raw_data.resize(current_template_raw_data_size + sizeof(VkWriteDescriptorSetInlineUniformBlockEXT) ); + + fill_iub_vk_descriptor(current_binding_element, + reinterpret_cast(&m_template_raw_data.at(current_template_raw_data_size) )); + } + else + { + anvil_assert_fail(); + + result = false; + goto end; + } + + m_template_entries.push_back( + DescriptorUpdateTemplateEntry(descriptor_type, + n_binding_element, + current_binding_index, + 1, /* in_n_descriptors */ + current_template_raw_data_size, + 0) /* in_stride */ + ); + } + } + + if (m_template_entries.size() == 0) + { + goto end; + } + else + { + anvil_assert(m_template_raw_data.size() > 0); + } + + /* Has a template object matching our needs already been created in the past? */ + template_object_iterator = m_template_object_map.find(m_template_entries); + + if (template_object_iterator == m_template_object_map.end() ) + { + /* Need to create a new template object.. 
*/ + m_template_object_map[m_template_entries] = std::move( + Anvil::DescriptorUpdateTemplate::create_for_descriptor_set_updates(m_device_ptr, + m_layout_ptr, + m_template_entries, + Anvil::MTSafety::DISABLED) ); + + /* Update the iterator */ + template_object_iterator = m_template_object_map.find(m_template_entries); + anvil_assert(template_object_iterator != m_template_object_map.end() ); + + if (template_object_iterator->second == nullptr) + { + anvil_assert(template_object_iterator->second != nullptr); + + result = false; + goto end; + } + } + + /* Issue the Vulkan call. + * + * NOTE: The order MUST be reversed, since update_descriptor_set() calls DescriptorSet::get_descriptor_set_vk() which would invoke update() + * had m_dirty been set to true. + */ + m_dirty = false; + + template_object_iterator->second->update_descriptor_set(this, + &m_template_raw_data.at(0) ); + } + + result = true; + +end: + + return result; } \ No newline at end of file diff --git a/src/wrappers/descriptor_set_group.cpp b/src/wrappers/descriptor_set_group.cpp index af9d6f9c..cb93bae1 100644 --- a/src/wrappers/descriptor_set_group.cpp +++ b/src/wrappers/descriptor_set_group.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -21,81 +21,151 @@ // #include "misc/debug.h" +#include "misc/descriptor_pool_create_info.h" #include "misc/object_tracker.h" #include "wrappers/descriptor_pool.h" #include "wrappers/descriptor_set.h" #include "wrappers/descriptor_set_group.h" #include "wrappers/descriptor_set_layout.h" +#include "wrappers/descriptor_set_layout_manager.h" +#include "wrappers/device.h" #include "wrappers/pipeline_layout.h" #include /* Please see header for specification */ -Anvil::DescriptorSetGroup::DescriptorSetGroup(std::weak_ptr in_device_ptr, - bool in_releaseable_sets, - uint32_t in_n_sets) - :m_descriptor_pool_dirty (false), +Anvil::DescriptorSetGroup::DescriptorSetGroup(const Anvil::BaseDevice* in_device_ptr, + std::vector in_ds_create_info_ptrs, + const Anvil::DescriptorPoolCreateFlags& in_descriptor_pool_create_flags, + MTSafety in_mt_safety, + const std::vector& in_opt_overhead_allocations) + :MTSafetySupportProvider (Anvil::Utils::convert_mt_safety_enum_to_boolean(in_mt_safety, + in_device_ptr) ), + m_descriptor_pool_create_flags(in_descriptor_pool_create_flags), m_device_ptr (in_device_ptr), - m_layout_modifications_blocked(false), - m_n_instantiated_sets (0), - m_n_sets (in_n_sets), - m_parent_dsg_ptr (nullptr), - m_releaseable_sets (in_releaseable_sets) + m_n_unique_dses (0), + m_parent_dsg_ptr (nullptr) { - anvil_assert(in_n_sets >= 1); + auto ds_layout_manager_ptr = m_device_ptr->get_descriptor_set_layout_manager(); - memset(m_overhead_allocations, - 0, - sizeof(m_overhead_allocations) ); + for (const auto& overhead_alloc : in_opt_overhead_allocations) + { + m_descriptor_type_properties[overhead_alloc.descriptor_type].n_overhead_allocations = overhead_alloc.n_overhead_allocations; + } /* Initialize descriptor pool */ - m_descriptor_pool_ptr = Anvil::DescriptorPool::create(in_device_ptr, - in_n_sets, - in_releaseable_sets); + m_n_unique_dses = static_cast(in_ds_create_info_ptrs.size() ); + + for (uint32_t n_layout_info_ptr = 0; + n_layout_info_ptr < static_cast(in_ds_create_info_ptrs.size() ); + 
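/* Illustrative sketch: the update_using_template_method() path above ultimately maps to the
 * VK_KHR_descriptor_update_template entrypoints. "device", "ds_layout", "descriptor_set" and "buffer"
 * are assumed handles; binding, offset and type values are example values only.
 */
VkDescriptorUpdateTemplateEntryKHR entry = {};
entry.dstBinding      = 0;
entry.dstArrayElement = 0;
entry.descriptorCount = 1;
entry.descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
entry.offset          = 0;                              /* offset into the raw data blob */
entry.stride          = sizeof(VkDescriptorBufferInfo);

VkDescriptorUpdateTemplateCreateInfoKHR template_create_info = {};
template_create_info.sType                      = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR;
template_create_info.descriptorUpdateEntryCount = 1;
template_create_info.pDescriptorUpdateEntries   = &entry;
template_create_info.templateType               = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR;
template_create_info.descriptorSetLayout        = ds_layout;

VkDescriptorUpdateTemplateKHR update_template = VK_NULL_HANDLE;
vkCreateDescriptorUpdateTemplateKHR(device, &template_create_info, nullptr /* pAllocator */, &update_template);

VkDescriptorBufferInfo buffer_info = {buffer, 0 /* offset */, VK_WHOLE_SIZE};
vkUpdateDescriptorSetWithTemplateKHR(device, descriptor_set, update_template, &buffer_info);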
++n_layout_info_ptr) + { + auto& current_layout_info_ptr = in_ds_create_info_ptrs.at(n_layout_info_ptr); + + m_descriptor_sets[n_layout_info_ptr].reset( + new DescriptorSetInfoContainer() + ); + + if (current_layout_info_ptr != nullptr) + { + if (ds_layout_manager_ptr != nullptr) + { + ds_layout_manager_ptr->get_layout(current_layout_info_ptr.get(), + &m_descriptor_sets.at(n_layout_info_ptr)->layout_ptr); + + anvil_assert(m_descriptor_sets.at(n_layout_info_ptr)->layout_ptr != nullptr); + } + else + { + /* Required at device bring-up time. */ + m_descriptor_sets.at(n_layout_info_ptr)->layout_ptr = Anvil::DescriptorSetLayout::create(std::move(current_layout_info_ptr), + in_device_ptr, + in_mt_safety); + } + + m_ds_create_info_ptrs.push_back(m_descriptor_sets.at(n_layout_info_ptr)->layout_ptr->get_create_info() ); + } + else + { + m_ds_create_info_ptrs.push_back(nullptr); + } + } /* Register the object */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_DESCRIPTOR_SET_GROUP, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::ANVIL_DESCRIPTOR_SET_GROUP, this); } /* Please see header for specification */ -Anvil::DescriptorSetGroup::DescriptorSetGroup(std::shared_ptr in_parent_dsg_ptr, - bool in_releaseable_sets) - :m_descriptor_pool_dirty (true), - m_descriptor_pool_ptr (nullptr), +Anvil::DescriptorSetGroup::DescriptorSetGroup(const DescriptorSetGroup* in_parent_dsg_ptr) + :MTSafetySupportProvider (in_parent_dsg_ptr->is_mt_safe() ), + m_descriptor_pool_create_flags(in_parent_dsg_ptr->m_descriptor_pool_create_flags), m_device_ptr (in_parent_dsg_ptr->m_device_ptr), - m_layout_modifications_blocked(true), - m_n_sets (UINT32_MAX), - m_parent_dsg_ptr (in_parent_dsg_ptr), - m_releaseable_sets (in_releaseable_sets) + m_parent_dsg_ptr (in_parent_dsg_ptr) { - anvil_assert(in_parent_dsg_ptr != nullptr); - anvil_assert(in_parent_dsg_ptr->m_parent_dsg_ptr == nullptr); - anvil_assert(in_parent_dsg_ptr->m_descriptor_pool_ptr->are_sets_releaseable() == in_releaseable_sets); + auto descriptor_set_layout_manager_ptr = m_device_ptr->get_descriptor_set_layout_manager(); + + anvil_assert(in_parent_dsg_ptr->m_parent_dsg_ptr == nullptr); - memset(m_overhead_allocations, - 0, - sizeof(m_overhead_allocations) ); + m_descriptor_type_properties = in_parent_dsg_ptr->m_descriptor_type_properties; + + for (auto& current_descriptor_type_props : m_descriptor_type_properties) + { + current_descriptor_type_props.second.n_overhead_allocations = 0; + } /* Initialize descriptor pool */ - m_descriptor_pool_ptr = Anvil::DescriptorPool::create(in_parent_dsg_ptr->m_device_ptr, - in_parent_dsg_ptr->m_n_sets, - in_releaseable_sets); + { + auto dp_create_info_ptr = Anvil::DescriptorPoolCreateInfo::create(in_parent_dsg_ptr->m_device_ptr, + in_parent_dsg_ptr->m_descriptor_pool_ptr->get_create_info_ptr()->get_n_maximum_sets(), + in_parent_dsg_ptr->m_descriptor_pool_ptr->get_create_info_ptr()->get_create_flags (), + Anvil::Utils::convert_boolean_to_mt_safety_enum(is_mt_safe() )); + uint32_t total_pool_size = 0; - /* Configure the new DSG instance to use the specified parent DSG */ - m_descriptor_sets = in_parent_dsg_ptr->m_descriptor_sets; + for (const auto& current_descriptor_type_props : m_descriptor_type_properties) + { + dp_create_info_ptr->set_n_descriptors_for_descriptor_type(current_descriptor_type_props.first, + current_descriptor_type_props.second.pool_size); + + total_pool_size += current_descriptor_type_props.second.pool_size; + } + + if (total_pool_size == 0) + { + /* Request space for anything. 
This is required for correct dummy DSG support, as zero-sized pools are forbidden by the spec. */ + anvil_assert(dp_create_info_ptr->get_n_descriptors_for_descriptor_type(Anvil::DescriptorType::SAMPLER) == 0); + + dp_create_info_ptr->set_n_descriptors_for_descriptor_type(Anvil::DescriptorType::SAMPLER, + 1); + } - for (auto ds : m_descriptor_sets) + m_descriptor_pool_ptr = Anvil::DescriptorPool::create(std::move(dp_create_info_ptr) ); + } + + /* Configure the new DSG instance to use the specified parent DSG */ + for (const auto& ds : in_parent_dsg_ptr->m_descriptor_sets) { - m_descriptor_sets[ds.first].descriptor_set_ptr = nullptr; + m_descriptor_sets[ds.first].reset( + new DescriptorSetInfoContainer() + ); + + if (ds.second->layout_ptr != nullptr) + { + /* NOTE: We must use pipeline layout manager to acquire a clone of the input layout, instead of wrapping + * the raw pointer inside a customized std::unique_ptr. Reason for this is the descriptor set layout manager + * manually reference-counts users of each instantiated layout, and the counter is only bumped at get_layout() + * call time. + */ + descriptor_set_layout_manager_ptr->get_layout(ds.second->layout_ptr->get_create_info(), + &m_descriptor_sets.at(ds.first)->layout_ptr); + + anvil_assert(m_descriptor_sets.at(ds.first)->layout_ptr.get() == ds.second->layout_ptr.get() ); + } } - /* The parent descriptor set group should be locked, so that it is no longer possible to modify - * its descriptor set layout. Introducing support for such behavior would require significant - * work and testing. */ - m_parent_dsg_ptr->m_layout_modifications_blocked = true; + m_n_unique_dses = in_parent_dsg_ptr->m_n_unique_dses; /* Register the object */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_DESCRIPTOR_SET_GROUP, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::ANVIL_DESCRIPTOR_SET_GROUP, this); } @@ -103,220 +173,211 @@ Anvil::DescriptorSetGroup::DescriptorSetGroup(std::shared_ptrunregister_object(Anvil::OBJECT_TYPE_DESCRIPTOR_SET_GROUP, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::ANVIL_DESCRIPTOR_SET_GROUP, this); } -/* Please see header for specification */ -bool Anvil::DescriptorSetGroup::add_binding(uint32_t in_n_set, - uint32_t in_binding, - VkDescriptorType in_type, - uint32_t in_n_elements, - VkShaderStageFlags in_shader_stages, - const std::shared_ptr* in_opt_immutable_sampler_ptrs) +/** Re-creates internally-maintained descriptor pool. **/ +bool Anvil::DescriptorSetGroup::bake_descriptor_pool() { - bool result = false; - - /* Sanity check: The DSG must not be locked. If you run into this assertion failure, you are trying - * to add a new binding to a descriptor set group which shares its layout with other - * DSGs. This modification would have invalidated layouts used by children DSGs, and - * currently there's no mechanism implemented to inform them about such event. 
*/ - anvil_assert(!m_layout_modifications_blocked); - - /* Sanity check: make sure no more than the number of descriptor sets specified at creation time is - * used - */ - if (m_descriptor_sets.find(in_n_set) == m_descriptor_sets.end() ) - { - if (m_descriptor_sets.size() > (m_n_instantiated_sets + 1) ) - { - anvil_assert(!(m_descriptor_sets.size() > (m_n_instantiated_sets + 1)) ); + Anvil::DescriptorPoolCreateFlags flags = m_descriptor_pool_create_flags; + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + std::unordered_map > n_descriptors_needed_map; + bool result = false; - goto end; - } - - m_descriptor_sets[in_n_set].layout_ptr = Anvil::DescriptorSetLayout::create(m_device_ptr); + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); } - /* Pass the call down to DescriptorSet instance */ - result = m_descriptor_sets[in_n_set].layout_ptr->add_binding(in_binding, - in_type, - in_n_elements, - in_shader_stages, - in_opt_immutable_sampler_ptrs); - - m_descriptor_pool_dirty = true; -end: - return result; -} - -/** Re-creates internally-maintained descriptor pool. **/ -void Anvil::DescriptorSetGroup::bake_descriptor_pool() -{ - anvil_assert(m_descriptor_pool_dirty); - anvil_assert(m_descriptor_sets.size() != 0); + if (m_descriptor_sets.size() == 0) + { + goto end; + } /* Count how many descriptor of what types need to have pool space allocated */ - uint32_t n_descriptors_needed_array[VK_DESCRIPTOR_TYPE_RANGE_SIZE]; - - memset(n_descriptors_needed_array, - 0, - sizeof(n_descriptors_needed_array) ); - - for (auto current_ds : m_descriptor_sets) + for (auto& current_ds : m_descriptor_sets) { - uint32_t n_ds_bindings; + const Anvil::DescriptorSetCreateInfo* current_ds_create_info_ptr = nullptr; + uint32_t n_ds_bindings; + uint32_t variable_descriptor_binding_index = UINT32_MAX; + uint32_t variable_descriptor_binding_size = 0; - if (current_ds.second.layout_ptr == nullptr) + if (current_ds.second->layout_ptr == nullptr) { - continue; + current_ds_create_info_ptr = m_device_ptr->get_dummy_descriptor_set_layout()->get_create_info(); + } + else + { + current_ds_create_info_ptr = current_ds.second->layout_ptr->get_create_info(); } - n_ds_bindings = static_cast(current_ds.second.layout_ptr->get_n_bindings() ); + n_ds_bindings = static_cast(current_ds_create_info_ptr->get_n_bindings() ); + + current_ds_create_info_ptr->contains_variable_descriptor_count_binding(&variable_descriptor_binding_index, + &variable_descriptor_binding_size); for (uint32_t n_ds_binding = 0; n_ds_binding < n_ds_bindings; ++n_ds_binding) { - uint32_t ds_binding_array_size; - VkDescriptorType ds_binding_type; + uint32_t ds_binding_array_size; + Anvil::DescriptorBindingFlags ds_binding_flags; + uint32_t ds_binding_index = UINT32_MAX; + Anvil::DescriptorType ds_binding_type = Anvil::DescriptorType::UNKNOWN; + + current_ds_create_info_ptr->get_binding_properties_by_index_number(n_ds_binding, + &ds_binding_index, + &ds_binding_type, + &ds_binding_array_size, + nullptr, /* out_opt_stage_flags_ptr */ + nullptr, /* out_opt_immutable_samplers_enabled_ptr */ + &ds_binding_flags); + + if (ds_binding_index == variable_descriptor_binding_index) + { + ds_binding_array_size = variable_descriptor_binding_size; + } - current_ds.second.layout_ptr->get_binding_properties(n_ds_binding, - nullptr, /* out_opt_binding_index_ptr */ - &ds_binding_type, - &ds_binding_array_size, - nullptr, /* out_opt_stage_flags_ptr */ - nullptr); /* out_opt_immutable_samplers_enabled_ptr */ + if ((ds_binding_flags & 
Anvil::DescriptorBindingFlagBits::UPDATE_AFTER_BIND_BIT) != 0) + { + flags |= Anvil::DescriptorPoolCreateFlagBits::UPDATE_AFTER_BIND_BIT; + } - n_descriptors_needed_array[ds_binding_type] += ds_binding_array_size; + n_descriptors_needed_map[ds_binding_type] += ds_binding_array_size; } } - /* Configure the underlying descriptor pool wrapper */ - for (uint32_t n_descriptor_type = 0; - n_descriptor_type < VK_DESCRIPTOR_TYPE_RANGE_SIZE; - ++n_descriptor_type) + for (auto& current_map_entry : n_descriptors_needed_map) { - m_descriptor_pool_ptr->set_descriptor_array_size(static_cast(n_descriptor_type), - n_descriptors_needed_array[n_descriptor_type] + m_overhead_allocations[n_descriptor_type]); + current_map_entry.second += m_descriptor_type_properties[current_map_entry.first].n_overhead_allocations; } - m_descriptor_pool_ptr->bake(); + /* Verify we can actually create the pool.. */ + if ((flags & Anvil::DescriptorPoolCreateFlagBits::UPDATE_AFTER_BIND_BIT) != 0) + { + if (!m_device_ptr->get_extension_info()->ext_descriptor_indexing() ) + { + anvil_assert(m_device_ptr->get_extension_info()->ext_descriptor_indexing() ); - /* The descriptor pool now matches the layout's configuration */ - m_descriptor_pool_dirty = false; -} + goto end; + } + } -/* Please see header for specification */ -bool Anvil::DescriptorSetGroup::bake_descriptor_sets() -{ - Anvil::DescriptorSetGroup* layout_vk_owner_ptr = (m_parent_dsg_ptr != nullptr) ? m_parent_dsg_ptr.get() - : this; - const uint32_t n_sets = (uint32_t) m_descriptor_sets.size(); - bool result = false; + /* Create the pool */ + { + auto dp_create_info_ptr = Anvil::DescriptorPoolCreateInfo::create(m_device_ptr, + m_n_unique_dses, + flags, + Anvil::Utils::convert_boolean_to_mt_safety_enum(is_mt_safe() )); - /* Copy layout descriptors to the helper vector.. */ - m_cached_ds_layouts.clear(); + for (const auto& current_n_descriptors_needed_map_entry : n_descriptors_needed_map) + { + dp_create_info_ptr->set_n_descriptors_for_descriptor_type(current_n_descriptors_needed_map_entry.first, + current_n_descriptors_needed_map_entry.second); + } - for (auto ds : layout_vk_owner_ptr->m_descriptor_sets) - { - m_cached_ds_layouts.push_back(ds.second.layout_ptr); + m_descriptor_pool_ptr = Anvil::DescriptorPool::create(std::move(dp_create_info_ptr) ); } - /* Allocate the descriptor pool, if necessary */ - if (m_descriptor_pool_dirty) + if (m_descriptor_pool_ptr == nullptr) { - bake_descriptor_pool(); + anvil_assert(m_descriptor_pool_ptr != nullptr); - anvil_assert(!m_descriptor_pool_dirty); + goto end; } - /* Reset all previous allocations */ - m_descriptor_pool_ptr->reset(); + result = true; +end: + return result; +} - /* Grab descriptor sets from the pool. - * - * Note that baking can only occur if 1 or more bindings or sets were added. If we already have a number of - * DescriptorSet instances cached, we need to re-use existing instances in order to retain the bindings, - * which are cached internally by DescriptorSet instances. - */ - if (m_descriptor_sets.begin()->second.descriptor_set_ptr != nullptr) +/* Please see header for specification */ +bool Anvil::DescriptorSetGroup::bake_descriptor_sets() +{ + std::vector allocations; + std::vector dses; + const Anvil::DescriptorSetGroup* layout_vk_owner_ptr = (m_parent_dsg_ptr != nullptr) ? 
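/* Illustrative sketch: the UPDATE_AFTER_BIND handling in bake_descriptor_pool() above requires
 * matching flags on both the descriptor set layout and the descriptor pool in raw Vulkan
 * (VK_EXT_descriptor_indexing). Binding index, descriptor type and counts are example values.
 */
VkDescriptorBindingFlagsEXT binding_flags = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT;

VkDescriptorSetLayoutBindingFlagsCreateInfoEXT binding_flags_info = {};
binding_flags_info.sType         = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
binding_flags_info.bindingCount  = 1;
binding_flags_info.pBindingFlags = &binding_flags;

VkDescriptorSetLayoutBinding binding = {};
binding.binding         = 0;
binding.descriptorType  = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
binding.descriptorCount = 1;
binding.stageFlags      = VK_SHADER_STAGE_FRAGMENT_BIT;

VkDescriptorSetLayoutCreateInfo layout_create_info = {};
layout_create_info.sType        = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
layout_create_info.pNext        = &binding_flags_info;
layout_create_info.flags        = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
layout_create_info.bindingCount = 1;
layout_create_info.pBindings    = &binding;

/* ...and the pool which serves such layouts needs the corresponding create flag: */
VkDescriptorPoolCreateInfo uab_pool_create_info = {};
uab_pool_create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
uab_pool_create_info.flags = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT;
/* maxSets / pool sizes as accumulated by the per-binding loop above */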
m_parent_dsg_ptr + : this; + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + bool result = false; + + if (mutex_ptr != nullptr) { - const uint32_t n_dses_to_assign_handles = m_n_instantiated_sets; + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } - anvil_assert(n_dses_to_assign_handles != 0); + anvil_assert(m_descriptor_pool_ptr != nullptr); - m_cached_ds.resize (n_sets); - m_cached_ds_vk.resize(n_dses_to_assign_handles); - if (n_sets - m_n_instantiated_sets > 0) - { - auto ds = layout_vk_owner_ptr->m_descriptor_sets.begin(); - - result = m_descriptor_pool_ptr->alloc_descriptor_sets(n_sets - m_n_instantiated_sets, - &m_cached_ds_layouts[m_n_instantiated_sets], - &m_cached_ds [m_n_instantiated_sets]); + /* Copy layout descriptors to the helper vector.. */ + const auto n_sets = static_cast(m_descriptor_sets.size() ); - for (uint32_t n_skipped_ds = 0; - n_skipped_ds < m_n_instantiated_sets; - ++n_skipped_ds) - { - ds ++; - } + for (const auto& ds_data : layout_vk_owner_ptr->m_descriptor_sets) + { + const auto& ds_ptr = ds_data.second; - for (uint32_t n_set = m_n_instantiated_sets; - n_set < n_sets; - ++n_set, ++ds) - { - ds->second.descriptor_set_ptr = m_cached_ds[n_set]; - } + if (ds_data.second->layout_ptr == nullptr) + { + allocations.push_back( + Anvil::DescriptorSetAllocation(m_device_ptr->get_dummy_descriptor_set_layout() ) + ); } else { - result = true; - } - - result &= m_descriptor_pool_ptr->alloc_descriptor_sets(n_dses_to_assign_handles, - &m_cached_ds_layouts[0], - &m_cached_ds_vk[0]); - anvil_assert(result); + bool has_variable_descriptor_count_binding = false; + uint32_t variable_descriptor_count_binding_index = 0; + uint32_t variable_descriptor_count_binding_size = 0; - /* Assign the allocated DSes to relevant descriptor set descriptors */ - auto ds_iterator = m_descriptor_sets.begin(); + has_variable_descriptor_count_binding = ds_ptr->layout_ptr->get_create_info()->contains_variable_descriptor_count_binding(&variable_descriptor_count_binding_index, + &variable_descriptor_count_binding_size); - for (uint32_t n_set = 0; - n_set < m_n_instantiated_sets; - ++n_set, ++ds_iterator) - { - anvil_assert(m_cached_ds[n_set] != nullptr); - anvil_assert(ds_iterator->second.descriptor_set_ptr != nullptr); + if (has_variable_descriptor_count_binding) + { + anvil_assert(variable_descriptor_count_binding_size != 0); - ds_iterator->second.descriptor_set_ptr->set_new_vk_handle(m_cached_ds_vk[n_set]); + allocations.push_back( + Anvil::DescriptorSetAllocation(ds_ptr->layout_ptr.get(), + variable_descriptor_count_binding_size) + ); + } + else + { + allocations.push_back( + Anvil::DescriptorSetAllocation(ds_ptr->layout_ptr.get() ) + ); + } } } - else - { - auto ds_iterator = m_descriptor_sets.begin(); - m_cached_ds.resize(n_sets); + /* Reset all previous allocations */ + m_descriptor_pool_ptr->reset(); - /* Allocate everything from scratch */ - result = m_descriptor_pool_ptr->alloc_descriptor_sets(n_sets, - &m_cached_ds_layouts[0], - &m_cached_ds[0]); - anvil_assert(result); + /* Grab descriptor sets from the pool. 
*/ + auto ds_iterator = m_descriptor_sets.begin(); - for (uint32_t n_set = 0; - n_set < n_sets; - ++n_set, ++ds_iterator) - { - anvil_assert(m_cached_ds[n_set] != nullptr); - anvil_assert(ds_iterator->second.descriptor_set_ptr == nullptr); + dses.resize(n_sets); - ds_iterator->second.descriptor_set_ptr = m_cached_ds[n_set]; - } + /* Allocate everything from scratch */ + result = m_descriptor_pool_ptr->alloc_descriptor_sets(n_sets, + &allocations.at(0), + &dses.at (0) ); + anvil_assert(result); + + for (uint32_t n_set = 0; + n_set < n_sets; + ++n_set, ++ds_iterator) + { + anvil_assert(dses[n_set] != nullptr); + anvil_assert(ds_iterator->second->descriptor_set_ptr == nullptr); - m_n_instantiated_sets = n_sets; + ds_iterator->second->descriptor_set_ptr = std::move(dses.at(n_set) ); } /* All done */ @@ -326,127 +387,161 @@ bool Anvil::DescriptorSetGroup::bake_descriptor_sets() } /* Please see header for specification */ -std::shared_ptr Anvil::DescriptorSetGroup::create(std::weak_ptr in_device_ptr, - bool in_releaseable_sets, - uint32_t in_n_sets) +Anvil::DescriptorSetGroupUniquePtr Anvil::DescriptorSetGroup::create(const Anvil::BaseDevice* in_device_ptr, + std::vector& in_ds_create_info_ptrs, + const Anvil::DescriptorPoolCreateFlags& in_descriptor_pool_create_flags, + MTSafety in_mt_safety, + const std::vector& in_opt_overhead_allocations) { - std::shared_ptr result_ptr; + Anvil::DescriptorSetGroupUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( new Anvil::DescriptorSetGroup(in_device_ptr, - in_releaseable_sets, - in_n_sets) + std::move(in_ds_create_info_ptrs), + in_descriptor_pool_create_flags, + in_mt_safety, + in_opt_overhead_allocations) ); + if (result_ptr != nullptr) + { + result_ptr->bake_descriptor_pool(); + + if (!result_ptr->bake_descriptor_sets() ) + { + result_ptr.reset(); + } + } + return result_ptr; } /* Please see header for specification */ -std::shared_ptr Anvil::DescriptorSetGroup::create(std::shared_ptr in_parent_dsg_ptr, - bool in_releaseable_sets) +Anvil::DescriptorSetGroupUniquePtr Anvil::DescriptorSetGroup::create(const Anvil::DescriptorSetGroup* in_parent_dsg_ptr) { - std::shared_ptr result_ptr; + Anvil::DescriptorSetGroupUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( - new Anvil::DescriptorSetGroup(in_parent_dsg_ptr, - in_releaseable_sets) + new Anvil::DescriptorSetGroup(in_parent_dsg_ptr) ); + if (result_ptr != nullptr) + { + result_ptr->bake_descriptor_pool(); + + if (!result_ptr->bake_descriptor_sets() ) + { + result_ptr.reset(); + } + } + return result_ptr; } /* Please see header for specification */ -std::shared_ptr Anvil::DescriptorSetGroup::get_descriptor_set(uint32_t in_n_set) +Anvil::DescriptorSet* Anvil::DescriptorSetGroup::get_descriptor_set(uint32_t in_n_set) { - bool pool_rebaked = false; + decltype(m_descriptor_sets)::const_iterator ds_iterator; + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); - anvil_assert(m_descriptor_sets.find(in_n_set) != m_descriptor_sets.end() ); - - if (m_descriptor_pool_dirty) + if (mutex_ptr != nullptr) { - bake_descriptor_pool(); - - anvil_assert(!m_descriptor_pool_dirty); - - pool_rebaked = true; + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); } - if (pool_rebaked || - m_descriptor_sets[in_n_set].descriptor_set_ptr == nullptr) - { - bake_descriptor_sets(); + ds_iterator = m_descriptor_sets.find(in_n_set); + anvil_assert(ds_iterator != m_descriptor_sets.end() ); - anvil_assert(m_descriptor_sets[in_n_set].descriptor_set_ptr != nullptr); - } - - 
return m_descriptor_sets[in_n_set].descriptor_set_ptr; + return ds_iterator->second->descriptor_set_ptr.get(); } -/* Please see header for specification */ -uint32_t Anvil::DescriptorSetGroup::get_descriptor_set_binding_index(uint32_t in_n_set) const +const std::vector* Anvil::DescriptorSetGroup::get_descriptor_set_create_info() const { - std::map::const_iterator dsg_iterator = m_descriptor_sets.begin(); + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + const std::vector* result_ptr = nullptr; - anvil_assert(m_descriptor_sets.size() > in_n_set); - - for (uint32_t n_current_set = 0; - n_current_set < in_n_set; - ++n_current_set, ++dsg_iterator) + if (mutex_ptr != nullptr) { - /* Stub */ + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); } - return dsg_iterator->first; + result_ptr = &m_ds_create_info_ptrs; + + return result_ptr; } /* Please see header for specification */ -std::shared_ptr Anvil::DescriptorSetGroup::get_descriptor_set_layout(uint32_t in_n_set) +const Anvil::DescriptorSetCreateInfo* Anvil::DescriptorSetGroup::get_descriptor_set_create_info(uint32_t in_n_set) const { - if (m_parent_dsg_ptr != nullptr) + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + const Anvil::DescriptorSetCreateInfo* result_ptr = nullptr; + + if (mutex_ptr != nullptr) { - return m_parent_dsg_ptr->get_descriptor_set_layout(in_n_set); + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); } - else + + if (m_ds_create_info_ptrs.size() < in_n_set) { - anvil_assert(m_descriptor_sets.find(in_n_set) != m_descriptor_sets.end() ); - anvil_assert(m_descriptor_sets[in_n_set].layout_ptr != nullptr) + anvil_assert_fail(); - return m_descriptor_sets[in_n_set].layout_ptr; + goto end; } + result_ptr = m_ds_create_info_ptrs.at(in_n_set); + +end: + return result_ptr; } /* Please see header for specification */ -void Anvil::DescriptorSetGroup::set_descriptor_pool_overhead_allocations(VkDescriptorType in_descriptor_type, - uint32_t in_n_overhead_allocations) +Anvil::DescriptorSetLayout* Anvil::DescriptorSetGroup::get_descriptor_set_layout(uint32_t in_n_set) const { - anvil_assert(in_descriptor_type < VK_DESCRIPTOR_TYPE_RANGE_SIZE); + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); - if (m_overhead_allocations[in_descriptor_type] != in_n_overhead_allocations) + if (mutex_ptr != nullptr) { - m_overhead_allocations[in_descriptor_type] = in_n_overhead_allocations; - m_descriptor_pool_dirty = true; + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); } -} -/** Please see header for specification */ -Anvil::DescriptorSetGroup::DescriptorSetInfo::DescriptorSetInfo(const DescriptorSetInfo& in) -{ - descriptor_set_ptr = in.descriptor_set_ptr; - layout_ptr = in.layout_ptr; -} -/** Please see header for specification */ -Anvil::DescriptorSetGroup::DescriptorSetInfo& Anvil::DescriptorSetGroup::DescriptorSetInfo::operator=(const DescriptorSetInfo& in) -{ - descriptor_set_ptr = in.descriptor_set_ptr; - layout_ptr = in.layout_ptr; + if (m_parent_dsg_ptr != nullptr) + { + return m_parent_dsg_ptr->get_descriptor_set_layout(in_n_set); + } + else + { + auto ds_iterator = m_descriptor_sets.find(in_n_set); + + if (ds_iterator != m_descriptor_sets.end() ) + { + anvil_assert(ds_iterator->second->layout_ptr != nullptr) - return *this; + return ds_iterator->second->layout_ptr.get(); + } + else + { + return nullptr; + } + } } /** Please see header for specification */ -Anvil::DescriptorSetGroup::DescriptorSetInfo::~DescriptorSetInfo() 
+Anvil::DescriptorSetGroup::DescriptorSetInfoContainer::~DescriptorSetInfoContainer() { /* Stub */ } \ No newline at end of file diff --git a/src/wrappers/descriptor_set_layout.cpp b/src/wrappers/descriptor_set_layout.cpp index 6f8a5e9e..229ca078 100644 --- a/src/wrappers/descriptor_set_layout.cpp +++ b/src/wrappers/descriptor_set_layout.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -21,295 +21,205 @@ // #include "misc/debug.h" +#include "misc/descriptor_set_create_info.h" #include "misc/object_tracker.h" +#include "misc/struct_chainer.h" #include "wrappers/descriptor_set_layout.h" #include "wrappers/device.h" #include "wrappers/sampler.h" /** Please see header for specification */ -Anvil::DescriptorSetLayout::DescriptorSetLayout(std::weak_ptr in_device_ptr) - :CallbacksSupportProvider (DESCRIPTOR_SET_LAYOUT_CALLBACK_ID_COUNT), - DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT), +Anvil::DescriptorSetLayout::DescriptorSetLayout(Anvil::DescriptorSetCreateInfoUniquePtr in_ds_create_info_ptr, + const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe) + :DebugMarkerSupportProvider(in_device_ptr, + Anvil::ObjectType::DESCRIPTOR_SET_LAYOUT), + MTSafetySupportProvider (in_mt_safe), + m_create_info_ptr (std::move(in_ds_create_info_ptr) ), m_device_ptr (in_device_ptr), - m_dirty (true), m_layout (VK_NULL_HANDLE) { - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::DESCRIPTOR_SET_LAYOUT, this); } /** Please see header for specification */ Anvil::DescriptorSetLayout::~DescriptorSetLayout() { - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::DESCRIPTOR_SET_LAYOUT, this); if (m_layout != VK_NULL_HANDLE) { - std::shared_ptr device_locked_ptr(m_device_ptr); - - vkDestroyDescriptorSetLayout(device_locked_ptr->get_device_vk(), - m_layout, - nullptr /* pAllocator */); + lock(); + { + Anvil::Vulkan::vkDestroyDescriptorSetLayout(m_device_ptr->get_device_vk(), + m_layout, + nullptr /* pAllocator */); + } + unlock(); m_layout = VK_NULL_HANDLE; } } /** Please see header for specification */ -bool Anvil::DescriptorSetLayout::add_binding(uint32_t in_binding_index, - VkDescriptorType in_descriptor_type, - uint32_t in_descriptor_array_size, - VkShaderStageFlags in_stage_flags, - const std::shared_ptr* in_immutable_sampler_ptrs) +Anvil::DescriptorSetLayoutUniquePtr Anvil::DescriptorSetLayout::create(Anvil::DescriptorSetCreateInfoUniquePtr in_ds_create_info_ptr, + const Anvil::BaseDevice* in_device_ptr, + MTSafety in_mt_safety) { - bool result = false; + const bool mt_safe = Anvil::Utils::convert_mt_safety_enum_to_boolean(in_mt_safety, + in_device_ptr); + DescriptorSetLayoutUniquePtr result_ptr(nullptr, + std::default_delete() ); - /* Make sure the binding is not already defined */ - if (m_bindings.find(in_binding_index) != m_bindings.end() ) - { - anvil_assert_fail(); - - goto end; - } + result_ptr.reset( + new Anvil::DescriptorSetLayout(std::move(in_ds_create_info_ptr), + in_device_ptr, + mt_safe) + ); - /* Make sure the sampler array can actually be specified for 
this descriptor type. */ - if (in_immutable_sampler_ptrs != nullptr) + if (result_ptr != nullptr) { - if (in_descriptor_type != VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER && - in_descriptor_type != VK_DESCRIPTOR_TYPE_SAMPLER) + if (!result_ptr->init() ) { - anvil_assert_fail(); - - goto end; + result_ptr.reset(); } } - /* Add a new binding entry and mark the layout as dirty, so that it is re-baked next time - * the user calls the getter func */ - m_bindings[in_binding_index] = Binding(in_descriptor_array_size, - in_descriptor_type, - in_stage_flags, - in_immutable_sampler_ptrs); + return result_ptr; +} + +uint32_t Anvil::DescriptorSetLayout::get_maximum_variable_descriptor_count(const DescriptorSetLayoutCreateInfoContainer* in_ds_create_info_ptr, + const Anvil::BaseDevice* in_device_ptr) +{ + const Anvil::ExtensionKHRMaintenance3Entrypoints* entrypoints_ptr = nullptr; + Anvil::StructID query_struct_id; + const VkDescriptorSetVariableDescriptorCountLayoutSupportEXT* query_struct_ptr = nullptr; + uint32_t result = 0; + Anvil::StructChainUniquePtr struct_chain_ptr; + Anvil::StructChainer struct_chainer; + if (!in_device_ptr->get_extension_info()->ext_descriptor_indexing() ) { - OnNewBindingAddedToDescriptorSetLayoutCallbackArgument callback_argument(this); - - callback(DESCRIPTOR_SET_LAYOUT_CALLBACK_ID_BINDING_ADDED, - &callback_argument); + anvil_assert(in_device_ptr->get_extension_info()->ext_descriptor_indexing() ); + goto end; } - m_dirty = true; - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::DescriptorSetLayout::bake() -{ - std::vector binding_info_items; - VkDescriptorSetLayoutCreateInfo create_info; - std::shared_ptr device_locked_ptr(m_device_ptr); - uint32_t n_binding = 0; - uint32_t n_bindings_defined = 0; - uint32_t n_samplers_defined = 0; - bool result = false; - VkResult result_vk; - std::vector sampler_items; - - if (!m_dirty) + if (!in_device_ptr->get_extension_info()->khr_maintenance3() ) { - result = true; + anvil_assert(in_device_ptr->get_extension_info()->khr_maintenance3()); goto end; } + else + { + entrypoints_ptr = &in_device_ptr->get_extension_khr_maintenance3_entrypoints(); + } - /* Release an existing Vulkan layout, if one's already been baked in the past */ - if (m_layout != VK_NULL_HANDLE) { - vkDestroyDescriptorSetLayout(device_locked_ptr->get_device_vk(), - m_layout, - nullptr /* pAllocator */); + VkDescriptorSetLayoutSupportKHR root; - m_layout = VK_NULL_HANDLE; - set_vk_handle(VK_NULL_HANDLE); + root.pNext = nullptr; + root.sType = static_cast(VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR); + root.supported = VK_FALSE; + + struct_chainer.append_struct(root); } - /* Count the number of immutable samplers defined. This is needed because if sampler_items is reallocated - * after we start building the VkSampler array contents, all previously initialized VkDescriptorSetLayoutBinding - * instances will start referring to invalid sampler arrays. 
*/ - for (auto binding_iterator = m_bindings.begin(); - binding_iterator != m_bindings.end(); - ++binding_iterator) { - const Binding& binding_data = binding_iterator->second; + VkDescriptorSetVariableDescriptorCountLayoutSupportEXT query; - n_samplers_defined += static_cast(binding_data.immutable_samplers.size() ); + query.maxVariableDescriptorCount = 0; + query.pNext = nullptr; + query.sType = static_cast(VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT); + + query_struct_id = struct_chainer.append_struct(query); } - sampler_items.reserve(n_samplers_defined); + struct_chain_ptr = struct_chainer.create_chain (); + query_struct_ptr = struct_chain_ptr->get_struct_with_id(query_struct_id); - /* Determine the number of non-dummy bindings. */ - if (m_bindings.size() > 1) + if (query_struct_ptr == nullptr) { - n_bindings_defined = static_cast(m_bindings.size() ); - } - else - for (auto binding : m_bindings) - { - if (binding.second.descriptor_array_size > 0) - { - ++n_bindings_defined; - } + anvil_assert(query_struct_ptr != nullptr); + + goto end; } - /* Convert internal representation to Vulkan structure(s) */ - binding_info_items.resize(n_bindings_defined); + entrypoints_ptr->vkGetDescriptorSetLayoutSupportKHR(in_device_ptr->get_device_vk(), + in_ds_create_info_ptr->struct_chain_ptr->get_root_struct(), + struct_chain_ptr->get_root_struct() ); - create_info.bindingCount = n_bindings_defined; - create_info.flags = 0; - create_info.pNext = nullptr; - create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO; + result = query_struct_ptr->maxVariableDescriptorCount; - if (n_bindings_defined > 0) - { - for (auto binding_iterator = m_bindings.begin(); - binding_iterator != m_bindings.end(); - ++binding_iterator, ++n_binding) - { - const Binding& binding_data = binding_iterator->second; - const BindingIndex& binding_index = binding_iterator->first; - VkDescriptorSetLayoutBinding& binding_vk = binding_info_items[n_binding]; - - if (binding_data.descriptor_array_size == 0) - { - /* Dummy binding, ignore. 
*/ - continue; - } - - binding_vk.binding = binding_index; - binding_vk.descriptorCount = binding_data.descriptor_array_size; - binding_vk.descriptorType = binding_data.descriptor_type; - binding_vk.stageFlags = binding_data.stage_flags; - - if (binding_data.immutable_samplers.size() > 0) - { - const uint32_t sampler_array_start_index = static_cast(sampler_items.size() ); - - anvil_assert(binding_data.immutable_samplers.size() == binding_data.descriptor_array_size); - - for (uint32_t n_sampler = 0; - n_sampler < binding_data.descriptor_array_size; - ++n_sampler) - { - sampler_items.push_back(binding_data.immutable_samplers[n_sampler]->get_sampler() ); - } - - binding_vk.pImmutableSamplers = &sampler_items[sampler_array_start_index]; - } - else - { - binding_vk.pImmutableSamplers = nullptr; - } - } - } +end: + return result; +} - if (binding_info_items.size() > 0) +bool Anvil::DescriptorSetLayout::meets_max_per_set_descriptors_limit(const DescriptorSetLayoutCreateInfoContainer* in_ds_create_info_ptr, + const Anvil::BaseDevice* in_device_ptr) +{ + const Anvil::ExtensionKHRMaintenance3Entrypoints* entrypoints_ptr = nullptr; + VkDescriptorSetLayoutSupportKHR query; + bool result = false; + + if (!in_device_ptr->get_extension_info()->khr_maintenance3() ) { - create_info.pBindings = &binding_info_items[0]; + anvil_assert(in_device_ptr->get_extension_info()->khr_maintenance3()); + + goto end; } else { - create_info.pBindings = nullptr; + entrypoints_ptr = &in_device_ptr->get_extension_khr_maintenance3_entrypoints(); } - /* Bake the Vulkan object */ - result_vk = vkCreateDescriptorSetLayout(device_locked_ptr->get_device_vk(), - &create_info, - nullptr, /* pAllocator */ - &m_layout); + query.pNext = nullptr; + query.sType = static_cast(VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR); + query.supported = VK_FALSE; - anvil_assert_vk_call_succeeded(result_vk); - if (is_vk_call_successful(result_vk) ) - { - m_dirty = false; + entrypoints_ptr->vkGetDescriptorSetLayoutSupportKHR(in_device_ptr->get_device_vk(), + in_ds_create_info_ptr->struct_chain_ptr->get_root_struct(), + &query); - set_vk_handle(m_layout); - } - - result = is_vk_call_successful(result_vk); + result = (query.supported == VK_TRUE); end: return result; } /** Please see header for specification */ -std::shared_ptr Anvil::DescriptorSetLayout::create(std::weak_ptr in_device_ptr) +bool Anvil::DescriptorSetLayout::init() { - std::shared_ptr result_ptr; + bool result = false; + VkResult result_vk; - result_ptr.reset( - new Anvil::DescriptorSetLayout(in_device_ptr) - ); - - return result_ptr; -} + anvil_assert(m_layout == VK_NULL_HANDLE); -/** Please see header for specification */ -bool Anvil::DescriptorSetLayout::get_binding_properties(uint32_t in_n_binding, - uint32_t* out_opt_binding_index_ptr, - VkDescriptorType* out_opt_descriptor_type_ptr, - uint32_t* out_opt_descriptor_array_size_ptr, - VkShaderStageFlags* out_opt_stage_flags_ptr, - bool* out_opt_immutable_samplers_enabled_ptr) -{ - auto binding_iterator = m_bindings.begin(); - bool result = false; + /* Bake the Vulkan object */ + auto create_info_ptr = m_create_info_ptr->create_descriptor_set_layout_create_info(m_device_ptr); - if (m_bindings.size() <= in_n_binding) + if (create_info_ptr == nullptr) { + anvil_assert(create_info_ptr != nullptr); + goto end; } - for (uint32_t n_current_binding = 0; - n_current_binding < in_n_binding; - ++n_current_binding) - { - binding_iterator ++; - } + result_vk = Anvil::Vulkan::vkCreateDescriptorSetLayout(m_device_ptr->get_device_vk(), + 
create_info_ptr->struct_chain_ptr->get_root_struct(), + nullptr, /* pAllocator */ + &m_layout); - if (binding_iterator != m_bindings.end() ) + anvil_assert_vk_call_succeeded(result_vk); + if (is_vk_call_successful(result_vk) ) { - if (out_opt_binding_index_ptr != nullptr) - { - *out_opt_binding_index_ptr = binding_iterator->first; - } - - if (out_opt_descriptor_array_size_ptr != nullptr) - { - *out_opt_descriptor_array_size_ptr = binding_iterator->second.descriptor_array_size; - } - - if (out_opt_descriptor_type_ptr != nullptr) - { - *out_opt_descriptor_type_ptr = binding_iterator->second.descriptor_type; - } - - if (out_opt_immutable_samplers_enabled_ptr != nullptr) - { - *out_opt_immutable_samplers_enabled_ptr = (binding_iterator->second.immutable_samplers.size() != 0); - } - - if (out_opt_stage_flags_ptr != nullptr) - { - *out_opt_stage_flags_ptr = binding_iterator->second.stage_flags; - } - - result = true; + set_vk_handle(m_layout); } + result = is_vk_call_successful(result_vk); + end: return result; -} \ No newline at end of file +} diff --git a/src/wrappers/descriptor_set_layout_manager.cpp b/src/wrappers/descriptor_set_layout_manager.cpp new file mode 100644 index 00000000..0b2e224f --- /dev/null +++ b/src/wrappers/descriptor_set_layout_manager.cpp @@ -0,0 +1,171 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// + +#include "misc/debug.h" +#include "misc/descriptor_set_create_info.h" +#include "misc/object_tracker.h" +#include "wrappers/descriptor_set_layout.h" +#include "wrappers/descriptor_set_layout_manager.h" +#include + + +/** Constructor. 
*/ +Anvil::DescriptorSetLayoutManager::DescriptorSetLayoutManager(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe) + :MTSafetySupportProvider(in_mt_safe), + m_device_ptr (in_device_ptr) +{ + /* Register the object */ + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::ANVIL_DESCRIPTOR_SET_LAYOUT_MANAGER, + this); +} + +/** Destructor */ +Anvil::DescriptorSetLayoutManager::~DescriptorSetLayoutManager() +{ + anvil_assert(m_descriptor_set_layouts.size() == 0); + + /* Unregister the object */ + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::ANVIL_DESCRIPTOR_SET_LAYOUT_MANAGER, + this); +} + +/* Please see header for specification */ +Anvil::DescriptorSetLayoutManagerUniquePtr Anvil::DescriptorSetLayoutManager::create(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe) +{ + DescriptorSetLayoutManagerUniquePtr result_ptr(nullptr, + std::default_delete() ); + + result_ptr.reset( + new Anvil::DescriptorSetLayoutManager(in_device_ptr, + in_mt_safe) + ); + + anvil_assert(result_ptr != nullptr); + return result_ptr; +} + +/* Please see header for specification */ +bool Anvil::DescriptorSetLayoutManager::get_layout(const DescriptorSetCreateInfo* in_ds_create_info_ptr, + Anvil::DescriptorSetLayoutUniquePtr* out_ds_layout_ptr_ptr) +{ + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + bool result = false; + Anvil::DescriptorSetLayout* result_ds_layout_ptr = nullptr; + + anvil_assert(in_ds_create_info_ptr != nullptr); + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } + + for (auto layout_iterator = m_descriptor_set_layouts.begin(); + layout_iterator != m_descriptor_set_layouts.end(); + ++layout_iterator) + { + auto& current_ds_layout_container_ptr = *layout_iterator; + auto& current_ds_layout_ptr = current_ds_layout_container_ptr->ds_layout_ptr; + auto current_ds_create_info_ptr = current_ds_layout_ptr->get_create_info(); + + if (*in_ds_create_info_ptr == *current_ds_create_info_ptr) + { + result = true; + result_ds_layout_ptr = current_ds_layout_ptr.get(); + + current_ds_layout_container_ptr->n_references.fetch_add(1); + + break; + } + } + + if (!result) + { + auto ds_create_info_clone_ptr = DescriptorSetCreateInfoUniquePtr (new DescriptorSetCreateInfo(*in_ds_create_info_ptr), + std::default_delete() ); + auto new_ds_layout_ptr = Anvil::DescriptorSetLayout::create (std::move(ds_create_info_clone_ptr), + m_device_ptr, + Anvil::Utils::convert_boolean_to_mt_safety_enum(is_mt_safe() )); + auto new_ds_layout_container_ptr = std::unique_ptr(new DescriptorSetLayoutContainer() ); + + result = true; + result_ds_layout_ptr = new_ds_layout_ptr.get(); + new_ds_layout_container_ptr->ds_layout_ptr = std::move(new_ds_layout_ptr); + + m_descriptor_set_layouts.push_back( + std::move(new_ds_layout_container_ptr) + ); + } + + if (result) + { + anvil_assert(result_ds_layout_ptr != nullptr); + + *out_ds_layout_ptr_ptr = Anvil::DescriptorSetLayoutUniquePtr(result_ds_layout_ptr, + std::bind(&DescriptorSetLayoutManager::on_descriptor_set_layout_dereferenced, + this, + result_ds_layout_ptr) + ); + } + + return result; +} + +void Anvil::DescriptorSetLayoutManager::on_descriptor_set_layout_dereferenced(Anvil::DescriptorSetLayout* in_layout_ptr) +{ + bool has_found = false; + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } + + for (auto layout_iterator = m_descriptor_set_layouts.begin(); + layout_iterator != 
m_descriptor_set_layouts.end() && !has_found; + ++layout_iterator) + { + auto& current_ds_layout_container_ptr = *layout_iterator; + auto& current_ds_layout_ptr = current_ds_layout_container_ptr->ds_layout_ptr; + + if (current_ds_layout_ptr.get() == in_layout_ptr) + { + has_found = true; + + if (current_ds_layout_container_ptr->n_references.fetch_sub(1) == 1) + { + m_descriptor_set_layouts.erase(layout_iterator); + } + + break; + } + } + + anvil_assert(has_found); +} \ No newline at end of file diff --git a/src/wrappers/descriptor_update_template.cpp b/src/wrappers/descriptor_update_template.cpp new file mode 100644 index 00000000..07c6b444 --- /dev/null +++ b/src/wrappers/descriptor_update_template.cpp @@ -0,0 +1,240 @@ +// +// Copyright (c) 2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// + +#include "misc/debug.h" +#include "misc/descriptor_set_create_info.h" +#include "misc/object_tracker.h" +#include "wrappers/descriptor_set.h" +#include "wrappers/descriptor_set_layout.h" +#include "wrappers/descriptor_update_template.h" +#include "wrappers/device.h" +#include "wrappers/physical_device.h" + +Anvil::DescriptorUpdateTemplate::DescriptorUpdateTemplate(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe) + :DebugMarkerSupportProvider(in_device_ptr, + Anvil::ObjectType::DESCRIPTOR_UPDATE_TEMPLATE), + MTSafetySupportProvider (in_mt_safe), + m_device_ptr (in_device_ptr), + m_vk_object (VK_NULL_HANDLE) +{ + /* Register this instance */ + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::DESCRIPTOR_UPDATE_TEMPLATE, + this); + +} + +Anvil::DescriptorUpdateTemplate::~DescriptorUpdateTemplate() +{ + if (m_vk_object != VK_NULL_HANDLE) + { + const auto& entrypoints = m_device_ptr->get_extension_khr_descriptor_update_template_entrypoints(); + + entrypoints.vkDestroyDescriptorUpdateTemplateKHR(m_device_ptr->get_device_vk(), + m_vk_object, + nullptr); /* pAllocator */ + + m_vk_object = VK_NULL_HANDLE; + } + + /* Unregister the object */ + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::DESCRIPTOR_UPDATE_TEMPLATE, + this); + +} + +Anvil::DescriptorUpdateTemplateUniquePtr Anvil::DescriptorUpdateTemplate::create_for_descriptor_set_updates(const Anvil::BaseDevice* in_device_ptr, + const Anvil::DescriptorSetLayout* in_descriptor_set_layout_ptr, + const Anvil::DescriptorUpdateTemplateEntry* in_update_entries_ptr, + const uint32_t& in_n_update_entries, + MTSafety in_mt_safety) +{ + DescriptorUpdateTemplateUniquePtr result_ptr(nullptr, + std::default_delete() ); + + result_ptr.reset( + new DescriptorUpdateTemplate(in_device_ptr, + Anvil::Utils::convert_mt_safety_enum_to_boolean(in_mt_safety, + in_device_ptr) ) + ); + + if (result_ptr != nullptr) + { + if (!result_ptr->init(in_descriptor_set_layout_ptr, + in_update_entries_ptr, + in_n_update_entries) ) + { + result_ptr.reset(); + } + } + + return result_ptr; +} + +bool Anvil::DescriptorUpdateTemplate::init(const Anvil::DescriptorSetLayout* in_descriptor_set_layout_ptr, + const Anvil::DescriptorUpdateTemplateEntry* in_update_entries_ptr, + const uint32_t& in_n_update_entries) +{ + const Anvil::ExtensionKHRDescriptorUpdateTemplateEntrypoints* entrypoints_ptr = nullptr; + bool result = true; + + if (!m_device_ptr->get_extension_info()->khr_descriptor_update_template() ) + { + anvil_assert(m_device_ptr->get_extension_info()->khr_descriptor_update_template() ); + + result = false; + goto end; + } + + if (in_descriptor_set_layout_ptr == nullptr) + { + anvil_assert(in_descriptor_set_layout_ptr != nullptr); + + result = false; + goto end; + } + + if (in_n_update_entries == 0) + { + anvil_assert(in_n_update_entries != 0); + + result = false; + goto end; + } + + if (in_update_entries_ptr == nullptr) + { + anvil_assert(in_update_entries_ptr != nullptr); + + result = false; + goto end; + } + + entrypoints_ptr = &m_device_ptr->get_extension_khr_descriptor_update_template_entrypoints(); + + if (entrypoints_ptr == nullptr) + { + anvil_assert(entrypoints_ptr != nullptr); + + result = false; + goto end; + } + + { + VkDescriptorUpdateTemplateCreateInfoKHR create_info; + std::vector update_entries(in_n_update_entries); + + for (uint32_t n_update_entry = 0; + n_update_entry < in_n_update_entries; + ++n_update_entry) + { + update_entries.at(n_update_entry) = 
in_update_entries_ptr[n_update_entry].get_vk_descriptor_update_template_entry_khr(); + } + + create_info.descriptorSetLayout = in_descriptor_set_layout_ptr->get_layout(); + create_info.descriptorUpdateEntryCount = in_n_update_entries; + create_info.flags = 0; + create_info.pDescriptorUpdateEntries = &update_entries.at(0); + create_info.pipelineBindPoint = VK_PIPELINE_BIND_POINT_MAX_ENUM; + create_info.pipelineLayout = VK_NULL_HANDLE; + create_info.pNext = nullptr; + create_info.set = 0; + create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR; + create_info.templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR; + + in_descriptor_set_layout_ptr->lock(); + { + result = is_vk_call_successful(entrypoints_ptr->vkCreateDescriptorUpdateTemplateKHR(m_device_ptr->get_device_vk(), + &create_info, + nullptr, /* pAllocator */ + &m_vk_object) ); + } + in_descriptor_set_layout_ptr->unlock(); + } + + if (!result || + m_vk_object == VK_NULL_HANDLE) + { + anvil_assert(result); + anvil_assert(m_vk_object != VK_NULL_HANDLE); + + result = false; + goto end; + } + + set_vk_handle(m_vk_object); + + /* Finally, also cache descriptor set create info using the user-specified DS layout */ + m_ds_create_info_ptr.reset( + new Anvil::DescriptorSetCreateInfo(*in_descriptor_set_layout_ptr->get_create_info() ) + ); + + if (m_ds_create_info_ptr == nullptr) + { + anvil_assert(m_ds_create_info_ptr != nullptr); + + result = false; + goto end; + } + +end: + return result; +} + +void Anvil::DescriptorUpdateTemplate::update_descriptor_set(const Anvil::DescriptorSet* inout_ds_ptr, + const void* in_data_ptr) const +{ + const auto& entrypoints = m_device_ptr->get_extension_khr_descriptor_update_template_entrypoints(); + + if (in_data_ptr == nullptr) + { + anvil_assert(in_data_ptr != nullptr); + + goto end; + } + + #ifdef _DEBUG + { + if (!(*inout_ds_ptr->get_descriptor_set_layout()->get_create_info() == *m_ds_create_info_ptr) ) + { + anvil_assert_fail(); + + goto end; + } + } + #endif + + inout_ds_ptr->lock(); + lock(); + { + entrypoints.vkUpdateDescriptorSetWithTemplateKHR(m_device_ptr->get_device_vk(), + inout_ds_ptr->get_descriptor_set_vk(), + m_vk_object, + in_data_ptr); + } + unlock(); + inout_ds_ptr->unlock(); + +end: + ; +} + diff --git a/src/wrappers/device.cpp b/src/wrappers/device.cpp index ea0599da..d334e45c 100644 --- a/src/wrappers/device.cpp +++ b/src/wrappers/device.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2019 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -22,11 +22,15 @@ #include "misc/debug.h" #include "misc/object_tracker.h" +#include "misc/shader_module_cache.h" +#include "misc/struct_chainer.h" +#include "misc/swapchain_create_info.h" #include "wrappers/command_pool.h" #include "wrappers/compute_pipeline_manager.h" #include "wrappers/descriptor_set.h" #include "wrappers/descriptor_set_group.h" #include "wrappers/descriptor_set_layout.h" +#include "wrappers/descriptor_set_layout_manager.h" #include "wrappers/device.h" #include "wrappers/graphics_pipeline_manager.h" #include "wrappers/instance.h" @@ -37,98 +41,445 @@ #include "wrappers/rendering_surface.h" #include "wrappers/swapchain.h" +#ifdef max +#undef max +#endif /* Please see header for specification */ -Anvil::BaseDevice::BaseDevice(std::weak_ptr in_parent_instance_ptr) - :m_destroyed (false), - m_device (VK_NULL_HANDLE), - m_ext_debug_marker_enabled(false), - m_parent_instance_ptr (in_parent_instance_ptr) +Anvil::BaseDevice::BaseDevice(Anvil::DeviceCreateInfoUniquePtr in_create_info_ptr) - :MTSafetySupportProvider(in_create_info_ptr->should_be_mt_safe() ), + m_create_info_ptr (std::move(in_create_info_ptr) ), + m_device (VK_NULL_HANDLE) { - std::shared_ptr instance_locked_ptr(in_parent_instance_ptr); - - m_khr_surface_extension_entrypoints = instance_locked_ptr->get_extension_khr_surface_entrypoints(); + m_khr_surface_extension_entrypoints = m_create_info_ptr->get_physical_device_ptrs().at(0)->get_instance()->get_extension_khr_surface_entrypoints(); /* Register the instance */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_DEVICE, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::DEVICE, this); } Anvil::BaseDevice::~BaseDevice() { - anvil_assert(m_destroyed); - /* Unregister the instance. This needs to happen before actual Vulkan object destruction, as there might * be observers who postpone their destruction until the device is about to go down. 
*/ - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_DEVICE, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::DEVICE, this); - if (m_device != nullptr) + if (m_device != VK_NULL_HANDLE) + { + wait_idle(); + } + + m_command_pool_ptr_per_vk_queue_fam.clear(); + m_compute_pipeline_manager_ptr.reset (); + m_dummy_dsg_ptr.reset (); + m_graphics_pipeline_manager_ptr.reset (); + m_descriptor_set_layout_manager_ptr.reset(); + m_pipeline_cache_ptr.reset (); + m_pipeline_layout_manager_ptr.reset (); + m_owned_queues.clear (); + + if (m_device != VK_NULL_HANDLE) { - vkDeviceWaitIdle(m_device); - vkDestroyDevice (m_device, - nullptr /* pAllocator */); + lock(); + { + Anvil::Vulkan::vkDestroyDevice(m_device, + nullptr); /* pAllocator */ + } + unlock(); m_device = nullptr; } } -/** Please see header for specification */ -void Anvil::BaseDevice::destroy() +/** TODO */ +void Anvil::BaseDevice::create_device(const std::vector& in_extensions, + const std::vector& in_layers, + DeviceQueueFamilyInfo* out_queue_families_ptr) { - anvil_assert(!m_destroyed); + std::vector device_queue_priorities; + const auto& physical_device_ptrs (m_create_info_ptr->get_physical_device_ptrs() ); + std::vector physical_devices (physical_device_ptrs.size () ); + VkResult result (VK_ERROR_INITIALIZATION_FAILED); + Anvil::StructChainer struct_chainer; + const Anvil::PhysicalDevice* zeroth_physical_device_ptr (physical_device_ptrs.at (0) ); + const auto& zeroth_physical_device_queue_fams(zeroth_physical_device_ptr->get_queue_families() ); + + ANVIL_REDUNDANT_VARIABLE(result); + + for (uint32_t n_physical_device = 0; + n_physical_device < static_cast(physical_devices.size() ); + ++n_physical_device) + { + physical_devices.at(n_physical_device) = physical_device_ptrs.at(n_physical_device)->get_physical_device(); + } + + { + Anvil::StructID root_create_info_struct_id; + + /* Root structure */ + { + VkDeviceCreateInfo create_info; + + create_info.enabledExtensionCount = static_cast(in_extensions.size() ); + create_info.enabledLayerCount = static_cast(in_layers.size () ); + create_info.flags = 0; + create_info.pNext = nullptr; + create_info.pEnabledFeatures = nullptr; /* chained */ + create_info.ppEnabledExtensionNames = (in_extensions.size() > 0) ? &in_extensions[0] : nullptr; + create_info.ppEnabledLayerNames = (in_layers.size () > 0) ? &in_layers [0] : nullptr; + create_info.pQueueCreateInfos = nullptr; /* chained later */ + create_info.queueCreateInfoCount = 0; /* filled later */ + create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO; + + root_create_info_struct_id = struct_chainer.append_struct(create_info); + } + + /* Queue create info items */ + { + std::vector device_queue_create_info_items; + const auto n_queue_fams = static_cast(zeroth_physical_device_queue_fams.size() ); + + /* For any queue that uses non-medium global priority, we're going to need a dedicated device queue create info struct. This is + * to ensure the global priority does not propagate to other queues. + * + * Creation requests for all other queues can be merged into one struct, one per each queue family index. 
+ */ + static VkDeviceQueueGlobalPriorityCreateInfoEXT low_priority_create_info = + { + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, + nullptr, /* pNext */ + VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT + }; + static VkDeviceQueueGlobalPriorityCreateInfoEXT high_priority_create_info = + { + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, + nullptr, /* pNext */ + VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT + }; + static VkDeviceQueueGlobalPriorityCreateInfoEXT realtime_priority_create_info = + { + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, + nullptr, /* pNext */ + VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT + }; + + /* Prepare a vec of queue priorities we're going to stick into DeviceQueueCreateInfo structure */ + { + uint32_t n_queues_used_so_far = 0; + uint32_t n_total_queues = 0; + + for (uint32_t n_queue_fam = 0; + n_queue_fam < n_queue_fams; + ++n_queue_fam) + { + const auto& n_queues = zeroth_physical_device_queue_fams.at(n_queue_fam).n_queues; + + n_total_queues += n_queues; + } - m_destroyed = true; + device_queue_priorities.resize(n_total_queues, + 0.0f); + + for (uint32_t n_queue_fam = 0; + n_queue_fam < n_queue_fams; + ++n_queue_fam) + { + const auto& n_queues = zeroth_physical_device_queue_fams.at(n_queue_fam).n_queues; + + for (uint32_t n_queue = 0; + n_queue < n_queues; + ++n_queue, ++n_queues_used_so_far) + { + device_queue_priorities.at(n_queues_used_so_far) = m_create_info_ptr->get_queue_priority(n_queue_fam, + n_queue); + } + } + } + + /* Prepare device queue create info structs */ + { + uint32_t n_queues_defined_so_far = 0; + + for (uint32_t n_queue_fam = 0; + n_queue_fam < n_queue_fams; + ++n_queue_fam) + { + const auto& current_queue_fam(zeroth_physical_device_queue_fams.at(n_queue_fam) ); + + if (current_queue_fam.n_queues > 0) + { + int32_t n_queue_consumed_last = -1; + + for (uint32_t n_queue = 0; + n_queue < current_queue_fam.n_queues; + ++n_queue) + { + const auto& current_queue_global_priority = m_create_info_ptr->get_queue_global_priority (n_queue_fam, + n_queue); + const bool current_queue_must_support_protected_memory_ops = m_create_info_ptr->get_queue_must_support_protected_memory_operations(n_queue_fam, + n_queue); + + if (current_queue_must_support_protected_memory_ops || + current_queue_global_priority != Anvil::QueueGlobalPriority::MEDIUM_EXT || + n_queue == static_cast(current_queue_fam.n_queues - 1) ) + { + VkDeviceQueueCreateInfo queue_create_info; + + queue_create_info.flags = 0; + queue_create_info.pNext = nullptr; + queue_create_info.pQueuePriorities = &device_queue_priorities.at(n_queues_defined_so_far); + queue_create_info.queueCount = (n_queue - n_queue_consumed_last); + queue_create_info.queueFamilyIndex = n_queue_fam; + queue_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; + + if (current_queue_must_support_protected_memory_ops) + { + queue_create_info.flags |= VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT; + } + + if (current_queue_global_priority == Anvil::QueueGlobalPriority::HIGH_EXT || + current_queue_global_priority == Anvil::QueueGlobalPriority::LOW_EXT || + current_queue_global_priority == Anvil::QueueGlobalPriority::REALTIME_EXT) + { + queue_create_info.pNext = (current_queue_global_priority == Anvil::QueueGlobalPriority::HIGH_EXT) ? &high_priority_create_info + : (current_queue_global_priority == Anvil::QueueGlobalPriority::LOW_EXT) ? 
&low_priority_create_info + : &realtime_priority_create_info; + } + else + { + anvil_assert(current_queue_global_priority == Anvil::QueueGlobalPriority::MEDIUM_EXT); + } + + device_queue_create_info_items.push_back(queue_create_info); + + n_queue_consumed_last = static_cast(n_queue); + n_queues_defined_so_far += (n_queue - n_queue_consumed_last); + } + } + + } + } + } + + struct_chainer.get_root_struct()->queueCreateInfoCount = static_cast(device_queue_create_info_items.size() ); + + struct_chainer.store_helper_structure_vector(device_queue_create_info_items, + root_create_info_struct_id, + offsetof(VkDeviceCreateInfo, pQueueCreateInfos) ); + } + } - for (uint32_t n_command_pool = 0; - n_command_pool < sizeof(m_command_pool_ptrs) / sizeof(m_command_pool_ptrs[0]); - ++n_command_pool) { - m_command_pool_ptrs[n_command_pool] = nullptr; + const auto& mem_overallocation_behavior = m_create_info_ptr->get_memory_overallocation_behavior(); + + if (mem_overallocation_behavior != Anvil::MemoryOverallocationBehavior::DEFAULT) + { + VkDeviceMemoryOverallocationCreateInfoAMD overallocation_info; + + overallocation_info.overallocationBehavior = static_cast(mem_overallocation_behavior); + overallocation_info.pNext = nullptr; + overallocation_info.sType = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD; + + anvil_assert(is_extension_enabled(VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME) ); + + struct_chainer.append_struct(overallocation_info); + } } - m_compute_pipeline_manager_ptr = nullptr; - m_dummy_dsg_ptr = nullptr; - m_graphics_pipeline_manager_ptr = nullptr; - m_pipeline_cache_ptr = nullptr; - m_pipeline_layout_manager_ptr = nullptr; + if (physical_device_ptrs.size() > 1) + { + VkDeviceGroupDeviceCreateInfoKHR device_group_device_create_info; + + device_group_device_create_info.physicalDeviceCount = static_cast(physical_device_ptrs.size() ); + device_group_device_create_info.pPhysicalDevices = &physical_devices[0]; + device_group_device_create_info.pNext = nullptr; + device_group_device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR; + + struct_chainer.append_struct(device_group_device_create_info); + } - /* Proceed with device-specific instances */ - m_queue_fams.clear (); - m_sparse_binding_queues.clear(); + /* Enable all features available. */ + add_physical_device_features_to_chainer(&struct_chainer); - for (Anvil::QueueFamilyType queue_family_type = Anvil::QUEUE_FAMILY_TYPE_FIRST; - queue_family_type < Anvil::QUEUE_FAMILY_TYPE_COUNT + 1; - queue_family_type = static_cast(queue_family_type + 1)) + /* Issue the request */ { - std::vector >& queues = (queue_family_type == Anvil::QUEUE_FAMILY_TYPE_COMPUTE) ? m_compute_queues - : (queue_family_type == Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL) ? m_universal_queues - : m_transfer_queues; + auto struct_chain_ptr = struct_chainer.create_chain(m_create_info_ptr->pNext); + + result = Anvil::Vulkan::vkCreateDevice(physical_device_ptrs.at(0)->get_physical_device(), + struct_chain_ptr->get_root_struct(), + nullptr, /* pAllocator */ + &m_device); - queues.clear(); + anvil_assert_vk_call_succeeded(result); } + + /* Now that all queues are available, assign them to queue families Anvil recognizes. 
*/ + get_queue_family_indices_for_physical_device(physical_device_ptrs.at(0), + out_queue_families_ptr); } /** Please see header for specification */ -std::shared_ptr Anvil::BaseDevice::get_dummy_descriptor_set() const +const Anvil::DescriptorSet* Anvil::BaseDevice::get_dummy_descriptor_set() const { + std::unique_lock lock(m_dummy_dsg_mutex); + + if (m_dummy_dsg_ptr == nullptr) + { + init_dummy_dsg(); + } + return m_dummy_dsg_ptr->get_descriptor_set(0); } /** Please see header for specification */ -std::shared_ptr Anvil::BaseDevice::get_dummy_descriptor_set_layout() const +Anvil::DescriptorSetLayout* Anvil::BaseDevice::get_dummy_descriptor_set_layout() const { + std::unique_lock lock(m_dummy_dsg_mutex); + + if (m_dummy_dsg_ptr == nullptr) + { + init_dummy_dsg(); + } + return m_dummy_dsg_ptr->get_descriptor_set_layout(0); } +const Anvil::ExtensionEXTSampleLocationsEntrypoints& Anvil::BaseDevice::get_extension_ext_sample_locations_entrypoints() const +{ + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->ext_sample_locations() ); + + return m_ext_sample_locations_extension_entrypoints; +} + +const Anvil::ExtensionEXTTransformFeedbackEntrypoints& Anvil::BaseDevice::get_extension_ext_transform_feedback_entrypoints() const +{ + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->ext_transform_feedback() ); + + return m_ext_transform_feedback_extension_entrypoints; +} + +bool Anvil::BaseDevice::get_memory_types_supported_for_external_handle(const Anvil::ExternalMemoryHandleTypeFlagBits& in_external_handle_type, + ExternalHandleType in_handle, + uint32_t* out_supported_memory_type_bits) const +{ + bool result = false; + + if (in_external_handle_type != Anvil::ExternalMemoryHandleTypeFlagBits::HOST_ALLOCATION_BIT_EXT && + in_external_handle_type != Anvil::ExternalMemoryHandleTypeFlagBits::HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT) + { + /* Sanity checks */ + #if defined(_WIN32) + { + if (!m_extension_enabled_info_ptr->get_device_extension_info()->khr_external_memory_win32() ) + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->khr_external_memory_win32() ); + + goto end; + } + + if (in_external_handle_type != Anvil::ExternalMemoryHandleTypeFlagBits::OPAQUE_WIN32_BIT && + in_external_handle_type != Anvil::ExternalMemoryHandleTypeFlagBits::OPAQUE_WIN32_KMT_BIT) + { + anvil_assert(in_external_handle_type == Anvil::ExternalMemoryHandleTypeFlagBits::OPAQUE_WIN32_BIT || + in_external_handle_type == Anvil::ExternalMemoryHandleTypeFlagBits::OPAQUE_WIN32_KMT_BIT); + + goto end; + } + } + #else + { + if (!m_extension_enabled_info_ptr->get_device_extension_info()->khr_external_memory_fd() ) + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->khr_external_memory_fd() ); + + goto end; + } + + if (in_external_handle_type != Anvil::ExternalMemoryHandleTypeFlagBits::OPAQUE_FD_BIT) + { + anvil_assert(in_external_handle_type == Anvil::ExternalMemoryHandleTypeFlagBits::OPAQUE_FD_BIT); + + goto end; + } + } + #endif + + /* Go ahead with the query */ + #if defined(_WIN32) + VkMemoryWin32HandlePropertiesKHR result_props; + + result_props.pNext = nullptr; + result_props.sType = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR; + #else + VkMemoryFdPropertiesKHR result_props; + + result_props.pNext = nullptr; + result_props.sType = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR; + #endif + + #if defined(_WIN32) + if (m_khr_external_memory_win32_extension_entrypoints.vkGetMemoryWin32HandlePropertiesKHR(m_device, + 
static_cast(in_external_handle_type), + in_handle, + &result_props) != VK_SUCCESS) + #else + if (m_khr_external_memory_fd_extension_entrypoints.vkGetMemoryFdPropertiesKHR(m_device, + static_cast(in_external_handle_type), + in_handle, + &result_props) != VK_SUCCESS) + #endif + { + anvil_assert_fail(); + + goto end; + } + + *out_supported_memory_type_bits = result_props.memoryTypeBits; + } + else + { + /* Sanity checks */ + if (!m_extension_enabled_info_ptr->get_device_extension_info()->ext_external_memory_host() ) + { + anvil_assert(m_extension_enabled_info_ptr->get_device_extension_info()->ext_external_memory_host() ); + + goto end; + } + + /* Go ahead with the query */ + VkMemoryHostPointerPropertiesEXT result_props; + + result_props.pNext = nullptr; + result_props.sType = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT; + + if (m_ext_external_memory_host_extension_entrypoints.vkGetMemoryHostPointerPropertiesEXT(m_device, + static_cast(in_external_handle_type), + reinterpret_cast(in_handle), + &result_props) != VK_SUCCESS) + { + anvil_assert_fail(); + + goto end; + } + + *out_supported_memory_type_bits = result_props.memoryTypeBits; + } + + /* All done */ + result = true; +end: + return result; +} + /** Please see header for specification */ uint32_t Anvil::BaseDevice::get_n_queues(uint32_t in_n_queue_family) const { - auto map_iterator = m_queue_fams.find(in_n_queue_family); + auto map_iterator = m_queue_ptrs_per_vk_queue_fam.find(in_n_queue_family); uint32_t result = 0; - if (map_iterator != m_queue_fams.end()) + if (map_iterator != m_queue_ptrs_per_vk_queue_fam.end()) { result = static_cast(map_iterator->second.size() ); } @@ -136,18 +487,118 @@ uint32_t Anvil::BaseDevice::get_n_queues(uint32_t in_n_queue_family) const return result; } -/** Please see header for specification */ -std::shared_ptr Anvil::BaseDevice::get_queue(uint32_t in_n_queue_family, - uint32_t in_n_queue) const +/* Please see header for specification */ +void Anvil::BaseDevice::add_physical_device_features_to_chainer(Anvil::StructChainer* in_struct_chainer_ptr) const { - auto map_iterator = m_queue_fams.find(in_n_queue_family); - std::shared_ptr result_ptr; + const auto& features = get_physical_device_features(); + const bool should_add_features_struct = m_create_info_ptr->get_physical_device_ptrs().at(0)->get_instance()->get_enabled_extensions_info()->khr_get_physical_device_properties2(); - if (map_iterator != m_queue_fams.end()) + anvil_assert(in_struct_chainer_ptr != nullptr); + + if (should_add_features_struct) { - if (map_iterator->second.size() > in_n_queue) + VkPhysicalDeviceFeatures2KHR features_khr; + + features_khr.features = features.core_vk1_0_features_ptr->get_vk_physical_device_features(); + features_khr.pNext = nullptr; + features_khr.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR; + + in_struct_chainer_ptr->append_struct(features_khr); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->ext_depth_clip_enable() ) + { + in_struct_chainer_ptr->append_struct(features.ext_depth_clip_enable_features_ptr->get_vk_physical_device_depth_clip_enable_features() ); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->ext_descriptor_indexing() ) + { + in_struct_chainer_ptr->append_struct(features.ext_descriptor_indexing_features_ptr->get_vk_physical_device_descriptor_indexing_features() ); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->ext_inline_uniform_block() ) + { + 
in_struct_chainer_ptr->append_struct(features.ext_inline_uniform_block_features_ptr->get_vk_physical_device_inline_uniform_block_features() ); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->ext_memory_priority() ) + { + in_struct_chainer_ptr->append_struct(features.ext_memory_priority_features_ptr->get_vk_physical_device_memory_priority_features() ); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->ext_scalar_block_layout() ) + { + in_struct_chainer_ptr->append_struct(features.ext_scalar_block_layout_features_ptr->get_vk_physical_device_scalar_block_layout_features_ext() ); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->ext_transform_feedback() ) + { + in_struct_chainer_ptr->append_struct(features.ext_transform_feedback_features_ptr->get_vk_physical_device_transform_feedback_features() ); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_16bit_storage() ) + { + in_struct_chainer_ptr->append_struct(features.khr_16bit_storage_features_ptr->get_vk_physical_device_16_bit_storage_features() ); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_8bit_storage() ) + { + in_struct_chainer_ptr->append_struct(features.khr_8bit_storage_features_ptr->get_vk_physical_device_8_bit_storage_features() ); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_multiview() ) + { + in_struct_chainer_ptr->append_struct(features.khr_multiview_features_ptr->get_vk_physical_device_multiview_features() ); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_sampler_ycbcr_conversion() ) + { + in_struct_chainer_ptr->append_struct(features.khr_sampler_ycbcr_conversion_features_ptr->get_vk_physical_device_sampler_ycbcr_conversion_features() ); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_shader_atomic_int64() ) + { + in_struct_chainer_ptr->append_struct(features.khr_shader_atomic_int64_features_ptr->get_vk_physical_device_shader_atomic_int64_features() ); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_shader_float16_int8() ) + { + in_struct_chainer_ptr->append_struct(features.khr_float16_int8_features_ptr->get_vk_physical_device_float16_int8_features() ); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_variable_pointers() ) + { + in_struct_chainer_ptr->append_struct(features.khr_variable_pointer_features_ptr->get_vk_physical_device_variable_pointer_features() ); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_vulkan_memory_model() ) + { + in_struct_chainer_ptr->append_struct(features.khr_vulkan_memory_model_features_ptr->get_vk_physical_device_vulkan_memory_model_features() ); + } +} + +/* Please see header for specification */ +const Anvil::Instance* Anvil::BaseDevice::get_parent_instance() const +{ + return m_create_info_ptr->get_physical_device_ptrs().at(0)->get_instance(); +} + +/* Please see header for specification */ + +Anvil::Queue* Anvil::BaseDevice::get_queue(const Anvil::QueueFamilyType& in_queue_family_type, + uint32_t in_n_queue) const +{ + Anvil::Queue* result_ptr = nullptr; + + switch (in_queue_family_type) + { + case Anvil::QueueFamilyType::COMPUTE: result_ptr = get_compute_queue (in_n_queue); break; + case Anvil::QueueFamilyType::TRANSFER: result_ptr = get_transfer_queue (in_n_queue); break; + case Anvil::QueueFamilyType::UNIVERSAL: result_ptr = get_universal_queue(in_n_queue); break; + + default: { - result_ptr = 
map_iterator->second.at(in_n_queue); + anvil_assert_fail(); } } @@ -155,105 +606,157 @@ std::shared_ptr Anvil::BaseDevice::get_queue(uint32_t in_n_queue_f } /* Please see header for specification */ -void Anvil::BaseDevice::get_queue_family_indices_for_physical_device(std::weak_ptr in_physical_device_ptr, - DeviceQueueFamilyInfo* out_device_queue_family_info_ptr) const +void Anvil::BaseDevice::get_queue_family_indices_for_physical_device(const Anvil::PhysicalDevice* in_physical_device_ptr, + DeviceQueueFamilyInfo* out_device_queue_family_info_ptr) const { - std::shared_ptr physical_device_locked_ptr(in_physical_device_ptr); - - /* Retrieve a compute-only queue, and then look for another queue which can handle graphics tasks. */ - const size_t n_queue_families = physical_device_locked_ptr->get_queue_families().size(); - uint32_t result_compute_queue_family_index = UINT32_MAX; - uint32_t result_dma_queue_family_index = UINT32_MAX; - uint32_t result_n_compute_queues = 0; - uint32_t result_n_dma_queues = 0; - uint32_t result_n_universal_queues = 0; - uint32_t result_universal_queue_family_index = UINT32_MAX; + const auto n_queue_families = static_cast(in_physical_device_ptr->get_queue_families().size() ); + std::vector result_compute_queue_families; + std::vector result_transfer_queue_families; + std::vector result_universal_queue_families; for (uint32_t n_iteration = 0; n_iteration < 3; ++n_iteration) { - for (size_t n_queue_family_index = 0; - n_queue_family_index < n_queue_families; - ++n_queue_family_index) + for (uint32_t n_queue_family_index = 0; + n_queue_family_index < n_queue_families; + ++n_queue_family_index) { - const Anvil::QueueFamilyInfo& current_queue_family = physical_device_locked_ptr->get_queue_families()[n_queue_family_index]; + const Anvil::QueueFamilyInfo& current_queue_family = in_physical_device_ptr->get_queue_families()[n_queue_family_index]; if (n_iteration == 0) { - if ( (current_queue_family.flags & VK_QUEUE_COMPUTE_BIT) && - !(current_queue_family.flags & VK_QUEUE_GRAPHICS_BIT) ) + if ( ((current_queue_family.flags & Anvil::QueueFlagBits::COMPUTE_BIT) != 0) && + ((current_queue_family.flags & Anvil::QueueFlagBits::GRAPHICS_BIT) == 0) ) { - result_compute_queue_family_index = (uint32_t) n_queue_family_index; - result_n_compute_queues = current_queue_family.n_queues; - - break; + result_compute_queue_families.push_back( + DeviceQueueFamilyMemberInfo(n_queue_family_index, + current_queue_family.n_queues) + ); } } else if (n_iteration == 1) { - if (current_queue_family.flags & VK_QUEUE_GRAPHICS_BIT && - n_queue_family_index != result_compute_queue_family_index) + if ((current_queue_family.flags & Anvil::QueueFlagBits::GRAPHICS_BIT) != 0) { - result_universal_queue_family_index = (uint32_t) n_queue_family_index; - result_n_universal_queues = current_queue_family.n_queues; - - break; + if (std::find(result_compute_queue_families.begin(), + result_compute_queue_families.end (), + n_queue_family_index) == result_compute_queue_families.end() ) + { + result_universal_queue_families.push_back( + DeviceQueueFamilyMemberInfo(n_queue_family_index, + current_queue_family.n_queues) + ); + } } } else if (n_iteration == 2) { - if (current_queue_family.flags & VK_QUEUE_TRANSFER_BIT && - n_queue_family_index != result_compute_queue_family_index && - n_queue_family_index != result_universal_queue_family_index) + if ((current_queue_family.flags & Anvil::QueueFlagBits::TRANSFER_BIT) != 0) { - result_dma_queue_family_index = (uint32_t) n_queue_family_index; - result_n_dma_queues = 
current_queue_family.n_queues; - - break; + if (std::find(result_compute_queue_families.begin(), + result_compute_queue_families.end (), + n_queue_family_index) == result_compute_queue_families.end() && + std::find(result_universal_queue_families.begin(), + result_universal_queue_families.end (), + n_queue_family_index) == result_universal_queue_families.end() ) + { + result_transfer_queue_families.push_back( + DeviceQueueFamilyMemberInfo(n_queue_family_index, + current_queue_family.n_queues) + ); + } } } } } /* NOTE: Vulkan API only guarantees universal queue family's availability */ - anvil_assert(result_universal_queue_family_index != UINT32_MAX); + anvil_assert(result_universal_queue_families.size() > 0); + + out_device_queue_family_info_ptr->queue_families[Anvil::QueueFamilyType::COMPUTE] = result_compute_queue_families; + out_device_queue_family_info_ptr->queue_families[Anvil::QueueFamilyType::TRANSFER] = result_transfer_queue_families; + out_device_queue_family_info_ptr->queue_families[Anvil::QueueFamilyType::UNIVERSAL] = result_universal_queue_families; + + for (Anvil::QueueFamilyType current_queue_family_type = Anvil::QueueFamilyType::FIRST; + current_queue_family_type != Anvil::QueueFamilyType::COUNT; + current_queue_family_type = static_cast(static_cast(current_queue_family_type) + 1) ) + { + uint32_t n_total_queues = 0; - out_device_queue_family_info_ptr->family_index[Anvil::QUEUE_FAMILY_TYPE_COMPUTE] = result_compute_queue_family_index; - out_device_queue_family_info_ptr->family_index[Anvil::QUEUE_FAMILY_TYPE_TRANSFER] = result_dma_queue_family_index; - out_device_queue_family_info_ptr->family_index[Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL] = result_universal_queue_family_index; + for (const auto& current_queue_fam : out_device_queue_family_info_ptr->queue_families[current_queue_family_type]) + { + n_total_queues += current_queue_fam.n_queues; + } + + out_device_queue_family_info_ptr->n_total_queues_per_family[static_cast(current_queue_family_type)] = n_total_queues; + } +} + +/** Please see header for specification */ +bool Anvil::BaseDevice::get_queue_family_indices_for_queue_family_type(const Anvil::QueueFamilyType& in_queue_family_type, + uint32_t* out_opt_n_queue_family_indices_ptr, + const uint32_t** out_opt_queue_family_indices_ptr_ptr) const +{ + bool result = false; + uint32_t result_n_queue_family_indices = 0; + const uint32_t* result_queue_family_indices_ptr = nullptr; - for (uint32_t n_queue_family_type = 0; - n_queue_family_type < static_cast(Anvil::QUEUE_FAMILY_TYPE_COUNT); - ++n_queue_family_type) + auto map_iterator = m_queue_family_type_to_queue_family_indices.find(in_queue_family_type); + + if (map_iterator != m_queue_family_type_to_queue_family_indices.end() ) + { + result = true; + result_n_queue_family_indices = static_cast(map_iterator->second.size() ); + result_queue_family_indices_ptr = (map_iterator->second.size() > 0) ? 
&map_iterator->second.at(0) + : nullptr; + } + else { - out_device_queue_family_info_ptr->family_type[n_queue_family_type] = Anvil::QUEUE_FAMILY_TYPE_UNDEFINED; + result = true; } - if (result_compute_queue_family_index != UINT32_MAX) + if (out_opt_n_queue_family_indices_ptr != nullptr) { - out_device_queue_family_info_ptr->family_type[result_compute_queue_family_index] = Anvil::QUEUE_FAMILY_TYPE_COMPUTE; + *out_opt_n_queue_family_indices_ptr = result_n_queue_family_indices; } - if (result_dma_queue_family_index != UINT32_MAX) + if (out_opt_queue_family_indices_ptr_ptr != nullptr) { - out_device_queue_family_info_ptr->family_type[result_dma_queue_family_index] = Anvil::QUEUE_FAMILY_TYPE_TRANSFER; + *out_opt_queue_family_indices_ptr_ptr = result_queue_family_indices_ptr; } - if (result_universal_queue_family_index != UINT32_MAX) + return result; +} + +/** Please see header for specification */ +Anvil::QueueFamilyType Anvil::BaseDevice::get_queue_family_type(uint32_t in_queue_family_index) const +{ + return m_queue_family_index_to_types.at(in_queue_family_index).at(0); +} + +/** Please see header for specification */ +Anvil::Queue* Anvil::BaseDevice::get_queue_for_queue_family_index(uint32_t in_n_queue_family, + uint32_t in_n_queue) const +{ + auto map_iterator = m_queue_ptrs_per_vk_queue_fam.find(in_n_queue_family); + Anvil::Queue* result_ptr = nullptr; + + if (map_iterator != m_queue_ptrs_per_vk_queue_fam.end()) { - out_device_queue_family_info_ptr->family_type[result_universal_queue_family_index] = Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL; + if (map_iterator->second.size() > in_n_queue) + { + result_ptr = map_iterator->second.at(in_n_queue); + } } - out_device_queue_family_info_ptr->n_queues[Anvil::QUEUE_FAMILY_TYPE_COMPUTE] = result_n_compute_queues; - out_device_queue_family_info_ptr->n_queues[Anvil::QUEUE_FAMILY_TYPE_TRANSFER] = result_n_dma_queues; - out_device_queue_family_info_ptr->n_queues[Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL] = result_n_universal_queues; + return result_ptr; } /* Please see header for specification */ -bool Anvil::BaseDevice::get_sample_locations(VkSampleCountFlagBits in_sample_count, +bool Anvil::BaseDevice::get_sample_locations(Anvil::SampleCountFlagBits in_sample_count, std::vector* out_result_ptr) const { bool result = true; @@ -262,12 +765,24 @@ bool Anvil::BaseDevice::get_sample_locations(VkSampleCountFlagBits in_sam switch (get_type() ) { - case Anvil::DEVICE_TYPE_SINGLE_GPU: + case Anvil::DeviceType::MULTI_GPU: + { + /* Sample locations must not differ for all devices within a given device group, so it technically + * doesn't matter which physical device we choose to check for standard sample locations support. 
+ */ + const Anvil::MGPUDevice* mgpu_device_ptr = reinterpret_cast(this); + anvil_assert(mgpu_device_ptr != nullptr); + + standard_sample_locations_support = (mgpu_device_ptr->get_physical_device_properties().core_vk1_0_properties_ptr->limits.standard_sample_locations); + break; + } + + case Anvil::DeviceType::SINGLE_GPU: { const Anvil::SGPUDevice* sgpu_device_ptr = reinterpret_cast(this); anvil_assert(sgpu_device_ptr != nullptr); - standard_sample_locations_support = (sgpu_device_ptr->get_physical_device_properties().limits.standardSampleLocations == VK_TRUE); + standard_sample_locations_support = (sgpu_device_ptr->get_physical_device_properties().core_vk1_0_properties_ptr->limits.standard_sample_locations); break; } @@ -286,7 +801,7 @@ bool Anvil::BaseDevice::get_sample_locations(VkSampleCountFlagBits in_sam switch (in_sample_count) { - case VK_SAMPLE_COUNT_1_BIT: + case Anvil::SampleCountFlagBits::_1_BIT: { out_result_ptr->clear(); @@ -295,7 +810,7 @@ bool Anvil::BaseDevice::get_sample_locations(VkSampleCountFlagBits in_sam break; } - case VK_SAMPLE_COUNT_2_BIT: + case Anvil::SampleCountFlagBits::_2_BIT: { out_result_ptr->clear(); @@ -306,7 +821,7 @@ bool Anvil::BaseDevice::get_sample_locations(VkSampleCountFlagBits in_sam } - case VK_SAMPLE_COUNT_4_BIT: + case Anvil::SampleCountFlagBits::_4_BIT: { out_result_ptr->clear(); @@ -318,7 +833,7 @@ bool Anvil::BaseDevice::get_sample_locations(VkSampleCountFlagBits in_sam break; } - case VK_SAMPLE_COUNT_8_BIT: + case Anvil::SampleCountFlagBits::_8_BIT: { out_result_ptr->clear(); @@ -334,7 +849,7 @@ bool Anvil::BaseDevice::get_sample_locations(VkSampleCountFlagBits in_sam break; } - case VK_SAMPLE_COUNT_16_BIT: + case Anvil::SampleCountFlagBits::_16_BIT: { out_result_ptr->clear(); @@ -371,11 +886,11 @@ bool Anvil::BaseDevice::get_sample_locations(VkSampleCountFlagBits in_sam } /* Please see header for specification */ -std::shared_ptr Anvil::BaseDevice::get_sparse_binding_queue(uint32_t in_n_queue, - VkQueueFlags in_opt_required_queue_flags) const +Anvil::Queue* Anvil::BaseDevice::get_sparse_binding_queue(uint32_t in_n_queue, + Anvil::QueueFlags in_opt_required_queue_flags) const { - uint32_t n_queues_found = 0; - std::shared_ptr result_ptr; + uint32_t n_queues_found = 0; + Anvil::Queue* result_ptr = nullptr; for (auto queue_ptr : m_sparse_binding_queues) { @@ -400,141 +915,186 @@ std::shared_ptr Anvil::BaseDevice::get_sparse_binding_queue(uint32 } /* Initializes a new Device instance */ -void Anvil::BaseDevice::init(const DeviceExtensionConfiguration& in_extensions, - const std::vector& in_layers, - bool in_transient_command_buffer_allocs_only, - bool in_support_resettable_command_buffer_allocs) +bool Anvil::BaseDevice::init() { + const auto parent_instance_ptr(m_create_info_ptr->get_physical_device_ptrs().at(0)->get_instance() ); - std::vector extensions_final; - VkPhysicalDeviceFeatures features_to_enable; - std::shared_ptr instance_locked_ptr (m_parent_instance_ptr); - const bool is_validation_enabled(instance_locked_ptr->is_validation_enabled() ); - std::vector layers_final; + std::map extensions_final_enabled_status; + const bool is_validation_enabled(parent_instance_ptr->is_validation_enabled() ); + std::vector layers_final; + const auto mt_safety (Anvil::Utils::convert_boolean_to_mt_safety_enum(is_mt_safe()) ); + bool result (false); /* If validation is enabled, retrieve names of all supported validation layers and * append them to the list of layers the user has already specified. 
**/ - for (auto current_layer : in_layers) + for (auto current_layer : m_create_info_ptr->get_layers_to_enable() ) { layers_final.push_back(current_layer.c_str() ); } if (is_validation_enabled) { - if (is_layer_supported("VK_LAYER_LUNARG_standard_validation") ) - { - layers_final.push_back("VK_LAYER_LUNARG_standard_validation"); - } - else + // anvil_assert(is_layer_supported("VK_LAYER_KHRONOS_validation") ); + if (is_layer_supported("VK_LAYER_KHRONOS_validation") ) { - anvil_assert(is_layer_supported("VK_LAYER_LUNARG_core_validation") ); - - layers_final.push_back("VK_LAYER_LUNARG_core_validation"); + layers_final.push_back("VK_LAYER_KHRONOS_validation"); } } /* Go through the extension struct, verify availability of the requested extensions, - * and cache the ones that have been requested and which are available in a linear vector. */ - typedef struct ExtensionItem + * and cache the ones that have been requested and which are available in a vector. */ { - const char* name; - ExtensionAvailability requested_availability; - bool* ext_enabled_flag_ptr; + bool is_amd_negative_viewport_height_defined = false; + bool is_khr_maintenance1_defined = false; - ExtensionItem(const char* in_name, - ExtensionAvailability in_availability, - bool* in_ext_enabled_flag_ptr) + for (const auto& current_extension : m_create_info_ptr->get_extension_configuration().extension_status) { - ext_enabled_flag_ptr = in_ext_enabled_flag_ptr; - name = in_name; - requested_availability = in_availability; - } - } ExtensionItem; - - std::vector specified_extensions = - { - ExtensionItem(VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME, in_extensions.amd_draw_indirect_count, &m_amd_draw_indirect_count_enabled), - ExtensionItem(VK_AMD_GCN_SHADER_EXTENSION_NAME, in_extensions.amd_gcn_shader, &m_amd_gcn_shader_enabled), - ExtensionItem(VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME, in_extensions.amd_gpu_shader_half_float, &m_amd_gpu_shader_half_float_enabled), - ExtensionItem(VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME, in_extensions.amd_gpu_shader_int16, &m_amd_gpu_shader_int16_enabled), - ExtensionItem(VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME, in_extensions.amd_negative_viewport_height, &m_amd_negative_viewport_height_enabled), - ExtensionItem(VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME, in_extensions.amd_rasterization_order, &m_amd_rasterization_order_enabled), - ExtensionItem(VK_AMD_SHADER_BALLOT_EXTENSION_NAME, in_extensions.amd_shader_ballot, &m_amd_shader_ballot_enabled), - ExtensionItem(VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME, in_extensions.amd_shader_explicit_vertex_parameter, &m_amd_shader_explicit_vertex_parameter_enabled), - ExtensionItem(VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME, in_extensions.amd_shader_trinary_minmax, &m_amd_shader_trinary_minmax_enabled), - ExtensionItem(VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME, in_extensions.amd_texture_gather_bias_lod, &m_amd_texture_gather_bias_lod_enabled), - ExtensionItem(VK_EXT_DEBUG_MARKER_EXTENSION_NAME, in_extensions.ext_debug_marker, &m_ext_debug_marker_enabled), - ExtensionItem(VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME, in_extensions.ext_shader_subgroup_ballot, &m_ext_shader_subgroup_ballot_enabled), - ExtensionItem(VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME, in_extensions.ext_shader_subgroup_vote, &m_ext_shader_subgroup_vote_enabled), - ExtensionItem(VK_KHR_16BIT_STORAGE_EXTENSION_NAME, in_extensions.khr_16bit_storage, &m_khr_16bit_storage_enabled), - ExtensionItem(VK_KHR_MAINTENANCE1_EXTENSION_NAME, in_extensions.khr_maintenance1, 
&m_khr_maintenance1_enabled), - ExtensionItem("VK_KHR_storage_buffer_storage_class", in_extensions.khr_storage_buffer_storage_class, &m_khr_storage_buffer_storage_class_enabled), - ExtensionItem(VK_KHR_SURFACE_EXTENSION_NAME, in_extensions.khr_surface, &m_khr_surface_enabled), - ExtensionItem(VK_KHR_SWAPCHAIN_EXTENSION_NAME, in_extensions.khr_swapchain, &m_khr_swapchain_enabled), - }; - - for (const auto& misc_extension : in_extensions.other_extensions) - { - specified_extensions.push_back( - ExtensionItem(misc_extension.first.c_str(), - misc_extension.second, - nullptr) - ); - } - - for (const auto& current_extension : specified_extensions) - { - const bool is_ext_supported = is_physical_device_extension_supported(current_extension.name); + const bool is_ext_supported = is_physical_device_extension_supported(current_extension.first); - if (current_extension.ext_enabled_flag_ptr != nullptr) - { - *current_extension.ext_enabled_flag_ptr = false; - } + is_amd_negative_viewport_height_defined |= (current_extension.first == VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME && + current_extension.second != Anvil::ExtensionAvailability::IGNORE); + is_khr_maintenance1_defined |= (current_extension.first == VK_KHR_MAINTENANCE1_EXTENSION_NAME && + current_extension.second != Anvil::ExtensionAvailability::IGNORE); - switch (current_extension.requested_availability) - { - case EXTENSION_AVAILABILITY_ENABLE_IF_AVAILABLE: + switch (current_extension.second) { - if (is_ext_supported) + case Anvil::ExtensionAvailability::ENABLE_IF_AVAILABLE: { - extensions_final.push_back(current_extension.name); + extensions_final_enabled_status[current_extension.first] = is_ext_supported; + + break; } - break; - } + case Anvil::ExtensionAvailability::IGNORE: + { + extensions_final_enabled_status[current_extension.first] = false; - case EXTENSION_AVAILABILITY_IGNORE: - { - continue; - } + break; + } - case EXTENSION_AVAILABILITY_REQUIRE: - { - if (!is_ext_supported) + case Anvil::ExtensionAvailability::REQUIRE: { - char temp[1024]; - - if (snprintf(temp, - sizeof(temp), - "Device extension [%s] is unsupported", - current_extension.name) > 0) + if (!is_ext_supported) { - fprintf(stderr, - "%s", - temp); + char temp[1024]; + + if (snprintf(temp, + sizeof(temp), + "Device extension [%s] is unsupported", + current_extension.first.c_str() ) > 0) + { + fprintf(stderr, + "%s", + temp); + } + + anvil_assert_fail(); } + extensions_final_enabled_status[current_extension.first.c_str()] = is_ext_supported; + + break; + } + + default: + { anvil_assert_fail(); - continue; } + } + } + + if (is_amd_negative_viewport_height_defined && + is_khr_maintenance1_defined) + { + /* VK_AMD_negative_viewport_height and VK_KHR_maintenance1 extensions are mutually exclusive. */ + anvil_assert_fail(); + + goto end; + } + + /* Instantiate the device. Actual behavior behind this is implemented by the overriding class. 
*/ + { + m_extension_enabled_info_ptr = Anvil::ExtensionInfo::create_device_extension_info(extensions_final_enabled_status, + true); /* in_unspecified_extension_name_value */ + + anvil_assert(m_device == VK_NULL_HANDLE); + { + std::vector extension_name_raw_ptrs; + + extension_name_raw_ptrs.reserve(extensions_final_enabled_status.size() ); - if (is_ext_supported) + for (const auto& current_extension_data : extensions_final_enabled_status) { - extensions_final.push_back(current_extension.name); + if (current_extension_data.second) + { + extension_name_raw_ptrs.push_back(current_extension_data.first.c_str() ); + } } - break; + create_device(extension_name_raw_ptrs, + layers_final, + &m_device_queue_families); } + anvil_assert(m_device != VK_NULL_HANDLE); + } + + /* Re-create the "extension enabled info" variable, this time taking into account contexts newer than 1.0. + * + * This is important for applications that use VK 1.1 contexts (or newer) and do not take into account that Vulkan does not + * require implementations to report support for extensions that have been folded into core. + */ + if (m_create_info_ptr->get_physical_device_ptrs().at(0)->supports_core_vk1_1() ) + { + extensions_final_enabled_status[VK_KHR_16BIT_STORAGE_EXTENSION_NAME] = true; + extensions_final_enabled_status[VK_KHR_BIND_MEMORY_2_EXTENSION_NAME] = true; + extensions_final_enabled_status[VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME] = true; + extensions_final_enabled_status[VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME] = true; + extensions_final_enabled_status[VK_KHR_DEVICE_GROUP_EXTENSION_NAME] = true; + extensions_final_enabled_status[VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME] = true; + extensions_final_enabled_status[VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME] = true; + extensions_final_enabled_status[VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME] = true; + extensions_final_enabled_status[VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME] = true; + extensions_final_enabled_status[VK_KHR_MAINTENANCE1_EXTENSION_NAME] = true; + extensions_final_enabled_status[VK_KHR_MAINTENANCE2_EXTENSION_NAME] = true; + extensions_final_enabled_status[VK_KHR_MAINTENANCE3_EXTENSION_NAME] = true; + extensions_final_enabled_status[VK_KHR_MULTIVIEW_EXTENSION_NAME] = true; + extensions_final_enabled_status[VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME] = true; + extensions_final_enabled_status[VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME] = true; + extensions_final_enabled_status[VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME] = true; + extensions_final_enabled_status[VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME] = true; + extensions_final_enabled_status[VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME] = true; + + m_extension_enabled_info_ptr = Anvil::ExtensionInfo::create_device_extension_info(extensions_final_enabled_status, + true); /* in_unspecified_extension_name_value */ + } + } + + /* Retrieve device-specific func pointers */ + if (!init_extension_func_ptrs() ) + { + anvil_assert_fail(); + + goto end; + } + + /* Spawn queue wrappers */ + for (Anvil::QueueFamilyType queue_family_type = Anvil::QueueFamilyType::FIRST; + queue_family_type < Anvil::QueueFamilyType::COUNT; + queue_family_type = static_cast(static_cast(queue_family_type) + 1)) + { + const uint32_t n_queues = m_device_queue_families.n_total_queues_per_family[static_cast(queue_family_type)]; + std::vector* out_queues_ptr = nullptr; + + if (n_queues == 0) + { + continue; + } + + switch (queue_family_type) + { + case Anvil::QueueFamilyType::COMPUTE: out_queues_ptr = &m_compute_queues; break; 
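// Once init() has filled these per-family queue vectors, callers retrieve queues through the
// accessors defined earlier in this file. A minimal usage sketch (assuming `device_ptr` points
// at an already-initialized Anvil::BaseDevice; the variable name is a placeholder, only the
// accessor itself comes from the surrounding code):
//
//     Anvil::Queue* universal_queue_ptr = device_ptr->get_queue(Anvil::QueueFamilyType::UNIVERSAL, 0); // in_n_queue
//     Anvil::Queue* compute_queue_ptr   = device_ptr->get_queue(Anvil::QueueFamilyType::COMPUTE,   0); // in_n_queue
//
// get_queue() initializes its result to nullptr, so the returned pointers should be checked
// before use when the requested family type may expose no queues on the device.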
+ case Anvil::QueueFamilyType::TRANSFER: out_queues_ptr = &m_transfer_queues; break; + case Anvil::QueueFamilyType::UNIVERSAL: out_queues_ptr = &m_universal_queues; break; default: { @@ -542,38 +1102,181 @@ void Anvil::BaseDevice::init(const DeviceExtensionConfiguration& in_extensions, } } - if (is_ext_supported) + for (const auto& current_queue_fam : m_device_queue_families.queue_families[queue_family_type]) + { + auto queue_ptr_storage_ptr = &m_queue_ptrs_per_vk_queue_fam[current_queue_fam.family_index]; + + anvil_assert(std::find(m_queue_family_type_to_queue_family_indices[queue_family_type].begin(), + m_queue_family_type_to_queue_family_indices[queue_family_type].end(), + current_queue_fam.family_index) == m_queue_family_type_to_queue_family_indices[queue_family_type].end() ); + + m_queue_family_index_to_types [current_queue_fam.family_index].push_back(queue_family_type); + m_queue_family_type_to_queue_family_indices[queue_family_type].push_back (current_queue_fam.family_index); + + for (uint32_t n_queue = 0; + n_queue < current_queue_fam.n_queues; + ++n_queue) + { + const auto new_queue_global_priority = m_create_info_ptr->get_queue_global_priority(current_queue_fam.family_index, + n_queue); + std::unique_ptr new_queue_ptr = Anvil::Queue::create (this, + current_queue_fam.family_index, + n_queue, + is_mt_safe(), + new_queue_global_priority); + + { + anvil_assert(out_queues_ptr != nullptr); + + out_queues_ptr->push_back(new_queue_ptr.get() ); + } + + /* If this queue supports sparse binding ops, cache it in a separate vector as well */ + if (new_queue_ptr->supports_sparse_bindings() ) + { + m_sparse_binding_queues.push_back(new_queue_ptr.get() ); + } + + /* Cache the queue in general-purpose storage as well */ + if (std::find(queue_ptr_storage_ptr->cbegin(), + queue_ptr_storage_ptr->cend (), + new_queue_ptr.get() ) == queue_ptr_storage_ptr->cend() ) + { + queue_ptr_storage_ptr->push_back(new_queue_ptr.get() ); + } + + m_owned_queues.push_back( + std::move(new_queue_ptr) + ); + } + } + } + + /* Instantiate per-queue family command pools */ + m_command_pool_ptr_per_vk_queue_fam.resize(m_device_queue_families.queue_families.size() ); + + for (const auto& current_queue_fam : m_device_queue_families.queue_families) + { + for (const auto& current_queue_fam_queue : current_queue_fam.second) { - m_enabled_extensions.push_back(current_extension.name); + if (m_command_pool_ptr_per_vk_queue_fam.size() <= current_queue_fam_queue.family_index) + { + m_command_pool_ptr_per_vk_queue_fam.resize(current_queue_fam_queue.family_index + 1); + } + + if (m_command_pool_ptr_per_vk_queue_fam[current_queue_fam_queue.family_index] == nullptr) + { + m_command_pool_ptr_per_vk_queue_fam[current_queue_fam_queue.family_index] = + Anvil::CommandPool::create(this, + m_create_info_ptr->get_helper_command_pool_create_flags(), + current_queue_fam_queue.family_index, + mt_safety); + + anvil_assert(m_command_pool_ptr_per_vk_queue_fam[current_queue_fam_queue.family_index] != nullptr); + } } + } + + /* Set up shader module cache, if one was requested. 
*/ + if (m_create_info_ptr->should_enable_shader_module_cache() ) + { + m_shader_module_cache_ptr = Anvil::ShaderModuleCache::create(); + } + + /* Set up the pipeline cache */ + { + auto pipeline_cache_ptr = m_create_info_ptr->get_pipeline_cache_ptr(); - if (current_extension.ext_enabled_flag_ptr != nullptr) + if (pipeline_cache_ptr == nullptr) + { + m_pipeline_cache_ptr = Anvil::PipelineCache::create(this, + is_mt_safe() ); + } + else { - *current_extension.ext_enabled_flag_ptr = is_ext_supported; + m_pipeline_cache_ptr = Anvil::PipelineCacheUniquePtr(pipeline_cache_ptr, + [](Anvil::PipelineCache*){}); } } - if (m_amd_negative_viewport_height_enabled && - m_khr_maintenance1_enabled) + /* Cache a pipeline layout manager instance. */ + m_pipeline_layout_manager_ptr = Anvil::PipelineLayoutManager::create(this, + is_mt_safe() ); + + /* Cache a descriptor set layout manager. */ + m_descriptor_set_layout_manager_ptr = Anvil::DescriptorSetLayoutManager::create(this, + is_mt_safe() ); + + /* Initialize compute & graphics pipeline managers */ + m_compute_pipeline_manager_ptr = Anvil::ComputePipelineManager::create (this, + is_mt_safe() , + true /* use_pipeline_cache */, + m_pipeline_cache_ptr.get() ); + m_graphics_pipeline_manager_ptr = Anvil::GraphicsPipelineManager::create(this, + is_mt_safe() , + true /* use_pipeline_cache */, + m_pipeline_cache_ptr.get() ); + + /* Continue with specialized initialization */ + init_device(); + + result = true; +end: + return result; +} + +bool Anvil::BaseDevice::init_dummy_dsg() const +{ + bool result = false; + + /* Initialize a dummy descriptor set group */ + std::vector dummy_ds_create_info_ptrs(1); + std::vector dummy_overhead_allocs; + + dummy_overhead_allocs.push_back( + Anvil::OverheadAllocation(Anvil::DescriptorType::SAMPLER, + 1) /* in_n_overhead_allocs */ + ); + + dummy_ds_create_info_ptrs[0] = Anvil::DescriptorSetCreateInfo::create(); + dummy_ds_create_info_ptrs[0]->add_binding(0, /* n_binding */ + Anvil::DescriptorType::UNKNOWN, + 0, /* n_elements */ + Anvil::ShaderStageFlagBits::NONE); /* stage_flags */ + + m_dummy_dsg_ptr = Anvil::DescriptorSetGroup::create(this, + dummy_ds_create_info_ptrs, + Anvil::DescriptorPoolCreateFlagBits::NONE, + Anvil::MTSafety::DISABLED, + dummy_overhead_allocs); + + if (m_dummy_dsg_ptr == nullptr) { - /* VK_AMD_negative_viewport_height and VK_KHR_maintenance1 extensions are mutually exclusive. */ - anvil_assert_fail(); + anvil_assert(m_dummy_dsg_ptr != nullptr); + + goto end; } - /* Instantiate the device. Actual behavior behind this is implemented by the overriding class. 
*/ - features_to_enable = get_physical_device_features(); + m_dummy_dsg_ptr->get_descriptor_set(0)->update(); + + result = true; +end: + return result; +} + +/** Please see header for specification */ +bool Anvil::BaseDevice::init_extension_func_ptrs() +{ + const bool is_core_vk11_device(m_create_info_ptr->get_physical_device_ptrs().at(0)->supports_core_vk1_1() ); - anvil_assert(m_device == VK_NULL_HANDLE); + if (m_extension_enabled_info_ptr->get_device_extension_info()->amd_buffer_marker() ) { - create_device(extensions_final, - layers_final, - features_to_enable, - &m_device_queue_families); + m_amd_buffer_marker_extension_entrypoints.vkCmdWriteBufferMarkerAMD = reinterpret_cast(get_proc_address("vkCmdWriteBufferMarkerAMD") ); + + anvil_assert(m_amd_buffer_marker_extension_entrypoints.vkCmdWriteBufferMarkerAMD != nullptr); } - anvil_assert(m_device != VK_NULL_HANDLE); - /* Retrieve device-specific func pointers */ - if (m_amd_draw_indirect_count_enabled) + if (m_extension_enabled_info_ptr->get_device_extension_info()->amd_draw_indirect_count() ) { m_amd_draw_indirect_count_extension_entrypoints.vkCmdDrawIndexedIndirectCountAMD = reinterpret_cast(get_proc_address("vkCmdDrawIndexedIndirectCountAMD") ); m_amd_draw_indirect_count_extension_entrypoints.vkCmdDrawIndirectCountAMD = reinterpret_cast (get_proc_address("vkCmdDrawIndirectCountAMD") ); @@ -582,7 +1285,14 @@ void Anvil::BaseDevice::init(const DeviceExtensionConfiguration& in_extensions, anvil_assert(m_amd_draw_indirect_count_extension_entrypoints.vkCmdDrawIndirectCountAMD != nullptr); } - if (m_ext_debug_marker_enabled) + if (m_extension_enabled_info_ptr->get_device_extension_info()->amd_shader_info() ) + { + m_amd_shader_info_extension_entrypoints.vkGetShaderInfoAMD = reinterpret_cast(get_proc_address("vkGetShaderInfoAMD") ); + + anvil_assert(m_amd_shader_info_extension_entrypoints.vkGetShaderInfoAMD != nullptr); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->ext_debug_marker() ) { m_ext_debug_marker_extension_entrypoints.vkCmdDebugMarkerBeginEXT = reinterpret_cast (get_proc_address("vkCmdDebugMarkerBeginEXT") ); m_ext_debug_marker_extension_entrypoints.vkCmdDebugMarkerEndEXT = reinterpret_cast (get_proc_address("vkCmdDebugMarkerEndEXT") ); @@ -597,14 +1307,254 @@ void Anvil::BaseDevice::init(const DeviceExtensionConfiguration& in_extensions, anvil_assert(m_ext_debug_marker_extension_entrypoints.vkDebugMarkerSetObjectTagEXT != nullptr); } - if (m_khr_maintenance1_enabled) + if (m_extension_enabled_info_ptr->get_device_extension_info()->ext_external_memory_host() ) + { + m_ext_external_memory_host_extension_entrypoints.vkGetMemoryHostPointerPropertiesEXT = reinterpret_cast(get_proc_address("vkGetMemoryHostPointerPropertiesEXT") ); + + anvil_assert(m_ext_external_memory_host_extension_entrypoints.vkGetMemoryHostPointerPropertiesEXT != nullptr); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->ext_hdr_metadata() ) { - m_khr_maintenance1_extension_entrypoints.vkTrimCommandPoolKHR = reinterpret_cast(get_proc_address("vkTrimCommandPoolKHR") ); + m_ext_hdr_metadata_extension_entrypoints.vkSetHdrMetadataEXT = reinterpret_cast(get_proc_address("vkSetHdrMetadataEXT") ); + + anvil_assert(m_ext_hdr_metadata_extension_entrypoints.vkSetHdrMetadataEXT != nullptr); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->ext_sample_locations() ) + { + m_ext_sample_locations_extension_entrypoints.vkCmdSetSampleLocationsEXT = reinterpret_cast (get_proc_address("vkCmdSetSampleLocationsEXT") 
); + m_ext_sample_locations_extension_entrypoints.vkGetPhysicalDeviceMultisamplePropertiesEXT = reinterpret_cast(get_proc_address("vkGetPhysicalDeviceMultisamplePropertiesEXT") ); + + anvil_assert(m_ext_sample_locations_extension_entrypoints.vkCmdSetSampleLocationsEXT != nullptr); + // anvil_assert(m_ext_sample_locations_extension_entrypoints.vkGetPhysicalDeviceMultisamplePropertiesEXT != nullptr); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->ext_transform_feedback() ) + { + m_ext_transform_feedback_extension_entrypoints.vkCmdBeginQueryIndexedEXT = reinterpret_cast (get_proc_address("vkCmdBeginQueryIndexedEXT") ); + m_ext_transform_feedback_extension_entrypoints.vkCmdBeginTransformFeedbackEXT = reinterpret_cast (get_proc_address("vkCmdBeginTransformFeedbackEXT") ); + m_ext_transform_feedback_extension_entrypoints.vkCmdBindTransformFeedbackBuffersEXT = reinterpret_cast(get_proc_address("vkCmdBindTransformFeedbackBuffersEXT") ); + m_ext_transform_feedback_extension_entrypoints.vkCmdDrawIndirectByteCountEXT = reinterpret_cast (get_proc_address("vkCmdDrawIndirectByteCountEXT") ); + m_ext_transform_feedback_extension_entrypoints.vkCmdEndQueryIndexedEXT = reinterpret_cast (get_proc_address("vkCmdEndQueryIndexedEXT") ); + m_ext_transform_feedback_extension_entrypoints.vkCmdEndTransformFeedbackEXT = reinterpret_cast (get_proc_address("vkCmdEndTransformFeedbackEXT") ); + + anvil_assert(m_ext_transform_feedback_extension_entrypoints.vkCmdBeginQueryIndexedEXT != nullptr); + anvil_assert(m_ext_transform_feedback_extension_entrypoints.vkCmdBeginTransformFeedbackEXT != nullptr); + anvil_assert(m_ext_transform_feedback_extension_entrypoints.vkCmdBindTransformFeedbackBuffersEXT != nullptr); + anvil_assert(m_ext_transform_feedback_extension_entrypoints.vkCmdDrawIndirectByteCountEXT != nullptr); + anvil_assert(m_ext_transform_feedback_extension_entrypoints.vkCmdEndQueryIndexedEXT != nullptr); + anvil_assert(m_ext_transform_feedback_extension_entrypoints.vkCmdEndTransformFeedbackEXT != nullptr); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_descriptor_update_template() || + is_core_vk11_device) + { + const auto vk_create_descriptor_update_template_entrypoint_name = (is_core_vk11_device) ? "vkCreateDescriptorUpdateTemplate" + : "vkCreateDescriptorUpdateTemplateKHR"; + const auto vk_destroy_descriptor_update_template_entrypoint_name = (is_core_vk11_device) ? "vkDestroyDescriptorUpdateTemplate" + : "vkDestroyDescriptorUpdateTemplateKHR"; + const auto vk_update_descriptor_update_template_entrypoint_name = (is_core_vk11_device) ? 
"vkUpdateDescriptorSetWithTemplate" + : "vkUpdateDescriptorSetWithTemplateKHR"; + + m_khr_descriptor_update_template_extension_entrypoints.vkCreateDescriptorUpdateTemplateKHR = reinterpret_cast (get_proc_address(vk_create_descriptor_update_template_entrypoint_name) ); + m_khr_descriptor_update_template_extension_entrypoints.vkDestroyDescriptorUpdateTemplateKHR = reinterpret_cast(get_proc_address(vk_destroy_descriptor_update_template_entrypoint_name)); + m_khr_descriptor_update_template_extension_entrypoints.vkUpdateDescriptorSetWithTemplateKHR = reinterpret_cast(get_proc_address(vk_update_descriptor_update_template_entrypoint_name) ); + + anvil_assert(m_khr_descriptor_update_template_extension_entrypoints.vkCreateDescriptorUpdateTemplateKHR != nullptr); + anvil_assert(m_khr_descriptor_update_template_extension_entrypoints.vkDestroyDescriptorUpdateTemplateKHR != nullptr); + anvil_assert(m_khr_descriptor_update_template_extension_entrypoints.vkUpdateDescriptorSetWithTemplateKHR != nullptr); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_device_group() || + is_core_vk11_device) + { + const auto vk_cmd_dispatch_base_entrypoint_name = (is_core_vk11_device) ? "vkCmdDispatchBase" + : "vkCmdDispatchBaseKHR"; + const auto vk_cmd_set_device_mask_entrypoint_name = (is_core_vk11_device) ? "vkCmdSetDeviceMask" + : "vkCmdSetDeviceMaskKHR"; + const auto vk_get_device_group_peer_memory_features_entrypoint_name = (is_core_vk11_device) ? "vkGetDeviceGroupPeerMemoryFeatures" + : "vkGetDeviceGroupPeerMemoryFeaturesKHR"; + + m_khr_device_group_extension_entrypoints.vkCmdDispatchBaseKHR = reinterpret_cast (get_proc_address(vk_cmd_dispatch_base_entrypoint_name) ); + m_khr_device_group_extension_entrypoints.vkCmdSetDeviceMaskKHR = reinterpret_cast (get_proc_address(vk_cmd_set_device_mask_entrypoint_name) ); + m_khr_device_group_extension_entrypoints.vkGetDeviceGroupPeerMemoryFeaturesKHR = reinterpret_cast (get_proc_address(vk_get_device_group_peer_memory_features_entrypoint_name) ); + + anvil_assert(m_khr_device_group_extension_entrypoints.vkCmdDispatchBaseKHR != nullptr); + anvil_assert(m_khr_device_group_extension_entrypoints.vkCmdSetDeviceMaskKHR != nullptr); + anvil_assert(m_khr_device_group_extension_entrypoints.vkGetDeviceGroupPeerMemoryFeaturesKHR != nullptr); + + { + /* NOTE: Certain device group entrypoints are only available if KHR_swapchain is also enabled */ + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_swapchain() ) + { + m_khr_device_group_extension_entrypoints.vkAcquireNextImage2KHR = reinterpret_cast (get_proc_address("vkAcquireNextImage2KHR") ); + m_khr_device_group_extension_entrypoints.vkGetPhysicalDevicePresentRectanglesKHR = reinterpret_cast(get_proc_address("vkGetPhysicalDevicePresentRectanglesKHR") ); + m_khr_device_group_extension_entrypoints.vkGetDeviceGroupSurfacePresentModesKHR = reinterpret_cast (get_proc_address("vkGetDeviceGroupSurfacePresentModesKHR") ); + m_khr_device_group_extension_entrypoints.vkGetDeviceGroupPresentCapabilitiesKHR = reinterpret_cast (get_proc_address("vkGetDeviceGroupPresentCapabilitiesKHR") ); + + anvil_assert(m_khr_device_group_extension_entrypoints.vkAcquireNextImage2KHR != nullptr); + anvil_assert(m_khr_device_group_extension_entrypoints.vkGetDeviceGroupPresentCapabilitiesKHR != nullptr); + anvil_assert(m_khr_device_group_extension_entrypoints.vkGetDeviceGroupSurfacePresentModesKHR != nullptr); + // anvil_assert(m_khr_device_group_extension_entrypoints.vkGetPhysicalDevicePresentRectanglesKHR != nullptr); + } 
+ } + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_bind_memory2() || + is_core_vk11_device) + { + const char* vk_bind_buffer_memory_2_entrypoint_name = (is_core_vk11_device) ? "vkBindBufferMemory2" + : "vkBindBufferMemory2KHR"; + const char* vk_bind_image_memory_2_entrypoint_name = (is_core_vk11_device) ? "vkBindImageMemory2" + : "vkBindImageMemory2KHR"; + + m_khr_bind_memory2_extension_entrypoints.vkBindBufferMemory2KHR = reinterpret_cast(get_proc_address(vk_bind_buffer_memory_2_entrypoint_name)); + m_khr_bind_memory2_extension_entrypoints.vkBindImageMemory2KHR = reinterpret_cast (get_proc_address(vk_bind_image_memory_2_entrypoint_name)); + + anvil_assert(m_khr_bind_memory2_extension_entrypoints.vkBindBufferMemory2KHR != nullptr); + anvil_assert(m_khr_bind_memory2_extension_entrypoints.vkBindImageMemory2KHR != nullptr); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_create_renderpass2() ) + { + m_khr_create_renderpass2_extension_entrypoints.vkCmdBeginRenderPass2KHR = reinterpret_cast(get_proc_address("vkCmdBeginRenderPass2KHR")); + m_khr_create_renderpass2_extension_entrypoints.vkCmdEndRenderPass2KHR = reinterpret_cast (get_proc_address("vkCmdEndRenderPass2KHR")); + m_khr_create_renderpass2_extension_entrypoints.vkCmdNextSubpass2KHR = reinterpret_cast (get_proc_address("vkCmdNextSubpass2KHR")); + m_khr_create_renderpass2_extension_entrypoints.vkCreateRenderPass2KHR = reinterpret_cast (get_proc_address("vkCreateRenderPass2KHR")); + + anvil_assert(m_khr_create_renderpass2_extension_entrypoints.vkCmdBeginRenderPass2KHR != nullptr); + anvil_assert(m_khr_create_renderpass2_extension_entrypoints.vkCmdEndRenderPass2KHR != nullptr); + anvil_assert(m_khr_create_renderpass2_extension_entrypoints.vkCmdNextSubpass2KHR != nullptr); + anvil_assert(m_khr_create_renderpass2_extension_entrypoints.vkCreateRenderPass2KHR != nullptr); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_draw_indirect_count() ) + { + m_khr_draw_indirect_count_extension_entrypoints.vkCmdDrawIndexedIndirectCountKHR = reinterpret_cast(get_proc_address("vkCmdDrawIndexedIndirectCountKHR") ); + m_khr_draw_indirect_count_extension_entrypoints.vkCmdDrawIndirectCountKHR = reinterpret_cast (get_proc_address("vkCmdDrawIndirectCountKHR") ); + + anvil_assert(m_khr_draw_indirect_count_extension_entrypoints.vkCmdDrawIndexedIndirectCountKHR != nullptr); + anvil_assert(m_khr_draw_indirect_count_extension_entrypoints.vkCmdDrawIndirectCountKHR != nullptr); + } + + #if defined(_WIN32) + { + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_external_fence_win32() ) + { + m_khr_external_fence_win32_extension_entrypoints.vkGetFenceWin32HandleKHR = reinterpret_cast (get_proc_address("vkGetFenceWin32HandleKHR") ); + m_khr_external_fence_win32_extension_entrypoints.vkImportFenceWin32HandleKHR = reinterpret_cast(get_proc_address("vkImportFenceWin32HandleKHR") ); + + anvil_assert(m_khr_external_fence_win32_extension_entrypoints.vkGetFenceWin32HandleKHR != nullptr); + anvil_assert(m_khr_external_fence_win32_extension_entrypoints.vkImportFenceWin32HandleKHR != nullptr); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_external_memory_win32() ) + { + m_khr_external_memory_win32_extension_entrypoints.vkGetMemoryWin32HandleKHR = reinterpret_cast (get_proc_address("vkGetMemoryWin32HandleKHR")); + m_khr_external_memory_win32_extension_entrypoints.vkGetMemoryWin32HandlePropertiesKHR = 
reinterpret_cast(get_proc_address("vkGetMemoryWin32HandlePropertiesKHR")); + + anvil_assert(m_khr_external_memory_win32_extension_entrypoints.vkGetMemoryWin32HandleKHR != nullptr); + anvil_assert(m_khr_external_memory_win32_extension_entrypoints.vkGetMemoryWin32HandlePropertiesKHR != nullptr); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_external_semaphore_win32() ) + { + m_khr_external_semaphore_win32_extension_entrypoints.vkGetSemaphoreWin32HandleKHR = reinterpret_cast (get_proc_address("vkGetSemaphoreWin32HandleKHR")); + m_khr_external_semaphore_win32_extension_entrypoints.vkImportSemaphoreWin32HandleKHR = reinterpret_cast(get_proc_address("vkImportSemaphoreWin32HandleKHR")); + + anvil_assert(m_khr_external_semaphore_win32_extension_entrypoints.vkGetSemaphoreWin32HandleKHR != nullptr); + anvil_assert(m_khr_external_semaphore_win32_extension_entrypoints.vkImportSemaphoreWin32HandleKHR != nullptr); + } + } + #else + { + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_external_fence_fd() ) + { + m_khr_external_fence_fd_extension_entrypoints.vkGetFenceFdKHR = reinterpret_cast (get_proc_address("vkGetFenceFdKHR") ); + m_khr_external_fence_fd_extension_entrypoints.vkImportFenceFdKHR = reinterpret_cast(get_proc_address("vkImportFenceFdKHR") ); + + anvil_assert(m_khr_external_fence_fd_extension_entrypoints.vkGetFenceFdKHR != nullptr); + anvil_assert(m_khr_external_fence_fd_extension_entrypoints.vkImportFenceFdKHR != nullptr); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_external_memory_fd() ) + { + m_khr_external_memory_fd_extension_entrypoints.vkGetMemoryFdKHR = reinterpret_cast (get_proc_address("vkGetMemoryFdKHR")); + m_khr_external_memory_fd_extension_entrypoints.vkGetMemoryFdPropertiesKHR = reinterpret_cast(get_proc_address("vkGetMemoryFdPropertiesKHR")); + + anvil_assert(m_khr_external_memory_fd_extension_entrypoints.vkGetMemoryFdKHR != nullptr); + anvil_assert(m_khr_external_memory_fd_extension_entrypoints.vkGetMemoryFdPropertiesKHR != nullptr); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_external_semaphore_fd() ) + { + m_khr_external_semaphore_fd_extension_entrypoints.vkGetSemaphoreFdKHR = reinterpret_cast (get_proc_address("vkGetSemaphoreFdKHR")); + m_khr_external_semaphore_fd_extension_entrypoints.vkImportSemaphoreFdKHR = reinterpret_cast(get_proc_address("vkImportSemaphoreFdKHR")); + + anvil_assert(m_khr_external_semaphore_fd_extension_entrypoints.vkGetSemaphoreFdKHR != nullptr); + anvil_assert(m_khr_external_semaphore_fd_extension_entrypoints.vkImportSemaphoreFdKHR != nullptr); + } + } + #endif + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_get_memory_requirements2() || + is_core_vk11_device) + { + const char* vk_get_buffer_memory_requirements_2_entrypoint_name = (is_core_vk11_device) ? "vkGetBufferMemoryRequirements2" + : "vkGetBufferMemoryRequirements2KHR"; + const char* vk_get_image_memory_requirements_2_entrypoint_name = (is_core_vk11_device) ? "vkGetImageMemoryRequirements2" + : "vkGetImageMemoryRequirements2KHR"; + const char* vk_get_image_sparse_memory_requirements_2_entrypoint_name = (is_core_vk11_device) ? 
"vkGetImageSparseMemoryRequirements2" + : "vkGetImageSparseMemoryRequirements2KHR"; + + m_khr_get_memory_requirements2_extension_entrypoints.vkGetBufferMemoryRequirements2KHR = reinterpret_cast (get_proc_address(vk_get_buffer_memory_requirements_2_entrypoint_name) ); + m_khr_get_memory_requirements2_extension_entrypoints.vkGetImageMemoryRequirements2KHR = reinterpret_cast (get_proc_address(vk_get_image_memory_requirements_2_entrypoint_name) ); + m_khr_get_memory_requirements2_extension_entrypoints.vkGetImageSparseMemoryRequirements2KHR = reinterpret_cast(get_proc_address(vk_get_image_sparse_memory_requirements_2_entrypoint_name) ); + + anvil_assert(m_khr_get_memory_requirements2_extension_entrypoints.vkGetBufferMemoryRequirements2KHR != nullptr); + anvil_assert(m_khr_get_memory_requirements2_extension_entrypoints.vkGetImageMemoryRequirements2KHR != nullptr); + anvil_assert(m_khr_get_memory_requirements2_extension_entrypoints.vkGetImageSparseMemoryRequirements2KHR != nullptr); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_maintenance1() || + is_core_vk11_device) + { + const char* vk_trim_command_pool_entrypoint_name = (is_core_vk11_device) ? "vkTrimCommandPool" + : "vkTrimCommandPoolKHR"; + + m_khr_maintenance1_extension_entrypoints.vkTrimCommandPoolKHR = reinterpret_cast(get_proc_address(vk_trim_command_pool_entrypoint_name) ); anvil_assert(m_khr_maintenance1_extension_entrypoints.vkTrimCommandPoolKHR != nullptr); } - if (m_khr_swapchain_enabled) + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_maintenance3() || + is_core_vk11_device) + { + const char* vk_get_descriptor_set_layout_support_entrypoint_name = (is_core_vk11_device) ? "vkGetDescriptorSetLayoutSupport" + : "vkGetDescriptorSetLayoutSupportKHR"; + + m_khr_maintenance3_extension_entrypoints.vkGetDescriptorSetLayoutSupportKHR = reinterpret_cast(get_proc_address(vk_get_descriptor_set_layout_support_entrypoint_name) ); + + anvil_assert(m_khr_maintenance3_extension_entrypoints.vkGetDescriptorSetLayoutSupportKHR != nullptr); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_sampler_ycbcr_conversion() || + is_core_vk11_device) + { + const char* vk_create_sampler_ycbcr_conversion_entrypoint_name = (is_core_vk11_device) ? "vkCreateSamplerYcbcrConversion" + : "vkCreateSamplerYcbcrConversionKHR"; + const char* vk_destroy_sampler_ycbcr_conversion_entrypoint_name = (is_core_vk11_device) ? 
"vkDestroySamplerYcbcrConversion" + : "vkDestroySamplerYcbcrConversionKHR"; + + m_khr_sampler_ycbcr_conversion_extension_entrypoints.vkCreateSamplerYcbcrConversionKHR = reinterpret_cast (get_proc_address(vk_create_sampler_ycbcr_conversion_entrypoint_name) ); + m_khr_sampler_ycbcr_conversion_extension_entrypoints.vkDestroySamplerYcbcrConversionKHR = reinterpret_cast(get_proc_address(vk_destroy_sampler_ycbcr_conversion_entrypoint_name) ); + + anvil_assert(m_khr_sampler_ycbcr_conversion_extension_entrypoints.vkCreateSamplerYcbcrConversionKHR != nullptr); + anvil_assert(m_khr_sampler_ycbcr_conversion_extension_entrypoints.vkDestroySamplerYcbcrConversionKHR != nullptr); + } + + if (m_extension_enabled_info_ptr->get_device_extension_info()->khr_swapchain() ) { m_khr_swapchain_extension_entrypoints.vkAcquireNextImageKHR = reinterpret_cast (get_proc_address("vkAcquireNextImageKHR") ); m_khr_swapchain_extension_entrypoints.vkCreateSwapchainKHR = reinterpret_cast (get_proc_address("vkCreateSwapchainKHR") ); @@ -619,366 +1569,841 @@ void Anvil::BaseDevice::init(const DeviceExtensionConfiguration& in_extensions, anvil_assert(m_khr_swapchain_extension_entrypoints.vkQueuePresentKHR != nullptr); } - /* Spawn queue wrappers */ - for (Anvil::QueueFamilyType queue_family_type = Anvil::QUEUE_FAMILY_TYPE_FIRST; - queue_family_type < Anvil::QUEUE_FAMILY_TYPE_COUNT; - queue_family_type = static_cast(queue_family_type + 1)) + return true; +} + +/** Please see header for specification */ +bool Anvil::BaseDevice::is_compute_queue_family_index(const uint32_t& in_queue_family_index) const +{ + return (m_queue_family_index_to_types.find(in_queue_family_index) != m_queue_family_index_to_types.end()) && + (m_queue_family_index_to_types.at (in_queue_family_index).at(0) == Anvil::QueueFamilyType::COMPUTE); +} + +/** Please see header for specification */ +bool Anvil::BaseDevice::is_transfer_queue_family_index(const uint32_t& in_queue_family_index) const +{ + return (m_queue_family_index_to_types.find(in_queue_family_index) != m_queue_family_index_to_types.end()) && + (m_queue_family_index_to_types.at (in_queue_family_index).at(0) == Anvil::QueueFamilyType::TRANSFER); +} + +/** Please see header for specification */ +bool Anvil::BaseDevice::is_universal_queue_family_index(const uint32_t& in_queue_family_index) const +{ + return (m_queue_family_index_to_types.find(in_queue_family_index) != m_queue_family_index_to_types.end() ) && + (m_queue_family_index_to_types.at (in_queue_family_index).at(0) == Anvil::QueueFamilyType::UNIVERSAL); +} + +/* Please see header for specification */ +bool Anvil::BaseDevice::wait_idle() const +{ + const bool mt_safe = is_mt_safe(); + + VkResult result_vk; + + if (mt_safe) { - decltype(m_queue_fams)::iterator current_queue_fam_queues_vec_iterator; - const uint32_t n_queues = m_device_queue_families.n_queues[queue_family_type]; - std::vector >& queues = (queue_family_type == Anvil::QUEUE_FAMILY_TYPE_COMPUTE) ? m_compute_queues - : (queue_family_type == Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL) ? 
m_universal_queues - : m_transfer_queues; + for (const auto& queue_fam : m_queue_ptrs_per_vk_queue_fam) + { + for (const auto& queue_ptr : queue_fam.second) + { + queue_ptr->lock(); + } + } + } - if (n_queues == 0) + result_vk = Anvil::Vulkan::vkDeviceWaitIdle(m_device); + + if (mt_safe) + { + for (const auto& queue_fam : m_queue_ptrs_per_vk_queue_fam) { - continue; + for (const auto& queue_ptr : queue_fam.second) + { + queue_ptr->unlock(); + } } + } + + return is_vk_call_successful(result_vk); +} - /* Create a dummy queue vector and instantiate an iterator we're going to fill up with queue instances */ - anvil_assert(m_queue_fams.find(m_device_queue_families.family_index[queue_family_type]) == m_queue_fams.end() ); - m_queue_fams[m_device_queue_families.family_index[queue_family_type] ]; - current_queue_fam_queues_vec_iterator = m_queue_fams.find(m_device_queue_families.family_index[queue_family_type]); +/* Please see header for specification */ +Anvil::MGPUDevice::MGPUDevice(Anvil::DeviceCreateInfoUniquePtr in_create_info_ptr) + :BaseDevice (std::move(in_create_info_ptr) ), + m_supported_present_modes(static_cast(0) ) +{ + const auto& physical_device_ptrs = m_create_info_ptr->get_physical_device_ptrs(); + const uint32_t n_physical_device_ptrs = static_cast(physical_device_ptrs.size() ); - for (uint32_t n_queue = 0; - n_queue < n_queues; - ++n_queue) + ANVIL_REDUNDANT_VARIABLE_CONST(n_physical_device_ptrs); + + /* MGPUDevice instance should not be used for cases where SGPUDevice would do */ + anvil_assert(n_physical_device_ptrs > 1); + + /* Sanity checks */ + #ifdef _DEBUG + { + for (uint32_t n_physical_device = 0; + n_physical_device < n_physical_device_ptrs - 1; + ++n_physical_device) { - std::shared_ptr new_queue_ptr = Anvil::Queue::create(shared_from_this(), - m_device_queue_families.family_index[queue_family_type], - n_queue); + auto next_device_ptr(physical_device_ptrs[n_physical_device + 1]); + auto this_device_ptr(physical_device_ptrs[n_physical_device]); - queues.push_back(new_queue_ptr); + /* Physical devices must come from the same Vulkan instance */ + anvil_assert(this_device_ptr->get_instance() == next_device_ptr->get_instance() ); - anvil_assert(std::find(current_queue_fam_queues_vec_iterator->second.begin(), - current_queue_fam_queues_vec_iterator->second.end(), - new_queue_ptr) == current_queue_fam_queues_vec_iterator->second.end() ); + /* Physical devices must not repeat */ + anvil_assert(this_device_ptr != next_device_ptr); - current_queue_fam_queues_vec_iterator->second.push_back(new_queue_ptr); + /* All physical devices must come from the same device group */ + anvil_assert(this_device_ptr->get_device_group_index() == next_device_ptr->get_device_group_index() ); - /* If this queue supports sparse binding ops, cache it in a separate vector as well */ - if (new_queue_ptr->supports_sparse_bindings() ) - { - m_sparse_binding_queues.push_back(new_queue_ptr); - } + /* ..but must be ed unique device indices */ + anvil_assert(this_device_ptr->get_device_group_device_index() != next_device_ptr->get_device_group_device_index() ); + + /* Extension spec is a bit vague about this restriction, but our implementation assumes that all physical devices + * in a device group offer exactly the same set of queues and queue families. Their ordering also must match. 
+ */ + anvil_assert(this_device_ptr->get_queue_families() == next_device_ptr->get_queue_families() ); } } + #endif + + for (auto physical_device_ptr : physical_device_ptrs) + { + ParentPhysicalDeviceProperties device_props; + + device_props.physical_device_ptr = physical_device_ptr; + + m_parent_physical_devices.push_back (device_props); + m_parent_physical_devices_vec.push_back(physical_device_ptr); + } + + for (uint32_t n_physical_device = 0; + n_physical_device < static_cast(m_parent_physical_devices.size() ); + ++n_physical_device) + { + const auto& physical_device_info = m_parent_physical_devices.at(n_physical_device); + + anvil_assert(m_device_index_to_physical_device_props.find(n_physical_device) == m_device_index_to_physical_device_props.end() ); + m_device_index_to_physical_device_props[n_physical_device] = &physical_device_info; + } +} + +/* Please see header for specification */ +Anvil::MGPUDevice::~MGPUDevice() +{ + /* Stub */ +} + +/* Please see header for specification */ +Anvil::BaseDeviceUniquePtr Anvil::MGPUDevice::create(Anvil::DeviceCreateInfoUniquePtr in_create_info_ptr) +{ + Anvil::BaseDeviceUniquePtr result_ptr; + + /* Make sure the caller has requested the VK_KHR_device_group and VK_KHR_bind_memory2 extensions */ + #ifdef _DEBUG + { + const auto& extension_configuration = in_create_info_ptr->get_extension_configuration(); + + anvil_assert(extension_configuration.extension_status.at(VK_KHR_DEVICE_GROUP_EXTENSION_NAME) == Anvil::ExtensionAvailability::REQUIRE); + anvil_assert(extension_configuration.extension_status.at(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME) == Anvil::ExtensionAvailability::REQUIRE); + } + #endif + + result_ptr = Anvil::BaseDeviceUniquePtr( + new Anvil::MGPUDevice(std::move(in_create_info_ptr) ), + std::default_delete() + ); + + if (result_ptr != nullptr) + { + dynamic_cast(result_ptr.get() )->init(); + } + + return result_ptr; +} + +/** Please see header for specification */ +Anvil::SwapchainUniquePtr Anvil::MGPUDevice::create_swapchain(Anvil::RenderingSurface* in_parent_surface_ptr, + Anvil::Window* in_window_ptr, + Anvil::Format in_image_format, + Anvil::ColorSpaceKHR in_color_space, + Anvil::PresentModeKHR in_present_mode, + Anvil::ImageUsageFlags in_usage, + uint32_t in_n_swapchain_images, + bool in_support_SFR, + Anvil::DeviceGroupPresentModeFlags in_presentation_modes_to_support) +{ + SwapchainUniquePtr result_ptr(nullptr, + std::default_delete() ); - /* Instantiate per-queue family command pools */ - for (Anvil::QueueFamilyType queue_family_type = Anvil::QUEUE_FAMILY_TYPE_FIRST; - queue_family_type < Anvil::QUEUE_FAMILY_TYPE_COUNT; - queue_family_type = static_cast(queue_family_type + 1)) { - if (get_queue_family_index(queue_family_type) != UINT32_MAX) + auto swapchain_create_info_ptr = Anvil::SwapchainCreateInfo::create(this, + in_parent_surface_ptr, + in_window_ptr, + in_image_format, + in_color_space, + in_present_mode, + in_usage, + in_n_swapchain_images); + + swapchain_create_info_ptr->set_mt_safety (Anvil::MTSafety::ENABLED); + swapchain_create_info_ptr->set_mgpu_present_mode_flags(in_presentation_modes_to_support); + + if (in_support_SFR) { - m_command_pool_ptrs[queue_family_type] = Anvil::CommandPool::create(shared_from_this(), - in_transient_command_buffer_allocs_only, - in_support_resettable_command_buffer_allocs, - queue_family_type); + swapchain_create_info_ptr->set_flags(Anvil::SwapchainCreateFlagBits::SPLIT_INSTANCE_BIND_REGIONS_BIT); } + + result_ptr = Anvil::Swapchain::create(std::move(swapchain_create_info_ptr) ); } - /* 
Initialize a dummy descriptor set group */ - m_dummy_dsg_ptr = Anvil::DescriptorSetGroup::create(shared_from_this(), - false, /* releaseable_sets */ - 1); /* n_sets */ - - m_dummy_dsg_ptr->add_binding(0, /* n_set */ - 0, /* n_binding */ - VK_DESCRIPTOR_TYPE_SAMPLER, - 0, /* n_elements */ - VK_SHADER_STAGE_ALL); - m_dummy_dsg_ptr->get_descriptor_set_layout(0)->bake(); - m_dummy_dsg_ptr->get_descriptor_set(0)->bake(); + anvil_assert(result_ptr != nullptr); - /* Set up the pipeline cache */ - m_pipeline_cache_ptr = Anvil::PipelineCache::create(shared_from_this() ); + return result_ptr; +} + +/** Please see header for specification */ +bool Anvil::MGPUDevice::get_peer_memory_features(const Anvil::PhysicalDevice* in_local_physical_device_ptr, + const Anvil::PhysicalDevice* in_remote_physical_device_ptr, + uint32_t in_memory_heap_index, + PeerMemoryFeatureFlags* out_result_ptr) const +{ + const uint32_t& local_physical_device_index = in_local_physical_device_ptr->get_device_group_device_index(); + decltype(m_device_index_to_physical_device_props)::const_iterator local_physical_device_props_iterator; + ParentPhysicalDeviceProperties::HeapIndexToPeerMemoryFeaturesMap::const_iterator memory_heap_iterator; + const uint32_t& remote_physical_device_index = in_remote_physical_device_ptr->get_device_group_device_index(); + decltype(ParentPhysicalDeviceProperties::peer_memory_features)::const_iterator remote_physical_device_iterator; + bool result = false; + + if (in_local_physical_device_ptr == in_remote_physical_device_ptr) + { + anvil_assert(in_local_physical_device_ptr != in_remote_physical_device_ptr); + + goto end; + } + + /* Retrieve cached peer memory features data for the local phys dev + remote phys dev + heap index tuple */ + local_physical_device_props_iterator = m_device_index_to_physical_device_props.find(local_physical_device_index); + + if (local_physical_device_props_iterator == m_device_index_to_physical_device_props.end() ) + { + /* The specified local physical device is not a part of this logical device? */ + anvil_assert(local_physical_device_props_iterator != m_device_index_to_physical_device_props.end()); + + goto end; + } + + + remote_physical_device_iterator = local_physical_device_props_iterator->second->peer_memory_features.find(remote_physical_device_index); + + if (remote_physical_device_iterator == local_physical_device_props_iterator->second->peer_memory_features.end() ) + { + /* The specified remote physical device is not a part of this logical device? */ + anvil_assert(remote_physical_device_iterator != local_physical_device_props_iterator->second->peer_memory_features.end() ); + + goto end; + } + + + memory_heap_iterator = remote_physical_device_iterator->second.find(in_memory_heap_index); + + if (memory_heap_iterator == remote_physical_device_iterator->second.end() ) + { + anvil_assert(memory_heap_iterator != remote_physical_device_iterator->second.end()); + + goto end; + } + + /* All done */ + *out_result_ptr = memory_heap_iterator->second; + result = true; +end: + return result; +} + +/** Please see header for specification */ +const Anvil::PhysicalDevice* Anvil::MGPUDevice::get_physical_device(uint32_t in_n_physical_device) const +{ + anvil_assert(m_parent_physical_devices.size() > in_n_physical_device); + + return m_parent_physical_devices.at(in_n_physical_device).physical_device_ptr; +} - /* Cache a pipeline layout manager. 
This is needed to ensure the manager nevers goes out of scope while - * the device is alive */ - m_pipeline_layout_manager_ptr = Anvil::PipelineLayoutManager::create(shared_from_this() ); +bool Anvil::MGPUDevice::get_physical_device_buffer_properties(const BufferPropertiesQuery& in_query, + Anvil::BufferProperties* out_opt_result_ptr) const +{ + /* NOTE: All physical devices within a device group are assumed to report the same format properties */ + return m_parent_physical_devices.at(0).physical_device_ptr->get_buffer_properties(in_query, + out_opt_result_ptr); +} - /* Initialize compute & graphics pipeline managers +/** Please see header for specification */ +const Anvil::PhysicalDeviceFeatures& Anvil::MGPUDevice::get_physical_device_features() const +{ + /* NOTE: We're assuming here all physical devices within a device group offer the same set of features. * - * NOTE: We force pipeline cache usage here because of LunarG#223 and LunarG#227 */ - m_compute_pipeline_manager_ptr = Anvil::ComputePipelineManager::create (shared_from_this(), - true /* use_pipeline_cache */, - m_pipeline_cache_ptr); - m_graphics_pipeline_manager_ptr = Anvil::GraphicsPipelineManager::create(shared_from_this(), - true /* use_pipeline_cache */, - m_pipeline_cache_ptr); + * An assertion check to double-check this holds is implemented in the init function. + */ + return m_parent_physical_devices.at(0).physical_device_ptr->get_device_features(); +} - /* Continue with specialized initialization */ - init_device(); +bool Anvil::MGPUDevice::get_physical_device_fence_properties(const FencePropertiesQuery& in_query, + Anvil::FenceProperties* out_opt_result_ptr) const +{ + /* NOTE: All physical devices within a device group are assumed to report the same fence properties */ + return m_parent_physical_devices.at(0).physical_device_ptr->get_fence_properties(in_query, + out_opt_result_ptr); +} + +/** Please see header for specification */ +Anvil::FormatProperties Anvil::MGPUDevice::get_physical_device_format_properties(Anvil::Format in_format) const +{ + /* NOTE: All physical devices within a device group are assumed to report the same format properties */ + return m_parent_physical_devices.at(0).physical_device_ptr->get_format_properties(in_format); +} + + +/** Please see header for specification */ +bool Anvil::MGPUDevice::get_physical_device_image_format_properties(const ImageFormatPropertiesQuery& in_query, + Anvil::ImageFormatProperties* out_opt_result_ptr) const +{ + /* NOTE: All physical devices within a device group are assumed to report the same image format properties */ + return m_parent_physical_devices.at(0).physical_device_ptr->get_image_format_properties(in_query, + out_opt_result_ptr); } +/** Please see header for specification */ +const Anvil::MemoryProperties& Anvil::MGPUDevice::get_physical_device_memory_properties() const +{ + /* NOTE: We're assuming here all physical devices within a device group have the same memory caps. + * + * An assertion check to double-check this holds is implemented in the init function. + */ + return m_parent_physical_devices.at(0).physical_device_ptr->get_memory_properties(); +} + +Anvil::MultisamplePropertiesEXT Anvil::MGPUDevice::get_physical_device_multisample_properties(const Anvil::SampleCountFlagBits& in_samples) const +{ + /* NOTE: We're assuming here all physical devices within a device group have the same memory caps. 
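Note on the property getters in this region: both device wrappers resolve capability queries against parent physical device 0, relying on the assumption (asserted during init) that every GPU in a device group reports identical capabilities. A minimal caller-side sketch follows; Anvil::Format::R8G8B8A8_UNORM is an assumed enumerator name, the headers are ones this patch already references.

#include "wrappers/device.h"

/* Hedged sketch: capability queries go through the logical-device wrapper, which
 * forwards them to the first parent physical device in the group. */
void query_group_caps(Anvil::MGPUDevice* device_ptr)
{
    const Anvil::FormatProperties  format_props = device_ptr->get_physical_device_format_properties(Anvil::Format::R8G8B8A8_UNORM); /* assumed enumerator */
    const Anvil::MemoryProperties& memory_props = device_ptr->get_physical_device_memory_properties();

    /* ... pick image formats and memory types based on format_props / memory_props ... */
    (void) format_props;
    (void) memory_props;
}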
*/ + VkMultisamplePropertiesEXT result_vk; + + anvil_assert(is_extension_enabled(VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME) ); + + result_vk.maxSampleLocationGridSize.height = 0; + result_vk.maxSampleLocationGridSize.width = 0; + result_vk.pNext = nullptr; + result_vk.sType = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT; + + m_ext_sample_locations_extension_entrypoints.vkGetPhysicalDeviceMultisamplePropertiesEXT(m_parent_physical_devices.at(0).physical_device_ptr->get_physical_device(), + static_cast(in_samples), + &result_vk); + + return Anvil::MultisamplePropertiesEXT(result_vk); +} /* Please see header for specification */ -Anvil::SGPUDevice::SGPUDevice(std::weak_ptr in_physical_device_ptr) - :BaseDevice (in_physical_device_ptr.lock()->get_instance() ), - m_parent_physical_device_ptr(in_physical_device_ptr) +const Anvil::PhysicalDeviceProperties& Anvil::MGPUDevice::get_physical_device_properties() const { - /* Stub */ + /* NOTE: We're assuming here all physical devices within a device group have the same properties. + * + * An assertion check to double-check this holds is implemented in the init function. + */ + return m_parent_physical_devices.at(0).physical_device_ptr->get_device_properties(); } /* Please see header for specification */ -Anvil::SGPUDevice::~SGPUDevice() +const Anvil::QueueFamilyInfoItems& Anvil::MGPUDevice::get_physical_device_queue_families() const { - /* Stub */ + /* NOTE: We're assuming here all physical devices within a device group offer the same set of queue families. + * + * An assertion check to double-check this holds is implemented in the init function. + */ + return m_parent_physical_devices.at(0).physical_device_ptr->get_queue_families(); +} + +bool Anvil::MGPUDevice::get_physical_device_semaphore_properties(const SemaphorePropertiesQuery& in_query, + Anvil::SemaphoreProperties* out_opt_result_ptr) const +{ + /* NOTE: All physical devices within a device group are assumed to report the same semaphore properties */ + return m_parent_physical_devices.at(0).physical_device_ptr->get_semaphore_properties(in_query, + out_opt_result_ptr); } +/** Please see header for specification */ +bool Anvil::MGPUDevice::get_physical_device_sparse_image_format_properties(Anvil::Format in_format, + Anvil::ImageType in_type, + Anvil::SampleCountFlagBits in_sample_count, + Anvil::ImageUsageFlags in_usage, + Anvil::ImageTiling in_tiling, + std::vector& out_result) const +{ + /* NOTE: All physical devices within a device group are assumed to report the same sparse image format properties */ + uint32_t n_props = 0; + + Anvil::Vulkan::vkGetPhysicalDeviceSparseImageFormatProperties(m_parent_physical_devices.at(0).physical_device_ptr->get_physical_device(), + static_cast (in_format), + static_cast (in_type), + static_cast(in_sample_count), + in_usage.get_vk(), + static_cast(in_tiling), + &n_props, + nullptr); /* pProperties */ + + if (n_props > 0) + { + out_result.resize(n_props); + + Anvil::Vulkan::vkGetPhysicalDeviceSparseImageFormatProperties(m_parent_physical_devices.at(0).physical_device_ptr->get_physical_device(), + static_cast (in_format), + static_cast (in_type), + static_cast(in_sample_count), + in_usage.get_vk(), + static_cast(in_tiling), + &n_props, + reinterpret_cast(&out_result[0] )); + } + + return true; +} /* Please see header for specification */ -std::weak_ptr Anvil::SGPUDevice::create(std::weak_ptr in_physical_device_ptr, - const DeviceExtensionConfiguration& in_extensions, - const std::vector& in_layers, - bool in_transient_command_buffer_allocs_only, - bool 
in_support_resettable_command_buffer_allocs) +bool Anvil::MGPUDevice::get_physical_device_surface_capabilities(Anvil::RenderingSurface* in_surface_ptr, + Anvil::SurfaceCapabilities* out_result_ptr) const { - std::shared_ptr result_ptr; + /* NOTE: All physical devices within a device group are assumed to report the same sparse image format properties */ + return (m_khr_surface_extension_entrypoints.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(m_parent_physical_devices.at(0).physical_device_ptr->get_physical_device(), + in_surface_ptr->get_surface(), + reinterpret_cast(out_result_ptr) ) == VK_SUCCESS); +} - result_ptr = std::shared_ptr(new Anvil::SGPUDevice(in_physical_device_ptr), - Anvil::SGPUDevice::SGPUDeviceDeleter() ); +/* Please see header for specification */ +bool Anvil::MGPUDevice::get_present_compatible_physical_devices(uint32_t in_device_index, + const std::vector** out_result_ptr) const +{ + bool result = false; - result_ptr->init(in_extensions, - in_layers, - in_transient_command_buffer_allocs_only, - in_support_resettable_command_buffer_allocs); + if (m_parent_physical_devices.size() > in_device_index) + { + result = true; + + *out_result_ptr = &m_parent_physical_devices.at(in_device_index).presentation_compatible_physical_devices; + } + else + { + anvil_assert(m_parent_physical_devices.size() > in_device_index); + } + + return result; +} + +/* Please see header for specification */ +bool Anvil::MGPUDevice::get_present_rectangles(uint32_t in_device_index, + const Anvil::RenderingSurface* in_rendering_surface_ptr, + std::vector* out_result_ptr) const +{ + uint32_t n_rectangles = 0; + const Anvil::PhysicalDevice* physical_device_ptr = nullptr; + bool result = false; + VkResult result_vk; + + if (m_parent_physical_devices.size() <= in_device_index) + { + anvil_assert(!(m_parent_physical_devices.size() <= in_device_index) ); + + goto end; + } + else + { + physical_device_ptr = m_parent_physical_devices.at(in_device_index).physical_device_ptr; + } + + in_rendering_surface_ptr->lock(); + { + result_vk = m_khr_device_group_extension_entrypoints.vkGetPhysicalDevicePresentRectanglesKHR(physical_device_ptr->get_physical_device(), + in_rendering_surface_ptr->get_surface(), + &n_rectangles, + nullptr); /* pRects */ + + if (is_vk_call_successful(result_vk) ) + { + out_result_ptr->resize(n_rectangles); + + result_vk = m_khr_device_group_extension_entrypoints.vkGetPhysicalDevicePresentRectanglesKHR(physical_device_ptr->get_physical_device(), + in_rendering_surface_ptr->get_surface(), + &n_rectangles, + &((*out_result_ptr)[0])); /* pRects */ + } + } + in_rendering_surface_ptr->unlock(); + + if (!is_vk_call_successful(result_vk) ) + { + anvil_assert(is_vk_call_successful(result_vk)); + + goto end; + } + + /* All done */ + result = true; +end: + return result; +} + +/* Please see header for specification */ +void Anvil::MGPUDevice::get_queue_family_indices(DeviceQueueFamilyInfo* out_device_queue_family_info_ptr) const +{ + /* Assuming all physical devices within a device group offer the same set of queue families and queues, + * and that the ordering of queue families matches across all those physical devices, we're good to + * specify any physical device we're going to use to set up a multi-GPU VkDevice instance. 
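The device-group presentation queries added here (get_present_compatible_physical_devices, get_present_rectangles) wrap VK_KHR_device_group. A hedged usage sketch, assuming the rectangle element type is VkRect2D (as implied by vkGetPhysicalDevicePresentRectanglesKHR) and with the group size supplied by the caller:

#include <vector>

#include "wrappers/device.h"

/* Hedged sketch: ask each physical device in the group which surface regions it
 * can present to. The element type of the output vector is assumed to be VkRect2D. */
void inspect_present_rectangles(const Anvil::MGPUDevice*       mgpu_device_ptr,
                                const Anvil::RenderingSurface* surface_ptr,
                                uint32_t                       n_group_devices /* supplied by the caller */)
{
    for (uint32_t n_device = 0; n_device < n_group_devices; ++n_device)
    {
        std::vector<VkRect2D> present_rects;

        if (mgpu_device_ptr->get_present_rectangles(n_device, surface_ptr, &present_rects))
        {
            /* ... present_rects[i].offset / .extent describe regions device n_device can present to ... */
        }
    }
}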
+ */ + get_queue_family_indices_for_physical_device(m_parent_physical_devices.at(0).physical_device_ptr, + out_device_queue_family_info_ptr); +} + +/** Please see header for specification */ +const Anvil::QueueFamilyInfo* Anvil::MGPUDevice::get_queue_family_info(uint32_t in_queue_family_index) const +{ + /* NOTE: It is assumed here that all physical devices within a device group have a matching set of queue families */ + const auto physical_device_ptr(m_parent_physical_devices.at(0).physical_device_ptr); + const auto& queue_fams (physical_device_ptr->get_queue_families() ); + const Anvil::QueueFamilyInfo* result_ptr (nullptr); + + if (queue_fams.size() > in_queue_family_index) + { + result_ptr = &queue_fams.at(in_queue_family_index); + } return result_ptr; } /** Please see header for specification */ -void Anvil::SGPUDevice::create_device(const std::vector& in_extensions, - const std::vector& in_layers, - const VkPhysicalDeviceFeatures& in_features, - DeviceQueueFamilyInfo* out_queue_families_ptr) +Anvil::DeviceGroupPresentModeFlags Anvil::MGPUDevice::get_supported_present_modes_for_surface(const Anvil::RenderingSurface* in_surface_ptr) const { - VkDeviceCreateInfo create_info; - std::vector device_queue_create_info_items; - std::vector device_queue_priorities; - std::shared_ptr physical_device_locked_ptr(m_parent_physical_device_ptr); - const auto& physical_device_queue_fams(physical_device_locked_ptr->get_queue_families() ); - VkResult result (VK_ERROR_INITIALIZATION_FAILED); + VkDeviceGroupPresentModeFlagsKHR result_flags = 0; + VkResult result_vk = VK_ERROR_INITIALIZATION_FAILED; + + ANVIL_REDUNDANT_VARIABLE(result_vk); + + result_vk = m_khr_device_group_extension_entrypoints.vkGetDeviceGroupSurfacePresentModesKHR(m_device, + in_surface_ptr->get_surface(), + &result_flags); + anvil_assert_vk_call_succeeded(result_vk); + + return Anvil::DeviceGroupPresentModeFlags(static_cast(result_flags) ); +} + +/** Please see header for specification */ +void Anvil::MGPUDevice::init_device() +{ + VkResult result(VK_ERROR_INITIALIZATION_FAILED); ANVIL_REDUNDANT_VARIABLE(result); - /* Set up queue create info structure instances. - * - * Use up all available queues. 
- */ - for (uint32_t n_queue_fam = 0; - n_queue_fam < static_cast(physical_device_queue_fams.size() ); - ++n_queue_fam) + /* Extract present capabilities of each physical device in the device group */ + VkDeviceGroupPresentCapabilitiesKHR present_caps; + + present_caps.pNext = nullptr; + present_caps.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR; + + result = m_khr_device_group_extension_entrypoints.vkGetDeviceGroupPresentCapabilitiesKHR(m_device, + &present_caps); + + anvil_assert_vk_call_succeeded(result); + anvil_assert ((present_caps.modes & static_cast(Anvil::DeviceGroupPresentModeFlagBits::LOCAL_BIT_KHR) )); + + m_supported_present_modes = Anvil::DeviceGroupPresentModeFlags(static_cast(present_caps.modes) ); + + for (uint32_t n_physical_device = 0; + n_physical_device < static_cast(m_parent_physical_devices.size() ); + ++n_physical_device) { - const auto& current_queue_fam(physical_device_queue_fams.at(n_queue_fam) ); + auto& physical_device_props = m_parent_physical_devices.at(n_physical_device); + + anvil_assert( present_caps.presentMask[n_physical_device] == 0 || + (present_caps.presentMask[n_physical_device] != 0 && (present_caps.presentMask[n_physical_device] & (1 << n_physical_device) ) != 0)); - if (device_queue_priorities.size() < current_queue_fam.n_queues) + for (uint32_t n_sub_physical_device = 0; + n_sub_physical_device < static_cast(m_parent_physical_devices.size() ); + ++n_sub_physical_device) { - device_queue_priorities.resize(current_queue_fam.n_queues, - 1.0f); + if ((present_caps.presentMask[n_physical_device] & (1 << n_sub_physical_device) ) != 0) + { + const Anvil::PhysicalDevice* sub_physical_device_ptr(m_parent_physical_devices.at(n_sub_physical_device).physical_device_ptr); + + physical_device_props.presentation_compatible_physical_devices.push_back(sub_physical_device_ptr); + } } } - for (uint32_t n_queue_fam = 0; - n_queue_fam < static_cast(physical_device_queue_fams.size() ); - ++n_queue_fam) + /* Extract & cache peer memory features information */ + for (uint32_t n_src_physical_device = 0; + n_src_physical_device < static_cast(m_parent_physical_devices.size() ); + ++n_src_physical_device) { - const auto& current_queue_fam(physical_device_queue_fams.at(n_queue_fam) ); + auto& src_physical_device_info = m_parent_physical_devices.at(n_src_physical_device); + const auto& src_physical_device_ptr = src_physical_device_info.physical_device_ptr; + const uint32_t n_src_memory_heaps = src_physical_device_ptr->get_memory_properties().n_heaps; - if (current_queue_fam.n_queues > 0) + for (uint32_t n_dst_physical_device = 0; + n_dst_physical_device < static_cast(m_parent_physical_devices.size() ); + ++n_dst_physical_device) { - VkDeviceQueueCreateInfo queue_create_info; + const auto& dst_physical_device_ptr = m_parent_physical_devices.at(n_dst_physical_device).physical_device_ptr; + const uint32_t n_dst_memory_heaps = dst_physical_device_ptr->get_memory_properties().n_heaps; + + ANVIL_REDUNDANT_VARIABLE_CONST(n_dst_memory_heaps); + + if (n_src_physical_device == n_dst_physical_device) + { + continue; + } - queue_create_info.flags = 0; - queue_create_info.pNext = nullptr; - queue_create_info.pQueuePriorities = &device_queue_priorities.at(0); - queue_create_info.queueCount = current_queue_fam.n_queues; - queue_create_info.queueFamilyIndex = n_queue_fam; - queue_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; + anvil_assert(n_src_memory_heaps == n_dst_memory_heaps); - device_queue_create_info_items.push_back(queue_create_info); + for 
(uint32_t n_heap = 0; + n_heap < n_src_memory_heaps; + ++n_heap) + { + const uint32_t dst_physical_device_index = dst_physical_device_ptr->get_device_group_device_index(); + VkPeerMemoryFeatureFlagsKHR memory_features = 0; + const uint32_t src_physical_device_index = src_physical_device_ptr->get_device_group_device_index(); + + m_khr_device_group_extension_entrypoints.vkGetDeviceGroupPeerMemoryFeaturesKHR(m_device, + n_heap, + src_physical_device_index, + dst_physical_device_index, + &memory_features); + + anvil_assert( (memory_features & VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR) != 0); /* As per spec */ + + src_physical_device_info.peer_memory_features[dst_physical_device_index][n_heap] = static_cast(memory_features); + } } } - /* Set up the device create info descriptor. Enable all features available. */ - create_info.enabledExtensionCount = static_cast(in_extensions.size() ); - create_info.enabledLayerCount = static_cast(in_layers.size() ); - create_info.flags = 0; - create_info.pEnabledFeatures = &in_features; - create_info.pNext = nullptr; - create_info.ppEnabledExtensionNames = (in_extensions.size() > 0) ? &in_extensions[0] : nullptr; - create_info.ppEnabledLayerNames = (in_layers.size() > 0) ? &in_layers [0] : nullptr; - create_info.pQueueCreateInfos = &device_queue_create_info_items[0]; - create_info.queueCreateInfoCount = static_cast(device_queue_create_info_items.size() ); - create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO; + /* Cache info whether this logical device supports subset allocations or not */ + const Anvil::PhysicalDevice* ref_physical_device_ptr = m_parent_physical_devices.at(0).physical_device_ptr; + const Anvil::Instance* instance_ptr = ref_physical_device_ptr->get_instance(); + const uint32_t device_group_index = ref_physical_device_ptr->get_device_group_index(); - result = vkCreateDevice(m_parent_physical_device_ptr.lock()->get_physical_device(), - &create_info, - nullptr, /* pAllocator */ - &m_device); - anvil_assert_vk_call_succeeded(result); + m_supports_subset_allocations = instance_ptr->get_physical_device_group(device_group_index).supports_subset_allocations; +} - /* Now that all queues are available, assign them to queue families Anvil recognizes. */ - get_queue_family_indices(out_queue_families_ptr); +/** Please see header for specification */ +bool Anvil::MGPUDevice::is_layer_supported(const std::string& in_layer_name) const +{ + /* Note: All physical devices within a device group must support exactly the same layers. */ + return m_parent_physical_devices.at(0).physical_device_ptr->is_layer_supported(in_layer_name); } /** Please see header for specification */ -std::shared_ptr Anvil::SGPUDevice::create_swapchain(std::shared_ptr in_parent_surface_ptr, - std::shared_ptr in_window_ptr, - VkFormat in_image_format, - VkPresentModeKHR in_present_mode, - VkImageUsageFlags in_usage, - uint32_t in_n_swapchain_images) +bool Anvil::MGPUDevice::is_physical_device_extension_supported(const std::string& in_extension_name) const +{ + /* Note: All physical devices within a device group must support exactly the same device extensions. 
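init_device() in this hunk caches the vkGetDeviceGroupPeerMemoryFeaturesKHR results per (source device, destination device, heap) tuple, and get_peer_memory_features() defined earlier exposes that cache. A hedged caller-side sketch using only the signatures visible in this patch; note the query rejects identical local and remote devices:

#include "wrappers/device.h"

/* Hedged sketch: query which peer-memory operations are allowed when one GPU in the
 * group accesses a heap owned by another. n_local_device must differ from n_remote_device. */
void check_peer_memory_support(const Anvil::MGPUDevice* mgpu_device_ptr,
                               uint32_t                 n_local_device,
                               uint32_t                 n_remote_device,
                               uint32_t                 n_memory_heaps /* heap count, supplied by the caller */)
{
    const Anvil::PhysicalDevice* local_gpu_ptr  = mgpu_device_ptr->get_physical_device(n_local_device);
    const Anvil::PhysicalDevice* remote_gpu_ptr = mgpu_device_ptr->get_physical_device(n_remote_device);

    for (uint32_t n_heap = 0; n_heap < n_memory_heaps; ++n_heap)
    {
        Anvil::PeerMemoryFeatureFlags peer_features;

        if (mgpu_device_ptr->get_peer_memory_features(local_gpu_ptr,
                                                      remote_gpu_ptr,
                                                      n_heap,
                                                      &peer_features))
        {
            /* COPY_DST is guaranteed by the spec for every peer heap; the remaining
             * feature bits are optional and may vary per heap. */
        }
    }
}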
*/ + return m_parent_physical_devices.at(0).physical_device_ptr->is_device_extension_supported(in_extension_name); +} + + +/* Please see header for specification */ +Anvil::SGPUDevice::SGPUDevice(Anvil::DeviceCreateInfoUniquePtr in_create_info_ptr) + :BaseDevice(std::move(in_create_info_ptr) ) +{ + /* Stub */ +} + +/* Please see header for specification */ +Anvil::SGPUDevice::~SGPUDevice() { - std::shared_ptr result_ptr; + /* Stub */ +} - result_ptr = Anvil::Swapchain::create(shared_from_this(), - in_parent_surface_ptr, - in_window_ptr, - in_image_format, - in_present_mode, - in_usage, - in_n_swapchain_images, - m_khr_swapchain_extension_entrypoints); + +/* Please see header for specification */ +Anvil::BaseDeviceUniquePtr Anvil::SGPUDevice::create(Anvil::DeviceCreateInfoUniquePtr in_create_info_ptr) +{ + BaseDeviceUniquePtr result_ptr(nullptr, + std::default_delete() ); + + result_ptr = std::unique_ptr( + new Anvil::SGPUDevice(std::move(in_create_info_ptr) ) + ); + + dynamic_cast(result_ptr.get() )->init(); return result_ptr; } /** Please see header for specification */ -void Anvil::SGPUDevice::destroy() +Anvil::SwapchainUniquePtr Anvil::SGPUDevice::create_swapchain(Anvil::RenderingSurface* in_parent_surface_ptr, + Anvil::Window* in_window_ptr, + Anvil::Format in_image_format, + Anvil::ColorSpaceKHR in_color_space, + Anvil::PresentModeKHR in_present_mode, + Anvil::ImageUsageFlags in_usage, + uint32_t in_n_swapchain_images) { - std::shared_ptr physical_device_locked_ptr(m_parent_physical_device_ptr); + SwapchainUniquePtr result_ptr(nullptr, + std::default_delete() ); - /* Release common stuff first */ - Anvil::BaseDevice::destroy(); + { + auto create_info_ptr = Anvil::SwapchainCreateInfo::create(this, + in_parent_surface_ptr, + in_window_ptr, + in_image_format, + in_color_space, + in_present_mode, + in_usage, + in_n_swapchain_images); + + create_info_ptr->set_mt_safety(Anvil::MTSafety::ENABLED); + + result_ptr = Anvil::Swapchain::create(std::move(create_info_ptr) ); + } - /* Unregister the instance from physical device, so that Device instance gets released as soon - * as all external shared pointers go out of scope. 
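The rewritten create_swapchain() above funnels everything through a SwapchainCreateInfo object before calling Swapchain::create(). A hedged caller-side sketch; the format, color-space, present-mode and usage enumerator names are assumptions, as is the implicit conversion of a single usage bit to Anvil::ImageUsageFlags:

#include "wrappers/device.h"
#include "wrappers/swapchain.h" /* path assumed */

/* Hedged sketch: create a triple-buffered swapchain via the new overload. */
Anvil::SwapchainUniquePtr make_swapchain(Anvil::SGPUDevice*       device_ptr,
                                         Anvil::RenderingSurface* surface_ptr,
                                         Anvil::Window*           window_ptr)
{
    return device_ptr->create_swapchain(surface_ptr,
                                        window_ptr,
                                        Anvil::Format::B8G8R8A8_UNORM,                   /* assumed enumerator */
                                        Anvil::ColorSpaceKHR::SRGB_NONLINEAR_KHR,        /* assumed enumerator */
                                        Anvil::PresentModeKHR::FIFO_KHR,                 /* assumed enumerator */
                                        Anvil::ImageUsageFlagBits::COLOR_ATTACHMENT_BIT, /* assumed enumerator */
                                        3 /* in_n_swapchain_images */);
}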
- */ - physical_device_locked_ptr->unregister_device(shared_from_this() ); + return result_ptr; +} + +bool Anvil::SGPUDevice::get_physical_device_buffer_properties(const BufferPropertiesQuery& in_query, + Anvil::BufferProperties* out_opt_result_ptr) const +{ + return m_create_info_ptr->get_physical_device_ptrs().at(0)->get_buffer_properties(in_query, + out_opt_result_ptr); } /** Please see header for specification */ -const VkPhysicalDeviceFeatures& Anvil::SGPUDevice::get_physical_device_features() const +const Anvil::PhysicalDeviceFeatures& Anvil::SGPUDevice::get_physical_device_features() const { - std::shared_ptr physical_device_locked_ptr(m_parent_physical_device_ptr); + return m_create_info_ptr->get_physical_device_ptrs().at(0)->get_device_features(); +} - return physical_device_locked_ptr->get_device_features(); +/** Please see header for specification */ +bool Anvil::SGPUDevice::get_physical_device_fence_properties(const FencePropertiesQuery& in_query, + Anvil::FenceProperties* out_opt_result_ptr) const +{ + return m_create_info_ptr->get_physical_device_ptrs().at(0)->get_fence_properties(in_query, + out_opt_result_ptr); } /** Please see header for specification */ -const Anvil::FormatProperties& Anvil::SGPUDevice::get_physical_device_format_properties(VkFormat in_format) const +Anvil::FormatProperties Anvil::SGPUDevice::get_physical_device_format_properties(Anvil::Format in_format) const { - std::shared_ptr physical_device_locked_ptr(m_parent_physical_device_ptr); + return m_create_info_ptr->get_physical_device_ptrs().at(0)->get_format_properties(in_format); +} - return physical_device_locked_ptr->get_format_properties(in_format); +/** Please see header for specification */ +bool Anvil::SGPUDevice::get_physical_device_image_format_properties(const ImageFormatPropertiesQuery& in_query, + Anvil::ImageFormatProperties* out_opt_result_ptr) const +{ + return m_create_info_ptr->get_physical_device_ptrs().at(0)->get_image_format_properties(in_query, + out_opt_result_ptr); } /** Please see header for specification */ -bool Anvil::SGPUDevice::get_physical_device_image_format_properties(VkFormat in_format, - VkImageType in_type, - VkImageTiling in_tiling, - VkImageUsageFlags in_usage, - VkImageCreateFlags in_flags, - VkImageFormatProperties& out_result) const +Anvil::MultisamplePropertiesEXT Anvil::SGPUDevice::get_physical_device_multisample_properties(const Anvil::SampleCountFlagBits& in_samples) const { - bool result; + VkMultisamplePropertiesEXT result_vk; - result = is_vk_call_successful(vkGetPhysicalDeviceImageFormatProperties(m_parent_physical_device_ptr.lock()->get_physical_device(), - in_format, - in_type, - in_tiling, - in_usage, - in_flags, - &out_result) ); + anvil_assert(is_extension_enabled(VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME) ); - return result; + result_vk.maxSampleLocationGridSize.height = 0; + result_vk.maxSampleLocationGridSize.width = 0; + result_vk.pNext = nullptr; + result_vk.sType = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT; + + m_ext_sample_locations_extension_entrypoints.vkGetPhysicalDeviceMultisamplePropertiesEXT(m_create_info_ptr->get_physical_device_ptrs().at(0)->get_physical_device(), + static_cast(in_samples), + &result_vk); + + return Anvil::MultisamplePropertiesEXT(result_vk); } /** Please see header for specification */ const Anvil::MemoryProperties& Anvil::SGPUDevice::get_physical_device_memory_properties() const { - std::shared_ptr physical_device_locked_ptr(m_parent_physical_device_ptr); - - return physical_device_locked_ptr->get_memory_properties(); + 
return m_create_info_ptr->get_physical_device_ptrs().at(0)->get_memory_properties(); } /** Please see header for specification */ -const VkPhysicalDeviceProperties& Anvil::SGPUDevice::get_physical_device_properties() const +const Anvil::PhysicalDeviceProperties& Anvil::SGPUDevice::get_physical_device_properties() const { - std::shared_ptr physical_device_locked_ptr(m_parent_physical_device_ptr); - - return physical_device_locked_ptr->get_device_properties(); + return m_create_info_ptr->get_physical_device_ptrs().at(0)->get_device_properties(); } /** Please see header for specification */ const Anvil::QueueFamilyInfoItems& Anvil::SGPUDevice::get_physical_device_queue_families() const { - std::shared_ptr physical_device_locked_ptr(m_parent_physical_device_ptr); + return m_create_info_ptr->get_physical_device_ptrs().at(0)->get_queue_families(); +} - return physical_device_locked_ptr->get_queue_families(); +/** Please see header for specification */ +bool Anvil::SGPUDevice::get_physical_device_semaphore_properties(const SemaphorePropertiesQuery& in_query, + Anvil::SemaphoreProperties* out_opt_result_ptr) const +{ + return m_create_info_ptr->get_physical_device_ptrs().at(0)->get_semaphore_properties(in_query, + out_opt_result_ptr); } /** Please see header for specification */ -bool Anvil::SGPUDevice::get_physical_device_sparse_image_format_properties(VkFormat in_format, - VkImageType in_type, - VkSampleCountFlagBits in_sample_count, - VkImageUsageFlags in_usage, - VkImageTiling in_tiling, - std::vector& out_result) const +bool Anvil::SGPUDevice::get_physical_device_sparse_image_format_properties(Anvil::Format in_format, + Anvil::ImageType in_type, + Anvil::SampleCountFlagBits in_sample_count, + Anvil::ImageUsageFlags in_usage, + Anvil::ImageTiling in_tiling, + std::vector& out_result) const { uint32_t n_props = 0; - vkGetPhysicalDeviceSparseImageFormatProperties(m_parent_physical_device_ptr.lock()->get_physical_device(), - in_format, - in_type, - in_sample_count, - in_usage, - in_tiling, - &n_props, - nullptr); /* pProperties */ + Anvil::Vulkan::vkGetPhysicalDeviceSparseImageFormatProperties(m_create_info_ptr->get_physical_device_ptrs().at(0)->get_physical_device(), + static_cast (in_format), + static_cast (in_type), + static_cast(in_sample_count), + in_usage.get_vk(), + static_cast(in_tiling), + &n_props, + nullptr); /* pProperties */ if (n_props > 0) { out_result.resize(n_props); - vkGetPhysicalDeviceSparseImageFormatProperties(m_parent_physical_device_ptr.lock()->get_physical_device(), - in_format, - in_type, - in_sample_count, - in_usage, - in_tiling, - &n_props, - &out_result[0]); + Anvil::Vulkan::vkGetPhysicalDeviceSparseImageFormatProperties(m_create_info_ptr->get_physical_device_ptrs().at(0)->get_physical_device(), + static_cast (in_format), + static_cast (in_type), + static_cast(in_sample_count), + in_usage.get_vk(), + static_cast(in_tiling), + &n_props, + reinterpret_cast(&out_result[0]) ); } return true; } /* Please see header for specification */ -bool Anvil::SGPUDevice::get_physical_device_surface_capabilities(std::shared_ptr in_surface_ptr, - VkSurfaceCapabilitiesKHR* out_result_ptr) const +bool Anvil::SGPUDevice::get_physical_device_surface_capabilities(Anvil::RenderingSurface* in_surface_ptr, + Anvil::SurfaceCapabilities* out_result_ptr) const { - return (vkGetPhysicalDeviceSurfaceCapabilitiesKHR(m_parent_physical_device_ptr.lock()->get_physical_device(), - in_surface_ptr->get_surface(), - out_result_ptr) == VK_SUCCESS); + return 
(m_khr_surface_extension_entrypoints.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(m_create_info_ptr->get_physical_device_ptrs().at(0)->get_physical_device(), + in_surface_ptr->get_surface(), + reinterpret_cast(out_result_ptr) ) == VK_SUCCESS); } /* Please see header for specification */ void Anvil::SGPUDevice::get_queue_family_indices(DeviceQueueFamilyInfo* out_device_queue_family_info_ptr) const { - get_queue_family_indices_for_physical_device(m_parent_physical_device_ptr, + get_queue_family_indices_for_physical_device(m_create_info_ptr->get_physical_device_ptrs().at(0), out_device_queue_family_info_ptr); } /** Please see header for specification */ const Anvil::QueueFamilyInfo* Anvil::SGPUDevice::get_queue_family_info(uint32_t in_queue_family_index) const { - std::shared_ptr physical_device_locked_ptr(m_parent_physical_device_ptr); - const auto& queue_fams (physical_device_locked_ptr->get_queue_families() ); - const Anvil::QueueFamilyInfo* result_ptr (nullptr); + const auto& queue_fams(m_create_info_ptr->get_physical_device_ptrs().at(0)->get_queue_families() ); + const Anvil::QueueFamilyInfo* result_ptr(nullptr); if (queue_fams.size() > in_queue_family_index) { @@ -988,31 +2413,19 @@ const Anvil::QueueFamilyInfo* Anvil::SGPUDevice::get_queue_family_info(uint32_t return result_ptr; } -/** Register this SGPUDevice with the owning physical device. This is necessary to ensure this device instance - * is not released prematurely. - **/ void Anvil::SGPUDevice::init_device() { - std::shared_ptr physical_device_locked_ptr(m_parent_physical_device_ptr); - - /* Cache a shared pointer owning this instance in PhysicalDevice. We will release it at destroy() time, - * having destroyed all children objects we also own and which take a weak pointer to this Device. - */ - physical_device_locked_ptr->register_device(shared_from_this() ); + /* Stub */ } /** Please see header for specification */ bool Anvil::SGPUDevice::is_layer_supported(const std::string& in_layer_name) const { - std::shared_ptr physical_device_locked_ptr(m_parent_physical_device_ptr); - - return physical_device_locked_ptr->is_layer_supported(in_layer_name); + return m_create_info_ptr->get_physical_device_ptrs().at(0)->is_layer_supported(in_layer_name); } /** Please see header for specification */ bool Anvil::SGPUDevice::is_physical_device_extension_supported(const std::string& in_extension_name) const { - std::shared_ptr physical_device_locked_ptr(m_parent_physical_device_ptr); - - return physical_device_locked_ptr->is_device_extension_supported(in_extension_name); + return m_create_info_ptr->get_physical_device_ptrs().at(0)->is_device_extension_supported(in_extension_name); } diff --git a/src/wrappers/event.cpp b/src/wrappers/event.cpp index 1929ff6c..b0fa1141 100644 --- a/src/wrappers/event.cpp +++ b/src/wrappers/event.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
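The event.cpp rewrite below follows the same pattern as the other wrappers in this patch: construction takes an EventCreateInfo object and the Vulkan handle is created in a separate, fallible init() step, so Event::create() returns nullptr on failure. A hedged caller-side sketch; the EventCreateInfo::create() and set_mt_safety() signatures are assumptions:

#include <utility>

#include "misc/event_create_info.h"
#include "wrappers/device.h"
#include "wrappers/event.h"

/* Hedged sketch of the two-step creation flow introduced by this patch. */
Anvil::EventUniquePtr make_event(Anvil::BaseDevice* device_ptr)
{
    auto create_info_ptr = Anvil::EventCreateInfo::create(device_ptr); /* assumed factory signature */

    create_info_ptr->set_mt_safety(Anvil::MTSafety::ENABLED); /* assumed setter; MT-safety is opt-in */

    return Anvil::Event::create(std::move(create_info_ptr)); /* nullptr if vkCreateEvent failed */
}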
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -21,41 +21,23 @@ // #include "misc/debug.h" +#include "misc/event_create_info.h" #include "misc/object_tracker.h" #include "wrappers/device.h" #include "wrappers/event.h" /* Please see header for specification */ -Anvil::Event::Event(std::weak_ptr in_device_ptr) - :DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT), - m_device_ptr (in_device_ptr), +Anvil::Event::Event(Anvil::EventCreateInfoUniquePtr in_create_info_ptr) + :DebugMarkerSupportProvider(in_create_info_ptr->get_device(), + Anvil::ObjectType::EVENT), + MTSafetySupportProvider (Anvil::Utils::convert_mt_safety_enum_to_boolean(in_create_info_ptr->get_mt_safety(), + in_create_info_ptr->get_device () )), m_event (VK_NULL_HANDLE) { - std::shared_ptr device_locked_ptr(m_device_ptr); - VkEventCreateInfo event_create_info; - VkResult result (VK_ERROR_INITIALIZATION_FAILED); - - ANVIL_REDUNDANT_VARIABLE(result); - - /* Spawn a new event */ - event_create_info.flags = 0; - event_create_info.pNext = nullptr; - event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO; - - result = vkCreateEvent(device_locked_ptr->get_device_vk(), - &event_create_info, - nullptr, /* pAllocator */ - &m_event); - - anvil_assert_vk_call_succeeded(result); - if (is_vk_call_successful(result) ) - { - set_vk_handle(m_event); - } + m_create_info_ptr = std::move(in_create_info_ptr); /* Register the event instance */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_EVENT, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::EVENT, this); } @@ -66,32 +48,64 @@ Anvil::Event::Event(std::weak_ptr in_device_ptr) **/ Anvil::Event::~Event() { - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_EVENT, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::EVENT, this); release_event(); } /* Please see header for specification */ -std::shared_ptr Anvil::Event::create(std::weak_ptr in_device_ptr) +Anvil::EventUniquePtr Anvil::Event::create(Anvil::EventCreateInfoUniquePtr in_create_info_ptr) { - std::shared_ptr result_ptr; + Anvil::EventUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( - new Anvil::Event(in_device_ptr) + new Anvil::Event(std::move(in_create_info_ptr) ) ); + if (result_ptr != nullptr) + { + if (!result_ptr->init() ) + { + result_ptr.reset(); + } + } + return result_ptr; } +bool Anvil::Event::init() +{ + VkEventCreateInfo event_create_info; + VkResult result (VK_ERROR_INITIALIZATION_FAILED); + + /* Spawn a new event */ + event_create_info.flags = 0; + event_create_info.pNext = nullptr; + event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO; + + result = Anvil::Vulkan::vkCreateEvent(m_device_ptr->get_device_vk(), + &event_create_info, + nullptr, /* pAllocator */ + &m_event); + + anvil_assert_vk_call_succeeded(result); + if (is_vk_call_successful(result) ) + { + set_vk_handle(m_event); + } + + return is_vk_call_successful(result); +} + /* Please see header for specification */ bool Anvil::Event::is_set() const { - std::shared_ptr device_locked_ptr(m_device_ptr); - VkResult result; + VkResult result; - result = vkGetEventStatus(device_locked_ptr->get_device_vk(), - m_event); + result = Anvil::Vulkan::vkGetEventStatus(m_device_ptr->get_device_vk(), + m_event); anvil_assert(result == VK_EVENT_RESET || result == VK_EVENT_SET); @@ -104,11 +118,13 @@ void 
Anvil::Event::release_event() { if (m_event != VK_NULL_HANDLE) { - std::shared_ptr device_locked_ptr(m_device_ptr); - - vkDestroyEvent(device_locked_ptr->get_device_vk(), - m_event, - nullptr /* pAllocator */); + lock(); + { + Anvil::Vulkan::vkDestroyEvent(m_device_ptr->get_device_vk(), + m_event, + nullptr /* pAllocator */); + } + unlock(); m_event = VK_NULL_HANDLE; } @@ -117,11 +133,15 @@ void Anvil::Event::release_event() /* Please see header for specification */ bool Anvil::Event::reset() { - std::shared_ptr device_locked_ptr(m_device_ptr); - VkResult result; + VkResult result; + + lock(); + { + result = Anvil::Vulkan::vkResetEvent(m_device_ptr->get_device_vk(), + m_event); + } + unlock(); - result = vkResetEvent(device_locked_ptr->get_device_vk(), - m_event); anvil_assert_vk_call_succeeded(result); return (result == VK_SUCCESS); @@ -130,11 +150,15 @@ bool Anvil::Event::reset() /* Please see header for specification */ bool Anvil::Event::set() { - std::shared_ptr device_locked_ptr(m_device_ptr); - VkResult result; + VkResult result; + + lock(); + { + result = Anvil::Vulkan::vkSetEvent(m_device_ptr->get_device_vk(), + m_event); + } + unlock(); - result = vkSetEvent(device_locked_ptr->get_device_vk(), - m_event); anvil_assert_vk_call_succeeded(result); return (result == VK_SUCCESS); diff --git a/src/wrappers/fence.cpp b/src/wrappers/fence.cpp index 3f2a682c..898609c2 100644 --- a/src/wrappers/fence.cpp +++ b/src/wrappers/fence.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -21,79 +21,342 @@ // #include "misc/debug.h" +#include "misc/external_handle.h" +#include "misc/fence_create_info.h" #include "misc/object_tracker.h" +#include "misc/struct_chainer.h" #include "wrappers/device.h" #include "wrappers/fence.h" #include + /* Please see header for specification */ -Anvil::Fence::Fence(std::weak_ptr in_device_ptr, - bool in_create_signalled) - :DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT), - m_device_ptr (in_device_ptr), - m_fence (VK_NULL_HANDLE), - m_possibly_set (false) +Anvil::Fence::Fence(Anvil::FenceCreateInfoUniquePtr in_create_info_ptr) + :DebugMarkerSupportProvider(in_create_info_ptr->get_device(), + Anvil::ObjectType::FENCE), + MTSafetySupportProvider (Anvil::Utils::convert_mt_safety_enum_to_boolean(in_create_info_ptr->get_mt_safety(), + in_create_info_ptr->get_device () )), + m_fence (VK_NULL_HANDLE) { - std::shared_ptr device_locked_ptr(m_device_ptr); - VkFenceCreateInfo fence_create_info; - VkResult result (VK_ERROR_INITIALIZATION_FAILED); - - ANVIL_REDUNDANT_VARIABLE(result); - - /* Spawn a new fence */ - fence_create_info.flags = (in_create_signalled) ? 
VK_FENCE_CREATE_SIGNALED_BIT - : 0u; - fence_create_info.pNext = nullptr; - fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO; - - result = vkCreateFence(device_locked_ptr->get_device_vk(), - &fence_create_info, - nullptr, /* pAllocator */ - &m_fence); - - anvil_assert_vk_call_succeeded(result); - if (is_vk_call_successful(result) ) - { - set_vk_handle(m_fence); - } + m_create_info_ptr = std::move(in_create_info_ptr); /* Register the event instance */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_FENCE, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::FENCE, this); } /* Please see header for specification */ Anvil::Fence::~Fence() { - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_FENCE, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::FENCE, this); release_fence(); } /* Please see header for specification */ -std::shared_ptr Anvil::Fence::create(std::weak_ptr in_device_ptr, - bool in_create_signalled) +Anvil::FenceUniquePtr Anvil::Fence::create(Anvil::FenceCreateInfoUniquePtr in_create_info_ptr) { - std::shared_ptr result_ptr; + Anvil::FenceUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( - new Anvil::Fence(in_device_ptr, - in_create_signalled) + new Anvil::Fence(std::move(in_create_info_ptr) ) ); + if (result_ptr != nullptr) + { + if (!result_ptr->init() ) + { + result_ptr.reset(); + } + } + return result_ptr; } +/* Please see header for specification */ +Anvil::ExternalHandleUniquePtr Anvil::Fence::export_to_external_handle(const Anvil::ExternalFenceHandleTypeFlagBits& in_fence_handle_type) +{ + #if defined(_WIN32) + const auto invalid_handle = nullptr; + const bool handle_needs_manual_release = Anvil::Utils::is_nt_handle(in_fence_handle_type); + const bool only_one_handle_ever_permitted = Anvil::Utils::is_nt_handle(in_fence_handle_type); + #else + const int invalid_handle = -1; + const bool handle_needs_manual_release = true; + const bool only_one_handle_ever_permitted = false; + #endif + + ExternalHandleType result_handle = 0; + Anvil::ExternalHandleUniquePtr result_ptr; + + /* Sanity checks */ + #if defined(_WIN32) + { + if (!m_create_info_ptr->get_device()->get_extension_info()->khr_external_fence_win32() ) + { + anvil_assert(m_create_info_ptr->get_device()->get_extension_info()->khr_external_fence_win32() ); + + goto end; + } + } + #else + { + if (!m_create_info_ptr->get_device()->get_extension_info()->khr_external_fence_fd() ) + { + anvil_assert(m_create_info_ptr->get_device()->get_extension_info()->khr_external_fence_fd() ); + + goto end; + } + } + #endif + + if (only_one_handle_ever_permitted && + m_external_fence_created_for_handle_type.find(in_fence_handle_type) != m_external_fence_created_for_handle_type.end() ) + { + anvil_assert_fail(); + + goto end; + } + + /* Go and try to open a new handle. 
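export_to_external_handle() hands the application an OS handle (an NT handle on Windows, a file descriptor elsewhere) referencing the fence payload, subject to the VK_KHR_external_fence_win32 / VK_KHR_external_fence_fd checks above. A hedged caller-side sketch for the non-Windows path; OPAQUE_FD_BIT is an assumed enumerator, named by analogy with the OPAQUE_WIN32_BIT used later in this file, and the fence is assumed to have been created with that exportable handle type:

#include "misc/external_handle.h"
#include "wrappers/fence.h"

/* Hedged sketch (non-Windows): export an opaque FD for a fence whose create info
 * requested an exportable external handle type. */
Anvil::ExternalHandleUniquePtr export_fence_fd(Anvil::Fence* fence_ptr)
{
    return fence_ptr->export_to_external_handle(Anvil::ExternalFenceHandleTypeFlagBits::OPAQUE_FD_BIT); /* assumed enumerator */
}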
*/ + #if defined(_WIN32) + { + const auto entrypoints_ptr = &m_create_info_ptr->get_device()->get_extension_khr_external_fence_win32_entrypoints(); + VkFenceGetWin32HandleInfoKHR info; + + anvil_assert(m_fence != VK_NULL_HANDLE); + + info.fence = m_fence; + info.handleType = static_cast(in_fence_handle_type); + info.pNext = nullptr; + info.sType = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR; + + if (entrypoints_ptr->vkGetFenceWin32HandleKHR(m_create_info_ptr->get_device()->get_device_vk(), + &info, + &result_handle) != VK_SUCCESS) + { + anvil_assert_fail(); + + goto end; + } + } + #else + { + const auto entrypoints_ptr = &m_create_info_ptr->get_device()->get_extension_khr_external_fence_fd_entrypoints(); + VkFenceGetFdInfoKHR info; + + anvil_assert(m_fence != VK_NULL_HANDLE); + + info.fence = m_fence; + info.handleType = static_cast(in_fence_handle_type); + info.pNext = nullptr; + info.sType = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR; + + if (entrypoints_ptr->vkGetFenceFdKHR(m_create_info_ptr->get_device()->get_device_vk(), + &info, + &result_handle) != VK_SUCCESS) + { + anvil_assert_fail(); + + goto end; + } + } + #endif + + if (result_handle == invalid_handle) + { + anvil_assert(result_handle != invalid_handle); + + goto end; + } + + /* If this is necessary, cache the newly created handle so that we do not let the app attempt to re-create the external handle + * for the same Vulkan fence handle. + */ + if (only_one_handle_ever_permitted) + { + m_external_fence_created_for_handle_type[in_fence_handle_type] = true; + } + + result_ptr = Anvil::ExternalHandle::create(result_handle, + handle_needs_manual_release); /* in_close_at_destruction_time */ + +end: + return result_ptr; +} + +#if defined(_WIN32) + bool Anvil::Fence::import_from_external_handle(const bool& in_temporary_import, + const Anvil::ExternalFenceHandleTypeFlagBits& in_handle_type, + const ExternalHandleType& in_opt_handle, + const std::wstring& in_opt_name) +#else + bool Anvil::Fence::import_from_external_handle(const bool& in_temporary_import, + const Anvil::ExternalFenceHandleTypeFlagBits& in_handle_type, + const ExternalHandleType& in_handle) +#endif +{ + #if defined(_WIN32) + const auto entrypoints_ptr = &m_device_ptr->get_extension_khr_external_fence_win32_entrypoints(); + #else + const auto entrypoints_ptr = &m_device_ptr->get_extension_khr_external_fence_fd_entrypoints(); + #endif + + bool result = false; + + /* Sanity checks */ + #if defined(_WIN32) + { + if (!m_device_ptr->get_extension_info()->khr_external_fence_win32() ) + { + anvil_assert(m_device_ptr->get_extension_info()->khr_external_fence_win32() ); + + goto end; + } + } + #else + { + if (!m_device_ptr->get_extension_info()->khr_external_fence_fd() ) + { + anvil_assert(m_device_ptr->get_extension_info()->khr_external_fence_fd() ); + + goto end; + } + } + #endif + + /* Proceed */ + #if defined(_WIN32) + { + VkImportFenceWin32HandleInfoKHR info_vk; + + info_vk.fence = m_fence; + info_vk.flags = (in_temporary_import) ? VK_FENCE_IMPORT_TEMPORARY_BIT_KHR + : 0; + info_vk.handle = in_opt_handle; + info_vk.handleType = static_cast(in_handle_type); + info_vk.name = (in_opt_name.size() > 0) ? 
&in_opt_name.at(0) + : nullptr; + info_vk.pNext = nullptr; + info_vk.sType = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR; + + result = (entrypoints_ptr->vkImportFenceWin32HandleKHR(m_device_ptr->get_device_vk(), + &info_vk) == VK_SUCCESS); + } + #else + { + VkImportFenceFdInfoKHR info_vk; + + info_vk.fd = in_handle; + info_vk.fence = m_fence; + info_vk.flags = (in_temporary_import) ? VK_FENCE_IMPORT_TEMPORARY_BIT_KHR + : 0; + info_vk.handleType = static_cast(in_handle_type); + info_vk.pNext = nullptr; + info_vk.sType = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR; + + result = (entrypoints_ptr->vkImportFenceFdKHR(m_device_ptr->get_device_vk(), + &info_vk) == VK_SUCCESS); + } + #endif +end: + return result; +} + +bool Anvil::Fence::init() +{ + VkFenceCreateInfo fence_create_info; + VkResult result (VK_ERROR_INITIALIZATION_FAILED); + Anvil::StructChainer struct_chainer; + Anvil::StructChainUniquePtr struct_chain_ptr; + + /* Sanity checks */ + if (m_create_info_ptr->get_exportable_external_fence_handle_types() != Anvil::ExternalFenceHandleTypeFlagBits::NONE) + { + if (!m_device_ptr->get_extension_info()->khr_external_fence() ) + { + anvil_assert(m_device_ptr->get_extension_info()->khr_external_fence() ); + + goto end; + } + } + + /* Spawn a new fence */ + { + fence_create_info.flags = (m_create_info_ptr->should_create_signalled() ) ? VK_FENCE_CREATE_SIGNALED_BIT + : 0u; + fence_create_info.pNext = nullptr; + fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO; + + struct_chainer.append_struct(fence_create_info); + } + + if (m_create_info_ptr->get_exportable_external_fence_handle_types() != Anvil::ExternalFenceHandleTypeFlagBits::NONE) + { + VkExportFenceCreateInfo create_info; + + create_info.handleTypes = m_create_info_ptr->get_exportable_external_fence_handle_types().get_vk(); + create_info.pNext = nullptr; + create_info.sType = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR; + + struct_chainer.append_struct(create_info); + } + + #if defined(_WIN32) + { + const Anvil::ExternalNTHandleInfo* nt_handle_info_ptr = nullptr; + + if (m_create_info_ptr->get_exportable_nt_handle_info(&nt_handle_info_ptr) ) + { + VkExportFenceWin32HandleInfoKHR handle_info; + + anvil_assert(nt_handle_info_ptr != nullptr); + anvil_assert((m_create_info_ptr->get_exportable_external_fence_handle_types() & Anvil::ExternalFenceHandleTypeFlagBits::OPAQUE_WIN32_BIT) != 0); + + handle_info.dwAccess = nt_handle_info_ptr->access; + handle_info.name = (nt_handle_info_ptr->name.size() > 0) ? 
&nt_handle_info_ptr->name.at(0) + : nullptr; + handle_info.pAttributes = nt_handle_info_ptr->attributes_ptr; + handle_info.pNext = nullptr; + handle_info.sType = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR; + + struct_chainer.append_struct(handle_info); + } + } + #endif + + struct_chain_ptr = struct_chainer.create_chain(); + if (struct_chain_ptr == nullptr) + { + anvil_assert(struct_chain_ptr != nullptr); + + goto end; + } + + result = Anvil::Vulkan::vkCreateFence(m_device_ptr->get_device_vk(), + struct_chain_ptr->get_root_struct(), + nullptr, /* pAllocator */ + &m_fence); + + anvil_assert_vk_call_succeeded(result); + if (is_vk_call_successful(result) ) + { + set_vk_handle(m_fence); + } + +end: + return is_vk_call_successful(result); +} + /* Please see header for specification */ bool Anvil::Fence::is_set() const { - std::shared_ptr device_locked_ptr(m_device_ptr); - VkResult result; + VkResult result; - result = vkGetFenceStatus(device_locked_ptr->get_device_vk(), - m_fence); + result = Anvil::Vulkan::vkGetFenceStatus(m_device_ptr->get_device_vk(), + m_fence); anvil_assert(result == VK_SUCCESS || result == VK_NOT_READY); @@ -106,11 +369,13 @@ void Anvil::Fence::release_fence() { if (m_fence != VK_NULL_HANDLE) { - std::shared_ptr device_locked_ptr(m_device_ptr); - - vkDestroyFence(device_locked_ptr->get_device_vk(), - m_fence, - nullptr /* pAllocator */); + lock(); + { + Anvil::Vulkan::vkDestroyFence(m_device_ptr->get_device_vk(), + m_fence, + nullptr /* pAllocator */); + } + unlock(); m_fence = VK_NULL_HANDLE; } @@ -119,15 +384,15 @@ void Anvil::Fence::release_fence() /* Please see header for specification */ bool Anvil::Fence::reset() { - std::shared_ptr device_locked_ptr(m_device_ptr); - VkResult result; + VkResult result; - result = vkResetFences(device_locked_ptr->get_device_vk(), - 1, /* fenceCount */ - &m_fence); - anvil_assert_vk_call_succeeded(result); - - m_possibly_set = false; + lock(); + { + result = Anvil::Vulkan::vkResetFences(m_device_ptr->get_device_vk(), + 1, /* fenceCount */ + &m_fence); + } + unlock(); return (result == VK_SUCCESS); } @@ -136,11 +401,11 @@ bool Anvil::Fence::reset() bool Anvil::Fence::reset_fences(const uint32_t in_n_fences, Fence* in_fences) { - std::shared_ptr device_locked_ptr; - VkFence fence_cache[32]; - static const uint32_t fence_cache_capacity = sizeof(fence_cache) / sizeof(fence_cache[0]); - bool result = true; - VkResult result_vk; + const Anvil::BaseDevice* device_ptr = nullptr; + auto fence_cache = std::vector(in_n_fences); + static const uint32_t fence_cache_capacity = sizeof(fence_cache) / sizeof(fence_cache[0]); + bool result = true; + VkResult result_vk; if (in_n_fences == 0) { @@ -159,18 +424,28 @@ bool Anvil::Fence::reset_fences(const uint32_t in_n_fences, { Anvil::Fence& current_fence = in_fences[n_fence_batch * fence_cache_capacity + n_fence]; - anvil_assert(device_locked_ptr == nullptr || - device_locked_ptr != nullptr && !current_fence.m_device_ptr.expired()); + anvil_assert((device_ptr == nullptr) || + (device_ptr != nullptr && current_fence.m_device_ptr != nullptr) ); - device_locked_ptr = current_fence.m_device_ptr.lock(); + device_ptr = current_fence.m_device_ptr; fence_cache[n_fence] = current_fence.m_fence; - current_fence.m_possibly_set = false; + current_fence.lock(); + } + { + result_vk = Anvil::Vulkan::vkResetFences(device_ptr->get_device_vk(), + n_fences_remaining, + (n_fences_remaining > 0) ? 
&fence_cache.at(0) : nullptr); + } + for (uint32_t n_fence = 0; + n_fence < n_fences_remaining; + ++n_fence) + { + Anvil::Fence& current_fence = in_fences[n_fence_batch * fence_cache_capacity + n_fence]; + + current_fence.unlock(); } - result_vk = vkResetFences(device_locked_ptr->get_device_vk(), - n_fences_remaining, - fence_cache); anvil_assert_vk_call_succeeded(result_vk); if (!is_vk_call_successful(result_vk) ) @@ -181,4 +456,4 @@ bool Anvil::Fence::reset_fences(const uint32_t in_n_fences, end: return result; -} \ No newline at end of file +} diff --git a/src/wrappers/framebuffer.cpp b/src/wrappers/framebuffer.cpp index 12ee1a6b..6e7e1e1f 100644 --- a/src/wrappers/framebuffer.cpp +++ b/src/wrappers/framebuffer.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -21,7 +21,9 @@ // #include "misc/debug.h" +#include "misc/framebuffer_create_info.h" #include "misc/object_tracker.h" +#include "misc/render_pass_create_info.h" #include "wrappers/device.h" #include "wrappers/framebuffer.h" #include "wrappers/image.h" @@ -29,62 +31,26 @@ #include "wrappers/render_pass.h" #include -bool Anvil::Framebuffer::RenderPassComparator::operator()(std::shared_ptr in_a_ptr, - std::shared_ptr in_b_ptr) const +bool Anvil::Framebuffer::RenderPassComparator::operator()(Anvil::RenderPass* in_a_ptr, + Anvil::RenderPass* in_b_ptr) const { - return in_a_ptr->get_render_pass(false /* allow_rebaking */) < - in_b_ptr->get_render_pass(false /* allow_rebaking */); + return in_a_ptr->get_render_pass() < in_b_ptr->get_render_pass(); } -/** Please see header for specification */ -Anvil::Framebuffer::FramebufferAttachment::FramebufferAttachment(std::shared_ptr in_image_view_ptr) -{ - anvil_assert(in_image_view_ptr != nullptr); - - image_view_ptr = in_image_view_ptr; -} - -/** Please see header for specification */ -Anvil::Framebuffer::FramebufferAttachment::FramebufferAttachment(const FramebufferAttachment& in) -{ - image_view_ptr = in.image_view_ptr; -} - -/** Please see header for specification */ -Anvil::Framebuffer::FramebufferAttachment& Anvil::Framebuffer::FramebufferAttachment::operator=(const Anvil::Framebuffer::FramebufferAttachment& in) -{ - image_view_ptr = in.image_view_ptr; - - return *this; -} - -/** Destructor. Releases the wrapped image view instance. 
*/ -Anvil::Framebuffer::FramebufferAttachment::~FramebufferAttachment() -{ - /* Stub */ -} - /* Please see header for specification */ -Anvil::Framebuffer::Framebuffer(std::weak_ptr in_device_ptr, - uint32_t in_width, - uint32_t in_height, - uint32_t in_n_layers) - :DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, +Anvil::Framebuffer::Framebuffer(Anvil::FramebufferCreateInfoUniquePtr in_create_info_ptr) + :DebugMarkerSupportProvider(in_create_info_ptr->get_device(), + Anvil::ObjectType::FRAMEBUFFER, true), /* in_use_delegate_workers */ - m_device_ptr (in_device_ptr) + MTSafetySupportProvider (Anvil::Utils::convert_mt_safety_enum_to_boolean(in_create_info_ptr->get_mt_safety(), + in_create_info_ptr->get_device () )), + m_device_ptr (in_create_info_ptr->get_device () ) { - anvil_assert(in_width >= 1); - anvil_assert(in_height >= 1); - anvil_assert(in_n_layers >= 1); - - m_framebuffer_size[0] = in_width; - m_framebuffer_size[1] = in_height; - m_framebuffer_size[2] = in_n_layers; + m_create_info_ptr = std::move(in_create_info_ptr); /* Register the object */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_FRAMEBUFFER, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::FRAMEBUFFER, this); } @@ -95,10 +61,8 @@ Anvil::Framebuffer::Framebuffer(std::weak_ptr in_device_ptr, **/ Anvil::Framebuffer::~Framebuffer() { - std::shared_ptr device_locked_ptr(m_device_ptr); - /* Unregister the object */ - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_FRAMEBUFFER, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::FRAMEBUFFER, this); for (auto fb_iterator = m_baked_framebuffers.begin(); @@ -108,94 +72,28 @@ Anvil::Framebuffer::~Framebuffer() anvil_assert(fb_iterator->second.framebuffer != VK_NULL_HANDLE); /* Destroy the Vulkan framebuffer object */ - vkDestroyFramebuffer(device_locked_ptr->get_device_vk(), - fb_iterator->second.framebuffer, - nullptr /* pAllocator */); - - /* Carry on and release the renderpass the framebuffer had been baked for */ - fb_iterator->first->unregister_from_callbacks( - RENDER_PASS_CALLBACK_ID_BAKING_NEEDED, - std::bind(&Framebuffer::on_renderpass_changed, - this, - std::placeholders::_1), - this - ); + lock(); + { + Anvil::Vulkan::vkDestroyFramebuffer(m_device_ptr->get_device_vk(), + fb_iterator->second.framebuffer, + nullptr /* pAllocator */); + } + unlock(); } m_baked_framebuffers.clear(); - m_attachments.clear(); } /* Please see header for specification */ -bool Anvil::Framebuffer::add_attachment(std::shared_ptr in_image_view_ptr, - FramebufferAttachmentID* out_opt_attachment_id_ptr) +bool Anvil::Framebuffer::bake(Anvil::RenderPass* in_render_pass_ptr) { - std::shared_ptr parent_image_ptr; - bool result = false; - uint32_t view_size[3]; - - /* Sanity checks: Input image view must not be nullptr */ - anvil_assert(in_image_view_ptr != nullptr); - - /* Sanity checks: make sure the image views have the same, or larger size than the framebuffer's. */ - parent_image_ptr = in_image_view_ptr->get_parent_image(); - - anvil_assert(parent_image_ptr != nullptr); - - if (!parent_image_ptr->get_image_mipmap_size(in_image_view_ptr->get_base_mipmap_level(), - view_size + 0, - view_size + 1, - view_size + 2) ) - { - /* Why is the mipmap index invalid? 
*/ - anvil_assert_fail(); - - goto end; - } - - if (view_size[0] < m_framebuffer_size[0] || - view_size[1] < m_framebuffer_size[1] || - view_size[2] < m_framebuffer_size[2]) - { - /* Attachment size is wrong */ - anvil_assert_fail(); - - goto end; - } - - /* Spawn & store a new descriptor for the attachment. */ - if (out_opt_attachment_id_ptr != nullptr) - { - *out_opt_attachment_id_ptr = static_cast(m_attachments.size() ); - } - - m_attachments.push_back(FramebufferAttachment(in_image_view_ptr) ); - - /* Mark all baked framebuffers as dirty. */ - for (auto baked_fb_iterator = m_baked_framebuffers.begin(); - baked_fb_iterator != m_baked_framebuffers.end(); - ++baked_fb_iterator) - { - baked_fb_iterator->second.dirty = true; - } - - /* All done */ - result = true; - -end: - return result; -} - -/* Please see header for specification */ -bool Anvil::Framebuffer::bake(std::shared_ptr in_render_pass_ptr) -{ - BakedFramebufferMap::iterator baked_fb_iterator; - std::shared_ptr device_locked_ptr(m_device_ptr); - VkFramebufferCreateInfo fb_create_info; - std::vector image_view_attachments; - bool result = false; - VkFramebuffer result_fb = VK_NULL_HANDLE; - VkResult result_vk = VK_ERROR_INITIALIZATION_FAILED; + BakedFramebufferMap::iterator baked_fb_iterator; + VkFramebufferCreateInfo fb_create_info; + std::vector image_view_attachments; + const auto n_attachments = m_create_info_ptr->get_n_attachments(); + bool result = false; + VkFramebuffer result_fb = VK_NULL_HANDLE; + VkResult result_vk = VK_ERROR_INITIALIZATION_FAILED; ANVIL_REDUNDANT_VARIABLE(result_vk); @@ -207,24 +105,21 @@ bool Anvil::Framebuffer::bake(std::shared_ptr in_render_pass_ goto end; } - /* Sanity checks: Make sure none of the FB size dimensions are 0 */ - if (m_framebuffer_size[0] < 1 || - m_framebuffer_size[1] < 1 || - m_framebuffer_size[2] < 1) - { - anvil_assert_fail(); - - goto end; - } - - /* Release the existing Vulkan object handle, if one is already present */ + /* Release the existing Vulkan object handle, if one is already present + * + * TODO: Leverage the concept of render pass compatibility here! 
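With this rewrite the Framebuffer no longer stores its own attachment list; dimensions and attachments live in the FramebufferCreateInfo, and bake() builds one VkFramebuffer per render pass on demand (get_framebuffer() triggers the bake on first use). A hedged caller-side sketch; the FramebufferCreateInfo::create() and add_attachment() signatures are assumptions modelled on the removed Framebuffer constructor and add_attachment() above:

#include <utility>

#include "misc/framebuffer_create_info.h"
#include "wrappers/framebuffer.h"

/* Hedged sketch: describe the framebuffer via its create info, then bake lazily per render pass. */
Anvil::FramebufferUniquePtr make_framebuffer(Anvil::BaseDevice* device_ptr,
                                             Anvil::ImageView*  color_view_ptr,
                                             uint32_t           width,
                                             uint32_t           height)
{
    auto create_info_ptr = Anvil::FramebufferCreateInfo::create(device_ptr,
                                                                width,
                                                                height,
                                                                1 /* n_layers */); /* assumed factory signature */

    create_info_ptr->add_attachment(color_view_ptr,
                                    nullptr /* out_opt_attachment_id_ptr */); /* assumed signature */

    return Anvil::Framebuffer::create(std::move(create_info_ptr));
}

/* Later, per render pass: framebuffer_ptr->get_framebuffer(render_pass_ptr) returns a cached
 * VkFramebuffer, baking it on the first call for that render pass. */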
+ */ baked_fb_iterator = m_baked_framebuffers.find(in_render_pass_ptr); if (baked_fb_iterator != m_baked_framebuffers.end() ) { - vkDestroyFramebuffer(device_locked_ptr->get_device_vk(), - baked_fb_iterator->second.framebuffer, - nullptr /* pAllocator */); + lock(); + { + Anvil::Vulkan::vkDestroyFramebuffer(m_device_ptr->get_device_vk(), + baked_fb_iterator->second.framebuffer, + nullptr /* pAllocator */); + } + unlock(); DebugMarkerSupportProvider::remove_delegate(baked_fb_iterator->second.framebuffer); @@ -232,32 +127,46 @@ bool Anvil::Framebuffer::bake(std::shared_ptr in_render_pass_ } /* Prepare the image view array we will use as input for the create info descriptor */ - image_view_attachments.reserve(m_attachments.size() ); + image_view_attachments.reserve(n_attachments); - for (auto attachment_iterator = m_attachments.begin(); - attachment_iterator != m_attachments.end(); - ++attachment_iterator) + for (uint32_t n_attachment = 0; + n_attachment < n_attachments; + ++n_attachment) { - image_view_attachments.push_back(attachment_iterator->image_view_ptr->get_image_view() ); + Anvil::ImageView* image_view_ptr = nullptr; + + if (!m_create_info_ptr->get_attachment_at_index(n_attachment, + &image_view_ptr) ) + { + anvil_assert_fail(); + + goto end; + } + + anvil_assert(image_view_ptr != nullptr); + + image_view_attachments.push_back(image_view_ptr->get_image_view() ); } /* Prepare the create info descriptor */ - fb_create_info.attachmentCount = (uint32_t) image_view_attachments.size(); + anvil_assert(in_render_pass_ptr->get_render_pass_create_info()->get_n_attachments() == image_view_attachments.size() ); + + fb_create_info.attachmentCount = static_cast(image_view_attachments.size() ); fb_create_info.flags = 0; - fb_create_info.height = m_framebuffer_size[1]; - fb_create_info.layers = m_framebuffer_size[2]; + fb_create_info.height = m_create_info_ptr->get_height (); + fb_create_info.layers = m_create_info_ptr->get_n_layers(); fb_create_info.pAttachments = (fb_create_info.attachmentCount > 0) ? 
&image_view_attachments[0] : nullptr; fb_create_info.pNext = nullptr; fb_create_info.renderPass = in_render_pass_ptr->get_render_pass(); fb_create_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; - fb_create_info.width = m_framebuffer_size[0]; + fb_create_info.width = m_create_info_ptr->get_width(); /* Create the framebuffer instance and store it */ - result_vk = vkCreateFramebuffer(device_locked_ptr->get_device_vk(), - &fb_create_info, - nullptr, /* pAllocator */ - &result_fb); + result_vk = Anvil::Vulkan::vkCreateFramebuffer(m_device_ptr->get_device_vk(), + &fb_create_info, + nullptr, /* pAllocator */ + &result_fb); anvil_assert_vk_call_succeeded(result_vk); if (is_vk_call_successful(result_vk) ) @@ -270,15 +179,6 @@ bool Anvil::Framebuffer::bake(std::shared_ptr in_render_pass_ DebugMarkerSupportProvider::add_delegate(result_fb); } - /* If the render pass is ever changed, make sure we re-bake the framebuffer when needed */ - in_render_pass_ptr->register_for_callbacks( - RENDER_PASS_CALLBACK_ID_BAKING_NEEDED, - std::bind(&Framebuffer::on_renderpass_changed, - this, - std::placeholders::_1), - this - ); - /* All done */ result = true; @@ -287,63 +187,22 @@ bool Anvil::Framebuffer::bake(std::shared_ptr in_render_pass_ } /* Please see header for specification */ -std::shared_ptr Anvil::Framebuffer::create(std::weak_ptr in_device_ptr, - uint32_t in_width, - uint32_t in_height, - uint32_t in_n_layers) +Anvil::FramebufferUniquePtr Anvil::Framebuffer::create(Anvil::FramebufferCreateInfoUniquePtr in_create_info_ptr) { - std::shared_ptr result_ptr; + Anvil::FramebufferUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( - new Anvil::Framebuffer(in_device_ptr, - in_width, - in_height, - in_n_layers) + new Anvil::Framebuffer( + std::move(in_create_info_ptr) + ) ); return result_ptr; } /* Please see header for specification */ -bool Anvil::Framebuffer::get_attachment_at_index(uint32_t in_attachment_index, - std::shared_ptr* out_image_view_ptr) const -{ - bool result = false; - - if (m_attachments.size() <= in_attachment_index) - { - goto end; - } - else - { - *out_image_view_ptr = m_attachments[in_attachment_index].image_view_ptr; - result = true; - } - -end: - return result; -} - -/* Please see header for specification */ -bool Anvil::Framebuffer::get_attachment_id_for_image_view(std::shared_ptr in_image_view_ptr, - FramebufferAttachmentID* out_attachment_id_ptr) -{ - bool result = false; - auto attachment_iterator = std::find(m_attachments.cbegin(), - m_attachments.cend(), - in_image_view_ptr); - - if (attachment_iterator != m_attachments.end() ) - { - *out_attachment_id_ptr = static_cast(attachment_iterator - m_attachments.begin() ); - result = true; - } - - return result; -} - -/* Please see header for specification */ -const VkFramebuffer Anvil::Framebuffer::get_framebuffer(std::shared_ptr in_render_pass_ptr) +VkFramebuffer Anvil::Framebuffer::get_framebuffer(Anvil::RenderPass* in_render_pass_ptr) { auto fb_iterator = m_baked_framebuffers.find(in_render_pass_ptr); VkFramebuffer result_fb = VK_NULL_HANDLE; @@ -379,39 +238,3 @@ const VkFramebuffer Anvil::Framebuffer::get_framebuffer(std::shared_ptr(in_callback_argument_ptr); - auto renderpass_ptr = std::shared_ptr (callback_argument_ptr->renderpass_ptr, - Anvil::NullDeleter() ); - - auto baked_fb_iterator = m_baked_framebuffers.find(renderpass_ptr); - - if (baked_fb_iterator == m_baked_framebuffers.end() ) - { - anvil_assert_fail(); - - goto end; - } - - /* Mark as dirty so that the framebuffer is rebaked the next time 
it is requested. */ - baked_fb_iterator->second.dirty = true; -end: - ; -} diff --git a/src/wrappers/graphics_pipeline_manager.cpp b/src/wrappers/graphics_pipeline_manager.cpp index e658396a..03aeff72 100644 --- a/src/wrappers/graphics_pipeline_manager.cpp +++ b/src/wrappers/graphics_pipeline_manager.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -21,9 +21,11 @@ // #include +#include "misc/base_pipeline_create_info.h" #include "misc/base_pipeline_manager.h" #include "misc/debug.h" #include "misc/object_tracker.h" +#include "misc/render_pass_create_info.h" #include "wrappers/device.h" #include "wrappers/graphics_pipeline_manager.h" #include "wrappers/pipeline_cache.h" @@ -37,987 +39,396 @@ #endif -/** When baking a graphics pipeline descriptor, we currently use STL vectors to maintain storage - * for various Vulkan descriptors. These need to be stored on heap because: - * - * 1. The number of required descriptors depends on the pipeline configuration. - * 2. Some parent descriptors use pointers to point at other child descriptors. - * - * In order for vectors not to reallocate memory, we need to reserve memory up-front before - * we start pushing stuff. The number defined by N_CACHE_ITEMS tells how many items each - * vector will hold at max. - * - * This #define could be removed by handling memory allocations manually, but for simplicity - * reasons we'll go with STL vectors for now. - * */ -#define N_CACHE_ITEMS 128 - - /* Please see header for specification */ -Anvil::GraphicsPipelineManager::GraphicsPipelineManager(std::weak_ptr in_device_ptr, - bool in_use_pipeline_cache, - std::shared_ptr in_pipeline_cache_to_reuse_ptr) +Anvil::GraphicsPipelineManager::GraphicsPipelineManager(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe, + bool in_use_pipeline_cache, + Anvil::PipelineCache* in_pipeline_cache_to_reuse_ptr) :BasePipelineManager(in_device_ptr, + in_mt_safe, in_use_pipeline_cache, in_pipeline_cache_to_reuse_ptr) { /* Register the object */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_GRAPHICS_PIPELINE_MANAGER, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::ANVIL_GRAPHICS_PIPELINE_MANAGER, this); } /* Please see header for specification */ Anvil::GraphicsPipelineManager::~GraphicsPipelineManager() { - m_pipelines.clear(); + m_baked_pipelines.clear (); + m_outstanding_pipelines.clear(); /* Unregister the object */ - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_GRAPHICS_PIPELINE_MANAGER, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::ANVIL_GRAPHICS_PIPELINE_MANAGER, this); } -/* Please see header for specification */ -bool Anvil::GraphicsPipelineManager::add_derivative_pipeline_from_sibling_pipeline(bool in_disable_optimizations, - bool in_allow_derivatives, - std::shared_ptr in_renderpass_ptr, - SubPassID in_subpass_id, - const ShaderModuleStageEntryPoint& in_fragment_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_geometry_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_tess_control_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_tess_evaluation_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& 
in_vertex_shader_shader_stage_entrypoint_info, - GraphicsPipelineID in_base_pipeline_id, - GraphicsPipelineID* out_graphics_pipeline_id_ptr) -{ - bool result; - ShaderModuleStageEntryPoint stages[GRAPHICS_SHADER_STAGE_COUNT]; - - stages[GRAPHICS_SHADER_STAGE_FRAGMENT] = in_fragment_shader_stage_entrypoint_info; - stages[GRAPHICS_SHADER_STAGE_GEOMETRY] = in_geometry_shader_stage_entrypoint_info; - stages[GRAPHICS_SHADER_STAGE_TESSELLATION_CONTROL] = in_tess_control_shader_stage_entrypoint_info; - stages[GRAPHICS_SHADER_STAGE_TESSELLATION_EVALUATION] = in_tess_evaluation_shader_stage_entrypoint_info; - stages[GRAPHICS_SHADER_STAGE_VERTEX] = in_vertex_shader_shader_stage_entrypoint_info; - - result = BasePipelineManager::add_derivative_pipeline_from_sibling_pipeline(in_disable_optimizations, - in_allow_derivatives, - GRAPHICS_SHADER_STAGE_COUNT, - stages, - in_base_pipeline_id, - out_graphics_pipeline_id_ptr); - anvil_assert(result); - - if (result) - { - anvil_assert(m_pipeline_configurations.find(in_base_pipeline_id) != m_pipeline_configurations.end() ); - anvil_assert(m_pipeline_configurations.find(*out_graphics_pipeline_id_ptr) == m_pipeline_configurations.end() ); - - m_pipeline_configurations[*out_graphics_pipeline_id_ptr] = std::shared_ptr(new GraphicsPipelineConfiguration(m_pipeline_configurations[in_base_pipeline_id], - in_renderpass_ptr, - in_subpass_id) ); - } - - return result; -} - -/* Please see header for specification */ -bool Anvil::GraphicsPipelineManager::add_derivative_pipeline_from_pipeline(bool in_disable_optimizations, - bool in_allow_derivatives, - std::shared_ptr in_renderpass_ptr, - SubPassID in_subpass_id, - const ShaderModuleStageEntryPoint& in_fragment_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_geometry_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_tess_control_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_tess_evaluation_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_vertex_shader_shader_stage_entrypoint_info, - VkPipeline in_base_pipeline, - GraphicsPipelineID* out_graphics_pipeline_id_ptr) -{ - bool result; - ShaderModuleStageEntryPoint stages[GRAPHICS_SHADER_STAGE_COUNT]; - - stages[GRAPHICS_SHADER_STAGE_FRAGMENT] = in_fragment_shader_stage_entrypoint_info; - stages[GRAPHICS_SHADER_STAGE_GEOMETRY] = in_geometry_shader_stage_entrypoint_info; - stages[GRAPHICS_SHADER_STAGE_TESSELLATION_CONTROL] = in_tess_control_shader_stage_entrypoint_info; - stages[GRAPHICS_SHADER_STAGE_TESSELLATION_EVALUATION] = in_tess_evaluation_shader_stage_entrypoint_info; - stages[GRAPHICS_SHADER_STAGE_VERTEX] = in_vertex_shader_shader_stage_entrypoint_info; - - result = BasePipelineManager::add_derivative_pipeline_from_pipeline(in_disable_optimizations, - in_allow_derivatives, - GRAPHICS_SHADER_STAGE_COUNT, - stages, - in_base_pipeline, - out_graphics_pipeline_id_ptr); - anvil_assert(result); - - if (result) - { - anvil_assert(m_pipeline_configurations.find(*out_graphics_pipeline_id_ptr) == m_pipeline_configurations.end() ); - - m_pipeline_configurations[*out_graphics_pipeline_id_ptr] = std::shared_ptr(new GraphicsPipelineConfiguration(in_renderpass_ptr, - in_subpass_id) ); - } - - return result; -} - -/* Please see header for specification */ -bool Anvil::GraphicsPipelineManager::add_proxy_pipeline(GraphicsPipelineID* out_proxy_graphics_pipeline_id_ptr) -{ - bool result = false; - - /* For proxy pipelines, we *only* need the configuration container. 
*/ - result = BasePipelineManager::add_proxy_pipeline(out_proxy_graphics_pipeline_id_ptr); - - anvil_assert(result); - if (result) - { - anvil_assert(m_pipeline_configurations.find(*out_proxy_graphics_pipeline_id_ptr) == m_pipeline_configurations.end() ); - - m_pipeline_configurations[*out_proxy_graphics_pipeline_id_ptr] = std::shared_ptr(new GraphicsPipelineConfiguration(nullptr, /* in_renderpass_ptr */ - UINT32_MAX) ); /* in_subpass_id */ - } - - return result; -} - -/* Please see header for specification */ -bool Anvil::GraphicsPipelineManager::add_regular_pipeline(bool in_disable_optimizations, - bool in_allow_derivatives, - std::shared_ptr in_renderpass_ptr, - SubPassID in_subpass_id, - const ShaderModuleStageEntryPoint& in_fragment_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_geometry_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_tess_control_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_tess_evaluation_shader_stage_entrypoint_info, - const ShaderModuleStageEntryPoint& in_vertex_shader_shader_stage_entrypoint_info, - GraphicsPipelineID* out_graphics_pipeline_id_ptr) -{ - bool result; - ShaderModuleStageEntryPoint stages[GRAPHICS_SHADER_STAGE_COUNT]; - - stages[GRAPHICS_SHADER_STAGE_FRAGMENT] = in_fragment_shader_stage_entrypoint_info; - stages[GRAPHICS_SHADER_STAGE_GEOMETRY] = in_geometry_shader_stage_entrypoint_info; - stages[GRAPHICS_SHADER_STAGE_TESSELLATION_CONTROL] = in_tess_control_shader_stage_entrypoint_info; - stages[GRAPHICS_SHADER_STAGE_TESSELLATION_EVALUATION] = in_tess_evaluation_shader_stage_entrypoint_info; - stages[GRAPHICS_SHADER_STAGE_VERTEX] = in_vertex_shader_shader_stage_entrypoint_info; - - result = BasePipelineManager::add_regular_pipeline(in_disable_optimizations, - in_allow_derivatives, - GRAPHICS_SHADER_STAGE_COUNT, - stages, - out_graphics_pipeline_id_ptr); - anvil_assert(result); - - if (result) - { - anvil_assert(m_pipeline_configurations.find(*out_graphics_pipeline_id_ptr) == m_pipeline_configurations.end() ); - - m_pipeline_configurations[*out_graphics_pipeline_id_ptr] = std::shared_ptr(new GraphicsPipelineConfiguration(in_renderpass_ptr, - in_subpass_id) ); - } - - return result; -} - -/* Please see header for specification */ -bool Anvil::GraphicsPipelineManager::add_vertex_attribute(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_location, - VkFormat in_format, - uint32_t in_offset_in_bytes, - uint32_t in_stride_in_bytes, - VkVertexInputRate in_rate, - uint32_t in_explicit_binding_index) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - /* Retrieve the pipeline config descriptor ..*/ - if (pipeline_iterator == m_pipeline_configurations.end() ) - { - anvil_assert_fail(); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_iterator->second; - } - - #ifdef _DEBUG - { - /* Make sure the location is not already referred to by already added attributes */ - for (auto attribute_iterator = pipeline_config_ptr->attributes.cbegin(); - attribute_iterator != pipeline_config_ptr->attributes.cend(); - ++attribute_iterator) - { - anvil_assert(attribute_iterator->location != in_location); - } - - /* If an explicit binding has been requested for the new attribute, we need to make sure that any user-specified attributes - * that refer to this binding specify the same stride and input rate. 
*/ - if (in_explicit_binding_index != UINT32_MAX) - { - for (auto attribute_iterator = pipeline_config_ptr->attributes.cbegin(); - attribute_iterator != pipeline_config_ptr->attributes.cend(); - ++attribute_iterator) - { - const auto& current_attribute = *attribute_iterator; - - if (current_attribute.explicit_binding_index == in_explicit_binding_index) - { - anvil_assert(current_attribute.rate == in_rate); - anvil_assert(current_attribute.stride_in_bytes == in_stride_in_bytes); - } - } - } - } - #endif - - /* Add a new vertex attribute descriptor. At this point, we do not differentiate between - * attributes and bindings. Actual Vulkan attributes and bindings will be created at baking - * time. */ - pipeline_config_ptr->attributes.push_back(InternalVertexAttribute(in_explicit_binding_index, - in_format, - in_location, - in_offset_in_bytes, - in_rate, - in_stride_in_bytes)); - - /* All done */ - result = true; -end: - return result; -} - /* Please see header for specification */ bool Anvil::GraphicsPipelineManager::bake() { - std::vector color_blend_attachment_states_vk_cache; - std::vector color_blend_state_create_info_items_vk_cache; - std::vector depth_stencil_state_create_info_items_vk_cache; - std::vector dynamic_state_create_info_items_vk_cache; - std::vector enabled_dynamic_states_vk_cache; - std::vector graphics_pipeline_create_info_items_vk_cache; - std::vector input_assembly_state_create_info_items_vk_cache; - std::vector multisample_state_create_info_items_vk_cache; - uint32_t n_consumed_graphics_pipelines = 0; - std::vector raster_state_create_info_items_vk_cache; - bool result = false; - std::vector result_graphics_pipelines; - VkResult result_vk; - std::vector scissor_boxes_vk_cache; - std::vector shader_stage_create_info_items_vk_cache; - std::vector specialization_info_vk_cache; - std::vector specialization_map_entry_vk_cache; - std::vector tessellation_state_create_info_items_vk_cache; - std::vector vertex_input_state_create_info_items_vk_cache; - std::vector viewport_state_create_info_items_vk_cache; - std::vector viewports_vk_cache; - - std::shared_ptr device_locked_ptr(m_device_ptr); - - static const VkPipelineRasterizationStateRasterizationOrderAMD relaxed_rasterization_order_item = - { - VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD, - nullptr, - VK_RASTERIZATION_ORDER_RELAXED_AMD - }; - static const VkPipelineRasterizationStateRasterizationOrderAMD strict_rasterization_order_item = - { - VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD, - nullptr, - VK_RASTERIZATION_ORDER_STRICT_AMD - }; - - /* Iterate over all stored pipelines, focus on the ones marked as dirty. 
*/ - typedef struct _bake_item + typedef struct BakeItem { - PipelineID pipeline_id; - std::shared_ptr pipeline_ptr; + PipelineID pipeline_id; + Pipeline* pipeline_ptr; - _bake_item(PipelineID in_pipeline_id, - std::shared_ptr in_pipeline_ptr) + BakeItem(PipelineID in_pipeline_id, + Pipeline* in_pipeline_ptr) { pipeline_id = in_pipeline_id; pipeline_ptr = in_pipeline_ptr; } - bool operator==(std::shared_ptr in_pipeline_ptr) + bool operator==(PipelineID in_pipeline_id) { - return pipeline_ptr == in_pipeline_ptr; + return pipeline_id == in_pipeline_id; } - } _bake_item; - - std::vector<_bake_item> bake_items; - - - for (auto pipeline_iterator = m_pipelines.begin(); - pipeline_iterator != m_pipelines.end(); + } BakeItem; + + std::vector bake_items; + auto color_blend_state_create_info_chain_cache = std::vector > > (); + auto depth_stencil_state_create_info_chain_cache = std::vector > > (); + auto dynamic_state_create_info_chain_cache = std::vector > > (); + auto graphics_pipeline_create_info_chains = Anvil::StructChainVector (); + auto input_assembly_state_create_info_chain_cache = std::vector > >(); + auto multisample_state_create_info_chain_cache = std::vector > > (); + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + uint32_t n_consumed_graphics_pipelines = 0; + auto raster_state_create_info_chain_cache = std::vector > >(); + bool result = false; + std::vector result_graphics_pipelines; + VkResult result_vk; + auto shader_stage_create_info_chain_ptrs = std::vector > >(); + auto tessellation_state_create_info_chain_cache = std::vector > >(); + auto vertex_input_state_create_info_chain_cache = std::vector > > (); + auto viewport_state_create_info_chain_cache = std::vector > > (); + + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } + + for (auto pipeline_iterator = m_outstanding_pipelines.begin(); + pipeline_iterator != m_outstanding_pipelines.end(); ++pipeline_iterator) { - if ( pipeline_iterator->second->dirty && - !pipeline_iterator->second->is_proxy && - pipeline_iterator->second->is_bakeable) + if (pipeline_iterator->second->layout_ptr == nullptr) { - if (pipeline_iterator->second->layout_dirty) - { - pipeline_iterator->second->layout_ptr = get_pipeline_layout(pipeline_iterator->first); - } + get_pipeline_layout(pipeline_iterator->first); - bake_items.push_back(_bake_item(pipeline_iterator->first, - pipeline_iterator->second) ); + anvil_assert(pipeline_iterator->second->layout_ptr != nullptr); } + + bake_items.push_back( + BakeItem(pipeline_iterator->first, + pipeline_iterator->second.get() ) + ); } - color_blend_attachment_states_vk_cache.reserve (N_CACHE_ITEMS); - color_blend_state_create_info_items_vk_cache.reserve (N_CACHE_ITEMS); - depth_stencil_state_create_info_items_vk_cache.reserve (N_CACHE_ITEMS); - dynamic_state_create_info_items_vk_cache.reserve (N_CACHE_ITEMS); - enabled_dynamic_states_vk_cache.reserve (N_CACHE_ITEMS); - graphics_pipeline_create_info_items_vk_cache.reserve (N_CACHE_ITEMS); - input_assembly_state_create_info_items_vk_cache.reserve(N_CACHE_ITEMS); - multisample_state_create_info_items_vk_cache.reserve (N_CACHE_ITEMS); - raster_state_create_info_items_vk_cache.reserve (N_CACHE_ITEMS); - scissor_boxes_vk_cache.reserve (N_CACHE_ITEMS); - shader_stage_create_info_items_vk_cache.reserve (N_CACHE_ITEMS); - specialization_info_vk_cache.reserve (N_CACHE_ITEMS); - specialization_map_entry_vk_cache.reserve (N_CACHE_ITEMS); - tessellation_state_create_info_items_vk_cache.reserve (N_CACHE_ITEMS); - 
vertex_input_state_create_info_items_vk_cache.reserve (N_CACHE_ITEMS); - viewport_state_create_info_items_vk_cache.reserve (N_CACHE_ITEMS); - viewports_vk_cache.reserve (N_CACHE_ITEMS); + if (bake_items.size() == 0) + { + result = true; + + goto end; + } for (auto bake_item_iterator = bake_items.begin(); bake_item_iterator != bake_items.end(); ++bake_item_iterator) { - bool color_blend_state_used = false; - std::shared_ptr current_pipeline_config_ptr = nullptr; - const GraphicsPipelineID current_pipeline_id = bake_item_iterator->pipeline_id; - std::shared_ptr current_pipeline_ptr = bake_item_iterator->pipeline_ptr; - bool depth_stencil_state_used = false; - bool dynamic_state_used = false; - VkGraphicsPipelineCreateInfo graphics_pipeline_create_info = {}; - VkPipelineInputAssemblyStateCreateInfo input_assembly_state_create_info; - bool multisample_state_used = false; - VkPipelineRasterizationStateCreateInfo raster_state_create_info; - uint32_t shader_stage_start_offset = UINT32_MAX; - uint32_t subpass_n_color_attachments = 0; - bool tessellation_state_used = false; - VkPipelineVertexInputStateCreateInfo vertex_input_state_create_info; - bool viewport_state_used = false; - - if (!current_pipeline_ptr->dirty) + bool color_blend_state_used = false; + auto current_pipeline_create_info_ptr = dynamic_cast(bake_item_iterator->pipeline_ptr->pipeline_create_info_ptr.get() ); + Pipeline* current_pipeline_ptr = bake_item_iterator->pipeline_ptr; + const Anvil::RenderPass* current_pipeline_renderpass_ptr = nullptr; + Anvil::SubPassID current_pipeline_subpass_id; + bool depth_stencil_state_used = false; + bool dynamic_state_used = false; + Anvil::StructChainer graphics_pipeline_create_info_chainer; + bool is_dynamic_scissor_state_enabled = false; + bool is_dynamic_viewport_state_enabled = false; + bool multisample_state_used = false; + uint32_t n_shader_stages_used = 0; + bool tessellation_state_used = false; + bool viewport_state_used = false; + + if (current_pipeline_create_info_ptr == nullptr) { + anvil_assert(current_pipeline_create_info_ptr != nullptr); + continue; } - if (m_pipeline_configurations.find(current_pipeline_id) == m_pipeline_configurations.end() ) - { - anvil_assert_fail(); + current_pipeline_renderpass_ptr = current_pipeline_create_info_ptr->get_renderpass(); + current_pipeline_subpass_id = current_pipeline_create_info_ptr->get_subpass_id(); - goto end; - } - else + if (current_pipeline_create_info_ptr->is_proxy() ) { - current_pipeline_config_ptr = m_pipeline_configurations[current_pipeline_id]; + continue; } - /* Extract subpass information */ - current_pipeline_config_ptr->renderpass_ptr->get_subpass_n_attachments(current_pipeline_config_ptr->subpass_id, - ATTACHMENT_TYPE_COLOR, - &subpass_n_color_attachments); - /* Form the color blend state create info descriptor, if needed */ - if (!current_pipeline_config_ptr->rasterizer_discard_enabled && - subpass_n_color_attachments > 0) { - VkPipelineColorBlendStateCreateInfo color_blend_state_create_info; - const uint32_t start_offset = static_cast(color_blend_attachment_states_vk_cache.size() ); - - color_blend_state_create_info.attachmentCount = subpass_n_color_attachments; - color_blend_state_create_info.flags = 0; - color_blend_state_create_info.logicOp = current_pipeline_config_ptr->logic_op; - color_blend_state_create_info.logicOpEnable = static_cast((current_pipeline_config_ptr->logic_op_enabled) ? 
VK_TRUE : VK_FALSE); - color_blend_state_create_info.pAttachments = nullptr; - color_blend_state_create_info.pNext = nullptr; - color_blend_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; - - memcpy(color_blend_state_create_info.blendConstants, - current_pipeline_config_ptr->blend_constant, - sizeof(color_blend_state_create_info.blendConstants) ); + auto color_blend_state_create_info_chain_ptr = bake_pipeline_color_blend_state_create_info(current_pipeline_create_info_ptr, + current_pipeline_renderpass_ptr, + current_pipeline_subpass_id); - for (uint32_t n_subpass_color_attachment = 0; - n_subpass_color_attachment < subpass_n_color_attachments; - ++n_subpass_color_attachment) + if (color_blend_state_create_info_chain_ptr != nullptr) { - VkPipelineColorBlendAttachmentState blend_attachment_state; - BlendingProperties* current_attachment_blending_props_ptr = nullptr; - - if (current_pipeline_config_ptr->subpass_attachment_blending_properties.find(n_subpass_color_attachment) == current_pipeline_config_ptr->subpass_attachment_blending_properties.end() ) - { - /* The user has not defined blending properties for current color attachment. Use default state values .. */ - current_attachment_blending_props_ptr = ¤t_pipeline_config_ptr->subpass_attachment_blending_properties[n_subpass_color_attachment]; - - current_attachment_blending_props_ptr->blend_enabled = false; - current_attachment_blending_props_ptr->blend_op_alpha = VK_BLEND_OP_ADD; - current_attachment_blending_props_ptr->blend_op_color = VK_BLEND_OP_ADD; - current_attachment_blending_props_ptr->channel_write_mask = VK_COLOR_COMPONENT_A_BIT | - VK_COLOR_COMPONENT_B_BIT | - VK_COLOR_COMPONENT_G_BIT | - VK_COLOR_COMPONENT_R_BIT; - current_attachment_blending_props_ptr->dst_alpha_blend_factor = VK_BLEND_FACTOR_ONE; - current_attachment_blending_props_ptr->dst_color_blend_factor = VK_BLEND_FACTOR_ONE; - current_attachment_blending_props_ptr->src_alpha_blend_factor = VK_BLEND_FACTOR_ONE; - current_attachment_blending_props_ptr->src_color_blend_factor = VK_BLEND_FACTOR_ONE; - } - else - { - current_attachment_blending_props_ptr = ¤t_pipeline_config_ptr->subpass_attachment_blending_properties[n_subpass_color_attachment]; - } - - #ifdef _DEBUG - { - if (n_subpass_color_attachment > 0) - { - const BlendingProperties& zero_attachment_blending_props = current_pipeline_config_ptr->subpass_attachment_blending_properties[0]; - - anvil_assert(zero_attachment_blending_props == *current_attachment_blending_props_ptr); - } - } - #endif /* _DEBUG */ + color_blend_state_create_info_chain_cache.push_back(std::move(color_blend_state_create_info_chain_ptr) ); - /* Convert internal descriptor to Vulkan descriptor & stash it in the cache vector. */ - blend_attachment_state = current_attachment_blending_props_ptr->get_vk_descriptor(); - - color_blend_attachment_states_vk_cache.push_back(blend_attachment_state); + color_blend_state_used = true; } + else + { + /* No color attachments available. Make sure none of the dependent modes are enabled. */ + bool logic_op_enabled = false; - color_blend_state_create_info.pAttachments = (subpass_n_color_attachments > 0) ? &color_blend_attachment_states_vk_cache[start_offset] - : nullptr; - color_blend_state_used = true; + current_pipeline_create_info_ptr->get_logic_op_state(&logic_op_enabled, + nullptr); /* out_opt_logic_op_ptr */ - color_blend_state_create_info_items_vk_cache.push_back(color_blend_state_create_info); - } - else - { - /* No color attachments available. 
Make sure none of the dependent modes are enabled. */ - anvil_assert(!current_pipeline_config_ptr->logic_op_enabled); + anvil_assert(!logic_op_enabled); - color_blend_state_used = false; + color_blend_state_used = false; + } } /* Form the depth stencil state create info descriptor, if needed */ - uint32_t n_depth_stencil_attachments = 0; - - current_pipeline_config_ptr->renderpass_ptr->get_subpass_n_attachments(current_pipeline_config_ptr->subpass_id, - ATTACHMENT_TYPE_DEPTH_STENCIL, - &n_depth_stencil_attachments); - - if (n_depth_stencil_attachments) - { - VkPipelineDepthStencilStateCreateInfo depth_stencil_state_create_info; - - depth_stencil_state_create_info.back = current_pipeline_config_ptr->stencil_state_back_face; - depth_stencil_state_create_info.depthBoundsTestEnable = static_cast(current_pipeline_config_ptr->depth_bounds_test_enabled ? VK_TRUE : VK_FALSE); - depth_stencil_state_create_info.depthCompareOp = current_pipeline_config_ptr->depth_test_compare_op; - depth_stencil_state_create_info.depthTestEnable = static_cast(current_pipeline_config_ptr->depth_test_enabled ? VK_TRUE : VK_FALSE); - depth_stencil_state_create_info.depthWriteEnable = static_cast(current_pipeline_config_ptr->depth_writes_enabled ? VK_TRUE : VK_FALSE); - depth_stencil_state_create_info.flags = 0; - depth_stencil_state_create_info.front = current_pipeline_config_ptr->stencil_state_front_face; - depth_stencil_state_create_info.maxDepthBounds = current_pipeline_config_ptr->max_depth_bounds; - depth_stencil_state_create_info.minDepthBounds = current_pipeline_config_ptr->min_depth_bounds; - depth_stencil_state_create_info.pNext = nullptr; - depth_stencil_state_create_info.stencilTestEnable = static_cast(current_pipeline_config_ptr->stencil_test_enabled ? VK_TRUE : VK_FALSE); - depth_stencil_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO; - - depth_stencil_state_used = true; - - depth_stencil_state_create_info_items_vk_cache.push_back(depth_stencil_state_create_info); - } - else { - /* No depth/stencil attachment available. Make sure none of the dependent modes are enabled. 
*/ - anvil_assert(!current_pipeline_config_ptr->depth_bounds_test_enabled); - anvil_assert(!current_pipeline_config_ptr->depth_test_enabled); - anvil_assert(!current_pipeline_config_ptr->depth_writes_enabled); - anvil_assert(!current_pipeline_config_ptr->stencil_test_enabled); - - depth_stencil_state_used = false; - } - - /* Form the dynamic state create info descriptor, if needed */ - if (current_pipeline_config_ptr->enabled_dynamic_states != 0) - { - VkPipelineDynamicStateCreateInfo dynamic_state_create_info; - const uint32_t start_offset = static_cast(enabled_dynamic_states_vk_cache.size() ); - - if ((current_pipeline_config_ptr->enabled_dynamic_states & DYNAMIC_STATE_BLEND_CONSTANTS_BIT) != 0) - { - enabled_dynamic_states_vk_cache.push_back(VK_DYNAMIC_STATE_BLEND_CONSTANTS); - } + auto depth_stencil_state_create_info_chain_ptr = bake_pipeline_depth_stencil_state_create_info(current_pipeline_create_info_ptr, + current_pipeline_renderpass_ptr); - if ((current_pipeline_config_ptr->enabled_dynamic_states & DYNAMIC_STATE_DEPTH_BIAS_BIT) != 0) + if (depth_stencil_state_create_info_chain_ptr != nullptr) { - enabled_dynamic_states_vk_cache.push_back(VK_DYNAMIC_STATE_DEPTH_BIAS); - } - - if ((current_pipeline_config_ptr->enabled_dynamic_states & DYNAMIC_STATE_DEPTH_BOUNDS_BIT) != 0) - { - enabled_dynamic_states_vk_cache.push_back(VK_DYNAMIC_STATE_DEPTH_BOUNDS); - } + depth_stencil_state_create_info_chain_cache.push_back(std::move(depth_stencil_state_create_info_chain_ptr) ); - if ((current_pipeline_config_ptr->enabled_dynamic_states & DYNAMIC_STATE_LINE_WIDTH_BIT) != 0) - { - enabled_dynamic_states_vk_cache.push_back(VK_DYNAMIC_STATE_LINE_WIDTH); + depth_stencil_state_used = true; } - - if ((current_pipeline_config_ptr->enabled_dynamic_states & DYNAMIC_STATE_SCISSOR_BIT) != 0) + else { - enabled_dynamic_states_vk_cache.push_back(VK_DYNAMIC_STATE_SCISSOR); + depth_stencil_state_used = false; } + } - if ((current_pipeline_config_ptr->enabled_dynamic_states & DYNAMIC_STATE_STENCIL_COMPARE_MASK_BIT) != 0) - { - enabled_dynamic_states_vk_cache.push_back(VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK); - } + /* Form the dynamic state create info descriptor, if needed */ + { + auto dynamic_state_create_info_ptr = bake_pipeline_dynamic_state_create_info(current_pipeline_create_info_ptr); - if ((current_pipeline_config_ptr->enabled_dynamic_states & DYNAMIC_STATE_STENCIL_REFERENCE_BIT) != 0) - { - enabled_dynamic_states_vk_cache.push_back(VK_DYNAMIC_STATE_STENCIL_REFERENCE); - } + dynamic_state_used = false; + is_dynamic_scissor_state_enabled = false; + is_dynamic_viewport_state_enabled = false; - if ((current_pipeline_config_ptr->enabled_dynamic_states & DYNAMIC_STATE_STENCIL_WRITE_MASK_BIT) != 0) + if (dynamic_state_create_info_ptr != nullptr) { - enabled_dynamic_states_vk_cache.push_back(VK_DYNAMIC_STATE_STENCIL_WRITE_MASK); - } + const Anvil::DynamicState* enabled_dynamic_states_ptr = nullptr; + uint32_t n_enabled_dynamic_states = 0; - if ((current_pipeline_config_ptr->enabled_dynamic_states & DYNAMIC_STATE_VIEWPORT_BIT) != 0) - { - enabled_dynamic_states_vk_cache.push_back(VK_DYNAMIC_STATE_VIEWPORT); - } + current_pipeline_create_info_ptr->get_enabled_dynamic_states(&enabled_dynamic_states_ptr, + &n_enabled_dynamic_states); - dynamic_state_create_info.dynamicStateCount = static_cast(enabled_dynamic_states_vk_cache.size() - start_offset); - dynamic_state_create_info.flags = 0; - dynamic_state_create_info.pDynamicStates = (dynamic_state_create_info.dynamicStateCount > 0) ? 
&enabled_dynamic_states_vk_cache[start_offset] - : VK_NULL_HANDLE; - dynamic_state_create_info.pNext = nullptr; - dynamic_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO; + for (uint32_t n_dynamic_state = 0; + n_dynamic_state < n_enabled_dynamic_states; + ++n_dynamic_state) + { + if (enabled_dynamic_states_ptr[n_dynamic_state] == Anvil::DynamicState::SCISSOR) + { + is_dynamic_scissor_state_enabled = true; + } + else + if (enabled_dynamic_states_ptr[n_dynamic_state] == Anvil::DynamicState::VIEWPORT) + { + is_dynamic_viewport_state_enabled = true; + } + } - dynamic_state_used = true; + dynamic_state_used = true; - dynamic_state_create_info_items_vk_cache.push_back(dynamic_state_create_info); - } - else - { - dynamic_state_used = false; + dynamic_state_create_info_chain_cache.push_back(std::move(dynamic_state_create_info_ptr) ); + } } /* Form the input assembly create info descriptor */ - input_assembly_state_create_info.flags = 0; - input_assembly_state_create_info.pNext = nullptr; - input_assembly_state_create_info.primitiveRestartEnable = static_cast(current_pipeline_config_ptr->primitive_restart_enabled ? VK_TRUE : VK_FALSE); - input_assembly_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; - input_assembly_state_create_info.topology = current_pipeline_config_ptr->primitive_topology; - - input_assembly_state_create_info_items_vk_cache.push_back(input_assembly_state_create_info); - - /* Form the multisample state create info descriptor, if needed */ - if (!current_pipeline_config_ptr->rasterizer_discard_enabled) { - VkPipelineMultisampleStateCreateInfo multisample_state_create_info; - - /* If sample mask is not enabled, Vulkan spec will assume all samples need to pass. This is what the default sample mask value mimics. - * - * Hence, if the application specified a non-~0u sample mask and has NOT enabled the sample mask using toggle_sample_mask(), it's (in - * all likelihood) a trivial app-side issue. - */ - anvil_assert((!current_pipeline_config_ptr->sample_mask_enabled && current_pipeline_config_ptr->sample_mask == ~0u) || - current_pipeline_config_ptr->sample_mask_enabled); - - multisample_state_create_info.alphaToCoverageEnable = static_cast(current_pipeline_config_ptr->alpha_to_coverage_enabled ? VK_TRUE : VK_FALSE); - multisample_state_create_info.alphaToOneEnable = static_cast(current_pipeline_config_ptr->alpha_to_one_enabled ? VK_TRUE : VK_FALSE); - multisample_state_create_info.flags = 0; - multisample_state_create_info.minSampleShading = current_pipeline_config_ptr->min_sample_shading; - multisample_state_create_info.pNext = nullptr; - multisample_state_create_info.pSampleMask = (current_pipeline_config_ptr->sample_mask_enabled) ? ¤t_pipeline_config_ptr->sample_mask : nullptr; - multisample_state_create_info.rasterizationSamples = static_cast(current_pipeline_config_ptr->sample_count); - multisample_state_create_info.sampleShadingEnable = static_cast(current_pipeline_config_ptr->sample_shading_enabled ? 
VK_TRUE : VK_FALSE); - multisample_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; - - multisample_state_used = true; + auto input_assembly_create_info_ptr = bake_pipeline_input_assembly_state_create_info(current_pipeline_create_info_ptr); - multisample_state_create_info_items_vk_cache.push_back(multisample_state_create_info); - } - else - { - /* Make sure any dependent modes are disabled */ - anvil_assert(!current_pipeline_config_ptr->alpha_to_coverage_enabled); - anvil_assert(!current_pipeline_config_ptr->alpha_to_one_enabled); - anvil_assert(!current_pipeline_config_ptr->sample_shading_enabled); + anvil_assert(input_assembly_create_info_ptr != nullptr); - multisample_state_used = false; + input_assembly_state_create_info_chain_cache.push_back(std::move(input_assembly_create_info_ptr) ); } - /* Form the raster state create info descriptor */ - raster_state_create_info.cullMode = current_pipeline_config_ptr->cull_mode; - raster_state_create_info.depthBiasClamp = current_pipeline_config_ptr->depth_bias_clamp; - raster_state_create_info.depthBiasConstantFactor = current_pipeline_config_ptr->depth_bias_constant_factor; - raster_state_create_info.depthBiasEnable = static_cast(current_pipeline_config_ptr->depth_bias_enabled ? VK_TRUE : VK_FALSE); - raster_state_create_info.depthBiasSlopeFactor = current_pipeline_config_ptr->depth_bias_slope_factor; - raster_state_create_info.depthClampEnable = static_cast(current_pipeline_config_ptr->depth_clamp_enabled ? VK_TRUE : VK_FALSE); - raster_state_create_info.flags = 0; - raster_state_create_info.frontFace = current_pipeline_config_ptr->front_face; - raster_state_create_info.lineWidth = current_pipeline_config_ptr->line_width; - raster_state_create_info.pNext = nullptr; - raster_state_create_info.polygonMode = current_pipeline_config_ptr->polygon_mode; - raster_state_create_info.rasterizerDiscardEnable = static_cast(current_pipeline_config_ptr->rasterizer_discard_enabled ? VK_TRUE : VK_FALSE); - raster_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO; - - if (device_locked_ptr->is_extension_enabled(VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME) ) + /* Form the multisample state create info descriptor, if needed */ { - /* Chain a predefined struct which will toggle the relaxed rasterization, as long as the device supports the - * VK_AMD_rasterization_order extension. - */ - const VkPipelineRasterizationStateRasterizationOrderAMD* chained_item_ptr = nullptr; + auto multisample_state_create_info_ptr = bake_pipeline_multisample_state_create_info(current_pipeline_create_info_ptr); - if (current_pipeline_config_ptr->rasterization_order != VK_RASTERIZATION_ORDER_STRICT_AMD) + if (multisample_state_create_info_ptr != nullptr) { - anvil_assert(current_pipeline_config_ptr->rasterization_order == relaxed_rasterization_order_item.rasterizationOrder); - - chained_item_ptr = &relaxed_rasterization_order_item; + multisample_state_used = true; } else { - chained_item_ptr = &strict_rasterization_order_item; + multisample_state_used = false; } - raster_state_create_info.pNext = chained_item_ptr; + multisample_state_create_info_chain_cache.push_back(std::move(multisample_state_create_info_ptr) ); } - else - if (current_pipeline_config_ptr->rasterization_order != VK_RASTERIZATION_ORDER_STRICT_AMD) + + /* Form the raster state create info chain */ { - fprintf(stderr, - "[!] 
Cannot enable out-of-order rasterization - VK_AMD_rasterization_order extension not enabled at device creation time"); - } + auto raster_state_create_info_ptr = bake_pipeline_rasterization_state_create_info(current_pipeline_create_info_ptr); - raster_state_create_info_items_vk_cache.push_back(raster_state_create_info); + anvil_assert(raster_state_create_info_ptr != nullptr); - /* Form stage descriptors */ - shader_stage_start_offset = static_cast(shader_stage_create_info_items_vk_cache.size() ); + raster_state_create_info_chain_cache.push_back(std::move(raster_state_create_info_ptr) ); + } - for (uint32_t n_shader = 0; - n_shader < GRAPHICS_SHADER_STAGE_COUNT; - ++n_shader) + /* Form stage descriptors */ { - if (current_pipeline_ptr->shader_stages[n_shader].name.size() != 0) - { - VkPipelineShaderStageCreateInfo current_shader_stage_create_info; - std::shared_ptr shader_module_ptr; + auto shader_stage_create_info_chain_vec_ptr = bake_pipeline_shader_stage_create_info_chain_vector(current_pipeline_create_info_ptr); + anvil_assert(shader_stage_create_info_chain_vec_ptr != nullptr); - if (current_pipeline_ptr->specialization_constants_map[n_shader].size() > 0) - { - bake_specialization_info_vk(current_pipeline_ptr->specialization_constants_map [n_shader], - ¤t_pipeline_ptr->specialization_constant_data_buffer[0], - &specialization_map_entry_vk_cache, - &specialization_info_vk_cache[n_shader]); - } + n_shader_stages_used = shader_stage_create_info_chain_vec_ptr->get_n_structs(); - shader_module_ptr = current_pipeline_ptr->shader_stages[n_shader].shader_module_ptr; - - current_shader_stage_create_info.module = shader_module_ptr->get_module(); - current_shader_stage_create_info.pName = (n_shader == GRAPHICS_SHADER_STAGE_FRAGMENT) ? shader_module_ptr->get_fs_entrypoint_name().c_str() - : (n_shader == GRAPHICS_SHADER_STAGE_GEOMETRY) ? shader_module_ptr->get_gs_entrypoint_name().c_str() - : (n_shader == GRAPHICS_SHADER_STAGE_TESSELLATION_CONTROL) ? shader_module_ptr->get_tc_entrypoint_name().c_str() - : (n_shader == GRAPHICS_SHADER_STAGE_TESSELLATION_EVALUATION) ? shader_module_ptr->get_te_entrypoint_name().c_str() - : (n_shader == GRAPHICS_SHADER_STAGE_VERTEX) ? shader_module_ptr->get_vs_entrypoint_name().c_str() - : nullptr; - - current_shader_stage_create_info.flags = 0; - current_shader_stage_create_info.pNext = nullptr; - current_shader_stage_create_info.pSpecializationInfo = (current_pipeline_ptr->specialization_constants_map[n_shader].size() > 0) ? &specialization_info_vk_cache[n_shader] - : VK_NULL_HANDLE; - current_shader_stage_create_info.stage = (n_shader == GRAPHICS_SHADER_STAGE_FRAGMENT) ? VK_SHADER_STAGE_FRAGMENT_BIT - : (n_shader == GRAPHICS_SHADER_STAGE_GEOMETRY) ? VK_SHADER_STAGE_GEOMETRY_BIT - : (n_shader == GRAPHICS_SHADER_STAGE_TESSELLATION_CONTROL) ? VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT - : (n_shader == GRAPHICS_SHADER_STAGE_TESSELLATION_EVALUATION) ? 
VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT - : VK_SHADER_STAGE_VERTEX_BIT; - current_shader_stage_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; - - shader_stage_create_info_items_vk_cache.push_back(current_shader_stage_create_info); - } + shader_stage_create_info_chain_ptrs.push_back(std::move(shader_stage_create_info_chain_vec_ptr) ); } /* Form the tessellation state create info descriptor if needed */ - if (current_pipeline_ptr->shader_stages[GRAPHICS_SHADER_STAGE_TESSELLATION_CONTROL].name.size() != 0 || - current_pipeline_ptr->shader_stages[GRAPHICS_SHADER_STAGE_TESSELLATION_EVALUATION].name.size() != 0) { - VkPipelineTessellationStateCreateInfo tessellation_state_create_info; - - tessellation_state_create_info.flags = 0; - tessellation_state_create_info.patchControlPoints = current_pipeline_config_ptr->n_patch_control_points; - tessellation_state_create_info.pNext = nullptr; - tessellation_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO; + auto tessellation_state_create_info_ptr = bake_pipeline_tessellation_state_create_info(current_pipeline_create_info_ptr); - tessellation_state_used = true; + if (tessellation_state_create_info_ptr != nullptr) + { + tessellation_state_create_info_chain_cache.push_back(std::move(tessellation_state_create_info_ptr) ); - tessellation_state_create_info_items_vk_cache.push_back(tessellation_state_create_info); - } - else - { - tessellation_state_used = false; + tessellation_state_used = true; + } + else + { + tessellation_state_used = false; + } } /* Form the vertex input state create info descriptor */ - bake_vk_attributes_and_bindings(current_pipeline_config_ptr); - - vertex_input_state_create_info.vertexAttributeDescriptionCount = static_cast(current_pipeline_config_ptr->vk_input_attributes.size()); - vertex_input_state_create_info.vertexBindingDescriptionCount = static_cast(current_pipeline_config_ptr->vk_input_bindings.size() ); + { + auto vertex_input_state_create_info_ptr = bake_pipeline_vertex_input_state_create_info(current_pipeline_create_info_ptr); - vertex_input_state_create_info.flags = 0; - vertex_input_state_create_info.pNext = nullptr; - vertex_input_state_create_info.pVertexAttributeDescriptions = (vertex_input_state_create_info.vertexAttributeDescriptionCount > 0) ? ¤t_pipeline_config_ptr->vk_input_attributes.at(0) - : nullptr; - vertex_input_state_create_info.pVertexBindingDescriptions = (vertex_input_state_create_info.vertexBindingDescriptionCount > 0) ? ¤t_pipeline_config_ptr->vk_input_bindings.at(0) - : nullptr; - vertex_input_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; + anvil_assert(vertex_input_state_create_info_ptr != nullptr); - vertex_input_state_create_info_items_vk_cache.push_back(vertex_input_state_create_info); + vertex_input_state_create_info_chain_cache.push_back(std::move(vertex_input_state_create_info_ptr) ); + } /* Form the viewport state create info descriptor, if needed */ - if (!current_pipeline_config_ptr->rasterizer_discard_enabled) { - const uint32_t scissor_boxes_start_offset = static_cast(scissor_boxes_vk_cache.size() ); - const uint32_t viewports_start_offset = static_cast(viewports_vk_cache.size() ); - VkPipelineViewportStateCreateInfo viewport_state_create_info; - - #ifdef _DEBUG - { - const uint32_t n_scissor_boxes = (current_pipeline_config_ptr->enabled_dynamic_states & DYNAMIC_STATE_SCISSOR_BIT) ? 
current_pipeline_config_ptr->n_dynamic_scissor_boxes - : static_cast(current_pipeline_config_ptr->scissor_boxes.size() ); - const uint32_t n_viewports = (current_pipeline_config_ptr->enabled_dynamic_states & DYNAMIC_STATE_VIEWPORT_BIT) ? current_pipeline_config_ptr->n_dynamic_viewports - : static_cast(current_pipeline_config_ptr->viewports.size() ); + auto viewport_state_create_info_ptr = bake_pipeline_viewport_state_create_info(current_pipeline_create_info_ptr, + is_dynamic_scissor_state_enabled, + is_dynamic_viewport_state_enabled); - anvil_assert(n_scissor_boxes == n_viewports); - } - #endif /* _DEBUG */ - - if (current_pipeline_config_ptr->scissor_boxes.size() == 0) + if (viewport_state_create_info_ptr != nullptr) { - /* No scissor boxes / viewport defined. Use default settings.. */ - InternalScissorBox default_scissor_box; - InternalViewport default_viewport; - std::shared_ptr swapchain_ptr = current_pipeline_config_ptr->renderpass_ptr->get_swapchain(); - uint32_t window_size[2] = {0}; - - /* NOTE: If you hit this assertion, you either need to pass a Swapchain instance when this renderpass is being created, - * *or* specify scissor & viewport information for the GFX pipeline. - */ - anvil_assert(swapchain_ptr != nullptr); - - swapchain_ptr->get_image(0)->get_image_mipmap_size(0, /* n_mipmap */ - window_size + 0, - window_size + 1, - nullptr); /* out_opt_depth_ptr */ - - anvil_assert(window_size[0] != 0 && - window_size[1] != 0); - - default_scissor_box.height = window_size[1]; - default_scissor_box.width = window_size[0]; - default_scissor_box.x = 0; - default_scissor_box.y = 0; - - default_viewport.height = float(window_size[1]); - default_viewport.max_depth = 1.0f; - default_viewport.min_depth = 0.0f; - default_viewport.origin_x = 0.0f; - default_viewport.origin_y = 0.0f; - default_viewport.width = float(window_size[0]); - - current_pipeline_config_ptr->scissor_boxes[0] = default_scissor_box; - current_pipeline_config_ptr->viewports [0] = default_viewport; - } + viewport_state_create_info_chain_cache.push_back(std::move(viewport_state_create_info_ptr) ); - /* Convert internal scissor box & viewport representations to Vulkan descriptors */ - for (auto scissor_box_iterator = current_pipeline_config_ptr->scissor_boxes.cbegin(); - scissor_box_iterator != current_pipeline_config_ptr->scissor_boxes.cend(); - ++scissor_box_iterator) - { - scissor_boxes_vk_cache.push_back(scissor_box_iterator->second.get_vk_descriptor() ); + viewport_state_used = true; } - - for (auto viewport_iterator = current_pipeline_config_ptr->viewports.cbegin(); - viewport_iterator != current_pipeline_config_ptr->viewports.cend(); - ++viewport_iterator) + else { - viewports_vk_cache.push_back(viewport_iterator->second.get_vk_descriptor() ); + viewport_state_used = false; } - - /* Bake the descriptor */ - viewport_state_create_info.flags = 0; - viewport_state_create_info.pNext = nullptr; - viewport_state_create_info.pScissors = ((current_pipeline_config_ptr->enabled_dynamic_states & DYNAMIC_STATE_SCISSOR_BIT) != 0) ? VK_NULL_HANDLE - : &scissor_boxes_vk_cache[scissor_boxes_start_offset]; - viewport_state_create_info.pViewports = ((current_pipeline_config_ptr->enabled_dynamic_states & DYNAMIC_STATE_VIEWPORT_BIT) != 0) ? VK_NULL_HANDLE - : &viewports_vk_cache [viewports_start_offset]; - viewport_state_create_info.scissorCount = ((current_pipeline_config_ptr->enabled_dynamic_states & DYNAMIC_STATE_SCISSOR_BIT) != 0) ? 
current_pipeline_config_ptr->n_dynamic_scissor_boxes - : static_cast(current_pipeline_config_ptr->scissor_boxes.size() ); - viewport_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO; - viewport_state_create_info.viewportCount = ((current_pipeline_config_ptr->enabled_dynamic_states & DYNAMIC_STATE_VIEWPORT_BIT) != 0) ? current_pipeline_config_ptr->n_dynamic_viewports - : static_cast(current_pipeline_config_ptr->viewports.size() ); - - anvil_assert(viewport_state_create_info.scissorCount == viewport_state_create_info.viewportCount); - anvil_assert(viewport_state_create_info.scissorCount != 0); - - viewport_state_used = true; - - viewport_state_create_info_items_vk_cache.push_back(viewport_state_create_info); - } - else - { - viewport_state_used = false; } - /* Configure base pipeline handle/indices fields of the create info descriptor */ - if (current_pipeline_ptr->base_pipeline_ptr != nullptr) + /* Bake the GFX pipeline info struct chain */ { - anvil_assert(current_pipeline_ptr->base_pipeline == VK_NULL_HANDLE); - - /* There are three cases we need to handle separately here: - * - * 1. The base pipeline is to be baked in the call we're preparing for. Determine - * its index in the create info descriptor array and use it. - * 2. The pipeline has been baked earlier. We should be able to work around this - * by providing a handle to the pipeline, instead of the index. - * 3. The pipeline under specified index uses a different layout. This indicates - * a bug in the app or the manager. - * - * NOTE: A slightly adjusted version of this code is re-used in ComputePipelineManager::bake() - */ - auto base_bake_item_iterator = std::find(bake_items.begin(), - bake_items.end(), - current_pipeline_ptr->base_pipeline_ptr); + VkPipeline base_pipeline_handle = VK_NULL_HANDLE; + int32_t base_pipeline_index = UINT32_MAX; + const auto current_pipeline_base_pipeline_id = current_pipeline_create_info_ptr->get_base_pipeline_id(); - if (base_bake_item_iterator != bake_items.end() ) + if (current_pipeline_base_pipeline_id != UINT32_MAX) { - /* Case 1 */ - graphics_pipeline_create_info.basePipelineHandle = current_pipeline_ptr->base_pipeline; - graphics_pipeline_create_info.basePipelineIndex = static_cast(base_bake_item_iterator - bake_items.begin() ); - } - else - if (current_pipeline_ptr->base_pipeline_ptr != nullptr && - current_pipeline_ptr->base_pipeline_ptr->baked_pipeline != VK_NULL_HANDLE) - { - /* Case 2 */ - graphics_pipeline_create_info.basePipelineHandle = current_pipeline_ptr->base_pipeline_ptr->baked_pipeline; - graphics_pipeline_create_info.basePipelineIndex = -1; /* unused. */ + /* There are three cases we need to handle separately here: + * + * 1. The base pipeline is to be baked in the call we're preparing for. Determine + * its index in the create info descriptor array and use it. + * 2. The pipeline has been baked earlier. Pass the baked pipeline's handle. + * 3. The pipeline under specified index uses a different layout. This indicates + * a bug in the app or the manager. 
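+                 *
+                 *    (Case 1 corresponds to VkGraphicsPipelineCreateInfo::basePipelineIndex, which indexes into the
+                 *    pCreateInfos array handed to the same vkCreateGraphicsPipelines() call; case 2 corresponds to
+                 *    basePipelineHandle. Vulkan permits only one of the two to be set; the unused field must be left
+                 *    as -1 / VK_NULL_HANDLE respectively.)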
+ * + * NOTE: A slightly adjusted version of this code is re-used in ComputePipelineManager::bake() + */ + auto base_bake_item_iterator = std::find(bake_items.begin(), + bake_items.end(), + current_pipeline_base_pipeline_id); + + if (base_bake_item_iterator != bake_items.end() ) + { + /* Case 1 */ + base_pipeline_index = static_cast(base_bake_item_iterator - bake_items.begin() ); + } + else + { + auto baked_pipeline_iterator = m_baked_pipelines.find(current_pipeline_base_pipeline_id); + + if (baked_pipeline_iterator != m_baked_pipelines.end() && + baked_pipeline_iterator->second->baked_pipeline != VK_NULL_HANDLE) + { + /* Case 2 */ + base_pipeline_handle = baked_pipeline_iterator->second->baked_pipeline; + } + else + { + /* Case 3 */ + anvil_assert_fail(); + } + } } else { - /* Case 3 */ - anvil_assert_fail(); + /* No base pipeline requested */ } - } - else - if (current_pipeline_ptr->base_pipeline != VK_NULL_HANDLE) - { - graphics_pipeline_create_info.basePipelineHandle = current_pipeline_ptr->base_pipeline; - graphics_pipeline_create_info.basePipelineIndex = -1; /* unused. */ - } - else - { - /* No base pipeline requested */ - graphics_pipeline_create_info.basePipelineHandle = VK_NULL_HANDLE; - graphics_pipeline_create_info.basePipelineIndex = -1; /* unused */ - } - /* Form the rest of the create info descriptor */ - anvil_assert(!current_pipeline_ptr->layout_dirty); - - graphics_pipeline_create_info.flags = 0; - graphics_pipeline_create_info.layout = current_pipeline_ptr->layout_ptr->get_pipeline_layout(); - graphics_pipeline_create_info.pColorBlendState = (color_blend_state_used) ? &color_blend_state_create_info_items_vk_cache[color_blend_state_create_info_items_vk_cache.size() - 1] - : VK_NULL_HANDLE; - graphics_pipeline_create_info.pDepthStencilState = (depth_stencil_state_used) ? &depth_stencil_state_create_info_items_vk_cache[depth_stencil_state_create_info_items_vk_cache.size() - 1] - : VK_NULL_HANDLE; - graphics_pipeline_create_info.pDynamicState = (dynamic_state_used) ? &dynamic_state_create_info_items_vk_cache[dynamic_state_create_info_items_vk_cache.size() - 1] - : VK_NULL_HANDLE; - graphics_pipeline_create_info.pInputAssemblyState = &input_assembly_state_create_info_items_vk_cache[input_assembly_state_create_info_items_vk_cache.size() - 1]; - graphics_pipeline_create_info.pMultisampleState = (multisample_state_used) ? &multisample_state_create_info_items_vk_cache[multisample_state_create_info_items_vk_cache.size() - 1] - : VK_NULL_HANDLE; - graphics_pipeline_create_info.pNext = nullptr; - graphics_pipeline_create_info.pRasterizationState = &raster_state_create_info_items_vk_cache[raster_state_create_info_items_vk_cache.size() - 1]; - graphics_pipeline_create_info.pStages = &shader_stage_create_info_items_vk_cache[shader_stage_start_offset]; - graphics_pipeline_create_info.pTessellationState = (tessellation_state_used) ? &tessellation_state_create_info_items_vk_cache[tessellation_state_create_info_items_vk_cache.size() - 1] - : VK_NULL_HANDLE; - graphics_pipeline_create_info.pVertexInputState = &vertex_input_state_create_info_items_vk_cache[vertex_input_state_create_info_items_vk_cache.size() - 1]; - graphics_pipeline_create_info.pViewportState = (viewport_state_used) ? 
&viewport_state_create_info_items_vk_cache[viewport_state_create_info_items_vk_cache.size() - 1] - : VK_NULL_HANDLE; - graphics_pipeline_create_info.renderPass = current_pipeline_config_ptr->renderpass_ptr->get_render_pass(); - graphics_pipeline_create_info.stageCount = static_cast(shader_stage_create_info_items_vk_cache.size() - shader_stage_start_offset); - graphics_pipeline_create_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; - graphics_pipeline_create_info.subpass = current_pipeline_config_ptr->subpass_id; - - if (graphics_pipeline_create_info.basePipelineHandle != VK_NULL_HANDLE || - graphics_pipeline_create_info.basePipelineIndex != -1) - { - graphics_pipeline_create_info.flags |= VK_PIPELINE_CREATE_DERIVATIVE_BIT; + auto result_ptr = bake_graphics_pipeline_create_info(current_pipeline_create_info_ptr, + current_pipeline_ptr->layout_ptr.get(), + base_pipeline_handle, + base_pipeline_index, + (color_blend_state_used) ? color_blend_state_create_info_chain_cache.back()->get_root_struct() + : nullptr, + (depth_stencil_state_used) ? depth_stencil_state_create_info_chain_cache.back()->get_root_struct() + : nullptr, + (dynamic_state_used) ? dynamic_state_create_info_chain_cache.back()->get_root_struct() + : nullptr, + input_assembly_state_create_info_chain_cache.back()->get_root_struct(), + (multisample_state_used) ? multisample_state_create_info_chain_cache.back()->get_root_struct() + : nullptr, + raster_state_create_info_chain_cache.back()->root_struct_ptr, + n_shader_stages_used, + (n_shader_stages_used > 0) ? shader_stage_create_info_chain_ptrs.back()->get_root_structs() + : nullptr, + (tessellation_state_used) ? tessellation_state_create_info_chain_cache.back()->get_root_struct() + : nullptr, + vertex_input_state_create_info_chain_cache.back()->get_root_struct(), + (viewport_state_used) ? viewport_state_create_info_chain_cache.back()->get_root_struct() + : nullptr); + + /* Stash the descriptor for now. We will issue one expensive vkCreateGraphicsPipelines() call after all pipeline objects + * are iterated over. */ + graphics_pipeline_create_info_chains.append_struct_chain(std::move(result_ptr) ); } - - graphics_pipeline_create_info.flags |= ((current_pipeline_ptr->allow_derivatives) ? VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT : 0) | - ((current_pipeline_ptr->disable_optimizations) ? VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT : 0); - - if (current_pipeline_config_ptr->pre_bake_function != nullptr) - { - current_pipeline_config_ptr->pre_bake_function(m_device_ptr, - current_pipeline_id, - &graphics_pipeline_create_info); - } - - /* Stash the descriptor for now. We will issue one expensive vkCreateGraphicsPipelines() call after all pipeline objects - * are iterated over. */ - graphics_pipeline_create_info_items_vk_cache.push_back(graphics_pipeline_create_info); - } - - #ifdef _DEBUG - { - /* If you hit any of the below assertion failures, please bump up the value under the #define. - * - * We use the vectors to hold structure instances. If more than N_CACHE_ITEMS items is ever requested from any of these vectors, - * the underlying storage will likely be reallocated. Many of the descriptors use pointers to refer to children descriptors, - * and when the data is moved around, the pointers are no longer valid. 
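/* Illustrative sketch (not part of this patch): the hazard the removed N_CACHE_ITEMS asserts
 * guarded against. VkGraphicsPipelineCreateInfo stores raw pointers into the per-state caches;
 * growing a std::vector past its capacity reallocates the storage and silently invalidates every
 * such pointer. Pre-reserving (or, as in the new code, heap-allocated struct chains) keeps the
 * addresses stable until the batched vkCreateGraphicsPipelines() call is made.
 */
#include <vector>
#include <vulkan/vulkan.h>

void pointer_stability_example()
{
    std::vector<VkPipelineRasterizationStateCreateInfo> cache;

    cache.reserve(128); /* without this, a push_back() beyond the current capacity would reallocate */

    cache.push_back(VkPipelineRasterizationStateCreateInfo{} );

    const VkPipelineRasterizationStateCreateInfo* raster_ptr = &cache.back();

    /* As long as fewer than 128 items are ever appended, raster_ptr stays valid and may be stored
     * in VkGraphicsPipelineCreateInfo::pRasterizationState. */
    (void) raster_ptr;
}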
- */ - anvil_assert(color_blend_attachment_states_vk_cache.size() <= N_CACHE_ITEMS); - anvil_assert(color_blend_state_create_info_items_vk_cache.size() <= N_CACHE_ITEMS); - anvil_assert(depth_stencil_state_create_info_items_vk_cache.size() <= N_CACHE_ITEMS); - anvil_assert(dynamic_state_create_info_items_vk_cache.size() <= N_CACHE_ITEMS); - anvil_assert(enabled_dynamic_states_vk_cache.size() <= N_CACHE_ITEMS); - anvil_assert(graphics_pipeline_create_info_items_vk_cache.size() <= N_CACHE_ITEMS); - anvil_assert(input_assembly_state_create_info_items_vk_cache.size() <= N_CACHE_ITEMS); - anvil_assert(multisample_state_create_info_items_vk_cache.size() <= N_CACHE_ITEMS); - anvil_assert(raster_state_create_info_items_vk_cache.size() <= N_CACHE_ITEMS); - anvil_assert(scissor_boxes_vk_cache.size() <= N_CACHE_ITEMS); - anvil_assert(shader_stage_create_info_items_vk_cache.size() <= N_CACHE_ITEMS); - anvil_assert(specialization_info_vk_cache.size() <= N_CACHE_ITEMS); - anvil_assert(specialization_map_entry_vk_cache.size() <= N_CACHE_ITEMS); - anvil_assert(tessellation_state_create_info_items_vk_cache.size() <= N_CACHE_ITEMS); - anvil_assert(vertex_input_state_create_info_items_vk_cache.size() <= N_CACHE_ITEMS); - anvil_assert(viewport_state_create_info_items_vk_cache.size() <= N_CACHE_ITEMS); - anvil_assert(viewports_vk_cache.size() <= N_CACHE_ITEMS); - } - #endif + } /* All right. Try to bake all pipeline objects at once */ result_graphics_pipelines.resize(bake_items.size() ); - result_vk = vkCreateGraphicsPipelines(device_locked_ptr->get_device_vk(), - m_pipeline_cache_ptr->get_pipeline_cache(), - static_cast(graphics_pipeline_create_info_items_vk_cache.size() ), - &graphics_pipeline_create_info_items_vk_cache[0], - nullptr, /* pAllocator */ - &result_graphics_pipelines[0]); + m_pipeline_cache_ptr->lock(); + { + result_vk = Anvil::Vulkan::vkCreateGraphicsPipelines(m_device_ptr->get_device_vk(), + m_pipeline_cache_ptr->get_pipeline_cache(), + graphics_pipeline_create_info_chains.get_n_structs (), + graphics_pipeline_create_info_chains.get_root_structs(), + nullptr, /* pAllocator */ + &result_graphics_pipelines[0]); + } + m_pipeline_cache_ptr->unlock(); if (!is_vk_call_successful(result_vk) ) { @@ -1026,2341 +437,1069 @@ bool Anvil::GraphicsPipelineManager::bake() goto end; } - for (auto bake_item_iterator = bake_items.begin(); - bake_item_iterator != bake_items.end(); - ++bake_item_iterator) - { - if (!bake_item_iterator->pipeline_ptr->dirty) - { - continue; - } - - if (m_pipeline_configurations[bake_item_iterator->pipeline_id]->post_bake_function != nullptr) - { - m_pipeline_configurations[bake_item_iterator->pipeline_id]->post_bake_function(m_device_ptr, - bake_item_iterator->pipeline_id); - } - } - /* Distribute the result pipeline objects to pipeline configuration descriptors */ - for (auto pipeline_iterator = m_pipelines.begin(); - pipeline_iterator != m_pipelines.end(); + for (auto pipeline_iterator = m_outstanding_pipelines.begin(); + pipeline_iterator != m_outstanding_pipelines.end(); ++pipeline_iterator) { - const GraphicsPipelineID current_pipeline_id = pipeline_iterator->first; - std::shared_ptr current_pipeline_ptr = pipeline_iterator->second; + const PipelineID& current_pipeline_id = pipeline_iterator->first; - if (!current_pipeline_ptr->dirty || - current_pipeline_ptr->is_proxy || - !current_pipeline_ptr->is_bakeable) - { - continue; - } + anvil_assert(m_baked_pipelines.find(current_pipeline_id) == m_baked_pipelines.end() ); - m_pipelines[current_pipeline_id]->baked_pipeline = 
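/* Illustrative sketch (not part of this patch): the batching pattern used above. All create-info
 * structs gathered for outstanding pipelines are submitted in a single vkCreateGraphicsPipelines()
 * call, and access to the shared VkPipelineCache is serialized externally because the manager may
 * be used from multiple threads. The std::mutex stands in for whatever synchronization the
 * pipeline cache wrapper's lock()/unlock() provide.
 */
#include <mutex>
#include <vector>
#include <vulkan/vulkan.h>

VkResult bake_all(VkDevice                                         device,
                  VkPipelineCache                                  shared_cache,
                  const std::vector<VkGraphicsPipelineCreateInfo>& create_infos,
                  std::mutex&                                      cache_mutex,
                  std::vector<VkPipeline>&                         out_pipelines)
{
    out_pipelines.resize(create_infos.size() );

    std::lock_guard<std::mutex> lock(cache_mutex);

    /* One expensive driver call for N pipelines instead of N separate calls. */
    return vkCreateGraphicsPipelines(device,
                                     shared_cache,
                                     static_cast<uint32_t>(create_infos.size() ),
                                     create_infos.data(),
                                     nullptr, /* pAllocator */
                                     out_pipelines.data() );
}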
result_graphics_pipelines[n_consumed_graphics_pipelines++]; - current_pipeline_ptr->dirty = false; + pipeline_iterator->second->baked_pipeline = result_graphics_pipelines[n_consumed_graphics_pipelines++]; + m_baked_pipelines[current_pipeline_id] = std::move(pipeline_iterator->second); } anvil_assert(n_consumed_graphics_pipelines == static_cast(result_graphics_pipelines.size() )); + m_outstanding_pipelines.clear(); + /* All done */ result = true; end: return result; } -/** Converts the internal vertex attribute descriptors to one or more VkVertexInputAttributeDescription & VkVertexInputBindingDescription - * structures. - * - * The function goes an extra mile to re-use the same vertex binding for attributes whose binding properties match. - * - * @param pipeline_config_ptr Pipeline configuration descriptor to use. Must not be nullptr. - */ -void Anvil::GraphicsPipelineManager::bake_vk_attributes_and_bindings(std::shared_ptr inout_pipeline_config_ptr) +/* Please see header for specification */ +Anvil::StructChainUniquePtr Anvil::GraphicsPipelineManager::bake_graphics_pipeline_create_info(const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr, + const Anvil::PipelineLayout* in_pipeline_layout_ptr, + const VkPipeline& in_opt_base_pipeline_handle, + const int32_t& in_opt_base_pipeline_index, + const VkPipelineColorBlendStateCreateInfo* in_opt_color_blend_state_create_info_ptr, + const VkPipelineDepthStencilStateCreateInfo* in_opt_depth_stencil_state_create_info_ptr, + const VkPipelineDynamicStateCreateInfo* in_opt_dynamic_state_create_info_ptr, + const VkPipelineInputAssemblyStateCreateInfo* in_input_assembly_state_create_info_ptr, + const VkPipelineMultisampleStateCreateInfo* in_opt_multisample_state_create_info_ptr, + const VkPipelineRasterizationStateCreateInfo* in_rasterization_state_create_info_ptr, + const uint32_t& in_n_shader_stage_create_info_items, + const VkPipelineShaderStageCreateInfo* in_shader_stage_create_info_items_ptr, + const VkPipelineTessellationStateCreateInfo* in_opt_tessellation_state_create_info_ptr, + const VkPipelineVertexInputStateCreateInfo* in_vertex_input_state_create_info_ptr, + const VkPipelineViewportStateCreateInfo* in_opt_viewport_state_create_info_ptr) const +{ + Anvil::StructChainer chainer; + + { + VkGraphicsPipelineCreateInfo create_info; + + create_info.basePipelineHandle = in_opt_base_pipeline_handle; + create_info.basePipelineIndex = in_opt_base_pipeline_index; + create_info.flags = 0; + create_info.layout = in_pipeline_layout_ptr->get_pipeline_layout(); + create_info.pColorBlendState = in_opt_color_blend_state_create_info_ptr; + create_info.pDepthStencilState = in_opt_depth_stencil_state_create_info_ptr; + create_info.pDynamicState = in_opt_dynamic_state_create_info_ptr; + create_info.pInputAssemblyState = in_input_assembly_state_create_info_ptr; + create_info.pMultisampleState = in_opt_multisample_state_create_info_ptr; + create_info.pNext = nullptr; + create_info.pRasterizationState = in_rasterization_state_create_info_ptr; + create_info.pStages = in_shader_stage_create_info_items_ptr; + create_info.pTessellationState = in_opt_tessellation_state_create_info_ptr; + create_info.pVertexInputState = in_vertex_input_state_create_info_ptr; + create_info.pViewportState = in_opt_viewport_state_create_info_ptr; + create_info.renderPass = in_gfx_pipeline_create_info_ptr->get_renderpass()->get_render_pass(); + create_info.stageCount = in_n_shader_stage_create_info_items; + create_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; + 
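/* Illustrative sketch (not part of this patch): the binding re-use idea behind the removed
 * bake_vk_attributes_and_bindings() helper. Attributes whose input rate and stride match are
 * mapped onto a single VkVertexInputBindingDescription instead of each receiving its own binding.
 * The Attribute struct below is a simplified stand-in for Anvil's internal descriptor.
 */
#include <cstdint>
#include <vector>
#include <vulkan/vulkan.h>

struct Attribute
{
    uint32_t          location;
    VkFormat          format;
    uint32_t          offset;
    uint32_t          stride;
    VkVertexInputRate rate;
};

void bake_attribs_and_bindings(const std::vector<Attribute>&                   attribs,
                               std::vector<VkVertexInputAttributeDescription>& out_attribs,
                               std::vector<VkVertexInputBindingDescription>&   out_bindings)
{
    for (const auto& attrib : attribs)
    {
        uint32_t binding = UINT32_MAX;

        /* Re-use an existing binding whose properties match .. */
        for (uint32_t n = 0; n < out_bindings.size(); ++n)
        {
            if (out_bindings[n].inputRate == attrib.rate &&
                out_bindings[n].stride    == attrib.stride)
            {
                binding = n;
                break;
            }
        }

        /* .. or create a new one. */
        if (binding == UINT32_MAX)
        {
            binding = static_cast<uint32_t>(out_bindings.size() );

            out_bindings.push_back({binding, attrib.stride, attrib.rate});
        }

        out_attribs.push_back({attrib.location, binding, attrib.format, attrib.offset});
    }
}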
create_info.subpass = in_gfx_pipeline_create_info_ptr->get_subpass_id(); + + if (create_info.basePipelineIndex != static_cast(UINT32_MAX) ) + { + create_info.flags |= VK_PIPELINE_CREATE_DERIVATIVE_BIT; + } + + create_info.flags |= ((in_gfx_pipeline_create_info_ptr->allows_derivatives () ) ? VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT : 0) | + ((in_gfx_pipeline_create_info_ptr->has_optimizations_disabled() ) ? VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT : 0); + + chainer.append_struct(create_info); + } + + return chainer.create_chain(); +} + +/* Please see header for specification */ +Anvil::StructChainUniquePtr Anvil::GraphicsPipelineManager::bake_pipeline_color_blend_state_create_info(const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr, + const Anvil::RenderPass* in_current_renderpass_ptr, + const Anvil::SubPassID& in_subpass_id) const { - inout_pipeline_config_ptr->attribute_location_to_binding_index_map.clear(); - inout_pipeline_config_ptr->vk_input_attributes.clear(); - inout_pipeline_config_ptr->vk_input_bindings.clear(); + Anvil::StructChainUniquePtr result_chain_ptr; + uint32_t subpass_n_color_attachments = 0; + + in_current_renderpass_ptr->get_render_pass_create_info()->get_subpass_n_attachments(in_subpass_id, + Anvil::AttachmentType::COLOR, + &subpass_n_color_attachments); - for (auto attribute_iterator = inout_pipeline_config_ptr->attributes.cbegin(); - attribute_iterator != inout_pipeline_config_ptr->attributes.cend(); - ++attribute_iterator) + if (!in_gfx_pipeline_create_info_ptr->is_rasterizer_discard_enabled() && + subpass_n_color_attachments > 0) { - /* Identify the binding index we should use for the attribute. - * - * If an explicit binding has been specified by the application, this step can be skipped */ - const auto& current_attribute = *attribute_iterator; - VkVertexInputAttributeDescription current_attribute_vk; - uint32_t n_attribute_binding = UINT32_MAX; - bool has_found = false; + const float* blend_constant_ptr = nullptr; + Anvil::StructChainer color_blend_state_create_info_chainer; + Anvil::StructID color_blend_state_create_info_struct_id; + Anvil::LogicOp logic_op = Anvil::LogicOp::UNKNOWN; + bool logic_op_enabled = false; + uint32_t max_location_index = UINT32_MAX; - if (current_attribute.explicit_binding_index == UINT32_MAX) - { - for (auto vk_input_binding_iterator = inout_pipeline_config_ptr->vk_input_bindings.begin(); - vk_input_binding_iterator != inout_pipeline_config_ptr->vk_input_bindings.end(); - ++vk_input_binding_iterator) - { - if (vk_input_binding_iterator->inputRate == attribute_iterator->rate && - vk_input_binding_iterator->stride == attribute_iterator->stride_in_bytes) - { - has_found = true; - n_attribute_binding = static_cast(vk_input_binding_iterator - inout_pipeline_config_ptr->vk_input_bindings.begin()); + max_location_index = in_current_renderpass_ptr->get_render_pass_create_info()->get_max_color_location_used_by_subpass(in_subpass_id); - break; - } - } - } - else - { - has_found = false; - n_attribute_binding = current_attribute.explicit_binding_index; - } + in_gfx_pipeline_create_info_ptr->get_blending_properties(&blend_constant_ptr, + nullptr); /* out_opt_n_blend_attachments_ptr */ + + in_gfx_pipeline_create_info_ptr->get_logic_op_state(&logic_op_enabled, + &logic_op); - if (!has_found) { - /* Got to create a new binding descriptor .. 
*/ - VkVertexInputBindingDescription new_binding_vk; + VkPipelineColorBlendStateCreateInfo color_blend_state_create_info; - new_binding_vk.binding = (current_attribute.explicit_binding_index == UINT32_MAX) ? static_cast(inout_pipeline_config_ptr->vk_input_bindings.size() ) - : current_attribute.explicit_binding_index; - new_binding_vk.inputRate = attribute_iterator->rate; - new_binding_vk.stride = attribute_iterator->stride_in_bytes; + color_blend_state_create_info.attachmentCount = max_location_index + 1; + color_blend_state_create_info.flags = 0; + color_blend_state_create_info.logicOp = static_cast(logic_op); + color_blend_state_create_info.logicOpEnable = (logic_op_enabled) ? VK_TRUE : VK_FALSE; + color_blend_state_create_info.pAttachments = nullptr; /* will be patched by the chainer */ + color_blend_state_create_info.pNext = nullptr; + color_blend_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; - inout_pipeline_config_ptr->vk_input_bindings.push_back(new_binding_vk); + memcpy(color_blend_state_create_info.blendConstants, + blend_constant_ptr, + sizeof(color_blend_state_create_info.blendConstants) ); - n_attribute_binding = new_binding_vk.binding; + color_blend_state_create_info_struct_id = color_blend_state_create_info_chainer.append_struct(color_blend_state_create_info); } - /* Good to convert the internal attribute descriptor to the Vulkan's input attribute descriptor */ - current_attribute_vk.binding = n_attribute_binding; - current_attribute_vk.format = attribute_iterator->format; - current_attribute_vk.location = attribute_iterator->location; - current_attribute_vk.offset = attribute_iterator->offset_in_bytes; + anvil_assert(subpass_n_color_attachments <= in_current_renderpass_ptr->get_render_pass_create_info()->get_n_attachments() ); + + { + std::vector color_blend_attachment_state_vec(max_location_index + 1); + + for (uint32_t n_subpass_color_attachment = 0; + n_subpass_color_attachment <= max_location_index; + ++n_subpass_color_attachment) + { + VkPipelineColorBlendAttachmentState* blend_state_ptr = &color_blend_attachment_state_vec.at(n_subpass_color_attachment); + Anvil::ImageLayout dummy = Anvil::ImageLayout::UNKNOWN; + bool is_blending_enabled_for_attachment = false; + Anvil::RenderPassAttachmentID rp_attachment_id = UINT32_MAX; + + Anvil::BlendOp alpha_blend_op = Anvil::BlendOp::UNKNOWN; + Anvil::BlendOp color_blend_op = Anvil::BlendOp::UNKNOWN; + Anvil::ColorComponentFlags color_component_flags = Anvil::ColorComponentFlagBits::NONE; + Anvil::BlendFactor dst_alpha_blend_factor = Anvil::BlendFactor::UNKNOWN; + Anvil::BlendFactor dst_color_blend_factor = Anvil::BlendFactor::UNKNOWN; + Anvil::BlendFactor src_alpha_blend_factor = Anvil::BlendFactor::UNKNOWN; + Anvil::BlendFactor src_color_blend_factor = Anvil::BlendFactor::UNKNOWN; + + if (!in_current_renderpass_ptr->get_render_pass_create_info()->get_subpass_attachment_properties(in_subpass_id, + Anvil::AttachmentType::COLOR, + n_subpass_color_attachment, + &rp_attachment_id, + &dummy) || /* out_layout_ptr */ + !in_gfx_pipeline_create_info_ptr->get_color_blend_attachment_properties (rp_attachment_id, + &is_blending_enabled_for_attachment, + &color_blend_op, + &alpha_blend_op, + &src_color_blend_factor, + &dst_color_blend_factor, + &src_alpha_blend_factor, + &dst_alpha_blend_factor, + &color_component_flags) ) + { + /* The user has not defined blending properties for current color attachment. Use default state values .. 
*/ + blend_state_ptr->blendEnable = VK_FALSE; + blend_state_ptr->alphaBlendOp = VK_BLEND_OP_ADD; + blend_state_ptr->colorBlendOp = VK_BLEND_OP_ADD; + blend_state_ptr->colorWriteMask = VK_COLOR_COMPONENT_A_BIT | + VK_COLOR_COMPONENT_B_BIT | + VK_COLOR_COMPONENT_G_BIT | + VK_COLOR_COMPONENT_R_BIT; + blend_state_ptr->dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE; + blend_state_ptr->dstColorBlendFactor = VK_BLEND_FACTOR_ONE; + blend_state_ptr->srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE; + blend_state_ptr->srcColorBlendFactor = VK_BLEND_FACTOR_ONE; + } + else + { + blend_state_ptr->alphaBlendOp = static_cast(alpha_blend_op); + blend_state_ptr->blendEnable = (is_blending_enabled_for_attachment) ? VK_TRUE : VK_FALSE; + blend_state_ptr->colorBlendOp = static_cast (color_blend_op); + blend_state_ptr->colorWriteMask = color_component_flags.get_vk(); + blend_state_ptr->dstAlphaBlendFactor = static_cast(dst_alpha_blend_factor); + blend_state_ptr->dstColorBlendFactor = static_cast(dst_color_blend_factor); + blend_state_ptr->srcAlphaBlendFactor = static_cast(src_alpha_blend_factor); + blend_state_ptr->srcColorBlendFactor = static_cast(src_color_blend_factor); + } + } - /* Associate attribute locations with assigned bindings */ - anvil_assert(inout_pipeline_config_ptr->attribute_location_to_binding_index_map.find(attribute_iterator->location) == inout_pipeline_config_ptr->attribute_location_to_binding_index_map.end() ); - inout_pipeline_config_ptr->attribute_location_to_binding_index_map[attribute_iterator->location] = current_attribute_vk.binding; + color_blend_state_create_info_chainer.store_helper_structure_vector(color_blend_attachment_state_vec, + color_blend_state_create_info_struct_id, + offsetof(VkPipelineColorBlendStateCreateInfo, pAttachments) ); + } - /* Cache the descriptor */ - inout_pipeline_config_ptr->vk_input_attributes.push_back(current_attribute_vk); + result_chain_ptr = color_blend_state_create_info_chainer.create_chain(); } + + return result_chain_ptr; } /* Please see header for specification */ -std::shared_ptr Anvil::GraphicsPipelineManager::create(std::weak_ptr in_device_ptr, - bool in_use_pipeline_cache, - std::shared_ptr in_pipeline_cache_to_reuse_ptr) -{ - std::shared_ptr result_ptr; - - result_ptr.reset( - new Anvil::GraphicsPipelineManager(in_device_ptr, - in_use_pipeline_cache, - in_pipeline_cache_to_reuse_ptr) - ); +Anvil::StructChainUniquePtr Anvil::GraphicsPipelineManager::bake_pipeline_depth_stencil_state_create_info(const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr, + const Anvil::RenderPass* in_current_renderpass_ptr) const +{ + VkPipelineDepthStencilStateCreateInfo depth_stencil_state_create_info; + auto depth_test_compare_op = Anvil::CompareOp::UNKNOWN; + bool is_depth_bounds_test_enabled = false; + bool is_depth_test_enabled = false; + bool is_stencil_test_enabled = false; + float max_depth_bounds = std::numeric_limits::max(); + float min_depth_bounds = std::numeric_limits::max(); + uint32_t n_depth_stencil_attachments = 0; + Anvil::StructChainUniquePtr result_ptr; + + in_gfx_pipeline_create_info_ptr->get_depth_bounds_state(&is_depth_bounds_test_enabled, + &min_depth_bounds, + &max_depth_bounds); + + in_gfx_pipeline_create_info_ptr->get_depth_test_state(&is_depth_test_enabled, + &depth_test_compare_op); + + { + Anvil::CompareOp back_compare_op = Anvil::CompareOp::UNKNOWN; + Anvil::StencilOp back_depth_fail_op = Anvil::StencilOp::UNKNOWN; + Anvil::StencilOp back_fail_op = Anvil::StencilOp::UNKNOWN; + Anvil::StencilOp back_pass_op = 
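/* Illustrative sketch (not part of this patch): the fallback used above when the application has
 * not configured blending for a color attachment. Blending stays disabled and all channels are
 * written. The array handed to pAttachments is sized as (max color location used by the subpass + 1),
 * which is expected to match the subpass' color attachment count whenever rasterization is enabled.
 */
#include <vulkan/vulkan.h>

VkPipelineColorBlendAttachmentState make_default_blend_attachment_state()
{
    VkPipelineColorBlendAttachmentState state = {};

    state.blendEnable         = VK_FALSE;
    state.colorBlendOp        = VK_BLEND_OP_ADD;
    state.alphaBlendOp        = VK_BLEND_OP_ADD;
    state.srcColorBlendFactor = VK_BLEND_FACTOR_ONE;
    state.dstColorBlendFactor = VK_BLEND_FACTOR_ONE;
    state.srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE;
    state.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE;
    state.colorWriteMask      = VK_COLOR_COMPONENT_R_BIT |
                                VK_COLOR_COMPONENT_G_BIT |
                                VK_COLOR_COMPONENT_B_BIT |
                                VK_COLOR_COMPONENT_A_BIT;

    return state;
}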
Anvil::StencilOp::UNKNOWN; + Anvil::CompareOp front_compare_op = Anvil::CompareOp::UNKNOWN; + Anvil::StencilOp front_depth_fail_op = Anvil::StencilOp::UNKNOWN; + Anvil::StencilOp front_fail_op = Anvil::StencilOp::UNKNOWN; + Anvil::StencilOp front_pass_op = Anvil::StencilOp::UNKNOWN; + + in_gfx_pipeline_create_info_ptr->get_stencil_test_properties(&is_stencil_test_enabled, + &front_fail_op, + &front_pass_op, + &front_depth_fail_op, + &front_compare_op, + &depth_stencil_state_create_info.front.compareMask, + &depth_stencil_state_create_info.front.writeMask, + &depth_stencil_state_create_info.front.reference, + &back_fail_op, + &back_pass_op, + &back_depth_fail_op, + &back_compare_op, + &depth_stencil_state_create_info.back.compareMask, + &depth_stencil_state_create_info.back.writeMask, + &depth_stencil_state_create_info.back.reference); + + depth_stencil_state_create_info.back.compareOp = static_cast(back_compare_op); + depth_stencil_state_create_info.back.depthFailOp = static_cast(back_depth_fail_op); + depth_stencil_state_create_info.back.failOp = static_cast(back_fail_op); + depth_stencil_state_create_info.back.passOp = static_cast(back_pass_op); + depth_stencil_state_create_info.front.compareOp = static_cast(front_compare_op); + depth_stencil_state_create_info.front.depthFailOp = static_cast(front_depth_fail_op); + depth_stencil_state_create_info.front.failOp = static_cast(front_fail_op); + depth_stencil_state_create_info.front.passOp = static_cast(front_pass_op); + } + + in_current_renderpass_ptr->get_render_pass_create_info()->get_subpass_n_attachments(in_gfx_pipeline_create_info_ptr->get_subpass_id(), + Anvil::AttachmentType::DEPTH_STENCIL, + &n_depth_stencil_attachments); + + if (n_depth_stencil_attachments) + { + Anvil::StructChainer depth_stencil_state_create_info_chainer; + + depth_stencil_state_create_info.depthBoundsTestEnable = is_depth_bounds_test_enabled ? VK_TRUE : VK_FALSE; + depth_stencil_state_create_info.depthCompareOp = static_cast(depth_test_compare_op); + depth_stencil_state_create_info.depthTestEnable = is_depth_test_enabled ? VK_TRUE : VK_FALSE; + depth_stencil_state_create_info.depthWriteEnable = in_gfx_pipeline_create_info_ptr->are_depth_writes_enabled() ? VK_TRUE : VK_FALSE; + depth_stencil_state_create_info.flags = 0; + depth_stencil_state_create_info.maxDepthBounds = max_depth_bounds; + depth_stencil_state_create_info.minDepthBounds = min_depth_bounds; + depth_stencil_state_create_info.pNext = nullptr; + depth_stencil_state_create_info.stencilTestEnable = is_stencil_test_enabled ? VK_TRUE : VK_FALSE; + depth_stencil_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO; + + depth_stencil_state_create_info_chainer.append_struct(depth_stencil_state_create_info); + + result_ptr = depth_stencil_state_create_info_chainer.create_chain(); + } + else + { + /* No depth/stencil attachment available. Make sure none of the dependent modes are enabled. 
*/ + anvil_assert(!is_depth_bounds_test_enabled); + anvil_assert(!is_depth_test_enabled); + anvil_assert(!in_gfx_pipeline_create_info_ptr->are_depth_writes_enabled()); + anvil_assert(!is_stencil_test_enabled); + } return result_ptr; } /* Please see header for specification */ -bool Anvil::GraphicsPipelineManager::delete_pipeline(GraphicsPipelineID in_graphics_pipeline_id) +Anvil::StructChainUniquePtr Anvil::GraphicsPipelineManager::bake_pipeline_dynamic_state_create_info(const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr) const { - GraphicsPipelineConfigurations::iterator configuration_iterator; - bool result = false; + const Anvil::DynamicState* enabled_dynamic_states_ptr = nullptr; + uint32_t n_enabled_dynamic_states = 0; + Anvil::StructChainUniquePtr result_ptr; - result = BasePipelineManager::delete_pipeline(in_graphics_pipeline_id); + in_gfx_pipeline_create_info_ptr->get_enabled_dynamic_states(&enabled_dynamic_states_ptr, + &n_enabled_dynamic_states); - if (!result) + if (n_enabled_dynamic_states != 0) { - goto end; - } + Anvil::StructChainer dynamic_state_create_info_chainer; + VkPipelineDynamicStateCreateInfo dynamic_state_create_info; - configuration_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); + dynamic_state_create_info.dynamicStateCount = n_enabled_dynamic_states; + dynamic_state_create_info.flags = 0; + dynamic_state_create_info.pDynamicStates = (dynamic_state_create_info.dynamicStateCount > 0) ? reinterpret_cast(enabled_dynamic_states_ptr) + : VK_NULL_HANDLE; + dynamic_state_create_info.pNext = nullptr; + dynamic_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO; - if (configuration_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(configuration_iterator == m_pipeline_configurations.end()) ); + dynamic_state_create_info_chainer.append_struct(dynamic_state_create_info); - goto end; - } - else - { - m_pipeline_configurations.erase(configuration_iterator); + result_ptr = dynamic_state_create_info_chainer.create_chain(); } - result = true; -end: - return result; + return result_ptr; } -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_alpha_to_coverage_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr) const +/* Please see header for specification */ +Anvil::StructChainUniquePtr Anvil::GraphicsPipelineManager::bake_pipeline_input_assembly_state_create_info(const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr) const { - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); + Anvil::StructChainer input_assembly_state_create_info_chainer; + VkPipelineInputAssemblyStateCreateInfo input_assembly_state_create_info; - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } + input_assembly_state_create_info.flags = 0; + input_assembly_state_create_info.pNext = nullptr; + input_assembly_state_create_info.primitiveRestartEnable = in_gfx_pipeline_create_info_ptr->is_primitive_restart_enabled() ? 
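/* Illustrative sketch (not part of this patch): the plain-Vulkan equivalent of the dynamic-state
 * block built above. The patched code casts Anvil::DynamicState values directly to VkDynamicState,
 * which assumes a 1:1 mapping between the two enums. The caller must keep the state array alive
 * until the pipeline has been created.
 */
#include <vector>
#include <vulkan/vulkan.h>

VkPipelineDynamicStateCreateInfo make_dynamic_state_info(const std::vector<VkDynamicState>& states)
{
    VkPipelineDynamicStateCreateInfo info = {};

    info.sType             = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
    info.pNext             = nullptr;
    info.flags             = 0;
    info.dynamicStateCount = static_cast<uint32_t>(states.size() );
    info.pDynamicStates    = states.empty() ? nullptr
                                            : states.data();

    return info;
}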
VK_TRUE : VK_FALSE; + input_assembly_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; + input_assembly_state_create_info.topology = static_cast(in_gfx_pipeline_create_info_ptr->get_primitive_topology() ); - if (out_opt_is_enabled_ptr != nullptr) - { - *out_opt_is_enabled_ptr = pipeline_config_ptr->alpha_to_coverage_enabled; - } + input_assembly_state_create_info_chainer.append_struct(input_assembly_state_create_info); - result = true; -end: - return result; + return input_assembly_state_create_info_chainer.create_chain(); } -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_alpha_to_one_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr) const +/* Please see header for specification */ +Anvil::StructChainUniquePtr Anvil::GraphicsPipelineManager::bake_pipeline_multisample_state_create_info(const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr) const { - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; + bool is_sample_shading_enabled = false; + float min_sample_shading = std::numeric_limits::max(); + Anvil::StructChainUniquePtr result_ptr; + + in_gfx_pipeline_create_info_ptr->get_sample_shading_state(&is_sample_shading_enabled, + &min_sample_shading); - if (pipeline_config_iterator == m_pipeline_configurations.end() ) + if (!in_gfx_pipeline_create_info_ptr->is_rasterizer_discard_enabled() ) { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); + bool are_custom_sample_locations_enabled = false; + VkExtent2D custom_sample_location_grid_size = {0, 0}; + Anvil::SampleCountFlagBits custom_sample_locations_per_pixel = Anvil::SampleCountFlagBits::NONE; + const Anvil::SampleLocation* custom_sample_locations_ptr = nullptr; + Anvil::StructChainer chainer; + const bool is_sample_mask_enabled = in_gfx_pipeline_create_info_ptr->is_sample_mask_enabled(); + uint32_t n_custom_sample_locations = 0; - goto end; + Anvil::SampleCountFlagBits sample_count = static_cast(0); + const VkSampleMask* sample_mask_ptr = nullptr; + + in_gfx_pipeline_create_info_ptr->get_multisampling_properties(&sample_count, + &sample_mask_ptr); + in_gfx_pipeline_create_info_ptr->get_sample_location_state (&are_custom_sample_locations_enabled, + &custom_sample_locations_per_pixel, + &custom_sample_location_grid_size, + &n_custom_sample_locations, + &custom_sample_locations_ptr); + + /* If sample mask is not enabled, Vulkan spec will assume all samples need to pass. This is what the default sample mask value mimics. + * + * Hence, if the application specified a non-~0u sample mask and has NOT enabled the sample mask using toggle_sample_mask(), it's (in + * all likelihood) a trivial app-side issue. + */ + anvil_assert((!is_sample_mask_enabled && *sample_mask_ptr == ~0u) || + is_sample_mask_enabled); + + { + VkPipelineMultisampleStateCreateInfo multisample_state_create_info; + + multisample_state_create_info.alphaToCoverageEnable = in_gfx_pipeline_create_info_ptr->is_alpha_to_coverage_enabled() ? VK_TRUE : VK_FALSE; + multisample_state_create_info.alphaToOneEnable = in_gfx_pipeline_create_info_ptr->is_alpha_to_one_enabled() ? VK_TRUE : VK_FALSE; + multisample_state_create_info.flags = 0; + multisample_state_create_info.minSampleShading = min_sample_shading; + multisample_state_create_info.pNext = nullptr; + multisample_state_create_info.pSampleMask = (is_sample_mask_enabled) ? 
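/* Illustrative sketch (not part of this patch): the sample-mask convention the assertion above
 * relies on. Passing pSampleMask == nullptr tells Vulkan to treat every sample as covered, which
 * is exactly what an all-ones mask expresses; a mask other than ~0u therefore only has an effect
 * if the app has actually opted in to using it.
 */
#include <cassert>
#include <vulkan/vulkan.h>

void fill_sample_mask(VkPipelineMultisampleStateCreateInfo& info,
                      bool                                  is_sample_mask_enabled,
                      const VkSampleMask*                   app_sample_mask_ptr) /* must not be nullptr in this sketch */
{
    /* A likely app-side bug: a non-default mask was specified but never enabled. */
    assert(is_sample_mask_enabled || *app_sample_mask_ptr == ~0u);

    info.pSampleMask = is_sample_mask_enabled ? app_sample_mask_ptr
                                              : nullptr; /* nullptr == "all samples pass" */
}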
sample_mask_ptr : nullptr; + multisample_state_create_info.rasterizationSamples = static_cast(sample_count); + multisample_state_create_info.sampleShadingEnable = is_sample_shading_enabled ? VK_TRUE : VK_FALSE; + multisample_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; + + chainer.append_struct(multisample_state_create_info); + } + + if (are_custom_sample_locations_enabled) + { + VkPipelineSampleLocationsStateCreateInfoEXT psl_state_create_info; + VkSampleLocationsInfoEXT sample_locations_info; + + sample_locations_info.pNext = nullptr; + sample_locations_info.pSampleLocations = reinterpret_cast(custom_sample_locations_ptr); + sample_locations_info.sampleLocationGridSize = custom_sample_location_grid_size; + sample_locations_info.sampleLocationsCount = n_custom_sample_locations; + sample_locations_info.sampleLocationsPerPixel = static_cast(custom_sample_locations_per_pixel); + sample_locations_info.sType = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT; + + psl_state_create_info.pNext = nullptr; + psl_state_create_info.sampleLocationsEnable = VK_TRUE; + psl_state_create_info.sampleLocationsInfo = sample_locations_info; + psl_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT; + + anvil_assert(m_device_ptr->get_extension_info()->ext_sample_locations() ); + + chainer.append_struct(psl_state_create_info); + } + + result_ptr = chainer.create_chain(); } else { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (out_opt_is_enabled_ptr != nullptr) - { - *out_opt_is_enabled_ptr = pipeline_config_ptr->alpha_to_one_enabled; + /* Make sure any dependent modes are disabled */ + anvil_assert(!in_gfx_pipeline_create_info_ptr->is_alpha_to_coverage_enabled() ); + anvil_assert(!in_gfx_pipeline_create_info_ptr->is_alpha_to_one_enabled () ); + anvil_assert(!is_sample_shading_enabled); } - result = true; -end: - return result; + return result_ptr; } -/* Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_blending_properties(GraphicsPipelineID in_graphics_pipeline_id, - float* out_opt_blend_constant_vec4_ptr, - uint32_t* out_opt_n_blend_attachments_ptr) const +Anvil::StructChainUniquePtr Anvil::GraphicsPipelineManager::bake_pipeline_rasterization_state_create_info(const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr) const { - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; + Anvil::CullModeFlags cull_mode = Anvil::CullModeFlagBits::NONE; + float depth_bias_clamp = std::numeric_limits::max(); + float depth_bias_constant_factor = std::numeric_limits::max(); + float depth_bias_slope_factor = std::numeric_limits::max(); + Anvil::FrontFace front_face = Anvil::FrontFace::UNKNOWN; + bool is_depth_bias_enabled = false; + float line_width = std::numeric_limits::max(); + Anvil::PolygonMode polygon_mode = Anvil::PolygonMode::UNKNOWN; + Anvil::StructChainer raster_state_create_info_chainer; - if (pipeline_config_iterator == m_pipeline_configurations.end() ) + static const VkPipelineRasterizationStateRasterizationOrderAMD relaxed_rasterization_order_item = { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD, + nullptr, + VK_RASTERIZATION_ORDER_RELAXED_AMD + }; + static const VkPipelineRasterizationStateRasterizationOrderAMD 
strict_rasterization_order_item = { - pipeline_config_ptr = pipeline_config_iterator->second; - } + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD, + nullptr, + VK_RASTERIZATION_ORDER_STRICT_AMD + }; - if (out_opt_blend_constant_vec4_ptr != nullptr) - { - memcpy(out_opt_blend_constant_vec4_ptr, - pipeline_config_ptr->blend_constant, - sizeof(pipeline_config_ptr->blend_constant) ); - } - if (out_opt_n_blend_attachments_ptr != nullptr) + in_gfx_pipeline_create_info_ptr->get_depth_bias_state (&is_depth_bias_enabled, + &depth_bias_constant_factor, + &depth_bias_clamp, + &depth_bias_slope_factor); + in_gfx_pipeline_create_info_ptr->get_rasterization_properties(&polygon_mode, + &cull_mode, + &front_face, + &line_width); + { - *out_opt_n_blend_attachments_ptr = static_cast(pipeline_config_ptr->subpass_attachment_blending_properties.size() ); - } + VkPipelineRasterizationStateCreateInfo raster_state_create_info; - result = true; -end: - return result; -} + raster_state_create_info.cullMode = cull_mode.get_vk(); + raster_state_create_info.depthBiasClamp = depth_bias_clamp; + raster_state_create_info.depthBiasConstantFactor = depth_bias_constant_factor; + raster_state_create_info.depthBiasEnable = is_depth_bias_enabled ? VK_TRUE : VK_FALSE; + raster_state_create_info.depthBiasSlopeFactor = depth_bias_slope_factor; + raster_state_create_info.depthClampEnable = in_gfx_pipeline_create_info_ptr->is_depth_clamp_enabled() ? VK_TRUE : VK_FALSE; + raster_state_create_info.flags = 0; + raster_state_create_info.frontFace = static_cast(front_face); + raster_state_create_info.lineWidth = line_width; + raster_state_create_info.pNext = nullptr; + raster_state_create_info.polygonMode = static_cast(polygon_mode); + raster_state_create_info.rasterizerDiscardEnable = in_gfx_pipeline_create_info_ptr->is_rasterizer_discard_enabled() ? VK_TRUE : VK_FALSE; + raster_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO; -/* Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_color_blend_attachment_properties(GraphicsPipelineID in_graphics_pipeline_id, - SubPassAttachmentID in_attachment_id, - bool* out_opt_blending_enabled_ptr, - VkBlendOp* out_opt_blend_op_color_ptr, - VkBlendOp* out_opt_blend_op_alpha_ptr, - VkBlendFactor* out_opt_src_color_blend_factor_ptr, - VkBlendFactor* out_opt_dst_color_blend_factor_ptr, - VkBlendFactor* out_opt_src_alpha_blend_factor_ptr, - VkBlendFactor* out_opt_dst_alpha_blend_factor_ptr, - VkColorComponentFlags* out_opt_channel_write_mask_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - SubPassAttachmentToBlendingPropertiesMap::const_iterator props_iterator; - bool result = false; + raster_state_create_info_chainer.append_struct(raster_state_create_info); + } - if (pipeline_config_iterator == m_pipeline_configurations.end() ) + if (m_device_ptr->is_extension_enabled(VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME) ) { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); + /* Chain a predefined struct which will toggle the relaxed rasterization, as long as the device supports the + * VK_AMD_rasterization_order extension. 
+ */ + if (in_gfx_pipeline_create_info_ptr->get_rasterization_order() != Anvil::RasterizationOrderAMD::STRICT) + { + anvil_assert(in_gfx_pipeline_create_info_ptr->get_rasterization_order() == static_cast(relaxed_rasterization_order_item.rasterizationOrder) ); - goto end; + raster_state_create_info_chainer.append_struct(relaxed_rasterization_order_item); + } + else + { + raster_state_create_info_chainer.append_struct(strict_rasterization_order_item); + } } else { - pipeline_config_ptr = pipeline_config_iterator->second; + anvil_assert(in_gfx_pipeline_create_info_ptr->get_rasterization_order() == Anvil::RasterizationOrderAMD::STRICT); } - props_iterator = pipeline_config_ptr->subpass_attachment_blending_properties.find(in_attachment_id); - - if (props_iterator == pipeline_config_ptr->subpass_attachment_blending_properties.end() ) + if (m_device_ptr->is_extension_enabled(VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME) ) { - anvil_assert(!(props_iterator == pipeline_config_ptr->subpass_attachment_blending_properties.end() ) ); + /* Chain a predefined struct which will toggle the conservative rasterization mode, as long as the device supports the + * VK_EXT_conservative_rasterization extension. + */ + const VkPipelineRasterizationConservativeStateCreateInfoEXT conservative_rasterization_item + { + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT, + nullptr, + 0, + static_cast(in_gfx_pipeline_create_info_ptr->get_conservative_rasterization_mode() ), + in_gfx_pipeline_create_info_ptr->get_extra_primitive_overestimation_size() + }; - goto end; + raster_state_create_info_chainer.append_struct(conservative_rasterization_item); } - - if (out_opt_blending_enabled_ptr != nullptr) + else { - *out_opt_blending_enabled_ptr = props_iterator->second.blend_enabled; + anvil_assert(in_gfx_pipeline_create_info_ptr->get_conservative_rasterization_mode() == Anvil::ConservativeRasterizationModeEXT::DISABLED); } - if (out_opt_blend_op_color_ptr != nullptr) + if (m_device_ptr->is_extension_enabled(VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME) ) { - *out_opt_blend_op_color_ptr = props_iterator->second.blend_op_color; - } + const bool is_depth_clip_enabled = in_gfx_pipeline_create_info_ptr->is_depth_clip_enabled(); - if (out_opt_blend_op_alpha_ptr != nullptr) - { - *out_opt_blend_op_alpha_ptr = props_iterator->second.blend_op_alpha; - } + /* NOTE: No need to chain the struct IF ::depthClipEnable is to be specified as VK_TRUE, since it does not affect Vulkan impl's behavior. 
*/ + if (!is_depth_clip_enabled) + { + VkPipelineRasterizationDepthClipStateCreateInfoEXT create_info; - if (out_opt_src_color_blend_factor_ptr != nullptr) - { - *out_opt_src_color_blend_factor_ptr = props_iterator->second.src_color_blend_factor; - } + create_info.depthClipEnable = VK_FALSE; + create_info.flags = 0; + create_info.pNext = nullptr; + create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT; - if (out_opt_dst_color_blend_factor_ptr != nullptr) - { - *out_opt_dst_color_blend_factor_ptr = props_iterator->second.dst_color_blend_factor; + raster_state_create_info_chainer.append_struct(create_info); + } } - if (out_opt_src_alpha_blend_factor_ptr != nullptr) + if (m_device_ptr->is_extension_enabled(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME) ) { - *out_opt_src_alpha_blend_factor_ptr = props_iterator->second.src_alpha_blend_factor; - } + const auto& rasterization_stream_index = in_gfx_pipeline_create_info_ptr->get_rasterization_stream_index(); - if (out_opt_dst_alpha_blend_factor_ptr != nullptr) - { - *out_opt_dst_alpha_blend_factor_ptr = props_iterator->second.dst_alpha_blend_factor; - } + if (rasterization_stream_index != 0) + { + VkPipelineRasterizationStateStreamCreateInfoEXT create_info; - if (out_opt_channel_write_mask_ptr != nullptr) + create_info.flags = 0; + create_info.pNext = nullptr; + create_info.rasterizationStream = rasterization_stream_index; + create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT; + + raster_state_create_info_chainer.append_struct(create_info); + } + } + else { - *out_opt_channel_write_mask_ptr = props_iterator->second.channel_write_mask; + anvil_assert(in_gfx_pipeline_create_info_ptr->get_rasterization_stream_index() == 0); } - result = true; -end: - return result; + return raster_state_create_info_chainer.create_chain(); } -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_depth_bias_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr, - float* out_opt_depth_bias_constant_factor_ptr, - float* out_opt_depth_bias_clamp_ptr, - float* out_opt_depth_bias_slope_factor_ptr) const +/* Please see header for specification */ +std::unique_ptr > Anvil::GraphicsPipelineManager::bake_pipeline_shader_stage_create_info_chain_vector(const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr) const { - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) + static const Anvil::ShaderStage graphics_shader_stages[] = { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); + Anvil::ShaderStage::FRAGMENT, + Anvil::ShaderStage::GEOMETRY, + Anvil::ShaderStage::TESSELLATION_CONTROL, + Anvil::ShaderStage::TESSELLATION_EVALUATION, + Anvil::ShaderStage::VERTEX + }; - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } + std::unique_ptr > shader_stage_create_info_chainer_ptr(new Anvil::StructChainVector() ); - if (out_opt_is_enabled_ptr != nullptr) - { - *out_opt_is_enabled_ptr = pipeline_config_ptr->depth_bias_enabled; - } - if (out_opt_depth_bias_constant_factor_ptr != nullptr) + for (const auto& current_graphics_shader_stage : graphics_shader_stages) { - *out_opt_depth_bias_constant_factor_ptr = pipeline_config_ptr->depth_bias_constant_factor; - } + const Anvil::ShaderModuleStageEntryPoint* 
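/* Illustrative sketch (not part of this patch): what the StructChainer calls above boil down to in
 * raw Vulkan. Extension structs such as the AMD rasterization-order block are linked into the
 * rasterization state through the pNext chain, and only when the corresponding device extension is
 * enabled. Both structs must stay alive until the pipeline has been created.
 */
#include <vulkan/vulkan.h>

void chain_rasterization_order(VkPipelineRasterizationStateCreateInfo&            raster_info,
                               VkPipelineRasterizationStateRasterizationOrderAMD& order_info,
                               bool                                               relaxed_order_supported) /* i.e. VK_AMD_rasterization_order is enabled */
{
    raster_info.pNext = nullptr;

    if (relaxed_order_supported)
    {
        order_info.sType              = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD;
        order_info.pNext              = nullptr;
        order_info.rasterizationOrder = VK_RASTERIZATION_ORDER_RELAXED_AMD;

        /* Append to the chain: the root struct's pNext points at the extension struct. */
        raster_info.pNext = &order_info;
    }
}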
shader_stage_entry_point_ptr = nullptr; - if (out_opt_depth_bias_clamp_ptr != nullptr) - { - *out_opt_depth_bias_clamp_ptr = pipeline_config_ptr->depth_bias_clamp; - } + in_gfx_pipeline_create_info_ptr->get_shader_stage_properties(current_graphics_shader_stage, + &shader_stage_entry_point_ptr); - if (out_opt_depth_bias_slope_factor_ptr != nullptr) - { - *out_opt_depth_bias_slope_factor_ptr = pipeline_config_ptr->depth_bias_slope_factor; - } + if (shader_stage_entry_point_ptr != nullptr) + { + const unsigned char* spec_constants_data_buffer_ptr = nullptr; + uint32_t spec_constants_data_buffer_size = 0; + const SpecializationConstants* current_shader_stage_specialization_constants_ptr = nullptr; + Anvil::ShaderModule* shader_module_ptr = shader_stage_entry_point_ptr->shader_module_ptr; - result = true; -end: - return result; -} + in_gfx_pipeline_create_info_ptr->get_specialization_constants(current_graphics_shader_stage, + ¤t_shader_stage_specialization_constants_ptr, + &spec_constants_data_buffer_ptr, + &spec_constants_data_buffer_size); -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_depth_bounds_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr, - float* out_opt_min_depth_bounds_ptr, - float* out_opt_max_depth_bounds_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; + { + Anvil::StructID root_struct_id; + Anvil::StructChainer shader_stage_create_info_chainer; - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); + { + VkPipelineShaderStageCreateInfo shader_stage_create_info; + + shader_stage_create_info.module = shader_module_ptr->get_module(); + shader_stage_create_info.pName = (shader_stage_entry_point_ptr->stage == Anvil::ShaderStage::FRAGMENT) ? shader_module_ptr->get_fs_entrypoint_name().c_str() + : (shader_stage_entry_point_ptr->stage == Anvil::ShaderStage::GEOMETRY) ? shader_module_ptr->get_gs_entrypoint_name().c_str() + : (shader_stage_entry_point_ptr->stage == Anvil::ShaderStage::TESSELLATION_CONTROL) ? shader_module_ptr->get_tc_entrypoint_name().c_str() + : (shader_stage_entry_point_ptr->stage == Anvil::ShaderStage::TESSELLATION_EVALUATION) ? shader_module_ptr->get_te_entrypoint_name().c_str() + : (shader_stage_entry_point_ptr->stage == Anvil::ShaderStage::VERTEX) ? 
shader_module_ptr->get_vs_entrypoint_name().c_str() + : nullptr; + + shader_stage_create_info.flags = 0; + shader_stage_create_info.pNext = nullptr; + shader_stage_create_info.pSpecializationInfo = nullptr; /* patched by the chainer below */ + shader_stage_create_info.stage = static_cast(Anvil::Utils::get_shader_stage_flag_bits_from_shader_stage(shader_stage_entry_point_ptr->stage) ); + shader_stage_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; + + root_struct_id = shader_stage_create_info_chainer.append_struct(shader_stage_create_info); + } - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } + if (current_shader_stage_specialization_constants_ptr->size() > 0) + { + Anvil::StructID helper_struct_id; + VkSpecializationInfo specialization_info; + std::vector specialization_map_entries; - if (out_opt_is_enabled_ptr != nullptr) - { - *out_opt_is_enabled_ptr = pipeline_config_ptr->depth_bounds_test_enabled; - } + if (current_shader_stage_specialization_constants_ptr->size() > 0) + { + bake_specialization_info_vk(*current_shader_stage_specialization_constants_ptr, + spec_constants_data_buffer_ptr, + spec_constants_data_buffer_size, + &specialization_map_entries, + &specialization_info); + } - if (out_opt_min_depth_bounds_ptr != nullptr) - { - *out_opt_min_depth_bounds_ptr = pipeline_config_ptr->min_depth_bounds; - } + helper_struct_id = shader_stage_create_info_chainer.store_helper_structure(specialization_info, + root_struct_id, + offsetof(VkPipelineShaderStageCreateInfo, pSpecializationInfo) ); - if (out_opt_max_depth_bounds_ptr != nullptr) - { - *out_opt_max_depth_bounds_ptr = pipeline_config_ptr->max_depth_bounds; + shader_stage_create_info_chainer.store_helper_structure_vector(specialization_map_entries, + helper_struct_id, + offsetof(VkSpecializationInfo, pMapEntries) ); + } + + shader_stage_create_info_chainer_ptr->append_struct_chain(shader_stage_create_info_chainer.create_chain() ); + } + } } - result = true; -end: - return result; + return shader_stage_create_info_chainer_ptr; } -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_depth_clamp_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr) const +/* Please see header for specification */ +Anvil::StructChainUniquePtr Anvil::GraphicsPipelineManager::bake_pipeline_tessellation_state_create_info(const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr) const { - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; + Anvil::StructChainUniquePtr result_ptr; + const Anvil::ShaderModuleStageEntryPoint* tc_shader_stage_entry_point_ptr = nullptr; + const Anvil::ShaderModuleStageEntryPoint* te_shader_stage_entry_point_ptr = nullptr; - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); + in_gfx_pipeline_create_info_ptr->get_shader_stage_properties(Anvil::ShaderStage::TESSELLATION_CONTROL, + &tc_shader_stage_entry_point_ptr); + in_gfx_pipeline_create_info_ptr->get_shader_stage_properties(Anvil::ShaderStage::TESSELLATION_EVALUATION, + &te_shader_stage_entry_point_ptr); - goto end; - } - else + if (tc_shader_stage_entry_point_ptr != nullptr && + te_shader_stage_entry_point_ptr != nullptr) { - pipeline_config_ptr = pipeline_config_iterator->second; - } + Anvil::StructChainer 
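/* Illustrative sketch (not part of this patch): the layout bake_specialization_info_vk() produces.
 * Each specialization constant becomes a VkSpecializationMapEntry pointing into one shared data
 * buffer, and the resulting VkSpecializationInfo is what ends up behind
 * VkPipelineShaderStageCreateInfo::pSpecializationInfo.
 */
#include <cstdint>
#include <vector>
#include <vulkan/vulkan.h>

VkSpecializationInfo make_specialization_info(const std::vector<VkSpecializationMapEntry>& entries,
                                              const void*                                  data_ptr,
                                              size_t                                       data_size)
{
    VkSpecializationInfo info = {};

    info.mapEntryCount = static_cast<uint32_t>(entries.size() );
    info.pMapEntries   = entries.data();
    info.dataSize      = data_size;
    info.pData         = data_ptr;

    return info;
}

void specialization_usage_example()
{
    const uint32_t workgroup_size = 64; /* hypothetical constant value */

    /* One 32-bit constant with ID 0, stored at offset 0 of the data buffer. */
    const std::vector<VkSpecializationMapEntry> entries = {
        {0 /* constantID */, 0 /* offset */, sizeof(uint32_t)}
    };

    const VkSpecializationInfo info = make_specialization_info(entries,
                                                               &workgroup_size,
                                                               sizeof(workgroup_size) );
    (void) info;
}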
tessellation_state_create_info_chainer; - if (out_opt_is_enabled_ptr != nullptr) - { - *out_opt_is_enabled_ptr = pipeline_config_ptr->depth_clamp_enabled; - } - - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_depth_test_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr, - VkCompareOp* out_opt_compare_op_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (out_opt_is_enabled_ptr != nullptr) - { - *out_opt_is_enabled_ptr = pipeline_config_ptr->depth_test_enabled; - } - - if (out_opt_compare_op_ptr != nullptr) - { - *out_opt_compare_op_ptr = pipeline_config_ptr->depth_test_compare_op; - } - - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_depth_write_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (out_opt_is_enabled_ptr != nullptr) - { - *out_opt_is_enabled_ptr = pipeline_config_ptr->depth_writes_enabled; - } - - result = true; -end: - return result; -} - -/* Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_dynamic_scissor_state_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t* out_opt_n_dynamic_scissor_boxes_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (out_opt_n_dynamic_scissor_boxes_ptr != nullptr) - { - *out_opt_n_dynamic_scissor_boxes_ptr = pipeline_config_ptr->n_dynamic_scissor_boxes; - } - - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_dynamic_states(GraphicsPipelineID in_graphics_pipeline_id, - DynamicStateBitfield* out_opt_enabled_dynamic_states_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (out_opt_enabled_dynamic_states_ptr != nullptr) - { - *out_opt_enabled_dynamic_states_ptr = pipeline_config_ptr->enabled_dynamic_states; - } - - result = true; -end: - return result; -} - -/* Please see header for specification */ -bool 
Anvil::GraphicsPipelineManager::get_dynamic_viewport_state_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t* out_n_dynamic_viewports_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (out_n_dynamic_viewports_ptr!= nullptr) - { - *out_n_dynamic_viewports_ptr = pipeline_config_ptr->n_dynamic_viewports; - } - - result = true; -end: - return result; -} - -/* Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_graphics_pipeline_properties(Anvil::GraphicsPipelineID in_graphics_pipeline_id, - uint32_t* out_opt_n_scissors_ptr, - uint32_t* out_opt_n_viewports_ptr, - uint32_t* out_opt_n_vertex_input_attributes_ptr, - uint32_t* out_opt_n_vertex_input_bindings_ptr, - std::shared_ptr* out_opt_renderpass_ptr, - SubPassID* out_opt_subpass_id_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (out_opt_n_scissors_ptr != nullptr) - { - *out_opt_n_scissors_ptr = static_cast(pipeline_config_ptr->scissor_boxes.size() ); - } - - if (out_opt_n_viewports_ptr != nullptr) - { - *out_opt_n_viewports_ptr = static_cast(pipeline_config_ptr->viewports.size() ); - } - - if (out_opt_n_vertex_input_attributes_ptr != nullptr) - { - *out_opt_n_vertex_input_attributes_ptr = static_cast(pipeline_config_ptr->vk_input_attributes.size() ); - } - - if (out_opt_n_vertex_input_bindings_ptr != nullptr) - { - *out_opt_n_vertex_input_bindings_ptr = static_cast(pipeline_config_ptr->vk_input_bindings.size() ); - } - - if (out_opt_renderpass_ptr != nullptr) - { - *out_opt_renderpass_ptr = pipeline_config_ptr->renderpass_ptr; - } - - if (out_opt_subpass_id_ptr != nullptr) - { - *out_opt_subpass_id_ptr = pipeline_config_ptr->subpass_id; - } - - /* All done */ - result = true; -end: - return result; -} - -/* Please see header for specification */ -VkPipeline Anvil::GraphicsPipelineManager::get_graphics_pipeline(GraphicsPipelineID in_pipeline_id) -{ - auto pipeline_iterator = m_pipelines.find(in_pipeline_id); - bool result = false; - VkPipeline result_pipeline = VK_NULL_HANDLE; - - ANVIL_REDUNDANT_VARIABLE(result); - - if (pipeline_iterator == m_pipelines.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end()) ); - - goto end; - } - - if (pipeline_iterator->second->baked_pipeline == VK_NULL_HANDLE || - pipeline_iterator->second->dirty) - { - anvil_assert(!pipeline_iterator->second->is_proxy); - - result = bake(); - - anvil_assert(result); - anvil_assert(pipeline_iterator->second->dirty == false); - anvil_assert(pipeline_iterator->second->baked_pipeline != VK_NULL_HANDLE); - } - - result_pipeline = pipeline_iterator->second->baked_pipeline; - -end: - return result_pipeline; -} - -/* Please see header for specification */ -std::shared_ptr Anvil::GraphicsPipelineManager::get_graphics_pipeline_layout(GraphicsPipelineID in_pipeline_id) -{ - return 
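/* Illustrative sketch (not part of this patch): the lazy-baking contract visible in the removed
 * get_graphics_pipeline() above. Fetching a pipeline's VkPipeline handle triggers a bake if the
 * object has not been (re)baked yet, so callers always receive a valid handle without calling
 * bake() explicitly. The container and record below are simplified stand-ins for the manager's
 * members.
 */
#include <cstdint>
#include <map>
#include <vulkan/vulkan.h>

struct PipelineRecord
{
    VkPipeline baked_pipeline = VK_NULL_HANDLE;
    bool       dirty          = true;
};

struct LazyPipelineStore
{
    std::map<uint32_t, PipelineRecord> pipelines;

    void bake()
    {
        /* No-op in this sketch; the real manager issues the batched vkCreateGraphicsPipelines()
         * call here and distributes the resulting handles. */
    }

    VkPipeline get_pipeline(uint32_t id)
    {
        auto iterator = pipelines.find(id);

        if (iterator == pipelines.end() )
        {
            return VK_NULL_HANDLE;
        }

        if (iterator->second.baked_pipeline == VK_NULL_HANDLE ||
            iterator->second.dirty)
        {
            bake(); /* lazily (re)create the handle on first use */
        }

        return iterator->second.baked_pipeline;
    }
};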
get_pipeline_layout(in_pipeline_id); -} - -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_input_assembly_properties(GraphicsPipelineID in_graphics_pipeline_id, - VkPrimitiveTopology* out_opt_primitive_topology_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (out_opt_primitive_topology_ptr != nullptr) - { - *out_opt_primitive_topology_ptr = pipeline_config_ptr->primitive_topology; - } - - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_logic_op_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr, - VkLogicOp* out_opt_logic_op_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (out_opt_is_enabled_ptr != nullptr) - { - *out_opt_is_enabled_ptr = pipeline_config_ptr->logic_op_enabled; - } - - if (out_opt_logic_op_ptr != nullptr) - { - *out_opt_logic_op_ptr = pipeline_config_ptr->logic_op; - } - - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_multisampling_properties(GraphicsPipelineID in_graphics_pipeline_id, - VkSampleCountFlags* out_opt_sample_count_ptr, - VkSampleMask* out_opt_sample_mask_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (out_opt_sample_count_ptr != nullptr) - { - *out_opt_sample_count_ptr = pipeline_config_ptr->sample_count; - } - - if (out_opt_sample_mask_ptr != nullptr) - { - *out_opt_sample_mask_ptr = pipeline_config_ptr->sample_mask; - } - - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_primitive_restart_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (out_opt_is_enabled_ptr != nullptr) - { - *out_opt_is_enabled_ptr = pipeline_config_ptr->primitive_restart_enabled; - } - - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool 
Anvil::GraphicsPipelineManager::get_rasterization_order(GraphicsPipelineID in_graphics_pipeline_id, - VkRasterizationOrderAMD* out_opt_rasterization_order_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (out_opt_rasterization_order_ptr != nullptr) - { - *out_opt_rasterization_order_ptr = pipeline_config_ptr->rasterization_order; - } - - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_rasterizer_discard_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (out_opt_is_enabled_ptr != nullptr) - { - *out_opt_is_enabled_ptr = pipeline_config_ptr->rasterizer_discard_enabled; - } - - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_rasterization_properties(GraphicsPipelineID in_graphics_pipeline_id, - VkPolygonMode* out_opt_polygon_mode_ptr, - VkCullModeFlags* out_opt_cull_mode_ptr, - VkFrontFace* out_opt_front_face_ptr, - float* out_opt_line_width_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (out_opt_polygon_mode_ptr != nullptr) - { - *out_opt_polygon_mode_ptr = pipeline_config_ptr->polygon_mode; - } - - if (out_opt_cull_mode_ptr != nullptr) - { - *out_opt_cull_mode_ptr = pipeline_config_ptr->cull_mode; - } - - if (out_opt_front_face_ptr != nullptr) - { - *out_opt_front_face_ptr = pipeline_config_ptr->front_face; - } - - if (out_opt_line_width_ptr != nullptr) - { - *out_opt_line_width_ptr = pipeline_config_ptr->line_width; - } - - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_sample_shading_state(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr, - float* out_opt_min_sample_shading_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (out_opt_is_enabled_ptr != nullptr) - { - *out_opt_is_enabled_ptr = pipeline_config_ptr->sample_shading_enabled; - } - - if (out_opt_min_sample_shading_ptr != nullptr) - { - 
*out_opt_min_sample_shading_ptr = pipeline_config_ptr->min_sample_shading; - } - - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_scissor_box_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_n_scissor_box, - int32_t* out_opt_x_ptr, - int32_t* out_opt_y_ptr, - uint32_t* out_opt_width_ptr, - uint32_t* out_opt_height_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - const InternalScissorBox* scissor_box_props_ptr = nullptr; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (pipeline_config_ptr->scissor_boxes.find(in_n_scissor_box) == pipeline_config_ptr->scissor_boxes.end() ) - { - anvil_assert(!(pipeline_config_ptr->scissor_boxes.find(in_n_scissor_box) == pipeline_config_ptr->scissor_boxes.end()) ); - - goto end; - } - else - { - scissor_box_props_ptr = &pipeline_config_ptr->scissor_boxes.at(in_n_scissor_box); - } - - if (out_opt_x_ptr != nullptr) - { - *out_opt_x_ptr = scissor_box_props_ptr->x; - } - - if (out_opt_y_ptr != nullptr) - { - *out_opt_y_ptr = scissor_box_props_ptr->y; - } - - if (out_opt_width_ptr != nullptr) - { - *out_opt_width_ptr = scissor_box_props_ptr->width; - } - - if (out_opt_height_ptr != nullptr) - { - *out_opt_height_ptr = scissor_box_props_ptr->height; - } - - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_stencil_test_properties(GraphicsPipelineID in_graphics_pipeline_id, - bool* out_opt_is_enabled_ptr, - VkStencilOp* out_opt_front_stencil_fail_op_ptr, - VkStencilOp* out_opt_front_stencil_pass_op_ptr, - VkStencilOp* out_opt_front_stencil_depth_fail_op_ptr, - VkCompareOp* out_opt_front_stencil_compare_op_ptr, - uint32_t* out_opt_front_stencil_compare_mask_ptr, - uint32_t* out_opt_front_stencil_write_mask_ptr, - uint32_t* out_opt_front_stencil_reference_ptr, - VkStencilOp* out_opt_back_stencil_fail_op_ptr, - VkStencilOp* out_opt_back_stencil_pass_op_ptr, - VkStencilOp* out_opt_back_stencil_depth_fail_op_ptr, - VkCompareOp* out_opt_back_stencil_compare_op_ptr, - uint32_t* out_opt_back_stencil_compare_mask_ptr, - uint32_t* out_opt_back_stencil_write_mask_ptr, - uint32_t* out_opt_back_stencil_reference_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (out_opt_is_enabled_ptr != nullptr) - { - *out_opt_is_enabled_ptr = pipeline_config_ptr->stencil_test_enabled; - } - - if (out_opt_front_stencil_fail_op_ptr != nullptr) - { - *out_opt_front_stencil_fail_op_ptr = pipeline_config_ptr->stencil_state_front_face.failOp; - } - - if (out_opt_front_stencil_pass_op_ptr != nullptr) - { - *out_opt_front_stencil_pass_op_ptr = pipeline_config_ptr->stencil_state_front_face.passOp; - } - - if (out_opt_front_stencil_depth_fail_op_ptr != nullptr) - { - *out_opt_front_stencil_depth_fail_op_ptr = 
pipeline_config_ptr->stencil_state_front_face.depthFailOp; - } - - if (out_opt_front_stencil_compare_op_ptr != nullptr) - { - *out_opt_front_stencil_compare_op_ptr = pipeline_config_ptr->stencil_state_front_face.compareOp; - } - - if (out_opt_front_stencil_compare_mask_ptr != nullptr) - { - *out_opt_front_stencil_compare_mask_ptr = pipeline_config_ptr->stencil_state_front_face.compareMask; - } - - if (out_opt_front_stencil_write_mask_ptr != nullptr) - { - *out_opt_front_stencil_write_mask_ptr = pipeline_config_ptr->stencil_state_front_face.writeMask; - } - - if (out_opt_front_stencil_reference_ptr != nullptr) - { - *out_opt_front_stencil_reference_ptr = pipeline_config_ptr->stencil_state_front_face.reference; - } - - if (out_opt_back_stencil_fail_op_ptr != nullptr) - { - *out_opt_back_stencil_fail_op_ptr = pipeline_config_ptr->stencil_state_back_face.failOp; - } - - if (out_opt_back_stencil_pass_op_ptr != nullptr) - { - *out_opt_back_stencil_pass_op_ptr = pipeline_config_ptr->stencil_state_back_face.passOp; - } - - if (out_opt_back_stencil_depth_fail_op_ptr != nullptr) - { - *out_opt_back_stencil_depth_fail_op_ptr = pipeline_config_ptr->stencil_state_back_face.depthFailOp; - } - - if (out_opt_back_stencil_compare_op_ptr != nullptr) - { - *out_opt_back_stencil_compare_op_ptr = pipeline_config_ptr->stencil_state_back_face.compareOp; - } - - if (out_opt_back_stencil_compare_mask_ptr != nullptr) - { - *out_opt_back_stencil_compare_mask_ptr = pipeline_config_ptr->stencil_state_back_face.compareMask; - } - - if (out_opt_back_stencil_write_mask_ptr != nullptr) - { - *out_opt_back_stencil_write_mask_ptr = pipeline_config_ptr->stencil_state_back_face.writeMask; - } - - if (out_opt_back_stencil_reference_ptr != nullptr) - { - *out_opt_back_stencil_reference_ptr = pipeline_config_ptr->stencil_state_back_face.reference; - } - - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_tessellation_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t* out_opt_n_patch_control_points_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (out_opt_n_patch_control_points_ptr != nullptr) - { - *out_opt_n_patch_control_points_ptr = pipeline_config_ptr->n_patch_control_points; - } - - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_vertex_input_attribute_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_n_vertex_input_attribute, - uint32_t* out_opt_location_ptr, - uint32_t* out_opt_binding_ptr, - VkFormat* out_opt_format_ptr, - uint32_t* out_opt_offset_ptr) const -{ - VkVertexInputAttributeDescription* attribute_ptr; - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (pipeline_config_ptr->vk_input_attributes.size() <= 
in_n_vertex_input_attribute) - { - anvil_assert(!(pipeline_config_ptr->vk_input_attributes.size() <= in_n_vertex_input_attribute) ); - - goto end; - } - else - { - attribute_ptr = &pipeline_config_ptr->vk_input_attributes.at(in_n_vertex_input_attribute); - } - - if (out_opt_location_ptr != nullptr) - { - *out_opt_location_ptr = attribute_ptr->location; - } - - if (out_opt_binding_ptr != nullptr) - { - *out_opt_binding_ptr = attribute_ptr->binding; - } - - if (out_opt_format_ptr != nullptr) - { - *out_opt_format_ptr = attribute_ptr->format; - } - - if (out_opt_offset_ptr != nullptr) - { - *out_opt_offset_ptr = attribute_ptr->offset; - } - - /* All done */ - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_vertex_input_binding_for_attribute_location(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_input_vertex_attribute_location, - uint32_t* out_input_vertex_binding_ptr) -{ - AttributeLocationToBindingIndexMap::const_iterator attribute_location_iterator; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - std::shared_ptr pipeline_config_ptr; - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); - std::shared_ptr pipeline_ptr; - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() || - pipeline_iterator == m_pipelines.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end() )); - anvil_assert(!(pipeline_iterator == m_pipelines.end() )); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - pipeline_ptr = pipeline_iterator->second; - } - - if (pipeline_ptr->dirty) - { - const bool has_been_successfully_baked = bake(); - - if (!has_been_successfully_baked) - { - anvil_assert(has_been_successfully_baked); - - goto end; - } - - anvil_assert(!pipeline_ptr->dirty); - } - - if ( (attribute_location_iterator = pipeline_config_ptr->attribute_location_to_binding_index_map.find(in_input_vertex_attribute_location)) == pipeline_config_ptr->attribute_location_to_binding_index_map.end() ) - { - anvil_assert(!(attribute_location_iterator == pipeline_config_ptr->attribute_location_to_binding_index_map.end() )); - - goto end; - } - else - { - *out_input_vertex_binding_ptr = attribute_location_iterator->second; - result = true; - } - -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_vertex_input_binding_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_n_vertex_input_binding, - uint32_t* out_opt_binding_ptr, - uint32_t* out_opt_stride_ptr, - VkVertexInputRate* out_opt_input_rate_ptr) const -{ - VkVertexInputBindingDescription* binding_ptr; - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (pipeline_config_ptr->vk_input_bindings.size() <= in_n_vertex_input_binding) - { - anvil_assert(!(pipeline_config_ptr->vk_input_bindings.size() <= in_n_vertex_input_binding) ); - - goto end; - } - else - { - binding_ptr = &pipeline_config_ptr->vk_input_bindings.at(in_n_vertex_input_binding); - } - - if (out_opt_binding_ptr != nullptr) - { - 
*out_opt_binding_ptr = binding_ptr->binding; - } - - if (out_opt_stride_ptr != nullptr) - { - *out_opt_stride_ptr = binding_ptr->stride; - } - - if (out_opt_input_rate_ptr != nullptr) - { - *out_opt_input_rate_ptr = binding_ptr->inputRate; - } - - /* All done */ - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::GraphicsPipelineManager::get_viewport_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_n_viewport, - float* out_opt_origin_x_ptr, - float* out_opt_origin_y_ptr, - float* out_opt_width_ptr, - float* out_opt_height_ptr, - float* out_opt_min_depth_ptr, - float* out_opt_max_depth_ptr) const -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - bool result = false; - InternalViewport* viewport_props_ptr; - - if (pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (pipeline_config_ptr->viewports.find(in_n_viewport) == pipeline_config_ptr->viewports.end() ) - { - anvil_assert(!(pipeline_config_ptr->viewports.find(in_n_viewport) == pipeline_config_ptr->viewports.end()) ); - - goto end; - } - else - { - viewport_props_ptr = &pipeline_config_ptr->viewports.at(in_n_viewport); - } - - if (out_opt_origin_x_ptr != nullptr) - { - *out_opt_origin_x_ptr = viewport_props_ptr->origin_x; - } - - if (out_opt_origin_y_ptr != nullptr) - { - *out_opt_origin_y_ptr = viewport_props_ptr->origin_y; - } - - if (out_opt_width_ptr != nullptr) - { - *out_opt_width_ptr = viewport_props_ptr->width; - } - - if (out_opt_height_ptr != nullptr) - { - *out_opt_height_ptr = viewport_props_ptr->height; - } - - if (out_opt_min_depth_ptr != nullptr) - { - *out_opt_min_depth_ptr = viewport_props_ptr->min_depth; - } - - if (out_opt_max_depth_ptr != nullptr) - { - *out_opt_max_depth_ptr = viewport_props_ptr->max_depth; - } - - result = true; -end: - return result; -} - -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::set_blending_properties(GraphicsPipelineID in_graphics_pipeline_id, - const float* in_blend_constant_vec4) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); - - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - memcpy(pipeline_config_ptr->blend_constant, - in_blend_constant_vec4, - sizeof(pipeline_config_ptr->blend_constant) ); - - pipeline_iterator->second->dirty = true; -end: - ; -} - -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::set_color_blend_attachment_properties(GraphicsPipelineID in_graphics_pipeline_id, - SubPassAttachmentID in_attachment_id, - bool in_blending_enabled, - VkBlendOp in_blend_op_color, - VkBlendOp in_blend_op_alpha, - VkBlendFactor in_src_color_blend_factor, - VkBlendFactor in_dst_color_blend_factor, - VkBlendFactor in_src_alpha_blend_factor, - VkBlendFactor in_dst_alpha_blend_factor, - VkColorComponentFlags in_channel_write_mask) -{ - BlendingProperties* 
attachment_blending_props_ptr = nullptr; - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); - - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - /* Retrieve and update the attachment descriptor. */ - attachment_blending_props_ptr = &pipeline_config_ptr->subpass_attachment_blending_properties[in_attachment_id]; - - attachment_blending_props_ptr->blend_enabled = in_blending_enabled; - attachment_blending_props_ptr->blend_op_alpha = in_blend_op_alpha; - attachment_blending_props_ptr->blend_op_color = in_blend_op_color; - attachment_blending_props_ptr->channel_write_mask = in_channel_write_mask; - attachment_blending_props_ptr->dst_alpha_blend_factor = in_dst_alpha_blend_factor; - attachment_blending_props_ptr->dst_color_blend_factor = in_dst_color_blend_factor; - attachment_blending_props_ptr->src_alpha_blend_factor = in_src_alpha_blend_factor; - attachment_blending_props_ptr->src_color_blend_factor = in_src_color_blend_factor; - pipeline_iterator->second->dirty = true; -end: - ; -} - -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::set_dynamic_scissor_state_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_n_dynamic_scissor_boxes) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); - - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - pipeline_config_ptr->n_dynamic_scissor_boxes = in_n_dynamic_scissor_boxes; - pipeline_iterator->second->dirty = true; -end: - ; -} - -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::set_dynamic_viewport_state_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_n_dynamic_viewports) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); - - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - pipeline_config_ptr->n_dynamic_viewports = in_n_dynamic_viewports; - pipeline_iterator->second->dirty = true; -end: - ; -} - -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::set_input_assembly_properties(GraphicsPipelineID in_graphics_pipeline_id, - VkPrimitiveTopology in_primitive_topology) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find 
(in_graphics_pipeline_id); - - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - pipeline_config_ptr->primitive_topology = in_primitive_topology; - pipeline_iterator->second->dirty = true; -end: - ; -} - -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::set_multisampling_properties(GraphicsPipelineID in_graphics_pipeline_id, - VkSampleCountFlagBits in_sample_count, - float in_min_sample_shading, - const VkSampleMask in_sample_mask) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); - - anvil_assert(in_sample_count <= VK_SAMPLE_COUNT_32_BIT); - - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - pipeline_config_ptr->min_sample_shading = in_min_sample_shading; - pipeline_config_ptr->sample_count = in_sample_count; - pipeline_config_ptr->sample_mask = in_sample_mask; - pipeline_iterator->second->dirty = true; - -end: - ; -} - -/* Please see header for specification */ -bool Anvil::GraphicsPipelineManager::set_pipeline_post_bake_callback(GraphicsPipelineID in_graphics_pipeline_id, - PipelinePostBakeFunction in_pipeline_post_bake_function) -{ - bool result = false; - - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); - - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - pipeline_config_ptr->post_bake_function = in_pipeline_post_bake_function; - result = true; - -end: - return result; -} - -/* Please see header for specification */ -bool Anvil::GraphicsPipelineManager::set_pipeline_pre_bake_callback(GraphicsPipelineID in_graphics_pipeline_id, - PipelinePreBakeFunction in_pfn_pipeline_pre_bake_function) -{ - bool result = false; - - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); - - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - pipeline_config_ptr->pre_bake_function = in_pfn_pipeline_pre_bake_function; - result = true; -end: - return result; -} - -/* Please see header for specification */ -bool Anvil::GraphicsPipelineManager::set_pipeline_state_from_pipeline(GraphicsPipelineID in_target_pipeline_id, - 
GraphicsPipelineID in_source_pipeline_id) -{ - std::shared_ptr cached_renderpass_ptr; - SubPassID cached_subpass_id = UINT32_MAX; - bool result = false; - std::shared_ptr source_pipeline_config_ptr; - - if (m_pipelines.find(in_target_pipeline_id) == m_pipelines.end() ) - { - anvil_assert(!(m_pipelines.find(in_target_pipeline_id) == m_pipelines.end()) ); - - goto end; - } - - if (m_pipeline_configurations.find(in_target_pipeline_id) == m_pipeline_configurations.end() ) - { - anvil_assert(!(m_pipeline_configurations.find(in_target_pipeline_id) == m_pipeline_configurations.end() )); - - goto end; - } - - if (m_pipeline_configurations.find(in_source_pipeline_id) == m_pipeline_configurations.end() ) - { - anvil_assert(!(m_pipeline_configurations.find(in_source_pipeline_id) == m_pipeline_configurations.end() )); - - goto end; - } - - cached_renderpass_ptr = m_pipeline_configurations[in_target_pipeline_id]->renderpass_ptr; - cached_subpass_id = m_pipeline_configurations[in_target_pipeline_id]->subpass_id; - - m_pipeline_configurations[in_target_pipeline_id] = m_pipeline_configurations[in_source_pipeline_id]; - m_pipelines [in_target_pipeline_id]->dsg_ptr = m_pipelines [in_source_pipeline_id]->dsg_ptr; - m_pipelines [in_target_pipeline_id]->dirty = true; - m_pipelines [in_target_pipeline_id]->push_constant_ranges = m_pipelines[in_source_pipeline_id]->push_constant_ranges; - - m_pipeline_configurations[in_target_pipeline_id]->renderpass_ptr = cached_renderpass_ptr; - m_pipeline_configurations[in_target_pipeline_id]->subpass_id = cached_subpass_id; - - result = true; -end: - return result; -} - -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::set_rasterization_order(GraphicsPipelineID in_graphics_pipeline_id, - VkRasterizationOrderAMD in_rasterization_order) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); - - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - if (pipeline_config_ptr->rasterization_order != in_rasterization_order) - { - pipeline_config_ptr->rasterization_order = in_rasterization_order; - pipeline_iterator->second->dirty = true; - } - -end: - ; -} - -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::set_rasterization_properties(GraphicsPipelineID in_graphics_pipeline_id, - VkPolygonMode in_polygon_mode, - VkCullModeFlags in_cull_mode, - VkFrontFace in_front_face, - float in_line_width) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); - - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - pipeline_config_ptr->cull_mode = in_cull_mode; - pipeline_config_ptr->front_face = in_front_face; - pipeline_config_ptr->line_width = in_line_width; - 
pipeline_config_ptr->polygon_mode = in_polygon_mode; - - pipeline_iterator->second->dirty = true; -end: - ; -} - -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::set_scissor_box_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_n_scissor_box, - int32_t in_x, - int32_t in_y, - uint32_t in_width, - uint32_t in_height) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); - - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); + { + VkPipelineTessellationStateCreateInfo tessellation_state_create_info; - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } + tessellation_state_create_info.flags = 0; + tessellation_state_create_info.patchControlPoints = in_gfx_pipeline_create_info_ptr->get_n_patch_control_points(); + tessellation_state_create_info.pNext = nullptr; + tessellation_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO; - pipeline_config_ptr->scissor_boxes[in_n_scissor_box] = InternalScissorBox(in_x, - in_y, - in_width, - in_height); + tessellation_state_create_info_chainer.append_struct(tessellation_state_create_info); + } - pipeline_iterator->second->dirty = true; -end: - ; -} + if (m_device_ptr->is_extension_enabled(VK_KHR_MAINTENANCE2_EXTENSION_NAME) ) + { + VkPipelineTessellationDomainOriginStateCreateInfoKHR domain_origin_state_create_info; -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::set_stencil_test_properties(GraphicsPipelineID in_graphics_pipeline_id, - bool in_update_front_face_state, - VkStencilOp in_stencil_fail_op, - VkStencilOp in_stencil_pass_op, - VkStencilOp in_stencil_depth_fail_op, - VkCompareOp in_stencil_compare_op, - uint32_t in_stencil_compare_mask, - uint32_t in_stencil_write_mask, - uint32_t in_stencil_reference) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); - VkStencilOpState* stencil_op_state_ptr = nullptr; + domain_origin_state_create_info.domainOrigin = static_cast(in_gfx_pipeline_create_info_ptr->get_tessellation_domain_origin() ); + domain_origin_state_create_info.pNext = nullptr; + domain_origin_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR; - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); + tessellation_state_create_info_chainer.append_struct(domain_origin_state_create_info); + } + else + { + /* App wants to adjust the default tessellation domain origin value, even though KHR_maintenance2 is unsupported! */ + anvil_assert(in_gfx_pipeline_create_info_ptr->get_tessellation_domain_origin() == Anvil::TessellationDomainOrigin::UPPER_LEFT); + } - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; + result_ptr = tessellation_state_create_info_chainer.create_chain(); } - stencil_op_state_ptr = (in_update_front_face_state) ? 
&pipeline_config_ptr->stencil_state_front_face - : &pipeline_config_ptr->stencil_state_back_face; - - stencil_op_state_ptr->compareMask = in_stencil_compare_mask; - stencil_op_state_ptr->compareOp = in_stencil_compare_op; - stencil_op_state_ptr->depthFailOp = in_stencil_depth_fail_op; - stencil_op_state_ptr->failOp = in_stencil_fail_op; - stencil_op_state_ptr->passOp = in_stencil_pass_op; - stencil_op_state_ptr->reference = in_stencil_reference; - stencil_op_state_ptr->writeMask = in_stencil_write_mask; - - pipeline_iterator->second->dirty = true; -end: - ; + return result_ptr; } /* Please see header for specification */ -void Anvil::GraphicsPipelineManager::set_tessellation_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_n_patch_control_points) +Anvil::StructChainUniquePtr Anvil::GraphicsPipelineManager::bake_pipeline_vertex_input_state_create_info(const Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr) const { - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); + GraphicsPipelineData current_pipeline_gfx_data (in_gfx_pipeline_create_info_ptr); + Anvil::StructChainer vertex_input_state_create_info_chainer; - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); + VkPipelineVertexInputStateCreateInfo create_info; + Anvil::StructID root_struct_id; - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } + create_info.vertexAttributeDescriptionCount = static_cast(current_pipeline_gfx_data.vk_input_attributes.size()); + create_info.vertexBindingDescriptionCount = static_cast(current_pipeline_gfx_data.vk_input_bindings.size ()); - pipeline_config_ptr->n_patch_control_points = in_n_patch_control_points; - pipeline_iterator->second->dirty = true; -end: - ; -} + create_info.flags = 0; + create_info.pNext = nullptr; + create_info.pVertexAttributeDescriptions = nullptr; /* will be patched by the chainer */ + create_info.pVertexBindingDescriptions = nullptr; /* will be patched by the chainer */ + create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::set_viewport_properties(GraphicsPipelineID in_graphics_pipeline_id, - uint32_t in_n_viewport, - float in_origin_x, - float in_origin_y, - float in_width, - float in_height, - float in_min_depth, - float in_max_depth) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); + root_struct_id = vertex_input_state_create_info_chainer.append_struct(create_info); - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); + if (create_info.vertexAttributeDescriptionCount > 0) + { + vertex_input_state_create_info_chainer.store_helper_structure_vector(current_pipeline_gfx_data.vk_input_attributes, + root_struct_id, + offsetof(VkPipelineVertexInputStateCreateInfo, pVertexAttributeDescriptions) ); + } - goto end; - } - else - { - 
pipeline_config_ptr = pipeline_config_iterator->second; + if (create_info.vertexBindingDescriptionCount > 0) + { + vertex_input_state_create_info_chainer.store_helper_structure_vector(current_pipeline_gfx_data.vk_input_bindings, + root_struct_id, + offsetof(VkPipelineVertexInputStateCreateInfo, pVertexBindingDescriptions) ); + } } - pipeline_config_ptr->viewports[in_n_viewport] = InternalViewport(in_origin_x, - in_origin_y, - in_width, - in_height, - in_min_depth, - in_max_depth); - - pipeline_iterator->second->dirty = true; -end: - ; -} - -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::toggle_alpha_to_coverage(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); - - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else + if (current_pipeline_gfx_data.input_bindings.size() > 0) { - pipeline_config_ptr = pipeline_config_iterator->second; - } + std::vector divisor_descriptors; - pipeline_config_ptr->alpha_to_coverage_enabled = in_should_enable; - pipeline_iterator->second->dirty = true; -end: - ; -} + divisor_descriptors.reserve(current_pipeline_gfx_data.input_bindings.size() ); -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::toggle_alpha_to_one(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); + for (const auto& current_binding_info : current_pipeline_gfx_data.input_bindings) + { + /* Make sure to chain VkVertexInputBindingDivisorDescriptionEXT for each binding which uses a non-default divisor value */ + if (current_binding_info.divisor != 1) + { + VkVertexInputBindingDivisorDescriptionEXT divisor_info; - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); + divisor_info.binding = current_binding_info.binding; + divisor_info.divisor = current_binding_info.divisor; - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } + divisor_descriptors.push_back(divisor_info); + } + } - pipeline_config_ptr->alpha_to_one_enabled = in_should_enable; - pipeline_iterator->second->dirty = true; -end: - ; -} + if (divisor_descriptors.size() != 0) + { + VkPipelineVertexInputDivisorStateCreateInfoEXT divisor_state_create_info; + Anvil::StructID divisor_state_struct_id; -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::toggle_depth_bounds_test(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable, - float in_min_depth_bounds, - float in_max_depth_bounds) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); + divisor_state_create_info.pNext = nullptr; + 
divisor_state_create_info.pVertexBindingDivisors = nullptr; /* NOTE: This field is going to be patched by vertex_input_state_create_info_chainer ! */ + divisor_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT; + divisor_state_create_info.vertexBindingDivisorCount = static_cast(divisor_descriptors.size() ); - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); + divisor_state_struct_id = vertex_input_state_create_info_chainer.append_struct(divisor_state_create_info); - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; + vertex_input_state_create_info_chainer.store_helper_structure_vector(divisor_descriptors, + divisor_state_struct_id, + offsetof(VkPipelineVertexInputDivisorStateCreateInfoEXT, pVertexBindingDivisors) ); + } } - pipeline_config_ptr->depth_bounds_test_enabled = in_should_enable; - pipeline_config_ptr->max_depth_bounds = in_max_depth_bounds; - pipeline_config_ptr->min_depth_bounds = in_min_depth_bounds; - pipeline_iterator->second->dirty = true; -end: - ; + return vertex_input_state_create_info_chainer.create_chain(); } /* Please see header for specification */ -void Anvil::GraphicsPipelineManager::toggle_depth_bias(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable, - float in_depth_bias_constant_factor, - float in_depth_bias_clamp, - float in_depth_bias_slope_factor) +Anvil::StructChainUniquePtr Anvil::GraphicsPipelineManager::bake_pipeline_viewport_state_create_info(Anvil::GraphicsPipelineCreateInfo* in_gfx_pipeline_create_info_ptr, + const bool& in_is_dynamic_scissor_state_enabled, + const bool& in_is_dynamic_viewport_state_enabled) const { - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); - - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); + Anvil::StructChainUniquePtr result_ptr; - goto end; - } - else + if (!in_gfx_pipeline_create_info_ptr->is_rasterizer_discard_enabled() ) { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - pipeline_config_ptr->depth_bias_constant_factor = in_depth_bias_constant_factor; - pipeline_config_ptr->depth_bias_clamp = in_depth_bias_clamp; - pipeline_config_ptr->depth_bias_enabled = in_should_enable; - pipeline_config_ptr->depth_bias_slope_factor = in_depth_bias_slope_factor; - pipeline_iterator->second->dirty = true; -end: - ; -} + uint32_t n_scissor_boxes = (in_is_dynamic_scissor_state_enabled) ? in_gfx_pipeline_create_info_ptr->get_n_dynamic_scissor_boxes() + : in_gfx_pipeline_create_info_ptr->get_n_scissor_boxes (); + uint32_t n_viewports = (in_is_dynamic_viewport_state_enabled) ? 
in_gfx_pipeline_create_info_ptr->get_n_dynamic_viewports () + : in_gfx_pipeline_create_info_ptr->get_n_viewports (); + std::vector scissor_boxes; + std::vector viewports; -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::toggle_depth_clamp(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); + anvil_assert(n_scissor_boxes == n_viewports); - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); + if (n_scissor_boxes == 0) + { + /* No scissor boxes / viewport defined. Use default settings.. */ + auto swapchain_ptr = in_gfx_pipeline_create_info_ptr->get_renderpass()->get_swapchain(); + uint32_t window_size[2] = {0}; - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } + /* NOTE: If you hit this assertion, you either need to pass a Swapchain instance when this renderpass is being created, + * *or* specify scissor & viewport information for the GFX pipeline. + */ + anvil_assert(swapchain_ptr != nullptr); + anvil_assert(n_viewports == 0); + + swapchain_ptr->get_image(0)->get_image_mipmap_size(0, /* n_mipmap */ + window_size + 0, + window_size + 1, + nullptr); /* out_opt_depth_ptr */ + + anvil_assert(window_size[0] != 0 && + window_size[1] != 0); + + in_gfx_pipeline_create_info_ptr->set_scissor_box_properties(0, /* in_n_scissor_box */ + 0, /* in_x */ + 0, /* in_y */ + window_size[0], + window_size[1]); + in_gfx_pipeline_create_info_ptr->set_viewport_properties (0, /* in_n_viewport */ + 0.0f, /* in_origin_x */ + 0.0f, /* in_origin_y */ + static_cast(window_size[0]), + static_cast(window_size[1]), + 0.0f, /* in_min_depth */ + 1.0f); /* in_max_depth */ + + n_scissor_boxes = 1; + n_viewports = 1; + } - pipeline_config_ptr->depth_clamp_enabled = in_should_enable; - pipeline_iterator->second->dirty = true; + /* Convert internal scissor box & viewport representations to Vulkan descriptors */ + if (!in_is_dynamic_scissor_state_enabled) + { + for (uint32_t n_scissor_box = 0; + n_scissor_box < n_scissor_boxes; + ++n_scissor_box) + { + uint32_t current_scissor_box_height = UINT32_MAX; + uint32_t current_scissor_box_width = UINT32_MAX; + int32_t current_scissor_box_x = INT32_MAX; + int32_t current_scissor_box_y = INT32_MAX; + VkRect2D current_scissor_box_vk; + + in_gfx_pipeline_create_info_ptr->get_scissor_box_properties(n_scissor_box, + ¤t_scissor_box_x, + ¤t_scissor_box_y, + ¤t_scissor_box_width, + ¤t_scissor_box_height); + current_scissor_box_vk.extent.height = current_scissor_box_height; + current_scissor_box_vk.extent.width = current_scissor_box_width; + current_scissor_box_vk.offset.x = current_scissor_box_x; + current_scissor_box_vk.offset.y = current_scissor_box_y; + + scissor_boxes.push_back(current_scissor_box_vk); + } + } -end: - ; -} + if (!in_is_dynamic_viewport_state_enabled) + { + for (uint32_t n_viewport = 0; + n_viewport < n_viewports; + ++n_viewport) + { + float current_viewport_height = std::numeric_limits::max(); + float current_viewport_max_depth = std::numeric_limits::max(); + float current_viewport_min_depth = std::numeric_limits::max(); + float current_viewport_origin_x = std::numeric_limits::max(); + float 
current_viewport_origin_y = std::numeric_limits::max(); + float current_viewport_width = std::numeric_limits::max(); + VkViewport current_viewport_vk; + + in_gfx_pipeline_create_info_ptr->get_viewport_properties(n_viewport, + ¤t_viewport_origin_x, + ¤t_viewport_origin_y, + ¤t_viewport_width, + ¤t_viewport_height, + ¤t_viewport_min_depth, + ¤t_viewport_max_depth); + + current_viewport_vk.height = current_viewport_height; + current_viewport_vk.maxDepth = current_viewport_max_depth; + current_viewport_vk.minDepth = current_viewport_min_depth; + current_viewport_vk.x = current_viewport_origin_x; + current_viewport_vk.y = current_viewport_origin_y; + current_viewport_vk.width = current_viewport_width; + + viewports.push_back(current_viewport_vk); + } + } -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::toggle_depth_test(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable, - VkCompareOp in_compare_op) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); + /* Bake the descriptor */ + { + Anvil::StructID root_struct_id; + Anvil::StructChainer viewport_state_create_info_chainer; - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); + { + VkPipelineViewportStateCreateInfo viewport_state_create_info; - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } + viewport_state_create_info.flags = 0; + viewport_state_create_info.pNext = nullptr; /* will be patched by the chainer below */ + viewport_state_create_info.pScissors = nullptr; /* will be patched by the chainer below */ + viewport_state_create_info.pViewports = nullptr; /* will be patched by the chainer below */ + viewport_state_create_info.scissorCount = n_scissor_boxes; + viewport_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO; + viewport_state_create_info.viewportCount = n_viewports; - pipeline_config_ptr->depth_test_enabled = in_should_enable; - pipeline_config_ptr->depth_test_compare_op = in_compare_op; - pipeline_iterator->second->dirty = true; + anvil_assert(viewport_state_create_info.scissorCount == viewport_state_create_info.viewportCount); + anvil_assert(viewport_state_create_info.scissorCount != 0); -end: - ; -} + root_struct_id = viewport_state_create_info_chainer.append_struct(viewport_state_create_info); + } -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::toggle_depth_writes(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); + if (!in_is_dynamic_scissor_state_enabled) + { + viewport_state_create_info_chainer.store_helper_structure_vector(scissor_boxes, + root_struct_id, + offsetof(VkPipelineViewportStateCreateInfo, pScissors) ); + } - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); + if (!in_is_dynamic_viewport_state_enabled) + { + 
viewport_state_create_info_chainer.store_helper_structure_vector(viewports, + root_struct_id, + offsetof(VkPipelineViewportStateCreateInfo, pViewports) ); + } - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; + result_ptr = viewport_state_create_info_chainer.create_chain(); + anvil_assert(result_ptr != nullptr); + } } - pipeline_config_ptr->depth_writes_enabled = in_should_enable; - pipeline_iterator->second->dirty = true; -end: - ; + return result_ptr; } /* Please see header for specification */ -void Anvil::GraphicsPipelineManager::toggle_dynamic_states(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable, - DynamicStateBitfield in_dynamic_state_bits) +bool Anvil::GraphicsPipelineManager::delete_pipeline(PipelineID in_pipeline_id) { - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); + bool result; - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) + lock(); { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); + result = BasePipelineManager::delete_pipeline(in_pipeline_id); - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } + if (result) + { + auto baked_pipelines_iterator = m_baked_pipelines.find(in_pipeline_id); - if (in_should_enable) - { - pipeline_config_ptr->enabled_dynamic_states |= in_dynamic_state_bits; - } - else - { - pipeline_config_ptr->enabled_dynamic_states &= ~in_dynamic_state_bits; + if (baked_pipelines_iterator != m_baked_pipelines.end() ) + { + m_baked_pipelines.erase(baked_pipelines_iterator); + } + } } + unlock(); - pipeline_iterator->second->dirty = true; -end: - ; + return result; } -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::toggle_logic_op(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable, - VkLogicOp in_logic_op) +/** Converts the internal vertex attribute descriptors to one or more VkVertexInputAttributeDescription & VkVertexInputBindingDescription + * structures. + * + * The function goes an extra mile to re-use the same vertex binding for attributes whose binding properties match. 
+ */ +void Anvil::GraphicsPipelineManager::GraphicsPipelineData::bake_vk_attributes_and_bindings() { - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); - - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - pipeline_config_ptr->logic_op = in_logic_op; - pipeline_config_ptr->logic_op_enabled = in_should_enable; - pipeline_iterator->second->dirty = true; -end: - ; -} + uint32_t n_bindings = 0; -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::toggle_primitive_restart(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); + anvil_assert(input_bindings.size () == 0); + anvil_assert(vk_input_attributes.size() == 0); + anvil_assert(vk_input_bindings.size () == 0); - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); + pipeline_create_info_ptr->get_graphics_pipeline_properties(nullptr, /* out_opt_n_scissors_ptr */ + nullptr, /* out_opt_n_viewports_ptr */ + &n_bindings, /* out_opt_n_vertex_bindings_ptr */ + nullptr, /* out_opt_renderpass_ptr */ + nullptr); /* out_opt_subpass_id_ptr */ - goto end; - } - else + for (uint32_t n_binding = 0; + n_binding < n_bindings; + ++n_binding) { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - pipeline_config_ptr->primitive_restart_enabled = in_should_enable; - pipeline_iterator->second->dirty = true; - -end: - ; -} + /* Identify the binding index we should use for the attribute. 
+ * + * If an explicit binding has been specified by the application, this step can be skipped */ + const Anvil::VertexInputAttribute* current_binding_attributes_ptr = nullptr; + uint32_t current_binding_divisor = 0; + uint32_t current_binding_index = UINT32_MAX; + uint32_t current_binding_n_attributes = 0; + Anvil::VertexInputRate current_binding_rate = Anvil::VertexInputRate::UNKNOWN; + uint32_t current_binding_stride = UINT32_MAX; + + if (!pipeline_create_info_ptr->get_vertex_binding_properties(n_binding, + ¤t_binding_index, + ¤t_binding_stride, + ¤t_binding_rate, + ¤t_binding_n_attributes, + ¤t_binding_attributes_ptr, + ¤t_binding_divisor) ) + { + anvil_assert_fail(); -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::toggle_rasterizer_discard(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); + continue; + } - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); + /* Create a new VK binding descriptor for current binding */ + VertexInputBinding new_binding_anvil; + VkVertexInputBindingDescription new_binding_vk; - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } + new_binding_vk.binding = current_binding_index; + new_binding_vk.inputRate = static_cast(current_binding_rate); + new_binding_vk.stride = current_binding_stride; - pipeline_config_ptr->rasterizer_discard_enabled = in_should_enable; - pipeline_iterator->second->dirty = true; + new_binding_anvil = VertexInputBinding(new_binding_vk, + current_binding_divisor); -end: - ; -} + input_bindings.push_back (new_binding_anvil); + vk_input_bindings.push_back(new_binding_vk); -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::toggle_sample_mask(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); + /* Good to convert the internal attribute descriptor to the Vulkan's input attribute descriptor */ + for (uint32_t n_current_attribute = 0; + n_current_attribute < current_binding_n_attributes; + ++n_current_attribute) + { + const auto& current_attribute = current_binding_attributes_ptr[n_current_attribute]; + VkVertexInputAttributeDescription current_attribute_vk; - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); + current_attribute_vk.binding = current_binding_index; + current_attribute_vk.format = static_cast(current_attribute.format); + current_attribute_vk.location = current_attribute.location; + current_attribute_vk.offset = current_attribute.offset_in_bytes; - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; + /* Cache the descriptor */ + vk_input_attributes.push_back(current_attribute_vk); + } } - - pipeline_config_ptr->sample_mask_enabled = in_should_enable; - pipeline_iterator->second->dirty = true; 
-end: - ; } /* Please see header for specification */ -void Anvil::GraphicsPipelineManager::toggle_sample_shading(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable) +Anvil::GraphicsPipelineManagerUniquePtr Anvil::GraphicsPipelineManager::create(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe, + bool in_use_pipeline_cache, + Anvil::PipelineCache* in_pipeline_cache_to_reuse_ptr) { - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); + GraphicsPipelineManagerUniquePtr result_ptr(nullptr, + std::default_delete() ); - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } + result_ptr.reset( + new Anvil::GraphicsPipelineManager(in_device_ptr, + in_mt_safe, + in_use_pipeline_cache, + in_pipeline_cache_to_reuse_ptr) + ); - pipeline_config_ptr->sample_shading_enabled = in_should_enable; - pipeline_iterator->second->dirty = true; -end: - ; + return result_ptr; } - -/* Please see header for specification */ -void Anvil::GraphicsPipelineManager::toggle_stencil_test(GraphicsPipelineID in_graphics_pipeline_id, - bool in_should_enable) -{ - std::shared_ptr pipeline_config_ptr; - auto pipeline_config_iterator = m_pipeline_configurations.find(in_graphics_pipeline_id); - auto pipeline_iterator = m_pipelines.find (in_graphics_pipeline_id); - - if (pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end() ) - { - anvil_assert(!(pipeline_iterator == m_pipelines.end() || - pipeline_config_iterator == m_pipeline_configurations.end()) ); - - goto end; - } - else - { - pipeline_config_ptr = pipeline_config_iterator->second; - } - - pipeline_config_ptr->stencil_test_enabled = in_should_enable; - pipeline_iterator->second->dirty = true; - -end: - ; -} \ No newline at end of file diff --git a/src/wrappers/image.cpp b/src/wrappers/image.cpp index 52a813bd..6ca30391 100644 --- a/src/wrappers/image.cpp +++ b/src/wrappers/image.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -20,9 +20,14 @@ // THE SOFTWARE. 
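The pipeline manager is now obtained through the create() factory shown above rather than constructed directly, with ownership expressed through a unique_ptr. A hedged usage sketch (device_ptr stands for an already-created Anvil::BaseDevice*; the header path is assumed to follow the repository's wrappers/ layout):

    #include "wrappers/graphics_pipeline_manager.h"

    /* Spawn a manager that owns its own pipeline cache and is safe to use from multiple threads. */
    auto gfx_pipeline_manager_ptr = Anvil::GraphicsPipelineManager::create(device_ptr,
                                                                           true,     /* in_mt_safe                     */
                                                                           true,     /* in_use_pipeline_cache          */
                                                                           nullptr); /* in_pipeline_cache_to_reuse_ptr */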
// +#include "misc/buffer_create_info.h" #include "misc/debug.h" #include "misc/formats.h" +#include "misc/image_create_info.h" +#include "misc/memory_block_create_info.h" #include "misc/object_tracker.h" +#include "misc/struct_chainer.h" +#include "misc/swapchain_create_info.h" #include "wrappers/buffer.h" #include "wrappers/command_buffer.h" #include "wrappers/command_pool.h" @@ -39,270 +44,106 @@ #undef max #endif +#ifdef min + #undef min +#endif + /* Please see header for specification */ -Anvil::Image::Image(std::weak_ptr in_device_ptr, - VkImageType in_type, - VkFormat in_format, - VkImageTiling in_tiling, - VkSharingMode in_sharing_mode, - VkImageUsageFlags in_usage, - uint32_t in_base_mipmap_width, - uint32_t in_base_mipmap_height, - uint32_t in_base_mipmap_depth, - uint32_t in_n_layers, - VkSampleCountFlagBits in_sample_count, - bool in_use_full_mipmap_chain, - Anvil::ImageCreateFlags in_create_flags, - Anvil::QueueFamilyBits in_queue_families, - VkImageLayout in_post_create_image_layout, - const std::vector* in_opt_mipmaps_ptr) - :CallbacksSupportProvider (IMAGE_CALLBACK_ID_COUNT), - DebugMarkerSupportProvider (in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT), - m_alignment (0), - m_create_flags (in_create_flags), - m_depth (in_base_mipmap_depth), - m_device_ptr (in_device_ptr), - m_format (in_format), - m_has_transitioned_to_post_create_layout(false), - m_height (in_base_mipmap_height), - m_image (VK_NULL_HANDLE), - m_image_owner (true), - m_is_sparse (false), - m_is_swapchain_image (false), - m_memory_types (0), - m_n_mipmaps (0), - m_n_layers (in_n_layers), - m_n_slices (0), - m_post_create_layout (in_post_create_image_layout), - m_queue_families (in_queue_families), - m_residency_scope (Anvil::SPARSE_RESIDENCY_SCOPE_UNDEFINED), - m_sample_count (in_sample_count), - m_sharing_mode (in_sharing_mode), - m_storage_size (0), - m_tiling (in_tiling), - m_type (in_type), - m_usage (static_cast(in_usage)), - m_uses_full_mipmap_chain (in_use_full_mipmap_chain), - m_width (in_base_mipmap_width), - m_memory_owner (false) +Anvil::Image::Image(Anvil::ImageCreateInfoUniquePtr in_create_info_ptr) + :CallbacksSupportProvider (IMAGE_CALLBACK_ID_COUNT), + DebugMarkerSupportProvider (in_create_info_ptr->get_device(), + Anvil::ObjectType::IMAGE), + MTSafetySupportProvider (Anvil::Utils::convert_mt_safety_enum_to_boolean(in_create_info_ptr->get_mt_safety(), + in_create_info_ptr->get_device () )), + m_has_transitioned_to_post_alloc_layout(false), + m_image (VK_NULL_HANDLE), + m_n_mipmaps (0), + m_swapchain_memory_assigned (false) { - if (in_opt_mipmaps_ptr != nullptr) - { - m_mipmaps_to_upload = *in_opt_mipmaps_ptr; - } + m_create_info_ptr = std::move(in_create_info_ptr); /* Register this instance */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_IMAGE, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::IMAGE, this); } -/* Please see header for specification */ -Anvil::Image::Image(std::weak_ptr in_device_ptr, - VkImageType in_type, - VkFormat in_format, - VkImageTiling in_tiling, - VkSharingMode in_sharing_mode, - VkImageUsageFlags in_usage, - uint32_t in_base_mipmap_width, - uint32_t in_base_mipmap_height, - uint32_t in_base_mipmap_depth, - uint32_t in_n_layers, - VkSampleCountFlagBits in_sample_count, - Anvil::QueueFamilyBits in_queue_families, - bool in_use_full_mipmap_chain, - Anvil::ImageCreateFlags in_create_flags, - VkImageLayout in_post_create_image_layout, - const std::vector* in_mipmaps_ptr) - :CallbacksSupportProvider (IMAGE_CALLBACK_ID_COUNT), - 
DebugMarkerSupportProvider (in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT), - m_alignment (0), - m_create_flags (in_create_flags), - m_depth (in_base_mipmap_depth), - m_device_ptr (in_device_ptr), - m_format (in_format), - m_has_transitioned_to_post_create_layout(false), - m_height (in_base_mipmap_height), - m_image (VK_NULL_HANDLE), - m_image_owner (true), - m_is_sparse (false), - m_is_swapchain_image (false), - m_memory_owner (true), - m_memory_types (0), - m_n_layers (in_n_layers), - m_n_mipmaps (0), - m_n_slices (0), - m_post_create_layout (in_post_create_image_layout), - m_queue_families (in_queue_families), - m_residency_scope (Anvil::SPARSE_RESIDENCY_SCOPE_UNDEFINED), - m_sample_count (in_sample_count), - m_sharing_mode (in_sharing_mode), - m_storage_size (0), - m_tiling (in_tiling), - m_type (in_type), - m_usage (static_cast(in_usage)), - m_uses_full_mipmap_chain (in_use_full_mipmap_chain), - m_width (in_base_mipmap_width) +/** Releases the Vulkan image object, as well as the memory object associated with the Image instance. */ +Anvil::Image::~Image() { - if (in_mipmaps_ptr != nullptr) + if (m_image != VK_NULL_HANDLE && + m_create_info_ptr->get_internal_type() != Anvil::ImageInternalType::SWAPCHAIN_WRAPPER) { - m_mipmaps_to_upload = *in_mipmaps_ptr; - } - - /* Register this instance */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_IMAGE, - this); -} + lock(); + { + Anvil::Vulkan::vkDestroyImage(m_device_ptr->get_device_vk(), + m_image, + nullptr /* pAllocator */); + } + unlock(); -/* Please see header for specification */ -Anvil::Image::Image(std::weak_ptr in_device_ptr, - VkImage in_image, - VkFormat in_format, - VkImageTiling in_tiling, - VkSharingMode in_sharing_mode, - VkImageUsageFlags in_usage, - uint32_t in_base_mipmap_width, - uint32_t in_base_mipmap_height, - uint32_t in_base_mipmap_depth, - uint32_t in_n_layers, - uint32_t in_n_mipmaps, - VkSampleCountFlagBits in_sample_count, - uint32_t in_n_slices, - Anvil::ImageCreateFlags in_create_flags, - Anvil::QueueFamilyBits in_queue_families) - :CallbacksSupportProvider (IMAGE_CALLBACK_ID_COUNT), - DebugMarkerSupportProvider (in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT), - m_alignment (0), - m_create_flags (in_create_flags), - m_depth (in_base_mipmap_depth), - m_device_ptr (in_device_ptr), - m_format (in_format), - m_has_transitioned_to_post_create_layout(true), - m_height (in_base_mipmap_height), - m_image (in_image), - m_image_owner (false), - m_is_sparse (false), - m_is_swapchain_image (false), - m_memory_owner (false), - m_memory_types (UINT32_MAX), - m_n_layers (in_n_layers), - m_n_mipmaps (in_n_mipmaps), - m_n_slices (in_n_slices), - m_post_create_layout (VK_IMAGE_LAYOUT_UNDEFINED), - m_queue_families (in_queue_families), - m_residency_scope (Anvil::SPARSE_RESIDENCY_SCOPE_UNDEFINED), - m_sample_count (in_sample_count), - m_sharing_mode (in_sharing_mode), - m_storage_size (UINT64_MAX), - m_tiling (in_tiling), - m_type (VK_IMAGE_TYPE_MAX_ENUM), - m_usage (static_cast(in_usage)), - m_uses_full_mipmap_chain (false), - m_width (in_base_mipmap_width) -{ - /* Register this instance */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_IMAGE, - this); -} + m_image = VK_NULL_HANDLE; + } -/** Please see header for specification */ -Anvil::Image::Image(std::weak_ptr in_device_ptr, - VkImageType in_type, - VkFormat in_format, - VkImageTiling in_tiling, - VkImageUsageFlags in_usage, - uint32_t in_base_mipmap_width, - uint32_t in_base_mipmap_height, - uint32_t in_base_mipmap_depth, 
- uint32_t in_n_layers, - VkSampleCountFlagBits in_sample_count, - Anvil::QueueFamilyBits in_queue_families, - VkSharingMode in_sharing_mode, - bool in_use_full_mipmap_chain, - Anvil::ImageCreateFlags in_create_flags, - Anvil::SparseResidencyScope in_residency_scope) - :CallbacksSupportProvider (IMAGE_CALLBACK_ID_COUNT), - DebugMarkerSupportProvider (in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT), - m_alignment (0), - m_create_flags (in_create_flags), - m_depth (in_base_mipmap_depth), - m_device_ptr (in_device_ptr), - m_format (in_format), - m_has_transitioned_to_post_create_layout(true), - m_height (in_base_mipmap_height), - m_image (VK_NULL_HANDLE), - m_image_owner (true), - m_is_sparse (true), - m_is_swapchain_image (false), - m_memory_owner (false), - m_memory_types (UINT32_MAX), - m_n_layers (in_n_layers), - m_n_mipmaps (0), - m_n_slices (0), - m_post_create_layout (VK_IMAGE_LAYOUT_UNDEFINED), - m_queue_families (in_queue_families), - m_residency_scope (in_residency_scope), - m_sample_count (in_sample_count), - m_sharing_mode (in_sharing_mode), - m_storage_size (UINT64_MAX), - m_tiling (in_tiling), - m_type (in_type), - m_usage (static_cast(in_usage)), - m_uses_full_mipmap_chain (in_use_full_mipmap_chain), - m_width (in_base_mipmap_width) -{ - /* Register this instance */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_IMAGE, - this); + /* Unregister the object */ + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::IMAGE, + this); } /** Please see header for specification */ -void Anvil::Image::change_image_layout(std::shared_ptr in_queue_ptr, - VkAccessFlags in_src_access_mask, - VkImageLayout in_src_layout, - VkAccessFlags in_dst_access_mask, - VkImageLayout in_dst_layout, - const VkImageSubresourceRange& in_subresource_range, - const uint32_t in_opt_n_wait_semaphores, - const VkPipelineStageFlags* in_opt_wait_dst_stage_mask_ptrs, - const std::shared_ptr* in_opt_wait_semaphore_ptrs, - const uint32_t in_opt_n_set_semaphores, - const std::shared_ptr* in_opt_set_semaphore_ptrs) +void Anvil::Image::change_image_layout(Anvil::Queue* in_queue_ptr, + Anvil::AccessFlags in_src_access_mask, + Anvil::ImageLayout in_src_layout, + Anvil::AccessFlags in_dst_access_mask, + Anvil::ImageLayout in_dst_layout, + const Anvil::ImageSubresourceRange& in_subresource_range, + const uint32_t in_opt_n_wait_semaphores, + const Anvil::PipelineStageFlags* in_opt_wait_dst_stage_mask_ptrs, + Anvil::Semaphore* const* in_opt_wait_semaphore_ptrs, + const uint32_t in_opt_n_set_semaphores, + Anvil::Semaphore* const* in_opt_set_semaphore_ptrs) { - std::shared_ptr device_locked_ptr (m_device_ptr); - Anvil::QueueFamilyType in_queue_family_type (Anvil::QUEUE_FAMILY_TYPE_UNDEFINED); - const uint32_t in_queue_family_index (in_queue_ptr->get_queue_family_index() ); - auto mem_block_ptr (get_memory_block() ); - std::shared_ptr transition_command_buffer_ptr; + const Anvil::DeviceType device_type (m_device_ptr->get_type() ); + const uint32_t in_queue_family_index (in_queue_ptr->get_queue_family_index() ); + auto mem_block_ptr (get_memory_block() ); + Anvil::PrimaryCommandBufferUniquePtr transition_command_buffer_ptr; /* mem_block_ptr is only used here in order to trigger memory allocator bakes if there are any pending. Other than that, * we are not going to be accessing it in this func. 
*/ ANVIL_REDUNDANT_VARIABLE(mem_block_ptr); - in_queue_family_type = device_locked_ptr->get_queue_family_type(in_queue_ptr->get_queue_family_index() ); - transition_command_buffer_ptr = device_locked_ptr->get_command_pool (in_queue_family_type)->alloc_primary_level_command_buffer(); + transition_command_buffer_ptr = m_device_ptr->get_command_pool_for_queue_family_index(in_queue_ptr->get_queue_family_index())->alloc_primary_level_command_buffer(); transition_command_buffer_ptr->start_recording(true, /* one_time_submit */ false); /* simultaneous_use_allowed */ { - Anvil::ImageBarrier image_barrier(in_src_access_mask, - in_dst_access_mask, - true, /* in_by_region_barrier */ - in_src_layout, - in_dst_layout, - (m_sharing_mode == VK_SHARING_MODE_CONCURRENT) ? VK_QUEUE_FAMILY_IGNORED : in_queue_family_index, - (m_sharing_mode == VK_SHARING_MODE_CONCURRENT) ? VK_QUEUE_FAMILY_IGNORED : in_queue_family_index, - shared_from_this(), - in_subresource_range); - - transition_command_buffer_ptr->record_pipeline_barrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, - VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, - VK_FALSE, /* in_by_region */ + const auto sharing_mode (m_create_info_ptr->get_sharing_mode() ); + const auto queue_fam_index((sharing_mode == Anvil::SharingMode::CONCURRENT) ? VK_QUEUE_FAMILY_IGNORED : in_queue_family_index); + + Anvil::ImageBarrier image_barrier (in_src_access_mask, + in_dst_access_mask, + in_src_layout, + in_dst_layout, + queue_fam_index, + queue_fam_index, + this, + in_subresource_range); + + PipelineStageFlags src_stage_mask = PipelineStageFlagBits::ALL_COMMANDS_BIT; + PipelineStageFlags dst_stage_mask = PipelineStageFlagBits::ALL_COMMANDS_BIT; + + if ((in_src_access_mask & AccessFlagBits::HOST_WRITE_BIT) != 0 || + (in_src_access_mask & AccessFlagBits::HOST_READ_BIT) != 0) + src_stage_mask |= PipelineStageFlagBits::HOST_BIT; + + if ((in_dst_access_mask & AccessFlagBits::HOST_WRITE_BIT) != 0 || + (in_dst_access_mask & AccessFlagBits::HOST_READ_BIT) != 0) + dst_stage_mask |= PipelineStageFlagBits::HOST_BIT; + + transition_command_buffer_ptr->record_pipeline_barrier(src_stage_mask, + dst_stage_mask, + Anvil::DependencyFlagBits::NONE, 0, /* in_memory_barrier_count */ nullptr, /* in_memory_barrier_ptrs */ 0, /* in_buffer_memory_barrier_count */ @@ -312,221 +153,86 @@ void Anvil::Image::change_image_layout(std::shared_ptr } transition_command_buffer_ptr->stop_recording(); - in_queue_ptr->submit_command_buffer_with_signal_wait_semaphores(transition_command_buffer_ptr, - in_opt_n_set_semaphores, - in_opt_set_semaphore_ptrs, - in_opt_n_wait_semaphores, - in_opt_wait_semaphore_ptrs, - in_opt_wait_dst_stage_mask_ptrs, - true /* should_block */); -} - -/** Please see header for specification */ -std::shared_ptr Anvil::Image::create_nonsparse(std::weak_ptr in_device_ptr, - VkImageType in_type, - VkFormat in_format, - VkImageTiling in_tiling, - VkImageUsageFlags in_usage, - uint32_t in_base_mipmap_width, - uint32_t in_base_mipmap_height, - uint32_t in_base_mipmap_depth, - uint32_t in_n_layers, - VkSampleCountFlagBits in_sample_count, - Anvil::QueueFamilyBits in_queue_families, - VkSharingMode in_sharing_mode, - bool in_use_full_mipmap_chain, - ImageCreateFlags in_create_flags, - VkImageLayout in_post_create_image_layout, - const std::vector* in_opt_mipmaps_ptr) -{ - std::shared_ptr result_ptr( - new Image(in_device_ptr, - in_type, - in_format, - in_tiling, - in_sharing_mode, - in_usage, - in_base_mipmap_width, - in_base_mipmap_height, - in_base_mipmap_depth, - in_n_layers, - in_sample_count, - 
in_use_full_mipmap_chain, - in_create_flags, - in_queue_families, - in_post_create_image_layout, - in_opt_mipmaps_ptr) - ); - - result_ptr->init(in_use_full_mipmap_chain, - 0, /* in_memory_features */ - nullptr); /* start_image_layout_ptr - irrelevant */ + if (device_type == Anvil::DeviceType::SINGLE_GPU) + { + Anvil::CommandBufferBase* cmd_buffer_ptr = transition_command_buffer_ptr.get(); + + in_queue_ptr->submit( + Anvil::SubmitInfo::create(1, /* in_n_cmd_bufers */ + &cmd_buffer_ptr, + in_opt_n_set_semaphores, + in_opt_set_semaphore_ptrs, + in_opt_n_wait_semaphores, + in_opt_wait_semaphore_ptrs, + in_opt_wait_dst_stage_mask_ptrs, + true /* should_block */) + ); + } + else + { + Anvil::CommandBufferMGPUSubmission cmd_buffer_submission; + const Anvil::MGPUDevice* mgpu_device_ptr(dynamic_cast(m_device_ptr) ); - return result_ptr; -} + cmd_buffer_submission.cmd_buffer_ptr = transition_command_buffer_ptr.get (); + cmd_buffer_submission.device_mask = (1 << mgpu_device_ptr->get_n_physical_devices()) - 1; -/** Please see header for specification */ -std::shared_ptr Anvil::Image::create_nonsparse(std::weak_ptr in_device_ptr, - VkImageType in_type, - VkFormat in_format, - VkImageTiling in_tiling, - VkImageUsageFlags in_usage, - uint32_t in_base_mipmap_width, - uint32_t in_base_mipmap_height, - uint32_t in_base_mipmap_depth, - uint32_t in_n_layers, - VkSampleCountFlagBits in_sample_count, - Anvil::QueueFamilyBits in_queue_families, - VkSharingMode in_sharing_mode, - bool in_use_full_mipmap_chain, - MemoryFeatureFlags in_memory_features, - ImageCreateFlags in_create_flags, - VkImageLayout in_post_create_image_layout, - const std::vector* in_mipmaps_ptr) -{ - const VkImageLayout start_image_layout = (in_mipmaps_ptr != nullptr && in_mipmaps_ptr->size() > 0) ? VK_IMAGE_LAYOUT_PREINITIALIZED - : VK_IMAGE_LAYOUT_UNDEFINED; - - std::shared_ptr new_image_ptr( - new Anvil::Image(in_device_ptr, - in_type, - in_format, - in_tiling, - in_sharing_mode, - in_usage, - in_base_mipmap_width, - in_base_mipmap_height, - in_base_mipmap_depth, - in_n_layers, - in_sample_count, - in_queue_families, - in_use_full_mipmap_chain, - in_create_flags, - in_post_create_image_layout, - in_mipmaps_ptr) - ); - - new_image_ptr->init(in_use_full_mipmap_chain, - in_memory_features, - &start_image_layout); - - return new_image_ptr; -} + /* TODO */ + anvil_assert(in_opt_n_set_semaphores == 0); + anvil_assert(in_opt_n_wait_semaphores == 0); -/** Please see header for specification */ -std::shared_ptr Anvil::Image::create_nonsparse(std::weak_ptr in_device_ptr, - const VkSwapchainCreateInfoKHR& in_swapchain_create_info, - VkImage in_image) -{ - std::shared_ptr device_locked_ptr(in_device_ptr); - - std::shared_ptr new_image_ptr( - new Anvil::Image(in_device_ptr, - in_image, - in_swapchain_create_info.imageFormat, - VK_IMAGE_TILING_OPTIMAL, - in_swapchain_create_info.imageSharingMode, - in_swapchain_create_info.imageUsage, - in_swapchain_create_info.imageExtent.width, - in_swapchain_create_info.imageExtent.height, - 1, /* base_mipmap_depth */ - in_swapchain_create_info.imageArrayLayers, - 1, /* n_mipmaps */ - VK_SAMPLE_COUNT_1_BIT, - 1, /* n_slices */ - 0, /* in_create_flags */ - Anvil::QUEUE_FAMILY_TYPE_UNDEFINED) - ); - - new_image_ptr->m_memory_types = 0; - new_image_ptr->m_storage_size = 0; - new_image_ptr->m_type = VK_IMAGE_TYPE_2D; - new_image_ptr->m_is_swapchain_image = true; - - new_image_ptr->init_mipmap_props(); - - return new_image_ptr; + in_queue_ptr->submit( + Anvil::SubmitInfo::create_execute(&cmd_buffer_submission, + 1, /* 
in_n_command_buffer_submissions */ + true /* should_block */) + ); + } } /** Please see header for specification */ -std::shared_ptr Anvil::Image::create_sparse(std::weak_ptr in_device_ptr, - VkImageType in_type, - VkFormat in_format, - VkImageTiling in_tiling, - VkImageUsageFlags in_usage, - uint32_t in_base_mipmap_width, - uint32_t in_base_mipmap_height, - uint32_t in_base_mipmap_depth, - uint32_t in_n_layers, - VkSampleCountFlagBits in_sample_count, - Anvil::QueueFamilyBits in_queue_families, - VkSharingMode in_sharing_mode, - bool in_use_full_mipmap_chain, - ImageCreateFlags in_create_flags, - Anvil::SparseResidencyScope in_residency_scope, - VkImageLayout in_initial_layout) +Anvil::ImageUniquePtr Anvil::Image::create(Anvil::ImageCreateInfoUniquePtr in_create_info_ptr) { - std::shared_ptr device_locked_ptr(in_device_ptr); - const VkPhysicalDeviceFeatures& features (device_locked_ptr->get_physical_device_features() ); - std::shared_ptr result_ptr; - - anvil_assert(in_initial_layout == VK_IMAGE_LAYOUT_PREINITIALIZED || - in_initial_layout == VK_IMAGE_LAYOUT_UNDEFINED); + ImageUniquePtr result_ptr(new Image(std::move(in_create_info_ptr) ), + std::default_delete() ); - /* Make sure requested functionality is supported by the device */ - if (!features.sparseBinding) + if (result_ptr != nullptr) { - anvil_assert(features.sparseBinding); + const auto image_type = result_ptr->m_create_info_ptr->get_internal_type(); - goto end; - } - - if (in_residency_scope != Anvil::SPARSE_RESIDENCY_SCOPE_NONE) - { - /* For sparse residency, we need to verify the device can actually support the requested configuration */ - std::vector result_properties; - - device_locked_ptr->get_physical_device_sparse_image_format_properties(in_format, - in_type, - in_sample_count, - in_usage, - in_tiling, - result_properties); - - if (result_properties.size() == 0) - { - goto end; - } - - switch (in_type) + switch (image_type) { - case VK_IMAGE_TYPE_1D: - { - /* Not supported in Vulkan 1.0 */ - anvil_assert_fail(); - - goto end; - } - - case VK_IMAGE_TYPE_2D: + case Anvil::ImageInternalType::ALLOC: + case Anvil::ImageInternalType::NO_ALLOC: + case Anvil::ImageInternalType::PEER_NO_ALLOC: { - if (!features.sparseResidencyImage2D) + if (!result_ptr->init() ) { - anvil_assert_fail(); - - goto end; + result_ptr.reset(); } break; } - case VK_IMAGE_TYPE_3D: + case Anvil::ImageInternalType::SWAPCHAIN_WRAPPER: { - if (!features.sparseResidencyImage3D) - { - anvil_assert_fail(); + result_ptr->m_image = result_ptr->m_create_info_ptr->get_swapchain_image(); + result_ptr->m_n_mipmaps = 1; + + /* NOTE: Swapchain never uses YUV image format. 
*/ + auto& plane_props = result_ptr->m_plane_index_to_memory_properties_map[0]; + + plane_props.alignment = 0; + plane_props.memory_types = 0; + plane_props.prefers_dedicated_allocation = false; + plane_props.requires_dedicated_allocation = false; + plane_props.storage_size = 0; - goto end; + anvil_assert(result_ptr->m_image != VK_NULL_HANDLE); + + result_ptr->init_mipmap_props(); + + if ((result_ptr->m_create_info_ptr->get_swapchain()->get_create_info_ptr()->get_flags() & Anvil::SwapchainCreateFlagBits::SPLIT_INSTANCE_BIND_REGIONS_BIT) != 0) + { + result_ptr->init_sfr_tile_size(); } break; @@ -536,114 +242,163 @@ std::shared_ptr Anvil::Image::create_sparse(std::weak_ptr physical_device_indices = result_ptr->m_create_info_ptr->get_device_indices(); + const auto& sfr_rects = result_ptr->m_create_info_ptr->get_sfr_rects(); + const auto n_sfr_rects = static_cast(sfr_rects.size() ); + + result_ptr->set_swapchain_memory_internal(result_ptr->m_create_info_ptr->get_swapchain_image_index(), + n_sfr_rects, + (n_sfr_rects > 0) ? &sfr_rects.at(0) : nullptr, + static_cast(physical_device_indices.size() ), + (physical_device_indices.size() > 0) ? &physical_device_indices.at(0) : nullptr); + } + } - case VK_SAMPLE_COUNT_2_BIT: - { - if (!features.sparseResidency2Samples) - { - anvil_assert(features.sparseResidency2Samples); + return result_ptr; +} - goto end; - } +/** TODO */ +bool Anvil::Image::do_sanity_checks_for_physical_device_binding(const Anvil::MemoryBlock* in_memory_block_ptr, + uint32_t in_n_physical_devices) const +{ + const uint32_t memory_block_type_index(in_memory_block_ptr->get_create_info_ptr()->get_memory_type_index() ); + const Anvil::MGPUDevice* mgpu_device_ptr (dynamic_cast(m_device_ptr) ); + bool result (false); - break; - } + if (!m_device_ptr->is_extension_enabled(VK_KHR_DEVICE_GROUP_EXTENSION_NAME) ) + { + anvil_assert(m_device_ptr->is_extension_enabled(VK_KHR_DEVICE_GROUP_EXTENSION_NAME) ); - case VK_SAMPLE_COUNT_4_BIT: - { - if (!features.sparseResidency4Samples) - { - anvil_assert(features.sparseResidency4Samples); + goto end; + } - goto end; - } + if (in_n_physical_devices != mgpu_device_ptr->get_n_physical_devices() ) + { + anvil_assert(in_n_physical_devices == mgpu_device_ptr->get_n_physical_devices()); - break; - } + goto end; + } - case VK_SAMPLE_COUNT_8_BIT: - { - if (!features.sparseResidency8Samples) - { - anvil_assert(features.sparseResidency8Samples); + if ((m_device_ptr->get_physical_device_memory_properties().types[memory_block_type_index].heap_ptr->flags & Anvil::MemoryHeapFlagBits::MULTI_INSTANCE_BIT_KHR) == 0) + { + anvil_assert((m_device_ptr->get_physical_device_memory_properties().types[memory_block_type_index].heap_ptr->flags & Anvil::MemoryHeapFlagBits::MULTI_INSTANCE_BIT_KHR) != 0); - goto end; - } + goto end; + } - break; - } + result = true; - case VK_SAMPLE_COUNT_16_BIT: - { - if (!features.sparseResidency4Samples) - { - anvil_assert(features.sparseResidency16Samples); +end: + return result; +} - goto end; - } +/** TODO */ +bool Anvil::Image::do_sanity_checks_for_sfr_binding(uint32_t in_n_SFR_rects, + const VkRect2D* in_SFRs_ptr) const +{ + const Anvil::DeviceType device_type (m_device_ptr->get_type() ); + bool result (false); + const Anvil::SparseImageFormatProperties* sparse_image_format_props_ptr(nullptr); + std::vector sparse_image_format_props_vec; - break; - } + anvil_assert( m_create_info_ptr->get_internal_type() == Anvil::ImageInternalType::NO_ALLOC && + (m_create_info_ptr->get_create_flags () & Anvil::ImageCreateFlagBits::SPARSE_BINDING_BIT) == 
0); + anvil_assert(m_mipmap_props.size () > 0); + anvil_assert(m_memory_blocks_owned.size () == 0); - default: - { - anvil_assert_fail(); + if (!m_device_ptr->is_extension_enabled(VK_KHR_DEVICE_GROUP_EXTENSION_NAME) ) + { + anvil_assert(m_device_ptr->is_extension_enabled(VK_KHR_DEVICE_GROUP_EXTENSION_NAME) ); - goto end; - } - } + goto end; + } + + if (device_type != Anvil::DeviceType::MULTI_GPU) + { + anvil_assert(device_type == Anvil::DeviceType::MULTI_GPU); + + goto end; + } + + if (!m_device_ptr->get_physical_device_sparse_image_format_properties(m_create_info_ptr->get_format (), + m_create_info_ptr->get_type (), + m_create_info_ptr->get_sample_count(), + m_create_info_ptr->get_usage_flags (), + m_create_info_ptr->get_tiling (), + sparse_image_format_props_vec) ) + { + anvil_assert_fail(); + + goto end; + } + + if (sparse_image_format_props_vec.size() != 1) + { + anvil_assert(sparse_image_format_props_vec.size() == 1); + + goto end; } - /* Spawn a new Image instance and initialize it */ - result_ptr.reset( - new Anvil::Image(in_device_ptr, - in_type, - in_format, - in_tiling, - in_usage, - in_base_mipmap_width, - in_base_mipmap_height, - in_base_mipmap_depth, - in_n_layers, - in_sample_count, - in_queue_families, - in_sharing_mode, - in_use_full_mipmap_chain, - in_create_flags, - in_residency_scope) - ); - - result_ptr->init(in_use_full_mipmap_chain, - 0, /* in_memory_features */ - &in_initial_layout); /* start_image_layout - irrelevant */ + if (!m_device_ptr->get_physical_device_sparse_image_format_properties(m_create_info_ptr->get_format (), + m_create_info_ptr->get_type (), + m_create_info_ptr->get_sample_count(), + m_create_info_ptr->get_usage_flags (), + m_create_info_ptr->get_tiling (), + sparse_image_format_props_vec) ) + { + anvil_assert_fail(); + goto end; + } + + sparse_image_format_props_ptr = &sparse_image_format_props_vec.at(0); + + for (uint32_t n_rect = 0; + n_rect < in_n_SFR_rects; + ++n_rect) + { + const auto& current_rect = in_SFRs_ptr[n_rect]; + + if ((current_rect.offset.x % sparse_image_format_props_ptr->image_granularity.width) != 0 || + (current_rect.offset.y % sparse_image_format_props_ptr->image_granularity.height) != 0) + { + anvil_assert_fail(); + + goto end; + } + + if (((current_rect.offset.x + current_rect.extent.width) % sparse_image_format_props_ptr->image_granularity.width) != 0 && + ((current_rect.offset.x + current_rect.extent.height) % sparse_image_format_props_ptr->image_granularity.height) != 0) + { + anvil_assert_fail(); + + goto end; + } + } + + result = true; end: - return result_ptr; + return result; } /** Please see header for specification */ -bool Anvil::Image::get_aspect_subresource_layout(VkImageAspectFlags in_aspect, - uint32_t in_n_layer, - uint32_t in_n_mip, - VkSubresourceLayout* out_subresource_layout_ptr) const +bool Anvil::Image::get_aspect_subresource_layout(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_mip, + Anvil::SubresourceLayout* out_subresource_layout_ptr) const { - auto aspect_iterator = m_aspects.find(static_cast(in_aspect) ); + auto aspect_iterator = m_linear_image_aspect_data.find(in_aspect); bool result = false; - anvil_assert(m_tiling == VK_IMAGE_TILING_LINEAR); + anvil_assert(m_create_info_ptr->get_tiling() == Anvil::ImageTiling::LINEAR); - if (aspect_iterator != m_aspects.end() ) + if (aspect_iterator != m_linear_image_aspect_data.end() ) { auto layer_mip_iterator = aspect_iterator->second.find(LayerMipKey(in_n_layer, in_n_mip) ); @@ -658,13 +413,17 @@ bool 
Anvil::Image::get_aspect_subresource_layout(VkImageAspectFlags in_aspect, } /** Please see header for specification */ -std::shared_ptr Anvil::Image::get_memory_block() +Anvil::MemoryBlock* Anvil::Image::get_memory_block(const uint32_t& in_n_plane) { - bool is_callback_needed = false; + if(in_n_plane == 0 && m_memory_block_external) + return m_memory_block_external; + bool is_callback_needed = false; + const auto is_prt = (m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0; + const auto uses_sparse_bindings = (m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::SPARSE_BINDING_BIT) != 0; - if (m_is_sparse) + if (uses_sparse_bindings) { - IsImageMemoryAllocPendingQueryCallbackArgument callback_arg(shared_from_this() ); + IsImageMemoryAllocPendingQueryCallbackArgument callback_arg(this); callback(IMAGE_CALLBACK_ID_IS_ALLOC_PENDING, &callback_arg); @@ -672,350 +431,570 @@ std::shared_ptr Anvil::Image::get_memory_block() is_callback_needed = callback_arg.result; } else - if (m_memory_block_ptr == nullptr) + if (m_memory_blocks_owned.size() == 0) { is_callback_needed = true; } if (is_callback_needed) { - OnMemoryBlockNeededForImageCallbackArgument callback_argument(shared_from_this() ); + OnMemoryBlockNeededForImageCallbackArgument callback_argument(this); callback_safe(IMAGE_CALLBACK_ID_MEMORY_BLOCK_NEEDED, &callback_argument); } - return m_memory_block_ptr; + if (uses_sparse_bindings) + { + if (!is_prt) + { + const auto& plane_memory_reqs = m_plane_index_to_memory_properties_map.at(in_n_plane); + + anvil_assert(plane_memory_reqs.page_tracker_ptr != nullptr); + + return plane_memory_reqs.page_tracker_ptr->get_memory_block(0); + } + else + { + /* More than just one memory block may exist. You need to use page tracker manually. */ + return nullptr; + } + } + else + { + /* TODO: Refactoring needed for better support of disjoint YUV images .. */ + anvil_assert((m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::CREATE_DISJOINT_BIT) == 0); + + return (m_memory_blocks_owned.size() > 0) ? m_memory_blocks_owned.at(0).get() + : nullptr; + } } /** Private function which initializes the Image instance. * * For argument discussion, please see documentation of the constructors. **/ -void Anvil::Image::init(bool in_use_full_mipmap_chain, - Anvil::MemoryFeatureFlags in_memory_features, - const VkImageLayout* in_start_image_layout_ptr) +#include +bool Anvil::Image::init() { - std::vector aspects_used; - std::shared_ptr device_locked_ptr(m_device_ptr); - VkImageFormatProperties image_format_props; - uint32_t max_dimension; - uint32_t n_queue_family_indices = 0; - uint32_t queue_family_indices[3]; - VkResult result (VK_ERROR_INITIALIZATION_FAILED); - bool result_bool(false); - - ANVIL_REDUNDANT_VARIABLE(result); - ANVIL_REDUNDANT_VARIABLE(result_bool); - - if (m_memory_owner && - (in_memory_features & MEMORY_FEATURE_FLAG_MAPPABLE) == 0) - { - anvil_assert((in_memory_features & MEMORY_FEATURE_FLAG_HOST_COHERENT) == 0); - } + VkResult result = VK_ERROR_INITIALIZATION_FAILED; + bool result_bool = true; - /* Determine the maximum dimension size. 
*/ - max_dimension = m_width; - - if (max_dimension < m_height) + /* Sanity checks */ { - max_dimension = m_height; - } + const auto memory_features = m_create_info_ptr->get_memory_features(); - if (max_dimension < m_depth) - { - max_dimension = m_depth; - } + if ((memory_features & Anvil::MemoryFeatureFlagBits::MAPPABLE_BIT) == 0) + { + anvil_assert((memory_features & Anvil::MemoryFeatureFlagBits::HOST_COHERENT_BIT) == 0); + } - /* Form the queue family array */ - Anvil::Utils::convert_queue_family_bits_to_family_indices(m_device_ptr, - m_queue_families, - queue_family_indices, - &n_queue_family_indices); + if ( m_create_info_ptr->get_internal_type() == Anvil::ImageInternalType::SWAPCHAIN_WRAPPER && + (m_create_info_ptr->get_swapchain()->get_create_info_ptr()->get_flags() & Anvil::SwapchainCreateFlagBits::SPLIT_INSTANCE_BIND_REGIONS_BIT) != 0) + { + anvil_assert(!m_create_info_ptr->uses_full_mipmap_chain() ); + anvil_assert(m_create_info_ptr->get_n_layers () == 1); + anvil_assert(m_create_info_ptr->get_type () == Anvil::ImageType::_2D); + anvil_assert(m_create_info_ptr->get_tiling () == Anvil::ImageTiling::OPTIMAL); + } - /* Is the requested texture size valid? */ - result_bool = device_locked_ptr->get_physical_device_image_format_properties(m_format, - m_type, - m_tiling, - m_usage, - 0, /* flags */ - image_format_props); - anvil_assert(result_bool); + if ( (m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::CUBE_COMPATIBLE_BIT) != 0) + { + anvil_assert(m_create_info_ptr->get_type () == Anvil::ImageType::_2D); + anvil_assert((m_create_info_ptr->get_n_layers() % 6) == 0); + } - anvil_assert(m_width >= 1 && - m_height >= 1 && - m_depth >= 1); + if ( (m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::_2D_ARRAY_COMPATIBLE_BIT) != 0) + { + anvil_assert(m_device_ptr->get_extension_info()->khr_maintenance1() ); + anvil_assert(m_create_info_ptr->get_type () == Anvil::ImageType::_3D); + } - anvil_assert(m_width <= (uint32_t) image_format_props.maxExtent.width); + if ( (m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::SPLIT_INSTANCE_BIND_REGIONS_BIT) != 0) + { + anvil_assert(m_device_ptr->get_extension_info()->khr_bind_memory2() ); + } - if (m_height > 1) - { - anvil_assert(m_height <= (uint32_t) image_format_props.maxExtent.height); + if ( (m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::ALIAS_BIT) != 0) + { + anvil_assert(m_device_ptr->get_extension_info()->khr_bind_memory2() ); + } } - if (m_depth > 1) + /* If application intends to fill the image with data at bring-up time, make sure the image supports transfer_dst usage. */ + if (m_create_info_ptr->get_mipmaps_to_upload().size() > 0) { - anvil_assert(m_depth <= (uint32_t) image_format_props.maxExtent.depth); + m_create_info_ptr->set_usage_flags(m_create_info_ptr->get_usage_flags() | Anvil::ImageUsageFlagBits::TRANSFER_DST_BIT); } - anvil_assert(m_n_layers >= 1); - - if (m_n_layers > 1) + /* Cache the number of mips we want the image to use. */ { - anvil_assert(m_n_layers <= (uint32_t) image_format_props.maxArrayLayers); - } + const auto max_dimension = std::max(std::max(m_create_info_ptr->get_base_mip_depth(), + m_create_info_ptr->get_base_mip_height() ), + m_create_info_ptr->get_base_mip_width() ); - /* If multisample image is requested, make sure the number of samples is supported. 
*/ - anvil_assert(m_sample_count >= 1); - - if (m_sample_count > 1) - { - anvil_assert((image_format_props.sampleCounts & m_sample_count) != 0); + m_n_mipmaps = static_cast((m_create_info_ptr->uses_full_mipmap_chain() ) ? (1 + log2(max_dimension)) + : 1); } /* Create the image object */ - VkImageCreateInfo image_create_info; - VkImageCreateFlags image_flags = 0; - - if ( (m_create_flags & Anvil::IMAGE_CREATE_FLAG_CUBE_COMPATIBLE_BIT) != 0) { - anvil_assert(m_type == VK_IMAGE_TYPE_2D); - anvil_assert((m_n_layers % 6) == 0); + Anvil::StructChainer struct_chainer; - image_flags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT; - } + { + VkImageCreateInfo image_create_info; + uint32_t n_queue_family_indices = 0; + uint32_t queue_family_indices[3]; + + Anvil::Utils::convert_queue_family_bits_to_family_indices(m_device_ptr, + m_create_info_ptr->get_queue_families(), + queue_family_indices, + &n_queue_family_indices); + + image_create_info.arrayLayers = m_create_info_ptr->get_n_layers (); + image_create_info.extent.depth = m_create_info_ptr->get_base_mip_depth (); + image_create_info.extent.height = m_create_info_ptr->get_base_mip_height(); + image_create_info.extent.width = m_create_info_ptr->get_base_mip_width (); + image_create_info.flags = m_create_info_ptr->get_create_flags ().get_vk(); + image_create_info.format = static_cast (m_create_info_ptr->get_format () ); + image_create_info.imageType = static_cast (m_create_info_ptr->get_type () ); + image_create_info.initialLayout = static_cast(m_create_info_ptr->get_post_create_image_layout() ); + image_create_info.mipLevels = m_n_mipmaps; + image_create_info.pNext = nullptr; + image_create_info.pQueueFamilyIndices = queue_family_indices; + image_create_info.queueFamilyIndexCount = n_queue_family_indices; + image_create_info.samples = static_cast(m_create_info_ptr->get_sample_count() ); + image_create_info.sharingMode = static_cast (m_create_info_ptr->get_sharing_mode() ); + image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; + image_create_info.tiling = static_cast(m_create_info_ptr->get_tiling() ); + image_create_info.usage = m_create_info_ptr->get_usage_flags().get_vk(); + + if (m_create_info_ptr->get_external_memory_handle_types() != 0) + { + anvil_assert(static_cast(image_create_info.initialLayout) == Anvil::ImageLayout::UNDEFINED); + } - if ( (m_create_flags & Anvil::IMAGE_CREATE_FLAG_MUTABLE_FORMAT_BIT) != 0) - { - image_flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT; - } + struct_chainer.append_struct(image_create_info); + } - if ( (m_create_flags & Anvil::IMAGE_CREATE_FLAG_2D_ARRAY_COMPATIBLE_BIT) != 0) - { - anvil_assert(device_locked_ptr->is_khr_maintenance1_extension_enabled() ); - anvil_assert(m_type == VK_IMAGE_TYPE_3D); + if (m_create_info_ptr->get_external_memory_handle_types() != 0) + { + VkExternalMemoryImageCreateInfoKHR external_memory_image_create_info; + const auto handle_types = m_create_info_ptr->get_external_memory_handle_types(); - image_flags |= VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR; - } + external_memory_image_create_info.handleTypes = handle_types.get_vk(); + external_memory_image_create_info.pNext = nullptr; + external_memory_image_create_info.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR; + + struct_chainer.append_struct(external_memory_image_create_info); + } - if (m_is_sparse) - { - /* Convert residency scope to Vulkan image create flags */ - switch (m_residency_scope) { - case Anvil::SPARSE_RESIDENCY_SCOPE_ALIASED: image_flags |= VK_IMAGE_CREATE_SPARSE_ALIASED_BIT | 
VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT | VK_IMAGE_CREATE_SPARSE_BINDING_BIT; break; - case Anvil::SPARSE_RESIDENCY_SCOPE_NONALIASED: image_flags |= VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT | VK_IMAGE_CREATE_SPARSE_BINDING_BIT; break; - case Anvil::SPARSE_RESIDENCY_SCOPE_NONE: image_flags |= VK_IMAGE_CREATE_SPARSE_BINDING_BIT; break; + const Anvil::Format* compatible_image_view_formats = nullptr; + uint32_t n_compatible_image_view_formats = 0; - default: + m_create_info_ptr->get_image_view_formats(&n_compatible_image_view_formats, + &compatible_image_view_formats); + + if (n_compatible_image_view_formats > 0) { - anvil_assert_fail(); + VkImageFormatListCreateInfoKHR image_format_list_create_info; + + anvil_assert((m_create_info_ptr->get_create_flags () & Anvil::ImageCreateFlagBits::MUTABLE_FORMAT_BIT) != 0); + anvil_assert(m_device_ptr->get_extension_info()->khr_image_format_list() ); + + image_format_list_create_info.pNext = nullptr; + image_format_list_create_info.pViewFormats = reinterpret_cast(compatible_image_view_formats); + image_format_list_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR; + image_format_list_create_info.viewFormatCount = n_compatible_image_view_formats; + + struct_chainer.append_struct(image_format_list_create_info); } } - } - image_create_info.arrayLayers = m_n_layers; - image_create_info.extent.depth = m_depth; - image_create_info.extent.height = m_height; - image_create_info.extent.width = m_width; - image_create_info.flags = image_flags; - image_create_info.format = m_format; - image_create_info.imageType = m_type; - image_create_info.initialLayout = (in_start_image_layout_ptr != nullptr) ? *in_start_image_layout_ptr - : (m_mipmaps_to_upload.size() > 0) ? VK_IMAGE_LAYOUT_PREINITIALIZED - : VK_IMAGE_LAYOUT_UNDEFINED; - image_create_info.mipLevels = static_cast((in_use_full_mipmap_chain) ? (1 + log2(max_dimension)) : 1); - image_create_info.pNext = nullptr; - image_create_info.pQueueFamilyIndices = queue_family_indices; - image_create_info.queueFamilyIndexCount = n_queue_family_indices; - image_create_info.samples = static_cast(m_sample_count); - image_create_info.sharingMode = m_sharing_mode; - image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; - image_create_info.tiling = m_tiling; - image_create_info.usage = m_usage; - - result = vkCreateImage(device_locked_ptr->get_device_vk(), - &image_create_info, - nullptr, /* pAllocator */ - &m_image); + if (m_create_info_ptr->get_stencil_image_aspect_usage() != Anvil::ImageUsageFlagBits::NONE) + { + VkImageStencilUsageCreateInfoEXT create_info; - anvil_assert_vk_call_succeeded(result); - if (is_vk_call_successful(result) ) - { - set_vk_handle(m_image); - } + anvil_assert(m_device_ptr->get_extension_info()->ext_separate_stencil_usage() ); - m_n_mipmaps = image_create_info.mipLevels; - m_n_slices = (m_type == VK_IMAGE_TYPE_3D) ? 
m_depth : 1; + create_info.pNext = nullptr; + create_info.stencilUsage = m_create_info_ptr->get_stencil_image_aspect_usage().get_vk(); + create_info.sType = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT; - /* Extract various image properties we're going to need later */ - vkGetImageMemoryRequirements(device_locked_ptr->get_device_vk(), - m_image, - &m_memory_reqs); + struct_chainer.append_struct(create_info); + } - m_alignment = m_memory_reqs.alignment; - m_memory_types = m_memory_reqs.memoryTypeBits; - m_storage_size = m_memory_reqs.size; + { + auto struct_chain_ptr = struct_chainer.create_chain(); - /* Cache aspect subresource properties if we're dealing with a linear image */ - if (m_tiling == VK_IMAGE_TILING_LINEAR) - { - Anvil::Formats::get_format_aspects(m_format, - &aspects_used); + result = Anvil::Vulkan::vkCreateImage(m_device_ptr->get_device_vk (), + struct_chain_ptr->get_root_struct(), + nullptr, /* pAllocator */ + &m_image); + } - for (const auto& current_aspect : aspects_used) + if (!is_vk_call_successful(result) || !m_image) { - VkImageSubresource subresource; + anvil_assert_vk_call_succeeded(result); + + result_bool = false; + goto end; + } + } - subresource.aspectMask = current_aspect; + /* Cache the handle .. */ + set_vk_handle(m_image); - for (uint32_t n_layer = 0; - n_layer < m_n_layers; - ++n_layer) - { - subresource.arrayLayer = n_layer; + /* Prepare metadata for the generated image. */ + { + std::vector image_aspects; + + const bool is_disjoint_yuv_image = (m_create_info_ptr->get_create_flags () & Anvil::ImageCreateFlagBits::CREATE_DISJOINT_BIT) != 0; + const bool is_linear_image = (m_create_info_ptr->get_tiling () == Anvil::ImageTiling::LINEAR); + const bool is_prt_image = (m_create_info_ptr->get_create_flags () & Anvil::ImageCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0; + const bool is_sparse_binding_image = (m_create_info_ptr->get_create_flags () & Anvil::ImageCreateFlagBits::SPARSE_BINDING_BIT) != 0; + const bool is_swapchain_wrapper_image = (m_create_info_ptr->get_internal_type() == Anvil::ImageInternalType::SWAPCHAIN_WRAPPER); + const bool uses_sfr = (m_create_info_ptr->get_create_flags () & Anvil::ImageCreateFlagBits::SPLIT_INSTANCE_BIND_REGIONS_BIT) != 0; + const bool is_yuv_format = Anvil::Formats::is_format_yuv_khr (m_create_info_ptr->get_format() ); + const uint32_t n_format_planes = Anvil::Formats::get_format_n_planes(m_create_info_ptr->get_format() ); - for (uint32_t n_mip = 0; - n_mip < m_n_mipmaps; - ++n_mip) + ANVIL_REDUNDANT_VARIABLE_CONST(is_yuv_format); + + Anvil::Formats::get_format_aspects(m_create_info_ptr->get_format(), + &image_aspects); + + if (!is_swapchain_wrapper_image) + { + /* Extract various image properties we're going to need later. + * + * Prefer facilities exposed by VK_KHR_get_memory_requirements2 unless unsupported. */ + if (m_device_ptr->get_extension_info()->khr_get_memory_requirements2() ) + { + const auto gmr2_entrypoints = m_device_ptr->get_extension_khr_get_memory_requirements2_entrypoints(); + Anvil::StructChainer input_struct_chainer; + Anvil::StructChainUniquePtr input_struct_chain_ptr; + const bool khr_dedicated_allocation_available = m_device_ptr->get_extension_info()->khr_dedicated_allocation(); + Anvil::StructID memory_dedicated_reqs_struct_id; + Anvil::StructChainer result_struct_chainer; + Anvil::StructChainUniquePtr result_struct_chain_ptr; + + /* NOTE: For disjoint images, we need to cache mem reqs for individual planes. This requires use of VkImagePlaneMemoryRequirementsInfoKHR struct. 
*/ + const uint32_t n_planes_to_query = (is_disjoint_yuv_image) ? n_format_planes + : 1; + + for (uint32_t n_current_plane = 0; + n_current_plane < n_planes_to_query; + ++n_current_plane) { - VkSubresourceLayout subresource_layout; + auto& current_plane_properties = m_plane_index_to_memory_properties_map[n_current_plane]; + + { + VkImageMemoryRequirementsInfo2KHR info; - subresource.mipLevel = n_mip; + info.image = m_image; + info.pNext = nullptr; + info.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR; - vkGetImageSubresourceLayout(device_locked_ptr->get_device_vk(), - m_image, - &subresource, - &subresource_layout); + input_struct_chainer.append_struct(info); + } - m_aspects[static_cast(current_aspect)][LayerMipKey(n_layer, n_mip)] = subresource_layout; - } - } - } - } + if (is_disjoint_yuv_image) + { + VkImagePlaneMemoryRequirementsInfoKHR plane_mem_reqs_info; - /* Initialize mipmap props storage */ - init_mipmap_props(); + plane_mem_reqs_info.planeAspect = (n_current_plane == 0) ? VK_IMAGE_ASPECT_PLANE_0_BIT_KHR + : (n_current_plane == 1) ? VK_IMAGE_ASPECT_PLANE_1_BIT_KHR + : VK_IMAGE_ASPECT_PLANE_2_BIT_KHR; + plane_mem_reqs_info.pNext = nullptr; + plane_mem_reqs_info.sType = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR; - if (m_is_sparse && - m_residency_scope != SPARSE_RESIDENCY_SCOPE_NONE) - { - uint32_t n_reqs = 0; - std::vector sparse_image_memory_reqs; + input_struct_chainer.append_struct(plane_mem_reqs_info); + + anvil_assert(n_current_plane <= 2); + } + + { + VkMemoryRequirements2KHR result_reqs; - /* Retrieve image aspect properties. Since Vulkan lets a single props structure to refer to more than - * just a single aspect, we first cache the exposed info in a vec and then distribute the information to - * a map, whose key is allowed to consist of a single bit ( = individual aspect) only */ - vkGetImageSparseMemoryRequirements(device_locked_ptr->get_device_vk(), - m_image, - &n_reqs, - nullptr); + result_reqs.pNext = nullptr; + result_reqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR; - anvil_assert(n_reqs >= 1); + result_struct_chainer.append_struct(result_reqs); + } + + if (khr_dedicated_allocation_available) + { + VkMemoryDedicatedRequirementsKHR memory_dedicated_reqs; + + memory_dedicated_reqs.pNext = nullptr; + memory_dedicated_reqs.prefersDedicatedAllocation = VK_FALSE; + memory_dedicated_reqs.requiresDedicatedAllocation = VK_FALSE; + memory_dedicated_reqs.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR; + + memory_dedicated_reqs_struct_id = result_struct_chainer.append_struct(memory_dedicated_reqs); + } - sparse_image_memory_reqs.resize(n_reqs); + input_struct_chain_ptr = input_struct_chainer.create_chain (); + result_struct_chain_ptr = result_struct_chainer.create_chain(); - vkGetImageSparseMemoryRequirements(device_locked_ptr->get_device_vk(), - m_image, - &n_reqs, - &sparse_image_memory_reqs[0]); + gmr2_entrypoints.vkGetImageMemoryRequirements2KHR(m_device_ptr->get_device_vk (), + input_struct_chain_ptr->get_root_struct (), + result_struct_chain_ptr->get_root_struct() ); - for (const auto& image_memory_req : sparse_image_memory_reqs) + + const auto& memory_reqs = result_struct_chain_ptr->get_root_struct()->memoryRequirements; + + current_plane_properties.alignment = memory_reqs.alignment; + current_plane_properties.memory_types = memory_reqs.memoryTypeBits; + current_plane_properties.storage_size = memory_reqs.size; + + if (memory_dedicated_reqs_struct_id.is_valid() ) + { + const auto memory_dedicated_reqs_ptr = 
result_struct_chain_ptr->get_struct_with_id(memory_dedicated_reqs_struct_id); + + current_plane_properties.prefers_dedicated_allocation = (memory_dedicated_reqs_ptr->prefersDedicatedAllocation == VK_TRUE); + current_plane_properties.requires_dedicated_allocation = (memory_dedicated_reqs_ptr->requiresDedicatedAllocation == VK_TRUE); + } + } + } + else + { + /* NOTE: YUV formats are NOT supported unless GIMR2 is available. */ + auto& current_plane_properties = m_plane_index_to_memory_properties_map[0]; + VkMemoryRequirements memory_reqs; + + anvil_assert(!is_yuv_format); + + Anvil::Vulkan::vkGetImageMemoryRequirements(m_device_ptr->get_device_vk(), + m_image, + &memory_reqs); + + current_plane_properties.alignment = memory_reqs.alignment; + current_plane_properties.memory_types = memory_reqs.memoryTypeBits; + current_plane_properties.storage_size = memory_reqs.size; + } + } + + /* Cache aspect subresource properties if we're dealing with a linear image */ + if (is_linear_image) { - for (uint32_t n_bit = 0; - (1u << n_bit) <= image_memory_req.formatProperties.aspectMask; - ++n_bit) + const auto n_layers = m_create_info_ptr->get_n_layers(); + + /* NOTE: For YUV formats, image_aspects holds PLANE_.. aspects. + * For non-YUV formats, the vector is filled with COLOR/DEPTH/STENCIL aspects. + */ + for (const auto& current_aspect : image_aspects) { - VkImageAspectFlagBits aspect = static_cast(1 << n_bit); + Anvil::ImageSubresource subresource; - if ((image_memory_req.formatProperties.aspectMask & aspect) == 0) + subresource.aspect_mask = current_aspect; + + for (uint32_t n_layer = 0; + n_layer < n_layers; + ++n_layer) { - continue; - } + subresource.array_layer = n_layer; + + for (uint32_t n_mip = 0; + n_mip < m_n_mipmaps; + ++n_mip) + { + Anvil::SubresourceLayout subresource_layout; + + subresource.mip_level = n_mip; - anvil_assert(m_sparse_aspect_props.find(aspect) == m_sparse_aspect_props.end() ); + Anvil::Vulkan::vkGetImageSubresourceLayout(m_device_ptr->get_device_vk(), + m_image, + reinterpret_cast(&subresource), + reinterpret_cast (&subresource_layout) ); - m_sparse_aspect_props[aspect] = Anvil::SparseImageAspectProperties(image_memory_req); + m_linear_image_aspect_data[static_cast(current_aspect.get_vk() )][LayerMipKey(n_layer, n_mip)] = subresource_layout; + } + } } } - /* Continue by setting up storage for page occupancy data */ - init_page_occupancy(sparse_image_memory_reqs); - - /* Finally, partially-resident images require metadata aspect to be bound memory before they can be - * accessed. Allocate & associate a dedicated memory block to the image */ - auto metadata_aspect_iterator = m_sparse_aspect_props.find(VK_IMAGE_ASPECT_METADATA_BIT); + init_mipmap_props(); - if (metadata_aspect_iterator != m_sparse_aspect_props.end() ) + if (is_prt_image) { - Anvil::SparseMemoryBindInfoID metadata_binding_bind_info_id; - Anvil::Utils::SparseMemoryBindingUpdateInfo metadata_binding_update; - std::shared_ptr sparse_queue_ptr(device_locked_ptr->get_sparse_binding_queue(0) ); + uint32_t n_reqs = 0; + std::vector sparse_image_memory_reqs; - /* TODO: Right now, we only support cases where image uses only one metadata block, no matter how many - * layers there are. */ - anvil_assert((metadata_aspect_iterator->second.flags & VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT) != 0); + anvil_assert(m_create_info_ptr->get_internal_type() != Anvil::ImageInternalType::SWAPCHAIN_WRAPPER); /* TODO: can images, to which swapchains can be bound, be sparse? 
*/ - /* Instantiate the memory block which is going to hold the metadata + /* Retrieve image aspect properties. Since Vulkan lets a single props structure to refer to more than + * just a single aspect, we first cache the exposed info in a vec and then distribute the information to + * a map, whose key is allowed to consist of a single bit ( = individual aspect) only * - * NOTE: We should use an instance-wide allocator, preferably resizable, for this. + * Prefer facilities exposed by VK_KHR_get_memory_requirements2 unless unsupported. This will be leveraged + * in the future, but for now admittedly is a bit of a moot move. */ - m_metadata_memory_block_ptr = Anvil::MemoryBlock::create(m_device_ptr, - m_memory_reqs.memoryTypeBits, - metadata_aspect_iterator->second.mip_tail_size, - 0); /* in_memory_features */ + if (m_device_ptr->get_extension_info()->khr_get_memory_requirements2() ) + { + const auto gmr2_entrypoints = m_device_ptr->get_extension_khr_get_memory_requirements2_entrypoints(); + VkImageSparseMemoryRequirementsInfo2KHR info; + uint32_t n_current_item = 0; + std::vector temp_vec; + + info.image = m_image; + info.pNext = nullptr; + info.sType = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR; + + gmr2_entrypoints.vkGetImageSparseMemoryRequirements2KHR(m_device_ptr->get_device_vk(), + &info, + &n_reqs, + nullptr); /* pSparseMemoryRequirements */ + + anvil_assert(n_reqs >= 1); + sparse_image_memory_reqs.resize(n_reqs); + temp_vec.resize (n_reqs); + + gmr2_entrypoints.vkGetImageSparseMemoryRequirements2KHR(m_device_ptr->get_device_vk(), + &info, + &n_reqs, + &temp_vec[0]); /* pSparseMemoryRequirements */ + + for (const auto& current_temp_vec_item : temp_vec) + { + sparse_image_memory_reqs[n_current_item] = current_temp_vec_item.memoryRequirements; + + /* Move on */ + n_current_item++; + } + } + else + { + Anvil::Vulkan::vkGetImageSparseMemoryRequirements(m_device_ptr->get_device_vk(), + m_image, + &n_reqs, + nullptr); + + anvil_assert(n_reqs >= 1); + sparse_image_memory_reqs.resize(n_reqs); + + Anvil::Vulkan::vkGetImageSparseMemoryRequirements(m_device_ptr->get_device_vk(), + m_image, + &n_reqs, + reinterpret_cast(&sparse_image_memory_reqs[0]) ); + } - /* Set up bind info update structure. 
*/ - metadata_binding_bind_info_id = metadata_binding_update.add_bind_info(0, /* n_signal_semaphores */ - nullptr, /* opt_signal_semaphores_ptr */ - 0, /* n_wait_semaphores */ - nullptr); /* opt_wait_semaphores_ptr */ + for (const auto& image_memory_req : sparse_image_memory_reqs) + { + for (uint32_t n_bit = 0; + (1u << n_bit) <= image_memory_req.format_properties.aspect_mask.get_vk(); + ++n_bit) + { + VkImageAspectFlagBits aspect = static_cast(1 << n_bit); + + if ((image_memory_req.format_properties.aspect_mask.get_vk() & aspect) == 0) + { + continue; + } + + anvil_assert(m_sparse_aspect_props.find(static_cast(aspect) ) == m_sparse_aspect_props.end() ); - metadata_binding_update.append_opaque_image_memory_update(metadata_binding_bind_info_id, - shared_from_this(), - metadata_aspect_iterator->second.mip_tail_offset, - metadata_aspect_iterator->second.mip_tail_size, - VK_SPARSE_MEMORY_BIND_METADATA_BIT, - m_metadata_memory_block_ptr, - 0); /* opt_memory_block_start_offset */ + m_sparse_aspect_props[static_cast(aspect)] = Anvil::SparseImageAspectProperties(image_memory_req); + } + } - result_bool = sparse_queue_ptr->bind_sparse_memory(metadata_binding_update); - anvil_assert(result_bool); + /* Continue by setting up storage for page occupancy data */ + init_page_occupancy(sparse_image_memory_reqs); + } + else + if (is_sparse_binding_image) + { + for (auto& current_plane_item : m_plane_index_to_memory_properties_map) + { + current_plane_item.second.page_tracker_ptr.reset( + new Anvil::PageTracker(current_plane_item.second.storage_size, + current_plane_item.second.alignment) + ); + } + } + + /* If the image has been initialized for SFR bindings, calculate & cache SFR tile size */ + if (uses_sfr) + { + init_sfr_tile_size(); } - } - else - if (m_residency_scope == Anvil::SPARSE_RESIDENCY_SCOPE_NONE && m_is_sparse) - { - m_page_tracker_ptr.reset( - new Anvil::PageTracker(m_storage_size, - m_alignment) - ); } - if (m_memory_owner) + /* If the image was created with alloc-memory-at-init-time flag, make sure to do so before we leave */ { - anvil_assert(!m_is_sparse); + const bool should_alloc_and_bind_mem = (m_create_info_ptr->get_internal_type() == Anvil::ImageInternalType::ALLOC); + + if (should_alloc_and_bind_mem) + { + /* Allocate memory for the image. We need one memory block per each plane. 
*/ + Anvil::MemoryBlockUniquePtr memory_block_ptr; + + for (auto& current_plane_item : m_plane_index_to_memory_properties_map) + { + auto memFeatures = m_create_info_ptr->get_memory_features(); + auto create_info_ptr = Anvil::MemoryBlockCreateInfo::create_regular(m_device_ptr, + current_plane_item.second.memory_types, + current_plane_item.second.storage_size, + m_create_info_ptr->get_memory_features() ); + + create_info_ptr->set_mt_safety(Anvil::Utils::convert_boolean_to_mt_safety_enum(is_mt_safe()) ); - /* Allocate memory for the image */ - auto memory_block_ptr = Anvil::MemoryBlock::create(m_device_ptr, - m_memory_reqs.memoryTypeBits, - m_memory_reqs.size, - in_memory_features); + if (current_plane_item.second.requires_dedicated_allocation) + { + create_info_ptr->use_dedicated_allocation(nullptr, /* in_opt_buffer_ptr */ + this); + } + + VkResult resultPtr {}; + memory_block_ptr = Anvil::MemoryBlock::create(std::move(create_info_ptr), &resultPtr); + std::cout<<"Vk mem alloc result: "<get_physical_device_memory_properties(); + for(uint32_t i=0;i(heap.flags.get_vk())<(heap.size)<& in_memory_reqs) +void Anvil::Image::init_page_occupancy(const std::vector& in_memory_reqs) { - std::shared_ptr device_locked_ptr(m_device_ptr); - - anvil_assert(m_residency_scope != Anvil::SPARSE_RESIDENCY_SCOPE_NONE && - m_residency_scope != Anvil::SPARSE_RESIDENCY_SCOPE_UNDEFINED); + anvil_assert((m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0); /* First, allocate space for an AspectPageOccupancyData instance for all aspects used by the image. * @@ -1024,22 +1003,22 @@ void Anvil::Image::init_page_occupancy(const std::vector occupancy_data_ptr; + AspectPageOccupancyData* occupancy_data_ptr = nullptr; for (uint32_t n_aspect_bit = 0; - (1u << n_aspect_bit) <= memory_req.formatProperties.aspectMask; + (1u << n_aspect_bit) <= memory_req.format_properties.aspect_mask.get_vk(); ++n_aspect_bit) { VkImageAspectFlagBits current_aspect = static_cast(1 << n_aspect_bit); - if ((memory_req.formatProperties.aspectMask & current_aspect) == 0) + if ((memory_req.format_properties.aspect_mask.get_vk() & current_aspect) == 0) { continue; } - if (m_sparse_aspect_page_occupancy.find(current_aspect) != m_sparse_aspect_page_occupancy.end() ) + if (m_sparse_aspect_page_occupancy.find(static_cast(current_aspect) ) != m_sparse_aspect_page_occupancy.end() ) { - occupancy_data_ptr = m_sparse_aspect_page_occupancy.at(current_aspect); + occupancy_data_ptr = m_sparse_aspect_page_occupancy.at(static_cast(current_aspect) ); break; } @@ -1047,36 +1026,40 @@ void Anvil::Image::init_page_occupancy(const std::vector(new AspectPageOccupancyData() ) ); } for (uint32_t n_aspect_bit = 0; - (1u << n_aspect_bit) <= memory_req.formatProperties.aspectMask; + (1u << n_aspect_bit) <= memory_req.format_properties.aspect_mask.get_vk(); ++n_aspect_bit) { - VkImageAspectFlagBits current_aspect = static_cast(1 << n_aspect_bit); + Anvil::ImageAspectFlagBits current_aspect = static_cast(1 << n_aspect_bit); - if ((memory_req.formatProperties.aspectMask & current_aspect) == 0) + if ((memory_req.format_properties.aspect_mask & current_aspect) == 0) { continue; } - m_sparse_aspect_page_occupancy[current_aspect] = occupancy_data_ptr; + m_sparse_aspect_page_occupancy[static_cast(current_aspect)] = m_sparse_aspect_page_occupancy_data_items_owned.back().get(); } } /* Next, iterate over each aspect and initialize storage for the mip chain */ - anvil_assert(m_memory_reqs.alignment != 0); - for (auto occupancy_iterator = 
m_sparse_aspect_page_occupancy.begin(); occupancy_iterator != m_sparse_aspect_page_occupancy.end(); ++occupancy_iterator) { - const VkImageAspectFlagBits& current_aspect = occupancy_iterator->first; + const Anvil::ImageAspectFlagBits& current_aspect = occupancy_iterator->first; decltype(m_sparse_aspect_props)::const_iterator current_aspect_props_iterator; - std::shared_ptr page_occupancy_ptr = occupancy_iterator->second; + AspectPageOccupancyData* page_occupancy_ptr = occupancy_iterator->second; + const auto& plane_index = (current_aspect == Anvil::ImageAspectFlagBits::PLANE_1_BIT) ? 1 + : (current_aspect == Anvil::ImageAspectFlagBits::PLANE_2_BIT) ? 2 + : 0; + const auto& plane_memory_reqs = m_plane_index_to_memory_properties_map.at(plane_index); + + anvil_assert(plane_memory_reqs.alignment != 0); if (page_occupancy_ptr->layers.size() > 0) { @@ -1084,7 +1067,7 @@ void Anvil::Image::init_page_occupancy(const std::vectorsecond.granularity.width >= 1); + anvil_assert(current_aspect_props_iterator->second.granularity.width >= 1); anvil_assert(current_aspect_props_iterator->second.granularity.height >= 1); anvil_assert(current_aspect_props_iterator->second.granularity.depth >= 1); /* Initialize storage for layer data.. */ for (uint32_t n_layer = 0; - n_layer < m_n_layers; + n_layer < m_create_info_ptr->get_n_layers(); ++n_layer) { page_occupancy_ptr->layers.push_back(AspectPageOccupancyLayerData() ); @@ -1110,14 +1093,14 @@ void Anvil::Image::init_page_occupancy(const std::vectorsecond.mip_tail_size > 0) { - const bool is_single_miptail = (current_aspect_props_iterator->second.flags & VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT) != 0; + const bool is_single_miptail = (current_aspect_props_iterator->second.flags & Anvil::SparseImageFormatFlagBits::SINGLE_MIPTAIL_BIT) != 0; if ((is_single_miptail && n_layer == 0) || !is_single_miptail) { - anvil_assert( (current_aspect_props_iterator->second.mip_tail_size % m_memory_reqs.alignment) == 0); + anvil_assert( (current_aspect_props_iterator->second.mip_tail_size % plane_memory_reqs.alignment) == 0); - current_layer.n_total_tail_pages = static_cast(current_aspect_props_iterator->second.mip_tail_size / m_memory_reqs.alignment); + current_layer.n_total_tail_pages = static_cast(current_aspect_props_iterator->second.mip_tail_size / plane_memory_reqs.alignment); current_layer.tail_occupancy.resize(1 + current_layer.n_total_tail_pages / (sizeof(PageOccupancyStatus) * 8 /* bits in byte */) ); } @@ -1132,7 +1115,7 @@ void Anvil::Image::init_page_occupancy(const std::vector aspects; + uint32_t n_channel_bits[4]; + uint32_t n_bytes_per_texel; + + anvil_assert(m_create_info_ptr->get_type() == Anvil::ImageType::_2D); + + Anvil::Formats::get_format_aspects(m_create_info_ptr->get_format(), + &aspects); + + if (aspects.size() != 1) { - std::shared_ptr device_locked_ptr(m_device_ptr); + /* TODO: It is currently undefined how SFR tile size should be calculated for images + * with more than 1 aspect. 
*/ + anvil_assert(aspects.size() == 1); + + goto end; + } - vkDestroyImage(device_locked_ptr->get_device_vk(), - m_image, - nullptr /* pAllocator */); + if (Anvil::Formats::is_format_compressed(m_create_info_ptr->get_format() ) ) + { + /* TODO */ + anvil_assert(!Anvil::Formats::is_format_compressed(m_create_info_ptr->get_format() )); - m_image = VK_NULL_HANDLE; + goto end; } - m_memory_block_ptr.reset(); + Anvil::Formats::get_format_n_component_bits_nonyuv(m_create_info_ptr->get_format(), + n_channel_bits + 0, + n_channel_bits + 1, + n_channel_bits + 2, + n_channel_bits + 3); + + n_bytes_per_texel = (n_channel_bits[0] + n_channel_bits[1] + n_channel_bits[2] + n_channel_bits[3]) / 8; + + switch (n_bytes_per_texel) + { + case 1: + { + m_sfr_tile_size.width = 256; + m_sfr_tile_size.height = 256; + + break; + } + + case 2: + { + m_sfr_tile_size.width = 256; + m_sfr_tile_size.height = 128; + + break; + } + + case 4: + { + m_sfr_tile_size.width = 128; + m_sfr_tile_size.height = 128; + + break; + } + + case 8: + { + m_sfr_tile_size.width = 128; + m_sfr_tile_size.height = 64; + + break; + } + + case 16: + { + m_sfr_tile_size.width = 64; + m_sfr_tile_size.height = 64; + + break; + } - /* Unregister the object */ - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_IMAGE, - this); + default: + { + anvil_assert_fail(); + } + } +end: + ; } /* Please see header for specification */ -const VkImage& Anvil::Image::get_image() +const VkImage& Anvil::Image::get_image(const bool& in_bake_memory_if_necessary) { - if (!m_is_sparse) + if ((m_create_info_ptr->get_create_flags () & Anvil::ImageCreateFlagBits::SPARSE_BINDING_BIT) == 0) { - if (m_memory_block_ptr == nullptr) + if (m_memory_blocks_owned.size() == 0 && + in_bake_memory_if_necessary) { get_memory_block(); } @@ -1190,7 +1240,7 @@ const VkImage& Anvil::Image::get_image() /* Please see header for specification */ VkExtent2D Anvil::Image::get_image_extent_2D(uint32_t in_n_mipmap) const { - VkExtent2D result; + VkExtent2D result = {0u, 0u}; uint32_t size[2]; if (!get_image_mipmap_size(in_n_mipmap, @@ -1213,7 +1263,7 @@ VkExtent2D Anvil::Image::get_image_extent_2D(uint32_t in_n_mipmap) const /* Please see header for specification */ VkExtent3D Anvil::Image::get_image_extent_3D(uint32_t in_n_mipmap) const { - VkExtent3D result; + VkExtent3D result = {0u, 0u, 0u}; uint32_t size[3]; if (!get_image_mipmap_size(in_n_mipmap, @@ -1243,7 +1293,7 @@ bool Anvil::Image::get_image_mipmap_size(uint32_t in_n_mipmap, bool result = false; /* Is this a sensible mipmap index? */ - if (m_mipmaps.size() <= in_n_mipmap) + if (m_mipmap_props.size() <= in_n_mipmap) { goto end; } @@ -1251,17 +1301,17 @@ bool Anvil::Image::get_image_mipmap_size(uint32_t in_n_mipmap, /* Return the result data.. 
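/* A quick example of the values handed back here, assuming a 2D image with a 256x256 base mip and a full
 * mip chain: each successive level halves the width and height (and depth, for 3D images), clamping at 1:
 *
 *     mip 0: 256x256   mip 3:  32x32   mip 6:  4x4
 *     mip 1: 128x128   mip 4:  16x16   mip 7:  2x2
 *     mip 2:  64x64    mip 5:   8x8    mip 8:  1x1
 */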
*/ if (out_opt_width_ptr != nullptr) { - *out_opt_width_ptr = m_mipmaps[in_n_mipmap].width; + *out_opt_width_ptr = m_mipmap_props[in_n_mipmap].width; } if (out_opt_height_ptr != nullptr) { - *out_opt_height_ptr = m_mipmaps[in_n_mipmap].height; + *out_opt_height_ptr = m_mipmap_props[in_n_mipmap].height; } if (out_opt_depth_ptr != nullptr) { - *out_opt_depth_ptr = m_mipmaps[in_n_mipmap].depth; + *out_opt_depth_ptr = m_mipmap_props[in_n_mipmap].depth; } /* All done */ @@ -1272,22 +1322,42 @@ bool Anvil::Image::get_image_mipmap_size(uint32_t in_n_mipmap, } /** Please see header for specification */ -bool Anvil::Image::get_sparse_image_aspect_properties(const VkImageAspectFlagBits in_aspect, +bool Anvil::Image::get_SFR_tile_size(VkExtent2D* out_result_ptr) const +{ + const Anvil::MGPUDevice* device_ptr(dynamic_cast(m_device_ptr)); + bool result (false); + + if (device_ptr == nullptr) + { + anvil_assert_fail(); + + goto end; + } + + /* All done */ + *out_result_ptr = m_sfr_tile_size; + result = true; +end: + return result; +} + +/** Please see header for specification */ +bool Anvil::Image::get_sparse_image_aspect_properties(const Anvil::ImageAspectFlagBits in_aspect, const Anvil::SparseImageAspectProperties** out_result_ptr_ptr) const { decltype(m_sparse_aspect_props)::const_iterator prop_iterator; bool result = false; - if (!m_is_sparse) + if (m_create_info_ptr->get_internal_type() != Anvil::ImageInternalType::NO_ALLOC) { - anvil_assert(m_is_sparse); + anvil_assert(m_create_info_ptr->get_internal_type() == Anvil::ImageInternalType::NO_ALLOC); goto end; } - if (m_residency_scope == Anvil::SPARSE_RESIDENCY_SCOPE_NONE) + if ((m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::SPARSE_RESIDENCY_BIT) == 0) { - anvil_assert(m_residency_scope != Anvil::SPARSE_RESIDENCY_SCOPE_NONE); + anvil_assert((m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0); goto end; } @@ -1308,95 +1378,66 @@ bool Anvil::Image::get_sparse_image_aspect_properties(const VkImageAspectFlagBit return result; } -/** TODO */ -VkImageCreateInfo Anvil::Image::get_create_info_for_swapchain(std::shared_ptr in_swapchain_ptr) -{ - VkImageCreateInfo result; - - in_swapchain_ptr->get_image(0)->get_image_mipmap_size(0, /* n_mipmap */ - &result.extent.width, - &result.extent.height, - &result.arrayLayers); - - result.extent.depth = 1; - result.flags = in_swapchain_ptr->get_flags(); - result.format = in_swapchain_ptr->get_image_format(); - result.imageType = VK_IMAGE_TYPE_2D; - result.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; - result.mipLevels = 1; - result.pNext = nullptr; - result.pQueueFamilyIndices = nullptr; - result.queueFamilyIndexCount = UINT32_MAX; - result.samples = VK_SAMPLE_COUNT_1_BIT; - result.sharingMode = in_swapchain_ptr->get_image(0)->get_image_sharing_mode(); - result.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; - result.tiling = VK_IMAGE_TILING_OPTIMAL; - result.usage = in_swapchain_ptr->get_image(0)->get_image_usage(); - - return result; -} - /** Please see header for specification */ -VkImageSubresourceRange Anvil::Image::get_subresource_range() const +Anvil::ImageSubresourceRange Anvil::Image::get_subresource_range() const { - VkImageSubresourceRange result; + Anvil::ImageSubresourceRange result; - switch (m_format) + switch (m_create_info_ptr->get_format() ) { - case VK_FORMAT_D16_UNORM: - case VK_FORMAT_D32_SFLOAT: + case Anvil::Format::D16_UNORM: + case Anvil::Format::D32_SFLOAT: { - result.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT; + result.aspect_mask = 
Anvil::ImageAspectFlagBits::DEPTH_BIT; break; } - case VK_FORMAT_D16_UNORM_S8_UINT: - case VK_FORMAT_D24_UNORM_S8_UINT: - case VK_FORMAT_D32_SFLOAT_S8_UINT: + case Anvil::Format::D16_UNORM_S8_UINT: + case Anvil::Format::D24_UNORM_S8_UINT: + case Anvil::Format::D32_SFLOAT_S8_UINT: { - result.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT; + result.aspect_mask = Anvil::ImageAspectFlagBits::DEPTH_BIT | Anvil::ImageAspectFlagBits::STENCIL_BIT; break; } - case VK_FORMAT_S8_UINT: + case Anvil::Format::S8_UINT: { - result.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT; + result.aspect_mask = Anvil::ImageAspectFlagBits::STENCIL_BIT; break; } default: { - result.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + result.aspect_mask = Anvil::ImageAspectFlagBits::COLOR_BIT; break; } } - result.baseArrayLayer = 0; - result.baseMipLevel = 0; - result.layerCount = VK_REMAINING_ARRAY_LAYERS; - result.levelCount = VK_REMAINING_MIP_LEVELS; + result.base_array_layer = 0; + result.base_mip_level = 0; + result.layer_count = VK_REMAINING_ARRAY_LAYERS; + result.level_count = VK_REMAINING_MIP_LEVELS; return result; } /** Please see header for specification */ -bool Anvil::Image::has_aspects(VkImageAspectFlags in_aspects) const +bool Anvil::Image::has_aspects(const Anvil::ImageAspectFlags& in_aspects) const { - VkImageAspectFlags checked_aspects = 0; - bool result = true; + Anvil::ImageAspectFlags checked_aspects; + bool result = true; - if (m_is_sparse && - m_residency_scope != SPARSE_RESIDENCY_SCOPE_NONE) + if ((m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0) { for (uint32_t n_bit = 0; n_bit < sizeof(uint32_t) * 8 /* bits in byte */ && result; ++n_bit) { - VkImageAspectFlagBits current_aspect = static_cast(1 << n_bit); + Anvil::ImageAspectFlagBits current_aspect = static_cast(1 << n_bit); if ((in_aspects & current_aspect) != 0 && m_sparse_aspect_props.find(current_aspect) == m_sparse_aspect_props.end() ) @@ -1407,80 +1448,110 @@ bool Anvil::Image::has_aspects(VkImageAspectFlags in_aspects) const } else { - const Anvil::ComponentLayout component_layout = Anvil::Formats::get_format_component_layout(m_format); - bool has_color_components = false; - bool has_depth_components = false; - bool has_stencil_components = false; - - switch (component_layout) - { - case COMPONENT_LAYOUT_ABGR: - case COMPONENT_LAYOUT_ARGB: - case COMPONENT_LAYOUT_BGR: - case COMPONENT_LAYOUT_BGRA: - case COMPONENT_LAYOUT_EBGR: - case COMPONENT_LAYOUT_R: - case COMPONENT_LAYOUT_RG: - case COMPONENT_LAYOUT_RGB: - case COMPONENT_LAYOUT_RGBA: - { - has_color_components = true; + const auto format = m_create_info_ptr->get_format (); + const bool is_yuv_format = Anvil::Formats::is_format_yuv_khr(format); - break; - } + if (is_yuv_format) + { + const auto n_planes = Anvil::Formats::get_format_n_planes(format); - case COMPONENT_LAYOUT_D: - case COMPONENT_LAYOUT_XD: + if ((in_aspects & Anvil::ImageAspectFlagBits::PLANE_0_BIT) == Anvil::ImageAspectFlagBits::PLANE_0_BIT) { - has_depth_components = true; - - break; + /* No check needed. 
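/* As an example of the plane checks in this block: a 2-plane format such as
 * VK_FORMAT_G8_B8R8_2PLANE_420_UNORM exposes PLANE_0 and PLANE_1 only, so a query that includes
 * PLANE_2_BIT returns false, while a 3-plane format such as VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM
 * satisfies all three. A hypothetical call:
 *
 *     const bool ok = image_ptr->has_aspects(Anvil::ImageAspectFlagBits::PLANE_0_BIT |
 *                                            Anvil::ImageAspectFlagBits::PLANE_1_BIT);
 */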
YUV formats always use plane 0 */ + checked_aspects |= Anvil::ImageAspectFlagBits::PLANE_0_BIT; } - case COMPONENT_LAYOUT_DS: + if ((in_aspects & Anvil::ImageAspectFlagBits::PLANE_1_BIT) == Anvil::ImageAspectFlagBits::PLANE_1_BIT) { - has_depth_components = true; - has_stencil_components = true; + result &= (n_planes >= 2); - break; + checked_aspects |= Anvil::ImageAspectFlagBits::PLANE_1_BIT; } - case COMPONENT_LAYOUT_S: + if ((in_aspects & Anvil::ImageAspectFlagBits::PLANE_2_BIT) == Anvil::ImageAspectFlagBits::PLANE_2_BIT) { - has_stencil_components = true; + result &= (n_planes >= 3); - break; + checked_aspects |= Anvil::ImageAspectFlagBits::PLANE_2_BIT; } + } + else + { + const Anvil::ComponentLayout component_layout = Anvil::Formats::get_format_component_layout_nonyuv(format); + bool has_color_components = false; + bool has_depth_components = false; + bool has_stencil_components = false; - default: + switch (component_layout) { - anvil_assert_fail(); + case ComponentLayout::ABGR: + case ComponentLayout::ARGB: + case ComponentLayout::BGR: + case ComponentLayout::BGRA: + case ComponentLayout::EBGR: + case ComponentLayout::R: + case ComponentLayout::RG: + case ComponentLayout::RGB: + case ComponentLayout::RGBA: + { + has_color_components = true; + + break; + } + + case ComponentLayout::D: + case ComponentLayout::XD: + { + has_depth_components = true; + + break; + } + + case ComponentLayout::DS: + { + has_depth_components = true; + has_stencil_components = true; + + break; + } + + case ComponentLayout::S: + { + has_stencil_components = true; + + break; + } + + default: + { + anvil_assert_fail(); + } } - } - if (in_aspects & VK_IMAGE_ASPECT_COLOR_BIT) - { - result &= has_color_components; + if ((in_aspects & Anvil::ImageAspectFlagBits::COLOR_BIT) != 0) + { + result &= has_color_components; - checked_aspects |= VK_IMAGE_ASPECT_COLOR_BIT; - } + checked_aspects |= Anvil::ImageAspectFlagBits::COLOR_BIT; + } - if (result && (in_aspects & VK_IMAGE_ASPECT_DEPTH_BIT) != 0) - { - result &= has_depth_components; + if (result && (in_aspects & Anvil::ImageAspectFlagBits::DEPTH_BIT) != 0) + { + result &= has_depth_components; - checked_aspects |= VK_IMAGE_ASPECT_DEPTH_BIT; - } + checked_aspects |= Anvil::ImageAspectFlagBits::DEPTH_BIT; + } - if (result && (in_aspects & VK_IMAGE_ASPECT_STENCIL_BIT) != 0) - { - result &= has_stencil_components; + if (result && (in_aspects & Anvil::ImageAspectFlagBits::STENCIL_BIT) != 0) + { + result &= has_stencil_components; - checked_aspects |= VK_IMAGE_ASPECT_STENCIL_BIT; + checked_aspects |= Anvil::ImageAspectFlagBits::STENCIL_BIT; + } } - anvil_assert(!result || - result && checked_aspects == in_aspects); + anvil_assert( !result || + ( result && checked_aspects == in_aspects)); } return result; @@ -1491,26 +1562,26 @@ void Anvil::Image::init_mipmap_props() { uint32_t current_mipmap_size[3] = { - m_width, - m_height, - m_depth, + m_create_info_ptr->get_base_mip_width (), + m_create_info_ptr->get_base_mip_height(), + m_create_info_ptr->get_base_mip_depth () }; - anvil_assert(m_n_mipmaps != 0); - anvil_assert(m_mipmaps.size() == 0); + anvil_assert(m_n_mipmaps != 0); + anvil_assert(m_mipmap_props.size() == 0); for (uint32_t mipmap_level = 0; mipmap_level < m_n_mipmaps; ++mipmap_level) { - m_mipmaps.push_back(Mipmap(current_mipmap_size[0], - current_mipmap_size[1], - current_mipmap_size[2]) ); + m_mipmap_props.push_back(Mipmap(current_mipmap_size[0], + current_mipmap_size[1], + current_mipmap_size[2]) ); current_mipmap_size[0] /= 2; current_mipmap_size[1] /= 2; - if 
(m_type == VK_IMAGE_TYPE_3D) + if (m_create_info_ptr->get_type() == Anvil::ImageType::_3D) { current_mipmap_size[2] /= 2; } @@ -1533,22 +1604,20 @@ void Anvil::Image::init_mipmap_props() } /* Please see header for specification */ -bool Anvil::Image::is_memory_bound_for_texel(VkImageAspectFlagBits in_aspect, - uint32_t in_n_layer, - uint32_t in_n_mip, - uint32_t in_x, - uint32_t in_y, - uint32_t in_z) const +bool Anvil::Image::is_memory_bound_for_texel(Anvil::ImageAspectFlagBits in_aspect, + uint32_t in_n_layer, + uint32_t in_n_mip, + uint32_t in_x, + uint32_t in_y, + uint32_t in_z) const { bool result = false; /* Sanity checks */ - anvil_assert(m_is_sparse); - anvil_assert(m_residency_scope == SPARSE_RESIDENCY_SCOPE_ALIASED || - m_residency_scope == SPARSE_RESIDENCY_SCOPE_NONALIASED); + anvil_assert((m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0); - anvil_assert(m_n_layers > in_n_layer); - anvil_assert(m_n_mipmaps > in_n_mip); + anvil_assert(m_create_info_ptr->get_n_layers () > in_n_layer); + anvil_assert(m_n_mipmaps > in_n_mip); anvil_assert(m_sparse_aspect_page_occupancy.find(in_aspect) != m_sparse_aspect_page_occupancy.end() ); @@ -1580,39 +1649,46 @@ bool Anvil::Image::is_memory_bound_for_texel(VkImageAspectFlagBits in_aspect, /** Updates page tracker (for non-resident images) OR tile-to-block mappings & tail page counteres, * as per the specified opaque image memory update properties. * - * @param in_resource_offset Raw image data offset, from which the update has been performed - * @param in_size Size of the updated region. - * @param in_memory_block_ptr Memory block, bound to the specified region. - * @param in_memory_block_start_offset Start offset relative to @param memory_block_ptr used for the update. + * @param in_resource_offset Raw image data offset, from which the update has been performed + * @param in_size Size of the updated region. + * @param in_memory_block_ptr Memory block, bound to the specified region. + * @param in_memory_block_start_offset Start offset relative to @param memory_block_ptr used for the update. + * @param in_memory_block_owned_by_image TODO + * @param in_n_plane Index of image plane the binding is to be made for. This must be 0 for non-YUV + * and joint YUV images. For disjoint YUV images, the value must be between 0 and 2. **/ -void Anvil::Image::on_memory_backing_opaque_update(VkDeviceSize in_resource_offset, - VkDeviceSize in_size, - std::shared_ptr in_memory_block_ptr, - VkDeviceSize in_memory_block_start_offset) +void Anvil::Image::on_memory_backing_opaque_update(uint32_t in_n_plane, + VkDeviceSize in_resource_offset, + VkDeviceSize in_size, + Anvil::MemoryBlock* in_memory_block_ptr, + VkDeviceSize in_memory_block_start_offset, + bool in_memory_block_owned_by_image) { const bool is_unbinding = (in_memory_block_ptr == nullptr); /* Sanity checks */ - anvil_assert(m_is_sparse); + anvil_assert((m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::SPARSE_BINDING_BIT) != 0); if (in_memory_block_ptr != nullptr) { - anvil_assert(in_memory_block_ptr->get_size() <= in_memory_block_start_offset + in_size); + anvil_assert(in_memory_block_ptr->get_create_info_ptr()->get_size() <= in_memory_block_start_offset + in_size); } - if (m_residency_scope == Anvil::SPARSE_RESIDENCY_SCOPE_NONE) + if ((m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::SPARSE_RESIDENCY_BIT) == 0) { /* Non-resident image: underlying memory is viewed as an opaque linear region. 
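/* In other words, a sparse-binding-only image behaves much like a sparse buffer: the resource is one
 * linear range split into alignment-sized pages, and each opaque bind simply records which memory block
 * (if any) backs a given byte range. Conceptually, per plane (names are placeholders):
 *
 *     plane_props.page_tracker_ptr->set_binding(memory_block_ptr,          // block backing the range, or nullptr to unbind
 *                                               memory_block_start_offset,
 *                                               resource_offset,           // offset within the image's opaque data
 *                                               size);
 */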
*/ - m_page_tracker_ptr->set_binding(in_memory_block_ptr, - in_memory_block_start_offset, - in_resource_offset, - in_size); + PerPlaneMemoryProperties* plane_memory_reqs_ptr = &m_plane_index_to_memory_properties_map.at(in_n_plane); + + plane_memory_reqs_ptr->page_tracker_ptr->set_binding(in_memory_block_ptr, + in_memory_block_start_offset, + in_resource_offset, + in_size); } else { /* The following use cases are expected to make us reach this block: * - * 1) Application is about to bind a memory backing to all tiles OR unbind memory backing + * 1) Application is about to bind a memory backing to all tiles (of a single plane) OR unbind memory backing * from all tiles forming all aspects at once. * * 2) Application wants to bind (or unbind) a memory backing to/from miptail tile(s). @@ -1622,7 +1698,7 @@ void Anvil::Image::on_memory_backing_opaque_update(VkDeviceSize * by the Image wrapper, so there's nothing we need to do here. */ bool is_miptail_tile_binding_operation = false; - auto metadata_aspect_iterator = m_sparse_aspect_props.find(VK_IMAGE_ASPECT_METADATA_BIT); + auto metadata_aspect_iterator = m_sparse_aspect_props.find(Anvil::ImageAspectFlagBits::METADATA_BIT); if (metadata_aspect_iterator != m_sparse_aspect_props.end() ) { @@ -1630,6 +1706,12 @@ void Anvil::Image::on_memory_backing_opaque_update(VkDeviceSize if (in_resource_offset == metadata_aspect_iterator->second.mip_tail_offset && in_size == metadata_aspect_iterator->second.mip_tail_size) { + anvil_assert(m_metadata_memory_block_ptr == nullptr); + anvil_assert(in_memory_block_owned_by_image); + + m_metadata_memory_block_ptr = MemoryBlockUniquePtr(in_memory_block_ptr, + std::default_delete() ); + return; } else @@ -1646,47 +1728,38 @@ void Anvil::Image::on_memory_backing_opaque_update(VkDeviceSize aspect_iterator != m_sparse_aspect_page_occupancy.end() && !is_miptail_tile_binding_operation; ++aspect_iterator) { - VkImageAspectFlagBits current_aspect = aspect_iterator->first; + Anvil::ImageAspectFlagBits current_aspect = aspect_iterator->first; const SparseImageAspectProperties& current_aspect_props = m_sparse_aspect_props.at(current_aspect); auto occupancy_data_ptr = aspect_iterator->second; ANVIL_REDUNDANT_VARIABLE_CONST(current_aspect_props); - if ( in_resource_offset != 0 && - !is_miptail_tile_binding_operation) + if (in_size == current_aspect_props.mip_tail_size) { - is_miptail_tile_binding_operation = (in_size == current_aspect_props.mip_tail_size); - - if (is_miptail_tile_binding_operation) + for (uint32_t n_layer = 0; + n_layer < m_create_info_ptr->get_n_layers(); + ++n_layer) { - is_miptail_tile_binding_operation = false; + auto& current_layer = occupancy_data_ptr->layers.at(n_layer); - for (uint32_t n_layer = 0; - n_layer < m_n_layers; - ++n_layer) + if (in_resource_offset == current_aspect_props.mip_tail_offset + current_aspect_props.mip_tail_stride * n_layer) { - auto& current_layer = occupancy_data_ptr->layers.at(n_layer); - - if (in_resource_offset == current_aspect_props.mip_tail_offset + current_aspect_props.mip_tail_stride * n_layer && - in_size == current_aspect_props.mip_tail_size) - { - is_miptail_tile_binding_operation = true; - - memset(¤t_layer.tail_occupancy[0], - (!is_unbinding) ? 
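/* For reference, the layer check above follows the sparse-image miptail layout: when the SINGLE_MIPTAIL
 * flag is not set, layer n's miptail starts at mip_tail_offset + n * mip_tail_stride. For instance, with
 * mip_tail_offset = 0x40000, mip_tail_stride = 0x10000 and mip_tail_size = 0x10000 (made-up values),
 * layer 0's tail occupies [0x40000, 0x50000) and layer 1's tail occupies [0x50000, 0x60000). */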
~0 : 0, - current_layer.tail_occupancy.size() * sizeof(current_layer.tail_occupancy[0])); + is_miptail_tile_binding_operation = true; - if (!is_unbinding) - { - current_layer.tail_pages_per_binding[in_memory_block_ptr] = current_layer.n_total_tail_pages; - } - else - { - current_layer.tail_pages_per_binding.clear(); - } + memset(¤t_layer.tail_occupancy[0], + (!is_unbinding) ? ~0 : 0, + current_layer.tail_occupancy.size() * sizeof(current_layer.tail_occupancy[0])); - break; + if (!is_unbinding) + { + current_layer.tail_pages_per_binding[in_memory_block_ptr] = current_layer.n_total_tail_pages; + } + else + { + current_layer.tail_pages_per_binding.clear(); } + + break; } } } @@ -1721,10 +1794,17 @@ void Anvil::Image::on_memory_backing_opaque_update(VkDeviceSize } } } + + if (in_memory_block_owned_by_image) + { + m_memory_blocks_owned.push_back( + Anvil::MemoryBlockUniquePtr(in_memory_block_ptr, + std::default_delete() ) + ); + } } -/** Updates page tracker (for non-resident images) OR tile-to-block mappings & tail page counteres, - * as per the specified opaque image update properties. +/** Updates tile-to-block mappings & tail page counteres, as per the specified opaque image update properties. * * @param in_subresource Subresource specified for the update. * @param in_offset Offset specified for the update. @@ -1732,24 +1812,24 @@ void Anvil::Image::on_memory_backing_opaque_update(VkDeviceSize * @param in_memory_block_ptr Memory block, bound to the specified region. * @param in_memory_block_start_offset Start offset relative to @param memory_block_ptr used for the update. **/ -void Anvil::Image::on_memory_backing_update(const VkImageSubresource& in_subresource, - VkOffset3D in_offset, - VkExtent3D in_extent, - std::shared_ptr in_memory_block_ptr, - VkDeviceSize in_memory_block_start_offset) +void Anvil::Image::on_memory_backing_update(const Anvil::ImageSubresource& in_subresource, + VkOffset3D in_offset, + VkExtent3D in_extent, + Anvil::MemoryBlock* in_memory_block_ptr, + VkDeviceSize in_memory_block_start_offset, + bool in_memory_block_owned_by_image) { AspectPageOccupancyLayerData* aspect_layer_ptr; AspectPageOccupancyLayerMipData* aspect_layer_mip_ptr; decltype(m_sparse_aspect_page_occupancy)::iterator aspect_page_occupancy_iterator; decltype(m_sparse_aspect_props)::iterator aspect_props_iterator; - anvil_assert(m_residency_scope == Anvil::SPARSE_RESIDENCY_SCOPE_ALIASED || - m_residency_scope == Anvil::SPARSE_RESIDENCY_SCOPE_NONALIASED); + anvil_assert((m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::SPARSE_RESIDENCY_BIT) != 0); ANVIL_REDUNDANT_ARGUMENT(in_memory_block_start_offset); - if (in_subresource.aspectMask == VK_IMAGE_ASPECT_METADATA_BIT) + if (in_subresource.aspect_mask == Anvil::ImageAspectFlagBits::METADATA_BIT) { /* Metadata is not tracked since it needs to be completely bound in order for the sparse image to be usable. 
*/ anvil_assert_fail(); @@ -1757,8 +1837,8 @@ void Anvil::Image::on_memory_backing_update(const VkImageSubresource& return; } - aspect_page_occupancy_iterator = m_sparse_aspect_page_occupancy.find(static_cast(in_subresource.aspectMask) ); - aspect_props_iterator = m_sparse_aspect_props.find (static_cast(in_subresource.aspectMask) ); + aspect_page_occupancy_iterator = m_sparse_aspect_page_occupancy.find(static_cast(in_subresource.aspect_mask.get_vk() ) ); + aspect_props_iterator = m_sparse_aspect_props.find (static_cast(in_subresource.aspect_mask.get_vk() ) ); anvil_assert(aspect_page_occupancy_iterator != m_sparse_aspect_page_occupancy.end() ); anvil_assert(aspect_props_iterator != m_sparse_aspect_props.end() ); @@ -1770,11 +1850,11 @@ void Anvil::Image::on_memory_backing_update(const VkImageSubresource& (in_extent.height % aspect_props_iterator->second.granularity.height) == 0 && (in_extent.depth % aspect_props_iterator->second.granularity.depth) == 0); - anvil_assert(aspect_page_occupancy_iterator->second->layers.size() >= in_subresource.arrayLayer); - aspect_layer_ptr = &aspect_page_occupancy_iterator->second->layers.at(in_subresource.arrayLayer); + anvil_assert(aspect_page_occupancy_iterator->second->layers.size() >= in_subresource.array_layer); + aspect_layer_ptr = &aspect_page_occupancy_iterator->second->layers.at(in_subresource.array_layer); - anvil_assert(aspect_layer_ptr->mips.size() > in_subresource.mipLevel); - aspect_layer_mip_ptr = &aspect_layer_ptr->mips.at(in_subresource.mipLevel); + anvil_assert(aspect_layer_ptr->mips.size() > in_subresource.mip_level); + aspect_layer_mip_ptr = &aspect_layer_ptr->mips.at(in_subresource.mip_level); const uint32_t extent_tile[] = { @@ -1813,111 +1893,412 @@ void Anvil::Image::on_memory_backing_update(const VkImageSubresource& } } } + + if (in_memory_block_owned_by_image) + { + m_memory_blocks_owned.push_back( + MemoryBlockUniquePtr(in_memory_block_ptr, + std::default_delete() ) + ); + } } /* Please see header for specification */ -bool Anvil::Image::set_memory(std::shared_ptr in_memory_block_ptr) +bool Anvil::Image::set_memory(MemoryBlockUniquePtr in_memory_block_ptr) +{ + return set_memory_internal(in_memory_block_ptr.release(), + true, /* in_owned_by_image */ + 0, /* in_n_device_group_indices */ + nullptr, /* in_device_group_indices_ptr */ + 0, /* in_n_SFR_rects */ + nullptr); /* in_SFRs_ptr */ +} + +bool Anvil::Image::set_memory(Anvil::MemoryBlock* in_memory_block_ptr) +{ + return set_memory_internal(in_memory_block_ptr, + false, /* in_owned_by_image */ + 0, /* in_n_device_group_indices */ + nullptr, /* in_device_group_indices_ptr */ + 0, /* in_n_SFR_rects */ + nullptr); /* in_SFRs_ptr */ +} + +/** Please see header for specification */ +bool Anvil::Image::set_memory(MemoryBlockUniquePtr in_memory_block_ptr, + uint32_t in_n_device_group_indices, + const uint32_t* in_device_group_indices_ptr) +{ + return set_memory_internal(in_memory_block_ptr.release(), + true, /* in_owned_by_image */ + in_n_device_group_indices, + in_device_group_indices_ptr, + 0, /* in_n_SFR_rects */ + nullptr); /* in_SFRs_ptr */ +} + +bool Anvil::Image::set_memory(Anvil::MemoryBlock* in_memory_block_ptr, + uint32_t in_n_device_group_indices, + const uint32_t* in_device_group_indices_ptr) +{ + return set_memory_internal(in_memory_block_ptr, + false, /* in_owned_by_image */ + in_n_device_group_indices, + in_device_group_indices_ptr, + 0, /* in_n_SFR_rects */ + nullptr); /* in_SFRs_ptr */ +} + +/** Please see header for specification */ +bool 
Anvil::Image::set_memory(MemoryBlockUniquePtr in_memory_block_ptr, + uint32_t in_n_SFR_rects, + const VkRect2D* in_SFRs_ptr) +{ + return set_memory_internal(in_memory_block_ptr.release(), + true, /* in_owned_by_image */ + 0, /* in_n_device_group_indices */ + nullptr, /* in_device_group_indices_ptr */ + in_n_SFR_rects, + in_SFRs_ptr); +} + +bool Anvil::Image::set_memory(Anvil::MemoryBlock* in_memory_block_ptr, + uint32_t in_n_SFR_rects, + const VkRect2D* in_SFRs_ptr) +{ + return set_memory_internal(in_memory_block_ptr, + false, /* in_owned_by_image */ + 0, /* in_n_device_group_indices */ + nullptr, /* in_device_group_indices_ptr */ + in_n_SFR_rects, + in_SFRs_ptr); +} + +bool Anvil::Image::set_memory_internal(Anvil::MemoryBlock* in_memory_block_ptr, + bool in_owned_by_image, + uint32_t in_n_device_group_indices, + const uint32_t* in_device_group_indices_ptr, + uint32_t in_n_SFR_rects, + const VkRect2D* in_SFRs_ptr) { - std::shared_ptr device_locked_ptr(m_device_ptr); - const Anvil::DeviceType device_type (device_locked_ptr->get_type() ); - VkResult result (VK_ERROR_INITIALIZATION_FAILED); + const auto& entrypoints (m_device_ptr->get_extension_khr_bind_memory2_entrypoints() ); + const bool is_disjoint_yuv_image((m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::CREATE_DISJOINT_BIT) != 0); + VkResult result (VK_ERROR_INITIALIZATION_FAILED); + + const uint32_t n_planes = (is_disjoint_yuv_image ? Anvil::Formats::get_format_n_planes(m_create_info_ptr->get_format() ) + : 1); /* Sanity checks */ - anvil_assert(in_memory_block_ptr != nullptr); - anvil_assert(!m_is_sparse); - anvil_assert(m_mipmaps.size() > 0); - anvil_assert(m_memory_block_ptr == nullptr); + anvil_assert(in_memory_block_ptr != nullptr); + anvil_assert((m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::SPARSE_BINDING_BIT) == 0); + anvil_assert(m_mipmap_props.size() > 0); + anvil_assert(m_memory_blocks_owned.size() == 0); + anvil_assert(m_create_info_ptr->get_internal_type() != Anvil::ImageInternalType::SWAPCHAIN_WRAPPER); - /* Bind the memory object to the image object */ - if (device_type == Anvil::DEVICE_TYPE_SINGLE_GPU) + + if (in_n_device_group_indices > 0) { - result = vkBindImageMemory(device_locked_ptr->get_device_vk(), - m_image, - in_memory_block_ptr->get_memory(), - in_memory_block_ptr->get_start_offset() ); + if (!do_sanity_checks_for_physical_device_binding(in_memory_block_ptr, + in_n_device_group_indices) ) + { + anvil_assert_fail(); + + goto end; + } } - anvil_assert_vk_call_succeeded(result); - if (is_vk_call_successful(result) ) { - VkImageLayout src_image_layout = (m_tiling == VK_IMAGE_TILING_LINEAR && m_mipmaps_to_upload.size() > 0) ? 
VK_IMAGE_LAYOUT_PREINITIALIZED - : VK_IMAGE_LAYOUT_UNDEFINED; + std::vector bind_info_vec; + std::vector > struct_chain_ptrs; + + bind_info_vec.resize (n_planes); + struct_chain_ptrs.resize(n_planes); + + for (uint32_t n_current_plane = 0; + n_current_plane < n_planes; + ++n_current_plane) + { + Anvil::StructChainUniquePtr struct_chain_ptr; + Anvil::StructChainer struct_chainer; + + { + VkBindImageMemoryInfoKHR bind_info; + + bind_info.image = m_image; + bind_info.memory = in_memory_block_ptr->get_memory (); + bind_info.memoryOffset = in_memory_block_ptr->get_start_offset(); + bind_info.pNext = nullptr; + bind_info.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR; + + struct_chainer.append_struct(bind_info); + } + + if (in_n_device_group_indices > 0) + { + VkBindImageMemoryDeviceGroupInfoKHR bind_info_dg; + + bind_info_dg.deviceIndexCount = in_n_device_group_indices; + bind_info_dg.pDeviceIndices = in_device_group_indices_ptr; + bind_info_dg.pSplitInstanceBindRegions = in_SFRs_ptr; + bind_info_dg.splitInstanceBindRegionCount = in_n_SFR_rects; + bind_info_dg.pNext = nullptr; + bind_info_dg.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR; + + struct_chainer.append_struct(bind_info_dg); + } + + if (is_disjoint_yuv_image) + { + VkBindImagePlaneMemoryInfoKHR plane_info; - m_memory_block_ptr = in_memory_block_ptr; + plane_info.planeAspect = (n_current_plane == 0) ? VK_IMAGE_ASPECT_PLANE_0_BIT + : (n_current_plane == 1) ? VK_IMAGE_ASPECT_PLANE_1_BIT + : VK_IMAGE_ASPECT_PLANE_2_BIT; + plane_info.pNext = nullptr; + plane_info.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR; - /* Fill the storage with mipmap contents, if mipmap data was specified at input */ - if (m_mipmaps_to_upload.size() > 0) + struct_chainer.append_struct(plane_info); + + anvil_assert(n_current_plane >= 0 && + n_current_plane <= 2); + } + + struct_chain_ptr = struct_chainer.create_chain(); + + bind_info_vec.at (n_current_plane) = *struct_chain_ptr->get_root_struct(); + struct_chain_ptrs.at(n_current_plane) = std::move (struct_chain_ptr); + } + + anvil_assert(bind_info_vec.size() > 0); + + result = entrypoints.vkBindImageMemory2KHR(m_device_ptr->get_device_vk (), + static_cast (bind_info_vec.size() ), + reinterpret_cast(&bind_info_vec.at(0) )); + } + + anvil_assert_vk_call_succeeded(result); + if (is_vk_call_successful(result) ) + { + if (in_owned_by_image) { - upload_mipmaps(&m_mipmaps_to_upload, - src_image_layout, - &src_image_layout); + m_memory_blocks_owned.push_back( + MemoryBlockUniquePtr(in_memory_block_ptr, + std::default_delete() ) + ); } + else + m_memory_block_external = in_memory_block_ptr; - if (m_post_create_layout != VK_IMAGE_LAYOUT_PREINITIALIZED && - m_post_create_layout != VK_IMAGE_LAYOUT_UNDEFINED) { - const uint32_t n_mipmaps_to_upload = static_cast(m_mipmaps_to_upload.size()); - VkAccessFlags src_access_mask = 0; + const auto& mips_to_upload = m_create_info_ptr->get_mipmaps_to_upload(); + const auto tiling = m_create_info_ptr->get_tiling (); + Anvil::ImageLayout src_image_layout = (tiling == Anvil::ImageTiling::LINEAR && mips_to_upload.size() > 0) ? 
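/* For reference, the struct chain built above boils down to the following raw Vulkan calls for plane 0 of
 * a disjoint image (handles are placeholders; the device-group struct is only chained in when device
 * indices or SFR rects were supplied):
 *
 *     VkBindImagePlaneMemoryInfoKHR plane_info;
 *     plane_info.sType       = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR;
 *     plane_info.pNext       = nullptr;
 *     plane_info.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
 *
 *     VkBindImageMemoryInfoKHR bind_info;
 *     bind_info.sType        = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR;
 *     bind_info.pNext        = &plane_info;
 *     bind_info.image        = image_vk;
 *     bind_info.memory       = plane0_memory_vk;
 *     bind_info.memoryOffset = 0;
 *
 *     vkBindImageMemory2KHR(device_vk, 1, &bind_info);
 */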
Anvil::ImageLayout::PREINITIALIZED + : Anvil::ImageLayout::UNDEFINED; - if (n_mipmaps_to_upload > 0) + /* Fill the storage with mipmap contents, if mipmap data was specified at input */ + if (mips_to_upload.size() > 0) { - if (m_tiling == VK_IMAGE_TILING_LINEAR) - { - src_access_mask = VK_ACCESS_HOST_WRITE_BIT; - } - else + upload_mipmaps(&mips_to_upload, + src_image_layout, + &src_image_layout); + } + + if (m_create_info_ptr->get_post_alloc_image_layout() != m_create_info_ptr->get_post_create_image_layout() ) + { + const uint32_t n_mipmaps_to_upload = static_cast(mips_to_upload.size()); + Anvil::AccessFlags src_access_mask; + + if (n_mipmaps_to_upload > 0) { - src_access_mask = VK_ACCESS_TRANSFER_WRITE_BIT; + if (tiling == Anvil::ImageTiling::LINEAR) + { + src_access_mask = Anvil::AccessFlagBits::HOST_WRITE_BIT; + } + else + { + src_access_mask = Anvil::AccessFlagBits::TRANSFER_WRITE_BIT; + } } + + transition_to_post_alloc_image_layout(src_access_mask, + src_image_layout); } - transition_to_post_create_image_layout(src_access_mask, - src_image_layout); + m_create_info_ptr->clear_mipmaps_to_upload(); } - m_mipmaps_to_upload.clear(); } +end: return is_vk_call_successful(result); } -/* Transitions the underlying Vulkan image to the layout stored in m_post_create_layout. - * - * @param in_source_access_mask All access types used to fill the image with data. - * @param in_src_layout Layout to transition from. - **/ -void Anvil::Image::transition_to_post_create_image_layout(VkAccessFlags in_source_access_mask, - VkImageLayout in_src_layout) +bool Anvil::Image::set_swapchain_memory_internal(uint32_t in_swapchain_image_index, + uint32_t in_opt_n_SFR_rects, + const VkRect2D* in_opt_SFRs_ptr, + uint32_t in_opt_n_device_indices, + const uint32_t* in_opt_device_indices) +{ + const auto& entrypoints (m_device_ptr->get_extension_khr_bind_memory2_entrypoints() ); + VkResult result (VK_ERROR_INITIALIZATION_FAILED); + Anvil::StructChainer struct_chainer; + + /* Sanity checks */ + anvil_assert((m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::CREATE_DISJOINT_BIT) == 0); + anvil_assert((m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::SPARSE_BINDING_BIT) == 0); + anvil_assert(m_mipmap_props.size () == 1); + anvil_assert(m_peer_device_indices.size () == 0); + anvil_assert(m_peer_sfr_rects.size () == 0); + anvil_assert(m_create_info_ptr->get_internal_type() == Anvil::ImageInternalType::PEER_NO_ALLOC || + m_create_info_ptr->get_internal_type() == Anvil::ImageInternalType::SWAPCHAIN_WRAPPER); + anvil_assert(m_create_info_ptr->get_swapchain()->get_n_images() > in_swapchain_image_index); + + if (!m_device_ptr->is_extension_enabled(VK_KHR_DEVICE_GROUP_EXTENSION_NAME) ) + { + anvil_assert(m_device_ptr->is_extension_enabled(VK_KHR_DEVICE_GROUP_EXTENSION_NAME) ); + + goto end; + } + + if (m_swapchain_memory_assigned) + { + anvil_assert(!m_swapchain_memory_assigned); + + goto end; + } + + if (in_opt_n_SFR_rects != 0 && in_opt_n_device_indices != 0) + { + anvil_assert(!(in_opt_n_SFR_rects != 0 && in_opt_n_device_indices != 0) ); + + goto end; + } + + if (in_opt_n_SFR_rects > 0) + { + if (!do_sanity_checks_for_sfr_binding(in_opt_n_SFR_rects, + in_opt_SFRs_ptr) ) + { + anvil_assert_fail(); + + goto end; + } + } + + if (in_opt_n_device_indices > 0) + { + const Anvil::MGPUDevice* mgpu_device_ptr(dynamic_cast(m_device_ptr) ); + + anvil_assert(mgpu_device_ptr != nullptr); + + if (in_opt_n_device_indices != mgpu_device_ptr->get_n_physical_devices() ) + { + anvil_assert_fail(); + + goto 
end; + } + } + + /* Bind the memory object to the image object */ + { + VkBindImageMemoryInfoKHR bind_info; + + bind_info.image = m_image; + bind_info.memory = VK_NULL_HANDLE; + bind_info.memoryOffset = 0; + bind_info.pNext = nullptr; + bind_info.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR; + + struct_chainer.append_struct(bind_info); + } + + { + VkBindImageMemoryDeviceGroupInfoKHR bind_info_dg; + + bind_info_dg.deviceIndexCount = in_opt_n_device_indices; + bind_info_dg.pDeviceIndices = in_opt_device_indices; + bind_info_dg.pSplitInstanceBindRegions = in_opt_SFRs_ptr; + bind_info_dg.splitInstanceBindRegionCount = in_opt_n_SFR_rects; + bind_info_dg.pNext = nullptr; + bind_info_dg.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR; + + struct_chainer.append_struct(bind_info_dg); + } + + { + VkBindImageMemorySwapchainInfoKHR bind_swapchain_info; + + bind_swapchain_info.imageIndex = in_swapchain_image_index; + bind_swapchain_info.pNext = nullptr; + bind_swapchain_info.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR; + bind_swapchain_info.swapchain = m_create_info_ptr->get_swapchain()->get_swapchain_vk(); + + struct_chainer.append_struct(bind_swapchain_info); + } + + m_create_info_ptr->get_swapchain()->lock(); + { + auto chain_ptr = struct_chainer.create_chain(); + + result = entrypoints.vkBindImageMemory2KHR(m_device_ptr->get_device_vk(), + 1, /* bindInfoCount */ + chain_ptr->get_root_struct() ); + } + m_create_info_ptr->get_swapchain()->unlock(); + + anvil_assert_vk_call_succeeded(result); + + if (is_vk_call_successful(result) ) + { + m_swapchain_memory_assigned = true; + + for (uint32_t n_SFR_rect = 0; + n_SFR_rect < in_opt_n_SFR_rects; + ++n_SFR_rect) + { + m_peer_sfr_rects.push_back(in_opt_SFRs_ptr[n_SFR_rect]); + } + + for (uint32_t n_device_index = 0; + n_device_index < in_opt_n_device_indices; + ++n_device_index) + { + m_peer_device_indices.push_back(in_opt_device_indices[n_device_index]); + } + } +end: + return is_vk_call_successful(result); +} + +void Anvil::Image::transition_to_post_alloc_image_layout(Anvil::AccessFlags in_source_access_mask, + Anvil::ImageLayout in_src_layout) { - std::shared_ptr device_locked_ptr(m_device_ptr); + const auto post_alloc_layout = m_create_info_ptr->get_post_alloc_image_layout(); - anvil_assert(!m_has_transitioned_to_post_create_layout); + anvil_assert(!m_has_transitioned_to_post_alloc_layout); - change_image_layout(device_locked_ptr->get_universal_queue(0), + change_image_layout(m_device_ptr->get_universal_queue(0), in_source_access_mask, in_src_layout, - Anvil::Utils::get_access_mask_from_image_layout(m_post_create_layout), - m_post_create_layout, + Anvil::Utils::get_access_mask_from_image_layout(post_alloc_layout), + post_alloc_layout, get_subresource_range() ); - m_has_transitioned_to_post_create_layout = true; + m_has_transitioned_to_post_alloc_layout = true; } /** Please see header for specification */ void Anvil::Image::upload_mipmaps(const std::vector* in_mipmaps_ptr, - VkImageLayout in_current_image_layout, - VkImageLayout* out_new_image_layout_ptr) + Anvil::ImageLayout in_current_image_layout, + Anvil::ImageLayout* out_new_image_layout_ptr) { - std::shared_ptr device_locked_ptr (m_device_ptr); - std::map > image_aspect_to_mipmap_raw_data_map; - VkImageAspectFlags image_aspects_touched (0); - std::shared_ptr image_mem_block_ptr; - VkImageSubresourceRange image_subresource_range; - const VkDeviceSize sparse_page_size ( (m_page_tracker_ptr != nullptr) ? 
m_page_tracker_ptr->get_page_size() : 0); - const std::shared_ptr universal_queue_ptr (device_locked_ptr->get_universal_queue(0) ); + std::map > image_aspect_to_mipmap_raw_data_map; + Anvil::ImageAspectFlags image_aspects_touched; + Anvil::ImageSubresourceRange image_subresource_range; + Anvil::Queue* universal_queue_ptr (m_device_ptr->get_universal_queue(0) ); /* Make sure image has been assigned at least one memory block before we go ahead with the upload process */ - image_mem_block_ptr = get_memory_block(); + get_memory_block(); /* Each image aspect needs to be modified separately. Iterate over the input vector and move MipmapRawData * to separate vectors corresponding to which aspect they need to update. */ @@ -1941,59 +2322,67 @@ void Anvil::Image::upload_mipmaps(const std::vector* in_mipmaps_p * For optimal tiling, we need to copy the raw data to temporary buffer * and use vkCmdCopyBufferToImage() to let the driver rearrange the data as needed. */ - image_subresource_range.aspectMask = image_aspects_touched; - image_subresource_range.baseArrayLayer = 0; - image_subresource_range.baseMipLevel = 0; - image_subresource_range.layerCount = m_n_layers; - image_subresource_range.levelCount = m_n_mipmaps; + image_subresource_range.aspect_mask = image_aspects_touched; + image_subresource_range.base_array_layer = 0; + image_subresource_range.base_mip_level = 0; + image_subresource_range.layer_count = m_create_info_ptr->get_n_layers(); + image_subresource_range.level_count = m_n_mipmaps; - if (m_tiling == VK_IMAGE_TILING_LINEAR) + if (m_create_info_ptr->get_tiling() == Anvil::ImageTiling::LINEAR) { /* TODO: Transition the subresource ranges, if necessary. */ - anvil_assert(in_current_image_layout != VK_IMAGE_LAYOUT_UNDEFINED); + anvil_assert(in_current_image_layout != Anvil::ImageLayout::UNDEFINED); for (auto aspect_to_mipmap_data_iterator = image_aspect_to_mipmap_raw_data_map.begin(); aspect_to_mipmap_data_iterator != image_aspect_to_mipmap_raw_data_map.end(); ++aspect_to_mipmap_data_iterator) { - VkImageAspectFlagBits current_aspect = aspect_to_mipmap_data_iterator->first; - const auto& mipmap_raw_data_items = aspect_to_mipmap_data_iterator->second; + Anvil::ImageAspectFlagBits current_aspect = aspect_to_mipmap_data_iterator->first; + const uint32_t current_plane_index = (current_aspect == Anvil::ImageAspectFlagBits::PLANE_1_BIT) ? 1 + : (current_aspect == Anvil::ImageAspectFlagBits::PLANE_2_BIT) ? 2 + : 0; + const auto& current_plane_mem_reqs = m_plane_index_to_memory_properties_map.at(current_plane_index); + const auto& mipmap_raw_data_items = aspect_to_mipmap_data_iterator->second; + const VkDeviceSize sparse_page_size = (current_plane_mem_reqs.page_tracker_ptr != nullptr) ? 
current_plane_mem_reqs.page_tracker_ptr->get_page_size() + : 0; for (auto mipmap_raw_data_item_iterator = mipmap_raw_data_items.begin(); mipmap_raw_data_item_iterator != mipmap_raw_data_items.end(); ++mipmap_raw_data_item_iterator) { - const unsigned char* current_mipmap_data_ptr = nullptr; - uint32_t current_mipmap_height = 0; - const auto& current_mipmap_raw_data_item_ptr = *mipmap_raw_data_item_iterator; - uint32_t current_mipmap_slices = 0; - uint32_t current_row_size = 0; - uint32_t current_slice_size = 0; - VkDeviceSize dst_slice_offset = 0; - const unsigned char* src_slice_ptr = nullptr; - VkImageSubresource image_subresource; - VkSubresourceLayout image_subresource_layout; + const unsigned char* current_mipmap_data_ptr = nullptr; + uint32_t current_mipmap_height = 0; + const auto& current_mipmap_raw_data_item_ptr = *mipmap_raw_data_item_iterator; + uint32_t current_mipmap_slices = 0; + uint32_t current_row_size = 0; + uint32_t current_slice_size = 0; + VkDeviceSize dst_slice_offset = 0; + const unsigned char* src_slice_ptr = nullptr; + Anvil::ImageSubresource image_subresource; + Anvil::SubresourceLayout image_subresource_layout; current_mipmap_data_ptr = (current_mipmap_raw_data_item_ptr->linear_tightly_packed_data_uchar_ptr != nullptr) ? current_mipmap_raw_data_item_ptr->linear_tightly_packed_data_uchar_ptr.get() : (current_mipmap_raw_data_item_ptr->linear_tightly_packed_data_uchar_raw_ptr != nullptr) ? current_mipmap_raw_data_item_ptr->linear_tightly_packed_data_uchar_raw_ptr : &(*current_mipmap_raw_data_item_ptr->linear_tightly_packed_data_uchar_vec_ptr)[0]; - image_subresource.arrayLayer = current_mipmap_raw_data_item_ptr->n_layer; - image_subresource.aspectMask = current_aspect; - image_subresource.mipLevel = current_mipmap_raw_data_item_ptr->n_mipmap; + image_subresource.array_layer = current_mipmap_raw_data_item_ptr->n_layer; + image_subresource.aspect_mask = current_aspect; + image_subresource.mip_level = current_mipmap_raw_data_item_ptr->n_mipmap; - vkGetImageSubresourceLayout(device_locked_ptr->get_device_vk(), - m_image, - &image_subresource, - &image_subresource_layout); + Anvil::Vulkan::vkGetImageSubresourceLayout(m_device_ptr->get_device_vk(), + m_image, + reinterpret_cast(&image_subresource), + reinterpret_cast (&image_subresource_layout) ); /* Determine row size for the mipmap. * * NOTE: Current implementation can only handle power-of-two textures. 
*/ - anvil_assert(m_height == 1 || (m_height % 2) == 0); + const auto base_mip_height = m_create_info_ptr->get_base_mip_height(); + + // anvil_assert(base_mip_height == 1 || (base_mip_height % 2) == 0); - current_mipmap_height = m_height / (1 << current_mipmap_raw_data_item_ptr->n_mipmap); + current_mipmap_height = base_mip_height / (1 << current_mipmap_raw_data_item_ptr->n_mipmap); if (current_mipmap_height < 1) { @@ -2013,24 +2402,29 @@ void Anvil::Image::upload_mipmaps(const std::vector* in_mipmaps_p n_slice < current_mipmap_slices; ++n_slice) { - dst_slice_offset = image_subresource_layout.offset + - image_subresource_layout.depthPitch * n_slice; - src_slice_ptr = current_mipmap_data_ptr + - current_slice_size * n_slice; + dst_slice_offset = image_subresource_layout.offset + + image_subresource_layout.depth_pitch * n_slice; + src_slice_ptr = current_mipmap_data_ptr + + current_slice_size * n_slice; for (unsigned int n_row = 0; n_row < current_mipmap_height; - ++n_row, dst_slice_offset += image_subresource_layout.rowPitch) + ++n_row, dst_slice_offset += image_subresource_layout.row_pitch) { - std::shared_ptr mem_block_ptr; - VkDeviceSize write_dst_slice_offset = dst_slice_offset; + Anvil::MemoryBlock* mem_block_ptr = nullptr; + VkDeviceSize write_dst_slice_offset = dst_slice_offset; - if (!m_is_sparse) + if ((m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::SPARSE_BINDING_BIT) == 0) { - mem_block_ptr = m_memory_block_ptr; + anvil_assert(m_memory_blocks_owned.size() == 1 || m_memory_block_external != nullptr); + + mem_block_ptr = m_memory_block_external; + if(mem_block_ptr == nullptr) + mem_block_ptr = m_memory_blocks_owned.at(0).get(); } else - if (m_residency_scope == SPARSE_RESIDENCY_SCOPE_NONE) + if ((m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::SPARSE_BINDING_BIT) != 0 && + (m_create_info_ptr->get_create_flags() & Anvil::ImageCreateFlagBits::SPARSE_RESIDENCY_BIT) == 0) { const VkDeviceSize dst_slice_offset_page_aligned = Anvil::Utils::round_down(dst_slice_offset, sparse_page_size); @@ -2039,15 +2433,19 @@ void Anvil::Image::upload_mipmaps(const std::vector* in_mipmaps_p anvil_assert(sparse_page_size != 0); anvil_assert(( dst_slice_offset_page_aligned % sparse_page_size) == 0); - mem_block_ptr = m_page_tracker_ptr->get_memory_block(dst_slice_offset_page_aligned, - &memory_region_start_offset); + mem_block_ptr = current_plane_mem_reqs.page_tracker_ptr->get_memory_block(dst_slice_offset_page_aligned, + sparse_page_size, + &memory_region_start_offset); anvil_assert(mem_block_ptr != nullptr); - if (dst_slice_offset + image_subresource_layout.rowPitch > dst_slice_offset_page_aligned + sparse_page_size) + if (dst_slice_offset + image_subresource_layout.row_pitch > dst_slice_offset_page_aligned + sparse_page_size) { VkDeviceSize dummy; - auto mem_block2_ptr = m_page_tracker_ptr->get_memory_block(dst_slice_offset_page_aligned + sparse_page_size, - &dummy); + auto mem_block2_ptr = current_plane_mem_reqs.page_tracker_ptr->get_memory_block(dst_slice_offset_page_aligned + sparse_page_size, + sparse_page_size, + &dummy); + + ANVIL_REDUNDANT_VARIABLE(mem_block2_ptr); // todo: the slice spans across >1 memory blocks. need more than just one write op to handle this case correctly. 
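/* A concrete illustration of the case the TODO above refers to, with made-up numbers: if sparse_page_size
 * is 0x10000 and dst_slice_offset is 0x2A340, then dst_slice_offset_page_aligned = round_down(0x2A340,
 * 0x10000) = 0x20000. A row with row_pitch 0x8000 written from 0x2A340 would end at 0x32340, i.e. past
 * 0x30000 (the end of that page), so the row straddles two pages and potentially two memory blocks. */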
anvil_assert(mem_block2_ptr == mem_block_ptr); @@ -2069,15 +2467,15 @@ void Anvil::Image::upload_mipmaps(const std::vector* in_mipmaps_p } } - *out_new_image_layout_ptr = VK_IMAGE_LAYOUT_PREINITIALIZED; + *out_new_image_layout_ptr = in_current_image_layout; } else { - anvil_assert(m_tiling == VK_IMAGE_TILING_OPTIMAL); + anvil_assert(m_create_info_ptr->get_tiling() == Anvil::ImageTiling::OPTIMAL); - std::shared_ptr temp_buffer_ptr; - std::shared_ptr temp_cmdbuf_ptr; - VkDeviceSize total_raw_mips_size = 0; + Anvil::BufferUniquePtr temp_buffer_ptr; + Anvil::PrimaryCommandBufferUniquePtr temp_cmdbuf_ptr; + VkDeviceSize total_raw_mips_size = 0; /* Count how much space all specified mipmaps take in raw format. */ for (auto mipmap_iterator = in_mipmaps_ptr->cbegin(); @@ -2092,7 +2490,8 @@ void Anvil::Image::upload_mipmaps(const std::vector* in_mipmaps_p */ if ((total_raw_mips_size % 4) != 0) { - total_raw_mips_size += (4 - (total_raw_mips_size % 4)); + total_raw_mips_size = Anvil::Utils::round_up(total_raw_mips_size, + static_cast(4) ); } } @@ -2106,14 +2505,17 @@ void Anvil::Image::upload_mipmaps(const std::vector* in_mipmaps_p /* NOTE: The memcpy() call, as well as the way we implement copy op calls below, assume * POT resolution of the base mipmap */ - anvil_assert(m_height < 2 || (m_height % 2) == 0); + const auto base_mip_height = m_create_info_ptr->get_base_mip_height(); + + // anvil_assert(base_mip_height < 2 || (base_mip_height % 2) == 0); for (auto mipmap_iterator = in_mipmaps_ptr->cbegin(); mipmap_iterator != in_mipmaps_ptr->cend(); ++mipmap_iterator) { - const auto& current_mipmap = *mipmap_iterator; + const auto& current_mipmap = *mipmap_iterator; const unsigned char* current_mipmap_data_ptr; + const auto current_mipmap_data_size = current_mipmap.n_slices * current_mipmap.data_size; current_mipmap_data_ptr = (mipmap_iterator->linear_tightly_packed_data_uchar_ptr != nullptr) ? mipmap_iterator->linear_tightly_packed_data_uchar_ptr.get() : (mipmap_iterator->linear_tightly_packed_data_uchar_raw_ptr != nullptr) ? 
mipmap_iterator->linear_tightly_packed_data_uchar_raw_ptr @@ -2121,9 +2523,11 @@ void Anvil::Image::upload_mipmaps(const std::vector* in_mipmaps_p mip_data_offsets.push_back(current_mip_offset); + anvil_assert(current_mip_offset + current_mipmap_data_size <= total_raw_mips_size); + memcpy(merged_mip_storage.get() + current_mip_offset, current_mipmap_data_ptr, - current_mipmap.n_slices * current_mipmap.data_size); + current_mipmap_data_size); current_mip_offset += current_mipmap.n_slices * current_mipmap.data_size; @@ -2137,42 +2541,51 @@ void Anvil::Image::upload_mipmaps(const std::vector* in_mipmaps_p } } - temp_buffer_ptr = Anvil::Buffer::create_nonsparse(m_device_ptr, - total_raw_mips_size, - Anvil::QUEUE_FAMILY_GRAPHICS_BIT, - VK_SHARING_MODE_EXCLUSIVE, - VK_BUFFER_USAGE_TRANSFER_SRC_BIT, - 0, /* in_memory_features */ - merged_mip_storage.get() ); + { + auto create_info_ptr = Anvil::BufferCreateInfo::create_alloc(m_device_ptr, + total_raw_mips_size, + Anvil::QueueFamilyFlagBits::GRAPHICS_BIT, + Anvil::SharingMode::EXCLUSIVE, + Anvil::BufferCreateFlagBits::NONE, + Anvil::BufferUsageFlagBits::TRANSFER_SRC_BIT, + Anvil::MemoryFeatureFlagBits::NONE); + + create_info_ptr->set_client_data(merged_mip_storage.get() ); + create_info_ptr->set_mt_safety (Anvil::Utils::convert_boolean_to_mt_safety_enum(is_mt_safe() )); + + temp_buffer_ptr = Anvil::Buffer::create(std::move(create_info_ptr) ); + } merged_mip_storage.reset(); /* Set up a command buffer we will use to copy the data to the image */ - temp_cmdbuf_ptr = device_locked_ptr->get_command_pool(Anvil::QUEUE_FAMILY_TYPE_UNIVERSAL)->alloc_primary_level_command_buffer(); + temp_cmdbuf_ptr = m_device_ptr->get_command_pool_for_queue_family_index(universal_queue_ptr->get_queue_family_index() )->alloc_primary_level_command_buffer(); anvil_assert(temp_cmdbuf_ptr != nullptr); temp_cmdbuf_ptr->start_recording(true, /* one_time_submit */ false /* simultaneous_use_allowed */); { - std::vector copy_regions; + std::vector copy_regions; /* Transfer the image to the transfer_destination layout if not already in this or general layout */ - if (in_current_image_layout != VK_IMAGE_LAYOUT_GENERAL && - in_current_image_layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) + if (in_current_image_layout != Anvil::ImageLayout::GENERAL && + in_current_image_layout != Anvil::ImageLayout::TRANSFER_DST_OPTIMAL) { - Anvil::ImageBarrier image_barrier(0, /* source_access_mask */ - VK_ACCESS_TRANSFER_WRITE_BIT, - false, - in_current_image_layout, - VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, - (m_sharing_mode == VK_SHARING_MODE_EXCLUSIVE) ? universal_queue_ptr->get_queue_family_index() : VK_QUEUE_FAMILY_IGNORED, - (m_sharing_mode == VK_SHARING_MODE_EXCLUSIVE) ? universal_queue_ptr->get_queue_family_index() : VK_QUEUE_FAMILY_IGNORED, - shared_from_this(), - image_subresource_range); - - temp_cmdbuf_ptr->record_pipeline_barrier(VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, - VK_PIPELINE_STAGE_TRANSFER_BIT, - VK_FALSE, /* in_by_region */ + const auto sharing_mode (m_create_info_ptr->get_sharing_mode() ); + const auto queue_fam_index((sharing_mode == Anvil::SharingMode::EXCLUSIVE) ? 
universal_queue_ptr->get_queue_family_index() : VK_QUEUE_FAMILY_IGNORED); + + Anvil::ImageBarrier image_barrier (Anvil::AccessFlagBits::NONE, /* source_access_mask */ + Anvil::AccessFlagBits::TRANSFER_WRITE_BIT, + in_current_image_layout, + Anvil::ImageLayout::TRANSFER_DST_OPTIMAL, + queue_fam_index, + queue_fam_index, + this, + image_subresource_range); + + temp_cmdbuf_ptr->record_pipeline_barrier(Anvil::PipelineStageFlagBits::TOP_OF_PIPE_BIT, + Anvil::PipelineStageFlagBits::TRANSFER_BIT, + Anvil::DependencyFlagBits::NONE, 0, /* in_memory_barrier_count */ nullptr, /* in_memory_barrier_ptrs */ 0, /* in_buffer_memory_barrier_count */ @@ -2180,7 +2593,7 @@ void Anvil::Image::upload_mipmaps(const std::vector* in_mipmaps_p 1, /* in_image_memory_barrier_count */ &image_barrier); - *out_new_image_layout_ptr = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL; + *out_new_image_layout_ptr = Anvil::ImageLayout::TRANSFER_DST_OPTIMAL; } else { @@ -2194,51 +2607,71 @@ void Anvil::Image::upload_mipmaps(const std::vector* in_mipmaps_p mipmap_iterator != in_mipmaps_ptr->cend(); ++mipmap_iterator) { - VkBufferImageCopy current_copy_region; - const auto& current_mipmap = *mipmap_iterator; - - current_copy_region.bufferImageHeight = std::max(m_height / (1 << current_mipmap.n_mipmap), 1u); - current_copy_region.bufferOffset = mip_data_offsets[static_cast(mipmap_iterator - in_mipmaps_ptr->cbegin()) ]; - current_copy_region.bufferRowLength = 0; - current_copy_region.imageOffset.x = 0; - current_copy_region.imageOffset.y = 0; - current_copy_region.imageOffset.z = 0; - current_copy_region.imageSubresource.baseArrayLayer = current_mipmap.n_layer; - current_copy_region.imageSubresource.layerCount = current_mipmap.n_layers; - current_copy_region.imageSubresource.aspectMask = current_mipmap.aspect; - current_copy_region.imageSubresource.mipLevel = current_mipmap.n_mipmap; - current_copy_region.imageExtent.depth = current_mipmap.n_slices; - current_copy_region.imageExtent.height = std::max(m_height / (1 << current_mipmap.n_mipmap), 1u); - current_copy_region.imageExtent.width = std::max(m_width / (1 << current_mipmap.n_mipmap), 1u); - - if (current_copy_region.imageExtent.depth < 1) + const auto base_mip_width = m_create_info_ptr->get_base_mip_width (); + Anvil::BufferImageCopy current_copy_region; + const auto& current_mipmap = *mipmap_iterator; + + current_copy_region.buffer_image_height = std::max(base_mip_height / (1 << current_mipmap.n_mipmap), 1u); + current_copy_region.buffer_offset = mip_data_offsets[static_cast(mipmap_iterator - in_mipmaps_ptr->cbegin()) ]; + current_copy_region.buffer_row_length = 0; + current_copy_region.image_offset.x = 0; + current_copy_region.image_offset.y = 0; + current_copy_region.image_offset.z = 0; + current_copy_region.image_subresource.base_array_layer = current_mipmap.n_layer; + current_copy_region.image_subresource.layer_count = current_mipmap.n_layers; + current_copy_region.image_subresource.aspect_mask = current_mipmap.aspect; + current_copy_region.image_subresource.mip_level = current_mipmap.n_mipmap; + current_copy_region.image_extent.depth = current_mipmap.n_slices; + current_copy_region.image_extent.height = std::max(base_mip_height / (1 << current_mipmap.n_mipmap), 1u); + current_copy_region.image_extent.width = std::max(base_mip_width / (1 << current_mipmap.n_mipmap), 1u); + + if (current_copy_region.image_extent.depth < 1) { - current_copy_region.imageExtent.depth = 1; + current_copy_region.image_extent.depth = 1; } - if (current_copy_region.imageExtent.height < 1) + if 
(current_copy_region.image_extent.height < 1) { - current_copy_region.imageExtent.height = 1; + current_copy_region.image_extent.height = 1; } - if (current_copy_region.imageExtent.width < 1) + if (current_copy_region.image_extent.width < 1) { - current_copy_region.imageExtent.width = 1; + current_copy_region.image_extent.width = 1; } copy_regions.push_back(current_copy_region); } - temp_cmdbuf_ptr->record_copy_buffer_to_image(temp_buffer_ptr, - shared_from_this(), - *out_new_image_layout_ptr, - static_cast(copy_regions.size() ), - ©_regions[0]); + /* Issue the copy ops. */ + const uint32_t n_copy_regions = static_cast(copy_regions.size() ); + static const uint32_t n_max_copy_regions_per_copy_call = 1024; + + for (uint32_t n_copy_region = 0; + n_copy_region < n_copy_regions; + n_copy_region += n_max_copy_regions_per_copy_call) + { + const uint32_t n_copy_regions_to_use = std::min(n_max_copy_regions_per_copy_call, + n_copy_regions - n_copy_region); + + temp_cmdbuf_ptr->record_copy_buffer_to_image(temp_buffer_ptr.get(), + this, + *out_new_image_layout_ptr, + n_copy_regions_to_use, + ©_regions.at(n_copy_region) ); + } } temp_cmdbuf_ptr->stop_recording(); /* Execute the command buffer */ - universal_queue_ptr->submit_command_buffer(temp_cmdbuf_ptr, - true /* should_block */); + { + Anvil::CommandBufferBase* cmd_buffer_raw_ptr = temp_cmdbuf_ptr.get(); + + universal_queue_ptr->submit( + Anvil::SubmitInfo::create_execute(&cmd_buffer_raw_ptr, + 1, /* in_n_cmd_buffers */ + true) /* should_block */ + ); + } } -} \ No newline at end of file +} diff --git a/src/wrappers/image_view.cpp b/src/wrappers/image_view.cpp index 8e87c9e3..0cdea88e 100644 --- a/src/wrappers/image_view.cpp +++ b/src/wrappers/image_view.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2019 Advanced Micro Devices, Inc. All rights reserved. 
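Note on the upload_mipmaps() rework above: the optimal-tiling path now stages all mip data in a transfer-source buffer and records vkCmdCopyBufferToImage in batches of at most 1024 regions (n_max_copy_regions_per_copy_call) instead of issuing one call covering every region. Below is a minimal sketch of that chunking pattern written against the raw Vulkan API; the helper name and parameters are illustrative only, and it assumes the command buffer is already recording and the image is already in TRANSFER_DST_OPTIMAL.

#include <vulkan/vulkan.h>
#include <algorithm>
#include <cstdint>
#include <vector>

/* Records one vkCmdCopyBufferToImage() per chunk of at most 1024 regions,
 * matching the cap used in the diff. Error handling is omitted. */
static void record_batched_buffer_to_image_copies(VkCommandBuffer                       in_cmd_buffer,
                                                  VkBuffer                              in_staging_buffer,
                                                  VkImage                               in_target_image,
                                                  const std::vector<VkBufferImageCopy>& in_regions)
{
    static const uint32_t max_regions_per_call = 1024;
    const uint32_t        n_regions            = static_cast<uint32_t>(in_regions.size() );

    for (uint32_t first_region = 0;
                  first_region < n_regions;
                  first_region += max_regions_per_call)
    {
        const uint32_t n_regions_this_call = std::min(max_regions_per_call,
                                                      n_regions - first_region);

        vkCmdCopyBufferToImage(in_cmd_buffer,
                               in_staging_buffer,
                               in_target_image,
                               VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                               n_regions_this_call,
                               &in_regions[first_region]);
    }
}

In the diff, the equivalent loop feeds record_copy_buffer_to_image() on the temporary primary command buffer before the blocking submit.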
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -21,328 +21,65 @@ // #include "misc/debug.h" +#include "misc/image_create_info.h" +#include "misc/image_view_create_info.h" #include "misc/object_tracker.h" +#include "misc/struct_chainer.h" #include "wrappers/device.h" #include "wrappers/image.h" #include "wrappers/image_view.h" +#include "wrappers/sampler_ycbcr_conversion.h" /* Please see header for specification */ -std::shared_ptr Anvil::ImageView::create_1D(std::weak_ptr in_device_ptr, - std::shared_ptr in_image_ptr, - uint32_t in_n_base_layer, - uint32_t in_n_base_mipmap_level, - uint32_t in_n_mipmaps, - VkImageAspectFlags in_aspect_mask, - VkFormat in_format, - VkComponentSwizzle in_swizzle_red, - VkComponentSwizzle in_swizzle_green, - VkComponentSwizzle in_swizzle_blue, - VkComponentSwizzle in_swizzle_alpha) +Anvil::ImageViewUniquePtr Anvil::ImageView::create(Anvil::ImageViewCreateInfoUniquePtr in_create_info_ptr) { - std::shared_ptr new_image_view_ptr (new ImageView(in_device_ptr, - in_image_ptr) ); - const VkComponentSwizzle swizzle_rgba[] = - { - in_swizzle_red, - in_swizzle_green, - in_swizzle_blue, - in_swizzle_alpha - }; - - if (!new_image_view_ptr->init(VK_IMAGE_VIEW_TYPE_1D, - in_n_base_layer, - 1, /* n_layers */ - 1, /* n_slices */ - in_n_base_mipmap_level, - in_n_mipmaps, - in_aspect_mask, - in_format, - swizzle_rgba) ) - { - new_image_view_ptr = nullptr; - } - - return new_image_view_ptr; -} - -/* Please see header for specification */ -std::shared_ptr Anvil::ImageView::create_1D_array(std::weak_ptr in_device_ptr, - std::shared_ptr in_image_ptr, - uint32_t in_n_base_layer, - uint32_t in_n_layers, - uint32_t in_n_base_mipmap_level, - uint32_t in_n_mipmaps, - VkImageAspectFlags in_aspect_mask, - VkFormat in_format, - VkComponentSwizzle in_swizzle_red, - VkComponentSwizzle in_swizzle_green, - VkComponentSwizzle in_swizzle_blue, - VkComponentSwizzle in_swizzle_alpha) -{ - std::shared_ptr new_image_view_ptr(new ImageView(in_device_ptr, - in_image_ptr) ); - const VkComponentSwizzle swizzle_rgba[] = - { - in_swizzle_red, - in_swizzle_green, - in_swizzle_blue, - in_swizzle_alpha - }; - - if (!new_image_view_ptr->init(VK_IMAGE_VIEW_TYPE_1D_ARRAY, - in_n_base_layer, - in_n_layers, - 1, /* n_slices */ - in_n_base_mipmap_level, - in_n_mipmaps, - in_aspect_mask, - in_format, - swizzle_rgba) ) - { - new_image_view_ptr = nullptr; - } - - return new_image_view_ptr; -} - -/* Please see header for specification */ -std::shared_ptr Anvil::ImageView::create_2D(std::weak_ptr in_device_ptr, - std::shared_ptr in_image_ptr, - uint32_t in_n_base_layer, - uint32_t in_n_base_mipmap_level, - uint32_t in_n_mipmaps, - VkImageAspectFlags in_aspect_mask, - VkFormat in_format, - VkComponentSwizzle in_swizzle_red, - VkComponentSwizzle in_swizzle_green, - VkComponentSwizzle in_swizzle_blue, - VkComponentSwizzle in_swizzle_alpha) -{ - std::shared_ptr new_image_view_ptr(new ImageView(in_device_ptr, - in_image_ptr) ); - const VkComponentSwizzle swizzle_rgba[] = - { - in_swizzle_red, - in_swizzle_green, - in_swizzle_blue, - in_swizzle_alpha - }; - - if (!new_image_view_ptr->init(VK_IMAGE_VIEW_TYPE_2D, - in_n_base_layer, - 1, /* n_layers */ - 1, /* n_slices */ - in_n_base_mipmap_level, - in_n_mipmaps, - in_aspect_mask, - in_format, - swizzle_rgba) ) - { - new_image_view_ptr = nullptr; - } - - return new_image_view_ptr; -} - -/* Please see header for specification */ -std::shared_ptr 
Anvil::ImageView::create_2D_array(std::weak_ptr in_device_ptr, - std::shared_ptr in_image_ptr, - uint32_t in_n_base_layer, - uint32_t in_n_layers, - uint32_t in_n_base_mipmap_level, - uint32_t in_n_mipmaps, - VkImageAspectFlags in_aspect_mask, - VkFormat in_format, - VkComponentSwizzle in_swizzle_red, - VkComponentSwizzle in_swizzle_green, - VkComponentSwizzle in_swizzle_blue, - VkComponentSwizzle in_swizzle_alpha) -{ - std::shared_ptr new_image_view_ptr(new ImageView(in_device_ptr, - in_image_ptr) ); - const VkComponentSwizzle swizzle_rgba[] = - { - in_swizzle_red, - in_swizzle_green, - in_swizzle_blue, - in_swizzle_alpha - }; - - if (!new_image_view_ptr->init(VK_IMAGE_VIEW_TYPE_2D_ARRAY, - in_n_base_layer, - in_n_layers, - 1, /* n_slices */ - in_n_base_mipmap_level, - in_n_mipmaps, - in_aspect_mask, - in_format, - swizzle_rgba) ) - { - new_image_view_ptr = nullptr; - } - - return new_image_view_ptr; -} + Anvil::ImageViewUniquePtr result_ptr = Anvil::ImageViewUniquePtr(nullptr, + std::default_delete() ); -/* Please see header for specification */ -std::shared_ptr Anvil::ImageView::create_3D(std::weak_ptr in_device_ptr, - std::shared_ptr in_image_ptr, - uint32_t in_n_base_slice, - uint32_t in_n_slices, - uint32_t in_n_base_mipmap_level, - uint32_t in_n_mipmaps, - VkImageAspectFlags in_aspect_mask, - VkFormat in_format, - VkComponentSwizzle in_swizzle_red, - VkComponentSwizzle in_swizzle_green, - VkComponentSwizzle in_swizzle_blue, - VkComponentSwizzle in_swizzle_alpha) -{ - std::shared_ptr new_image_view_ptr(new ImageView(in_device_ptr, - in_image_ptr) ); - const VkComponentSwizzle swizzle_rgba[] = - { - in_swizzle_red, - in_swizzle_green, - in_swizzle_blue, - in_swizzle_alpha - }; - - if (!new_image_view_ptr->init(VK_IMAGE_VIEW_TYPE_3D, - in_n_base_slice, - 1, /* n_layers */ - in_n_slices, - in_n_base_mipmap_level, - in_n_mipmaps, - in_aspect_mask, - in_format, - swizzle_rgba) ) - { - new_image_view_ptr = nullptr; - } + result_ptr.reset( + new Anvil::ImageView(std::move(in_create_info_ptr) ) + ); - return new_image_view_ptr; -} - -/* Please see header for specification */ -std::shared_ptr Anvil::ImageView::create_cube_map(std::weak_ptr in_device_ptr, - std::shared_ptr in_image_ptr, - uint32_t in_n_base_layer, - uint32_t in_n_base_mipmap_level, - uint32_t in_n_mipmaps, - VkImageAspectFlags in_aspect_mask, - VkFormat in_format, - VkComponentSwizzle in_swizzle_red, - VkComponentSwizzle in_swizzle_green, - VkComponentSwizzle in_swizzle_blue, - VkComponentSwizzle in_swizzle_alpha) -{ - std::shared_ptr new_image_view_ptr(new ImageView(in_device_ptr, - in_image_ptr) ); - const VkComponentSwizzle swizzle_rgba[] = - { - in_swizzle_red, - in_swizzle_green, - in_swizzle_blue, - in_swizzle_alpha - }; - - if (!new_image_view_ptr->init(VK_IMAGE_VIEW_TYPE_CUBE, - in_n_base_layer, - 6, /* n_layers */ - 1, /* n_slices */ - in_n_base_mipmap_level, - in_n_mipmaps, - in_aspect_mask, - in_format, - swizzle_rgba) ) - { - new_image_view_ptr = nullptr; - } - - return new_image_view_ptr; -} - -/* Please see header for specification */ -std::shared_ptr Anvil::ImageView::create_cube_map_array(std::weak_ptr in_device_ptr, - std::shared_ptr in_image_ptr, - uint32_t in_n_base_layer, - uint32_t in_n_cube_maps, - uint32_t in_n_base_mipmap_level, - uint32_t in_n_mipmaps, - VkImageAspectFlags in_aspect_mask, - VkFormat in_format, - VkComponentSwizzle in_swizzle_red, - VkComponentSwizzle in_swizzle_green, - VkComponentSwizzle in_swizzle_blue, - VkComponentSwizzle in_swizzle_alpha) -{ - std::shared_ptr 
new_image_view_ptr(new ImageView(in_device_ptr, - in_image_ptr) ); - const VkComponentSwizzle swizzle_rgba[] = + if (result_ptr != nullptr) { - in_swizzle_red, - in_swizzle_green, - in_swizzle_blue, - in_swizzle_alpha - }; - - if (!new_image_view_ptr->init(VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, - in_n_base_layer, - in_n_cube_maps * 6, - 1, /* n_slices */ - in_n_base_mipmap_level, - in_n_mipmaps, - in_aspect_mask, - in_format, - swizzle_rgba) ) - { - new_image_view_ptr = nullptr; + if (!result_ptr->init() ) + { + result_ptr.reset(); + } } - return new_image_view_ptr; + return result_ptr; } -/** Constructor. Retains the user-specified Image instance and sets all members - * to default values. - * - * @param in_device_ptr Device to use. Must not be nullptr. - * @param in_parent_image_ptr Image to create the view for. Must not be nullptr. - **/ -Anvil::ImageView::ImageView(std::weak_ptr in_device_ptr, - std::shared_ptr in_parent_image_ptr) - :DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT) +Anvil::ImageView::ImageView(Anvil::ImageViewCreateInfoUniquePtr in_create_info_ptr) + :DebugMarkerSupportProvider(in_create_info_ptr->get_device(), + Anvil::ObjectType::IMAGE_VIEW), + MTSafetySupportProvider (Anvil::Utils::convert_mt_safety_enum_to_boolean(in_create_info_ptr->get_mt_safety(), + in_create_info_ptr->get_device () )), + m_image_view (VK_NULL_HANDLE) { - anvil_assert(in_parent_image_ptr != nullptr); - - m_device_ptr = in_device_ptr; - m_format = VK_FORMAT_UNDEFINED; - m_image_view = VK_NULL_HANDLE; - m_n_base_mipmap_level = UINT32_MAX; - m_n_layers = UINT32_MAX; - m_n_mipmaps = UINT32_MAX; - m_n_slices = UINT32_MAX; - m_parent_image_ptr = in_parent_image_ptr; + m_create_info_ptr = std::move(in_create_info_ptr); /* Register the object */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_IMAGE_VIEW, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::IMAGE_VIEW, this); } Anvil::ImageView::~ImageView() { /* Unregister the object */ - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_IMAGE_VIEW, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::IMAGE_VIEW, this); if (m_image_view != VK_NULL_HANDLE) { - std::shared_ptr device_locked_ptr(m_device_ptr); - - vkDestroyImageView(device_locked_ptr->get_device_vk(), - m_image_view, - nullptr /* pAllocator */); + lock(); + { + Anvil::Vulkan::vkDestroyImageView(m_device_ptr->get_device_vk(), + m_image_view, + nullptr /* pAllocator */); + } + unlock(); m_image_view = VK_NULL_HANDLE; } @@ -353,26 +90,28 @@ void Anvil::ImageView::get_base_mipmap_size(uint32_t* out_opt_base_mipmap_width_ uint32_t* out_opt_base_mipmap_height_ptr, uint32_t* out_opt_base_mipmap_depth_ptr) const { - bool result (false); - uint32_t result_depth(0); + const auto n_base_mip_level(m_create_info_ptr->get_base_mipmap_level() ); + const auto parent_image_ptr(m_create_info_ptr->get_parent_image () ); + bool result (false); + uint32_t result_depth (0); ANVIL_REDUNDANT_VARIABLE(result); - result = m_parent_image_ptr->get_image_mipmap_size(m_n_base_mipmap_level, - out_opt_base_mipmap_width_ptr, - out_opt_base_mipmap_height_ptr, - nullptr); + result = parent_image_ptr->get_image_mipmap_size(n_base_mip_level, + out_opt_base_mipmap_width_ptr, + out_opt_base_mipmap_height_ptr, + out_opt_base_mipmap_depth_ptr); anvil_assert(result); - switch (m_type) + switch (m_create_info_ptr->get_type() ) { - case VK_IMAGE_VIEW_TYPE_1D: result_depth = 1; break; - case VK_IMAGE_VIEW_TYPE_1D_ARRAY: result_depth = 
m_n_layers; break; - case VK_IMAGE_VIEW_TYPE_2D: result_depth = 1; break; - case VK_IMAGE_VIEW_TYPE_2D_ARRAY: result_depth = m_n_layers; break; - case VK_IMAGE_VIEW_TYPE_3D: result_depth = m_n_slices; break; - case VK_IMAGE_VIEW_TYPE_CUBE: result_depth = m_n_layers; break; - case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY: result_depth = m_n_layers; break; + case Anvil::ImageViewType::_1D: result_depth = 1; break; + case Anvil::ImageViewType::_1D_ARRAY: result_depth = m_create_info_ptr->get_n_layers(); break; + case Anvil::ImageViewType::_2D: result_depth = 1; break; + case Anvil::ImageViewType::_2D_ARRAY: result_depth = m_create_info_ptr->get_n_layers(); break; + case Anvil::ImageViewType::_3D: break; + case Anvil::ImageViewType::_CUBE: result_depth = m_create_info_ptr->get_n_layers(); break; + case Anvil::ImageViewType::_CUBE_ARRAY: result_depth = m_create_info_ptr->get_n_layers(); break; default: { @@ -386,6 +125,19 @@ void Anvil::ImageView::get_base_mipmap_size(uint32_t* out_opt_base_mipmap_width_ } } +Anvil::ImageSubresourceRange Anvil::ImageView::get_subresource_range() const +{ + Anvil::ImageSubresourceRange result; + + result.aspect_mask = m_create_info_ptr->get_aspect (); + result.base_array_layer = m_create_info_ptr->get_base_layer (); + result.base_mip_level = m_create_info_ptr->get_base_mipmap_level(); + result.layer_count = m_create_info_ptr->get_n_layers (); + result.level_count = m_create_info_ptr->get_n_mipmaps (); + + return result; +} + /** Performs a number of image view type-specific sanity checks and creates the requested * Vulkan image view instance. * @@ -393,95 +145,76 @@ void Anvil::ImageView::get_base_mipmap_size(uint32_t* out_opt_base_mipmap_width_ * * @return true if the function executed successfully, false otherwise. **/ -bool Anvil::ImageView::init(VkImageViewType in_image_view_type, - uint32_t in_n_base_layer, - uint32_t in_n_layers, - uint32_t in_n_slices, - uint32_t in_n_base_mipmap_level, - uint32_t in_n_mipmaps, - VkImageAspectFlags in_aspect_mask, - VkFormat in_format, - const VkComponentSwizzle* in_swizzle_rgba_ptr) +bool Anvil::ImageView::init() { - std::shared_ptr device_locked_ptr(m_device_ptr); - VkImageViewCreateInfo image_view_create_info; - VkFormat parent_image_format = VK_FORMAT_UNDEFINED; - uint32_t parent_image_n_layers = 0; - uint32_t parent_image_n_mipmaps = 0; - bool result = false; - VkResult result_vk; - - /* Sanity checks. Only focus on the basic stuff - hopefully more complicated issues - * will be caught by the validation layers. 
- */ - if (!(m_parent_image_ptr != nullptr) ) - { - anvil_assert(m_parent_image_ptr != nullptr); - - goto end; - } - - if (!m_parent_image_ptr->is_swapchain_image() && - !m_parent_image_ptr->is_sparse () && - m_parent_image_ptr->get_memory_block () == nullptr) - { - anvil_assert(!(m_parent_image_ptr->get_memory_block() == nullptr)); - - goto end; - } - - parent_image_format = m_parent_image_ptr->get_image_format (); - parent_image_n_mipmaps = m_parent_image_ptr->get_image_n_mipmaps(); - - if (m_parent_image_ptr->get_image_type() != VK_IMAGE_TYPE_3D) + const auto aspect_mask = m_create_info_ptr->get_aspect (); + const auto format = m_create_info_ptr->get_format (); + const auto image_view_type = m_create_info_ptr->get_type (); + const auto n_base_layer = m_create_info_ptr->get_base_layer (); + const auto n_base_mip = m_create_info_ptr->get_base_mipmap_level(); + const auto n_layers = m_create_info_ptr->get_n_layers (); + const auto n_mips = m_create_info_ptr->get_n_mipmaps (); + uint32_t parent_image_n_layers = 0; + uint32_t parent_image_n_mipmaps = 0; + auto parent_image_ptr = m_create_info_ptr->get_parent_image(); + bool result = false; + VkResult result_vk; + const auto sampler_ycbcr_conversion_ptr = m_create_info_ptr->get_sampler_ycbcr_conversion_ptr(); + Anvil::StructChainer struct_chainer; + const auto& swizzle_array = m_create_info_ptr->get_swizzle_array(); + auto usage = m_create_info_ptr->get_usage (); + + parent_image_n_mipmaps = parent_image_ptr->get_n_mipmaps(); + + if (parent_image_ptr->get_create_info_ptr()->get_type() != Anvil::ImageType::_3D) { - parent_image_n_layers = m_parent_image_ptr->get_image_n_layers (); + parent_image_n_layers = parent_image_ptr->get_create_info_ptr()->get_n_layers(); } else { - m_parent_image_ptr->get_image_mipmap_size(0, /* in_n_mipmap */ - nullptr, /* out_opt_width_ptr */ - nullptr, /* out_opt_height_ptr */ - &parent_image_n_layers); + parent_image_ptr->get_image_mipmap_size(0, /* in_n_mipmap */ + nullptr, /* out_opt_width_ptr */ + nullptr, /* out_opt_height_ptr */ + &parent_image_n_layers); } - if (!(parent_image_n_layers >= in_n_base_layer + in_n_layers)) + if (!(parent_image_n_layers >= n_base_layer + n_layers)) { - anvil_assert(parent_image_n_layers >= in_n_base_layer + in_n_layers); + anvil_assert(parent_image_n_layers >= n_base_layer + n_layers); goto end; } - if (!(parent_image_n_mipmaps >= in_n_base_mipmap_level + in_n_mipmaps)) + if (!(parent_image_n_mipmaps >= n_base_mip + n_mips)) { - anvil_assert(parent_image_n_mipmaps >= in_n_base_mipmap_level + in_n_mipmaps); + anvil_assert(parent_image_n_mipmaps >= n_base_mip + n_mips); goto end; } - if (((m_parent_image_ptr->get_image_create_flags() & Anvil::IMAGE_CREATE_FLAG_MUTABLE_FORMAT_BIT) == 0) && - (parent_image_format != in_format)) + if (usage != Anvil::ImageUsageFlagBits::NONE && + !m_device_ptr->is_extension_enabled(VK_KHR_MAINTENANCE2_EXTENSION_NAME) ) { - anvil_assert(parent_image_format == in_format); + anvil_assert(m_device_ptr->is_extension_enabled(VK_KHR_MAINTENANCE2_EXTENSION_NAME) ); goto end; } - if (m_parent_image_ptr->get_image_type() == VK_IMAGE_TYPE_3D) + if (parent_image_ptr->get_create_info_ptr()->get_type() == Anvil::ImageType::_3D) { - if (in_image_view_type == VK_IMAGE_VIEW_TYPE_2D || - in_image_view_type == VK_IMAGE_VIEW_TYPE_2D_ARRAY) + if (image_view_type == Anvil::ImageViewType::_2D || + image_view_type == Anvil::ImageViewType::_2D_ARRAY) { - if (!device_locked_ptr->is_khr_maintenance1_extension_enabled() ) + if 
(!m_device_ptr->get_extension_info()->khr_maintenance1() ) { - anvil_assert(device_locked_ptr->is_khr_maintenance1_extension_enabled()); + anvil_assert(m_device_ptr->get_extension_info()->khr_maintenance1()); goto end; } - if ((m_parent_image_ptr->get_image_create_flags() & Anvil::IMAGE_CREATE_FLAG_2D_ARRAY_COMPATIBLE_BIT) == 0) + if ((parent_image_ptr->get_create_info_ptr()->get_create_flags() & Anvil::ImageCreateFlagBits::_2D_ARRAY_COMPATIBLE_BIT) == 0) { - anvil_assert((m_parent_image_ptr->get_image_create_flags() & Anvil::IMAGE_CREATE_FLAG_2D_ARRAY_COMPATIBLE_BIT) != 0); + anvil_assert((parent_image_ptr->get_create_info_ptr()->get_create_flags() & Anvil::ImageCreateFlagBits::_2D_ARRAY_COMPATIBLE_BIT) != 0); goto end; } @@ -489,26 +222,60 @@ bool Anvil::ImageView::init(VkImageViewType in_image_view_type, } /* Create the image view instance */ - image_view_create_info.components.a = in_swizzle_rgba_ptr[3]; - image_view_create_info.components.b = in_swizzle_rgba_ptr[2]; - image_view_create_info.components.g = in_swizzle_rgba_ptr[1]; - image_view_create_info.components.r = in_swizzle_rgba_ptr[0]; - image_view_create_info.flags = 0; - image_view_create_info.format = in_format; - image_view_create_info.image = m_parent_image_ptr->get_image(); - image_view_create_info.pNext = nullptr; - image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; - image_view_create_info.subresourceRange.aspectMask = in_aspect_mask; - image_view_create_info.subresourceRange.baseArrayLayer = in_n_base_layer; - image_view_create_info.subresourceRange.baseMipLevel = in_n_base_mipmap_level; - image_view_create_info.subresourceRange.layerCount = in_n_layers; - image_view_create_info.subresourceRange.levelCount = in_n_mipmaps; - image_view_create_info.viewType = in_image_view_type; - - result_vk = vkCreateImageView(device_locked_ptr->get_device_vk(), - &image_view_create_info, - nullptr, /* pAllocator */ - &m_image_view); + { + VkImageViewCreateInfo image_view_create_info; + + image_view_create_info.components.a = static_cast(swizzle_array[3]); + image_view_create_info.components.b = static_cast(swizzle_array[2]); + image_view_create_info.components.g = static_cast(swizzle_array[1]); + image_view_create_info.components.r = static_cast(swizzle_array[0]); + image_view_create_info.flags = 0; + image_view_create_info.format = static_cast(format); + image_view_create_info.image = parent_image_ptr->get_image(); + image_view_create_info.pNext = nullptr; + image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; + image_view_create_info.subresourceRange.aspectMask = aspect_mask.get_vk(); + image_view_create_info.subresourceRange.baseArrayLayer = n_base_layer; + image_view_create_info.subresourceRange.baseMipLevel = n_base_mip; + image_view_create_info.subresourceRange.layerCount = n_layers; + image_view_create_info.subresourceRange.levelCount = n_mips; + image_view_create_info.viewType = static_cast(image_view_type); + + struct_chainer.append_struct(image_view_create_info); + } + + if (usage != Anvil::ImageUsageFlagBits::NONE) + { + VkImageViewUsageCreateInfo usage_create_info; + + usage_create_info.pNext = nullptr; + usage_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR; + usage_create_info.usage = usage.get_vk(); + + struct_chainer.append_struct(usage_create_info); + } + + if (sampler_ycbcr_conversion_ptr != nullptr) + { + VkSamplerYcbcrConversionInfoKHR conversion_info; + + anvil_assert(m_device_ptr->get_extension_info()->khr_sampler_ycbcr_conversion() ); + + 
conversion_info.conversion = sampler_ycbcr_conversion_ptr->get_sampler_ycbcr_conversion_vk(); + conversion_info.pNext = nullptr; + conversion_info.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR; + + struct_chainer.append_struct(conversion_info); + } + + { + auto chain_ptr = struct_chainer.create_chain(); + + result_vk = Anvil::Vulkan::vkCreateImageView(m_device_ptr->get_device_vk(), + chain_ptr->get_root_struct(), + nullptr, /* pAllocator */ + &m_image_view); + } if (!is_vk_call_successful(result_vk) ) { @@ -520,19 +287,6 @@ bool Anvil::ImageView::init(VkImageViewType in_image_view_type, /* Cache the properties */ set_vk_handle(m_image_view); - m_aspect_mask = in_aspect_mask; - m_format = in_format; - m_n_base_layer = in_n_base_layer; - m_n_base_mipmap_level = in_n_base_mipmap_level; - m_n_layers = in_n_layers; - m_n_mipmaps = in_n_mipmaps; - m_n_slices = in_n_slices; - m_type = in_image_view_type; - - memcpy(m_swizzle_array, - in_swizzle_rgba_ptr, - sizeof(m_swizzle_array) ); - /* All done */ result = true; @@ -541,33 +295,28 @@ bool Anvil::ImageView::init(VkImageViewType in_image_view_type, } /* Please see header for specification */ -bool Anvil::ImageView::intersects(std::shared_ptr in_image_view_ptr) const +bool Anvil::ImageView::intersects(const Anvil::ImageView* in_image_view_ptr) const { - bool result = false; + auto parent_image_ptr = m_create_info_ptr->get_parent_image(); + bool result = false; - if (get_parent_image() == in_image_view_ptr->get_parent_image() ) + if (parent_image_ptr == in_image_view_ptr->m_create_info_ptr->get_parent_image() ) { auto in_subresource_range = in_image_view_ptr->get_subresource_range(); auto this_subresource_range = get_subresource_range(); /* Aspect mask */ - if ((this_subresource_range.aspectMask & in_subresource_range.aspectMask) != 0) - { - result = true; - } - else - /* Layers + mips */ - if (!(this_subresource_range.baseArrayLayer < in_subresource_range.baseArrayLayer && - this_subresource_range.baseArrayLayer + this_subresource_range.layerCount < in_subresource_range.baseArrayLayer && - in_subresource_range.baseArrayLayer < this_subresource_range.baseArrayLayer && - in_subresource_range.baseArrayLayer + in_subresource_range.layerCount < this_subresource_range.baseArrayLayer) ) + if ((this_subresource_range.aspect_mask & in_subresource_range.aspect_mask) != 0) { - if (!(this_subresource_range.baseMipLevel < in_subresource_range.baseMipLevel && - this_subresource_range.baseMipLevel + this_subresource_range.levelCount < in_subresource_range.baseMipLevel && - in_subresource_range.baseMipLevel < this_subresource_range.baseMipLevel && - in_subresource_range.baseMipLevel + in_subresource_range.levelCount < this_subresource_range.baseMipLevel) ) + /* Layers + mips */ + if (!((this_subresource_range.base_array_layer + this_subresource_range.layer_count <= in_subresource_range.base_array_layer) || + (in_subresource_range.base_array_layer + in_subresource_range.layer_count <= this_subresource_range.base_array_layer) )) { - result = true; + if (!((this_subresource_range.base_mip_level + this_subresource_range.level_count <= in_subresource_range.base_mip_level) || + (in_subresource_range.base_mip_level + in_subresource_range.level_count <= this_subresource_range.base_mip_level) )) + { + result = true; + } } } } diff --git a/src/wrappers/instance.cpp b/src/wrappers/instance.cpp index 5fa9178c..5bb4025e 100644 --- a/src/wrappers/instance.cpp +++ b/src/wrappers/instance.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. 
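Note on the ImageView::init() rework above: the VkImageViewCreateInfo is now assembled through a struct chainer so that VkImageViewUsageCreateInfo (only when a restricted usage is requested; requires VK_KHR_maintenance2 or VK 1.1) and VkSamplerYcbcrConversionInfo (only when a Y'CbCr conversion is attached) can be appended to the pNext chain before vkCreateImageView is called. The following is a minimal, hand-written sketch of an equivalent chain using VK 1.1 core structs; the helper name, the fixed 2D view type and the omitted error handling are assumptions for illustration, not Anvil API.

#include <vulkan/vulkan.h>

/* Builds the pNext chain by hand: view create info <- usage info <- Y'CbCr info.
 * Zero-initialization leaves the component swizzle at IDENTITY and flags at 0. */
static VkResult create_image_view_with_chain(VkDevice                 in_device,
                                             VkImage                  in_image,
                                             VkFormat                 in_format,
                                             VkImageSubresourceRange  in_subresource_range,
                                             VkImageUsageFlags        in_restricted_usage,  /* 0 if unused              */
                                             VkSamplerYcbcrConversion in_ycbcr_conversion,  /* VK_NULL_HANDLE if unused */
                                             VkImageView*             out_view_ptr)
{
    const void* chain_head_ptr = nullptr;

    VkSamplerYcbcrConversionInfo conversion_info = {};
    conversion_info.sType      = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO;
    conversion_info.conversion = in_ycbcr_conversion;

    VkImageViewUsageCreateInfo usage_info = {};
    usage_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO;
    usage_info.usage = in_restricted_usage;

    if (in_ycbcr_conversion != VK_NULL_HANDLE)
    {
        conversion_info.pNext = chain_head_ptr;
        chain_head_ptr        = &conversion_info;
    }

    if (in_restricted_usage != 0)
    {
        usage_info.pNext = chain_head_ptr;
        chain_head_ptr   = &usage_info;
    }

    VkImageViewCreateInfo create_info = {};
    create_info.sType            = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
    create_info.pNext            = chain_head_ptr;
    create_info.image            = in_image;
    create_info.viewType         = VK_IMAGE_VIEW_TYPE_2D;
    create_info.format           = in_format;
    create_info.subresourceRange = in_subresource_range;

    return vkCreateImageView(in_device,
                             &create_info,
                             nullptr, /* pAllocator */
                             out_view_ptr);
}

The diff's struct_chainer.append_struct() calls produce the same chain, and Anvil additionally asserts that VK_KHR_maintenance2 and VK_KHR_sampler_ycbcr_conversion are enabled before appending the respective structs.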
All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -19,59 +19,66 @@ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // - #include "misc/debug.h" +#include "misc/debug_messenger_create_info.h" #include "misc/object_tracker.h" -#include "misc/shader_module_cache.h" #include "wrappers/instance.h" #include "wrappers/physical_device.h" +static bool g_core_func_ptrs_inited = false; +static bool g_instance_func_ptrs_inited = false; +static std::mutex g_vk_func_ptr_init_mutex; +static Anvil::LibraryUniquePtr g_vk_library_ptr; + /** Please see header for specification */ -Anvil::Instance::Instance(const std::string& in_app_name, - const std::string& in_engine_name, - DebugCallbackFunction in_opt_validation_callback_function) - :m_app_name (in_app_name), - m_debug_callback_data (0), - m_engine_name (in_engine_name), - m_global_layer (""), - m_validation_callback_function(in_opt_validation_callback_function) +Anvil::Instance::Instance(Anvil::InstanceCreateInfoUniquePtr in_create_info_ptr) + :MTSafetySupportProvider(in_create_info_ptr->is_mt_safe() ), + m_api_version (APIVersion::UNKNOWN), + m_debug_messenger_ptr (Anvil::DebugMessengerUniquePtr(nullptr, std::default_delete() )), + m_global_layer (""), + m_instance (VK_NULL_HANDLE) { - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_INSTANCE, + m_create_info_ptr = std::move(in_create_info_ptr); + + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::INSTANCE, this); } /** Please see header for specification */ Anvil::Instance::~Instance() { - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_INSTANCE, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::INSTANCE, this); destroy(); if (m_instance != VK_NULL_HANDLE) { - vkDestroyInstance(m_instance, - nullptr /* pAllocator */); + lock(); + { + Anvil::Vulkan::vkDestroyInstance(m_instance, + nullptr /* pAllocator */); + } + unlock(); m_instance = VK_NULL_HANDLE; } } /** Please see header for specification */ -std::shared_ptr Anvil::Instance::create(const std::string& in_app_name, - const std::string& in_engine_name, - DebugCallbackFunction in_opt_validation_callback_proc) +Anvil::InstanceUniquePtr Anvil::Instance::create(Anvil::InstanceCreateInfoUniquePtr in_create_info_ptr) { - std::shared_ptr new_instance_ptr; + InstanceUniquePtr new_instance_ptr(nullptr, + std::default_delete() ); - new_instance_ptr = std::shared_ptr( - new Instance( - in_app_name, - in_engine_name, - in_opt_validation_callback_proc) + new_instance_ptr.reset( + new Instance(std::move(in_create_info_ptr) ) ); - new_instance_ptr->init(); + if (!new_instance_ptr->init() ) + { + new_instance_ptr.reset(); + } return new_instance_ptr; } @@ -80,46 +87,33 @@ std::shared_ptr Anvil::Instance::create(const std::string& i * * For argument discussion, please see the extension specification **/ -VkBool32 VKAPI_PTR Anvil::Instance::debug_callback_pfn_proc(VkDebugReportFlagsEXT in_message_flags, - VkDebugReportObjectTypeEXT in_object_type, - uint64_t in_src_object, - size_t in_location, - int32_t in_msg_code, - const char* in_layer_prefix_ptr, - const char* in_message_ptr, - void* in_user_data) +void Anvil::Instance::debug_callback_handler(const Anvil::DebugMessageSeverityFlagBits& in_severity, + const char* in_message_ptr) { - 
Anvil::Instance* instance_ptr = static_cast(in_user_data); - - ANVIL_REDUNDANT_ARGUMENT(in_src_object); - ANVIL_REDUNDANT_ARGUMENT(in_location); - ANVIL_REDUNDANT_ARGUMENT(in_msg_code); - ANVIL_REDUNDANT_ARGUMENT(in_user_data); - - return instance_ptr->m_validation_callback_function(in_message_flags, - in_object_type, - in_layer_prefix_ptr, - in_message_ptr); + get_create_info_ptr()->get_validation_callback()(in_severity, + in_message_ptr); } /** Please see header for specification */ void Anvil::Instance::destroy() { - if (m_debug_callback_data != VK_NULL_HANDLE) - { - m_ext_debug_report_entrypoints.vkDestroyDebugReportCallbackEXT(m_instance, - m_debug_callback_data, - nullptr /* pAllocator */); - - m_debug_callback_data = VK_NULL_HANDLE; - } + m_debug_messenger_ptr.reset (); + m_physical_devices.clear (); + m_physical_device_groups.clear(); - while (m_physical_devices.size() > 0) + #ifdef _DEBUG { - m_physical_devices.back()->destroy(); - } + /* Make sure no physical devices are still registered with Object Tracker at this point */ + auto object_manager_ptr = Anvil::ObjectTracker::get(); - m_shader_module_cache_ptr.reset(); + if (object_manager_ptr->get_object_at_index(Anvil::ObjectType::INSTANCE, + 0) == nullptr) + { + anvil_assert(object_manager_ptr->get_object_at_index(Anvil::ObjectType::PHYSICAL_DEVICE, + 0) == nullptr); + } + } + #endif } /** Enumerates and caches all layers supported by the Vulkan Instance. */ @@ -132,14 +126,14 @@ void Anvil::Instance::enumerate_instance_layers() ANVIL_REDUNDANT_VARIABLE(result); /* Retrieve layer data */ - result = vkEnumerateInstanceLayerProperties(&n_layers, - nullptr); /* pProperties */ + result = Anvil::Vulkan::vkEnumerateInstanceLayerProperties(&n_layers, + nullptr); /* pProperties */ anvil_assert_vk_call_succeeded(result); layer_props.resize(n_layers + 1 /* global layer */); - result = vkEnumerateInstanceLayerProperties(&n_layers, - &layer_props[0]); + result = Anvil::Vulkan::vkEnumerateInstanceLayerProperties(&n_layers, + &layer_props[0]); anvil_assert_vk_call_succeeded(result); @@ -182,10 +176,14 @@ void Anvil::Instance::enumerate_layer_extensions(Anvil::Layer* in_layer_ptr) in_layer_ptr = &m_global_layer; } - layer_name = in_layer_ptr->name.c_str(); - result = vkEnumerateInstanceExtensionProperties(layer_name, - &n_extensions, - nullptr); /* pProperties */ + if (in_layer_ptr->name != "") + { + layer_name = in_layer_ptr->name.c_str(); + } + + result = Anvil::Vulkan::vkEnumerateInstanceExtensionProperties(layer_name, + &n_extensions, + nullptr); /* pProperties */ anvil_assert_vk_call_succeeded(result); @@ -195,9 +193,9 @@ void Anvil::Instance::enumerate_layer_extensions(Anvil::Layer* in_layer_ptr) extension_props.resize(n_extensions); - result = vkEnumerateInstanceExtensionProperties(layer_name, - &n_extensions, - &extension_props[0]); + result = Anvil::Vulkan::vkEnumerateInstanceExtensionProperties(layer_name, + &n_extensions, + &extension_props[0]); anvil_assert_vk_call_succeeded(result); @@ -206,7 +204,73 @@ void Anvil::Instance::enumerate_layer_extensions(Anvil::Layer* in_layer_ptr) n_extension < n_extensions; ++n_extension) { - in_layer_ptr->extensions.push_back(Anvil::Extension(extension_props[n_extension]) ); + in_layer_ptr->extensions.push_back(extension_props[n_extension].extensionName); + } + } +} + +/** Enumerates and caches all available physical device groups. 
*/ +void Anvil::Instance::enumerate_physical_device_groups() +{ + uint32_t n_physical_device_groups = 0; + VkResult result = VK_ERROR_INITIALIZATION_FAILED; + + ANVIL_REDUNDANT_VARIABLE(result); + + /* Enumerate available physical device groups and cache them for further use. */ + result = m_khr_device_group_creation_entrypoints.vkEnumeratePhysicalDeviceGroupsKHR(m_instance, + &n_physical_device_groups, + nullptr); /* pPhysicalDeviceGroupProperties */ + + if (n_physical_device_groups > 0) + { + std::vector device_group_props(n_physical_device_groups); + + for (uint32_t n_physical_device_group = 0; + n_physical_device_group < n_physical_device_groups; + ++n_physical_device_group) + { + auto& current_props = device_group_props.at(n_physical_device_group); + + current_props.pNext = nullptr; + current_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR; + } + + result = m_khr_device_group_creation_entrypoints.vkEnumeratePhysicalDeviceGroupsKHR(m_instance, + &n_physical_device_groups, + &device_group_props[0]); + anvil_assert_vk_call_succeeded(result); + + for (uint32_t n_group = 0; + n_group < n_physical_device_groups; + ++n_group) + { + Anvil::PhysicalDeviceGroup new_group; + const auto& this_group_props = device_group_props.at(n_group); + + anvil_assert(this_group_props.physicalDeviceCount > 0); + + for (uint32_t n_physical_device = 0; + n_physical_device < this_group_props.physicalDeviceCount; + ++n_physical_device) + { + auto physical_device_iterator = std::find(m_physical_devices.begin(), + m_physical_devices.end(), + this_group_props.physicalDevices[n_physical_device]); + + anvil_assert(physical_device_iterator != m_physical_devices.end() ); + if (physical_device_iterator != m_physical_devices.end() ) + { + new_group.physical_device_ptrs.push_back(physical_device_iterator->get() ); + + (*physical_device_iterator)->set_device_group_index (n_group); + (*physical_device_iterator)->set_device_group_device_index(n_physical_device); + } + } + + new_group.supports_subset_allocations = (this_group_props.subsetAllocation == VK_TRUE); + + m_physical_device_groups.push_back(new_group); } } } @@ -221,9 +285,9 @@ void Anvil::Instance::enumerate_physical_devices() ANVIL_REDUNDANT_VARIABLE(result); /* Retrieve physical device handles */ - result = vkEnumeratePhysicalDevices(m_instance, - &n_physical_devices, - nullptr); /* pPhysicalDevices */ + result = Anvil::Vulkan::vkEnumeratePhysicalDevices(m_instance, + &n_physical_devices, + nullptr); /* pPhysicalDevices */ anvil_assert_vk_call_succeeded(result); if (n_physical_devices == 0) @@ -236,9 +300,9 @@ void Anvil::Instance::enumerate_physical_devices() devices.resize(n_physical_devices); - result = vkEnumeratePhysicalDevices(m_instance, - &n_physical_devices, - &devices[0]); + result = Anvil::Vulkan::vkEnumeratePhysicalDevices(m_instance, + &n_physical_devices, + &devices[0]); anvil_assert_vk_call_succeeded(result); /* Fill out internal physical device descriptors */ @@ -246,20 +310,62 @@ void Anvil::Instance::enumerate_physical_devices() n_physical_device < n_physical_devices; ++n_physical_device) { - std::shared_ptr new_physical_device_ptr; + std::unique_ptr new_physical_device_ptr; - Anvil::PhysicalDevice::create(shared_from_this(), + new_physical_device_ptr = Anvil::PhysicalDevice::create(this, n_physical_device, devices[n_physical_device]); + + m_physical_devices.push_back( + std::move(new_physical_device_ptr) + ); } } +/** Please see header for specification */ +const Anvil::ExtensionEXTDebugReportEntrypoints& 
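Note on enumerate_physical_device_groups() above: it follows the usual Vulkan count-then-fill enumeration pattern, and each VkPhysicalDeviceGroupProperties element must have its sType/pNext initialized before the second call. A minimal sketch using the core VK 1.1 entrypoint follows (the diff instead resolves the KHR alias through VK_KHR_device_group_creation); the helper name is illustrative and error handling is omitted.

#include <vulkan/vulkan.h>
#include <vector>

static std::vector<VkPhysicalDeviceGroupProperties> enumerate_device_groups(VkInstance in_instance)
{
    uint32_t n_groups = 0;

    /* First call: query the group count only. */
    vkEnumeratePhysicalDeviceGroups(in_instance,
                                    &n_groups,
                                    nullptr);

    std::vector<VkPhysicalDeviceGroupProperties> group_props(n_groups);

    if (n_groups > 0)
    {
        /* Each element's sType / pNext must be set before the fill call. */
        for (auto& current_props : group_props)
        {
            current_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES;
            current_props.pNext = nullptr;
        }

        /* Second call: fill the array. */
        vkEnumeratePhysicalDeviceGroups(in_instance,
                                        &n_groups,
                                        group_props.data() );
    }

    return group_props;
}

The diff then maps each returned VkPhysicalDevice handle back to its wrapper, records the group index, the per-group device index and the subsetAllocation flag.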
Anvil::Instance::get_extension_ext_debug_report_entrypoints() const +{ + anvil_assert(m_enabled_extensions_info_ptr->get_instance_extension_info()->ext_debug_report() ); + + return m_ext_debug_report_entrypoints; +} + +/** Please see header for specification */ +const Anvil::ExtensionEXTDebugUtilsEntrypoints& Anvil::Instance::get_extension_ext_debug_utils_entrypoints() const +{ + anvil_assert(m_enabled_extensions_info_ptr->get_instance_extension_info()->ext_debug_utils() ); + + return m_ext_debug_utils_entrypoints; +} + +/** Please see header for specification */ +const Anvil::ExtensionKHRExternalFenceCapabilitiesEntrypoints& Anvil::Instance::get_extension_khr_external_fence_capabilities_entrypoints() const +{ + anvil_assert(m_enabled_extensions_info_ptr->get_instance_extension_info()->khr_external_fence_capabilities() ); + + return m_khr_external_fence_capabilities_entrypoints; +} + +/** Please see header for specification */ +const Anvil::ExtensionKHRExternalMemoryCapabilitiesEntrypoints& Anvil::Instance::get_extension_khr_external_memory_capabilities_entrypoints() const +{ + anvil_assert(m_enabled_extensions_info_ptr->get_instance_extension_info()->khr_external_memory_capabilities() ); + + return m_khr_external_memory_capabilities_entrypoints; +} + +/** Please see header for specification */ +const Anvil::ExtensionKHRExternalSemaphoreCapabilitiesEntrypoints& Anvil::Instance::get_extension_khr_external_semaphore_capabilities_entrypoints() const +{ + anvil_assert(m_enabled_extensions_info_ptr->get_instance_extension_info()->khr_external_semaphore_capabilities() ); + + return m_khr_external_semaphore_capabilities_entrypoints; +} + /** Please see header for specification */ const Anvil::ExtensionKHRGetPhysicalDeviceProperties2& Anvil::Instance::get_extension_khr_get_physical_device_properties2_entrypoints() const { - anvil_assert(std::find(m_enabled_extensions.begin(), - m_enabled_extensions.end(), - VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) != m_enabled_extensions.end() ); + anvil_assert(m_enabled_extensions_info_ptr->get_instance_extension_info()->khr_get_physical_device_properties2() ); return m_khr_get_physical_device_properties2_entrypoints; } @@ -267,9 +373,7 @@ const Anvil::ExtensionKHRGetPhysicalDeviceProperties2& Anvil::Instance::get_exte /** Please see header for specification */ const Anvil::ExtensionKHRSurfaceEntrypoints& Anvil::Instance::get_extension_khr_surface_entrypoints() const { - anvil_assert(std::find(m_enabled_extensions.begin(), - m_enabled_extensions.end(), - VK_KHR_SURFACE_EXTENSION_NAME) != m_enabled_extensions.end() ); + anvil_assert(m_enabled_extensions_info_ptr->get_instance_extension_info()->khr_surface() ); return m_khr_surface_entrypoints; } @@ -279,9 +383,7 @@ const Anvil::ExtensionKHRSurfaceEntrypoints& Anvil::Instance::get_extension_khr_ /** Please see header for specification */ const Anvil::ExtensionKHRWin32SurfaceEntrypoints& Anvil::Instance::get_extension_khr_win32_surface_entrypoints() const { - anvil_assert(std::find(m_enabled_extensions.begin(), - m_enabled_extensions.end(), - VK_KHR_WIN32_SURFACE_EXTENSION_NAME) != m_enabled_extensions.end() ); + anvil_assert(m_enabled_extensions_info_ptr->get_instance_extension_info()->khr_win32_surface() ); return m_khr_win32_surface_entrypoints; } @@ -291,25 +393,75 @@ const Anvil::ExtensionKHRSurfaceEntrypoints& Anvil::Instance::get_extension_khr_ /** Please see header for specification */ const Anvil::ExtensionKHRXcbSurfaceEntrypoints& Anvil::Instance::get_extension_khr_xcb_surface_entrypoints() 
const { - anvil_assert(std::find(m_enabled_extensions.begin(), - m_enabled_extensions.end(), - VK_KHR_XCB_SURFACE_EXTENSION_NAME) != m_enabled_extensions.end() ); + anvil_assert(m_enabled_extensions_info_ptr->get_instance_extension_info()->khr_xcb_surface() ); return m_khr_xcb_surface_entrypoints; } #endif + #if defined(ANVIL_INCLUDE_WAYLAND_WINDOW_SYSTEM_SUPPORT) + /** Please see header for specification */ + const Anvil::ExtensionKHRWaylandSurfaceEntrypoints& Anvil::Instance::get_extension_khr_wayland_surface_entrypoints() const + { + anvil_assert(m_enabled_extensions_info_ptr->get_instance_extension_info()->khr_wayland_surface() ); + + return m_khr_wayland_surface_entrypoints; + } + #endif #endif + #if defined(ANVIL_INCLUDE_HEADLESS_WINDOW_SYSTEM_SUPPORT) + /** Please see header for specification */ + const Anvil::ExtensionEXTHeadlessSurfaceEntrypoints& Anvil::Instance::get_extension_ext_headless_surface_entrypoints() const + { + anvil_assert(m_enabled_extensions_info_ptr->get_instance_extension_info()->ext_headless_surface() ); + + return m_ext_headless_surface_entrypoints; + } + #endif + +/** Please see header for specification */ +const Anvil::ExtensionKHRDeviceGroupCreationEntrypoints& Anvil::Instance::get_extension_khr_device_group_creation_entrypoints() const +{ + anvil_assert(m_enabled_extensions_info_ptr->get_instance_extension_info()->khr_device_group_creation() ); + + return m_khr_device_group_creation_entrypoints; +} /** Initializes the wrapper. */ -void Anvil::Instance::init() +bool Anvil::Instance::init() { - VkApplicationInfo app_info; - VkInstanceCreateInfo create_info; - std::vector enabled_layers; - size_t n_instance_layers = 0; - VkResult result = VK_ERROR_INITIALIZATION_FAILED; + uint32_t api_version_to_use = 0; + std::vector enabled_extensions_raw; + std::vector enabled_layers; + std::vector layerSettings; + std::map extension_enabled_status; + bool is_device_group_creation_supported = true; + size_t n_instance_layers = 0; + bool result = false; + VkResult result_vk = VK_ERROR_INITIALIZATION_FAILED; + + ANVIL_REDUNDANT_VARIABLE(result_vk); + + if (!init_vk_func_ptrs() ) + { + anvil_assert_fail(); - ANVIL_REDUNDANT_VARIABLE(result); + goto end; + } + + enabled_layers.reserve(m_create_info_ptr->get_layers().size()); + for(const auto &layer : m_create_info_ptr->get_layers()) + enabled_layers.push_back(layer.c_str()); + + layerSettings.reserve(m_create_info_ptr->get_layer_settings().size()); + for(const auto &setting : m_create_info_ptr->get_layer_settings()) { + VkLayerSettingEXT vkSetting; + vkSetting.pLayerName = setting.pLayerName; + vkSetting.pSettingName = setting.pSettingName; + vkSetting.valueCount = setting.valueCount; + vkSetting.type = static_cast(setting.type); + vkSetting.pValues = setting.pValues; + layerSettings.push_back(vkSetting); + } /* Enumerate available layers */ enumerate_instance_layers(); @@ -327,6 +479,12 @@ void Anvil::Instance::init() #if defined(ANVIL_INCLUDE_XCB_WINDOW_SYSTEM_SUPPORT) VK_KHR_XCB_SURFACE_EXTENSION_NAME, #endif + #if defined(ANVIL_INCLUDE_WAYLAND_WINDOW_SYSTEM_SUPPORT) + VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME, + #endif + #endif + #if defined(ANVIL_INCLUDE_HEADLESS_WINDOW_SYSTEM_SUPPORT) + VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME, #endif VK_EXT_DEBUG_REPORT_EXTENSION_NAME @@ -343,169 +501,320 @@ void Anvil::Instance::init() #if defined(ANVIL_INCLUDE_XCB_WINDOW_SYSTEM_SUPPORT) VK_KHR_XCB_SURFACE_EXTENSION_NAME, #endif + #if defined(ANVIL_INCLUDE_WAYLAND_WINDOW_SYSTEM_SUPPORT) + VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME, + #endif + 
#endif + #if defined(ANVIL_INCLUDE_HEADLESS_WINDOW_SYSTEM_SUPPORT) + VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME, #endif }; - if (m_validation_callback_function != nullptr) + /* Determine API version to use */ + if (m_create_info_ptr->get_api_version() == Anvil::APIVersion::UNKNOWN) { - for (uint32_t n_extension = 0; - n_extension < sizeof(desired_extensions_with_validation) / sizeof(desired_extensions_with_validation[0]); - ++n_extension) + /* Use the latest version available .. */ + uint32_t available_vk_version = 0; + uint32_t available_vk_version_major = 0; + uint32_t available_vk_version_minor = 0; + + if (Anvil::Vulkan::vkEnumerateInstanceVersion != nullptr) { - if (is_instance_extension_supported(desired_extensions_with_validation[n_extension])) + if (Anvil::Vulkan::vkEnumerateInstanceVersion(&available_vk_version) != VK_SUCCESS) { - m_enabled_extensions.push_back(desired_extensions_with_validation[n_extension]); + anvil_assert_fail(); + + goto end; } } + else + { + /* Assume VK 1.0 is the highest API version available. */ + available_vk_version = VK_MAKE_VERSION(1, 0, 0); + } + + available_vk_version_major = VK_VERSION_MAJOR(available_vk_version); + available_vk_version_minor = VK_VERSION_MINOR(available_vk_version); + + if ( available_vk_version_major >= 2 || + (available_vk_version_major == 1 && available_vk_version_minor >= 1)) + { + api_version_to_use = VK_MAKE_VERSION(1, 1, 0); + m_api_version = APIVersion::_1_1; + } + else + { + api_version_to_use = VK_MAKE_VERSION(1, 0, 0); + m_api_version = APIVersion::_1_0; + } } else { - for (uint32_t n_extension = 0; - n_extension < sizeof(desired_extensions_without_validation) / sizeof(desired_extensions_without_validation[0]); - ++n_extension) + switch (m_create_info_ptr->get_api_version() ) { - if (is_instance_extension_supported(desired_extensions_without_validation[n_extension])) + case APIVersion::_1_0: + { + api_version_to_use = VK_MAKE_VERSION(1, 0, 0); + m_api_version = APIVersion::_1_0; + + break; + } + + case APIVersion::_1_1: { - m_enabled_extensions.push_back(desired_extensions_without_validation[n_extension]); + api_version_to_use = VK_MAKE_VERSION(1, 1, 0); + m_api_version = APIVersion::_1_1; + + break; + } + + default: + { + anvil_assert_fail(); + + goto end; } } } - /* Set up the app info descriptor **/ - app_info.apiVersion = VK_MAKE_VERSION(1, 0, 0); - app_info.applicationVersion = 0; - app_info.engineVersion = 0; - app_info.pApplicationName = m_app_name.c_str(); - app_info.pEngineName = m_engine_name.c_str(); - app_info.pNext = nullptr; - app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO; - /* Set up the create info descriptor */ - memset(&create_info, - 0, - sizeof(create_info) ); - n_instance_layers = static_cast(m_supported_layers.size() ); for (size_t n_instance_layer = 0; n_instance_layer < n_instance_layers; ++n_instance_layer) { - const std::vector& layer_extensions = m_supported_layers[n_instance_layer].extensions; - const std::string& layer_name = m_supported_layers[n_instance_layer].name; + const std::string& layer_description = m_supported_layers[n_instance_layer].description; + const std::string& layer_name = m_supported_layers[n_instance_layer].name; /* If validation is enabled and this is a layer which issues debug call-backs, cache it, so that * we can request for it at vkCreateInstance() call time */ - if (m_validation_callback_function != nullptr && - std::find(layer_extensions.begin(), - layer_extensions.end(), - VK_EXT_DEBUG_REPORT_EXTENSION_NAME) != layer_extensions.end() ) + if 
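Note on the API-version negotiation in Instance::init() above: vkEnumerateInstanceVersion is only exposed by Vulkan 1.1+ loaders, so a null function pointer implies a 1.0 loader, and anything newer than 1.1 is clamped to 1.1. A minimal sketch of that decision, assuming Vulkan 1.1 headers and a dynamically resolved entrypoint; the helper name is illustrative.

#include <vulkan/vulkan.h>
#include <cstdint>

/* Returns the apiVersion value to put into VkApplicationInfo. */
static uint32_t pick_instance_api_version(PFN_vkEnumerateInstanceVersion in_enumerate_instance_version_fn)
{
    uint32_t available_version = VK_MAKE_VERSION(1, 0, 0);

    if (in_enumerate_instance_version_fn != nullptr)
    {
        in_enumerate_instance_version_fn(&available_version);
    }

    /* Only 1.0 and 1.1 are distinguished here, so newer versions are clamped to 1.1. */
    if (VK_VERSION_MAJOR(available_version) >  1 ||
        VK_VERSION_MINOR(available_version) >= 1)
    {
        return VK_MAKE_VERSION(1, 1, 0);
    }

    return VK_MAKE_VERSION(1, 0, 0);
}

In the diff this decision also sets m_api_version, which later drives whether the extensions folded into core VK 1.1 are explicitly marked as enabled for backwards compatibility.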
(get_create_info_ptr()->get_validation_callback() != nullptr && + layer_description.find ("alidation") != std::string::npos) { enabled_layers.push_back(layer_name.c_str() ); } } - /* Enable known instance-level extensions by default */ - if (is_instance_extension_supported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) ) { - m_enabled_extensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME); + if (get_create_info_ptr()->get_validation_callback() != nullptr) + { + for (uint32_t n_extension = 0; + n_extension < sizeof(desired_extensions_with_validation) / sizeof(desired_extensions_with_validation[0]); + ++n_extension) + { + if (is_instance_extension_supported(desired_extensions_with_validation[n_extension])) + { + extension_enabled_status[desired_extensions_with_validation[n_extension] ] = true; + } + } + } + else + { + for (uint32_t n_extension = 0; + n_extension < sizeof(desired_extensions_without_validation) / sizeof(desired_extensions_without_validation[0]); + ++n_extension) + { + if (is_instance_extension_supported(desired_extensions_without_validation[n_extension])) + { + extension_enabled_status[desired_extensions_without_validation[n_extension] ] = true; + } + } + } + /* Enable known instance-level extensions by default */ + if (is_instance_extension_supported(VK_EXT_DEBUG_UTILS_EXTENSION_NAME) ) + { + extension_enabled_status[VK_EXT_DEBUG_UTILS_EXTENSION_NAME] = true; + } + + if (is_instance_extension_supported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) ) + { + extension_enabled_status[VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME] = true; + } + + if (is_instance_extension_supported(VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME) ) + { + extension_enabled_status[VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME] = true; + } + else + { + is_device_group_creation_supported = false; + } + + if (is_instance_extension_supported(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME) ) + { + extension_enabled_status[VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME] = true; + } + + if (is_instance_extension_supported(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME) ) + { + extension_enabled_status[VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME] = true; + } + + if (is_instance_extension_supported(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME) ) + { + extension_enabled_status[VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME] = true; + } + + // These are required for VR on Nvidia GPUs. + // TODO It would be better if the caller could define these as custom extensions! 
+ if (is_instance_extension_supported(VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME) ) + { + extension_enabled_status[VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME] = true; + } + + if (is_instance_extension_supported(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME) ) + { + extension_enabled_status[VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME] = true; + } + + if (is_instance_extension_supported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) ) + { + extension_enabled_status[VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME] = true; + } + // + + /* Filter out undesired extensions */ + for (const auto& current_extension_name : get_create_info_ptr()->get_disallowed_instance_level_extensions() ) + { + auto ext_iterator = extension_enabled_status.find(current_extension_name); + + if (ext_iterator != extension_enabled_status.end() ) + { + if (current_extension_name == VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME) + { + is_device_group_creation_supported = false; + } + + extension_enabled_status.erase(ext_iterator); + } + } + + m_enabled_extensions_info_ptr = Anvil::ExtensionInfo::create_instance_extension_info(extension_enabled_status, + false); /* in_unspecified_extension_name_value */ } /* We're ready to create a new Vulkan instance */ - std::vector enabled_extensions_raw; - - for (auto& ext_name : m_enabled_extensions) + for (auto& ext_name : extension_enabled_status) { - enabled_extensions_raw.push_back(ext_name.c_str() ); + enabled_extensions_raw.push_back(ext_name.first.c_str() ); } - create_info.enabledExtensionCount = static_cast(m_enabled_extensions.size() ); - create_info.enabledLayerCount = static_cast(enabled_layers.size() ); - create_info.flags = 0; - create_info.pApplicationInfo = &app_info; - create_info.pNext = nullptr; - create_info.ppEnabledExtensionNames = (enabled_extensions_raw.size() > 0) ? &enabled_extensions_raw[0] : nullptr; - create_info.ppEnabledLayerNames = (enabled_layers.size() > 0) ? &enabled_layers [0] : nullptr; - create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO; + { + VkApplicationInfo app_info; + VkInstanceCreateInfo create_info; + + /* Set up the app info descriptor */ + app_info.apiVersion = api_version_to_use; + app_info.applicationVersion = m_create_info_ptr->get_app_version (); + app_info.engineVersion = m_create_info_ptr->get_engine_version(); + app_info.pApplicationName = m_create_info_ptr->get_app_name ().c_str(); + app_info.pEngineName = m_create_info_ptr->get_engine_name ().c_str(); + app_info.pNext = nullptr; + app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO; + + VkLayerSettingsCreateInfoEXT layer_settings_create_info {VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT, nullptr, static_cast(layerSettings.size()), layerSettings.data()}; + layer_settings_create_info.pNext = m_create_info_ptr->pNext; + + create_info.enabledExtensionCount = static_cast(enabled_extensions_raw.size() ); + create_info.enabledLayerCount = static_cast(enabled_layers.size () ); + create_info.flags = 0; + create_info.pApplicationInfo = &app_info; + create_info.pNext = &layer_settings_create_info; + create_info.ppEnabledExtensionNames = (enabled_extensions_raw.size() > 0) ? &enabled_extensions_raw.at(0) : nullptr; + create_info.ppEnabledLayerNames = (enabled_layers.size() > 0) ? 
&enabled_layers.at (0) : nullptr; + create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO; + + result_vk = Anvil::Vulkan::vkCreateInstance(&create_info, + nullptr, /* pAllocator */ + &m_instance); + + anvil_assert_vk_call_succeeded(result_vk); + } - result = vkCreateInstance(&create_info, - nullptr, /* pAllocator */ - &m_instance); + /* If this is a VK 1.1 instance, explicitly mark VK1.1 specific extensions as enabled. This is to provide backwards compatibility + * with VK 1.0 applications which may not be aware that VK 1.0 extensions that were folded into VK 1.1 do not necessarily have to + * be reported as supported. + */ + if (m_api_version == APIVersion::_1_1) + { + extension_enabled_status[VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME] = true; + extension_enabled_status[VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME] = true; + extension_enabled_status[VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME] = true; + extension_enabled_status[VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME] = true; + extension_enabled_status[VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME] = true; + + m_enabled_extensions_info_ptr = Anvil::ExtensionInfo::create_instance_extension_info(extension_enabled_status, + false); /* in_unspecified_extension_name_value */ + } - anvil_assert_vk_call_succeeded(result); + /* Retrieve the remaining part of instance-dependent function pointers */ + if (!init_vk_func_ptrs() ) + { + /* TODO: Return null instance in case of a failure */ + anvil_assert_fail(); + } - /* Continue initializing */ init_func_pointers(); - if (m_validation_callback_function != nullptr) + if (m_create_info_ptr->get_validation_callback() != nullptr) { init_debug_callbacks(); } enumerate_physical_devices(); - m_shader_module_cache_ptr = Anvil::ShaderModuleCache::create(); + if (is_device_group_creation_supported) + { + enumerate_physical_device_groups(); + } + + result = true; +end: + return result; } /** Initializes debug callback support. 
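Note on the VK_EXT_layer_settings hookup above: the caller-supplied settings are converted into VkLayerSettingEXT entries and a VkLayerSettingsCreateInfoEXT is chained into VkInstanceCreateInfo::pNext ahead of vkCreateInstance. A minimal, self-contained sketch of that chaining follows, assuming headers that expose VK_EXT_layer_settings; the layer name, setting name and boolean value are purely illustrative and are not values used by Anvil.

#include <vulkan/vulkan.h>

static VkResult create_instance_with_layer_setting(const VkApplicationInfo* in_app_info_ptr,
                                                   VkInstance*              out_instance_ptr)
{
    const VkBool32 enable_sync_validation = VK_TRUE;

    /* One illustrative setting targeting the Khronos validation layer. */
    VkLayerSettingEXT setting = {};
    setting.pLayerName   = "VK_LAYER_KHRONOS_validation";
    setting.pSettingName = "validate_sync";
    setting.type         = VK_LAYER_SETTING_TYPE_BOOL32_EXT;
    setting.valueCount   = 1;
    setting.pValues      = &enable_sync_validation;

    VkLayerSettingsCreateInfoEXT layer_settings_info = {};
    layer_settings_info.sType        = VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT;
    layer_settings_info.settingCount = 1;
    layer_settings_info.pSettings    = &setting;

    /* All chained structs stay alive until vkCreateInstance() returns. */
    VkInstanceCreateInfo create_info = {};
    create_info.sType            = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    create_info.pNext            = &layer_settings_info;
    create_info.pApplicationInfo = in_app_info_ptr;

    return vkCreateInstance(&create_info,
                            nullptr, /* pAllocator */
                            out_instance_ptr);
}

In the diff, any pNext supplied through the instance create info is preserved by pointing layer_settings_create_info.pNext at it before the chain is handed to vkCreateInstance.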
*/ void Anvil::Instance::init_debug_callbacks() { - VkResult result = VK_ERROR_INITIALIZATION_FAILED; - - ANVIL_REDUNDANT_VARIABLE(result); - - /* Set up the debug call-backs, while we're at it */ - VkDebugReportCallbackCreateInfoEXT debug_report_callback_create_info; - - debug_report_callback_create_info.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT; - debug_report_callback_create_info.pfnCallback = debug_callback_pfn_proc; - debug_report_callback_create_info.pNext = nullptr; - debug_report_callback_create_info.pUserData = this; - debug_report_callback_create_info.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT; - - result = m_ext_debug_report_entrypoints.vkCreateDebugReportCallbackEXT(m_instance, - &debug_report_callback_create_info, - nullptr, /* pAllocator */ - &m_debug_callback_data); - anvil_assert_vk_call_succeeded(result); + auto create_info_ptr = Anvil::DebugMessengerCreateInfo::create(this, + Anvil::DebugMessageSeverityFlagBits::ERROR_BIT | Anvil::DebugMessageSeverityFlagBits::INFO_BIT | Anvil::DebugMessageSeverityFlagBits::WARNING_BIT, + Anvil::DebugMessageTypeFlagBits::VALIDATION_BIT, + std::bind(&Anvil::Instance::debug_callback_handler, + this, + std::placeholders::_1, /* severity */ + std::placeholders::_5) ); /* message */ + + anvil_assert(create_info_ptr != nullptr); + + m_debug_messenger_ptr = Anvil::DebugMessenger::create(std::move(create_info_ptr) ); + anvil_assert(m_debug_messenger_ptr != nullptr); } /** Initializes all required instance-level function pointers. */ void Anvil::Instance::init_func_pointers() { - m_khr_surface_entrypoints.vkDestroySurfaceKHR = reinterpret_cast (vkGetInstanceProcAddr(m_instance, - "vkDestroySurfaceKHR") ); - m_khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceCapabilitiesKHR = reinterpret_cast(vkGetInstanceProcAddr(m_instance, - "vkGetPhysicalDeviceSurfaceCapabilitiesKHR") ); - m_khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceFormatsKHR = reinterpret_cast (vkGetInstanceProcAddr(m_instance, - "vkGetPhysicalDeviceSurfaceFormatsKHR") ); - m_khr_surface_entrypoints.vkGetPhysicalDeviceSurfacePresentModesKHR = reinterpret_cast(vkGetInstanceProcAddr(m_instance, - "vkGetPhysicalDeviceSurfacePresentModesKHR") ); - m_khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceSupportKHR = reinterpret_cast (vkGetInstanceProcAddr(m_instance, - "vkGetPhysicalDeviceSurfaceSupportKHR") ); - - m_ext_debug_report_entrypoints.vkCreateDebugReportCallbackEXT = reinterpret_cast (vkGetInstanceProcAddr(m_instance, - "vkCreateDebugReportCallbackEXT") ); - m_ext_debug_report_entrypoints.vkDestroyDebugReportCallbackEXT = reinterpret_cast(vkGetInstanceProcAddr(m_instance, - "vkDestroyDebugReportCallbackEXT") ); - - anvil_assert(m_khr_surface_entrypoints.vkDestroySurfaceKHR != nullptr); - anvil_assert(m_khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceCapabilitiesKHR != nullptr); - anvil_assert(m_khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceFormatsKHR != nullptr); - anvil_assert(m_khr_surface_entrypoints.vkGetPhysicalDeviceSurfacePresentModesKHR != nullptr); - anvil_assert(m_khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceSupportKHR != nullptr); + /* NOTE: Vulkan 1.1 instances do not need to report support for extensions considered core in 1.1. To address this, any func pointers corresponding to VK1.1 functionality + * need to be taken from core entrypoint pool. 
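
/* The DebugMessenger wrapper created above sits on top of VK_EXT_debug_utils. A hedged sketch of the raw
 * path it abstracts (callback registration via vkCreateDebugUtilsMessengerEXT); not Anvil code, identifiers
 * such as on_debug_message() are illustrative, <vulkan/vulkan.h> and <cstdio> assumed. */
static VKAPI_ATTR VkBool32 VKAPI_CALL on_debug_message(VkDebugUtilsMessageSeverityFlagBitsEXT      severity,
                                                       VkDebugUtilsMessageTypeFlagsEXT             type,
                                                       const VkDebugUtilsMessengerCallbackDataEXT* data_ptr,
                                                       void*                                       user_data_ptr)
{
    (void) severity; (void) type; (void) user_data_ptr;

    std::printf("[debug] %s\n", data_ptr->pMessage);

    return VK_FALSE; /* never abort the call that triggered the message */
}

VkDebugUtilsMessengerEXT create_debug_messenger(VkInstance instance)
{
    VkDebugUtilsMessengerCreateInfoEXT create_info = {};

    create_info.sType           = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
    create_info.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
                                  VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
    create_info.messageType     = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT;
    create_info.pfnUserCallback = on_debug_message;

    auto create_func = reinterpret_cast<PFN_vkCreateDebugUtilsMessengerEXT>(
        vkGetInstanceProcAddr(instance, "vkCreateDebugUtilsMessengerEXT"));

    VkDebugUtilsMessengerEXT messenger = VK_NULL_HANDLE;

    if (create_func != nullptr)
    {
        create_func(instance, &create_info, nullptr /* pAllocator */, &messenger);
    }

    return messenger;
}
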
+ * + * Anvil reports support for extensions corresponding to wrapped core functionality, irrelevant of what extensions the implementation actually reports for. + */ + const bool is_vk11_instance = (m_api_version == APIVersion::_1_1); #ifdef _WIN32 { #if defined(ANVIL_INCLUDE_WIN3264_WINDOW_SYSTEM_SUPPORT) { - m_khr_win32_surface_entrypoints.vkCreateWin32SurfaceKHR = reinterpret_cast (vkGetInstanceProcAddr(m_instance, - "vkCreateWin32SurfaceKHR") ); - m_khr_win32_surface_entrypoints.vkGetPhysicalDeviceWin32PresentationSupportKHR = reinterpret_cast(vkGetInstanceProcAddr(m_instance, - "vkGetPhysicalDeviceWin32PresentationSupportKHR") ); + if (m_enabled_extensions_info_ptr->get_instance_extension_info()->khr_win32_surface() ) + { + m_khr_win32_surface_entrypoints.vkCreateWin32SurfaceKHR = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkCreateWin32SurfaceKHR") ); + m_khr_win32_surface_entrypoints.vkGetPhysicalDeviceWin32PresentationSupportKHR = reinterpret_cast(Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkGetPhysicalDeviceWin32PresentationSupportKHR") ); - anvil_assert(m_khr_win32_surface_entrypoints.vkCreateWin32SurfaceKHR != nullptr); - anvil_assert(m_khr_win32_surface_entrypoints.vkGetPhysicalDeviceWin32PresentationSupportKHR != nullptr); + anvil_assert(m_khr_win32_surface_entrypoints.vkCreateWin32SurfaceKHR != nullptr); + anvil_assert(m_khr_win32_surface_entrypoints.vkGetPhysicalDeviceWin32PresentationSupportKHR != nullptr); + } } #endif } @@ -513,33 +822,124 @@ void Anvil::Instance::init_func_pointers() { #if defined(ANVIL_INCLUDE_XCB_WINDOW_SYSTEM_SUPPORT) { - m_khr_xcb_surface_entrypoints.vkCreateXcbSurfaceKHR = reinterpret_cast(vkGetInstanceProcAddr(m_instance, - "vkCreateXcbSurfaceKHR") ); + if (m_enabled_extensions_info_ptr->get_instance_extension_info()->khr_xcb_surface() ) + { + m_khr_xcb_surface_entrypoints.vkCreateXcbSurfaceKHR = reinterpret_cast(Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkCreateXcbSurfaceKHR") ); + + anvil_assert(m_khr_xcb_surface_entrypoints.vkCreateXcbSurfaceKHR != nullptr); + } + } + #endif + #if defined(ANVIL_INCLUDE_WAYLAND_WINDOW_SYSTEM_SUPPORT) + { + if (m_enabled_extensions_info_ptr->get_instance_extension_info()->khr_wayland_surface() ) + { + m_khr_wayland_surface_entrypoints.vkCreateWaylandSurfaceKHR = reinterpret_cast(Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkCreateWaylandSurfaceKHR") ); - anvil_assert(m_khr_xcb_surface_entrypoints.vkCreateXcbSurfaceKHR != nullptr); + anvil_assert(m_khr_wayland_surface_entrypoints.vkCreateWaylandSurfaceKHR != nullptr); + } } #endif } #endif + #if defined(ANVIL_INCLUDE_HEADLESS_WINDOW_SYSTEM_SUPPORT) + { + if (m_enabled_extensions_info_ptr->get_instance_extension_info()->ext_headless_surface() ) + { + m_ext_headless_surface_entrypoints.vkCreateHeadlessSurfaceEXT = reinterpret_cast(Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkCreateHeadlessSurfaceEXT") ); + + anvil_assert(m_ext_headless_surface_entrypoints.vkCreateHeadlessSurfaceEXT != nullptr); + } + } + #endif + + if (m_enabled_extensions_info_ptr->get_instance_extension_info()->ext_debug_report() ) + { + m_ext_debug_report_entrypoints.vkCreateDebugReportCallbackEXT = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkCreateDebugReportCallbackEXT") ); + m_ext_debug_report_entrypoints.vkDebugReportMessageEXT = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkDebugReportMessageEXT") ); + m_ext_debug_report_entrypoints.vkDestroyDebugReportCallbackEXT = 
reinterpret_cast(Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkDestroyDebugReportCallbackEXT") ); + + anvil_assert(m_ext_debug_report_entrypoints.vkCreateDebugReportCallbackEXT != nullptr); + anvil_assert(m_ext_debug_report_entrypoints.vkDebugReportMessageEXT != nullptr); + anvil_assert(m_ext_debug_report_entrypoints.vkDestroyDebugReportCallbackEXT != nullptr); + } - if (std::find(m_enabled_extensions.begin(), - m_enabled_extensions.end(), - VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) != m_enabled_extensions.end() ) - { - m_khr_get_physical_device_properties2_entrypoints.vkGetPhysicalDeviceFeatures2KHR = reinterpret_cast (vkGetInstanceProcAddr(m_instance, - "vkGetPhysicalDeviceFeatures2KHR") ); - m_khr_get_physical_device_properties2_entrypoints.vkGetPhysicalDeviceFormatProperties2KHR = reinterpret_cast (vkGetInstanceProcAddr(m_instance, - "vkGetPhysicalDeviceFormatProperties2KHR") ); - m_khr_get_physical_device_properties2_entrypoints.vkGetPhysicalDeviceImageFormatProperties2KHR = reinterpret_cast (vkGetInstanceProcAddr(m_instance, - "vkGetPhysicalDeviceImageFormatProperties2KHR") ); - m_khr_get_physical_device_properties2_entrypoints.vkGetPhysicalDeviceMemoryProperties2KHR = reinterpret_cast (vkGetInstanceProcAddr(m_instance, - "vkGetPhysicalDeviceMemoryProperties2KHR") ); - m_khr_get_physical_device_properties2_entrypoints.vkGetPhysicalDeviceProperties2KHR = reinterpret_cast (vkGetInstanceProcAddr(m_instance, - "vkGetPhysicalDeviceProperties2KHR") ); - m_khr_get_physical_device_properties2_entrypoints.vkGetPhysicalDeviceQueueFamilyProperties2KHR = reinterpret_cast (vkGetInstanceProcAddr(m_instance, - "vkGetPhysicalDeviceQueueFamilyProperties2KHR") ); - m_khr_get_physical_device_properties2_entrypoints.vkGetPhysicalDeviceSparseImageFormatProperties2KHR = reinterpret_cast(vkGetInstanceProcAddr(m_instance, - "vkGetPhysicalDeviceSparseImageFormatProperties2KHR") ); + if (m_enabled_extensions_info_ptr->get_instance_extension_info()->ext_debug_utils() ) + { + m_ext_debug_utils_entrypoints.vkCmdBeginDebugUtilsLabelEXT = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkCmdBeginDebugUtilsLabelEXT") ); + m_ext_debug_utils_entrypoints.vkCmdEndDebugUtilsLabelEXT = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkCmdEndDebugUtilsLabelEXT") ); + m_ext_debug_utils_entrypoints.vkCmdInsertDebugUtilsLabelEXT = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkCmdInsertDebugUtilsLabelEXT") ); + m_ext_debug_utils_entrypoints.vkCreateDebugUtilsMessengerEXT = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkCreateDebugUtilsMessengerEXT") ); + m_ext_debug_utils_entrypoints.vkDestroyDebugUtilsMessengerEXT = reinterpret_cast(Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkDestroyDebugUtilsMessengerEXT") ); + m_ext_debug_utils_entrypoints.vkSetDebugUtilsObjectNameEXT = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkSetDebugUtilsObjectNameEXT") ); + m_ext_debug_utils_entrypoints.vkSetDebugUtilsObjectTagEXT = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkSetDebugUtilsObjectTagEXT") ); + m_ext_debug_utils_entrypoints.vkQueueBeginDebugUtilsLabelEXT = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkQueueBeginDebugUtilsLabelEXT") ); + m_ext_debug_utils_entrypoints.vkQueueEndDebugUtilsLabelEXT = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkQueueEndDebugUtilsLabelEXT") ); + 
m_ext_debug_utils_entrypoints.vkQueueInsertDebugUtilsLabelEXT = reinterpret_cast(Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkQueueInsertDebugUtilsLabelEXT") ); + m_ext_debug_utils_entrypoints.vkSubmitDebugUtilsMessageEXT = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkSubmitDebugUtilsMessageEXT") ); + + anvil_assert(m_ext_debug_utils_entrypoints.vkCmdBeginDebugUtilsLabelEXT != nullptr); + anvil_assert(m_ext_debug_utils_entrypoints.vkCmdEndDebugUtilsLabelEXT != nullptr); + anvil_assert(m_ext_debug_utils_entrypoints.vkCmdInsertDebugUtilsLabelEXT != nullptr); + anvil_assert(m_ext_debug_utils_entrypoints.vkCreateDebugUtilsMessengerEXT != nullptr); + anvil_assert(m_ext_debug_utils_entrypoints.vkDestroyDebugUtilsMessengerEXT != nullptr); + anvil_assert(m_ext_debug_utils_entrypoints.vkSetDebugUtilsObjectNameEXT != nullptr); + anvil_assert(m_ext_debug_utils_entrypoints.vkSetDebugUtilsObjectTagEXT != nullptr); + anvil_assert(m_ext_debug_utils_entrypoints.vkQueueBeginDebugUtilsLabelEXT != nullptr); + anvil_assert(m_ext_debug_utils_entrypoints.vkQueueEndDebugUtilsLabelEXT != nullptr); + anvil_assert(m_ext_debug_utils_entrypoints.vkQueueInsertDebugUtilsLabelEXT != nullptr); + anvil_assert(m_ext_debug_utils_entrypoints.vkSubmitDebugUtilsMessageEXT != nullptr); + } + + if (m_enabled_extensions_info_ptr->get_instance_extension_info()->khr_get_physical_device_properties2() || + is_vk11_instance) + { + const char* vk_get_physical_device_features_2_entrypoint_name = (is_vk11_instance) ? "vkGetPhysicalDeviceFeatures2" + : "vkGetPhysicalDeviceFeatures2KHR"; + const char* vk_get_physical_device_format_properties_2_entrypoint_name = (is_vk11_instance) ? "vkGetPhysicalDeviceFormatProperties2" + : "vkGetPhysicalDeviceFormatProperties2KHR"; + const char* vk_get_physical_device_image_format_properties_2_entrypoint_name = (is_vk11_instance) ? "vkGetPhysicalDeviceImageFormatProperties2" + : "vkGetPhysicalDeviceImageFormatProperties2KHR"; + const char* vk_get_physical_device_memory_properties_2_entrypoint_name = (is_vk11_instance) ? "vkGetPhysicalDeviceMemoryProperties2" + : "vkGetPhysicalDeviceMemoryProperties2KHR"; + const char* vk_get_physical_device_properties_2_entrypoint_name = (is_vk11_instance) ? "vkGetPhysicalDeviceProperties2" + : "vkGetPhysicalDeviceProperties2KHR"; + const char* vk_get_physical_device_queue_family_properties_2_entrypoint_name = (is_vk11_instance) ? "vkGetPhysicalDeviceQueueFamilyProperties2" + : "vkGetPhysicalDeviceQueueFamilyProperties2KHR"; + const char* vk_get_physical_device_sparse_image_format_properties_2_entrypoint_name = (is_vk11_instance) ? 
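
/* Once the VK_EXT_debug_utils entrypoints above are cached, the most common use besides messengers is
 * attaching human-readable names to handles. A hedged usage sketch, not Anvil code; the PFN is assumed to
 * have been fetched as shown above and <cstdint> to be available. */
void name_buffer(PFN_vkSetDebugUtilsObjectNameEXT set_name_func,
                 VkDevice                         device,
                 VkBuffer                         buffer,
                 const char*                      name)
{
    VkDebugUtilsObjectNameInfoEXT name_info = {};

    name_info.sType        = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
    name_info.objectType   = VK_OBJECT_TYPE_BUFFER;
    name_info.objectHandle = (uint64_t) buffer; /* non-dispatchable handles are passed as 64-bit values */
    name_info.pObjectName  = name;              /* shows up in validation messages and capture tools     */

    set_name_func(device, &name_info);
}
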
"vkGetPhysicalDeviceSparseImageFormatProperties2" + : "vkGetPhysicalDeviceSparseImageFormatProperties2KHR"; + + m_khr_get_physical_device_properties2_entrypoints.vkGetPhysicalDeviceFeatures2KHR = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + vk_get_physical_device_features_2_entrypoint_name) ); + m_khr_get_physical_device_properties2_entrypoints.vkGetPhysicalDeviceFormatProperties2KHR = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + vk_get_physical_device_format_properties_2_entrypoint_name) ); + m_khr_get_physical_device_properties2_entrypoints.vkGetPhysicalDeviceImageFormatProperties2KHR = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + vk_get_physical_device_image_format_properties_2_entrypoint_name) ); + m_khr_get_physical_device_properties2_entrypoints.vkGetPhysicalDeviceMemoryProperties2KHR = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + vk_get_physical_device_memory_properties_2_entrypoint_name) ); + m_khr_get_physical_device_properties2_entrypoints.vkGetPhysicalDeviceProperties2KHR = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + vk_get_physical_device_properties_2_entrypoint_name) ); + m_khr_get_physical_device_properties2_entrypoints.vkGetPhysicalDeviceQueueFamilyProperties2KHR = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + vk_get_physical_device_queue_family_properties_2_entrypoint_name) ); + m_khr_get_physical_device_properties2_entrypoints.vkGetPhysicalDeviceSparseImageFormatProperties2KHR = reinterpret_cast(Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + vk_get_physical_device_sparse_image_format_properties_2_entrypoint_name) ); anvil_assert(m_khr_get_physical_device_properties2_entrypoints.vkGetPhysicalDeviceFeatures2KHR != nullptr); anvil_assert(m_khr_get_physical_device_properties2_entrypoints.vkGetPhysicalDeviceFormatProperties2KHR != nullptr); @@ -549,42 +949,360 @@ void Anvil::Instance::init_func_pointers() anvil_assert(m_khr_get_physical_device_properties2_entrypoints.vkGetPhysicalDeviceQueueFamilyProperties2KHR != nullptr); anvil_assert(m_khr_get_physical_device_properties2_entrypoints.vkGetPhysicalDeviceSparseImageFormatProperties2KHR != nullptr); } -} -/** Please see header for specification */ -bool Anvil::Instance::is_instance_extension_supported(const char* in_extension_name) const -{ - return std::find(m_global_layer.extensions.begin(), - m_global_layer.extensions.end(), - in_extension_name) != m_global_layer.extensions.end(); -} + if (m_enabled_extensions_info_ptr->get_instance_extension_info()->khr_device_group_creation() || + is_vk11_instance) + { + const char* vk_enumerate_physical_device_groups_entrypoint_name = (is_vk11_instance) ? 
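
/* A compact restatement of the name-selection pattern used above for extensions promoted to VK 1.1 core:
 * query the core name on a 1.1 instance, fall back to the KHR-suffixed alias otherwise. Hedged sketch,
 * not Anvil code; load_promoted() is an illustrative helper. */
template <typename FuncPtrType>
FuncPtrType load_promoted(VkInstance instance, bool is_vk11_instance, const char* core_name, const char* khr_name)
{
    return reinterpret_cast<FuncPtrType>(
        vkGetInstanceProcAddr(instance, is_vk11_instance ? core_name : khr_name));
}

/* Usage: both names resolve to equivalent functionality on conformant implementations. */
PFN_vkGetPhysicalDeviceFeatures2 resolve_get_features2(VkInstance instance, bool is_vk11_instance)
{
    return load_promoted<PFN_vkGetPhysicalDeviceFeatures2>(instance, is_vk11_instance,
                                                           "vkGetPhysicalDeviceFeatures2",
                                                           "vkGetPhysicalDeviceFeatures2KHR");
}
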
"vkEnumeratePhysicalDeviceGroups" + : "vkEnumeratePhysicalDeviceGroupsKHR"; -/** Please see header for specification */ -void Anvil::Instance::register_physical_device(std::shared_ptr in_physical_device_ptr) -{ - auto iterator = std::find(m_physical_devices.begin(), - m_physical_devices.end(), - in_physical_device_ptr); + m_khr_device_group_creation_entrypoints.vkEnumeratePhysicalDeviceGroupsKHR = reinterpret_cast(Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + vk_enumerate_physical_device_groups_entrypoint_name) ); + + anvil_assert(m_khr_device_group_creation_entrypoints.vkEnumeratePhysicalDeviceGroupsKHR != nullptr); + } - anvil_assert(iterator == m_physical_devices.end() ); + if (m_enabled_extensions_info_ptr->get_instance_extension_info()->khr_external_fence_capabilities() || + is_vk11_instance) + { + const char* vk_get_physical_device_external_fence_properties_entrypoint_name = (is_vk11_instance) ? "vkGetPhysicalDeviceExternalFenceProperties" + : "vkGetPhysicalDeviceExternalFencePropertiesKHR"; + + m_khr_external_fence_capabilities_entrypoints.vkGetPhysicalDeviceExternalFencePropertiesKHR = reinterpret_cast(Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + vk_get_physical_device_external_fence_properties_entrypoint_name) ); + + anvil_assert(m_khr_external_fence_capabilities_entrypoints.vkGetPhysicalDeviceExternalFencePropertiesKHR != nullptr); + } + + if (m_enabled_extensions_info_ptr->get_instance_extension_info()->khr_external_memory_capabilities() || + is_vk11_instance) + { + const char* vk_get_physical_device_external_buffer_properties_entrypoint_name = (is_vk11_instance) ? "vkGetPhysicalDeviceExternalBufferProperties" + : "vkGetPhysicalDeviceExternalBufferPropertiesKHR"; + + m_khr_external_memory_capabilities_entrypoints.vkGetPhysicalDeviceExternalBufferPropertiesKHR = reinterpret_cast(Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + vk_get_physical_device_external_buffer_properties_entrypoint_name) ); + + anvil_assert(m_khr_external_memory_capabilities_entrypoints.vkGetPhysicalDeviceExternalBufferPropertiesKHR != nullptr); + } - if (iterator == m_physical_devices.end() ) + if (m_enabled_extensions_info_ptr->get_instance_extension_info()->khr_external_semaphore_capabilities() || + is_vk11_instance) { - m_physical_devices.push_back(in_physical_device_ptr); + const char* vk_get_physical_device_external_semaphore_properties_entrypoint_name = (is_vk11_instance) ? 
"vkGetPhysicalDeviceExternalSemaphoreProperties" + : "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR"; + + m_khr_external_semaphore_capabilities_entrypoints.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = reinterpret_cast(Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + vk_get_physical_device_external_semaphore_properties_entrypoint_name) ); + + anvil_assert(m_khr_external_semaphore_capabilities_entrypoints.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR!= nullptr); + } + + if (m_enabled_extensions_info_ptr->get_instance_extension_info()->khr_surface() ) + { + m_khr_surface_entrypoints.vkDestroySurfaceKHR = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkDestroySurfaceKHR") ); + m_khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceCapabilitiesKHR = reinterpret_cast(Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkGetPhysicalDeviceSurfaceCapabilitiesKHR") ); + m_khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceFormatsKHR = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkGetPhysicalDeviceSurfaceFormatsKHR") ); + m_khr_surface_entrypoints.vkGetPhysicalDeviceSurfacePresentModesKHR = reinterpret_cast(Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkGetPhysicalDeviceSurfacePresentModesKHR") ); + m_khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceSupportKHR = reinterpret_cast (Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + "vkGetPhysicalDeviceSurfaceSupportKHR") ); + + anvil_assert(m_khr_surface_entrypoints.vkDestroySurfaceKHR != nullptr); + anvil_assert(m_khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceCapabilitiesKHR != nullptr); + anvil_assert(m_khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceFormatsKHR != nullptr); + anvil_assert(m_khr_surface_entrypoints.vkGetPhysicalDeviceSurfacePresentModesKHR != nullptr); + anvil_assert(m_khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceSupportKHR != nullptr); } } -/** Please see header for specification */ -void Anvil::Instance::unregister_physical_device(std::shared_ptr in_physical_device_ptr) +bool Anvil::Instance::init_vk_func_ptrs() { - auto iterator = std::find(m_physical_devices.begin(), - m_physical_devices.end(), - in_physical_device_ptr); + std::lock_guard lock (g_vk_func_ptr_init_mutex); + bool result(true); + + typedef struct + { + std::string func_name; + bool requires_getter_call; + void** result_func_ptr; + } FunctionData; + + #if !defined(ANVIL_LINK_STATICALLY_WITH_VULKAN_LIB) + FunctionData functions_vk10[] = + { + /* NOTE: All functions with @param requires_getter_call equal to false should come first! 
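
/* The table below is consumed in two passes: entries with requires_getter_call == false are global commands
 * resolved straight from the loader library, everything else needs a live VkInstance and goes through
 * vkGetInstanceProcAddr. A hedged POSIX-only sketch of that bootstrap order (Windows would use
 * LoadLibrary/GetProcAddress on vulkan-1.dll); not Anvil code, assumes <vulkan/vulkan.h> and <dlfcn.h>. */
PFN_vkGetInstanceProcAddr load_instance_proc_addr_getter()
{
    void* library_ptr = dlopen("libvulkan.so", RTLD_NOW | RTLD_LOCAL);

    if (library_ptr == nullptr)
    {
        return nullptr;
    }

    /* vkGetInstanceProcAddr is the one export needed to reach every other entrypoint */
    return reinterpret_cast<PFN_vkGetInstanceProcAddr>(dlsym(library_ptr, "vkGetInstanceProcAddr"));
}

PFN_vkCreateInstance bootstrap_create_instance(PFN_vkGetInstanceProcAddr gipa)
{
    /* Global commands are queried with a null instance handle */
    return reinterpret_cast<PFN_vkCreateInstance>(gipa(nullptr, "vkCreateInstance"));
}
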
*/ + {"vkCreateInstance", false, reinterpret_cast(&Anvil::Vulkan::vkCreateInstance)}, + {"vkDestroyInstance", false, reinterpret_cast(&Anvil::Vulkan::vkDestroyInstance)}, + {"vkEnumeratePhysicalDevices", false, reinterpret_cast(&Anvil::Vulkan::vkEnumeratePhysicalDevices)}, + {"vkEnumerateInstanceExtensionProperties", false, reinterpret_cast(&Anvil::Vulkan::vkEnumerateInstanceExtensionProperties)}, + {"vkEnumerateInstanceLayerProperties", false, reinterpret_cast(&Anvil::Vulkan::vkEnumerateInstanceLayerProperties)}, + {"vkGetDeviceProcAddr", false, reinterpret_cast(&Anvil::Vulkan::vkGetDeviceProcAddr)}, + {"vkGetInstanceProcAddr", false, reinterpret_cast(&Anvil::Vulkan::vkGetInstanceProcAddr)}, + + /* instance-dependent functions */ + {"vkGetPhysicalDeviceFeatures", true, reinterpret_cast(&Anvil::Vulkan::vkGetPhysicalDeviceFeatures)}, + {"vkGetPhysicalDeviceFormatProperties", true, reinterpret_cast(&Anvil::Vulkan::vkGetPhysicalDeviceFormatProperties)}, + {"vkGetPhysicalDeviceImageFormatProperties", true, reinterpret_cast(&Anvil::Vulkan::vkGetPhysicalDeviceImageFormatProperties)}, + {"vkGetPhysicalDeviceProperties", true, reinterpret_cast(&Anvil::Vulkan::vkGetPhysicalDeviceProperties)}, + {"vkGetPhysicalDeviceQueueFamilyProperties", true, reinterpret_cast(&Anvil::Vulkan::vkGetPhysicalDeviceQueueFamilyProperties)}, + {"vkGetPhysicalDeviceMemoryProperties", true, reinterpret_cast(&Anvil::Vulkan::vkGetPhysicalDeviceMemoryProperties)}, + {"vkCreateDevice", true, reinterpret_cast(&Anvil::Vulkan::vkCreateDevice)}, + {"vkDestroyDevice", true, reinterpret_cast(&Anvil::Vulkan::vkDestroyDevice)}, + {"vkEnumerateDeviceExtensionProperties", true, reinterpret_cast(&Anvil::Vulkan::vkEnumerateDeviceExtensionProperties)}, + {"vkEnumerateDeviceLayerProperties", true, reinterpret_cast(&Anvil::Vulkan::vkEnumerateDeviceLayerProperties)}, + {"vkGetDeviceQueue", true, reinterpret_cast(&Anvil::Vulkan::vkGetDeviceQueue)}, + {"vkQueueSubmit", true, reinterpret_cast(&Anvil::Vulkan::vkQueueSubmit)}, + {"vkQueueWaitIdle", true, reinterpret_cast(&Anvil::Vulkan::vkQueueWaitIdle)}, + {"vkDeviceWaitIdle", true, reinterpret_cast(&Anvil::Vulkan::vkDeviceWaitIdle)}, + {"vkAllocateMemory", true, reinterpret_cast(&Anvil::Vulkan::vkAllocateMemory)}, + {"vkFreeMemory", true, reinterpret_cast(&Anvil::Vulkan::vkFreeMemory)}, + {"vkMapMemory", true, reinterpret_cast(&Anvil::Vulkan::vkMapMemory)}, + {"vkUnmapMemory", true, reinterpret_cast(&Anvil::Vulkan::vkUnmapMemory)}, + {"vkFlushMappedMemoryRanges", true, reinterpret_cast(&Anvil::Vulkan::vkFlushMappedMemoryRanges)}, + {"vkInvalidateMappedMemoryRanges", true, reinterpret_cast(&Anvil::Vulkan::vkInvalidateMappedMemoryRanges)}, + {"vkGetDeviceMemoryCommitment", true, reinterpret_cast(&Anvil::Vulkan::vkGetDeviceMemoryCommitment)}, + {"vkBindBufferMemory", true, reinterpret_cast(&Anvil::Vulkan::vkBindBufferMemory)}, + {"vkBindImageMemory", true, reinterpret_cast(&Anvil::Vulkan::vkBindImageMemory)}, + {"vkGetBufferMemoryRequirements", true, reinterpret_cast(&Anvil::Vulkan::vkGetBufferMemoryRequirements)}, + {"vkGetImageMemoryRequirements", true, reinterpret_cast(&Anvil::Vulkan::vkGetImageMemoryRequirements)}, + {"vkGetImageSparseMemoryRequirements", true, reinterpret_cast(&Anvil::Vulkan::vkGetImageSparseMemoryRequirements)}, + {"vkGetPhysicalDeviceSparseImageFormatProperties", true, reinterpret_cast(&Anvil::Vulkan::vkGetPhysicalDeviceSparseImageFormatProperties)}, + {"vkQueueBindSparse", true, reinterpret_cast(&Anvil::Vulkan::vkQueueBindSparse)}, + {"vkCreateFence", true, 
reinterpret_cast(&Anvil::Vulkan::vkCreateFence)}, + {"vkDestroyFence", true, reinterpret_cast(&Anvil::Vulkan::vkDestroyFence)}, + {"vkResetFences", true, reinterpret_cast(&Anvil::Vulkan::vkResetFences)}, + {"vkGetFenceStatus", true, reinterpret_cast(&Anvil::Vulkan::vkGetFenceStatus)}, + {"vkWaitForFences", true, reinterpret_cast(&Anvil::Vulkan::vkWaitForFences)}, + {"vkCreateSemaphore", true, reinterpret_cast(&Anvil::Vulkan::vkCreateSemaphore)}, + {"vkDestroySemaphore", true, reinterpret_cast(&Anvil::Vulkan::vkDestroySemaphore)}, + {"vkCreateEvent", true, reinterpret_cast(&Anvil::Vulkan::vkCreateEvent)}, + {"vkDestroyEvent", true, reinterpret_cast(&Anvil::Vulkan::vkDestroyEvent)}, + {"vkGetEventStatus", true, reinterpret_cast(&Anvil::Vulkan::vkGetEventStatus)}, + {"vkSetEvent", true, reinterpret_cast(&Anvil::Vulkan::vkSetEvent)}, + {"vkResetEvent", true, reinterpret_cast(&Anvil::Vulkan::vkResetEvent)}, + {"vkCreateQueryPool", true, reinterpret_cast(&Anvil::Vulkan::vkCreateQueryPool)}, + {"vkDestroyQueryPool", true, reinterpret_cast(&Anvil::Vulkan::vkDestroyQueryPool)}, + {"vkGetQueryPoolResults", true, reinterpret_cast(&Anvil::Vulkan::vkGetQueryPoolResults)}, + {"vkCreateBuffer", true, reinterpret_cast(&Anvil::Vulkan::vkCreateBuffer)}, + {"vkDestroyBuffer", true, reinterpret_cast(&Anvil::Vulkan::vkDestroyBuffer)}, + {"vkCreateBufferView", true, reinterpret_cast(&Anvil::Vulkan::vkCreateBufferView)}, + {"vkDestroyBufferView", true, reinterpret_cast(&Anvil::Vulkan::vkDestroyBufferView)}, + {"vkCreateImage", true, reinterpret_cast(&Anvil::Vulkan::vkCreateImage)}, + {"vkDestroyImage", true, reinterpret_cast(&Anvil::Vulkan::vkDestroyImage)}, + {"vkGetImageSubresourceLayout", true, reinterpret_cast(&Anvil::Vulkan::vkGetImageSubresourceLayout)}, + {"vkCreateImageView", true, reinterpret_cast(&Anvil::Vulkan::vkCreateImageView)}, + {"vkDestroyImageView", true, reinterpret_cast(&Anvil::Vulkan::vkDestroyImageView)}, + {"vkCreateShaderModule", true, reinterpret_cast(&Anvil::Vulkan::vkCreateShaderModule)}, + {"vkDestroyShaderModule", true, reinterpret_cast(&Anvil::Vulkan::vkDestroyShaderModule)}, + {"vkCreatePipelineCache", true, reinterpret_cast(&Anvil::Vulkan::vkCreatePipelineCache)}, + {"vkDestroyPipelineCache", true, reinterpret_cast(&Anvil::Vulkan::vkDestroyPipelineCache)}, + {"vkGetPipelineCacheData", true, reinterpret_cast(&Anvil::Vulkan::vkGetPipelineCacheData)}, + {"vkMergePipelineCaches", true, reinterpret_cast(&Anvil::Vulkan::vkMergePipelineCaches)}, + {"vkCreateGraphicsPipelines", true, reinterpret_cast(&Anvil::Vulkan::vkCreateGraphicsPipelines)}, + {"vkCreateComputePipelines", true, reinterpret_cast(&Anvil::Vulkan::vkCreateComputePipelines)}, + {"vkDestroyPipeline", true, reinterpret_cast(&Anvil::Vulkan::vkDestroyPipeline)}, + {"vkCreatePipelineLayout", true, reinterpret_cast(&Anvil::Vulkan::vkCreatePipelineLayout)}, + {"vkDestroyPipelineLayout", true, reinterpret_cast(&Anvil::Vulkan::vkDestroyPipelineLayout)}, + {"vkCreateSampler", true, reinterpret_cast(&Anvil::Vulkan::vkCreateSampler)}, + {"vkDestroySampler", true, reinterpret_cast(&Anvil::Vulkan::vkDestroySampler)}, + {"vkCreateDescriptorSetLayout", true, reinterpret_cast(&Anvil::Vulkan::vkCreateDescriptorSetLayout)}, + {"vkDestroyDescriptorSetLayout", true, reinterpret_cast(&Anvil::Vulkan::vkDestroyDescriptorSetLayout)}, + {"vkCreateDescriptorPool", true, reinterpret_cast(&Anvil::Vulkan::vkCreateDescriptorPool)}, + {"vkDestroyDescriptorPool", true, reinterpret_cast(&Anvil::Vulkan::vkDestroyDescriptorPool)}, + {"vkResetDescriptorPool", 
true, reinterpret_cast(&Anvil::Vulkan::vkResetDescriptorPool)}, + {"vkAllocateDescriptorSets", true, reinterpret_cast(&Anvil::Vulkan::vkAllocateDescriptorSets)}, + {"vkFreeDescriptorSets", true, reinterpret_cast(&Anvil::Vulkan::vkFreeDescriptorSets)}, + {"vkUpdateDescriptorSets", true, reinterpret_cast(&Anvil::Vulkan::vkUpdateDescriptorSets)}, + {"vkCreateFramebuffer", true, reinterpret_cast(&Anvil::Vulkan::vkCreateFramebuffer)}, + {"vkDestroyFramebuffer", true, reinterpret_cast(&Anvil::Vulkan::vkDestroyFramebuffer)}, + {"vkCreateRenderPass", true, reinterpret_cast(&Anvil::Vulkan::vkCreateRenderPass)}, + {"vkDestroyRenderPass", true, reinterpret_cast(&Anvil::Vulkan::vkDestroyRenderPass)}, + {"vkGetRenderAreaGranularity", true, reinterpret_cast(&Anvil::Vulkan::vkGetRenderAreaGranularity)}, + {"vkCreateCommandPool", true, reinterpret_cast(&Anvil::Vulkan::vkCreateCommandPool)}, + {"vkDestroyCommandPool", true, reinterpret_cast(&Anvil::Vulkan::vkDestroyCommandPool)}, + {"vkResetCommandPool", true, reinterpret_cast(&Anvil::Vulkan::vkResetCommandPool)}, + {"vkAllocateCommandBuffers", true, reinterpret_cast(&Anvil::Vulkan::vkAllocateCommandBuffers)}, + {"vkFreeCommandBuffers", true, reinterpret_cast(&Anvil::Vulkan::vkFreeCommandBuffers)}, + {"vkBeginCommandBuffer", true, reinterpret_cast(&Anvil::Vulkan::vkBeginCommandBuffer)}, + {"vkEndCommandBuffer", true, reinterpret_cast(&Anvil::Vulkan::vkEndCommandBuffer)}, + {"vkResetCommandBuffer", true, reinterpret_cast(&Anvil::Vulkan::vkResetCommandBuffer)}, + {"vkCmdBindPipeline", true, reinterpret_cast(&Anvil::Vulkan::vkCmdBindPipeline)}, + {"vkCmdSetViewport", true, reinterpret_cast(&Anvil::Vulkan::vkCmdSetViewport)}, + {"vkCmdSetScissor", true, reinterpret_cast(&Anvil::Vulkan::vkCmdSetScissor)}, + {"vkCmdSetLineWidth", true, reinterpret_cast(&Anvil::Vulkan::vkCmdSetLineWidth)}, + {"vkCmdSetDepthBias", true, reinterpret_cast(&Anvil::Vulkan::vkCmdSetDepthBias)}, + {"vkCmdSetBlendConstants", true, reinterpret_cast(&Anvil::Vulkan::vkCmdSetBlendConstants)}, + {"vkCmdSetDepthBounds", true, reinterpret_cast(&Anvil::Vulkan::vkCmdSetDepthBounds)}, + {"vkCmdSetStencilCompareMask", true, reinterpret_cast(&Anvil::Vulkan::vkCmdSetStencilCompareMask)}, + {"vkCmdSetStencilWriteMask", true, reinterpret_cast(&Anvil::Vulkan::vkCmdSetStencilWriteMask)}, + {"vkCmdSetStencilReference", true, reinterpret_cast(&Anvil::Vulkan::vkCmdSetStencilReference)}, + {"vkCmdBindDescriptorSets", true, reinterpret_cast(&Anvil::Vulkan::vkCmdBindDescriptorSets)}, + {"vkCmdBindIndexBuffer", true, reinterpret_cast(&Anvil::Vulkan::vkCmdBindIndexBuffer)}, + {"vkCmdBindVertexBuffers", true, reinterpret_cast(&Anvil::Vulkan::vkCmdBindVertexBuffers)}, + {"vkCmdDraw", true, reinterpret_cast(&Anvil::Vulkan::vkCmdDraw)}, + {"vkCmdDrawIndexed", true, reinterpret_cast(&Anvil::Vulkan::vkCmdDrawIndexed)}, + {"vkCmdDrawIndirect", true, reinterpret_cast(&Anvil::Vulkan::vkCmdDrawIndirect)}, + {"vkCmdDrawIndexedIndirect", true, reinterpret_cast(&Anvil::Vulkan::vkCmdDrawIndexedIndirect)}, + {"vkCmdDispatch", true, reinterpret_cast(&Anvil::Vulkan::vkCmdDispatch)}, + {"vkCmdDispatchIndirect", true, reinterpret_cast(&Anvil::Vulkan::vkCmdDispatchIndirect)}, + {"vkCmdCopyBuffer", true, reinterpret_cast(&Anvil::Vulkan::vkCmdCopyBuffer)}, + {"vkCmdCopyImage", true, reinterpret_cast(&Anvil::Vulkan::vkCmdCopyImage)}, + {"vkCmdBlitImage", true, reinterpret_cast(&Anvil::Vulkan::vkCmdBlitImage)}, + {"vkCmdCopyBufferToImage", true, reinterpret_cast(&Anvil::Vulkan::vkCmdCopyBufferToImage)}, + {"vkCmdCopyImageToBuffer", 
true, reinterpret_cast(&Anvil::Vulkan::vkCmdCopyImageToBuffer)}, + {"vkCmdUpdateBuffer", true, reinterpret_cast(&Anvil::Vulkan::vkCmdUpdateBuffer)}, + {"vkCmdFillBuffer", true, reinterpret_cast(&Anvil::Vulkan::vkCmdFillBuffer)}, + {"vkCmdClearColorImage", true, reinterpret_cast(&Anvil::Vulkan::vkCmdClearColorImage)}, + {"vkCmdClearDepthStencilImage", true, reinterpret_cast(&Anvil::Vulkan::vkCmdClearDepthStencilImage)}, + {"vkCmdClearAttachments", true, reinterpret_cast(&Anvil::Vulkan::vkCmdClearAttachments)}, + {"vkCmdResolveImage", true, reinterpret_cast(&Anvil::Vulkan::vkCmdResolveImage)}, + {"vkCmdSetEvent", true, reinterpret_cast(&Anvil::Vulkan::vkCmdSetEvent)}, + {"vkCmdResetEvent", true, reinterpret_cast(&Anvil::Vulkan::vkCmdResetEvent)}, + {"vkCmdWaitEvents", true, reinterpret_cast(&Anvil::Vulkan::vkCmdWaitEvents)}, + {"vkCmdPipelineBarrier", true, reinterpret_cast(&Anvil::Vulkan::vkCmdPipelineBarrier)}, + {"vkCmdBeginQuery", true, reinterpret_cast(&Anvil::Vulkan::vkCmdBeginQuery)}, + {"vkCmdEndQuery", true, reinterpret_cast(&Anvil::Vulkan::vkCmdEndQuery)}, + {"vkCmdResetQueryPool", true, reinterpret_cast(&Anvil::Vulkan::vkCmdResetQueryPool)}, + {"vkCmdWriteTimestamp", true, reinterpret_cast(&Anvil::Vulkan::vkCmdWriteTimestamp)}, + {"vkCmdCopyQueryPoolResults", true, reinterpret_cast(&Anvil::Vulkan::vkCmdCopyQueryPoolResults)}, + {"vkCmdPushConstants", true, reinterpret_cast(&Anvil::Vulkan::vkCmdPushConstants)}, + {"vkCmdBeginRenderPass", true, reinterpret_cast(&Anvil::Vulkan::vkCmdBeginRenderPass)}, + {"vkCmdNextSubpass", true, reinterpret_cast(&Anvil::Vulkan::vkCmdNextSubpass)}, + {"vkCmdEndRenderPass", true, reinterpret_cast(&Anvil::Vulkan::vkCmdEndRenderPass)}, + {"vkCmdExecuteCommands", true, reinterpret_cast(&Anvil::Vulkan::vkCmdExecuteCommands)} + }; + #endif + + FunctionData functions_vk11[] = + { + {"vkBindBufferMemory2", true, reinterpret_cast(&Anvil::Vulkan::vkBindBufferMemory2)}, + {"vkCmdDispatchBase", true, reinterpret_cast(&Anvil::Vulkan::vkCmdDispatchBase)}, + {"vkCmdSetDeviceMask", true, reinterpret_cast(&Anvil::Vulkan::vkCmdSetDeviceMask)}, + {"vkCreateDescriptorUpdateTemplate", true, reinterpret_cast(&Anvil::Vulkan::vkCreateDescriptorUpdateTemplate)}, + {"vkCreateSamplerYcbcrConversion", true, reinterpret_cast(&Anvil::Vulkan::vkCreateSamplerYcbcrConversion)}, + {"vkDestroyDescriptorUpdateTemplate", true, reinterpret_cast(&Anvil::Vulkan::vkDestroyDescriptorUpdateTemplate)}, + {"vkDestroySamplerYcbcrConversion", true, reinterpret_cast(&Anvil::Vulkan::vkDestroySamplerYcbcrConversion)}, + {"vkEnumerateInstanceVersion", false, reinterpret_cast(&Anvil::Vulkan::vkEnumerateInstanceVersion)}, + {"vkEnumeratePhysicalDeviceGroups", true, reinterpret_cast(&Anvil::Vulkan::vkEnumeratePhysicalDeviceGroups)}, + {"vkGetBufferMemoryRequirements2", true, reinterpret_cast(&Anvil::Vulkan::vkGetBufferMemoryRequirements2)}, + {"vkGetDescriptorSetLayoutSupport", true, reinterpret_cast(&Anvil::Vulkan::vkGetDescriptorSetLayoutSupport)}, + {"vkGetDeviceGroupPeerMemoryFeatures", true, reinterpret_cast(&Anvil::Vulkan::vkGetDeviceGroupPeerMemoryFeatures)}, + {"vkGetDeviceQueue2", true, reinterpret_cast(&Anvil::Vulkan::vkGetDeviceQueue2)}, + {"vkGetImageMemoryRequirements2", true, reinterpret_cast(&Anvil::Vulkan::vkGetImageMemoryRequirements2)}, + {"vkGetImageSparseMemoryRequirements2", true, reinterpret_cast(&Anvil::Vulkan::vkGetImageSparseMemoryRequirements2)}, + {"vkGetPhysicalDeviceExternalBufferProperties", true, 
reinterpret_cast(&Anvil::Vulkan::vkGetPhysicalDeviceExternalBufferProperties)}, + {"vkGetPhysicalDeviceExternalFenceProperties", true, reinterpret_cast(&Anvil::Vulkan::vkGetPhysicalDeviceExternalFenceProperties)}, + {"vkGetPhysicalDeviceExternalSemaphoreProperties", true, reinterpret_cast(&Anvil::Vulkan::vkGetPhysicalDeviceExternalSemaphoreProperties)}, + {"vkGetPhysicalDeviceFeatures2", true, reinterpret_cast(&Anvil::Vulkan::vkGetPhysicalDeviceFeatures2)}, + {"vkGetPhysicalDeviceFormatProperties2", true, reinterpret_cast(&Anvil::Vulkan::vkGetPhysicalDeviceFormatProperties2)}, + {"vkGetPhysicalDeviceImageFormatProperties2", true, reinterpret_cast(&Anvil::Vulkan::vkGetPhysicalDeviceImageFormatProperties2)}, + {"vkGetPhysicalDeviceMemoryProperties2", true, reinterpret_cast(&Anvil::Vulkan::vkGetPhysicalDeviceMemoryProperties2)}, + {"vkGetPhysicalDeviceProperties2", true, reinterpret_cast(&Anvil::Vulkan::vkGetPhysicalDeviceProperties2)}, + {"vkGetPhysicalDeviceQueueFamilyProperties2", true, reinterpret_cast(&Anvil::Vulkan::vkGetPhysicalDeviceQueueFamilyProperties2)}, + {"vkGetPhysicalDeviceSparseImageFormatProperties2", true, reinterpret_cast(&Anvil::Vulkan::vkGetPhysicalDeviceSparseImageFormatProperties2)}, + {"vkTrimCommandPool", true, reinterpret_cast(&Anvil::Vulkan::vkTrimCommandPool)}, + {"vkUpdateDescriptorSetWithTemplate", true, reinterpret_cast(&Anvil::Vulkan::vkUpdateDescriptorSetWithTemplate)}, + }; + + if (g_core_func_ptrs_inited && + g_instance_func_ptrs_inited) + { + result = true; + + goto end; + } + + #if !defined(ANVIL_LINK_STATICALLY_WITH_VULKAN_LIB) + { + if (g_vk_library_ptr == nullptr) + { + g_vk_library_ptr = Anvil::Library::create(ANVIL_VULKAN_DYNAMIC_DLL); + + if (g_vk_library_ptr == nullptr) + { + anvil_assert(g_vk_library_ptr != nullptr); + + goto end; + } + } + + /* VK 1.0 func ptrgetters - all entrypoints must be present */ + for (const auto& current_func_data : functions_vk10) + { + if (!current_func_data.requires_getter_call && + !g_core_func_ptrs_inited) + { + *current_func_data.result_func_ptr = g_vk_library_ptr->get_proc_address(current_func_data.func_name.c_str() ); + + if (*current_func_data.result_func_ptr == nullptr) + { + anvil_assert(*current_func_data.result_func_ptr != nullptr); + + goto end; + } + } + + if ((current_func_data.requires_getter_call) && + (g_core_func_ptrs_inited) && + (m_instance != VK_NULL_HANDLE) ) + { + *current_func_data.result_func_ptr = reinterpret_cast(Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + current_func_data.func_name.c_str() )); + + if (*current_func_data.result_func_ptr == nullptr) + { + anvil_assert(*current_func_data.result_func_ptr != nullptr); + + goto end; + } + } + } + } + #endif + + /* VK 1.1 func ptr getters - all entrypoints may be missing on implementations that do not support the API */ + for (const auto& current_func_data : functions_vk11) + { + if (!current_func_data.requires_getter_call) + { + *current_func_data.result_func_ptr = reinterpret_cast(Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + current_func_data.func_name.c_str() )); + } + + if ((current_func_data.requires_getter_call) && + (m_instance != VK_NULL_HANDLE) ) + { + *current_func_data.result_func_ptr = reinterpret_cast(Anvil::Vulkan::vkGetInstanceProcAddr(m_instance, + current_func_data.func_name.c_str() )); + } + } - anvil_assert(iterator != m_physical_devices.end() ); + /* Done */ + g_core_func_ptrs_inited = true; - if (iterator != m_physical_devices.end() ) + if (m_instance != VK_NULL_HANDLE) { - m_physical_devices.erase(iterator); + 
g_instance_func_ptrs_inited = true; } -} \ No newline at end of file + +end: + return result; +} + +/** Please see header for specification */ +bool Anvil::Instance::is_instance_extension_enabled(const char* in_extension_name) const +{ + return m_enabled_extensions_info_ptr->get_instance_extension_info()->by_name(in_extension_name); +} + +/** Please see header for specification */ +bool Anvil::Instance::is_instance_extension_supported(const char* in_extension_name) const +{ + return std::find(m_global_layer.extensions.begin(), + m_global_layer.extensions.end(), + in_extension_name) != m_global_layer.extensions.end(); +} diff --git a/src/wrappers/memory_block.cpp b/src/wrappers/memory_block.cpp index 0c7260ee..8d32db43 100644 --- a/src/wrappers/memory_block.cpp +++ b/src/wrappers/memory_block.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -21,7 +21,11 @@ // #include "misc/debug.h" +#include "misc/external_handle.h" +#include "misc/memory_allocator.h" +#include "misc/memory_block_create_info.h" #include "misc/object_tracker.h" +#include "misc/struct_chainer.h" #include "wrappers/buffer.h" #include "wrappers/device.h" #include "wrappers/image.h" @@ -29,108 +33,69 @@ #include "wrappers/physical_device.h" /* Please see header for specification */ -Anvil::MemoryBlock::MemoryBlock(std::weak_ptr in_device_ptr, - uint32_t in_allowed_memory_bits, - VkDeviceSize in_size, - Anvil::MemoryFeatureFlags in_memory_features) - :m_allowed_memory_bits (in_allowed_memory_bits), - m_device_ptr (in_device_ptr), - m_gpu_data_map_count (0), - m_gpu_data_ptr (nullptr), - m_memory (VK_NULL_HANDLE), - m_memory_features (in_memory_features), - m_parent_memory_block_ptr(nullptr), - m_size (in_size), - m_start_offset (0) +Anvil::MemoryBlock::MemoryBlock(Anvil::MemoryBlockCreateInfoUniquePtr in_create_info_ptr) + :MTSafetySupportProvider (Anvil::Utils::convert_mt_safety_enum_to_boolean(in_create_info_ptr->get_mt_safety(), + in_create_info_ptr->get_device () )), + m_backend_object (nullptr), + m_gpu_data_map_count (0), + m_gpu_data_ptr (nullptr), + m_memory (VK_NULL_HANDLE), + m_parent_memory_allocator_backend_ptr(nullptr) { - /* Register the object */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_MEMORY_BLOCK, - this); -} + m_create_info_ptr = std::move(in_create_info_ptr); -/* Please see header for specification */ -Anvil::MemoryBlock::MemoryBlock(std::shared_ptr in_parent_memory_block_ptr, - VkDeviceSize in_start_offset, - VkDeviceSize in_size) -{ - anvil_assert(in_parent_memory_block_ptr != nullptr); - anvil_assert(in_parent_memory_block_ptr->m_gpu_data_ptr == nullptr); - - m_allowed_memory_bits = in_parent_memory_block_ptr->m_allowed_memory_bits; - m_device_ptr = in_parent_memory_block_ptr->m_device_ptr; - m_gpu_data_map_count = UINT32_MAX; - m_gpu_data_ptr = nullptr; - m_memory_features = in_parent_memory_block_ptr->m_memory_features; - m_memory = VK_NULL_HANDLE; - m_memory_type_index = UINT32_MAX; - m_on_release_callback_function = in_parent_memory_block_ptr->m_on_release_callback_function; - m_parent_memory_block_ptr = in_parent_memory_block_ptr; - m_size = in_size; - m_start_offset = in_start_offset + m_parent_memory_block_ptr->m_start_offset; - - anvil_assert(m_start_offset >= 
m_parent_memory_block_ptr->m_start_offset); - anvil_assert(m_start_offset + m_size <= m_parent_memory_block_ptr->m_start_offset + m_parent_memory_block_ptr->m_size); - - /* Register the object */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_MEMORY_BLOCK, - this); -} - -/* Please see header for specification */ -Anvil::MemoryBlock::MemoryBlock(std::weak_ptr in_device_ptr, - VkDeviceMemory in_memory, - uint32_t in_allowed_memory_bits, - Anvil::MemoryFeatureFlags in_memory_features, - uint32_t in_memory_type_index, - VkDeviceSize in_size, - VkDeviceSize in_start_offset, - OnMemoryBlockReleaseCallbackFunction in_on_release_callback_function) -{ - anvil_assert(in_on_release_callback_function != nullptr); - - m_allowed_memory_bits = in_allowed_memory_bits; - m_device_ptr = in_device_ptr; - m_gpu_data_map_count = 0; - m_gpu_data_ptr = nullptr; - m_memory_features = in_memory_features; - m_memory = in_memory; - m_memory_type_index = in_memory_type_index; - m_parent_memory_block_ptr = nullptr; - m_on_release_callback_function = in_on_release_callback_function; - m_size = in_size; - m_start_offset = in_start_offset; + if (m_create_info_ptr->get_parent_memory_block() != nullptr) + { + m_relative_start_offset = m_create_info_ptr->get_parent_memory_block()->m_start_offset; + m_start_offset = m_create_info_ptr->get_start_offset() + m_relative_start_offset; + } + else + { + m_relative_start_offset = 0; + m_start_offset = m_create_info_ptr->get_start_offset(); + } /* Register the object */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_MEMORY_BLOCK, - this); + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::ANVIL_MEMORY_BLOCK, + this); } /** Releases the underlying Vulkan memory object. */ Anvil::MemoryBlock::~MemoryBlock() { - if (m_parent_memory_block_ptr == nullptr) + auto on_release_callback_function = m_create_info_ptr->get_on_release_callback_function(); + + #ifdef _DEBUG { - anvil_assert(m_gpu_data_map_count.load() == 0); + auto parent_memory_block_ptr = m_create_info_ptr->get_parent_memory_block(); + + if (parent_memory_block_ptr == nullptr) + { + anvil_assert(m_gpu_data_map_count.load() == 0); + } } + #endif - if (m_on_release_callback_function != nullptr) + if (on_release_callback_function != nullptr) { - m_on_release_callback_function(this); + on_release_callback_function(this); } /* Unregister the object */ - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_MEMORY_BLOCK, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::ANVIL_MEMORY_BLOCK, this); if (m_memory != VK_NULL_HANDLE) { - if (m_on_release_callback_function == nullptr) + if (on_release_callback_function == nullptr) { - std::shared_ptr device_locked_ptr(m_device_ptr); - - vkFreeMemory(device_locked_ptr->get_device_vk(), - m_memory, - nullptr /* pAllocator */); + lock(); + { + Anvil::Vulkan::vkFreeMemory(m_create_info_ptr->get_device()->get_device_vk(), + m_memory, + nullptr /* pAllocator */); + } + unlock(); } m_memory = VK_NULL_HANDLE; @@ -140,11 +105,11 @@ Anvil::MemoryBlock::~MemoryBlock() /** Finishes the memory mapping process, opened earlier with a open_gpu_memory_access() call. 
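
/* The constructor above stores both a relative offset (within the immediate parent block) and an absolute
 * one. A hedged sketch of the same bookkeeping in isolation; BlockSketch is an illustrative type, not Anvil API. */
struct BlockSketch
{
    const BlockSketch* parent_ptr;     /* nullptr for a root allocation        */
    VkDeviceSize       relative_start; /* offset within the immediate parent   */
};

VkDeviceSize absolute_start_offset(const BlockSketch* block_ptr)
{
    VkDeviceSize offset = 0;

    /* Walk up the parent chain, accumulating relative offsets */
    for (const BlockSketch* current_ptr = block_ptr; current_ptr != nullptr; current_ptr = current_ptr->parent_ptr)
    {
        offset += current_ptr->relative_start;
    }

    return offset;
}
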
*/ void Anvil::MemoryBlock::close_gpu_memory_access() { - std::shared_ptr device_locked_ptr(m_device_ptr); + auto parent_memory_block_ptr = m_create_info_ptr->get_parent_memory_block(); - if (m_parent_memory_block_ptr != nullptr) + if (parent_memory_block_ptr != nullptr) { - m_parent_memory_block_ptr->close_gpu_memory_access(); + parent_memory_block_ptr->close_gpu_memory_access(); } else { @@ -152,8 +117,20 @@ void Anvil::MemoryBlock::close_gpu_memory_access() if (m_gpu_data_map_count.fetch_sub(1) == 1) { - vkUnmapMemory(device_locked_ptr->get_device_vk(), - m_memory); + lock(); + { + if (m_parent_memory_allocator_backend_ptr != nullptr) + { + m_parent_memory_allocator_backend_ptr->unmap(m_backend_object); + } + else + { + /* This block will be entered for memory blocks instantiated without a memory allocator */ + Anvil::Vulkan::vkUnmapMemory(m_create_info_ptr->get_device()->get_device_vk(), + m_memory); + } + } + unlock(); m_gpu_data_ptr = nullptr; } @@ -161,76 +138,182 @@ void Anvil::MemoryBlock::close_gpu_memory_access() } /* Please see header for specification */ -std::shared_ptr Anvil::MemoryBlock::create(std::weak_ptr in_device_ptr, - uint32_t in_allowed_memory_bits, - VkDeviceSize in_size, - Anvil::MemoryFeatureFlags in_memory_features) +Anvil::MemoryBlockUniquePtr Anvil::MemoryBlock::create(Anvil::MemoryBlockCreateInfoUniquePtr in_create_info_ptr, + VkResult* out_opt_result_ptr) { - std::shared_ptr result_ptr; + MemoryBlockUniquePtr result_ptr(nullptr, + std::default_delete() ); + const auto type (in_create_info_ptr->get_type() ); result_ptr.reset( - new Anvil::MemoryBlock(in_device_ptr, - in_allowed_memory_bits, - in_size, - in_memory_features) + new Anvil::MemoryBlock( + std::move(in_create_info_ptr) + ) ); - if (!result_ptr->init() ) + if (result_ptr != nullptr) { - result_ptr.reset(); + if (type == Anvil::MemoryBlockType::REGULAR || + type == Anvil::MemoryBlockType::REGULAR_WITH_MEMORY_TYPE ) + { + if (!result_ptr->init(out_opt_result_ptr) ) + { + result_ptr.reset(); + } + } + else + { + result_ptr->m_memory_type_props_ptr = &result_ptr->m_create_info_ptr->get_device()->get_physical_device_memory_properties().types.at(result_ptr->m_create_info_ptr->get_memory_type_index() ); + + if (type == Anvil::MemoryBlockType::DERIVED_WITH_CUSTOM_DELETE_PROC) + { + result_ptr->m_memory = result_ptr->m_create_info_ptr->get_memory(); + } + + if (out_opt_result_ptr != nullptr) + { + *out_opt_result_ptr = VkResult::VK_SUCCESS; + } + } } return result_ptr; } -/* Please see header for specification */ -std::shared_ptr Anvil::MemoryBlock::create_derived(std::shared_ptr in_parent_memory_block_ptr, - VkDeviceSize in_start_offset, - VkDeviceSize in_size) +Anvil::ExternalHandleUniquePtr Anvil::MemoryBlock::export_to_external_memory_handle(const Anvil::ExternalMemoryHandleTypeFlagBits& in_memory_handle_type) { - std::shared_ptr result_ptr; + #if defined(_WIN32) + const auto invalid_handle = nullptr; + const bool is_autorelease_handle = Anvil::Utils::is_nt_handle(in_memory_handle_type); + const bool only_one_handle_ever_permitted = Anvil::Utils::is_nt_handle(in_memory_handle_type); + #else + const auto invalid_handle = -1; + const bool is_autorelease_handle = true; + const bool only_one_handle_ever_permitted = false; + #endif + + auto memory = get_memory(); + MemoryBlock* parent_memory_block_ptr = nullptr; + ExternalHandleType result_handle = invalid_handle; + Anvil::ExternalHandleUniquePtr result_returned_ptr; - if (in_parent_memory_block_ptr == nullptr) + /* Sanity checks */ + #if defined(_WIN32) { - 
anvil_assert(in_parent_memory_block_ptr != nullptr); + if (!m_create_info_ptr->get_device()->get_extension_info()->khr_external_memory_win32() ) + { + anvil_assert(m_create_info_ptr->get_device()->get_extension_info()->khr_external_memory_win32() ); - goto end; + goto end; + } } + #else + { + if (!m_create_info_ptr->get_device()->get_extension_info()->khr_external_memory_fd() ) + { + anvil_assert(m_create_info_ptr->get_device()->get_extension_info()->khr_external_memory_fd() ); - result_ptr.reset( - new Anvil::MemoryBlock(in_parent_memory_block_ptr, - in_start_offset, - in_size) - ); + goto end; + } + } + #endif -end: - return result_ptr; -} + if (m_create_info_ptr->get_type() != Anvil::MemoryBlockType::REGULAR && + m_create_info_ptr->get_type() != Anvil::MemoryBlockType::REGULAR_WITH_MEMORY_TYPE ) + { + parent_memory_block_ptr = m_create_info_ptr->get_parent_memory_block(); -/* Please see header for specification */ -std::shared_ptr Anvil::MemoryBlock::create_derived_with_custom_delete_proc(std::weak_ptr in_device_ptr, - VkDeviceMemory in_memory, - uint32_t in_allowed_memory_bits, - Anvil::MemoryFeatureFlags in_memory_features, - uint32_t in_memory_type_index, - VkDeviceSize in_size, - VkDeviceSize in_start_offset, - OnMemoryBlockReleaseCallbackFunction in_on_release_callback_function) -{ - std::shared_ptr result_ptr; + while (parent_memory_block_ptr->get_create_info_ptr()->get_parent_memory_block() != nullptr) + { + parent_memory_block_ptr = parent_memory_block_ptr->get_create_info_ptr()->get_parent_memory_block(); + } - result_ptr.reset( - new Anvil::MemoryBlock(in_device_ptr, - in_memory, - in_allowed_memory_bits, - in_memory_features, - in_memory_type_index, - in_size, - in_start_offset, - in_on_release_callback_function) - ); + anvil_assert(m_create_info_ptr->get_type () == Anvil::MemoryBlockType::DERIVED && + m_create_info_ptr->get_start_offset() == 0 && + m_create_info_ptr->get_size () == parent_memory_block_ptr->get_create_info_ptr()->get_size() ); + } + else + { + anvil_assert(m_create_info_ptr->get_parent_memory_block() == nullptr); - return result_ptr; + parent_memory_block_ptr = this; + } + + /* Should we try to return a handle which has already been created for this memory block? */ + if (only_one_handle_ever_permitted) + { + auto map_iterator = parent_memory_block_ptr->m_external_handle_type_to_external_handle.find(in_memory_handle_type); + if (map_iterator != parent_memory_block_ptr->m_external_handle_type_to_external_handle.end() ) + { + result_returned_ptr = Anvil::ExternalHandleUniquePtr(map_iterator->second.get(), + [](Anvil::ExternalHandle*){} ); + + goto end; + } + } + + /* Apparently not, go and try to open the new handle then. 
*/ + { + #if defined(_WIN32) + const auto entrypoints_ptr = &m_create_info_ptr->get_device()->get_extension_khr_external_memory_win32_entrypoints(); + VkMemoryGetWin32HandleInfoKHR info; + #else + const auto entrypoints_ptr = &m_create_info_ptr->get_device()->get_extension_khr_external_memory_fd_entrypoints(); + VkMemoryGetFdInfoKHR info; + #endif + + anvil_assert(memory != VK_NULL_HANDLE); + + info.handleType = static_cast(in_memory_handle_type); + info.memory = memory; + info.pNext = nullptr; + + #if defined(_WIN32) + { + info.sType = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR; + } + #else + { + info.sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR; + } + #endif + + #if defined(_WIN32) + if (entrypoints_ptr->vkGetMemoryWin32HandleKHR(m_create_info_ptr->get_device()->get_device_vk(), + &info, + &result_handle) != VK_SUCCESS) + #else + if (entrypoints_ptr->vkGetMemoryFdKHR(m_create_info_ptr->get_device()->get_device_vk(), + &info, + &result_handle) != VK_SUCCESS) + #endif + { + goto end; + } + } + + /* Cache the newly created handle if it's a NT handle */ + if (only_one_handle_ever_permitted) + { + Anvil::ExternalHandleUniquePtr result_owned_ptr; + + result_owned_ptr = Anvil::ExternalHandle::create (result_handle, + is_autorelease_handle); /* in_close_at_destruction_time */ + result_returned_ptr = Anvil::ExternalHandleUniquePtr(result_owned_ptr.get(), + [](Anvil::ExternalHandle*){} ); + + parent_memory_block_ptr->m_external_handle_type_to_external_handle[in_memory_handle_type] = std::move(result_owned_ptr); + } + else + { + result_returned_ptr = Anvil::ExternalHandle::create(result_handle, + is_autorelease_handle); /* in_close_at_destruction_time */ + } + + anvil_assert(result_returned_ptr != nullptr); + +end: + return result_returned_ptr; } /** Returns index of a memory type which meets the specified requirements @@ -249,79 +332,231 @@ std::shared_ptr Anvil::MemoryBlock::create_derived_with_cust uint32_t Anvil::MemoryBlock::get_device_memory_type_index(uint32_t in_memory_type_bits, Anvil::MemoryFeatureFlags in_memory_features) { - std::shared_ptr device_locked_ptr (m_device_ptr); - const bool is_coherent_memory_required ((in_memory_features & MEMORY_FEATURE_FLAG_HOST_COHERENT) != 0); - const bool is_device_local_memory_required ((in_memory_features & MEMORY_FEATURE_FLAG_DEVICE_LOCAL) != 0); - const bool is_host_cached_memory_required ((in_memory_features & MEMORY_FEATURE_FLAG_HOST_CACHED) != 0); - const bool is_lazily_allocated_memory_required((in_memory_features & MEMORY_FEATURE_FLAG_LAZILY_ALLOCATED) != 0); - const bool is_mappable_memory_required ((in_memory_features & MEMORY_FEATURE_FLAG_MAPPABLE) != 0); - const Anvil::MemoryTypes& memory_types (device_locked_ptr->get_physical_device_memory_properties().types); - - if (!is_mappable_memory_required) - { - anvil_assert(!is_coherent_memory_required); - } - - const std::size_t n_memory_types = memory_types.size(); - uint32_t result = UINT32_MAX; - - for (uint32_t n_memory_type = 0; - n_memory_type < n_memory_types; - ++n_memory_type) - { - const Anvil::MemoryType& current_memory_type = memory_types[n_memory_type]; - - if (((is_coherent_memory_required && (current_memory_type.flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) || - !is_coherent_memory_required) && - ((is_device_local_memory_required && (current_memory_type.flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT)) || - !is_device_local_memory_required) && - ((is_host_cached_memory_required && (current_memory_type.flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT)) || - 
!is_host_cached_memory_required) && - ((is_lazily_allocated_memory_required && (current_memory_type.flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT)) || - !is_lazily_allocated_memory_required) && - ((is_mappable_memory_required && (current_memory_type.flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)) || - !is_mappable_memory_required) ) - { - if ( (in_memory_type_bits & (1 << n_memory_type)) != 0) - { - result = n_memory_type; + uint32_t filtered_mem_types = 0; + uint32_t result = UINT32_MAX; - break; - } - } + if (!Anvil::MemoryAllocator::get_mem_types_supporting_mem_features(get_create_info_ptr()->get_device(), + in_memory_type_bits, + in_memory_features, + &filtered_mem_types) ) + { + goto end; } + /* Simply pick the first lit bit */ + result = 0; + + while ((filtered_mem_types & (1 << 0)) == 0) + { + result ++; + filtered_mem_types >>= 1; + } + +end: anvil_assert(result != UINT32_MAX); return result; } /* Allocates actual memory and caches a number of properties used to spawn the memory block */ -bool Anvil::MemoryBlock::init() +bool Anvil::MemoryBlock::init(VkResult* out_opt_result) { - VkMemoryAllocateInfo buffer_data_alloc_info; - std::shared_ptr device_locked_ptr (m_device_ptr); - VkResult result; - bool result_bool = false; - - buffer_data_alloc_info.allocationSize = m_size; - buffer_data_alloc_info.memoryTypeIndex = get_device_memory_type_index(m_allowed_memory_bits, - m_memory_features); - buffer_data_alloc_info.pNext = nullptr; - buffer_data_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO; - - result = vkAllocateMemory(device_locked_ptr->get_device_vk(), - &buffer_data_alloc_info, - nullptr, /* pAllocator */ - &m_memory); - - anvil_assert_vk_call_succeeded(result); + VkResult result; + bool result_bool = false; + Anvil::StructChainer struct_chainer; + + { + VkMemoryAllocateInfo mem_alloc_info; + + mem_alloc_info.allocationSize = m_create_info_ptr->get_size(); + mem_alloc_info.memoryTypeIndex = (m_create_info_ptr->get_imported_external_memory_handle_type() != Anvil::ExternalMemoryHandleTypeFlagBits::NONE) ? m_create_info_ptr->get_memory_type_index() + : (m_create_info_ptr->get_type () != Anvil::MemoryBlockType::REGULAR) ? m_create_info_ptr->get_memory_type_index() + : get_device_memory_type_index (m_create_info_ptr->get_allowed_memory_bits(), + m_create_info_ptr->get_memory_features () ); + mem_alloc_info.pNext = nullptr; + mem_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO; + + m_memory_type_props_ptr = &m_create_info_ptr->get_device()->get_physical_device_memory_properties().types.at(mem_alloc_info.memoryTypeIndex); + + struct_chainer.append_struct(mem_alloc_info); + + /* Cache the memory type index in the create info struct. */ + m_create_info_ptr->set_memory_type_index(mem_alloc_info.memoryTypeIndex); + } + + { + Anvil::Buffer* dedicated_allocation_buffer_ptr = nullptr; + Anvil::Image* dedicated_allocation_image_ptr = nullptr; + bool use_dedicated_allocation = false; + + m_create_info_ptr->get_dedicated_allocation_properties(&use_dedicated_allocation, + &dedicated_allocation_buffer_ptr, + &dedicated_allocation_image_ptr); + + if (use_dedicated_allocation) + { + VkMemoryDedicatedAllocateInfoKHR alloc_info; + + alloc_info.buffer = (dedicated_allocation_buffer_ptr != nullptr) ? dedicated_allocation_buffer_ptr->get_buffer(false) : VK_NULL_HANDLE; + alloc_info.image = (dedicated_allocation_image_ptr != nullptr) ? 
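
/* get_device_memory_type_index() above delegates the feature filtering to the memory allocator; the raw
 * Vulkan pattern underneath is the classic scan below: accept the first type that is both permitted by the
 * resource's memoryTypeBits mask and carries every required property flag. Hedged sketch, not Anvil code. */
uint32_t find_memory_type_index(const VkPhysicalDeviceMemoryProperties& mem_props,
                                uint32_t                                allowed_type_bits,
                                VkMemoryPropertyFlags                   required_flags)
{
    for (uint32_t type_index = 0; type_index < mem_props.memoryTypeCount; ++type_index)
    {
        const bool is_allowed = (allowed_type_bits & (1u << type_index)) != 0;
        const bool has_flags  = (mem_props.memoryTypes[type_index].propertyFlags & required_flags) == required_flags;

        if (is_allowed && has_flags)
        {
            return type_index;
        }
    }

    return UINT32_MAX; /* no compatible memory type */
}
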
dedicated_allocation_image_ptr->get_image (false) : VK_NULL_HANDLE; + alloc_info.pNext = nullptr; + alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR; + + struct_chainer.append_struct(alloc_info); + } + } + + { + const float& memory_priority = m_create_info_ptr->get_memory_priority(); + + if (memory_priority != FLT_MAX) + { + anvil_assert(memory_priority >= 0.0f && memory_priority <= 1.0f); + + VkMemoryPriorityAllocateInfoEXT alloc_info; + + alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT; + alloc_info.pNext = nullptr; + alloc_info.priority = memory_priority; + + struct_chainer.append_struct(alloc_info); + } + } + + if (m_create_info_ptr->get_exportable_external_memory_handle_types() != Anvil::ExternalMemoryHandleTypeFlagBits::NONE) + { + VkExportMemoryAllocateInfoKHR export_memory_alloc_info; + + export_memory_alloc_info.handleTypes = m_create_info_ptr->get_exportable_external_memory_handle_types().get_vk(); + export_memory_alloc_info.pNext = nullptr; + export_memory_alloc_info.sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR; + + struct_chainer.append_struct(export_memory_alloc_info); + + #if defined(_WIN32) + { + const Anvil::ExternalNTHandleInfo* nt_handle_info_ptr = nullptr; + + if (m_create_info_ptr->get_exportable_nt_handle_info(&nt_handle_info_ptr) ) + { + VkExportMemoryWin32HandleInfoKHR handle_info_khr; + + handle_info_khr.dwAccess = nt_handle_info_ptr->access; + handle_info_khr.name = (nt_handle_info_ptr->name.size() > 0) ? nt_handle_info_ptr->name.c_str() + : nullptr; + handle_info_khr.pAttributes = nt_handle_info_ptr->attributes_ptr; + handle_info_khr.pNext = nullptr; + handle_info_khr.sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR; + + struct_chainer.append_struct(handle_info_khr); + } + } + #endif + } + + { + const Anvil::ExternalMemoryHandleImportInfo* handle_import_info_ptr = nullptr; + + if (m_create_info_ptr->get_external_handle_import_info(&handle_import_info_ptr) ) + { + const auto imported_external_memory_handle_type = m_create_info_ptr->get_imported_external_memory_handle_type(); + + if (imported_external_memory_handle_type == Anvil::ExternalMemoryHandleTypeFlagBits::HOST_ALLOCATION_BIT_EXT || + imported_external_memory_handle_type == Anvil::ExternalMemoryHandleTypeFlagBits::HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT) + { + VkImportMemoryHostPointerInfoEXT handle_info_khr; + + anvil_assert(handle_import_info_ptr->host_ptr != nullptr); + anvil_assert(handle_import_info_ptr->handle == 0); + + handle_info_khr.handleType = static_cast(imported_external_memory_handle_type); + handle_info_khr.pHostPointer = handle_import_info_ptr->host_ptr; + handle_info_khr.pNext = nullptr; + handle_info_khr.sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT; + + struct_chainer.append_struct(handle_info_khr); + } + else + { + #if defined(_WIN32) + VkImportMemoryWin32HandleInfoKHR handle_info_khr; + #else + VkImportMemoryFdInfoKHR handle_info_khr; + #endif + + anvil_assert(handle_import_info_ptr->host_ptr == nullptr); + + #if defined(_WIN32) + { + anvil_assert(handle_import_info_ptr->handle != static_cast(nullptr) || + handle_import_info_ptr->name.size() > 0); + } + #else + { + anvil_assert(handle_import_info_ptr->handle != 0); + } + #endif + + handle_info_khr.handleType = static_cast(imported_external_memory_handle_type); + handle_info_khr.pNext = nullptr; + + #if defined(_WIN32) + handle_info_khr.handle = handle_import_info_ptr->handle; + handle_info_khr.name = (handle_import_info_ptr->name.size() > 0) ? 
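A compact sketch of the host-pointer import case handled in this hunk, with VkImportMemoryHostPointerInfoEXT chained into the allocation info (VK_EXT_external_memory_host). The `device`, `host_ptr`, `size` and `memory_type_index` values are assumptions and must satisfy the extension's alignment and memory-type requirements (typically queried via vkGetMemoryHostPointerPropertiesEXT):

    VkImportMemoryHostPointerInfoEXT import_info = {};

    import_info.sType        = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT;
    import_info.pNext        = nullptr;
    import_info.handleType   = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT;
    import_info.pHostPointer = host_ptr;            /* assumed host allocation, suitably aligned */

    VkMemoryAllocateInfo alloc_info = {};

    alloc_info.sType           = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    alloc_info.pNext           = &import_info;      /* chained import struct */
    alloc_info.allocationSize  = size;              /* assumed size of the host allocation      */
    alloc_info.memoryTypeIndex = memory_type_index; /* assumed importable memory type           */

    VkDeviceMemory imported_memory = VK_NULL_HANDLE;
    VkResult       result          = vkAllocateMemory(device, &alloc_info, nullptr, &imported_memory);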
handle_import_info_ptr->name.c_str() + : nullptr; + handle_info_khr.sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR; + #else + handle_info_khr.fd = handle_import_info_ptr->handle; + handle_info_khr.sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR; + #endif + + struct_chainer.append_struct(handle_info_khr); + } + } + } + + if (m_create_info_ptr->get_device_mask() != 0 && + m_create_info_ptr->get_device()->get_type() == Anvil::DeviceType::MULTI_GPU) + { + VkMemoryAllocateFlagsInfoKHR alloc_info_khr; + + alloc_info_khr.deviceMask = m_create_info_ptr->get_device_mask(); + + /* NOTE: Host-mappable memory must not be multi-instance heap and must exist on only one device. */ + if (((m_create_info_ptr->get_memory_features() & Anvil::MemoryFeatureFlagBits::MAPPABLE_BIT) != 0) && + (Utils::count_set_bits (alloc_info_khr.deviceMask) > 1) ) + { + alloc_info_khr.flags = 0; + } + else + { + alloc_info_khr.flags = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT; + } + + alloc_info_khr.pNext = nullptr; + alloc_info_khr.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR; + + struct_chainer.append_struct(alloc_info_khr); + } + + { + auto chain_ptr = struct_chainer.create_chain(); + + result = Anvil::Vulkan::vkAllocateMemory(m_create_info_ptr->get_device()->get_device_vk(), + chain_ptr->get_root_struct(), + nullptr, /* pAllocator */ + &m_memory); + } + + if (out_opt_result != nullptr) + { + *out_opt_result = result; + } + if (!is_vk_call_successful(result) ) { goto end; } - m_memory_type_index = buffer_data_alloc_info.memoryTypeIndex; - /* All done */ result_bool = true; end: @@ -329,16 +564,18 @@ bool Anvil::MemoryBlock::init() } /* Please see header for specification */ -bool Anvil::MemoryBlock::intersects(std::shared_ptr in_memory_block_ptr) const +bool Anvil::MemoryBlock::intersects(const Anvil::MemoryBlock* in_memory_block_ptr) const { - bool result = (in_memory_block_ptr == shared_from_this() ); + bool result = (m_memory == in_memory_block_ptr->m_memory); if (result) { - if (!(m_start_offset < in_memory_block_ptr->m_start_offset && - m_start_offset + m_size < in_memory_block_ptr->m_start_offset && - in_memory_block_ptr->m_start_offset < m_start_offset && - in_memory_block_ptr->m_start_offset + m_size < m_start_offset) ) + const auto size = m_create_info_ptr->get_size(); + + if (!(m_start_offset < in_memory_block_ptr->m_start_offset && + m_start_offset + size < in_memory_block_ptr->m_start_offset && + in_memory_block_ptr->m_start_offset < m_start_offset && + in_memory_block_ptr->m_start_offset + size < m_start_offset) ) { result = true; } @@ -356,14 +593,15 @@ bool Anvil::MemoryBlock::map(VkDeviceSize in_start_offset, VkDeviceSize in_size, void** out_opt_data_ptr) { - bool result = false; + const auto memory_features = m_create_info_ptr->get_memory_features(); + bool result = false; /* Sanity checks */ - if (m_parent_memory_block_ptr != nullptr) + if (m_create_info_ptr->get_parent_memory_block() != nullptr) { - result = m_parent_memory_block_ptr->map(m_start_offset + in_start_offset, - in_size, - out_opt_data_ptr); + result = m_create_info_ptr->get_parent_memory_block()->map(m_relative_start_offset + in_start_offset, + in_size, + out_opt_data_ptr); } else { @@ -374,24 +612,31 @@ bool Anvil::MemoryBlock::map(VkDeviceSize in_start_offset, goto end; } - if ((m_memory_features & Anvil::MEMORY_FEATURE_FLAG_HOST_COHERENT) == 0) + if ((memory_features & Anvil::MemoryFeatureFlagBits::HOST_COHERENT_BIT) == 0) { /* Make sure the mapped region is invalidated before letting the user read from it */ - 
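The invalidation performed in the hunk that follows rounds the mapped range to the device's nonCoherentAtomSize limit and clamps it so it cannot overrun the allocation. A self-contained sketch of that rounding, with an illustrative helper that is not part of Anvil::Utils:

    // Invalidates a mapped sub-range of non-coherent memory, honoring the
    // nonCoherentAtomSize alignment rules (use vkFlushMappedMemoryRanges after host writes).
    VkResult invalidate_non_coherent_range(VkDevice       device,
                                           VkDeviceMemory memory,
                                           VkDeviceSize   start_offset,
                                           VkDeviceSize   size,
                                           VkDeviceSize   non_coherent_atom_size,
                                           VkDeviceSize   allocation_size)
    {
        VkMappedMemoryRange range = {};

        range.sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
        range.pNext  = nullptr;
        range.memory = memory;
        range.offset = start_offset - (start_offset % non_coherent_atom_size);      /* round down */
        range.size   = ((size + non_coherent_atom_size - 1) / non_coherent_atom_size)
                       * non_coherent_atom_size;                                    /* round up   */

        if (range.offset + range.size > allocation_size)
        {
            range.size = VK_WHOLE_SIZE; /* cover the rest of the allocation instead of overrunning it */
        }

        return vkInvalidateMappedMemoryRanges(device,
                                              1, /* memoryRangeCount */
                                              &range);
    }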
std::shared_ptr device_locked_ptr (m_device_ptr); - VkMappedMemoryRange mapped_memory_range; - VkResult result_vk (VK_ERROR_INITIALIZATION_FAILED); + VkMappedMemoryRange mapped_memory_range; + const auto non_coherent_atom_size(m_create_info_ptr->get_device()->get_physical_device_properties().core_vk1_0_properties_ptr->limits.non_coherent_atom_size); + VkResult result_vk (VK_ERROR_INITIALIZATION_FAILED); ANVIL_REDUNDANT_VARIABLE(result_vk); mapped_memory_range.memory = get_memory(); - mapped_memory_range.offset = m_start_offset + in_start_offset; + mapped_memory_range.offset = Anvil::Utils::round_down(in_start_offset, + non_coherent_atom_size); mapped_memory_range.pNext = nullptr; - mapped_memory_range.size = in_size; + mapped_memory_range.size = Anvil::Utils::round_up(in_size, + non_coherent_atom_size); mapped_memory_range.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE; - result_vk = vkInvalidateMappedMemoryRanges(device_locked_ptr->get_device_vk(), - 1, /* memRangeCount */ - &mapped_memory_range); + if (mapped_memory_range.size + mapped_memory_range.offset > m_create_info_ptr->get_size() ) + { + mapped_memory_range.size = VK_WHOLE_SIZE; + } + + result_vk = Anvil::Vulkan::vkInvalidateMappedMemoryRanges(m_create_info_ptr->get_device()->get_device_vk(), + 1, /* memRangeCount */ + &mapped_memory_range); anvil_assert_vk_call_succeeded(result_vk); } @@ -416,16 +661,16 @@ bool Anvil::MemoryBlock::map(VkDeviceSize in_start_offset, **/ bool Anvil::MemoryBlock::open_gpu_memory_access() { - std::shared_ptr device_locked_ptr(m_device_ptr); - bool result (false); - VkResult result_vk; + const auto memory_features(m_create_info_ptr->get_memory_features() ); + bool result (false); + VkResult result_vk; /* Sanity checks */ - anvil_assert(m_parent_memory_block_ptr == nullptr); + anvil_assert(m_create_info_ptr->get_parent_memory_block() == nullptr); - if ((m_memory_features & Anvil::MEMORY_FEATURE_FLAG_MAPPABLE) == 0) + if ((memory_features & Anvil::MemoryFeatureFlagBits::MAPPABLE_BIT) == 0) { - anvil_assert((m_memory_features & Anvil::MEMORY_FEATURE_FLAG_MAPPABLE) != 0); + anvil_assert((memory_features & Anvil::MemoryFeatureFlagBits::MAPPABLE_BIT) != 0); goto end; } @@ -433,12 +678,40 @@ bool Anvil::MemoryBlock::open_gpu_memory_access() if (m_gpu_data_map_count.fetch_add(1) == 0) { /* Map the memory region into process space */ - result_vk = vkMapMemory(device_locked_ptr->get_device_vk(), - m_memory, - 0, /* offset */ - m_size, - 0, /* flags */ - (void**) &m_gpu_data_ptr); + lock(); + { + if (m_parent_memory_allocator_backend_ptr != nullptr) + { + result_vk = m_parent_memory_allocator_backend_ptr->map(m_backend_object, + 0, /* in_start_offset */ + m_create_info_ptr->get_start_offset(), + m_create_info_ptr->get_size (), + static_cast(&m_gpu_data_ptr) ); + + m_gpu_data_ptr = reinterpret_cast(m_gpu_data_ptr); + } + else + { + /* This block will be entered for memory blocks instantiated without a memory allocator + * + * TODO: A memory block may hold both buffer and image data. This means that the invocation below may trigger validation warnings telling + * that image memory which is not in GENERAL and PREINITIALIZED layout must not be modified by the host. As long as the map request + * is done specifically for a buffer, this warning is harmless. + * Avoiding this warning is tricky, given the VK restriction where a memory allocation X must not be mapped more than once at a time. 
+ * Effectively, we would need to make the memory alloc backends create separate memory allocs for buffer and image objects, in order + * to support cases where apps need to map >1 memory regions, coming from the same memory block, at once. It would also make the implementation + * less readable. Might want to consider doing this nevertheless one day. + * + */ + result_vk = Anvil::Vulkan::vkMapMemory(m_create_info_ptr->get_device()->get_device_vk(), + m_memory, + 0, /* offset */ + m_create_info_ptr->get_size(), + 0, /* flags */ + static_cast(&m_gpu_data_ptr) ); + } + } + unlock(); anvil_assert_vk_call_succeeded(result_vk); result = is_vk_call_successful(result_vk); @@ -457,18 +730,17 @@ bool Anvil::MemoryBlock::read(VkDeviceSize in_start_offset, VkDeviceSize in_size, void* out_result_ptr) { - std::shared_ptr device_locked_ptr(m_device_ptr); - bool result (false); + bool result(false); anvil_assert(in_size > 0); - anvil_assert(in_start_offset + in_size <= m_size); - anvil_assert(out_result_ptr != nullptr); + anvil_assert(in_start_offset + in_size <= m_create_info_ptr->get_size() ); + anvil_assert(out_result_ptr != nullptr); - if (m_parent_memory_block_ptr != nullptr) + if (m_create_info_ptr->get_parent_memory_block() != nullptr) { - result = m_parent_memory_block_ptr->read(m_start_offset + in_start_offset, - in_size, - out_result_ptr); + result = m_create_info_ptr->get_parent_memory_block()->read(m_relative_start_offset + in_start_offset, + in_size, + out_result_ptr); } else { @@ -479,28 +751,48 @@ bool Anvil::MemoryBlock::read(VkDeviceSize in_start_offset, goto end; } - if ((m_memory_features & Anvil::MEMORY_FEATURE_FLAG_HOST_COHERENT) == 0) + if ((m_create_info_ptr->get_memory_features() & Anvil::MemoryFeatureFlagBits::HOST_COHERENT_BIT) == 0) { VkMappedMemoryRange mapped_memory_range; - VkResult result_vk (VK_ERROR_INITIALIZATION_FAILED); + const auto mem_block_size = m_create_info_ptr->get_size(); + const auto non_coherent_atom_size = m_create_info_ptr->get_device()->get_physical_device_properties().core_vk1_0_properties_ptr->limits.non_coherent_atom_size; + VkResult result_vk = VK_ERROR_INITIALIZATION_FAILED; - anvil_assert (m_start_offset == 0); + // anvil_assert (m_start_offset == 0); ANVIL_REDUNDANT_VARIABLE(result_vk); mapped_memory_range.memory = m_memory; - mapped_memory_range.offset = in_start_offset; + mapped_memory_range.offset = Anvil::Utils::round_down(m_start_offset +in_start_offset, + non_coherent_atom_size); mapped_memory_range.pNext = nullptr; - mapped_memory_range.size = in_size; + mapped_memory_range.size = Anvil::Utils::round_up(in_size, + non_coherent_atom_size); mapped_memory_range.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE; - result_vk = vkInvalidateMappedMemoryRanges(device_locked_ptr->get_device_vk(), - 1, /* memRangeCount */ - &mapped_memory_range); + if(m_parent_memory_allocator_backend_ptr) { + auto underlyingSize = m_parent_memory_allocator_backend_ptr->get_underlying_allocation_size(m_backend_object); + if (underlyingSize && mapped_memory_range.size + mapped_memory_range.offset > *underlyingSize) + mapped_memory_range.size = *underlyingSize - mapped_memory_range.offset; + } + else if (mapped_memory_range.size + mapped_memory_range.offset > mem_block_size) + mapped_memory_range.size = mem_block_size - mapped_memory_range.offset; + + if(m_parent_memory_allocator_backend_ptr) { + auto underlyingSize = m_parent_memory_allocator_backend_ptr->get_underlying_allocation_size(m_backend_object); + if (underlyingSize && mapped_memory_range.size + 
mapped_memory_range.offset > *underlyingSize) + mapped_memory_range.size = *underlyingSize - mapped_memory_range.offset; + } + else if (mapped_memory_range.size + mapped_memory_range.offset > mem_block_size) + mapped_memory_range.size = mem_block_size - mapped_memory_range.offset; + + result_vk = Anvil::Vulkan::vkInvalidateMappedMemoryRanges(m_create_info_ptr->get_device()->get_device_vk(), + 1, /* memRangeCount */ + &mapped_memory_range); anvil_assert_vk_call_succeeded(result_vk); } memcpy(out_result_ptr, - static_cast(m_gpu_data_ptr) + static_cast(in_start_offset), + static_cast(m_gpu_data_ptr) + static_cast(m_start_offset +in_start_offset), static_cast(in_size)); close_gpu_memory_access(); @@ -517,9 +809,9 @@ bool Anvil::MemoryBlock::unmap() { bool result = false; - if (m_parent_memory_block_ptr != nullptr) + if (m_create_info_ptr->get_parent_memory_block() != nullptr) { - result = m_parent_memory_block_ptr->unmap(); + result = m_create_info_ptr->get_parent_memory_block()->unmap(); goto end; } @@ -546,18 +838,17 @@ bool Anvil::MemoryBlock::write(VkDeviceSize in_start_offset, VkDeviceSize in_size, const void* in_data) { - std::shared_ptr device_locked_ptr(m_device_ptr); - bool result (false); + bool result(false); anvil_assert(in_size > 0); - anvil_assert(in_start_offset + in_size <= in_start_offset + m_size); + anvil_assert(in_start_offset + in_size <= in_start_offset + m_create_info_ptr->get_size() ); anvil_assert(in_data != nullptr); - if (m_parent_memory_block_ptr != nullptr) + if (m_create_info_ptr->get_parent_memory_block() != nullptr) { - result = m_parent_memory_block_ptr->write(m_start_offset + in_start_offset, - in_size, - in_data); + result = m_create_info_ptr->get_parent_memory_block()->write(m_relative_start_offset + in_start_offset, + in_size, + in_data); } else { @@ -566,26 +857,38 @@ bool Anvil::MemoryBlock::write(VkDeviceSize in_start_offset, goto end; } - memcpy(static_cast(m_gpu_data_ptr) + static_cast(in_start_offset), + memcpy(static_cast(m_gpu_data_ptr) + static_cast(m_start_offset +in_start_offset), in_data, static_cast(in_size)); - if ((m_memory_features & Anvil::MEMORY_FEATURE_FLAG_HOST_COHERENT) == 0) + if ((m_create_info_ptr->get_memory_features() & Anvil::MemoryFeatureFlagBits::HOST_COHERENT_BIT) == 0) { VkMappedMemoryRange mapped_memory_range; - VkResult result_vk (VK_ERROR_INITIALIZATION_FAILED); + const auto mem_block_size = m_create_info_ptr->get_size(); + const auto non_coherent_atom_size = m_create_info_ptr->get_device()->get_physical_device_properties().core_vk1_0_properties_ptr->limits.non_coherent_atom_size; + auto result_vk = VkResult(VK_ERROR_INITIALIZATION_FAILED); ANVIL_REDUNDANT_VARIABLE(result_vk); mapped_memory_range.memory = m_memory; - mapped_memory_range.offset = in_start_offset; + mapped_memory_range.offset = Anvil::Utils::round_down(m_start_offset +in_start_offset, + non_coherent_atom_size); mapped_memory_range.pNext = nullptr; - mapped_memory_range.size = in_size; + mapped_memory_range.size = Anvil::Utils::round_up(in_size, + non_coherent_atom_size); mapped_memory_range.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE; - result_vk = vkFlushMappedMemoryRanges(device_locked_ptr->get_device_vk(), - 1, /* memRangeCount */ - &mapped_memory_range); + if(m_parent_memory_allocator_backend_ptr) { + auto underlyingSize = m_parent_memory_allocator_backend_ptr->get_underlying_allocation_size(m_backend_object); + if (underlyingSize && mapped_memory_range.size + mapped_memory_range.offset > *underlyingSize) + mapped_memory_range.size = *underlyingSize - 
mapped_memory_range.offset; + } + else if (mapped_memory_range.size + mapped_memory_range.offset > mem_block_size) + mapped_memory_range.size = mem_block_size - mapped_memory_range.offset; + + result_vk = Anvil::Vulkan::vkFlushMappedMemoryRanges(m_create_info_ptr->get_device()->get_device_vk(), + 1, /* memRangeCount */ + &mapped_memory_range); anvil_assert_vk_call_succeeded(result_vk); } @@ -596,4 +899,4 @@ bool Anvil::MemoryBlock::write(VkDeviceSize in_start_offset, end: return result; -} \ No newline at end of file +} diff --git a/src/wrappers/physical_device.cpp b/src/wrappers/physical_device.cpp index cd810bb9..f03a9de3 100644 --- a/src/wrappers/physical_device.cpp +++ b/src/wrappers/physical_device.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -21,33 +21,36 @@ // #include "misc/debug.h" +#include "misc/formats.h" #include "misc/object_tracker.h" +#include "misc/struct_chainer.h" #include "wrappers/device.h" #include "wrappers/instance.h" #include "wrappers/physical_device.h" #include /* Please see header for specification */ -bool Anvil::operator==(const std::shared_ptr& in_physical_device_ptr, +bool Anvil::operator==(const std::unique_ptr& in_physical_device_ptr, const VkPhysicalDevice& in_physical_device_vk) { return (in_physical_device_ptr->get_physical_device() == in_physical_device_vk); } /* Please see header for specification */ -std::weak_ptr Anvil::PhysicalDevice::create(std::shared_ptr in_instance_ptr, - uint32_t in_index, - VkPhysicalDevice in_physical_device) +std::unique_ptr Anvil::PhysicalDevice::create(Anvil::Instance* in_instance_ptr, + uint32_t in_index, + VkPhysicalDevice in_physical_device) { - std::shared_ptr physical_device_ptr(new Anvil::PhysicalDevice(in_instance_ptr, - in_index, - in_physical_device) ); + std::unique_ptr physical_device_ptr( + new Anvil::PhysicalDevice( + in_instance_ptr, + in_index, + in_physical_device) ); - physical_device_ptr->init (); - in_instance_ptr->register_physical_device(physical_device_ptr); + physical_device_ptr->init(); /* Register the physical device instance */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_PHYSICAL_DEVICE, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::PHYSICAL_DEVICE, physical_device_ptr.get() ); return physical_device_ptr; @@ -56,164 +59,1118 @@ std::weak_ptr Anvil::PhysicalDevice::create(std::shared_p /* Please see header for specification */ Anvil::PhysicalDevice::~PhysicalDevice() { - anvil_assert(m_destroyed); + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::PHYSICAL_DEVICE, + this); +} + +/** Initializes _vulkan_physical_device descriptor with actual data by issuing a number + * of Vulkan API calls. + * + * @param physical_device_ptr Pointer to a _vulkan_physical_device instance to be initialized. + * Must not be nullptr. 
+ **/ +bool Anvil::PhysicalDevice::init() +{ + uint32_t n_extensions = 0; + uint32_t n_physical_device_layers = 0; + uint32_t n_physical_device_queues = 0; + bool result = true; + VkResult result_vk = VK_ERROR_INITIALIZATION_FAILED; + bool supports_vk1_1 = false; + + anvil_assert(m_physical_device != VK_NULL_HANDLE); + + { + VkPhysicalDeviceProperties props_vk; + + Anvil::Vulkan::vkGetPhysicalDeviceProperties(m_physical_device, + &props_vk); + + supports_vk1_1 = supports_core_vk1_1(props_vk.apiVersion); + } + + /* Retrieve device extensions */ + result_vk = Anvil::Vulkan::vkEnumerateDeviceExtensionProperties(m_physical_device, + nullptr, /* pLayerName */ + &n_extensions, + nullptr); /* pProperties */ + + if (!is_vk_call_successful(result_vk) ) + { + anvil_assert_vk_call_succeeded(result_vk); + + result = false; + goto end; + } + + if (n_extensions > 0) + { + std::map extension_availability; + std::vector extension_props; + + extension_props.resize(n_extensions); + + result_vk = Anvil::Vulkan::vkEnumerateDeviceExtensionProperties(m_physical_device, + nullptr, /* pLayerName */ + &n_extensions, + &extension_props[0]); + anvil_assert_vk_call_succeeded(result_vk); + + for (uint32_t n_extension = 0; + n_extension < n_extensions; + ++n_extension) + { + extension_availability[extension_props[n_extension].extensionName] = true; + } + + m_extension_info_ptr = Anvil::ExtensionInfo::create_device_extension_info(extension_availability, + false); /* in_unspecified_extension_name_value */ + } + + /* Retrieve and cache device features */ + { + VkPhysicalDeviceFeatures features_vk; + + Anvil::Vulkan::vkGetPhysicalDeviceFeatures(m_physical_device, + &features_vk); + + if (m_instance_ptr->get_enabled_extensions_info()->khr_get_physical_device_properties2() ) + { + Anvil::StructID depth_clip_enable_features_struct_id; + Anvil::StructID descriptor_indexing_features_struct_id; + Anvil::StructID inline_uniform_block_features_struct_id; + Anvil::StructID memory_priority_features_struct_id; + const auto& gpdp2_entrypoints = m_instance_ptr->get_extension_khr_get_physical_device_properties2_entrypoints(); + Anvil::StructID multiview_features_struct_id; + Anvil::StructID protected_memory_features_struct_id; + Anvil::StructID sampler_ycbcr_conversion_features_struct_id; + Anvil::StructID scalar_block_layout_features_struct_id; + Anvil::StructID shader_atomic_int64_features_struct_id; + Anvil::StructID shader_float16_int8_struct_id; + Anvil::StructID storage_features16_struct_id; + Anvil::StructID storage_features8_struct_id; + Anvil::StructChainUniquePtr struct_chain_ptr; + Anvil::StructChainer struct_chainer; + Anvil::StructID transform_feedback_features_struct_id; + Anvil::StructID variable_pointer_features_struct_id; + Anvil::StructID vulkan_memory_model_features_struct_id; + + /* Chain query structs .. 
*/ + { + VkPhysicalDeviceFeatures2KHR features; + + features.pNext = nullptr; + features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR; + + struct_chainer.append_struct(features); + } + + if (m_extension_info_ptr->get_device_extension_info()->ext_depth_clip_enable() ) + { + VkPhysicalDeviceDepthClipEnableFeaturesEXT depth_clip_enable_features; + + depth_clip_enable_features.pNext = nullptr; + depth_clip_enable_features.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT); + + depth_clip_enable_features_struct_id = struct_chainer.append_struct(depth_clip_enable_features); + } + + if (m_extension_info_ptr->get_device_extension_info()->ext_descriptor_indexing() ) + { + VkPhysicalDeviceDescriptorIndexingFeaturesEXT descriptor_indexing_features; + + descriptor_indexing_features.pNext = nullptr; + descriptor_indexing_features.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT); + + descriptor_indexing_features_struct_id = struct_chainer.append_struct(descriptor_indexing_features); + } + + if (m_extension_info_ptr->get_device_extension_info()->ext_inline_uniform_block() ) + { + VkPhysicalDeviceInlineUniformBlockFeaturesEXT inline_uniform_block_features; + + inline_uniform_block_features.pNext = nullptr; + inline_uniform_block_features.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT); + + inline_uniform_block_features_struct_id = struct_chainer.append_struct(inline_uniform_block_features); + } + + if (m_extension_info_ptr->get_device_extension_info()->ext_memory_priority()) + { + VkPhysicalDeviceMemoryPriorityFeaturesEXT memory_priority_features; + + memory_priority_features.pNext = nullptr; + memory_priority_features.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT); + + memory_priority_features_struct_id = struct_chainer.append_struct(memory_priority_features); + } + + if (m_extension_info_ptr->get_device_extension_info()->ext_scalar_block_layout() ) + { + VkPhysicalDeviceScalarBlockLayoutFeaturesEXT scalar_block_layout_features; + + scalar_block_layout_features.pNext = nullptr; + scalar_block_layout_features.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT); + + scalar_block_layout_features_struct_id = struct_chainer.append_struct(scalar_block_layout_features); + } + + if (m_extension_info_ptr->get_device_extension_info()->ext_transform_feedback() ) + { + VkPhysicalDeviceTransformFeedbackFeaturesEXT transform_feedback_features; + + transform_feedback_features.pNext = nullptr; + transform_feedback_features.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT); + + transform_feedback_features_struct_id = struct_chainer.append_struct(transform_feedback_features); + } + + if (m_extension_info_ptr->get_device_extension_info()->khr_16bit_storage() || + supports_vk1_1) + { + VkPhysicalDevice16BitStorageFeaturesKHR storage_features; + + storage_features.pNext = nullptr; + storage_features.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR); + + storage_features16_struct_id = struct_chainer.append_struct(storage_features); + } + + if (m_extension_info_ptr->get_device_extension_info()->khr_8bit_storage() ) + { + VkPhysicalDevice8BitStorageFeaturesKHR storage_features; + + storage_features.pNext = nullptr; + storage_features.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR); + + storage_features8_struct_id = 
struct_chainer.append_struct(storage_features); + } + + if (m_extension_info_ptr->get_device_extension_info()->khr_multiview() || + supports_vk1_1) + { + VkPhysicalDeviceMultiviewFeaturesKHR multiview_features; + + multiview_features.pNext = nullptr; + multiview_features.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR); + + multiview_features_struct_id = struct_chainer.append_struct(multiview_features); + } + + if (m_extension_info_ptr->get_device_extension_info()->khr_sampler_ycbcr_conversion() || + supports_vk1_1) + { + VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR sampler_ycbcr_conversion_features; + + sampler_ycbcr_conversion_features.pNext = nullptr; + sampler_ycbcr_conversion_features.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR); + + sampler_ycbcr_conversion_features_struct_id = struct_chainer.append_struct(sampler_ycbcr_conversion_features); + } + + if (m_extension_info_ptr->get_device_extension_info()->khr_shader_atomic_int64()) + { + VkPhysicalDeviceShaderAtomicInt64FeaturesKHR shader_atomic_int64_features; + + shader_atomic_int64_features.pNext = nullptr; + shader_atomic_int64_features.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR); + + shader_atomic_int64_features_struct_id = struct_chainer.append_struct(shader_atomic_int64_features); + } + + if (m_extension_info_ptr->get_device_extension_info()->khr_shader_float16_int8() ) + { + VkPhysicalDeviceFloat16Int8FeaturesKHR shader_float16_int8_features; + + shader_float16_int8_features.pNext = nullptr; + shader_float16_int8_features.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR); + + shader_float16_int8_struct_id = struct_chainer.append_struct(shader_float16_int8_features); + } + + if (m_extension_info_ptr->get_device_extension_info()->khr_variable_pointers() || + supports_vk1_1) + { + VkPhysicalDeviceVariablePointerFeaturesKHR vp_features; + + vp_features.pNext = nullptr; + vp_features.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR); + + variable_pointer_features_struct_id = struct_chainer.append_struct(vp_features); + } + + if (m_extension_info_ptr->get_device_extension_info()->khr_vulkan_memory_model() ) + { + VkPhysicalDeviceVulkanMemoryModelFeaturesKHR vmm_features; + + vmm_features.pNext = nullptr; + vmm_features.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR); + + vulkan_memory_model_features_struct_id = struct_chainer.append_struct(vmm_features); + } + + if (supports_vk1_1) + { + VkPhysicalDeviceProtectedMemoryFeatures protected_mem_features; + + protected_mem_features.pNext = nullptr; + protected_mem_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES; + + protected_memory_features_struct_id = struct_chainer.append_struct(protected_mem_features); + } + + /* Retrieve the features.. 
*/ + struct_chain_ptr = struct_chainer.create_chain(); + + gpdp2_entrypoints.vkGetPhysicalDeviceFeatures2KHR(m_physical_device, + struct_chain_ptr->get_root_struct() ); + + /* Cache the results */ + + if (depth_clip_enable_features_struct_id.is_valid() ) + { + m_ext_depth_clip_enable_features_ptr.reset( + new EXTDepthClipEnableFeatures(*struct_chain_ptr->get_struct_with_id(depth_clip_enable_features_struct_id) ) + ); + + if (m_ext_depth_clip_enable_features_ptr == nullptr) + { + anvil_assert(m_ext_depth_clip_enable_features_ptr != nullptr); + + result = false; + goto end; + } + } + + if (descriptor_indexing_features_struct_id.is_valid() ) + { + m_ext_descriptor_indexing_features_ptr.reset( + new EXTDescriptorIndexingFeatures(*struct_chain_ptr->get_struct_with_id(descriptor_indexing_features_struct_id) ) + ); + + if (m_ext_descriptor_indexing_features_ptr == nullptr) + { + anvil_assert(m_ext_descriptor_indexing_features_ptr != nullptr); + + result = false; + goto end; + } + } + + if (inline_uniform_block_features_struct_id.is_valid() ) + { + m_ext_inline_uniform_block_features_ptr.reset( + new EXTInlineUniformBlockFeatures(*struct_chain_ptr->get_struct_with_id(inline_uniform_block_features_struct_id) ) + ); + + if (m_ext_inline_uniform_block_features_ptr == nullptr) + { + anvil_assert(m_ext_inline_uniform_block_features_ptr != nullptr); + + result = false; + goto end; + } + } + + if (multiview_features_struct_id.is_valid() ) + { + m_khr_multiview_features_ptr.reset( + new KHRMultiviewFeatures(*struct_chain_ptr->get_struct_with_id(multiview_features_struct_id) ) + ); + + if (m_khr_multiview_features_ptr == nullptr) + { + anvil_assert(m_khr_multiview_features_ptr != nullptr); + + result = false; + goto end; + } + } + + if (sampler_ycbcr_conversion_features_struct_id.is_valid() ) + { + m_khr_sampler_ycbcr_conversion_features_ptr.reset( + new KHRSamplerYCbCrConversionFeatures(*struct_chain_ptr->get_struct_with_id(sampler_ycbcr_conversion_features_struct_id) ) + ); + + if (m_khr_sampler_ycbcr_conversion_features_ptr == nullptr) + { + anvil_assert(m_khr_sampler_ycbcr_conversion_features_ptr != nullptr); + + result = false; + goto end; + } + } + + if (scalar_block_layout_features_struct_id.is_valid() ) + { + m_ext_scalar_block_layout_features_ptr.reset( + new EXTScalarBlockLayoutFeatures(*struct_chain_ptr->get_struct_with_id(scalar_block_layout_features_struct_id) ) + ); + + if (m_ext_scalar_block_layout_features_ptr == nullptr) + { + anvil_assert(m_ext_scalar_block_layout_features_ptr != nullptr); + + result = false; + goto end; + } + } + + if (shader_atomic_int64_features_struct_id.is_valid() ) + { + m_khr_shader_atomic_int64_features_ptr.reset( + new KHRShaderAtomicInt64Features(*struct_chain_ptr->get_struct_with_id(shader_atomic_int64_features_struct_id) ) + ); + + if (m_khr_shader_atomic_int64_features_ptr == nullptr) + { + anvil_assert(m_khr_shader_atomic_int64_features_ptr != nullptr); + + result = false; + goto end; + } + } + + if (transform_feedback_features_struct_id.is_valid() ) + { + m_ext_transform_feedback_features_ptr.reset( + new EXTTransformFeedbackFeatures(*struct_chain_ptr->get_struct_with_id(transform_feedback_features_struct_id) ) + ); + + if (m_ext_transform_feedback_features_ptr == nullptr) + { + anvil_assert(m_ext_transform_feedback_features_ptr != nullptr); + + result = false; + goto end; + } + } + + if (storage_features16_struct_id.is_valid() ) + { + m_khr_16_bit_storage_features_ptr.reset( + new 
KHR16BitStorageFeatures(*struct_chain_ptr->get_struct_with_id(storage_features16_struct_id) ) + ); + + if (m_khr_16_bit_storage_features_ptr == nullptr) + { + anvil_assert(m_khr_16_bit_storage_features_ptr != nullptr); + + result = false; + goto end; + } + } + + if (storage_features8_struct_id.is_valid() ) + { + m_khr_8_bit_storage_features_ptr.reset( + new KHR8BitStorageFeatures(*struct_chain_ptr->get_struct_with_id(storage_features8_struct_id) ) + ); + + if (m_khr_8_bit_storage_features_ptr == nullptr) + { + anvil_assert(m_khr_8_bit_storage_features_ptr != nullptr); + + result = false; + goto end; + } + } + + if (memory_priority_features_struct_id.is_valid() ) + { + m_ext_memory_priority_features_ptr.reset( + new EXTMemoryPriorityFeatures(*struct_chain_ptr->get_struct_with_id(memory_priority_features_struct_id)) + ); + + if (m_ext_memory_priority_features_ptr == nullptr) + { + anvil_assert(m_ext_memory_priority_features_ptr != nullptr); + + result = false; + goto end; + } + } + + if (shader_float16_int8_struct_id.is_valid() ) + { + m_khr_float16_int8_features_ptr.reset( + new KHRFloat16Int8Features(*struct_chain_ptr->get_struct_with_id(shader_float16_int8_struct_id) ) + ); + + if (m_khr_float16_int8_features_ptr == nullptr) + { + anvil_assert(m_khr_float16_int8_features_ptr != nullptr); + + result = false; + goto end; + } + } + + if (variable_pointer_features_struct_id.is_valid() ) + { + m_khr_variable_pointer_features_ptr.reset( + new KHRVariablePointerFeatures(*struct_chain_ptr->get_struct_with_id(variable_pointer_features_struct_id) ) + ); + + if (m_khr_variable_pointer_features_ptr == nullptr) + { + anvil_assert(m_khr_variable_pointer_features_ptr != nullptr); + + result = false; + goto end; + } + } + + if (vulkan_memory_model_features_struct_id.is_valid() ) + { + m_khr_vulkan_memory_model_features_ptr.reset( + new KHRVulkanMemoryModelFeatures(*struct_chain_ptr->get_struct_with_id(vulkan_memory_model_features_struct_id) ) + ); + + if (m_khr_vulkan_memory_model_features_ptr == nullptr) + { + anvil_assert(m_khr_vulkan_memory_model_features_ptr != nullptr); + + result = false; + goto end; + } + } + + if (supports_vk1_1) + { + const auto protected_memory_features = *struct_chain_ptr->get_struct_with_id(protected_memory_features_struct_id); + + m_core_features_vk11_ptr.reset( + new Anvil::PhysicalDeviceFeaturesCoreVK11(protected_memory_features) + ); + + if (m_core_features_vk11_ptr == nullptr) + { + anvil_assert(m_core_features_vk11_ptr != nullptr); + + result = false; + goto end; + } + } + } + + m_core_features_vk10_ptr.reset( + new Anvil::PhysicalDeviceFeaturesCoreVK10(features_vk) + ); + + if (m_core_features_vk10_ptr == nullptr) + { + anvil_assert(m_core_features_vk10_ptr != nullptr); + + result = false; + goto end; + } + + m_features = Anvil::PhysicalDeviceFeatures(m_core_features_vk10_ptr.get (), + m_core_features_vk11_ptr.get (), + m_ext_depth_clip_enable_features_ptr.get (), + m_ext_descriptor_indexing_features_ptr.get (), + m_ext_inline_uniform_block_features_ptr.get (), + m_ext_scalar_block_layout_features_ptr.get (), + m_ext_transform_feedback_features_ptr.get (), + m_ext_memory_priority_features_ptr.get (), + m_khr_16_bit_storage_features_ptr.get (), + m_khr_8_bit_storage_features_ptr.get (), + m_khr_float16_int8_features_ptr.get (), + m_khr_multiview_features_ptr.get (), + m_khr_sampler_ycbcr_conversion_features_ptr.get(), + m_khr_shader_atomic_int64_features_ptr.get (), + m_khr_variable_pointer_features_ptr.get (), + m_khr_vulkan_memory_model_features_ptr.get () ); + } + + 
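The feature query above relies on VK_KHR_get_physical_device_properties2-style pNext chaining; stripped of the struct chainer, the pattern looks roughly like the sketch below. The 16-bit storage struct is just one example of a chained query, and `instance` / `physical_device` are assumed to come from an instance with the extension enabled:

    VkPhysicalDevice16BitStorageFeaturesKHR storage_features_16bit = {};

    storage_features_16bit.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR;
    storage_features_16bit.pNext = nullptr;

    VkPhysicalDeviceFeatures2KHR features2 = {};

    features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR;
    features2.pNext = &storage_features_16bit;   /* chain the extension struct */

    const auto vkGetPhysicalDeviceFeatures2KHR_fn = reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures2KHR>(
        vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceFeatures2KHR") );  /* assumed VkInstance */

    if (vkGetPhysicalDeviceFeatures2KHR_fn != nullptr)
    {
        /* One call fills both the base feature struct and every chained struct. */
        vkGetPhysicalDeviceFeatures2KHR_fn(physical_device, &features2);
    }

    const bool supports_16bit_ssbo = (storage_features_16bit.storageBuffer16BitAccess == VK_TRUE);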
/* Retrieve device layers */ + result_vk = Anvil::Vulkan::vkEnumerateDeviceLayerProperties(m_physical_device, + &n_physical_device_layers, + nullptr); /* pProperties */ + + if (!is_vk_call_successful(result_vk) ) + { + anvil_assert_vk_call_succeeded(result_vk); + + result = false; + goto end; + } + + if (n_physical_device_layers > 0) + { + std::vector layer_props; + + layer_props.resize(n_physical_device_layers); + + result_vk = Anvil::Vulkan::vkEnumerateDeviceLayerProperties(m_physical_device, + &n_physical_device_layers, + &layer_props[0]); + + if (!is_vk_call_successful(result_vk) ) + { + anvil_assert(is_vk_call_successful(result_vk) ); + + result = false; + goto end; + } + + for (uint32_t n_layer = 0; + n_layer < n_physical_device_layers; + ++n_layer) + { + m_layers.push_back(Anvil::Layer(layer_props[n_layer]) ); + } + } + + /* Retrieve device properties */ + { + VkPhysicalDeviceProperties props; + + Anvil::Vulkan::vkGetPhysicalDeviceProperties(m_physical_device, + &props); + + m_core_properties_vk10_ptr.reset( + new Anvil::PhysicalDevicePropertiesCoreVK10(props) + ); + + if (m_core_properties_vk10_ptr == nullptr) + { + anvil_assert(m_core_properties_vk10_ptr != nullptr); + + result = false; + goto end; + } + } + + /* Retrieve additional device info */ + if (m_instance_ptr->get_enabled_extensions_info()->khr_get_physical_device_properties2() ) + { + Anvil::StructID conservative_rasterization_struct_id; + Anvil::StructID depth_stencil_resolve_props_struct_id; + Anvil::StructID descriptor_indexing_props_struct_id; + Anvil::StructID device_id_props_struct_id; + Anvil::StructID driver_properties_props_struct_id; + Anvil::StructID external_memory_host_props_struct_id; + const auto& gpdp2_entrypoints = m_instance_ptr->get_extension_khr_get_physical_device_properties2_entrypoints(); + Anvil::StructID inline_uniform_block_props_struct_id; + Anvil::StructID maintenance3_struct_id; + Anvil::StructID multiview_struct_id; + Anvil::StructID pci_bus_info_props_struct_id; + Anvil::StructID point_clipping_props_struct_id; + Anvil::StructID protected_memory_props_struct_id; + Anvil::StructID sample_locations_props_struct_id; + Anvil::StructID sampler_filter_minmax_props_struct_id; + Anvil::StructID shader_core_struct_id; + Anvil::StructID shader_float_controls_props_struct_id; + Anvil::StructChainUniquePtr struct_chain_ptr; + Anvil::StructChainer struct_chainer; + Anvil::StructID subgroup_props_struct_id; + Anvil::StructID transform_feedback_props_struct_id; + Anvil::StructID vertex_attribute_divisor_props_struct_id; + + /* Chain query structs */ + { + VkPhysicalDeviceProperties2KHR general_props; + + general_props.pNext = nullptr; + general_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR; + + struct_chainer.append_struct(general_props); + } + + if (m_core_properties_vk10_ptr->api_version >= VK_MAKE_VERSION(1, 1, 0)) + { + VkPhysicalDeviceProtectedMemoryProperties protected_memory_properties; + VkPhysicalDeviceSubgroupProperties subgroup_properties; + + protected_memory_properties.pNext = nullptr; + protected_memory_properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES; + + subgroup_properties.pNext = nullptr; + subgroup_properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES; + + protected_memory_props_struct_id = struct_chainer.append_struct(protected_memory_properties); + subgroup_props_struct_id = struct_chainer.append_struct(subgroup_properties); + } + + if (m_extension_info_ptr->get_device_extension_info()->amd_shader_core_properties() 
) + { + VkPhysicalDeviceShaderCorePropertiesAMD shader_core_properties; + + shader_core_properties.pNext = nullptr; + shader_core_properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD; + + shader_core_struct_id = struct_chainer.append_struct(shader_core_properties); + } + + if (m_extension_info_ptr->get_device_extension_info()->ext_conservative_rasterization() ) + { + VkPhysicalDeviceConservativeRasterizationPropertiesEXT conservative_rasterization_properties; + + conservative_rasterization_properties.pNext = nullptr; + conservative_rasterization_properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT; + + conservative_rasterization_struct_id = struct_chainer.append_struct(conservative_rasterization_properties); + } + + if (m_extension_info_ptr->get_device_extension_info()->ext_descriptor_indexing() ) + { + VkPhysicalDeviceDescriptorIndexingPropertiesEXT descriptor_indexing_properties; + + descriptor_indexing_properties.pNext = nullptr; + descriptor_indexing_properties.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT); + + descriptor_indexing_props_struct_id = struct_chainer.append_struct(descriptor_indexing_properties); + } + + if (m_extension_info_ptr->get_device_extension_info()->ext_external_memory_host() ) + { + VkPhysicalDeviceExternalMemoryHostPropertiesEXT external_memory_host_props; + + external_memory_host_props.pNext = nullptr; + external_memory_host_props.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT); + + external_memory_host_props_struct_id = struct_chainer.append_struct(external_memory_host_props); + } + + if (m_extension_info_ptr->get_device_extension_info()->ext_inline_uniform_block() ) + { + VkPhysicalDeviceInlineUniformBlockPropertiesEXT inline_uniform_block_props; + + inline_uniform_block_props.pNext = nullptr; + inline_uniform_block_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT; + + inline_uniform_block_props_struct_id = struct_chainer.append_struct(inline_uniform_block_props); + } + + if (m_extension_info_ptr->get_device_extension_info()->ext_pci_bus_info() ) + { + VkPhysicalDevicePCIBusInfoPropertiesEXT pci_bus_info_props; + + pci_bus_info_props.pNext = nullptr; + pci_bus_info_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT; + + pci_bus_info_props_struct_id = struct_chainer.append_struct(pci_bus_info_props); + } + + if (m_extension_info_ptr->get_device_extension_info()->ext_sample_locations() ) + { + VkPhysicalDeviceSampleLocationsPropertiesEXT sample_locations_properties; + + sample_locations_properties.pNext = nullptr; + sample_locations_properties.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT); + + sample_locations_props_struct_id = struct_chainer.append_struct(sample_locations_properties); + } + + if (m_extension_info_ptr->get_device_extension_info()->ext_sampler_filter_minmax() ) + { + VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT sampler_filter_minmax_properties; + + sampler_filter_minmax_properties.pNext = nullptr; + sampler_filter_minmax_properties.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT); + + sampler_filter_minmax_props_struct_id = struct_chainer.append_struct(sampler_filter_minmax_properties); + } + + if (m_extension_info_ptr->get_device_extension_info()->ext_transform_feedback() ) + { + VkPhysicalDeviceTransformFeedbackPropertiesEXT 
transform_feedback_properties; + + transform_feedback_properties.pNext = nullptr; + transform_feedback_properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT; + + transform_feedback_props_struct_id = struct_chainer.append_struct(transform_feedback_properties); + } + + if (m_extension_info_ptr->get_device_extension_info()->ext_vertex_attribute_divisor() ) + { + VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT vertex_attribute_divisor_properties; + + vertex_attribute_divisor_properties.pNext = nullptr; + vertex_attribute_divisor_properties.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT); + + vertex_attribute_divisor_props_struct_id = struct_chainer.append_struct(vertex_attribute_divisor_properties); + } + + if (m_extension_info_ptr->get_device_extension_info()->khr_depth_stencil_resolve() ) + { + VkPhysicalDeviceDepthStencilResolvePropertiesKHR depth_stencil_resolve_properties; + + depth_stencil_resolve_properties.pNext = nullptr; + depth_stencil_resolve_properties.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR); + + depth_stencil_resolve_props_struct_id = struct_chainer.append_struct(depth_stencil_resolve_properties); + } + + if (m_extension_info_ptr->get_device_extension_info()->khr_driver_properties() ) + { + VkPhysicalDeviceDriverPropertiesKHR driver_properties_properties; + + driver_properties_properties.pNext = nullptr; + driver_properties_properties.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR); + + driver_properties_props_struct_id = struct_chainer.append_struct(driver_properties_properties); + } + + if (m_instance_ptr->get_enabled_extensions_info()->khr_external_memory_capabilities() ) + { + VkPhysicalDeviceIDPropertiesKHR device_id_props; + + device_id_props.pNext = nullptr; + device_id_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR; + + device_id_props_struct_id = struct_chainer.append_struct(device_id_props); + } + + if (m_extension_info_ptr->get_device_extension_info()->khr_maintenance2() || + supports_vk1_1) + { + VkPhysicalDevicePointClippingPropertiesKHR point_clipping_props; + + point_clipping_props.pNext = nullptr; + point_clipping_props.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR); + + point_clipping_props_struct_id = struct_chainer.append_struct(point_clipping_props); + } + + if (m_extension_info_ptr->get_device_extension_info()->khr_maintenance3() || + supports_vk1_1) + { + VkPhysicalDeviceMaintenance3PropertiesKHR maintenance3_props; + + maintenance3_props.pNext = nullptr; + maintenance3_props.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR); + + maintenance3_struct_id = struct_chainer.append_struct(maintenance3_props); + } + + if (m_extension_info_ptr->get_device_extension_info()->khr_multiview() || + supports_vk1_1) + { + VkPhysicalDeviceMultiviewPropertiesKHR multiview_props; + + multiview_props.pNext = nullptr; + multiview_props.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR); + + multiview_struct_id = struct_chainer.append_struct(multiview_props); + } + + if (m_extension_info_ptr->get_device_extension_info()->khr_shader_float_controls() ) + { + VkPhysicalDeviceFloatControlsPropertiesKHR float_controls_props; + + float_controls_props.pNext = nullptr; + float_controls_props.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR); + + 
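The same pNext-chaining pattern is used for the properties query just below; a compact sketch retrieving the core Vulkan 1.1 subgroup properties alongside the base properties (again assuming either a Vulkan 1.1 device or the KHR entrypoint fetched as shown for the feature query above):

    VkPhysicalDeviceSubgroupProperties subgroup_props = {};

    subgroup_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES;
    subgroup_props.pNext = nullptr;

    VkPhysicalDeviceProperties2 props2 = {};

    props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
    props2.pNext = &subgroup_props;              /* chain the query struct */

    vkGetPhysicalDeviceProperties2(physical_device, &props2);   /* core 1.1; use the ...2KHR entrypoint on 1.0 */

    const uint32_t subgroup_size = subgroup_props.subgroupSize;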
shader_float_controls_props_struct_id = struct_chainer.append_struct(float_controls_props); + } + + /* Retrieve the results */ + struct_chain_ptr = struct_chainer.create_chain(); + + gpdp2_entrypoints.vkGetPhysicalDeviceProperties2KHR(m_physical_device, + struct_chain_ptr->get_root_struct() ); + + if (m_core_properties_vk10_ptr->api_version >= VK_MAKE_VERSION(1, 1, 0)) + { + m_core_properties_vk11_ptr.reset( + new PhysicalDevicePropertiesCoreVK11(*struct_chain_ptr->get_struct_with_id(protected_memory_props_struct_id), + *struct_chain_ptr->get_struct_with_id (subgroup_props_struct_id) ) + ); + + if (m_core_properties_vk11_ptr == nullptr) + { + anvil_assert(m_core_properties_vk11_ptr != nullptr); + + result = false; + goto end; + } + } + + /* Cache the retrieved properties */ + if (conservative_rasterization_struct_id.is_valid() ) + { + m_ext_conservative_rasterization_properties_ptr.reset( + new EXTConservativeRasterizationProperties(*struct_chain_ptr->get_struct_with_id(conservative_rasterization_struct_id) ) + ); + + if (m_ext_conservative_rasterization_properties_ptr == nullptr) + { + anvil_assert(m_ext_conservative_rasterization_properties_ptr != nullptr); + + result = false; + goto end; + } + } + + if (depth_stencil_resolve_props_struct_id.is_valid() ) + { + m_khr_depth_stencil_resolve_properties_ptr.reset( + new KHRDepthStencilResolveProperties(*struct_chain_ptr->get_struct_with_id(depth_stencil_resolve_props_struct_id) ) + ); + + if (m_khr_depth_stencil_resolve_properties_ptr == nullptr) + { + anvil_assert(m_khr_depth_stencil_resolve_properties_ptr != nullptr); + + result = false; + goto end; + } + } + + if (descriptor_indexing_props_struct_id.is_valid() ) + { + m_ext_descriptor_indexing_properties_ptr.reset( + new EXTDescriptorIndexingProperties(*struct_chain_ptr->get_struct_with_id(descriptor_indexing_props_struct_id) ) + ); + + if (m_ext_descriptor_indexing_properties_ptr == nullptr) + { + anvil_assert(m_ext_descriptor_indexing_properties_ptr != nullptr); + + result = false; + goto end; + } + } + + if (driver_properties_props_struct_id.is_valid() ) + { + m_khr_driver_properties_properties_ptr.reset( + new KHRDriverPropertiesProperties(*struct_chain_ptr->get_struct_with_id(driver_properties_props_struct_id) ) + ); + + if (m_khr_driver_properties_properties_ptr == nullptr) + { + anvil_assert(m_khr_driver_properties_properties_ptr != nullptr); + + result = false; + goto end; + } + } - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_PHYSICAL_DEVICE, - this); -} + if (device_id_props_struct_id.is_valid() ) + { + m_khr_external_memory_capabilities_physical_device_id_properties_ptr.reset( + new KHRExternalMemoryCapabilitiesPhysicalDeviceIDProperties(*struct_chain_ptr->get_struct_with_id(device_id_props_struct_id) ) + ); -/* Please see header for specification */ -void Anvil::PhysicalDevice::destroy() -{ - anvil_assert(!m_destroyed); + if (m_khr_external_memory_capabilities_physical_device_id_properties_ptr == nullptr) + { + anvil_assert(m_khr_external_memory_capabilities_physical_device_id_properties_ptr != nullptr); - m_destroyed = true; + result = false; + goto end; + } + } - while (m_cached_devices.size() > 0) - { - m_cached_devices.back()->destroy(); - } + if (external_memory_host_props_struct_id.is_valid() ) + { + m_ext_external_memory_host_properties_ptr.reset( + new EXTExternalMemoryHostProperties(*struct_chain_ptr->get_struct_with_id(external_memory_host_props_struct_id) ) + ); - m_instance_ptr->unregister_physical_device(shared_from_this() ); + if 
(m_ext_external_memory_host_properties_ptr == nullptr) + { + anvil_assert(m_ext_external_memory_host_properties_ptr != nullptr); -} + result = false; + goto end; + } + } -/** Initializes _vulkan_physical_device descriptor with actual data by issuing a number - * of Vulkan API calls. - * - * @param physical_device_ptr Pointer to a _vulkan_physical_device instance to be initialized. - * Must not be nullptr. - **/ -void Anvil::PhysicalDevice::init() -{ - const Anvil::ExtensionKHRGetPhysicalDeviceProperties2* gpdp2_entrypoints_ptr = nullptr; - VkPhysicalDeviceMemoryProperties memory_properties; - uint32_t n_extensions = 0; - uint32_t n_physical_device_layers = 0; - uint32_t n_physical_device_queues = 0; - VkResult result = VK_ERROR_INITIALIZATION_FAILED; + if (inline_uniform_block_props_struct_id.is_valid() ) + { + m_ext_inline_uniform_block_properties_ptr.reset( + new EXTInlineUniformBlockProperties(*struct_chain_ptr->get_struct_with_id(inline_uniform_block_props_struct_id) ) + ); - ANVIL_REDUNDANT_VARIABLE(result); + if (m_ext_inline_uniform_block_properties_ptr == nullptr) + { + anvil_assert(m_ext_inline_uniform_block_properties_ptr != nullptr); - anvil_assert(m_physical_device != VK_NULL_HANDLE); + result = false; + goto end; + } + } - if (m_instance_ptr->is_instance_extension_supported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) ) - { - gpdp2_entrypoints_ptr = &m_instance_ptr->get_extension_khr_get_physical_device_properties2_entrypoints(); - } + if (maintenance3_struct_id.is_valid() ) + { + m_khr_maintenance3_properties_ptr.reset( + new KHRMaintenance3Properties(*struct_chain_ptr->get_struct_with_id(maintenance3_struct_id) ) + ); - /* Retrieve device extensions */ - result = vkEnumerateDeviceExtensionProperties(m_physical_device, - nullptr, /* pLayerName */ - &n_extensions, - nullptr); /* pProperties */ - anvil_assert_vk_call_succeeded(result); + if (m_khr_maintenance3_properties_ptr == nullptr) + { + anvil_assert(m_khr_maintenance3_properties_ptr != nullptr); - if (n_extensions > 0) - { - std::vector extension_props; + result = false; + goto end; + } + } - extension_props.resize(n_extensions); + if (multiview_struct_id.is_valid() ) + { + m_khr_multiview_properties_ptr.reset( + new KHRMultiviewProperties(*struct_chain_ptr->get_struct_with_id(multiview_struct_id) ) + ); - result = vkEnumerateDeviceExtensionProperties(m_physical_device, - nullptr, /* pLayerName */ - &n_extensions, - &extension_props[0]); - anvil_assert_vk_call_succeeded(result); + if (m_khr_multiview_properties_ptr == nullptr) + { + anvil_assert(m_khr_multiview_properties_ptr != nullptr); - for (uint32_t n_extension = 0; - n_extension < n_extensions; - ++n_extension) + result = false; + goto end; + } + } + + if (pci_bus_info_props_struct_id.is_valid() ) { - m_extensions.push_back(Anvil::Extension(extension_props[n_extension]) ); + m_ext_pci_bus_info_ptr.reset( + new EXTPCIBusInfoProperties(*struct_chain_ptr->get_struct_with_id(pci_bus_info_props_struct_id) ) + ); + + if (m_ext_pci_bus_info_ptr == nullptr) + { + anvil_assert(m_ext_pci_bus_info_ptr != nullptr); + + result = false; + goto end; + } } - } - /* Retrieve device features */ - vkGetPhysicalDeviceFeatures(m_physical_device, - &m_features); + if (point_clipping_props_struct_id.is_valid() ) + { + m_khr_maintenance2_physical_device_point_clipping_properties_ptr.reset( + new KHRMaintenance2PhysicalDevicePointClippingProperties(*struct_chain_ptr->get_struct_with_id(point_clipping_props_struct_id)) + ); - /* Retrieve device format properties */ - const bool 
texture_gather_bias_lod_support = is_device_extension_supported(VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME); + if (m_khr_maintenance2_physical_device_point_clipping_properties_ptr == nullptr) + { + anvil_assert(m_khr_maintenance2_physical_device_point_clipping_properties_ptr != nullptr); - for (VkFormat current_format = (VkFormat)1; /* skip the _UNDEFINED format */ - current_format < VK_FORMAT_RANGE_SIZE; - current_format = static_cast(current_format + 1)) - { - VkFormatProperties format_props; + result = false; + goto end; + } + } + + if (sample_locations_props_struct_id.is_valid() ) + { + m_ext_sample_locations_properties_ptr.reset( + new EXTSampleLocationsProperties(*struct_chain_ptr->get_struct_with_id(sample_locations_props_struct_id) ) + ); - vkGetPhysicalDeviceFormatProperties(m_physical_device, - current_format, - &format_props); + if (m_ext_sample_locations_properties_ptr == nullptr) + { + anvil_assert(m_ext_sample_locations_properties_ptr != nullptr); - m_format_properties[current_format] = FormatProperties(format_props); + result = false; + goto end; + } + } - /* Can this format be used with VK_AMD_texture_gather_bias_lod? */ - if (gpdp2_entrypoints_ptr != nullptr && - texture_gather_bias_lod_support) + if (sampler_filter_minmax_props_struct_id.is_valid() ) { - VkPhysicalDeviceImageFormatInfo2KHR format_info; - VkImageFormatProperties2KHR image_format_props = {}; - VkTextureLODGatherFormatPropertiesAMD texture_lod_gather_support = {}; + m_ext_sampler_filter_minmax_properties_ptr.reset( + new EXTSamplerFilterMinmaxProperties(*struct_chain_ptr->get_struct_with_id(sampler_filter_minmax_props_struct_id) ) + ); - texture_lod_gather_support.sType = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD; - texture_lod_gather_support.pNext = nullptr; + if (m_ext_sampler_filter_minmax_properties_ptr == nullptr) + { + anvil_assert(m_ext_sampler_filter_minmax_properties_ptr != nullptr); - format_info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR; - format_info.pNext = nullptr; - format_info.format = current_format; - format_info.type = VK_IMAGE_TYPE_2D; // irrelevant - format_info.tiling = VK_IMAGE_TILING_LINEAR; // irrelevant - format_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT; // irrelevant - format_info.flags = 0; // irrelevant + result = false; + goto end; + } + } - image_format_props.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR; - image_format_props.pNext = &texture_lod_gather_support; + if (shader_core_struct_id.is_valid() ) + { + m_amd_shader_core_properties_ptr.reset( + new AMDShaderCoreProperties(*struct_chain_ptr->get_struct_with_id(shader_core_struct_id) ) + ); - if (gpdp2_entrypoints_ptr->vkGetPhysicalDeviceImageFormatProperties2KHR(m_physical_device, - &format_info, - &image_format_props) == VK_SUCCESS) + if (m_amd_shader_core_properties_ptr == nullptr) { - m_format_properties.at(current_format).supports_amd_texture_gather_bias_lod = (texture_lod_gather_support.supportsTextureGatherLODBiasAMD == VK_TRUE); + anvil_assert(m_amd_shader_core_properties_ptr != nullptr); + + result = false; + goto end; } } - } - if (is_device_extension_supported ("VK_KHR_16bit_storage") && - m_instance_ptr->is_instance_extension_supported("VK_KHR_get_physical_device_properties2") ) - { - const auto& gpdp2_entrypoints = m_instance_ptr->get_extension_khr_get_physical_device_properties2_entrypoints(); + if (shader_float_controls_props_struct_id.is_valid() ) + { + m_khr_shader_float_controls_properties_ptr.reset( + new 
KHRShaderFloatControlsProperties(*struct_chain_ptr->get_struct_with_id(shader_float_controls_props_struct_id) ) + ); + + if (m_khr_shader_float_controls_properties_ptr == nullptr) + { + anvil_assert(m_khr_shader_float_controls_properties_ptr != nullptr); + + result = false; + goto end; + } + } - if (is_device_extension_supported(VK_KHR_16BIT_STORAGE_EXTENSION_NAME) ) + if (transform_feedback_props_struct_id.is_valid() ) { - VkPhysicalDeviceFeatures2KHR features; - VkPhysicalDevice16BitStorageFeaturesKHR storage_features; + m_ext_transform_feedback_properties_ptr.reset( + new EXTTransformFeedbackProperties(*struct_chain_ptr->get_struct_with_id(transform_feedback_props_struct_id) ) + ); + + if (m_ext_transform_feedback_properties_ptr == nullptr) + { + anvil_assert(m_ext_transform_feedback_properties_ptr != nullptr); - features.pNext = &storage_features; - features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR; + result = false; + goto end; + } + } - storage_features.pNext = nullptr; - storage_features.sType = static_cast(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR); + if (vertex_attribute_divisor_props_struct_id.is_valid() ) + { + m_ext_vertex_attribute_divisor_properties_ptr.reset( + new EXTVertexAttributeDivisorProperties(*struct_chain_ptr->get_struct_with_id(vertex_attribute_divisor_props_struct_id) ) + ); - gpdp2_entrypoints.vkGetPhysicalDeviceFeatures2KHR(m_physical_device, - &features); + if (m_ext_vertex_attribute_divisor_properties_ptr == nullptr) + { + anvil_assert(m_ext_vertex_attribute_divisor_properties_ptr != nullptr); - m_16bit_storage_features = StorageFeatures16Bit( (storage_features.storageInputOutput16 == VK_TRUE), - (storage_features.storagePushConstant16 == VK_TRUE), - (storage_features.storageBuffer16BitAccess == VK_TRUE), - (storage_features.uniformAndStorageBuffer16BitAccess == VK_TRUE)); - m_16bit_storage_features_available = true; + result = false; + goto end; + } } } - /* Retrieve device properties */ - vkGetPhysicalDeviceProperties(m_physical_device, - &m_properties); + m_properties = Anvil::PhysicalDeviceProperties(m_amd_shader_core_properties_ptr.get (), + m_core_properties_vk10_ptr.get (), + m_core_properties_vk11_ptr.get (), + m_ext_conservative_rasterization_properties_ptr.get (), + m_ext_descriptor_indexing_properties_ptr.get (), + m_ext_external_memory_host_properties_ptr.get (), + m_ext_inline_uniform_block_properties_ptr.get (), + m_ext_pci_bus_info_ptr.get (), + m_ext_sample_locations_properties_ptr.get (), + m_ext_sampler_filter_minmax_properties_ptr.get (), + m_ext_transform_feedback_properties_ptr.get (), + m_ext_vertex_attribute_divisor_properties_ptr.get (), + m_khr_depth_stencil_resolve_properties_ptr.get (), + m_khr_driver_properties_properties_ptr.get (), + m_khr_external_memory_capabilities_physical_device_id_properties_ptr.get(), + m_khr_maintenance3_properties_ptr.get (), + m_khr_maintenance2_physical_device_point_clipping_properties_ptr.get (), + m_khr_multiview_properties_ptr.get (), + m_khr_shader_float_controls_properties_ptr.get () ); /* Retrieve device queue data */ - vkGetPhysicalDeviceQueueFamilyProperties(m_physical_device, - &n_physical_device_queues, /* pCount */ - nullptr); /* pQueueFamilyProperties */ + Anvil::Vulkan::vkGetPhysicalDeviceQueueFamilyProperties(m_physical_device, + &n_physical_device_queues, /* pCount */ + nullptr); /* pQueueFamilyProperties */ if (n_physical_device_queues > 0) { @@ -221,9 +1178,9 @@ void Anvil::PhysicalDevice::init() queue_props.resize(n_physical_device_queues); - 
vkGetPhysicalDeviceQueueFamilyProperties(m_physical_device, - &n_physical_device_queues, - &queue_props[0]); + Anvil::Vulkan::vkGetPhysicalDeviceQueueFamilyProperties(m_physical_device, + &n_physical_device_queues, + &queue_props[0]); for (uint32_t n_physical_device_queue = 0; n_physical_device_queue < n_physical_device_queues; @@ -234,70 +1191,408 @@ void Anvil::PhysicalDevice::init() } /* Retrieve memory properties */ - vkGetPhysicalDeviceMemoryProperties(m_physical_device, - &memory_properties); + { + VkPhysicalDeviceMemoryProperties memory_properties; - m_memory_properties.init(memory_properties); + Anvil::Vulkan::vkGetPhysicalDeviceMemoryProperties(m_physical_device, + &memory_properties); - /* Retrieve device layers */ - result = vkEnumerateDeviceLayerProperties(m_physical_device, - &n_physical_device_layers, - nullptr); /* pProperties */ - anvil_assert_vk_call_succeeded(result); + m_memory_properties.init(memory_properties); + } - if (n_physical_device_layers > 0) +end: + return result; +} + +Anvil::MemoryBudget Anvil::PhysicalDevice::get_available_memory_budget() const +{ + if (!m_instance_ptr->get_enabled_extensions_info()->khr_get_physical_device_properties2() || + !m_extension_info_ptr->get_device_extension_info()->ext_memory_budget ()) { - std::vector layer_props; + anvil_assert_fail(); + } - layer_props.resize(n_physical_device_layers); + const auto& gpdp2_entrypoints = m_instance_ptr->get_extension_khr_get_physical_device_properties2_entrypoints(); + Anvil::StructID memory_budget_properties_struct_id; + VkPhysicalDeviceMemoryBudgetPropertiesEXT memory_budget_properties; + VkPhysicalDeviceMemoryProperties2KHR memory_properties2; + Anvil::StructChainer struct_chainer; + Anvil::StructChainUniquePtr struct_chain_ptr; - result = vkEnumerateDeviceLayerProperties(m_physical_device, - &n_physical_device_layers, - &layer_props[0]); + memory_properties2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2; + memory_properties2.pNext = nullptr; + + memory_budget_properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT; + memory_budget_properties.pNext = nullptr; + + struct_chainer.append_struct(memory_properties2); + memory_budget_properties_struct_id = struct_chainer.append_struct(memory_budget_properties); + + struct_chain_ptr = struct_chainer.create_chain(); + + gpdp2_entrypoints.vkGetPhysicalDeviceMemoryProperties2KHR(m_physical_device, + struct_chain_ptr->get_root_struct()); + + return MemoryBudget(*struct_chain_ptr->get_struct_with_id(memory_budget_properties_struct_id)); +} + +bool Anvil::PhysicalDevice::get_buffer_properties(const Anvil::BufferPropertiesQuery& in_query, + Anvil::BufferProperties* out_opt_result_ptr) const +{ + const Anvil::ExtensionKHRExternalMemoryCapabilitiesEntrypoints* emc_entrypoints_ptr = nullptr; + VkPhysicalDeviceExternalBufferInfoKHR input_struct; + VkExternalBufferPropertiesKHR result_struct; + bool result = false; + + if (!m_instance_ptr->get_enabled_extensions_info()->khr_external_memory_capabilities() ) + { + anvil_assert(m_instance_ptr->get_enabled_extensions_info()->khr_external_memory_capabilities() ); + + goto end; + } + else + { + emc_entrypoints_ptr = &m_instance_ptr->get_extension_khr_external_memory_capabilities_entrypoints(); + } + + input_struct.flags = in_query.create_flags.get_vk(); + input_struct.handleType = static_cast(in_query.external_memory_handle_type.get_vk() ); + input_struct.pNext = nullptr; + input_struct.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR; + input_struct.usage 
= in_query.usage_flags.get_vk(); + + result_struct.pNext = nullptr; + result_struct.sType = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR; + + emc_entrypoints_ptr->vkGetPhysicalDeviceExternalBufferPropertiesKHR(m_physical_device, + &input_struct, + &result_struct); + + if (out_opt_result_ptr != nullptr) + { + *out_opt_result_ptr = std::move(Anvil::BufferProperties(ExternalMemoryProperties(result_struct.externalMemoryProperties) )); + } + + /* All done */ + result = true; + +end: + return result; +} + +bool Anvil::PhysicalDevice::get_fence_properties(const Anvil::FencePropertiesQuery& in_query, + Anvil::FenceProperties* out_opt_result_ptr) const +{ + const Anvil::ExtensionKHRExternalFenceCapabilitiesEntrypoints* entrypoints_ptr = nullptr; + VkPhysicalDeviceExternalFenceInfoKHR input_struct; + VkExternalFencePropertiesKHR result_struct; + bool result = false; + + if (!m_instance_ptr->get_enabled_extensions_info()->khr_external_fence_capabilities() ) + { + anvil_assert(m_instance_ptr->get_enabled_extensions_info()->khr_external_fence_capabilities() ); + + goto end; + } + else + { + entrypoints_ptr = &m_instance_ptr->get_extension_khr_external_fence_capabilities_entrypoints(); + } + + input_struct.handleType = static_cast(in_query.external_fence_handle_type); + input_struct.pNext = nullptr; + input_struct.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR; + + result_struct.pNext = nullptr; + result_struct.sType = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR; + + entrypoints_ptr->vkGetPhysicalDeviceExternalFencePropertiesKHR(m_physical_device, + &input_struct, + &result_struct); + + if (out_opt_result_ptr != nullptr) + { + *out_opt_result_ptr = std::move(Anvil::FenceProperties(ExternalFenceProperties(result_struct) )); + } + + /* All done */ + result = true; + +end: + return result; +} + +Anvil::FormatProperties Anvil::PhysicalDevice::get_format_properties(Anvil::Format in_format) const +{ + VkFormatProperties core_vk10_format_props; + + Anvil::Vulkan::vkGetPhysicalDeviceFormatProperties(m_physical_device, + static_cast(in_format), + &core_vk10_format_props); + + return Anvil::FormatProperties(core_vk10_format_props); +} + +/* Please see header for specification */ +bool Anvil::PhysicalDevice::get_image_format_properties(const ImageFormatPropertiesQuery& in_query, + Anvil::ImageFormatProperties* out_opt_result_ptr) const +{ + VkFormatProperties core_vk10_format_props; + VkImageFormatProperties core_vk10_image_format_properties; + Anvil::ExternalMemoryProperties external_handle_props; + const Anvil::ExtensionKHRGetPhysicalDeviceProperties2* gpdp2_entrypoints_ptr = nullptr; + uint32_t n_combined_image_sampler_descriptors_used = 0; + bool result = false; + bool supports_amd_texture_gather_bias_lod = false; + Anvil::ImageUsageFlags valid_stencil_image_usage_aspect_flags; + + if (m_instance_ptr->get_enabled_extensions_info()->khr_get_physical_device_properties2() ) + { + gpdp2_entrypoints_ptr = &m_instance_ptr->get_extension_khr_get_physical_device_properties2_entrypoints(); + } + + /* Retrieve core VK1.0 information first. 
*/ + Anvil::Vulkan::vkGetPhysicalDeviceFormatProperties(m_physical_device, + static_cast(in_query.format), + &core_vk10_format_props); + + if (Anvil::Vulkan::vkGetPhysicalDeviceImageFormatProperties(m_physical_device, + static_cast (in_query.format), + static_cast (in_query.image_type), + static_cast(in_query.tiling), + in_query.usage_flags.get_vk (), + in_query.create_flags.get_vk(), + &core_vk10_image_format_properties) != VK_SUCCESS) + { + goto end; + } + + if (gpdp2_entrypoints_ptr != nullptr) + { + Anvil::StructID external_image_format_props_struct_id; + Anvil::StructID image_stencil_usage_create_info_struct_id; + Anvil::StructChainer input_struct_chainer; + auto instance_extensions_ptr = m_instance_ptr->get_enabled_extensions_info(); + Anvil::StructChainer output_struct_chainer; + Anvil::StructID sampler_ycbcr_conversion_image_format_props_struct_id; + Anvil::StructID texture_lod_gather_support_struct_id; + + ANVIL_REDUNDANT_VARIABLE(instance_extensions_ptr); - for (uint32_t n_layer = 0; - n_layer < n_physical_device_layers; - ++n_layer) { - m_layers.push_back(Anvil::Layer(layer_props[n_layer]) ); + VkPhysicalDeviceImageFormatInfo2KHR format_info; + + format_info.flags = in_query.create_flags.get_vk(); + format_info.format = static_cast(in_query.format); + format_info.pNext = nullptr; + format_info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR; + format_info.tiling = static_cast(in_query.tiling); + format_info.type = static_cast (in_query.image_type); + format_info.usage = in_query.usage_flags.get_vk(); + + input_struct_chainer.append_struct(format_info); + } + + { + VkImageFormatProperties2KHR image_format_props = {}; + + image_format_props.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR; + image_format_props.pNext = nullptr; + + output_struct_chainer.append_struct(image_format_props); + } + + if (m_extension_info_ptr->get_device_extension_info()->amd_texture_gather_bias_lod() ) + { + VkTextureLODGatherFormatPropertiesAMD texture_lod_gather_support = {}; + + texture_lod_gather_support.sType = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD; + texture_lod_gather_support.pNext = nullptr; + + texture_lod_gather_support_struct_id = output_struct_chainer.append_struct(texture_lod_gather_support); + } + + if (m_extension_info_ptr->get_device_extension_info()->ext_separate_stencil_usage() && in_query.stencil_usage_flags != Anvil::ImageUsageFlagBits::NONE ) + { + VkImageStencilUsageCreateInfoEXT create_info; + + create_info.pNext = nullptr; + create_info.stencilUsage = in_query.stencil_usage_flags.get_vk(); + create_info.sType = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT; + + image_stencil_usage_create_info_struct_id = input_struct_chainer.append_struct(create_info); + } + + if (in_query.external_memory_handle_type != Anvil::ExternalMemoryHandleTypeFlagBits::NONE) + { + anvil_assert(instance_extensions_ptr->khr_external_memory_capabilities() ); + + VkPhysicalDeviceExternalImageFormatInfoKHR external_image_format_info; + VkExternalImageFormatPropertiesKHR external_image_format_props; + + external_image_format_info.handleType = static_cast(in_query.external_memory_handle_type); + external_image_format_info.pNext = nullptr; + external_image_format_info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR; + + external_image_format_props.pNext = nullptr; + external_image_format_props.sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR; + + input_struct_chainer.append_struct(external_image_format_info); + + 
external_image_format_props_struct_id = output_struct_chainer.append_struct(external_image_format_props); } + + if (m_extension_info_ptr->get_device_extension_info()->khr_sampler_ycbcr_conversion() && + Anvil::Formats::is_format_yuv_khr (in_query.format) ) + { + VkSamplerYcbcrConversionImageFormatPropertiesKHR image_format_properties; + + image_format_properties.combinedImageSamplerDescriptorCount = 0; + image_format_properties.pNext = nullptr; + image_format_properties.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR; + + sampler_ycbcr_conversion_image_format_props_struct_id = output_struct_chainer.append_struct(image_format_properties); + } + + auto input_struct_chain_ptr = input_struct_chainer.create_chain (); + auto output_struct_chain_ptr = output_struct_chainer.create_chain(); + + if (gpdp2_entrypoints_ptr->vkGetPhysicalDeviceImageFormatProperties2KHR(m_physical_device, + input_struct_chain_ptr->get_root_struct (), + output_struct_chain_ptr->get_root_struct() ) == VK_SUCCESS) + { + if (m_extension_info_ptr->get_device_extension_info()->amd_texture_gather_bias_lod() ) + { + /* Can this format be used with VK_AMD_texture_gather_bias_lod? */ + auto texture_lod_gather_support_ptr = output_struct_chain_ptr->get_struct_with_id(texture_lod_gather_support_struct_id); + + supports_amd_texture_gather_bias_lod = (texture_lod_gather_support_ptr->supportsTextureGatherLODBiasAMD == VK_TRUE); + } + + if (in_query.external_memory_handle_type != Anvil::ExternalMemoryHandleTypeFlagBits::NONE) + { + auto image_format_props_ptr = output_struct_chain_ptr->get_struct_with_id(external_image_format_props_struct_id); + + external_handle_props = Anvil::ExternalMemoryProperties(image_format_props_ptr->externalMemoryProperties); + } + + if (image_stencil_usage_create_info_struct_id.is_valid() ) + { + auto stencil_usage_create_info_ptr = reinterpret_cast(input_struct_chain_ptr->get_struct_with_id(image_stencil_usage_create_info_struct_id) ); + + valid_stencil_image_usage_aspect_flags = static_cast(stencil_usage_create_info_ptr->stencilUsage); + } + + if (sampler_ycbcr_conversion_image_format_props_struct_id.is_valid() ) + { + auto image_format_props_ptr = reinterpret_cast(output_struct_chain_ptr->get_struct_with_id(sampler_ycbcr_conversion_image_format_props_struct_id) ); + + n_combined_image_sampler_descriptors_used = image_format_props_ptr->combinedImageSamplerDescriptorCount; + } + } + else + { + goto end; + } + } + + if (out_opt_result_ptr != nullptr) + { + *out_opt_result_ptr = Anvil::ImageFormatProperties(core_vk10_image_format_properties, + supports_amd_texture_gather_bias_lod, + external_handle_props, + valid_stencil_image_usage_aspect_flags, + n_combined_image_sampler_descriptors_used); + } + + result = true; +end: + return result; +} + +/* Please see header for specification */ +bool Anvil::PhysicalDevice::get_semaphore_properties(const Anvil::SemaphorePropertiesQuery& in_query, + Anvil::SemaphoreProperties* out_opt_result_ptr) const +{ + const Anvil::ExtensionKHRExternalSemaphoreCapabilitiesEntrypoints* entrypoints_ptr = nullptr; + Anvil::StructChainer struct_chainer; + VkExternalSemaphorePropertiesKHR result_struct; + bool result = false; + + if (!m_instance_ptr->get_enabled_extensions_info()->khr_external_semaphore_capabilities() ) + { + anvil_assert(m_instance_ptr->get_enabled_extensions_info()->khr_external_semaphore_capabilities() ); + + goto end; + } + else + { + entrypoints_ptr = &m_instance_ptr->get_extension_khr_external_semaphore_capabilities_entrypoints(); + } + + 
{ + VkPhysicalDeviceExternalSemaphoreInfoKHR root_struct; + + root_struct.handleType = static_cast(in_query.external_semaphore_handle_type); + root_struct.pNext = nullptr; + root_struct.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR; + + struct_chainer.append_struct(root_struct); + } + + result_struct.pNext = nullptr; + result_struct.sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR; + + entrypoints_ptr->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(m_physical_device, + struct_chainer.create_chain()->get_root_struct(), + &result_struct); + + if (out_opt_result_ptr != nullptr) + { + *out_opt_result_ptr = std::move(Anvil::SemaphoreProperties(ExternalSemaphoreProperties(result_struct) )); } + + /* All done */ + result = true; + +end: + return result; } /* Please see header for specification */ -bool Anvil::PhysicalDevice::get_sparse_image_format_properties(VkFormat in_format, - VkImageType in_type, - VkSampleCountFlagBits in_sample_count, - VkImageUsageFlags in_usage, - VkImageTiling in_tiling, - std::vector& out_result) const +bool Anvil::PhysicalDevice::get_sparse_image_format_properties(Anvil::Format in_format, + Anvil::ImageType in_type, + Anvil::SampleCountFlagBits in_sample_count, + Anvil::ImageUsageFlags in_usage, + Anvil::ImageTiling in_tiling, + std::vector& out_result) const { /* TODO: It might be a good idea to cache the retrieved properties */ uint32_t n_properties = 0; out_result.clear(); - vkGetPhysicalDeviceSparseImageFormatProperties(m_physical_device, - in_format, - in_type, - in_sample_count, - in_usage, - in_tiling, - &n_properties, - nullptr); /* pProperties */ + Anvil::Vulkan::vkGetPhysicalDeviceSparseImageFormatProperties(m_physical_device, + static_cast (in_format), + static_cast(in_type), + static_cast(in_sample_count), + in_usage.get_vk(), + static_cast(in_tiling), + &n_properties, + nullptr); /* pProperties */ if (n_properties > 0) { out_result.resize(n_properties); - vkGetPhysicalDeviceSparseImageFormatProperties(m_physical_device, - in_format, - in_type, - in_sample_count, - in_usage, - in_tiling, - &n_properties, - &out_result[0]); + Anvil::Vulkan::vkGetPhysicalDeviceSparseImageFormatProperties(m_physical_device, + static_cast (in_format), + static_cast (in_type), + static_cast(in_sample_count), + in_usage.get_vk(), + static_cast(in_tiling), + &n_properties, + reinterpret_cast(&out_result[0]) ); } return true; @@ -306,9 +1601,16 @@ bool Anvil::PhysicalDevice::get_sparse_image_format_properties(VkFormat /* Please see header for specification */ bool Anvil::PhysicalDevice::is_device_extension_supported(const std::string& in_extension_name) const { - return std::find(m_extensions.begin(), - m_extensions.end(), - in_extension_name) != m_extensions.end(); + anvil_assert(m_extension_info_ptr != nullptr); + + auto isSupported = false; + try + { + isSupported = m_extension_info_ptr->get_device_extension_info()->by_name(in_extension_name); + } + catch(const std::out_of_range&) + {} + return isSupported; } /* Please see header for specification */ @@ -319,32 +1621,49 @@ bool Anvil::PhysicalDevice::is_layer_supported(const std::string& in_layer_name) in_layer_name) != m_layers.end(); } -/* Please see header for specification */ -void Anvil::PhysicalDevice::register_device(std::shared_ptr in_device_ptr) +/* Adjusts the device group index for this physical device. + * + * @param new_device_group_index New device group index to assign. Must not be 0. 
+ */ +void Anvil::PhysicalDevice::set_device_group_index(uint32_t in_new_device_group_index) { - auto device_iterator = std::find(m_cached_devices.begin(), - m_cached_devices.end(), - in_device_ptr); + m_device_group_index = in_new_device_group_index; +} - anvil_assert(device_iterator == m_cached_devices.end() ); +/** TODO */ +void Anvil::PhysicalDevice::set_device_group_device_index(uint32_t in_new_device_group_device_index) +{ + m_device_group_device_index = in_new_device_group_device_index; +} - if (device_iterator == m_cached_devices.end() ) - { - m_cached_devices.push_back(in_device_ptr); - } +/* Please see header for specification */ +bool Anvil::PhysicalDevice::supports_core_vk1_1() const +{ + return supports_core_vk1_1(get_device_properties().core_vk1_0_properties_ptr->api_version); } /* Please see header for specification */ -void Anvil::PhysicalDevice::unregister_device(std::shared_ptr in_device_ptr) +bool Anvil::PhysicalDevice::supports_core_vk1_1(const uint32_t& in_api_version) const { - auto device_iterator = std::find(m_cached_devices.begin(), - m_cached_devices.end(), - in_device_ptr); + auto vk_major_version = VK_VERSION_MAJOR(in_api_version); + auto vk_minor_version = VK_VERSION_MINOR(in_api_version); + + /* Make sure to clamp the version in case API version used to create the parent instance is lower! */ + const auto instance_api_version = m_instance_ptr->get_api_version(); + uint32_t instance_major_version = 0; + uint32_t instance_minor_version = 0; - anvil_assert(device_iterator != m_cached_devices.end() ); + Anvil::Utils::get_version_chunks_for_api_version(instance_api_version, + &instance_major_version, + &instance_minor_version); - if (device_iterator != m_cached_devices.end() ) + if ( instance_major_version < vk_major_version || + (instance_major_version == vk_major_version && instance_minor_version < vk_minor_version)) { - m_cached_devices.erase(device_iterator); + vk_major_version = instance_major_version; + vk_minor_version = instance_minor_version; } -} + + return (vk_major_version >= 2) || + (vk_major_version == 1 && vk_minor_version >= 1); +} \ No newline at end of file diff --git a/src/wrappers/pipeline_cache.cpp b/src/wrappers/pipeline_cache.cpp index dd541f63..c5572713 100644 --- a/src/wrappers/pipeline_cache.cpp +++ b/src/wrappers/pipeline_cache.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -27,17 +27,18 @@ /** Please see header for specification */ -Anvil::PipelineCache::PipelineCache(std::weak_ptr in_device_ptr, - size_t in_initial_data_size, - const void* in_initial_data) +Anvil::PipelineCache::PipelineCache(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe, + size_t in_initial_data_size, + const void* in_initial_data) :DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT), + Anvil::ObjectType::PIPELINE_CACHE), + MTSafetySupportProvider (in_mt_safe), m_device_ptr (in_device_ptr), m_pipeline_cache (VK_NULL_HANDLE) { - VkPipelineCacheCreateInfo cache_create_info; - std::shared_ptr device_locked_ptr(in_device_ptr); - VkResult result_vk (VK_ERROR_INITIALIZATION_FAILED); + VkPipelineCacheCreateInfo cache_create_info; + VkResult result_vk (VK_ERROR_INITIALIZATION_FAILED); ANVIL_REDUNDANT_VARIABLE(result_vk); @@ -47,10 +48,10 @@ Anvil::PipelineCache::PipelineCache(std::weak_ptr in_device_p cache_create_info.pNext = nullptr; cache_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO; - result_vk = vkCreatePipelineCache(device_locked_ptr->get_device_vk(), - &cache_create_info, - nullptr, /* pAllocator */ - &m_pipeline_cache); + result_vk = Anvil::Vulkan::vkCreatePipelineCache(m_device_ptr->get_device_vk(), + &cache_create_info, + nullptr, /* pAllocator */ + &m_pipeline_cache); anvil_assert_vk_call_succeeded(result_vk); if (is_vk_call_successful(result_vk) ) @@ -61,7 +62,7 @@ Anvil::PipelineCache::PipelineCache(std::weak_ptr in_device_p anvil_assert(m_pipeline_cache != VK_NULL_HANDLE); /* Register the instance */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_PIPELINE_CACHE, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::PIPELINE_CACHE, this); } @@ -69,30 +70,35 @@ Anvil::PipelineCache::PipelineCache(std::weak_ptr in_device_p Anvil::PipelineCache::~PipelineCache() { /* Unregister the instance */ - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_PIPELINE_CACHE, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::PIPELINE_CACHE, this); if (m_pipeline_cache != VK_NULL_HANDLE) { - std::shared_ptr device_locked_ptr(m_device_ptr); - - vkDestroyPipelineCache(device_locked_ptr->get_device_vk(), - m_pipeline_cache, - nullptr /* pAllocator */); + lock(); + { + Anvil::Vulkan::vkDestroyPipelineCache(m_device_ptr->get_device_vk(), + m_pipeline_cache, + nullptr /* pAllocator */); + } + unlock(); m_pipeline_cache = VK_NULL_HANDLE; } } /** Please see header for specification */ -std::shared_ptr Anvil::PipelineCache::create(std::weak_ptr in_device_ptr, - size_t in_initial_data_size, - const void* in_initial_data) +Anvil::PipelineCacheUniquePtr Anvil::PipelineCache::create(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe, + size_t in_initial_data_size, + const void* in_initial_data) { - std::shared_ptr result_ptr; + PipelineCacheUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( new Anvil::PipelineCache(in_device_ptr, + in_mt_safe, in_initial_data_size, in_initial_data) ); @@ -101,32 +107,29 @@ std::shared_ptr Anvil::PipelineCache::create(std::weak_ptr } /** Please see header for specification */ -bool Anvil::PipelineCache::get_data(size_t* out_n_data_bytes_ptr, - const void** out_data_ptr) +bool Anvil::PipelineCache::get_data(size_t* out_n_data_bytes_ptr, + void* 
out_data_ptr) { - std::shared_ptr device_locked_ptr(m_device_ptr); - VkResult result_vk; - - result_vk = vkGetPipelineCacheData(device_locked_ptr->get_device_vk(), - m_pipeline_cache, - out_n_data_bytes_ptr, - out_data_ptr); + VkResult result_vk; - anvil_assert(result_vk); + result_vk = Anvil::Vulkan::vkGetPipelineCacheData(m_device_ptr->get_device_vk(), + m_pipeline_cache, + out_n_data_bytes_ptr, + out_data_ptr); return is_vk_call_successful(result_vk); } /** Please see header for specification */ -bool Anvil::PipelineCache::merge(uint32_t in_n_pipeline_caches, - std::shared_ptr const* in_src_cache_ptrs) +bool Anvil::PipelineCache::merge(uint32_t in_n_pipeline_caches, + const Anvil::PipelineCache* const* in_src_cache_ptrs) { - std::shared_ptr device_locked_ptr(m_device_ptr); - VkResult result_vk; - VkPipelineCache src_pipeline_caches[64]; + VkResult result_vk; + std::vector src_pipeline_caches(in_n_pipeline_caches); anvil_assert(in_n_pipeline_caches < sizeof(src_pipeline_caches) / sizeof(src_pipeline_caches[0]) ); + for (uint32_t n_pipeline_cache = 0; n_pipeline_cache < in_n_pipeline_caches; ++n_pipeline_cache) @@ -134,10 +137,14 @@ bool Anvil::PipelineCache::merge(uint32_t src_pipeline_caches[n_pipeline_cache] = in_src_cache_ptrs[n_pipeline_cache]->get_pipeline_cache(); } - result_vk = vkMergePipelineCaches(device_locked_ptr->get_device_vk(), - m_pipeline_cache, - in_n_pipeline_caches, - src_pipeline_caches); + lock(); + { + result_vk = Anvil::Vulkan::vkMergePipelineCaches(m_device_ptr->get_device_vk(), + m_pipeline_cache, + in_n_pipeline_caches, + &src_pipeline_caches.at(0) ); + } + unlock(); anvil_assert(result_vk); diff --git a/src/wrappers/pipeline_layout.cpp b/src/wrappers/pipeline_layout.cpp index 12673304..743f7582 100644 --- a/src/wrappers/pipeline_layout.cpp +++ b/src/wrappers/pipeline_layout.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -24,155 +24,103 @@ #include "misc/object_tracker.h" #include "wrappers/descriptor_set_group.h" #include "wrappers/descriptor_set_layout.h" +#include "wrappers/descriptor_set_layout_manager.h" #include "wrappers/device.h" #include "wrappers/pipeline_layout.h" #include "wrappers/pipeline_layout_manager.h" /** Please see header for specification */ -Anvil::PipelineLayout::PipelineLayout(std::weak_ptr in_device_ptr) +Anvil::PipelineLayout::PipelineLayout(const Anvil::BaseDevice* in_device_ptr, + const Anvil::PushConstantRanges& in_push_constant_ranges, + bool in_mt_safe) :DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT) + Anvil::ObjectType::PIPELINE_LAYOUT), + MTSafetySupportProvider (in_mt_safe) { - - m_device_ptr = in_device_ptr; - m_dirty = true; - m_is_immutable = false; - m_layout_vk = VK_NULL_HANDLE; - - m_id = in_device_ptr.lock()->get_pipeline_layout_manager()->reserve_pipeline_layout_id(); - - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_PIPELINE_LAYOUT, - this); -} - -/** Please see header for specification */ -Anvil::PipelineLayout::PipelineLayout(std::weak_ptr in_device_ptr, - std::shared_ptr in_dsg_ptr, - const Anvil::PushConstantRanges& in_push_constant_ranges, - bool in_is_immutable) - :DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT) -{ - m_device_ptr = in_device_ptr; - m_dirty = true; - m_is_immutable = in_is_immutable; - m_layout_vk = VK_NULL_HANDLE; - - m_id = in_device_ptr.lock()->get_pipeline_layout_manager()->reserve_pipeline_layout_id(); - - m_dsg_ptr = in_dsg_ptr; + m_device_ptr = in_device_ptr; + m_layout_vk = VK_NULL_HANDLE; m_push_constant_ranges = in_push_constant_ranges; - - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_PIPELINE_LAYOUT, - this); } /** Please see header for specification */ Anvil::PipelineLayout::~PipelineLayout() { - m_dsg_ptr = nullptr; - - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_PIPELINE_LAYOUT, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::PIPELINE_LAYOUT, this); if (m_layout_vk != VK_NULL_HANDLE) { - std::shared_ptr device_locked_ptr(m_device_ptr); - - vkDestroyPipelineLayout(device_locked_ptr->get_device_vk(), - m_layout_vk, - nullptr /* pAllocator */); + lock(); + { + Anvil::Vulkan::vkDestroyPipelineLayout(m_device_ptr->get_device_vk(), + m_layout_vk, + nullptr /* pAllocator */); + } + unlock(); m_layout_vk = VK_NULL_HANDLE; } } /** Please see header for specification */ -bool Anvil::PipelineLayout::attach_push_constant_range(uint32_t in_offset, - uint32_t in_size, - VkShaderStageFlags in_stages) +bool Anvil::PipelineLayout::bake(const std::vector* in_ds_create_info_items_ptr) { - bool result = false; - - if (m_is_immutable) - { - anvil_assert(!m_is_immutable); - - goto end; - } - - /* Attach the specified push constant range */ - m_dirty = true; - m_push_constant_ranges.push_back(PushConstantRange(in_offset, - in_size, - in_stages) ); - - result = true; -end: - return result; -} - -/** Please see header for specification */ -bool Anvil::PipelineLayout::bake() -{ - std::shared_ptr device_locked_ptr(m_device_ptr); + auto ds_layout_manager_ptr = m_device_ptr->get_descriptor_set_layout_manager(); std::vector ds_layouts_vk; VkPipelineLayoutCreateInfo pipeline_layout_create_info; std::vector 
push_constant_ranges_vk; VkResult result_vk; /* Convert descriptor set layouts to Vulkan equivalents */ - const VkDescriptorSetLayout dummy_ds_layout = device_locked_ptr->get_dummy_descriptor_set_layout()->get_layout(); - uint32_t max_ds_binding_index = 0; + const VkDescriptorSetLayout dummy_ds_layout = m_device_ptr->get_dummy_descriptor_set_layout()->get_layout(); - if (!m_dirty) - { - result_vk = VK_SUCCESS; - - goto end; - } - - if (m_layout_vk != VK_NULL_HANDLE) - { - vkDestroyPipelineLayout(device_locked_ptr->get_device_vk(), - m_layout_vk, - nullptr /* pAllocator */); - - m_layout_vk = VK_NULL_HANDLE; - set_vk_handle(VK_NULL_HANDLE); - } + ds_layouts_vk.resize(in_ds_create_info_items_ptr->size(), + dummy_ds_layout); - if (m_dsg_ptr != nullptr) + if (in_ds_create_info_items_ptr != nullptr && + in_ds_create_info_items_ptr->size() > 0) { - const uint32_t n_dses = m_dsg_ptr->get_n_of_descriptor_sets(); + const uint32_t n_current_descriptor_sets = static_cast(in_ds_create_info_items_ptr->size() ); - for (uint32_t n_ds = 0; - n_ds < n_dses; - ++n_ds) + for (uint32_t n_current_descriptor_set = 0; + n_current_descriptor_set < n_current_descriptor_sets; + ++n_current_descriptor_set) { - const uint32_t current_binding_index = m_dsg_ptr->get_descriptor_set_binding_index(n_ds); + auto& current_ds_create_info_ptr = in_ds_create_info_items_ptr->at(n_current_descriptor_set); - if (current_binding_index > max_ds_binding_index) + if (current_ds_create_info_ptr == nullptr) { - max_ds_binding_index = current_binding_index; + m_ds_create_info_ptrs.push_back( + Anvil::DescriptorSetCreateInfoUniquePtr() + ); + + continue; } - } - } + else + { + Anvil::DescriptorSetLayoutUniquePtr new_ds_layout_ptr; - ds_layouts_vk.resize(max_ds_binding_index + 1, - dummy_ds_layout); + m_ds_create_info_ptrs.push_back( + Anvil::DescriptorSetCreateInfoUniquePtr( + new Anvil::DescriptorSetCreateInfo(*current_ds_create_info_ptr) + ) + ); - if (m_dsg_ptr != nullptr) - { - const uint32_t n_current_dsg_sets = m_dsg_ptr->get_n_of_descriptor_sets(); + if (!ds_layout_manager_ptr->get_layout(current_ds_create_info_ptr.get(), + &new_ds_layout_ptr) ) + { + anvil_assert_fail(); - for (uint32_t n_current_dsg_set = 0; - n_current_dsg_set < n_current_dsg_sets; - ++n_current_dsg_set) - { - uint32_t ds_binding_index = m_dsg_ptr->get_descriptor_set_binding_index(n_current_dsg_set); + result_vk = VK_ERROR_INITIALIZATION_FAILED; + goto end; + } + + ds_layouts_vk.at(n_current_descriptor_set) = new_ds_layout_ptr->get_layout(); - ds_layouts_vk[ds_binding_index] = m_dsg_ptr->get_descriptor_set_layout(ds_binding_index)->get_layout(); + m_ds_layout_ptrs.push_back( + std::move(new_ds_layout_ptr) + ); + } } } @@ -187,7 +135,7 @@ bool Anvil::PipelineLayout::bake() new_push_constant_range.offset = push_constant_range_iterator->offset; new_push_constant_range.size = push_constant_range_iterator->size; - new_push_constant_range.stageFlags = push_constant_range_iterator->stages; + new_push_constant_range.stageFlags = push_constant_range_iterator->stages.get_vk(); push_constant_ranges_vk.push_back(new_push_constant_range); } @@ -196,24 +144,22 @@ bool Anvil::PipelineLayout::bake() pipeline_layout_create_info.flags = 0; pipeline_layout_create_info.setLayoutCount = static_cast(ds_layouts_vk.size() ); pipeline_layout_create_info.pNext = nullptr; - pipeline_layout_create_info.pPushConstantRanges = (push_constant_ranges_vk.size() > 0) ? &push_constant_ranges_vk[0] + pipeline_layout_create_info.pPushConstantRanges = (push_constant_ranges_vk.size() > 0) ? 
&push_constant_ranges_vk.at(0) : nullptr; - pipeline_layout_create_info.pSetLayouts = (ds_layouts_vk.size() > 0) ? &ds_layouts_vk[0] + pipeline_layout_create_info.pSetLayouts = (ds_layouts_vk.size() > 0) ? &ds_layouts_vk.at(0) : nullptr; - pipeline_layout_create_info.pushConstantRangeCount = (uint32_t) m_push_constant_ranges.size(); + pipeline_layout_create_info.pushConstantRangeCount = static_cast(m_push_constant_ranges.size() ); pipeline_layout_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; - result_vk = vkCreatePipelineLayout(device_locked_ptr->get_device_vk(), - &pipeline_layout_create_info, - nullptr, /* pAllocator */ - &m_layout_vk); + result_vk = Anvil::Vulkan::vkCreatePipelineLayout(m_device_ptr->get_device_vk(), + &pipeline_layout_create_info, + nullptr, /* pAllocator */ + &m_layout_vk); anvil_assert_vk_call_succeeded(result_vk); if (is_vk_call_successful(result_vk)) { set_vk_handle(m_layout_vk); - - m_dirty = false; } end: @@ -221,52 +167,33 @@ bool Anvil::PipelineLayout::bake() } /* Please see header for specification */ -std::shared_ptr Anvil::PipelineLayout::create(std::weak_ptr in_device_ptr) +Anvil::PipelineLayoutUniquePtr Anvil::PipelineLayout::create(const Anvil::BaseDevice* in_device_ptr, + const std::vector* in_ds_create_info_items_ptr, + const PushConstantRanges& in_push_constant_ranges, + bool in_mt_safe) { - std::shared_ptr result_ptr; - - result_ptr.reset( - new Anvil::PipelineLayout(in_device_ptr) - ); - - return result_ptr; -} - -/* Please see header for specification */ -std::shared_ptr Anvil::PipelineLayout::create(std::weak_ptr in_device_ptr, - std::shared_ptr in_dsg_ptr, - const PushConstantRanges& in_push_constant_ranges, - bool in_is_immutable) -{ - std::shared_ptr result_ptr; + PipelineLayoutUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( new Anvil::PipelineLayout(in_device_ptr, - in_dsg_ptr, in_push_constant_ranges, - in_is_immutable) + in_mt_safe) ); - return result_ptr; -} - -/** Please see header for specification */ -bool Anvil::PipelineLayout::set_dsg(std::shared_ptr in_dsg_ptr) -{ - bool result = false; - - if (m_is_immutable) + if (result_ptr != nullptr) { - anvil_assert(!m_is_immutable); - - goto end; + if (!result_ptr->bake(in_ds_create_info_items_ptr) ) + { + result_ptr.reset(); + } + else + { + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::PIPELINE_LAYOUT, + result_ptr.get() ); + } } - /* Attach the specified DSG */ - m_dirty = true; - m_dsg_ptr = in_dsg_ptr; - - result = true; -end: - return result; + return result_ptr; } + diff --git a/src/wrappers/pipeline_layout_manager.cpp b/src/wrappers/pipeline_layout_manager.cpp index 2c7d2359..54c778e2 100644 --- a/src/wrappers/pipeline_layout_manager.cpp +++ b/src/wrappers/pipeline_layout_manager.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -29,171 +29,186 @@ /** Constructor. 
*/ -Anvil::PipelineLayoutManager::PipelineLayoutManager(std::weak_ptr in_device_ptr) - :m_device_ptr (in_device_ptr), - m_pipeline_layouts_created(0) +Anvil::PipelineLayoutManager::PipelineLayoutManager(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe) + :MTSafetySupportProvider(in_mt_safe), + m_device_ptr (in_device_ptr) { - update_subscriptions(true); - /* Register the object */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_PIPELINE_LAYOUT_MANAGER, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::ANVIL_PIPELINE_LAYOUT_MANAGER, this); } /** Destructor */ Anvil::PipelineLayoutManager::~PipelineLayoutManager() { - update_subscriptions(false); + /* If this assertion check explodes, your app has not released all pipelines it has created. */ + anvil_assert(m_pipeline_layouts.size() == 0); /* Unregister the object */ - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_PIPELINE_LAYOUT_MANAGER, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::ANVIL_PIPELINE_LAYOUT_MANAGER, this); } /* Please see header for specification */ -std::shared_ptr Anvil::PipelineLayoutManager::create(std::weak_ptr in_device_ptr) +Anvil::PipelineLayoutManagerUniquePtr Anvil::PipelineLayoutManager::create(const Anvil::BaseDevice* in_device_ptr, + bool in_mt_safe) { - std::shared_ptr device_locked_ptr(in_device_ptr); - std::shared_ptr result_ptr; + PipelineLayoutManagerUniquePtr result_ptr(nullptr, + std::default_delete() ); - result_ptr.reset(new Anvil::PipelineLayoutManager(in_device_ptr) ); - anvil_assert(result_ptr != nullptr); + result_ptr.reset( + new Anvil::PipelineLayoutManager(in_device_ptr, + in_mt_safe) + ); + anvil_assert(result_ptr != nullptr); return result_ptr; } /* Please see header for specification */ -bool Anvil::PipelineLayoutManager::get_layout(std::shared_ptr in_dsg_ptr, - const PushConstantRanges& in_push_constant_ranges, - std::shared_ptr* out_pipeline_layout_ptr_ptr) +bool Anvil::PipelineLayoutManager::get_layout(const std::vector* in_ds_create_info_items_ptr, + const PushConstantRanges& in_push_constant_ranges, + Anvil::PipelineLayoutUniquePtr* out_pipeline_layout_ptr_ptr) { - bool result = false; + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); + const uint32_t n_descriptor_sets_in_in_dsg = static_cast(in_ds_create_info_items_ptr->size() ); + bool result = false; + Anvil::PipelineLayout* result_pipeline_layout_ptr = nullptr; + + if (mutex_ptr != nullptr) + { + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); + } for (auto layout_iterator = m_pipeline_layouts.begin(); layout_iterator != m_pipeline_layouts.end(); ++layout_iterator) { - if (layout_iterator->second.expired() ) + auto& current_pipeline_layout_container_ptr = *layout_iterator; + auto& current_pipeline_layout_ptr = current_pipeline_layout_container_ptr->pipeline_layout_ptr; + auto current_pipeline_ds_create_info_ptrs = current_pipeline_layout_ptr->get_ds_create_info_ptrs(); + bool dss_match = true; + const auto n_descriptor_sets_in_current_pipeline_dsg = static_cast(current_pipeline_ds_create_info_ptrs->size() ); + + if (n_descriptor_sets_in_current_pipeline_dsg != n_descriptor_sets_in_in_dsg) { continue; } - std::shared_ptr current_pipeline_layout_ptr = layout_iterator->second.lock(); - - if (current_pipeline_layout_ptr->get_attached_dsg() == in_dsg_ptr && - current_pipeline_layout_ptr->get_attached_push_constant_ranges() == in_push_constant_ranges) + if (current_pipeline_layout_ptr->get_attached_push_constant_ranges() != 
in_push_constant_ranges) { - *out_pipeline_layout_ptr_ptr = current_pipeline_layout_ptr; - result = true; - - break; + continue; } - } - - if (!result) - { - result = true; - /* Try to create a new layout for the specified DSG + push constant range set */ - std::shared_ptr new_layout_ptr = Anvil::PipelineLayout::create(m_device_ptr); - - result = new_layout_ptr->set_dsg(in_dsg_ptr); - - if (!result) + for (uint32_t n_ds = 0; + n_ds < n_descriptor_sets_in_in_dsg && dss_match; + ++n_ds) { - goto end; - } + auto& in_dsg_ds_create_info_ptr = in_ds_create_info_items_ptr->at (n_ds); + const auto& current_pipeline_dsg_ds_create_info_ptr = current_pipeline_ds_create_info_ptrs->at(n_ds); - for (auto push_constant_range_iterator = in_push_constant_ranges.begin(); - push_constant_range_iterator != in_push_constant_ranges.end(); - ++push_constant_range_iterator) - { - result = new_layout_ptr->attach_push_constant_range((*push_constant_range_iterator).offset, - (*push_constant_range_iterator).size, - (*push_constant_range_iterator).stages); + if ((in_dsg_ds_create_info_ptr != nullptr && current_pipeline_dsg_ds_create_info_ptr == nullptr) || + (in_dsg_ds_create_info_ptr == nullptr && current_pipeline_dsg_ds_create_info_ptr != nullptr) ) + { + dss_match = false; + + break; + } - if (!result) + if (in_dsg_ds_create_info_ptr != nullptr && + current_pipeline_dsg_ds_create_info_ptr != nullptr) { - goto end; + if (!(*in_dsg_ds_create_info_ptr == *current_pipeline_dsg_ds_create_info_ptr) ) + { + dss_match = false; + + break; + } } } - if (result) + if (!dss_match) { - const PipelineLayoutID pipeline_layout_id = new_layout_ptr->get_id(); + continue; + } - anvil_assert(m_pipeline_layouts.find(pipeline_layout_id) == m_pipeline_layouts.end() ); + result = true; + result_pipeline_layout_ptr = current_pipeline_layout_container_ptr->pipeline_layout_ptr.get(); - new_layout_ptr->bake(); + current_pipeline_layout_container_ptr->n_references.fetch_add(1); - m_pipeline_layouts[pipeline_layout_id] = new_layout_ptr; - *out_pipeline_layout_ptr_ptr = new_layout_ptr; - } + break; } -end: - return result; -} -/* Please see header for specification */ -std::shared_ptr Anvil::PipelineLayoutManager::get_layout_by_id(Anvil::PipelineLayoutID in_id) const -{ - if (m_pipeline_layouts.find(in_id) == m_pipeline_layouts.end() || - m_pipeline_layouts.at (in_id).expired() ) - { - return std::shared_ptr(); - } - else + if (!result) { - return m_pipeline_layouts.at(in_id).lock(); + auto new_layout_ptr = Anvil::PipelineLayout::create(m_device_ptr, + in_ds_create_info_items_ptr, + in_push_constant_ranges, + is_mt_safe() ); + auto new_layout_container_ptr = std::unique_ptr( + new PipelineLayoutContainer() + ); + + result = true; + result_pipeline_layout_ptr = new_layout_ptr.get(); + new_layout_container_ptr->pipeline_layout_ptr = std::move(new_layout_ptr); + + m_pipeline_layouts.push_back( + std::move(new_layout_container_ptr) + ); } -} - -/** Called back whenever a pipeline layout is released **/ -void Anvil::PipelineLayoutManager::on_pipeline_layout_dropped() -{ - PipelineLayouts::iterator layout_iterator; - /* Are we the last standing layout user? 
*/ - for (layout_iterator = m_pipeline_layouts.begin(); - layout_iterator != m_pipeline_layouts.end(); - ) + if (result) { - if (layout_iterator->second.expired() ) - { - m_pipeline_layouts.erase(layout_iterator); + anvil_assert(result_pipeline_layout_ptr != nullptr); - layout_iterator = m_pipeline_layouts.begin(); - } - else - { - ++layout_iterator; - } + *out_pipeline_layout_ptr_ptr = Anvil::PipelineLayoutUniquePtr(result_pipeline_layout_ptr, + std::bind(&PipelineLayoutManager::on_pipeline_layout_dereferenced, + this, + result_pipeline_layout_ptr) + ); } -} -/* Please see header for specification */ -Anvil::PipelineLayoutID Anvil::PipelineLayoutManager::reserve_pipeline_layout_id() -{ - return m_pipeline_layouts_created++; + return result; } -void Anvil::PipelineLayoutManager::update_subscriptions(bool in_should_init) +void Anvil::PipelineLayoutManager::on_pipeline_layout_dereferenced(Anvil::PipelineLayout* in_layout_ptr) { - const auto callback_func = std::bind(&PipelineLayoutManager::on_pipeline_layout_dropped, - this); - void* callback_owner = this; - auto object_tracker_ptr = Anvil::ObjectTracker::get(); + bool has_found = false; + std::unique_lock mutex_lock; + auto mutex_ptr = get_mutex(); - if (in_should_init) + if (mutex_ptr != nullptr) { - object_tracker_ptr->register_for_callbacks(OBJECT_TRACKER_CALLBACK_ID_ON_PIPELINE_LAYOUT_OBJECT_ABOUT_TO_BE_UNREGISTERED, - callback_func, - callback_owner); + mutex_lock = std::move( + std::unique_lock(*mutex_ptr) + ); } - else + + for (auto layout_iterator = m_pipeline_layouts.begin(); + layout_iterator != m_pipeline_layouts.end() && !has_found; + ++layout_iterator) { - object_tracker_ptr->unregister_from_callbacks(OBJECT_TRACKER_CALLBACK_ID_ON_PIPELINE_LAYOUT_OBJECT_ABOUT_TO_BE_UNREGISTERED, - callback_func, - callback_owner); + auto& current_pipeline_layout_container_ptr = *layout_iterator; + auto& current_pipeline_layout_ptr = current_pipeline_layout_container_ptr->pipeline_layout_ptr; + + if (current_pipeline_layout_ptr.get() == in_layout_ptr) + { + has_found = true; + + if (current_pipeline_layout_container_ptr->n_references.fetch_sub(1) == 1) + { + m_pipeline_layouts.erase(layout_iterator); + } + + break; + } } -} + + anvil_assert(has_found); +} \ No newline at end of file diff --git a/src/wrappers/query_pool.cpp b/src/wrappers/query_pool.cpp index c533da08..337105b0 100644 --- a/src/wrappers/query_pool.cpp +++ b/src/wrappers/query_pool.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -28,44 +28,49 @@ /* Please see header for specification */ -Anvil::QueryPool::QueryPool(std::weak_ptr in_device_ptr, - VkQueryType in_query_type, - uint32_t in_n_max_concurrent_queries) +Anvil::QueryPool::QueryPool(const Anvil::BaseDevice* in_device_ptr, + VkQueryType in_query_type, + uint32_t in_n_max_concurrent_queries, + bool in_mt_safe) :DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT), + Anvil::ObjectType::QUERY_POOL), + MTSafetySupportProvider (in_mt_safe), m_device_ptr (in_device_ptr), - m_n_max_indices (in_n_max_concurrent_queries) + m_n_max_indices (in_n_max_concurrent_queries), + m_query_type (in_query_type) { - anvil_assert(in_query_type == VK_QUERY_TYPE_OCCLUSION || - in_query_type == VK_QUERY_TYPE_TIMESTAMP); + anvil_assert(in_query_type == VK_QUERY_TYPE_OCCLUSION || + in_query_type == VK_QUERY_TYPE_TIMESTAMP || + in_query_type == VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT); - init(in_device_ptr, - in_query_type, - 0, /* in_flags - irrelevant */ + init(in_query_type, + Anvil::QueryPipelineStatisticFlagBits::NONE, in_n_max_concurrent_queries); /* Register the pool wrapper instance */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_QUERY_POOL, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::QUERY_POOL, this); } /* Please see header for specification */ -Anvil::QueryPool::QueryPool(std::weak_ptr in_device_ptr, - VkQueryType in_query_type, - VkFlags in_query_flags, - uint32_t in_n_max_concurrent_queries) +Anvil::QueryPool::QueryPool(const Anvil::BaseDevice* in_device_ptr, + VkQueryType in_query_type, + Anvil::QueryPipelineStatisticFlags in_pipeline_statistics, + uint32_t in_n_max_concurrent_queries, + bool in_mt_safe) :DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT), - m_device_ptr (in_device_ptr), - m_n_max_indices(in_n_max_concurrent_queries) + Anvil::ObjectType::QUERY_POOL), + MTSafetySupportProvider (in_mt_safe), + m_device_ptr (in_device_ptr), + m_n_max_indices (in_n_max_concurrent_queries), + m_query_type (in_query_type) { - init(in_device_ptr, - in_query_type, - in_query_flags, + init(in_query_type, + in_pipeline_statistics, in_n_max_concurrent_queries); /* Register the pool wrapper instance */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_QUERY_POOL, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::QUERY_POOL, this); } @@ -74,77 +79,166 @@ Anvil::QueryPool::QueryPool(std::weak_ptr in_device_ptr, Anvil::QueryPool::~QueryPool() { /* Unregister the pool wrapper instance */ - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_QUERY_POOL, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::QUERY_POOL, this); if (m_query_pool_vk != VK_NULL_HANDLE) { - std::shared_ptr device_locked_ptr(m_device_ptr); - - vkDestroyQueryPool(device_locked_ptr->get_device_vk(), - m_query_pool_vk, - nullptr /* pAllocator */); + lock(); + { + Anvil::Vulkan::vkDestroyQueryPool(m_device_ptr->get_device_vk(), + m_query_pool_vk, + nullptr /* pAllocator */); + } + unlock(); m_query_pool_vk = VK_NULL_HANDLE; } } /* Please see header for specification */ -std::shared_ptr Anvil::QueryPool::create_non_ps_query_pool(std::weak_ptr in_device_ptr, - VkQueryType in_query_type, - uint32_t in_n_max_concurrent_queries) +Anvil::QueryPoolUniquePtr 
Anvil::QueryPool::create_non_ps_query_pool(const Anvil::BaseDevice* in_device_ptr, + VkQueryType in_query_type, + uint32_t in_n_max_concurrent_queries, + MTSafety in_mt_safety) { - std::shared_ptr result_ptr; + const bool mt_safe = Anvil::Utils::convert_mt_safety_enum_to_boolean(in_mt_safety, + in_device_ptr); + QueryPoolUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( new Anvil::QueryPool(in_device_ptr, in_query_type, - in_n_max_concurrent_queries) + in_n_max_concurrent_queries, + mt_safe) ); return result_ptr; } /* Please see header for specification */ -std::shared_ptr Anvil::QueryPool::create_ps_query_pool(std::weak_ptr in_device_ptr, - VkQueryPipelineStatisticFlags in_pipeline_statistics, - uint32_t in_n_max_concurrent_queries) +Anvil::QueryPoolUniquePtr Anvil::QueryPool::create_ps_query_pool(const Anvil::BaseDevice* in_device_ptr, + Anvil::QueryPipelineStatisticFlags in_pipeline_statistics, + uint32_t in_n_max_concurrent_queries, + MTSafety in_mt_safety) { - std::shared_ptr result_ptr; + const bool mt_safe = Anvil::Utils::convert_mt_safety_enum_to_boolean(in_mt_safety, + in_device_ptr); + QueryPoolUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( new Anvil::QueryPool(in_device_ptr, VK_QUERY_TYPE_PIPELINE_STATISTICS, in_pipeline_statistics, - in_n_max_concurrent_queries) + in_n_max_concurrent_queries, + mt_safe) ); return result_ptr; } +bool Anvil::QueryPool::get_query_pool_results_internal(const uint32_t& in_first_query_index, + const uint32_t& in_n_queries, + const Anvil::QueryResultFlags& in_query_props, + const bool& in_should_return_uint64, + void* out_results_ptr, + bool* out_all_query_results_retrieved_ptr) +{ + VkQueryResultFlags flags = 0; + uint32_t result_query_size = 0; + bool result = false; + VkResult result_vk; + + if (in_first_query_index + in_n_queries > m_n_max_indices) + { + anvil_assert(in_first_query_index + in_n_queries <= m_n_max_indices); + + goto end; + } + + if (in_should_return_uint64) + { + flags |= VK_QUERY_RESULT_64_BIT; + result_query_size = sizeof(uint64_t); + } + else + { + result_query_size = sizeof(uint32_t); + } + + if ((in_query_props & Anvil::QueryResultFlagBits::PARTIAL_BIT) != 0) + { + flags |= VK_QUERY_RESULT_PARTIAL_BIT; + + if (m_query_type == VK_QUERY_TYPE_TIMESTAMP) + { + anvil_assert(m_query_type != VK_QUERY_TYPE_TIMESTAMP); + + goto end; + } + } + + if ((in_query_props & Anvil::QueryResultFlagBits::WAIT_BIT) != 0) + { + flags |= VK_QUERY_RESULT_WAIT_BIT; + } + + if ((in_query_props & Anvil::QueryResultFlagBits::WITH_AVAILABILITY_BIT) != 0) + { + flags |= VK_QUERY_RESULT_WITH_AVAILABILITY_BIT; + result_query_size *= 2; + } + + /* Execute the request */ + result_vk = Anvil::Vulkan::vkGetQueryPoolResults(m_device_ptr->get_device_vk(), + m_query_pool_vk, + in_first_query_index, + in_n_queries, + result_query_size * in_n_queries, + out_results_ptr, + (in_should_return_uint64) ? 
sizeof(uint64_t) : sizeof(uint32_t), + flags); + + if ((in_query_props & Anvil::QueryResultFlagBits::PARTIAL_BIT) != 0) + { + result = is_vk_call_successful(result_vk); + *out_all_query_results_retrieved_ptr = (result_vk == VK_SUCCESS); + } + else + { + result = is_vk_call_successful(result_vk); + *out_all_query_results_retrieved_ptr = (result); + } + + /* All done */ +end: + return result; +} + /* Please see header for specification */ -void Anvil::QueryPool::init(std::weak_ptr in_device_ptr, - VkQueryType in_query_type, - VkFlags in_query_flags, - uint32_t in_n_max_concurrent_queries) +void Anvil::QueryPool::init(VkQueryType in_query_type, + Anvil::QueryPipelineStatisticFlags in_pipeline_statistics, + uint32_t in_n_max_concurrent_queries) { - VkQueryPoolCreateInfo create_info; - std::shared_ptr device_locked_ptr(m_device_ptr); - VkResult result_vk (VK_ERROR_INITIALIZATION_FAILED); + VkQueryPoolCreateInfo create_info; + VkResult result_vk (VK_ERROR_INITIALIZATION_FAILED); ANVIL_REDUNDANT_VARIABLE(result_vk); create_info.flags = 0; - create_info.pipelineStatistics = (in_query_type == VK_QUERY_TYPE_PIPELINE_STATISTICS) ? in_query_flags : 0; + create_info.pipelineStatistics = (in_query_type == VK_QUERY_TYPE_PIPELINE_STATISTICS) ? in_pipeline_statistics.get_vk() + : 0; create_info.pNext = nullptr; create_info.queryCount = in_n_max_concurrent_queries; create_info.queryType = in_query_type; create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO; - result_vk = vkCreateQueryPool(device_locked_ptr->get_device_vk(), - &create_info, - nullptr, /* pAllocator */ - &m_query_pool_vk); + result_vk = Anvil::Vulkan::vkCreateQueryPool(m_device_ptr->get_device_vk(), + &create_info, + nullptr, /* pAllocator */ + &m_query_pool_vk); anvil_assert_vk_call_succeeded(result_vk); if (is_vk_call_successful(result_vk) ) diff --git a/src/wrappers/queue.cpp b/src/wrappers/queue.cpp index c8a8688b..35c89261 100644 --- a/src/wrappers/queue.cpp +++ b/src/wrappers/queue.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -21,13 +21,17 @@ // #include "misc/debug.h" +#include "misc/fence_create_info.h" #include "misc/object_tracker.h" +#include "misc/struct_chainer.h" +#include "misc/swapchain_create_info.h" #include "misc/window.h" #include "wrappers/buffer.h" #include "wrappers/command_buffer.h" #include "wrappers/device.h" #include "wrappers/fence.h" #include "wrappers/instance.h" +#include "wrappers/memory_block.h" #include "wrappers/queue.h" #include "wrappers/rendering_surface.h" #include "wrappers/semaphore.h" @@ -37,65 +41,140 @@ /** Please see header for specification */ -Anvil::Queue::Queue(std::weak_ptr in_device_ptr, - uint32_t in_queue_family_index, - uint32_t in_queue_index) - - :CallbacksSupportProvider (QUEUE_CALLBACK_ID_COUNT), - DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT), - m_device_ptr (in_device_ptr), - m_queue (VK_NULL_HANDLE), - m_queue_family_index (in_queue_family_index), - m_queue_index (in_queue_index) +Anvil::Queue::Queue(const Anvil::BaseDevice* in_device_ptr, + uint32_t in_queue_family_index, + uint32_t in_queue_index, + bool in_mt_safe, + const Anvil::QueueGlobalPriority& in_global_priority) + + :CallbacksSupportProvider (QUEUE_CALLBACK_ID_COUNT), + DebugMarkerSupportProvider (in_device_ptr, + Anvil::ObjectType::QUEUE), + MTSafetySupportProvider (in_mt_safe), + m_device_ptr (in_device_ptr), + m_n_debug_label_regions_started(0), + m_queue (VK_NULL_HANDLE), + m_queue_family_index (in_queue_family_index), + m_queue_global_priority (in_global_priority), + m_queue_index (in_queue_index) { - std::shared_ptr device_locked_ptr(in_device_ptr); - /* Retrieve the Vulkan handle */ - vkGetDeviceQueue(device_locked_ptr->get_device_vk(), - in_queue_family_index, - in_queue_index, - &m_queue); + Anvil::Vulkan::vkGetDeviceQueue(m_device_ptr->get_device_vk(), + in_queue_family_index, + in_queue_index, + &m_queue); anvil_assert(m_queue != VK_NULL_HANDLE); - /* Determine whether the queue supports sparse bindings */ - m_supports_sparse_bindings = !!(device_locked_ptr->get_queue_family_info(in_queue_family_index)->flags & VK_QUEUE_SPARSE_BINDING_BIT); + /* Determine additional properties of the queue */ + m_supports_protected_memory_operations = (m_device_ptr->get_queue_family_info(in_queue_family_index)->flags & Anvil::QueueFlagBits::PROTECTED_BIT) != 0; + m_supports_sparse_bindings = (m_device_ptr->get_queue_family_info(in_queue_family_index)->flags & Anvil::QueueFlagBits::SPARSE_BINDING_BIT) != 0; /* Cache a fence that may be optionally used for submissions */ - m_submit_fence_ptr = Anvil::Fence::create(m_device_ptr, - false /* create_signalled */); + { + auto create_info_ptr = Anvil::FenceCreateInfo::create(m_device_ptr, + false); /* create_signalled */ + + create_info_ptr->set_mt_safety(Anvil::Utils::convert_boolean_to_mt_safety_enum(is_mt_safe()) ); + + m_submit_fence_ptr = Anvil::Fence::create(std::move(create_info_ptr) ); + } /* OK, register the wrapper instance and leave */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_QUEUE, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::QUEUE, this); } /** Please see header for specification */ Anvil::Queue::~Queue() { - m_submit_fence_ptr.reset(); + anvil_assert(m_n_debug_label_regions_started == 0); - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_QUEUE, + 
Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::QUEUE, this); } /** Please see header for specification */ -bool Anvil::Queue::bind_sparse_memory(Anvil::Utils::SparseMemoryBindingUpdateInfo& in_update) +void Anvil::Queue::begin_debug_utils_label(const char* in_label_name_ptr, + const float* in_color_vec4_ptr) { - const VkBindSparseInfo* bind_info_items = nullptr; - std::shared_ptr fence_ptr; - uint32_t n_bind_info_items = 0; - VkResult result = VK_ERROR_INITIALIZATION_FAILED; + if (!m_device_ptr->get_parent_instance()->get_enabled_extensions_info()->ext_debug_utils() ) + { + goto end; + } + + { + const auto& entrypoints = m_device_ptr->get_parent_instance()->get_extension_ext_debug_utils_entrypoints(); + VkDebugUtilsLabelEXT label_info; + + label_info.color[0] = in_color_vec4_ptr[0]; + label_info.color[1] = in_color_vec4_ptr[1]; + label_info.color[2] = in_color_vec4_ptr[2]; + label_info.color[3] = in_color_vec4_ptr[3]; + label_info.pLabelName = in_label_name_ptr; + label_info.pNext = nullptr; + label_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT; + + entrypoints.vkQueueBeginDebugUtilsLabelEXT(m_queue, + &label_info); + } + + ++m_n_debug_label_regions_started; +end: + ; +} + +/** Please see header for specification */ +bool Anvil::Queue::bind_sparse_memory(Anvil::SparseMemoryBindingUpdateInfo& in_update) +{ + const VkBindSparseInfo* bind_info_items = nullptr; + Anvil::Fence* fence_ptr = nullptr; + const bool mt_safe = is_mt_safe(); + uint32_t n_bind_info_items = 0; + VkResult result = VK_ERROR_INITIALIZATION_FAILED; in_update.get_bind_sparse_call_args(&n_bind_info_items, &bind_info_items, &fence_ptr); - result = vkQueueBindSparse(m_queue, - n_bind_info_items, - bind_info_items, - (fence_ptr != nullptr) ? fence_ptr->get_fence() : VK_NULL_HANDLE); + /* If any of the bindings we are about to request requires non-zero memory or resource device indices, + * make sure we're using a mGPU device */ + if (in_update.is_device_group_support_required() ) + { + const Anvil::MGPUDevice* mgpu_device_ptr(dynamic_cast(m_device_ptr)); + + if (mgpu_device_ptr == nullptr) + { + anvil_assert(mgpu_device_ptr != nullptr); + + goto end; + } + + if (!mgpu_device_ptr->is_extension_enabled(VK_KHR_DEVICE_GROUP_EXTENSION_NAME) ) + { + anvil_assert(mgpu_device_ptr->is_extension_enabled(VK_KHR_DEVICE_GROUP_EXTENSION_NAME)); + + goto end; + } + } + + if (mt_safe) + { + bind_sparse_memory_lock_unlock(in_update, + true); /* in_should_lock */ + } + { + result = Anvil::Vulkan::vkQueueBindSparse(m_queue, + n_bind_info_items, + bind_info_items, + (fence_ptr != nullptr) ? 
fence_ptr->get_fence() : VK_NULL_HANDLE); + } + if (mt_safe) + { + bind_sparse_memory_lock_unlock(in_update, + false); /* in_should_lock */ + } + anvil_assert(result == VK_SUCCESS); for (uint32_t n_bind_info = 0; @@ -119,11 +198,12 @@ bool Anvil::Queue::bind_sparse_memory(Anvil::Utils::SparseMemoryBindingUpdateInf n_buffer_memory_update < n_buffer_memory_updates; ++n_buffer_memory_update) { - VkDeviceSize alloc_size = UINT64_MAX; - VkDeviceSize buffer_memory_start_offset = UINT64_MAX; - std::shared_ptr buffer_ptr; - std::shared_ptr memory_block_ptr; - VkDeviceSize memory_block_start_offset; + VkDeviceSize alloc_size = UINT64_MAX; + VkDeviceSize buffer_memory_start_offset = UINT64_MAX; + Anvil::Buffer* buffer_ptr = nullptr; + bool memory_block_owned_by_buffer = false; + Anvil::MemoryBlock* memory_block_ptr = nullptr; + VkDeviceSize memory_block_start_offset; in_update.get_buffer_memory_update_properties(n_bind_info, n_buffer_memory_update, @@ -131,9 +211,11 @@ bool Anvil::Queue::bind_sparse_memory(Anvil::Utils::SparseMemoryBindingUpdateInf &buffer_memory_start_offset, &memory_block_ptr, &memory_block_start_offset, + &memory_block_owned_by_buffer, &alloc_size); buffer_ptr->set_memory_sparse(memory_block_ptr, + memory_block_owned_by_buffer, memory_block_start_offset, buffer_memory_start_offset, alloc_size); @@ -143,13 +225,14 @@ bool Anvil::Queue::bind_sparse_memory(Anvil::Utils::SparseMemoryBindingUpdateInf n_image_memory_update < n_image_memory_updates; ++n_image_memory_update) { - std::shared_ptr image_ptr; - VkExtent3D extent; - VkSparseMemoryBindFlags flags; - std::shared_ptr memory_block_ptr; - VkDeviceSize memory_block_start_offset; - VkOffset3D offset; - VkImageSubresource subresource; + Anvil::Image* image_ptr = nullptr; + VkExtent3D extent; + Anvil::SparseMemoryBindFlags flags; + bool memory_block_owned_by_image = false; + Anvil::MemoryBlock* memory_block_ptr = nullptr; + VkDeviceSize memory_block_start_offset; + VkOffset3D offset; + Anvil::ImageSubresource subresource; in_update.get_image_memory_update_properties(n_bind_info, n_image_memory_update, @@ -159,26 +242,29 @@ bool Anvil::Queue::bind_sparse_memory(Anvil::Utils::SparseMemoryBindingUpdateInf &extent, &flags, &memory_block_ptr, - &memory_block_start_offset); - + &memory_block_start_offset, + &memory_block_owned_by_image); image_ptr->on_memory_backing_update(subresource, offset, extent, memory_block_ptr, - memory_block_start_offset); + memory_block_start_offset, + memory_block_owned_by_image); } for (uint32_t n_image_opaque_memory_update = 0; n_image_opaque_memory_update < n_image_opaque_memory_updates; ++n_image_opaque_memory_update) { - VkSparseMemoryBindFlags flags; - std::shared_ptr image_ptr; - std::shared_ptr memory_block_ptr; - VkDeviceSize memory_block_start_offset; - VkDeviceSize resource_offset; - VkDeviceSize size; + Anvil::SparseMemoryBindFlags flags; + Anvil::Image* image_ptr = nullptr; + bool memory_block_owned_by_image = false; + Anvil::MemoryBlock* memory_block_ptr = nullptr; + VkDeviceSize memory_block_start_offset; + uint32_t n_plane = UINT32_MAX; + VkDeviceSize resource_offset; + VkDeviceSize size; in_update.get_image_opaque_memory_update_properties(n_bind_info, n_image_opaque_memory_update, @@ -187,99 +273,628 @@ bool Anvil::Queue::bind_sparse_memory(Anvil::Utils::SparseMemoryBindingUpdateInf &size, &flags, &memory_block_ptr, - &memory_block_start_offset); + &memory_block_start_offset, + &memory_block_owned_by_image, + &n_plane); - image_ptr->on_memory_backing_opaque_update(resource_offset, + 
image_ptr->on_memory_backing_opaque_update(n_plane, + resource_offset, size, memory_block_ptr, - memory_block_start_offset); + memory_block_start_offset, + memory_block_owned_by_image); } } +end: return (result == VK_SUCCESS); } +void Anvil::Queue::bind_sparse_memory_lock_unlock(Anvil::SparseMemoryBindingUpdateInfo& in_update, + bool in_should_lock) +{ + const VkBindSparseInfo* bind_info_items = nullptr; + Anvil::Fence* fence_ptr = nullptr; + uint32_t n_bind_info_items = 0; + + in_update.get_bind_sparse_call_args(&n_bind_info_items, + &bind_info_items, + &fence_ptr); + + if (in_should_lock) + { + lock(); + } + else + { + unlock(); + } + + if (fence_ptr != nullptr) + { + if (in_should_lock) + { + fence_ptr->lock(); + } + else + { + fence_ptr->unlock(); + } + } + + for (uint32_t n_bind_info_item = 0; + n_bind_info_item < n_bind_info_items; + ++n_bind_info_item) + { + uint32_t n_buffer_memory_updates = 0; + uint32_t n_image_memory_updates = 0; + uint32_t n_image_opaque_memory_updates = 0; + uint32_t n_signal_sems = 0; + uint32_t n_wait_sems = 0; + Anvil::Semaphore** signal_sem_ptrs = nullptr; + Anvil::Semaphore** wait_sem_ptrs = nullptr; + + in_update.get_bind_info_properties(n_bind_info_item, + &n_buffer_memory_updates, + &n_image_memory_updates, + &n_image_opaque_memory_updates, + &n_signal_sems, + &signal_sem_ptrs, + &n_wait_sems, + &wait_sem_ptrs); + + for (uint32_t n_signal_sem = 0; + n_signal_sem < n_signal_sems; + ++n_signal_sem) + { + if (in_should_lock) + { + signal_sem_ptrs[n_signal_sem]->lock(); + } + else + { + signal_sem_ptrs[n_signal_sem]->unlock(); + } + } + + for (uint32_t n_wait_sem = 0; + n_wait_sem < n_wait_sems; + ++n_wait_sem) + { + if (in_should_lock) + { + wait_sem_ptrs[n_wait_sem]->lock(); + } + else + { + wait_sem_ptrs[n_wait_sem]->unlock(); + } + } + + for (uint32_t n_buffer_memory_update = 0; + n_buffer_memory_update < n_buffer_memory_updates; + ++n_buffer_memory_update) + { + Anvil::Buffer* buffer_ptr = nullptr; + + in_update.get_buffer_memory_update_properties(n_bind_info_item, + n_buffer_memory_update, + &buffer_ptr, + nullptr, /* out_opt_buffer_memory_start_offset_ptr */ + nullptr, /* out_opt_memory_block_ptr */ + nullptr, /* out_opt_memory_block_start_offset_ptr */ + nullptr, /* out_opt_memory_block_owned_by_buffer_ptr */ + nullptr); /* out_opt_size_ptr */ + + if (in_should_lock) + { + buffer_ptr->lock(); + } + else + { + buffer_ptr->unlock(); + } + } + + for (uint32_t n_image_memory_update = 0; + n_image_memory_update < n_image_memory_updates; + ++n_image_memory_update) + { + Anvil::Image* image_ptr = nullptr; + + in_update.get_image_memory_update_properties(n_bind_info_item, + n_image_memory_update, + &image_ptr, + nullptr, /* out_opt_subresource_ptr */ + nullptr, /* out_opt_offset_ptr */ + nullptr, /* out_opt_extent_ptr */ + nullptr, /* out_opt_flags_ptr */ + nullptr, /* out_opt_memory_block_ptr_ptr */ + nullptr, /* out_opt_memory_block_start_offset_ptr */ + nullptr); /* out_opt_memory_block_owned_by_image_ptr */ + + if (in_should_lock) + { + image_ptr->lock(); + } + else + { + image_ptr->unlock(); + } + } + + for (uint32_t n_opaque_image_memory_update = 0; + n_opaque_image_memory_update < n_image_opaque_memory_updates; + ++n_opaque_image_memory_update) + { + Anvil::Image* image_ptr = nullptr; + + in_update.get_image_opaque_memory_update_properties(n_bind_info_item, + n_opaque_image_memory_update, + &image_ptr, + nullptr, /* out_opt_resource_offset_ptr */ + nullptr, /* out_opt_size_ptr */ + nullptr, /* out_opt_flags_ptr */ + nullptr, /* 
out_opt_memory_block_ptr_ptr */ + nullptr, /* out_opt_memory_block_start_offset_ptr */ + nullptr, /* out_opt_memory_block_owned_by_image_ptr */ + nullptr); /* out_opt_n_plane_ptr */ + + if (in_should_lock) + { + image_ptr->lock(); + } + else + { + image_ptr->unlock(); + } + } + } +} + /** Please see header for specification */ -std::shared_ptr Anvil::Queue::create(std::weak_ptr in_device_ptr, - uint32_t in_queue_family_index, - uint32_t in_queue_index) +std::unique_ptr Anvil::Queue::create(const Anvil::BaseDevice* in_device_ptr, + uint32_t in_queue_family_index, + uint32_t in_queue_index, + bool in_mt_safe, + const Anvil::QueueGlobalPriority& in_queue_global_priority) { - std::shared_ptr result_ptr; + std::unique_ptr result_ptr; result_ptr.reset( new Anvil::Queue(in_device_ptr, in_queue_family_index, - in_queue_index) + in_queue_index, + in_mt_safe, + in_queue_global_priority) ); return result_ptr; } /** Please see header for specification */ -VkResult Anvil::Queue::present(std::shared_ptr in_swapchain_ptr, - uint32_t in_swapchain_image_index, - uint32_t in_n_wait_semaphores, - std::shared_ptr* in_wait_semaphore_ptrs) +void Anvil::Queue::end_debug_utils_label() +{ + if (!m_device_ptr->get_parent_instance()->get_enabled_extensions_info()->ext_debug_utils() ) + { + goto end; + } + + if (m_n_debug_label_regions_started == 0) + { + anvil_assert(m_n_debug_label_regions_started != 0); + + goto end; + } + + { + const auto& entrypoints = m_device_ptr->get_parent_instance()->get_extension_ext_debug_utils_entrypoints(); + + entrypoints.vkQueueEndDebugUtilsLabelEXT(m_queue); + } + + --m_n_debug_label_regions_started; +end: + ; +} + +/** Please see header for specification */ +void Anvil::Queue::insert_debug_utils_label(const char* in_label_name_ptr, + const float* in_color_vec4_ptr) +{ + if (!m_device_ptr->get_parent_instance()->get_enabled_extensions_info()->ext_debug_utils() ) + { + goto end; + } + + { + const auto& entrypoints = m_device_ptr->get_parent_instance()->get_extension_ext_debug_utils_entrypoints(); + VkDebugUtilsLabelEXT label_info; + + label_info.color[0] = in_color_vec4_ptr[0]; + label_info.color[1] = in_color_vec4_ptr[1]; + label_info.color[2] = in_color_vec4_ptr[2]; + label_info.color[3] = in_color_vec4_ptr[3]; + label_info.pLabelName = in_label_name_ptr; + label_info.pNext = nullptr; + label_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT; + + entrypoints.vkQueueInsertDebugUtilsLabelEXT(m_queue, + &label_info); + } + +end: + ; +} + +/** Please see header for specification */ +bool Anvil::Queue::present(Anvil::Swapchain* in_swapchain_ptr, + uint32_t in_swapchain_image_index, + uint32_t in_n_wait_semaphores, + Anvil::Semaphore* const* in_wait_semaphore_ptrs, + Anvil::SwapchainOperationErrorCode* out_present_results_ptr) +{ + static const uint32_t device_mask = 0x1; + + return present_internal(Anvil::DeviceGroupPresentModeFlagBits::LOCAL_BIT_KHR, + 1, /* n_swapchain_image_indices */ + &in_swapchain_ptr, + &in_swapchain_image_index, + &device_mask, + in_n_wait_semaphores, + in_wait_semaphore_ptrs, + out_present_results_ptr); +} + +/** Please see header for specification */ +bool Anvil::Queue::present_in_local_presentation_mode(uint32_t in_n_local_mode_presentation_items, + const LocalModePresentationItem* in_local_mode_presentation_items, + uint32_t in_n_wait_semaphores, + Anvil::Semaphore* const* in_wait_semaphore_ptrs, + Anvil::SwapchainOperationErrorCode* out_present_results_ptr) +{ + const Anvil::DeviceType device_type (m_device_ptr->get_type() ); + uint32_t n_swapchains; + 
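/* ---------------------------------------------------------------------------------
 * Editor's note: illustrative sketch only, not part of the patch above. The reworked
 * Queue::present() now returns a bool and reports the per-swapchain outcome through
 * an Anvil::SwapchainOperationErrorCode out-parameter instead of returning VkResult
 * directly. The helper below is a usage example under stated assumptions: queue_ptr,
 * swapchain_ptr, image_index and rendering_done_sem_ptr stand in for objects the
 * application already owns.
 * --------------------------------------------------------------------------------- */
bool present_frame(Anvil::Queue*     queue_ptr,
                   Anvil::Swapchain* swapchain_ptr,
                   uint32_t          image_index,
                   Anvil::Semaphore* rendering_done_sem_ptr)
{
    Anvil::SwapchainOperationErrorCode present_result;

    /* Wait on a single semaphore; present_result can be inspected afterwards to
     * react to suboptimal/out-of-date swapchain states. */
    return queue_ptr->present(swapchain_ptr,
                              image_index,
                              1, /* in_n_wait_semaphores */
                              &rendering_done_sem_ptr,
                              &present_result);
}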
bool result; + + uint32_t device_masks [MAX_SWAPCHAINS]; + uint32_t swapchain_image_indices[MAX_SWAPCHAINS]; + Anvil::Swapchain* swapchains [MAX_SWAPCHAINS]; + + if (device_type == Anvil::DeviceType::SINGLE_GPU) + { + const Anvil::SGPUDevice* sgpu_device_ptr(dynamic_cast(m_device_ptr) ); + + ANVIL_REDUNDANT_VARIABLE(sgpu_device_ptr); + + anvil_assert(in_n_local_mode_presentation_items == 1); + anvil_assert(in_local_mode_presentation_items[0].physical_device_ptr->get_physical_device() == sgpu_device_ptr->get_physical_device()->get_physical_device() ); + + device_masks[0] = 0x1; + n_swapchains = 1; + swapchain_image_indices[0] = in_local_mode_presentation_items[0].swapchain_image_index; + swapchains[0] = in_local_mode_presentation_items[0].swapchain_ptr; + } + else + { + anvil_assert(device_type == Anvil::DeviceType::MULTI_GPU); + anvil_assert(in_n_local_mode_presentation_items < MAX_SWAPCHAINS); + + n_swapchains = in_n_local_mode_presentation_items; + + for (uint32_t n_item = 0; + n_item < in_n_local_mode_presentation_items; + ++n_item) + { + const auto current_item_ptr = in_local_mode_presentation_items + n_item; + + device_masks [n_item] = (1u << current_item_ptr->physical_device_ptr->get_device_group_device_index()); + swapchains [n_item] = current_item_ptr->swapchain_ptr; + swapchain_image_indices[n_item] = current_item_ptr->swapchain_image_index; + } + } + + result = present_internal(Anvil::DeviceGroupPresentModeFlagBits::LOCAL_BIT_KHR, + n_swapchains, + swapchains, + swapchain_image_indices, + device_masks, + in_n_wait_semaphores, + in_wait_semaphore_ptrs, + out_present_results_ptr); + + return result; +} + +/** Please see header for specification */ +bool Anvil::Queue::present_in_local_multi_device_presentation_mode(uint32_t in_n_local_multi_device_mode_presentation_items, + const Anvil::LocalMultiDeviceModePresentationItem* in_local_multi_device_mode_presentation_items, + uint32_t in_n_wait_semaphores, + Anvil::Semaphore* const* in_wait_semaphore_ptrs, + Anvil::SwapchainOperationErrorCode* out_present_results_ptr) +{ + uint32_t n_swapchains; + bool result; + + uint32_t device_masks [MAX_SWAPCHAINS]; + uint32_t swapchain_image_indices[MAX_SWAPCHAINS]; + Anvil::Swapchain* swapchains [MAX_SWAPCHAINS]; + + anvil_assert(in_n_local_multi_device_mode_presentation_items < MAX_SWAPCHAINS); + + n_swapchains = in_n_local_multi_device_mode_presentation_items; + + for (uint32_t n_item = 0; + n_item < in_n_local_multi_device_mode_presentation_items; + ++n_item) + { + const auto current_item_ptr(in_local_multi_device_mode_presentation_items + n_item); + uint32_t device_mask (0); + + for (uint32_t n_physical_device = 0; + n_physical_device < current_item_ptr->n_physical_devices; + ++n_physical_device) + { + const uint32_t device_index = current_item_ptr->physical_devices_ptr[n_physical_device]->get_device_group_device_index(); + + device_mask |= 1 << device_index; + } + + device_masks [n_item] = device_mask; + swapchains [n_item] = current_item_ptr->swapchain_ptr; + swapchain_image_indices[n_item] = current_item_ptr->swapchain_image_index; + } + + result = present_internal(Anvil::DeviceGroupPresentModeFlagBits::LOCAL_MULTI_DEVICE_BIT_KHR, + n_swapchains, + swapchains, + swapchain_image_indices, + device_masks, + in_n_wait_semaphores, + in_wait_semaphore_ptrs, + out_present_results_ptr); + + return result; +} + +/** Please see header for specification */ +bool Anvil::Queue::present_in_remote_presentation_mode(uint32_t in_n_remote_mode_presentation_items, + const Anvil::RemoteModePresentationItem* 
in_remote_mode_presentation_items, + uint32_t in_n_wait_semaphores, + Anvil::Semaphore* const* in_wait_semaphore_ptrs, + Anvil::SwapchainOperationErrorCode* out_present_results_ptr) +{ + const Anvil::MGPUDevice* mgpu_device_ptr(dynamic_cast(m_device_ptr)); + uint32_t n_swapchains; + bool result; + + uint32_t device_masks [MAX_SWAPCHAINS]; + uint32_t swapchain_image_indices[MAX_SWAPCHAINS]; + Anvil::Swapchain* swapchains [MAX_SWAPCHAINS]; + + ANVIL_REDUNDANT_VARIABLE_CONST(mgpu_device_ptr); + anvil_assert (mgpu_device_ptr != nullptr); + anvil_assert (in_n_remote_mode_presentation_items < MAX_SWAPCHAINS); + + n_swapchains = in_n_remote_mode_presentation_items; + + for (uint32_t n_item = 0; + n_item < in_n_remote_mode_presentation_items; + ++n_item) + { + const auto current_item_ptr = in_remote_mode_presentation_items + n_item; + const uint32_t device_index = current_item_ptr->physical_device_ptr->get_device_group_device_index(); + + #if defined(_DEBUG) + { + const std::vector* present_compatible_physical_devices_ptr; + + mgpu_device_ptr->get_present_compatible_physical_devices(device_index, + &present_compatible_physical_devices_ptr); + + anvil_assert(present_compatible_physical_devices_ptr->size() > 0); + } + #endif + + device_masks [n_item] = (1u << device_index); + swapchains [n_item] = current_item_ptr->swapchain_ptr; + swapchain_image_indices[n_item] = current_item_ptr->swapchain_image_index; + } + + result = present_internal(Anvil::DeviceGroupPresentModeFlagBits::REMOTE_BIT_KHR, + n_swapchains, + swapchains, + swapchain_image_indices, + device_masks, + in_n_wait_semaphores, + in_wait_semaphore_ptrs, + out_present_results_ptr); + + return result; +} + +/** Please see header for specification */ +bool Anvil::Queue::present_in_sum_presentation_mode(uint32_t in_n_sum_mode_presentation_items, + const Anvil::SumModePresentationItem* in_sum_mode_presentation_items, + uint32_t in_n_wait_semaphores, + Anvil::Semaphore* const* in_wait_semaphore_ptrs, + Anvil::SwapchainOperationErrorCode* out_present_results_ptr) +{ + const Anvil::MGPUDevice* mgpu_device_ptr(dynamic_cast(m_device_ptr)); + uint32_t n_swapchains; + bool result; + + uint32_t device_masks [MAX_SWAPCHAINS]; + uint32_t swapchain_image_indices[MAX_SWAPCHAINS]; + Anvil::Swapchain* swapchains [MAX_SWAPCHAINS]; + + ANVIL_REDUNDANT_VARIABLE_CONST(mgpu_device_ptr); + anvil_assert (mgpu_device_ptr != nullptr); + anvil_assert (in_n_sum_mode_presentation_items < MAX_SWAPCHAINS); + + n_swapchains = in_n_sum_mode_presentation_items; + + for (uint32_t n_item = 0; + n_item < in_n_sum_mode_presentation_items; + ++n_item) + { + const auto current_item_ptr (in_sum_mode_presentation_items + n_item); + uint32_t device_mask (0); + + #ifdef _DEBUG + const uint32_t n_physical_devices = mgpu_device_ptr->get_n_physical_devices(); + bool request_supported = false; + + anvil_assert(mgpu_device_ptr->get_supported_present_modes_for_surface(current_item_ptr->swapchain_ptr->get_create_info_ptr()->get_rendering_surface() ) != Anvil::DeviceGroupPresentModeFlagBits::NONE); + + /* Make sure at least one physical device supports SUM presentation mode for all the specified physical devices */ + for (uint32_t n_physical_device = 0; + n_physical_device < n_physical_devices && !request_supported; + ++n_physical_device) + { + const std::vector* compatible_physical_devices_ptr; + + if (!mgpu_device_ptr->get_present_compatible_physical_devices(n_physical_device, + &compatible_physical_devices_ptr) ) + { + anvil_assert_fail(); + + continue; + } + + request_supported = true; + 
+ for (uint32_t n_checked_physical_device = 0; + n_checked_physical_device < current_item_ptr->n_physical_devices && request_supported; + ++n_checked_physical_device) + { + const auto& physical_device_ptr = current_item_ptr->physical_devices_ptr[n_checked_physical_device]; + bool match_found = false; + + for (uint32_t n_compatible_physical_device = 0; + n_compatible_physical_device < compatible_physical_devices_ptr->size() && !match_found; + ++n_compatible_physical_device) + { + auto compatible_physical_device_ptr = compatible_physical_devices_ptr->at(n_compatible_physical_device); + + if (compatible_physical_device_ptr == physical_device_ptr) + { + match_found = true; + } + } + + request_supported = match_found; + } + } + + anvil_assert(request_supported); + #endif + + for (uint32_t n_physical_device = 0; + n_physical_device < current_item_ptr->n_physical_devices; + ++n_physical_device) + { + const uint32_t device_index = current_item_ptr->physical_devices_ptr[n_physical_device]->get_device_group_device_index(); + + device_mask |= 1 << device_index; + } + + device_masks [n_item] = device_mask; + swapchains [n_item] = current_item_ptr->swapchain_ptr; + swapchain_image_indices[n_item] = current_item_ptr->swapchain_image_index; + } + + result = present_internal(Anvil::DeviceGroupPresentModeFlagBits::SUM_BIT_KHR, + n_swapchains, + swapchains, + swapchain_image_indices, + device_masks, + in_n_wait_semaphores, + in_wait_semaphore_ptrs, + out_present_results_ptr); + + return result; +} + +/** Please see header for specification */ +bool Anvil::Queue::present_internal(DeviceGroupPresentModeFlagBits in_presentation_mode, + uint32_t in_n_swapchains, + Anvil::Swapchain* const* in_swapchains, + const uint32_t* in_swapchain_image_indices, + const uint32_t* in_device_masks, + uint32_t in_n_wait_semaphores, + Anvil::Semaphore* const* in_wait_semaphore_ptrs, + Anvil::SwapchainOperationErrorCode* out_present_results_ptr) { - std::shared_ptr device_locked_ptr (m_device_ptr); - VkPresentInfoKHR image_presentation_info; - VkResult presentation_results [MAX_SWAPCHAINS]; - VkResult result; - const auto& swapchain_entrypoints (m_device_ptr.lock()->get_extension_khr_swapchain_entrypoints() ); - VkSwapchainKHR swapchains_vk [MAX_SWAPCHAINS]; - VkSemaphore wait_semaphores_vk [8]; + const Anvil::DeviceType device_type (m_device_ptr->get_type() ); + VkResult presentation_results [MAX_SWAPCHAINS]; + bool result (false); + VkResult result_vk; + Anvil::StructChainer struct_chainer; + const ExtensionKHRSwapchainEntrypoints* swapchain_entrypoints_ptr(nullptr); + VkSwapchainKHR swapchains_vk [MAX_SWAPCHAINS]; + std::vector wait_semaphores_vk (in_n_wait_semaphores); /* Sanity checks */ - anvil_assert(in_n_wait_semaphores < sizeof(wait_semaphores_vk) / sizeof(wait_semaphores_vk[0]) ); - anvil_assert(in_swapchain_ptr != nullptr); + anvil_assert(in_n_swapchains < MAX_SWAPCHAINS); + anvil_assert(in_swapchains != nullptr); + + if (device_type == Anvil::DeviceType::SINGLE_GPU) + { + anvil_assert(*in_device_masks == 1); + anvil_assert(in_n_swapchains == 1); + anvil_assert(in_presentation_mode == Anvil::DeviceGroupPresentModeFlagBits::LOCAL_BIT_KHR); + } /* If the application is only interested in off-screen rendering, do *not* post the present request, * since the fake swapchain image is not presentable. We still have to wait on the user-specified * semaphores though. 
*/ - if (in_swapchain_ptr != nullptr) + if (in_swapchains[0] != nullptr) { - std::shared_ptr window_locked_ptr; - std::weak_ptr window_ptr; + Anvil::Window* window_ptr = nullptr; - window_ptr = in_swapchain_ptr->get_window(); + window_ptr = in_swapchains[0]->get_create_info_ptr()->get_window(); - if (!window_ptr.expired() ) + if (window_ptr != nullptr) { - window_locked_ptr = window_ptr.lock(); - } - - if (window_locked_ptr != nullptr) - { - const WindowPlatform window_platform = window_locked_ptr->get_platform(); + const WindowPlatform window_platform = window_ptr->get_platform(); if (window_platform == WINDOW_PLATFORM_DUMMY || window_platform == WINDOW_PLATFORM_DUMMY_WITH_PNG_SNAPSHOTS) { - static const VkPipelineStageFlags dst_stage_mask(VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT); + static const Anvil::PipelineStageFlags dst_stage_mask(Anvil::PipelineStageFlagBits::TOP_OF_PIPE_BIT); - device_locked_ptr->get_universal_queue(0)->submit_command_buffer_with_wait_semaphores(nullptr, /* cmd_buffer_ptr */ - in_n_wait_semaphores, - in_wait_semaphore_ptrs, - &dst_stage_mask, - true); /* should_block */ + m_device_ptr->get_universal_queue(0)->submit( + SubmitInfo::create_wait(in_n_wait_semaphores, + in_wait_semaphore_ptrs, + &dst_stage_mask) + ); for (uint32_t n_presentation = 0; - n_presentation < 1; + n_presentation < in_n_swapchains; ++n_presentation) { - OnPresentRequestIssuedCallbackArgument callback_argument(in_swapchain_ptr.get() ); + OnPresentRequestIssuedCallbackArgument callback_argument(in_swapchains[n_presentation]); CallbacksSupportProvider::callback(QUEUE_CALLBACK_ID_PRESENT_REQUEST_ISSUED, &callback_argument); } - result = VK_SUCCESS; + result = true; goto end; } } } - swapchains_vk[0] = in_swapchain_ptr->get_swapchain_vk(); + /* Convert arrays of Anvil objects to raw Vulkan handle arrays */ + for (uint32_t n_swapchain = 0; + n_swapchain < in_n_swapchains; + ++n_swapchain) + { + anvil_assert(in_swapchains[n_swapchain] != nullptr); + + swapchains_vk[n_swapchain] = in_swapchains[n_swapchain]->get_swapchain_vk(); + } for (uint32_t n_wait_semaphore = 0; n_wait_semaphore < in_n_wait_semaphores; @@ -288,170 +903,633 @@ VkResult Anvil::Queue::present(std::shared_ptr in_swapchain_p wait_semaphores_vk[n_wait_semaphore] = in_wait_semaphore_ptrs[n_wait_semaphore]->get_semaphore(); } - image_presentation_info.pImageIndices = &in_swapchain_image_index; - image_presentation_info.pNext = nullptr; - image_presentation_info.pResults = presentation_results; - image_presentation_info.pSwapchains = swapchains_vk; - image_presentation_info.pWaitSemaphores = wait_semaphores_vk; - image_presentation_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; - image_presentation_info.swapchainCount = 1; - image_presentation_info.waitSemaphoreCount = in_n_wait_semaphores; + { + VkPresentInfoKHR image_presentation_info; + + image_presentation_info.pImageIndices = in_swapchain_image_indices; + image_presentation_info.pNext = nullptr; + image_presentation_info.pResults = presentation_results; + image_presentation_info.pSwapchains = swapchains_vk; + image_presentation_info.pWaitSemaphores = (in_n_wait_semaphores != 0) ? 
&wait_semaphores_vk.at(0) : nullptr; + image_presentation_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; + image_presentation_info.swapchainCount = in_n_swapchains; + image_presentation_info.waitSemaphoreCount = in_n_wait_semaphores; + + struct_chainer.append_struct(image_presentation_info); + } + + /* For multi-GPU support, we're likely going to need to attach the VkDeviceGroupPresentInfoKHR struct */ + if (device_type == Anvil::DeviceType::MULTI_GPU) + { + VkDeviceGroupPresentInfoKHR device_group_present_info; + + device_group_present_info.mode = static_cast(in_presentation_mode); + device_group_present_info.pDeviceMasks = in_device_masks; + device_group_present_info.pNext = nullptr; + device_group_present_info.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR; + device_group_present_info.swapchainCount = in_n_swapchains; + + struct_chainer.append_struct(device_group_present_info); + } + + swapchain_entrypoints_ptr = &m_device_ptr->get_extension_khr_swapchain_entrypoints(); - result = swapchain_entrypoints.vkQueuePresentKHR(m_queue, - &image_presentation_info); + present_lock_unlock(in_n_swapchains, + in_swapchains, + in_n_wait_semaphores, + in_wait_semaphore_ptrs, + true); + { + auto chain_ptr = struct_chainer.create_chain(); - anvil_assert_vk_call_succeeded(result); + result_vk = swapchain_entrypoints_ptr->vkQueuePresentKHR(m_queue, + chain_ptr->get_root_struct() ); + } + present_lock_unlock(in_n_swapchains, + in_swapchains, + in_n_wait_semaphores, + in_wait_semaphore_ptrs, + false); + + result = (result_vk == VK_SUCCESS); - if (is_vk_call_successful(result) ) + for (uint32_t n_presentation = 0; + n_presentation < in_n_swapchains; + ++n_presentation) { - anvil_assert(is_vk_call_successful(presentation_results[0])); + out_present_results_ptr[n_presentation] = static_cast(presentation_results[n_presentation]); + + { + OnPresentRequestIssuedCallbackArgument callback_argument(in_swapchains[n_presentation]); + + CallbacksSupportProvider::callback(QUEUE_CALLBACK_ID_PRESENT_REQUEST_ISSUED, + &callback_argument); + } + } + +end: + return result; +} + +/** Please see header for specification */ +void Anvil::Queue::present_lock_unlock(uint32_t in_n_swapchains, + const Anvil::Swapchain* const* in_swapchains, + uint32_t in_n_wait_semaphores, + Anvil::Semaphore* const* in_wait_semaphore_ptrs, + bool in_should_lock) +{ + if (in_should_lock) + { + lock(); + } + else + { + unlock(); + } + + for (uint32_t n_semaphore = 0; + n_semaphore < in_n_wait_semaphores; + ++n_semaphore) + { + if (in_should_lock) + { + in_wait_semaphore_ptrs[n_semaphore]->lock(); + } + else + { + in_wait_semaphore_ptrs[n_semaphore]->unlock(); + } + } + + for (uint32_t n_swapchain = 0; + n_swapchain < in_n_swapchains; + ++n_swapchain) + { + if (in_should_lock) + { + in_swapchains[n_swapchain]->lock(); + } + else + { + in_swapchains[n_swapchain]->unlock(); + } + } +} + +/** Please see header for specification */ +VkResult Anvil::Queue::submit(const Anvil::SubmitInfo &in_submit_info) +{ + Anvil::Fence* fence_ptr (in_submit_info.get_fence() ); + bool needs_fence_reset(false); + VkResult result (VK_ERROR_INITIALIZATION_FAILED); + Anvil::StructChainer struct_chainer; + + std::vector cmd_buffers_vk (in_submit_info.get_n_command_buffers () ); + std::vector device_memory_block_vec(0); + std::vector signal_semaphores_vk (in_submit_info.get_n_signal_semaphores() ); + std::vector wait_semaphores_vk (in_submit_info.get_n_wait_semaphores () ); + + std::vector cmd_buffer_device_masks = std::vector(in_submit_info.get_n_command_buffers() 
); + std::vector signal_semaphore_device_indices = std::vector(in_submit_info.get_n_signal_semaphores() ); + std::vector wait_semaphore_device_indices = std::vector(in_submit_info.get_n_wait_semaphores () ); + - /* Return the most important error code reported */ - if (result != VK_ERROR_DEVICE_LOST) + ANVIL_REDUNDANT_VARIABLE(result); + + /* Prepare for the submission */ + switch (in_submit_info.get_type() ) + { + case SubmissionType::MGPU: { - switch (presentation_results[0]) + uint32_t n_cmd_buffers = 0; + + if (in_submit_info.is_protected_submission() ) { - case VK_ERROR_DEVICE_LOST: - { - result = VK_ERROR_DEVICE_LOST; + anvil_assert(reinterpret_cast(m_device_ptr)->get_physical_device(0)->supports_core_vk1_1() ); + } - break; - } + for (uint32_t n_command_buffer_submission = 0; + n_command_buffer_submission < in_submit_info.get_n_command_buffers(); + ++n_command_buffer_submission) + { + const auto& current_submission = in_submit_info.get_command_buffers_mgpu()[n_command_buffer_submission]; - case VK_ERROR_SURFACE_LOST_KHR: + if (current_submission.cmd_buffer_ptr != nullptr) { - if (result != VK_ERROR_DEVICE_LOST) - { - result = VK_ERROR_SURFACE_LOST_KHR; - } + cmd_buffers_vk.at (n_cmd_buffers) = current_submission.cmd_buffer_ptr->get_command_buffer(); + cmd_buffer_device_masks.at(n_cmd_buffers) = current_submission.device_mask; - break; + ++n_cmd_buffers; } + } - case VK_ERROR_OUT_OF_DATE_KHR: - { - if (result != VK_ERROR_DEVICE_LOST && - result != VK_ERROR_SURFACE_LOST_KHR) - { - result = VK_ERROR_OUT_OF_DATE_KHR; - } + for (uint32_t n_signal_semaphore_submission = 0; + n_signal_semaphore_submission < in_submit_info.get_n_signal_semaphores(); + ++n_signal_semaphore_submission) + { + const auto& current_submission = in_submit_info.get_signal_semaphores_mgpu()[n_signal_semaphore_submission]; - break; - } + anvil_assert(current_submission.device_index < reinterpret_cast(m_device_ptr)->get_n_physical_devices() ); - case VK_SUBOPTIMAL_KHR: - { - if (result != VK_ERROR_DEVICE_LOST && - result != VK_ERROR_SURFACE_LOST_KHR && - result != VK_ERROR_OUT_OF_DATE_KHR) - { - result = VK_SUBOPTIMAL_KHR; - } + signal_semaphore_device_indices.at(n_signal_semaphore_submission) = current_submission.device_index; + signal_semaphores_vk.at (n_signal_semaphore_submission) = current_submission.semaphore_ptr->get_semaphore(); + } - break; - } + for (uint32_t n_wait_semaphore_submission = 0; + n_wait_semaphore_submission < in_submit_info.get_n_wait_semaphores(); + ++n_wait_semaphore_submission) + { + const auto& current_submission = in_submit_info.get_wait_semaphores_mgpu()[n_wait_semaphore_submission]; - default: - { - anvil_assert(presentation_results[0] == VK_SUCCESS); - } + anvil_assert(current_submission.device_index < reinterpret_cast(m_device_ptr)->get_n_physical_devices() ); + + wait_semaphore_device_indices.at(n_wait_semaphore_submission) = current_submission.device_index; + wait_semaphores_vk.at (n_wait_semaphore_submission) = current_submission.semaphore_ptr->get_semaphore(); + } + + { + VkSubmitInfo submit_info; + + submit_info.commandBufferCount = in_submit_info.get_n_command_buffers(); + submit_info.pCommandBuffers = (submit_info.commandBufferCount != 0) ? &cmd_buffers_vk.at(0) : nullptr; + submit_info.pNext = nullptr; + submit_info.pSignalSemaphores = (in_submit_info.get_n_signal_semaphores() != 0) ? 
&signal_semaphores_vk.at(0) : nullptr; + submit_info.pWaitDstStageMask = in_submit_info.get_destination_stage_wait_masks(); + submit_info.pWaitSemaphores = (in_submit_info.get_n_wait_semaphores() != 0) ? &wait_semaphores_vk.at(0) : nullptr; + submit_info.signalSemaphoreCount = in_submit_info.get_n_signal_semaphores(); + submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; + submit_info.waitSemaphoreCount = in_submit_info.get_n_wait_semaphores(); + + struct_chainer.append_struct(submit_info); + } + + { + VkDeviceGroupSubmitInfoKHR submit_info_device_group; + + submit_info_device_group.commandBufferCount = n_cmd_buffers; + submit_info_device_group.pCommandBufferDeviceMasks = (n_cmd_buffers != 0) ? &cmd_buffer_device_masks.at(0) : nullptr; + submit_info_device_group.pNext = nullptr; + submit_info_device_group.pSignalSemaphoreDeviceIndices = (in_submit_info.get_n_signal_semaphores() != 0) ? &signal_semaphore_device_indices.at(0) : nullptr; + submit_info_device_group.pWaitSemaphoreDeviceIndices = (in_submit_info.get_n_wait_semaphores () != 0) ? &wait_semaphore_device_indices.at (0) : nullptr; + submit_info_device_group.signalSemaphoreCount = in_submit_info.get_n_signal_semaphores(); + submit_info_device_group.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR; + submit_info_device_group.waitSemaphoreCount = in_submit_info.get_n_wait_semaphores(); + + struct_chainer.append_struct(submit_info_device_group); + } + + break; + } + + case SubmissionType::SGPU: + { + VkSubmitInfo submit_info; + + if (in_submit_info.is_protected_submission() ) + { + anvil_assert(reinterpret_cast(m_device_ptr)->get_physical_device()->supports_core_vk1_1() ); + } + + for (uint32_t n_command_buffer = 0; + n_command_buffer < in_submit_info.get_n_command_buffers(); + ++n_command_buffer) + { + cmd_buffers_vk.at(n_command_buffer) = in_submit_info.get_command_buffers_sgpu()[n_command_buffer]->get_command_buffer(); } + for (uint32_t n_signal_semaphore = 0; + n_signal_semaphore < in_submit_info.get_n_signal_semaphores(); + ++n_signal_semaphore) { - OnPresentRequestIssuedCallbackArgument callback_argument(in_swapchain_ptr.get() ); + auto sem_ptr = in_submit_info.get_signal_semaphores_sgpu()[n_signal_semaphore]; - CallbacksSupportProvider::callback(QUEUE_CALLBACK_ID_PRESENT_REQUEST_ISSUED, - &callback_argument); + signal_semaphores_vk.at(n_signal_semaphore) = sem_ptr->get_semaphore(); } + + for (uint32_t n_wait_semaphore = 0; + n_wait_semaphore < in_submit_info.get_n_wait_semaphores(); + ++n_wait_semaphore) + { + wait_semaphores_vk.at(n_wait_semaphore) = in_submit_info.get_wait_semaphores_sgpu()[n_wait_semaphore]->get_semaphore(); + } + + submit_info.commandBufferCount = in_submit_info.get_n_command_buffers (); + submit_info.pCommandBuffers = (in_submit_info.get_n_command_buffers() != 0) ? &cmd_buffers_vk.at(0) : nullptr; + submit_info.pNext = nullptr; + submit_info.pSignalSemaphores = (in_submit_info.get_n_signal_semaphores() != 0) ? &signal_semaphores_vk.at(0) : nullptr; + submit_info.pWaitDstStageMask = in_submit_info.get_destination_stage_wait_masks(); + submit_info.pWaitSemaphores = (in_submit_info.get_n_wait_semaphores() != 0) ? 
&wait_semaphores_vk.at(0) : nullptr; + submit_info.signalSemaphoreCount = in_submit_info.get_n_signal_semaphores(); + submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; + submit_info.waitSemaphoreCount = in_submit_info.get_n_wait_semaphores(); + + struct_chainer.append_struct(submit_info); + + break; + } + + default: + { + anvil_assert_fail(); } } -end: - return result; -} + /* Any additional structs to chain? */ + #if defined(_WIN32) + { + const uint64_t* d3d12_fence_signal_semaphore_values_ptr = nullptr; + const uint64_t* d3d12_fence_wait_semaphore_values_ptr = nullptr; -/** Please see header for specification */ -void Anvil::Queue::submit_command_buffers(uint32_t in_n_command_buffers, - std::shared_ptr const* in_opt_cmd_buffer_ptrs, - uint32_t in_n_semaphores_to_signal, - std::shared_ptr const* in_opt_semaphore_to_signal_ptr_ptrs, - uint32_t in_n_semaphores_to_wait_on, - std::shared_ptr const* in_opt_semaphore_to_wait_on_ptr_ptrs, - const VkPipelineStageFlags* in_opt_dst_stage_masks_to_wait_on_ptrs, - bool in_should_block, - std::shared_ptr in_opt_fence_ptr) -{ - VkResult result (VK_ERROR_INITIALIZATION_FAILED); - VkSubmitInfo submit_info; + if (in_submit_info.get_d3d12_fence_semaphore_values(&d3d12_fence_signal_semaphore_values_ptr, + &d3d12_fence_wait_semaphore_values_ptr) ) + { + VkD3D12FenceSubmitInfoKHR fence_info; - VkCommandBuffer cmd_buffers_vk [64]; - VkSemaphore signal_semaphores_vk[64]; - VkSemaphore wait_semaphores_vk [64]; + fence_info.pNext = nullptr; + fence_info.pSignalSemaphoreValues = d3d12_fence_signal_semaphore_values_ptr; + fence_info.pWaitSemaphoreValues = d3d12_fence_wait_semaphore_values_ptr; + fence_info.signalSemaphoreValuesCount = in_submit_info.get_n_signal_semaphores(); + fence_info.sType = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR; + fence_info.waitSemaphoreValuesCount = in_submit_info.get_n_wait_semaphores(); - ANVIL_REDUNDANT_VARIABLE(result); + struct_chainer.append_struct(fence_info); + } + } + #endif - /* Sanity checks */ - anvil_assert(in_n_command_buffers < sizeof(cmd_buffers_vk) / sizeof(cmd_buffers_vk [0]) ); - anvil_assert(in_n_semaphores_to_signal < sizeof(signal_semaphores_vk) / sizeof(signal_semaphores_vk[0]) ); - anvil_assert(in_n_semaphores_to_wait_on < sizeof(wait_semaphores_vk) / sizeof(wait_semaphores_vk [0]) ); + #if defined(_WIN32) + { + const Anvil::MemoryBlock** acquire_d3d11_memory_block_ptrs = nullptr; + const uint64_t* acquire_mutex_key_value_ptrs = nullptr; + const uint32_t* acquire_timeout_ptrs = nullptr; + uint32_t n_acquire_keys = 0; + uint32_t n_release_keys = 0; + const Anvil::MemoryBlock** release_d3d11_memory_block_ptrs = nullptr; + const uint64_t* release_mutex_key_value_ptrs = nullptr; + + if (in_submit_info.get_keyed_mutex_acquire_release_info(&n_acquire_keys, + &acquire_d3d11_memory_block_ptrs, + &acquire_mutex_key_value_ptrs, + &acquire_timeout_ptrs, + &n_release_keys, + &release_d3d11_memory_block_ptrs, + &release_mutex_key_value_ptrs) ) + { + VkWin32KeyedMutexAcquireReleaseInfoKHR info; - /* Prepare for the submission */ - if (in_opt_fence_ptr == nullptr && - in_should_block) + anvil_assert(n_acquire_keys + n_release_keys > 0); + + device_memory_block_vec.resize(n_acquire_keys + n_release_keys); + + VkDeviceMemory* acquire_sync_ptr = (n_acquire_keys > 0) ? &device_memory_block_vec.at(0) + : nullptr; + VkDeviceMemory* release_sync_ptr = (n_release_keys > 0) ? 
&device_memory_block_vec.at(n_acquire_keys) + : nullptr; + + for (uint32_t n_acquire_sync = 0; + n_acquire_sync < n_acquire_keys; + ++n_acquire_sync) + { + acquire_sync_ptr[n_acquire_sync] = acquire_d3d11_memory_block_ptrs[n_acquire_sync]->get_memory(); + } + + for (uint32_t n_release_sync = 0; + n_release_sync < n_release_keys; + ++n_release_sync) + { + release_sync_ptr[n_release_sync] = release_d3d11_memory_block_ptrs[n_release_sync]->get_memory(); + } + + info.acquireCount = n_acquire_keys; + info.pAcquireKeys = acquire_mutex_key_value_ptrs; + info.pAcquireSyncs = acquire_sync_ptr; + info.pAcquireTimeouts = acquire_timeout_ptrs; + info.pNext = nullptr; + info.pReleaseKeys = release_mutex_key_value_ptrs; + info.pReleaseSyncs = release_sync_ptr; + info.releaseCount = n_release_keys; + info.sType = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR; + + struct_chainer.append_struct(info); + } + } + #endif + + if (in_submit_info.is_protected_submission() ) { - m_submit_fence_ptr->reset(); + VkProtectedSubmitInfo submit_info; + + submit_info.pNext = nullptr; + submit_info.protectedSubmit = VK_TRUE; + submit_info.sType = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO; - in_opt_fence_ptr = m_submit_fence_ptr; + struct_chainer.append_struct(submit_info); + } + + /* Go for it */ + if (fence_ptr == nullptr && + in_submit_info.get_should_block() ) + { + fence_ptr = m_submit_fence_ptr.get(); + needs_fence_reset = true; + } + + switch (in_submit_info.get_type() ) + { + case SubmissionType::MGPU: + { + submit_command_buffers_lock_unlock(in_submit_info.get_n_command_buffers (), + in_submit_info.get_command_buffers_mgpu (), + in_submit_info.get_n_signal_semaphores (), + in_submit_info.get_signal_semaphores_mgpu(), + in_submit_info.get_n_wait_semaphores (), + in_submit_info.get_wait_semaphores_mgpu (), + fence_ptr, + true); + + break; + } + + case SubmissionType::SGPU: + { + submit_command_buffers_lock_unlock(in_submit_info.get_n_command_buffers (), + in_submit_info.get_command_buffers_sgpu (), + in_submit_info.get_n_signal_semaphores (), + in_submit_info.get_signal_semaphores_sgpu(), + in_submit_info.get_n_wait_semaphores (), + in_submit_info.get_wait_semaphores_sgpu (), + fence_ptr, + true); /* in_should_lock */ + + break; + } + + default: + { + anvil_assert_fail(); + } + } + + { + auto chain_ptr = struct_chainer.create_chain(); + + if (needs_fence_reset) + { + m_submit_fence_ptr->reset(); + } + + result = Anvil::Vulkan::vkQueueSubmit(m_queue, + 1, /* submitCount */ + chain_ptr->get_root_struct(), + (fence_ptr != nullptr) ? 
fence_ptr->get_fence() + : VK_NULL_HANDLE); + + if (in_submit_info.get_should_block() ) + { + /* Wait till initialization finishes GPU-side */ + result = Anvil::Vulkan::vkWaitForFences(m_device_ptr->get_device_vk(), + 1, /* fenceCount */ + fence_ptr->get_fence_ptr(), + VK_TRUE, /* waitAll */ + in_submit_info.get_timeout() ); + } + } + + switch (in_submit_info.get_type() ) + { + case SubmissionType::MGPU: + { + submit_command_buffers_lock_unlock(in_submit_info.get_n_command_buffers (), + in_submit_info.get_command_buffers_mgpu (), + in_submit_info.get_n_signal_semaphores (), + in_submit_info.get_signal_semaphores_mgpu(), + in_submit_info.get_n_wait_semaphores (), + in_submit_info.get_wait_semaphores_mgpu (), + fence_ptr, + false); /* in_should_lock */ + + break; + } + + case SubmissionType::SGPU: + { + submit_command_buffers_lock_unlock(in_submit_info.get_n_command_buffers (), + in_submit_info.get_command_buffers_sgpu (), + in_submit_info.get_n_signal_semaphores (), + in_submit_info.get_signal_semaphores_sgpu(), + in_submit_info.get_n_wait_semaphores (), + in_submit_info.get_wait_semaphores_sgpu (), + fence_ptr, + false); /* in_should_lock */ + + break; + } + + default: + { + anvil_assert_fail(); + } + } + + return result; +} + +void Anvil::Queue::submit_command_buffers_lock_unlock(uint32_t in_n_command_buffers, + Anvil::CommandBufferBase* const* in_opt_cmd_buffer_ptrs, + uint32_t in_n_semaphores_to_signal, + Anvil::Semaphore* const* in_opt_semaphore_to_signal_ptr_ptrs, + uint32_t in_n_semaphores_to_wait_on, + Anvil::Semaphore* const* in_opt_semaphore_to_wait_on_ptr_ptrs, + Anvil::Fence* in_opt_fence_ptr, + bool in_should_lock) +{ + if (in_should_lock) + { + lock(); + } + else + { + unlock(); } for (uint32_t n_command_buffer = 0; n_command_buffer < in_n_command_buffers; ++n_command_buffer) { - cmd_buffers_vk[n_command_buffer] = in_opt_cmd_buffer_ptrs[n_command_buffer]->get_command_buffer(); + if (in_should_lock) + { + in_opt_cmd_buffer_ptrs[n_command_buffer]->lock(); + } + else + { + in_opt_cmd_buffer_ptrs[n_command_buffer]->unlock(); + } } for (uint32_t n_signal_semaphore = 0; n_signal_semaphore < in_n_semaphores_to_signal; ++n_signal_semaphore) { - signal_semaphores_vk[n_signal_semaphore] = in_opt_semaphore_to_signal_ptr_ptrs[n_signal_semaphore]->get_semaphore(); + if (in_should_lock) + { + in_opt_semaphore_to_signal_ptr_ptrs[n_signal_semaphore]->lock(); + } + else + { + in_opt_semaphore_to_signal_ptr_ptrs[n_signal_semaphore]->unlock(); + } } for (uint32_t n_wait_semaphore = 0; n_wait_semaphore < in_n_semaphores_to_wait_on; ++n_wait_semaphore) { - wait_semaphores_vk[n_wait_semaphore] = in_opt_semaphore_to_wait_on_ptr_ptrs[n_wait_semaphore]->get_semaphore(); + if (in_should_lock) + { + in_opt_semaphore_to_wait_on_ptr_ptrs[n_wait_semaphore]->lock(); + } + else + { + in_opt_semaphore_to_wait_on_ptr_ptrs[n_wait_semaphore]->unlock(); + } } - submit_info.commandBufferCount = in_n_command_buffers; - submit_info.pCommandBuffers = cmd_buffers_vk; - submit_info.pNext = nullptr; - submit_info.pSignalSemaphores = signal_semaphores_vk; - submit_info.pWaitDstStageMask = in_opt_dst_stage_masks_to_wait_on_ptrs; - submit_info.pWaitSemaphores = wait_semaphores_vk; - submit_info.signalSemaphoreCount = in_n_semaphores_to_signal; - submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; - submit_info.waitSemaphoreCount = in_n_semaphores_to_wait_on; + if (in_opt_fence_ptr != nullptr) + { + if (in_should_lock) + { + in_opt_fence_ptr->lock(); + } + else + { + in_opt_fence_ptr->unlock(); + } + } +} - /* Go for it */ - 
result = vkQueueSubmit(m_queue, - 1, /* submitCount */ - &submit_info, - (in_opt_fence_ptr != nullptr) ? in_opt_fence_ptr->get_fence() - : VK_NULL_HANDLE); - anvil_assert_vk_call_succeeded(result); +void Anvil::Queue::submit_command_buffers_lock_unlock(uint32_t in_n_command_buffer_submissions, + const CommandBufferMGPUSubmission* in_opt_command_buffer_submissions_ptr, + uint32_t in_n_signal_semaphore_submissions, + const SemaphoreMGPUSubmission* in_opt_signal_semaphore_submissions_ptr, + uint32_t in_n_wait_semaphore_submissions, + const SemaphoreMGPUSubmission* in_opt_wait_semaphore_submissions_ptr, + Anvil::Fence* in_opt_fence_ptr, + bool in_should_lock) +{ + if (in_should_lock) + { + lock(); + } + else + { + unlock(); + } - if (in_should_block) + for (uint32_t n_command_buffer_submission = 0; + n_command_buffer_submission < in_n_command_buffer_submissions; + ++n_command_buffer_submission) { - /* Wait till initialization finishes GPU-side */ - result = vkWaitForFences(m_device_ptr.lock()->get_device_vk(), - 1, /* fenceCount */ - in_opt_fence_ptr->get_fence_ptr(), - VK_TRUE, /* waitAll */ - UINT64_MAX); /* timeout */ + const auto& current_submission = in_opt_command_buffer_submissions_ptr[n_command_buffer_submission]; - anvil_assert_vk_call_succeeded(result); + if (current_submission.cmd_buffer_ptr != nullptr) + { + if (in_should_lock) + { + current_submission.cmd_buffer_ptr->lock(); + } + else + { + current_submission.cmd_buffer_ptr->unlock(); + } + } + } + + for (uint32_t n_signal_semaphore_submission = 0; + n_signal_semaphore_submission < in_n_signal_semaphore_submissions; + ++n_signal_semaphore_submission) + { + if (in_should_lock) + { + in_opt_signal_semaphore_submissions_ptr[n_signal_semaphore_submission].semaphore_ptr->lock(); + } + else + { + in_opt_signal_semaphore_submissions_ptr[n_signal_semaphore_submission].semaphore_ptr->unlock(); + } + } + + for (uint32_t n_wait_semaphore_submission = 0; + n_wait_semaphore_submission < in_n_wait_semaphore_submissions; + ++n_wait_semaphore_submission) + { + if (in_should_lock) + { + in_opt_wait_semaphore_submissions_ptr[n_wait_semaphore_submission].semaphore_ptr->lock(); + } + else + { + in_opt_wait_semaphore_submissions_ptr[n_wait_semaphore_submission].semaphore_ptr->unlock(); + } + } + + if (in_opt_fence_ptr != nullptr) + { + if (in_should_lock) + { + in_opt_fence_ptr->lock(); + } + else + { + in_opt_fence_ptr->unlock(); + } } } + +void Anvil::Queue::wait_idle() +{ + lock(); + { + Anvil::Vulkan::vkQueueWaitIdle(m_queue); + } + unlock(); +} \ No newline at end of file diff --git a/src/wrappers/render_pass.cpp b/src/wrappers/render_pass.cpp index 688b5b7b..d0f1ec7f 100644 --- a/src/wrappers/render_pass.cpp +++ b/src/wrappers/render_pass.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -21,29 +21,28 @@ // #include "misc/debug.h" +#include "misc/formats.h" #include "misc/object_tracker.h" +#include "misc/render_pass_create_info.h" +#include "misc/struct_chainer.h" #include "wrappers/device.h" #include "wrappers/graphics_pipeline_manager.h" #include "wrappers/pipeline_layout.h" #include "wrappers/render_pass.h" #include "wrappers/swapchain.h" -#include -#include /** Contsructor. Initializes the Renderpass instance with default values. 
*/ -Anvil::RenderPass::RenderPass(std::weak_ptr in_device_ptr, - std::shared_ptr in_opt_swapchain_ptr) - :CallbacksSupportProvider (RENDER_PASS_CALLBACK_ID_COUNT), - DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT), - m_device_ptr (in_device_ptr), - m_dirty (false), - m_render_pass (VK_NULL_HANDLE), - m_swapchain_ptr (in_opt_swapchain_ptr) +Anvil::RenderPass::RenderPass(Anvil::RenderPassCreateInfoUniquePtr in_renderpass_create_info_ptr, + Anvil::Swapchain* in_opt_swapchain_ptr) + :DebugMarkerSupportProvider (in_renderpass_create_info_ptr->get_device(), + Anvil::ObjectType::RENDER_PASS), + m_render_pass (VK_NULL_HANDLE), + m_render_pass_create_info_ptr(std::move(in_renderpass_create_info_ptr) ), + m_swapchain_ptr (in_opt_swapchain_ptr) { /* Register the object */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_RENDER_PASS, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::RENDER_PASS, this); } @@ -51,585 +50,68 @@ Anvil::RenderPass::RenderPass(std::weak_ptr in_device_ptr, Anvil::RenderPass::~RenderPass() { /* Unregister the object */ - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_RENDER_PASS, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::RENDER_PASS, this); if (m_render_pass != VK_NULL_HANDLE) { - std::shared_ptr device_locked_ptr(m_device_ptr); - - vkDestroyRenderPass(device_locked_ptr->get_device_vk(), - m_render_pass, - nullptr /* pAllocator */); + Anvil::Vulkan::vkDestroyRenderPass(m_render_pass_create_info_ptr->get_device()->get_device_vk(), + m_render_pass, + nullptr /* pAllocator */); m_render_pass = VK_NULL_HANDLE; } - while (m_subpasses.size() > 0) - { - auto iterator = m_subpasses.begin(); - - delete *iterator; - - m_subpasses.erase(iterator); - } - m_swapchain_ptr = nullptr; } -/* Please see haeder for specification */ -bool Anvil::RenderPass::add_color_attachment(VkFormat in_format, - VkSampleCountFlags in_sample_count, - VkAttachmentLoadOp in_load_op, - VkAttachmentStoreOp in_store_op, - VkImageLayout in_initial_layout, - VkImageLayout in_final_layout, - bool in_may_alias, - RenderPassAttachmentID* out_attachment_id_ptr) -{ - uint32_t new_attachment_index = UINT32_MAX; - bool result = false; - - if (out_attachment_id_ptr == nullptr) - { - anvil_assert(out_attachment_id_ptr != nullptr); - - goto end; - } - - m_dirty = true; - new_attachment_index = static_cast(m_attachments.size() ); - - *out_attachment_id_ptr = new_attachment_index; - - m_attachments.push_back(RenderPassAttachment(in_format, - in_sample_count, - in_load_op, - in_store_op, - in_initial_layout, - in_final_layout, - in_may_alias, - new_attachment_index) ); - - result = true; - - /* Notify the subscribers about the event */ - { - OnRenderPassBakeNeededCallbackArgument callback_argument(this); - - callback(RENDER_PASS_CALLBACK_ID_BAKING_NEEDED, - &callback_argument); - } - -end: - return result; -} - -/** Adds a new dependency to the internal data model. - * - * This function does NOT re-create the internal VkRenderPass instance. Instead, - * it marks the RenderPass as dirty, which will cause the object to be re-created - * at next bake() or get_render_pass() request. - * - * @param in_destination_subpass_ptr Pointer to the descriptor of the destination subpass. - * If nullptr, it is assumed an external destination is requested. - * @param in_source_subpass_ptr Pointer to the descriptor of the source subpass. - * If nullptr, it is assumed an external source is requested. 
- * @param in_source_stage_mask Source pipeline stage mask. - * @param in_destination_stage_mask Destination pipeline stage mask. - * @param in_source_access_mask Source access mask. - * @param in_destination_access_mask Destination access mask. - * @param in_by_region true if a "by-region" dependency is requested; false otherwise. - * - * @return true if the dependency was added successfully; false otherwise. - * - **/ -bool Anvil::RenderPass::add_dependency(SubPass* in_destination_subpass_ptr, - SubPass* in_source_subpass_ptr, - VkPipelineStageFlags in_source_stage_mask, - VkPipelineStageFlags in_destination_stage_mask, - VkAccessFlags in_source_access_mask, - VkAccessFlags in_destination_access_mask, - bool in_by_region) -{ - SubPassDependency new_dep(in_destination_stage_mask, - in_destination_subpass_ptr, - in_source_stage_mask, - in_source_subpass_ptr, - in_source_access_mask, - in_destination_access_mask, - in_by_region); - - if (std::find(m_subpass_dependencies.begin(), - m_subpass_dependencies.end(), - new_dep) == m_subpass_dependencies.end() ) - { - m_subpass_dependencies.push_back(new_dep); - - m_dirty = true; - - /* Notify the subscribers about the event */ - { - OnRenderPassBakeNeededCallbackArgument callback_argument(this); - - callback(RENDER_PASS_CALLBACK_ID_BAKING_NEEDED, - &callback_argument); - } - } - - return true; -} - -/* Please see header for specification */ -bool Anvil::RenderPass::add_depth_stencil_attachment(VkFormat in_format, - VkSampleCountFlags in_sample_count, - VkAttachmentLoadOp in_depth_load_op, - VkAttachmentStoreOp in_depth_store_op, - VkAttachmentLoadOp in_stencil_load_op, - VkAttachmentStoreOp in_stencil_store_op, - VkImageLayout in_initial_layout, - VkImageLayout in_final_layout, - bool in_may_alias, - RenderPassAttachmentID* out_attachment_id_ptr) -{ - uint32_t new_attachment_index = UINT32_MAX; - bool result = false; - - if (out_attachment_id_ptr == nullptr) - { - anvil_assert(out_attachment_id_ptr != nullptr); - - goto end; - } - - m_dirty = true; - new_attachment_index = static_cast(m_attachments.size() ); - - *out_attachment_id_ptr = new_attachment_index; - - m_attachments.push_back(RenderPassAttachment(in_format, - in_sample_count, - in_depth_load_op, - in_depth_store_op, - in_stencil_load_op, - in_stencil_store_op, - in_initial_layout, - in_final_layout, - in_may_alias, - new_attachment_index) ); - - result = true; -end: - return result; -} - -/* Please see header for specification */ -bool Anvil::RenderPass::add_external_to_subpass_dependency(SubPassID in_destination_subpass_id, - VkPipelineStageFlags in_source_stage_mask, - VkPipelineStageFlags in_destination_stage_mask, - VkAccessFlags in_source_access_mask, - VkAccessFlags in_destination_access_mask, - bool in_by_region) -{ - SubPass* destination_subpass_ptr = nullptr; - bool result = false; - - if (m_subpasses.size() <= in_destination_subpass_id) - { - anvil_assert(!(m_subpasses.size() <= in_destination_subpass_id) ); - - goto end; - } - - destination_subpass_ptr = m_subpasses[in_destination_subpass_id]; - - result = add_dependency(destination_subpass_ptr, - nullptr, /* source_subpass_ptr */ - in_source_stage_mask, - in_destination_stage_mask, - in_source_access_mask, - in_destination_access_mask, - in_by_region); -end: - return result; -} - /* Please see header for specification */ -bool Anvil::RenderPass::add_self_subpass_dependency(SubPassID in_destination_subpass_id, - VkPipelineStageFlags in_source_stage_mask, - VkPipelineStageFlags in_destination_stage_mask, - VkAccessFlags 
in_source_access_mask, - VkAccessFlags in_destination_access_mask, - bool in_by_region) +Anvil::RenderPassUniquePtr Anvil::RenderPass::create(Anvil::RenderPassCreateInfoUniquePtr in_renderpass_create_info_ptr, + Anvil::Swapchain* in_opt_swapchain_ptr) { - SubPass* destination_subpass_ptr = nullptr; - bool result = false; + std::unique_ptr result_ptr (nullptr, + std::default_delete() ); + auto rp_create_info_raw_ptr(in_renderpass_create_info_ptr.get() ); - if (m_subpasses.size() <= in_destination_subpass_id) - { - anvil_assert(!(m_subpasses.size() <= in_destination_subpass_id) ); - - goto end; - } - - destination_subpass_ptr = m_subpasses[in_destination_subpass_id]; - - result = add_dependency(destination_subpass_ptr, - destination_subpass_ptr, - in_source_stage_mask, - in_destination_stage_mask, - in_source_access_mask, - in_destination_access_mask, - in_by_region); -end: - return result; -} - -/* Please see header for specification */ -bool Anvil::RenderPass::add_subpass(const ShaderModuleStageEntryPoint& in_fragment_shader_entrypoint, - const ShaderModuleStageEntryPoint& in_geometry_shader_entrypoint, - const ShaderModuleStageEntryPoint& in_tess_control_shader_entrypoint, - const ShaderModuleStageEntryPoint& in_tess_evaluation_shader_entrypoint, - const ShaderModuleStageEntryPoint& in_vertex_shader_entrypoint, - SubPassID* out_subpass_id_ptr, - const Anvil::PipelineID in_opt_pipeline_id) -{ - uint32_t new_subpass_index = UINT32_MAX; - GraphicsPipelineID new_subpass_pipeline_id = UINT32_MAX; - bool result = false; - - if (out_subpass_id_ptr == nullptr) - { - anvil_assert(out_subpass_id_ptr != nullptr); - - goto end; - } - - m_dirty = true; - new_subpass_index = static_cast(m_subpasses.size() ); - - /* Create a new graphics pipeline for the subpass */ - anvil_assert(in_vertex_shader_entrypoint.name.size() != 0); - - if (in_opt_pipeline_id == UINT32_MAX) - { - std::shared_ptr device_locked_ptr(m_device_ptr); - - device_locked_ptr->get_graphics_pipeline_manager()->add_regular_pipeline(false, /* disable_optimizations */ - false, /* allow_derivatives */ - shared_from_this(), - new_subpass_index, - in_fragment_shader_entrypoint, - in_geometry_shader_entrypoint, - in_tess_control_shader_entrypoint, - in_tess_evaluation_shader_entrypoint, - in_vertex_shader_entrypoint, - &new_subpass_pipeline_id); - } - else - { - new_subpass_pipeline_id = in_opt_pipeline_id; - } - - /* Spawn a new descriptor */ - { - SubPass* new_subpass_ptr = new SubPass(m_device_ptr, - new_subpass_index, - new_subpass_pipeline_id); - - m_subpasses.push_back(new_subpass_ptr); - - *out_subpass_id_ptr = new_subpass_index; - } - - result = true; -end: - return result; -} - -/** Adds a new attachment to the specified subpass. - * - * This function does NOT re-create the internal VkRenderPass instance. Instead, - * it marks the RenderPass as dirty, which will cause the object to be re-created - * at next bake() or get_render_pass() request. - * - * @param in_subpass_id ID of the subpass to update. The subpass must have been earlier - * created with an add_subpass() call. - * @param in_is_color_attachment true if the added attachment is a color attachment;false if it's - * an input attachment. - * @param in_layout Layout to use for the attachment when executing the subpass. - * Driver takes care of transforming the attachment to the requested layout - * before subpass commands starts executing. - * @param in_attachment_id ID of a render-pass attachment ID this sub-pass attachment should - * refer to. 
- * @param in_attachment_location Location, under which the specified attachment should be accessible. - * @param in_should_resolve true if the specified attachment is multisample and should be - * resolved at the end of the sub-pass. - * @parma in_resolve_attachment_id If @param should_resolve is true, this argument should specify the - * ID of a render-pass attachment, to which the resolved data should - * written to. - * - * @return true if the function executed successfully, false otherwise. - * - **/ -bool Anvil::RenderPass::add_subpass_attachment(SubPassID in_subpass_id, - bool in_is_color_attachment, - VkImageLayout in_layout, - RenderPassAttachmentID in_attachment_id, - uint32_t in_attachment_location, - bool in_should_resolve, - RenderPassAttachmentID in_resolve_attachment_id) -{ - RenderPassAttachment* renderpass_attachment_ptr = nullptr; - RenderPassAttachment* resolve_attachment_ptr = nullptr; - bool result = false; - LocationToSubPassAttachmentMap* subpass_attachments_ptr = nullptr; - SubPass* subpass_ptr = nullptr; - - /* Retrieve the subpass descriptor */ - if (in_subpass_id >= m_subpasses.size() ) - { - anvil_assert(!(in_subpass_id >= m_subpasses.size() )); - - goto end; - } - else - { - subpass_ptr = m_subpasses.at(in_subpass_id); - } - - /* Retrieve the renderpass attachment descriptor */ - if (in_attachment_id >= m_attachments.size() ) - { - anvil_assert(!(in_attachment_id >= m_attachments.size()) ); + result_ptr.reset( + new Anvil::RenderPass(std::move(in_renderpass_create_info_ptr), + in_opt_swapchain_ptr) + ); - goto end; - } - else + if (result_ptr != nullptr) { - renderpass_attachment_ptr = &m_attachments.at(in_attachment_id); - } + bool result = false; - /* Retrieve the resolve attachment descriptor, if one was requested */ - if (in_should_resolve) - { - if (in_resolve_attachment_id >= m_attachments.size() ) + if (rp_create_info_raw_ptr->get_device()->get_extension_info()->khr_create_renderpass2() ) { - anvil_assert(!(in_resolve_attachment_id >= m_attachments.size()) ); - - goto end; + result = result_ptr->init_using_rp2_create_extension(); } else { - resolve_attachment_ptr = &m_attachments.at(in_resolve_attachment_id); + result = result_ptr->init_using_core_vk10(); } - } - - /* Make sure the attachment location is not already assigned an attachment */ - subpass_attachments_ptr = (in_is_color_attachment) ? 
&subpass_ptr->color_attachments_map - : &subpass_ptr->input_attachments_map; - - if (subpass_attachments_ptr->find(in_attachment_location) != subpass_attachments_ptr->end() ) - { - anvil_assert(!(subpass_attachments_ptr->find(in_attachment_location) != subpass_attachments_ptr->end()) ); - - goto end; - } - - /* Add the attachment */ - (*subpass_attachments_ptr)[in_attachment_location] = SubPassAttachment(renderpass_attachment_ptr, - in_layout, - resolve_attachment_ptr); - - if (in_should_resolve) - { - anvil_assert(resolve_attachment_ptr != nullptr); - - subpass_ptr->resolved_attachments_map[in_attachment_location] = SubPassAttachment(resolve_attachment_ptr, - in_layout, - nullptr); - } - m_dirty = true; - result = true; - - /* Notify the subscribers about the event */ - { - OnRenderPassBakeNeededCallbackArgument callback_argument(this); - - callback(RENDER_PASS_CALLBACK_ID_BAKING_NEEDED, - &callback_argument); - } - -end: - return result; -} - -/* Please see header for specification */ -bool Anvil::RenderPass::add_subpass_color_attachment(SubPassID in_subpass_id, - VkImageLayout in_input_layout, - RenderPassAttachmentID in_attachment_id, - uint32_t in_location, - const RenderPassAttachmentID* in_attachment_resolve_id_ptr) -{ - return add_subpass_attachment(in_subpass_id, - true, /* is_color_attachment */ - in_input_layout, - in_attachment_id, - in_location, - (in_attachment_resolve_id_ptr != nullptr), - (in_attachment_resolve_id_ptr != nullptr) ? *in_attachment_resolve_id_ptr - : UINT32_MAX); -} - -/* Please see header for specification */ -bool Anvil::RenderPass::add_subpass_depth_stencil_attachment(SubPassID in_subpass_id, - RenderPassAttachmentID in_attachment_id, - VkImageLayout in_layout) -{ - RenderPassAttachment* ds_attachment_ptr = nullptr; - RenderPassAttachment* resolve_attachment_ptr = nullptr; - bool result = false; - SubPass* subpass_ptr = nullptr; - - /* Retrieve the subpass descriptor */ - if (m_subpasses.size() <= in_subpass_id) - { - anvil_assert(!(m_subpasses.size() <= in_subpass_id) ); - - goto end; - } - else - { - subpass_ptr = m_subpasses.at(in_subpass_id); - } - - /* Retrieve the attachment descriptors */ - if (m_attachments.size() <= in_attachment_id) - { - anvil_assert(!(m_attachments.size() <= in_attachment_id) ); - - goto end; - } - else - { - ds_attachment_ptr = &m_attachments.at(in_attachment_id); - } - - /* Update the depth/stencil attachment for the subpass */ - if (subpass_ptr->depth_stencil_attachment.attachment_ptr != nullptr) - { - anvil_assert(!(subpass_ptr->depth_stencil_attachment.attachment_ptr != nullptr) ); - - goto end; - } - - subpass_ptr->depth_stencil_attachment = SubPassAttachment(ds_attachment_ptr, - in_layout, - resolve_attachment_ptr); - - result = true; -end: - return result; -} - -/* Please see header for specification */ -bool Anvil::RenderPass::add_subpass_input_attachment(SubPassID in_subpass_id, - VkImageLayout in_layout, - RenderPassAttachmentID in_attachment_id, - uint32_t in_attachment_index) -{ - return add_subpass_attachment(in_subpass_id, - false, /* is_color_attachment */ - in_layout, - in_attachment_id, - in_attachment_index, - false, /* should_resolve */ - UINT32_MAX); /* resolve_attachment_id */ -} - -/* Please see header for specification */ -bool Anvil::RenderPass::add_subpass_to_external_dependency(SubPassID in_source_subpass_id, - VkPipelineStageFlags in_source_stage_mask, - VkPipelineStageFlags in_destination_stage_mask, - VkAccessFlags in_source_access_mask, - VkAccessFlags in_destination_access_mask, - bool 
in_by_region) -{ - SubPass* source_subpass_ptr = nullptr; - bool result = false; - - if (m_subpasses.size() <= in_source_subpass_id) - { - anvil_assert(!(m_subpasses.size() <= in_source_subpass_id) ); - - goto end; - } - - source_subpass_ptr = m_subpasses[in_source_subpass_id]; - - result = add_dependency(nullptr, /* destination_subpass_ptr */ - source_subpass_ptr, - in_source_stage_mask, - in_destination_stage_mask, - in_source_access_mask, - in_destination_access_mask, - in_by_region); -end: - return result; -} - -/* Please see header for specification */ -bool Anvil::RenderPass::add_subpass_to_subpass_dependency(SubPassID in_source_subpass_id, - SubPassID in_destination_subpass_id, - VkPipelineStageFlags in_source_stage_mask, - VkPipelineStageFlags in_destination_stage_mask, - VkAccessFlags in_source_access_mask, - VkAccessFlags in_destination_access_mask, - bool in_by_region) -{ - SubPass* destination_subpass_ptr = nullptr; - bool result = false; - SubPass* source_subpass_ptr = nullptr; - - if (m_subpasses.size() <= in_destination_subpass_id) - { - anvil_assert(!(m_subpasses.size() <= in_destination_subpass_id) ); - - goto end; - } - - if (m_subpasses.size() <= in_source_subpass_id) - { - anvil_assert(!(m_subpasses.size() <= in_source_subpass_id) ); - - goto end; + if (!result) + { + result_ptr.reset(); + } } - destination_subpass_ptr = m_subpasses[in_destination_subpass_id]; - source_subpass_ptr = m_subpasses[in_source_subpass_id]; - - result = add_dependency(destination_subpass_ptr, - source_subpass_ptr, - in_source_stage_mask, - in_destination_stage_mask, - in_source_access_mask, - in_destination_access_mask, - in_by_region); -end: - return result; + return std::move(result_ptr); } /* Please see header for specification */ -bool Anvil::RenderPass::bake() +bool Anvil::RenderPass::init_using_core_vk10() { - std::shared_ptr device_locked_ptr (m_device_ptr); - std::vector renderpass_attachments_vk; - VkRenderPassCreateInfo render_pass_create_info; - bool result (false); - VkResult result_vk; - std::vector subpass_dependencies_vk; - std::vector subpass_descriptions_vk; + std::vector > input_attachment_aspect_reference_ptrs; + std::unique_ptr > multiview_view_mask_vec_ptr; + std::unique_ptr > multiview_view_offset_vec_ptr; + std::vector renderpass_attachments_vk; + Anvil::StructChainer render_pass_create_info_chainer; + bool result (false); + VkResult result_vk; + std::vector subpass_dependencies_vk; + std::vector subpass_descriptions_vk; /* NOTE: We need to reserve storage in advance for each of the vectors below, * so that it is guaranteed the push_back() calls do not cause a realloc() @@ -688,71 +170,91 @@ bool Anvil::RenderPass::bake() uint32_t n_max_preserve_attachments; } SubPassAttachmentSet; - std::vector > subpass_attachment_sets; + std::vector > subpass_attachment_sets; - /* If this is not first baking request, release the previously created render pass instance */ - if (m_render_pass != VK_NULL_HANDLE) + anvil_assert(m_render_pass == VK_NULL_HANDLE); + + if (m_render_pass_create_info_ptr->is_multiview_enabled() ) { - vkDestroyRenderPass(device_locked_ptr->get_device_vk(), - m_render_pass, - nullptr /* pAllocator */); + if (!m_device_ptr->get_extension_info()->khr_multiview() ) + { + anvil_assert(m_device_ptr->get_extension_info()->khr_multiview() ); - m_render_pass = VK_NULL_HANDLE; - set_vk_handle(VK_NULL_HANDLE); + goto end; + } } /* Set up helper descriptor storage space */ - subpass_dependencies_vk.reserve(m_subpass_dependencies.size() ); - 
subpass_descriptions_vk.reserve(m_subpasses.size() ); + subpass_dependencies_vk.reserve(m_render_pass_create_info_ptr->m_subpass_dependencies.size() ); + subpass_descriptions_vk.reserve(m_render_pass_create_info_ptr->m_subpasses.size() ); - for (auto renderpass_attachment_iterator = m_attachments.cbegin(); - renderpass_attachment_iterator != m_attachments.cend(); + for (auto renderpass_attachment_iterator = m_render_pass_create_info_ptr->m_attachments.cbegin(); + renderpass_attachment_iterator != m_render_pass_create_info_ptr->m_attachments.cend(); ++renderpass_attachment_iterator) { VkAttachmentDescription attachment_vk; - attachment_vk.finalLayout = renderpass_attachment_iterator->final_layout; + attachment_vk.finalLayout = static_cast(renderpass_attachment_iterator->final_layout); attachment_vk.flags = (renderpass_attachment_iterator->may_alias) ? VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT : 0u; - attachment_vk.format = renderpass_attachment_iterator->format; - attachment_vk.initialLayout = renderpass_attachment_iterator->initial_layout; - attachment_vk.loadOp = renderpass_attachment_iterator->color_depth_load_op; + attachment_vk.format = static_cast (renderpass_attachment_iterator->format); + attachment_vk.initialLayout = static_cast (renderpass_attachment_iterator->initial_layout); + attachment_vk.loadOp = static_cast (renderpass_attachment_iterator->color_depth_load_op); attachment_vk.samples = static_cast(renderpass_attachment_iterator->sample_count); - attachment_vk.stencilLoadOp = renderpass_attachment_iterator->stencil_load_op; - attachment_vk.stencilStoreOp = renderpass_attachment_iterator->stencil_store_op; - attachment_vk.storeOp = renderpass_attachment_iterator->color_depth_store_op; + attachment_vk.stencilLoadOp = static_cast (renderpass_attachment_iterator->stencil_load_op); + attachment_vk.stencilStoreOp = static_cast (renderpass_attachment_iterator->stencil_store_op); + attachment_vk.storeOp = static_cast (renderpass_attachment_iterator->color_depth_store_op); renderpass_attachments_vk.push_back(attachment_vk); } - for (auto subpass_dependency_iterator = m_subpass_dependencies.cbegin(); - subpass_dependency_iterator != m_subpass_dependencies.cend(); + for (auto subpass_dependency_iterator = m_render_pass_create_info_ptr->m_subpass_dependencies.cbegin(); + subpass_dependency_iterator != m_render_pass_create_info_ptr->m_subpass_dependencies.cend(); ++subpass_dependency_iterator) { VkSubpassDependency dependency_vk; - dependency_vk.dependencyFlags = ((subpass_dependency_iterator->by_region) ? VK_DEPENDENCY_BY_REGION_BIT : 0u); - dependency_vk.dstAccessMask = subpass_dependency_iterator->destination_access_mask; - dependency_vk.dstStageMask = subpass_dependency_iterator->destination_stage_mask; + dependency_vk.dependencyFlags = subpass_dependency_iterator->flags.get_vk(); + dependency_vk.dstAccessMask = subpass_dependency_iterator->destination_access_mask.get_vk(); + dependency_vk.dstStageMask = subpass_dependency_iterator->destination_stage_mask.get_vk (); dependency_vk.dstSubpass = (subpass_dependency_iterator->destination_subpass_ptr != nullptr) ? 
subpass_dependency_iterator->destination_subpass_ptr->index : VK_SUBPASS_EXTERNAL; - dependency_vk.srcAccessMask = subpass_dependency_iterator->source_access_mask; - dependency_vk.srcStageMask = subpass_dependency_iterator->source_stage_mask; + dependency_vk.srcAccessMask = subpass_dependency_iterator->source_access_mask.get_vk(); + dependency_vk.srcStageMask = subpass_dependency_iterator->source_stage_mask.get_vk (); dependency_vk.srcSubpass = (subpass_dependency_iterator->source_subpass_ptr != nullptr) ? subpass_dependency_iterator->source_subpass_ptr->index : VK_SUBPASS_EXTERNAL; + #if defined(_DEBUG) + { + if (m_render_pass_create_info_ptr->is_multiview_enabled() ) + { + if (dependency_vk.dstSubpass == dependency_vk.srcSubpass && + dependency_vk.dstSubpass != VK_SUBPASS_EXTERNAL) + { + uint32_t n_views_active = 0; + uint32_t view_mask = 0; + + m_render_pass_create_info_ptr->get_subpass_view_mask(dependency_vk.dstSubpass, + &view_mask); + + n_views_active = Anvil::Utils::count_set_bits(view_mask); + + anvil_assert( (n_views_active <= 1) || + ((n_views_active > 1 && (dependency_vk.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT) != 0)) ); + } + } + } + #endif + subpass_dependencies_vk.push_back(dependency_vk); } - /* Update preserved attachments for all subpasses */ - update_preserved_attachments(); - /* We now have all the data needed to create Vulkan subpass instances. */ - for (auto subpass_iterator = m_subpasses.cbegin(); - subpass_iterator != m_subpasses.cend(); + for (auto subpass_iterator = m_render_pass_create_info_ptr->m_subpasses.cbegin(); + subpass_iterator != m_render_pass_create_info_ptr->m_subpasses.cend(); ++subpass_iterator) { - std::shared_ptr current_subpass_attachment_set_ptr; + std::unique_ptr current_subpass_attachment_set_ptr; uint32_t highest_subpass_color_attachment_location = UINT32_MAX; uint32_t highest_subpass_input_attachment_index = UINT32_MAX; bool need_color_resolve_attachments = false; @@ -760,14 +262,14 @@ bool Anvil::RenderPass::bake() VkAttachmentReference unused_reference; unused_reference.attachment = VK_ATTACHMENT_UNUSED; - unused_reference.layout = VK_IMAGE_LAYOUT_UNDEFINED; + unused_reference.layout = static_cast(Anvil::ImageLayout::UNDEFINED); /* Determine whether any of the color attachments are going to be resolved. */ for (auto subpass_color_attachment_iterator = (*subpass_iterator)->color_attachments_map.cbegin(); subpass_color_attachment_iterator != (*subpass_iterator)->color_attachments_map.cend(); ++subpass_color_attachment_iterator) { - if (subpass_color_attachment_iterator->second.resolve_attachment_ptr != nullptr) + if (subpass_color_attachment_iterator->second.resolve_attachment_index != UINT32_MAX) { need_color_resolve_attachments = true; @@ -775,6 +277,9 @@ bool Anvil::RenderPass::bake() } } + /* NOTE: DS resolve operations are only accessible via KHR_depth_stencil_resolve + KHR_create_renderpass2. */ + anvil_assert( (*subpass_iterator)->depth_stencil_attachment.resolve_attachment_index == UINT32_MAX); + /* Determine the highest color attachment location & input attachment index. 
*/ for (auto subpass_color_attachment_iterator = (*subpass_iterator)->color_attachments_map.cbegin(); subpass_color_attachment_iterator != (*subpass_iterator)->color_attachments_map.cend(); @@ -830,21 +335,21 @@ bool Anvil::RenderPass::bake() subpass_color_attachment_iterator != (*subpass_iterator)->color_attachments_map.cend(); ++subpass_color_attachment_iterator) { - current_subpass_attachment_set_ptr->color_attachments_vk[subpass_color_attachment_iterator->first] = get_attachment_reference_from_subpass_attachment(subpass_color_attachment_iterator->second); + current_subpass_attachment_set_ptr->color_attachments_vk[subpass_color_attachment_iterator->first] = m_render_pass_create_info_ptr->get_attachment_reference_from_subpass_attachment(subpass_color_attachment_iterator->second); if (need_color_resolve_attachments) { - if (subpass_color_attachment_iterator->second.resolve_attachment_ptr != nullptr) + if (subpass_color_attachment_iterator->second.resolve_attachment_index != UINT32_MAX) { - current_subpass_attachment_set_ptr->resolve_color_attachments_vk[subpass_color_attachment_iterator->first] = get_attachment_reference_for_resolve_attachment(subpass_iterator, - subpass_color_attachment_iterator); + current_subpass_attachment_set_ptr->resolve_color_attachments_vk[subpass_color_attachment_iterator->first] = m_render_pass_create_info_ptr->get_attachment_reference_for_resolve_attachment(subpass_iterator, + subpass_color_attachment_iterator); } } } - if ((*subpass_iterator)->depth_stencil_attachment.attachment_ptr != nullptr) + if ((*subpass_iterator)->depth_stencil_attachment.attachment_index != UINT32_MAX) { - current_subpass_attachment_set_ptr->depth_attachment_vk = get_attachment_reference_from_subpass_attachment((*subpass_iterator)->depth_stencil_attachment); + current_subpass_attachment_set_ptr->depth_attachment_vk = m_render_pass_create_info_ptr->get_attachment_reference_from_subpass_attachment((*subpass_iterator)->depth_stencil_attachment); } else { @@ -855,7 +360,7 @@ bool Anvil::RenderPass::bake() subpass_input_attachment_iterator != (*subpass_iterator)->input_attachments_map.cend(); ++subpass_input_attachment_iterator) { - current_subpass_attachment_set_ptr->input_attachments_vk[subpass_input_attachment_iterator->first] = get_attachment_reference_from_subpass_attachment(subpass_input_attachment_iterator->second); + current_subpass_attachment_set_ptr->input_attachments_vk[subpass_input_attachment_iterator->first] = m_render_pass_create_info_ptr->get_attachment_reference_from_subpass_attachment(subpass_input_attachment_iterator->second); } /* Fill the preserved attachments vector. These do not use indices or locations, so the process is much simpler */ @@ -863,7 +368,9 @@ bool Anvil::RenderPass::bake() subpass_preserve_attachment_iterator != (*subpass_iterator)->preserved_attachments.cend(); ++subpass_preserve_attachment_iterator) { - current_subpass_attachment_set_ptr->preserve_attachments_vk.push_back(subpass_preserve_attachment_iterator->attachment_ptr->index); + current_subpass_attachment_set_ptr->preserve_attachments_vk.push_back( + m_render_pass_create_info_ptr->m_attachments.at(subpass_preserve_attachment_iterator->attachment_index).index + ); } /* Prepare the VK subpass descriptor */ @@ -873,369 +380,253 @@ bool Anvil::RenderPass::bake() const uint32_t n_resolved_attachments = ((*subpass_iterator)->resolved_attachments_map.size() == 0) ? 
0 : n_color_attachments; - subpass_vk.colorAttachmentCount = n_color_attachments; - subpass_vk.flags = 0; - subpass_vk.inputAttachmentCount = n_input_attachments; - subpass_vk.pColorAttachments = (n_color_attachments > 0) ? &current_subpass_attachment_set_ptr->color_attachments_vk.at(0) - : nullptr; - subpass_vk.pDepthStencilAttachment = ((*subpass_iterator)->depth_stencil_attachment.attachment_ptr != nullptr) ? &current_subpass_attachment_set_ptr->depth_attachment_vk - : nullptr; - subpass_vk.pInputAttachments = (n_input_attachments > 0) ? &current_subpass_attachment_set_ptr->input_attachments_vk.at(0) - : nullptr; - subpass_vk.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS; - subpass_vk.pPreserveAttachments = (n_preserved_attachments > 0) ? &current_subpass_attachment_set_ptr->preserve_attachments_vk.at(0) - : nullptr; - subpass_vk.preserveAttachmentCount = n_preserved_attachments; - subpass_vk.pResolveAttachments = (n_resolved_attachments > 0) ? &current_subpass_attachment_set_ptr->resolve_color_attachments_vk.at(0) - : nullptr; + subpass_vk.colorAttachmentCount = n_color_attachments; + subpass_vk.flags = 0; + subpass_vk.inputAttachmentCount = n_input_attachments; + subpass_vk.pColorAttachments = (n_color_attachments > 0) ? &current_subpass_attachment_set_ptr->color_attachments_vk.at(0) + : nullptr; + subpass_vk.pDepthStencilAttachment = ((*subpass_iterator)->depth_stencil_attachment.attachment_index != UINT32_MAX) ? &current_subpass_attachment_set_ptr->depth_attachment_vk + : nullptr; + subpass_vk.pInputAttachments = (n_input_attachments > 0) ? &current_subpass_attachment_set_ptr->input_attachments_vk.at(0) + : nullptr; + subpass_vk.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS; + subpass_vk.pPreserveAttachments = (n_preserved_attachments > 0) ? &current_subpass_attachment_set_ptr->preserve_attachments_vk.at(0) + : nullptr; + subpass_vk.preserveAttachmentCount = n_preserved_attachments; + subpass_vk.pResolveAttachments = (n_resolved_attachments > 0) ? &current_subpass_attachment_set_ptr->resolve_color_attachments_vk.at(0) + : nullptr; current_subpass_attachment_set_ptr->do_sanity_checks(); - subpass_attachment_sets.push_back(current_subpass_attachment_set_ptr); + subpass_attachment_sets.push_back( + std::move(current_subpass_attachment_set_ptr) + ); + subpass_descriptions_vk.push_back(subpass_vk); } /* Set up a create info descriptor and spawn a new Vulkan RenderPass object. */ - render_pass_create_info.attachmentCount = static_cast(m_attachments.size() ); - render_pass_create_info.dependencyCount = static_cast(m_subpass_dependencies.size() ); - render_pass_create_info.subpassCount = static_cast(m_subpasses.size() ); - render_pass_create_info.flags = 0; - render_pass_create_info.pAttachments = (render_pass_create_info.attachmentCount > 0) ? &renderpass_attachments_vk.at(0) - : nullptr; - render_pass_create_info.pDependencies = (render_pass_create_info.dependencyCount > 0) ? &subpass_dependencies_vk.at(0) - : nullptr; - render_pass_create_info.pNext = nullptr; - render_pass_create_info.pSubpasses = (render_pass_create_info.subpassCount > 0) ?
&subpass_descriptions_vk.at(0) - : nullptr; - render_pass_create_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO; - - result_vk = vkCreateRenderPass(device_locked_ptr->get_device_vk(), - &render_pass_create_info, - nullptr, /* pAllocator */ - &m_render_pass); - - if (!is_vk_call_successful(result_vk) ) { - anvil_assert_vk_call_succeeded(result_vk); - - goto end; - } - - set_vk_handle(m_render_pass); - - m_dirty = false; - result = true; - -end: - return result; -} - -/* Please see header for specification */ -std::shared_ptr Anvil::RenderPass::create(std::weak_ptr in_device_ptr, - std::shared_ptr in_opt_swapchain_ptr) -{ - std::shared_ptr result_ptr; - - result_ptr.reset( - new Anvil::RenderPass(in_device_ptr, - in_opt_swapchain_ptr) - ); - - return result_ptr; -} - -/** Creates a VkAttachmentReference descriptor from the specified RenderPassAttachment instance. - * - * @param in_renderpass_attachment Renderpass attachment descriptor to create the Vulkan descriptor from. - * - * @return As per description. - **/ -VkAttachmentReference Anvil::RenderPass::get_attachment_reference_from_renderpass_attachment(const RenderPassAttachment& in_renderpass_attachment) const -{ - VkAttachmentReference attachment_vk; - - attachment_vk.attachment = in_renderpass_attachment.index; - attachment_vk.layout = in_renderpass_attachment.initial_layout; - - return attachment_vk; -} - -/** Creates a VkAttachmentReference descriptor from the specified SubPassAttachment instance. - * - * @param in_renderpass_attachment Subpass attachment descriptor to create the Vulkan descriptor from. - * - * @return As per description. - **/ -VkAttachmentReference Anvil::RenderPass::get_attachment_reference_from_subpass_attachment(const SubPassAttachment& in_subpass_attachment) const -{ - VkAttachmentReference attachment_vk; - - attachment_vk.attachment = in_subpass_attachment.attachment_ptr->index; - attachment_vk.layout = in_subpass_attachment.layout; - - return attachment_vk; -} - -/** Creates a VkAttachmentReference descriptor for a resolve attachment for a color attachment specified by the user. - * - * @param in_subpass_iterator Iterator pointing at the subpass which uses the color attachment of interest. - * @param in_location_to_subpass_att_map_iterator Iterator pointing at a color attachment which has a resolve attachment attached. - * - * @return As per description. 
- **/ -VkAttachmentReference Anvil::RenderPass::get_attachment_reference_for_resolve_attachment(const SubPassesConstIterator& in_subpass_iterator, - const LocationToSubPassAttachmentMapConstIterator& in_location_to_subpass_att_map_iterator) const -{ - VkAttachmentReference result; - - anvil_assert((*in_subpass_iterator)->resolved_attachments_map.find(in_location_to_subpass_att_map_iterator->first) != (*in_subpass_iterator)->resolved_attachments_map.end() ); - - result.attachment = in_location_to_subpass_att_map_iterator->second.resolve_attachment_ptr->index; - result.layout = (*in_subpass_iterator)->resolved_attachments_map.at(in_location_to_subpass_att_map_iterator->first).layout; - - return result; -} - -/* Please see header for specification */ -bool Anvil::RenderPass::get_attachment_type(RenderPassAttachmentID in_attachment_id, - AttachmentType* out_attachment_type_ptr) const -{ - bool result = false; + VkRenderPassCreateInfo render_pass_create_info; - if (m_attachments.size() <= in_attachment_id) - { - anvil_assert(m_attachments.size() > in_attachment_id); + render_pass_create_info.attachmentCount = static_cast(m_render_pass_create_info_ptr->m_attachments.size () ); + render_pass_create_info.dependencyCount = static_cast(m_render_pass_create_info_ptr->m_subpass_dependencies.size() ); + render_pass_create_info.subpassCount = static_cast(m_render_pass_create_info_ptr->m_subpasses.size () ); + render_pass_create_info.flags = 0; + render_pass_create_info.pAttachments = (render_pass_create_info.attachmentCount > 0) ? &renderpass_attachments_vk.at(0) + : nullptr; + render_pass_create_info.pDependencies = (render_pass_create_info.dependencyCount > 0) ? &subpass_dependencies_vk.at(0) + : nullptr; + render_pass_create_info.pNext = nullptr; + render_pass_create_info.pSubpasses = (render_pass_create_info.subpassCount > 0) ? &subpass_descriptions_vk.at(0) + : nullptr; + render_pass_create_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO; - goto end; + render_pass_create_info_chainer.append_struct(render_pass_create_info); } - *out_attachment_type_ptr = m_attachments[in_attachment_id].type; + /* Also chain info regarding aspects accessed by input attachments, if KHR_maintenance2 is supported and relevant info + * has been provided at attachment addition time. 
+ */ + if (m_device_ptr->is_extension_enabled(VK_KHR_MAINTENANCE2_EXTENSION_NAME) ) + { + for (auto subpass_iterator = m_render_pass_create_info_ptr->m_subpasses.cbegin(); + subpass_iterator != m_render_pass_create_info_ptr->m_subpasses.cend(); + ++subpass_iterator) + { + const auto current_subpass_index = (*subpass_iterator)->index; - /* All done */ - result = true; -end: - return result; -} + for (auto subpass_input_attachment_iterator = (*subpass_iterator)->input_attachments_map.cbegin(); + subpass_input_attachment_iterator != (*subpass_iterator)->input_attachments_map.cend(); + ++subpass_input_attachment_iterator) + { + const auto& current_attachment_accessed_aspects = subpass_input_attachment_iterator->second.aspects_accessed; + const auto& current_attachment_index = subpass_input_attachment_iterator->first; -/* Please see header for specification */ -bool Anvil::RenderPass::get_color_attachment_properties(RenderPassAttachmentID in_attachment_id, - VkSampleCountFlagBits* out_opt_sample_count_ptr, - VkAttachmentLoadOp* out_opt_load_op_ptr, - VkAttachmentStoreOp* out_opt_store_op_ptr, - VkImageLayout* out_opt_initial_layout_ptr, - VkImageLayout* out_opt_final_layout_ptr, - bool* out_opt_may_alias_ptr) const -{ - bool result = false; + if (current_attachment_accessed_aspects != Anvil::ImageAspectFlagBits::NONE) + { + std::unique_ptr attachment_aspect_reference_info_ptr(new VkInputAttachmentAspectReferenceKHR() ); - if (m_attachments.size() <= in_attachment_id) - { - goto end; - } + attachment_aspect_reference_info_ptr->aspectMask = current_attachment_accessed_aspects.get_vk(); + attachment_aspect_reference_info_ptr->inputAttachmentIndex = current_attachment_index; + attachment_aspect_reference_info_ptr->subpass = current_subpass_index; - if (out_opt_sample_count_ptr != nullptr) - { - *out_opt_sample_count_ptr = static_cast(m_attachments[in_attachment_id].sample_count); - } + input_attachment_aspect_reference_ptrs.push_back( std::move(attachment_aspect_reference_info_ptr) ); + } + } + } - if (out_opt_load_op_ptr != nullptr) - { - *out_opt_load_op_ptr = m_attachments[in_attachment_id].color_depth_load_op; - } + if (input_attachment_aspect_reference_ptrs.size() > 0) + { + VkRenderPassInputAttachmentAspectCreateInfoKHR input_attachment_aspect_create_info; - if (out_opt_store_op_ptr != nullptr) - { - *out_opt_store_op_ptr = m_attachments[in_attachment_id].color_depth_store_op; - } + input_attachment_aspect_create_info.aspectReferenceCount = static_cast(input_attachment_aspect_reference_ptrs.size() ); + input_attachment_aspect_create_info.pAspectReferences = reinterpret_cast(&input_attachment_aspect_reference_ptrs.at(0) ); + input_attachment_aspect_create_info.pNext = nullptr; + input_attachment_aspect_create_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR; - if (out_opt_initial_layout_ptr != nullptr) - { - *out_opt_initial_layout_ptr = m_attachments[in_attachment_id].initial_layout; + render_pass_create_info_chainer.append_struct(input_attachment_aspect_create_info); + } } - if (out_opt_final_layout_ptr != nullptr) + /* Don't forget about multiview structure, if multiview has been enabled for the renderpass. 
*/ + if (m_render_pass_create_info_ptr->is_multiview_enabled() ) { - *out_opt_final_layout_ptr = m_attachments[in_attachment_id].final_layout; - } + const uint32_t n_subpasses = m_render_pass_create_info_ptr->get_n_subpasses(); - if (out_opt_may_alias_ptr != nullptr) - { - *out_opt_may_alias_ptr = m_attachments[in_attachment_id].may_alias; - } + if (n_subpasses > 0) + { + VkRenderPassMultiviewCreateInfoKHR multiview_create_info; + const uint32_t* correlation_masks_ptr = nullptr; + uint32_t n_correlation_masks = 0; + const uint32_t n_dependencies = m_render_pass_create_info_ptr->get_n_dependencies(); - result = true; -end: - return result; -} + m_render_pass_create_info_ptr->get_multiview_correlation_masks(&n_correlation_masks, + &correlation_masks_ptr); -/** Please see header for specification */ -bool Anvil::RenderPass::get_dependency_properties(uint32_t in_n_dependency, - SubPassID* out_destination_subpass_id_ptr, - SubPassID* out_source_subpass_id_ptr, - VkPipelineStageFlags* out_destination_stage_mask_ptr, - VkPipelineStageFlags* out_source_stage_mask_ptr, - VkAccessFlags* out_destination_access_mask_ptr, - VkAccessFlags* out_source_access_mask_ptr, - bool* out_by_region_ptr) const -{ - const Anvil::RenderPass::SubPassDependency* dep_ptr = nullptr; - bool result = false; + multiview_view_mask_vec_ptr.reset(new std::vector(n_subpasses) ); + anvil_assert(multiview_view_mask_vec_ptr!= nullptr); - if (m_subpass_dependencies.size() <= in_n_dependency) - { - anvil_assert_fail(); + multiview_view_offset_vec_ptr.reset(new std::vector(n_dependencies, UINT32_MAX) ); + anvil_assert(multiview_view_offset_vec_ptr != nullptr); - goto end; - } + for (uint32_t n_subpass = 0; + n_subpass < n_subpasses; + ++n_subpass) + { + uint32_t multiview_mask = 0; - dep_ptr = &m_subpass_dependencies[in_n_dependency]; + if (!m_render_pass_create_info_ptr->get_subpass_view_mask(n_subpass, + &multiview_mask) ) + { + anvil_assert_fail(); - *out_destination_subpass_id_ptr = (dep_ptr->destination_subpass_ptr != nullptr) ? dep_ptr->destination_subpass_ptr->index - : UINT32_MAX; - *out_source_subpass_id_ptr = (dep_ptr->source_subpass_ptr != nullptr) ? 
dep_ptr->source_subpass_ptr->index - : UINT32_MAX; - *out_destination_stage_mask_ptr = dep_ptr->destination_stage_mask; - *out_source_stage_mask_ptr = dep_ptr->source_stage_mask; - *out_destination_access_mask_ptr = dep_ptr->destination_access_mask; - *out_source_access_mask_ptr = dep_ptr->source_access_mask; - *out_by_region_ptr = dep_ptr->by_region; + goto end; + } - /* All done */ - result = true; -end: - return result; -} + multiview_view_mask_vec_ptr->at(n_subpass) = multiview_mask; + } -/** Please see header for specification */ -bool Anvil::RenderPass::get_depth_stencil_attachment_properties(RenderPassAttachmentID in_attachment_id, - VkAttachmentLoadOp* out_opt_depth_load_op_ptr, - VkAttachmentStoreOp* out_opt_depth_store_op_ptr, - VkAttachmentLoadOp* out_opt_stencil_load_op_ptr, - VkAttachmentStoreOp* out_opt_stencil_store_op_ptr, - VkImageLayout* out_opt_initial_layout_ptr, - VkImageLayout* out_opt_final_layout_ptr, - bool* out_opt_may_alias_ptr) const -{bool result = false; - - if (m_attachments.size() <= in_attachment_id) - { - goto end; - } + for (uint32_t n_dependency = 0; + n_dependency < n_dependencies; + ++n_dependency) + { + int32_t view_offset = INT32_MAX; - if (out_opt_depth_load_op_ptr != nullptr) - { - *out_opt_depth_load_op_ptr = m_attachments[in_attachment_id].color_depth_load_op; - } + if (!m_render_pass_create_info_ptr->get_dependency_multiview_properties(n_dependency, + &view_offset) ) + { + anvil_assert_fail(); - if (out_opt_depth_store_op_ptr != nullptr) - { - *out_opt_depth_store_op_ptr = m_attachments[in_attachment_id].color_depth_store_op; - } + goto end; + } - if (out_opt_stencil_load_op_ptr != nullptr) - { - *out_opt_stencil_load_op_ptr = m_attachments[in_attachment_id].stencil_load_op; - } + multiview_view_offset_vec_ptr->at(n_dependency) = view_offset; + } - if (out_opt_stencil_store_op_ptr != nullptr) - { - *out_opt_stencil_store_op_ptr = m_attachments[in_attachment_id].stencil_store_op; - } + multiview_create_info.correlationMaskCount = n_correlation_masks; + multiview_create_info.dependencyCount = n_dependencies; + multiview_create_info.pCorrelationMasks = correlation_masks_ptr; + multiview_create_info.pNext = nullptr; + multiview_create_info.pViewMasks = &multiview_view_mask_vec_ptr->at(0); + multiview_create_info.pViewOffsets = (n_dependencies != 0) ? &multiview_view_offset_vec_ptr->at(0) : nullptr; + multiview_create_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR; + multiview_create_info.subpassCount = n_subpasses; - if (out_opt_initial_layout_ptr != nullptr) - { - *out_opt_initial_layout_ptr = m_attachments[in_attachment_id].initial_layout; + render_pass_create_info_chainer.append_struct(multiview_create_info); + } } - if (out_opt_final_layout_ptr != nullptr) + /* All done! Spawn the final chain and try to create the renderpass. 
*/ { - *out_opt_final_layout_ptr = m_attachments[in_attachment_id].final_layout; - } + auto create_info_chain_ptr = render_pass_create_info_chainer.create_chain(); - if (out_opt_may_alias_ptr != nullptr) - { - *out_opt_may_alias_ptr = m_attachments[in_attachment_id].may_alias; + result_vk = Anvil::Vulkan::vkCreateRenderPass(m_device_ptr->get_device_vk(), + create_info_chain_ptr->get_root_struct(), + nullptr, /* pAllocator */ + &m_render_pass); } - result = true; -end: - return result; -} - -/* Please see header for specification */ -VkRenderPass Anvil::RenderPass::get_render_pass(bool allow_rebaking) -{ - VkRenderPass result = VK_NULL_HANDLE; - - if (m_dirty && allow_rebaking) + if (!is_vk_call_successful(result_vk) ) { - bake(); + anvil_assert_vk_call_succeeded(result_vk); - anvil_assert(!m_dirty); + goto end; } - result = m_render_pass; - - anvil_assert(result != VK_NULL_HANDLE); - return result; -} - -/* Please see header for specification */ -bool Anvil::RenderPass::get_subpass_graphics_pipeline_id(SubPassID in_subpass_id, - GraphicsPipelineID* out_graphics_pipeline_id_ptr) const -{ - bool result = false; - - if (m_subpasses.size() <= in_subpass_id) - { - anvil_assert(!(m_subpasses.size() <= in_subpass_id) ); + set_vk_handle(m_render_pass); - goto end; - } - else - { - *out_graphics_pipeline_id_ptr = m_subpasses[in_subpass_id]->pipeline_id; - result = true; - } + result = true; end: return result; } /* Please see header for specification */ -bool Anvil::RenderPass::get_subpass_n_attachments(SubPassID in_subpass_id, - AttachmentType in_attachment_type, - uint32_t* out_n_attachments_ptr) +bool Anvil::RenderPass::init_using_rp2_create_extension() { - bool result = false; - - if (m_subpasses.size() <= in_subpass_id) - { - anvil_assert(!(m_subpasses.size() <= in_subpass_id) ); - - goto end; - } - else - { - result = true; - - if (in_attachment_type == ATTACHMENT_TYPE_PRESERVE && m_dirty) + const auto& crp2_entrypoints = m_render_pass_create_info_ptr->get_device ()->get_extension_khr_create_renderpass2_entrypoints(); + const uint32_t n_rp_attachments = m_render_pass_create_info_ptr->get_n_attachments (); + const uint32_t n_subpass_deps = m_render_pass_create_info_ptr->get_n_dependencies(); + const uint32_t n_subpasses = m_render_pass_create_info_ptr->get_n_subpasses (); + bool result = false; + Anvil::StructChainer struct_chainer; + + std::vector rp_attachment_vec (n_rp_attachments); + std::vector subpass_attachment_reference_vec; + Anvil::StructChainVector subpass_chain_vec; + std::vector > subpass_chainer_vec (n_subpasses); + std::vector subpass_dep_vec (n_subpass_deps); + std::vector subpass_preserve_attachment_vec; + + /* Prepare renderpass attachment vec */ + for (uint32_t n_rp_attachment = 0; + n_rp_attachment < n_rp_attachments; + ++n_rp_attachment) + { + Anvil::ImageLayout current_rp_attachment_final_layout = Anvil::ImageLayout::UNKNOWN; + Anvil::Format current_rp_attachment_format = Anvil::Format::UNKNOWN; + VkAttachmentDescription2KHR& current_rp_attachment_info = rp_attachment_vec.at(n_rp_attachment); + Anvil::ImageLayout current_rp_attachment_initial_layout = Anvil::ImageLayout::UNKNOWN; + bool current_rp_attachment_may_alias = false; + Anvil::AttachmentLoadOp current_rp_attachment_load_op = Anvil::AttachmentLoadOp::UNKNOWN; + Anvil::SampleCountFlagBits current_rp_attachment_sample_count = Anvil::SampleCountFlagBits::NONE; + Anvil::AttachmentLoadOp current_rp_attachment_stencil_load_op = Anvil::AttachmentLoadOp::DONT_CARE; + Anvil::AttachmentStoreOp 
current_rp_attachment_stencil_store_op = Anvil::AttachmentStoreOp::DONT_CARE; + Anvil::AttachmentStoreOp current_rp_attachment_store_op = Anvil::AttachmentStoreOp::UNKNOWN; + Anvil::AttachmentType current_rp_attachment_type = Anvil::AttachmentType::UNKNOWN; + + m_render_pass_create_info_ptr->get_attachment_type(n_rp_attachment, + &current_rp_attachment_type); + + switch (current_rp_attachment_type) { - bake(); - - anvil_assert(!m_dirty); - } + case Anvil::AttachmentType::COLOR: + { + m_render_pass_create_info_ptr->get_color_attachment_properties(n_rp_attachment, + &current_rp_attachment_format, + &current_rp_attachment_sample_count, + &current_rp_attachment_load_op, + &current_rp_attachment_store_op, + &current_rp_attachment_initial_layout, + &current_rp_attachment_final_layout, + &current_rp_attachment_may_alias); - switch (in_attachment_type) - { - case ATTACHMENT_TYPE_COLOR: *out_n_attachments_ptr = static_cast(m_subpasses[in_subpass_id]->color_attachments_map.size() ); break; - case ATTACHMENT_TYPE_INPUT: *out_n_attachments_ptr = static_cast(m_subpasses[in_subpass_id]->input_attachments_map.size() ); break; - case ATTACHMENT_TYPE_PRESERVE: *out_n_attachments_ptr = static_cast(m_subpasses[in_subpass_id]->preserved_attachments.size() ); break; - case ATTACHMENT_TYPE_RESOLVE: *out_n_attachments_ptr = static_cast(m_subpasses[in_subpass_id]->resolved_attachments_map.size() ); break; + break; + } - case ATTACHMENT_TYPE_DEPTH_STENCIL: + case Anvil::AttachmentType::DEPTH_STENCIL: { - *out_n_attachments_ptr = (m_subpasses[in_subpass_id]->depth_stencil_attachment.attachment_ptr != nullptr) ? 1u : 0u; + m_render_pass_create_info_ptr->get_depth_stencil_attachment_properties(n_rp_attachment, + &current_rp_attachment_format, + &current_rp_attachment_sample_count, + &current_rp_attachment_load_op, + &current_rp_attachment_store_op, + &current_rp_attachment_stencil_load_op, + &current_rp_attachment_stencil_store_op, + &current_rp_attachment_initial_layout, + &current_rp_attachment_final_layout, + &current_rp_attachment_may_alias); break; } @@ -1244,366 +635,491 @@ bool Anvil::RenderPass::get_subpass_n_attachments(SubPassID in_subpass_id, { anvil_assert_fail(); - result = false; + goto end; } } - } -end: - return result; -} + current_rp_attachment_info.finalLayout = static_cast(current_rp_attachment_final_layout); + current_rp_attachment_info.flags = (current_rp_attachment_may_alias) ?
VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT + : 0; + current_rp_attachment_info.format = static_cast (current_rp_attachment_format); + current_rp_attachment_info.initialLayout = static_cast (current_rp_attachment_initial_layout); + current_rp_attachment_info.loadOp = static_cast(current_rp_attachment_load_op); + current_rp_attachment_info.pNext = nullptr; + current_rp_attachment_info.samples = static_cast(current_rp_attachment_sample_count); + current_rp_attachment_info.stencilLoadOp = static_cast (current_rp_attachment_stencil_load_op); + current_rp_attachment_info.stencilStoreOp = static_cast (current_rp_attachment_stencil_store_op); + current_rp_attachment_info.storeOp = static_cast (current_rp_attachment_store_op); + current_rp_attachment_info.sType = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR; + } + + /* Prepare subpass dep vec */ + for (uint32_t n_subpass_dep = 0; + n_subpass_dep < n_subpass_deps; + ++n_subpass_dep) + { + auto& current_subpass_dep = subpass_dep_vec.at(n_subpass_dep); + Anvil::AccessFlags current_subpass_dep_dst_access_mask = Anvil::AccessFlagBits::NONE; + Anvil::PipelineStageFlags current_subpass_dep_dst_stage_mask = Anvil::PipelineStageFlagBits::NONE; + Anvil::SubPassID current_subpass_dep_dst_subpass_id = UINT32_MAX; + Anvil::DependencyFlags current_subpass_dep_flags = Anvil::DependencyFlagBits::NONE; + int32_t current_subpass_dep_multiview_offset = 0; + Anvil::AccessFlags current_subpass_dep_src_access_mask = Anvil::AccessFlagBits::NONE; + Anvil::SubPassID current_subpass_dep_src_subpass_id = UINT32_MAX; + Anvil::PipelineStageFlags current_subpass_dep_src_stage_mask = Anvil::PipelineStageFlagBits::NONE; + + if (m_render_pass_create_info_ptr->is_multiview_enabled() ) + { + m_render_pass_create_info_ptr->get_dependency_multiview_properties(n_subpass_dep, + &current_subpass_dep_multiview_offset); + } -/* Please see header for specification */ -bool Anvil::RenderPass::get_subpass_attachment_properties(SubPassID in_subpass_id, - AttachmentType in_attachment_type, - uint32_t in_n_subpass_attachment, - RenderPassAttachmentID* out_renderpass_attachment_id_ptr, - VkImageLayout* out_layout_ptr) -{ - SubPassAttachment attachment; - bool result = false; - LocationToSubPassAttachmentMap* subpass_attachments_ptr = nullptr; - SubPass* subpass_ptr = nullptr; - - /* Sanity checks */ - if (in_attachment_type == ATTACHMENT_TYPE_PRESERVE && - out_layout_ptr != nullptr) - { - anvil_assert(!(in_attachment_type == ATTACHMENT_TYPE_PRESERVE && - out_layout_ptr != nullptr) ); + m_render_pass_create_info_ptr->get_dependency_properties(n_subpass_dep, + &current_subpass_dep_dst_subpass_id, + &current_subpass_dep_src_subpass_id, + &current_subpass_dep_dst_stage_mask, + &current_subpass_dep_src_stage_mask, + &current_subpass_dep_dst_access_mask, + &current_subpass_dep_src_access_mask, + &current_subpass_dep_flags); + + current_subpass_dep.dependencyFlags = current_subpass_dep_flags.get_vk (); + current_subpass_dep.dstAccessMask = current_subpass_dep_dst_access_mask.get_vk(); + current_subpass_dep.dstStageMask = current_subpass_dep_dst_stage_mask.get_vk (); + current_subpass_dep.dstSubpass = (current_subpass_dep_dst_subpass_id == UINT32_MAX) ? VK_SUBPASS_EXTERNAL + : current_subpass_dep_dst_subpass_id; + current_subpass_dep.pNext = nullptr; + current_subpass_dep.srcAccessMask = current_subpass_dep_src_access_mask.get_vk(); + current_subpass_dep.srcStageMask = current_subpass_dep_src_stage_mask.get_vk (); + current_subpass_dep.srcSubpass = (current_subpass_dep_src_subpass_id == UINT32_MAX) ?
VK_SUBPASS_EXTERNAL + : current_subpass_dep_src_subpass_id; + current_subpass_dep.sType = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR; + current_subpass_dep.viewOffset = current_subpass_dep_multiview_offset; + } + + /* Prepare subpass vec */ + for (uint32_t n_subpass = 0; + n_subpass < n_subpasses; + ++n_subpass) + { + uint32_t current_subpass_attachment_reference_vec_base_index = static_cast(subpass_attachment_reference_vec.size() ); + uint32_t current_subpass_highest_location = 0; + uint32_t current_subpass_preserve_attachment_vec_base_index = static_cast(subpass_preserve_attachment_vec.size () ); + bool current_subpass_uses_ds_resolve_operation = false; + uint32_t current_subpass_view_mask = 0; + uint32_t n_current_subpass_color_attachments_actual = 0; + uint32_t n_current_subpass_ds_attachments = 0; + uint32_t n_current_subpass_input_attachments_actual = 0; + uint32_t n_current_subpass_color_input_resolve_attachments_needed = 0; + uint32_t n_current_subpass_preserve_attachments = 0; + uint32_t n_current_subpass_resolve_attachments_actual = 0; + + m_render_pass_create_info_ptr->get_subpass_highest_location(n_subpass, + &current_subpass_highest_location); + + n_current_subpass_color_input_resolve_attachments_needed = current_subpass_highest_location + 1; + + + m_render_pass_create_info_ptr->get_subpass_n_attachments(n_subpass, + Anvil::AttachmentType::COLOR, + &n_current_subpass_color_attachments_actual); + m_render_pass_create_info_ptr->get_subpass_n_attachments(n_subpass, + Anvil::AttachmentType::DEPTH_STENCIL, + &n_current_subpass_ds_attachments); + m_render_pass_create_info_ptr->get_subpass_n_attachments(n_subpass, + Anvil::AttachmentType::INPUT, + &n_current_subpass_input_attachments_actual); + m_render_pass_create_info_ptr->get_subpass_n_attachments(n_subpass, + Anvil::AttachmentType::PRESERVE, + &n_current_subpass_preserve_attachments); + m_render_pass_create_info_ptr->get_subpass_n_attachments(n_subpass, + Anvil::AttachmentType::RESOLVE, + &n_current_subpass_resolve_attachments_actual); + + const struct + { + Anvil::AttachmentType subpass_attachment_type; + uint32_t n_subpass_attachments; + uint32_t subpass_attachment_reference_vec_offset; + } subpass_attachment_data[] = + { + /* NOTE: Preserve attachments are handled elsewhere */ + {Anvil::AttachmentType::COLOR, n_current_subpass_color_input_resolve_attachments_needed, 0}, + {Anvil::AttachmentType::DEPTH_STENCIL, n_current_subpass_ds_attachments, n_current_subpass_color_input_resolve_attachments_needed}, + {Anvil::AttachmentType::INPUT, n_current_subpass_color_input_resolve_attachments_needed, n_current_subpass_color_input_resolve_attachments_needed + n_current_subpass_ds_attachments}, + {Anvil::AttachmentType::RESOLVE, n_current_subpass_color_input_resolve_attachments_needed, n_current_subpass_color_input_resolve_attachments_needed + n_current_subpass_ds_attachments + n_current_subpass_color_input_resolve_attachments_needed}, + }; + + const auto& subpass_color_attachment_data = subpass_attachment_data[0]; + const auto& subpass_ds_attachment_data = subpass_attachment_data[1]; + const auto& subpass_input_attachment_data = subpass_attachment_data[2]; + const auto& subpass_resolve_attachment_data = subpass_attachment_data[3]; + + anvil_assert(subpass_color_attachment_data.subpass_attachment_type == Anvil::AttachmentType::COLOR); + anvil_assert(subpass_ds_attachment_data.subpass_attachment_type == Anvil::AttachmentType::DEPTH_STENCIL); + anvil_assert(subpass_input_attachment_data.subpass_attachment_type == Anvil::AttachmentType::INPUT); + 
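/* Illustrative worked example (hypothetical values): for a subpass whose highest attachment location is 2 and which defines one depth/stencil attachment, the table above yields reference-vector offsets of 0 (color), 3 (depth/stencil), 4 (input) and 7 (resolve). The base indices computed just below therefore place color references in slots 0..2 of the per-subpass block, the DS reference in slot 3, input references in slots 4..6 and resolve references in slots 7..9; any slot that no attachment maps to keeps the VK_ATTACHMENT_UNUSED value assigned by the dummy-fill loop further down. */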
anvil_assert(subpass_resolve_attachment_data.subpass_attachment_type == Anvil::AttachmentType::RESOLVE); + + const uint32_t current_subpass_color_attachment_reference_vec_base_index = current_subpass_attachment_reference_vec_base_index; + const uint32_t current_subpass_ds_attachment_reference_vec_base_index = current_subpass_color_attachment_reference_vec_base_index + subpass_color_attachment_data.n_subpass_attachments; + uint32_t current_subpass_ds_resolve_attachment_reference_vec_base_index = UINT32_MAX; /* optionally updated later */ + const uint32_t current_subpass_input_attachment_reference_vec_base_index = current_subpass_ds_attachment_reference_vec_base_index + subpass_ds_attachment_data.n_subpass_attachments; + const uint32_t current_subpass_resolve_attachment_reference_vec_base_index = current_subpass_input_attachment_reference_vec_base_index + subpass_input_attachment_data.n_subpass_attachments; + + subpass_attachment_reference_vec.resize(subpass_attachment_reference_vec.size() + + subpass_color_attachment_data.n_subpass_attachments + + subpass_ds_attachment_data.n_subpass_attachments + + subpass_input_attachment_data.n_subpass_attachments + + subpass_resolve_attachment_data.n_subpass_attachments); + + subpass_preserve_attachment_vec.resize (subpass_preserve_attachment_vec.size() + + n_current_subpass_preserve_attachments); + + /* Fill the new attachment reference structs with dummy data */ + for (uint32_t n_subpass_attachment_reference_item = current_subpass_attachment_reference_vec_base_index; + n_subpass_attachment_reference_item < static_cast(subpass_attachment_reference_vec.size() ); + ++n_subpass_attachment_reference_item) + { + auto& current_subpass_attachment_reference = subpass_attachment_reference_vec.at(n_subpass_attachment_reference_item); - goto end; - } + current_subpass_attachment_reference.aspectMask = 0; + current_subpass_attachment_reference.attachment = VK_ATTACHMENT_UNUSED; + current_subpass_attachment_reference.layout = VK_IMAGE_LAYOUT_UNDEFINED; + current_subpass_attachment_reference.pNext = nullptr; + current_subpass_attachment_reference.sType = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR; + } - if (m_subpasses.size() <= in_subpass_id) - { - anvil_assert(!(m_subpasses.size() <= in_subpass_id) ); + for (const auto& current_subpass_attachment_data : subpass_attachment_data) + { + for (uint32_t n_subpass_attachment = 0; + n_subpass_attachment < current_subpass_attachment_data.n_subpass_attachments; + ++n_subpass_attachment) + { + Anvil::ImageAspectFlags current_subpass_attachment_aspects_accessed = Anvil::ImageAspectFlagBits::NONE; + Anvil::ImageLayout current_subpass_attachment_layout = Anvil::ImageLayout::UNKNOWN; + uint32_t current_subpass_attachment_location = UINT32_MAX; + Anvil::RenderPassAttachmentID current_subpass_attachment_rp_attachment_id = VK_ATTACHMENT_UNUSED; + uint32_t reference_vec_base_index = UINT32_MAX; + + if (!m_render_pass_create_info_ptr->get_subpass_attachment_properties(n_subpass, + current_subpass_attachment_data.subpass_attachment_type, + n_subpass_attachment, + &current_subpass_attachment_rp_attachment_id, + &current_subpass_attachment_layout, + &current_subpass_attachment_aspects_accessed, + nullptr, /* out_opt_attachment_resolve_id_ptr */ + &current_subpass_attachment_location) ) + { + /* Out of attachments of this type, move on. 
+                    break;
+                }
-        goto end;
-    }
+                if (current_subpass_attachment_aspects_accessed == Anvil::ImageAspectFlagBits::NONE)
+                {
+                    Anvil::AttachmentType rp_attachment_type = Anvil::AttachmentType::UNKNOWN;
-    /* Bake the renderpass if needed.. */
-    subpass_ptr = m_subpasses[in_subpass_id];
+                    if (!m_render_pass_create_info_ptr->get_attachment_type(current_subpass_attachment_rp_attachment_id,
+                                                                            &rp_attachment_type) )
+                    {
+                        anvil_assert_fail();
-    if (m_dirty)
-    {
-        bake();
+                        goto end;
+                    }
-        if (m_dirty)
-        {
-            anvil_assert(!m_dirty);
+                    if (rp_attachment_type == Anvil::AttachmentType::COLOR)
+                    {
+                        current_subpass_attachment_aspects_accessed = Anvil::ImageAspectFlagBits::COLOR_BIT;
+                    }
+                    else
+                    {
+                        Anvil::Format rp_attachment_format = Anvil::Format::UNKNOWN;
-            goto end;
-        }
-    }
+                        anvil_assert(rp_attachment_type == Anvil::AttachmentType::DEPTH_STENCIL);
-    /* Even more sanity checks.. */
-    switch (in_attachment_type)
-    {
-        case ATTACHMENT_TYPE_COLOR:   /* Fall-through */
-        case ATTACHMENT_TYPE_INPUT:   /* Fall-through */
-        case ATTACHMENT_TYPE_RESOLVE:
-        {
-            subpass_attachments_ptr = (in_attachment_type == ATTACHMENT_TYPE_COLOR) ? &subpass_ptr->color_attachments_map
-                                    : (in_attachment_type == ATTACHMENT_TYPE_INPUT) ? &subpass_ptr->input_attachments_map
-                                                                                    : &subpass_ptr->resolved_attachments_map;
+                        m_render_pass_create_info_ptr->get_depth_stencil_attachment_properties(current_subpass_attachment_rp_attachment_id,
-            auto iterator = subpass_attachments_ptr->find(in_n_subpass_attachment);
+                                                                                               &rp_attachment_format);
-            if (iterator == subpass_attachments_ptr->end() )
-            {
-                anvil_assert_fail();
+                        anvil_assert(rp_attachment_format != Anvil::Format::UNKNOWN);
+                        current_subpass_attachment_aspects_accessed = ((Anvil::Formats::has_depth_aspect  (rp_attachment_format) ) ? Anvil::ImageAspectFlagBits::DEPTH_BIT   : Anvil::ImageAspectFlagBits::NONE) |
+                                                                      ((Anvil::Formats::has_stencil_aspect(rp_attachment_format) ) ? Anvil::ImageAspectFlagBits::STENCIL_BIT : Anvil::ImageAspectFlagBits::NONE);
+                    }
+                }
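/* [Editorial aside -- illustrative sketch, not part of this patch.]
 * When the caller did not specify which aspects a subpass attachment accesses, the block above
 * falls back to the attachment's type and format: color attachments get the color aspect, while
 * depth/stencil attachments get whichever of the depth and stencil aspects their format actually
 * carries. The same fallback written against plain Vulkan flags instead of Anvil's wrappers;
 * format_has_depth / format_has_stencil stand in for Anvil::Formats::has_depth_aspect() /
 * has_stencil_aspect(): */
#include <vulkan/vulkan.h>

static VkImageAspectFlags infer_accessed_aspects(bool is_color_attachment,
                                                 bool format_has_depth,
                                                 bool format_has_stencil)
{
    if (is_color_attachment)
    {
        return VK_IMAGE_ASPECT_COLOR_BIT;
    }

    /* Depth/stencil attachment: expose only the aspects the format actually has. */
    return (format_has_depth   ? VK_IMAGE_ASPECT_DEPTH_BIT   : 0u) |
           (format_has_stencil ? VK_IMAGE_ASPECT_STENCIL_BIT : 0u);
}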
-        goto end;
-    }
+                switch (current_subpass_attachment_data.subpass_attachment_type)
+                {
+                    case AttachmentType::COLOR:         reference_vec_base_index = current_subpass_color_attachment_reference_vec_base_index;   break;
+                    case AttachmentType::DEPTH_STENCIL: reference_vec_base_index = current_subpass_ds_attachment_reference_vec_base_index;      break;
+                    case AttachmentType::INPUT:         reference_vec_base_index = current_subpass_input_attachment_reference_vec_base_index;   break;
+                    case AttachmentType::RESOLVE:       reference_vec_base_index = current_subpass_resolve_attachment_reference_vec_base_index; break;
-            *out_layout_ptr                   = iterator->second.layout;
-            *out_renderpass_attachment_id_ptr = iterator->second.attachment_ptr->index;
+                    default:
+                    {
+                        anvil_assert_fail();
-            break;
-        }
+                        goto end;
+                    }
+                }
-        case ATTACHMENT_TYPE_DEPTH_STENCIL:
-        {
-            if (in_n_subpass_attachment > 0)
-            {
-                anvil_assert(in_n_subpass_attachment == 0);
+                auto& current_subpass_attachment_reference = subpass_attachment_reference_vec.at(reference_vec_base_index + current_subpass_attachment_location);
-                goto end;
+                current_subpass_attachment_reference.aspectMask = current_subpass_attachment_aspects_accessed.get_vk();
+                current_subpass_attachment_reference.attachment = current_subpass_attachment_rp_attachment_id;
+                current_subpass_attachment_reference.layout     = static_cast<VkImageLayout>(current_subpass_attachment_layout);
+                current_subpass_attachment_reference.pNext      = nullptr;
+                current_subpass_attachment_reference.sType      = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR;
+            }
-            *out_layout_ptr                   = subpass_ptr->depth_stencil_attachment.layout;
-            *out_renderpass_attachment_id_ptr = subpass_ptr->depth_stencil_attachment.attachment_ptr->index;
+            if (current_subpass_attachment_data.subpass_attachment_type == AttachmentType::DEPTH_STENCIL)
+            {
+                Anvil::ResolveModeFlagBits    current_subpass_ds_resolve_attachment_depth_resolve_mode       = Anvil::ResolveModeFlagBits::NONE;
+                Anvil::ImageLayout            current_subpass_ds_resolve_attachment_layout                   = Anvil::ImageLayout::UNKNOWN;
+                Anvil::RenderPassAttachmentID current_subpass_ds_resolve_attachment_renderpass_attachment_id = UINT32_MAX;
+                Anvil::ResolveModeFlagBits    current_subpass_ds_resolve_attachment_stencil_resolve_mode     = Anvil::ResolveModeFlagBits::NONE;
+
+                current_subpass_uses_ds_resolve_operation = (m_render_pass_create_info_ptr->get_subpass_ds_resolve_attachment_properties(n_subpass,
+                                                                                                                                         &current_subpass_ds_resolve_attachment_renderpass_attachment_id,
+                                                                                                                                         &current_subpass_ds_resolve_attachment_layout,
+                                                                                                                                         &current_subpass_ds_resolve_attachment_depth_resolve_mode,
+                                                                                                                                         &current_subpass_ds_resolve_attachment_stencil_resolve_mode) );
+
+                if (current_subpass_uses_ds_resolve_operation)
+                {
+                    subpass_attachment_reference_vec.resize(subpass_attachment_reference_vec.size() + 1);
-            break;
-        }
+                    auto& current_subpass_ds_resolve_attachment_reference = subpass_attachment_reference_vec.back();
-        case ATTACHMENT_TYPE_PRESERVE:
-        {
-            if (subpass_ptr->preserved_attachments.size() <= in_n_subpass_attachment)
-            {
-                anvil_assert(subpass_ptr->preserved_attachments.size() > in_n_subpass_attachment);
+                    current_subpass_ds_resolve_attachment_reference.aspectMask = static_cast<VkImageAspectFlags>(((current_subpass_ds_resolve_attachment_depth_resolve_mode   != Anvil::ResolveModeFlagBits::NONE) ? Anvil::ImageAspectFlagBits::DEPTH_BIT   : Anvil::ImageAspectFlagBits::NONE) |
+                                                                                                                 ((current_subpass_ds_resolve_attachment_stencil_resolve_mode != Anvil::ResolveModeFlagBits::NONE) ? Anvil::ImageAspectFlagBits::STENCIL_BIT : Anvil::ImageAspectFlagBits::NONE) );
+                    current_subpass_ds_resolve_attachment_reference.attachment = current_subpass_ds_resolve_attachment_renderpass_attachment_id;
+                    current_subpass_ds_resolve_attachment_reference.layout     = static_cast<VkImageLayout>(current_subpass_ds_resolve_attachment_layout);
+                    current_subpass_ds_resolve_attachment_reference.pNext      = nullptr;
+                    current_subpass_ds_resolve_attachment_reference.sType      = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR;
-                goto end;
+                    current_subpass_ds_resolve_attachment_reference_vec_base_index = static_cast<uint32_t>(subpass_attachment_reference_vec.size() - 1);
+                }
+            }
-            const auto& attachment_props = subpass_ptr->preserved_attachments[in_n_subpass_attachment];
-
-            *out_renderpass_attachment_id_ptr = attachment_props.attachment_ptr->index;
-            break;
+            current_subpass_attachment_reference_vec_base_index += current_subpass_attachment_data.n_subpass_attachments;
+        }
-        default:
+        for (uint32_t n_subpass_preserve_attachment = 0;
+                      n_subpass_preserve_attachment < n_current_subpass_preserve_attachments;
+                    ++n_subpass_preserve_attachment)
+        {
-            anvil_assert_fail();
+            auto&              current_subpass_preserve_attachment = subpass_preserve_attachment_vec.at(current_subpass_preserve_attachment_vec_base_index + n_subpass_preserve_attachment);
+            Anvil::ImageLayout dummy_layout;
-            goto end;
-        }
-    }
-
-    /* All done */
-    result = true;
-
-end:
-    return result;
-}
+            m_render_pass_create_info_ptr->get_subpass_attachment_properties(n_subpass,
+                                                                             Anvil::AttachmentType::PRESERVE,
+                                                                             n_subpass_preserve_attachment,
+                                                                             &current_subpass_preserve_attachment,
+                                                                             &dummy_layout);
-/* Please see header for specification */
-bool Anvil::RenderPass::set_subpass_graphics_pipeline_id(SubPassID          in_subpass_id,
-                                                         GraphicsPipelineID new_graphics_pipeline_id)
-{
-    std::shared_ptr<Anvil::BaseDevice> device_locked_ptr(m_device_ptr);
-    bool                               result = false;
+        }
-    if (m_subpasses.size() <= in_subpass_id)
-    {
-        anvil_assert(!(m_subpasses.size() <= in_subpass_id) );
+        if (m_render_pass_create_info_ptr->is_multiview_enabled() )
+        {
+            m_render_pass_create_info_ptr->get_subpass_view_mask(n_subpass,
+                                                                 &current_subpass_view_mask);
+        }
-        goto end;
-    }
+        /* NOTE: For ptrs, we only store offsets to reference structs in this pass. Once all subpasses are processed, base pointer
+         *       is added. This is required since std::vector will likely realloc its storage as we keep pushing structs into it.
+         */
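/* [Editorial aside -- illustrative sketch, not part of this patch.]
 * The NOTE above is the central trick of this function: while structs are still being appended,
 * every p* member receives a byte offset disguised as a pointer, and only once
 * subpass_attachment_reference_vec can no longer grow (and reallocate) are those fake pointers
 * rebased onto the vector's final storage. A minimal, self-contained version of the same two-pass
 * pattern; Ref, Desc and the surrounding code are made up for illustration only: */
#include <cstdint>
#include <vector>

struct Ref  { uint32_t attachment; };
struct Desc { const Ref* refs;     };

int main()
{
    std::vector<Ref>  refs;
    std::vector<Desc> descs;

    /* Pass 1: store the element's byte offset where a real pointer will eventually live. */
    refs.push_back ({0});
    refs.push_back ({1});
    descs.push_back({reinterpret_cast<const Ref*>(static_cast<std::uintptr_t>(1 * sizeof(Ref) ))});

    /* refs may reallocate here; a raw pointer taken earlier would now dangle, the offset does not. */
    refs.push_back ({2});

    /* Pass 2: rebase every fake pointer onto the vector's final storage (what the
     * "Convert offsets to actual pointers" loop further below does for the Vulkan structs). */
    for (auto& desc : descs)
    {
        desc.refs = reinterpret_cast<const Ref*>(reinterpret_cast<std::uintptr_t>(desc.refs) +
                                                 reinterpret_cast<std::uintptr_t>(refs.data() ));
    }

    return (descs.at(0).refs->attachment == 1) ? 0 : 1;
}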
+        {
+            auto& current_subpass_chainer = subpass_chainer_vec.at(n_subpass);
-    if (in_subpass_id == m_subpasses[in_subpass_id]->pipeline_id)
-    {
-        goto end;
-    }
+            /* Chain the root subpass struct */
+            {
+                VkSubpassDescription2KHR current_subpass;
+
+                current_subpass.colorAttachmentCount    = (n_current_subpass_color_attachments_actual   > 0) ? n_current_subpass_color_input_resolve_attachments_needed
+                                                                                                              : 0;
+                current_subpass.flags                   = 0;
+                current_subpass.inputAttachmentCount    = (n_current_subpass_input_attachments_actual   > 0) ? n_current_subpass_color_input_resolve_attachments_needed
+                                                                                                              : 0;
+                current_subpass.pColorAttachments       = (n_current_subpass_color_attachments_actual   > 0) ? reinterpret_cast<const VkAttachmentReference2KHR*>(current_subpass_color_attachment_reference_vec_base_index * sizeof(VkAttachmentReference2KHR) )
+                                                                                                              : nullptr;
+                current_subpass.pDepthStencilAttachment = (n_current_subpass_ds_attachments             > 0) ? reinterpret_cast<const VkAttachmentReference2KHR*>(current_subpass_ds_attachment_reference_vec_base_index * sizeof(VkAttachmentReference2KHR) )
+                                                                                                              : nullptr;
+                current_subpass.pInputAttachments       = (n_current_subpass_input_attachments_actual   > 0) ? reinterpret_cast<const VkAttachmentReference2KHR*>(current_subpass_input_attachment_reference_vec_base_index * sizeof(VkAttachmentReference2KHR) )
+                                                                                                              : nullptr;
+                current_subpass.pipelineBindPoint       = VK_PIPELINE_BIND_POINT_GRAPHICS;
+                current_subpass.pNext                   = nullptr;
+                current_subpass.pPreserveAttachments    = (n_current_subpass_preserve_attachments       > 0) ? reinterpret_cast<const uint32_t*>(reinterpret_cast<const uint32_t*>(0) + current_subpass_preserve_attachment_vec_base_index)
+                                                                                                              : nullptr;
+                current_subpass.preserveAttachmentCount = n_current_subpass_preserve_attachments;
+                current_subpass.pResolveAttachments     = (n_current_subpass_resolve_attachments_actual > 0) ? reinterpret_cast<const VkAttachmentReference2KHR*>(current_subpass_resolve_attachment_reference_vec_base_index * sizeof(VkAttachmentReference2KHR) )
+                                                                                                              : nullptr;
+                current_subpass.sType                   = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR;
+                current_subpass.viewMask                = current_subpass_view_mask;
+
+                current_subpass_chainer.append_struct(current_subpass);
+            }
-    /* Remove the former graphics pipeline. */
-    if (!device_locked_ptr->get_graphics_pipeline_manager()->delete_pipeline(m_subpasses[in_subpass_id]->pipeline_id) )
-    {
-        anvil_assert_fail();
-    }
+            /* Chain the DS resolve struct if needed. */
+            if (current_subpass_uses_ds_resolve_operation)
+            {
+                Anvil::ResolveModeFlagBits                 depth_resolve_mode   = Anvil::ResolveModeFlagBits::NONE;
+                VkSubpassDescriptionDepthStencilResolveKHR ds_resolve;
+                Anvil::ResolveModeFlagBits                 stencil_resolve_mode = Anvil::ResolveModeFlagBits::NONE;
-    m_subpasses[in_subpass_id]->pipeline_id = new_graphics_pipeline_id;
+                anvil_assert(current_subpass_ds_resolve_attachment_reference_vec_base_index != UINT32_MAX);
-    m_dirty  = true;
-    result   = true;
+                m_render_pass_create_info_ptr->get_subpass_ds_resolve_attachment_properties(n_subpass,
+                                                                                            nullptr, /* out_opt_renderpass_attachment_id_ptr */
+                                                                                            nullptr, /* out_opt_layout_ptr                   */
+                                                                                            &depth_resolve_mode,
+                                                                                            &stencil_resolve_mode);
-    /* Notify the subscribers about the event */
-    {
-        OnRenderPassBakeNeededCallbackArgument callback_argument(this);
+                ds_resolve.depthResolveMode                = static_cast<VkResolveModeFlagBitsKHR> (depth_resolve_mode);
+                ds_resolve.pDepthStencilResolveAttachment  = reinterpret_cast<const VkAttachmentReference2KHR*>(current_subpass_ds_resolve_attachment_reference_vec_base_index * sizeof(VkAttachmentReference2KHR) );
+                ds_resolve.pNext                           = nullptr;
+                ds_resolve.stencilResolveMode              = static_cast<VkResolveModeFlagBitsKHR>(stencil_resolve_mode);
+                ds_resolve.sType                           = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR;
-        callback(RENDER_PASS_CALLBACK_ID_BAKING_NEEDED,
-                 &callback_argument);
+                current_subpass_chainer.append_struct(ds_resolve);
+            }
+        }
+    }
-end:
-    return result;
-}
+    /* Convert offsets to actual pointers */
+    for (uint32_t n_subpass = 0;
+                  n_subpass < n_subpasses;
+                ++n_subpass)
+    {
+        auto&    current_subpass_chainer                 = subpass_chainer_vec.at (n_subpass);
+        auto     current_subpass_ptr                     = current_subpass_chainer.get_root_struct();
+        bool     current_subpass_uses_ds_resolve_attachment = false;
+        uint32_t n_current_subpass_ds_attachments        = 0;
+        uint32_t n_current_subpass_preserve_attachments  = 0;
-/** Initializes the vector of preserved attachments for all defined attachments. The algorithm
- *  used is as follows:
- *
- *  For each subpass:
- *
- *  1. Check what the highest subpass index that the attachment is preserved/used in is.
- *  2. For all previous subpasses, determine which subpasses do not use it. For each subpass, preserve
- *     the attachment.
- *
- *  This approach may need to be changed or extended in the future.
- *
- *  This function should be considered expensive.
- **/ -void Anvil::RenderPass::update_preserved_attachments() -{ - /* Cache color, depth+stencil, resolve attachments, as used by all defined subpasses. - * Make sure not to insert duplicate items. */ - std::vector unique_attachments; + m_render_pass_create_info_ptr->get_subpass_n_attachments(n_subpass, + Anvil::AttachmentType::DEPTH_STENCIL, + &n_current_subpass_ds_attachments); + m_render_pass_create_info_ptr->get_subpass_n_attachments(n_subpass, + Anvil::AttachmentType::PRESERVE, + &n_current_subpass_preserve_attachments); - for (auto subpass_iterator = m_subpasses.begin(); - subpass_iterator != m_subpasses.end(); - ++subpass_iterator) - { - SubPass* current_subpass_ptr = *subpass_iterator; + current_subpass_uses_ds_resolve_attachment = m_render_pass_create_info_ptr->get_subpass_ds_resolve_attachment_properties(n_subpass); - for (uint32_t n_attachment_type = 0; - n_attachment_type < 3; /* color, depth+stencil, resolve */ - ++n_attachment_type) + const struct { - const uint32_t n_attachments = (n_attachment_type == 0) ? static_cast(current_subpass_ptr->color_attachments_map.size() ) - : (n_attachment_type == 1) ? ((current_subpass_ptr->depth_stencil_attachment.attachment_ptr != nullptr) ? 1 : 0) - : static_cast(current_subpass_ptr->resolved_attachments_map.size() ); + bool active; + const VkAttachmentReference2KHR** attachment_reference_ptr_ptr; + } subpass_attachment_data[] = + { + {current_subpass_ptr->colorAttachmentCount > 0, ¤t_subpass_ptr->pColorAttachments}, + {n_current_subpass_ds_attachments > 0, ¤t_subpass_ptr->pDepthStencilAttachment}, + {current_subpass_ptr->inputAttachmentCount > 0, ¤t_subpass_ptr->pInputAttachments}, + {current_subpass_ptr->pResolveAttachments != nullptr, ¤t_subpass_ptr->pResolveAttachments}, + }; - for (uint32_t n_attachment = 0; - n_attachment < n_attachments; - ++n_attachment) + for (const auto& current_subpass_attachment_data : subpass_attachment_data) + { + if (!current_subpass_attachment_data.active) { - SubPassAttachment* current_attachment_ptr = (n_attachment_type == 0) ? current_subpass_ptr->get_color_attachment_at_index(n_attachment) - : (n_attachment_type == 1) ? ¤t_subpass_ptr->depth_stencil_attachment - : current_subpass_ptr->get_resolved_attachment_at_index(n_attachment); - - if (std::find(unique_attachments.begin(), - unique_attachments.end(), - current_attachment_ptr) == unique_attachments.end() ) - { - unique_attachments.push_back(current_attachment_ptr); - } + continue; } + + *current_subpass_attachment_data.attachment_reference_ptr_ptr = reinterpret_cast(reinterpret_cast(*current_subpass_attachment_data.attachment_reference_ptr_ptr) + reinterpret_cast(&subpass_attachment_reference_vec.at(0) )); } - /* Clean subpass's preserved attachments vector along the way.. */ - current_subpass_ptr->preserved_attachments.clear(); - } + if (n_current_subpass_preserve_attachments > 0) + { + current_subpass_ptr->pPreserveAttachments = reinterpret_cast(reinterpret_cast(current_subpass_ptr->pPreserveAttachments) + reinterpret_cast(&subpass_preserve_attachment_vec.at(0) )); + } - /* Determine what the index of the subpass which uses each unique attachment for the first, and for the last time, is. 
*/ - for (std::vector::iterator unique_attachment_iterator = unique_attachments.begin(); - unique_attachment_iterator != unique_attachments.end(); - ++unique_attachment_iterator) - { - uint32_t current_subpass_index = 0; - const SubPassAttachment* current_unique_attachment_ptr = *unique_attachment_iterator; - uint32_t lowest_subpass_index = static_cast(m_subpasses.size() - 1); - uint32_t highest_subpass_index = 0; - - for (auto subpass_iterator = m_subpasses.begin(); - subpass_iterator != m_subpasses.end(); - ++subpass_iterator, ++current_subpass_index) + if (current_subpass_uses_ds_resolve_attachment) { - SubPass* current_subpass_ptr = *subpass_iterator; - bool subpass_processed = false; + VkSubpassDescriptionDepthStencilResolveKHR* ds_resolve_struct_ptr = nullptr; + const auto n_structs = current_subpass_chainer.get_n_structs(); - for (uint32_t n_attachment_type = 0; - n_attachment_type < 3 /* color, depth+stencil, resolve */ && !subpass_processed; - ++n_attachment_type) + for (uint32_t n_struct = 1; + n_struct < n_structs; + ++n_struct) { - const uint32_t n_attachments = (n_attachment_type == 0) ? static_cast(current_subpass_ptr->color_attachments_map.size() ) - : (n_attachment_type == 1) ? ((current_subpass_ptr->depth_stencil_attachment.attachment_ptr != nullptr) ? 1 : 0) - : static_cast(current_subpass_ptr->resolved_attachments_map.size() ); + auto struct_header_ptr = current_subpass_chainer.get_struct_at_index(n_struct); + anvil_assert(struct_header_ptr != nullptr); - for (uint32_t n_attachment = 0; - n_attachment < n_attachments && !subpass_processed; - ++n_attachment) + if (struct_header_ptr->type == VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR) { - SubPassAttachment* current_attachment_ptr = (n_attachment_type == 0) ? current_subpass_ptr->get_color_attachment_at_index(n_attachment) - : (n_attachment_type == 1) ? ¤t_subpass_ptr->depth_stencil_attachment - : current_subpass_ptr->get_resolved_attachment_at_index(n_attachment); - - if (current_attachment_ptr == current_unique_attachment_ptr) - { - if (lowest_subpass_index > current_subpass_index) - { - lowest_subpass_index = current_subpass_index; - } + ds_resolve_struct_ptr = reinterpret_cast(struct_header_ptr); - highest_subpass_index = current_subpass_index; - subpass_processed = true; - } - } - } - } + break; + }} - (*unique_attachment_iterator)->highest_subpass_index = highest_subpass_index; - (*unique_attachment_iterator)->lowest_subpass_index = lowest_subpass_index; - } - /* For each unique attachment, add it to the list of preserved attachments for all subpasses that precede the - * one at the highest subpass index and follow the one at the lowest subpass index, as long as the - * attachment is not used by a subpass. */ - for (std::vector::iterator unique_attachment_iterator = unique_attachments.begin(); - unique_attachment_iterator != unique_attachments.end(); - ++unique_attachment_iterator) - { - const SubPassAttachment* current_unique_attachment_ptr = *unique_attachment_iterator; + anvil_assert(ds_resolve_struct_ptr != nullptr); - if ((*unique_attachment_iterator)->highest_subpass_index == (*unique_attachment_iterator)->lowest_subpass_index) - { - /* There is only one producer subpass and no consumer subpasses defined for this renderpass. - *No need to create any preserve attachments. 
*/ - continue; + ds_resolve_struct_ptr->pDepthStencilResolveAttachment = reinterpret_cast(reinterpret_cast(ds_resolve_struct_ptr->pDepthStencilResolveAttachment) + reinterpret_cast(&subpass_attachment_reference_vec.at(0) )); } + } - for (auto subpass_iterator = (m_subpasses.begin() + static_cast(current_unique_attachment_ptr->lowest_subpass_index) ); - subpass_iterator != (m_subpasses.begin() + static_cast(current_unique_attachment_ptr->highest_subpass_index) ) + 1; - ++subpass_iterator) - { - SubPass* current_subpass_ptr = *subpass_iterator; - bool is_subpass_attachment = false; - - for (uint32_t n_attachment_type = 0; - n_attachment_type < 3 /* color, depth+stencil, resolve */ && !is_subpass_attachment; - ++n_attachment_type) - { - const uint32_t n_attachments = (n_attachment_type == 0) ? static_cast(current_subpass_ptr->color_attachments_map.size() ) - : (n_attachment_type == 1) ? ((current_subpass_ptr->depth_stencil_attachment.attachment_ptr != nullptr) ? 1 : 0) - : static_cast(current_subpass_ptr->resolved_attachments_map.size() ); - - for (uint32_t n_attachment = 0; - n_attachment < n_attachments && !is_subpass_attachment; - ++n_attachment) - { - SubPassAttachment* current_attachment_ptr = (n_attachment_type == 0) ? current_subpass_ptr->get_color_attachment_at_index(n_attachment) - : (n_attachment_type == 1) ? ¤t_subpass_ptr->depth_stencil_attachment - : current_subpass_ptr->get_resolved_attachment_at_index(n_attachment); + /* Create subpass struct chains. */ + for (uint32_t n_subpass_chainer = 0; + n_subpass_chainer < static_cast(subpass_chainer_vec.size() ); + ++n_subpass_chainer) + { + const auto& current_subpass_chainer = subpass_chainer_vec.at (n_subpass_chainer); + auto subpass_chain_ptr = current_subpass_chainer.create_chain(); - if (current_attachment_ptr == current_unique_attachment_ptr) - { - is_subpass_attachment = true; - } - } - } + anvil_assert(subpass_chain_ptr != nullptr); - if (!is_subpass_attachment) - { - #ifdef _DEBUG - { - bool is_already_preserved = false; - const uint32_t n_preserved_attachments = static_cast(current_subpass_ptr->preserved_attachments.size() ); + subpass_chain_vec.append_struct_chain(std::move(subpass_chain_ptr) ); + } - for (uint32_t n_preserved_attachment = 0; - n_preserved_attachment < n_preserved_attachments; - ++n_preserved_attachment) - { - if (¤t_subpass_ptr->preserved_attachments[n_preserved_attachment] == current_unique_attachment_ptr) - { - is_already_preserved = true; + /* Chain the root struct. */ + { + VkRenderPassCreateInfo2KHR create_info; + const uint32_t* multiview_correlated_masks = nullptr; + uint32_t n_multiview_correlated_masks = 0; - break; - } - } + if (m_render_pass_create_info_ptr->is_multiview_enabled() ) + { + m_render_pass_create_info_ptr->get_multiview_correlation_masks(&n_multiview_correlated_masks, + &multiview_correlated_masks); + } - anvil_assert(!is_already_preserved); - } - #endif + create_info.attachmentCount = n_rp_attachments; + create_info.correlatedViewMaskCount = n_multiview_correlated_masks; + create_info.dependencyCount = n_subpass_deps; + create_info.flags = 0; + create_info.pAttachments = (n_rp_attachments > 0) ? &rp_attachment_vec.at(0) + : nullptr; + create_info.pCorrelatedViewMasks = multiview_correlated_masks; + create_info.pDependencies = (n_subpass_deps > 0) ? &subpass_dep_vec.at(0) + : nullptr; + create_info.pNext = nullptr; + create_info.pSubpasses = (n_subpasses > 0) ? 
subpass_chain_vec.get_root_structs() + : nullptr; + create_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR; + create_info.subpassCount = m_render_pass_create_info_ptr->get_n_subpasses(); - current_subpass_ptr->preserved_attachments.push_back(*current_unique_attachment_ptr); - } - } + struct_chainer.append_struct(create_info); } -} -/** Please see header for specification */ -Anvil::RenderPass::SubPass::~SubPass() -{ - if (pipeline_id != UINT32_MAX) + /* All done! Spawn the final chain and try to create the renderpass. */ { - std::shared_ptr device_locked_ptr (device_ptr); - std::weak_ptr gfx_pipeline_manager_ptr(device_locked_ptr->get_graphics_pipeline_manager() ); + auto create_info_chain_ptr = struct_chainer.create_chain(); + VkResult result_vk = VK_ERROR_DEVICE_LOST; - /* If this subpass is being automatically destroyed as a part of gfx pipeline manager destruction, the weak pointer - * will have become expired. Since pipelines are automatically destroyed as well, only delete the pipeline, if the - * manager is still around. */ - if (!gfx_pipeline_manager_ptr.expired() ) + result_vk = crp2_entrypoints.vkCreateRenderPass2KHR(m_device_ptr->get_device_vk (), + create_info_chain_ptr->get_root_struct(), + nullptr, /* pAllocator */ + &m_render_pass); + + if (!is_vk_call_successful(result_vk) ) { - gfx_pipeline_manager_ptr.lock()->delete_pipeline(pipeline_id); + anvil_assert_vk_call_succeeded(result_vk); + + goto end; } } + + set_vk_handle(m_render_pass); + result = true; +end: + return result; } diff --git a/src/wrappers/rendering_surface.cpp b/src/wrappers/rendering_surface.cpp index 89148e3a..09642aca 100644 --- a/src/wrappers/rendering_surface.cpp +++ b/src/wrappers/rendering_surface.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2019 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -22,48 +22,52 @@ #include "misc/debug.h" #include "misc/object_tracker.h" +#include "misc/rendering_surface_create_info.h" #include "misc/window.h" +#include "misc/window_generic.h" #include "wrappers/instance.h" #include "wrappers/physical_device.h" #include "wrappers/pipeline_layout.h" #include "wrappers/rendering_surface.h" #include "wrappers/queue.h" +#include "config.h" + +#undef max /* Please see header for specification */ -Anvil::RenderingSurface::RenderingSurface(std::weak_ptr in_instance_ptr, - std::weak_ptr in_device_ptr, - std::shared_ptr in_window_ptr, - bool* out_safe_to_use_ptr) - :DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT), - m_device_ptr (in_device_ptr), +Anvil::RenderingSurface::RenderingSurface(Anvil::RenderingSurfaceCreateInfoUniquePtr in_create_info_ptr) + :DebugMarkerSupportProvider(in_create_info_ptr->get_device_ptr(), + Anvil::ObjectType::RENDERING_SURFACE), + MTSafetySupportProvider (Anvil::Utils::convert_mt_safety_enum_to_boolean(in_create_info_ptr->get_mt_safety (), + in_create_info_ptr->get_device_ptr() )), m_height (0), - m_instance_ptr (in_instance_ptr), m_surface (VK_NULL_HANDLE), - m_type (RENDERING_SURFACE_TYPE_GENERAL), - m_width (0), - m_window_ptr (in_window_ptr) + m_width (0) { - *out_safe_to_use_ptr = init(); + m_create_info_ptr = std::move(in_create_info_ptr); /* Register the instance */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_RENDERING_SURFACE, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::RENDERING_SURFACE, this); } /* Please see header for specification */ Anvil::RenderingSurface::~RenderingSurface() { - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_RENDERING_SURFACE, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::RENDERING_SURFACE, this); if (m_surface != VK_NULL_HANDLE) { - std::shared_ptr instance_locked_ptr(m_instance_ptr); + lock(); + { + auto instance_ptr = m_create_info_ptr->get_instance_ptr(); - m_instance_ptr->get_extension_khr_surface_entrypoints().vkDestroySurfaceKHR(instance_locked_ptr->get_instance_vk(), - m_surface, - nullptr /* pAllocator */); + instance_ptr->get_extension_khr_surface_entrypoints().vkDestroySurfaceKHR(instance_ptr->get_instance_vk(), + m_surface, + nullptr /* pAllocator */); + } + unlock(); m_surface = VK_NULL_HANDLE; } @@ -72,26 +76,26 @@ Anvil::RenderingSurface::~RenderingSurface() /* Please see header for specification */ void Anvil::RenderingSurface::cache_surface_properties() { - std::shared_ptr device_locked_ptr (m_device_ptr); - auto khr_surface_entrypoints (m_instance_ptr->get_extension_khr_surface_entrypoints() ); - bool is_offscreen_rendering_enabled(true); - VkPhysicalDevice physical_device_vk (VK_NULL_HANDLE); - std::shared_ptr sgpu_device_locked_ptr (std::dynamic_pointer_cast(device_locked_ptr)); - std::vector supported_formats; - - if (m_window_ptr.lock() ) + const Anvil::DeviceType& device_type (m_device_ptr->get_type() ); + bool is_offscreen_rendering_enabled(true); + auto khr_surface_entrypoints (m_create_info_ptr->get_instance_ptr()->get_extension_khr_surface_entrypoints() ); + const Anvil::MGPUDevice* mgpu_device_ptr (dynamic_cast(m_device_ptr)); + uint32_t n_physical_devices (0); + const Anvil::SGPUDevice* sgpu_device_ptr (dynamic_cast(m_device_ptr)); + std::vector supported_formats; + auto window_ptr 
(m_create_info_ptr->get_window_ptr() ); + + if (window_ptr != nullptr) { - std::shared_ptr window_locked_ptr(m_window_ptr); - const WindowPlatform window_platform (window_locked_ptr->get_platform() ); + const WindowPlatform window_platform(window_ptr->get_platform() ); is_offscreen_rendering_enabled = (window_platform == WINDOW_PLATFORM_DUMMY || - window_platform == WINDOW_PLATFORM_DUMMY_WITH_PNG_SNAPSHOTS) && - (m_type == RENDERING_SURFACE_TYPE_GENERAL); + window_platform == WINDOW_PLATFORM_DUMMY_WITH_PNG_SNAPSHOTS); if (is_offscreen_rendering_enabled) { - m_height = window_locked_ptr->get_height_at_creation_time(); - m_width = window_locked_ptr->get_width_at_creation_time (); + m_height = window_ptr->get_height_at_creation_time(); + m_width = window_ptr->get_width_at_creation_time (); } else { @@ -103,127 +107,165 @@ void Anvil::RenderingSurface::cache_surface_properties() /* In this case, width & height may change at run-time */ } - /* Retrieve general properties */ - uint32_t n_supported_formats (0); - uint32_t n_supported_presentation_modes(0); - VkResult result (VK_ERROR_INITIALIZATION_FAILED); + switch (device_type) + { + case Anvil::DeviceType::MULTI_GPU: n_physical_devices = mgpu_device_ptr->get_n_physical_devices(); break; + case Anvil::DeviceType::SINGLE_GPU: n_physical_devices = 1; break; - ANVIL_REDUNDANT_VARIABLE(result); + default: + { + anvil_assert_fail(); + } + } - std::shared_ptr physical_device_locked_ptr(sgpu_device_locked_ptr->get_physical_device()); + /* Retrieve general properties */ + uint32_t n_supported_formats (0); + uint32_t n_supported_presentation_modes(0); + VkResult result (VK_ERROR_INITIALIZATION_FAILED); - auto& result_caps = m_physical_device_capabilities[0]; + ANVIL_REDUNDANT_VARIABLE(result); - if (is_offscreen_rendering_enabled) + for (uint32_t n_physical_device = 0; + n_physical_device < n_physical_devices; + ++n_physical_device) { - result_caps.supported_composite_alpha_flags = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR; - result_caps.supported_transformations = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR; - result_caps.supported_usages = static_cast (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | - VK_IMAGE_USAGE_TRANSFER_SRC_BIT | - VK_IMAGE_USAGE_TRANSFER_DST_BIT | - VK_IMAGE_USAGE_STORAGE_BIT); - - goto end; - } + const Anvil::PhysicalDevice* physical_device_ptr = nullptr; - physical_device_vk = physical_device_locked_ptr->get_physical_device(); + switch (device_type) + { + case Anvil::DeviceType::MULTI_GPU: physical_device_ptr = mgpu_device_ptr->get_physical_device(n_physical_device); break; + case Anvil::DeviceType::SINGLE_GPU: physical_device_ptr = sgpu_device_ptr->get_physical_device(); break; - result = khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physical_device_vk, - m_surface, - &result_caps.capabilities); + default: + { + anvil_assert_fail(); + } + } - anvil_assert_vk_call_succeeded(result); + auto& result_caps = m_physical_device_capabilities[physical_device_ptr->get_device_group_device_index()]; - m_height = result_caps.capabilities.currentExtent.height; - m_width = result_caps.capabilities.currentExtent.width; + if (m_surface == VK_NULL_HANDLE) + { + result_caps.supported_composite_alpha_flags = Anvil::CompositeAlphaFlagBits::INHERIT_BIT_KHR; + result_caps.supported_transformations = Anvil::SurfaceTransformFlagBits::INHERIT_BIT_KHR; + result_caps.supported_usages = static_cast (Anvil::ImageUsageFlagBits::COLOR_ATTACHMENT_BIT | + Anvil::ImageUsageFlagBits::TRANSFER_SRC_BIT | + Anvil::ImageUsageFlagBits::TRANSFER_DST_BIT | + 
Anvil::ImageUsageFlagBits::STORAGE_BIT); - result_caps.supported_composite_alpha_flags = static_cast (result_caps.capabilities.supportedCompositeAlpha); - result_caps.supported_transformations = static_cast(result_caps.capabilities.supportedTransforms); - result_caps.supported_usages = static_cast (result_caps.capabilities.supportedUsageFlags); + result_caps.supported_presentation_modes.push_back(Anvil::PresentModeKHR::IMMEDIATE_KHR); - /* Retrieve a list of formats supported by the surface */ - result = khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceFormatsKHR(physical_device_vk, - m_surface, - &n_supported_formats, - nullptr /* pSurfaceFormats */); + continue; + } - anvil_assert (n_supported_formats > 0); - anvil_assert_vk_call_succeeded(result); + const VkPhysicalDevice physical_device_vk = physical_device_ptr->get_physical_device(); - supported_formats.resize(n_supported_formats); + result = khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physical_device_vk, + m_surface, + reinterpret_cast(&result_caps.capabilities) ); - result = khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceFormatsKHR(physical_device_vk, - m_surface, - &n_supported_formats, - &supported_formats.at(0) ); - anvil_assert_vk_call_succeeded(result); + anvil_assert_vk_call_succeeded(result); - for (unsigned int n_format = 0; - n_format < n_supported_formats; - ++n_format) - { - result_caps.supported_formats.push_back(RenderingSurfaceFormat(supported_formats[n_format]) ); - } + if (n_physical_device == 0) + { + m_height = result_caps.capabilities.current_extent.height; + m_width = result_caps.capabilities.current_extent.width; + } + else + { + anvil_assert(m_height == result_caps.capabilities.current_extent.height); + anvil_assert(m_width == result_caps.capabilities.current_extent.width); + } - /* Retrieve a list of supported presentation modes */ - result = khr_surface_entrypoints.vkGetPhysicalDeviceSurfacePresentModesKHR(physical_device_vk, - m_surface, - &n_supported_presentation_modes, - nullptr /* pPresentModes */); + result_caps.supported_composite_alpha_flags = result_caps.capabilities.supported_composite_alpha; + result_caps.supported_transformations = result_caps.capabilities.supported_transforms; + result_caps.supported_usages = result_caps.capabilities.supported_usage_flags; - anvil_assert (n_supported_presentation_modes > 0); - anvil_assert_vk_call_succeeded(result); + /* Retrieve a list of formats supported by the surface */ + result = khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceFormatsKHR(physical_device_vk, + m_surface, + &n_supported_formats, + nullptr /* pSurfaceFormats */); - result_caps.supported_presentation_modes.resize(n_supported_presentation_modes); + anvil_assert (n_supported_formats > 0); + anvil_assert_vk_call_succeeded(result); - result = khr_surface_entrypoints.vkGetPhysicalDeviceSurfacePresentModesKHR(physical_device_vk, - m_surface, - &n_supported_presentation_modes, - &result_caps.supported_presentation_modes[0]); - anvil_assert_vk_call_succeeded(result); + supported_formats.resize(n_supported_formats); - if (n_supported_presentation_modes > 0) - { - result_caps.supported_presentation_modes.resize(n_supported_presentation_modes); + result = khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceFormatsKHR(physical_device_vk, + m_surface, + &n_supported_formats, + reinterpret_cast(&supported_formats.at(0) )); + anvil_assert_vk_call_succeeded(result); + for (unsigned int n_format = 0; + n_format < n_supported_formats; + ++n_format) + { + 
result_caps.supported_formats.push_back(RenderingSurfaceFormat(supported_formats[n_format]) ); + } + + /* Retrieve a list of supported presentation modes + * + * NOTE: In case of mGPU devices, n_supported_presentation_modes may actually be 0 here for slave devices. + */ result = khr_surface_entrypoints.vkGetPhysicalDeviceSurfacePresentModesKHR(physical_device_vk, m_surface, &n_supported_presentation_modes, - &result_caps.supported_presentation_modes.at(0)); + nullptr /* pPresentModes */); + anvil_assert_vk_call_succeeded(result); - } -end: - ; + if (n_supported_presentation_modes > 0) + { + std::vector temp_storage(n_supported_presentation_modes); + + result_caps.supported_presentation_modes.resize(n_supported_presentation_modes); + + result = khr_surface_entrypoints.vkGetPhysicalDeviceSurfacePresentModesKHR(physical_device_vk, + m_surface, + &n_supported_presentation_modes, + &temp_storage.at(0) ); + anvil_assert_vk_call_succeeded(result); + + for (uint32_t n_presentation_mode = 0; + n_presentation_mode < static_cast(temp_storage.size() ); + ++n_presentation_mode) + { + result_caps.supported_presentation_modes.at(n_presentation_mode) = static_cast(temp_storage.at(n_presentation_mode) ); + } + } + } } /* Please see header for specification */ -std::shared_ptr Anvil::RenderingSurface::create(std::weak_ptr in_instance_ptr, - std::weak_ptr in_device_ptr, - std::shared_ptr in_window_ptr) +Anvil::RenderingSurfaceUniquePtr Anvil::RenderingSurface::create(Anvil::RenderingSurfaceCreateInfoUniquePtr in_create_info_ptr) { - std::shared_ptr result_ptr; - bool success = false; + RenderingSurfaceUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( - new Anvil::RenderingSurface(in_instance_ptr, - in_device_ptr, - in_window_ptr, - &success) ); + new Anvil::RenderingSurface( + std::move(in_create_info_ptr) + ) + ); - if (!success) + if (result_ptr != nullptr) { - result_ptr.reset(); + if (!result_ptr->init() ) + { + result_ptr.reset(); + } } return result_ptr; } /* Please see header for specification */ -bool Anvil::RenderingSurface::get_capabilities(std::weak_ptr in_physical_device_ptr, - VkSurfaceCapabilitiesKHR* out_surface_caps_ptr) const +bool Anvil::RenderingSurface::get_capabilities(const Anvil::PhysicalDevice* in_physical_device_ptr, + Anvil::SurfaceCapabilities* out_surface_caps_ptr) const { - auto caps_iterator = m_physical_device_capabilities.find(0); + auto caps_iterator = m_physical_device_capabilities.find(in_physical_device_ptr->get_device_group_device_index()); bool result = false; if (caps_iterator != m_physical_device_capabilities.end() ) @@ -237,10 +279,10 @@ bool Anvil::RenderingSurface::get_capabilities(std::weak_ptr in_physical_device_ptr, - const std::vector** out_result_ptr) const +bool Anvil::RenderingSurface::get_queue_families_with_present_support(const Anvil::PhysicalDevice* in_physical_device_ptr, + const std::vector** out_result_ptr) const { - auto caps_iterator = m_physical_device_capabilities.find(0); + auto caps_iterator = m_physical_device_capabilities.find(in_physical_device_ptr->get_device_group_device_index()); bool result = false; if (caps_iterator != m_physical_device_capabilities.end() ) @@ -254,10 +296,10 @@ bool Anvil::RenderingSurface::get_queue_families_with_present_support(std::weak_ } /* Please see header for specification */ -bool Anvil::RenderingSurface::get_supported_composite_alpha_flags(std::weak_ptr in_physical_device_ptr, - VkCompositeAlphaFlagsKHR* out_result_ptr) const +bool 
Anvil::RenderingSurface::get_supported_composite_alpha_flags(const Anvil::PhysicalDevice* in_physical_device_ptr, + Anvil::CompositeAlphaFlags* out_result_ptr) const { - auto caps_iterator = m_physical_device_capabilities.find(0); + auto caps_iterator = m_physical_device_capabilities.find(in_physical_device_ptr->get_device_group_device_index()); bool result = false; if (caps_iterator != m_physical_device_capabilities.end() ) @@ -271,10 +313,10 @@ bool Anvil::RenderingSurface::get_supported_composite_alpha_flags(std::weak_ptr< } /* Please see header for specification */ -bool Anvil::RenderingSurface::get_supported_transformations(std::weak_ptr in_physical_device_ptr, - VkSurfaceTransformFlagsKHR* out_result_ptr) const +bool Anvil::RenderingSurface::get_supported_transformations(const Anvil::PhysicalDevice* in_physical_device_ptr, + Anvil::SurfaceTransformFlags* out_result_ptr) const { - auto caps_iterator = m_physical_device_capabilities.find(0); + auto caps_iterator = m_physical_device_capabilities.find(in_physical_device_ptr->get_device_group_device_index()); bool result = false; if (caps_iterator != m_physical_device_capabilities.end() ) @@ -288,10 +330,10 @@ bool Anvil::RenderingSurface::get_supported_transformations(std::weak_ptr in_physical_device_ptr, - VkImageUsageFlags* out_result_ptr) const +bool Anvil::RenderingSurface::get_supported_usages(const Anvil::PhysicalDevice* in_physical_device_ptr, + Anvil::ImageUsageFlags* out_result_ptr) const { - auto caps_iterator = m_physical_device_capabilities.find(0); + auto caps_iterator = m_physical_device_capabilities.find(in_physical_device_ptr->get_device_group_device_index()); bool result = false; if (caps_iterator != m_physical_device_capabilities.end() ) @@ -307,76 +349,157 @@ bool Anvil::RenderingSurface::get_supported_usages(std::weak_ptr device_locked_ptr (m_device_ptr); - bool init_successful (false); - uint32_t n_physical_devices (1); - VkResult result (VK_SUCCESS); - const WindowPlatform window_platform ((m_type == RENDERING_SURFACE_TYPE_GENERAL) ? 
m_window_ptr.lock()->get_platform() - : WINDOW_PLATFORM_UNKNOWN); + const Anvil::DeviceType& device_type (m_device_ptr->get_type() ); + bool init_successful (false); + auto instance_ptr (m_create_info_ptr->get_instance_ptr() ); + uint32_t n_physical_devices(0); + VkResult result (VK_SUCCESS); + WindowPlatform window_platform = WindowPlatform::WINDOW_PLATFORM_DUMMY; + auto *windowPtr = m_create_info_ptr->get_window_ptr(); + if (windowPtr) + window_platform = (windowPtr->get_platform()); - const bool is_dummy_window_platform(window_platform == WINDOW_PLATFORM_DUMMY || - window_platform == WINDOW_PLATFORM_DUMMY_WITH_PNG_SNAPSHOTS); + const bool is_dummy_window_platform(window_platform == WINDOW_PLATFORM_DUMMY || + window_platform == WINDOW_PLATFORM_DUMMY_WITH_PNG_SNAPSHOTS); - for (uint32_t n_physical_device = 0; - n_physical_device < n_physical_devices; - ++n_physical_device) + + switch (device_type) { - m_physical_device_capabilities[n_physical_device] = PhysicalDeviceCapabilities(); + case Anvil::DeviceType::MULTI_GPU: + { + const Anvil::MGPUDevice* mgpu_device_ptr(dynamic_cast(m_device_ptr) ); + + n_physical_devices = mgpu_device_ptr->get_n_physical_devices(); + + break; + } + + case Anvil::DeviceType::SINGLE_GPU: + { + n_physical_devices = 1; + + break; + } + + default: + { + anvil_assert_fail(); + + goto end; + } } if (!is_dummy_window_platform) { - std::shared_ptr instance_locked_ptr(m_instance_ptr); + auto window_ptr = m_create_info_ptr->get_window_ptr(); + + auto *generic_ptr = dynamic_cast(window_ptr); + if(generic_ptr != nullptr) + { + auto type = generic_ptr->get_type(); + auto handled = false; +#ifdef ANVIL_INCLUDE_HEADLESS_WINDOW_SYSTEM_SUPPORT + if (type == Anvil::WindowGeneric::Type::Windowless) { + auto vkInstance = instance_ptr->get_instance_vk(); + auto &entrypoints = instance_ptr->get_extension_ext_headless_surface_entrypoints(); + VkHeadlessSurfaceCreateInfoEXT surface_create_info; + surface_create_info.flags = 0; + surface_create_info.pNext = nullptr; + surface_create_info.sType = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT; + result = entrypoints.vkCreateHeadlessSurfaceEXT(vkInstance, &surface_create_info, nullptr, &m_surface); + handled = true; + } +#endif +#ifdef _WIN32 + if (!handled) { + if(type != Anvil::WindowGeneric::Type::Win32) + result = VK_ERROR_INITIALIZATION_FAILED; + else { + VkWin32SurfaceCreateInfoKHR surface_create_info; + + surface_create_info.flags = 0; + surface_create_info.hinstance = GetModuleHandle(nullptr); + surface_create_info.hwnd = static_cast(generic_ptr->get_generic_handle().win32Window); + surface_create_info.pNext = nullptr; + surface_create_info.sType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR; + + result = instance_ptr->get_extension_khr_win32_surface_entrypoints().vkCreateWin32SurfaceKHR(instance_ptr->get_instance_vk(), &surface_create_info, nullptr, /* pAllocator */ + &m_surface); + } + } +#else + if (!handled) { + if(type == Anvil::WindowGeneric::Type::Xcb) { + VkXcbSurfaceCreateInfoKHR surface_create_info; + + surface_create_info.flags = 0; + surface_create_info.window = generic_ptr->get_generic_handle().xcbWindow; + surface_create_info.connection = static_cast(window_ptr->get_connection()); + surface_create_info.pNext = nullptr; + surface_create_info.sType = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR; + + result = instance_ptr->get_extension_khr_xcb_surface_entrypoints().vkCreateXcbSurfaceKHR(instance_ptr->get_instance_vk(), &surface_create_info, nullptr, /* pAllocator */ + &m_surface); + } + else if(type == 
Anvil::WindowGeneric::Type::Wayland) { + auto vkInstance = instance_ptr->get_instance_vk(); + auto &entrypoints = instance_ptr->get_extension_khr_wayland_surface_entrypoints(); + VkWaylandSurfaceCreateInfoKHR surface_create_info; + surface_create_info.flags = 0; + surface_create_info.display = reinterpret_cast(window_ptr->get_connection()); + surface_create_info.surface = reinterpret_cast(generic_ptr->get_generic_handle().waylandWindow); + surface_create_info.pNext = nullptr; + surface_create_info.sType = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR; + result = entrypoints.vkCreateWaylandSurfaceKHR(vkInstance, &surface_create_info, nullptr, /* pAllocator */ + &m_surface); + } + else + result = VK_ERROR_INITIALIZATION_FAILED; + } +#endif + } + else + { + #if defined(ANVIL_INCLUDE_WIN3264_WINDOW_SYSTEM_SUPPORT) && defined(_WIN32) + { + VkWin32SurfaceCreateInfoKHR surface_create_info; + + surface_create_info.flags = 0; + surface_create_info.hinstance = GetModuleHandle(nullptr); + surface_create_info.hwnd = window_ptr->get_handle(); + surface_create_info.pNext = nullptr; + surface_create_info.sType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR; + + result = instance_ptr->get_extension_khr_win32_surface_entrypoints().vkCreateWin32SurfaceKHR(instance_ptr->get_instance_vk(), + &surface_create_info, + nullptr, /* pAllocator */ + &m_surface); + } + #endif + #if defined(ANVIL_INCLUDE_XCB_WINDOW_SYSTEM_SUPPORT) && !defined(_WIN32) + { + VkXcbSurfaceCreateInfoKHR surface_create_info; + + surface_create_info.flags = 0; + surface_create_info.window = window_ptr->get_handle(); + surface_create_info.connection = static_cast(window_ptr->get_connection()); + surface_create_info.pNext = nullptr; + surface_create_info.sType = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR; + + result = instance_ptr->get_extension_khr_xcb_surface_entrypoints().vkCreateXcbSurfaceKHR(instance_ptr->get_instance_vk(), + &surface_create_info, + nullptr, /* pAllocator */ + &m_surface); + } + #endif + } - #if defined(ANVIL_INCLUDE_WIN3264_WINDOW_SYSTEM_SUPPORT) || defined(ANVIL_INCLUDE_XCB_WINDOW_SYSTEM_SUPPORT) + anvil_assert_vk_call_succeeded(result); + if (is_vk_call_successful(result) ) { - if (m_type == RENDERING_SURFACE_TYPE_GENERAL) - { - #ifdef _WIN32 - { - VkWin32SurfaceCreateInfoKHR surface_create_info; - - surface_create_info.flags = 0; - surface_create_info.hinstance = GetModuleHandle(nullptr); - surface_create_info.hwnd = m_window_ptr.lock()->get_handle(); - surface_create_info.pNext = nullptr; - surface_create_info.sType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR; - - result = m_instance_ptr->get_extension_khr_win32_surface_entrypoints().vkCreateWin32SurfaceKHR(instance_locked_ptr->get_instance_vk(), - &surface_create_info, - nullptr, /* pAllocator */ - &m_surface); - } - #else - { - VkXcbSurfaceCreateInfoKHR surface_create_info; - - surface_create_info.flags = 0; - surface_create_info.window = m_window_ptr.lock()->get_handle(); - surface_create_info.connection = static_cast(m_window_ptr.lock()->get_connection()); - surface_create_info.pNext = nullptr; - surface_create_info.sType = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR; - - result = m_instance_ptr->get_extension_khr_xcb_surface_entrypoints().vkCreateXcbSurfaceKHR(instance_locked_ptr->get_instance_vk(), - &surface_create_info, - nullptr, /* pAllocator */ - &m_surface); - } - #endif - } - else - { - anvil_assert_fail(); - } - - anvil_assert_vk_call_succeeded(result); - if (is_vk_call_successful(result) ) - { - set_vk_handle(m_surface); - } + 
set_vk_handle(m_surface); } - #endif } else { @@ -387,39 +510,65 @@ bool Anvil::RenderingSurface::init() { /* Is there at least one queue fam that can be used together with at least one physical device associated with * the logical device to present using the surface we've just spawned and the physical device user has specified? */ - const auto& queue_families (device_locked_ptr->get_physical_device_queue_families() ); - std::shared_ptr sgpu_device_locked_ptr(std::dynamic_pointer_cast(device_locked_ptr) ); - auto physical_device_locked_ptr = sgpu_device_locked_ptr->get_physical_device().lock(); - auto physical_device_caps_ptr = &m_physical_device_capabilities[0]; + const auto& queue_families(m_device_ptr->get_physical_device_queue_families() ); - switch (m_type) + for (uint32_t n_physical_device = 0; + n_physical_device < n_physical_devices; + ++n_physical_device) { - case Anvil::RENDERING_SURFACE_TYPE_GENERAL: + Anvil::RenderingSurface::PhysicalDeviceCapabilities* physical_device_caps_ptr = nullptr; + const Anvil::PhysicalDevice* physical_device_ptr = nullptr; + + switch (device_type) { - for (uint32_t n_queue_family = 0; - n_queue_family < static_cast(queue_families.size() ); - ++n_queue_family) + case Anvil::DeviceType::MULTI_GPU: { - VkBool32 is_presentation_supported = VK_FALSE; + const Anvil::MGPUDevice* mgpu_device_ptr(dynamic_cast(m_device_ptr) ); - result = vkGetPhysicalDeviceSurfaceSupportKHR(physical_device_locked_ptr->get_physical_device(), - n_queue_family, - m_surface, - &is_presentation_supported); + physical_device_ptr = mgpu_device_ptr->get_physical_device(n_physical_device); + physical_device_caps_ptr = &m_physical_device_capabilities[physical_device_ptr->get_device_group_device_index()]; - if (is_vk_call_successful(result) && - is_presentation_supported == VK_TRUE) - { - physical_device_caps_ptr->present_capable_queue_fams.push_back(n_queue_family); - } + break; + } + + case Anvil::DeviceType::SINGLE_GPU: + { + const Anvil::SGPUDevice* sgpu_device_ptr(dynamic_cast(m_device_ptr) ); + + physical_device_ptr = sgpu_device_ptr->get_physical_device(); + physical_device_caps_ptr = &m_physical_device_capabilities[physical_device_ptr->get_device_group_device_index()]; + + break; } - break; + default: + { + anvil_assert_fail(); + + goto end; + } } - default: + for (uint32_t n_queue_family = 0; + n_queue_family < static_cast(queue_families.size() ); + ++n_queue_family) { - anvil_assert_fail(); + VkBool32 is_presentation_supported = VK_FALSE; + + { + const auto& khr_surface_entrypoints = instance_ptr->get_extension_khr_surface_entrypoints(); + + result = khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceSupportKHR(physical_device_ptr->get_physical_device(), + n_queue_family, + m_surface, + &is_presentation_supported); + } + + if (is_vk_call_successful(result) && + is_presentation_supported == VK_TRUE) + { + physical_device_caps_ptr->present_capable_queue_fams.push_back(n_queue_family); + } } } } @@ -430,14 +579,44 @@ bool Anvil::RenderingSurface::init() n_physical_device < n_physical_devices; ++n_physical_device) { - std::shared_ptr sgpu_device_locked_ptr(std::dynamic_pointer_cast(device_locked_ptr) ); - - if (sgpu_device_locked_ptr->get_n_universal_queues() > 0) + switch (device_type) { - std::shared_ptr physical_device_locked_ptr = sgpu_device_locked_ptr->get_physical_device().lock(); - auto& result_caps = m_physical_device_capabilities[0]; + case Anvil::DeviceType::MULTI_GPU: + { + const Anvil::MGPUDevice* mgpu_device_ptr(dynamic_cast(m_device_ptr) ); + + if 
(mgpu_device_ptr->get_n_universal_queues() > 0) + { + const Anvil::PhysicalDevice* physical_device_ptr = mgpu_device_ptr->get_physical_device(n_physical_device); + auto& result_caps = m_physical_device_capabilities[physical_device_ptr->get_device_group_device_index()]; + + result_caps.present_capable_queue_fams.push_back(mgpu_device_ptr->get_universal_queue(0)->get_queue_family_index() ); + } + + break; + } + + case Anvil::DeviceType::SINGLE_GPU: + { + const Anvil::SGPUDevice* sgpu_device_ptr(dynamic_cast(m_device_ptr) ); + + if (sgpu_device_ptr->get_n_universal_queues() > 0) + { + const Anvil::PhysicalDevice* physical_device_ptr = sgpu_device_ptr->get_physical_device(); + auto& result_caps = m_physical_device_capabilities[physical_device_ptr->get_device_group_device_index()]; - result_caps.present_capable_queue_fams.push_back(sgpu_device_locked_ptr->get_universal_queue(0)->get_queue_family_index() ); + result_caps.present_capable_queue_fams.push_back(sgpu_device_ptr->get_universal_queue(0)->get_queue_family_index() ); + } + + break; + } + + default: + { + anvil_assert_fail(); + + goto end; + } } } @@ -458,15 +637,16 @@ bool Anvil::RenderingSurface::init() init_successful = true; } +end: return init_successful; } /* Please see header for specification */ -bool Anvil::RenderingSurface::is_compatible_with_image_format(std::weak_ptr in_physical_device_ptr, - VkFormat in_image_format, - bool* out_result_ptr) const +bool Anvil::RenderingSurface::is_compatible_with_image_format(const Anvil::PhysicalDevice* in_physical_device_ptr, + Anvil::Format in_image_format, + bool* out_result_ptr) const { - auto caps_iterator = m_physical_device_capabilities.find(0); + auto caps_iterator = m_physical_device_capabilities.find(in_physical_device_ptr->get_device_group_device_index()); bool result = false; if (caps_iterator != m_physical_device_capabilities.end() ) @@ -481,11 +661,11 @@ bool Anvil::RenderingSurface::is_compatible_with_image_format(std::weak_ptr in_physical_device_ptr, - VkPresentModeKHR in_presentation_mode, - bool* out_result_ptr) const +bool Anvil::RenderingSurface::supports_presentation_mode(const Anvil::PhysicalDevice* in_physical_device_ptr, + Anvil::PresentModeKHR in_presentation_mode, + bool* out_result_ptr) const { - auto caps_iterator = m_physical_device_capabilities.find(0); + auto caps_iterator = m_physical_device_capabilities.find(in_physical_device_ptr->get_device_group_device_index()); bool result = false; if (caps_iterator != m_physical_device_capabilities.end() ) @@ -497,4 +677,119 @@ bool Anvil::RenderingSurface::supports_presentation_mode(std::weak_ptrget_type () ); + auto instance_ptr (m_create_info_ptr->get_instance_ptr () ); + auto khr_surface_entrypoints (instance_ptr->get_extension_khr_surface_entrypoints() ); + const Anvil::MGPUDevice* mgpu_device_ptr (dynamic_cast (m_device_ptr)); + uint32_t n_physical_devices (0); + const Anvil::SGPUDevice* sgpu_device_ptr (dynamic_cast(m_device_ptr)); + auto window_ptr (m_create_info_ptr->get_window_ptr () ); + + if (window_ptr != nullptr) + { + const WindowPlatform window_platform(window_ptr->get_platform() ); + + if (window_platform == WINDOW_PLATFORM_DUMMY || + window_platform == WINDOW_PLATFORM_DUMMY_WITH_PNG_SNAPSHOTS) + { + /* Nothing to update - off-screen rendering is active. 
*/ + goto end; + } + else + { + /* In this case, width & height may change at run-time */ + } + } + else + { + /* In this case, width & height may change at run-time */ + } + + switch (device_type) + { + case Anvil::DeviceType::MULTI_GPU: n_physical_devices = mgpu_device_ptr->get_n_physical_devices(); break; + case Anvil::DeviceType::SINGLE_GPU: n_physical_devices = 1; break; + + default: + { + anvil_assert_fail(); + } + } + + /* Retrieve general properties */ + for (uint32_t n_physical_device = 0; + n_physical_device < n_physical_devices; + ++n_physical_device) + { + const Anvil::PhysicalDevice* physical_device_ptr = nullptr; + VkResult result_vk; + Anvil::SurfaceCapabilities surface_caps; + + ANVIL_REDUNDANT_VARIABLE_CONST(result_vk); + + switch (device_type) + { + case Anvil::DeviceType::MULTI_GPU: physical_device_ptr = mgpu_device_ptr->get_physical_device(n_physical_device); break; + case Anvil::DeviceType::SINGLE_GPU: physical_device_ptr = sgpu_device_ptr->get_physical_device(); break; + + default: + { + anvil_assert_fail(); + } + } + + if (m_surface == VK_NULL_HANDLE) + { + /* Nothing to update */ + goto end; + } + + const VkPhysicalDevice physical_device_vk = physical_device_ptr->get_physical_device(); + + result_vk = khr_surface_entrypoints.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physical_device_vk, + m_surface, + reinterpret_cast(&surface_caps) ); + + anvil_assert_vk_call_succeeded(result_vk); + + if (n_physical_device == 0) + { + m_height = surface_caps.current_extent.height; + m_width = surface_caps.current_extent.width; + + if(m_width == m_height && m_width == std::numeric_limits::max()) + { + // The surface is not limited to a specific size, so we'll just use the window framebuffer size + auto *generic_ptr = dynamic_cast(window_ptr); + if(generic_ptr != nullptr) + { + m_height = generic_ptr->get_framebuffer_height(); + m_width = generic_ptr->get_framebuffer_width(); + + // Clamp to max extents + if(m_height > surface_caps.max_image_extent.height) + m_height = surface_caps.max_image_extent.height; + if(m_width > surface_caps.max_image_extent.width) + m_width = surface_caps.max_image_extent.width; + } + else + { + m_height = surface_caps.min_image_extent.height; + m_width = surface_caps.min_image_extent.width; + } + } + } + else + { + anvil_assert(m_height == surface_caps.current_extent.height); + anvil_assert(m_width == surface_caps.current_extent.width); + } + } + +end: + ; +} diff --git a/src/wrappers/sampler.cpp b/src/wrappers/sampler.cpp index 69c8b85f..418f3392 100644 --- a/src/wrappers/sampler.cpp +++ b/src/wrappers/sampler.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -22,141 +22,141 @@ #include "misc/debug.h" #include "misc/object_tracker.h" +#include "misc/sampler_create_info.h" +#include "misc/struct_chainer.h" #include "wrappers/device.h" #include "wrappers/physical_device.h" #include "wrappers/sampler.h" +#include "wrappers/sampler_ycbcr_conversion.h" /** Please see header for specification */ -Anvil::Sampler::Sampler(std::weak_ptr in_device_ptr, - VkFilter in_mag_filter, - VkFilter in_min_filter, - VkSamplerMipmapMode in_mipmap_mode, - VkSamplerAddressMode in_address_mode_u, - VkSamplerAddressMode in_address_mode_v, - VkSamplerAddressMode in_address_mode_w, - float in_lod_bias, - float in_max_anisotropy, - bool in_compare_enable, - VkCompareOp in_compare_op, - float in_min_lod, - float in_max_lod, - VkBorderColor in_border_color, - bool in_use_unnormalized_coordinates) - :DebugMarkerSupportProvider (in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT), - m_address_mode_u (in_address_mode_u), - m_address_mode_v (in_address_mode_v), - m_address_mode_w (in_address_mode_w), - m_border_color (in_border_color), - m_compare_enable (in_compare_enable), - m_compare_op (in_compare_op), - m_device_ptr (in_device_ptr), - m_lod_bias (in_lod_bias), - m_mag_filter (in_mag_filter), - m_max_anisotropy (in_max_anisotropy), - m_max_lod (in_max_lod), - m_min_filter (in_min_filter), - m_mipmap_mode (in_mipmap_mode), - m_sampler (VK_NULL_HANDLE), - m_use_unnormalized_coordinates(in_use_unnormalized_coordinates) +Anvil::Sampler::Sampler(Anvil::SamplerCreateInfoUniquePtr in_create_info_ptr) + :DebugMarkerSupportProvider(in_create_info_ptr->get_device(), + Anvil::ObjectType::SAMPLER), + MTSafetySupportProvider (Anvil::Utils::convert_mt_safety_enum_to_boolean(in_create_info_ptr->get_mt_safety(), + in_create_info_ptr->get_device () )) { - std::shared_ptr device_locked_ptr (m_device_ptr); - VkSamplerCreateInfo sampler_create_info; - VkResult result (VK_ERROR_INITIALIZATION_FAILED); - - ANVIL_REDUNDANT_VARIABLE(result); - - /* Spawn a new sampler */ - sampler_create_info.addressModeU = in_address_mode_u; - sampler_create_info.addressModeV = in_address_mode_v; - sampler_create_info.addressModeW = in_address_mode_w; - sampler_create_info.anisotropyEnable = static_cast((in_max_anisotropy > 1.0f) ? VK_TRUE : VK_FALSE); - sampler_create_info.borderColor = in_border_color; - sampler_create_info.compareEnable = static_cast(in_compare_enable ? VK_TRUE : VK_FALSE); - sampler_create_info.compareOp = in_compare_op; - sampler_create_info.flags = 0; - sampler_create_info.magFilter = in_mag_filter; - sampler_create_info.maxAnisotropy = in_max_anisotropy; - sampler_create_info.maxLod = in_max_lod; - sampler_create_info.minFilter = in_min_filter; - sampler_create_info.minLod = in_min_lod; - sampler_create_info.mipLodBias = in_lod_bias; - sampler_create_info.mipmapMode = in_mipmap_mode; - sampler_create_info.pNext = nullptr; - sampler_create_info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO; - sampler_create_info.unnormalizedCoordinates = static_cast(in_use_unnormalized_coordinates ? 
VK_TRUE : VK_FALSE); - - result = vkCreateSampler(device_locked_ptr->get_device_vk(), - &sampler_create_info, - nullptr, /* pAllocator */ - &m_sampler); - - anvil_assert_vk_call_succeeded(result); - if (is_vk_call_successful(result) ) - { - set_vk_handle(m_sampler); - } + m_create_info_ptr = std::move(in_create_info_ptr); /* Register the event instance */ - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_SAMPLER, - this); + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::SAMPLER, + this); } /* Please see header for specification */ Anvil::Sampler::~Sampler() { - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_SAMPLER, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::SAMPLER, this); if (m_sampler != VK_NULL_HANDLE) { - std::shared_ptr device_locked_ptr(m_device_ptr); - - vkDestroySampler(device_locked_ptr->get_device_vk(), - m_sampler, - nullptr /* pAllocator */); + lock(); + { + Anvil::Vulkan::vkDestroySampler(m_device_ptr->get_device_vk(), + m_sampler, + nullptr /* pAllocator */); + } + unlock(); m_sampler = VK_NULL_HANDLE; } } /* Please see header for specification */ -std::shared_ptr Anvil::Sampler::create(std::weak_ptr in_device_ptr, - VkFilter in_mag_filter, - VkFilter in_min_filter, - VkSamplerMipmapMode in_mipmap_mode, - VkSamplerAddressMode in_address_mode_u, - VkSamplerAddressMode in_address_mode_v, - VkSamplerAddressMode in_address_mode_w, - float in_lod_bias, - float in_max_anisotropy, - bool in_compare_enable, - VkCompareOp in_compare_op, - float in_min_lod, - float in_max_lod, - VkBorderColor in_border_color, - bool in_use_unnormalized_coordinates) +Anvil::SamplerUniquePtr Anvil::Sampler::create(Anvil::SamplerCreateInfoUniquePtr in_create_info_ptr) { - std::shared_ptr device_locked_ptr(in_device_ptr); - std::shared_ptr result_ptr; + SamplerUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( - new Anvil::Sampler(in_device_ptr, - in_mag_filter, - in_min_filter, - in_mipmap_mode, - in_address_mode_u, - in_address_mode_v, - in_address_mode_w, - in_lod_bias, - in_max_anisotropy, - in_compare_enable, - in_compare_op, - in_min_lod, - in_max_lod, - in_border_color, - in_use_unnormalized_coordinates) + new Anvil::Sampler(std::move(in_create_info_ptr) ) ); + if (result_ptr != nullptr) + { + if (!result_ptr->init() ) + { + result_ptr.reset(); + } + } + return result_ptr; +} + +bool Anvil::Sampler::init() +{ + VkResult result (VK_ERROR_INITIALIZATION_FAILED); + const auto sampler_reduction_mode (m_create_info_ptr->get_sampler_reduction_mode () ); + const auto& sampler_ycbcr_conversion_ptr(m_create_info_ptr->get_sampler_ycbcr_conversion_ptr() ); + Anvil::StructChainer struct_chainer; + + ANVIL_REDUNDANT_VARIABLE(result); + + /* Spawn a new sampler */ + { + const auto max_anisotropy = m_create_info_ptr->get_max_anisotropy(); + VkSamplerCreateInfo sampler_create_info; + + sampler_create_info.addressModeU = static_cast(m_create_info_ptr->get_address_mode_u() ); + sampler_create_info.addressModeV = static_cast(m_create_info_ptr->get_address_mode_v() ); + sampler_create_info.addressModeW = static_cast(m_create_info_ptr->get_address_mode_w() ); + sampler_create_info.anisotropyEnable = (max_anisotropy > 1.0f) ? VK_TRUE : VK_FALSE; + sampler_create_info.borderColor = static_cast(m_create_info_ptr->get_border_color() ); + sampler_create_info.compareEnable = m_create_info_ptr->is_compare_enabled() ? 
VK_TRUE : VK_FALSE; + sampler_create_info.compareOp = static_cast(m_create_info_ptr->get_compare_op() ); + sampler_create_info.flags = 0; + sampler_create_info.magFilter = static_cast(m_create_info_ptr->get_mag_filter() ); + sampler_create_info.maxAnisotropy = max_anisotropy; + sampler_create_info.maxLod = m_create_info_ptr->get_max_lod(); + sampler_create_info.minFilter = static_cast(m_create_info_ptr->get_min_filter() ); + sampler_create_info.minLod = m_create_info_ptr->get_min_lod(); + sampler_create_info.mipLodBias = m_create_info_ptr->get_lod_bias(); + sampler_create_info.mipmapMode = static_cast(m_create_info_ptr->get_mipmap_mode() ); + sampler_create_info.pNext = nullptr; + sampler_create_info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO; + sampler_create_info.unnormalizedCoordinates = m_create_info_ptr->uses_unnormalized_coordinates() ? VK_TRUE : VK_FALSE; + + struct_chainer.append_struct(sampler_create_info); + } + + if (sampler_reduction_mode != Anvil::SamplerReductionMode::UNKNOWN) + { + VkSamplerReductionModeCreateInfoEXT srm_create_info; + + srm_create_info.pNext = nullptr; + srm_create_info.reductionMode = static_cast(sampler_reduction_mode); + srm_create_info.sType = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT; + + struct_chainer.append_struct(srm_create_info); + } + + if (sampler_ycbcr_conversion_ptr != nullptr) + { + VkSamplerYcbcrConversionInfoKHR conversion_info; + + conversion_info.conversion = sampler_ycbcr_conversion_ptr->get_sampler_ycbcr_conversion_vk(); + conversion_info.pNext = nullptr; + conversion_info.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR; + + struct_chainer.append_struct(conversion_info); + } + + { + auto chain_ptr = struct_chainer.create_chain(); + + result = Anvil::Vulkan::vkCreateSampler(m_device_ptr->get_device_vk(), + chain_ptr->get_root_struct(), + nullptr, /* pAllocator */ + &m_sampler); + } + + anvil_assert_vk_call_succeeded(result); + if (is_vk_call_successful(result) ) + { + set_vk_handle(m_sampler); + } + + /* All done */ + return is_vk_call_successful(result); } \ No newline at end of file diff --git a/src/wrappers/sampler_ycbcr_conversion.cpp b/src/wrappers/sampler_ycbcr_conversion.cpp new file mode 100644 index 00000000..5c1e51f5 --- /dev/null +++ b/src/wrappers/sampler_ycbcr_conversion.cpp @@ -0,0 +1,105 @@ +// +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+// + +#include "misc/sampler_ycbcr_conversion_create_info.h" +#include "wrappers/device.h" +#include "wrappers/sampler_ycbcr_conversion.h" + +Anvil::SamplerYCbCrConversion::SamplerYCbCrConversion(Anvil::SamplerYCbCrConversionCreateInfoUniquePtr in_create_info_ptr) + :Anvil::DebugMarkerSupportProvider(in_create_info_ptr->get_device(), + Anvil::ObjectType::SAMPLER_YCBCR_CONVERSION), + Anvil::MTSafetySupportProvider (Anvil::Utils::convert_mt_safety_enum_to_boolean(in_create_info_ptr->get_mt_safety(), + in_create_info_ptr->get_device () )), + m_sampler_ycbcr_conversion_vk (VK_NULL_HANDLE) +{ + m_create_info_ptr = std::move(in_create_info_ptr); +} + +Anvil::SamplerYCbCrConversion::~SamplerYCbCrConversion() +{ + if (m_sampler_ycbcr_conversion_vk != VK_NULL_HANDLE) + { + auto device_ptr = m_create_info_ptr->get_device (); + auto& entrypoints = device_ptr->get_extension_khr_sampler_ycbcr_conversion_entrypoints(); + + lock(); + { + entrypoints.vkDestroySamplerYcbcrConversionKHR(device_ptr->get_device_vk(), + m_sampler_ycbcr_conversion_vk, + nullptr); /* pAllocator */ + } + unlock(); + + m_sampler_ycbcr_conversion_vk = VK_NULL_HANDLE; + } +} + +Anvil::SamplerYCbCrConversionUniquePtr Anvil::SamplerYCbCrConversion::create(Anvil::SamplerYCbCrConversionCreateInfoUniquePtr in_create_info_ptr) +{ + Anvil::SamplerYCbCrConversionUniquePtr result_ptr(nullptr, + std::default_delete() ); + + result_ptr.reset( + new Anvil::SamplerYCbCrConversion(std::move(in_create_info_ptr) ) + ); + + anvil_assert(result_ptr != nullptr); + if (result_ptr != nullptr) + { + if (!result_ptr->init() ) + { + result_ptr.reset(); + } + } + + return result_ptr; +} + +bool Anvil::SamplerYCbCrConversion::init() +{ + VkSamplerYcbcrConversionCreateInfoKHR create_info; + const auto& entrypoints = m_create_info_ptr->get_device()->get_extension_khr_sampler_ycbcr_conversion_entrypoints(); + bool result = false; + + create_info.chromaFilter = static_cast(m_create_info_ptr->get_chroma_filter() ); + create_info.components = m_create_info_ptr->get_components().get_vk (); + create_info.forceExplicitReconstruction = m_create_info_ptr->should_force_explicit_reconstruction (); + create_info.format = static_cast(m_create_info_ptr->get_format () ); + create_info.pNext = nullptr; + create_info.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR; + create_info.xChromaOffset = static_cast (m_create_info_ptr->get_x_chroma_offset () ); + create_info.ycbcrModel = static_cast(m_create_info_ptr->get_ycbcr_model_conversion() ); + create_info.ycbcrRange = static_cast (m_create_info_ptr->get_ycbcr_range () ); + create_info.yChromaOffset = static_cast (m_create_info_ptr->get_y_chroma_offset () ); + + if (is_vk_call_successful(entrypoints.vkCreateSamplerYcbcrConversionKHR(m_device_ptr->get_device_vk(), + &create_info, + nullptr, /* pAllocator */ + &m_sampler_ycbcr_conversion_vk) )) + { + result = true; + + set_vk_handle(m_sampler_ycbcr_conversion_vk); + } + + return result; +} diff --git a/src/wrappers/semaphore.cpp b/src/wrappers/semaphore.cpp index 0e5095d0..fb29e040 100644 --- a/src/wrappers/semaphore.cpp +++ b/src/wrappers/semaphore.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -21,54 +21,263 @@ // #include "misc/debug.h" +#include "misc/external_handle.h" #include "misc/object_tracker.h" +#include "misc/semaphore_create_info.h" +#include "misc/struct_chainer.h" #include "wrappers/device.h" #include "wrappers/semaphore.h" + /* Please see header for specification */ -Anvil::Semaphore::Semaphore(std::weak_ptr in_device_ptr) - :DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT), - m_device_ptr (in_device_ptr), +Anvil::Semaphore::Semaphore(Anvil::SemaphoreCreateInfoUniquePtr in_create_info_ptr) + :DebugMarkerSupportProvider(in_create_info_ptr->get_device(), + Anvil::ObjectType::SEMAPHORE), + MTSafetySupportProvider (Anvil::Utils::convert_mt_safety_enum_to_boolean(in_create_info_ptr->get_mt_safety(), + in_create_info_ptr->get_device () )), m_semaphore (VK_NULL_HANDLE) { - reset(); + m_create_info_ptr = std::move(in_create_info_ptr); - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_SEMAPHORE, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::SEMAPHORE, this); } /* Please see header for specification */ Anvil::Semaphore::~Semaphore() { - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_SEMAPHORE, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::SEMAPHORE, this); release_semaphore(); } /** Please see header for specification */ -std::shared_ptr Anvil::Semaphore::create(std::weak_ptr in_device_ptr) +Anvil::SemaphoreUniquePtr Anvil::Semaphore::create(Anvil::SemaphoreCreateInfoUniquePtr in_create_info_ptr) { - std::shared_ptr result_ptr; + SemaphoreUniquePtr result_ptr(nullptr, + std::default_delete() ); result_ptr.reset( - new Anvil::Semaphore(in_device_ptr) + new Anvil::Semaphore(std::move(in_create_info_ptr) ) ); + if (result_ptr != nullptr) + { + if (!result_ptr->reset() ) + { + result_ptr.reset(); + } + } + + return result_ptr; +} + +/* Please see header for specification */ +Anvil::ExternalHandleUniquePtr Anvil::Semaphore::export_to_external_handle(const Anvil::ExternalSemaphoreHandleTypeFlagBits& in_semaphore_handle_type) +{ + #if defined(_WIN32) + const auto invalid_handle = nullptr; + const bool is_autorelease_handle = Anvil::Utils::is_nt_handle(in_semaphore_handle_type); + const bool only_one_handle_ever_permitted = Anvil::Utils::is_nt_handle(in_semaphore_handle_type); + #else + const int invalid_handle = -1; + const bool is_autorelease_handle = true; + const bool only_one_handle_ever_permitted = false; + #endif + + ExternalHandleType result_handle = invalid_handle; + Anvil::ExternalHandleUniquePtr result_ptr; + + /* Sanity checks */ + #if defined(_WIN32) + { + if (!m_create_info_ptr->get_device()->get_extension_info()->khr_external_semaphore_win32() ) + { + anvil_assert(m_create_info_ptr->get_device()->get_extension_info()->khr_external_semaphore_win32() ); + + goto end; + } + } + #else + { + if (!m_create_info_ptr->get_device()->get_extension_info()->khr_external_semaphore_fd() ) + { + anvil_assert(m_create_info_ptr->get_device()->get_extension_info()->khr_external_semaphore_fd() ); + + goto end; + } + } + #endif + + if (only_one_handle_ever_permitted && + m_external_semaphore_created_for_handle_type.find(in_semaphore_handle_type) != m_external_semaphore_created_for_handle_type.end() ) + { + anvil_assert_fail(); + + goto end; + } + + /* Go and try to open a new handle. 
*/ + { + #if defined(_WIN32) + const auto entrypoints_ptr = &m_create_info_ptr->get_device()->get_extension_khr_external_semaphore_win32_entrypoints(); + VkSemaphoreGetWin32HandleInfoKHR info; + #else + const auto entrypoints_ptr = &m_create_info_ptr->get_device()->get_extension_khr_external_semaphore_fd_entrypoints(); + VkSemaphoreGetFdInfoKHR info; + #endif + + + anvil_assert(m_semaphore != VK_NULL_HANDLE); + + info.handleType = static_cast(in_semaphore_handle_type); + info.pNext = nullptr; + info.semaphore = m_semaphore; + + #if defined(_WIN32) + { + info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR; + } + #else + { + info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR; + } + #endif + + #if defined(_WIN32) + if (entrypoints_ptr->vkGetSemaphoreWin32HandleKHR(m_create_info_ptr->get_device()->get_device_vk(), + &info, + &result_handle) != VK_SUCCESS) + #else + if (entrypoints_ptr->vkGetSemaphoreFdKHR(m_create_info_ptr->get_device()->get_device_vk(), + &info, + &result_handle) != VK_SUCCESS) + #endif + { + anvil_assert_fail(); + + goto end; + } + + if (result_handle == invalid_handle) + { + anvil_assert(result_handle != invalid_handle); + + goto end; + } + } + + /* Cache the newly created handle if it's a NT handle */ + if (only_one_handle_ever_permitted) + { + m_external_semaphore_created_for_handle_type[in_semaphore_handle_type] = true; + } + + result_ptr = Anvil::ExternalHandle::create(result_handle, + is_autorelease_handle); /* in_close_at_destruction_time */ + +end: return result_ptr; } +#if defined(_WIN32) + bool Anvil::Semaphore::import_from_external_handle(const bool& in_temporary_import, + const Anvil::ExternalSemaphoreHandleTypeFlagBits& in_handle_type, + const ExternalHandleType& in_opt_handle, + const std::wstring& in_opt_name) +#else + bool Anvil::Semaphore::import_from_external_handle(const bool& in_temporary_import, + const Anvil::ExternalSemaphoreHandleTypeFlagBits& in_handle_type, + const ExternalHandleType& in_handle) +#endif +{ + #if defined(_WIN32) + const auto entrypoints_ptr = &m_device_ptr->get_extension_khr_external_semaphore_win32_entrypoints(); + #else + const auto entrypoints_ptr = &m_device_ptr->get_extension_khr_external_semaphore_fd_entrypoints(); + #endif + + bool result = false; + + /* Sanity checks */ + #if defined(_WIN32) + { + if (!m_device_ptr->get_extension_info()->khr_external_semaphore_win32() ) + { + anvil_assert(m_device_ptr->get_extension_info()->khr_external_semaphore_win32() ); + + goto end; + } + } + #else + { + if (!m_device_ptr->get_extension_info()->khr_external_semaphore_fd() ) + { + anvil_assert(m_device_ptr->get_extension_info()->khr_external_semaphore_fd() ); + + goto end; + } + } + #endif + + /* Proceed */ + { + #if defined(_WIN32) + VkImportSemaphoreWin32HandleInfoKHR info_vk; + #else + VkImportSemaphoreFdInfoKHR info_vk; + #endif + + info_vk.flags = (in_temporary_import) ? VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR + : 0; + info_vk.handleType = static_cast(in_handle_type); + info_vk.pNext = nullptr; + info_vk.semaphore = m_semaphore; + + #if defined(_WIN32) + { + info_vk.handle = in_opt_handle; + info_vk.name = (in_opt_name.size() > 0) ? 
&in_opt_name.at(0) + : nullptr; + info_vk.sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR; + } + #else + { + info_vk.fd = in_handle; + info_vk.sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR; + } + #endif + + #if defined(_WIN32) + { + result = (entrypoints_ptr->vkImportSemaphoreWin32HandleKHR(m_device_ptr->get_device_vk(), + &info_vk) == VK_SUCCESS); + } + #else + { + result = (entrypoints_ptr->vkImportSemaphoreFdKHR(m_device_ptr->get_device_vk(), + &info_vk) == VK_SUCCESS); + } + #endif + } + +end: + return result; +} + /** Destroys the underlying Vulkan Semaphore instance. */ void Anvil::Semaphore::release_semaphore() { if (m_semaphore != VK_NULL_HANDLE) { - std::shared_ptr device_locked_ptr(m_device_ptr); - - vkDestroySemaphore(device_locked_ptr->get_device_vk(), - m_semaphore, - nullptr /* pAllocator */); + lock(); + { + Anvil::Vulkan::vkDestroySemaphore(m_device_ptr->get_device_vk(), + m_semaphore, + nullptr /* pAllocator */); + } + unlock(); m_semaphore = VK_NULL_HANDLE; set_vk_handle(m_semaphore); @@ -76,29 +285,90 @@ void Anvil::Semaphore::release_semaphore() } /* Please see header for specification */ -void Anvil::Semaphore::reset() +bool Anvil::Semaphore::reset() { - std::shared_ptr device_locked_ptr (m_device_ptr); - VkResult result (VK_ERROR_INITIALIZATION_FAILED); - VkSemaphoreCreateInfo semaphore_create_info; - - ANVIL_REDUNDANT_VARIABLE(result); + VkResult result (VK_ERROR_INITIALIZATION_FAILED); + Anvil::StructChainer struct_chainer; + Anvil::StructChainUniquePtr struct_chain_ptr; release_semaphore(); + /* Sanity checks */ + if (m_create_info_ptr->get_exportable_external_semaphore_handle_types() != Anvil::ExternalSemaphoreHandleTypeFlagBits::NONE) + { + if (!m_device_ptr->get_extension_info()->khr_external_semaphore() ) + { + anvil_assert(m_device_ptr->get_extension_info()->khr_external_semaphore() ); + + goto end; + } + } + /* Spawn a new semaphore */ - semaphore_create_info.flags = 0; - semaphore_create_info.pNext = nullptr; - semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO; + { + VkSemaphoreCreateInfo semaphore_create_info; - result = vkCreateSemaphore(device_locked_ptr->get_device_vk(), - &semaphore_create_info, - nullptr, /* pAllocator */ - &m_semaphore); + semaphore_create_info.flags = 0; + semaphore_create_info.pNext = nullptr; + semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO; + + struct_chainer.append_struct(semaphore_create_info); + } + + if (m_create_info_ptr->get_exportable_external_semaphore_handle_types() != Anvil::ExternalSemaphoreHandleTypeFlagBits::NONE) + { + VkExportSemaphoreCreateInfo create_info; + + create_info.handleTypes = m_create_info_ptr->get_exportable_external_semaphore_handle_types().get_vk(); + create_info.pNext = nullptr; + create_info.sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR; + + struct_chainer.append_struct(create_info); + } + + #if defined(_WIN32) + { + const Anvil::ExternalNTHandleInfo* nt_handle_info_ptr = nullptr; + + if (m_create_info_ptr->get_exportable_nt_handle_info(&nt_handle_info_ptr) ) + { + VkExportSemaphoreWin32HandleInfoKHR handle_info; + + anvil_assert( nt_handle_info_ptr != nullptr); + anvil_assert(((m_create_info_ptr->get_exportable_external_semaphore_handle_types() & Anvil::ExternalSemaphoreHandleTypeFlagBits::OPAQUE_WIN32_BIT) != 0) || + ((m_create_info_ptr->get_exportable_external_semaphore_handle_types() & Anvil::ExternalSemaphoreHandleTypeFlagBits::D3D12_FENCE_BIT) != 0)); + + handle_info.dwAccess = nt_handle_info_ptr->access; + 
handle_info.name = (nt_handle_info_ptr->name.size() > 0) ? &nt_handle_info_ptr->name.at(0) + : nullptr; + handle_info.pAttributes = nt_handle_info_ptr->attributes_ptr; + handle_info.pNext = nullptr; + handle_info.sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR; + + struct_chainer.append_struct(handle_info); + } + } + #endif + + struct_chain_ptr = struct_chainer.create_chain(); + if (struct_chain_ptr == nullptr) + { + anvil_assert(struct_chain_ptr != nullptr); + + goto end; + } + + result = Anvil::Vulkan::vkCreateSemaphore(m_device_ptr->get_device_vk(), + struct_chain_ptr->get_root_struct(), + nullptr, /* pAllocator */ + &m_semaphore); anvil_assert_vk_call_succeeded(result); if (is_vk_call_successful(result) ) { set_vk_handle(m_semaphore); } -} \ No newline at end of file + +end: + return is_vk_call_successful(result); +} diff --git a/src/wrappers/shader_module.cpp b/src/wrappers/shader_module.cpp index 1743dac3..1150f48c 100644 --- a/src/wrappers/shader_module.cpp +++ b/src/wrappers/shader_module.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -34,12 +34,13 @@ /** Please see header for specification */ -Anvil::ShaderModule::ShaderModule(std::weak_ptr in_device_ptr, - std::shared_ptr in_spirv_generator_ptr) +Anvil::ShaderModule::ShaderModule(const Anvil::BaseDevice* in_device_ptr, + GLSLShaderToSPIRVGenerator* in_spirv_generator_ptr, + bool in_mt_safe) :DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT), - m_device_ptr (in_device_ptr), - m_device_raw_ptr (in_device_ptr.lock().get() ) + Anvil::ObjectType::SHADER_MODULE), + MTSafetySupportProvider (in_mt_safe), + m_device_ptr (in_device_ptr) { bool result = false; const char* shader_spirv_blob = in_spirv_generator_ptr->get_spirv_blob(); @@ -51,13 +52,13 @@ Anvil::ShaderModule::ShaderModule(std::weak_ptr in anvil_assert(shader_spirv_blob != nullptr); anvil_assert(shader_spirv_blob_size > 0); - m_cs_entrypoint_name = (shader_stage == SHADER_STAGE_COMPUTE) ? "main" : ""; - m_fs_entrypoint_name = (shader_stage == SHADER_STAGE_FRAGMENT) ? "main" : ""; + m_cs_entrypoint_name = (shader_stage == ShaderStage::COMPUTE) ? "main" : ""; + m_fs_entrypoint_name = (shader_stage == ShaderStage::FRAGMENT) ? "main" : ""; m_glsl_source_code = in_spirv_generator_ptr->get_glsl_source_code(); - m_gs_entrypoint_name = (shader_stage == SHADER_STAGE_GEOMETRY) ? "main" : ""; - m_tc_entrypoint_name = (shader_stage == SHADER_STAGE_TESSELLATION_CONTROL) ? "main" : ""; - m_te_entrypoint_name = (shader_stage == SHADER_STAGE_TESSELLATION_EVALUATION) ? "main" : ""; - m_vs_entrypoint_name = (shader_stage == SHADER_STAGE_VERTEX) ? "main" : ""; + m_gs_entrypoint_name = (shader_stage == ShaderStage::GEOMETRY) ? "main" : ""; + m_tc_entrypoint_name = (shader_stage == ShaderStage::TESSELLATION_CONTROL) ? "main" : ""; + m_te_entrypoint_name = (shader_stage == ShaderStage::TESSELLATION_EVALUATION) ? "main" : ""; + m_vs_entrypoint_name = (shader_stage == ShaderStage::VERTEX) ? 
"main" : ""; result = init_from_spirv_blob(shader_spirv_blob, shader_spirv_blob_size); @@ -66,20 +67,21 @@ Anvil::ShaderModule::ShaderModule(std::weak_ptr in } /** Please see header for specification */ -Anvil::ShaderModule::ShaderModule(std::weak_ptr in_device_ptr, - const char* in_spirv_blob, - uint32_t in_n_spirv_blob_bytes, - const std::string& in_cs_entrypoint_name, - const std::string& in_fs_entrypoint_name, - const std::string& in_gs_entrypoint_name, - const std::string& in_tc_entrypoint_name, - const std::string& in_te_entrypoint_name, - const std::string& in_vs_entrypoint_name) +Anvil::ShaderModule::ShaderModule(const Anvil::BaseDevice* in_device_ptr, + const char* in_spirv_blob, + uint32_t in_n_spirv_blob_bytes, + const std::string& in_cs_entrypoint_name, + const std::string& in_fs_entrypoint_name, + const std::string& in_gs_entrypoint_name, + const std::string& in_tc_entrypoint_name, + const std::string& in_te_entrypoint_name, + const std::string& in_vs_entrypoint_name, + bool in_mt_safe) :DebugMarkerSupportProvider(in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT), + Anvil::ObjectType::SHADER_MODULE), + MTSafetySupportProvider (in_mt_safe), m_cs_entrypoint_name (in_cs_entrypoint_name), m_device_ptr (in_device_ptr), - m_device_raw_ptr (in_device_ptr.lock().get() ), m_fs_entrypoint_name (in_fs_entrypoint_name), m_gs_entrypoint_name (in_gs_entrypoint_name), m_tc_entrypoint_name (in_tc_entrypoint_name), @@ -98,7 +100,7 @@ Anvil::ShaderModule::~ShaderModule() { auto object_tracker_ptr = Anvil::ObjectTracker::get(); - object_tracker_ptr->unregister_object(Anvil::OBJECT_TYPE_SHADER_MODULE, + object_tracker_ptr->unregister_object(Anvil::ObjectType::SHADER_MODULE, this); /* Release the Vulkan handle */ @@ -107,7 +109,7 @@ Anvil::ShaderModule::~ShaderModule() /* Unregister from any callbacks we have subscribed for */ object_tracker_ptr->unregister_from_callbacks( Anvil::OBJECT_TRACKER_CALLBACK_ID_ON_DEVICE_OBJECT_ABOUT_TO_BE_UNREGISTERED, - std::bind(&ShaderModule::on_object_about_to_be_released, + std::bind(&ShaderModule::on_device_about_to_be_released, this, std::placeholders::_1), this @@ -115,34 +117,66 @@ Anvil::ShaderModule::~ShaderModule() } /** Please see header for specification */ -std::shared_ptr Anvil::ShaderModule::create_from_spirv_generator(std::weak_ptr in_device_ptr, - std::shared_ptr in_spirv_generator_ptr) +Anvil::ShaderModuleUniquePtr Anvil::ShaderModule::create_from_spirv_generator(const Anvil::BaseDevice* in_device_ptr, + GLSLShaderToSPIRVGenerator* in_spirv_generator_ptr, + MTSafety in_mt_safety) { - std::shared_ptr result_ptr; - auto shader_module_cache_ptr = in_device_ptr.lock()->get_parent_instance().lock()->get_shader_module_cache(); - const auto shader_stage = in_spirv_generator_ptr->get_shader_stage(); - - /* First check if a shader module with specified parameters has not already been created. If so, - * we can safely re-use it. */ - result_ptr = shader_module_cache_ptr->get_cached_shader_module(in_device_ptr, - in_spirv_generator_ptr->get_spirv_blob(), - in_spirv_generator_ptr->get_spirv_blob_size(), - (shader_stage == SHADER_STAGE_COMPUTE) ? "main" : "", - (shader_stage == SHADER_STAGE_FRAGMENT) ? "main" : "", - (shader_stage == SHADER_STAGE_GEOMETRY) ? "main" : "", - (shader_stage == SHADER_STAGE_TESSELLATION_CONTROL) ? "main" : "", - (shader_stage == SHADER_STAGE_TESSELLATION_EVALUATION) ? "main" : "", - (shader_stage == SHADER_STAGE_VERTEX) ? 
"main" : ""); - - if (result_ptr == nullptr) + const bool mt_safe = Anvil::Utils::convert_mt_safety_enum_to_boolean(in_mt_safety, + in_device_ptr); + Anvil::ShaderModuleUniquePtr result_ptr = Anvil::ShaderModuleUniquePtr(nullptr, + std::default_delete() ); + auto shader_module_cache_ptr = in_device_ptr->get_shader_module_cache(); + const auto shader_stage = in_spirv_generator_ptr->get_shader_stage(); + + if (shader_module_cache_ptr != nullptr) + { + shader_module_cache_ptr->lock(); + { + /* First check if a shader module with specified parameters has not already been created. If so, + * we can safely re-use it. */ + result_ptr = shader_module_cache_ptr->get_cached_shader_module(in_device_ptr, + in_spirv_generator_ptr->get_spirv_blob(), + in_spirv_generator_ptr->get_spirv_blob_size(), + (shader_stage == ShaderStage::COMPUTE) ? "main" : "", + (shader_stage == ShaderStage::FRAGMENT) ? "main" : "", + (shader_stage == ShaderStage::GEOMETRY) ? "main" : "", + (shader_stage == ShaderStage::TESSELLATION_CONTROL) ? "main" : "", + (shader_stage == ShaderStage::TESSELLATION_EVALUATION) ? "main" : "", + (shader_stage == ShaderStage::VERTEX) ? "main" : ""); + + if (result_ptr == nullptr) + { + /* Nope? Need to create a new instance then. + * + * Make sure not to specify any deleter. The created shader module is going to be observed and acquired + * by the cache which will take care of destroying at tear-down time. */ + result_ptr = Anvil::ShaderModuleUniquePtr( + new Anvil::ShaderModule(in_device_ptr, + in_spirv_generator_ptr, + mt_safe), + [](Anvil::ShaderModule*) + { + /* Stub */ + }); + + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::SHADER_MODULE, + result_ptr.get() ); + } + } + shader_module_cache_ptr->unlock(); + } + else { - /* Nope? Need to create a new instance then. */ + /* Just spawn a new instance .. */ result_ptr.reset( new Anvil::ShaderModule(in_device_ptr, - in_spirv_generator_ptr) + in_spirv_generator_ptr, + mt_safe) ); - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_SHADER_MODULE, + anvil_assert(result_ptr->get_module() != VK_NULL_HANDLE); + + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::SHADER_MODULE, result_ptr.get() ); } @@ -150,34 +184,70 @@ std::shared_ptr Anvil::ShaderModule::create_from_spirv_gene } /** Please see header for specification */ -std::shared_ptr Anvil::ShaderModule::create_from_spirv_blob(std::weak_ptr in_device_ptr, - const char* in_spirv_blob, - uint32_t in_n_spirv_blob_bytes, - const char* in_cs_entrypoint_name, - const char* in_fs_entrypoint_name, - const char* in_gs_entrypoint_name, - const char* in_tc_entrypoint_name, - const char* in_te_entrypoint_name, - const char* in_vs_entrypoint_name) +Anvil::ShaderModuleUniquePtr Anvil::ShaderModule::create_from_spirv_blob(const Anvil::BaseDevice* in_device_ptr, + const char* in_spirv_blob, + uint32_t in_n_spirv_blob_bytes, + const char* in_cs_entrypoint_name, + const char* in_fs_entrypoint_name, + const char* in_gs_entrypoint_name, + const char* in_tc_entrypoint_name, + const char* in_te_entrypoint_name, + const char* in_vs_entrypoint_name, + MTSafety in_mt_safety) { - std::shared_ptr result_ptr; - auto shader_module_cache_ptr = in_device_ptr.lock()->get_parent_instance().lock()->get_shader_module_cache(); - - /* First check if a shader module with specified parameters has not already been created. If so, - * we can safely re-use it. 
*/ - result_ptr = shader_module_cache_ptr->get_cached_shader_module(in_device_ptr, - in_spirv_blob, - in_n_spirv_blob_bytes, - in_cs_entrypoint_name, - in_fs_entrypoint_name, - in_gs_entrypoint_name, - in_tc_entrypoint_name, - in_te_entrypoint_name, - in_vs_entrypoint_name); - - if (result_ptr == nullptr) + const bool mt_safe = Anvil::Utils::convert_mt_safety_enum_to_boolean(in_mt_safety, + in_device_ptr); + Anvil::ShaderModuleUniquePtr result_ptr (nullptr, + std::default_delete() ); + auto shader_module_cache_ptr = in_device_ptr->get_shader_module_cache(); + + if (shader_module_cache_ptr != nullptr) + { + shader_module_cache_ptr->lock(); + { + /* First check if a shader module with specified parameters has not already been created. If so, + * we can safely re-use it. */ + result_ptr = shader_module_cache_ptr->get_cached_shader_module(in_device_ptr, + in_spirv_blob, + in_n_spirv_blob_bytes, + in_cs_entrypoint_name, + in_fs_entrypoint_name, + in_gs_entrypoint_name, + in_tc_entrypoint_name, + in_te_entrypoint_name, + in_vs_entrypoint_name); + + if (result_ptr == nullptr) + { + /* Nope? Need to create a new instance then. + * + * Make sure not to specify any deleter. The created shader module is going to be observed and acquired + * by the cache which will take care of destroying at tear-down time. */ + result_ptr = Anvil::ShaderModuleUniquePtr( + new Anvil::ShaderModule(in_device_ptr, + in_spirv_blob, + in_n_spirv_blob_bytes, + in_cs_entrypoint_name, + in_fs_entrypoint_name, + in_gs_entrypoint_name, + in_tc_entrypoint_name, + in_te_entrypoint_name, + in_vs_entrypoint_name, + mt_safe), + [](Anvil::ShaderModule*) + { + /* Stub */ + }); + + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::SHADER_MODULE, + result_ptr.get() ); + } + } + shader_module_cache_ptr->unlock(); + } + else { - /* Nope? Need to create a new instance then. */ + /* Just spawn the new instance .. 
*/ result_ptr.reset( new Anvil::ShaderModule(in_device_ptr, in_spirv_blob, @@ -187,10 +257,13 @@ std::shared_ptr Anvil::ShaderModule::create_from_spirv_blob in_gs_entrypoint_name, in_tc_entrypoint_name, in_te_entrypoint_name, - in_vs_entrypoint_name) + in_vs_entrypoint_name, + mt_safe) ); - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_SHADER_MODULE, + anvil_assert(result_ptr->get_module() != VK_NULL_HANDLE); + + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::SHADER_MODULE, result_ptr.get() ); } @@ -202,18 +275,22 @@ void Anvil::ShaderModule::destroy() { if (m_module != VK_NULL_HANDLE) { - vkDestroyShaderModule(m_device_raw_ptr->get_device_vk(), - m_module, - nullptr /* pAllocator */); + lock(); + { + Anvil::Vulkan::vkDestroyShaderModule(m_device_ptr->get_device_vk(), + m_module, + nullptr /* pAllocator */); + } + unlock(); m_module = VK_NULL_HANDLE; } } +#ifdef ANVIL_LINK_WITH_GLSLANG /** Please see header for specification */ const std::string& Anvil::ShaderModule::get_disassembly() { - #ifdef ANVIL_LINK_WITH_GLSLANG { if (m_disassembly.size() == 0) { @@ -226,18 +303,17 @@ const std::string& Anvil::ShaderModule::get_disassembly() m_disassembly = disassembly_sstream.str(); } } - #endif return m_disassembly; } +#endif /** Please see header for specification */ bool Anvil::ShaderModule::init_from_spirv_blob(const char* in_spirv_blob, uint32_t in_n_spirv_blob_bytes) { - std::shared_ptr device_locked_ptr(m_device_ptr); - VkResult result_vk; - VkShaderModuleCreateInfo shader_module_create_info; + VkResult result_vk; + VkShaderModuleCreateInfo shader_module_create_info; /* Set up the "shader module" create info descriptor */ anvil_assert((in_n_spirv_blob_bytes % sizeof(uint32_t) ) == 0); @@ -248,10 +324,10 @@ bool Anvil::ShaderModule::init_from_spirv_blob(const char* in_spirv_blob, shader_module_create_info.pNext = nullptr; shader_module_create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; - result_vk = vkCreateShaderModule(device_locked_ptr->get_device_vk(), - &shader_module_create_info, - nullptr, /* pAllocator */ - &m_module); + result_vk = Anvil::Vulkan::vkCreateShaderModule(m_device_ptr->get_device_vk(), + &shader_module_create_info, + nullptr, /* pAllocator */ + &m_module); anvil_assert_vk_call_succeeded(result_vk); if (is_vk_call_successful(result_vk) ) @@ -268,7 +344,7 @@ bool Anvil::ShaderModule::init_from_spirv_blob(const char* in_spirv_blob, /* Sign for device destruction notification, in which case we need to destroy the shader module. */ Anvil::ObjectTracker::get()->register_for_callbacks( Anvil::OBJECT_TRACKER_CALLBACK_ID_ON_DEVICE_OBJECT_ABOUT_TO_BE_UNREGISTERED, - std::bind(&ShaderModule::on_object_about_to_be_released, + std::bind(&ShaderModule::on_device_about_to_be_released, this, std::placeholders::_1), this @@ -278,11 +354,11 @@ bool Anvil::ShaderModule::init_from_spirv_blob(const char* in_spirv_blob, } /** TODO */ -void Anvil::ShaderModule::on_object_about_to_be_released(void* in_callback_arg_ptr) +void Anvil::ShaderModule::on_device_about_to_be_released(void* in_callback_arg_ptr) { const auto callback_arg_ptr = reinterpret_cast(in_callback_arg_ptr); - if (m_device_raw_ptr == callback_arg_ptr->object_raw_ptr) + if (m_device_ptr == callback_arg_ptr->object_raw_ptr) { /* Make sure to release the shader module handle before we let the object actually proceed with destruction! 
*/ destroy(); diff --git a/src/wrappers/swapchain.cpp b/src/wrappers/swapchain.cpp index 6f6098f9..304272c2 100644 --- a/src/wrappers/swapchain.cpp +++ b/src/wrappers/swapchain.cpp @@ -1,5 +1,5 @@ // -// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved. +// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -22,7 +22,12 @@ #include "misc/debug.h" #include "misc/dummy_window.h" +#include "misc/fence_create_info.h" +#include "misc/image_create_info.h" +#include "misc/image_view_create_info.h" #include "misc/object_tracker.h" +#include "misc/struct_chainer.h" +#include "misc/swapchain_create_info.h" #include "misc/window.h" #include "wrappers/command_buffer.h" #include "wrappers/command_pool.h" @@ -31,64 +36,41 @@ #include "wrappers/swapchain.h" /** Please see header for specification */ -Anvil::Swapchain::Swapchain(std::weak_ptr in_device_ptr, - std::shared_ptr in_parent_surface_ptr, - std::shared_ptr in_window_ptr, - VkFormat in_format, - VkPresentModeKHR in_present_mode, - VkImageUsageFlags in_usage_flags, - VkSwapchainCreateFlagsKHR in_flags, - uint32_t in_n_images, - const ExtensionKHRSwapchainEntrypoints& in_khr_swapchain_entrypoints) - :DebugMarkerSupportProvider (in_device_ptr, - VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT), +Anvil::Swapchain::Swapchain(Anvil::SwapchainCreateInfoUniquePtr in_create_info_ptr) + :DebugMarkerSupportProvider (in_create_info_ptr->get_device(), + Anvil::ObjectType::SWAPCHAIN), + MTSafetySupportProvider (Anvil::Utils::convert_mt_safety_enum_to_boolean(in_create_info_ptr->get_mt_safety(), + in_create_info_ptr->get_device () )), m_destroy_swapchain_before_parent_window_closes(true), - m_device_ptr (in_device_ptr), - m_flags (in_flags), - m_image_format (in_format), m_last_acquired_image_index (UINT32_MAX), m_n_acquire_counter (0), m_n_acquire_counter_rounded (0), m_n_present_counter (0), - m_n_swapchain_images (in_n_images), - m_parent_surface_ptr (in_parent_surface_ptr), - m_present_mode (in_present_mode), - m_swapchain (0), - m_window_ptr (in_window_ptr), - m_usage_flags (static_cast(in_usage_flags) ), - m_khr_swapchain_entrypoints (in_khr_swapchain_entrypoints) + m_swapchain (VK_NULL_HANDLE) { - std::shared_ptr device_locked_ptr(in_device_ptr); - - anvil_assert(in_n_images > 0); - anvil_assert(in_parent_surface_ptr != nullptr); - anvil_assert(in_usage_flags != 0); + { + auto create_info_ptr = Anvil::FenceCreateInfo::create(m_device_ptr, + false); /* create_signalled */ - m_image_ptrs.resize (in_n_images); - m_image_view_ptrs.resize(in_n_images); + create_info_ptr->set_mt_safety(in_create_info_ptr->get_mt_safety() ); - m_image_available_fence_ptr = Anvil::Fence::create(m_device_ptr, - false /* create_signalled */); + m_image_available_fence_ptr = Anvil::Fence::create(std::move(create_info_ptr) ); + } - init(); + m_create_info_ptr = std::move(in_create_info_ptr); - Anvil::ObjectTracker::get()->register_object(Anvil::OBJECT_TYPE_SWAPCHAIN, + Anvil::ObjectTracker::get()->register_object(Anvil::ObjectType::SWAPCHAIN, this); } /** Please see header for specification */ Anvil::Swapchain::~Swapchain() { - Anvil::ObjectTracker::get()->unregister_object(Anvil::OBJECT_TYPE_SWAPCHAIN, + Anvil::ObjectTracker::get()->unregister_object(Anvil::ObjectType::SWAPCHAIN, this); destroy_swapchain(); - if (m_parent_surface_ptr != nullptr) - { - m_parent_surface_ptr = 
nullptr; - } - m_image_ptrs.clear (); m_image_available_fence_ptr.reset(); m_image_view_ptrs.clear (); @@ -98,12 +80,13 @@ Anvil::Swapchain::~Swapchain() queue_ptr->unregister_from_callbacks( QUEUE_CALLBACK_ID_PRESENT_REQUEST_ISSUED, std::bind(&Swapchain::on_present_request_issued, - this), + this, + std::placeholders::_1), this ); } - m_window_ptr->unregister_from_callbacks( + m_create_info_ptr->get_window()->unregister_from_callbacks( WINDOW_CALLBACK_ID_ABOUT_TO_CLOSE, std::bind(&Swapchain::on_parent_window_about_to_close, this), @@ -112,109 +95,219 @@ Anvil::Swapchain::~Swapchain() } /** Please see header for specification */ -uint32_t Anvil::Swapchain::acquire_image(std::shared_ptr in_opt_semaphore_ptr, - bool in_should_block) +Anvil::SwapchainOperationErrorCode Anvil::Swapchain::acquire_image(Anvil::Semaphore* in_opt_semaphore_ptr, + uint32_t* out_result_index_ptr, + bool in_should_block) { - std::shared_ptr device_locked_ptr = m_device_ptr.lock(); - const RenderingSurfaceType rendering_surface = m_parent_surface_ptr->get_type(); + uint32_t result = UINT32_MAX; + Anvil::SwapchainOperationErrorCode result_status = Anvil::SwapchainOperationErrorCode::SUCCESS; + + switch (m_device_ptr->get_type() ) + { + case Anvil::DeviceType::MULTI_GPU: + { + const Anvil::MGPUDevice* mgpu_device_ptr (dynamic_cast(m_device_ptr) ); + const uint32_t n_physical_devices(mgpu_device_ptr->get_n_physical_devices() ); + const Anvil::PhysicalDevice* physical_devices[32]; + + anvil_assert(n_physical_devices < sizeof(physical_devices) / sizeof(physical_devices[0]) ); + + for (uint32_t n_physical_device = 0; + n_physical_device < n_physical_devices; + ++n_physical_device) + { + physical_devices[n_physical_device] = mgpu_device_ptr->get_physical_device(n_physical_device); + } + + result_status = acquire_image(in_opt_semaphore_ptr, + n_physical_devices, + physical_devices, + &result, + in_should_block); + + break; + } + + case Anvil::DeviceType::SINGLE_GPU: + { + const Anvil::PhysicalDevice* physical_device_ptr(nullptr); + const Anvil::SGPUDevice* sgpu_device_ptr (dynamic_cast(m_device_ptr) ); + + physical_device_ptr = sgpu_device_ptr->get_physical_device(); + + result_status = acquire_image(in_opt_semaphore_ptr, + 1, /* n_mgpu_physical_devices */ + &physical_device_ptr, + &result, + in_should_block); + + break; + } + + default: + { + anvil_assert_fail(); + } + } + + *out_result_index_ptr = result; + + return result_status; +} + +/** Please see header for specification */ +Anvil::SwapchainOperationErrorCode Anvil::Swapchain::acquire_image(Anvil::Semaphore* in_opt_semaphore_ptr, + uint32_t in_n_mgpu_physical_devices, + const Anvil::PhysicalDevice* const* in_mgpu_physical_device_ptrs, + uint32_t* out_result_index_ptr, + bool in_should_block) +{ + const Anvil::DeviceType device_type = m_device_ptr->get_type(); + uint32_t result = UINT32_MAX; - VkResult result_vk = VK_ERROR_INITIALIZATION_FAILED; - const WindowPlatform window_platform = m_window_ptr->get_platform(); + Anvil::SwapchainOperationErrorCode result_status = Anvil::SwapchainOperationErrorCode::SUCCESS; + const WindowPlatform window_platform = m_create_info_ptr->get_window()->get_platform(); const bool is_offscreen_rendering_enabled = (window_platform == WINDOW_PLATFORM_DUMMY || - window_platform == WINDOW_PLATFORM_DUMMY_WITH_PNG_SNAPSHOTS) && - (rendering_surface == Anvil::RENDERING_SURFACE_TYPE_GENERAL); - - ANVIL_REDUNDANT_VARIABLE(result_vk); + window_platform == WINDOW_PLATFORM_DUMMY_WITH_PNG_SNAPSHOTS); if (!is_offscreen_rendering_enabled) { VkFence 
fence_handle = VK_NULL_HANDLE; - if (in_should_block) + if (in_opt_semaphore_ptr != nullptr) { - m_image_available_fence_ptr->reset(); - - fence_handle = m_image_available_fence_ptr->get_fence(); + in_opt_semaphore_ptr->lock(); } - result_vk = m_khr_swapchain_entrypoints.vkAcquireNextImageKHR(device_locked_ptr->get_device_vk(), - m_swapchain, - UINT64_MAX, - in_opt_semaphore_ptr->get_semaphore(), - fence_handle, - &result); + m_image_available_fence_ptr->lock(); + lock(); + { + if (in_should_block) + { + m_image_available_fence_ptr->reset(); + + fence_handle = m_image_available_fence_ptr->get_fence(); + } - anvil_assert_vk_call_succeeded(result_vk); + if (device_type == Anvil::DeviceType::SINGLE_GPU) + { + const auto& khr_swapchain_entrypoints = m_device_ptr->get_extension_khr_swapchain_entrypoints(); + + result_status = static_cast(khr_swapchain_entrypoints.vkAcquireNextImageKHR(m_device_ptr->get_device_vk(), + m_swapchain, + UINT64_MAX, + (in_opt_semaphore_ptr != nullptr) ? in_opt_semaphore_ptr->get_semaphore() : VK_NULL_HANDLE, + fence_handle, + &result) ); + } + else + { + VkAcquireNextImageInfoKHR info; + const auto& khr_device_group_entrypoints(m_device_ptr->get_extension_khr_device_group_entrypoints() ); + const Anvil::MGPUDevice* mgpu_device_ptr (dynamic_cast(m_device_ptr) ); + + anvil_assert(device_type == Anvil::DeviceType::MULTI_GPU); + + info.deviceMask = 0; + info.fence = fence_handle; + info.pNext = nullptr; + info.semaphore = (in_opt_semaphore_ptr != nullptr) ? in_opt_semaphore_ptr->get_semaphore() : VK_NULL_HANDLE; + info.sType = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR; + info.swapchain = m_swapchain; + info.timeout = UINT64_MAX; + + for (uint32_t n_physical_device = 0; + n_physical_device < in_n_mgpu_physical_devices; + ++n_physical_device) + { + const Anvil::PhysicalDevice* physical_device_ptr (in_mgpu_physical_device_ptrs[n_physical_device]); + const uint32_t physical_device_index(physical_device_ptr->get_index() ); - if (fence_handle != VK_NULL_HANDLE) - { - result_vk = vkWaitForFences(device_locked_ptr->get_device_vk(), - 1, /* fenceCount */ - &fence_handle, - VK_TRUE, /* waitAll */ - UINT64_MAX); + anvil_assert((info.deviceMask & (1 << physical_device_index)) == 0); + + info.deviceMask |= (1 << physical_device_index); + } + + result_status = static_cast(khr_device_group_entrypoints.vkAcquireNextImage2KHR(mgpu_device_ptr->get_device_vk(), + &info, + &result) ); + } + + if (fence_handle != VK_NULL_HANDLE) + { + result_status = static_cast(Anvil::Vulkan::vkWaitForFences(m_device_ptr->get_device_vk(), + 1, /* fenceCount */ + &fence_handle, + VK_TRUE, /* waitAll */ + UINT64_MAX) ); + } + } + unlock(); + m_image_available_fence_ptr->unlock(); - anvil_assert_vk_call_succeeded(result_vk); + if (in_opt_semaphore_ptr != nullptr) + { + in_opt_semaphore_ptr->unlock(); } } else { if (in_should_block) { - vkDeviceWaitIdle(m_device_ptr.lock()->get_device_vk() ); + m_device_ptr->wait_idle(); } if (in_opt_semaphore_ptr != nullptr) { /* We need to set the semaphore manually in this scenario */ - device_locked_ptr->get_universal_queue(0)->submit_command_buffer_with_signal_semaphores(nullptr, /* cmd_buffer_ptr */ - 1, /* n_semaphores_to_signal */ - &in_opt_semaphore_ptr, - true); /* should_block */ + m_device_ptr->get_universal_queue(0)->submit( + Anvil::SubmitInfo::create_signal(1, /* n_semaphores_to_signal */ + &in_opt_semaphore_ptr) + ); } result = m_n_acquire_counter_rounded; } m_n_acquire_counter++; - m_n_acquire_counter_rounded = (m_n_acquire_counter_rounded + 1) % 
m_n_swapchain_images;
+    m_n_acquire_counter_rounded = (m_n_acquire_counter_rounded + 1) % m_create_info_ptr->get_n_images();
     m_last_acquired_image_index = result;
+    *out_result_index_ptr       = result;
-    return result;
+    return result_status;
 }
 /** Please see header for specification */
-std::shared_ptr<Anvil::Swapchain> Anvil::Swapchain::create(std::weak_ptr<Anvil::BaseDevice>          in_device_ptr,
-                                                           std::shared_ptr<Anvil::RenderingSurface> in_parent_surface_ptr,
-                                                           std::shared_ptr<Anvil::Window>           in_window_ptr,
-                                                           VkFormat                                  in_format,
-                                                           VkPresentModeKHR                          in_present_mode,
-                                                           VkImageUsageFlags                         in_usage_flags,
-                                                           uint32_t                                  in_n_images,
-                                                           const ExtensionKHRSwapchainEntrypoints&   in_khr_swapchain_entrypoints,
-                                                           VkSwapchainCreateFlagsKHR                 in_flags)
+Anvil::SwapchainUniquePtr Anvil::Swapchain::create(Anvil::SwapchainCreateInfoUniquePtr in_create_info_ptr)
 {
-    std::shared_ptr<Anvil::Swapchain> result_ptr;
+    SwapchainUniquePtr result_ptr(nullptr,
+                                  std::default_delete<Anvil::Swapchain>() );
+    auto               window_ptr(in_create_info_ptr->get_window() );
     result_ptr.reset(
-        new Anvil::Swapchain(in_device_ptr,
-                             in_parent_surface_ptr,
-                             in_window_ptr,
-                             in_format,
-                             in_present_mode,
-                             in_usage_flags,
-                             in_flags,
-                             in_n_images,
-                             in_khr_swapchain_entrypoints)
+        new Anvil::Swapchain(
+            std::move(in_create_info_ptr)
+        )
     );
-    if (in_window_ptr                 != nullptr &&
-        in_window_ptr->get_platform() == WINDOW_PLATFORM_DUMMY_WITH_PNG_SNAPSHOTS)
+    if (result_ptr)
     {
-        std::dynamic_pointer_cast<Anvil::DummyWindowWithPNGSnapshots>(in_window_ptr)->set_swapchain(result_ptr);
+        if (!result_ptr->init() )
+        {
+            result_ptr.reset();
+
+            goto end;
+        }
+
+        if (window_ptr                 != nullptr &&
+            window_ptr->get_platform() == WINDOW_PLATFORM_DUMMY_WITH_PNG_SNAPSHOTS)
+        {
+            dynamic_cast<Anvil::DummyWindowWithPNGSnapshots*>(window_ptr)->set_swapchain(result_ptr.get() );
+        }
     }
+end:
     return result_ptr;
 }
@@ -224,75 +317,79 @@ void Anvil::Swapchain::destroy_swapchain()
     /* If this assertion failure explodes, your application attempted to release a swapchain without presenting all acquired swapchain images.
      * That's illegal per Vulkan spec.
     */
-    anvil_assert(m_n_acquire_counter == m_n_present_counter);
-    if (m_swapchain != VK_NULL_HANDLE)
+    lock();
     {
-        std::shared_ptr<Anvil::BaseDevice> device_locked_ptr(m_device_ptr);
+        if (m_swapchain != VK_NULL_HANDLE)
+        {
+            const auto& khr_swapchain_entrypoints = m_device_ptr->get_extension_khr_swapchain_entrypoints();
+
+            // anvil_assert(m_n_acquire_counter == m_n_present_counter);
-        m_khr_swapchain_entrypoints.vkDestroySwapchainKHR(device_locked_ptr->get_device_vk(),
-                                                          m_swapchain,
-                                                          nullptr /* pAllocator */);
+            khr_swapchain_entrypoints.vkDestroySwapchainKHR(m_device_ptr->get_device_vk(),
+                                                            m_swapchain,
+                                                            nullptr /* pAllocator */);
+        }
         m_swapchain = VK_NULL_HANDLE;
     }
+    unlock();
 }
 /** Please see header for specification */
-void Anvil::Swapchain::disable_destroy_swapchain_before_parent_window_closes_behavior()
+Anvil::Image* Anvil::Swapchain::get_image(uint32_t in_n_swapchain_image) const
 {
-    m_destroy_swapchain_before_parent_window_closes = false;
-}
+    anvil_assert(in_n_swapchain_image < m_n_images );
-/** Please see header for specification */
-std::shared_ptr<Anvil::Image> Anvil::Swapchain::get_image(uint32_t in_n_swapchain_image) const
-{
-    anvil_assert(in_n_swapchain_image < m_n_swapchain_images);
-
-    return m_image_ptrs[in_n_swapchain_image];
+    return m_image_ptrs.at(in_n_swapchain_image).get();
 }
 /** Please see header for specification */
-std::shared_ptr<Anvil::ImageView> Anvil::Swapchain::get_image_view(uint32_t in_n_swapchain_image) const
+Anvil::ImageView* Anvil::Swapchain::get_image_view(uint32_t in_n_swapchain_image) const
 {
-    anvil_assert(in_n_swapchain_image < m_n_swapchain_images);
+    anvil_assert(in_n_swapchain_image < m_n_images );
-    return m_image_view_ptrs[in_n_swapchain_image];
+    return m_image_view_ptrs.at(in_n_swapchain_image).get();
 }
 /** Initializes the swapchain object.
*/ -void Anvil::Swapchain::init() +bool Anvil::Swapchain::init() { - VkSwapchainCreateInfoKHR create_info; - std::shared_ptr device_locked_ptr = m_device_ptr.lock(); - uint32_t n_swapchain_images = 0; - VkResult result = VK_ERROR_INITIALIZATION_FAILED; - const RenderingSurfaceType rendering_surface = m_parent_surface_ptr->get_type(); - std::vector swapchain_images; - const VkSurfaceTransformFlagBitsKHR swapchain_transformation = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR; - const WindowPlatform window_platform = m_window_ptr->get_platform(); - const bool is_offscreen_rendering_enabled = (window_platform == WINDOW_PLATFORM_DUMMY || - window_platform == WINDOW_PLATFORM_DUMMY_WITH_PNG_SNAPSHOTS) && - (rendering_surface == Anvil::RENDERING_SURFACE_TYPE_GENERAL); - - ANVIL_REDUNDANT_VARIABLE(result); - - m_image_view_format = m_image_format; - m_size.width = m_parent_surface_ptr->get_width (); - m_size.height = m_parent_surface_ptr->get_height(); + auto parent_surface_ptr = m_create_info_ptr->get_rendering_surface(); + VkResult result = VK_ERROR_INITIALIZATION_FAILED; + Anvil::StructChainUniquePtr struct_chain_ptr; + std::vector swapchain_images; + const Anvil::SurfaceTransformFlagBits swapchain_transformation = Anvil::SurfaceTransformFlagBits::IDENTITY_BIT_KHR; + const WindowPlatform window_platform = m_create_info_ptr->get_window()->get_platform(); + const bool is_offscreen_rendering_enabled = (window_platform == WINDOW_PLATFORM_DUMMY || + window_platform == WINDOW_PLATFORM_DUMMY_WITH_PNG_SNAPSHOTS); + + parent_surface_ptr->update_surface_extents(); + + m_n_images = 0; + m_size.width = parent_surface_ptr->get_width (); + m_size.height = parent_surface_ptr->get_height(); /* not doing offscreen rendering */ if (!is_offscreen_rendering_enabled) { - std::shared_ptr sgpu_device_locked_ptr(std::dynamic_pointer_cast(device_locked_ptr) ); + Anvil::CompositeAlphaFlagBits composite_alpha = Anvil::CompositeAlphaFlagBits::NONE; + const auto& khr_swapchain_entrypoints = m_device_ptr->get_extension_khr_swapchain_entrypoints(); + Anvil::StructChainer struct_chainer; - #ifdef _DEBUG { - uint32_t n_physical_devices = 1; - bool result_bool = false; - const char* required_surface_extension_name = nullptr; - VkSurfaceCapabilitiesKHR surface_caps; - VkCompositeAlphaFlagsKHR supported_composite_alpha_flags = static_cast(0); - VkSurfaceTransformFlagsKHR supported_surface_transform_flags; + const Anvil::MGPUDevice* mgpu_device_ptr(dynamic_cast(m_device_ptr) ); + const Anvil::SGPUDevice* sgpu_device_ptr(dynamic_cast(m_device_ptr) ); + + const Anvil::DeviceType device_type = m_device_ptr->get_type(); + uint32_t n_physical_devices = 0; + bool result_bool = false; + const char* required_surface_extension_name = nullptr; + Anvil::SurfaceCapabilities surface_caps; + Anvil::CompositeAlphaFlags supported_composite_alpha_flags; + Anvil::SurfaceTransformFlags supported_surface_transform_flags; + + ANVIL_REDUNDANT_VARIABLE(required_surface_extension_name); + ANVIL_REDUNDANT_VARIABLE(result_bool); #ifdef _WIN32 #if defined(ANVIL_INCLUDE_WIN3264_WINDOW_SYSTEM_SUPPORT) @@ -302,69 +399,151 @@ void Anvil::Swapchain::init() #if defined(ANVIL_INCLUDE_XCB_WINDOW_SYSTEM_SUPPORT) required_surface_extension_name = VK_KHR_XCB_SURFACE_EXTENSION_NAME; #endif + #if defined(ANVIL_INCLUDE_WAYLAND_WINDOW_SYSTEM_SUPPORT) + required_surface_extension_name = VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME; + #endif #endif - anvil_assert(required_surface_extension_name == nullptr || - 
-                         m_device_ptr.lock()->get_parent_instance().lock()->is_instance_extension_supported(required_surface_extension_name) );
+            anvil_assert(required_surface_extension_name == nullptr ||
+                         m_device_ptr->get_parent_instance()->is_instance_extension_supported(required_surface_extension_name) );
+
+            switch (device_type)
+            {
+                case Anvil::DeviceType::MULTI_GPU:  n_physical_devices = mgpu_device_ptr->get_n_physical_devices(); break;
+                case Anvil::DeviceType::SINGLE_GPU: n_physical_devices = 1;                                         break;
+
+                default:
+                {
+                    anvil_assert_fail();
+                }
+            }
 
             for (uint32_t n_physical_device = 0;
                           n_physical_device < n_physical_devices;
                         ++n_physical_device)
             {
-                std::shared_ptr<Anvil::PhysicalDevice> current_physical_device_ptr(sgpu_device_locked_ptr->get_physical_device() );
+                const Anvil::PhysicalDevice* current_physical_device_ptr = nullptr;
+
+                ANVIL_REDUNDANT_VARIABLE(current_physical_device_ptr);
+
+                switch (device_type)
+                {
+                    case Anvil::DeviceType::MULTI_GPU:  current_physical_device_ptr = mgpu_device_ptr->get_physical_device(n_physical_device); break;
+                    case Anvil::DeviceType::SINGLE_GPU: current_physical_device_ptr = sgpu_device_ptr->get_physical_device();                  break;
+
+                    default:
+                    {
+                        anvil_assert_fail();
+                    }
+                }
 
                 /* Ensure opaque composite alpha mode is supported */
-                anvil_assert(m_parent_surface_ptr->get_supported_composite_alpha_flags(current_physical_device_ptr,
-                                                                                      &supported_composite_alpha_flags) );
+                anvil_assert(parent_surface_ptr->get_supported_composite_alpha_flags(current_physical_device_ptr,
+                                                                                    &supported_composite_alpha_flags) );
 
-                anvil_assert(supported_composite_alpha_flags & VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR);
+                {
+                    composite_alpha = Anvil::CompositeAlphaFlagBits::OPAQUE_BIT_KHR;
+
+                    anvil_assert((supported_composite_alpha_flags & composite_alpha) != 0);
+                }
 
                 /* Ensure we can use the swapchain image format */
-                anvil_assert(m_parent_surface_ptr->is_compatible_with_image_format(current_physical_device_ptr,
-                                                                                   m_image_format,
-                                                                                  &result_bool) );
+                anvil_assert(parent_surface_ptr->is_compatible_with_image_format(current_physical_device_ptr,
+                                                                                 m_create_info_ptr->get_format(),
+                                                                                &result_bool) );
                 anvil_assert(result_bool);
 
                 /* Ensure the transformation we're about to request is supported by the rendering surface */
-                anvil_assert(m_parent_surface_ptr->get_supported_transformations(current_physical_device_ptr,
-                                                                                &supported_surface_transform_flags) );
+                anvil_assert(parent_surface_ptr->get_supported_transformations(current_physical_device_ptr,
+                                                                              &supported_surface_transform_flags) );
 
-                anvil_assert(supported_surface_transform_flags & swapchain_transformation);
+                anvil_assert((supported_surface_transform_flags & swapchain_transformation) != 0);
 
                 /* Ensure the requested number of swapchain images is reasonable */
-                anvil_assert(m_parent_surface_ptr->get_capabilities(current_physical_device_ptr,
-                                                                   &surface_caps) );
+                anvil_assert(parent_surface_ptr->get_capabilities(current_physical_device_ptr,
+                                                                 &surface_caps) );
 
-                anvil_assert(surface_caps.maxImageCount == 0 ||
-                             surface_caps.maxImageCount >= m_n_swapchain_images);
+                anvil_assert(surface_caps.max_image_count == 0 ||
+                             surface_caps.max_image_count >= m_create_info_ptr->get_n_images() );
             }
         }
-        #endif
 
-        create_info.clipped               = true; /* we won't be reading from the presentable images */
-        create_info.compositeAlpha        = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
-        create_info.flags                 = m_flags;
-        create_info.imageArrayLayers      = 1;
-        create_info.imageColorSpace       = VK_COLORSPACE_SRGB_NONLINEAR_KHR;
-        create_info.imageExtent.height    = m_parent_surface_ptr->get_height();
-        create_info.imageExtent.width     = m_parent_surface_ptr->get_width();
-        create_info.imageFormat           = m_image_format;
-        create_info.imageSharingMode      = VK_SHARING_MODE_EXCLUSIVE;
-        create_info.imageUsage            = m_usage_flags;
-        create_info.minImageCount         = m_n_swapchain_images;
-        create_info.oldSwapchain          = VK_NULL_HANDLE;
-        create_info.pNext                 = nullptr;
-        create_info.pQueueFamilyIndices   = nullptr;
-        create_info.presentMode           = m_present_mode;
-        create_info.preTransform          = swapchain_transformation;
-        create_info.queueFamilyIndexCount = 0;
-        create_info.sType                 = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
-        create_info.surface               = m_parent_surface_ptr->get_surface();
-
-        result = m_khr_swapchain_entrypoints.vkCreateSwapchainKHR(device_locked_ptr->get_device_vk(),
-                                                                 &create_info,
-                                                                  nullptr, /* pAllocator */
-                                                                 &m_swapchain);
+        {
+            VkSwapchainCreateInfoKHR create_info;
+            const auto&              old_swapchain_ptr = m_create_info_ptr->get_old_swapchain();
+
+            create_info.clipped               = m_create_info_ptr->get_clipped();
+            create_info.compositeAlpha        = static_cast<VkCompositeAlphaFlagBitsKHR>(composite_alpha);
+            create_info.flags                 = m_create_info_ptr->get_flags().get_vk();
+            create_info.imageArrayLayers      = 1;
+            create_info.imageColorSpace       = static_cast<VkColorSpaceKHR>(m_create_info_ptr->get_color_space() );
+            create_info.imageExtent.height    = parent_surface_ptr->get_height();
+            create_info.imageExtent.width     = parent_surface_ptr->get_width ();
+            create_info.imageFormat           = static_cast<VkFormat>(m_create_info_ptr->get_format() );
+            create_info.imageSharingMode      = VK_SHARING_MODE_EXCLUSIVE;
+            create_info.imageUsage            = m_create_info_ptr->get_usage_flags().get_vk();
+            create_info.minImageCount         = m_create_info_ptr->get_n_images ();
+            create_info.oldSwapchain          = (old_swapchain_ptr != nullptr) ? old_swapchain_ptr->get_swapchain_vk()
+                                                                               : VK_NULL_HANDLE;
+            create_info.pNext                 = nullptr;
+            create_info.pQueueFamilyIndices   = nullptr;
+            create_info.presentMode           = static_cast<VkPresentModeKHR>(m_create_info_ptr->get_present_mode() );
+            create_info.preTransform          = static_cast<VkSurfaceTransformFlagBitsKHR>(swapchain_transformation);
+            create_info.queueFamilyIndexCount = 0;
+            create_info.sType                 = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
+            create_info.surface               = parent_surface_ptr->get_surface();
+
+            struct_chainer.append_struct(create_info);
+        }
+
+        /* If present mode flags specific to mGPU support (exposed via VK_KHR_device_group) have been requested,
+         * make sure to chain this information */
+        const auto mgpu_present_mode_flags = m_create_info_ptr->get_mgpu_present_mode_flags();
+
+        if (mgpu_present_mode_flags != Anvil::DeviceGroupPresentModeFlagBits::LOCAL_BIT_KHR)
+        {
+            VkDeviceGroupSwapchainCreateInfoKHR mgpu_create_info;
+
+            mgpu_create_info.modes = mgpu_present_mode_flags.get_vk();
+            mgpu_create_info.pNext = nullptr;
+            mgpu_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR;
+
+            struct_chainer.append_struct(mgpu_create_info);
+        }
+
+        if ((m_create_info_ptr->get_flags() & Anvil::SwapchainCreateFlagBits::CREATE_MUTABLE_FORMAT_BIT) != 0)
+        {
+            const Anvil::Format*           image_formats_ptr = nullptr;
+            VkImageFormatListCreateInfoKHR image_format_list_create_info;
+            uint32_t                       n_image_formats   = 0;
+
+            anvil_assert(m_device_ptr->get_extension_info()->khr_swapchain_mutable_format() );
+
+            m_create_info_ptr->get_view_format_list(&image_formats_ptr,
+                                                    &n_image_formats);
+
+            anvil_assert(image_formats_ptr != nullptr);
+            anvil_assert(n_image_formats   >  0);
+
+            image_format_list_create_info.pNext        = nullptr;
+            image_format_list_create_info.pViewFormats = reinterpret_cast<const VkFormat*>(image_formats_ptr);
+            image_format_list_create_info.sType        = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR;
+            image_format_list_create_info.viewFormatCount = n_image_formats;
+
+            struct_chainer.append_struct(image_format_list_create_info);
+        }
+
+        struct_chain_ptr = struct_chainer.create_chain();
+
+        parent_surface_ptr->lock();
+        {
+            result = khr_swapchain_entrypoints.vkCreateSwapchainKHR(m_device_ptr->get_device_vk(),
+                                                                    struct_chain_ptr->get_root_struct(),
+                                                                    nullptr, /* pAllocator */
+                                                                   &m_swapchain);
+        }
+        parent_surface_ptr->unlock();
+
         anvil_assert_vk_call_succeeded(result);
 
         if (is_vk_call_successful(result) )
         {
@@ -372,120 +551,198 @@ void Anvil::Swapchain::init()
         }
 
         /* Retrieve swap-chain images */
-        result = m_khr_swapchain_entrypoints.vkGetSwapchainImagesKHR(device_locked_ptr->get_device_vk(),
-                                                                     m_swapchain,
-                                                                    &n_swapchain_images,
-                                                                     nullptr); /* pSwapchainImages */
+        result = khr_swapchain_entrypoints.vkGetSwapchainImagesKHR(m_device_ptr->get_device_vk(),
+                                                                   m_swapchain,
+                                                                  &m_n_images,
+                                                                   nullptr); /* pSwapchainImages */
 
         anvil_assert_vk_call_succeeded(result);
-        anvil_assert                  (n_swapchain_images > 0);
+        anvil_assert                  (m_n_images > 0);
 
-        swapchain_images.resize(n_swapchain_images);
+        swapchain_images.resize(m_n_images);
 
-        result = m_khr_swapchain_entrypoints.vkGetSwapchainImagesKHR(device_locked_ptr->get_device_vk(),
-                                                                     m_swapchain,
-                                                                    &n_swapchain_images,
-                                                                    &swapchain_images[0]);
+        result = khr_swapchain_entrypoints.vkGetSwapchainImagesKHR(m_device_ptr->get_device_vk(),
+                                                                   m_swapchain,
+                                                                  &m_n_images,
+                                                                  &swapchain_images[0]);
 
         anvil_assert_vk_call_succeeded(result);
     }
    else /* offscreen rendering */
    {
-        m_usage_flags = static_cast<VkImageUsageFlagBits>(static_cast<VkImageUsageFlags>(m_usage_flags) | VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
-        n_swapchain_images = m_n_swapchain_images;
+        m_create_info_ptr->set_usage_flags(m_create_info_ptr->get_usage_flags() | Anvil::ImageUsageFlagBits::TRANSFER_SRC_BIT);
+
+        m_n_images = m_create_info_ptr->get_n_images();
    }
 
-    for (size_t n_result_image = 0;
-                n_result_image < n_swapchain_images;
-              ++n_result_image)
+    /* Adjust capacity of m_image_ptrs and m_image_view_ptrs to the number of swapchain images actually created. */
+    m_image_ptrs.resize     (m_n_images);
+    m_image_view_ptrs.resize(m_n_images);
+
+    for (uint32_t n_result_image = 0;
+                  n_result_image < m_n_images;
+                ++n_result_image)
     {
         /* Spawn an Image wrapper class for the swap-chain image. */
         if (!is_offscreen_rendering_enabled)
         {
-            /* NOTE: The constructor we use below is special, in that the wrapped VkImage instance will not be destroyed
-             * when the Image instance goes out of scope.
-             */
-            m_image_ptrs[n_result_image] = Anvil::Image::create_nonsparse(m_device_ptr,
-                                                                          create_info,
-                                                                          swapchain_images[n_result_image]);
+            auto create_info_ptr = Anvil::ImageCreateInfo::create_swapchain_wrapper(m_device_ptr,
+                                                                                    this,
+                                                                                    swapchain_images[n_result_image],
+                                                                                    n_result_image);
+
+            create_info_ptr->set_mt_safety(Anvil::Utils::convert_boolean_to_mt_safety_enum(is_mt_safe() ) );
+
+            m_image_ptrs[n_result_image] = Anvil::Image::create(std::move(create_info_ptr) );
        }
        else
        {
-            m_image_ptrs[n_result_image] = Anvil::Image::create_nonsparse(m_device_ptr,
-                                                                          VK_IMAGE_TYPE_2D,
-                                                                          m_image_format,
-                                                                          VK_IMAGE_TILING_OPTIMAL,
-                                                                          m_usage_flags,
-                                                                          m_size.width,
-                                                                          m_size.height,
-                                                                          1, /* base_mipmap_depth */
-                                                                          1,
-                                                                          VK_SAMPLE_COUNT_1_BIT,
-                                                                          QUEUE_FAMILY_GRAPHICS_BIT,
-                                                                          VK_SHARING_MODE_EXCLUSIVE,
-                                                                          false, /* in_use_full_mipmap_chain */
-                                                                          0,     /* in_memory_features       */
-                                                                          0,     /* in_create_flags          */
-                                                                          VK_IMAGE_LAYOUT_GENERAL,
-                                                                          nullptr);
+            auto create_info_ptr = Anvil::ImageCreateInfo::create_alloc(m_device_ptr,
+                                                                        Anvil::ImageType::_2D,
+                                                                        m_create_info_ptr->get_format(),
+                                                                        Anvil::ImageTiling::OPTIMAL,
+                                                                        m_create_info_ptr->get_usage_flags(),
+                                                                        m_size.width,
+                                                                        m_size.height,
+                                                                        1, /* base_mipmap_depth */
+                                                                        1,
+                                                                        Anvil::SampleCountFlagBits::_1_BIT,
+                                                                        Anvil::QueueFamilyFlagBits::GRAPHICS_BIT,
+                                                                        Anvil::SharingMode::EXCLUSIVE,
+                                                                        false, /* in_use_full_mipmap_chain */
+                                                                        Anvil::MemoryFeatureFlagBits::NONE,
+                                                                        Anvil::ImageCreateFlagBits::NONE,
+                                                                        Anvil::ImageLayout::GENERAL,
+                                                                        nullptr);
+
+            create_info_ptr->set_mt_safety(Anvil::Utils::convert_boolean_to_mt_safety_enum(is_mt_safe() ) );
+
+            m_image_ptrs[n_result_image] = Anvil::Image::create(std::move(create_info_ptr) );
        }
 
        /* For each swap-chain image, create a relevant view */
-        m_image_view_ptrs[n_result_image] = Anvil::ImageView::create_2D(m_device_ptr,
-                                                                        m_image_ptrs[n_result_image],
+        {
+            auto create_info_ptr = Anvil::ImageViewCreateInfo::create_2D(m_device_ptr,
+                                                                         m_image_ptrs[n_result_image].get(),
                                                                          0, /* n_base_layer        */
                                                                          0, /* n_base_mipmap_level */
                                                                          1, /* n_mipmaps           */
-                                                                        VK_IMAGE_ASPECT_COLOR_BIT,
-                                                                        m_image_format,
-                                                                        VK_COMPONENT_SWIZZLE_R,
-                                                                        VK_COMPONENT_SWIZZLE_G,
-                                                                        VK_COMPONENT_SWIZZLE_B,
-                                                                        VK_COMPONENT_SWIZZLE_A);
+                                                                         Anvil::ImageAspectFlagBits::COLOR_BIT,
+                                                                         m_create_info_ptr->get_format(),
+                                                                         Anvil::ComponentSwizzle::R,
+                                                                         Anvil::ComponentSwizzle::G,
+                                                                         Anvil::ComponentSwizzle::B,
+                                                                         Anvil::ComponentSwizzle::A);
+
+            create_info_ptr->set_mt_safety(Anvil::Utils::convert_boolean_to_mt_safety_enum(is_mt_safe() ) );
+
+            m_image_view_ptrs[n_result_image] = Anvil::ImageView::create(std::move(create_info_ptr) );
+        }
+
+        result = VK_SUCCESS;
    }
 
    /* Sign up for present submission notifications. This is needed to ensure that number of presented frames ==
     * number of acquired frames at destruction time.
     */
    {
-        std::vector<std::shared_ptr<Anvil::Queue> > queues;
-
-        const std::vector<uint32_t>*       queue_fams_with_present_support_ptr(nullptr);
-        std::shared_ptr<Anvil::SGPUDevice> sgpu_device_locked_ptr             (std::dynamic_pointer_cast<Anvil::SGPUDevice>(device_locked_ptr) );
-        auto                               physical_device_ptr                (sgpu_device_locked_ptr->get_physical_device() );
+        std::vector<Anvil::Queue*> queues;
 
-        if (!m_parent_surface_ptr->get_queue_families_with_present_support(physical_device_ptr,
-                                                                          &queue_fams_with_present_support_ptr) )
+        switch (m_device_ptr->get_type() )
        {
-            anvil_assert_fail();
-        }
-
-        if (queue_fams_with_present_support_ptr == nullptr)
-        {
-            anvil_assert(queue_fams_with_present_support_ptr != nullptr);
-        }
-        else
-        {
-            for (const auto queue_fam : *queue_fams_with_present_support_ptr)
+            case Anvil::DeviceType::MULTI_GPU:
            {
-                const uint32_t n_queues = sgpu_device_locked_ptr->get_n_queues(queue_fam);
+                const Anvil::MGPUDevice* mgpu_device_ptr   (dynamic_cast<const Anvil::MGPUDevice*>(m_device_ptr) );
+                const uint32_t           n_physical_devices(mgpu_device_ptr->get_n_physical_devices() );
 
-                for (uint32_t n_queue = 0;
-                              n_queue < n_queues;
-                            ++n_queue)
+                for (uint32_t n_physical_device = 0;
+                              n_physical_device < n_physical_devices;
+                            ++n_physical_device)
                {
-                    auto queue_ptr = sgpu_device_locked_ptr->get_queue(queue_fam,
-                                                                       n_queue);
+                    auto                         physical_device_ptr                (mgpu_device_ptr->get_physical_device(n_physical_device) );
+                    const std::vector<uint32_t>* queue_fams_with_present_support_ptr(nullptr);
+                    auto                         rendering_surface_ptr              (m_create_info_ptr->get_rendering_surface() );
 
-                    anvil_assert(queue_ptr != nullptr);
+                    if (!rendering_surface_ptr->get_queue_families_with_present_support(physical_device_ptr,
+                                                                                       &queue_fams_with_present_support_ptr) )
+                    {
+                        break;
+                    }
 
-                    if (std::find(queues.begin(),
-                                  queues.end(),
-                                  queue_ptr) == queues.end() )
+                    if (queue_fams_with_present_support_ptr == nullptr)
+                    {
+                        anvil_assert(queue_fams_with_present_support_ptr != nullptr);
+                    }
+                    else
+                    {
+                        for (const auto queue_fam : *queue_fams_with_present_support_ptr)
+                        {
+                            const uint32_t n_queues = mgpu_device_ptr->get_n_queues(queue_fam);
+
+                            for (uint32_t n_queue = 0;
+                                          n_queue < n_queues;
+                                        ++n_queue)
+                            {
+                                auto queue_ptr = mgpu_device_ptr->get_queue_for_queue_family_index(queue_fam,
+                                                                                                   n_queue);
+
+                                if (std::find(queues.begin(),
+                                              queues.end(),
+                                              queue_ptr) == queues.end() )
+                                {
+                                    queues.push_back(queue_ptr);
+                                }
+                            }
+                        }
+                    }
+                }
+
+                break;
+            }
+
+            case Anvil::DeviceType::SINGLE_GPU:
+            {
+                const std::vector<uint32_t>* queue_fams_with_present_support_ptr(nullptr);
+                const auto                   rendering_surface_ptr              (m_create_info_ptr->get_rendering_surface() );
+                const Anvil::SGPUDevice*     sgpu_device_ptr                    (dynamic_cast<const Anvil::SGPUDevice*>(m_device_ptr) );
+                auto                         physical_device_ptr                (sgpu_device_ptr->get_physical_device() );
+
+                if (!rendering_surface_ptr->get_queue_families_with_present_support(physical_device_ptr,
+                                                                                   &queue_fams_with_present_support_ptr) )
+                {
+                    break;
+                }
+
+                if (queue_fams_with_present_support_ptr == nullptr)
+                {
+                    anvil_assert(queue_fams_with_present_support_ptr != nullptr);
+                }
+                else
+                {
+                    for (const auto queue_fam : *queue_fams_with_present_support_ptr)
                    {
-                        queues.push_back(queue_ptr);
+                        const uint32_t n_queues = sgpu_device_ptr->get_n_queues(queue_fam);
+
+                        for (uint32_t n_queue = 0;
+                                      n_queue < n_queues;
+                                    ++n_queue)
+                        {
+                            auto queue_ptr = sgpu_device_ptr->get_queue_for_queue_family_index(queue_fam,
+                                                                                               n_queue);
+
+                            anvil_assert(queue_ptr != nullptr);
+
+                            if (std::find(queues.begin(),
+                                          queues.end(),
+                                          queue_ptr) == queues.end() )
+                            {
+                                queues.push_back(queue_ptr);
+                            }
+                        }
                    }
                }
+
+                break;
            }
        }
 
@@ -494,7 +751,8 @@ void Anvil::Swapchain::init()
            queue_ptr->register_for_callbacks(
                QUEUE_CALLBACK_ID_PRESENT_REQUEST_ISSUED,
                std::bind(&Swapchain::on_present_request_issued,
-                          this),
+                          this,
+                          std::placeholders::_1),
                this
            );
@@ -505,12 +763,14 @@ void Anvil::Swapchain::init()
    /* Sign up for "about to close the parent window" notifications. Swapchain instance SHOULD be deinitialized
     * before the window is destroyed, so we're going to act as nice citizens.
     */
-    m_window_ptr->register_for_callbacks(
+    m_create_info_ptr->get_window()->register_for_callbacks(
        WINDOW_CALLBACK_ID_ABOUT_TO_CLOSE,
        std::bind(&Swapchain::on_parent_window_about_to_close,
                  this),
        this
    );
+
+    return is_vk_call_successful(result);
 }
 
 /** TODO */
@@ -523,7 +783,48 @@ void Anvil::Swapchain::on_parent_window_about_to_close()
 }
 
 /** TODO */
-void Anvil::Swapchain::on_present_request_issued()
+void Anvil::Swapchain::on_present_request_issued(Anvil::CallbackArgument* in_callback_raw_ptr)
 {
-    m_n_present_counter++;
+    auto* callback_arg_ptr = dynamic_cast<Anvil::OnPresentRequestIssuedCallbackArgument*>(in_callback_raw_ptr);
+
+    anvil_assert(callback_arg_ptr != nullptr);
+    if (callback_arg_ptr != nullptr)
+    {
+        if (callback_arg_ptr->swapchain_ptr == this)
+        {
+            m_n_present_counter++;
+        }
+    }
+}
+
+/** Please see header for specification */
+void Anvil::Swapchain::set_hdr_metadata(const uint32_t&              in_n_swapchains,
+                                        Anvil::Swapchain**           in_swapchains_ptr_ptr,
+                                        const Anvil::HdrMetadataEXT* in_metadata_items_ptr)
+{
+    anvil_assert(in_n_swapchains > 0);
+
+    auto        device_ptr       = in_swapchains_ptr_ptr[0]->get_create_info_ptr()->get_device();
+    const auto& entrypoints      = device_ptr->get_extension_ext_hdr_metadata_entrypoints();
+    auto        metadata_vk_vec  = std::vector<VkHdrMetadataEXT>(in_n_swapchains);
+    auto        swapchain_vk_vec = std::vector<VkSwapchainKHR>  (in_n_swapchains);
+
+    for (uint32_t n_swapchain = 0;
+                  n_swapchain < in_n_swapchains;
+                ++n_swapchain)
+    {
+        metadata_vk_vec.at (n_swapchain) = in_metadata_items_ptr[n_swapchain].get_vk          ();
+        swapchain_vk_vec.at(n_swapchain) = in_swapchains_ptr_ptr[n_swapchain]->get_swapchain_vk();
+
+        #if defined(_DEBUG)
+        {
+            anvil_assert(in_swapchains_ptr_ptr[n_swapchain]->get_create_info_ptr()->get_device() == device_ptr);
+        }
+        #endif
+    }
+
+    entrypoints.vkSetHdrMetadataEXT(device_ptr->get_device_vk(),
+                                    in_n_swapchains,
+                                   &swapchain_vk_vec.at(0),
+                                   &metadata_vk_vec.at (0) );
 }
\ No newline at end of file
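
Note (not part of the diff above): a minimal usage sketch of the reworked accessors this change introduces. It assumes an already-initialized Anvil::Swapchain* and relies only on calls referenced in the diff (get_create_info_ptr(), get_n_images(), get_image(), get_image_view()); treat it as an illustration of the new raw-pointer ownership model, not canonical Anvil sample code.

    // Illustrative sketch only -- swapchain creation is assumed to have happened elsewhere.
    void enumerate_swapchain_images(Anvil::Swapchain* in_swapchain_ptr)
    {
        // The create-info object mirrors the m_n_images count established in init().
        const uint32_t n_images = in_swapchain_ptr->get_create_info_ptr()->get_n_images();

        for (uint32_t n_image = 0; n_image < n_images; ++n_image)
        {
            // Both getters now return non-owning raw pointers; the swapchain keeps unique
            // ownership of the Image / ImageView wrappers (m_image_ptrs / m_image_view_ptrs).
            Anvil::Image*     image_ptr      = in_swapchain_ptr->get_image     (n_image);
            Anvil::ImageView* image_view_ptr = in_swapchain_ptr->get_image_view(n_image);

            /* ... use image_ptr / image_view_ptr, e.g. when setting up per-swapchain-image framebuffers ... */
            (void) image_ptr;
            (void) image_view_ptr;
        }
    }

Callers that previously held std::shared_ptr<Anvil::Image> / std::shared_ptr<Anvil::ImageView> returned by the old getters must therefore not outlive the owning Anvil::Swapchain instance.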